author     Chih-hung Hsieh <chh@google.com>  2016-01-20 17:50:13 +0000
committer  android-build-merger <android-build-merger@google.com>  2016-01-20 17:50:13 +0000
commit     b3cb8ab4ede8bb77f0bdef2715efc2c1e6267072 (patch)
tree       28c4cf735dd5bd9cc8f1ccd06fff8a173b20d1cb
parent     a4acd9d6bc9b3b033d7d274316e75ee067df8d20 (diff)
parent     9a337512d97e37afc142dee4fd50a41b741a87d2 (diff)
download   webrtc-nougat-mr1.7-release.tar.gz
Merge "Merge upstream SHA 04cb763"android-cts_7.1_r1android-cts-7.1_r9android-cts-7.1_r8android-cts-7.1_r7android-cts-7.1_r6android-cts-7.1_r5android-cts-7.1_r4android-cts-7.1_r3android-cts-7.1_r29android-cts-7.1_r28android-cts-7.1_r27android-cts-7.1_r26android-cts-7.1_r25android-cts-7.1_r24android-cts-7.1_r23android-cts-7.1_r22android-cts-7.1_r21android-cts-7.1_r20android-cts-7.1_r2android-cts-7.1_r19android-cts-7.1_r18android-cts-7.1_r17android-cts-7.1_r16android-cts-7.1_r15android-cts-7.1_r14android-cts-7.1_r13android-cts-7.1_r12android-cts-7.1_r11android-cts-7.1_r10android-cts-7.1_r1android-cts-7.0_r9android-cts-7.0_r8android-cts-7.0_r7android-cts-7.0_r6android-cts-7.0_r5android-cts-7.0_r4android-cts-7.0_r33android-cts-7.0_r32android-cts-7.0_r31android-cts-7.0_r30android-cts-7.0_r3android-cts-7.0_r29android-cts-7.0_r28android-cts-7.0_r27android-cts-7.0_r26android-cts-7.0_r25android-cts-7.0_r24android-cts-7.0_r23android-cts-7.0_r22android-cts-7.0_r21android-cts-7.0_r20android-cts-7.0_r2android-cts-7.0_r19android-cts-7.0_r18android-cts-7.0_r17android-cts-7.0_r16android-cts-7.0_r15android-cts-7.0_r14android-cts-7.0_r13android-cts-7.0_r12android-cts-7.0_r11android-cts-7.0_r10android-cts-7.0_r1android-7.1.2_r9android-7.1.2_r8android-7.1.2_r6android-7.1.2_r5android-7.1.2_r4android-7.1.2_r39android-7.1.2_r38android-7.1.2_r37android-7.1.2_r36android-7.1.2_r33android-7.1.2_r32android-7.1.2_r30android-7.1.2_r3android-7.1.2_r29android-7.1.2_r28android-7.1.2_r27android-7.1.2_r25android-7.1.2_r24android-7.1.2_r23android-7.1.2_r2android-7.1.2_r19android-7.1.2_r18android-7.1.2_r17android-7.1.2_r16android-7.1.2_r15android-7.1.2_r14android-7.1.2_r13android-7.1.2_r12android-7.1.2_r11android-7.1.2_r10android-7.1.2_r1android-7.1.1_r9android-7.1.1_r8android-7.1.1_r7android-7.1.1_r61android-7.1.1_r60android-7.1.1_r6android-7.1.1_r59android-7.1.1_r58android-7.1.1_r57android-7.1.1_r56android-7.1.1_r55android-7.1.1_r54android-7.1.1_r53android-7.1.1_r52android-7.1.1_r51android-7.1.1_r50android-7.1.1_r49android-7.1.1_r48android-7.1.1_r47android-7.1.1_r46android-7.1.1_r45android-7.1.1_r44android-7.1.1_r43android-7.1.1_r42android-7.1.1_r41android-7.1.1_r40android-7.1.1_r4android-7.1.1_r39android-7.1.1_r38android-7.1.1_r35android-7.1.1_r33android-7.1.1_r32android-7.1.1_r31android-7.1.1_r3android-7.1.1_r28android-7.1.1_r27android-7.1.1_r26android-7.1.1_r25android-7.1.1_r24android-7.1.1_r23android-7.1.1_r22android-7.1.1_r21android-7.1.1_r20android-7.1.1_r2android-7.1.1_r17android-7.1.1_r16android-7.1.1_r15android-7.1.1_r14android-7.1.1_r13android-7.1.1_r12android-7.1.1_r11android-7.1.1_r10android-7.1.1_r1android-7.1.0_r7android-7.1.0_r6android-7.1.0_r5android-7.1.0_r4android-7.1.0_r3android-7.1.0_r2android-7.1.0_r1android-7.0.0_r9android-7.0.0_r8android-7.0.0_r7android-7.0.0_r6android-7.0.0_r5android-7.0.0_r4android-7.0.0_r36android-7.0.0_r35android-7.0.0_r34android-7.0.0_r33android-7.0.0_r32android-7.0.0_r31android-7.0.0_r30android-7.0.0_r3android-7.0.0_r29android-7.0.0_r28android-7.0.0_r27android-7.0.0_r24android-7.0.0_r21android-7.0.0_r19android-7.0.0_r17android-7.0.0_r15android-7.0.0_r14android-7.0.0_r13android-7.0.0_r12android-7.0.0_r11android-7.0.0_r10android-7.0.0_r1nougat-releasenougat-mr2.3-releasenougat-mr2.2-releasenougat-mr2.1-releasenougat-mr2-security-releasenougat-mr2-releasenougat-mr2-pixel-releasenougat-mr2-devnougat-mr1.8-releasenougat-mr1.7-releasenougat-mr1.6-releasenougat-mr1.5-releasenougat-mr1.4-releasenougat-mr1.3-releasenougat-mr1.2-releasenougat-mr1.1-releasenougat-mr1-volantis-releasenougat-
mr1-security-releasenougat-mr1-releasenougat-mr1-flounder-releasenougat-mr1-devnougat-mr1-cts-releasenougat-mr0.5-releasenougat-dr1-releasenougat-devnougat-cts-releasenougat-bugfix-release
am: 9a337512d9

* commit '9a337512d97e37afc142dee4fd50a41b741a87d2': (797 commits)
  Add tests for verifying transport feedback for audio and video.
  Eliminate defines in talk/
  Revert of Update with new default boringssl no-aes cipher suites. Re-enable tests. (patchset #3 id:40001 of https://codereview.webrtc.org/1550773002/ )
  Remove assert which was incorrectly added to TcpPort::OnSentPacket.
  Reland Connect TurnPort and TCPPort to AsyncPacketSocket::SignalSentPacket.
  Update with new default boringssl no-aes cipher suites. Re-enable tests.
  Revert of Connect TurnPort and TCPPort to AsyncPacketSocket::SignalSentPacket. (patchset #3 id:40001 of https://codereview.webrtc.org/1577873003/ )
  Re-land: "Use an explicit identifier in Config"
  Connect TurnPort and TCPPort to AsyncPacketSocket::SignalSentPacket.
  Revert of Delete remnants of non-square pixel support from cricket::VideoFrame. (patchset #1 id:1 of https://codereview.webrtc.org/1586613002/ )
  Remove libfuzzer trybot from default trybot set.
  Add ramp-up tests for transport sequence number with and w/o audio.
  Delete remnants of non-square pixel support from cricket::VideoFrame.
  Fix IPAddress::ToSensitiveString() to avoid dependency on inet_ntop().
  Revert of Storing raw audio sink for default audio track. (patchset #7 id:120001 of https://codereview.chromium.org/1551813002/ )
  Re-enable tests that failed under Linux_Msan.
  Revert of Use an explicit identifier in Config (patchset #4 id:60001 of https://codereview.webrtc.org/1538643004/ )
  Roll chromium_revision 346fea9..099be58 (369082:369139)
  Disable WebRtcVideoChannel2BaseTest.SendManyResizeOnce for TSan
  Add build_protobuf variable.
  ...
Diffstat:
-rw-r--r--  .gitignore | 20
-rw-r--r--  .gn | 2
-rw-r--r--  AUTHORS | 4
-rw-r--r--  DEPS | 5
-rw-r--r--  OWNERS | 1
-rwxr-xr-x  PRESUBMIT.py | 124
-rw-r--r--  WATCHLISTS | 119
-rw-r--r--  all.gyp | 1
-rw-r--r--  chromium/.gclient | 1
-rw-r--r--  infra/config/cq.cfg | 23
-rw-r--r--  resources/audio_coding/neteq4_network_stats.dat.sha1 | 2
-rw-r--r--  resources/audio_coding/neteq4_opus_network_stats.dat.sha1 | 1
-rw-r--r--  resources/audio_coding/neteq4_opus_ref.pcm.sha1 | 1
-rw-r--r--  resources/audio_coding/neteq4_opus_ref_win_32.pcm.sha1 | 1
-rw-r--r--  resources/audio_coding/neteq4_opus_ref_win_64.pcm.sha1 | 1
-rw-r--r--  resources/audio_coding/neteq4_opus_rtcp_stats.dat.sha1 | 1
-rw-r--r--  resources/audio_coding/neteq4_rtcp_stats.dat.sha1 | 2
-rw-r--r--  resources/audio_coding/neteq_network_stats.dat.sha1 | 1
-rw-r--r--  resources/audio_coding/neteq_network_stats_win_32.dat.sha1 | 1
-rw-r--r--  resources/audio_coding/neteq_opus.rtp.sha1 | 1
-rw-r--r--  resources/audio_coding/neteq_rtcp_stats.dat.sha1 | 1
-rw-r--r--  resources/audio_coding/neteq_universal_ref.pcm.sha1 | 1
-rw-r--r--  resources/audio_coding/neteq_universal_ref_win_32.pcm.sha1 | 1
-rwxr-xr-x  setup_links.py | 12
-rwxr-xr-x  sync_chromium.py | 2
-rw-r--r--  talk/app/webrtc/OWNERS | 6
-rw-r--r--  talk/app/webrtc/androidtests/src/org/webrtc/GlRectDrawerTest.java | 29
-rw-r--r--  talk/app/webrtc/androidtests/src/org/webrtc/MediaCodecVideoEncoderTest.java | 180
-rw-r--r--  talk/app/webrtc/androidtests/src/org/webrtc/SurfaceTextureHelperTest.java | 152
-rw-r--r--  talk/app/webrtc/androidtests/src/org/webrtc/SurfaceViewRendererOnMeasureTest.java | 6
-rw-r--r--  talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTest.java | 117
-rw-r--r--  talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java | 214
-rw-r--r--  talk/app/webrtc/androidvideocapturer.cc | 22
-rw-r--r--  talk/app/webrtc/androidvideocapturer.h | 2
-rw-r--r--  talk/app/webrtc/audiotrack.cc | 77
-rw-r--r--  talk/app/webrtc/audiotrack.h | 43
-rw-r--r--  talk/app/webrtc/dtlsidentitystore.cc | 16
-rw-r--r--  talk/app/webrtc/dtlsidentitystore.h | 3
-rw-r--r--  talk/app/webrtc/fakeportallocatorfactory.h | 76
-rw-r--r--  talk/app/webrtc/java/android/org/webrtc/Camera2Enumerator.java | 3
-rw-r--r--  talk/app/webrtc/java/android/org/webrtc/CameraEnumerationAndroid.java | 75
-rw-r--r--  talk/app/webrtc/java/android/org/webrtc/CameraEnumerator.java | 13
-rw-r--r--  talk/app/webrtc/java/android/org/webrtc/EglBase.java | 288
-rw-r--r--  talk/app/webrtc/java/android/org/webrtc/EglBase10.java | 299
-rw-r--r--  talk/app/webrtc/java/android/org/webrtc/EglBase14.java | 254
-rw-r--r--  talk/app/webrtc/java/android/org/webrtc/GlRectDrawer.java | 146
-rw-r--r--  talk/app/webrtc/java/android/org/webrtc/NetworkMonitorAutoDetect.java | 22
-rw-r--r--  talk/app/webrtc/java/android/org/webrtc/RendererCommon.java | 74
-rw-r--r--  talk/app/webrtc/java/android/org/webrtc/SurfaceTextureHelper.java | 283
-rw-r--r--  talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java | 281
-rw-r--r--  talk/app/webrtc/java/android/org/webrtc/ThreadUtils.java | 51
-rw-r--r--  talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java | 395
-rw-r--r--  talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java | 81
-rw-r--r--  talk/app/webrtc/java/jni/androidmediacodeccommon.h | 2
-rw-r--r--  talk/app/webrtc/java/jni/androidmediadecoder_jni.cc | 303
-rw-r--r--  talk/app/webrtc/java/jni/androidmediaencoder_jni.cc | 473
-rw-r--r--  talk/app/webrtc/java/jni/androidmediaencoder_jni.h | 3
-rw-r--r--  talk/app/webrtc/java/jni/androidvideocapturer_jni.cc | 100
-rw-r--r--  talk/app/webrtc/java/jni/androidvideocapturer_jni.h | 18
-rw-r--r--  talk/app/webrtc/java/jni/classreferenceholder.cc | 5
-rw-r--r--  talk/app/webrtc/java/jni/jni_helpers.cc | 25
-rw-r--r--  talk/app/webrtc/java/jni/jni_onload.cc | 55
-rw-r--r--  talk/app/webrtc/java/jni/native_handle_impl.cc | 163
-rw-r--r--  talk/app/webrtc/java/jni/native_handle_impl.h | 52
-rw-r--r--  talk/app/webrtc/java/jni/peerconnection_jni.cc | 169
-rw-r--r--  talk/app/webrtc/java/jni/surfacetexturehelper_jni.cc | 31
-rw-r--r--  talk/app/webrtc/java/jni/surfacetexturehelper_jni.h | 18
-rw-r--r--  talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java | 368
-rw-r--r--  talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java | 221
-rw-r--r--  talk/app/webrtc/java/src/org/webrtc/PeerConnection.java | 13
-rw-r--r--  talk/app/webrtc/java/src/org/webrtc/PeerConnectionFactory.java | 61
-rw-r--r--  talk/app/webrtc/java/src/org/webrtc/RtpSender.java | 25
-rw-r--r--  talk/app/webrtc/java/src/org/webrtc/VideoRenderer.java | 23
-rw-r--r--  talk/app/webrtc/jsepsessiondescription.cc | 3
-rw-r--r--  talk/app/webrtc/localaudiosource.cc | 4
-rw-r--r--  talk/app/webrtc/localaudiosource.h | 15
-rw-r--r--  talk/app/webrtc/localaudiosource_unittest.cc | 40
-rw-r--r--  talk/app/webrtc/mediacontroller.cc | 14
-rw-r--r--  talk/app/webrtc/mediastream_unittest.cc | 22
-rw-r--r--  talk/app/webrtc/mediastreaminterface.h | 43
-rw-r--r--  talk/app/webrtc/mediastreamobserver.cc | 101
-rw-r--r--  talk/app/webrtc/mediastreamobserver.h | 65
-rw-r--r--  talk/app/webrtc/mediastreamprovider.h | 19
-rw-r--r--  talk/app/webrtc/mediastreamsignaling.cc | 30
-rw-r--r--  talk/app/webrtc/mediastreamsignaling.h | 28
-rw-r--r--  talk/app/webrtc/objc/README | 54
-rw-r--r--  talk/app/webrtc/objc/RTCFileLogger.mm | 41
-rw-r--r--  talk/app/webrtc/objc/RTCPeerConnection.mm | 6
-rw-r--r--  talk/app/webrtc/objc/RTCPeerConnectionInterface.mm | 10
-rw-r--r--  talk/app/webrtc/objc/avfoundationvideocapturer.h | 1
-rw-r--r--  talk/app/webrtc/objc/avfoundationvideocapturer.mm | 26
-rw-r--r--  talk/app/webrtc/objc/public/RTCFileLogger.h | 23
-rw-r--r--  talk/app/webrtc/objc/public/RTCPeerConnectionInterface.h | 4
-rw-r--r--  talk/app/webrtc/peerconnection.cc | 645
-rw-r--r--  talk/app/webrtc/peerconnection.h | 83
-rw-r--r--  talk/app/webrtc/peerconnection_unittest.cc | 714
-rw-r--r--  talk/app/webrtc/peerconnectionendtoend_unittest.cc | 92
-rw-r--r--  talk/app/webrtc/peerconnectionfactory.cc | 50
-rw-r--r--  talk/app/webrtc/peerconnectionfactory.h | 22
-rw-r--r--  talk/app/webrtc/peerconnectionfactory_unittest.cc | 300
-rw-r--r--  talk/app/webrtc/peerconnectionfactoryproxy.h | 13
-rw-r--r--  talk/app/webrtc/peerconnectioninterface.h | 116
-rw-r--r--  talk/app/webrtc/peerconnectioninterface_unittest.cc | 175
-rw-r--r--  talk/app/webrtc/peerconnectionproxy.h | 4
-rw-r--r--  talk/app/webrtc/portallocatorfactory.cc | 68
-rw-r--r--  talk/app/webrtc/portallocatorfactory.h | 43
-rw-r--r--  talk/app/webrtc/remoteaudiosource.cc | 132
-rw-r--r--  talk/app/webrtc/remoteaudiosource.h | 42
-rw-r--r--  talk/app/webrtc/remoteaudiotrack.cc | 28
-rw-r--r--  talk/app/webrtc/remoteaudiotrack.h | 28
-rw-r--r--  talk/app/webrtc/rtpreceiver.cc | 2
-rw-r--r--  talk/app/webrtc/rtpreceiver.h | 8
-rw-r--r--  talk/app/webrtc/rtpsender.cc | 211
-rw-r--r--  talk/app/webrtc/rtpsender.h | 71
-rw-r--r--  talk/app/webrtc/rtpsenderinterface.h | 20
-rw-r--r--  talk/app/webrtc/rtpsenderreceiver_unittest.cc | 251
-rw-r--r--  talk/app/webrtc/statscollector.cc | 71
-rw-r--r--  talk/app/webrtc/statscollector.h | 1
-rw-r--r--  talk/app/webrtc/statscollector_unittest.cc | 47
-rw-r--r--  talk/app/webrtc/statstypes.cc | 5
-rw-r--r--  talk/app/webrtc/statstypes.h | 1
-rw-r--r--  talk/app/webrtc/test/DEPS | 5
-rw-r--r--  talk/app/webrtc/test/androidtestinitializer.cc | 74
-rw-r--r--  talk/app/webrtc/test/androidtestinitializer.h | 37
-rw-r--r--  talk/app/webrtc/test/fakeaudiocapturemodule_unittest.cc | 4
-rw-r--r--  talk/app/webrtc/test/fakedtlsidentitystore.h | 113
-rw-r--r--  talk/app/webrtc/test/fakemediastreamsignaling.h | 140
-rw-r--r--  talk/app/webrtc/test/peerconnectiontestwrapper.cc | 20
-rw-r--r--  talk/app/webrtc/test/peerconnectiontestwrapper.h | 7
-rw-r--r--  talk/app/webrtc/videosource.cc | 28
-rw-r--r--  talk/app/webrtc/videosource.h | 11
-rw-r--r--  talk/app/webrtc/videosource_unittest.cc | 41
-rw-r--r--  talk/app/webrtc/videosourceproxy.h | 1
-rw-r--r--  talk/app/webrtc/videotrack.cc | 4
-rw-r--r--  talk/app/webrtc/videotrack_unittest.cc | 2
-rw-r--r--  talk/app/webrtc/videotrackrenderers.cc | 4
-rw-r--r--  talk/app/webrtc/videotrackrenderers.h | 1
-rw-r--r--  talk/app/webrtc/webrtcsdp.cc | 69
-rw-r--r--  talk/app/webrtc/webrtcsdp_unittest.cc | 89
-rw-r--r--  talk/app/webrtc/webrtcsession.cc | 119
-rw-r--r--  talk/app/webrtc/webrtcsession.h | 10
-rw-r--r--  talk/app/webrtc/webrtcsession_unittest.cc | 198
-rw-r--r--  talk/app/webrtc/webrtcsessiondescriptionfactory.cc | 37
-rw-r--r--  talk/build/common.gypi | 12
-rw-r--r--  talk/build/merge_ios_libs.gyp | 2
-rw-r--r--  talk/codereview.settings | 7
-rwxr-xr-x  talk/libjingle.gyp | 115
-rwxr-xr-x  talk/libjingle_tests.gyp | 64
-rw-r--r--  talk/media/base/audiorenderer.h | 2
-rw-r--r--  talk/media/base/capturemanager_unittest.cc | 3
-rw-r--r--  talk/media/base/codec.cc | 45
-rw-r--r--  talk/media/base/codec.h | 63
-rw-r--r--  talk/media/base/codec_unittest.cc | 49
-rw-r--r--  talk/media/base/constants.cc | 1
-rw-r--r--  talk/media/base/constants.h | 3
-rw-r--r--  talk/media/base/cryptoparams.h | 6
-rw-r--r--  talk/media/base/executablehelpers.h | 14
-rw-r--r--  talk/media/base/fakemediaengine.h | 94
-rw-r--r--  talk/media/base/fakemediaprocessor.h | 29
-rw-r--r--  talk/media/base/mediachannel.h | 342
-rw-r--r--  talk/media/base/mediaengine.h | 133
-rw-r--r--  talk/media/base/streamparams_unittest.cc | 23
-rw-r--r--  talk/media/base/testutils.cc | 4
-rw-r--r--  talk/media/base/testutils.h | 3
-rw-r--r--  talk/media/base/videocapturer.cc | 27
-rw-r--r--  talk/media/base/videocapturer.h | 22
-rw-r--r--  talk/media/base/videocapturer_unittest.cc | 33
-rw-r--r--  talk/media/base/videocommon.cc | 8
-rw-r--r--  talk/media/base/videoengine_unittest.h | 347
-rw-r--r--  talk/media/base/videoframe.cc | 3
-rw-r--r--  talk/media/base/videoframe.h | 2
-rw-r--r--  talk/media/base/videoframefactory.cc | 4
-rw-r--r--  talk/media/base/videorenderer.h | 11
-rwxr-xr-x  talk/media/base/voiceprocessor.h | 29
-rw-r--r--  talk/media/devices/carbonvideorenderer.cc | 1
-rw-r--r--  talk/media/devices/carbonvideorenderer.h | 1
-rw-r--r--  talk/media/devices/devicemanager.cc | 2
-rw-r--r--  talk/media/devices/devicemanager_unittest.cc | 17
-rw-r--r--  talk/media/devices/fakedevicemanager.h | 2
-rw-r--r--  talk/media/devices/mobiledevicemanager.cc | 2
-rw-r--r--  talk/media/devices/v4llookup.h | 4
-rw-r--r--  talk/media/devices/videorendererfactory.h | 9
-rw-r--r--  talk/media/devices/win32devicemanager.cc | 3
-rw-r--r--  talk/media/sctp/sctpdataengine.cc | 13
-rw-r--r--  talk/media/sctp/sctpdataengine_unittest.cc | 18
-rw-r--r--  talk/media/webrtc/fakewebrtccall.cc | 26
-rw-r--r--  talk/media/webrtc/fakewebrtccall.h | 28
-rw-r--r--  talk/media/webrtc/fakewebrtcvideoengine.h | 2
-rw-r--r--  talk/media/webrtc/fakewebrtcvoiceengine.h | 269
-rwxr-xr-x  talk/media/webrtc/simulcast.cc | 6
-rw-r--r--  talk/media/webrtc/webrtcmediaengine.cc | 106
-rw-r--r--  talk/media/webrtc/webrtcmediaengine.h | 19
-rw-r--r--  talk/media/webrtc/webrtcmediaengine_unittest.cc | 205
-rw-r--r--  talk/media/webrtc/webrtcvideocapturer.cc | 14
-rw-r--r--  talk/media/webrtc/webrtcvideocapturer.h | 2
-rw-r--r--  talk/media/webrtc/webrtcvideocapturer_unittest.cc | 1
-rw-r--r--  talk/media/webrtc/webrtcvideoengine2.cc | 476
-rw-r--r--  talk/media/webrtc/webrtcvideoengine2.h | 40
-rw-r--r--  talk/media/webrtc/webrtcvideoengine2_unittest.cc | 258
-rw-r--r--  talk/media/webrtc/webrtcvideoframe.cc | 19
-rw-r--r--  talk/media/webrtc/webrtcvideoframe.h | 11
-rw-r--r--  talk/media/webrtc/webrtcvoe.h | 24
-rw-r--r--  talk/media/webrtc/webrtcvoiceengine.cc | 1823
-rw-r--r--  talk/media/webrtc/webrtcvoiceengine.h | 145
-rw-r--r--  talk/media/webrtc/webrtcvoiceengine_unittest.cc | 724
-rwxr-xr-x  talk/session/media/bundlefilter.cc | 65
-rwxr-xr-x  talk/session/media/bundlefilter.h | 25
-rwxr-xr-x  talk/session/media/bundlefilter_unittest.cc | 148
-rw-r--r--  talk/session/media/channel.cc | 302
-rw-r--r--  talk/session/media/channel.h | 43
-rw-r--r--  talk/session/media/channel_unittest.cc | 78
-rw-r--r--  talk/session/media/channelmanager.cc | 93
-rw-r--r--  talk/session/media/channelmanager.h | 14
-rw-r--r--  talk/session/media/channelmanager_unittest.cc | 59
-rw-r--r--  talk/session/media/mediasession.cc | 161
-rw-r--r--  talk/session/media/mediasession.h | 61
-rw-r--r--  talk/session/media/mediasession_unittest.cc | 37
-rw-r--r--  talk/session/media/srtpfilter.cc | 37
-rw-r--r--  talk/session/media/srtpfilter.h | 17
-rw-r--r--  talk/session/media/srtpfilter_unittest.cc | 102
-rw-r--r--  third_party/gflags/BUILD.gn | 13
-rw-r--r--  third_party/gflags/gflags.gyp | 2
-rw-r--r--  third_party/gtest-parallel/README.webrtc | 2
-rwxr-xr-x  third_party/gtest-parallel/gtest-parallel | 155
-rw-r--r--  third_party/winsdk_samples/winsdk_samples.gyp | 26
-rw-r--r--  tools/OWNERS | 1
-rwxr-xr-x  tools/autoroller/roll_chromium_revision.py | 10
-rw-r--r--  tools/refactoring/addfileheader.py | 163
-rw-r--r--  tools/refactoring/filemanagement.py | 72
-rw-r--r--  tools/refactoring/fixincludeguards.py | 145
-rw-r--r--  tools/refactoring/fixnames.py | 387
-rw-r--r--  tools/refactoring/integratefiles.py | 100
-rw-r--r--  tools/refactoring/p4commands.py | 31
-rw-r--r--  tools/refactoring/removetrace.py | 161
-rw-r--r--  tools/refactoring/stringmanipulation.py | 303
-rw-r--r--  tools/refactoring/trim.py | 29
-rw-r--r--  tools/refactoring/trimall.py | 59
-rwxr-xr-x  tools/refactoring/webrtc_reformat.py | 212
-rw-r--r--  tools/sslroots/generate_sslroots.py | 190
-rw-r--r--  tools/valgrind-webrtc/drmemory/suppressions.txt | 35
-rw-r--r--  tools/valgrind-webrtc/gtest_exclude/libjingle_peerconnection_unittest.gtest-drmemory_win32.txt | 2
-rw-r--r--  tools/valgrind-webrtc/gtest_exclude/libjingle_peerconnection_unittest.gtest-memcheck.txt | 2
-rw-r--r--  tools/valgrind-webrtc/gtest_exclude/modules_tests.gtest-drmemory.txt | 1
-rw-r--r--  tools/valgrind-webrtc/gtest_exclude/rtc_unittests.gtest-drmemory.txt | 3
-rw-r--r-- [-rwxr-xr-x]  tools/valgrind-webrtc/gtest_exclude/system_wrappers_unittests.gtest-drmemory_win32.txt | 7
-rw-r--r--  tools/valgrind-webrtc/gtest_exclude/video_engine_tests.gtest-drmemory_win32.txt | 13
-rw-r--r--  tools/valgrind-webrtc/gtest_exclude/video_engine_tests.gtest-memcheck.txt | 2
-rw-r--r--  tools/valgrind-webrtc/memcheck/suppressions.txt | 153
-rw-r--r--  webrtc/.gitignore | 28
-rw-r--r--  webrtc/BUILD.gn | 23
-rw-r--r--  webrtc/api/BUILD.gn | 76
-rw-r--r--  webrtc/api/OWNERS | 1
-rw-r--r--  webrtc/api/api.gyp | 83
-rw-r--r--  webrtc/api/api_tests.gyp | 40
-rw-r--r--  webrtc/api/objc/OWNERS | 1
-rw-r--r--  webrtc/api/objc/README | 3
-rw-r--r--  webrtc/api/objc/RTCEAGLVideoView.h | 35
-rw-r--r--  webrtc/api/objc/RTCEAGLVideoView.m | 259
-rw-r--r--  webrtc/api/objc/RTCIceCandidate+Private.h | 36
-rw-r--r--  webrtc/api/objc/RTCIceCandidate.h | 44
-rw-r--r--  webrtc/api/objc/RTCIceCandidate.mm | 70
-rw-r--r--  webrtc/api/objc/RTCIceServer+Private.h | 28
-rw-r--r--  webrtc/api/objc/RTCIceServer.h | 42
-rw-r--r--  webrtc/api/objc/RTCIceServer.mm | 64
-rw-r--r--  webrtc/api/objc/RTCMediaConstraints+Private.h | 53
-rw-r--r--  webrtc/api/objc/RTCMediaConstraints.h | 28
-rw-r--r--  webrtc/api/objc/RTCMediaConstraints.mm | 92
-rw-r--r--  webrtc/api/objc/RTCMediaSource+Private.h | 41
-rw-r--r--  webrtc/api/objc/RTCMediaSource.h | 31
-rw-r--r--  webrtc/api/objc/RTCMediaSource.mm | 84
-rw-r--r--  webrtc/api/objc/RTCMediaStreamTrack+Private.h | 45
-rw-r--r--  webrtc/api/objc/RTCMediaStreamTrack.h | 47
-rw-r--r--  webrtc/api/objc/RTCMediaStreamTrack.mm | 105
-rw-r--r--  webrtc/api/objc/RTCNSGLVideoView.h | 34
-rw-r--r--  webrtc/api/objc/RTCNSGLVideoView.m | 141
-rw-r--r--  webrtc/api/objc/RTCOpenGLVideoRenderer.h | 58
-rw-r--r--  webrtc/api/objc/RTCOpenGLVideoRenderer.mm | 485
-rw-r--r--  webrtc/api/objc/RTCSessionDescription+Private.h | 41
-rw-r--r--  webrtc/api/objc/RTCSessionDescription.h | 41
-rw-r--r--  webrtc/api/objc/RTCSessionDescription.mm | 92
-rw-r--r--  webrtc/api/objc/RTCStatsReport+Private.h | 24
-rw-r--r--  webrtc/api/objc/RTCStatsReport.h | 34
-rw-r--r--  webrtc/api/objc/RTCStatsReport.mm | 62
-rw-r--r--  webrtc/api/objc/RTCVideoFrame+Private.h | 24
-rw-r--r--  webrtc/api/objc/RTCVideoFrame.h | 37
-rw-r--r--  webrtc/api/objc/RTCVideoFrame.mm | 79
-rw-r--r--  webrtc/api/objc/RTCVideoRenderer.h | 30
-rw-r--r--  webrtc/api/objc/WebRTC-Prefix.pch | 13
-rw-r--r--  webrtc/api/objctests/RTCIceCandidateTest.mm | 74
-rw-r--r--  webrtc/api/objctests/RTCIceServerTest.mm | 84
-rw-r--r--  webrtc/api/objctests/RTCMediaConstraintsTest.mm | 66
-rw-r--r--  webrtc/api/objctests/RTCSessionDescriptionTest.mm | 144
-rw-r--r--  webrtc/audio/BUILD.gn | 5
-rw-r--r--  webrtc/audio/audio_receive_stream.cc | 244
-rw-r--r--  webrtc/audio/audio_receive_stream.h | 27
-rw-r--r--  webrtc/audio/audio_receive_stream_unittest.cc | 364
-rw-r--r--  webrtc/audio/audio_send_stream.cc | 185
-rw-r--r--  webrtc/audio/audio_send_stream.h | 23
-rw-r--r--  webrtc/audio/audio_send_stream_unittest.cc | 243
-rw-r--r--  webrtc/audio/audio_sink.h | 53
-rw-r--r--  webrtc/audio/audio_state.cc | 79
-rw-r--r--  webrtc/audio/audio_state.h | 61
-rw-r--r--  webrtc/audio/audio_state_unittest.cc | 80
-rw-r--r--  webrtc/audio/webrtc_audio.gypi | 3
-rw-r--r--  webrtc/audio_receive_stream.h | 23
-rw-r--r--  webrtc/audio_send_stream.h | 13
-rw-r--r--  webrtc/audio_state.h | 48
-rw-r--r--  webrtc/base/Android.mk | 1
-rw-r--r--  webrtc/base/BUILD.gn | 66
-rw-r--r--  webrtc/base/OWNERS | 6
-rw-r--r--  webrtc/base/array_view.h | 75
-rw-r--r--  webrtc/base/array_view_unittest.cc | 16
-rw-r--r--  webrtc/base/atomicops.h | 19
-rw-r--r--  webrtc/base/autodetectproxy_unittest.cc | 1
-rw-r--r--  webrtc/base/base.gyp | 79
-rw-r--r--  webrtc/base/base_tests.gyp | 17
-rw-r--r--  webrtc/base/basicdefs.h | 20
-rw-r--r--  webrtc/base/bitbuffer_unittest.cc | 5
-rw-r--r--  webrtc/base/buffer.cc | 5
-rw-r--r--  webrtc/base/buffer.h | 14
-rw-r--r--  webrtc/base/buffer_unittest.cc | 4
-rw-r--r--  webrtc/base/bufferqueue.cc | 14
-rw-r--r--  webrtc/base/bufferqueue.h | 17
-rw-r--r--  webrtc/base/bytebuffer_unittest.cc | 3
-rw-r--r--  webrtc/base/callback_unittest.cc | 59
-rw-r--r--  webrtc/base/common.h | 12
-rw-r--r--  webrtc/base/cpumonitor_unittest.cc | 389
-rw-r--r--  webrtc/base/crc32.cc | 6
-rw-r--r--  webrtc/base/criticalsection.h | 2
-rw-r--r--  webrtc/base/criticalsection_unittest.cc | 24
-rw-r--r--  webrtc/base/deprecation.h | 45
-rw-r--r--  webrtc/base/diskcache.cc | 13
-rw-r--r--  webrtc/base/event_tracer.cc | 220
-rw-r--r--  webrtc/base/event_tracer.h | 14
-rw-r--r--  webrtc/base/fakenetwork.h | 33
-rw-r--r--  webrtc/base/fakesslidentity.h | 14
-rw-r--r--  webrtc/base/filerotatingstream.cc | 2
-rw-r--r--  webrtc/base/fileutils.cc | 5
-rw-r--r--  webrtc/base/format_macros.h | 2
-rw-r--r--  webrtc/base/gunit.h | 14
-rw-r--r--  webrtc/base/helpers.cc | 57
-rw-r--r--  webrtc/base/helpers.h | 3
-rw-r--r--  webrtc/base/helpers_unittest.cc | 7
-rw-r--r--  webrtc/base/httpcommon-inl.h | 7
-rw-r--r--  webrtc/base/httpcommon.cc | 3
-rw-r--r--  webrtc/base/ifaddrs_converter.cc | 60
-rw-r--r--  webrtc/base/ifaddrs_converter.h | 45
-rw-r--r--  webrtc/base/ipaddress.cc | 42
-rw-r--r--  webrtc/base/ipaddress.h | 5
-rw-r--r--  webrtc/base/ipaddress_unittest.cc | 33
-rw-r--r--  webrtc/base/keep_ref_until_done.h | 43
-rw-r--r--  webrtc/base/latebindingsymboltable_unittest.cc | 13
-rw-r--r--  webrtc/base/linux.cc | 27
-rw-r--r--  webrtc/base/logging.cc | 8
-rw-r--r--  webrtc/base/logging.h | 2
-rw-r--r--  webrtc/base/logging_unittest.cc | 1
-rw-r--r--  webrtc/base/macasyncsocket.cc | 2
-rw-r--r--  webrtc/base/macifaddrs_converter.cc | 281
-rw-r--r--  webrtc/base/macutils.cc | 4
-rw-r--r--  webrtc/base/maybe.h | 121
-rw-r--r--  webrtc/base/maybe_unittest.cc | 485
-rw-r--r--  webrtc/base/messagehandler.h | 6
-rw-r--r--  webrtc/base/messagequeue_unittest.cc | 1
-rw-r--r--  webrtc/base/nat_unittest.cc | 8
-rw-r--r--  webrtc/base/natsocketfactory.cc | 3
-rw-r--r--  webrtc/base/network.cc | 263
-rw-r--r--  webrtc/base/network.h | 83
-rw-r--r--  webrtc/base/network_unittest.cc | 218
-rw-r--r--  webrtc/base/nullsocketserver_unittest.cc | 1
-rw-r--r--  webrtc/base/objc/NSString+StdString.h | 26
-rw-r--r--  webrtc/base/objc/NSString+StdString.mm | 33
-rw-r--r--  webrtc/base/objc/OWNERS | 1
-rw-r--r--  webrtc/base/objc/RTCCameraPreviewView.h | 28
-rw-r--r--  webrtc/base/objc/RTCCameraPreviewView.m | 47
-rw-r--r--  webrtc/base/objc/RTCDispatcher.h | 35
-rw-r--r--  webrtc/base/objc/RTCDispatcher.m | 46
-rw-r--r--  webrtc/base/objc/RTCLogging.h | 75
-rw-r--r--  webrtc/base/objc/RTCLogging.mm | 47
-rw-r--r--  webrtc/base/openssladapter.cc | 11
-rw-r--r--  webrtc/base/openssladapter.h | 4
-rw-r--r--  webrtc/base/opensslidentity.cc | 30
-rw-r--r--  webrtc/base/opensslidentity.h | 2
-rw-r--r--  webrtc/base/opensslstreamadapter.cc | 65
-rw-r--r--  webrtc/base/opensslstreamadapter.h | 6
-rw-r--r--  webrtc/base/optional.h | 139
-rw-r--r--  webrtc/base/optional_unittest.cc | 489
-rw-r--r--  webrtc/base/physicalsocketserver.cc | 1234
-rw-r--r--  webrtc/base/physicalsocketserver.h | 102
-rw-r--r--  webrtc/base/physicalsocketserver_unittest.cc | 165
-rw-r--r--  webrtc/base/platform_thread.cc | 174
-rw-r--r--  webrtc/base/platform_thread.h | 87
-rw-r--r--  webrtc/base/platform_thread_types.h | 32
-rw-r--r--  webrtc/base/platform_thread_unittest.cc | 51
-rw-r--r--  webrtc/base/proxy_unittest.cc | 1
-rw-r--r--  webrtc/base/proxydetect.cc | 5
-rw-r--r--  webrtc/base/random.cc | 86
-rw-r--r--  webrtc/base/random.h | 82
-rw-r--r--  webrtc/base/random_unittest.cc | 302
-rw-r--r--  webrtc/base/ratetracker.cc | 5
-rw-r--r--  webrtc/base/rtccertificate.cc | 14
-rw-r--r--  webrtc/base/rtccertificate.h | 7
-rw-r--r--  webrtc/base/rtccertificate_unittests.cc | 118
-rw-r--r--  webrtc/base/scoped_ptr.h | 64
-rw-r--r--  webrtc/base/sec_buffer.h | 2
-rw-r--r--  webrtc/base/sharedexclusivelock_unittest.cc | 1
-rw-r--r--  webrtc/base/signalthread.cc | 7
-rw-r--r--  webrtc/base/signalthread.h | 3
-rw-r--r--  webrtc/base/signalthread_unittest.cc | 1
-rw-r--r--  webrtc/base/sigslot.h | 18
-rw-r--r--  webrtc/base/socket_unittest.cc | 7
-rw-r--r--  webrtc/base/socket_unittest.h | 16
-rw-r--r--  webrtc/base/socketadapters.cc | 2
-rw-r--r--  webrtc/base/socketaddress.cc | 33
-rw-r--r--  webrtc/base/socketaddress.h | 11
-rw-r--r--  webrtc/base/socketaddress_unittest.cc | 24
-rw-r--r--  webrtc/base/sslidentity.cc | 71
-rw-r--r--  webrtc/base/sslidentity.h | 14
-rw-r--r--  webrtc/base/sslidentity_unittest.cc | 117
-rw-r--r--  webrtc/base/sslroots.h | 7230
-rw-r--r--  webrtc/base/sslstreamadapter.cc | 28
-rw-r--r--  webrtc/base/sslstreamadapter.h | 23
-rw-r--r--  webrtc/base/sslstreamadapter_unittest.cc | 365
-rw-r--r--  webrtc/base/sslstreamadapterhelper.cc | 3
-rw-r--r--  webrtc/base/stream_unittest.cc | 1
-rw-r--r--  webrtc/base/stringencode_unittest.cc | 5
-rw-r--r--  webrtc/base/stringutils.cc | 4
-rw-r--r--  webrtc/base/systeminfo.cc | 2
-rw-r--r--  webrtc/base/task.cc | 8
-rw-r--r--  webrtc/base/task_unittest.cc | 6
-rw-r--r--  webrtc/base/taskparent.cc | 6
-rw-r--r--  webrtc/base/taskparent.h | 2
-rw-r--r--  webrtc/base/taskrunner.cc | 6
-rw-r--r--  webrtc/base/taskrunner.h | 4
-rw-r--r--  webrtc/base/testclient_unittest.cc | 1
-rw-r--r--  webrtc/base/testutils.h | 3
-rw-r--r--  webrtc/base/thread.cc | 72
-rw-r--r--  webrtc/base/thread.h | 12
-rw-r--r--  webrtc/base/thread_checker_impl.cc | 2
-rw-r--r--  webrtc/base/thread_checker_impl.h | 2
-rw-r--r--  webrtc/base/thread_checker_unittest.cc | 1
-rw-r--r--  webrtc/base/thread_unittest.cc | 138
-rw-r--r--  webrtc/base/timeutils.cc | 44
-rw-r--r--  webrtc/base/timeutils.h | 6
-rw-r--r--  webrtc/base/timeutils_unittest.cc | 96
-rw-r--r--  webrtc/base/trace_event.h | 2
-rw-r--r--  webrtc/base/unittest_main.cc | 8
-rw-r--r--  webrtc/base/unixfilesystem.cc | 13
-rw-r--r--  webrtc/base/urlencode_unittest.cc | 13
-rw-r--r--  webrtc/base/virtualsocket_unittest.cc | 8
-rw-r--r--  webrtc/base/win32.cc | 3
-rw-r--r--  webrtc/base/win32filesystem.cc | 29
-rw-r--r--  webrtc/base/win32regkey_unittest.cc | 5
-rw-r--r--  webrtc/base/win32socketserver.cc | 6
-rw-r--r--  webrtc/base/win32windowpicker.cc | 3
-rw-r--r--  webrtc/base/win32windowpicker_unittest.cc | 3
-rw-r--r--  webrtc/build/android/AndroidManifest.xml | 14
-rw-r--r--  webrtc/build/android/suppressions.xml | 23
-rwxr-xr-x  webrtc/build/android/test_runner.py | 4
-rw-r--r--  webrtc/build/apk_test.gypi | 40
-rw-r--r--  webrtc/build/apk_tests.gyp | 31
-rw-r--r--  webrtc/build/apk_tests_noop.gyp | 8
-rw-r--r--  webrtc/build/common.gypi | 28
-rw-r--r--  webrtc/build/protoc.gypi | 22
-rw-r--r--  webrtc/build/sanitizers/tsan_suppressions_webrtc.cc | 9
-rw-r--r--  webrtc/build/webrtc.gni | 13
-rw-r--r--  webrtc/call.h | 19
-rw-r--r--  webrtc/call/BUILD.gn | 1
-rw-r--r--  webrtc/call/bitrate_allocator.cc | 194
-rw-r--r--  webrtc/call/bitrate_allocator.h | 102
-rw-r--r--  webrtc/call/bitrate_allocator_unittest.cc | 212
-rw-r--r--  webrtc/call/bitrate_estimator_tests.cc | 201
-rw-r--r--  webrtc/call/call.cc | 209
-rw-r--r--  webrtc/call/call_perf_tests.cc | 208
-rw-r--r--  webrtc/call/call_unittest.cc | 11
-rw-r--r--  webrtc/call/congestion_controller.cc | 50
-rw-r--r--  webrtc/call/congestion_controller.h | 66
-rw-r--r--  webrtc/call/mock/mock_congestion_controller.h | 52
-rw-r--r--  webrtc/call/packet_injection_tests.cc | 12
-rw-r--r--  webrtc/call/rampup_tests.cc | 587
-rw-r--r--  webrtc/call/rampup_tests.h | 137
-rw-r--r--  webrtc/call/rtc_event_log.cc | 92
-rw-r--r--  webrtc/call/rtc_event_log.h | 5
-rw-r--r--  webrtc/call/rtc_event_log.proto | 62
-rw-r--r--  webrtc/call/rtc_event_log_unittest.cc | 237
-rw-r--r--  webrtc/call/webrtc_call.gypi | 1
-rw-r--r--  webrtc/codereview.settings | 7
-rw-r--r--  webrtc/common.h | 27
-rw-r--r--  webrtc/common_audio/BUILD.gn | 1
-rw-r--r--  webrtc/common_audio/OWNERS | 1
-rw-r--r--  webrtc/common_audio/audio_converter.cc | 33
-rw-r--r--  webrtc/common_audio/audio_converter.h | 14
-rw-r--r--  webrtc/common_audio/audio_converter_unittest.cc | 38
-rw-r--r--  webrtc/common_audio/blocker.cc | 28
-rw-r--r--  webrtc/common_audio/blocker.h | 16
-rw-r--r--  webrtc/common_audio/blocker_unittest.cc | 115
-rw-r--r--  webrtc/common_audio/channel_buffer.cc | 6
-rw-r--r--  webrtc/common_audio/channel_buffer.h | 20
-rw-r--r--  webrtc/common_audio/common_audio.gyp | 5
-rw-r--r--  webrtc/common_audio/include/audio_util.h | 12
-rw-r--r--  webrtc/common_audio/lapped_transform.cc | 12
-rw-r--r--  webrtc/common_audio/lapped_transform.h | 22
-rw-r--r--  webrtc/common_audio/lapped_transform_unittest.cc | 44
-rw-r--r--  webrtc/common_audio/real_fourier.cc | 2
-rw-r--r--  webrtc/common_audio/real_fourier.h | 2
-rw-r--r--  webrtc/common_audio/real_fourier_unittest.cc | 8
-rw-r--r--  webrtc/common_audio/resampler/include/push_resampler.h | 4
-rw-r--r--  webrtc/common_audio/resampler/include/resampler.h | 8
-rw-r--r--  webrtc/common_audio/resampler/push_resampler.cc | 8
-rw-r--r--  webrtc/common_audio/resampler/resampler.cc | 6
-rw-r--r--  webrtc/common_audio/signal_processing/real_fft_unittest.cc | 1
-rw-r--r--  webrtc/common_audio/swap_queue.h | 210
-rw-r--r--  webrtc/common_audio/swap_queue_unittest.cc | 225
-rw-r--r--  webrtc/common_audio/wav_file.cc | 35
-rw-r--r--  webrtc/common_audio/wav_file.h | 33
-rw-r--r--  webrtc/common_audio/wav_file_unittest.cc | 24
-rw-r--r--  webrtc/common_audio/wav_header.cc | 59
-rw-r--r--  webrtc/common_audio/wav_header.h | 18
-rw-r--r--  webrtc/common_audio/wav_header_unittest.cc | 28
-rw-r--r--  webrtc/common_types.h | 17
-rw-r--r--  webrtc/common_video/BUILD.gn | 8
-rw-r--r--  webrtc/common_video/common_video.gyp | 10
-rw-r--r--  webrtc/common_video/common_video_unittests.gyp | 1
-rw-r--r--  webrtc/common_video/i420_buffer_pool.cc | 2
-rw-r--r--  webrtc/common_video/i420_buffer_pool_unittest.cc | 2
-rw-r--r--  webrtc/common_video/i420_video_frame_unittest.cc | 56
-rw-r--r--  webrtc/common_video/include/i420_buffer_pool.h | 43
-rw-r--r--  webrtc/common_video/include/incoming_video_stream.h | 107
-rw-r--r--  webrtc/common_video/include/video_frame_buffer.h | 157
-rw-r--r--  webrtc/common_video/include/video_image.h | 17
-rw-r--r--  webrtc/common_video/incoming_video_stream.cc | 115
-rw-r--r--  webrtc/common_video/interface/i420_buffer_pool.h | 43
-rw-r--r--  webrtc/common_video/interface/incoming_video_stream.h | 102
-rw-r--r--  webrtc/common_video/interface/video_frame_buffer.h | 157
-rw-r--r--  webrtc/common_video/interface/video_image.h | 17
-rw-r--r--  webrtc/common_video/libyuv/include/scaler.h | 2
-rw-r--r--  webrtc/common_video/libyuv/include/webrtc_libyuv.h | 3
-rw-r--r--  webrtc/common_video/libyuv/libyuv_unittest.cc | 34
-rw-r--r--  webrtc/common_video/libyuv/scaler_unittest.cc | 26
-rw-r--r--  webrtc/common_video/libyuv/webrtc_libyuv.cc | 12
-rw-r--r--  webrtc/common_video/plane.cc | 80
-rw-r--r--  webrtc/common_video/plane.h | 75
-rw-r--r--  webrtc/common_video/video_frame.cc | 40
-rw-r--r--  webrtc/common_video/video_frame_buffer.cc | 12
-rw-r--r--  webrtc/common_video/video_render_frames.cc | 4
-rw-r--r--  webrtc/config.h | 16
-rw-r--r--  webrtc/engine_configurations.h | 13
-rw-r--r--  webrtc/examples/android/media_demo/AndroidManifest.xml | 29
-rw-r--r--  webrtc/examples/android/media_demo/README | 24
-rw-r--r--  webrtc/examples/android/media_demo/build.xml | 92
-rw-r--r--  webrtc/examples/android/media_demo/jni/jni_helpers.cc | 82
-rw-r--r--  webrtc/examples/android/media_demo/jni/jni_helpers.h | 79
-rw-r--r--  webrtc/examples/android/media_demo/jni/on_load.cc | 48
-rw-r--r--  webrtc/examples/android/media_demo/jni/voice_engine_jni.cc | 423
-rw-r--r--  webrtc/examples/android/media_demo/jni/voice_engine_jni.h | 31
-rw-r--r--  webrtc/examples/android/media_demo/project.properties | 14
-rw-r--r--  webrtc/examples/android/media_demo/res/drawable/logo.png | bin 3305 -> 0 bytes
-rw-r--r--  webrtc/examples/android/media_demo/res/layout/audiomenu.xml | 80
-rw-r--r--  webrtc/examples/android/media_demo/res/layout/dropdownitems.xml | 17
-rw-r--r--  webrtc/examples/android/media_demo/res/layout/mainmenu.xml | 26
-rw-r--r--  webrtc/examples/android/media_demo/res/layout/settingsmenu.xml | 36
-rw-r--r--  webrtc/examples/android/media_demo/res/menu/main_activity_actions.xml | 5
-rw-r--r--  webrtc/examples/android/media_demo/res/values/bools.xml | 13
-rw-r--r--  webrtc/examples/android/media_demo/res/values/integers.xml | 13
-rw-r--r--  webrtc/examples/android/media_demo/res/values/strings.xml | 41
-rw-r--r--  webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/AudioMenuFragment.java | 156
-rw-r--r--  webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/CodecInst.java | 39
-rw-r--r--  webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MainMenuFragment.java | 123
-rw-r--r--  webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngine.java | 321
-rw-r--r--  webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngineObserver.java | 15
-rw-r--r--  webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MenuStateProvider.java | 15
-rw-r--r--  webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/NativeWebRtcContextRegistry.java | 22
-rw-r--r--  webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/RtcpStatistics.java | 32
-rw-r--r--  webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/SettingsMenuFragment.java | 129
-rw-r--r--  webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/SpinnerAdapter.java | 49
-rw-r--r--  webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/VoiceEngine.java | 117
-rw-r--r--  webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/WebRTCDemo.java | 210
-rw-r--r--  webrtc/examples/androidapp/AndroidManifest.xml | 2
-rw-r--r--  webrtc/examples/androidapp/res/values/strings.xml | 25
-rw-r--r--  webrtc/examples/androidapp/res/xml/preferences.xml | 30
-rw-r--r--  webrtc/examples/androidapp/src/org/appspot/apprtc/CallActivity.java | 25
-rw-r--r--  webrtc/examples/androidapp/src/org/appspot/apprtc/ConnectActivity.java | 39
-rw-r--r--  webrtc/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java | 104
-rw-r--r--  webrtc/examples/androidapp/src/org/appspot/apprtc/SettingsActivity.java | 20
-rw-r--r--  webrtc/examples/androidtests/src/org/appspot/apprtc/test/PeerConnectionClientTest.java | 90
-rw-r--r--  webrtc/examples/objc/AppRTCDemo/ios/ARDAppDelegate.m | 2
-rw-r--r--  webrtc/examples/objc/AppRTCDemo/ios/ARDMainView.m | 29
-rw-r--r--  webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallView.h | 3
-rw-r--r--  webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallView.m | 36
-rw-r--r--  webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.m | 11
-rw-r--r--  webrtc/examples/peerconnection/client/conductor.cc | 16
-rw-r--r--  webrtc/examples/peerconnection/client/conductor.h | 6
-rw-r--r--  webrtc/examples/peerconnection/client/defaults.cc | 9
-rw-r--r--  webrtc/examples/peerconnection/client/defaults.h | 6
-rw-r--r--  webrtc/examples/peerconnection/client/flagdefs.h | 6
-rw-r--r--  webrtc/examples/peerconnection/client/linux/main.cc | 12
-rw-r--r--  webrtc/examples/peerconnection/client/linux/main_wnd.cc | 8
-rw-r--r--  webrtc/examples/peerconnection/client/linux/main_wnd.h | 8
-rw-r--r--  webrtc/examples/peerconnection/client/main_wnd.cc | 3
-rw-r--r--  webrtc/examples/peerconnection/client/main_wnd.h | 9
-rw-r--r--  webrtc/examples/peerconnection/client/peer_connection_client.cc | 4
-rw-r--r--  webrtc/examples/peerconnection/client/peer_connection_client.h | 6
-rw-r--r--  webrtc/examples/peerconnection/server/data_socket.h | 6
-rw-r--r--  webrtc/examples/peerconnection/server/peer_channel.cc | 3
-rw-r--r--  webrtc/examples/peerconnection/server/peer_channel.h | 6
-rw-r--r--  webrtc/examples/peerconnection/server/utils.h | 6
-rw-r--r--  webrtc/examples/stunserver/stunserver_main.cc | 2
-rw-r--r--  webrtc/libjingle/xmllite/xmlelement_unittest.cc | 1
-rw-r--r--  webrtc/libjingle/xmpp/chatroommoduleimpl.cc | 3
-rw-r--r--  webrtc/libjingle/xmpp/constants.cc | 1
-rw-r--r--  webrtc/libjingle/xmpp/presenceouttask.cc | 3
-rw-r--r--  webrtc/libjingle/xmpp/xmppclient.cc | 4
-rw-r--r--  webrtc/libjingle/xmpp/xmppclient.h | 1
-rw-r--r--  webrtc/libjingle/xmpp/xmpplogintask.cc | 8
-rw-r--r--  webrtc/libjingle/xmpp/xmpplogintask.h | 4
-rw-r--r--  webrtc/libjingle/xmpp/xmppsocket.cc | 1
-rw-r--r--  webrtc/libjingle/xmpp/xmpptask.cc | 4
-rw-r--r--  webrtc/libjingle/xmpp/xmpptask.h | 4
-rwxr-xr-x  webrtc/libjingle_examples.gyp | 409
-rw-r--r--  webrtc/modules/audio_coding/BUILD.gn | 93
-rw-r--r--  webrtc/modules/audio_coding/OWNERS | 2
-rw-r--r--  webrtc/modules/audio_coding/acm2/acm_codec_database.cc | 333
-rw-r--r--  webrtc/modules/audio_coding/acm2/acm_codec_database.h | 83
-rw-r--r--  webrtc/modules/audio_coding/acm2/acm_common_defs.h | 32
-rw-r--r--  webrtc/modules/audio_coding/acm2/acm_neteq_unittest.cc (renamed from webrtc/modules/audio_coding/main/acm2/acm_neteq_unittest.cc) | 0
-rw-r--r--  webrtc/modules/audio_coding/acm2/acm_receive_test_oldapi.cc | 222
-rw-r--r--  webrtc/modules/audio_coding/acm2/acm_receive_test_oldapi.h | 97
-rw-r--r--  webrtc/modules/audio_coding/acm2/acm_receiver.cc | 541
-rw-r--r--  webrtc/modules/audio_coding/acm2/acm_receiver.h | 307
-rw-r--r--  webrtc/modules/audio_coding/acm2/acm_receiver_unittest_oldapi.cc | 398
-rw-r--r--  webrtc/modules/audio_coding/acm2/acm_resampler.cc | 63
-rw-r--r--  webrtc/modules/audio_coding/acm2/acm_resampler.h | 39
-rw-r--r--  webrtc/modules/audio_coding/acm2/acm_send_test_oldapi.cc | 158
-rw-r--r--  webrtc/modules/audio_coding/acm2/acm_send_test_oldapi.h | 91
-rw-r--r--  webrtc/modules/audio_coding/acm2/audio_coding_module.cc | 98
-rw-r--r--  webrtc/modules/audio_coding/acm2/audio_coding_module_impl.cc | 828
-rw-r--r--  webrtc/modules/audio_coding/acm2/audio_coding_module_impl.h | 283
-rw-r--r--  webrtc/modules/audio_coding/acm2/audio_coding_module_unittest_oldapi.cc | 1789
-rw-r--r--  webrtc/modules/audio_coding/acm2/call_statistics.cc | 55
-rw-r--r--  webrtc/modules/audio_coding/acm2/call_statistics.h | 63
-rw-r--r--  webrtc/modules/audio_coding/acm2/call_statistics_unittest.cc | 55
-rw-r--r--  webrtc/modules/audio_coding/acm2/codec_manager.cc | 194
-rw-r--r--  webrtc/modules/audio_coding/acm2/codec_manager.h | 66
-rw-r--r--  webrtc/modules/audio_coding/acm2/codec_manager_unittest.cc | 73
-rw-r--r--  webrtc/modules/audio_coding/acm2/initial_delay_manager.cc | 242
-rw-r--r--  webrtc/modules/audio_coding/acm2/initial_delay_manager.h | 120
-rw-r--r--  webrtc/modules/audio_coding/acm2/initial_delay_manager_unittest.cc | 376
-rw-r--r--  webrtc/modules/audio_coding/acm2/rent_a_codec.cc | 307
-rw-r--r--  webrtc/modules/audio_coding/acm2/rent_a_codec.h | 249
-rw-r--r--  webrtc/modules/audio_coding/acm2/rent_a_codec_unittest.cc | 222
-rw-r--r--  webrtc/modules/audio_coding/audio_coding.gypi | 185
-rw-r--r--  webrtc/modules/audio_coding/codecs/audio_decoder.cc | 9
-rw-r--r--  webrtc/modules/audio_coding/codecs/audio_decoder.h | 28
-rw-r--r--  webrtc/modules/audio_coding/codecs/audio_encoder.cc | 17
-rw-r--r--  webrtc/modules/audio_coding/codecs/audio_encoder.h | 10
-rw-r--r--  webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.cc | 20
-rw-r--r--  webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.h | 95
-rw-r--r--  webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng_unittest.cc | 8
-rw-r--r--  webrtc/modules/audio_coding/codecs/cng/cng.gypi | 16
-rw-r--r--  webrtc/modules/audio_coding/codecs/cng/include/audio_encoder_cng.h | 95
-rw-r--r--  webrtc/modules/audio_coding/codecs/cng/include/webrtc_cng.h | 163
-rw-r--r--  webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h | 163
-rw-r--r--  webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.cc | 4
-rw-r--r--  webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.h | 63
-rw-r--r--  webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.cc | 34
-rw-r--r--  webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.h | 117
-rw-r--r--  webrtc/modules/audio_coding/codecs/g711/g711.gypi | 20
-rw-r--r--  webrtc/modules/audio_coding/codecs/g711/g711_interface.h | 135
-rw-r--r--  webrtc/modules/audio_coding/codecs/g711/include/audio_decoder_pcm.h | 63
-rw-r--r--  webrtc/modules/audio_coding/codecs/g711/include/audio_encoder_pcm.h | 117
-rw-r--r--  webrtc/modules/audio_coding/codecs/g711/include/g711_interface.h | 135
-rw-r--r--  webrtc/modules/audio_coding/codecs/g711/test/testG711.cc | 2
-rw-r--r--  webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.cc | 4
-rw-r--r--  webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.h | 72
-rw-r--r--  webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.cc | 22
-rw-r--r--  webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.h | 73
-rw-r--r--  webrtc/modules/audio_coding/codecs/g722/g722.gypi | 18
-rw-r--r--  webrtc/modules/audio_coding/codecs/g722/g722_interface.h | 182
-rw-r--r--  webrtc/modules/audio_coding/codecs/g722/include/audio_decoder_g722.h | 72
-rw-r--r--  webrtc/modules/audio_coding/codecs/g722/include/audio_encoder_g722.h | 73
-rw-r--r--  webrtc/modules/audio_coding/codecs/g722/include/g722_interface.h | 182
-rw-r--r--  webrtc/modules/audio_coding/codecs/g722/test/testG722.cc | 2
-rw-r--r--  webrtc/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.cc | 4
-rw-r--r--  webrtc/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h | 42
-rw-r--r--  webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.cc | 16
-rw-r--r--  webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h | 63
-rw-r--r--  webrtc/modules/audio_coding/codecs/ilbc/ilbc.gypi | 16
-rw-r--r--  webrtc/modules/audio_coding/codecs/ilbc/ilbc.h | 258
-rw-r--r--  webrtc/modules/audio_coding/codecs/ilbc/include/audio_decoder_ilbc.h | 42
-rw-r--r--  webrtc/modules/audio_coding/codecs/ilbc/include/audio_encoder_ilbc.h | 63
-rw-r--r--  webrtc/modules/audio_coding/codecs/ilbc/include/ilbc.h | 258
-rw-r--r--  webrtc/modules/audio_coding/codecs/ilbc/test/iLBC_test.c | 2
-rw-r--r--  webrtc/modules/audio_coding/codecs/ilbc/test/iLBC_testLib.c | 2
-rw-r--r--  webrtc/modules/audio_coding/codecs/ilbc/test/iLBC_testprogram.c | 14
-rw-r--r--  webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h | 4
-rw-r--r--  webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h | 6
-rw-r--r--  webrtc/modules/audio_coding/codecs/isac/fix/test/isac_speed_test.cc | 2
-rw-r--r--  webrtc/modules/audio_coding/codecs/isac/fix/test/test_iSACfixfloat.c | 16
-rw-r--r--  webrtc/modules/audio_coding/codecs/isac/isac_test.gypi | 14
-rw-r--r--  webrtc/modules/audio_coding/codecs/isac/isacfix.gypi | 5
-rw-r--r--  webrtc/modules/audio_coding/codecs/isac/main/test/ReleaseTest-API/ReleaseTest-API.cc | 12
-rw-r--r--  webrtc/modules/audio_coding/codecs/mock/mock_audio_encoder.h | 4
-rw-r--r--  webrtc/modules/audio_coding/codecs/opus/audio_decoder_opus.cc | 4
-rw-r--r--  webrtc/modules/audio_coding/codecs/opus/audio_decoder_opus.h | 51
-rw-r--r--  webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.cc | 28
-rw-r--r--  webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h | 102
-rw-r--r--  webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus_unittest.cc | 2
-rw-r--r--  webrtc/modules/audio_coding/codecs/opus/include/audio_decoder_opus.h | 51
-rw-r--r--  webrtc/modules/audio_coding/codecs/opus/include/audio_encoder_opus.h | 102
-rw-r--r--  webrtc/modules/audio_coding/codecs/opus/include/opus_interface.h | 349
-rw-r--r--  webrtc/modules/audio_coding/codecs/opus/opus.gypi | 12
-rw-r--r--  webrtc/modules/audio_coding/codecs/opus/opus_fec_test.cc | 9
-rw-r--r--  webrtc/modules/audio_coding/codecs/opus/opus_inst.h | 11
-rw-r--r--  webrtc/modules/audio_coding/codecs/opus/opus_interface.c | 115
-rw-r--r--  webrtc/modules/audio_coding/codecs/opus/opus_interface.h | 349
-rw-r--r--  webrtc/modules/audio_coding/codecs/opus/opus_speed_test.cc | 4
-rw-r--r--  webrtc/modules/audio_coding/codecs/opus/opus_unittest.cc | 190
-rw-r--r--  webrtc/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.cc | 4
-rw-r--r--  webrtc/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.h | 40
-rw-r--r--  webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.cc | 6
-rw-r--r--  webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.h | 48
-rw-r--r--  webrtc/modules/audio_coding/codecs/pcm16b/include/audio_decoder_pcm16b.h | 40
-rw-r--r--  webrtc/modules/audio_coding/codecs/pcm16b/include/audio_encoder_pcm16b.h | 48
-rw-r--r--  webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h | 68
-rw-r--r--  webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.gypi | 16
-rw-r--r--  webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.h | 68
-rw-r--r--  webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc | 9
-rw-r--r--  webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.h | 4
-rw-r--r--  webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc | 14
-rw-r--r--  webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.cc | 12
-rw-r--r--  webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.h | 9
-rw-r--r--  webrtc/modules/audio_coding/include/audio_coding_module.h | 746
-rw-r--r--  webrtc/modules/audio_coding/include/audio_coding_module_typedefs.h | 51
-rw-r--r--  webrtc/modules/audio_coding/main/acm2/OWNERS | 5
-rw-r--r--  webrtc/modules/audio_coding/main/acm2/acm_codec_database.cc | 364
-rw-r--r--  webrtc/modules/audio_coding/main/acm2/acm_codec_database.h | 115
-rw-r--r--  webrtc/modules/audio_coding/main/acm2/acm_common_defs.h | 32
-rw-r--r--  webrtc/modules/audio_coding/main/acm2/acm_receive_test_oldapi.cc | 221
-rw-r--r--  webrtc/modules/audio_coding/main/acm2/acm_receive_test_oldapi.h | 94
-rw-r--r--  webrtc/modules/audio_coding/main/acm2/acm_receiver.cc | 739
-rw-r--r--  webrtc/modules/audio_coding/main/acm2/acm_receiver.h | 348
-rw-r--r--  webrtc/modules/audio_coding/main/acm2/acm_receiver_unittest_oldapi.cc | 368
-rw-r--r--  webrtc/modules/audio_coding/main/acm2/acm_resampler.cc | 68
-rw-r--r--  webrtc/modules/audio_coding/main/acm2/acm_resampler.h | 39
-rw-r--r--  webrtc/modules/audio_coding/main/acm2/acm_send_test_oldapi.cc | 158
-rw-r--r--  webrtc/modules/audio_coding/main/acm2/acm_send_test_oldapi.h | 91
-rw-r--r--  webrtc/modules/audio_coding/main/acm2/audio_coding_module.cc | 97
-rw-r--r--  webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.cc | 802
-rw-r--r--  webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.h | 285
-rw-r--r--  webrtc/modules/audio_coding/main/acm2/audio_coding_module_unittest_oldapi.cc | 1792
-rw-r--r--  webrtc/modules/audio_coding/main/acm2/call_statistics.cc | 55
-rw-r--r--  webrtc/modules/audio_coding/main/acm2/call_statistics.h | 63
-rw-r--r--  webrtc/modules/audio_coding/main/acm2/call_statistics_unittest.cc | 55
-rw-r--r--  webrtc/modules/audio_coding/main/acm2/codec_manager.cc | 465
-rw-r--r--  webrtc/modules/audio_coding/main/acm2/codec_manager.h | 90
-rw-r--r--  webrtc/modules/audio_coding/main/acm2/codec_owner.cc | 213
-rw-r--r--  webrtc/modules/audio_coding/main/acm2/codec_owner.h | 86
-rw-r--r--  webrtc/modules/audio_coding/main/acm2/codec_owner_unittest.cc | 210
-rw-r--r--  webrtc/modules/audio_coding/main/acm2/initial_delay_manager.cc | 242
-rw-r--r--  webrtc/modules/audio_coding/main/acm2/initial_delay_manager.h | 120
-rw-r--r--  webrtc/modules/audio_coding/main/acm2/initial_delay_manager_unittest.cc | 376
-rw-r--r--  webrtc/modules/audio_coding/main/acm2/rent_a_codec.cc | 70
-rw-r--r--  webrtc/modules/audio_coding/main/acm2/rent_a_codec.h | 162
-rw-r--r--  webrtc/modules/audio_coding/main/audio_coding_module.gypi | 193
-rw-r--r--  webrtc/modules/audio_coding/main/include/audio_coding_module.h | 758
-rw-r--r--  webrtc/modules/audio_coding/main/include/audio_coding_module_typedefs.h | 51
-rw-r--r--  webrtc/modules/audio_coding/main/test/ACMTest.h | 21
-rw-r--r--  webrtc/modules/audio_coding/main/test/APITest.cc | 1117
-rw-r--r--  webrtc/modules/audio_coding/main/test/APITest.h | 163
-rw-r--r--  webrtc/modules/audio_coding/main/test/Channel.cc | 424
-rw-r--r--  webrtc/modules/audio_coding/main/test/Channel.h | 130
-rw-r--r--  webrtc/modules/audio_coding/main/test/EncodeDecodeTest.cc | 352
-rw-r--r--  webrtc/modules/audio_coding/main/test/EncodeDecodeTest.h | 123
-rw-r--r--  webrtc/modules/audio_coding/main/test/PCMFile.cc | 204
-rw-r--r--  webrtc/modules/audio_coding/main/test/PCMFile.h | 68
-rw-r--r--  webrtc/modules/audio_coding/main/test/PacketLossTest.cc | 168
-rw-r--r--  webrtc/modules/audio_coding/main/test/PacketLossTest.h | 67
-rw-r--r--  webrtc/modules/audio_coding/main/test/RTPFile.h | 126
-rw-r--r--  webrtc/modules/audio_coding/main/test/SpatialAudio.cc | 196
-rw-r--r--  webrtc/modules/audio_coding/main/test/SpatialAudio.h | 47
-rw-r--r--  webrtc/modules/audio_coding/main/test/TestAllCodecs.cc | 486
-rw-r--r--  webrtc/modules/audio_coding/main/test/TestAllCodecs.h | 84
-rw-r--r--  webrtc/modules/audio_coding/main/test/TestRedFec.cc | 476
-rw-r--r--  webrtc/modules/audio_coding/main/test/TestRedFec.h | 51
-rw-r--r--  webrtc/modules/audio_coding/main/test/TestStereo.cc | 837
-rw-r--r--  webrtc/modules/audio_coding/main/test/TestStereo.h | 117
-rw-r--r--  webrtc/modules/audio_coding/main/test/TestVADDTX.cc | 271
-rw-r--r--  webrtc/modules/audio_coding/main/test/TestVADDTX.h | 102
-rw-r--r--  webrtc/modules/audio_coding/main/test/Tester.cc | 171
-rw-r--r--  webrtc/modules/audio_coding/main/test/TimedTrace.h | 36
-rw-r--r--  webrtc/modules/audio_coding/main/test/TwoWayCommunication.cc | 301
-rw-r--r--  webrtc/modules/audio_coding/main/test/TwoWayCommunication.h | 60
-rw-r--r--  webrtc/modules/audio_coding/main/test/delay_test.cc | 270
-rw-r--r--  webrtc/modules/audio_coding/main/test/iSACTest.cc | 340
-rw-r--r--  webrtc/modules/audio_coding/main/test/iSACTest.h | 79
-rw-r--r--  webrtc/modules/audio_coding/main/test/initial_delay_unittest.cc | 175
-rw-r--r--  webrtc/modules/audio_coding/main/test/insert_packet_with_timing.cc | 311
-rw-r--r--  webrtc/modules/audio_coding/main/test/opus_test.cc | 381
-rw-r--r--  webrtc/modules/audio_coding/main/test/opus_test.h | 57
-rw-r--r--  webrtc/modules/audio_coding/main/test/target_delay_unittest.cc | 223
-rw-r--r--  webrtc/modules/audio_coding/main/test/utility.cc | 303
-rw-r--r--  webrtc/modules/audio_coding/main/test/utility.h | 139
-rw-r--r--  webrtc/modules/audio_coding/neteq/audio_decoder_impl.cc | 12
-rw-r--r--  webrtc/modules/audio_coding/neteq/audio_decoder_impl.h | 6
-rw-r--r--  webrtc/modules/audio_coding/neteq/audio_decoder_unittest.cc | 37
-rw-r--r--  webrtc/modules/audio_coding/neteq/comfort_noise.cc | 2
-rw-r--r--  webrtc/modules/audio_coding/neteq/decision_logic.cc | 3
-rw-r--r--  webrtc/modules/audio_coding/neteq/decision_logic_normal.cc | 2
-rw-r--r--  webrtc/modules/audio_coding/neteq/decoder_database.cc | 14
-rw-r--r--  webrtc/modules/audio_coding/neteq/decoder_database.h | 44
-rw-r--r--  webrtc/modules/audio_coding/neteq/decoder_database_unittest.cc | 56
-rw-r--r--  webrtc/modules/audio_coding/neteq/delay_manager.cc | 2
-rw-r--r--  webrtc/modules/audio_coding/neteq/expand.cc | 2
-rw-r--r--  webrtc/modules/audio_coding/neteq/include/neteq.h | 27
-rw-r--r--  webrtc/modules/audio_coding/neteq/mock/mock_audio_decoder.h | 5
-rw-r--r--  webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h | 12
-rw-r--r--  webrtc/modules/audio_coding/neteq/mock/mock_external_decoder_pcm16b.h | 10
-rw-r--r--  webrtc/modules/audio_coding/neteq/nack.cc | 2
-rw-r--r--  webrtc/modules/audio_coding/neteq/nack.h | 2
-rw-r--r--  webrtc/modules/audio_coding/neteq/nack_unittest.cc | 2
-rw-r--r--  webrtc/modules/audio_coding/neteq/neteq.cc | 2
-rw-r--r--  webrtc/modules/audio_coding/neteq/neteq_external_decoder_unittest.cc | 44
-rw-r--r--  webrtc/modules/audio_coding/neteq/neteq_impl.cc | 100
-rw-r--r--  webrtc/modules/audio_coding/neteq/neteq_impl.h | 24
-rw-r--r--  webrtc/modules/audio_coding/neteq/neteq_impl_unittest.cc | 237
-rw-r--r--  webrtc/modules/audio_coding/neteq/neteq_network_stats_unittest.cc | 3
-rw-r--r--  webrtc/modules/audio_coding/neteq/neteq_stereo_unittest.cc | 52
-rw-r--r--  webrtc/modules/audio_coding/neteq/neteq_tests.gypi | 17
-rw-r--r--  webrtc/modules/audio_coding/neteq/neteq_unittest.cc | 470
-rw-r--r--  webrtc/modules/audio_coding/neteq/neteq_unittest.proto | 29
-rw-r--r--  webrtc/modules/audio_coding/neteq/normal.cc | 2
-rw-r--r--  webrtc/modules/audio_coding/neteq/packet.h | 2
-rw-r--r--  webrtc/modules/audio_coding/neteq/payload_splitter_unittest.cc | 14
-rw-r--r--  webrtc/modules/audio_coding/neteq/rtcp.cc | 2
-rw-r--r--  webrtc/modules/audio_coding/neteq/statistics_calculator.cc | 8
-rw-r--r--  webrtc/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.h | 2
-rw-r--r--  webrtc/modules/audio_coding/neteq/test/PayloadTypes.h | 2
-rw-r--r--  webrtc/modules/audio_coding/neteq/test/RTPencode.cc | 63
-rw-r--r--  webrtc/modules/audio_coding/neteq/test/neteq_ilbc_quality_test.cc | 10
-rw-r--r--  webrtc/modules/audio_coding/neteq/test/neteq_isac_quality_test.cc | 2
-rw-r--r--  webrtc/modules/audio_coding/neteq/test/neteq_opus_quality_test.cc | 2
-rw-r--r--  webrtc/modules/audio_coding/neteq/test/neteq_pcmu_quality_test.cc | 10
-rw-r--r--  webrtc/modules/audio_coding/neteq/timestamp_scaler.cc | 14
-rw-r--r--  webrtc/modules/audio_coding/neteq/tools/audio_loop.cc | 7
-rw-r--r--  webrtc/modules/audio_coding/neteq/tools/audio_loop.h | 8
-rw-r--r--  webrtc/modules/audio_coding/neteq/tools/audio_sink.h | 2
-rw-r--r--  webrtc/modules/audio_coding/neteq/tools/constant_pcm_packet_source.cc | 2
-rw-r--r--  webrtc/modules/audio_coding/neteq/tools/neteq_external_decoder_test.cc | 27
-rw-r--r--  webrtc/modules/audio_coding/neteq/tools/neteq_external_decoder_test.h | 11
-rw-r--r--  webrtc/modules/audio_coding/neteq/tools/neteq_performance_test.cc | 32
-rw-r--r--  webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.cc | 14
-rw-r--r--  webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.h | 2
-rw-r--r--  webrtc/modules/audio_coding/neteq/tools/neteq_rtpplay.cc | 72
-rw-r--r--  webrtc/modules/audio_coding/neteq/tools/packet.cc | 4
-rw-r--r--  webrtc/modules/audio_coding/neteq/tools/rtc_event_log_source.cc | 2
-rw-r--r--  webrtc/modules/audio_coding/neteq/tools/rtc_event_log_source.h | 2
-rw-r--r--  webrtc/modules/audio_coding/neteq/tools/rtp_file_source.cc | 14
-rw-r--r--  webrtc/modules/audio_coding/neteq/tools/rtp_file_source.h | 6
-rw-r--r--  webrtc/modules/audio_coding/neteq/tools/rtp_generator.h | 2
-rw-r--r--  webrtc/modules/audio_coding/test/ACMTest.h | 21
-rw-r--r--  webrtc/modules/audio_coding/test/APITest.cc | 1104
-rw-r--r--  webrtc/modules/audio_coding/test/APITest.h | 163
-rw-r--r--  webrtc/modules/audio_coding/test/Channel.cc | 424
-rw-r--r--  webrtc/modules/audio_coding/test/Channel.h | 130
-rw-r--r--  webrtc/modules/audio_coding/test/EncodeDecodeTest.cc | 355
-rw-r--r--  webrtc/modules/audio_coding/test/EncodeDecodeTest.h | 123
-rw-r--r--  webrtc/modules/audio_coding/test/PCMFile.cc | 221
-rw-r--r--  webrtc/modules/audio_coding/test/PCMFile.h | 80
-rw-r--r--  webrtc/modules/audio_coding/test/PacketLossTest.cc | 167
-rw-r--r--  webrtc/modules/audio_coding/test/PacketLossTest.h | 67
-rw-r--r--  webrtc/modules/audio_coding/test/RTPFile.cc (renamed from webrtc/modules/audio_coding/main/test/RTPFile.cc) | 0
-rw-r--r--  webrtc/modules/audio_coding/test/RTPFile.h | 126
-rw-r--r--  webrtc/modules/audio_coding/test/SpatialAudio.cc | 196
-rw-r--r--  webrtc/modules/audio_coding/test/SpatialAudio.h | 47
-rw-r--r--  webrtc/modules/audio_coding/test/TestAllCodecs.cc | 489
-rw-r--r--  webrtc/modules/audio_coding/test/TestAllCodecs.h | 84
-rw-r--r--  webrtc/modules/audio_coding/test/TestRedFec.cc | 480
-rw-r--r--  webrtc/modules/audio_coding/test/TestRedFec.h | 51
-rw-r--r--  webrtc/modules/audio_coding/test/TestStereo.cc | 844
-rw-r--r--  webrtc/modules/audio_coding/test/TestStereo.h | 117
-rw-r--r--  webrtc/modules/audio_coding/test/TestVADDTX.cc | 276
-rw-r--r--  webrtc/modules/audio_coding/test/TestVADDTX.h | 102
-rw-r--r--  webrtc/modules/audio_coding/test/Tester.cc | 181
-rw-r--r--  webrtc/modules/audio_coding/test/TimedTrace.cc (renamed from webrtc/modules/audio_coding/main/test/TimedTrace.cc) | 0
-rw-r--r--  webrtc/modules/audio_coding/test/TimedTrace.h | 36
-rw-r--r--  webrtc/modules/audio_coding/test/TwoWayCommunication.cc | 299
-rw-r--r--  webrtc/modules/audio_coding/test/TwoWayCommunication.h | 60
-rw-r--r--  webrtc/modules/audio_coding/test/delay_test.cc | 265
-rw-r--r--  webrtc/modules/audio_coding/test/iSACTest.cc | 343
-rw-r--r--  webrtc/modules/audio_coding/test/iSACTest.h | 79
-rw-r--r--  webrtc/modules/audio_coding/test/insert_packet_with_timing.cc | 307
-rw-r--r--  webrtc/modules/audio_coding/test/opus_test.cc | 383
-rw-r--r--  webrtc/modules/audio_coding/test/opus_test.h | 60
-rw-r--r--  webrtc/modules/audio_coding/test/target_delay_unittest.cc | 249
-rw-r--r--  webrtc/modules/audio_coding/test/utility.cc | 303
-rw-r--r--  webrtc/modules/audio_coding/test/utility.h | 139
-rw-r--r--  webrtc/modules/audio_conference_mixer/BUILD.gn | 8
-rw-r--r--  webrtc/modules/audio_conference_mixer/OWNERS | 7
-rw-r--r--  webrtc/modules/audio_conference_mixer/audio_conference_mixer.gypi | 4
-rw-r--r--  webrtc/modules/audio_conference_mixer/include/audio_conference_mixer.h | 77
-rw-r--r--  webrtc/modules/audio_conference_mixer/include/audio_conference_mixer_defines.h | 60
-rw-r--r--  webrtc/modules/audio_conference_mixer/interface/audio_conference_mixer.h | 77
-rw-r--r--  webrtc/modules/audio_conference_mixer/interface/audio_conference_mixer_defines.h | 60
-rw-r--r--  webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc | 10
-rw-r--r--  webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h | 4
-rw-r--r--  webrtc/modules/audio_conference_mixer/source/audio_frame_manipulator.cc | 2
-rw-r--r--  webrtc/modules/audio_conference_mixer/test/audio_conference_mixer_unittest.cc | 4
-rw-r--r--  webrtc/modules/audio_device/BUILD.gn | 95
-rw-r--r--  webrtc/modules/audio_device/OWNERS | 2
-rw-r--r--  webrtc/modules/audio_device/android/audio_device_unittest.cc | 14
-rw-r--r--  webrtc/modules/audio_device/android/audio_manager.cc | 31
-rw-r--r--  webrtc/modules/audio_device/android/audio_manager.h | 17
-rw-r--r--  webrtc/modules/audio_device/android/audio_manager_unittest.cc | 8
-rw-r--r--  webrtc/modules/audio_device/android/audio_record_jni.cc | 23
-rw-r--r--  webrtc/modules/audio_device/android/audio_record_jni.h | 6
-rw-r--r--  webrtc/modules/audio_device/android/audio_track_jni.cc | 18
-rw-r--r--  webrtc/modules/audio_device/android/audio_track_jni.h | 4
-rw-r--r--  webrtc/modules/audio_device/android/build_info.cc | 2
-rw-r--r--  webrtc/modules/audio_device/android/build_info.h | 2
-rw-r--r--  webrtc/modules/audio_device/android/ensure_initialized.cc | 3
-rw-r--r--  webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioEffects.java | 28
-rw-r--r--  webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioManager.java | 48
-rw-r--r--  webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioRecord.java | 4
-rw-r--r--  webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java | 53
-rw-r--r--  webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioUtils.java | 37
-rw-r--r--  webrtc/modules/audio_device/android/opensles_player.cc | 37
-rw-r--r--  webrtc/modules/audio_device/android/opensles_player.h | 23
-rw-r--r--  webrtc/modules/audio_device/audio_device.gypi | 118
-rw-r--r--  webrtc/modules/audio_device/audio_device_buffer.cc | 10
-rw-r--r--  webrtc/modules/audio_device/audio_device_buffer.h | 12
-rw-r--r--  webrtc/modules/audio_device/dummy/file_audio_device.cc | 66
-rw-r--r--  webrtc/modules/audio_device/dummy/file_audio_device.h | 10
-rw-r--r--  webrtc/modules/audio_device/include/audio_device.h | 2
-rw-r--r--  webrtc/modules/audio_device/include/audio_device_defines.h | 26
-rw-r--r--  webrtc/modules/audio_device/ios/audio_device_ios.h | 5
-rw-r--r--  webrtc/modules/audio_device/ios/audio_device_ios.mm | 350
-rw-r--r--  webrtc/modules/audio_device/ios/audio_device_unittest_ios.cc | 72
-rw-r--r--  webrtc/modules/audio_device/linux/audio_device_alsa_linux.cc | 40
-rw-r--r--  webrtc/modules/audio_device/linux/audio_device_alsa_linux.h | 8
-rw-r--r--  webrtc/modules/audio_device/linux/audio_device_pulse_linux.cc | 36
-rw-r--r--  webrtc/modules/audio_device/linux/audio_device_pulse_linux.h | 9
-rw-r--r--  webrtc/modules/audio_device/mac/audio_device_mac.cc | 4841
-rw-r--r--  webrtc/modules/audio_device/mac/audio_device_mac.h | 587
-rw-r--r--  webrtc/modules/audio_device/mac/audio_mixer_manager_mac.cc | 1907
-rw-r--r--  webrtc/modules/audio_device/mac/audio_mixer_manager_mac.h | 99
-rw-r--r--  webrtc/modules/audio_device/main/interface/audio_device.h | 16
-rw-r--r--  webrtc/modules/audio_device/main/source/OWNERS | 5
-rw-r--r--  webrtc/modules/audio_device/main/source/audio_device.gypi | 14
-rw-r--r--  webrtc/modules/audio_device/test/audio_device_test_api.cc | 40
-rw-r--r--webrtc/modules/audio_device/test/audio_device_test_defines.h2
-rw-r--r--webrtc/modules/audio_device/test/func_test_manager.cc34
-rw-r--r--webrtc/modules/audio_device/test/func_test_manager.h28
-rw-r--r--webrtc/modules/audio_device/win/audio_device_wave_win.cc30
-rw-r--r--webrtc/modules/audio_device/win/audio_device_wave_win.h5
-rw-r--r--webrtc/modules/audio_processing/BUILD.gn8
-rw-r--r--webrtc/modules/audio_processing/OWNERS1
-rw-r--r--webrtc/modules/audio_processing/aec/aec_core.c625
-rw-r--r--webrtc/modules/audio_processing/aec/aec_core_internal.h47
-rw-r--r--webrtc/modules/audio_processing/aec/aec_core_mips.c89
-rw-r--r--webrtc/modules/audio_processing/aec/aec_core_neon.c184
-rw-r--r--webrtc/modules/audio_processing/aec/aec_core_sse2.c193
-rw-r--r--webrtc/modules/audio_processing/aec/echo_cancellation.c139
-rw-r--r--webrtc/modules/audio_processing/aec/echo_cancellation.h251
-rw-r--r--webrtc/modules/audio_processing/aec/echo_cancellation_internal.h2
-rw-r--r--webrtc/modules/audio_processing/aec/echo_cancellation_unittest.cc2
-rw-r--r--webrtc/modules/audio_processing/aec/include/echo_cancellation.h245
-rw-r--r--webrtc/modules/audio_processing/aec/system_delay_unittest.cc3
-rw-r--r--webrtc/modules/audio_processing/aecm/aecm_core.c2
-rw-r--r--webrtc/modules/audio_processing/aecm/aecm_core_c.c2
-rw-r--r--webrtc/modules/audio_processing/aecm/aecm_core_mips.c2
-rw-r--r--webrtc/modules/audio_processing/aecm/echo_control_mobile.c160
-rw-r--r--webrtc/modules/audio_processing/aecm/echo_control_mobile.h209
-rw-r--r--webrtc/modules/audio_processing/aecm/include/echo_control_mobile.h218
-rw-r--r--webrtc/modules/audio_processing/agc/agc.cc2
-rw-r--r--webrtc/modules/audio_processing/agc/agc_manager_direct.cc15
-rw-r--r--webrtc/modules/audio_processing/agc/agc_unittest.cc2
-rw-r--r--webrtc/modules/audio_processing/agc/histogram.cc2
-rw-r--r--webrtc/modules/audio_processing/agc/legacy/analog_agc.c47
-rw-r--r--webrtc/modules/audio_processing/agc/legacy/gain_control.h14
-rw-r--r--webrtc/modules/audio_processing/agc/mock_agc.h2
-rw-r--r--webrtc/modules/audio_processing/audio_buffer.cc43
-rw-r--r--webrtc/modules/audio_processing/audio_buffer.h24
-rw-r--r--webrtc/modules/audio_processing/audio_processing.gypi8
-rw-r--r--webrtc/modules/audio_processing/audio_processing_impl.cc1099
-rw-r--r--webrtc/modules/audio_processing/audio_processing_impl.h366
-rw-r--r--webrtc/modules/audio_processing/audio_processing_impl_locking_unittest.cc1133
-rw-r--r--webrtc/modules/audio_processing/audio_processing_impl_unittest.cc2
-rw-r--r--webrtc/modules/audio_processing/audio_processing_performance_unittest.cc724
-rw-r--r--webrtc/modules/audio_processing/audio_processing_tests.gypi6
-rw-r--r--webrtc/modules/audio_processing/beamformer/array_util.cc25
-rw-r--r--webrtc/modules/audio_processing/beamformer/array_util.h9
-rw-r--r--webrtc/modules/audio_processing/beamformer/complex_matrix.h10
-rw-r--r--webrtc/modules/audio_processing/beamformer/covariance_matrix_generator.cc14
-rw-r--r--webrtc/modules/audio_processing/beamformer/matrix.h48
-rw-r--r--webrtc/modules/audio_processing/beamformer/matrix_test_helpers.h16
-rw-r--r--webrtc/modules/audio_processing/beamformer/nonlinear_beamformer.cc42
-rw-r--r--webrtc/modules/audio_processing/beamformer/nonlinear_beamformer.h8
-rw-r--r--webrtc/modules/audio_processing/beamformer/nonlinear_beamformer_test.cc5
-rw-r--r--webrtc/modules/audio_processing/common.h4
-rw-r--r--webrtc/modules/audio_processing/echo_cancellation_impl.cc269
-rw-r--r--webrtc/modules/audio_processing/echo_cancellation_impl.h50
-rw-r--r--webrtc/modules/audio_processing/echo_cancellation_impl_unittest.cc1
-rw-r--r--webrtc/modules/audio_processing/echo_control_mobile_impl.cc245
-rw-r--r--webrtc/modules/audio_processing/echo_control_mobile_impl.h40
-rw-r--r--webrtc/modules/audio_processing/gain_control_impl.cc218
-rw-r--r--webrtc/modules/audio_processing/gain_control_impl.h51
-rw-r--r--webrtc/modules/audio_processing/high_pass_filter_impl.cc222
-rw-r--r--webrtc/modules/audio_processing/high_pass_filter_impl.h35
-rw-r--r--webrtc/modules/audio_processing/include/audio_processing.h25
-rw-r--r--webrtc/modules/audio_processing/include/mock_audio_processing.h6
-rw-r--r--webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer.cc12
-rw-r--r--webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer.h16
-rw-r--r--webrtc/modules/audio_processing/intelligibility/test/intelligibility_proc.cc2
-rw-r--r--webrtc/modules/audio_processing/level_estimator_impl.cc75
-rw-r--r--webrtc/modules/audio_processing/level_estimator_impl.h39
-rw-r--r--webrtc/modules/audio_processing/logging/aec_logging.h22
-rw-r--r--webrtc/modules/audio_processing/noise_suppression_impl.cc220
-rw-r--r--webrtc/modules/audio_processing/noise_suppression_impl.h47
-rw-r--r--webrtc/modules/audio_processing/ns/include/noise_suppression.h116
-rw-r--r--webrtc/modules/audio_processing/ns/include/noise_suppression_x.h88
-rw-r--r--webrtc/modules/audio_processing/ns/noise_suppression.c2
-rw-r--r--webrtc/modules/audio_processing/ns/noise_suppression.h116
-rw-r--r--webrtc/modules/audio_processing/ns/noise_suppression_x.c2
-rw-r--r--webrtc/modules/audio_processing/ns/noise_suppression_x.h88
-rw-r--r--webrtc/modules/audio_processing/ns/ns_core.c2
-rw-r--r--webrtc/modules/audio_processing/ns/nsx_core.c2
-rw-r--r--webrtc/modules/audio_processing/ns/nsx_core_c.c2
-rw-r--r--webrtc/modules/audio_processing/ns/nsx_core_mips.c2
-rw-r--r--webrtc/modules/audio_processing/processing_component.cc14
-rw-r--r--webrtc/modules/audio_processing/processing_component.h24
-rw-r--r--webrtc/modules/audio_processing/splitting_filter.cc16
-rw-r--r--webrtc/modules/audio_processing/splitting_filter.h2
-rw-r--r--webrtc/modules/audio_processing/test/audio_file_processor.cc180
-rw-r--r--webrtc/modules/audio_processing/test/audio_file_processor.h139
-rw-r--r--webrtc/modules/audio_processing/test/audio_processing_unittest.cc237
-rw-r--r--webrtc/modules/audio_processing/test/audioproc_float.cc217
-rw-r--r--webrtc/modules/audio_processing/test/debug_dump_test.cc612
-rw-r--r--webrtc/modules/audio_processing/test/process_test.cc53
-rw-r--r--webrtc/modules/audio_processing/test/test_utils.cc63
-rw-r--r--webrtc/modules/audio_processing/test/test_utils.h48
-rw-r--r--webrtc/modules/audio_processing/test/unpack.cc37
-rw-r--r--webrtc/modules/audio_processing/transient/file_utils_unittest.cc96
-rw-r--r--webrtc/modules/audio_processing/transient/transient_detector_unittest.cc9
-rw-r--r--webrtc/modules/audio_processing/transient/transient_suppression_test.cc2
-rw-r--r--webrtc/modules/audio_processing/transient/wpd_tree_unittest.cc7
-rw-r--r--webrtc/modules/audio_processing/typing_detection.h2
-rw-r--r--webrtc/modules/audio_processing/vad/pitch_based_vad.cc2
-rw-r--r--webrtc/modules/audio_processing/vad/standalone_vad.cc4
-rw-r--r--webrtc/modules/audio_processing/vad/standalone_vad_unittest.cc9
-rw-r--r--webrtc/modules/audio_processing/vad/vad_audio_proc.cc2
-rw-r--r--webrtc/modules/audio_processing/vad/vad_audio_proc_unittest.cc2
-rw-r--r--webrtc/modules/audio_processing/vad/voice_activity_detector.cc2
-rw-r--r--webrtc/modules/audio_processing/voice_detection_impl.cc184
-rw-r--r--webrtc/modules/audio_processing/voice_detection_impl.h51
-rw-r--r--webrtc/modules/bitrate_controller/BUILD.gn1
-rw-r--r--webrtc/modules/bitrate_controller/bitrate_allocator.cc190
-rw-r--r--webrtc/modules/bitrate_controller/bitrate_allocator_unittest.cc212
-rw-r--r--webrtc/modules/bitrate_controller/bitrate_controller.gypi2
-rw-r--r--webrtc/modules/bitrate_controller/bitrate_controller_impl.cc7
-rw-r--r--webrtc/modules/bitrate_controller/bitrate_controller_impl.h2
-rw-r--r--webrtc/modules/bitrate_controller/bitrate_controller_unittest.cc2
-rw-r--r--webrtc/modules/bitrate_controller/include/bitrate_allocator.h99
-rw-r--r--webrtc/modules/bitrate_controller/include/bitrate_controller.h7
-rw-r--r--webrtc/modules/bitrate_controller/include/mock/mock_bitrate_controller.h30
-rw-r--r--webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.cc44
-rw-r--r--webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.h8
-rw-r--r--webrtc/modules/desktop_capture/differ_block.cc4
-rw-r--r--webrtc/modules/desktop_capture/screen_capturer_win.cc4
-rw-r--r--webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.cc13
-rw-r--r--webrtc/modules/desktop_capture/window_capturer_win.cc11
-rw-r--r--webrtc/modules/include/module.h81
-rw-r--r--webrtc/modules/include/module_common_types.h809
-rw-r--r--webrtc/modules/interface/module.h81
-rw-r--r--webrtc/modules/interface/module_common_types.h810
-rw-r--r--webrtc/modules/media_file/BUILD.gn12
-rw-r--r--webrtc/modules/media_file/OWNERS15
-rw-r--r--webrtc/modules/media_file/interface/media_file.h180
-rw-r--r--webrtc/modules/media_file/interface/media_file_defines.h51
-rw-r--r--webrtc/modules/media_file/media_file.gypi12
-rw-r--r--webrtc/modules/media_file/media_file.h180
-rw-r--r--webrtc/modules/media_file/media_file_defines.h51
-rw-r--r--webrtc/modules/media_file/media_file_impl.cc1137
-rw-r--r--webrtc/modules/media_file/media_file_impl.h148
-rw-r--r--webrtc/modules/media_file/media_file_unittest.cc106
-rw-r--r--webrtc/modules/media_file/media_file_utility.cc1559
-rw-r--r--webrtc/modules/media_file/media_file_utility.h284
-rw-r--r--webrtc/modules/media_file/source/OWNERS5
-rw-r--r--webrtc/modules/media_file/source/media_file_impl.cc1137
-rw-r--r--webrtc/modules/media_file/source/media_file_impl.h148
-rw-r--r--webrtc/modules/media_file/source/media_file_unittest.cc96
-rw-r--r--webrtc/modules/media_file/source/media_file_utility.cc1656
-rw-r--r--webrtc/modules/media_file/source/media_file_utility.h284
-rw-r--r--webrtc/modules/module_common_types_unittest.cc2
-rw-r--r--webrtc/modules/modules.gyp756
-rw-r--r--webrtc/modules/modules_unittests.isolate3
-rw-r--r--webrtc/modules/pacing/BUILD.gn4
-rw-r--r--webrtc/modules/pacing/bitrate_prober.cc4
-rw-r--r--webrtc/modules/pacing/include/mock/mock_paced_sender.h38
-rw-r--r--webrtc/modules/pacing/include/paced_sender.h153
-rw-r--r--webrtc/modules/pacing/include/packet_router.h66
-rw-r--r--webrtc/modules/pacing/mock/mock_paced_sender.h38
-rw-r--r--webrtc/modules/pacing/paced_sender.cc141
-rw-r--r--webrtc/modules/pacing/paced_sender.h164
-rw-r--r--webrtc/modules/pacing/paced_sender_unittest.cc100
-rw-r--r--webrtc/modules/pacing/pacing.gypi4
-rw-r--r--webrtc/modules/pacing/packet_router.cc6
-rw-r--r--webrtc/modules/pacing/packet_router.h66
-rw-r--r--webrtc/modules/pacing/packet_router_unittest.cc4
-rw-r--r--webrtc/modules/remote_bitrate_estimator/aimd_rate_control.cc19
-rw-r--r--webrtc/modules/remote_bitrate_estimator/aimd_rate_control.h2
-rw-r--r--webrtc/modules/remote_bitrate_estimator/bwe_simulations.cc26
-rw-r--r--webrtc/modules/remote_bitrate_estimator/include/bwe_defines.h57
-rw-r--r--webrtc/modules/remote_bitrate_estimator/include/mock/mock_remote_bitrate_observer.h6
-rw-r--r--webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h6
-rw-r--r--webrtc/modules/remote_bitrate_estimator/include/send_time_history.h6
-rw-r--r--webrtc/modules/remote_bitrate_estimator/inter_arrival.cc5
-rw-r--r--webrtc/modules/remote_bitrate_estimator/overuse_detector.cc6
-rw-r--r--webrtc/modules/remote_bitrate_estimator/overuse_detector.h2
-rw-r--r--webrtc/modules/remote_bitrate_estimator/overuse_detector_unittest.cc147
-rw-r--r--webrtc/modules/remote_bitrate_estimator/overuse_estimator.cc5
-rw-r--r--webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc7
-rw-r--r--webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time_unittest.cc1
-rw-r--r--webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc33
-rw-r--r--webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream_unittest.cc1
-rw-r--r--webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.cc21
-rw-r--r--webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h1
-rw-r--r--webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimators_test.cc34
-rw-r--r--webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.cc6
-rw-r--r--webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.h2
-rw-r--r--webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc2
-rw-r--r--webrtc/modules/remote_bitrate_estimator/test/bwe.h3
-rw-r--r--webrtc/modules/remote_bitrate_estimator/test/bwe_test.cc17
-rw-r--r--webrtc/modules/remote_bitrate_estimator/test/bwe_test_baselinefile.h2
-rw-r--r--webrtc/modules/remote_bitrate_estimator/test/bwe_test_fileutils.h2
-rw-r--r--webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.cc13
-rw-r--r--webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.h19
-rw-r--r--webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework_unittest.cc62
-rw-r--r--webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.cc28
-rw-r--r--webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.h28
-rw-r--r--webrtc/modules/remote_bitrate_estimator/test/bwe_unittest.cc3
-rw-r--r--webrtc/modules/remote_bitrate_estimator/test/estimators/nada.cc5
-rw-r--r--webrtc/modules/remote_bitrate_estimator/test/estimators/nada.h2
-rw-r--r--webrtc/modules/remote_bitrate_estimator/test/estimators/nada_unittest.cc7
-rw-r--r--webrtc/modules/remote_bitrate_estimator/test/estimators/remb.cc2
-rw-r--r--webrtc/modules/remote_bitrate_estimator/test/estimators/tcp.cc2
-rw-r--r--webrtc/modules/remote_bitrate_estimator/test/metric_recorder.cc4
-rw-r--r--webrtc/modules/remote_bitrate_estimator/test/packet.h2
-rw-r--r--webrtc/modules/remote_bitrate_estimator/test/packet_receiver.cc4
-rw-r--r--webrtc/modules/remote_bitrate_estimator/test/packet_sender.cc2
-rw-r--r--webrtc/modules/remote_bitrate_estimator/test/packet_sender.h5
-rw-r--r--webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.cc8
-rw-r--r--webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp_play.cc4
-rw-r--r--webrtc/modules/remote_bitrate_estimator/tools/rtp_to_text.cc4
-rw-r--r--webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter.cc5
-rw-r--r--webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter.h4
-rw-r--r--webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter_unittest.cc4
-rw-r--r--webrtc/modules/rtp_rtcp/BUILD.gn48
-rw-r--r--webrtc/modules/rtp_rtcp/OWNERS17
-rw-r--r--webrtc/modules/rtp_rtcp/include/fec_receiver.h46
-rw-r--r--webrtc/modules/rtp_rtcp/include/receive_statistics.h102
-rw-r--r--webrtc/modules/rtp_rtcp/include/remote_ntp_time_estimator.h51
-rw-r--r--webrtc/modules/rtp_rtcp/include/rtp_cvo.h54
-rw-r--r--webrtc/modules/rtp_rtcp/include/rtp_header_parser.h44
-rw-r--r--webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h203
-rw-r--r--webrtc/modules/rtp_rtcp/include/rtp_receiver.h103
-rw-r--r--webrtc/modules/rtp_rtcp/include/rtp_rtcp.h653
-rw-r--r--webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h416
-rw-r--r--webrtc/modules/rtp_rtcp/interface/fec_receiver.h46
-rw-r--r--webrtc/modules/rtp_rtcp/interface/receive_statistics.h102
-rw-r--r--webrtc/modules/rtp_rtcp/interface/remote_ntp_time_estimator.h51
-rw-r--r--webrtc/modules/rtp_rtcp/interface/rtp_cvo.h54
-rw-r--r--webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h44
-rw-r--r--webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h193
-rw-r--r--webrtc/modules/rtp_rtcp/interface/rtp_receiver.h103
-rw-r--r--webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h641
-rw-r--r--webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h440
-rw-r--r--webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h33
-rw-r--r--webrtc/modules/rtp_rtcp/rtp_rtcp.gypi48
-rw-r--r--webrtc/modules/rtp_rtcp/source/CPPLINT.cfg6
-rw-r--r--webrtc/modules/rtp_rtcp/source/byte_io.h15
-rw-r--r--webrtc/modules/rtp_rtcp/source/dtmf_queue.cc6
-rw-r--r--webrtc/modules/rtp_rtcp/source/fec_private_tables_bursty.h6
-rw-r--r--webrtc/modules/rtp_rtcp/source/fec_private_tables_random.h12
-rw-r--r--webrtc/modules/rtp_rtcp/source/fec_receiver_impl.h6
-rw-r--r--webrtc/modules/rtp_rtcp/source/fec_receiver_unittest.cc4
-rw-r--r--webrtc/modules/rtp_rtcp/source/fec_test_helper.h4
-rw-r--r--webrtc/modules/rtp_rtcp/source/forward_error_correction.cc178
-rw-r--r--webrtc/modules/rtp_rtcp/source/forward_error_correction.h2
-rw-r--r--webrtc/modules/rtp_rtcp/source/forward_error_correction_internal.cc106
-rw-r--r--webrtc/modules/rtp_rtcp/source/h264_bitstream_parser.h2
-rw-r--r--webrtc/modules/rtp_rtcp/source/h264_sps_parser_unittest.cc11
-rw-r--r--webrtc/modules/rtp_rtcp/source/mock/mock_rtp_payload_strategy.h12
-rw-r--r--webrtc/modules/rtp_rtcp/source/nack_rtx_unittest.cc68
-rw-r--r--webrtc/modules/rtp_rtcp/source/producer_fec_unittest.cc49
-rw-r--r--webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc29
-rw-r--r--webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h11
-rw-r--r--webrtc/modules/rtp_rtcp/source/receive_statistics_unittest.cc2
-rw-r--r--webrtc/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc2
-rw-r--r--webrtc/modules/rtp_rtcp/source/remote_ntp_time_estimator_unittest.cc2
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_format_remb_unittest.cc17
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet.cc647
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet.h692
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/app.cc79
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/app.h66
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/app_unittest.cc81
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/bye.cc133
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/bye.h63
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/bye_unittest.cc173
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet.cc28
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet.h41
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet_unittest.cc157
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/dlrr.cc100
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/dlrr.h63
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/dlrr_unittest.cc102
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report.cc95
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report.h63
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report_unittest.cc98
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/nack.cc163
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/nack.h63
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/nack_unittest.cc190
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/pli.cc70
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/pli.h49
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/pli_unittest.cc66
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/psfb.cc45
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/psfb.h48
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/receiver_report.cc89
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/receiver_report.h66
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/receiver_report_unittest.cc145
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/report_block.cc89
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/report_block.h67
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/report_block_unittest.cc86
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/rrtr.cc49
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/rrtr.h49
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/rrtr_unittest.cc51
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/rtpfb.cc45
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/rtpfb.h48
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/sli.cc108
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/sli.h81
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/sli_unittest.cc91
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbn.cc119
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbn.h60
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbn_unittest.cc84
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbr.cc105
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbr.h64
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbr_unittest.cc43
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h2
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/voip_metric.cc107
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/voip_metric.h53
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet/voip_metric_unittest.cc93
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_packet_unittest.cc561
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc13
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_receiver.h6
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.cc3
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.h4
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc34
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_sender.cc787
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_sender.h484
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_utility.cc5
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtcp_utility.h4
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtp_fec_unittest.cc29
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtp_format.h4
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtp_format_h264.cc2
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtp_format_h264_unittest.cc2
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.cc2
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtp_format_vp8.h2
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h2
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtp_format_vp9.cc72
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtp_format_vp9.h2
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtp_format_vp9_unittest.cc33
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtp_header_extension.h13
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtp_header_extension_unittest.cc2
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtp_header_parser.cc4
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc2
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtp_packet_history.h12
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc8
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtp_payload_registry.cc30
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtp_payload_registry_unittest.cc141
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.h20
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.cc32
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.h8
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h6
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc4
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtp_receiver_video.h2
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtp_rtcp_config.h39
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc28
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h14
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc32
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtp_sender.cc63
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtp_sender.h23
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc450
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h171
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc270
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc46
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtp_sender_video.h18
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtp_utility.cc151
-rw-r--r--webrtc/modules/rtp_rtcp/source/rtp_utility.h114
-rw-r--r--webrtc/modules/rtp_rtcp/source/ssrc_database.cc111
-rw-r--r--webrtc/modules/rtp_rtcp/source/ssrc_database.h48
-rw-r--r--webrtc/modules/rtp_rtcp/source/time_util.h48
-rw-r--r--webrtc/modules/rtp_rtcp/source/time_util_unittest.cc62
-rw-r--r--webrtc/modules/rtp_rtcp/source/tmmbr_help.cc4
-rw-r--r--webrtc/modules/rtp_rtcp/source/video_codec_information.h13
-rw-r--r--webrtc/modules/rtp_rtcp/source/vp8_partition_aggregator.cc36
-rw-r--r--webrtc/modules/rtp_rtcp/source/vp8_partition_aggregator.h4
-rw-r--r--webrtc/modules/rtp_rtcp/source/vp8_partition_aggregator_unittest.cc2
-rw-r--r--webrtc/modules/rtp_rtcp/test/BWEStandAlone/BWEStandAlone.cc199
-rw-r--r--webrtc/modules/rtp_rtcp/test/BWEStandAlone/MatlabPlot.cc1055
-rw-r--r--webrtc/modules/rtp_rtcp/test/BWEStandAlone/MatlabPlot.h170
-rw-r--r--webrtc/modules/rtp_rtcp/test/BWEStandAlone/TestLoadGenerator.cc432
-rw-r--r--webrtc/modules/rtp_rtcp/test/BWEStandAlone/TestLoadGenerator.h149
-rw-r--r--webrtc/modules/rtp_rtcp/test/BWEStandAlone/TestSenderReceiver.cc411
-rw-r--r--webrtc/modules/rtp_rtcp/test/BWEStandAlone/TestSenderReceiver.h153
-rw-r--r--webrtc/modules/rtp_rtcp/test/bwe_standalone.gypi85
-rw-r--r--webrtc/modules/rtp_rtcp/test/testAPI/test_api.cc7
-rw-r--r--webrtc/modules/rtp_rtcp/test/testAPI/test_api.h15
-rw-r--r--webrtc/modules/rtp_rtcp/test/testAPI/test_api_audio.cc23
-rw-r--r--webrtc/modules/rtp_rtcp/test/testAPI/test_api_rtcp.cc29
-rw-r--r--webrtc/modules/rtp_rtcp/test/testAPI/test_api_video.cc6
-rw-r--r--webrtc/modules/rtp_rtcp/test/testFec/average_residual_loss_xor_codes.h7
-rw-r--r--webrtc/modules/rtp_rtcp/test/testFec/test_fec.cc120
-rw-r--r--webrtc/modules/rtp_rtcp/test/testFec/test_packet_masks_metrics.cc27
-rw-r--r--webrtc/modules/utility/BUILD.gn12
-rw-r--r--webrtc/modules/utility/OWNERS5
-rw-r--r--webrtc/modules/utility/include/audio_frame_operations.h58
-rw-r--r--webrtc/modules/utility/include/file_player.h111
-rw-r--r--webrtc/modules/utility/include/file_recorder.h84
-rw-r--r--webrtc/modules/utility/include/helpers_android.h87
-rw-r--r--webrtc/modules/utility/include/helpers_ios.h59
-rw-r--r--webrtc/modules/utility/include/jvm_android.h185
-rw-r--r--webrtc/modules/utility/include/mock/mock_process_thread.h38
-rw-r--r--webrtc/modules/utility/include/process_thread.h66
-rw-r--r--webrtc/modules/utility/interface/audio_frame_operations.h58
-rw-r--r--webrtc/modules/utility/interface/file_player.h111
-rw-r--r--webrtc/modules/utility/interface/file_recorder.h84
-rw-r--r--webrtc/modules/utility/interface/helpers_android.h87
-rw-r--r--webrtc/modules/utility/interface/helpers_ios.h59
-rw-r--r--webrtc/modules/utility/interface/jvm_android.h185
-rw-r--r--webrtc/modules/utility/interface/mock/mock_process_thread.h38
-rw-r--r--webrtc/modules/utility/interface/process_thread.h66
-rw-r--r--webrtc/modules/utility/source/audio_frame_operations.cc4
-rw-r--r--webrtc/modules/utility/source/audio_frame_operations_unittest.cc4
-rw-r--r--webrtc/modules/utility/source/coder.cc2
-rw-r--r--webrtc/modules/utility/source/coder.h2
-rw-r--r--webrtc/modules/utility/source/file_player_impl.h6
-rw-r--r--webrtc/modules/utility/source/file_player_unittests.cc17
-rw-r--r--webrtc/modules/utility/source/file_recorder_impl.cc2
-rw-r--r--webrtc/modules/utility/source/file_recorder_impl.h10
-rw-r--r--webrtc/modules/utility/source/helpers_android.cc2
-rw-r--r--webrtc/modules/utility/source/helpers_ios.mm2
-rw-r--r--webrtc/modules/utility/source/jvm_android.cc2
-rw-r--r--webrtc/modules/utility/source/process_thread_impl.cc13
-rw-r--r--webrtc/modules/utility/source/process_thread_impl.h7
-rw-r--r--webrtc/modules/utility/source/process_thread_impl_unittest.cc13
-rw-r--r--webrtc/modules/utility/utility.gypi14
-rw-r--r--webrtc/modules/video_capture/BUILD.gn6
-rw-r--r--webrtc/modules/video_capture/device_info_impl.h2
-rw-r--r--webrtc/modules/video_capture/include/video_capture.h160
-rw-r--r--webrtc/modules/video_capture/include/video_capture_defines.h118
-rw-r--r--webrtc/modules/video_capture/include/video_capture_factory.h45
-rw-r--r--webrtc/modules/video_capture/ios/device_info_ios_objc.h2
-rw-r--r--webrtc/modules/video_capture/linux/video_capture_linux.cc6
-rw-r--r--webrtc/modules/video_capture/linux/video_capture_linux.h5
-rw-r--r--webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info.mm2
-rw-r--r--webrtc/modules/video_capture/test/video_capture_unittest.cc36
-rw-r--r--webrtc/modules/video_capture/video_capture.gypi23
-rw-r--r--webrtc/modules/video_capture/video_capture.h160
-rw-r--r--webrtc/modules/video_capture/video_capture_defines.h118
-rw-r--r--webrtc/modules/video_capture/video_capture_factory.cc2
-rw-r--r--webrtc/modules/video_capture/video_capture_factory.h45
-rw-r--r--webrtc/modules/video_capture/video_capture_impl.cc2
-rw-r--r--webrtc/modules/video_capture/video_capture_impl.h2
-rw-r--r--webrtc/modules/video_capture/windows/sink_filter_ds.h2
-rw-r--r--webrtc/modules/video_coding/BUILD.gn146
-rw-r--r--webrtc/modules/video_coding/OWNERS5
-rw-r--r--webrtc/modules/video_coding/codec_database.cc616
-rw-r--r--webrtc/modules/video_coding/codec_database.h167
-rw-r--r--webrtc/modules/video_coding/codec_timer.cc96
-rw-r--r--webrtc/modules/video_coding/codec_timer.h57
-rw-r--r--webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_decoder.cc19
-rw-r--r--webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_decoder.h2
-rw-r--r--webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.cc43
-rw-r--r--webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.h2
-rw-r--r--webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_nalu.cc9
-rw-r--r--webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_nalu.h17
-rw-r--r--webrtc/modules/video_coding/codecs/h264/include/h264.h2
-rw-r--r--webrtc/modules/video_coding/codecs/i420/i420.cc55
-rw-r--r--webrtc/modules/video_coding/codecs/i420/include/i420.h145
-rw-r--r--webrtc/modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h43
-rw-r--r--webrtc/modules/video_coding/codecs/interface/video_codec_interface.h28
-rw-r--r--webrtc/modules/video_coding/codecs/interface/video_error_codes.h9
-rw-r--r--webrtc/modules/video_coding/codecs/test/packet_manipulator.cc8
-rw-r--r--webrtc/modules/video_coding/codecs/test/packet_manipulator.h13
-rw-r--r--webrtc/modules/video_coding/codecs/test/packet_manipulator_unittest.cc27
-rw-r--r--webrtc/modules/video_coding/codecs/test/predictive_packet_manipulator.cc17
-rw-r--r--webrtc/modules/video_coding/codecs/test/predictive_packet_manipulator.h1
-rw-r--r--webrtc/modules/video_coding/codecs/test/stats.cc69
-rw-r--r--webrtc/modules/video_coding/codecs/test/stats.h2
-rw-r--r--webrtc/modules/video_coding/codecs/test/stats_unittest.cc16
-rw-r--r--webrtc/modules/video_coding/codecs/test/videoprocessor.cc67
-rw-r--r--webrtc/modules/video_coding/codecs/test/videoprocessor.h15
-rw-r--r--webrtc/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc346
-rw-r--r--webrtc/modules/video_coding/codecs/test/videoprocessor_unittest.cc44
-rw-r--r--webrtc/modules/video_coding/codecs/tools/video_quality_measurement.cc320
-rw-r--r--webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.cc26
-rw-r--r--webrtc/modules/video_coding/codecs/vp8/default_temporal_layers_unittest.cc157
-rw-r--r--webrtc/modules/video_coding/codecs/vp8/include/vp8.h9
-rw-r--r--webrtc/modules/video_coding/codecs/vp8/include/vp8_common_types.h14
-rw-r--r--webrtc/modules/video_coding/codecs/vp8/realtime_temporal_layers.cc43
-rw-r--r--webrtc/modules/video_coding/codecs/vp8/reference_picture_selection.cc20
-rw-r--r--webrtc/modules/video_coding/codecs/vp8/reference_picture_selection_unittest.cc40
-rw-r--r--webrtc/modules/video_coding/codecs/vp8/screenshare_layers.cc6
-rw-r--r--webrtc/modules/video_coding/codecs/vp8/screenshare_layers.h2
-rw-r--r--webrtc/modules/video_coding/codecs/vp8/screenshare_layers_unittest.cc4
-rw-r--r--webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.cc58
-rw-r--r--webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.h10
-rw-r--r--webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter_unittest.cc33
-rw-r--r--webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.cc14
-rw-r--r--webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h282
-rw-r--r--webrtc/modules/video_coding/codecs/vp8/temporal_layers.h5
-rw-r--r--webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc22
-rw-r--r--webrtc/modules/video_coding/codecs/vp8/vp8_factory.h1
-rw-r--r--webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc262
-rw-r--r--webrtc/modules/video_coding/codecs/vp8/vp8_impl.h24
-rw-r--r--webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc93
-rw-r--r--webrtc/modules/video_coding/codecs/vp9/include/vp9.h3
-rw-r--r--webrtc/modules/video_coding/codecs/vp9/screenshare_layers.cc93
-rw-r--r--webrtc/modules/video_coding/codecs/vp9/screenshare_layers.h66
-rw-r--r--webrtc/modules/video_coding/codecs/vp9/screenshare_layers_unittest.cc323
-rw-r--r--webrtc/modules/video_coding/codecs/vp9/vp9.gyp32
-rw-r--r--webrtc/modules/video_coding/codecs/vp9/vp9_dummy_impl.cc19
-rw-r--r--webrtc/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc2
-rw-r--r--webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc411
-rw-r--r--webrtc/modules/video_coding/codecs/vp9/vp9_impl.h49
-rw-r--r--webrtc/modules/video_coding/content_metrics_processing.cc124
-rw-r--r--webrtc/modules/video_coding/content_metrics_processing.h72
-rw-r--r--webrtc/modules/video_coding/decoding_state.cc285
-rw-r--r--webrtc/modules/video_coding/decoding_state.h82
-rw-r--r--webrtc/modules/video_coding/decoding_state_unittest.cc699
-rw-r--r--webrtc/modules/video_coding/encoded_frame.cc225
-rw-r--r--webrtc/modules/video_coding/encoded_frame.h132
-rw-r--r--webrtc/modules/video_coding/fec_tables_xor.h459
-rw-r--r--webrtc/modules/video_coding/frame_buffer.cc270
-rw-r--r--webrtc/modules/video_coding/frame_buffer.h92
-rw-r--r--webrtc/modules/video_coding/generic_decoder.cc192
-rw-r--r--webrtc/modules/video_coding/generic_decoder.h111
-rw-r--r--webrtc/modules/video_coding/generic_encoder.cc311
-rw-r--r--webrtc/modules/video_coding/generic_encoder.h149
-rw-r--r--webrtc/modules/video_coding/include/mock/mock_vcm_callbacks.h34
-rw-r--r--webrtc/modules/video_coding/include/mock/mock_video_codec_interface.h81
-rw-r--r--webrtc/modules/video_coding/include/video_codec_interface.h99
-rw-r--r--webrtc/modules/video_coding/include/video_coding.h519
-rw-r--r--webrtc/modules/video_coding/include/video_coding_defines.h198
-rw-r--r--webrtc/modules/video_coding/include/video_error_codes.h32
-rw-r--r--webrtc/modules/video_coding/inter_frame_delay.cc107
-rw-r--r--webrtc/modules/video_coding/inter_frame_delay.h67
-rw-r--r--webrtc/modules/video_coding/internal_defines.h41
-rw-r--r--webrtc/modules/video_coding/jitter_buffer.cc1346
-rw-r--r--webrtc/modules/video_coding/jitter_buffer.h389
-rw-r--r--webrtc/modules/video_coding/jitter_buffer_common.h72
-rw-r--r--webrtc/modules/video_coding/jitter_buffer_unittest.cc2571
-rw-r--r--webrtc/modules/video_coding/jitter_estimator.cc443
-rw-r--r--webrtc/modules/video_coding/jitter_estimator.h170
-rw-r--r--webrtc/modules/video_coding/jitter_estimator_tests.cc160
-rw-r--r--webrtc/modules/video_coding/main/interface/mock/mock_vcm_callbacks.h35
-rw-r--r--webrtc/modules/video_coding/main/interface/video_coding.h544
-rw-r--r--webrtc/modules/video_coding/main/interface/video_coding_defines.h201
-rw-r--r--webrtc/modules/video_coding/main/source/OWNERS5
-rw-r--r--webrtc/modules/video_coding/main/source/codec_database.cc687
-rw-r--r--webrtc/modules/video_coding/main/source/codec_database.h184
-rw-r--r--webrtc/modules/video_coding/main/source/codec_timer.cc136
-rw-r--r--webrtc/modules/video_coding/main/source/codec_timer.h62
-rw-r--r--webrtc/modules/video_coding/main/source/content_metrics_processing.cc125
-rw-r--r--webrtc/modules/video_coding/main/source/content_metrics_processing.h76
-rw-r--r--webrtc/modules/video_coding/main/source/decoding_state.cc223
-rw-r--r--webrtc/modules/video_coding/main/source/decoding_state.h70
-rw-r--r--webrtc/modules/video_coding/main/source/decoding_state_unittest.cc449
-rw-r--r--webrtc/modules/video_coding/main/source/encoded_frame.cc229
-rw-r--r--webrtc/modules/video_coding/main/source/encoded_frame.h127
-rw-r--r--webrtc/modules/video_coding/main/source/fec_tables_xor.h6481
-rw-r--r--webrtc/modules/video_coding/main/source/frame_buffer.cc297
-rw-r--r--webrtc/modules/video_coding/main/source/frame_buffer.h92
-rw-r--r--webrtc/modules/video_coding/main/source/generic_decoder.cc198
-rw-r--r--webrtc/modules/video_coding/main/source/generic_decoder.h112
-rw-r--r--webrtc/modules/video_coding/main/source/generic_encoder.cc298
-rw-r--r--webrtc/modules/video_coding/main/source/generic_encoder.h142
-rw-r--r--webrtc/modules/video_coding/main/source/inter_frame_delay.cc114
-rw-r--r--webrtc/modules/video_coding/main/source/inter_frame_delay.h66
-rw-r--r--webrtc/modules/video_coding/main/source/internal_defines.h68
-rw-r--r--webrtc/modules/video_coding/main/source/jitter_buffer.cc1339
-rw-r--r--webrtc/modules/video_coding/main/source/jitter_buffer.h396
-rw-r--r--webrtc/modules/video_coding/main/source/jitter_buffer_common.h72
-rw-r--r--webrtc/modules/video_coding/main/source/jitter_buffer_unittest.cc2575
-rw-r--r--webrtc/modules/video_coding/main/source/jitter_estimator.cc482
-rw-r--r--webrtc/modules/video_coding/main/source/jitter_estimator.h165
-rw-r--r--webrtc/modules/video_coding/main/source/jitter_estimator_tests.cc160
-rw-r--r--webrtc/modules/video_coding/main/source/media_opt_util.cc774
-rw-r--r--webrtc/modules/video_coding/main/source/media_opt_util.h364
-rw-r--r--webrtc/modules/video_coding/main/source/media_optimization.cc648
-rw-r--r--webrtc/modules/video_coding/main/source/media_optimization.h180
-rw-r--r--webrtc/modules/video_coding/main/source/media_optimization_unittest.cc155
-rw-r--r--webrtc/modules/video_coding/main/source/nack_fec_tables.h126
-rw-r--r--webrtc/modules/video_coding/main/source/packet.cc154
-rw-r--r--webrtc/modules/video_coding/main/source/packet.h59
-rw-r--r--webrtc/modules/video_coding/main/source/qm_select.cc958
-rw-r--r--webrtc/modules/video_coding/main/source/qm_select.h373
-rw-r--r--webrtc/modules/video_coding/main/source/qm_select_data.h227
-rw-r--r--webrtc/modules/video_coding/main/source/qm_select_unittest.cc1311
-rw-r--r--webrtc/modules/video_coding/main/source/receiver.cc268
-rw-r--r--webrtc/modules/video_coding/main/source/receiver.h92
-rw-r--r--webrtc/modules/video_coding/main/source/receiver_unittest.cc526
-rw-r--r--webrtc/modules/video_coding/main/source/rtt_filter.cc202
-rw-r--r--webrtc/modules/video_coding/main/source/rtt_filter.h68
-rw-r--r--webrtc/modules/video_coding/main/source/session_info.cc580
-rw-r--r--webrtc/modules/video_coding/main/source/session_info.h172
-rw-r--r--webrtc/modules/video_coding/main/source/session_info_unittest.cc1064
-rw-r--r--webrtc/modules/video_coding/main/source/test/stream_generator.cc127
-rw-r--r--webrtc/modules/video_coding/main/source/test/stream_generator.h72
-rw-r--r--webrtc/modules/video_coding/main/source/timestamp_map.cc65
-rw-r--r--webrtc/modules/video_coding/main/source/timestamp_map.h47
-rw-r--r--webrtc/modules/video_coding/main/source/timing.cc279
-rw-r--r--webrtc/modules/video_coding/main/source/timing.h127
-rw-r--r--webrtc/modules/video_coding/main/source/timing_unittest.cc147
-rw-r--r--webrtc/modules/video_coding/main/source/video_coding_impl.cc359
-rw-r--r--webrtc/modules/video_coding/main/source/video_coding_impl.h237
-rw-r--r--webrtc/modules/video_coding/main/source/video_coding_robustness_unittest.cc238
-rw-r--r--webrtc/modules/video_coding/main/source/video_receiver.cc578
-rw-r--r--webrtc/modules/video_coding/main/source/video_receiver_unittest.cc211
-rw-r--r--webrtc/modules/video_coding/main/source/video_sender.cc376
-rw-r--r--webrtc/modules/video_coding/main/source/video_sender_unittest.cc494
-rw-r--r--webrtc/modules/video_coding/main/test/receiver_tests.h43
-rw-r--r--webrtc/modules/video_coding/main/test/release_test.h17
-rw-r--r--webrtc/modules/video_coding/main/test/rtp_player.cc493
-rw-r--r--webrtc/modules/video_coding/main/test/rtp_player.h97
-rw-r--r--webrtc/modules/video_coding/main/test/test_util.cc139
-rw-r--r--webrtc/modules/video_coding/main/test/test_util.h86
-rw-r--r--webrtc/modules/video_coding/main/test/tester_main.cc75
-rw-r--r--webrtc/modules/video_coding/main/test/vcm_payload_sink_factory.cc210
-rw-r--r--webrtc/modules/video_coding/main/test/vcm_payload_sink_factory.h63
-rw-r--r--webrtc/modules/video_coding/main/test/video_rtp_play.cc88
-rw-r--r--webrtc/modules/video_coding/main/test/video_source.h82
-rw-r--r--webrtc/modules/video_coding/media_opt_util.cc682
-rw-r--r--webrtc/modules/video_coding/media_opt_util.h361
-rw-r--r--webrtc/modules/video_coding/media_optimization.cc633
-rw-r--r--webrtc/modules/video_coding/media_optimization.h174
-rw-r--r--webrtc/modules/video_coding/media_optimization_unittest.cc154
-rw-r--r--webrtc/modules/video_coding/nack_fec_tables.h31
-rw-r--r--webrtc/modules/video_coding/packet.cc153
-rw-r--r--webrtc/modules/video_coding/packet.h59
-rw-r--r--webrtc/modules/video_coding/qm_select.cc953
-rw-r--r--webrtc/modules/video_coding/qm_select.h356
-rw-r--r--webrtc/modules/video_coding/qm_select_data.h227
-rw-r--r--webrtc/modules/video_coding/qm_select_unittest.cc1307
-rw-r--r--webrtc/modules/video_coding/receiver.cc269
-rw-r--r--webrtc/modules/video_coding/receiver.h92
-rw-r--r--webrtc/modules/video_coding/receiver_unittest.cc575
-rw-r--r--webrtc/modules/video_coding/rtt_filter.cc165
-rw-r--r--webrtc/modules/video_coding/rtt_filter.h66
-rw-r--r--webrtc/modules/video_coding/session_info.cc569
-rw-r--r--webrtc/modules/video_coding/session_info.h170
-rw-r--r--webrtc/modules/video_coding/session_info_unittest.cc1030
-rw-r--r--webrtc/modules/video_coding/test/plotJitterEstimate.m (renamed from webrtc/modules/video_coding/main/test/plotJitterEstimate.m)0
-rw-r--r--webrtc/modules/video_coding/test/plotReceiveTrace.m (renamed from webrtc/modules/video_coding/main/test/plotReceiveTrace.m)0
-rw-r--r--webrtc/modules/video_coding/test/plotTimingTest.m (renamed from webrtc/modules/video_coding/main/test/plotTimingTest.m)0
-rw-r--r--webrtc/modules/video_coding/test/receiver_tests.h43
-rw-r--r--webrtc/modules/video_coding/test/release_test.h17
-rw-r--r--webrtc/modules/video_coding/test/rtp_player.cc492
-rw-r--r--webrtc/modules/video_coding/test/rtp_player.h100
-rw-r--r--webrtc/modules/video_coding/test/stream_generator.cc130
-rw-r--r--webrtc/modules/video_coding/test/stream_generator.h72
-rw-r--r--webrtc/modules/video_coding/test/subfigure.m (renamed from webrtc/modules/video_coding/main/test/subfigure.m)0
-rw-r--r--webrtc/modules/video_coding/test/test_util.cc142
-rw-r--r--webrtc/modules/video_coding/test/test_util.h86
-rw-r--r--webrtc/modules/video_coding/test/tester_main.cc78
-rw-r--r--webrtc/modules/video_coding/test/vcm_payload_sink_factory.cc204
-rw-r--r--webrtc/modules/video_coding/test/vcm_payload_sink_factory.h70
-rw-r--r--webrtc/modules/video_coding/test/video_rtp_play.cc88
-rw-r--r--webrtc/modules/video_coding/test/video_source.h85
-rw-r--r--webrtc/modules/video_coding/timestamp_map.cc63
-rw-r--r--webrtc/modules/video_coding/timestamp_map.h47
-rw-r--r--webrtc/modules/video_coding/timing.cc284
-rw-r--r--webrtc/modules/video_coding/timing.h126
-rw-r--r--webrtc/modules/video_coding/timing_unittest.cc149
-rw-r--r--webrtc/modules/video_coding/utility/frame_dropper.cc529
-rw-r--r--webrtc/modules/video_coding/utility/frame_dropper.h96
-rw-r--r--webrtc/modules/video_coding/utility/include/frame_dropper.h98
-rw-r--r--webrtc/modules/video_coding/utility/include/mock/mock_frame_dropper.h41
-rw-r--r--webrtc/modules/video_coding/utility/include/moving_average.h71
-rw-r--r--webrtc/modules/video_coding/utility/include/qp_parser.h30
-rw-r--r--webrtc/modules/video_coding/utility/include/quality_scaler.h67
-rw-r--r--webrtc/modules/video_coding/utility/include/vp8_header_parser.h77
-rw-r--r--webrtc/modules/video_coding/utility/mock/mock_frame_dropper.h34
-rw-r--r--webrtc/modules/video_coding/utility/moving_average.h71
-rw-r--r--webrtc/modules/video_coding/utility/qp_parser.cc4
-rw-r--r--webrtc/modules/video_coding/utility/qp_parser.h30
-rw-r--r--webrtc/modules/video_coding/utility/quality_scaler.cc18
-rw-r--r--webrtc/modules/video_coding/utility/quality_scaler.h67
-rw-r--r--webrtc/modules/video_coding/utility/quality_scaler_unittest.cc34
-rw-r--r--webrtc/modules/video_coding/utility/video_coding_utility.gyp10
-rw-r--r--webrtc/modules/video_coding/utility/vp8_header_parser.cc29
-rw-r--r--webrtc/modules/video_coding/utility/vp8_header_parser.h68
-rw-r--r--webrtc/modules/video_coding/video_coding.gypi102
-rw-r--r--webrtc/modules/video_coding/video_coding_impl.cc320
-rw-r--r--webrtc/modules/video_coding/video_coding_impl.h222
-rw-r--r--webrtc/modules/video_coding/video_coding_robustness_unittest.cc226
-rw-r--r--webrtc/modules/video_coding/video_coding_test.gypi16
-rw-r--r--webrtc/modules/video_coding/video_receiver.cc549
-rw-r--r--webrtc/modules/video_coding/video_receiver_unittest.cc209
-rw-r--r--webrtc/modules/video_coding/video_sender.cc352
-rw-r--r--webrtc/modules/video_coding/video_sender_unittest.cc488
-rw-r--r--webrtc/modules/video_processing/BUILD.gn65
-rw-r--r--webrtc/modules/video_processing/OWNERS5
-rw-r--r--webrtc/modules/video_processing/brightness_detection.cc136
-rw-r--r--webrtc/modules/video_processing/brightness_detection.h35
-rw-r--r--webrtc/modules/video_processing/content_analysis.cc281
-rw-r--r--webrtc/modules/video_processing/content_analysis.h87
-rw-r--r--webrtc/modules/video_processing/content_analysis_sse2.cc271
-rw-r--r--webrtc/modules/video_processing/deflickering.cc402
-rw-r--r--webrtc/modules/video_processing/deflickering.h55
-rw-r--r--webrtc/modules/video_processing/frame_preprocessor.cc141
-rw-r--r--webrtc/modules/video_processing/frame_preprocessor.h84
-rw-r--r--webrtc/modules/video_processing/include/video_processing.h102
-rw-r--r--webrtc/modules/video_processing/include/video_processing_defines.h41
-rw-r--r--webrtc/modules/video_processing/main/interface/video_processing.h270
-rw-r--r--webrtc/modules/video_processing/main/interface/video_processing_defines.h41
-rw-r--r--webrtc/modules/video_processing/main/source/OWNERS5
-rw-r--r--webrtc/modules/video_processing/main/source/brighten.cc45
-rw-r--r--webrtc/modules/video_processing/main/source/brighten.h25
-rw-r--r--webrtc/modules/video_processing/main/source/brightness_detection.cc133
-rw-r--r--webrtc/modules/video_processing/main/source/brightness_detection.h37
-rw-r--r--webrtc/modules/video_processing/main/source/content_analysis.cc274
-rw-r--r--webrtc/modules/video_processing/main/source/content_analysis.h87
-rw-r--r--webrtc/modules/video_processing/main/source/content_analysis_sse2.cc264
-rw-r--r--webrtc/modules/video_processing/main/source/deflickering.cc398
-rw-r--r--webrtc/modules/video_processing/main/source/deflickering.h56
-rw-r--r--webrtc/modules/video_processing/main/source/frame_preprocessor.cc136
-rw-r--r--webrtc/modules/video_processing/main/source/frame_preprocessor.h79
-rw-r--r--webrtc/modules/video_processing/main/source/spatial_resampler.cc98
-rw-r--r--webrtc/modules/video_processing/main/source/spatial_resampler.h61
-rw-r--r--webrtc/modules/video_processing/main/source/video_decimator.cc146
-rw-r--r--webrtc/modules/video_processing/main/source/video_decimator.h58
-rw-r--r--webrtc/modules/video_processing/main/source/video_processing_impl.cc183
-rw-r--r--webrtc/modules/video_processing/main/source/video_processing_impl.h75
-rw-r--r--webrtc/modules/video_processing/main/test/unit_test/brightness_detection_test.cc121
-rw-r--r--webrtc/modules/video_processing/main/test/unit_test/content_metrics_test.cc44
-rw-r--r--webrtc/modules/video_processing/main/test/unit_test/createTable.m179
-rw-r--r--webrtc/modules/video_processing/main/test/unit_test/deflickering_test.cc100
-rw-r--r--webrtc/modules/video_processing/main/test/unit_test/readYUV420file.m45
-rw-r--r--webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.cc390
-rw-r--r--webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.h47
-rw-r--r--webrtc/modules/video_processing/main/test/unit_test/writeYUV420file.m22
-rw-r--r--webrtc/modules/video_processing/spatial_resampler.cc97
-rw-r--r--webrtc/modules/video_processing/spatial_resampler.h60
-rw-r--r--webrtc/modules/video_processing/test/brightness_detection_test.cc120
-rw-r--r--webrtc/modules/video_processing/test/content_metrics_test.cc48
-rw-r--r--webrtc/modules/video_processing/test/createTable.m179
-rw-r--r--webrtc/modules/video_processing/test/deflickering_test.cc98
-rw-r--r--webrtc/modules/video_processing/test/denoiser_test.cc156
-rw-r--r--webrtc/modules/video_processing/test/readYUV420file.m45
-rw-r--r--webrtc/modules/video_processing/test/video_processing_unittest.cc415
-rw-r--r--webrtc/modules/video_processing/test/video_processing_unittest.h47
-rw-r--r--webrtc/modules/video_processing/test/writeYUV420file.m22
-rw-r--r--webrtc/modules/video_processing/util/denoiser_filter.cc54
-rw-r--r--webrtc/modules/video_processing/util/denoiser_filter.h63
-rw-r--r--webrtc/modules/video_processing/util/denoiser_filter_c.cc194
-rw-r--r--webrtc/modules/video_processing/util/denoiser_filter_c.h46
-rw-r--r--webrtc/modules/video_processing/util/denoiser_filter_neon.cc283
-rw-r--r--webrtc/modules/video_processing/util/denoiser_filter_neon.h46
-rw-r--r--webrtc/modules/video_processing/util/denoiser_filter_sse2.cc280
-rw-r--r--webrtc/modules/video_processing/util/denoiser_filter_sse2.h46
-rw-r--r--webrtc/modules/video_processing/util/skin_detection.cc65
-rwxr-xr-xwebrtc/modules/video_processing/util/skin_detection.h28
-rw-r--r--webrtc/modules/video_processing/video_decimator.cc148
-rw-r--r--webrtc/modules/video_processing/video_decimator.h58
-rw-r--r--webrtc/modules/video_processing/video_denoiser.cc147
-rw-r--r--webrtc/modules/video_processing/video_denoiser.h38
-rw-r--r--webrtc/modules/video_processing/video_processing.gypi62
-rw-r--r--webrtc/modules/video_processing/video_processing_impl.cc179
-rw-r--r--webrtc/modules/video_processing/video_processing_impl.h55
-rw-r--r--webrtc/modules/video_render/BUILD.gn4
-rw-r--r--webrtc/modules/video_render/android/video_render_android_impl.cc17
-rw-r--r--webrtc/modules/video_render/android/video_render_android_impl.h5
-rw-r--r--webrtc/modules/video_render/android/video_render_android_native_opengl2.h2
-rw-r--r--webrtc/modules/video_render/android/video_render_android_surface_view.h2
-rw-r--r--webrtc/modules/video_render/android/video_render_opengles20.h2
-rw-r--r--webrtc/modules/video_render/external/video_render_external_impl.h2
-rw-r--r--webrtc/modules/video_render/i_video_render.h2
-rw-r--r--webrtc/modules/video_render/include/video_render.h268
-rw-r--r--webrtc/modules/video_render/include/video_render_defines.h70
-rw-r--r--webrtc/modules/video_render/ios/open_gles20.h2
-rw-r--r--webrtc/modules/video_render/ios/video_render_ios_channel.h2
-rw-r--r--webrtc/modules/video_render/ios/video_render_ios_gles20.h5
-rw-r--r--webrtc/modules/video_render/ios/video_render_ios_gles20.mm8
-rw-r--r--webrtc/modules/video_render/linux/video_x11_channel.h2
-rw-r--r--webrtc/modules/video_render/linux/video_x11_render.h2
-rw-r--r--webrtc/modules/video_render/mac/video_render_agl.cc23
-rw-r--r--webrtc/modules/video_render/mac/video_render_agl.h7
-rw-r--r--webrtc/modules/video_render/mac/video_render_nsopengl.h10
-rw-r--r--webrtc/modules/video_render/mac/video_render_nsopengl.mm36
-rw-r--r--webrtc/modules/video_render/test/testAPI/testAPI.cc10
-rw-r--r--webrtc/modules/video_render/test/testAPI/testAPI.h2
-rw-r--r--webrtc/modules/video_render/test/testAPI/testAPI_mac.mm8
-rw-r--r--webrtc/modules/video_render/video_render.gypi23
-rw-r--r--webrtc/modules/video_render/video_render.h268
-rw-r--r--webrtc/modules/video_render/video_render_defines.h70
-rw-r--r--webrtc/modules/video_render/video_render_impl.cc7
-rw-r--r--webrtc/modules/video_render/video_render_impl.h2
-rw-r--r--webrtc/modules/video_render/video_render_internal_impl.cc7
-rw-r--r--webrtc/modules/video_render/windows/i_video_render_win.h2
-rw-r--r--webrtc/modules/video_render/windows/video_render_direct3d9.cc8
-rw-r--r--webrtc/modules/video_render/windows/video_render_direct3d9.h7
-rw-r--r--webrtc/p2p/OWNERS5
-rw-r--r--webrtc/p2p/base/candidate.h1
-rw-r--r--webrtc/p2p/base/dtlstransport.h2
-rw-r--r--webrtc/p2p/base/dtlstransportchannel.cc59
-rw-r--r--webrtc/p2p/base/dtlstransportchannel.h7
-rw-r--r--webrtc/p2p/base/dtlstransportchannel_unittest.cc178
-rw-r--r--webrtc/p2p/base/faketransportcontroller.h29
-rw-r--r--webrtc/p2p/base/p2ptransportchannel.cc279
-rw-r--r--webrtc/p2p/base/p2ptransportchannel.h64
-rw-r--r--webrtc/p2p/base/p2ptransportchannel_unittest.cc230
-rw-r--r--webrtc/p2p/base/port.cc121
-rw-r--r--webrtc/p2p/base/port.h34
-rw-r--r--webrtc/p2p/base/port_unittest.cc91
-rw-r--r--webrtc/p2p/base/portallocator.h55
-rw-r--r--webrtc/p2p/base/portinterface.h4
-rw-r--r--webrtc/p2p/base/pseudotcp.cc3
-rw-r--r--webrtc/p2p/base/relayport.cc2
-rw-r--r--webrtc/p2p/base/relayport.h34
-rw-r--r--webrtc/p2p/base/stun_unittest.cc11
-rw-r--r--webrtc/p2p/base/stunport.cc119
-rw-r--r--webrtc/p2p/base/stunport.h34
-rw-r--r--webrtc/p2p/base/stunrequest.cc10
-rw-r--r--webrtc/p2p/base/stunrequest.h7
-rw-r--r--webrtc/p2p/base/tcpport.cc21
-rw-r--r--webrtc/p2p/base/tcpport.h44
-rw-r--r--webrtc/p2p/base/transport.cc4
-rw-r--r--webrtc/p2p/base/transport.h8
-rw-r--r--webrtc/p2p/base/transportchannel.cc15
-rw-r--r--webrtc/p2p/base/transportchannel.h21
-rw-r--r--webrtc/p2p/base/transportcontroller.cc16
-rw-r--r--webrtc/p2p/base/transportcontroller.h10
-rw-r--r--webrtc/p2p/base/transportcontroller_unittest.cc15
-rw-r--r--webrtc/p2p/base/transportdescription.cc4
-rw-r--r--webrtc/p2p/base/transportdescriptionfactory_unittest.cc9
-rw-r--r--webrtc/p2p/base/turnport.cc256
-rw-r--r--webrtc/p2p/base/turnport.h47
-rw-r--r--webrtc/p2p/base/turnport_unittest.cc238
-rw-r--r--webrtc/p2p/base/turnserver.cc6
-rw-r--r--webrtc/p2p/base/turnserver.h6
-rw-r--r--webrtc/p2p/client/basicportallocator.cc67
-rw-r--r--webrtc/p2p/client/basicportallocator.h68
-rw-r--r--webrtc/p2p/client/fakeportallocator.h88
-rw-r--r--webrtc/p2p/client/httpportallocator.cc1
-rw-r--r--webrtc/p2p/client/portallocator_unittest.cc233
-rw-r--r--webrtc/p2p/stunprober/stunprober.cc14
-rw-r--r--webrtc/p2p/stunprober/stunprober.h7
-rw-r--r--webrtc/sound/OWNERS4
-rw-r--r--webrtc/sound/alsasoundsystem.cc24
-rw-r--r--webrtc/sound/automaticallychosensoundsystem_unittest.cc16
-rw-r--r--webrtc/sound/linuxsoundsystem.h3
-rw-r--r--webrtc/sound/pulseaudiosoundsystem.cc47
-rw-r--r--webrtc/sound/soundinputstreaminterface.h2
-rw-r--r--webrtc/supplement.gypi9
-rw-r--r--webrtc/system_wrappers/BUILD.gn17
-rw-r--r--webrtc/system_wrappers/OWNERS5
-rw-r--r--webrtc/system_wrappers/include/aligned_array.h34
-rw-r--r--webrtc/system_wrappers/include/aligned_malloc.h6
-rw-r--r--webrtc/system_wrappers/include/asm_defines.h6
-rw-r--r--webrtc/system_wrappers/include/atomic32.h6
-rw-r--r--webrtc/system_wrappers/include/clock.h6
-rw-r--r--webrtc/system_wrappers/include/compile_assert_c.h6
-rw-r--r--webrtc/system_wrappers/include/condition_variable_wrapper.h6
-rw-r--r--webrtc/system_wrappers/include/cpu_features_wrapper.h6
-rw-r--r--webrtc/system_wrappers/include/cpu_info.h6
-rw-r--r--webrtc/system_wrappers/include/critical_section_wrapper.h6
-rw-r--r--webrtc/system_wrappers/include/data_log.h6
-rw-r--r--webrtc/system_wrappers/include/data_log_c.h6
-rw-r--r--webrtc/system_wrappers/include/data_log_impl.h12
-rw-r--r--webrtc/system_wrappers/include/event_tracer.h33
-rw-r--r--webrtc/system_wrappers/include/event_wrapper.h6
-rw-r--r--webrtc/system_wrappers/include/field_trial.h6
-rw-r--r--webrtc/system_wrappers/include/field_trial_default.h8
-rw-r--r--webrtc/system_wrappers/include/file_wrapper.h6
-rw-r--r--webrtc/system_wrappers/include/logcat_trace_context.h6
-rw-r--r--webrtc/system_wrappers/include/logging.h37
-rw-r--r--webrtc/system_wrappers/include/metrics.h110
-rw-r--r--webrtc/system_wrappers/include/ntp_time.h63
-rw-r--r--webrtc/system_wrappers/include/ref_count.h6
-rw-r--r--webrtc/system_wrappers/include/rtp_to_ntp.h6
-rw-r--r--webrtc/system_wrappers/include/rw_lock_wrapper.h6
-rw-r--r--webrtc/system_wrappers/include/scoped_vector.h17
-rw-r--r--webrtc/system_wrappers/include/sleep.h6
-rw-r--r--webrtc/system_wrappers/include/sort.h6
-rw-r--r--webrtc/system_wrappers/include/static_instance.h6
-rw-r--r--webrtc/system_wrappers/include/stl_util.h6
-rw-r--r--webrtc/system_wrappers/include/stringize_macros.h6
-rw-r--r--webrtc/system_wrappers/include/thread_wrapper.h95
-rw-r--r--webrtc/system_wrappers/include/tick_util.h138
-rw-r--r--webrtc/system_wrappers/include/timestamp_extrapolator.h6
-rw-r--r--webrtc/system_wrappers/include/trace.h6
-rw-r--r--webrtc/system_wrappers/include/utf_util_win.h6
-rw-r--r--webrtc/system_wrappers/source/Android.mk2
-rw-r--r--webrtc/system_wrappers/source/aligned_array_unittest.cc10
-rw-r--r--webrtc/system_wrappers/source/condition_variable_unittest.cc12
-rw-r--r--webrtc/system_wrappers/source/critical_section_unittest.cc14
-rw-r--r--webrtc/system_wrappers/source/data_log.cc34
-rw-r--r--webrtc/system_wrappers/source/event_timer_posix.cc12
-rw-r--r--webrtc/system_wrappers/source/event_timer_posix.h5
-rw-r--r--webrtc/system_wrappers/source/event_tracer.cc12
-rw-r--r--webrtc/system_wrappers/source/event_tracer_unittest.cc12
-rw-r--r--webrtc/system_wrappers/source/field_trial_default.cc4
-rw-r--r--webrtc/system_wrappers/source/logging_unittest.cc13
-rw-r--r--webrtc/system_wrappers/source/metrics_unittest.cc91
-rw-r--r--webrtc/system_wrappers/source/ntp_time_unittest.cc69
-rw-r--r--webrtc/system_wrappers/source/scoped_vector_unittest.cc12
-rw-r--r--webrtc/system_wrappers/source/thread.cc33
-rw-r--r--webrtc/system_wrappers/source/thread_posix.cc166
-rw-r--r--webrtc/system_wrappers/source/thread_posix.h53
-rw-r--r--webrtc/system_wrappers/source/thread_posix_unittest.cc30
-rw-r--r--webrtc/system_wrappers/source/thread_unittest.cc53
-rw-r--r--webrtc/system_wrappers/source/thread_win.cc107
-rw-r--r--webrtc/system_wrappers/source/thread_win.h48
-rw-r--r--webrtc/system_wrappers/source/tick_util.cc123
-rw-r--r--webrtc/system_wrappers/source/trace_impl.cc3
-rw-r--r--webrtc/system_wrappers/source/trace_impl.h2
-rw-r--r--webrtc/system_wrappers/system_wrappers.gyp13
-rw-r--r--webrtc/system_wrappers/system_wrappers_tests.gyp9
-rw-r--r--webrtc/test/BUILD.gn6
-rw-r--r--webrtc/test/call_test.cc394
-rw-r--r--webrtc/test/call_test.h111
-rw-r--r--webrtc/test/channel_transport/channel_transport.cc6
-rw-r--r--webrtc/test/channel_transport/channel_transport.h56
-rw-r--r--webrtc/test/channel_transport/include/channel_transport.h56
-rw-r--r--webrtc/test/channel_transport/udp_socket2_manager_win.cc21
-rw-r--r--webrtc/test/channel_transport/udp_socket2_manager_win.h6
-rw-r--r--webrtc/test/channel_transport/udp_socket2_win.cc10
-rw-r--r--webrtc/test/channel_transport/udp_socket_manager_posix.cc27
-rw-r--r--webrtc/test/channel_transport/udp_socket_manager_posix.h4
-rw-r--r--webrtc/test/common_unittest.cc3
-rw-r--r--webrtc/test/configurable_frame_size_encoder.cc4
-rw-r--r--webrtc/test/direct_transport.cc41
-rw-r--r--webrtc/test/direct_transport.h16
-rw-r--r--webrtc/test/fake_audio_device.cc19
-rw-r--r--webrtc/test/fake_audio_device.h4
-rw-r--r--webrtc/test/fake_decoder.cc6
-rw-r--r--webrtc/test/fake_decoder.h12
-rw-r--r--webrtc/test/fake_encoder.cc12
-rw-r--r--webrtc/test/fake_encoder.h9
-rw-r--r--webrtc/test/fake_network_pipe.cc22
-rw-r--r--webrtc/test/fake_network_pipe.h25
-rw-r--r--webrtc/test/fake_network_pipe_unittest.cc54
-rw-r--r--webrtc/test/fake_texture_frame.cc27
-rw-r--r--webrtc/test/fake_texture_frame.h22
-rw-r--r--webrtc/test/fake_voice_engine.cc70
-rw-r--r--webrtc/test/fake_voice_engine.h504
-rw-r--r--webrtc/test/field_trial.cc36
-rw-r--r--webrtc/test/field_trial.h3
-rw-r--r--webrtc/test/frame_generator_capturer.cc19
-rw-r--r--webrtc/test/frame_generator_capturer.h10
-rw-r--r--webrtc/test/fuzzers/BUILD.gn115
-rw-r--r--webrtc/test/fuzzers/OWNERS1
-rw-r--r--webrtc/test/fuzzers/audio_decoder_fuzzer.cc49
-rw-r--r--webrtc/test/fuzzers/audio_decoder_fuzzer.h31
-rw-r--r--webrtc/test/fuzzers/audio_decoder_ilbc_fuzzer.cc22
-rw-r--r--webrtc/test/fuzzers/audio_decoder_isac_fuzzer.cc22
-rw-r--r--webrtc/test/fuzzers/audio_decoder_isacfix_fuzzer.cc22
-rw-r--r--webrtc/test/fuzzers/audio_decoder_opus_fuzzer.cc23
-rw-r--r--webrtc/test/fuzzers/h264_depacketizer_fuzzer.cc18
-rw-r--r--webrtc/test/fuzzers/producer_fec_fuzzer.cc60
-rw-r--r--webrtc/test/fuzzers/vp8_depacketizer_fuzzer.cc18
-rw-r--r--webrtc/test/fuzzers/vp8_qp_parser_fuzzer.cc17
-rw-r--r--webrtc/test/fuzzers/vp9_depacketizer_fuzzer.cc18
-rw-r--r--webrtc/test/fuzzers/webrtc_fuzzer.gni28
-rw-r--r--webrtc/test/fuzzers/webrtc_fuzzer_main.cc41
-rw-r--r--webrtc/test/gl/gl_renderer.h6
-rw-r--r--webrtc/test/histogram.cc49
-rw-r--r--webrtc/test/histogram.h2
-rw-r--r--webrtc/test/layer_filtering_transport.cc61
-rw-r--r--webrtc/test/layer_filtering_transport.h17
-rw-r--r--webrtc/test/linux/glx_renderer.h6
-rw-r--r--webrtc/test/mac/video_renderer_mac.h6
-rw-r--r--webrtc/test/mock_voe_channel_proxy.h48
-rw-r--r--webrtc/test/mock_voice_engine.h337
-rw-r--r--webrtc/test/null_transport.h6
-rw-r--r--webrtc/test/random.cc57
-rw-r--r--webrtc/test/random.h49
-rw-r--r--webrtc/test/rtp_file_reader.cc2
-rw-r--r--webrtc/test/rtp_file_reader_unittest.cc3
-rw-r--r--webrtc/test/rtp_rtcp_observer.h22
-rw-r--r--webrtc/test/run_loop.h6
-rw-r--r--webrtc/test/statistics.h6
-rw-r--r--webrtc/test/test.gyp24
-rw-r--r--webrtc/test/test_main.cc6
-rw-r--r--webrtc/test/testsupport/fileutils_unittest.cc17
-rw-r--r--webrtc/test/testsupport/gtest_disable.h57
-rw-r--r--webrtc/test/vcm_capturer.cc2
-rw-r--r--webrtc/test/vcm_capturer.h8
-rw-r--r--webrtc/test/video_capturer.h6
-rw-r--r--webrtc/test/video_renderer.h6
-rw-r--r--webrtc/test/webrtc_test_common.gyp24
-rw-r--r--webrtc/test/win/d3d_renderer.h6
-rw-r--r--webrtc/tools/agc/activity_metric.cc6
-rw-r--r--webrtc/tools/agc/agc_harness.cc7
-rw-r--r--webrtc/tools/agc/test_utils.cc2
-rw-r--r--webrtc/tools/e2e_quality/audio/audio_e2e_harness.cc2
-rw-r--r--webrtc/tools/force_mic_volume_max/force_mic_volume_max.cc2
-rw-r--r--webrtc/tools/frame_analyzer/video_quality_analysis.cc2
-rw-r--r--webrtc/tools/frame_editing/frame_editing_lib.cc1
-rw-r--r--webrtc/tools/frame_editing/frame_editing_lib.h6
-rw-r--r--webrtc/tools/psnr_ssim_analyzer/psnr_ssim_analyzer.cc6
-rw-r--r--webrtc/tools/rtcbot/OWNERS1
-rw-r--r--webrtc/transport.h6
-rw-r--r--webrtc/typedefs.h23
-rw-r--r--webrtc/video/BUILD.gn47
-rw-r--r--webrtc/video/call_stats.cc168
-rw-r--r--webrtc/video/call_stats.h83
-rw-r--r--webrtc/video/call_stats_unittest.cc204
-rw-r--r--webrtc/video/encoded_frame_callback_adapter.cc2
-rw-r--r--webrtc/video/encoded_frame_callback_adapter.h2
-rw-r--r--webrtc/video/encoder_state_feedback.cc124
-rw-r--r--webrtc/video/encoder_state_feedback.h71
-rw-r--r--webrtc/video/encoder_state_feedback_unittest.cc143
-rw-r--r--webrtc/video/end_to_end_tests.cc1000
-rw-r--r--webrtc/video/full_stack.cc50
-rw-r--r--webrtc/video/overuse_frame_detector.cc364
-rw-r--r--webrtc/video/overuse_frame_detector.h164
-rw-r--r--webrtc/video/overuse_frame_detector_unittest.cc310
-rw-r--r--webrtc/video/payload_router.cc101
-rw-r--r--webrtc/video/payload_router.h85
-rw-r--r--webrtc/video/payload_router_unittest.cc209
-rw-r--r--webrtc/video/rampup_tests.cc509
-rw-r--r--webrtc/video/rampup_tests.h135
-rw-r--r--webrtc/video/receive_statistics_proxy.cc31
-rw-r--r--webrtc/video/receive_statistics_proxy.h7
-rw-r--r--webrtc/video/replay.cc2
-rw-r--r--webrtc/video/report_block_stats.cc111
-rw-r--r--webrtc/video/report_block_stats.h62
-rw-r--r--webrtc/video/report_block_stats_unittest.cc146
-rw-r--r--webrtc/video/screenshare_loopback.cc153
-rw-r--r--webrtc/video/send_statistics_proxy.cc173
-rw-r--r--webrtc/video/send_statistics_proxy.h67
-rw-r--r--webrtc/video/send_statistics_proxy_unittest.cc33
-rw-r--r--webrtc/video/stream_synchronization.cc226
-rw-r--r--webrtc/video/stream_synchronization.h59
-rw-r--r--webrtc/video/stream_synchronization_unittest.cc563
-rw-r--r--webrtc/video/video_capture_input.cc34
-rw-r--r--webrtc/video/video_capture_input.h19
-rw-r--r--webrtc/video/video_capture_input_unittest.cc36
-rw-r--r--webrtc/video/video_decoder.cc15
-rw-r--r--webrtc/video/video_decoder_unittest.cc30
-rw-r--r--webrtc/video/video_encoder.cc9
-rw-r--r--webrtc/video/video_encoder_unittest.cc15
-rw-r--r--webrtc/video/video_loopback.cc143
-rw-r--r--webrtc/video/video_quality_test.cc572
-rw-r--r--webrtc/video/video_quality_test.h49
-rw-r--r--webrtc/video/video_receive_stream.cc40
-rw-r--r--webrtc/video/video_receive_stream.h8
-rw-r--r--webrtc/video/video_send_stream.cc85
-rw-r--r--webrtc/video/video_send_stream.h9
-rw-r--r--webrtc/video/video_send_stream_tests.cc914
-rw-r--r--webrtc/video/vie_channel.cc1218
-rw-r--r--webrtc/video/vie_channel.h454
-rw-r--r--webrtc/video/vie_codec_unittest.cc (renamed from webrtc/video_engine/vie_codec_unittest.cc)0
-rw-r--r--webrtc/video/vie_encoder.cc634
-rw-r--r--webrtc/video/vie_encoder.h196
-rw-r--r--webrtc/video/vie_receiver.cc483
-rw-r--r--webrtc/video/vie_receiver.h132
-rw-r--r--webrtc/video/vie_remb.cc144
-rw-r--r--webrtc/video/vie_remb.h79
-rw-r--r--webrtc/video/vie_remb_unittest.cc253
-rw-r--r--webrtc/video/vie_sync_module.cc174
-rw-r--r--webrtc/video/vie_sync_module.h62
-rw-r--r--webrtc/video/webrtc_video.gypi45
-rw-r--r--webrtc/video_decoder.h22
-rw-r--r--webrtc/video_encoder.h4
-rw-r--r--webrtc/video_engine/OWNERS13
-rw-r--r--webrtc/video_engine/call_stats.cc167
-rw-r--r--webrtc/video_engine/call_stats.h81
-rw-r--r--webrtc/video_engine/call_stats_unittest.cc203
-rw-r--r--webrtc/video_engine/encoder_state_feedback.cc124
-rw-r--r--webrtc/video_engine/encoder_state_feedback.h71
-rw-r--r--webrtc/video_engine/encoder_state_feedback_unittest.cc143
-rw-r--r--webrtc/video_engine/overuse_frame_detector.cc422
-rw-r--r--webrtc/video_engine/overuse_frame_detector.h181
-rw-r--r--webrtc/video_engine/overuse_frame_detector_unittest.cc405
-rw-r--r--webrtc/video_engine/payload_router.cc101
-rw-r--r--webrtc/video_engine/payload_router.h85
-rw-r--r--webrtc/video_engine/payload_router_unittest.cc209
-rw-r--r--webrtc/video_engine/report_block_stats.cc111
-rw-r--r--webrtc/video_engine/report_block_stats.h62
-rw-r--r--webrtc/video_engine/report_block_stats_unittest.cc146
-rw-r--r--webrtc/video_engine/stream_synchronization.cc226
-rw-r--r--webrtc/video_engine/stream_synchronization.h59
-rw-r--r--webrtc/video_engine/stream_synchronization_unittest.cc562
-rw-r--r--webrtc/video_engine/video_engine_core_unittests.gyp74
-rw-r--r--webrtc/video_engine/video_engine_core_unittests.isolate23
-rw-r--r--webrtc/video_engine/vie_channel.cc1253
-rw-r--r--webrtc/video_engine/vie_channel.h458
-rw-r--r--webrtc/video_engine/vie_defines.h120
-rw-r--r--webrtc/video_engine/vie_encoder.cc710
-rw-r--r--webrtc/video_engine/vie_encoder.h201
-rw-r--r--webrtc/video_engine/vie_receiver.cc482
-rw-r--r--webrtc/video_engine/vie_receiver.h131
-rw-r--r--webrtc/video_engine/vie_remb.cc143
-rw-r--r--webrtc/video_engine/vie_remb.h78
-rw-r--r--webrtc/video_engine/vie_remb_unittest.cc251
-rw-r--r--webrtc/video_engine/vie_sync_module.cc188
-rw-r--r--webrtc/video_engine/vie_sync_module.h65
-rw-r--r--webrtc/video_engine_tests.isolate1
-rw-r--r--webrtc/video_frame.h4
-rw-r--r--webrtc/video_receive_stream.h13
-rw-r--r--webrtc/video_renderer.h11
-rw-r--r--webrtc/video_send_stream.h5
-rw-r--r--webrtc/voice_engine/BUILD.gn3
-rw-r--r--webrtc/voice_engine/channel.cc434
-rw-r--r--webrtc/voice_engine/channel.h43
-rw-r--r--webrtc/voice_engine/channel_proxy.cc153
-rw-r--r--webrtc/voice_engine/channel_proxy.h79
-rw-r--r--webrtc/voice_engine/include/voe_base.h2
-rw-r--r--webrtc/voice_engine/include/voe_rtp_rtcp.h14
-rw-r--r--webrtc/voice_engine/include/voe_video_sync.h7
-rw-r--r--webrtc/voice_engine/level_indicator.cc2
-rw-r--r--webrtc/voice_engine/monitor_module.h2
-rw-r--r--webrtc/voice_engine/output_mixer.cc35
-rw-r--r--webrtc/voice_engine/output_mixer.h9
-rw-r--r--webrtc/voice_engine/shared_data.h2
-rw-r--r--webrtc/voice_engine/test/android/android_test/jni/android_test.cc6
-rw-r--r--webrtc/voice_engine/test/auto_test/fakes/conference_transport.cc10
-rw-r--r--webrtc/voice_engine/test/auto_test/fakes/conference_transport.h6
-rw-r--r--webrtc/voice_engine/test/auto_test/fixtures/after_initialization_fixture.h12
-rw-r--r--webrtc/voice_engine/test/auto_test/fixtures/before_initialization_fixture.h1
-rw-r--r--webrtc/voice_engine/test/auto_test/standard/codec_test.cc14
-rw-r--r--webrtc/voice_engine/test/auto_test/standard/external_media_test.cc2
-rw-r--r--webrtc/voice_engine/test/auto_test/standard/rtp_rtcp_extensions.cc4
-rw-r--r--webrtc/voice_engine/test/auto_test/standard/rtp_rtcp_test.cc3
-rw-r--r--webrtc/voice_engine/test/auto_test/voe_cpu_test.cc2
-rw-r--r--webrtc/voice_engine/test/auto_test/voe_output_test.cc203
-rw-r--r--webrtc/voice_engine/test/auto_test/voe_standard_test.cc13
-rw-r--r--webrtc/voice_engine/test/auto_test/voe_standard_test.h1
-rw-r--r--webrtc/voice_engine/test/auto_test/voe_stress_test.cc10
-rw-r--r--webrtc/voice_engine/test/auto_test/voe_stress_test.h8
-rw-r--r--webrtc/voice_engine/test/cmd_test/voe_cmd_test.cc7
-rw-r--r--webrtc/voice_engine/transmit_mixer.cc55
-rw-r--r--webrtc/voice_engine/transmit_mixer.h16
-rw-r--r--webrtc/voice_engine/utility.cc27
-rw-r--r--webrtc/voice_engine/utility.h6
-rw-r--r--webrtc/voice_engine/utility_unittest.cc2
-rw-r--r--webrtc/voice_engine/voe_audio_processing_impl.cc7
-rw-r--r--webrtc/voice_engine/voe_base_impl.cc118
-rw-r--r--webrtc/voice_engine/voe_base_impl.h79
-rw-r--r--webrtc/voice_engine/voe_base_unittest.cc6
-rw-r--r--webrtc/voice_engine/voe_codec_impl.cc88
-rw-r--r--webrtc/voice_engine/voe_codec_impl.h6
-rw-r--r--webrtc/voice_engine/voe_codec_unittest.cc1
-rw-r--r--webrtc/voice_engine/voe_file_impl.cc2
-rw-r--r--webrtc/voice_engine/voe_neteq_stats_impl.cc2
-rw-r--r--webrtc/voice_engine/voe_network_impl.cc2
-rw-r--r--webrtc/voice_engine/voe_video_sync_impl.cc19
-rw-r--r--webrtc/voice_engine/voe_video_sync_impl.h2
-rw-r--r--webrtc/voice_engine/voice_engine.gyp168
-rw-r--r--webrtc/voice_engine/voice_engine_defines.h7
-rw-r--r--webrtc/voice_engine/voice_engine_impl.cc41
-rw-r--r--webrtc/voice_engine/voice_engine_impl.h8
-rw-r--r--webrtc/webrtc.gyp2
-rwxr-xr-x[-rw-r--r--]webrtc/webrtc_examples.gyp454
-rw-r--r--webrtc/webrtc_tests.gypi22
2065 files changed, 118802 insertions, 113255 deletions
diff --git a/.gitignore b/.gitignore
index b29171c7d8..7090f4aa66 100644
--- a/.gitignore
+++ b/.gitignore
@@ -44,11 +44,9 @@
/chromium/.last_sync_chromium
/chromium/_bad_scm
/chromium/src
-/google_apis
/gyp-mac-tool
/links
/links.db
-/net
/out
/resources/**/*.aecdump
/resources/**/*.bin
@@ -79,7 +77,6 @@
/testing
/third_party/WebKit/Tools/Scripts
/third_party/android_platform
-/third_party/android_testrunner
/third_party/android_tools
/third_party/appurify-python
/third_party/asan
@@ -87,6 +84,7 @@
/third_party/binutils
/third_party/boringssl
/third_party/BUILD.gn
+/third_party/catapult
/third_party/clang_format
/third_party/class-dump
/third_party/colorama
@@ -94,6 +92,7 @@
/third_party/directxsdk
/third_party/drmemory
/third_party/expat
+/third_party/ffmpeg
/third_party/gaeunit
/third_party/gflags/src
/third_party/google-visualization-python
@@ -105,6 +104,7 @@
/third_party/junit
/third_party/junit-jar
/third_party/libc++
+/third_party/libc++-static
/third_party/libc++abi
/third_party/libevent
/third_party/libjingle
@@ -123,6 +123,7 @@
/third_party/nss
/third_party/oauth2
/third_party/ocmock
+/third_party/openh264
/third_party/openmax_dl
/third_party/opus
/third_party/proguard
@@ -139,7 +140,6 @@
/third_party/zlib
/tools/android
/tools/clang
-/tools/find_depot_tools.py
/tools/generate_library_loader
/tools/gn
/tools/grit
@@ -148,21 +148,15 @@
/tools/memory
/tools/protoc_wrapper
/tools/python
-/tools/relocation_packer
/tools/sanitizer_options
/tools/swarming_client
+/tools/telemetry
/tools/tsan_suppressions
/tools/valgrind
/tools/vim
/tools/win
/tools/xdisplaycheck
/tools/whitespace.txt
-/webrtc/examples/android/media_demo/bin
-/webrtc/examples/android/media_demo/gen
-/webrtc/examples/android/media_demo/libs
-/webrtc/examples/android/media_demo/local.properties
-/webrtc/examples/android/media_demo/obj
-/webrtc/examples/android/media_demo/proguard-project.txt
/webrtc/examples/android/opensl_loopback/bin
/webrtc/examples/android/opensl_loopback/gen
/webrtc/examples/android/opensl_loopback/libs
@@ -172,9 +166,5 @@
/webrtc/modules/audio_device/android/test/bin/
/webrtc/modules/audio_device/android/test/gen/
/webrtc/modules/audio_device/android/test/libs/
-/webrtc/video_engine/test/android/bin
-/webrtc/video_engine/test/android/gen
-/webrtc/video_engine/test/android/libs
-/webrtc/video_engine/test/android/obj
/x86-generic_out/
/xcodebuild
diff --git a/.gn b/.gn
index d078116168..f849ef7281 100644
--- a/.gn
+++ b/.gn
@@ -35,6 +35,7 @@ exec_script_whitelist = [
"//build/config/linux/pkg_config.gni",
"//build/config/mac/mac_sdk.gni",
"//build/config/posix/BUILD.gn",
+ "//build/config/sysroot.gni",
"//build/config/win/visual_studio_version.gni",
"//build/gn_helpers.py",
"//build/gypi_to_gn.py",
@@ -42,6 +43,7 @@ exec_script_whitelist = [
"//build/toolchain/mac/BUILD.gn",
"//build/toolchain/win/BUILD.gn",
"//third_party/boringssl/BUILD.gn",
+ "//third_party/openh264/BUILD.gn",
"//third_party/opus/BUILD.gn",
"//webrtc/modules/video_render/BUILD.gn",
]
diff --git a/AUTHORS b/AUTHORS
index 0d9ff5abd2..6f11ee66d8 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -1,6 +1,7 @@
# Names should be added to this file like so:
# Name or Organization <email address>
+Andrew MacDonald <andrew@webrtc.org>
Anil Kumar <an1kumar@gmail.com>
Ben Strong <bstrong@gmail.com>
Bob Withers <bwit@pobox.com>
@@ -22,6 +23,7 @@ Pali Rohar
Paul Kapustin <pkapustin@gmail.com>
Rafael Lopez Diez <rafalopezdiez@gmail.com>
Ralph Giles <giles@ghostscript.com>
+Riku Voipio <riku.voipio@linaro.org>
Robert Nagy <robert.nagy@gmail.com>
Ryan Yoakum <ryoakum@skobalt.com>
Sarah Thompson <sarah@telergy.com>
@@ -30,8 +32,10 @@ Silviu Caragea <silviu.cpp@gmail.com>
Steve Reid <sreid@sea-to-sky.net>
Vicken Simonian <vsimon@gmail.com>
Victor Costan <costan@gmail.com>
+Alexander Brauckmann <a.brauckmann@gmail.com>
&yet LLC
+Agora IO
ARM Holdings
BroadSoft Inc.
Google Inc.
diff --git a/DEPS b/DEPS
index 3ceff790e3..dcf56bc1b6 100644
--- a/DEPS
+++ b/DEPS
@@ -6,7 +6,7 @@
vars = {
'extra_gyp_flag': '-Dextra_gyp_flag=0',
'chromium_git': 'https://chromium.googlesource.com',
- 'chromium_revision': '657e8d9a9139da0735d73fc0fc6d90a05f980319',
+ 'chromium_revision': '099be58b08dadb64b1dc9f359ae097e978df5416',
}
# NOTE: Prefer revision numbers to tags for svn deps. Use http rather than
@@ -24,7 +24,7 @@ deps = {
deps_os = {
'win': {
'src/third_party/winsdk_samples/src':
- Var('chromium_git') + '/external/webrtc/deps/third_party/winsdk_samples_v71@c0cbedd854cb610a53226d9817416c4ab9a7d1e9', # from svn revision 7951
+ Var('chromium_git') + '/external/webrtc/deps/third_party/winsdk_samples_v71@e71b549167a665d7424d6f1dadfbff4b4aad1589',
},
}
@@ -102,6 +102,7 @@ hooks = [
'--recursive',
'--num_threads=10',
'--no_auth',
+ '--quiet',
'--bucket', 'chromium-webrtc-resources',
'src/resources'],
},
diff --git a/OWNERS b/OWNERS
index 3ecf2a2571..5812db5ca7 100644
--- a/OWNERS
+++ b/OWNERS
@@ -1,4 +1,3 @@
-andrew@webrtc.org
henrika@webrtc.org
mflodman@webrtc.org
niklas.enbom@webrtc.org
diff --git a/PRESUBMIT.py b/PRESUBMIT.py
index e7ceac94cf..08dd68dd74 100755
--- a/PRESUBMIT.py
+++ b/PRESUBMIT.py
@@ -14,6 +14,100 @@ import subprocess
import sys
+# Directories that will be scanned by cpplint by the presubmit script.
+CPPLINT_DIRS = [
+ 'webrtc/audio',
+ 'webrtc/call',
+ 'webrtc/common_video',
+ 'webrtc/examples',
+ 'webrtc/modules/remote_bitrate_estimator',
+ 'webrtc/modules/rtp_rtcp',
+ 'webrtc/modules/video_coding',
+ 'webrtc/modules/video_processing',
+ 'webrtc/sound',
+ 'webrtc/tools',
+ 'webrtc/video',
+]
+
+# List of directories of "supported" native APIs. That means changes to headers
+# will be done in a compatible way following this scheme:
+# 1. Non-breaking changes are made.
+# 2. The old APIs are marked as deprecated (with comments).
+# 3. Deprecation is announced to discuss-webrtc@googlegroups.com and
+# webrtc-users@google.com (internal list).
+# 4. (later) The deprecated APIs are removed.
+# Directories marked as DEPRECATED should not be used. They're only present in
+# the list to support legacy downstream code.
+NATIVE_API_DIRS = (
+ 'talk/app/webrtc',
+ 'webrtc',
+ 'webrtc/base', # DEPRECATED.
+ 'webrtc/common_audio/include', # DEPRECATED.
+ 'webrtc/modules/audio_coding/include',
+ 'webrtc/modules/audio_conference_mixer/include', # DEPRECATED.
+ 'webrtc/modules/audio_device/include',
+ 'webrtc/modules/audio_processing/include',
+ 'webrtc/modules/bitrate_controller/include',
+ 'webrtc/modules/include',
+ 'webrtc/modules/remote_bitrate_estimator/include',
+ 'webrtc/modules/rtp_rtcp/include',
+ 'webrtc/modules/rtp_rtcp/source', # DEPRECATED.
+ 'webrtc/modules/utility/include',
+ 'webrtc/modules/video_coding/codecs/h264/include',
+ 'webrtc/modules/video_coding/codecs/i420/include',
+ 'webrtc/modules/video_coding/codecs/vp8/include',
+ 'webrtc/modules/video_coding/codecs/vp9/include',
+ 'webrtc/modules/video_coding/include',
+ 'webrtc/system_wrappers/include', # DEPRECATED.
+ 'webrtc/voice_engine/include',
+)
+
+
+def _VerifyNativeApiHeadersListIsValid(input_api, output_api):
+ """Ensures the list of native API header directories is up to date."""
+ non_existing_paths = []
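+  # Convert the POSIX-style entries in NATIVE_API_DIRS to absolute,
+  # platform-native paths before checking that each directory still exists.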
+ native_api_full_paths = [
+ input_api.os_path.join(input_api.PresubmitLocalPath(),
+ *path.split('/')) for path in NATIVE_API_DIRS]
+ for path in native_api_full_paths:
+ if not os.path.isdir(path):
+ non_existing_paths.append(path)
+ if non_existing_paths:
+ return [output_api.PresubmitError(
+        'Directories containing native API headers have changed, making the '
+        'list in PRESUBMIT.py outdated.\nPlease update it to the current '
+ 'location of our native APIs.',
+ non_existing_paths)]
+ return []
+
+
+def _CheckNativeApiHeaderChanges(input_api, output_api):
+ """Checks to remind proper changing of native APIs."""
+ files = []
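+  # Collect changed headers that sit directly in one of the native API
+  # directories; headers in subdirectories do not match the exact comparison.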
+ for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
+ if f.LocalPath().endswith('.h'):
+ for path in NATIVE_API_DIRS:
+ if os.path.dirname(f.LocalPath()) == path:
+ files.append(f)
+
+ if files:
+ return [output_api.PresubmitNotifyResult(
+ 'You seem to be changing native API header files. Please make sure '
+ 'you:\n'
+ ' 1. Make compatible changes that don\'t break existing clients.\n'
+ ' 2. Mark the old APIs as deprecated.\n'
+      '  3. Create a timeline and plan for when the deprecated APIs will '
+      'be removed (preferably 3 months or so).\n'
+      '  4. Update/inform existing downstream code owners to stop using the '
+      'deprecated APIs:\n'
+      'send an announcement to discuss-webrtc@googlegroups.com and '
+ 'webrtc-users@google.com.\n'
+ ' 5. (after ~3 months) remove the deprecated API.\n'
+ 'Related files:',
+ files)]
+ return []
+
+
def _CheckNoIOStreamInHeaders(input_api, output_api):
"""Checks to make sure no .h files include <iostream>."""
files = []
@@ -54,6 +148,14 @@ def _CheckNoFRIEND_TEST(input_api, output_api):
'use FRIEND_TEST_ALL_PREFIXES() instead.\n' + '\n'.join(problems))]
+def _IsLintWhitelisted(whitelist_dirs, file_path):
+ """ Checks if a file is whitelisted for lint check."""
+ for path in whitelist_dirs:
+ if os.path.dirname(file_path).startswith(path):
+ return True
+ return False
+
+
def _CheckApprovedFilesLintClean(input_api, output_api,
source_file_filter=None):
"""Checks that all new or whitelisted .cc and .h files pass cpplint.py.
@@ -68,12 +170,9 @@ def _CheckApprovedFilesLintClean(input_api, output_api,
# pylint: disable=W0212
cpplint._cpplint_state.ResetErrorCounts()
- # Justifications for each filter:
- #
- # - build/header_guard : WebRTC coding style says they should be prefixed
- # with WEBRTC_, which is not possible to configure in
- # cpplint.py.
- cpplint._SetFilters('-build/header_guard')
+  # Create a platform-independent whitelist for the CPPLINT_DIRS.
+ whitelist_dirs = [input_api.os_path.join(*path.split('/'))
+ for path in CPPLINT_DIRS]
# Use the strictest verbosity level for cpplint.py (level 1) which is the
# default when running cpplint.py from command line.
@@ -83,7 +182,7 @@ def _CheckApprovedFilesLintClean(input_api, output_api,
files = []
for f in input_api.AffectedSourceFiles(source_file_filter):
# Note that moved/renamed files also count as added.
- if f.Action() == 'A':
+ if f.Action() == 'A' or _IsLintWhitelisted(whitelist_dirs, f.LocalPath()):
files.append(f.AbsoluteLocalPath())
for file_name in files:
@@ -256,6 +355,14 @@ def _RunPythonTests(input_api, output_api):
def _CommonChecks(input_api, output_api):
"""Checks common to both upload and commit."""
results = []
+ # Filter out files that are in objc or ios dirs from being cpplint-ed since
+ # they do not follow C++ lint rules.
+ black_list = input_api.DEFAULT_BLACK_LIST + (
+ r".*\bobjc[\\\/].*",
+ )
+ source_file_filter = lambda x: input_api.FilterSourceFile(x, None, black_list)
+ results.extend(_CheckApprovedFilesLintClean(
+ input_api, output_api, source_file_filter))
results.extend(input_api.canned_checks.RunPylint(input_api, output_api,
black_list=(r'^.*gviz_api\.py$',
r'^.*gaeunit\.py$',
@@ -305,7 +412,7 @@ def _CommonChecks(input_api, output_api):
input_api, output_api))
results.extend(input_api.canned_checks.CheckChangeTodoHasOwner(
input_api, output_api))
- results.extend(_CheckApprovedFilesLintClean(input_api, output_api))
+ results.extend(_CheckNativeApiHeaderChanges(input_api, output_api))
results.extend(_CheckNoIOStreamInHeaders(input_api, output_api))
results.extend(_CheckNoFRIEND_TEST(input_api, output_api))
results.extend(_CheckGypChanges(input_api, output_api))
@@ -325,6 +432,7 @@ def CheckChangeOnUpload(input_api, output_api):
def CheckChangeOnCommit(input_api, output_api):
results = []
results.extend(_CommonChecks(input_api, output_api))
+ results.extend(_VerifyNativeApiHeadersListIsValid(input_api, output_api))
results.extend(input_api.canned_checks.CheckOwners(input_api, output_api))
results.extend(input_api.canned_checks.CheckChangeWasUploaded(
input_api, output_api))
diff --git a/WATCHLISTS b/WATCHLISTS
index c89a28e32a..ba7af53d5a 100644
--- a/WATCHLISTS
+++ b/WATCHLISTS
@@ -16,14 +16,14 @@
},
'all_webrtc': {
# NOTE: if you like this you might like webrtc-reviews@webrtc.org!
- 'filepath': 'webrtc/.*',
+ 'filepath': '^webrtc/.*',
},
'root_files': {
# webrtc/build/ and non-recursive contents of ./ and webrtc/
- 'filepath': '^[^/]*$|webrtc/[^/]*$|webrtc/build/.*',
+ 'filepath': '^[^/]*$|^webrtc/[^/]*$|^webrtc/build/.*',
},
'documented_interfaces': {
- 'filepath': 'webrtc/[^/]*\.h$|'\
+ 'filepath': '^webrtc/[^/]*\.h$|'\
'webrtc/voice_engine/include/.*',
},
'build_files': {
@@ -32,9 +32,14 @@
'java_files': {
'filepath': '\.java$|\.xml$',
},
- 'video_engine': {
- 'filepath': 'webrtc/video_engine/.*|'\
- 'webrtc/video/.*',
+ 'audio': {
+ 'filepath': 'webrtc/audio/.*',
+ },
+ 'call': {
+ 'filepath': 'webrtc/call/.*',
+ },
+ 'video': {
+ 'filepath': 'webrtc/video/.*',
},
'voice_engine': {
'filepath': 'webrtc/voice_engine/.*',
@@ -89,59 +94,93 @@
'WATCHLISTS': {
'this_file': [''],
'all_webrtc': ['tterriberry@mozilla.com'],
- 'root_files': ['andrew@webrtc.org',
- 'niklas.enbom@webrtc.org',
- 'yujie.mao@webrtc.org',
+ 'root_files': ['niklas.enbom@webrtc.org',
+ 'peah@webrtc.org',
'qiang.lu@intel.com',
- 'peah@webrtc.org'],
+ 'yujie.mao@webrtc.org'],
'documented_interfaces': ['interface-changes@webrtc.org',
'rwolff@gocast.it'],
'common_audio': ['aluebs@webrtc.org',
'andrew@webrtc.org',
+ 'audio-team@agora.io',
'bjornv@webrtc.org',
+ 'minyue@webrtc.org',
'peah@webrtc.org'],
- 'video_engine': ['andresp@webrtc.org',
- 'mflodman@webrtc.org',
- 'perkj@webrtc.org',
- 'stefan@webrtc.org',
- 'yujie.mao@webrtc.org',
- 'solenberg@webrtc.org'],
- 'voice_engine': ['henrika@webrtc.org',
+ 'audio': ['solenberg@webrtc.org',
+ 'tina.legrand@webrtc.org'],
+ 'call': ['mflodman@webrtc.org',
+ 'pbos@webrtc.org',
+ 'solenberg@webrtc.org',
+ 'stefan@webrtc.org'],
+ 'video': ['andresp@webrtc.org',
+ 'mflodman@webrtc.org',
+ 'pbos@webrtc.org',
+ 'perkj@webrtc.org',
+ 'solenberg@webrtc.org',
+ 'stefan@webrtc.org',
+ 'video-team@agora.io',
+ 'yujie.mao@webrtc.org',
+ 'zhengzhonghou@agora.io'],
+ 'voice_engine': ['andrew@webrtc.org',
+ 'audio-team@agora.io',
+ 'henrika@webrtc.org',
'henrik.lundin@webrtc.org',
- 'solenberg@webrtc.org',
- 'peah@webrtc.org'],
+ 'minyue@webrtc.org',
+ 'peah@webrtc.org',
+ 'solenberg@webrtc.org'],
'video_capture': ['mflodman@webrtc.org',
- 'perkj@webrtc.org'],
+ 'perkj@webrtc.org',
+ 'sdk-team@agora.io',
+ 'zhengzhonghou@agora.io'],
'video_render': ['mflodman@webrtc.org',
- 'perkj@webrtc.org'],
- 'audio_device': ['henrika@webrtc.org',
- 'peah@webrtc.org'],
- 'audio_coding': ['tina.legrand@webrtc.org',
+ 'perkj@webrtc.org',
+ 'sdk-team@agora.io',
+ 'zhengzhonghou@agora.io'],
+ 'audio_device': ['audio-team@agora.io',
+ 'henrika@webrtc.org',
+ 'peah@webrtc.org',
+ 'sdk-team@agora.io'],
+ 'audio_coding': ['audio-team@agora.io',
'henrik.lundin@webrtc.org',
'kwiberg@webrtc.org',
- 'peah@webrtc.org'],
- 'neteq': ['henrik.lundin@webrtc.org'],
+ 'minyue@webrtc.org',
+ 'peah@webrtc.org',
+ 'tina.legrand@webrtc.org'],
+ 'neteq': ['audio-team@agora.io',
+ 'henrik.lundin@webrtc.org',
+ 'minyue@webrtc.org'],
'audio_processing': ['aluebs@webrtc.org',
'andrew@webrtc.org',
+ 'audio-team@agora.io',
'bjornv@webrtc.org',
- 'kwiberg@webrtc.org',
'henrik.lundin@webrtc.org',
+ 'kwiberg@webrtc.org',
+ 'minyue@webrtc.org',
'peah@webrtc.org',
'solenberg@webrtc.org'],
- 'video_coding': ['stefan@webrtc.org',
- 'mflodman@webrtc.org'],
- 'video_processing': ['stefan@webrtc.org'],
- 'bitrate_controller': ['stefan@webrtc.org',
- 'mflodman@webrtc.org'],
- 'remote_bitrate_estimator': ['stefan@webrtc.org',
- 'mflodman@webrtc.org'],
- 'pacing': ['stefan@webrtc.org',
- 'mflodman@webrtc.org'],
+ 'video_coding': ['mflodman@webrtc.org',
+ 'stefan@webrtc.org',
+ 'video-team@agora.io',
+ 'zhengzhonghou@agora.io'],
+ 'video_processing': ['stefan@webrtc.org',
+ 'video-team@agora.io',
+ 'zhengzhonghou@agora.io'],
+ 'bitrate_controller': ['mflodman@webrtc.org',
+ 'stefan@webrtc.org',
+ 'zhuangzesen@agora.io'],
+ 'remote_bitrate_estimator': ['mflodman@webrtc.org',
+ 'stefan@webrtc.org',
+ 'zhuangzesen@agora.io'],
+ 'pacing': ['mflodman@webrtc.org',
+ 'stefan@webrtc.org',
+ 'zhuangzesen@agora.io'],
'rtp_rtcp': ['mflodman@webrtc.org',
- 'stefan@webrtc.org'],
- 'system_wrappers': ['mflodman@webrtc.org',
+ 'stefan@webrtc.org',
+ 'zhuangzesen@agora.io'],
+ 'system_wrappers': ['fengyue@agora.io',
'henrika@webrtc.org',
- 'andrew@webrtc.org',
- 'peah@webrtc.org'],
+ 'mflodman@webrtc.org',
+ 'peah@webrtc.org',
+ 'zhengzhonghou@agora.io'],
},
}
diff --git a/all.gyp b/all.gyp
index 40dbc135a6..0b11c8f7d6 100644
--- a/all.gyp
+++ b/all.gyp
@@ -24,7 +24,6 @@
'conditions': [
['include_examples==1', {
'dependencies': [
- 'webrtc/libjingle_examples.gyp:*',
'webrtc/webrtc_examples.gyp:*',
],
}],
diff --git a/chromium/.gclient b/chromium/.gclient
index 7f46333e45..9d5dfcf7cf 100644
--- a/chromium/.gclient
+++ b/chromium/.gclient
@@ -10,7 +10,6 @@ solutions = [{
'src/chrome/tools/test/reference_build/chrome_win': None,
'src/native_client': None,
'src/third_party/cld_2/src': None,
- 'src/third_party/ffmpeg': None,
'src/third_party/hunspell_dictionaries': None,
'src/third_party/liblouis/src': None,
'src/third_party/pdfium': None,
diff --git a/infra/config/cq.cfg b/infra/config/cq.cfg
index 80f6f60a77..4c49280f0f 100644
--- a/infra/config/cq.cfg
+++ b/infra/config/cq.cfg
@@ -30,6 +30,8 @@ verifiers {
builders { name: "android_rel" }
builders { name: "android_arm64_rel" }
builders { name: "android_clang_dbg" }
+ builders { name: "android_compile_x86_dbg" }
+ builders { name: "android_compile_x64_dbg" }
builders { name: "android_gn_dbg" }
builders { name: "android_gn_rel" }
builders { name: "ios_arm64_dbg" }
@@ -38,27 +40,34 @@ verifiers {
builders { name: "ios_rel" }
builders { name: "ios32_sim_dbg" }
builders { name: "ios64_sim_dbg" }
- builders { name: "linux_compile_dbg" }
builders { name: "linux_asan" }
+ builders { name: "linux_baremetal" }
+ builders { name: "linux_compile_dbg" }
builders { name: "linux_gn_dbg" }
builders { name: "linux_gn_rel" }
+ # Disabled, see http://crbug.com/577566 for details.
+ #builders { name: "linux_libfuzzer_rel" }
builders { name: "linux_msan" }
builders { name: "linux_rel" }
builders { name: "linux_tsan2" }
- builders { name: "mac_compile_dbg" }
- builders { name: "mac_compile_x64_dbg" }
builders { name: "mac_asan" }
+ builders { name: "mac_baremetal" }
+ builders { name: "mac_compile_dbg" }
builders { name: "mac_rel" }
- builders { name: "mac_x64_gn_dbg" }
- builders { name: "mac_x64_gn_rel" }
- builders { name: "mac_x64_rel" }
+ builders { name: "mac_gn_dbg" }
+ builders { name: "mac_gn_rel" }
builders { name: "presubmit" }
+ builders { name: "win_baremetal" }
+ builders { name: "win_clang_dbg" }
+ builders { name: "win_clang_rel" }
builders { name: "win_compile_dbg" }
+ builders { name: "win_drmemory_light" }
builders { name: "win_rel" }
+ builders { name: "win_x64_clang_dbg" }
+ builders { name: "win_x64_clang_rel" }
builders { name: "win_x64_gn_dbg" }
builders { name: "win_x64_gn_rel" }
builders { name: "win_x64_rel" }
- builders { name: "win_drmemory_light" }
}
}
}
diff --git a/resources/audio_coding/neteq4_network_stats.dat.sha1 b/resources/audio_coding/neteq4_network_stats.dat.sha1
index 72a94994df..f51a02a738 100644
--- a/resources/audio_coding/neteq4_network_stats.dat.sha1
+++ b/resources/audio_coding/neteq4_network_stats.dat.sha1
@@ -1 +1 @@
-e5e2d0ff26d16339cf0f37a3512bfa2d390a9a9a \ No newline at end of file
+2cf380a05ee07080bd72471e8ec7777a39644ec9 \ No newline at end of file
diff --git a/resources/audio_coding/neteq4_opus_network_stats.dat.sha1 b/resources/audio_coding/neteq4_opus_network_stats.dat.sha1
new file mode 100644
index 0000000000..6a9e7ee9e9
--- /dev/null
+++ b/resources/audio_coding/neteq4_opus_network_stats.dat.sha1
@@ -0,0 +1 @@
+cc9fa62d0a8f46ffebc782aea2610dda67bb5558 \ No newline at end of file
diff --git a/resources/audio_coding/neteq4_opus_ref.pcm.sha1 b/resources/audio_coding/neteq4_opus_ref.pcm.sha1
new file mode 100644
index 0000000000..5cecc50d9c
--- /dev/null
+++ b/resources/audio_coding/neteq4_opus_ref.pcm.sha1
@@ -0,0 +1 @@
+301895f1aaa9cd9eae0f5d04d179d63491d744cc \ No newline at end of file
diff --git a/resources/audio_coding/neteq4_opus_ref_win_32.pcm.sha1 b/resources/audio_coding/neteq4_opus_ref_win_32.pcm.sha1
new file mode 100644
index 0000000000..b7cf990697
--- /dev/null
+++ b/resources/audio_coding/neteq4_opus_ref_win_32.pcm.sha1
@@ -0,0 +1 @@
+fbad99878c7a26958e755190027c976692708334 \ No newline at end of file
diff --git a/resources/audio_coding/neteq4_opus_ref_win_64.pcm.sha1 b/resources/audio_coding/neteq4_opus_ref_win_64.pcm.sha1
new file mode 100644
index 0000000000..b7cf990697
--- /dev/null
+++ b/resources/audio_coding/neteq4_opus_ref_win_64.pcm.sha1
@@ -0,0 +1 @@
+fbad99878c7a26958e755190027c976692708334 \ No newline at end of file
diff --git a/resources/audio_coding/neteq4_opus_rtcp_stats.dat.sha1 b/resources/audio_coding/neteq4_opus_rtcp_stats.dat.sha1
new file mode 100644
index 0000000000..05570b898f
--- /dev/null
+++ b/resources/audio_coding/neteq4_opus_rtcp_stats.dat.sha1
@@ -0,0 +1 @@
+e37c797e3de6a64dda88c9ade7a013d022a2e1e0 \ No newline at end of file
diff --git a/resources/audio_coding/neteq4_rtcp_stats.dat.sha1 b/resources/audio_coding/neteq4_rtcp_stats.dat.sha1
index ae63c7683a..1fa337bb80 100644
--- a/resources/audio_coding/neteq4_rtcp_stats.dat.sha1
+++ b/resources/audio_coding/neteq4_rtcp_stats.dat.sha1
@@ -1 +1 @@
-948753a2087fbb5b74a3ea0b1aef8593c9c30b10 \ No newline at end of file
+b8880bf9fed2487efbddcb8d94b9937a29ae521d \ No newline at end of file
diff --git a/resources/audio_coding/neteq_network_stats.dat.sha1 b/resources/audio_coding/neteq_network_stats.dat.sha1
deleted file mode 100644
index e02ad84b92..0000000000
--- a/resources/audio_coding/neteq_network_stats.dat.sha1
+++ /dev/null
@@ -1 +0,0 @@
-e3c189b500d92fd0f10cb4c770c298cee7008749 \ No newline at end of file
diff --git a/resources/audio_coding/neteq_network_stats_win_32.dat.sha1 b/resources/audio_coding/neteq_network_stats_win_32.dat.sha1
deleted file mode 100644
index a197fc18b5..0000000000
--- a/resources/audio_coding/neteq_network_stats_win_32.dat.sha1
+++ /dev/null
@@ -1 +0,0 @@
-343061419a64ca99323fc0d25a43149a5d40cf07 \ No newline at end of file
diff --git a/resources/audio_coding/neteq_opus.rtp.sha1 b/resources/audio_coding/neteq_opus.rtp.sha1
new file mode 100644
index 0000000000..ff5b8fee3a
--- /dev/null
+++ b/resources/audio_coding/neteq_opus.rtp.sha1
@@ -0,0 +1 @@
+21c8f8aaf9518a629d6c6def87fe6ea1305d5c91 \ No newline at end of file
diff --git a/resources/audio_coding/neteq_rtcp_stats.dat.sha1 b/resources/audio_coding/neteq_rtcp_stats.dat.sha1
deleted file mode 100644
index 42d37ac748..0000000000
--- a/resources/audio_coding/neteq_rtcp_stats.dat.sha1
+++ /dev/null
@@ -1 +0,0 @@
-759d57e87517e0290144add9ba53d5c6dec1c27e \ No newline at end of file
diff --git a/resources/audio_coding/neteq_universal_ref.pcm.sha1 b/resources/audio_coding/neteq_universal_ref.pcm.sha1
deleted file mode 100644
index 95e9de7d12..0000000000
--- a/resources/audio_coding/neteq_universal_ref.pcm.sha1
+++ /dev/null
@@ -1 +0,0 @@
-236da353f05d329f6c83e441b80dbfcc18706cfb \ No newline at end of file
diff --git a/resources/audio_coding/neteq_universal_ref_win_32.pcm.sha1 b/resources/audio_coding/neteq_universal_ref_win_32.pcm.sha1
deleted file mode 100644
index 47c90cfd4d..0000000000
--- a/resources/audio_coding/neteq_universal_ref_win_32.pcm.sha1
+++ /dev/null
@@ -1 +0,0 @@
-1d2d353be4345d30506866ca32fa72825c6d65b8 \ No newline at end of file
diff --git a/setup_links.py b/setup_links.py
index 9aeb1e8764..492c38b70e 100755
--- a/setup_links.py
+++ b/setup_links.py
@@ -34,17 +34,16 @@ import textwrap
DIRECTORIES = [
'build',
'buildtools',
- 'google_apis', # Needed by build/common.gypi.
- 'net',
'testing',
'third_party/binutils',
'third_party/boringssl',
'third_party/colorama',
'third_party/drmemory',
'third_party/expat',
- 'third_party/icu',
+ 'third_party/ffmpeg',
'third_party/instrumented_libraries',
'third_party/jsoncpp',
+ 'third_party/libc++-static',
'third_party/libjpeg',
'third_party/libjpeg_turbo',
'third_party/libsrtp',
@@ -55,6 +54,7 @@ DIRECTORIES = [
'third_party/lss',
'third_party/nss',
'third_party/ocmock',
+ 'third_party/openh264',
'third_party/openmax_dl',
'third_party/opus',
'third_party/proguard',
@@ -84,10 +84,11 @@ if 'android' in target_os:
DIRECTORIES += [
'base',
'third_party/android_platform',
- 'third_party/android_testrunner',
'third_party/android_tools',
'third_party/appurify-python',
'third_party/ashmem',
+ 'third_party/catapult',
+ 'third_party/icu',
'third_party/ijar',
'third_party/jsr-305',
'third_party/junit',
@@ -99,13 +100,12 @@ if 'android' in target_os:
'third_party/robolectric',
'tools/android',
'tools/grit',
- 'tools/relocation_packer'
+ 'tools/telemetry',
]
if 'ios' in target_os:
DIRECTORIES.append('third_party/class-dump')
FILES = {
- 'tools/find_depot_tools.py': None,
'tools/isolate_driver.py': None,
'third_party/BUILD.gn': None,
}
diff --git a/sync_chromium.py b/sync_chromium.py
index 442ddcde91..b37e0da263 100755
--- a/sync_chromium.py
+++ b/sync_chromium.py
@@ -31,7 +31,7 @@ import textwrap
# Bump this whenever the algorithm changes and you need bots/devs to re-sync,
# ignoring the .last_sync_chromium file
-SCRIPT_VERSION = 5
+SCRIPT_VERSION = 7
ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
CHROMIUM_NO_HISTORY = 'CHROMIUM_NO_HISTORY'
diff --git a/talk/app/webrtc/OWNERS b/talk/app/webrtc/OWNERS
index ffd78e1777..20a1fdf80d 100644
--- a/talk/app/webrtc/OWNERS
+++ b/talk/app/webrtc/OWNERS
@@ -1,5 +1,5 @@
glaznev@webrtc.org
-juberti@google.com
-perkj@google.com
+juberti@webrtc.org
+perkj@webrtc.org
tkchin@webrtc.org
-tommi@google.com
+tommi@webrtc.org
diff --git a/talk/app/webrtc/androidtests/src/org/webrtc/GlRectDrawerTest.java b/talk/app/webrtc/androidtests/src/org/webrtc/GlRectDrawerTest.java
index 1c01ffa0b8..63c05fb616 100644
--- a/talk/app/webrtc/androidtests/src/org/webrtc/GlRectDrawerTest.java
+++ b/talk/app/webrtc/androidtests/src/org/webrtc/GlRectDrawerTest.java
@@ -28,7 +28,6 @@ package org.webrtc;
import android.graphics.SurfaceTexture;
import android.opengl.GLES20;
-import android.opengl.Matrix;
import android.test.ActivityTestCase;
import android.test.suitebuilder.annotation.MediumTest;
import android.test.suitebuilder.annotation.SmallTest;
@@ -36,9 +35,6 @@ import android.test.suitebuilder.annotation.SmallTest;
import java.nio.ByteBuffer;
import java.util.Random;
-import javax.microedition.khronos.egl.EGL10;
-import javax.microedition.khronos.egl.EGLContext;
-
public final class GlRectDrawerTest extends ActivityTestCase {
// Resolution of the test image.
private static final int WIDTH = 16;
@@ -46,7 +42,7 @@ public final class GlRectDrawerTest extends ActivityTestCase {
// Seed for random pixel creation.
private static final int SEED = 42;
// When comparing pixels, allow some slack for float arithmetic and integer rounding.
- private static final float MAX_DIFF = 1.0f;
+ private static final float MAX_DIFF = 1.5f;
private static float normalizedByte(byte b) {
return (b & 0xFF) / 255.0f;
@@ -100,7 +96,7 @@ public final class GlRectDrawerTest extends ActivityTestCase {
@SmallTest
public void testRgbRendering() {
// Create EGL base with a pixel buffer as display output.
- final EglBase eglBase = new EglBase(EGL10.EGL_NO_CONTEXT, EglBase.ConfigType.PIXEL_BUFFER);
+ final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
eglBase.createPbufferSurface(WIDTH, HEIGHT);
eglBase.makeCurrent();
@@ -119,7 +115,7 @@ public final class GlRectDrawerTest extends ActivityTestCase {
// Draw the RGB frame onto the pixel buffer.
final GlRectDrawer drawer = new GlRectDrawer();
- drawer.drawRgb(rgbTexture, RendererCommon.identityMatrix());
+ drawer.drawRgb(rgbTexture, RendererCommon.identityMatrix(), 0, 0, WIDTH, HEIGHT);
// Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
final ByteBuffer rgbaData = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 4);
@@ -137,7 +133,7 @@ public final class GlRectDrawerTest extends ActivityTestCase {
@SmallTest
public void testYuvRendering() {
// Create EGL base with a pixel buffer as display output.
- EglBase eglBase = new EglBase(EGL10.EGL_NO_CONTEXT, EglBase.ConfigType.PIXEL_BUFFER);
+ EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
eglBase.createPbufferSurface(WIDTH, HEIGHT);
eglBase.makeCurrent();
@@ -166,7 +162,7 @@ public final class GlRectDrawerTest extends ActivityTestCase {
// Draw the YUV frame onto the pixel buffer.
final GlRectDrawer drawer = new GlRectDrawer();
- drawer.drawYuv(yuvTextures, RendererCommon.identityMatrix());
+ drawer.drawYuv(yuvTextures, RendererCommon.identityMatrix(), 0, 0, WIDTH, HEIGHT);
// Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
final ByteBuffer data = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 4);
@@ -231,8 +227,9 @@ public final class GlRectDrawerTest extends ActivityTestCase {
private final int rgbTexture;
public StubOesTextureProducer(
- EGLContext sharedContext, SurfaceTexture surfaceTexture, int width, int height) {
- eglBase = new EglBase(sharedContext, EglBase.ConfigType.PLAIN);
+ EglBase.Context sharedContext, SurfaceTexture surfaceTexture, int width,
+ int height) {
+ eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PLAIN);
surfaceTexture.setDefaultBufferSize(width, height);
eglBase.createSurface(surfaceTexture);
assertEquals(eglBase.surfaceWidth(), width);
@@ -253,7 +250,7 @@ public final class GlRectDrawerTest extends ActivityTestCase {
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, WIDTH,
HEIGHT, 0, GLES20.GL_RGB, GLES20.GL_UNSIGNED_BYTE, rgbPlane);
// Draw the RGB data onto the SurfaceTexture.
- drawer.drawRgb(rgbTexture, RendererCommon.identityMatrix());
+ drawer.drawRgb(rgbTexture, RendererCommon.identityMatrix(), 0, 0, WIDTH, HEIGHT);
eglBase.swapBuffers();
}
@@ -266,14 +263,14 @@ public final class GlRectDrawerTest extends ActivityTestCase {
}
// Create EGL base with a pixel buffer as display output.
- final EglBase eglBase = new EglBase(EGL10.EGL_NO_CONTEXT, EglBase.ConfigType.PIXEL_BUFFER);
+ final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
eglBase.createPbufferSurface(WIDTH, HEIGHT);
// Create resources for generating OES textures.
final SurfaceTextureHelper surfaceTextureHelper =
- SurfaceTextureHelper.create(eglBase.getContext());
+ SurfaceTextureHelper.create(eglBase.getEglBaseContext());
final StubOesTextureProducer oesProducer = new StubOesTextureProducer(
- eglBase.getContext(), surfaceTextureHelper.getSurfaceTexture(), WIDTH, HEIGHT);
+ eglBase.getEglBaseContext(), surfaceTextureHelper.getSurfaceTexture(), WIDTH, HEIGHT);
final SurfaceTextureHelperTest.MockTextureListener listener =
new SurfaceTextureHelperTest.MockTextureListener();
surfaceTextureHelper.setListener(listener);
@@ -291,7 +288,7 @@ public final class GlRectDrawerTest extends ActivityTestCase {
// Draw the OES texture on the pixel buffer.
eglBase.makeCurrent();
final GlRectDrawer drawer = new GlRectDrawer();
- drawer.drawOes(listener.oesTextureId, listener.transformMatrix);
+ drawer.drawOes(listener.oesTextureId, listener.transformMatrix, 0, 0, WIDTH, HEIGHT);
// Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
final ByteBuffer rgbaData = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 4);
diff --git a/talk/app/webrtc/androidtests/src/org/webrtc/MediaCodecVideoEncoderTest.java b/talk/app/webrtc/androidtests/src/org/webrtc/MediaCodecVideoEncoderTest.java
new file mode 100644
index 0000000000..b1ec5dda0e
--- /dev/null
+++ b/talk/app/webrtc/androidtests/src/org/webrtc/MediaCodecVideoEncoderTest.java
@@ -0,0 +1,180 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package org.webrtc;
+
+import android.annotation.TargetApi;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.os.Build;
+import android.test.ActivityTestCase;
+import android.test.suitebuilder.annotation.SmallTest;
+import android.util.Log;
+
+import org.webrtc.MediaCodecVideoEncoder.OutputBufferInfo;
+
+import java.nio.ByteBuffer;
+
+@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1)
+public final class MediaCodecVideoEncoderTest extends ActivityTestCase {
+  private static final String TAG = "MediaCodecVideoEncoderTest";
+
+ @SmallTest
+ public static void testInitializeUsingByteBuffer() {
+ if (!MediaCodecVideoEncoder.isVp8HwSupported()) {
+ Log.i(TAG,
+ "Hardware does not support VP8 encoding, skipping testInitReleaseUsingByteBuffer");
+ return;
+ }
+ MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
+ assertTrue(encoder.initEncode(
+ MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30, null));
+ encoder.release();
+ }
+
+ @SmallTest
+  public static void testInitializeUsingTextures() {
+    if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) {
+      Log.i(TAG, "Hardware does not support VP8 encoding, skipping testInitializeUsingTextures");
+ return;
+ }
+ EglBase14 eglBase = new EglBase14(null, EglBase.CONFIG_PLAIN);
+ MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
+ assertTrue(encoder.initEncode(
+ MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30,
+ eglBase.getEglBaseContext()));
+ encoder.release();
+ eglBase.release();
+ }
+
+ @SmallTest
+  public static void testInitializeUsingByteBufferReInitializeUsingTextures() {
+    if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) {
+      Log.i(TAG, "Hardware does not support VP8 encoding, skipping testInitializeUsingByteBufferReInitializeUsingTextures");
+ return;
+ }
+ MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
+ assertTrue(encoder.initEncode(
+ MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30,
+ null));
+ encoder.release();
+ EglBase14 eglBase = new EglBase14(null, EglBase.CONFIG_PLAIN);
+ assertTrue(encoder.initEncode(
+ MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30,
+ eglBase.getEglBaseContext()));
+ encoder.release();
+ eglBase.release();
+ }
+
+ @SmallTest
+ public static void testEncoderUsingByteBuffer() throws InterruptedException {
+ if (!MediaCodecVideoEncoder.isVp8HwSupported()) {
+ Log.i(TAG, "Hardware does not support VP8 encoding, skipping testEncoderUsingByteBuffer");
+ return;
+ }
+
+ final int width = 640;
+ final int height = 480;
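+    // An I420 frame of this size takes width * height * 3 / 2 bytes: a
+    // full-size Y plane plus quarter-size U and V planes.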
+ final int min_size = width * height * 3 / 2;
+ final long presentationTimestampUs = 2;
+
+ MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
+
+ assertTrue(encoder.initEncode(
+ MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, width, height, 300, 30, null));
+ ByteBuffer[] inputBuffers = encoder.getInputBuffers();
+ assertNotNull(inputBuffers);
+ assertTrue(min_size <= inputBuffers[0].capacity());
+
+ int bufferIndex;
+ do {
+ Thread.sleep(10);
+ bufferIndex = encoder.dequeueInputBuffer();
+ } while (bufferIndex == -1); // |-1| is returned when there is no buffer available yet.
+
+ assertTrue(bufferIndex >= 0);
+ assertTrue(bufferIndex < inputBuffers.length);
+ assertTrue(encoder.encodeBuffer(true, bufferIndex, min_size, presentationTimestampUs));
+
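+    // Poll until the encoder produces an encoded frame for the queued input.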
+ OutputBufferInfo info;
+ do {
+ info = encoder.dequeueOutputBuffer();
+ Thread.sleep(10);
+ } while (info == null);
+ assertTrue(info.index >= 0);
+ assertEquals(presentationTimestampUs, info.presentationTimestampUs);
+ assertTrue(info.buffer.capacity() > 0);
+ encoder.releaseOutputBuffer(info.index);
+
+ encoder.release();
+ }
+
+ @SmallTest
+ public static void testEncoderUsingTextures() throws InterruptedException {
+ if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) {
+ Log.i(TAG, "Hardware does not support VP8 encoding, skipping testEncoderUsingTextures");
+ return;
+ }
+
+ final int width = 640;
+ final int height = 480;
+ final long presentationTs = 2;
+
+ final EglBase14 eglOesBase = new EglBase14(null, EglBase.CONFIG_PIXEL_BUFFER);
+ eglOesBase.createDummyPbufferSurface();
+ eglOesBase.makeCurrent();
+ int oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
+
+  // TODO(perkj): This test is weak since we don't fill the texture with valid data of the correct
+  // width and height and verify the encoded data. Fill the OES texture and figure out a way to
+  // verify that the output makes sense.
+
+ MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
+
+ assertTrue(encoder.initEncode(
+ MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, width, height, 300, 30,
+ eglOesBase.getEglBaseContext()));
+ assertTrue(encoder.encodeTexture(true, oesTextureId, RendererCommon.identityMatrix(),
+ presentationTs));
+ GlUtil.checkNoGLES2Error("encodeTexture");
+
+  // It should be OK to delete the texture after calling encodeTexture.
+ GLES20.glDeleteTextures(1, new int[] {oesTextureId}, 0);
+
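+  // Poll for the encoded output; dequeueOutputBuffer() returns null until a frame is ready.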
+ OutputBufferInfo info = encoder.dequeueOutputBuffer();
+ while (info == null) {
+ info = encoder.dequeueOutputBuffer();
+ Thread.sleep(20);
+ }
+ assertTrue(info.index != -1);
+ assertTrue(info.buffer.capacity() > 0);
+ assertEquals(presentationTs, info.presentationTimestampUs);
+ encoder.releaseOutputBuffer(info.index);
+
+ encoder.release();
+ eglOesBase.release();
+ }
+}
diff --git a/talk/app/webrtc/androidtests/src/org/webrtc/SurfaceTextureHelperTest.java b/talk/app/webrtc/androidtests/src/org/webrtc/SurfaceTextureHelperTest.java
index 882fde1875..9e0164d4b8 100644
--- a/talk/app/webrtc/androidtests/src/org/webrtc/SurfaceTextureHelperTest.java
+++ b/talk/app/webrtc/androidtests/src/org/webrtc/SurfaceTextureHelperTest.java
@@ -37,8 +37,6 @@ import android.test.suitebuilder.annotation.SmallTest;
import java.nio.ByteBuffer;
-import javax.microedition.khronos.egl.EGL10;
-
public final class SurfaceTextureHelperTest extends ActivityTestCase {
/**
* Mock texture listener with blocking wait functionality.
@@ -99,6 +97,14 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
}
}
+ /** Assert that two integers are close, with difference at most
+ * {@code threshold}. */
+ public static void assertClose(int threshold, int expected, int actual) {
+ if (Math.abs(expected - actual) <= threshold)
+ return;
+ failNotEquals("Not close enough, threshold " + threshold, expected, actual);
+ }
+
/**
* Test normal use by receiving three uniform texture frames. Texture frames are returned as early
* as possible. The texture pixel values are inspected by drawing the texture frame to a pixel
@@ -109,20 +115,21 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
final int width = 16;
final int height = 16;
// Create EGL base with a pixel buffer as display output.
- final EglBase eglBase = new EglBase(EGL10.EGL_NO_CONTEXT, EglBase.ConfigType.PIXEL_BUFFER);
+ final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
eglBase.createPbufferSurface(width, height);
final GlRectDrawer drawer = new GlRectDrawer();
// Create SurfaceTextureHelper and listener.
final SurfaceTextureHelper surfaceTextureHelper =
- SurfaceTextureHelper.create(eglBase.getContext());
+ SurfaceTextureHelper.create(eglBase.getEglBaseContext());
final MockTextureListener listener = new MockTextureListener();
surfaceTextureHelper.setListener(listener);
surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);
// Create resources for stubbing an OES texture producer. |eglOesBase| has the SurfaceTexture in
// |surfaceTextureHelper| as the target EGLSurface.
- final EglBase eglOesBase = new EglBase(eglBase.getContext(), EglBase.ConfigType.PLAIN);
+ final EglBase eglOesBase =
+ EglBase.create(eglBase.getEglBaseContext(), EglBase.CONFIG_PLAIN);
eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
assertEquals(eglOesBase.surfaceWidth(), width);
assertEquals(eglOesBase.surfaceHeight(), height);
@@ -142,7 +149,7 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
// Wait for an OES texture to arrive and draw it onto the pixel buffer.
listener.waitForNewFrame();
eglBase.makeCurrent();
- drawer.drawOes(listener.oesTextureId, listener.transformMatrix);
+ drawer.drawOes(listener.oesTextureId, listener.transformMatrix, 0, 0, width, height);
surfaceTextureHelper.returnTextureFrame();
@@ -176,19 +183,20 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
final int width = 16;
final int height = 16;
// Create EGL base with a pixel buffer as display output.
- final EglBase eglBase = new EglBase(EGL10.EGL_NO_CONTEXT, EglBase.ConfigType.PIXEL_BUFFER);
+ final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
eglBase.createPbufferSurface(width, height);
// Create SurfaceTextureHelper and listener.
final SurfaceTextureHelper surfaceTextureHelper =
- SurfaceTextureHelper.create(eglBase.getContext());
+ SurfaceTextureHelper.create(eglBase.getEglBaseContext());
final MockTextureListener listener = new MockTextureListener();
surfaceTextureHelper.setListener(listener);
surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);
// Create resources for stubbing an OES texture producer. |eglOesBase| has the SurfaceTexture in
// |surfaceTextureHelper| as the target EGLSurface.
- final EglBase eglOesBase = new EglBase(eglBase.getContext(), EglBase.ConfigType.PLAIN);
+ final EglBase eglOesBase =
+ EglBase.create(eglBase.getEglBaseContext(), EglBase.CONFIG_PLAIN);
eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
assertEquals(eglOesBase.surfaceWidth(), width);
assertEquals(eglOesBase.surfaceHeight(), height);
@@ -212,7 +220,7 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
// Draw the pending texture frame onto the pixel buffer.
eglBase.makeCurrent();
final GlRectDrawer drawer = new GlRectDrawer();
- drawer.drawOes(listener.oesTextureId, listener.transformMatrix);
+ drawer.drawOes(listener.oesTextureId, listener.transformMatrix, 0, 0, width, height);
drawer.release();
// Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
@@ -240,11 +248,11 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
public static void testDisconnect() throws InterruptedException {
// Create SurfaceTextureHelper and listener.
final SurfaceTextureHelper surfaceTextureHelper =
- SurfaceTextureHelper.create(EGL10.EGL_NO_CONTEXT);
+ SurfaceTextureHelper.create(null);
final MockTextureListener listener = new MockTextureListener();
surfaceTextureHelper.setListener(listener);
// Create EglBase with the SurfaceTexture as target EGLSurface.
- final EglBase eglBase = new EglBase(EGL10.EGL_NO_CONTEXT, EglBase.ConfigType.PLAIN);
+ final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PLAIN);
eglBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
eglBase.makeCurrent();
// Assert no frame has been received yet.
@@ -276,7 +284,7 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
@SmallTest
public static void testDisconnectImmediately() {
final SurfaceTextureHelper surfaceTextureHelper =
- SurfaceTextureHelper.create(EGL10.EGL_NO_CONTEXT);
+ SurfaceTextureHelper.create(null);
surfaceTextureHelper.disconnect();
}
@@ -292,14 +300,14 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
// Create SurfaceTextureHelper and listener.
final SurfaceTextureHelper surfaceTextureHelper =
- SurfaceTextureHelper.create(EGL10.EGL_NO_CONTEXT, handler);
+ SurfaceTextureHelper.create(null, handler);
// Create a mock listener and expect frames to be delivered on |thread|.
final MockTextureListener listener = new MockTextureListener(thread);
surfaceTextureHelper.setListener(listener);
// Create resources for stubbing an OES texture producer. |eglOesBase| has the
// SurfaceTexture in |surfaceTextureHelper| as the target EGLSurface.
- final EglBase eglOesBase = new EglBase(EGL10.EGL_NO_CONTEXT, EglBase.ConfigType.PLAIN);
+ final EglBase eglOesBase = EglBase.create(null, EglBase.CONFIG_PLAIN);
eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
eglOesBase.makeCurrent();
// Draw a frame onto the SurfaceTexture.
@@ -313,7 +321,119 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
// Return the frame from this thread.
surfaceTextureHelper.returnTextureFrame();
+ surfaceTextureHelper.disconnect(handler);
+ }
+
+ /**
+ * Test using SurfaceTextureHelper on a separate thread. A uniform texture frame is created and
+ * received on a thread separate from the test thread and returned after disconnect.
+ */
+ @MediumTest
+ public static void testLateReturnFrameOnSeparateThread() throws InterruptedException {
+ final HandlerThread thread = new HandlerThread("SurfaceTextureHelperTestThread");
+ thread.start();
+ final Handler handler = new Handler(thread.getLooper());
+
+ // Create SurfaceTextureHelper and listener.
+ final SurfaceTextureHelper surfaceTextureHelper =
+ SurfaceTextureHelper.create(null, handler);
+ // Create a mock listener and expect frames to be delivered on |thread|.
+ final MockTextureListener listener = new MockTextureListener(thread);
+ surfaceTextureHelper.setListener(listener);
+
+ // Create resources for stubbing an OES texture producer. |eglOesBase| has the
+ // SurfaceTexture in |surfaceTextureHelper| as the target EGLSurface.
+ final EglBase eglOesBase = EglBase.create(null, EglBase.CONFIG_PLAIN);
+ eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
+ eglOesBase.makeCurrent();
+ // Draw a frame onto the SurfaceTexture.
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ // swapBuffers() will ultimately trigger onTextureFrameAvailable().
+ eglOesBase.swapBuffers();
+ eglOesBase.release();
+
+ // Wait for an OES texture to arrive.
+ listener.waitForNewFrame();
+
+ surfaceTextureHelper.disconnect(handler);
+
+ surfaceTextureHelper.returnTextureFrame();
+ }
+
+ @MediumTest
+ public static void testTextureToYUV() throws InterruptedException {
+ final int width = 16;
+ final int height = 16;
+
+ final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PLAIN);
+
+ // Create SurfaceTextureHelper and listener.
+ final SurfaceTextureHelper surfaceTextureHelper =
+ SurfaceTextureHelper.create(eglBase.getEglBaseContext());
+ final MockTextureListener listener = new MockTextureListener();
+ surfaceTextureHelper.setListener(listener);
+ surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);
+
+ // Create resources for stubbing an OES texture producer. |eglBase| has the SurfaceTexture in
+ // |surfaceTextureHelper| as the target EGLSurface.
+
+ eglBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
+ assertEquals(eglBase.surfaceWidth(), width);
+ assertEquals(eglBase.surfaceHeight(), height);
+
+ final int red[] = new int[] {79, 144, 185};
+ final int green[] = new int[] {66, 210, 162};
+ final int blue[] = new int[] {161, 117, 158};
+
+ final int ref_y[] = new int[] {81, 180, 168};
+ final int ref_u[] = new int[] {173, 93, 122};
+ final int ref_v[] = new int[] {127, 103, 140};
+
+ // Draw three frames.
+ for (int i = 0; i < 3; ++i) {
+ // Draw a constant color frame onto the SurfaceTexture.
+ eglBase.makeCurrent();
+ GLES20.glClearColor(red[i] / 255.0f, green[i] / 255.0f, blue[i] / 255.0f, 1.0f);
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ // swapBuffers() will ultimately trigger onTextureFrameAvailable().
+ eglBase.swapBuffers();
+
+ // Wait for an OES texture to arrive.
+ listener.waitForNewFrame();
+
+ // Memory layout: Lines are 16 bytes. First 16 lines are
+ // the Y data. These are followed by 8 lines with 8 bytes of U
+ // data on the left and 8 bytes of V data on the right.
+ //
+ // Offset
+ // 0 YYYYYYYY YYYYYYYY
+ // 16 YYYYYYYY YYYYYYYY
+ // ...
+ // 240 YYYYYYYY YYYYYYYY
+ // 256 UUUUUUUU VVVVVVVV
+ // 272 UUUUUUUU VVVVVVVV
+ // ...
+ // 368 UUUUUUUU VVVVVVVV
+ // 384 buffer end
+ ByteBuffer buffer = ByteBuffer.allocateDirect(width * height * 3 / 2);
+ surfaceTextureHelper.textureToYUV(buffer, width, height, width,
+ listener.oesTextureId, listener.transformMatrix);
+
+ surfaceTextureHelper.returnTextureFrame();
+
+ // Allow off-by-one differences due to different rounding.
+ while (buffer.position() < width * height) {
+ assertClose(1, buffer.get() & 0xff, ref_y[i]);
+ }
+ while (buffer.hasRemaining()) {
+ if (buffer.position() % width < width / 2)
+ assertClose(1, buffer.get() & 0xff, ref_u[i]);
+ else
+ assertClose(1, buffer.get() & 0xff, ref_v[i]);
+ }
+ }
+
surfaceTextureHelper.disconnect();
- thread.quitSafely();
+ eglBase.release();
}
}
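The reference triples in the texture-to-YUV test above are consistent with a full-range BT.601 RGB-to-YUV conversion of the cleared colors. The sketch below is illustrative only; the exact coefficients the conversion path uses may differ slightly in rounding, which is why the test tolerates an off-by-one.

// Illustrative full-range BT.601 conversion; reproduces ref_y/ref_u/ref_v
// above to within the test's +/-1 tolerance.
static int[] rgbToYuv(int r, int g, int b) {
  final int y = (int) Math.round( 0.299 * r + 0.587 * g + 0.114 * b);
  final int u = (int) Math.round(-0.169 * r - 0.331 * g + 0.500 * b + 128.0);
  final int v = (int) Math.round( 0.500 * r - 0.419 * g - 0.081 * b + 128.0);
  return new int[] {y, u, v};
}
// rgbToYuv(79, 66, 161)   -> {81, 173, 127}
// rgbToYuv(144, 210, 117) -> {180, 93, 103}
// rgbToYuv(185, 162, 158) -> {168, 122, 140}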
diff --git a/talk/app/webrtc/androidtests/src/org/webrtc/SurfaceViewRendererOnMeasureTest.java b/talk/app/webrtc/androidtests/src/org/webrtc/SurfaceViewRendererOnMeasureTest.java
index 47fe780124..341c632b58 100644
--- a/talk/app/webrtc/androidtests/src/org/webrtc/SurfaceViewRendererOnMeasureTest.java
+++ b/talk/app/webrtc/androidtests/src/org/webrtc/SurfaceViewRendererOnMeasureTest.java
@@ -36,8 +36,6 @@ import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.List;
-import javax.microedition.khronos.egl.EGL10;
-
public final class SurfaceViewRendererOnMeasureTest extends ActivityTestCase {
/**
* List with all possible scaling types.
@@ -111,7 +109,7 @@ public final class SurfaceViewRendererOnMeasureTest extends ActivityTestCase {
}
// Test behaviour after SurfaceViewRenderer.init() is called, but still no frame.
- surfaceViewRenderer.init(EGL10.EGL_NO_CONTEXT, null);
+ surfaceViewRenderer.init((EglBase.Context) null, null);
for (RendererCommon.ScalingType scalingType : scalingTypes) {
for (int measureSpecMode : measureSpecModes) {
final int zeroMeasureSize = MeasureSpec.makeMeasureSpec(0, measureSpecMode);
@@ -134,7 +132,7 @@ public final class SurfaceViewRendererOnMeasureTest extends ActivityTestCase {
public void testFrame1280x720() {
final SurfaceViewRenderer surfaceViewRenderer =
new SurfaceViewRenderer(getInstrumentation().getContext());
- surfaceViewRenderer.init(EGL10.EGL_NO_CONTEXT, null);
+ surfaceViewRenderer.init((EglBase.Context) null, null);
// Test different rotation degress, but same rotated size.
for (int rotationDegree : new int[] {0, 90, 180, 270}) {
diff --git a/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTest.java b/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTest.java
index dbbe5963cd..1b97201a0a 100644
--- a/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTest.java
+++ b/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTest.java
@@ -29,7 +29,6 @@ package org.webrtc;
import android.test.ActivityTestCase;
import android.test.suitebuilder.annotation.MediumTest;
import android.test.suitebuilder.annotation.SmallTest;
-import android.util.Log;
import android.util.Size;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
@@ -37,8 +36,6 @@ import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
import java.util.HashSet;
import java.util.Set;
-import javax.microedition.khronos.egl.EGL10;
-
@SuppressWarnings("deprecation")
public class VideoCapturerAndroidTest extends ActivityTestCase {
static final String TAG = "VideoCapturerAndroidTest";
@@ -87,8 +84,10 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
@SmallTest
public void testCreateAndReleaseUsingTextures() {
+ EglBase eglBase = EglBase.create();
VideoCapturerAndroidTestFixtures.release(
- VideoCapturerAndroid.create("", null, EGL10.EGL_NO_CONTEXT));
+ VideoCapturerAndroid.create("", null, eglBase.getEglBaseContext()));
+ eglBase.release();
}
@SmallTest
@@ -108,12 +107,13 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
VideoCapturerAndroidTestFixtures.startCapturerAndRender(capturer);
}
- // TODO(perkj): Enable once VideoCapture to texture support has landed in C++.
@SmallTest
- public void DISABLED_testStartVideoCapturerUsingTextures() throws InterruptedException {
+ public void testStartVideoCapturerUsingTextures() throws InterruptedException {
+ EglBase eglBase = EglBase.create();
VideoCapturerAndroid capturer =
- VideoCapturerAndroid.create("", null, EGL10.EGL_NO_CONTEXT);
+ VideoCapturerAndroid.create("", null, eglBase.getEglBaseContext());
VideoCapturerAndroidTestFixtures.startCapturerAndRender(capturer);
+ eglBase.release();
}
@SmallTest
@@ -151,11 +151,13 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
VideoCapturerAndroidTestFixtures.switchCamera(capturer);
}
- // TODO(perkj): Enable once VideoCapture to texture support has landed in C++.
@SmallTest
- public void DISABLED_testSwitchVideoCapturerUsingTextures() throws InterruptedException {
- VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", null, EGL10.EGL_NO_CONTEXT);
+ public void testSwitchVideoCapturerUsingTextures() throws InterruptedException {
+ EglBase eglBase = EglBase.create();
+ VideoCapturerAndroid capturer =
+ VideoCapturerAndroid.create("", null, eglBase.getEglBaseContext());
VideoCapturerAndroidTestFixtures.switchCamera(capturer);
+ eglBase.release();
}
@MediumTest
@@ -179,12 +181,14 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
@MediumTest
public void testCameraCallsAfterStopUsingTextures() throws InterruptedException {
+ EglBase eglBase = EglBase.create();
final String deviceName = CameraEnumerationAndroid.getDeviceName(0);
final VideoCapturerAndroid capturer = VideoCapturerAndroid.create(deviceName, null,
- EGL10.EGL_NO_CONTEXT);
+ eglBase.getEglBaseContext());
VideoCapturerAndroidTestFixtures.cameraCallsAfterStop(capturer,
getInstrumentation().getContext());
+ eglBase.release();
}
@SmallTest
@@ -195,11 +199,13 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
VideoCapturerAndroidTestFixtures.stopRestartVideoSource(capturer);
}
- // TODO(perkj): Enable once VideoCapture to texture support has landed in C++.
@SmallTest
- public void DISABLED_testStopRestartVideoSourceUsingTextures() throws InterruptedException {
- VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", null, EGL10.EGL_NO_CONTEXT);
+ public void testStopRestartVideoSourceUsingTextures() throws InterruptedException {
+ EglBase eglBase = EglBase.create();
+ VideoCapturerAndroid capturer =
+ VideoCapturerAndroid.create("", null, eglBase.getEglBaseContext());
VideoCapturerAndroidTestFixtures.stopRestartVideoSource(capturer);
+ eglBase.release();
}
@SmallTest
@@ -215,13 +221,50 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
@SmallTest
public void testStartStopWithDifferentResolutionsUsingTextures() throws InterruptedException {
+ EglBase eglBase = EglBase.create();
String deviceName = CameraEnumerationAndroid.getDeviceName(0);
VideoCapturerAndroid capturer =
- VideoCapturerAndroid.create(deviceName, null, EGL10.EGL_NO_CONTEXT);
+ VideoCapturerAndroid.create(deviceName, null, eglBase.getEglBaseContext());
VideoCapturerAndroidTestFixtures.startStopWithDifferentResolutions(capturer,
getInstrumentation().getContext());
+ eglBase.release();
+ }
+
+ @SmallTest
+ // This tests that an error is reported if the camera is already open
+ // when VideoCapturerAndroid is started.
+ public void testStartWhileCameraAlreadyOpened() throws InterruptedException {
+ String deviceName = CameraEnumerationAndroid.getDeviceName(0);
+ VideoCapturerAndroid capturer =
+ VideoCapturerAndroid.create(deviceName, null);
+ VideoCapturerAndroidTestFixtures.startWhileCameraIsAlreadyOpen(
+ capturer, getInstrumentation().getContext());
+ }
+
+ @SmallTest
+ // This tests that VideoCapturerAndroid can still be started if the camera is already open,
+ // provided the camera is closed while VideoCapturerAndroid is retrying to start.
+ public void testStartWhileCameraIsAlreadyOpenAndCloseCamera() throws InterruptedException {
+ String deviceName = CameraEnumerationAndroid.getDeviceName(0);
+ VideoCapturerAndroid capturer =
+ VideoCapturerAndroid.create(deviceName, null);
+ VideoCapturerAndroidTestFixtures.startWhileCameraIsAlreadyOpenAndCloseCamera(
+ capturer, getInstrumentation().getContext());
+ }
+
+ @SmallTest
+ // This tests that VideoCapturerAndroid.stopCapture can be called while VideoCapturerAndroid
+ // is retrying to start. Note: the method must be named with a "test" prefix for the
+ // instrumentation runner to pick it up.
+ public void testStartWhileCameraIsAlreadyOpenAndStop() throws InterruptedException {
+ String deviceName = CameraEnumerationAndroid.getDeviceName(0);
+ VideoCapturerAndroid capturer =
+ VideoCapturerAndroid.create(deviceName, null);
+ VideoCapturerAndroidTestFixtures.startWhileCameraIsAlreadyOpenAndStop(
+ capturer, getInstrumentation().getContext());
}
+
@SmallTest
// This tests what happens if buffers are returned after the capturer has
// been stopped and restarted. It does not test or use the C++ layer.
@@ -235,11 +278,13 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
@SmallTest
public void testReturnBufferLateUsingTextures() throws InterruptedException {
+ EglBase eglBase = EglBase.create();
String deviceName = CameraEnumerationAndroid.getDeviceName(0);
VideoCapturerAndroid capturer =
- VideoCapturerAndroid.create(deviceName, null, EGL10.EGL_NO_CONTEXT);
+ VideoCapturerAndroid.create(deviceName, null, eglBase.getEglBaseContext());
VideoCapturerAndroidTestFixtures.returnBufferLate(capturer,
getInstrumentation().getContext());
+ eglBase.release();
}
@MediumTest
@@ -251,11 +296,45 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
VideoCapturerAndroidTestFixtures.returnBufferLateEndToEnd(capturer);
}
- // TODO(perkj): Enable once VideoCapture to texture support has landed in C++.
@MediumTest
- public void DISABLED_testReturnBufferLateEndToEndUsingTextures() throws InterruptedException {
+ public void testReturnBufferLateEndToEndUsingTextures() throws InterruptedException {
+ EglBase eglBase = EglBase.create();
final VideoCapturerAndroid capturer =
- VideoCapturerAndroid.create("", null, EGL10.EGL_NO_CONTEXT);
+ VideoCapturerAndroid.create("", null, eglBase.getEglBaseContext());
VideoCapturerAndroidTestFixtures.returnBufferLateEndToEnd(capturer);
+ eglBase.release();
+ }
+
+ @MediumTest
+ // This tests that CameraEventsHandler.onCameraFreezed is triggered if video buffers are not
+ // returned to the capturer.
+ public void testCameraFreezedEventOnBufferStarvationUsingTextures() throws InterruptedException {
+ EglBase eglBase = EglBase.create();
+ VideoCapturerAndroidTestFixtures.CameraEvents cameraEvents =
+ VideoCapturerAndroidTestFixtures.createCameraEvents();
+ VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", cameraEvents,
+ eglBase.getEglBaseContext());
+ VideoCapturerAndroidTestFixtures.cameraFreezedEventOnBufferStarvationUsingTextures(capturer,
+ cameraEvents, getInstrumentation().getContext());
+ eglBase.release();
+ }
+
+ @MediumTest
+ // This tests that frames forwarded to a renderer are scaled if onOutputFormatRequest is
+ // called. This tests both the Java and C++ parts of the stack.
+ public void testScaleCameraOutput() throws InterruptedException {
+ VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", null);
+ VideoCapturerAndroidTestFixtures.scaleCameraOutput(capturer);
+ }
+
+ @MediumTest
+ // This tests that frames forwarded to a renderer are scaled if onOutputFormatRequest is
+ // called. This tests both the Java and C++ parts of the stack.
+ public void testScaleCameraOutputUsingTextures() throws InterruptedException {
+ EglBase eglBase = EglBase.create();
+ VideoCapturerAndroid capturer =
+ VideoCapturerAndroid.create("", null, eglBase.getEglBaseContext());
+ VideoCapturerAndroidTestFixtures.scaleCameraOutput(capturer);
+ eglBase.release();
}
}
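Each ...UsingTextures test above repeats the same create/use/release pattern around EglBase, and an assertion failure before eglBase.release() leaks the EGL context for the remainder of the instrumentation run. A try/finally wrapper is one way to make the release unconditional; this helper is hypothetical and not part of the patch.

// Hypothetical pattern, not in this patch: guarantee EglBase.release() even
// when the test body throws.
private interface EglTestBody {
  void run(EglBase.Context sharedContext) throws InterruptedException;
}

private static void withEglBase(EglTestBody body) throws InterruptedException {
  final EglBase eglBase = EglBase.create();
  try {
    body.run(eglBase.getEglBaseContext());
  } finally {
    eglBase.release();
  }
}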
diff --git a/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java b/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java
index 11b3ce98a0..0b42e33785 100644
--- a/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java
+++ b/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java
@@ -29,6 +29,7 @@ package org.webrtc;
import android.content.Context;
import android.hardware.Camera;
+import org.webrtc.VideoCapturerAndroidTestFixtures;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
import org.webrtc.VideoRenderer.I420Frame;
@@ -42,16 +43,32 @@ public class VideoCapturerAndroidTestFixtures {
static class RendererCallbacks implements VideoRenderer.Callbacks {
private int framesRendered = 0;
private Object frameLock = 0;
+ private int width = 0;
+ private int height = 0;
@Override
public void renderFrame(I420Frame frame) {
synchronized (frameLock) {
++framesRendered;
+ width = frame.rotatedWidth();
+ height = frame.rotatedHeight();
frameLock.notify();
}
VideoRenderer.renderFrameDone(frame);
}
+ public int frameWidth() {
+ synchronized (frameLock) {
+ return width;
+ }
+ }
+
+ public int frameHeight() {
+ synchronized (frameLock) {
+ return height;
+ }
+ }
+
public int WaitForNextFrameToRender() throws InterruptedException {
synchronized (frameLock) {
frameLock.wait();
@@ -102,11 +119,11 @@ public class VideoCapturerAndroidTestFixtures {
}
@Override
- public void onByteBufferFrameCaptured(byte[] frame, int length, int width, int height,
- int rotation, long timeStamp) {
+ public void onByteBufferFrameCaptured(byte[] frame, int width, int height, int rotation,
+ long timeStamp) {
synchronized (frameLock) {
++framesCaptured;
- frameSize = length;
+ frameSize = frame.length;
frameWidth = width;
frameHeight = height;
timestamps.add(timeStamp);
@@ -115,7 +132,8 @@ public class VideoCapturerAndroidTestFixtures {
}
@Override
public void onTextureFrameCaptured(
- int width, int height, int oesTextureId, float[] transformMatrix, long timeStamp) {
+ int width, int height, int oesTextureId, float[] transformMatrix, int rotation,
+ long timeStamp) {
synchronized (frameLock) {
++framesCaptured;
frameWidth = width;
@@ -174,9 +192,20 @@ public class VideoCapturerAndroidTestFixtures {
VideoCapturerAndroid.CameraEventsHandler {
public boolean onCameraOpeningCalled;
public boolean onFirstFrameAvailableCalled;
+ public final Object onCameraFreezedLock = new Object();
+ private String onCameraFreezedDescription;
@Override
- public void onCameraError(String errorDescription) { }
+ public void onCameraError(String errorDescription) {
+ }
+
+ @Override
+ public void onCameraFreezed(String errorDescription) {
+ synchronized (onCameraFreezedLock) {
+ onCameraFreezedDescription = errorDescription;
+ onCameraFreezedLock.notifyAll();
+ }
+ }
@Override
public void onCameraOpening(int cameraId) {
@@ -190,6 +219,13 @@ public class VideoCapturerAndroidTestFixtures {
@Override
public void onCameraClosed() { }
+
+ public String WaitForCameraFreezed() throws InterruptedException {
+ synchronized (onCameraFreezedLock) {
+ onCameraFreezedLock.wait();
+ return onCameraFreezedDescription;
+ }
+ }
}
static public CameraEvents createCameraEvents() {
@@ -275,8 +311,8 @@ public class VideoCapturerAndroidTestFixtures {
assertTrue(observer.WaitForCapturerToStart());
observer.WaitForNextCapturedFrame();
capturer.stopCapture();
- for (long timeStamp : observer.getCopyAndResetListOftimeStamps()) {
- capturer.returnBuffer(timeStamp);
+ if (capturer.isCapturingToTexture()) {
+ capturer.surfaceHelper.returnTextureFrame();
}
capturer.dispose();
@@ -296,9 +332,10 @@ public class VideoCapturerAndroidTestFixtures {
// Make sure camera is started and then stop it.
assertTrue(observer.WaitForCapturerToStart());
capturer.stopCapture();
- for (long timeStamp : observer.getCopyAndResetListOftimeStamps()) {
- capturer.returnBuffer(timeStamp);
+ if (capturer.isCapturingToTexture()) {
+ capturer.surfaceHelper.returnTextureFrame();
}
+
// We can't change |capturer| at this point, but we should not crash.
capturer.switchCamera(null);
capturer.onOutputFormatRequest(640, 480, 15);
@@ -357,17 +394,90 @@ public class VideoCapturerAndroidTestFixtures {
if (capturer.isCapturingToTexture()) {
assertEquals(0, observer.frameSize());
} else {
- assertEquals(format.frameSize(), observer.frameSize());
+ assertTrue(format.frameSize() <= observer.frameSize());
}
capturer.stopCapture();
- for (long timestamp : observer.getCopyAndResetListOftimeStamps()) {
- capturer.returnBuffer(timestamp);
+ if (capturer.isCapturingToTexture()) {
+ capturer.surfaceHelper.returnTextureFrame();
}
}
capturer.dispose();
assertTrue(capturer.isReleased());
}
+ static void waitUntilIdle(VideoCapturerAndroid capturer) throws InterruptedException {
+ final CountDownLatch barrier = new CountDownLatch(1);
+ capturer.getCameraThreadHandler().post(new Runnable() {
+ @Override public void run() {
+ barrier.countDown();
+ }
+ });
+ barrier.await();
+ }
+
+ static public void startWhileCameraIsAlreadyOpen(
+ VideoCapturerAndroid capturer, Context appContext) throws InterruptedException {
+ Camera camera = Camera.open(capturer.getCurrentCameraId());
+
+ final List<CaptureFormat> formats = capturer.getSupportedFormats();
+ final CameraEnumerationAndroid.CaptureFormat format = formats.get(0);
+
+ final FakeCapturerObserver observer = new FakeCapturerObserver();
+ capturer.startCapture(format.width, format.height, format.maxFramerate,
+ appContext, observer);
+
+ if (android.os.Build.VERSION.SDK_INT > android.os.Build.VERSION_CODES.LOLLIPOP_MR1) {
+ // The first opened camera client will be evicted.
+ assertTrue(observer.WaitForCapturerToStart());
+ capturer.stopCapture();
+ } else {
+ assertFalse(observer.WaitForCapturerToStart());
+ }
+
+ capturer.dispose();
+ camera.release();
+ }
+
+ static public void startWhileCameraIsAlreadyOpenAndCloseCamera(
+ VideoCapturerAndroid capturer, Context appContext) throws InterruptedException {
+ Camera camera = Camera.open(capturer.getCurrentCameraId());
+
+ final List<CaptureFormat> formats = capturer.getSupportedFormats();
+ final CameraEnumerationAndroid.CaptureFormat format = formats.get(0);
+
+ final FakeCapturerObserver observer = new FakeCapturerObserver();
+ capturer.startCapture(format.width, format.height, format.maxFramerate,
+ appContext, observer);
+ waitUntilIdle(capturer);
+
+ camera.release();
+
+ // Make sure camera is started and first frame is received and then stop it.
+ assertTrue(observer.WaitForCapturerToStart());
+ observer.WaitForNextCapturedFrame();
+ capturer.stopCapture();
+ if (capturer.isCapturingToTexture()) {
+ capturer.surfaceHelper.returnTextureFrame();
+ }
+ capturer.dispose();
+ assertTrue(capturer.isReleased());
+ }
+
+ static public void startWhileCameraIsAlreadyOpenAndStop(
+ VideoCapturerAndroid capturer, Context appContext) throws InterruptedException {
+ Camera camera = Camera.open(capturer.getCurrentCameraId());
+ final List<CaptureFormat> formats = capturer.getSupportedFormats();
+ final CameraEnumerationAndroid.CaptureFormat format = formats.get(0);
+
+ final FakeCapturerObserver observer = new FakeCapturerObserver();
+ capturer.startCapture(format.width, format.height, format.maxFramerate,
+ appContext, observer);
+ capturer.stopCapture();
+ capturer.dispose();
+ assertTrue(capturer.isReleased());
+ camera.release();
+ }
+
static public void returnBufferLate(VideoCapturerAndroid capturer,
Context appContext) throws InterruptedException {
FakeCapturerObserver observer = new FakeCapturerObserver();
@@ -387,9 +497,8 @@ public class VideoCapturerAndroidTestFixtures {
capturer.startCapture(format.width, format.height, format.maxFramerate,
appContext, observer);
observer.WaitForCapturerToStart();
-
- for (Long timeStamp : listOftimestamps) {
- capturer.returnBuffer(timeStamp);
+ if (capturer.isCapturingToTexture()) {
+ capturer.surfaceHelper.returnTextureFrame();
}
observer.WaitForNextCapturedFrame();
@@ -397,9 +506,10 @@ public class VideoCapturerAndroidTestFixtures {
listOftimestamps = observer.getCopyAndResetListOftimeStamps();
assertTrue(listOftimestamps.size() >= 1);
- for (Long timeStamp : listOftimestamps) {
- capturer.returnBuffer(timeStamp);
+ if (capturer.isCapturingToTexture()) {
+ capturer.surfaceHelper.returnTextureFrame();
}
+
capturer.dispose();
assertTrue(capturer.isReleased());
}
@@ -410,6 +520,7 @@ public class VideoCapturerAndroidTestFixtures {
final VideoSource source = factory.createVideoSource(capturer, new MediaConstraints());
final VideoTrack track = factory.createVideoTrack("dummy", source);
final FakeAsyncRenderer renderer = new FakeAsyncRenderer();
+
track.addRenderer(new VideoRenderer(renderer));
// Wait for at least one frame that has not been returned.
assertFalse(renderer.waitForPendingFrames().isEmpty());
@@ -420,9 +531,7 @@ public class VideoCapturerAndroidTestFixtures {
track.dispose();
source.dispose();
factory.dispose();
-
- // The pending frames should keep the JNI parts and |capturer| alive.
- assertFalse(capturer.isReleased());
+ assertTrue(capturer.isReleased());
// Return the frame(s), on a different thread out of spite.
final List<I420Frame> pendingFrames = renderer.waitForPendingFrames();
@@ -436,8 +545,71 @@ public class VideoCapturerAndroidTestFixtures {
});
returnThread.start();
returnThread.join();
+ }
+
+ static public void cameraFreezedEventOnBufferStarvationUsingTextures(
+ VideoCapturerAndroid capturer,
+ CameraEvents events, Context appContext) throws InterruptedException {
+ assertTrue("Not capturing to textures.", capturer.isCapturingToTexture());
- // Check that frames have successfully returned. This will cause |capturer| to be released.
+ final List<CaptureFormat> formats = capturer.getSupportedFormats();
+ final CameraEnumerationAndroid.CaptureFormat format = formats.get(0);
+
+ final FakeCapturerObserver observer = new FakeCapturerObserver();
+ capturer.startCapture(format.width, format.height, format.maxFramerate,
+ appContext, observer);
+ // Make sure camera is started.
+ assertTrue(observer.WaitForCapturerToStart());
+ // Since we don't return the buffer, we should get a starvation message if we are
+ // capturing to a texture.
+ assertEquals("Camera failure. Client must return video buffers.",
+ events.WaitForCameraFreezed());
+
+ capturer.stopCapture();
+ if (capturer.isCapturingToTexture()) {
+ capturer.surfaceHelper.returnTextureFrame();
+ }
+
+ capturer.dispose();
assertTrue(capturer.isReleased());
}
+
+ static public void scaleCameraOutput(VideoCapturerAndroid capturer) throws InterruptedException {
+ PeerConnectionFactory factory = new PeerConnectionFactory();
+ VideoSource source =
+ factory.createVideoSource(capturer, new MediaConstraints());
+ VideoTrack track = factory.createVideoTrack("dummy", source);
+ RendererCallbacks renderer = new RendererCallbacks();
+ track.addRenderer(new VideoRenderer(renderer));
+ assertTrue(renderer.WaitForNextFrameToRender() > 0);
+
+ final int startWidth = renderer.frameWidth();
+ final int startHeight = renderer.frameHeight();
+ final int frameRate = 30;
+ final int scaledWidth = startWidth / 2;
+ final int scaledHeight = startHeight / 2;
+
+ // Request the captured frames to be scaled.
+ capturer.onOutputFormatRequest(scaledWidth, scaledHeight, frameRate);
+
+ boolean gotExpectedResolution = false;
+ int numberOfInspectedFrames = 0;
+
+ do {
+ renderer.WaitForNextFrameToRender();
+ ++numberOfInspectedFrames;
+
+ gotExpectedResolution = (renderer.frameWidth() == scaledWidth
+ && renderer.frameHeight() == scaledHeight);
+ } while (!gotExpectedResolution && numberOfInspectedFrames < 30);
+
+ source.stop();
+ track.dispose();
+ source.dispose();
+ factory.dispose();
+ assertTrue(capturer.isReleased());
+
+ assertTrue(gotExpectedResolution);
+ }
+
}
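The monitor-based waits in these fixtures (WaitForCameraFreezed, WaitForNextFrameToRender) call wait() once, with no deadline and no predicate recheck, so a notification that fires before the waiter arrives, or a spurious wakeup, can stall or misreport a run. A sketch of the standard hardening, with an assumed timeout parameter, would look like this:

// Sketch only: predicate-checked wait with a deadline, robust against
// spurious wakeups and notifications that arrive before the waiter.
public String waitForCameraFreezed(long timeoutMs) throws InterruptedException {
  final long deadline = System.currentTimeMillis() + timeoutMs;
  synchronized (onCameraFreezedLock) {
    while (onCameraFreezedDescription == null) {
      final long remainingMs = deadline - System.currentTimeMillis();
      if (remainingMs <= 0) {
        return null;  // Caller decides whether a timeout fails the test.
      }
      onCameraFreezedLock.wait(remainingMs);
    }
    return onCameraFreezedDescription;
  }
}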
diff --git a/talk/app/webrtc/androidvideocapturer.cc b/talk/app/webrtc/androidvideocapturer.cc
index afcfb5bb7c..d8f12174db 100644
--- a/talk/app/webrtc/androidvideocapturer.cc
+++ b/talk/app/webrtc/androidvideocapturer.cc
@@ -26,6 +26,7 @@
*/
#include "talk/app/webrtc/androidvideocapturer.h"
+#include "talk/app/webrtc/java/jni/native_handle_impl.h"
#include "talk/media/webrtc/webrtcvideoframe.h"
#include "webrtc/base/common.h"
#include "webrtc/base/json.h"
@@ -57,11 +58,13 @@ class AndroidVideoCapturer::FrameFactory : public cricket::VideoFrameFactory {
const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
int rotation,
int64_t time_stamp_in_ns) {
+ RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
+ rotation == 270);
buffer_ = buffer;
captured_frame_.width = buffer->width();
captured_frame_.height = buffer->height();
captured_frame_.time_stamp = time_stamp_in_ns;
- captured_frame_.rotation = rotation;
+ captured_frame_.rotation = static_cast<webrtc::VideoRotation>(rotation);
}
void ClearCapturedFrame() {
@@ -85,7 +88,7 @@ class AndroidVideoCapturer::FrameFactory : public cricket::VideoFrameFactory {
rtc::scoped_ptr<cricket::VideoFrame> frame(new cricket::WebRtcVideoFrame(
ShallowCenterCrop(buffer_, dst_width, dst_height),
- captured_frame->time_stamp, captured_frame->GetRotation()));
+ captured_frame->time_stamp, captured_frame->rotation));
// Caller takes ownership.
// TODO(magjed): Change CreateAliasedFrame() to return a rtc::scoped_ptr.
return apply_rotation_ ? frame->GetCopyWithRotationApplied()->Copy()
@@ -99,10 +102,17 @@ class AndroidVideoCapturer::FrameFactory : public cricket::VideoFrameFactory {
int output_width,
int output_height) const override {
if (buffer_->native_handle() != nullptr) {
- // TODO(perkj): Implement CreateAliasedFrame properly for textures.
- rtc::scoped_ptr<cricket::VideoFrame> frame(new cricket::WebRtcVideoFrame(
- buffer_, input_frame->time_stamp, input_frame->GetRotation()));
- return frame.release();
+ // TODO(perkj) Implement cropping.
+ RTC_CHECK_EQ(cropped_input_width, buffer_->width());
+ RTC_CHECK_EQ(cropped_input_height, buffer_->height());
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> scaled_buffer(
+ static_cast<webrtc_jni::AndroidTextureBuffer*>(buffer_.get())
+ ->ScaleAndRotate(output_width, output_height,
+ apply_rotation_ ? input_frame->rotation :
+ webrtc::kVideoRotation_0));
+ return new cricket::WebRtcVideoFrame(
+ scaled_buffer, input_frame->time_stamp,
+ apply_rotation_ ? webrtc::kVideoRotation_0 : input_frame->rotation);
}
return VideoFrameFactory::CreateAliasedFrame(input_frame,
cropped_input_width,
diff --git a/talk/app/webrtc/androidvideocapturer.h b/talk/app/webrtc/androidvideocapturer.h
index df783bdf6f..c665eabd91 100644
--- a/talk/app/webrtc/androidvideocapturer.h
+++ b/talk/app/webrtc/androidvideocapturer.h
@@ -32,7 +32,7 @@
#include "talk/media/base/videocapturer.h"
#include "webrtc/base/thread_checker.h"
-#include "webrtc/common_video/interface/video_frame_buffer.h"
+#include "webrtc/common_video/include/video_frame_buffer.h"
namespace webrtc {
diff --git a/talk/app/webrtc/audiotrack.cc b/talk/app/webrtc/audiotrack.cc
index b0c91296f9..b3223cd29f 100644
--- a/talk/app/webrtc/audiotrack.cc
+++ b/talk/app/webrtc/audiotrack.cc
@@ -27,27 +27,82 @@
#include "talk/app/webrtc/audiotrack.h"
-#include <string>
+#include "webrtc/base/checks.h"
+
+using rtc::scoped_refptr;
namespace webrtc {
-static const char kAudioTrackKind[] = "audio";
+const char MediaStreamTrackInterface::kAudioKind[] = "audio";
+
+// static
+scoped_refptr<AudioTrack> AudioTrack::Create(
+ const std::string& id,
+ const scoped_refptr<AudioSourceInterface>& source) {
+ return new rtc::RefCountedObject<AudioTrack>(id, source);
+}
AudioTrack::AudioTrack(const std::string& label,
- AudioSourceInterface* audio_source)
- : MediaStreamTrack<AudioTrackInterface>(label),
- audio_source_(audio_source) {
+ const scoped_refptr<AudioSourceInterface>& source)
+ : MediaStreamTrack<AudioTrackInterface>(label), audio_source_(source) {
+ if (audio_source_) {
+ audio_source_->RegisterObserver(this);
+ OnChanged();
+ }
+}
+
+AudioTrack::~AudioTrack() {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ set_state(MediaStreamTrackInterface::kEnded);
+ if (audio_source_)
+ audio_source_->UnregisterObserver(this);
}
std::string AudioTrack::kind() const {
- return kAudioTrackKind;
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ return kAudioKind;
+}
+
+AudioSourceInterface* AudioTrack::GetSource() const {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ return audio_source_.get();
}
-rtc::scoped_refptr<AudioTrack> AudioTrack::Create(
- const std::string& id, AudioSourceInterface* source) {
- rtc::RefCountedObject<AudioTrack>* track =
- new rtc::RefCountedObject<AudioTrack>(id, source);
- return track;
+void AudioTrack::AddSink(AudioTrackSinkInterface* sink) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ if (audio_source_)
+ audio_source_->AddSink(sink);
+}
+
+void AudioTrack::RemoveSink(AudioTrackSinkInterface* sink) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ if (audio_source_)
+ audio_source_->RemoveSink(sink);
+}
+
+void AudioTrack::OnChanged() {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ if (state() == kFailed)
+ return; // We can't recover from this state (do we ever set it?).
+
+ TrackState new_state = kInitializing;
+
+ // |audio_source_| must be non-null if we ever get here.
+ switch (audio_source_->state()) {
+ case MediaSourceInterface::kLive:
+ case MediaSourceInterface::kMuted:
+ new_state = kLive;
+ break;
+ case MediaSourceInterface::kEnded:
+ new_state = kEnded;
+ break;
+ case MediaSourceInterface::kInitializing:
+ default:
+ // use kInitializing.
+ break;
+ }
+
+ set_state(new_state);
}
} // namespace webrtc
diff --git a/talk/app/webrtc/audiotrack.h b/talk/app/webrtc/audiotrack.h
index 750f272ba2..55f4837714 100644
--- a/talk/app/webrtc/audiotrack.h
+++ b/talk/app/webrtc/audiotrack.h
@@ -28,40 +28,47 @@
#ifndef TALK_APP_WEBRTC_AUDIOTRACK_H_
#define TALK_APP_WEBRTC_AUDIOTRACK_H_
+#include <string>
+
#include "talk/app/webrtc/mediastreaminterface.h"
#include "talk/app/webrtc/mediastreamtrack.h"
#include "talk/app/webrtc/notifier.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/base/thread_checker.h"
namespace webrtc {
-class AudioTrack : public MediaStreamTrack<AudioTrackInterface> {
+class AudioTrack : public MediaStreamTrack<AudioTrackInterface>,
+ public ObserverInterface {
+ protected:
+ // Protected ctor to force use of factory method.
+ AudioTrack(const std::string& label,
+ const rtc::scoped_refptr<AudioSourceInterface>& source);
+ ~AudioTrack() override;
+
public:
static rtc::scoped_refptr<AudioTrack> Create(
- const std::string& id, AudioSourceInterface* source);
-
- // AudioTrackInterface implementation.
- AudioSourceInterface* GetSource() const override {
- return audio_source_.get();
- }
- // TODO(xians): Implement these methods.
- void AddSink(AudioTrackSinkInterface* sink) override {}
- void RemoveSink(AudioTrackSinkInterface* sink) override {}
- bool GetSignalLevel(int* level) override { return false; }
- rtc::scoped_refptr<AudioProcessorInterface> GetAudioProcessor() override {
- return NULL;
- }
- cricket::AudioRenderer* GetRenderer() override { return NULL; }
+ const std::string& id,
+ const rtc::scoped_refptr<AudioSourceInterface>& source);
+ private:
// MediaStreamTrack implementation.
std::string kind() const override;
- protected:
- AudioTrack(const std::string& label, AudioSourceInterface* audio_source);
+ // AudioTrackInterface implementation.
+ AudioSourceInterface* GetSource() const override;
+
+ void AddSink(AudioTrackSinkInterface* sink) override;
+ void RemoveSink(AudioTrackSinkInterface* sink) override;
+
+ // ObserverInterface implementation.
+ void OnChanged() override;
private:
- rtc::scoped_refptr<AudioSourceInterface> audio_source_;
+ const rtc::scoped_refptr<AudioSourceInterface> audio_source_;
+ rtc::ThreadChecker thread_checker_;
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(AudioTrack);
};
} // namespace webrtc
diff --git a/talk/app/webrtc/dtlsidentitystore.cc b/talk/app/webrtc/dtlsidentitystore.cc
index 27587796bc..390ec0d0b7 100644
--- a/talk/app/webrtc/dtlsidentitystore.cc
+++ b/talk/app/webrtc/dtlsidentitystore.cc
@@ -27,6 +27,8 @@
#include "talk/app/webrtc/dtlsidentitystore.h"
+#include <utility>
+
#include "talk/app/webrtc/webrtcsessiondescriptionfactory.h"
#include "webrtc/base/logging.h"
@@ -72,7 +74,7 @@ class DtlsIdentityStoreImpl::WorkerTask : public sigslot::has_slots<>,
// Posting to |this| avoids touching |store_| on threads other than
// |signaling_thread_| and thus avoids having to use locks.
IdentityResultMessageData* msg = new IdentityResultMessageData(
- new IdentityResult(key_type_, identity.Pass()));
+ new IdentityResult(key_type_, std::move(identity)));
signaling_thread_->Post(this, MSG_GENERATE_IDENTITY_RESULT, msg);
}
@@ -93,7 +95,7 @@ class DtlsIdentityStoreImpl::WorkerTask : public sigslot::has_slots<>,
static_cast<IdentityResultMessageData*>(msg->pdata));
if (store_) {
store_->OnIdentityGenerated(pdata->data()->key_type_,
- pdata->data()->identity_.Pass());
+ std::move(pdata->data()->identity_));
}
}
break;
@@ -152,7 +154,7 @@ void DtlsIdentityStoreImpl::OnMessage(rtc::Message* msg) {
rtc::scoped_ptr<IdentityResultMessageData> pdata(
static_cast<IdentityResultMessageData*>(msg->pdata));
OnIdentityGenerated(pdata->data()->key_type_,
- pdata->data()->identity_.Pass());
+ std::move(pdata->data()->identity_));
break;
}
}
@@ -178,9 +180,9 @@ void DtlsIdentityStoreImpl::GenerateIdentity(
// Return identity async - post even though we are on |signaling_thread_|.
LOG(LS_VERBOSE) << "Using a free DTLS identity.";
++request_info_[key_type].gen_in_progress_counts_;
- IdentityResultMessageData* msg = new IdentityResultMessageData(
- new IdentityResult(key_type,
- request_info_[key_type].free_identity_.Pass()));
+ IdentityResultMessageData* msg =
+ new IdentityResultMessageData(new IdentityResult(
+ key_type, std::move(request_info_[key_type].free_identity_)));
signaling_thread_->Post(this, MSG_GENERATE_IDENTITY_RESULT, msg);
return;
}
@@ -228,7 +230,7 @@ void DtlsIdentityStoreImpl::OnIdentityGenerated(
// Return the result to the observer.
if (identity.get()) {
LOG(LS_VERBOSE) << "A DTLS identity is returned to an observer.";
- observer->OnSuccess(identity.Pass());
+ observer->OnSuccess(std::move(identity));
} else {
LOG(LS_WARNING) << "Failed to generate DTLS identity.";
observer->OnFailure(0);
diff --git a/talk/app/webrtc/dtlsidentitystore.h b/talk/app/webrtc/dtlsidentitystore.h
index a0eef98e1b..2a5309d34b 100644
--- a/talk/app/webrtc/dtlsidentitystore.h
+++ b/talk/app/webrtc/dtlsidentitystore.h
@@ -30,6 +30,7 @@
#include <queue>
#include <string>
+#include <utility>
#include "webrtc/base/messagehandler.h"
#include "webrtc/base/messagequeue.h"
@@ -129,7 +130,7 @@ class DtlsIdentityStoreImpl : public DtlsIdentityStoreInterface,
struct IdentityResult {
IdentityResult(rtc::KeyType key_type,
rtc::scoped_ptr<rtc::SSLIdentity> identity)
- : key_type_(key_type), identity_(identity.Pass()) {}
+ : key_type_(key_type), identity_(std::move(identity)) {}
rtc::KeyType key_type_;
rtc::scoped_ptr<rtc::SSLIdentity> identity_;
diff --git a/talk/app/webrtc/fakeportallocatorfactory.h b/talk/app/webrtc/fakeportallocatorfactory.h
deleted file mode 100644
index f326b62043..0000000000
--- a/talk/app/webrtc/fakeportallocatorfactory.h
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * libjingle
- * Copyright 2011 Google Inc.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions are met:
- *
- * 1. Redistributions of source code must retain the above copyright notice,
- * this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright notice,
- * this list of conditions and the following disclaimer in the documentation
- * and/or other materials provided with the distribution.
- * 3. The name of the author may not be used to endorse or promote products
- * derived from this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
- * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
- * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
- * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
- * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
- * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
- * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-// This file defines a fake port allocator factory used for testing.
-// This implementation creates instances of cricket::FakePortAllocator.
-
-#ifndef TALK_APP_WEBRTC_FAKEPORTALLOCATORFACTORY_H_
-#define TALK_APP_WEBRTC_FAKEPORTALLOCATORFACTORY_H_
-
-#include "talk/app/webrtc/peerconnectioninterface.h"
-#include "webrtc/p2p/client/fakeportallocator.h"
-
-namespace webrtc {
-
-class FakePortAllocatorFactory : public PortAllocatorFactoryInterface {
- public:
- static FakePortAllocatorFactory* Create() {
- rtc::RefCountedObject<FakePortAllocatorFactory>* allocator =
- new rtc::RefCountedObject<FakePortAllocatorFactory>();
- return allocator;
- }
-
- virtual cricket::PortAllocator* CreatePortAllocator(
- const std::vector<StunConfiguration>& stun_configurations,
- const std::vector<TurnConfiguration>& turn_configurations) {
- stun_configs_ = stun_configurations;
- turn_configs_ = turn_configurations;
- return new cricket::FakePortAllocator(rtc::Thread::Current(), NULL);
- }
-
- const std::vector<StunConfiguration>& stun_configs() const {
- return stun_configs_;
- }
-
- const std::vector<TurnConfiguration>& turn_configs() const {
- return turn_configs_;
- }
-
- void SetNetworkIgnoreMask(int network_ignore_mask) {}
-
- protected:
- FakePortAllocatorFactory() {}
- ~FakePortAllocatorFactory() {}
-
- private:
- std::vector<PortAllocatorFactoryInterface::StunConfiguration> stun_configs_;
- std::vector<PortAllocatorFactoryInterface::TurnConfiguration> turn_configs_;
-};
-
-} // namespace webrtc
-
-#endif // TALK_APP_WEBRTC_FAKEPORTALLOCATORFACTORY_H_
diff --git a/talk/app/webrtc/java/android/org/webrtc/Camera2Enumerator.java b/talk/app/webrtc/java/android/org/webrtc/Camera2Enumerator.java
index 097d1cd906..3444529596 100644
--- a/talk/app/webrtc/java/android/org/webrtc/Camera2Enumerator.java
+++ b/talk/app/webrtc/java/android/org/webrtc/Camera2Enumerator.java
@@ -27,7 +27,9 @@
package org.webrtc;
+import android.annotation.TargetApi;
import android.content.Context;
+
import android.graphics.ImageFormat;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
@@ -45,6 +47,7 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
+@TargetApi(21)
public class Camera2Enumerator implements CameraEnumerationAndroid.Enumerator {
private final static String TAG = "Camera2Enumerator";
private final static double NANO_SECONDS_PER_SECOND = 1.0e9;
diff --git a/talk/app/webrtc/java/android/org/webrtc/CameraEnumerationAndroid.java b/talk/app/webrtc/java/android/org/webrtc/CameraEnumerationAndroid.java
index 3e37f6afdc..5f68c3759e 100644
--- a/talk/app/webrtc/java/android/org/webrtc/CameraEnumerationAndroid.java
+++ b/talk/app/webrtc/java/android/org/webrtc/CameraEnumerationAndroid.java
@@ -29,7 +29,6 @@ package org.webrtc;
import static java.lang.Math.abs;
import static java.lang.Math.ceil;
-import android.hardware.Camera;
import android.graphics.ImageFormat;
import org.json.JSONArray;
@@ -72,7 +71,7 @@ public class CameraEnumerationAndroid {
// other image formats then this needs to be updated and
// VideoCapturerAndroid.getSupportedFormats need to return CaptureFormats of
// all imageFormats.
- public final int imageFormat = ImageFormat.YV12;
+ public final int imageFormat = ImageFormat.NV21;
public CaptureFormat(int width, int height, int minFramerate,
int maxFramerate) {
@@ -88,25 +87,15 @@ public class CameraEnumerationAndroid {
}
// Calculates the frame size of the specified image format. Currently only
- // supporting ImageFormat.YV12. The YV12's stride is the closest rounded up
- // multiple of 16 of the width and width and height are always even.
- // Android guarantees this:
- // http://developer.android.com/reference/android/hardware/Camera.Parameters.html#setPreviewFormat%28int%29
+ // supporting ImageFormat.NV21.
+ // The size is width * height * number of bytes per pixel.
+ // http://developer.android.com/reference/android/hardware/Camera.html#addCallbackBuffer(byte[])
public static int frameSize(int width, int height, int imageFormat) {
- if (imageFormat != ImageFormat.YV12) {
+ if (imageFormat != ImageFormat.NV21) {
throw new UnsupportedOperationException("Don't know how to calculate "
- + "the frame size of non-YV12 image formats.");
+ + "the frame size of non-NV21 image formats.");
}
- int yStride = roundUp(width, 16);
- int uvStride = roundUp(yStride / 2, 16);
- int ySize = yStride * height;
- int uvSize = uvStride * height / 2;
- return ySize + uvSize * 2;
- }
-
- // Rounds up |x| to the closest value that is a multiple of |alignment|.
- private static int roundUp(int x, int alignment) {
- return (int)ceil(x / (double)alignment) * alignment;
+ return (width * height * ImageFormat.getBitsPerPixel(imageFormat)) / 8;
}
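A worked check of the new calculation, for illustration: ImageFormat.getBitsPerPixel(ImageFormat.NV21) is 12, so unlike YV12 there is no 16-byte stride rounding in the buffer size.

// Illustrative: a 640x480 NV21 preview frame occupies
// 640 * 480 * 12 / 8 = 460800 bytes, which is the size the buffers passed
// to Camera.addCallbackBuffer() must have.
int nv21Bytes = CaptureFormat.frameSize(640, 480, ImageFormat.NV21);  // 460800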
@Override
@@ -114,21 +103,19 @@ public class CameraEnumerationAndroid {
return width + "x" + height + "@[" + minFramerate + ":" + maxFramerate + "]";
}
- @Override
- public boolean equals(Object that) {
- if (!(that instanceof CaptureFormat)) {
+ public boolean isSameFormat(final CaptureFormat that) {
+ if (that == null) {
return false;
}
- final CaptureFormat c = (CaptureFormat) that;
- return width == c.width && height == c.height && maxFramerate == c.maxFramerate
- && minFramerate == c.minFramerate;
+ return width == that.width && height == that.height && maxFramerate == that.maxFramerate
+ && minFramerate == that.minFramerate;
}
}
// Returns device names that can be used to create a new VideoCapturerAndroid.
public static String[] getDeviceNames() {
- String[] names = new String[Camera.getNumberOfCameras()];
- for (int i = 0; i < Camera.getNumberOfCameras(); ++i) {
+ String[] names = new String[android.hardware.Camera.getNumberOfCameras()];
+ for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
names[i] = getDeviceName(i);
}
return names;
@@ -136,22 +123,22 @@ public class CameraEnumerationAndroid {
// Returns number of cameras on device.
public static int getDeviceCount() {
- return Camera.getNumberOfCameras();
+ return android.hardware.Camera.getNumberOfCameras();
}
// Returns the name of the camera with camera index. Returns null if the
// camera can not be used.
public static String getDeviceName(int index) {
- Camera.CameraInfo info = new Camera.CameraInfo();
+ android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
try {
- Camera.getCameraInfo(index, info);
+ android.hardware.Camera.getCameraInfo(index, info);
} catch (Exception e) {
Logging.e(TAG, "getCameraInfo failed on index " + index,e);
return null;
}
String facing =
- (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) ? "front" : "back";
+ (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) ? "front" : "back";
return "Camera " + index + ", Facing " + facing
+ ", Orientation " + info.orientation;
}
@@ -159,13 +146,13 @@ public class CameraEnumerationAndroid {
// Returns the name of the front facing camera. Returns null if the
// camera can not be used or does not exist.
public static String getNameOfFrontFacingDevice() {
- return getNameOfDevice(Camera.CameraInfo.CAMERA_FACING_FRONT);
+ return getNameOfDevice(android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT);
}
// Returns the name of the back facing camera. Returns null if the
// camera can not be used or does not exist.
public static String getNameOfBackFacingDevice() {
- return getNameOfDevice(Camera.CameraInfo.CAMERA_FACING_BACK);
+ return getNameOfDevice(android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK);
}
public static String getSupportedFormatsAsJson(int id) throws JSONException {
@@ -194,7 +181,8 @@ public class CameraEnumerationAndroid {
}
}
- public static int[] getFramerateRange(Camera.Parameters parameters, final int framerate) {
+ public static int[] getFramerateRange(android.hardware.Camera.Parameters parameters,
+ final int framerate) {
List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
if (listFpsRange.isEmpty()) {
Logging.w(TAG, "No supported preview fps range");
@@ -203,27 +191,30 @@ public class CameraEnumerationAndroid {
return Collections.min(listFpsRange,
new ClosestComparator<int[]>() {
@Override int diff(int[] range) {
- return abs(framerate - range[Camera.Parameters.PREVIEW_FPS_MIN_INDEX])
- + abs(framerate - range[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
+ final int maxFpsWeight = 10;
+ return range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX]
+ + maxFpsWeight * abs(framerate
+ - range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
}
});
}
- public static Camera.Size getClosestSupportedSize(
- List<Camera.Size> supportedSizes, final int requestedWidth, final int requestedHeight) {
+ public static android.hardware.Camera.Size getClosestSupportedSize(
+ List<android.hardware.Camera.Size> supportedSizes, final int requestedWidth,
+ final int requestedHeight) {
return Collections.min(supportedSizes,
- new ClosestComparator<Camera.Size>() {
- @Override int diff(Camera.Size size) {
+ new ClosestComparator<android.hardware.Camera.Size>() {
+ @Override int diff(android.hardware.Camera.Size size) {
return abs(requestedWidth - size.width) + abs(requestedHeight - size.height);
}
});
}
private static String getNameOfDevice(int facing) {
- final Camera.CameraInfo info = new Camera.CameraInfo();
- for (int i = 0; i < Camera.getNumberOfCameras(); ++i) {
+ final android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
+ for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
try {
- Camera.getCameraInfo(i, info);
+ android.hardware.Camera.getCameraInfo(i, info);
if (info.facing == facing) {
return getDeviceName(i);
}
diff --git a/talk/app/webrtc/java/android/org/webrtc/CameraEnumerator.java b/talk/app/webrtc/java/android/org/webrtc/CameraEnumerator.java
index 2f35dc3493..54469cc341 100644
--- a/talk/app/webrtc/java/android/org/webrtc/CameraEnumerator.java
+++ b/talk/app/webrtc/java/android/org/webrtc/CameraEnumerator.java
@@ -27,7 +27,6 @@
package org.webrtc;
-import android.hardware.Camera;
import android.os.SystemClock;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
@@ -60,11 +59,11 @@ public class CameraEnumerator implements CameraEnumerationAndroid.Enumerator {
private List<CaptureFormat> enumerateFormats(int cameraId) {
Logging.d(TAG, "Get supported formats for camera index " + cameraId + ".");
final long startTimeMs = SystemClock.elapsedRealtime();
- final Camera.Parameters parameters;
- Camera camera = null;
+ final android.hardware.Camera.Parameters parameters;
+ android.hardware.Camera camera = null;
try {
Logging.d(TAG, "Opening camera with index " + cameraId);
- camera = Camera.open(cameraId);
+ camera = android.hardware.Camera.open(cameraId);
parameters = camera.getParameters();
} catch (RuntimeException e) {
Logging.e(TAG, "Open camera failed on camera index " + cameraId, e);
@@ -84,10 +83,10 @@ public class CameraEnumerator implements CameraEnumerationAndroid.Enumerator {
// getSupportedPreviewFpsRange() returns a sorted list. Take the fps range
// corresponding to the highest fps.
final int[] range = listFpsRange.get(listFpsRange.size() - 1);
- minFps = range[Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
- maxFps = range[Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
+ minFps = range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
+ maxFps = range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
}
- for (Camera.Size size : parameters.getSupportedPreviewSizes()) {
+ for (android.hardware.Camera.Size size : parameters.getSupportedPreviewSizes()) {
formatList.add(new CaptureFormat(size.width, size.height, minFps, maxFps));
}
} catch (Exception e) {
diff --git a/talk/app/webrtc/java/android/org/webrtc/EglBase.java b/talk/app/webrtc/java/android/org/webrtc/EglBase.java
index 2ee36882e8..035645bdd1 100644
--- a/talk/app/webrtc/java/android/org/webrtc/EglBase.java
+++ b/talk/app/webrtc/java/android/org/webrtc/EglBase.java
@@ -28,244 +28,108 @@
package org.webrtc;
import android.graphics.SurfaceTexture;
-import android.view.SurfaceHolder;
-
-import org.webrtc.Logging;
+import android.view.Surface;
import javax.microedition.khronos.egl.EGL10;
-import javax.microedition.khronos.egl.EGLConfig;
-import javax.microedition.khronos.egl.EGLContext;
-import javax.microedition.khronos.egl.EGLDisplay;
-import javax.microedition.khronos.egl.EGLSurface;
+
/**
- * Holds EGL state and utility methods for handling an EGLContext, an EGLDisplay, and an EGLSurface.
+ * Holds EGL state and utility methods for handling an EGL 1.0 EGLContext, an EGLDisplay,
+ * and an EGLSurface.
*/
-public final class EglBase {
- private static final String TAG = "EglBase";
+public abstract class EglBase {
+ // EGL wrapper for an actual EGLContext.
+ public static class Context {
+ }
+
// These constants are taken from EGL14.EGL_OPENGL_ES2_BIT and EGL14.EGL_CONTEXT_CLIENT_VERSION.
// https://android.googlesource.com/platform/frameworks/base/+/master/opengl/java/android/opengl/EGL14.java
// This is similar to what GLSurfaceView does:
// http://grepcode.com/file/repository.grepcode.com/java/ext/com.google.android/android/5.1.1_r1/android/opengl/GLSurfaceView.java#760
private static final int EGL_OPENGL_ES2_BIT = 4;
- private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
// Android-specific extension.
private static final int EGL_RECORDABLE_ANDROID = 0x3142;
- private final EGL10 egl;
- private EGLContext eglContext;
- private ConfigType configType;
- private EGLConfig eglConfig;
- private EGLDisplay eglDisplay;
- private EGLSurface eglSurface = EGL10.EGL_NO_SURFACE;
-
- // EGLConfig constructor type. Influences eglChooseConfig arguments.
- public static enum ConfigType {
- // No special parameters.
- PLAIN,
- // Configures with EGL_SURFACE_TYPE = EGL_PBUFFER_BIT.
- PIXEL_BUFFER,
- // Configures with EGL_RECORDABLE_ANDROID = 1.
- // Discourages EGL from using pixel formats that cannot efficiently be
- // converted to something usable by the video encoder.
- RECORDABLE
- }
-
- // Create root context without any EGLSurface or parent EGLContext. This can be used for branching
- // new contexts that share data.
- public EglBase() {
- this(EGL10.EGL_NO_CONTEXT, ConfigType.PLAIN);
- }
-
- // Create a new context with the specified config type, sharing data with sharedContext.
- public EglBase(EGLContext sharedContext, ConfigType configType) {
- this.egl = (EGL10) EGLContext.getEGL();
- this.configType = configType;
- eglDisplay = getEglDisplay();
- eglConfig = getEglConfig(eglDisplay, configType);
- eglContext = createEglContext(sharedContext, eglDisplay, eglConfig);
- }
-
- // Create EGLSurface from the Android SurfaceHolder.
- public void createSurface(SurfaceHolder surfaceHolder) {
- createSurfaceInternal(surfaceHolder);
- }
+ public static final int[] CONFIG_PLAIN = {
+ EGL10.EGL_RED_SIZE, 8,
+ EGL10.EGL_GREEN_SIZE, 8,
+ EGL10.EGL_BLUE_SIZE, 8,
+ EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+ EGL10.EGL_NONE
+ };
+ public static final int[] CONFIG_RGBA = {
+ EGL10.EGL_RED_SIZE, 8,
+ EGL10.EGL_GREEN_SIZE, 8,
+ EGL10.EGL_BLUE_SIZE, 8,
+ EGL10.EGL_ALPHA_SIZE, 8,
+ EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+ EGL10.EGL_NONE
+ };
+ public static final int[] CONFIG_PIXEL_BUFFER = {
+ EGL10.EGL_RED_SIZE, 8,
+ EGL10.EGL_GREEN_SIZE, 8,
+ EGL10.EGL_BLUE_SIZE, 8,
+ EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+ EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
+ EGL10.EGL_NONE
+ };
+ public static final int[] CONFIG_PIXEL_RGBA_BUFFER = {
+ EGL10.EGL_RED_SIZE, 8,
+ EGL10.EGL_GREEN_SIZE, 8,
+ EGL10.EGL_BLUE_SIZE, 8,
+ EGL10.EGL_ALPHA_SIZE, 8,
+ EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+ EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
+ EGL10.EGL_NONE
+ };
+ public static final int[] CONFIG_RECORDABLE = {
+ EGL10.EGL_RED_SIZE, 8,
+ EGL10.EGL_GREEN_SIZE, 8,
+ EGL10.EGL_BLUE_SIZE, 8,
+ EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+ EGL_RECORDABLE_ANDROID, 1,
+ EGL10.EGL_NONE
+ };
+
+ // Create a new context with the specified config attributes, sharing data with sharedContext.
+ // |sharedContext| can be null.
+ public static EglBase create(Context sharedContext, int[] configAttributes) {
+ return (EglBase14.isEGL14Supported()
+ && (sharedContext == null || sharedContext instanceof EglBase14.Context))
+ ? new EglBase14((EglBase14.Context) sharedContext, configAttributes)
+ : new EglBase10((EglBase10.Context) sharedContext, configAttributes);
+ }
+
+ public static EglBase create() {
+ return create(null, CONFIG_PLAIN);
+ }
+
+ public abstract void createSurface(Surface surface);
// Create EGLSurface from the Android SurfaceTexture.
- public void createSurface(SurfaceTexture surfaceTexture) {
- createSurfaceInternal(surfaceTexture);
- }
-
- // Create EGLSurface from either a SurfaceHolder or a SurfaceTexture.
- private void createSurfaceInternal(Object nativeWindow) {
- if (!(nativeWindow instanceof SurfaceHolder) && !(nativeWindow instanceof SurfaceTexture)) {
- throw new IllegalStateException("Input must be either a SurfaceHolder or SurfaceTexture");
- }
- checkIsNotReleased();
- if (configType == ConfigType.PIXEL_BUFFER) {
- Logging.w(TAG, "This EGL context is configured for PIXEL_BUFFER, but uses regular Surface");
- }
- if (eglSurface != EGL10.EGL_NO_SURFACE) {
- throw new RuntimeException("Already has an EGLSurface");
- }
- int[] surfaceAttribs = {EGL10.EGL_NONE};
- eglSurface = egl.eglCreateWindowSurface(eglDisplay, eglConfig, nativeWindow, surfaceAttribs);
- if (eglSurface == EGL10.EGL_NO_SURFACE) {
- throw new RuntimeException("Failed to create window surface");
- }
- }
+ public abstract void createSurface(SurfaceTexture surfaceTexture);
// Create dummy 1x1 pixel buffer surface so the context can be made current.
- public void createDummyPbufferSurface() {
- createPbufferSurface(1, 1);
- }
-
- public void createPbufferSurface(int width, int height) {
- checkIsNotReleased();
- if (configType != ConfigType.PIXEL_BUFFER) {
- throw new RuntimeException(
- "This EGL context is not configured to use a pixel buffer: " + configType);
- }
- if (eglSurface != EGL10.EGL_NO_SURFACE) {
- throw new RuntimeException("Already has an EGLSurface");
- }
- int[] surfaceAttribs = {EGL10.EGL_WIDTH, width, EGL10.EGL_HEIGHT, height, EGL10.EGL_NONE};
- eglSurface = egl.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs);
- if (eglSurface == EGL10.EGL_NO_SURFACE) {
- throw new RuntimeException("Failed to create pixel buffer surface");
- }
- }
+ public abstract void createDummyPbufferSurface();
- public EGLContext getContext() {
- return eglContext;
- }
+ public abstract void createPbufferSurface(int width, int height);
- public boolean hasSurface() {
- return eglSurface != EGL10.EGL_NO_SURFACE;
- }
+ public abstract Context getEglBaseContext();
- public int surfaceWidth() {
- final int widthArray[] = new int[1];
- egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_WIDTH, widthArray);
- return widthArray[0];
- }
+ public abstract boolean hasSurface();
- public int surfaceHeight() {
- final int heightArray[] = new int[1];
- egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_HEIGHT, heightArray);
- return heightArray[0];
- }
+ public abstract int surfaceWidth();
- public void releaseSurface() {
- if (eglSurface != EGL10.EGL_NO_SURFACE) {
- egl.eglDestroySurface(eglDisplay, eglSurface);
- eglSurface = EGL10.EGL_NO_SURFACE;
- }
- }
+ public abstract int surfaceHeight();
- private void checkIsNotReleased() {
- if (eglDisplay == EGL10.EGL_NO_DISPLAY || eglContext == EGL10.EGL_NO_CONTEXT
- || eglConfig == null) {
- throw new RuntimeException("This object has been released");
- }
- }
+ public abstract void releaseSurface();
- public void release() {
- checkIsNotReleased();
- releaseSurface();
- detachCurrent();
- egl.eglDestroyContext(eglDisplay, eglContext);
- egl.eglTerminate(eglDisplay);
- eglContext = EGL10.EGL_NO_CONTEXT;
- eglDisplay = EGL10.EGL_NO_DISPLAY;
- eglConfig = null;
- }
+ public abstract void release();
- public void makeCurrent() {
- checkIsNotReleased();
- if (eglSurface == EGL10.EGL_NO_SURFACE) {
- throw new RuntimeException("No EGLSurface - can't make current");
- }
- if (!egl.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
- throw new RuntimeException("eglMakeCurrent failed");
- }
- }
+ public abstract void makeCurrent();
// Detach the current EGL context, so that it can be made current on another thread.
- public void detachCurrent() {
- if (!egl.eglMakeCurrent(
- eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT)) {
- throw new RuntimeException("eglMakeCurrent failed");
- }
- }
+ public abstract void detachCurrent();
- public void swapBuffers() {
- checkIsNotReleased();
- if (eglSurface == EGL10.EGL_NO_SURFACE) {
- throw new RuntimeException("No EGLSurface - can't swap buffers");
- }
- egl.eglSwapBuffers(eglDisplay, eglSurface);
- }
-
- // Return an EGLDisplay, or die trying.
- private EGLDisplay getEglDisplay() {
- EGLDisplay eglDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
- if (eglDisplay == EGL10.EGL_NO_DISPLAY) {
- throw new RuntimeException("Unable to get EGL10 display");
- }
- int[] version = new int[2];
- if (!egl.eglInitialize(eglDisplay, version)) {
- throw new RuntimeException("Unable to initialize EGL10");
- }
- return eglDisplay;
- }
-
- // Return an EGLConfig, or die trying.
- private EGLConfig getEglConfig(EGLDisplay eglDisplay, ConfigType configType) {
- // Always RGB888, GLES2.
- int[] configAttributes = {
- EGL10.EGL_RED_SIZE, 8,
- EGL10.EGL_GREEN_SIZE, 8,
- EGL10.EGL_BLUE_SIZE, 8,
- EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
- EGL10.EGL_NONE, 0, // Allocate dummy fields for specific options.
- EGL10.EGL_NONE
- };
-
- // Fill in dummy fields based on configType.
- switch (configType) {
- case PLAIN:
- break;
- case PIXEL_BUFFER:
- configAttributes[configAttributes.length - 3] = EGL10.EGL_SURFACE_TYPE;
- configAttributes[configAttributes.length - 2] = EGL10.EGL_PBUFFER_BIT;
- break;
- case RECORDABLE:
- configAttributes[configAttributes.length - 3] = EGL_RECORDABLE_ANDROID;
- configAttributes[configAttributes.length - 2] = 1;
- break;
- default:
- throw new IllegalArgumentException();
- }
-
- EGLConfig[] configs = new EGLConfig[1];
- int[] numConfigs = new int[1];
- if (!egl.eglChooseConfig(
- eglDisplay, configAttributes, configs, configs.length, numConfigs)) {
- throw new RuntimeException("Unable to find RGB888 " + configType + " EGL config");
- }
- return configs[0];
- }
-
- // Return an EGLConfig, or die trying.
- private EGLContext createEglContext(
- EGLContext sharedContext, EGLDisplay eglDisplay, EGLConfig eglConfig) {
- int[] contextAttributes = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE};
- EGLContext eglContext =
- egl.eglCreateContext(eglDisplay, eglConfig, sharedContext, contextAttributes);
- if (eglContext == EGL10.EGL_NO_CONTEXT) {
- throw new RuntimeException("Failed to create EGL context");
- }
- return eglContext;
- }
+ public abstract void swapBuffers();
}
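EglBase is now an abstract class behind a static factory. A minimal off-screen usage sketch of the API declared above, mirroring how SurfaceTextureHelper uses it later in this change (single-threaded use assumed, error handling omitted):

  // create() picks EglBase14 when EGL 1.4 (API 18+) is available, else EglBase10.
  EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
  eglBase.createDummyPbufferSurface(); // 1x1 pbuffer so the context can be made current.
  eglBase.makeCurrent();
  // ... issue GLES calls on this thread ...
  eglBase.release(); // Releases the surface, then destroys the context and display.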
diff --git a/talk/app/webrtc/java/android/org/webrtc/EglBase10.java b/talk/app/webrtc/java/android/org/webrtc/EglBase10.java
new file mode 100644
index 0000000000..f2aa9857fa
--- /dev/null
+++ b/talk/app/webrtc/java/android/org/webrtc/EglBase10.java
@@ -0,0 +1,299 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.graphics.Canvas;
+import android.graphics.SurfaceTexture;
+import android.graphics.Rect;
+import android.view.Surface;
+import android.view.SurfaceHolder;
+
+import javax.microedition.khronos.egl.EGL10;
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.egl.EGLContext;
+import javax.microedition.khronos.egl.EGLDisplay;
+import javax.microedition.khronos.egl.EGLSurface;
+
+/**
+ * Holds EGL state and utility methods for handling an EGL 1.0 EGLContext, an EGLDisplay,
+ * and an EGLSurface.
+ */
+final class EglBase10 extends EglBase {
+ // This constant is taken from EGL14.EGL_CONTEXT_CLIENT_VERSION.
+ private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
+
+ private final EGL10 egl;
+ private EGLContext eglContext;
+ private EGLConfig eglConfig;
+ private EGLDisplay eglDisplay;
+ private EGLSurface eglSurface = EGL10.EGL_NO_SURFACE;
+
+ // EGL wrapper for an actual EGLContext.
+ public static class Context extends EglBase.Context {
+ private final EGLContext eglContext;
+
+ public Context(EGLContext eglContext) {
+ this.eglContext = eglContext;
+ }
+ }
+
+ // Create a new context with the specified config attributes, sharing data with sharedContext.
+ EglBase10(Context sharedContext, int[] configAttributes) {
+ this.egl = (EGL10) EGLContext.getEGL();
+ eglDisplay = getEglDisplay();
+ eglConfig = getEglConfig(eglDisplay, configAttributes);
+ eglContext = createEglContext(sharedContext, eglDisplay, eglConfig);
+ }
+
+ @Override
+ public void createSurface(Surface surface) {
+ /**
+ * We have to wrap Surface in a SurfaceHolder because for some reason eglCreateWindowSurface
+ * couldn't actually take a Surface object until API 17. Older versions fortunately just call
+ * SurfaceHolder.getSurface(), so we'll do that. No other methods are relevant.
+ */
+ class FakeSurfaceHolder implements SurfaceHolder {
+ private final Surface surface;
+
+ FakeSurfaceHolder(Surface surface) {
+ this.surface = surface;
+ }
+
+ @Override
+ public void addCallback(Callback callback) {}
+
+ @Override
+ public void removeCallback(Callback callback) {}
+
+ @Override
+ public boolean isCreating() {
+ return false;
+ }
+
+ @Deprecated
+ @Override
+ public void setType(int i) {}
+
+ @Override
+ public void setFixedSize(int i, int i2) {}
+
+ @Override
+ public void setSizeFromLayout() {}
+
+ @Override
+ public void setFormat(int i) {}
+
+ @Override
+ public void setKeepScreenOn(boolean b) {}
+
+ @Override
+ public Canvas lockCanvas() {
+ return null;
+ }
+
+ @Override
+ public Canvas lockCanvas(Rect rect) {
+ return null;
+ }
+
+ @Override
+ public void unlockCanvasAndPost(Canvas canvas) {}
+
+ @Override
+ public Rect getSurfaceFrame() {
+ return null;
+ }
+
+ @Override
+ public Surface getSurface() {
+ return surface;
+ }
+ }
+
+ createSurfaceInternal(new FakeSurfaceHolder(surface));
+ }
+
+ // Create EGLSurface from the Android SurfaceTexture.
+ @Override
+ public void createSurface(SurfaceTexture surfaceTexture) {
+ createSurfaceInternal(surfaceTexture);
+ }
+
+ // Create EGLSurface from either a SurfaceHolder or a SurfaceTexture.
+ private void createSurfaceInternal(Object nativeWindow) {
+ if (!(nativeWindow instanceof SurfaceHolder) && !(nativeWindow instanceof SurfaceTexture)) {
+ throw new IllegalStateException("Input must be either a SurfaceHolder or SurfaceTexture");
+ }
+ checkIsNotReleased();
+ if (eglSurface != EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("Already has an EGLSurface");
+ }
+ int[] surfaceAttribs = {EGL10.EGL_NONE};
+ eglSurface = egl.eglCreateWindowSurface(eglDisplay, eglConfig, nativeWindow, surfaceAttribs);
+ if (eglSurface == EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("Failed to create window surface");
+ }
+ }
+
+ // Create dummy 1x1 pixel buffer surface so the context can be made current.
+ @Override
+ public void createDummyPbufferSurface() {
+ createPbufferSurface(1, 1);
+ }
+
+ @Override
+ public void createPbufferSurface(int width, int height) {
+ checkIsNotReleased();
+ if (eglSurface != EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("Already has an EGLSurface");
+ }
+ int[] surfaceAttribs = {EGL10.EGL_WIDTH, width, EGL10.EGL_HEIGHT, height, EGL10.EGL_NONE};
+ eglSurface = egl.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs);
+ if (eglSurface == EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("Failed to create pixel buffer surface");
+ }
+ }
+
+ @Override
+ public org.webrtc.EglBase.Context getEglBaseContext() {
+ return new EglBase10.Context(eglContext);
+ }
+
+ @Override
+ public boolean hasSurface() {
+ return eglSurface != EGL10.EGL_NO_SURFACE;
+ }
+
+ @Override
+ public int surfaceWidth() {
+ final int widthArray[] = new int[1];
+ egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_WIDTH, widthArray);
+ return widthArray[0];
+ }
+
+ @Override
+ public int surfaceHeight() {
+ final int heightArray[] = new int[1];
+ egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_HEIGHT, heightArray);
+ return heightArray[0];
+ }
+
+ @Override
+ public void releaseSurface() {
+ if (eglSurface != EGL10.EGL_NO_SURFACE) {
+ egl.eglDestroySurface(eglDisplay, eglSurface);
+ eglSurface = EGL10.EGL_NO_SURFACE;
+ }
+ }
+
+ private void checkIsNotReleased() {
+ if (eglDisplay == EGL10.EGL_NO_DISPLAY || eglContext == EGL10.EGL_NO_CONTEXT
+ || eglConfig == null) {
+ throw new RuntimeException("This object has been released");
+ }
+ }
+
+ @Override
+ public void release() {
+ checkIsNotReleased();
+ releaseSurface();
+ detachCurrent();
+ egl.eglDestroyContext(eglDisplay, eglContext);
+ egl.eglTerminate(eglDisplay);
+ eglContext = EGL10.EGL_NO_CONTEXT;
+ eglDisplay = EGL10.EGL_NO_DISPLAY;
+ eglConfig = null;
+ }
+
+ @Override
+ public void makeCurrent() {
+ checkIsNotReleased();
+ if (eglSurface == EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't make current");
+ }
+ if (!egl.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
+ throw new RuntimeException("eglMakeCurrent failed");
+ }
+ }
+
+ // Detach the current EGL context, so that it can be made current on another thread.
+ @Override
+ public void detachCurrent() {
+ if (!egl.eglMakeCurrent(
+ eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT)) {
+ throw new RuntimeException("eglMakeCurrent failed");
+ }
+ }
+
+ @Override
+ public void swapBuffers() {
+ checkIsNotReleased();
+ if (eglSurface == EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't swap buffers");
+ }
+ egl.eglSwapBuffers(eglDisplay, eglSurface);
+ }
+
+ // Return an EGLDisplay, or die trying.
+ private EGLDisplay getEglDisplay() {
+ EGLDisplay eglDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
+ if (eglDisplay == EGL10.EGL_NO_DISPLAY) {
+ throw new RuntimeException("Unable to get EGL10 display");
+ }
+ int[] version = new int[2];
+ if (!egl.eglInitialize(eglDisplay, version)) {
+ throw new RuntimeException("Unable to initialize EGL10");
+ }
+ return eglDisplay;
+ }
+
+ // Return an EGLConfig, or die trying.
+ private EGLConfig getEglConfig(EGLDisplay eglDisplay, int[] configAttributes) {
+ EGLConfig[] configs = new EGLConfig[1];
+ int[] numConfigs = new int[1];
+ if (!egl.eglChooseConfig(
+ eglDisplay, configAttributes, configs, configs.length, numConfigs)) {
+ throw new RuntimeException("Unable to find any matching EGL config");
+ }
+ return configs[0];
+ }
+
+ // Return an EGLContext, or die trying.
+ private EGLContext createEglContext(
+ Context sharedContext, EGLDisplay eglDisplay, EGLConfig eglConfig) {
+ int[] contextAttributes = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE};
+ EGLContext rootContext =
+ sharedContext == null ? EGL10.EGL_NO_CONTEXT : sharedContext.eglContext;
+ EGLContext eglContext =
+ egl.eglCreateContext(eglDisplay, eglConfig, rootContext, contextAttributes);
+ if (eglContext == EGL10.EGL_NO_CONTEXT) {
+ throw new RuntimeException("Failed to create EGL context");
+ }
+ return eglContext;
+ }
+}
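A sketch of how the wrapped Context type added above is used for sharing. Note that EglBase.create() falls back to EglBase10 whenever the shared context is not an EglBase14.Context, since an EGL 1.0 context cannot be mixed with the EGL 1.4 path here:

  // Root context with no surface, used only as a share-group anchor.
  EglBase rootEgl = EglBase.create(null, EglBase.CONFIG_PLAIN);
  // Child context sharing textures/buffers with the root. If rootEgl is an
  // EglBase10, the child is forced to EglBase10 as well (see EglBase.create()).
  EglBase childEgl = EglBase.create(rootEgl.getEglBaseContext(), EglBase.CONFIG_PIXEL_BUFFER);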
diff --git a/talk/app/webrtc/java/android/org/webrtc/EglBase14.java b/talk/app/webrtc/java/android/org/webrtc/EglBase14.java
new file mode 100644
index 0000000000..c6f98c3b31
--- /dev/null
+++ b/talk/app/webrtc/java/android/org/webrtc/EglBase14.java
@@ -0,0 +1,254 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.annotation.TargetApi;
+import android.graphics.SurfaceTexture;
+import android.opengl.EGL14;
+import android.opengl.EGLConfig;
+import android.opengl.EGLContext;
+import android.opengl.EGLDisplay;
+import android.opengl.EGLExt;
+import android.opengl.EGLSurface;
+import android.view.Surface;
+
+import org.webrtc.Logging;
+
+/**
+ * Holds EGL state and utility methods for handling an EGL14 EGLContext, an EGLDisplay,
+ * and an EGLSurface.
+ */
+@TargetApi(18)
+final class EglBase14 extends EglBase {
+ private static final String TAG = "EglBase14";
+ private static final int EGLExt_SDK_VERSION = android.os.Build.VERSION_CODES.JELLY_BEAN_MR2;
+ private static final int CURRENT_SDK_VERSION = android.os.Build.VERSION.SDK_INT;
+ private EGLContext eglContext;
+ private EGLConfig eglConfig;
+ private EGLDisplay eglDisplay;
+ private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE;
+
+  // EGL 1.4 is supported from API 17, but EGLExt, which is used for setting the presentation
+  // timestamp on a surface, is only supported from API 18, so we require 18.
+ public static boolean isEGL14Supported() {
+ Logging.d(TAG, "SDK version: " + CURRENT_SDK_VERSION
+ + ". isEGL14Supported: " + (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION));
+ return (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION);
+ }
+
+ public static class Context extends EglBase.Context {
+ private final android.opengl.EGLContext egl14Context;
+
+ Context(android.opengl.EGLContext eglContext) {
+ this.egl14Context = eglContext;
+ }
+ }
+
+ // Create a new context with the specified config attributes, sharing data with sharedContext.
+ // |sharedContext| may be null.
+ EglBase14(EglBase14.Context sharedContext, int[] configAttributes) {
+ eglDisplay = getEglDisplay();
+ eglConfig = getEglConfig(eglDisplay, configAttributes);
+ eglContext = createEglContext(sharedContext, eglDisplay, eglConfig);
+ }
+
+ // Create EGLSurface from the Android Surface.
+ @Override
+ public void createSurface(Surface surface) {
+ createSurfaceInternal(surface);
+ }
+
+ // Create EGLSurface from the Android SurfaceTexture.
+ @Override
+ public void createSurface(SurfaceTexture surfaceTexture) {
+ createSurfaceInternal(surfaceTexture);
+ }
+
+ // Create EGLSurface from either Surface or SurfaceTexture.
+ private void createSurfaceInternal(Object surface) {
+ if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture)) {
+ throw new IllegalStateException("Input must be either a Surface or SurfaceTexture");
+ }
+ checkIsNotReleased();
+ if (eglSurface != EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("Already has an EGLSurface");
+ }
+ int[] surfaceAttribs = {EGL14.EGL_NONE};
+ eglSurface = EGL14.eglCreateWindowSurface(eglDisplay, eglConfig, surface, surfaceAttribs, 0);
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("Failed to create window surface");
+ }
+ }
+
+ @Override
+ public void createDummyPbufferSurface() {
+ createPbufferSurface(1, 1);
+ }
+
+ @Override
+ public void createPbufferSurface(int width, int height) {
+ checkIsNotReleased();
+ if (eglSurface != EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("Already has an EGLSurface");
+ }
+ int[] surfaceAttribs = {EGL14.EGL_WIDTH, width, EGL14.EGL_HEIGHT, height, EGL14.EGL_NONE};
+ eglSurface = EGL14.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs, 0);
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("Failed to create pixel buffer surface");
+ }
+ }
+
+ @Override
+ public Context getEglBaseContext() {
+ return new EglBase14.Context(eglContext);
+ }
+
+ @Override
+ public boolean hasSurface() {
+ return eglSurface != EGL14.EGL_NO_SURFACE;
+ }
+
+ @Override
+ public int surfaceWidth() {
+ final int widthArray[] = new int[1];
+ EGL14.eglQuerySurface(eglDisplay, eglSurface, EGL14.EGL_WIDTH, widthArray, 0);
+ return widthArray[0];
+ }
+
+ @Override
+ public int surfaceHeight() {
+ final int heightArray[] = new int[1];
+ EGL14.eglQuerySurface(eglDisplay, eglSurface, EGL14.EGL_HEIGHT, heightArray, 0);
+ return heightArray[0];
+ }
+
+ @Override
+ public void releaseSurface() {
+ if (eglSurface != EGL14.EGL_NO_SURFACE) {
+ EGL14.eglDestroySurface(eglDisplay, eglSurface);
+ eglSurface = EGL14.EGL_NO_SURFACE;
+ }
+ }
+
+ private void checkIsNotReleased() {
+ if (eglDisplay == EGL14.EGL_NO_DISPLAY || eglContext == EGL14.EGL_NO_CONTEXT
+ || eglConfig == null) {
+ throw new RuntimeException("This object has been released");
+ }
+ }
+
+ @Override
+ public void release() {
+ checkIsNotReleased();
+ releaseSurface();
+ detachCurrent();
+ EGL14.eglDestroyContext(eglDisplay, eglContext);
+ EGL14.eglReleaseThread();
+ EGL14.eglTerminate(eglDisplay);
+ eglContext = EGL14.EGL_NO_CONTEXT;
+ eglDisplay = EGL14.EGL_NO_DISPLAY;
+ eglConfig = null;
+ }
+
+ @Override
+ public void makeCurrent() {
+ checkIsNotReleased();
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't make current");
+ }
+ if (!EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
+ throw new RuntimeException("eglMakeCurrent failed");
+ }
+ }
+
+ // Detach the current EGL context, so that it can be made current on another thread.
+ @Override
+ public void detachCurrent() {
+ if (!EGL14.eglMakeCurrent(
+ eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT)) {
+ throw new RuntimeException("eglMakeCurrent failed");
+ }
+ }
+
+ @Override
+ public void swapBuffers() {
+ checkIsNotReleased();
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't swap buffers");
+ }
+ EGL14.eglSwapBuffers(eglDisplay, eglSurface);
+ }
+
+ public void swapBuffers(long timeStampNs) {
+ checkIsNotReleased();
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't swap buffers");
+ }
+ // See https://android.googlesource.com/platform/frameworks/native/+/tools_r22.2/opengl/specs/EGL_ANDROID_presentation_time.txt
+ EGLExt.eglPresentationTimeANDROID(eglDisplay, eglSurface, timeStampNs);
+ EGL14.eglSwapBuffers(eglDisplay, eglSurface);
+ }
+
+ // Return an EGLDisplay, or die trying.
+ private static EGLDisplay getEglDisplay() {
+ EGLDisplay eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
+ if (eglDisplay == EGL14.EGL_NO_DISPLAY) {
+ throw new RuntimeException("Unable to get EGL14 display");
+ }
+ int[] version = new int[2];
+ if (!EGL14.eglInitialize(eglDisplay, version, 0, version, 1)) {
+ throw new RuntimeException("Unable to initialize EGL14");
+ }
+ return eglDisplay;
+ }
+
+ // Return an EGLConfig, or die trying.
+ private static EGLConfig getEglConfig(EGLDisplay eglDisplay, int[] configAttributes) {
+ EGLConfig[] configs = new EGLConfig[1];
+ int[] numConfigs = new int[1];
+ if (!EGL14.eglChooseConfig(
+ eglDisplay, configAttributes, 0, configs, 0, configs.length, numConfigs, 0)) {
+ throw new RuntimeException("Unable to find any matching EGL config");
+ }
+ return configs[0];
+ }
+
+ // Return an EGLContext, or die trying.
+ private static EGLContext createEglContext(
+ EglBase14.Context sharedContext, EGLDisplay eglDisplay, EGLConfig eglConfig) {
+ int[] contextAttributes = {EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, EGL14.EGL_NONE};
+ EGLContext rootContext =
+ sharedContext == null ? EGL14.EGL_NO_CONTEXT : sharedContext.egl14Context;
+ EGLContext eglContext =
+ EGL14.eglCreateContext(eglDisplay, eglConfig, rootContext, contextAttributes, 0);
+ if (eglContext == EGL14.EGL_NO_CONTEXT) {
+ throw new RuntimeException("Failed to create EGL context");
+ }
+ return eglContext;
+ }
+}
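The EGL 1.4 variant adds the timestamped swapBuffers(long) used for encoder input surfaces. A sketch of the intended call pattern; encoderInputSurface and frameTimestampNs are assumed inputs, and the cast is only possible from code inside org.webrtc since EglBase14 is package-private:

  EglBase eglBase = EglBase.create(null, EglBase.CONFIG_RECORDABLE);
  eglBase.createSurface(encoderInputSurface); // e.g. from MediaCodec.createInputSurface().
  eglBase.makeCurrent();
  // ... draw the frame ...
  if (eglBase instanceof EglBase14) {
    // Tags the frame via eglPresentationTimeANDROID before swapping.
    ((EglBase14) eglBase).swapBuffers(frameTimestampNs);
  } else {
    eglBase.swapBuffers();
  }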
diff --git a/talk/app/webrtc/java/android/org/webrtc/GlRectDrawer.java b/talk/app/webrtc/java/android/org/webrtc/GlRectDrawer.java
index 2cb8af754d..6d3d5d2563 100644
--- a/talk/app/webrtc/java/android/org/webrtc/GlRectDrawer.java
+++ b/talk/app/webrtc/java/android/org/webrtc/GlRectDrawer.java
@@ -40,13 +40,13 @@ import java.util.IdentityHashMap;
import java.util.Map;
/**
- * Helper class to draw a quad that covers the entire viewport. Rotation, mirror, and cropping is
- * specified using a 4x4 texture coordinate transform matrix. The frame input can either be an OES
- * texture or YUV textures in I420 format. The GL state must be preserved between draw calls, this
- * is intentional to maximize performance. The function release() must be called manually to free
- * the resources held by this object.
+ * Helper class to draw an opaque quad on the target viewport location. Rotation, mirror, and
+ * cropping are specified using a 4x4 texture coordinate transform matrix. The frame input can
+ * either be an OES texture or YUV textures in I420 format. The GL state must be preserved between
+ * draw calls; this is intentional, to maximize performance. The function release() must be called
+ * manually to free the resources held by this object.
*/
-public class GlRectDrawer {
+public class GlRectDrawer implements RendererCommon.GlDrawer {
// Simple vertex shader, used for both YUV and OES.
private static final String VERTEX_SHADER_STRING =
"varying vec2 interp_tc;\n"
@@ -118,67 +118,31 @@ public class GlRectDrawer {
1.0f, 1.0f // Top right.
});
- // The keys are one of the fragments shaders above.
- private final Map<String, GlShader> shaders = new IdentityHashMap<String, GlShader>();
- private GlShader currentShader;
- private float[] currentTexMatrix;
- private int texMatrixLocation;
- // Intermediate copy buffer for uploading yuv frames that are not packed, i.e. stride > width.
- // TODO(magjed): Investigate when GL_UNPACK_ROW_LENGTH is available, or make a custom shader that
- // handles stride and compare performance with intermediate copy.
- private ByteBuffer copyBuffer;
+ private static class Shader {
+ public final GlShader glShader;
+ public final int texMatrixLocation;
- /**
- * Upload |planes| into |outputYuvTextures|, taking stride into consideration. |outputYuvTextures|
- * must have been generated in advance.
- */
- public void uploadYuvData(
- int[] outputYuvTextures, int width, int height, int[] strides, ByteBuffer[] planes) {
- // Make a first pass to see if we need a temporary copy buffer.
- int copyCapacityNeeded = 0;
- for (int i = 0; i < 3; ++i) {
- final int planeWidth = (i == 0) ? width : width / 2;
- final int planeHeight = (i == 0) ? height : height / 2;
- if (strides[i] > planeWidth) {
- copyCapacityNeeded = Math.max(copyCapacityNeeded, planeWidth * planeHeight);
- }
- }
- // Allocate copy buffer if necessary.
- if (copyCapacityNeeded > 0
- && (copyBuffer == null || copyBuffer.capacity() < copyCapacityNeeded)) {
- copyBuffer = ByteBuffer.allocateDirect(copyCapacityNeeded);
- }
- // Upload each plane.
- for (int i = 0; i < 3; ++i) {
- GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
- GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, outputYuvTextures[i]);
- final int planeWidth = (i == 0) ? width : width / 2;
- final int planeHeight = (i == 0) ? height : height / 2;
- // GLES only accepts packed data, i.e. stride == planeWidth.
- final ByteBuffer packedByteBuffer;
- if (strides[i] == planeWidth) {
- // Input is packed already.
- packedByteBuffer = planes[i];
- } else {
- VideoRenderer.nativeCopyPlane(
- planes[i], planeWidth, planeHeight, strides[i], copyBuffer, planeWidth);
- packedByteBuffer = copyBuffer;
- }
- GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, planeWidth, planeHeight, 0,
- GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, packedByteBuffer);
+ public Shader(String fragmentShader) {
+ this.glShader = new GlShader(VERTEX_SHADER_STRING, fragmentShader);
+ this.texMatrixLocation = glShader.getUniformLocation("texMatrix");
}
}
+ // The keys are one of the fragment shaders above.
+ private final Map<String, Shader> shaders = new IdentityHashMap<String, Shader>();
+
/**
* Draw an OES texture frame with specified texture transformation matrix. Required resources are
* allocated at the first call to this function.
*/
- public void drawOes(int oesTextureId, float[] texMatrix) {
- prepareShader(OES_FRAGMENT_SHADER_STRING);
+ @Override
+ public void drawOes(int oesTextureId, float[] texMatrix, int x, int y, int width, int height) {
+ prepareShader(OES_FRAGMENT_SHADER_STRING, texMatrix);
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
// updateTexImage() may be called from another thread in another EGL context, so we need to
// bind/unbind the texture in each draw call so that GLES understands it's a new texture.
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, oesTextureId);
- drawRectangle(texMatrix);
+ drawRectangle(x, y, width, height);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
}
@@ -186,10 +150,12 @@ public class GlRectDrawer {
* Draw an RGB(A) texture frame with specified texture transformation matrix. Required resources
* are allocated at the first call to this function.
*/
- public void drawRgb(int textureId, float[] texMatrix) {
- prepareShader(RGB_FRAGMENT_SHADER_STRING);
+ @Override
+ public void drawRgb(int textureId, float[] texMatrix, int x, int y, int width, int height) {
+ prepareShader(RGB_FRAGMENT_SHADER_STRING, texMatrix);
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
- drawRectangle(texMatrix);
+ drawRectangle(x, y, width, height);
// Unbind the texture as a precaution.
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
}
@@ -198,14 +164,15 @@ public class GlRectDrawer {
* Draw a YUV frame with specified texture transformation matrix. Required resources are
* allocated at the first call to this function.
*/
- public void drawYuv(int[] yuvTextures, float[] texMatrix) {
- prepareShader(YUV_FRAGMENT_SHADER_STRING);
+ @Override
+ public void drawYuv(int[] yuvTextures, float[] texMatrix, int x, int y, int width, int height) {
+ prepareShader(YUV_FRAGMENT_SHADER_STRING, texMatrix);
// Bind the textures.
for (int i = 0; i < 3; ++i) {
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
}
- drawRectangle(texMatrix);
+ drawRectangle(x, y, width, height);
// Unbind the textures as a precaution.
for (int i = 0; i < 3; ++i) {
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
@@ -213,60 +180,51 @@ public class GlRectDrawer {
}
}
- private void drawRectangle(float[] texMatrix) {
- // Try avoid uploading the texture if possible.
- if (!Arrays.equals(currentTexMatrix, texMatrix)) {
- currentTexMatrix = texMatrix.clone();
- // Copy the texture transformation matrix over.
- GLES20.glUniformMatrix4fv(texMatrixLocation, 1, false, texMatrix, 0);
- }
+ private void drawRectangle(int x, int y, int width, int height) {
// Draw quad.
+ GLES20.glViewport(x, y, width, height);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
}
- private void prepareShader(String fragmentShader) {
- // Lazy allocation.
- if (!shaders.containsKey(fragmentShader)) {
- final GlShader shader = new GlShader(VERTEX_SHADER_STRING, fragmentShader);
+ private void prepareShader(String fragmentShader, float[] texMatrix) {
+ final Shader shader;
+ if (shaders.containsKey(fragmentShader)) {
+ shader = shaders.get(fragmentShader);
+ } else {
+ // Lazy allocation.
+ shader = new Shader(fragmentShader);
shaders.put(fragmentShader, shader);
- shader.useProgram();
+ shader.glShader.useProgram();
// Initialize fragment shader uniform values.
if (fragmentShader == YUV_FRAGMENT_SHADER_STRING) {
- GLES20.glUniform1i(shader.getUniformLocation("y_tex"), 0);
- GLES20.glUniform1i(shader.getUniformLocation("u_tex"), 1);
- GLES20.glUniform1i(shader.getUniformLocation("v_tex"), 2);
+ GLES20.glUniform1i(shader.glShader.getUniformLocation("y_tex"), 0);
+ GLES20.glUniform1i(shader.glShader.getUniformLocation("u_tex"), 1);
+ GLES20.glUniform1i(shader.glShader.getUniformLocation("v_tex"), 2);
} else if (fragmentShader == RGB_FRAGMENT_SHADER_STRING) {
- GLES20.glUniform1i(shader.getUniformLocation("rgb_tex"), 0);
+ GLES20.glUniform1i(shader.glShader.getUniformLocation("rgb_tex"), 0);
} else if (fragmentShader == OES_FRAGMENT_SHADER_STRING) {
- GLES20.glUniform1i(shader.getUniformLocation("oes_tex"), 0);
+ GLES20.glUniform1i(shader.glShader.getUniformLocation("oes_tex"), 0);
} else {
throw new IllegalStateException("Unknown fragment shader: " + fragmentShader);
}
GlUtil.checkNoGLES2Error("Initialize fragment shader uniform values.");
// Initialize vertex shader attributes.
- shader.setVertexAttribArray("in_pos", 2, FULL_RECTANGLE_BUF);
- shader.setVertexAttribArray("in_tc", 2, FULL_RECTANGLE_TEX_BUF);
- }
-
- // Update GLES state if shader is not already current.
- final GlShader shader = shaders.get(fragmentShader);
- if (currentShader != shader) {
- currentShader = shader;
- shader.useProgram();
- GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
- currentTexMatrix = null;
- texMatrixLocation = shader.getUniformLocation("texMatrix");
+ shader.glShader.setVertexAttribArray("in_pos", 2, FULL_RECTANGLE_BUF);
+ shader.glShader.setVertexAttribArray("in_tc", 2, FULL_RECTANGLE_TEX_BUF);
}
+ shader.glShader.useProgram();
+ // Copy the texture transformation matrix over.
+ GLES20.glUniformMatrix4fv(shader.texMatrixLocation, 1, false, texMatrix, 0);
}
/**
* Release all GLES resources. This needs to be done manually; otherwise the resources are leaked.
*/
+ @Override
public void release() {
- for (GlShader shader : shaders.values()) {
- shader.release();
+ for (Shader shader : shaders.values()) {
+ shader.glShader.release();
}
shaders.clear();
- copyBuffer = null;
}
}
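GlRectDrawer now implements the new RendererCommon.GlDrawer interface, so the viewport is passed per draw call instead of being ambient GL state. A sketch with assumed texture inputs (oesTextureId and texMatrix would typically come from a SurfaceTexture):

  final RendererCommon.GlDrawer drawer = new GlRectDrawer();
  // Draw a camera OES texture into the lower-left quadrant of the current surface.
  drawer.drawOes(oesTextureId, texMatrix, 0, 0, surfaceWidth / 2, surfaceHeight / 2);
  drawer.release(); // GL resources must still be released manually.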
diff --git a/talk/app/webrtc/java/android/org/webrtc/NetworkMonitorAutoDetect.java b/talk/app/webrtc/java/android/org/webrtc/NetworkMonitorAutoDetect.java
index e3a7850db4..950dcdfa44 100644
--- a/talk/app/webrtc/java/android/org/webrtc/NetworkMonitorAutoDetect.java
+++ b/talk/app/webrtc/java/android/org/webrtc/NetworkMonitorAutoDetect.java
@@ -55,7 +55,7 @@ import android.util.Log;
* ACCESS_NETWORK_STATE permission.
*/
public class NetworkMonitorAutoDetect extends BroadcastReceiver {
- static enum ConnectionType {
+ public static enum ConnectionType {
CONNECTION_UNKNOWN,
CONNECTION_ETHERNET,
CONNECTION_WIFI,
@@ -96,6 +96,10 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
/** Queries the ConnectivityManager for information about the current connection. */
static class ConnectivityManagerDelegate {
+ /**
+ * Note: In some rare Android systems connectivityManager is null. We handle that
+ * gracefully below.
+ */
private final ConnectivityManager connectivityManager;
ConnectivityManagerDelegate(Context context) {
@@ -114,6 +118,9 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
* default network.
*/
NetworkState getNetworkState() {
+ if (connectivityManager == null) {
+ return new NetworkState(false, -1, -1);
+ }
return getNetworkState(connectivityManager.getActiveNetworkInfo());
}
@@ -123,6 +130,9 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
*/
@SuppressLint("NewApi")
NetworkState getNetworkState(Network network) {
+ if (connectivityManager == null) {
+ return new NetworkState(false, -1, -1);
+ }
return getNetworkState(connectivityManager.getNetworkInfo(network));
}
@@ -142,6 +152,9 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
*/
@SuppressLint("NewApi")
Network[] getAllNetworks() {
+ if (connectivityManager == null) {
+ return new Network[0];
+ }
return connectivityManager.getAllNetworks();
}
@@ -152,6 +165,9 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
*/
@SuppressLint("NewApi")
int getDefaultNetId() {
+ if (connectivityManager == null) {
+ return INVALID_NET_ID;
+ }
// Android Lollipop had no API to get the default network; only an
// API to return the NetworkInfo for the default network. To
// determine the default network one can find the network with
@@ -188,6 +204,9 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
*/
@SuppressLint("NewApi")
boolean hasInternetCapability(Network network) {
+ if (connectivityManager == null) {
+ return false;
+ }
final NetworkCapabilities capabilities =
connectivityManager.getNetworkCapabilities(network);
return capabilities != null && capabilities.hasCapability(NET_CAPABILITY_INTERNET);
@@ -240,7 +259,6 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
static final int INVALID_NET_ID = -1;
private static final String TAG = "NetworkMonitorAutoDetect";
- private static final int UNKNOWN_LINK_SPEED = -1;
private final IntentFilter intentFilter;
// Observer for the connection type change.
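Each guard added above follows the same shape: return a neutral value when the system service is missing rather than throwing. A condensed sketch of the pattern as it appears inside the delegate (variable names local to this sketch):

  ConnectivityManager cm =
      (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
  // getSystemService() can return null on some rare systems, so every query
  // degrades to a safe default instead of crashing.
  NetworkState state = (cm == null)
      ? new NetworkState(false, -1, -1) // Disconnected; unknown type and subtype.
      : getNetworkState(cm.getActiveNetworkInfo());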
diff --git a/talk/app/webrtc/java/android/org/webrtc/RendererCommon.java b/talk/app/webrtc/java/android/org/webrtc/RendererCommon.java
index 94d180da5a..5ada4cc416 100644
--- a/talk/app/webrtc/java/android/org/webrtc/RendererCommon.java
+++ b/talk/app/webrtc/java/android/org/webrtc/RendererCommon.java
@@ -28,8 +28,11 @@
package org.webrtc;
import android.graphics.Point;
+import android.opengl.GLES20;
import android.opengl.Matrix;
+import java.nio.ByteBuffer;
+
/**
* Static helper functions for renderer implementations.
*/
@@ -47,6 +50,73 @@ public class RendererCommon {
public void onFrameResolutionChanged(int videoWidth, int videoHeight, int rotation);
}
+ /** Interface for rendering frames on an EGLSurface. */
+ public static interface GlDrawer {
+ /**
+ * Functions for drawing frames with different sources. The rendering surface target is
+ * implied by the current EGL context of the calling thread and requires no explicit argument.
+ * The coordinates specify the viewport location on the surface target.
+ */
+ void drawOes(int oesTextureId, float[] texMatrix, int x, int y, int width, int height);
+ void drawRgb(int textureId, float[] texMatrix, int x, int y, int width, int height);
+ void drawYuv(int[] yuvTextures, float[] texMatrix, int x, int y, int width, int height);
+
+ /**
+ * Release all GL resources. This needs to be done manually; otherwise resources may leak.
+ */
+ void release();
+ }
+
+ /**
+ * Helper class for uploading YUV ByteBuffer frames to textures, handling stride > width. This
+ * class keeps an internal ByteBuffer to avoid unnecessary allocations for intermediate copies.
+ */
+ public static class YuvUploader {
+ // Intermediate copy buffer for uploading yuv frames that are not packed, i.e. stride > width.
+ // TODO(magjed): Investigate when GL_UNPACK_ROW_LENGTH is available, or make a custom shader
+ // that handles stride and compare performance with intermediate copy.
+ private ByteBuffer copyBuffer;
+
+ /**
+ * Upload |planes| into |outputYuvTextures|, taking stride into consideration.
+ * |outputYuvTextures| must have been generated in advance.
+ */
+ public void uploadYuvData(
+ int[] outputYuvTextures, int width, int height, int[] strides, ByteBuffer[] planes) {
+ final int[] planeWidths = new int[] {width, width / 2, width / 2};
+ final int[] planeHeights = new int[] {height, height / 2, height / 2};
+ // Make a first pass to see if we need a temporary copy buffer.
+ int copyCapacityNeeded = 0;
+ for (int i = 0; i < 3; ++i) {
+ if (strides[i] > planeWidths[i]) {
+ copyCapacityNeeded = Math.max(copyCapacityNeeded, planeWidths[i] * planeHeights[i]);
+ }
+ }
+ // Allocate copy buffer if necessary.
+ if (copyCapacityNeeded > 0
+ && (copyBuffer == null || copyBuffer.capacity() < copyCapacityNeeded)) {
+ copyBuffer = ByteBuffer.allocateDirect(copyCapacityNeeded);
+ }
+ // Upload each plane.
+ for (int i = 0; i < 3; ++i) {
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, outputYuvTextures[i]);
+ // GLES only accepts packed data, i.e. stride == planeWidth.
+ final ByteBuffer packedByteBuffer;
+ if (strides[i] == planeWidths[i]) {
+ // Input is packed already.
+ packedByteBuffer = planes[i];
+ } else {
+ VideoRenderer.nativeCopyPlane(
+ planes[i], planeWidths[i], planeHeights[i], strides[i], copyBuffer, planeWidths[i]);
+ packedByteBuffer = copyBuffer;
+ }
+ GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, planeWidths[i],
+ planeHeights[i], 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, packedByteBuffer);
+ }
+ }
+ }
+
// Types of video scaling:
// SCALE_ASPECT_FIT - video frame is scaled to fit the size of the view by
// maintaining the aspect ratio (black borders may be displayed).
@@ -182,9 +252,9 @@ public class RendererCommon {
}
// Each dimension is constrained on max display size and how much we are allowed to crop.
final int width = Math.min(maxDisplayWidth,
- (int) (maxDisplayHeight / minVisibleFraction * videoAspectRatio));
+ Math.round(maxDisplayHeight / minVisibleFraction * videoAspectRatio));
final int height = Math.min(maxDisplayHeight,
- (int) (maxDisplayWidth / minVisibleFraction / videoAspectRatio));
+ Math.round(maxDisplayWidth / minVisibleFraction / videoAspectRatio));
return new Point(width, height);
}
}
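The upload logic that moved out of GlRectDrawer is now reusable on its own. A sketch, assuming width, height, strides, and planes describe a decoded I420 frame:

  // The three textures must exist before the upload (see the class comment above).
  final int[] yuvTextures = new int[3];
  GLES20.glGenTextures(3, yuvTextures, 0);
  final RendererCommon.YuvUploader uploader = new RendererCommon.YuvUploader();
  // strides[i] may exceed the plane width; the uploader then repacks rows
  // through its internal direct ByteBuffer before calling glTexImage2D.
  uploader.uploadYuvData(yuvTextures, width, height, strides, planes);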
diff --git a/talk/app/webrtc/java/android/org/webrtc/SurfaceTextureHelper.java b/talk/app/webrtc/java/android/org/webrtc/SurfaceTextureHelper.java
index b9c158f848..b001d2a101 100644
--- a/talk/app/webrtc/java/android/org/webrtc/SurfaceTextureHelper.java
+++ b/talk/app/webrtc/java/android/org/webrtc/SurfaceTextureHelper.java
@@ -35,12 +35,12 @@ import android.os.Handler;
import android.os.HandlerThread;
import android.os.SystemClock;
+import java.nio.ByteBuffer;
+import java.nio.FloatBuffer;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
-import javax.microedition.khronos.egl.EGLContext;
-
/**
* Helper class to create and synchronize access to a SurfaceTexture. The caller will get notified
* of new frames in onTextureFrameAvailable(), and should call returnTextureFrame() when done with
@@ -51,7 +51,7 @@ import javax.microedition.khronos.egl.EGLContext;
* wrapping texture frames into webrtc::VideoFrames and also handles calling returnTextureFrame()
* when the webrtc::VideoFrame is no longer used.
*/
-final class SurfaceTextureHelper {
+class SurfaceTextureHelper {
private static final String TAG = "SurfaceTextureHelper";
/**
* Callback interface for being notified that a new texture frame is available. The calls will be
@@ -65,7 +65,7 @@ final class SurfaceTextureHelper {
int oesTextureId, float[] transformMatrix, long timestampNs);
}
- public static SurfaceTextureHelper create(EGLContext sharedContext) {
+ public static SurfaceTextureHelper create(EglBase.Context sharedContext) {
return create(sharedContext, null);
}
@@ -74,7 +74,8 @@ final class SurfaceTextureHelper {
* |handler| is non-null, the callback will be executed on that handler's thread. If |handler| is
* null, a dedicated private thread is created for the callbacks.
*/
- public static SurfaceTextureHelper create(final EGLContext sharedContext, final Handler handler) {
+ public static SurfaceTextureHelper create(final EglBase.Context sharedContext,
+ final Handler handler) {
final Handler finalHandler;
if (handler != null) {
finalHandler = handler;
@@ -94,25 +95,240 @@ final class SurfaceTextureHelper {
});
}
+ // State for YUV conversion, instantiated on demand.
+ private static class YuvConverter {
+ private final EglBase eglBase;
+ private final GlShader shader;
+ private boolean released = false;
+
+ // Vertex coordinates in Normalized Device Coordinates, i.e.
+ // (-1, -1) is bottom-left and (1, 1) is top-right.
+ private static final FloatBuffer DEVICE_RECTANGLE =
+ GlUtil.createFloatBuffer(new float[] {
+ -1.0f, -1.0f, // Bottom left.
+ 1.0f, -1.0f, // Bottom right.
+ -1.0f, 1.0f, // Top left.
+ 1.0f, 1.0f, // Top right.
+ });
+
+ // Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right.
+ private static final FloatBuffer TEXTURE_RECTANGLE =
+ GlUtil.createFloatBuffer(new float[] {
+ 0.0f, 0.0f, // Bottom left.
+ 1.0f, 0.0f, // Bottom right.
+ 0.0f, 1.0f, // Top left.
+ 1.0f, 1.0f // Top right.
+ });
+
+ private static final String VERTEX_SHADER =
+ "varying vec2 interp_tc;\n"
+ + "attribute vec4 in_pos;\n"
+ + "attribute vec4 in_tc;\n"
+ + "\n"
+ + "uniform mat4 texMatrix;\n"
+ + "\n"
+ + "void main() {\n"
+ + " gl_Position = in_pos;\n"
+ + " interp_tc = (texMatrix * in_tc).xy;\n"
+ + "}\n";
+
+ private static final String FRAGMENT_SHADER =
+ "#extension GL_OES_EGL_image_external : require\n"
+ + "precision mediump float;\n"
+ + "varying vec2 interp_tc;\n"
+ + "\n"
+ + "uniform samplerExternalOES oesTex;\n"
+ // Difference in texture coordinate corresponding to one
+ // sub-pixel in the x direction.
+ + "uniform vec2 xUnit;\n"
+ // Color conversion coefficients, including constant term
+ + "uniform vec4 coeffs;\n"
+ + "\n"
+ + "void main() {\n"
+ // Since the alpha read from the texture is always 1, this could
+ // be written as a mat4 x vec4 multiply. However, that seems to
+ // give a worse framerate, possibly because the additional
+ // multiplies by 1.0 consume resources. TODO(nisse): Could also
+ // try to do it as a vec3 x mat3x4, followed by an add in of a
+ // constant vector.
+ + " gl_FragColor.r = coeffs.a + dot(coeffs.rgb,\n"
+ + " texture2D(oesTex, interp_tc - 1.5 * xUnit).rgb);\n"
+ + " gl_FragColor.g = coeffs.a + dot(coeffs.rgb,\n"
+ + " texture2D(oesTex, interp_tc - 0.5 * xUnit).rgb);\n"
+ + " gl_FragColor.b = coeffs.a + dot(coeffs.rgb,\n"
+ + " texture2D(oesTex, interp_tc + 0.5 * xUnit).rgb);\n"
+ + " gl_FragColor.a = coeffs.a + dot(coeffs.rgb,\n"
+ + " texture2D(oesTex, interp_tc + 1.5 * xUnit).rgb);\n"
+ + "}\n";
+
+ private int texMatrixLoc;
+ private int xUnitLoc;
+ private int coeffsLoc;
+
+ YuvConverter(EglBase.Context sharedContext) {
+ eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_RGBA_BUFFER);
+ eglBase.createDummyPbufferSurface();
+ eglBase.makeCurrent();
+
+ shader = new GlShader(VERTEX_SHADER, FRAGMENT_SHADER);
+ shader.useProgram();
+ texMatrixLoc = shader.getUniformLocation("texMatrix");
+ xUnitLoc = shader.getUniformLocation("xUnit");
+ coeffsLoc = shader.getUniformLocation("coeffs");
+ GLES20.glUniform1i(shader.getUniformLocation("oesTex"), 0);
+ GlUtil.checkNoGLES2Error("Initialize fragment shader uniform values.");
+ // Initialize vertex shader attributes.
+ shader.setVertexAttribArray("in_pos", 2, DEVICE_RECTANGLE);
+ // If the width is not a multiple of 4 pixels, the texture
+ // will be scaled up slightly and clipped at the right border.
+ shader.setVertexAttribArray("in_tc", 2, TEXTURE_RECTANGLE);
+ eglBase.detachCurrent();
+ }
+
+ synchronized void convert(ByteBuffer buf,
+ int width, int height, int stride, int textureId, float[] transformMatrix) {
+ if (released) {
+ throw new IllegalStateException(
+ "YuvConverter.convert called on released object");
+ }
+
+ // We draw into a buffer laid out like
+ //
+ // +---------+
+ // | |
+ // | Y |
+ // | |
+ // | |
+ // +----+----+
+ // | U | V |
+ // | | |
+ // +----+----+
+ //
+ // In memory, we use the same stride for all of Y, U and V. The
+ // U data starts at offset |height| * |stride| from the Y data,
+ // and the V data starts at offset |stride/2| from the U
+ // data, with rows of U and V data alternating.
+ //
+ // Now, it would have made sense to allocate a pixel buffer with
+ // a single byte per pixel (EGL10.EGL_COLOR_BUFFER_TYPE,
+ // EGL10.EGL_LUMINANCE_BUFFER), but that seems to be
+ // unsupported by devices. So do the following hack: allocate an
+ // RGBA buffer of width |stride|/4. To render each of these
+ // large pixels, sample the texture at 4 different x coordinates
+ // and store the results in the four components.
+ //
+ // Since the V data needs to start on a boundary of such a
+ // larger pixel, it is not sufficient that |stride| is even; it
+ // has to be a multiple of 8 pixels.
+
+ if (stride % 8 != 0) {
+ throw new IllegalArgumentException(
+ "Invalid stride, must be a multiple of 8");
+ }
+ if (stride < width) {
+ throw new IllegalArgumentException(
+ "Invalid stride, must be >= width");
+ }
+
+ int y_width = (width+3) / 4;
+ int uv_width = (width+7) / 8;
+ int uv_height = (height+1)/2;
+ int total_height = height + uv_height;
+ int size = stride * total_height;
+
+ if (buf.capacity() < size) {
+ throw new IllegalArgumentException("YuvConverter.convert called with too small buffer");
+ }
+ // Produce a frame buffer starting at top-left corner, not
+ // bottom-left.
+ transformMatrix =
+ RendererCommon.multiplyMatrices(transformMatrix,
+ RendererCommon.verticalFlipMatrix());
+
+ // Create a new pbuffer surface with the correct size if needed.
+ if (eglBase.hasSurface()) {
+ if (eglBase.surfaceWidth() != stride/4 ||
eglBase.surfaceHeight() != total_height) {
+ eglBase.releaseSurface();
+ eglBase.createPbufferSurface(stride/4, total_height);
+ }
+ } else {
+ eglBase.createPbufferSurface(stride/4, total_height);
+ }
+
+ eglBase.makeCurrent();
+
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
+ GLES20.glUniformMatrix4fv(texMatrixLoc, 1, false, transformMatrix, 0);
+
+ // Draw Y
+ GLES20.glViewport(0, 0, y_width, height);
+ // Matrix * (1;0;0;0) / width. Note that OpenGL uses column-major order.
+ GLES20.glUniform2f(xUnitLoc,
+ transformMatrix[0] / width,
+ transformMatrix[1] / width);
+ // Y'UV444 to RGB888, see
+ // https://en.wikipedia.org/wiki/YUV#Y.27UV444_to_RGB888_conversion.
+ // We use the ITU-R coefficients for U and V.
+ GLES20.glUniform4f(coeffsLoc, 0.299f, 0.587f, 0.114f, 0.0f);
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+
+ // Draw U
+ GLES20.glViewport(0, height, uv_width, uv_height);
+ // Matrix * (1;0;0;0) / (2*width). Note that OpenGL uses column-major order.
+ GLES20.glUniform2f(xUnitLoc,
+ transformMatrix[0] / (2.0f*width),
+ transformMatrix[1] / (2.0f*width));
+ GLES20.glUniform4f(coeffsLoc, -0.169f, -0.331f, 0.499f, 0.5f);
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+
+ // Draw V
+ GLES20.glViewport(stride/8, height, uv_width, uv_height);
+ GLES20.glUniform4f(coeffsLoc, 0.499f, -0.418f, -0.0813f, 0.5f);
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+
+ GLES20.glReadPixels(0, 0, stride/4, total_height, GLES20.GL_RGBA,
+ GLES20.GL_UNSIGNED_BYTE, buf);
+
+ GlUtil.checkNoGLES2Error("YuvConverter.convert");
+
+ // Unbind texture. Reportedly needed on some devices to get
+ // the texture updated from the camera.
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
+ eglBase.detachCurrent();
+ }
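
A quick sanity check on the coefficient vectors above, assuming the fragment shader computes each output channel as dot(coeffs.rgb, rgb) + coeffs.a (the shader source is outside this hunk): a pure white sample, r = g = b = 1.0, gives

    Y = 0.299 + 0.587 + 0.114          = 1.000    (full luma)
    U = -0.169 - 0.331 + 0.499 + 0.5   = 0.499    (~mid scale, no chroma)
    V = 0.499 - 0.418 - 0.0813 + 0.5   = 0.4997   (~mid scale, no chroma)

so achromatic input lands at full luma and mid-scale chroma, as expected for Y'UV.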
+
+ synchronized void release() {
+ released = true;
+ eglBase.makeCurrent();
+ shader.release();
+ eglBase.release();
+ }
+ }
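
For reference, a minimal sketch (illustrative, not part of the patch) of how a consumer could index the packed buffer that convert() fills, following the layout comment above: Y samples at row * stride + col, chroma rows starting at height * stride, with the V half of each chroma row offset by stride / 2:

    import java.nio.ByteBuffer;

    final class PackedYuvReader {
      static int ySample(ByteBuffer buf, int stride, int x, int y) {
        return buf.get(y * stride + x) & 0xff;
      }
      static int uSample(ByteBuffer buf, int stride, int height, int x, int y) {
        // One chroma row per two luma rows, starting at |height| * |stride|.
        return buf.get(height * stride + (y / 2) * stride + x / 2) & 0xff;
      }
      static int vSample(ByteBuffer buf, int stride, int height, int x, int y) {
        // V shares the chroma rows with U, offset by |stride| / 2.
        return buf.get(height * stride + (y / 2) * stride + stride / 2 + x / 2) & 0xff;
      }
    }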
+
private final Handler handler;
- private final boolean isOwningThread;
+ private boolean isOwningThread;
private final EglBase eglBase;
private final SurfaceTexture surfaceTexture;
private final int oesTextureId;
+ private YuvConverter yuvConverter;
+
private OnTextureFrameAvailableListener listener;
// The possible states of this class.
private boolean hasPendingTexture = false;
- private boolean isTextureInUse = false;
+ private volatile boolean isTextureInUse = false;
private boolean isQuitting = false;
- private SurfaceTextureHelper(EGLContext sharedContext, Handler handler, boolean isOwningThread) {
+ private SurfaceTextureHelper(EglBase.Context sharedContext,
+ Handler handler, boolean isOwningThread) {
if (handler.getLooper().getThread() != Thread.currentThread()) {
throw new IllegalStateException("SurfaceTextureHelper must be created on the handler thread");
}
this.handler = handler;
this.isOwningThread = isOwningThread;
- eglBase = new EglBase(sharedContext, EglBase.ConfigType.PIXEL_BUFFER);
+ eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_BUFFER);
eglBase.createDummyPbufferSurface();
eglBase.makeCurrent();
@@ -120,6 +336,18 @@ final class SurfaceTextureHelper {
surfaceTexture = new SurfaceTexture(oesTextureId);
}
+ private YuvConverter getYuvConverter() {
+    // |yuvConverter| is assigned at most once and never reset, so a
+    // non-null read can take the unsynchronized fast path below.
+ if (yuvConverter != null)
+ return yuvConverter;
+
+    synchronized (this) {
+ if (yuvConverter == null)
+ yuvConverter = new YuvConverter(eglBase.getEglBaseContext());
+ return yuvConverter;
+ }
+ }
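
Note that the unsynchronized fast path above reads a plain (non-volatile) field, which the Java memory model does not guarantee to publish safely. A conventional double-checked-locking variant (a sketch, not the patch's code) would declare the field volatile:

    private volatile YuvConverter yuvConverter;

    private YuvConverter getYuvConverter() {
      YuvConverter converter = yuvConverter;  // Single volatile read on the fast path.
      if (converter == null) {
        synchronized (this) {
          converter = yuvConverter;
          if (converter == null) {
            converter = new YuvConverter(eglBase.getEglBaseContext());
            yuvConverter = converter;
          }
        }
      }
      return converter;
    }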
+
/**
* Start to stream textures to the given |listener|.
* A Listener can only be set once.
@@ -164,12 +392,19 @@ final class SurfaceTextureHelper {
});
}
+ public boolean isTextureInUse() {
+ return isTextureInUse;
+ }
+
/**
* Call disconnect() to stop receiving frames. Resources are released when the texture frame has
* been returned by a call to returnTextureFrame(). You are guaranteed to not receive any more
* onTextureFrameAvailable() after this function returns.
*/
public void disconnect() {
+ if (!isOwningThread) {
+ throw new IllegalStateException("Must call disconnect(handler).");
+ }
if (handler.getLooper().getThread() == Thread.currentThread()) {
isQuitting = true;
if (!isTextureInUse) {
@@ -190,6 +425,28 @@ final class SurfaceTextureHelper {
ThreadUtils.awaitUninterruptibly(barrier);
}
+ /**
+ * Call disconnect() to stop receiving frames and quit the looper used by |handler|.
+ * Resources are released when the texture frame has been returned by a call to
+ * returnTextureFrame(). You are guaranteed to not receive any more
+ * onTextureFrameAvailable() after this function returns.
+ */
+ public void disconnect(Handler handler) {
+ if (this.handler != handler) {
+ throw new IllegalStateException("Wrong handler.");
+ }
+ isOwningThread = true;
+ disconnect();
+ }
+
+ public void textureToYUV(ByteBuffer buf,
+      int width, int height, int stride, int textureId, float[] transformMatrix) {
+ if (textureId != oesTextureId)
+ throw new IllegalStateException("textureToByteBuffer called with unexpected textureId");
+
+ getYuvConverter().convert(buf, width, height, stride, textureId, transformMatrix);
+ }
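
An illustrative call site (hypothetical; |helper| is this SurfaceTextureHelper, and |oesTextureId| and |transformMatrix| are the values delivered through onTextureFrameAvailable()), sized to satisfy the constraints convert() enforces:

    // Round the stride up to the multiple of 8 that convert() requires.
    final int stride = ((width + 7) / 8) * 8;
    final int totalHeight = height + (height + 1) / 2;
    // Direct allocation so glReadPixels() can write into it from native code.
    final ByteBuffer buf = ByteBuffer.allocateDirect(stride * totalHeight);
    helper.textureToYUV(buf, width, height, stride, oesTextureId, transformMatrix);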
+
private void tryDeliverTextureFrame() {
if (handler.getLooper().getThread() != Thread.currentThread()) {
throw new IllegalStateException("Wrong thread.");
@@ -218,12 +475,14 @@ final class SurfaceTextureHelper {
if (isTextureInUse || !isQuitting) {
throw new IllegalStateException("Unexpected release.");
}
+ synchronized (this) {
+ if (yuvConverter != null)
+ yuvConverter.release();
+ }
eglBase.makeCurrent();
GLES20.glDeleteTextures(1, new int[] {oesTextureId}, 0);
surfaceTexture.release();
eglBase.release();
- if (isOwningThread) {
- handler.getLooper().quit();
- }
+ handler.getLooper().quit();
}
}
diff --git a/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java b/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java
index d7c9e2af0a..fa199b33c8 100644
--- a/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java
+++ b/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java
@@ -28,10 +28,9 @@
package org.webrtc;
import android.content.Context;
+import android.content.res.Resources.NotFoundException;
import android.graphics.Point;
-import android.graphics.SurfaceTexture;
import android.opengl.GLES20;
-import android.opengl.Matrix;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.AttributeSet;
@@ -67,7 +66,8 @@ public class SurfaceViewRenderer extends SurfaceView
 // EGL and GL resources for drawing YUV/OES textures. After initialization, these are only accessed
// from the render thread.
private EglBase eglBase;
- private GlRectDrawer drawer;
+ private final RendererCommon.YuvUploader yuvUploader = new RendererCommon.YuvUploader();
+ private RendererCommon.GlDrawer drawer;
// Texture ids for YUV frames. Allocated on first arrival of a YUV frame.
private int[] yuvTextures = null;
@@ -77,23 +77,22 @@ public class SurfaceViewRenderer extends SurfaceView
// These variables are synchronized on |layoutLock|.
private final Object layoutLock = new Object();
- // These three different dimension values are used to keep track of the state in these functions:
- // requestLayout() -> onMeasure() -> onLayout() -> surfaceChanged().
- // requestLayout() is triggered internally by frame size changes, but can also be triggered
- // externally by layout update requests.
- // Most recent measurement specification from onMeasure().
- private int widthSpec;
- private int heightSpec;
- // Current size on screen in pixels. Updated in onLayout(), and should be consistent with
- // |widthSpec|/|heightSpec| after that.
- private int layoutWidth;
- private int layoutHeight;
- // Current surface size of the underlying Surface. Updated in surfaceChanged(), and should be
- // consistent with |layoutWidth|/|layoutHeight| after that.
+ // These dimension values are used to keep track of the state in these functions: onMeasure(),
+ // onLayout(), and surfaceChanged(). A new layout is triggered with requestLayout(). This happens
+ // internally when the incoming frame size changes. requestLayout() can also be triggered
+ // externally. The layout change is a two pass process: first onMeasure() is called in a top-down
+ // traversal of the View tree, followed by an onLayout() pass that is also top-down. During the
+ // onLayout() pass, each parent is responsible for positioning its children using the sizes
+ // computed in the measure pass.
+  // |desiredLayoutSize| is the layout size we have requested in onMeasure() and are waiting
+  // to take effect.
+ private Point desiredLayoutSize = new Point();
+ // |layoutSize|/|surfaceSize| is the actual current layout/surface size. They are updated in
+ // onLayout() and surfaceChanged() respectively.
+ private final Point layoutSize = new Point();
// TODO(magjed): Enable hardware scaler with SurfaceHolder.setFixedSize(). This will decouple
// layout and surface size.
- private int surfaceWidth;
- private int surfaceHeight;
+ private final Point surfaceSize = new Point();
// |isSurfaceCreated| keeps track of the current status in surfaceCreated()/surfaceDestroyed().
private boolean isSurfaceCreated;
// Last rendered frame dimensions, or 0 if no frame has been rendered yet.
@@ -121,12 +120,18 @@ public class SurfaceViewRenderer extends SurfaceView
// Time in ns spent in renderFrameOnRenderThread() function.
private long renderTimeNs;
- // Runnable for posting frames to render thread..
+ // Runnable for posting frames to render thread.
private final Runnable renderFrameRunnable = new Runnable() {
@Override public void run() {
renderFrameOnRenderThread();
}
};
+ // Runnable for clearing Surface to black.
+ private final Runnable makeBlackRunnable = new Runnable() {
+ @Override public void run() {
+ makeBlack();
+ }
+ };
/**
* Standard View constructor. In order to render something, you must first call init().
@@ -149,17 +154,28 @@ public class SurfaceViewRenderer extends SurfaceView
* reinitialize the renderer after a previous init()/release() cycle.
*/
public void init(
- EGLContext sharedContext, RendererCommon.RendererEvents rendererEvents) {
+ EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents) {
+ init(sharedContext, rendererEvents, EglBase.CONFIG_PLAIN, new GlRectDrawer());
+ }
+
+ /**
+ * Initialize this class, sharing resources with |sharedContext|. The custom |drawer| will be used
+ * for drawing frames on the EGLSurface. This class is responsible for calling release() on
+ * |drawer|. It is allowed to call init() to reinitialize the renderer after a previous
+ * init()/release() cycle.
+ */
+ public void init(EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents,
+ int[] configAttributes, RendererCommon.GlDrawer drawer) {
synchronized (handlerLock) {
if (renderThreadHandler != null) {
- throw new IllegalStateException("Already initialized");
+ throw new IllegalStateException(getResourceName() + "Already initialized");
}
- Logging.d(TAG, "Initializing");
+ Logging.d(TAG, getResourceName() + "Initializing.");
this.rendererEvents = rendererEvents;
+ this.drawer = drawer;
renderThread = new HandlerThread(TAG);
renderThread.start();
- drawer = new GlRectDrawer();
- eglBase = new EglBase(sharedContext, EglBase.ConfigType.PLAIN);
+ eglBase = EglBase.create(sharedContext, configAttributes);
renderThreadHandler = new Handler(renderThread.getLooper());
}
tryCreateEglSurface();
@@ -174,8 +190,8 @@ public class SurfaceViewRenderer extends SurfaceView
runOnRenderThread(new Runnable() {
@Override public void run() {
synchronized (layoutLock) {
- if (isSurfaceCreated) {
- eglBase.createSurface(getHolder());
+ if (isSurfaceCreated && !eglBase.hasSurface()) {
+ eglBase.createSurface(getHolder().getSurface());
eglBase.makeCurrent();
// Necessary for YUV frames with odd width.
GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
@@ -195,7 +211,7 @@ public class SurfaceViewRenderer extends SurfaceView
final CountDownLatch eglCleanupBarrier = new CountDownLatch(1);
synchronized (handlerLock) {
if (renderThreadHandler == null) {
- Logging.d(TAG, "Already released");
+ Logging.d(TAG, getResourceName() + "Already released");
return;
}
// Release EGL and GL resources on render thread.
@@ -210,11 +226,8 @@ public class SurfaceViewRenderer extends SurfaceView
GLES20.glDeleteTextures(3, yuvTextures, 0);
yuvTextures = null;
}
- if (eglBase.hasSurface()) {
- // Clear last rendered image to black.
- GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
- eglBase.swapBuffers();
- }
+ // Clear last rendered image to black.
+ makeBlack();
eglBase.release();
eglBase = null;
eglCleanupBarrier.countDown();
@@ -242,6 +255,14 @@ public class SurfaceViewRenderer extends SurfaceView
frameRotation = 0;
rendererEvents = null;
}
+ resetStatistics();
+ }
+
+ /**
+ * Reset statistics. This will reset the logged statistics in logStatistics(), and
+ * RendererEvents.onFirstFrameRendered() will be called for the next frame.
+ */
+ public void resetStatistics() {
synchronized (statisticsLock) {
framesReceived = 0;
framesDropped = 0;
@@ -277,27 +298,28 @@ public class SurfaceViewRenderer extends SurfaceView
}
synchronized (handlerLock) {
if (renderThreadHandler == null) {
- Logging.d(TAG, "Dropping frame - SurfaceViewRenderer not initialized or already released.");
- } else {
- synchronized (frameLock) {
- if (pendingFrame == null) {
- updateFrameDimensionsAndReportEvents(frame);
- pendingFrame = frame;
- renderThreadHandler.post(renderFrameRunnable);
- return;
+ Logging.d(TAG, getResourceName()
+ + "Dropping frame - Not initialized or already released.");
+ VideoRenderer.renderFrameDone(frame);
+ return;
+ }
+ synchronized (frameLock) {
+ if (pendingFrame != null) {
+ // Drop old frame.
+ synchronized (statisticsLock) {
+ ++framesDropped;
}
+ VideoRenderer.renderFrameDone(pendingFrame);
}
+ pendingFrame = frame;
+ updateFrameDimensionsAndReportEvents(frame);
+ renderThreadHandler.post(renderFrameRunnable);
}
}
- // Drop frame.
- synchronized (statisticsLock) {
- ++framesDropped;
- }
- VideoRenderer.renderFrameDone(frame);
}
// Returns desired layout size given current measure specification and video aspect ratio.
- private Point getDesiredLayoutSize() {
+ private Point getDesiredLayoutSize(int widthSpec, int heightSpec) {
synchronized (layoutLock) {
final int maxWidth = getDefaultSize(Integer.MAX_VALUE, widthSpec);
final int maxHeight = getDefaultSize(Integer.MAX_VALUE, heightSpec);
@@ -317,18 +339,30 @@ public class SurfaceViewRenderer extends SurfaceView
@Override
protected void onMeasure(int widthSpec, int heightSpec) {
synchronized (layoutLock) {
- this.widthSpec = widthSpec;
- this.heightSpec = heightSpec;
- final Point size = getDesiredLayoutSize();
- setMeasuredDimension(size.x, size.y);
+ if (frameWidth == 0 || frameHeight == 0) {
+ super.onMeasure(widthSpec, heightSpec);
+ return;
+ }
+ desiredLayoutSize = getDesiredLayoutSize(widthSpec, heightSpec);
+ if (desiredLayoutSize.x != getMeasuredWidth() || desiredLayoutSize.y != getMeasuredHeight()) {
+        // Clear the surface as soon as possible before the layout change to avoid stretched video
+        // and other rendering artifacts. Don't wait for it to finish because the IO thread should
+        // never be blocked, so this is a best-effort attempt.
+ synchronized (handlerLock) {
+ if (renderThreadHandler != null) {
+ renderThreadHandler.postAtFrontOfQueue(makeBlackRunnable);
+ }
+ }
+ }
+ setMeasuredDimension(desiredLayoutSize.x, desiredLayoutSize.y);
}
}
@Override
protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
synchronized (layoutLock) {
- layoutWidth = right - left;
- layoutHeight = bottom - top;
+ layoutSize.x = right - left;
+ layoutSize.y = bottom - top;
}
// Might have a pending frame waiting for a layout of correct size.
runOnRenderThread(renderFrameRunnable);
@@ -337,7 +371,7 @@ public class SurfaceViewRenderer extends SurfaceView
// SurfaceHolder.Callback interface.
@Override
public void surfaceCreated(final SurfaceHolder holder) {
- Logging.d(TAG, "Surface created");
+ Logging.d(TAG, getResourceName() + "Surface created.");
synchronized (layoutLock) {
isSurfaceCreated = true;
}
@@ -346,11 +380,11 @@ public class SurfaceViewRenderer extends SurfaceView
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
- Logging.d(TAG, "Surface destroyed");
+ Logging.d(TAG, getResourceName() + "Surface destroyed.");
synchronized (layoutLock) {
isSurfaceCreated = false;
- surfaceWidth = 0;
- surfaceHeight = 0;
+ surfaceSize.x = 0;
+ surfaceSize.y = 0;
}
runOnRenderThread(new Runnable() {
@Override public void run() {
@@ -361,10 +395,10 @@ public class SurfaceViewRenderer extends SurfaceView
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
- Logging.d(TAG, "Surface changed: " + width + "x" + height);
+ Logging.d(TAG, getResourceName() + "Surface changed: " + width + "x" + height);
synchronized (layoutLock) {
- surfaceWidth = width;
- surfaceHeight = height;
+ surfaceSize.x = width;
+ surfaceSize.y = height;
}
// Might have a pending frame waiting for a surface of correct size.
runOnRenderThread(renderFrameRunnable);
@@ -381,26 +415,35 @@ public class SurfaceViewRenderer extends SurfaceView
}
}
+ private String getResourceName() {
+ try {
+ return getResources().getResourceEntryName(getId()) + ": ";
+ } catch (NotFoundException e) {
+ return "";
+ }
+ }
+
+ private void makeBlack() {
+ if (Thread.currentThread() != renderThread) {
+ throw new IllegalStateException(getResourceName() + "Wrong thread.");
+ }
+ if (eglBase != null && eglBase.hasSurface()) {
+ GLES20.glClearColor(0, 0, 0, 0);
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ eglBase.swapBuffers();
+ }
+ }
+
/**
* Requests new layout if necessary. Returns true if layout and surface size are consistent.
*/
private boolean checkConsistentLayout() {
+ if (Thread.currentThread() != renderThread) {
+ throw new IllegalStateException(getResourceName() + "Wrong thread.");
+ }
synchronized (layoutLock) {
- final Point desiredLayoutSize = getDesiredLayoutSize();
- if (desiredLayoutSize.x != layoutWidth || desiredLayoutSize.y != layoutHeight) {
- Logging.d(TAG, "Requesting new layout with size: "
- + desiredLayoutSize.x + "x" + desiredLayoutSize.y);
- // Request layout update on UI thread.
- post(new Runnable() {
- @Override public void run() {
- requestLayout();
- }
- });
- return false;
- }
- // Wait for requestLayout() to propagate through this sequence before returning true:
- // requestLayout() -> onMeasure() -> onLayout() -> surfaceChanged().
- return surfaceWidth == layoutWidth && surfaceHeight == layoutHeight;
+ // Return false while we are in the middle of a layout change.
+ return layoutSize.equals(desiredLayoutSize) && surfaceSize.equals(layoutSize);
}
}
@@ -408,61 +451,51 @@ public class SurfaceViewRenderer extends SurfaceView
* Renders and releases |pendingFrame|.
*/
private void renderFrameOnRenderThread() {
+ if (Thread.currentThread() != renderThread) {
+ throw new IllegalStateException(getResourceName() + "Wrong thread.");
+ }
+ // Fetch and render |pendingFrame|.
+ final VideoRenderer.I420Frame frame;
+ synchronized (frameLock) {
+ if (pendingFrame == null) {
+ return;
+ }
+ frame = pendingFrame;
+ pendingFrame = null;
+ }
if (eglBase == null || !eglBase.hasSurface()) {
- Logging.d(TAG, "No surface to draw on");
+ Logging.d(TAG, getResourceName() + "No surface to draw on");
+ VideoRenderer.renderFrameDone(frame);
return;
}
if (!checkConsistentLayout()) {
// Output intermediate black frames while the layout is updated.
- GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
- eglBase.swapBuffers();
+ makeBlack();
+ VideoRenderer.renderFrameDone(frame);
return;
}
// After a surface size change, the EGLSurface might still have a buffer of the old size in the
// pipeline. Querying the EGLSurface will show if the underlying buffer dimensions haven't yet
// changed. Such a buffer will be rendered incorrectly, so flush it with a black frame.
synchronized (layoutLock) {
- if (eglBase.surfaceWidth() != surfaceWidth || eglBase.surfaceHeight() != surfaceHeight) {
- GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
- eglBase.swapBuffers();
+ if (eglBase.surfaceWidth() != surfaceSize.x || eglBase.surfaceHeight() != surfaceSize.y) {
+ makeBlack();
}
}
- // Fetch and render |pendingFrame|.
- final VideoRenderer.I420Frame frame;
- synchronized (frameLock) {
- if (pendingFrame == null) {
- return;
- }
- frame = pendingFrame;
- pendingFrame = null;
- }
final long startTimeNs = System.nanoTime();
- final float[] samplingMatrix;
- if (frame.yuvFrame) {
- // The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
- // top-left corner of the image, but in glTexImage2D() the first element corresponds to the
- // bottom-left corner. We correct this discrepancy by setting a vertical flip as sampling
- // matrix.
- samplingMatrix = RendererCommon.verticalFlipMatrix();
- } else {
- // TODO(magjed): Move updateTexImage() to the video source instead.
- SurfaceTexture surfaceTexture = (SurfaceTexture) frame.textureObject;
- surfaceTexture.updateTexImage();
- samplingMatrix = new float[16];
- surfaceTexture.getTransformMatrix(samplingMatrix);
- }
-
final float[] texMatrix;
synchronized (layoutLock) {
final float[] rotatedSamplingMatrix =
- RendererCommon.rotateTextureMatrix(samplingMatrix, frame.rotationDegree);
+ RendererCommon.rotateTextureMatrix(frame.samplingMatrix, frame.rotationDegree);
final float[] layoutMatrix = RendererCommon.getLayoutMatrix(
- mirror, frameAspectRatio(), (float) layoutWidth / layoutHeight);
+ mirror, frameAspectRatio(), (float) layoutSize.x / layoutSize.y);
texMatrix = RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);
}
- GLES20.glViewport(0, 0, surfaceWidth, surfaceHeight);
+ // TODO(magjed): glClear() shouldn't be necessary since every pixel is covered anyway, but it's
+ // a workaround for bug 5147. Performance will be slightly worse.
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
if (frame.yuvFrame) {
// Make sure YUV textures are allocated.
if (yuvTextures == null) {
@@ -471,11 +504,11 @@ public class SurfaceViewRenderer extends SurfaceView
yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
}
}
- drawer.uploadYuvData(
+ yuvUploader.uploadYuvData(
yuvTextures, frame.width, frame.height, frame.yuvStrides, frame.yuvPlanes);
- drawer.drawYuv(yuvTextures, texMatrix);
+ drawer.drawYuv(yuvTextures, texMatrix, 0, 0, surfaceSize.x, surfaceSize.y);
} else {
- drawer.drawOes(frame.textureId, texMatrix);
+ drawer.drawOes(frame.textureId, texMatrix, 0, 0, surfaceSize.x, surfaceSize.y);
}
eglBase.swapBuffers();
@@ -483,6 +516,12 @@ public class SurfaceViewRenderer extends SurfaceView
synchronized (statisticsLock) {
if (framesRendered == 0) {
firstFrameTimeNs = startTimeNs;
+ synchronized (layoutLock) {
+ Logging.d(TAG, getResourceName() + "Reporting first rendered frame.");
+ if (rendererEvents != null) {
+ rendererEvents.onFirstFrameRendered();
+ }
+ }
}
++framesRendered;
renderTimeNs += (System.nanoTime() - startTimeNs);
@@ -508,32 +547,32 @@ public class SurfaceViewRenderer extends SurfaceView
synchronized (layoutLock) {
if (frameWidth != frame.width || frameHeight != frame.height
|| frameRotation != frame.rotationDegree) {
+ Logging.d(TAG, getResourceName() + "Reporting frame resolution changed to "
+ + frame.width + "x" + frame.height + " with rotation " + frame.rotationDegree);
if (rendererEvents != null) {
- final String id = getResources().getResourceEntryName(getId());
- if (frameWidth == 0 || frameHeight == 0) {
- Logging.d(TAG, "ID: " + id + ". Reporting first rendered frame.");
- rendererEvents.onFirstFrameRendered();
- }
- Logging.d(TAG, "ID: " + id + ". Reporting frame resolution changed to "
- + frame.width + "x" + frame.height + " with rotation " + frame.rotationDegree);
rendererEvents.onFrameResolutionChanged(frame.width, frame.height, frame.rotationDegree);
}
frameWidth = frame.width;
frameHeight = frame.height;
frameRotation = frame.rotationDegree;
+ post(new Runnable() {
+ @Override public void run() {
+ requestLayout();
+ }
+ });
}
}
}
private void logStatistics() {
synchronized (statisticsLock) {
- Logging.d(TAG, "ID: " + getResources().getResourceEntryName(getId()) + ". Frames received: "
+ Logging.d(TAG, getResourceName() + "Frames received: "
+ framesReceived + ". Dropped: " + framesDropped + ". Rendered: " + framesRendered);
if (framesReceived > 0 && framesRendered > 0) {
final long timeSinceFirstFrameNs = System.nanoTime() - firstFrameTimeNs;
- Logging.d(TAG, "Duration: " + (int) (timeSinceFirstFrameNs / 1e6) +
- " ms. FPS: " + (float) framesRendered * 1e9 / timeSinceFirstFrameNs);
- Logging.d(TAG, "Average render time: "
+ Logging.d(TAG, getResourceName() + "Duration: " + (int) (timeSinceFirstFrameNs / 1e6) +
+ " ms. FPS: " + framesRendered * 1e9 / timeSinceFirstFrameNs);
+ Logging.d(TAG, getResourceName() + "Average render time: "
+ (int) (renderTimeNs / (1000 * framesRendered)) + " us.");
}
}
diff --git a/talk/app/webrtc/java/android/org/webrtc/ThreadUtils.java b/talk/app/webrtc/java/android/org/webrtc/ThreadUtils.java
index 0d8968aba9..e60ead9f00 100644
--- a/talk/app/webrtc/java/android/org/webrtc/ThreadUtils.java
+++ b/talk/app/webrtc/java/android/org/webrtc/ThreadUtils.java
@@ -28,11 +28,13 @@
package org.webrtc;
import android.os.Handler;
+import android.os.SystemClock;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
-final class ThreadUtils {
+public class ThreadUtils {
/**
* Utility class to be used for checking that a method is called on the correct thread.
*/
@@ -86,6 +88,29 @@ final class ThreadUtils {
}
}
+ public static boolean joinUninterruptibly(final Thread thread, long timeoutMs) {
+ final long startTimeMs = SystemClock.elapsedRealtime();
+ long timeRemainingMs = timeoutMs;
+ boolean wasInterrupted = false;
+ while (timeRemainingMs > 0) {
+ try {
+ thread.join(timeRemainingMs);
+ break;
+ } catch (InterruptedException e) {
+ // Someone is asking us to return early at our convenience. We can't cancel this operation,
+ // but we should preserve the information and pass it along.
+ wasInterrupted = true;
+ final long elapsedTimeMs = SystemClock.elapsedRealtime() - startTimeMs;
+ timeRemainingMs = timeoutMs - elapsedTimeMs;
+ }
+ }
+ // Pass interruption information along.
+ if (wasInterrupted) {
+ Thread.currentThread().interrupt();
+ }
+ return !thread.isAlive();
+ }
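
A hypothetical caller, assuming |workerThread| is a HandlerThread being torn down and giving it two seconds to wind down without losing a pending interrupt (names are illustrative):

    workerThread.quit();
    if (!ThreadUtils.joinUninterruptibly(workerThread, 2000)) {
      Logging.e(TAG, "Worker thread did not exit within the timeout.");
    }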
+
public static void joinUninterruptibly(final Thread thread) {
executeUninterruptibly(new BlockingOperation() {
@Override
@@ -104,6 +129,30 @@ final class ThreadUtils {
});
}
+ public static boolean awaitUninterruptibly(CountDownLatch barrier, long timeoutMs) {
+ final long startTimeMs = SystemClock.elapsedRealtime();
+ long timeRemainingMs = timeoutMs;
+ boolean wasInterrupted = false;
+ boolean result = false;
+ do {
+ try {
+ result = barrier.await(timeRemainingMs, TimeUnit.MILLISECONDS);
+ break;
+ } catch (InterruptedException e) {
+ // Someone is asking us to return early at our convenience. We can't cancel this operation,
+ // but we should preserve the information and pass it along.
+ wasInterrupted = true;
+ final long elapsedTimeMs = SystemClock.elapsedRealtime() - startTimeMs;
+ timeRemainingMs = timeoutMs - elapsedTimeMs;
+ }
+ } while (timeRemainingMs > 0);
+ // Pass interruption information along.
+ if (wasInterrupted) {
+ Thread.currentThread().interrupt();
+ }
+ return result;
+ }
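
And correspondingly for latches, bounding the wait on a cleanup barrier (an illustrative sketch; postCleanup() is a hypothetical helper that counts the latch down when finished):

    final CountDownLatch cleanupBarrier = new CountDownLatch(1);
    postCleanup(cleanupBarrier);
    if (!ThreadUtils.awaitUninterruptibly(cleanupBarrier, 500)) {
      Logging.w(TAG, "Timed out waiting for cleanup.");
    }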
+
/**
* Post |callable| to |handler| and wait for the result.
*/
diff --git a/talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java b/talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java
index 4caefc513d..36f60edd5c 100644
--- a/talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java
+++ b/talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java
@@ -28,9 +28,6 @@
package org.webrtc;
import android.content.Context;
-import android.graphics.SurfaceTexture;
-import android.hardware.Camera;
-import android.hardware.Camera.PreviewCallback;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.SystemClock;
@@ -53,9 +50,6 @@ import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
-import javax.microedition.khronos.egl.EGLContext;
-import javax.microedition.khronos.egl.EGL10;
-
// Android specific implementation of VideoCapturer.
// An instance of this class can be created by an application using
// VideoCapturerAndroid.create();
@@ -68,21 +62,22 @@ import javax.microedition.khronos.egl.EGL10;
// camera thread. The internal *OnCameraThread() methods must check |camera| for null to check if
// the camera has been stopped.
@SuppressWarnings("deprecation")
-public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallback,
+public class VideoCapturerAndroid extends VideoCapturer implements
+ android.hardware.Camera.PreviewCallback,
SurfaceTextureHelper.OnTextureFrameAvailableListener {
private final static String TAG = "VideoCapturerAndroid";
private final static int CAMERA_OBSERVER_PERIOD_MS = 2000;
+ private final static int CAMERA_FREEZE_REPORT_TIMOUT_MS = 6000;
- private Camera camera; // Only non-null while capturing.
+ private android.hardware.Camera camera; // Only non-null while capturing.
private HandlerThread cameraThread;
private final Handler cameraThreadHandler;
private Context applicationContext;
// Synchronization lock for |id|.
private final Object cameraIdLock = new Object();
private int id;
- private Camera.CameraInfo info;
- private final FramePool videoBuffers;
- private final CameraStatistics cameraStatistics = new CameraStatistics();
+ private android.hardware.Camera.CameraInfo info;
+ private final CameraStatistics cameraStatistics;
// Remember the requested format in case we want to switch cameras.
private int requestedWidth;
private int requestedHeight;
@@ -94,17 +89,28 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
private CapturerObserver frameObserver = null;
private final CameraEventsHandler eventsHandler;
private boolean firstFrameReported;
+ // Arbitrary queue depth. Higher number means more memory allocated & held,
+ // lower number means more sensitivity to processing time in the client (and
+ // potentially stalling the capturer if it runs out of buffers to write to).
+ private static final int NUMBER_OF_CAPTURE_BUFFERS = 3;
+ private final Set<byte[]> queuedBuffers = new HashSet<byte[]>();
private final boolean isCapturingToTexture;
- private final SurfaceTextureHelper surfaceHelper;
+ final SurfaceTextureHelper surfaceHelper; // Package visible for testing purposes.
// The camera API can output one old frame after the camera has been switched or the resolution
// has been changed. This flag is used for dropping the first frame after camera restart.
private boolean dropNextFrame = false;
+ // |openCameraOnCodecThreadRunner| is used for retrying to open the camera if it is in use by
+ // another application when startCaptureOnCameraThread is called.
+ private Runnable openCameraOnCodecThreadRunner;
+ private final static int MAX_OPEN_CAMERA_ATTEMPTS = 3;
+ private final static int OPEN_CAMERA_DELAY_MS = 500;
+ private int openCameraAttempts;
// Camera error callback.
- private final Camera.ErrorCallback cameraErrorCallback =
- new Camera.ErrorCallback() {
+ private final android.hardware.Camera.ErrorCallback cameraErrorCallback =
+ new android.hardware.Camera.ErrorCallback() {
@Override
- public void onError(int error, Camera camera) {
+ public void onError(int error, android.hardware.Camera camera) {
String errorMessage;
if (error == android.hardware.Camera.CAMERA_ERROR_SERVER_DIED) {
errorMessage = "Camera server died!";
@@ -120,47 +126,45 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
// Camera observer - monitors camera framerate. Observer is executed on camera thread.
private final Runnable cameraObserver = new Runnable() {
+ private int freezePeriodCount;
@Override
public void run() {
int cameraFramesCount = cameraStatistics.getAndResetFrameCount();
int cameraFps = (cameraFramesCount * 1000 + CAMERA_OBSERVER_PERIOD_MS / 2)
/ CAMERA_OBSERVER_PERIOD_MS;
- Logging.d(TAG, "Camera fps: " + cameraFps +
- ". Pending buffers: " + cameraStatistics.pendingFramesTimeStamps());
+ Logging.d(TAG, "Camera fps: " + cameraFps +".");
if (cameraFramesCount == 0) {
- Logging.e(TAG, "Camera freezed.");
- if (eventsHandler != null) {
- eventsHandler.onCameraError("Camera failure.");
+ ++freezePeriodCount;
+ if (CAMERA_OBSERVER_PERIOD_MS * freezePeriodCount > CAMERA_FREEZE_REPORT_TIMOUT_MS
+ && eventsHandler != null) {
+ Logging.e(TAG, "Camera freezed.");
+ if (surfaceHelper.isTextureInUse()) {
+ // This can only happen if we are capturing to textures.
+ eventsHandler.onCameraFreezed("Camera failure. Client must return video buffers.");
+ } else {
+ eventsHandler.onCameraFreezed("Camera failure.");
+ }
+ return;
}
} else {
- cameraThreadHandler.postDelayed(this, CAMERA_OBSERVER_PERIOD_MS);
+ freezePeriodCount = 0;
}
+ cameraThreadHandler.postDelayed(this, CAMERA_OBSERVER_PERIOD_MS);
}
};
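
Worked numbers for the expressions above: with CAMERA_OBSERVER_PERIOD_MS = 2000, a period that saw 29 frames reports (29 * 1000 + 1000) / 2000 = 15 fps, i.e. 14.5 fps rounded to the nearest integer rather than truncated. Freezing is reported only once 2000 * freezePeriodCount exceeds CAMERA_FREEZE_REPORT_TIMOUT_MS = 6000, so the fourth consecutive empty period is the first to trigger onCameraFreezed().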
private static class CameraStatistics {
private int frameCount = 0;
private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();
- private final Set<Long> timeStampsNs = new HashSet<Long>();
CameraStatistics() {
threadChecker.detachThread();
}
- public void addPendingFrame(long timestamp) {
+ public void addFrame() {
threadChecker.checkIsOnValidThread();
++frameCount;
- timeStampsNs.add(timestamp);
- }
-
- public void frameReturned(long timestamp) {
- threadChecker.checkIsOnValidThread();
- if (!timeStampsNs.contains(timestamp)) {
- throw new IllegalStateException(
- "CameraStatistics.frameReturned called with unknown timestamp " + timestamp);
- }
- timeStampsNs.remove(timestamp);
}
public int getAndResetFrameCount() {
@@ -169,28 +173,16 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
frameCount = 0;
return count;
}
-
- // Return number of pending frames that have not been returned.
- public int pendingFramesCount() {
- threadChecker.checkIsOnValidThread();
- return timeStampsNs.size();
- }
-
- public String pendingFramesTimeStamps() {
- threadChecker.checkIsOnValidThread();
- List<Long> timeStampsMs = new ArrayList<Long>();
- for (long ts : timeStampsNs) {
- timeStampsMs.add(TimeUnit.NANOSECONDS.toMillis(ts));
- }
- return timeStampsMs.toString();
- }
}
public static interface CameraEventsHandler {
- // Camera error handler - invoked when camera stops receiving frames
+  // Camera error handler - invoked when the camera cannot be opened
// or any camera exception happens on camera thread.
void onCameraError(String errorDescription);
+    // Invoked when the camera stops receiving frames.
+ void onCameraFreezed(String errorDescription);
+
// Callback invoked when camera is opening.
void onCameraOpening(int cameraId);
@@ -216,7 +208,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
}
public static VideoCapturerAndroid create(String name,
- CameraEventsHandler eventsHandler, EGLContext sharedEglContext) {
+ CameraEventsHandler eventsHandler, EglBase.Context sharedEglContext) {
final int cameraId = lookupDeviceName(name);
if (cameraId == -1) {
return null;
@@ -224,7 +216,8 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
final VideoCapturerAndroid capturer = new VideoCapturerAndroid(cameraId, eventsHandler,
sharedEglContext);
- capturer.setNativeCapturer(nativeCreateVideoCapturer(capturer));
+ capturer.setNativeCapturer(
+ nativeCreateVideoCapturer(capturer, capturer.surfaceHelper));
return capturer;
}
@@ -243,7 +236,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
// Switch camera to the next valid camera id. This can only be called while
// the camera is running.
public void switchCamera(final CameraSwitchHandler handler) {
- if (Camera.getNumberOfCameras() < 2) {
+ if (android.hardware.Camera.getNumberOfCameras() < 2) {
if (handler != null) {
handler.onCameraSwitchError("No camera to switch to.");
}
@@ -274,7 +267,8 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
pendingCameraSwitch = false;
}
if (handler != null) {
- handler.onCameraSwitchDone(info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT);
+ handler.onCameraSwitchDone(
+ info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT);
}
}
});
@@ -282,6 +276,8 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
// Requests a new output format from the video capturer. Captured frames
 // by the camera will be scaled and/or dropped by the video capturer.
+  // It does not matter if width and height are flipped. E.g., |width| = 640, |height| = 480
+  // produces the same result as |width| = 480, |height| = 640.
// TODO(magjed/perkj): Document what this function does. Change name?
public void onOutputFormatRequest(final int width, final int height, final int framerate) {
cameraThreadHandler.post(new Runnable() {
@@ -303,7 +299,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
// Helper function to retrieve the current camera id synchronously. Note that the camera id might
// change at any point by switchCamera() calls.
- private int getCurrentCameraId() {
+ int getCurrentCameraId() {
synchronized (cameraIdLock) {
return id;
}
@@ -329,20 +325,19 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
}
private VideoCapturerAndroid(int cameraId, CameraEventsHandler eventsHandler,
- EGLContext sharedContext) {
- Logging.d(TAG, "VideoCapturerAndroid");
+ EglBase.Context sharedContext) {
this.id = cameraId;
this.eventsHandler = eventsHandler;
cameraThread = new HandlerThread(TAG);
cameraThread.start();
cameraThreadHandler = new Handler(cameraThread.getLooper());
- videoBuffers = new FramePool(cameraThread);
isCapturingToTexture = (sharedContext != null);
- surfaceHelper = SurfaceTextureHelper.create(
- isCapturingToTexture ? sharedContext : EGL10.EGL_NO_CONTEXT, cameraThreadHandler);
+ cameraStatistics = new CameraStatistics();
+ surfaceHelper = SurfaceTextureHelper.create(sharedContext, cameraThreadHandler);
if (isCapturingToTexture) {
surfaceHelper.setListener(this);
}
+ Logging.d(TAG, "VideoCapturerAndroid isCapturingToTexture : " + isCapturingToTexture);
}
private void checkIsOnCameraThread() {
@@ -355,13 +350,13 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
// found. If |deviceName| is empty, the first available device is used.
private static int lookupDeviceName(String deviceName) {
Logging.d(TAG, "lookupDeviceName: " + deviceName);
- if (deviceName == null || Camera.getNumberOfCameras() == 0) {
+ if (deviceName == null || android.hardware.Camera.getNumberOfCameras() == 0) {
return -1;
}
if (deviceName.isEmpty()) {
return 0;
}
- for (int i = 0; i < Camera.getNumberOfCameras(); ++i) {
+ for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
if (deviceName.equals(CameraEnumerationAndroid.getDeviceName(i))) {
return i;
}
@@ -382,14 +377,9 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
if (camera != null) {
throw new IllegalStateException("Release called while camera is running");
}
- if (cameraStatistics.pendingFramesCount() != 0) {
- throw new IllegalStateException("Release called with pending frames left");
- }
}
});
- surfaceHelper.disconnect();
- cameraThread.quit();
- ThreadUtils.joinUninterruptibly(cameraThread);
+ surfaceHelper.disconnect(cameraThreadHandler);
cameraThread = null;
}
@@ -413,6 +403,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
if (frameObserver == null) {
throw new RuntimeException("frameObserver not set.");
}
+
cameraThreadHandler.post(new Runnable() {
@Override public void run() {
startCaptureOnCameraThread(width, height, framerate, frameObserver,
@@ -422,8 +413,8 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
}
private void startCaptureOnCameraThread(
- int width, int height, int framerate, CapturerObserver frameObserver,
- Context applicationContext) {
+ final int width, final int height, final int framerate, final CapturerObserver frameObserver,
+ final Context applicationContext) {
Throwable error = null;
checkIsOnCameraThread();
if (camera != null) {
@@ -431,17 +422,36 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
}
this.applicationContext = applicationContext;
this.frameObserver = frameObserver;
+ this.firstFrameReported = false;
+
try {
- synchronized (cameraIdLock) {
- Logging.d(TAG, "Opening camera " + id);
- firstFrameReported = false;
- if (eventsHandler != null) {
- eventsHandler.onCameraOpening(id);
+ try {
+ synchronized (cameraIdLock) {
+ Logging.d(TAG, "Opening camera " + id);
+ if (eventsHandler != null) {
+ eventsHandler.onCameraOpening(id);
+ }
+ camera = android.hardware.Camera.open(id);
+ info = new android.hardware.Camera.CameraInfo();
+ android.hardware.Camera.getCameraInfo(id, info);
+ }
+ } catch (RuntimeException e) {
+ openCameraAttempts++;
+ if (openCameraAttempts < MAX_OPEN_CAMERA_ATTEMPTS) {
+ Logging.e(TAG, "Camera.open failed, retrying", e);
+ openCameraOnCodecThreadRunner = new Runnable() {
+ @Override public void run() {
+ startCaptureOnCameraThread(width, height, framerate, frameObserver,
+ applicationContext);
+ }
+ };
+ cameraThreadHandler.postDelayed(openCameraOnCodecThreadRunner, OPEN_CAMERA_DELAY_MS);
+ return;
}
- camera = Camera.open(id);
- info = new Camera.CameraInfo();
- Camera.getCameraInfo(id, info);
+ openCameraAttempts = 0;
+ throw e;
}
+
try {
camera.setPreviewTexture(surfaceHelper.getSurfaceTexture());
} catch (IOException e) {
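
The retry logic above is an instance of a generic post-delayed retry on a Handler thread. A standalone sketch of the pattern (a hypothetical helper, not the patch's code), assuming |handler| is bound to the camera thread:

    private void runWithRetry(final Handler handler, final Runnable task,
        final int attemptsLeft, final long delayMs) {
      try {
        task.run();
      } catch (RuntimeException e) {
        if (attemptsLeft <= 1) {
          throw e;  // Out of attempts; surface the last failure.
        }
        handler.postDelayed(new Runnable() {
          @Override public void run() {
            runWithRetry(handler, task, attemptsLeft - 1, delayMs);
          }
        }, delayMs);
      }
    }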
@@ -485,17 +495,18 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
requestedFramerate = framerate;
// Find closest supported format for |width| x |height| @ |framerate|.
- final Camera.Parameters parameters = camera.getParameters();
+ final android.hardware.Camera.Parameters parameters = camera.getParameters();
final int[] range = CameraEnumerationAndroid.getFramerateRange(parameters, framerate * 1000);
- final Camera.Size previewSize = CameraEnumerationAndroid.getClosestSupportedSize(
- parameters.getSupportedPreviewSizes(), width, height);
+ final android.hardware.Camera.Size previewSize =
+ CameraEnumerationAndroid.getClosestSupportedSize(
+ parameters.getSupportedPreviewSizes(), width, height);
final CaptureFormat captureFormat = new CaptureFormat(
previewSize.width, previewSize.height,
- range[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
- range[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
+ range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
+ range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
// Check if we are already using this capture format, then we don't need to do anything.
- if (captureFormat.equals(this.captureFormat)) {
+ if (captureFormat.isSameFormat(this.captureFormat)) {
return;
}
@@ -511,11 +522,15 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
parameters.setPreviewFpsRange(captureFormat.minFramerate, captureFormat.maxFramerate);
}
parameters.setPreviewSize(captureFormat.width, captureFormat.height);
- parameters.setPreviewFormat(captureFormat.imageFormat);
+
+ if (!isCapturingToTexture) {
+ parameters.setPreviewFormat(captureFormat.imageFormat);
+ }
// Picture size is for taking pictures and not for preview/video, but we need to set it anyway
// as a workaround for an aspect ratio problem on Nexus 7.
- final Camera.Size pictureSize = CameraEnumerationAndroid.getClosestSupportedSize(
- parameters.getSupportedPictureSizes(), width, height);
+ final android.hardware.Camera.Size pictureSize =
+ CameraEnumerationAndroid.getClosestSupportedSize(
+ parameters.getSupportedPictureSizes(), width, height);
parameters.setPictureSize(pictureSize.width, pictureSize.height);
// Temporarily stop preview if it's already running.
@@ -532,13 +547,19 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
this.captureFormat = captureFormat;
List<String> focusModes = parameters.getSupportedFocusModes();
- if (focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
- parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
+ if (focusModes.contains(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
+ parameters.setFocusMode(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
camera.setParameters(parameters);
if (!isCapturingToTexture) {
- videoBuffers.queueCameraBuffers(captureFormat.frameSize(), camera);
+ queuedBuffers.clear();
+ final int frameSize = captureFormat.frameSize();
+ for (int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) {
+ final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
+ queuedBuffers.add(buffer.array());
+ camera.addCallbackBuffer(buffer.array());
+ }
camera.setPreviewCallbackWithBuffer(this);
}
camera.startPreview();
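
The buffers queued above define the capture pipeline's depth: the camera writes only into buffers the application has queued, so every buffer delivered to onPreviewFrame() must eventually be handed back, roughly as in this sketch (processFrame() is hypothetical):

    @Override
    public void onPreviewFrame(byte[] data, android.hardware.Camera camera) {
      processFrame(data);              // Consume or copy synchronously.
      camera.addCallbackBuffer(data);  // Re-queue it, or capture stalls once
                                       // all NUMBER_OF_CAPTURE_BUFFERS are held.
    }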
@@ -561,6 +582,10 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
private void stopCaptureOnCameraThread() {
checkIsOnCameraThread();
Logging.d(TAG, "stopCaptureOnCameraThread");
+ if (openCameraOnCodecThreadRunner != null) {
+ cameraThreadHandler.removeCallbacks(openCameraOnCodecThreadRunner);
+ }
+ openCameraAttempts = 0;
if (camera == null) {
Logging.e(TAG, "Calling stopCapture() for already stopped camera.");
return;
@@ -571,13 +596,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
Logging.d(TAG, "Stop preview.");
camera.stopPreview();
camera.setPreviewCallbackWithBuffer(null);
- if (!isCapturingToTexture()) {
- videoBuffers.stopReturnBuffersToCamera();
- Logging.d(TAG, "stopReturnBuffersToCamera called."
- + (cameraStatistics.pendingFramesCount() == 0?
- " All buffers have been returned."
- : " Pending buffers: " + cameraStatistics.pendingFramesTimeStamps() + "."));
- }
+ queuedBuffers.clear();
captureFormat = null;
Logging.d(TAG, "Release camera.");
@@ -593,7 +612,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
Logging.d(TAG, "switchCameraOnCameraThread");
stopCaptureOnCameraThread();
synchronized (cameraIdLock) {
- id = (id + 1) % Camera.getNumberOfCameras();
+ id = (id + 1) % android.hardware.Camera.getNumberOfCameras();
}
dropNextFrame = true;
startCaptureOnCameraThread(requestedWidth, requestedHeight, requestedFramerate, frameObserver,
@@ -612,17 +631,9 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
frameObserver.onOutputFormatRequest(width, height, framerate);
}
- public void returnBuffer(final long timeStamp) {
- cameraThreadHandler.post(new Runnable() {
- @Override public void run() {
- cameraStatistics.frameReturned(timeStamp);
- if (isCapturingToTexture) {
- surfaceHelper.returnTextureFrame();
- } else {
- videoBuffers.returnBuffer(timeStamp);
- }
- }
- });
+ // Exposed for testing purposes only.
+ Handler getCameraThreadHandler() {
+ return cameraThreadHandler;
}
private int getDeviceOrientation() {
@@ -650,7 +661,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
private int getFrameOrientation() {
int rotation = getDeviceOrientation();
- if (info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
+ if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK) {
rotation = 360 - rotation;
}
return (info.orientation + rotation) % 360;
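
Worked example: a back-facing camera with info.orientation == 90 on a device whose current rotation is 270 degrees gives rotation = 360 - 270 = 90, and a frame orientation of (90 + 90) % 360 = 180.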
@@ -658,9 +669,10 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
 // Called on cameraThread, so this must not be declared "synchronized".
@Override
- public void onPreviewFrame(byte[] data, Camera callbackCamera) {
+ public void onPreviewFrame(byte[] data, android.hardware.Camera callbackCamera) {
checkIsOnCameraThread();
- if (camera == null) {
+ if (camera == null || !queuedBuffers.contains(data)) {
+ // The camera has been stopped or |data| is an old invalid buffer.
return;
}
if (camera != callbackCamera) {
@@ -675,16 +687,10 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
firstFrameReported = true;
}
- // Mark the frame owning |data| as used.
- // Note that since data is directBuffer,
- // data.length >= videoBuffers.frameSize.
- if (videoBuffers.reserveByteBuffer(data, captureTimeNs)) {
- cameraStatistics.addPendingFrame(captureTimeNs);
- frameObserver.onByteBufferFrameCaptured(data, videoBuffers.frameSize, captureFormat.width,
- captureFormat.height, getFrameOrientation(), captureTimeNs);
- } else {
- Logging.w(TAG, "reserveByteBuffer failed - dropping frame.");
- }
+ cameraStatistics.addFrame();
+ frameObserver.onByteBufferFrameCaptured(data, captureFormat.width, captureFormat.height,
+ getFrameOrientation(), captureTimeNs);
+ camera.addCallbackBuffer(data);
}
@Override
@@ -696,135 +702,22 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
surfaceHelper.returnTextureFrame();
return;
}
- if (!dropNextFrame) {
+ if (dropNextFrame) {
surfaceHelper.returnTextureFrame();
- dropNextFrame = true;
+ dropNextFrame = false;
return;
}
int rotation = getFrameOrientation();
- if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
+ if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) {
// Undo the mirror that the OS "helps" us with.
// http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
transformMatrix =
RendererCommon.multiplyMatrices(transformMatrix, RendererCommon.horizontalFlipMatrix());
}
- transformMatrix = RendererCommon.rotateTextureMatrix(transformMatrix, rotation);
-
- final int rotatedWidth = (rotation % 180 == 0) ? captureFormat.width : captureFormat.height;
- final int rotatedHeight = (rotation % 180 == 0) ? captureFormat.height : captureFormat.width;
- cameraStatistics.addPendingFrame(timestampNs);
- frameObserver.onTextureFrameCaptured(rotatedWidth, rotatedHeight, oesTextureId,
- transformMatrix, timestampNs);
- }
-
- // Class used for allocating and bookkeeping video frames. All buffers are
- // direct allocated so that they can be directly used from native code. This class is
- // not thread-safe, and enforces single thread use.
- private static class FramePool {
- // Thread that all calls should be made on.
- private final Thread thread;
- // Arbitrary queue depth. Higher number means more memory allocated & held,
- // lower number means more sensitivity to processing time in the client (and
- // potentially stalling the capturer if it runs out of buffers to write to).
- private static final int numCaptureBuffers = 3;
- // This container tracks the buffers added as camera callback buffers. It is needed for finding
- // the corresponding ByteBuffer given a byte[].
- private final Map<byte[], ByteBuffer> queuedBuffers = new IdentityHashMap<byte[], ByteBuffer>();
- // This container tracks the frames that have been sent but not returned. It is needed for
- // keeping the buffers alive and for finding the corresponding ByteBuffer given a timestamp.
- private final Map<Long, ByteBuffer> pendingBuffers = new HashMap<Long, ByteBuffer>();
- private int frameSize = 0;
- private Camera camera;
-
- public FramePool(Thread thread) {
- this.thread = thread;
- }
-
- private void checkIsOnValidThread() {
- if (Thread.currentThread() != thread) {
- throw new IllegalStateException("Wrong thread");
- }
- }
-
- // Discards previous queued buffers and adds new callback buffers to camera.
- public void queueCameraBuffers(int frameSize, Camera camera) {
- checkIsOnValidThread();
- this.camera = camera;
- this.frameSize = frameSize;
-
- queuedBuffers.clear();
- for (int i = 0; i < numCaptureBuffers; ++i) {
- final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
- camera.addCallbackBuffer(buffer.array());
- queuedBuffers.put(buffer.array(), buffer);
- }
- Logging.d(TAG, "queueCameraBuffers enqueued " + numCaptureBuffers
- + " buffers of size " + frameSize + ".");
- }
-
- public void stopReturnBuffersToCamera() {
- checkIsOnValidThread();
- this.camera = null;
- queuedBuffers.clear();
- // Frames in |pendingBuffers| need to be kept alive until they are returned.
- }
-
- public boolean reserveByteBuffer(byte[] data, long timeStamp) {
- checkIsOnValidThread();
- final ByteBuffer buffer = queuedBuffers.remove(data);
- if (buffer == null) {
- // Frames might be posted to |onPreviewFrame| with the previous format while changing
- // capture format in |startPreviewOnCameraThread|. Drop these old frames.
- Logging.w(TAG, "Received callback buffer from previous configuration with length: "
- + (data == null ? "null" : data.length));
- return false;
- }
- if (buffer.capacity() != frameSize) {
- throw new IllegalStateException("Callback buffer has unexpected frame size");
- }
- if (pendingBuffers.containsKey(timeStamp)) {
- Logging.e(TAG, "Timestamp already present in pending buffers - they need to be unique");
- return false;
- }
- pendingBuffers.put(timeStamp, buffer);
- if (queuedBuffers.isEmpty()) {
- Logging.d(TAG, "Camera is running out of capture buffers.");
- }
- return true;
- }
-
- public void returnBuffer(long timeStamp) {
- checkIsOnValidThread();
- final ByteBuffer returnedFrame = pendingBuffers.remove(timeStamp);
- if (returnedFrame == null) {
- throw new RuntimeException("unknown data buffer with time stamp "
- + timeStamp + "returned?!?");
- }
-
- if (camera != null && returnedFrame.capacity() == frameSize) {
- camera.addCallbackBuffer(returnedFrame.array());
- if (queuedBuffers.isEmpty()) {
- Logging.d(TAG, "Frame returned when camera is running out of capture"
- + " buffers for TS " + TimeUnit.NANOSECONDS.toMillis(timeStamp));
- }
- queuedBuffers.put(returnedFrame.array(), returnedFrame);
- return;
- }
-
- if (returnedFrame.capacity() != frameSize) {
- Logging.d(TAG, "returnBuffer with time stamp "
- + TimeUnit.NANOSECONDS.toMillis(timeStamp)
- + " called with old frame size, " + returnedFrame.capacity() + ".");
- // Since this frame has the wrong size, don't requeue it. Frames with the correct size are
- // created in queueCameraBuffers so this must be an old buffer.
- return;
- }
-
- Logging.d(TAG, "returnBuffer with time stamp "
- + TimeUnit.NANOSECONDS.toMillis(timeStamp)
- + " called after camera has been stopped.");
- }
+ cameraStatistics.addFrame();
+ frameObserver.onTextureFrameCaptured(captureFormat.width, captureFormat.height, oesTextureId,
+ transformMatrix, rotation, timestampNs);
}
// Interface used for providing callbacks to an observer.
@@ -835,13 +728,14 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
// Delivers a captured frame. Called on a Java thread owned by
// VideoCapturerAndroid.
- abstract void onByteBufferFrameCaptured(byte[] data, int length, int width, int height,
- int rotation, long timeStamp);
+ abstract void onByteBufferFrameCaptured(byte[] data, int width, int height, int rotation,
+ long timeStamp);
// Delivers a captured frame in a texture with id |oesTextureId|. Called on a Java thread
// owned by VideoCapturerAndroid.
abstract void onTextureFrameCaptured(
- int width, int height, int oesTextureId, float[] transformMatrix, long timestamp);
+ int width, int height, int oesTextureId, float[] transformMatrix, int rotation,
+ long timestamp);
// Requests an output format from the video capturer. Captured frames
 // by the camera will be scaled and/or dropped by the video capturer.
@@ -864,17 +758,18 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
}
@Override
- public void onByteBufferFrameCaptured(byte[] data, int length, int width, int height,
+ public void onByteBufferFrameCaptured(byte[] data, int width, int height,
int rotation, long timeStamp) {
- nativeOnByteBufferFrameCaptured(nativeCapturer, data, length, width, height, rotation,
+ nativeOnByteBufferFrameCaptured(nativeCapturer, data, data.length, width, height, rotation,
timeStamp);
}
@Override
public void onTextureFrameCaptured(
- int width, int height, int oesTextureId, float[] transformMatrix, long timestamp) {
+ int width, int height, int oesTextureId, float[] transformMatrix, int rotation,
+ long timestamp) {
nativeOnTextureFrameCaptured(nativeCapturer, width, height, oesTextureId, transformMatrix,
- timestamp);
+ rotation, timestamp);
}
@Override
@@ -887,10 +782,12 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
private native void nativeOnByteBufferFrameCaptured(long nativeCapturer,
byte[] data, int length, int width, int height, int rotation, long timeStamp);
private native void nativeOnTextureFrameCaptured(long nativeCapturer, int width, int height,
- int oesTextureId, float[] transformMatrix, long timestamp);
+ int oesTextureId, float[] transformMatrix, int rotation, long timestamp);
private native void nativeOnOutputFormatRequest(long nativeCapturer,
int width, int height, int framerate);
}
- private static native long nativeCreateVideoCapturer(VideoCapturerAndroid videoCapturer);
+ private static native long nativeCreateVideoCapturer(
+ VideoCapturerAndroid videoCapturer,
+ SurfaceTextureHelper surfaceHelper);
}
diff --git a/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java b/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
index bacd0cf11f..bb6f01cea2 100644
--- a/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
+++ b/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
@@ -38,7 +38,7 @@ import javax.microedition.khronos.opengles.GL10;
import android.annotation.SuppressLint;
import android.graphics.Point;
import android.graphics.Rect;
-import android.graphics.SurfaceTexture;
+import android.opengl.EGL14;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
@@ -59,7 +59,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
private static Runnable eglContextReady = null;
private static final String TAG = "VideoRendererGui";
private GLSurfaceView surface;
- private static EGLContext eglContext = null;
+ private static EglBase.Context eglContext = null;
// Indicates if SurfaceView.Renderer.onSurfaceCreated was called.
// If true then for every newly created yuv image renderer createTexture()
// should be called. The variable is accessed on multiple threads and
@@ -69,8 +69,6 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
private int screenHeight;
// List of yuv renderers.
private final ArrayList<YuvImageRenderer> yuvImageRenderers;
- // |drawer| is synchronized on |yuvImageRenderers|.
- private GlRectDrawer drawer;
// Render and draw threads.
private static Thread renderFrameThread;
private static Thread drawThread;
@@ -99,6 +97,8 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
// currently leaking resources to avoid a rare crash in release() where the EGLContext has
// become invalid beforehand.
private int[] yuvTextures = { 0, 0, 0 };
+ private final RendererCommon.YuvUploader yuvUploader = new RendererCommon.YuvUploader();
+ private final RendererCommon.GlDrawer drawer;
// Resources for making a deep copy of incoming OES texture frame.
private GlTextureFrameBuffer textureCopy;
@@ -157,12 +157,13 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
private YuvImageRenderer(
GLSurfaceView surface, int id,
int x, int y, int width, int height,
- RendererCommon.ScalingType scalingType, boolean mirror) {
+ RendererCommon.ScalingType scalingType, boolean mirror, RendererCommon.GlDrawer drawer) {
Logging.d(TAG, "YuvImageRenderer.Create id: " + id);
this.surface = surface;
this.id = id;
this.scalingType = scalingType;
this.mirror = mirror;
+ this.drawer = drawer;
layoutInPercentage = new Rect(x, y, Math.min(100, x + width), Math.min(100, y + height));
updateLayoutProperties = false;
rotationDegree = 0;
@@ -174,6 +175,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
private synchronized void release() {
surface = null;
+ drawer.release();
synchronized (pendingFrameLock) {
if (pendingFrame != null) {
VideoRenderer.renderFrameDone(pendingFrame);
@@ -226,7 +228,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
}
}
- private void draw(GlRectDrawer drawer) {
+ private void draw() {
if (!seenFrame) {
// No frame received yet - nothing to render.
return;
@@ -241,29 +243,15 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
}
if (isNewFrame) {
+ rotatedSamplingMatrix = RendererCommon.rotateTextureMatrix(
+ pendingFrame.samplingMatrix, pendingFrame.rotationDegree);
if (pendingFrame.yuvFrame) {
rendererType = RendererType.RENDERER_YUV;
- drawer.uploadYuvData(yuvTextures, pendingFrame.width, pendingFrame.height,
+ yuvUploader.uploadYuvData(yuvTextures, pendingFrame.width, pendingFrame.height,
pendingFrame.yuvStrides, pendingFrame.yuvPlanes);
- // The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
- // top-left corner of the image, but in glTexImage2D() the first element corresponds to
- // the bottom-left corner. We correct this discrepancy by setting a vertical flip as
- // sampling matrix.
- final float[] samplingMatrix = RendererCommon.verticalFlipMatrix();
- rotatedSamplingMatrix =
- RendererCommon.rotateTextureMatrix(samplingMatrix, pendingFrame.rotationDegree);
} else {
rendererType = RendererType.RENDERER_TEXTURE;
- // External texture rendering. Update texture image to latest and make a deep copy of
- // the external texture.
- // TODO(magjed): Move updateTexImage() to the video source instead.
- final SurfaceTexture surfaceTexture = (SurfaceTexture) pendingFrame.textureObject;
- surfaceTexture.updateTexImage();
- final float[] samplingMatrix = new float[16];
- surfaceTexture.getTransformMatrix(samplingMatrix);
- rotatedSamplingMatrix =
- RendererCommon.rotateTextureMatrix(samplingMatrix, pendingFrame.rotationDegree);
-
+ // External texture rendering. Make a deep copy of the external texture.
// Reallocate offscreen texture if necessary.
textureCopy.setSize(pendingFrame.rotatedWidth(), pendingFrame.rotatedHeight());
@@ -272,12 +260,13 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
GlUtil.checkNoGLES2Error("glBindFramebuffer");
// Copy the OES texture content. This will also normalize the sampling matrix.
- GLES20.glViewport(0, 0, textureCopy.getWidth(), textureCopy.getHeight());
- drawer.drawOes(pendingFrame.textureId, rotatedSamplingMatrix);
+ drawer.drawOes(pendingFrame.textureId, rotatedSamplingMatrix,
+ 0, 0, textureCopy.getWidth(), textureCopy.getHeight());
rotatedSamplingMatrix = RendererCommon.identityMatrix();
// Restore normal framebuffer.
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+ GLES20.glFinish();
}
copyTimeNs += (System.nanoTime() - now);
VideoRenderer.renderFrameDone(pendingFrame);
@@ -285,17 +274,17 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
}
}
- // OpenGL defaults to lower left origin - flip vertically.
- GLES20.glViewport(displayLayout.left, screenHeight - displayLayout.bottom,
- displayLayout.width(), displayLayout.height());
-
updateLayoutMatrix();
final float[] texMatrix =
RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);
+ // OpenGL defaults to lower left origin - flip viewport position vertically.
+ final int viewportY = screenHeight - displayLayout.bottom;
if (rendererType == RendererType.RENDERER_YUV) {
- drawer.drawYuv(yuvTextures, texMatrix);
+ drawer.drawYuv(yuvTextures, texMatrix,
+ displayLayout.left, viewportY, displayLayout.width(), displayLayout.height());
} else {
- drawer.drawRgb(textureCopy.getTextureId(), texMatrix);
+ drawer.drawRgb(textureCopy.getTextureId(), texMatrix,
+ displayLayout.left, viewportY, displayLayout.width(), displayLayout.height());
}
if (isNewFrame) {
@@ -314,7 +303,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
". Dropped: " + framesDropped + ". Rendered: " + framesRendered);
if (framesReceived > 0 && framesRendered > 0) {
Logging.d(TAG, "Duration: " + (int)(timeSinceFirstFrameNs / 1e6) +
- " ms. FPS: " + (float)framesRendered * 1e9 / timeSinceFirstFrameNs);
+ " ms. FPS: " + framesRendered * 1e9 / timeSinceFirstFrameNs);
Logging.d(TAG, "Draw time: " +
(int) (drawTimeNs / (1000 * framesRendered)) + " us. Copy time: " +
(int) (copyTimeNs / (1000 * framesReceived)) + " us");
@@ -429,7 +418,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
eglContextReady = eglContextReadyCallback;
}
- public static synchronized EGLContext getEGLContext() {
+ public static synchronized EglBase.Context getEglBaseContext() {
return eglContext;
}
@@ -477,6 +466,16 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
*/
public static synchronized YuvImageRenderer create(int x, int y, int width, int height,
RendererCommon.ScalingType scalingType, boolean mirror) {
+ return create(x, y, width, height, scalingType, mirror, new GlRectDrawer());
+ }
+
+ /**
+ * Creates VideoRenderer.Callbacks with top left corner at (x, y) and resolution (width, height).
+ * All parameters are in percentage of screen resolution. The custom |drawer| will be used for
+ * drawing frames on the EGLSurface. This class is responsible for calling release() on |drawer|.
+ */
+ public static synchronized YuvImageRenderer create(int x, int y, int width, int height,
+ RendererCommon.ScalingType scalingType, boolean mirror, RendererCommon.GlDrawer drawer) {
// Check display region parameters.
if (x < 0 || x > 100 || y < 0 || y > 100 ||
width < 0 || width > 100 || height < 0 || height > 100 ||
@@ -490,7 +489,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
}
final YuvImageRenderer yuvImageRenderer = new YuvImageRenderer(
instance.surface, instance.yuvImageRenderers.size(),
- x, y, width, height, scalingType, mirror);
+ x, y, width, height, scalingType, mirror, drawer);
synchronized (instance.yuvImageRenderers) {
if (instance.onSurfaceCreatedCalled) {
// onSurfaceCreated has already been called for VideoRendererGui -
@@ -498,6 +497,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
// rendering list.
final CountDownLatch countDownLatch = new CountDownLatch(1);
instance.surface.queueEvent(new Runnable() {
+ @Override
public void run() {
yuvImageRenderer.createTextures();
yuvImageRenderer.setScreenSize(
@@ -608,13 +608,16 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
Logging.d(TAG, "VideoRendererGui.onSurfaceCreated");
// Store render EGL context.
synchronized (VideoRendererGui.class) {
- eglContext = ((EGL10) EGLContext.getEGL()).eglGetCurrentContext();
+ if (EglBase14.isEGL14Supported()) {
+ eglContext = new EglBase14.Context(EGL14.eglGetCurrentContext());
+ } else {
+ eglContext = new EglBase10.Context(((EGL10) EGLContext.getEGL()).eglGetCurrentContext());
+ }
+
Logging.d(TAG, "VideoRendererGui EGL Context: " + eglContext);
}
synchronized (yuvImageRenderers) {
- // Create drawer for YUV/OES frames.
- drawer = new GlRectDrawer();
// Create textures for all images.
for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
yuvImageRenderer.createTextures();
@@ -655,7 +658,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
synchronized (yuvImageRenderers) {
for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
- yuvImageRenderer.draw(drawer);
+ yuvImageRenderer.draw();
}
}
}
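Two API shifts in this file are worth spelling out: getEGLContext() becomes getEglBaseContext() and returns the new EglBase.Context wrapper (backed by either EGL14 or EGL10, as the onSurfaceCreated hunk shows), and create() gains an overload taking a caller-supplied RendererCommon.GlDrawer that the renderer owns and releases. The GlDrawer draw calls now also take an explicit viewport rectangle, replacing the shared glViewport() call. A hedged usage sketch of the new overload (the layout percentages are arbitrary):

    // The old six-argument create() now forwards to this overload with a
    // default GlRectDrawer; the renderer calls release() on the drawer.
    YuvImageRenderer remoteRenderer = VideoRendererGui.create(
        0, 0, 100, 100, RendererCommon.ScalingType.SCALE_ASPECT_FIT,
        false /* mirror */, new GlRectDrawer());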
diff --git a/talk/app/webrtc/java/jni/androidmediacodeccommon.h b/talk/app/webrtc/java/jni/androidmediacodeccommon.h
index 348a716496..92ea135f12 100644
--- a/talk/app/webrtc/java/jni/androidmediacodeccommon.h
+++ b/talk/app/webrtc/java/jni/androidmediacodeccommon.h
@@ -72,6 +72,8 @@ enum { kMediaCodecTimeoutMs = 1000 };
enum { kMediaCodecStatisticsIntervalMs = 3000 };
// Maximum number of pending frames for VP8 decoder.
enum { kMaxPendingFramesVp8 = 1 };
+// Maximum number of pending frames for VP9 decoder.
+enum { kMaxPendingFramesVp9 = 1 };
// Maximum number of pending frames for H.264 decoder.
enum { kMaxPendingFramesH264 = 30 };
// Maximum number of decoded frames for which per-frame logging is enabled.
diff --git a/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc b/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
index b664f16e2e..c3d287ce0d 100644
--- a/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
+++ b/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
@@ -33,14 +33,15 @@
#include "talk/app/webrtc/java/jni/androidmediacodeccommon.h"
#include "talk/app/webrtc/java/jni/classreferenceholder.h"
#include "talk/app/webrtc/java/jni/native_handle_impl.h"
+#include "talk/app/webrtc/java/jni/surfacetexturehelper_jni.h"
#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/base/thread.h"
#include "webrtc/base/timeutils.h"
-#include "webrtc/common_video/interface/i420_buffer_pool.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "webrtc/common_video/include/i420_buffer_pool.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/system_wrappers/include/logcat_trace_context.h"
#include "webrtc/system_wrappers/include/tick_util.h"
#include "third_party/libyuv/include/libyuv/convert.h"
@@ -62,6 +63,7 @@ using webrtc::VideoCodec;
using webrtc::VideoCodecType;
using webrtc::kVideoCodecH264;
using webrtc::kVideoCodecVP8;
+using webrtc::kVideoCodecVP9;
namespace webrtc_jni {
@@ -87,9 +89,14 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
int32_t Release() override;
int32_t Reset() override;
+
+ bool PrefersLateDecoding() const override { return true; }
+
// rtc::MessageHandler implementation.
void OnMessage(rtc::Message* msg) override;
+ const char* ImplementationName() const override;
+
private:
// CHECK-fail if not running on |codec_thread_|.
void CheckOnCodecThread();
@@ -105,13 +112,17 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
// Type of video codec.
VideoCodecType codecType_;
+ // Render EGL context - owned by factory, should not be allocated/destroyed
+ // by VideoDecoder.
+ jobject render_egl_context_;
+
bool key_frame_required_;
bool inited_;
bool sw_fallback_required_;
bool use_surface_;
VideoCodec codec_;
webrtc::I420BufferPool decoded_frame_pool_;
- NativeHandleImpl native_handle_;
+ rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_;
DecodedImageCallback* callback_;
int frames_received_; // Number of frames received by decoder.
int frames_decoded_; // Number of frames decoded by decoder.
@@ -120,10 +131,6 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
int current_bytes_; // Encoded bytes in the current statistics interval.
int current_decoding_time_ms_; // Overall decoding time in the current second
uint32_t max_pending_frames_; // Maximum number of pending input frames
- std::vector<int32_t> timestamps_;
- std::vector<int64_t> ntp_times_ms_;
- std::vector<int64_t> frame_rtc_times_ms_; // Time when video frame is sent to
- // decoder input.
// State that is constant for the lifetime of this object once the ctor
// returns.
@@ -134,7 +141,8 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
jmethodID j_release_method_;
jmethodID j_dequeue_input_buffer_method_;
jmethodID j_queue_input_buffer_method_;
- jmethodID j_dequeue_output_buffer_method_;
+ jmethodID j_dequeue_byte_buffer_method_;
+ jmethodID j_dequeue_texture_buffer_method_;
jmethodID j_return_decoded_byte_buffer_method_;
// MediaCodecVideoDecoder fields.
jfieldID j_input_buffers_field_;
@@ -144,24 +152,23 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
jfieldID j_height_field_;
jfieldID j_stride_field_;
jfieldID j_slice_height_field_;
- jfieldID j_surface_texture_field_;
// MediaCodecVideoDecoder.DecodedTextureBuffer fields.
- jfieldID j_textureID_field_;
- jfieldID j_texture_presentation_timestamp_us_field_;
- // MediaCodecVideoDecoder.DecodedByteBuffer fields.
+ jfieldID j_texture_id_field_;
+ jfieldID j_transform_matrix_field_;
+ jfieldID j_texture_timestamp_ms_field_;
+ jfieldID j_texture_ntp_timestamp_ms_field_;
+ jfieldID j_texture_decode_time_ms_field_;
+ jfieldID j_texture_frame_delay_ms_field_;
+ // MediaCodecVideoDecoder.DecodedOutputBuffer fields.
jfieldID j_info_index_field_;
jfieldID j_info_offset_field_;
jfieldID j_info_size_field_;
- jfieldID j_info_presentation_timestamp_us_field_;
+ jfieldID j_info_timestamp_ms_field_;
+ jfieldID j_info_ntp_timestamp_ms_field_;
+ jfieldID j_byte_buffer_decode_time_ms_field_;
// Global references; must be deleted in Release().
std::vector<jobject> input_buffers_;
- jobject surface_texture_;
- jobject previous_surface_texture_;
-
- // Render EGL context - owned by factory, should not be allocated/destroyed
- // by VideoDecoder.
- jobject render_egl_context_;
};
MediaCodecVideoDecoder::MediaCodecVideoDecoder(
@@ -171,8 +178,6 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
key_frame_required_(true),
inited_(false),
sw_fallback_required_(false),
- surface_texture_(NULL),
- previous_surface_texture_(NULL),
codec_thread_(new Thread()),
j_media_codec_video_decoder_class_(
jni,
@@ -191,19 +196,22 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
j_init_decode_method_ = GetMethodID(
jni, *j_media_codec_video_decoder_class_, "initDecode",
"(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;"
- "IILjavax/microedition/khronos/egl/EGLContext;)Z");
+ "IILorg/webrtc/SurfaceTextureHelper;)Z");
j_release_method_ =
GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V");
j_dequeue_input_buffer_method_ = GetMethodID(
jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I");
j_queue_input_buffer_method_ = GetMethodID(
- jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJ)Z");
- j_dequeue_output_buffer_method_ = GetMethodID(
+ jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJJJ)Z");
+ j_dequeue_byte_buffer_method_ = GetMethodID(
jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer",
- "(I)Ljava/lang/Object;");
+ "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer;");
+ j_dequeue_texture_buffer_method_ = GetMethodID(
+ jni, *j_media_codec_video_decoder_class_, "dequeueTextureBuffer",
+ "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer;");
j_return_decoded_byte_buffer_method_ =
GetMethodID(jni, *j_media_codec_video_decoder_class_,
- "returnDecodedByteBuffer", "(I)V");
+ "returnDecodedOutputBuffer", "(I)V");
j_input_buffers_field_ = GetFieldID(
jni, *j_media_codec_video_decoder_class_,
@@ -221,28 +229,36 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
jni, *j_media_codec_video_decoder_class_, "stride", "I");
j_slice_height_field_ = GetFieldID(
jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I");
- j_surface_texture_field_ = GetFieldID(
- jni, *j_media_codec_video_decoder_class_, "surfaceTexture",
- "Landroid/graphics/SurfaceTexture;");
- jclass j_decoder_decoded_texture_buffer_class = FindClass(jni,
+ jclass j_decoded_texture_buffer_class = FindClass(jni,
"org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer");
- j_textureID_field_ = GetFieldID(
- jni, j_decoder_decoded_texture_buffer_class, "textureID", "I");
- j_texture_presentation_timestamp_us_field_ =
- GetFieldID(jni, j_decoder_decoded_texture_buffer_class,
- "presentationTimestampUs", "J");
-
- jclass j_decoder_decoded_byte_buffer_class = FindClass(jni,
- "org/webrtc/MediaCodecVideoDecoder$DecodedByteBuffer");
+ j_texture_id_field_ = GetFieldID(
+ jni, j_decoded_texture_buffer_class, "textureID", "I");
+ j_transform_matrix_field_ = GetFieldID(
+ jni, j_decoded_texture_buffer_class, "transformMatrix", "[F");
+ j_texture_timestamp_ms_field_ = GetFieldID(
+ jni, j_decoded_texture_buffer_class, "timeStampMs", "J");
+ j_texture_ntp_timestamp_ms_field_ = GetFieldID(
+ jni, j_decoded_texture_buffer_class, "ntpTimeStampMs", "J");
+ j_texture_decode_time_ms_field_ = GetFieldID(
+ jni, j_decoded_texture_buffer_class, "decodeTimeMs", "J");
+ j_texture_frame_delay_ms_field_ = GetFieldID(
+ jni, j_decoded_texture_buffer_class, "frameDelayMs", "J");
+
+ jclass j_decoded_output_buffer_class = FindClass(jni,
+ "org/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer");
j_info_index_field_ = GetFieldID(
- jni, j_decoder_decoded_byte_buffer_class, "index", "I");
+ jni, j_decoded_output_buffer_class, "index", "I");
j_info_offset_field_ = GetFieldID(
- jni, j_decoder_decoded_byte_buffer_class, "offset", "I");
+ jni, j_decoded_output_buffer_class, "offset", "I");
j_info_size_field_ = GetFieldID(
- jni, j_decoder_decoded_byte_buffer_class, "size", "I");
- j_info_presentation_timestamp_us_field_ = GetFieldID(
- jni, j_decoder_decoded_byte_buffer_class, "presentationTimestampUs", "J");
+ jni, j_decoded_output_buffer_class, "size", "I");
+ j_info_timestamp_ms_field_ = GetFieldID(
+ jni, j_decoded_output_buffer_class, "timeStampMs", "J");
+ j_info_ntp_timestamp_ms_field_ = GetFieldID(
+ jni, j_decoded_output_buffer_class, "ntpTimeStampMs", "J");
+ j_byte_buffer_decode_time_ms_field_ = GetFieldID(
+ jni, j_decoded_output_buffer_class, "decodeTimeMs", "J");
CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed";
use_surface_ = (render_egl_context_ != NULL);
@@ -254,14 +270,6 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
MediaCodecVideoDecoder::~MediaCodecVideoDecoder() {
// Call Release() to ensure no more callbacks to us after we are deleted.
Release();
- // Delete global references.
- JNIEnv* jni = AttachCurrentThreadIfNeeded();
- if (previous_surface_texture_ != NULL) {
- jni->DeleteGlobalRef(previous_surface_texture_);
- }
- if (surface_texture_ != NULL) {
- jni->DeleteGlobalRef(surface_texture_);
- }
}
int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst,
@@ -312,6 +320,21 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
frames_received_ = 0;
frames_decoded_ = 0;
+ jobject java_surface_texture_helper_ = nullptr;
+ if (use_surface_) {
+ java_surface_texture_helper_ = jni->CallStaticObjectMethod(
+ FindClass(jni, "org/webrtc/SurfaceTextureHelper"),
+ GetStaticMethodID(jni,
+ FindClass(jni, "org/webrtc/SurfaceTextureHelper"),
+ "create",
+ "(Lorg/webrtc/EglBase$Context;)"
+ "Lorg/webrtc/SurfaceTextureHelper;"),
+ render_egl_context_);
+ RTC_CHECK(java_surface_texture_helper_ != nullptr);
+ surface_texture_helper_ = new rtc::RefCountedObject<SurfaceTextureHelper>(
+ jni, java_surface_texture_helper_);
+ }
+
jobject j_video_codec_enum = JavaEnumFromIndex(
jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_);
bool success = jni->CallBooleanMethod(
@@ -320,7 +343,7 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
j_video_codec_enum,
codec_.width,
codec_.height,
- use_surface_ ? render_egl_context_ : nullptr);
+ java_surface_texture_helper_);
if (CheckException(jni) || !success) {
ALOGE << "Codec initialization error - fallback to SW codec.";
sw_fallback_required_ = true;
@@ -332,6 +355,9 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
case kVideoCodecVP8:
max_pending_frames_ = kMaxPendingFramesVp8;
break;
+ case kVideoCodecVP9:
+ max_pending_frames_ = kMaxPendingFramesVp9;
+ break;
case kVideoCodecH264:
max_pending_frames_ = kMaxPendingFramesH264;
break;
@@ -342,9 +368,6 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
current_frames_ = 0;
current_bytes_ = 0;
current_decoding_time_ms_ = 0;
- timestamps_.clear();
- ntp_times_ms_.clear();
- frame_rtc_times_ms_.clear();
jobjectArray input_buffers = (jobjectArray)GetObjectField(
jni, *j_media_codec_video_decoder_, j_input_buffers_field_);
@@ -361,15 +384,6 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
}
}
- if (use_surface_) {
- jobject surface_texture = GetObjectField(
- jni, *j_media_codec_video_decoder_, j_surface_texture_field_);
- if (previous_surface_texture_ != NULL) {
- jni->DeleteGlobalRef(previous_surface_texture_);
- }
- previous_surface_texture_ = surface_texture_;
- surface_texture_ = jni->NewGlobalRef(surface_texture);
- }
codec_thread_->PostDelayed(kMediaCodecPollMs, this);
return WEBRTC_VIDEO_CODEC_OK;
@@ -395,6 +409,7 @@ int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() {
}
input_buffers_.clear();
jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_);
+ surface_texture_helper_ = nullptr;
inited_ = false;
rtc::MessageQueueManager::Clear(this);
if (CheckException(jni)) {
@@ -501,19 +516,21 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
// Try to drain the decoder and wait until output is not too
// much behind the input.
- if (frames_received_ > frames_decoded_ + max_pending_frames_) {
+  const int64_t drain_start = GetCurrentTimeMs();
+ while ((frames_received_ > frames_decoded_ + max_pending_frames_) &&
+ (GetCurrentTimeMs() - drain_start) < kMediaCodecTimeoutMs) {
ALOGV("Received: %d. Decoded: %d. Wait for output...",
frames_received_, frames_decoded_);
- if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs * 1000)) {
+ if (!DeliverPendingOutputs(jni, kMediaCodecPollMs)) {
ALOGE << "DeliverPendingOutputs error. Frames received: " <<
frames_received_ << ". Frames decoded: " << frames_decoded_;
return ProcessHWErrorOnCodecThread();
}
- if (frames_received_ > frames_decoded_ + max_pending_frames_) {
- ALOGE << "Output buffer dequeue timeout. Frames received: " <<
- frames_received_ << ". Frames decoded: " << frames_decoded_;
- return ProcessHWErrorOnCodecThread();
- }
+ }
+ if (frames_received_ > frames_decoded_ + max_pending_frames_) {
+ ALOGE << "Output buffer dequeue timeout. Frames received: " <<
+ frames_received_ << ". Frames decoded: " << frames_decoded_;
+ return ProcessHWErrorOnCodecThread();
}
// Get input buffer.
@@ -535,11 +552,14 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
" is bigger than buffer size " << buffer_capacity;
return ProcessHWErrorOnCodecThread();
}
- jlong timestamp_us = (frames_received_ * 1000000) / codec_.maxFramerate;
+ jlong presentation_timestamp_us =
+ (frames_received_ * 1000000) / codec_.maxFramerate;
if (frames_decoded_ < kMaxDecodedLogFrames) {
ALOGD << "Decoder frame in # " << frames_received_ << ". Type: "
<< inputImage._frameType << ". Buffer # " <<
- j_input_buffer_index << ". TS: " << (int)(timestamp_us / 1000)
+ j_input_buffer_index << ". pTS: "
+ << (int)(presentation_timestamp_us / 1000)
+ << ". TS: " << inputImage._timeStamp
<< ". Size: " << inputImage._length;
}
memcpy(buffer, inputImage._buffer, inputImage._length);
@@ -547,16 +567,16 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
// Save input image timestamps for later output.
frames_received_++;
current_bytes_ += inputImage._length;
- timestamps_.push_back(inputImage._timeStamp);
- ntp_times_ms_.push_back(inputImage.ntp_time_ms_);
- frame_rtc_times_ms_.push_back(GetCurrentTimeMs());
// Feed input to decoder.
- bool success = jni->CallBooleanMethod(*j_media_codec_video_decoder_,
- j_queue_input_buffer_method_,
- j_input_buffer_index,
- inputImage._length,
- timestamp_us);
+ bool success = jni->CallBooleanMethod(
+ *j_media_codec_video_decoder_,
+ j_queue_input_buffer_method_,
+ j_input_buffer_index,
+ inputImage._length,
+ presentation_timestamp_us,
+ static_cast<int64_t> (inputImage._timeStamp),
+ inputImage.ntp_time_ms_);
if (CheckException(jni) || !success) {
ALOGE << "queueInputBuffer error";
return ProcessHWErrorOnCodecThread();
@@ -572,16 +592,18 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
}
bool MediaCodecVideoDecoder::DeliverPendingOutputs(
- JNIEnv* jni, int dequeue_timeout_us) {
+ JNIEnv* jni, int dequeue_timeout_ms) {
if (frames_received_ <= frames_decoded_) {
// No need to query for output buffers - decoder is drained.
return true;
}
// Get decoder output.
- jobject j_decoder_output_buffer = jni->CallObjectMethod(
- *j_media_codec_video_decoder_,
- j_dequeue_output_buffer_method_,
- dequeue_timeout_us);
+ jobject j_decoder_output_buffer =
+ jni->CallObjectMethod(*j_media_codec_video_decoder_,
+ use_surface_ ? j_dequeue_texture_buffer_method_
+ : j_dequeue_byte_buffer_method_,
+ dequeue_timeout_ms);
+
if (CheckException(jni)) {
ALOGE << "dequeueOutputBuffer() error";
return false;
@@ -601,19 +623,35 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
j_slice_height_field_);
rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame_buffer;
- long output_timestamps_ms = 0;
+ int64_t output_timestamps_ms = 0;
+ int64_t output_ntp_timestamps_ms = 0;
+ int decode_time_ms = 0;
+ int64_t frame_delayed_ms = 0;
if (use_surface_) {
// Extract data from Java DecodedTextureBuffer.
const int texture_id =
- GetIntField(jni, j_decoder_output_buffer, j_textureID_field_);
- const int64_t timestamp_us =
- GetLongField(jni, j_decoder_output_buffer,
- j_texture_presentation_timestamp_us_field_);
- output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec;
- // Create webrtc::VideoFrameBuffer with native texture handle.
- native_handle_.SetTextureObject(surface_texture_, texture_id);
- frame_buffer = new rtc::RefCountedObject<JniNativeHandleBuffer>(
- &native_handle_, width, height);
+ GetIntField(jni, j_decoder_output_buffer, j_texture_id_field_);
+ if (texture_id != 0) { // |texture_id| == 0 represents a dropped frame.
+ const jfloatArray j_transform_matrix =
+ reinterpret_cast<jfloatArray>(GetObjectField(
+ jni, j_decoder_output_buffer, j_transform_matrix_field_));
+ output_timestamps_ms = GetLongField(jni, j_decoder_output_buffer,
+ j_texture_timestamp_ms_field_);
+ output_ntp_timestamps_ms =
+ GetLongField(jni, j_decoder_output_buffer,
+ j_texture_ntp_timestamp_ms_field_);
+ decode_time_ms = GetLongField(jni, j_decoder_output_buffer,
+ j_texture_decode_time_ms_field_);
+ frame_delayed_ms = GetLongField(jni, j_decoder_output_buffer,
+ j_texture_frame_delay_ms_field_);
+
+ // Create webrtc::VideoFrameBuffer with native texture handle.
+ frame_buffer = surface_texture_helper_->CreateTextureFrame(
+ width, height, NativeHandleImpl(jni, texture_id, j_transform_matrix));
+ }
} else {
// Extract data from Java ByteBuffer and create output yuv420 frame -
// for non surface decoding only.
@@ -623,9 +661,14 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
GetIntField(jni, j_decoder_output_buffer, j_info_offset_field_);
const int output_buffer_size =
GetIntField(jni, j_decoder_output_buffer, j_info_size_field_);
- const int64_t timestamp_us = GetLongField(
- jni, j_decoder_output_buffer, j_info_presentation_timestamp_us_field_);
- output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec;
+ output_timestamps_ms = GetLongField(jni, j_decoder_output_buffer,
+ j_info_timestamp_ms_field_);
+ output_ntp_timestamps_ms =
+ GetLongField(jni, j_decoder_output_buffer,
+ j_info_ntp_timestamp_ms_field_);
+
+ decode_time_ms = GetLongField(jni, j_decoder_output_buffer,
+ j_byte_buffer_decode_time_ms_field_);
if (output_buffer_size < width * height * 3 / 2) {
ALOGE << "Insufficient output buffer size: " << output_buffer_size;
@@ -683,41 +726,31 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
j_return_decoded_byte_buffer_method_,
output_buffer_index);
if (CheckException(jni)) {
- ALOGE << "returnDecodedByteBuffer error";
+ ALOGE << "returnDecodedOutputBuffer error";
return false;
}
}
VideoFrame decoded_frame(frame_buffer, 0, 0, webrtc::kVideoRotation_0);
+ decoded_frame.set_timestamp(output_timestamps_ms);
+ decoded_frame.set_ntp_time_ms(output_ntp_timestamps_ms);
- // Get frame timestamps from a queue.
- if (timestamps_.size() > 0) {
- decoded_frame.set_timestamp(timestamps_.front());
- timestamps_.erase(timestamps_.begin());
- }
- if (ntp_times_ms_.size() > 0) {
- decoded_frame.set_ntp_time_ms(ntp_times_ms_.front());
- ntp_times_ms_.erase(ntp_times_ms_.begin());
- }
- int64_t frame_decoding_time_ms = 0;
- if (frame_rtc_times_ms_.size() > 0) {
- frame_decoding_time_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front();
- frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
- }
if (frames_decoded_ < kMaxDecodedLogFrames) {
ALOGD << "Decoder frame out # " << frames_decoded_ << ". " << width <<
" x " << height << ". " << stride << " x " << slice_height <<
- ". Color: " << color_format << ". TS:" << (int)output_timestamps_ms <<
- ". DecTime: " << (int)frame_decoding_time_ms;
+ ". Color: " << color_format << ". TS:" << decoded_frame.timestamp() <<
+ ". DecTime: " << (int)decode_time_ms <<
+ ". DelayTime: " << (int)frame_delayed_ms;
}
// Calculate and print decoding statistics - every 3 seconds.
frames_decoded_++;
current_frames_++;
- current_decoding_time_ms_ += frame_decoding_time_ms;
+ current_decoding_time_ms_ += decode_time_ms;
int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
current_frames_ > 0) {
- ALOGD << "Decoded frames: " << frames_decoded_ << ". Bitrate: " <<
+ ALOGD << "Decoded frames: " << frames_decoded_ << ". Received frames: "
+ << frames_received_ << ". Bitrate: " <<
(current_bytes_ * 8 / statistic_time_ms) << " kbps, fps: " <<
((current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms)
<< ". decTime: " << (current_decoding_time_ms_ / current_frames_) <<
@@ -728,12 +761,15 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
current_decoding_time_ms_ = 0;
}
- // Callback - output decoded frame.
- const int32_t callback_status = callback_->Decoded(decoded_frame);
- if (callback_status > 0) {
- ALOGE << "callback error";
+  // |decoded_frame.IsZeroSize()| returns true when a frame has been dropped.
+ if (!decoded_frame.IsZeroSize()) {
+ // Callback - output decoded frame.
+ const int32_t callback_status =
+ callback_->Decoded(decoded_frame, decode_time_ms);
+ if (callback_status > 0) {
+ ALOGE << "callback error";
+ }
}
-
return true;
}
@@ -790,6 +826,17 @@ MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory() :
supported_codec_types_.push_back(kVideoCodecVP8);
}
+ bool is_vp9_hw_supported = jni->CallStaticBooleanMethod(
+ j_decoder_class,
+ GetStaticMethodID(jni, j_decoder_class, "isVp9HwSupported", "()Z"));
+ if (CheckException(jni)) {
+ is_vp9_hw_supported = false;
+ }
+ if (is_vp9_hw_supported) {
+ ALOGD << "VP9 HW Decoder supported.";
+ supported_codec_types_.push_back(kVideoCodecVP9);
+ }
+
bool is_h264_hw_supported = jni->CallStaticBooleanMethod(
j_decoder_class,
GetStaticMethodID(jni, j_decoder_class, "isH264HwSupported", "()Z"));
@@ -825,7 +872,7 @@ void MediaCodecVideoDecoderFactory::SetEGLContext(
render_egl_context_ = NULL;
} else {
jclass j_egl_context_class =
- FindClass(jni, "javax/microedition/khronos/egl/EGLContext");
+ FindClass(jni, "org/webrtc/EglBase$Context");
if (!jni->IsInstanceOf(render_egl_context_, j_egl_context_class)) {
ALOGE << "Wrong EGL Context.";
jni->DeleteGlobalRef(render_egl_context_);
@@ -841,7 +888,7 @@ void MediaCodecVideoDecoderFactory::SetEGLContext(
webrtc::VideoDecoder* MediaCodecVideoDecoderFactory::CreateVideoDecoder(
VideoCodecType type) {
if (supported_codec_types_.empty()) {
- ALOGE << "No HW video decoder for type " << (int)type;
+ ALOGW << "No HW video decoder for type " << (int)type;
return NULL;
}
for (VideoCodecType codec_type : supported_codec_types_) {
@@ -851,7 +898,7 @@ webrtc::VideoDecoder* MediaCodecVideoDecoderFactory::CreateVideoDecoder(
AttachCurrentThreadIfNeeded(), type, render_egl_context_);
}
}
- ALOGE << "Can not find HW video decoder for type " << (int)type;
+ ALOGW << "Can not find HW video decoder for type " << (int)type;
return NULL;
}
@@ -861,5 +908,9 @@ void MediaCodecVideoDecoderFactory::DestroyVideoDecoder(
delete decoder;
}
+const char* MediaCodecVideoDecoder::ImplementationName() const {
+ return "MediaCodec";
+}
+
} // namespace webrtc_jni
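The GetFieldID() lookups in this file pin down the shape of the Java-side MediaCodecVideoDecoder.DecodedOutputBuffer that replaces DecodedByteBuffer. A hedged reconstruction from those lookups alone ("index"/"offset"/"size" as I, "timeStampMs"/"ntpTimeStampMs"/"decodeTimeMs" as J); the constructor and visibility are assumptions:

    // Reconstruction from the JNI field lookups above; not the
    // authoritative Java source.
    private static class DecodedOutputBuffer {
      public final int index;
      public final int offset;
      public final int size;
      public final long timeStampMs;
      public final long ntpTimeStampMs;
      public final long decodeTimeMs;

      public DecodedOutputBuffer(int index, int offset, int size,
          long timeStampMs, long ntpTimeStampMs, long decodeTimeMs) {
        this.index = index;
        this.offset = offset;
        this.size = size;
        this.timeStampMs = timeStampMs;
        this.ntpTimeStampMs = ntpTimeStampMs;
        this.decodeTimeMs = decodeTimeMs;
      }
    }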
diff --git a/talk/app/webrtc/java/jni/androidmediaencoder_jni.cc b/talk/app/webrtc/java/jni/androidmediaencoder_jni.cc
index ac349e7faf..64831c3174 100644
--- a/talk/app/webrtc/java/jni/androidmediaencoder_jni.cc
+++ b/talk/app/webrtc/java/jni/androidmediaencoder_jni.cc
@@ -29,14 +29,16 @@
#include "talk/app/webrtc/java/jni/androidmediaencoder_jni.h"
#include "talk/app/webrtc/java/jni/classreferenceholder.h"
#include "talk/app/webrtc/java/jni/androidmediacodeccommon.h"
+#include "talk/app/webrtc/java/jni/native_handle_impl.h"
#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/thread.h"
+#include "webrtc/base/thread_checker.h"
#include "webrtc/modules/rtp_rtcp/source/h264_bitstream_parser.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
-#include "webrtc/modules/video_coding/utility/include/quality_scaler.h"
-#include "webrtc/modules/video_coding/utility/include/vp8_header_parser.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
+#include "webrtc/modules/video_coding/utility/quality_scaler.h"
+#include "webrtc/modules/video_coding/utility/vp8_header_parser.h"
#include "webrtc/system_wrappers/include/field_trial.h"
#include "webrtc/system_wrappers/include/logcat_trace_context.h"
#include "third_party/libyuv/include/libyuv/convert.h"
@@ -56,6 +58,7 @@ using webrtc::VideoCodec;
using webrtc::VideoCodecType;
using webrtc::kVideoCodecH264;
using webrtc::kVideoCodecVP8;
+using webrtc::kVideoCodecVP9;
namespace webrtc_jni {
@@ -79,7 +82,9 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
public rtc::MessageHandler {
public:
virtual ~MediaCodecVideoEncoder();
- explicit MediaCodecVideoEncoder(JNIEnv* jni, VideoCodecType codecType);
+ MediaCodecVideoEncoder(JNIEnv* jni,
+ VideoCodecType codecType,
+ jobject egl_context);
// webrtc::VideoEncoder implementation. Everything trampolines to
// |codec_thread_| for execution.
@@ -103,13 +108,18 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
int GetTargetFramerate() override;
+ bool SupportsNativeHandle() const override { return true; }
+ const char* ImplementationName() const override;
+
private:
// CHECK-fail if not running on |codec_thread_|.
void CheckOnCodecThread();
- // Release() and InitEncode() in an attempt to restore the codec to an
+ // ResetCodecOnCodecThread() calls ReleaseOnCodecThread() and
+ // InitEncodeOnCodecThread() in an attempt to restore the codec to an
// operable state. Necessary after all manner of OMX-layer errors.
- void ResetCodec();
+ bool ResetCodecOnCodecThread();
// Implementation of webrtc::VideoEncoder methods above, all running on the
// codec thread exclusively.
@@ -117,10 +127,20 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
// If width==0 then this is assumed to be a re-initialization and the
// previously-current values are reused instead of the passed parameters
// (makes it easier to reason about thread-safety).
- int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps);
+ int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps,
+ bool use_surface);
+  // Reconfigures the encoder to match |frame| in width and height. Also
+  // reconfigures if |frame| is a texture frame while the encoder is set up
+  // for byte buffers, or vice versa. Returns false if reconfiguring fails.
+ bool MaybeReconfigureEncoderOnCodecThread(const webrtc::VideoFrame& frame);
int32_t EncodeOnCodecThread(
const webrtc::VideoFrame& input_image,
const std::vector<webrtc::FrameType>* frame_types);
+ bool EncodeByteBufferOnCodecThread(JNIEnv* jni,
+ bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index);
+ bool EncodeTextureOnCodecThread(JNIEnv* jni,
+ bool key_frame, const webrtc::VideoFrame& frame);
+
int32_t RegisterEncodeCompleteCallbackOnCodecThread(
webrtc::EncodedImageCallback* callback);
int32_t ReleaseOnCodecThread();
@@ -150,11 +170,14 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
// State that is constant for the lifetime of this object once the ctor
// returns.
scoped_ptr<Thread> codec_thread_; // Thread on which to operate MediaCodec.
+ rtc::ThreadChecker codec_thread_checker_;
ScopedGlobalRef<jclass> j_media_codec_video_encoder_class_;
ScopedGlobalRef<jobject> j_media_codec_video_encoder_;
jmethodID j_init_encode_method_;
+ jmethodID j_get_input_buffers_method_;
jmethodID j_dequeue_input_buffer_method_;
- jmethodID j_encode_method_;
+ jmethodID j_encode_buffer_method_;
+ jmethodID j_encode_texture_method_;
jmethodID j_release_method_;
jmethodID j_set_rates_method_;
jmethodID j_dequeue_output_buffer_method_;
@@ -170,6 +193,7 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
int width_; // Frame width in pixels.
int height_; // Frame height in pixels.
bool inited_;
+ bool use_surface_;
uint16_t picture_id_;
enum libyuv::FourCC encoder_fourcc_; // Encoder color space format.
int last_set_bitrate_kbps_; // Last-requested bitrate in kbps.
@@ -205,6 +229,16 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
// H264 bitstream parser, used to extract QP from encoded bitstreams.
webrtc::H264BitstreamParser h264_bitstream_parser_;
+
+ // VP9 variables to populate codec specific structure.
+ webrtc::GofInfoVP9 gof_; // Contains each frame's temporal information for
+ // non-flexible VP9 mode.
+ uint8_t tl0_pic_idx_;
+ size_t gof_idx_;
+
+ // EGL context - owned by factory, should not be allocated/destroyed
+ // by MediaCodecVideoEncoder.
+ jobject egl_context_;
};
MediaCodecVideoEncoder::~MediaCodecVideoEncoder() {
@@ -213,11 +247,9 @@ MediaCodecVideoEncoder::~MediaCodecVideoEncoder() {
}
MediaCodecVideoEncoder::MediaCodecVideoEncoder(
- JNIEnv* jni, VideoCodecType codecType) :
+ JNIEnv* jni, VideoCodecType codecType, jobject egl_context) :
codecType_(codecType),
callback_(NULL),
- inited_(false),
- picture_id_(0),
codec_thread_(new Thread()),
j_media_codec_video_encoder_class_(
jni,
@@ -228,7 +260,11 @@ MediaCodecVideoEncoder::MediaCodecVideoEncoder(
GetMethodID(jni,
*j_media_codec_video_encoder_class_,
"<init>",
- "()V"))) {
+ "()V"))),
+ inited_(false),
+ use_surface_(false),
+ picture_id_(0),
+ egl_context_(egl_context) {
ScopedLocalRefFrame local_ref_frame(jni);
// It would be nice to avoid spinning up a new thread per MediaCodec, and
// instead re-use e.g. the PeerConnectionFactory's |worker_thread_|, but bug
@@ -239,19 +275,27 @@ MediaCodecVideoEncoder::MediaCodecVideoEncoder(
// thread.
codec_thread_->SetName("MediaCodecVideoEncoder", NULL);
RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoEncoder";
-
+ codec_thread_checker_.DetachFromThread();
jclass j_output_buffer_info_class =
FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
j_init_encode_method_ = GetMethodID(
jni,
*j_media_codec_video_encoder_class_,
"initEncode",
- "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;IIII)"
- "[Ljava/nio/ByteBuffer;");
+ "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;"
+ "IIIILorg/webrtc/EglBase14$Context;)Z");
+ j_get_input_buffers_method_ = GetMethodID(
+ jni,
+ *j_media_codec_video_encoder_class_,
+ "getInputBuffers",
+ "()[Ljava/nio/ByteBuffer;");
j_dequeue_input_buffer_method_ = GetMethodID(
jni, *j_media_codec_video_encoder_class_, "dequeueInputBuffer", "()I");
- j_encode_method_ = GetMethodID(
- jni, *j_media_codec_video_encoder_class_, "encode", "(ZIIJ)Z");
+ j_encode_buffer_method_ = GetMethodID(
+ jni, *j_media_codec_video_encoder_class_, "encodeBuffer", "(ZIIJ)Z");
+ j_encode_texture_method_ = GetMethodID(
+ jni, *j_media_codec_video_encoder_class_, "encodeTexture",
+ "(ZI[FJ)Z");
j_release_method_ =
GetMethodID(jni, *j_media_codec_video_encoder_class_, "release", "()V");
j_set_rates_method_ = GetMethodID(
@@ -275,6 +319,7 @@ MediaCodecVideoEncoder::MediaCodecVideoEncoder(
j_info_presentation_timestamp_us_field_ = GetFieldID(
jni, j_output_buffer_info_class, "presentationTimestampUs", "J");
CHECK_EXCEPTION(jni) << "MediaCodecVideoEncoder ctor failed";
+ srand(time(NULL));
AllowBlockingCalls();
}
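The encoder's single encode() entry point is split into encodeBuffer() and encodeTexture() here, initEncode() now returns a boolean and takes an EglBase14.Context for the surface path, and input buffers are fetched separately through getInputBuffers(). The Java-side signatures implied by the descriptor strings, with parameter names as assumptions ("(ZIIJ)Z" and "(ZI[FJ)Z" fix the types):

    // Implied Java counterparts of the GetMethodID lookups above;
    // parameter names are guesses, types come from the descriptors.
    boolean encodeBuffer(boolean isKeyframe, int inputBuffer, int size,
        long presentationTimestampUs);
    boolean encodeTexture(boolean isKeyframe, int oesTextureId,
        float[] transformationMatrix, long presentationTimestampUs);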
@@ -295,8 +340,8 @@ int32_t MediaCodecVideoEncoder::InitEncode(
<< codecType_;
ALOGD << "InitEncode request";
- scale_ = webrtc::field_trial::FindFullName(
- "WebRTC-MediaCodecVideoEncoder-AutomaticResize") == "Enabled";
+ scale_ = (codecType_ != kVideoCodecVP9) && (webrtc::field_trial::FindFullName(
+ "WebRTC-MediaCodecVideoEncoder-AutomaticResize") == "Enabled");
ALOGD << "Encoder automatic resize " << (scale_ ? "enabled" : "disabled");
if (scale_) {
if (codecType_ == kVideoCodecVP8) {
@@ -331,7 +376,8 @@ int32_t MediaCodecVideoEncoder::InitEncode(
codec_settings->width,
codec_settings->height,
codec_settings->startBitrate,
- codec_settings->maxFramerate));
+ codec_settings->maxFramerate,
+ false /* use_surface */));
}
int32_t MediaCodecVideoEncoder::Encode(
@@ -374,6 +420,7 @@ int32_t MediaCodecVideoEncoder::SetRates(uint32_t new_bit_rate,
}
void MediaCodecVideoEncoder::OnMessage(rtc::Message* msg) {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
@@ -381,7 +428,6 @@ void MediaCodecVideoEncoder::OnMessage(rtc::Message* msg) {
// functor), so expect no ID/data.
RTC_CHECK(!msg->message_id) << "Unexpected message!";
RTC_CHECK(!msg->pdata) << "Unexpected message!";
- CheckOnCodecThread();
if (!inited_) {
return;
}
@@ -393,26 +439,24 @@ void MediaCodecVideoEncoder::OnMessage(rtc::Message* msg) {
codec_thread_->PostDelayed(kMediaCodecPollMs, this);
}
-void MediaCodecVideoEncoder::CheckOnCodecThread() {
- RTC_CHECK(codec_thread_ == ThreadManager::Instance()->CurrentThread())
- << "Running on wrong thread!";
-}
-
-void MediaCodecVideoEncoder::ResetCodec() {
- ALOGE << "ResetCodec";
- if (Release() != WEBRTC_VIDEO_CODEC_OK ||
- codec_thread_->Invoke<int32_t>(Bind(
- &MediaCodecVideoEncoder::InitEncodeOnCodecThread, this,
- width_, height_, 0, 0)) != WEBRTC_VIDEO_CODEC_OK) {
+bool MediaCodecVideoEncoder::ResetCodecOnCodecThread() {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+ ALOGE << "ResetOnCodecThread";
+ if (ReleaseOnCodecThread() != WEBRTC_VIDEO_CODEC_OK ||
+ InitEncodeOnCodecThread(width_, height_, 0, 0, false) !=
+ WEBRTC_VIDEO_CODEC_OK) {
// TODO(fischman): wouldn't it be nice if there was a way to gracefully
// degrade to a SW encoder at this point? There isn't one AFAICT :(
// https://code.google.com/p/webrtc/issues/detail?id=2920
+ return false;
}
+ return true;
}
int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
- int width, int height, int kbps, int fps) {
- CheckOnCodecThread();
+ int width, int height, int kbps, int fps, bool use_surface) {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+ RTC_CHECK(!use_surface || egl_context_ != nullptr) << "EGL context not set.";
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
@@ -448,52 +492,63 @@ int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
render_times_ms_.clear();
frame_rtc_times_ms_.clear();
drop_next_input_frame_ = false;
+ use_surface_ = use_surface;
picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF;
+ gof_.SetGofInfoVP9(webrtc::TemporalStructureMode::kTemporalStructureMode1);
+ tl0_pic_idx_ = static_cast<uint8_t>(rand());
+ gof_idx_ = 0;
+
// We enforce no extra stride/padding in the format creation step.
jobject j_video_codec_enum = JavaEnumFromIndex(
jni, "MediaCodecVideoEncoder$VideoCodecType", codecType_);
- jobjectArray input_buffers = reinterpret_cast<jobjectArray>(
- jni->CallObjectMethod(*j_media_codec_video_encoder_,
- j_init_encode_method_,
- j_video_codec_enum,
- width_,
- height_,
- kbps,
- fps));
- CHECK_EXCEPTION(jni);
- if (IsNull(jni, input_buffers)) {
+ const bool encode_status = jni->CallBooleanMethod(
+ *j_media_codec_video_encoder_, j_init_encode_method_,
+ j_video_codec_enum, width, height, kbps, fps,
+ (use_surface ? egl_context_ : nullptr));
+ if (!encode_status) {
+ ALOGE << "Failed to configure encoder.";
return WEBRTC_VIDEO_CODEC_ERROR;
}
+ CHECK_EXCEPTION(jni);
- inited_ = true;
- switch (GetIntField(jni, *j_media_codec_video_encoder_,
- j_color_format_field_)) {
- case COLOR_FormatYUV420Planar:
- encoder_fourcc_ = libyuv::FOURCC_YU12;
- break;
- case COLOR_FormatYUV420SemiPlanar:
- case COLOR_QCOM_FormatYUV420SemiPlanar:
- case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m:
- encoder_fourcc_ = libyuv::FOURCC_NV12;
- break;
- default:
- LOG(LS_ERROR) << "Wrong color format.";
- return WEBRTC_VIDEO_CODEC_ERROR;
- }
- size_t num_input_buffers = jni->GetArrayLength(input_buffers);
- RTC_CHECK(input_buffers_.empty())
- << "Unexpected double InitEncode without Release";
- input_buffers_.resize(num_input_buffers);
- for (size_t i = 0; i < num_input_buffers; ++i) {
- input_buffers_[i] =
- jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
- int64_t yuv_buffer_capacity =
- jni->GetDirectBufferCapacity(input_buffers_[i]);
+ if (!use_surface) {
+ jobjectArray input_buffers = reinterpret_cast<jobjectArray>(
+ jni->CallObjectMethod(*j_media_codec_video_encoder_,
+ j_get_input_buffers_method_));
CHECK_EXCEPTION(jni);
- RTC_CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity";
+ if (IsNull(jni, input_buffers)) {
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+
+ switch (GetIntField(jni, *j_media_codec_video_encoder_,
+ j_color_format_field_)) {
+ case COLOR_FormatYUV420Planar:
+ encoder_fourcc_ = libyuv::FOURCC_YU12;
+ break;
+ case COLOR_FormatYUV420SemiPlanar:
+ case COLOR_QCOM_FormatYUV420SemiPlanar:
+ case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m:
+ encoder_fourcc_ = libyuv::FOURCC_NV12;
+ break;
+ default:
+ LOG(LS_ERROR) << "Wrong color format.";
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ size_t num_input_buffers = jni->GetArrayLength(input_buffers);
+ RTC_CHECK(input_buffers_.empty())
+ << "Unexpected double InitEncode without Release";
+ input_buffers_.resize(num_input_buffers);
+ for (size_t i = 0; i < num_input_buffers; ++i) {
+ input_buffers_[i] =
+ jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
+ int64_t yuv_buffer_capacity =
+ jni->GetDirectBufferCapacity(input_buffers_[i]);
+ CHECK_EXCEPTION(jni);
+ RTC_CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity";
+ }
}
- CHECK_EXCEPTION(jni);
+ inited_ = true;
codec_thread_->PostDelayed(kMediaCodecPollMs, this);
return WEBRTC_VIDEO_CODEC_OK;
}
@@ -501,40 +556,53 @@ int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
const webrtc::VideoFrame& frame,
const std::vector<webrtc::FrameType>* frame_types) {
- CheckOnCodecThread();
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
if (!inited_) {
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
+
frames_received_++;
if (!DeliverPendingOutputs(jni)) {
- ResetCodec();
- // Continue as if everything's fine.
+ if (!ResetCodecOnCodecThread())
+ return WEBRTC_VIDEO_CODEC_ERROR;
}
if (drop_next_input_frame_) {
- ALOGV("Encoder drop frame - failed callback.");
+ ALOGW << "Encoder drop frame - failed callback.";
drop_next_input_frame_ = false;
return WEBRTC_VIDEO_CODEC_OK;
}
RTC_CHECK(frame_types->size() == 1) << "Unexpected stream count";
- // Check framerate before spatial resolution change.
- if (scale_)
- quality_scaler_.OnEncodeFrame(frame);
- const VideoFrame& input_frame =
- scale_ ? quality_scaler_.GetScaledFrame(frame) : frame;
+ VideoFrame input_frame = frame;
+ if (scale_) {
+ // Check framerate before spatial resolution change.
+ quality_scaler_.OnEncodeFrame(frame);
+ const webrtc::QualityScaler::Resolution scaled_resolution =
+ quality_scaler_.GetScaledResolution();
+ if (scaled_resolution.width != frame.width() ||
+ scaled_resolution.height != frame.height()) {
+ if (frame.native_handle() != nullptr) {
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> scaled_buffer(
+ static_cast<AndroidTextureBuffer*>(
+ frame.video_frame_buffer().get())->ScaleAndRotate(
+ scaled_resolution.width,
+ scaled_resolution.height,
+ webrtc::kVideoRotation_0));
+ input_frame.set_video_frame_buffer(scaled_buffer);
+ } else {
+ input_frame = quality_scaler_.GetScaledFrame(frame);
+ }
+ }
+ }
- if (input_frame.width() != width_ || input_frame.height() != height_) {
- ALOGD << "Frame resolution change from " << width_ << " x " << height_ <<
- " to " << input_frame.width() << " x " << input_frame.height();
- width_ = input_frame.width();
- height_ = input_frame.height();
- ResetCodec();
- return WEBRTC_VIDEO_CODEC_OK;
+ if (!MaybeReconfigureEncoderOnCodecThread(input_frame)) {
+ ALOGE << "Failed to reconfigure encoder.";
+ return WEBRTC_VIDEO_CODEC_ERROR;
}
// Check if we accumulated too many frames in encoder input buffers
@@ -552,65 +620,138 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
}
}
- int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_,
- j_dequeue_input_buffer_method_);
- CHECK_EXCEPTION(jni);
- if (j_input_buffer_index == -1) {
- // Video codec falls behind - no input buffer available.
- ALOGV("Encoder drop frame - no input buffers available");
- frames_dropped_++;
- // Report dropped frame to quality_scaler_.
- OnDroppedFrame();
- return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887.
- }
- if (j_input_buffer_index == -2) {
- ResetCodec();
+ const bool key_frame = frame_types->front() != webrtc::kVideoFrameDelta;
+ bool encode_status = true;
+ if (!input_frame.native_handle()) {
+ int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_,
+ j_dequeue_input_buffer_method_);
+ CHECK_EXCEPTION(jni);
+ if (j_input_buffer_index == -1) {
+ // Video codec falls behind - no input buffer available.
+ ALOGW << "Encoder drop frame - no input buffers available";
+ frames_dropped_++;
+ // Report dropped frame to quality_scaler_.
+ OnDroppedFrame();
+ return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887.
+ }
+ if (j_input_buffer_index == -2) {
+ ResetCodecOnCodecThread();
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ encode_status = EncodeByteBufferOnCodecThread(jni, key_frame, input_frame,
+ j_input_buffer_index);
+ } else {
+ encode_status = EncodeTextureOnCodecThread(jni, key_frame, input_frame);
+ }
+
+ if (!encode_status) {
+    ALOGE << "Failed to encode frame with timestamp: " << input_frame.timestamp();
+ ResetCodecOnCodecThread();
return WEBRTC_VIDEO_CODEC_ERROR;
}
+ last_input_timestamp_ms_ =
+ current_timestamp_us_ / rtc::kNumMicrosecsPerMillisec;
+ frames_in_queue_++;
+
+ // Save input image timestamps for later output
+ timestamps_.push_back(input_frame.timestamp());
+ render_times_ms_.push_back(input_frame.render_time_ms());
+ frame_rtc_times_ms_.push_back(GetCurrentTimeMs());
+ current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
+
+ if (!DeliverPendingOutputs(jni)) {
+    ALOGE << "Failed to deliver pending outputs.";
+ ResetCodecOnCodecThread();
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+bool MediaCodecVideoEncoder::MaybeReconfigureEncoderOnCodecThread(
+ const webrtc::VideoFrame& frame) {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+
+ const bool is_texture_frame = frame.native_handle() != nullptr;
+ const bool reconfigure_due_to_format = is_texture_frame != use_surface_;
+ const bool reconfigure_due_to_size =
+ frame.width() != width_ || frame.height() != height_;
+
+ if (reconfigure_due_to_format) {
+ ALOGD << "Reconfigure encoder due to format change. "
+ << (use_surface_ ?
+ "Reconfiguring to encode from byte buffer." :
+ "Reconfiguring to encode from texture.");
+ }
+ if (reconfigure_due_to_size) {
+ ALOGD << "Reconfigure encoder due to frame resolution change from "
+ << width_ << " x " << height_ << " to " << frame.width() << " x "
+ << frame.height();
+ width_ = frame.width();
+ height_ = frame.height();
+ }
+
+ if (!reconfigure_due_to_format && !reconfigure_due_to_size)
+ return true;
+
+ ReleaseOnCodecThread();
+
+  return InitEncodeOnCodecThread(width_, height_, 0, 0, is_texture_frame) ==
+ WEBRTC_VIDEO_CODEC_OK;
+}
+
+bool MediaCodecVideoEncoder::EncodeByteBufferOnCodecThread(JNIEnv* jni,
+ bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index) {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+ RTC_CHECK(!use_surface_);
+
ALOGV("Encoder frame in # %d. TS: %lld. Q: %d",
frames_received_ - 1, current_timestamp_us_ / 1000, frames_in_queue_);
- jobject j_input_buffer = input_buffers_[j_input_buffer_index];
+ jobject j_input_buffer = input_buffers_[input_buffer_index];
uint8_t* yuv_buffer =
reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer));
CHECK_EXCEPTION(jni);
RTC_CHECK(yuv_buffer) << "Indirect buffer??";
RTC_CHECK(!libyuv::ConvertFromI420(
- input_frame.buffer(webrtc::kYPlane), input_frame.stride(webrtc::kYPlane),
- input_frame.buffer(webrtc::kUPlane), input_frame.stride(webrtc::kUPlane),
- input_frame.buffer(webrtc::kVPlane), input_frame.stride(webrtc::kVPlane),
+ frame.buffer(webrtc::kYPlane), frame.stride(webrtc::kYPlane),
+ frame.buffer(webrtc::kUPlane), frame.stride(webrtc::kUPlane),
+ frame.buffer(webrtc::kVPlane), frame.stride(webrtc::kVPlane),
yuv_buffer, width_, width_, height_, encoder_fourcc_))
<< "ConvertFromI420 failed";
- last_input_timestamp_ms_ = current_timestamp_us_ / 1000;
- frames_in_queue_++;
- // Save input image timestamps for later output
- timestamps_.push_back(input_frame.timestamp());
- render_times_ms_.push_back(input_frame.render_time_ms());
- frame_rtc_times_ms_.push_back(GetCurrentTimeMs());
-
- bool key_frame = frame_types->front() != webrtc::kVideoFrameDelta;
bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
- j_encode_method_,
+ j_encode_buffer_method_,
key_frame,
- j_input_buffer_index,
+ input_buffer_index,
yuv_size_,
current_timestamp_us_);
CHECK_EXCEPTION(jni);
- current_timestamp_us_ += 1000000 / last_set_fps_;
+ return encode_status;
+}
- if (!encode_status || !DeliverPendingOutputs(jni)) {
- ResetCodec();
- return WEBRTC_VIDEO_CODEC_ERROR;
- }
+bool MediaCodecVideoEncoder::EncodeTextureOnCodecThread(JNIEnv* jni,
+ bool key_frame, const webrtc::VideoFrame& frame) {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+ RTC_CHECK(use_surface_);
+ NativeHandleImpl* handle =
+ static_cast<NativeHandleImpl*>(frame.native_handle());
+ jfloatArray sampling_matrix = jni->NewFloatArray(16);
+ jni->SetFloatArrayRegion(sampling_matrix, 0, 16, handle->sampling_matrix);
- return WEBRTC_VIDEO_CODEC_OK;
+ bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
+ j_encode_texture_method_,
+ key_frame,
+ handle->oes_texture_id,
+ sampling_matrix,
+ current_timestamp_us_);
+ CHECK_EXCEPTION(jni);
+ return encode_status;
}
int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread(
webrtc::EncodedImageCallback* callback) {
- CheckOnCodecThread();
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
callback_ = callback;
@@ -618,10 +759,10 @@ int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread(
}
int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
if (!inited_) {
return WEBRTC_VIDEO_CODEC_OK;
}
- CheckOnCodecThread();
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ALOGD << "EncoderReleaseOnCodecThread: Frames received: " <<
frames_received_ << ". Encoded: " << frames_encoded_ <<
@@ -634,13 +775,14 @@ int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() {
CHECK_EXCEPTION(jni);
rtc::MessageQueueManager::Clear(this);
inited_ = false;
+ use_surface_ = false;
ALOGD << "EncoderReleaseOnCodecThread done.";
return WEBRTC_VIDEO_CODEC_OK;
}
int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate,
uint32_t frame_rate) {
- CheckOnCodecThread();
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
if (last_set_bitrate_kbps_ == new_bit_rate &&
last_set_fps_ == frame_rate) {
return WEBRTC_VIDEO_CODEC_OK;
@@ -659,7 +801,7 @@ int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate,
last_set_fps_);
CHECK_EXCEPTION(jni);
if (!ret) {
- ResetCodec();
+ ResetCodecOnCodecThread();
return WEBRTC_VIDEO_CODEC_ERROR;
}
return WEBRTC_VIDEO_CODEC_OK;
@@ -691,6 +833,7 @@ jlong MediaCodecVideoEncoder::GetOutputBufferInfoPresentationTimestampUs(
}
bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
while (true) {
jobject j_output_buffer_info = jni->CallObjectMethod(
*j_media_codec_video_encoder_, j_dequeue_output_buffer_method_);
@@ -702,7 +845,7 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
int output_buffer_index =
GetOutputBufferInfoIndex(jni, j_output_buffer_info);
if (output_buffer_index == -1) {
- ResetCodec();
+ ResetCodecOnCodecThread();
return false;
}
@@ -786,19 +929,42 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
info.codecSpecific.VP8.layerSync = false;
info.codecSpecific.VP8.tl0PicIdx = webrtc::kNoTl0PicIdx;
info.codecSpecific.VP8.keyIdx = webrtc::kNoKeyIdx;
- picture_id_ = (picture_id_ + 1) & 0x7FFF;
+ } else if (codecType_ == kVideoCodecVP9) {
+ if (key_frame) {
+ gof_idx_ = 0;
+ }
+ info.codecSpecific.VP9.picture_id = picture_id_;
+ info.codecSpecific.VP9.inter_pic_predicted = !key_frame;
+ info.codecSpecific.VP9.flexible_mode = false;
+ info.codecSpecific.VP9.ss_data_available = key_frame;
+ info.codecSpecific.VP9.tl0_pic_idx = tl0_pic_idx_++;
+ info.codecSpecific.VP9.temporal_idx = webrtc::kNoTemporalIdx;
+ info.codecSpecific.VP9.spatial_idx = webrtc::kNoSpatialIdx;
+ info.codecSpecific.VP9.temporal_up_switch = true;
+ info.codecSpecific.VP9.inter_layer_predicted = false;
+ info.codecSpecific.VP9.gof_idx =
+ static_cast<uint8_t>(gof_idx_++ % gof_.num_frames_in_gof);
+ info.codecSpecific.VP9.num_spatial_layers = 1;
+ info.codecSpecific.VP9.spatial_layer_resolution_present = false;
+ if (info.codecSpecific.VP9.ss_data_available) {
+ info.codecSpecific.VP9.spatial_layer_resolution_present = true;
+ info.codecSpecific.VP9.width[0] = width_;
+ info.codecSpecific.VP9.height[0] = height_;
+ info.codecSpecific.VP9.gof.CopyGofInfoVP9(gof_);
+ }
}
+ picture_id_ = (picture_id_ + 1) & 0x7FFF;
// Generate a header describing a single fragment.
webrtc::RTPFragmentationHeader header;
memset(&header, 0, sizeof(header));
- if (codecType_ == kVideoCodecVP8) {
+ if (codecType_ == kVideoCodecVP8 || codecType_ == kVideoCodecVP9) {
header.VerifyAndAllocateFragmentationHeader(1);
header.fragmentationOffset[0] = 0;
header.fragmentationLength[0] = image->_length;
header.fragmentationPlType[0] = 0;
header.fragmentationTimeDiff[0] = 0;
- if (scale_) {
+ if (codecType_ == kVideoCodecVP8 && scale_) {
int qp;
if (webrtc::vp8::GetQp(payload, payload_size, &qp))
quality_scaler_.ReportQP(qp);
@@ -829,7 +995,7 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
ALOGE << "Data:" << image->_buffer[0] << " " << image->_buffer[1]
<< " " << image->_buffer[2] << " " << image->_buffer[3]
<< " " << image->_buffer[4] << " " << image->_buffer[5];
- ResetCodec();
+ ResetCodecOnCodecThread();
return false;
}
scPositions[scPositionsLength] = payload_size;
@@ -852,7 +1018,7 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
output_buffer_index);
CHECK_EXCEPTION(jni);
if (!success) {
- ResetCodec();
+ ResetCodecOnCodecThread();
return false;
}
@@ -907,7 +1073,12 @@ int MediaCodecVideoEncoder::GetTargetFramerate() {
return scale_ ? quality_scaler_.GetTargetFramerate() : -1;
}
-MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() {
+const char* MediaCodecVideoEncoder::ImplementationName() const {
+ return "MediaCodec";
+}
+
+MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory()
+ : egl_context_(nullptr) {
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
jclass j_encoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoEncoder");
@@ -923,6 +1094,16 @@ MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() {
MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS));
}
+ bool is_vp9_hw_supported = jni->CallStaticBooleanMethod(
+ j_encoder_class,
+ GetStaticMethodID(jni, j_encoder_class, "isVp9HwSupported", "()Z"));
+ CHECK_EXCEPTION(jni);
+ if (is_vp9_hw_supported) {
+ ALOGD << "VP9 HW Encoder supported.";
+ supported_codecs_.push_back(VideoCodec(kVideoCodecVP9, "VP9",
+ MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS));
+ }
+
bool is_h264_hw_supported = jni->CallStaticBooleanMethod(
j_encoder_class,
GetStaticMethodID(jni, j_encoder_class, "isH264HwSupported", "()Z"));
@@ -936,9 +1117,37 @@ MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() {
MediaCodecVideoEncoderFactory::~MediaCodecVideoEncoderFactory() {}
+void MediaCodecVideoEncoderFactory::SetEGLContext(
+ JNIEnv* jni, jobject render_egl_context) {
+ ALOGD << "MediaCodecVideoEncoderFactory::SetEGLContext";
+ if (egl_context_) {
+ jni->DeleteGlobalRef(egl_context_);
+ egl_context_ = NULL;
+ }
+ if (!IsNull(jni, render_egl_context)) {
+ egl_context_ = jni->NewGlobalRef(render_egl_context);
+ if (CheckException(jni)) {
+ ALOGE << "error calling NewGlobalRef for EGL Context.";
+ egl_context_ = NULL;
+ } else {
+ jclass j_egl_context_class =
+ FindClass(jni, "org/webrtc/EglBase14$Context");
+ if (!jni->IsInstanceOf(egl_context_, j_egl_context_class)) {
+ ALOGE << "Wrong EGL Context.";
+ jni->DeleteGlobalRef(egl_context_);
+ egl_context_ = NULL;
+ }
+ }
+ }
+ if (egl_context_ == NULL) {
+ ALOGW << "NULL VideoDecoder EGL context - HW surface encoding is disabled.";
+ }
+}
+
webrtc::VideoEncoder* MediaCodecVideoEncoderFactory::CreateVideoEncoder(
VideoCodecType type) {
if (supported_codecs_.empty()) {
+ ALOGW << "No HW video encoder for type " << (int)type;
return NULL;
}
for (std::vector<VideoCodec>::const_iterator it = supported_codecs_.begin();
@@ -946,9 +1155,11 @@ webrtc::VideoEncoder* MediaCodecVideoEncoderFactory::CreateVideoEncoder(
if (it->type == type) {
ALOGD << "Create HW video encoder for type " << (int)type <<
" (" << it->name << ").";
- return new MediaCodecVideoEncoder(AttachCurrentThreadIfNeeded(), type);
+ return new MediaCodecVideoEncoder(AttachCurrentThreadIfNeeded(), type,
+ egl_context_);
}
}
+ ALOGW << "Can not find HW video encoder for type " << (int)type;
return NULL;
}
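The VP9 branch added above keeps two small counters: gof_idx_ cycles through the group-of-frames (GOF) table and restarts on every key frame, while picture_id_ wraps at 15 bits on every frame regardless of codec. A minimal standalone sketch of just that arithmetic, assuming a flat single-entry GOF; the struct and main() below are illustrative, not part of the tree:

#include <cstdint>
#include <iostream>

// Counters mirroring gof_idx_ and picture_id_ in the encoder above.
struct Vp9Counters {
  uint16_t picture_id = 0;       // Wraps at 15 bits.
  size_t gof_idx = 0;            // Index into the GOF table.
  size_t num_frames_in_gof = 1;  // A flat, single-entry GOF.

  void OnFrame(bool key_frame) {
    if (key_frame)
      gof_idx = 0;  // Key frames restart the GOF cycle.
    uint8_t idx = static_cast<uint8_t>(gof_idx++ % num_frames_in_gof);
    (void)idx;  // Would be copied into codecSpecific.VP9.gof_idx.
    picture_id = (picture_id + 1) & 0x7FFF;  // 15-bit wrap-around.
  }
};

int main() {
  Vp9Counters counters;
  for (int i = 0; i < 3; ++i)
    counters.OnFrame(i == 0);  // One key frame, then two delta frames.
  std::cout << counters.picture_id << "\n";  // Prints 3.
  return 0;
}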
diff --git a/talk/app/webrtc/java/jni/androidmediaencoder_jni.h b/talk/app/webrtc/java/jni/androidmediaencoder_jni.h
index ff124aa146..8ff8164c3b 100644
--- a/talk/app/webrtc/java/jni/androidmediaencoder_jni.h
+++ b/talk/app/webrtc/java/jni/androidmediaencoder_jni.h
@@ -43,6 +43,8 @@ class MediaCodecVideoEncoderFactory
MediaCodecVideoEncoderFactory();
virtual ~MediaCodecVideoEncoderFactory();
+ void SetEGLContext(JNIEnv* jni, jobject render_egl_context);
+
// WebRtcVideoEncoderFactory implementation.
webrtc::VideoEncoder* CreateVideoEncoder(webrtc::VideoCodecType type)
override;
@@ -50,6 +52,7 @@ class MediaCodecVideoEncoderFactory
void DestroyVideoEncoder(webrtc::VideoEncoder* encoder) override;
private:
+ jobject egl_context_;
// Empty if platform support is lacking, const after ctor returns.
std::vector<VideoCodec> supported_codecs_;
};
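The header now exposes SetEGLContext(), which has to be called before CreateVideoEncoder() for surface (texture) encoding to be available. A hedged sketch of that call order, mirroring the nativeSetVideoHwAccelerationOptions() change later in this patch; the caller is assumed to have already checked that the jobject is an org/webrtc/EglBase14$Context, and error handling is elided:

#include "talk/app/webrtc/java/jni/androidmediaencoder_jni.h"

// Wire an EGL context into the factory, then create an encoder that can
// accept texture frames.
webrtc::VideoEncoder* CreateSurfaceCapableEncoder(
    webrtc_jni::MediaCodecVideoEncoderFactory* factory,
    JNIEnv* jni,
    jobject local_egl_context) {
  factory->SetEGLContext(jni, local_egl_context);
  return factory->CreateVideoEncoder(webrtc::kVideoCodecVP8);
}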
diff --git a/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc b/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc
index 02b9f22015..8813c89de4 100644
--- a/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc
+++ b/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc
@@ -29,8 +29,9 @@
#include "talk/app/webrtc/java/jni/androidvideocapturer_jni.h"
#include "talk/app/webrtc/java/jni/classreferenceholder.h"
#include "talk/app/webrtc/java/jni/native_handle_impl.h"
+#include "talk/app/webrtc/java/jni/surfacetexturehelper_jni.h"
+#include "third_party/libyuv/include/libyuv/convert.h"
#include "webrtc/base/bind.h"
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
namespace webrtc_jni {
@@ -47,15 +48,19 @@ int AndroidVideoCapturerJni::SetAndroidObjects(JNIEnv* jni,
return 0;
}
-AndroidVideoCapturerJni::AndroidVideoCapturerJni(JNIEnv* jni,
- jobject j_video_capturer)
- : j_capturer_global_(jni, j_video_capturer),
+AndroidVideoCapturerJni::AndroidVideoCapturerJni(
+ JNIEnv* jni,
+ jobject j_video_capturer,
+ jobject j_surface_texture_helper)
+ : j_video_capturer_(jni, j_video_capturer),
j_video_capturer_class_(
jni, FindClass(jni, "org/webrtc/VideoCapturerAndroid")),
j_observer_class_(
jni,
FindClass(jni,
"org/webrtc/VideoCapturerAndroid$NativeObserver")),
+ surface_texture_helper_(new rtc::RefCountedObject<SurfaceTextureHelper>(
+ jni, j_surface_texture_helper)),
capturer_(nullptr) {
LOG(LS_INFO) << "AndroidVideoCapturerJni ctor";
thread_checker_.DetachFromThread();
@@ -64,7 +69,7 @@ AndroidVideoCapturerJni::AndroidVideoCapturerJni(JNIEnv* jni,
AndroidVideoCapturerJni::~AndroidVideoCapturerJni() {
LOG(LS_INFO) << "AndroidVideoCapturerJni dtor";
jni()->CallVoidMethod(
- *j_capturer_global_,
+ *j_video_capturer_,
GetMethodID(jni(), *j_video_capturer_class_, "release", "()V"));
CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.release()";
}
@@ -90,7 +95,7 @@ void AndroidVideoCapturerJni::Start(int width, int height, int framerate,
jni(), *j_video_capturer_class_, "startCapture",
"(IIILandroid/content/Context;"
"Lorg/webrtc/VideoCapturerAndroid$CapturerObserver;)V");
- jni()->CallVoidMethod(*j_capturer_global_,
+ jni()->CallVoidMethod(*j_video_capturer_,
m, width, height,
framerate,
application_context_,
@@ -109,7 +114,7 @@ void AndroidVideoCapturerJni::Stop() {
}
jmethodID m = GetMethodID(jni(), *j_video_capturer_class_,
"stopCapture", "()V");
- jni()->CallVoidMethod(*j_capturer_global_, m);
+ jni()->CallVoidMethod(*j_video_capturer_, m);
CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.stopCapture";
LOG(LS_INFO) << "AndroidVideoCapturerJni stop done";
}
@@ -127,19 +132,12 @@ void AndroidVideoCapturerJni::AsyncCapturerInvoke(
invoker_->AsyncInvoke<void>(rtc::Bind(method, capturer_, args...));
}
-void AndroidVideoCapturerJni::ReturnBuffer(int64_t time_stamp) {
- jmethodID m = GetMethodID(jni(), *j_video_capturer_class_,
- "returnBuffer", "(J)V");
- jni()->CallVoidMethod(*j_capturer_global_, m, time_stamp);
- CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.returnBuffer";
-}
-
std::string AndroidVideoCapturerJni::GetSupportedFormats() {
jmethodID m =
GetMethodID(jni(), *j_video_capturer_class_,
"getSupportedFormatsAsJson", "()Ljava/lang/String;");
jstring j_json_caps =
- (jstring) jni()->CallObjectMethod(*j_capturer_global_, m);
+ (jstring) jni()->CallObjectMethod(*j_video_capturer_, m);
CHECK_EXCEPTION(jni()) << "error during supportedFormatsAsJson";
return JavaToStdString(jni(), j_json_caps);
}
@@ -158,46 +156,33 @@ void AndroidVideoCapturerJni::OnMemoryBufferFrame(void* video_frame,
int rotation,
int64_t timestamp_ns) {
const uint8_t* y_plane = static_cast<uint8_t*>(video_frame);
- // Android guarantees that the stride is a multiple of 16.
- // http://developer.android.com/reference/android/hardware/Camera.Parameters.html#setPreviewFormat%28int%29
- int y_stride;
- int uv_stride;
- webrtc::Calc16ByteAlignedStride(width, &y_stride, &uv_stride);
- const uint8_t* v_plane = y_plane + y_stride * height;
- const uint8_t* u_plane =
- v_plane + uv_stride * webrtc::AlignInt(height, 2) / 2;
-
- // Wrap the Java buffer, and call ReturnBuffer() in the wrapped
- // VideoFrameBuffer destructor.
- rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
- new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
- width, height, y_plane, y_stride, u_plane, uv_stride, v_plane,
- uv_stride,
- rtc::Bind(&AndroidVideoCapturerJni::ReturnBuffer, this,
- timestamp_ns)));
+ const uint8_t* vu_plane = y_plane + width * height;
+
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
+ buffer_pool_.CreateBuffer(width, height);
+ libyuv::NV21ToI420(
+ y_plane, width,
+ vu_plane, width,
+ buffer->MutableData(webrtc::kYPlane), buffer->stride(webrtc::kYPlane),
+ buffer->MutableData(webrtc::kUPlane), buffer->stride(webrtc::kUPlane),
+ buffer->MutableData(webrtc::kVPlane), buffer->stride(webrtc::kVPlane),
+ width, height);
AsyncCapturerInvoke("OnIncomingFrame",
&webrtc::AndroidVideoCapturer::OnIncomingFrame,
buffer, rotation, timestamp_ns);
}
-void AndroidVideoCapturerJni::OnTextureFrame(
- int width,
- int height,
- int64_t timestamp_ns,
- const NativeTextureHandleImpl& handle) {
- // TODO(magjed): Fix this. See bug webrtc:4993.
- RTC_NOTREACHED()
- << "The rest of the stack for Android expects the native "
- "handle to be a NativeHandleImpl with a SurfaceTexture, not a "
- "NativeTextureHandleImpl";
+void AndroidVideoCapturerJni::OnTextureFrame(int width,
+ int height,
+ int rotation,
+ int64_t timestamp_ns,
+ const NativeHandleImpl& handle) {
rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
- new rtc::RefCountedObject<AndroidTextureBuffer>(
- width, height, handle,
- rtc::Bind(&AndroidVideoCapturerJni::ReturnBuffer, this,
- timestamp_ns)));
+ surface_texture_helper_->CreateTextureFrame(width, height, handle));
+
AsyncCapturerInvoke("OnIncomingFrame",
&webrtc::AndroidVideoCapturer::OnIncomingFrame,
- buffer, 0, timestamp_ns);
+ buffer, rotation, timestamp_ns);
}
void AndroidVideoCapturerJni::OnOutputFormatRequest(int width,
@@ -216,13 +201,6 @@ JOW(void,
jint width, jint height, jint rotation, jlong timestamp) {
jboolean is_copy = true;
jbyte* bytes = jni->GetByteArrayElements(j_frame, &is_copy);
- // If this is a copy of the original frame, it means that the memory
- // is not direct memory and thus VideoCapturerAndroid does not guarantee
- // that the memory is valid when we have released |j_frame|.
- // TODO(magjed): Move ReleaseByteArrayElements() into ReturnBuffer() and
- // remove this check.
- RTC_CHECK(!is_copy)
- << "NativeObserver_nativeOnFrameCaptured: frame is a copy";
reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)
->OnMemoryBufferFrame(bytes, length, width, height, rotation, timestamp);
jni->ReleaseByteArrayElements(j_frame, bytes, JNI_ABORT);
@@ -231,11 +209,11 @@ JOW(void,
JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeOnTextureFrameCaptured)
(JNIEnv* jni, jclass, jlong j_capturer, jint j_width, jint j_height,
jint j_oes_texture_id, jfloatArray j_transform_matrix,
- jlong j_timestamp) {
+ jint j_rotation, jlong j_timestamp) {
reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)
- ->OnTextureFrame(j_width, j_height, j_timestamp,
- NativeTextureHandleImpl(jni, j_oes_texture_id,
- j_transform_matrix));
+ ->OnTextureFrame(j_width, j_height, j_rotation, j_timestamp,
+ NativeHandleImpl(jni, j_oes_texture_id,
+ j_transform_matrix));
}
JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeCapturerStarted)
@@ -254,9 +232,11 @@ JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeOnOutputFormatRequest)
}
JOW(jlong, VideoCapturerAndroid_nativeCreateVideoCapturer)
- (JNIEnv* jni, jclass, jobject j_video_capturer) {
+ (JNIEnv* jni, jclass,
+ jobject j_video_capturer, jobject j_surface_texture_helper) {
rtc::scoped_refptr<webrtc::AndroidVideoCapturerDelegate> delegate =
- new rtc::RefCountedObject<AndroidVideoCapturerJni>(jni, j_video_capturer);
+ new rtc::RefCountedObject<AndroidVideoCapturerJni>(
+ jni, j_video_capturer, j_surface_texture_helper);
rtc::scoped_ptr<cricket::VideoCapturer> capturer(
new webrtc::AndroidVideoCapturer(delegate));
// Caller takes ownership of the cricket::VideoCapturer* pointer.
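OnMemoryBufferFrame() now treats the camera buffer as NV21: a full-resolution Y plane followed immediately by one interleaved VU plane at half vertical resolution, both with stride equal to width. A small sketch of that layout arithmetic, assuming a tightly packed frame; the struct below is illustrative only:

#include <cstddef>
#include <cstdint>

// View over a tightly packed NV21 frame, as assumed by the capturer above.
struct Nv21View {
  const uint8_t* y;   // width * height bytes.
  const uint8_t* vu;  // Interleaved V/U, width * ((height + 1) / 2) bytes.
  int width;
  int height;
};

Nv21View MakeNv21View(const uint8_t* frame, int width, int height) {
  Nv21View view;
  view.y = frame;
  view.vu = frame + static_cast<size_t>(width) * height;  // As in the diff.
  view.width = width;
  view.height = height;
  return view;
}

// V sample for the 2x2 block containing pixel (x, y); U is the next byte.
uint8_t VAt(const Nv21View& view, int x, int y) {
  return view.vu[(y / 2) * view.width + (x / 2) * 2];
}

libyuv::NV21ToI420() then copies this layout into the pooled I420 buffer with per-plane strides, which is what the rest of the pipeline expects.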
diff --git a/talk/app/webrtc/java/jni/androidvideocapturer_jni.h b/talk/app/webrtc/java/jni/androidvideocapturer_jni.h
index d1eb3a0ad0..89ecacb3a5 100644
--- a/talk/app/webrtc/java/jni/androidvideocapturer_jni.h
+++ b/talk/app/webrtc/java/jni/androidvideocapturer_jni.h
@@ -36,10 +36,12 @@
#include "webrtc/base/asyncinvoker.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/thread_checker.h"
+#include "webrtc/common_video/include/i420_buffer_pool.h"
namespace webrtc_jni {
-class NativeTextureHandleImpl;
+struct NativeHandleImpl;
+class SurfaceTextureHelper;
// AndroidVideoCapturerJni implements AndroidVideoCapturerDelegate.
// The purpose of the delegate is to hide the JNI specifics from the C++ only
@@ -48,7 +50,9 @@ class AndroidVideoCapturerJni : public webrtc::AndroidVideoCapturerDelegate {
public:
static int SetAndroidObjects(JNIEnv* jni, jobject appliction_context);
- AndroidVideoCapturerJni(JNIEnv* jni, jobject j_video_capturer);
+ AndroidVideoCapturerJni(JNIEnv* jni,
+ jobject j_video_capturer,
+ jobject j_surface_texture_helper);
void Start(int width, int height, int framerate,
webrtc::AndroidVideoCapturer* capturer) override;
@@ -60,15 +64,14 @@ class AndroidVideoCapturerJni : public webrtc::AndroidVideoCapturerDelegate {
void OnCapturerStarted(bool success);
void OnMemoryBufferFrame(void* video_frame, int length, int width,
int height, int rotation, int64_t timestamp_ns);
- void OnTextureFrame(int width, int height, int64_t timestamp_ns,
- const NativeTextureHandleImpl& handle);
+ void OnTextureFrame(int width, int height, int rotation, int64_t timestamp_ns,
+ const NativeHandleImpl& handle);
void OnOutputFormatRequest(int width, int height, int fps);
protected:
~AndroidVideoCapturerJni();
private:
- void ReturnBuffer(int64_t time_stamp);
JNIEnv* jni();
// To avoid deducing Args from the 3rd parameter of AsyncCapturerInvoke.
@@ -85,10 +88,13 @@ class AndroidVideoCapturerJni : public webrtc::AndroidVideoCapturerDelegate {
void (webrtc::AndroidVideoCapturer::*method)(Args...),
typename Identity<Args>::type... args);
- const ScopedGlobalRef<jobject> j_capturer_global_;
+ const ScopedGlobalRef<jobject> j_video_capturer_;
const ScopedGlobalRef<jclass> j_video_capturer_class_;
const ScopedGlobalRef<jclass> j_observer_class_;
+ // Used on the Java thread running the camera.
+ webrtc::I420BufferPool buffer_pool_;
+ rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_;
rtc::ThreadChecker thread_checker_;
// |capturer| is a guaranteed to be a valid pointer between a call to
diff --git a/talk/app/webrtc/java/jni/classreferenceholder.cc b/talk/app/webrtc/java/jni/classreferenceholder.cc
index 4c836f8252..5fe8ec707c 100644
--- a/talk/app/webrtc/java/jni/classreferenceholder.cc
+++ b/talk/app/webrtc/java/jni/classreferenceholder.cc
@@ -72,20 +72,21 @@ ClassReferenceHolder::ClassReferenceHolder(JNIEnv* jni) {
LoadClass(jni, "org/webrtc/IceCandidate");
#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
LoadClass(jni, "android/graphics/SurfaceTexture");
- LoadClass(jni, "javax/microedition/khronos/egl/EGLContext");
LoadClass(jni, "org/webrtc/CameraEnumerator");
LoadClass(jni, "org/webrtc/Camera2Enumerator");
LoadClass(jni, "org/webrtc/CameraEnumerationAndroid");
LoadClass(jni, "org/webrtc/VideoCapturerAndroid");
LoadClass(jni, "org/webrtc/VideoCapturerAndroid$NativeObserver");
LoadClass(jni, "org/webrtc/EglBase");
+ LoadClass(jni, "org/webrtc/EglBase$Context");
+ LoadClass(jni, "org/webrtc/EglBase14$Context");
LoadClass(jni, "org/webrtc/NetworkMonitor");
LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder");
LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$VideoCodecType");
LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder");
LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer");
- LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedByteBuffer");
+ LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer");
LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$VideoCodecType");
LoadClass(jni, "org/webrtc/SurfaceTextureHelper");
#endif
diff --git a/talk/app/webrtc/java/jni/jni_helpers.cc b/talk/app/webrtc/java/jni/jni_helpers.cc
index 755698e379..3a7ff21e77 100644
--- a/talk/app/webrtc/java/jni/jni_helpers.cc
+++ b/talk/app/webrtc/java/jni/jni_helpers.cc
@@ -1,4 +1,3 @@
-
/*
* libjingle
* Copyright 2015 Google Inc.
@@ -33,8 +32,6 @@
#include <sys/syscall.h>
#include <unistd.h>
-#include "unicode/unistr.h"
-
namespace webrtc_jni {
static JavaVM* g_jvm = nullptr;
@@ -46,8 +43,6 @@ static pthread_once_t g_jni_ptr_once = PTHREAD_ONCE_INIT;
// were attached by the JVM because of a Java->native call.
static pthread_key_t g_jni_ptr;
-using icu::UnicodeString;
-
JavaVM *GetJVM() {
RTC_CHECK(g_jvm) << "JNI_OnLoad failed to run?";
return g_jvm;
@@ -232,22 +227,20 @@ bool IsNull(JNIEnv* jni, jobject obj) {
// Given a UTF-8 encoded |native| string return a new (UTF-16) jstring.
jstring JavaStringFromStdString(JNIEnv* jni, const std::string& native) {
- UnicodeString ustr(UnicodeString::fromUTF8(native));
- jstring jstr = jni->NewString(ustr.getBuffer(), ustr.length());
- CHECK_EXCEPTION(jni) << "error during NewString";
+ jstring jstr = jni->NewStringUTF(native.c_str());
+ CHECK_EXCEPTION(jni) << "error during NewStringUTF";
return jstr;
}
// Given a (UTF-16) jstring return a new UTF-8 native string.
std::string JavaToStdString(JNIEnv* jni, const jstring& j_string) {
- const jchar* jchars = jni->GetStringChars(j_string, NULL);
- CHECK_EXCEPTION(jni) << "Error during GetStringChars";
- UnicodeString ustr(jchars, jni->GetStringLength(j_string));
- CHECK_EXCEPTION(jni) << "Error during GetStringLength";
- jni->ReleaseStringChars(j_string, jchars);
- CHECK_EXCEPTION(jni) << "Error during ReleaseStringChars";
- std::string ret;
- return ustr.toUTF8String(ret);
+ const char* chars = jni->GetStringUTFChars(j_string, NULL);
+ CHECK_EXCEPTION(jni) << "Error during GetStringUTFChars";
+ std::string str(chars, jni->GetStringUTFLength(j_string));
+ CHECK_EXCEPTION(jni) << "Error during GetStringUTFLength";
+ jni->ReleaseStringUTFChars(j_string, chars);
+ CHECK_EXCEPTION(jni) << "Error during ReleaseStringUTFChars";
+ return str;
}
// Return the (singleton) Java Enum object corresponding to |index|;
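The string helpers now use the plain JNI UTF routines instead of ICU. A usage sketch of the two functions rewritten above; obtaining a valid JNIEnv* (e.g. via AttachCurrentThreadIfNeeded()) is assumed to happen elsewhere:

#include <jni.h>
#include <string>

namespace webrtc_jni {
jstring JavaStringFromStdString(JNIEnv* jni, const std::string& native);
std::string JavaToStdString(JNIEnv* jni, const jstring& j_string);
}  // namespace webrtc_jni

std::string RoundTrip(JNIEnv* jni, const std::string& text) {
  // Note: NewStringUTF() expects modified UTF-8, so code points outside the
  // BMP may not survive the round trip byte-for-byte.
  jstring j_text = webrtc_jni::JavaStringFromStdString(jni, text);
  std::string back = webrtc_jni::JavaToStdString(jni, j_text);
  jni->DeleteLocalRef(j_text);  // Drop the local reference when done.
  return back;  // Equals |text| for BMP-only UTF-8 input.
}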
diff --git a/talk/app/webrtc/java/jni/jni_onload.cc b/talk/app/webrtc/java/jni/jni_onload.cc
new file mode 100644
index 0000000000..9664ecdca6
--- /dev/null
+++ b/talk/app/webrtc/java/jni/jni_onload.cc
@@ -0,0 +1,55 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <jni.h>
+#undef JNIEXPORT
+#define JNIEXPORT __attribute__((visibility("default")))
+
+#include "talk/app/webrtc/java/jni/classreferenceholder.h"
+#include "talk/app/webrtc/java/jni/jni_helpers.h"
+#include "webrtc/base/ssladapter.h"
+
+namespace webrtc_jni {
+
+extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM *jvm, void *reserved) {
+ jint ret = InitGlobalJniVariables(jvm);
+ RTC_DCHECK_GE(ret, 0);
+ if (ret < 0)
+ return -1;
+
+ RTC_CHECK(rtc::InitializeSSL()) << "Failed to InitializeSSL()";
+ LoadGlobalClassReferenceHolder();
+
+ return ret;
+}
+
+extern "C" void JNIEXPORT JNICALL JNI_OnUnLoad(JavaVM *jvm, void *reserved) {
+ FreeGlobalClassReferenceHolder();
+ RTC_CHECK(rtc::CleanupSSL()) << "Failed to CleanupSSL()";
+}
+
+} // namespace webrtc_jni
diff --git a/talk/app/webrtc/java/jni/native_handle_impl.cc b/talk/app/webrtc/java/jni/native_handle_impl.cc
index ac3e0455df..1757184154 100644
--- a/talk/app/webrtc/java/jni/native_handle_impl.cc
+++ b/talk/app/webrtc/java/jni/native_handle_impl.cc
@@ -27,14 +27,65 @@
#include "talk/app/webrtc/java/jni/native_handle_impl.h"
+#include "talk/app/webrtc/java/jni/jni_helpers.h"
+#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
+#include "webrtc/base/keep_ref_until_done.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/base/logging.h"
+
+using webrtc::NativeHandleBuffer;
+
+namespace {
+
+void RotateMatrix(float a[16], webrtc::VideoRotation rotation) {
+ // Texture coordinates are in the range 0 to 1. The transformation of the
+ // last row in each rotation matrix is needed for proper translation, e.g.,
+ // to mirror x we don't replace x by -x, but by 1 - x.
+ switch (rotation) {
+ case webrtc::kVideoRotation_0:
+ break;
+ case webrtc::kVideoRotation_90: {
+ const float ROTATE_90[16] =
+ { a[4], a[5], a[6], a[7],
+ -a[0], -a[1], -a[2], -a[3],
+ a[8], a[9], a[10], a[11],
+ a[0] + a[12], a[1] + a[13], a[2] + a[14], a[3] + a[15]};
+ memcpy(a, ROTATE_90, sizeof(ROTATE_90));
+ } break;
+ case webrtc::kVideoRotation_180: {
+ const float ROTATE_180[16] =
+ { -a[0], -a[1], -a[2], -a[3],
+ -a[4], -a[5], -a[6], -a[7],
+ a[8], a[9], a[10], a[11],
+ a[0] + a[4] + a[12], a[1] + a[5] + a[13], a[2] + a[6] + a[14],
+ a[3] + a[7] + a[15]};
+ memcpy(a, ROTATE_180, sizeof(ROTATE_180));
+ }
+ break;
+ case webrtc::kVideoRotation_270: {
+ const float ROTATE_270[16] =
+ { -a[4], -a[5], -a[6], -a[7],
+ a[0], a[1], a[2], a[3],
+ a[8], a[9], a[10], a[11],
+ a[4] + a[12], a[5] + a[13], a[6] + a[14], a[7] + a[15]};
+ memcpy(a, ROTATE_270, sizeof(ROTATE_270));
+ } break;
+ }
+}
+
+} // anonymous namespace
namespace webrtc_jni {
-NativeTextureHandleImpl::NativeTextureHandleImpl(JNIEnv* jni,
- jint j_oes_texture_id,
- jfloatArray j_transform_matrix)
- : oes_texture_id(j_oes_texture_id) {
+// Align the buffer to 64 bytes for improved performance, e.g. to enable SIMD.
+static const int kBufferAlignment = 64;
+
+NativeHandleImpl::NativeHandleImpl(JNIEnv* jni,
+ jint j_oes_texture_id,
+ jfloatArray j_transform_matrix)
+ : oes_texture_id(j_oes_texture_id) {
RTC_CHECK_EQ(16, jni->GetArrayLength(j_transform_matrix));
jfloat* transform_matrix_ptr =
jni->GetFloatArrayElements(j_transform_matrix, nullptr);
@@ -44,41 +95,15 @@ NativeTextureHandleImpl::NativeTextureHandleImpl(JNIEnv* jni,
jni->ReleaseFloatArrayElements(j_transform_matrix, transform_matrix_ptr, 0);
}
-NativeHandleImpl::NativeHandleImpl() : texture_object_(NULL), texture_id_(-1) {}
-
-void* NativeHandleImpl::GetHandle() {
- return texture_object_;
-}
-
-int NativeHandleImpl::GetTextureId() {
- return texture_id_;
-}
-
-void NativeHandleImpl::SetTextureObject(void* texture_object, int texture_id) {
- texture_object_ = reinterpret_cast<jobject>(texture_object);
- texture_id_ = texture_id;
-}
-
-JniNativeHandleBuffer::JniNativeHandleBuffer(void* native_handle,
- int width,
- int height)
- : NativeHandleBuffer(native_handle, width, height) {}
-
-rtc::scoped_refptr<webrtc::VideoFrameBuffer>
-JniNativeHandleBuffer::NativeToI420Buffer() {
- // TODO(pbos): Implement before using this in the encoder pipeline (or
- // remove the RTC_CHECK() in VideoCapture).
- RTC_NOTREACHED();
- return nullptr;
-}
-
AndroidTextureBuffer::AndroidTextureBuffer(
int width,
int height,
- const NativeTextureHandleImpl& native_handle,
+ const NativeHandleImpl& native_handle,
+ jobject surface_texture_helper,
const rtc::Callback0<void>& no_longer_used)
: webrtc::NativeHandleBuffer(&native_handle_, width, height),
native_handle_(native_handle),
+ surface_texture_helper_(surface_texture_helper),
no_longer_used_cb_(no_longer_used) {}
AndroidTextureBuffer::~AndroidTextureBuffer() {
@@ -87,9 +112,75 @@ AndroidTextureBuffer::~AndroidTextureBuffer() {
rtc::scoped_refptr<webrtc::VideoFrameBuffer>
AndroidTextureBuffer::NativeToI420Buffer() {
- RTC_NOTREACHED()
- << "AndroidTextureBuffer::NativeToI420Buffer not implemented.";
- return nullptr;
+ int uv_width = (width() + 7) / 8;
+ int stride = 8 * uv_width;
+ int uv_height = (height() + 1) / 2;
+ size_t size = stride * (height() + uv_height);
+ // The data is owned by the frame, and the normal case is that the
+ // data is deleted by the frame's destructor callback.
+ //
+ // TODO(nisse): Use an I420BufferPool. We then need to extend that
+ // class, and I420Buffer, to support our memory layout.
+ rtc::scoped_ptr<uint8_t, webrtc::AlignedFreeDeleter> yuv_data(
+ static_cast<uint8_t*>(webrtc::AlignedMalloc(size, kBufferAlignment)));
+ // See SurfaceTextureHelper.java for the required layout.
+ uint8_t* y_data = yuv_data.get();
+ uint8_t* u_data = y_data + height() * stride;
+ uint8_t* v_data = u_data + stride / 2;
+
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> copy =
+ new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
+ width(), height(),
+ y_data, stride,
+ u_data, stride,
+ v_data, stride,
+ rtc::Bind(&webrtc::AlignedFree, yuv_data.release()));
+
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+
+ jmethodID transform_mid = GetMethodID(
+ jni,
+ GetObjectClass(jni, surface_texture_helper_),
+ "textureToYUV",
+ "(Ljava/nio/ByteBuffer;IIII[F)V");
+
+ jobject byte_buffer = jni->NewDirectByteBuffer(y_data, size);
+
+ // TODO(nisse): Keep java transform matrix around.
+ jfloatArray sampling_matrix = jni->NewFloatArray(16);
+ jni->SetFloatArrayRegion(sampling_matrix, 0, 16,
+ native_handle_.sampling_matrix);
+
+ jni->CallVoidMethod(surface_texture_helper_,
+ transform_mid,
+ byte_buffer, width(), height(), stride,
+ native_handle_.oes_texture_id, sampling_matrix);
+ CHECK_EXCEPTION(jni) << "textureToYUV throwed an exception";
+
+ return copy;
+}
+
+rtc::scoped_refptr<AndroidTextureBuffer>
+AndroidTextureBuffer::ScaleAndRotate(int dst_width,
+ int dst_height,
+ webrtc::VideoRotation rotation) {
+ if (width() == dst_width && height() == dst_height &&
+ rotation == webrtc::kVideoRotation_0) {
+ return this;
+ }
+ int rotated_width = (rotation % 180 == 0) ? dst_width : dst_height;
+ int rotated_height = (rotation % 180 == 0) ? dst_height : dst_width;
+
+ // Here we use Bind magic to add a reference count to |this| until the
+ // newly created AndroidTextureBuffer is destroyed.
+ rtc::scoped_refptr<AndroidTextureBuffer> buffer(
+ new rtc::RefCountedObject<AndroidTextureBuffer>(
+ rotated_width, rotated_height, native_handle_,
+ surface_texture_helper_, rtc::KeepRefUntilDone(this)));
+
+ RotateMatrix(buffer->native_handle_.sampling_matrix, rotation);
+ return buffer;
}
} // namespace webrtc_jni
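RotateMatrix() above composes a rotation into the 4x4 column-major texture transform; because texture coordinates live in [0, 1] rather than around the origin, the translation ends up in the last row (mirroring x maps it to 1 - x, not -x). A standalone check of the 90-degree case applied to the identity matrix; main() here is illustrative scaffolding:

#include <cstring>
#include <iostream>

// The 90-degree case of RotateMatrix(), copied for a self-contained check.
void Rotate90(float a[16]) {
  const float r[16] = {a[4],         a[5],         a[6],         a[7],
                       -a[0],        -a[1],        -a[2],        -a[3],
                       a[8],         a[9],         a[10],        a[11],
                       a[0] + a[12], a[1] + a[13], a[2] + a[14], a[3] + a[15]};
  std::memcpy(a, r, sizeof(r));
}

int main() {
  float m[16] = {1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1};
  Rotate90(m);
  // A texture coordinate (u, v) maps to
  //   (m[0] * u + m[4] * v + m[12], m[1] * u + m[5] * v + m[13]) = (1 - v, u),
  // a quarter-turn inside the unit square.
  const float u = 0.25f, v = 0.5f;
  std::cout << m[0] * u + m[4] * v + m[12] << ", "
            << m[1] * u + m[5] * v + m[13] << "\n";  // Prints 0.5, 0.25.
  return 0;
}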
diff --git a/talk/app/webrtc/java/jni/native_handle_impl.h b/talk/app/webrtc/java/jni/native_handle_impl.h
index dd04bc20b1..1d0f601d0d 100644
--- a/talk/app/webrtc/java/jni/native_handle_impl.h
+++ b/talk/app/webrtc/java/jni/native_handle_impl.h
@@ -31,56 +31,44 @@
#include <jni.h>
-#include "webrtc/common_video/interface/video_frame_buffer.h"
+#include "webrtc/common_video/include/video_frame_buffer.h"
+#include "webrtc/common_video/rotation.h"
namespace webrtc_jni {
// Wrapper for texture object.
-struct NativeTextureHandleImpl {
- NativeTextureHandleImpl(JNIEnv* jni,
- jint j_oes_texture_id,
- jfloatArray j_transform_matrix);
+struct NativeHandleImpl {
+ NativeHandleImpl(JNIEnv* jni,
+ jint j_oes_texture_id,
+ jfloatArray j_transform_matrix);
const int oes_texture_id;
float sampling_matrix[16];
};
-// Native handle for SurfaceTexture + texture id.
-class NativeHandleImpl {
- public:
- NativeHandleImpl();
-
- void* GetHandle();
- int GetTextureId();
- void SetTextureObject(void* texture_object, int texture_id);
-
- private:
- jobject texture_object_;
- int32_t texture_id_;
-};
-
-class JniNativeHandleBuffer : public webrtc::NativeHandleBuffer {
- public:
- JniNativeHandleBuffer(void* native_handle, int width, int height);
-
- // TODO(pbos): Override destructor to release native handle, at the moment the
- // native handle is not released based on refcount.
-
- private:
- rtc::scoped_refptr<webrtc::VideoFrameBuffer> NativeToI420Buffer() override;
-};
-
class AndroidTextureBuffer : public webrtc::NativeHandleBuffer {
public:
AndroidTextureBuffer(int width,
int height,
- const NativeTextureHandleImpl& native_handle,
+ const NativeHandleImpl& native_handle,
+ jobject surface_texture_helper,
const rtc::Callback0<void>& no_longer_used);
~AndroidTextureBuffer();
rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override;
+ rtc::scoped_refptr<AndroidTextureBuffer> ScaleAndRotate(
+ int dst_width,
+ int dst_height,
+ webrtc::VideoRotation rotation);
+
private:
- NativeTextureHandleImpl native_handle_;
+ NativeHandleImpl native_handle_;
+ // Raw object pointer, relying on the caller, i.e.,
+ // AndroidVideoCapturerJni or the C++ SurfaceTextureHelper, to keep
+ // a global reference. TODO(nisse): Make this a reference to the C++
+ // SurfaceTextureHelper instead, but that requires some refactoring
+ // of AndroidVideoCapturerJni.
+ jobject surface_texture_helper_;
rtc::Callback0<void> no_longer_used_cb_;
};
diff --git a/talk/app/webrtc/java/jni/peerconnection_jni.cc b/talk/app/webrtc/java/jni/peerconnection_jni.cc
index e75cd553b6..5ea63f74ae 100644
--- a/talk/app/webrtc/java/jni/peerconnection_jni.cc
+++ b/talk/app/webrtc/java/jni/peerconnection_jni.cc
@@ -57,6 +57,7 @@
#define JNIEXPORT __attribute__((visibility("default")))
#include <limits>
+#include <utility>
#include "talk/app/webrtc/java/jni/classreferenceholder.h"
#include "talk/app/webrtc/java/jni/jni_helpers.h"
@@ -74,10 +75,11 @@
#include "talk/media/webrtc/webrtcvideoencoderfactory.h"
#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
+#include "webrtc/base/event_tracer.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/logsinks.h"
-#include "webrtc/base/networkmonitor.h"
#include "webrtc/base/messagequeue.h"
+#include "webrtc/base/networkmonitor.h"
#include "webrtc/base/ssladapter.h"
#include "webrtc/base/stringutils.h"
#include "webrtc/system_wrappers/include/field_trial_default.h"
@@ -141,22 +143,6 @@ static bool factory_static_initialized = false;
static bool video_hw_acceleration_enabled = true;
#endif
-extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM *jvm, void *reserved) {
- jint ret = InitGlobalJniVariables(jvm);
- if (ret < 0)
- return -1;
-
- RTC_CHECK(rtc::InitializeSSL()) << "Failed to InitializeSSL()";
- LoadGlobalClassReferenceHolder();
-
- return ret;
-}
-
-extern "C" void JNIEXPORT JNICALL JNI_OnUnLoad(JavaVM *jvm, void *reserved) {
- FreeGlobalClassReferenceHolder();
- RTC_CHECK(rtc::CleanupSSL()) << "Failed to CleanupSSL()";
-}
-
// Return the (singleton) Java Enum object corresponding to |index|;
// |state_class_fragment| is something like "MediaSource$State".
static jobject JavaEnumFromIndex(
@@ -545,7 +531,7 @@ class SdpObserverWrapper : public T {
protected:
// Common implementation for failure of Set & Create types, distinguished by
// |op| being "Set" or "Create".
- void OnFailure(const std::string& op, const std::string& error) {
+ void DoOnFailure(const std::string& op, const std::string& error) {
jmethodID m = GetMethodID(jni(), *j_observer_class_, "on" + op + "Failure",
"(Ljava/lang/String;)V");
jstring j_error_string = JavaStringFromStdString(jni(), error);
@@ -572,7 +558,7 @@ class CreateSdpObserverWrapper
void OnFailure(const std::string& error) override {
ScopedLocalRefFrame local_ref_frame(jni());
- SdpObserverWrapper::OnFailure(std::string("Create"), error);
+ SdpObserverWrapper::DoOnFailure(std::string("Create"), error);
}
};
@@ -585,7 +571,7 @@ class SetSdpObserverWrapper
void OnFailure(const std::string& error) override {
ScopedLocalRefFrame local_ref_frame(jni());
- SdpObserverWrapper::OnFailure(std::string("Set"), error);
+ SdpObserverWrapper::DoOnFailure(std::string("Set"), error);
}
};
@@ -773,7 +759,7 @@ class JavaVideoRendererWrapper : public VideoRendererInterface {
jni, *j_frame_class_, "<init>", "(III[I[Ljava/nio/ByteBuffer;J)V")),
j_texture_frame_ctor_id_(GetMethodID(
jni, *j_frame_class_, "<init>",
- "(IIILjava/lang/Object;IJ)V")),
+ "(IIII[FJ)V")),
j_byte_buffer_class_(jni, FindClass(jni, "java/nio/ByteBuffer")) {
CHECK_EXCEPTION(jni);
}
@@ -829,13 +815,13 @@ class JavaVideoRendererWrapper : public VideoRendererInterface {
jobject CricketToJavaTextureFrame(const cricket::VideoFrame* frame) {
NativeHandleImpl* handle =
reinterpret_cast<NativeHandleImpl*>(frame->GetNativeHandle());
- jobject texture_object = reinterpret_cast<jobject>(handle->GetHandle());
- int texture_id = handle->GetTextureId();
+ jfloatArray sampling_matrix = jni()->NewFloatArray(16);
+ jni()->SetFloatArrayRegion(sampling_matrix, 0, 16, handle->sampling_matrix);
return jni()->NewObject(
*j_frame_class_, j_texture_frame_ctor_id_,
frame->GetWidth(), frame->GetHeight(),
static_cast<int>(frame->GetVideoRotation()),
- texture_object, texture_id, javaShallowCopy(frame));
+ handle->oes_texture_id, sampling_matrix, javaShallowCopy(frame));
}
JNIEnv* jni() {
@@ -1054,6 +1040,32 @@ JOW(void, PeerConnectionFactory_initializeFieldTrials)(
webrtc::field_trial::InitFieldTrialsFromString(field_trials_init_string);
}
+JOW(void, PeerConnectionFactory_initializeInternalTracer)(JNIEnv* jni, jclass) {
+ rtc::tracing::SetupInternalTracer();
+}
+
+JOW(jboolean, PeerConnectionFactory_startInternalTracingCapture)(
+ JNIEnv* jni, jclass, jstring j_event_tracing_filename) {
+ if (!j_event_tracing_filename)
+ return false;
+
+ const char* init_string =
+ jni->GetStringUTFChars(j_event_tracing_filename, NULL);
+ LOG(LS_INFO) << "Starting internal tracing to: " << init_string;
+ bool ret = rtc::tracing::StartInternalCapture(init_string);
+ jni->ReleaseStringUTFChars(j_event_tracing_filename, init_string);
+ return ret;
+}
+
+JOW(void, PeerConnectionFactory_stopInternalTracingCapture)(
+ JNIEnv* jni, jclass) {
+ rtc::tracing::StopInternalCapture();
+}
+
+JOW(void, PeerConnectionFactory_shutdownInternalTracer)(JNIEnv* jni, jclass) {
+ rtc::tracing::ShutdownInternalTracer();
+}
+
// Helper struct for working around the fact that CreatePeerConnectionFactory()
// comes in two flavors: either entirely automagical (constructing its own
// threads and deleting them on teardown, but no external codec factory support)
@@ -1251,6 +1263,46 @@ JOW(jlong, PeerConnectionFactory_nativeCreateAudioTrack)(
return (jlong)track.release();
}
+JOW(jboolean, PeerConnectionFactory_nativeStartAecDump)(
+ JNIEnv* jni, jclass, jlong native_factory, jint file) {
+#if defined(ANDROID)
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+ factoryFromJava(native_factory));
+ return factory->StartAecDump(file);
+#else
+ return false;
+#endif
+}
+
+JOW(void, PeerConnectionFactory_nativeStopAecDump)(
+ JNIEnv* jni, jclass, jlong native_factory) {
+#if defined(ANDROID)
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+ factoryFromJava(native_factory));
+ factory->StopAecDump();
+#endif
+}
+
+JOW(jboolean, PeerConnectionFactory_nativeStartRtcEventLog)(
+ JNIEnv* jni, jclass, jlong native_factory, jint file) {
+#if defined(ANDROID)
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+ factoryFromJava(native_factory));
+ return factory->StartRtcEventLog(file);
+#else
+ return false;
+#endif
+}
+
+JOW(void, PeerConnectionFactory_nativeStopRtcEventLog)(
+ JNIEnv* jni, jclass, jlong native_factory) {
+#if defined(ANDROID)
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+ factoryFromJava(native_factory));
+ factory->StopRtcEventLog();
+#endif
+}
+
JOW(void, PeerConnectionFactory_nativeSetOptions)(
JNIEnv* jni, jclass, jlong native_factory, jobject options) {
rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
@@ -1292,21 +1344,35 @@ JOW(void, PeerConnectionFactory_nativeSetOptions)(
}
JOW(void, PeerConnectionFactory_nativeSetVideoHwAccelerationOptions)(
- JNIEnv* jni, jclass, jlong native_factory, jobject render_egl_context) {
+ JNIEnv* jni, jclass, jlong native_factory, jobject local_egl_context,
+ jobject remote_egl_context) {
#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
OwnedFactoryAndThreads* owned_factory =
reinterpret_cast<OwnedFactoryAndThreads*>(native_factory);
+
+ jclass j_eglbase14_context_class =
+ FindClass(jni, "org/webrtc/EglBase14$Context");
+
+ MediaCodecVideoEncoderFactory* encoder_factory =
+ static_cast<MediaCodecVideoEncoderFactory*>
+ (owned_factory->encoder_factory());
+ if (encoder_factory &&
+ jni->IsInstanceOf(local_egl_context, j_eglbase14_context_class)) {
+ LOG(LS_INFO) << "Set EGL context for HW encoding.";
+ encoder_factory->SetEGLContext(jni, local_egl_context);
+ }
+
MediaCodecVideoDecoderFactory* decoder_factory =
static_cast<MediaCodecVideoDecoderFactory*>
(owned_factory->decoder_factory());
- if (decoder_factory) {
- LOG(LS_INFO) << "Set EGL context for HW acceleration.";
- decoder_factory->SetEGLContext(jni, render_egl_context);
+ if (decoder_factory &&
+ jni->IsInstanceOf(remote_egl_context, j_eglbase14_context_class)) {
+ LOG(LS_INFO) << "Set EGL context for HW decoding.";
+ decoder_factory->SetEGLContext(jni, remote_egl_context);
}
#endif
}
-
static std::string
GetJavaEnumName(JNIEnv* jni, const std::string& className, jobject j_enum) {
jclass enumClass = FindClass(jni, className.c_str());
@@ -1503,6 +1569,9 @@ static void JavaRTCConfigurationToJsepRTCConfiguration(
jfieldID j_ice_connection_receiving_timeout_id =
GetFieldID(jni, j_rtc_config_class, "iceConnectionReceivingTimeout", "I");
+ jfieldID j_ice_backup_candidate_pair_ping_interval_id = GetFieldID(
+ jni, j_rtc_config_class, "iceBackupCandidatePairPingInterval", "I");
+
jfieldID j_continual_gathering_policy_id =
GetFieldID(jni, j_rtc_config_class, "continualGatheringPolicy",
"Lorg/webrtc/PeerConnection$ContinualGatheringPolicy;");
@@ -1524,6 +1593,8 @@ static void JavaRTCConfigurationToJsepRTCConfiguration(
jni, j_rtc_config, j_audio_jitter_buffer_fast_accelerate_id);
rtc_config->ice_connection_receiving_timeout =
GetIntField(jni, j_rtc_config, j_ice_connection_receiving_timeout_id);
+ rtc_config->ice_backup_candidate_pair_ping_interval = GetIntField(
+ jni, j_rtc_config, j_ice_backup_candidate_pair_ping_interval_id);
rtc_config->continual_gathering_policy =
JavaContinualGatheringPolicyToNativeType(
jni, j_continual_gathering_policy);
@@ -1550,7 +1621,7 @@ JOW(jlong, PeerConnectionFactory_nativeCreatePeerConnection)(
rtc::SSLIdentity::Generate(webrtc::kIdentityName, rtc::KT_ECDSA));
if (ssl_identity.get()) {
rtc_config.certificates.push_back(
- rtc::RTCCertificate::Create(ssl_identity.Pass()));
+ rtc::RTCCertificate::Create(std::move(ssl_identity)));
LOG(LS_INFO) << "ECDSA certificate created.";
} else {
// Failing to create certificate should not abort peer connection
@@ -1704,6 +1775,29 @@ JOW(void, PeerConnection_nativeRemoveLocalStream)(
reinterpret_cast<MediaStreamInterface*>(native_stream));
}
+JOW(jobject, PeerConnection_nativeCreateSender)(
+ JNIEnv* jni, jobject j_pc, jstring j_kind, jstring j_stream_id) {
+ jclass j_rtp_sender_class = FindClass(jni, "org/webrtc/RtpSender");
+ jmethodID j_rtp_sender_ctor =
+ GetMethodID(jni, j_rtp_sender_class, "<init>", "(J)V");
+
+ std::string kind = JavaToStdString(jni, j_kind);
+ std::string stream_id = JavaToStdString(jni, j_stream_id);
+ rtc::scoped_refptr<RtpSenderInterface> sender =
+ ExtractNativePC(jni, j_pc)->CreateSender(kind, stream_id);
+ if (!sender.get()) {
+ return nullptr;
+ }
+ jlong nativeSenderPtr = jlongFromPointer(sender.get());
+ jobject j_sender =
+ jni->NewObject(j_rtp_sender_class, j_rtp_sender_ctor, nativeSenderPtr);
+ CHECK_EXCEPTION(jni) << "error during NewObject";
+ // Sender is now owned by the Java object, and will be freed from
+ // RtpSender.dispose(), called by PeerConnection.dispose() or getSenders().
+ sender->AddRef();
+ return j_sender;
+}
+
JOW(jobject, PeerConnection_nativeGetSenders)(JNIEnv* jni, jobject j_pc) {
jclass j_array_list_class = FindClass(jni, "java/util/ArrayList");
jmethodID j_array_list_ctor =
@@ -1723,7 +1817,8 @@ JOW(jobject, PeerConnection_nativeGetSenders)(JNIEnv* jni, jobject j_pc) {
jobject j_sender =
jni->NewObject(j_rtp_sender_class, j_rtp_sender_ctor, nativeSenderPtr);
CHECK_EXCEPTION(jni) << "error during NewObject";
- // Sender is now owned by Java object, and will be freed from there.
+ // Sender is now owned by the Java object, and will be freed from
+ // RtpSender.dispose(), called by PeerConnection.dispose() or getSenders().
sender->AddRef();
jni->CallBooleanMethod(j_senders, j_array_list_add, j_sender);
CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
@@ -1802,6 +1897,7 @@ JOW(jobject, VideoCapturer_nativeCreateVideoCapturer)(
// Since we can't create platform specific java implementations in Java, we
// defer the creation to C land.
#if defined(ANDROID)
+ // TODO(nisse): This case is intended to be deleted.
jclass j_video_capturer_class(
FindClass(jni, "org/webrtc/VideoCapturerAndroid"));
const int camera_id = jni->CallStaticIntMethod(
@@ -1816,8 +1912,13 @@ JOW(jobject, VideoCapturer_nativeCreateVideoCapturer)(
j_video_capturer_class,
GetMethodID(jni, j_video_capturer_class, "<init>", "(I)V"), camera_id);
CHECK_EXCEPTION(jni) << "error during creation of VideoCapturerAndroid";
+ jfieldID helper_fid = GetFieldID(jni, j_video_capturer_class, "surfaceHelper",
+ "Lorg/webrtc/SurfaceTextureHelper;");
+
rtc::scoped_refptr<webrtc::AndroidVideoCapturerDelegate> delegate =
- new rtc::RefCountedObject<AndroidVideoCapturerJni>(jni, j_video_capturer);
+ new rtc::RefCountedObject<AndroidVideoCapturerJni>(
+ jni, j_video_capturer,
+ GetObjectField(jni, j_video_capturer, helper_fid));
rtc::scoped_ptr<cricket::VideoCapturer> capturer(
new webrtc::AndroidVideoCapturer(delegate));
@@ -2003,11 +2104,11 @@ JOW(jbyteArray, CallSessionFileRotatingLogSink_nativeGetLogData)(
return result;
}
-JOW(void, RtpSender_nativeSetTrack)(JNIEnv* jni,
+JOW(jboolean, RtpSender_nativeSetTrack)(JNIEnv* jni,
jclass,
jlong j_rtp_sender_pointer,
jlong j_track_pointer) {
- reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)
+ return reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)
->SetTrack(reinterpret_cast<MediaStreamTrackInterface*>(j_track_pointer));
}
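The four tracing hooks added to PeerConnectionFactory are thin wrappers over rtc::tracing. A sketch of the native call sequence they expose to Java; the output path below is a hypothetical example:

#include "webrtc/base/event_tracer.h"

// One capture session, matching initializeInternalTracer /
// startInternalTracingCapture / stopInternalTracingCapture /
// shutdownInternalTracer above.
void TraceOneSession() {
  rtc::tracing::SetupInternalTracer();
  if (rtc::tracing::StartInternalCapture("/sdcard/webrtc_trace.json")) {
    // ... run the call under test ...
    rtc::tracing::StopInternalCapture();
  }
  rtc::tracing::ShutdownInternalTracer();
}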
diff --git a/talk/app/webrtc/java/jni/surfacetexturehelper_jni.cc b/talk/app/webrtc/java/jni/surfacetexturehelper_jni.cc
index 05f1b23768..3e32b9a6fe 100644
--- a/talk/app/webrtc/java/jni/surfacetexturehelper_jni.cc
+++ b/talk/app/webrtc/java/jni/surfacetexturehelper_jni.cc
@@ -35,25 +35,14 @@
namespace webrtc_jni {
-SurfaceTextureHelper::SurfaceTextureHelper(JNIEnv* jni,
- jobject egl_shared_context)
- : j_surface_texture_helper_class_(
- jni,
- FindClass(jni, "org/webrtc/SurfaceTextureHelper")),
- j_surface_texture_helper_(
- jni,
- jni->CallStaticObjectMethod(
- *j_surface_texture_helper_class_,
- GetStaticMethodID(jni,
- *j_surface_texture_helper_class_,
- "create",
- "(Ljavax/microedition/khronos/egl/EGLContext;)"
- "Lorg/webrtc/SurfaceTextureHelper;"),
- egl_shared_context)),
- j_return_texture_method_(GetMethodID(jni,
- *j_surface_texture_helper_class_,
- "returnTextureFrame",
- "()V")) {
+SurfaceTextureHelper::SurfaceTextureHelper(
+ JNIEnv* jni, jobject surface_texture_helper)
+ : j_surface_texture_helper_(jni, surface_texture_helper),
+ j_return_texture_method_(
+ GetMethodID(jni,
+ FindClass(jni, "org/webrtc/SurfaceTextureHelper"),
+ "returnTextureFrame",
+ "()V")) {
CHECK_EXCEPTION(jni) << "error during initialization of SurfaceTextureHelper";
}
@@ -70,9 +59,9 @@ void SurfaceTextureHelper::ReturnTextureFrame() const {
rtc::scoped_refptr<webrtc::VideoFrameBuffer>
SurfaceTextureHelper::CreateTextureFrame(int width, int height,
- const NativeTextureHandleImpl& native_handle) {
+ const NativeHandleImpl& native_handle) {
return new rtc::RefCountedObject<AndroidTextureBuffer>(
- width, height, native_handle,
+ width, height, native_handle, *j_surface_texture_helper_,
rtc::Bind(&SurfaceTextureHelper::ReturnTextureFrame, this));
}
diff --git a/talk/app/webrtc/java/jni/surfacetexturehelper_jni.h b/talk/app/webrtc/java/jni/surfacetexturehelper_jni.h
index dc9d2b853d..8dde2b54ed 100644
--- a/talk/app/webrtc/java/jni/surfacetexturehelper_jni.h
+++ b/talk/app/webrtc/java/jni/surfacetexturehelper_jni.h
@@ -35,7 +35,7 @@
#include "talk/app/webrtc/java/jni/native_handle_impl.h"
#include "webrtc/base/refcount.h"
#include "webrtc/base/scoped_ref_ptr.h"
-#include "webrtc/common_video/interface/video_frame_buffer.h"
+#include "webrtc/common_video/include/video_frame_buffer.h"
namespace webrtc_jni {
@@ -49,24 +49,19 @@ namespace webrtc_jni {
// destroyed while a VideoFrameBuffer is in use.
// This class is the C++ counterpart of the java class SurfaceTextureHelper.
// Usage:
-// 1. Create an instance of this class.
-// 2. Call GetJavaSurfaceTextureHelper to get the Java SurfaceTextureHelper.
+// 1. Create a Java instance of SurfaceTextureHelper.
+// 2. Create an instance of this class.
// 3. Register a listener to the Java SurfaceListener and start producing
// new buffers.
-// 3. Call CreateTextureFrame to wrap the Java texture in a VideoFrameBuffer.
+// 4. Call CreateTextureFrame to wrap the Java texture in a VideoFrameBuffer.
class SurfaceTextureHelper : public rtc::RefCountInterface {
public:
- SurfaceTextureHelper(JNIEnv* jni, jobject shared_egl_context);
-
- // Returns the Java SurfaceTextureHelper.
- jobject GetJavaSurfaceTextureHelper() const {
- return *j_surface_texture_helper_;
- }
+ SurfaceTextureHelper(JNIEnv* jni, jobject surface_texture_helper);
rtc::scoped_refptr<webrtc::VideoFrameBuffer> CreateTextureFrame(
int width,
int height,
- const NativeTextureHandleImpl& native_handle);
+ const NativeHandleImpl& native_handle);
protected:
~SurfaceTextureHelper();
@@ -75,7 +70,6 @@ class SurfaceTextureHelper : public rtc::RefCountInterface {
// May be called on arbitrary thread.
void ReturnTextureFrame() const;
- const ScopedGlobalRef<jclass> j_surface_texture_helper_class_;
const ScopedGlobalRef<jobject> j_surface_texture_helper_;
const jmethodID j_return_texture_method_;
};
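Following the four usage steps in the header comment above, a hedged sketch of the C++ side; |jni| and |j_helper| (the Java org/webrtc/SurfaceTextureHelper from step 1) are assumed to be supplied by the embedding code, as AndroidVideoCapturerJni does:

#include "talk/app/webrtc/java/jni/surfacetexturehelper_jni.h"

// Steps 2 and 4: wrap the Java helper, then wrap an incoming texture as a
// VideoFrameBuffer. Step 3, registering the Java listener, happens on the
// Java side before frames arrive.
rtc::scoped_refptr<webrtc::VideoFrameBuffer> WrapTexture(
    JNIEnv* jni,
    jobject j_helper,
    int width,
    int height,
    const webrtc_jni::NativeHandleImpl& handle) {
  rtc::scoped_refptr<webrtc_jni::SurfaceTextureHelper> helper(
      new rtc::RefCountedObject<webrtc_jni::SurfaceTextureHelper>(jni,
                                                                  j_helper));
  // The returned buffer keeps |helper| alive until returnTextureFrame()
  // runs via the bound callback.
  return helper->CreateTextureFrame(width, height, handle);
}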
diff --git a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
index 42af9c7fd0..19002f70e1 100644
--- a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
+++ b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
@@ -33,23 +33,23 @@ import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecList;
import android.media.MediaFormat;
-import android.opengl.GLES11Ext;
-import android.opengl.GLES20;
import android.os.Build;
+import android.os.SystemClock;
import android.view.Surface;
import org.webrtc.Logging;
import java.nio.ByteBuffer;
import java.util.Arrays;
+import java.util.LinkedList;
import java.util.List;
-
-import javax.microedition.khronos.egl.EGLContext;
+import java.util.concurrent.CountDownLatch;
+import java.util.Queue;
+import java.util.concurrent.TimeUnit;
// Java-side of peerconnection_jni.cc:MediaCodecVideoDecoder.
// This class is an implementation detail of the Java PeerConnection API.
-// MediaCodec is thread-hostile so this class must be operated on a single
-// thread.
+@SuppressWarnings("deprecation")
public class MediaCodecVideoDecoder {
// This class is constructed, operated, and destroyed by its C++ incarnation,
// so the class and its methods have non-public visibility. The API this
@@ -66,18 +66,26 @@ public class MediaCodecVideoDecoder {
}
private static final int DEQUEUE_INPUT_TIMEOUT = 500000; // 500 ms timeout.
+ private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000; // Timeout for codec releasing.
// Active running decoder instance. Set in initDecode() (called from native code)
// and reset to null in release() call.
private static MediaCodecVideoDecoder runningInstance = null;
+ private static MediaCodecVideoDecoderErrorCallback errorCallback = null;
+ private static int codecErrors = 0;
+
private Thread mediaCodecThread;
private MediaCodec mediaCodec;
private ByteBuffer[] inputBuffers;
private ByteBuffer[] outputBuffers;
private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8";
+ private static final String VP9_MIME_TYPE = "video/x-vnd.on2.vp9";
private static final String H264_MIME_TYPE = "video/avc";
// List of supported HW VP8 decoders.
private static final String[] supportedVp8HwCodecPrefixes =
{"OMX.qcom.", "OMX.Nvidia.", "OMX.Exynos.", "OMX.Intel." };
+ // List of supported HW VP9 decoders.
+ private static final String[] supportedVp9HwCodecPrefixes =
+ {"OMX.qcom.", "OMX.Exynos." };
// List of supported HW H.264 decoders.
private static final String[] supportedH264HwCodecPrefixes =
{"OMX.qcom.", "OMX.Intel." };
@@ -96,13 +104,29 @@ public class MediaCodecVideoDecoder {
private int height;
private int stride;
private int sliceHeight;
+ private boolean hasDecodedFirstFrame;
+ private final Queue<TimeStamps> decodeStartTimeMs = new LinkedList<TimeStamps>();
private boolean useSurface;
- private int textureID = 0;
- private SurfaceTexture surfaceTexture = null;
+
+ // The below variables are only used when decoding to a Surface.
+ private TextureListener textureListener;
+ // Max number of output buffers queued before starting to drop decoded frames.
+ private static final int MAX_QUEUED_OUTPUTBUFFERS = 3;
+ private int droppedFrames;
private Surface surface = null;
- private EglBase eglBase;
+ private final Queue<DecodedOutputBuffer>
+ dequeuedSurfaceOutputBuffers = new LinkedList<DecodedOutputBuffer>();
+
+ // MediaCodec error handler - invoked when a critical error happens which may
+ // prevent further use of the MediaCodec API. Currently it means that one of
+ // the MediaCodec instances is hanging and can no longer be used in subsequent calls.
+ public static interface MediaCodecVideoDecoderErrorCallback {
+ void onMediaCodecVideoDecoderCriticalError(int codecErrors);
+ }
- private MediaCodecVideoDecoder() {
+ public static void setErrorCallback(MediaCodecVideoDecoderErrorCallback errorCallback) {
+ Logging.d(TAG, "Set error callback");
+ MediaCodecVideoDecoder.errorCallback = errorCallback;
}
// Helper struct for findVp8Decoder() below.
@@ -120,6 +144,7 @@ public class MediaCodecVideoDecoder {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
return null; // MediaCodec.setParameters is missing.
}
+ Logging.d(TAG, "Trying to find HW decoder for mime " + mime);
for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
if (info.isEncoder()) {
@@ -135,7 +160,7 @@ public class MediaCodecVideoDecoder {
if (name == null) {
continue; // No HW support in this codec; try the next one.
}
- Logging.v(TAG, "Found candidate decoder " + name);
+ Logging.d(TAG, "Found candidate decoder " + name);
// Check if this is supported decoder.
boolean supportedCodec = false;
@@ -166,6 +191,7 @@ public class MediaCodecVideoDecoder {
}
}
}
+ Logging.d(TAG, "No HW decoder found for mime " + mime);
return null; // No HW decoder.
}
@@ -173,6 +199,10 @@ public class MediaCodecVideoDecoder {
return findDecoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes) != null;
}
+ public static boolean isVp9HwSupported() {
+ return findDecoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes) != null;
+ }
+
public static boolean isH264HwSupported() {
return findDecoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes) != null;
}
@@ -197,17 +227,21 @@ public class MediaCodecVideoDecoder {
}
}
- // Pass null in |sharedContext| to configure the codec for ByteBuffer output.
- private boolean initDecode(VideoCodecType type, int width, int height, EGLContext sharedContext) {
+ // Pass null in |surfaceTextureHelper| to configure the codec for ByteBuffer output.
+ private boolean initDecode(
+ VideoCodecType type, int width, int height, SurfaceTextureHelper surfaceTextureHelper) {
if (mediaCodecThread != null) {
throw new RuntimeException("Forgot to release()?");
}
- useSurface = (sharedContext != null);
+ useSurface = (surfaceTextureHelper != null);
String mime = null;
String[] supportedCodecPrefixes = null;
if (type == VideoCodecType.VIDEO_CODEC_VP8) {
mime = VP8_MIME_TYPE;
supportedCodecPrefixes = supportedVp8HwCodecPrefixes;
+ } else if (type == VideoCodecType.VIDEO_CODEC_VP9) {
+ mime = VP9_MIME_TYPE;
+ supportedCodecPrefixes = supportedVp9HwCodecPrefixes;
} else if (type == VideoCodecType.VIDEO_CODEC_H264) {
mime = H264_MIME_TYPE;
supportedCodecPrefixes = supportedH264HwCodecPrefixes;
@@ -221,9 +255,6 @@ public class MediaCodecVideoDecoder {
Logging.d(TAG, "Java initDecode: " + type + " : "+ width + " x " + height +
". Color: 0x" + Integer.toHexString(properties.colorFormat) +
". Use Surface: " + useSurface);
- if (sharedContext != null) {
- Logging.d(TAG, "Decoder shared EGL Context: " + sharedContext);
- }
runningInstance = this; // Decoder is now running and can be queried for stack traces.
mediaCodecThread = Thread.currentThread();
try {
@@ -233,16 +264,8 @@ public class MediaCodecVideoDecoder {
sliceHeight = height;
if (useSurface) {
- // Create shared EGL context.
- eglBase = new EglBase(sharedContext, EglBase.ConfigType.PIXEL_BUFFER);
- eglBase.createDummyPbufferSurface();
- eglBase.makeCurrent();
-
- // Create output surface
- textureID = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
- Logging.d(TAG, "Video decoder TextureID = " + textureID);
- surfaceTexture = new SurfaceTexture(textureID);
- surface = new Surface(surfaceTexture);
+ textureListener = new TextureListener(surfaceTextureHelper);
+ surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
}
MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
@@ -261,6 +284,10 @@ public class MediaCodecVideoDecoder {
colorFormat = properties.colorFormat;
outputBuffers = mediaCodec.getOutputBuffers();
inputBuffers = mediaCodec.getInputBuffers();
+ decodeStartTimeMs.clear();
+ hasDecodedFirstFrame = false;
+ dequeuedSurfaceOutputBuffers.clear();
+ droppedFrames = 0;
Logging.d(TAG, "Input buffers: " + inputBuffers.length +
". Output buffers: " + outputBuffers.length);
return true;
@@ -271,25 +298,45 @@ public class MediaCodecVideoDecoder {
}
private void release() {
- Logging.d(TAG, "Java releaseDecoder");
+ Logging.d(TAG, "Java releaseDecoder. Total number of dropped frames: " + droppedFrames);
checkOnMediaCodecThread();
- try {
- mediaCodec.stop();
- mediaCodec.release();
- } catch (IllegalStateException e) {
- Logging.e(TAG, "release failed", e);
+
+ // Run MediaCodec stop() and release() on a separate thread, since MediaCodec.stop()
+ // sometimes hangs.
+ final CountDownLatch releaseDone = new CountDownLatch(1);
+
+ Runnable runMediaCodecRelease = new Runnable() {
+ @Override
+ public void run() {
+ try {
+ Logging.d(TAG, "Java releaseDecoder on release thread");
+ mediaCodec.stop();
+ mediaCodec.release();
+ Logging.d(TAG, "Java releaseDecoder on release thread done");
+ } catch (Exception e) {
+ Logging.e(TAG, "Media decoder release failed", e);
+ }
+ releaseDone.countDown();
+ }
+ };
+ new Thread(runMediaCodecRelease).start();
+
+ if (!ThreadUtils.awaitUninterruptibly(releaseDone, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
+ Logging.e(TAG, "Media decoder release timeout");
+ codecErrors++;
+ if (errorCallback != null) {
+ Logging.e(TAG, "Invoke codec error callback. Errors: " + codecErrors);
+ errorCallback.onMediaCodecVideoDecoderCriticalError(codecErrors);
+ }
}
+
mediaCodec = null;
mediaCodecThread = null;
runningInstance = null;
if (useSurface) {
surface.release();
surface = null;
- Logging.d(TAG, "Delete video decoder TextureID " + textureID);
- GLES20.glDeleteTextures(1, new int[] {textureID}, 0);
- textureID = 0;
- eglBase.release();
- eglBase = null;
+ textureListener.release();
}
Logging.d(TAG, "Java releaseDecoder done");
}
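The release path above follows a general watchdog pattern; a standalone sketch, where blockingStop() is a hypothetical call that may hang (like MediaCodec.stop() on some devices):

    final CountDownLatch done = new CountDownLatch(1);
    new Thread(new Runnable() {
      @Override
      public void run() {
        try {
          blockingStop(); // Hypothetical; may hang indefinitely.
        } finally {
          done.countDown(); // Always signal, even on exception.
        }
      }
    }).start();
    if (!ThreadUtils.awaitUninterruptibly(done, 5000 /* ms */)) {
      // Timed out: report a critical error instead of blocking the caller forever.
    }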
@@ -306,13 +353,15 @@ public class MediaCodecVideoDecoder {
}
}
- private boolean queueInputBuffer(
- int inputBufferIndex, int size, long timestampUs) {
+ private boolean queueInputBuffer(int inputBufferIndex, int size, long presentationTimeStampUs,
+ long timeStampMs, long ntpTimeStamp) {
checkOnMediaCodecThread();
try {
inputBuffers[inputBufferIndex].position(0);
inputBuffers[inputBufferIndex].limit(size);
- mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, timestampUs, 0);
+ decodeStartTimeMs.add(new TimeStamps(SystemClock.elapsedRealtime(), timeStampMs,
+ ntpTimeStamp));
+ mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, presentationTimeStampUs, 0);
return true;
}
catch (IllegalStateException e) {
@@ -321,56 +370,183 @@ public class MediaCodecVideoDecoder {
}
}
- // Helper structs for dequeueOutputBuffer() below.
- private static class DecodedByteBuffer {
- public DecodedByteBuffer(int index, int offset, int size, long presentationTimestampUs) {
+ private static class TimeStamps {
+ public TimeStamps(long decodeStartTimeMs, long timeStampMs, long ntpTimeStampMs) {
+ this.decodeStartTimeMs = decodeStartTimeMs;
+ this.timeStampMs = timeStampMs;
+ this.ntpTimeStampMs = ntpTimeStampMs;
+ }
+ private final long decodeStartTimeMs; // Time when this frame was queued for decoding.
+ private final long timeStampMs; // Not used in Java; passed through and used in C++.
+ private final long ntpTimeStampMs; // Not used in Java; passed through and used in C++.
+ }
+
+ // Helper struct for dequeueOutputBuffer() below.
+ private static class DecodedOutputBuffer {
+ public DecodedOutputBuffer(int index, int offset, int size, long timeStampMs,
+ long ntpTimeStampMs, long decodeTime, long endDecodeTime) {
this.index = index;
this.offset = offset;
this.size = size;
- this.presentationTimestampUs = presentationTimestampUs;
+ this.timeStampMs = timeStampMs;
+ this.ntpTimeStampMs = ntpTimeStampMs;
+ this.decodeTimeMs = decodeTime;
+ this.endDecodeTimeMs = endDecodeTime;
}
private final int index;
private final int offset;
private final int size;
- private final long presentationTimestampUs;
+ private final long timeStampMs;
+ private final long ntpTimeStampMs;
+ // Number of ms it took to decode this frame.
+ private final long decodeTimeMs;
+ // System time when this frame finished decoding.
+ private final long endDecodeTimeMs;
}
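The decode-time accounting reduces to FIFO pairing of enqueue times with dequeued outputs; a minimal sketch, assuming outputs come back in queueing order:

    Queue<Long> startTimes = new LinkedList<Long>();
    // When an input buffer is queued:
    startTimes.add(SystemClock.elapsedRealtime());
    // When the matching output buffer is dequeued:
    long decodeTimeMs = SystemClock.elapsedRealtime() - startTimes.remove();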
+ // Helper struct for dequeueTextureBuffer() below.
private static class DecodedTextureBuffer {
private final int textureID;
- private final long presentationTimestampUs;
+ private final float[] transformMatrix;
+ private final long timeStampMs;
+ private final long ntpTimeStampMs;
+ private final long decodeTimeMs;
+ // Interval from when the frame finished decoding until this buffer was created.
+ // Since there is only one texture, this interval depends on the time from when
+ // a frame is decoded and provided to C++ until that frame is returned to the MediaCodec
+ // so that the texture can be updated with the next decoded frame.
+ private final long frameDelayMs;
- public DecodedTextureBuffer(int textureID, long presentationTimestampUs) {
+ // A DecodedTextureBuffer with zero |textureID| has special meaning and represents a frame
+ // that was dropped.
+ public DecodedTextureBuffer(int textureID, float[] transformMatrix, long timeStampMs,
+ long ntpTimeStampMs, long decodeTimeMs, long frameDelay) {
this.textureID = textureID;
- this.presentationTimestampUs = presentationTimestampUs;
+ this.transformMatrix = transformMatrix;
+ this.timeStampMs = timeStampMs;
+ this.ntpTimeStampMs = ntpTimeStampMs;
+ this.decodeTimeMs = decodeTimeMs;
+ this.frameDelayMs = frameDelay;
}
}
- // Returns null if no decoded buffer is available, and otherwise either a DecodedByteBuffer or
- // DecodedTexturebuffer depending on |useSurface| configuration.
+ // Poll-based texture listener.
+ private static class TextureListener
+ implements SurfaceTextureHelper.OnTextureFrameAvailableListener {
+ private final SurfaceTextureHelper surfaceTextureHelper;
+ // |newFrameLock| is used to synchronize arrival of new frames with wait()/notifyAll().
+ private final Object newFrameLock = new Object();
+ // |bufferToRender| is non-null while waiting for the transition from addBufferToRender() to
+ // onTextureFrameAvailable().
+ private DecodedOutputBuffer bufferToRender;
+ private DecodedTextureBuffer renderedBuffer;
+
+ public TextureListener(SurfaceTextureHelper surfaceTextureHelper) {
+ this.surfaceTextureHelper = surfaceTextureHelper;
+ surfaceTextureHelper.setListener(this);
+ }
+
+ public void addBufferToRender(DecodedOutputBuffer buffer) {
+ // |bufferToRender| is read under |newFrameLock| elsewhere, so write under the same lock.
+ synchronized (newFrameLock) {
+ if (bufferToRender != null) {
+ Logging.e(TAG,
+ "Unexpected addBufferToRender() called while waiting for a texture.");
+ throw new IllegalStateException("Waiting for a texture.");
+ }
+ bufferToRender = buffer;
+ }
+ }
+
+ public boolean isWaitingForTexture() {
+ synchronized (newFrameLock) {
+ return bufferToRender != null;
+ }
+ }
+
+ // Callback from |surfaceTextureHelper|. May be called on an arbitrary thread.
+ @Override
+ public void onTextureFrameAvailable(
+ int oesTextureId, float[] transformMatrix, long timestampNs) {
+ synchronized (newFrameLock) {
+ if (renderedBuffer != null) {
+ Logging.e(TAG,
+ "Unexpected onTextureFrameAvailable() called while already holding a texture.");
+ throw new IllegalStateException("Already holding a texture.");
+ }
+ // |timestampNs| is always zero on some Android versions.
+ renderedBuffer = new DecodedTextureBuffer(oesTextureId, transformMatrix,
+ bufferToRender.timeStampMs, bufferToRender.ntpTimeStampMs, bufferToRender.decodeTimeMs,
+ SystemClock.elapsedRealtime() - bufferToRender.endDecodeTimeMs);
+ bufferToRender = null;
+ newFrameLock.notifyAll();
+ }
+ }
+
+ // Dequeues and returns a DecodedTextureBuffer if available, or null otherwise.
+ public DecodedTextureBuffer dequeueTextureBuffer(int timeoutMs) {
+ synchronized (newFrameLock) {
+ if (renderedBuffer == null && timeoutMs > 0 && isWaitingForTexture()) {
+ try {
+ newFrameLock.wait(timeoutMs);
+ } catch (InterruptedException e) {
+ // Restore the interrupted status by reinterrupting the thread.
+ Thread.currentThread().interrupt();
+ }
+ }
+ DecodedTextureBuffer returnedBuffer = renderedBuffer;
+ renderedBuffer = null;
+ return returnedBuffer;
+ }
+ }
+
+ public void release() {
+ // SurfaceTextureHelper.disconnect() will block until any onTextureFrameAvailable() in
+ // progress is done. Therefore, the call to disconnect() must be outside any synchronized
+ // statement that is also used in the onTextureFrameAvailable() above to avoid deadlocks.
+ surfaceTextureHelper.disconnect();
+ synchronized (newFrameLock) {
+ if (renderedBuffer != null) {
+ surfaceTextureHelper.returnTextureFrame();
+ renderedBuffer = null;
+ }
+ }
+ }
+ }
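TextureListener is in essence a single-slot handoff between the SurfaceTexture callback thread and the decoder thread. A self-contained sketch of that primitive:

    class SingleSlot<T> {
      private final Object lock = new Object();
      private T item;

      // Producer side, e.g. an onTextureFrameAvailable() callback.
      void put(T value) {
        synchronized (lock) {
          item = value;
          lock.notifyAll();
        }
      }

      // Consumer side; waits up to |timeoutMs| for an item, returns null on timeout.
      T poll(long timeoutMs) throws InterruptedException {
        synchronized (lock) {
          if (item == null && timeoutMs > 0) {
            lock.wait(timeoutMs);
          }
          T taken = item;
          item = null;
          return taken;
        }
      }
    }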
+
+ // Returns null if no decoded buffer is available, and otherwise a DecodedOutputBuffer.
// Throws IllegalStateException if the call is made on the wrong thread, if color format changes to an
// unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
// upon codec error.
- private Object dequeueOutputBuffer(int dequeueTimeoutUs)
- throws IllegalStateException, MediaCodec.CodecException {
+ private DecodedOutputBuffer dequeueOutputBuffer(int dequeueTimeoutMs) {
checkOnMediaCodecThread();
+ if (decodeStartTimeMs.isEmpty()) {
+ return null;
+ }
// Drain the decoder until receiving a decoded buffer or hitting
// MediaCodec.INFO_TRY_AGAIN_LATER.
final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
while (true) {
- final int result = mediaCodec.dequeueOutputBuffer(info, dequeueTimeoutUs);
+ final int result = mediaCodec.dequeueOutputBuffer(
+ info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs));
switch (result) {
- case MediaCodec.INFO_TRY_AGAIN_LATER:
- return null;
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
outputBuffers = mediaCodec.getOutputBuffers();
Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length);
+ if (hasDecodedFirstFrame) {
+ throw new RuntimeException("Unexpected output buffer change event.");
+ }
break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
MediaFormat format = mediaCodec.getOutputFormat();
Logging.d(TAG, "Decoder format changed: " + format.toString());
+ final int newWidth = format.getInteger(MediaFormat.KEY_WIDTH);
+ final int newHeight = format.getInteger(MediaFormat.KEY_HEIGHT);
+ if (hasDecodedFirstFrame && (newWidth != width || newHeight != height)) {
+ throw new RuntimeException("Unexpected size change. Configured " + width + "*" +
+ height + ". New " + newWidth + "*" + newHeight);
+ }
width = format.getInteger(MediaFormat.KEY_WIDTH);
height = format.getInteger(MediaFormat.KEY_HEIGHT);
+
if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
@@ -388,18 +564,76 @@ public class MediaCodecVideoDecoder {
stride = Math.max(width, stride);
sliceHeight = Math.max(height, sliceHeight);
break;
+ case MediaCodec.INFO_TRY_AGAIN_LATER:
+ return null;
default:
- // Output buffer decoded.
- if (useSurface) {
- mediaCodec.releaseOutputBuffer(result, true /* render */);
- // TODO(magjed): Wait for SurfaceTexture.onFrameAvailable() before returning a texture
- // frame.
- return new DecodedTextureBuffer(textureID, info.presentationTimeUs);
- } else {
- return new DecodedByteBuffer(result, info.offset, info.size, info.presentationTimeUs);
- }
+ hasDecodedFirstFrame = true;
+ TimeStamps timeStamps = decodeStartTimeMs.remove();
+ return new DecodedOutputBuffer(result, info.offset, info.size, timeStamps.timeStampMs,
+ timeStamps.ntpTimeStampMs,
+ SystemClock.elapsedRealtime() - timeStamps.decodeStartTimeMs,
+ SystemClock.elapsedRealtime());
+ }
+ }
+ }
+
+ // Returns null if no decoded buffer is available, and otherwise a DecodedTextureBuffer.
+ // Throws IllegalStateException if the call is made on the wrong thread, if color format changes to an
+ // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
+ // upon codec error. If |dequeueTimeoutMs| > 0, the oldest decoded frame will be dropped if
+ // a frame can't be returned.
+ private DecodedTextureBuffer dequeueTextureBuffer(int dequeueTimeoutMs) {
+ checkOnMediaCodecThread();
+ if (!useSurface) {
+ throw new IllegalStateException("dequeueTexture() called for byte buffer decoding.");
+ }
+ DecodedOutputBuffer outputBuffer = dequeueOutputBuffer(dequeueTimeoutMs);
+ if (outputBuffer != null) {
+ dequeuedSurfaceOutputBuffers.add(outputBuffer);
+ }
+
+ maybeRenderDecodedTextureBuffer();
+ // Check if a texture is ready now by waiting at most |dequeueTimeoutMs|.
+ DecodedTextureBuffer renderedBuffer = textureListener.dequeueTextureBuffer(dequeueTimeoutMs);
+ if (renderedBuffer != null) {
+ maybeRenderDecodedTextureBuffer();
+ return renderedBuffer;
+ }
+
+ if (dequeuedSurfaceOutputBuffers.size()
+ >= Math.min(MAX_QUEUED_OUTPUTBUFFERS, outputBuffers.length)
+ || (dequeueTimeoutMs > 0 && !dequeuedSurfaceOutputBuffers.isEmpty())) {
+ ++droppedFrames;
+ // Drop the oldest frame still in |dequeuedSurfaceOutputBuffers|. A frame already
+ // handed to |textureListener| can't be dropped, since mediaCodec.releaseOutputBuffer()
+ // has already been called for it.
+ final DecodedOutputBuffer droppedFrame = dequeuedSurfaceOutputBuffers.remove();
+ if (dequeueTimeoutMs > 0) {
+ // TODO(perkj): Re-add the below log when VideoRenderGUI has been removed or fixed to
+ // return the one and only texture even if it does not render.
+ // Logging.w(TAG, "Draining decoder. Dropping frame with TS: "
+ // + droppedFrame.timeStampMs + ". Total number of dropped frames: " + droppedFrames);
+ } else {
+ Logging.w(TAG, "Too many output buffers. Dropping frame with TS: "
+ + droppedFrame.timeStampMs + ". Total number of dropped frames: " + droppedFrames);
}
+
+ mediaCodec.releaseOutputBuffer(droppedFrame.index, false /* render */);
+ return new DecodedTextureBuffer(0, null, droppedFrame.timeStampMs,
+ droppedFrame.ntpTimeStampMs, droppedFrame.decodeTimeMs,
+ SystemClock.elapsedRealtime() - droppedFrame.endDecodeTimeMs);
+ }
+ return null;
+ }
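The real caller of dequeueTextureBuffer() lives in C++; a hypothetical Java-side polling loop would look roughly like this (renderTexture() is an assumed helper):

    DecodedTextureBuffer buffer = dequeueTextureBuffer(0 /* non-blocking */);
    if (buffer != null && buffer.textureID != 0) {
      // A zero textureID marks a dropped frame that carries only timing metadata.
      renderTexture(buffer.textureID, buffer.transformMatrix);
    }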
+
+ private void maybeRenderDecodedTextureBuffer() {
+ if (dequeuedSurfaceOutputBuffers.isEmpty() || textureListener.isWaitingForTexture()) {
+ return;
}
+ // Get the first frame in the queue and render it to the decoder output surface.
+ final DecodedOutputBuffer buffer = dequeuedSurfaceOutputBuffers.remove();
+ textureListener.addBufferToRender(buffer);
+ mediaCodec.releaseOutputBuffer(buffer.index, true /* render */);
}
// Release a dequeued output byte buffer back to the codec for re-use. Should only be called for
@@ -407,11 +641,11 @@ public class MediaCodecVideoDecoder {
// Throws IllegalStateException if the call is made on the wrong thread, if codec is configured
// for surface decoding, or if |mediaCodec| is not in the Executing state. Throws
// MediaCodec.CodecException upon codec error.
- private void returnDecodedByteBuffer(int index)
+ private void returnDecodedOutputBuffer(int index)
throws IllegalStateException, MediaCodec.CodecException {
checkOnMediaCodecThread();
if (useSurface) {
- throw new IllegalStateException("returnDecodedByteBuffer() called for surface decoding.");
+ throw new IllegalStateException("returnDecodedOutputBuffer() called for surface decoding.");
}
mediaCodec.releaseOutputBuffer(index, false /* render */);
}
diff --git a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java
index f3f03c1d20..5c8f9dc77e 100644
--- a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java
+++ b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java
@@ -27,24 +27,29 @@
package org.webrtc;
+import android.annotation.TargetApi;
import android.media.MediaCodec;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
+import android.opengl.GLES20;
import android.os.Build;
import android.os.Bundle;
+import android.view.Surface;
import org.webrtc.Logging;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.List;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
// Java-side of peerconnection_jni.cc:MediaCodecVideoEncoder.
// This class is an implementation detail of the Java PeerConnection API.
-// MediaCodec is thread-hostile so this class must be operated on a single
-// thread.
+@TargetApi(19)
+@SuppressWarnings("deprecation")
public class MediaCodecVideoEncoder {
// This class is constructed, operated, and destroyed by its C++ incarnation,
// so the class and its methods have non-public visibility. The API this
@@ -60,18 +65,31 @@ public class MediaCodecVideoEncoder {
VIDEO_CODEC_H264
}
+ private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000; // Timeout for codec release.
private static final int DEQUEUE_TIMEOUT = 0; // Non-blocking, no wait.
- // Active running encoder instance. Set in initDecode() (called from native code)
+ // Active running encoder instance. Set in initEncode() (called from native code)
// and reset to null in release() call.
private static MediaCodecVideoEncoder runningInstance = null;
+ private static MediaCodecVideoEncoderErrorCallback errorCallback = null;
+ private static int codecErrors = 0;
+
private Thread mediaCodecThread;
private MediaCodec mediaCodec;
private ByteBuffer[] outputBuffers;
+ private EglBase14 eglBase;
+ private int width;
+ private int height;
+ private Surface inputSurface;
+ private GlRectDrawer drawer;
private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8";
+ private static final String VP9_MIME_TYPE = "video/x-vnd.on2.vp9";
private static final String H264_MIME_TYPE = "video/avc";
// List of supported HW VP8 codecs.
private static final String[] supportedVp8HwCodecPrefixes =
{"OMX.qcom.", "OMX.Intel." };
+ // List of supported HW VP9 encoders.
+ private static final String[] supportedVp9HwCodecPrefixes =
+ {"OMX.qcom."};
// List of supported HW H.264 codecs.
private static final String[] supportedH264HwCodecPrefixes =
{"OMX.qcom." };
@@ -99,13 +117,25 @@ public class MediaCodecVideoEncoder {
CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m
};
- private int colorFormat;
- // Video encoder type.
+ private static final int[] supportedSurfaceColorList = {
+ CodecCapabilities.COLOR_FormatSurface
+ };
private VideoCodecType type;
+ private int colorFormat; // Used by native code.
+
// SPS and PPS NALs (Config frame) for H.264.
private ByteBuffer configData = null;
- private MediaCodecVideoEncoder() {
+ // MediaCodec error handler - invoked when a critical error happens that may prevent
+ // further use of the MediaCodec API. Currently this means that a MediaCodec instance
+ // is hanging and can no longer be used in subsequent calls.
+ public static interface MediaCodecVideoEncoderErrorCallback {
+ void onMediaCodecVideoEncoderCriticalError(int codecErrors);
+ }
+
+ public static void setErrorCallback(MediaCodecVideoEncoderErrorCallback errorCallback) {
+ Logging.d(TAG, "Set error callback");
+ MediaCodecVideoEncoder.errorCallback = errorCallback;
}
// Helper struct for findHwEncoder() below.
@@ -119,7 +149,7 @@ public class MediaCodecVideoEncoder {
}
private static EncoderProperties findHwEncoder(
- String mime, String[] supportedHwCodecPrefixes) {
+ String mime, String[] supportedHwCodecPrefixes, int[] colorList) {
// MediaCodec.setParameters is missing for JB and below, so bitrate
// can not be adjusted dynamically.
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
@@ -130,8 +160,7 @@ public class MediaCodecVideoEncoder {
if (mime.equals(H264_MIME_TYPE)) {
List<String> exceptionModels = Arrays.asList(H264_HW_EXCEPTION_MODELS);
if (exceptionModels.contains(Build.MODEL)) {
- Logging.w(TAG, "Model: " + Build.MODEL +
- " has black listed H.264 encoder.");
+ Logging.w(TAG, "Model: " + Build.MODEL + " has black listed H.264 encoder.");
return null;
}
}
@@ -170,8 +199,7 @@ public class MediaCodecVideoEncoder {
Logging.v(TAG, " Color: 0x" + Integer.toHexString(colorFormat));
}
- // Check if codec supports either yuv420 or nv12.
- for (int supportedColorFormat : supportedColorList) {
+ for (int supportedColorFormat : colorList) {
for (int codecColorFormat : capabilities.colorFormats) {
if (codecColorFormat == supportedColorFormat) {
// Found supported HW encoder.
@@ -182,15 +210,34 @@ public class MediaCodecVideoEncoder {
}
}
}
- return null; // No HW VP8 encoder.
+ return null; // No HW encoder.
}
public static boolean isVp8HwSupported() {
- return findHwEncoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes) != null;
+ return findHwEncoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes, supportedColorList) != null;
+ }
+
+ public static boolean isVp9HwSupported() {
+ return findHwEncoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes, supportedColorList) != null;
}
public static boolean isH264HwSupported() {
- return findHwEncoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes) != null;
+ return findHwEncoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes, supportedColorList) != null;
+ }
+
+ public static boolean isVp8HwSupportedUsingTextures() {
+ return findHwEncoder(
+ VP8_MIME_TYPE, supportedVp8HwCodecPrefixes, supportedSurfaceColorList) != null;
+ }
+
+ public static boolean isVp9HwSupportedUsingTextures() {
+ return findHwEncoder(
+ VP9_MIME_TYPE, supportedVp9HwCodecPrefixes, supportedSurfaceColorList) != null;
+ }
+
+ public static boolean isH264HwSupportedUsingTextures() {
+ return findHwEncoder(
+ H264_MIME_TYPE, supportedH264HwCodecPrefixes, supportedSurfaceColorList) != null;
}
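A sketch of how a caller might combine these probes to pick the encoder input mode; the codec choice and variable name are assumptions:

    // Texture input requires COLOR_FormatSurface support; otherwise use ByteBuffers.
    boolean encodeFromTexture = MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures();
    // Pass a shared EGL context to initEncode() only when |encodeFromTexture| is true;
    // a null context selects the byte-buffer path.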
private void checkOnMediaCodecThread() {
@@ -223,32 +270,43 @@ public class MediaCodecVideoEncoder {
}
}
- // Return the array of input buffers, or null on failure.
- private ByteBuffer[] initEncode(
- VideoCodecType type, int width, int height, int kbps, int fps) {
+ boolean initEncode(VideoCodecType type, int width, int height, int kbps, int fps,
+ EglBase14.Context sharedContext) {
+ final boolean useSurface = sharedContext != null;
Logging.d(TAG, "Java initEncode: " + type + " : " + width + " x " + height +
- ". @ " + kbps + " kbps. Fps: " + fps +
- ". Color: 0x" + Integer.toHexString(colorFormat));
+ ". @ " + kbps + " kbps. Fps: " + fps + ". Encode from texture : " + useSurface);
+
+ this.width = width;
+ this.height = height;
if (mediaCodecThread != null) {
throw new RuntimeException("Forgot to release()?");
}
- this.type = type;
EncoderProperties properties = null;
String mime = null;
int keyFrameIntervalSec = 0;
if (type == VideoCodecType.VIDEO_CODEC_VP8) {
mime = VP8_MIME_TYPE;
- properties = findHwEncoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes);
+ properties = findHwEncoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes,
+ useSurface ? supportedSurfaceColorList : supportedColorList);
+ keyFrameIntervalSec = 100;
+ } else if (type == VideoCodecType.VIDEO_CODEC_VP9) {
+ mime = VP9_MIME_TYPE;
+ properties = findHwEncoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes,
+ useSurface ? supportedSurfaceColorList : supportedColorList);
keyFrameIntervalSec = 100;
} else if (type == VideoCodecType.VIDEO_CODEC_H264) {
mime = H264_MIME_TYPE;
- properties = findHwEncoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes);
+ properties = findHwEncoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes,
+ useSurface ? supportedSurfaceColorList : supportedColorList);
keyFrameIntervalSec = 20;
}
if (properties == null) {
throw new RuntimeException("Can not find HW encoder for " + type);
}
runningInstance = this; // Encoder is now running and can be queried for stack traces.
+ colorFormat = properties.colorFormat;
+ Logging.d(TAG, "Color format: " + colorFormat);
+
mediaCodecThread = Thread.currentThread();
try {
MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
@@ -259,26 +317,39 @@ public class MediaCodecVideoEncoder {
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, keyFrameIntervalSec);
Logging.d(TAG, " Format: " + format);
mediaCodec = createByCodecName(properties.codecName);
+ this.type = type;
if (mediaCodec == null) {
Logging.e(TAG, "Can not create media encoder");
- return null;
+ return false;
}
mediaCodec.configure(
format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
+
+ if (useSurface) {
+ eglBase = new EglBase14(sharedContext, EglBase.CONFIG_RECORDABLE);
+ // Create an input surface and keep a reference since we must release the surface when done.
+ inputSurface = mediaCodec.createInputSurface();
+ eglBase.createSurface(inputSurface);
+ drawer = new GlRectDrawer();
+ }
mediaCodec.start();
- colorFormat = properties.colorFormat;
outputBuffers = mediaCodec.getOutputBuffers();
- ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
- Logging.d(TAG, "Input buffers: " + inputBuffers.length +
- ". Output buffers: " + outputBuffers.length);
- return inputBuffers;
+ Logging.d(TAG, "Output buffers: " + outputBuffers.length);
+
} catch (IllegalStateException e) {
Logging.e(TAG, "initEncode failed", e);
- return null;
+ return false;
}
+ return true;
+ }
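For reference, the input-surface wiring in isolation, as a sketch using only public MediaCodec APIs (API 18+); resolution and bitrate are placeholders and error handling is omitted:

    MediaFormat format = MediaFormat.createVideoFormat("video/avc", 640, 480);
    format.setInteger(MediaFormat.KEY_COLOR_FORMAT, CodecCapabilities.COLOR_FormatSurface);
    format.setInteger(MediaFormat.KEY_BIT_RATE, 1000000);
    format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
    format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
    MediaCodec codec = MediaCodec.createEncoderByType("video/avc");
    codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    Surface input = codec.createInputSurface(); // Between configure() and start().
    codec.start();
    // Frames are now produced by making an EGL surface current on |input|, drawing,
    // and swapping buffers; no input ByteBuffers are involved.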
+
+ ByteBuffer[] getInputBuffers() {
+ ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
+ Logging.d(TAG, "Input buffers: " + inputBuffers.length);
+ return inputBuffers;
}
- private boolean encode(
+ boolean encodeBuffer(
boolean isKeyframe, int inputBuffer, int size,
long presentationTimestampUs) {
checkOnMediaCodecThread();
@@ -298,22 +369,82 @@ public class MediaCodecVideoEncoder {
return true;
}
catch (IllegalStateException e) {
- Logging.e(TAG, "encode failed", e);
+ Logging.e(TAG, "encodeBuffer failed", e);
return false;
}
}
- private void release() {
- Logging.d(TAG, "Java releaseEncoder");
+ boolean encodeTexture(boolean isKeyframe, int oesTextureId, float[] transformationMatrix,
+ long presentationTimestampUs) {
checkOnMediaCodecThread();
try {
- mediaCodec.stop();
- mediaCodec.release();
- } catch (IllegalStateException e) {
- Logging.e(TAG, "release failed", e);
+ if (isKeyframe) {
+ Logging.d(TAG, "Sync frame request");
+ Bundle b = new Bundle();
+ b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
+ mediaCodec.setParameters(b);
+ }
+ eglBase.makeCurrent();
+ // TODO(perkj): glClear() shouldn't be necessary since every pixel is covered anyway,
+ // but it's a workaround for bug webrtc:5147.
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ drawer.drawOes(oesTextureId, transformationMatrix, 0, 0, width, height);
+ eglBase.swapBuffers(TimeUnit.MICROSECONDS.toNanos(presentationTimestampUs));
+ return true;
+ }
+ catch (RuntimeException e) {
+ Logging.e(TAG, "encodeTexture failed", e);
+ return false;
}
+ }
+
+ void release() {
+ Logging.d(TAG, "Java releaseEncoder");
+ checkOnMediaCodecThread();
+
+ // Run MediaCodec stop() and release() on a separate thread, since MediaCodec.stop()
+ // sometimes hangs.
+ final CountDownLatch releaseDone = new CountDownLatch(1);
+
+ Runnable runMediaCodecRelease = new Runnable() {
+ @Override
+ public void run() {
+ try {
+ Logging.d(TAG, "Java releaseEncoder on release thread");
+ mediaCodec.stop();
+ mediaCodec.release();
+ Logging.d(TAG, "Java releaseEncoder on release thread done");
+ } catch (Exception e) {
+ Logging.e(TAG, "Media encoder release failed", e);
+ }
+ releaseDone.countDown();
+ }
+ };
+ new Thread(runMediaCodecRelease).start();
+
+ if (!ThreadUtils.awaitUninterruptibly(releaseDone, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
+ Logging.e(TAG, "Media encoder release timeout");
+ codecErrors++;
+ if (errorCallback != null) {
+ Logging.e(TAG, "Invoke codec error callback. Errors: " + codecErrors);
+ errorCallback.onMediaCodecVideoEncoderCriticalError(codecErrors);
+ }
+ }
+
mediaCodec = null;
mediaCodecThread = null;
+ if (drawer != null) {
+ drawer.release();
+ drawer = null;
+ }
+ if (eglBase != null) {
+ eglBase.release();
+ eglBase = null;
+ }
+ if (inputSurface != null) {
+ inputSurface.release();
+ inputSurface = null;
+ }
runningInstance = null;
Logging.d(TAG, "Java releaseEncoder done");
}
@@ -336,7 +467,7 @@ public class MediaCodecVideoEncoder {
// Dequeue an input buffer and return its index, -1 if no input buffer is
// available, or -2 if the codec is no longer operative.
- private int dequeueInputBuffer() {
+ int dequeueInputBuffer() {
checkOnMediaCodecThread();
try {
return mediaCodec.dequeueInputBuffer(DEQUEUE_TIMEOUT);
@@ -347,7 +478,7 @@ public class MediaCodecVideoEncoder {
}
// Helper struct for dequeueOutputBuffer() below.
- private static class OutputBufferInfo {
+ static class OutputBufferInfo {
public OutputBufferInfo(
int index, ByteBuffer buffer,
boolean isKeyFrame, long presentationTimestampUs) {
@@ -357,15 +488,15 @@ public class MediaCodecVideoEncoder {
this.presentationTimestampUs = presentationTimestampUs;
}
- private final int index;
- private final ByteBuffer buffer;
- private final boolean isKeyFrame;
- private final long presentationTimestampUs;
+ public final int index;
+ public final ByteBuffer buffer;
+ public final boolean isKeyFrame;
+ public final long presentationTimestampUs;
}
// Dequeue and return an output buffer, or null if no output is ready. Return
// a fake OutputBufferInfo with index -1 if the codec is no longer operable.
- private OutputBufferInfo dequeueOutputBuffer() {
+ OutputBufferInfo dequeueOutputBuffer() {
checkOnMediaCodecThread();
try {
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
@@ -434,7 +565,7 @@ public class MediaCodecVideoEncoder {
// Release a dequeued output buffer back to the codec for re-use. Return
// false if the codec is no longer operable.
- private boolean releaseOutputBuffer(int index) {
+ boolean releaseOutputBuffer(int index) {
checkOnMediaCodecThread();
try {
mediaCodec.releaseOutputBuffer(index, false);
diff --git a/talk/app/webrtc/java/src/org/webrtc/PeerConnection.java b/talk/app/webrtc/java/src/org/webrtc/PeerConnection.java
index 50023001d7..36cd07595c 100644
--- a/talk/app/webrtc/java/src/org/webrtc/PeerConnection.java
+++ b/talk/app/webrtc/java/src/org/webrtc/PeerConnection.java
@@ -28,7 +28,6 @@
package org.webrtc;
-import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
@@ -151,6 +150,7 @@ public class PeerConnection {
public int audioJitterBufferMaxPackets;
public boolean audioJitterBufferFastAccelerate;
public int iceConnectionReceivingTimeout;
+ public int iceBackupCandidatePairPingInterval;
public KeyType keyType;
public ContinualGatheringPolicy continualGatheringPolicy;
@@ -163,6 +163,7 @@ public class PeerConnection {
audioJitterBufferMaxPackets = 50;
audioJitterBufferFastAccelerate = false;
iceConnectionReceivingTimeout = -1;
+ iceBackupCandidatePairPingInterval = -1;
keyType = KeyType.ECDSA;
continualGatheringPolicy = ContinualGatheringPolicy.GATHER_ONCE;
}
@@ -223,6 +224,14 @@ public class PeerConnection {
localStreams.remove(stream);
}
+ public RtpSender createSender(String kind, String stream_id) {
+ RtpSender new_sender = nativeCreateSender(kind, stream_id);
+ if (new_sender != null) {
+ senders.add(new_sender);
+ }
+ return new_sender;
+ }
+
// Note that calling getSenders will dispose of the senders previously
// returned (and same goes for getReceivers).
public List<RtpSender> getSenders() {
@@ -288,6 +297,8 @@ public class PeerConnection {
private native boolean nativeGetStats(
StatsObserver observer, long nativeTrack);
+ private native RtpSender nativeCreateSender(String kind, String stream_id);
+
private native List<RtpSender> nativeGetSenders();
private native List<RtpReceiver> nativeGetReceivers();
diff --git a/talk/app/webrtc/java/src/org/webrtc/PeerConnectionFactory.java b/talk/app/webrtc/java/src/org/webrtc/PeerConnectionFactory.java
index 83999ece98..d759c69271 100644
--- a/talk/app/webrtc/java/src/org/webrtc/PeerConnectionFactory.java
+++ b/talk/app/webrtc/java/src/org/webrtc/PeerConnectionFactory.java
@@ -73,6 +73,15 @@ public class PeerConnectionFactory {
// Field trial initialization. Must be called before PeerConnectionFactory
// is created.
public static native void initializeFieldTrials(String fieldTrialsInitString);
+ // Internal tracing initialization. Must be called before PeerConnectionFactory is created to
+ // prevent racing with tracing code.
+ public static native void initializeInternalTracer();
+ // Internal tracing shutdown, called to prevent resource leaks. Must be called after
+ // PeerConnectionFactory is gone to prevent races with code performing tracing.
+ public static native void shutdownInternalTracer();
+ // Start/stop capturing of internal tracing.
+ public static native boolean startInternalTracingCapture(String tracing_filename);
+ public static native void stopInternalTracingCapture();
public PeerConnectionFactory() {
nativeFactory = nativeCreatePeerConnectionFactory();
@@ -131,12 +140,52 @@ public class PeerConnectionFactory {
nativeFactory, id, source.nativeSource));
}
+ // Starts recording an AEC dump. Ownership of the file is transferred to the
+ // native code. If an AEC dump is already in progress, it will be stopped and
+ // a new one will start using the provided file.
+ public boolean startAecDump(int file_descriptor) {
+ return nativeStartAecDump(nativeFactory, file_descriptor);
+ }
+
+ // Stops recording an AEC dump. If no AEC dump is currently being recorded,
+ // this call will have no effect.
+ public void stopAecDump() {
+ nativeStopAecDump(nativeFactory);
+ }
+
+ // Starts recording an RTC event log. Ownership of the file is transferred to
+ // the native code. If an RTC event log is already being recorded, it will be
+ // stopped and a new one will start using the provided file.
+ public boolean startRtcEventLog(int file_descriptor) {
+ return nativeStartRtcEventLog(nativeFactory, file_descriptor);
+ }
+
+ // Stops recording an RTC event log. If no RTC event log is currently being
+ // recorded, this call will have no effect.
+ public void stopRtcEventLog() {
+ nativeStopRtcEventLog(nativeFactory);
+ }
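A hypothetical caller-side sketch for the AEC dump API; the file location and open flags are assumptions, and error handling is omitted:

    ParcelFileDescriptor pfd = ParcelFileDescriptor.open(
        new File(context.getCacheDir(), "audio.aecdump"),
        ParcelFileDescriptor.MODE_CREATE | ParcelFileDescriptor.MODE_TRUNCATE
            | ParcelFileDescriptor.MODE_READ_WRITE);
    factory.startAecDump(pfd.detachFd()); // The native side takes ownership of the fd.
    // ... later ...
    factory.stopAecDump();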
+
public void setOptions(Options options) {
nativeSetOptions(nativeFactory, options);
}
+ @Deprecated
public void setVideoHwAccelerationOptions(Object renderEGLContext) {
- nativeSetVideoHwAccelerationOptions(nativeFactory, renderEGLContext);
+ nativeSetVideoHwAccelerationOptions(nativeFactory, renderEGLContext, renderEGLContext);
+ }
+
+ /** Set the EGL context used by HW video encoding and decoding.
+ *
+ * @param localEGLContext An instance of javax.microedition.khronos.egl.EGLContext.
+ * Must be the same as used by VideoCapturerAndroid and any local
+ * video renderer.
+ * @param remoteEGLContext An instance of javax.microedition.khronos.egl.EGLContext.
+ * Must be the same as used by any remote video renderer.
+ */
+ public void setVideoHwAccelerationOptions(Object localEGLContext, Object remoteEGLContext) {
+ nativeSetVideoHwAccelerationOptions(nativeFactory, localEGLContext, remoteEGLContext);
}
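Typical usage of the two-context overload; the variable names are assumptions, and both arguments may be the same context:

    factory.setVideoHwAccelerationOptions(
        localEglContext /* shared with capturer and local renderer */,
        remoteEglContext /* shared with remote renderers */);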
public void dispose() {
@@ -201,10 +250,18 @@ public class PeerConnectionFactory {
private static native long nativeCreateAudioTrack(
long nativeFactory, String id, long nativeSource);
+ private static native boolean nativeStartAecDump(long nativeFactory, int file_descriptor);
+
+ private static native void nativeStopAecDump(long nativeFactory);
+
+ private static native boolean nativeStartRtcEventLog(long nativeFactory, int file_descriptor);
+
+ private static native void nativeStopRtcEventLog(long nativeFactory);
+
public native void nativeSetOptions(long nativeFactory, Options options);
private static native void nativeSetVideoHwAccelerationOptions(
- long nativeFactory, Object renderEGLContext);
+ long nativeFactory, Object localEGLContext, Object remoteEGLContext);
private static native void nativeThreadsCallbacks(long nativeFactory);
diff --git a/talk/app/webrtc/java/src/org/webrtc/RtpSender.java b/talk/app/webrtc/java/src/org/webrtc/RtpSender.java
index 37357c0657..9ac2e7034f 100644
--- a/talk/app/webrtc/java/src/org/webrtc/RtpSender.java
+++ b/talk/app/webrtc/java/src/org/webrtc/RtpSender.java
@@ -32,6 +32,7 @@ public class RtpSender {
final long nativeRtpSender;
private MediaStreamTrack cachedTrack;
+ private boolean ownsTrack = true;
public RtpSender(long nativeRtpSender) {
this.nativeRtpSender = nativeRtpSender;
@@ -40,14 +41,22 @@ public class RtpSender {
cachedTrack = (track == 0) ? null : new MediaStreamTrack(track);
}
- // NOTE: This should not be called with a track that's already used by
- // another RtpSender, because then it would be double-disposed.
- public void setTrack(MediaStreamTrack track) {
- if (cachedTrack != null) {
+ // If |takeOwnership| is true, the RtpSender takes ownership of the track
+ // from the caller, and will auto-dispose of it when no longer needed.
+ // |takeOwnership| should only be used if the caller owns the track; it is
+ // not appropriate when the track is owned by, for example, another RtpSender
+ // or a MediaStream.
+ public boolean setTrack(MediaStreamTrack track, boolean takeOwnership) {
+ if (!nativeSetTrack(nativeRtpSender,
+ (track == null) ? 0 : track.nativeTrack)) {
+ return false;
+ }
+ if (cachedTrack != null && ownsTrack) {
cachedTrack.dispose();
}
cachedTrack = track;
- nativeSetTrack(nativeRtpSender, (track == null) ? 0 : track.nativeTrack);
+ ownsTrack = takeOwnership;
+ return true;
}
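A sketch of this ownership contract in practice, using the createSender() added in this change (identifiers are assumptions):

    RtpSender sender = peerConnection.createSender("video", "stream1");
    // |videoTrack| is still owned by its MediaStream, so do not transfer ownership:
    sender.setTrack(videoTrack, false /* takeOwnership */);
    // Pass true only for a track the caller created and wants auto-disposed.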
public MediaStreamTrack track() {
@@ -59,14 +68,14 @@ public class RtpSender {
}
public void dispose() {
- if (cachedTrack != null) {
+ if (cachedTrack != null && ownsTrack) {
cachedTrack.dispose();
}
free(nativeRtpSender);
}
- private static native void nativeSetTrack(long nativeRtpSender,
- long nativeTrack);
+ private static native boolean nativeSetTrack(long nativeRtpSender,
+ long nativeTrack);
// This should increment the reference count of the track.
// Will be released in dispose() or setTrack().
diff --git a/talk/app/webrtc/java/src/org/webrtc/VideoRenderer.java b/talk/app/webrtc/java/src/org/webrtc/VideoRenderer.java
index 3c255dd123..2e307fc54b 100644
--- a/talk/app/webrtc/java/src/org/webrtc/VideoRenderer.java
+++ b/talk/app/webrtc/java/src/org/webrtc/VideoRenderer.java
@@ -46,7 +46,11 @@ public class VideoRenderer {
public final int[] yuvStrides;
public ByteBuffer[] yuvPlanes;
public final boolean yuvFrame;
- public Object textureObject;
+ // Matrix that transforms standard coordinates to their proper sampling locations in
+ // the texture. This transform compensates for any properties of the video source that
+ // cause it to appear different from a normalized texture. This matrix does not take
+ // |rotationDegree| into account.
+ public final float[] samplingMatrix;
public int textureId;
// Frame pointer in C++.
private long nativeFramePointer;
@@ -70,19 +74,27 @@ public class VideoRenderer {
if (rotationDegree % 90 != 0) {
throw new IllegalArgumentException("Rotation degree not multiple of 90: " + rotationDegree);
}
+ // The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
+ // top-left corner of the image, but in glTexImage2D() the first element corresponds to the
+ // bottom-left corner. This discrepancy is corrected by using a vertical flip as the
+ // sampling matrix.
+ samplingMatrix = new float[] {
+ 1, 0, 0, 0,
+ 0, -1, 0, 0,
+ 0, 0, 1, 0,
+ 0, 1, 0, 1};
}
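A quick worked check of the flip (the matrix is column-major, as OpenGL expects): it maps texture coordinate (u, v) to (u, 1 - v), so WebRTC's top-left (0, 0) lands on GL's (0, 1):

    float[] result = new float[4];
    android.opengl.Matrix.multiplyMV(
        result, 0, samplingMatrix, 0, new float[] {0f, 0f, 0f, 1f}, 0);
    // result is now {0f, 1f, 0f, 1f}.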
/**
* Construct a texture frame of the given dimensions with data in SurfaceTexture
*/
- I420Frame(
- int width, int height, int rotationDegree,
- Object textureObject, int textureId, long nativeFramePointer) {
+ I420Frame(int width, int height, int rotationDegree, int textureId, float[] samplingMatrix,
+ long nativeFramePointer) {
this.width = width;
this.height = height;
this.yuvStrides = null;
this.yuvPlanes = null;
- this.textureObject = textureObject;
+ this.samplingMatrix = samplingMatrix;
this.textureId = textureId;
this.yuvFrame = false;
this.rotationDegree = rotationDegree;
@@ -125,7 +137,6 @@ public class VideoRenderer {
*/
public static void renderFrameDone(I420Frame frame) {
frame.yuvPlanes = null;
- frame.textureObject = null;
frame.textureId = 0;
if (frame.nativeFramePointer != 0) {
releaseNativeFrame(frame.nativeFramePointer);
diff --git a/talk/app/webrtc/jsepsessiondescription.cc b/talk/app/webrtc/jsepsessiondescription.cc
index 24bd9d4195..226432db69 100644
--- a/talk/app/webrtc/jsepsessiondescription.cc
+++ b/talk/app/webrtc/jsepsessiondescription.cc
@@ -29,6 +29,7 @@
#include "talk/app/webrtc/webrtcsdp.h"
#include "talk/session/media/mediasession.h"
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/stringencode.h"
using rtc::scoped_ptr;
@@ -44,7 +45,7 @@ static const char* kSupportedTypes[] = {
static bool IsTypeSupported(const std::string& type) {
bool type_supported = false;
- for (size_t i = 0; i < ARRAY_SIZE(kSupportedTypes); ++i) {
+ for (size_t i = 0; i < arraysize(kSupportedTypes); ++i) {
if (kSupportedTypes[i] == type) {
type_supported = true;
break;
diff --git a/talk/app/webrtc/localaudiosource.cc b/talk/app/webrtc/localaudiosource.cc
index 63c6f13a3d..591877aa8b 100644
--- a/talk/app/webrtc/localaudiosource.cc
+++ b/talk/app/webrtc/localaudiosource.cc
@@ -49,7 +49,7 @@ void FromConstraints(const MediaConstraintsInterface::Constraints& constraints,
// a different algorithm will be required.
struct {
const char* name;
- cricket::Settable<bool>& value;
+ rtc::Optional<bool>& value;
} key_to_value[] = {
{MediaConstraintsInterface::kGoogEchoCancellation,
options->echo_cancellation},
@@ -78,7 +78,7 @@ void FromConstraints(const MediaConstraintsInterface::Constraints& constraints,
for (auto& entry : key_to_value) {
if (constraint.key.compare(entry.name) == 0)
- entry.value.Set(value);
+ entry.value = rtc::Optional<bool>(value);
}
}
}
diff --git a/talk/app/webrtc/localaudiosource.h b/talk/app/webrtc/localaudiosource.h
index 557745b8b8..5158eb1215 100644
--- a/talk/app/webrtc/localaudiosource.h
+++ b/talk/app/webrtc/localaudiosource.h
@@ -48,16 +48,17 @@ class LocalAudioSource : public Notifier<AudioSourceInterface> {
const PeerConnectionFactoryInterface::Options& options,
const MediaConstraintsInterface* constraints);
- virtual SourceState state() const { return source_state_; }
+ SourceState state() const override { return source_state_; }
+ bool remote() const override { return false; }
+
virtual const cricket::AudioOptions& options() const { return options_; }
- protected:
- LocalAudioSource()
- : source_state_(kInitializing) {
- }
+ void AddSink(AudioTrackSinkInterface* sink) override {}
+ void RemoveSink(AudioTrackSinkInterface* sink) override {}
- ~LocalAudioSource() {
- }
+ protected:
+ LocalAudioSource() : source_state_(kInitializing) {}
+ ~LocalAudioSource() override {}
private:
void Initialize(const PeerConnectionFactoryInterface::Options& options,
diff --git a/talk/app/webrtc/localaudiosource_unittest.cc b/talk/app/webrtc/localaudiosource_unittest.cc
index 8e05c18287..75d0c35462 100644
--- a/talk/app/webrtc/localaudiosource_unittest.cc
+++ b/talk/app/webrtc/localaudiosource_unittest.cc
@@ -58,23 +58,14 @@ TEST(LocalAudioSourceTest, SetValidOptions) {
LocalAudioSource::Create(PeerConnectionFactoryInterface::Options(),
&constraints);
- bool value;
- EXPECT_TRUE(source->options().echo_cancellation.Get(&value));
- EXPECT_FALSE(value);
- EXPECT_TRUE(source->options().extended_filter_aec.Get(&value));
- EXPECT_TRUE(value);
- EXPECT_TRUE(source->options().delay_agnostic_aec.Get(&value));
- EXPECT_TRUE(value);
- EXPECT_TRUE(source->options().auto_gain_control.Get(&value));
- EXPECT_TRUE(value);
- EXPECT_TRUE(source->options().experimental_agc.Get(&value));
- EXPECT_TRUE(value);
- EXPECT_TRUE(source->options().noise_suppression.Get(&value));
- EXPECT_FALSE(value);
- EXPECT_TRUE(source->options().highpass_filter.Get(&value));
- EXPECT_TRUE(value);
- EXPECT_TRUE(source->options().aec_dump.Get(&value));
- EXPECT_TRUE(value);
+ EXPECT_EQ(rtc::Optional<bool>(false), source->options().echo_cancellation);
+ EXPECT_EQ(rtc::Optional<bool>(true), source->options().extended_filter_aec);
+ EXPECT_EQ(rtc::Optional<bool>(true), source->options().delay_agnostic_aec);
+ EXPECT_EQ(rtc::Optional<bool>(true), source->options().auto_gain_control);
+ EXPECT_EQ(rtc::Optional<bool>(true), source->options().experimental_agc);
+ EXPECT_EQ(rtc::Optional<bool>(false), source->options().noise_suppression);
+ EXPECT_EQ(rtc::Optional<bool>(true), source->options().highpass_filter);
+ EXPECT_EQ(rtc::Optional<bool>(true), source->options().aec_dump);
}
TEST(LocalAudioSourceTest, OptionNotSet) {
@@ -82,8 +73,7 @@ TEST(LocalAudioSourceTest, OptionNotSet) {
rtc::scoped_refptr<LocalAudioSource> source =
LocalAudioSource::Create(PeerConnectionFactoryInterface::Options(),
&constraints);
- bool value;
- EXPECT_FALSE(source->options().highpass_filter.Get(&value));
+ EXPECT_EQ(rtc::Optional<bool>(), source->options().highpass_filter);
}
TEST(LocalAudioSourceTest, MandatoryOverridesOptional) {
@@ -97,9 +87,7 @@ TEST(LocalAudioSourceTest, MandatoryOverridesOptional) {
LocalAudioSource::Create(PeerConnectionFactoryInterface::Options(),
&constraints);
- bool value;
- EXPECT_TRUE(source->options().echo_cancellation.Get(&value));
- EXPECT_FALSE(value);
+ EXPECT_EQ(rtc::Optional<bool>(false), source->options().echo_cancellation);
}
TEST(LocalAudioSourceTest, InvalidOptional) {
@@ -112,9 +100,7 @@ TEST(LocalAudioSourceTest, InvalidOptional) {
&constraints);
EXPECT_EQ(MediaSourceInterface::kLive, source->state());
- bool value;
- EXPECT_TRUE(source->options().highpass_filter.Get(&value));
- EXPECT_FALSE(value);
+ EXPECT_EQ(rtc::Optional<bool>(false), source->options().highpass_filter);
}
TEST(LocalAudioSourceTest, InvalidMandatory) {
@@ -127,7 +113,5 @@ TEST(LocalAudioSourceTest, InvalidMandatory) {
&constraints);
EXPECT_EQ(MediaSourceInterface::kLive, source->state());
- bool value;
- EXPECT_TRUE(source->options().highpass_filter.Get(&value));
- EXPECT_FALSE(value);
+ EXPECT_EQ(rtc::Optional<bool>(false), source->options().highpass_filter);
}
diff --git a/talk/app/webrtc/mediacontroller.cc b/talk/app/webrtc/mediacontroller.cc
index f7d85116b1..24f5877483 100644
--- a/talk/app/webrtc/mediacontroller.cc
+++ b/talk/app/webrtc/mediacontroller.cc
@@ -47,11 +47,10 @@ class MediaController : public webrtc::MediaControllerInterface,
RTC_DCHECK(nullptr != worker_thread);
worker_thread_->Invoke<void>(
rtc::Bind(&MediaController::Construct_w, this,
- channel_manager_->media_engine()->GetVoE()));
+ channel_manager_->media_engine()));
}
~MediaController() override {
- worker_thread_->Invoke<void>(
- rtc::Bind(&MediaController::Destruct_w, this));
+ worker_thread_->Invoke<void>(rtc::Bind(&MediaController::Destruct_w, this));
}
webrtc::Call* call_w() override {
@@ -64,10 +63,11 @@ class MediaController : public webrtc::MediaControllerInterface,
}
private:
- void Construct_w(webrtc::VoiceEngine* voice_engine) {
+ void Construct_w(cricket::MediaEngineInterface* media_engine) {
RTC_DCHECK(worker_thread_->IsCurrent());
+ RTC_DCHECK(media_engine);
webrtc::Call::Config config;
- config.voice_engine = voice_engine;
+ config.audio_state = media_engine->GetAudioState();
config.bitrate_config.min_bitrate_bps = kMinBandwidthBps;
config.bitrate_config.start_bitrate_bps = kStartBandwidthBps;
config.bitrate_config.max_bitrate_bps = kMaxBandwidthBps;
@@ -84,7 +84,7 @@ class MediaController : public webrtc::MediaControllerInterface,
RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(MediaController);
};
-} // namespace {
+} // namespace {
namespace webrtc {
@@ -93,4 +93,4 @@ MediaControllerInterface* MediaControllerInterface::Create(
cricket::ChannelManager* channel_manager) {
return new MediaController(worker_thread, channel_manager);
}
-} // namespace webrtc
+} // namespace webrtc
diff --git a/talk/app/webrtc/mediastream_unittest.cc b/talk/app/webrtc/mediastream_unittest.cc
index 2cf930c4c0..f19b9456a6 100644
--- a/talk/app/webrtc/mediastream_unittest.cc
+++ b/talk/app/webrtc/mediastream_unittest.cc
@@ -48,9 +48,23 @@ namespace webrtc {
// Helper class to test Observer.
class MockObserver : public ObserverInterface {
public:
- MockObserver() {}
+ explicit MockObserver(NotifierInterface* notifier) : notifier_(notifier) {
+ notifier_->RegisterObserver(this);
+ }
+
+ ~MockObserver() { Unregister(); }
+
+ void Unregister() {
+ if (notifier_) {
+ notifier_->UnregisterObserver(this);
+ notifier_ = nullptr;
+ }
+ }
MOCK_METHOD0(OnChanged, void());
+
+ private:
+ NotifierInterface* notifier_;
};
class MediaStreamTest: public testing::Test {
@@ -75,8 +89,7 @@ class MediaStreamTest: public testing::Test {
}
void ChangeTrack(MediaStreamTrackInterface* track) {
- MockObserver observer;
- track->RegisterObserver(&observer);
+ MockObserver observer(track);
EXPECT_CALL(observer, OnChanged())
.Times(Exactly(1));
@@ -127,8 +140,7 @@ TEST_F(MediaStreamTest, GetTrackInfo) {
}
TEST_F(MediaStreamTest, RemoveTrack) {
- MockObserver observer;
- stream_->RegisterObserver(&observer);
+ MockObserver observer(stream_);
EXPECT_CALL(observer, OnChanged())
.Times(Exactly(2));
diff --git a/talk/app/webrtc/mediastreaminterface.h b/talk/app/webrtc/mediastreaminterface.h
index 5911e85e8e..9b137d9f76 100644
--- a/talk/app/webrtc/mediastreaminterface.h
+++ b/talk/app/webrtc/mediastreaminterface.h
@@ -71,8 +71,6 @@ class NotifierInterface {
// Base class for sources. A MediaStreamTrack has an underlying source that
// provides media. A source can be shared by multiple tracks.
-// TODO(perkj): Implement sources for local and remote audio tracks and
-// remote video tracks.
class MediaSourceInterface : public rtc::RefCountInterface,
public NotifierInterface {
public:
@@ -85,6 +83,8 @@ class MediaSourceInterface : public rtc::RefCountInterface,
virtual SourceState state() const = 0;
+ virtual bool remote() const = 0;
+
protected:
virtual ~MediaSourceInterface() {}
};
@@ -100,6 +100,9 @@ class MediaStreamTrackInterface : public rtc::RefCountInterface,
kFailed = 3, // Track negotiation failed.
};
+ static const char kAudioKind[];
+ static const char kVideoKind[];
+
virtual std::string kind() const = 0;
virtual std::string id() const = 0;
virtual bool enabled() const = 0;
@@ -115,13 +118,6 @@ class MediaStreamTrackInterface : public rtc::RefCountInterface,
// Interface for rendering VideoFrames from a VideoTrack
class VideoRendererInterface {
public:
- // TODO(guoweis): Remove this function. Obsolete. The implementation of
- // VideoRendererInterface should be able to handle different frame size as
- // well as pending rotation. If it can't apply the frame rotation by itself,
- // it should call |frame|.GetCopyWithRotationApplied() to get a frame that has
- // the rotation applied.
- virtual void SetSize(int width, int height) {}
-
// |frame| may have pending rotation. For clients which can't apply rotation,
// |frame|->GetCopyWithRotationApplied() will return a frame that has the
// rotation applied.
@@ -149,6 +145,19 @@ class VideoTrackInterface : public MediaStreamTrackInterface {
virtual ~VideoTrackInterface() {}
};
+// Interface for receiving audio data from an AudioTrack.
+class AudioTrackSinkInterface {
+ public:
+ virtual void OnData(const void* audio_data,
+ int bits_per_sample,
+ int sample_rate,
+ size_t number_of_channels,
+ size_t number_of_frames) = 0;
+
+ protected:
+ virtual ~AudioTrackSinkInterface() {}
+};
+
// AudioSourceInterface is a reference counted source used for AudioTracks.
// The same source can be used in multiple AudioTracks.
class AudioSourceInterface : public MediaSourceInterface {
@@ -164,23 +173,17 @@ class AudioSourceInterface : public MediaSourceInterface {
// TODO(xians): Make all the interface methods pure virtual after Chrome has its
// implementations.
// Sets the volume of the source. |volume| is in the range of [0, 10].
+ // TODO(tommi): This method should be on the track and ideally volume should
+ // be applied in the track in a way that does not affect clones of the track.
virtual void SetVolume(double volume) {}
// Registers/unregisters observer to the audio source.
virtual void RegisterAudioObserver(AudioObserver* observer) {}
virtual void UnregisterAudioObserver(AudioObserver* observer) {}
-};
-// Interface for receiving audio data from a AudioTrack.
-class AudioTrackSinkInterface {
- public:
- virtual void OnData(const void* audio_data,
- int bits_per_sample,
- int sample_rate,
- int number_of_channels,
- size_t number_of_frames) = 0;
- protected:
- virtual ~AudioTrackSinkInterface() {}
+ // TODO(tommi): Make pure virtual.
+ virtual void AddSink(AudioTrackSinkInterface* sink) {}
+ virtual void RemoveSink(AudioTrackSinkInterface* sink) {}
};
// Interface of the audio processor used by the audio track to collect
diff --git a/talk/app/webrtc/mediastreamobserver.cc b/talk/app/webrtc/mediastreamobserver.cc
new file mode 100644
index 0000000000..2650b9a6f7
--- /dev/null
+++ b/talk/app/webrtc/mediastreamobserver.cc
@@ -0,0 +1,101 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/mediastreamobserver.h"
+
+#include <algorithm>
+
+namespace webrtc {
+
+MediaStreamObserver::MediaStreamObserver(MediaStreamInterface* stream)
+ : stream_(stream),
+ cached_audio_tracks_(stream->GetAudioTracks()),
+ cached_video_tracks_(stream->GetVideoTracks()) {
+ stream_->RegisterObserver(this);
+}
+
+MediaStreamObserver::~MediaStreamObserver() {
+ stream_->UnregisterObserver(this);
+}
+
+void MediaStreamObserver::OnChanged() {
+ AudioTrackVector new_audio_tracks = stream_->GetAudioTracks();
+ VideoTrackVector new_video_tracks = stream_->GetVideoTracks();
+
+ // Find removed audio tracks.
+ for (const auto& cached_track : cached_audio_tracks_) {
+ auto it = std::find_if(
+ new_audio_tracks.begin(), new_audio_tracks.end(),
+ [cached_track](const AudioTrackVector::value_type& new_track) {
+ return new_track->id().compare(cached_track->id()) == 0;
+ });
+ if (it == new_audio_tracks.end()) {
+ SignalAudioTrackRemoved(cached_track.get(), stream_);
+ }
+ }
+
+ // Find added audio tracks.
+ for (const auto& new_track : new_audio_tracks) {
+ auto it = std::find_if(
+ cached_audio_tracks_.begin(), cached_audio_tracks_.end(),
+ [new_track](const AudioTrackVector::value_type& cached_track) {
+ return new_track->id().compare(cached_track->id()) == 0;
+ });
+ if (it == cached_audio_tracks_.end()) {
+ SignalAudioTrackAdded(new_track.get(), stream_);
+ }
+ }
+
+ // Find removed video tracks.
+ for (const auto& cached_track : cached_video_tracks_) {
+ auto it = std::find_if(
+ new_video_tracks.begin(), new_video_tracks.end(),
+ [cached_track](const VideoTrackVector::value_type& new_track) {
+ return new_track->id().compare(cached_track->id()) == 0;
+ });
+ if (it == new_video_tracks.end()) {
+ SignalVideoTrackRemoved(cached_track.get(), stream_);
+ }
+ }
+
+ // Find added video tracks.
+ for (const auto& new_track : new_video_tracks) {
+ auto it = std::find_if(
+ cached_video_tracks_.begin(), cached_video_tracks_.end(),
+ [new_track](const VideoTrackVector::value_type& cached_track) {
+ return new_track->id().compare(cached_track->id()) == 0;
+ });
+ if (it == cached_video_tracks_.end()) {
+ SignalVideoTrackAdded(new_track.get(), stream_);
+ }
+ }
+
+ cached_audio_tracks_ = new_audio_tracks;
+ cached_video_tracks_ = new_video_tracks;
+}
+
+} // namespace webrtc
diff --git a/talk/app/webrtc/mediastreamobserver.h b/talk/app/webrtc/mediastreamobserver.h
new file mode 100644
index 0000000000..1dd6c4c118
--- /dev/null
+++ b/talk/app/webrtc/mediastreamobserver.h
@@ -0,0 +1,65 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_MEDIASTREAMOBSERVER_H_
+#define TALK_APP_WEBRTC_MEDIASTREAMOBSERVER_H_
+
+#include "talk/app/webrtc/mediastreaminterface.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/base/sigslot.h"
+
+namespace webrtc {
+
+// Helper class which will listen for changes to a stream and emit the
+// corresponding signals.
+class MediaStreamObserver : public ObserverInterface {
+ public:
+ explicit MediaStreamObserver(MediaStreamInterface* stream);
+ ~MediaStreamObserver();
+
+ const MediaStreamInterface* stream() const { return stream_; }
+
+ void OnChanged() override;
+
+ sigslot::signal2<AudioTrackInterface*, MediaStreamInterface*>
+ SignalAudioTrackAdded;
+ sigslot::signal2<AudioTrackInterface*, MediaStreamInterface*>
+ SignalAudioTrackRemoved;
+ sigslot::signal2<VideoTrackInterface*, MediaStreamInterface*>
+ SignalVideoTrackAdded;
+ sigslot::signal2<VideoTrackInterface*, MediaStreamInterface*>
+ SignalVideoTrackRemoved;
+
+ private:
+ rtc::scoped_refptr<MediaStreamInterface> stream_;
+ AudioTrackVector cached_audio_tracks_;
+ VideoTrackVector cached_video_tracks_;
+};
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_MEDIASTREAMOBSERVER_H_
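A hypothetical sketch of wiring up the observer's signals (Receiver and its
handlers are illustrative, not part of this patch; sigslot requires the
receiving class to derive from sigslot::has_slots<>):

  class Receiver : public sigslot::has_slots<> {
   public:
    explicit Receiver(webrtc::MediaStreamInterface* stream)
        : observer_(stream) {
      observer_.SignalAudioTrackAdded.connect(this, &Receiver::OnAudioAdded);
      observer_.SignalAudioTrackRemoved.connect(this,
                                                &Receiver::OnAudioRemoved);
    }

   private:
    void OnAudioAdded(webrtc::AudioTrackInterface* track,
                      webrtc::MediaStreamInterface* stream) {
      // React to a track that appeared in the stream.
    }
    void OnAudioRemoved(webrtc::AudioTrackInterface* track,
                        webrtc::MediaStreamInterface* stream) {
      // React to a track that left the stream.
    }
    webrtc::MediaStreamObserver observer_;
  };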
diff --git a/talk/app/webrtc/mediastreamprovider.h b/talk/app/webrtc/mediastreamprovider.h
index 1c62daf9f1..585d51bcc8 100644
--- a/talk/app/webrtc/mediastreamprovider.h
+++ b/talk/app/webrtc/mediastreamprovider.h
@@ -29,6 +29,7 @@
#define TALK_APP_WEBRTC_MEDIASTREAMPROVIDER_H_
#include "webrtc/base/basictypes.h"
+#include "webrtc/base/scoped_ptr.h"
namespace cricket {
@@ -42,6 +43,8 @@ struct VideoOptions;
namespace webrtc {
+class AudioSinkInterface;
+
// TODO(deadbeef): Change the key from an ssrc to a "sender_id" or
// "receiver_id" string, which will be the MSID in the short term and MID in
// the long term.
@@ -50,8 +53,8 @@ namespace webrtc {
// RtpSenders/Receivers to get to the BaseChannels. These interfaces should be
// refactored away eventually, as the classes converge.
-// This interface is called by AudioTrackHandler classes in mediastreamhandler.h
-// to change the settings of an audio track connected to certain PeerConnection.
+// This interface is called by AudioRtpSender/Receivers to change the settings
+// of an audio track connected to a certain PeerConnection.
class AudioProviderInterface {
public:
// Enable/disable the audio playout of a remote audio track with |ssrc|.
@@ -67,13 +70,19 @@ class AudioProviderInterface {
// |volume| is in the range of [0, 10].
virtual void SetAudioPlayoutVolume(uint32_t ssrc, double volume) = 0;
+ // Allows for setting a direct audio sink for an incoming audio source.
+ // Only one audio sink is supported per ssrc and ownership of the sink is
+ // passed to the provider.
+ virtual void SetRawAudioSink(
+ uint32_t ssrc,
+ rtc::scoped_ptr<webrtc::AudioSinkInterface> sink) = 0;
+
protected:
virtual ~AudioProviderInterface() {}
};
-// This interface is called by VideoTrackHandler classes in mediastreamhandler.h
-// to change the settings of a video track connected to a certain
-// PeerConnection.
+// This interface is called by VideoRtpSender/Receivers to change the settings
+// of a video track connected to a certain PeerConnection.
class VideoProviderInterface {
public:
virtual bool SetCaptureDevice(uint32_t ssrc,
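A sketch of the ownership-transfer pattern for SetRawAudioSink() above; MySink
is hypothetical and must implement webrtc::AudioSinkInterface, which is only
forward-declared in this header:

  rtc::scoped_ptr<webrtc::AudioSinkInterface> sink(new MySink());
  // The provider takes ownership; only one sink is kept per ssrc, so a second
  // call with the same ssrc replaces the first sink.
  provider->SetRawAudioSink(ssrc, std::move(sink));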
diff --git a/talk/app/webrtc/mediastreamsignaling.cc b/talk/app/webrtc/mediastreamsignaling.cc
deleted file mode 100644
index b405273902..0000000000
--- a/talk/app/webrtc/mediastreamsignaling.cc
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * libjingle
- * Copyright 2012 Google Inc.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions are met:
- *
- * 1. Redistributions of source code must retain the above copyright notice,
- * this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright notice,
- * this list of conditions and the following disclaimer in the documentation
- * and/or other materials provided with the distribution.
- * 3. The name of the author may not be used to endorse or promote products
- * derived from this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
- * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
- * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
- * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
- * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
- * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
- * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#include "talk/app/webrtc/mediastreamsignaling.h"
-
-// TODO(deadbeef): Remove this file once Chrome build files don't reference it.
diff --git a/talk/app/webrtc/mediastreamsignaling.h b/talk/app/webrtc/mediastreamsignaling.h
deleted file mode 100644
index e8c5c110d0..0000000000
--- a/talk/app/webrtc/mediastreamsignaling.h
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * libjingle
- * Copyright 2012 Google Inc.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions are met:
- *
- * 1. Redistributions of source code must retain the above copyright notice,
- * this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright notice,
- * this list of conditions and the following disclaimer in the documentation
- * and/or other materials provided with the distribution.
- * 3. The name of the author may not be used to endorse or promote products
- * derived from this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
- * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
- * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
- * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
- * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
- * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
- * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-// TODO(deadbeef): Remove this file once Chrome build files don't reference it.
diff --git a/talk/app/webrtc/objc/README b/talk/app/webrtc/objc/README
index 692fbbc564..c323e73ed1 100644
--- a/talk/app/webrtc/objc/README
+++ b/talk/app/webrtc/objc/README
@@ -12,69 +12,59 @@ Prerequisites:
up for building for iOS-device, iOS-simulator, and Mac (resp) are:
function wrbase() {
cd /path/to/webrtc/trunk
- export GYP_DEFINES="build_with_libjingle=1 build_with_chromium=0 libjingle_objc=1"
+ export GYP_DEFINES="build_with_libjingle=1 build_with_chromium=0"
export GYP_GENERATORS="ninja"
}
function wrios() {
wrbase
- export GYP_DEFINES="$GYP_DEFINES OS=ios target_arch=armv7"
+ export GYP_DEFINES="$GYP_DEFINES OS=ios"
export GYP_GENERATOR_FLAGS="$GYP_GENERATOR_FLAGS output_dir=out_ios"
export GYP_CROSSCOMPILE=1
}
+function wrios32() {
+ wrios
+ export GYP_DEFINES="$GYP_DEFINES target_arch=arm"
+}
+
+function wrios64() {
+ wrios
+ export GYP_DEFINES="$GYP_DEFINES target_arch=arm64"
+}
+
function wrsim() {
wrbase
- export GYP_DEFINES="$GYP_DEFINES OS=ios target_arch=ia32"
+ export GYP_DEFINES="$GYP_DEFINES OS=ios target_subarch=arm32 target_arch=ia32"
export GYP_GENERATOR_FLAGS="$GYP_GENERATOR_FLAGS output_dir=out_sim"
export GYP_CROSSCOMPILE=1
}
function wrmac() {
wrbase
- export GYP_DEFINES="$GYP_DEFINES OS=mac target_arch=x64"
+ export GYP_DEFINES="$GYP_DEFINES OS=mac target_subarch=arm64 target_arch=x64"
export GYP_GENERATOR_FLAGS="$GYP_GENERATOR_FLAGS output_dir=out_mac"
}
-- Finally, run "gclient runhooks" to generate ninja files.
+- Finally, run "webrtc/build/gyp_webrtc" to generate ninja files.
Example of building & using the unittest & app:
- To build & run the unittest (must target mac):
- wrmac && gclient runhooks && \
+ wrmac && ./webrtc/build/gyp_webrtc && \
ninja -C out_mac/Debug libjingle_peerconnection_objc_test && \
./out_mac/Debug/libjingle_peerconnection_objc_test.app/Contents/MacOS/libjingle_peerconnection_objc_test
- To build & launch the sample app on OSX:
- wrmac && gclient runhooks && ninja -C out_mac/Debug AppRTCDemo && \
+ wrmac && ./webrtc/build/gyp_webrtc && ninja -C out_mac/Debug AppRTCDemo && \
./out_mac/Debug/AppRTCDemo.app/Contents/MacOS/AppRTCDemo
- To build & launch the sample app on the iOS simulator:
- wrsim && gclient runhooks && ninja -C out_sim/Debug iossim AppRTCDemo && \
+ wrsim && ./webrtc/build/gyp_webrtc && ninja -C out_sim/Debug iossim AppRTCDemo && \
./out_sim/Debug/iossim out_sim/Debug/AppRTCDemo.app
-- To build & sign the sample app for an iOS device:
- wrios && gclient runhooks && ninja -C out_ios/Debug-iphoneos AppRTCDemo
-
-- To install the sample app on an iOS device:
- ideviceinstaller -i out_ios/Debug-iphoneos/AppRTCDemo.app
- (if installing ideviceinstaller from brew, use --HEAD to get support
- for .app directories)
-- Alternatively, use iPhone Configuration Utility:
- - Open "iPhone Configuration Utility" (http://support.apple.com/kb/DL1465)
- - Click the "Add" icon (command-o)
- - Open the app under out_ios/Debug-iphoneos/AppRTCDemo (should be added to the Applications tab)
- - Click the device's name in the left-hand panel and select the Applications tab
- - Click Install on the AppRTCDemo line.
- (If you have any problems deploying for the first time, check
- the Info.plist file to ensure that the Bundle Identifier matches
- your phone provisioning profile, or use a development wildcard
- provisioning profile.)
-- Alternately, use ios-deploy:
- ios-deploy -d -b out_ios/Debug-iphoneos/AppRTCDemo.app
+- To build & sign the sample app for an iOS device (32 bit):
+ wrios32 && ./webrtc/build/gyp_webrtc && ninja -C out_ios/Debug-iphoneos AppRTCDemo
-- Once installed:
- - Tap AppRTCDemo on the iOS device's home screen (might have to scroll to find it).
- - In desktop chrome, navigate to http://apprtc.appspot.com and note
- the r=<NNN> room number in the resulting URL; enter that number
- into the text field on the phone.
+- To build & sign the sample app for an iOS device (64 bit):
+ wrios64 && ./webrtc/build/gyp_webrtc && ninja -C out_ios/Debug-iphoneos AppRTCDemo
diff --git a/talk/app/webrtc/objc/RTCFileLogger.mm b/talk/app/webrtc/objc/RTCFileLogger.mm
index c4e469655d..44ada3e22e 100644
--- a/talk/app/webrtc/objc/RTCFileLogger.mm
+++ b/talk/app/webrtc/objc/RTCFileLogger.mm
@@ -35,15 +35,17 @@
NSString *const kDefaultLogDirName = @"webrtc_logs";
NSUInteger const kDefaultMaxFileSize = 10 * 1024 * 1024; // 10MB.
+const char *kRTCFileLoggerRotatingLogPrefix = "rotating_log";
@implementation RTCFileLogger {
BOOL _hasStarted;
NSString *_dirPath;
NSUInteger _maxFileSize;
- rtc::scoped_ptr<rtc::CallSessionFileRotatingLogSink> _logSink;
+ rtc::scoped_ptr<rtc::FileRotatingLogSink> _logSink;
}
@synthesize severity = _severity;
+@synthesize rotationType = _rotationType;
- (instancetype)init {
NSArray *paths = NSSearchPathForDirectoriesInDomains(
@@ -57,6 +59,14 @@ NSUInteger const kDefaultMaxFileSize = 10 * 1024 * 1024; // 10MB.
- (instancetype)initWithDirPath:(NSString *)dirPath
maxFileSize:(NSUInteger)maxFileSize {
+ return [self initWithDirPath:dirPath
+ maxFileSize:maxFileSize
+ rotationType:kRTCFileLoggerTypeCall];
+}
+
+- (instancetype)initWithDirPath:(NSString *)dirPath
+ maxFileSize:(NSUInteger)maxFileSize
+ rotationType:(RTCFileLoggerRotationType)rotationType {
NSParameterAssert(dirPath.length);
NSParameterAssert(maxFileSize);
if (self = [super init]) {
@@ -91,8 +101,20 @@ NSUInteger const kDefaultMaxFileSize = 10 * 1024 * 1024; // 10MB.
if (_hasStarted) {
return;
}
- _logSink.reset(new rtc::CallSessionFileRotatingLogSink(_dirPath.UTF8String,
- _maxFileSize));
+ switch (_rotationType) {
+ case kRTCFileLoggerTypeApp:
+ _logSink.reset(
+ new rtc::FileRotatingLogSink(_dirPath.UTF8String,
+ kRTCFileLoggerRotatingLogPrefix,
+ _maxFileSize,
+ _maxFileSize / 10));
+ break;
+ case kRTCFileLoggerTypeCall:
+ _logSink.reset(
+ new rtc::CallSessionFileRotatingLogSink(_dirPath.UTF8String,
+ _maxFileSize));
+ break;
+ }
if (!_logSink->Init()) {
LOG(LS_ERROR) << "Failed to open log files at path: "
<< _dirPath.UTF8String;
@@ -120,8 +142,17 @@ NSUInteger const kDefaultMaxFileSize = 10 * 1024 * 1024; // 10MB.
return nil;
}
NSMutableData* logData = [NSMutableData data];
- rtc::scoped_ptr<rtc::CallSessionFileRotatingStream> stream(
- new rtc::CallSessionFileRotatingStream(_dirPath.UTF8String));
+ rtc::scoped_ptr<rtc::FileRotatingStream> stream;
+  switch (_rotationType) {
+ case kRTCFileLoggerTypeApp:
+ stream.reset(
+ new rtc::FileRotatingStream(_dirPath.UTF8String,
+ kRTCFileLoggerRotatingLogPrefix));
+ break;
+ case kRTCFileLoggerTypeCall:
+ stream.reset(new rtc::CallSessionFileRotatingStream(_dirPath.UTF8String));
+ break;
+ }
if (!stream->Open()) {
return logData;
}
diff --git a/talk/app/webrtc/objc/RTCPeerConnection.mm b/talk/app/webrtc/objc/RTCPeerConnection.mm
index 44d39cb090..f814f06ad8 100644
--- a/talk/app/webrtc/objc/RTCPeerConnection.mm
+++ b/talk/app/webrtc/objc/RTCPeerConnection.mm
@@ -271,11 +271,13 @@ class RTCStatsObserver : public StatsObserver {
- (instancetype)initWithFactory:(webrtc::PeerConnectionFactoryInterface*)factory
iceServers:(const webrtc::PeerConnectionInterface::IceServers&)iceServers
constraints:(const webrtc::MediaConstraintsInterface*)constraints {
- NSParameterAssert(factory != NULL);
+ NSParameterAssert(factory != nullptr);
if (self = [super init]) {
+ webrtc::PeerConnectionInterface::RTCConfiguration config;
+ config.servers = iceServers;
_observer.reset(new webrtc::RTCPeerConnectionObserver(self));
_peerConnection = factory->CreatePeerConnection(
- iceServers, constraints, NULL, NULL, _observer.get());
+ config, constraints, nullptr, nullptr, _observer.get());
_localStreams = [[NSMutableArray alloc] init];
}
return self;
diff --git a/talk/app/webrtc/objc/RTCPeerConnectionInterface.mm b/talk/app/webrtc/objc/RTCPeerConnectionInterface.mm
index 58d12ace4c..ff45bd2bac 100644
--- a/talk/app/webrtc/objc/RTCPeerConnectionInterface.mm
+++ b/talk/app/webrtc/objc/RTCPeerConnectionInterface.mm
@@ -39,6 +39,7 @@
@synthesize tcpCandidatePolicy = _tcpCandidatePolicy;
@synthesize audioJitterBufferMaxPackets = _audioJitterBufferMaxPackets;
@synthesize iceConnectionReceivingTimeout = _iceConnectionReceivingTimeout;
+@synthesize iceBackupCandidatePairPingInterval = _iceBackupCandidatePairPingInterval;
- (instancetype)init {
if (self = [super init]) {
@@ -51,6 +52,7 @@
[RTCEnumConverter tcpCandidatePolicyForNativeEnum:config.tcp_candidate_policy];
_audioJitterBufferMaxPackets = config.audio_jitter_buffer_max_packets;
_iceConnectionReceivingTimeout = config.ice_connection_receiving_timeout;
+ _iceBackupCandidatePairPingInterval = config.ice_backup_candidate_pair_ping_interval;
}
return self;
}
@@ -60,7 +62,8 @@
rtcpMuxPolicy:(RTCRtcpMuxPolicy)rtcpMuxPolicy
tcpCandidatePolicy:(RTCTcpCandidatePolicy)tcpCandidatePolicy
audioJitterBufferMaxPackets:(int)audioJitterBufferMaxPackets
- iceConnectionReceivingTimeout:(int)iceConnectionReceivingTimeout {
+ iceConnectionReceivingTimeout:(int)iceConnectionReceivingTimeout
+ iceBackupCandidatePairPingInterval:(int)iceBackupCandidatePairPingInterval {
if (self = [super init]) {
_iceTransportsType = iceTransportsType;
_bundlePolicy = bundlePolicy;
@@ -68,6 +71,7 @@
_tcpCandidatePolicy = tcpCandidatePolicy;
_audioJitterBufferMaxPackets = audioJitterBufferMaxPackets;
_iceConnectionReceivingTimeout = iceConnectionReceivingTimeout;
+ _iceBackupCandidatePairPingInterval = iceBackupCandidatePairPingInterval;
}
return self;
}
@@ -85,8 +89,8 @@
nativeConfig.tcp_candidate_policy =
[RTCEnumConverter nativeEnumForTcpCandidatePolicy:_tcpCandidatePolicy];
nativeConfig.audio_jitter_buffer_max_packets = _audioJitterBufferMaxPackets;
- nativeConfig.ice_connection_receiving_timeout =
- _iceConnectionReceivingTimeout;
+ nativeConfig.ice_connection_receiving_timeout = _iceConnectionReceivingTimeout;
+ nativeConfig.ice_backup_candidate_pair_ping_interval = _iceBackupCandidatePairPingInterval;
return nativeConfig;
}
diff --git a/talk/app/webrtc/objc/avfoundationvideocapturer.h b/talk/app/webrtc/objc/avfoundationvideocapturer.h
index ded80f6647..32de09aadd 100644
--- a/talk/app/webrtc/objc/avfoundationvideocapturer.h
+++ b/talk/app/webrtc/objc/avfoundationvideocapturer.h
@@ -71,7 +71,6 @@ class AVFoundationVideoCapturer : public cricket::VideoCapturer {
RTCAVFoundationVideoCapturerInternal* _capturer;
rtc::Thread* _startThread; // Set in Start(), unset in Stop().
- uint64_t _startTime;
}; // AVFoundationVideoCapturer
} // namespace webrtc
diff --git a/talk/app/webrtc/objc/avfoundationvideocapturer.mm b/talk/app/webrtc/objc/avfoundationvideocapturer.mm
index e1b0f88fb6..0f9dc6825e 100644
--- a/talk/app/webrtc/objc/avfoundationvideocapturer.mm
+++ b/talk/app/webrtc/objc/avfoundationvideocapturer.mm
@@ -33,6 +33,8 @@
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
+#import "webrtc/base/objc/RTCDispatcher.h"
+
// TODO(tkchin): support other formats.
static NSString* const kDefaultPreset = AVCaptureSessionPreset640x480;
static cricket::VideoFormat const kDefaultFormat =
@@ -41,11 +43,6 @@ static cricket::VideoFormat const kDefaultFormat =
cricket::VideoFormat::FpsToInterval(30),
cricket::FOURCC_NV12);
-// This queue is used to start and stop the capturer without blocking the
-// calling thread. -[AVCaptureSession startRunning] blocks until the camera is
-// running.
-static dispatch_queue_t kBackgroundQueue = nil;
-
// This class is used to capture frames using AVFoundation APIs on iOS. It is
// meant to be owned by an instance of AVFoundationVideoCapturer. The reason for
// this is that other webrtc objects own cricket::VideoCapturer, which is not
@@ -80,15 +77,6 @@ static dispatch_queue_t kBackgroundQueue = nil;
@synthesize useBackCamera = _useBackCamera;
@synthesize isRunning = _isRunning;
-+ (void)initialize {
- static dispatch_once_t onceToken;
- dispatch_once(&onceToken, ^{
- kBackgroundQueue = dispatch_queue_create(
- "com.google.webrtc.RTCAVFoundationCapturerBackground",
- DISPATCH_QUEUE_SERIAL);
- });
-}
-
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer*)capturer {
NSParameterAssert(capturer);
if (self = [super init]) {
@@ -132,9 +120,10 @@ static dispatch_queue_t kBackgroundQueue = nil;
_orientationHasChanged = NO;
[[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
AVCaptureSession* session = _captureSession;
- dispatch_async(kBackgroundQueue, ^{
+ [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+ block:^{
[session startRunning];
- });
+ }];
_isRunning = YES;
}
@@ -144,9 +133,10 @@ static dispatch_queue_t kBackgroundQueue = nil;
}
[_videoOutput setSampleBufferDelegate:nil queue:nullptr];
AVCaptureSession* session = _captureSession;
- dispatch_async(kBackgroundQueue, ^{
+ [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+ block:^{
[session stopRunning];
- });
+ }];
[[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
_isRunning = NO;
}
diff --git a/talk/app/webrtc/objc/public/RTCFileLogger.h b/talk/app/webrtc/objc/public/RTCFileLogger.h
index 3900cb6fbe..70b3825307 100644
--- a/talk/app/webrtc/objc/public/RTCFileLogger.h
+++ b/talk/app/webrtc/objc/public/RTCFileLogger.h
@@ -39,21 +39,38 @@ typedef NS_ENUM(NSUInteger, RTCFileLoggerSeverity) {
kRTCFileLoggerSeverityError
};
+typedef NS_ENUM(NSUInteger, RTCFileLoggerRotationType) {
+ kRTCFileLoggerTypeCall,
+ kRTCFileLoggerTypeApp,
+};
+
// This class intercepts WebRTC logs and saves them to a file. The file size
// will not exceed the given maximum byte size. When the maximum byte size is
-// reached logs from the beginning and the end are preserved while the middle
-// section is overwritten instead.
+// reached, logs are rotated according to the rotationType specified.
+// For kRTCFileLoggerTypeCall, logs from the beginning and the end
+// are preserved while the middle section is overwritten instead.
+// For kRTCFileLoggerTypeApp, the oldest log is overwritten.
// This class is not threadsafe.
@interface RTCFileLogger : NSObject
// The severity level to capture. The default is kRTCFileLoggerSeverityInfo.
@property(nonatomic, assign) RTCFileLoggerSeverity severity;
-// Default constructor provides default settings for dir path and file size.
+// The rotation type for this file logger. The default is
+// kRTCFileLoggerTypeCall.
+@property(nonatomic, readonly) RTCFileLoggerRotationType rotationType;
+
+// Default constructor provides default settings for dir path, file size and
+// rotation type.
- (instancetype)init;
+// Create file logger with default rotation type.
+- (instancetype)initWithDirPath:(NSString *)dirPath
+ maxFileSize:(NSUInteger)maxFileSize;
+
- (instancetype)initWithDirPath:(NSString *)dirPath
maxFileSize:(NSUInteger)maxFileSize
+ rotationType:(RTCFileLoggerRotationType)rotationType
NS_DESIGNATED_INITIALIZER;
// Starts writing WebRTC logs to disk if not already started. Overwrites any
diff --git a/talk/app/webrtc/objc/public/RTCPeerConnectionInterface.h b/talk/app/webrtc/objc/public/RTCPeerConnectionInterface.h
index b0cc72b5b7..44b971c85e 100644
--- a/talk/app/webrtc/objc/public/RTCPeerConnectionInterface.h
+++ b/talk/app/webrtc/objc/public/RTCPeerConnectionInterface.h
@@ -64,12 +64,14 @@ typedef NS_ENUM(NSInteger, RTCTcpCandidatePolicy) {
@property(nonatomic, assign) RTCTcpCandidatePolicy tcpCandidatePolicy;
@property(nonatomic, assign) int audioJitterBufferMaxPackets;
@property(nonatomic, assign) int iceConnectionReceivingTimeout;
+@property(nonatomic, assign) int iceBackupCandidatePairPingInterval;
- (instancetype)initWithIceTransportsType:(RTCIceTransportsType)iceTransportsType
bundlePolicy:(RTCBundlePolicy)bundlePolicy
rtcpMuxPolicy:(RTCRtcpMuxPolicy)rtcpMuxPolicy
tcpCandidatePolicy:(RTCTcpCandidatePolicy)tcpCandidatePolicy
audioJitterBufferMaxPackets:(int)audioJitterBufferMaxPackets
- iceConnectionReceivingTimeout:(int)iceConnectionReceivingTimeout;
+ iceConnectionReceivingTimeout:(int)iceConnectionReceivingTimeout
+ iceBackupCandidatePairPingInterval:(int)iceBackupCandidatePairPingInterval;
@end
diff --git a/talk/app/webrtc/peerconnection.cc b/talk/app/webrtc/peerconnection.cc
index 0d519b280b..ccca18af67 100644
--- a/talk/app/webrtc/peerconnection.cc
+++ b/talk/app/webrtc/peerconnection.cc
@@ -27,8 +27,10 @@
#include "talk/app/webrtc/peerconnection.h"
-#include <vector>
+#include <algorithm>
#include <cctype> // for isdigit
+#include <utility>
+#include <vector>
#include "talk/app/webrtc/audiotrack.h"
#include "talk/app/webrtc/dtmfsender.h"
@@ -36,6 +38,7 @@
#include "talk/app/webrtc/jsepsessiondescription.h"
#include "talk/app/webrtc/mediaconstraintsinterface.h"
#include "talk/app/webrtc/mediastream.h"
+#include "talk/app/webrtc/mediastreamobserver.h"
#include "talk/app/webrtc/mediastreamproxy.h"
#include "talk/app/webrtc/mediastreamtrackproxy.h"
#include "talk/app/webrtc/remoteaudiosource.h"
@@ -46,11 +49,13 @@
#include "talk/app/webrtc/videosource.h"
#include "talk/app/webrtc/videotrack.h"
#include "talk/media/sctp/sctpdataengine.h"
-#include "webrtc/p2p/client/basicportallocator.h"
#include "talk/session/media/channelmanager.h"
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/stringencode.h"
#include "webrtc/base/stringutils.h"
+#include "webrtc/base/trace_event.h"
+#include "webrtc/p2p/client/basicportallocator.h"
#include "webrtc/system_wrappers/include/field_trial.h"
namespace {
@@ -59,13 +64,8 @@ using webrtc::DataChannel;
using webrtc::MediaConstraintsInterface;
using webrtc::MediaStreamInterface;
using webrtc::PeerConnectionInterface;
+using webrtc::RtpSenderInterface;
using webrtc::StreamCollection;
-using webrtc::StunConfigurations;
-using webrtc::TurnConfigurations;
-typedef webrtc::PortAllocatorFactoryInterface::StunConfiguration
- StunConfiguration;
-typedef webrtc::PortAllocatorFactoryInterface::TurnConfiguration
- TurnConfiguration;
static const char kDefaultStreamLabel[] = "default";
static const char kDefaultAudioTrackLabel[] = "defaulta0";
@@ -80,8 +80,6 @@ static const size_t kTurnTransportTokensNum = 2;
static const int kDefaultStunPort = 3478;
static const int kDefaultStunTlsPort = 5349;
static const char kTransport[] = "transport";
-static const char kUdpTransportType[] = "udp";
-static const char kTcpTransportType[] = "tcp";
// NOTE: Must be in the same order as the ServiceType enum.
static const char* kValidIceServiceTypes[] = {"stun", "stuns", "turn", "turns"};
@@ -95,7 +93,7 @@ enum ServiceType {
TURNS, // Indicates a TURN server used with a TLS session.
INVALID, // Unknown.
};
-static_assert(INVALID == ARRAY_SIZE(kValidIceServiceTypes),
+static_assert(INVALID == arraysize(kValidIceServiceTypes),
"kValidIceServiceTypes must have as many strings as ServiceType "
"has values.");
@@ -104,6 +102,7 @@ enum {
MSG_SET_SESSIONDESCRIPTION_FAILED,
MSG_CREATE_SESSIONDESCRIPTION_FAILED,
MSG_GETSTATS,
+ MSG_FREE_DATACHANNELS,
};
struct SetSessionDescriptionMsg : public rtc::MessageData {
@@ -156,7 +155,7 @@ bool GetServiceTypeAndHostnameFromUri(const std::string& in_str,
return false;
}
*service_type = INVALID;
- for (size_t i = 0; i < ARRAY_SIZE(kValidIceServiceTypes); ++i) {
+ for (size_t i = 0; i < arraysize(kValidIceServiceTypes); ++i) {
if (in_str.compare(0, colonpos, kValidIceServiceTypes[i]) == 0) {
*service_type = static_cast<ServiceType>(i);
break;
@@ -216,12 +215,12 @@ bool ParseHostnameAndPortFromString(const std::string& in_str,
return !host->empty();
}
-// Adds a StunConfiguration or TurnConfiguration to the appropriate list,
+// Adds a STUN or TURN server to the appropriate list,
// by parsing |url| and using the username/password in |server|.
bool ParseIceServerUrl(const PeerConnectionInterface::IceServer& server,
const std::string& url,
- StunConfigurations* stun_config,
- TurnConfigurations* turn_config) {
+ cricket::ServerAddresses* stun_servers,
+ std::vector<cricket::RelayServerConfig>* turn_servers) {
// draft-nandakumar-rtcweb-stun-uri-01
// stunURI = scheme ":" stun-host [ ":" stun-port ]
// scheme = "stun" / "stuns"
@@ -236,10 +235,10 @@ bool ParseIceServerUrl(const PeerConnectionInterface::IceServer& server,
// transport-ext = 1*unreserved
// turn-host = IP-literal / IPv4address / reg-name
// turn-port = *DIGIT
- RTC_DCHECK(stun_config != nullptr);
- RTC_DCHECK(turn_config != nullptr);
+ RTC_DCHECK(stun_servers != nullptr);
+ RTC_DCHECK(turn_servers != nullptr);
std::vector<std::string> tokens;
- std::string turn_transport_type = kUdpTransportType;
+ cricket::ProtocolType turn_transport_type = cricket::PROTO_UDP;
RTC_DCHECK(!url.empty());
rtc::tokenize(url, '?', &tokens);
std::string uri_without_transport = tokens[0];
@@ -250,11 +249,12 @@ bool ParseIceServerUrl(const PeerConnectionInterface::IceServer& server,
if (tokens[0] == kTransport) {
// As per the above grammar, the transport param will consist of lower-case
// letters.
- if (tokens[1] != kUdpTransportType && tokens[1] != kTcpTransportType) {
+ if (!cricket::StringToProto(tokens[1].c_str(), &turn_transport_type) ||
+ (turn_transport_type != cricket::PROTO_UDP &&
+ turn_transport_type != cricket::PROTO_TCP)) {
LOG(LS_WARNING) << "Transport param should always be udp or tcp.";
return false;
}
- turn_transport_type = tokens[1];
}
}
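Illustrative ICE server entries matching the grammar above (hostnames and
credentials are examples only):

  webrtc::PeerConnectionInterface::IceServer server;
  server.urls.push_back("stun:stun.example.org");                      // STUN
  server.urls.push_back("turn:turn.example.org:3478?transport=udp");   // TURN
  server.urls.push_back("turns:turn.example.org:5349");  // TURN over TLS/TCP
  server.username = "user";
  server.password = "secret";  // Used only by the TURN entries.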
@@ -293,7 +293,7 @@ bool ParseIceServerUrl(const PeerConnectionInterface::IceServer& server,
int port = kDefaultStunPort;
if (service_type == TURNS) {
port = kDefaultStunTlsPort;
- turn_transport_type = kTcpTransportType;
+ turn_transport_type = cricket::PROTO_TCP;
}
std::string address;
@@ -310,16 +310,14 @@ bool ParseIceServerUrl(const PeerConnectionInterface::IceServer& server,
switch (service_type) {
case STUN:
case STUNS:
- stun_config->push_back(StunConfiguration(address, port));
+ stun_servers->insert(rtc::SocketAddress(address, port));
break;
case TURN:
case TURNS: {
bool secure = (service_type == TURNS);
- turn_config->push_back(TurnConfiguration(address, port,
- username,
- server.password,
- turn_transport_type,
- secure));
+ turn_servers->push_back(
+ cricket::RelayServerConfig(address, port, username, server.password,
+ turn_transport_type, secure));
break;
}
case INVALID:
@@ -365,25 +363,15 @@ bool IsValidOfferToReceiveMedia(int value) {
}
// Add the stream and RTP data channel info to |session_options|.
-void SetStreams(cricket::MediaSessionOptions* session_options,
- rtc::scoped_refptr<StreamCollection> streams,
- const std::map<std::string, rtc::scoped_refptr<DataChannel>>&
- rtp_data_channels) {
+void AddSendStreams(
+ cricket::MediaSessionOptions* session_options,
+ const std::vector<rtc::scoped_refptr<RtpSenderInterface>>& senders,
+ const std::map<std::string, rtc::scoped_refptr<DataChannel>>&
+ rtp_data_channels) {
session_options->streams.clear();
- if (streams != nullptr) {
- for (size_t i = 0; i < streams->count(); ++i) {
- MediaStreamInterface* stream = streams->at(i);
- // For each audio track in the stream, add it to the MediaSessionOptions.
- for (const auto& track : stream->GetAudioTracks()) {
- session_options->AddSendStream(cricket::MEDIA_TYPE_AUDIO, track->id(),
- stream->label());
- }
- // For each video track in the stream, add it to the MediaSessionOptions.
- for (const auto& track : stream->GetVideoTracks()) {
- session_options->AddSendStream(cricket::MEDIA_TYPE_VIDEO, track->id(),
- stream->label());
- }
- }
+ for (const auto& sender : senders) {
+ session_options->AddSendStream(sender->media_type(), sender->id(),
+ sender->stream_id());
}
// Check for data channels.
@@ -421,10 +409,12 @@ class RemoteMediaStreamFactory {
MediaStream::Create(stream_label));
}
- AudioTrackInterface* AddAudioTrack(webrtc::MediaStreamInterface* stream,
+ AudioTrackInterface* AddAudioTrack(uint32_t ssrc,
+ AudioProviderInterface* provider,
+ webrtc::MediaStreamInterface* stream,
const std::string& track_id) {
return AddTrack<AudioTrackInterface, AudioTrack, AudioTrackProxy>(
- stream, track_id, RemoteAudioSource::Create().get());
+ stream, track_id, RemoteAudioSource::Create(ssrc, provider));
}
VideoTrackInterface* AddVideoTrack(webrtc::MediaStreamInterface* stream,
@@ -432,7 +422,7 @@ class RemoteMediaStreamFactory {
return AddTrack<VideoTrackInterface, VideoTrack, VideoTrackProxy>(
stream, track_id,
VideoSource::Create(channel_manager_, new RemoteVideoCapturer(),
- nullptr)
+ nullptr, true)
.get());
}
@@ -440,7 +430,7 @@ class RemoteMediaStreamFactory {
template <typename TI, typename T, typename TP, typename S>
TI* AddTrack(MediaStreamInterface* stream,
const std::string& track_id,
- S* source) {
+ const S& source) {
rtc::scoped_refptr<TI> track(
TP::Create(signaling_thread_, T::Create(track_id, source)));
track->set_state(webrtc::MediaStreamTrackInterface::kLive);
@@ -471,7 +461,11 @@ bool ConvertRtcOptionsForOffer(
}
session_options->vad_enabled = rtc_options.voice_activity_detection;
- session_options->transport_options.ice_restart = rtc_options.ice_restart;
+ session_options->audio_transport_options.ice_restart =
+ rtc_options.ice_restart;
+ session_options->video_transport_options.ice_restart =
+ rtc_options.ice_restart;
+ session_options->data_transport_options.ice_restart = rtc_options.ice_restart;
session_options->bundle_enabled = rtc_options.use_rtp_mux;
return true;
@@ -517,10 +511,14 @@ bool ParseConstraintsForAnswer(const MediaConstraintsInterface* constraints,
if (FindConstraint(constraints, MediaConstraintsInterface::kIceRestart,
&value, &mandatory_constraints_satisfied)) {
- session_options->transport_options.ice_restart = value;
+ session_options->audio_transport_options.ice_restart = value;
+ session_options->video_transport_options.ice_restart = value;
+ session_options->data_transport_options.ice_restart = value;
} else {
// kIceRestart defaults to false according to spec.
- session_options->transport_options.ice_restart = false;
+ session_options->audio_transport_options.ice_restart = false;
+ session_options->video_transport_options.ice_restart = false;
+ session_options->data_transport_options.ice_restart = false;
}
if (!constraints) {
@@ -530,8 +528,8 @@ bool ParseConstraintsForAnswer(const MediaConstraintsInterface* constraints,
}
bool ParseIceServers(const PeerConnectionInterface::IceServers& servers,
- StunConfigurations* stun_config,
- TurnConfigurations* turn_config) {
+ cricket::ServerAddresses* stun_servers,
+ std::vector<cricket::RelayServerConfig>* turn_servers) {
for (const webrtc::PeerConnectionInterface::IceServer& server : servers) {
if (!server.urls.empty()) {
for (const std::string& url : server.urls) {
@@ -539,13 +537,13 @@ bool ParseIceServers(const PeerConnectionInterface::IceServers& servers,
LOG(LS_ERROR) << "Empty uri.";
return false;
}
- if (!ParseIceServerUrl(server, url, stun_config, turn_config)) {
+ if (!ParseIceServerUrl(server, url, stun_servers, turn_servers)) {
return false;
}
}
} else if (!server.uri.empty()) {
// Fallback to old .uri if new .urls isn't present.
- if (!ParseIceServerUrl(server, server.uri, stun_config, turn_config)) {
+ if (!ParseIceServerUrl(server, server.uri, stun_servers, turn_servers)) {
return false;
}
} else {
@@ -553,6 +551,13 @@ bool ParseIceServers(const PeerConnectionInterface::IceServers& servers,
return false;
}
}
+ // Candidates must have unique priorities, so that connectivity checks
+ // are performed in a well-defined order.
+ int priority = static_cast<int>(turn_servers->size() - 1);
+ for (cricket::RelayServerConfig& turn_server : *turn_servers) {
+ // First in the list gets highest priority.
+ turn_server.priority = priority--;
+ }
return true;
}
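A worked illustration of the priority numbering applied above: for TURN
servers parsed in order {A, B, C}, turn_servers->size() is 3, so A gets
priority 2, B gets 1, and C gets 0, and A's candidates are checked first.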
@@ -568,6 +573,7 @@ PeerConnection::PeerConnection(PeerConnectionFactory* factory)
remote_streams_(StreamCollection::Create()) {}
PeerConnection::~PeerConnection() {
+ TRACE_EVENT0("webrtc", "PeerConnection::~PeerConnection");
RTC_DCHECK(signaling_thread()->IsCurrent());
// Need to detach RTP senders/receivers from WebRtcSession,
// since it's about to be destroyed.
@@ -582,22 +588,24 @@ PeerConnection::~PeerConnection() {
bool PeerConnection::Initialize(
const PeerConnectionInterface::RTCConfiguration& configuration,
const MediaConstraintsInterface* constraints,
- PortAllocatorFactoryInterface* allocator_factory,
+ rtc::scoped_ptr<cricket::PortAllocator> allocator,
rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
PeerConnectionObserver* observer) {
+ TRACE_EVENT0("webrtc", "PeerConnection::Initialize");
RTC_DCHECK(observer != nullptr);
if (!observer) {
return false;
}
observer_ = observer;
- std::vector<PortAllocatorFactoryInterface::StunConfiguration> stun_config;
- std::vector<PortAllocatorFactoryInterface::TurnConfiguration> turn_config;
- if (!ParseIceServers(configuration.servers, &stun_config, &turn_config)) {
+ port_allocator_ = std::move(allocator);
+
+ cricket::ServerAddresses stun_servers;
+ std::vector<cricket::RelayServerConfig> turn_servers;
+ if (!ParseIceServers(configuration.servers, &stun_servers, &turn_servers)) {
return false;
}
- port_allocator_.reset(
- allocator_factory->CreatePortAllocator(stun_config, turn_config));
+ port_allocator_->SetIceServers(stun_servers, turn_servers);
// To handle both internal and externally created port allocator, we will
// enable BUNDLE here.
@@ -637,7 +645,7 @@ bool PeerConnection::Initialize(
// Initialize the WebRtcSession. It creates transport channels etc.
if (!session_->Initialize(factory_->options(), constraints,
- dtls_identity_store.Pass(), configuration)) {
+ std::move(dtls_identity_store), configuration)) {
return false;
}
@@ -668,9 +676,8 @@ PeerConnection::remote_streams() {
return remote_streams_;
}
-// TODO(deadbeef): Create RtpSenders immediately here, even if local
-// description hasn't yet been set.
bool PeerConnection::AddStream(MediaStreamInterface* local_stream) {
+ TRACE_EVENT0("webrtc", "PeerConnection::AddStream");
if (IsClosed()) {
return false;
}
@@ -679,25 +686,22 @@ bool PeerConnection::AddStream(MediaStreamInterface* local_stream) {
}
local_streams_->AddStream(local_stream);
+ MediaStreamObserver* observer = new MediaStreamObserver(local_stream);
+ observer->SignalAudioTrackAdded.connect(this,
+ &PeerConnection::OnAudioTrackAdded);
+ observer->SignalAudioTrackRemoved.connect(
+ this, &PeerConnection::OnAudioTrackRemoved);
+ observer->SignalVideoTrackAdded.connect(this,
+ &PeerConnection::OnVideoTrackAdded);
+ observer->SignalVideoTrackRemoved.connect(
+ this, &PeerConnection::OnVideoTrackRemoved);
+ stream_observers_.push_back(rtc::scoped_ptr<MediaStreamObserver>(observer));
- // Find tracks that have already been configured in SDP. This can occur if a
- // local session description that contains the MSID of these tracks is set
- // before AddLocalStream is called. It can also occur if the local session
- // description is not changed and RemoveLocalStream is called and later
- // AddLocalStream is called again with the same stream.
for (const auto& track : local_stream->GetAudioTracks()) {
- const TrackInfo* track_info =
- FindTrackInfo(local_audio_tracks_, local_stream->label(), track->id());
- if (track_info) {
- CreateAudioSender(local_stream, track.get(), track_info->ssrc);
- }
+ OnAudioTrackAdded(track.get(), local_stream);
}
for (const auto& track : local_stream->GetVideoTracks()) {
- const TrackInfo* track_info =
- FindTrackInfo(local_video_tracks_, local_stream->label(), track->id());
- if (track_info) {
- CreateVideoSender(local_stream, track.get(), track_info->ssrc);
- }
+ OnVideoTrackAdded(track.get(), local_stream);
}
stats_->AddStream(local_stream);
@@ -705,25 +709,24 @@ bool PeerConnection::AddStream(MediaStreamInterface* local_stream) {
return true;
}
-// TODO(deadbeef): Don't destroy RtpSenders here; they should be kept around
-// indefinitely.
void PeerConnection::RemoveStream(MediaStreamInterface* local_stream) {
+ TRACE_EVENT0("webrtc", "PeerConnection::RemoveStream");
for (const auto& track : local_stream->GetAudioTracks()) {
- const TrackInfo* track_info =
- FindTrackInfo(local_audio_tracks_, local_stream->label(), track->id());
- if (track_info) {
- DestroyAudioSender(local_stream, track.get(), track_info->ssrc);
- }
+ OnAudioTrackRemoved(track.get(), local_stream);
}
for (const auto& track : local_stream->GetVideoTracks()) {
- const TrackInfo* track_info =
- FindTrackInfo(local_video_tracks_, local_stream->label(), track->id());
- if (track_info) {
- DestroyVideoSender(local_stream, track.get());
- }
+ OnVideoTrackRemoved(track.get(), local_stream);
}
local_streams_->RemoveStream(local_stream);
+ stream_observers_.erase(
+ std::remove_if(
+ stream_observers_.begin(), stream_observers_.end(),
+ [local_stream](const rtc::scoped_ptr<MediaStreamObserver>& observer) {
+ return observer->stream()->label().compare(local_stream->label()) ==
+ 0;
+ }),
+ stream_observers_.end());
if (IsClosed()) {
return;
@@ -733,6 +736,7 @@ void PeerConnection::RemoveStream(MediaStreamInterface* local_stream) {
rtc::scoped_refptr<DtmfSenderInterface> PeerConnection::CreateDtmfSender(
AudioTrackInterface* track) {
+ TRACE_EVENT0("webrtc", "PeerConnection::CreateDtmfSender");
if (!track) {
LOG(LS_ERROR) << "CreateDtmfSender - track is NULL.";
return NULL;
@@ -751,6 +755,26 @@ rtc::scoped_refptr<DtmfSenderInterface> PeerConnection::CreateDtmfSender(
return DtmfSenderProxy::Create(signaling_thread(), sender.get());
}
+rtc::scoped_refptr<RtpSenderInterface> PeerConnection::CreateSender(
+ const std::string& kind,
+ const std::string& stream_id) {
+ TRACE_EVENT0("webrtc", "PeerConnection::CreateSender");
+ RtpSenderInterface* new_sender;
+ if (kind == MediaStreamTrackInterface::kAudioKind) {
+ new_sender = new AudioRtpSender(session_.get(), stats_.get());
+ } else if (kind == MediaStreamTrackInterface::kVideoKind) {
+ new_sender = new VideoRtpSender(session_.get());
+ } else {
+ LOG(LS_ERROR) << "CreateSender called with invalid kind: " << kind;
+ return rtc::scoped_refptr<RtpSenderInterface>();
+ }
+ if (!stream_id.empty()) {
+ new_sender->set_stream_id(stream_id);
+ }
+ senders_.push_back(new_sender);
+ return RtpSenderProxy::Create(signaling_thread(), new_sender);
+}
+
std::vector<rtc::scoped_refptr<RtpSenderInterface>> PeerConnection::GetSenders()
const {
std::vector<rtc::scoped_refptr<RtpSenderInterface>> senders;
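A hedged usage sketch for the new CreateSender() API (pc and audio_track are
assumed to exist; attaching the track through RtpSenderInterface::SetTrack is
an assumption about the sender interface, not shown in this patch):

  rtc::scoped_refptr<webrtc::RtpSenderInterface> sender =
      pc->CreateSender(webrtc::MediaStreamTrackInterface::kAudioKind,
                       "stream_1");
  if (sender) {
    sender->SetTrack(audio_track.get());  // Associate a local audio track.
  }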
@@ -773,6 +797,7 @@ PeerConnection::GetReceivers() const {
bool PeerConnection::GetStats(StatsObserver* observer,
MediaStreamTrackInterface* track,
StatsOutputLevel level) {
+ TRACE_EVENT0("webrtc", "PeerConnection::GetStats");
RTC_DCHECK(signaling_thread()->IsCurrent());
if (!VERIFY(observer != NULL)) {
LOG(LS_ERROR) << "GetStats - observer is NULL.";
@@ -807,6 +832,7 @@ rtc::scoped_refptr<DataChannelInterface>
PeerConnection::CreateDataChannel(
const std::string& label,
const DataChannelInit* config) {
+ TRACE_EVENT0("webrtc", "PeerConnection::CreateDataChannel");
bool first_datachannel = !HasDataChannels();
rtc::scoped_ptr<InternalDataChannelInit> internal_config;
@@ -830,6 +856,7 @@ PeerConnection::CreateDataChannel(
void PeerConnection::CreateOffer(CreateSessionDescriptionObserver* observer,
const MediaConstraintsInterface* constraints) {
+ TRACE_EVENT0("webrtc", "PeerConnection::CreateOffer");
if (!VERIFY(observer != nullptr)) {
LOG(LS_ERROR) << "CreateOffer - observer is NULL.";
return;
@@ -881,6 +908,7 @@ void PeerConnection::CreateOffer(CreateSessionDescriptionObserver* observer,
void PeerConnection::CreateOffer(CreateSessionDescriptionObserver* observer,
const RTCOfferAnswerOptions& options) {
+ TRACE_EVENT0("webrtc", "PeerConnection::CreateOffer");
if (!VERIFY(observer != nullptr)) {
LOG(LS_ERROR) << "CreateOffer - observer is NULL.";
return;
@@ -900,6 +928,7 @@ void PeerConnection::CreateOffer(CreateSessionDescriptionObserver* observer,
void PeerConnection::CreateAnswer(
CreateSessionDescriptionObserver* observer,
const MediaConstraintsInterface* constraints) {
+ TRACE_EVENT0("webrtc", "PeerConnection::CreateAnswer");
if (!VERIFY(observer != nullptr)) {
LOG(LS_ERROR) << "CreateAnswer - observer is NULL.";
return;
@@ -919,6 +948,7 @@ void PeerConnection::CreateAnswer(
void PeerConnection::SetLocalDescription(
SetSessionDescriptionObserver* observer,
SessionDescriptionInterface* desc) {
+ TRACE_EVENT0("webrtc", "PeerConnection::SetLocalDescription");
if (!VERIFY(observer != nullptr)) {
LOG(LS_ERROR) << "SetLocalDescription - observer is NULL.";
return;
@@ -940,7 +970,7 @@ void PeerConnection::SetLocalDescription(
// SCTP sids.
rtc::SSLRole role;
if (session_->data_channel_type() == cricket::DCT_SCTP &&
- session_->GetSslRole(&role)) {
+ session_->GetSslRole(session_->data_channel(), &role)) {
AllocateSctpSids(role);
}
@@ -949,19 +979,27 @@ void PeerConnection::SetLocalDescription(
const cricket::ContentInfo* audio_content =
GetFirstAudioContent(desc->description());
if (audio_content) {
- const cricket::AudioContentDescription* audio_desc =
- static_cast<const cricket::AudioContentDescription*>(
- audio_content->description);
- UpdateLocalTracks(audio_desc->streams(), audio_desc->type());
+ if (audio_content->rejected) {
+ RemoveTracks(cricket::MEDIA_TYPE_AUDIO);
+ } else {
+ const cricket::AudioContentDescription* audio_desc =
+ static_cast<const cricket::AudioContentDescription*>(
+ audio_content->description);
+ UpdateLocalTracks(audio_desc->streams(), audio_desc->type());
+ }
}
const cricket::ContentInfo* video_content =
GetFirstVideoContent(desc->description());
if (video_content) {
- const cricket::VideoContentDescription* video_desc =
- static_cast<const cricket::VideoContentDescription*>(
- video_content->description);
- UpdateLocalTracks(video_desc->streams(), video_desc->type());
+ if (video_content->rejected) {
+ RemoveTracks(cricket::MEDIA_TYPE_VIDEO);
+ } else {
+ const cricket::VideoContentDescription* video_desc =
+ static_cast<const cricket::VideoContentDescription*>(
+ video_content->description);
+ UpdateLocalTracks(video_desc->streams(), video_desc->type());
+ }
}
const cricket::ContentInfo* data_content =
@@ -988,6 +1026,7 @@ void PeerConnection::SetLocalDescription(
void PeerConnection::SetRemoteDescription(
SetSessionDescriptionObserver* observer,
SessionDescriptionInterface* desc) {
+ TRACE_EVENT0("webrtc", "PeerConnection::SetRemoteDescription");
if (!VERIFY(observer != nullptr)) {
LOG(LS_ERROR) << "SetRemoteDescription - observer is NULL.";
return;
@@ -1009,11 +1048,27 @@ void PeerConnection::SetRemoteDescription(
// SCTP sids.
rtc::SSLRole role;
if (session_->data_channel_type() == cricket::DCT_SCTP &&
- session_->GetSslRole(&role)) {
+ session_->GetSslRole(session_->data_channel(), &role)) {
AllocateSctpSids(role);
}
const cricket::SessionDescription* remote_desc = desc->description();
+ const cricket::ContentInfo* audio_content = GetFirstAudioContent(remote_desc);
+ const cricket::ContentInfo* video_content = GetFirstVideoContent(remote_desc);
+ const cricket::AudioContentDescription* audio_desc =
+ GetFirstAudioContentDescription(remote_desc);
+ const cricket::VideoContentDescription* video_desc =
+ GetFirstVideoContentDescription(remote_desc);
+ const cricket::DataContentDescription* data_desc =
+ GetFirstDataContentDescription(remote_desc);
+
+ // Check if the descriptions include streams, just in case the peer supports
+ // MSID, but doesn't indicate so with "a=msid-semantic".
+ if (remote_desc->msid_supported() ||
+ (audio_desc && !audio_desc->streams().empty()) ||
+ (video_desc && !video_desc->streams().empty())) {
+ remote_peer_supports_msid_ = true;
+ }
// We wait to signal new streams until we finish processing the description,
// since only at that point will new streams have all their tracks.
@@ -1021,39 +1076,39 @@ void PeerConnection::SetRemoteDescription(
// Find all audio rtp streams and create corresponding remote AudioTracks
// and MediaStreams.
- const cricket::ContentInfo* audio_content = GetFirstAudioContent(remote_desc);
if (audio_content) {
- const cricket::AudioContentDescription* desc =
- static_cast<const cricket::AudioContentDescription*>(
- audio_content->description);
- UpdateRemoteStreamsList(GetActiveStreams(desc), desc->type(), new_streams);
- remote_info_.default_audio_track_needed =
- !remote_desc->msid_supported() && desc->streams().empty() &&
- MediaContentDirectionHasSend(desc->direction());
+ if (audio_content->rejected) {
+ RemoveTracks(cricket::MEDIA_TYPE_AUDIO);
+ } else {
+ bool default_audio_track_needed =
+ !remote_peer_supports_msid_ &&
+ MediaContentDirectionHasSend(audio_desc->direction());
+ UpdateRemoteStreamsList(GetActiveStreams(audio_desc),
+ default_audio_track_needed, audio_desc->type(),
+ new_streams);
+ }
}
// Find all video rtp streams and create corresponding remote VideoTracks
// and MediaStreams.
- const cricket::ContentInfo* video_content = GetFirstVideoContent(remote_desc);
if (video_content) {
- const cricket::VideoContentDescription* desc =
- static_cast<const cricket::VideoContentDescription*>(
- video_content->description);
- UpdateRemoteStreamsList(GetActiveStreams(desc), desc->type(), new_streams);
- remote_info_.default_video_track_needed =
- !remote_desc->msid_supported() && desc->streams().empty() &&
- MediaContentDirectionHasSend(desc->direction());
+ if (video_content->rejected) {
+ RemoveTracks(cricket::MEDIA_TYPE_VIDEO);
+ } else {
+ bool default_video_track_needed =
+ !remote_peer_supports_msid_ &&
+ MediaContentDirectionHasSend(video_desc->direction());
+ UpdateRemoteStreamsList(GetActiveStreams(video_desc),
+ default_video_track_needed, video_desc->type(),
+ new_streams);
+ }
}
// Update the DataChannels with the information from the remote peer.
- const cricket::ContentInfo* data_content = GetFirstDataContent(remote_desc);
- if (data_content) {
- const cricket::DataContentDescription* desc =
- static_cast<const cricket::DataContentDescription*>(
- data_content->description);
- if (rtc::starts_with(desc->protocol().data(),
+ if (data_desc) {
+ if (rtc::starts_with(data_desc->protocol().data(),
cricket::kMediaProtocolRtpPrefix)) {
- UpdateRemoteRtpDataChannels(GetActiveStreams(desc));
+ UpdateRemoteRtpDataChannels(GetActiveStreams(data_desc));
}
}
@@ -1064,58 +1119,21 @@ void PeerConnection::SetRemoteDescription(
observer_->OnAddStream(new_stream);
}
- // Find removed MediaStreams.
- if (remote_info_.IsDefaultMediaStreamNeeded() &&
- remote_streams_->find(kDefaultStreamLabel) != nullptr) {
- // The default media stream already exists. No need to do anything.
- } else {
- UpdateEndedRemoteMediaStreams();
- remote_info_.msid_supported |= remote_streams_->count() > 0;
- }
- MaybeCreateDefaultStream();
+ UpdateEndedRemoteMediaStreams();
SetSessionDescriptionMsg* msg = new SetSessionDescriptionMsg(observer);
signaling_thread()->Post(this, MSG_SET_SESSIONDESCRIPTION_SUCCESS, msg);
}
bool PeerConnection::SetConfiguration(const RTCConfiguration& config) {
+ TRACE_EVENT0("webrtc", "PeerConnection::SetConfiguration");
if (port_allocator_) {
- std::vector<PortAllocatorFactoryInterface::StunConfiguration> stuns;
- std::vector<PortAllocatorFactoryInterface::TurnConfiguration> turns;
- if (!ParseIceServers(config.servers, &stuns, &turns)) {
+ cricket::ServerAddresses stun_servers;
+ std::vector<cricket::RelayServerConfig> turn_servers;
+ if (!ParseIceServers(config.servers, &stun_servers, &turn_servers)) {
return false;
}
-
- std::vector<rtc::SocketAddress> stun_hosts;
- typedef std::vector<StunConfiguration>::const_iterator StunIt;
- for (StunIt stun_it = stuns.begin(); stun_it != stuns.end(); ++stun_it) {
- stun_hosts.push_back(stun_it->server);
- }
-
- rtc::SocketAddress stun_addr;
- if (!stun_hosts.empty()) {
- stun_addr = stun_hosts.front();
- LOG(LS_INFO) << "SetConfiguration: StunServer Address: "
- << stun_addr.ToString();
- }
-
- for (size_t i = 0; i < turns.size(); ++i) {
- cricket::RelayCredentials credentials(turns[i].username,
- turns[i].password);
- cricket::RelayServerConfig relay_server(cricket::RELAY_TURN);
- cricket::ProtocolType protocol;
- if (cricket::StringToProto(turns[i].transport_type.c_str(), &protocol)) {
- relay_server.ports.push_back(cricket::ProtocolAddress(
- turns[i].server, protocol, turns[i].secure));
- relay_server.credentials = credentials;
- LOG(LS_INFO) << "SetConfiguration: TurnServer Address: "
- << turns[i].server.ToString();
- } else {
- LOG(LS_WARNING) << "Ignoring TURN server " << turns[i].server << ". "
- << "Reason= Incorrect " << turns[i].transport_type
- << " transport parameter.";
- }
- }
+ port_allocator_->SetIceServers(stun_servers, turn_servers);
}
session_->SetIceConfig(session_->ParseIceConfig(config));
return session_->SetIceTransports(config.type);
@@ -1123,10 +1141,12 @@ bool PeerConnection::SetConfiguration(const RTCConfiguration& config) {
bool PeerConnection::AddIceCandidate(
const IceCandidateInterface* ice_candidate) {
+ TRACE_EVENT0("webrtc", "PeerConnection::AddIceCandidate");
return session_->ProcessIceMessage(ice_candidate);
}
void PeerConnection::RegisterUMAObserver(UMAObserver* observer) {
+ TRACE_EVENT0("webrtc", "PeerConnection::RegisterUmaObserver");
uma_observer_ = observer;
if (session_) {
@@ -1156,6 +1176,7 @@ const SessionDescriptionInterface* PeerConnection::remote_description() const {
}
void PeerConnection::Close() {
+ TRACE_EVENT0("webrtc", "PeerConnection::Close");
// Update stats here so that we have the most recent stats for tracks and
// streams before the channels are closed.
stats_->UpdateStats(kStatsOutputLevelStandard);
@@ -1223,6 +1244,10 @@ void PeerConnection::OnMessage(rtc::Message* msg) {
delete param;
break;
}
+ case MSG_FREE_DATACHANNELS: {
+ sctp_data_channels_to_free_.clear();
+ break;
+ }
default:
RTC_DCHECK(false && "Not implemented");
break;
@@ -1267,49 +1292,6 @@ void PeerConnection::DestroyVideoReceiver(MediaStreamInterface* stream,
}
}
-void PeerConnection::CreateAudioSender(MediaStreamInterface* stream,
- AudioTrackInterface* audio_track,
- uint32_t ssrc) {
- senders_.push_back(new AudioRtpSender(audio_track, ssrc, session_.get()));
- stats_->AddLocalAudioTrack(audio_track, ssrc);
-}
-
-void PeerConnection::CreateVideoSender(MediaStreamInterface* stream,
- VideoTrackInterface* video_track,
- uint32_t ssrc) {
- senders_.push_back(new VideoRtpSender(video_track, ssrc, session_.get()));
-}
-
-// TODO(deadbeef): Keep RtpSenders around even if track goes away in local
-// description.
-void PeerConnection::DestroyAudioSender(MediaStreamInterface* stream,
- AudioTrackInterface* audio_track,
- uint32_t ssrc) {
- auto it = FindSenderForTrack(audio_track);
- if (it == senders_.end()) {
- LOG(LS_WARNING) << "RtpSender for track with id " << audio_track->id()
- << " doesn't exist.";
- return;
- } else {
- (*it)->Stop();
- senders_.erase(it);
- }
- stats_->RemoveLocalAudioTrack(audio_track, ssrc);
-}
-
-void PeerConnection::DestroyVideoSender(MediaStreamInterface* stream,
- VideoTrackInterface* video_track) {
- auto it = FindSenderForTrack(video_track);
- if (it == senders_.end()) {
- LOG(LS_WARNING) << "RtpSender for track with id " << video_track->id()
- << " doesn't exist.";
- return;
- } else {
- (*it)->Stop();
- senders_.erase(it);
- }
-}
-
void PeerConnection::OnIceConnectionChange(
PeerConnectionInterface::IceConnectionState new_state) {
RTC_DCHECK(signaling_thread()->IsCurrent());
@@ -1362,6 +1344,80 @@ void PeerConnection::ChangeSignalingState(
observer_->OnStateChange(PeerConnectionObserver::kSignalingState);
}
+void PeerConnection::OnAudioTrackAdded(AudioTrackInterface* track,
+ MediaStreamInterface* stream) {
+ auto sender = FindSenderForTrack(track);
+ if (sender != senders_.end()) {
+ // We already have a sender for this track, so just change the stream_id
+ // so that it's correct in the next call to CreateOffer.
+ (*sender)->set_stream_id(stream->label());
+ return;
+ }
+
+ // Normal case; we've never seen this track before.
+ AudioRtpSender* new_sender =
+ new AudioRtpSender(track, stream->label(), session_.get(), stats_.get());
+ senders_.push_back(new_sender);
+ // If the sender has already been configured in SDP, we call SetSsrc,
+ // which will connect the sender to the underlying transport. This can
+ // occur if a local session description that contains the ID of the sender
+ // is set before AddStream is called. It can also occur if the local
+ // session description is not changed and RemoveStream is called, and
+ // later AddStream is called again with the same stream.
+ const TrackInfo* track_info =
+ FindTrackInfo(local_audio_tracks_, stream->label(), track->id());
+ if (track_info) {
+ new_sender->SetSsrc(track_info->ssrc);
+ }
+}
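The SetSsrc comment above encodes a non-obvious call ordering. A hypothetical sequence matching the second case it describes; pc, observer, offer, and stream are assumed names for illustration:

    pc->AddStream(stream);                     // sender created, no SSRC yet
    pc->SetLocalDescription(observer, offer);  // description now carries the ID
    pc->RemoveStream(stream);                  // sender stopped and erased
    pc->AddStream(stream);                     // OnAudioTrackAdded finds the
                                               // TrackInfo and calls SetSsrc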
+
+// TODO(deadbeef): Don't destroy RtpSenders here; they should be kept around
+// indefinitely, when we have unified plan SDP.
+void PeerConnection::OnAudioTrackRemoved(AudioTrackInterface* track,
+ MediaStreamInterface* stream) {
+ auto sender = FindSenderForTrack(track);
+ if (sender == senders_.end()) {
+ LOG(LS_WARNING) << "RtpSender for track with id " << track->id()
+ << " doesn't exist.";
+ return;
+ }
+ (*sender)->Stop();
+ senders_.erase(sender);
+}
+
+void PeerConnection::OnVideoTrackAdded(VideoTrackInterface* track,
+ MediaStreamInterface* stream) {
+ auto sender = FindSenderForTrack(track);
+ if (sender != senders_.end()) {
+ // We already have a sender for this track, so just change the stream_id
+ // so that it's correct in the next call to CreateOffer.
+ (*sender)->set_stream_id(stream->label());
+ return;
+ }
+
+ // Normal case; we've never seen this track before.
+ VideoRtpSender* new_sender =
+ new VideoRtpSender(track, stream->label(), session_.get());
+ senders_.push_back(new_sender);
+ const TrackInfo* track_info =
+ FindTrackInfo(local_video_tracks_, stream->label(), track->id());
+ if (track_info) {
+ new_sender->SetSsrc(track_info->ssrc);
+ }
+}
+
+void PeerConnection::OnVideoTrackRemoved(VideoTrackInterface* track,
+ MediaStreamInterface* stream) {
+ auto sender = FindSenderForTrack(track);
+ if (sender == senders_.end()) {
+ LOG(LS_WARNING) << "RtpSender for track with id " << track->id()
+ << " doesn't exist.";
+ return;
+ }
+ (*sender)->Stop();
+ senders_.erase(sender);
+}
+
void PeerConnection::PostSetSessionDescriptionFailure(
SetSessionDescriptionObserver* observer,
const std::string& error) {
@@ -1385,7 +1441,7 @@ bool PeerConnection::GetOptionsForOffer(
return false;
}
- SetStreams(session_options, local_streams_, rtp_data_channels_);
+ AddSendStreams(session_options, senders_, rtp_data_channels_);
// Offer to receive audio/video if the constraint is not set and there are
// send streams, or we're currently receiving.
if (rtc_options.offer_to_receive_audio == RTCOfferAnswerOptions::kUndefined) {
@@ -1418,7 +1474,7 @@ bool PeerConnection::GetOptionsForAnswer(
return false;
}
- SetStreams(session_options, local_streams_, rtp_data_channels_);
+ AddSendStreams(session_options, senders_, rtp_data_channels_);
session_options->bundle_enabled =
session_options->bundle_enabled &&
(session_options->has_audio() || session_options->has_video() ||
@@ -1433,25 +1489,34 @@ bool PeerConnection::GetOptionsForAnswer(
return true;
}
+void PeerConnection::RemoveTracks(cricket::MediaType media_type) {
+ UpdateLocalTracks(std::vector<cricket::StreamParams>(), media_type);
+ UpdateRemoteStreamsList(std::vector<cricket::StreamParams>(), false,
+ media_type, nullptr);
+}
+
void PeerConnection::UpdateRemoteStreamsList(
const cricket::StreamParamsVec& streams,
+ bool default_track_needed,
cricket::MediaType media_type,
StreamCollection* new_streams) {
TrackInfos* current_tracks = GetRemoteTracks(media_type);
// Find removed tracks. I.e., tracks where the track id or ssrc doesn't match
- // the
- // new StreamParam.
+ // the new StreamParam.
auto track_it = current_tracks->begin();
while (track_it != current_tracks->end()) {
const TrackInfo& info = *track_it;
const cricket::StreamParams* params =
cricket::GetStreamBySsrc(streams, info.ssrc);
- if (!params || params->id != info.track_id) {
+ bool track_exists = params && params->id == info.track_id;
+ // If this is a default track, and we still need it, don't remove it.
+ if ((info.stream_label == kDefaultStreamLabel && default_track_needed) ||
+ track_exists) {
+ ++track_it;
+ } else {
OnRemoteTrackRemoved(info.stream_label, info.track_id, media_type);
track_it = current_tracks->erase(track_it);
- } else {
- ++track_it;
}
}
@@ -1479,6 +1544,29 @@ void PeerConnection::UpdateRemoteStreamsList(
OnRemoteTrackSeen(stream_label, track_id, ssrc, media_type);
}
}
+
+ // Add default track if necessary.
+ if (default_track_needed) {
+ rtc::scoped_refptr<MediaStreamInterface> default_stream =
+ remote_streams_->find(kDefaultStreamLabel);
+ if (!default_stream) {
+ // Create the new default MediaStream.
+ default_stream =
+ remote_stream_factory_->CreateMediaStream(kDefaultStreamLabel);
+ remote_streams_->AddStream(default_stream);
+ new_streams->AddStream(default_stream);
+ }
+ std::string default_track_id = (media_type == cricket::MEDIA_TYPE_AUDIO)
+ ? kDefaultAudioTrackLabel
+ : kDefaultVideoTrackLabel;
+ const TrackInfo* default_track_info =
+ FindTrackInfo(*current_tracks, kDefaultStreamLabel, default_track_id);
+ if (!default_track_info) {
+ current_tracks->push_back(
+ TrackInfo(kDefaultStreamLabel, default_track_id, 0));
+ OnRemoteTrackSeen(kDefaultStreamLabel, default_track_id, 0, media_type);
+ }
+ }
}
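The default_track_needed branch exists for remote peers that negotiate media without MSID. A hedged sketch of how a caller might compute the flag, using the remote_peer_supports_msid_ member introduced later in this patch; audio_content_present and streams.empty() stand in for the real checks:

    bool default_audio_track_needed =
        !remote_peer_supports_msid_ && audio_content_present && streams.empty();
    UpdateRemoteStreamsList(streams, default_audio_track_needed,
                            cricket::MEDIA_TYPE_AUDIO, new_streams);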
void PeerConnection::OnRemoteTrackSeen(const std::string& stream_label,
@@ -1488,8 +1576,8 @@ void PeerConnection::OnRemoteTrackSeen(const std::string& stream_label,
MediaStreamInterface* stream = remote_streams_->find(stream_label);
if (media_type == cricket::MEDIA_TYPE_AUDIO) {
- AudioTrackInterface* audio_track =
- remote_stream_factory_->AddAudioTrack(stream, track_id);
+ AudioTrackInterface* audio_track = remote_stream_factory_->AddAudioTrack(
+ ssrc, session_.get(), stream, track_id);
CreateAudioReceiver(stream, audio_track, ssrc);
} else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
VideoTrackInterface* video_track =
@@ -1541,41 +1629,6 @@ void PeerConnection::UpdateEndedRemoteMediaStreams() {
}
}
-void PeerConnection::MaybeCreateDefaultStream() {
- if (!remote_info_.IsDefaultMediaStreamNeeded()) {
- return;
- }
-
- bool default_created = false;
-
- rtc::scoped_refptr<MediaStreamInterface> default_remote_stream =
- remote_streams_->find(kDefaultStreamLabel);
- if (default_remote_stream == nullptr) {
- default_created = true;
- default_remote_stream =
- remote_stream_factory_->CreateMediaStream(kDefaultStreamLabel);
- remote_streams_->AddStream(default_remote_stream);
- }
- if (remote_info_.default_audio_track_needed &&
- default_remote_stream->GetAudioTracks().size() == 0) {
- remote_audio_tracks_.push_back(
- TrackInfo(kDefaultStreamLabel, kDefaultAudioTrackLabel, 0));
- OnRemoteTrackSeen(kDefaultStreamLabel, kDefaultAudioTrackLabel, 0,
- cricket::MEDIA_TYPE_AUDIO);
- }
- if (remote_info_.default_video_track_needed &&
- default_remote_stream->GetVideoTracks().size() == 0) {
- remote_video_tracks_.push_back(
- TrackInfo(kDefaultStreamLabel, kDefaultVideoTrackLabel, 0));
- OnRemoteTrackSeen(kDefaultStreamLabel, kDefaultVideoTrackLabel, 0,
- cricket::MEDIA_TYPE_VIDEO);
- }
- if (default_created) {
- stats_->AddStream(default_remote_stream);
- observer_->OnAddStream(default_remote_stream);
- }
-}
-
void PeerConnection::EndRemoteTracks(cricket::MediaType media_type) {
TrackInfos* current_tracks = GetRemoteTracks(media_type);
for (TrackInfos::iterator track_it = current_tracks->begin();
@@ -1643,62 +1696,44 @@ void PeerConnection::OnLocalTrackSeen(const std::string& stream_label,
const std::string& track_id,
uint32_t ssrc,
cricket::MediaType media_type) {
- MediaStreamInterface* stream = local_streams_->find(stream_label);
- if (!stream) {
- LOG(LS_WARNING) << "An unknown local MediaStream with label "
- << stream_label << " has been configured.";
+ RtpSenderInterface* sender = FindSenderById(track_id);
+ if (!sender) {
+ LOG(LS_WARNING) << "An unknown RtpSender with id " << track_id
+ << " has been configured in the local description.";
return;
}
- if (media_type == cricket::MEDIA_TYPE_AUDIO) {
- AudioTrackInterface* audio_track = stream->FindAudioTrack(track_id);
- if (!audio_track) {
- LOG(LS_WARNING) << "An unknown local AudioTrack with id , " << track_id
- << " has been configured.";
- return;
- }
- CreateAudioSender(stream, audio_track, ssrc);
- } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
- VideoTrackInterface* video_track = stream->FindVideoTrack(track_id);
- if (!video_track) {
- LOG(LS_WARNING) << "An unknown local VideoTrack with id , " << track_id
- << " has been configured.";
- return;
- }
- CreateVideoSender(stream, video_track, ssrc);
- } else {
- RTC_DCHECK(false && "Invalid media type");
+ if (sender->media_type() != media_type) {
+ LOG(LS_WARNING) << "An RtpSender has been configured in the local"
+ << " description with an unexpected media type.";
+ return;
}
+
+ sender->set_stream_id(stream_label);
+ sender->SetSsrc(ssrc);
}
void PeerConnection::OnLocalTrackRemoved(const std::string& stream_label,
const std::string& track_id,
uint32_t ssrc,
cricket::MediaType media_type) {
- MediaStreamInterface* stream = local_streams_->find(stream_label);
- if (!stream) {
- // This is the normal case. I.e., RemoveLocalStream has been called and the
+ RtpSenderInterface* sender = FindSenderById(track_id);
+ if (!sender) {
+ // This is the normal case. I.e., RemoveStream has been called and the
// SessionDescription has been renegotiated.
return;
}
- // A track has been removed from the SessionDescription but the MediaStream
- // is still associated with PeerConnection. This only occurs if the SDP
- // doesn't match with the calls to AddLocalStream and RemoveLocalStream.
- if (media_type == cricket::MEDIA_TYPE_AUDIO) {
- AudioTrackInterface* audio_track = stream->FindAudioTrack(track_id);
- if (!audio_track) {
- return;
- }
- DestroyAudioSender(stream, audio_track, ssrc);
- } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
- VideoTrackInterface* video_track = stream->FindVideoTrack(track_id);
- if (!video_track) {
- return;
- }
- DestroyVideoSender(stream, video_track);
- } else {
- RTC_DCHECK(false && "Invalid media type.");
+
+ // A sender has been removed from the SessionDescription but it's still
+ // associated with the PeerConnection. This only occurs if the SDP doesn't
+ // match the calls to CreateSender, AddStream, and RemoveStream.
+ if (sender->media_type() != media_type) {
+ LOG(LS_WARNING) << "An RtpSender has been configured in the local"
+ << " description with an unexpected media type.";
+ return;
}
+
+ sender->SetSsrc(0);
}
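Both handlers above rely on one assumed contract: SetSsrc doubles as the attach/detach switch, so a removed track no longer destroys its sender:

    // Assumed RtpSenderInterface::SetSsrc semantics in this patch:
    sender->SetSsrc(ssrc);  // non-zero: connect the sender to the media channel
    sender->SetSsrc(0);     // zero: disconnect; the sender and track survive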
void PeerConnection::UpdateLocalRtpDataChannels(
@@ -1806,7 +1841,7 @@ rtc::scoped_refptr<DataChannel> PeerConnection::InternalCreateDataChannel(
if (session_->data_channel_type() == cricket::DCT_SCTP) {
if (new_config.id < 0) {
rtc::SSLRole role;
- if (session_->GetSslRole(&role) &&
+ if ((session_->GetSslRole(session_->data_channel(), &role)) &&
!sid_allocator_.AllocateSid(role, &new_config.id)) {
LOG(LS_ERROR) << "No id can be allocated for the SCTP data channel.";
return nullptr;
@@ -1860,13 +1895,18 @@ void PeerConnection::AllocateSctpSids(rtc::SSLRole role) {
}
void PeerConnection::OnSctpDataChannelClosed(DataChannel* channel) {
+ RTC_DCHECK(signaling_thread()->IsCurrent());
for (auto it = sctp_data_channels_.begin(); it != sctp_data_channels_.end();
++it) {
if (it->get() == channel) {
if (channel->id() >= 0) {
sid_allocator_.ReleaseSid(channel->id());
}
+ // Since this method is triggered by a signal from the DataChannel,
+ // we can't free it directly here; we need to free it asynchronously.
+ sctp_data_channels_to_free_.push_back(*it);
sctp_data_channels_.erase(it);
+ signaling_thread()->Post(this, MSG_FREE_DATACHANNELS, nullptr);
return;
}
}
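The Post() above is the usual deferred-destruction idiom: the DataChannel signals while its own code is still on the stack, so ownership moves into sctp_data_channels_to_free_ and the release happens on the next message-loop pass. A generic sketch of the idiom with assumed names (Owner, Child, MSG_FREE_CHILDREN):

    void Owner::OnChildClosed(Child* child) {
      // Unsafe to delete |child| here; its signal is still executing.
      children_to_free_.push_back(TakeOwnership(child));
      signaling_thread()->Post(this, MSG_FREE_CHILDREN, nullptr);
    }

    void Owner::OnMessage(rtc::Message* msg) {
      if (msg->message_id == MSG_FREE_CHILDREN) {
        children_to_free_.clear();  // Safe: the emitting frame has unwound.
      }
    }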
@@ -1916,6 +1956,15 @@ void PeerConnection::OnDataChannelOpenMessage(
DataChannelProxy::Create(signaling_thread(), channel));
}
+RtpSenderInterface* PeerConnection::FindSenderById(const std::string& id) {
+ auto it =
+ std::find_if(senders_.begin(), senders_.end(),
+ [id](const rtc::scoped_refptr<RtpSenderInterface>& sender) {
+ return sender->id() == id;
+ });
+ return it != senders_.end() ? it->get() : nullptr;
+}
+
std::vector<rtc::scoped_refptr<RtpSenderInterface>>::iterator
PeerConnection::FindSenderForTrack(MediaStreamTrackInterface* track) {
return std::find_if(
diff --git a/talk/app/webrtc/peerconnection.h b/talk/app/webrtc/peerconnection.h
index 2d388ae9f9..6e2b967fb4 100644
--- a/talk/app/webrtc/peerconnection.h
+++ b/talk/app/webrtc/peerconnection.h
@@ -42,13 +42,9 @@
namespace webrtc {
+class MediaStreamObserver;
class RemoteMediaStreamFactory;
-typedef std::vector<PortAllocatorFactoryInterface::StunConfiguration>
- StunConfigurations;
-typedef std::vector<PortAllocatorFactoryInterface::TurnConfiguration>
- TurnConfigurations;
-
// Populates |session_options| from |rtc_options|, and returns true if options
// are valid.
bool ConvertRtcOptionsForOffer(
@@ -60,11 +56,11 @@ bool ConvertRtcOptionsForOffer(
bool ParseConstraintsForAnswer(const MediaConstraintsInterface* constraints,
cricket::MediaSessionOptions* session_options);
-// Parses the URLs for each server in |servers| to build |stun_config| and
-// |turn_config|.
+// Parses the URLs for each server in |servers| to build |stun_servers| and
+// |turn_servers|.
bool ParseIceServers(const PeerConnectionInterface::IceServers& servers,
- StunConfigurations* stun_config,
- TurnConfigurations* turn_config);
+ cricket::ServerAddresses* stun_servers,
+ std::vector<cricket::RelayServerConfig>* turn_servers);
// PeerConnection implements the PeerConnectionInterface interface.
// It uses WebRtcSession to implement the PeerConnection functionality.
@@ -78,9 +74,10 @@ class PeerConnection : public PeerConnectionInterface,
bool Initialize(
const PeerConnectionInterface::RTCConfiguration& configuration,
const MediaConstraintsInterface* constraints,
- PortAllocatorFactoryInterface* allocator_factory,
+ rtc::scoped_ptr<cricket::PortAllocator> allocator,
rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
PeerConnectionObserver* observer);
+
rtc::scoped_refptr<StreamCollectionInterface> local_streams() override;
rtc::scoped_refptr<StreamCollectionInterface> remote_streams() override;
bool AddStream(MediaStreamInterface* local_stream) override;
@@ -91,6 +88,10 @@ class PeerConnection : public PeerConnectionInterface,
rtc::scoped_refptr<DtmfSenderInterface> CreateDtmfSender(
AudioTrackInterface* track) override;
+ rtc::scoped_refptr<RtpSenderInterface> CreateSender(
+ const std::string& kind,
+ const std::string& stream_id) override;
+
std::vector<rtc::scoped_refptr<RtpSenderInterface>> GetSenders()
const override;
std::vector<rtc::scoped_refptr<RtpReceiverInterface>> GetReceivers()
@@ -148,32 +149,16 @@ class PeerConnection : public PeerConnectionInterface,
const std::string track_id,
uint32_t ssrc)
: stream_label(stream_label), track_id(track_id), ssrc(ssrc) {}
+ bool operator==(const TrackInfo& other) {
+ return this->stream_label == other.stream_label &&
+ this->track_id == other.track_id && this->ssrc == other.ssrc;
+ }
std::string stream_label;
std::string track_id;
uint32_t ssrc;
};
typedef std::vector<TrackInfo> TrackInfos;
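The new operator== makes TrackInfo usable with the standard algorithms; a hypothetical use, not shown in this diff:

    TrackInfo info(stream_label, track_id, ssrc);
    auto it = std::find(tracks.begin(), tracks.end(), info);
    if (it != tracks.end()) {
      tracks.erase(it);  // remove a TrackInfo by value via operator==
    }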
- struct RemotePeerInfo {
- RemotePeerInfo()
- : msid_supported(false),
- default_audio_track_needed(false),
- default_video_track_needed(false) {}
- // True if it has been discovered that the remote peer support MSID.
- bool msid_supported;
- // The remote peer indicates in the session description that audio will be
- // sent but no MSID is given.
- bool default_audio_track_needed;
- // The remote peer indicates in the session description that video will be
- // sent but no MSID is given.
- bool default_video_track_needed;
-
- bool IsDefaultMediaStreamNeeded() {
- return !msid_supported &&
- (default_audio_track_needed || default_video_track_needed);
- }
- };
-
// Implements MessageHandler.
void OnMessage(rtc::Message* msg) override;
@@ -187,12 +172,6 @@ class PeerConnection : public PeerConnectionInterface,
AudioTrackInterface* audio_track);
void DestroyVideoReceiver(MediaStreamInterface* stream,
VideoTrackInterface* video_track);
- void CreateAudioSender(MediaStreamInterface* stream,
- AudioTrackInterface* audio_track,
- uint32_t ssrc);
- void CreateVideoSender(MediaStreamInterface* stream,
- VideoTrackInterface* video_track,
- uint32_t ssrc);
void DestroyAudioSender(MediaStreamInterface* stream,
AudioTrackInterface* audio_track,
uint32_t ssrc);
@@ -210,6 +189,16 @@ class PeerConnection : public PeerConnectionInterface,
void OnSessionStateChange(WebRtcSession* session, WebRtcSession::State state);
void ChangeSignalingState(SignalingState signaling_state);
+ // Signals from MediaStreamObserver.
+ void OnAudioTrackAdded(AudioTrackInterface* track,
+ MediaStreamInterface* stream);
+ void OnAudioTrackRemoved(AudioTrackInterface* track,
+ MediaStreamInterface* stream);
+ void OnVideoTrackAdded(VideoTrackInterface* track,
+ MediaStreamInterface* stream);
+ void OnVideoTrackRemoved(VideoTrackInterface* track,
+ MediaStreamInterface* stream);
+
rtc::Thread* signaling_thread() const {
return factory_->signaling_thread();
}
@@ -236,12 +225,19 @@ class PeerConnection : public PeerConnectionInterface,
const MediaConstraintsInterface* constraints,
cricket::MediaSessionOptions* session_options);
- // Makes sure a MediaStream Track is created for each StreamParam in
- // |streams|. |media_type| is the type of the |streams| and can be either
- // audio or video.
+ // Remove all local and remote tracks of type |media_type|.
+ // Called when a media type is rejected (m-line set to port 0).
+ void RemoveTracks(cricket::MediaType media_type);
+
+ // Makes sure a MediaStreamTrack is created for each StreamParam in |streams|,
+ // and existing MediaStreamTracks are removed if there is no corresponding
+ // StreamParam. If |default_track_needed| is true, a default MediaStreamTrack
+ // is created if it doesn't exist; if false, it's removed if it exists.
+ // |media_type| is the type of the |streams| and can be either audio or video.
// If a new MediaStream is created it is added to |new_streams|.
void UpdateRemoteStreamsList(
const std::vector<cricket::StreamParams>& streams,
+ bool default_track_needed,
cricket::MediaType media_type,
StreamCollection* new_streams);
@@ -265,8 +261,6 @@ class PeerConnection : public PeerConnectionInterface,
// exist.
void UpdateEndedRemoteMediaStreams();
- void MaybeCreateDefaultStream();
-
// Set the MediaStreamTrackInterface::TrackState to |kEnded| on all remote
// tracks of type |media_type|.
void EndRemoteTracks(cricket::MediaType media_type);
@@ -328,6 +322,8 @@ class PeerConnection : public PeerConnectionInterface,
void OnDataChannelOpenMessage(const std::string& label,
const InternalDataChannelInit& config);
+ RtpSenderInterface* FindSenderById(const std::string& id);
+
std::vector<rtc::scoped_refptr<RtpSenderInterface>>::iterator
FindSenderForTrack(MediaStreamTrackInterface* track);
std::vector<rtc::scoped_refptr<RtpReceiverInterface>>::iterator
@@ -366,6 +362,8 @@ class PeerConnection : public PeerConnectionInterface,
// Streams created as a result of SetRemoteDescription.
rtc::scoped_refptr<StreamCollection> remote_streams_;
+ std::vector<rtc::scoped_ptr<MediaStreamObserver>> stream_observers_;
+
// These lists store track info seen in local/remote descriptions.
TrackInfos remote_audio_tracks_;
TrackInfos remote_video_tracks_;
@@ -376,8 +374,9 @@ class PeerConnection : public PeerConnectionInterface,
// label -> DataChannel
std::map<std::string, rtc::scoped_refptr<DataChannel>> rtp_data_channels_;
std::vector<rtc::scoped_refptr<DataChannel>> sctp_data_channels_;
+ std::vector<rtc::scoped_refptr<DataChannel>> sctp_data_channels_to_free_;
- RemotePeerInfo remote_info_;
+ bool remote_peer_supports_msid_ = false;
rtc::scoped_ptr<RemoteMediaStreamFactory> remote_stream_factory_;
std::vector<rtc::scoped_refptr<RtpSenderInterface>> senders_;
diff --git a/talk/app/webrtc/peerconnection_unittest.cc b/talk/app/webrtc/peerconnection_unittest.cc
index 3cf66d64d8..8d0793e25f 100644
--- a/talk/app/webrtc/peerconnection_unittest.cc
+++ b/talk/app/webrtc/peerconnection_unittest.cc
@@ -30,11 +30,11 @@
#include <algorithm>
#include <list>
#include <map>
+#include <utility>
#include <vector>
#include "talk/app/webrtc/dtmfsender.h"
#include "talk/app/webrtc/fakemetricsobserver.h"
-#include "talk/app/webrtc/fakeportallocatorfactory.h"
#include "talk/app/webrtc/localaudiosource.h"
#include "talk/app/webrtc/mediastreaminterface.h"
#include "talk/app/webrtc/peerconnection.h"
@@ -58,6 +58,7 @@
#include "webrtc/base/virtualsocketserver.h"
#include "webrtc/p2p/base/constants.h"
#include "webrtc/p2p/base/sessiondescription.h"
+#include "webrtc/p2p/client/fakeportallocator.h"
#define MAYBE_SKIP_TEST(feature) \
if (!(feature())) { \
@@ -78,11 +79,13 @@ using webrtc::DtmfSenderInterface;
using webrtc::DtmfSenderObserverInterface;
using webrtc::FakeConstraints;
using webrtc::MediaConstraintsInterface;
+using webrtc::MediaStreamInterface;
using webrtc::MediaStreamTrackInterface;
using webrtc::MockCreateSessionDescriptionObserver;
using webrtc::MockDataChannelObserver;
using webrtc::MockSetSessionDescriptionObserver;
using webrtc::MockStatsObserver;
+using webrtc::ObserverInterface;
using webrtc::PeerConnectionInterface;
using webrtc::PeerConnectionFactory;
using webrtc::SessionDescriptionInterface;
@@ -96,6 +99,7 @@ static const int kMaxWaitMs = 10000;
#if !defined(THREAD_SANITIZER)
static const int kMaxWaitForStatsMs = 3000;
#endif
+static const int kMaxWaitForActivationMs = 5000;
static const int kMaxWaitForFramesMs = 10000;
static const int kEndAudioFrameCount = 3;
static const int kEndVideoFrameCount = 3;
@@ -111,7 +115,7 @@ static const char kDataChannelLabel[] = "data_channel";
#if !defined(THREAD_SANITIZER)
// SRTP crypto suite negotiated by the tests. This must be updated if the
// default changes.
-static const char kDefaultSrtpCipher[] = "AES_CM_128_HMAC_SHA1_32";
+static const int kDefaultSrtpCryptoSuite = rtc::SRTP_AES128_CM_SHA1_32;
#endif
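The constant changes from a cipher-name string to a crypto-suite enum; the updated assertions below convert back to the display name only where stats are string-valued. A small sketch of that conversion, assuming the mapping yields the old kDefaultSrtpCipher value:

    const int suite = rtc::SRTP_AES128_CM_SHA1_32;
    // Assumed to yield "AES_CM_128_HMAC_SHA1_32", the removed string constant.
    std::string name = rtc::SrtpCryptoSuiteToName(suite);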
static void RemoveLinesFromSdp(const std::string& line_start,
@@ -139,26 +143,35 @@ class SignalingMessageReceiver {
};
class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
- public SignalingMessageReceiver {
+ public SignalingMessageReceiver,
+ public ObserverInterface {
public:
- static PeerConnectionTestClient* CreateClient(
+ static PeerConnectionTestClient* CreateClientWithDtlsIdentityStore(
const std::string& id,
const MediaConstraintsInterface* constraints,
- const PeerConnectionFactory::Options* options) {
+ const PeerConnectionFactory::Options* options,
+ rtc::scoped_ptr<webrtc::DtlsIdentityStoreInterface> dtls_identity_store) {
PeerConnectionTestClient* client(new PeerConnectionTestClient(id));
- if (!client->Init(constraints, options)) {
+ if (!client->Init(constraints, options, std::move(dtls_identity_store))) {
delete client;
return nullptr;
}
return client;
}
+ static PeerConnectionTestClient* CreateClient(
+ const std::string& id,
+ const MediaConstraintsInterface* constraints,
+ const PeerConnectionFactory::Options* options) {
+ rtc::scoped_ptr<FakeDtlsIdentityStore> dtls_identity_store(
+ rtc::SSLStreamAdapter::HaveDtlsSrtp() ? new FakeDtlsIdentityStore()
+ : nullptr);
+
+ return CreateClientWithDtlsIdentityStore(id, constraints, options,
+ std::move(dtls_identity_store));
+ }
+
~PeerConnectionTestClient() {
- while (!fake_video_renderers_.empty()) {
- RenderMap::iterator it = fake_video_renderers_.begin();
- delete it->second;
- fake_video_renderers_.erase(it);
- }
}
void Negotiate() { Negotiate(true, true); }
@@ -206,16 +219,17 @@ class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
webrtc::PeerConnectionInterface::SignalingState new_state) override {
EXPECT_EQ(pc()->signaling_state(), new_state);
}
- void OnAddStream(webrtc::MediaStreamInterface* media_stream) override {
+ void OnAddStream(MediaStreamInterface* media_stream) override {
+ media_stream->RegisterObserver(this);
for (size_t i = 0; i < media_stream->GetVideoTracks().size(); ++i) {
const std::string id = media_stream->GetVideoTracks()[i]->id();
ASSERT_TRUE(fake_video_renderers_.find(id) ==
fake_video_renderers_.end());
- fake_video_renderers_[id] =
- new webrtc::FakeVideoTrackRenderer(media_stream->GetVideoTracks()[i]);
+ fake_video_renderers_[id].reset(new webrtc::FakeVideoTrackRenderer(
+ media_stream->GetVideoTracks()[i]));
}
}
- void OnRemoveStream(webrtc::MediaStreamInterface* media_stream) override {}
+ void OnRemoveStream(MediaStreamInterface* media_stream) override {}
void OnRenegotiationNeeded() override {}
void OnIceConnectionChange(
webrtc::PeerConnectionInterface::IceConnectionState new_state) override {
@@ -238,6 +252,40 @@ class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
candidate->sdp_mid(), candidate->sdp_mline_index(), ice_sdp);
}
+ // MediaStreamInterface callback
+ void OnChanged() override {
+ // Track added or removed from MediaStream, so update our renderers.
+ rtc::scoped_refptr<StreamCollectionInterface> remote_streams =
+ pc()->remote_streams();
+ // Remove renderers for tracks that were removed.
+ for (auto it = fake_video_renderers_.begin();
+ it != fake_video_renderers_.end();) {
+ if (remote_streams->FindVideoTrack(it->first) == nullptr) {
+ auto to_remove = it++;
+ removed_fake_video_renderers_.push_back(std::move(to_remove->second));
+ fake_video_renderers_.erase(to_remove);
+ } else {
+ ++it;
+ }
+ }
+ // Create renderers for new video tracks.
+ for (size_t stream_index = 0; stream_index < remote_streams->count();
+ ++stream_index) {
+ MediaStreamInterface* remote_stream = remote_streams->at(stream_index);
+ for (size_t track_index = 0;
+ track_index < remote_stream->GetVideoTracks().size();
+ ++track_index) {
+ const std::string id =
+ remote_stream->GetVideoTracks()[track_index]->id();
+ if (fake_video_renderers_.find(id) != fake_video_renderers_.end()) {
+ continue;
+ }
+ fake_video_renderers_[id].reset(new webrtc::FakeVideoTrackRenderer(
+ remote_stream->GetVideoTracks()[track_index]));
+ }
+ }
+ }
+
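With the client registered as a MediaStreamInterface observer in OnAddStream, per-track changes now arrive through OnChanged rather than only whole-stream callbacks. A sketch of the assumed trigger path:

    stream->RegisterObserver(this);    // done in OnAddStream above
    stream->RemoveTrack(video_track);  // fires ObserverInterface::OnChanged,
                                       // which retires that track's renderer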
void SetVideoConstraints(const webrtc::FakeConstraints& video_constraint) {
video_constraints_ = video_constraint;
}
@@ -246,22 +294,11 @@ class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
std::string stream_label =
kStreamLabelBase +
rtc::ToString<int>(static_cast<int>(pc()->local_streams()->count()));
- rtc::scoped_refptr<webrtc::MediaStreamInterface> stream =
+ rtc::scoped_refptr<MediaStreamInterface> stream =
peer_connection_factory_->CreateLocalMediaStream(stream_label);
if (audio && can_receive_audio()) {
- FakeConstraints constraints;
- // Disable highpass filter so that we can get all the test audio frames.
- constraints.AddMandatory(
- MediaConstraintsInterface::kHighpassFilter, false);
- rtc::scoped_refptr<webrtc::AudioSourceInterface> source =
- peer_connection_factory_->CreateAudioSource(&constraints);
- // TODO(perkj): Test audio source when it is implemented. Currently audio
- // always use the default input.
- std::string label = stream_label + kAudioTrackLabelBase;
- rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
- peer_connection_factory_->CreateAudioTrack(label, source));
- stream->AddTrack(audio_track);
+ stream->AddTrack(CreateLocalAudioTrack(stream_label));
}
if (video && can_receive_video()) {
stream->AddTrack(CreateLocalVideoTrack(stream_label));
@@ -276,6 +313,12 @@ class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
return pc()->signaling_state() == webrtc::PeerConnectionInterface::kStable;
}
+ // Automatically add a stream when receiving an offer, if we don't have one.
+ // Defaults to true.
+ void set_auto_add_stream(bool auto_add_stream) {
+ auto_add_stream_ = auto_add_stream;
+ }
+
void set_signaling_message_receiver(
SignalingMessageReceiver* signaling_message_receiver) {
signaling_message_receiver_ = signaling_message_receiver;
@@ -357,6 +400,35 @@ class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
data_observer_.reset(new MockDataChannelObserver(data_channel_));
}
+ rtc::scoped_refptr<webrtc::AudioTrackInterface> CreateLocalAudioTrack(
+ const std::string& stream_label) {
+ FakeConstraints constraints;
+ // Disable highpass filter so that we can get all the test audio frames.
+ constraints.AddMandatory(MediaConstraintsInterface::kHighpassFilter, false);
+ rtc::scoped_refptr<webrtc::AudioSourceInterface> source =
+ peer_connection_factory_->CreateAudioSource(&constraints);
+ // TODO(perkj): Test audio source when it is implemented. Currently audio
+ // always uses the default input.
+ std::string label = stream_label + kAudioTrackLabelBase;
+ return peer_connection_factory_->CreateAudioTrack(label, source);
+ }
+
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> CreateLocalVideoTrack(
+ const std::string& stream_label) {
+ // Set max frame rate to 10fps to reduce the risk of the tests being flaky.
+ FakeConstraints source_constraints = video_constraints_;
+ source_constraints.SetMandatoryMaxFrameRate(10);
+
+ cricket::FakeVideoCapturer* fake_capturer =
+ new webrtc::FakePeriodicVideoCapturer();
+ video_capturers_.push_back(fake_capturer);
+ rtc::scoped_refptr<webrtc::VideoSourceInterface> source =
+ peer_connection_factory_->CreateVideoSource(fake_capturer,
+ &source_constraints);
+ std::string label = stream_label + kVideoTrackLabelBase;
+ return peer_connection_factory_->CreateVideoTrack(label, source);
+ }
+
DataChannelInterface* data_channel() { return data_channel_; }
const MockDataChannelObserver* data_observer() const {
return data_observer_.get();
@@ -376,6 +448,10 @@ class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
return number_of_frames <= fake_audio_capture_module_->frames_received();
}
+ int audio_frames_received() const {
+ return fake_audio_capture_module_->frames_received();
+ }
+
bool VideoFramesReceivedCheck(int number_of_frames) {
if (video_decoder_factory_enabled_) {
const std::vector<FakeWebRtcVideoDecoder*>& decoders
@@ -384,9 +460,8 @@ class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
return number_of_frames <= 0;
}
- for (std::vector<FakeWebRtcVideoDecoder*>::const_iterator
- it = decoders.begin(); it != decoders.end(); ++it) {
- if (number_of_frames > (*it)->GetNumFramesReceived()) {
+ for (FakeWebRtcVideoDecoder* decoder : decoders) {
+ if (number_of_frames > decoder->GetNumFramesReceived()) {
return false;
}
}
@@ -396,9 +471,8 @@ class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
return number_of_frames <= 0;
}
- for (RenderMap::const_iterator it = fake_video_renderers_.begin();
- it != fake_video_renderers_.end(); ++it) {
- if (number_of_frames > it->second->num_rendered_frames()) {
+ for (const auto& pair : fake_video_renderers_) {
+ if (number_of_frames > pair.second->num_rendered_frames()) {
return false;
}
}
@@ -406,6 +480,25 @@ class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
}
}
+ int video_frames_received() const {
+ int total = 0;
+ if (video_decoder_factory_enabled_) {
+ const std::vector<FakeWebRtcVideoDecoder*>& decoders =
+ fake_video_decoder_factory_->decoders();
+ for (const FakeWebRtcVideoDecoder* decoder : decoders) {
+ total += decoder->GetNumFramesReceived();
+ }
+ } else {
+ for (const auto& pair : fake_video_renderers_) {
+ total += pair.second->num_rendered_frames();
+ }
+ for (const auto& renderer : removed_fake_video_renderers_) {
+ total += renderer->num_rendered_frames();
+ }
+ }
+ return total;
+ }
+
// Verify the CreateDtmfSender interface
void VerifyDtmf() {
rtc::scoped_ptr<DummyDtmfObserver> observer(new DummyDtmfObserver());
@@ -641,14 +734,14 @@ class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
explicit PeerConnectionTestClient(const std::string& id) : id_(id) {}
- bool Init(const MediaConstraintsInterface* constraints,
- const PeerConnectionFactory::Options* options) {
+ bool Init(
+ const MediaConstraintsInterface* constraints,
+ const PeerConnectionFactory::Options* options,
+ rtc::scoped_ptr<webrtc::DtlsIdentityStoreInterface> dtls_identity_store) {
EXPECT_TRUE(!peer_connection_);
EXPECT_TRUE(!peer_connection_factory_);
- allocator_factory_ = webrtc::FakePortAllocatorFactory::Create();
- if (!allocator_factory_) {
- return false;
- }
+ rtc::scoped_ptr<cricket::PortAllocator> port_allocator(
+ new cricket::FakePortAllocator(rtc::Thread::Current(), nullptr));
fake_audio_capture_module_ = FakeAudioCaptureModule::Create();
if (fake_audio_capture_module_ == nullptr) {
@@ -666,46 +759,29 @@ class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
if (options) {
peer_connection_factory_->SetOptions(*options);
}
- peer_connection_ = CreatePeerConnection(allocator_factory_.get(),
- constraints);
+ peer_connection_ = CreatePeerConnection(
+ std::move(port_allocator), constraints, std::move(dtls_identity_store));
return peer_connection_.get() != nullptr;
}
- rtc::scoped_refptr<webrtc::VideoTrackInterface>
- CreateLocalVideoTrack(const std::string stream_label) {
- // Set max frame rate to 10fps to reduce the risk of the tests to be flaky.
- FakeConstraints source_constraints = video_constraints_;
- source_constraints.SetMandatoryMaxFrameRate(10);
-
- cricket::FakeVideoCapturer* fake_capturer =
- new webrtc::FakePeriodicVideoCapturer();
- video_capturers_.push_back(fake_capturer);
- rtc::scoped_refptr<webrtc::VideoSourceInterface> source =
- peer_connection_factory_->CreateVideoSource(
- fake_capturer, &source_constraints);
- std::string label = stream_label + kVideoTrackLabelBase;
- return peer_connection_factory_->CreateVideoTrack(label, source);
- }
-
rtc::scoped_refptr<webrtc::PeerConnectionInterface> CreatePeerConnection(
- webrtc::PortAllocatorFactoryInterface* factory,
- const MediaConstraintsInterface* constraints) {
- // CreatePeerConnection with IceServers.
- webrtc::PeerConnectionInterface::IceServers ice_servers;
+ rtc::scoped_ptr<cricket::PortAllocator> port_allocator,
+ const MediaConstraintsInterface* constraints,
+ rtc::scoped_ptr<webrtc::DtlsIdentityStoreInterface> dtls_identity_store) {
+ // CreatePeerConnection with RTCConfiguration.
+ webrtc::PeerConnectionInterface::RTCConfiguration config;
webrtc::PeerConnectionInterface::IceServer ice_server;
ice_server.uri = "stun:stun.l.google.com:19302";
- ice_servers.push_back(ice_server);
+ config.servers.push_back(ice_server);
- rtc::scoped_ptr<webrtc::DtlsIdentityStoreInterface> dtls_identity_store(
- rtc::SSLStreamAdapter::HaveDtlsSrtp() ? new FakeDtlsIdentityStore()
- : nullptr);
return peer_connection_factory_->CreatePeerConnection(
- ice_servers, constraints, factory, dtls_identity_store.Pass(), this);
+ config, constraints, std::move(port_allocator),
+ std::move(dtls_identity_store), this);
}
void HandleIncomingOffer(const std::string& msg) {
LOG(INFO) << id_ << "HandleIncomingOffer ";
- if (NumberOfLocalMediaStreams() == 0) {
+ if (NumberOfLocalMediaStreams() == 0 && auto_add_stream_) {
// If we are not sending any streams ourselves it is time to add some.
AddMediaStream(true, true);
}
@@ -807,20 +883,24 @@ class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
std::string id_;
- rtc::scoped_refptr<webrtc::PortAllocatorFactoryInterface> allocator_factory_;
rtc::scoped_refptr<webrtc::PeerConnectionInterface> peer_connection_;
rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface>
peer_connection_factory_;
+ bool auto_add_stream_ = true;
+
typedef std::pair<std::string, std::string> IceUfragPwdPair;
std::map<int, IceUfragPwdPair> ice_ufrag_pwd_;
bool expect_ice_restart_ = false;
- // Needed to keep track of number of frames send.
+ // Needed to keep track of number of frames sent.
rtc::scoped_refptr<FakeAudioCaptureModule> fake_audio_capture_module_;
// Needed to keep track of number of frames received.
- typedef std::map<std::string, webrtc::FakeVideoTrackRenderer*> RenderMap;
- RenderMap fake_video_renderers_;
+ std::map<std::string, rtc::scoped_ptr<webrtc::FakeVideoTrackRenderer>>
+ fake_video_renderers_;
+ // Needed to ensure frames aren't received for removed tracks.
+ std::vector<rtc::scoped_ptr<webrtc::FakeVideoTrackRenderer>>
+ removed_fake_video_renderers_;
// Needed to keep track of number of frames received when external decoder
// used.
FakeWebRtcVideoDecoderFactory* fake_video_decoder_factory_ = nullptr;
@@ -846,11 +926,9 @@ class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
rtc::scoped_ptr<MockDataChannelObserver> data_observer_;
};
-// TODO(deadbeef): Rename this to P2PTestConductor once the Linux memcheck and
-// Windows DrMemory Full bots' blacklists are updated.
-class JsepPeerConnectionP2PTestClient : public testing::Test {
+class P2PTestConductor : public testing::Test {
public:
- JsepPeerConnectionP2PTestClient()
+ P2PTestConductor()
: pss_(new rtc::PhysicalSocketServer),
ss_(new rtc::VirtualSocketServer(pss_.get())),
ss_scope_(ss_.get()) {}
@@ -882,13 +960,26 @@ class JsepPeerConnectionP2PTestClient : public testing::Test {
}
void TestUpdateOfferWithRejectedContent() {
+ // Renegotiate, rejecting the video m-line.
initiating_client_->Negotiate(true, false);
- EXPECT_TRUE_WAIT(
- FramesNotPending(kEndAudioFrameCount * 2, kEndVideoFrameCount),
- kMaxWaitForFramesMs);
- // There shouldn't be any more video frame after the new offer is
- // negotiated.
- EXPECT_FALSE(VideoFramesReceivedCheck(kEndVideoFrameCount + 1));
+ ASSERT_TRUE_WAIT(SessionActive(), kMaxWaitForActivationMs);
+
+ int pc1_audio_received = initiating_client_->audio_frames_received();
+ int pc1_video_received = initiating_client_->video_frames_received();
+ int pc2_audio_received = receiving_client_->audio_frames_received();
+ int pc2_video_received = receiving_client_->video_frames_received();
+
+ // Wait for some additional audio frames to be received.
+ EXPECT_TRUE_WAIT(initiating_client_->AudioFramesReceivedCheck(
+ pc1_audio_received + kEndAudioFrameCount) &&
+ receiving_client_->AudioFramesReceivedCheck(
+ pc2_audio_received + kEndAudioFrameCount),
+ kMaxWaitForFramesMs);
+
+ // During this time, we shouldn't have received any additional video frames
+ // for the rejected video tracks.
+ EXPECT_EQ(pc1_video_received, initiating_client_->video_frames_received());
+ EXPECT_EQ(pc2_video_received, receiving_client_->video_frames_received());
}
void VerifyRenderedSize(int width, int height) {
@@ -905,7 +996,7 @@ class JsepPeerConnectionP2PTestClient : public testing::Test {
receiving_client_->VerifyLocalIceUfragAndPassword();
}
- ~JsepPeerConnectionP2PTestClient() {
+ ~P2PTestConductor() {
if (initiating_client_) {
initiating_client_->set_signaling_message_receiver(nullptr);
}
@@ -922,6 +1013,11 @@ class JsepPeerConnectionP2PTestClient : public testing::Test {
nullptr);
}
+ void SetSignalingReceivers() {
+ initiating_client_->set_signaling_message_receiver(receiving_client_.get());
+ receiving_client_->set_signaling_message_receiver(initiating_client_.get());
+ }
+
bool CreateTestClients(MediaConstraintsInterface* init_constraints,
PeerConnectionFactory::Options* init_options,
MediaConstraintsInterface* recv_constraints,
@@ -933,8 +1029,7 @@ class JsepPeerConnectionP2PTestClient : public testing::Test {
if (!initiating_client_ || !receiving_client_) {
return false;
}
- initiating_client_->set_signaling_message_receiver(receiving_client_.get());
- receiving_client_->set_signaling_message_receiver(initiating_client_.get());
+ SetSignalingReceivers();
return true;
}
@@ -957,13 +1052,11 @@ class JsepPeerConnectionP2PTestClient : public testing::Test {
initiating_client_->AddMediaStream(true, true);
}
initiating_client_->Negotiate();
- const int kMaxWaitForActivationMs = 5000;
// ASSERT_TRUE is used here since the following checks are guaranteed to
// fail and would eat up 5 seconds.
ASSERT_TRUE_WAIT(SessionActive(), kMaxWaitForActivationMs);
VerifySessionDescriptions();
-
int audio_frame_count = kEndAudioFrameCount;
// TODO(ronghuawu): Add test to cover the case of sendonly and recvonly.
if (!initiating_client_->can_receive_audio() ||
@@ -1013,6 +1106,32 @@ class JsepPeerConnectionP2PTestClient : public testing::Test {
kMaxWaitForFramesMs);
}
+ void SetupAndVerifyDtlsCall() {
+ MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+ FakeConstraints setup_constraints;
+ setup_constraints.AddMandatory(MediaConstraintsInterface::kEnableDtlsSrtp,
+ true);
+ ASSERT_TRUE(CreateTestClients(&setup_constraints, &setup_constraints));
+ LocalP2PTest();
+ VerifyRenderedSize(640, 480);
+ }
+
+ PeerConnectionTestClient* CreateDtlsClientWithAlternateKey() {
+ FakeConstraints setup_constraints;
+ setup_constraints.AddMandatory(MediaConstraintsInterface::kEnableDtlsSrtp,
+ true);
+
+ rtc::scoped_ptr<FakeDtlsIdentityStore> dtls_identity_store(
+ rtc::SSLStreamAdapter::HaveDtlsSrtp() ? new FakeDtlsIdentityStore()
+ : nullptr);
+ dtls_identity_store->use_alternate_key();
+
+ // Make sure the new client is using a different certificate.
+ return PeerConnectionTestClient::CreateClientWithDtlsIdentityStore(
+ "New Peer: ", &setup_constraints, nullptr,
+ std::move(dtls_identity_store));
+ }
+
void SendRtpData(webrtc::DataChannelInterface* dc, const std::string& data) {
// Messages may get lost on the unreliable DataChannel, so we send multiple
// times to avoid test flakiness.
@@ -1026,10 +1145,29 @@ class JsepPeerConnectionP2PTestClient : public testing::Test {
PeerConnectionTestClient* initializing_client() {
return initiating_client_.get();
}
+
+ // Set the |initiating_client_| to the |client| passed in and return the
+ // original |initiating_client_|.
+ PeerConnectionTestClient* set_initializing_client(
+ PeerConnectionTestClient* client) {
+ PeerConnectionTestClient* old = initiating_client_.release();
+ initiating_client_.reset(client);
+ return old;
+ }
+
PeerConnectionTestClient* receiving_client() {
return receiving_client_.get();
}
+ // Set the |receiving_client_| to the |client| passed in and return the
+ // original |receiving_client_|.
+ PeerConnectionTestClient* set_receiving_client(
+ PeerConnectionTestClient* client) {
+ PeerConnectionTestClient* old = receiving_client_.release();
+ receiving_client_.reset(client);
+ return old;
+ }
+
private:
rtc::scoped_ptr<rtc::PhysicalSocketServer> pss_;
rtc::scoped_ptr<rtc::VirtualSocketServer> ss_;
@@ -1045,7 +1183,7 @@ class JsepPeerConnectionP2PTestClient : public testing::Test {
// This test sets up a Jsep call between two parties and tests DTMF.
// TODO(holmer): Disabled due to sometimes crashing on buildbots.
// See issue webrtc/2378.
-TEST_F(JsepPeerConnectionP2PTestClient, DISABLED_LocalP2PTestDtmf) {
+TEST_F(P2PTestConductor, DISABLED_LocalP2PTestDtmf) {
ASSERT_TRUE(CreateTestClients());
LocalP2PTest();
VerifyDtmf();
@@ -1053,7 +1191,7 @@ TEST_F(JsepPeerConnectionP2PTestClient, DISABLED_LocalP2PTestDtmf) {
// This test sets up a Jsep call between two parties and tests that we can get a
// video aspect ratio of 16:9.
-TEST_F(JsepPeerConnectionP2PTestClient, LocalP2PTest16To9) {
+TEST_F(P2PTestConductor, LocalP2PTest16To9) {
ASSERT_TRUE(CreateTestClients());
FakeConstraints constraint;
double requested_ratio = 640.0/360;
@@ -1078,7 +1216,7 @@ TEST_F(JsepPeerConnectionP2PTestClient, LocalP2PTest16To9) {
// received video has a resolution of 1280*720.
// TODO(mallinath): Enable when
// http://code.google.com/p/webrtc/issues/detail?id=981 is fixed.
-TEST_F(JsepPeerConnectionP2PTestClient, DISABLED_LocalP2PTest1280By720) {
+TEST_F(P2PTestConductor, DISABLED_LocalP2PTest1280By720) {
ASSERT_TRUE(CreateTestClients());
FakeConstraints constraint;
constraint.SetMandatoryMinWidth(1280);
@@ -1090,34 +1228,84 @@ TEST_F(JsepPeerConnectionP2PTestClient, DISABLED_LocalP2PTest1280By720) {
// This test sets up a call between two endpoints that are configured to use
// DTLS key agreement. As a result, DTLS is negotiated and used for transport.
-TEST_F(JsepPeerConnectionP2PTestClient, LocalP2PTestDtls) {
+TEST_F(P2PTestConductor, LocalP2PTestDtls) {
+ SetupAndVerifyDtlsCall();
+}
+
+// This test sets up an audio call initially and then upgrades to audio/video,
+// using DTLS.
+TEST_F(P2PTestConductor, LocalP2PTestDtlsRenegotiate) {
MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
FakeConstraints setup_constraints;
setup_constraints.AddMandatory(MediaConstraintsInterface::kEnableDtlsSrtp,
true);
ASSERT_TRUE(CreateTestClients(&setup_constraints, &setup_constraints));
+ receiving_client()->SetReceiveAudioVideo(true, false);
+ LocalP2PTest();
+ receiving_client()->SetReceiveAudioVideo(true, true);
+ receiving_client()->Negotiate();
+}
+
+// This test sets up a call transfer to a new caller with a different DTLS
+// fingerprint.
+TEST_F(P2PTestConductor, LocalP2PTestDtlsTransferCallee) {
+ MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+ SetupAndVerifyDtlsCall();
+
+ // Keep the original peer around; it will still send packets to the
+ // receiving client. These SRTP packets will be dropped.
+ rtc::scoped_ptr<PeerConnectionTestClient> original_peer(
+ set_initializing_client(CreateDtlsClientWithAlternateKey()));
+ original_peer->pc()->Close();
+
+ SetSignalingReceivers();
+ receiving_client()->SetExpectIceRestart(true);
LocalP2PTest();
VerifyRenderedSize(640, 480);
}
-// This test sets up a audio call initially and then upgrades to audio/video,
-// using DTLS.
-TEST_F(JsepPeerConnectionP2PTestClient, LocalP2PTestDtlsRenegotiate) {
+// This test sets up a non-BUNDLE call and applies BUNDLE during ICE restart.
+// When BUNDLE is in effect after the restart, the channel can successfully
+// reset its DTLS-SRTP context.
+TEST_F(P2PTestConductor, LocalP2PTestDtlsBundleInIceRestart) {
MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
FakeConstraints setup_constraints;
setup_constraints.AddMandatory(MediaConstraintsInterface::kEnableDtlsSrtp,
true);
ASSERT_TRUE(CreateTestClients(&setup_constraints, &setup_constraints));
- receiving_client()->SetReceiveAudioVideo(true, false);
+ receiving_client()->RemoveBundleFromReceivedSdp(true);
LocalP2PTest();
- receiving_client()->SetReceiveAudioVideo(true, true);
- receiving_client()->Negotiate();
+ VerifyRenderedSize(640, 480);
+
+ initializing_client()->IceRestart();
+ receiving_client()->SetExpectIceRestart(true);
+ receiving_client()->RemoveBundleFromReceivedSdp(false);
+ LocalP2PTest();
+ VerifyRenderedSize(640, 480);
+}
+
+// This test sets up a call transfer to a new callee with a different DTLS
+// fingerprint.
+TEST_F(P2PTestConductor, LocalP2PTestDtlsTransferCaller) {
+ MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+ SetupAndVerifyDtlsCall();
+
+ // Keep the original peer around; it will still send packets to the
+ // receiving client. These SRTP packets will be dropped.
+ rtc::scoped_ptr<PeerConnectionTestClient> original_peer(
+ set_receiving_client(CreateDtlsClientWithAlternateKey()));
+ original_peer->pc()->Close();
+
+ SetSignalingReceivers();
+ initializing_client()->IceRestart();
+ LocalP2PTest();
+ VerifyRenderedSize(640, 480);
}
// This test sets up a call between two endpoints that are configured to use
// DTLS key agreement. The offerer doesn't support SDES. As a result, DTLS is
// negotiated and used for transport.
-TEST_F(JsepPeerConnectionP2PTestClient, LocalP2PTestOfferDtlsButNotSdes) {
+TEST_F(P2PTestConductor, LocalP2PTestOfferDtlsButNotSdes) {
MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
FakeConstraints setup_constraints;
setup_constraints.AddMandatory(MediaConstraintsInterface::kEnableDtlsSrtp,
@@ -1130,7 +1318,7 @@ TEST_F(JsepPeerConnectionP2PTestClient, LocalP2PTestOfferDtlsButNotSdes) {
// This test sets up a Jsep call between two parties, and the callee only
// accepts receiving video.
-TEST_F(JsepPeerConnectionP2PTestClient, LocalP2PTestAnswerVideo) {
+TEST_F(P2PTestConductor, LocalP2PTestAnswerVideo) {
ASSERT_TRUE(CreateTestClients());
receiving_client()->SetReceiveAudioVideo(false, true);
LocalP2PTest();
@@ -1138,7 +1326,7 @@ TEST_F(JsepPeerConnectionP2PTestClient, LocalP2PTestAnswerVideo) {
// This test sets up a Jsep call between two parties, and the callee only
// accepts receiving audio.
-TEST_F(JsepPeerConnectionP2PTestClient, LocalP2PTestAnswerAudio) {
+TEST_F(P2PTestConductor, LocalP2PTestAnswerAudio) {
ASSERT_TRUE(CreateTestClients());
receiving_client()->SetReceiveAudioVideo(true, false);
LocalP2PTest();
@@ -1146,7 +1334,7 @@ TEST_F(JsepPeerConnectionP2PTestClient, LocalP2PTestAnswerAudio) {
// This test sets up a Jsep call between two parties, and the callee rejects both
// audio and video.
-TEST_F(JsepPeerConnectionP2PTestClient, LocalP2PTestAnswerNone) {
+TEST_F(P2PTestConductor, LocalP2PTestAnswerNone) {
ASSERT_TRUE(CreateTestClients());
receiving_client()->SetReceiveAudioVideo(false, false);
LocalP2PTest();
@@ -1156,9 +1344,7 @@ TEST_F(JsepPeerConnectionP2PTestClient, LocalP2PTestAnswerNone) {
// runs for a while (10 frames), the caller sends an updated offer with video
// being rejected. Once the re-negotiation is done, the video flow should stop
// and the audio flow should continue.
-// Disabled due to b/14955157.
-TEST_F(JsepPeerConnectionP2PTestClient,
- DISABLED_UpdateOfferWithRejectedContent) {
+TEST_F(P2PTestConductor, UpdateOfferWithRejectedContent) {
ASSERT_TRUE(CreateTestClients());
LocalP2PTest();
TestUpdateOfferWithRejectedContent();
@@ -1166,8 +1352,7 @@ TEST_F(JsepPeerConnectionP2PTestClient,
// This test sets up a Jsep call between two parties. The MSID is removed from
// the SDP strings from the caller.
-// Disabled due to b/14955157.
-TEST_F(JsepPeerConnectionP2PTestClient, DISABLED_LocalP2PTestWithoutMsid) {
+TEST_F(P2PTestConductor, LocalP2PTestWithoutMsid) {
ASSERT_TRUE(CreateTestClients());
receiving_client()->RemoveMsidFromReceivedSdp(true);
// TODO(perkj): Currently there is a bug that causes audio to stop playing if
@@ -1182,7 +1367,7 @@ TEST_F(JsepPeerConnectionP2PTestClient, DISABLED_LocalP2PTestWithoutMsid) {
// sends two streams.
// TODO(perkj): Disabled due to
// https://code.google.com/p/webrtc/issues/detail?id=1454
-TEST_F(JsepPeerConnectionP2PTestClient, DISABLED_LocalP2PTestTwoStreams) {
+TEST_F(P2PTestConductor, DISABLED_LocalP2PTestTwoStreams) {
ASSERT_TRUE(CreateTestClients());
// Set optional video constraint to max 320pixels to decrease CPU usage.
FakeConstraints constraint;
@@ -1196,7 +1381,7 @@ TEST_F(JsepPeerConnectionP2PTestClient, DISABLED_LocalP2PTestTwoStreams) {
}
// Test that we can receive the audio output level from a remote audio track.
-TEST_F(JsepPeerConnectionP2PTestClient, GetAudioOutputLevelStats) {
+TEST_F(P2PTestConductor, GetAudioOutputLevelStats) {
ASSERT_TRUE(CreateTestClients());
LocalP2PTest();
@@ -1215,7 +1400,7 @@ TEST_F(JsepPeerConnectionP2PTestClient, GetAudioOutputLevelStats) {
}
// Test that an audio input level is reported.
-TEST_F(JsepPeerConnectionP2PTestClient, GetAudioInputLevelStats) {
+TEST_F(P2PTestConductor, GetAudioInputLevelStats) {
ASSERT_TRUE(CreateTestClients());
LocalP2PTest();
@@ -1226,7 +1411,7 @@ TEST_F(JsepPeerConnectionP2PTestClient, GetAudioInputLevelStats) {
}
// Test that we can get incoming byte counts from both audio and video tracks.
-TEST_F(JsepPeerConnectionP2PTestClient, GetBytesReceivedStats) {
+TEST_F(P2PTestConductor, GetBytesReceivedStats) {
ASSERT_TRUE(CreateTestClients());
LocalP2PTest();
@@ -1248,7 +1433,7 @@ TEST_F(JsepPeerConnectionP2PTestClient, GetBytesReceivedStats) {
}
// Test that we can get outgoing byte counts from both audio and video tracks.
-TEST_F(JsepPeerConnectionP2PTestClient, GetBytesSentStats) {
+TEST_F(P2PTestConductor, GetBytesSentStats) {
ASSERT_TRUE(CreateTestClients());
LocalP2PTest();
@@ -1270,7 +1455,7 @@ TEST_F(JsepPeerConnectionP2PTestClient, GetBytesSentStats) {
}
// Test that DTLS 1.0 is used if both sides only support DTLS 1.0.
-TEST_F(JsepPeerConnectionP2PTestClient, GetDtls12None) {
+TEST_F(P2PTestConductor, GetDtls12None) {
PeerConnectionFactory::Options init_options;
init_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_10;
PeerConnectionFactory::Options recv_options;
@@ -1282,7 +1467,7 @@ TEST_F(JsepPeerConnectionP2PTestClient, GetDtls12None) {
initializing_client()->pc()->RegisterUMAObserver(init_observer);
LocalP2PTest();
- EXPECT_EQ_WAIT(rtc::SSLStreamAdapter::GetSslCipherSuiteName(
+ EXPECT_EQ_WAIT(rtc::SSLStreamAdapter::SslCipherSuiteToName(
rtc::SSLStreamAdapter::GetDefaultSslCipherForTest(
rtc::SSL_PROTOCOL_DTLS_10, rtc::KT_DEFAULT)),
initializing_client()->GetDtlsCipherStats(),
@@ -1292,16 +1477,23 @@ TEST_F(JsepPeerConnectionP2PTestClient, GetDtls12None) {
rtc::SSLStreamAdapter::GetDefaultSslCipherForTest(
rtc::SSL_PROTOCOL_DTLS_10, rtc::KT_DEFAULT)));
- EXPECT_EQ_WAIT(kDefaultSrtpCipher,
+ EXPECT_EQ_WAIT(rtc::SrtpCryptoSuiteToName(kDefaultSrtpCryptoSuite),
initializing_client()->GetSrtpCipherStats(),
kMaxWaitForStatsMs);
- EXPECT_EQ(1, init_observer->GetEnumCounter(
- webrtc::kEnumCounterAudioSrtpCipher,
- rtc::GetSrtpCryptoSuiteFromName(kDefaultSrtpCipher)));
+ EXPECT_EQ(1,
+ init_observer->GetEnumCounter(webrtc::kEnumCounterAudioSrtpCipher,
+ kDefaultSrtpCryptoSuite));
}
+#if defined(MEMORY_SANITIZER)
+// Fails under MemorySanitizer:
+// See https://code.google.com/p/webrtc/issues/detail?id=5381.
+#define MAYBE_GetDtls12Both DISABLED_GetDtls12Both
+#else
+#define MAYBE_GetDtls12Both GetDtls12Both
+#endif
// Test that DTLS 1.2 is used if both ends support it.
-TEST_F(JsepPeerConnectionP2PTestClient, GetDtls12Both) {
+TEST_F(P2PTestConductor, MAYBE_GetDtls12Both) {
PeerConnectionFactory::Options init_options;
init_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_12;
PeerConnectionFactory::Options recv_options;
@@ -1313,7 +1505,7 @@ TEST_F(JsepPeerConnectionP2PTestClient, GetDtls12Both) {
initializing_client()->pc()->RegisterUMAObserver(init_observer);
LocalP2PTest();
- EXPECT_EQ_WAIT(rtc::SSLStreamAdapter::GetSslCipherSuiteName(
+ EXPECT_EQ_WAIT(rtc::SSLStreamAdapter::SslCipherSuiteToName(
rtc::SSLStreamAdapter::GetDefaultSslCipherForTest(
rtc::SSL_PROTOCOL_DTLS_12, rtc::KT_DEFAULT)),
initializing_client()->GetDtlsCipherStats(),
@@ -1323,17 +1515,17 @@ TEST_F(JsepPeerConnectionP2PTestClient, GetDtls12Both) {
rtc::SSLStreamAdapter::GetDefaultSslCipherForTest(
rtc::SSL_PROTOCOL_DTLS_12, rtc::KT_DEFAULT)));
- EXPECT_EQ_WAIT(kDefaultSrtpCipher,
+ EXPECT_EQ_WAIT(rtc::SrtpCryptoSuiteToName(kDefaultSrtpCryptoSuite),
initializing_client()->GetSrtpCipherStats(),
kMaxWaitForStatsMs);
- EXPECT_EQ(1, init_observer->GetEnumCounter(
- webrtc::kEnumCounterAudioSrtpCipher,
- rtc::GetSrtpCryptoSuiteFromName(kDefaultSrtpCipher)));
+ EXPECT_EQ(1,
+ init_observer->GetEnumCounter(webrtc::kEnumCounterAudioSrtpCipher,
+ kDefaultSrtpCryptoSuite));
}
// Test that DTLS 1.0 is used if the initiator supports DTLS 1.2 and the
// receiver supports 1.0.
-TEST_F(JsepPeerConnectionP2PTestClient, GetDtls12Init) {
+TEST_F(P2PTestConductor, GetDtls12Init) {
PeerConnectionFactory::Options init_options;
init_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_12;
PeerConnectionFactory::Options recv_options;
@@ -1345,7 +1537,7 @@ TEST_F(JsepPeerConnectionP2PTestClient, GetDtls12Init) {
initializing_client()->pc()->RegisterUMAObserver(init_observer);
LocalP2PTest();
- EXPECT_EQ_WAIT(rtc::SSLStreamAdapter::GetSslCipherSuiteName(
+ EXPECT_EQ_WAIT(rtc::SSLStreamAdapter::SslCipherSuiteToName(
rtc::SSLStreamAdapter::GetDefaultSslCipherForTest(
rtc::SSL_PROTOCOL_DTLS_10, rtc::KT_DEFAULT)),
initializing_client()->GetDtlsCipherStats(),
@@ -1355,17 +1547,17 @@ TEST_F(JsepPeerConnectionP2PTestClient, GetDtls12Init) {
rtc::SSLStreamAdapter::GetDefaultSslCipherForTest(
rtc::SSL_PROTOCOL_DTLS_10, rtc::KT_DEFAULT)));
- EXPECT_EQ_WAIT(kDefaultSrtpCipher,
+ EXPECT_EQ_WAIT(rtc::SrtpCryptoSuiteToName(kDefaultSrtpCryptoSuite),
initializing_client()->GetSrtpCipherStats(),
kMaxWaitForStatsMs);
- EXPECT_EQ(1, init_observer->GetEnumCounter(
- webrtc::kEnumCounterAudioSrtpCipher,
- rtc::GetSrtpCryptoSuiteFromName(kDefaultSrtpCipher)));
+ EXPECT_EQ(1,
+ init_observer->GetEnumCounter(webrtc::kEnumCounterAudioSrtpCipher,
+ kDefaultSrtpCryptoSuite));
}
// Test that DTLS 1.0 is used if the initiator supports DTLS 1.0 and the
// receiver supports 1.2.
-TEST_F(JsepPeerConnectionP2PTestClient, GetDtls12Recv) {
+TEST_F(P2PTestConductor, GetDtls12Recv) {
PeerConnectionFactory::Options init_options;
init_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_10;
PeerConnectionFactory::Options recv_options;
@@ -1377,7 +1569,7 @@ TEST_F(JsepPeerConnectionP2PTestClient, GetDtls12Recv) {
initializing_client()->pc()->RegisterUMAObserver(init_observer);
LocalP2PTest();
- EXPECT_EQ_WAIT(rtc::SSLStreamAdapter::GetSslCipherSuiteName(
+ EXPECT_EQ_WAIT(rtc::SSLStreamAdapter::SslCipherSuiteToName(
rtc::SSLStreamAdapter::GetDefaultSslCipherForTest(
rtc::SSL_PROTOCOL_DTLS_10, rtc::KT_DEFAULT)),
initializing_client()->GetDtlsCipherStats(),
@@ -1387,16 +1579,17 @@ TEST_F(JsepPeerConnectionP2PTestClient, GetDtls12Recv) {
rtc::SSLStreamAdapter::GetDefaultSslCipherForTest(
rtc::SSL_PROTOCOL_DTLS_10, rtc::KT_DEFAULT)));
- EXPECT_EQ_WAIT(kDefaultSrtpCipher,
+ EXPECT_EQ_WAIT(rtc::SrtpCryptoSuiteToName(kDefaultSrtpCryptoSuite),
initializing_client()->GetSrtpCipherStats(),
kMaxWaitForStatsMs);
- EXPECT_EQ(1, init_observer->GetEnumCounter(
- webrtc::kEnumCounterAudioSrtpCipher,
- rtc::GetSrtpCryptoSuiteFromName(kDefaultSrtpCipher)));
+ EXPECT_EQ(1,
+ init_observer->GetEnumCounter(webrtc::kEnumCounterAudioSrtpCipher,
+ kDefaultSrtpCryptoSuite));
}
-// This test sets up a call between two parties with audio, video and data.
-TEST_F(JsepPeerConnectionP2PTestClient, LocalP2PTestDataChannel) {
+// This test sets up a call between two parties with audio, video and an RTP
+// data channel.
+TEST_F(P2PTestConductor, LocalP2PTestRtpDataChannel) {
FakeConstraints setup_constraints;
setup_constraints.SetAllowRtpDataChannels();
ASSERT_TRUE(CreateTestClients(&setup_constraints, &setup_constraints));
@@ -1426,6 +1619,34 @@ TEST_F(JsepPeerConnectionP2PTestClient, LocalP2PTestDataChannel) {
EXPECT_FALSE(receiving_client()->data_observer()->IsOpen());
}
+// This test sets up a call between two parties with audio, video and an SCTP
+// data channel.
+TEST_F(P2PTestConductor, LocalP2PTestSctpDataChannel) {
+ ASSERT_TRUE(CreateTestClients());
+ initializing_client()->CreateDataChannel();
+ LocalP2PTest();
+ ASSERT_TRUE(initializing_client()->data_channel() != nullptr);
+ EXPECT_TRUE_WAIT(receiving_client()->data_channel() != nullptr, kMaxWaitMs);
+ EXPECT_TRUE_WAIT(initializing_client()->data_observer()->IsOpen(),
+ kMaxWaitMs);
+ EXPECT_TRUE_WAIT(receiving_client()->data_observer()->IsOpen(), kMaxWaitMs);
+
+ std::string data = "hello world";
+
+ initializing_client()->data_channel()->Send(DataBuffer(data));
+ EXPECT_EQ_WAIT(data, receiving_client()->data_observer()->last_message(),
+ kMaxWaitMs);
+
+ receiving_client()->data_channel()->Send(DataBuffer(data));
+ EXPECT_EQ_WAIT(data, initializing_client()->data_observer()->last_message(),
+ kMaxWaitMs);
+
+ receiving_client()->data_channel()->Close();
+ EXPECT_TRUE_WAIT(!initializing_client()->data_observer()->IsOpen(),
+ kMaxWaitMs);
+ EXPECT_TRUE_WAIT(!receiving_client()->data_observer()->IsOpen(), kMaxWaitMs);
+}
+
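+// Note on the SCTP test above: it needs no SetAllowRtpDataChannels()
+// constraint because SCTP data channels are negotiated in-band over the
+// DTLS transport, unlike the RTP data channels used in the previous test.
+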
// This test sets up a call between two parties and creates a data channel.
// It verifies that received data is buffered unless an observer has been
// registered.
@@ -1433,7 +1654,7 @@ TEST_F(JsepPeerConnectionP2PTestClient, LocalP2PTestDataChannel) {
// transport has detected that a channel is writable and thus data can be
// received before the data channel state changes to open. That is hard to test
// but the same buffering is used in that case.
-TEST_F(JsepPeerConnectionP2PTestClient, RegisterDataChannelObserver) {
+TEST_F(P2PTestConductor, RegisterDataChannelObserver) {
FakeConstraints setup_constraints;
setup_constraints.SetAllowRtpDataChannels();
ASSERT_TRUE(CreateTestClients(&setup_constraints, &setup_constraints));
@@ -1463,7 +1684,7 @@ TEST_F(JsepPeerConnectionP2PTestClient, RegisterDataChannelObserver) {
// This test sets up a call between two parties with audio and video, but only
// the initiating client supports data.
-TEST_F(JsepPeerConnectionP2PTestClient, LocalP2PTestReceiverDoesntSupportData) {
+TEST_F(P2PTestConductor, LocalP2PTestReceiverDoesntSupportData) {
FakeConstraints setup_constraints_1;
setup_constraints_1.SetAllowRtpDataChannels();
// Must disable DTLS to make negotiation succeed.
@@ -1482,7 +1703,7 @@ TEST_F(JsepPeerConnectionP2PTestClient, LocalP2PTestReceiverDoesntSupportData) {
// This test sets up a call between two parties with audio and video. Once
// audio and video are set up and flowing, a data channel is negotiated.
-TEST_F(JsepPeerConnectionP2PTestClient, AddDataChannelAfterRenegotiation) {
+TEST_F(P2PTestConductor, AddDataChannelAfterRenegotiation) {
FakeConstraints setup_constraints;
setup_constraints.SetAllowRtpDataChannels();
ASSERT_TRUE(CreateTestClients(&setup_constraints, &setup_constraints));
@@ -1501,7 +1722,7 @@ TEST_F(JsepPeerConnectionP2PTestClient, AddDataChannelAfterRenegotiation) {
// This test sets up a Jsep call with an SCTP DataChannel and verifies that
// the negotiation completes without error.
#ifdef HAVE_SCTP
-TEST_F(JsepPeerConnectionP2PTestClient, CreateOfferWithSctpDataChannel) {
+TEST_F(P2PTestConductor, CreateOfferWithSctpDataChannel) {
MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
FakeConstraints constraints;
constraints.SetMandatory(
@@ -1515,7 +1736,7 @@ TEST_F(JsepPeerConnectionP2PTestClient, CreateOfferWithSctpDataChannel) {
// This test sets up a call between two parties with audio and video.
// During the call, the initializing side restarts ICE, and the test verifies
// that new ICE candidates are generated and audio and video can still flow.
-TEST_F(JsepPeerConnectionP2PTestClient, IceRestart) {
+TEST_F(P2PTestConductor, IceRestart) {
ASSERT_TRUE(CreateTestClients());
// Negotiate, wait for ICE completion, and make sure audio and video play.
@@ -1562,17 +1783,69 @@ TEST_F(JsepPeerConnectionP2PTestClient, IceRestart) {
EXPECT_NE(receiver_candidate, receiver_candidate_restart);
}
+// This test sets up a call between two parties with audio and video.
+// It then renegotiates, setting the video m-line to "port 0", and later
+// renegotiates again, re-enabling video.
+TEST_F(P2PTestConductor, LocalP2PTestVideoDisableEnable) {
+ ASSERT_TRUE(CreateTestClients());
+
+ // Do the initial negotiation, which results in video and audio sendonly
+ // m-lines.
+ receiving_client()->set_auto_add_stream(false);
+ initializing_client()->AddMediaStream(true, true);
+ initializing_client()->Negotiate();
+
+ // Negotiate again, disabling the video m-line (receiving client will
+ // set port to 0 due to mandatory "OfferToReceiveVideo: false" constraint).
+ receiving_client()->SetReceiveVideo(false);
+ initializing_client()->Negotiate();
+
+ // Enable video and do negotiation again, making sure video is received
+ // end-to-end.
+ receiving_client()->SetReceiveVideo(true);
+ receiving_client()->AddMediaStream(true, true);
+ LocalP2PTest();
+}
+
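+// Setting an m-line's port to 0 is the standard offer/answer way of
+// rejecting (disabling) that media section (RFC 3264), which is what the
+// test above relies on to turn video off and back on.
+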
// This test sets up a Jsep call between two parties with external
// VideoDecoderFactory.
// TODO(holmer): Disabled due to sometimes crashing on buildbots.
// See issue webrtc/2378.
-TEST_F(JsepPeerConnectionP2PTestClient,
- DISABLED_LocalP2PTestWithVideoDecoderFactory) {
+TEST_F(P2PTestConductor, DISABLED_LocalP2PTestWithVideoDecoderFactory) {
ASSERT_TRUE(CreateTestClients());
EnableVideoDecoderFactory();
LocalP2PTest();
}
+// This tests that if we negotiate after calling CreateSender but before we
+// have a track, then set a track later, frames from the newly-set track are
+// received end-to-end.
+TEST_F(P2PTestConductor, EarlyWarmupTest) {
+ ASSERT_TRUE(CreateTestClients());
+ auto audio_sender =
+ initializing_client()->pc()->CreateSender("audio", "stream_id");
+ auto video_sender =
+ initializing_client()->pc()->CreateSender("video", "stream_id");
+ initializing_client()->Negotiate();
+ // Wait for ICE connection to complete, without any tracks.
+ // Note that the receiving client WILL (in HandleIncomingOffer) create
+ // tracks, so it's only the initiator here that's doing early warmup.
+ ASSERT_TRUE_WAIT(SessionActive(), kMaxWaitForActivationMs);
+ VerifySessionDescriptions();
+ EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionCompleted,
+ initializing_client()->ice_connection_state(),
+ kMaxWaitForFramesMs);
+ EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected,
+ receiving_client()->ice_connection_state(),
+ kMaxWaitForFramesMs);
+ // Now set the tracks, and expect frames to immediately start flowing.
+ EXPECT_TRUE(
+ audio_sender->SetTrack(initializing_client()->CreateLocalAudioTrack("")));
+ EXPECT_TRUE(
+ video_sender->SetTrack(initializing_client()->CreateLocalVideoTrack("")));
+ EXPECT_TRUE_WAIT(FramesNotPending(kEndAudioFrameCount, kEndVideoFrameCount),
+ kMaxWaitForFramesMs);
+}
+
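+// The warmup above works because ICE and DTLS are negotiated per transport,
+// not per track: once the transport is connected, attaching a track with
+// RtpSender::SetTrack() starts media without another offer/answer round.
+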
class IceServerParsingTest : public testing::Test {
public:
// Convenience for parsing a single URL.
@@ -1589,38 +1862,37 @@ class IceServerParsingTest : public testing::Test {
server.username = username;
server.password = password;
servers.push_back(server);
- return webrtc::ParseIceServers(servers, &stun_configurations_,
- &turn_configurations_);
+ return webrtc::ParseIceServers(servers, &stun_servers_, &turn_servers_);
}
protected:
- webrtc::StunConfigurations stun_configurations_;
- webrtc::TurnConfigurations turn_configurations_;
+ cricket::ServerAddresses stun_servers_;
+ std::vector<cricket::RelayServerConfig> turn_servers_;
};
// Make sure all STUN/TURN prefixes are parsed correctly.
TEST_F(IceServerParsingTest, ParseStunPrefixes) {
EXPECT_TRUE(ParseUrl("stun:hostname"));
- EXPECT_EQ(1U, stun_configurations_.size());
- EXPECT_EQ(0U, turn_configurations_.size());
- stun_configurations_.clear();
+ EXPECT_EQ(1U, stun_servers_.size());
+ EXPECT_EQ(0U, turn_servers_.size());
+ stun_servers_.clear();
EXPECT_TRUE(ParseUrl("stuns:hostname"));
- EXPECT_EQ(1U, stun_configurations_.size());
- EXPECT_EQ(0U, turn_configurations_.size());
- stun_configurations_.clear();
+ EXPECT_EQ(1U, stun_servers_.size());
+ EXPECT_EQ(0U, turn_servers_.size());
+ stun_servers_.clear();
EXPECT_TRUE(ParseUrl("turn:hostname"));
- EXPECT_EQ(0U, stun_configurations_.size());
- EXPECT_EQ(1U, turn_configurations_.size());
- EXPECT_FALSE(turn_configurations_[0].secure);
- turn_configurations_.clear();
+ EXPECT_EQ(0U, stun_servers_.size());
+ EXPECT_EQ(1U, turn_servers_.size());
+ EXPECT_FALSE(turn_servers_[0].ports[0].secure);
+ turn_servers_.clear();
EXPECT_TRUE(ParseUrl("turns:hostname"));
- EXPECT_EQ(0U, stun_configurations_.size());
- EXPECT_EQ(1U, turn_configurations_.size());
- EXPECT_TRUE(turn_configurations_[0].secure);
- turn_configurations_.clear();
+ EXPECT_EQ(0U, stun_servers_.size());
+ EXPECT_EQ(1U, turn_servers_.size());
+ EXPECT_TRUE(turn_servers_[0].ports[0].secure);
+ turn_servers_.clear();
// invalid prefixes
EXPECT_FALSE(ParseUrl("stunn:hostname"));
@@ -1632,67 +1904,69 @@ TEST_F(IceServerParsingTest, ParseStunPrefixes) {
TEST_F(IceServerParsingTest, VerifyDefaults) {
// TURNS defaults
EXPECT_TRUE(ParseUrl("turns:hostname"));
- EXPECT_EQ(1U, turn_configurations_.size());
- EXPECT_EQ(5349, turn_configurations_[0].server.port());
- EXPECT_EQ("tcp", turn_configurations_[0].transport_type);
- turn_configurations_.clear();
+ EXPECT_EQ(1U, turn_servers_.size());
+ EXPECT_EQ(5349, turn_servers_[0].ports[0].address.port());
+ EXPECT_EQ(cricket::PROTO_TCP, turn_servers_[0].ports[0].proto);
+ turn_servers_.clear();
// TURN defaults
EXPECT_TRUE(ParseUrl("turn:hostname"));
- EXPECT_EQ(1U, turn_configurations_.size());
- EXPECT_EQ(3478, turn_configurations_[0].server.port());
- EXPECT_EQ("udp", turn_configurations_[0].transport_type);
- turn_configurations_.clear();
+ EXPECT_EQ(1U, turn_servers_.size());
+ EXPECT_EQ(3478, turn_servers_[0].ports[0].address.port());
+ EXPECT_EQ(cricket::PROTO_UDP, turn_servers_[0].ports[0].proto);
+ turn_servers_.clear();
// STUN defaults
EXPECT_TRUE(ParseUrl("stun:hostname"));
- EXPECT_EQ(1U, stun_configurations_.size());
- EXPECT_EQ(3478, stun_configurations_[0].server.port());
- stun_configurations_.clear();
+ EXPECT_EQ(1U, stun_servers_.size());
+ EXPECT_EQ(3478, stun_servers_.begin()->port());
+ stun_servers_.clear();
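+ // (3478 and 5349 are the default STUN/TURN and TLS ports registered by
+ // RFC 5389 and RFC 5766.)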
}
// Check that the 6 combinations of IPv4/IPv6/hostname and with/without port
// can be parsed correctly.
TEST_F(IceServerParsingTest, ParseHostnameAndPort) {
EXPECT_TRUE(ParseUrl("stun:1.2.3.4:1234"));
- EXPECT_EQ(1U, stun_configurations_.size());
- EXPECT_EQ("1.2.3.4", stun_configurations_[0].server.hostname());
- EXPECT_EQ(1234, stun_configurations_[0].server.port());
- stun_configurations_.clear();
+ EXPECT_EQ(1U, stun_servers_.size());
+ EXPECT_EQ("1.2.3.4", stun_servers_.begin()->hostname());
+ EXPECT_EQ(1234, stun_servers_.begin()->port());
+ stun_servers_.clear();
EXPECT_TRUE(ParseUrl("stun:[1:2:3:4:5:6:7:8]:4321"));
- EXPECT_EQ(1U, stun_configurations_.size());
- EXPECT_EQ("1:2:3:4:5:6:7:8", stun_configurations_[0].server.hostname());
- EXPECT_EQ(4321, stun_configurations_[0].server.port());
- stun_configurations_.clear();
+ EXPECT_EQ(1U, stun_servers_.size());
+ EXPECT_EQ("1:2:3:4:5:6:7:8", stun_servers_.begin()->hostname());
+ EXPECT_EQ(4321, stun_servers_.begin()->port());
+ stun_servers_.clear();
EXPECT_TRUE(ParseUrl("stun:hostname:9999"));
- EXPECT_EQ(1U, stun_configurations_.size());
- EXPECT_EQ("hostname", stun_configurations_[0].server.hostname());
- EXPECT_EQ(9999, stun_configurations_[0].server.port());
- stun_configurations_.clear();
+ EXPECT_EQ(1U, stun_servers_.size());
+ EXPECT_EQ("hostname", stun_servers_.begin()->hostname());
+ EXPECT_EQ(9999, stun_servers_.begin()->port());
+ stun_servers_.clear();
EXPECT_TRUE(ParseUrl("stun:1.2.3.4"));
- EXPECT_EQ(1U, stun_configurations_.size());
- EXPECT_EQ("1.2.3.4", stun_configurations_[0].server.hostname());
- EXPECT_EQ(3478, stun_configurations_[0].server.port());
- stun_configurations_.clear();
+ EXPECT_EQ(1U, stun_servers_.size());
+ EXPECT_EQ("1.2.3.4", stun_servers_.begin()->hostname());
+ EXPECT_EQ(3478, stun_servers_.begin()->port());
+ stun_servers_.clear();
EXPECT_TRUE(ParseUrl("stun:[1:2:3:4:5:6:7:8]"));
- EXPECT_EQ(1U, stun_configurations_.size());
- EXPECT_EQ("1:2:3:4:5:6:7:8", stun_configurations_[0].server.hostname());
- EXPECT_EQ(3478, stun_configurations_[0].server.port());
- stun_configurations_.clear();
+ EXPECT_EQ(1U, stun_servers_.size());
+ EXPECT_EQ("1:2:3:4:5:6:7:8", stun_servers_.begin()->hostname());
+ EXPECT_EQ(3478, stun_servers_.begin()->port());
+ stun_servers_.clear();
EXPECT_TRUE(ParseUrl("stun:hostname"));
- EXPECT_EQ(1U, stun_configurations_.size());
- EXPECT_EQ("hostname", stun_configurations_[0].server.hostname());
- EXPECT_EQ(3478, stun_configurations_[0].server.port());
- stun_configurations_.clear();
+ EXPECT_EQ(1U, stun_servers_.size());
+ EXPECT_EQ("hostname", stun_servers_.begin()->hostname());
+ EXPECT_EQ(3478, stun_servers_.begin()->port());
+ stun_servers_.clear();
// Try some invalid hostname:port strings.
EXPECT_FALSE(ParseUrl("stun:hostname:99a99"));
EXPECT_FALSE(ParseUrl("stun:hostname:-1"));
+ EXPECT_FALSE(ParseUrl("stun:hostname:port:more"));
+ EXPECT_FALSE(ParseUrl("stun:hostname:port more"));
EXPECT_FALSE(ParseUrl("stun:hostname:"));
EXPECT_FALSE(ParseUrl("stun:[1:2:3:4:5:6:7:8]junk:1000"));
EXPECT_FALSE(ParseUrl("stun::5555"));
@@ -1702,14 +1976,14 @@ TEST_F(IceServerParsingTest, ParseHostnameAndPort) {
// Test parsing the "?transport=xxx" part of the URL.
TEST_F(IceServerParsingTest, ParseTransport) {
EXPECT_TRUE(ParseUrl("turn:hostname:1234?transport=tcp"));
- EXPECT_EQ(1U, turn_configurations_.size());
- EXPECT_EQ("tcp", turn_configurations_[0].transport_type);
- turn_configurations_.clear();
+ EXPECT_EQ(1U, turn_servers_.size());
+ EXPECT_EQ(cricket::PROTO_TCP, turn_servers_[0].ports[0].proto);
+ turn_servers_.clear();
EXPECT_TRUE(ParseUrl("turn:hostname?transport=udp"));
- EXPECT_EQ(1U, turn_configurations_.size());
- EXPECT_EQ("udp", turn_configurations_[0].transport_type);
- turn_configurations_.clear();
+ EXPECT_EQ(1U, turn_servers_.size());
+ EXPECT_EQ(cricket::PROTO_UDP, turn_servers_[0].ports[0].proto);
+ turn_servers_.clear();
EXPECT_FALSE(ParseUrl("turn:hostname?transport=invalid"));
}
@@ -1717,9 +1991,9 @@ TEST_F(IceServerParsingTest, ParseTransport) {
// Test parsing ICE username contained in URL.
TEST_F(IceServerParsingTest, ParseUsername) {
EXPECT_TRUE(ParseUrl("turn:user@hostname"));
- EXPECT_EQ(1U, turn_configurations_.size());
- EXPECT_EQ("user", turn_configurations_[0].username);
- turn_configurations_.clear();
+ EXPECT_EQ(1U, turn_servers_.size());
+ EXPECT_EQ("user", turn_servers_[0].credentials.username);
+ turn_servers_.clear();
EXPECT_FALSE(ParseUrl("turn:@hostname"));
EXPECT_FALSE(ParseUrl("turn:username@"));
@@ -1728,12 +2002,12 @@ TEST_F(IceServerParsingTest, ParseUsername) {
}
// Test that username and password from IceServer is copied into the resulting
-// TurnConfiguration.
+// RelayServerConfig.
TEST_F(IceServerParsingTest, CopyUsernameAndPasswordFromIceServer) {
EXPECT_TRUE(ParseUrl("turn:hostname", "username", "password"));
- EXPECT_EQ(1U, turn_configurations_.size());
- EXPECT_EQ("username", turn_configurations_[0].username);
- EXPECT_EQ("password", turn_configurations_[0].password);
+ EXPECT_EQ(1U, turn_servers_.size());
+ EXPECT_EQ("username", turn_servers_[0].credentials.username);
+ EXPECT_EQ("password", turn_servers_[0].credentials.password);
}
// Ensure that if a server has multiple URLs, each one is parsed.
@@ -1743,10 +2017,22 @@ TEST_F(IceServerParsingTest, ParseMultipleUrls) {
server.urls.push_back("stun:hostname");
server.urls.push_back("turn:hostname");
servers.push_back(server);
- EXPECT_TRUE(webrtc::ParseIceServers(servers, &stun_configurations_,
- &turn_configurations_));
- EXPECT_EQ(1U, stun_configurations_.size());
- EXPECT_EQ(1U, turn_configurations_.size());
+ EXPECT_TRUE(webrtc::ParseIceServers(servers, &stun_servers_, &turn_servers_));
+ EXPECT_EQ(1U, stun_servers_.size());
+ EXPECT_EQ(1U, turn_servers_.size());
+}
+
+// Ensure that TURN servers are given unique priorities,
+// so that their resulting candidates have unique priorities.
+TEST_F(IceServerParsingTest, TurnServerPrioritiesUnique) {
+ PeerConnectionInterface::IceServers servers;
+ PeerConnectionInterface::IceServer server;
+ server.urls.push_back("turn:hostname");
+ server.urls.push_back("turn:hostname2");
+ servers.push_back(server);
+ EXPECT_TRUE(webrtc::ParseIceServers(servers, &stun_servers_, &turn_servers_));
+ EXPECT_EQ(2U, turn_servers_.size());
+ EXPECT_NE(turn_servers_[0].priority, turn_servers_[1].priority);
}
#endif // if !defined(THREAD_SANITIZER)
diff --git a/talk/app/webrtc/peerconnectionendtoend_unittest.cc b/talk/app/webrtc/peerconnectionendtoend_unittest.cc
index eacedd4eea..1a180317ac 100644
--- a/talk/app/webrtc/peerconnectionendtoend_unittest.cc
+++ b/talk/app/webrtc/peerconnectionendtoend_unittest.cc
@@ -27,6 +27,9 @@
#include "talk/app/webrtc/test/peerconnectiontestwrapper.h"
#include "talk/app/webrtc/test/mockpeerconnectionobservers.h"
+#ifdef WEBRTC_ANDROID
+#include "talk/app/webrtc/test/androidtestinitializer.h"
+#endif
#include "webrtc/base/gunit.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/ssladapter.h"
@@ -50,56 +53,6 @@ namespace {
const size_t kMaxWait = 10000;
-void RemoveLinesFromSdp(const std::string& line_start,
- std::string* sdp) {
- const char kSdpLineEnd[] = "\r\n";
- size_t ssrc_pos = 0;
- while ((ssrc_pos = sdp->find(line_start, ssrc_pos)) !=
- std::string::npos) {
- size_t end_ssrc = sdp->find(kSdpLineEnd, ssrc_pos);
- sdp->erase(ssrc_pos, end_ssrc - ssrc_pos + strlen(kSdpLineEnd));
- }
-}
-
-// Add |newlines| to the |message| after |line|.
-void InjectAfter(const std::string& line,
- const std::string& newlines,
- std::string* message) {
- const std::string tmp = line + newlines;
- rtc::replace_substrs(line.c_str(), line.length(),
- tmp.c_str(), tmp.length(), message);
-}
-
-void Replace(const std::string& line,
- const std::string& newlines,
- std::string* message) {
- rtc::replace_substrs(line.c_str(), line.length(),
- newlines.c_str(), newlines.length(), message);
-}
-
-void UseExternalSdes(std::string* sdp) {
- // Remove current crypto specification.
- RemoveLinesFromSdp("a=crypto", sdp);
- RemoveLinesFromSdp("a=fingerprint", sdp);
- // Add external crypto.
- const char kAudioSdes[] =
- "a=crypto:1 AES_CM_128_HMAC_SHA1_80 "
- "inline:PS1uQCVeeCFCanVmcjkpPywjNWhcYD0mXXtxaVBR\r\n";
- const char kVideoSdes[] =
- "a=crypto:1 AES_CM_128_HMAC_SHA1_80 "
- "inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj\r\n";
- const char kDataSdes[] =
- "a=crypto:1 AES_CM_128_HMAC_SHA1_80 "
- "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj\r\n";
- InjectAfter("a=mid:audio\r\n", kAudioSdes, sdp);
- InjectAfter("a=mid:video\r\n", kVideoSdes, sdp);
- InjectAfter("a=mid:data\r\n", kDataSdes, sdp);
-}
-
-void RemoveBundle(std::string* sdp) {
- RemoveLinesFromSdp("a=group:BUNDLE", sdp);
-}
-
} // namespace
class PeerConnectionEndToEndTest
@@ -114,6 +67,9 @@ class PeerConnectionEndToEndTest
"caller")),
callee_(new rtc::RefCountedObject<PeerConnectionTestWrapper>(
"callee")) {
+#ifdef WEBRTC_ANDROID
+ webrtc::InitializeAndroidObjects();
+#endif
}
void CreatePcs() {
@@ -217,15 +173,20 @@ class PeerConnectionEndToEndTest
DataChannelList callee_signaled_data_channels_;
};
+// Disabled for TSan v2, see
+// https://bugs.chromium.org/p/webrtc/issues/detail?id=4719 for details.
+// Disabled for Mac, see
+// https://bugs.chromium.org/p/webrtc/issues/detail?id=5231 for details.
+#if !defined(THREAD_SANITIZER) && !defined(WEBRTC_MAC)
TEST_F(PeerConnectionEndToEndTest, Call) {
CreatePcs();
GetAndAddUserMedia();
Negotiate();
WaitForCallEstablished();
}
+#endif // if !defined(THREAD_SANITIZER) && !defined(WEBRTC_MAC)
-// Disabled per b/14899892
-TEST_F(PeerConnectionEndToEndTest, DISABLED_CallWithLegacySdp) {
+TEST_F(PeerConnectionEndToEndTest, CallWithLegacySdp) {
FakeConstraints pc_constraints;
pc_constraints.AddMandatory(MediaConstraintsInterface::kEnableDtlsSrtp,
false);
@@ -396,3 +357,30 @@ TEST_F(PeerConnectionEndToEndTest,
CloseDataChannels(caller_dc, callee_signaled_data_channels_, 1);
}
+
+// This tests that if a data channel is closed remotely while not referenced
+// by the application (meaning only the PeerConnection contributes to its
+// reference count), no memory access violation will occur.
+// See: https://code.google.com/p/chromium/issues/detail?id=565048
+TEST_F(PeerConnectionEndToEndTest, CloseDataChannelRemotelyWhileNotReferenced) {
+ MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+
+ CreatePcs();
+
+ webrtc::DataChannelInit init;
+ rtc::scoped_refptr<DataChannelInterface> caller_dc(
+ caller_->CreateDataChannel("data", init));
+
+ Negotiate();
+ WaitForConnection();
+
+ WaitForDataChannelsToOpen(caller_dc, callee_signaled_data_channels_, 0);
+ // This removes the reference to the remote data channel that we hold.
+ callee_signaled_data_channels_.clear();
+ caller_dc->Close();
+ EXPECT_EQ_WAIT(DataChannelInterface::kClosed, caller_dc->state(), kMaxWait);
+
+ // Wait for a bit longer so the remote data channel will receive the
+ // close message and be destroyed.
+ rtc::Thread::Current()->ProcessMessages(100);
+}
diff --git a/talk/app/webrtc/peerconnectionfactory.cc b/talk/app/webrtc/peerconnectionfactory.cc
index b46b4b68d3..c58f88cb41 100644
--- a/talk/app/webrtc/peerconnectionfactory.cc
+++ b/talk/app/webrtc/peerconnectionfactory.cc
@@ -27,6 +27,8 @@
#include "talk/app/webrtc/peerconnectionfactory.h"
+#include <utility>
+
#include "talk/app/webrtc/audiotrack.h"
#include "talk/app/webrtc/localaudiosource.h"
#include "talk/app/webrtc/mediastream.h"
@@ -35,7 +37,6 @@
#include "talk/app/webrtc/peerconnection.h"
#include "talk/app/webrtc/peerconnectionfactoryproxy.h"
#include "talk/app/webrtc/peerconnectionproxy.h"
-#include "talk/app/webrtc/portallocatorfactory.h"
#include "talk/app/webrtc/videosource.h"
#include "talk/app/webrtc/videosourceproxy.h"
#include "talk/app/webrtc/videotrack.h"
@@ -44,6 +45,8 @@
#include "talk/media/webrtc/webrtcvideoencoderfactory.h"
#include "webrtc/base/bind.h"
#include "webrtc/modules/audio_device/include/audio_device.h"
+#include "webrtc/p2p/base/basicpacketsocketfactory.h"
+#include "webrtc/p2p/client/basicportallocator.h"
namespace webrtc {
@@ -153,11 +156,13 @@ PeerConnectionFactory::PeerConnectionFactory(
PeerConnectionFactory::~PeerConnectionFactory() {
RTC_DCHECK(signaling_thread_->IsCurrent());
channel_manager_.reset(nullptr);
- default_allocator_factory_ = nullptr;
// Make sure |worker_thread_| and |signaling_thread_| outlive
- // |dtls_identity_store_|.
+ // |dtls_identity_store_|, |default_socket_factory_| and
+ // |default_network_manager_|.
dtls_identity_store_ = nullptr;
+ default_socket_factory_ = nullptr;
+ default_network_manager_ = nullptr;
if (owns_ptrs_) {
if (wraps_current_thread_)
@@ -170,9 +175,16 @@ bool PeerConnectionFactory::Initialize() {
RTC_DCHECK(signaling_thread_->IsCurrent());
rtc::InitRandom(rtc::Time());
- default_allocator_factory_ = PortAllocatorFactory::Create(worker_thread_);
- if (!default_allocator_factory_)
+ default_network_manager_.reset(new rtc::BasicNetworkManager());
+ if (!default_network_manager_) {
return false;
+ }
+
+ default_socket_factory_.reset(
+ new rtc::BasicPacketSocketFactory(worker_thread_));
+ if (!default_socket_factory_) {
+ return false;
+ }
// TODO: Need to make sure only one VoE is created inside
// WebRtcMediaEngine.
@@ -208,8 +220,8 @@ PeerConnectionFactory::CreateVideoSource(
cricket::VideoCapturer* capturer,
const MediaConstraintsInterface* constraints) {
RTC_DCHECK(signaling_thread_->IsCurrent());
- rtc::scoped_refptr<VideoSource> source(
- VideoSource::Create(channel_manager_.get(), capturer, constraints));
+ rtc::scoped_refptr<VideoSource> source(VideoSource::Create(
+ channel_manager_.get(), capturer, constraints, false));
return VideoSourceProxy::Create(signaling_thread_, source);
}
@@ -237,11 +249,10 @@ rtc::scoped_refptr<PeerConnectionInterface>
PeerConnectionFactory::CreatePeerConnection(
const PeerConnectionInterface::RTCConfiguration& configuration,
const MediaConstraintsInterface* constraints,
- PortAllocatorFactoryInterface* allocator_factory,
+ rtc::scoped_ptr<cricket::PortAllocator> allocator,
rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
PeerConnectionObserver* observer) {
RTC_DCHECK(signaling_thread_->IsCurrent());
- RTC_DCHECK(allocator_factory || default_allocator_factory_);
if (!dtls_identity_store.get()) {
// Because |pc|->Initialize takes ownership of the store we need a new
@@ -251,19 +262,17 @@ PeerConnectionFactory::CreatePeerConnection(
new DtlsIdentityStoreWrapper(dtls_identity_store_));
}
- PortAllocatorFactoryInterface* chosen_allocator_factory =
- allocator_factory ? allocator_factory : default_allocator_factory_.get();
- chosen_allocator_factory->SetNetworkIgnoreMask(options_.network_ignore_mask);
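+ // If the caller did not supply an allocator, fall back to a
+ // BasicPortAllocator built on the factory's default network manager and
+ // socket factory.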
+ if (!allocator) {
+ allocator.reset(new cricket::BasicPortAllocator(
+ default_network_manager_.get(), default_socket_factory_.get()));
+ }
+ allocator->SetNetworkIgnoreMask(options_.network_ignore_mask);
rtc::scoped_refptr<PeerConnection> pc(
new rtc::RefCountedObject<PeerConnection>(this));
- if (!pc->Initialize(
- configuration,
- constraints,
- chosen_allocator_factory,
- dtls_identity_store.Pass(),
- observer)) {
- return NULL;
+ if (!pc->Initialize(configuration, constraints, std::move(allocator),
+ std::move(dtls_identity_store), observer)) {
+ return nullptr;
}
return PeerConnectionProxy::Create(signaling_thread(), pc);
}
@@ -289,8 +298,7 @@ rtc::scoped_refptr<AudioTrackInterface>
PeerConnectionFactory::CreateAudioTrack(const std::string& id,
AudioSourceInterface* source) {
RTC_DCHECK(signaling_thread_->IsCurrent());
- rtc::scoped_refptr<AudioTrackInterface> track(
- AudioTrack::Create(id, source));
+ rtc::scoped_refptr<AudioTrackInterface> track(AudioTrack::Create(id, source));
return AudioTrackProxy::Create(signaling_thread_, track);
}
diff --git a/talk/app/webrtc/peerconnectionfactory.h b/talk/app/webrtc/peerconnectionfactory.h
index af4117a9d3..8b274e118c 100644
--- a/talk/app/webrtc/peerconnectionfactory.h
+++ b/talk/app/webrtc/peerconnectionfactory.h
@@ -39,6 +39,11 @@
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/base/thread.h"
+namespace rtc {
+class BasicNetworkManager;
+class BasicPacketSocketFactory;
+}
+
namespace webrtc {
typedef rtc::RefCountedObject<DtlsIdentityStoreImpl>
@@ -50,14 +55,12 @@ class PeerConnectionFactory : public PeerConnectionFactoryInterface {
options_ = options;
}
- // webrtc::PeerConnectionFactoryInterface override;
- rtc::scoped_refptr<PeerConnectionInterface>
- CreatePeerConnection(
- const PeerConnectionInterface::RTCConfiguration& configuration,
- const MediaConstraintsInterface* constraints,
- PortAllocatorFactoryInterface* allocator_factory,
- rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
- PeerConnectionObserver* observer) override;
+ rtc::scoped_refptr<PeerConnectionInterface> CreatePeerConnection(
+ const PeerConnectionInterface::RTCConfiguration& configuration,
+ const MediaConstraintsInterface* constraints,
+ rtc::scoped_ptr<cricket::PortAllocator> allocator,
+ rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
+ PeerConnectionObserver* observer) override;
bool Initialize();
@@ -107,7 +110,6 @@ class PeerConnectionFactory : public PeerConnectionFactoryInterface {
rtc::Thread* signaling_thread_;
rtc::Thread* worker_thread_;
Options options_;
- rtc::scoped_refptr<PortAllocatorFactoryInterface> default_allocator_factory_;
// External Audio device used for audio playback.
rtc::scoped_refptr<AudioDeviceModule> default_adm_;
rtc::scoped_ptr<cricket::ChannelManager> channel_manager_;
@@ -119,6 +121,8 @@ class PeerConnectionFactory : public PeerConnectionFactoryInterface {
// injected any. In that case, video engine will use the internal SW decoder.
rtc::scoped_ptr<cricket::WebRtcVideoDecoderFactory>
video_decoder_factory_;
+ rtc::scoped_ptr<rtc::BasicNetworkManager> default_network_manager_;
+ rtc::scoped_ptr<rtc::BasicPacketSocketFactory> default_socket_factory_;
rtc::scoped_refptr<RefCountedDtlsIdentityStore> dtls_identity_store_;
};
diff --git a/talk/app/webrtc/peerconnectionfactory_unittest.cc b/talk/app/webrtc/peerconnectionfactory_unittest.cc
index f1d5353abd..9fb013b54f 100644
--- a/talk/app/webrtc/peerconnectionfactory_unittest.cc
+++ b/talk/app/webrtc/peerconnectionfactory_unittest.cc
@@ -26,10 +26,13 @@
*/
#include <string>
+#include <utility>
-#include "talk/app/webrtc/fakeportallocatorfactory.h"
#include "talk/app/webrtc/mediastreaminterface.h"
#include "talk/app/webrtc/peerconnectionfactory.h"
+#ifdef WEBRTC_ANDROID
+#include "talk/app/webrtc/test/androidtestinitializer.h"
+#endif
#include "talk/app/webrtc/test/fakedtlsidentitystore.h"
#include "talk/app/webrtc/test/fakevideotrackrenderer.h"
#include "talk/app/webrtc/videosourceinterface.h"
@@ -39,6 +42,7 @@
#include "webrtc/base/gunit.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/thread.h"
+#include "webrtc/p2p/client/fakeportallocator.h"
using webrtc::DataChannelInterface;
using webrtc::DtlsIdentityStoreInterface;
@@ -47,17 +51,11 @@ using webrtc::MediaStreamInterface;
using webrtc::PeerConnectionFactoryInterface;
using webrtc::PeerConnectionInterface;
using webrtc::PeerConnectionObserver;
-using webrtc::PortAllocatorFactoryInterface;
using webrtc::VideoSourceInterface;
using webrtc::VideoTrackInterface;
namespace {
-typedef std::vector<PortAllocatorFactoryInterface::StunConfiguration>
- StunConfigurations;
-typedef std::vector<PortAllocatorFactoryInterface::TurnConfiguration>
- TurnConfigurations;
-
static const char kStunIceServer[] = "stun:stun.l.google.com:19302";
static const char kTurnIceServer[] = "turn:test%40hello.com@test.com:1234";
static const char kTurnIceServerWithTransport[] =
@@ -103,6 +101,9 @@ class NullPeerConnectionObserver : public PeerConnectionObserver {
class PeerConnectionFactoryTest : public testing::Test {
void SetUp() {
+#ifdef WEBRTC_ANDROID
+ webrtc::InitializeAndroidObjects();
+#endif
factory_ = webrtc::CreatePeerConnectionFactory(rtc::Thread::Current(),
rtc::Thread::Current(),
NULL,
@@ -110,57 +111,58 @@ class PeerConnectionFactoryTest : public testing::Test {
NULL);
ASSERT_TRUE(factory_.get() != NULL);
- allocator_factory_ = webrtc::FakePortAllocatorFactory::Create();
+ port_allocator_.reset(
+ new cricket::FakePortAllocator(rtc::Thread::Current(), nullptr));
+ raw_port_allocator_ = port_allocator_.get();
}
protected:
- void VerifyStunConfigurations(StunConfigurations stun_config) {
- webrtc::FakePortAllocatorFactory* allocator =
- static_cast<webrtc::FakePortAllocatorFactory*>(
- allocator_factory_.get());
- ASSERT_TRUE(allocator != NULL);
- EXPECT_EQ(stun_config.size(), allocator->stun_configs().size());
- for (size_t i = 0; i < stun_config.size(); ++i) {
- EXPECT_EQ(stun_config[i].server.ToString(),
- allocator->stun_configs()[i].server.ToString());
- }
+ void VerifyStunServers(cricket::ServerAddresses stun_servers) {
+ EXPECT_EQ(stun_servers, raw_port_allocator_->stun_servers());
}
- void VerifyTurnConfigurations(TurnConfigurations turn_config) {
- webrtc::FakePortAllocatorFactory* allocator =
- static_cast<webrtc::FakePortAllocatorFactory*>(
- allocator_factory_.get());
- ASSERT_TRUE(allocator != NULL);
- EXPECT_EQ(turn_config.size(), allocator->turn_configs().size());
- for (size_t i = 0; i < turn_config.size(); ++i) {
- EXPECT_EQ(turn_config[i].server.ToString(),
- allocator->turn_configs()[i].server.ToString());
- EXPECT_EQ(turn_config[i].username, allocator->turn_configs()[i].username);
- EXPECT_EQ(turn_config[i].password, allocator->turn_configs()[i].password);
- EXPECT_EQ(turn_config[i].transport_type,
- allocator->turn_configs()[i].transport_type);
+ void VerifyTurnServers(std::vector<cricket::RelayServerConfig> turn_servers) {
+ EXPECT_EQ(turn_servers.size(), raw_port_allocator_->turn_servers().size());
+ for (size_t i = 0; i < turn_servers.size(); ++i) {
+ ASSERT_EQ(1u, turn_servers[i].ports.size());
+ EXPECT_EQ(1u, raw_port_allocator_->turn_servers()[i].ports.size());
+ EXPECT_EQ(
+ turn_servers[i].ports[0].address.ToString(),
+ raw_port_allocator_->turn_servers()[i].ports[0].address.ToString());
+ EXPECT_EQ(turn_servers[i].ports[0].proto,
+ raw_port_allocator_->turn_servers()[i].ports[0].proto);
+ EXPECT_EQ(turn_servers[i].credentials.username,
+ raw_port_allocator_->turn_servers()[i].credentials.username);
+ EXPECT_EQ(turn_servers[i].credentials.password,
+ raw_port_allocator_->turn_servers()[i].credentials.password);
}
}
rtc::scoped_refptr<PeerConnectionFactoryInterface> factory_;
NullPeerConnectionObserver observer_;
- rtc::scoped_refptr<PortAllocatorFactoryInterface> allocator_factory_;
+ rtc::scoped_ptr<cricket::FakePortAllocator> port_allocator_;
+ // Since the PC owns the port allocator after it's been initialized,
+ // this should only be used when known to be safe.
+ cricket::FakePortAllocator* raw_port_allocator_;
};
// Verify creation of PeerConnection using internal ADM, video factory and
// internal libjingle threads.
TEST(PeerConnectionFactoryTestInternal, CreatePCUsingInternalModules) {
+#ifdef WEBRTC_ANDROID
+ webrtc::InitializeAndroidObjects();
+#endif
+
rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
webrtc::CreatePeerConnectionFactory());
NullPeerConnectionObserver observer;
- webrtc::PeerConnectionInterface::IceServers servers;
+ webrtc::PeerConnectionInterface::RTCConfiguration config;
rtc::scoped_ptr<FakeDtlsIdentityStore> dtls_identity_store(
new FakeDtlsIdentityStore());
- rtc::scoped_refptr<PeerConnectionInterface> pc(
- factory->CreatePeerConnection(
- servers, nullptr, nullptr, dtls_identity_store.Pass(), &observer));
+ rtc::scoped_refptr<PeerConnectionInterface> pc(factory->CreatePeerConnection(
+ config, nullptr, nullptr, std::move(dtls_identity_store), &observer));
EXPECT_TRUE(pc.get() != nullptr);
}
@@ -180,25 +182,22 @@ TEST_F(PeerConnectionFactoryTest, CreatePCUsingIceServers) {
config.servers.push_back(ice_server);
rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store(
new FakeDtlsIdentityStore());
- rtc::scoped_refptr<PeerConnectionInterface> pc(
- factory_->CreatePeerConnection(config, nullptr,
- allocator_factory_.get(),
- dtls_identity_store.Pass(),
- &observer_));
- EXPECT_TRUE(pc.get() != NULL);
- StunConfigurations stun_configs;
- webrtc::PortAllocatorFactoryInterface::StunConfiguration stun1(
- "stun.l.google.com", 19302);
- stun_configs.push_back(stun1);
- VerifyStunConfigurations(stun_configs);
- TurnConfigurations turn_configs;
- webrtc::PortAllocatorFactoryInterface::TurnConfiguration turn1(
- "test.com", 1234, "test@hello.com", kTurnPassword, "udp", false);
- turn_configs.push_back(turn1);
- webrtc::PortAllocatorFactoryInterface::TurnConfiguration turn2(
- "hello.com", kDefaultStunPort, "test", kTurnPassword, "tcp", false);
- turn_configs.push_back(turn2);
- VerifyTurnConfigurations(turn_configs);
+ rtc::scoped_refptr<PeerConnectionInterface> pc(factory_->CreatePeerConnection(
+ config, nullptr, std::move(port_allocator_),
+ std::move(dtls_identity_store), &observer_));
+ ASSERT_TRUE(pc.get() != NULL);
+ cricket::ServerAddresses stun_servers;
+ rtc::SocketAddress stun1("stun.l.google.com", 19302);
+ stun_servers.insert(stun1);
+ VerifyStunServers(stun_servers);
+ std::vector<cricket::RelayServerConfig> turn_servers;
+ cricket::RelayServerConfig turn1("test.com", 1234, "test@hello.com",
+ kTurnPassword, cricket::PROTO_UDP, false);
+ turn_servers.push_back(turn1);
+ cricket::RelayServerConfig turn2("hello.com", kDefaultStunPort, "test",
+ kTurnPassword, cricket::PROTO_TCP, false);
+ turn_servers.push_back(turn2);
+ VerifyTurnServers(turn_servers);
}
// This test verifies creation of PeerConnection with valid STUN and TURN
@@ -213,63 +212,22 @@ TEST_F(PeerConnectionFactoryTest, CreatePCUsingIceServersUrls) {
config.servers.push_back(ice_server);
rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store(
new FakeDtlsIdentityStore());
- rtc::scoped_refptr<PeerConnectionInterface> pc(
- factory_->CreatePeerConnection(config, nullptr,
- allocator_factory_.get(),
- dtls_identity_store.Pass(),
- &observer_));
- EXPECT_TRUE(pc.get() != NULL);
- StunConfigurations stun_configs;
- webrtc::PortAllocatorFactoryInterface::StunConfiguration stun1(
- "stun.l.google.com", 19302);
- stun_configs.push_back(stun1);
- VerifyStunConfigurations(stun_configs);
- TurnConfigurations turn_configs;
- webrtc::PortAllocatorFactoryInterface::TurnConfiguration turn1(
- "test.com", 1234, "test@hello.com", kTurnPassword, "udp", false);
- turn_configs.push_back(turn1);
- webrtc::PortAllocatorFactoryInterface::TurnConfiguration turn2(
- "hello.com", kDefaultStunPort, "test", kTurnPassword, "tcp", false);
- turn_configs.push_back(turn2);
- VerifyTurnConfigurations(turn_configs);
-}
-
-// This test verifies creation of PeerConnection with valid STUN and TURN
-// configuration. Also verifies the URL's parsed correctly as expected.
-// This version doesn't use RTCConfiguration.
-// TODO(mallinath) - Remove this method after clients start using RTCConfig.
-TEST_F(PeerConnectionFactoryTest, CreatePCUsingIceServersOldSignature) {
- webrtc::PeerConnectionInterface::IceServers ice_servers;
- webrtc::PeerConnectionInterface::IceServer ice_server;
- ice_server.uri = kStunIceServer;
- ice_servers.push_back(ice_server);
- ice_server.uri = kTurnIceServer;
- ice_server.password = kTurnPassword;
- ice_servers.push_back(ice_server);
- ice_server.uri = kTurnIceServerWithTransport;
- ice_server.password = kTurnPassword;
- ice_servers.push_back(ice_server);
- rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store(
- new FakeDtlsIdentityStore());
- rtc::scoped_refptr<PeerConnectionInterface> pc(
- factory_->CreatePeerConnection(ice_servers, nullptr,
- allocator_factory_.get(),
- dtls_identity_store.Pass(),
- &observer_));
- EXPECT_TRUE(pc.get() != NULL);
- StunConfigurations stun_configs;
- webrtc::PortAllocatorFactoryInterface::StunConfiguration stun1(
- "stun.l.google.com", 19302);
- stun_configs.push_back(stun1);
- VerifyStunConfigurations(stun_configs);
- TurnConfigurations turn_configs;
- webrtc::PortAllocatorFactoryInterface::TurnConfiguration turn1(
- "test.com", 1234, "test@hello.com", kTurnPassword, "udp", false);
- turn_configs.push_back(turn1);
- webrtc::PortAllocatorFactoryInterface::TurnConfiguration turn2(
- "hello.com", kDefaultStunPort, "test", kTurnPassword, "tcp", false);
- turn_configs.push_back(turn2);
- VerifyTurnConfigurations(turn_configs);
+ rtc::scoped_refptr<PeerConnectionInterface> pc(factory_->CreatePeerConnection(
+ config, nullptr, std::move(port_allocator_),
+ std::move(dtls_identity_store), &observer_));
+ ASSERT_TRUE(pc.get() != NULL);
+ cricket::ServerAddresses stun_servers;
+ rtc::SocketAddress stun1("stun.l.google.com", 19302);
+ stun_servers.insert(stun1);
+ VerifyStunServers(stun_servers);
+ std::vector<cricket::RelayServerConfig> turn_servers;
+ cricket::RelayServerConfig turn1("test.com", 1234, "test@hello.com",
+ kTurnPassword, cricket::PROTO_UDP, false);
+ turn_servers.push_back(turn1);
+ cricket::RelayServerConfig turn2("hello.com", kDefaultStunPort, "test",
+ kTurnPassword, cricket::PROTO_TCP, false);
+ turn_servers.push_back(turn2);
+ VerifyTurnServers(turn_servers);
}
TEST_F(PeerConnectionFactoryTest, CreatePCUsingNoUsernameInUri) {
@@ -283,17 +241,15 @@ TEST_F(PeerConnectionFactoryTest, CreatePCUsingNoUsernameInUri) {
config.servers.push_back(ice_server);
rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store(
new FakeDtlsIdentityStore());
- rtc::scoped_refptr<PeerConnectionInterface> pc(
- factory_->CreatePeerConnection(config, nullptr,
- allocator_factory_.get(),
- dtls_identity_store.Pass(),
- &observer_));
- EXPECT_TRUE(pc.get() != NULL);
- TurnConfigurations turn_configs;
- webrtc::PortAllocatorFactoryInterface::TurnConfiguration turn(
- "test.com", 1234, kTurnUsername, kTurnPassword, "udp", false);
- turn_configs.push_back(turn);
- VerifyTurnConfigurations(turn_configs);
+ rtc::scoped_refptr<PeerConnectionInterface> pc(factory_->CreatePeerConnection(
+ config, nullptr, std::move(port_allocator_),
+ std::move(dtls_identity_store), &observer_));
+ ASSERT_TRUE(pc.get() != NULL);
+ std::vector<cricket::RelayServerConfig> turn_servers;
+ cricket::RelayServerConfig turn("test.com", 1234, kTurnUsername,
+ kTurnPassword, cricket::PROTO_UDP, false);
+ turn_servers.push_back(turn);
+ VerifyTurnServers(turn_servers);
}
// This test verifies the PeerConnection created properly with TURN url which
@@ -306,17 +262,15 @@ TEST_F(PeerConnectionFactoryTest, CreatePCUsingTurnUrlWithTransportParam) {
config.servers.push_back(ice_server);
rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store(
new FakeDtlsIdentityStore());
- rtc::scoped_refptr<PeerConnectionInterface> pc(
- factory_->CreatePeerConnection(config, nullptr,
- allocator_factory_.get(),
- dtls_identity_store.Pass(),
- &observer_));
- EXPECT_TRUE(pc.get() != NULL);
- TurnConfigurations turn_configs;
- webrtc::PortAllocatorFactoryInterface::TurnConfiguration turn(
- "hello.com", kDefaultStunPort, "test", kTurnPassword, "tcp", false);
- turn_configs.push_back(turn);
- VerifyTurnConfigurations(turn_configs);
+ rtc::scoped_refptr<PeerConnectionInterface> pc(factory_->CreatePeerConnection(
+ config, nullptr, std::move(port_allocator_),
+ std::move(dtls_identity_store), &observer_));
+ ASSERT_TRUE(pc.get() != NULL);
+ std::vector<cricket::RelayServerConfig> turn_servers;
+ cricket::RelayServerConfig turn("hello.com", kDefaultStunPort, "test",
+ kTurnPassword, cricket::PROTO_TCP, false);
+ turn_servers.push_back(turn);
+ VerifyTurnServers(turn_servers);
}
TEST_F(PeerConnectionFactoryTest, CreatePCUsingSecureTurnUrl) {
@@ -333,25 +287,23 @@ TEST_F(PeerConnectionFactoryTest, CreatePCUsingSecureTurnUrl) {
config.servers.push_back(ice_server);
rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store(
new FakeDtlsIdentityStore());
- rtc::scoped_refptr<PeerConnectionInterface> pc(
- factory_->CreatePeerConnection(config, nullptr,
- allocator_factory_.get(),
- dtls_identity_store.Pass(),
- &observer_));
- EXPECT_TRUE(pc.get() != NULL);
- TurnConfigurations turn_configs;
- webrtc::PortAllocatorFactoryInterface::TurnConfiguration turn1(
- "hello.com", kDefaultStunTlsPort, "test", kTurnPassword, "tcp", true);
- turn_configs.push_back(turn1);
+ rtc::scoped_refptr<PeerConnectionInterface> pc(factory_->CreatePeerConnection(
+ config, nullptr, std::move(port_allocator_),
+ std::move(dtls_identity_store), &observer_));
+ ASSERT_TRUE(pc.get() != NULL);
+ std::vector<cricket::RelayServerConfig> turn_servers;
+ cricket::RelayServerConfig turn1("hello.com", kDefaultStunTlsPort, "test",
+ kTurnPassword, cricket::PROTO_TCP, true);
+ turn_servers.push_back(turn1);
// TURNS with a transport param should default to tcp.
- webrtc::PortAllocatorFactoryInterface::TurnConfiguration turn2(
- "hello.com", 443, "test_no_transport", kTurnPassword, "tcp", true);
- turn_configs.push_back(turn2);
- webrtc::PortAllocatorFactoryInterface::TurnConfiguration turn3(
- "hello.com", kDefaultStunTlsPort, "test_no_transport",
- kTurnPassword, "tcp", true);
- turn_configs.push_back(turn3);
- VerifyTurnConfigurations(turn_configs);
+ cricket::RelayServerConfig turn2("hello.com", 443, "test_no_transport",
+ kTurnPassword, cricket::PROTO_TCP, true);
+ turn_servers.push_back(turn2);
+ cricket::RelayServerConfig turn3("hello.com", kDefaultStunTlsPort,
+ "test_no_transport", kTurnPassword,
+ cricket::PROTO_TCP, true);
+ turn_servers.push_back(turn3);
+ VerifyTurnServers(turn_servers);
}
TEST_F(PeerConnectionFactoryTest, CreatePCUsingIPLiteralAddress) {
@@ -370,32 +322,26 @@ TEST_F(PeerConnectionFactoryTest, CreatePCUsingIPLiteralAddress) {
config.servers.push_back(ice_server);
rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store(
new FakeDtlsIdentityStore());
- rtc::scoped_refptr<PeerConnectionInterface> pc(
- factory_->CreatePeerConnection(config, nullptr,
- allocator_factory_.get(),
- dtls_identity_store.Pass(),
- &observer_));
- EXPECT_TRUE(pc.get() != NULL);
- StunConfigurations stun_configs;
- webrtc::PortAllocatorFactoryInterface::StunConfiguration stun1(
- "1.2.3.4", 1234);
- stun_configs.push_back(stun1);
- webrtc::PortAllocatorFactoryInterface::StunConfiguration stun2(
- "1.2.3.4", 3478);
- stun_configs.push_back(stun2); // Default port
- webrtc::PortAllocatorFactoryInterface::StunConfiguration stun3(
- "2401:fa00:4::", 1234);
- stun_configs.push_back(stun3);
- webrtc::PortAllocatorFactoryInterface::StunConfiguration stun4(
- "2401:fa00:4::", 3478);
- stun_configs.push_back(stun4); // Default port
- VerifyStunConfigurations(stun_configs);
+ rtc::scoped_refptr<PeerConnectionInterface> pc(factory_->CreatePeerConnection(
+ config, nullptr, std::move(port_allocator_),
+ std::move(dtls_identity_store), &observer_));
+ ASSERT_TRUE(pc.get() != NULL);
+ cricket::ServerAddresses stun_servers;
+ rtc::SocketAddress stun1("1.2.3.4", 1234);
+ stun_servers.insert(stun1);
+ rtc::SocketAddress stun2("1.2.3.4", 3478);
+ stun_servers.insert(stun2); // Default port
+ rtc::SocketAddress stun3("2401:fa00:4::", 1234);
+ stun_servers.insert(stun3);
+ rtc::SocketAddress stun4("2401:fa00:4::", 3478);
+ stun_servers.insert(stun4); // Default port
+ VerifyStunServers(stun_servers);
- TurnConfigurations turn_configs;
- webrtc::PortAllocatorFactoryInterface::TurnConfiguration turn1(
- "2401:fa00:4::", 1234, "test", kTurnPassword, "udp", false);
- turn_configs.push_back(turn1);
- VerifyTurnConfigurations(turn_configs);
+ std::vector<cricket::RelayServerConfig> turn_servers;
+ cricket::RelayServerConfig turn1("2401:fa00:4::", 1234, "test", kTurnPassword,
+ cricket::PROTO_UDP, false);
+ turn_servers.push_back(turn1);
+ VerifyTurnServers(turn_servers);
}
// This test verifies the captured stream is rendered locally using a
diff --git a/talk/app/webrtc/peerconnectionfactoryproxy.h b/talk/app/webrtc/peerconnectionfactoryproxy.h
index 5e924df3a1..714ce6b7eb 100644
--- a/talk/app/webrtc/peerconnectionfactoryproxy.h
+++ b/talk/app/webrtc/peerconnectionfactoryproxy.h
@@ -29,6 +29,7 @@
#define TALK_APP_WEBRTC_PEERCONNECTIONFACTORYPROXY_H_
#include <string>
+#include <utility>
#include "talk/app/webrtc/peerconnectioninterface.h"
#include "talk/app/webrtc/proxy.h"
@@ -38,17 +39,17 @@ namespace webrtc {
BEGIN_PROXY_MAP(PeerConnectionFactory)
PROXY_METHOD1(void, SetOptions, const Options&)
- // Can't use PROXY_METHOD5 because scoped_ptr must be Pass()ed.
+ // Can't use PROXY_METHOD5 because scoped_ptr must be moved.
// TODO(tommi,hbos): Use of templates to support scoped_ptr?
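+ // The scoped_ptrs are release()d to raw pointers here because rtc::Bind
+ // cannot forward move-only types; CreatePeerConnection_ot re-wraps them
+ // on the owner thread, so ownership still ends up in scoped_ptrs.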
rtc::scoped_refptr<PeerConnectionInterface> CreatePeerConnection(
const PeerConnectionInterface::RTCConfiguration& a1,
const MediaConstraintsInterface* a2,
- PortAllocatorFactoryInterface* a3,
+ rtc::scoped_ptr<cricket::PortAllocator> a3,
rtc::scoped_ptr<DtlsIdentityStoreInterface> a4,
PeerConnectionObserver* a5) override {
return owner_thread_->Invoke<rtc::scoped_refptr<PeerConnectionInterface>>(
rtc::Bind(&PeerConnectionFactoryProxy::CreatePeerConnection_ot, this,
- a1, a2, a3, a4.release(), a5));
+ a1, a2, a3.release(), a4.release(), a5));
}
PROXY_METHOD1(rtc::scoped_refptr<MediaStreamInterface>,
CreateLocalMediaStream, const std::string&)
@@ -70,11 +71,13 @@ BEGIN_PROXY_MAP(PeerConnectionFactory)
rtc::scoped_refptr<PeerConnectionInterface> CreatePeerConnection_ot(
const PeerConnectionInterface::RTCConfiguration& a1,
const MediaConstraintsInterface* a2,
- PortAllocatorFactoryInterface* a3,
+ cricket::PortAllocator* a3,
DtlsIdentityStoreInterface* a4,
PeerConnectionObserver* a5) {
+ rtc::scoped_ptr<cricket::PortAllocator> ptr_a3(a3);
rtc::scoped_ptr<DtlsIdentityStoreInterface> ptr_a4(a4);
- return c_->CreatePeerConnection(a1, a2, a3, ptr_a4.Pass(), a5);
+ return c_->CreatePeerConnection(a1, a2, std::move(ptr_a3),
+ std::move(ptr_a4), a5);
}
END_PROXY()
diff --git a/talk/app/webrtc/peerconnectioninterface.h b/talk/app/webrtc/peerconnectioninterface.h
index 77caa9d78b..b9afbad204 100644
--- a/talk/app/webrtc/peerconnectioninterface.h
+++ b/talk/app/webrtc/peerconnectioninterface.h
@@ -69,6 +69,7 @@
#define TALK_APP_WEBRTC_PEERCONNECTIONINTERFACE_H_
#include <string>
+#include <utility>
#include <vector>
#include "talk/app/webrtc/datachannelinterface.h"
@@ -86,6 +87,7 @@
#include "webrtc/base/rtccertificate.h"
#include "webrtc/base/sslstreamadapter.h"
#include "webrtc/base/socketaddress.h"
+#include "webrtc/p2p/base/portallocator.h"
namespace rtc {
class SSLIdentity;
@@ -93,7 +95,6 @@ class Thread;
}
namespace cricket {
-class PortAllocator;
class WebRtcVideoDecoderFactory;
class WebRtcVideoEncoderFactory;
}
@@ -248,28 +249,27 @@ class PeerConnectionInterface : public rtc::RefCountInterface {
// TODO(pthatcher): Rename this ice_servers, but update Chromium
// at the same time.
IceServers servers;
- // A localhost candidate is signaled whenever a candidate with the any
- // address is allocated.
- bool enable_localhost_ice_candidate;
BundlePolicy bundle_policy;
RtcpMuxPolicy rtcp_mux_policy;
TcpCandidatePolicy tcp_candidate_policy;
int audio_jitter_buffer_max_packets;
bool audio_jitter_buffer_fast_accelerate;
- int ice_connection_receiving_timeout;
+ int ice_connection_receiving_timeout; // ms
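+ // Interval (ms) at which ICE pings candidate pairs kept alive only as
+ // backups rather than actively used.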
+ int ice_backup_candidate_pair_ping_interval; // ms
ContinualGatheringPolicy continual_gathering_policy;
std::vector<rtc::scoped_refptr<rtc::RTCCertificate>> certificates;
-
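+ // If set, decoded video frames are handed to the renderer as soon as they
+ // arrive, bypassing the pre-render smoothing buffer.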
+ bool disable_prerenderer_smoothing;
RTCConfiguration()
: type(kAll),
- enable_localhost_ice_candidate(false),
bundle_policy(kBundlePolicyBalanced),
rtcp_mux_policy(kRtcpMuxPolicyNegotiate),
tcp_candidate_policy(kTcpCandidatePolicyEnabled),
audio_jitter_buffer_max_packets(kAudioJitterBufferMaxPackets),
audio_jitter_buffer_fast_accelerate(false),
ice_connection_receiving_timeout(kUndefined),
- continual_gathering_policy(GATHER_ONCE) {}
+ ice_backup_candidate_pair_ping_interval(kUndefined),
+ continual_gathering_policy(GATHER_ONCE),
+ disable_prerenderer_smoothing(false) {}
};
struct RTCOfferAnswerOptions {
@@ -337,6 +337,15 @@ class PeerConnectionInterface : public rtc::RefCountInterface {
AudioTrackInterface* track) = 0;
// TODO(deadbeef): Make these pure virtual once all subclasses implement them.
+ // |kind| must be "audio" or "video".
+ // |stream_id| is used to populate the msid attribute; if empty, one will
+ // be generated automatically.
+ virtual rtc::scoped_refptr<RtpSenderInterface> CreateSender(
+ const std::string& kind,
+ const std::string& stream_id) {
+ return rtc::scoped_refptr<RtpSenderInterface>();
+ }
+
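+ // A minimal warmup sketch for CreateSender() above (assuming a
+ // PeerConnection |pc| and an |audio_track| created elsewhere; the names
+ // are illustrative):
+ //   auto sender = pc->CreateSender("audio", "stream_id");
+ //   ... complete offer/answer and ICE ...
+ //   sender->SetTrack(audio_track);  // media flows without renegotiation
+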
virtual std::vector<rtc::scoped_refptr<RtpSenderInterface>> GetSenders()
const {
return std::vector<rtc::scoped_refptr<RtpSenderInterface>>();
@@ -480,51 +489,6 @@ class PeerConnectionObserver {
~PeerConnectionObserver() {}
};
-// Factory class used for creating cricket::PortAllocator that is used
-// for ICE negotiation.
-class PortAllocatorFactoryInterface : public rtc::RefCountInterface {
- public:
- struct StunConfiguration {
- StunConfiguration(const std::string& address, int port)
- : server(address, port) {}
- // STUN server address and port.
- rtc::SocketAddress server;
- };
-
- struct TurnConfiguration {
- TurnConfiguration(const std::string& address,
- int port,
- const std::string& username,
- const std::string& password,
- const std::string& transport_type,
- bool secure)
- : server(address, port),
- username(username),
- password(password),
- transport_type(transport_type),
- secure(secure) {}
- rtc::SocketAddress server;
- std::string username;
- std::string password;
- std::string transport_type;
- bool secure;
- };
-
- virtual cricket::PortAllocator* CreatePortAllocator(
- const std::vector<StunConfiguration>& stun_servers,
- const std::vector<TurnConfiguration>& turn_configurations) = 0;
-
- // TODO(phoglund): Make pure virtual when Chrome's factory implements this.
- // After this method is called, the port allocator should consider loopback
- // network interfaces as well.
- virtual void SetNetworkIgnoreMask(int network_ignore_mask) {
- }
-
- protected:
- PortAllocatorFactoryInterface() {}
- ~PortAllocatorFactoryInterface() {}
-};
-
// PeerConnectionFactoryInterface is the factory interface use for creating
// PeerConnection, MediaStream and media tracks.
// PeerConnectionFactoryInterface will create required libjingle threads,
@@ -532,19 +496,18 @@ class PortAllocatorFactoryInterface : public rtc::RefCountInterface {
// If an application decides to provide its own threads and network
// implementation of these classes it should use the alternate
// CreatePeerConnectionFactory method which accepts threads as input and use the
-// CreatePeerConnection version that takes a PortAllocatorFactoryInterface as
+// CreatePeerConnection version that takes a PortAllocator as an
// argument.
class PeerConnectionFactoryInterface : public rtc::RefCountInterface {
public:
class Options {
public:
- Options() :
- disable_encryption(false),
- disable_sctp_data_channels(false),
- disable_network_monitor(false),
- network_ignore_mask(rtc::kDefaultNetworkIgnoreMask),
- ssl_max_version(rtc::SSL_PROTOCOL_DTLS_10) {
- }
+ Options()
+ : disable_encryption(false),
+ disable_sctp_data_channels(false),
+ disable_network_monitor(false),
+ network_ignore_mask(rtc::kDefaultNetworkIgnoreMask),
+ ssl_max_version(rtc::SSL_PROTOCOL_DTLS_12) {}
bool disable_encryption;
bool disable_sctp_data_channels;
bool disable_network_monitor;
@@ -562,31 +525,12 @@ class PeerConnectionFactoryInterface : public rtc::RefCountInterface {
virtual void SetOptions(const Options& options) = 0;
- virtual rtc::scoped_refptr<PeerConnectionInterface>
- CreatePeerConnection(
- const PeerConnectionInterface::RTCConfiguration& configuration,
- const MediaConstraintsInterface* constraints,
- PortAllocatorFactoryInterface* allocator_factory,
- rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
- PeerConnectionObserver* observer) = 0;
-
- // TODO(hbos): Remove below version after clients are updated to above method.
- // In latest W3C WebRTC draft, PC constructor will take RTCConfiguration,
- // and not IceServers. RTCConfiguration is made up of ice servers and
- // ice transport type.
- // http://dev.w3.org/2011/webrtc/editor/webrtc.html
- inline rtc::scoped_refptr<PeerConnectionInterface>
- CreatePeerConnection(
- const PeerConnectionInterface::IceServers& servers,
- const MediaConstraintsInterface* constraints,
- PortAllocatorFactoryInterface* allocator_factory,
- rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
- PeerConnectionObserver* observer) {
- PeerConnectionInterface::RTCConfiguration rtc_config;
- rtc_config.servers = servers;
- return CreatePeerConnection(rtc_config, constraints, allocator_factory,
- dtls_identity_store.Pass(), observer);
- }
+ virtual rtc::scoped_refptr<PeerConnectionInterface> CreatePeerConnection(
+ const PeerConnectionInterface::RTCConfiguration& configuration,
+ const MediaConstraintsInterface* constraints,
+ rtc::scoped_ptr<cricket::PortAllocator> allocator,
+ rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
+ PeerConnectionObserver* observer) = 0;
virtual rtc::scoped_refptr<MediaStreamInterface>
CreateLocalMediaStream(const std::string& label) = 0;
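
For context on the new entry point above: allocator ownership now sits with the caller instead of behind a factory. Below is a minimal caller-side sketch, assuming the application wires up its allocator the same way the removed PortAllocatorFactory did (BasicNetworkManager, BasicPacketSocketFactory, BasicPortAllocator); the helper name and STUN URI are illustrative, not part of this patch.

#include <utility>

#include "talk/app/webrtc/peerconnectioninterface.h"
#include "webrtc/base/network.h"
#include "webrtc/p2p/base/basicpacketsocketfactory.h"
#include "webrtc/p2p/client/basicportallocator.h"

// Sketch only: a caller migrating to the new CreatePeerConnection overload.
rtc::scoped_refptr<webrtc::PeerConnectionInterface> CreatePeerConnectionSketch(
    webrtc::PeerConnectionFactoryInterface* factory,
    rtc::BasicNetworkManager* network_manager,
    rtc::BasicPacketSocketFactory* socket_factory,
    webrtc::PeerConnectionObserver* observer) {
  webrtc::PeerConnectionInterface::RTCConfiguration config;
  webrtc::PeerConnectionInterface::IceServer server;
  server.uri = "stun:stun.example.org";  // Illustrative STUN server.
  config.servers.push_back(server);
  // The caller constructs the allocator and passes ownership in; the ICE
  // servers in |config| are applied to it inside CreatePeerConnection.
  cricket::ServerAddresses stun_hosts;  // Empty; servers come from |config|.
  rtc::scoped_ptr<cricket::PortAllocator> allocator(
      new cricket::BasicPortAllocator(network_manager, socket_factory,
                                      stun_hosts));
  return factory->CreatePeerConnection(config, nullptr /* constraints */,
                                       std::move(allocator),
                                       nullptr /* dtls_identity_store */,
                                       observer);
}
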
diff --git a/talk/app/webrtc/peerconnectioninterface_unittest.cc b/talk/app/webrtc/peerconnectioninterface_unittest.cc
index 63163fd651..c3789b7dd8 100644
--- a/talk/app/webrtc/peerconnectioninterface_unittest.cc
+++ b/talk/app/webrtc/peerconnectioninterface_unittest.cc
@@ -26,9 +26,9 @@
*/
#include <string>
+#include <utility>
#include "talk/app/webrtc/audiotrack.h"
-#include "talk/app/webrtc/fakeportallocatorfactory.h"
#include "talk/app/webrtc/jsepsessiondescription.h"
#include "talk/app/webrtc/mediastream.h"
#include "talk/app/webrtc/mediastreaminterface.h"
@@ -37,6 +37,9 @@
#include "talk/app/webrtc/rtpreceiverinterface.h"
#include "talk/app/webrtc/rtpsenderinterface.h"
#include "talk/app/webrtc/streamcollection.h"
+#ifdef WEBRTC_ANDROID
+#include "talk/app/webrtc/test/androidtestinitializer.h"
+#endif
#include "talk/app/webrtc/test/fakeconstraints.h"
#include "talk/app/webrtc/test/fakedtlsidentitystore.h"
#include "talk/app/webrtc/test/mockpeerconnectionobservers.h"
@@ -52,6 +55,7 @@
#include "webrtc/base/sslstreamadapter.h"
#include "webrtc/base/stringutils.h"
#include "webrtc/base/thread.h"
+#include "webrtc/p2p/client/fakeportallocator.h"
static const char kStreamLabel1[] = "local_stream_1";
static const char kStreamLabel2[] = "local_stream_2";
@@ -258,7 +262,6 @@ using webrtc::AudioTrackInterface;
using webrtc::DataBuffer;
using webrtc::DataChannelInterface;
using webrtc::FakeConstraints;
-using webrtc::FakePortAllocatorFactory;
using webrtc::IceCandidateInterface;
using webrtc::MediaConstraintsInterface;
using webrtc::MediaStream;
@@ -270,7 +273,6 @@ using webrtc::MockSetSessionDescriptionObserver;
using webrtc::MockStatsObserver;
using webrtc::PeerConnectionInterface;
using webrtc::PeerConnectionObserver;
-using webrtc::PortAllocatorFactoryInterface;
using webrtc::RtpReceiverInterface;
using webrtc::RtpSenderInterface;
using webrtc::SdpParseError;
@@ -515,6 +517,12 @@ class MockPeerConnectionObserver : public PeerConnectionObserver {
class PeerConnectionInterfaceTest : public testing::Test {
protected:
+ PeerConnectionInterfaceTest() {
+#ifdef WEBRTC_ANDROID
+ webrtc::InitializeAndroidObjects();
+#endif
+ }
+
virtual void SetUp() {
pc_factory_ = webrtc::CreatePeerConnectionFactory(
rtc::Thread::Current(), rtc::Thread::Current(), NULL, NULL,
@@ -533,15 +541,17 @@ class PeerConnectionInterfaceTest : public testing::Test {
void CreatePeerConnection(const std::string& uri,
const std::string& password,
webrtc::MediaConstraintsInterface* constraints) {
+ PeerConnectionInterface::RTCConfiguration config;
PeerConnectionInterface::IceServer server;
- PeerConnectionInterface::IceServers servers;
if (!uri.empty()) {
server.uri = uri;
server.password = password;
- servers.push_back(server);
+ config.servers.push_back(server);
}
- port_allocator_factory_ = FakePortAllocatorFactory::Create();
+ rtc::scoped_ptr<cricket::FakePortAllocator> port_allocator(
+ new cricket::FakePortAllocator(rtc::Thread::Current(), nullptr));
+ port_allocator_ = port_allocator.get();
// DTLS does not work in a loopback call, so it is disabled for most of the
// tests in this file. We only create a FakeDtlsIdentityStore if the test
@@ -562,52 +572,47 @@ class PeerConnectionInterfaceTest : public testing::Test {
nullptr) && dtls) {
dtls_identity_store.reset(new FakeDtlsIdentityStore());
}
- pc_ = pc_factory_->CreatePeerConnection(servers, constraints,
- port_allocator_factory_.get(),
- dtls_identity_store.Pass(),
- &observer_);
+ pc_ = pc_factory_->CreatePeerConnection(
+ config, constraints, std::move(port_allocator),
+ std::move(dtls_identity_store), &observer_);
ASSERT_TRUE(pc_.get() != NULL);
observer_.SetPeerConnectionInterface(pc_.get());
EXPECT_EQ(PeerConnectionInterface::kStable, observer_.state_);
}
void CreatePeerConnectionExpectFail(const std::string& uri) {
+ PeerConnectionInterface::RTCConfiguration config;
PeerConnectionInterface::IceServer server;
- PeerConnectionInterface::IceServers servers;
server.uri = uri;
- servers.push_back(server);
+ config.servers.push_back(server);
- scoped_ptr<webrtc::DtlsIdentityStoreInterface> dtls_identity_store;
- port_allocator_factory_ = FakePortAllocatorFactory::Create();
scoped_refptr<PeerConnectionInterface> pc;
- pc = pc_factory_->CreatePeerConnection(
- servers, nullptr, port_allocator_factory_.get(),
- dtls_identity_store.Pass(), &observer_);
- ASSERT_EQ(nullptr, pc);
+ pc = pc_factory_->CreatePeerConnection(config, nullptr, nullptr, nullptr,
+ &observer_);
+ EXPECT_EQ(nullptr, pc);
}
void CreatePeerConnectionWithDifferentConfigurations() {
CreatePeerConnection(kStunAddressOnly, "", NULL);
- EXPECT_EQ(1u, port_allocator_factory_->stun_configs().size());
- EXPECT_EQ(0u, port_allocator_factory_->turn_configs().size());
- EXPECT_EQ("address",
- port_allocator_factory_->stun_configs()[0].server.hostname());
+ EXPECT_EQ(1u, port_allocator_->stun_servers().size());
+ EXPECT_EQ(0u, port_allocator_->turn_servers().size());
+ EXPECT_EQ("address", port_allocator_->stun_servers().begin()->hostname());
EXPECT_EQ(kDefaultStunPort,
- port_allocator_factory_->stun_configs()[0].server.port());
+ port_allocator_->stun_servers().begin()->port());
CreatePeerConnectionExpectFail(kStunInvalidPort);
CreatePeerConnectionExpectFail(kStunAddressPortAndMore1);
CreatePeerConnectionExpectFail(kStunAddressPortAndMore2);
CreatePeerConnection(kTurnIceServerUri, kTurnPassword, NULL);
- EXPECT_EQ(0u, port_allocator_factory_->stun_configs().size());
- EXPECT_EQ(1u, port_allocator_factory_->turn_configs().size());
+ EXPECT_EQ(0u, port_allocator_->stun_servers().size());
+ EXPECT_EQ(1u, port_allocator_->turn_servers().size());
EXPECT_EQ(kTurnUsername,
- port_allocator_factory_->turn_configs()[0].username);
+ port_allocator_->turn_servers()[0].credentials.username);
EXPECT_EQ(kTurnPassword,
- port_allocator_factory_->turn_configs()[0].password);
+ port_allocator_->turn_servers()[0].credentials.password);
EXPECT_EQ(kTurnHostname,
- port_allocator_factory_->turn_configs()[0].server.hostname());
+ port_allocator_->turn_servers()[0].ports[0].address.hostname());
}
void ReleasePeerConnection() {
@@ -926,7 +931,7 @@ class PeerConnectionInterfaceTest : public testing::Test {
ASSERT_TRUE(stream->AddTrack(video_track));
}
- scoped_refptr<FakePortAllocatorFactory> port_allocator_factory_;
+ cricket::FakePortAllocator* port_allocator_ = nullptr;
scoped_refptr<webrtc::PeerConnectionFactoryInterface> pc_factory_;
scoped_refptr<PeerConnectionInterface> pc_;
MockPeerConnectionObserver observer_;
@@ -1156,6 +1161,64 @@ TEST_F(PeerConnectionInterfaceTest, SsrcInOfferAnswer) {
EXPECT_NE(audio_ssrc, video_ssrc);
}
+// Test that it's possible to call AddTrack on a MediaStream after adding
+// the stream to a PeerConnection.
+// TODO(deadbeef): Remove this test once this behavior is no longer supported.
+TEST_F(PeerConnectionInterfaceTest, AddTrackAfterAddStream) {
+ CreatePeerConnection();
+ // Create audio stream and add to PeerConnection.
+ AddVoiceStream(kStreamLabel1);
+ MediaStreamInterface* stream = pc_->local_streams()->at(0);
+
+ // Add video track to the audio-only stream.
+ scoped_refptr<VideoTrackInterface> video_track(
+ pc_factory_->CreateVideoTrack("video_label", nullptr));
+ stream->AddTrack(video_track.get());
+
+ scoped_ptr<SessionDescriptionInterface> offer;
+ ASSERT_TRUE(DoCreateOffer(offer.use(), nullptr));
+
+ const cricket::MediaContentDescription* video_desc =
+ cricket::GetFirstVideoContentDescription(offer->description());
+ EXPECT_TRUE(video_desc != nullptr);
+}
+
+// Test that it's possible to call RemoveTrack on a MediaStream after adding
+// the stream to a PeerConnection.
+// TODO(deadbeef): Remove this test once this behavior is no longer supported.
+TEST_F(PeerConnectionInterfaceTest, RemoveTrackAfterAddStream) {
+ CreatePeerConnection();
+ // Create audio/video stream and add to PeerConnection.
+ AddAudioVideoStream(kStreamLabel1, "audio_label", "video_label");
+ MediaStreamInterface* stream = pc_->local_streams()->at(0);
+
+ // Remove the video track.
+ stream->RemoveTrack(stream->GetVideoTracks()[0]);
+
+ scoped_ptr<SessionDescriptionInterface> offer;
+ ASSERT_TRUE(DoCreateOffer(offer.use(), nullptr));
+
+ const cricket::MediaContentDescription* video_desc =
+ cricket::GetFirstVideoContentDescription(offer->description());
+ EXPECT_TRUE(video_desc == nullptr);
+}
+
+// Test creating a sender with a stream ID, and ensure the ID is populated
+// in the offer.
+TEST_F(PeerConnectionInterfaceTest, CreateSenderWithStream) {
+ CreatePeerConnection();
+ pc_->CreateSender("video", kStreamLabel1);
+
+ scoped_ptr<SessionDescriptionInterface> offer;
+ ASSERT_TRUE(DoCreateOffer(offer.use(), nullptr));
+
+ const cricket::MediaContentDescription* video_desc =
+ cricket::GetFirstVideoContentDescription(offer->description());
+ ASSERT_TRUE(video_desc != nullptr);
+ ASSERT_EQ(1u, video_desc->streams().size());
+ EXPECT_EQ(kStreamLabel1, video_desc->streams()[0].sync_label);
+}
+
// Test that we can specify a certain track that we want statistics about.
TEST_F(PeerConnectionInterfaceTest, GetStatsForSpecificTrack) {
InitiateCall();
@@ -1660,6 +1723,22 @@ TEST_F(PeerConnectionInterfaceTest, CreateSubsequentInactiveOffer) {
ASSERT_EQ(cricket::MD_INACTIVE, audio_desc->direction());
}
+// Test that we can use SetConfiguration to change the ICE servers of the
+// PortAllocator.
+TEST_F(PeerConnectionInterfaceTest, SetConfigurationChangesIceServers) {
+ CreatePeerConnection();
+
+ PeerConnectionInterface::RTCConfiguration config;
+ PeerConnectionInterface::IceServer server;
+ server.uri = "stun:test_hostname";
+ config.servers.push_back(server);
+ EXPECT_TRUE(pc_->SetConfiguration(config));
+
+ EXPECT_EQ(1u, port_allocator_->stun_servers().size());
+ EXPECT_EQ("test_hostname",
+ port_allocator_->stun_servers().begin()->hostname());
+}
+
// Test that PeerConnection::Close changes the states to closed and all remote
// tracks change state to ended.
TEST_F(PeerConnectionInterfaceTest, CloseAndTestStreamsAndStates) {
@@ -1977,6 +2056,28 @@ TEST_F(PeerConnectionInterfaceTest, SdpWithMsidDontCreatesDefaultStream) {
EXPECT_EQ(0u, observer_.remote_streams()->count());
}
+// This tests that when setting a new description, the old default tracks are
+// not destroyed and recreated.
+// See: https://bugs.chromium.org/p/webrtc/issues/detail?id=5250
+TEST_F(PeerConnectionInterfaceTest, DefaultTracksNotDestroyedAndRecreated) {
+ FakeConstraints constraints;
+ constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+ true);
+ CreatePeerConnection(&constraints);
+ CreateAndSetRemoteOffer(kSdpStringWithoutStreamsAudioOnly);
+
+ ASSERT_EQ(1u, observer_.remote_streams()->count());
+ MediaStreamInterface* remote_stream = observer_.remote_streams()->at(0);
+ ASSERT_EQ(1u, remote_stream->GetAudioTracks().size());
+
+ // Set the track to "disabled", then set a new description and ensure the
+ // track is still disabled, which ensures it hasn't been recreated.
+ remote_stream->GetAudioTracks()[0]->set_enabled(false);
+ CreateAndSetRemoteOffer(kSdpStringWithoutStreamsAudioOnly);
+ ASSERT_EQ(1u, remote_stream->GetAudioTracks().size());
+ EXPECT_FALSE(remote_stream->GetAudioTracks()[0]->enabled());
+}
+
// This tests that a default MediaStream is not created if a remote session
// description is updated to not have any MediaStreams.
TEST_F(PeerConnectionInterfaceTest, VerifyDefaultStreamIsNotCreated) {
@@ -2020,8 +2121,10 @@ TEST_F(PeerConnectionInterfaceTest, LocalDescriptionChanged) {
EXPECT_TRUE(ContainsSender(senders, kVideoTracks[1]));
// Remove an audio and video track.
+ pc_->RemoveStream(reference_collection_->at(0));
rtc::scoped_ptr<SessionDescriptionInterface> desc_2;
CreateSessionDescriptionAndReference(1, 1, desc_2.accept());
+ pc_->AddStream(reference_collection_->at(0));
EXPECT_TRUE(DoSetLocalDescription(desc_2.release()));
senders = pc_->GetSenders();
EXPECT_EQ(2u, senders.size());
@@ -2220,7 +2323,9 @@ TEST(CreateSessionOptionsTest, GetDefaultMediaSessionOptionsForOffer) {
EXPECT_FALSE(options.has_video());
EXPECT_TRUE(options.bundle_enabled);
EXPECT_TRUE(options.vad_enabled);
- EXPECT_FALSE(options.transport_options.ice_restart);
+ EXPECT_FALSE(options.audio_transport_options.ice_restart);
+ EXPECT_FALSE(options.video_transport_options.ice_restart);
+ EXPECT_FALSE(options.data_transport_options.ice_restart);
}
// Test that a correct MediaSessionOptions is created for an offer if
@@ -2255,18 +2360,22 @@ TEST(CreateSessionOptionsTest,
// Test that a correct MediaSessionOptions is created to restart ice if
// IceRestart is set. It also tests that subsequent MediaSessionOptions don't
-// have |transport_options.ice_restart| set.
+// have |audio_transport_options.ice_restart| etc. set.
TEST(CreateSessionOptionsTest, GetMediaSessionOptionsForOfferWithIceRestart) {
RTCOfferAnswerOptions rtc_options;
rtc_options.ice_restart = true;
cricket::MediaSessionOptions options;
EXPECT_TRUE(ConvertRtcOptionsForOffer(rtc_options, &options));
- EXPECT_TRUE(options.transport_options.ice_restart);
+ EXPECT_TRUE(options.audio_transport_options.ice_restart);
+ EXPECT_TRUE(options.video_transport_options.ice_restart);
+ EXPECT_TRUE(options.data_transport_options.ice_restart);
rtc_options = RTCOfferAnswerOptions();
EXPECT_TRUE(ConvertRtcOptionsForOffer(rtc_options, &options));
- EXPECT_FALSE(options.transport_options.ice_restart);
+ EXPECT_FALSE(options.audio_transport_options.ice_restart);
+ EXPECT_FALSE(options.video_transport_options.ice_restart);
+ EXPECT_FALSE(options.data_transport_options.ice_restart);
}
// Test that the MediaConstraints in an answer don't affect if audio and video
diff --git a/talk/app/webrtc/peerconnectionproxy.h b/talk/app/webrtc/peerconnectionproxy.h
index d207fbbdd8..3c983d73c9 100644
--- a/talk/app/webrtc/peerconnectionproxy.h
+++ b/talk/app/webrtc/peerconnectionproxy.h
@@ -43,6 +43,10 @@ BEGIN_PROXY_MAP(PeerConnection)
PROXY_METHOD1(void, RemoveStream, MediaStreamInterface*)
PROXY_METHOD1(rtc::scoped_refptr<DtmfSenderInterface>,
CreateDtmfSender, AudioTrackInterface*)
+ PROXY_METHOD2(rtc::scoped_refptr<RtpSenderInterface>,
+ CreateSender,
+ const std::string&,
+ const std::string&)
PROXY_CONSTMETHOD0(std::vector<rtc::scoped_refptr<RtpSenderInterface>>,
GetSenders)
PROXY_CONSTMETHOD0(std::vector<rtc::scoped_refptr<RtpReceiverInterface>>,
diff --git a/talk/app/webrtc/portallocatorfactory.cc b/talk/app/webrtc/portallocatorfactory.cc
index bd6caccc80..64d714cd50 100644
--- a/talk/app/webrtc/portallocatorfactory.cc
+++ b/talk/app/webrtc/portallocatorfactory.cc
@@ -1,6 +1,6 @@
/*
* libjingle
- * Copyright 2004--2011 Google Inc.
+ * Copyright 2011 Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
@@ -24,69 +24,7 @@
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
+// TODO(deadbeef): Remove this file once chromium build files no longer
+// reference it.
#include "talk/app/webrtc/portallocatorfactory.h"
-
-#include "webrtc/p2p/base/basicpacketsocketfactory.h"
-#include "webrtc/p2p/client/basicportallocator.h"
-#include "webrtc/base/logging.h"
-#include "webrtc/base/network.h"
-#include "webrtc/base/thread.h"
-
-namespace webrtc {
-
-using rtc::scoped_ptr;
-
-rtc::scoped_refptr<PortAllocatorFactoryInterface>
-PortAllocatorFactory::Create(
- rtc::Thread* worker_thread) {
- rtc::RefCountedObject<PortAllocatorFactory>* allocator =
- new rtc::RefCountedObject<PortAllocatorFactory>(worker_thread);
- return allocator;
-}
-
-PortAllocatorFactory::PortAllocatorFactory(rtc::Thread* worker_thread)
- : network_manager_(new rtc::BasicNetworkManager()),
- socket_factory_(new rtc::BasicPacketSocketFactory(worker_thread)) {
-}
-
-PortAllocatorFactory::~PortAllocatorFactory() {}
-
-void PortAllocatorFactory::SetNetworkIgnoreMask(int network_ignore_mask) {
- network_manager_->set_network_ignore_mask(network_ignore_mask);
-}
-
-cricket::PortAllocator* PortAllocatorFactory::CreatePortAllocator(
- const std::vector<StunConfiguration>& stun,
- const std::vector<TurnConfiguration>& turn) {
- cricket::ServerAddresses stun_hosts;
- typedef std::vector<StunConfiguration>::const_iterator StunIt;
- for (StunIt stun_it = stun.begin(); stun_it != stun.end(); ++stun_it) {
- stun_hosts.insert(stun_it->server);
- }
-
- scoped_ptr<cricket::BasicPortAllocator> allocator(
- new cricket::BasicPortAllocator(
- network_manager_.get(), socket_factory_.get(), stun_hosts));
-
- for (size_t i = 0; i < turn.size(); ++i) {
- cricket::RelayCredentials credentials(turn[i].username, turn[i].password);
- cricket::RelayServerConfig relay_server(cricket::RELAY_TURN);
- cricket::ProtocolType protocol;
- if (cricket::StringToProto(turn[i].transport_type.c_str(), &protocol)) {
- relay_server.ports.push_back(cricket::ProtocolAddress(
- turn[i].server, protocol, turn[i].secure));
- relay_server.credentials = credentials;
- // First in the list gets highest priority.
- relay_server.priority = static_cast<int>(turn.size() - i - 1);
- allocator->AddRelay(relay_server);
- } else {
- LOG(LS_WARNING) << "Ignoring TURN server " << turn[i].server << ". "
- << "Reason= Incorrect " << turn[i].transport_type
- << " transport parameter.";
- }
- }
- return allocator.release();
-}
-
-} // namespace webrtc
diff --git a/talk/app/webrtc/portallocatorfactory.h b/talk/app/webrtc/portallocatorfactory.h
index 83376d0b84..bb6cf4741f 100644
--- a/talk/app/webrtc/portallocatorfactory.h
+++ b/talk/app/webrtc/portallocatorfactory.h
@@ -24,49 +24,10 @@
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-
-// This file defines the default implementation of
-// PortAllocatorFactoryInterface.
-// This implementation creates instances of cricket::HTTPPortAllocator and uses
-// the BasicNetworkManager and BasicPacketSocketFactory.
+// TODO(deadbeef): Remove this file once chromium build files no longer
+// reference it.
#ifndef TALK_APP_WEBRTC_PORTALLOCATORFACTORY_H_
#define TALK_APP_WEBRTC_PORTALLOCATORFACTORY_H_
-#include "talk/app/webrtc/peerconnectioninterface.h"
-#include "webrtc/base/scoped_ptr.h"
-
-namespace cricket {
-class PortAllocator;
-}
-
-namespace rtc {
-class BasicNetworkManager;
-class BasicPacketSocketFactory;
-}
-
-namespace webrtc {
-
-class PortAllocatorFactory : public PortAllocatorFactoryInterface {
- public:
- static rtc::scoped_refptr<PortAllocatorFactoryInterface> Create(
- rtc::Thread* worker_thread);
-
- virtual cricket::PortAllocator* CreatePortAllocator(
- const std::vector<StunConfiguration>& stun,
- const std::vector<TurnConfiguration>& turn);
-
- virtual void SetNetworkIgnoreMask(int network_ignore_mask);
-
- protected:
- explicit PortAllocatorFactory(rtc::Thread* worker_thread);
- ~PortAllocatorFactory();
-
- private:
- rtc::scoped_ptr<rtc::BasicNetworkManager> network_manager_;
- rtc::scoped_ptr<rtc::BasicPacketSocketFactory> socket_factory_;
-};
-
-} // namespace webrtc
-
#endif // TALK_APP_WEBRTC_PORTALLOCATORFACTORY_H_
diff --git a/talk/app/webrtc/remoteaudiosource.cc b/talk/app/webrtc/remoteaudiosource.cc
index 41f3d8798a..e904dd9192 100644
--- a/talk/app/webrtc/remoteaudiosource.cc
+++ b/talk/app/webrtc/remoteaudiosource.cc
@@ -29,44 +29,148 @@
#include <algorithm>
#include <functional>
+#include <utility>
+#include "talk/app/webrtc/mediastreamprovider.h"
+#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
+#include "webrtc/base/thread.h"
namespace webrtc {
-rtc::scoped_refptr<RemoteAudioSource> RemoteAudioSource::Create() {
- return new rtc::RefCountedObject<RemoteAudioSource>();
+class RemoteAudioSource::MessageHandler : public rtc::MessageHandler {
+ public:
+ explicit MessageHandler(RemoteAudioSource* source) : source_(source) {}
+
+ private:
+ ~MessageHandler() override {}
+
+ void OnMessage(rtc::Message* msg) override {
+ source_->OnMessage(msg);
+ delete this;
+ }
+
+ const rtc::scoped_refptr<RemoteAudioSource> source_;
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(MessageHandler);
+};
+
+class RemoteAudioSource::Sink : public AudioSinkInterface {
+ public:
+ explicit Sink(RemoteAudioSource* source) : source_(source) {}
+ ~Sink() override { source_->OnAudioProviderGone(); }
+
+ private:
+ void OnData(const AudioSinkInterface::Data& audio) override {
+ if (source_)
+ source_->OnData(audio);
+ }
+
+ const rtc::scoped_refptr<RemoteAudioSource> source_;
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(Sink);
+};
+
+rtc::scoped_refptr<RemoteAudioSource> RemoteAudioSource::Create(
+ uint32_t ssrc,
+ AudioProviderInterface* provider) {
+ rtc::scoped_refptr<RemoteAudioSource> ret(
+ new rtc::RefCountedObject<RemoteAudioSource>());
+ ret->Initialize(ssrc, provider);
+ return ret;
}
-RemoteAudioSource::RemoteAudioSource() {
+RemoteAudioSource::RemoteAudioSource()
+ : main_thread_(rtc::Thread::Current()),
+ state_(MediaSourceInterface::kLive) {
+ RTC_DCHECK(main_thread_);
}
RemoteAudioSource::~RemoteAudioSource() {
- ASSERT(audio_observers_.empty());
+ RTC_DCHECK(main_thread_->IsCurrent());
+ RTC_DCHECK(audio_observers_.empty());
+ RTC_DCHECK(sinks_.empty());
+}
+
+void RemoteAudioSource::Initialize(uint32_t ssrc,
+ AudioProviderInterface* provider) {
+ RTC_DCHECK(main_thread_->IsCurrent());
+ // To make sure we always get notified when the provider goes out of scope,
+ // we register for callbacks here and not on demand in AddSink.
+ if (provider) { // May be null in tests.
+ provider->SetRawAudioSink(
+ ssrc, rtc::scoped_ptr<AudioSinkInterface>(new Sink(this)));
+ }
}
MediaSourceInterface::SourceState RemoteAudioSource::state() const {
- return MediaSourceInterface::kLive;
+ RTC_DCHECK(main_thread_->IsCurrent());
+ return state_;
+}
+
+bool RemoteAudioSource::remote() const {
+ RTC_DCHECK(main_thread_->IsCurrent());
+ return true;
}
void RemoteAudioSource::SetVolume(double volume) {
- ASSERT(volume >= 0 && volume <= 10);
- for (AudioObserverList::iterator it = audio_observers_.begin();
- it != audio_observers_.end(); ++it) {
- (*it)->OnSetVolume(volume);
- }
+ RTC_DCHECK(volume >= 0 && volume <= 10);
+ for (auto* observer : audio_observers_)
+ observer->OnSetVolume(volume);
}
void RemoteAudioSource::RegisterAudioObserver(AudioObserver* observer) {
- ASSERT(observer != NULL);
- ASSERT(std::find(audio_observers_.begin(), audio_observers_.end(),
- observer) == audio_observers_.end());
+ RTC_DCHECK(observer != NULL);
+ RTC_DCHECK(std::find(audio_observers_.begin(), audio_observers_.end(),
+ observer) == audio_observers_.end());
audio_observers_.push_back(observer);
}
void RemoteAudioSource::UnregisterAudioObserver(AudioObserver* observer) {
- ASSERT(observer != NULL);
+ RTC_DCHECK(observer != NULL);
audio_observers_.remove(observer);
}
+void RemoteAudioSource::AddSink(AudioTrackSinkInterface* sink) {
+ RTC_DCHECK(main_thread_->IsCurrent());
+ RTC_DCHECK(sink);
+
+ if (state_ != MediaSourceInterface::kLive) {
+ LOG(LS_ERROR) << "Can't register sink as the source isn't live.";
+ return;
+ }
+
+ rtc::CritScope lock(&sink_lock_);
+ RTC_DCHECK(std::find(sinks_.begin(), sinks_.end(), sink) == sinks_.end());
+ sinks_.push_back(sink);
+}
+
+void RemoteAudioSource::RemoveSink(AudioTrackSinkInterface* sink) {
+ RTC_DCHECK(main_thread_->IsCurrent());
+ RTC_DCHECK(sink);
+
+ rtc::CritScope lock(&sink_lock_);
+ sinks_.remove(sink);
+}
+
+void RemoteAudioSource::OnData(const AudioSinkInterface::Data& audio) {
+ // Called on the externally-owned audio callback thread (from WebRTC).
+ rtc::CritScope lock(&sink_lock_);
+ for (auto* sink : sinks_) {
+ sink->OnData(audio.data, 16, audio.sample_rate, audio.channels,
+ audio.samples_per_channel);
+ }
+}
+
+void RemoteAudioSource::OnAudioProviderGone() {
+ // Called when the data provider is deleted. It may be the worker thread
+ // in libjingle or may be a different worker thread.
+ main_thread_->Post(new MessageHandler(this));
+}
+
+void RemoteAudioSource::OnMessage(rtc::Message* msg) {
+ RTC_DCHECK(main_thread_->IsCurrent());
+ sinks_.clear();
+ state_ = MediaSourceInterface::kEnded;
+ FireOnChanged();
+}
+
} // namespace webrtc
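
The MessageHandler/Sink pair above is the core of this change: the audio provider can disappear on an arbitrary worker thread, so the source posts a one-shot, self-deleting handler that holds a scoped_refptr, keeping the source alive until the main thread runs OnMessage. The following is a stripped-down sketch of the same pattern; Target and MarshalToMain are hypothetical names standing in for RemoteAudioSource and its handler.

#include "webrtc/base/messagehandler.h"
#include "webrtc/base/refcount.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/base/thread.h"

// Hypothetical ref-counted target; stands in for RemoteAudioSource.
class Target : public rtc::RefCountInterface {
 public:
  void OnProviderGone() { /* Runs on the main thread. */ }

 protected:
  ~Target() override {}
};

// One-shot handler: marshals the call onto the main thread and deletes
// itself, holding a reference so the target outlives the in-flight message.
class MarshalToMain : public rtc::MessageHandler {
 public:
  explicit MarshalToMain(Target* target) : target_(target) {}
  void OnMessage(rtc::Message* /* msg */) override {
    target_->OnProviderGone();
    delete this;
  }

 private:
  ~MarshalToMain() override {}
  const rtc::scoped_refptr<Target> target_;
};

// From any thread, e.g. the provider's worker thread:
//   rtc::scoped_refptr<Target> target(new rtc::RefCountedObject<Target>());
//   main_thread->Post(new MarshalToMain(target.get()));
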
diff --git a/talk/app/webrtc/remoteaudiosource.h b/talk/app/webrtc/remoteaudiosource.h
index e49aca5684..d648ba4604 100644
--- a/talk/app/webrtc/remoteaudiosource.h
+++ b/talk/app/webrtc/remoteaudiosource.h
@@ -29,36 +29,66 @@
#define TALK_APP_WEBRTC_REMOTEAUDIOSOURCE_H_
#include <list>
+#include <string>
#include "talk/app/webrtc/mediastreaminterface.h"
#include "talk/app/webrtc/notifier.h"
+#include "talk/media/base/audiorenderer.h"
+#include "webrtc/audio/audio_sink.h"
+#include "webrtc/base/criticalsection.h"
+
+namespace rtc {
+struct Message;
+class Thread;
+} // namespace rtc
namespace webrtc {
-using webrtc::AudioSourceInterface;
+class AudioProviderInterface;
// This class implements the audio source used by the remote audio track.
class RemoteAudioSource : public Notifier<AudioSourceInterface> {
public:
// Creates an instance of RemoteAudioSource.
- static rtc::scoped_refptr<RemoteAudioSource> Create();
+ static rtc::scoped_refptr<RemoteAudioSource> Create(
+ uint32_t ssrc,
+ AudioProviderInterface* provider);
+
+ // MediaSourceInterface implementation.
+ MediaSourceInterface::SourceState state() const override;
+ bool remote() const override;
+
+ void AddSink(AudioTrackSinkInterface* sink) override;
+ void RemoveSink(AudioTrackSinkInterface* sink) override;
protected:
RemoteAudioSource();
- virtual ~RemoteAudioSource();
+ ~RemoteAudioSource() override;
+
+ // Post-construction initialization, where we can do things like save a
+ // reference to ourselves (we need to be fully constructed first).
+ void Initialize(uint32_t ssrc, AudioProviderInterface* provider);
private:
typedef std::list<AudioObserver*> AudioObserverList;
- // MediaSourceInterface implementation.
- MediaSourceInterface::SourceState state() const override;
-
// AudioSourceInterface implementation.
void SetVolume(double volume) override;
void RegisterAudioObserver(AudioObserver* observer) override;
void UnregisterAudioObserver(AudioObserver* observer) override;
+ class Sink;
+ void OnData(const AudioSinkInterface::Data& audio);
+ void OnAudioProviderGone();
+
+ class MessageHandler;
+ void OnMessage(rtc::Message* msg);
+
AudioObserverList audio_observers_;
+ rtc::CriticalSection sink_lock_;
+ std::list<AudioTrackSinkInterface*> sinks_;
+ rtc::Thread* const main_thread_;
+ SourceState state_;
};
} // namespace webrtc
diff --git a/talk/app/webrtc/remoteaudiotrack.cc b/talk/app/webrtc/remoteaudiotrack.cc
new file mode 100644
index 0000000000..5f0b23e59e
--- /dev/null
+++ b/talk/app/webrtc/remoteaudiotrack.cc
@@ -0,0 +1,28 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// TODO(tommi): Delete this file when removed from build files in Chromium.
diff --git a/talk/app/webrtc/remoteaudiotrack.h b/talk/app/webrtc/remoteaudiotrack.h
new file mode 100644
index 0000000000..5f0b23e59e
--- /dev/null
+++ b/talk/app/webrtc/remoteaudiotrack.h
@@ -0,0 +1,28 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// TODO(tommi): Delete this file when removed from build files in Chromium.
diff --git a/talk/app/webrtc/rtpreceiver.cc b/talk/app/webrtc/rtpreceiver.cc
index b88554f0ac..9540f36f2f 100644
--- a/talk/app/webrtc/rtpreceiver.cc
+++ b/talk/app/webrtc/rtpreceiver.cc
@@ -39,6 +39,7 @@ AudioRtpReceiver::AudioRtpReceiver(AudioTrackInterface* track,
ssrc_(ssrc),
provider_(provider),
cached_track_enabled_(track->enabled()) {
+ RTC_DCHECK(track_->GetSource()->remote());
track_->RegisterObserver(this);
track_->GetSource()->RegisterAudioObserver(this);
Reconfigure();
@@ -85,6 +86,7 @@ VideoRtpReceiver::VideoRtpReceiver(VideoTrackInterface* track,
uint32_t ssrc,
VideoProviderInterface* provider)
: id_(track->id()), track_(track), ssrc_(ssrc), provider_(provider) {
+ RTC_DCHECK(track_->GetSource()->remote());
provider_->SetVideoPlayout(ssrc_, true, track_->GetSource()->FrameInput());
}
diff --git a/talk/app/webrtc/rtpreceiver.h b/talk/app/webrtc/rtpreceiver.h
index a93ccbcbfe..db021baf68 100644
--- a/talk/app/webrtc/rtpreceiver.h
+++ b/talk/app/webrtc/rtpreceiver.h
@@ -68,10 +68,10 @@ class AudioRtpReceiver : public ObserverInterface,
private:
void Reconfigure();
- std::string id_;
- rtc::scoped_refptr<AudioTrackInterface> track_;
- uint32_t ssrc_;
- AudioProviderInterface* provider_;
+ const std::string id_;
+ const rtc::scoped_refptr<AudioTrackInterface> track_;
+ const uint32_t ssrc_;
+ AudioProviderInterface* provider_; // Set to null in Stop().
bool cached_track_enabled_;
};
diff --git a/talk/app/webrtc/rtpsender.cc b/talk/app/webrtc/rtpsender.cc
index 3a78f4598a..91e484b733 100644
--- a/talk/app/webrtc/rtpsender.cc
+++ b/talk/app/webrtc/rtpsender.cc
@@ -29,6 +29,7 @@
#include "talk/app/webrtc/localaudiosource.h"
#include "talk/app/webrtc/videosourceinterface.h"
+#include "webrtc/base/helpers.h"
namespace webrtc {
@@ -43,7 +44,7 @@ LocalAudioSinkAdapter::~LocalAudioSinkAdapter() {
void LocalAudioSinkAdapter::OnData(const void* audio_data,
int bits_per_sample,
int sample_rate,
- int number_of_channels,
+ size_t number_of_channels,
size_t number_of_frames) {
rtc::CritScope lock(&lock_);
if (sink_) {
@@ -59,34 +60,49 @@ void LocalAudioSinkAdapter::SetSink(cricket::AudioRenderer::Sink* sink) {
}
AudioRtpSender::AudioRtpSender(AudioTrackInterface* track,
- uint32_t ssrc,
- AudioProviderInterface* provider)
+ const std::string& stream_id,
+ AudioProviderInterface* provider,
+ StatsCollector* stats)
: id_(track->id()),
- track_(track),
- ssrc_(ssrc),
+ stream_id_(stream_id),
provider_(provider),
+ stats_(stats),
+ track_(track),
cached_track_enabled_(track->enabled()),
sink_adapter_(new LocalAudioSinkAdapter()) {
+ RTC_DCHECK(provider != nullptr);
track_->RegisterObserver(this);
track_->AddSink(sink_adapter_.get());
- Reconfigure();
}
+AudioRtpSender::AudioRtpSender(AudioProviderInterface* provider,
+ StatsCollector* stats)
+ : id_(rtc::CreateRandomUuid()),
+ stream_id_(rtc::CreateRandomUuid()),
+ provider_(provider),
+ stats_(stats),
+ sink_adapter_(new LocalAudioSinkAdapter()) {}
+
AudioRtpSender::~AudioRtpSender() {
- track_->RemoveSink(sink_adapter_.get());
- track_->UnregisterObserver(this);
Stop();
}
void AudioRtpSender::OnChanged() {
+ RTC_DCHECK(!stopped_);
if (cached_track_enabled_ != track_->enabled()) {
cached_track_enabled_ = track_->enabled();
- Reconfigure();
+ if (can_send_track()) {
+ SetAudioSend();
+ }
}
}
bool AudioRtpSender::SetTrack(MediaStreamTrackInterface* track) {
- if (track->kind() != "audio") {
+ if (stopped_) {
+ LOG(LS_ERROR) << "SetTrack can't be called on a stopped RtpSender.";
+ return false;
+ }
+ if (track && track->kind() != MediaStreamTrackInterface::kAudioKind) {
LOG(LS_ERROR) << "SetTrack called on audio RtpSender with " << track->kind()
<< " track.";
return false;
@@ -94,36 +110,84 @@ bool AudioRtpSender::SetTrack(MediaStreamTrackInterface* track) {
AudioTrackInterface* audio_track = static_cast<AudioTrackInterface*>(track);
// Detach from old track.
- track_->RemoveSink(sink_adapter_.get());
- track_->UnregisterObserver(this);
+ if (track_) {
+ track_->RemoveSink(sink_adapter_.get());
+ track_->UnregisterObserver(this);
+ }
+
+ if (can_send_track() && stats_) {
+ stats_->RemoveLocalAudioTrack(track_.get(), ssrc_);
+ }
// Attach to new track.
+ bool prev_can_send_track = can_send_track();
track_ = audio_track;
- cached_track_enabled_ = track_->enabled();
- track_->RegisterObserver(this);
- track_->AddSink(sink_adapter_.get());
- Reconfigure();
+ if (track_) {
+ cached_track_enabled_ = track_->enabled();
+ track_->RegisterObserver(this);
+ track_->AddSink(sink_adapter_.get());
+ }
+
+ // Update audio provider.
+ if (can_send_track()) {
+ SetAudioSend();
+ if (stats_) {
+ stats_->AddLocalAudioTrack(track_.get(), ssrc_);
+ }
+ } else if (prev_can_send_track) {
+ cricket::AudioOptions options;
+ provider_->SetAudioSend(ssrc_, false, options, nullptr);
+ }
return true;
}
-void AudioRtpSender::Stop() {
- // TODO(deadbeef): Need to do more here to fully stop sending packets.
- if (!provider_) {
+void AudioRtpSender::SetSsrc(uint32_t ssrc) {
+ if (stopped_ || ssrc == ssrc_) {
return;
}
- cricket::AudioOptions options;
- provider_->SetAudioSend(ssrc_, false, options, nullptr);
- provider_ = nullptr;
+ // If we are already sending with a particular SSRC, stop sending.
+ if (can_send_track()) {
+ cricket::AudioOptions options;
+ provider_->SetAudioSend(ssrc_, false, options, nullptr);
+ if (stats_) {
+ stats_->RemoveLocalAudioTrack(track_.get(), ssrc_);
+ }
+ }
+ ssrc_ = ssrc;
+ if (can_send_track()) {
+ SetAudioSend();
+ if (stats_) {
+ stats_->AddLocalAudioTrack(track_.get(), ssrc_);
+ }
+ }
}
-void AudioRtpSender::Reconfigure() {
- if (!provider_) {
+void AudioRtpSender::Stop() {
+ // TODO(deadbeef): Need to do more here to fully stop sending packets.
+ if (stopped_) {
return;
}
+ if (track_) {
+ track_->RemoveSink(sink_adapter_.get());
+ track_->UnregisterObserver(this);
+ }
+ if (can_send_track()) {
+ cricket::AudioOptions options;
+ provider_->SetAudioSend(ssrc_, false, options, nullptr);
+ if (stats_) {
+ stats_->RemoveLocalAudioTrack(track_.get(), ssrc_);
+ }
+ }
+ stopped_ = true;
+}
+
+void AudioRtpSender::SetAudioSend() {
+ RTC_DCHECK(!stopped_ && can_send_track());
cricket::AudioOptions options;
- if (track_->enabled() && track_->GetSource()) {
+ if (track_->enabled() && track_->GetSource() &&
+ !track_->GetSource()->remote()) {
// TODO(xians): Remove this static_cast since we should be able to connect
- // a remote audio track to peer connection.
+ // a remote audio track to a peer connection.
options = static_cast<LocalAudioSource*>(track_->GetSource())->options();
}
@@ -136,35 +200,42 @@ void AudioRtpSender::Reconfigure() {
}
VideoRtpSender::VideoRtpSender(VideoTrackInterface* track,
- uint32_t ssrc,
+ const std::string& stream_id,
VideoProviderInterface* provider)
: id_(track->id()),
- track_(track),
- ssrc_(ssrc),
+ stream_id_(stream_id),
provider_(provider),
+ track_(track),
cached_track_enabled_(track->enabled()) {
+ RTC_DCHECK(provider != nullptr);
track_->RegisterObserver(this);
- VideoSourceInterface* source = track_->GetSource();
- if (source) {
- provider_->SetCaptureDevice(ssrc_, source->GetVideoCapturer());
- }
- Reconfigure();
}
+VideoRtpSender::VideoRtpSender(VideoProviderInterface* provider)
+ : id_(rtc::CreateRandomUuid()),
+ stream_id_(rtc::CreateRandomUuid()),
+ provider_(provider) {}
+
VideoRtpSender::~VideoRtpSender() {
- track_->UnregisterObserver(this);
Stop();
}
void VideoRtpSender::OnChanged() {
+ RTC_DCHECK(!stopped_);
if (cached_track_enabled_ != track_->enabled()) {
cached_track_enabled_ = track_->enabled();
- Reconfigure();
+ if (can_send_track()) {
+ SetVideoSend();
+ }
}
}
bool VideoRtpSender::SetTrack(MediaStreamTrackInterface* track) {
- if (track->kind() != "video") {
+ if (stopped_) {
+ LOG(LS_ERROR) << "SetTrack can't be called on a stopped RtpSender.";
+ return false;
+ }
+ if (track && track->kind() != MediaStreamTrackInterface::kVideoKind) {
LOG(LS_ERROR) << "SetTrack called on video RtpSender with " << track->kind()
<< " track.";
return false;
@@ -172,30 +243,72 @@ bool VideoRtpSender::SetTrack(MediaStreamTrackInterface* track) {
VideoTrackInterface* video_track = static_cast<VideoTrackInterface*>(track);
// Detach from old track.
- track_->UnregisterObserver(this);
+ if (track_) {
+ track_->UnregisterObserver(this);
+ }
// Attach to new track.
+ bool prev_can_send_track = can_send_track();
track_ = video_track;
- cached_track_enabled_ = track_->enabled();
- track_->RegisterObserver(this);
- Reconfigure();
+ if (track_) {
+ cached_track_enabled_ = track_->enabled();
+ track_->RegisterObserver(this);
+ }
+
+ // Update video provider.
+ if (can_send_track()) {
+ VideoSourceInterface* source = track_->GetSource();
+ // TODO(deadbeef): If SetTrack is called with a disabled track, and the
+ // previous track was enabled, this could cause a frame from the new track
+ // to slip out. Really, what we need is for SetCaptureDevice and
+ // SetVideoSend to be combined into one atomic operation, all the way down
+ // to WebRtcVideoSendStream.
+ provider_->SetCaptureDevice(ssrc_,
+ source ? source->GetVideoCapturer() : nullptr);
+ SetVideoSend();
+ } else if (prev_can_send_track) {
+ provider_->SetCaptureDevice(ssrc_, nullptr);
+ provider_->SetVideoSend(ssrc_, false, nullptr);
+ }
return true;
}
-void VideoRtpSender::Stop() {
- // TODO(deadbeef): Need to do more here to fully stop sending packets.
- if (!provider_) {
+void VideoRtpSender::SetSsrc(uint32_t ssrc) {
+ if (stopped_ || ssrc == ssrc_) {
return;
}
- provider_->SetCaptureDevice(ssrc_, nullptr);
- provider_->SetVideoSend(ssrc_, false, nullptr);
- provider_ = nullptr;
+ // If we are already sending with a particular SSRC, stop sending.
+ if (can_send_track()) {
+ provider_->SetCaptureDevice(ssrc_, nullptr);
+ provider_->SetVideoSend(ssrc_, false, nullptr);
+ }
+ ssrc_ = ssrc;
+ if (can_send_track()) {
+ VideoSourceInterface* source = track_->GetSource();
+ provider_->SetCaptureDevice(ssrc_,
+ source ? source->GetVideoCapturer() : nullptr);
+ SetVideoSend();
+ }
}
-void VideoRtpSender::Reconfigure() {
- if (!provider_) {
+void VideoRtpSender::Stop() {
+ // TODO(deadbeef): Need to do more here to fully stop sending packets.
+ if (stopped_) {
return;
}
+ if (track_) {
+ track_->UnregisterObserver(this);
+ }
+ if (can_send_track()) {
+ provider_->SetCaptureDevice(ssrc_, nullptr);
+ provider_->SetVideoSend(ssrc_, false, nullptr);
+ }
+ stopped_ = true;
+}
+
+void VideoRtpSender::SetVideoSend() {
+ RTC_DCHECK(!stopped_ && can_send_track());
const cricket::VideoOptions* options = nullptr;
VideoSourceInterface* source = track_->GetSource();
if (track_->enabled() && source) {
diff --git a/talk/app/webrtc/rtpsender.h b/talk/app/webrtc/rtpsender.h
index 3741909323..dd846b556c 100644
--- a/talk/app/webrtc/rtpsender.h
+++ b/talk/app/webrtc/rtpsender.h
@@ -36,6 +36,7 @@
#include "talk/app/webrtc/mediastreamprovider.h"
#include "talk/app/webrtc/rtpsenderinterface.h"
+#include "talk/app/webrtc/statscollector.h"
#include "talk/media/base/audiorenderer.h"
#include "webrtc/base/basictypes.h"
#include "webrtc/base/criticalsection.h"
@@ -56,7 +57,7 @@ class LocalAudioSinkAdapter : public AudioTrackSinkInterface,
void OnData(const void* audio_data,
int bits_per_sample,
int sample_rate,
- int number_of_channels,
+ size_t number_of_channels,
size_t number_of_frames) override;
// cricket::AudioRenderer implementation.
@@ -70,9 +71,15 @@ class LocalAudioSinkAdapter : public AudioTrackSinkInterface,
class AudioRtpSender : public ObserverInterface,
public rtc::RefCountedObject<RtpSenderInterface> {
public:
+ // StatsCollector provided so that Add/RemoveLocalAudioTrack can be called
+ // at the appropriate times.
AudioRtpSender(AudioTrackInterface* track,
- uint32_t ssrc,
- AudioProviderInterface* provider);
+ const std::string& stream_id,
+ AudioProviderInterface* provider,
+ StatsCollector* stats);
+
+ // Randomly generates id and stream_id.
+ AudioRtpSender(AudioProviderInterface* provider, StatsCollector* stats);
virtual ~AudioRtpSender();
@@ -85,18 +92,37 @@ class AudioRtpSender : public ObserverInterface,
return track_.get();
}
+ void SetSsrc(uint32_t ssrc) override;
+
+ uint32_t ssrc() const override { return ssrc_; }
+
+ cricket::MediaType media_type() const override {
+ return cricket::MEDIA_TYPE_AUDIO;
+ }
+
std::string id() const override { return id_; }
+ void set_stream_id(const std::string& stream_id) override {
+ stream_id_ = stream_id;
+ }
+ std::string stream_id() const override { return stream_id_; }
+
void Stop() override;
private:
- void Reconfigure();
+ bool can_send_track() const { return track_ && ssrc_; }
+ // Helper function to construct options for
+ // AudioProviderInterface::SetAudioSend.
+ void SetAudioSend();
std::string id_;
- rtc::scoped_refptr<AudioTrackInterface> track_;
- uint32_t ssrc_;
+ std::string stream_id_;
AudioProviderInterface* provider_;
- bool cached_track_enabled_;
+ StatsCollector* stats_;
+ rtc::scoped_refptr<AudioTrackInterface> track_;
+ uint32_t ssrc_ = 0;
+ bool cached_track_enabled_ = false;
+ bool stopped_ = false;
// Used to pass the data callback from the |track_| to the other end of
// cricket::AudioRenderer.
@@ -107,9 +133,12 @@ class VideoRtpSender : public ObserverInterface,
public rtc::RefCountedObject<RtpSenderInterface> {
public:
VideoRtpSender(VideoTrackInterface* track,
- uint32_t ssrc,
+ const std::string& stream_id,
VideoProviderInterface* provider);
+ // Randomly generates id and stream_id.
+ explicit VideoRtpSender(VideoProviderInterface* provider);
+
virtual ~VideoRtpSender();
// ObserverInterface implementation
@@ -121,18 +150,36 @@ class VideoRtpSender : public ObserverInterface,
return track_.get();
}
+ void SetSsrc(uint32_t ssrc) override;
+
+ uint32_t ssrc() const override { return ssrc_; }
+
+ cricket::MediaType media_type() const override {
+ return cricket::MEDIA_TYPE_VIDEO;
+ }
+
std::string id() const override { return id_; }
+ void set_stream_id(const std::string& stream_id) override {
+ stream_id_ = stream_id;
+ }
+ std::string stream_id() const override { return stream_id_; }
+
void Stop() override;
private:
- void Reconfigure();
+ bool can_send_track() const { return track_ && ssrc_; }
+ // Helper function to construct options for
+ // VideoProviderInterface::SetVideoSend.
+ void SetVideoSend();
std::string id_;
- rtc::scoped_refptr<VideoTrackInterface> track_;
- uint32_t ssrc_;
+ std::string stream_id_;
VideoProviderInterface* provider_;
- bool cached_track_enabled_;
+ rtc::scoped_refptr<VideoTrackInterface> track_;
+ uint32_t ssrc_ = 0;
+ bool cached_track_enabled_ = false;
+ bool stopped_ = false;
};
} // namespace webrtc
diff --git a/talk/app/webrtc/rtpsenderinterface.h b/talk/app/webrtc/rtpsenderinterface.h
index fca98f21db..f54e8ca090 100644
--- a/talk/app/webrtc/rtpsenderinterface.h
+++ b/talk/app/webrtc/rtpsenderinterface.h
@@ -35,6 +35,7 @@
#include "talk/app/webrtc/proxy.h"
#include "talk/app/webrtc/mediastreaminterface.h"
+#include "talk/session/media/mediasession.h"
#include "webrtc/base/refcount.h"
#include "webrtc/base/scoped_ref_ptr.h"
@@ -47,10 +48,24 @@ class RtpSenderInterface : public rtc::RefCountInterface {
virtual bool SetTrack(MediaStreamTrackInterface* track) = 0;
virtual rtc::scoped_refptr<MediaStreamTrackInterface> track() const = 0;
+ // Used to set the SSRC of the sender, once a local description has been set.
+ // If |ssrc| is 0, this indicates that the sender should disconnect from the
+ // underlying transport (this occurs if the sender isn't seen in a local
+ // description).
+ virtual void SetSsrc(uint32_t ssrc) = 0;
+ virtual uint32_t ssrc() const = 0;
+
+ // Audio or video sender?
+ virtual cricket::MediaType media_type() const = 0;
+
// Not to be confused with "mid", this is a field we can temporarily use
// to uniquely identify a sender until we implement Unified Plan SDP.
virtual std::string id() const = 0;
+ // TODO(deadbeef): Support one sender having multiple stream ids.
+ virtual void set_stream_id(const std::string& stream_id) = 0;
+ virtual std::string stream_id() const = 0;
+
virtual void Stop() = 0;
protected:
@@ -61,7 +76,12 @@ class RtpSenderInterface : public rtc::RefCountInterface {
BEGIN_PROXY_MAP(RtpSender)
PROXY_METHOD1(bool, SetTrack, MediaStreamTrackInterface*)
PROXY_CONSTMETHOD0(rtc::scoped_refptr<MediaStreamTrackInterface>, track)
+PROXY_METHOD1(void, SetSsrc, uint32_t)
+PROXY_CONSTMETHOD0(uint32_t, ssrc)
+PROXY_CONSTMETHOD0(cricket::MediaType, media_type)
PROXY_CONSTMETHOD0(std::string, id)
+PROXY_METHOD1(void, set_stream_id, const std::string&)
+PROXY_CONSTMETHOD0(std::string, stream_id)
PROXY_METHOD0(void, Stop)
END_PROXY()
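
Taken together, SetSsrc and SetTrack define an "early warmup" lifecycle: a sender created before the local description exists sends nothing until it has both a track and a nonzero SSRC, and SetSsrc(0) detaches it from the transport. Here is a sketch of the intended call sequence, exercised for real by the new unit tests below; the provider, track, and SSRC value are illustrative application-supplied inputs.

#include "talk/app/webrtc/rtpsender.h"

// Sketch of the sender lifecycle introduced by this patch.
void SenderLifecycleSketch(webrtc::AudioProviderInterface* provider,
                           webrtc::AudioTrackInterface* audio_track) {
  rtc::scoped_refptr<webrtc::AudioRtpSender> sender =
      new webrtc::AudioRtpSender(provider, nullptr /* stats */);
  sender->SetTrack(audio_track);  // Track attached; not sending (ssrc == 0).
  sender->SetSsrc(42);  // Track + SSRC present: SetAudioSend(42, true, ...).
  sender->SetSsrc(0);   // Sender dropped from local description: disconnect.
  sender->Stop();       // Terminal; later SetTrack/SetSsrc calls are no-ops.
}
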
diff --git a/talk/app/webrtc/rtpsenderreceiver_unittest.cc b/talk/app/webrtc/rtpsenderreceiver_unittest.cc
index c9d7e008c3..a590e1d01f 100644
--- a/talk/app/webrtc/rtpsenderreceiver_unittest.cc
+++ b/talk/app/webrtc/rtpsenderreceiver_unittest.cc
@@ -26,6 +26,7 @@
*/
#include <string>
+#include <utility>
#include "talk/app/webrtc/audiotrack.h"
#include "talk/app/webrtc/mediastream.h"
@@ -48,14 +49,17 @@ static const char kStreamLabel1[] = "local_stream_1";
static const char kVideoTrackId[] = "video_1";
static const char kAudioTrackId[] = "audio_1";
static const uint32_t kVideoSsrc = 98;
+static const uint32_t kVideoSsrc2 = 100;
static const uint32_t kAudioSsrc = 99;
+static const uint32_t kAudioSsrc2 = 101;
namespace webrtc {
// Helper class to test RtpSender/RtpReceiver.
class MockAudioProvider : public AudioProviderInterface {
public:
- virtual ~MockAudioProvider() {}
+ ~MockAudioProvider() override {}
+
MOCK_METHOD2(SetAudioPlayout,
void(uint32_t ssrc,
bool enable));
@@ -65,6 +69,14 @@ class MockAudioProvider : public AudioProviderInterface {
const cricket::AudioOptions& options,
cricket::AudioRenderer* renderer));
MOCK_METHOD2(SetAudioPlayoutVolume, void(uint32_t ssrc, double volume));
+
+ void SetRawAudioSink(uint32_t,
+ rtc::scoped_ptr<AudioSinkInterface> sink) override {
+ sink_ = std::move(sink);
+ }
+
+ private:
+ rtc::scoped_ptr<AudioSinkInterface> sink_;
};
// Helper class to test RtpSender/RtpReceiver.
@@ -85,8 +97,8 @@ class MockVideoProvider : public VideoProviderInterface {
class FakeVideoSource : public Notifier<VideoSourceInterface> {
public:
- static rtc::scoped_refptr<FakeVideoSource> Create() {
- return new rtc::RefCountedObject<FakeVideoSource>();
+ static rtc::scoped_refptr<FakeVideoSource> Create(bool remote) {
+ return new rtc::RefCountedObject<FakeVideoSource>(remote);
}
virtual cricket::VideoCapturer* GetVideoCapturer() { return &fake_capturer_; }
virtual void Stop() {}
@@ -94,16 +106,18 @@ class FakeVideoSource : public Notifier<VideoSourceInterface> {
virtual void AddSink(cricket::VideoRenderer* output) {}
virtual void RemoveSink(cricket::VideoRenderer* output) {}
virtual SourceState state() const { return state_; }
+ virtual bool remote() const { return remote_; }
virtual const cricket::VideoOptions* options() const { return &options_; }
virtual cricket::VideoRenderer* FrameInput() { return NULL; }
protected:
- FakeVideoSource() : state_(kLive) {}
+ explicit FakeVideoSource(bool remote) : state_(kLive), remote_(remote) {}
~FakeVideoSource() {}
private:
cricket::FakeVideoCapturer fake_capturer_;
SourceState state_;
+ bool remote_;
cricket::VideoOptions options_;
};
@@ -111,7 +125,11 @@ class RtpSenderReceiverTest : public testing::Test {
public:
virtual void SetUp() {
stream_ = MediaStream::Create(kStreamLabel1);
- rtc::scoped_refptr<VideoSourceInterface> source(FakeVideoSource::Create());
+ }
+
+ void AddVideoTrack(bool remote) {
+ rtc::scoped_refptr<VideoSourceInterface> source(
+ FakeVideoSource::Create(remote));
video_track_ = VideoTrack::Create(kVideoTrackId, source);
EXPECT_TRUE(stream_->AddTrack(video_track_));
}
@@ -120,17 +138,21 @@ class RtpSenderReceiverTest : public testing::Test {
audio_track_ = AudioTrack::Create(kAudioTrackId, NULL);
EXPECT_TRUE(stream_->AddTrack(audio_track_));
EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, true, _, _));
- audio_rtp_sender_ = new AudioRtpSender(stream_->GetAudioTracks()[0],
- kAudioSsrc, &audio_provider_);
+ audio_rtp_sender_ =
+ new AudioRtpSender(stream_->GetAudioTracks()[0], stream_->label(),
+ &audio_provider_, nullptr);
+ audio_rtp_sender_->SetSsrc(kAudioSsrc);
}
void CreateVideoRtpSender() {
+ AddVideoTrack(false);
EXPECT_CALL(video_provider_,
SetCaptureDevice(
kVideoSsrc, video_track_->GetSource()->GetVideoCapturer()));
EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, true, _));
video_rtp_sender_ = new VideoRtpSender(stream_->GetVideoTracks()[0],
- kVideoSsrc, &video_provider_);
+ stream_->label(), &video_provider_);
+ video_rtp_sender_->SetSsrc(kVideoSsrc);
}
void DestroyAudioRtpSender() {
@@ -146,8 +168,8 @@ class RtpSenderReceiverTest : public testing::Test {
}
void CreateAudioRtpReceiver() {
- audio_track_ =
- AudioTrack::Create(kAudioTrackId, RemoteAudioSource::Create().get());
+ audio_track_ = AudioTrack::Create(
+ kAudioTrackId, RemoteAudioSource::Create(kAudioSsrc, NULL));
EXPECT_TRUE(stream_->AddTrack(audio_track_));
EXPECT_CALL(audio_provider_, SetAudioPlayout(kAudioSsrc, true));
audio_rtp_receiver_ = new AudioRtpReceiver(stream_->GetAudioTracks()[0],
@@ -155,6 +177,7 @@ class RtpSenderReceiverTest : public testing::Test {
}
void CreateVideoRtpReceiver() {
+ AddVideoTrack(true);
EXPECT_CALL(video_provider_,
SetVideoPlayout(kVideoSsrc, true,
video_track_->GetSource()->FrameInput()));
@@ -280,4 +303,212 @@ TEST_F(RtpSenderReceiverTest, RemoteAudioTrackSetVolume) {
DestroyAudioRtpReceiver();
}
+// Test that provider methods aren't called without both a track and an SSRC.
+TEST_F(RtpSenderReceiverTest, AudioSenderWithoutTrackAndSsrc) {
+ rtc::scoped_refptr<AudioRtpSender> sender =
+ new AudioRtpSender(&audio_provider_, nullptr);
+ rtc::scoped_refptr<AudioTrackInterface> track =
+ AudioTrack::Create(kAudioTrackId, nullptr);
+ EXPECT_TRUE(sender->SetTrack(track));
+ EXPECT_TRUE(sender->SetTrack(nullptr));
+ sender->SetSsrc(kAudioSsrc);
+ sender->SetSsrc(0);
+ // Just let it get destroyed and make sure it doesn't call any methods on the
+ // provider interface.
+}
+
+// Test that provider methods aren't called without both a track and an SSRC.
+TEST_F(RtpSenderReceiverTest, VideoSenderWithoutTrackAndSsrc) {
+ rtc::scoped_refptr<VideoRtpSender> sender =
+ new VideoRtpSender(&video_provider_);
+ EXPECT_TRUE(sender->SetTrack(video_track_));
+ EXPECT_TRUE(sender->SetTrack(nullptr));
+ sender->SetSsrc(kVideoSsrc);
+ sender->SetSsrc(0);
+ // Just let it get destroyed and make sure it doesn't call any methods on the
+ // provider interface.
+}
+
+// Test that an audio sender calls the expected methods on the provider once
+// it has a track and SSRC, when the SSRC is set first.
+TEST_F(RtpSenderReceiverTest, AudioSenderEarlyWarmupSsrcThenTrack) {
+ rtc::scoped_refptr<AudioRtpSender> sender =
+ new AudioRtpSender(&audio_provider_, nullptr);
+ rtc::scoped_refptr<AudioTrackInterface> track =
+ AudioTrack::Create(kAudioTrackId, nullptr);
+ sender->SetSsrc(kAudioSsrc);
+ EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, true, _, _));
+ sender->SetTrack(track);
+
+ // Calls expected from destructor.
+ EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, false, _, _)).Times(1);
+}
+
+// Test that an audio sender calls the expected methods on the provider once
+// it has a track and SSRC, when the SSRC is set last.
+TEST_F(RtpSenderReceiverTest, AudioSenderEarlyWarmupTrackThenSsrc) {
+ rtc::scoped_refptr<AudioRtpSender> sender =
+ new AudioRtpSender(&audio_provider_, nullptr);
+ rtc::scoped_refptr<AudioTrackInterface> track =
+ AudioTrack::Create(kAudioTrackId, nullptr);
+ sender->SetTrack(track);
+ EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, true, _, _));
+ sender->SetSsrc(kAudioSsrc);
+
+ // Calls expected from destructor.
+ EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, false, _, _)).Times(1);
+}
+
+// Test that a video sender calls the expected methods on the provider once
+// it has a track and SSRC, when the SSRC is set first.
+TEST_F(RtpSenderReceiverTest, VideoSenderEarlyWarmupSsrcThenTrack) {
+ AddVideoTrack(false);
+ rtc::scoped_refptr<VideoRtpSender> sender =
+ new VideoRtpSender(&video_provider_);
+ sender->SetSsrc(kVideoSsrc);
+ EXPECT_CALL(video_provider_,
+ SetCaptureDevice(kVideoSsrc,
+ video_track_->GetSource()->GetVideoCapturer()));
+ EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, true, _));
+ sender->SetTrack(video_track_);
+
+ // Calls expected from destructor.
+ EXPECT_CALL(video_provider_, SetCaptureDevice(kVideoSsrc, nullptr)).Times(1);
+ EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, false, _)).Times(1);
+}
+
+// Test that a video sender calls the expected methods on the provider once
+// it has a track and SSRC, when the SSRC is set last.
+TEST_F(RtpSenderReceiverTest, VideoSenderEarlyWarmupTrackThenSsrc) {
+ AddVideoTrack(false);
+ rtc::scoped_refptr<VideoRtpSender> sender =
+ new VideoRtpSender(&video_provider_);
+ sender->SetTrack(video_track_);
+ EXPECT_CALL(video_provider_,
+ SetCaptureDevice(kVideoSsrc,
+ video_track_->GetSource()->GetVideoCapturer()));
+ EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, true, _));
+ sender->SetSsrc(kVideoSsrc);
+
+ // Calls expected from destructor.
+ EXPECT_CALL(video_provider_, SetCaptureDevice(kVideoSsrc, nullptr)).Times(1);
+ EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, false, _)).Times(1);
+}
+
+// Test that the sender is disconnected from the provider when its SSRC is
+// set to 0.
+TEST_F(RtpSenderReceiverTest, AudioSenderSsrcSetToZero) {
+ rtc::scoped_refptr<AudioTrackInterface> track =
+ AudioTrack::Create(kAudioTrackId, nullptr);
+ EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, true, _, _));
+ rtc::scoped_refptr<AudioRtpSender> sender =
+ new AudioRtpSender(track, kStreamLabel1, &audio_provider_, nullptr);
+ sender->SetSsrc(kAudioSsrc);
+
+ EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, false, _, _)).Times(1);
+ sender->SetSsrc(0);
+
+ // Make sure it's SetSsrc that called methods on the provider, and not the
+ // destructor.
+ EXPECT_CALL(audio_provider_, SetAudioSend(_, _, _, _)).Times(0);
+}
+
+// Test that the sender is disconnected from the provider when its SSRC is
+// set to 0.
+TEST_F(RtpSenderReceiverTest, VideoSenderSsrcSetToZero) {
+ AddVideoTrack(false);
+ EXPECT_CALL(video_provider_,
+ SetCaptureDevice(kVideoSsrc,
+ video_track_->GetSource()->GetVideoCapturer()));
+ EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, true, _));
+ rtc::scoped_refptr<VideoRtpSender> sender =
+ new VideoRtpSender(video_track_, kStreamLabel1, &video_provider_);
+ sender->SetSsrc(kVideoSsrc);
+
+ EXPECT_CALL(video_provider_, SetCaptureDevice(kVideoSsrc, nullptr)).Times(1);
+ EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, false, _)).Times(1);
+ sender->SetSsrc(0);
+
+ // Make sure it's SetSsrc that called methods on the provider, and not the
+ // destructor.
+ EXPECT_CALL(video_provider_, SetCaptureDevice(_, _)).Times(0);
+ EXPECT_CALL(video_provider_, SetVideoSend(_, _, _)).Times(0);
+}
+
+TEST_F(RtpSenderReceiverTest, AudioSenderTrackSetToNull) {
+ rtc::scoped_refptr<AudioTrackInterface> track =
+ AudioTrack::Create(kAudioTrackId, nullptr);
+ EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, true, _, _));
+ rtc::scoped_refptr<AudioRtpSender> sender =
+ new AudioRtpSender(track, kStreamLabel1, &audio_provider_, nullptr);
+ sender->SetSsrc(kAudioSsrc);
+
+ EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, false, _, _)).Times(1);
+ EXPECT_TRUE(sender->SetTrack(nullptr));
+
+ // Make sure it's SetTrack that called methods on the provider, and not the
+ // destructor.
+ EXPECT_CALL(audio_provider_, SetAudioSend(_, _, _, _)).Times(0);
+}
+
+TEST_F(RtpSenderReceiverTest, VideoSenderTrackSetToNull) {
+ AddVideoTrack(false);
+ EXPECT_CALL(video_provider_,
+ SetCaptureDevice(kVideoSsrc,
+ video_track_->GetSource()->GetVideoCapturer()));
+ EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, true, _));
+ rtc::scoped_refptr<VideoRtpSender> sender =
+ new VideoRtpSender(video_track_, kStreamLabel1, &video_provider_);
+ sender->SetSsrc(kVideoSsrc);
+
+ EXPECT_CALL(video_provider_, SetCaptureDevice(kVideoSsrc, nullptr)).Times(1);
+ EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, false, _)).Times(1);
+ EXPECT_TRUE(sender->SetTrack(nullptr));
+
+ // Make sure it's SetTrack that called methods on the provider, and not the
+ // destructor.
+ EXPECT_CALL(video_provider_, SetCaptureDevice(_, _)).Times(0);
+ EXPECT_CALL(video_provider_, SetVideoSend(_, _, _)).Times(0);
+}
+
+TEST_F(RtpSenderReceiverTest, AudioSenderSsrcChanged) {
+ rtc::scoped_refptr<AudioTrackInterface> track =
+ AudioTrack::Create(kAudioTrackId, nullptr);
+ EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, true, _, _));
+ rtc::scoped_refptr<AudioRtpSender> sender =
+ new AudioRtpSender(track, kStreamLabel1, &audio_provider_, nullptr);
+ sender->SetSsrc(kAudioSsrc);
+
+ EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, false, _, _)).Times(1);
+ EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc2, true, _, _)).Times(1);
+ sender->SetSsrc(kAudioSsrc2);
+
+ // Calls expected from destructor.
+ EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc2, false, _, _)).Times(1);
+}
+
+TEST_F(RtpSenderReceiverTest, VideoSenderSsrcChanged) {
+ AddVideoTrack(false);
+ EXPECT_CALL(video_provider_,
+ SetCaptureDevice(kVideoSsrc,
+ video_track_->GetSource()->GetVideoCapturer()));
+ EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, true, _));
+ rtc::scoped_refptr<VideoRtpSender> sender =
+ new VideoRtpSender(video_track_, kStreamLabel1, &video_provider_);
+ sender->SetSsrc(kVideoSsrc);
+
+ EXPECT_CALL(video_provider_, SetCaptureDevice(kVideoSsrc, nullptr)).Times(1);
+ EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, false, _)).Times(1);
+ EXPECT_CALL(video_provider_,
+ SetCaptureDevice(kVideoSsrc2,
+ video_track_->GetSource()->GetVideoCapturer()));
+ EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc2, true, _));
+ sender->SetSsrc(kVideoSsrc2);
+
+ // Calls expected from destructor.
+ EXPECT_CALL(video_provider_, SetCaptureDevice(kVideoSsrc2, nullptr)).Times(1);
+ EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc2, false, _)).Times(1);
+}
+
} // namespace webrtc
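Taken together, the warm-up tests above pin down a small state machine: a sender only touches its provider once it has both a track and a non-zero SSRC, in either order, and it detaches again when the track is cleared, the SSRC is zeroed, or the sender is destroyed. A minimal sketch of that invariant, using invented Track/Provider stand-ins rather than the real interfaces:

    #include <cstdint>

    struct Track {};
    struct Provider {
      void SetAudioSend(uint32_t ssrc, bool enable) { /* drive media engine */ }
    };

    class SenderSketch {
     public:
      explicit SenderSketch(Provider* provider) : provider_(provider) {}
      ~SenderSketch() { Disconnect(); }  // the destructor expectations above

      void SetTrack(Track* track) {
        Disconnect();  // covers the SetTrack(nullptr) case
        track_ = track;
        MaybeConnect();
      }
      void SetSsrc(uint32_t ssrc) {
        Disconnect();  // covers SetSsrc(0) and the SsrcChanged re-attach
        ssrc_ = ssrc;
        MaybeConnect();
      }

     private:
      void MaybeConnect() {
        if (track_ && ssrc_ != 0)
          provider_->SetAudioSend(ssrc_, true);  // "early warmup" completes
      }
      void Disconnect() {
        if (track_ && ssrc_ != 0)
          provider_->SetAudioSend(ssrc_, false);
      }
      Track* track_ = nullptr;
      uint32_t ssrc_ = 0;
      Provider* provider_;
    };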
diff --git a/talk/app/webrtc/statscollector.cc b/talk/app/webrtc/statscollector.cc
index 347a84640c..b514b42fee 100644
--- a/talk/app/webrtc/statscollector.cc
+++ b/talk/app/webrtc/statscollector.cc
@@ -115,17 +115,17 @@ void ExtractCommonReceiveProperties(const cricket::MediaReceiverInfo& info,
report->AddString(StatsReport::kStatsValueNameCodecName, info.codec_name);
}
-void SetAudioProcessingStats(StatsReport* report, int signal_level,
- bool typing_noise_detected, int echo_return_loss,
- int echo_return_loss_enhancement, int echo_delay_median_ms,
- float aec_quality_min, int echo_delay_std_ms) {
+void SetAudioProcessingStats(StatsReport* report,
+ bool typing_noise_detected,
+ int echo_return_loss,
+ int echo_return_loss_enhancement,
+ int echo_delay_median_ms,
+ float aec_quality_min,
+ int echo_delay_std_ms) {
report->AddBoolean(StatsReport::kStatsValueNameTypingNoiseState,
typing_noise_detected);
report->AddFloat(StatsReport::kStatsValueNameEchoCancellationQualityMin,
aec_quality_min);
- // Don't overwrite the previous signal level if it's not available now.
- if (signal_level >= 0)
- report->AddInt(StatsReport::kStatsValueNameAudioInputLevel, signal_level);
const IntForAdd ints[] = {
{ StatsReport::kStatsValueNameEchoReturnLoss, echo_return_loss },
{ StatsReport::kStatsValueNameEchoReturnLossEnhancement,
@@ -182,11 +182,14 @@ void ExtractStats(const cricket::VoiceReceiverInfo& info, StatsReport* report) {
void ExtractStats(const cricket::VoiceSenderInfo& info, StatsReport* report) {
ExtractCommonSendProperties(info, report);
- SetAudioProcessingStats(report, info.audio_level, info.typing_noise_detected,
- info.echo_return_loss, info.echo_return_loss_enhancement,
- info.echo_delay_median_ms, info.aec_quality_min, info.echo_delay_std_ms);
+ SetAudioProcessingStats(
+ report, info.typing_noise_detected, info.echo_return_loss,
+ info.echo_return_loss_enhancement, info.echo_delay_median_ms,
+ info.aec_quality_min, info.echo_delay_std_ms);
+ RTC_DCHECK_GE(info.audio_level, 0);
const IntForAdd ints[] = {
+ { StatsReport::kStatsValueNameAudioInputLevel, info.audio_level },
{ StatsReport::kStatsValueNameJitterReceived, info.jitter_ms },
{ StatsReport::kStatsValueNamePacketsLost, info.packets_lost },
{ StatsReport::kStatsValueNamePacketsSent, info.packets_sent },
@@ -198,6 +201,8 @@ void ExtractStats(const cricket::VoiceSenderInfo& info, StatsReport* report) {
void ExtractStats(const cricket::VideoReceiverInfo& info, StatsReport* report) {
ExtractCommonReceiveProperties(info, report);
+ report->AddString(StatsReport::kStatsValueNameCodecImplementationName,
+ info.decoder_implementation_name);
report->AddInt64(StatsReport::kStatsValueNameBytesReceived,
info.bytes_rcvd);
report->AddInt64(StatsReport::kStatsValueNameCaptureStartNtpTimeMs,
@@ -230,6 +235,8 @@ void ExtractStats(const cricket::VideoReceiverInfo& info, StatsReport* report) {
void ExtractStats(const cricket::VideoSenderInfo& info, StatsReport* report) {
ExtractCommonSendProperties(info, report);
+ report->AddString(StatsReport::kStatsValueNameCodecImplementationName,
+ info.encoder_implementation_name);
report->AddBoolean(StatsReport::kStatsValueNameBandwidthLimitedResolution,
(info.adapt_reason & 0x2) > 0);
report->AddBoolean(StatsReport::kStatsValueNameCpuLimitedResolution,
@@ -730,17 +737,20 @@ void StatsCollector::ExtractSessionInfo() {
channel_report->AddId(StatsReport::kStatsValueNameRemoteCertificateId,
remote_cert_report_id);
}
- const std::string& srtp_cipher = channel_iter.srtp_cipher;
- if (!srtp_cipher.empty()) {
- channel_report->AddString(StatsReport::kStatsValueNameSrtpCipher,
- srtp_cipher);
+ int srtp_crypto_suite = channel_iter.srtp_crypto_suite;
+ if (srtp_crypto_suite != rtc::SRTP_INVALID_CRYPTO_SUITE &&
+ rtc::SrtpCryptoSuiteToName(srtp_crypto_suite).length()) {
+ channel_report->AddString(
+ StatsReport::kStatsValueNameSrtpCipher,
+ rtc::SrtpCryptoSuiteToName(srtp_crypto_suite));
}
- int ssl_cipher = channel_iter.ssl_cipher;
- if (ssl_cipher &&
- rtc::SSLStreamAdapter::GetSslCipherSuiteName(ssl_cipher).length()) {
+ int ssl_cipher_suite = channel_iter.ssl_cipher_suite;
+ if (ssl_cipher_suite != rtc::TLS_NULL_WITH_NULL_NULL &&
+ rtc::SSLStreamAdapter::SslCipherSuiteToName(ssl_cipher_suite)
+ .length()) {
channel_report->AddString(
StatsReport::kStatsValueNameDtlsCipher,
- rtc::SSLStreamAdapter::GetSslCipherSuiteName(ssl_cipher));
+ rtc::SSLStreamAdapter::SslCipherSuiteToName(ssl_cipher_suite));
}
int connection_id = 0;
@@ -888,21 +898,24 @@ void StatsCollector::UpdateReportFromAudioTrack(AudioTrackInterface* track,
RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
RTC_DCHECK(track != NULL);
- int signal_level = 0;
- if (!track->GetSignalLevel(&signal_level))
- signal_level = -1;
+ // Don't overwrite report values if they're not available.
+ int signal_level;
+ if (track->GetSignalLevel(&signal_level)) {
+ RTC_DCHECK_GE(signal_level, 0);
+ report->AddInt(StatsReport::kStatsValueNameAudioInputLevel, signal_level);
+ }
- rtc::scoped_refptr<AudioProcessorInterface> audio_processor(
- track->GetAudioProcessor());
+ auto audio_processor(track->GetAudioProcessor());
- AudioProcessorInterface::AudioProcessorStats stats;
- if (audio_processor.get())
+ if (audio_processor.get()) {
+ AudioProcessorInterface::AudioProcessorStats stats;
audio_processor->GetStats(&stats);
- SetAudioProcessingStats(report, signal_level, stats.typing_noise_detected,
- stats.echo_return_loss, stats.echo_return_loss_enhancement,
- stats.echo_delay_median_ms, stats.aec_quality_min,
- stats.echo_delay_std_ms);
+ SetAudioProcessingStats(
+ report, stats.typing_noise_detected, stats.echo_return_loss,
+ stats.echo_return_loss_enhancement, stats.echo_delay_median_ms,
+ stats.aec_quality_min, stats.echo_delay_std_ms);
+ }
}
bool StatsCollector::GetTrackIdBySsrc(uint32_t ssrc,
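Two behavioral points in the statscollector changes are easy to miss: the SRTP report entry is now keyed off an integer crypto-suite constant instead of a free-form string, and the entry is simply skipped when the suite has no registered name. A hedged illustration — the header location and the printed name are assumptions, not quoted from this patch:

    #include <iostream>
    #include <string>
    #include "webrtc/base/sslstreamadapter.h"  // assumed home of these helpers

    int main() {
      int suite = rtc::SRTP_AES128_CM_SHA1_80;
      // An empty result means "unknown suite"; ExtractSessionInfo() then
      // omits the StatsReport entry instead of reporting a junk value.
      std::cout << rtc::SrtpCryptoSuiteToName(suite) << std::endl;
      return 0;
    }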
diff --git a/talk/app/webrtc/statscollector.h b/talk/app/webrtc/statscollector.h
index 18a345d71d..56db79de20 100644
--- a/talk/app/webrtc/statscollector.h
+++ b/talk/app/webrtc/statscollector.h
@@ -36,7 +36,6 @@
#include <vector>
#include "talk/app/webrtc/mediastreaminterface.h"
-#include "talk/app/webrtc/mediastreamsignaling.h"
#include "talk/app/webrtc/peerconnectioninterface.h"
#include "talk/app/webrtc/statstypes.h"
#include "talk/app/webrtc/webrtcsession.h"
diff --git a/talk/app/webrtc/statscollector_unittest.cc b/talk/app/webrtc/statscollector_unittest.cc
index 9121c691b1..e7ee91190e 100644
--- a/talk/app/webrtc/statscollector_unittest.cc
+++ b/talk/app/webrtc/statscollector_unittest.cc
@@ -35,7 +35,6 @@
#include "talk/app/webrtc/peerconnectionfactory.h"
#include "talk/app/webrtc/mediastream.h"
#include "talk/app/webrtc/mediastreaminterface.h"
-#include "talk/app/webrtc/mediastreamsignaling.h"
#include "talk/app/webrtc/mediastreamtrack.h"
#include "talk/app/webrtc/test/fakedatachannelprovider.h"
#include "talk/app/webrtc/videotrack.h"
@@ -683,8 +682,8 @@ class StatsCollectorTest : public testing::Test {
// Fake stats to process.
cricket::TransportChannelStats channel_stats;
channel_stats.component = 1;
- channel_stats.srtp_cipher = "the-srtp-cipher";
- channel_stats.ssl_cipher = TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA;
+ channel_stats.srtp_crypto_suite = rtc::SRTP_AES128_CM_SHA1_80;
+ channel_stats.ssl_cipher_suite = TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA;
cricket::TransportStats transport_stats;
transport_stats.transport_name = "audio";
@@ -697,8 +696,7 @@ class StatsCollectorTest : public testing::Test {
// Fake certificate to report
rtc::scoped_refptr<rtc::RTCCertificate> local_certificate(
rtc::RTCCertificate::Create(rtc::scoped_ptr<rtc::FakeSSLIdentity>(
- new rtc::FakeSSLIdentity(local_cert))
- .Pass()));
+ new rtc::FakeSSLIdentity(local_cert))));
// Configure MockWebRtcSession
EXPECT_CALL(session_,
@@ -747,18 +745,17 @@ class StatsCollectorTest : public testing::Test {
}
// Check negotiated ciphers.
- std::string dtls_cipher = ExtractStatsValue(
- StatsReport::kStatsReportTypeComponent,
- reports,
- StatsReport::kStatsValueNameDtlsCipher);
- EXPECT_EQ(rtc::SSLStreamAdapter::GetSslCipherSuiteName(
+ std::string dtls_cipher_suite =
+ ExtractStatsValue(StatsReport::kStatsReportTypeComponent, reports,
+ StatsReport::kStatsValueNameDtlsCipher);
+ EXPECT_EQ(rtc::SSLStreamAdapter::SslCipherSuiteToName(
TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA),
- dtls_cipher);
- std::string srtp_cipher = ExtractStatsValue(
- StatsReport::kStatsReportTypeComponent,
- reports,
- StatsReport::kStatsValueNameSrtpCipher);
- EXPECT_EQ("the-srtp-cipher", srtp_cipher);
+ dtls_cipher_suite);
+ std::string srtp_crypto_suite =
+ ExtractStatsValue(StatsReport::kStatsReportTypeComponent, reports,
+ StatsReport::kStatsValueNameSrtpCipher);
+ EXPECT_EQ(rtc::SrtpCryptoSuiteToName(rtc::SRTP_AES128_CM_SHA1_80),
+ srtp_crypto_suite);
}
cricket::FakeMediaEngine* media_engine_;
@@ -1407,16 +1404,14 @@ TEST_F(StatsCollectorTest, NoTransport) {
ASSERT_EQ(kNotFound, remote_certificate_id);
// Check that the negotiated ciphers are absent.
- std::string dtls_cipher = ExtractStatsValue(
- StatsReport::kStatsReportTypeComponent,
- reports,
- StatsReport::kStatsValueNameDtlsCipher);
- ASSERT_EQ(kNotFound, dtls_cipher);
- std::string srtp_cipher = ExtractStatsValue(
- StatsReport::kStatsReportTypeComponent,
- reports,
- StatsReport::kStatsValueNameSrtpCipher);
- ASSERT_EQ(kNotFound, srtp_cipher);
+ std::string dtls_cipher_suite =
+ ExtractStatsValue(StatsReport::kStatsReportTypeComponent, reports,
+ StatsReport::kStatsValueNameDtlsCipher);
+ ASSERT_EQ(kNotFound, dtls_cipher_suite);
+ std::string srtp_crypto_suite =
+ ExtractStatsValue(StatsReport::kStatsReportTypeComponent, reports,
+ StatsReport::kStatsValueNameSrtpCipher);
+ ASSERT_EQ(kNotFound, srtp_crypto_suite);
}
// This test verifies that the stats are generated correctly when the transport
diff --git a/talk/app/webrtc/statstypes.cc b/talk/app/webrtc/statstypes.cc
index e45833c668..19cb1f5d78 100644
--- a/talk/app/webrtc/statstypes.cc
+++ b/talk/app/webrtc/statstypes.cc
@@ -408,6 +408,8 @@ const char* StatsReport::Value::display_name() const {
return "state";
case kStatsValueNameDataChannelId:
return "datachannelid";
+ case kStatsValueNameCodecImplementationName:
+ return "codecImplementationName";
// 'goog' prefixed constants.
case kStatsValueNameAccelerateRate:
@@ -592,9 +594,6 @@ const char* StatsReport::Value::display_name() const {
return "googViewLimitedResolution";
case kStatsValueNameWritable:
return "googWritable";
- default:
- RTC_DCHECK(false);
- break;
}
return nullptr;
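Dropping the default: arm (and its RTC_DCHECK) turns a runtime check into a compile-time one: with the switch exhaustive over the enum, a -Wswitch-style warning flags any newly added StatsValueName that lacks a display name. The same idea in miniature:

    enum class Color { kRed, kGreen };

    const char* Name(Color c) {
      switch (c) {
        case Color::kRed:
          return "red";
        case Color::kGreen:
          return "green";
      }
      // No default: adding Color::kBlue without a case now draws a compiler
      // warning instead of failing a DCHECK at runtime.
      return nullptr;
    }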
diff --git a/talk/app/webrtc/statstypes.h b/talk/app/webrtc/statstypes.h
index 7fa9f3212d..60439b9bc8 100644
--- a/talk/app/webrtc/statstypes.h
+++ b/talk/app/webrtc/statstypes.h
@@ -120,6 +120,7 @@ class StatsReport {
kStatsValueNameAudioOutputLevel,
kStatsValueNameBytesReceived,
kStatsValueNameBytesSent,
+ kStatsValueNameCodecImplementationName,
kStatsValueNameDataChannelId,
kStatsValueNamePacketsLost,
kStatsValueNamePacketsReceived,
diff --git a/talk/app/webrtc/test/DEPS b/talk/app/webrtc/test/DEPS
new file mode 100644
index 0000000000..a814b152f2
--- /dev/null
+++ b/talk/app/webrtc/test/DEPS
@@ -0,0 +1,5 @@
+include_rules = [
+ # Allow including Chrome's base/android headers, which are needed for
+ # accessing the JVM and the Application context in gtest.
+ "+base/android",
+]
diff --git a/talk/app/webrtc/test/androidtestinitializer.cc b/talk/app/webrtc/test/androidtestinitializer.cc
new file mode 100644
index 0000000000..883c2d8178
--- /dev/null
+++ b/talk/app/webrtc/test/androidtestinitializer.cc
@@ -0,0 +1,74 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/test/androidtestinitializer.h"
+
+#include <pthread.h>
+
+// Note: this dependency is dangerous since it reaches into Chromium's base.
+// There's a risk of e.g. macro clashes. This file may only be used in tests.
+// Since we use Chrome's build system for creating the gtest binary, this
+// should be fine.
+#include "base/android/context_utils.h"
+#include "base/android/jni_android.h"
+
+#include "talk/app/webrtc/java/jni/classreferenceholder.h"
+#include "talk/app/webrtc/java/jni/jni_helpers.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/ssladapter.h"
+#include "webrtc/voice_engine/include/voe_base.h"
+
+namespace webrtc {
+
+namespace {
+
+static pthread_once_t g_initialize_once = PTHREAD_ONCE_INIT;
+
+// There can only be one JNI_OnLoad in each binary. So since this is a GTEST
+// C++ runner binary, we want to initialize the same global objects we would
+// normally initialize if this were a Java binary.
+void EnsureInitializedOnce() {
+ RTC_CHECK(::base::android::IsVMInitialized());
+ JNIEnv* jni = ::base::android::AttachCurrentThread();
+ JavaVM* jvm = NULL;
+ RTC_CHECK_EQ(0, jni->GetJavaVM(&jvm));
+ jobject context = ::base::android::GetApplicationContext();
+
+ RTC_CHECK_GE(webrtc_jni::InitGlobalJniVariables(jvm), 0);
+ RTC_CHECK(rtc::InitializeSSL()) << "Failed to InitializeSSL()";
+ webrtc_jni::LoadGlobalClassReferenceHolder();
+
+ webrtc::VoiceEngine::SetAndroidObjects(jvm, context);
+}
+
+} // anonymous namespace
+
+void InitializeAndroidObjects() {
+ RTC_CHECK_EQ(0, pthread_once(&g_initialize_once, &EnsureInitializedOnce));
+}
+
+} // namespace webrtc
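A sketch of how a test is expected to pick this up; the fixture name is invented, the include paths follow this tree's conventions, and the #ifdef pattern matches the webrtcsdp_unittest.cc hunk further down:

    #ifdef WEBRTC_ANDROID
    #include "talk/app/webrtc/test/androidtestinitializer.h"
    #endif
    #include "testing/gtest/include/gtest/gtest.h"

    class SomeAndroidAwareTest : public testing::Test {
     protected:
      SomeAndroidAwareTest() {
    #ifdef WEBRTC_ANDROID
        // pthread_once above makes this safe to call from every fixture.
        webrtc::InitializeAndroidObjects();
    #endif
      }
    };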
diff --git a/talk/app/webrtc/test/androidtestinitializer.h b/talk/app/webrtc/test/androidtestinitializer.h
new file mode 100644
index 0000000000..e6992825dd
--- /dev/null
+++ b/talk/app/webrtc/test/androidtestinitializer.h
@@ -0,0 +1,37 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_TEST_ANDROIDTESTINITIALIZER_H_
+#define TALK_APP_WEBRTC_TEST_ANDROIDTESTINITIALIZER_H_
+
+namespace webrtc {
+
+void InitializeAndroidObjects();
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_TEST_ANDROIDTESTINITIALIZER_H_
diff --git a/talk/app/webrtc/test/fakeaudiocapturemodule_unittest.cc b/talk/app/webrtc/test/fakeaudiocapturemodule_unittest.cc
index e2dc12375b..6b675a9395 100644
--- a/talk/app/webrtc/test/fakeaudiocapturemodule_unittest.cc
+++ b/talk/app/webrtc/test/fakeaudiocapturemodule_unittest.cc
@@ -58,7 +58,7 @@ class FakeAdmTest : public testing::Test,
int32_t RecordedDataIsAvailable(const void* audioSamples,
const size_t nSamples,
const size_t nBytesPerSample,
- const uint8_t nChannels,
+ const size_t nChannels,
const uint32_t samplesPerSec,
const uint32_t totalDelayMS,
const int32_t clockDrift,
@@ -82,7 +82,7 @@ class FakeAdmTest : public testing::Test,
// ADM is pulling data.
int32_t NeedMorePlayData(const size_t nSamples,
const size_t nBytesPerSample,
- const uint8_t nChannels,
+ const size_t nChannels,
const uint32_t samplesPerSec,
void* audioSamples,
size_t& nSamplesOut,
diff --git a/talk/app/webrtc/test/fakedtlsidentitystore.h b/talk/app/webrtc/test/fakedtlsidentitystore.h
index 0f9bdb9e6c..98074c742a 100644
--- a/talk/app/webrtc/test/fakedtlsidentitystore.h
+++ b/talk/app/webrtc/test/fakedtlsidentitystore.h
@@ -29,41 +29,73 @@
#define TALK_APP_WEBRTC_TEST_FAKEDTLSIDENTITYSERVICE_H_
#include <string>
+#include <utility>
#include "talk/app/webrtc/dtlsidentitystore.h"
#include "talk/app/webrtc/peerconnectioninterface.h"
#include "webrtc/base/rtccertificate.h"
-static const char kRSA_PRIVATE_KEY_PEM[] =
- "-----BEGIN RSA PRIVATE KEY-----\n"
- "MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBAMYRkbhmI7kVA/rM\n"
- "czsZ+6JDhDvnkF+vn6yCAGuRPV03zuRqZtDy4N4to7PZu9PjqrRl7nDMXrG3YG9y\n"
- "rlIAZ72KjcKKFAJxQyAKLCIdawKRyp8RdK3LEySWEZb0AV58IadqPZDTNHHRX8dz\n"
- "5aTSMsbbkZ+C/OzTnbiMqLL/vg6jAgMBAAECgYAvgOs4FJcgvp+TuREx7YtiYVsH\n"
- "mwQPTum2z/8VzWGwR8BBHBvIpVe1MbD/Y4seyI2aco/7UaisatSgJhsU46/9Y4fq\n"
- "2TwXH9QANf4at4d9n/R6rzwpAJOpgwZgKvdQjkfrKTtgLV+/dawvpxUYkRH4JZM1\n"
- "CVGukMfKNrSVH4Ap4QJBAOJmGV1ASPnB4r4nc99at7JuIJmd7fmuVUwUgYi4XgaR\n"
- "WhScBsgYwZ/JoywdyZJgnbcrTDuVcWG56B3vXbhdpMsCQQDf9zeJrjnPZ3Cqm79y\n"
- "kdqANep0uwZciiNiWxsQrCHztywOvbFhdp8iYVFG9EK8DMY41Y5TxUwsHD+67zao\n"
- "ZNqJAkEA1suLUP/GvL8IwuRneQd2tWDqqRQ/Td3qq03hP7e77XtF/buya3Ghclo5\n"
- "54czUR89QyVfJEC6278nzA7n2h1uVQJAcG6mztNL6ja/dKZjYZye2CY44QjSlLo0\n"
- "MTgTSjdfg/28fFn2Jjtqf9Pi/X+50LWI/RcYMC2no606wRk9kyOuIQJBAK6VSAim\n"
- "1pOEjsYQn0X5KEIrz1G3bfCbB848Ime3U2/FWlCHMr6ch8kCZ5d1WUeJD3LbwMNG\n"
- "UCXiYxSsu20QNVw=\n"
- "-----END RSA PRIVATE KEY-----\n";
-
-static const char kCERT_PEM[] =
- "-----BEGIN CERTIFICATE-----\n"
- "MIIBmTCCAQKgAwIBAgIEbzBSAjANBgkqhkiG9w0BAQsFADARMQ8wDQYDVQQDEwZX\n"
- "ZWJSVEMwHhcNMTQwMTAyMTgyNDQ3WhcNMTQwMjAxMTgyNDQ3WjARMQ8wDQYDVQQD\n"
- "EwZXZWJSVEMwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAMYRkbhmI7kVA/rM\n"
- "czsZ+6JDhDvnkF+vn6yCAGuRPV03zuRqZtDy4N4to7PZu9PjqrRl7nDMXrG3YG9y\n"
- "rlIAZ72KjcKKFAJxQyAKLCIdawKRyp8RdK3LEySWEZb0AV58IadqPZDTNHHRX8dz\n"
- "5aTSMsbbkZ+C/OzTnbiMqLL/vg6jAgMBAAEwDQYJKoZIhvcNAQELBQADgYEAUflI\n"
- "VUe5Krqf5RVa5C3u/UTAOAUJBiDS3VANTCLBxjuMsvqOG0WvaYWP3HYPgrz0jXK2\n"
- "LJE/mGw3MyFHEqi81jh95J+ypl6xKW6Rm8jKLR87gUvCaVYn/Z4/P3AqcQTB7wOv\n"
- "UD0A8qfhfDM+LK6rPAnCsVN0NRDY3jvd6rzix9M=\n"
- "-----END CERTIFICATE-----\n";
+static const struct {
+ const char* rsa_private_key_pem;
+ const char* cert_pem;
+} kKeysAndCerts[] = {
+ {"-----BEGIN RSA PRIVATE KEY-----\n"
+ "MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBAMYRkbhmI7kVA/rM\n"
+ "czsZ+6JDhDvnkF+vn6yCAGuRPV03zuRqZtDy4N4to7PZu9PjqrRl7nDMXrG3YG9y\n"
+ "rlIAZ72KjcKKFAJxQyAKLCIdawKRyp8RdK3LEySWEZb0AV58IadqPZDTNHHRX8dz\n"
+ "5aTSMsbbkZ+C/OzTnbiMqLL/vg6jAgMBAAECgYAvgOs4FJcgvp+TuREx7YtiYVsH\n"
+ "mwQPTum2z/8VzWGwR8BBHBvIpVe1MbD/Y4seyI2aco/7UaisatSgJhsU46/9Y4fq\n"
+ "2TwXH9QANf4at4d9n/R6rzwpAJOpgwZgKvdQjkfrKTtgLV+/dawvpxUYkRH4JZM1\n"
+ "CVGukMfKNrSVH4Ap4QJBAOJmGV1ASPnB4r4nc99at7JuIJmd7fmuVUwUgYi4XgaR\n"
+ "WhScBsgYwZ/JoywdyZJgnbcrTDuVcWG56B3vXbhdpMsCQQDf9zeJrjnPZ3Cqm79y\n"
+ "kdqANep0uwZciiNiWxsQrCHztywOvbFhdp8iYVFG9EK8DMY41Y5TxUwsHD+67zao\n"
+ "ZNqJAkEA1suLUP/GvL8IwuRneQd2tWDqqRQ/Td3qq03hP7e77XtF/buya3Ghclo5\n"
+ "54czUR89QyVfJEC6278nzA7n2h1uVQJAcG6mztNL6ja/dKZjYZye2CY44QjSlLo0\n"
+ "MTgTSjdfg/28fFn2Jjtqf9Pi/X+50LWI/RcYMC2no606wRk9kyOuIQJBAK6VSAim\n"
+ "1pOEjsYQn0X5KEIrz1G3bfCbB848Ime3U2/FWlCHMr6ch8kCZ5d1WUeJD3LbwMNG\n"
+ "UCXiYxSsu20QNVw=\n"
+ "-----END RSA PRIVATE KEY-----\n",
+ "-----BEGIN CERTIFICATE-----\n"
+ "MIIBmTCCAQKgAwIBAgIEbzBSAjANBgkqhkiG9w0BAQsFADARMQ8wDQYDVQQDEwZX\n"
+ "ZWJSVEMwHhcNMTQwMTAyMTgyNDQ3WhcNMTQwMjAxMTgyNDQ3WjARMQ8wDQYDVQQD\n"
+ "EwZXZWJSVEMwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAMYRkbhmI7kVA/rM\n"
+ "czsZ+6JDhDvnkF+vn6yCAGuRPV03zuRqZtDy4N4to7PZu9PjqrRl7nDMXrG3YG9y\n"
+ "rlIAZ72KjcKKFAJxQyAKLCIdawKRyp8RdK3LEySWEZb0AV58IadqPZDTNHHRX8dz\n"
+ "5aTSMsbbkZ+C/OzTnbiMqLL/vg6jAgMBAAEwDQYJKoZIhvcNAQELBQADgYEAUflI\n"
+ "VUe5Krqf5RVa5C3u/UTAOAUJBiDS3VANTCLBxjuMsvqOG0WvaYWP3HYPgrz0jXK2\n"
+ "LJE/mGw3MyFHEqi81jh95J+ypl6xKW6Rm8jKLR87gUvCaVYn/Z4/P3AqcQTB7wOv\n"
+ "UD0A8qfhfDM+LK6rPAnCsVN0NRDY3jvd6rzix9M=\n"
+ "-----END CERTIFICATE-----\n"},
+ {"-----BEGIN RSA PRIVATE KEY-----\n"
+ "MIICXQIBAAKBgQDeYqlyJ1wuiMsi905e3X81/WA/G3ym50PIDZBVtSwZi7JVQPgj\n"
+ "Bl8CPZMvDh9EwB4Ji9ytA8dZZbQ4WbJWPr73zPpJSCvQqz6sOXSlenBRi72acNaQ\n"
+ "sOR/qPvviJx5I6Hqo4qemfnjZhAW85a5BpgrAwKgMLIQTHCTLWwVSyrDrwIDAQAB\n"
+ "AoGARni9eY8/hv+SX+I+05EdXt6MQXNUbQ+cSykBNCfVccLzIFEWUQMT2IHqwl6X\n"
+ "ShIXcq7/n1QzOAEiuzixauM3YHg4xZ1Um2Ha9a7ig5Xg4v6b43bmMkNE6LkoAtYs\n"
+ "qnQdfMh442b1liDud6IMb1Qk0amt3fSrgRMc547TZQVx4QECQQDxUeDm94r3p4ng\n"
+ "5rCLLC1K5/6HSTZsh7jatKPlz7GfP/IZlYV7iE5784/n0wRiCjZOS7hQRy/8m2Gp\n"
+ "pf4aZq+DAkEA6+np4d36FYikydvUrupLT3FkdRHGn/v83qOll/VmeNh+L1xMZlIP\n"
+ "tM26hAXCcQb7O5+J9y3cx2CAQsBS11ZXZQJAfGgTo76WG9p5UEJdXUInD2jOZPwv\n"
+ "XIATolxh6kXKcijLLLlSmT7KB0inNYIpzkkpee+7U1d/u6B3FriGaSHq9QJBAM/J\n"
+ "ICnDdLCgwNvWVraVQC3BpwSB2pswvCFwq7py94V60XFvbw80Ogc6qIv98qvQxVlX\n"
+ "hJIEgA/PjEi+0ng94Q0CQQDm8XSDby35gmjO+6eRmJtAjtB7nguLvrPXM6CPXRmD\n"
+ "sRoBocpHw6j9UdzZ6qYG0FkdXZghezXFY58ro2BYYRR3\n"
+ "-----END RSA PRIVATE KEY-----\n",
+ "-----BEGIN CERTIFICATE-----\n"
+ "MIICWDCCAcGgAwIBAgIJALgDjxMbBOhbMA0GCSqGSIb3DQEBCwUAMEUxCzAJBgNV\n"
+ "BAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBX\n"
+ "aWRnaXRzIFB0eSBMdGQwHhcNMTUxMTEzMjIzMjEzWhcNMTYxMTEyMjIzMjEzWjBF\n"
+ "MQswCQYDVQQGEwJBVTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50\n"
+ "ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKB\n"
+ "gQDeYqlyJ1wuiMsi905e3X81/WA/G3ym50PIDZBVtSwZi7JVQPgjBl8CPZMvDh9E\n"
+ "wB4Ji9ytA8dZZbQ4WbJWPr73zPpJSCvQqz6sOXSlenBRi72acNaQsOR/qPvviJx5\n"
+ "I6Hqo4qemfnjZhAW85a5BpgrAwKgMLIQTHCTLWwVSyrDrwIDAQABo1AwTjAdBgNV\n"
+ "HQ4EFgQUx2tbJdlcSTCepn09UdYORXKuSTAwHwYDVR0jBBgwFoAUx2tbJdlcSTCe\n"
+ "pn09UdYORXKuSTAwDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOBgQAmp9Id\n"
+ "E716gHMqeBG4S2FCgVFCr0a0ugkaneQAN/c2L9CbMemEN9W6jvucUIVOtYd90dDW\n"
+ "lXuowWmT/JctPe3D2qt4yvYW3puECHk2tVQmrJOZiZiTRtWm6HxkmoUYHYp/DtaS\n"
+ "1Xe29gSTnZtI5sQCrGMzk3SGRSSs7ejLKiVDBQ==\n"
+ "-----END CERTIFICATE-----\n"}};
class FakeDtlsIdentityStore : public webrtc::DtlsIdentityStoreInterface,
public rtc::MessageHandler {
@@ -77,6 +109,9 @@ class FakeDtlsIdentityStore : public webrtc::DtlsIdentityStoreInterface,
should_fail_ = should_fail;
}
+ void use_original_key() { key_index_ = 0; }
+ void use_alternate_key() { key_index_ = 1; }
+
void RequestIdentity(
rtc::KeyType key_type,
const rtc::scoped_refptr<webrtc::DtlsIdentityRequestObserver>&
@@ -92,8 +127,9 @@ class FakeDtlsIdentityStore : public webrtc::DtlsIdentityStoreInterface,
static rtc::scoped_refptr<rtc::RTCCertificate> GenerateCertificate() {
std::string cert;
std::string key;
- rtc::SSLIdentity::PemToDer("CERTIFICATE", kCERT_PEM, &cert);
- rtc::SSLIdentity::PemToDer("RSA PRIVATE KEY", kRSA_PRIVATE_KEY_PEM, &key);
+ rtc::SSLIdentity::PemToDer("CERTIFICATE", kKeysAndCerts[0].cert_pem, &cert);
+ rtc::SSLIdentity::PemToDer("RSA PRIVATE KEY",
+ kKeysAndCerts[0].rsa_private_key_pem, &key);
std::string pem_cert = rtc::SSLIdentity::DerToPem(
rtc::kPemTypeCertificate,
@@ -106,7 +142,7 @@ class FakeDtlsIdentityStore : public webrtc::DtlsIdentityStoreInterface,
rtc::scoped_ptr<rtc::SSLIdentity> identity(
rtc::SSLIdentity::FromPEMStrings(pem_key, pem_cert));
- return rtc::RTCCertificate::Create(identity.Pass());
+ return rtc::RTCCertificate::Create(std::move(identity));
}
private:
@@ -115,6 +151,11 @@ class FakeDtlsIdentityStore : public webrtc::DtlsIdentityStoreInterface,
MSG_FAILURE,
};
+ const char* get_key() {
+ return kKeysAndCerts[key_index_].rsa_private_key_pem;
+ }
+ const char* get_cert() { return kKeysAndCerts[key_index_].cert_pem; }
+
// rtc::MessageHandler implementation.
void OnMessage(rtc::Message* msg) {
MessageData* message_data = static_cast<MessageData*>(msg->pdata);
@@ -124,9 +165,8 @@ class FakeDtlsIdentityStore : public webrtc::DtlsIdentityStoreInterface,
case MSG_SUCCESS: {
std::string cert;
std::string key;
- rtc::SSLIdentity::PemToDer("CERTIFICATE", kCERT_PEM, &cert);
- rtc::SSLIdentity::PemToDer("RSA PRIVATE KEY", kRSA_PRIVATE_KEY_PEM,
- &key);
+ rtc::SSLIdentity::PemToDer("CERTIFICATE", get_cert(), &cert);
+ rtc::SSLIdentity::PemToDer("RSA PRIVATE KEY", get_key(), &key);
observer->OnSuccess(cert, key);
break;
}
@@ -138,6 +178,7 @@ class FakeDtlsIdentityStore : public webrtc::DtlsIdentityStoreInterface,
}
bool should_fail_;
+ int key_index_ = 0;
};
#endif // TALK_APP_WEBRTC_TEST_FAKEDTLSIDENTITYSERVICE_H_
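The second key pair and key_index_ let a test hand out a different identity on a later request, e.g. to exercise a certificate change across renegotiation. A hedged usage sketch — the default constructor and the surrounding test flow are assumed, not shown in this hunk:

    #include "talk/app/webrtc/test/fakedtlsidentitystore.h"
    #include "webrtc/base/scoped_ptr.h"

    void ExerciseKeyRotation() {
      rtc::scoped_ptr<FakeDtlsIdentityStore> store(new FakeDtlsIdentityStore());
      store->use_original_key();   // identity requests resolve to entry 0
      // ... drive the first offer/answer with this store ...
      store->use_alternate_key();  // later requests resolve to entry 1
      // ... renegotiate and check that the reported certificate changed ...
    }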
diff --git a/talk/app/webrtc/test/fakemediastreamsignaling.h b/talk/app/webrtc/test/fakemediastreamsignaling.h
deleted file mode 100644
index 562c4ad306..0000000000
--- a/talk/app/webrtc/test/fakemediastreamsignaling.h
+++ /dev/null
@@ -1,140 +0,0 @@
-/*
- * libjingle
- * Copyright 2013 Google Inc.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions are met:
- *
- * 1. Redistributions of source code must retain the above copyright notice,
- * this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright notice,
- * this list of conditions and the following disclaimer in the documentation
- * and/or other materials provided with the distribution.
- * 3. The name of the author may not be used to endorse or promote products
- * derived from this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
- * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
- * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
- * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
- * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
- * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
- * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#ifndef TALK_APP_WEBRTC_TEST_FAKEMEDIASTREAMSIGNALING_H_
-#define TALK_APP_WEBRTC_TEST_FAKEMEDIASTREAMSIGNALING_H_
-
-#include "talk/app/webrtc/audiotrack.h"
-#include "talk/app/webrtc/mediastreamsignaling.h"
-#include "talk/app/webrtc/videotrack.h"
-
-static const char kStream1[] = "stream1";
-static const char kVideoTrack1[] = "video1";
-static const char kAudioTrack1[] = "audio1";
-
-static const char kStream2[] = "stream2";
-static const char kVideoTrack2[] = "video2";
-static const char kAudioTrack2[] = "audio2";
-
-class FakeMediaStreamSignaling : public webrtc::MediaStreamSignaling,
- public webrtc::MediaStreamSignalingObserver {
- public:
- explicit FakeMediaStreamSignaling(cricket::ChannelManager* channel_manager) :
- webrtc::MediaStreamSignaling(rtc::Thread::Current(), this,
- channel_manager) {
- }
-
- void SendAudioVideoStream1() {
- ClearLocalStreams();
- AddLocalStream(CreateStream(kStream1, kAudioTrack1, kVideoTrack1));
- }
-
- void SendAudioVideoStream2() {
- ClearLocalStreams();
- AddLocalStream(CreateStream(kStream2, kAudioTrack2, kVideoTrack2));
- }
-
- void SendAudioVideoStream1And2() {
- ClearLocalStreams();
- AddLocalStream(CreateStream(kStream1, kAudioTrack1, kVideoTrack1));
- AddLocalStream(CreateStream(kStream2, kAudioTrack2, kVideoTrack2));
- }
-
- void SendNothing() {
- ClearLocalStreams();
- }
-
- void UseOptionsAudioOnly() {
- ClearLocalStreams();
- AddLocalStream(CreateStream(kStream2, kAudioTrack2, ""));
- }
-
- void UseOptionsVideoOnly() {
- ClearLocalStreams();
- AddLocalStream(CreateStream(kStream2, "", kVideoTrack2));
- }
-
- void ClearLocalStreams() {
- while (local_streams()->count() != 0) {
- RemoveLocalStream(local_streams()->at(0));
- }
- }
-
- // Implements MediaStreamSignalingObserver.
- virtual void OnAddRemoteStream(webrtc::MediaStreamInterface* stream) {}
- virtual void OnRemoveRemoteStream(webrtc::MediaStreamInterface* stream) {}
- virtual void OnAddDataChannel(webrtc::DataChannelInterface* data_channel) {}
- virtual void OnAddLocalAudioTrack(webrtc::MediaStreamInterface* stream,
- webrtc::AudioTrackInterface* audio_track,
- uint32_t ssrc) {}
- virtual void OnAddLocalVideoTrack(webrtc::MediaStreamInterface* stream,
- webrtc::VideoTrackInterface* video_track,
- uint32_t ssrc) {}
- virtual void OnAddRemoteAudioTrack(webrtc::MediaStreamInterface* stream,
- webrtc::AudioTrackInterface* audio_track,
- uint32_t ssrc) {}
- virtual void OnAddRemoteVideoTrack(webrtc::MediaStreamInterface* stream,
- webrtc::VideoTrackInterface* video_track,
- uint32_t ssrc) {}
- virtual void OnRemoveRemoteAudioTrack(
- webrtc::MediaStreamInterface* stream,
- webrtc::AudioTrackInterface* audio_track) {}
- virtual void OnRemoveRemoteVideoTrack(
- webrtc::MediaStreamInterface* stream,
- webrtc::VideoTrackInterface* video_track) {}
- virtual void OnRemoveLocalAudioTrack(webrtc::MediaStreamInterface* stream,
- webrtc::AudioTrackInterface* audio_track,
- uint32_t ssrc) {}
- virtual void OnRemoveLocalVideoTrack(
- webrtc::MediaStreamInterface* stream,
- webrtc::VideoTrackInterface* video_track) {}
- virtual void OnRemoveLocalStream(webrtc::MediaStreamInterface* stream) {}
-
- private:
- rtc::scoped_refptr<webrtc::MediaStreamInterface> CreateStream(
- const std::string& stream_label,
- const std::string& audio_track_id,
- const std::string& video_track_id) {
- rtc::scoped_refptr<webrtc::MediaStreamInterface> stream(
- webrtc::MediaStream::Create(stream_label));
-
- if (!audio_track_id.empty()) {
- rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
- webrtc::AudioTrack::Create(audio_track_id, NULL));
- stream->AddTrack(audio_track);
- }
-
- if (!video_track_id.empty()) {
- rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track(
- webrtc::VideoTrack::Create(video_track_id, NULL));
- stream->AddTrack(video_track);
- }
- return stream;
- }
-};
-
-#endif // TALK_APP_WEBRTC_TEST_FAKEMEDIASTREAMSIGNALING_H_
diff --git a/talk/app/webrtc/test/peerconnectiontestwrapper.cc b/talk/app/webrtc/test/peerconnectiontestwrapper.cc
index 2eb24d9700..86b7842517 100644
--- a/talk/app/webrtc/test/peerconnectiontestwrapper.cc
+++ b/talk/app/webrtc/test/peerconnectiontestwrapper.cc
@@ -25,13 +25,15 @@
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-#include "talk/app/webrtc/fakeportallocatorfactory.h"
+#include <utility>
+
#include "talk/app/webrtc/test/fakedtlsidentitystore.h"
#include "talk/app/webrtc/test/fakeperiodicvideocapturer.h"
#include "talk/app/webrtc/test/mockpeerconnectionobservers.h"
#include "talk/app/webrtc/test/peerconnectiontestwrapper.h"
#include "talk/app/webrtc/videosourceinterface.h"
#include "webrtc/base/gunit.h"
+#include "webrtc/p2p/client/fakeportallocator.h"
static const char kStreamLabelBase[] = "stream_label";
static const char kVideoTrackLabelBase[] = "video_track";
@@ -70,10 +72,8 @@ PeerConnectionTestWrapper::~PeerConnectionTestWrapper() {}
bool PeerConnectionTestWrapper::CreatePc(
const MediaConstraintsInterface* constraints) {
- allocator_factory_ = webrtc::FakePortAllocatorFactory::Create();
- if (!allocator_factory_) {
- return false;
- }
+ rtc::scoped_ptr<cricket::PortAllocator> port_allocator(
+ new cricket::FakePortAllocator(rtc::Thread::Current(), nullptr));
fake_audio_capture_module_ = FakeAudioCaptureModule::Create();
if (fake_audio_capture_module_ == NULL) {
@@ -87,17 +87,17 @@ bool PeerConnectionTestWrapper::CreatePc(
return false;
}
- // CreatePeerConnection with IceServers.
- webrtc::PeerConnectionInterface::IceServers ice_servers;
+ // CreatePeerConnection with RTCConfiguration.
+ webrtc::PeerConnectionInterface::RTCConfiguration config;
webrtc::PeerConnectionInterface::IceServer ice_server;
ice_server.uri = "stun:stun.l.google.com:19302";
- ice_servers.push_back(ice_server);
+ config.servers.push_back(ice_server);
rtc::scoped_ptr<webrtc::DtlsIdentityStoreInterface> dtls_identity_store(
rtc::SSLStreamAdapter::HaveDtlsSrtp() ?
new FakeDtlsIdentityStore() : nullptr);
peer_connection_ = peer_connection_factory_->CreatePeerConnection(
- ice_servers, constraints, allocator_factory_.get(),
- dtls_identity_store.Pass(), this);
+ config, constraints, std::move(port_allocator),
+ std::move(dtls_identity_store), this);
return peer_connection_.get() != NULL;
}
diff --git a/talk/app/webrtc/test/peerconnectiontestwrapper.h b/talk/app/webrtc/test/peerconnectiontestwrapper.h
index b65426326f..883f2f2454 100644
--- a/talk/app/webrtc/test/peerconnectiontestwrapper.h
+++ b/talk/app/webrtc/test/peerconnectiontestwrapper.h
@@ -34,11 +34,6 @@
#include "talk/app/webrtc/test/fakevideotrackrenderer.h"
#include "webrtc/base/sigslot.h"
-namespace webrtc {
-class DtlsIdentityStoreInterface;
-class PortAllocatorFactoryInterface;
-}
-
class PeerConnectionTestWrapper
: public webrtc::PeerConnectionObserver,
public webrtc::CreateSessionDescriptionObserver,
@@ -110,8 +105,6 @@ class PeerConnectionTestWrapper
bool video, const webrtc::FakeConstraints& video_constraints);
std::string name_;
- rtc::scoped_refptr<webrtc::PortAllocatorFactoryInterface>
- allocator_factory_;
rtc::scoped_refptr<webrtc::PeerConnectionInterface> peer_connection_;
rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface>
peer_connection_factory_;
diff --git a/talk/app/webrtc/videosource.cc b/talk/app/webrtc/videosource.cc
index b33f5f9e13..4b371e3ed5 100644
--- a/talk/app/webrtc/videosource.cc
+++ b/talk/app/webrtc/videosource.cc
@@ -32,6 +32,7 @@
#include "talk/app/webrtc/mediaconstraintsinterface.h"
#include "talk/session/media/channelmanager.h"
+#include "webrtc/base/arraysize.h"
using cricket::CaptureState;
using webrtc::MediaConstraintsInterface;
@@ -267,11 +268,12 @@ const cricket::VideoFormat& GetBestCaptureFormat(
// Set |option| to the highest-priority value of |key| in the constraints.
// Return false if the key is mandatory, and the value is invalid.
bool ExtractOption(const MediaConstraintsInterface* all_constraints,
- const std::string& key, cricket::Settable<bool>* option) {
+ const std::string& key,
+ rtc::Optional<bool>* option) {
size_t mandatory = 0;
bool value;
if (FindConstraint(all_constraints, key, &value, &mandatory)) {
- option->Set(value);
+ *option = rtc::Optional<bool>(value);
return true;
}
@@ -302,8 +304,6 @@ class FrameInputWrapper : public cricket::VideoRenderer {
virtual ~FrameInputWrapper() {}
// VideoRenderer implementation.
- bool SetSize(int width, int height, int reserved) override { return true; }
-
bool RenderFrame(const cricket::VideoFrame* frame) override {
if (!capturer_->IsRunning()) {
return true;
@@ -329,21 +329,23 @@ namespace webrtc {
rtc::scoped_refptr<VideoSource> VideoSource::Create(
cricket::ChannelManager* channel_manager,
cricket::VideoCapturer* capturer,
- const webrtc::MediaConstraintsInterface* constraints) {
+ const webrtc::MediaConstraintsInterface* constraints,
+ bool remote) {
ASSERT(channel_manager != NULL);
ASSERT(capturer != NULL);
- rtc::scoped_refptr<VideoSource> source(
- new rtc::RefCountedObject<VideoSource>(channel_manager,
- capturer));
+ rtc::scoped_refptr<VideoSource> source(new rtc::RefCountedObject<VideoSource>(
+ channel_manager, capturer, remote));
source->Initialize(constraints);
return source;
}
VideoSource::VideoSource(cricket::ChannelManager* channel_manager,
- cricket::VideoCapturer* capturer)
+ cricket::VideoCapturer* capturer,
+ bool remote)
: channel_manager_(channel_manager),
video_capturer_(capturer),
- state_(kInitializing) {
+ state_(kInitializing),
+ remote_(remote) {
channel_manager_->SignalVideoCaptureStateChange.connect(
this, &VideoSource::OnStateChange);
}
@@ -368,7 +370,7 @@ void VideoSource::Initialize(
} else {
// The VideoCapturer implementation doesn't support capability
// enumeration. We need to guess what the camera supports.
- for (int i = 0; i < ARRAY_SIZE(kVideoFormats); ++i) {
+ for (int i = 0; i < arraysize(kVideoFormats); ++i) {
formats.push_back(cricket::VideoFormat(kVideoFormats[i]));
}
}
@@ -460,7 +462,9 @@ void VideoSource::OnStateChange(cricket::VideoCapturer* capturer,
}
void VideoSource::SetState(SourceState new_state) {
- if (VERIFY(state_ != new_state)) {
+ // TODO(hbos): Temporarily disabled VERIFY due to webrtc:4776.
+ // if (VERIFY(state_ != new_state)) {
+ if (state_ != new_state) {
state_ = new_state;
FireOnChanged();
}
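The cricket::Settable<bool> to rtc::Optional<bool> migration in ExtractOption trades an out-parameter Get() for value semantics. In miniature (illustrative, not this patch's code):

    #include "webrtc/base/optional.h"  // assumed header for rtc::Optional

    void OptionalSketch() {
      rtc::Optional<bool> noise_reduction;           // starts unset
      noise_reduction = rtc::Optional<bool>(false);  // like Settable::Set()
      if (noise_reduction) {                         // replaces Get(&value)
        bool value = *noise_reduction;
        (void)value;  // use the extracted value
      }
    }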
diff --git a/talk/app/webrtc/videosource.h b/talk/app/webrtc/videosource.h
index 8253cbac18..98c1e083a3 100644
--- a/talk/app/webrtc/videosource.h
+++ b/talk/app/webrtc/videosource.h
@@ -66,9 +66,12 @@ class VideoSource : public Notifier<VideoSourceInterface>,
static rtc::scoped_refptr<VideoSource> Create(
cricket::ChannelManager* channel_manager,
cricket::VideoCapturer* capturer,
- const webrtc::MediaConstraintsInterface* constraints);
+ const webrtc::MediaConstraintsInterface* constraints,
+ bool remote);
+
+ SourceState state() const override { return state_; }
+ bool remote() const override { return remote_; }
- virtual SourceState state() const { return state_; }
virtual const cricket::VideoOptions* options() const { return &options_; }
virtual cricket::VideoRenderer* FrameInput();
@@ -86,7 +89,8 @@ class VideoSource : public Notifier<VideoSourceInterface>,
protected:
VideoSource(cricket::ChannelManager* channel_manager,
- cricket::VideoCapturer* capturer);
+ cricket::VideoCapturer* capturer,
+ bool remote);
virtual ~VideoSource();
void Initialize(const webrtc::MediaConstraintsInterface* constraints);
@@ -104,6 +108,7 @@ class VideoSource : public Notifier<VideoSourceInterface>,
cricket::VideoFormat format_;
cricket::VideoOptions options_;
SourceState state_;
+ const bool remote_;
};
} // namespace webrtc
diff --git a/talk/app/webrtc/videosource_unittest.cc b/talk/app/webrtc/videosource_unittest.cc
index 2efcc1d84e..6f1df3434e 100644
--- a/talk/app/webrtc/videosource_unittest.cc
+++ b/talk/app/webrtc/videosource_unittest.cc
@@ -144,9 +144,9 @@ class VideoSourceTest : public testing::Test {
void CreateVideoSource(
const webrtc::MediaConstraintsInterface* constraints) {
// VideoSource takes ownership of |capturer_|
- source_ = VideoSource::Create(channel_manager_.get(),
- capturer_cleanup_.release(),
- constraints);
+ source_ =
+ VideoSource::Create(channel_manager_.get(), capturer_cleanup_.release(),
+ constraints, false);
ASSERT_TRUE(source_.get() != NULL);
EXPECT_EQ(capturer_, source_->GetVideoCapturer());
@@ -210,8 +210,7 @@ TEST_F(VideoSourceTest, StopRestart) {
// RemoteVideoCapturer and takes video frames from FrameInput.
TEST_F(VideoSourceTest, StartStopRemote) {
source_ = VideoSource::Create(channel_manager_.get(),
- new webrtc::RemoteVideoCapturer(),
- NULL);
+ new webrtc::RemoteVideoCapturer(), NULL, true);
ASSERT_TRUE(source_.get() != NULL);
EXPECT_TRUE(NULL != source_->GetVideoCapturer());
@@ -392,16 +391,14 @@ TEST_F(VideoSourceTest, SetValidOptionValues) {
CreateVideoSource(&constraints);
- bool value = true;
- EXPECT_TRUE(source_->options()->video_noise_reduction.Get(&value));
- EXPECT_FALSE(value);
+ EXPECT_EQ(rtc::Optional<bool>(false),
+ source_->options()->video_noise_reduction);
}
TEST_F(VideoSourceTest, OptionNotSet) {
FakeConstraints constraints;
CreateVideoSource(&constraints);
- bool value;
- EXPECT_FALSE(source_->options()->video_noise_reduction.Get(&value));
+ EXPECT_EQ(rtc::Optional<bool>(), source_->options()->video_noise_reduction);
}
TEST_F(VideoSourceTest, MandatoryOptionOverridesOptional) {
@@ -413,9 +410,8 @@ TEST_F(VideoSourceTest, MandatoryOptionOverridesOptional) {
CreateVideoSource(&constraints);
- bool value = false;
- EXPECT_TRUE(source_->options()->video_noise_reduction.Get(&value));
- EXPECT_TRUE(value);
+ EXPECT_EQ(rtc::Optional<bool>(true),
+ source_->options()->video_noise_reduction);
}
TEST_F(VideoSourceTest, InvalidOptionKeyOptional) {
@@ -428,9 +424,8 @@ TEST_F(VideoSourceTest, InvalidOptionKeyOptional) {
EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
kMaxWaitMs);
- bool value = true;
- EXPECT_TRUE(source_->options()->video_noise_reduction.Get(&value));
- EXPECT_FALSE(value);
+ EXPECT_EQ(rtc::Optional<bool>(false),
+ source_->options()->video_noise_reduction);
}
TEST_F(VideoSourceTest, InvalidOptionKeyMandatory) {
@@ -443,8 +438,7 @@ TEST_F(VideoSourceTest, InvalidOptionKeyMandatory) {
EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(),
kMaxWaitMs);
- bool value;
- EXPECT_FALSE(source_->options()->video_noise_reduction.Get(&value));
+ EXPECT_EQ(rtc::Optional<bool>(), source_->options()->video_noise_reduction);
}
TEST_F(VideoSourceTest, InvalidOptionValueOptional) {
@@ -456,8 +450,7 @@ TEST_F(VideoSourceTest, InvalidOptionValueOptional) {
EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
kMaxWaitMs);
- bool value = false;
- EXPECT_FALSE(source_->options()->video_noise_reduction.Get(&value));
+ EXPECT_EQ(rtc::Optional<bool>(), source_->options()->video_noise_reduction);
}
TEST_F(VideoSourceTest, InvalidOptionValueMandatory) {
@@ -473,8 +466,7 @@ TEST_F(VideoSourceTest, InvalidOptionValueMandatory) {
EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(),
kMaxWaitMs);
- bool value;
- EXPECT_FALSE(source_->options()->video_noise_reduction.Get(&value));
+ EXPECT_EQ(rtc::Optional<bool>(), source_->options()->video_noise_reduction);
}
TEST_F(VideoSourceTest, MixedOptionsAndConstraints) {
@@ -497,9 +489,8 @@ TEST_F(VideoSourceTest, MixedOptionsAndConstraints) {
EXPECT_EQ(288, format->height);
EXPECT_EQ(30, format->framerate());
- bool value = true;
- EXPECT_TRUE(source_->options()->video_noise_reduction.Get(&value));
- EXPECT_FALSE(value);
+ EXPECT_EQ(rtc::Optional<bool>(false),
+ source_->options()->video_noise_reduction);
}
// Tests that the source starts video with the default resolution for
diff --git a/talk/app/webrtc/videosourceproxy.h b/talk/app/webrtc/videosourceproxy.h
index 677fa9cf0f..ce96e8e6d1 100644
--- a/talk/app/webrtc/videosourceproxy.h
+++ b/talk/app/webrtc/videosourceproxy.h
@@ -38,6 +38,7 @@ namespace webrtc {
// signaling thread.
BEGIN_PROXY_MAP(VideoSource)
PROXY_CONSTMETHOD0(SourceState, state)
+ PROXY_CONSTMETHOD0(bool, remote)
PROXY_METHOD0(cricket::VideoCapturer*, GetVideoCapturer)
PROXY_METHOD0(void, Stop)
PROXY_METHOD0(void, Restart)
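For reference, each PROXY_*METHOD line generates a forwarding method that marshals the call onto the signaling thread; roughly (a sketch of the macros in talk/app/webrtc/proxy.h, not their exact expansion):

    // PROXY_CONSTMETHOD0(bool, remote) expands to approximately:
    bool remote() const override {
      ConstMethodCall0<VideoSourceInterface, bool> call(
          c_.get(), &VideoSourceInterface::remote);
      return call.Marshal(signaling_thread_);  // blocks until the call runs
    }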
diff --git a/talk/app/webrtc/videotrack.cc b/talk/app/webrtc/videotrack.cc
index 7c78aea91f..f138240068 100644
--- a/talk/app/webrtc/videotrack.cc
+++ b/talk/app/webrtc/videotrack.cc
@@ -31,7 +31,7 @@
namespace webrtc {
-static const char kVideoTrackKind[] = "video";
+const char MediaStreamTrackInterface::kVideoKind[] = "video";
VideoTrack::VideoTrack(const std::string& label,
VideoSourceInterface* video_source)
@@ -47,7 +47,7 @@ VideoTrack::~VideoTrack() {
}
std::string VideoTrack::kind() const {
- return kVideoTrackKind;
+ return kVideoKind;
}
void VideoTrack::AddRenderer(VideoRendererInterface* renderer) {
diff --git a/talk/app/webrtc/videotrack_unittest.cc b/talk/app/webrtc/videotrack_unittest.cc
index 609ee80ffc..013d925cd2 100644
--- a/talk/app/webrtc/videotrack_unittest.cc
+++ b/talk/app/webrtc/videotrack_unittest.cc
@@ -62,7 +62,7 @@ class VideoTrackTest : public testing::Test {
video_track_ = VideoTrack::Create(
kVideoTrackId,
VideoSource::Create(channel_manager_.get(),
- new webrtc::RemoteVideoCapturer(), NULL));
+ new webrtc::RemoteVideoCapturer(), NULL, true));
}
protected:
diff --git a/talk/app/webrtc/videotrackrenderers.cc b/talk/app/webrtc/videotrackrenderers.cc
index 3c47c6edab..3f9301b718 100644
--- a/talk/app/webrtc/videotrackrenderers.cc
+++ b/talk/app/webrtc/videotrackrenderers.cc
@@ -54,10 +54,6 @@ void VideoTrackRenderers::SetEnabled(bool enable) {
enabled_ = enable;
}
-bool VideoTrackRenderers::SetSize(int width, int height, int reserved) {
- return true;
-}
-
bool VideoTrackRenderers::RenderFrame(const cricket::VideoFrame* frame) {
rtc::CritScope cs(&critical_section_);
if (!enabled_) {
diff --git a/talk/app/webrtc/videotrackrenderers.h b/talk/app/webrtc/videotrackrenderers.h
index 15274a1530..3262e22dff 100644
--- a/talk/app/webrtc/videotrackrenderers.h
+++ b/talk/app/webrtc/videotrackrenderers.h
@@ -48,7 +48,6 @@ class VideoTrackRenderers : public cricket::VideoRenderer {
~VideoTrackRenderers();
// Implements cricket::VideoRenderer
- virtual bool SetSize(int width, int height, int reserved);
virtual bool RenderFrame(const cricket::VideoFrame* frame);
void AddRenderer(VideoRendererInterface* renderer);
diff --git a/talk/app/webrtc/webrtcsdp.cc b/talk/app/webrtc/webrtcsdp.cc
index 3fa9a7d469..e287e90916 100644
--- a/talk/app/webrtc/webrtcsdp.cc
+++ b/talk/app/webrtc/webrtcsdp.cc
@@ -45,6 +45,7 @@
#include "webrtc/p2p/base/constants.h"
#include "webrtc/p2p/base/port.h"
#include "talk/session/media/mediasession.h"
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/common.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/messagedigest.h"
@@ -121,6 +122,7 @@ static const char kLineTypeAttributes = 'a';
static const char kAttributeGroup[] = "group";
static const char kAttributeMid[] = "mid";
static const char kAttributeRtcpMux[] = "rtcp-mux";
+static const char kAttributeRtcpReducedSize[] = "rtcp-rsize";
static const char kAttributeSsrc[] = "ssrc";
static const char kSsrcAttributeCname[] = "cname";
static const char kAttributeExtmap[] = "extmap";
@@ -138,8 +140,8 @@ static const char kAttributeCandidate[] = "candidate";
static const char kAttributeCandidateTyp[] = "typ";
static const char kAttributeCandidateRaddr[] = "raddr";
static const char kAttributeCandidateRport[] = "rport";
-static const char kAttributeCandidateUsername[] = "username";
-static const char kAttributeCandidatePassword[] = "password";
+static const char kAttributeCandidateUfrag[] = "ufrag";
+static const char kAttributeCandidatePwd[] = "pwd";
static const char kAttributeCandidateGeneration[] = "generation";
static const char kAttributeFingerprint[] = "fingerprint";
static const char kAttributeSetup[] = "setup";
@@ -260,6 +262,7 @@ static void BuildRtpMap(const MediaContentDescription* media_desc,
const MediaType media_type,
std::string* message);
static void BuildCandidate(const std::vector<Candidate>& candidates,
+ bool include_ufrag,
std::string* message);
static void BuildIceOptions(const std::vector<std::string>& transport_options,
std::string* message);
@@ -876,7 +879,7 @@ std::string SdpSerializeCandidate(
std::string message;
std::vector<cricket::Candidate> candidates;
candidates.push_back(candidate.candidate());
- BuildCandidate(candidates, &message);
+ BuildCandidate(candidates, true, &message);
// From WebRTC draft section 4.8.1.1 candidate-attribute will be
// just candidate:<candidate> not a=candidate:<blah>CRLF
ASSERT(message.find("a=") == 0);
@@ -1070,10 +1073,9 @@ bool ParseCandidate(const std::string& message, Candidate* candidate,
}
// Extension
- // Empty string as the candidate username and password.
- // Will be updated later with the ice-ufrag and ice-pwd.
- // TODO: Remove the username/password extension, which is currently
- // kept for backwards compatibility.
+ // Though non-standard, we support the ICE ufrag and pwd being signaled on
+ // the candidate, to avoid confusion about which generation a candidate
+ // belongs to when multiple generations are trickled at the same time.
std::string username;
std::string password;
uint32_t generation = 0;
@@ -1084,9 +1086,9 @@ bool ParseCandidate(const std::string& message, Candidate* candidate,
if (!GetValueFromString(first_line, fields[++i], &generation, error)) {
return false;
}
- } else if (fields[i] == kAttributeCandidateUsername) {
+ } else if (fields[i] == kAttributeCandidateUfrag) {
username = fields[++i];
- } else if (fields[i] == kAttributeCandidatePassword) {
+ } else if (fields[i] == kAttributeCandidatePwd) {
password = fields[++i];
} else {
// Skip the unknown extension.
@@ -1283,8 +1285,9 @@ void BuildMediaDescription(const ContentInfo* content_info,
}
}
- // Build the a=candidate lines.
- BuildCandidate(candidates, message);
+ // Build the a=candidate lines. We don't include ufrag and pwd in candidates
+ // inside a full session description, since they're already signaled in the
+ // media-level ice-ufrag and ice-pwd lines below.
+ BuildCandidate(candidates, false, message);
// Use the transport_info to build the media level ice-ufrag and ice-pwd.
if (transport_info) {
@@ -1292,13 +1295,17 @@ void BuildMediaDescription(const ContentInfo* content_info,
// ice-pwd-att = "ice-pwd" ":" password
// ice-ufrag-att = "ice-ufrag" ":" ufrag
// ice-ufrag
- InitAttrLine(kAttributeIceUfrag, &os);
- os << kSdpDelimiterColon << transport_info->description.ice_ufrag;
- AddLine(os.str(), message);
+ if (!transport_info->description.ice_ufrag.empty()) {
+ InitAttrLine(kAttributeIceUfrag, &os);
+ os << kSdpDelimiterColon << transport_info->description.ice_ufrag;
+ AddLine(os.str(), message);
+ }
// ice-pwd
- InitAttrLine(kAttributeIcePwd, &os);
- os << kSdpDelimiterColon << transport_info->description.ice_pwd;
- AddLine(os.str(), message);
+ if (!transport_info->description.ice_pwd.empty()) {
+ InitAttrLine(kAttributeIcePwd, &os);
+ os << kSdpDelimiterColon << transport_info->description.ice_pwd;
+ AddLine(os.str(), message);
+ }
// draft-petithuguenin-mmusic-ice-attributes-level-03
BuildIceOptions(transport_info->description.transport_options, message);
@@ -1399,6 +1406,13 @@ void BuildRtpContentAttributes(
AddLine(os.str(), message);
}
+ // RFC 5506
+ // a=rtcp-rsize
+ if (media_desc->rtcp_reduced_size()) {
+ InitAttrLine(kAttributeRtcpReducedSize, &os);
+ AddLine(os.str(), message);
+ }
+
// RFC 4568
// a=crypto:<tag> <crypto-suite> <key-params> [<session-params>]
for (std::vector<CryptoParams>::const_iterator it =
@@ -1525,7 +1539,7 @@ bool IsFmtpParam(const std::string& name) {
kCodecParamMaxAverageBitrate, kCodecParamMaxPlaybackRate,
kCodecParamAssociatedPayloadType
};
- for (size_t i = 0; i < ARRAY_SIZE(kFmtpParams); ++i) {
+ for (size_t i = 0; i < arraysize(kFmtpParams); ++i) {
if (_stricmp(name.c_str(), kFmtpParams[i]) == 0) {
return true;
}
@@ -1708,6 +1722,7 @@ void BuildRtpMap(const MediaContentDescription* media_desc,
}
void BuildCandidate(const std::vector<Candidate>& candidates,
+ bool include_ufrag,
std::string* message) {
std::ostringstream os;
@@ -1757,6 +1772,9 @@ void BuildCandidate(const std::vector<Candidate>& candidates,
// Extensions
os << kAttributeCandidateGeneration << " " << it->generation();
+ if (include_ufrag && !it->username().empty()) {
+ os << " " << kAttributeCandidateUfrag << " " << it->username();
+ }
AddLine(os.str(), message);
}
@@ -2046,7 +2064,7 @@ static bool ParseDtlsSetup(const std::string& line,
struct StaticPayloadAudioCodec {
const char* name;
int clockrate;
- int channels;
+ size_t channels;
};
static const StaticPayloadAudioCodec kStaticPayloadAudioCodecs[] = {
{ "PCMU", 8000, 1 },
@@ -2082,10 +2100,10 @@ void MaybeCreateStaticPayloadAudioCodecs(
int payload_type = *it;
if (!media_desc->HasCodec(payload_type) &&
payload_type >= 0 &&
- payload_type < ARRAY_SIZE(kStaticPayloadAudioCodecs)) {
+ payload_type < arraysize(kStaticPayloadAudioCodecs)) {
std::string encoding_name = kStaticPayloadAudioCodecs[payload_type].name;
int clock_rate = kStaticPayloadAudioCodecs[payload_type].clockrate;
- int channels = kStaticPayloadAudioCodecs[payload_type].channels;
+ size_t channels = kStaticPayloadAudioCodecs[payload_type].channels;
media_desc->AddCodec(cricket::AudioCodec(payload_type, encoding_name,
clock_rate, 0, channels,
preference));
@@ -2552,6 +2570,8 @@ bool ParseContent(const std::string& message,
//
if (HasAttribute(line, kAttributeRtcpMux)) {
media_desc->set_rtcp_mux(true);
+ } else if (HasAttribute(line, kAttributeRtcpReducedSize)) {
+ media_desc->set_rtcp_reduced_size(true);
} else if (HasAttribute(line, kAttributeSsrcGroup)) {
if (!ParseSsrcGroupAttribute(line, &ssrc_groups, error)) {
return false;
@@ -2666,7 +2686,8 @@ bool ParseContent(const std::string& message,
// Update the candidates with the media level "ice-pwd" and "ice-ufrag".
for (Candidates::iterator it = candidates_orig.begin();
it != candidates_orig.end(); ++it) {
- ASSERT((*it).username().empty());
+ ASSERT((*it).username().empty() ||
+ (*it).username() == transport->ice_ufrag);
(*it).set_username(transport->ice_ufrag);
ASSERT((*it).password().empty());
(*it).set_password(transport->ice_pwd);
@@ -2817,7 +2838,7 @@ bool ParseCryptoAttribute(const std::string& line,
// Updates or creates a new codec entry in the audio description according
// to |name|, |clockrate|, |bitrate|, |channels| and |preference|.
void UpdateCodec(int payload_type, const std::string& name, int clockrate,
- int bitrate, int channels, int preference,
+ int bitrate, size_t channels, int preference,
AudioContentDescription* audio_desc) {
// Codec may already be populated with (only) optional parameters
// (from an fmtp).
@@ -2916,7 +2937,7 @@ bool ParseRtpmapAttribute(const std::string& line,
// of audio channels. This parameter is OPTIONAL and may be
// omitted if the number of channels is one, provided that no
// additional parameters are needed.
- int channels = 1;
+ size_t channels = 1;
if (codec_params.size() == 3) {
if (!GetValueFromString(line, codec_params[2], &channels, error)) {
return false;
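
The encoding-parameters field parsed here is the trailing component of a=rtpmap per RFC 4566, e.g. "opus/48000/2". A minimal sketch of the split (rtc::split from webrtc/base/stringencode.h is assumed; the real parser goes through GetValueFromString for error reporting):

    std::vector<std::string> codec_params;
    rtc::split("opus/48000/2", '/', &codec_params);  // {"opus", "48000", "2"}
    size_t channels = 1;                             // default when omitted
    if (codec_params.size() == 3) {
      channels = strtoul(codec_params[2].c_str(), nullptr, 10);
    }
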
diff --git a/talk/app/webrtc/webrtcsdp_unittest.cc b/talk/app/webrtc/webrtcsdp_unittest.cc
index cb6a392ab4..15fc8083b4 100644
--- a/talk/app/webrtc/webrtcsdp_unittest.cc
+++ b/talk/app/webrtc/webrtcsdp_unittest.cc
@@ -30,6 +30,9 @@
#include <vector>
#include "talk/app/webrtc/jsepsessiondescription.h"
+#ifdef WEBRTC_ANDROID
+#include "talk/app/webrtc/test/androidtestinitializer.h"
+#endif
#include "talk/app/webrtc/webrtcsdp.h"
#include "talk/media/base/constants.h"
#include "webrtc/p2p/base/constants.h"
@@ -80,11 +83,13 @@ static const char kSessionTime[] = "t=0 0\r\n";
static const uint32_t kCandidatePriority = 2130706432U; // pref = 1.0
static const char kCandidateUfragVoice[] = "ufrag_voice";
static const char kCandidatePwdVoice[] = "pwd_voice";
+static const char kAttributeIceUfragVoice[] = "a=ice-ufrag:ufrag_voice\r\n";
static const char kAttributeIcePwdVoice[] = "a=ice-pwd:pwd_voice\r\n";
static const char kCandidateUfragVideo[] = "ufrag_video";
static const char kCandidatePwdVideo[] = "pwd_video";
static const char kCandidateUfragData[] = "ufrag_data";
static const char kCandidatePwdData[] = "pwd_data";
+static const char kAttributeIceUfragVideo[] = "a=ice-ufrag:ufrag_video\r\n";
static const char kAttributeIcePwdVideo[] = "a=ice-pwd:pwd_video\r\n";
static const uint32_t kCandidateGeneration = 2;
static const char kCandidateFoundation1[] = "a0+B/1";
@@ -153,6 +158,7 @@ static const char kSdpFullString[] =
"a=mid:audio_content_name\r\n"
"a=sendrecv\r\n"
"a=rtcp-mux\r\n"
+ "a=rtcp-rsize\r\n"
"a=crypto:1 AES_CM_128_HMAC_SHA1_32 "
"inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 "
"dummy_session_params\r\n"
@@ -220,6 +226,7 @@ static const char kSdpString[] =
"a=mid:audio_content_name\r\n"
"a=sendrecv\r\n"
"a=rtcp-mux\r\n"
+ "a=rtcp-rsize\r\n"
"a=crypto:1 AES_CM_128_HMAC_SHA1_32 "
"inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 "
"dummy_session_params\r\n"
@@ -394,9 +401,9 @@ static const char kRawIPV6Candidate[] =
"abcd::abcd::abcd::abcd::abcd::abcd::abcd::abcd 1234 typ host generation 2";
// One candidate reference string.
-static const char kSdpOneCandidateOldFormat[] =
+static const char kSdpOneCandidateWithUfragPwd[] =
"a=candidate:a0+B/1 1 udp 2130706432 192.168.1.5 1234 typ host network_name"
- " eth0 username user_rtp password password_rtp generation 2\r\n";
+ " eth0 ufrag user_rtp pwd password_rtp generation 2\r\n";
// Session id and version
static const char kSessionId[] = "18446744069414584320";
@@ -523,10 +530,14 @@ static void ReplaceDirection(cricket::MediaContentDirection direction,
static void ReplaceRejected(bool audio_rejected, bool video_rejected,
std::string* message) {
if (audio_rejected) {
- Replace("m=audio 2345", "m=audio 0", message);
+ Replace("m=audio 9", "m=audio 0", message);
+ Replace(kAttributeIceUfragVoice, "", message);
+ Replace(kAttributeIcePwdVoice, "", message);
}
if (video_rejected) {
- Replace("m=video 3457", "m=video 0", message);
+ Replace("m=video 9", "m=video 0", message);
+ Replace(kAttributeIceUfragVideo, "", message);
+ Replace(kAttributeIcePwdVideo, "", message);
}
}
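
ReplaceRejected mirrors RFC 3264: a rejected stream keeps its m-line but with port zero, and (after this change) its now-meaningless ICE credentials are stripped. Schematically, for the audio section of the reference SDP:

    m=audio 9              ->  m=audio 0   (9 is the discard port, used when
                                            no candidates are included)
    a=ice-ufrag:ufrag_voice -> (line removed)
    a=ice-pwd:pwd_voice     -> (line removed)
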
@@ -536,6 +547,9 @@ class WebRtcSdpTest : public testing::Test {
public:
WebRtcSdpTest()
: jdesc_(kDummyString) {
+#ifdef WEBRTC_ANDROID
+ webrtc::InitializeAndroidObjects();
+#endif
// AudioContentDescription
audio_desc_ = CreateAudioContentDescription();
AudioCodec opus(111, "opus", 48000, 0, 2, 3);
@@ -704,6 +718,7 @@ class WebRtcSdpTest : public testing::Test {
AudioContentDescription* CreateAudioContentDescription() {
AudioContentDescription* audio = new AudioContentDescription();
audio->set_rtcp_mux(true);
+ audio->set_rtcp_reduced_size(true);
StreamParams audio_stream1;
audio_stream1.id = kAudioTrackId1;
audio_stream1.cname = kStream1Cname;
@@ -735,6 +750,9 @@ class WebRtcSdpTest : public testing::Test {
// rtcp_mux
EXPECT_EQ(cd1->rtcp_mux(), cd2->rtcp_mux());
+ // rtcp_reduced_size
+ EXPECT_EQ(cd1->rtcp_reduced_size(), cd2->rtcp_reduced_size());
+
// cryptos
EXPECT_EQ(cd1->cryptos().size(), cd2->cryptos().size());
if (cd1->cryptos().size() != cd2->cryptos().size()) {
@@ -979,6 +997,18 @@ class WebRtcSdpTest : public testing::Test {
desc_.AddTransportInfo(transport_info);
}
+ void SetIceUfragPwd(const std::string& content_name,
+ const std::string& ice_ufrag,
+ const std::string& ice_pwd) {
+ ASSERT_TRUE(desc_.GetTransportInfoByName(content_name) != NULL);
+ cricket::TransportInfo transport_info =
+ *(desc_.GetTransportInfoByName(content_name));
+ desc_.RemoveTransportInfoByName(content_name);
+ transport_info.description.ice_ufrag = ice_ufrag;
+ transport_info.description.ice_pwd = ice_pwd;
+ desc_.AddTransportInfo(transport_info);
+ }
+
void AddFingerprint() {
desc_.RemoveTransportInfoByName(kAudioContentName);
desc_.RemoveTransportInfoByName(kVideoContentName);
@@ -1050,15 +1080,22 @@ class WebRtcSdpTest : public testing::Test {
audio_desc_);
desc_.AddContent(kVideoContentName, NS_JINGLE_RTP, video_rejected,
video_desc_);
- std::string new_sdp = kSdpFullString;
+ SetIceUfragPwd(kAudioContentName,
+ audio_rejected ? "" : kCandidateUfragVoice,
+ audio_rejected ? "" : kCandidatePwdVoice);
+ SetIceUfragPwd(kVideoContentName,
+ video_rejected ? "" : kCandidateUfragVideo,
+ video_rejected ? "" : kCandidatePwdVideo);
+
+ std::string new_sdp = kSdpString;
ReplaceRejected(audio_rejected, video_rejected, &new_sdp);
- if (!jdesc_.Initialize(desc_.Copy(),
- jdesc_.session_id(),
- jdesc_.session_version())) {
+ JsepSessionDescription jdesc_no_candidates(kDummyString);
+ if (!jdesc_no_candidates.Initialize(desc_.Copy(), kSessionId,
+ kSessionVersion)) {
return false;
}
- std::string message = webrtc::SdpSerialize(jdesc_);
+ std::string message = webrtc::SdpSerialize(jdesc_no_candidates);
EXPECT_EQ(new_sdp, message);
return true;
}
@@ -1121,11 +1158,11 @@ class WebRtcSdpTest : public testing::Test {
}
bool TestDeserializeRejected(bool audio_rejected, bool video_rejected) {
- std::string new_sdp = kSdpFullString;
+ std::string new_sdp = kSdpString;
ReplaceRejected(audio_rejected, video_rejected, &new_sdp);
JsepSessionDescription new_jdesc(JsepSessionDescription::kOffer);
-
EXPECT_TRUE(SdpDeserialize(new_sdp, &new_jdesc));
+
audio_desc_ = static_cast<AudioContentDescription*>(
audio_desc_->Copy());
video_desc_ = static_cast<VideoContentDescription*>(
@@ -1136,12 +1173,18 @@ class WebRtcSdpTest : public testing::Test {
audio_desc_);
desc_.AddContent(kVideoContentName, NS_JINGLE_RTP, video_rejected,
video_desc_);
- if (!jdesc_.Initialize(desc_.Copy(),
- jdesc_.session_id(),
- jdesc_.session_version())) {
+ SetIceUfragPwd(kAudioContentName,
+ audio_rejected ? "" : kCandidateUfragVoice,
+ audio_rejected ? "" : kCandidatePwdVoice);
+ SetIceUfragPwd(kVideoContentName,
+ video_rejected ? "" : kCandidateUfragVideo,
+ video_rejected ? "" : kCandidatePwdVideo);
+ JsepSessionDescription jdesc_no_candidates(kDummyString);
+ if (!jdesc_no_candidates.Initialize(desc_.Copy(), jdesc_.session_id(),
+ jdesc_.session_version())) {
return false;
}
- EXPECT_TRUE(CompareSessionDescription(jdesc_, new_jdesc));
+ EXPECT_TRUE(CompareSessionDescription(jdesc_no_candidates, new_jdesc));
return true;
}
@@ -1540,8 +1583,8 @@ TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithFingerprintNoCryptos) {
TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithoutCandidates) {
// JsepSessionDescription with desc but without candidates.
JsepSessionDescription jdesc_no_candidates(kDummyString);
- ASSERT_TRUE(jdesc_no_candidates.Initialize(desc_.Copy(),
- kSessionId, kSessionVersion));
+ ASSERT_TRUE(jdesc_no_candidates.Initialize(desc_.Copy(), kSessionId,
+ kSessionVersion));
std::string message = webrtc::SdpSerialize(jdesc_no_candidates);
EXPECT_EQ(std::string(kSdpString), message);
}
@@ -1721,6 +1764,13 @@ TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithExtmap) {
TEST_F(WebRtcSdpTest, SerializeCandidates) {
std::string message = webrtc::SdpSerializeCandidate(*jcandidate_);
EXPECT_EQ(std::string(kRawCandidate), message);
+
+ Candidate candidate_with_ufrag(candidates_.front());
+ candidate_with_ufrag.set_username("ABC");
+ jcandidate_.reset(new JsepIceCandidate(std::string("audio_content_name"), 0,
+ candidate_with_ufrag));
+ message = webrtc::SdpSerializeCandidate(*jcandidate_);
+ EXPECT_EQ(std::string(kRawCandidate) + " ufrag ABC", message);
}
// TODO(mallinath) : Enable this test once WebRTCSdp capable of parsing
@@ -2317,9 +2367,10 @@ TEST_F(WebRtcSdpTest, DeserializeCandidateWithDifferentTransport) {
EXPECT_TRUE(jcandidate.candidate().IsEquivalent(jcandidate_->candidate()));
}
-TEST_F(WebRtcSdpTest, DeserializeCandidateOldFormat) {
+TEST_F(WebRtcSdpTest, DeserializeCandidateWithUfragPwd) {
JsepIceCandidate jcandidate(kDummyMid, kDummyIndex);
- EXPECT_TRUE(SdpDeserializeCandidate(kSdpOneCandidateOldFormat,&jcandidate));
+ EXPECT_TRUE(
+ SdpDeserializeCandidate(kSdpOneCandidateWithUfragPwd, &jcandidate));
EXPECT_EQ(kDummyMid, jcandidate.sdp_mid());
EXPECT_EQ(kDummyIndex, jcandidate.sdp_mline_index());
Candidate ref_candidate = jcandidate_->candidate();
diff --git a/talk/app/webrtc/webrtcsession.cc b/talk/app/webrtc/webrtcsession.cc
index 95abeab77a..d8f76379c1 100644
--- a/talk/app/webrtc/webrtcsession.cc
+++ b/talk/app/webrtc/webrtcsession.cc
@@ -30,13 +30,13 @@
#include <limits.h>
#include <algorithm>
-#include <vector>
#include <set>
+#include <utility>
+#include <vector>
#include "talk/app/webrtc/jsepicecandidate.h"
#include "talk/app/webrtc/jsepsessiondescription.h"
#include "talk/app/webrtc/mediaconstraintsinterface.h"
-#include "talk/app/webrtc/mediastreamsignaling.h"
#include "talk/app/webrtc/peerconnectioninterface.h"
#include "talk/app/webrtc/sctputils.h"
#include "talk/app/webrtc/webrtcsessiondescriptionfactory.h"
@@ -45,6 +45,7 @@
#include "talk/session/media/channel.h"
#include "talk/session/media/channelmanager.h"
#include "talk/session/media/mediasession.h"
+#include "webrtc/audio/audio_sink.h"
#include "webrtc/base/basictypes.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/helpers.h"
@@ -441,10 +442,11 @@ static std::string MakeTdErrorString(const std::string& desc) {
// Set |option| to the highest-priority value of |key| in the optional
// constraints if the key is found and has a valid value.
-template<typename T>
+template <typename T>
static void SetOptionFromOptionalConstraint(
const MediaConstraintsInterface* constraints,
- const std::string& key, cricket::Settable<T>* option) {
+ const std::string& key,
+ rtc::Optional<T>* option) {
if (!constraints) {
return;
}
@@ -452,7 +454,7 @@ static void SetOptionFromOptionalConstraint(
T value;
if (constraints->GetOptional().FindFirst(key, &string_value)) {
if (rtc::FromString(string_value, &value)) {
- option->Set(value);
+ *option = rtc::Optional<T>(value);
}
}
}
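
cricket::Settable<T> is being replaced by rtc::Optional<T> throughout this CL; the new type has no Set/Get/IsSet methods, just explicit construction, operator bool and value accessors. A usage sketch:

    rtc::Optional<bool> dscp;             // unset by default
    if (!dscp) {
      dscp = rtc::Optional<bool>(true);   // set (no implicit conversion)
    }
    bool v = dscp.value_or(false);        // read with a default, as the
                                          // tests below do with value_or(-1)
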
@@ -492,9 +494,13 @@ class IceRestartAnswerLatch {
}
}
+ // This method has two purposes: 1. Return whether |new_desc| requests
+ // an ICE restart (i.e., new ufrag/pwd). 2. If it requests an ICE restart
+ // and it is an OFFER, remember this in |ice_restart_| so that the next
+ // Local Answer will be created with new ufrag and pwd.
bool CheckForRemoteIceRestart(const SessionDescriptionInterface* old_desc,
const SessionDescriptionInterface* new_desc) {
- if (!old_desc || new_desc->type() != SessionDescriptionInterface::kOffer) {
+ if (!old_desc) {
return false;
}
const SessionDescription* new_sd = new_desc->description();
@@ -520,7 +526,9 @@ class IceRestartAnswerLatch {
new_transport_desc->ice_ufrag,
new_transport_desc->ice_pwd)) {
LOG(LS_INFO) << "Remote peer requested ICE restart.";
- ice_restart_ = true;
+ if (new_desc->type() == SessionDescriptionInterface::kOffer) {
+ ice_restart_ = true;
+ }
return true;
}
}
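
Per RFC 5245 section 9.1.1.1, an ICE restart is signalled by changing the ufrag and password for a media stream. The comparison behind IceCredentialsChanged amounts to the following (a sketch; the real helper lives in the transport code):

    bool IceCredentialsChanged(const std::string& old_ufrag,
                               const std::string& old_pwd,
                               const std::string& new_ufrag,
                               const std::string& new_pwd) {
      // Either changed credential is treated as a restart request.
      return old_ufrag != new_ufrag || old_pwd != new_pwd;
    }
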
@@ -593,6 +601,8 @@ bool WebRtcSession::Initialize(
const PeerConnectionInterface::RTCConfiguration& rtc_configuration) {
bundle_policy_ = rtc_configuration.bundle_policy;
rtcp_mux_policy_ = rtc_configuration.rtcp_mux_policy;
+ video_options_.disable_prerenderer_smoothing =
+ rtc::Optional<bool>(rtc_configuration.disable_prerenderer_smoothing);
transport_controller_->SetSslMaxProtocolVersion(options.ssl_max_version);
// Obtain a certificate from RTCConfiguration if any were provided (optional).
@@ -644,8 +654,8 @@ bool WebRtcSession::Initialize(
constraints,
MediaConstraintsInterface::kEnableDscp,
&value, NULL)) {
- audio_options_.dscp.Set(value);
- video_options_.dscp.Set(value);
+ audio_options_.dscp = rtc::Optional<bool>(value);
+ video_options_.dscp = rtc::Optional<bool>(value);
}
// Find Suspend Below Min Bitrate constraint.
@@ -654,7 +664,7 @@ bool WebRtcSession::Initialize(
MediaConstraintsInterface::kEnableVideoSuspendBelowMinBitrate,
&value,
NULL)) {
- video_options_.suspend_below_min_bitrate.Set(value);
+ video_options_.suspend_below_min_bitrate = rtc::Optional<bool>(value);
}
SetOptionFromOptionalConstraint(constraints,
@@ -684,12 +694,10 @@ bool WebRtcSession::Initialize(
SetOptionFromOptionalConstraint(constraints,
MediaConstraintsInterface::kNumUnsignalledRecvStreams,
&video_options_.unsignalled_recv_stream_limit);
- if (video_options_.unsignalled_recv_stream_limit.IsSet()) {
- int stream_limit;
- video_options_.unsignalled_recv_stream_limit.Get(&stream_limit);
- stream_limit = std::min(kMaxUnsignalledRecvStreams, stream_limit);
- stream_limit = std::max(0, stream_limit);
- video_options_.unsignalled_recv_stream_limit.Set(stream_limit);
+ if (video_options_.unsignalled_recv_stream_limit) {
+ video_options_.unsignalled_recv_stream_limit = rtc::Optional<int>(
+ std::max(0, std::min(kMaxUnsignalledRecvStreams,
+ *video_options_.unsignalled_recv_stream_limit)));
}
SetOptionFromOptionalConstraint(constraints,
@@ -700,22 +708,12 @@ bool WebRtcSession::Initialize(
MediaConstraintsInterface::kCombinedAudioVideoBwe,
&audio_options_.combined_audio_video_bwe);
- audio_options_.audio_jitter_buffer_max_packets.Set(
- rtc_configuration.audio_jitter_buffer_max_packets);
+ audio_options_.audio_jitter_buffer_max_packets =
+ rtc::Optional<int>(rtc_configuration.audio_jitter_buffer_max_packets);
- audio_options_.audio_jitter_buffer_fast_accelerate.Set(
+ audio_options_.audio_jitter_buffer_fast_accelerate = rtc::Optional<bool>(
rtc_configuration.audio_jitter_buffer_fast_accelerate);
- const cricket::VideoCodec default_codec(
- JsepSessionDescription::kDefaultVideoCodecId,
- JsepSessionDescription::kDefaultVideoCodecName,
- JsepSessionDescription::kMaxVideoCodecWidth,
- JsepSessionDescription::kMaxVideoCodecHeight,
- JsepSessionDescription::kDefaultVideoCodecFramerate,
- JsepSessionDescription::kDefaultVideoCodecPreference);
- channel_manager_->SetDefaultVideoEncoderConfig(
- cricket::VideoEncoderConfig(default_codec));
-
if (!dtls_enabled_) {
// Construct with DTLS disabled.
webrtc_session_desc_factory_.reset(new WebRtcSessionDescriptionFactory(
@@ -726,7 +724,7 @@ bool WebRtcSession::Initialize(
// Use the |dtls_identity_store| to generate a certificate.
RTC_DCHECK(dtls_identity_store);
webrtc_session_desc_factory_.reset(new WebRtcSessionDescriptionFactory(
- signaling_thread(), channel_manager_, dtls_identity_store.Pass(),
+ signaling_thread(), channel_manager_, std::move(dtls_identity_store),
this, id()));
} else {
// Use the already generated certificate.
@@ -744,12 +742,6 @@ bool WebRtcSession::Initialize(
port_allocator()->set_candidate_filter(
ConvertIceTransportTypeToCandidateFilter(rtc_configuration.type));
- if (rtc_configuration.enable_localhost_ice_candidate) {
- port_allocator()->set_flags(
- port_allocator()->flags() |
- cricket::PORTALLOCATOR_ENABLE_LOCALHOST_CANDIDATE);
- }
-
return true;
}
@@ -769,14 +761,20 @@ cricket::SecurePolicy WebRtcSession::SdesPolicy() const {
return webrtc_session_desc_factory_->SdesPolicy();
}
-bool WebRtcSession::GetSslRole(rtc::SSLRole* role) {
+bool WebRtcSession::GetSslRole(const std::string& transport_name,
+ rtc::SSLRole* role) {
if (!local_desc_ || !remote_desc_) {
LOG(LS_INFO) << "Local and Remote descriptions must be applied to get "
<< "SSL Role of the session.";
return false;
}
- return transport_controller_->GetSslRole(role);
+ return transport_controller_->GetSslRole(transport_name, role);
+}
+
+bool WebRtcSession::GetSslRole(const cricket::BaseChannel* channel,
+ rtc::SSLRole* role) {
+ return channel && GetSslRole(channel->transport_name(), role);
}
void WebRtcSession::CreateOffer(
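
GetSslRole now takes a transport (or channel) because, until BUNDLE completes, each m= section can negotiate its own DTLS role, as the new webrtcsession unit test below demonstrates. A usage sketch with the channel overload added here:

    rtc::SSLRole role;
    if (session->GetSslRole(session->voice_channel(), &role)) {
      // SSL_SERVER corresponds to the a=setup:passive side.
      bool prefer_passive = (role == rtc::SSL_SERVER);
    }
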
@@ -978,15 +976,12 @@ bool WebRtcSession::UpdateSessionState(
return BadPranswerSdp(source, GetSessionErrorMsg(), err_desc);
}
} else if (action == kAnswer) {
- if (!PushdownTransportDescription(source, cricket::CA_ANSWER, &td_err)) {
- return BadAnswerSdp(source, MakeTdErrorString(td_err), err_desc);
- }
const cricket::ContentGroup* local_bundle =
local_desc_->description()->GetGroupByName(cricket::GROUP_TYPE_BUNDLE);
const cricket::ContentGroup* remote_bundle =
remote_desc_->description()->GetGroupByName(cricket::GROUP_TYPE_BUNDLE);
if (local_bundle && remote_bundle) {
- // The answerer decides the transport to bundle on
+ // The answerer decides the transport to bundle on.
const cricket::ContentGroup* answer_bundle =
(source == cricket::CS_LOCAL ? local_bundle : remote_bundle);
if (!EnableBundle(*answer_bundle)) {
@@ -994,6 +989,11 @@ bool WebRtcSession::UpdateSessionState(
return BadAnswerSdp(source, kEnableBundleFailed, err_desc);
}
}
+ // Only push down the transport description after enabling BUNDLE; we don't
+ // want to push down a description on a transport about to be destroyed.
+ if (!PushdownTransportDescription(source, cricket::CA_ANSWER, &td_err)) {
+ return BadAnswerSdp(source, MakeTdErrorString(td_err), err_desc);
+ }
EnableChannels();
SetState(STATE_INPROGRESS);
if (!PushdownMediaDescription(cricket::CA_ANSWER, source, err_desc)) {
@@ -1250,6 +1250,8 @@ cricket::IceConfig WebRtcSession::ParseIceConfig(
const PeerConnectionInterface::RTCConfiguration& config) const {
cricket::IceConfig ice_config;
ice_config.receiving_timeout_ms = config.ice_connection_receiving_timeout;
+ ice_config.backup_connection_ping_interval =
+ config.ice_backup_candidate_pair_ping_interval;
ice_config.gather_continually = (config.continual_gathering_policy ==
PeerConnectionInterface::GATHER_CONTINUALLY);
return ice_config;
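
ice_backup_candidate_pair_ping_interval plumbs a new RTCConfiguration knob down to the ICE layer. A configuration sketch (the interval value is illustrative, not a default from the tree):

    PeerConnectionInterface::RTCConfiguration config;
    config.ice_backup_candidate_pair_ping_interval = 2000;  // ms, illustrative
    cricket::IceConfig ice = session->ParseIceConfig(config);
    // ice.backup_connection_ping_interval == 2000
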
@@ -1326,6 +1328,15 @@ void WebRtcSession::SetAudioPlayoutVolume(uint32_t ssrc, double volume) {
}
}
+void WebRtcSession::SetRawAudioSink(uint32_t ssrc,
+ rtc::scoped_ptr<AudioSinkInterface> sink) {
+ ASSERT(signaling_thread()->IsCurrent());
+ if (!voice_channel_)
+ return;
+
+ voice_channel_->SetRawAudioSink(ssrc, std::move(sink));
+}
+
bool WebRtcSession::SetCaptureDevice(uint32_t ssrc,
cricket::VideoCapturer* camera) {
ASSERT(signaling_thread()->IsCurrent());
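
SetRawAudioSink forwards a one-shot sink to the voice channel, transferring ownership via scoped_ptr and std::move (one of many Pass()-to-std::move conversions in this CL). A usage sketch; MyAudioSink is a hypothetical AudioSinkInterface implementation:

    rtc::scoped_ptr<webrtc::AudioSinkInterface> sink(new MyAudioSink());
    session->SetRawAudioSink(ssrc, std::move(sink));
    // |sink| is null here; the voice channel owns the object now.
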
@@ -1409,8 +1420,7 @@ bool WebRtcSession::InsertDtmf(const std::string& track_id,
LOG(LS_ERROR) << "InsertDtmf: Track does not exist: " << track_id;
return false;
}
- if (!voice_channel_->InsertDtmf(send_ssrc, code, duration,
- cricket::DF_SEND)) {
+ if (!voice_channel_->InsertDtmf(send_ssrc, code, duration)) {
LOG(LS_ERROR) << "Failed to insert DTMF to channel.";
return false;
}
@@ -1747,7 +1757,6 @@ void WebRtcSession::RemoveUnusedChannels(const SessionDescription* desc) {
cricket::GetFirstVideoContent(desc);
if ((!video_info || video_info->rejected) && video_channel_) {
SignalVideoChannelDestroyed();
- const std::string content_name = video_channel_->content_name();
channel_manager_->DestroyVideoChannel(video_channel_.release());
}
@@ -1755,7 +1764,6 @@ void WebRtcSession::RemoveUnusedChannels(const SessionDescription* desc) {
cricket::GetFirstAudioContent(desc);
if ((!voice_info || voice_info->rejected) && voice_channel_) {
SignalVoiceChannelDestroyed();
- const std::string content_name = voice_channel_->content_name();
channel_manager_->DestroyVoiceChannel(voice_channel_.release());
}
@@ -1763,7 +1771,6 @@ void WebRtcSession::RemoveUnusedChannels(const SessionDescription* desc) {
cricket::GetFirstDataContent(desc);
if ((!data_info || data_info->rejected) && data_channel_) {
SignalDataChannelDestroyed();
- const std::string content_name = data_channel_->content_name();
channel_manager_->DestroyDataChannel(data_channel_.release());
}
}
@@ -2164,9 +2171,10 @@ void WebRtcSession::ReportNegotiatedCiphers(
return;
}
- const std::string& srtp_cipher = stats.channel_stats[0].srtp_cipher;
- int ssl_cipher = stats.channel_stats[0].ssl_cipher;
- if (srtp_cipher.empty() && !ssl_cipher) {
+ int srtp_crypto_suite = stats.channel_stats[0].srtp_crypto_suite;
+ int ssl_cipher_suite = stats.channel_stats[0].ssl_cipher_suite;
+ if (srtp_crypto_suite == rtc::SRTP_INVALID_CRYPTO_SUITE &&
+ ssl_cipher_suite == rtc::TLS_NULL_WITH_NULL_NULL) {
return;
}
@@ -2186,12 +2194,13 @@ void WebRtcSession::ReportNegotiatedCiphers(
return;
}
- if (!srtp_cipher.empty()) {
- metrics_observer_->IncrementSparseEnumCounter(
- srtp_counter_type, rtc::GetSrtpCryptoSuiteFromName(srtp_cipher));
+ if (srtp_crypto_suite != rtc::SRTP_INVALID_CRYPTO_SUITE) {
+ metrics_observer_->IncrementSparseEnumCounter(srtp_counter_type,
+ srtp_crypto_suite);
}
- if (ssl_cipher) {
- metrics_observer_->IncrementSparseEnumCounter(ssl_counter_type, ssl_cipher);
+ if (ssl_cipher_suite != rtc::TLS_NULL_WITH_NULL_NULL) {
+ metrics_observer_->IncrementSparseEnumCounter(ssl_counter_type,
+ ssl_cipher_suite);
}
}
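
The metrics reporting now compares numeric suite identifiers rather than strings. Both sentinels mean "nothing negotiated yet":

    // Sentinel meanings (values per sslstreamadapter.h and the IANA TLS
    // registry; stated for reference, hedged):
    //   rtc::SRTP_INVALID_CRYPTO_SUITE  -> no SRTP crypto suite negotiated
    //   rtc::TLS_NULL_WITH_NULL_NULL    -> 0x0000, no TLS cipher negotiated
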
diff --git a/talk/app/webrtc/webrtcsession.h b/talk/app/webrtc/webrtcsession.h
index d9c40d1a83..b79e0ec270 100644
--- a/talk/app/webrtc/webrtcsession.h
+++ b/talk/app/webrtc/webrtcsession.h
@@ -38,11 +38,11 @@
#include "talk/app/webrtc/peerconnectioninterface.h"
#include "talk/app/webrtc/statstypes.h"
#include "talk/media/base/mediachannel.h"
-#include "webrtc/p2p/base/transportcontroller.h"
#include "talk/session/media/mediasession.h"
#include "webrtc/base/sigslot.h"
#include "webrtc/base/sslidentity.h"
#include "webrtc/base/thread.h"
+#include "webrtc/p2p/base/transportcontroller.h"
namespace cricket {
@@ -204,7 +204,11 @@ class WebRtcSession : public AudioProviderInterface,
cricket::SecurePolicy SdesPolicy() const;
// Get current ssl role from transport.
- bool GetSslRole(rtc::SSLRole* role);
+ bool GetSslRole(const std::string& transport_name, rtc::SSLRole* role);
+
+ // Get current SSL role for this channel's transport.
+ // If |transport| is null, returns false.
+ bool GetSslRole(const cricket::BaseChannel* channel, rtc::SSLRole* role);
void CreateOffer(
CreateSessionDescriptionObserver* observer,
@@ -250,6 +254,8 @@ class WebRtcSession : public AudioProviderInterface,
const cricket::AudioOptions& options,
cricket::AudioRenderer* renderer) override;
void SetAudioPlayoutVolume(uint32_t ssrc, double volume) override;
+ void SetRawAudioSink(uint32_t ssrc,
+ rtc::scoped_ptr<AudioSinkInterface> sink) override;
// Implements VideoMediaProviderInterface.
bool SetCaptureDevice(uint32_t ssrc, cricket::VideoCapturer* camera) override;
diff --git a/talk/app/webrtc/webrtcsession_unittest.cc b/talk/app/webrtc/webrtcsession_unittest.cc
index 3eb46f1d3c..e81b8b5b54 100644
--- a/talk/app/webrtc/webrtcsession_unittest.cc
+++ b/talk/app/webrtc/webrtcsession_unittest.cc
@@ -25,6 +25,7 @@
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
+#include <utility>
#include <vector>
#include "talk/app/webrtc/audiotrack.h"
@@ -33,7 +34,6 @@
#include "talk/app/webrtc/jsepicecandidate.h"
#include "talk/app/webrtc/jsepsessiondescription.h"
#include "talk/app/webrtc/peerconnection.h"
-#include "talk/app/webrtc/mediastreamsignaling.h"
#include "talk/app/webrtc/sctputils.h"
#include "talk/app/webrtc/streamcollection.h"
#include "talk/app/webrtc/streamcollection.h"
@@ -72,8 +72,6 @@
return; \
}
-using cricket::DF_PLAY;
-using cricket::DF_SEND;
using cricket::FakeVoiceMediaChannel;
using cricket::TransportInfo;
using rtc::SocketAddress;
@@ -173,15 +171,6 @@ static const char kAudioTrack2[] = "audio2";
enum RTCCertificateGenerationMethod { ALREADY_GENERATED, DTLS_IDENTITY_STORE };
-// Add some extra |newlines| to the |message| after |line|.
-static void InjectAfter(const std::string& line,
- const std::string& newlines,
- std::string* message) {
- const std::string tmp = line + newlines;
- rtc::replace_substrs(line.c_str(), line.length(), tmp.c_str(), tmp.length(),
- message);
-}
-
class MockIceObserver : public webrtc::IceObserver {
public:
MockIceObserver()
@@ -428,7 +417,7 @@ class WebRtcSessionTest
observer_.ice_gathering_state_);
EXPECT_TRUE(session_->Initialize(options_, constraints_.get(),
- dtls_identity_store.Pass(),
+ std::move(dtls_identity_store),
rtc_configuration));
session_->set_metrics_observer(metrics_observer_);
}
@@ -479,7 +468,7 @@ class WebRtcSessionTest
} else {
RTC_CHECK(false);
}
- Init(dtls_identity_store.Pass(), configuration);
+ Init(std::move(dtls_identity_store), configuration);
}
// Init with DTLS with a store that will fail to generate a certificate.
@@ -488,7 +477,7 @@ class WebRtcSessionTest
new FakeDtlsIdentityStore());
dtls_identity_store->set_should_fail(true);
PeerConnectionInterface::RTCConfiguration configuration;
- Init(dtls_identity_store.Pass(), configuration);
+ Init(std::move(dtls_identity_store), configuration);
}
void InitWithDtmfCodec() {
@@ -726,9 +715,9 @@ class WebRtcSessionTest
std::string identity_name = "WebRTC" +
rtc::ToString(rtc::CreateRandomId());
// Confirmed to work with KT_RSA and KT_ECDSA.
- tdesc_factory_->set_certificate(rtc::RTCCertificate::Create(
- rtc::scoped_ptr<rtc::SSLIdentity>(rtc::SSLIdentity::Generate(
- identity_name, rtc::KT_DEFAULT)).Pass()));
+ tdesc_factory_->set_certificate(
+ rtc::RTCCertificate::Create(rtc::scoped_ptr<rtc::SSLIdentity>(
+ rtc::SSLIdentity::Generate(identity_name, rtc::KT_DEFAULT))));
tdesc_factory_->set_secure(cricket::SEC_REQUIRED);
}
@@ -789,7 +778,7 @@ class WebRtcSessionTest
ASSERT_TRUE(video_channel_ != NULL);
const cricket::VideoOptions& video_options = video_channel_->options();
EXPECT_EQ(value_expected,
- video_options.unsignalled_recv_stream_limit.GetWithDefaultIfUnset(-1));
+ video_options.unsignalled_recv_stream_limit.value_or(-1));
}
void CompareIceUfragAndPassword(const cricket::SessionDescription* desc1,
@@ -1442,12 +1431,12 @@ class WebRtcSessionTest
}
void ConfigureAllocatorWithTurn() {
- cricket::RelayServerConfig relay_server(cricket::RELAY_TURN);
+ cricket::RelayServerConfig turn_server(cricket::RELAY_TURN);
cricket::RelayCredentials credentials(kTurnUsername, kTurnPassword);
- relay_server.credentials = credentials;
- relay_server.ports.push_back(cricket::ProtocolAddress(
- kTurnUdpIntAddr, cricket::PROTO_UDP, false));
- allocator_->AddRelay(relay_server);
+ turn_server.credentials = credentials;
+ turn_server.ports.push_back(
+ cricket::ProtocolAddress(kTurnUdpIntAddr, cricket::PROTO_UDP, false));
+ allocator_->AddTurnServer(turn_server);
allocator_->set_step_delay(cricket::kMinimumStepDelay);
allocator_->set_flags(cricket::PORTALLOCATOR_DISABLE_TCP);
}
@@ -1968,6 +1957,67 @@ TEST_P(WebRtcSessionTest, TestCreateAnswerReceiveOfferWithoutEncryption) {
SetLocalDescriptionWithoutError(answer);
}
+// Test that we can create and set an answer correctly when different
+// SSL roles have been negotiated for different transports.
+// See: https://bugs.chromium.org/p/webrtc/issues/detail?id=4525
+TEST_P(WebRtcSessionTest, TestCreateAnswerWithDifferentSslRoles) {
+ SendAudioVideoStream1();
+ InitWithDtls(GetParam());
+ SetFactoryDtlsSrtp();
+
+ SessionDescriptionInterface* offer = CreateOffer();
+ SetLocalDescriptionWithoutError(offer);
+
+ cricket::MediaSessionOptions options;
+ options.recv_video = true;
+
+ // First, negotiate different SSL roles.
+ SessionDescriptionInterface* answer =
+ CreateRemoteAnswer(offer, options, cricket::SEC_DISABLED);
+ TransportInfo* audio_transport_info =
+ answer->description()->GetTransportInfoByName("audio");
+ audio_transport_info->description.connection_role =
+ cricket::CONNECTIONROLE_ACTIVE;
+ TransportInfo* video_transport_info =
+ answer->description()->GetTransportInfoByName("video");
+ video_transport_info->description.connection_role =
+ cricket::CONNECTIONROLE_PASSIVE;
+ SetRemoteDescriptionWithoutError(answer);
+
+ // Now create an offer in the reverse direction, and ensure the initial
+ // offerer responds with an answer with correct SSL roles.
+ offer = CreateRemoteOfferWithVersion(options, cricket::SEC_DISABLED,
+ kSessionVersion,
+ session_->remote_description());
+ SetRemoteDescriptionWithoutError(offer);
+
+ answer = CreateAnswer(nullptr);
+ audio_transport_info = answer->description()->GetTransportInfoByName("audio");
+ EXPECT_EQ(cricket::CONNECTIONROLE_PASSIVE,
+ audio_transport_info->description.connection_role);
+ video_transport_info = answer->description()->GetTransportInfoByName("video");
+ EXPECT_EQ(cricket::CONNECTIONROLE_ACTIVE,
+ video_transport_info->description.connection_role);
+ SetLocalDescriptionWithoutError(answer);
+
+ // Lastly, start BUNDLE-ing on "audio", expecting that the "passive" role of
+ // audio is transferred over to video in the answer that completes the BUNDLE
+ // negotiation.
+ options.bundle_enabled = true;
+ offer = CreateRemoteOfferWithVersion(options, cricket::SEC_DISABLED,
+ kSessionVersion,
+ session_->remote_description());
+ SetRemoteDescriptionWithoutError(offer);
+ answer = CreateAnswer(nullptr);
+ audio_transport_info = answer->description()->GetTransportInfoByName("audio");
+ EXPECT_EQ(cricket::CONNECTIONROLE_PASSIVE,
+ audio_transport_info->description.connection_role);
+ video_transport_info = answer->description()->GetTransportInfoByName("video");
+ EXPECT_EQ(cricket::CONNECTIONROLE_PASSIVE,
+ video_transport_info->description.connection_role);
+ SetLocalDescriptionWithoutError(answer);
+}
+
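
The new test leans on the a=setup mapping from RFC 4145/5763: the side that signals a=setup:active takes the DTLS client role, a=setup:passive the server role, and prefer_passive_role keeps an already-negotiated SSL_SERVER role sticky across renegotiation. The mapping assumed (a sketch, not library code):

    //   CONNECTIONROLE_ACTIVE   -> that side acts as DTLS client (rtc::SSL_CLIENT)
    //   CONNECTIONROLE_PASSIVE  -> that side acts as DTLS server (rtc::SSL_SERVER)
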
TEST_F(WebRtcSessionTest, TestSetLocalOfferTwice) {
Init();
SendNothing();
@@ -2809,10 +2859,9 @@ TEST_F(WebRtcSessionTest, TestSetRemoteDescriptionInvalidIceCredentials) {
EXPECT_FALSE(session_->SetRemoteDescription(modified_offer, &error));
}
-// Test that if the remote description indicates the peer requested ICE restart
-// (via a new ufrag or pwd), the old ICE candidates are not copied,
-// and vice versa.
-TEST_F(WebRtcSessionTest, TestSetRemoteDescriptionWithIceRestart) {
+// Test that if the remote offer indicates the peer requested ICE restart (via
+// a new ufrag or pwd), the old ICE candidates are not copied, and vice versa.
+TEST_F(WebRtcSessionTest, TestSetRemoteOfferWithIceRestart) {
Init();
scoped_ptr<SessionDescriptionInterface> offer(CreateRemoteOffer());
@@ -2866,6 +2915,64 @@ TEST_F(WebRtcSessionTest, TestSetRemoteDescriptionWithIceRestart) {
EXPECT_EQ(0, session_->remote_description()->candidates(0)->count());
}
+// Test that if the remote answer indicates the peer requested ICE restart (via
+// a new ufrag or pwd), the old ICE candidates are not copied, and vice versa.
+TEST_F(WebRtcSessionTest, TestSetRemoteAnswerWithIceRestart) {
+ Init();
+ SessionDescriptionInterface* offer = CreateOffer();
+ SetLocalDescriptionWithoutError(offer);
+ scoped_ptr<SessionDescriptionInterface> answer(CreateRemoteAnswer(offer));
+
+ // Create the first answer.
+ std::string sdp;
+ ModifyIceUfragPwdLines(answer.get(), "0123456789012345",
+ "abcdefghijklmnopqrstuvwx", &sdp);
+ SessionDescriptionInterface* answer1 =
+ CreateSessionDescription(JsepSessionDescription::kPrAnswer, sdp, NULL);
+ cricket::Candidate candidate1(1, "udp", rtc::SocketAddress("1.1.1.1", 5000),
+ 0, "", "", "relay", 0, "");
+ JsepIceCandidate ice_candidate1(kMediaContentName0, kMediaContentIndex0,
+ candidate1);
+ EXPECT_TRUE(answer1->AddCandidate(&ice_candidate1));
+ SetRemoteDescriptionWithoutError(answer1);
+ EXPECT_EQ(1, session_->remote_description()->candidates(0)->count());
+
+ // The second answer has the same ufrag and pwd but different address.
+ sdp.clear();
+ ModifyIceUfragPwdLines(answer.get(), "0123456789012345",
+ "abcdefghijklmnopqrstuvwx", &sdp);
+ SessionDescriptionInterface* answer2 =
+ CreateSessionDescription(JsepSessionDescription::kPrAnswer, sdp, NULL);
+ candidate1.set_address(rtc::SocketAddress("1.1.1.1", 6000));
+ JsepIceCandidate ice_candidate2(kMediaContentName0, kMediaContentIndex0,
+ candidate1);
+ EXPECT_TRUE(answer2->AddCandidate(&ice_candidate2));
+ SetRemoteDescriptionWithoutError(answer2);
+ EXPECT_EQ(2, session_->remote_description()->candidates(0)->count());
+
+ // The third answer has a different ufrag and different address.
+ sdp.clear();
+ ModifyIceUfragPwdLines(answer.get(), "0123456789012333",
+ "abcdefghijklmnopqrstuvwx", &sdp);
+ SessionDescriptionInterface* answer3 =
+ CreateSessionDescription(JsepSessionDescription::kPrAnswer, sdp, NULL);
+ candidate1.set_address(rtc::SocketAddress("1.1.1.1", 7000));
+ JsepIceCandidate ice_candidate3(kMediaContentName0, kMediaContentIndex0,
+ candidate1);
+ EXPECT_TRUE(answer3->AddCandidate(&ice_candidate3));
+ SetRemoteDescriptionWithoutError(answer3);
+ EXPECT_EQ(1, session_->remote_description()->candidates(0)->count());
+
+ // The fourth answer has no candidate but a different ufrag/pwd.
+ sdp.clear();
+ ModifyIceUfragPwdLines(answer.get(), "0123456789012444",
+ "abcdefghijklmnopqrstuvyz", &sdp);
+ SessionDescriptionInterface* answer4 =
+ CreateSessionDescription(JsepSessionDescription::kPrAnswer, sdp, NULL);
+ SetRemoteDescriptionWithoutError(answer4);
+ EXPECT_EQ(0, session_->remote_description()->candidates(0)->count());
+}
+
// Test that candidates sent to the "video" transport do not get pushed down to
// the "audio" transport channel when bundling.
TEST_F(WebRtcSessionTest, TestIgnoreCandidatesForUnusedTransportWhenBundling) {
@@ -3297,20 +3404,18 @@ TEST_F(WebRtcSessionTest, SetAudioSend) {
EXPECT_FALSE(channel->IsStreamMuted(send_ssrc));
cricket::AudioOptions options;
- options.echo_cancellation.Set(true);
+ options.echo_cancellation = rtc::Optional<bool>(true);
rtc::scoped_ptr<FakeAudioRenderer> renderer(new FakeAudioRenderer());
session_->SetAudioSend(send_ssrc, false, options, renderer.get());
EXPECT_TRUE(channel->IsStreamMuted(send_ssrc));
- EXPECT_FALSE(channel->options().echo_cancellation.IsSet());
+ EXPECT_EQ(rtc::Optional<bool>(), channel->options().echo_cancellation);
EXPECT_TRUE(renderer->sink() != NULL);
// This will trigger SetSink(NULL) to the |renderer|.
session_->SetAudioSend(send_ssrc, true, options, NULL);
EXPECT_FALSE(channel->IsStreamMuted(send_ssrc));
- bool value;
- EXPECT_TRUE(channel->options().echo_cancellation.Get(&value));
- EXPECT_TRUE(value);
+ EXPECT_EQ(rtc::Optional<bool>(true), channel->options().echo_cancellation);
EXPECT_TRUE(renderer->sink() == NULL);
}
@@ -3387,7 +3492,6 @@ TEST_F(WebRtcSessionTest, InsertDtmf) {
EXPECT_EQ(0U, channel->dtmf_info_queue().size());
// Insert DTMF
- const int expected_flags = DF_SEND;
const int expected_duration = 90;
session_->InsertDtmf(kAudioTrack1, 0, expected_duration);
session_->InsertDtmf(kAudioTrack1, 1, expected_duration);
@@ -3397,11 +3501,11 @@ TEST_F(WebRtcSessionTest, InsertDtmf) {
ASSERT_EQ(3U, channel->dtmf_info_queue().size());
const uint32_t send_ssrc = channel->send_streams()[0].first_ssrc();
EXPECT_TRUE(CompareDtmfInfo(channel->dtmf_info_queue()[0], send_ssrc, 0,
- expected_duration, expected_flags));
+ expected_duration));
EXPECT_TRUE(CompareDtmfInfo(channel->dtmf_info_queue()[1], send_ssrc, 1,
- expected_duration, expected_flags));
+ expected_duration));
EXPECT_TRUE(CompareDtmfInfo(channel->dtmf_info_queue()[2], send_ssrc, 2,
- expected_duration, expected_flags));
+ expected_duration));
}
// This test verifies the |initial_offerer| flag when session initiates the
@@ -3582,7 +3686,9 @@ TEST_F(WebRtcSessionTest, TestCreateAnswerWithNewUfragAndPassword) {
SetLocalDescriptionWithoutError(answer.release());
// Receive an offer with new ufrag and password.
- options.transport_options.ice_restart = true;
+ options.audio_transport_options.ice_restart = true;
+ options.video_transport_options.ice_restart = true;
+ options.data_transport_options.ice_restart = true;
rtc::scoped_ptr<JsepSessionDescription> updated_offer1(
CreateRemoteOffer(options, session_->remote_description()));
SetRemoteDescriptionWithoutError(updated_offer1.release());
@@ -3613,7 +3719,9 @@ TEST_F(WebRtcSessionTest, TestCreateAnswerWithOldUfragAndPassword) {
SetLocalDescriptionWithoutError(answer.release());
// Receive an offer without changed ufrag or password.
- options.transport_options.ice_restart = false;
+ options.audio_transport_options.ice_restart = false;
+ options.video_transport_options.ice_restart = false;
+ options.data_transport_options.ice_restart = false;
rtc::scoped_ptr<JsepSessionDescription> updated_offer2(
CreateRemoteOffer(options, session_->remote_description()));
SetRemoteDescriptionWithoutError(updated_offer2.release());
@@ -3993,10 +4101,8 @@ TEST_F(WebRtcSessionTest, TestDscpConstraint) {
ASSERT_TRUE(voice_channel_ != NULL);
const cricket::AudioOptions& audio_options = voice_channel_->options();
const cricket::VideoOptions& video_options = video_channel_->options();
- EXPECT_TRUE(audio_options.dscp.IsSet());
- EXPECT_TRUE(audio_options.dscp.GetWithDefaultIfUnset(false));
- EXPECT_TRUE(video_options.dscp.IsSet());
- EXPECT_TRUE(video_options.dscp.GetWithDefaultIfUnset(false));
+ EXPECT_EQ(rtc::Optional<bool>(true), audio_options.dscp);
+ EXPECT_EQ(rtc::Optional<bool>(true), video_options.dscp);
}
TEST_F(WebRtcSessionTest, TestSuspendBelowMinBitrateConstraint) {
@@ -4014,8 +4120,7 @@ TEST_F(WebRtcSessionTest, TestSuspendBelowMinBitrateConstraint) {
ASSERT_TRUE(video_channel_ != NULL);
const cricket::VideoOptions& video_options = video_channel_->options();
- EXPECT_TRUE(
- video_options.suspend_below_min_bitrate.GetWithDefaultIfUnset(false));
+ EXPECT_EQ(rtc::Optional<bool>(true), video_options.suspend_below_min_bitrate);
}
TEST_F(WebRtcSessionTest, TestNumUnsignalledRecvStreamsConstraint) {
@@ -4042,8 +4147,7 @@ TEST_F(WebRtcSessionTest, TestCombinedAudioVideoBweConstraint) {
ASSERT_TRUE(voice_channel_ != NULL);
const cricket::AudioOptions& audio_options = voice_channel_->options();
- EXPECT_TRUE(
- audio_options.combined_audio_video_bwe.GetWithDefaultIfUnset(false));
+ EXPECT_EQ(rtc::Optional<bool>(true), audio_options.combined_audio_video_bwe);
}
// Tests that we can renegotiate new media content with ICE candidates in the
diff --git a/talk/app/webrtc/webrtcsessiondescriptionfactory.cc b/talk/app/webrtc/webrtcsessiondescriptionfactory.cc
index 25965af79d..f08b77eb40 100644
--- a/talk/app/webrtc/webrtcsessiondescriptionfactory.cc
+++ b/talk/app/webrtc/webrtcsessiondescriptionfactory.cc
@@ -27,6 +27,8 @@
#include "talk/app/webrtc/webrtcsessiondescriptionfactory.h"
+#include <utility>
+
#include "talk/app/webrtc/dtlsidentitystore.h"
#include "talk/app/webrtc/jsep.h"
#include "talk/app/webrtc/jsepsessiondescription.h"
@@ -99,12 +101,12 @@ void WebRtcIdentityRequestObserver::OnSuccess(
der_private_key.length());
rtc::scoped_ptr<rtc::SSLIdentity> identity(
rtc::SSLIdentity::FromPEMStrings(pem_key, pem_cert));
- SignalCertificateReady(rtc::RTCCertificate::Create(identity.Pass()));
+ SignalCertificateReady(rtc::RTCCertificate::Create(std::move(identity)));
}
void WebRtcIdentityRequestObserver::OnSuccess(
rtc::scoped_ptr<rtc::SSLIdentity> identity) {
- SignalCertificateReady(rtc::RTCCertificate::Create(identity.Pass()));
+ SignalCertificateReady(rtc::RTCCertificate::Create(std::move(identity)));
}
// static
@@ -143,7 +145,7 @@ WebRtcSessionDescriptionFactory::WebRtcSessionDescriptionFactory(
// to just use a random number as session id and start version from
// |kInitSessionVersion|.
session_version_(kInitSessionVersion),
- dtls_identity_store_(dtls_identity_store.Pass()),
+ dtls_identity_store_(std::move(dtls_identity_store)),
identity_request_observer_(identity_request_observer),
session_(session),
session_id_(session_id),
@@ -177,7 +179,7 @@ WebRtcSessionDescriptionFactory::WebRtcSessionDescriptionFactory(
: WebRtcSessionDescriptionFactory(
signaling_thread,
channel_manager,
- dtls_identity_store.Pass(),
+ std::move(dtls_identity_store),
new rtc::RefCountedObject<WebRtcIdentityRequestObserver>(),
session,
session_id,
@@ -390,7 +392,9 @@ void WebRtcSessionDescriptionFactory::InternalCreateOffer(
return;
}
if (session_->local_description() &&
- !request.options.transport_options.ice_restart) {
+ !request.options.audio_transport_options.ice_restart &&
+ !request.options.video_transport_options.ice_restart &&
+ !request.options.data_transport_options.ice_restart) {
// Include all local ice candidates in the SessionDescription unless
// an ICE restart has been requested.
CopyCandidatesFromSessionDescription(session_->local_description(), offer);
@@ -403,12 +407,25 @@ void WebRtcSessionDescriptionFactory::InternalCreateAnswer(
// According to http://tools.ietf.org/html/rfc5245#section-9.2.1.1
// an answer should also contain new ice ufrag and password if an offer has
// been received with new ufrag and password.
- request.options.transport_options.ice_restart = session_->IceRestartPending();
+ request.options.audio_transport_options.ice_restart =
+ session_->IceRestartPending();
+ request.options.video_transport_options.ice_restart =
+ session_->IceRestartPending();
+ request.options.data_transport_options.ice_restart =
+ session_->IceRestartPending();
// We should pass current ssl role to the transport description factory, if
// there is already an existing ongoing session.
rtc::SSLRole ssl_role;
- if (session_->GetSslRole(&ssl_role)) {
- request.options.transport_options.prefer_passive_role =
+ if (session_->GetSslRole(session_->voice_channel(), &ssl_role)) {
+ request.options.audio_transport_options.prefer_passive_role =
+ (rtc::SSL_SERVER == ssl_role);
+ }
+ if (session_->GetSslRole(session_->video_channel(), &ssl_role)) {
+ request.options.video_transport_options.prefer_passive_role =
+ (rtc::SSL_SERVER == ssl_role);
+ }
+ if (session_->GetSslRole(session_->data_channel(), &ssl_role)) {
+ request.options.data_transport_options.prefer_passive_role =
(rtc::SSL_SERVER == ssl_role);
}
@@ -437,7 +454,9 @@ void WebRtcSessionDescriptionFactory::InternalCreateAnswer(
return;
}
if (session_->local_description() &&
- !request.options.transport_options.ice_restart) {
+ !request.options.audio_transport_options.ice_restart &&
+ !request.options.video_transport_options.ice_restart &&
+ !request.options.data_transport_options.ice_restart) {
// Include all local ice candidates in the SessionDescription unless
// the remote peer has requested an ice restart.
CopyCandidatesFromSessionDescription(session_->local_description(), answer);
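
transport_options has been split per media type, so an ICE restart (and the passive-role preference) must now be applied to audio, video and data individually. A small hypothetical helper shows the shape of the triplication (not a function in the tree):

    void SetIceRestartFlags(cricket::MediaSessionOptions* options, bool restart) {
      options->audio_transport_options.ice_restart = restart;
      options->video_transport_options.ice_restart = restart;
      options->data_transport_options.ice_restart = restart;
    }
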
diff --git a/talk/build/common.gypi b/talk/build/common.gypi
index 36a96c5c55..061b06ba70 100644
--- a/talk/build/common.gypi
+++ b/talk/build/common.gypi
@@ -41,7 +41,6 @@
],
# Disable these to not build components which can be externally provided.
'build_expat%': 1,
- 'build_icu%': 1,
'build_json%': 1,
'build_libsrtp%': 1,
'build_libyuv%': 1,
@@ -61,17 +60,9 @@
'../../webrtc',
],
'defines': [
- 'EXPAT_RELATIVE_PATH',
- 'FEATURE_ENABLE_VOICEMAIL',
- 'GTEST_RELATIVE_PATH',
- 'JSONCPP_RELATIVE_PATH',
- 'LOGGING=1',
'SRTP_RELATIVE_PATH',
# Feature selection
- 'FEATURE_ENABLE_SSL',
- 'FEATURE_ENABLE_VOICEMAIL',
- 'FEATURE_ENABLE_PSTN',
'HAVE_SCTP',
'HAVE_SRTP',
'HAVE_WEBRTC_VIDEO',
@@ -80,7 +71,6 @@
'conditions': [
['OS=="linux"', {
'defines': [
- 'LINUX',
'WEBRTC_LINUX',
],
# Remove Chromium's disabling of the -Wformat warning.
@@ -112,7 +102,6 @@
}],
['OS=="mac"', {
'defines': [
- 'OSX',
'WEBRTC_MAC',
],
}],
@@ -129,7 +118,6 @@
}],
['OS=="ios"', {
'defines': [
- 'IOS',
'WEBRTC_MAC',
'WEBRTC_IOS',
],
diff --git a/talk/build/merge_ios_libs.gyp b/talk/build/merge_ios_libs.gyp
index 0c7114da14..f7e4875eba 100644
--- a/talk/build/merge_ios_libs.gyp
+++ b/talk/build/merge_ios_libs.gyp
@@ -27,7 +27,7 @@
{
'includes': ['common.gypi',],
'conditions': [
- ['OS=="ios" or (OS=="mac" and mac_sdk>="10.8")', {
+ ['OS=="ios" or OS=="mac"', {
'targets': [
{
'target_name': 'libjingle_peerconnection_objc_no_op',
diff --git a/talk/codereview.settings b/talk/codereview.settings
index 97bee14549..c441cc61bc 100644
--- a/talk/codereview.settings
+++ b/talk/codereview.settings
@@ -1,4 +1,5 @@
-Creating CLs from this location is not supported!
-Please create a full WebRTC checkout using 'fetch webrtc'
-or by cloning https://chromium.googlesource.com/external/webrtc
+Creating CLs from this location is not supported! Please make sure the current
+working directory is the parent directory of this directory.
+If you're working with a Chromium checkout, you'll have to create a full WebRTC
+checkout and upload a CL from that. See http://www.webrtc.org for instructions.
diff --git a/talk/libjingle.gyp b/talk/libjingle.gyp
index 81d723a0d9..6e0f8a3424 100755
--- a/talk/libjingle.gyp
+++ b/talk/libjingle.gyp
@@ -43,8 +43,8 @@
['OS=="linux" or OS=="android"', {
'targets': [
{
- 'target_name': 'libjingle_peerconnection_so',
- 'type': 'shared_library',
+ 'target_name': 'libjingle_peerconnection_jni',
+ 'type': 'static_library',
'dependencies': [
'<(webrtc_root)/system_wrappers/system_wrappers.gyp:field_trial_default',
'libjingle_peerconnection',
@@ -62,11 +62,55 @@
'<(libyuv_dir)/include',
],
'conditions': [
- ['build_icu==1', {
+ ['OS=="linux"', {
+ 'include_dirs': [
+ '<(java_home)/include',
+ '<(java_home)/include/linux',
+ ],
+ }],
+ ['build_json==1', {
'dependencies': [
- '<(DEPTH)/third_party/icu/icu.gyp:icuuc',
+ '<(DEPTH)/third_party/jsoncpp/jsoncpp.gyp:jsoncpp',
+ ],
+ 'export_dependent_settings': [
+ '<(DEPTH)/third_party/jsoncpp/jsoncpp.gyp:jsoncpp',
],
}],
+ ['OS=="android"', {
+ 'sources': [
+ 'app/webrtc/androidvideocapturer.cc',
+ 'app/webrtc/androidvideocapturer.h',
+ 'app/webrtc/java/jni/androidmediacodeccommon.h',
+ 'app/webrtc/java/jni/androidmediadecoder_jni.cc',
+ 'app/webrtc/java/jni/androidmediadecoder_jni.h',
+ 'app/webrtc/java/jni/androidmediaencoder_jni.cc',
+ 'app/webrtc/java/jni/androidmediaencoder_jni.h',
+ 'app/webrtc/java/jni/androidnetworkmonitor_jni.cc',
+ 'app/webrtc/java/jni/androidnetworkmonitor_jni.h',
+ 'app/webrtc/java/jni/androidvideocapturer_jni.cc',
+ 'app/webrtc/java/jni/androidvideocapturer_jni.h',
+ 'app/webrtc/java/jni/surfacetexturehelper_jni.cc',
+ 'app/webrtc/java/jni/surfacetexturehelper_jni.h',
+ ]
+ }],
+ ],
+ },
+ {
+ 'target_name': 'libjingle_peerconnection_so',
+ 'type': 'shared_library',
+ 'dependencies': [
+ 'libjingle_peerconnection',
+ 'libjingle_peerconnection_jni',
+ ],
+ 'sources': [
+ 'app/webrtc/java/jni/jni_onload.cc',
+ ],
+ 'variables': {
+ # This library uses native JNI exports; tell GYP so that the
+ # required symbols will be kept.
+ 'use_native_jni_exports': 1,
+ },
+ 'conditions': [
['OS=="linux"', {
'defines': [
'HAVE_GTK',
@@ -86,30 +130,6 @@
}],
],
}],
- ['OS=="android"', {
- 'sources': [
- 'app/webrtc/java/jni/androidvideocapturer_jni.cc',
- 'app/webrtc/java/jni/androidvideocapturer_jni.h',
- ],
- 'variables': {
- # This library uses native JNI exports; tell GYP so that the
- # required symbols will be kept.
- 'use_native_jni_exports': 1,
- },
- }],
- ['OS=="android" and build_with_chromium==0', {
- 'sources': [
- 'app/webrtc/java/jni/androidmediacodeccommon.h',
- 'app/webrtc/java/jni/androidmediadecoder_jni.cc',
- 'app/webrtc/java/jni/androidmediadecoder_jni.h',
- 'app/webrtc/java/jni/androidmediaencoder_jni.cc',
- 'app/webrtc/java/jni/androidmediaencoder_jni.h',
- 'app/webrtc/java/jni/androidnetworkmonitor_jni.cc',
- 'app/webrtc/java/jni/androidnetworkmonitor_jni.h',
- 'app/webrtc/java/jni/surfacetexturehelper_jni.cc',
- 'app/webrtc/java/jni/surfacetexturehelper_jni.h',
- ]
- }],
],
},
{
@@ -154,6 +174,8 @@
'app/webrtc/java/android/org/webrtc/CameraEnumerationAndroid.java',
'app/webrtc/java/android/org/webrtc/CameraEnumerator.java',
'app/webrtc/java/android/org/webrtc/EglBase.java',
+ 'app/webrtc/java/android/org/webrtc/EglBase10.java',
+ 'app/webrtc/java/android/org/webrtc/EglBase14.java',
'app/webrtc/java/android/org/webrtc/GlRectDrawer.java',
'app/webrtc/java/android/org/webrtc/GlShader.java',
'app/webrtc/java/android/org/webrtc/GlUtil.java',
@@ -232,6 +254,12 @@
'libjingle_peerconnection_so',
],
'variables': {
+ # Designate as Chromium code and point to our lint settings to
+ # enable linting of the WebRTC code (this is the only way to make
+ # lint_action invoke the Android linter).
+ 'android_manifest_path': '<(webrtc_root)/build/android/AndroidManifest.xml',
+ 'suppressions_file': '<(webrtc_root)/build/android/suppressions.xml',
+ 'chromium_code': 1,
'java_in_dir': 'app/webrtc/java',
'webrtc_base_dir': '<(webrtc_root)/base',
'webrtc_modules_dir': '<(webrtc_root)/modules',
@@ -246,7 +274,7 @@
}, # libjingle_peerconnection_java
]
}],
- ['OS=="ios" or (OS=="mac" and target_arch!="ia32" and mac_sdk>="10.7")', {
+ ['OS=="ios" or (OS=="mac" and target_arch!="ia32")', {
# ARC requires a deployment target of at least 10.7 on mac.
'targets': [
{
@@ -354,6 +382,9 @@
# common.gypi enables this for mac but we want this to be disabled
# like it is for ios.
'CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS': 'NO',
+ # Disabled due to failing when compiled with -Wall, see
+ # https://bugs.chromium.org/p/webrtc/issues/detail?id=5397
+ 'WARNING_CFLAGS': ['-Wno-unused-property-ivar'],
},
'conditions': [
['OS=="ios"', {
@@ -366,6 +397,9 @@
'app/webrtc/objc/public/RTCEAGLVideoView.h',
'app/webrtc/objc/public/RTCAVFoundationVideoSource.h',
],
+ 'dependencies': [
+ '<(webrtc_root)/base/base.gyp:rtc_base_objc',
+ ],
'link_settings': {
'xcode_settings': {
'OTHER_LDFLAGS': [
@@ -534,7 +568,7 @@
'include_dirs': [
# TODO(jiayl): move this into the direct_dependent_settings of
# usrsctp.gyp.
- '<(DEPTH)/third_party/usrsctp',
+ '<(DEPTH)/third_party/usrsctp/usrsctplib',
],
'dependencies': [
'<(DEPTH)/third_party/usrsctp/usrsctp.gyp:usrsctplib',
@@ -678,10 +712,16 @@
'include_dirs': [
'<(DEPTH)/testing/gtest/include',
],
+ 'include_dirs!': [
+ '<(DEPTH)/webrtc',
+ ],
'direct_dependent_settings': {
'include_dirs': [
'<(DEPTH)/testing/gtest/include',
],
+ 'include_dirs!': [
+ '<(DEPTH)/webrtc',
+ ],
},
'sources': [
'session/media/audiomonitor.cc',
@@ -725,7 +765,6 @@
'app/webrtc/dtmfsender.cc',
'app/webrtc/dtmfsender.h',
'app/webrtc/dtmfsenderinterface.h',
- 'app/webrtc/fakeportallocatorfactory.h',
'app/webrtc/jsep.h',
'app/webrtc/jsepicecandidate.cc',
'app/webrtc/jsepicecandidate.h',
@@ -740,10 +779,10 @@
'app/webrtc/mediastream.cc',
'app/webrtc/mediastream.h',
'app/webrtc/mediastreaminterface.h',
+ 'app/webrtc/mediastreamobserver.cc',
+ 'app/webrtc/mediastreamobserver.h',
'app/webrtc/mediastreamprovider.h',
'app/webrtc/mediastreamproxy.h',
- 'app/webrtc/mediastreamsignaling.cc',
- 'app/webrtc/mediastreamsignaling.h',
'app/webrtc/mediastreamtrack.h',
'app/webrtc/mediastreamtrackproxy.h',
'app/webrtc/notifier.h',
@@ -754,8 +793,6 @@
'app/webrtc/peerconnectionfactoryproxy.h',
'app/webrtc/peerconnectioninterface.h',
'app/webrtc/peerconnectionproxy.h',
- 'app/webrtc/portallocatorfactory.cc',
- 'app/webrtc/portallocatorfactory.h',
'app/webrtc/proxy.h',
'app/webrtc/remoteaudiosource.cc',
'app/webrtc/remoteaudiosource.h',
@@ -789,14 +826,6 @@
'app/webrtc/webrtcsessiondescriptionfactory.cc',
'app/webrtc/webrtcsessiondescriptionfactory.h',
],
- 'conditions': [
- ['OS=="android" and build_with_chromium==0', {
- 'sources': [
- 'app/webrtc/androidvideocapturer.h',
- 'app/webrtc/androidvideocapturer.cc',
- ],
- }],
- ],
}, # target libjingle_peerconnection
],
}
diff --git a/talk/libjingle_tests.gyp b/talk/libjingle_tests.gyp
index 41b38b345d..1dc3649186 100755
--- a/talk/libjingle_tests.gyp
+++ b/talk/libjingle_tests.gyp
@@ -91,15 +91,15 @@
'media/base/videocapturer_unittest.cc',
'media/base/videocommon_unittest.cc',
'media/base/videoengine_unittest.h',
+ 'media/base/videoframe_unittest.h',
'media/devices/dummydevicemanager_unittest.cc',
'media/devices/filevideocapturer_unittest.cc',
'media/sctp/sctpdataengine_unittest.cc',
'media/webrtc/simulcast_unittest.cc',
+ 'media/webrtc/webrtcmediaengine_unittest.cc',
'media/webrtc/webrtcvideocapturer_unittest.cc',
- 'media/base/videoframe_unittest.h',
'media/webrtc/webrtcvideoframe_unittest.cc',
'media/webrtc/webrtcvideoframefactory_unittest.cc',
-
# Disabled because some tests fail.
# TODO(ronghuawu): Reenable these tests.
# 'media/devices/devicemanager_unittest.cc',
@@ -128,6 +128,17 @@
},
},
}],
+ ['OS=="win" and clang==1', {
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'AdditionalOptions': [
+ # Disable warnings failing when compiling with Clang on Windows.
+ # https://bugs.chromium.org/p/webrtc/issues/detail?id=5366
+ '-Wno-unused-function',
+ ],
+ },
+ },
+ },],
['OS=="ios"', {
'sources!': [
'media/sctp/sctpdataengine_unittest.cc',
@@ -176,7 +187,7 @@
}, # target libjingle_p2p_unittest
{
'target_name': 'libjingle_peerconnection_unittest',
- 'type': 'executable',
+ 'type': '<(gtest_target_type)',
'dependencies': [
'<(DEPTH)/testing/gmock.gyp:gmock',
'<(webrtc_root)/base/base_tests.gyp:rtc_base_tests_utils',
@@ -207,7 +218,6 @@
# 'app/webrtc/peerconnectionproxy_unittest.cc',
'app/webrtc/remotevideocapturer_unittest.cc',
'app/webrtc/rtpsenderreceiver_unittest.cc',
- 'app/webrtc/sctputils.cc',
'app/webrtc/statscollector_unittest.cc',
'app/webrtc/test/fakeaudiocapturemodule.cc',
'app/webrtc/test/fakeaudiocapturemodule.h',
@@ -215,7 +225,6 @@
'app/webrtc/test/fakeconstraints.h',
'app/webrtc/test/fakedatachannelprovider.h',
'app/webrtc/test/fakedtlsidentitystore.h',
- 'app/webrtc/test/fakemediastreamsignaling.h',
'app/webrtc/test/fakeperiodicvideocapturer.h',
'app/webrtc/test/fakevideotrackrenderer.h',
'app/webrtc/test/mockpeerconnectionobservers.h',
@@ -229,17 +238,25 @@
],
'conditions': [
['OS=="android"', {
- # We want gmock features that use tr1::tuple, but we currently
- # don't support the variadic templates used by libstdc++'s
- # implementation. gmock supports this scenario by providing its
- # own implementation but we must opt in to it.
- 'defines': [
- 'GTEST_USE_OWN_TR1_TUPLE=1',
- # GTEST_USE_OWN_TR1_TUPLE only works if GTEST_HAS_TR1_TUPLE is set.
- # gmock r625 made it so that GTEST_HAS_TR1_TUPLE is set to 0
- # automatically on android, so it has to be set explicitly here.
- 'GTEST_HAS_TR1_TUPLE=1',
- ],
+ 'sources': [
+ 'app/webrtc/test/androidtestinitializer.cc',
+ 'app/webrtc/test/androidtestinitializer.h',
+ ],
+ 'dependencies': [
+ '<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
+ 'libjingle.gyp:libjingle_peerconnection_jni',
+ ],
+ }],
+ ['OS=="win" and clang==1', {
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'AdditionalOptions': [
+ # Disable warnings failing when compiling with Clang on Windows.
+ # https://bugs.chromium.org/p/webrtc/issues/detail?id=5366
+ '-Wno-unused-function',
+ ],
+ },
+ },
}],
],
}, # target libjingle_peerconnection_unittest
@@ -333,7 +350,7 @@
},
], # targets
}], # OS=="android"
- ['OS=="ios" or (OS=="mac" and target_arch!="ia32" and mac_sdk>="10.7")', {
+ ['OS=="ios" or (OS=="mac" and target_arch!="ia32")', {
# A 10.7+ deployment target is required to make ARC link cleanly (as
# opposed to _compile_ cleanly, which the library under test
# does just fine on 10.6 too).
@@ -378,7 +395,7 @@
'<(webrtc_root)/base/base_tests.gyp:rtc_base_tests_utils',
'<(webrtc_root)/system_wrappers/system_wrappers.gyp:field_trial_default',
'<(DEPTH)/third_party/ocmock/ocmock.gyp:ocmock',
- '<(webrtc_root)/libjingle_examples.gyp:apprtc_signaling',
+ '<(webrtc_root)/webrtc_examples.gyp:apprtc_signaling',
],
'sources': [
'app/webrtc/objctests/mac/main.mm',
@@ -394,6 +411,17 @@
}, # target apprtc_signaling_gunit_test
],
}],
+ ['OS=="android"', {
+ 'targets': [
+ {
+ 'target_name': 'libjingle_peerconnection_unittest_apk_target',
+ 'type': 'none',
+ 'dependencies': [
+ '<(DEPTH)/webrtc/build/apk_tests.gyp:libjingle_peerconnection_unittest_apk',
+ ],
+ },
+ ],
+ }],
['test_isolation_mode != "noop"', {
'targets': [
{
diff --git a/talk/media/base/audiorenderer.h b/talk/media/base/audiorenderer.h
index 229c36e8b1..a42cd7de8f 100644
--- a/talk/media/base/audiorenderer.h
+++ b/talk/media/base/audiorenderer.h
@@ -41,7 +41,7 @@ class AudioRenderer {
virtual void OnData(const void* audio_data,
int bits_per_sample,
int sample_rate,
- int number_of_channels,
+ size_t number_of_channels,
size_t number_of_frames) = 0;
// Called when the AudioRenderer is going away.
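For context, a minimal sketch of a sink adopting the widened signature; the class name and byte counting are illustrative, not part of the patch:

  // Hypothetical sink; only the OnData() signature mirrors the new API.
  class ByteCountingSink : public cricket::AudioRenderer::Sink {
   public:
    void OnData(const void* audio_data,
                int bits_per_sample,
                int sample_rate,
                size_t number_of_channels,  // previously int
                size_t number_of_frames) override {
      bytes_seen_ +=
          number_of_channels * number_of_frames * (bits_per_sample / 8);
    }
    void OnClose() override {}

   private:
    size_t bytes_seen_ = 0;
  };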
diff --git a/talk/media/base/capturemanager_unittest.cc b/talk/media/base/capturemanager_unittest.cc
index e9903425b8..84086abae4 100644
--- a/talk/media/base/capturemanager_unittest.cc
+++ b/talk/media/base/capturemanager_unittest.cc
@@ -29,6 +29,7 @@
#include "talk/media/base/fakevideocapturer.h"
#include "talk/media/base/fakevideorenderer.h"
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/gunit.h"
#include "webrtc/base/sigslot.h"
@@ -57,7 +58,7 @@ class CaptureManagerTest : public ::testing::Test, public sigslot::has_slots<> {
}
void PopulateSupportedFormats() {
std::vector<cricket::VideoFormat> formats;
- for (int i = 0; i < ARRAY_SIZE(kCameraFormats); ++i) {
+ for (int i = 0; i < arraysize(kCameraFormats); ++i) {
formats.push_back(cricket::VideoFormat(kCameraFormats[i]));
}
video_capturer_.ResetSupportedFormats(formats);
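The arraysize() helper swapped in here is, as far as this patch shows, a drop-in for the old ARRAY_SIZE macro with stricter checking; a small usage sketch:

  #include "webrtc/base/arraysize.h"

  static const int kValues[] = {640, 480, 30};
  // Compile-time constant; passing a pointer instead of a real array is a
  // compile error, unlike the old ARRAY_SIZE macro.
  static_assert(arraysize(kValues) == 3, "unexpected element count");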
diff --git a/talk/media/base/codec.cc b/talk/media/base/codec.cc
index 5b747d1917..59708b37dd 100644
--- a/talk/media/base/codec.cc
+++ b/talk/media/base/codec.cc
@@ -163,13 +163,15 @@ void Codec::IntersectFeedbackParams(const Codec& other) {
feedback_params.Intersect(other.feedback_params);
}
-AudioCodec::AudioCodec(int pt,
- const std::string& nm,
- int cr,
- int br,
- int cs,
- int pr)
- : Codec(pt, nm, cr, pr), bitrate(br), channels(cs) {
+AudioCodec::AudioCodec(int id,
+ const std::string& name,
+ int clockrate,
+ int bitrate,
+ size_t channels,
+ int preference)
+ : Codec(id, name, clockrate, preference),
+ bitrate(bitrate),
+ channels(channels) {
}
AudioCodec::AudioCodec() : Codec(), bitrate(0), channels(0) {
@@ -219,20 +221,20 @@ std::string VideoCodec::ToString() const {
return os.str();
}
-VideoCodec::VideoCodec(int pt,
- const std::string& nm,
- int w,
- int h,
- int fr,
- int pr)
- : Codec(pt, nm, kVideoCodecClockrate, pr),
- width(w),
- height(h),
- framerate(fr) {
+VideoCodec::VideoCodec(int id,
+ const std::string& name,
+ int width,
+ int height,
+ int framerate,
+ int preference)
+ : Codec(id, name, kVideoCodecClockrate, preference),
+ width(width),
+ height(height),
+ framerate(framerate) {
}
-VideoCodec::VideoCodec(int pt, const std::string& nm)
- : Codec(pt, nm, kVideoCodecClockrate, 0),
+VideoCodec::VideoCodec(int id, const std::string& name)
+ : Codec(id, name, kVideoCodecClockrate, 0),
width(0),
height(0),
framerate(0) {
@@ -334,6 +336,11 @@ bool HasRemb(const VideoCodec& codec) {
FeedbackParam(kRtcpFbParamRemb, kParamValueEmpty));
}
+bool HasTransportCc(const VideoCodec& codec) {
+ return codec.HasFeedbackParam(
+ FeedbackParam(kRtcpFbParamTransportCc, kParamValueEmpty));
+}
+
bool CodecNamesEq(const std::string& name1, const std::string& name2) {
return _stricmp(name1.c_str(), name2.c_str()) == 0;
}
diff --git a/talk/media/base/codec.h b/talk/media/base/codec.h
index 3bb08e7c7a..da78e1c627 100644
--- a/talk/media/base/codec.h
+++ b/talk/media/base/codec.h
@@ -128,10 +128,15 @@ struct Codec {
struct AudioCodec : public Codec {
int bitrate;
- int channels;
+ size_t channels;
// Creates a codec with the given parameters.
- AudioCodec(int pt, const std::string& nm, int cr, int br, int cs, int pr);
+ AudioCodec(int id,
+ const std::string& name,
+ int clockrate,
+ int bitrate,
+ size_t channels,
+ int preference);
// Creates an empty codec.
AudioCodec();
AudioCodec(const AudioCodec& c);
@@ -161,8 +166,13 @@ struct VideoCodec : public Codec {
int framerate;
// Creates a codec with the given parameters.
- VideoCodec(int pt, const std::string& nm, int w, int h, int fr, int pr);
- VideoCodec(int pt, const std::string& nm);
+ VideoCodec(int id,
+ const std::string& name,
+ int width,
+ int height,
+ int framerate,
+ int preference);
+ VideoCodec(int id, const std::string& name);
// Creates an empty codec.
VideoCodec();
VideoCodec(const VideoCodec& c);
@@ -209,50 +219,6 @@ struct DataCodec : public Codec {
std::string ToString() const;
};
-struct VideoEncoderConfig {
- static const int kDefaultMaxThreads = -1;
- static const int kDefaultCpuProfile = -1;
-
- VideoEncoderConfig()
- : max_codec(),
- num_threads(kDefaultMaxThreads),
- cpu_profile(kDefaultCpuProfile) {
- }
-
- VideoEncoderConfig(const VideoCodec& c)
- : max_codec(c),
- num_threads(kDefaultMaxThreads),
- cpu_profile(kDefaultCpuProfile) {
- }
-
- VideoEncoderConfig(const VideoCodec& c, int t, int p)
- : max_codec(c),
- num_threads(t),
- cpu_profile(p) {
- }
-
- VideoEncoderConfig& operator=(const VideoEncoderConfig& config) {
- max_codec = config.max_codec;
- num_threads = config.num_threads;
- cpu_profile = config.cpu_profile;
- return *this;
- }
-
- bool operator==(const VideoEncoderConfig& config) const {
- return max_codec == config.max_codec &&
- num_threads == config.num_threads &&
- cpu_profile == config.cpu_profile;
- }
-
- bool operator!=(const VideoEncoderConfig& config) const {
- return !(*this == config);
- }
-
- VideoCodec max_codec;
- int num_threads;
- int cpu_profile;
-};
-
// Get the codec setting associated with |payload_type|. If there
// is no codec associated with that payload type it returns false.
template <class Codec>
@@ -271,6 +237,7 @@ bool FindCodecById(const std::vector<Codec>& codecs,
bool CodecNamesEq(const std::string& name1, const std::string& name2);
bool HasNack(const VideoCodec& codec);
bool HasRemb(const VideoCodec& codec);
+bool HasTransportCc(const VideoCodec& codec);
} // namespace cricket
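The constructor changes above are a rename of the terse parameters (pt, nm, cr, br, cs, pr) plus one type change: channels is now size_t. A construction sketch, with illustrative payload types and preference values:

  // id, name, clockrate, bitrate, channels (size_t), preference
  cricket::AudioCodec opus(111, "opus", 48000, 64000, 2u, 1);
  // id, name, width, height, framerate, preference
  cricket::VideoCodec vp8(100, "VP8", 640, 480, 30, 1);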
diff --git a/talk/media/base/codec_unittest.cc b/talk/media/base/codec_unittest.cc
index 7bd3735a9b..b2aff507ea 100644
--- a/talk/media/base/codec_unittest.cc
+++ b/talk/media/base/codec_unittest.cc
@@ -33,7 +33,6 @@ using cricket::Codec;
using cricket::DataCodec;
using cricket::FeedbackParam;
using cricket::VideoCodec;
-using cricket::VideoEncoderConfig;
using cricket::kCodecParamAssociatedPayloadType;
using cricket::kCodecParamMaxBitrate;
using cricket::kCodecParamMinBitrate;
@@ -214,54 +213,6 @@ TEST_F(CodecTest, TestVideoCodecMatches) {
EXPECT_FALSE(c1.Matches(VideoCodec(95, "V", 640, 400, 15, 0)));
}
-TEST_F(CodecTest, TestVideoEncoderConfigOperators) {
- VideoEncoderConfig c1(VideoCodec(
- 96, "SVC", 320, 200, 30, 3), 1, 2);
- VideoEncoderConfig c2(VideoCodec(
- 95, "SVC", 320, 200, 30, 3), 1, 2);
- VideoEncoderConfig c3(VideoCodec(
- 96, "xxx", 320, 200, 30, 3), 1, 2);
- VideoEncoderConfig c4(VideoCodec(
- 96, "SVC", 120, 200, 30, 3), 1, 2);
- VideoEncoderConfig c5(VideoCodec(
- 96, "SVC", 320, 100, 30, 3), 1, 2);
- VideoEncoderConfig c6(VideoCodec(
- 96, "SVC", 320, 200, 10, 3), 1, 2);
- VideoEncoderConfig c7(VideoCodec(
- 96, "SVC", 320, 200, 30, 1), 1, 2);
- VideoEncoderConfig c8(VideoCodec(
- 96, "SVC", 320, 200, 30, 3), 0, 2);
- VideoEncoderConfig c9(VideoCodec(
- 96, "SVC", 320, 200, 30, 3), 1, 1);
- EXPECT_TRUE(c1 != c2);
- EXPECT_TRUE(c1 != c2);
- EXPECT_TRUE(c1 != c3);
- EXPECT_TRUE(c1 != c4);
- EXPECT_TRUE(c1 != c5);
- EXPECT_TRUE(c1 != c6);
- EXPECT_TRUE(c1 != c7);
- EXPECT_TRUE(c1 != c8);
- EXPECT_TRUE(c1 != c9);
-
- VideoEncoderConfig c10;
- VideoEncoderConfig c11(VideoCodec(
- 0, "", 0, 0, 0, 0));
- VideoEncoderConfig c12(VideoCodec(
- 0, "", 0, 0, 0, 0),
- VideoEncoderConfig::kDefaultMaxThreads,
- VideoEncoderConfig::kDefaultCpuProfile);
- VideoEncoderConfig c13 = c1;
- VideoEncoderConfig c14(VideoCodec(
- 0, "", 0, 0, 0, 0), 0, 0);
-
- EXPECT_TRUE(c11 == c10);
- EXPECT_TRUE(c12 == c10);
- EXPECT_TRUE(c13 != c10);
- EXPECT_TRUE(c13 == c1);
- EXPECT_TRUE(c14 != c11);
- EXPECT_TRUE(c14 != c12);
-}
-
TEST_F(CodecTest, TestDataCodecMatches) {
// Test a codec with a static payload type.
DataCodec c0(95, "D", 0);
diff --git a/talk/media/base/constants.cc b/talk/media/base/constants.cc
index 4063004968..2361be6f50 100644
--- a/talk/media/base/constants.cc
+++ b/talk/media/base/constants.cc
@@ -90,6 +90,7 @@ const int kPreferredUseInbandFec = 0;
const char kRtcpFbParamNack[] = "nack";
const char kRtcpFbNackParamPli[] = "pli";
const char kRtcpFbParamRemb[] = "goog-remb";
+const char kRtcpFbParamTransportCc[] = "transport-cc";
const char kRtcpFbParamCcm[] = "ccm";
const char kRtcpFbCcmParamFir[] = "fir";
diff --git a/talk/media/base/constants.h b/talk/media/base/constants.h
index b6a9e5681f..706a7bdc87 100644
--- a/talk/media/base/constants.h
+++ b/talk/media/base/constants.h
@@ -107,6 +107,9 @@ extern const char kRtcpFbNackParamPli[];
// rtcp-fb messages according to
// http://tools.ietf.org/html/draft-alvestrand-rmcat-remb-00
extern const char kRtcpFbParamRemb[];
+// rtcp-fb messages according to
+// https://tools.ietf.org/html/draft-holmer-rmcat-transport-wide-cc-extensions-01
+extern const char kRtcpFbParamTransportCc[];
// ccm submessages according to RFC 5104
extern const char kRtcpFbParamCcm[];
extern const char kRtcpFbCcmParamFir[];
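Together with the HasTransportCc() helper added in codec.cc, this constant lets a codec advertise transport-wide congestion control feedback. A sketch, assuming the existing Codec::AddFeedbackParam() accessor:

  cricket::VideoCodec codec(100, "VP8", 640, 480, 30, 1);
  codec.AddFeedbackParam(cricket::FeedbackParam(
      cricket::kRtcpFbParamTransportCc, cricket::kParamValueEmpty));
  // cricket::HasTransportCc(codec) now returns true.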
diff --git a/talk/media/base/cryptoparams.h b/talk/media/base/cryptoparams.h
index 9dd1db5166..589953db3e 100644
--- a/talk/media/base/cryptoparams.h
+++ b/talk/media/base/cryptoparams.h
@@ -35,8 +35,10 @@ namespace cricket {
// Parameters for SRTP negotiation, as described in RFC 4568.
struct CryptoParams {
CryptoParams() : tag(0) {}
- CryptoParams(int t, const std::string& cs,
- const std::string& kp, const std::string& sp)
+ CryptoParams(int t,
+ const std::string& cs,
+ const std::string& kp,
+ const std::string& sp)
: tag(t), cipher_suite(cs), key_params(kp), session_params(sp) {}
bool Matches(const CryptoParams& params) const {
diff --git a/talk/media/base/executablehelpers.h b/talk/media/base/executablehelpers.h
index 401890f4e8..dd165c25da 100644
--- a/talk/media/base/executablehelpers.h
+++ b/talk/media/base/executablehelpers.h
@@ -28,7 +28,7 @@
#ifndef TALK_MEDIA_BASE_EXECUTABLEHELPERS_H_
#define TALK_MEDIA_BASE_EXECUTABLEHELPERS_H_
-#ifdef OSX
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
#include <mach-o/dyld.h>
#endif
@@ -62,15 +62,15 @@ inline Pathname GetExecutablePath() {
#else // UNICODE
rtc::Pathname path(exe_path_buffer);
#endif // UNICODE
-#elif defined(OSX) || defined(LINUX)
+#elif (defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)) || defined(WEBRTC_LINUX)
char exe_path_buffer[kMaxExePathSize];
-#ifdef OSX
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
uint32_t copied_length = kMaxExePathSize - 1;
if (_NSGetExecutablePath(exe_path_buffer, &copied_length) == -1) {
LOG(LS_ERROR) << "Buffer too small";
return rtc::Pathname();
}
-#elif defined LINUX
+#elif defined WEBRTC_LINUX
int32_t copied_length = kMaxExePathSize - 1;
const char* kProcExeFmt = "/proc/%d/exe";
char proc_exe_link[40];
@@ -86,11 +86,11 @@ inline Pathname GetExecutablePath() {
return rtc::Pathname();
}
exe_path_buffer[copied_length] = '\0';
-#endif // LINUX
+#endif // WEBRTC_LINUX
rtc::Pathname path(exe_path_buffer);
-#else // Android || IOS
+#else // Android || iOS
rtc::Pathname path;
-#endif // OSX || LINUX
+#endif // Mac || Linux
return path;
}
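This is the guard pattern repeated throughout the patch: WEBRTC_MAC is also defined for iOS builds, so desktop-only macOS code must exclude WEBRTC_IOS explicitly. Schematically:

  #if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
  // Desktop macOS only (was: #ifdef OSX).
  #elif defined(WEBRTC_LINUX)
  // Linux only (was: #ifdef LINUX).
  #else
  // Everything else, e.g. Android and iOS.
  #endif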
diff --git a/talk/media/base/fakemediaengine.h b/talk/media/base/fakemediaengine.h
index a6fa960dee..149704f92d 100644
--- a/talk/media/base/fakemediaengine.h
+++ b/talk/media/base/fakemediaengine.h
@@ -38,9 +38,10 @@
#include "talk/media/base/mediaengine.h"
#include "talk/media/base/rtputils.h"
#include "talk/media/base/streamparams.h"
-#include "webrtc/p2p/base/sessiondescription.h"
+#include "webrtc/audio/audio_sink.h"
#include "webrtc/base/buffer.h"
#include "webrtc/base/stringutils.h"
+#include "webrtc/p2p/base/sessiondescription.h"
namespace cricket {
@@ -229,15 +230,13 @@ template <class Base> class RtpHelper : public Base {
class FakeVoiceMediaChannel : public RtpHelper<VoiceMediaChannel> {
public:
struct DtmfInfo {
- DtmfInfo(uint32_t ssrc, int event_code, int duration, int flags)
+ DtmfInfo(uint32_t ssrc, int event_code, int duration)
: ssrc(ssrc),
event_code(event_code),
- duration(duration),
- flags(flags) {}
+ duration(duration) {}
uint32_t ssrc;
int event_code;
int duration;
- int flags;
};
explicit FakeVoiceMediaChannel(FakeVoiceEngine* engine,
const AudioOptions& options)
@@ -321,9 +320,8 @@ class FakeVoiceMediaChannel : public RtpHelper<VoiceMediaChannel> {
}
virtual bool InsertDtmf(uint32_t ssrc,
int event_code,
- int duration,
- int flags) {
- dtmf_info_queue_.push_back(DtmfInfo(ssrc, event_code, duration, flags));
+ int duration) {
+ dtmf_info_queue_.push_back(DtmfInfo(ssrc, event_code, duration));
return true;
}
@@ -349,6 +347,12 @@ class FakeVoiceMediaChannel : public RtpHelper<VoiceMediaChannel> {
virtual bool GetStats(VoiceMediaInfo* info) { return false; }
+ virtual void SetRawAudioSink(
+ uint32_t ssrc,
+ rtc::scoped_ptr<webrtc::AudioSinkInterface> sink) {
+ sink_ = std::move(sink);
+ }
+
private:
class VoiceChannelAudioSink : public AudioRenderer::Sink {
public:
@@ -364,7 +368,7 @@ class FakeVoiceMediaChannel : public RtpHelper<VoiceMediaChannel> {
void OnData(const void* audio_data,
int bits_per_sample,
int sample_rate,
- int number_of_channels,
+ size_t number_of_channels,
size_t number_of_frames) override {}
void OnClose() override { renderer_ = NULL; }
AudioRenderer* renderer() const { return renderer_; }
@@ -421,16 +425,16 @@ class FakeVoiceMediaChannel : public RtpHelper<VoiceMediaChannel> {
int time_since_last_typing_;
AudioOptions options_;
std::map<uint32_t, VoiceChannelAudioSink*> local_renderers_;
+ rtc::scoped_ptr<webrtc::AudioSinkInterface> sink_;
};
// A helper function to compare the FakeVoiceMediaChannel::DtmfInfo.
inline bool CompareDtmfInfo(const FakeVoiceMediaChannel::DtmfInfo& info,
uint32_t ssrc,
int event_code,
- int duration,
- int flags) {
+ int duration) {
return (info.duration == duration && info.event_code == event_code &&
- info.flags == flags && info.ssrc == ssrc);
+ info.ssrc == ssrc);
}
class FakeVideoMediaChannel : public RtpHelper<VideoMediaChannel> {
@@ -694,33 +698,23 @@ class FakeDataMediaChannel : public RtpHelper<DataMediaChannel> {
class FakeBaseEngine {
public:
FakeBaseEngine()
- : loglevel_(-1),
- options_changed_(false),
+ : options_changed_(false),
fail_create_channel_(false) {}
- void SetLogging(int level, const char* filter) {
- loglevel_ = level;
- logfilter_ = filter;
- }
-
void set_fail_create_channel(bool fail) { fail_create_channel_ = fail; }
- const std::vector<RtpHeaderExtension>& rtp_header_extensions() const {
- return rtp_header_extensions_;
- }
+ RtpCapabilities GetCapabilities() const { return capabilities_; }
void set_rtp_header_extensions(
const std::vector<RtpHeaderExtension>& extensions) {
- rtp_header_extensions_ = extensions;
+ capabilities_.header_extensions = extensions;
}
protected:
- int loglevel_;
- std::string logfilter_;
// Flag used by optionsmessagehandler_unittest for checking whether any
// relevant setting has been updated.
// TODO(thaloun): Replace with explicit checks of before & after values.
bool options_changed_;
bool fail_create_channel_;
- std::vector<RtpHeaderExtension> rtp_header_extensions_;
+ RtpCapabilities capabilities_;
};
class FakeVoiceEngine : public FakeBaseEngine {
@@ -733,14 +727,8 @@ class FakeVoiceEngine : public FakeBaseEngine {
}
bool Init(rtc::Thread* worker_thread) { return true; }
void Terminate() {}
- webrtc::VoiceEngine* GetVoE() { return nullptr; }
- AudioOptions GetOptions() const {
- return options_;
- }
- bool SetOptions(const AudioOptions& options) {
- options_ = options;
- options_changed_ = true;
- return true;
+ rtc::scoped_refptr<webrtc::AudioState> GetAudioState() const {
+ return rtc::scoped_refptr<webrtc::AudioState>();
}
VoiceMediaChannel* CreateChannel(webrtc::Call* call,
@@ -763,21 +751,12 @@ class FakeVoiceEngine : public FakeBaseEngine {
const std::vector<AudioCodec>& codecs() { return codecs_; }
void SetCodecs(const std::vector<AudioCodec> codecs) { codecs_ = codecs; }
- bool SetDevices(const Device* in_device, const Device* out_device) {
- in_device_ = (in_device) ? in_device->name : "";
- out_device_ = (out_device) ? out_device->name : "";
- options_changed_ = true;
- return true;
- }
-
bool GetOutputVolume(int* level) {
*level = output_volume_;
return true;
}
-
bool SetOutputVolume(int level) {
output_volume_ = level;
- options_changed_ = true;
return true;
}
@@ -795,9 +774,6 @@ class FakeVoiceEngine : public FakeBaseEngine {
std::vector<FakeVoiceMediaChannel*> channels_;
std::vector<AudioCodec> codecs_;
int output_volume_;
- std::string in_device_;
- std::string out_device_;
- AudioOptions options_;
friend class FakeMediaEngine;
};
@@ -815,13 +791,6 @@ class FakeVideoEngine : public FakeBaseEngine {
options_changed_ = true;
return true;
}
- bool SetDefaultEncoderConfig(const VideoEncoderConfig& config) {
- default_encoder_config_ = config;
- return true;
- }
- const VideoEncoderConfig& default_encoder_config() const {
- return default_encoder_config_;
- }
VideoMediaChannel* CreateChannel(webrtc::Call* call,
const VideoOptions& options) {
@@ -864,7 +833,6 @@ class FakeVideoEngine : public FakeBaseEngine {
private:
std::vector<FakeVideoMediaChannel*> channels_;
std::vector<VideoCodec> codecs_;
- VideoEncoderConfig default_encoder_config_;
std::string in_device_;
bool capture_;
VideoOptions options_;
@@ -875,10 +843,7 @@ class FakeVideoEngine : public FakeBaseEngine {
class FakeMediaEngine :
public CompositeMediaEngine<FakeVoiceEngine, FakeVideoEngine> {
public:
- FakeMediaEngine() {
- voice_ = FakeVoiceEngine();
- video_ = FakeVideoEngine();
- }
+ FakeMediaEngine() {}
virtual ~FakeMediaEngine() {}
void SetAudioCodecs(const std::vector<AudioCodec>& codecs) {
@@ -904,24 +869,13 @@ class FakeMediaEngine :
return video_.GetChannel(index);
}
- AudioOptions audio_options() const { return voice_.options_; }
int output_volume() const { return voice_.output_volume_; }
- const VideoEncoderConfig& default_video_encoder_config() const {
- return video_.default_encoder_config_;
- }
- const std::string& audio_in_device() const { return voice_.in_device_; }
- const std::string& audio_out_device() const { return voice_.out_device_; }
- int voice_loglevel() const { return voice_.loglevel_; }
- const std::string& voice_logfilter() const { return voice_.logfilter_; }
- int video_loglevel() const { return video_.loglevel_; }
- const std::string& video_logfilter() const { return video_.logfilter_; }
bool capture() const { return video_.capture_; }
bool options_changed() const {
- return voice_.options_changed_ || video_.options_changed_;
+ return video_.options_changed_;
}
void clear_options_changed() {
video_.options_changed_ = false;
- voice_.options_changed_ = false;
}
void set_fail_create_channel(bool fail) {
voice_.set_fail_create_channel(fail);
diff --git a/talk/media/base/fakemediaprocessor.h b/talk/media/base/fakemediaprocessor.h
deleted file mode 100644
index 8de2678c95..0000000000
--- a/talk/media/base/fakemediaprocessor.h
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * libjingle
- * Copyright 2004 Google Inc.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions are met:
- *
- * 1. Redistributions of source code must retain the above copyright notice,
- * this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright notice,
- * this list of conditions and the following disclaimer in the documentation
- * and/or other materials provided with the distribution.
- * 3. The name of the author may not be used to endorse or promote products
- * derived from this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
- * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
- * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
- * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
- * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
- * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
- * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-// TODO(solenberg): Remove this file once Chromium's libjingle.gyp/.gn are
-// updated.
diff --git a/talk/media/base/mediachannel.h b/talk/media/base/mediachannel.h
index 14660847fa..f6fb77d8a6 100644
--- a/talk/media/base/mediachannel.h
+++ b/talk/media/base/mediachannel.h
@@ -38,6 +38,7 @@
#include "webrtc/base/buffer.h"
#include "webrtc/base/dscp.h"
#include "webrtc/base/logging.h"
+#include "webrtc/base/optional.h"
#include "webrtc/base/sigslot.h"
#include "webrtc/base/socket.h"
#include "webrtc/base/window.h"
@@ -50,88 +51,30 @@ class RateLimiter;
class Timing;
}
+namespace webrtc {
+class AudioSinkInterface;
+}
+
namespace cricket {
class AudioRenderer;
-struct RtpHeader;
class ScreencastId;
-struct VideoFormat;
class VideoCapturer;
class VideoRenderer;
+struct RtpHeader;
+struct VideoFormat;
const int kMinRtpHeaderExtensionId = 1;
const int kMaxRtpHeaderExtensionId = 255;
const int kScreencastDefaultFps = 5;
-// Used in AudioOptions and VideoOptions to signify "unset" values.
-template <class T>
-class Settable {
- public:
- Settable() : set_(false), val_() {}
- explicit Settable(T val) : set_(true), val_(val) {}
-
- bool IsSet() const {
- return set_;
- }
-
- bool Get(T* out) const {
- *out = val_;
- return set_;
- }
-
- T GetWithDefaultIfUnset(const T& default_value) const {
- return set_ ? val_ : default_value;
- }
-
- void Set(T val) {
- set_ = true;
- val_ = val;
- }
-
- void Clear() {
- Set(T());
- set_ = false;
- }
-
- void SetFrom(const Settable<T>& o) {
- // Set this value based on the value of o, iff o is set. If this value is
- // set and o is unset, the current value will be unchanged.
- T val;
- if (o.Get(&val)) {
- Set(val);
- }
- }
-
- std::string ToString() const {
- return set_ ? rtc::ToString(val_) : "";
- }
-
- bool operator==(const Settable<T>& o) const {
- // Equal if both are unset with any value or both set with the same value.
- return (set_ == o.set_) && (!set_ || (val_ == o.val_));
- }
-
- bool operator!=(const Settable<T>& o) const {
- return !operator==(o);
- }
-
- protected:
- void InitializeValue(const T &val) {
- val_ = val;
- }
-
- private:
- bool set_;
- T val_;
-};
-
template <class T>
-static std::string ToStringIfSet(const char* key, const Settable<T>& val) {
+static std::string ToStringIfSet(const char* key, const rtc::Optional<T>& val) {
std::string str;
- if (val.IsSet()) {
+ if (val) {
str = key;
str += ": ";
- str += val.ToString();
+ str += val ? rtc::ToString(*val) : "";
str += ", ";
}
return str;
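The deleted Settable<T> was a hand-rolled optional; rtc::Optional<T> replaces it with more conventional syntax. A rough migration sketch (value_or() is assumed to exist in this snapshot of webrtc/base/optional.h):

  rtc::Optional<int> bitrate;             // was: Settable<int> bitrate;
  if (bitrate) { /* was: bitrate.IsSet() */ }
  bitrate = rtc::Optional<int>(300);      // was: bitrate.Set(300);
  int v = bitrate.value_or(0);            // was: GetWithDefaultIfUnset(0)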
@@ -157,32 +100,32 @@ static std::string VectorToString(const std::vector<T>& vals) {
// but some things currently still use flags.
struct AudioOptions {
void SetAll(const AudioOptions& change) {
- echo_cancellation.SetFrom(change.echo_cancellation);
- auto_gain_control.SetFrom(change.auto_gain_control);
- noise_suppression.SetFrom(change.noise_suppression);
- highpass_filter.SetFrom(change.highpass_filter);
- stereo_swapping.SetFrom(change.stereo_swapping);
- audio_jitter_buffer_max_packets.SetFrom(
- change.audio_jitter_buffer_max_packets);
- audio_jitter_buffer_fast_accelerate.SetFrom(
- change.audio_jitter_buffer_fast_accelerate);
- typing_detection.SetFrom(change.typing_detection);
- aecm_generate_comfort_noise.SetFrom(change.aecm_generate_comfort_noise);
- conference_mode.SetFrom(change.conference_mode);
- adjust_agc_delta.SetFrom(change.adjust_agc_delta);
- experimental_agc.SetFrom(change.experimental_agc);
- extended_filter_aec.SetFrom(change.extended_filter_aec);
- delay_agnostic_aec.SetFrom(change.delay_agnostic_aec);
- experimental_ns.SetFrom(change.experimental_ns);
- aec_dump.SetFrom(change.aec_dump);
- tx_agc_target_dbov.SetFrom(change.tx_agc_target_dbov);
- tx_agc_digital_compression_gain.SetFrom(
- change.tx_agc_digital_compression_gain);
- tx_agc_limiter.SetFrom(change.tx_agc_limiter);
- recording_sample_rate.SetFrom(change.recording_sample_rate);
- playout_sample_rate.SetFrom(change.playout_sample_rate);
- dscp.SetFrom(change.dscp);
- combined_audio_video_bwe.SetFrom(change.combined_audio_video_bwe);
+ SetFrom(&echo_cancellation, change.echo_cancellation);
+ SetFrom(&auto_gain_control, change.auto_gain_control);
+ SetFrom(&noise_suppression, change.noise_suppression);
+ SetFrom(&highpass_filter, change.highpass_filter);
+ SetFrom(&stereo_swapping, change.stereo_swapping);
+ SetFrom(&audio_jitter_buffer_max_packets,
+ change.audio_jitter_buffer_max_packets);
+ SetFrom(&audio_jitter_buffer_fast_accelerate,
+ change.audio_jitter_buffer_fast_accelerate);
+ SetFrom(&typing_detection, change.typing_detection);
+ SetFrom(&aecm_generate_comfort_noise, change.aecm_generate_comfort_noise);
+ SetFrom(&conference_mode, change.conference_mode);
+ SetFrom(&adjust_agc_delta, change.adjust_agc_delta);
+ SetFrom(&experimental_agc, change.experimental_agc);
+ SetFrom(&extended_filter_aec, change.extended_filter_aec);
+ SetFrom(&delay_agnostic_aec, change.delay_agnostic_aec);
+ SetFrom(&experimental_ns, change.experimental_ns);
+ SetFrom(&aec_dump, change.aec_dump);
+ SetFrom(&tx_agc_target_dbov, change.tx_agc_target_dbov);
+ SetFrom(&tx_agc_digital_compression_gain,
+ change.tx_agc_digital_compression_gain);
+ SetFrom(&tx_agc_limiter, change.tx_agc_limiter);
+ SetFrom(&recording_sample_rate, change.recording_sample_rate);
+ SetFrom(&playout_sample_rate, change.playout_sample_rate);
+ SetFrom(&dscp, change.dscp);
+ SetFrom(&combined_audio_video_bwe, change.combined_audio_video_bwe);
}
bool operator==(const AudioOptions& o) const {
@@ -247,39 +190,47 @@ struct AudioOptions {
// Audio processing that attempts to filter away the output signal from
// later inbound pickup.
- Settable<bool> echo_cancellation;
+ rtc::Optional<bool> echo_cancellation;
// Audio processing to adjust the sensitivity of the local mic dynamically.
- Settable<bool> auto_gain_control;
+ rtc::Optional<bool> auto_gain_control;
// Audio processing to filter out background noise.
- Settable<bool> noise_suppression;
+ rtc::Optional<bool> noise_suppression;
// Audio processing to remove background noise of lower frequencies.
- Settable<bool> highpass_filter;
+ rtc::Optional<bool> highpass_filter;
// Audio processing to swap the left and right channels.
- Settable<bool> stereo_swapping;
+ rtc::Optional<bool> stereo_swapping;
// Audio receiver jitter buffer (NetEq) max capacity in number of packets.
- Settable<int> audio_jitter_buffer_max_packets;
+ rtc::Optional<int> audio_jitter_buffer_max_packets;
// Audio receiver jitter buffer (NetEq) fast accelerate mode.
- Settable<bool> audio_jitter_buffer_fast_accelerate;
+ rtc::Optional<bool> audio_jitter_buffer_fast_accelerate;
// Audio processing to detect typing.
- Settable<bool> typing_detection;
- Settable<bool> aecm_generate_comfort_noise;
- Settable<bool> conference_mode;
- Settable<int> adjust_agc_delta;
- Settable<bool> experimental_agc;
- Settable<bool> extended_filter_aec;
- Settable<bool> delay_agnostic_aec;
- Settable<bool> experimental_ns;
- Settable<bool> aec_dump;
+ rtc::Optional<bool> typing_detection;
+ rtc::Optional<bool> aecm_generate_comfort_noise;
+ rtc::Optional<bool> conference_mode;
+ rtc::Optional<int> adjust_agc_delta;
+ rtc::Optional<bool> experimental_agc;
+ rtc::Optional<bool> extended_filter_aec;
+ rtc::Optional<bool> delay_agnostic_aec;
+ rtc::Optional<bool> experimental_ns;
+ rtc::Optional<bool> aec_dump;
// Note that tx_agc_* only applies to non-experimental AGC.
- Settable<uint16_t> tx_agc_target_dbov;
- Settable<uint16_t> tx_agc_digital_compression_gain;
- Settable<bool> tx_agc_limiter;
- Settable<uint32_t> recording_sample_rate;
- Settable<uint32_t> playout_sample_rate;
+ rtc::Optional<uint16_t> tx_agc_target_dbov;
+ rtc::Optional<uint16_t> tx_agc_digital_compression_gain;
+ rtc::Optional<bool> tx_agc_limiter;
+ rtc::Optional<uint32_t> recording_sample_rate;
+ rtc::Optional<uint32_t> playout_sample_rate;
// Set DSCP value for packet sent from audio channel.
- Settable<bool> dscp;
+ rtc::Optional<bool> dscp;
// Enable combined audio+bandwidth BWE.
- Settable<bool> combined_audio_video_bwe;
+ rtc::Optional<bool> combined_audio_video_bwe;
+
+ private:
+ template <typename T>
+ static void SetFrom(rtc::Optional<T>* s, const rtc::Optional<T>& o) {
+ if (o) {
+ *s = o;
+ }
+ }
};
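The private SetFrom() helper keeps the old merge semantics: SetAll() copies only the fields the caller actually set. A quick sketch:

  cricket::AudioOptions base, change;
  base.echo_cancellation = rtc::Optional<bool>(true);
  change.noise_suppression = rtc::Optional<bool>(false);
  base.SetAll(change);
  // echo_cancellation stays true; noise_suppression becomes false.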
// Options that can be applied to a VideoMediaChannel or a VideoMediaEngine.
@@ -287,38 +238,41 @@ struct AudioOptions {
// We are moving all of the setting of options to structs like this,
// but some things currently still use flags.
struct VideoOptions {
- VideoOptions() {
- process_adaptation_threshhold.Set(kProcessCpuThreshold);
- system_low_adaptation_threshhold.Set(kLowSystemCpuThreshold);
- system_high_adaptation_threshhold.Set(kHighSystemCpuThreshold);
- unsignalled_recv_stream_limit.Set(kNumDefaultUnsignalledVideoRecvStreams);
- }
+ VideoOptions()
+ : process_adaptation_threshhold(kProcessCpuThreshold),
+ system_low_adaptation_threshhold(kLowSystemCpuThreshold),
+ system_high_adaptation_threshhold(kHighSystemCpuThreshold),
+ unsignalled_recv_stream_limit(kNumDefaultUnsignalledVideoRecvStreams) {}
void SetAll(const VideoOptions& change) {
- adapt_input_to_cpu_usage.SetFrom(change.adapt_input_to_cpu_usage);
- adapt_cpu_with_smoothing.SetFrom(change.adapt_cpu_with_smoothing);
- video_adapt_third.SetFrom(change.video_adapt_third);
- video_noise_reduction.SetFrom(change.video_noise_reduction);
- video_start_bitrate.SetFrom(change.video_start_bitrate);
- cpu_overuse_detection.SetFrom(change.cpu_overuse_detection);
- cpu_underuse_threshold.SetFrom(change.cpu_underuse_threshold);
- cpu_overuse_threshold.SetFrom(change.cpu_overuse_threshold);
- cpu_underuse_encode_rsd_threshold.SetFrom(
- change.cpu_underuse_encode_rsd_threshold);
- cpu_overuse_encode_rsd_threshold.SetFrom(
- change.cpu_overuse_encode_rsd_threshold);
- cpu_overuse_encode_usage.SetFrom(change.cpu_overuse_encode_usage);
- conference_mode.SetFrom(change.conference_mode);
- process_adaptation_threshhold.SetFrom(change.process_adaptation_threshhold);
- system_low_adaptation_threshhold.SetFrom(
- change.system_low_adaptation_threshhold);
- system_high_adaptation_threshhold.SetFrom(
- change.system_high_adaptation_threshhold);
- dscp.SetFrom(change.dscp);
- suspend_below_min_bitrate.SetFrom(change.suspend_below_min_bitrate);
- unsignalled_recv_stream_limit.SetFrom(change.unsignalled_recv_stream_limit);
- use_simulcast_adapter.SetFrom(change.use_simulcast_adapter);
- screencast_min_bitrate.SetFrom(change.screencast_min_bitrate);
+ SetFrom(&adapt_input_to_cpu_usage, change.adapt_input_to_cpu_usage);
+ SetFrom(&adapt_cpu_with_smoothing, change.adapt_cpu_with_smoothing);
+ SetFrom(&video_adapt_third, change.video_adapt_third);
+ SetFrom(&video_noise_reduction, change.video_noise_reduction);
+ SetFrom(&video_start_bitrate, change.video_start_bitrate);
+ SetFrom(&cpu_overuse_detection, change.cpu_overuse_detection);
+ SetFrom(&cpu_underuse_threshold, change.cpu_underuse_threshold);
+ SetFrom(&cpu_overuse_threshold, change.cpu_overuse_threshold);
+ SetFrom(&cpu_underuse_encode_rsd_threshold,
+ change.cpu_underuse_encode_rsd_threshold);
+ SetFrom(&cpu_overuse_encode_rsd_threshold,
+ change.cpu_overuse_encode_rsd_threshold);
+ SetFrom(&cpu_overuse_encode_usage, change.cpu_overuse_encode_usage);
+ SetFrom(&conference_mode, change.conference_mode);
+ SetFrom(&process_adaptation_threshhold,
+ change.process_adaptation_threshhold);
+ SetFrom(&system_low_adaptation_threshhold,
+ change.system_low_adaptation_threshhold);
+ SetFrom(&system_high_adaptation_threshhold,
+ change.system_high_adaptation_threshhold);
+ SetFrom(&dscp, change.dscp);
+ SetFrom(&suspend_below_min_bitrate, change.suspend_below_min_bitrate);
+ SetFrom(&unsignalled_recv_stream_limit,
+ change.unsignalled_recv_stream_limit);
+ SetFrom(&use_simulcast_adapter, change.use_simulcast_adapter);
+ SetFrom(&screencast_min_bitrate, change.screencast_min_bitrate);
+ SetFrom(&disable_prerenderer_smoothing,
+ change.disable_prerenderer_smoothing);
}
bool operator==(const VideoOptions& o) const {
@@ -345,7 +299,8 @@ struct VideoOptions {
suspend_below_min_bitrate == o.suspend_below_min_bitrate &&
unsignalled_recv_stream_limit == o.unsignalled_recv_stream_limit &&
use_simulcast_adapter == o.use_simulcast_adapter &&
- screencast_min_bitrate == o.screencast_min_bitrate;
+ screencast_min_bitrate == o.screencast_min_bitrate &&
+ disable_prerenderer_smoothing == o.disable_prerenderer_smoothing;
}
std::string ToString() const {
@@ -381,56 +336,71 @@ struct VideoOptions {
}
// Enable CPU adaptation?
- Settable<bool> adapt_input_to_cpu_usage;
+ rtc::Optional<bool> adapt_input_to_cpu_usage;
// Enable CPU adaptation smoothing?
- Settable<bool> adapt_cpu_with_smoothing;
+ rtc::Optional<bool> adapt_cpu_with_smoothing;
// Enable video adapt third?
- Settable<bool> video_adapt_third;
+ rtc::Optional<bool> video_adapt_third;
// Enable denoising?
- Settable<bool> video_noise_reduction;
+ rtc::Optional<bool> video_noise_reduction;
// Experimental: Enable WebRtc higher start bitrate?
- Settable<int> video_start_bitrate;
+ rtc::Optional<int> video_start_bitrate;
// Enable WebRTC CPU overuse detection, a newer version of the CPU
// adaptation algorithm. When enabled, this option overrides
// |adapt_input_to_cpu_usage|.
- Settable<bool> cpu_overuse_detection;
+ rtc::Optional<bool> cpu_overuse_detection;
// Low threshold (t1) for cpu overuse adaptation. (Adapt up)
// Metric: encode usage (m1). m1 < t1 => underuse.
- Settable<int> cpu_underuse_threshold;
+ rtc::Optional<int> cpu_underuse_threshold;
// High threshold (t1) for cpu overuse adaptation. (Adapt down)
// Metric: encode usage (m1). m1 > t1 => overuse.
- Settable<int> cpu_overuse_threshold;
+ rtc::Optional<int> cpu_overuse_threshold;
// Low threshold (t2) for cpu overuse adaptation. (Adapt up)
// Metric: relative standard deviation of encode time (m2).
// Optional threshold. If set, (m1 < t1 && m2 < t2) => underuse.
// Note: t2 will have no effect if t1 is not set.
- Settable<int> cpu_underuse_encode_rsd_threshold;
+ rtc::Optional<int> cpu_underuse_encode_rsd_threshold;
// High threshold (t2) for cpu overuse adaptation. (Adapt down)
// Metric: relative standard deviation of encode time (m2).
// Optional threshold. If set, (m1 > t1 || m2 > t2) => overuse.
// Note: t2 will have no effect if t1 is not set.
- Settable<int> cpu_overuse_encode_rsd_threshold;
+ rtc::Optional<int> cpu_overuse_encode_rsd_threshold;
// Use encode usage for cpu detection.
- Settable<bool> cpu_overuse_encode_usage;
+ rtc::Optional<bool> cpu_overuse_encode_usage;
// Use conference mode?
- Settable<bool> conference_mode;
+ rtc::Optional<bool> conference_mode;
// Threshold for process CPU adaptation. (Process limit)
- Settable<float> process_adaptation_threshhold;
+ rtc::Optional<float> process_adaptation_threshhold;
// Low threshold for CPU adaptation. (Adapt up)
- Settable<float> system_low_adaptation_threshhold;
+ rtc::Optional<float> system_low_adaptation_threshhold;
// High threshold for CPU adaptation. (Adapt down)
- Settable<float> system_high_adaptation_threshhold;
+ rtc::Optional<float> system_high_adaptation_threshhold;
// Set DSCP value for packet sent from video channel.
- Settable<bool> dscp;
+ rtc::Optional<bool> dscp;
// Enable WebRTC suspension of video. No video frames will be sent when the
// bitrate is below the configured minimum bitrate.
- Settable<bool> suspend_below_min_bitrate;
+ rtc::Optional<bool> suspend_below_min_bitrate;
// Limit on the number of early receive channels that can be created.
- Settable<int> unsignalled_recv_stream_limit;
+ rtc::Optional<int> unsignalled_recv_stream_limit;
// Enable use of simulcast adapter.
- Settable<bool> use_simulcast_adapter;
+ rtc::Optional<bool> use_simulcast_adapter;
// Force screencast to use a minimum bitrate.
- Settable<int> screencast_min_bitrate;
+ rtc::Optional<int> screencast_min_bitrate;
+ // Set to true if the renderer performs its own frame selection.
+ // If true, WebRTC hands frames over as soon as possible, without delay,
+ // and rendering smoothness is entirely the renderer's responsibility;
+ // if false, WebRTC delays frame release in order to improve rendering
+ // smoothness.
+ rtc::Optional<bool> disable_prerenderer_smoothing;
+
+ private:
+ template <typename T>
+ static void SetFrom(rtc::Optional<T>* s, const rtc::Optional<T>& o) {
+ if (o) {
+ *s = o;
+ }
+ }
};
struct RtpHeaderExtension {
@@ -447,8 +417,8 @@ struct RtpHeaderExtension {
std::string ToString() const {
std::ostringstream ost;
ost << "{";
- ost << "id: , " << id;
ost << "uri: " << uri;
+ ost << ", id: " << id;
ost << "}";
return ost.str();
}
@@ -481,12 +451,6 @@ enum VoiceMediaChannelOptions {
OPT_AGC_MINUS_10DB = 0x80000000
};
-// DTMF flags to control if a DTMF tone should be played and/or sent.
-enum DtmfFlags {
- DF_PLAY = 0x01,
- DF_SEND = 0x02,
-};
-
class MediaChannel : public sigslot::has_slots<> {
public:
class NetworkInterface {
@@ -593,7 +557,6 @@ class MediaChannel : public sigslot::has_slots<> {
enum SendFlags {
SEND_NOTHING,
- SEND_RINGBACKTONE,
SEND_MICROPHONE
};
@@ -820,6 +783,7 @@ struct VideoSenderInfo : public MediaSenderInfo {
}
std::vector<SsrcGroup> ssrc_groups;
+ std::string encoder_implementation_name;
int packets_cached;
int firs_rcvd;
int plis_rcvd;
@@ -865,6 +829,7 @@ struct VideoReceiverInfo : public MediaReceiverInfo {
}
std::vector<SsrcGroup> ssrc_groups;
+ std::string decoder_implementation_name;
int packets_concealed;
int firs_sent;
int plis_sent;
@@ -968,9 +933,13 @@ struct DataMediaInfo {
std::vector<DataReceiverInfo> receivers;
};
+struct RtcpParameters {
+ bool reduced_size = false;
+};
+
template <class Codec>
struct RtpParameters {
- virtual std::string ToString() {
+ virtual std::string ToString() const {
std::ostringstream ost;
ost << "{";
ost << "codecs: " << VectorToString(codecs) << ", ";
@@ -982,11 +951,12 @@ struct RtpParameters {
std::vector<Codec> codecs;
std::vector<RtpHeaderExtension> extensions;
// TODO(pthatcher): Add streams.
+ RtcpParameters rtcp;
};
template <class Codec, class Options>
struct RtpSendParameters : RtpParameters<Codec> {
- std::string ToString() override {
+ std::string ToString() const override {
std::ostringstream ost;
ost << "{";
ost << "codecs: " << VectorToString(this->codecs) << ", ";
@@ -1056,18 +1026,18 @@ class VoiceMediaChannel : public MediaChannel {
// Set speaker output volume of the specified ssrc.
virtual bool SetOutputVolume(uint32_t ssrc, double volume) = 0;
// Returns whether the telephone-event has been negotiated.
- virtual bool CanInsertDtmf() { return false; }
- // Send and/or play a DTMF |event| according to the |flags|.
- // The DTMF out-of-band signal will be used on sending.
+ virtual bool CanInsertDtmf() = 0;
+ // Send a DTMF |event|. The DTMF out-of-band signal will be used.
// The |ssrc| should be either 0 or a valid send stream ssrc.
// Valid values for |event| are 0 to 15, corresponding to
// DTMF events 0-9, *, #, and A-D.
- virtual bool InsertDtmf(uint32_t ssrc,
- int event,
- int duration,
- int flags) = 0;
+ virtual bool InsertDtmf(uint32_t ssrc, int event, int duration) = 0;
// Gets quality stats for the channel.
virtual bool GetStats(VoiceMediaInfo* info) = 0;
+
+ virtual void SetRawAudioSink(
+ uint32_t ssrc,
+ rtc::scoped_ptr<webrtc::AudioSinkInterface> sink) = 0;
};
struct VideoSendParameters : RtpSendParameters<VideoCodec, VideoOptions> {
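With the DF_PLAY/DF_SEND flags gone and CanInsertDtmf() now pure virtual, callers reduce to the following shape (|voice_channel| and |ssrc| are illustrative):

  if (voice_channel->CanInsertDtmf()) {
    // Event 1 is the '1' key; duration is in milliseconds.
    voice_channel->InsertDtmf(ssrc, 1, 100);
  }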
@@ -1194,13 +1164,13 @@ struct SendDataParams {
enum SendDataResult { SDR_SUCCESS, SDR_ERROR, SDR_BLOCK };
struct DataOptions {
- std::string ToString() {
+ std::string ToString() const {
return "{}";
}
};
struct DataSendParameters : RtpSendParameters<DataCodec, DataOptions> {
- std::string ToString() {
+ std::string ToString() const {
std::ostringstream ost;
// Options and extensions aren't used.
ost << "{";
diff --git a/talk/media/base/mediaengine.h b/talk/media/base/mediaengine.h
index 1a992d7d4a..467614bb3e 100644
--- a/talk/media/base/mediaengine.h
+++ b/talk/media/base/mediaengine.h
@@ -28,7 +28,7 @@
#ifndef TALK_MEDIA_BASE_MEDIAENGINE_H_
#define TALK_MEDIA_BASE_MEDIAENGINE_H_
-#ifdef OSX
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
#include <CoreAudio/CoreAudio.h>
#endif
@@ -40,8 +40,8 @@
#include "talk/media/base/mediacommon.h"
#include "talk/media/base/videocapturer.h"
#include "talk/media/base/videocommon.h"
-#include "talk/media/base/voiceprocessor.h"
#include "talk/media/devices/devicemanager.h"
+#include "webrtc/audio_state.h"
#include "webrtc/base/fileutils.h"
#include "webrtc/base/sigslotrepeater.h"
@@ -51,13 +51,16 @@
namespace webrtc {
class Call;
-class VoiceEngine;
}
namespace cricket {
class VideoCapturer;
+struct RtpCapabilities {
+ std::vector<RtpHeaderExtension> header_extensions;
+};
+
// MediaEngineInterface is an abstraction of a media engine which can be
// subclassed to support different media componentry backends.
// It supports voice and video operations in the same class to facilitate
@@ -72,7 +75,7 @@ class MediaEngineInterface {
// Shuts down the engine.
virtual void Terminate() = 0;
// TODO(solenberg): Remove once VoE API refactoring is done.
- virtual webrtc::VoiceEngine* GetVoE() = 0;
+ virtual rtc::scoped_refptr<webrtc::AudioState> GetAudioState() const = 0;
// MediaChannel creation
// Creates a voice media channel. Returns NULL on failure.
@@ -85,20 +88,6 @@ class MediaEngineInterface {
webrtc::Call* call,
const VideoOptions& options) = 0;
- // Configuration
- // Gets global audio options.
- virtual AudioOptions GetAudioOptions() const = 0;
- // Sets global audio options. "options" are from AudioOptions, above.
- virtual bool SetAudioOptions(const AudioOptions& options) = 0;
- // Sets the default (maximum) codec/resolution and encoder option to capture
- // and encode video.
- virtual bool SetDefaultVideoEncoderConfig(const VideoEncoderConfig& config)
- = 0;
-
- // Device selection
- virtual bool SetSoundDevices(const Device* in_device,
- const Device* out_device) = 0;
-
// Device configuration
// Gets the current speaker volume, as a value between 0 and 255.
virtual bool GetOutputVolume(int* level) = 0;
@@ -109,15 +98,9 @@ class MediaEngineInterface {
virtual int GetInputLevel() = 0;
virtual const std::vector<AudioCodec>& audio_codecs() = 0;
- virtual const std::vector<RtpHeaderExtension>&
- audio_rtp_header_extensions() = 0;
+ virtual RtpCapabilities GetAudioCapabilities() = 0;
virtual const std::vector<VideoCodec>& video_codecs() = 0;
- virtual const std::vector<RtpHeaderExtension>&
- video_rtp_header_extensions() = 0;
-
- // Logging control
- virtual void SetVoiceLogging(int min_sev, const char* filter) = 0;
- virtual void SetVideoLogging(int min_sev, const char* filter) = 0;
+ virtual RtpCapabilities GetVideoCapabilities() = 0;
// Starts AEC dump using existing file.
virtual bool StartAecDump(rtc::PlatformFile file) = 0;
@@ -167,8 +150,8 @@ class CompositeMediaEngine : public MediaEngineInterface {
voice_.Terminate();
}
- virtual webrtc::VoiceEngine* GetVoE() {
- return voice_.GetVoE();
+ virtual rtc::scoped_refptr<webrtc::AudioState> GetAudioState() const {
+ return voice_.GetAudioState();
}
virtual VoiceMediaChannel* CreateChannel(webrtc::Call* call,
const AudioOptions& options) {
@@ -179,21 +162,6 @@ class CompositeMediaEngine : public MediaEngineInterface {
return video_.CreateChannel(call, options);
}
- virtual AudioOptions GetAudioOptions() const {
- return voice_.GetOptions();
- }
- virtual bool SetAudioOptions(const AudioOptions& options) {
- return voice_.SetOptions(options);
- }
- virtual bool SetDefaultVideoEncoderConfig(const VideoEncoderConfig& config) {
- return video_.SetDefaultEncoderConfig(config);
- }
-
- virtual bool SetSoundDevices(const Device* in_device,
- const Device* out_device) {
- return voice_.SetDevices(in_device, out_device);
- }
-
virtual bool GetOutputVolume(int* level) {
return voice_.GetOutputVolume(level);
}
@@ -207,21 +175,14 @@ class CompositeMediaEngine : public MediaEngineInterface {
virtual const std::vector<AudioCodec>& audio_codecs() {
return voice_.codecs();
}
- virtual const std::vector<RtpHeaderExtension>& audio_rtp_header_extensions() {
- return voice_.rtp_header_extensions();
+ virtual RtpCapabilities GetAudioCapabilities() {
+ return voice_.GetCapabilities();
}
virtual const std::vector<VideoCodec>& video_codecs() {
return video_.codecs();
}
- virtual const std::vector<RtpHeaderExtension>& video_rtp_header_extensions() {
- return video_.rtp_header_extensions();
- }
-
- virtual void SetVoiceLogging(int min_sev, const char* filter) {
- voice_.SetLogging(min_sev, filter);
- }
- virtual void SetVideoLogging(int min_sev, const char* filter) {
- video_.SetLogging(min_sev, filter);
+ virtual RtpCapabilities GetVideoCapabilities() {
+ return video_.GetCapabilities();
}
virtual bool StartAecDump(rtc::PlatformFile file) {
@@ -243,70 +204,6 @@ class CompositeMediaEngine : public MediaEngineInterface {
VIDEO video_;
};
-// NullVoiceEngine can be used with CompositeMediaEngine in the case where only
-// a video engine is desired.
-class NullVoiceEngine {
- public:
- bool Init(rtc::Thread* worker_thread) { return true; }
- void Terminate() {}
- // If you need this to return an actual channel, use FakeMediaEngine instead.
- VoiceMediaChannel* CreateChannel(const AudioOptions& options) {
- return nullptr;
- }
- AudioOptions GetOptions() const { return AudioOptions(); }
- bool SetOptions(const AudioOptions& options) { return true; }
- bool SetDevices(const Device* in_device, const Device* out_device) {
- return true;
- }
- bool GetOutputVolume(int* level) {
- *level = 0;
- return true;
- }
- bool SetOutputVolume(int level) { return true; }
- int GetInputLevel() { return 0; }
- const std::vector<AudioCodec>& codecs() { return codecs_; }
- const std::vector<RtpHeaderExtension>& rtp_header_extensions() {
- return rtp_header_extensions_;
- }
- void SetLogging(int min_sev, const char* filter) {}
- bool StartAecDump(rtc::PlatformFile file) { return false; }
- bool StartRtcEventLog(rtc::PlatformFile file) { return false; }
- void StopRtcEventLog() {}
-
- private:
- std::vector<AudioCodec> codecs_;
- std::vector<RtpHeaderExtension> rtp_header_extensions_;
-};
-
-// NullVideoEngine can be used with CompositeMediaEngine in the case where only
-// a voice engine is desired.
-class NullVideoEngine {
- public:
- bool Init(rtc::Thread* worker_thread) { return true; }
- void Terminate() {}
- // If you need this to return an actual channel, use FakeMediaEngine instead.
- VideoMediaChannel* CreateChannel(
- const VideoOptions& options,
- VoiceMediaChannel* voice_media_channel) {
- return NULL;
- }
- bool SetOptions(const VideoOptions& options) { return true; }
- bool SetDefaultEncoderConfig(const VideoEncoderConfig& config) {
- return true;
- }
- const std::vector<VideoCodec>& codecs() { return codecs_; }
- const std::vector<RtpHeaderExtension>& rtp_header_extensions() {
- return rtp_header_extensions_;
- }
- void SetLogging(int min_sev, const char* filter) {}
-
- private:
- std::vector<VideoCodec> codecs_;
- std::vector<RtpHeaderExtension> rtp_header_extensions_;
-};
-
-typedef CompositeMediaEngine<NullVoiceEngine, NullVideoEngine> NullMediaEngine;
-
enum DataChannelType {
DCT_NONE = 0,
DCT_RTP = 1,
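Callers of the removed per-media extension accessors migrate to the capabilities struct; a sketch against any MediaEngineInterface:

  cricket::RtpCapabilities caps = engine->GetAudioCapabilities();
  for (const cricket::RtpHeaderExtension& ext : caps.header_extensions) {
    // was: engine->audio_rtp_header_extensions()
  }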
diff --git a/talk/media/base/streamparams_unittest.cc b/talk/media/base/streamparams_unittest.cc
index a9e1ce3531..a0164733d4 100644
--- a/talk/media/base/streamparams_unittest.cc
+++ b/talk/media/base/streamparams_unittest.cc
@@ -27,6 +27,7 @@
#include "talk/media/base/streamparams.h"
#include "talk/media/base/testutils.h"
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/gunit.h"
static const uint32_t kSsrcs1[] = {1};
@@ -54,8 +55,8 @@ TEST(SsrcGroup, EqualNotEqual) {
cricket::SsrcGroup("abc", MAKE_VECTOR(kSsrcs2)),
};
- for (size_t i = 0; i < ARRAY_SIZE(ssrc_groups); ++i) {
- for (size_t j = 0; j < ARRAY_SIZE(ssrc_groups); ++j) {
+ for (size_t i = 0; i < arraysize(ssrc_groups); ++i) {
+ for (size_t j = 0; j < arraysize(ssrc_groups); ++j) {
EXPECT_EQ((ssrc_groups[i] == ssrc_groups[j]), (i == j));
EXPECT_EQ((ssrc_groups[i] != ssrc_groups[j]), (i != j));
}
@@ -92,7 +93,7 @@ TEST(StreamParams, CreateLegacy) {
TEST(StreamParams, HasSsrcGroup) {
cricket::StreamParams sp =
- CreateStreamParamsWithSsrcGroup("XYZ", kSsrcs2, ARRAY_SIZE(kSsrcs2));
+ CreateStreamParamsWithSsrcGroup("XYZ", kSsrcs2, arraysize(kSsrcs2));
EXPECT_EQ(2U, sp.ssrcs.size());
EXPECT_EQ(kSsrcs2[0], sp.first_ssrc());
EXPECT_TRUE(sp.has_ssrcs());
@@ -107,7 +108,7 @@ TEST(StreamParams, HasSsrcGroup) {
TEST(StreamParams, GetSsrcGroup) {
cricket::StreamParams sp =
- CreateStreamParamsWithSsrcGroup("XYZ", kSsrcs2, ARRAY_SIZE(kSsrcs2));
+ CreateStreamParamsWithSsrcGroup("XYZ", kSsrcs2, arraysize(kSsrcs2));
EXPECT_EQ(NULL, sp.get_ssrc_group("xyz"));
EXPECT_EQ(&sp.ssrc_groups[0], sp.get_ssrc_group("XYZ"));
}
@@ -116,17 +117,17 @@ TEST(StreamParams, EqualNotEqual) {
cricket::StreamParams l1 = cricket::StreamParams::CreateLegacy(1);
cricket::StreamParams l2 = cricket::StreamParams::CreateLegacy(2);
cricket::StreamParams sg1 =
- CreateStreamParamsWithSsrcGroup("ABC", kSsrcs1, ARRAY_SIZE(kSsrcs1));
+ CreateStreamParamsWithSsrcGroup("ABC", kSsrcs1, arraysize(kSsrcs1));
cricket::StreamParams sg2 =
- CreateStreamParamsWithSsrcGroup("ABC", kSsrcs2, ARRAY_SIZE(kSsrcs2));
+ CreateStreamParamsWithSsrcGroup("ABC", kSsrcs2, arraysize(kSsrcs2));
cricket::StreamParams sg3 =
- CreateStreamParamsWithSsrcGroup("Abc", kSsrcs2, ARRAY_SIZE(kSsrcs2));
+ CreateStreamParamsWithSsrcGroup("Abc", kSsrcs2, arraysize(kSsrcs2));
cricket::StreamParams sg4 =
- CreateStreamParamsWithSsrcGroup("abc", kSsrcs2, ARRAY_SIZE(kSsrcs2));
+ CreateStreamParamsWithSsrcGroup("abc", kSsrcs2, arraysize(kSsrcs2));
cricket::StreamParams sps[] = {l1, l2, sg1, sg2, sg3, sg4};
- for (size_t i = 0; i < ARRAY_SIZE(sps); ++i) {
- for (size_t j = 0; j < ARRAY_SIZE(sps); ++j) {
+ for (size_t i = 0; i < arraysize(sps); ++i) {
+ for (size_t j = 0; j < arraysize(sps); ++j) {
EXPECT_EQ((sps[i] == sps[j]), (i == j));
EXPECT_EQ((sps[i] != sps[j]), (i != j));
}
@@ -195,7 +196,7 @@ TEST(StreamParams, GetPrimaryAndFidSsrcs) {
TEST(StreamParams, ToString) {
cricket::StreamParams sp =
- CreateStreamParamsWithSsrcGroup("XYZ", kSsrcs2, ARRAY_SIZE(kSsrcs2));
+ CreateStreamParamsWithSsrcGroup("XYZ", kSsrcs2, arraysize(kSsrcs2));
EXPECT_STREQ("{ssrcs:[1,2];ssrc_groups:{semantics:XYZ;ssrcs:[1,2]};}",
sp.ToString().c_str());
}
diff --git a/talk/media/base/testutils.cc b/talk/media/base/testutils.cc
index 3b1fcf0513..49a78e63dd 100644
--- a/talk/media/base/testutils.cc
+++ b/talk/media/base/testutils.cc
@@ -132,8 +132,8 @@ const RawRtcpPacket RtpTestUtility::kTestRawRtcpPackets[] = {
};
size_t RtpTestUtility::GetTestPacketCount() {
- return std::min(ARRAY_SIZE(kTestRawRtpPackets),
- ARRAY_SIZE(kTestRawRtcpPackets));
+ return std::min(arraysize(kTestRawRtpPackets),
+ arraysize(kTestRawRtcpPackets));
}
bool RtpTestUtility::WriteTestPackets(size_t count,
diff --git a/talk/media/base/testutils.h b/talk/media/base/testutils.h
index cb4146d707..20c0d62ab7 100644
--- a/talk/media/base/testutils.h
+++ b/talk/media/base/testutils.h
@@ -35,6 +35,7 @@
#include "talk/media/base/mediachannel.h"
#include "talk/media/base/videocapturer.h"
#include "talk/media/base/videocommon.h"
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/basictypes.h"
#include "webrtc/base/sigslot.h"
#include "webrtc/base/window.h"
@@ -54,7 +55,7 @@ namespace cricket {
template <class T> inline std::vector<T> MakeVector(const T a[], size_t s) {
return std::vector<T>(a, a + s);
}
-#define MAKE_VECTOR(a) cricket::MakeVector(a, ARRAY_SIZE(a))
+#define MAKE_VECTOR(a) cricket::MakeVector(a, arraysize(a))
struct RtpDumpPacket;
class RtpDumpWriter;
diff --git a/talk/media/base/videocapturer.cc b/talk/media/base/videocapturer.cc
index ca4b9069f1..d525a4188e 100644
--- a/talk/media/base/videocapturer.cc
+++ b/talk/media/base/videocapturer.cc
@@ -59,7 +59,7 @@ enum {
};
static const int64_t kMaxDistance = ~(static_cast<int64_t>(1) << 63);
-#ifdef LINUX
+#ifdef WEBRTC_LINUX
static const int kYU12Penalty = 16; // Needs to be higher than MJPG index.
#endif
static const int kDefaultScreencastFps = 5;
@@ -82,7 +82,7 @@ CapturedFrame::CapturedFrame()
pixel_height(0),
time_stamp(0),
data_size(0),
- rotation(0),
+ rotation(webrtc::kVideoRotation_0),
data(NULL) {}
// TODO(fbarchard): Remove this function once lmimediaengine stops using it.
@@ -94,11 +94,6 @@ bool CapturedFrame::GetDataSize(uint32_t* size) const {
return true;
}
-webrtc::VideoRotation CapturedFrame::GetRotation() const {
- ASSERT(rotation == 0 || rotation == 90 || rotation == 180 || rotation == 270);
- return static_cast<webrtc::VideoRotation>(rotation);
-}
-
/////////////////////////////////////////////////////////////////////
// Implementation of class VideoCapturer
/////////////////////////////////////////////////////////////////////
@@ -126,7 +121,6 @@ void VideoCapturer::Construct() {
SignalFrameCaptured.connect(this, &VideoCapturer::OnFrameCaptured);
scaled_width_ = 0;
scaled_height_ = 0;
- screencast_max_pixels_ = 0;
muted_ = false;
black_frame_count_down_ = kNumBlackFramesOnMute;
enable_video_adapter_ = true;
@@ -365,16 +359,11 @@ void VideoCapturer::OnFrameCaptured(VideoCapturer*,
if (IsScreencast()) {
int scaled_width, scaled_height;
- if (screencast_max_pixels_ > 0) {
- ComputeScaleMaxPixels(captured_frame->width, captured_frame->height,
- screencast_max_pixels_, &scaled_width, &scaled_height);
- } else {
- int desired_screencast_fps = capture_format_.get() ?
- VideoFormat::IntervalToFps(capture_format_->interval) :
- kDefaultScreencastFps;
- ComputeScale(captured_frame->width, captured_frame->height,
- desired_screencast_fps, &scaled_width, &scaled_height);
- }
+ int desired_screencast_fps = capture_format_.get() ?
+ VideoFormat::IntervalToFps(capture_format_->interval) :
+ kDefaultScreencastFps;
+ ComputeScale(captured_frame->width, captured_frame->height,
+ desired_screencast_fps, &scaled_width, &scaled_height);
if (FOURCC_ARGB == captured_frame->fourcc &&
(scaled_width != captured_frame->width ||
@@ -605,7 +594,7 @@ int64_t VideoCapturer::GetFormatDistance(const VideoFormat& desired,
for (size_t i = 0; i < preferred_fourccs.size(); ++i) {
if (supported_fourcc == CanonicalFourCC(preferred_fourccs[i])) {
delta_fourcc = i;
-#ifdef LINUX
+#ifdef WEBRTC_LINUX
// For HD, avoid YU12, which requires a software conversion and has 2 bugs:
// b/7326348, b/6960899. Reenable when fixed.
if (supported.height >= 720 && (supported_fourcc == FOURCC_YU12 ||
diff --git a/talk/media/base/videocapturer.h b/talk/media/base/videocapturer.h
index 0a11ed09c1..a13c201b8b 100644
--- a/talk/media/base/videocapturer.h
+++ b/talk/media/base/videocapturer.h
@@ -78,10 +78,6 @@ struct CapturedFrame {
// fourcc. Return true if succeeded.
bool GetDataSize(uint32_t* size) const;
- // TODO(guoweis): Change the type of |rotation| from int to
- // webrtc::VideoRotation once chromium gets the code.
- webrtc::VideoRotation GetRotation() const;
-
// The width and height of the captured frame could be different from those
// of VideoFormat. Once the first frame is captured, the width, height,
// fourcc, pixel_width, and pixel_height should keep the same over frames.
@@ -90,15 +86,11 @@ struct CapturedFrame {
uint32_t fourcc; // compression
uint32_t pixel_width; // width of a pixel, default is 1
uint32_t pixel_height; // height of a pixel, default is 1
- // TODO(magjed): |elapsed_time| is deprecated - remove once not used anymore.
- int64_t elapsed_time;
int64_t time_stamp; // timestamp of when the frame was captured, in unix
// time with nanosecond units.
uint32_t data_size; // number of bytes of the frame data
- // TODO(guoweis): This can't be converted to VideoRotation yet as it's
- // used by chrome now.
- int rotation; // rotation in degrees of the frame (0, 90, 180, 270)
+ webrtc::VideoRotation rotation; // rotation in degrees of the frame.
void* data; // pointer to the frame data. This object allocates the
// memory or points to an existing memory.
@@ -270,17 +262,6 @@ class VideoCapturer
sigslot::signal2<VideoCapturer*, const VideoFrame*,
sigslot::multi_threaded_local> SignalVideoFrame;
- // If 'screencast_max_pixels' is set greater than zero, screencasts will be
- // scaled to be no larger than this value.
- // If set to zero, the max pixels will be limited to
- // Retina MacBookPro 15" resolution of 2880 x 1800.
- // For high fps, maximum pixels limit is set based on common 24" monitor
- // resolution of 2048 x 1280.
- int screencast_max_pixels() const { return screencast_max_pixels_; }
- void set_screencast_max_pixels(int p) {
- screencast_max_pixels_ = std::max(0, p);
- }
-
// If true, run video adaptation. By default, video adaptation is enabled
// and users must call video_adapter()->OnOutputFormatRequest()
// to receive frames.
@@ -377,7 +358,6 @@ class VideoCapturer
bool square_pixel_aspect_ratio_; // Enable scaling to square pixels.
int scaled_width_; // Current output size from ComputeScale.
int scaled_height_;
- int screencast_max_pixels_; // Downscale screencasts further if requested.
bool muted_;
int black_frame_count_down_;
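
Since CapturedFrame::rotation is now a webrtc::VideoRotation rather than a raw int, platform capturers that still receive rotation in degrees need an explicit mapping. A sketch of such a conversion — RotationFromDegrees is illustrative, not part of this change; the enum constants in webrtc/common_video/rotation.h correspond to the four degree values:

    #include "webrtc/common_video/rotation.h"

    webrtc::VideoRotation RotationFromDegrees(int degrees) {
      switch (degrees) {
        case 0:   return webrtc::kVideoRotation_0;
        case 90:  return webrtc::kVideoRotation_90;
        case 180: return webrtc::kVideoRotation_180;
        case 270: return webrtc::kVideoRotation_270;
        default:  return webrtc::kVideoRotation_0;  // treat unknown as unrotated
      }
    }
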
diff --git a/talk/media/base/videocapturer_unittest.cc b/talk/media/base/videocapturer_unittest.cc
index 359fe9552a..6d1d8aa395 100644
--- a/talk/media/base/videocapturer_unittest.cc
+++ b/talk/media/base/videocapturer_unittest.cc
@@ -196,39 +196,6 @@ TEST_F(VideoCapturerTest, CameraOffOnMute) {
EXPECT_EQ(33, video_frames_received());
}
-TEST_F(VideoCapturerTest, ScreencastScaledMaxPixels) {
- capturer_.SetScreencast(true);
-
- int kWidth = 1280;
- int kHeight = 720;
-
- // Screencasts usually have large weird dimensions and are ARGB.
- std::vector<cricket::VideoFormat> formats;
- formats.push_back(cricket::VideoFormat(kWidth, kHeight,
- cricket::VideoFormat::FpsToInterval(5), cricket::FOURCC_ARGB));
- formats.push_back(cricket::VideoFormat(2 * kWidth, 2 * kHeight,
- cricket::VideoFormat::FpsToInterval(5), cricket::FOURCC_ARGB));
- capturer_.ResetSupportedFormats(formats);
-
-
- EXPECT_EQ(0, capturer_.screencast_max_pixels());
- EXPECT_EQ(cricket::CS_RUNNING, capturer_.Start(cricket::VideoFormat(
- 2 * kWidth,
- 2 * kHeight,
- cricket::VideoFormat::FpsToInterval(30),
- cricket::FOURCC_ARGB)));
- EXPECT_TRUE(capturer_.IsRunning());
- EXPECT_EQ(0, renderer_.num_rendered_frames());
- renderer_.SetSize(2 * kWidth, 2 * kHeight, 0);
- EXPECT_TRUE(capturer_.CaptureFrame());
- EXPECT_EQ(1, renderer_.num_rendered_frames());
-
- capturer_.set_screencast_max_pixels(kWidth * kHeight);
- renderer_.SetSize(kWidth, kHeight, 0);
- EXPECT_TRUE(capturer_.CaptureFrame());
- EXPECT_EQ(2, renderer_.num_rendered_frames());
-}
-
TEST_F(VideoCapturerTest, ScreencastScaledOddWidth) {
capturer_.SetScreencast(true);
diff --git a/talk/media/base/videocommon.cc b/talk/media/base/videocommon.cc
index 7b6aac206b..faf6450b56 100644
--- a/talk/media/base/videocommon.cc
+++ b/talk/media/base/videocommon.cc
@@ -31,6 +31,7 @@
#include <math.h>
#include <sstream>
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/common.h"
namespace cricket {
@@ -58,7 +59,7 @@ static const FourCCAliasEntry kFourCCAliases[] = {
};
uint32_t CanonicalFourCC(uint32_t fourcc) {
- for (int i = 0; i < ARRAY_SIZE(kFourCCAliases); ++i) {
+ for (int i = 0; i < arraysize(kFourCCAliases); ++i) {
if (kFourCCAliases[i].alias == fourcc) {
return kFourCCAliases[i].canonical;
}
@@ -75,7 +76,7 @@ static float kScaleFactors[] = {
1.f / 16.f // 1/16 scale.
};
-static const int kNumScaleFactors = ARRAY_SIZE(kScaleFactors);
+static const int kNumScaleFactors = arraysize(kScaleFactors);
// Finds the scale factor that, when applied to width and height, produces
// fewer than num_pixels.
@@ -106,9 +107,6 @@ void ComputeScaleMaxPixels(int frame_width, int frame_height, int max_pixels,
ASSERT(scaled_width != NULL);
ASSERT(scaled_height != NULL);
ASSERT(max_pixels > 0);
- // For VP8 the values for max width and height can be found here
- // webrtc/src/video_engine/vie_defines.h (kViEMaxCodecWidth and
- // kViEMaxCodecHeight)
const int kMaxWidth = 4096;
const int kMaxHeight = 3072;
int new_frame_width = frame_width;
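
The arraysize() helper adopted throughout this change is safer than the old ARRAY_SIZE macro (sizeof(a)/sizeof(a[0])), which silently yields a wrong answer when handed a pointer. In its usual Chromium/WebRTC form it is a function template whose return type carries the array bound, so non-array arguments fail to compile:

    #include <cstddef>

    // Declared, never defined: only its type matters. For an argument of
    // array type T[N], the return type is char(&)[N].
    template <typename T, std::size_t N>
    char (&ArraySizeHelper(T (&array)[N]))[N];

    // sizeof on the returned char(&)[N] is N, evaluated at compile time.
    #define arraysize(array) (sizeof(ArraySizeHelper(array)))
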
diff --git a/talk/media/base/videoengine_unittest.h b/talk/media/base/videoengine_unittest.h
index d89b3e6f43..d7fa00d558 100644
--- a/talk/media/base/videoengine_unittest.h
+++ b/talk/media/base/videoengine_unittest.h
@@ -126,327 +126,6 @@ class VideoEngineOverride : public T {
}
};
-template<class E>
-class VideoEngineTest : public testing::Test {
- protected:
- // Tests starting and stopping the engine, and creating a channel.
- void StartupShutdown() {
- EXPECT_TRUE(engine_.Init(rtc::Thread::Current()));
- cricket::VideoMediaChannel* channel = engine_.CreateChannel(NULL);
- EXPECT_TRUE(channel != NULL);
- delete channel;
- engine_.Terminate();
- }
-
- void ConstrainNewCodecBody() {
- cricket::VideoCodec empty, in, out;
- cricket::VideoCodec max_settings(engine_.codecs()[0].id,
- engine_.codecs()[0].name,
- 1280, 800, 30, 0);
-
- // set max settings of 1280x800x30
- EXPECT_TRUE(engine_.SetDefaultEncoderConfig(
- cricket::VideoEncoderConfig(max_settings)));
-
- // don't constrain the max resolution
- in = max_settings;
- EXPECT_TRUE(engine_.CanSendCodec(in, empty, &out));
- EXPECT_PRED2(IsEqualCodec, out, in);
-
- // constrain resolution greater than the max and wider aspect,
- // picking best aspect (16:10)
- in.width = 1380;
- in.height = 800;
- EXPECT_TRUE(engine_.CanSendCodec(in, empty, &out));
- EXPECT_PRED4(IsEqualRes, out, 1280, 720, 30);
-
- // constrain resolution greater than the max and narrow aspect,
- // picking best aspect (16:9)
- in.width = 1280;
- in.height = 740;
- EXPECT_TRUE(engine_.CanSendCodec(in, empty, &out));
- EXPECT_PRED4(IsEqualRes, out, 1280, 720, 30);
-
- // constrain resolution greater than the max, picking equal aspect (4:3)
- in.width = 1280;
- in.height = 960;
- EXPECT_TRUE(engine_.CanSendCodec(in, empty, &out));
- EXPECT_PRED4(IsEqualRes, out, 1280, 800, 30);
-
- // constrain resolution greater than the max, picking equal aspect (16:10)
- in.width = 1280;
- in.height = 800;
- EXPECT_TRUE(engine_.CanSendCodec(in, empty, &out));
- EXPECT_PRED4(IsEqualRes, out, 1280, 800, 30);
-
- // reduce max settings to 640x480x30
- max_settings.width = 640;
- max_settings.height = 480;
- EXPECT_TRUE(engine_.SetDefaultEncoderConfig(
- cricket::VideoEncoderConfig(max_settings)));
-
- // don't constrain the max resolution
- in = max_settings;
- in.width = 640;
- in.height = 480;
- EXPECT_TRUE(engine_.CanSendCodec(in, empty, &out));
- EXPECT_PRED2(IsEqualCodec, out, in);
-
- // keep 16:10 if they request it
- in.height = 400;
- EXPECT_TRUE(engine_.CanSendCodec(in, empty, &out));
- EXPECT_PRED2(IsEqualCodec, out, in);
-
- // don't constrain lesser 4:3 resolutions
- in.width = 320;
- in.height = 240;
- EXPECT_TRUE(engine_.CanSendCodec(in, empty, &out));
- EXPECT_PRED2(IsEqualCodec, out, in);
-
- // don't constrain lesser 16:10 resolutions
- in.width = 320;
- in.height = 200;
- EXPECT_TRUE(engine_.CanSendCodec(in, empty, &out));
- EXPECT_PRED2(IsEqualCodec, out, in);
-
- // requested resolution of 0x0 succeeds
- in.width = 0;
- in.height = 0;
- EXPECT_TRUE(engine_.CanSendCodec(in, empty, &out));
- EXPECT_PRED2(IsEqualCodec, out, in);
-
- // constrain resolution lesser than the max and wider aspect,
- // picking best aspect (16:9)
- in.width = 350;
- in.height = 201;
- EXPECT_TRUE(engine_.CanSendCodec(in, empty, &out));
- EXPECT_PRED4(IsEqualRes, out, 320, 180, 30);
-
- // constrain resolution greater than the max and narrow aspect,
- // picking best aspect (4:3)
- in.width = 350;
- in.height = 300;
- EXPECT_TRUE(engine_.CanSendCodec(in, empty, &out));
- EXPECT_PRED4(IsEqualRes, out, 320, 240, 30);
-
- // constrain resolution greater than the max and wider aspect,
- // picking best aspect (16:9)
- in.width = 1380;
- in.height = 800;
- EXPECT_TRUE(engine_.CanSendCodec(in, empty, &out));
- EXPECT_PRED4(IsEqualRes, out, 640, 360, 30);
-
- // constrain resolution greater than the max and narrow aspect,
- // picking best aspect (4:3)
- in.width = 1280;
- in.height = 900;
- EXPECT_TRUE(engine_.CanSendCodec(in, empty, &out));
- EXPECT_PRED4(IsEqualRes, out, 640, 480, 30);
-
- // constrain resolution greater than the max, picking equal aspect (4:3)
- in.width = 1280;
- in.height = 960;
- EXPECT_TRUE(engine_.CanSendCodec(in, empty, &out));
- EXPECT_PRED4(IsEqualRes, out, 640, 480, 30);
-
- // constrain resolution greater than the max, picking equal aspect (16:10)
- in.width = 1280;
- in.height = 800;
- EXPECT_TRUE(engine_.CanSendCodec(in, empty, &out));
- EXPECT_PRED4(IsEqualRes, out, 640, 400, 30);
-
- // constrain res & fps greater than the max
- in.framerate = 50;
- EXPECT_TRUE(engine_.CanSendCodec(in, empty, &out));
- EXPECT_PRED4(IsEqualRes, out, 640, 400, 30);
-
- // reduce max settings to 160x100x10
- max_settings.width = 160;
- max_settings.height = 100;
- max_settings.framerate = 10;
- EXPECT_TRUE(engine_.SetDefaultEncoderConfig(
- cricket::VideoEncoderConfig(max_settings)));
-
- // constrain res & fps to new max
- EXPECT_TRUE(engine_.CanSendCodec(in, empty, &out));
- EXPECT_PRED4(IsEqualRes, out, 160, 100, 10);
-
- // allow 4:3 "comparable" resolutions
- in.width = 160;
- in.height = 120;
- in.framerate = 10;
- EXPECT_TRUE(engine_.CanSendCodec(in, empty, &out));
- EXPECT_PRED4(IsEqualRes, out, 160, 120, 10);
- }
-
- // This is the new way of constraining codec size, where we no longer maintain
- // a list of the supported formats. Instead, CanSendCodec will just downscale
- // the resolution by 2 until the width is below clamp.
- void ConstrainNewCodec2Body() {
- cricket::VideoCodec empty, in, out;
- cricket::VideoCodec max_settings(engine_.codecs()[0].id,
- engine_.codecs()[0].name,
- 1280, 800, 30, 0);
-
- // Set max settings of 1280x800x30
- EXPECT_TRUE(engine_.SetDefaultEncoderConfig(
- cricket::VideoEncoderConfig(max_settings)));
-
- // Don't constrain the max resolution
- in = max_settings;
- EXPECT_TRUE(engine_.CanSendCodec(in, empty, &out));
- EXPECT_PRED2(IsEqualCodec, out, in);
-
- // Constrain resolution greater than the max width.
- in.width = 1380;
- in.height = 800;
- EXPECT_TRUE(engine_.CanSendCodec(in, empty, &out));
- EXPECT_PRED4(IsEqualRes, out, 690, 400, 30);
-
- // Don't constrain resolution when only the height is greater than max.
- in.width = 960;
- in.height = 1280;
- EXPECT_TRUE(engine_.CanSendCodec(in, empty, &out));
- EXPECT_PRED4(IsEqualRes, out, 960, 1280, 30);
-
- // Don't constrain smaller format.
- in.width = 640;
- in.height = 480;
- EXPECT_TRUE(engine_.CanSendCodec(in, empty, &out));
- EXPECT_PRED4(IsEqualRes, out, 640, 480, 30);
- }
-
- void ConstrainRunningCodecBody() {
- cricket::VideoCodec in, out, current;
- cricket::VideoCodec max_settings(engine_.codecs()[0].id,
- engine_.codecs()[0].name,
- 1280, 800, 30, 0);
-
- // set max settings of 1280x960x30
- EXPECT_TRUE(engine_.SetDefaultEncoderConfig(
- cricket::VideoEncoderConfig(max_settings)));
-
- // establish current call at 1280x800x30 (16:10)
- current = max_settings;
- current.height = 800;
-
- // Don't constrain current resolution
- in = current;
- EXPECT_TRUE(engine_.CanSendCodec(in, current, &out));
- EXPECT_PRED2(IsEqualCodec, out, in);
-
- // requested resolution of 0x0 succeeds
- in.width = 0;
- in.height = 0;
- EXPECT_TRUE(engine_.CanSendCodec(in, current, &out));
- EXPECT_PRED2(IsEqualCodec, out, in);
-
- // Reduce an intermediate resolution down to the next lowest one, preserving
- // aspect ratio.
- in.width = 800;
- in.height = 600;
- EXPECT_TRUE(engine_.CanSendCodec(in, current, &out));
- EXPECT_PRED4(IsEqualRes, out, 640, 400, 30);
-
- // Clamping by aspect ratio, but still never return a dimension higher than
- // requested.
- in.width = 1280;
- in.height = 720;
- EXPECT_TRUE(engine_.CanSendCodec(in, current, &out));
- EXPECT_PRED4(IsEqualRes, out, 1280, 720, 30);
-
- in.width = 1279;
- EXPECT_TRUE(engine_.CanSendCodec(in, current, &out));
- EXPECT_PRED4(IsEqualRes, out, 960, 600, 30);
-
- in.width = 1281;
- EXPECT_TRUE(engine_.CanSendCodec(in, current, &out));
- EXPECT_PRED4(IsEqualRes, out, 1280, 720, 30);
-
- // Clamp large resolutions down, always preserving aspect
- in.width = 1920;
- in.height = 1080;
- EXPECT_TRUE(engine_.CanSendCodec(in, current, &out));
- EXPECT_PRED4(IsEqualRes, out, 1280, 800, 30);
-
- in.width = 1921;
- EXPECT_TRUE(engine_.CanSendCodec(in, current, &out));
- EXPECT_PRED4(IsEqualRes, out, 1280, 800, 30);
-
- in.width = 1919;
- EXPECT_TRUE(engine_.CanSendCodec(in, current, &out));
- EXPECT_PRED4(IsEqualRes, out, 1280, 800, 30);
-
- // reduce max settings to 640x480x30
- max_settings.width = 640;
- max_settings.height = 480;
- EXPECT_TRUE(engine_.SetDefaultEncoderConfig(
- cricket::VideoEncoderConfig(max_settings)));
-
- // establish current call at 640x400x30 (16:10)
- current = max_settings;
- current.height = 400;
-
- // Don't constrain current resolution
- in = current;
- EXPECT_TRUE(engine_.CanSendCodec(in, current, &out));
- EXPECT_PRED2(IsEqualCodec, out, in);
-
- // requested resolution of 0x0 succeeds
- in.width = 0;
- in.height = 0;
- EXPECT_TRUE(engine_.CanSendCodec(in, current, &out));
- EXPECT_PRED2(IsEqualCodec, out, in);
-
- // Reduce an intermediate resolution down to the next lowest one, preserving
- // aspect ratio.
- in.width = 400;
- in.height = 300;
- EXPECT_TRUE(engine_.CanSendCodec(in, current, &out));
- EXPECT_PRED4(IsEqualRes, out, 320, 200, 30);
-
- // Clamping by aspect ratio, but still never return a dimension higher than
- // requested.
- in.width = 640;
- in.height = 360;
- EXPECT_TRUE(engine_.CanSendCodec(in, current, &out));
- EXPECT_PRED4(IsEqualRes, out, 640, 360, 30);
-
- in.width = 639;
- EXPECT_TRUE(engine_.CanSendCodec(in, current, &out));
- EXPECT_PRED4(IsEqualRes, out, 480, 300, 30);
-
- in.width = 641;
- EXPECT_TRUE(engine_.CanSendCodec(in, current, &out));
- EXPECT_PRED4(IsEqualRes, out, 640, 360, 30);
-
- // Clamp large resolutions down, always preserving aspect
- in.width = 1280;
- in.height = 800;
- EXPECT_TRUE(engine_.CanSendCodec(in, current, &out));
- EXPECT_PRED4(IsEqualRes, out, 640, 400, 30);
-
- in.width = 1281;
- EXPECT_TRUE(engine_.CanSendCodec(in, current, &out));
- EXPECT_PRED4(IsEqualRes, out, 640, 400, 30);
-
- in.width = 1279;
- EXPECT_TRUE(engine_.CanSendCodec(in, current, &out));
- EXPECT_PRED4(IsEqualRes, out, 640, 400, 30);
-
- // Should fail for any that are smaller than our supported formats
- in.width = 80;
- in.height = 80;
- EXPECT_FALSE(engine_.CanSendCodec(in, current, &out));
-
- in.height = 50;
- EXPECT_FALSE(engine_.CanSendCodec(in, current, &out));
- }
-
- VideoEngineOverride<E> engine_;
- rtc::scoped_ptr<cricket::FakeVideoCapturer> video_capturer_;
-};
-
template<class E, class C>
class VideoMediaChannelTest : public testing::Test,
public sigslot::has_slots<> {
@@ -875,7 +554,7 @@ class VideoMediaChannelTest : public testing::Test,
EXPECT_TRUE(SetOneCodec(DefaultCodec()));
cricket::VideoSendParameters parameters;
parameters.codecs.push_back(DefaultCodec());
- parameters.options.conference_mode.Set(true);
+ parameters.options.conference_mode = rtc::Optional<bool>(true);
EXPECT_TRUE(channel_->SetSendParameters(parameters));
EXPECT_TRUE(SetSend(true));
EXPECT_TRUE(channel_->AddRecvStream(
@@ -926,7 +605,7 @@ class VideoMediaChannelTest : public testing::Test,
EXPECT_TRUE(SetOneCodec(DefaultCodec()));
cricket::VideoSendParameters parameters;
parameters.codecs.push_back(DefaultCodec());
- parameters.options.conference_mode.Set(true);
+ parameters.options.conference_mode = rtc::Optional<bool>(true);
EXPECT_TRUE(channel_->SetSendParameters(parameters));
EXPECT_TRUE(channel_->AddRecvStream(
cricket::StreamParams::CreateLegacy(kSsrc)));
@@ -1009,8 +688,10 @@ class VideoMediaChannelTest : public testing::Test,
rtc::scoped_ptr<const rtc::Buffer> p(GetRtpPacket(0));
ParseRtpPacket(p.get(), NULL, NULL, NULL, NULL, &ssrc, NULL);
EXPECT_EQ(kSsrc, ssrc);
- EXPECT_EQ(NumRtpPackets(), NumRtpPackets(ssrc));
- EXPECT_EQ(NumRtpBytes(), NumRtpBytes(ssrc));
+ // Packets are being paced out, so these can mismatch between the first and
+ // second call to NumRtpPackets until pending packets are paced out.
+ EXPECT_EQ_WAIT(NumRtpPackets(), NumRtpPackets(ssrc), kTimeout);
+ EXPECT_EQ_WAIT(NumRtpBytes(), NumRtpBytes(ssrc), kTimeout);
EXPECT_EQ(1, NumSentSsrcs());
EXPECT_EQ(0, NumRtpPackets(kSsrc - 1));
EXPECT_EQ(0, NumRtpBytes(kSsrc - 1));
@@ -1031,8 +712,10 @@ class VideoMediaChannelTest : public testing::Test,
rtc::scoped_ptr<const rtc::Buffer> p(GetRtpPacket(0));
ParseRtpPacket(p.get(), NULL, NULL, NULL, NULL, &ssrc, NULL);
EXPECT_EQ(999u, ssrc);
- EXPECT_EQ(NumRtpPackets(), NumRtpPackets(ssrc));
- EXPECT_EQ(NumRtpBytes(), NumRtpBytes(ssrc));
+ // Packets are being paced out, so these can mismatch between the first and
+ // second call to NumRtpPackets until pending packets are paced out.
+ EXPECT_EQ_WAIT(NumRtpPackets(), NumRtpPackets(ssrc), kTimeout);
+ EXPECT_EQ_WAIT(NumRtpBytes(), NumRtpBytes(ssrc), kTimeout);
EXPECT_EQ(1, NumSentSsrcs());
EXPECT_EQ(0, NumRtpPackets(kSsrc));
EXPECT_EQ(0, NumRtpBytes(kSsrc));
@@ -1236,7 +919,7 @@ class VideoMediaChannelTest : public testing::Test,
EXPECT_TRUE(SetDefaultCodec());
cricket::VideoSendParameters parameters;
parameters.codecs.push_back(DefaultCodec());
- parameters.options.conference_mode.Set(true);
+ parameters.options.conference_mode = rtc::Optional<bool>(true);
EXPECT_TRUE(channel_->SetSendParameters(parameters));
EXPECT_TRUE(SetSend(true));
EXPECT_TRUE(channel_->AddRecvStream(
@@ -1746,8 +1429,8 @@ class VideoMediaChannelTest : public testing::Test,
// Tests that we can send and receive frames with early receive.
void TwoStreamsSendAndUnsignalledRecv(const cricket::VideoCodec& codec) {
cricket::VideoSendParameters parameters;
- parameters.options.conference_mode.Set(true);
- parameters.options.unsignalled_recv_stream_limit.Set(1);
+ parameters.options.conference_mode = rtc::Optional<bool>(true);
+ parameters.options.unsignalled_recv_stream_limit = rtc::Optional<int>(1);
EXPECT_TRUE(channel_->SetSendParameters(parameters));
SetUpSecondStreamWithNoRecv();
// Test sending and receiving on first stream.
@@ -1780,8 +1463,8 @@ class VideoMediaChannelTest : public testing::Test,
void TwoStreamsAddAndRemoveUnsignalledRecv(
const cricket::VideoCodec& codec) {
cricket::VideoOptions vmo;
- vmo.conference_mode.Set(true);
- vmo.unsignalled_recv_stream_limit.Set(1);
+ vmo.conference_mode = rtc::Optional<bool>(true);
+ vmo.unsignalled_recv_stream_limit = rtc::Optional<int>(1);
EXPECT_TRUE(channel_->SetOptions(vmo));
SetUpSecondStreamWithNoRecv();
// Sending and receiving on first stream.
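
The test updates above track an API change in VideoOptions: settable fields are now rtc::Optional<T> values (webrtc/base/optional.h) assigned directly instead of mutated through Set(). A minimal sketch of the new idiom; the Options struct and helper here are illustrative only:

    #include "webrtc/base/optional.h"

    struct Options {
      rtc::Optional<bool> conference_mode;              // unset by default
      rtc::Optional<int> unsignalled_recv_stream_limit;
    };

    void EnableConference(Options* options) {
      // Assigning a constructed Optional both stores the value and marks
      // the field as explicitly set.
      options->conference_mode = rtc::Optional<bool>(true);
      options->unsignalled_recv_stream_limit = rtc::Optional<int>(1);
      if (options->conference_mode) {         // has the field been set?
        bool on = *options->conference_mode;  // read the contained value
        (void)on;
      }
    }
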
diff --git a/talk/media/base/videoframe.cc b/talk/media/base/videoframe.cc
index 2b604b085b..3e4d60a258 100644
--- a/talk/media/base/videoframe.cc
+++ b/talk/media/base/videoframe.cc
@@ -33,6 +33,7 @@
#include "libyuv/planar_functions.h"
#include "libyuv/scale.h"
#include "talk/media/base/videocommon.h"
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
@@ -318,7 +319,7 @@ bool VideoFrame::Validate(uint32_t fourcc,
}
// TODO(fbarchard): Make function to dump information about frames.
uint8_t four_samples[4] = {0, 0, 0, 0};
- for (size_t i = 0; i < ARRAY_SIZE(four_samples) && i < sample_size; ++i) {
+ for (size_t i = 0; i < arraysize(four_samples) && i < sample_size; ++i) {
four_samples[i] = sample[i];
}
if (sample_size < expected_size) {
diff --git a/talk/media/base/videoframe.h b/talk/media/base/videoframe.h
index 217732fa18..f81c678d61 100644
--- a/talk/media/base/videoframe.h
+++ b/talk/media/base/videoframe.h
@@ -30,7 +30,7 @@
#include "webrtc/base/basictypes.h"
#include "webrtc/base/stream.h"
-#include "webrtc/common_video/interface/video_frame_buffer.h"
+#include "webrtc/common_video/include/video_frame_buffer.h"
#include "webrtc/common_video/rotation.h"
namespace cricket {
diff --git a/talk/media/base/videoframefactory.cc b/talk/media/base/videoframefactory.cc
index dfd97c6faa..fb81096c31 100644
--- a/talk/media/base/videoframefactory.cc
+++ b/talk/media/base/videoframefactory.cc
@@ -51,8 +51,8 @@ VideoFrame* VideoFrameFactory::CreateAliasedFrame(
// If the frame is rotated, we need to switch the width and height.
if (apply_rotation_ &&
- (input_frame->GetRotation() == webrtc::kVideoRotation_90 ||
- input_frame->GetRotation() == webrtc::kVideoRotation_270)) {
+ (input_frame->rotation == webrtc::kVideoRotation_90 ||
+ input_frame->rotation == webrtc::kVideoRotation_270)) {
std::swap(output_width, output_height);
}
diff --git a/talk/media/base/videorenderer.h b/talk/media/base/videorenderer.h
index 0a0ee51817..a18c4e3c29 100644
--- a/talk/media/base/videorenderer.h
+++ b/talk/media/base/videorenderer.h
@@ -42,11 +42,12 @@ class VideoFrame;
class VideoRenderer {
public:
virtual ~VideoRenderer() {}
- // Called when the video has changed size. This is also used as an
- // initialization method to set the UI size before any video frame
- // rendered. webrtc::ExternalRenderer's FrameSizeChange will invoke this when
- // it's called or later when a VideoRenderer is attached.
- virtual bool SetSize(int width, int height, int reserved) = 0;
+ // Called when the video has changed size.
+ // TODO(nisse): This method is not really used, and should be
+ // deleted. Provide a default do-nothing implementation, to ease the
+ // transition as the method is deleted in subclasses, in particular,
+ // Chrome's MockVideoRenderer class.
+ virtual bool SetSize(int width, int height, int reserved) { return true; }
// Called when a new frame is available for display.
virtual bool RenderFrame(const VideoFrame *frame) = 0;
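
With SetSize() given a default implementation, a renderer only has to override RenderFrame(). A sketch of the minimal subclass this enables; FrameCounter is a made-up example, not a class in the tree:

    #include "talk/media/base/videorenderer.h"

    class FrameCounter : public cricket::VideoRenderer {
     public:
      // SetSize() is inherited; only frame delivery needs handling.
      bool RenderFrame(const cricket::VideoFrame* frame) override {
        ++frames_;
        return true;
      }
      int frames() const { return frames_; }

     private:
      int frames_ = 0;
    };
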
diff --git a/talk/media/base/voiceprocessor.h b/talk/media/base/voiceprocessor.h
deleted file mode 100755
index 8de2678c95..0000000000
--- a/talk/media/base/voiceprocessor.h
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * libjingle
- * Copyright 2004 Google Inc.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions are met:
- *
- * 1. Redistributions of source code must retain the above copyright notice,
- * this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright notice,
- * this list of conditions and the following disclaimer in the documentation
- * and/or other materials provided with the distribution.
- * 3. The name of the author may not be used to endorse or promote products
- * derived from this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
- * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
- * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
- * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
- * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
- * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
- * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-// TODO(solenberg): Remove this file once Chromium's libjingle.gyp/.gn are
-// updated.
diff --git a/talk/media/devices/carbonvideorenderer.cc b/talk/media/devices/carbonvideorenderer.cc
index 846135d925..b711ae4fbd 100644
--- a/talk/media/devices/carbonvideorenderer.cc
+++ b/talk/media/devices/carbonvideorenderer.cc
@@ -40,7 +40,6 @@ CarbonVideoRenderer::CarbonVideoRenderer(int x, int y)
image_height_(0),
x_(x),
y_(y),
- image_ref_(NULL),
window_ref_(NULL) {
}
diff --git a/talk/media/devices/carbonvideorenderer.h b/talk/media/devices/carbonvideorenderer.h
index 52c974060c..e8329ea031 100644
--- a/talk/media/devices/carbonvideorenderer.h
+++ b/talk/media/devices/carbonvideorenderer.h
@@ -65,7 +65,6 @@ class CarbonVideoRenderer : public VideoRenderer {
int image_height_;
int x_;
int y_;
- CGImageRef image_ref_;
WindowRef window_ref_;
};
diff --git a/talk/media/devices/devicemanager.cc b/talk/media/devices/devicemanager.cc
index 1d7ac5baf1..eca14a5def 100644
--- a/talk/media/devices/devicemanager.cc
+++ b/talk/media/devices/devicemanager.cc
@@ -123,7 +123,7 @@ bool DeviceManager::GetAudioOutputDevice(const std::string& name, Device* out) {
bool DeviceManager::GetVideoCaptureDevices(std::vector<Device>* devices) {
devices->clear();
-#if defined(ANDROID) || defined(IOS)
+#if defined(ANDROID) || defined(WEBRTC_IOS)
// On Android and iOS, we treat the camera(s) as a single device. Even if
// there are multiple cameras, that's abstracted away at a higher level.
Device dev("camera", "1"); // name and ID
diff --git a/talk/media/devices/devicemanager_unittest.cc b/talk/media/devices/devicemanager_unittest.cc
index f259c7d0d3..606a05e7c3 100644
--- a/talk/media/devices/devicemanager_unittest.cc
+++ b/talk/media/devices/devicemanager_unittest.cc
@@ -39,6 +39,7 @@
#include "talk/media/base/videocapturerfactory.h"
#include "talk/media/devices/filevideocapturer.h"
#include "talk/media/devices/v4llookup.h"
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/fileutils.h"
#include "webrtc/base/gunit.h"
#include "webrtc/base/logging.h"
@@ -47,10 +48,10 @@
#include "webrtc/base/stream.h"
#include "webrtc/base/windowpickerfactory.h"
-#ifdef LINUX
+#ifdef WEBRTC_LINUX
// TODO(juberti): Figure out why this doesn't compile on Windows.
#include "webrtc/base/fileutils_mock.h"
-#endif // LINUX
+#endif // WEBRTC_LINUX
using rtc::Pathname;
using rtc::FileTimeType;
@@ -269,22 +270,22 @@ TEST(DeviceManagerTest, VerifyFilterDevices) {
"device5",
};
std::vector<Device> devices;
- for (int i = 0; i < ARRAY_SIZE(kTotalDevicesName); ++i) {
+ for (int i = 0; i < arraysize(kTotalDevicesName); ++i) {
devices.push_back(Device(kTotalDevicesName[i], i));
}
EXPECT_TRUE(CompareDeviceList(devices, kTotalDevicesName,
- ARRAY_SIZE(kTotalDevicesName)));
+ arraysize(kTotalDevicesName)));
// Return false if given NULL as the exclusion list.
EXPECT_TRUE(DeviceManager::FilterDevices(&devices, NULL));
// The devices should not change.
EXPECT_TRUE(CompareDeviceList(devices, kTotalDevicesName,
- ARRAY_SIZE(kTotalDevicesName)));
+ arraysize(kTotalDevicesName)));
EXPECT_TRUE(DeviceManager::FilterDevices(&devices, kFilteredDevicesName));
EXPECT_TRUE(CompareDeviceList(devices, kDevicesName,
- ARRAY_SIZE(kDevicesName)));
+ arraysize(kDevicesName)));
}
-#ifdef LINUX
+#ifdef WEBRTC_LINUX
class FakeV4LLookup : public cricket::V4LLookup {
public:
explicit FakeV4LLookup(std::vector<std::string> device_paths)
@@ -376,7 +377,7 @@ TEST(DeviceManagerTest, GetVideoCaptureDevices_KUnknown) {
EXPECT_EQ("/dev/video0", video_ins.at(0).name);
EXPECT_EQ("/dev/video5", video_ins.at(1).name);
}
-#endif // LINUX
+#endif // WEBRTC_LINUX
// TODO(noahric): These are flaky on windows on headless machines.
#ifndef WIN32
diff --git a/talk/media/devices/fakedevicemanager.h b/talk/media/devices/fakedevicemanager.h
index a4b2b86e44..77a83424b2 100644
--- a/talk/media/devices/fakedevicemanager.h
+++ b/talk/media/devices/fakedevicemanager.h
@@ -156,7 +156,7 @@ class FakeDeviceManager : public DeviceManagerInterface {
return true;
}
-#ifdef OSX
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
bool QtKitToSgDevice(const std::string& qtkit_name, Device* out) {
out->name = qtkit_name;
out->id = "sg:" + qtkit_name;
diff --git a/talk/media/devices/mobiledevicemanager.cc b/talk/media/devices/mobiledevicemanager.cc
index 2a886a36d4..5739c7e8d6 100644
--- a/talk/media/devices/mobiledevicemanager.cc
+++ b/talk/media/devices/mobiledevicemanager.cc
@@ -27,7 +27,7 @@
#include "talk/media/devices/devicemanager.h"
#include "webrtc/base/arraysize.h"
-#include "webrtc/modules/video_capture/include/video_capture_factory.h"
+#include "webrtc/modules/video_capture/video_capture_factory.h"
namespace cricket {
diff --git a/talk/media/devices/v4llookup.h b/talk/media/devices/v4llookup.h
index 1bed90b650..5c53ede99f 100644
--- a/talk/media/devices/v4llookup.h
+++ b/talk/media/devices/v4llookup.h
@@ -37,7 +37,7 @@
#include <string>
-#ifdef LINUX
+#ifdef WEBRTC_LINUX
namespace cricket {
class V4LLookup {
public:
@@ -66,5 +66,5 @@ class V4LLookup {
} // namespace cricket
-#endif // LINUX
+#endif // WEBRTC_LINUX
#endif // TALK_MEDIA_DEVICES_V4LLOOKUP_H_
diff --git a/talk/media/devices/videorendererfactory.h b/talk/media/devices/videorendererfactory.h
index 416f05b297..b7128f625d 100644
--- a/talk/media/devices/videorendererfactory.h
+++ b/talk/media/devices/videorendererfactory.h
@@ -32,9 +32,9 @@
#define TALK_MEDIA_DEVICES_VIDEORENDERERFACTORY_H_
#include "talk/media/base/videorenderer.h"
-#if defined(LINUX) && defined(HAVE_GTK)
+#if defined(WEBRTC_LINUX) && defined(HAVE_GTK)
#include "talk/media/devices/gtkvideorenderer.h"
-#elif defined(OSX) && !defined(CARBON_DEPRECATED)
+#elif defined(WEBRTC_MAC) && !defined(WEBRTC_IOS) && !defined(CARBON_DEPRECATED)
#include "talk/media/devices/carbonvideorenderer.h"
#elif defined(WIN32)
#include "talk/media/devices/gdivideorenderer.h"
@@ -45,9 +45,10 @@ namespace cricket {
class VideoRendererFactory {
public:
static VideoRenderer* CreateGuiVideoRenderer(int x, int y) {
- #if defined(LINUX) && defined(HAVE_GTK)
+ #if defined(WEBRTC_LINUX) && defined(HAVE_GTK)
return new GtkVideoRenderer(x, y);
- #elif defined(OSX) && !defined(CARBON_DEPRECATED)
+ #elif defined(WEBRTC_MAC) && !defined(WEBRTC_IOS) && \
+ !defined(CARBON_DEPRECATED)
CarbonVideoRenderer* renderer = new CarbonVideoRenderer(x, y);
// Needs to be initialized on the main thread.
if (renderer->Initialize()) {
diff --git a/talk/media/devices/win32devicemanager.cc b/talk/media/devices/win32devicemanager.cc
index 1b9e9d86f6..f34e3c44eb 100644
--- a/talk/media/devices/win32devicemanager.cc
+++ b/talk/media/devices/win32devicemanager.cc
@@ -48,6 +48,7 @@ EXTERN_C const PROPERTYKEY PKEY_AudioEndpoint_GUID = { {
} }, 4
};
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/stringutils.h"
#include "webrtc/base/thread.h"
@@ -148,7 +149,7 @@ bool Win32DeviceManager::GetDefaultVideoCaptureDevice(Device* device) {
*device = devices[0];
for (size_t i = 0; i < devices.size(); ++i) {
if (strnicmp(devices[i].id.c_str(), kUsbDevicePathPrefix,
- ARRAY_SIZE(kUsbDevicePathPrefix) - 1) == 0) {
+ arraysize(kUsbDevicePathPrefix) - 1) == 0) {
*device = devices[i];
break;
}
diff --git a/talk/media/sctp/sctpdataengine.cc b/talk/media/sctp/sctpdataengine.cc
index c88882d42d..3753cd22c0 100644
--- a/talk/media/sctp/sctpdataengine.cc
+++ b/talk/media/sctp/sctpdataengine.cc
@@ -36,6 +36,7 @@
#include "talk/media/base/constants.h"
#include "talk/media/base/streamparams.h"
#include "usrsctplib/usrsctp.h"
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/buffer.h"
#include "webrtc/base/helpers.h"
#include "webrtc/base/logging.h"
@@ -76,7 +77,7 @@ std::string ListFlags(int flags) {
MAKEFLAG(SCTP_STREAM_CHANGE_DENIED)
};
#undef MAKEFLAG
- for (int i = 0; i < ARRAY_SIZE(flaginfo); ++i) {
+ for (int i = 0; i < arraysize(flaginfo); ++i) {
if (flags & flaginfo[i].value) {
if (!first) result << " | ";
result << flaginfo[i].name;
@@ -473,7 +474,7 @@ bool SctpDataMediaChannel::OpenSctpSocket() {
struct sctp_event event = {0};
event.se_assoc_id = SCTP_ALL_ASSOC;
event.se_on = 1;
- for (size_t i = 0; i < ARRAY_SIZE(event_types); i++) {
+ for (size_t i = 0; i < arraysize(event_types); i++) {
event.se_type = event_types[i];
if (usrsctp_setsockopt(sock_, IPPROTO_SCTP, SCTP_EVENT, &event,
sizeof(event)) < 0) {
@@ -728,7 +729,13 @@ bool SctpDataMediaChannel::AddStream(const StreamParams& stream) {
}
const uint32_t ssrc = stream.first_ssrc();
- if (open_streams_.find(ssrc) != open_streams_.end()) {
+ if (ssrc >= cricket::kMaxSctpSid) {
+ LOG(LS_WARNING) << debug_name_ << "->Add(Send|Recv)Stream(...): "
+ << "Not adding data stream '" << stream.id
+ << "' with ssrc=" << ssrc
+ << " because stream ssrc is too high.";
+ return false;
+ } else if (open_streams_.find(ssrc) != open_streams_.end()) {
LOG(LS_WARNING) << debug_name_ << "->Add(Send|Recv)Stream(...): "
<< "Not adding data stream '" << stream.id
<< "' with ssrc=" << ssrc
diff --git a/talk/media/sctp/sctpdataengine_unittest.cc b/talk/media/sctp/sctpdataengine_unittest.cc
index 4706368b9d..d673c69c98 100644
--- a/talk/media/sctp/sctpdataengine_unittest.cc
+++ b/talk/media/sctp/sctpdataengine_unittest.cc
@@ -270,12 +270,14 @@ class SctpDataMediaChannelTest : public testing::Test,
ProcessMessagesUntilIdle();
}
- void AddStream(int ssrc) {
+ bool AddStream(int ssrc) {
+ bool ret = true;
cricket::StreamParams p(cricket::StreamParams::CreateLegacy(ssrc));
- chan1_->AddSendStream(p);
- chan1_->AddRecvStream(p);
- chan2_->AddSendStream(p);
- chan2_->AddRecvStream(p);
+ ret = ret && chan1_->AddSendStream(p);
+ ret = ret && chan1_->AddRecvStream(p);
+ ret = ret && chan2_->AddSendStream(p);
+ ret = ret && chan2_->AddRecvStream(p);
+ return ret;
}
cricket::SctpDataMediaChannel* CreateChannel(
@@ -504,6 +506,12 @@ TEST_F(SctpDataMediaChannelTest, EngineSignalsRightChannel) {
EXPECT_GT(channel1_ready_to_send_count(), prior_count);
}
+TEST_F(SctpDataMediaChannelTest, RefusesHighNumberedChannels) {
+ SetupConnectedChannels();
+ EXPECT_TRUE(AddStream(1022));
+ EXPECT_FALSE(AddStream(1023));
+}
+
// Flaky on Linux and Windows. See webrtc:4453.
#if defined(WEBRTC_WIN) || defined(WEBRTC_LINUX)
#define MAYBE_ReusesAStream DISABLED_ReusesAStream
diff --git a/talk/media/webrtc/fakewebrtccall.cc b/talk/media/webrtc/fakewebrtccall.cc
index d86bfb553c..d50a53cb63 100644
--- a/talk/media/webrtc/fakewebrtccall.cc
+++ b/talk/media/webrtc/fakewebrtccall.cc
@@ -28,10 +28,12 @@
#include "talk/media/webrtc/fakewebrtccall.h"
#include <algorithm>
+#include <utility>
#include "talk/media/base/rtputils.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/gunit.h"
+#include "webrtc/audio/audio_sink.h"
namespace cricket {
FakeAudioSendStream::FakeAudioSendStream(
@@ -39,14 +41,27 @@ FakeAudioSendStream::FakeAudioSendStream(
RTC_DCHECK(config.voe_channel_id != -1);
}
+const webrtc::AudioSendStream::Config&
+ FakeAudioSendStream::GetConfig() const {
+ return config_;
+}
+
void FakeAudioSendStream::SetStats(
const webrtc::AudioSendStream::Stats& stats) {
stats_ = stats;
}
-const webrtc::AudioSendStream::Config&
- FakeAudioSendStream::GetConfig() const {
- return config_;
+FakeAudioSendStream::TelephoneEvent
+ FakeAudioSendStream::GetLatestTelephoneEvent() const {
+ return latest_telephone_event_;
+}
+
+bool FakeAudioSendStream::SendTelephoneEvent(int payload_type, uint8_t event,
+ uint32_t duration_ms) {
+ latest_telephone_event_.payload_type = payload_type;
+ latest_telephone_event_.event_code = event;
+ latest_telephone_event_.duration_ms = duration_ms;
+ return true;
}
webrtc::AudioSendStream::Stats FakeAudioSendStream::GetStats() const {
@@ -77,6 +92,11 @@ webrtc::AudioReceiveStream::Stats FakeAudioReceiveStream::GetStats() const {
return stats_;
}
+void FakeAudioReceiveStream::SetSink(
+ rtc::scoped_ptr<webrtc::AudioSinkInterface> sink) {
+ sink_ = std::move(sink);
+}
+
FakeVideoSendStream::FakeVideoSendStream(
const webrtc::VideoSendStream::Config& config,
const webrtc::VideoEncoderConfig& encoder_config)
diff --git a/talk/media/webrtc/fakewebrtccall.h b/talk/media/webrtc/fakewebrtccall.h
index 88edc60d78..3528c7a7b1 100644
--- a/talk/media/webrtc/fakewebrtccall.h
+++ b/talk/media/webrtc/fakewebrtccall.h
@@ -47,14 +47,19 @@
#include "webrtc/video_send_stream.h"
namespace cricket {
-
-class FakeAudioSendStream : public webrtc::AudioSendStream {
+class FakeAudioSendStream final : public webrtc::AudioSendStream {
public:
- explicit FakeAudioSendStream(
- const webrtc::AudioSendStream::Config& config);
+ struct TelephoneEvent {
+ int payload_type = -1;
+ uint8_t event_code = 0;
+ uint32_t duration_ms = 0;
+ };
+
+ explicit FakeAudioSendStream(const webrtc::AudioSendStream::Config& config);
const webrtc::AudioSendStream::Config& GetConfig() const;
void SetStats(const webrtc::AudioSendStream::Stats& stats);
+ TelephoneEvent GetLatestTelephoneEvent() const;
private:
// webrtc::SendStream implementation.
@@ -66,13 +71,16 @@ class FakeAudioSendStream : public webrtc::AudioSendStream {
}
// webrtc::AudioSendStream implementation.
+ bool SendTelephoneEvent(int payload_type, uint8_t event,
+ uint32_t duration_ms) override;
webrtc::AudioSendStream::Stats GetStats() const override;
+ TelephoneEvent latest_telephone_event_;
webrtc::AudioSendStream::Config config_;
webrtc::AudioSendStream::Stats stats_;
};
-class FakeAudioReceiveStream : public webrtc::AudioReceiveStream {
+class FakeAudioReceiveStream final : public webrtc::AudioReceiveStream {
public:
explicit FakeAudioReceiveStream(
const webrtc::AudioReceiveStream::Config& config);
@@ -98,14 +106,16 @@ class FakeAudioReceiveStream : public webrtc::AudioReceiveStream {
// webrtc::AudioReceiveStream implementation.
webrtc::AudioReceiveStream::Stats GetStats() const override;
+ void SetSink(rtc::scoped_ptr<webrtc::AudioSinkInterface> sink) override;
webrtc::AudioReceiveStream::Config config_;
webrtc::AudioReceiveStream::Stats stats_;
int received_packets_;
+ rtc::scoped_ptr<webrtc::AudioSinkInterface> sink_;
};
-class FakeVideoSendStream : public webrtc::VideoSendStream,
- public webrtc::VideoCaptureInput {
+class FakeVideoSendStream final : public webrtc::VideoSendStream,
+ public webrtc::VideoCaptureInput {
public:
FakeVideoSendStream(const webrtc::VideoSendStream::Config& config,
const webrtc::VideoEncoderConfig& encoder_config);
@@ -153,7 +163,7 @@ class FakeVideoSendStream : public webrtc::VideoSendStream,
webrtc::VideoSendStream::Stats stats_;
};
-class FakeVideoReceiveStream : public webrtc::VideoReceiveStream {
+class FakeVideoReceiveStream final : public webrtc::VideoReceiveStream {
public:
explicit FakeVideoReceiveStream(
const webrtc::VideoReceiveStream::Config& config);
@@ -188,7 +198,7 @@ class FakeVideoReceiveStream : public webrtc::VideoReceiveStream {
webrtc::VideoReceiveStream::Stats stats_;
};
-class FakeCall : public webrtc::Call, public webrtc::PacketReceiver {
+class FakeCall final : public webrtc::Call, public webrtc::PacketReceiver {
public:
explicit FakeCall(const webrtc::Call::Config& config);
~FakeCall() override;
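
The TelephoneEvent bookkeeping added to FakeAudioSendStream lets voice-engine tests assert on DTMF sends without a real transport. A sketch of the intended usage; CheckDtmfRecording is illustrative, and stream construction and config setup are elided (the constructor DCHECKs that voe_channel_id is set):

    #include "talk/media/webrtc/fakewebrtccall.h"
    #include "webrtc/base/gunit.h"

    void CheckDtmfRecording(cricket::FakeAudioSendStream* stream) {
      stream->SendTelephoneEvent(126 /* payload type */, 7 /* event code */,
                                 120 /* duration ms */);
      cricket::FakeAudioSendStream::TelephoneEvent e =
          stream->GetLatestTelephoneEvent();
      EXPECT_EQ(126, e.payload_type);
      EXPECT_EQ(7, e.event_code);
      EXPECT_EQ(120u, e.duration_ms);
    }
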
diff --git a/talk/media/webrtc/fakewebrtcvideoengine.h b/talk/media/webrtc/fakewebrtcvideoengine.h
index 8e4c7c87f8..e0d4db52f8 100644
--- a/talk/media/webrtc/fakewebrtcvideoengine.h
+++ b/talk/media/webrtc/fakewebrtcvideoengine.h
@@ -41,7 +41,7 @@
#include "webrtc/base/gunit.h"
#include "webrtc/base/stringutils.h"
#include "webrtc/base/thread_annotations.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_error_codes.h"
+#include "webrtc/modules/video_coding/include/video_error_codes.h"
#include "webrtc/video_decoder.h"
#include "webrtc/video_encoder.h"
diff --git a/talk/media/webrtc/fakewebrtcvoiceengine.h b/talk/media/webrtc/fakewebrtcvoiceengine.h
index 2405e07b5f..65ba927cc5 100644
--- a/talk/media/webrtc/fakewebrtcvoiceengine.h
+++ b/talk/media/webrtc/fakewebrtcvoiceengine.h
@@ -41,19 +41,11 @@
#include "webrtc/base/gunit.h"
#include "webrtc/base/stringutils.h"
#include "webrtc/config.h"
+#include "webrtc/modules/audio_coding/acm2/rent_a_codec.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
namespace cricket {
-static const char kFakeDefaultDeviceName[] = "Fake Default";
-static const int kFakeDefaultDeviceId = -1;
-static const char kFakeDeviceName[] = "Fake Device";
-#ifdef WIN32
-static const int kFakeDeviceId = 0;
-#else
-static const int kFakeDeviceId = 1;
-#endif
-
static const int kOpusBandwidthNb = 4000;
static const int kOpusBandwidthMb = 6000;
static const int kOpusBandwidthWb = 8000;
@@ -63,18 +55,6 @@ static const int kOpusBandwidthFb = 20000;
#define WEBRTC_CHECK_CHANNEL(channel) \
if (channels_.find(channel) == channels_.end()) return -1;
-#define WEBRTC_ASSERT_CHANNEL(channel) \
- RTC_DCHECK(channels_.find(channel) != channels_.end());
-
-// Verify the header extension ID, if enabled, is within the bounds specified in
-// [RFC5285]: 1-14 inclusive.
-#define WEBRTC_CHECK_HEADER_EXTENSION_ID(enable, id) \
- do { \
- if (enable && (id < 1 || id > 14)) { \
- return -1; \
- } \
- } while (0);
-
class FakeAudioProcessing : public webrtc::AudioProcessing {
public:
FakeAudioProcessing() : experimental_ns_enabled_(false) {}
@@ -94,11 +74,13 @@ class FakeAudioProcessing : public webrtc::AudioProcessing {
experimental_ns_enabled_ = config.Get<webrtc::ExperimentalNs>().enabled;
}
+ WEBRTC_STUB_CONST(input_sample_rate_hz, ());
WEBRTC_STUB_CONST(proc_sample_rate_hz, ());
WEBRTC_STUB_CONST(proc_split_sample_rate_hz, ());
- WEBRTC_STUB_CONST(num_input_channels, ());
- WEBRTC_STUB_CONST(num_output_channels, ());
- WEBRTC_STUB_CONST(num_reverse_channels, ());
+ size_t num_input_channels() const override { return 0; }
+ size_t num_proc_channels() const override { return 0; }
+ size_t num_output_channels() const override { return 0; }
+ size_t num_reverse_channels() const override { return 0; }
WEBRTC_VOID_STUB(set_output_will_be_muted, (bool muted));
WEBRTC_STUB(ProcessStream, (webrtc::AudioFrame* frame));
WEBRTC_STUB(ProcessStream, (
@@ -156,20 +138,11 @@ class FakeAudioProcessing : public webrtc::AudioProcessing {
class FakeWebRtcVoiceEngine
: public webrtc::VoEAudioProcessing,
- public webrtc::VoEBase, public webrtc::VoECodec, public webrtc::VoEDtmf,
+ public webrtc::VoEBase, public webrtc::VoECodec,
public webrtc::VoEHardware,
public webrtc::VoENetwork, public webrtc::VoERTP_RTCP,
public webrtc::VoEVolumeControl {
public:
- struct DtmfInfo {
- DtmfInfo()
- : dtmf_event_code(-1),
- dtmf_out_of_band(false),
- dtmf_length_ms(-1) {}
- int dtmf_event_code;
- bool dtmf_out_of_band;
- int dtmf_length_ms;
- };
struct Channel {
explicit Channel()
: external_transport(false),
@@ -184,15 +157,11 @@ class FakeWebRtcVoiceEngine
nack(false),
cn8_type(13),
cn16_type(105),
- dtmf_type(106),
red_type(117),
nack_max_packets(0),
send_ssrc(0),
- send_audio_level_ext_(-1),
- receive_audio_level_ext_(-1),
- send_absolute_sender_time_ext_(-1),
- receive_absolute_sender_time_ext_(-1),
associate_send_channel(-1),
+ recv_codecs(),
neteq_capacity(-1),
neteq_fast_accelerate(false) {
memset(&send_codec, 0, sizeof(send_codec));
@@ -209,16 +178,10 @@ class FakeWebRtcVoiceEngine
bool nack;
int cn8_type;
int cn16_type;
- int dtmf_type;
int red_type;
int nack_max_packets;
uint32_t send_ssrc;
- int send_audio_level_ext_;
- int receive_audio_level_ext_;
- int send_absolute_sender_time_ext_;
- int receive_absolute_sender_time_ext_;
int associate_send_channel;
- DtmfInfo dtmf_info;
std::vector<webrtc::CodecInst> recv_codecs;
webrtc::CodecInst send_codec;
webrtc::PacketTime last_rtp_packet_time;
@@ -227,13 +190,10 @@ class FakeWebRtcVoiceEngine
bool neteq_fast_accelerate;
};
- FakeWebRtcVoiceEngine(const cricket::AudioCodec* const* codecs,
- int num_codecs)
+ FakeWebRtcVoiceEngine()
: inited_(false),
last_channel_(-1),
fail_create_channel_(false),
- codecs_(codecs),
- num_codecs_(num_codecs),
num_set_send_codecs_(0),
ec_enabled_(false),
ec_metrics_enabled_(false),
@@ -255,26 +215,13 @@ class FakeWebRtcVoiceEngine
memset(&agc_config_, 0, sizeof(agc_config_));
}
~FakeWebRtcVoiceEngine() {
- // Ought to have all been deleted by the WebRtcVoiceMediaChannel
- // destructors, but just in case ...
- for (std::map<int, Channel*>::const_iterator i = channels_.begin();
- i != channels_.end(); ++i) {
- delete i->second;
- }
+ RTC_CHECK(channels_.empty());
}
bool ec_metrics_enabled() const { return ec_metrics_enabled_; }
bool IsInited() const { return inited_; }
int GetLastChannel() const { return last_channel_; }
- int GetChannelFromLocalSsrc(uint32_t local_ssrc) const {
- for (std::map<int, Channel*>::const_iterator iter = channels_.begin();
- iter != channels_.end(); ++iter) {
- if (local_ssrc == iter->second->send_ssrc)
- return iter->first;
- }
- return -1;
- }
int GetNumChannels() const { return static_cast<int>(channels_.size()); }
uint32_t GetLocalSSRC(int channel) {
return channels_[channel]->send_ssrc;
@@ -307,7 +254,7 @@ class FakeWebRtcVoiceEngine
return channels_[channel]->nack_max_packets;
}
const webrtc::PacketTime& GetLastRtpPacketTime(int channel) {
- WEBRTC_ASSERT_CHANNEL(channel);
+ RTC_DCHECK(channels_.find(channel) != channels_.end());
return channels_[channel]->last_rtp_packet_time;
}
int GetSendCNPayloadType(int channel, bool wideband) {
@@ -315,9 +262,6 @@ class FakeWebRtcVoiceEngine
channels_[channel]->cn16_type :
channels_[channel]->cn8_type;
}
- int GetSendTelephoneEventPayloadType(int channel) {
- return channels_[channel]->dtmf_type;
- }
int GetSendREDPayloadType(int channel) {
return channels_[channel]->red_type;
}
@@ -351,11 +295,8 @@ class FakeWebRtcVoiceEngine
return -1;
}
Channel* ch = new Channel();
- for (int i = 0; i < NumOfCodecs(); ++i) {
- webrtc::CodecInst codec;
- GetCodec(i, codec);
- ch->recv_codecs.push_back(codec);
- }
+ auto db = webrtc::acm2::RentACodec::Database();
+ ch->recv_codecs.assign(db.begin(), db.end());
if (config.Get<webrtc::NetEqCapacityConfig>().enabled) {
ch->neteq_capacity = config.Get<webrtc::NetEqCapacityConfig>().capacity;
}
@@ -364,24 +305,6 @@ class FakeWebRtcVoiceEngine
channels_[++last_channel_] = ch;
return last_channel_;
}
- int GetSendRtpExtensionId(int channel, const std::string& extension) {
- WEBRTC_ASSERT_CHANNEL(channel);
- if (extension == kRtpAudioLevelHeaderExtension) {
- return channels_[channel]->send_audio_level_ext_;
- } else if (extension == kRtpAbsoluteSenderTimeHeaderExtension) {
- return channels_[channel]->send_absolute_sender_time_ext_;
- }
- return -1;
- }
- int GetReceiveRtpExtensionId(int channel, const std::string& extension) {
- WEBRTC_ASSERT_CHANNEL(channel);
- if (extension == kRtpAudioLevelHeaderExtension) {
- return channels_[channel]->receive_audio_level_ext_;
- } else if (extension == kRtpAbsoluteSenderTimeHeaderExtension) {
- return channels_[channel]->receive_absolute_sender_time_ext_;
- }
- return -1;
- }
int GetNumSetSendCodecs() const { return num_set_send_codecs_; }
@@ -473,22 +396,8 @@ class FakeWebRtcVoiceEngine
webrtc::RtcEventLog* GetEventLog() { return nullptr; }
// webrtc::VoECodec
- WEBRTC_FUNC(NumOfCodecs, ()) {
- return num_codecs_;
- }
- WEBRTC_FUNC(GetCodec, (int index, webrtc::CodecInst& codec)) {
- if (index < 0 || index >= NumOfCodecs()) {
- return -1;
- }
- const cricket::AudioCodec& c(*codecs_[index]);
- codec.pltype = c.id;
- rtc::strcpyn(codec.plname, sizeof(codec.plname), c.name.c_str());
- codec.plfreq = c.clockrate;
- codec.pacsize = 0;
- codec.channels = c.channels;
- codec.rate = c.bitrate;
- return 0;
- }
+ WEBRTC_STUB(NumOfCodecs, ());
+ WEBRTC_STUB(GetCodec, (int index, webrtc::CodecInst& codec));
WEBRTC_FUNC(SetSendCodec, (int channel, const webrtc::CodecInst& codec)) {
WEBRTC_CHECK_CHANNEL(channel);
// To match the behavior of the real implementation.
@@ -526,16 +435,17 @@ class FakeWebRtcVoiceEngine
}
}
// Otherwise try to find this codec and update its payload type.
+ int result = -1; // not found
for (std::vector<webrtc::CodecInst>::iterator it = ch->recv_codecs.begin();
it != ch->recv_codecs.end(); ++it) {
if (strcmp(it->plname, codec.plname) == 0 &&
- it->plfreq == codec.plfreq) {
+ it->plfreq == codec.plfreq &&
+ it->channels == codec.channels) {
it->pltype = codec.pltype;
- it->channels = codec.channels;
- return 0;
+ result = 0;
}
}
- return -1; // not found
+ return result;
}
WEBRTC_FUNC(SetSendCNPayloadType, (int channel, int type,
webrtc::PayloadFrequencies frequency)) {
@@ -620,46 +530,11 @@ class FakeWebRtcVoiceEngine
return 0;
}
- // webrtc::VoEDtmf
- WEBRTC_FUNC(SendTelephoneEvent, (int channel, int event_code,
- bool out_of_band = true, int length_ms = 160, int attenuation_db = 10)) {
- channels_[channel]->dtmf_info.dtmf_event_code = event_code;
- channels_[channel]->dtmf_info.dtmf_out_of_band = out_of_band;
- channels_[channel]->dtmf_info.dtmf_length_ms = length_ms;
- return 0;
- }
-
- WEBRTC_FUNC(SetSendTelephoneEventPayloadType,
- (int channel, unsigned char type)) {
- channels_[channel]->dtmf_type = type;
- return 0;
- };
- WEBRTC_STUB(GetSendTelephoneEventPayloadType,
- (int channel, unsigned char& type));
-
- WEBRTC_STUB(SetDtmfFeedbackStatus, (bool enable, bool directFeedback));
- WEBRTC_STUB(GetDtmfFeedbackStatus, (bool& enabled, bool& directFeedback));
-
- WEBRTC_FUNC(PlayDtmfTone,
- (int event_code, int length_ms = 200, int attenuation_db = 10)) {
- dtmf_info_.dtmf_event_code = event_code;
- dtmf_info_.dtmf_length_ms = length_ms;
- return 0;
- }
-
// webrtc::VoEHardware
- WEBRTC_FUNC(GetNumOfRecordingDevices, (int& num)) {
- return GetNumDevices(num);
- }
- WEBRTC_FUNC(GetNumOfPlayoutDevices, (int& num)) {
- return GetNumDevices(num);
- }
- WEBRTC_FUNC(GetRecordingDeviceName, (int i, char* name, char* guid)) {
- return GetDeviceName(i, name, guid);
- }
- WEBRTC_FUNC(GetPlayoutDeviceName, (int i, char* name, char* guid)) {
- return GetDeviceName(i, name, guid);
- }
+ WEBRTC_STUB(GetNumOfRecordingDevices, (int& num));
+ WEBRTC_STUB(GetNumOfPlayoutDevices, (int& num));
+ WEBRTC_STUB(GetRecordingDeviceName, (int i, char* name, char* guid));
+ WEBRTC_STUB(GetPlayoutDeviceName, (int i, char* name, char* guid));
WEBRTC_STUB(SetRecordingDevice, (int, webrtc::StereoChannel));
WEBRTC_STUB(SetPlayoutDevice, (int));
WEBRTC_STUB(SetAudioDeviceLayer, (webrtc::AudioLayers));
@@ -729,35 +604,14 @@ class FakeWebRtcVoiceEngine
}
WEBRTC_STUB(GetLocalSSRC, (int channel, unsigned int& ssrc));
WEBRTC_STUB(GetRemoteSSRC, (int channel, unsigned int& ssrc));
- WEBRTC_FUNC(SetSendAudioLevelIndicationStatus, (int channel, bool enable,
- unsigned char id)) {
- WEBRTC_CHECK_CHANNEL(channel);
- WEBRTC_CHECK_HEADER_EXTENSION_ID(enable, id);
- channels_[channel]->send_audio_level_ext_ = (enable) ? id : -1;
- return 0;
- }
- WEBRTC_FUNC(SetReceiveAudioLevelIndicationStatus, (int channel, bool enable,
- unsigned char id)) {
- WEBRTC_CHECK_CHANNEL(channel);
- WEBRTC_CHECK_HEADER_EXTENSION_ID(enable, id);
- channels_[channel]->receive_audio_level_ext_ = (enable) ? id : -1;
- return 0;
- }
- WEBRTC_FUNC(SetSendAbsoluteSenderTimeStatus, (int channel, bool enable,
- unsigned char id)) {
- WEBRTC_CHECK_CHANNEL(channel);
- WEBRTC_CHECK_HEADER_EXTENSION_ID(enable, id);
- channels_[channel]->send_absolute_sender_time_ext_ = (enable) ? id : -1;
- return 0;
- }
- WEBRTC_FUNC(SetReceiveAbsoluteSenderTimeStatus, (int channel, bool enable,
- unsigned char id)) {
- WEBRTC_CHECK_CHANNEL(channel);
- WEBRTC_CHECK_HEADER_EXTENSION_ID(enable, id);
- channels_[channel]->receive_absolute_sender_time_ext_ = (enable) ? id : -1;
- return 0;
- }
-
+ WEBRTC_STUB(SetSendAudioLevelIndicationStatus, (int channel, bool enable,
+ unsigned char id));
+ WEBRTC_STUB(SetReceiveAudioLevelIndicationStatus, (int channel, bool enable,
+ unsigned char id));
+ WEBRTC_STUB(SetSendAbsoluteSenderTimeStatus, (int channel, bool enable,
+ unsigned char id));
+ WEBRTC_STUB(SetReceiveAbsoluteSenderTimeStatus, (int channel, bool enable,
+ unsigned char id));
WEBRTC_STUB(SetRTCPStatus, (int channel, bool enable));
WEBRTC_STUB(GetRTCPStatus, (int channel, bool& enabled));
WEBRTC_STUB(SetRTCP_CNAME, (int channel, const char cname[256]));
@@ -776,22 +630,12 @@ class FakeWebRtcVoiceEngine
unsigned int& discardedPackets));
WEBRTC_STUB(GetRTCPStatistics, (int channel, webrtc::CallStatistics& stats));
WEBRTC_FUNC(SetREDStatus, (int channel, bool enable, int redPayloadtype)) {
- return SetFECStatus(channel, enable, redPayloadtype);
- }
- // TODO(minyue): remove the below function when transition to SetREDStatus
- // is finished.
- WEBRTC_FUNC(SetFECStatus, (int channel, bool enable, int redPayloadtype)) {
WEBRTC_CHECK_CHANNEL(channel);
channels_[channel]->red = enable;
channels_[channel]->red_type = redPayloadtype;
return 0;
}
WEBRTC_FUNC(GetREDStatus, (int channel, bool& enable, int& redPayloadtype)) {
- return GetFECStatus(channel, enable, redPayloadtype);
- }
- // TODO(minyue): remove the below function when transition to GetREDStatus
- // is finished.
- WEBRTC_FUNC(GetFECStatus, (int channel, bool& enable, int& redPayloadtype)) {
WEBRTC_CHECK_CHANNEL(channel);
enable = channels_[channel]->red;
redPayloadtype = channels_[channel]->red_type;
@@ -937,15 +781,6 @@ class FakeWebRtcVoiceEngine
void EnableStereoChannelSwapping(bool enable) {
stereo_swapping_enabled_ = enable;
}
- bool WasSendTelephoneEventCalled(int channel, int event_code, int length_ms) {
- return (channels_[channel]->dtmf_info.dtmf_event_code == event_code &&
- channels_[channel]->dtmf_info.dtmf_out_of_band == true &&
- channels_[channel]->dtmf_info.dtmf_length_ms == length_ms);
- }
- bool WasPlayDtmfToneCalled(int event_code, int length_ms) {
- return (dtmf_info_.dtmf_event_code == event_code &&
- dtmf_info_.dtmf_length_ms == length_ms);
- }
int GetNetEqCapacity() const {
auto ch = channels_.find(last_channel_);
ASSERT(ch != channels_.end());
@@ -958,47 +793,10 @@ class FakeWebRtcVoiceEngine
}
private:
- int GetNumDevices(int& num) {
-#ifdef WIN32
- num = 1;
-#else
- // On non-Windows platforms VE adds a special entry for the default device,
- // so if there is one physical device then there are two entries in the
- // list.
- num = 2;
-#endif
- return 0;
- }
-
- int GetDeviceName(int i, char* name, char* guid) {
- const char *s;
-#ifdef WIN32
- if (0 == i) {
- s = kFakeDeviceName;
- } else {
- return -1;
- }
-#else
- // See comment above.
- if (0 == i) {
- s = kFakeDefaultDeviceName;
- } else if (1 == i) {
- s = kFakeDeviceName;
- } else {
- return -1;
- }
-#endif
- strcpy(name, s);
- guid[0] = '\0';
- return 0;
- }
-
bool inited_;
int last_channel_;
std::map<int, Channel*> channels_;
bool fail_create_channel_;
- const cricket::AudioCodec* const* codecs_;
- int num_codecs_;
int num_set_send_codecs_; // how many times we call SetSendCodec().
bool ec_enabled_;
bool ec_metrics_enabled_;
@@ -1018,12 +816,9 @@ class FakeWebRtcVoiceEngine
int send_fail_channel_;
int recording_sample_rate_;
int playout_sample_rate_;
- DtmfInfo dtmf_info_;
FakeAudioProcessing audio_processing_;
};
-#undef WEBRTC_CHECK_HEADER_EXTENSION_ID
-
} // namespace cricket
#endif // TALK_SESSION_PHONE_FAKEWEBRTCVOICEENGINE_H_
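
The fake voice engine now seeds each channel's receive codec list straight from the ACM codec table instead of an injected codec array. A sketch of that pattern, assuming RentACodec::Database() returns an iterable view of webrtc::CodecInst as the diff suggests:

    #include <vector>
    #include "webrtc/modules/audio_coding/acm2/rent_a_codec.h"

    std::vector<webrtc::CodecInst> DefaultRecvCodecs() {
      // Database() exposes the built-in codec table; copying it gives each
      // fake channel an independent, mutable list.
      auto db = webrtc::acm2::RentACodec::Database();
      return std::vector<webrtc::CodecInst>(db.begin(), db.end());
    }
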
diff --git a/talk/media/webrtc/simulcast.cc b/talk/media/webrtc/simulcast.cc
index f55d9606a5..b67a363a76 100755
--- a/talk/media/webrtc/simulcast.cc
+++ b/talk/media/webrtc/simulcast.cc
@@ -29,9 +29,11 @@
#include "talk/media/base/streamparams.h"
#include "talk/media/webrtc/simulcast.h"
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/common.h"
#include "webrtc/base/logging.h"
#include "webrtc/system_wrappers/include/field_trial.h"
+
namespace cricket {
struct SimulcastFormat {
@@ -93,7 +95,7 @@ void MaybeExchangeWidthHeight(int* width, int* height) {
int FindSimulcastFormatIndex(int width, int height) {
MaybeExchangeWidthHeight(&width, &height);
- for (int i = 0; i < ARRAY_SIZE(kSimulcastFormats); ++i) {
+ for (int i = 0; i < arraysize(kSimulcastFormats); ++i) {
if (width >= kSimulcastFormats[i].width &&
height >= kSimulcastFormats[i].height) {
return i;
@@ -105,7 +107,7 @@ int FindSimulcastFormatIndex(int width, int height) {
int FindSimulcastFormatIndex(int width, int height, size_t max_layers) {
MaybeExchangeWidthHeight(&width, &height);
- for (int i = 0; i < ARRAY_SIZE(kSimulcastFormats); ++i) {
+ for (int i = 0; i < arraysize(kSimulcastFormats); ++i) {
if (width >= kSimulcastFormats[i].width &&
height >= kSimulcastFormats[i].height &&
max_layers == kSimulcastFormats[i].max_layers) {
diff --git a/talk/media/webrtc/webrtcmediaengine.cc b/talk/media/webrtc/webrtcmediaengine.cc
index af202bd613..31e5025a55 100644
--- a/talk/media/webrtc/webrtcmediaengine.cc
+++ b/talk/media/webrtc/webrtcmediaengine.cc
@@ -26,6 +26,9 @@
*/
#include "talk/media/webrtc/webrtcmediaengine.h"
+
+#include <algorithm>
+
#include "talk/media/webrtc/webrtcvideoengine2.h"
#include "talk/media/webrtc/webrtcvoiceengine.h"
@@ -68,44 +71,85 @@ MediaEngineInterface* WebRtcMediaEngineFactory::Create(
return CreateWebRtcMediaEngine(adm, encoder_factory, decoder_factory);
}
-const char* kBweExtensionPriorities[] = {
- kRtpTransportSequenceNumberHeaderExtension,
- kRtpAbsoluteSenderTimeHeaderExtension, kRtpTimestampOffsetHeaderExtension};
-
-const size_t kBweExtensionPrioritiesLength =
- ARRAY_SIZE(kBweExtensionPriorities);
+namespace {
+// Remove mutually exclusive extensions with lower priority.
+void DiscardRedundantExtensions(
+ std::vector<webrtc::RtpExtension>* extensions,
+ rtc::ArrayView<const char*> extensions_decreasing_prio) {
+ RTC_DCHECK(extensions);
+ bool found = false;
+ for (const char* name : extensions_decreasing_prio) {
+ auto it = std::find_if(extensions->begin(), extensions->end(),
+ [name](const webrtc::RtpExtension& rhs) {
+ return rhs.name == name;
+ });
+ if (it != extensions->end()) {
+ if (found) {
+ extensions->erase(it);
+ }
+ found = true;
+ }
+ }
+}
+} // namespace
-int GetPriority(const RtpHeaderExtension& extension,
- const char* extension_prios[],
- size_t extension_prios_length) {
- for (size_t i = 0; i < extension_prios_length; ++i) {
- if (extension.uri == extension_prios[i])
- return static_cast<int>(i);
+bool ValidateRtpExtensions(const std::vector<RtpHeaderExtension>& extensions) {
+ bool id_used[14] = {false};
+ for (const auto& extension : extensions) {
+ if (extension.id <= 0 || extension.id >= 15) {
+ LOG(LS_ERROR) << "Bad RTP extension ID: " << extension.ToString();
+ return false;
+ }
+ if (id_used[extension.id - 1]) {
+ LOG(LS_ERROR) << "Duplicate RTP extension ID: " << extension.ToString();
+ return false;
+ }
+ id_used[extension.id - 1] = true;
}
- return -1;
+ return true;
}
-std::vector<RtpHeaderExtension> FilterRedundantRtpExtensions(
+std::vector<webrtc::RtpExtension> FilterRtpExtensions(
const std::vector<RtpHeaderExtension>& extensions,
- const char* extension_prios[],
- size_t extension_prios_length) {
- if (extensions.empty())
- return std::vector<RtpHeaderExtension>();
- std::vector<RtpHeaderExtension> filtered;
- std::map<int, const RtpHeaderExtension*> sorted;
- for (auto& extension : extensions) {
- int priority =
- GetPriority(extension, extension_prios, extension_prios_length);
- if (priority == -1) {
- filtered.push_back(extension);
- continue;
+ bool (*supported)(const std::string&),
+ bool filter_redundant_extensions) {
+ RTC_DCHECK(ValidateRtpExtensions(extensions));
+ RTC_DCHECK(supported);
+ std::vector<webrtc::RtpExtension> result;
+
+ // Ignore any extensions that we don't recognize.
+ for (const auto& extension : extensions) {
+ if (supported(extension.uri)) {
+ result.push_back({extension.uri, extension.id});
} else {
- sorted[priority] = &extension;
+ LOG(LS_WARNING) << "Unsupported RTP extension: " << extension.ToString();
}
}
- if (!sorted.empty())
- filtered.push_back(*sorted.begin()->second);
- return filtered;
-}
+ // Sort by name, ascending, so that we don't reset extensions if they were
+ // specified in a different order (also allows us to use std::unique below).
+ std::sort(result.begin(), result.end(),
+ [](const webrtc::RtpExtension& a, const webrtc::RtpExtension& b) {
+ return a.name < b.name;
+ });
+
+ // Remove unnecessary extensions (used on send side).
+ if (filter_redundant_extensions) {
+ auto it = std::unique(result.begin(), result.end(),
+ [](const webrtc::RtpExtension& a, const webrtc::RtpExtension& b) {
+ return a.name == b.name;
+ });
+ result.erase(it, result.end());
+
+ // Keep only the highest-priority extension among those in the list below.
+ static const char* kBweExtensionPriorities[] = {
+ kRtpTransportSequenceNumberHeaderExtension,
+ kRtpAbsoluteSenderTimeHeaderExtension,
+ kRtpTimestampOffsetHeaderExtension
+ };
+ DiscardRedundantExtensions(&result, kBweExtensionPriorities);
+ }
+
+ return result;
+}
} // namespace cricket
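A hypothetical caller of the two new helpers above, sketching the send-side flow (the constants and the IsSupportedForVideo predicate come from the surrounding patch; the ID values are arbitrary):

std::vector<cricket::RtpHeaderExtension> extensions;
extensions.push_back(cricket::RtpHeaderExtension(
    cricket::kRtpTimestampOffsetHeaderExtension, 2));
extensions.push_back(cricket::RtpHeaderExtension(
    cricket::kRtpAbsoluteSenderTimeHeaderExtension, 3));
if (cricket::ValidateRtpExtensions(extensions)) {
  std::vector<webrtc::RtpExtension> filtered = cricket::FilterRtpExtensions(
      extensions, webrtc::RtpExtension::IsSupportedForVideo,
      true /* filter_redundant_extensions */);
  // Both inputs are BWE extensions, so only the higher-priority
  // absolute-sender-time entry survives in |filtered|.
}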
diff --git a/talk/media/webrtc/webrtcmediaengine.h b/talk/media/webrtc/webrtcmediaengine.h
index 8d7540404d..831d0725e8 100644
--- a/talk/media/webrtc/webrtcmediaengine.h
+++ b/talk/media/webrtc/webrtcmediaengine.h
@@ -28,7 +28,11 @@
#ifndef TALK_MEDIA_WEBRTCMEDIAENGINE_H_
#define TALK_MEDIA_WEBRTCMEDIAENGINE_H_
+#include <string>
+#include <vector>
+
#include "talk/media/base/mediaengine.h"
+#include "webrtc/config.h"
namespace webrtc {
class AudioDeviceModule;
@@ -48,13 +52,18 @@ class WebRtcMediaEngineFactory {
WebRtcVideoDecoderFactory* decoder_factory);
};
-extern const char* kBweExtensionPriorities[];
-extern const size_t kBweExtensionPrioritiesLength;
+// Verify that extension IDs are within the one-byte header extension range
+// (1-14) and that no ID is used more than once.
+bool ValidateRtpExtensions(const std::vector<RtpHeaderExtension>& extensions);
-std::vector<RtpHeaderExtension> FilterRedundantRtpExtensions(
+// Convert cricket::RtpHeaderExtension values to webrtc::RtpExtension values,
+// discarding any extension not accepted by the 'supported' predicate. If
+// 'filter_redundant_extensions' is set, duplicate extensions are removed, as
+// are lower-priority mutually exclusive extensions (see implementation for
+// details).
+std::vector<webrtc::RtpExtension> FilterRtpExtensions(
const std::vector<RtpHeaderExtension>& extensions,
- const char* extension_prios[],
- size_t extension_prios_length);
+ bool (*supported)(const std::string&),
+ bool filter_redundant_extensions);
} // namespace cricket
diff --git a/talk/media/webrtc/webrtcmediaengine_unittest.cc b/talk/media/webrtc/webrtcmediaengine_unittest.cc
new file mode 100644
index 0000000000..7c80e77301
--- /dev/null
+++ b/talk/media/webrtc/webrtcmediaengine_unittest.cc
@@ -0,0 +1,205 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+#include "talk/media/webrtc/webrtcmediaengine.h"
+
+namespace cricket {
+namespace {
+
+std::vector<RtpHeaderExtension> MakeUniqueExtensions() {
+ std::vector<RtpHeaderExtension> result;
+ char name[] = "a";
+ for (int i = 0; i < 7; ++i) {
+ result.push_back(RtpHeaderExtension(name, 1 + i));
+ name[0]++;
+ result.push_back(RtpHeaderExtension(name, 14 - i));
+ name[0]++;
+ }
+ return result;
+}
+
+std::vector<RtpHeaderExtension> MakeRedundantExtensions() {
+ std::vector<RtpHeaderExtension> result;
+ char name[] = "a";
+ for (int i = 0; i < 7; ++i) {
+ result.push_back(RtpHeaderExtension(name, 1 + i));
+ result.push_back(RtpHeaderExtension(name, 14 - i));
+ name[0]++;
+ }
+ return result;
+}
+
+bool SupportedExtensions1(const std::string& name) {
+ return name == "c" || name == "i";
+}
+
+bool SupportedExtensions2(const std::string& name) {
+ return name != "a" && name != "n";
+}
+
+bool IsSorted(const std::vector<webrtc::RtpExtension>& extensions) {
+ const std::string* last = nullptr;
+ for (const auto& extension : extensions) {
+ if (last && *last > extension.name) {
+ return false;
+ }
+ last = &extension.name;
+ }
+ return true;
+}
+} // namespace
+
+TEST(WebRtcMediaEngineTest, ValidateRtpExtensions_EmptyList) {
+ std::vector<RtpHeaderExtension> extensions;
+ EXPECT_TRUE(ValidateRtpExtensions(extensions));
+}
+
+TEST(WebRtcMediaEngineTest, ValidateRtpExtensions_AllGood) {
+ std::vector<RtpHeaderExtension> extensions = MakeUniqueExtensions();
+ EXPECT_TRUE(ValidateRtpExtensions(extensions));
+}
+
+TEST(WebRtcMediaEngineTest, ValidateRtpExtensions_OutOfRangeId_Low) {
+ std::vector<RtpHeaderExtension> extensions = MakeUniqueExtensions();
+ extensions.push_back(RtpHeaderExtension("foo", 0));
+ EXPECT_FALSE(ValidateRtpExtensions(extensions));
+}
+
+TEST(WebRtcMediaEngineTest, ValidateRtpExtensions_OutOfRangeId_High) {
+ std::vector<RtpHeaderExtension> extensions = MakeUniqueExtensions();
+ extensions.push_back(RtpHeaderExtension("foo", 15));
+ EXPECT_FALSE(ValidateRtpExtensions(extensions));
+}
+
+TEST(WebRtcMediaEngineTest, ValidateRtpExtensions_OverlappingIds_StartOfSet) {
+ std::vector<RtpHeaderExtension> extensions = MakeUniqueExtensions();
+ extensions.push_back(RtpHeaderExtension("foo", 1));
+ EXPECT_FALSE(ValidateRtpExtensions(extensions));
+}
+
+TEST(WebRtcMediaEngineTest, ValidateRtpExtensions_OverlappingIds_EndOfSet) {
+ std::vector<RtpHeaderExtension> extensions = MakeUniqueExtensions();
+ extensions.push_back(RtpHeaderExtension("foo", 14));
+ EXPECT_FALSE(ValidateRtpExtensions(extensions));
+}
+
+TEST(WebRtcMediaEngineTest, FilterRtpExtensions_EmptyList) {
+ std::vector<RtpHeaderExtension> extensions;
+ std::vector<webrtc::RtpExtension> filtered =
+ FilterRtpExtensions(extensions, SupportedExtensions1, true);
+ EXPECT_EQ(0u, filtered.size());
+}
+
+TEST(WebRtcMediaEngineTest, FilterRtpExtensions_IncludeOnlySupported) {
+ std::vector<RtpHeaderExtension> extensions = MakeUniqueExtensions();
+ std::vector<webrtc::RtpExtension> filtered =
+ FilterRtpExtensions(extensions, SupportedExtensions1, false);
+ EXPECT_EQ(2u, filtered.size());
+ EXPECT_EQ("c", filtered[0].name);
+ EXPECT_EQ("i", filtered[1].name);
+}
+
+TEST(WebRtcMediaEngineTest, FilterRtpExtensions_SortedByName_1) {
+ std::vector<RtpHeaderExtension> extensions = MakeUniqueExtensions();
+ std::vector<webrtc::RtpExtension> filtered =
+ FilterRtpExtensions(extensions, SupportedExtensions2, false);
+ EXPECT_EQ(12u, filtered.size());
+ EXPECT_TRUE(IsSorted(filtered));
+}
+
+TEST(WebRtcMediaEngineTest, FilterRtpExtensions_SortedByName_2) {
+ std::vector<RtpHeaderExtension> extensions = MakeUniqueExtensions();
+ std::vector<webrtc::RtpExtension> filtered =
+ FilterRtpExtensions(extensions, SupportedExtensions2, true);
+ EXPECT_EQ(12u, filtered.size());
+ EXPECT_TRUE(IsSorted(filtered));
+}
+
+TEST(WebRtcMediaEngineTest, FilterRtpExtensions_DontRemoveRedundant) {
+ std::vector<RtpHeaderExtension> extensions = MakeRedundantExtensions();
+ std::vector<webrtc::RtpExtension> filtered =
+ FilterRtpExtensions(extensions, SupportedExtensions2, false);
+ EXPECT_EQ(12u, filtered.size());
+ EXPECT_TRUE(IsSorted(filtered));
+ EXPECT_EQ(filtered[0].name, filtered[1].name);
+}
+
+TEST(WebRtcMediaEngineTest, FilterRtpExtensions_RemoveRedundant) {
+ std::vector<RtpHeaderExtension> extensions = MakeRedundantExtensions();
+ std::vector<webrtc::RtpExtension> filtered =
+ FilterRtpExtensions(extensions, SupportedExtensions2, true);
+ EXPECT_EQ(6u, filtered.size());
+ EXPECT_TRUE(IsSorted(filtered));
+ EXPECT_NE(filtered[0].name, filtered[1].name);
+}
+
+TEST(WebRtcMediaEngineTest, FilterRtpExtensions_RemoveRedundantBwe_1) {
+ std::vector<RtpHeaderExtension> extensions;
+ extensions.push_back(
+ RtpHeaderExtension(kRtpTransportSequenceNumberHeaderExtension, 3));
+ extensions.push_back(
+ RtpHeaderExtension(kRtpTimestampOffsetHeaderExtension, 9));
+ extensions.push_back(
+ RtpHeaderExtension(kRtpAbsoluteSenderTimeHeaderExtension, 6));
+ extensions.push_back(
+ RtpHeaderExtension(kRtpTransportSequenceNumberHeaderExtension, 1));
+ extensions.push_back(
+ RtpHeaderExtension(kRtpTimestampOffsetHeaderExtension, 14));
+ std::vector<webrtc::RtpExtension> filtered =
+ FilterRtpExtensions(extensions, SupportedExtensions2, true);
+ EXPECT_EQ(1u, filtered.size());
+ EXPECT_EQ(kRtpTransportSequenceNumberHeaderExtension, filtered[0].name);
+}
+
+TEST(WebRtcMediaEngineTest, FilterRtpExtensions_RemoveRedundantBwe_2) {
+ std::vector<RtpHeaderExtension> extensions;
+ extensions.push_back(
+ RtpHeaderExtension(kRtpTimestampOffsetHeaderExtension, 1));
+ extensions.push_back(
+ RtpHeaderExtension(kRtpAbsoluteSenderTimeHeaderExtension, 14));
+ extensions.push_back(
+ RtpHeaderExtension(kRtpTimestampOffsetHeaderExtension, 7));
+ std::vector<webrtc::RtpExtension> filtered =
+ FilterRtpExtensions(extensions, SupportedExtensions2, true);
+ EXPECT_EQ(1u, filtered.size());
+ EXPECT_EQ(kRtpAbsoluteSenderTimeHeaderExtension, filtered[0].name);
+}
+
+TEST(WebRtcMediaEngineTest, FilterRtpExtensions_RemoveRedundantBwe_3) {
+ std::vector<RtpHeaderExtension> extensions;
+ extensions.push_back(
+ RtpHeaderExtension(kRtpTimestampOffsetHeaderExtension, 2));
+ extensions.push_back(
+ RtpHeaderExtension(kRtpTimestampOffsetHeaderExtension, 14));
+ std::vector<webrtc::RtpExtension> filtered =
+ FilterRtpExtensions(extensions, SupportedExtensions2, true);
+ EXPECT_EQ(1u, filtered.size());
+ EXPECT_EQ(kRtpTimestampOffsetHeaderExtension, filtered[0].name);
+}
+} // namespace cricket
diff --git a/talk/media/webrtc/webrtcvideocapturer.cc b/talk/media/webrtc/webrtcvideocapturer.cc
index 7d72128d61..ee4db5b1d2 100644
--- a/talk/media/webrtc/webrtcvideocapturer.cc
+++ b/talk/media/webrtc/webrtcvideocapturer.cc
@@ -34,6 +34,7 @@
#ifdef HAVE_WEBRTC_VIDEO
#include "talk/media/webrtc/webrtcvideoframe.h"
#include "talk/media/webrtc/webrtcvideoframefactory.h"
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/criticalsection.h"
@@ -43,7 +44,7 @@
#include "webrtc/base/timeutils.h"
#include "webrtc/base/win32.h" // Need this to #include the impl files.
-#include "webrtc/modules/video_capture/include/video_capture_factory.h"
+#include "webrtc/modules/video_capture/video_capture_factory.h"
#include "webrtc/system_wrappers/include/field_trial.h"
namespace cricket {
@@ -83,7 +84,7 @@ class WebRtcVcmFactory : public WebRtcVcmFactoryInterface {
static bool CapabilityToFormat(const webrtc::VideoCaptureCapability& cap,
VideoFormat* format) {
uint32_t fourcc = 0;
- for (size_t i = 0; i < ARRAY_SIZE(kSupportedFourCCs); ++i) {
+ for (size_t i = 0; i < arraysize(kSupportedFourCCs); ++i) {
if (kSupportedFourCCs[i].webrtc_type == cap.rawType) {
fourcc = kSupportedFourCCs[i].fourcc;
break;
@@ -103,7 +104,7 @@ static bool CapabilityToFormat(const webrtc::VideoCaptureCapability& cap,
static bool FormatToCapability(const VideoFormat& format,
webrtc::VideoCaptureCapability* cap) {
webrtc::RawVideoType webrtc_type = webrtc::kVideoUnknown;
- for (size_t i = 0; i < ARRAY_SIZE(kSupportedFourCCs); ++i) {
+ for (size_t i = 0; i < arraysize(kSupportedFourCCs); ++i) {
if (kSupportedFourCCs[i].fourcc == format.fourcc) {
webrtc_type = kSupportedFourCCs[i].webrtc_type;
break;
@@ -171,8 +172,8 @@ bool WebRtcVideoCapturer::Init(const Device& device) {
bool found = false;
for (int index = 0; index < num_cams; ++index) {
char vcm_name[256];
- if (info->GetDeviceName(index, vcm_name, ARRAY_SIZE(vcm_name),
- vcm_id, ARRAY_SIZE(vcm_id)) != -1) {
+ if (info->GetDeviceName(index, vcm_name, arraysize(vcm_name), vcm_id,
+ arraysize(vcm_id)) != -1) {
if (device.name == reinterpret_cast<char*>(vcm_name)) {
found = true;
break;
@@ -349,6 +350,7 @@ void WebRtcVideoCapturer::Stop() {
SetCaptureFormat(NULL);
start_thread_ = nullptr;
+ SetCaptureState(CS_STOPPED);
}
bool WebRtcVideoCapturer::IsRunning() {
@@ -361,7 +363,7 @@ bool WebRtcVideoCapturer::GetPreferredFourccs(std::vector<uint32_t>* fourccs) {
}
fourccs->clear();
- for (size_t i = 0; i < ARRAY_SIZE(kSupportedFourCCs); ++i) {
+ for (size_t i = 0; i < arraysize(kSupportedFourCCs); ++i) {
fourccs->push_back(kSupportedFourCCs[i].fourcc);
}
return true;
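With the Stop() change above, the capturer now signals CS_STOPPED, which the updated capturer unit test below waits for. A sketch of a hypothetical listener (class and method names are illustrative; the signal itself is the capturer's existing SignalStateChange):

class StopObserver : public sigslot::has_slots<> {
 public:
  void OnStateChange(cricket::VideoCapturer* capturer,
                     cricket::CaptureState state) {
    if (state == cricket::CS_STOPPED) {
      // Capture has fully stopped; safe to tear down render sinks here.
    }
  }
};
// Assumed wiring:
// capturer->SignalStateChange.connect(&observer, &StopObserver::OnStateChange);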
diff --git a/talk/media/webrtc/webrtcvideocapturer.h b/talk/media/webrtc/webrtcvideocapturer.h
index 0a99884fe1..591e46f629 100644
--- a/talk/media/webrtc/webrtcvideocapturer.h
+++ b/talk/media/webrtc/webrtcvideocapturer.h
@@ -39,7 +39,7 @@
#include "webrtc/base/messagehandler.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/video_capture/include/video_capture.h"
+#include "webrtc/modules/video_capture/video_capture.h"
namespace cricket {
diff --git a/talk/media/webrtc/webrtcvideocapturer_unittest.cc b/talk/media/webrtc/webrtcvideocapturer_unittest.cc
index d560fc554e..85db32e7d2 100644
--- a/talk/media/webrtc/webrtcvideocapturer_unittest.cc
+++ b/talk/media/webrtc/webrtcvideocapturer_unittest.cc
@@ -111,6 +111,7 @@ TEST_F(WebRtcVideoCapturerTest, TestCapture) {
capturer_->Stop();
EXPECT_FALSE(capturer_->IsRunning());
EXPECT_TRUE(capturer_->GetCaptureFormat() == NULL);
+ EXPECT_EQ_WAIT(cricket::CS_STOPPED, listener_.last_capture_state(), 1000);
}
TEST_F(WebRtcVideoCapturerTest, TestCaptureVcm) {
diff --git a/talk/media/webrtc/webrtcvideoengine2.cc b/talk/media/webrtc/webrtcvideoengine2.cc
index bcd513ee2d..55c07426d0 100644
--- a/talk/media/webrtc/webrtcvideoengine2.cc
+++ b/talk/media/webrtc/webrtcvideoengine2.cc
@@ -152,9 +152,7 @@ bool CodecIsInternallySupported(const std::string& codec_name) {
return true;
}
if (CodecNamesEq(codec_name, kVp9CodecName)) {
- const std::string group_name =
- webrtc::field_trial::FindFullName("WebRTC-SupportVP9");
- return group_name == "Enabled" || group_name == "EnabledByFlag";
+ return true;
}
if (CodecNamesEq(codec_name, kH264CodecName)) {
return webrtc::H264Encoder::IsSupported() &&
@@ -168,6 +166,8 @@ void AddDefaultFeedbackParams(VideoCodec* codec) {
codec->AddFeedbackParam(FeedbackParam(kRtcpFbParamNack, kParamValueEmpty));
codec->AddFeedbackParam(FeedbackParam(kRtcpFbParamNack, kRtcpFbNackParamPli));
codec->AddFeedbackParam(FeedbackParam(kRtcpFbParamRemb, kParamValueEmpty));
+ codec->AddFeedbackParam(
+ FeedbackParam(kRtcpFbParamTransportCc, kParamValueEmpty));
}
static VideoCodec MakeVideoCodecWithDefaultFeedbackParams(int payload_type,
@@ -243,20 +243,6 @@ static bool ValidateStreamParams(const StreamParams& sp) {
return true;
}
-static std::string RtpExtensionsToString(
- const std::vector<RtpHeaderExtension>& extensions) {
- std::stringstream out;
- out << '{';
- for (size_t i = 0; i < extensions.size(); ++i) {
- out << "{" << extensions[i].uri << ": " << extensions[i].id << "}";
- if (i != extensions.size() - 1) {
- out << ", ";
- }
- }
- out << '}';
- return out.str();
-}
-
inline const webrtc::RtpExtension* FindHeaderExtension(
const std::vector<webrtc::RtpExtension>& extensions,
const std::string& name) {
@@ -303,7 +289,8 @@ static void MergeFecConfig(const webrtc::FecConfig& other,
// Returns true if the given codec is disallowed from doing simulcast.
bool IsCodecBlacklistedForSimulcast(const std::string& codec_name) {
- return CodecNamesEq(codec_name, kH264CodecName);
+ return CodecNamesEq(codec_name, kH264CodecName) ||
+ CodecNamesEq(codec_name, kVp9CodecName);
}
// The selected thresholds for QVGA and VGA corresponded to a QP around 10.
@@ -339,13 +326,13 @@ static const int kDefaultRtcpReceiverReportSsrc = 1;
std::vector<VideoCodec> DefaultVideoCodecList() {
std::vector<VideoCodec> codecs;
+ codecs.push_back(MakeVideoCodecWithDefaultFeedbackParams(kDefaultVp8PlType,
+ kVp8CodecName));
if (CodecIsInternallySupported(kVp9CodecName)) {
codecs.push_back(MakeVideoCodecWithDefaultFeedbackParams(kDefaultVp9PlType,
kVp9CodecName));
// TODO(andresp): Add rtx codec for vp9 and verify it works.
}
- codecs.push_back(MakeVideoCodecWithDefaultFeedbackParams(kDefaultVp8PlType,
- kVp8CodecName));
if (CodecIsInternallySupported(kH264CodecName)) {
codecs.push_back(MakeVideoCodecWithDefaultFeedbackParams(kDefaultH264PlType,
kH264CodecName));
@@ -357,72 +344,6 @@ std::vector<VideoCodec> DefaultVideoCodecList() {
return codecs;
}
-static bool FindFirstMatchingCodec(const std::vector<VideoCodec>& codecs,
- const VideoCodec& requested_codec,
- VideoCodec* matching_codec) {
- for (size_t i = 0; i < codecs.size(); ++i) {
- if (requested_codec.Matches(codecs[i])) {
- *matching_codec = codecs[i];
- return true;
- }
- }
- return false;
-}
-
-static bool ValidateRtpHeaderExtensionIds(
- const std::vector<RtpHeaderExtension>& extensions) {
- std::set<int> extensions_used;
- for (size_t i = 0; i < extensions.size(); ++i) {
- if (extensions[i].id <= 0 || extensions[i].id >= 15 ||
- !extensions_used.insert(extensions[i].id).second) {
- LOG(LS_ERROR) << "RTP extensions are with incorrect or duplicate ids.";
- return false;
- }
- }
- return true;
-}
-
-static bool CompareRtpHeaderExtensionIds(
- const webrtc::RtpExtension& extension1,
- const webrtc::RtpExtension& extension2) {
- // Sorting on ID is sufficient, more than one extension per ID is unsupported.
- return extension1.id > extension2.id;
-}
-
-static std::vector<webrtc::RtpExtension> FilterRtpExtensions(
- const std::vector<RtpHeaderExtension>& extensions) {
- std::vector<webrtc::RtpExtension> webrtc_extensions;
- for (size_t i = 0; i < extensions.size(); ++i) {
- // Unsupported extensions will be ignored.
- if (webrtc::RtpExtension::IsSupportedForVideo(extensions[i].uri)) {
- webrtc_extensions.push_back(webrtc::RtpExtension(
- extensions[i].uri, extensions[i].id));
- } else {
- LOG(LS_WARNING) << "Unsupported RTP extension: " << extensions[i].uri;
- }
- }
-
- // Sort filtered headers to make sure that they can later be compared
- // regardless of in which order they were entered.
- std::sort(webrtc_extensions.begin(), webrtc_extensions.end(),
- CompareRtpHeaderExtensionIds);
- return webrtc_extensions;
-}
-
-static bool RtpExtensionsHaveChanged(
- const std::vector<webrtc::RtpExtension>& before,
- const std::vector<webrtc::RtpExtension>& after) {
- if (before.size() != after.size())
- return true;
- for (size_t i = 0; i < before.size(); ++i) {
- if (before[i].id != after[i].id)
- return true;
- if (before[i].name != after[i].name)
- return true;
- }
- return false;
-}
-
std::vector<webrtc::VideoStream>
WebRtcVideoChannel2::WebRtcVideoSendStream::CreateSimulcastVideoStreams(
const VideoCodec& codec,
@@ -489,7 +410,8 @@ void* WebRtcVideoChannel2::WebRtcVideoSendStream::ConfigureVideoEncoderSettings(
denoising = false;
} else {
// Use codec default if video_noise_reduction is unset.
- codec_default_denoising = !options.video_noise_reduction.Get(&denoising);
+ codec_default_denoising = !options.video_noise_reduction;
+ denoising = options.video_noise_reduction.value_or(false);
}
if (CodecNamesEq(codec.name, kVp8CodecName)) {
@@ -554,20 +476,6 @@ WebRtcVideoEngine2::WebRtcVideoEngine2()
external_encoder_factory_(NULL) {
LOG(LS_INFO) << "WebRtcVideoEngine2::WebRtcVideoEngine2()";
video_codecs_ = GetSupportedCodecs();
- rtp_header_extensions_.push_back(
- RtpHeaderExtension(kRtpTimestampOffsetHeaderExtension,
- kRtpTimestampOffsetHeaderExtensionDefaultId));
- rtp_header_extensions_.push_back(
- RtpHeaderExtension(kRtpAbsoluteSenderTimeHeaderExtension,
- kRtpAbsoluteSenderTimeHeaderExtensionDefaultId));
- rtp_header_extensions_.push_back(
- RtpHeaderExtension(kRtpVideoRotationHeaderExtension,
- kRtpVideoRotationHeaderExtensionDefaultId));
- if (webrtc::field_trial::FindFullName("WebRTC-SendSideBwe") == "Enabled") {
- rtp_header_extensions_.push_back(RtpHeaderExtension(
- kRtpTransportSequenceNumberHeaderExtension,
- kRtpTransportSequenceNumberHeaderExtensionDefaultId));
- }
}
WebRtcVideoEngine2::~WebRtcVideoEngine2() {
@@ -579,29 +487,6 @@ void WebRtcVideoEngine2::Init() {
initialized_ = true;
}
-bool WebRtcVideoEngine2::SetDefaultEncoderConfig(
- const VideoEncoderConfig& config) {
- const VideoCodec& codec = config.max_codec;
- bool supports_codec = false;
- for (size_t i = 0; i < video_codecs_.size(); ++i) {
- if (CodecNamesEq(video_codecs_[i].name, codec.name)) {
- video_codecs_[i].width = codec.width;
- video_codecs_[i].height = codec.height;
- video_codecs_[i].framerate = codec.framerate;
- supports_codec = true;
- break;
- }
- }
-
- if (!supports_codec) {
- LOG(LS_ERROR) << "SetDefaultEncoderConfig, codec not supported: "
- << codec.ToString();
- return false;
- }
-
- return true;
-}
-
WebRtcVideoChannel2* WebRtcVideoEngine2::CreateChannel(
webrtc::Call* call,
const VideoOptions& options) {
@@ -615,19 +500,23 @@ const std::vector<VideoCodec>& WebRtcVideoEngine2::codecs() const {
return video_codecs_;
}
-const std::vector<RtpHeaderExtension>&
-WebRtcVideoEngine2::rtp_header_extensions() const {
- return rtp_header_extensions_;
-}
-
-void WebRtcVideoEngine2::SetLogging(int min_sev, const char* filter) {
- // TODO(pbos): Set up logging.
- LOG(LS_VERBOSE) << "SetLogging: " << min_sev << '"' << filter << '"';
- // if min_sev == -1, we keep the current log level.
- if (min_sev < 0) {
- RTC_DCHECK(min_sev == -1);
- return;
+RtpCapabilities WebRtcVideoEngine2::GetCapabilities() const {
+ RtpCapabilities capabilities;
+ capabilities.header_extensions.push_back(
+ RtpHeaderExtension(kRtpTimestampOffsetHeaderExtension,
+ kRtpTimestampOffsetHeaderExtensionDefaultId));
+ capabilities.header_extensions.push_back(
+ RtpHeaderExtension(kRtpAbsoluteSenderTimeHeaderExtension,
+ kRtpAbsoluteSenderTimeHeaderExtensionDefaultId));
+ capabilities.header_extensions.push_back(
+ RtpHeaderExtension(kRtpVideoRotationHeaderExtension,
+ kRtpVideoRotationHeaderExtensionDefaultId));
+ if (webrtc::field_trial::FindFullName("WebRTC-SendSideBwe") == "Enabled") {
+ capabilities.header_extensions.push_back(RtpHeaderExtension(
+ kRtpTransportSequenceNumberHeaderExtension,
+ kRtpTransportSequenceNumberHeaderExtensionDefaultId));
}
+ return capabilities;
}
void WebRtcVideoEngine2::SetExternalDecoderFactory(
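GetCapabilities() replaces the removed rtp_header_extensions() accessor, so callers now query a returned value rather than engine state. A hypothetical caller, where engine is assumed to be a WebRtcVideoEngine2 instance:

cricket::RtpCapabilities capabilities = engine.GetCapabilities();
for (const cricket::RtpHeaderExtension& extension :
     capabilities.header_extensions) {
  LOG(LS_INFO) << "Offered extension: " << extension.uri
               << " id=" << extension.id;
}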
@@ -677,48 +566,6 @@ bool WebRtcVideoEngine2::FindCodec(const VideoCodec& in) {
return false;
}
-// Tells whether the |requested| codec can be transmitted or not. If it can be
-// transmitted |out| is set with the best settings supported. Aspect ratio will
-// be set as close to |current|'s as possible. If not set |requested|'s
-// dimensions will be used for aspect ratio matching.
-bool WebRtcVideoEngine2::CanSendCodec(const VideoCodec& requested,
- const VideoCodec& current,
- VideoCodec* out) {
- RTC_DCHECK(out != NULL);
-
- if (requested.width != requested.height &&
- (requested.height == 0 || requested.width == 0)) {
- // 0xn and nx0 are invalid resolutions.
- return false;
- }
-
- VideoCodec matching_codec;
- if (!FindFirstMatchingCodec(video_codecs_, requested, &matching_codec)) {
- // Codec not supported.
- return false;
- }
-
- out->id = requested.id;
- out->name = requested.name;
- out->preference = requested.preference;
- out->params = requested.params;
- out->framerate = std::min(requested.framerate, matching_codec.framerate);
- out->params = requested.params;
- out->feedback_params = requested.feedback_params;
- out->width = requested.width;
- out->height = requested.height;
- if (requested.width == 0 && requested.height == 0) {
- return true;
- }
-
- while (out->width > matching_codec.width) {
- out->width /= 2;
- out->height /= 2;
- }
-
- return out->width > 0 && out->height > 0;
-}
-
// Ignore spammy trace messages, mostly from the stats API when we haven't
// gotten RTCP info yet from the remote side.
bool WebRtcVideoEngine2::ShouldIgnoreTrace(const std::string& trace) {
@@ -777,7 +624,8 @@ WebRtcVideoChannel2::WebRtcVideoChannel2(
RTC_DCHECK(thread_checker_.CalledOnValidThread());
SetDefaultOptions();
options_.SetAll(options);
- options_.cpu_overuse_detection.Get(&signal_cpu_adaptation_);
+ if (options_.cpu_overuse_detection)
+ signal_cpu_adaptation_ = *options_.cpu_overuse_detection;
rtcp_receiver_report_ssrc_ = kDefaultRtcpReceiverReportSsrc;
sending_ = false;
default_send_ssrc_ = 0;
@@ -785,10 +633,10 @@ WebRtcVideoChannel2::WebRtcVideoChannel2(
}
void WebRtcVideoChannel2::SetDefaultOptions() {
- options_.cpu_overuse_detection.Set(true);
- options_.dscp.Set(false);
- options_.suspend_below_min_bitrate.Set(false);
- options_.screencast_min_bitrate.Set(0);
+ options_.cpu_overuse_detection = rtc::Optional<bool>(true);
+ options_.dscp = rtc::Optional<bool>(false);
+ options_.suspend_below_min_bitrate = rtc::Optional<bool>(false);
+ options_.screencast_min_bitrate = rtc::Optional<int>(0);
}
WebRtcVideoChannel2::~WebRtcVideoChannel2() {
@@ -863,19 +711,43 @@ bool WebRtcVideoChannel2::ReceiveCodecsHaveChanged(
}
bool WebRtcVideoChannel2::SetSendParameters(const VideoSendParameters& params) {
+ TRACE_EVENT0("webrtc", "WebRtcVideoChannel2::SetSendParameters");
+ LOG(LS_INFO) << "SetSendParameters: " << params.ToString();
// TODO(pbos): Refactor this to only recreate the send streams once
// instead of 4 times.
- return (SetSendCodecs(params.codecs) &&
- SetSendRtpHeaderExtensions(params.extensions) &&
- SetMaxSendBandwidth(params.max_bandwidth_bps) &&
- SetOptions(params.options));
+ if (!SetSendCodecs(params.codecs) ||
+ !SetSendRtpHeaderExtensions(params.extensions) ||
+ !SetMaxSendBandwidth(params.max_bandwidth_bps) ||
+ !SetOptions(params.options)) {
+ return false;
+ }
+ if (send_params_.rtcp.reduced_size != params.rtcp.reduced_size) {
+ rtc::CritScope stream_lock(&stream_crit_);
+ for (auto& kv : send_streams_) {
+ kv.second->SetSendParameters(params);
+ }
+ }
+ send_params_ = params;
+ return true;
}
bool WebRtcVideoChannel2::SetRecvParameters(const VideoRecvParameters& params) {
+ TRACE_EVENT0("webrtc", "WebRtcVideoChannel2::SetRecvParameters");
+ LOG(LS_INFO) << "SetRecvParameters: " << params.ToString();
// TODO(pbos): Refactor this to only recreate the recv streams once
// instead of twice.
- return (SetRecvCodecs(params.codecs) &&
- SetRecvRtpHeaderExtensions(params.extensions));
+ if (!SetRecvCodecs(params.codecs) ||
+ !SetRecvRtpHeaderExtensions(params.extensions)) {
+ return false;
+ }
+ if (recv_params_.rtcp.reduced_size != params.rtcp.reduced_size) {
+ rtc::CritScope stream_lock(&stream_crit_);
+ for (auto& kv : receive_streams_) {
+ kv.second->SetRecvParameters(params);
+ }
+ }
+ recv_params_ = params;
+ return true;
}
std::string WebRtcVideoChannel2::CodecSettingsVectorToString(
@@ -952,15 +824,15 @@ bool WebRtcVideoChannel2::SetSendCodecs(const std::vector<VideoCodec>& codecs) {
LOG(LS_INFO) << "Using codec: " << supported_codecs.front().codec.ToString();
- VideoCodecSettings old_codec;
- if (send_codec_.Get(&old_codec) && supported_codecs.front() == old_codec) {
+ if (send_codec_ && supported_codecs.front() == *send_codec_) {
LOG(LS_INFO) << "Ignore call to SetSendCodecs because first supported "
"codec hasn't changed.";
// Using same codec, avoid reconfiguring.
return true;
}
- send_codec_.Set(supported_codecs.front());
+ send_codec_ = rtc::Optional<WebRtcVideoChannel2::VideoCodecSettings>(
+ supported_codecs.front());
rtc::CritScope stream_lock(&stream_crit_);
LOG(LS_INFO) << "Change the send codec because SetSendCodecs has a different "
@@ -969,12 +841,15 @@ bool WebRtcVideoChannel2::SetSendCodecs(const std::vector<VideoCodec>& codecs) {
RTC_DCHECK(kv.second != nullptr);
kv.second->SetCodec(supported_codecs.front());
}
- LOG(LS_INFO) << "SetNackAndRemb on all the receive streams because the send "
- "codec has changed.";
+ LOG(LS_INFO)
+ << "SetFeedbackOptions on all the receive streams because the send "
+ "codec has changed.";
for (auto& kv : receive_streams_) {
RTC_DCHECK(kv.second != nullptr);
- kv.second->SetNackAndRemb(HasNack(supported_codecs.front().codec),
- HasRemb(supported_codecs.front().codec));
+ kv.second->SetFeedbackParameters(
+ HasNack(supported_codecs.front().codec),
+ HasRemb(supported_codecs.front().codec),
+ HasTransportCc(supported_codecs.front().codec));
}
// TODO(holmer): Changing the codec parameters shouldn't necessarily mean that
@@ -1006,12 +881,11 @@ bool WebRtcVideoChannel2::SetSendCodecs(const std::vector<VideoCodec>& codecs) {
}
bool WebRtcVideoChannel2::GetSendCodec(VideoCodec* codec) {
- VideoCodecSettings codec_settings;
- if (!send_codec_.Get(&codec_settings)) {
+ if (!send_codec_) {
LOG(LS_VERBOSE) << "GetSendCodec: No send codec set.";
return false;
}
- *codec = codec_settings.codec;
+ *codec = send_codec_->codec;
return true;
}
@@ -1028,7 +902,7 @@ bool WebRtcVideoChannel2::SetSendStreamFormat(uint32_t ssrc,
bool WebRtcVideoChannel2::SetSend(bool send) {
LOG(LS_VERBOSE) << "SetSend: " << (send ? "true" : "false");
- if (send && !send_codec_.IsSet()) {
+ if (send && !send_codec_) {
LOG(LS_ERROR) << "SetSend(true) called before setting codec.";
return false;
}
@@ -1094,15 +968,10 @@ bool WebRtcVideoChannel2::AddSendStream(const StreamParams& sp) {
webrtc::VideoSendStream::Config config(this);
config.overuse_callback = this;
- WebRtcVideoSendStream* stream =
- new WebRtcVideoSendStream(call_,
- sp,
- config,
- external_encoder_factory_,
- options_,
- bitrate_config_.max_bitrate_bps,
- send_codec_,
- send_rtp_extensions_);
+ WebRtcVideoSendStream* stream = new WebRtcVideoSendStream(
+ call_, sp, config, external_encoder_factory_, options_,
+ bitrate_config_.max_bitrate_bps, send_codec_, send_rtp_extensions_,
+ send_params_);
uint32_t ssrc = sp.first_ssrc();
RTC_DCHECK(ssrc != 0);
@@ -1224,15 +1093,13 @@ bool WebRtcVideoChannel2::AddRecvStream(const StreamParams& sp,
// Set up A/V sync group based on sync label.
config.sync_group = sp.sync_label;
- config.rtp.remb = false;
- VideoCodecSettings send_codec;
- if (send_codec_.Get(&send_codec)) {
- config.rtp.remb = HasRemb(send_codec.codec);
- }
+ config.rtp.remb = send_codec_ ? HasRemb(send_codec_->codec) : false;
+ config.rtp.transport_cc =
+ send_codec_ ? HasTransportCc(send_codec_->codec) : false;
receive_streams_[ssrc] = new WebRtcVideoReceiveStream(
call_, sp, config, external_decoder_factory_, default_stream,
- recv_codecs_);
+ recv_codecs_, options_.disable_prerenderer_smoothing.value_or(false));
return true;
}
@@ -1246,6 +1113,9 @@ void WebRtcVideoChannel2::ConfigureReceiverRtp(
config->rtp.local_ssrc = rtcp_receiver_report_ssrc_;
config->rtp.extensions = recv_rtp_extensions_;
+ config->rtp.rtcp_mode = recv_params_.rtcp.reduced_size
+ ? webrtc::RtcpMode::kReducedSize
+ : webrtc::RtcpMode::kCompound;
// TODO(pbos): This protection is against setting the same local ssrc as
// remote which is not permitted by the lower-level API. RTCP requires a
@@ -1482,12 +1352,14 @@ void WebRtcVideoChannel2::OnRtcpReceived(
const rtc::PacketTime& packet_time) {
const webrtc::PacketTime webrtc_packet_time(packet_time.timestamp,
packet_time.not_before);
- if (call_->Receiver()->DeliverPacket(
- webrtc::MediaType::VIDEO,
- reinterpret_cast<const uint8_t*>(packet->data()), packet->size(),
- webrtc_packet_time) != webrtc::PacketReceiver::DELIVERY_OK) {
- LOG(LS_WARNING) << "Failed to deliver RTCP packet.";
- }
+ // TODO(pbos): Check webrtc::PacketReceiver::DELIVERY_OK once we deliver
+ // both audio and video on the same path. Since BundleFilter no longer
+ // filters RTCP, incoming RTCP packets may have been destined for audio,
+ // so logging delivery failures would spam the log.
+ call_->Receiver()->DeliverPacket(
+ webrtc::MediaType::VIDEO,
+ reinterpret_cast<const uint8_t*>(packet->data()), packet->size(),
+ webrtc_packet_time);
}
void WebRtcVideoChannel2::OnReadyToSend(bool ready) {
@@ -1512,20 +1384,17 @@ bool WebRtcVideoChannel2::MuteStream(uint32_t ssrc, bool mute) {
bool WebRtcVideoChannel2::SetRecvRtpHeaderExtensions(
const std::vector<RtpHeaderExtension>& extensions) {
TRACE_EVENT0("webrtc", "WebRtcVideoChannel2::SetRecvRtpHeaderExtensions");
- LOG(LS_INFO) << "SetRecvRtpHeaderExtensions: "
- << RtpExtensionsToString(extensions);
- if (!ValidateRtpHeaderExtensionIds(extensions))
+ if (!ValidateRtpExtensions(extensions)) {
return false;
-
- std::vector<webrtc::RtpExtension> filtered_extensions =
- FilterRtpExtensions(extensions);
- if (!RtpExtensionsHaveChanged(recv_rtp_extensions_, filtered_extensions)) {
+ }
+ std::vector<webrtc::RtpExtension> filtered_extensions = FilterRtpExtensions(
+ extensions, webrtc::RtpExtension::IsSupportedForVideo, false);
+ if (recv_rtp_extensions_ == filtered_extensions) {
LOG(LS_INFO) << "Ignoring call to SetRecvRtpHeaderExtensions because "
"header extensions haven't changed.";
return true;
}
-
- recv_rtp_extensions_ = filtered_extensions;
+ recv_rtp_extensions_.swap(filtered_extensions);
rtc::CritScope stream_lock(&stream_crit_);
for (std::map<uint32_t, WebRtcVideoReceiveStream*>::iterator it =
@@ -1539,21 +1408,17 @@ bool WebRtcVideoChannel2::SetRecvRtpHeaderExtensions(
bool WebRtcVideoChannel2::SetSendRtpHeaderExtensions(
const std::vector<RtpHeaderExtension>& extensions) {
TRACE_EVENT0("webrtc", "WebRtcVideoChannel2::SetSendRtpHeaderExtensions");
- LOG(LS_INFO) << "SetSendRtpHeaderExtensions: "
- << RtpExtensionsToString(extensions);
- if (!ValidateRtpHeaderExtensionIds(extensions))
+ if (!ValidateRtpExtensions(extensions)) {
return false;
-
- std::vector<webrtc::RtpExtension> filtered_extensions =
- FilterRtpExtensions(FilterRedundantRtpExtensions(
- extensions, kBweExtensionPriorities, kBweExtensionPrioritiesLength));
- if (!RtpExtensionsHaveChanged(send_rtp_extensions_, filtered_extensions)) {
- LOG(LS_INFO) << "Ignoring call to SetSendRtpHeaderExtensions because "
+ }
+ std::vector<webrtc::RtpExtension> filtered_extensions = FilterRtpExtensions(
+ extensions, webrtc::RtpExtension::IsSupportedForVideo, true);
+ if (send_rtp_extensions_ == filtered_extensions) {
+ LOG(LS_INFO) << "Ignoring call to SetRecvRtpHeaderExtensions because "
"header extensions haven't changed.";
return true;
}
-
- send_rtp_extensions_ = filtered_extensions;
+ send_rtp_extensions_.swap(filtered_extensions);
const webrtc::RtpExtension* cvo_extension = FindHeaderExtension(
send_rtp_extensions_, kRtpVideoRotationHeaderExtension);
@@ -1612,11 +1477,11 @@ bool WebRtcVideoChannel2::SetOptions(const VideoOptions& options) {
}
{
rtc::CritScope lock(&capturer_crit_);
- options_.cpu_overuse_detection.Get(&signal_cpu_adaptation_);
+ if (options_.cpu_overuse_detection)
+ signal_cpu_adaptation_ = *options_.cpu_overuse_detection;
}
- rtc::DiffServCodePoint dscp = options_.dscp.GetWithDefaultIfUnset(false)
- ? rtc::DSCP_AF41
- : rtc::DSCP_DEFAULT;
+ rtc::DiffServCodePoint dscp =
+ options_.dscp.value_or(false) ? rtc::DSCP_AF41 : rtc::DSCP_DEFAULT;
MediaChannel::SetDscp(dscp);
rtc::CritScope stream_lock(&stream_crit_);
for (std::map<uint32_t, WebRtcVideoSendStream*>::iterator it =
@@ -1708,12 +1573,11 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::VideoSendStreamParameters::
const webrtc::VideoSendStream::Config& config,
const VideoOptions& options,
int max_bitrate_bps,
- const Settable<VideoCodecSettings>& codec_settings)
+ const rtc::Optional<VideoCodecSettings>& codec_settings)
: config(config),
options(options),
max_bitrate_bps(max_bitrate_bps),
- codec_settings(codec_settings) {
-}
+ codec_settings(codec_settings) {}
WebRtcVideoChannel2::WebRtcVideoSendStream::AllocatedEncoder::AllocatedEncoder(
webrtc::VideoEncoder* encoder,
@@ -1737,8 +1601,11 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::WebRtcVideoSendStream(
WebRtcVideoEncoderFactory* external_encoder_factory,
const VideoOptions& options,
int max_bitrate_bps,
- const Settable<VideoCodecSettings>& codec_settings,
- const std::vector<webrtc::RtpExtension>& rtp_extensions)
+ const rtc::Optional<VideoCodecSettings>& codec_settings,
+ const std::vector<webrtc::RtpExtension>& rtp_extensions,
+ // TODO(deadbeef): Don't duplicate information between send_params,
+ // rtp_extensions, options, etc.
+ const VideoSendParameters& send_params)
: ssrcs_(sp.ssrcs),
ssrc_groups_(sp.ssrc_groups),
call_(call),
@@ -1759,10 +1626,12 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::WebRtcVideoSendStream(
&parameters_.config.rtp.rtx.ssrcs);
parameters_.config.rtp.c_name = sp.cname;
parameters_.config.rtp.extensions = rtp_extensions;
+ parameters_.config.rtp.rtcp_mode = send_params.rtcp.reduced_size
+ ? webrtc::RtcpMode::kReducedSize
+ : webrtc::RtcpMode::kCompound;
- VideoCodecSettings params;
- if (codec_settings.Get(&params)) {
- SetCodec(params);
+ if (codec_settings) {
+ SetCodec(*codec_settings);
}
}
@@ -1940,11 +1809,10 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::SetApplyRotation(
void WebRtcVideoChannel2::WebRtcVideoSendStream::SetOptions(
const VideoOptions& options) {
rtc::CritScope cs(&lock_);
- VideoCodecSettings codec_settings;
- if (parameters_.codec_settings.Get(&codec_settings)) {
+ if (parameters_.codec_settings) {
LOG(LS_INFO) << "SetCodecAndOptions because of SetOptions; options="
<< options.ToString();
- SetCodecAndOptions(codec_settings, options);
+ SetCodecAndOptions(*parameters_.codec_settings, options);
} else {
parameters_.options = options;
}
@@ -2049,10 +1917,12 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::SetCodecAndOptions(
parameters_.config.rtp.nack.rtp_history_ms =
HasNack(codec_settings.codec) ? kNackHistoryMs : 0;
- options.suspend_below_min_bitrate.Get(
- &parameters_.config.suspend_below_min_bitrate);
+ RTC_CHECK(options.suspend_below_min_bitrate);
+ parameters_.config.suspend_below_min_bitrate =
+ *options.suspend_below_min_bitrate;
- parameters_.codec_settings.Set(codec_settings);
+ parameters_.codec_settings =
+ rtc::Optional<WebRtcVideoChannel2::VideoCodecSettings>(codec_settings);
parameters_.options = options;
LOG(LS_INFO)
@@ -2075,17 +1945,27 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::SetRtpExtensions(
}
}
+void WebRtcVideoChannel2::WebRtcVideoSendStream::SetSendParameters(
+ const VideoSendParameters& send_params) {
+ rtc::CritScope cs(&lock_);
+ parameters_.config.rtp.rtcp_mode = send_params.rtcp.reduced_size
+ ? webrtc::RtcpMode::kReducedSize
+ : webrtc::RtcpMode::kCompound;
+ if (stream_ != nullptr) {
+ LOG(LS_INFO) << "RecreateWebRtcStream (send) because of SetSendParameters";
+ RecreateWebRtcStream();
+ }
+}
+
webrtc::VideoEncoderConfig
WebRtcVideoChannel2::WebRtcVideoSendStream::CreateVideoEncoderConfig(
const Dimensions& dimensions,
const VideoCodec& codec) const {
webrtc::VideoEncoderConfig encoder_config;
if (dimensions.is_screencast) {
- int screencast_min_bitrate_kbps;
- parameters_.options.screencast_min_bitrate.Get(
- &screencast_min_bitrate_kbps);
+ RTC_CHECK(parameters_.options.screencast_min_bitrate);
encoder_config.min_transmit_bitrate_bps =
- screencast_min_bitrate_kbps * 1000;
+ *parameters_.options.screencast_min_bitrate * 1000;
encoder_config.content_type =
webrtc::VideoEncoderConfig::ContentType::kScreen;
} else {
@@ -2121,7 +2001,7 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::CreateVideoEncoderConfig(
parameters_.max_bitrate_bps, stream_count);
// Conference mode screencast uses 2 temporal layers split at 100kbit.
- if (parameters_.options.conference_mode.GetWithDefaultIfUnset(false) &&
+ if (parameters_.options.conference_mode.value_or(false) &&
dimensions.is_screencast && encoder_config.streams.size() == 1) {
ScreenshareLayerConfig config = ScreenshareLayerConfig::GetDefault();
@@ -2156,8 +2036,8 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::SetDimensions(
RTC_DCHECK(!parameters_.encoder_config.streams.empty());
- VideoCodecSettings codec_settings;
- parameters_.codec_settings.Get(&codec_settings);
+ RTC_CHECK(parameters_.codec_settings);
+ VideoCodecSettings codec_settings = *parameters_.codec_settings;
webrtc::VideoEncoderConfig encoder_config =
CreateVideoEncoderConfig(last_dimensions_, codec_settings.codec);
@@ -2202,9 +2082,8 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::GetVideoSenderInfo() {
for (uint32_t ssrc : parameters_.config.rtp.ssrcs)
info.add_ssrc(ssrc);
- VideoCodecSettings codec_settings;
- if (parameters_.codec_settings.Get(&codec_settings))
- info.codec_name = codec_settings.codec.name;
+ if (parameters_.codec_settings)
+ info.codec_name = parameters_.codec_settings->codec.name;
for (size_t i = 0; i < parameters_.encoder_config.streams.size(); ++i) {
if (i == parameters_.encoder_config.streams.size() - 1) {
info.preferred_bitrate +=
@@ -2238,6 +2117,15 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::GetVideoSenderInfo() {
}
}
}
+
+ // Get bandwidth-limitation info from stream_->GetStats(): the input
+ // resolution (the video_adapter's output) can be scaled down further, or
+ // higher video layer(s) can be dropped, due to bitrate constraints.
+ // Note: adapt_changes only includes changes from the video_adapter.
+ if (stats.bw_limited_resolution)
+ info.adapt_reason |= CoordinatedVideoAdapter::ADAPTREASON_BANDWIDTH;
+
+ info.encoder_implementation_name = stats.encoder_implementation_name;
info.ssrc_groups = ssrc_groups_;
info.framerate_input = stats.input_frame_rate;
info.framerate_sent = stats.encode_frame_rate;
@@ -2316,11 +2204,10 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::RecreateWebRtcStream() {
call_->DestroyVideoSendStream(stream_);
}
- VideoCodecSettings codec_settings;
- parameters_.codec_settings.Get(&codec_settings);
+ RTC_CHECK(parameters_.codec_settings);
parameters_.encoder_config.encoder_specific_settings =
ConfigureVideoEncoderSettings(
- codec_settings.codec, parameters_.options,
+ parameters_.codec_settings->codec, parameters_.options,
parameters_.encoder_config.content_type ==
webrtc::VideoEncoderConfig::ContentType::kScreen);
@@ -2345,7 +2232,8 @@ WebRtcVideoChannel2::WebRtcVideoReceiveStream::WebRtcVideoReceiveStream(
const webrtc::VideoReceiveStream::Config& config,
WebRtcVideoDecoderFactory* external_decoder_factory,
bool default_stream,
- const std::vector<VideoCodecSettings>& recv_codecs)
+ const std::vector<VideoCodecSettings>& recv_codecs,
+ bool disable_prerenderer_smoothing)
: call_(call),
ssrcs_(sp.ssrcs),
ssrc_groups_(sp.ssrc_groups),
@@ -2353,6 +2241,7 @@ WebRtcVideoChannel2::WebRtcVideoReceiveStream::WebRtcVideoReceiveStream(
default_stream_(default_stream),
config_(config),
external_decoder_factory_(external_decoder_factory),
+ disable_prerenderer_smoothing_(disable_prerenderer_smoothing),
renderer_(NULL),
last_width_(-1),
last_height_(-1),
@@ -2457,10 +2346,10 @@ void WebRtcVideoChannel2::WebRtcVideoReceiveStream::SetRecvCodecs(
config_.rtp.nack.rtp_history_ms =
HasNack(recv_codecs.begin()->codec) ? kNackHistoryMs : 0;
- ClearDecoders(&old_decoders);
LOG(LS_INFO) << "RecreateWebRtcStream (recv) because of SetRecvCodecs: "
<< CodecSettingsVectorToString(recv_codecs);
RecreateWebRtcStream();
+ ClearDecoders(&old_decoders);
}
void WebRtcVideoChannel2::WebRtcVideoReceiveStream::SetLocalSsrc(
@@ -2482,20 +2371,28 @@ void WebRtcVideoChannel2::WebRtcVideoReceiveStream::SetLocalSsrc(
RecreateWebRtcStream();
}
-void WebRtcVideoChannel2::WebRtcVideoReceiveStream::SetNackAndRemb(
- bool nack_enabled, bool remb_enabled) {
+void WebRtcVideoChannel2::WebRtcVideoReceiveStream::SetFeedbackParameters(
+ bool nack_enabled,
+ bool remb_enabled,
+ bool transport_cc_enabled) {
int nack_history_ms = nack_enabled ? kNackHistoryMs : 0;
if (config_.rtp.nack.rtp_history_ms == nack_history_ms &&
- config_.rtp.remb == remb_enabled) {
- LOG(LS_INFO) << "Ignoring call to SetNackAndRemb because parameters are "
- "unchanged; nack=" << nack_enabled
- << ", remb=" << remb_enabled;
+ config_.rtp.remb == remb_enabled &&
+ config_.rtp.transport_cc == transport_cc_enabled) {
+ LOG(LS_INFO)
+ << "Ignoring call to SetFeedbackParameters because parameters are "
+ "unchanged; nack="
+ << nack_enabled << ", remb=" << remb_enabled
+ << ", transport_cc=" << transport_cc_enabled;
return;
}
config_.rtp.remb = remb_enabled;
config_.rtp.nack.rtp_history_ms = nack_history_ms;
- LOG(LS_INFO) << "RecreateWebRtcStream (recv) because of SetNackAndRemb; nack="
- << nack_enabled << ", remb=" << remb_enabled;
+ config_.rtp.transport_cc = transport_cc_enabled;
+ LOG(LS_INFO)
+ << "RecreateWebRtcStream (recv) because of SetFeedbackParameters; nack="
+ << nack_enabled << ", remb=" << remb_enabled
+ << ", transport_cc=" << transport_cc_enabled;
RecreateWebRtcStream();
}
@@ -2506,6 +2403,15 @@ void WebRtcVideoChannel2::WebRtcVideoReceiveStream::SetRtpExtensions(
RecreateWebRtcStream();
}
+void WebRtcVideoChannel2::WebRtcVideoReceiveStream::SetRecvParameters(
+ const VideoRecvParameters& recv_params) {
+ config_.rtp.rtcp_mode = recv_params.rtcp.reduced_size
+ ? webrtc::RtcpMode::kReducedSize
+ : webrtc::RtcpMode::kCompound;
+ LOG(LS_INFO) << "RecreateWebRtcStream (recv) because of SetRecvParameters";
+ RecreateWebRtcStream();
+}
+
void WebRtcVideoChannel2::WebRtcVideoReceiveStream::RecreateWebRtcStream() {
if (stream_ != NULL) {
call_->DestroyVideoReceiveStream(stream_);
@@ -2560,6 +2466,11 @@ bool WebRtcVideoChannel2::WebRtcVideoReceiveStream::IsTextureSupported() const {
return true;
}
+bool WebRtcVideoChannel2::WebRtcVideoReceiveStream::SmoothsRenderedFrames()
+ const {
+ return disable_prerenderer_smoothing_;
+}
+
bool WebRtcVideoChannel2::WebRtcVideoReceiveStream::IsDefaultStream() const {
return default_stream_;
}
@@ -2607,6 +2518,7 @@ WebRtcVideoChannel2::WebRtcVideoReceiveStream::GetVideoReceiverInfo() {
info.ssrc_groups = ssrc_groups_;
info.add_ssrc(config_.rtp.remote_ssrc);
webrtc::VideoReceiveStream::Stats stats = stream_->GetStats();
+ info.decoder_implementation_name = stats.decoder_implementation_name;
info.bytes_rcvd = stats.rtp_stats.transmitted.payload_bytes +
stats.rtp_stats.transmitted.header_bytes +
stats.rtp_stats.transmitted.padding_bytes;
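Throughout this file the patch migrates Settable&lt;T&gt; to rtc::Optional&lt;T&gt;. A minimal sketch of the rtc::Optional surface it relies on, assuming webrtc/base/optional.h semantics:

rtc::Optional<bool> dscp;                // default-constructed: unset
bool use_dscp = dscp.value_or(false);    // default while unset
dscp = rtc::Optional<bool>(true);        // set an explicit value
if (dscp) {                              // engaged check replaces IsSet()
  use_dscp = *dscp;                      // dereference replaces Get(&value)
}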
diff --git a/talk/media/webrtc/webrtcvideoengine2.h b/talk/media/webrtc/webrtcvideoengine2.h
index 7096135cdd..1b8da16368 100644
--- a/talk/media/webrtc/webrtcvideoengine2.h
+++ b/talk/media/webrtc/webrtcvideoengine2.h
@@ -112,14 +112,11 @@ class WebRtcVideoEngine2 {
// Basic video engine implementation.
void Init();
- bool SetDefaultEncoderConfig(const VideoEncoderConfig& config);
-
WebRtcVideoChannel2* CreateChannel(webrtc::Call* call,
const VideoOptions& options);
const std::vector<VideoCodec>& codecs() const;
- const std::vector<RtpHeaderExtension>& rtp_header_extensions() const;
- void SetLogging(int min_sev, const char* filter);
+ RtpCapabilities GetCapabilities() const;
// Set a WebRtcVideoDecoderFactory for external decoding. Video engine does
// not take the ownership of |decoder_factory|. The caller needs to make sure
@@ -134,9 +131,6 @@ class WebRtcVideoEngine2 {
bool EnableTimedRender();
bool FindCodec(const VideoCodec& in);
- bool CanSendCodec(const VideoCodec& in,
- const VideoCodec& current,
- VideoCodec* out);
// Check whether the supplied trace should be ignored.
bool ShouldIgnoreTrace(const std::string& trace);
@@ -144,7 +138,6 @@ class WebRtcVideoEngine2 {
std::vector<VideoCodec> GetSupportedCodecs() const;
std::vector<VideoCodec> video_codecs_;
- std::vector<RtpHeaderExtension> rtp_header_extensions_;
bool initialized_;
@@ -250,14 +243,18 @@ class WebRtcVideoChannel2 : public rtc::MessageHandler,
WebRtcVideoEncoderFactory* external_encoder_factory,
const VideoOptions& options,
int max_bitrate_bps,
- const Settable<VideoCodecSettings>& codec_settings,
- const std::vector<webrtc::RtpExtension>& rtp_extensions);
+ const rtc::Optional<VideoCodecSettings>& codec_settings,
+ const std::vector<webrtc::RtpExtension>& rtp_extensions,
+ const VideoSendParameters& send_params);
~WebRtcVideoSendStream();
void SetOptions(const VideoOptions& options);
void SetCodec(const VideoCodecSettings& codec);
void SetRtpExtensions(
const std::vector<webrtc::RtpExtension>& rtp_extensions);
+ // TODO(deadbeef): Move logic from SetCodec/SetRtpExtensions/etc.
+ // into this method. Currently this method only sets the RTCP mode.
+ void SetSendParameters(const VideoSendParameters& send_params);
void InputFrame(VideoCapturer* capturer, const VideoFrame* frame);
bool SetCapturer(VideoCapturer* capturer);
@@ -286,11 +283,11 @@ class WebRtcVideoChannel2 : public rtc::MessageHandler,
const webrtc::VideoSendStream::Config& config,
const VideoOptions& options,
int max_bitrate_bps,
- const Settable<VideoCodecSettings>& codec_settings);
+ const rtc::Optional<VideoCodecSettings>& codec_settings);
webrtc::VideoSendStream::Config config;
VideoOptions options;
int max_bitrate_bps;
- Settable<VideoCodecSettings> codec_settings;
+ rtc::Optional<VideoCodecSettings> codec_settings;
// Sent resolutions + bitrates etc. by the underlying VideoSendStream,
// typically changes when setting a new resolution or reconfiguring
// bitrates.
@@ -395,19 +392,26 @@ class WebRtcVideoChannel2 : public rtc::MessageHandler,
const webrtc::VideoReceiveStream::Config& config,
WebRtcVideoDecoderFactory* external_decoder_factory,
bool default_stream,
- const std::vector<VideoCodecSettings>& recv_codecs);
+ const std::vector<VideoCodecSettings>& recv_codecs,
+ bool disable_prerenderer_smoothing);
~WebRtcVideoReceiveStream();
const std::vector<uint32_t>& GetSsrcs() const;
void SetLocalSsrc(uint32_t local_ssrc);
- void SetNackAndRemb(bool nack_enabled, bool remb_enabled);
+ void SetFeedbackParameters(bool nack_enabled,
+ bool remb_enabled,
+ bool transport_cc_enabled);
void SetRecvCodecs(const std::vector<VideoCodecSettings>& recv_codecs);
void SetRtpExtensions(const std::vector<webrtc::RtpExtension>& extensions);
+ // TODO(deadbeef): Move logic from SetRecvCodecs/SetRtpExtensions/etc.
+ // into this method. Currently this method only sets the RTCP mode.
+ void SetRecvParameters(const VideoRecvParameters& recv_params);
void RenderFrame(const webrtc::VideoFrame& frame,
int time_to_render_ms) override;
bool IsTextureSupported() const override;
+ bool SmoothsRenderedFrames() const override;
bool IsDefaultStream() const;
void SetRenderer(cricket::VideoRenderer* renderer);
@@ -448,6 +452,8 @@ class WebRtcVideoChannel2 : public rtc::MessageHandler,
WebRtcVideoDecoderFactory* const external_decoder_factory_;
std::vector<AllocatedDecoder> allocated_decoders_;
+ const bool disable_prerenderer_smoothing_;
+
rtc::CriticalSection renderer_lock_;
cricket::VideoRenderer* renderer_ GUARDED_BY(renderer_lock_);
int last_width_ GUARDED_BY(renderer_lock_);
@@ -512,7 +518,7 @@ class WebRtcVideoChannel2 : public rtc::MessageHandler,
std::set<uint32_t> send_ssrcs_ GUARDED_BY(stream_crit_);
std::set<uint32_t> receive_ssrcs_ GUARDED_BY(stream_crit_);
- Settable<VideoCodecSettings> send_codec_;
+ rtc::Optional<VideoCodecSettings> send_codec_;
std::vector<webrtc::RtpExtension> send_rtp_extensions_;
WebRtcVideoEncoderFactory* const external_encoder_factory_;
@@ -521,6 +527,10 @@ class WebRtcVideoChannel2 : public rtc::MessageHandler,
std::vector<webrtc::RtpExtension> recv_rtp_extensions_;
webrtc::Call::Config::BitrateConfig bitrate_config_;
VideoOptions options_;
+ // TODO(deadbeef): Don't duplicate information between
+ // send_params/recv_params, rtp_extensions, options, etc.
+ VideoSendParameters send_params_;
+ VideoRecvParameters recv_params_;
};
} // namespace cricket
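The reduced-size RTCP mapping now appears in three places (the send-stream config, ConfigureReceiverRtp, and SetRecvParameters). A small helper (hypothetical, not part of the patch) would capture the repeated expression:

webrtc::RtcpMode RtcpModeFromReducedSize(bool reduced_size) {
  return reduced_size ? webrtc::RtcpMode::kReducedSize
                      : webrtc::RtcpMode::kCompound;
}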
diff --git a/talk/media/webrtc/webrtcvideoengine2_unittest.cc b/talk/media/webrtc/webrtcvideoengine2_unittest.cc
index c0cd2ffa50..41e04a9fa7 100644
--- a/talk/media/webrtc/webrtcvideoengine2_unittest.cc
+++ b/talk/media/webrtc/webrtcvideoengine2_unittest.cc
@@ -75,6 +75,8 @@ void VerifyCodecHasDefaultFeedbackParams(const cricket::VideoCodec& codec) {
EXPECT_TRUE(codec.HasFeedbackParam(cricket::FeedbackParam(
cricket::kRtcpFbParamRemb, cricket::kParamValueEmpty)));
EXPECT_TRUE(codec.HasFeedbackParam(cricket::FeedbackParam(
+ cricket::kRtcpFbParamTransportCc, cricket::kParamValueEmpty)));
+ EXPECT_TRUE(codec.HasFeedbackParam(cricket::FeedbackParam(
cricket::kRtcpFbParamCcm, cricket::kRtcpFbCcmParamFir)));
}
@@ -205,26 +207,6 @@ TEST_F(WebRtcVideoEngine2Test, FindCodec) {
EXPECT_TRUE(engine_.FindCodec(rtx));
}
-TEST_F(WebRtcVideoEngine2Test, SetDefaultEncoderConfigPreservesFeedbackParams) {
- cricket::VideoCodec max_settings(
- engine_.codecs()[0].id, engine_.codecs()[0].name,
- engine_.codecs()[0].width / 2, engine_.codecs()[0].height / 2, 30, 0);
- // This codec shouldn't have NACK by default or the test is pointless.
- EXPECT_FALSE(max_settings.HasFeedbackParam(
- FeedbackParam(kRtcpFbParamNack, kParamValueEmpty)));
- // The engine should by default have it however.
- EXPECT_TRUE(engine_.codecs()[0].HasFeedbackParam(
- FeedbackParam(kRtcpFbParamNack, kParamValueEmpty)));
-
- // Set constrained max codec settings.
- EXPECT_TRUE(engine_.SetDefaultEncoderConfig(
- cricket::VideoEncoderConfig(max_settings)));
-
- // Verify that feedback parameters are retained.
- EXPECT_TRUE(engine_.codecs()[0].HasFeedbackParam(
- FeedbackParam(kRtcpFbParamNack, kParamValueEmpty)));
-}
-
TEST_F(WebRtcVideoEngine2Test, DefaultRtxCodecHasAssociatedPayloadTypeSet) {
std::vector<VideoCodec> engine_codecs = engine_.codecs();
for (size_t i = 0; i < engine_codecs.size(); ++i) {
@@ -240,11 +222,11 @@ TEST_F(WebRtcVideoEngine2Test, DefaultRtxCodecHasAssociatedPayloadTypeSet) {
}
TEST_F(WebRtcVideoEngine2Test, SupportsTimestampOffsetHeaderExtension) {
- std::vector<RtpHeaderExtension> extensions = engine_.rtp_header_extensions();
- ASSERT_FALSE(extensions.empty());
- for (size_t i = 0; i < extensions.size(); ++i) {
- if (extensions[i].uri == kRtpTimestampOffsetHeaderExtension) {
- EXPECT_EQ(kRtpTimestampOffsetHeaderExtensionDefaultId, extensions[i].id);
+ RtpCapabilities capabilities = engine_.GetCapabilities();
+ ASSERT_FALSE(capabilities.header_extensions.empty());
+ for (const RtpHeaderExtension& extension : capabilities.header_extensions) {
+ if (extension.uri == kRtpTimestampOffsetHeaderExtension) {
+ EXPECT_EQ(kRtpTimestampOffsetHeaderExtensionDefaultId, extension.id);
return;
}
}
@@ -252,12 +234,11 @@ TEST_F(WebRtcVideoEngine2Test, SupportsTimestampOffsetHeaderExtension) {
}
TEST_F(WebRtcVideoEngine2Test, SupportsAbsoluteSenderTimeHeaderExtension) {
- std::vector<RtpHeaderExtension> extensions = engine_.rtp_header_extensions();
- ASSERT_FALSE(extensions.empty());
- for (size_t i = 0; i < extensions.size(); ++i) {
- if (extensions[i].uri == kRtpAbsoluteSenderTimeHeaderExtension) {
- EXPECT_EQ(kRtpAbsoluteSenderTimeHeaderExtensionDefaultId,
- extensions[i].id);
+ RtpCapabilities capabilities = engine_.GetCapabilities();
+ ASSERT_FALSE(capabilities.header_extensions.empty());
+ for (const RtpHeaderExtension& extension : capabilities.header_extensions) {
+ if (extension.uri == kRtpAbsoluteSenderTimeHeaderExtension) {
+ EXPECT_EQ(kRtpAbsoluteSenderTimeHeaderExtensionDefaultId, extension.id);
return;
}
}
@@ -272,12 +253,12 @@ class WebRtcVideoEngine2WithSendSideBweTest : public WebRtcVideoEngine2Test {
TEST_F(WebRtcVideoEngine2WithSendSideBweTest,
SupportsTransportSequenceNumberHeaderExtension) {
- std::vector<RtpHeaderExtension> extensions = engine_.rtp_header_extensions();
- ASSERT_FALSE(extensions.empty());
- for (size_t i = 0; i < extensions.size(); ++i) {
- if (extensions[i].uri == kRtpTransportSequenceNumberHeaderExtension) {
+ RtpCapabilities capabilities = engine_.GetCapabilities();
+ ASSERT_FALSE(capabilities.header_extensions.empty());
+ for (const RtpHeaderExtension& extension : capabilities.header_extensions) {
+ if (extension.uri == kRtpTransportSequenceNumberHeaderExtension) {
EXPECT_EQ(kRtpTransportSequenceNumberHeaderExtensionDefaultId,
- extensions[i].id);
+ extension.id);
return;
}
}
@@ -285,11 +266,11 @@ TEST_F(WebRtcVideoEngine2WithSendSideBweTest,
}
TEST_F(WebRtcVideoEngine2Test, SupportsVideoRotationHeaderExtension) {
- std::vector<RtpHeaderExtension> extensions = engine_.rtp_header_extensions();
- ASSERT_FALSE(extensions.empty());
- for (size_t i = 0; i < extensions.size(); ++i) {
- if (extensions[i].uri == kRtpVideoRotationHeaderExtension) {
- EXPECT_EQ(kRtpVideoRotationHeaderExtensionDefaultId, extensions[i].id);
+ RtpCapabilities capabilities = engine_.GetCapabilities();
+ ASSERT_FALSE(capabilities.header_extensions.empty());
+ for (const RtpHeaderExtension& extension : capabilities.header_extensions) {
+ if (extension.uri == kRtpVideoRotationHeaderExtension) {
+ EXPECT_EQ(kRtpVideoRotationHeaderExtensionDefaultId, extension.id);
return;
}
}
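
The header-extension tests above all migrate from engine_.rtp_header_extensions() to engine_.GetCapabilities(). A hypothetical helper capturing the lookup they now share, assuming only the accessors visible in these hunks:

    bool FindExtensionId(cricket::WebRtcVideoEngine2* engine,
                         const std::string& uri,
                         int* id_out) {
      cricket::RtpCapabilities capabilities = engine->GetCapabilities();
      for (const cricket::RtpHeaderExtension& extension :
           capabilities.header_extensions) {
        if (extension.uri == uri) {
          *id_out = extension.id;  // e.g. the default id checked in the tests
          return true;
        }
      }
      return false;
    }
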
@@ -794,17 +775,6 @@ TEST_F(WebRtcVideoEngine2Test, RegisterExternalH264DecoderIfSupported) {
ASSERT_EQ(1u, decoder_factory.decoders().size());
}
-class WebRtcVideoEngine2BaseTest
- : public VideoEngineTest<cricket::WebRtcVideoEngine2> {
- protected:
- typedef VideoEngineTest<cricket::WebRtcVideoEngine2> Base;
-};
-
-#define WEBRTC_ENGINE_BASE_TEST(test) \
- TEST_F(WebRtcVideoEngine2BaseTest, test) { Base::test##Body(); }
-
-WEBRTC_ENGINE_BASE_TEST(ConstrainNewCodec2);
-
class WebRtcVideoChannel2BaseTest
: public VideoMediaChannelTest<WebRtcVideoEngine2, WebRtcVideoChannel2> {
protected:
@@ -894,7 +864,10 @@ TEST_F(WebRtcVideoChannel2BaseTest, TwoStreamsReUseFirstStream) {
Base::TwoStreamsReUseFirstStream(kVp8Codec);
}
+// Disabled for TSan: https://bugs.chromium.org/p/webrtc/issues/detail?id=4963
+#if !defined(THREAD_SANITIZER)
WEBRTC_BASE_TEST(SendManyResizeOnce);
+#endif // THREAD_SANITIZER
// TODO(pbos): Enable and figure out why this fails (or should work).
TEST_F(WebRtcVideoChannel2BaseTest, DISABLED_SendVp8HdAndReceiveAdaptedVp8Vga) {
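
The guard added above keys off a THREAD_SANITIZER define. A sketch of how that define is typically produced, assuming a Clang toolchain (GCC-style builds usually pass -DTHREAD_SANITIZER on the command line instead):

    #if defined(__has_feature)
    #if __has_feature(thread_sanitizer)  // Clang's TSan feature probe.
    #define THREAD_SANITIZER 1
    #endif
    #endif

    #if !defined(THREAD_SANITIZER)
    WEBRTC_BASE_TEST(SendManyResizeOnce);  // Skipped under TSan (webrtc:4963).
    #endif
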
@@ -1097,7 +1070,7 @@ class WebRtcVideoChannel2Test : public WebRtcVideoEngine2Test {
FakeVideoSendStream* SetDenoisingOption(
const cricket::VideoSendParameters& parameters, bool enabled) {
cricket::VideoSendParameters params = parameters;
- params.options.video_noise_reduction.Set(enabled);
+ params.options.video_noise_reduction = rtc::Optional<bool>(enabled);
channel_->SetSendParameters(params);
return fake_call_->GetVideoSendStreams().back();
}
@@ -1148,7 +1121,7 @@ TEST_F(WebRtcVideoChannel2Test, RecvStreamWithSimAndRtx) {
parameters.codecs = engine_.codecs();
EXPECT_TRUE(channel_->SetSendParameters(parameters));
EXPECT_TRUE(channel_->SetSend(true));
- parameters.options.conference_mode.Set(true);
+ parameters.options.conference_mode = rtc::Optional<bool>(true);
EXPECT_TRUE(channel_->SetSendParameters(parameters));
// Send side.
@@ -1451,6 +1424,11 @@ TEST_F(WebRtcVideoChannel2Test, RembIsEnabledByDefault) {
EXPECT_TRUE(stream->GetConfig().rtp.remb);
}
+TEST_F(WebRtcVideoChannel2Test, TransportCcIsEnabledByDefault) {
+ FakeVideoReceiveStream* stream = AddRecvStream();
+ EXPECT_TRUE(stream->GetConfig().rtp.transport_cc);
+}
+
TEST_F(WebRtcVideoChannel2Test, RembCanBeEnabledAndDisabled) {
FakeVideoReceiveStream* stream = AddRecvStream();
EXPECT_TRUE(stream->GetConfig().rtp.remb);
@@ -1471,6 +1449,27 @@ TEST_F(WebRtcVideoChannel2Test, RembCanBeEnabledAndDisabled) {
EXPECT_TRUE(stream->GetConfig().rtp.remb);
}
+TEST_F(WebRtcVideoChannel2Test, TransportCcCanBeEnabledAndDisabled) {
+ FakeVideoReceiveStream* stream = AddRecvStream();
+ EXPECT_TRUE(stream->GetConfig().rtp.transport_cc);
+
+ // Verify that transport cc feedback is turned off when send(!) codecs without
+ // transport cc feedback are set.
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(kVp8Codec);
+ EXPECT_TRUE(parameters.codecs[0].feedback_params.params().empty());
+ EXPECT_TRUE(channel_->SetSendParameters(parameters));
+ stream = fake_call_->GetVideoReceiveStreams()[0];
+ EXPECT_FALSE(stream->GetConfig().rtp.transport_cc);
+
+ // Verify that transport cc feedback is turned on when setting default codecs
+ // since the default codecs have transport cc feedback enabled.
+ parameters.codecs = engine_.codecs();
+ EXPECT_TRUE(channel_->SetSendParameters(parameters));
+ stream = fake_call_->GetVideoReceiveStreams()[0];
+ EXPECT_TRUE(stream->GetConfig().rtp.transport_cc);
+}
+
TEST_F(WebRtcVideoChannel2Test, NackIsEnabledByDefault) {
VerifyCodecHasDefaultFeedbackParams(default_codec_);
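
The transport-cc tests above rely on receive streams mirroring the feedback params of the configured send codecs. An illustrative construction of a codec carrying that param, using only the constants from this CL:

    cricket::VideoCodec codec(100, "VP8", 640, 400, 30, 0);
    codec.AddFeedbackParam(cricket::FeedbackParam(
        cricket::kRtcpFbParamTransportCc, cricket::kParamValueEmpty));
    // This is the same param VerifyCodecHasDefaultFeedbackParams now checks.
    bool has_transport_cc = codec.HasFeedbackParam(cricket::FeedbackParam(
        cricket::kRtcpFbParamTransportCc, cricket::kParamValueEmpty));
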
@@ -1558,7 +1557,8 @@ TEST_F(WebRtcVideoChannel2Test, UsesCorrectSettingsForScreencast) {
cricket::VideoCodec codec = kVp8Codec360p;
cricket::VideoSendParameters parameters;
parameters.codecs.push_back(codec);
- parameters.options.screencast_min_bitrate.Set(kScreenshareMinBitrateKbps);
+ parameters.options.screencast_min_bitrate =
+ rtc::Optional<int>(kScreenshareMinBitrateKbps);
EXPECT_TRUE(channel_->SetSendParameters(parameters));
AddSendStream();
@@ -1612,7 +1612,7 @@ TEST_F(WebRtcVideoChannel2Test,
ConferenceModeScreencastConfiguresTemporalLayer) {
static const int kConferenceScreencastTemporalBitrateBps =
ScreenshareLayerConfig::GetDefault().tl0_bitrate_kbps * 1000;
- send_parameters_.options.conference_mode.Set(true);
+ send_parameters_.options.conference_mode = rtc::Optional<bool>(true);
channel_->SetSendParameters(send_parameters_);
AddSendStream();
@@ -1659,13 +1659,15 @@ TEST_F(WebRtcVideoChannel2Test, SuspendBelowMinBitrateDisabledByDefault) {
}
TEST_F(WebRtcVideoChannel2Test, SetOptionsWithSuspendBelowMinBitrate) {
- send_parameters_.options.suspend_below_min_bitrate.Set(true);
+ send_parameters_.options.suspend_below_min_bitrate =
+ rtc::Optional<bool>(true);
channel_->SetSendParameters(send_parameters_);
FakeVideoSendStream* stream = AddSendStream();
EXPECT_TRUE(stream->GetConfig().suspend_below_min_bitrate);
- send_parameters_.options.suspend_below_min_bitrate.Set(false);
+ send_parameters_.options.suspend_below_min_bitrate =
+ rtc::Optional<bool>(false);
channel_->SetSendParameters(send_parameters_);
stream = fake_call_->GetVideoSendStreams()[0];
@@ -1853,7 +1855,7 @@ void WebRtcVideoChannel2Test::TestCpuAdaptation(bool enable_overuse,
cricket::VideoSendParameters parameters;
parameters.codecs.push_back(codec);
if (!enable_overuse) {
- parameters.options.cpu_overuse_detection.Set(false);
+ parameters.options.cpu_overuse_detection = rtc::Optional<bool>(false);
}
EXPECT_TRUE(channel_->SetSendParameters(parameters));
@@ -2375,19 +2377,55 @@ TEST_F(WebRtcVideoChannel2Test, TestSetDscpOptions) {
cricket::VideoSendParameters parameters = send_parameters_;
EXPECT_TRUE(channel_->SetSendParameters(parameters));
EXPECT_EQ(rtc::DSCP_NO_CHANGE, network_interface->dscp());
- parameters.options.dscp.Set(true);
+ parameters.options.dscp = rtc::Optional<bool>(true);
EXPECT_TRUE(channel_->SetSendParameters(parameters));
EXPECT_EQ(rtc::DSCP_AF41, network_interface->dscp());
// Verify previous value is not modified if dscp option is not set.
cricket::VideoSendParameters parameters1 = send_parameters_;
EXPECT_TRUE(channel_->SetSendParameters(parameters1));
EXPECT_EQ(rtc::DSCP_AF41, network_interface->dscp());
- parameters1.options.dscp.Set(false);
+ parameters1.options.dscp = rtc::Optional<bool>(false);
EXPECT_TRUE(channel_->SetSendParameters(parameters1));
EXPECT_EQ(rtc::DSCP_DEFAULT, network_interface->dscp());
channel_->SetInterface(NULL);
}
+// This test verifies that the RTCP reduced size mode is properly applied to
+// send video streams.
+TEST_F(WebRtcVideoChannel2Test, TestSetSendRtcpReducedSize) {
+ // Create stream, expecting that default mode is "compound".
+ FakeVideoSendStream* stream1 = AddSendStream();
+ EXPECT_EQ(webrtc::RtcpMode::kCompound, stream1->GetConfig().rtp.rtcp_mode);
+
+ // Now enable reduced size mode.
+ send_parameters_.rtcp.reduced_size = true;
+ EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
+ stream1 = fake_call_->GetVideoSendStreams()[0];
+ EXPECT_EQ(webrtc::RtcpMode::kReducedSize, stream1->GetConfig().rtp.rtcp_mode);
+
+ // Create a new stream and ensure it picks up the reduced size mode.
+ FakeVideoSendStream* stream2 = AddSendStream();
+ EXPECT_EQ(webrtc::RtcpMode::kReducedSize, stream2->GetConfig().rtp.rtcp_mode);
+}
+
+// This test verifies that the RTCP reduced size mode is properly applied to
+// receive video streams.
+TEST_F(WebRtcVideoChannel2Test, TestSetRecvRtcpReducedSize) {
+ // Create stream, expecting that default mode is "compound".
+ FakeVideoReceiveStream* stream1 = AddRecvStream();
+ EXPECT_EQ(webrtc::RtcpMode::kCompound, stream1->GetConfig().rtp.rtcp_mode);
+
+ // Now enable reduced size mode.
+ recv_parameters_.rtcp.reduced_size = true;
+ EXPECT_TRUE(channel_->SetRecvParameters(recv_parameters_));
+ stream1 = fake_call_->GetVideoReceiveStreams()[0];
+ EXPECT_EQ(webrtc::RtcpMode::kReducedSize, stream1->GetConfig().rtp.rtcp_mode);
+
+ // Create a new stream and ensure it picks up the reduced size mode.
+ FakeVideoReceiveStream* stream2 = AddRecvStream();
+ EXPECT_EQ(webrtc::RtcpMode::kReducedSize, stream2->GetConfig().rtp.rtcp_mode);
+}
+
TEST_F(WebRtcVideoChannel2Test, OnReadyToSendSignalsNetworkState) {
EXPECT_EQ(webrtc::kNetworkUp, fake_call_->GetNetworkState());
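
Both reduced-size tests exercise the same boolean-to-enum translation. A minimal sketch of that mapping, assuming the channel applies it whenever a stream config is (re)built; params and config are placeholders here:

    // reduced_size == false -> compound RTCP (the RFC 3550 default);
    // reduced_size == true  -> reduced-size RTCP (RFC 5506).
    config.rtp.rtcp_mode = params.rtcp.reduced_size
                               ? webrtc::RtcpMode::kReducedSize
                               : webrtc::RtcpMode::kCompound;
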
@@ -2410,6 +2448,18 @@ TEST_F(WebRtcVideoChannel2Test, GetStatsReportsSentCodecName) {
EXPECT_EQ(kVp8Codec.name, info.senders[0].codec_name);
}
+TEST_F(WebRtcVideoChannel2Test, GetStatsReportsEncoderImplementationName) {
+ FakeVideoSendStream* stream = AddSendStream();
+ webrtc::VideoSendStream::Stats stats;
+ stats.encoder_implementation_name = "encoder_implementation_name";
+ stream->SetStats(stats);
+
+ cricket::VideoMediaInfo info;
+ ASSERT_TRUE(channel_->GetStats(&info));
+ EXPECT_EQ(stats.encoder_implementation_name,
+ info.senders[0].encoder_implementation_name);
+}
+
TEST_F(WebRtcVideoChannel2Test, GetStatsReportsCpuOveruseMetrics) {
FakeVideoSendStream* stream = AddSendStream();
webrtc::VideoSendStream::Stats stats;
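
The new encoder_implementation_name stat is copied through GetStats() verbatim. A short consumption sketch; channel stands in for any configured cricket::VideoMediaChannel:

    cricket::VideoMediaInfo info;
    if (channel->GetStats(&info) && !info.senders.empty()) {
      // E.g. "libvpx" for software VP8, or the name of a hardware encoder.
      LOG(LS_INFO) << "Encoder in use: "
                   << info.senders[0].encoder_implementation_name;
    }
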
@@ -2460,7 +2510,7 @@ TEST_F(WebRtcVideoChannel2Test, GetStatsTracksAdaptationStats) {
EXPECT_TRUE(channel_->SetSend(true));
// Verify that the CpuOveruseObserver is registered and trigger downgrade.
- parameters.options.cpu_overuse_detection.Set(true);
+ parameters.options.cpu_overuse_detection = rtc::Optional<bool>(true);
EXPECT_TRUE(channel_->SetSendParameters(parameters));
// Trigger overuse.
@@ -2518,6 +2568,87 @@ TEST_F(WebRtcVideoChannel2Test, GetStatsTracksAdaptationStats) {
EXPECT_TRUE(channel_->SetCapturer(kSsrcs3[0], NULL));
}
+TEST_F(WebRtcVideoChannel2Test, GetStatsTracksAdaptationAndBandwidthStats) {
+ AddSendStream(cricket::CreateSimStreamParams("cname", MAKE_VECTOR(kSsrcs3)));
+
+ // Capture format VGA.
+ cricket::FakeVideoCapturer video_capturer_vga;
+ const std::vector<cricket::VideoFormat>* formats =
+ video_capturer_vga.GetSupportedFormats();
+ cricket::VideoFormat capture_format_vga = (*formats)[1];
+ EXPECT_EQ(cricket::CS_RUNNING, video_capturer_vga.Start(capture_format_vga));
+ EXPECT_TRUE(channel_->SetCapturer(kSsrcs3[0], &video_capturer_vga));
+ EXPECT_TRUE(video_capturer_vga.CaptureFrame());
+
+ cricket::VideoCodec send_codec(100, "VP8", 640, 480, 30, 0);
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(send_codec);
+ EXPECT_TRUE(channel_->SetSendParameters(parameters));
+ EXPECT_TRUE(channel_->SetSend(true));
+
+ // Verify that the CpuOveruseObserver is registered and trigger downgrade.
+ parameters.options.cpu_overuse_detection = rtc::Optional<bool>(true);
+ EXPECT_TRUE(channel_->SetSendParameters(parameters));
+
+ // Trigger overuse -> adapt CPU.
+ ASSERT_EQ(1u, fake_call_->GetVideoSendStreams().size());
+ webrtc::LoadObserver* overuse_callback =
+ fake_call_->GetVideoSendStreams().front()->GetConfig().overuse_callback;
+ ASSERT_TRUE(overuse_callback != NULL);
+ overuse_callback->OnLoadUpdate(webrtc::LoadObserver::kOveruse);
+ EXPECT_TRUE(video_capturer_vga.CaptureFrame());
+ cricket::VideoMediaInfo info;
+ EXPECT_TRUE(channel_->GetStats(&info));
+ ASSERT_EQ(1U, info.senders.size());
+ EXPECT_EQ(CoordinatedVideoAdapter::ADAPTREASON_CPU,
+ info.senders[0].adapt_reason);
+
+ // Set bandwidth limitation stats for the stream -> adapt CPU + BW.
+ webrtc::VideoSendStream::Stats stats;
+ stats.bw_limited_resolution = true;
+ fake_call_->GetVideoSendStreams().front()->SetStats(stats);
+ info.Clear();
+ EXPECT_TRUE(channel_->GetStats(&info));
+ ASSERT_EQ(1U, info.senders.size());
+ EXPECT_EQ(CoordinatedVideoAdapter::ADAPTREASON_CPU +
+ CoordinatedVideoAdapter::ADAPTREASON_BANDWIDTH,
+ info.senders[0].adapt_reason);
+
+ // Trigger upgrade -> adapt BW.
+ overuse_callback->OnLoadUpdate(webrtc::LoadObserver::kUnderuse);
+ EXPECT_TRUE(video_capturer_vga.CaptureFrame());
+ info.Clear();
+ EXPECT_TRUE(channel_->GetStats(&info));
+ ASSERT_EQ(1U, info.senders.size());
+ EXPECT_EQ(CoordinatedVideoAdapter::ADAPTREASON_BANDWIDTH,
+ info.senders[0].adapt_reason);
+
+ // Reset bandwidth limitation state -> adapt NONE.
+ stats.bw_limited_resolution = false;
+ fake_call_->GetVideoSendStreams().front()->SetStats(stats);
+ info.Clear();
+ EXPECT_TRUE(channel_->GetStats(&info));
+ ASSERT_EQ(1U, info.senders.size());
+ EXPECT_EQ(CoordinatedVideoAdapter::ADAPTREASON_NONE,
+ info.senders[0].adapt_reason);
+
+ EXPECT_TRUE(channel_->SetCapturer(kSsrcs3[0], NULL));
+}
+
+TEST_F(WebRtcVideoChannel2Test,
+ GetStatsTranslatesBandwidthLimitedResolutionCorrectly) {
+ FakeVideoSendStream* stream = AddSendStream();
+ webrtc::VideoSendStream::Stats stats;
+ stats.bw_limited_resolution = true;
+ stream->SetStats(stats);
+
+ cricket::VideoMediaInfo info;
+ EXPECT_TRUE(channel_->GetStats(&info));
+ ASSERT_EQ(1U, info.senders.size());
+ EXPECT_EQ(CoordinatedVideoAdapter::ADAPTREASON_BANDWIDTH,
+ info.senders[0].adapt_reason);
+}
+
TEST_F(WebRtcVideoChannel2Test,
GetStatsTranslatesSendRtcpPacketTypesCorrectly) {
FakeVideoSendStream* stream = AddSendStream();
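
The sum checked above (ADAPTREASON_CPU + ADAPTREASON_BANDWIDTH) only works because the adapt reasons are disjoint bit flags, with ADAPTREASON_NONE as zero. A decoding sketch over the combined value:

    int reason = info.senders[0].adapt_reason;
    bool cpu_limited =
        (reason & CoordinatedVideoAdapter::ADAPTREASON_CPU) != 0;
    bool bw_limited =
        (reason & CoordinatedVideoAdapter::ADAPTREASON_BANDWIDTH) != 0;
    // Both bits set reproduces the CPU + BANDWIDTH case tested above.
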
@@ -2561,6 +2692,7 @@ TEST_F(WebRtcVideoChannel2Test,
TEST_F(WebRtcVideoChannel2Test, GetStatsTranslatesDecodeStatsCorrectly) {
FakeVideoReceiveStream* stream = AddRecvStream();
webrtc::VideoReceiveStream::Stats stats;
+ stats.decoder_implementation_name = "decoder_implementation_name";
stats.decode_ms = 2;
stats.max_decode_ms = 3;
stats.current_delay_ms = 4;
@@ -2572,6 +2704,8 @@ TEST_F(WebRtcVideoChannel2Test, GetStatsTranslatesDecodeStatsCorrectly) {
cricket::VideoMediaInfo info;
ASSERT_TRUE(channel_->GetStats(&info));
+ EXPECT_EQ(stats.decoder_implementation_name,
+ info.receivers[0].decoder_implementation_name);
EXPECT_EQ(stats.decode_ms, info.receivers[0].decode_ms);
EXPECT_EQ(stats.max_decode_ms, info.receivers[0].max_decode_ms);
EXPECT_EQ(stats.current_delay_ms, info.receivers[0].current_delay_ms);
diff --git a/talk/media/webrtc/webrtcvideoframe.cc b/talk/media/webrtc/webrtcvideoframe.cc
index 7da7e3b7fb..fcc991c753 100644
--- a/talk/media/webrtc/webrtcvideoframe.cc
+++ b/talk/media/webrtc/webrtcvideoframe.cc
@@ -56,17 +56,6 @@ WebRtcVideoFrame::WebRtcVideoFrame(
rotation_(rotation) {
}
-WebRtcVideoFrame::WebRtcVideoFrame(
- const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
- int64_t elapsed_time_ns,
- int64_t time_stamp_ns)
- : video_frame_buffer_(buffer),
- pixel_width_(1),
- pixel_height_(1),
- time_stamp_ns_(time_stamp_ns),
- rotation_(webrtc::kVideoRotation_0) {
-}
-
WebRtcVideoFrame::~WebRtcVideoFrame() {}
bool WebRtcVideoFrame::Init(uint32_t format,
@@ -90,13 +79,7 @@ bool WebRtcVideoFrame::Init(const CapturedFrame* frame, int dw, int dh,
return Reset(frame->fourcc, frame->width, frame->height, dw, dh,
static_cast<uint8_t*>(frame->data), frame->data_size,
frame->pixel_width, frame->pixel_height, frame->time_stamp,
- frame->GetRotation(), apply_rotation);
-}
-
-bool WebRtcVideoFrame::InitToBlack(int w, int h, size_t pixel_width,
- size_t pixel_height, int64_t,
- int64_t time_stamp_ns) {
- return InitToBlack(w, h, pixel_width, pixel_height, time_stamp_ns);
+ frame->rotation, apply_rotation);
}
bool WebRtcVideoFrame::InitToBlack(int w, int h, size_t pixel_width,
diff --git a/talk/media/webrtc/webrtcvideoframe.h b/talk/media/webrtc/webrtcvideoframe.h
index 0928c59324..827cf28821 100644
--- a/talk/media/webrtc/webrtcvideoframe.h
+++ b/talk/media/webrtc/webrtcvideoframe.h
@@ -33,7 +33,7 @@
#include "webrtc/base/refcount.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/common_types.h"
-#include "webrtc/common_video/interface/video_frame_buffer.h"
+#include "webrtc/common_video/include/video_frame_buffer.h"
namespace cricket {
@@ -46,11 +46,6 @@ class WebRtcVideoFrame : public VideoFrame {
int64_t time_stamp_ns,
webrtc::VideoRotation rotation);
- // TODO(guoweis): Remove this when chrome code base is updated.
- WebRtcVideoFrame(const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
- int64_t elapsed_time_ns,
- int64_t time_stamp_ns);
-
~WebRtcVideoFrame();
// Creates a frame from a raw sample with FourCC "format" and size "w" x "h".
@@ -74,10 +69,6 @@ class WebRtcVideoFrame : public VideoFrame {
void InitToEmptyBuffer(int w, int h, size_t pixel_width, size_t pixel_height,
int64_t time_stamp_ns);
- // TODO(magjed): Remove once Chromium is updated.
- bool InitToBlack(int w, int h, size_t pixel_width, size_t pixel_height,
- int64_t elapsed_time_ns, int64_t time_stamp_ns);
-
bool InitToBlack(int w, int h, size_t pixel_width, size_t pixel_height,
int64_t time_stamp_ns) override;
diff --git a/talk/media/webrtc/webrtcvoe.h b/talk/media/webrtc/webrtcvoe.h
index db6a64a1fe..aa705a014d 100644
--- a/talk/media/webrtc/webrtcvoe.h
+++ b/talk/media/webrtc/webrtcvoe.h
@@ -36,7 +36,6 @@
#include "webrtc/voice_engine/include/voe_audio_processing.h"
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_codec.h"
-#include "webrtc/voice_engine/include/voe_dtmf.h"
#include "webrtc/voice_engine/include/voe_errors.h"
#include "webrtc/voice_engine/include/voe_hardware.h"
#include "webrtc/voice_engine/include/voe_network.h"
@@ -91,14 +90,13 @@ class VoEWrapper {
public:
VoEWrapper()
: engine_(webrtc::VoiceEngine::Create()), processing_(engine_),
- base_(engine_), codec_(engine_), dtmf_(engine_),
+ base_(engine_), codec_(engine_),
hw_(engine_), network_(engine_),
rtp_(engine_), volume_(engine_) {
}
VoEWrapper(webrtc::VoEAudioProcessing* processing,
webrtc::VoEBase* base,
webrtc::VoECodec* codec,
- webrtc::VoEDtmf* dtmf,
webrtc::VoEHardware* hw,
webrtc::VoENetwork* network,
webrtc::VoERTP_RTCP* rtp,
@@ -107,7 +105,6 @@ class VoEWrapper {
processing_(processing),
base_(base),
codec_(codec),
- dtmf_(dtmf),
hw_(hw),
network_(network),
rtp_(rtp),
@@ -118,7 +115,6 @@ class VoEWrapper {
webrtc::VoEAudioProcessing* processing() const { return processing_.get(); }
webrtc::VoEBase* base() const { return base_.get(); }
webrtc::VoECodec* codec() const { return codec_.get(); }
- webrtc::VoEDtmf* dtmf() const { return dtmf_.get(); }
webrtc::VoEHardware* hw() const { return hw_.get(); }
webrtc::VoENetwork* network() const { return network_.get(); }
webrtc::VoERTP_RTCP* rtp() const { return rtp_.get(); }
@@ -130,29 +126,11 @@ class VoEWrapper {
scoped_voe_ptr<webrtc::VoEAudioProcessing> processing_;
scoped_voe_ptr<webrtc::VoEBase> base_;
scoped_voe_ptr<webrtc::VoECodec> codec_;
- scoped_voe_ptr<webrtc::VoEDtmf> dtmf_;
scoped_voe_ptr<webrtc::VoEHardware> hw_;
scoped_voe_ptr<webrtc::VoENetwork> network_;
scoped_voe_ptr<webrtc::VoERTP_RTCP> rtp_;
scoped_voe_ptr<webrtc::VoEVolumeControl> volume_;
};
-
-// Adds indirection to static WebRtc functions, allowing them to be mocked.
-class VoETraceWrapper {
- public:
- virtual ~VoETraceWrapper() {}
-
- virtual int SetTraceFilter(const unsigned int filter) {
- return webrtc::VoiceEngine::SetTraceFilter(filter);
- }
- virtual int SetTraceFile(const char* fileNameUTF8) {
- return webrtc::VoiceEngine::SetTraceFile(fileNameUTF8);
- }
- virtual int SetTraceCallback(webrtc::TraceCallback* callback) {
- return webrtc::VoiceEngine::SetTraceCallback(callback);
- }
-};
-
} // namespace cricket
#endif // TALK_MEDIA_WEBRTCVOE_H_
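
With VoEDtmf and VoETraceWrapper removed, tracing is wired straight to the static webrtc::Trace API, as the webrtcvoiceengine.cc hunks below show. A minimal usage sketch assembled only from the calls visible in this CL:

    #include "webrtc/system_wrappers/include/trace.h"

    void EnableDefaultTracing(webrtc::TraceCallback* callback) {
      webrtc::Trace::set_level_filter(webrtc::kTraceWarning |
                                      webrtc::kTraceError |
                                      webrtc::kTraceCritical);
      webrtc::Trace::SetTraceCallback(callback);  // Pass nullptr to detach.
    }
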
diff --git a/talk/media/webrtc/webrtcvoiceengine.cc b/talk/media/webrtc/webrtcvoiceengine.cc
index 27ca1deb2d..9192b72539 100644
--- a/talk/media/webrtc/webrtcvoiceengine.cc
+++ b/talk/media/webrtc/webrtcvoiceengine.cc
@@ -42,7 +42,10 @@
#include "talk/media/base/audiorenderer.h"
#include "talk/media/base/constants.h"
#include "talk/media/base/streamparams.h"
+#include "talk/media/webrtc/webrtcmediaengine.h"
#include "talk/media/webrtc/webrtcvoe.h"
+#include "webrtc/audio/audio_sink.h"
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/base64.h"
#include "webrtc/base/byteorder.h"
#include "webrtc/base/common.h"
@@ -52,53 +55,26 @@
#include "webrtc/base/stringutils.h"
#include "webrtc/call/rtc_event_log.h"
#include "webrtc/common.h"
+#include "webrtc/modules/audio_coding/acm2/rent_a_codec.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/system_wrappers/include/field_trial.h"
+#include "webrtc/system_wrappers/include/trace.h"
namespace cricket {
namespace {
-const int kMaxNumPacketSize = 6;
-struct CodecPref {
- const char* name;
- int clockrate;
- int channels;
- int payload_type;
- bool is_multi_rate;
- int packet_sizes_ms[kMaxNumPacketSize];
-};
-// Note: keep the supported packet sizes in ascending order.
-const CodecPref kCodecPrefs[] = {
- { kOpusCodecName, 48000, 2, 111, true, { 10, 20, 40, 60 } },
- { kIsacCodecName, 16000, 1, 103, true, { 30, 60 } },
- { kIsacCodecName, 32000, 1, 104, true, { 30 } },
- // G722 should be advertised as 8000 Hz because of the RFC "bug".
- { kG722CodecName, 8000, 1, 9, false, { 10, 20, 30, 40, 50, 60 } },
- { kIlbcCodecName, 8000, 1, 102, false, { 20, 30, 40, 60 } },
- { kPcmuCodecName, 8000, 1, 0, false, { 10, 20, 30, 40, 50, 60 } },
- { kPcmaCodecName, 8000, 1, 8, false, { 10, 20, 30, 40, 50, 60 } },
- { kCnCodecName, 32000, 1, 106, false, { } },
- { kCnCodecName, 16000, 1, 105, false, { } },
- { kCnCodecName, 8000, 1, 13, false, { } },
- { kRedCodecName, 8000, 1, 127, false, { } },
- { kDtmfCodecName, 8000, 1, 126, false, { } },
-};
+const int kDefaultTraceFilter = webrtc::kTraceNone | webrtc::kTraceTerseInfo |
+ webrtc::kTraceWarning | webrtc::kTraceError |
+ webrtc::kTraceCritical;
+const int kElevatedTraceFilter = kDefaultTraceFilter | webrtc::kTraceStateInfo |
+ webrtc::kTraceInfo;
-// For Linux/Mac, using the default device is done by specifying index 0 for
-// VoE 4.0 and not -1 (which was the case for VoE 3.5).
-//
// On Windows Vista and newer, Microsoft introduced the concept of "Default
// Communications Device". This means that there are two types of default
// devices (old Wave Audio style default and Default Communications Device).
//
// On Windows systems which only support Wave Audio style default, use either
// -1 or 0 to select the default device.
-//
-// On Windows systems which support both "Default Communication Device" and
-// old Wave Audio style default, use -1 for Default Communications Device and
-// -2 for Wave Audio style default, which is what we want to use for clips.
-// It's not clear yet whether the -2 index is handled properly on other OSes.
-
#ifdef WIN32
const int kDefaultAudioDeviceId = -1;
#else
@@ -150,6 +126,12 @@ const char kAecDumpByAudioOptionFilename[] = "/sdcard/audio.aecdump";
const char kAecDumpByAudioOptionFilename[] = "audio.aecdump";
#endif
+// Constants from voice_engine_defines.h.
+const int kMinTelephoneEventCode = 0; // RFC4733 (Section 2.3.1)
+const int kMaxTelephoneEventCode = 255;
+const int kMinTelephoneEventDuration = 100;
+const int kMaxTelephoneEventDuration = 60000; // Actual limit is 2^16
+
bool ValidateStreamParams(const StreamParams& sp) {
if (sp.ssrcs.empty()) {
LOG(LS_ERROR) << "No SSRCs in stream parameters: " << sp.ToString();
@@ -177,32 +159,6 @@ std::string ToString(const webrtc::CodecInst& codec) {
return ss.str();
}
-void LogMultiline(rtc::LoggingSeverity sev, char* text) {
- const char* delim = "\r\n";
- for (char* tok = strtok(text, delim); tok; tok = strtok(NULL, delim)) {
- LOG_V(sev) << tok;
- }
-}
-
-// Severity is an integer because it is assumed to come from the command line.
-int SeverityToFilter(int severity) {
- int filter = webrtc::kTraceNone;
- switch (severity) {
- case rtc::LS_VERBOSE:
- filter |= webrtc::kTraceAll;
- FALLTHROUGH();
- case rtc::LS_INFO:
- filter |= (webrtc::kTraceStateInfo | webrtc::kTraceInfo);
- FALLTHROUGH();
- case rtc::LS_WARNING:
- filter |= (webrtc::kTraceTerseInfo | webrtc::kTraceWarning);
- FALLTHROUGH();
- case rtc::LS_ERROR:
- filter |= (webrtc::kTraceError | webrtc::kTraceCritical);
- }
- return filter;
-}
-
bool IsCodec(const AudioCodec& codec, const char* ref_name) {
return (_stricmp(codec.name.c_str(), ref_name) == 0);
}
@@ -211,19 +167,9 @@ bool IsCodec(const webrtc::CodecInst& codec, const char* ref_name) {
return (_stricmp(codec.plname, ref_name) == 0);
}
-bool IsCodecMultiRate(const webrtc::CodecInst& codec) {
- for (size_t i = 0; i < ARRAY_SIZE(kCodecPrefs); ++i) {
- if (IsCodec(codec, kCodecPrefs[i].name) &&
- kCodecPrefs[i].clockrate == codec.plfreq) {
- return kCodecPrefs[i].is_multi_rate;
- }
- }
- return false;
-}
-
bool FindCodec(const std::vector<AudioCodec>& codecs,
- const AudioCodec& codec,
- AudioCodec* found_codec) {
+ const AudioCodec& codec,
+ AudioCodec* found_codec) {
for (const AudioCodec& c : codecs) {
if (c.Matches(codec)) {
if (found_codec != NULL) {
@@ -253,38 +199,8 @@ bool IsNackEnabled(const AudioCodec& codec) {
kParamValueEmpty));
}
-int SelectPacketSize(const CodecPref& codec_pref, int ptime_ms) {
- int selected_packet_size_ms = codec_pref.packet_sizes_ms[0];
- for (int packet_size_ms : codec_pref.packet_sizes_ms) {
- if (packet_size_ms && packet_size_ms <= ptime_ms) {
- selected_packet_size_ms = packet_size_ms;
- }
- }
- return selected_packet_size_ms;
-}
-
-// If the AudioCodec param kCodecParamPTime is set, then we will set it to codec
-// pacsize if it's valid, or we will pick the next smallest value we support.
-// TODO(Brave): Query supported packet sizes from ACM when the API is ready.
-bool SetPTimeAsPacketSize(webrtc::CodecInst* codec, int ptime_ms) {
- for (const CodecPref& codec_pref : kCodecPrefs) {
- if ((IsCodec(*codec, codec_pref.name) &&
- codec_pref.clockrate == codec->plfreq) ||
- IsCodec(*codec, kG722CodecName)) {
- int packet_size_ms = SelectPacketSize(codec_pref, ptime_ms);
- if (packet_size_ms) {
- // Convert unit from milli-seconds to samples.
- codec->pacsize = (codec->plfreq / 1000) * packet_size_ms;
- return true;
- }
- }
- }
- return false;
-}
-
// Return true if codec.params[feature] == "1", false otherwise.
-bool IsCodecFeatureEnabled(const AudioCodec& codec,
- const char* feature) {
+bool IsCodecFeatureEnabled(const AudioCodec& codec, const char* feature) {
int value;
return codec.GetParam(feature, &value) && value == 1;
}
@@ -351,109 +267,29 @@ void GetOpusConfig(const AudioCodec& codec, webrtc::CodecInst* voe_codec,
voe_codec->rate = GetOpusBitrate(codec, *max_playback_rate);
}
-// Changes RTP timestamp rate of G722. This is due to the "bug" in the RFC
-// which says that G722 should be advertised as 8 kHz although it is a 16 kHz
-// codec.
-void MaybeFixupG722(webrtc::CodecInst* voe_codec, int new_plfreq) {
- if (IsCodec(*voe_codec, kG722CodecName)) {
- // If the ASSERT triggers, the codec definition in WebRTC VoiceEngine
- // has changed, and this special case is no longer needed.
- RTC_DCHECK(voe_codec->plfreq != new_plfreq);
- voe_codec->plfreq = new_plfreq;
- }
-}
-
-// Gets the default set of options applied to the engine. Historically, these
-// were supplied as a combination of flags from the channel manager (ec, agc,
-// ns, and highpass) and the rest hardcoded in InitInternal.
-AudioOptions GetDefaultEngineOptions() {
- AudioOptions options;
- options.echo_cancellation.Set(true);
- options.auto_gain_control.Set(true);
- options.noise_suppression.Set(true);
- options.highpass_filter.Set(true);
- options.stereo_swapping.Set(false);
- options.audio_jitter_buffer_max_packets.Set(50);
- options.audio_jitter_buffer_fast_accelerate.Set(false);
- options.typing_detection.Set(true);
- options.adjust_agc_delta.Set(0);
- options.experimental_agc.Set(false);
- options.extended_filter_aec.Set(false);
- options.delay_agnostic_aec.Set(false);
- options.experimental_ns.Set(false);
- options.aec_dump.Set(false);
- return options;
-}
-
-std::string GetEnableString(bool enable) {
- return enable ? "enable" : "disable";
-}
-} // namespace {
-
-WebRtcVoiceEngine::WebRtcVoiceEngine()
- : voe_wrapper_(new VoEWrapper()),
- tracing_(new VoETraceWrapper()),
- adm_(NULL),
- log_filter_(SeverityToFilter(kDefaultLogSeverity)),
- is_dumping_aec_(false) {
- Construct();
-}
-
-WebRtcVoiceEngine::WebRtcVoiceEngine(VoEWrapper* voe_wrapper,
- VoETraceWrapper* tracing)
- : voe_wrapper_(voe_wrapper),
- tracing_(tracing),
- adm_(NULL),
- log_filter_(SeverityToFilter(kDefaultLogSeverity)),
- is_dumping_aec_(false) {
- Construct();
-}
-
-void WebRtcVoiceEngine::Construct() {
- SetTraceFilter(log_filter_);
- initialized_ = false;
- LOG(LS_VERBOSE) << "WebRtcVoiceEngine::WebRtcVoiceEngine";
- SetTraceOptions("");
- if (tracing_->SetTraceCallback(this) == -1) {
- LOG_RTCERR0(SetTraceCallback);
- }
- if (voe_wrapper_->base()->RegisterVoiceEngineObserver(*this) == -1) {
- LOG_RTCERR0(RegisterVoiceEngineObserver);
- }
- // Clear the default agc state.
- memset(&default_agc_config_, 0, sizeof(default_agc_config_));
-
- // Load our audio codec list.
- ConstructCodecs();
-
- // Load our RTP Header extensions.
- rtp_header_extensions_.push_back(
- RtpHeaderExtension(kRtpAudioLevelHeaderExtension,
- kRtpAudioLevelHeaderExtensionDefaultId));
- rtp_header_extensions_.push_back(
- RtpHeaderExtension(kRtpAbsoluteSenderTimeHeaderExtension,
- kRtpAbsoluteSenderTimeHeaderExtensionDefaultId));
- if (webrtc::field_trial::FindFullName("WebRTC-SendSideBwe") == "Enabled") {
- rtp_header_extensions_.push_back(RtpHeaderExtension(
- kRtpTransportSequenceNumberHeaderExtension,
- kRtpTransportSequenceNumberHeaderExtensionDefaultId));
- }
- options_ = GetDefaultEngineOptions();
+webrtc::AudioState::Config MakeAudioStateConfig(VoEWrapper* voe_wrapper) {
+ webrtc::AudioState::Config config;
+ config.voice_engine = voe_wrapper->engine();
+ return config;
}
-void WebRtcVoiceEngine::ConstructCodecs() {
- LOG(LS_INFO) << "WebRtc VoiceEngine codecs:";
- int ncodecs = voe_wrapper_->codec()->NumOfCodecs();
- for (int i = 0; i < ncodecs; ++i) {
- webrtc::CodecInst voe_codec;
- if (GetVoeCodec(i, &voe_codec)) {
+class WebRtcVoiceCodecs final {
+ public:
+  // TODO(solenberg): Do this filtering offline once, add a simple AudioCodec
+ // list and add a test which verifies VoE supports the listed codecs.
+ static std::vector<AudioCodec> SupportedCodecs() {
+ LOG(LS_INFO) << "WebRtc VoiceEngine codecs:";
+ std::vector<AudioCodec> result;
+ for (webrtc::CodecInst voe_codec : webrtc::acm2::RentACodec::Database()) {
+ // Change the sample rate of G722 to 8000 to match SDP.
+ MaybeFixupG722(&voe_codec, 8000);
// Skip uncompressed formats.
if (IsCodec(voe_codec, kL16CodecName)) {
continue;
}
const CodecPref* pref = NULL;
- for (size_t j = 0; j < ARRAY_SIZE(kCodecPrefs); ++j) {
+ for (size_t j = 0; j < arraysize(kCodecPrefs); ++j) {
if (IsCodec(voe_codec, kCodecPrefs[j].name) &&
kCodecPrefs[j].clockrate == voe_codec.plfreq &&
kCodecPrefs[j].channels == voe_codec.channels) {
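
A worked example of the G722 quirk that MaybeFixupG722 (defined further down in this class) compensates for: RFC 3551 pins G722's RTP clock rate at 8000 Hz even though the codec samples at 16 kHz, so the SDP-facing and engine-facing rates must differ. Illustrative arithmetic only:

    const int engine_plfreq = 16000;  // VoE runs G722 at 16 kHz.
    const int sdp_clockrate = 8000;   // RFC 3551 says advertise 8000 Hz.
    const int ptime_ms = 20;
    int samples_per_packet = (engine_plfreq / 1000) * ptime_ms;  // 320
    int rtp_ts_step = (sdp_clockrate / 1000) * ptime_ms;         // 160
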
@@ -465,9 +301,10 @@ void WebRtcVoiceEngine::ConstructCodecs() {
if (pref) {
// Use the payload type that we've configured in our pref table;
// use the offset in our pref table to determine the sort order.
- AudioCodec codec(pref->payload_type, voe_codec.plname, voe_codec.plfreq,
- voe_codec.rate, voe_codec.channels,
- ARRAY_SIZE(kCodecPrefs) - (pref - kCodecPrefs));
+ AudioCodec codec(
+ pref->payload_type, voe_codec.plname, voe_codec.plfreq,
+ voe_codec.rate, voe_codec.channels,
+ static_cast<int>(arraysize(kCodecPrefs)) - (pref - kCodecPrefs));
LOG(LS_INFO) << ToString(codec);
if (IsCodec(codec, kIsacCodecName)) {
// Indicate auto-bitrate in signaling.
@@ -488,40 +325,183 @@ void WebRtcVoiceEngine::ConstructCodecs() {
// TODO(hellner): Add ptime, sprop-stereo, and stereo
// when they can be set to values other than the default.
}
- codecs_.push_back(codec);
+ result.push_back(codec);
} else {
LOG(LS_WARNING) << "Unexpected codec: " << ToString(voe_codec);
}
}
+ // Make sure they are in local preference order.
+ std::sort(result.begin(), result.end(), &AudioCodec::Preferable);
+ return result;
+ }
+
+ static bool ToCodecInst(const AudioCodec& in,
+ webrtc::CodecInst* out) {
+ for (webrtc::CodecInst voe_codec : webrtc::acm2::RentACodec::Database()) {
+ // Change the sample rate of G722 to 8000 to match SDP.
+ MaybeFixupG722(&voe_codec, 8000);
+ AudioCodec codec(voe_codec.pltype, voe_codec.plname, voe_codec.plfreq,
+ voe_codec.rate, voe_codec.channels, 0);
+ bool multi_rate = IsCodecMultiRate(voe_codec);
+ // Allow arbitrary rates for ISAC to be specified.
+ if (multi_rate) {
+ // Set codec.bitrate to 0 so the check for codec.Matches() passes.
+ codec.bitrate = 0;
+ }
+ if (codec.Matches(in)) {
+ if (out) {
+ // Fixup the payload type.
+ voe_codec.pltype = in.id;
+
+ // Set bitrate if specified.
+ if (multi_rate && in.bitrate != 0) {
+ voe_codec.rate = in.bitrate;
+ }
+
+ // Reset G722 sample rate to 16000 to match WebRTC.
+ MaybeFixupG722(&voe_codec, 16000);
+
+ // Apply codec-specific settings.
+ if (IsCodec(codec, kIsacCodecName)) {
+          // If the codec is ISAC and no explicit bitrate is specified,
+          // enable auto bitrate adjustment.
+ voe_codec.rate = (in.bitrate > 0) ? in.bitrate : -1;
+ }
+ *out = voe_codec;
+ }
+ return true;
+ }
+ }
+ return false;
}
- // Make sure they are in local preference order.
- std::sort(codecs_.begin(), codecs_.end(), &AudioCodec::Preferable);
-}
-bool WebRtcVoiceEngine::GetVoeCodec(int index, webrtc::CodecInst* codec) {
- if (voe_wrapper_->codec()->GetCodec(index, *codec) == -1) {
+ static bool IsCodecMultiRate(const webrtc::CodecInst& codec) {
+ for (size_t i = 0; i < arraysize(kCodecPrefs); ++i) {
+ if (IsCodec(codec, kCodecPrefs[i].name) &&
+ kCodecPrefs[i].clockrate == codec.plfreq) {
+ return kCodecPrefs[i].is_multi_rate;
+ }
+ }
return false;
}
- // Change the sample rate of G722 to 8000 to match SDP.
- MaybeFixupG722(codec, 8000);
- return true;
+
+ // If the AudioCodec param kCodecParamPTime is set, then we will set it to
+ // codec pacsize if it's valid, or we will pick the next smallest value we
+ // support.
+ // TODO(Brave): Query supported packet sizes from ACM when the API is ready.
+ static bool SetPTimeAsPacketSize(webrtc::CodecInst* codec, int ptime_ms) {
+ for (const CodecPref& codec_pref : kCodecPrefs) {
+ if ((IsCodec(*codec, codec_pref.name) &&
+ codec_pref.clockrate == codec->plfreq) ||
+ IsCodec(*codec, kG722CodecName)) {
+ int packet_size_ms = SelectPacketSize(codec_pref, ptime_ms);
+ if (packet_size_ms) {
+          // Convert unit from milliseconds to samples.
+ codec->pacsize = (codec->plfreq / 1000) * packet_size_ms;
+ return true;
+ }
+ }
+ }
+ return false;
+ }
+
+ private:
+ static const int kMaxNumPacketSize = 6;
+ struct CodecPref {
+ const char* name;
+ int clockrate;
+ size_t channels;
+ int payload_type;
+ bool is_multi_rate;
+ int packet_sizes_ms[kMaxNumPacketSize];
+ };
+ // Note: keep the supported packet sizes in ascending order.
+ static const CodecPref kCodecPrefs[12];
+
+ static int SelectPacketSize(const CodecPref& codec_pref, int ptime_ms) {
+ int selected_packet_size_ms = codec_pref.packet_sizes_ms[0];
+ for (int packet_size_ms : codec_pref.packet_sizes_ms) {
+ if (packet_size_ms && packet_size_ms <= ptime_ms) {
+ selected_packet_size_ms = packet_size_ms;
+ }
+ }
+ return selected_packet_size_ms;
+ }
+
+ // Changes RTP timestamp rate of G722. This is due to the "bug" in the RFC
+ // which says that G722 should be advertised as 8 kHz although it is a 16 kHz
+ // codec.
+ static void MaybeFixupG722(webrtc::CodecInst* voe_codec, int new_plfreq) {
+ if (IsCodec(*voe_codec, kG722CodecName)) {
+ // If the ASSERT triggers, the codec definition in WebRTC VoiceEngine
+ // has changed, and this special case is no longer needed.
+ RTC_DCHECK(voe_codec->plfreq != new_plfreq);
+ voe_codec->plfreq = new_plfreq;
+ }
+ }
+};
+
+const WebRtcVoiceCodecs::CodecPref WebRtcVoiceCodecs::kCodecPrefs[12] = {
+ { kOpusCodecName, 48000, 2, 111, true, { 10, 20, 40, 60 } },
+ { kIsacCodecName, 16000, 1, 103, true, { 30, 60 } },
+ { kIsacCodecName, 32000, 1, 104, true, { 30 } },
+ // G722 should be advertised as 8000 Hz because of the RFC "bug".
+ { kG722CodecName, 8000, 1, 9, false, { 10, 20, 30, 40, 50, 60 } },
+ { kIlbcCodecName, 8000, 1, 102, false, { 20, 30, 40, 60 } },
+ { kPcmuCodecName, 8000, 1, 0, false, { 10, 20, 30, 40, 50, 60 } },
+ { kPcmaCodecName, 8000, 1, 8, false, { 10, 20, 30, 40, 50, 60 } },
+ { kCnCodecName, 32000, 1, 106, false, { } },
+ { kCnCodecName, 16000, 1, 105, false, { } },
+ { kCnCodecName, 8000, 1, 13, false, { } },
+ { kRedCodecName, 8000, 1, 127, false, { } },
+ { kDtmfCodecName, 8000, 1, 126, false, { } },
+};
+} // namespace {
+
+bool WebRtcVoiceEngine::ToCodecInst(const AudioCodec& in,
+ webrtc::CodecInst* out) {
+ return WebRtcVoiceCodecs::ToCodecInst(in, out);
+}
+
+WebRtcVoiceEngine::WebRtcVoiceEngine()
+ : voe_wrapper_(new VoEWrapper()),
+ audio_state_(webrtc::AudioState::Create(MakeAudioStateConfig(voe()))) {
+ Construct();
+}
+
+WebRtcVoiceEngine::WebRtcVoiceEngine(VoEWrapper* voe_wrapper)
+ : voe_wrapper_(voe_wrapper) {
+ Construct();
+}
+
+void WebRtcVoiceEngine::Construct() {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ LOG(LS_VERBOSE) << "WebRtcVoiceEngine::WebRtcVoiceEngine";
+
+ signal_thread_checker_.DetachFromThread();
+ std::memset(&default_agc_config_, 0, sizeof(default_agc_config_));
+ voe_config_.Set<webrtc::VoicePacing>(new webrtc::VoicePacing(true));
+
+ webrtc::Trace::set_level_filter(kDefaultTraceFilter);
+ webrtc::Trace::SetTraceCallback(this);
+
+ // Load our audio codec list.
+ codecs_ = WebRtcVoiceCodecs::SupportedCodecs();
}
WebRtcVoiceEngine::~WebRtcVoiceEngine() {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
LOG(LS_VERBOSE) << "WebRtcVoiceEngine::~WebRtcVoiceEngine";
- if (voe_wrapper_->base()->DeRegisterVoiceEngineObserver() == -1) {
- LOG_RTCERR0(DeRegisterVoiceEngineObserver);
- }
if (adm_) {
voe_wrapper_.reset();
adm_->Release();
adm_ = NULL;
}
-
- tracing_->SetTraceCallback(NULL);
+ webrtc::Trace::SetTraceCallback(nullptr);
}
bool WebRtcVoiceEngine::Init(rtc::Thread* worker_thread) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
RTC_DCHECK(worker_thread == rtc::Thread::Current());
LOG(LS_INFO) << "WebRtcVoiceEngine::Init";
bool res = InitInternal();
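
SelectPacketSize in the hunk above depends on kCodecPrefs keeping packet sizes in ascending order: the loop ends with the largest supported size that does not exceed the requested ptime, and falls back to the smallest size when the request undershoots them all. A worked example against the Opus row {10, 20, 40, 60}:

    int sizes[] = {10, 20, 40, 60};  // Opus packet sizes, ascending.
    int ptime_ms = 50;
    int selected = sizes[0];         // Fallback: smallest supported size.
    for (int s : sizes) {
      if (s && s <= ptime_ms)
        selected = s;                // Keeps growing while s <= ptime_ms.
    }
    // selected == 40; SetPTimeAsPacketSize then converts to samples:
    // pacsize = (48000 / 1000) * 40 = 1920.
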
@@ -535,59 +515,37 @@ bool WebRtcVoiceEngine::Init(rtc::Thread* worker_thread) {
}
bool WebRtcVoiceEngine::InitInternal() {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
// Temporarily turn logging level up for the Init call
- int old_filter = log_filter_;
- int extended_filter = log_filter_ | SeverityToFilter(rtc::LS_INFO);
- SetTraceFilter(extended_filter);
- SetTraceOptions("");
-
- // Init WebRtc VoiceEngine.
+ webrtc::Trace::set_level_filter(kElevatedTraceFilter);
+ LOG(LS_INFO) << webrtc::VoiceEngine::GetVersionString();
if (voe_wrapper_->base()->Init(adm_) == -1) {
LOG_RTCERR0_EX(Init, voe_wrapper_->error());
- SetTraceFilter(old_filter);
return false;
}
-
- SetTraceFilter(old_filter);
- SetTraceOptions(log_options_);
-
- // Log the VoiceEngine version info
- char buffer[1024] = "";
- voe_wrapper_->base()->GetVersion(buffer);
- LOG(LS_INFO) << "WebRtc VoiceEngine Version:";
- LogMultiline(rtc::LS_INFO, buffer);
+ webrtc::Trace::set_level_filter(kDefaultTraceFilter);
// Save the default AGC configuration settings. This must happen before
- // calling SetOptions or the default will be overwritten.
+ // calling ApplyOptions or the default will be overwritten.
if (voe_wrapper_->processing()->GetAgcConfig(default_agc_config_) == -1) {
LOG_RTCERR0(GetAgcConfig);
return false;
}
- // Set defaults for options, so that ApplyOptions applies them explicitly
- // when we clear option (channel) overrides. External clients can still
- // modify the defaults via SetOptions (on the media engine).
- if (!SetOptions(GetDefaultEngineOptions())) {
- return false;
- }
-
// Print our codec list again for the call diagnostic log
LOG(LS_INFO) << "WebRtc VoiceEngine codecs:";
for (const AudioCodec& codec : codecs_) {
LOG(LS_INFO) << ToString(codec);
}
- // Disable the DTMF playout when a tone is sent.
- // PlayDtmfTone will be used if local playout is needed.
- if (voe_wrapper_->dtmf()->SetDtmfFeedbackStatus(false) == -1) {
- LOG_RTCERR1(SetDtmfFeedbackStatus, false);
- }
+ SetDefaultDevices();
initialized_ = true;
return true;
}
void WebRtcVoiceEngine::Terminate() {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
LOG(LS_INFO) << "WebRtcVoiceEngine::Terminate";
initialized_ = false;
@@ -596,62 +554,81 @@ void WebRtcVoiceEngine::Terminate() {
voe_wrapper_->base()->Terminate();
}
+rtc::scoped_refptr<webrtc::AudioState>
+ WebRtcVoiceEngine::GetAudioState() const {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ return audio_state_;
+}
+
VoiceMediaChannel* WebRtcVoiceEngine::CreateChannel(webrtc::Call* call,
const AudioOptions& options) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
return new WebRtcVoiceMediaChannel(this, options, call);
}
-bool WebRtcVoiceEngine::SetOptions(const AudioOptions& options) {
- if (!ApplyOptions(options)) {
- return false;
- }
- options_ = options;
- return true;
-}
-
-// AudioOptions defaults are set in InitInternal (for options with corresponding
-// MediaEngineInterface flags) and in SetOptions(int) for flagless options.
bool WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
LOG(LS_INFO) << "ApplyOptions: " << options_in.ToString();
- AudioOptions options = options_in; // The options are modified below.
+
+ // Default engine options.
+ AudioOptions options;
+ options.echo_cancellation = rtc::Optional<bool>(true);
+ options.auto_gain_control = rtc::Optional<bool>(true);
+ options.noise_suppression = rtc::Optional<bool>(true);
+ options.highpass_filter = rtc::Optional<bool>(true);
+ options.stereo_swapping = rtc::Optional<bool>(false);
+ options.audio_jitter_buffer_max_packets = rtc::Optional<int>(50);
+ options.audio_jitter_buffer_fast_accelerate = rtc::Optional<bool>(false);
+ options.typing_detection = rtc::Optional<bool>(true);
+ options.adjust_agc_delta = rtc::Optional<int>(0);
+ options.experimental_agc = rtc::Optional<bool>(false);
+ options.extended_filter_aec = rtc::Optional<bool>(false);
+ options.delay_agnostic_aec = rtc::Optional<bool>(false);
+ options.experimental_ns = rtc::Optional<bool>(false);
+ options.aec_dump = rtc::Optional<bool>(false);
+
+ // Apply any given options on top.
+ options.SetAll(options_in);
+
// kEcConference is AEC with high suppression.
webrtc::EcModes ec_mode = webrtc::kEcConference;
webrtc::AecmModes aecm_mode = webrtc::kAecmSpeakerphone;
webrtc::AgcModes agc_mode = webrtc::kAgcAdaptiveAnalog;
webrtc::NsModes ns_mode = webrtc::kNsHighSuppression;
- bool aecm_comfort_noise = false;
- if (options.aecm_generate_comfort_noise.Get(&aecm_comfort_noise)) {
+ if (options.aecm_generate_comfort_noise) {
LOG(LS_VERBOSE) << "Comfort noise explicitly set to "
- << aecm_comfort_noise << " (default is false).";
+ << *options.aecm_generate_comfort_noise
+ << " (default is false).";
}
-#if defined(IOS)
+#if defined(WEBRTC_IOS)
// On iOS, VPIO provides built-in EC and AGC.
- options.echo_cancellation.Set(false);
- options.auto_gain_control.Set(false);
+ options.echo_cancellation = rtc::Optional<bool>(false);
+ options.auto_gain_control = rtc::Optional<bool>(false);
LOG(LS_INFO) << "Always disable AEC and AGC on iOS. Use built-in instead.";
#elif defined(ANDROID)
ec_mode = webrtc::kEcAecm;
#endif
-#if defined(IOS) || defined(ANDROID)
+#if defined(WEBRTC_IOS) || defined(ANDROID)
// Set the AGC mode for iOS as well despite disabling it above, to avoid
// unsupported configuration errors from webrtc.
agc_mode = webrtc::kAgcFixedDigital;
- options.typing_detection.Set(false);
- options.experimental_agc.Set(false);
- options.extended_filter_aec.Set(false);
- options.experimental_ns.Set(false);
+ options.typing_detection = rtc::Optional<bool>(false);
+ options.experimental_agc = rtc::Optional<bool>(false);
+ options.extended_filter_aec = rtc::Optional<bool>(false);
+ options.experimental_ns = rtc::Optional<bool>(false);
#endif
// Delay Agnostic AEC automatically turns on EC if not set except on iOS
// where the feature is not supported.
bool use_delay_agnostic_aec = false;
-#if !defined(IOS)
- if (options.delay_agnostic_aec.Get(&use_delay_agnostic_aec)) {
+#if !defined(WEBRTC_IOS)
+ if (options.delay_agnostic_aec) {
+ use_delay_agnostic_aec = *options.delay_agnostic_aec;
if (use_delay_agnostic_aec) {
- options.echo_cancellation.Set(true);
- options.extended_filter_aec.Set(true);
+ options.echo_cancellation = rtc::Optional<bool>(true);
+ options.extended_filter_aec = rtc::Optional<bool>(true);
ec_mode = webrtc::kEcConference;
}
}
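
ApplyOptions now starts from a full set of engine defaults and overlays the caller's options via SetAll(), so every field in the default set is applied explicitly on each call; fields outside it (e.g. recording_sample_rate) still mean "leave VoE alone" when unset. A minimal sketch of the overlay semantics:

    AudioOptions options;  // Engine defaults, as in the hunk above.
    options.echo_cancellation = rtc::Optional<bool>(true);
    options.typing_detection = rtc::Optional<bool>(true);

    AudioOptions incoming;  // Caller overrides a single field.
    incoming.typing_detection = rtc::Optional<bool>(false);

    options.SetAll(incoming);
    // options.echo_cancellation -> true  (default preserved)
    // options.typing_detection  -> false (override applied)
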
@@ -659,8 +636,7 @@ bool WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) {
webrtc::VoEAudioProcessing* voep = voe_wrapper_->processing();
- bool echo_cancellation = false;
- if (options.echo_cancellation.Get(&echo_cancellation)) {
+ if (options.echo_cancellation) {
// Check if platform supports built-in EC. Currently only supported on
// Android and in combination with Java based audio layer.
// TODO(henrika): investigate possibility to support built-in EC also
@@ -671,63 +647,61 @@ bool WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) {
// overriding it. Enable/Disable it according to the echo_cancellation
// audio option.
const bool enable_built_in_aec =
- echo_cancellation && !use_delay_agnostic_aec;
+ *options.echo_cancellation && !use_delay_agnostic_aec;
if (voe_wrapper_->hw()->EnableBuiltInAEC(enable_built_in_aec) == 0 &&
enable_built_in_aec) {
// Disable internal software EC if built-in EC is enabled,
// i.e., replace the software EC with the built-in EC.
- options.echo_cancellation.Set(false);
- echo_cancellation = false;
+ options.echo_cancellation = rtc::Optional<bool>(false);
LOG(LS_INFO) << "Disabling EC since built-in EC will be used instead";
}
}
- if (voep->SetEcStatus(echo_cancellation, ec_mode) == -1) {
- LOG_RTCERR2(SetEcStatus, echo_cancellation, ec_mode);
+ if (voep->SetEcStatus(*options.echo_cancellation, ec_mode) == -1) {
+ LOG_RTCERR2(SetEcStatus, *options.echo_cancellation, ec_mode);
return false;
} else {
- LOG(LS_INFO) << "Echo control set to " << echo_cancellation
+ LOG(LS_INFO) << "Echo control set to " << *options.echo_cancellation
<< " with mode " << ec_mode;
}
#if !defined(ANDROID)
// TODO(ajm): Remove the error return on Android from webrtc.
- if (voep->SetEcMetricsStatus(echo_cancellation) == -1) {
- LOG_RTCERR1(SetEcMetricsStatus, echo_cancellation);
+ if (voep->SetEcMetricsStatus(*options.echo_cancellation) == -1) {
+ LOG_RTCERR1(SetEcMetricsStatus, *options.echo_cancellation);
return false;
}
#endif
if (ec_mode == webrtc::kEcAecm) {
- if (voep->SetAecmMode(aecm_mode, aecm_comfort_noise) != 0) {
- LOG_RTCERR2(SetAecmMode, aecm_mode, aecm_comfort_noise);
+ bool cn = options.aecm_generate_comfort_noise.value_or(false);
+ if (voep->SetAecmMode(aecm_mode, cn) != 0) {
+ LOG_RTCERR2(SetAecmMode, aecm_mode, cn);
return false;
}
}
}
- bool auto_gain_control = false;
- if (options.auto_gain_control.Get(&auto_gain_control)) {
+ if (options.auto_gain_control) {
const bool built_in_agc = voe_wrapper_->hw()->BuiltInAGCIsAvailable();
if (built_in_agc) {
- if (voe_wrapper_->hw()->EnableBuiltInAGC(auto_gain_control) == 0 &&
- auto_gain_control) {
+ if (voe_wrapper_->hw()->EnableBuiltInAGC(*options.auto_gain_control) ==
+ 0 &&
+ *options.auto_gain_control) {
// Disable internal software AGC if built-in AGC is enabled,
// i.e., replace the software AGC with the built-in AGC.
- options.auto_gain_control.Set(false);
- auto_gain_control = false;
+ options.auto_gain_control = rtc::Optional<bool>(false);
LOG(LS_INFO) << "Disabling AGC since built-in AGC will be used instead";
}
}
- if (voep->SetAgcStatus(auto_gain_control, agc_mode) == -1) {
- LOG_RTCERR2(SetAgcStatus, auto_gain_control, agc_mode);
+ if (voep->SetAgcStatus(*options.auto_gain_control, agc_mode) == -1) {
+ LOG_RTCERR2(SetAgcStatus, *options.auto_gain_control, agc_mode);
return false;
} else {
- LOG(LS_INFO) << "Auto gain set to " << auto_gain_control << " with mode "
- << agc_mode;
+ LOG(LS_INFO) << "Auto gain set to " << *options.auto_gain_control
+ << " with mode " << agc_mode;
}
}
- if (options.tx_agc_target_dbov.IsSet() ||
- options.tx_agc_digital_compression_gain.IsSet() ||
- options.tx_agc_limiter.IsSet()) {
+ if (options.tx_agc_target_dbov || options.tx_agc_digital_compression_gain ||
+ options.tx_agc_limiter) {
// Override default_agc_config_. Generally, an unset option means "leave
// the VoE bits alone" in this function, so we want whatever is set to be
// stored as the new "default". If we didn't, then setting e.g.
@@ -736,15 +710,13 @@ bool WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) {
// Also, if we don't update default_agc_config_, then adjust_agc_delta
// would be an offset from the original values, and not whatever was set
// explicitly.
- default_agc_config_.targetLeveldBOv =
- options.tx_agc_target_dbov.GetWithDefaultIfUnset(
- default_agc_config_.targetLeveldBOv);
+ default_agc_config_.targetLeveldBOv = options.tx_agc_target_dbov.value_or(
+ default_agc_config_.targetLeveldBOv);
default_agc_config_.digitalCompressionGaindB =
- options.tx_agc_digital_compression_gain.GetWithDefaultIfUnset(
+ options.tx_agc_digital_compression_gain.value_or(
default_agc_config_.digitalCompressionGaindB);
default_agc_config_.limiterEnable =
- options.tx_agc_limiter.GetWithDefaultIfUnset(
- default_agc_config_.limiterEnable);
+ options.tx_agc_limiter.value_or(default_agc_config_.limiterEnable);
if (voe_wrapper_->processing()->SetAgcConfig(default_agc_config_) == -1) {
LOG_RTCERR3(SetAgcConfig,
default_agc_config_.targetLeveldBOv,
@@ -754,84 +726,79 @@ bool WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) {
}
}
- bool noise_suppression = false;
- if (options.noise_suppression.Get(&noise_suppression)) {
+ if (options.noise_suppression) {
const bool built_in_ns = voe_wrapper_->hw()->BuiltInNSIsAvailable();
if (built_in_ns) {
- if (voe_wrapper_->hw()->EnableBuiltInNS(noise_suppression) == 0 &&
- noise_suppression) {
+ if (voe_wrapper_->hw()->EnableBuiltInNS(*options.noise_suppression) ==
+ 0 &&
+ *options.noise_suppression) {
// Disable internal software NS if built-in NS is enabled,
// i.e., replace the software NS with the built-in NS.
- options.noise_suppression.Set(false);
- noise_suppression = false;
+ options.noise_suppression = rtc::Optional<bool>(false);
LOG(LS_INFO) << "Disabling NS since built-in NS will be used instead";
}
}
- if (voep->SetNsStatus(noise_suppression, ns_mode) == -1) {
- LOG_RTCERR2(SetNsStatus, noise_suppression, ns_mode);
+ if (voep->SetNsStatus(*options.noise_suppression, ns_mode) == -1) {
+ LOG_RTCERR2(SetNsStatus, *options.noise_suppression, ns_mode);
return false;
} else {
- LOG(LS_INFO) << "Noise suppression set to " << noise_suppression
+ LOG(LS_INFO) << "Noise suppression set to " << *options.noise_suppression
<< " with mode " << ns_mode;
}
}
- bool highpass_filter;
- if (options.highpass_filter.Get(&highpass_filter)) {
- LOG(LS_INFO) << "High pass filter enabled? " << highpass_filter;
- if (voep->EnableHighPassFilter(highpass_filter) == -1) {
- LOG_RTCERR1(SetHighpassFilterStatus, highpass_filter);
+ if (options.highpass_filter) {
+ LOG(LS_INFO) << "High pass filter enabled? " << *options.highpass_filter;
+ if (voep->EnableHighPassFilter(*options.highpass_filter) == -1) {
+ LOG_RTCERR1(SetHighpassFilterStatus, *options.highpass_filter);
return false;
}
}
- bool stereo_swapping;
- if (options.stereo_swapping.Get(&stereo_swapping)) {
- LOG(LS_INFO) << "Stereo swapping enabled? " << stereo_swapping;
- voep->EnableStereoChannelSwapping(stereo_swapping);
- if (voep->IsStereoChannelSwappingEnabled() != stereo_swapping) {
- LOG_RTCERR1(EnableStereoChannelSwapping, stereo_swapping);
+ if (options.stereo_swapping) {
+ LOG(LS_INFO) << "Stereo swapping enabled? " << *options.stereo_swapping;
+ voep->EnableStereoChannelSwapping(*options.stereo_swapping);
+ if (voep->IsStereoChannelSwappingEnabled() != *options.stereo_swapping) {
+ LOG_RTCERR1(EnableStereoChannelSwapping, *options.stereo_swapping);
return false;
}
}
- int audio_jitter_buffer_max_packets;
- if (options.audio_jitter_buffer_max_packets.Get(
- &audio_jitter_buffer_max_packets)) {
- LOG(LS_INFO) << "NetEq capacity is " << audio_jitter_buffer_max_packets;
+ if (options.audio_jitter_buffer_max_packets) {
+ LOG(LS_INFO) << "NetEq capacity is "
+ << *options.audio_jitter_buffer_max_packets;
voe_config_.Set<webrtc::NetEqCapacityConfig>(
- new webrtc::NetEqCapacityConfig(audio_jitter_buffer_max_packets));
+ new webrtc::NetEqCapacityConfig(
+ *options.audio_jitter_buffer_max_packets));
}
- bool audio_jitter_buffer_fast_accelerate;
- if (options.audio_jitter_buffer_fast_accelerate.Get(
- &audio_jitter_buffer_fast_accelerate)) {
- LOG(LS_INFO) << "NetEq fast mode? " << audio_jitter_buffer_fast_accelerate;
+ if (options.audio_jitter_buffer_fast_accelerate) {
+ LOG(LS_INFO) << "NetEq fast mode? "
+ << *options.audio_jitter_buffer_fast_accelerate;
voe_config_.Set<webrtc::NetEqFastAccelerate>(
- new webrtc::NetEqFastAccelerate(audio_jitter_buffer_fast_accelerate));
+ new webrtc::NetEqFastAccelerate(
+ *options.audio_jitter_buffer_fast_accelerate));
}
- bool typing_detection;
- if (options.typing_detection.Get(&typing_detection)) {
- LOG(LS_INFO) << "Typing detection is enabled? " << typing_detection;
- if (voep->SetTypingDetectionStatus(typing_detection) == -1) {
+ if (options.typing_detection) {
+ LOG(LS_INFO) << "Typing detection is enabled? "
+ << *options.typing_detection;
+ if (voep->SetTypingDetectionStatus(*options.typing_detection) == -1) {
// In case of error, log the info and continue
- LOG_RTCERR1(SetTypingDetectionStatus, typing_detection);
+ LOG_RTCERR1(SetTypingDetectionStatus, *options.typing_detection);
}
}
- int adjust_agc_delta;
- if (options.adjust_agc_delta.Get(&adjust_agc_delta)) {
- LOG(LS_INFO) << "Adjust agc delta is " << adjust_agc_delta;
- if (!AdjustAgcLevel(adjust_agc_delta)) {
+ if (options.adjust_agc_delta) {
+ LOG(LS_INFO) << "Adjust agc delta is " << *options.adjust_agc_delta;
+ if (!AdjustAgcLevel(*options.adjust_agc_delta)) {
return false;
}
}
- bool aec_dump;
- if (options.aec_dump.Get(&aec_dump)) {
- LOG(LS_INFO) << "Aec dump is enabled? " << aec_dump;
- if (aec_dump)
+ if (options.aec_dump) {
+ LOG(LS_INFO) << "Aec dump is enabled? " << *options.aec_dump;
+ if (*options.aec_dump)
StartAecDump(kAecDumpByAudioOptionFilename);
else
StopAecDump();
@@ -839,28 +806,30 @@ bool WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) {
webrtc::Config config;
- delay_agnostic_aec_.SetFrom(options.delay_agnostic_aec);
- bool delay_agnostic_aec;
- if (delay_agnostic_aec_.Get(&delay_agnostic_aec)) {
- LOG(LS_INFO) << "Delay agnostic aec is enabled? " << delay_agnostic_aec;
+ if (options.delay_agnostic_aec)
+ delay_agnostic_aec_ = options.delay_agnostic_aec;
+ if (delay_agnostic_aec_) {
+ LOG(LS_INFO) << "Delay agnostic aec is enabled? " << *delay_agnostic_aec_;
config.Set<webrtc::DelayAgnostic>(
- new webrtc::DelayAgnostic(delay_agnostic_aec));
+ new webrtc::DelayAgnostic(*delay_agnostic_aec_));
}
- extended_filter_aec_.SetFrom(options.extended_filter_aec);
- bool extended_filter;
- if (extended_filter_aec_.Get(&extended_filter)) {
- LOG(LS_INFO) << "Extended filter aec is enabled? " << extended_filter;
+ if (options.extended_filter_aec) {
+ extended_filter_aec_ = options.extended_filter_aec;
+ }
+ if (extended_filter_aec_) {
+ LOG(LS_INFO) << "Extended filter aec is enabled? " << *extended_filter_aec_;
config.Set<webrtc::ExtendedFilter>(
- new webrtc::ExtendedFilter(extended_filter));
+ new webrtc::ExtendedFilter(*extended_filter_aec_));
}
- experimental_ns_.SetFrom(options.experimental_ns);
- bool experimental_ns;
- if (experimental_ns_.Get(&experimental_ns)) {
- LOG(LS_INFO) << "Experimental ns is enabled? " << experimental_ns;
+ if (options.experimental_ns) {
+ experimental_ns_ = options.experimental_ns;
+ }
+ if (experimental_ns_) {
+ LOG(LS_INFO) << "Experimental ns is enabled? " << *experimental_ns_;
config.Set<webrtc::ExperimentalNs>(
- new webrtc::ExperimentalNs(experimental_ns));
+ new webrtc::ExperimentalNs(*experimental_ns_));
}
// We check audioproc for the benefit of tests, since FakeWebRtcVoiceEngine
@@ -870,167 +839,58 @@ bool WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) {
audioproc->SetExtraOptions(config);
}
- uint32_t recording_sample_rate;
- if (options.recording_sample_rate.Get(&recording_sample_rate)) {
- LOG(LS_INFO) << "Recording sample rate is " << recording_sample_rate;
- if (voe_wrapper_->hw()->SetRecordingSampleRate(recording_sample_rate)) {
- LOG_RTCERR1(SetRecordingSampleRate, recording_sample_rate);
+ if (options.recording_sample_rate) {
+ LOG(LS_INFO) << "Recording sample rate is "
+ << *options.recording_sample_rate;
+ if (voe_wrapper_->hw()->SetRecordingSampleRate(
+ *options.recording_sample_rate)) {
+ LOG_RTCERR1(SetRecordingSampleRate, *options.recording_sample_rate);
}
}
- uint32_t playout_sample_rate;
- if (options.playout_sample_rate.Get(&playout_sample_rate)) {
- LOG(LS_INFO) << "Playout sample rate is " << playout_sample_rate;
- if (voe_wrapper_->hw()->SetPlayoutSampleRate(playout_sample_rate)) {
- LOG_RTCERR1(SetPlayoutSampleRate, playout_sample_rate);
+ if (options.playout_sample_rate) {
+ LOG(LS_INFO) << "Playout sample rate is " << *options.playout_sample_rate;
+ if (voe_wrapper_->hw()->SetPlayoutSampleRate(
+ *options.playout_sample_rate)) {
+ LOG_RTCERR1(SetPlayoutSampleRate, *options.playout_sample_rate);
}
}
return true;
}
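
The hunks above all follow one migration: AudioOptions fields move from a Settable-style `Get(&value)` accessor to `rtc::Optional<T>`, which tests presence via `operator bool` and reads the contained value via `operator*`. A minimal standalone sketch of that access pattern, assuming only the idiom shown in the patch (the `DemoOptions` struct and `ApplyDemoOptions` function are hypothetical):

#include <iostream>
#include "webrtc/base/optional.h"

struct DemoOptions {  // hypothetical stand-in for cricket::AudioOptions
  rtc::Optional<bool> typing_detection;
  rtc::Optional<int> adjust_agc_delta;
};

void ApplyDemoOptions(const DemoOptions& options) {
  if (options.typing_detection) {  // operator bool: was the field set?
    std::cout << "typing_detection=" << *options.typing_detection << "\n";
  }
  if (options.adjust_agc_delta) {  // operator*: read the contained value
    std::cout << "adjust_agc_delta=" << *options.adjust_agc_delta << "\n";
  }
}
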
-// TODO(juberti): Refactor this so that the core logic can be used to set the
-// soundclip device. At that time, reinstate the soundclip pause/resume code.
-bool WebRtcVoiceEngine::SetDevices(const Device* in_device,
- const Device* out_device) {
-#if !defined(IOS)
- int in_id = in_device ? rtc::FromString<int>(in_device->id) :
- kDefaultAudioDeviceId;
- int out_id = out_device ? rtc::FromString<int>(out_device->id) :
- kDefaultAudioDeviceId;
- // The device manager uses -1 as the default device, which was the case for
- // VoE 3.5. VoE 4.0, however, uses 0 as the default in Linux and Mac.
-#ifndef WIN32
- if (-1 == in_id) {
- in_id = kDefaultAudioDeviceId;
- }
- if (-1 == out_id) {
- out_id = kDefaultAudioDeviceId;
- }
-#endif
-
- std::string in_name = (in_id != kDefaultAudioDeviceId) ?
- in_device->name : "Default device";
- std::string out_name = (out_id != kDefaultAudioDeviceId) ?
- out_device->name : "Default device";
- LOG(LS_INFO) << "Setting microphone to (id=" << in_id << ", name=" << in_name
- << ") and speaker to (id=" << out_id << ", name=" << out_name
- << ")";
+void WebRtcVoiceEngine::SetDefaultDevices() {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+#if !defined(WEBRTC_IOS)
+ int in_id = kDefaultAudioDeviceId;
+ int out_id = kDefaultAudioDeviceId;
+ LOG(LS_INFO) << "Setting microphone to (id=" << in_id
+ << ") and speaker to (id=" << out_id << ")";
- // Must also pause all audio playback and capture.
bool ret = true;
- for (WebRtcVoiceMediaChannel* channel : channels_) {
- if (!channel->PausePlayout()) {
- LOG(LS_WARNING) << "Failed to pause playout";
- ret = false;
- }
- if (!channel->PauseSend()) {
- LOG(LS_WARNING) << "Failed to pause send";
- ret = false;
- }
- }
-
- // Find the recording device id in VoiceEngine and set recording device.
- if (!FindWebRtcAudioDeviceId(true, in_name, in_id, &in_id)) {
+ if (voe_wrapper_->hw()->SetRecordingDevice(in_id) == -1) {
+ LOG_RTCERR1(SetRecordingDevice, in_id);
ret = false;
}
- if (ret) {
- if (voe_wrapper_->hw()->SetRecordingDevice(in_id) == -1) {
- LOG_RTCERR2(SetRecordingDevice, in_name, in_id);
- ret = false;
- }
- webrtc::AudioProcessing* ap = voe()->base()->audio_processing();
- if (ap)
- ap->Initialize();
+ webrtc::AudioProcessing* ap = voe()->base()->audio_processing();
+ if (ap) {
+ ap->Initialize();
}
- // Find the playout device id in VoiceEngine and set playout device.
- if (!FindWebRtcAudioDeviceId(false, out_name, out_id, &out_id)) {
- LOG(LS_WARNING) << "Failed to find VoiceEngine device id for " << out_name;
+ if (voe_wrapper_->hw()->SetPlayoutDevice(out_id) == -1) {
+ LOG_RTCERR1(SetPlayoutDevice, out_id);
ret = false;
}
- if (ret) {
- if (voe_wrapper_->hw()->SetPlayoutDevice(out_id) == -1) {
- LOG_RTCERR2(SetPlayoutDevice, out_name, out_id);
- ret = false;
- }
- }
-
- // Resume all audio playback and capture.
- for (WebRtcVoiceMediaChannel* channel : channels_) {
- if (!channel->ResumePlayout()) {
- LOG(LS_WARNING) << "Failed to resume playout";
- ret = false;
- }
- if (!channel->ResumeSend()) {
- LOG(LS_WARNING) << "Failed to resume send";
- ret = false;
- }
- }
if (ret) {
- LOG(LS_INFO) << "Set microphone to (id=" << in_id <<" name=" << in_name
- << ") and speaker to (id="<< out_id << " name=" << out_name
- << ")";
+ LOG(LS_INFO) << "Set microphone to (id=" << in_id
+ << ") and speaker to (id=" << out_id << ")";
}
-
- return ret;
-#else
- return true;
-#endif // !IOS
-}
-
-bool WebRtcVoiceEngine::FindWebRtcAudioDeviceId(
- bool is_input, const std::string& dev_name, int dev_id, int* rtc_id) {
- // In Linux, VoiceEngine uses the same device dev_id as the device manager.
-#if defined(LINUX) || defined(ANDROID)
- *rtc_id = dev_id;
- return true;
-#else
- // In Windows and Mac, we need to find the VoiceEngine device id by name
- // unless the input dev_id is the default device id.
- if (kDefaultAudioDeviceId == dev_id) {
- *rtc_id = dev_id;
- return true;
- }
-
- // Get the number of VoiceEngine audio devices.
- int count = 0;
- if (is_input) {
- if (-1 == voe_wrapper_->hw()->GetNumOfRecordingDevices(count)) {
- LOG_RTCERR0(GetNumOfRecordingDevices);
- return false;
- }
- } else {
- if (-1 == voe_wrapper_->hw()->GetNumOfPlayoutDevices(count)) {
- LOG_RTCERR0(GetNumOfPlayoutDevices);
- return false;
- }
- }
-
- for (int i = 0; i < count; ++i) {
- char name[128];
- char guid[128];
- if (is_input) {
- voe_wrapper_->hw()->GetRecordingDeviceName(i, name, guid);
- LOG(LS_VERBOSE) << "VoiceEngine microphone " << i << ": " << name;
- } else {
- voe_wrapper_->hw()->GetPlayoutDeviceName(i, name, guid);
- LOG(LS_VERBOSE) << "VoiceEngine speaker " << i << ": " << name;
- }
-
- std::string webrtc_name(name);
- if (dev_name.compare(0, webrtc_name.size(), webrtc_name) == 0) {
- *rtc_id = i;
- return true;
- }
- }
- LOG(LS_WARNING) << "VoiceEngine cannot find device: " << dev_name;
- return false;
-#endif
+#endif // !WEBRTC_IOS
}
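
SetDefaultDevices(), like most methods touched by this patch, now asserts that it runs on the worker thread. For reference, a minimal sketch of the rtc::ThreadChecker idiom being applied (the Worker class is invented): the checker binds to the first thread that calls CalledOnValidThread() and DCHECKs that every later call arrives on that same thread.

#include "webrtc/base/checks.h"
#include "webrtc/base/thread_checker.h"

class Worker {  // hypothetical example class
 public:
  void DoWork() {
    // Fails a debug assertion if called from any thread other than the
    // one that first invoked CalledOnValidThread().
    RTC_DCHECK(thread_checker_.CalledOnValidThread());
    ++counter_;  // safe: single-threaded access is enforced above
  }
 private:
  rtc::ThreadChecker thread_checker_;
  int counter_ = 0;
};
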
bool WebRtcVoiceEngine::GetOutputVolume(int* level) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
unsigned int ulevel;
if (voe_wrapper_->volume()->GetSpeakerVolume(ulevel) == -1) {
LOG_RTCERR1(GetSpeakerVolume, level);
@@ -1041,6 +901,7 @@ bool WebRtcVoiceEngine::GetOutputVolume(int* level) {
}
bool WebRtcVoiceEngine::SetOutputVolume(int level) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
RTC_DCHECK(level >= 0 && level <= 255);
if (voe_wrapper_->volume()->SetSpeakerVolume(level) == -1) {
LOG_RTCERR1(SetSpeakerVolume, level);
@@ -1050,136 +911,36 @@ bool WebRtcVoiceEngine::SetOutputVolume(int level) {
}
int WebRtcVoiceEngine::GetInputLevel() {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
unsigned int ulevel;
return (voe_wrapper_->volume()->GetSpeechInputLevel(ulevel) != -1) ?
static_cast<int>(ulevel) : -1;
}
const std::vector<AudioCodec>& WebRtcVoiceEngine::codecs() {
+ RTC_DCHECK(signal_thread_checker_.CalledOnValidThread());
return codecs_;
}
-bool WebRtcVoiceEngine::FindCodec(const AudioCodec& in) {
- return FindWebRtcCodec(in, NULL);
-}
-
-// Get the VoiceEngine codec that matches |in|, with the supplied settings.
-bool WebRtcVoiceEngine::FindWebRtcCodec(const AudioCodec& in,
- webrtc::CodecInst* out) {
- int ncodecs = voe_wrapper_->codec()->NumOfCodecs();
- for (int i = 0; i < ncodecs; ++i) {
- webrtc::CodecInst voe_codec;
- if (GetVoeCodec(i, &voe_codec)) {
- AudioCodec codec(voe_codec.pltype, voe_codec.plname, voe_codec.plfreq,
- voe_codec.rate, voe_codec.channels, 0);
- bool multi_rate = IsCodecMultiRate(voe_codec);
- // Allow arbitrary rates for ISAC to be specified.
- if (multi_rate) {
- // Set codec.bitrate to 0 so the check for codec.Matches() passes.
- codec.bitrate = 0;
- }
- if (codec.Matches(in)) {
- if (out) {
- // Fixup the payload type.
- voe_codec.pltype = in.id;
-
- // Set bitrate if specified.
- if (multi_rate && in.bitrate != 0) {
- voe_codec.rate = in.bitrate;
- }
-
- // Reset G722 sample rate to 16000 to match WebRTC.
- MaybeFixupG722(&voe_codec, 16000);
-
- // Apply codec-specific settings.
- if (IsCodec(codec, kIsacCodecName)) {
- // If ISAC and an explicit bitrate is not specified,
- // enable auto bitrate adjustment.
- voe_codec.rate = (in.bitrate > 0) ? in.bitrate : -1;
- }
- *out = voe_codec;
- }
- return true;
- }
- }
- }
- return false;
-}
-const std::vector<RtpHeaderExtension>&
-WebRtcVoiceEngine::rtp_header_extensions() const {
- return rtp_header_extensions_;
-}
-
-void WebRtcVoiceEngine::SetLogging(int min_sev, const char* filter) {
- // if min_sev == -1, we keep the current log level.
- if (min_sev >= 0) {
- SetTraceFilter(SeverityToFilter(min_sev));
- }
- log_options_ = filter;
- SetTraceOptions(initialized_ ? log_options_ : "");
+RtpCapabilities WebRtcVoiceEngine::GetCapabilities() const {
+ RTC_DCHECK(signal_thread_checker_.CalledOnValidThread());
+ RtpCapabilities capabilities;
+ capabilities.header_extensions.push_back(RtpHeaderExtension(
+ kRtpAudioLevelHeaderExtension, kRtpAudioLevelHeaderExtensionDefaultId));
+ capabilities.header_extensions.push_back(
+ RtpHeaderExtension(kRtpAbsoluteSenderTimeHeaderExtension,
+ kRtpAbsoluteSenderTimeHeaderExtensionDefaultId));
+ return capabilities;
}
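
A hypothetical caller sketch for the new GetCapabilities() accessor, assuming only the RtpHeaderExtension fields (uri, id) used elsewhere in this file and the logging header of this tree:

#include "webrtc/base/logging.h"

void LogAudioCapabilities(const cricket::WebRtcVoiceEngine& engine) {
  const cricket::RtpCapabilities caps = engine.GetCapabilities();
  for (const cricket::RtpHeaderExtension& ext : caps.header_extensions) {
    LOG(LS_INFO) << "audio rtp extension: uri=" << ext.uri
                 << " id=" << ext.id;
  }
}
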
int WebRtcVoiceEngine::GetLastEngineError() {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
return voe_wrapper_->error();
}
-void WebRtcVoiceEngine::SetTraceFilter(int filter) {
- log_filter_ = filter;
- tracing_->SetTraceFilter(filter);
-}
-
-// We suppport three different logging settings for VoiceEngine:
-// 1. Observer callback that goes into talk diagnostic logfile.
-// Use --logfile and --loglevel
-//
-// 2. Encrypted VoiceEngine log for debugging VoiceEngine.
-// Use --voice_loglevel --voice_logfilter "tracefile file_name"
-//
-// 3. EC log and dump for debugging QualityEngine.
-// Use --voice_loglevel --voice_logfilter "recordEC file_name"
-//
-// For more details see: "https://sites.google.com/a/google.com/wavelet/Home/
-// Magic-Flute--RTC-Engine-/Magic-Flute-Command-Line-Parameters"
-void WebRtcVoiceEngine::SetTraceOptions(const std::string& options) {
- // Set encrypted trace file.
- std::vector<std::string> opts;
- rtc::tokenize(options, ' ', '"', '"', &opts);
- std::vector<std::string>::iterator tracefile =
- std::find(opts.begin(), opts.end(), "tracefile");
- if (tracefile != opts.end() && ++tracefile != opts.end()) {
- // Write encrypted debug output (at same loglevel) to file
- // EncryptedTraceFile no longer supported.
- if (tracing_->SetTraceFile(tracefile->c_str()) == -1) {
- LOG_RTCERR1(SetTraceFile, *tracefile);
- }
- }
-
- // Allow trace options to override the trace filter. We default
- // it to log_filter_ (as a translation of libjingle log levels)
- // elsewhere, but this allows clients to explicitly set webrtc
- // log levels.
- std::vector<std::string>::iterator tracefilter =
- std::find(opts.begin(), opts.end(), "tracefilter");
- if (tracefilter != opts.end() && ++tracefilter != opts.end()) {
- if (!tracing_->SetTraceFilter(rtc::FromString<int>(*tracefilter))) {
- LOG_RTCERR1(SetTraceFilter, *tracefilter);
- }
- }
-
- // Set AEC dump file
- std::vector<std::string>::iterator recordEC =
- std::find(opts.begin(), opts.end(), "recordEC");
- if (recordEC != opts.end()) {
- ++recordEC;
- if (recordEC != opts.end())
- StartAecDump(recordEC->c_str());
- else
- StopAecDump();
- }
-}
-
void WebRtcVoiceEngine::Print(webrtc::TraceLevel level, const char* trace,
int length) {
+ // Note: This callback can happen on any thread!
rtc::LoggingSeverity sev = rtc::LS_VERBOSE;
if (level == webrtc::kTraceError || level == webrtc::kTraceCritical)
sev = rtc::LS_ERROR;
@@ -1201,34 +962,24 @@ void WebRtcVoiceEngine::Print(webrtc::TraceLevel level, const char* trace,
}
}
-void WebRtcVoiceEngine::CallbackOnError(int channel_id, int err_code) {
- RTC_DCHECK(channel_id == -1);
- LOG(LS_WARNING) << "VoiceEngine error " << err_code << " reported on channel "
- << channel_id << ".";
- rtc::CritScope lock(&channels_cs_);
- for (WebRtcVoiceMediaChannel* channel : channels_) {
- channel->OnError(err_code);
- }
-}
-
void WebRtcVoiceEngine::RegisterChannel(WebRtcVoiceMediaChannel* channel) {
- RTC_DCHECK(channel != NULL);
- rtc::CritScope lock(&channels_cs_);
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(channel);
channels_.push_back(channel);
}
void WebRtcVoiceEngine::UnregisterChannel(WebRtcVoiceMediaChannel* channel) {
- rtc::CritScope lock(&channels_cs_);
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
auto it = std::find(channels_.begin(), channels_.end(), channel);
- if (it != channels_.end()) {
- channels_.erase(it);
- }
+ RTC_DCHECK(it != channels_.end());
+ channels_.erase(it);
}
// Adjusts the default AGC target level by the specified delta.
// NB: If we start messing with other config fields, we'll want
// to save the current webrtc::AgcConfig as well.
bool WebRtcVoiceEngine::AdjustAgcLevel(int delta) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
webrtc::AgcConfig config = default_agc_config_;
config.targetLeveldBOv -= delta;
@@ -1244,6 +995,7 @@ bool WebRtcVoiceEngine::AdjustAgcLevel(int delta) {
}
bool WebRtcVoiceEngine::SetAudioDeviceModule(webrtc::AudioDeviceModule* adm) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
if (initialized_) {
LOG(LS_WARNING) << "SetAudioDeviceModule can not be called after Init.";
return false;
@@ -1260,6 +1012,7 @@ bool WebRtcVoiceEngine::SetAudioDeviceModule(webrtc::AudioDeviceModule* adm) {
}
bool WebRtcVoiceEngine::StartAecDump(rtc::PlatformFile file) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
FILE* aec_dump_file_stream = rtc::FdopenPlatformFileForWriting(file);
if (!aec_dump_file_stream) {
LOG(LS_ERROR) << "Could not open AEC dump file stream.";
@@ -1279,6 +1032,7 @@ bool WebRtcVoiceEngine::StartAecDump(rtc::PlatformFile file) {
}
void WebRtcVoiceEngine::StartAecDump(const std::string& filename) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
if (!is_dumping_aec_) {
// Start dumping AEC when we are not dumping.
if (voe_wrapper_->processing()->StartDebugRecording(
@@ -1291,6 +1045,7 @@ void WebRtcVoiceEngine::StartAecDump(const std::string& filename) {
}
void WebRtcVoiceEngine::StopAecDump() {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
if (is_dumping_aec_) {
// Stop dumping AEC when we are dumping.
if (voe_wrapper_->processing()->StopDebugRecording() !=
@@ -1302,14 +1057,17 @@ void WebRtcVoiceEngine::StopAecDump() {
}
bool WebRtcVoiceEngine::StartRtcEventLog(rtc::PlatformFile file) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
return voe_wrapper_->codec()->GetEventLog()->StartLogging(file);
}
void WebRtcVoiceEngine::StopRtcEventLog() {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
voe_wrapper_->codec()->GetEventLog()->StopLogging();
}
int WebRtcVoiceEngine::CreateVoEChannel() {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
return voe_wrapper_->base()->CreateChannel(voe_config_);
}
@@ -1317,33 +1075,61 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream
: public AudioRenderer::Sink {
public:
WebRtcAudioSendStream(int ch, webrtc::AudioTransport* voe_audio_transport,
- uint32_t ssrc, webrtc::Call* call)
- : channel_(ch),
- voe_audio_transport_(voe_audio_transport),
- call_(call) {
+ uint32_t ssrc, const std::string& c_name,
+ const std::vector<webrtc::RtpExtension>& extensions,
+ webrtc::Call* call)
+ : voe_audio_transport_(voe_audio_transport),
+ call_(call),
+ config_(nullptr) {
RTC_DCHECK_GE(ch, 0);
// TODO(solenberg): Once we're not using FakeWebRtcVoiceEngine anymore:
// RTC_DCHECK(voe_audio_transport);
RTC_DCHECK(call);
audio_capture_thread_checker_.DetachFromThread();
- webrtc::AudioSendStream::Config config(nullptr);
- config.voe_channel_id = channel_;
- config.rtp.ssrc = ssrc;
- stream_ = call_->CreateAudioSendStream(config);
- RTC_DCHECK(stream_);
+ config_.rtp.ssrc = ssrc;
+ config_.rtp.c_name = c_name;
+ config_.voe_channel_id = ch;
+ RecreateAudioSendStream(extensions);
}
+
~WebRtcAudioSendStream() override {
- RTC_DCHECK(signal_thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
Stop();
call_->DestroyAudioSendStream(stream_);
}
+ void RecreateAudioSendStream(
+ const std::vector<webrtc::RtpExtension>& extensions) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ if (stream_) {
+ call_->DestroyAudioSendStream(stream_);
+ stream_ = nullptr;
+ }
+ config_.rtp.extensions = extensions;
+ RTC_DCHECK(!stream_);
+ stream_ = call_->CreateAudioSendStream(config_);
+ RTC_CHECK(stream_);
+ }
+
+ bool SendTelephoneEvent(int payload_type, uint8_t event,
+ uint32_t duration_ms) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(stream_);
+ return stream_->SendTelephoneEvent(payload_type, event, duration_ms);
+ }
+
+ webrtc::AudioSendStream::Stats GetStats() const {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(stream_);
+ return stream_->GetStats();
+ }
+
// Starts the rendering by setting a sink to the renderer to get data
// callback.
// This method is called on the libjingle worker thread.
// TODO(xians): Make sure Start() is called only once.
void Start(AudioRenderer* renderer) {
- RTC_DCHECK(signal_thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
RTC_DCHECK(renderer);
if (renderer_) {
RTC_DCHECK(renderer_ == renderer);
@@ -1353,16 +1139,11 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream
renderer_ = renderer;
}
- webrtc::AudioSendStream::Stats GetStats() const {
- RTC_DCHECK(signal_thread_checker_.CalledOnValidThread());
- return stream_->GetStats();
- }
-
// Stops rendering by setting the sink of the renderer to nullptr. No data
// callback will be received after this method.
// This method is called on the libjingle worker thread.
void Stop() {
- RTC_DCHECK(signal_thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
if (renderer_) {
renderer_->SetSink(nullptr);
renderer_ = nullptr;
@@ -1374,11 +1155,12 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream
void OnData(const void* audio_data,
int bits_per_sample,
int sample_rate,
- int number_of_channels,
+ size_t number_of_channels,
size_t number_of_frames) override {
+ RTC_DCHECK(!worker_thread_checker_.CalledOnValidThread());
RTC_DCHECK(audio_capture_thread_checker_.CalledOnValidThread());
RTC_DCHECK(voe_audio_transport_);
- voe_audio_transport_->OnData(channel_,
+ voe_audio_transport_->OnData(config_.voe_channel_id,
audio_data,
bits_per_sample,
sample_rate,
@@ -1389,7 +1171,7 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream
// Callback from the |renderer_| when it is going away. In case Start() has
// never been called, this callback won't be triggered.
void OnClose() override {
- RTC_DCHECK(signal_thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
// Set |renderer_| to nullptr to make sure no more callback will get into
// the renderer.
renderer_ = nullptr;
@@ -1397,16 +1179,18 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream
// Accessor to the VoE channel ID.
int channel() const {
- RTC_DCHECK(signal_thread_checker_.CalledOnValidThread());
- return channel_;
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ return config_.voe_channel_id;
}
private:
- rtc::ThreadChecker signal_thread_checker_;
+ rtc::ThreadChecker worker_thread_checker_;
rtc::ThreadChecker audio_capture_thread_checker_;
- const int channel_ = -1;
webrtc::AudioTransport* const voe_audio_transport_ = nullptr;
webrtc::Call* call_ = nullptr;
+ webrtc::AudioSendStream::Config config_;
+ // The stream is owned by WebRtcAudioSendStream and may be reallocated if
+ // configuration changes.
webrtc::AudioSendStream* stream_ = nullptr;
// Raw pointer to AudioRenderer owned by LocalAudioTrackHandler.
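
WebRtcAudioSendStream now retains its webrtc::AudioSendStream::Config and rebuilds the underlying stream whenever the RTP extension set changes, instead of mutating a live stream. A condensed sketch of that destroy-then-create pattern (the free function and parameter names are illustrative only; the sequence mirrors RecreateAudioSendStream() above):

void RecreateStream(webrtc::Call* call,
                    webrtc::AudioSendStream::Config* config,
                    webrtc::AudioSendStream** stream,
                    const std::vector<webrtc::RtpExtension>& extensions) {
  if (*stream) {
    call->DestroyAudioSendStream(*stream);  // tear down the old stream
    *stream = nullptr;
  }
  config->rtp.extensions = extensions;  // only the cached config changes
  *stream = call->CreateAudioSendStream(*config);  // rebuild from scratch
  RTC_CHECK(*stream);  // creation is expected to succeed
}
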
@@ -1419,80 +1203,163 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream
class WebRtcVoiceMediaChannel::WebRtcAudioReceiveStream {
public:
- explicit WebRtcAudioReceiveStream(int voe_channel_id)
- : channel_(voe_channel_id) {}
+ WebRtcAudioReceiveStream(int ch, uint32_t remote_ssrc, uint32_t local_ssrc,
+ bool use_combined_bwe, const std::string& sync_group,
+ const std::vector<webrtc::RtpExtension>& extensions,
+ webrtc::Call* call)
+ : call_(call),
+ config_() {
+ RTC_DCHECK_GE(ch, 0);
+ RTC_DCHECK(call);
+ config_.rtp.remote_ssrc = remote_ssrc;
+ config_.rtp.local_ssrc = local_ssrc;
+ config_.voe_channel_id = ch;
+ config_.sync_group = sync_group;
+ RecreateAudioReceiveStream(use_combined_bwe, extensions);
+ }
- int channel() { return channel_; }
+ ~WebRtcAudioReceiveStream() {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ call_->DestroyAudioReceiveStream(stream_);
+ }
+
+ void RecreateAudioReceiveStream(
+ const std::vector<webrtc::RtpExtension>& extensions) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ RecreateAudioReceiveStream(config_.combined_audio_video_bwe, extensions);
+ }
+ void RecreateAudioReceiveStream(bool use_combined_bwe) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ RecreateAudioReceiveStream(use_combined_bwe, config_.rtp.extensions);
+ }
+
+ webrtc::AudioReceiveStream::Stats GetStats() const {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(stream_);
+ return stream_->GetStats();
+ }
+
+ int channel() const {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ return config_.voe_channel_id;
+ }
+
+ void SetRawAudioSink(rtc::scoped_ptr<webrtc::AudioSinkInterface> sink) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ stream_->SetSink(std::move(sink));
+ }
private:
- int channel_;
+ void RecreateAudioReceiveStream(bool use_combined_bwe,
+ const std::vector<webrtc::RtpExtension>& extensions) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ if (stream_) {
+ call_->DestroyAudioReceiveStream(stream_);
+ stream_ = nullptr;
+ }
+ config_.rtp.extensions = extensions;
+ config_.combined_audio_video_bwe = use_combined_bwe;
+ RTC_DCHECK(!stream_);
+ stream_ = call_->CreateAudioReceiveStream(config_);
+ RTC_CHECK(stream_);
+ }
+
+ rtc::ThreadChecker worker_thread_checker_;
+ webrtc::Call* call_ = nullptr;
+ webrtc::AudioReceiveStream::Config config_;
+ // The stream is owned by WebRtcAudioReceiveStream and may be reallocated if
+ // configuration changes.
+ webrtc::AudioReceiveStream* stream_ = nullptr;
RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(WebRtcAudioReceiveStream);
};
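
The two public RecreateAudioReceiveStream() overloads above each pin one argument from the cached config and forward to the private two-argument worker. A stripped-down sketch of that delegation shape (class and member names are invented):

class StreamHolder {  // illustrative only
 public:
  void Recreate(const std::vector<webrtc::RtpExtension>& extensions) {
    RecreateInternal(use_combined_bwe_, extensions);  // keep current BWE flag
  }
  void Recreate(bool use_combined_bwe) {
    RecreateInternal(use_combined_bwe, extensions_);  // keep current extensions
  }
 private:
  void RecreateInternal(bool use_combined_bwe,
                        const std::vector<webrtc::RtpExtension>& extensions) {
    use_combined_bwe_ = use_combined_bwe;  // persist the effective values
    extensions_ = extensions;
    // ...destroy and re-create the underlying receive stream here...
  }
  bool use_combined_bwe_ = false;
  std::vector<webrtc::RtpExtension> extensions_;
};
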
-// WebRtcVoiceMediaChannel
WebRtcVoiceMediaChannel::WebRtcVoiceMediaChannel(WebRtcVoiceEngine* engine,
const AudioOptions& options,
webrtc::Call* call)
- : engine_(engine),
- send_bitrate_setting_(false),
- send_bitrate_bps_(0),
- options_(),
- dtmf_allowed_(false),
- desired_playout_(false),
- nack_enabled_(false),
- playout_(false),
- typing_noise_detected_(false),
- desired_send_(SEND_NOTHING),
- send_(SEND_NOTHING),
- call_(call) {
+ : engine_(engine), call_(call) {
LOG(LS_VERBOSE) << "WebRtcVoiceMediaChannel::WebRtcVoiceMediaChannel";
- RTC_DCHECK(nullptr != call);
+ RTC_DCHECK(call);
engine->RegisterChannel(this);
SetOptions(options);
}
WebRtcVoiceMediaChannel::~WebRtcVoiceMediaChannel() {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
LOG(LS_VERBOSE) << "WebRtcVoiceMediaChannel::~WebRtcVoiceMediaChannel";
-
- // Remove any remaining send streams.
+ // TODO(solenberg): Should be able to delete the streams directly, without
+ // going through RemoveNnStream(), once stream objects handle
+ // all (de)configuration.
while (!send_streams_.empty()) {
RemoveSendStream(send_streams_.begin()->first);
}
-
- // Remove any remaining receive streams.
- while (!receive_channels_.empty()) {
- RemoveRecvStream(receive_channels_.begin()->first);
+ while (!recv_streams_.empty()) {
+ RemoveRecvStream(recv_streams_.begin()->first);
}
- RTC_DCHECK(receive_streams_.empty());
-
- // Unregister ourselves from the engine.
engine()->UnregisterChannel(this);
}
bool WebRtcVoiceMediaChannel::SetSendParameters(
const AudioSendParameters& params) {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ LOG(LS_INFO) << "WebRtcVoiceMediaChannel::SetSendParameters: "
+ << params.ToString();
// TODO(pthatcher): Refactor this to be more clean now that we have
// all the information at once.
- return (SetSendCodecs(params.codecs) &&
- SetSendRtpHeaderExtensions(params.extensions) &&
- SetMaxSendBandwidth(params.max_bandwidth_bps) &&
- SetOptions(params.options));
+
+ if (!SetSendCodecs(params.codecs)) {
+ return false;
+ }
+
+ if (!ValidateRtpExtensions(params.extensions)) {
+ return false;
+ }
+ std::vector<webrtc::RtpExtension> filtered_extensions =
+ FilterRtpExtensions(params.extensions,
+ webrtc::RtpExtension::IsSupportedForAudio, true);
+ if (send_rtp_extensions_ != filtered_extensions) {
+ send_rtp_extensions_.swap(filtered_extensions);
+ for (auto& it : send_streams_) {
+ it.second->RecreateAudioSendStream(send_rtp_extensions_);
+ }
+ }
+
+ if (!SetMaxSendBandwidth(params.max_bandwidth_bps)) {
+ return false;
+ }
+ return SetOptions(params.options);
}
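
SetSendParameters() now applies extensions with a validate / filter / compare-and-swap sequence, so streams are recreated only when the effective extension set actually changes. A sketch of that idiom in isolation; UpdateExtensions and the recreate callback are hypothetical, while ValidateRtpExtensions and FilterRtpExtensions are the helpers the patch calls (their exact signatures here are assumed):

#include <functional>
#include <vector>

bool UpdateExtensions(const std::vector<cricket::RtpHeaderExtension>& requested,
                      std::vector<webrtc::RtpExtension>* current,
                      std::function<void()> recreate_streams) {
  if (!ValidateRtpExtensions(requested)) {  // reject malformed/duplicate ids
    return false;
  }
  std::vector<webrtc::RtpExtension> filtered = FilterRtpExtensions(
      requested, webrtc::RtpExtension::IsSupportedForAudio, true);
  if (*current != filtered) {   // recreate only on a real change
    current->swap(filtered);    // cheap swap instead of a copy
    recreate_streams();         // rebuild streams with the new set
  }
  return true;
}
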
bool WebRtcVoiceMediaChannel::SetRecvParameters(
const AudioRecvParameters& params) {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ LOG(LS_INFO) << "WebRtcVoiceMediaChannel::SetRecvParameters: "
+ << params.ToString();
// TODO(pthatcher): Refactor this to be more clean now that we have
// all the information at once.
- return (SetRecvCodecs(params.codecs) &&
- SetRecvRtpHeaderExtensions(params.extensions));
+
+ if (!SetRecvCodecs(params.codecs)) {
+ return false;
+ }
+
+ if (!ValidateRtpExtensions(params.extensions)) {
+ return false;
+ }
+ std::vector<webrtc::RtpExtension> filtered_extensions =
+ FilterRtpExtensions(params.extensions,
+ webrtc::RtpExtension::IsSupportedForAudio, false);
+ if (recv_rtp_extensions_ != filtered_extensions) {
+ recv_rtp_extensions_.swap(filtered_extensions);
+ for (auto& it : recv_streams_) {
+ it.second->RecreateAudioReceiveStream(recv_rtp_extensions_);
+ }
+ }
+
+ return true;
}
bool WebRtcVoiceMediaChannel::SetOptions(const AudioOptions& options) {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
LOG(LS_INFO) << "Setting voice channel options: "
<< options.ToString();
@@ -1503,26 +1370,27 @@ bool WebRtcVoiceMediaChannel::SetOptions(const AudioOptions& options) {
// on top. This means there is no way to "clear" options such that
// they go back to the engine default.
options_.SetAll(options);
-
- if (send_ != SEND_NOTHING) {
- if (!engine()->ApplyOptions(options_)) {
- LOG(LS_WARNING) <<
- "Failed to apply engine options during channel SetOptions.";
- return false;
- }
+ if (!engine()->ApplyOptions(options_)) {
+ LOG(LS_WARNING) <<
+ "Failed to apply engine options during channel SetOptions.";
+ return false;
}
if (dscp_option_changed) {
rtc::DiffServCodePoint dscp = rtc::DSCP_DEFAULT;
- if (options_.dscp.GetWithDefaultIfUnset(false))
+ if (options_.dscp.value_or(false)) {
dscp = kAudioDscpValue;
+ }
if (MediaChannel::SetDscp(dscp) != 0) {
LOG(LS_WARNING) << "Failed to set DSCP settings for audio channel";
}
}
// TODO(solenberg): Don't recreate unless options changed.
- RecreateAudioReceiveStreams();
+ for (auto& it : recv_streams_) {
+ it.second->RecreateAudioReceiveStream(
+ options_.combined_audio_video_bwe.value_or(false));
+ }
LOG(LS_INFO) << "Set voice channel options. Current options: "
<< options_.ToString();
@@ -1531,7 +1399,7 @@ bool WebRtcVoiceMediaChannel::SetOptions(const AudioOptions& options) {
bool WebRtcVoiceMediaChannel::SetRecvCodecs(
const std::vector<AudioCodec>& codecs) {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
// Set the payload types to be used for incoming media.
LOG(LS_INFO) << "Setting receive voice codecs.";
@@ -1568,7 +1436,26 @@ bool WebRtcVoiceMediaChannel::SetRecvCodecs(
PausePlayout();
}
- bool result = SetRecvCodecsInternal(new_codecs);
+ bool result = true;
+ for (const AudioCodec& codec : new_codecs) {
+ webrtc::CodecInst voe_codec;
+ if (WebRtcVoiceEngine::ToCodecInst(codec, &voe_codec)) {
+ LOG(LS_INFO) << ToString(codec);
+ voe_codec.pltype = codec.id;
+ for (const auto& ch : recv_streams_) {
+ if (engine()->voe()->codec()->SetRecPayloadType(
+ ch.second->channel(), voe_codec) == -1) {
+ LOG_RTCERR2(SetRecPayloadType, ch.second->channel(),
+ ToString(voe_codec));
+ result = false;
+ }
+ }
+ } else {
+ LOG(LS_WARNING) << "Unknown codec " << ToString(codec);
+ result = false;
+ break;
+ }
+ }
if (result) {
recv_codecs_ = codecs;
}
@@ -1588,7 +1475,7 @@ bool WebRtcVoiceMediaChannel::SetSendCodecs(
engine()->voe()->codec()->SetFECStatus(channel, false);
// Scan through the list to figure out the codec to use for sending, along
- // with the proper configuration for VAD and DTMF.
+ // with the proper configuration for VAD.
bool found_send_codec = false;
webrtc::CodecInst send_codec;
memset(&send_codec, 0, sizeof(send_codec));
@@ -1603,7 +1490,7 @@ bool WebRtcVoiceMediaChannel::SetSendCodecs(
// Ignore codecs we don't know about. The negotiation step should prevent
// this, but double-check to be sure.
webrtc::CodecInst voe_codec;
- if (!engine()->FindWebRtcCodec(codec, &voe_codec)) {
+ if (!WebRtcVoiceEngine::ToCodecInst(codec, &voe_codec)) {
LOG(LS_WARNING) << "Unknown codec " << ToString(codec);
continue;
}
@@ -1644,7 +1531,7 @@ bool WebRtcVoiceMediaChannel::SetSendCodecs(
// Set packet size if the AudioCodec param kCodecParamPTime is set.
int ptime_ms = 0;
if (codec.GetParam(kCodecParamPTime, &ptime_ms)) {
- if (!SetPTimeAsPacketSize(&send_codec, ptime_ms)) {
+ if (!WebRtcVoiceCodecs::SetPTimeAsPacketSize(&send_codec, ptime_ms)) {
LOG(LS_WARNING) << "Failed to set packet size for codec "
<< send_codec.plname;
return false;
@@ -1687,7 +1574,7 @@ bool WebRtcVoiceMediaChannel::SetSendCodecs(
// Set Opus internal DTX.
LOG(LS_INFO) << "Attempt to "
- << GetEnableString(enable_opus_dtx)
+ << (enable_opus_dtx ? "enable" : "disable")
<< " Opus DTX on channel "
<< channel;
if (engine()->voe()->codec()->SetOpusDtx(channel, enable_opus_dtx)) {
@@ -1717,25 +1604,17 @@ bool WebRtcVoiceMediaChannel::SetSendCodecs(
SetSendBitrateInternal(send_bitrate_bps_);
}
- // Loop through the codecs list again to config the telephone-event/CN codec.
+ // Loop through the codecs list again to config the CN codec.
for (const AudioCodec& codec : codecs) {
// Ignore codecs we don't know about. The negotiation step should prevent
// this, but double-check to be sure.
webrtc::CodecInst voe_codec;
- if (!engine()->FindWebRtcCodec(codec, &voe_codec)) {
+ if (!WebRtcVoiceEngine::ToCodecInst(codec, &voe_codec)) {
LOG(LS_WARNING) << "Unknown codec " << ToString(codec);
continue;
}
- // Find the DTMF telephone event "codec" and tell VoiceEngine channels
- // about it.
- if (IsCodec(codec, kDtmfCodecName)) {
- if (engine()->voe()->dtmf()->SetSendTelephoneEventPayloadType(
- channel, codec.id) == -1) {
- LOG_RTCERR2(SetSendTelephoneEventPayloadType, channel, codec.id);
- return false;
- }
- } else if (IsCodec(codec, kCnCodecName)) {
+ if (IsCodec(codec, kCnCodecName)) {
// Turn voice activity detection/comfort noise on if supported.
// Set the wideband CN payload type appropriately.
// (narrowband always uses the static payload type 13).
@@ -1789,13 +1668,17 @@ bool WebRtcVoiceMediaChannel::SetSendCodecs(
bool WebRtcVoiceMediaChannel::SetSendCodecs(
const std::vector<AudioCodec>& codecs) {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ // TODO(solenberg): Validate input - that payload types don't overlap, are
+ // within range, filter out codecs we don't support,
+ // redundant codecs etc.
- dtmf_allowed_ = false;
+ // Find the DTMF telephone event "codec" payload type.
+ dtmf_payload_type_ = rtc::Optional<int>();
for (const AudioCodec& codec : codecs) {
- // Find the DTMF telephone event "codec".
if (IsCodec(codec, kDtmfCodecName)) {
- dtmf_allowed_ = true;
+ dtmf_payload_type_ = rtc::Optional<int>(codec.id);
+ break;
}
}
@@ -1808,7 +1691,7 @@ bool WebRtcVoiceMediaChannel::SetSendCodecs(
}
// Set nack status on receive channels and update |nack_enabled_|.
- for (const auto& ch : receive_channels_) {
+ for (const auto& ch : recv_streams_) {
SetNack(ch.second->channel(), nack_enabled_);
}
@@ -1844,106 +1727,6 @@ bool WebRtcVoiceMediaChannel::SetSendCodec(
return true;
}
-bool WebRtcVoiceMediaChannel::SetRecvRtpHeaderExtensions(
- const std::vector<RtpHeaderExtension>& extensions) {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
- if (receive_extensions_ == extensions) {
- return true;
- }
-
- for (const auto& ch : receive_channels_) {
- if (!SetChannelRecvRtpHeaderExtensions(ch.second->channel(), extensions)) {
- return false;
- }
- }
-
- receive_extensions_ = extensions;
-
- // Recreate AudioReceiveStream:s.
- {
- std::vector<webrtc::RtpExtension> exts;
-
- const RtpHeaderExtension* audio_level_extension =
- FindHeaderExtension(extensions, kRtpAudioLevelHeaderExtension);
- if (audio_level_extension) {
- exts.push_back({
- kRtpAudioLevelHeaderExtension, audio_level_extension->id});
- }
-
- const RtpHeaderExtension* send_time_extension =
- FindHeaderExtension(extensions, kRtpAbsoluteSenderTimeHeaderExtension);
- if (send_time_extension) {
- exts.push_back({
- kRtpAbsoluteSenderTimeHeaderExtension, send_time_extension->id});
- }
-
- recv_rtp_extensions_.swap(exts);
- RecreateAudioReceiveStreams();
- }
-
- return true;
-}
-
-bool WebRtcVoiceMediaChannel::SetChannelRecvRtpHeaderExtensions(
- int channel_id, const std::vector<RtpHeaderExtension>& extensions) {
- const RtpHeaderExtension* audio_level_extension =
- FindHeaderExtension(extensions, kRtpAudioLevelHeaderExtension);
- if (!SetHeaderExtension(
- &webrtc::VoERTP_RTCP::SetReceiveAudioLevelIndicationStatus, channel_id,
- audio_level_extension)) {
- return false;
- }
-
- const RtpHeaderExtension* send_time_extension =
- FindHeaderExtension(extensions, kRtpAbsoluteSenderTimeHeaderExtension);
- if (!SetHeaderExtension(
- &webrtc::VoERTP_RTCP::SetReceiveAbsoluteSenderTimeStatus, channel_id,
- send_time_extension)) {
- return false;
- }
-
- return true;
-}
-
-bool WebRtcVoiceMediaChannel::SetSendRtpHeaderExtensions(
- const std::vector<RtpHeaderExtension>& extensions) {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
- if (send_extensions_ == extensions) {
- return true;
- }
-
- for (const auto& ch : send_streams_) {
- if (!SetChannelSendRtpHeaderExtensions(ch.second->channel(), extensions)) {
- return false;
- }
- }
-
- send_extensions_ = extensions;
- return true;
-}
-
-bool WebRtcVoiceMediaChannel::SetChannelSendRtpHeaderExtensions(
- int channel_id, const std::vector<RtpHeaderExtension>& extensions) {
- const RtpHeaderExtension* audio_level_extension =
- FindHeaderExtension(extensions, kRtpAudioLevelHeaderExtension);
-
- if (!SetHeaderExtension(
- &webrtc::VoERTP_RTCP::SetSendAudioLevelIndicationStatus, channel_id,
- audio_level_extension)) {
- return false;
- }
-
- const RtpHeaderExtension* send_time_extension =
- FindHeaderExtension(extensions, kRtpAbsoluteSenderTimeHeaderExtension);
- if (!SetHeaderExtension(
- &webrtc::VoERTP_RTCP::SetSendAbsoluteSenderTimeStatus, channel_id,
- send_time_extension)) {
- return false;
- }
-
- return true;
-}
-
bool WebRtcVoiceMediaChannel::SetPlayout(bool playout) {
desired_playout_ = playout;
return ChangePlayout(desired_playout_);
@@ -1958,12 +1741,12 @@ bool WebRtcVoiceMediaChannel::ResumePlayout() {
}
bool WebRtcVoiceMediaChannel::ChangePlayout(bool playout) {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
if (playout_ == playout) {
return true;
}
- for (const auto& ch : receive_channels_) {
+ for (const auto& ch : recv_streams_) {
if (!SetPlayout(ch.second->channel(), playout)) {
LOG(LS_ERROR) << "SetPlayout " << playout << " on channel "
<< ch.second->channel() << " failed";
@@ -1995,7 +1778,7 @@ bool WebRtcVoiceMediaChannel::ChangeSend(SendFlags send) {
return true;
}
- // Apply channel specific options.
+ // Apply channel specific options when channel is enabled for sending.
if (send == SEND_MICROPHONE) {
engine()->ApplyOptions(options_);
}
@@ -2007,13 +1790,6 @@ bool WebRtcVoiceMediaChannel::ChangeSend(SendFlags send) {
}
}
- // Clear up the options after stopping sending. Since we may previously have
- // applied the channel specific options, now apply the original options stored
- // in WebRtcVoiceEngine.
- if (send == SEND_NOTHING) {
- engine()->ApplyOptions(engine()->GetOptions());
- }
-
send_ = send;
return true;
}
@@ -2039,7 +1815,7 @@ bool WebRtcVoiceMediaChannel::SetAudioSend(uint32_t ssrc,
bool enable,
const AudioOptions* options,
AudioRenderer* renderer) {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
// TODO(solenberg): The state change should be fully rolled back if any one of
// these calls fail.
if (!SetLocalRenderer(ssrc, renderer)) {
@@ -2068,7 +1844,7 @@ int WebRtcVoiceMediaChannel::CreateVoEChannel() {
return id;
}
-bool WebRtcVoiceMediaChannel::DeleteChannel(int channel) {
+bool WebRtcVoiceMediaChannel::DeleteVoEChannel(int channel) {
if (engine()->voe()->network()->DeRegisterExternalTransport(channel) == -1) {
LOG_RTCERR1(DeRegisterExternalTransport, channel);
}
@@ -2080,7 +1856,7 @@ bool WebRtcVoiceMediaChannel::DeleteChannel(int channel) {
}
bool WebRtcVoiceMediaChannel::AddSendStream(const StreamParams& sp) {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
LOG(LS_INFO) << "AddSendStream: " << sp.ToString();
uint32_t ssrc = sp.first_ssrc();
@@ -2097,33 +1873,12 @@ bool WebRtcVoiceMediaChannel::AddSendStream(const StreamParams& sp) {
return false;
}
- // Enable RTCP (for quality stats and feedback messages).
- if (engine()->voe()->rtp()->SetRTCPStatus(channel, true) == -1) {
- LOG_RTCERR2(SetRTCPStatus, channel, 1);
- }
-
- SetChannelSendRtpHeaderExtensions(channel, send_extensions_);
-
- // Set the local (send) SSRC.
- if (engine()->voe()->rtp()->SetLocalSSRC(channel, ssrc) == -1) {
- LOG_RTCERR2(SetLocalSSRC, channel, ssrc);
- DeleteChannel(channel);
- return false;
- }
-
- if (engine()->voe()->rtp()->SetRTCP_CNAME(channel, sp.cname.c_str()) == -1) {
- LOG_RTCERR2(SetRTCP_CNAME, channel, sp.cname);
- DeleteChannel(channel);
- return false;
- }
-
// Save the channel to send_streams_, so that RemoveSendStream() can still
// delete the channel in case failure happens below.
webrtc::AudioTransport* audio_transport =
engine()->voe()->base()->audio_transport();
- send_streams_.insert(
- std::make_pair(ssrc,
- new WebRtcAudioSendStream(channel, audio_transport, ssrc, call_)));
+ send_streams_.insert(std::make_pair(ssrc, new WebRtcAudioSendStream(
+ channel, audio_transport, ssrc, sp.cname, send_rtp_extensions_, call_)));
// Set the current codecs to be used for the new channel. We need to do this
// after adding the channel to send_channels_, because of how max bitrate is
@@ -2138,10 +1893,10 @@ bool WebRtcVoiceMediaChannel::AddSendStream(const StreamParams& sp) {
// with the same SSRC in order to send receiver reports.
if (send_streams_.size() == 1) {
receiver_reports_ssrc_ = ssrc;
- for (const auto& ch : receive_channels_) {
- int recv_channel = ch.second->channel();
+ for (const auto& stream : recv_streams_) {
+ int recv_channel = stream.second->channel();
if (engine()->voe()->rtp()->SetLocalSSRC(recv_channel, ssrc) != 0) {
- LOG_RTCERR2(SetLocalSSRC, ch.second->channel(), ssrc);
+ LOG_RTCERR2(SetLocalSSRC, recv_channel, ssrc);
return false;
}
engine()->voe()->base()->AssociateSendChannel(recv_channel, channel);
@@ -2154,7 +1909,9 @@ bool WebRtcVoiceMediaChannel::AddSendStream(const StreamParams& sp) {
}
bool WebRtcVoiceMediaChannel::RemoveSendStream(uint32_t ssrc) {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ LOG(LS_INFO) << "RemoveSendStream: " << ssrc;
+
auto it = send_streams_.find(ssrc);
if (it == send_streams_.end()) {
LOG(LS_WARNING) << "Try to remove stream with ssrc " << ssrc
@@ -2165,15 +1922,12 @@ bool WebRtcVoiceMediaChannel::RemoveSendStream(uint32_t ssrc) {
int channel = it->second->channel();
ChangeSend(channel, SEND_NOTHING);
- // Delete the WebRtcVoiceChannelRenderer object connected to the channel,
- // this will disconnect the audio renderer with the send channel.
- delete it->second;
- send_streams_.erase(it);
-
- // Clean up and delete the send channel.
+ // Clean up and delete the send stream+channel.
LOG(LS_INFO) << "Removing audio send stream " << ssrc
<< " with VoiceEngine channel #" << channel << ".";
- if (!DeleteChannel(channel)) {
+ delete it->second;
+ send_streams_.erase(it);
+ if (!DeleteVoEChannel(channel)) {
return false;
}
if (send_streams_.empty()) {
@@ -2183,14 +1937,14 @@ bool WebRtcVoiceMediaChannel::RemoveSendStream(uint32_t ssrc) {
}
bool WebRtcVoiceMediaChannel::AddRecvStream(const StreamParams& sp) {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
LOG(LS_INFO) << "AddRecvStream: " << sp.ToString();
if (!ValidateStreamParams(sp)) {
return false;
}
- uint32_t ssrc = sp.first_ssrc();
+ const uint32_t ssrc = sp.first_ssrc();
if (ssrc == 0) {
LOG(LS_WARNING) << "AddRecvStream with ssrc==0 is not supported.";
return false;
@@ -2202,114 +1956,87 @@ bool WebRtcVoiceMediaChannel::AddRecvStream(const StreamParams& sp) {
RemoveRecvStream(ssrc);
}
- if (receive_channels_.find(ssrc) != receive_channels_.end()) {
+ if (GetReceiveChannelId(ssrc) != -1) {
LOG(LS_ERROR) << "Stream already exists with ssrc " << ssrc;
return false;
}
- RTC_DCHECK(receive_stream_params_.find(ssrc) == receive_stream_params_.end());
// Create a new channel for receiving audio data.
- int channel = CreateVoEChannel();
+ const int channel = CreateVoEChannel();
if (channel == -1) {
return false;
}
- if (!ConfigureRecvChannel(channel)) {
- DeleteChannel(channel);
- return false;
- }
-
- WebRtcAudioReceiveStream* stream = new WebRtcAudioReceiveStream(channel);
- receive_channels_.insert(std::make_pair(ssrc, stream));
- receive_stream_params_[ssrc] = sp;
- AddAudioReceiveStream(ssrc);
-
- LOG(LS_INFO) << "New audio stream " << ssrc
- << " registered to VoiceEngine channel #"
- << channel << ".";
- return true;
-}
-
-bool WebRtcVoiceMediaChannel::ConfigureRecvChannel(int channel) {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
-
- int send_channel = GetSendChannelId(receiver_reports_ssrc_);
- if (send_channel != -1) {
- // Associate receive channel with first send channel (so the receive channel
- // can obtain RTT from the send channel)
- engine()->voe()->base()->AssociateSendChannel(channel, send_channel);
- LOG(LS_INFO) << "VoiceEngine channel #" << channel
- << " is associated with channel #" << send_channel << ".";
- }
- if (engine()->voe()->rtp()->SetLocalSSRC(channel,
- receiver_reports_ssrc_) == -1) {
- LOG_RTCERR1(SetLocalSSRC, channel);
- return false;
- }
// Turn off all supported codecs.
- int ncodecs = engine()->voe()->codec()->NumOfCodecs();
- for (int i = 0; i < ncodecs; ++i) {
- webrtc::CodecInst voe_codec;
- if (engine()->voe()->codec()->GetCodec(i, voe_codec) != -1) {
- voe_codec.pltype = -1;
- if (engine()->voe()->codec()->SetRecPayloadType(
- channel, voe_codec) == -1) {
- LOG_RTCERR2(SetRecPayloadType, channel, ToString(voe_codec));
- return false;
- }
+ // TODO(solenberg): Remove once "no codecs" is the default state of a stream.
+ for (webrtc::CodecInst voe_codec : webrtc::acm2::RentACodec::Database()) {
+ voe_codec.pltype = -1;
+ if (engine()->voe()->codec()->SetRecPayloadType(channel, voe_codec) == -1) {
+ LOG_RTCERR2(SetRecPayloadType, channel, ToString(voe_codec));
+ DeleteVoEChannel(channel);
+ return false;
}
}
// Only enable those configured for this channel.
for (const auto& codec : recv_codecs_) {
webrtc::CodecInst voe_codec;
- if (engine()->FindWebRtcCodec(codec, &voe_codec)) {
+ if (WebRtcVoiceEngine::ToCodecInst(codec, &voe_codec)) {
voe_codec.pltype = codec.id;
if (engine()->voe()->codec()->SetRecPayloadType(
channel, voe_codec) == -1) {
LOG_RTCERR2(SetRecPayloadType, channel, ToString(voe_codec));
+ DeleteVoEChannel(channel);
return false;
}
}
}
- SetNack(channel, nack_enabled_);
-
- // Set RTP header extension for the new channel.
- if (!SetChannelRecvRtpHeaderExtensions(channel, receive_extensions_)) {
- return false;
+ const int send_channel = GetSendChannelId(receiver_reports_ssrc_);
+ if (send_channel != -1) {
+ // Associate receive channel with first send channel (so the receive channel
+ // can obtain RTT from the send channel)
+ engine()->voe()->base()->AssociateSendChannel(channel, send_channel);
+ LOG(LS_INFO) << "VoiceEngine channel #" << channel
+ << " is associated with channel #" << send_channel << ".";
}
+ recv_streams_.insert(std::make_pair(ssrc, new WebRtcAudioReceiveStream(
+ channel, ssrc, receiver_reports_ssrc_,
+ options_.combined_audio_video_bwe.value_or(false), sp.sync_label,
+ recv_rtp_extensions_, call_)));
+
+ SetNack(channel, nack_enabled_);
SetPlayout(channel, playout_);
+
return true;
}
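
AddRecvStream() now iterates webrtc::acm2::RentACodec::Database() instead of querying VoE codec-by-codec. A small sketch of walking that database; the iteration style follows the loop above, and the include path is an assumption for this tree:

#include "webrtc/modules/audio_coding/acm2/rent_a_codec.h"  // path assumed

void LogCodecDatabase() {
  for (webrtc::CodecInst codec : webrtc::acm2::RentACodec::Database()) {
    LOG(LS_INFO) << "codec " << codec.plname << "/" << codec.plfreq
                 << " channels=" << codec.channels
                 << " default pltype=" << codec.pltype;
  }
}
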
bool WebRtcVoiceMediaChannel::RemoveRecvStream(uint32_t ssrc) {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
LOG(LS_INFO) << "RemoveRecvStream: " << ssrc;
- auto it = receive_channels_.find(ssrc);
- if (it == receive_channels_.end()) {
+ const auto it = recv_streams_.find(ssrc);
+ if (it == recv_streams_.end()) {
LOG(LS_WARNING) << "Try to remove stream with ssrc " << ssrc
<< " which doesn't exist.";
return false;
}
- RemoveAudioReceiveStream(ssrc);
- receive_stream_params_.erase(ssrc);
-
- const int channel = it->second->channel();
- delete it->second;
- receive_channels_.erase(it);
-
// Deregister default channel, if that's the one being destroyed.
if (IsDefaultRecvStream(ssrc)) {
default_recv_ssrc_ = -1;
}
- LOG(LS_INFO) << "Removing audio stream " << ssrc
+ const int channel = it->second->channel();
+
+ // Clean up and delete the receive stream+channel.
+ LOG(LS_INFO) << "Removing audio receive stream " << ssrc
<< " with VoiceEngine channel #" << channel << ".";
- return DeleteChannel(channel);
+ it->second->SetRawAudioSink(nullptr);
+ delete it->second;
+ recv_streams_.erase(it);
+ return DeleteVoEChannel(channel);
}
bool WebRtcVoiceMediaChannel::SetLocalRenderer(uint32_t ssrc,
@@ -2337,9 +2064,9 @@ bool WebRtcVoiceMediaChannel::SetLocalRenderer(uint32_t ssrc,
bool WebRtcVoiceMediaChannel::GetActiveStreams(
AudioInfo::StreamList* actives) {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
actives->clear();
- for (const auto& ch : receive_channels_) {
+ for (const auto& ch : recv_streams_) {
int level = GetOutputLevel(ch.second->channel());
if (level > 0) {
actives->push_back(std::make_pair(ch.first, level));
@@ -2349,9 +2076,9 @@ bool WebRtcVoiceMediaChannel::GetActiveStreams(
}
int WebRtcVoiceMediaChannel::GetOutputLevel() {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
int highest = 0;
- for (const auto& ch : receive_channels_) {
+ for (const auto& ch : recv_streams_) {
highest = std::max(GetOutputLevel(ch.second->channel()), highest);
}
return highest;
@@ -2383,7 +2110,7 @@ void WebRtcVoiceMediaChannel::SetTypingDetectionParameters(int time_window,
}
bool WebRtcVoiceMediaChannel::SetOutputVolume(uint32_t ssrc, double volume) {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
if (ssrc == 0) {
default_recv_volume_ = volume;
if (default_recv_ssrc_ == -1) {
@@ -2408,64 +2135,48 @@ bool WebRtcVoiceMediaChannel::SetOutputVolume(uint32_t ssrc, double volume) {
}
bool WebRtcVoiceMediaChannel::CanInsertDtmf() {
- return dtmf_allowed_;
+ return dtmf_payload_type_ ? true : false;
}
-bool WebRtcVoiceMediaChannel::InsertDtmf(uint32_t ssrc,
- int event,
- int duration,
- int flags) {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
- if (!dtmf_allowed_) {
+bool WebRtcVoiceMediaChannel::InsertDtmf(uint32_t ssrc, int event,
+ int duration) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ LOG(LS_INFO) << "WebRtcVoiceMediaChannel::InsertDtmf";
+ if (!dtmf_payload_type_) {
return false;
}
- // Send the event.
- if (flags & cricket::DF_SEND) {
- int channel = -1;
- if (ssrc == 0) {
- if (send_streams_.size() > 0) {
- channel = send_streams_.begin()->second->channel();
- }
- } else {
- channel = GetSendChannelId(ssrc);
- }
- if (channel == -1) {
- LOG(LS_WARNING) << "InsertDtmf - The specified ssrc "
- << ssrc << " is not in use.";
- return false;
- }
- // Send DTMF using out-of-band DTMF. ("true", as 3rd arg)
- if (engine()->voe()->dtmf()->SendTelephoneEvent(
- channel, event, true, duration) == -1) {
- LOG_RTCERR4(SendTelephoneEvent, channel, event, true, duration);
- return false;
- }
+ // Figure out which WebRtcAudioSendStream to send the event on.
+ auto it = ssrc != 0 ? send_streams_.find(ssrc) : send_streams_.begin();
+ if (it == send_streams_.end()) {
+ LOG(LS_WARNING) << "The specified ssrc " << ssrc << " is not in use.";
+ return false;
}
-
- // Play the event.
- if (flags & cricket::DF_PLAY) {
- // Play DTMF tone locally.
- if (engine()->voe()->dtmf()->PlayDtmfTone(event, duration) == -1) {
- LOG_RTCERR2(PlayDtmfTone, event, duration);
- return false;
- }
+ if (event < kMinTelephoneEventCode ||
+ event > kMaxTelephoneEventCode) {
+ LOG(LS_WARNING) << "DTMF event code " << event << " out of range.";
+ return false;
}
-
- return true;
+ if (duration < kMinTelephoneEventDuration ||
+ duration > kMaxTelephoneEventDuration) {
+ LOG(LS_WARNING) << "DTMF event duration " << duration << " out of range.";
+ return false;
+ }
+ return it->second->SendTelephoneEvent(*dtmf_payload_type_, event, duration);
}
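
InsertDtmf() now range-checks both the event code and the duration before handing the event to the send stream. Below is a standalone validity check mirroring those bounds; the numeric limits are assumptions based on RFC 4733 telephone-events (codes 0-255) and typical duration clamps, not values quoted by the patch:

bool IsValidTelephoneEvent(int event, int duration_ms) {
  const int kMinEventCode = 0;       // assumed lower bound (RFC 4733)
  const int kMaxEventCode = 255;     // assumed upper bound (RFC 4733)
  const int kMinDurationMs = 100;    // assumed minimum duration
  const int kMaxDurationMs = 60000;  // assumed maximum duration
  return event >= kMinEventCode && event <= kMaxEventCode &&
         duration_ms >= kMinDurationMs && duration_ms <= kMaxDurationMs;
}
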
void WebRtcVoiceMediaChannel::OnPacketReceived(
rtc::Buffer* packet, const rtc::PacketTime& packet_time) {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
uint32_t ssrc = 0;
if (!GetRtpSsrc(packet->data(), packet->size(), &ssrc)) {
return;
}
- if (receive_channels_.empty()) {
- // Create new channel, which will be the default receive channel.
+ // If we don't have a default channel, and the SSRC is unknown, create a
+ // default channel.
+ if (default_recv_ssrc_ == -1 && GetReceiveChannelId(ssrc) == -1) {
StreamParams sp;
sp.ssrcs.push_back(ssrc);
LOG(LS_INFO) << "Creating default receive stream for SSRC=" << ssrc << ".";
@@ -2485,7 +2196,13 @@ void WebRtcVoiceMediaChannel::OnPacketReceived(
reinterpret_cast<const uint8_t*>(packet->data()), packet->size(),
webrtc_packet_time);
if (webrtc::PacketReceiver::DELIVERY_OK != delivery_result) {
- return;
+ // If the SSRC is unknown here, route it to the default channel, if we have
+ // one. See: https://bugs.chromium.org/p/webrtc/issues/detail?id=5208
+ if (default_recv_ssrc_ == -1) {
+ return;
+ } else {
+ ssrc = default_recv_ssrc_;
+ }
}
// Find the channel to send this packet to. It must exist since webrtc::Call
@@ -2500,7 +2217,7 @@ void WebRtcVoiceMediaChannel::OnPacketReceived(
void WebRtcVoiceMediaChannel::OnRtcpReceived(
rtc::Buffer* packet, const rtc::PacketTime& packet_time) {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
// Forward packet to Call as well.
const webrtc::PacketTime webrtc_packet_time(packet_time.timestamp,
@@ -2542,7 +2259,7 @@ void WebRtcVoiceMediaChannel::OnRtcpReceived(
}
bool WebRtcVoiceMediaChannel::MuteStream(uint32_t ssrc, bool muted) {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
int channel = GetSendChannelId(ssrc);
if (channel == -1) {
LOG(LS_WARNING) << "The specified ssrc " << ssrc << " is not in use.";
@@ -2601,7 +2318,7 @@ bool WebRtcVoiceMediaChannel::SetSendBitrateInternal(int bps) {
return true;
webrtc::CodecInst codec = *send_codec_;
- bool is_multi_rate = IsCodecMultiRate(codec);
+ bool is_multi_rate = WebRtcVoiceCodecs::IsCodecMultiRate(codec);
if (is_multi_rate) {
// If codec is multi-rate then just set the bitrate.
@@ -2629,7 +2346,7 @@ bool WebRtcVoiceMediaChannel::SetSendBitrateInternal(int bps) {
}
bool WebRtcVoiceMediaChannel::GetStats(VoiceMediaInfo* info) {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
RTC_DCHECK(info);
// Get SSRC and stats for each sender.
@@ -2652,15 +2369,14 @@ bool WebRtcVoiceMediaChannel::GetStats(VoiceMediaInfo* info) {
sinfo.echo_delay_std_ms = stats.echo_delay_std_ms;
sinfo.echo_return_loss = stats.echo_return_loss;
sinfo.echo_return_loss_enhancement = stats.echo_return_loss_enhancement;
- sinfo.typing_noise_detected = typing_noise_detected_;
- // TODO(solenberg): Move to AudioSendStream.
- // sinfo.typing_noise_detected = stats.typing_noise_detected;
+ sinfo.typing_noise_detected =
+ (send_ == SEND_NOTHING ? false : stats.typing_noise_detected);
info->senders.push_back(sinfo);
}
// Get SSRC and stats for each receiver.
RTC_DCHECK(info->receivers.size() == 0);
- for (const auto& stream : receive_streams_) {
+ for (const auto& stream : recv_streams_) {
webrtc::AudioReceiveStream::Stats stats = stream.second->GetStats();
VoiceReceiverInfo rinfo;
rinfo.add_ssrc(stats.remote_ssrc);
@@ -2694,15 +2410,17 @@ bool WebRtcVoiceMediaChannel::GetStats(VoiceMediaInfo* info) {
return true;
}
-void WebRtcVoiceMediaChannel::OnError(int error) {
- if (send_ == SEND_NOTHING) {
+void WebRtcVoiceMediaChannel::SetRawAudioSink(
+ uint32_t ssrc,
+ rtc::scoped_ptr<webrtc::AudioSinkInterface> sink) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ LOG(LS_VERBOSE) << "WebRtcVoiceMediaChannel::SetRawAudioSink";
+ const auto it = recv_streams_.find(ssrc);
+ if (it == recv_streams_.end()) {
+ LOG(LS_WARNING) << "SetRawAudioSink: no recv stream" << ssrc;
return;
}
- if (error == VE_TYPING_NOISE_WARNING) {
- typing_noise_detected_ = true;
- } else if (error == VE_TYPING_NOISE_OFF_WARNING) {
- typing_noise_detected_ = false;
- }
+ it->second->SetRawAudioSink(std::move(sink));
}
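
A short caller sketch for the new SetRawAudioSink(): passing nullptr detaches any installed sink, the same call RemoveRecvStream() makes above before deleting a stream; attaching a sink instead requires an implementation of webrtc::AudioSinkInterface (not shown here).

void DetachSink(cricket::WebRtcVoiceMediaChannel* channel, uint32_t ssrc) {
  // nullptr detaches whatever sink is currently installed on |ssrc|.
  channel->SetRawAudioSink(ssrc, nullptr);
}
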
int WebRtcVoiceMediaChannel::GetOutputLevel(int channel) {
@@ -2712,16 +2430,16 @@ int WebRtcVoiceMediaChannel::GetOutputLevel(int channel) {
}
int WebRtcVoiceMediaChannel::GetReceiveChannelId(uint32_t ssrc) const {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
- const auto it = receive_channels_.find(ssrc);
- if (it != receive_channels_.end()) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ const auto it = recv_streams_.find(ssrc);
+ if (it != recv_streams_.end()) {
return it->second->channel();
}
return -1;
}
int WebRtcVoiceMediaChannel::GetSendChannelId(uint32_t ssrc) const {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
const auto it = send_streams_.find(ssrc);
if (it != send_streams_.end()) {
return it->second->channel();
@@ -2762,7 +2480,7 @@ bool WebRtcVoiceMediaChannel::GetRedSendCodec(const AudioCodec& red_codec,
if (codec.id == red_pt) {
// If we find the right codec, that will be the codec we pass to
// SetSendCodec, with the desired payload type.
- if (engine()->FindWebRtcCodec(codec, send_codec)) {
+ if (WebRtcVoiceEngine::ToCodecInst(codec, send_codec)) {
return true;
} else {
break;
@@ -2786,117 +2504,6 @@ bool WebRtcVoiceMediaChannel::SetPlayout(int channel, bool playout) {
}
return true;
}
-
-// Convert VoiceEngine error code into VoiceMediaChannel::Error enum.
-VoiceMediaChannel::Error
- WebRtcVoiceMediaChannel::WebRtcErrorToChannelError(int err_code) {
- switch (err_code) {
- case 0:
- return ERROR_NONE;
- case VE_CANNOT_START_RECORDING:
- case VE_MIC_VOL_ERROR:
- case VE_GET_MIC_VOL_ERROR:
- case VE_CANNOT_ACCESS_MIC_VOL:
- return ERROR_REC_DEVICE_OPEN_FAILED;
- case VE_SATURATION_WARNING:
- return ERROR_REC_DEVICE_SATURATION;
- case VE_REC_DEVICE_REMOVED:
- return ERROR_REC_DEVICE_REMOVED;
- case VE_RUNTIME_REC_WARNING:
- case VE_RUNTIME_REC_ERROR:
- return ERROR_REC_RUNTIME_ERROR;
- case VE_CANNOT_START_PLAYOUT:
- case VE_SPEAKER_VOL_ERROR:
- case VE_GET_SPEAKER_VOL_ERROR:
- case VE_CANNOT_ACCESS_SPEAKER_VOL:
- return ERROR_PLAY_DEVICE_OPEN_FAILED;
- case VE_RUNTIME_PLAY_WARNING:
- case VE_RUNTIME_PLAY_ERROR:
- return ERROR_PLAY_RUNTIME_ERROR;
- case VE_TYPING_NOISE_WARNING:
- return ERROR_REC_TYPING_NOISE_DETECTED;
- default:
- return VoiceMediaChannel::ERROR_OTHER;
- }
-}
-
-bool WebRtcVoiceMediaChannel::SetHeaderExtension(ExtensionSetterFunction setter,
- int channel_id, const RtpHeaderExtension* extension) {
- bool enable = false;
- int id = 0;
- std::string uri;
- if (extension) {
- enable = true;
- id = extension->id;
- uri = extension->uri;
- }
- if ((engine()->voe()->rtp()->*setter)(channel_id, enable, id) != 0) {
- LOG_RTCERR4(*setter, uri, channel_id, enable, id);
- return false;
- }
- return true;
-}
-
-void WebRtcVoiceMediaChannel::RecreateAudioReceiveStreams() {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
- for (const auto& it : receive_channels_) {
- RemoveAudioReceiveStream(it.first);
- }
- for (const auto& it : receive_channels_) {
- AddAudioReceiveStream(it.first);
- }
-}
-
-void WebRtcVoiceMediaChannel::AddAudioReceiveStream(uint32_t ssrc) {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
- WebRtcAudioReceiveStream* stream = receive_channels_[ssrc];
- RTC_DCHECK(stream != nullptr);
- RTC_DCHECK(receive_streams_.find(ssrc) == receive_streams_.end());
- webrtc::AudioReceiveStream::Config config;
- config.rtp.remote_ssrc = ssrc;
- // Only add RTP extensions if we support combined A/V BWE.
- config.rtp.extensions = recv_rtp_extensions_;
- config.combined_audio_video_bwe =
- options_.combined_audio_video_bwe.GetWithDefaultIfUnset(false);
- config.voe_channel_id = stream->channel();
- config.sync_group = receive_stream_params_[ssrc].sync_label;
- webrtc::AudioReceiveStream* s = call_->CreateAudioReceiveStream(config);
- receive_streams_.insert(std::make_pair(ssrc, s));
-}
-
-void WebRtcVoiceMediaChannel::RemoveAudioReceiveStream(uint32_t ssrc) {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
- auto stream_it = receive_streams_.find(ssrc);
- if (stream_it != receive_streams_.end()) {
- call_->DestroyAudioReceiveStream(stream_it->second);
- receive_streams_.erase(stream_it);
- }
-}
-
-bool WebRtcVoiceMediaChannel::SetRecvCodecsInternal(
- const std::vector<AudioCodec>& new_codecs) {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
- for (const AudioCodec& codec : new_codecs) {
- webrtc::CodecInst voe_codec;
- if (engine()->FindWebRtcCodec(codec, &voe_codec)) {
- LOG(LS_INFO) << ToString(codec);
- voe_codec.pltype = codec.id;
- for (const auto& ch : receive_channels_) {
- if (engine()->voe()->codec()->SetRecPayloadType(
- ch.second->channel(), voe_codec) == -1) {
- LOG_RTCERR2(SetRecPayloadType, ch.second->channel(),
- ToString(voe_codec));
- return false;
- }
- }
- } else {
- LOG(LS_WARNING) << "Unknown codec " << ToString(codec);
- return false;
- }
- }
- return true;
-}
-
} // namespace cricket
#endif // HAVE_WEBRTC_VOICE
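
(Editor's note on the .cc changes above: with OnError() and WebRtcErrorToChannelError() deleted, typing-noise detection is no longer latched from VoiceEngine warning callbacks. GetStats() now reads it per sender from AudioSendStream stats and forces it to false while the channel is not sending. A condensed restatement of that rule, using only names from the hunks above:)

// Sketch of the new reporting rule from the GetStats() hunk:
// typing noise is surfaced only while the channel is actually sending.
bool ReportedTypingNoise(cricket::WebRtcVoiceMediaChannel::SendFlags send,
                         const webrtc::AudioSendStream::Stats& stats) {
  return send == cricket::WebRtcVoiceMediaChannel::SEND_NOTHING
             ? false
             : stats.typing_noise_detected;
}
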
diff --git a/talk/media/webrtc/webrtcvoiceengine.h b/talk/media/webrtc/webrtcvoiceengine.h
index 1cf05e71a2..0f2f59e492 100644
--- a/talk/media/webrtc/webrtcvoiceengine.h
+++ b/talk/media/webrtc/webrtcvoiceengine.h
@@ -29,7 +29,6 @@
#define TALK_MEDIA_WEBRTCVOICEENGINE_H_
#include <map>
-#include <set>
#include <string>
#include <vector>
@@ -37,9 +36,8 @@
#include "talk/media/webrtc/webrtccommon.h"
#include "talk/media/webrtc/webrtcvoe.h"
#include "talk/session/media/channel.h"
+#include "webrtc/audio_state.h"
#include "webrtc/base/buffer.h"
-#include "webrtc/base/byteorder.h"
-#include "webrtc/base/logging.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/stream.h"
#include "webrtc/base/thread_checker.h"
@@ -51,43 +49,34 @@ namespace cricket {
class AudioDeviceModule;
class AudioRenderer;
-class VoETraceWrapper;
class VoEWrapper;
class WebRtcVoiceMediaChannel;
// WebRtcVoiceEngine is a class to be used with CompositeMediaEngine.
// It uses the WebRtc VoiceEngine library for audio handling.
-class WebRtcVoiceEngine
- : public webrtc::VoiceEngineObserver,
- public webrtc::TraceCallback {
+class WebRtcVoiceEngine final : public webrtc::TraceCallback {
friend class WebRtcVoiceMediaChannel;
-
public:
+ // Exposed for the WVoE/MC unit test.
+ static bool ToCodecInst(const AudioCodec& in, webrtc::CodecInst* out);
+
WebRtcVoiceEngine();
// Dependency injection for testing.
- WebRtcVoiceEngine(VoEWrapper* voe_wrapper, VoETraceWrapper* tracing);
+ explicit WebRtcVoiceEngine(VoEWrapper* voe_wrapper);
~WebRtcVoiceEngine();
bool Init(rtc::Thread* worker_thread);
void Terminate();
- webrtc::VoiceEngine* GetVoE() { return voe()->engine(); }
+ rtc::scoped_refptr<webrtc::AudioState> GetAudioState() const;
VoiceMediaChannel* CreateChannel(webrtc::Call* call,
const AudioOptions& options);
- AudioOptions GetOptions() const { return options_; }
- bool SetOptions(const AudioOptions& options);
- bool SetDevices(const Device* in_device, const Device* out_device);
bool GetOutputVolume(int* level);
bool SetOutputVolume(int level);
int GetInputLevel();
const std::vector<AudioCodec>& codecs();
- bool FindCodec(const AudioCodec& codec);
- bool FindWebRtcCodec(const AudioCodec& codec, webrtc::CodecInst* gcodec);
-
- const std::vector<RtpHeaderExtension>& rtp_header_extensions() const;
-
- void SetLogging(int min_sev, const char* filter);
+ RtpCapabilities GetCapabilities() const;
// For tracking WebRtc channels. Needed because we have to pause them
// all when switching devices.
@@ -120,68 +109,49 @@ class WebRtcVoiceEngine
private:
void Construct();
- void ConstructCodecs();
- bool GetVoeCodec(int index, webrtc::CodecInst* codec);
bool InitInternal();
- void SetTraceFilter(int filter);
- void SetTraceOptions(const std::string& options);
// Every option that is "set" will be applied. Every option not "set" will be
// ignored. This allows us to selectively turn on and off different options
// easily at any time.
bool ApplyOptions(const AudioOptions& options);
+ void SetDefaultDevices();
// webrtc::TraceCallback:
void Print(webrtc::TraceLevel level, const char* trace, int length) override;
- // webrtc::VoiceEngineObserver:
- void CallbackOnError(int channel_id, int errCode) override;
-
- // Given the device type, name, and id, find device id. Return true and
- // set the output parameter rtc_id if successful.
- bool FindWebRtcAudioDeviceId(
- bool is_input, const std::string& dev_name, int dev_id, int* rtc_id);
-
void StartAecDump(const std::string& filename);
int CreateVoEChannel();
- static const int kDefaultLogSeverity = rtc::LS_WARNING;
+ rtc::ThreadChecker signal_thread_checker_;
+ rtc::ThreadChecker worker_thread_checker_;
// The primary instance of WebRtc VoiceEngine.
rtc::scoped_ptr<VoEWrapper> voe_wrapper_;
- rtc::scoped_ptr<VoETraceWrapper> tracing_;
+ rtc::scoped_refptr<webrtc::AudioState> audio_state_;
// The external audio device manager
- webrtc::AudioDeviceModule* adm_;
- int log_filter_;
- std::string log_options_;
- bool is_dumping_aec_;
+ webrtc::AudioDeviceModule* adm_ = nullptr;
std::vector<AudioCodec> codecs_;
- std::vector<RtpHeaderExtension> rtp_header_extensions_;
std::vector<WebRtcVoiceMediaChannel*> channels_;
- // channels_ can be read from WebRtc callback thread. We need a lock on that
- // callback as well as the RegisterChannel/UnregisterChannel.
- rtc::CriticalSection channels_cs_;
- webrtc::AgcConfig default_agc_config_;
-
webrtc::Config voe_config_;
+ bool initialized_ = false;
+ bool is_dumping_aec_ = false;
- bool initialized_;
- AudioOptions options_;
-
+ webrtc::AgcConfig default_agc_config_;
// Cache received extended_filter_aec, delay_agnostic_aec and experimental_ns
// values, and apply them in case they are missing in the audio options. We
// need to do this because SetExtraOptions() will revert to defaults for
// options which are not provided.
- Settable<bool> extended_filter_aec_;
- Settable<bool> delay_agnostic_aec_;
- Settable<bool> experimental_ns_;
+ rtc::Optional<bool> extended_filter_aec_;
+ rtc::Optional<bool> delay_agnostic_aec_;
+ rtc::Optional<bool> experimental_ns_;
RTC_DISALLOW_COPY_AND_ASSIGN(WebRtcVoiceEngine);
};
// WebRtcVoiceMediaChannel is an implementation of VoiceMediaChannel that uses
// WebRtc Voice Engine.
-class WebRtcVoiceMediaChannel : public VoiceMediaChannel,
- public webrtc::Transport {
+class WebRtcVoiceMediaChannel final : public VoiceMediaChannel,
+ public webrtc::Transport {
public:
WebRtcVoiceMediaChannel(WebRtcVoiceEngine* engine,
const AudioOptions& options,
@@ -217,7 +187,7 @@ class WebRtcVoiceMediaChannel : public VoiceMediaChannel,
bool SetOutputVolume(uint32_t ssrc, double volume) override;
bool CanInsertDtmf() override;
- bool InsertDtmf(uint32_t ssrc, int event, int duration, int flags) override;
+ bool InsertDtmf(uint32_t ssrc, int event, int duration) override;
void OnPacketReceived(rtc::Buffer* packet,
const rtc::PacketTime& packet_time) override;
@@ -226,6 +196,10 @@ class WebRtcVoiceMediaChannel : public VoiceMediaChannel,
void OnReadyToSend(bool ready) override {}
bool GetStats(VoiceMediaInfo* info) override;
+ void SetRawAudioSink(
+ uint32_t ssrc,
+ rtc::scoped_ptr<webrtc::AudioSinkInterface> sink) override;
+
// implements Transport interface
bool SendRtp(const uint8_t* data,
size_t len,
@@ -243,20 +217,14 @@ class WebRtcVoiceMediaChannel : public VoiceMediaChannel,
return VoiceMediaChannel::SendRtcp(&packet, rtc::PacketOptions());
}
- void OnError(int error);
-
int GetReceiveChannelId(uint32_t ssrc) const;
int GetSendChannelId(uint32_t ssrc) const;
private:
bool SetSendCodecs(const std::vector<AudioCodec>& codecs);
- bool SetSendRtpHeaderExtensions(
- const std::vector<RtpHeaderExtension>& extensions);
bool SetOptions(const AudioOptions& options);
bool SetMaxSendBandwidth(int bps);
bool SetRecvCodecs(const std::vector<AudioCodec>& codecs);
- bool SetRecvRtpHeaderExtensions(
- const std::vector<RtpHeaderExtension>& extensions);
bool SetLocalRenderer(uint32_t ssrc, AudioRenderer* renderer);
bool MuteStream(uint32_t ssrc, bool mute);
@@ -267,82 +235,55 @@ class WebRtcVoiceMediaChannel : public VoiceMediaChannel,
const std::vector<AudioCodec>& all_codecs,
webrtc::CodecInst* send_codec);
bool SetPlayout(int channel, bool playout);
- static Error WebRtcErrorToChannelError(int err_code);
-
- typedef int (webrtc::VoERTP_RTCP::* ExtensionSetterFunction)(int, bool,
- unsigned char);
-
void SetNack(int channel, bool nack_enabled);
bool SetSendCodec(int channel, const webrtc::CodecInst& send_codec);
bool ChangePlayout(bool playout);
bool ChangeSend(SendFlags send);
bool ChangeSend(int channel, SendFlags send);
- bool ConfigureRecvChannel(int channel);
int CreateVoEChannel();
- bool DeleteChannel(int channel);
+ bool DeleteVoEChannel(int channel);
bool IsDefaultRecvStream(uint32_t ssrc) {
return default_recv_ssrc_ == static_cast<int64_t>(ssrc);
}
bool SetSendCodecs(int channel, const std::vector<AudioCodec>& codecs);
bool SetSendBitrateInternal(int bps);
- bool SetHeaderExtension(ExtensionSetterFunction setter, int channel_id,
- const RtpHeaderExtension* extension);
- void RecreateAudioReceiveStreams();
- void AddAudioReceiveStream(uint32_t ssrc);
- void RemoveAudioReceiveStream(uint32_t ssrc);
- bool SetRecvCodecsInternal(const std::vector<AudioCodec>& new_codecs);
-
- bool SetChannelRecvRtpHeaderExtensions(
- int channel_id,
- const std::vector<RtpHeaderExtension>& extensions);
- bool SetChannelSendRtpHeaderExtensions(
- int channel_id,
- const std::vector<RtpHeaderExtension>& extensions);
+ rtc::ThreadChecker worker_thread_checker_;
- rtc::ThreadChecker thread_checker_;
-
- WebRtcVoiceEngine* const engine_;
+ WebRtcVoiceEngine* const engine_ = nullptr;
std::vector<AudioCodec> recv_codecs_;
std::vector<AudioCodec> send_codecs_;
rtc::scoped_ptr<webrtc::CodecInst> send_codec_;
- bool send_bitrate_setting_;
- int send_bitrate_bps_;
+ bool send_bitrate_setting_ = false;
+ int send_bitrate_bps_ = 0;
AudioOptions options_;
- bool dtmf_allowed_;
- bool desired_playout_;
- bool nack_enabled_;
- bool playout_;
- bool typing_noise_detected_;
- SendFlags desired_send_;
- SendFlags send_;
- webrtc::Call* const call_;
+ rtc::Optional<int> dtmf_payload_type_;
+ bool desired_playout_ = false;
+ bool nack_enabled_ = false;
+ bool playout_ = false;
+ SendFlags desired_send_ = SEND_NOTHING;
+ SendFlags send_ = SEND_NOTHING;
+ webrtc::Call* const call_ = nullptr;
// SSRC of unsignalled receive stream, or -1 if there isn't one.
int64_t default_recv_ssrc_ = -1;
// Volume for unsignalled stream, which may be set before the stream exists.
double default_recv_volume_ = 1.0;
- // SSRC to use for RTCP receiver reports; default to 1 in case of no signaled
+ // Default SSRC to use for RTCP receiver reports in case of no signaled
// send streams. See: https://code.google.com/p/webrtc/issues/detail?id=4740
- uint32_t receiver_reports_ssrc_ = 1;
+ // and https://code.google.com/p/chromium/issues/detail?id=547661
+ uint32_t receiver_reports_ssrc_ = 0xFA17FA17u;
class WebRtcAudioSendStream;
std::map<uint32_t, WebRtcAudioSendStream*> send_streams_;
- std::vector<RtpHeaderExtension> send_extensions_;
+ std::vector<webrtc::RtpExtension> send_rtp_extensions_;
class WebRtcAudioReceiveStream;
- std::map<uint32_t, WebRtcAudioReceiveStream*> receive_channels_;
- std::map<uint32_t, webrtc::AudioReceiveStream*> receive_streams_;
- std::map<uint32_t, StreamParams> receive_stream_params_;
- // receive_channels_ can be read from WebRtc callback thread. Access from
- // the WebRtc thread must be synchronized with edits on the worker thread.
- // Reads on the worker thread are ok.
- std::vector<RtpHeaderExtension> receive_extensions_;
+ std::map<uint32_t, WebRtcAudioReceiveStream*> recv_streams_;
std::vector<webrtc::RtpExtension> recv_rtp_extensions_;
RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(WebRtcVoiceMediaChannel);
};
-
} // namespace cricket
#endif // TALK_MEDIA_WEBRTCVOICEENGINE_H_
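
(Editor's note on the header changes above: the cached AEC/NS flags move from Settable<bool> to rtc::Optional<bool>, which is what lets ApplyOptions() keep re-applying previously received values that a later SetExtraOptions() call omits. For readers unfamiliar with the newer type, this sketch maps the old Set()/Clear()/GetWithDefaultIfUnset() idiom onto Optional. It assumes rtc::Optional's usual explicit operator bool and operator* accessors; the GetWithDefault helper is a local stand-in, since this revision may not provide value_or().)

#include "webrtc/base/optional.h"

// Local helper standing in for value_or(), which this rtc::Optional may lack.
bool GetWithDefault(const rtc::Optional<bool>& opt, bool default_value) {
  return opt ? *opt : default_value;
}

void OptionalMigrationExample() {
  rtc::Optional<bool> extended_filter_aec;          // unset, like Settable<>.
  extended_filter_aec = rtc::Optional<bool>(true);  // was: .Set(true)
  bool on = GetWithDefault(extended_filter_aec, false);
  extended_filter_aec = rtc::Optional<bool>();      // was: .Clear()
  (void)on;
}
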
diff --git a/talk/media/webrtc/webrtcvoiceengine_unittest.cc b/talk/media/webrtc/webrtcvoiceengine_unittest.cc
index ce5115cb10..a62bcb225f 100644
--- a/talk/media/webrtc/webrtcvoiceengine_unittest.cc
+++ b/talk/media/webrtc/webrtcvoiceengine_unittest.cc
@@ -25,6 +25,7 @@
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/byteorder.h"
#include "webrtc/base/gunit.h"
#include "webrtc/call.h"
@@ -53,10 +54,6 @@ const cricket::AudioCodec kCn8000Codec(13, "CN", 8000, 0, 1, 0);
const cricket::AudioCodec kCn16000Codec(105, "CN", 16000, 0, 1, 0);
const cricket::AudioCodec kTelephoneEventCodec(106, "telephone-event", 8000, 0,
1, 0);
-const cricket::AudioCodec* const kAudioCodecs[] = {
- &kPcmuCodec, &kIsacCodec, &kOpusCodec, &kG722CodecVoE, &kRedCodec,
- &kCn8000Codec, &kCn16000Codec, &kTelephoneEventCodec,
-};
const uint32_t kSsrc1 = 0x99;
const uint32_t kSsrc2 = 0x98;
const uint32_t kSsrcs4[] = { 1, 2, 3, 4 };
@@ -67,37 +64,22 @@ class FakeVoEWrapper : public cricket::VoEWrapper {
: cricket::VoEWrapper(engine, // processing
engine, // base
engine, // codec
- engine, // dtmf
engine, // hw
engine, // network
engine, // rtp
engine) { // volume
}
};
-
-class FakeVoETraceWrapper : public cricket::VoETraceWrapper {
- public:
- int SetTraceFilter(const unsigned int filter) override {
- filter_ = filter;
- return 0;
- }
- int SetTraceFile(const char* fileNameUTF8) override { return 0; }
- int SetTraceCallback(webrtc::TraceCallback* callback) override { return 0; }
- unsigned int filter_;
-};
} // namespace
class WebRtcVoiceEngineTestFake : public testing::Test {
public:
WebRtcVoiceEngineTestFake()
: call_(webrtc::Call::Config()),
- voe_(kAudioCodecs, ARRAY_SIZE(kAudioCodecs)),
- trace_wrapper_(new FakeVoETraceWrapper()),
- engine_(new FakeVoEWrapper(&voe_), trace_wrapper_),
+ engine_(new FakeVoEWrapper(&voe_)),
channel_(nullptr) {
send_parameters_.codecs.push_back(kPcmuCodec);
recv_parameters_.codecs.push_back(kPcmuCodec);
- options_adjust_agc_.adjust_agc_delta.Set(-10);
}
bool SetupEngine() {
if (!engine_.Init(rtc::Thread::Current())) {
@@ -123,12 +105,10 @@ class WebRtcVoiceEngineTestFake : public testing::Test {
void SetupForMultiSendStream() {
EXPECT_TRUE(SetupEngineWithSendStream());
// Remove stream added in Setup.
- int default_channel_num = voe_.GetLastChannel();
- EXPECT_EQ(kSsrc1, voe_.GetLocalSSRC(default_channel_num));
+ EXPECT_TRUE(call_.GetAudioSendStream(kSsrc1));
EXPECT_TRUE(channel_->RemoveSendStream(kSsrc1));
-
// Verify the channel does not exist.
- EXPECT_EQ(-1, voe_.GetChannelFromLocalSsrc(kSsrc1));
+ EXPECT_FALSE(call_.GetAudioSendStream(kSsrc1));
}
void DeliverPacket(const void* data, int len) {
rtc::Buffer packet(reinterpret_cast<const uint8_t*>(data), len);
@@ -139,6 +119,24 @@ class WebRtcVoiceEngineTestFake : public testing::Test {
engine_.Terminate();
}
+ const cricket::FakeAudioSendStream& GetSendStream(uint32_t ssrc) {
+ const auto* send_stream = call_.GetAudioSendStream(ssrc);
+ EXPECT_TRUE(send_stream);
+ return *send_stream;
+ }
+
+ const webrtc::AudioSendStream::Config& GetSendStreamConfig(uint32_t ssrc) {
+ const auto* send_stream = call_.GetAudioSendStream(ssrc);
+ EXPECT_TRUE(send_stream);
+ return send_stream->GetConfig();
+ }
+
+ const webrtc::AudioReceiveStream::Config& GetRecvStreamConfig(uint32_t ssrc) {
+ const auto* recv_stream = call_.GetAudioReceiveStream(ssrc);
+ EXPECT_TRUE(recv_stream);
+ return recv_stream->GetConfig();
+ }
+
void TestInsertDtmf(uint32_t ssrc, bool caller) {
EXPECT_TRUE(engine_.Init(rtc::Thread::Current()));
channel_ = engine_.CreateChannel(&call_, cricket::AudioOptions());
@@ -154,39 +152,30 @@ class WebRtcVoiceEngineTestFake : public testing::Test {
EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
EXPECT_TRUE(channel_->SetSend(cricket::SEND_MICROPHONE));
EXPECT_FALSE(channel_->CanInsertDtmf());
- EXPECT_FALSE(channel_->InsertDtmf(ssrc, 1, 111, cricket::DF_SEND));
+ EXPECT_FALSE(channel_->InsertDtmf(ssrc, 1, 111));
send_parameters_.codecs.push_back(kTelephoneEventCodec);
EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
EXPECT_TRUE(channel_->CanInsertDtmf());
if (!caller) {
// If this is callee, there's no active send channel yet.
- EXPECT_FALSE(channel_->InsertDtmf(ssrc, 2, 123, cricket::DF_SEND));
+ EXPECT_FALSE(channel_->InsertDtmf(ssrc, 2, 123));
EXPECT_TRUE(channel_->AddSendStream(
cricket::StreamParams::CreateLegacy(kSsrc1)));
}
// Check we fail if the ssrc is invalid.
- EXPECT_FALSE(channel_->InsertDtmf(-1, 1, 111, cricket::DF_SEND));
-
- // Test send
- int channel_id = voe_.GetLastChannel();
- EXPECT_FALSE(voe_.WasSendTelephoneEventCalled(channel_id, 2, 123));
- EXPECT_TRUE(channel_->InsertDtmf(ssrc, 2, 123, cricket::DF_SEND));
- EXPECT_TRUE(voe_.WasSendTelephoneEventCalled(channel_id, 2, 123));
-
- // Test play
- EXPECT_FALSE(voe_.WasPlayDtmfToneCalled(3, 134));
- EXPECT_TRUE(channel_->InsertDtmf(ssrc, 3, 134, cricket::DF_PLAY));
- EXPECT_TRUE(voe_.WasPlayDtmfToneCalled(3, 134));
-
- // Test send and play
- EXPECT_FALSE(voe_.WasSendTelephoneEventCalled(channel_id, 4, 145));
- EXPECT_FALSE(voe_.WasPlayDtmfToneCalled(4, 145));
- EXPECT_TRUE(channel_->InsertDtmf(ssrc, 4, 145,
- cricket::DF_PLAY | cricket::DF_SEND));
- EXPECT_TRUE(voe_.WasSendTelephoneEventCalled(channel_id, 4, 145));
- EXPECT_TRUE(voe_.WasPlayDtmfToneCalled(4, 145));
+ EXPECT_FALSE(channel_->InsertDtmf(-1, 1, 111));
+
+ // Test send.
+ cricket::FakeAudioSendStream::TelephoneEvent telephone_event =
+ GetSendStream(kSsrc1).GetLatestTelephoneEvent();
+ EXPECT_EQ(-1, telephone_event.payload_type);
+ EXPECT_TRUE(channel_->InsertDtmf(ssrc, 2, 123));
+ telephone_event = GetSendStream(kSsrc1).GetLatestTelephoneEvent();
+ EXPECT_EQ(kTelephoneEventCodec.id, telephone_event.payload_type);
+ EXPECT_EQ(2, telephone_event.event_code);
+ EXPECT_EQ(123, telephone_event.duration_ms);
}
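
(Editor's note on the DTMF helper above: InsertDtmf() drops its flags argument, so the DF_SEND/DF_PLAY distinction is gone and every accepted event is sent in-band on the send stream with no local tone playout. A minimal call-site migration sketch follows; 'channel' and 'ssrc' are assumed to be set up by the caller, and all other names come from the hunks above.)

// Migration sketch for the narrowed InsertDtmf() API.
bool SendDtmfDigit(cricket::VoiceMediaChannel* channel, uint32_t ssrc,
                   int event, int duration_ms) {
  // Before this change:
  //   channel->InsertDtmf(ssrc, event, duration_ms, cricket::DF_SEND);
  // DF_PLAY (local tone playout) no longer exists; events are send-only.
  return channel->CanInsertDtmf() &&
         channel->InsertDtmf(ssrc, event, duration_ms);
}
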
// Test that send bandwidth is set correctly.
@@ -211,81 +200,85 @@ class WebRtcVoiceEngineTestFake : public testing::Test {
void TestSetSendRtpHeaderExtensions(const std::string& ext) {
EXPECT_TRUE(SetupEngineWithSendStream());
- int channel_num = voe_.GetLastChannel();
// Ensure extensions are off by default.
- EXPECT_EQ(-1, voe_.GetSendRtpExtensionId(channel_num, ext));
+ EXPECT_EQ(0u, GetSendStreamConfig(kSsrc1).rtp.extensions.size());
// Ensure unknown extensions won't cause an error.
send_parameters_.extensions.push_back(cricket::RtpHeaderExtension(
"urn:ietf:params:unknownextention", 1));
EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
- EXPECT_EQ(-1, voe_.GetSendRtpExtensionId(channel_num, ext));
+ EXPECT_EQ(0u, GetSendStreamConfig(kSsrc1).rtp.extensions.size());
// Ensure extensions stay off with an empty list of headers.
send_parameters_.extensions.clear();
EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
- EXPECT_EQ(-1, voe_.GetSendRtpExtensionId(channel_num, ext));
+ EXPECT_EQ(0u, GetSendStreamConfig(kSsrc1).rtp.extensions.size());
// Ensure extension is set properly.
const int id = 1;
send_parameters_.extensions.push_back(cricket::RtpHeaderExtension(ext, id));
EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
- EXPECT_EQ(id, voe_.GetSendRtpExtensionId(channel_num, ext));
+ EXPECT_EQ(1u, GetSendStreamConfig(kSsrc1).rtp.extensions.size());
+ EXPECT_EQ(ext, GetSendStreamConfig(kSsrc1).rtp.extensions[0].name);
+ EXPECT_EQ(id, GetSendStreamConfig(kSsrc1).rtp.extensions[0].id);
- // Ensure extension is set properly on new channels.
+ // Ensure the extension is set properly on the new stream.
EXPECT_TRUE(channel_->AddSendStream(
cricket::StreamParams::CreateLegacy(kSsrc2)));
- int new_channel_num = voe_.GetLastChannel();
- EXPECT_NE(channel_num, new_channel_num);
- EXPECT_EQ(id, voe_.GetSendRtpExtensionId(new_channel_num, ext));
+ EXPECT_NE(call_.GetAudioSendStream(kSsrc1),
+ call_.GetAudioSendStream(kSsrc2));
+ EXPECT_EQ(1u, GetSendStreamConfig(kSsrc2).rtp.extensions.size());
+ EXPECT_EQ(ext, GetSendStreamConfig(kSsrc2).rtp.extensions[0].name);
+ EXPECT_EQ(id, GetSendStreamConfig(kSsrc2).rtp.extensions[0].id);
// Ensure all extensions go back off with an empty list.
send_parameters_.codecs.push_back(kPcmuCodec);
send_parameters_.extensions.clear();
EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
- EXPECT_EQ(-1, voe_.GetSendRtpExtensionId(channel_num, ext));
- EXPECT_EQ(-1, voe_.GetSendRtpExtensionId(new_channel_num, ext));
+ EXPECT_EQ(0u, GetSendStreamConfig(kSsrc1).rtp.extensions.size());
+ EXPECT_EQ(0u, GetSendStreamConfig(kSsrc2).rtp.extensions.size());
}
void TestSetRecvRtpHeaderExtensions(const std::string& ext) {
EXPECT_TRUE(SetupEngineWithRecvStream());
- int channel_num = voe_.GetLastChannel();
// Ensure extensions are off by default.
- EXPECT_EQ(-1, voe_.GetReceiveRtpExtensionId(channel_num, ext));
+ EXPECT_EQ(0u, GetRecvStreamConfig(kSsrc1).rtp.extensions.size());
- cricket::AudioRecvParameters parameters;
// Ensure unknown extensions won't cause an error.
- parameters.extensions.push_back(cricket::RtpHeaderExtension(
+ recv_parameters_.extensions.push_back(cricket::RtpHeaderExtension(
"urn:ietf:params:unknownextention", 1));
- EXPECT_TRUE(channel_->SetRecvParameters(parameters));
- EXPECT_EQ(-1, voe_.GetReceiveRtpExtensionId(channel_num, ext));
+ EXPECT_TRUE(channel_->SetRecvParameters(recv_parameters_));
+ EXPECT_EQ(0u, GetRecvStreamConfig(kSsrc1).rtp.extensions.size());
// Ensure extensions stay off with an empty list of headers.
- parameters.extensions.clear();
- EXPECT_TRUE(channel_->SetRecvParameters(parameters));
- EXPECT_EQ(-1, voe_.GetReceiveRtpExtensionId(channel_num, ext));
+ recv_parameters_.extensions.clear();
+ EXPECT_TRUE(channel_->SetRecvParameters(recv_parameters_));
+ EXPECT_EQ(0u, GetRecvStreamConfig(kSsrc1).rtp.extensions.size());
// Ensure extension is set properly.
const int id = 2;
- parameters.extensions.push_back(cricket::RtpHeaderExtension(ext, id));
- EXPECT_TRUE(channel_->SetRecvParameters(parameters));
- EXPECT_EQ(id, voe_.GetReceiveRtpExtensionId(channel_num, ext));
+ recv_parameters_.extensions.push_back(cricket::RtpHeaderExtension(ext, id));
+ EXPECT_TRUE(channel_->SetRecvParameters(recv_parameters_));
+ EXPECT_EQ(1u, GetRecvStreamConfig(kSsrc1).rtp.extensions.size());
+ EXPECT_EQ(ext, GetRecvStreamConfig(kSsrc1).rtp.extensions[0].name);
+ EXPECT_EQ(id, GetRecvStreamConfig(kSsrc1).rtp.extensions[0].id);
- // Ensure extension is set properly on new channel.
- // The first stream to occupy the default channel.
+ // Ensure the extension is set properly on the new stream.
EXPECT_TRUE(channel_->AddRecvStream(
cricket::StreamParams::CreateLegacy(kSsrc2)));
- int new_channel_num = voe_.GetLastChannel();
- EXPECT_NE(channel_num, new_channel_num);
- EXPECT_EQ(id, voe_.GetReceiveRtpExtensionId(new_channel_num, ext));
+ EXPECT_NE(call_.GetAudioReceiveStream(kSsrc1),
+ call_.GetAudioReceiveStream(kSsrc2));
+ EXPECT_EQ(1u, GetRecvStreamConfig(kSsrc2).rtp.extensions.size());
+ EXPECT_EQ(ext, GetRecvStreamConfig(kSsrc2).rtp.extensions[0].name);
+ EXPECT_EQ(id, GetRecvStreamConfig(kSsrc2).rtp.extensions[0].id);
// Ensure all extensions go back off with an empty list.
- parameters.extensions.clear();
- EXPECT_TRUE(channel_->SetRecvParameters(parameters));
- EXPECT_EQ(-1, voe_.GetReceiveRtpExtensionId(channel_num, ext));
- EXPECT_EQ(-1, voe_.GetReceiveRtpExtensionId(new_channel_num, ext));
+ recv_parameters_.extensions.clear();
+ EXPECT_TRUE(channel_->SetRecvParameters(recv_parameters_));
+ EXPECT_EQ(0u, GetRecvStreamConfig(kSsrc1).rtp.extensions.size());
+ EXPECT_EQ(0u, GetRecvStreamConfig(kSsrc2).rtp.extensions.size());
}
webrtc::AudioSendStream::Stats GetAudioSendStreamStats() const {
@@ -313,7 +306,8 @@ class WebRtcVoiceEngineTestFake : public testing::Test {
s->SetStats(GetAudioSendStreamStats());
}
}
- void VerifyVoiceSenderInfo(const cricket::VoiceSenderInfo& info) {
+ void VerifyVoiceSenderInfo(const cricket::VoiceSenderInfo& info,
+ bool is_sending) {
const auto stats = GetAudioSendStreamStats();
EXPECT_EQ(info.ssrc(), stats.local_ssrc);
EXPECT_EQ(info.bytes_sent, stats.bytes_sent);
@@ -331,8 +325,8 @@ class WebRtcVoiceEngineTestFake : public testing::Test {
EXPECT_EQ(info.echo_return_loss, stats.echo_return_loss);
EXPECT_EQ(info.echo_return_loss_enhancement,
stats.echo_return_loss_enhancement);
- // TODO(solenberg): Move typing noise detection into AudioSendStream.
- // EXPECT_EQ(info.typing_noise_detected, stats.typing_noise_detected);
+ EXPECT_EQ(info.typing_noise_detected,
+ stats.typing_noise_detected && is_sending);
}
webrtc::AudioReceiveStream::Stats GetAudioReceiveStreamStats() const {
@@ -401,13 +395,10 @@ class WebRtcVoiceEngineTestFake : public testing::Test {
protected:
cricket::FakeCall call_;
cricket::FakeWebRtcVoiceEngine voe_;
- FakeVoETraceWrapper* trace_wrapper_;
cricket::WebRtcVoiceEngine engine_;
cricket::VoiceMediaChannel* channel_;
-
cricket::AudioSendParameters send_parameters_;
cricket::AudioRecvParameters recv_parameters_;
- cricket::AudioOptions options_adjust_agc_;
};
// Tests that our stub library "works".
@@ -448,32 +439,33 @@ TEST_F(WebRtcVoiceEngineTestFake, FindCodec) {
cricket::AudioCodec codec;
webrtc::CodecInst codec_inst;
// Find PCMU with explicit clockrate and bitrate.
- EXPECT_TRUE(engine_.FindWebRtcCodec(kPcmuCodec, &codec_inst));
+ EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(kPcmuCodec, &codec_inst));
// Find ISAC with explicit clockrate and 0 bitrate.
- EXPECT_TRUE(engine_.FindWebRtcCodec(kIsacCodec, &codec_inst));
+ EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(kIsacCodec, &codec_inst));
// Find telephone-event with explicit clockrate and 0 bitrate.
- EXPECT_TRUE(engine_.FindWebRtcCodec(kTelephoneEventCodec, &codec_inst));
+ EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(kTelephoneEventCodec,
+ &codec_inst));
// Find ISAC with a different payload id.
codec = kIsacCodec;
codec.id = 127;
- EXPECT_TRUE(engine_.FindWebRtcCodec(codec, &codec_inst));
+ EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(codec, &codec_inst));
EXPECT_EQ(codec.id, codec_inst.pltype);
// Find PCMU with a 0 clockrate.
codec = kPcmuCodec;
codec.clockrate = 0;
- EXPECT_TRUE(engine_.FindWebRtcCodec(codec, &codec_inst));
+ EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(codec, &codec_inst));
EXPECT_EQ(codec.id, codec_inst.pltype);
EXPECT_EQ(8000, codec_inst.plfreq);
// Find PCMU with a 0 bitrate.
codec = kPcmuCodec;
codec.bitrate = 0;
- EXPECT_TRUE(engine_.FindWebRtcCodec(codec, &codec_inst));
+ EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(codec, &codec_inst));
EXPECT_EQ(codec.id, codec_inst.pltype);
EXPECT_EQ(64000, codec_inst.rate);
// Find ISAC with an explicit bitrate.
codec = kIsacCodec;
codec.bitrate = 32000;
- EXPECT_TRUE(engine_.FindWebRtcCodec(codec, &codec_inst));
+ EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(codec, &codec_inst));
EXPECT_EQ(codec.id, codec_inst.pltype);
EXPECT_EQ(32000, codec_inst.rate);
}
@@ -492,14 +484,13 @@ TEST_F(WebRtcVoiceEngineTestFake, SetRecvCodecs) {
cricket::StreamParams::CreateLegacy(kSsrc1)));
int channel_num = voe_.GetLastChannel();
webrtc::CodecInst gcodec;
- rtc::strcpyn(gcodec.plname, ARRAY_SIZE(gcodec.plname), "ISAC");
+ rtc::strcpyn(gcodec.plname, arraysize(gcodec.plname), "ISAC");
gcodec.plfreq = 16000;
gcodec.channels = 1;
EXPECT_EQ(0, voe_.GetRecPayloadType(channel_num, gcodec));
EXPECT_EQ(106, gcodec.pltype);
EXPECT_STREQ("ISAC", gcodec.plname);
- rtc::strcpyn(gcodec.plname, ARRAY_SIZE(gcodec.plname),
- "telephone-event");
+ rtc::strcpyn(gcodec.plname, arraysize(gcodec.plname), "telephone-event");
gcodec.plfreq = 8000;
EXPECT_EQ(0, voe_.GetRecPayloadType(channel_num, gcodec));
EXPECT_EQ(126, gcodec.pltype);
@@ -537,7 +528,7 @@ TEST_F(WebRtcVoiceEngineTestFake, SetRecvCodecsWithOpusNoStereo) {
cricket::StreamParams::CreateLegacy(kSsrc1)));
int channel_num = voe_.GetLastChannel();
webrtc::CodecInst opus;
- engine_.FindWebRtcCodec(kOpusCodec, &opus);
+ cricket::WebRtcVoiceEngine::ToCodecInst(kOpusCodec, &opus);
// Even without stereo parameters, recv codecs still specify channels = 2.
EXPECT_EQ(2, opus.channels);
EXPECT_EQ(111, opus.pltype);
@@ -560,7 +551,7 @@ TEST_F(WebRtcVoiceEngineTestFake, SetRecvCodecsWithOpus0Stereo) {
cricket::StreamParams::CreateLegacy(kSsrc1)));
int channel_num2 = voe_.GetLastChannel();
webrtc::CodecInst opus;
- engine_.FindWebRtcCodec(kOpusCodec, &opus);
+ cricket::WebRtcVoiceEngine::ToCodecInst(kOpusCodec, &opus);
// Even when stereo is off, recv codecs still specify channels = 2.
EXPECT_EQ(2, opus.channels);
EXPECT_EQ(111, opus.pltype);
@@ -583,7 +574,7 @@ TEST_F(WebRtcVoiceEngineTestFake, SetRecvCodecsWithOpus1Stereo) {
cricket::StreamParams::CreateLegacy(kSsrc1)));
int channel_num2 = voe_.GetLastChannel();
webrtc::CodecInst opus;
- engine_.FindWebRtcCodec(kOpusCodec, &opus);
+ cricket::WebRtcVoiceEngine::ToCodecInst(kOpusCodec, &opus);
EXPECT_EQ(2, opus.channels);
EXPECT_EQ(111, opus.pltype);
EXPECT_STREQ("opus", opus.plname);
@@ -606,14 +597,13 @@ TEST_F(WebRtcVoiceEngineTestFake, SetRecvCodecsWithMultipleStreams) {
cricket::StreamParams::CreateLegacy(kSsrc1)));
int channel_num2 = voe_.GetLastChannel();
webrtc::CodecInst gcodec;
- rtc::strcpyn(gcodec.plname, ARRAY_SIZE(gcodec.plname), "ISAC");
+ rtc::strcpyn(gcodec.plname, arraysize(gcodec.plname), "ISAC");
gcodec.plfreq = 16000;
gcodec.channels = 1;
EXPECT_EQ(0, voe_.GetRecPayloadType(channel_num2, gcodec));
EXPECT_EQ(106, gcodec.pltype);
EXPECT_STREQ("ISAC", gcodec.plname);
- rtc::strcpyn(gcodec.plname, ARRAY_SIZE(gcodec.plname),
- "telephone-event");
+ rtc::strcpyn(gcodec.plname, arraysize(gcodec.plname), "telephone-event");
gcodec.plfreq = 8000;
gcodec.channels = 1;
EXPECT_EQ(0, voe_.GetRecPayloadType(channel_num2, gcodec));
@@ -630,7 +620,7 @@ TEST_F(WebRtcVoiceEngineTestFake, SetRecvCodecsAfterAddingStreams) {
int channel_num2 = voe_.GetLastChannel();
webrtc::CodecInst gcodec;
- rtc::strcpyn(gcodec.plname, ARRAY_SIZE(gcodec.plname), "ISAC");
+ rtc::strcpyn(gcodec.plname, arraysize(gcodec.plname), "ISAC");
gcodec.plfreq = 16000;
gcodec.channels = 1;
EXPECT_EQ(0, voe_.GetRecPayloadType(channel_num2, gcodec));
@@ -669,7 +659,7 @@ TEST_F(WebRtcVoiceEngineTestFake, AddRecvCodecsWhilePlaying) {
int channel_num = voe_.GetLastChannel();
EXPECT_TRUE(voe_.GetPlayout(channel_num));
webrtc::CodecInst gcodec;
- EXPECT_TRUE(engine_.FindWebRtcCodec(kOpusCodec, &gcodec));
+ EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(kOpusCodec, &gcodec));
EXPECT_EQ(kOpusCodec.id, gcodec.pltype);
}
@@ -782,7 +772,7 @@ TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecs) {
EXPECT_FALSE(voe_.GetRED(channel_num));
EXPECT_EQ(13, voe_.GetSendCNPayloadType(channel_num, false));
EXPECT_EQ(105, voe_.GetSendCNPayloadType(channel_num, true));
- EXPECT_EQ(106, voe_.GetSendTelephoneEventPayloadType(channel_num));
+ EXPECT_FALSE(channel_->CanInsertDtmf());
}
// Test that VoE Channel doesn't call SetSendCodec again if same codec is tried
@@ -1623,7 +1613,7 @@ TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecsDTMFOnTop) {
EXPECT_EQ(0, voe_.GetSendCodec(channel_num, gcodec));
EXPECT_EQ(96, gcodec.pltype);
EXPECT_STREQ("ISAC", gcodec.plname);
- EXPECT_EQ(98, voe_.GetSendTelephoneEventPayloadType(channel_num));
+ EXPECT_TRUE(channel_->CanInsertDtmf());
}
// Test that we can set send codecs even with CN codec as the first
@@ -1669,7 +1659,7 @@ TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecsCNandDTMFAsCaller) {
EXPECT_FALSE(voe_.GetRED(channel_num));
EXPECT_EQ(13, voe_.GetSendCNPayloadType(channel_num, false));
EXPECT_EQ(97, voe_.GetSendCNPayloadType(channel_num, true));
- EXPECT_EQ(98, voe_.GetSendTelephoneEventPayloadType(channel_num));
+ EXPECT_TRUE(channel_->CanInsertDtmf());
}
// Test that we set VAD and DTMF types correctly as callee.
@@ -1702,7 +1692,7 @@ TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecsCNandDTMFAsCallee) {
EXPECT_FALSE(voe_.GetRED(channel_num));
EXPECT_EQ(13, voe_.GetSendCNPayloadType(channel_num, false));
EXPECT_EQ(97, voe_.GetSendCNPayloadType(channel_num, true));
- EXPECT_EQ(98, voe_.GetSendTelephoneEventPayloadType(channel_num));
+ EXPECT_TRUE(channel_->CanInsertDtmf());
}
// Test that we only apply VAD if we have a CN codec that matches the
@@ -1766,7 +1756,7 @@ TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecsCaseInsensitive) {
EXPECT_FALSE(voe_.GetRED(channel_num));
EXPECT_EQ(13, voe_.GetSendCNPayloadType(channel_num, false));
EXPECT_EQ(97, voe_.GetSendCNPayloadType(channel_num, true));
- EXPECT_EQ(98, voe_.GetSendTelephoneEventPayloadType(channel_num));
+ EXPECT_TRUE(channel_->CanInsertDtmf());
}
// Test that we set up RED correctly as caller.
@@ -1976,21 +1966,16 @@ TEST_F(WebRtcVoiceEngineTestFake, CreateAndDeleteMultipleSendStreams) {
for (uint32_t ssrc : kSsrcs4) {
EXPECT_TRUE(channel_->AddSendStream(
cricket::StreamParams::CreateLegacy(ssrc)));
- EXPECT_NE(nullptr, call_.GetAudioSendStream(ssrc));
-
// Verify that we are in a sending state for all the created streams.
- int channel_num = voe_.GetChannelFromLocalSsrc(ssrc);
- EXPECT_TRUE(voe_.GetSend(channel_num));
+ EXPECT_TRUE(voe_.GetSend(GetSendStreamConfig(ssrc).voe_channel_id));
}
- EXPECT_EQ(ARRAY_SIZE(kSsrcs4), call_.GetAudioSendStreams().size());
+ EXPECT_EQ(arraysize(kSsrcs4), call_.GetAudioSendStreams().size());
// Delete the send streams.
for (uint32_t ssrc : kSsrcs4) {
EXPECT_TRUE(channel_->RemoveSendStream(ssrc));
- EXPECT_EQ(nullptr, call_.GetAudioSendStream(ssrc));
- // Stream should already be deleted.
+ EXPECT_FALSE(call_.GetAudioSendStream(ssrc));
EXPECT_FALSE(channel_->RemoveSendStream(ssrc));
- EXPECT_EQ(-1, voe_.GetChannelFromLocalSsrc(ssrc));
}
EXPECT_EQ(0u, call_.GetAudioSendStreams().size());
}
@@ -2015,7 +2000,7 @@ TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecsWithMultipleSendStreams) {
// Verify ISAC and VAD are correctly configured on all send channels.
webrtc::CodecInst gcodec;
for (uint32_t ssrc : kSsrcs4) {
- int channel_num = voe_.GetChannelFromLocalSsrc(ssrc);
+ int channel_num = GetSendStreamConfig(ssrc).voe_channel_id;
EXPECT_EQ(0, voe_.GetSendCodec(channel_num, gcodec));
EXPECT_STREQ("ISAC", gcodec.plname);
EXPECT_TRUE(voe_.GetVAD(channel_num));
@@ -2026,7 +2011,7 @@ TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecsWithMultipleSendStreams) {
parameters.codecs[0] = kPcmuCodec;
EXPECT_TRUE(channel_->SetSendParameters(parameters));
for (uint32_t ssrc : kSsrcs4) {
- int channel_num = voe_.GetChannelFromLocalSsrc(ssrc);
+ int channel_num = GetSendStreamConfig(ssrc).voe_channel_id;
EXPECT_EQ(0, voe_.GetSendCodec(channel_num, gcodec));
EXPECT_STREQ("PCMU", gcodec.plname);
EXPECT_FALSE(voe_.GetVAD(channel_num));
@@ -2049,7 +2034,7 @@ TEST_F(WebRtcVoiceEngineTestFake, SetSendWithMultipleSendStreams) {
EXPECT_TRUE(channel_->SetSend(cricket::SEND_MICROPHONE));
for (uint32_t ssrc : kSsrcs4) {
// Verify that we are in a sending state for all the send streams.
- int channel_num = voe_.GetChannelFromLocalSsrc(ssrc);
+ int channel_num = GetSendStreamConfig(ssrc).voe_channel_id;
EXPECT_TRUE(voe_.GetSend(channel_num));
}
@@ -2057,7 +2042,7 @@ TEST_F(WebRtcVoiceEngineTestFake, SetSendWithMultipleSendStreams) {
EXPECT_TRUE(channel_->SetSend(cricket::SEND_NOTHING));
for (uint32_t ssrc : kSsrcs4) {
// Verify that we are in a stop state for all the send streams.
- int channel_num = voe_.GetChannelFromLocalSsrc(ssrc);
+ int channel_num = GetSendStreamConfig(ssrc).voe_channel_id;
EXPECT_FALSE(voe_.GetSend(channel_num));
}
}
@@ -2087,9 +2072,9 @@ TEST_F(WebRtcVoiceEngineTestFake, GetStatsWithMultipleSendStreams) {
EXPECT_EQ(true, channel_->GetStats(&info));
// We have added 4 send streams. We should see empty stats for all.
- EXPECT_EQ(static_cast<size_t>(ARRAY_SIZE(kSsrcs4)), info.senders.size());
+ EXPECT_EQ(static_cast<size_t>(arraysize(kSsrcs4)), info.senders.size());
for (const auto& sender : info.senders) {
- VerifyVoiceSenderInfo(sender);
+ VerifyVoiceSenderInfo(sender, false);
}
// We have added one receive stream. We should see empty stats.
@@ -2102,7 +2087,7 @@ TEST_F(WebRtcVoiceEngineTestFake, GetStatsWithMultipleSendStreams) {
cricket::VoiceMediaInfo info;
EXPECT_TRUE(channel_->RemoveRecvStream(kSsrc2));
EXPECT_EQ(true, channel_->GetStats(&info));
- EXPECT_EQ(static_cast<size_t>(ARRAY_SIZE(kSsrcs4)), info.senders.size());
+ EXPECT_EQ(static_cast<size_t>(arraysize(kSsrcs4)), info.senders.size());
EXPECT_EQ(0u, info.receivers.size());
}
@@ -2113,7 +2098,7 @@ TEST_F(WebRtcVoiceEngineTestFake, GetStatsWithMultipleSendStreams) {
DeliverPacket(kPcmuFrame, sizeof(kPcmuFrame));
SetAudioReceiveStreamStats();
EXPECT_EQ(true, channel_->GetStats(&info));
- EXPECT_EQ(static_cast<size_t>(ARRAY_SIZE(kSsrcs4)), info.senders.size());
+ EXPECT_EQ(static_cast<size_t>(arraysize(kSsrcs4)), info.senders.size());
EXPECT_EQ(1u, info.receivers.size());
VerifyVoiceReceiverInfo(info.receivers[0]);
}
@@ -2173,96 +2158,17 @@ TEST_F(WebRtcVoiceEngineTestFake, PlayoutWithMultipleStreams) {
EXPECT_FALSE(voe_.GetPlayout(channel_num1));
}
-// Test that we can set the devices to use.
-TEST_F(WebRtcVoiceEngineTestFake, SetDevices) {
- EXPECT_TRUE(SetupEngineWithSendStream());
- int send_channel = voe_.GetLastChannel();
- EXPECT_TRUE(channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(2)));
- int recv_channel = voe_.GetLastChannel();
- EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
-
- cricket::Device default_dev(cricket::kFakeDefaultDeviceName,
- cricket::kFakeDefaultDeviceId);
- cricket::Device dev(cricket::kFakeDeviceName,
- cricket::kFakeDeviceId);
-
- // Test SetDevices() while not sending or playing.
- EXPECT_TRUE(engine_.SetDevices(&default_dev, &default_dev));
-
- // Test SetDevices() while sending and playing.
- EXPECT_TRUE(channel_->SetSend(cricket::SEND_MICROPHONE));
- EXPECT_TRUE(channel_->SetPlayout(true));
- EXPECT_TRUE(voe_.GetSend(send_channel));
- EXPECT_TRUE(voe_.GetPlayout(recv_channel));
-
- EXPECT_TRUE(engine_.SetDevices(&dev, &dev));
-
- EXPECT_TRUE(voe_.GetSend(send_channel));
- EXPECT_TRUE(voe_.GetPlayout(recv_channel));
-
- // Test that failure to open newly selected devices does not prevent opening
- // ones after that.
- voe_.set_playout_fail_channel(recv_channel);
- voe_.set_send_fail_channel(send_channel);
-
- EXPECT_FALSE(engine_.SetDevices(&default_dev, &default_dev));
-
- EXPECT_FALSE(voe_.GetSend(send_channel));
- EXPECT_FALSE(voe_.GetPlayout(recv_channel));
-
- voe_.set_playout_fail_channel(-1);
- voe_.set_send_fail_channel(-1);
-
- EXPECT_TRUE(engine_.SetDevices(&dev, &dev));
-
- EXPECT_TRUE(voe_.GetSend(send_channel));
- EXPECT_TRUE(voe_.GetPlayout(recv_channel));
-}
-
-// Test that we can set the devices to use even if we failed to
-// open the initial ones.
-TEST_F(WebRtcVoiceEngineTestFake, SetDevicesWithInitiallyBadDevices) {
- EXPECT_TRUE(SetupEngineWithSendStream());
- int send_channel = voe_.GetLastChannel();
- EXPECT_TRUE(channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(2)));
- int recv_channel = voe_.GetLastChannel();
- EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
-
- cricket::Device default_dev(cricket::kFakeDefaultDeviceName,
- cricket::kFakeDefaultDeviceId);
- cricket::Device dev(cricket::kFakeDeviceName,
- cricket::kFakeDeviceId);
-
- // Test that failure to open devices selected before starting
- // send/play does not prevent opening newly selected ones after that.
- voe_.set_playout_fail_channel(recv_channel);
- voe_.set_send_fail_channel(send_channel);
-
- EXPECT_TRUE(engine_.SetDevices(&default_dev, &default_dev));
-
- EXPECT_FALSE(channel_->SetSend(cricket::SEND_MICROPHONE));
- EXPECT_FALSE(channel_->SetPlayout(true));
- EXPECT_FALSE(voe_.GetSend(send_channel));
- EXPECT_FALSE(voe_.GetPlayout(recv_channel));
-
- voe_.set_playout_fail_channel(-1);
- voe_.set_send_fail_channel(-1);
-
- EXPECT_TRUE(engine_.SetDevices(&dev, &dev));
-
- EXPECT_TRUE(voe_.GetSend(send_channel));
- EXPECT_TRUE(voe_.GetPlayout(recv_channel));
-}
-
// Test that we can create a channel configured for Codian bridges,
// and start sending on it.
TEST_F(WebRtcVoiceEngineTestFake, CodianSend) {
EXPECT_TRUE(SetupEngineWithSendStream());
+ cricket::AudioOptions options_adjust_agc;
+ options_adjust_agc.adjust_agc_delta = rtc::Optional<int>(-10);
int channel_num = voe_.GetLastChannel();
webrtc::AgcConfig agc_config;
EXPECT_EQ(0, voe_.GetAgcConfig(agc_config));
EXPECT_EQ(0, agc_config.targetLeveldBOv);
- send_parameters_.options = options_adjust_agc_;
+ send_parameters_.options = options_adjust_agc;
EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
EXPECT_TRUE(channel_->SetSend(cricket::SEND_MICROPHONE));
EXPECT_TRUE(voe_.GetSend(channel_num));
@@ -2271,7 +2177,6 @@ TEST_F(WebRtcVoiceEngineTestFake, CodianSend) {
EXPECT_TRUE(channel_->SetSend(cricket::SEND_NOTHING));
EXPECT_FALSE(voe_.GetSend(channel_num));
EXPECT_EQ(0, voe_.GetAgcConfig(agc_config));
- EXPECT_EQ(0, agc_config.targetLeveldBOv); // level was restored
}
TEST_F(WebRtcVoiceEngineTestFake, TxAgcConfigViaOptions) {
@@ -2279,14 +2184,12 @@ TEST_F(WebRtcVoiceEngineTestFake, TxAgcConfigViaOptions) {
webrtc::AgcConfig agc_config;
EXPECT_EQ(0, voe_.GetAgcConfig(agc_config));
EXPECT_EQ(0, agc_config.targetLeveldBOv);
-
- cricket::AudioOptions options;
- options.tx_agc_target_dbov.Set(3);
- options.tx_agc_digital_compression_gain.Set(9);
- options.tx_agc_limiter.Set(true);
- options.auto_gain_control.Set(true);
- EXPECT_TRUE(engine_.SetOptions(options));
-
+ send_parameters_.options.tx_agc_target_dbov = rtc::Optional<uint16_t>(3);
+ send_parameters_.options.tx_agc_digital_compression_gain =
+ rtc::Optional<uint16_t>(9);
+ send_parameters_.options.tx_agc_limiter = rtc::Optional<bool>(true);
+ send_parameters_.options.auto_gain_control = rtc::Optional<bool>(true);
+ EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
EXPECT_EQ(0, voe_.GetAgcConfig(agc_config));
EXPECT_EQ(3, agc_config.targetLeveldBOv);
EXPECT_EQ(9, agc_config.digitalCompressionGaindB);
@@ -2294,19 +2197,18 @@ TEST_F(WebRtcVoiceEngineTestFake, TxAgcConfigViaOptions) {
// Check interaction with adjust_agc_delta. Both should be respected, for
// backwards compatibility.
- options.adjust_agc_delta.Set(-10);
- EXPECT_TRUE(engine_.SetOptions(options));
-
+ send_parameters_.options.adjust_agc_delta = rtc::Optional<int>(-10);
+ EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
EXPECT_EQ(0, voe_.GetAgcConfig(agc_config));
EXPECT_EQ(13, agc_config.targetLeveldBOv);
}
TEST_F(WebRtcVoiceEngineTestFake, SampleRatesViaOptions) {
EXPECT_TRUE(SetupEngineWithSendStream());
- cricket::AudioOptions options;
- options.recording_sample_rate.Set(48000u);
- options.playout_sample_rate.Set(44100u);
- EXPECT_TRUE(engine_.SetOptions(options));
+ send_parameters_.options.recording_sample_rate =
+ rtc::Optional<uint32_t>(48000);
+ send_parameters_.options.playout_sample_rate = rtc::Optional<uint32_t>(44100);
+ EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
unsigned int recording_sample_rate, playout_sample_rate;
EXPECT_EQ(0, voe_.RecordingSampleRate(&recording_sample_rate));
@@ -2315,30 +2217,11 @@ TEST_F(WebRtcVoiceEngineTestFake, SampleRatesViaOptions) {
EXPECT_EQ(44100u, playout_sample_rate);
}
-TEST_F(WebRtcVoiceEngineTestFake, TraceFilterViaTraceOptions) {
- EXPECT_TRUE(SetupEngineWithSendStream());
- engine_.SetLogging(rtc::LS_INFO, "");
- EXPECT_EQ(
- // Info:
- webrtc::kTraceStateInfo | webrtc::kTraceInfo |
- // Warning:
- webrtc::kTraceTerseInfo | webrtc::kTraceWarning |
- // Error:
- webrtc::kTraceError | webrtc::kTraceCritical,
- static_cast<int>(trace_wrapper_->filter_));
- // Now set it explicitly
- std::string filter =
- "tracefilter " + rtc::ToString(webrtc::kTraceDefault);
- engine_.SetLogging(rtc::LS_VERBOSE, filter.c_str());
- EXPECT_EQ(static_cast<unsigned int>(webrtc::kTraceDefault),
- trace_wrapper_->filter_);
-}
-
// Test that we can set the outgoing SSRC properly.
// SSRC is set in SetupEngine by calling AddSendStream.
TEST_F(WebRtcVoiceEngineTestFake, SetSendSsrc) {
EXPECT_TRUE(SetupEngineWithSendStream());
- EXPECT_EQ(kSsrc1, voe_.GetLocalSSRC(voe_.GetLastChannel()));
+ EXPECT_TRUE(call_.GetAudioSendStream(kSsrc1));
}
TEST_F(WebRtcVoiceEngineTestFake, GetStats) {
@@ -2359,12 +2242,20 @@ TEST_F(WebRtcVoiceEngineTestFake, GetStats) {
// We have added one send stream. We should see the stats we've set.
EXPECT_EQ(1u, info.senders.size());
- VerifyVoiceSenderInfo(info.senders[0]);
+ VerifyVoiceSenderInfo(info.senders[0], false);
// We have added one receive stream. We should see empty stats.
EXPECT_EQ(info.receivers.size(), 1u);
EXPECT_EQ(info.receivers[0].ssrc(), 0);
}
+ // Start sending - this affects some reported stats.
+ {
+ cricket::VoiceMediaInfo info;
+ EXPECT_TRUE(channel_->SetSend(cricket::SEND_MICROPHONE));
+ EXPECT_EQ(true, channel_->GetStats(&info));
+ VerifyVoiceSenderInfo(info.senders[0], true);
+ }
+
// Remove the kSsrc2 stream. No receiver stats.
{
cricket::VoiceMediaInfo info;
@@ -2391,9 +2282,10 @@ TEST_F(WebRtcVoiceEngineTestFake, GetStats) {
// SSRC is set in SetupEngine by calling AddSendStream.
TEST_F(WebRtcVoiceEngineTestFake, SetSendSsrcWithMultipleStreams) {
EXPECT_TRUE(SetupEngineWithSendStream());
- EXPECT_EQ(kSsrc1, voe_.GetLocalSSRC(voe_.GetLastChannel()));
- EXPECT_TRUE(channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(2)));
- EXPECT_EQ(kSsrc1, voe_.GetLocalSSRC(voe_.GetLastChannel()));
+ EXPECT_TRUE(call_.GetAudioSendStream(kSsrc1));
+ EXPECT_TRUE(channel_->AddRecvStream(
+ cricket::StreamParams::CreateLegacy(kSsrc2)));
+ EXPECT_EQ(kSsrc1, GetRecvStreamConfig(kSsrc2).rtp.local_ssrc);
}
// Test that the local SSRC is the same on sending and receiving channels if the
@@ -2406,25 +2298,23 @@ TEST_F(WebRtcVoiceEngineTestFake, SetSendSsrcAfterCreatingReceiveChannel) {
int receive_channel_num = voe_.GetLastChannel();
EXPECT_TRUE(channel_->AddSendStream(
cricket::StreamParams::CreateLegacy(1234)));
- int send_channel_num = voe_.GetLastChannel();
- EXPECT_EQ(1234U, voe_.GetLocalSSRC(send_channel_num));
+ EXPECT_TRUE(call_.GetAudioSendStream(1234));
EXPECT_EQ(1234U, voe_.GetLocalSSRC(receive_channel_num));
}
// Test that we can properly receive packets.
TEST_F(WebRtcVoiceEngineTestFake, Recv) {
EXPECT_TRUE(SetupEngine());
+ EXPECT_TRUE(channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(1)));
DeliverPacket(kPcmuFrame, sizeof(kPcmuFrame));
int channel_num = voe_.GetLastChannel();
- EXPECT_TRUE(voe_.CheckPacket(channel_num, kPcmuFrame,
- sizeof(kPcmuFrame)));
+ EXPECT_TRUE(voe_.CheckPacket(channel_num, kPcmuFrame, sizeof(kPcmuFrame)));
}
// Test that we can properly receive packets on multiple streams.
TEST_F(WebRtcVoiceEngineTestFake, RecvWithMultipleStreams) {
- EXPECT_TRUE(SetupEngineWithSendStream());
- EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
+ EXPECT_TRUE(SetupEngine());
EXPECT_TRUE(channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(1)));
int channel_num1 = voe_.GetLastChannel();
EXPECT_TRUE(channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(2)));
@@ -2433,37 +2323,97 @@ TEST_F(WebRtcVoiceEngineTestFake, RecvWithMultipleStreams) {
int channel_num3 = voe_.GetLastChannel();
// Create packets with the right SSRCs.
char packets[4][sizeof(kPcmuFrame)];
- for (size_t i = 0; i < ARRAY_SIZE(packets); ++i) {
+ for (size_t i = 0; i < arraysize(packets); ++i) {
memcpy(packets[i], kPcmuFrame, sizeof(kPcmuFrame));
rtc::SetBE32(packets[i] + 8, static_cast<uint32_t>(i));
}
EXPECT_TRUE(voe_.CheckNoPacket(channel_num1));
EXPECT_TRUE(voe_.CheckNoPacket(channel_num2));
EXPECT_TRUE(voe_.CheckNoPacket(channel_num3));
+
DeliverPacket(packets[0], sizeof(packets[0]));
EXPECT_TRUE(voe_.CheckNoPacket(channel_num1));
EXPECT_TRUE(voe_.CheckNoPacket(channel_num2));
EXPECT_TRUE(voe_.CheckNoPacket(channel_num3));
+
DeliverPacket(packets[1], sizeof(packets[1]));
- EXPECT_TRUE(voe_.CheckPacket(channel_num1, packets[1],
- sizeof(packets[1])));
+ EXPECT_TRUE(voe_.CheckPacket(channel_num1, packets[1], sizeof(packets[1])));
EXPECT_TRUE(voe_.CheckNoPacket(channel_num2));
EXPECT_TRUE(voe_.CheckNoPacket(channel_num3));
+
DeliverPacket(packets[2], sizeof(packets[2]));
EXPECT_TRUE(voe_.CheckNoPacket(channel_num1));
- EXPECT_TRUE(voe_.CheckPacket(channel_num2, packets[2],
- sizeof(packets[2])));
+ EXPECT_TRUE(voe_.CheckPacket(channel_num2, packets[2], sizeof(packets[2])));
EXPECT_TRUE(voe_.CheckNoPacket(channel_num3));
+
DeliverPacket(packets[3], sizeof(packets[3]));
EXPECT_TRUE(voe_.CheckNoPacket(channel_num1));
EXPECT_TRUE(voe_.CheckNoPacket(channel_num2));
- EXPECT_TRUE(voe_.CheckPacket(channel_num3, packets[3],
- sizeof(packets[3])));
+ EXPECT_TRUE(voe_.CheckPacket(channel_num3, packets[3], sizeof(packets[3])));
+
EXPECT_TRUE(channel_->RemoveRecvStream(3));
EXPECT_TRUE(channel_->RemoveRecvStream(2));
EXPECT_TRUE(channel_->RemoveRecvStream(1));
}
+// Test that receiving on an unsignalled stream works (default channel will be
+// created).
+TEST_F(WebRtcVoiceEngineTestFake, RecvUnsignalled) {
+ EXPECT_TRUE(SetupEngine());
+ DeliverPacket(kPcmuFrame, sizeof(kPcmuFrame));
+ int channel_num = voe_.GetLastChannel();
+ EXPECT_TRUE(voe_.CheckPacket(channel_num, kPcmuFrame, sizeof(kPcmuFrame)));
+}
+
+// Test that receiving on an unsignalled stream works (default channel will be
+// created), and that packets will be forwarded to the default channel
+// regardless of their SSRCs.
+TEST_F(WebRtcVoiceEngineTestFake, RecvUnsignalledWithSsrcSwitch) {
+ EXPECT_TRUE(SetupEngine());
+ char packet[sizeof(kPcmuFrame)];
+ memcpy(packet, kPcmuFrame, sizeof(kPcmuFrame));
+
+ // Note that the first unknown SSRC cannot be 0, because we only support
+ // creating receive streams for SSRC!=0.
+ DeliverPacket(packet, sizeof(packet));
+ int channel_num = voe_.GetLastChannel();
+ EXPECT_TRUE(voe_.CheckPacket(channel_num, packet, sizeof(packet)));
+ // Once we have the default channel, SSRC==0 will be ok.
+ for (uint32_t ssrc = 0; ssrc < 10; ++ssrc) {
+ rtc::SetBE32(&packet[8], ssrc);
+ DeliverPacket(packet, sizeof(packet));
+ EXPECT_TRUE(voe_.CheckPacket(channel_num, packet, sizeof(packet)));
+ }
+}
+
+// Test that a default channel is created even after a signalled stream has been
+// added, and that this stream will get any packets for unknown SSRCs.
+TEST_F(WebRtcVoiceEngineTestFake, RecvUnsignalledAfterSignalled) {
+ EXPECT_TRUE(SetupEngine());
+ char packet[sizeof(kPcmuFrame)];
+ memcpy(packet, kPcmuFrame, sizeof(kPcmuFrame));
+
+ // Add a known stream, send packet and verify we got it.
+ EXPECT_TRUE(channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(1)));
+ int signalled_channel_num = voe_.GetLastChannel();
+ DeliverPacket(packet, sizeof(packet));
+ EXPECT_TRUE(voe_.CheckPacket(signalled_channel_num, packet, sizeof(packet)));
+
+ // Note that the first unknown SSRC cannot be 0, because we only support
+ // creating receive streams for SSRC!=0.
+ rtc::SetBE32(&packet[8], 7011);
+ DeliverPacket(packet, sizeof(packet));
+ int channel_num = voe_.GetLastChannel();
+ EXPECT_NE(channel_num, signalled_channel_num);
+ EXPECT_TRUE(voe_.CheckPacket(channel_num, packet, sizeof(packet)));
+ // Once we have the default channel, SSRC==0 will be ok.
+ for (uint32_t ssrc = 0; ssrc < 20; ssrc += 2) {
+ rtc::SetBE32(&packet[8], ssrc);
+ DeliverPacket(packet, sizeof(packet));
+ EXPECT_TRUE(voe_.CheckPacket(channel_num, packet, sizeof(packet)));
+ }
+}
+
// Test that we properly handle failures to add a receive stream.
TEST_F(WebRtcVoiceEngineTestFake, AddRecvStreamFail) {
EXPECT_TRUE(SetupEngine());
@@ -2498,7 +2448,7 @@ TEST_F(WebRtcVoiceEngineTestFake, AddRecvStreamUnsupportedCodec) {
cricket::StreamParams::CreateLegacy(kSsrc1)));
int channel_num2 = voe_.GetLastChannel();
webrtc::CodecInst gcodec;
- rtc::strcpyn(gcodec.plname, ARRAY_SIZE(gcodec.plname), "opus");
+ rtc::strcpyn(gcodec.plname, arraysize(gcodec.plname), "opus");
gcodec.plfreq = 48000;
gcodec.channels = 2;
EXPECT_EQ(-1, voe_.GetRecPayloadType(channel_num2, gcodec));
@@ -2602,10 +2552,12 @@ TEST_F(WebRtcVoiceEngineTestFake, SetAudioOptions) {
EXPECT_TRUE(typing_detection_enabled);
EXPECT_EQ(ec_mode, webrtc::kEcConference);
EXPECT_EQ(ns_mode, webrtc::kNsHighSuppression);
+ EXPECT_EQ(50, voe_.GetNetEqCapacity());
+ EXPECT_FALSE(voe_.GetNetEqFastAccelerate());
- // Nothing set, so all ignored.
- cricket::AudioOptions options;
- ASSERT_TRUE(engine_.SetOptions(options));
+ // Nothing set in AudioOptions, so everything should be as default.
+ send_parameters_.options = cricket::AudioOptions();
+ EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
voe_.GetEcStatus(ec_enabled, ec_mode);
voe_.GetAecmMode(aecm_mode, cng_enabled);
voe_.GetAgcStatus(agc_enabled, agc_mode);
@@ -2625,20 +2577,19 @@ TEST_F(WebRtcVoiceEngineTestFake, SetAudioOptions) {
EXPECT_TRUE(typing_detection_enabled);
EXPECT_EQ(ec_mode, webrtc::kEcConference);
EXPECT_EQ(ns_mode, webrtc::kNsHighSuppression);
- EXPECT_EQ(50, voe_.GetNetEqCapacity()); // From GetDefaultEngineOptions().
- EXPECT_FALSE(
- voe_.GetNetEqFastAccelerate()); // From GetDefaultEngineOptions().
+ EXPECT_EQ(50, voe_.GetNetEqCapacity());
+ EXPECT_FALSE(voe_.GetNetEqFastAccelerate());
// Turn echo cancellation off
- options.echo_cancellation.Set(false);
- ASSERT_TRUE(engine_.SetOptions(options));
+ send_parameters_.options.echo_cancellation = rtc::Optional<bool>(false);
+ EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
voe_.GetEcStatus(ec_enabled, ec_mode);
EXPECT_FALSE(ec_enabled);
// Turn echo cancellation back on, with settings, and make sure
// nothing else changed.
- options.echo_cancellation.Set(true);
- ASSERT_TRUE(engine_.SetOptions(options));
+ send_parameters_.options.echo_cancellation = rtc::Optional<bool>(true);
+ EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
voe_.GetEcStatus(ec_enabled, ec_mode);
voe_.GetAecmMode(aecm_mode, cng_enabled);
voe_.GetAgcStatus(agc_enabled, agc_mode);
@@ -2660,8 +2611,8 @@ TEST_F(WebRtcVoiceEngineTestFake, SetAudioOptions) {
// Turn on delay agnostic aec and make sure nothing changes w.r.t. echo
// control.
- options.delay_agnostic_aec.Set(true);
- ASSERT_TRUE(engine_.SetOptions(options));
+ send_parameters_.options.delay_agnostic_aec = rtc::Optional<bool>(true);
+ EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
voe_.GetEcStatus(ec_enabled, ec_mode);
voe_.GetAecmMode(aecm_mode, cng_enabled);
EXPECT_TRUE(ec_enabled);
@@ -2669,41 +2620,41 @@ TEST_F(WebRtcVoiceEngineTestFake, SetAudioOptions) {
EXPECT_EQ(ec_mode, webrtc::kEcConference);
// Turn off echo cancellation and delay agnostic aec.
- options.delay_agnostic_aec.Set(false);
- options.extended_filter_aec.Set(false);
- options.echo_cancellation.Set(false);
- ASSERT_TRUE(engine_.SetOptions(options));
+ send_parameters_.options.delay_agnostic_aec = rtc::Optional<bool>(false);
+ send_parameters_.options.extended_filter_aec = rtc::Optional<bool>(false);
+ send_parameters_.options.echo_cancellation = rtc::Optional<bool>(false);
+ EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
voe_.GetEcStatus(ec_enabled, ec_mode);
EXPECT_FALSE(ec_enabled);
// Turning delay agnostic aec back on should also turn on echo cancellation.
- options.delay_agnostic_aec.Set(true);
- ASSERT_TRUE(engine_.SetOptions(options));
+ send_parameters_.options.delay_agnostic_aec = rtc::Optional<bool>(true);
+ EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
voe_.GetEcStatus(ec_enabled, ec_mode);
EXPECT_TRUE(ec_enabled);
EXPECT_TRUE(voe_.ec_metrics_enabled());
EXPECT_EQ(ec_mode, webrtc::kEcConference);
// Turn off AGC
- options.auto_gain_control.Set(false);
- ASSERT_TRUE(engine_.SetOptions(options));
+ send_parameters_.options.auto_gain_control = rtc::Optional<bool>(false);
+ EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
voe_.GetAgcStatus(agc_enabled, agc_mode);
EXPECT_FALSE(agc_enabled);
// Turn AGC back on
- options.auto_gain_control.Set(true);
- options.adjust_agc_delta.Clear();
- ASSERT_TRUE(engine_.SetOptions(options));
+ send_parameters_.options.auto_gain_control = rtc::Optional<bool>(true);
+ send_parameters_.options.adjust_agc_delta = rtc::Optional<int>();
+ EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
voe_.GetAgcStatus(agc_enabled, agc_mode);
EXPECT_TRUE(agc_enabled);
voe_.GetAgcConfig(agc_config);
EXPECT_EQ(0, agc_config.targetLeveldBOv);
// Turn off other options (and stereo swapping on).
- options.noise_suppression.Set(false);
- options.highpass_filter.Set(false);
- options.typing_detection.Set(false);
- options.stereo_swapping.Set(true);
- ASSERT_TRUE(engine_.SetOptions(options));
+ send_parameters_.options.noise_suppression = rtc::Optional<bool>(false);
+ send_parameters_.options.highpass_filter = rtc::Optional<bool>(false);
+ send_parameters_.options.typing_detection = rtc::Optional<bool>(false);
+ send_parameters_.options.stereo_swapping = rtc::Optional<bool>(true);
+ EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
voe_.GetNsStatus(ns_enabled, ns_mode);
highpass_filter_enabled = voe_.IsHighPassFilterEnabled();
stereo_swapping_enabled = voe_.IsStereoChannelSwappingEnabled();
@@ -2714,7 +2665,7 @@ TEST_F(WebRtcVoiceEngineTestFake, SetAudioOptions) {
EXPECT_TRUE(stereo_swapping_enabled);
// Set options again to ensure it has no impact.
- ASSERT_TRUE(engine_.SetOptions(options));
+ EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
voe_.GetEcStatus(ec_enabled, ec_mode);
voe_.GetNsStatus(ns_enabled, ns_mode);
EXPECT_TRUE(ec_enabled);
@@ -2785,9 +2736,9 @@ TEST_F(WebRtcVoiceEngineTestFake, SetOptionOverridesViaChannels) {
// AEC and AGC and NS
cricket::AudioSendParameters parameters_options_all = send_parameters_;
- parameters_options_all.options.echo_cancellation.Set(true);
- parameters_options_all.options.auto_gain_control.Set(true);
- parameters_options_all.options.noise_suppression.Set(true);
+ parameters_options_all.options.echo_cancellation = rtc::Optional<bool>(true);
+ parameters_options_all.options.auto_gain_control = rtc::Optional<bool>(true);
+ parameters_options_all.options.noise_suppression = rtc::Optional<bool>(true);
ASSERT_TRUE(channel1->SetSendParameters(parameters_options_all));
EXPECT_EQ(parameters_options_all.options, channel1->options());
ASSERT_TRUE(channel2->SetSendParameters(parameters_options_all));
@@ -2795,24 +2746,26 @@ TEST_F(WebRtcVoiceEngineTestFake, SetOptionOverridesViaChannels) {
// unset NS
cricket::AudioSendParameters parameters_options_no_ns = send_parameters_;
- parameters_options_no_ns.options.noise_suppression.Set(false);
+ parameters_options_no_ns.options.noise_suppression =
+ rtc::Optional<bool>(false);
ASSERT_TRUE(channel1->SetSendParameters(parameters_options_no_ns));
cricket::AudioOptions expected_options = parameters_options_all.options;
- expected_options.echo_cancellation.Set(true);
- expected_options.auto_gain_control.Set(true);
- expected_options.noise_suppression.Set(false);
+ expected_options.echo_cancellation = rtc::Optional<bool>(true);
+ expected_options.auto_gain_control = rtc::Optional<bool>(true);
+ expected_options.noise_suppression = rtc::Optional<bool>(false);
EXPECT_EQ(expected_options, channel1->options());
// unset AGC
cricket::AudioSendParameters parameters_options_no_agc = send_parameters_;
- parameters_options_no_agc.options.auto_gain_control.Set(false);
+ parameters_options_no_agc.options.auto_gain_control =
+ rtc::Optional<bool>(false);
ASSERT_TRUE(channel2->SetSendParameters(parameters_options_no_agc));
- expected_options.echo_cancellation.Set(true);
- expected_options.auto_gain_control.Set(false);
- expected_options.noise_suppression.Set(true);
+ expected_options.echo_cancellation = rtc::Optional<bool>(true);
+ expected_options.auto_gain_control = rtc::Optional<bool>(false);
+ expected_options.noise_suppression = rtc::Optional<bool>(true);
EXPECT_EQ(expected_options, channel2->options());
- ASSERT_TRUE(engine_.SetOptions(parameters_options_all.options));
+ ASSERT_TRUE(channel_->SetSendParameters(parameters_options_all));
bool ec_enabled;
webrtc::EcModes ec_mode;
bool agc_enabled;
@@ -2834,14 +2787,6 @@ TEST_F(WebRtcVoiceEngineTestFake, SetOptionOverridesViaChannels) {
EXPECT_TRUE(agc_enabled);
EXPECT_FALSE(ns_enabled);
- channel1->SetSend(cricket::SEND_NOTHING);
- voe_.GetEcStatus(ec_enabled, ec_mode);
- voe_.GetAgcStatus(agc_enabled, agc_mode);
- voe_.GetNsStatus(ns_enabled, ns_mode);
- EXPECT_TRUE(ec_enabled);
- EXPECT_TRUE(agc_enabled);
- EXPECT_TRUE(ns_enabled);
-
channel2->SetSend(cricket::SEND_MICROPHONE);
voe_.GetEcStatus(ec_enabled, ec_mode);
voe_.GetAgcStatus(agc_enabled, agc_mode);
@@ -2850,25 +2795,19 @@ TEST_F(WebRtcVoiceEngineTestFake, SetOptionOverridesViaChannels) {
EXPECT_FALSE(agc_enabled);
EXPECT_TRUE(ns_enabled);
- channel2->SetSend(cricket::SEND_NOTHING);
- voe_.GetEcStatus(ec_enabled, ec_mode);
- voe_.GetAgcStatus(agc_enabled, agc_mode);
- voe_.GetNsStatus(ns_enabled, ns_mode);
- EXPECT_TRUE(ec_enabled);
- EXPECT_TRUE(agc_enabled);
- EXPECT_TRUE(ns_enabled);
-
// Make sure settings take effect while we are sending.
- ASSERT_TRUE(engine_.SetOptions(parameters_options_all.options));
+ ASSERT_TRUE(channel_->SetSendParameters(parameters_options_all));
cricket::AudioSendParameters parameters_options_no_agc_nor_ns =
send_parameters_;
- parameters_options_no_agc_nor_ns.options.auto_gain_control.Set(false);
- parameters_options_no_agc_nor_ns.options.noise_suppression.Set(false);
+ parameters_options_no_agc_nor_ns.options.auto_gain_control =
+ rtc::Optional<bool>(false);
+ parameters_options_no_agc_nor_ns.options.noise_suppression =
+ rtc::Optional<bool>(false);
channel2->SetSend(cricket::SEND_MICROPHONE);
channel2->SetSendParameters(parameters_options_no_agc_nor_ns);
- expected_options.echo_cancellation.Set(true);
- expected_options.auto_gain_control.Set(false);
- expected_options.noise_suppression.Set(false);
+ expected_options.echo_cancellation = rtc::Optional<bool>(true);
+ expected_options.auto_gain_control = rtc::Optional<bool>(false);
+ expected_options.noise_suppression = rtc::Optional<bool>(false);
EXPECT_EQ(expected_options, channel2->options());
voe_.GetEcStatus(ec_enabled, ec_mode);
voe_.GetAgcStatus(agc_enabled, agc_mode);
@@ -2887,13 +2826,13 @@ TEST_F(WebRtcVoiceEngineTestFake, TestSetDscpOptions) {
new cricket::FakeNetworkInterface);
channel->SetInterface(network_interface.get());
cricket::AudioSendParameters parameters = send_parameters_;
- parameters.options.dscp.Set(true);
+ parameters.options.dscp = rtc::Optional<bool>(true);
EXPECT_TRUE(channel->SetSendParameters(parameters));
EXPECT_EQ(rtc::DSCP_EF, network_interface->dscp());
// Verify previous value is not modified if dscp option is not set.
EXPECT_TRUE(channel->SetSendParameters(send_parameters_));
EXPECT_EQ(rtc::DSCP_EF, network_interface->dscp());
- parameters.options.dscp.Set(false);
+ parameters.options.dscp = rtc::Optional<bool>(false);
EXPECT_TRUE(channel->SetSendParameters(parameters));
EXPECT_EQ(rtc::DSCP_DEFAULT, network_interface->dscp());
}
@@ -3002,7 +2941,7 @@ TEST_F(WebRtcVoiceEngineTestFake, CanChangeCombinedBweOption) {
}
// Enable combined BWE option - now it should be set up.
- send_parameters_.options.combined_audio_video_bwe.Set(true);
+ send_parameters_.options.combined_audio_video_bwe = rtc::Optional<bool>(true);
EXPECT_TRUE(media_channel->SetSendParameters(send_parameters_));
for (uint32_t ssrc : ssrcs) {
const auto* s = call_.GetAudioReceiveStream(ssrc);
@@ -3011,7 +2950,8 @@ TEST_F(WebRtcVoiceEngineTestFake, CanChangeCombinedBweOption) {
}
// Disable combined BWE option - should be disabled again.
- send_parameters_.options.combined_audio_video_bwe.Set(false);
+ send_parameters_.options.combined_audio_video_bwe =
+ rtc::Optional<bool>(false);
EXPECT_TRUE(media_channel->SetSendParameters(send_parameters_));
for (uint32_t ssrc : ssrcs) {
const auto* s = call_.GetAudioReceiveStream(ssrc);
@@ -3028,18 +2968,19 @@ TEST_F(WebRtcVoiceEngineTestFake, ConfigureCombinedBweForNewRecvStreams) {
EXPECT_TRUE(SetupEngineWithSendStream());
cricket::WebRtcVoiceMediaChannel* media_channel =
static_cast<cricket::WebRtcVoiceMediaChannel*>(channel_);
- send_parameters_.options.combined_audio_video_bwe.Set(true);
+ send_parameters_.options.combined_audio_video_bwe = rtc::Optional<bool>(true);
EXPECT_TRUE(media_channel->SetSendParameters(send_parameters_));
- static const uint32_t kSsrcs[] = {1, 2, 3, 4};
- for (unsigned int i = 0; i < ARRAY_SIZE(kSsrcs); ++i) {
+ for (uint32_t ssrc : kSsrcs4) {
EXPECT_TRUE(media_channel->AddRecvStream(
- cricket::StreamParams::CreateLegacy(kSsrcs[i])));
- EXPECT_NE(nullptr, call_.GetAudioReceiveStream(kSsrcs[i]));
+ cricket::StreamParams::CreateLegacy(ssrc)));
+ EXPECT_NE(nullptr, call_.GetAudioReceiveStream(ssrc));
}
- EXPECT_EQ(ARRAY_SIZE(kSsrcs), call_.GetAudioReceiveStreams().size());
+ EXPECT_EQ(arraysize(kSsrcs4), call_.GetAudioReceiveStreams().size());
}
+// TODO(solenberg): Remove, once recv streams are configured through Call.
+// (This is then covered by TestSetRecvRtpHeaderExtensions.)
TEST_F(WebRtcVoiceEngineTestFake, ConfiguresAudioReceiveStreamRtpExtensions) {
// Test that setting the header extensions results in the expected state
// changes on an associated Call.
@@ -3050,7 +2991,7 @@ TEST_F(WebRtcVoiceEngineTestFake, ConfiguresAudioReceiveStreamRtpExtensions) {
EXPECT_TRUE(SetupEngineWithSendStream());
cricket::WebRtcVoiceMediaChannel* media_channel =
static_cast<cricket::WebRtcVoiceMediaChannel*>(channel_);
- send_parameters_.options.combined_audio_video_bwe.Set(true);
+ send_parameters_.options.combined_audio_video_bwe = rtc::Optional<bool>(true);
EXPECT_TRUE(media_channel->SetSendParameters(send_parameters_));
for (uint32_t ssrc : ssrcs) {
EXPECT_TRUE(media_channel->AddRecvStream(
@@ -3066,17 +3007,17 @@ TEST_F(WebRtcVoiceEngineTestFake, ConfiguresAudioReceiveStreamRtpExtensions) {
}
// Set up receive extensions.
- const auto& e_exts = engine_.rtp_header_extensions();
+ cricket::RtpCapabilities capabilities = engine_.GetCapabilities();
cricket::AudioRecvParameters recv_parameters;
- recv_parameters.extensions = e_exts;
+ recv_parameters.extensions = capabilities.header_extensions;
channel_->SetRecvParameters(recv_parameters);
EXPECT_EQ(2, call_.GetAudioReceiveStreams().size());
for (uint32_t ssrc : ssrcs) {
const auto* s = call_.GetAudioReceiveStream(ssrc);
EXPECT_NE(nullptr, s);
const auto& s_exts = s->GetConfig().rtp.extensions;
- EXPECT_EQ(e_exts.size(), s_exts.size());
- for (const auto& e_ext : e_exts) {
+ EXPECT_EQ(capabilities.header_extensions.size(), s_exts.size());
+ for (const auto& e_ext : capabilities.header_extensions) {
for (const auto& s_ext : s_exts) {
if (e_ext.id == s_ext.id) {
EXPECT_EQ(e_ext.uri, s_ext.name);
@@ -3109,7 +3050,7 @@ TEST_F(WebRtcVoiceEngineTestFake, DeliverAudioPacket_Call) {
EXPECT_TRUE(SetupEngineWithSendStream());
cricket::WebRtcVoiceMediaChannel* media_channel =
static_cast<cricket::WebRtcVoiceMediaChannel*>(channel_);
- send_parameters_.options.combined_audio_video_bwe.Set(true);
+ send_parameters_.options.combined_audio_video_bwe = rtc::Optional<bool>(true);
EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
EXPECT_TRUE(media_channel->AddRecvStream(
cricket::StreamParams::CreateLegacy(kAudioSsrc)));
@@ -3164,18 +3105,6 @@ TEST_F(WebRtcVoiceEngineTestFake, AssociateChannelResetUponDeleteChannnel) {
EXPECT_EQ(voe_.GetAssociateSendChannel(recv_ch), -1);
}
-// Tests for the actual WebRtc VoE library.
-
-TEST(WebRtcVoiceEngineTest, TestDefaultOptionsBeforeInit) {
- cricket::WebRtcVoiceEngine engine;
- cricket::AudioOptions options = engine.GetOptions();
- // The default options should have at least a few things set. We purposefully
- // don't check the option values here, though.
- EXPECT_TRUE(options.echo_cancellation.IsSet());
- EXPECT_TRUE(options.auto_gain_control.IsSet());
- EXPECT_TRUE(options.noise_suppression.IsSet());
-}
-
// Tests that the library initializes and shuts down properly.
TEST(WebRtcVoiceEngineTest, StartupShutdown) {
cricket::WebRtcVoiceEngine engine;
@@ -3195,54 +3124,60 @@ TEST(WebRtcVoiceEngineTest, StartupShutdown) {
// Tests that the library is configured with the codecs we want.
TEST(WebRtcVoiceEngineTest, HasCorrectCodecs) {
- cricket::WebRtcVoiceEngine engine;
// Check codecs by name.
- EXPECT_TRUE(engine.FindCodec(
- cricket::AudioCodec(96, "OPUS", 48000, 0, 2, 0)));
- EXPECT_TRUE(engine.FindCodec(
- cricket::AudioCodec(96, "ISAC", 16000, 0, 1, 0)));
- EXPECT_TRUE(engine.FindCodec(
- cricket::AudioCodec(96, "ISAC", 32000, 0, 1, 0)));
+ EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(
+ cricket::AudioCodec(96, "OPUS", 48000, 0, 2, 0), nullptr));
+ EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(
+ cricket::AudioCodec(96, "ISAC", 16000, 0, 1, 0), nullptr));
+ EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(
+ cricket::AudioCodec(96, "ISAC", 32000, 0, 1, 0), nullptr));
// Check that name matching is case-insensitive.
- EXPECT_TRUE(engine.FindCodec(
- cricket::AudioCodec(96, "ILBC", 8000, 0, 1, 0)));
- EXPECT_TRUE(engine.FindCodec(
- cricket::AudioCodec(96, "iLBC", 8000, 0, 1, 0)));
- EXPECT_TRUE(engine.FindCodec(
- cricket::AudioCodec(96, "PCMU", 8000, 0, 1, 0)));
- EXPECT_TRUE(engine.FindCodec(
- cricket::AudioCodec(96, "PCMA", 8000, 0, 1, 0)));
- EXPECT_TRUE(engine.FindCodec(
- cricket::AudioCodec(96, "G722", 8000, 0, 1, 0)));
- EXPECT_TRUE(engine.FindCodec(
- cricket::AudioCodec(96, "red", 8000, 0, 1, 0)));
- EXPECT_TRUE(engine.FindCodec(
- cricket::AudioCodec(96, "CN", 32000, 0, 1, 0)));
- EXPECT_TRUE(engine.FindCodec(
- cricket::AudioCodec(96, "CN", 16000, 0, 1, 0)));
- EXPECT_TRUE(engine.FindCodec(
- cricket::AudioCodec(96, "CN", 8000, 0, 1, 0)));
- EXPECT_TRUE(engine.FindCodec(
- cricket::AudioCodec(96, "telephone-event", 8000, 0, 1, 0)));
+ EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(
+ cricket::AudioCodec(96, "ILBC", 8000, 0, 1, 0), nullptr));
+ EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(
+ cricket::AudioCodec(96, "iLBC", 8000, 0, 1, 0), nullptr));
+ EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(
+ cricket::AudioCodec(96, "PCMU", 8000, 0, 1, 0), nullptr));
+ EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(
+ cricket::AudioCodec(96, "PCMA", 8000, 0, 1, 0), nullptr));
+ EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(
+ cricket::AudioCodec(96, "G722", 8000, 0, 1, 0), nullptr));
+ EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(
+ cricket::AudioCodec(96, "red", 8000, 0, 1, 0), nullptr));
+ EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(
+ cricket::AudioCodec(96, "CN", 32000, 0, 1, 0), nullptr));
+ EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(
+ cricket::AudioCodec(96, "CN", 16000, 0, 1, 0), nullptr));
+ EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(
+ cricket::AudioCodec(96, "CN", 8000, 0, 1, 0), nullptr));
+ EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(
+ cricket::AudioCodec(96, "telephone-event", 8000, 0, 1, 0), nullptr));
// Check codecs with an id by id.
- EXPECT_TRUE(engine.FindCodec(
- cricket::AudioCodec(0, "", 8000, 0, 1, 0))); // PCMU
- EXPECT_TRUE(engine.FindCodec(
- cricket::AudioCodec(8, "", 8000, 0, 1, 0))); // PCMA
- EXPECT_TRUE(engine.FindCodec(
- cricket::AudioCodec(9, "", 8000, 0, 1, 0))); // G722
- EXPECT_TRUE(engine.FindCodec(
- cricket::AudioCodec(13, "", 8000, 0, 1, 0))); // CN
+ EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(
+ cricket::AudioCodec(0, "", 8000, 0, 1, 0), nullptr)); // PCMU
+ EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(
+ cricket::AudioCodec(8, "", 8000, 0, 1, 0), nullptr)); // PCMA
+ EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(
+ cricket::AudioCodec(9, "", 8000, 0, 1, 0), nullptr)); // G722
+ EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(
+ cricket::AudioCodec(13, "", 8000, 0, 1, 0), nullptr)); // CN
// Check sample/bitrate matching.
- EXPECT_TRUE(engine.FindCodec(
- cricket::AudioCodec(0, "PCMU", 8000, 64000, 1, 0)));
+ EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(
+ cricket::AudioCodec(0, "PCMU", 8000, 64000, 1, 0), nullptr));
// Check that bad codecs fail.
- EXPECT_FALSE(engine.FindCodec(cricket::AudioCodec(99, "ABCD", 0, 0, 1, 0)));
- EXPECT_FALSE(engine.FindCodec(cricket::AudioCodec(88, "", 0, 0, 1, 0)));
- EXPECT_FALSE(engine.FindCodec(cricket::AudioCodec(0, "", 0, 0, 2, 0)));
- EXPECT_FALSE(engine.FindCodec(cricket::AudioCodec(0, "", 5000, 0, 1, 0)));
- EXPECT_FALSE(engine.FindCodec(cricket::AudioCodec(0, "", 0, 5000, 1, 0)));
+ EXPECT_FALSE(cricket::WebRtcVoiceEngine::ToCodecInst(
+ cricket::AudioCodec(99, "ABCD", 0, 0, 1, 0), nullptr));
+ EXPECT_FALSE(cricket::WebRtcVoiceEngine::ToCodecInst(
+ cricket::AudioCodec(88, "", 0, 0, 1, 0), nullptr));
+ EXPECT_FALSE(cricket::WebRtcVoiceEngine::ToCodecInst(
+ cricket::AudioCodec(0, "", 0, 0, 2, 0), nullptr));
+ EXPECT_FALSE(cricket::WebRtcVoiceEngine::ToCodecInst(
+ cricket::AudioCodec(0, "", 5000, 0, 1, 0), nullptr));
+ EXPECT_FALSE(cricket::WebRtcVoiceEngine::ToCodecInst(
+ cricket::AudioCodec(0, "", 0, 5000, 1, 0), nullptr));
+
// Verify the payload id of common audio codecs, including CN, ISAC, and G722.
+ cricket::WebRtcVoiceEngine engine;
for (std::vector<cricket::AudioCodec>::const_iterator it =
engine.codecs().begin(); it != engine.codecs().end(); ++it) {
if (it->name == "CN" && it->clockrate == 16000) {
@@ -3269,7 +3204,6 @@ TEST(WebRtcVoiceEngineTest, HasCorrectCodecs) {
EXPECT_EQ("1", it->params.find("useinbandfec")->second);
}
}
-
engine.Terminate();
}
@@ -3282,7 +3216,7 @@ TEST(WebRtcVoiceEngineTest, Has32Channels) {
cricket::VoiceMediaChannel* channels[32];
int num_channels = 0;
- while (num_channels < ARRAY_SIZE(channels)) {
+ while (num_channels < arraysize(channels)) {
cricket::VoiceMediaChannel* channel =
engine.CreateChannel(call.get(), cricket::AudioOptions());
if (!channel)
@@ -3290,7 +3224,7 @@ TEST(WebRtcVoiceEngineTest, Has32Channels) {
channels[num_channels++] = channel;
}
- int expected = ARRAY_SIZE(channels);
+ int expected = arraysize(channels);
EXPECT_EQ(expected, num_channels);
while (num_channels > 0) {
diff --git a/talk/session/media/bundlefilter.cc b/talk/session/media/bundlefilter.cc
index b47d47fb27..670befeb7d 100755
--- a/talk/session/media/bundlefilter.cc
+++ b/talk/session/media/bundlefilter.cc
@@ -32,78 +32,29 @@
namespace cricket {
-static const uint32_t kSsrc01 = 0x01;
-
BundleFilter::BundleFilter() {
}
BundleFilter::~BundleFilter() {
}
-bool BundleFilter::DemuxPacket(const char* data, size_t len, bool rtcp) {
- // For rtp packets, we check whether the payload type can be found.
- // For rtcp packets, we check whether the ssrc can be found or is the special
- // value 1 except for SDES packets which always pass through. Plus, if
- // |streams_| is empty, we will allow all rtcp packets pass through provided
- // that they are valid rtcp packets in case that they are for early media.
- if (!rtcp) {
- // It may not be a RTP packet (e.g. SCTP).
- if (!IsRtpPacket(data, len))
- return false;
-
- int payload_type = 0;
- if (!GetRtpPayloadType(data, len, &payload_type)) {
- return false;
- }
- return FindPayloadType(payload_type);
+bool BundleFilter::DemuxPacket(const uint8_t* data, size_t len) {
+ // For RTP packets, we check whether the payload type can be found.
+ if (!IsRtpPacket(data, len)) {
+ return false;
}
- // Rtcp packets using ssrc filter.
- int pl_type = 0;
- uint32_t ssrc = 0;
- if (!GetRtcpType(data, len, &pl_type)) return false;
- if (pl_type == kRtcpTypeSDES) {
- // SDES packet parsing not supported.
- LOG(LS_INFO) << "SDES packet received for demux.";
- return true;
- } else {
- if (!GetRtcpSsrc(data, len, &ssrc)) return false;
- if (ssrc == kSsrc01) {
- // SSRC 1 has a special meaning and indicates generic feedback on
- // some systems and should never be dropped. If it is forwarded
- // incorrectly it will be ignored by lower layers anyway.
- return true;
- }
+ int payload_type = 0;
+ if (!GetRtpPayloadType(data, len, &payload_type)) {
+ return false;
}
- // Pass through if |streams_| is empty to allow early rtcp packets in.
- return !HasStreams() || FindStream(ssrc);
+ return FindPayloadType(payload_type);
}
void BundleFilter::AddPayloadType(int payload_type) {
payload_types_.insert(payload_type);
}
-bool BundleFilter::AddStream(const StreamParams& stream) {
- if (GetStreamBySsrc(streams_, stream.first_ssrc())) {
- LOG(LS_WARNING) << "Stream already added to filter";
- return false;
- }
- streams_.push_back(stream);
- return true;
-}
-
-bool BundleFilter::RemoveStream(uint32_t ssrc) {
- return RemoveStreamBySsrc(&streams_, ssrc);
-}
-
-bool BundleFilter::HasStreams() const {
- return !streams_.empty();
-}
-
-bool BundleFilter::FindStream(uint32_t ssrc) const {
- return ssrc == 0 ? false : GetStreamBySsrc(streams_, ssrc) != nullptr;
-}
-
bool BundleFilter::FindPayloadType(int pl_type) const {
return payload_types_.find(pl_type) != payload_types_.end();
}
diff --git a/talk/session/media/bundlefilter.h b/talk/session/media/bundlefilter.h
index 3717376668..d9d952f4ee 100755
--- a/talk/session/media/bundlefilter.h
+++ b/talk/session/media/bundlefilter.h
@@ -28,6 +28,8 @@
#ifndef TALK_SESSION_MEDIA_BUNDLEFILTER_H_
#define TALK_SESSION_MEDIA_BUNDLEFILTER_H_
+#include <stdint.h>
+
#include <set>
#include <vector>
@@ -37,42 +39,31 @@
namespace cricket {
// In case of single RTP session and single transport channel, all session
-// ( or media) channels share a common transport channel. Hence they all get
+// (or media) channels share a common transport channel. Hence they all get
// SignalReadPacket when a packet is received on the transport channel. This
// requires cricket::BaseChannel to know all the valid sources, else the media
// channel will decode invalid packets.
//
// This class determines whether a packet is destined for cricket::BaseChannel.
-// For rtp packets, this is decided based on the payload type. For rtcp packets,
-// this is decided based on the sender ssrc values.
+// This is only to be used for RTP packets as RTCP packets are not filtered.
+// For RTP packets, this is decided based on the payload type.
class BundleFilter {
public:
BundleFilter();
~BundleFilter();
- // Determines packet belongs to valid cricket::BaseChannel.
- bool DemuxPacket(const char* data, size_t len, bool rtcp);
+  // Determines if an RTP packet belongs to a valid cricket::BaseChannel.
+ bool DemuxPacket(const uint8_t* data, size_t len);
// Adds the supported payload type.
void AddPayloadType(int payload_type);
- // Adding a valid source to the filter.
- bool AddStream(const StreamParams& stream);
-
- // Removes source from the filter.
- bool RemoveStream(uint32_t ssrc);
-
- // Utility methods added for unitest.
- // True if |streams_| is not empty.
- bool HasStreams() const;
- bool FindStream(uint32_t ssrc) const;
+ // Public for unittests.
bool FindPayloadType(int pl_type) const;
void ClearAllPayloadTypes();
-
private:
std::set<int> payload_types_;
- std::vector<StreamParams> streams_;
};
} // namespace cricket
diff --git a/talk/session/media/bundlefilter_unittest.cc b/talk/session/media/bundlefilter_unittest.cc
index 806d6bab09..f2c35fc1d8 100755
--- a/talk/session/media/bundlefilter_unittest.cc
+++ b/talk/session/media/bundlefilter_unittest.cc
@@ -30,9 +30,6 @@
using cricket::StreamParams;
-static const int kSsrc1 = 0x1111;
-static const int kSsrc2 = 0x2222;
-static const int kSsrc3 = 0x3333;
static const int kPayloadType1 = 0x11;
static const int kPayloadType2 = 0x22;
static const int kPayloadType3 = 0x33;
@@ -55,56 +52,6 @@ static const unsigned char kRtpPacketPt3Ssrc2[] = {
0x22,
};
-// PT = 200 = SR, len = 28, SSRC of sender = 0x0001
-// NTP TS = 0, RTP TS = 0, packet count = 0
-static const unsigned char kRtcpPacketSrSsrc01[] = {
- 0x80, 0xC8, 0x00, 0x1B, 0x00, 0x00, 0x00, 0x01,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00,
-};
-
-// PT = 200 = SR, len = 28, SSRC of sender = 0x2222
-// NTP TS = 0, RTP TS = 0, packet count = 0
-static const unsigned char kRtcpPacketSrSsrc2[] = {
- 0x80, 0xC8, 0x00, 0x1B, 0x00, 0x00, 0x22, 0x22,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00,
-};
-
-// First packet - SR = PT = 200, len = 0, SSRC of sender = 0x1111
-// NTP TS = 0, RTP TS = 0, packet count = 0
-// second packet - SDES = PT = 202, count = 0, SSRC = 0x1111, cname len = 0
-static const unsigned char kRtcpPacketCompoundSrSdesSsrc1[] = {
- 0x80, 0xC8, 0x00, 0x01, 0x00, 0x00, 0x11, 0x11,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00,
- 0x81, 0xCA, 0x00, 0x00, 0x00, 0x00, 0x11, 0x11, 0x01, 0x00,
-};
-
-// SDES = PT = 202, count = 0, SSRC = 0x2222, cname len = 0
-static const unsigned char kRtcpPacketSdesSsrc2[] = {
- 0x81, 0xCA, 0x00, 0x00, 0x00, 0x00, 0x22, 0x22, 0x01, 0x00,
-};
-
-// Packet has only mandatory fixed RTCP header
-static const unsigned char kRtcpPacketFixedHeaderOnly[] = {
- 0x80, 0xC8, 0x00, 0x00,
-};
-
-// Small packet for SSRC demux.
-static const unsigned char kRtcpPacketTooSmall[] = {
- 0x80, 0xC8, 0x00, 0x00, 0x00, 0x00,
-};
-
-// PT = 206, FMT = 1, Sender SSRC = 0x1111, Media SSRC = 0x1111
-// No FCI information is needed for PLI.
-static const unsigned char kRtcpPacketNonCompoundRtcpPliFeedback[] = {
- 0x81, 0xCE, 0x00, 0x0C, 0x00, 0x00, 0x11, 0x11, 0x00, 0x00, 0x11, 0x11,
-};
-
// An SCTP packet.
static const unsigned char kSctpPacket[] = {
0x00, 0x01, 0x00, 0x01,
@@ -114,100 +61,29 @@ static const unsigned char kSctpPacket[] = {
0x00, 0x00, 0x00, 0x00,
};
-TEST(BundleFilterTest, AddRemoveStreamTest) {
- cricket::BundleFilter bundle_filter;
- EXPECT_FALSE(bundle_filter.HasStreams());
- EXPECT_TRUE(bundle_filter.AddStream(StreamParams::CreateLegacy(kSsrc1)));
- StreamParams stream2;
- stream2.ssrcs.push_back(kSsrc2);
- stream2.ssrcs.push_back(kSsrc3);
- EXPECT_TRUE(bundle_filter.AddStream(stream2));
-
- EXPECT_TRUE(bundle_filter.HasStreams());
- EXPECT_TRUE(bundle_filter.FindStream(kSsrc1));
- EXPECT_TRUE(bundle_filter.FindStream(kSsrc2));
- EXPECT_TRUE(bundle_filter.FindStream(kSsrc3));
- EXPECT_TRUE(bundle_filter.RemoveStream(kSsrc1));
- EXPECT_FALSE(bundle_filter.FindStream(kSsrc1));
- EXPECT_TRUE(bundle_filter.RemoveStream(kSsrc3));
- EXPECT_FALSE(bundle_filter.RemoveStream(kSsrc2)); // Already removed.
- EXPECT_FALSE(bundle_filter.HasStreams());
-}
-
TEST(BundleFilterTest, RtpPacketTest) {
cricket::BundleFilter bundle_filter;
bundle_filter.AddPayloadType(kPayloadType1);
- EXPECT_TRUE(bundle_filter.DemuxPacket(
- reinterpret_cast<const char*>(kRtpPacketPt1Ssrc1),
- sizeof(kRtpPacketPt1Ssrc1), false));
+ EXPECT_TRUE(bundle_filter.DemuxPacket(kRtpPacketPt1Ssrc1,
+ sizeof(kRtpPacketPt1Ssrc1)));
bundle_filter.AddPayloadType(kPayloadType2);
- EXPECT_TRUE(bundle_filter.DemuxPacket(
- reinterpret_cast<const char*>(kRtpPacketPt2Ssrc2),
- sizeof(kRtpPacketPt2Ssrc2), false));
+ EXPECT_TRUE(bundle_filter.DemuxPacket(kRtpPacketPt2Ssrc2,
+ sizeof(kRtpPacketPt2Ssrc2)));
// Payload type 0x33 is not added.
- EXPECT_FALSE(bundle_filter.DemuxPacket(
- reinterpret_cast<const char*>(kRtpPacketPt3Ssrc2),
- sizeof(kRtpPacketPt3Ssrc2), false));
+ EXPECT_FALSE(bundle_filter.DemuxPacket(kRtpPacketPt3Ssrc2,
+ sizeof(kRtpPacketPt3Ssrc2)));
// Size is too small.
- EXPECT_FALSE(bundle_filter.DemuxPacket(
- reinterpret_cast<const char*>(kRtpPacketPt1Ssrc1), 11, false));
+ EXPECT_FALSE(bundle_filter.DemuxPacket(kRtpPacketPt1Ssrc1, 11));
bundle_filter.ClearAllPayloadTypes();
- EXPECT_FALSE(bundle_filter.DemuxPacket(
- reinterpret_cast<const char*>(kRtpPacketPt1Ssrc1),
- sizeof(kRtpPacketPt1Ssrc1), false));
- EXPECT_FALSE(bundle_filter.DemuxPacket(
- reinterpret_cast<const char*>(kRtpPacketPt2Ssrc2),
- sizeof(kRtpPacketPt2Ssrc2), false));
-}
-
-TEST(BundleFilterTest, RtcpPacketTest) {
- cricket::BundleFilter bundle_filter;
- EXPECT_TRUE(bundle_filter.AddStream(StreamParams::CreateLegacy(kSsrc1)));
- EXPECT_TRUE(bundle_filter.DemuxPacket(
- reinterpret_cast<const char*>(kRtcpPacketCompoundSrSdesSsrc1),
- sizeof(kRtcpPacketCompoundSrSdesSsrc1), true));
- EXPECT_TRUE(bundle_filter.AddStream(StreamParams::CreateLegacy(kSsrc2)));
- EXPECT_TRUE(bundle_filter.DemuxPacket(
- reinterpret_cast<const char*>(kRtcpPacketSrSsrc2),
- sizeof(kRtcpPacketSrSsrc2), true));
- EXPECT_TRUE(bundle_filter.DemuxPacket(
- reinterpret_cast<const char*>(kRtcpPacketSdesSsrc2),
- sizeof(kRtcpPacketSdesSsrc2), true));
- EXPECT_TRUE(bundle_filter.RemoveStream(kSsrc2));
- // RTCP Packets other than SR and RR are demuxed regardless of SSRC.
- EXPECT_TRUE(bundle_filter.DemuxPacket(
- reinterpret_cast<const char*>(kRtcpPacketSdesSsrc2),
- sizeof(kRtcpPacketSdesSsrc2), true));
- // RTCP Packets with 'special' SSRC 0x01 are demuxed also
- EXPECT_TRUE(bundle_filter.DemuxPacket(
- reinterpret_cast<const char*>(kRtcpPacketSrSsrc01),
- sizeof(kRtcpPacketSrSsrc01), true));
- EXPECT_FALSE(bundle_filter.DemuxPacket(
- reinterpret_cast<const char*>(kRtcpPacketSrSsrc2),
- sizeof(kRtcpPacketSrSsrc2), true));
- EXPECT_FALSE(bundle_filter.DemuxPacket(
- reinterpret_cast<const char*>(kRtcpPacketFixedHeaderOnly),
- sizeof(kRtcpPacketFixedHeaderOnly), true));
- EXPECT_FALSE(bundle_filter.DemuxPacket(
- reinterpret_cast<const char*>(kRtcpPacketTooSmall),
- sizeof(kRtcpPacketTooSmall), true));
- EXPECT_TRUE(bundle_filter.DemuxPacket(
- reinterpret_cast<const char*>(kRtcpPacketNonCompoundRtcpPliFeedback),
- sizeof(kRtcpPacketNonCompoundRtcpPliFeedback), true));
- // If the streams_ is empty, rtcp packet passes through
- EXPECT_TRUE(bundle_filter.RemoveStream(kSsrc1));
- EXPECT_FALSE(bundle_filter.HasStreams());
- EXPECT_TRUE(bundle_filter.DemuxPacket(
- reinterpret_cast<const char*>(kRtcpPacketSrSsrc2),
- sizeof(kRtcpPacketSrSsrc2), true));
+ EXPECT_FALSE(bundle_filter.DemuxPacket(kRtpPacketPt1Ssrc1,
+ sizeof(kRtpPacketPt1Ssrc1)));
+ EXPECT_FALSE(bundle_filter.DemuxPacket(kRtpPacketPt2Ssrc2,
+ sizeof(kRtpPacketPt2Ssrc2)));
}
TEST(BundleFilterTest, InvalidRtpPacket) {
cricket::BundleFilter bundle_filter;
- EXPECT_TRUE(bundle_filter.AddStream(StreamParams::CreateLegacy(kSsrc1)));
- EXPECT_FALSE(bundle_filter.DemuxPacket(
- reinterpret_cast<const char*>(kSctpPacket),
- sizeof(kSctpPacket), false));
+ EXPECT_FALSE(bundle_filter.DemuxPacket(kSctpPacket, sizeof(kSctpPacket)));
}
diff --git a/talk/session/media/channel.cc b/talk/session/media/channel.cc
index 91a6d8cb5a..a59c3f82b7 100644
--- a/talk/session/media/channel.cc
+++ b/talk/session/media/channel.cc
@@ -25,23 +25,36 @@
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
+#include <utility>
+
#include "talk/session/media/channel.h"
#include "talk/media/base/constants.h"
#include "talk/media/base/rtputils.h"
-#include "webrtc/p2p/base/transportchannel.h"
#include "talk/session/media/channelmanager.h"
+#include "webrtc/audio/audio_sink.h"
#include "webrtc/base/bind.h"
#include "webrtc/base/buffer.h"
#include "webrtc/base/byteorder.h"
#include "webrtc/base/common.h"
#include "webrtc/base/dscp.h"
#include "webrtc/base/logging.h"
+#include "webrtc/base/trace_event.h"
+#include "webrtc/p2p/base/transportchannel.h"
namespace cricket {
-
using rtc::Bind;
+namespace {
+// See comment below for why we need to use a pointer to a scoped_ptr.
+bool SetRawAudioSink_w(VoiceMediaChannel* channel,
+ uint32_t ssrc,
+ rtc::scoped_ptr<webrtc::AudioSinkInterface>* sink) {
+ channel->SetRawAudioSink(ssrc, std::move(*sink));
+ return true;
+}
+} // namespace
+
enum {
MSG_EARLYMEDIATIMEOUT = 1,
MSG_SCREENCASTWINDOWEVENT,
@@ -101,15 +114,6 @@ struct DataChannelErrorMessageData : public rtc::MessageData {
DataMediaChannel::Error error;
};
-
-struct VideoChannel::ScreencastDetailsData {
- explicit ScreencastDetailsData(uint32_t s)
- : ssrc(s), fps(0), screencast_max_pixels(0) {}
- uint32_t ssrc;
- int fps;
- int screencast_max_pixels;
-};
-
static const char* PacketType(bool rtcp) {
return (!rtcp) ? "RTP" : "RTCP";
}
@@ -151,6 +155,7 @@ void RtpParametersFromMediaDescription(
if (desc->rtp_header_extensions_set()) {
params->extensions = desc->rtp_header_extensions();
}
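+  // Propagate the negotiated reduced-size RTCP (RFC 5506) setting into the
+  // media channel parameters.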
+ params->rtcp.reduced_size = desc->rtcp_reduced_size();
}
template <class Codec, class Options>
@@ -218,11 +223,11 @@ bool BaseChannel::Init() {
return false;
}
- if (!SetDtlsSrtpCiphers(transport_channel(), false)) {
+ if (!SetDtlsSrtpCryptoSuites(transport_channel(), false)) {
return false;
}
if (rtcp_transport_enabled() &&
- !SetDtlsSrtpCiphers(rtcp_transport_channel(), true)) {
+ !SetDtlsSrtpCryptoSuites(rtcp_transport_channel(), true)) {
return false;
}
@@ -249,21 +254,43 @@ bool BaseChannel::SetTransport_w(const std::string& transport_name) {
return true;
}
- set_transport_channel(transport_controller_->CreateTransportChannel_w(
- transport_name, cricket::ICE_CANDIDATE_COMPONENT_RTP));
- if (!transport_channel()) {
- return false;
+ // When using DTLS-SRTP, we must reset the SrtpFilter every time the transport
+ // changes and wait until the DTLS handshake is complete to set the newly
+ // negotiated parameters.
+ if (ShouldSetupDtlsSrtp()) {
+ // Set |writable_| to false such that UpdateWritableState_w can set up
+  // DTLS-SRTP when |writable_| becomes true again.
+ writable_ = false;
+ srtp_filter_.ResetParams();
}
+
+ // TODO(guoweis): Remove this grossness when we remove non-muxed RTCP.
if (rtcp_transport_enabled()) {
LOG(LS_INFO) << "Create RTCP TransportChannel for " << content_name()
<< " on " << transport_name << " transport ";
- set_rtcp_transport_channel(transport_controller_->CreateTransportChannel_w(
- transport_name, cricket::ICE_CANDIDATE_COMPONENT_RTCP));
+ set_rtcp_transport_channel(
+ transport_controller_->CreateTransportChannel_w(
+ transport_name, cricket::ICE_CANDIDATE_COMPONENT_RTCP),
+        false /* update_writability */);
if (!rtcp_transport_channel()) {
return false;
}
}
+  // We're not updating writability during the transition state.
+ set_transport_channel(transport_controller_->CreateTransportChannel_w(
+ transport_name, cricket::ICE_CANDIDATE_COMPONENT_RTP));
+ if (!transport_channel()) {
+ return false;
+ }
+
+ // TODO(guoweis): Remove this grossness when we remove non-muxed RTCP.
+ if (rtcp_transport_enabled()) {
+    // We can only update the RTCP ready-to-send state after
+    // set_transport_channel has handled channel writability.
+ SetReadyToSend(
+ true, rtcp_transport_channel() && rtcp_transport_channel()->writable());
+ }
transport_name_ = transport_name;
return true;
}
@@ -299,7 +326,8 @@ void BaseChannel::set_transport_channel(TransportChannel* new_tc) {
SetReadyToSend(false, new_tc && new_tc->writable());
}
-void BaseChannel::set_rtcp_transport_channel(TransportChannel* new_tc) {
+void BaseChannel::set_rtcp_transport_channel(TransportChannel* new_tc,
+                                             bool update_writability) {
ASSERT(worker_thread_ == rtc::Thread::Current());
TransportChannel* old_tc = rtcp_transport_channel_;
@@ -318,16 +346,21 @@ void BaseChannel::set_rtcp_transport_channel(TransportChannel* new_tc) {
rtcp_transport_channel_ = new_tc;
if (new_tc) {
+ RTC_CHECK(!(ShouldSetupDtlsSrtp() && srtp_filter_.IsActive()))
+ << "Setting RTCP for DTLS/SRTP after SrtpFilter is active "
+ << "should never happen.";
ConnectToTransportChannel(new_tc);
for (const auto& pair : rtcp_socket_options_) {
new_tc->SetOption(pair.first, pair.second);
}
}
- // Update aggregate writable/ready-to-send state between RTP and RTCP upon
- // setting new channel
- UpdateWritableState_w();
- SetReadyToSend(true, new_tc && new_tc->writable());
+  if (update_writability) {
+    // Update the aggregate writable/ready-to-send state between RTP and RTCP
+    // upon setting a new channel.
+ UpdateWritableState_w();
+ SetReadyToSend(true, new_tc && new_tc->writable());
+ }
}
void BaseChannel::ConnectToTransportChannel(TransportChannel* tc) {
@@ -336,6 +369,7 @@ void BaseChannel::ConnectToTransportChannel(TransportChannel* tc) {
tc->SignalWritableState.connect(this, &BaseChannel::OnWritableState);
tc->SignalReadPacket.connect(this, &BaseChannel::OnChannelRead);
tc->SignalReadyToSend.connect(this, &BaseChannel::OnReadyToSend);
+ tc->SignalDtlsState.connect(this, &BaseChannel::OnDtlsState);
}
void BaseChannel::DisconnectFromTransportChannel(TransportChannel* tc) {
@@ -344,6 +378,7 @@ void BaseChannel::DisconnectFromTransportChannel(TransportChannel* tc) {
tc->SignalWritableState.disconnect(this);
tc->SignalReadPacket.disconnect(this);
tc->SignalReadyToSend.disconnect(this);
+ tc->SignalDtlsState.disconnect(this);
}
bool BaseChannel::Enable(bool enable) {
@@ -374,6 +409,7 @@ bool BaseChannel::RemoveSendStream(uint32_t ssrc) {
bool BaseChannel::SetLocalContent(const MediaContentDescription* content,
ContentAction action,
std::string* error_desc) {
+ TRACE_EVENT0("webrtc", "BaseChannel::SetLocalContent");
return InvokeOnWorker(Bind(&BaseChannel::SetLocalContent_w,
this, content, action, error_desc));
}
@@ -381,6 +417,7 @@ bool BaseChannel::SetLocalContent(const MediaContentDescription* content,
bool BaseChannel::SetRemoteContent(const MediaContentDescription* content,
ContentAction action,
std::string* error_desc) {
+ TRACE_EVENT0("webrtc", "BaseChannel::SetRemoteContent");
return InvokeOnWorker(Bind(&BaseChannel::SetRemoteContent_w,
this, content, action, error_desc));
}
@@ -416,10 +453,10 @@ bool BaseChannel::IsReadyToReceive() const {
bool BaseChannel::IsReadyToSend() const {
// Send outgoing data if we are enabled, have local and remote content,
// and we have had some form of connectivity.
- return enabled() &&
- IsReceiveContentDirection(remote_content_direction_) &&
+ return enabled() && IsReceiveContentDirection(remote_content_direction_) &&
IsSendContentDirection(local_content_direction_) &&
- was_ever_writable();
+ was_ever_writable() &&
+ (srtp_filter_.IsActive() || !ShouldSetupDtlsSrtp());
}
bool BaseChannel::SendPacket(rtc::Buffer* packet,
@@ -459,6 +496,7 @@ void BaseChannel::OnChannelRead(TransportChannel* channel,
const char* data, size_t len,
const rtc::PacketTime& packet_time,
int flags) {
+ TRACE_EVENT0("webrtc", "BaseChannel::OnChannelRead");
// OnChannelRead gets called from P2PSocket; now pass data to MediaEngine
ASSERT(worker_thread_ == rtc::Thread::Current());
@@ -474,6 +512,22 @@ void BaseChannel::OnReadyToSend(TransportChannel* channel) {
SetReadyToSend(channel == rtcp_transport_channel_, true);
}
+void BaseChannel::OnDtlsState(TransportChannel* channel,
+ DtlsTransportState state) {
+ if (!ShouldSetupDtlsSrtp()) {
+ return;
+ }
+
+  // Reset the SRTP filter whenever the DTLS state is not CONNECTED. For the
+  // CONNECTED state, setting up the DTLS-SRTP context is deferred to
+  // ChannelWritable_w, to cover other scenarios such as the whole channel
+  // becoming writable (not just this TransportChannel), or a TransportChannel
+  // being attached after DTLS has been negotiated.
+ if (state != DTLS_TRANSPORT_CONNECTED) {
+ srtp_filter_.ResetParams();
+ }
+}
+
void BaseChannel::SetReadyToSend(bool rtcp, bool ready) {
if (rtcp) {
rtcp_ready_to_send_ = ready;
@@ -512,7 +566,7 @@ bool BaseChannel::SendPacket(bool rtcp,
// Avoid a copy by transferring the ownership of the packet data.
int message_id = (!rtcp) ? MSG_RTPPACKET : MSG_RTCPPACKET;
PacketMessageData* data = new PacketMessageData;
- data->packet = packet->Pass();
+ data->packet = std::move(*packet);
data->options = options;
worker_thread_->Post(this, message_id, data);
return true;
@@ -628,9 +682,12 @@ bool BaseChannel::WantsPacket(bool rtcp, rtc::Buffer* packet) {
<< " packet: wrong size=" << packet->size();
return false;
}
-
- // Bundle filter handles both rtp and rtcp packets.
- return bundle_filter_.DemuxPacket(packet->data<char>(), packet->size(), rtcp);
+ if (rtcp) {
+ // Permit all (seemingly valid) RTCP packets.
+ return true;
+ }
+ // Check whether we handle this payload.
+ return bundle_filter_.DemuxPacket(packet->data<uint8_t>(), packet->size());
}
void BaseChannel::HandlePacket(bool rtcp, rtc::Buffer* packet,
@@ -758,8 +815,9 @@ void BaseChannel::UpdateWritableState_w() {
void BaseChannel::ChannelWritable_w() {
ASSERT(worker_thread_ == rtc::Thread::Current());
- if (writable_)
+ if (writable_) {
return;
+ }
LOG(LS_INFO) << "Channel writable (" << content_name_ << ")"
<< (was_ever_writable_ ? "" : " for the first time");
@@ -775,22 +833,8 @@ void BaseChannel::ChannelWritable_w() {
}
}
- // If we're doing DTLS-SRTP, now is the time.
- if (!was_ever_writable_ && ShouldSetupDtlsSrtp()) {
- if (!SetupDtlsSrtp(false)) {
- SignalDtlsSetupFailure_w(false);
- return;
- }
-
- if (rtcp_transport_channel_) {
- if (!SetupDtlsSrtp(true)) {
- SignalDtlsSetupFailure_w(true);
- return;
- }
- }
- }
-
was_ever_writable_ = true;
+ MaybeSetupDtlsSrtp_w();
writable_ = true;
ChangeState();
}
@@ -806,20 +850,21 @@ void BaseChannel::SignalDtlsSetupFailure_s(bool rtcp) {
SignalDtlsSetupFailure(this, rtcp);
}
-bool BaseChannel::SetDtlsSrtpCiphers(TransportChannel *tc, bool rtcp) {
- std::vector<std::string> ciphers;
- // We always use the default SRTP ciphers for RTCP, but we may use different
- // ciphers for RTP depending on the media type.
+bool BaseChannel::SetDtlsSrtpCryptoSuites(TransportChannel* tc, bool rtcp) {
+ std::vector<int> crypto_suites;
+ // We always use the default SRTP crypto suites for RTCP, but we may use
+ // different crypto suites for RTP depending on the media type.
if (!rtcp) {
- GetSrtpCryptoSuiteNames(&ciphers);
+ GetSrtpCryptoSuites(&crypto_suites);
} else {
- GetDefaultSrtpCryptoSuiteNames(&ciphers);
+ GetDefaultSrtpCryptoSuites(&crypto_suites);
}
- return tc->SetSrtpCiphers(ciphers);
+ return tc->SetSrtpCryptoSuites(crypto_suites);
}
bool BaseChannel::ShouldSetupDtlsSrtp() const {
- return true;
+ // Since DTLS is applied to all channels, checking RTP should be enough.
+ return transport_channel_ && transport_channel_->IsDtlsActive();
}
// This function sets up DTLS-SRTP; it returns true on success.
@@ -830,14 +875,12 @@ bool BaseChannel::SetupDtlsSrtp(bool rtcp_channel) {
TransportChannel* channel =
rtcp_channel ? rtcp_transport_channel_ : transport_channel_;
- // No DTLS
- if (!channel->IsDtlsActive())
- return true;
+ RTC_DCHECK(channel->IsDtlsActive());
- std::string selected_cipher;
+ int selected_crypto_suite;
- if (!channel->GetSrtpCryptoSuite(&selected_cipher)) {
- LOG(LS_ERROR) << "No DTLS-SRTP selected cipher";
+ if (!channel->GetSrtpCryptoSuite(&selected_crypto_suite)) {
+ LOG(LS_ERROR) << "No DTLS-SRTP selected crypto suite";
return false;
}
@@ -893,21 +936,15 @@ bool BaseChannel::SetupDtlsSrtp(bool rtcp_channel) {
}
if (rtcp_channel) {
- ret = srtp_filter_.SetRtcpParams(
- selected_cipher,
- &(*send_key)[0],
- static_cast<int>(send_key->size()),
- selected_cipher,
- &(*recv_key)[0],
- static_cast<int>(recv_key->size()));
+ ret = srtp_filter_.SetRtcpParams(selected_crypto_suite, &(*send_key)[0],
+ static_cast<int>(send_key->size()),
+ selected_crypto_suite, &(*recv_key)[0],
+ static_cast<int>(recv_key->size()));
} else {
- ret = srtp_filter_.SetRtpParams(
- selected_cipher,
- &(*send_key)[0],
- static_cast<int>(send_key->size()),
- selected_cipher,
- &(*recv_key)[0],
- static_cast<int>(recv_key->size()));
+ ret = srtp_filter_.SetRtpParams(selected_crypto_suite, &(*send_key)[0],
+ static_cast<int>(send_key->size()),
+ selected_crypto_suite, &(*recv_key)[0],
+ static_cast<int>(recv_key->size()));
}
if (!ret)
@@ -918,6 +955,28 @@ bool BaseChannel::SetupDtlsSrtp(bool rtcp_channel) {
return ret;
}
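+// Sets up DTLS-SRTP on the RTP filter, and on the RTCP filter when an RTCP
+// transport channel exists, unless the SRTP filter is already active or DTLS
+// is not in use on the transport.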
+void BaseChannel::MaybeSetupDtlsSrtp_w() {
+ if (srtp_filter_.IsActive()) {
+ return;
+ }
+
+ if (!ShouldSetupDtlsSrtp()) {
+ return;
+ }
+
+ if (!SetupDtlsSrtp(false)) {
+ SignalDtlsSetupFailure_w(false);
+ return;
+ }
+
+ if (rtcp_transport_channel_) {
+ if (!SetupDtlsSrtp(true)) {
+ SignalDtlsSetupFailure_w(true);
+ return;
+ }
+ }
+}
+
void BaseChannel::ChannelNotWritable_w() {
ASSERT(worker_thread_ == rtc::Thread::Current());
if (!writable_)
@@ -1022,7 +1081,7 @@ void BaseChannel::ActivateRtcpMux() {
void BaseChannel::ActivateRtcpMux_w() {
if (!rtcp_mux_filter_.IsActive()) {
rtcp_mux_filter_.SetActive();
- set_rtcp_transport_channel(nullptr);
+ set_rtcp_transport_channel(nullptr, true);
rtcp_transport_enabled_ = false;
}
}
@@ -1045,7 +1104,7 @@ bool BaseChannel::SetRtcpMux_w(bool enable, ContentAction action,
LOG(LS_INFO) << "Enabling rtcp-mux for " << content_name()
<< " by destroying RTCP transport channel for "
<< transport_name();
- set_rtcp_transport_channel(nullptr);
+ set_rtcp_transport_channel(nullptr, true);
rtcp_transport_enabled_ = false;
}
break;
@@ -1075,15 +1134,11 @@ bool BaseChannel::SetRtcpMux_w(bool enable, ContentAction action,
bool BaseChannel::AddRecvStream_w(const StreamParams& sp) {
ASSERT(worker_thread() == rtc::Thread::Current());
- if (!media_channel()->AddRecvStream(sp))
- return false;
-
- return bundle_filter_.AddStream(sp);
+ return media_channel()->AddRecvStream(sp);
}
bool BaseChannel::RemoveRecvStream_w(uint32_t ssrc) {
ASSERT(worker_thread() == rtc::Thread::Current());
- bundle_filter_.RemoveStream(ssrc);
return media_channel()->RemoveRecvStream(ssrc);
}
@@ -1243,6 +1298,7 @@ void BaseChannel::MaybeCacheRtpAbsSendTimeHeaderExtension(
}
void BaseChannel::OnMessage(rtc::Message *pmsg) {
+ TRACE_EVENT0("webrtc", "BaseChannel::OnMessage");
switch (pmsg->message_id) {
case MSG_RTPPACKET:
case MSG_RTCPPACKET: {
@@ -1324,15 +1380,6 @@ void VoiceChannel::SetEarlyMedia(bool enable) {
}
}
-bool VoiceChannel::PressDTMF(int digit, bool playout) {
- int flags = DF_SEND;
- if (playout) {
- flags |= DF_PLAY;
- }
- int duration_ms = 160;
- return InsertDtmf(0, digit, duration_ms, flags);
-}
-
bool VoiceChannel::CanInsertDtmf() {
return InvokeOnWorker(Bind(&VoiceMediaChannel::CanInsertDtmf,
media_channel()));
@@ -1340,10 +1387,9 @@ bool VoiceChannel::CanInsertDtmf() {
bool VoiceChannel::InsertDtmf(uint32_t ssrc,
int event_code,
- int duration,
- int flags) {
+ int duration) {
return InvokeOnWorker(Bind(&VoiceChannel::InsertDtmf_w, this,
- ssrc, event_code, duration, flags));
+ ssrc, event_code, duration));
}
bool VoiceChannel::SetOutputVolume(uint32_t ssrc, double volume) {
@@ -1351,6 +1397,15 @@ bool VoiceChannel::SetOutputVolume(uint32_t ssrc, double volume) {
media_channel(), ssrc, volume));
}
+void VoiceChannel::SetRawAudioSink(
+ uint32_t ssrc,
+ rtc::scoped_ptr<webrtc::AudioSinkInterface> sink) {
+ // We need to work around Bind's lack of support for scoped_ptr and ownership
+  // passing. So we invoke our own little routine that gets a pointer to
+ // our local variable. This is OK since we're synchronously invoking.
+ InvokeOnWorker(Bind(&SetRawAudioSink_w, media_channel(), ssrc, &sink));
+}
+
bool VoiceChannel::GetStats(VoiceMediaInfo* stats) {
return InvokeOnWorker(Bind(&VoiceMediaChannel::GetStats,
media_channel(), stats));
@@ -1440,6 +1495,7 @@ const ContentInfo* VoiceChannel::GetFirstContent(
bool VoiceChannel::SetLocalContent_w(const MediaContentDescription* content,
ContentAction action,
std::string* error_desc) {
+ TRACE_EVENT0("webrtc", "VoiceChannel::SetLocalContent_w");
ASSERT(worker_thread() == rtc::Thread::Current());
LOG(LS_INFO) << "Setting local voice description";
@@ -1484,6 +1540,7 @@ bool VoiceChannel::SetLocalContent_w(const MediaContentDescription* content,
bool VoiceChannel::SetRemoteContent_w(const MediaContentDescription* content,
ContentAction action,
std::string* error_desc) {
+ TRACE_EVENT0("webrtc", "VoiceChannel::SetRemoteContent_w");
ASSERT(worker_thread() == rtc::Thread::Current());
LOG(LS_INFO) << "Setting remote voice description";
@@ -1502,7 +1559,7 @@ bool VoiceChannel::SetRemoteContent_w(const MediaContentDescription* content,
AudioSendParameters send_params = last_send_params_;
RtpSendParametersFromMediaDescription(audio, &send_params);
if (audio->agc_minus_10db()) {
- send_params.options.adjust_agc_delta.Set(kAgcMinus10db);
+ send_params.options.adjust_agc_delta = rtc::Optional<int>(kAgcMinus10db);
}
if (!media_channel()->SetSendParameters(send_params)) {
SafeSetError("Failed to set remote audio description send parameters.",
@@ -1539,13 +1596,11 @@ void VoiceChannel::HandleEarlyMediaTimeout() {
bool VoiceChannel::InsertDtmf_w(uint32_t ssrc,
int event,
- int duration,
- int flags) {
+ int duration) {
if (!enabled()) {
return false;
}
-
- return media_channel()->InsertDtmf(ssrc, event, duration, flags);
+ return media_channel()->InsertDtmf(ssrc, event, duration);
}
void VoiceChannel::OnMessage(rtc::Message *pmsg) {
@@ -1581,9 +1636,8 @@ void VoiceChannel::OnAudioMonitorUpdate(AudioMonitor* monitor,
SignalAudioMonitor(this, info);
}
-void VoiceChannel::GetSrtpCryptoSuiteNames(
- std::vector<std::string>* ciphers) const {
- GetSupportedAudioCryptoSuites(ciphers);
+void VoiceChannel::GetSrtpCryptoSuites(std::vector<int>* crypto_suites) const {
+ GetSupportedAudioCryptoSuites(crypto_suites);
}
VideoChannel::VideoChannel(rtc::Thread* thread,
@@ -1653,20 +1707,6 @@ bool VideoChannel::IsScreencasting() {
return InvokeOnWorker(Bind(&VideoChannel::IsScreencasting_w, this));
}
-int VideoChannel::GetScreencastFps(uint32_t ssrc) {
- ScreencastDetailsData data(ssrc);
- worker_thread()->Invoke<void>(Bind(
- &VideoChannel::GetScreencastDetails_w, this, &data));
- return data.fps;
-}
-
-int VideoChannel::GetScreencastMaxPixels(uint32_t ssrc) {
- ScreencastDetailsData data(ssrc);
- worker_thread()->Invoke<void>(Bind(
- &VideoChannel::GetScreencastDetails_w, this, &data));
- return data.screencast_max_pixels;
-}
-
bool VideoChannel::SendIntraFrame() {
worker_thread()->Invoke<void>(Bind(
&VideoMediaChannel::SendIntraFrame, media_channel()));
@@ -1726,6 +1766,7 @@ const ContentInfo* VideoChannel::GetFirstContent(
bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content,
ContentAction action,
std::string* error_desc) {
+ TRACE_EVENT0("webrtc", "VideoChannel::SetLocalContent_w");
ASSERT(worker_thread() == rtc::Thread::Current());
LOG(LS_INFO) << "Setting local video description";
@@ -1770,6 +1811,7 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content,
bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content,
ContentAction action,
std::string* error_desc) {
+ TRACE_EVENT0("webrtc", "VideoChannel::SetRemoteContent_w");
ASSERT(worker_thread() == rtc::Thread::Current());
LOG(LS_INFO) << "Setting remote video description";
@@ -1789,7 +1831,7 @@ bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content,
VideoSendParameters send_params = last_send_params_;
RtpSendParametersFromMediaDescription(video, &send_params);
if (video->conference_mode()) {
- send_params.options.conference_mode.Set(true);
+ send_params.options.conference_mode = rtc::Optional<bool>(true);
}
if (!media_channel()->SetSendParameters(send_params)) {
SafeSetError("Failed to set remote video description send parameters.",
@@ -1877,18 +1919,6 @@ bool VideoChannel::IsScreencasting_w() const {
return !screencast_capturers_.empty();
}
-void VideoChannel::GetScreencastDetails_w(
- ScreencastDetailsData* data) const {
- ScreencastMap::const_iterator iter = screencast_capturers_.find(data->ssrc);
- if (iter == screencast_capturers_.end()) {
- return;
- }
- VideoCapturer* capturer = iter->second;
- const VideoFormat* video_format = capturer->GetCaptureFormat();
- data->fps = VideoFormat::IntervalToFps(video_format->interval);
- data->screencast_max_pixels = capturer->screencast_max_pixels();
-}
-
void VideoChannel::OnScreencastWindowEvent_s(uint32_t ssrc,
rtc::WindowEvent we) {
ASSERT(signaling_thread() == rtc::Thread::Current());
@@ -1971,9 +2001,8 @@ bool VideoChannel::GetLocalSsrc(const VideoCapturer* capturer, uint32_t* ssrc) {
return false;
}
-void VideoChannel::GetSrtpCryptoSuiteNames(
- std::vector<std::string>* ciphers) const {
- GetSupportedVideoCryptoSuites(ciphers);
+void VideoChannel::GetSrtpCryptoSuites(std::vector<int>* crypto_suites) const {
+ GetSupportedVideoCryptoSuites(crypto_suites);
}
DataChannel::DataChannel(rtc::Thread* thread,
@@ -2067,6 +2096,7 @@ bool DataChannel::SetDataChannelTypeFromContent(
bool DataChannel::SetLocalContent_w(const MediaContentDescription* content,
ContentAction action,
std::string* error_desc) {
+ TRACE_EVENT0("webrtc", "DataChannel::SetLocalContent_w");
ASSERT(worker_thread() == rtc::Thread::Current());
LOG(LS_INFO) << "Setting local data description";
@@ -2122,6 +2152,7 @@ bool DataChannel::SetLocalContent_w(const MediaContentDescription* content,
bool DataChannel::SetRemoteContent_w(const MediaContentDescription* content,
ContentAction action,
std::string* error_desc) {
+ TRACE_EVENT0("webrtc", "DataChannel::SetRemoteContent_w");
ASSERT(worker_thread() == rtc::Thread::Current());
const DataContentDescription* data =
@@ -2279,13 +2310,12 @@ void DataChannel::OnDataChannelReadyToSend(bool writable) {
new DataChannelReadyToSendMessageData(writable));
}
-void DataChannel::GetSrtpCryptoSuiteNames(
- std::vector<std::string>* ciphers) const {
- GetSupportedDataCryptoSuites(ciphers);
+void DataChannel::GetSrtpCryptoSuites(std::vector<int>* crypto_suites) const {
+ GetSupportedDataCryptoSuites(crypto_suites);
}
bool DataChannel::ShouldSetupDtlsSrtp() const {
- return (data_channel_type_ == DCT_RTP);
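+  // SCTP data channels run directly over DTLS and do not use SRTP, so only
+  // RTP data channels need the DTLS-SRTP key setup.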
+ return (data_channel_type_ == DCT_RTP) && BaseChannel::ShouldSetupDtlsSrtp();
}
void DataChannel::OnStreamClosedRemotely(uint32_t sid) {
diff --git a/talk/session/media/channel.h b/talk/session/media/channel.h
index 603115cee7..d8fde670a0 100644
--- a/talk/session/media/channel.h
+++ b/talk/session/media/channel.h
@@ -38,19 +38,24 @@
#include "talk/media/base/mediaengine.h"
#include "talk/media/base/streamparams.h"
#include "talk/media/base/videocapturer.h"
-#include "webrtc/p2p/base/transportcontroller.h"
-#include "webrtc/p2p/client/socketmonitor.h"
#include "talk/session/media/audiomonitor.h"
#include "talk/session/media/bundlefilter.h"
#include "talk/session/media/mediamonitor.h"
#include "talk/session/media/mediasession.h"
#include "talk/session/media/rtcpmuxfilter.h"
#include "talk/session/media/srtpfilter.h"
+#include "webrtc/audio/audio_sink.h"
#include "webrtc/base/asyncudpsocket.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/network.h"
#include "webrtc/base/sigslot.h"
#include "webrtc/base/window.h"
+#include "webrtc/p2p/base/transportcontroller.h"
+#include "webrtc/p2p/client/socketmonitor.h"
+
+namespace webrtc {
+class AudioSinkInterface;
+} // namespace webrtc
namespace cricket {
@@ -174,8 +179,11 @@ class BaseChannel
// Sets the |transport_channel_| (and |rtcp_transport_channel_|, if |rtcp_| is
// true). Gets the transport channels from |transport_controller_|.
bool SetTransport_w(const std::string& transport_name);
+
void set_transport_channel(TransportChannel* transport);
- void set_rtcp_transport_channel(TransportChannel* transport);
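+  // When |update_writability| is false, the aggregate writable/ready-to-send
+  // state is left untouched; the caller is expected to update it once the RTP
+  // transport channel has been set.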
+ void set_rtcp_transport_channel(TransportChannel* transport,
+                                  bool update_writability);
+
bool was_ever_writable() const { return was_ever_writable_; }
void set_local_content_direction(MediaContentDirection direction) {
local_content_direction_ = direction;
@@ -213,6 +221,8 @@ class BaseChannel
int flags);
void OnReadyToSend(TransportChannel* channel);
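+  // Called when the DTLS handshake state of |channel| changes; resets the
+  // negotiated SRTP parameters whenever DTLS leaves the connected state.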
+ void OnDtlsState(TransportChannel* channel, DtlsTransportState state);
+
bool PacketIsRtcp(const TransportChannel* channel, const char* data,
size_t len);
bool SendPacket(bool rtcp,
@@ -235,8 +245,9 @@ class BaseChannel
// Do the DTLS key expansion and impose it on the SRTP/SRTCP filters.
// |rtcp_channel| indicates whether to set up the RTP or RTCP filter.
bool SetupDtlsSrtp(bool rtcp_channel);
+ void MaybeSetupDtlsSrtp_w();
// Set the DTLS-SRTP cipher policy on this channel as appropriate.
- bool SetDtlsSrtpCiphers(TransportChannel *tc, bool rtcp);
+ bool SetDtlsSrtpCryptoSuites(TransportChannel* tc, bool rtcp);
virtual void ChangeState() = 0;
@@ -282,9 +293,8 @@ class BaseChannel
void OnMessage(rtc::Message* pmsg) override;
// Handled in derived classes
- // Get the SRTP ciphers to use for RTP media
- virtual void GetSrtpCryptoSuiteNames(
- std::vector<std::string>* ciphers) const = 0;
+ // Get the SRTP crypto suites to use for RTP media
+ virtual void GetSrtpCryptoSuites(std::vector<int>* crypto_suites) const = 0;
virtual void OnConnectionMonitorUpdate(ConnectionMonitor* monitor,
const std::vector<ConnectionInfo>& infos) = 0;
@@ -356,8 +366,6 @@ class VoiceChannel : public BaseChannel {
// own ringing sound
sigslot::signal1<VoiceChannel*> SignalEarlyMediaTimeout;
- // TODO(ronghuawu): Replace PressDTMF with InsertDtmf.
- bool PressDTMF(int digit, bool playout);
// Returns if the telephone-event has been negotiated.
bool CanInsertDtmf();
// Send and/or play a DTMF |event| according to the |flags|.
@@ -365,8 +373,11 @@ class VoiceChannel : public BaseChannel {
// The |ssrc| should be either 0 or a valid send stream ssrc.
// The valid values for |event| are 0 to 15, corresponding to DTMF
// events 0-9, *, #, A-D.
- bool InsertDtmf(uint32_t ssrc, int event_code, int duration, int flags);
+ bool InsertDtmf(uint32_t ssrc, int event_code, int duration);
bool SetOutputVolume(uint32_t ssrc, double volume);
+ void SetRawAudioSink(uint32_t ssrc,
+ rtc::scoped_ptr<webrtc::AudioSinkInterface> sink);
+
// Get statistics about the current media session.
bool GetStats(VoiceMediaInfo* stats);
@@ -402,12 +413,12 @@ class VoiceChannel : public BaseChannel {
ContentAction action,
std::string* error_desc);
void HandleEarlyMediaTimeout();
- bool InsertDtmf_w(uint32_t ssrc, int event, int duration, int flags);
+ bool InsertDtmf_w(uint32_t ssrc, int event, int duration);
bool SetOutputVolume_w(uint32_t ssrc, double volume);
bool GetStats_w(VoiceMediaInfo* stats);
virtual void OnMessage(rtc::Message* pmsg);
- virtual void GetSrtpCryptoSuiteNames(std::vector<std::string>* ciphers) const;
+ virtual void GetSrtpCryptoSuites(std::vector<int>* crypto_suites) const;
virtual void OnConnectionMonitorUpdate(
ConnectionMonitor* monitor, const std::vector<ConnectionInfo>& infos);
virtual void OnMediaMonitorUpdate(
@@ -456,8 +467,6 @@ class VideoChannel : public BaseChannel {
// True if we've added a screencast. Doesn't matter if the capturer
// has been started or not.
bool IsScreencasting();
- int GetScreencastFps(uint32_t ssrc);
- int GetScreencastMaxPixels(uint32_t ssrc);
// Get statistics about the current media session.
bool GetStats(VideoMediaInfo* stats);
@@ -476,7 +485,6 @@ class VideoChannel : public BaseChannel {
private:
typedef std::map<uint32_t, VideoCapturer*> ScreencastMap;
- struct ScreencastDetailsData;
// overrides from BaseChannel
virtual void ChangeState();
@@ -493,11 +501,10 @@ class VideoChannel : public BaseChannel {
bool RemoveScreencast_w(uint32_t ssrc);
void OnScreencastWindowEvent_s(uint32_t ssrc, rtc::WindowEvent we);
bool IsScreencasting_w() const;
- void GetScreencastDetails_w(ScreencastDetailsData* d) const;
bool GetStats_w(VideoMediaInfo* stats);
virtual void OnMessage(rtc::Message* pmsg);
- virtual void GetSrtpCryptoSuiteNames(std::vector<std::string>* ciphers) const;
+ virtual void GetSrtpCryptoSuites(std::vector<int>* crypto_suites) const;
virtual void OnConnectionMonitorUpdate(
ConnectionMonitor* monitor, const std::vector<ConnectionInfo>& infos);
virtual void OnMediaMonitorUpdate(
@@ -614,7 +621,7 @@ class DataChannel : public BaseChannel {
virtual bool WantsPacket(bool rtcp, rtc::Buffer* packet);
virtual void OnMessage(rtc::Message* pmsg);
- virtual void GetSrtpCryptoSuiteNames(std::vector<std::string>* ciphers) const;
+ virtual void GetSrtpCryptoSuites(std::vector<int>* crypto_suites) const;
virtual void OnConnectionMonitorUpdate(
ConnectionMonitor* monitor, const std::vector<ConnectionInfo>& infos);
virtual void OnMediaMonitorUpdate(
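[Note: channel.h now forward-declares webrtc::AudioSinkInterface rather than relying on transitive includes, so translation units that never touch the sink don't pay for the full header. A small sketch of the idiom (the surrounding class is illustrative, not the real VoiceChannel):]

    // In the header: an incomplete type is enough for pointer, reference,
    // and smart-pointer parameters that are only passed through.
    namespace webrtc {
    class AudioSinkInterface;
    }  // namespace webrtc

    class VoiceChannelLike {
     public:
      // This declaration compiles against the incomplete type.
      void SetRawAudioSink(webrtc::AudioSinkInterface* sink);
    };

    // In the .cc file, include the real definition before using members:
    // #include "webrtc/audio/audio_sink.h"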
diff --git a/talk/session/media/channel_unittest.cc b/talk/session/media/channel_unittest.cc
index 18233202b6..6b1d66fe39 100644
--- a/talk/session/media/channel_unittest.cc
+++ b/talk/session/media/channel_unittest.cc
@@ -33,8 +33,8 @@
#include "talk/media/base/rtpdump.h"
#include "talk/media/base/screencastid.h"
#include "talk/media/base/testutils.h"
-#include "webrtc/p2p/base/faketransportcontroller.h"
#include "talk/session/media/channel.h"
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/fileutils.h"
#include "webrtc/base/gunit.h"
#include "webrtc/base/helpers.h"
@@ -44,6 +44,7 @@
#include "webrtc/base/ssladapter.h"
#include "webrtc/base/sslidentity.h"
#include "webrtc/base/window.h"
+#include "webrtc/p2p/base/faketransportcontroller.h"
#define MAYBE_SKIP_TEST(feature) \
if (!(rtc::SSLStreamAdapter::feature())) { \
@@ -174,17 +175,15 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
if (flags1 & DTLS) {
// Confirmed to work with KT_RSA and KT_ECDSA.
- transport_controller1_.SetLocalCertificate(rtc::RTCCertificate::Create(
- rtc::scoped_ptr<rtc::SSLIdentity>(
- rtc::SSLIdentity::Generate("session1", rtc::KT_DEFAULT))
- .Pass()));
+ transport_controller1_.SetLocalCertificate(
+ rtc::RTCCertificate::Create(rtc::scoped_ptr<rtc::SSLIdentity>(
+ rtc::SSLIdentity::Generate("session1", rtc::KT_DEFAULT))));
}
if (flags2 & DTLS) {
// Confirmed to work with KT_RSA and KT_ECDSA.
- transport_controller2_.SetLocalCertificate(rtc::RTCCertificate::Create(
- rtc::scoped_ptr<rtc::SSLIdentity>(
- rtc::SSLIdentity::Generate("session2", rtc::KT_DEFAULT))
- .Pass()));
+ transport_controller2_.SetLocalCertificate(
+ rtc::RTCCertificate::Create(rtc::scoped_ptr<rtc::SSLIdentity>(
+ rtc::SSLIdentity::Generate("session2", rtc::KT_DEFAULT))));
}
// Add stream information (SSRC) to the local content but not to the remote
@@ -1473,12 +1472,6 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
EXPECT_TRUE(channel2_->bundle_filter()->FindPayloadType(pl_type1));
EXPECT_FALSE(channel1_->bundle_filter()->FindPayloadType(pl_type2));
EXPECT_FALSE(channel2_->bundle_filter()->FindPayloadType(pl_type2));
- // channel1 - should only have media_content2 as remote. i.e. kSsrc2
- EXPECT_TRUE(channel1_->bundle_filter()->FindStream(kSsrc2));
- EXPECT_FALSE(channel1_->bundle_filter()->FindStream(kSsrc1));
- // channel2 - should only have media_content1 as remote. i.e. kSsrc1
- EXPECT_TRUE(channel2_->bundle_filter()->FindStream(kSsrc1));
- EXPECT_FALSE(channel2_->bundle_filter()->FindStream(kSsrc2));
// Both channels can receive pl_type1 only.
EXPECT_TRUE(SendCustomRtp1(kSsrc1, ++sequence_number1_1, pl_type1));
@@ -1503,8 +1496,9 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
EXPECT_TRUE(SendCustomRtcp1(kSsrc2));
EXPECT_TRUE(SendCustomRtcp2(kSsrc1));
- EXPECT_FALSE(CheckCustomRtcp1(kSsrc1));
- EXPECT_FALSE(CheckCustomRtcp2(kSsrc2));
+ // Bundle filter shouldn't filter out any RTCP.
+ EXPECT_TRUE(CheckCustomRtcp1(kSsrc1));
+ EXPECT_TRUE(CheckCustomRtcp2(kSsrc2));
}
// Test that the media monitor can be run and gives timely callbacks.
@@ -2116,23 +2110,6 @@ TEST_F(VoiceChannelTest, TestMediaMonitor) {
Base::TestMediaMonitor();
}
-// Test that PressDTMF properly forwards to the media channel.
-TEST_F(VoiceChannelTest, TestDtmf) {
- CreateChannels(0, 0);
- EXPECT_TRUE(SendInitiate());
- EXPECT_TRUE(SendAccept());
- EXPECT_EQ(0U, media_channel1_->dtmf_info_queue().size());
-
- EXPECT_TRUE(channel1_->PressDTMF(1, true));
- EXPECT_TRUE(channel1_->PressDTMF(8, false));
-
- ASSERT_EQ(2U, media_channel1_->dtmf_info_queue().size());
- EXPECT_TRUE(CompareDtmfInfo(media_channel1_->dtmf_info_queue()[0],
- 0, 1, 160, cricket::DF_PLAY | cricket::DF_SEND));
- EXPECT_TRUE(CompareDtmfInfo(media_channel1_->dtmf_info_queue()[1],
- 0, 8, 160, cricket::DF_SEND));
-}
-
// Test that InsertDtmf properly forwards to the media channel.
TEST_F(VoiceChannelTest, TestInsertDtmf) {
CreateChannels(0, 0);
@@ -2140,18 +2117,17 @@ TEST_F(VoiceChannelTest, TestInsertDtmf) {
EXPECT_TRUE(SendAccept());
EXPECT_EQ(0U, media_channel1_->dtmf_info_queue().size());
- EXPECT_TRUE(channel1_->InsertDtmf(1, 3, 100, cricket::DF_SEND));
- EXPECT_TRUE(channel1_->InsertDtmf(2, 5, 110, cricket::DF_PLAY));
- EXPECT_TRUE(channel1_->InsertDtmf(3, 7, 120,
- cricket::DF_PLAY | cricket::DF_SEND));
+ EXPECT_TRUE(channel1_->InsertDtmf(1, 3, 100));
+ EXPECT_TRUE(channel1_->InsertDtmf(2, 5, 110));
+ EXPECT_TRUE(channel1_->InsertDtmf(3, 7, 120));
ASSERT_EQ(3U, media_channel1_->dtmf_info_queue().size());
EXPECT_TRUE(CompareDtmfInfo(media_channel1_->dtmf_info_queue()[0],
- 1, 3, 100, cricket::DF_SEND));
+ 1, 3, 100));
EXPECT_TRUE(CompareDtmfInfo(media_channel1_->dtmf_info_queue()[1],
- 2, 5, 110, cricket::DF_PLAY));
+ 2, 5, 110));
EXPECT_TRUE(CompareDtmfInfo(media_channel1_->dtmf_info_queue()[2],
- 3, 7, 120, cricket::DF_PLAY | cricket::DF_SEND));
+ 3, 7, 120));
}
TEST_F(VoiceChannelTest, TestSetContentFailure) {
@@ -2253,21 +2229,19 @@ TEST_F(VoiceChannelTest, TestScaleVolumeMultiwayCall) {
}
TEST_F(VoiceChannelTest, SendBundleToBundle) {
- Base::SendBundleToBundle(kAudioPts, ARRAY_SIZE(kAudioPts), false, false);
+ Base::SendBundleToBundle(kAudioPts, arraysize(kAudioPts), false, false);
}
TEST_F(VoiceChannelTest, SendBundleToBundleSecure) {
- Base::SendBundleToBundle(kAudioPts, ARRAY_SIZE(kAudioPts), false, true);
+ Base::SendBundleToBundle(kAudioPts, arraysize(kAudioPts), false, true);
}
TEST_F(VoiceChannelTest, SendBundleToBundleWithRtcpMux) {
- Base::SendBundleToBundle(
- kAudioPts, ARRAY_SIZE(kAudioPts), true, false);
+ Base::SendBundleToBundle(kAudioPts, arraysize(kAudioPts), true, false);
}
TEST_F(VoiceChannelTest, SendBundleToBundleWithRtcpMuxSecure) {
- Base::SendBundleToBundle(
- kAudioPts, ARRAY_SIZE(kAudioPts), true, true);
+ Base::SendBundleToBundle(kAudioPts, arraysize(kAudioPts), true, true);
}
// VideoChannelTest
@@ -2501,21 +2475,19 @@ TEST_F(VideoChannelTest, TestFlushRtcp) {
}
TEST_F(VideoChannelTest, SendBundleToBundle) {
- Base::SendBundleToBundle(kVideoPts, ARRAY_SIZE(kVideoPts), false, false);
+ Base::SendBundleToBundle(kVideoPts, arraysize(kVideoPts), false, false);
}
TEST_F(VideoChannelTest, SendBundleToBundleSecure) {
- Base::SendBundleToBundle(kVideoPts, ARRAY_SIZE(kVideoPts), false, true);
+ Base::SendBundleToBundle(kVideoPts, arraysize(kVideoPts), false, true);
}
TEST_F(VideoChannelTest, SendBundleToBundleWithRtcpMux) {
- Base::SendBundleToBundle(
- kVideoPts, ARRAY_SIZE(kVideoPts), true, false);
+ Base::SendBundleToBundle(kVideoPts, arraysize(kVideoPts), true, false);
}
TEST_F(VideoChannelTest, SendBundleToBundleWithRtcpMuxSecure) {
- Base::SendBundleToBundle(
- kVideoPts, ARRAY_SIZE(kVideoPts), true, true);
+ Base::SendBundleToBundle(kVideoPts, arraysize(kVideoPts), true, true);
}
TEST_F(VideoChannelTest, TestSrtpError) {
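[Note: the tests above switch from the ARRAY_SIZE macro to arraysize from webrtc/base/arraysize.h, which rejects pointers at compile time instead of silently dividing sizeofs. Roughly, the mechanism looks like this sketch (the real header may differ in detail):]

    #include <cstddef>

    // Declares a function returning a reference to a char array whose length
    // equals the argument array's length. Never defined; it is only used
    // inside sizeof, so no code is generated.
    template <typename T, size_t N>
    char (&ArraySizeHelper(T (&array)[N]))[N];

    #define arraysize(array) (sizeof(ArraySizeHelper(array)))

    int main() {
      const int kAudioPts[] = {0, 8};
      static_assert(arraysize(kAudioPts) == 2, "compile-time length");
      // const int* p = kAudioPts; arraysize(p);  // would fail to compile,
      //                                          // unlike sizeof(a)/sizeof(a[0]).
      return 0;
    }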
diff --git a/talk/session/media/channelmanager.cc b/talk/session/media/channelmanager.cc
index e7e1cd44a2..e7a4b8bddb 100644
--- a/talk/session/media/channelmanager.cc
+++ b/talk/session/media/channelmanager.cc
@@ -49,6 +49,7 @@
#include "webrtc/base/sigslotrepeater.h"
#include "webrtc/base/stringencode.h"
#include "webrtc/base/stringutils.h"
+#include "webrtc/base/trace_event.h"
namespace cricket {
@@ -101,8 +102,6 @@ void ChannelManager::Construct(MediaEngineInterface* me,
initialized_ = false;
main_thread_ = rtc::Thread::Current();
worker_thread_ = worker_thread;
- // Get the default audio options from the media engine.
- audio_options_ = media_engine_->GetAudioOptions();
audio_output_volume_ = kNotSetOutputVolume;
local_renderer_ = NULL;
capturing_ = false;
@@ -156,7 +155,7 @@ void ChannelManager::GetSupportedAudioCodecs(
void ChannelManager::GetSupportedAudioRtpHeaderExtensions(
RtpHeaderExtensions* ext) const {
- *ext = media_engine_->audio_rtp_header_extensions();
+ *ext = media_engine_->GetAudioCapabilities().header_extensions;
}
void ChannelManager::GetSupportedVideoCodecs(
@@ -175,7 +174,7 @@ void ChannelManager::GetSupportedVideoCodecs(
void ChannelManager::GetSupportedVideoRtpHeaderExtensions(
RtpHeaderExtensions* ext) const {
- *ext = media_engine_->video_rtp_header_extensions();
+ *ext = media_engine_->GetVideoCapabilities().header_extensions;
}
void ChannelManager::GetSupportedDataCodecs(
@@ -205,11 +204,6 @@ bool ChannelManager::Init() {
return false;
}
- if (!SetAudioOptions(audio_options_)) {
- LOG(LS_WARNING) << "Failed to SetAudioOptions with options: "
- << audio_options_.ToString();
- }
-
// If audio_output_volume_ has been set via SetOutputVolume(), set the
// audio output volume of the engine.
if (kNotSetOutputVolume != audio_output_volume_ &&
@@ -218,11 +212,6 @@ bool ChannelManager::Init() {
<< audio_output_volume_;
}
- // Now apply the default video codec that has been set earlier.
- if (default_video_encoder_config_.max_codec.id != 0) {
- SetDefaultVideoEncoderConfig(default_video_encoder_config_);
- }
-
return initialized_;
}
@@ -295,6 +284,7 @@ VoiceChannel* ChannelManager::CreateVoiceChannel_w(
}
void ChannelManager::DestroyVoiceChannel(VoiceChannel* voice_channel) {
+ TRACE_EVENT0("webrtc", "ChannelManager::DestroyVoiceChannel");
if (voice_channel) {
worker_thread_->Invoke<void>(
Bind(&ChannelManager::DestroyVoiceChannel_w, this, voice_channel));
@@ -302,6 +292,7 @@ void ChannelManager::DestroyVoiceChannel(VoiceChannel* voice_channel) {
}
void ChannelManager::DestroyVoiceChannel_w(VoiceChannel* voice_channel) {
+ TRACE_EVENT0("webrtc", "ChannelManager::DestroyVoiceChannel_w");
// Destroy voice channel.
ASSERT(initialized_);
ASSERT(worker_thread_ == rtc::Thread::Current());
@@ -351,6 +342,7 @@ VideoChannel* ChannelManager::CreateVideoChannel_w(
}
void ChannelManager::DestroyVideoChannel(VideoChannel* video_channel) {
+ TRACE_EVENT0("webrtc", "ChannelManager::DestroyVideoChannel");
if (video_channel) {
worker_thread_->Invoke<void>(
Bind(&ChannelManager::DestroyVideoChannel_w, this, video_channel));
@@ -358,6 +350,7 @@ void ChannelManager::DestroyVideoChannel(VideoChannel* video_channel) {
}
void ChannelManager::DestroyVideoChannel_w(VideoChannel* video_channel) {
+ TRACE_EVENT0("webrtc", "ChannelManager::DestroyVideoChannel_w");
// Destroy video channel.
ASSERT(initialized_);
ASSERT(worker_thread_ == rtc::Thread::Current());
@@ -408,6 +401,7 @@ DataChannel* ChannelManager::CreateDataChannel_w(
}
void ChannelManager::DestroyDataChannel(DataChannel* data_channel) {
+ TRACE_EVENT0("webrtc", "ChannelManager::DestroyDataChannel");
if (data_channel) {
worker_thread_->Invoke<void>(
Bind(&ChannelManager::DestroyDataChannel_w, this, data_channel));
@@ -415,6 +409,7 @@ void ChannelManager::DestroyDataChannel(DataChannel* data_channel) {
}
void ChannelManager::DestroyDataChannel_w(DataChannel* data_channel) {
+ TRACE_EVENT0("webrtc", "ChannelManager::DestroyDataChannel_w");
// Destroy data channel.
ASSERT(initialized_);
DataChannels::iterator it = std::find(data_channels_.begin(),
@@ -427,43 +422,6 @@ void ChannelManager::DestroyDataChannel_w(DataChannel* data_channel) {
delete data_channel;
}
-bool ChannelManager::SetAudioOptions(const AudioOptions& options) {
- // "Get device ids from DeviceManager" - these are the defaults returned.
- Device in_dev("", -1);
- Device out_dev("", -1);
-
- // If we're initialized, pass the settings to the media engine.
- bool ret = true;
- if (initialized_) {
- ret = worker_thread_->Invoke<bool>(
- Bind(&ChannelManager::SetAudioOptions_w, this,
- options, &in_dev, &out_dev));
- }
-
- // If all worked well, save the values for use in GetAudioOptions.
- if (ret) {
- audio_options_ = options;
- }
- return ret;
-}
-
-bool ChannelManager::SetAudioOptions_w(
- const AudioOptions& options,
- const Device* in_dev, const Device* out_dev) {
- ASSERT(worker_thread_ == rtc::Thread::Current());
- ASSERT(initialized_);
-
- // Set audio options
- bool ret = media_engine_->SetAudioOptions(options);
-
- // Set the audio devices
- if (ret) {
- ret = media_engine_->SetSoundDevices(in_dev, out_dev);
- }
-
- return ret;
-}
-
bool ChannelManager::GetOutputVolume(int* level) {
if (!initialized_) {
return false;
@@ -487,39 +445,6 @@ bool ChannelManager::SetOutputVolume(int level) {
return ret;
}
-bool ChannelManager::SetDefaultVideoEncoderConfig(const VideoEncoderConfig& c) {
- bool ret = true;
- if (initialized_) {
- ret = worker_thread_->Invoke<bool>(
- Bind(&MediaEngineInterface::SetDefaultVideoEncoderConfig,
- media_engine_.get(), c));
- }
- if (ret) {
- default_video_encoder_config_ = c;
- }
- return ret;
-}
-
-void ChannelManager::SetVoiceLogging(int level, const char* filter) {
- if (initialized_) {
- worker_thread_->Invoke<void>(
- Bind(&MediaEngineInterface::SetVoiceLogging,
- media_engine_.get(), level, filter));
- } else {
- media_engine_->SetVoiceLogging(level, filter);
- }
-}
-
-void ChannelManager::SetVideoLogging(int level, const char* filter) {
- if (initialized_) {
- worker_thread_->Invoke<void>(
- Bind(&MediaEngineInterface::SetVideoLogging,
- media_engine_.get(), level, filter));
- } else {
- media_engine_->SetVideoLogging(level, filter);
- }
-}
-
std::vector<cricket::VideoFormat> ChannelManager::GetSupportedFormats(
VideoCapturer* capturer) const {
ASSERT(capturer != NULL);
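[Note: the TRACE_EVENT0 calls added throughout channelmanager.cc are scoped: one macro at the top of a function records a begin event immediately and an end event when the scope unwinds, which is why no matching "end" call appears in the diff. An illustrative stand-in (the real macro in webrtc/base/trace_event.h is more elaborate and generates a unique variable name per line):]

    #include <cstdio>

    class ScopedTracer {
     public:
      ScopedTracer(const char* category, const char* name)
          : category_(category), name_(name) {
        std::printf("BEGIN %s/%s\n", category_, name_);
      }
      ~ScopedTracer() { std::printf("END %s/%s\n", category_, name_); }

     private:
      const char* category_;
      const char* name_;
    };

    #define TRACE_EVENT0(category, name) ScopedTracer trace_scope(category, name)

    void DestroyVoiceChannel() {
      TRACE_EVENT0("webrtc", "ChannelManager::DestroyVoiceChannel");
      // ... destruction work; the END record is emitted on scope exit.
    }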
diff --git a/talk/session/media/channelmanager.h b/talk/session/media/channelmanager.h
index 6312e61e06..2bc516bfaa 100644
--- a/talk/session/media/channelmanager.h
+++ b/talk/session/media/channelmanager.h
@@ -129,7 +129,6 @@ class ChannelManager : public rtc::MessageHandler,
bool GetOutputVolume(int* level);
bool SetOutputVolume(int level);
- bool SetDefaultVideoEncoderConfig(const VideoEncoderConfig& config);
// RTX will be enabled/disabled in engines that support it. The supporting
// engines will start offering an RTX codec. Must be called before Init().
bool SetVideoRtxEnabled(bool enable);
@@ -137,10 +136,6 @@ class ChannelManager : public rtc::MessageHandler,
// Starts/stops the local microphone and enables polling of the input level.
bool capturing() const { return capturing_; }
- // Configures the logging output of the mediaengine(s).
- void SetVoiceLogging(int level, const char* filter);
- void SetVideoLogging(int level, const char* filter);
-
// Gets capturer's supported formats in a thread safe manner
std::vector<cricket::VideoFormat> GetSupportedFormats(
VideoCapturer* capturer) const;
@@ -181,11 +176,6 @@ class ChannelManager : public rtc::MessageHandler,
sigslot::signal2<VideoCapturer*, CaptureState> SignalVideoCaptureStateChange;
- protected:
- // Adds non-transient parameters which can only be changed through the
- // options store.
- bool SetAudioOptions(const AudioOptions& options);
-
private:
typedef std::vector<VoiceChannel*> VoiceChannels;
typedef std::vector<VideoChannel*> VideoChannels;
@@ -217,8 +207,6 @@ class ChannelManager : public rtc::MessageHandler,
bool rtcp,
DataChannelType data_channel_type);
void DestroyDataChannel_w(DataChannel* data_channel);
- bool SetAudioOptions_w(const AudioOptions& options,
- const Device* in_dev, const Device* out_dev);
void OnVideoCaptureStateChange(VideoCapturer* capturer,
CaptureState result);
void GetSupportedFormats_w(
@@ -238,9 +226,7 @@ class ChannelManager : public rtc::MessageHandler,
VideoChannels video_channels_;
DataChannels data_channels_;
- AudioOptions audio_options_;
int audio_output_volume_;
- VideoEncoderConfig default_video_encoder_config_;
VideoRenderer* local_renderer_;
bool enable_rtx_;
diff --git a/talk/session/media/channelmanager_unittest.cc b/talk/session/media/channelmanager_unittest.cc
index fa6aa2cab6..4740f0f37d 100644
--- a/talk/session/media/channelmanager_unittest.cc
+++ b/talk/session/media/channelmanager_unittest.cc
@@ -183,38 +183,6 @@ TEST_F(ChannelManagerTest, NoTransportChannelTest) {
cm_->Terminate();
}
-// Test that SetDefaultVideoCodec passes through the right values.
-TEST_F(ChannelManagerTest, SetDefaultVideoEncoderConfig) {
- cricket::VideoCodec codec(96, "G264", 1280, 720, 60, 0);
- cricket::VideoEncoderConfig config(codec, 1, 2);
- EXPECT_TRUE(cm_->Init());
- EXPECT_TRUE(cm_->SetDefaultVideoEncoderConfig(config));
- EXPECT_EQ(config, fme_->default_video_encoder_config());
-}
-
-struct GetCapturerFrameSize : public sigslot::has_slots<> {
- void OnVideoFrame(VideoCapturer* capturer, const VideoFrame* frame) {
- width = frame->GetWidth();
- height = frame->GetHeight();
- }
- GetCapturerFrameSize(VideoCapturer* capturer) : width(0), height(0) {
- capturer->SignalVideoFrame.connect(this,
- &GetCapturerFrameSize::OnVideoFrame);
- static_cast<FakeVideoCapturer*>(capturer)->CaptureFrame();
- }
- size_t width;
- size_t height;
-};
-
-// Test that SetDefaultVideoCodec passes through the right values.
-TEST_F(ChannelManagerTest, SetDefaultVideoCodecBeforeInit) {
- cricket::VideoCodec codec(96, "G264", 1280, 720, 60, 0);
- cricket::VideoEncoderConfig config(codec, 1, 2);
- EXPECT_TRUE(cm_->SetDefaultVideoEncoderConfig(config));
- EXPECT_TRUE(cm_->Init());
- EXPECT_EQ(config, fme_->default_video_encoder_config());
-}
-
TEST_F(ChannelManagerTest, GetSetOutputVolumeBeforeInit) {
int level;
// Before init, SetOutputVolume() remembers the volume but does not change the
@@ -250,33 +218,6 @@ TEST_F(ChannelManagerTest, GetSetOutputVolume) {
EXPECT_EQ(60, level);
}
-// Test that logging options set before Init are applied properly,
-// and retained even after Init.
-TEST_F(ChannelManagerTest, SetLoggingBeforeInit) {
- cm_->SetVoiceLogging(rtc::LS_INFO, "test-voice");
- cm_->SetVideoLogging(rtc::LS_VERBOSE, "test-video");
- EXPECT_EQ(rtc::LS_INFO, fme_->voice_loglevel());
- EXPECT_STREQ("test-voice", fme_->voice_logfilter().c_str());
- EXPECT_EQ(rtc::LS_VERBOSE, fme_->video_loglevel());
- EXPECT_STREQ("test-video", fme_->video_logfilter().c_str());
- EXPECT_TRUE(cm_->Init());
- EXPECT_EQ(rtc::LS_INFO, fme_->voice_loglevel());
- EXPECT_STREQ("test-voice", fme_->voice_logfilter().c_str());
- EXPECT_EQ(rtc::LS_VERBOSE, fme_->video_loglevel());
- EXPECT_STREQ("test-video", fme_->video_logfilter().c_str());
-}
-
-// Test that logging options set after Init are applied properly.
-TEST_F(ChannelManagerTest, SetLogging) {
- EXPECT_TRUE(cm_->Init());
- cm_->SetVoiceLogging(rtc::LS_INFO, "test-voice");
- cm_->SetVideoLogging(rtc::LS_VERBOSE, "test-video");
- EXPECT_EQ(rtc::LS_INFO, fme_->voice_loglevel());
- EXPECT_STREQ("test-voice", fme_->voice_logfilter().c_str());
- EXPECT_EQ(rtc::LS_VERBOSE, fme_->video_loglevel());
- EXPECT_STREQ("test-video", fme_->video_logfilter().c_str());
-}
-
TEST_F(ChannelManagerTest, SetVideoRtxEnabled) {
std::vector<VideoCodec> codecs;
const VideoCodec rtx_codec(96, "rtx", 0, 0, 0, 0);
diff --git a/talk/session/media/mediasession.cc b/talk/session/media/mediasession.cc
index 7413026092..24f01b4463 100644
--- a/talk/session/media/mediasession.cc
+++ b/talk/session/media/mediasession.cc
@@ -50,6 +50,17 @@ static const uint32_t kMaxSctpSid = 1023;
namespace {
const char kInline[] = "inline:";
+
+void GetSupportedCryptoSuiteNames(void (*func)(std::vector<int>*),
+ std::vector<std::string>* names) {
+#ifdef HAVE_SRTP
+ std::vector<int> crypto_suites;
+ func(&crypto_suites);
+ for (const auto crypto : crypto_suites) {
+ names->push_back(rtc::SrtpCryptoSuiteToName(crypto));
+ }
+#endif
+}
}
namespace cricket {
@@ -152,30 +163,50 @@ bool FindMatchingCrypto(const CryptoParamsVec& cryptos,
}
// For audio, HMAC 32 is preferred because of its low overhead.
-void GetSupportedAudioCryptoSuites(
- std::vector<std::string>* crypto_suites) {
+void GetSupportedAudioCryptoSuites(std::vector<int>* crypto_suites) {
#ifdef HAVE_SRTP
- crypto_suites->push_back(rtc::CS_AES_CM_128_HMAC_SHA1_32);
- crypto_suites->push_back(rtc::CS_AES_CM_128_HMAC_SHA1_80);
+ crypto_suites->push_back(rtc::SRTP_AES128_CM_SHA1_32);
+ crypto_suites->push_back(rtc::SRTP_AES128_CM_SHA1_80);
#endif
}
-void GetSupportedVideoCryptoSuites(
- std::vector<std::string>* crypto_suites) {
- GetDefaultSrtpCryptoSuiteNames(crypto_suites);
+void GetSupportedAudioCryptoSuiteNames(
+ std::vector<std::string>* crypto_suite_names) {
+ GetSupportedCryptoSuiteNames(GetSupportedAudioCryptoSuites,
+ crypto_suite_names);
+}
+
+void GetSupportedVideoCryptoSuites(std::vector<int>* crypto_suites) {
+ GetDefaultSrtpCryptoSuites(crypto_suites);
+}
+
+void GetSupportedVideoCryptoSuiteNames(
+ std::vector<std::string>* crypto_suite_names) {
+ GetSupportedCryptoSuiteNames(GetSupportedVideoCryptoSuites,
+ crypto_suite_names);
}
-void GetSupportedDataCryptoSuites(
- std::vector<std::string>* crypto_suites) {
- GetDefaultSrtpCryptoSuiteNames(crypto_suites);
+void GetSupportedDataCryptoSuites(std::vector<int>* crypto_suites) {
+ GetDefaultSrtpCryptoSuites(crypto_suites);
}
-void GetDefaultSrtpCryptoSuiteNames(std::vector<std::string>* crypto_suites) {
+void GetSupportedDataCryptoSuiteNames(
+ std::vector<std::string>* crypto_suite_names) {
+ GetSupportedCryptoSuiteNames(GetSupportedDataCryptoSuites,
+ crypto_suite_names);
+}
+
+void GetDefaultSrtpCryptoSuites(std::vector<int>* crypto_suites) {
#ifdef HAVE_SRTP
- crypto_suites->push_back(rtc::CS_AES_CM_128_HMAC_SHA1_80);
+ crypto_suites->push_back(rtc::SRTP_AES128_CM_SHA1_80);
#endif
}
+void GetDefaultSrtpCryptoSuiteNames(
+ std::vector<std::string>* crypto_suite_names) {
+ GetSupportedCryptoSuiteNames(GetDefaultSrtpCryptoSuites, crypto_suite_names);
+}
+
// For video, support only 80-bit SHA1 HMAC. For audio, 32-bit HMAC is
// tolerated unless bundle is enabled, because of its low overhead. Pick the
// first crypto in the list that is supported.
@@ -518,8 +549,8 @@ static bool AddStreamParams(
// Updates the transport infos of the |sdesc| according to the given
// |bundle_group|. The transport infos of the content names within the
-// |bundle_group| should be updated to use the ufrag and pwd of the first
-// content within the |bundle_group|.
+// |bundle_group| should be updated to use the ufrag, pwd and DTLS role of the
+// first content within the |bundle_group|.
static bool UpdateTransportInfoForBundle(const ContentGroup& bundle_group,
SessionDescription* sdesc) {
// The bundle should not be empty.
@@ -540,6 +571,8 @@ static bool UpdateTransportInfoForBundle(const ContentGroup& bundle_group,
selected_transport_info->description.ice_ufrag;
const std::string& selected_pwd =
selected_transport_info->description.ice_pwd;
+ ConnectionRole selected_connection_role =
+ selected_transport_info->description.connection_role;
for (TransportInfos::iterator it =
sdesc->transport_infos().begin();
it != sdesc->transport_infos().end(); ++it) {
@@ -547,6 +580,7 @@ static bool UpdateTransportInfoForBundle(const ContentGroup& bundle_group,
it->content_name != selected_content_name) {
it->description.ice_ufrag = selected_ufrag;
it->description.ice_pwd = selected_pwd;
+ it->description.connection_role = selected_connection_role;
}
}
return true;
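[Note: with BUNDLE, every content in the group shares a single transport, so the loop above now propagates the DTLS connection role along with the ICE ufrag/pwd of the group's first content. A reduced sketch of the propagation with simplified types (the real code also checks bundle-group membership per content):]

    #include <string>
    #include <vector>

    enum ConnectionRole { CONNECTIONROLE_ACTPASS, CONNECTIONROLE_ACTIVE, CONNECTIONROLE_PASSIVE };

    struct TransportDescription {
      std::string ice_ufrag;
      std::string ice_pwd;
      ConnectionRole connection_role = CONNECTIONROLE_ACTPASS;
    };

    struct TransportInfo {
      std::string content_name;
      TransportDescription description;
    };

    // Copy the first bundled content's credentials and DTLS role onto the
    // other contents so they all describe the same underlying transport.
    void UpdateTransportInfoForBundle(const TransportInfo& selected,
                                      std::vector<TransportInfo>* infos) {
      for (TransportInfo& info : *infos) {
        if (info.content_name != selected.content_name) {
          info.description.ice_ufrag = selected.description.ice_ufrag;
          info.description.ice_pwd = selected.description.ice_pwd;
          info.description.connection_role = selected.description.connection_role;
        }
      }
    }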
@@ -602,6 +636,11 @@ static void PruneCryptos(const CryptoParamsVec& filter,
target_cryptos->end());
}
+static bool IsRtpProtocol(const std::string& protocol) {
+ return protocol.empty() ||
+ (protocol.find(cricket::kMediaProtocolRtpPrefix) != std::string::npos);
+}
+
static bool IsRtpContent(SessionDescription* sdesc,
const std::string& content_name) {
bool is_rtp = false;
@@ -612,9 +651,7 @@ static bool IsRtpContent(SessionDescription* sdesc,
if (!media_desc) {
return false;
}
- is_rtp = media_desc->protocol().empty() ||
- (media_desc->protocol().find(cricket::kMediaProtocolRtpPrefix) !=
- std::string::npos);
+ is_rtp = IsRtpProtocol(media_desc->protocol());
}
return is_rtp;
}
@@ -726,6 +763,11 @@ static bool CreateMediaContentOffer(
offer->set_crypto_required(CT_SDES);
}
offer->set_rtcp_mux(options.rtcp_mux_enabled);
+ // TODO(deadbeef): Once we're sure this works correctly, enable it in
+ // CreateOffer.
+ // if (offer->type() == cricket::MEDIA_TYPE_VIDEO) {
+ // offer->set_rtcp_reduced_size(true);
+ // }
offer->set_multistream(options.is_muc);
offer->set_rtp_header_extensions(rtp_extensions);
@@ -1004,6 +1046,11 @@ static bool CreateMediaContentAnswer(
answer->set_rtp_header_extensions(negotiated_rtp_extensions);
answer->set_rtcp_mux(options.rtcp_mux_enabled && offer->rtcp_mux());
+ // TODO(deadbeef): Once we're sure this works correctly, enable it in
+ // CreateAnswer.
+ // if (answer->type() == cricket::MEDIA_TYPE_VIDEO) {
+ // answer->set_rtcp_reduced_size(offer->rtcp_reduced_size());
+ // }
if (sdes_policy != SEC_DISABLED) {
CryptoParams crypto;
@@ -1036,12 +1083,16 @@ static bool CreateMediaContentAnswer(
answer->set_direction(MD_RECVONLY);
break;
case MD_RECVONLY:
- answer->set_direction(answer->streams().empty() ? MD_INACTIVE
- : MD_SENDONLY);
+ answer->set_direction(IsRtpProtocol(answer->protocol()) &&
+ answer->streams().empty()
+ ? MD_INACTIVE
+ : MD_SENDONLY);
break;
case MD_SENDRECV:
- answer->set_direction(answer->streams().empty() ? MD_RECVONLY
- : MD_SENDRECV);
+ answer->set_direction(IsRtpProtocol(answer->protocol()) &&
+ answer->streams().empty()
+ ? MD_RECVONLY
+ : MD_SENDRECV);
break;
default:
RTC_DCHECK(false && "MediaContentDescription has unexpected direction.");
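[Note: the direction change above matters for non-RTP contents such as SCTP data channels: they carry no StreamParams, so an empty stream list must not downgrade the answer direction the way it does for RTP audio/video. A condensed model of the two modified cases (enum and helper names are simplified stand-ins):]

    enum MediaContentDirection { MD_INACTIVE, MD_SENDONLY, MD_RECVONLY, MD_SENDRECV };

    // Mirrors the two modified cases: only an RTP content with no local send
    // streams is downgraded; SCTP/data contents keep the stronger direction.
    MediaContentDirection AnswerDirection(MediaContentDirection offered,
                                          bool is_rtp,
                                          bool has_send_streams) {
      const bool downgrade = is_rtp && !has_send_streams;
      switch (offered) {
        case MD_RECVONLY:
          return downgrade ? MD_INACTIVE : MD_SENDONLY;
        case MD_SENDRECV:
          return downgrade ? MD_RECVONLY : MD_SENDRECV;
        default:
          return offered;  // MD_SENDONLY and MD_INACTIVE are handled elsewhere
      }
    }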
@@ -1508,13 +1559,18 @@ bool MediaSessionDescriptionFactory::AddAudioContentForOffer(
const AudioCodecs& audio_codecs,
StreamParamsVec* current_streams,
SessionDescription* desc) const {
+ const ContentInfo* current_audio_content =
+ GetFirstAudioContent(current_description);
+ std::string content_name =
+ current_audio_content ? current_audio_content->name : CN_AUDIO;
+
cricket::SecurePolicy sdes_policy =
- IsDtlsActive(CN_AUDIO, current_description) ?
- cricket::SEC_DISABLED : secure();
+ IsDtlsActive(content_name, current_description) ? cricket::SEC_DISABLED
+ : secure();
scoped_ptr<AudioContentDescription> audio(new AudioContentDescription());
std::vector<std::string> crypto_suites;
- GetSupportedAudioCryptoSuites(&crypto_suites);
+ GetSupportedAudioCryptoSuiteNames(&crypto_suites);
if (!CreateMediaContentOffer(
options,
audio_codecs,
@@ -1546,8 +1602,8 @@ bool MediaSessionDescriptionFactory::AddAudioContentForOffer(
}
}
- desc->AddContent(CN_AUDIO, NS_JINGLE_RTP, audio.release());
- if (!AddTransportOffer(CN_AUDIO, options.transport_options,
+ desc->AddContent(content_name, NS_JINGLE_RTP, audio.release());
+ if (!AddTransportOffer(content_name, options.audio_transport_options,
current_description, desc)) {
return false;
}
@@ -1562,13 +1618,18 @@ bool MediaSessionDescriptionFactory::AddVideoContentForOffer(
const VideoCodecs& video_codecs,
StreamParamsVec* current_streams,
SessionDescription* desc) const {
+ const ContentInfo* current_video_content =
+ GetFirstVideoContent(current_description);
+ std::string content_name =
+ current_video_content ? current_video_content->name : CN_VIDEO;
+
cricket::SecurePolicy sdes_policy =
- IsDtlsActive(CN_VIDEO, current_description) ?
- cricket::SEC_DISABLED : secure();
+ IsDtlsActive(content_name, current_description) ? cricket::SEC_DISABLED
+ : secure();
scoped_ptr<VideoContentDescription> video(new VideoContentDescription());
std::vector<std::string> crypto_suites;
- GetSupportedVideoCryptoSuites(&crypto_suites);
+ GetSupportedVideoCryptoSuiteNames(&crypto_suites);
if (!CreateMediaContentOffer(
options,
video_codecs,
@@ -1601,8 +1662,8 @@ bool MediaSessionDescriptionFactory::AddVideoContentForOffer(
}
}
- desc->AddContent(CN_VIDEO, NS_JINGLE_RTP, video.release());
- if (!AddTransportOffer(CN_VIDEO, options.transport_options,
+ desc->AddContent(content_name, NS_JINGLE_RTP, video.release());
+ if (!AddTransportOffer(content_name, options.video_transport_options,
current_description, desc)) {
return false;
}
@@ -1623,9 +1684,14 @@ bool MediaSessionDescriptionFactory::AddDataContentForOffer(
FilterDataCodecs(data_codecs, is_sctp);
+ const ContentInfo* current_data_content =
+ GetFirstDataContent(current_description);
+ std::string content_name =
+ current_data_content ? current_data_content->name : CN_DATA;
+
cricket::SecurePolicy sdes_policy =
- IsDtlsActive(CN_DATA, current_description) ?
- cricket::SEC_DISABLED : secure();
+ IsDtlsActive(content_name, current_description) ? cricket::SEC_DISABLED
+ : secure();
std::vector<std::string> crypto_suites;
if (is_sctp) {
// SDES doesn't make sense for SCTP, so we disable it, and we only
@@ -1638,7 +1704,7 @@ bool MediaSessionDescriptionFactory::AddDataContentForOffer(
data->set_protocol(
secure_transport ? kMediaProtocolDtlsSctp : kMediaProtocolSctp);
} else {
- GetSupportedDataCryptoSuites(&crypto_suites);
+ GetSupportedDataCryptoSuiteNames(&crypto_suites);
}
if (!CreateMediaContentOffer(
@@ -1655,13 +1721,13 @@ bool MediaSessionDescriptionFactory::AddDataContentForOffer(
}
if (is_sctp) {
- desc->AddContent(CN_DATA, NS_JINGLE_DRAFT_SCTP, data.release());
+ desc->AddContent(content_name, NS_JINGLE_DRAFT_SCTP, data.release());
} else {
data->set_bandwidth(options.data_bandwidth);
SetMediaProtocol(secure_transport, data.get());
- desc->AddContent(CN_DATA, NS_JINGLE_RTP, data.release());
+ desc->AddContent(content_name, NS_JINGLE_RTP, data.release());
}
- if (!AddTransportOffer(CN_DATA, options.transport_options,
+ if (!AddTransportOffer(content_name, options.data_transport_options,
current_description, desc)) {
return false;
}
@@ -1676,10 +1742,9 @@ bool MediaSessionDescriptionFactory::AddAudioContentForAnswer(
SessionDescription* answer) const {
const ContentInfo* audio_content = GetFirstAudioContent(offer);
- scoped_ptr<TransportDescription> audio_transport(
- CreateTransportAnswer(audio_content->name, offer,
- options.transport_options,
- current_description));
+ scoped_ptr<TransportDescription> audio_transport(CreateTransportAnswer(
+ audio_content->name, offer, options.audio_transport_options,
+ current_description));
if (!audio_transport) {
return false;
}
@@ -1735,10 +1800,9 @@ bool MediaSessionDescriptionFactory::AddVideoContentForAnswer(
StreamParamsVec* current_streams,
SessionDescription* answer) const {
const ContentInfo* video_content = GetFirstVideoContent(offer);
- scoped_ptr<TransportDescription> video_transport(
- CreateTransportAnswer(video_content->name, offer,
- options.transport_options,
- current_description));
+ scoped_ptr<TransportDescription> video_transport(CreateTransportAnswer(
+ video_content->name, offer, options.video_transport_options,
+ current_description));
if (!video_transport) {
return false;
}
@@ -1791,10 +1855,9 @@ bool MediaSessionDescriptionFactory::AddDataContentForAnswer(
StreamParamsVec* current_streams,
SessionDescription* answer) const {
const ContentInfo* data_content = GetFirstDataContent(offer);
- scoped_ptr<TransportDescription> data_transport(
- CreateTransportAnswer(data_content->name, offer,
- options.transport_options,
- current_description));
+ scoped_ptr<TransportDescription> data_transport(CreateTransportAnswer(
+ data_content->name, offer, options.data_transport_options,
+ current_description));
if (!data_transport) {
return false;
}
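[Note: mediasession.cc keeps the old string-based *CryptoSuiteNames() entry points alive by routing them through a single adapter that takes the new integer-suite getter as a function pointer. A compilable sketch of the pattern (SrtpCryptoSuiteToName stands in for the rtc:: helper, and the IDs are illustrative):]

    #include <string>
    #include <vector>

    namespace {

    // Stand-in for rtc::SrtpCryptoSuiteToName; values are illustrative.
    std::string SrtpCryptoSuiteToName(int suite) {
      switch (suite) {
        case 1: return "AES_CM_128_HMAC_SHA1_32";
        case 2: return "AES_CM_128_HMAC_SHA1_80";
        default: return "UNKNOWN";
      }
    }

    // Generic adapter: run any integer-suite getter, then map IDs to names.
    void GetSupportedCryptoSuiteNames(void (*func)(std::vector<int>*),
                                      std::vector<std::string>* names) {
      std::vector<int> crypto_suites;
      func(&crypto_suites);
      for (int crypto : crypto_suites) {
        names->push_back(SrtpCryptoSuiteToName(crypto));
      }
    }

    void GetSupportedAudioCryptoSuites(std::vector<int>* suites) {
      suites->push_back(1);  // prefer the low-overhead 32-bit HMAC for audio
      suites->push_back(2);
    }

    }  // namespace

    void GetSupportedAudioCryptoSuiteNames(std::vector<std::string>* names) {
      GetSupportedCryptoSuiteNames(GetSupportedAudioCryptoSuites, names);
    }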
diff --git a/talk/session/media/mediasession.h b/talk/session/media/mediasession.h
index e92628e711..1540274665 100644
--- a/talk/session/media/mediasession.h
+++ b/talk/session/media/mediasession.h
@@ -134,6 +134,10 @@ struct MediaSessionOptions {
bool HasSendMediaStream(MediaType type) const;
+ // TODO(deadbeef): Put all the audio/video/data-specific options into a map
+ // structure (content name -> options).
+ // MediaSessionDescriptionFactory assumes there will never be more than one
+ // audio/video/data content, but this will change with unified plan.
bool recv_audio;
bool recv_video;
DataChannelType data_channel_type;
@@ -144,7 +148,9 @@ struct MediaSessionOptions {
// bps. -1 == auto.
int video_bandwidth;
int data_bandwidth;
- TransportOptions transport_options;
+ TransportOptions audio_transport_options;
+ TransportOptions video_transport_options;
+ TransportOptions data_transport_options;
struct Stream {
Stream(MediaType type,
@@ -167,17 +173,7 @@ struct MediaSessionOptions {
// "content" (as used in XEP-0166) descriptions for voice and video.
class MediaContentDescription : public ContentDescription {
public:
- MediaContentDescription()
- : rtcp_mux_(false),
- bandwidth_(kAutoBandwidth),
- crypto_required_(CT_NONE),
- rtp_header_extensions_set_(false),
- multistream_(false),
- conference_mode_(false),
- partial_(false),
- buffered_mode_latency_(kBufferedModeDisabled),
- direction_(MD_SENDRECV) {
- }
+ MediaContentDescription() {}
virtual MediaType type() const = 0;
virtual bool has_codecs() const = 0;
@@ -195,6 +191,11 @@ class MediaContentDescription : public ContentDescription {
bool rtcp_mux() const { return rtcp_mux_; }
void set_rtcp_mux(bool mux) { rtcp_mux_ = mux; }
+ bool rtcp_reduced_size() const { return rtcp_reduced_size_; }
+ void set_rtcp_reduced_size(bool reduced_size) {
+ rtcp_reduced_size_ = reduced_size;
+ }
+
int bandwidth() const { return bandwidth_; }
void set_bandwidth(int bandwidth) { bandwidth_ = bandwidth; }
@@ -291,19 +292,20 @@ class MediaContentDescription : public ContentDescription {
int buffered_mode_latency() const { return buffered_mode_latency_; }
protected:
- bool rtcp_mux_;
- int bandwidth_;
+ bool rtcp_mux_ = false;
+ bool rtcp_reduced_size_ = false;
+ int bandwidth_ = kAutoBandwidth;
std::string protocol_;
std::vector<CryptoParams> cryptos_;
- CryptoType crypto_required_;
+ CryptoType crypto_required_ = CT_NONE;
std::vector<RtpHeaderExtension> rtp_header_extensions_;
- bool rtp_header_extensions_set_;
- bool multistream_;
+ bool rtp_header_extensions_set_ = false;
+ bool multistream_ = false;
StreamParamsVec streams_;
- bool conference_mode_;
- bool partial_;
- int buffered_mode_latency_;
- MediaContentDirection direction_;
+ bool conference_mode_ = false;
+ bool partial_ = false;
+ int buffered_mode_latency_ = kBufferedModeDisabled;
+ MediaContentDirection direction_ = MD_SENDRECV;
};
template <class C>
@@ -547,10 +549,19 @@ const VideoContentDescription* GetFirstVideoContentDescription(
const DataContentDescription* GetFirstDataContentDescription(
const SessionDescription* sdesc);
-void GetSupportedAudioCryptoSuites(std::vector<std::string>* crypto_suites);
-void GetSupportedVideoCryptoSuites(std::vector<std::string>* crypto_suites);
-void GetSupportedDataCryptoSuites(std::vector<std::string>* crypto_suites);
-void GetDefaultSrtpCryptoSuiteNames(std::vector<std::string>* crypto_suites);
+void GetSupportedAudioCryptoSuites(std::vector<int>* crypto_suites);
+void GetSupportedVideoCryptoSuites(std::vector<int>* crypto_suites);
+void GetSupportedDataCryptoSuites(std::vector<int>* crypto_suites);
+void GetDefaultSrtpCryptoSuites(std::vector<int>* crypto_suites);
+void GetSupportedAudioCryptoSuiteNames(
+ std::vector<std::string>* crypto_suite_names);
+void GetSupportedVideoCryptoSuiteNames(
+ std::vector<std::string>* crypto_suite_names);
+void GetSupportedDataCryptoSuiteNames(
+ std::vector<std::string>* crypto_suite_names);
+void GetDefaultSrtpCryptoSuiteNames(
+ std::vector<std::string>* crypto_suite_names);
+
} // namespace cricket
#endif // TALK_SESSION_MEDIA_MEDIASESSION_H_
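[Note: the MediaContentDescription change in this header replaces a long constructor initializer list with C++11 in-class member initializers, which keeps each default next to its declaration; a newly added field such as rtcp_reduced_size_ then cannot be forgotten in an initializer list. The transformation in miniature:]

    // Before: defaults live far from the declarations.
    class Before {
     public:
      Before() : rtcp_mux_(false), bandwidth_(-1), partial_(false) {}

     private:
      bool rtcp_mux_;
      int bandwidth_;
      bool partial_;
    };

    // After: each default sits on the member itself, and the compiler
    // generates the default constructor.
    class After {
     private:
      bool rtcp_mux_ = false;
      bool rtcp_reduced_size_ = false;
      int bandwidth_ = -1;  // kAutoBandwidth in the real code
      bool partial_ = false;
    };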
diff --git a/talk/session/media/mediasession_unittest.cc b/talk/session/media/mediasession_unittest.cc
index 72aefc884c..20b72e9394 100644
--- a/talk/session/media/mediasession_unittest.cc
+++ b/talk/session/media/mediasession_unittest.cc
@@ -69,6 +69,9 @@ using cricket::CryptoParamsVec;
using cricket::AudioContentDescription;
using cricket::VideoContentDescription;
using cricket::DataContentDescription;
+using cricket::GetFirstAudioContent;
+using cricket::GetFirstVideoContent;
+using cricket::GetFirstDataContent;
using cricket::GetFirstAudioContentDescription;
using cricket::GetFirstVideoContentDescription;
using cricket::GetFirstDataContentDescription;
@@ -235,11 +238,9 @@ class MediaSessionDescriptionFactoryTest : public testing::Test {
f2_.set_video_codecs(MAKE_VECTOR(kVideoCodecs2));
f2_.set_data_codecs(MAKE_VECTOR(kDataCodecs2));
tdf1_.set_certificate(rtc::RTCCertificate::Create(
- rtc::scoped_ptr<rtc::SSLIdentity>(
- new rtc::FakeSSLIdentity("id1")).Pass()));
+ rtc::scoped_ptr<rtc::SSLIdentity>(new rtc::FakeSSLIdentity("id1"))));
tdf2_.set_certificate(rtc::RTCCertificate::Create(
- rtc::scoped_ptr<rtc::SSLIdentity>(
- new rtc::FakeSSLIdentity("id2")).Pass()));
+ rtc::scoped_ptr<rtc::SSLIdentity>(new rtc::FakeSSLIdentity("id2"))));
}
// Create a video StreamParamsVec object with:
@@ -607,6 +608,7 @@ TEST_F(MediaSessionDescriptionFactoryTest,
ASSERT_CRYPTO(dcd, 1U, CS_AES_CM_128_HMAC_SHA1_80);
EXPECT_EQ(std::string(cricket::kMediaProtocolSavpf), dcd->protocol());
}
+
// Create a RTP data offer, and ensure it matches what we expect.
TEST_F(MediaSessionDescriptionFactoryTest, TestCreateRtpDataOffer) {
MediaSessionOptions opts;
@@ -2313,3 +2315,30 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestVADEnableOption) {
audio_content = answer->GetContentByName("audio");
EXPECT_TRUE(VerifyNoCNCodecs(audio_content));
}
+
+// Test that the content name ("mid" in SDP) is unchanged when creating a
+// new offer.
+TEST_F(MediaSessionDescriptionFactoryTest,
+ TestContentNameNotChangedInSubsequentOffers) {
+ MediaSessionOptions opts;
+ opts.recv_audio = true;
+ opts.recv_video = true;
+ opts.data_channel_type = cricket::DCT_SCTP;
+ // Create offer and modify the default content names.
+ rtc::scoped_ptr<SessionDescription> offer(f1_.CreateOffer(opts, nullptr));
+ for (ContentInfo& content : offer->contents()) {
+ content.name.append("_modified");
+ }
+
+ rtc::scoped_ptr<SessionDescription> updated_offer(
+ f1_.CreateOffer(opts, offer.get()));
+ const ContentInfo* audio_content = GetFirstAudioContent(updated_offer.get());
+ const ContentInfo* video_content = GetFirstVideoContent(updated_offer.get());
+ const ContentInfo* data_content = GetFirstDataContent(updated_offer.get());
+ ASSERT_TRUE(audio_content != nullptr);
+ ASSERT_TRUE(video_content != nullptr);
+ ASSERT_TRUE(data_content != nullptr);
+ EXPECT_EQ("audio_modified", audio_content->name);
+ EXPECT_EQ("video_modified", video_content->name);
+ EXPECT_EQ("data_modified", data_content->name);
+}
diff --git a/talk/session/media/srtpfilter.cc b/talk/session/media/srtpfilter.cc
index 079ddfb57e..a200a3c4c2 100644
--- a/talk/session/media/srtpfilter.cc
+++ b/talk/session/media/srtpfilter.cc
@@ -146,10 +146,10 @@ bool SrtpFilter::SetProvisionalAnswer(
return DoSetAnswer(answer_params, source, false);
}
-bool SrtpFilter::SetRtpParams(const std::string& send_cs,
+bool SrtpFilter::SetRtpParams(int send_cs,
const uint8_t* send_key,
int send_key_len,
- const std::string& recv_cs,
+ int recv_cs,
const uint8_t* recv_key,
int recv_key_len) {
if (IsActive()) {
@@ -179,10 +179,10 @@ bool SrtpFilter::SetRtpParams(const std::string& send_cs,
// SrtpSession.
// - In the muxed case, they are keyed with the same keys, so
// this function is not needed
-bool SrtpFilter::SetRtcpParams(const std::string& send_cs,
+bool SrtpFilter::SetRtcpParams(int send_cs,
const uint8_t* send_key,
int send_key_len,
- const std::string& recv_cs,
+ int recv_cs,
const uint8_t* recv_key,
int recv_key_len) {
// This can only be called once, but can be safely called after
@@ -428,10 +428,12 @@ bool SrtpFilter::ApplyParams(const CryptoParams& send_params,
ParseKeyParams(recv_params.key_params, recv_key, sizeof(recv_key)));
if (ret) {
CreateSrtpSessions();
- ret = (send_session_->SetSend(send_params.cipher_suite,
- send_key, sizeof(send_key)) &&
- recv_session_->SetRecv(recv_params.cipher_suite,
- recv_key, sizeof(recv_key)));
+ ret = (send_session_->SetSend(
+ rtc::SrtpCryptoSuiteFromName(send_params.cipher_suite), send_key,
+ sizeof(send_key)) &&
+ recv_session_->SetRecv(
+ rtc::SrtpCryptoSuiteFromName(recv_params.cipher_suite), recv_key,
+ sizeof(recv_key)));
}
if (ret) {
LOG(LS_INFO) << "SRTP activated with negotiated parameters:"
@@ -448,6 +450,10 @@ bool SrtpFilter::ApplyParams(const CryptoParams& send_params,
bool SrtpFilter::ResetParams() {
offer_params_.clear();
state_ = ST_INIT;
+ send_session_ = nullptr;
+ recv_session_ = nullptr;
+ send_rtcp_session_ = nullptr;
+ recv_rtcp_session_ = nullptr;
LOG(LS_INFO) << "SRTP reset to init state";
return true;
}
@@ -507,11 +513,11 @@ SrtpSession::~SrtpSession() {
}
}
-bool SrtpSession::SetSend(const std::string& cs, const uint8_t* key, int len) {
+bool SrtpSession::SetSend(int cs, const uint8_t* key, int len) {
return SetKey(ssrc_any_outbound, cs, key, len);
}
-bool SrtpSession::SetRecv(const std::string& cs, const uint8_t* key, int len) {
+bool SrtpSession::SetRecv(int cs, const uint8_t* key, int len) {
return SetKey(ssrc_any_inbound, cs, key, len);
}
@@ -658,10 +664,7 @@ void SrtpSession::set_signal_silent_time(uint32_t signal_silent_time_in_ms) {
srtp_stat_->set_signal_silent_time(signal_silent_time_in_ms);
}
-bool SrtpSession::SetKey(int type,
- const std::string& cs,
- const uint8_t* key,
- int len) {
+bool SrtpSession::SetKey(int type, int cs, const uint8_t* key, int len) {
if (session_) {
LOG(LS_ERROR) << "Failed to create SRTP session: "
<< "SRTP session already created";
@@ -675,15 +678,15 @@ bool SrtpSession::SetKey(int type,
srtp_policy_t policy;
memset(&policy, 0, sizeof(policy));
- if (cs == rtc::CS_AES_CM_128_HMAC_SHA1_80) {
+ if (cs == rtc::SRTP_AES128_CM_SHA1_80) {
crypto_policy_set_aes_cm_128_hmac_sha1_80(&policy.rtp);
crypto_policy_set_aes_cm_128_hmac_sha1_80(&policy.rtcp);
- } else if (cs == rtc::CS_AES_CM_128_HMAC_SHA1_32) {
+ } else if (cs == rtc::SRTP_AES128_CM_SHA1_32) {
crypto_policy_set_aes_cm_128_hmac_sha1_32(&policy.rtp); // rtp is 32,
crypto_policy_set_aes_cm_128_hmac_sha1_80(&policy.rtcp); // rtcp still 80
} else {
LOG(LS_WARNING) << "Failed to create SRTP session: unsupported"
- << " cipher_suite " << cs.c_str();
+ << " cipher_suite " << cs;
return false;
}
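[Note: SrtpSession::SetKey now branches on the integer suite ID when choosing libsrtp crypto policies, and the diff preserves the asymmetry that RTCP keeps the 80-bit auth tag even when RTP negotiates the 32-bit one. A condensed sketch of just that selection (the constants and policy struct are stand-ins, not the libsrtp API):]

    // Hypothetical suite IDs standing in for rtc::SRTP_AES128_CM_SHA1_*.
    const int kSrtpAes128CmSha1_80 = 1;
    const int kSrtpAes128CmSha1_32 = 2;

    struct Policy { int rtp_auth_tag_bits; int rtcp_auth_tag_bits; };

    // Returns false for suites this build does not support.
    bool SelectPolicy(int cs, Policy* policy) {
      if (cs == kSrtpAes128CmSha1_80) {
        policy->rtp_auth_tag_bits = 80;
        policy->rtcp_auth_tag_bits = 80;
      } else if (cs == kSrtpAes128CmSha1_32) {
        policy->rtp_auth_tag_bits = 32;   // RTP uses the short tag...
        policy->rtcp_auth_tag_bits = 80;  // ...but RTCP stays at 80 bits.
      } else {
        return false;  // unsupported cipher_suite
      }
      return true;
    }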
diff --git a/talk/session/media/srtpfilter.h b/talk/session/media/srtpfilter.h
index 3c3a8e848b..6b941f32fd 100644
--- a/talk/session/media/srtpfilter.h
+++ b/talk/session/media/srtpfilter.h
@@ -104,16 +104,16 @@ class SrtpFilter {
// Just set up both sets of keys directly.
// Used with DTLS-SRTP.
- bool SetRtpParams(const std::string& send_cs,
+ bool SetRtpParams(int send_cs,
const uint8_t* send_key,
int send_key_len,
- const std::string& recv_cs,
+ int recv_cs,
const uint8_t* recv_key,
int recv_key_len);
- bool SetRtcpParams(const std::string& send_cs,
+ bool SetRtcpParams(int send_cs,
const uint8_t* send_key,
int send_key_len,
- const std::string& recv_cs,
+ int recv_cs,
const uint8_t* recv_key,
int recv_key_len);
@@ -138,6 +138,8 @@ class SrtpFilter {
// Update the silent threshold (in ms) for signaling errors.
void set_signal_silent_time(uint32_t signal_silent_time_in_ms);
+ bool ResetParams();
+
sigslot::repeater3<uint32_t, Mode, Error> SignalSrtpError;
protected:
@@ -153,7 +155,6 @@ class SrtpFilter {
CryptoParams* selected_params);
bool ApplyParams(const CryptoParams& send_params,
const CryptoParams& recv_params);
- bool ResetParams();
static bool ParseKeyParams(const std::string& params, uint8_t* key, int len);
private:
@@ -199,10 +200,10 @@ class SrtpSession {
// Configures the session for sending data using the specified
// cipher-suite and key. Receiving must be done by a separate session.
- bool SetSend(const std::string& cs, const uint8_t* key, int len);
+ bool SetSend(int cs, const uint8_t* key, int len);
// Configures the session for receiving data using the specified
// cipher-suite and key. Sending must be done by a separate session.
- bool SetRecv(const std::string& cs, const uint8_t* key, int len);
+ bool SetRecv(int cs, const uint8_t* key, int len);
// Encrypts/signs an individual RTP/RTCP packet, in-place.
// If an HMAC is used, this will increase the packet size.
@@ -232,7 +233,7 @@ class SrtpSession {
SignalSrtpError;
private:
- bool SetKey(int type, const std::string& cs, const uint8_t* key, int len);
+ bool SetKey(int type, int cs, const uint8_t* key, int len);
// Returns send stream current packet index from srtp db.
bool GetSendStreamPacketIndex(void* data, int in_len, int64_t* index);
diff --git a/talk/session/media/srtpfilter_unittest.cc b/talk/session/media/srtpfilter_unittest.cc
index 8122455205..11874380e2 100644
--- a/talk/session/media/srtpfilter_unittest.cc
+++ b/talk/session/media/srtpfilter_unittest.cc
@@ -508,21 +508,17 @@ TEST_F(SrtpFilterTest, TestDisableEncryption) {
// Test directly setting the params with AES_CM_128_HMAC_SHA1_80
TEST_F(SrtpFilterTest, TestProtect_SetParamsDirect_AES_CM_128_HMAC_SHA1_80) {
- EXPECT_TRUE(f1_.SetRtpParams(CS_AES_CM_128_HMAC_SHA1_80,
- kTestKey1, kTestKeyLen,
- CS_AES_CM_128_HMAC_SHA1_80,
+ EXPECT_TRUE(f1_.SetRtpParams(rtc::SRTP_AES128_CM_SHA1_80, kTestKey1,
+ kTestKeyLen, rtc::SRTP_AES128_CM_SHA1_80,
kTestKey2, kTestKeyLen));
- EXPECT_TRUE(f2_.SetRtpParams(CS_AES_CM_128_HMAC_SHA1_80,
- kTestKey2, kTestKeyLen,
- CS_AES_CM_128_HMAC_SHA1_80,
+ EXPECT_TRUE(f2_.SetRtpParams(rtc::SRTP_AES128_CM_SHA1_80, kTestKey2,
+ kTestKeyLen, rtc::SRTP_AES128_CM_SHA1_80,
kTestKey1, kTestKeyLen));
- EXPECT_TRUE(f1_.SetRtcpParams(CS_AES_CM_128_HMAC_SHA1_80,
- kTestKey1, kTestKeyLen,
- CS_AES_CM_128_HMAC_SHA1_80,
+ EXPECT_TRUE(f1_.SetRtcpParams(rtc::SRTP_AES128_CM_SHA1_80, kTestKey1,
+ kTestKeyLen, rtc::SRTP_AES128_CM_SHA1_80,
kTestKey2, kTestKeyLen));
- EXPECT_TRUE(f2_.SetRtcpParams(CS_AES_CM_128_HMAC_SHA1_80,
- kTestKey2, kTestKeyLen,
- CS_AES_CM_128_HMAC_SHA1_80,
+ EXPECT_TRUE(f2_.SetRtcpParams(rtc::SRTP_AES128_CM_SHA1_80, kTestKey2,
+ kTestKeyLen, rtc::SRTP_AES128_CM_SHA1_80,
kTestKey1, kTestKeyLen));
EXPECT_TRUE(f1_.IsActive());
EXPECT_TRUE(f2_.IsActive());
@@ -531,21 +527,17 @@ TEST_F(SrtpFilterTest, TestProtect_SetParamsDirect_AES_CM_128_HMAC_SHA1_80) {
// Test directly setting the params with AES_CM_128_HMAC_SHA1_32
TEST_F(SrtpFilterTest, TestProtect_SetParamsDirect_AES_CM_128_HMAC_SHA1_32) {
- EXPECT_TRUE(f1_.SetRtpParams(CS_AES_CM_128_HMAC_SHA1_32,
- kTestKey1, kTestKeyLen,
- CS_AES_CM_128_HMAC_SHA1_32,
+ EXPECT_TRUE(f1_.SetRtpParams(rtc::SRTP_AES128_CM_SHA1_32, kTestKey1,
+ kTestKeyLen, rtc::SRTP_AES128_CM_SHA1_32,
kTestKey2, kTestKeyLen));
- EXPECT_TRUE(f2_.SetRtpParams(CS_AES_CM_128_HMAC_SHA1_32,
- kTestKey2, kTestKeyLen,
- CS_AES_CM_128_HMAC_SHA1_32,
+ EXPECT_TRUE(f2_.SetRtpParams(rtc::SRTP_AES128_CM_SHA1_32, kTestKey2,
+ kTestKeyLen, rtc::SRTP_AES128_CM_SHA1_32,
kTestKey1, kTestKeyLen));
- EXPECT_TRUE(f1_.SetRtcpParams(CS_AES_CM_128_HMAC_SHA1_32,
- kTestKey1, kTestKeyLen,
- CS_AES_CM_128_HMAC_SHA1_32,
+ EXPECT_TRUE(f1_.SetRtcpParams(rtc::SRTP_AES128_CM_SHA1_32, kTestKey1,
+ kTestKeyLen, rtc::SRTP_AES128_CM_SHA1_32,
kTestKey2, kTestKeyLen));
- EXPECT_TRUE(f2_.SetRtcpParams(CS_AES_CM_128_HMAC_SHA1_32,
- kTestKey2, kTestKeyLen,
- CS_AES_CM_128_HMAC_SHA1_32,
+ EXPECT_TRUE(f2_.SetRtcpParams(rtc::SRTP_AES128_CM_SHA1_32, kTestKey2,
+ kTestKeyLen, rtc::SRTP_AES128_CM_SHA1_32,
kTestKey1, kTestKeyLen));
EXPECT_TRUE(f1_.IsActive());
EXPECT_TRUE(f2_.IsActive());
@@ -554,25 +546,21 @@ TEST_F(SrtpFilterTest, TestProtect_SetParamsDirect_AES_CM_128_HMAC_SHA1_32) {
// Test directly setting the params with bogus keys
TEST_F(SrtpFilterTest, TestSetParamsKeyTooShort) {
- EXPECT_FALSE(f1_.SetRtpParams(CS_AES_CM_128_HMAC_SHA1_80,
- kTestKey1, kTestKeyLen - 1,
- CS_AES_CM_128_HMAC_SHA1_80,
+ EXPECT_FALSE(f1_.SetRtpParams(rtc::SRTP_AES128_CM_SHA1_80, kTestKey1,
+ kTestKeyLen - 1, rtc::SRTP_AES128_CM_SHA1_80,
kTestKey1, kTestKeyLen - 1));
- EXPECT_FALSE(f1_.SetRtcpParams(CS_AES_CM_128_HMAC_SHA1_80,
- kTestKey1, kTestKeyLen - 1,
- CS_AES_CM_128_HMAC_SHA1_80,
+ EXPECT_FALSE(f1_.SetRtcpParams(rtc::SRTP_AES128_CM_SHA1_80, kTestKey1,
+ kTestKeyLen - 1, rtc::SRTP_AES128_CM_SHA1_80,
kTestKey1, kTestKeyLen - 1));
}
#if defined(ENABLE_EXTERNAL_AUTH)
TEST_F(SrtpFilterTest, TestGetSendAuthParams) {
- EXPECT_TRUE(f1_.SetRtpParams(CS_AES_CM_128_HMAC_SHA1_32,
- kTestKey1, kTestKeyLen,
- CS_AES_CM_128_HMAC_SHA1_32,
+ EXPECT_TRUE(f1_.SetRtpParams(rtc::SRTP_AES128_CM_SHA1_32, kTestKey1,
+ kTestKeyLen, rtc::SRTP_AES128_CM_SHA1_32,
kTestKey2, kTestKeyLen));
- EXPECT_TRUE(f1_.SetRtcpParams(CS_AES_CM_128_HMAC_SHA1_32,
- kTestKey1, kTestKeyLen,
- CS_AES_CM_128_HMAC_SHA1_32,
+ EXPECT_TRUE(f1_.SetRtcpParams(rtc::SRTP_AES128_CM_SHA1_32, kTestKey1,
+ kTestKeyLen, rtc::SRTP_AES128_CM_SHA1_32,
kTestKey2, kTestKeyLen));
uint8_t* auth_key = NULL;
int auth_key_len = 0, auth_tag_len = 0;
@@ -629,28 +617,30 @@ class SrtpSessionTest : public testing::Test {
// Test that we can set up the session and keys properly.
TEST_F(SrtpSessionTest, TestGoodSetup) {
- EXPECT_TRUE(s1_.SetSend(CS_AES_CM_128_HMAC_SHA1_80, kTestKey1, kTestKeyLen));
- EXPECT_TRUE(s2_.SetRecv(CS_AES_CM_128_HMAC_SHA1_80, kTestKey1, kTestKeyLen));
+ EXPECT_TRUE(s1_.SetSend(rtc::SRTP_AES128_CM_SHA1_80, kTestKey1, kTestKeyLen));
+ EXPECT_TRUE(s2_.SetRecv(rtc::SRTP_AES128_CM_SHA1_80, kTestKey1, kTestKeyLen));
}
// Test that we can't change the keys once set.
TEST_F(SrtpSessionTest, TestBadSetup) {
- EXPECT_TRUE(s1_.SetSend(CS_AES_CM_128_HMAC_SHA1_80, kTestKey1, kTestKeyLen));
- EXPECT_TRUE(s2_.SetRecv(CS_AES_CM_128_HMAC_SHA1_80, kTestKey1, kTestKeyLen));
- EXPECT_FALSE(s1_.SetSend(CS_AES_CM_128_HMAC_SHA1_80, kTestKey2, kTestKeyLen));
- EXPECT_FALSE(s2_.SetRecv(CS_AES_CM_128_HMAC_SHA1_80, kTestKey2, kTestKeyLen));
+ EXPECT_TRUE(s1_.SetSend(rtc::SRTP_AES128_CM_SHA1_80, kTestKey1, kTestKeyLen));
+ EXPECT_TRUE(s2_.SetRecv(rtc::SRTP_AES128_CM_SHA1_80, kTestKey1, kTestKeyLen));
+ EXPECT_FALSE(
+ s1_.SetSend(rtc::SRTP_AES128_CM_SHA1_80, kTestKey2, kTestKeyLen));
+ EXPECT_FALSE(
+ s2_.SetRecv(rtc::SRTP_AES128_CM_SHA1_80, kTestKey2, kTestKeyLen));
}
// Test that we fail keys of the wrong length.
TEST_F(SrtpSessionTest, TestKeysTooShort) {
- EXPECT_FALSE(s1_.SetSend(CS_AES_CM_128_HMAC_SHA1_80, kTestKey1, 1));
- EXPECT_FALSE(s2_.SetRecv(CS_AES_CM_128_HMAC_SHA1_80, kTestKey1, 1));
+ EXPECT_FALSE(s1_.SetSend(rtc::SRTP_AES128_CM_SHA1_80, kTestKey1, 1));
+ EXPECT_FALSE(s2_.SetRecv(rtc::SRTP_AES128_CM_SHA1_80, kTestKey1, 1));
}
// Test that we can encrypt and decrypt RTP/RTCP using AES_CM_128_HMAC_SHA1_80.
TEST_F(SrtpSessionTest, TestProtect_AES_CM_128_HMAC_SHA1_80) {
- EXPECT_TRUE(s1_.SetSend(CS_AES_CM_128_HMAC_SHA1_80, kTestKey1, kTestKeyLen));
- EXPECT_TRUE(s2_.SetRecv(CS_AES_CM_128_HMAC_SHA1_80, kTestKey1, kTestKeyLen));
+ EXPECT_TRUE(s1_.SetSend(rtc::SRTP_AES128_CM_SHA1_80, kTestKey1, kTestKeyLen));
+ EXPECT_TRUE(s2_.SetRecv(rtc::SRTP_AES128_CM_SHA1_80, kTestKey1, kTestKeyLen));
TestProtectRtp(CS_AES_CM_128_HMAC_SHA1_80);
TestProtectRtcp(CS_AES_CM_128_HMAC_SHA1_80);
TestUnprotectRtp(CS_AES_CM_128_HMAC_SHA1_80);
@@ -659,8 +649,8 @@ TEST_F(SrtpSessionTest, TestProtect_AES_CM_128_HMAC_SHA1_80) {
// Test that we can encrypt and decrypt RTP/RTCP using AES_CM_128_HMAC_SHA1_32.
TEST_F(SrtpSessionTest, TestProtect_AES_CM_128_HMAC_SHA1_32) {
- EXPECT_TRUE(s1_.SetSend(CS_AES_CM_128_HMAC_SHA1_32, kTestKey1, kTestKeyLen));
- EXPECT_TRUE(s2_.SetRecv(CS_AES_CM_128_HMAC_SHA1_32, kTestKey1, kTestKeyLen));
+ EXPECT_TRUE(s1_.SetSend(rtc::SRTP_AES128_CM_SHA1_32, kTestKey1, kTestKeyLen));
+ EXPECT_TRUE(s2_.SetRecv(rtc::SRTP_AES128_CM_SHA1_32, kTestKey1, kTestKeyLen));
TestProtectRtp(CS_AES_CM_128_HMAC_SHA1_32);
TestProtectRtcp(CS_AES_CM_128_HMAC_SHA1_32);
TestUnprotectRtp(CS_AES_CM_128_HMAC_SHA1_32);
@@ -668,7 +658,7 @@ TEST_F(SrtpSessionTest, TestProtect_AES_CM_128_HMAC_SHA1_32) {
}
TEST_F(SrtpSessionTest, TestGetSendStreamPacketIndex) {
- EXPECT_TRUE(s1_.SetSend(CS_AES_CM_128_HMAC_SHA1_32, kTestKey1, kTestKeyLen));
+ EXPECT_TRUE(s1_.SetSend(rtc::SRTP_AES128_CM_SHA1_32, kTestKey1, kTestKeyLen));
int64_t index;
int out_len = 0;
EXPECT_TRUE(s1_.ProtectRtp(rtp_packet_, rtp_len_,
@@ -681,8 +671,8 @@ TEST_F(SrtpSessionTest, TestGetSendStreamPacketIndex) {
// Test that we fail to unprotect if someone tampers with the RTP/RTCP payloads.
TEST_F(SrtpSessionTest, TestTamperReject) {
int out_len;
- EXPECT_TRUE(s1_.SetSend(CS_AES_CM_128_HMAC_SHA1_80, kTestKey1, kTestKeyLen));
- EXPECT_TRUE(s2_.SetRecv(CS_AES_CM_128_HMAC_SHA1_80, kTestKey1, kTestKeyLen));
+ EXPECT_TRUE(s1_.SetSend(rtc::SRTP_AES128_CM_SHA1_80, kTestKey1, kTestKeyLen));
+ EXPECT_TRUE(s2_.SetRecv(rtc::SRTP_AES128_CM_SHA1_80, kTestKey1, kTestKeyLen));
TestProtectRtp(CS_AES_CM_128_HMAC_SHA1_80);
TestProtectRtcp(CS_AES_CM_128_HMAC_SHA1_80);
rtp_packet_[0] = 0x12;
@@ -694,8 +684,8 @@ TEST_F(SrtpSessionTest, TestTamperReject) {
// Test that we fail to unprotect if the payloads are not authenticated.
TEST_F(SrtpSessionTest, TestUnencryptReject) {
int out_len;
- EXPECT_TRUE(s1_.SetSend(CS_AES_CM_128_HMAC_SHA1_80, kTestKey1, kTestKeyLen));
- EXPECT_TRUE(s2_.SetRecv(CS_AES_CM_128_HMAC_SHA1_80, kTestKey1, kTestKeyLen));
+ EXPECT_TRUE(s1_.SetSend(rtc::SRTP_AES128_CM_SHA1_80, kTestKey1, kTestKeyLen));
+ EXPECT_TRUE(s2_.SetRecv(rtc::SRTP_AES128_CM_SHA1_80, kTestKey1, kTestKeyLen));
EXPECT_FALSE(s2_.UnprotectRtp(rtp_packet_, rtp_len_, &out_len));
EXPECT_FALSE(s2_.UnprotectRtcp(rtcp_packet_, rtcp_len_, &out_len));
}
@@ -703,7 +693,7 @@ TEST_F(SrtpSessionTest, TestUnencryptReject) {
// Test that we fail when using buffers that are too small.
TEST_F(SrtpSessionTest, TestBuffersTooSmall) {
int out_len;
- EXPECT_TRUE(s1_.SetSend(CS_AES_CM_128_HMAC_SHA1_80, kTestKey1, kTestKeyLen));
+ EXPECT_TRUE(s1_.SetSend(rtc::SRTP_AES128_CM_SHA1_80, kTestKey1, kTestKeyLen));
EXPECT_FALSE(s1_.ProtectRtp(rtp_packet_, rtp_len_,
sizeof(rtp_packet_) - 10, &out_len));
EXPECT_FALSE(s1_.ProtectRtcp(rtcp_packet_, rtcp_len_,
@@ -717,8 +707,8 @@ TEST_F(SrtpSessionTest, TestReplay) {
static const uint16_t replay_window = 1024;
int out_len;
- EXPECT_TRUE(s1_.SetSend(CS_AES_CM_128_HMAC_SHA1_80, kTestKey1, kTestKeyLen));
- EXPECT_TRUE(s2_.SetRecv(CS_AES_CM_128_HMAC_SHA1_80, kTestKey1, kTestKeyLen));
+ EXPECT_TRUE(s1_.SetSend(rtc::SRTP_AES128_CM_SHA1_80, kTestKey1, kTestKeyLen));
+ EXPECT_TRUE(s2_.SetRecv(rtc::SRTP_AES128_CM_SHA1_80, kTestKey1, kTestKeyLen));
// Initial sequence number.
rtc::SetBE16(reinterpret_cast<uint8_t*>(rtp_packet_) + 2, seqnum_big);
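The TestReplay case above exercises libsrtp's anti-replay window (1024 packets here). As a rough illustration of the mechanism only -- a from-scratch toy in Python, not the libsrtp implementation or any rtc:: API -- a sliding window over sequence numbers can be kept as a bitmask:

    class ReplayWindow(object):
        # Accept each sequence number at most once; reject anything more
        # than `size` packets behind the newest number accepted so far.
        def __init__(self, size=1024):
            self.size = size
            self.newest = -1
            self.seen = 0  # bit i set <=> seqnum (newest - i) was accepted

        def check_and_update(self, seq):
            if seq > self.newest:
                # Window slides forward; newly exposed slots start empty.
                self.seen = (self.seen << (seq - self.newest)) | 1
                self.seen &= (1 << self.size) - 1  # drop bits that fell out
                self.newest = seq
                return True
            delta = self.newest - seq
            if delta >= self.size:
                return False  # too old: behind the window entirely
            if self.seen & (1 << delta):
                return False  # replay: seen this sequence number before
            self.seen |= (1 << delta)
            return True

    w = ReplayWindow(1024)
    assert w.check_and_update(10)
    assert not w.check_and_update(10)  # exact replay is rejected
    assert w.check_and_update(9)       # in-window, first sighting: accepted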
diff --git a/third_party/gflags/BUILD.gn b/third_party/gflags/BUILD.gn
index a2f1c3d413..e8a5a13ff6 100644
--- a/third_party/gflags/BUILD.gn
+++ b/third_party/gflags/BUILD.gn
@@ -13,7 +13,7 @@ if (is_win) {
}
config("gflags_config") {
- include_dirs = [
+ include_dirs = [
"$gflags_gen_arch_root/include", # For configured files.
"src", # For everything else.
]
@@ -25,6 +25,13 @@ config("gflags_config") {
"GFLAGS_DLL_DECLARE_FLAG=",
"GFLAGS_DLL_DEFINE_FLAG=",
]
+
+ # GN orders flags on a target before flags from configs. The default config
+ # adds -Wall, and this flag has to come after -Wall -- so it needs to
+ # come from a config and can't be on the target directly.
+ if (is_clang) {
+ cflags = [ "-Wno-unused-local-typedef" ]
+ }
}
source_set("gflags") {
@@ -42,9 +49,7 @@ source_set("gflags") {
]
}
- include_dirs = [
- "$gflags_gen_arch_root/include/private", # For config.h
- ]
+ include_dirs = [ "$gflags_gen_arch_root/include/private" ] # For config.h
public_configs = [ ":gflags_config" ]
diff --git a/third_party/gflags/gflags.gyp b/third_party/gflags/gflags.gyp
index 76d2448b7e..d3f2788664 100644
--- a/third_party/gflags/gflags.gyp
+++ b/third_party/gflags/gflags.gyp
@@ -79,8 +79,10 @@
},
}],
['clang==1', {
+ 'cflags': ['-Wno-unused-local-typedef',],
'cflags!': ['-Wheader-hygiene',],
'xcode_settings': {
+ 'WARNING_CFLAGS': ['-Wno-unused-local-typedef',],
'WARNING_CFLAGS!': ['-Wheader-hygiene',],
},
}],
diff --git a/third_party/gtest-parallel/README.webrtc b/third_party/gtest-parallel/README.webrtc
index acea04fb6e..7e7fdda48f 100644
--- a/third_party/gtest-parallel/README.webrtc
+++ b/third_party/gtest-parallel/README.webrtc
@@ -1,5 +1,5 @@
URL: https://github.com/google/gtest-parallel
-Version: c0f8ded77566c657ccc7f745fd9cb070750cccf8
+Version: 92eb6adf9df6eee34bb768b40af984e68e86d7cf
License: Apache 2.0
License File: LICENSE
diff --git a/third_party/gtest-parallel/gtest-parallel b/third_party/gtest-parallel/gtest-parallel
index 0be59e4b4e..3e2fdb4ba8 100755
--- a/third_party/gtest-parallel/gtest-parallel
+++ b/third_party/gtest-parallel/gtest-parallel
@@ -13,16 +13,67 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import cPickle
+import errno
import gzip
import multiprocessing
import optparse
import os
+import signal
import subprocess
import sys
+import tempfile
+import thread
import threading
import time
import zlib
+# An object that catches SIGINT sent to the Python process and notices
+# if processes passed to wait() die by SIGINT (we need to look for
+# both of those cases, because pressing Ctrl+C can result in either
+# the main process or one of the subprocesses getting the signal).
+#
+# Before a SIGINT is seen, wait(p) will simply call p.wait() and
+# return the result. Once a SIGINT has been seen (in the main process
+# or a subprocess, including the one the current call is waiting for),
+# wait(p) will call p.terminate() and raise ProcessWasInterrupted.
+class SigintHandler(object):
+ class ProcessWasInterrupted(Exception): pass
+ sigint_returncodes = {-signal.SIGINT, # Unix
+ -1073741510, # Windows
+ }
+ def __init__(self):
+ self.__lock = threading.Lock()
+ self.__processes = set()
+ self.__got_sigint = False
+ signal.signal(signal.SIGINT, self.__sigint_handler)
+ def __on_sigint(self):
+ self.__got_sigint = True
+ while self.__processes:
+ try:
+ self.__processes.pop().terminate()
+ except OSError:
+ pass
+ def __sigint_handler(self, signal_num, frame):
+ with self.__lock:
+ self.__on_sigint()
+ def got_sigint(self):
+ with self.__lock:
+ return self.__got_sigint
+ def wait(self, p):
+ with self.__lock:
+ if self.__got_sigint:
+ p.terminate()
+ self.__processes.add(p)
+ code = p.wait()
+ with self.__lock:
+ self.__processes.discard(p)
+ if code in self.sigint_returncodes:
+ self.__on_sigint()
+ if self.__got_sigint:
+ raise self.ProcessWasInterrupted
+ return code
+sigint_handler = SigintHandler()
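# A minimal usage sketch of the handler above (illustrative only; the
# command is made up). Every child wait goes through sigint_handler, so
# Ctrl+C in the parent or in any child tears the whole run down:
#
#   p = subprocess.Popen(['sleep', '60'])
#   try:
#     code = sigint_handler.wait(p)
#   except sigint_handler.ProcessWasInterrupted:
#     sys.exit(-signal.SIGINT)  # report "killed by SIGINT" upwards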
+
# Return the width of the terminal, or None if it couldn't be
# determined (e.g. because we're not being run interactively).
def term_width(out):
@@ -53,15 +104,21 @@ class Outputter(object):
else:
self.__out_file.write("\r" + msg[:self.__width].ljust(self.__width))
self.__previous_line_was_transient = True
- def permanent_line(self, msg):
+ def flush_transient_output(self):
if self.__previous_line_was_transient:
self.__out_file.write("\n")
self.__previous_line_was_transient = False
+ def permanent_line(self, msg):
+ self.flush_transient_output()
self.__out_file.write(msg + "\n")
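# A sketch of how the two line types above interact (illustrative):
#
#   out = Outputter(sys.stdout)
#   out.transient_line('[1/3] Running FooTest...')
#   out.transient_line('[2/3] Running BarTest...')  # overwrites the line above
#   out.permanent_line('[2/3] BarTest failed')      # breaks onto a fresh line
#
# flush_transient_output() is the piece that turns the last status line
# into real output (by writing '\n') before anything permanent follows.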
stdout_lock = threading.Lock()
class FilterFormat:
+ if sys.stdout.isatty():
+ # stdout needs to be unbuffered since the output is interactive.
+ sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
+
out = Outputter(sys.stdout)
total_tests = 0
finished_tests = 0
@@ -80,7 +137,6 @@ class FilterFormat:
if command == "TEST":
(binary, test) = arg.split(' ', 1)
self.tests[job_id] = (binary, test.strip())
- self.outputs[job_id] = []
elif command == "EXIT":
(exit_code, time_ms) = [int(x) for x in arg.split(' ', 1)]
self.finished_tests += 1
@@ -88,8 +144,9 @@ class FilterFormat:
self.print_test_status(test, time_ms)
if exit_code != 0:
self.failures.append(self.tests[job_id])
- for line in self.outputs[job_id]:
- self.out.permanent_line(line)
+ with open(self.outputs[job_id]) as f:
+ for line in f.readlines():
+ self.out.permanent_line(line.rstrip())
self.out.permanent_line(
"[%d/%d] %s returned/aborted with exit code %d (%d ms)"
% (self.finished_tests, self.total_tests, test, exit_code, time_ms))
@@ -97,17 +154,15 @@ class FilterFormat:
self.total_tests = int(arg.split(' ', 1)[1])
self.out.transient_line("[0/%d] Running tests..." % self.total_tests)
- def add_stdout(self, job_id, output):
- self.outputs[job_id].append(output)
+ def logfile(self, job_id, name):
+ self.outputs[job_id] = name
def log(self, line):
stdout_lock.acquire()
(prefix, output) = line.split(' ', 1)
- if prefix[-1] == ':':
- self.handle_meta(int(prefix[:-1]), output)
- else:
- self.add_stdout(int(prefix[:-1]), output)
+ assert prefix[-1] == ':'
+ self.handle_meta(int(prefix[:-1]), output)
stdout_lock.release()
def end(self):
@@ -116,6 +171,7 @@ class FilterFormat:
% (len(self.failures), self.total_tests))
for (binary, test) in self.failures:
self.out.permanent_line(" " + binary + ": " + test)
+ self.out.flush_transient_output()
class RawFormat:
def log(self, line):
@@ -123,6 +179,10 @@ class RawFormat:
sys.stdout.write(line + "\n")
sys.stdout.flush()
stdout_lock.release()
+ def logfile(self, job_id, name):
+ with open(name) as f:
+ for line in f.readlines():
+ self.log(str(job_id) + '> ' + line.rstrip())
def end(self):
pass
@@ -149,17 +209,19 @@ class TestTimes(object):
return
for ((test_binary, test_name), runtime) in times.items():
if (type(test_binary) is not str or type(test_name) is not str
- or type(runtime) not in {int, long}):
+ or type(runtime) not in {int, long, type(None)}):
return
self.__times = times
def get_test_time(self, binary, testname):
- "Return the last duration for the given test, or 0 if there's no record."
- return self.__times.get((binary, testname), 0)
+ """Return the last duration for the given test as an integer number of
+ milliseconds, or None if the test failed or if there's no record for it."""
+ return self.__times.get((binary, testname), None)
def record_test_time(self, binary, testname, runtime_ms):
- "Record that the given test ran in the specified number of milliseconds."
+ """Record that the given test ran in the specified number of
+ milliseconds. If the test failed, runtime_ms should be None."""
with self.__lock:
self.__times[(binary, testname)] = runtime_ms
@@ -184,6 +246,9 @@ for i in range(len(sys.argv)):
parser = optparse.OptionParser(
usage = 'usage: %prog [options] binary [binary ...] -- [additional args]')
+parser.add_option('-d', '--output_dir', type='string',
+ default=os.path.join(tempfile.gettempdir(), "gtest-parallel"),
+ help='output directory for test logs')
parser.add_option('-r', '--repeat', type='int', default=1,
help='repeat tests')
parser.add_option('-w', '--workers', type='int',
@@ -197,6 +262,8 @@ parser.add_option('--gtest_also_run_disabled_tests', action='store_true',
default=False, help='run disabled tests too')
parser.add_option('--format', type='string', default='filter',
help='output format (raw,filter)')
+parser.add_option('--print_test_times', action='store_true', default=False,
+ help='When done, list the run time of each test')
(options, binaries) = parser.parse_args()
@@ -240,17 +307,21 @@ for test_binary in binaries:
if line[0] != " ":
test_group = line.strip()
continue
- line = line.strip()
- if not options.gtest_also_run_disabled_tests and 'DISABLED' in line:
- continue
+ # Remove comments for parameterized tests and strip whitespace.
line = line.split('#')[0].strip()
if not line:
continue
test = test_group + line
+ if not options.gtest_also_run_disabled_tests and 'DISABLED_' in test:
+ continue
tests.append((times.get_test_time(test_binary, test),
test_binary, test, command))
-tests.sort(reverse=True)
+
+# Sort tests by falling runtime (with None, which is what we get for
+# new and failing tests, being considered larger than any real
+# runtime).
+tests.sort(reverse=True, key=lambda x: ((1 if x[0] is None else 0), x))
# Repeat tests (-r flag).
tests *= options.repeat
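To make the effect of that sort key concrete, a quick interactive check (Python 2, matching the script; the tuples are made up):

    >>> ts = [(None, 'bin', 'NewTest', 'cmd'),
    ...       (50, 'bin', 'FastTest', 'cmd'),
    ...       (900, 'bin', 'SlowTest', 'cmd')]
    >>> ts.sort(reverse=True, key=lambda x: ((1 if x[0] is None else 0), x))
    >>> [t[2] for t in ts]
    ['NewTest', 'SlowTest', 'FastTest']

New and previously failing tests (runtime None) run first, then the rest in falling runtime order, so the longest tests don't serialize the tail of the run.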
@@ -260,31 +331,41 @@ logger.log(str(-1) + ': TESTCNT ' + ' ' + str(len(tests)))
exit_code = 0
+# Create directory for test log output.
+try:
+ os.makedirs(options.output_dir)
+except OSError as e:
+ # Ignore errors if this directory already exists.
+ if e.errno != errno.EEXIST or not os.path.isdir(options.output_dir):
+ raise e
+# Remove files from old test runs.
+for logfile in os.listdir(options.output_dir):
+ os.remove(os.path.join(options.output_dir, logfile))
+
# Run the specified job. Return the elapsed time in milliseconds if
-# the job succeeds, or a very large number (larger than any reasonable
-# elapsed time) if the job fails. (This ensures that failing tests
-# will run first the next time.)
+# the job succeeds, or None if the job fails. (This ensures that
+# failing tests will run first the next time.)
def run_job((command, job_id, test)):
begin = time.time()
- sub = subprocess.Popen(command + ['--gtest_filter=' + test] +
- ['--gtest_color=' + options.gtest_color],
- stdout = subprocess.PIPE,
- stderr = subprocess.STDOUT)
- while True:
- line = sub.stdout.readline()
- if line == '':
- break
- logger.log(str(job_id) + '> ' + line.rstrip())
+ with tempfile.NamedTemporaryFile(dir=options.output_dir, delete=False) as log:
+ sub = subprocess.Popen(command + ['--gtest_filter=' + test] +
+ ['--gtest_color=' + options.gtest_color],
+ stdout=log.file,
+ stderr=log.file)
+ try:
+ code = sigint_handler.wait(sub)
+ except sigint_handler.ProcessWasInterrupted:
+ thread.exit()
+ runtime_ms = int(1000 * (time.time() - begin))
+ logger.logfile(job_id, log.name)
- code = sub.wait()
- runtime_ms = int(1000 * (time.time() - begin))
logger.log("%s: EXIT %s %d" % (job_id, code, runtime_ms))
if code == 0:
return runtime_ms
global exit_code
exit_code = code
- return sys.maxint
+ return None
def worker():
global job_id
@@ -312,4 +393,10 @@ workers = [start_daemon(worker) for i in range(options.workers)]
[t.join() for t in workers]
logger.end()
times.write_to_file(save_file)
-sys.exit(exit_code)
+if options.print_test_times:
+ ts = sorted((times.get_test_time(test_binary, test), test_binary, test)
+ for (_, test_binary, test, _) in tests
+ if times.get_test_time(test_binary, test) is not None)
+ for (time_ms, test_binary, test) in ts:
+ print "%8s %s" % ("%dms" % time_ms, test)
+sys.exit(-signal.SIGINT if sigint_handler.got_sigint() else exit_code)
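The logging pattern run_job introduces above -- each test writes straight to a NamedTemporaryFile created with delete=False, and only the file's name is handed to the logger, which reads it back on failure -- can be exercised in isolation. A minimal sketch (path and command made up):

    import subprocess
    import tempfile

    with tempfile.NamedTemporaryFile(dir='/tmp', delete=False) as log:
        # The child writes directly to the log file; the parent buffers nothing.
        subprocess.check_call(['echo', 'hello'], stdout=log.file, stderr=log.file)
        log_name = log.name  # delete=False: the file outlives the with-block
    print(open(log_name).read())

Compared with the old add_stdout list, this keeps a failing test's full output on disk (under --output_dir) instead of holding every job's output in memory.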
diff --git a/third_party/winsdk_samples/winsdk_samples.gyp b/third_party/winsdk_samples/winsdk_samples.gyp
index a9b8598adf..12bc265ec7 100644
--- a/third_party/winsdk_samples/winsdk_samples.gyp
+++ b/third_party/winsdk_samples/winsdk_samples.gyp
@@ -85,6 +85,32 @@
'<(baseclasses_dir)/wxutil.cpp',
'<(baseclasses_dir)/wxutil.h',
],
+ 'conditions': [
+ ['clang==1', {
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'AdditionalOptions': [
+ # Disable warnings failing when compiling with Clang on Windows.
+ # https://bugs.chromium.org/p/webrtc/issues/detail?id=5366
+ '-Wno-comment',
+ '-Wno-delete-non-virtual-dtor',
+ '-Wno-ignored-attributes',
+ '-Wno-logical-op-parentheses',
+ '-Wno-non-pod-varargs',
+ '-Wno-microsoft-extra-qualification',
+ '-Wno-missing-braces',
+ '-Wno-overloaded-virtual',
+ '-Wno-parentheses',
+ '-Wno-reorder',
+ '-Wno-string-conversion',
+ '-Wno-tautological-constant-out-of-range-compare',
+ '-Wno-unused-private-field',
+ '-Wno-writable-strings',
+ ],
+ },
+ },
+ },],
+ ], # conditions.
},
],
}
diff --git a/tools/OWNERS b/tools/OWNERS
index 965de1e933..3a3f60ca73 100644
--- a/tools/OWNERS
+++ b/tools/OWNERS
@@ -1,4 +1,3 @@
kjellander@webrtc.org
phoglund@webrtc.org
niklas.enbom@webrtc.org
-andrew@webrtc.org
diff --git a/tools/autoroller/roll_chromium_revision.py b/tools/autoroller/roll_chromium_revision.py
index 917ecd6570..1f974e02ef 100755
--- a/tools/autoroller/roll_chromium_revision.py
+++ b/tools/autoroller/roll_chromium_revision.py
@@ -25,9 +25,6 @@ CHROMIUM_COMMIT_TEMPLATE = CHROMIUM_SRC_URL + '/+/%s'
CHROMIUM_LOG_TEMPLATE = CHROMIUM_SRC_URL + '/+log/%s'
CHROMIUM_FILE_TEMPLATE = CHROMIUM_SRC_URL + '/+/%s/%s'
-# Run these CQ trybots in addition to the default ones in infra/config/cq.cfg.
-EXTRA_TRYBOTS = 'tryserver.webrtc:win_baremetal,mac_baremetal,linux_baremetal'
-
COMMIT_POSITION_RE = re.compile('^Cr-Commit-Position: .*#([0-9]+).*$')
CLANG_REVISION_RE = re.compile(r'^CLANG_REVISION=(\d+)$')
ROLL_BRANCH_NAME = 'roll_chromium_revision'
@@ -38,14 +35,14 @@ CHECKOUT_ROOT_DIR = os.path.realpath(os.path.join(SCRIPT_DIR, os.pardir,
sys.path.append(CHECKOUT_ROOT_DIR)
import setup_links
-sys.path.append(os.path.join(CHECKOUT_ROOT_DIR, 'tools'))
+sys.path.append(os.path.join(CHECKOUT_ROOT_DIR, 'build'))
import find_depot_tools
find_depot_tools.add_depot_tools_to_path()
from gclient import GClientKeywords
-CLANG_UPDATE_SCRIPT_URL_PATH = 'tools/clang/scripts/update.sh'
+CLANG_UPDATE_SCRIPT_URL_PATH = 'tools/clang/scripts/update.py'
CLANG_UPDATE_SCRIPT_LOCAL_PATH = os.path.join('tools', 'clang', 'scripts',
- 'update.sh')
+ 'update.py')
DepsEntry = collections.namedtuple('DepsEntry', 'path url revision')
ChangedDep = collections.namedtuple('ChangedDep',
@@ -292,7 +289,6 @@ def GenerateCommitMessage(current_cr_rev, new_cr_rev, current_commit_pos,
commit_msg.append('No update to Clang.\n')
commit_msg.append('TBR=%s' % tbr_authors)
- commit_msg.append('CQ_EXTRA_TRYBOTS=%s' % EXTRA_TRYBOTS)
return '\n'.join(commit_msg)
diff --git a/tools/refactoring/addfileheader.py b/tools/refactoring/addfileheader.py
deleted file mode 100644
index 01c8a8b4e1..0000000000
--- a/tools/refactoring/addfileheader.py
+++ /dev/null
@@ -1,163 +0,0 @@
-#!/usr/bin/env python
-
-import stringmanipulation
-import filemanagement
-import sys
-
-extensions = ['.h','.cc','.c','.cpp']
-
-ignore_these = ['my_ignore_header.h']
-
-if((len(sys.argv) != 2) and (len(sys.argv) != 3)):
- print 'parameters are: directory [--commit]'
- quit()
-
-directory = sys.argv[1];
-if(not filemanagement.pathexist(directory)):
- print 'path ' + directory + ' does not exist'
- quit()
-
-if((len(sys.argv) == 3) and (sys.argv[2] != '--commit')):
- print 'parameters are: parent directory extension new extension [--commit]'
- quit()
-
-commit = False
-if(len(sys.argv) == 3):
- commit = True
-
-files_to_fix = []
-for extension in extensions:
- files_to_fix.extend(filemanagement.listallfilesinfolder(directory,\
- extension))
-
-# Just steal the header from the template
-def fileheaderasstring():
- template_file_name = 'license_template.txt'
- if (not filemanagement.fileexist(template_file_name)):
- print 'File ' + template_file_name + ' not found!'
- quit()
- template_file = open(template_file_name,'r')
- return_string = ''
- for line in template_file:
- return_string += line
- return return_string
-
-# Just steal the header from the template
-def fileheaderasarray():
- template_file_name = 'license_template.txt'
- if (not filemanagement.fileexist(template_file_name)):
- print 'File ' + template_file_name + ' not found!'
- quit()
- template_file = open(template_file_name,'r')
- return_value = []
- for line in template_file:
- return_value.append(line)
- return return_value
-
-
-def findheader(path, file_name):
- full_file_name = path + file_name
- if (not filemanagement.fileexist(full_file_name)):
- print 'File ' + file_name + ' not found!'
- print 'Unexpected error!'
- quit()
- file_handle = open(full_file_name)
- template_file_content = fileheaderasarray()
- compare_content = []
- # load the same number of lines from file as the fileheader
- for index in range(len(template_file_content)):
- line = file_handle.readline()
- if (line == ''):
- return False
- compare_content.append(line)
-
- while (True):
- found = True
- for index in range(len(template_file_content)):
- line1 = template_file_content[index]
- line2 = compare_content[index]
- if(line1 != line2):
- found = False
- break
- if (found):
- return True
- compare_content = compare_content[1:len(compare_content)]
- line = file_handle.readline()
- if (line == ''):
- return False
- compare_content.append(line)
- return False
-
-# Used to store temporary result before flushing to real file when finished
-def temporaryfilename(old_file_name):
- return old_file_name + '.deleteme'
-
-def updatefile(path, old_file_name):
- full_old_file_name = path + old_file_name
- if (not filemanagement.fileexist(full_old_file_name)):
- print 'File ' + full_old_file_name + ' is not found.'
- print 'Should not happen! Ever!'
- quit()
-
- full_temporary_file_name = path + temporaryfilename(old_file_name)
-
- # Make sure that the files are closed by putting them out of scope
- old_file = open(full_old_file_name,'r')
- temporary_file = open(full_temporary_file_name,'w')
-
- temporary_file.writelines(fileheaderasstring())
- remove_whitespaces = True
- for line in old_file:
- if (remove_whitespaces and (len(line.split()) == 0)):
- continue
- else:
- remove_whitespaces = False
- temporary_file.writelines(line)
- old_file.close()
- temporary_file.close()
-
- filemanagement.copyfile(full_old_file_name,full_temporary_file_name)
- filemanagement.deletefile(full_temporary_file_name)
-
-
-failed_files = []
-skipped_files = []
-for index in range(len(files_to_fix)):
- if(commit):
- print (100*index)/len(files_to_fix)
- path_dir = files_to_fix[index][0]
- filename = files_to_fix[index][1]
- is_ignore = False
- for ignore_names in ignore_these:
- if(filename == ignore_names):
- is_ignore = True
- break
- if(is_ignore):
- continue
-
-# Let the word copyright be our sanity, i.e. make sure there is only one
-# copy right occurance or report that there will be no change
- if(filemanagement.findstringinfile(path_dir,filename,'Copyright') or
- filemanagement.findstringinfile(path_dir,filename,'copyright') or
- filemanagement.findstringinfile(path_dir,filename,'COPYRIGHT')):
- if(findheader(path_dir,filename)):
- skipped_files.append(path_dir + filename)
- else:
- failed_files.append(path_dir + filename)
- continue
-
- if (not commit):
- print 'File ' + path_dir + filename + ' will be updated'
- continue
- updatefile(path_dir,filename)
-
-tense = 'will be'
-if (commit):
- tense = 'has been'
-if (len(skipped_files) > 0):
- print str(len(skipped_files)) + ' file(s) ' + tense + ' skipped since they already have the correct header'
-
-if (len(failed_files) > 0):
- print 'Following files seem to have an invalid file header:'
-for line in failed_files:
- print line
diff --git a/tools/refactoring/filemanagement.py b/tools/refactoring/filemanagement.py
deleted file mode 100644
index 4ff64ceb22..0000000000
--- a/tools/refactoring/filemanagement.py
+++ /dev/null
@@ -1,72 +0,0 @@
-import fnmatch
-import os
-import stringmanipulation
-
-def fileexist( file_name ):
- return os.path.isfile(file_name)
-
-def pathexist( path ):
- return os.path.exists(path)
-
-def fixpath( path ):
- return_value = path
- if( return_value[len(return_value) - 1] != '/'):
- return_value = return_value + '/'
- return return_value
-
-def listallfilesinfolder( path, extension ):
- matches = []
- signature = '*' + extension
- for root, dirnames, filenames in os.walk(path):
- for filename in fnmatch.filter(filenames, signature):
- matches.append([fixpath(root), filename])
- return matches
-
-def copyfile(to_file, from_file):
- if(not fileexist(from_file)):
- return
- command = 'cp -f ' + from_file + ' ' + to_file
- os.system(command)
- #print command
-
-def deletefile(file_to_delete):
- if(not fileexist(file_to_delete)):
- return
- os.system('rm ' + file_to_delete)
-
-# very ugly but works, so keep for now
-def findstringinfile(path,file_name,search_string):
- command = 'grep \'' + search_string + '\' ' + path + file_name + ' > deleteme.txt'
- return_value = os.system(command)
-# print command
- return (return_value == 0)
-
-def replacestringinfolder( path, old_string, new_string, extension ):
- if(not stringmanipulation.isextension(extension)):
- print 'failed to search and replace'
- return
- if(len(old_string) == 0):
- print 'failed to search and replace'
- return
- find_command = 'ls '+ path + '/*' + extension
- sed_command = 'sed -i \'s/' + old_string + '/' + new_string +\
- '/g\' *' + extension
- command_string = find_command + ' | xargs ' + sed_command + ' 2> deleteme.txt'
- os.system(command_string)
- #print command_string
-
-#find ./ -name "*.h" -type f | xargs -P 0 sed -i 's/process_thread_wrapper.h/process_thread.h/g' *.h deleteme.txt
-def replacestringinallsubfolders( old_string, new_string, extension):
- if(not stringmanipulation.isextension(extension)):
- print 'failed to search and replace'
- return
- if(len(old_string) == 0):
- print 'failed to search and replace'
- return
-
- find_command = 'find ./ -name \"*' + extension + '\" -type f'
- sed_command = 'sed -i \'s/' + old_string + '/' + new_string +\
- '/g\' *' + extension
- command_string = find_command + ' | xargs -P 0 ' + sed_command + ' 2> deleteme.txt'
- os.system(command_string)
- #print command_string
diff --git a/tools/refactoring/fixincludeguards.py b/tools/refactoring/fixincludeguards.py
deleted file mode 100644
index 0b563556ba..0000000000
--- a/tools/refactoring/fixincludeguards.py
+++ /dev/null
@@ -1,145 +0,0 @@
-#!/usr/bin/env python
-
-import stringmanipulation
-import filemanagement
-import sys
-
-extensions = ['.h']
-
-ignore_these = ['my_ignore_header.h']
-
-if((len(sys.argv) != 2) and (len(sys.argv) != 3)):
- print 'parameters are: directory [--commit]'
- quit()
-
-directory = sys.argv[1];
-if(not filemanagement.pathexist(directory)):
- print 'path ' + directory + ' does not exist'
- quit()
-
-if((len(sys.argv) == 3) and (sys.argv[2] != '--commit')):
- print 'parameters are: parent directory extension new extension [--commit]'
- quit()
-
-commit = False
-if(len(sys.argv) == 3):
- commit = True
-
-for extension in extensions:
- files_to_fix = filemanagement.listallfilesinfolder(directory,\
- extension)
-
-def buildincludeguardname(path,filename):
- full_file_name = 'WEBRTC_' + path + filename
- full_file_name = full_file_name.upper()
- full_file_name = stringmanipulation.replaceoccurances(full_file_name, '/', '_')
- full_file_name = stringmanipulation.replaceoccurances(full_file_name, '\\', '_')
- full_file_name = stringmanipulation.replaceoccurances(full_file_name, '.', '_')
- full_file_name += '_'
- return full_file_name
-
-def buildnewincludeguardset(path,filename):
- include_guard_name = buildincludeguardname(path,filename)
- if(include_guard_name == ''):
- return []
- return_value = []
- return_value.append('#ifndef ' + include_guard_name)
- return_value.append('#define ' + include_guard_name)
- return_value.append(include_guard_name)
- return return_value
-
-def printincludeguardset(include_guard_set):
- print 'First line: ' + include_guard_set[0]
- print 'Second line: ' + include_guard_set[1]
- print 'Last line: ' + include_guard_set[2]
- return
-
-include_guard_begin_identifier = ['#ifndef', '#if !defined']
-include_guard_second_identifier = ['#define']
-def findincludeguardidentifier(line):
- for begin_identifier in include_guard_begin_identifier:
- line = stringmanipulation.removealloccurances(line,begin_identifier)
- for second_identifier in include_guard_begin_identifier:
- line = stringmanipulation.removealloccurances(line,second_identifier)
- removed_prefix = [True,'']
- line = stringmanipulation.whitespacestoonespace(line)
- while(removed_prefix[0]):
- removed_prefix = stringmanipulation.removeprefix(line,' ')
- line = removed_prefix[1]
- line = stringmanipulation.removealloccurances(line,'(')
- if(line == ''):
- return ''
- word_pos = stringmanipulation.getword(line,0)
- return_value = line[0:word_pos[1]]
- return_value = return_value.rstrip('\r\n')
- return return_value
-
-def findoldincludeguardset(path,filename):
- return_value = []
- full_file_name = path + filename
- file_pointer = open(full_file_name,'r')
- include_guard_name = ''
- for line in file_pointer:
- if (include_guard_name == ''):
- for compare_string in include_guard_begin_identifier:
- if (stringmanipulation.issubstring(compare_string, line) != -1):
- include_guard_name = findincludeguardidentifier(line)
- if (include_guard_name == ''):
- break
- line = line.rstrip('\r\n')
- return_value.append(line)
- break
- else:
- for compare_string in include_guard_second_identifier:
- if (stringmanipulation.issubstring(compare_string, line) != -1):
- if (stringmanipulation.issubstring(include_guard_name, line) != -1):
- line = line.rstrip('\r\n')
- return_value.append(line)
- return_value.append(include_guard_name)
- return return_value
- include_guard_name = ''
- return_value = []
- return []
-
-failed_files = []
-for index in range(len(files_to_fix)):
- if(commit):
- print (100*index)/len(files_to_fix)
- path_dir = files_to_fix[index][0]
- filename = files_to_fix[index][1]
- is_ignore = False
- for ignore_names in ignore_these:
- if(filename == ignore_names):
- is_ignore = True
- break
- if(is_ignore):
- continue
- old_include_guard_set = findoldincludeguardset(path_dir,filename)
- if (len(old_include_guard_set) != 3) :
- failed_files.append('unable to figure out the include guards for ' + filename)
- continue
-
- new_include_guard_set = buildnewincludeguardset(path_dir,filename)
- if (len(new_include_guard_set) != 3) :
- failed_files.append('unable to figure out new the include guards for ' + filename)
- continue
-
- if(not commit):
- print 'old guard: ' + old_include_guard_set[2]
- print 'new guard: ' + new_include_guard_set[2]
- continue
-
- for index in range(2):
- # enough to only replace for file. However, no function for that
- for extension in extensions:
- filemanagement.replacestringinfolder(path_dir,old_include_guard_set[index],new_include_guard_set[index],extension)
- # special case for last to avoid complications
- for extension in extensions:
- filemanagement.replacestringinfolder(path_dir,' ' + old_include_guard_set[2],' ' + new_include_guard_set[2],extension)
- filemanagement.replacestringinfolder(path_dir,'\\/\\/' + old_include_guard_set[2],'\\/\\/ ' + new_include_guard_set[2],extension)
-
-
-if(len(failed_files) > 0):
- print 'Following failures should be investigated manually:'
-for line in failed_files:
- print line
diff --git a/tools/refactoring/fixnames.py b/tools/refactoring/fixnames.py
deleted file mode 100644
index 15381e38d4..0000000000
--- a/tools/refactoring/fixnames.py
+++ /dev/null
@@ -1,387 +0,0 @@
-#!/usr/bin/env python
-
-import stringmanipulation
-import filemanagement
-import p4commands
-import sys
-
-name_space_to_ignore = 'GIPS::'
-#only allow one prefix to be removed since allowing multiple will complicate
-# things
-prefix_to_filter = 'gips'
-#words_to_filter = ['Module']
-# it might be dangerous to remove GIPS but keep it default
-words_to_filter = ['Module','GIPS']
-
-# This script finds all the words that should be replaced in an h-file. Once
-# all words that should be replaced are found it does a global search and
-# replace.
-
-extensions_to_edit = ['.cpp','.cc','.h']
-
-#line = ' ~hiGIPSCriticalSectionScoped()'
-#print line
-#position = stringmanipulation.getword(line,11)
-#old_word = line[position[0]:position[0]+position[1]]
-#result = stringmanipulation.removealloccurances(old_word,'gips')
-#new_word = result
-#print old_word
-#print position[0]
-#print position[0]+position[1]
-#print new_word
-#quit()
-
-# Ignore whole line if any item in this table is a substring of the line
-do_not_replace_line_table = []
-do_not_replace_line_table.append('namespace GIPS')
-
-# [old_string,new_string]
-# List of things to remove that are static:
-manual_replace_table = []
-#manual_replace_table.append(['using namespace GIPS;',''])
-#manual_replace_table.append(['CreateGipsEvent','CreateEvent'])
-#manual_replace_table.append(['CreateGIPSTrace','CreateTrace'])
-#manual_replace_table.append(['ReturnGIPSTrace','ReturnTrace'])
-#manual_replace_table.append(['CreateGIPSFile','CreateFile'])
-replace_table = manual_replace_table
-#replace_table.append(['GIPS::','webrtc::'])
-# List of things to not remove that are static, i.e. exceptions:
-# don't replace any of the GIPS_Words since that will affect all files
-# do that in a separate script!
-do_not_replace_table = []
-do_not_replace_table.append('GIPS_CipherTypes')
-do_not_replace_table.append('GIPS_AuthenticationTypes')
-do_not_replace_table.append('GIPS_SecurityLevels')
-do_not_replace_table.append('GIPS_encryption')
-do_not_replace_table.append('~GIPS_encryption')
-do_not_replace_table.append('GIPS_transport')
-do_not_replace_table.append('~GIPS_transport')
-do_not_replace_table.append('GIPSTraceCallback')
-do_not_replace_table.append('~GIPSTraceCallback')
-do_not_replace_table.append('GIPS_RTP_CSRC_SIZE')
-do_not_replace_table.append('GIPS_RTPDirections')
-do_not_replace_table.append('GIPS_RTP_INCOMING')
-do_not_replace_table.append('GIPS_RTP_OUTGOING')
-do_not_replace_table.append('GIPSFrameType')
-do_not_replace_table.append('GIPS_FRAME_EMPTY')
-do_not_replace_table.append('GIPS_AUDIO_FRAME_SPEECH')
-do_not_replace_table.append('GIPS_AUDIO_FRAME_CN')
-do_not_replace_table.append('GIPS_VIDEO_FRAME_KEY')
-do_not_replace_table.append('GIPS_VIDEO_FRAME_DELTA')
-do_not_replace_table.append('GIPS_VIDEO_FRAME_GOLDEN')
-do_not_replace_table.append('GIPS_VIDEO_FRAME_DELTA_KEY')
-do_not_replace_table.append('GIPS_PacketType')
-do_not_replace_table.append('GIPS_PACKET_TYPE_RTP')
-do_not_replace_table.append('GIPS_PACKET_TYPE_KEEP_ALIVE')
-do_not_replace_table.append('GIPS_AudioLayers')
-do_not_replace_table.append('GIPS_AUDIO_PLATFORM_DEFAULT')
-do_not_replace_table.append('GIPS_AUDIO_WINDOWS_WAVE')
-do_not_replace_table.append('GIPS_AUDIO_WINDOWS_CORE')
-do_not_replace_table.append('GIPS_AUDIO_LINUX_ALSA')
-do_not_replace_table.append('GIPS_AUDIO_LINUX_PULSE')
-do_not_replace_table.append('GIPS_AUDIO_FORMAT')
-do_not_replace_table.append('GIPS_PCM_16_16KHZ')
-do_not_replace_table.append('GIPS_PCM_16_8KHZ')
-do_not_replace_table.append('GIPS_G729')
-do_not_replace_table.append('GIPSAMRmode')
-do_not_replace_table.append('GIPS_RFC3267_BWEFFICIENT')
-do_not_replace_table.append('GIPS_RFC3267_OCTETALIGNED')
-do_not_replace_table.append('GIPS_RFC3267_FILESTORAGE')
-do_not_replace_table.append('GIPS_NCModes')
-do_not_replace_table.append('GIPS_NC_OFF')
-do_not_replace_table.append('GIPS_NC_MILD')
-do_not_replace_table.append('GIPS_NC_MODERATE')
-do_not_replace_table.append('GIPS_NC_AGGRESSIVE')
-do_not_replace_table.append('GIPS_NC_VERY_AGGRESSIVE')
-do_not_replace_table.append('GIPS_AGCModes')
-do_not_replace_table.append('GIPS_AGC_OFF')
-do_not_replace_table.append('GIPS_AGC_ANALOG')
-do_not_replace_table.append('GIPS_AGC_DIGITAL')
-do_not_replace_table.append('GIPS_AGC_STANDALONE_DIG')
-do_not_replace_table.append('GIPS_ECModes')
-do_not_replace_table.append('GIPS_EC_UNCHANGED')
-do_not_replace_table.append('GIPS_EC_DEFAULT')
-do_not_replace_table.append('GIPS_EC_CONFERENCE')
-do_not_replace_table.append('GIPS_EC_AEC')
-do_not_replace_table.append('GIPS_EC_AES')
-do_not_replace_table.append('GIPS_EC_AECM')
-do_not_replace_table.append('GIPS_EC_NEC_IAD')
-do_not_replace_table.append('GIPS_AESModes')
-do_not_replace_table.append('GIPS_AES_DEFAULT')
-do_not_replace_table.append('GIPS_AES_NORMAL')
-do_not_replace_table.append('GIPS_AES_HIGH')
-do_not_replace_table.append('GIPS_AES_ATTENUATE')
-do_not_replace_table.append('GIPS_AES_NORMAL_SOFT_TRANS')
-do_not_replace_table.append('GIPS_AES_HIGH_SOFT_TRANS')
-do_not_replace_table.append('GIPS_AES_ATTENUATE_SOFT_TRANS')
-do_not_replace_table.append('GIPS_AECMModes')
-do_not_replace_table.append('GIPS_AECM_QUIET_EARPIECE_OR_HEADSET')
-do_not_replace_table.append('GIPS_AECM_EARPIECE')
-do_not_replace_table.append('GIPS_AECM_LOUD_EARPIECE')
-do_not_replace_table.append('GIPS_AECM_SPEAKERPHONE')
-do_not_replace_table.append('GIPS_AECM_LOUD_SPEAKERPHONE')
-do_not_replace_table.append('AECM_LOUD_SPEAKERPHONE')
-do_not_replace_table.append('GIPS_VAD_CONVENTIONAL')
-do_not_replace_table.append('GIPS_VAD_AGGRESSIVE_LOW')
-do_not_replace_table.append('GIPS_VAD_AGGRESSIVE_MID')
-do_not_replace_table.append('GIPS_VAD_AGGRESSIVE_HIGH')
-do_not_replace_table.append('GIPS_NetEQModes')
-do_not_replace_table.append('GIPS_NETEQ_DEFAULT')
-do_not_replace_table.append('GIPS_NETEQ_STREAMING')
-do_not_replace_table.append('GIPS_NETEQ_FAX')
-do_not_replace_table.append('GIPS_NetEQBGNModes')
-do_not_replace_table.append('GIPS_BGN_ON')
-do_not_replace_table.append('GIPS_BGN_FADE')
-do_not_replace_table.append('GIPS_BGN_OFF')
-do_not_replace_table.append('GIPS_OnHoldModes')
-do_not_replace_table.append('GIPS_HOLD_SEND_AND_PLAY')
-do_not_replace_table.append('GIPS_HOLD_SEND_ONLY')
-do_not_replace_table.append('GIPS_HOLD_PLAY_ONLY')
-do_not_replace_table.append('GIPS_PayloadFrequencies')
-do_not_replace_table.append('GIPS_FREQ_8000_HZ')
-do_not_replace_table.append('GIPS_FREQ_16000_HZ')
-do_not_replace_table.append('GIPS_FREQ_32000_HZ')
-do_not_replace_table.append('GIPS_TelephoneEventDetectionMethods')
-do_not_replace_table.append('GIPS_IN_BAND')
-do_not_replace_table.append('GIPS_OUT_OF_BAND')
-do_not_replace_table.append('GIPS_IN_AND_OUT_OF_BAND')
-do_not_replace_table.append('GIPS_ProcessingTypes')
-do_not_replace_table.append('GIPS_PLAYBACK_PER_CHANNEL')
-do_not_replace_table.append('GIPS_PLAYBACK_ALL_CHANNELS_MIXED')
-do_not_replace_table.append('GIPS_RECORDING_PER_CHANNEL')
-do_not_replace_table.append('GIPS_RECORDING_ALL_CHANNELS_MIXED')
-do_not_replace_table.append('GIPS_StereoChannel')
-do_not_replace_table.append('GIPS_StereoLeft')
-do_not_replace_table.append('GIPS_StereoRight')
-do_not_replace_table.append('GIPS_StereoBoth')
-do_not_replace_table.append('GIPS_stat_val')
-do_not_replace_table.append('GIPS_P56_statistics')
-do_not_replace_table.append('GIPS_echo_statistics')
-do_not_replace_table.append('GIPS_NetworkStatistics')
-do_not_replace_table.append('GIPS_JitterStatistics')
-do_not_replace_table.append('GIPSVideoRawType')
-do_not_replace_table.append('GIPS_VIDEO_I420')
-do_not_replace_table.append('GIPS_VIDEO_YV12')
-do_not_replace_table.append('GIPS_VIDEO_YUY2')
-do_not_replace_table.append('GIPS_VIDEO_UYVY')
-do_not_replace_table.append('GIPS_VIDEO_IYUV')
-do_not_replace_table.append('GIPS_VIDEO_ARGB')
-do_not_replace_table.append('GIPS_VIDEO_RGB24')
-do_not_replace_table.append('GIPS_VIDEO_RGB565')
-do_not_replace_table.append('GIPS_VIDEO_ARGB4444')
-do_not_replace_table.append('GIPS_VIDEO_ARGB1555')
-do_not_replace_table.append('GIPS_VIDEO_MJPG')
-do_not_replace_table.append('GIPS_VIDEO_NV12')
-do_not_replace_table.append('GIPS_VIDEO_NV21')
-do_not_replace_table.append('GIPS_VIDEO_Unknown')
-do_not_replace_table.append('GIPSVideoLayouts')
-do_not_replace_table.append('GIPS_LAYOUT_NONE')
-do_not_replace_table.append('GIPS_LAYOUT_DEFAULT')
-do_not_replace_table.append('GIPS_LAYOUT_ADVANCED1')
-do_not_replace_table.append('GIPS_LAYOUT_ADVANCED2')
-do_not_replace_table.append('GIPS_LAYOUT_ADVANCED3')
-do_not_replace_table.append('GIPS_LAYOUT_ADVANCED4')
-do_not_replace_table.append('GIPS_LAYOUT_FULL')
-do_not_replace_table.append('KGIPSConfigParameterSize')
-do_not_replace_table.append('KGIPSPayloadNameSize')
-do_not_replace_table.append('GIPSVideoCodecH263')
-do_not_replace_table.append('GIPSVideoH264Packetization')
-do_not_replace_table.append('GIPS_H264_SingleMode')
-do_not_replace_table.append('GIPS_H264_NonInterleavedMode')
-do_not_replace_table.append('GIPSVideoCodecComplexity')
-do_not_replace_table.append('GIPSVideoCodec_Complexity_Normal')
-do_not_replace_table.append('GIPSVideoCodec_Comlexity_High')
-do_not_replace_table.append('GIPSVideoCodec_Comlexity_Higher')
-do_not_replace_table.append('GIPSVideoCodec_Comlexity_Max')
-do_not_replace_table.append('GIPSVideoCodecH264')
-do_not_replace_table.append('GIPSVideoH264Packetization')
-do_not_replace_table.append('GIPSVideoCodecComplexity')
-do_not_replace_table.append('GIPSVideoCodecProfile')
-do_not_replace_table.append('KGIPSConfigParameterSize')
-do_not_replace_table.append('KGIPSMaxSVCLayers')
-do_not_replace_table.append('GIPSVideoH264LayerTypes')
-do_not_replace_table.append('GIPS_H264SVC_Base')
-do_not_replace_table.append('GIPS_H264SVC_Extend_2X2')
-do_not_replace_table.append('GIPS_H264SVC_Extend_1X1')
-do_not_replace_table.append('GIPS_H264SVC_Extend_MGS')
-do_not_replace_table.append('GIPS_H264SVC_Extend_1_5')
-do_not_replace_table.append('GIPS_H264SVC_Extend_Custom')
-do_not_replace_table.append('GIPSVideoH264LayersProperties')
-do_not_replace_table.append('GIPSVideoH264LayerTypes')
-do_not_replace_table.append('GIPSVideoH264Layers')
-do_not_replace_table.append('GIPSVideoH264LayersProperties')
-do_not_replace_table.append('GIPSVideoCodecH264SVC')
-do_not_replace_table.append('GIPSVideoCodecComplexity')
-do_not_replace_table.append('GIPSVideoCodecProfile')
-do_not_replace_table.append('GIPSVideoH264Layers')
-do_not_replace_table.append('GIPSVideoCodecVP8')
-do_not_replace_table.append('GIPSVideoCodecComplexity')
-do_not_replace_table.append('GIPSVideoCodecMPEG')
-do_not_replace_table.append('GIPSVideoCodecGeneric')
-do_not_replace_table.append('GIPSVideoCodecType')
-do_not_replace_table.append('GIPSVideoCodec_H263')
-do_not_replace_table.append('GIPSVideoCodec_H264')
-do_not_replace_table.append('GIPSVideoCodec_H264SVC')
-do_not_replace_table.append('GIPSVideoCodec_VP8')
-do_not_replace_table.append('GIPSVideoCodec_MPEG4')
-do_not_replace_table.append('GIPSVideoCodec_I420')
-do_not_replace_table.append('GIPSVideoCodec_RED')
-do_not_replace_table.append('GIPSVideoCodec_ULPFEC')
-do_not_replace_table.append('GIPSVideoCodec_Unknown')
-do_not_replace_table.append('GIPSVideoCodecUnion')
-do_not_replace_table.append('GIPSVideoCodecH263')
-do_not_replace_table.append('GIPSVideoCodecH264')
-do_not_replace_table.append('GIPSVideoCodecH264SVC')
-do_not_replace_table.append('GIPSVideoCodecVP8')
-do_not_replace_table.append('GIPSVideoCodecMPEG4')
-do_not_replace_table.append('GIPSVideoCodecGeneric')
-do_not_replace_table.append('GIPSVideoCodec')
-do_not_replace_table.append('GIPSVideoCodecType')
-do_not_replace_table.append('GIPSVideoCodecUnion')
-do_not_replace_table.append('GIPSAudioFrame')
-do_not_replace_table.append('GIPS_CodecInst')
-do_not_replace_table.append('GIPS_FileFormats')
-do_not_replace_table.append('GIPSTickTime')
-do_not_replace_table.append('GIPS_Word64')
-do_not_replace_table.append('GIPS_UWord64')
-do_not_replace_table.append('GIPS_Word32')
-do_not_replace_table.append('GIPS_UWord32')
-do_not_replace_table.append('GIPS_Word16')
-do_not_replace_table.append('GIPS_UWord16')
-do_not_replace_table.append('GIPS_Word8')
-do_not_replace_table.append('GIPS_UWord8')
-
-if((len(sys.argv) != 2) and (len(sys.argv) != 3)):
- print 'parameters are: parent directory [--commit]'
- quit()
-
-if((len(sys.argv) == 3) and (sys.argv[2] != '--commit')):
- print 'parameters are: parent directory [--commit]'
- quit()
-
-commit = (len(sys.argv) == 3)
-
-directory = sys.argv[1];
-if(not filemanagement.pathexist(directory)):
- print 'path ' + directory + ' does not exist'
- quit()
-
-# APIs are all in h-files
-extension = '.h'
-
-# All h-files
-files_to_modify = filemanagement.listallfilesinfolder(directory,\
- extension)
-
-def isinmanualremovetable( compare_word ):
- for old_word, new_word in manual_replace_table:
- if(old_word == compare_word):
- return True
- return False
-
-# Begin
-# This function looks at each line and decides which words should be replaced
-# that is this is the only part of the script that you will ever want to change!
-def findstringstoreplace(line):
- original_line = line
-# Dont replace compiler directives
- if(line[0] == '#'):
- return []
-# Dont allow global removal of namespace gips since it is very intrusive
- for sub_string_compare in do_not_replace_line_table:
- index = stringmanipulation.issubstring(line,sub_string_compare)
- if(index != -1):
- return []
-
- return_value = []
-
- line = stringmanipulation.removeccomment(line)
- line = stringmanipulation.whitespacestoonespace(line)
- if(len(line) == 0):
- return []
- if(line[0] == '*'):
- return []
- index = stringmanipulation.issubstring(line,prefix_to_filter)
- while index >= 0:
- dont_store_hit = False
- word_position = stringmanipulation.getword(line, index)
- start_of_word = word_position[0]
- size_of_word = word_position[1]
- end_of_word = start_of_word + size_of_word
- old_word = line[start_of_word:end_of_word]
- if(isinmanualremovetable(old_word)):
- dont_store_hit = True
- if((end_of_word + 2 < len(line)) and\
- name_space_to_ignore == line[start_of_word:end_of_word+2]):
- dont_store_hit = True
-
- result = stringmanipulation.removeprefix(old_word,prefix_to_filter)
- new_word = result[1]
- for word_to_filter in words_to_filter:
- new_word = stringmanipulation.removealloccurances(new_word,word_to_filter)
- result = stringmanipulation.removeprefix(new_word,'_')
- new_word = result[1]
- new_word = stringmanipulation.fixabbreviations(new_word)
- new_word = stringmanipulation.removealloccurances(new_word,'_')
- if(not dont_store_hit):
- return_value.append([old_word,new_word])
-# remove the word we found from the string so we dont find it again
- line = line[0:start_of_word] + line[end_of_word:len(line)]
- index = stringmanipulation.issubstring(line,'GIPS')
-
- return return_value
-# End
-
-# loop through all files
-for path, file_name in files_to_modify:
-# if(file_name != 'GIPSTickUtil.h'):
-# continue
- full_file_name = path + file_name
- file_pointer = open(full_file_name,'r')
-# print file_name
-#loop through all lines
- for line in file_pointer:
-# print line
- local_replace_string = findstringstoreplace(line)
- #print local_replace_string
- if(len(local_replace_string) != 0):
- replace_table.extend(local_replace_string)
-
-
-# we have built our replace table now
-replace_table = stringmanipulation.removeduplicates( replace_table )
-replace_table = stringmanipulation.ordertablesizefirst( replace_table )
-replace_table = stringmanipulation.complement(replace_table,\
- do_not_replace_table)
-
-def replaceoriginal( path,my_table ):
- size_of_table = len(my_table)
- for index in range(len(my_table)):
- old_name = my_table[index][0]
- new_name = my_table[index][1]
- filemanagement.replacestringinfolder(path, old_name, new_name,\
- ".h")
- print (100*index) / (size_of_table*2)
-
-def replaceall( my_table, extension_list ):
- size_of_table = len(my_table)
- for index in range(len(my_table)):
- old_name = my_table[index][0]
- new_name = my_table[index][1]
- new_name = new_name
- for extension in extensions_to_edit:
- filemanagement.replacestringinallsubfolders(old_name, new_name,
- extension)
- print 100*(size_of_table + index) / (size_of_table*2)
-
-
-if(commit):
- print 'commiting'
- replace_table = stringmanipulation.removenochange(replace_table)
- p4commands.checkoutallfiles()
- replaceoriginal(directory,replace_table)
- replaceall(replace_table,extensions_to_edit)
- p4commands.revertunchangedfiles()
-else:
- for old_name, new_name in replace_table:
- print 'Going to replace [' + old_name + '] with [' + new_name + ']'
diff --git a/tools/refactoring/integratefiles.py b/tools/refactoring/integratefiles.py
deleted file mode 100644
index c5cc892095..0000000000
--- a/tools/refactoring/integratefiles.py
+++ /dev/null
@@ -1,100 +0,0 @@
-#!/usr/bin/env python
-
-import stringmanipulation
-import filemanagement
-import p4commands
-import sys
-
-extensions = ['.h', '.cpp', '.cc', '.gyp']
-
-ignore_these = ['list_no_stl.h','map_no_stl.h','constructor_magic.h']
-
-exceptions = [
-['GIPSRWLock.h','rw_lock.h'],
-['GIPSCriticalsection.h','critical_section.h'],
-]
-
-if((len(sys.argv) != 4) and (len(sys.argv) != 5)):
- print 'parameters are: parent directory extension new extension [--commit]'
- quit()
-
-directory = sys.argv[1];
-if(not filemanagement.pathexist(directory)):
- print 'path ' + directory + ' does not exist'
- quit()
-
-old_extension = sys.argv[2]
-if(not stringmanipulation.isextension(old_extension)):
- print old_extension + ' is not a valid extension'
- quit()
-
-new_extension = sys.argv[3]
-if(not stringmanipulation.isextension(new_extension)):
- print new_extension + ' is not a valid extension'
- quit()
-
-if((len(sys.argv) == 5) and (sys.argv[4] != '--commit')):
- print 'parameters are: parent directory extension new extension [--commit]'
- quit()
-
-commit = False
-if(len(sys.argv) == 5):
- commit = True
-
-files_to_integrate = filemanagement.listallfilesinfolder(directory,\
- old_extension)
-
-if(commit):
- p4commands.checkoutallfiles()
-for index in range(len(files_to_integrate)):
- if(commit):
- print (100*index)/len(files_to_integrate)
- path_dir = files_to_integrate[index][0]
- filename = files_to_integrate[index][1]
- is_ignore = False
- for ignore_names in ignore_these:
- if(filename == ignore_names):
- is_ignore = True
- break
- if(is_ignore):
- continue
-
- new_file_name = ''
- is_exception = False
- for exception_name,exception_name_new in exceptions:
- if(filename == exception_name):
- is_exception = True
- new_file_name = exception_name_new
- break
-
- if(not is_exception):
- new_file_name = filename
-
- new_file_name = stringmanipulation.removeallprefix(new_file_name,\
- 'gips')
- new_file_name = stringmanipulation.removealloccurances(new_file_name,\
- 'module')
- new_file_name = stringmanipulation.changeextension(new_file_name,\
- old_extension,\
- new_extension)
- new_file_name = stringmanipulation.fixabbreviations( new_file_name )
- new_file_name = stringmanipulation.lowercasewithunderscore(new_file_name)
- if(not commit):
- print 'File ' + filename + ' will be replaced with ' + new_file_name
- continue
- full_new_file_name = path_dir + new_file_name
- full_old_file_name = path_dir + filename
- if(full_new_file_name != full_old_file_name):
- p4commands.integratefile(full_old_file_name,full_new_file_name)
- else:
- print 'skipping ' + new_file_name + ' due to no change'
- for extension in extensions:
- print 'replacing ' + filename
- if (extension == ".gyp"):
- filemanagement.replacestringinallsubfolders(
- filename,new_file_name,extension)
- else:
- filemanagement.replacestringinallsubfolders(
- '\"' + filename + '\"', '\"' + new_file_name + '\"', extension)
-if(commit):
- p4commands.revertunchangedfiles()
diff --git a/tools/refactoring/p4commands.py b/tools/refactoring/p4commands.py
deleted file mode 100644
index 71ac31b0d0..0000000000
--- a/tools/refactoring/p4commands.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import os
-import filemanagement
-
-# checks out entire p4 repository
-def checkoutallfiles():
- os.system('p4 edit //depotGoogle/...')
- return
-
-# reverts all unchanged files, this is completely innoculus
-def revertunchangedfiles():
- os.system('p4 revert -a //depotGoogle/...')
- return
-
-def integratefile( old_name, new_name):
- if(old_name == new_name):
- return
- if(not filemanagement.fileexist(old_name)):
- return
- integrate_command = 'p4 integrate -o -f ' +\
- old_name +\
- ' ' +\
- new_name +\
- ' > p4summary.txt 2> error.txt'
- os.system(integrate_command)
- #print integrate_command
- delete_command = 'p4 delete -c default ' +\
- old_name +\
- ' > p4summary.txt 2> error.txt'
- os.system(delete_command)
- #print delete_command
- return
diff --git a/tools/refactoring/removetrace.py b/tools/refactoring/removetrace.py
deleted file mode 100644
index 43c622dc40..0000000000
--- a/tools/refactoring/removetrace.py
+++ /dev/null
@@ -1,161 +0,0 @@
-# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS. All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-
-# NOTE: This is a hack which disobeys a number of conventions and best
-# practices. It's here just to be easily shared. If it's to remain in the
-# repository it should be refactored.
-
-#!/usr/bin/env python
-
-import stringmanipulation
-import filemanagement
-import sys
-
-trace_remove_key_word = 'kTraceModuleCall'
-
-if((len(sys.argv) != 2) and (len(sys.argv) != 3)):
- print 'parameters are: parent directory [--commit]'
- quit()
-
-if((len(sys.argv) == 3) and (sys.argv[2] != '--commit')):
- print 'parameters are: parent directory [--commit]'
- quit()
-
-commit = (len(sys.argv) == 3)
-
-directory = sys.argv[1];
-occurances = []
-
-trace_identifier = 'WEBRTC_TRACE('
-extensions = ['.h','.cc','.c','.cpp']
-files_to_fix = []
-for extension in extensions:
- files_to_fix.extend(filemanagement.listallfilesinfolder(directory,\
- extension))
-
-# This function identifies the begining of a trace statement
-def istracebegining(line):
- return stringmanipulation.issubstring(line, trace_identifier) != -1
-
-def endofstatement(line):
- return stringmanipulation.issubstring(line, ';') != -1
-
-def removekeywordfound(line):
- return stringmanipulation.issubstring(line, trace_remove_key_word) != -1
-
-# Used to store temporary result before flushing to real file when finished
-def temporaryfilename():
- return 'deleteme.txt'
-
-
-def find_occurances(path, file_name):
- full_filename = path + file_name
- file_handle = open(full_filename,'r')
- line_is_trace = False
- last_trace_line = -1
- for line_nr, line in enumerate(file_handle):
- if(istracebegining(line)):
- line_is_trace = True;
- last_trace_line = line_nr
-
- if(line_is_trace):
- if(removekeywordfound(line)):
- occurances.append(last_trace_line)
-
- if(endofstatement(line)):
- line_is_trace = False;
-
-def remove_occurances(path, file_name):
- full_file_name = path + file_name
- if (not filemanagement.fileexist(full_file_name)):
- print 'File ' + full_file_name + ' is not found.'
- print 'Should not happen! Ever!'
- quit()
-
- full_temporary_file_name = path + temporaryfilename()
- temporary_file = open(full_temporary_file_name,'w')
- original_file = open(full_file_name,'r')
- next_occurance_id = 0;
- removing_statement = False
- if(len(occurances) == next_occurance_id):
- return
- next_occurance = occurances[next_occurance_id]
- next_occurance_id += 1
- for line_nr, line in enumerate(original_file):
- if(line_nr == next_occurance):
- removing_statement = True
- if(len(occurances) == next_occurance_id):
- next_occurance_id = -1
- else:
- next_occurance = occurances[next_occurance_id]
- next_occurance_id += 1
-
- if (not removing_statement):
- temporary_file.writelines(line)
-
- if(endofstatement(line)):
- removing_statement = False;
-
- temporary_file.close()
- original_file.close()
- filemanagement.copyfile(full_file_name,full_temporary_file_name)
- filemanagement.deletefile(full_temporary_file_name)
-
-def nextoccurance():
- if (len(occurances) == 0):
- return -1
- return_value = occurances[0]
- occurances = occurances[1:len(occurances)]
- return return_value
-
-def would_be_removed_occurances(path, file_name):
- full_file_name = path + file_name
- if (not filemanagement.fileexist(full_file_name)):
- print 'File ' + full_file_name + ' is not found.'
- print 'Should not happen! Ever!'
- quit()
-
- original_file = open(full_file_name,'r')
- removing_statement = False
- next_occurance_id = 0;
- if(len(occurances) == next_occurance_id):
- return
- next_occurance = occurances[next_occurance_id]
- next_occurance_id += 1
- for line_nr, line in enumerate(original_file):
- if(line_nr == next_occurance):
- removing_statement = True
- if(len(occurances) == next_occurance_id):
- return
- next_occurance = occurances[next_occurance_id]
- next_occurance_id += 1
-
- if (removing_statement):
- print line_nr
-
- if(endofstatement(line)):
- removing_statement = False;
- if(next_occurance == -1):
- break
- original_file.close()
-
-for index in range(len(files_to_fix)):
- if(commit):
- print (100*index)/len(files_to_fix)
-
- path_dir = files_to_fix[index][0]
- filename = files_to_fix[index][1]
-
- #print path_dir + filename
- occurances = []
- find_occurances(path_dir, filename)
-
- if(not commit):
- would_be_removed_occurances(path_dir, filename)
- continue
- remove_occurances(path_dir, filename)
diff --git a/tools/refactoring/stringmanipulation.py b/tools/refactoring/stringmanipulation.py
deleted file mode 100644
index 0d9e0ff3a5..0000000000
--- a/tools/refactoring/stringmanipulation.py
+++ /dev/null
@@ -1,303 +0,0 @@
-import string
-
-# returns tuple, [success,updated_string] where the updated string has
-# has one less (the first) occurance of match string
-def removefirstoccurance( remove_string, match_string ):
- lowercase_string = remove_string.lower()
- lowercase_match_string = match_string.lower()
- lowest_index = lowercase_string.find(lowercase_match_string)
- if(lowest_index == -1):
- return [False,remove_string]
- past_match_index = lowest_index + len(lowercase_match_string)
- highest_index = len(remove_string)
- remove_string = remove_string[0:lowest_index] + remove_string[past_match_index: highest_index]
- return [True,remove_string]
-
-# returns a string with all occurances of match_string removed
-def removealloccurances( remove_string, match_string ):
- return_value = [True, remove_string]
- while(return_value[0]):
- return_value = removefirstoccurance(return_value[1],match_string)
- return return_value[1]
-
-# removes an occurance of match_string only if it's first in the string
-# returns tuple [succes, new_string]
-def removeprefix( remove_string, match_string ):
- lowercase_string = remove_string.lower()
- lowercase_match_string = match_string.lower()
- lowest_index = lowercase_string.find(lowercase_match_string)
- if(lowest_index == -1):
- return [False,remove_string]
- if(lowest_index != 0):
- return [False,remove_string]
- past_match_index = lowest_index + len(lowercase_match_string)
- highest_index = len(remove_string)
- remove_string = remove_string[0:lowest_index] + remove_string[past_match_index: highest_index]
-# print lowest_index
-# print past_match_index
- return [True,remove_string]
-
-# removes multiple occurances of match string as long as they are first in
-# the string
-def removeallprefix( remove_string, match_string ):
- return_value = [True, remove_string]
- while(return_value[0]):
- return_value = removeprefix(return_value[1],match_string)
- return return_value[1]
-
-# returns true if extensionstring is a correct extension
-def isextension( extensionstring ):
- if(len(extensionstring) < 2):
- return False
- if(extensionstring[0] != '.'):
- return False
- if(extensionstring[1:len(extensionstring)-1].find('.') != -1):
- return False
- return True
-
-# returns the index of the start of the last occurrence of match_string,
-# or -1 if there is none
-def findlastoccurance( original_string, match_string ):
- found_index = -1
- search_index = original_string.find(match_string)
- # str.find(sub, start) keeps indices relative to the full string, unlike
- # searching a slice
- while(search_index != -1):
- found_index = search_index
- search_index = original_string.find(match_string, search_index + 1)
- return found_index
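-# Illustrative usage (hypothetical values):
-#   findlastoccurance('foo.cc.cc', '.cc') -> 6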
-
-# changes extension from original_extension to new_extension
-def changeextension( original_string, original_extension, new_extension):
- if(not isextension(original_extension)):
- return original_string
- if(not isextension(new_extension)):
- return original_string
- index = findlastoccurance(original_string, original_extension)
- if(index == -1):
- return original_string
- return_value = original_string[0:index] + new_extension
- return return_value
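-# Illustrative usage (hypothetical values):
-#   changeextension('audio_device.cc', '.cc', '.h') -> 'audio_device.h'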
-
-# wanted to do this with str.find, however it didn't seem to work, so do it manually
-# returns the index of the first capital letter
-def findfirstcapitalletter( original_string ):
- for index in range(len(original_string)):
- if(original_string[index].lower() != original_string[index]):
- return index
- return -1
-
-
-# replaces capital letters with underscore and lower case letter (except the
-# very first letter)
-def lowercasewithunderscore( original_string ):
-# ignore the first letter since there should be no underscore in front of it
- if(len(original_string) < 2):
- return original_string
- return_value = original_string[1:len(original_string)]
- index = findfirstcapitalletter(return_value)
- while(index != -1):
- return_value = return_value[0:index] + \
- '_' + \
- return_value[index].lower() + \
- return_value[index+1:len(return_value)]
- index = findfirstcapitalletter(return_value)
- return_value = original_string[0].lower() + return_value
- return return_value
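-# Illustrative usage (hypothetical value):
-#   lowercasewithunderscore('StartThread') -> 'start_thread'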
-
-# my_table is a list of [old_string, new_string] pairs; removes duplicate entries
-def removeduplicates( my_table ):
- new_table = []
- for old_string1, new_string1 in my_table:
- found = 0
- for old_string2, new_string2 in new_table:
- if(old_string1 == old_string2):
- found += 1
- if(new_string1 == new_string2):
- if(new_string1 == ''):
- found += found
- else:
- found += 1
- if(found == 1):
- print 'mismatching set, terminating program'
- print old_string1
- print new_string1
- print old_string2
- print new_string2
- quit()
- if(found == 2):
- break
- if(found == 0):
- new_table.append([old_string1,new_string1])
- return new_table
-
-def removenochange( my_table ):
- new_table = []
- for old_string, new_string in my_table:
- if(old_string != new_string):
- new_table.append([old_string,new_string])
- return new_table
-
-# orders the table by string length, largest first (useful for replacing
-# bigger strings first, since a smaller string can be inside the bigger one).
-# E.g. GIPS is a substring of GIPSVE; if we remove GIPS first, GIPSVE will
-# never be removed. N is small, so no need for a fancy sort algorithm; use
-# selection sort
-def ordertablesizefirst( my_table ):
- for current_index in range(len(my_table)):
- biggest_string = 0
- biggest_string_index = -1
- for search_index in range(len(my_table)):
- if(search_index < current_index):
- continue
- length_of_string = len(my_table[search_index][0])
- if(length_of_string > biggest_string):
- biggest_string = length_of_string
- biggest_string_index = search_index
- if(biggest_string_index == -1):
- print 'sorting algorithm failed, program exit'
- quit()
- old_value = my_table[current_index]
- my_table[current_index] = my_table[biggest_string_index]
- my_table[biggest_string_index] = old_value
- return my_table
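-# Illustrative usage (hypothetical values):
-#   ordertablesizefirst([['GIPS', ''], ['GIPSVE', '']])
-#   -> [['GIPSVE', ''], ['GIPS', '']]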
-
-# returns the index at which the shorter of string1 and string2 occurs in the
-# longer one (case-insensitive), or -1 if neither is a substring of the
-# other; assumes neither has whitespace
-def issubstring( string1, string2 ):
- if(len(string1) == 0):
- return -1
- if(len(string2) == 0):
- return -1
- large_string = string1
- small_string = string2
- if(len(string1) < len(string2)):
- large_string = string2
- small_string = string1
-
- for index in range(len(large_string)):
- large_sub_string = large_string[index:index+len(small_string)].lower()
- if(large_sub_string ==\
- small_string.lower()):
- return index
- return -1
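-# Illustrative usage (hypothetical values; 'Engine' starts at index 5):
-#   issubstring('Engine', 'VoiceEngine') -> 5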
-
-#not_part_of_word_table = [' ','(',')','{','}',':','\t','*','&','/','[',']','.',',','\n']
-#def ispartofword( char ):
-# for item in not_part_of_word_table:
-# if(char == item):
-# return False
-# return True
-
-# a word character is alphanumeric or '_'
-def ispartofword( char ):
- if(char.isalnum()):
- return True
- if(char == '_'):
- return True
- return False
-
-# returns the index of the first letter in the word that the current_index
-# is pointing to and the size of the word
-def getword( line, current_index):
- if(current_index < 0):
- return []
- line = line.rstrip()
- if(len(line) <= current_index):
- return []
- if(line[current_index] == ' '):
- return []
- start_pos = current_index
- while start_pos >= 0:
- if(not ispartofword(line[start_pos])):
- start_pos += 1
- break
- start_pos -= 1
- if(start_pos == -1):
- start_pos = 0
- end_pos = current_index
- while end_pos < len(line):
- if(not ispartofword(line[end_pos])):
- break
- end_pos += 1
- return [start_pos,end_pos - start_pos]
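-# Illustrative usage (hypothetical values; index 5 points into 'my_var',
-# which starts at index 4 and is 6 characters long):
-#   getword('int my_var = 0;', 5) -> [4, 6]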
-
-# my_table is a list of [string1,string2] pairs; complement_to_table is just
-# a list of strings to compare to string1
-def complement( my_table, complement_to_table ):
- new_table = []
- for index in range(len(my_table)):
- found = False
- for compare_string in complement_to_table:
- if(my_table[index][0].lower() == compare_string.lower()):
- found = True
- if(not found):
- new_table.append(my_table[index])
- return new_table
-
-def removestringfromhead( line, remove_string):
- for index in range(len(line)):
- if(line[index:index+len(remove_string)] != remove_string):
- return line[index:index+len(line)]
- return ''
-
-def removeccomment( line ):
- comment_string = '//'
- for index in range(len(line)):
- if(line[index:index+len(comment_string)] == comment_string):
- return line[0:index]
- return line
-
-def whitespacestoonespace( line ):
- return ' '.join(line.split())
-
-def fixabbreviations( original_string ):
- previouswascapital = (original_string[0].upper() == original_string[0])
- new_string = ''
- for index in range(len(original_string)):
- if(index == 0):
- new_string += original_string[index]
- continue
- if(original_string[index] == '_'):
- new_string += original_string[index]
- previouswascapital = False
- continue
- if(original_string[index].isdigit()):
- new_string += original_string[index]
- previouswascapital = False
- continue
- currentiscapital = (original_string[index].upper() == original_string[index])
- letter_to_add = original_string[index]
- if(previouswascapital and currentiscapital):
- letter_to_add = letter_to_add.lower()
- if(previouswascapital and (not currentiscapital)):
- old_letter = new_string[len(new_string)-1]
- new_string = new_string[0:len(new_string)-1]
- new_string += old_letter.upper()
- previouswascapital = currentiscapital
- new_string += letter_to_add
- return new_string
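-# Illustrative usage (hypothetical value):
-#   fixabbreviations('RTCPPacket') -> 'RtcpPacket'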
-
-def replaceoccurances(old_string, replace_string, replace_with_string):
- if (len(replace_string) == 0):
- return old_string
- if (len(old_string) < len(replace_string)):
- return old_string
- # Simple implementation, could probably be done smarter. Note that a for
- # loop over range() cannot skip past a replaced match, so use a while loop
- # and advance the index manually.
- new_string = ''
- index = 0
- while (index < len(old_string)):
- if (old_string[index:index + len(replace_string)] == replace_string):
- new_string += replace_with_string
- index += len(replace_string)
- else:
- new_string += old_string[index]
- index += 1
- return new_string
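-# Illustrative usage (hypothetical value):
-#   replaceoccurances('aXbX', 'X', 'YY') -> 'aYYbYY'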
diff --git a/tools/refactoring/trim.py b/tools/refactoring/trim.py
deleted file mode 100644
index 5539f5fef7..0000000000
--- a/tools/refactoring/trim.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/env python
-
-import sys
-import fileinput
-
-# Defaults
-TABSIZE = 4
-
-usage = """
-Replaces all TAB characters with %(TABSIZE)d space characters.
-In addition, all trailing space characters are removed.
-usage: trim file ...
-file ... : files are changed in place without taking any backup.
-""" % vars()
-
-def main():
-
- if len(sys.argv) == 1:
- sys.stderr.write(usage)
- sys.exit(2)
-
- # Iterate over the lines of all files listed in sys.argv[1:]
- for line in fileinput.input(sys.argv[1:], inplace=True):
- line = line.replace('\t',' '*TABSIZE) # replace TABs
- line = line.rstrip(None) # remove trailing whitespace
- print line # modify the file
-
-if __name__ == '__main__':
- main()
diff --git a/tools/refactoring/trimall.py b/tools/refactoring/trimall.py
deleted file mode 100644
index 7a1c458af3..0000000000
--- a/tools/refactoring/trimall.py
+++ /dev/null
@@ -1,59 +0,0 @@
-#!/usr/bin/env python
-
-import sys
-import fileinput
-import filemanagement
-import p4commands
-
-# Defaults
-TABSIZE = 4
-
-extensions = ['.h','.cc','.c','.cpp']
-
-ignore_these = ['my_ignore_header.h']
-
-usage = """
-Replaces all TAB characters with %(TABSIZE)d space characters.
-In addition, all trailing space characters are removed.
-usage: trim directory
-""" % vars()
-
-if((len(sys.argv) != 2) and (len(sys.argv) != 3)):
- sys.stderr.write(usage)
- sys.exit(2)
-
-directory = sys.argv[1]
-if(not filemanagement.pathexist(directory)):
- sys.stderr.write(usage)
- sys.exit(2)
-
-if((len(sys.argv) == 3) and (sys.argv[2] != '--commit')):
- sys.stderr.write(usage)
- sys.exit(2)
-
-commit = False
-if(len(sys.argv) == 3):
- commit = True
-
-files_to_fix = []
-for extension in extensions:
- files_to_fix.extend(filemanagement.listallfilesinfolder(directory,\
- extension))
-
-def main():
- if (commit):
- p4commands.checkoutallfiles()
- for path,file_name in files_to_fix:
- full_file_name = path + file_name
- if (not commit):
- print full_file_name + ' will be edited'
- continue
- for line in fileinput.input(full_file_name, inplace=True):
- line = line.replace('\t',' '*TABSIZE) # replace TABs
- line = line.rstrip(None) # remove trailing whitespace
- print line # modify the file
- if (commit):
- p4commands.revertunchangedfiles()
-
-if __name__ == '__main__':
- main()
diff --git a/tools/refactoring/webrtc_reformat.py b/tools/refactoring/webrtc_reformat.py
deleted file mode 100755
index 269d1c3f6c..0000000000
--- a/tools/refactoring/webrtc_reformat.py
+++ /dev/null
@@ -1,212 +0,0 @@
-#!/usr/bin/python
-# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS. All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-
-"""WebRTC reformat script.
-
-This script is used to reformat WebRTC code from the old code style to Google
-C++ code style. This script does not indent code; use clang-reformat-chrome.py
-as described in go/webrtc/engineering/reformatting-gips---google.
-"""
-
-__author__ = 'mflodman@webrtc.org (Magnus Flodman)'
-
-import fnmatch
-import os
-import re
-import subprocess
-import sys
-
-
-def LowerWord(obj):
- """Helper for DeCamelCase."""
- optional_last_letters = obj.group(3) or ''
- return obj.group(1) + '_' + obj.group(2).lower() + optional_last_letters
-
-
-def DeCamelCase(text):
- """De-camelize variable names.
-
- This function will look at any stringLikeThis and format it in steps. The
- sequence will be stringLikeThis -> string_likeThis -> string_like_this.
- """
- possible_tokens_before_vars = '[ _*\(\&\!\[]'
- pattern = re.compile(r'(?<=' + possible_tokens_before_vars + ')' +
- # Match some lower-case characters
- '([a-z]+)' +
- # Don't match kFoo, !kFoo, [kFoo], etc
- '(?<!' + possible_tokens_before_vars + 'k)' +
- # Match some upper-case characters
- '([A-Z]+)([a-z])?')
- while re.search(pattern, text):
- text = re.sub(pattern, LowerWord, text)
- return text
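-# Illustrative usage (hypothetical value; the pattern requires a separator
-# such as a space or '_' before the variable name):
-#   DeCamelCase(' fooBarBaz') -> ' foo_bar_baz'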
-
-
-def MoveUnderScore(text):
- """Moves the underscore from beginning of variable name to the end."""
- # TODO(mflodman) Replace \1 with ?-expression.
- # We don't want to change macros and #defines though, so don't do anything
- # if the first character is uppercase (normal variables shouldn't have that).
- pattern = r'([ \*\!\&\(\[\]])_(?!_)(?![A-Z])(\w+)'
- return re.sub(pattern, r'\1\2_', text)
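-# Illustrative usage (hypothetical value):
-#   MoveUnderScore(' _count') -> ' count_'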
-
-
-def PostfixToPrefixInForLoops(text):
- """Converts x++ to ++x in the increment part of a for loop."""
- pattern = r'(for \(.*;.*;) (\w+)\+\+\)'
- return re.sub(pattern, r'\1++\2)', text)
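-# Illustrative usage (hypothetical value):
-#   PostfixToPrefixInForLoops('for (int i = 0; i < n; i++)')
-#   -> 'for (int i = 0; i < n; ++i)'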
-
-
-def SortIncludeHeaders(text, filename):
- """Sorts all include headers in alphabetic order.
-
- The file's own header goes first, followed by system headers and then
- project headers. This function will exit if we detect any fancy #ifdef logic
- among the includes - that's a lot harder to sort.
-
- Args:
- text: The file text.
- filename: The file we are reformatting.
-
- Returns:
- The text with includes sorted.
- """
- # Get all includes in file.
- include_pattern = re.compile('#include.+\n')
- includes = re.findall(include_pattern, text)
-
- # Sort system headers and project headers separately.
- sys_includes = []
- project_includes = []
- self_include = ''
- sys_pattern = re.compile('#include <')
- h_filename, _ = os.path.splitext(os.path.basename(filename))
-
- for item in includes:
- if re.search(h_filename + '\.', item):
- self_include = item
- elif re.search(sys_pattern, item):
- sys_includes.append(item)
- else:
- project_includes.append(item)
-
- sys_includes = sorted(sys_includes)
- project_includes = sorted(project_includes)
- headers = (self_include + '\n' + ''.join(sys_includes) + '\n' +
- ''.join(project_includes))
-
- # Replace existing headers with the sorted string.
- text_no_hdrs = re.sub(include_pattern, r'???', text)
-
- # Insert sorted headers unless we detect #ifdefs right next to the headers.
- if re.search(r'(#ifdef|#ifndef|#if).*\s*\?{3,}\s*#endif', text_no_hdrs):
- print 'WARNING: Include headers not sorted in ' + filename
- return text
-
- return_text = re.sub(r'\?{3,}', headers, text_no_hdrs, 1)
- if re.search(r'\?{3,}', text_no_hdrs):
- # Remove possible remaining ???.
- return_text = re.sub(r'\?{3,}', r'', return_text)
-
- return return_text
-
-
-def AddPath(match):
- """Helper for adding file path for WebRTC header files, ignoring other."""
- file_to_examine = match.group(1) + '.h'
- # TODO(mflodman) Use current directory and find webrtc/.
- for path, _, files in os.walk('./webrtc'):
- for filename in files:
- if fnmatch.fnmatch(filename, file_to_examine):
- path_name = os.path.join(path, filename).replace('./', '')
- return '#include "%s"\n' % path_name
-
- # No path found, return original string.
- return '#include "'+ file_to_examine + '"\n'
-
-
-def AddHeaderPath(text):
- """Add path to all included header files that have no path yet."""
- headers = re.compile('#include "(.+).h"\n')
- return re.sub(headers, AddPath, text)
-
-
-def AddWebrtcToOldSrcRelativePath(match):
- file_to_examine = match.group(1) + '.h'
- path, filename = os.path.split(file_to_examine)
- dirs_in_webrtc = [name for name in os.listdir('./webrtc')
- if os.path.isdir(os.path.join('./webrtc', name))]
- for dir_in_webrtc in dirs_in_webrtc:
- if path.startswith(dir_in_webrtc):
- return '#include "%s"\n' % os.path.join('webrtc', path, filename)
- return '#include "%s"\n' % file_to_examine
-
-def AddWebrtcPrefixToOldSrcRelativePaths(text):
- """For all paths starting with for instance video_engine, add webrtc/."""
- headers = re.compile('#include "(.+).h"\n')
- return re.sub(headers, AddWebrtcToOldSrcRelativePath, text)
-
-
-def FixIncludeGuards(text, file_name):
- """Change include guard according to the stantard."""
- # Remove a possible webrtc/ from the path.
- file_name = re.sub(r'(webrtc\/)(.+)', r'\2', file_name)
- new_guard = 'WEBRTC_' + file_name
- new_guard = new_guard.upper()
- new_guard = re.sub(r'([/\.])', r'_', new_guard)
- new_guard += '_'
-
- text = re.sub(r'#ifndef WEBRTC_.+\n', r'#ifndef ' + new_guard + '\n', text, 1)
- text = re.sub(r'#define WEBRTC_.+\n', r'#define ' + new_guard + '\n', text, 1)
- text = re.sub(r'#endif *\/\/ *WEBRTC_.+\n', r'#endif // ' + new_guard + '\n',
- text, 1)
-
- return text
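-# Illustrative example (hypothetical file name): for 'webrtc/base/thread.h'
-# the new guard becomes WEBRTC_BASE_THREAD_H_.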
-
-
-def SaveFile(filename, text):
- os.remove(filename)
- f = open(filename, 'w')
- f.write(text)
- f.close()
-
-
-def main():
- args = sys.argv[1:]
- if not args:
- print 'Usage: %s <filename>' % sys.argv[0]
- sys.exit(1)
-
- for filename in args:
- f = open(filename)
- text = f.read()
- f.close()
-
- text = DeCamelCase(text)
- text = MoveUnderScore(text)
- text = PostfixToPrefixInForLoops(text)
- text = AddHeaderPath(text)
- text = AddWebrtcPrefixToOldSrcRelativePaths(text)
- text = SortIncludeHeaders(text, filename)
-
- # Remove the original file and re-create it with the reformatted content.
- SaveFile(filename, text)
-
- if filename.endswith('.h'):
- f = open(filename)
- text = f.read()
- f.close()
- text = FixIncludeGuards(text, filename)
- SaveFile(filename, text)
-
- print filename + ' done.'
-
-
-if __name__ == '__main__':
- main()
diff --git a/tools/sslroots/generate_sslroots.py b/tools/sslroots/generate_sslroots.py
new file mode 100644
index 0000000000..65751f1f1a
--- /dev/null
+++ b/tools/sslroots/generate_sslroots.py
@@ -0,0 +1,190 @@
+# -*- coding:utf-8 -*-
+# Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+
+"""This is a tool to transform a crt file into a C/C++ header.
+
+Usage:
+generate_sslroots.py cert_file.crt [--verbose | -v] [--full_cert | -f]
+
+Arguments:
+ -v Print output while running.
+ -f Add public key and certificate name. Default is to skip and reduce
+ generated file size.
+"""
+
+import commands
+from optparse import OptionParser
+import os
+import re
+import string
+
+_GENERATED_FILE = 'sslroots.h'
+_PREFIX = '__generated__'
+_EXTENSION = '.crt'
+_SUBJECT_NAME_ARRAY = 'subject_name'
+_SUBJECT_NAME_VARIABLE = 'SubjectName'
+_PUBLIC_KEY_ARRAY = 'public_key'
+_PUBLIC_KEY_VARIABLE = 'PublicKey'
+_CERTIFICATE_ARRAY = 'certificate'
+_CERTIFICATE_VARIABLE = 'Certificate'
+_CERTIFICATE_SIZE_VARIABLE = 'CertificateSize'
+_INT_TYPE = 'size_t'
+_CHAR_TYPE = 'const unsigned char*'
+_VERBOSE = 'verbose'
+
+
+def main():
+ """The main entrypoint."""
+ parser = OptionParser('usage: %prog FILE')
+ parser.add_option('-v', '--verbose', dest='verbose', action='store_true')
+ parser.add_option('-f', '--full_cert', dest='full_cert', action='store_true')
+ options, args = parser.parse_args()
+ if len(args) < 1:
+ parser.error('No crt file specified.')
+ return
+ root_dir = _SplitCrt(args[0], options)
+ _GenCFiles(root_dir, options)
+ _Cleanup(root_dir)
+
+
+def _SplitCrt(source_file, options):
+ sub_file_blocks = []
+ label_name = ''
+ root_dir = os.path.dirname(os.path.abspath(source_file)) + '/'
+ _PrintOutput(root_dir, options)
+ f = open(source_file)
+ for line in f:
+ if line.startswith('# Label: '):
+ sub_file_blocks.append(line)
+ label = re.search(r'\".*\"', line)
+ temp_label = label.group(0)
+ end = len(temp_label)-1
+ label_name = _SafeName(temp_label[1:end])
+ elif line.startswith('-----END CERTIFICATE-----'):
+ sub_file_blocks.append(line)
+ new_file_name = root_dir + _PREFIX + label_name + _EXTENSION
+ _PrintOutput('Generating: ' + new_file_name, options)
+ new_file = open(new_file_name, 'w')
+ for out_line in sub_file_blocks:
+ new_file.write(out_line)
+ new_file.close()
+ sub_file_blocks = []
+ else:
+ sub_file_blocks.append(line)
+ f.close()
+ return root_dir
+
+
+def _GenCFiles(root_dir, options):
+ output_header_file = open(root_dir + _GENERATED_FILE, 'w')
+ output_header_file.write(_CreateOutputHeader())
+ if options.full_cert:
+ subject_name_list = _CreateArraySectionHeader(_SUBJECT_NAME_VARIABLE,
+ _CHAR_TYPE, options)
+ public_key_list = _CreateArraySectionHeader(_PUBLIC_KEY_VARIABLE,
+ _CHAR_TYPE, options)
+ certificate_list = _CreateArraySectionHeader(_CERTIFICATE_VARIABLE,
+ _CHAR_TYPE, options)
+ certificate_size_list = _CreateArraySectionHeader(_CERTIFICATE_SIZE_VARIABLE,
+ _INT_TYPE, options)
+
+ for _, _, files in os.walk(root_dir):
+ for current_file in files:
+ if current_file.startswith(_PREFIX):
+ prefix_length = len(_PREFIX)
+ length = len(current_file) - len(_EXTENSION)
+ label = current_file[prefix_length:length]
+ filtered_output, cert_size = _CreateCertSection(root_dir, current_file,
+ label, options)
+ output_header_file.write(filtered_output + '\n\n\n')
+ if options.full_cert:
+ subject_name_list += _AddLabelToArray(label, _SUBJECT_NAME_ARRAY)
+ public_key_list += _AddLabelToArray(label, _PUBLIC_KEY_ARRAY)
+ certificate_list += _AddLabelToArray(label, _CERTIFICATE_ARRAY)
+ certificate_size_list += (' %s,\n') %(cert_size)
+
+ if options.full_cert:
+ subject_name_list += _CreateArraySectionFooter()
+ output_header_file.write(subject_name_list)
+ public_key_list += _CreateArraySectionFooter()
+ output_header_file.write(public_key_list)
+ certificate_list += _CreateArraySectionFooter()
+ output_header_file.write(certificate_list)
+ certificate_size_list += _CreateArraySectionFooter()
+ output_header_file.write(certificate_size_list)
+ output_header_file.close()
+
+
+def _Cleanup(root_dir):
+ for f in os.listdir(root_dir):
+ if f.startswith(_PREFIX):
+ os.remove(root_dir + f)
+
+
+def _CreateCertSection(root_dir, source_file, label, options):
+ command = 'openssl x509 -in %s%s -noout -C' %(root_dir, source_file)
+ _PrintOutput(command, options)
+ output = commands.getstatusoutput(command)[1]
+ renamed_output = output.replace('unsigned char XXX_',
+ 'const unsigned char ' + label + '_')
+ filtered_output = ''
+ cert_block = '^const unsigned char.*?};$'
+ prog = re.compile(cert_block, re.IGNORECASE | re.MULTILINE | re.DOTALL)
+ if not options.full_cert:
+ filtered_output = prog.sub('', renamed_output, count=2)
+ else:
+ filtered_output = renamed_output
+
+ cert_size_block = r'\d\d\d+'
+ prog2 = re.compile(cert_size_block, re.MULTILINE | re.VERBOSE)
+ result = prog2.findall(renamed_output)
+ cert_size = result[len(result) - 1]
+
+ return filtered_output, cert_size
+
+
+def _CreateOutputHeader():
+ output = ('// This file contains the root certificates in C form that are'
+ ' needed to connect to\n// Google.\n\n'
+ '// It was generated with the following command line:\n'
+ '// > python tools/sslroots/generate_sslroots.py'
+ '\n// https://pki.google.com/roots.pem\n\n')
+ return output
+
+
+def _CreateArraySectionHeader(type_name, type_type, options):
+ output = ('const %s kSSLCert%sList[] = {\n') %(type_type, type_name)
+ _PrintOutput(output, options)
+ return output
+
+
+def _AddLabelToArray(label, type_name):
+ return ' %s_%s,\n' %(label, type_name)
+
+
+def _CreateArraySectionFooter():
+ return '};\n\n'
+
+
+def _SafeName(original_file_name):
+ bad_chars = ' -./\\()áéíőú'
+ replacement_chars = ''
+ for _ in bad_chars:
+ replacement_chars += '_'
+ translation_table = string.maketrans(bad_chars, replacement_chars)
+ return original_file_name.translate(translation_table)
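+# Illustrative usage (hypothetical value; each bad character maps to '_'):
+#   _SafeName('VeriSign Class 3 Public CA') -> 'VeriSign_Class_3_Public_CA'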
+
+
+def _PrintOutput(output, options):
+ if options.verbose:
+ print output
+
+if __name__ == '__main__':
+ main()
diff --git a/tools/valgrind-webrtc/drmemory/suppressions.txt b/tools/valgrind-webrtc/drmemory/suppressions.txt
index 2addea53cf..1b0626c7ab 100644
--- a/tools/valgrind-webrtc/drmemory/suppressions.txt
+++ b/tools/valgrind-webrtc/drmemory/suppressions.txt
@@ -2,22 +2,6 @@
# It acts as a place holder for future additions for WebRTC.
# It must exist for the Python wrapper script to work properly.
-INVALID HEAP ARGUMENT
-name=https://code.google.com/p/webrtc/issues/detail?id=2321 (1)
-drmemorylib.dll!replace_operator_delete_array
-*!webrtc::scoped_array<short>::~scoped_array<short>
-*!webrtc::PushResampler::~PushResampler
-...
-*!testing::internal::HandleSehExceptionsInMethodIfSupported<testing::Test,void>
-
-INVALID HEAP ARGUMENT
-name=https://code.google.com/p/webrtc/issues/detail?id=2321 (2)
-drmemorylib.dll!replace_operator_delete_array
-*!webrtc::scoped_array<float>::~scoped_array<float>
-*!webrtc::PushSincResampler::~PushSincResampler
-...
-*!testing::internal::HandleSehExceptionsInMethodIfSupported<testing::Test,void>
-
GDI USAGE ERROR
name=https://code.google.com/p/webrtc/issues/detail?id=2323 (1)
system call NtGdiDeleteObjectApp
@@ -90,25 +74,6 @@ drmemorylib.dll!replace_operator_new
*!webrtc::test::UdpSocketManager_AddAndRemoveSocketDoesNotLeakMemory_Test::TestBody
*!testing::internal::HandleSehExceptionsInMethodIfSupported<testing::Test,void>
-INVALID HEAP ARGUMENT
-name=https://code.google.com/p/webrtc/issues/detail?id=2515 (1)
-drmemorylib.dll!replace_operator_delete_nothrow
-*!webrtc::scoped_array<short>::~scoped_array<short>
-*!webrtc::NetEqImpl::~NetEqImpl
-*!webrtc::NetEqImpl::`scalar deleting destructor'
-...
-*!testing::internal::HandleSehExceptionsInMethodIfSupported<testing::Test,void>
-
-INVALID HEAP ARGUMENT
-name=https://code.google.com/p/webrtc/issues/detail?id=2515 (2)
-drmemorylib.dll!replace_operator_delete_nothrow
-*!webrtc::scoped_array<short>::reset
-*!webrtc::NetEqImpl::SetSampleRateAndChannels
-*!webrtc::NetEqImpl::InsertPacketInternal
-*!webrtc::NetEqImpl::InsertPacket
-...
-*!testing::internal::HandleSehExceptionsInMethodIfSupported<testing::Test,void>
-
UNINITIALIZED READ
name=https://code.google.com/p/webrtc/issues/detail?id=2516
system call NtUserGetThreadDesktop parameter value #1
diff --git a/tools/valgrind-webrtc/gtest_exclude/libjingle_peerconnection_unittest.gtest-drmemory_win32.txt b/tools/valgrind-webrtc/gtest_exclude/libjingle_peerconnection_unittest.gtest-drmemory_win32.txt
index d41c231cf6..d041dbd526 100644
--- a/tools/valgrind-webrtc/gtest_exclude/libjingle_peerconnection_unittest.gtest-drmemory_win32.txt
+++ b/tools/valgrind-webrtc/gtest_exclude/libjingle_peerconnection_unittest.gtest-drmemory_win32.txt
@@ -1,7 +1,7 @@
# Flakily fails or crashes on Dr Memory Full.
# https://code.google.com/p/webrtc/issues/detail?id=3158
DtmfSenderTest.*
-JsepPeerConnectionP2PTestClient.*
+P2PTestConductor.*
PeerConnectionEndToEndTest.*
PeerConnectionInterfaceTest.*
# Issue 3453
diff --git a/tools/valgrind-webrtc/gtest_exclude/libjingle_peerconnection_unittest.gtest-memcheck.txt b/tools/valgrind-webrtc/gtest_exclude/libjingle_peerconnection_unittest.gtest-memcheck.txt
index 40974a2084..9cf29b8161 100644
--- a/tools/valgrind-webrtc/gtest_exclude/libjingle_peerconnection_unittest.gtest-memcheck.txt
+++ b/tools/valgrind-webrtc/gtest_exclude/libjingle_peerconnection_unittest.gtest-memcheck.txt
@@ -1,6 +1,6 @@
# Tests that are failing when run under memcheck.
# https://code.google.com/p/webrtc/issues/detail?id=4387
DtmfSenderTest.*
-JsepPeerConnectionP2PTestClient.*
+P2PTestConductor.*
PeerConnectionEndToEndTest.*
PeerConnectionInterfaceTest.*
diff --git a/tools/valgrind-webrtc/gtest_exclude/modules_tests.gtest-drmemory.txt b/tools/valgrind-webrtc/gtest_exclude/modules_tests.gtest-drmemory.txt
index e4f3ae2412..1642e3cf0e 100644
--- a/tools/valgrind-webrtc/gtest_exclude/modules_tests.gtest-drmemory.txt
+++ b/tools/valgrind-webrtc/gtest_exclude/modules_tests.gtest-drmemory.txt
@@ -8,6 +8,5 @@ AudioCodingModuleTest.TestStereo*
AudioCodingModuleTest.TestVADDTX*
AudioCodingModuleTest.TestOpus*
FecTest.FecTest
-TestVp8Impl.BaseUnitTest
VideoProcessorIntegrationTest.ProcessNoLossChangeBitRateVP8
VideoProcessorIntegrationTest.*VP9
diff --git a/tools/valgrind-webrtc/gtest_exclude/rtc_unittests.gtest-drmemory.txt b/tools/valgrind-webrtc/gtest_exclude/rtc_unittests.gtest-drmemory.txt
index 092b785a74..888062773d 100644
--- a/tools/valgrind-webrtc/gtest_exclude/rtc_unittests.gtest-drmemory.txt
+++ b/tools/valgrind-webrtc/gtest_exclude/rtc_unittests.gtest-drmemory.txt
@@ -5,3 +5,6 @@ PortAllocatorTest.*
PortTest.*
PseudoTcpTest.TestSendBothUseLargeWindowScale
SharedExclusiveLockTest.TestSharedShared
+# Fails on Dr Memory Light.
+# https://bugs.chromium.org/p/webrtc/issues/detail?id=5199
+ThreadTest.ThreeThreadsInvoke
diff --git a/tools/valgrind-webrtc/gtest_exclude/system_wrappers_unittests.gtest-drmemory_win32.txt b/tools/valgrind-webrtc/gtest_exclude/system_wrappers_unittests.gtest-drmemory_win32.txt
index 0bceb97c9d..a3344c70e5 100755..100644
--- a/tools/valgrind-webrtc/gtest_exclude/system_wrappers_unittests.gtest-drmemory_win32.txt
+++ b/tools/valgrind-webrtc/gtest_exclude/system_wrappers_unittests.gtest-drmemory_win32.txt
@@ -1,3 +1,4 @@
-# https://code.google.com/p/webrtc/issues/detail?id=2330
-ClockTest.NtpTime
-
+# https://code.google.com/p/webrtc/issues/detail?id=2330
+ClockTest.NtpTime
+CritSectTest.ThreadWakesOnce
+CritSectTest.ThreadWakesTwice
diff --git a/tools/valgrind-webrtc/gtest_exclude/video_engine_tests.gtest-drmemory_win32.txt b/tools/valgrind-webrtc/gtest_exclude/video_engine_tests.gtest-drmemory_win32.txt
index 309fd405b2..c4d953aef3 100644
--- a/tools/valgrind-webrtc/gtest_exclude/video_engine_tests.gtest-drmemory_win32.txt
+++ b/tools/valgrind-webrtc/gtest_exclude/video_engine_tests.gtest-drmemory_win32.txt
@@ -3,14 +3,23 @@
EndToEndTest.CanSwitchToUseAllSsrcs
EndToEndTest.SendsAndReceivesMultipleStreams
EndToEndTest.ReceivesAndRetransmitsNack
-# https://code.google.com/p/webrtc/issues/detail?id=3471
-VideoSendStreamTest.RetransmitsNackOverRtxWithPacing
+EndToEndTest.ReceivesTransportFeedback
# Flaky: https://code.google.com/p/webrtc/issues/detail?id=3552
EndToEndTest.RestartingSendStreamPreservesRtpState
EndToEndTest.RestartingSendStreamPreservesRtpStatesWithRtx
EndToEndTest.SendsAndReceivesH264
EndToEndTest.SendsAndReceivesVP9
+EndToEndTest.TransportFeedbackNotConfigured
+EndToEndTest.TransportSeqNumOnAudioAndVideo
VideoSendStreamTest.CanReconfigureToUseStartBitrateAbovePreviousMax
VideoSendStreamTest.ReconfigureBitratesSetsEncoderBitratesCorrectly
+# https://code.google.com/p/webrtc/issues/detail?id=5417
+VideoSendStreamTest.Vp9NonFlexMode_2Tl2SLayers
+VideoSendStreamTest.Vp9NonFlexMode_3Tl2SLayers
+VideoSendStreamTest.Vp9FlexModeRefCount
# https://code.google.com/p/webrtc/issues/detail?id=4979
EndToEndTest.AssignsTransportSequenceNumbers
+# Flaky: https://bugs.chromium.org/p/webrtc/issues/detail?id=5225
+BitrateEstimatorTest.SwitchesToASTThenBackToTOFForVideo
+# https://bugs.chromium.org/p/webrtc/issues/detail?id=5312
+RtcEventLogTest.DropOldEvents
diff --git a/tools/valgrind-webrtc/gtest_exclude/video_engine_tests.gtest-memcheck.txt b/tools/valgrind-webrtc/gtest_exclude/video_engine_tests.gtest-memcheck.txt
index 57387dcb2e..6faf218d35 100644
--- a/tools/valgrind-webrtc/gtest_exclude/video_engine_tests.gtest-memcheck.txt
+++ b/tools/valgrind-webrtc/gtest_exclude/video_engine_tests.gtest-memcheck.txt
@@ -6,3 +6,5 @@ VideoSendStreamTest.VP9FlexMode
# Flaky under memcheck (WebRTC issue 5134)
EndToEndTest.AssignsTransportSequenceNumbers
+# https://bugs.chromium.org/p/webrtc/issues/detail?id=5312
+RtcEventLogTest.DropOldEvents
diff --git a/tools/valgrind-webrtc/memcheck/suppressions.txt b/tools/valgrind-webrtc/memcheck/suppressions.txt
index 6291df20dd..9919522e83 100644
--- a/tools/valgrind-webrtc/memcheck/suppressions.txt
+++ b/tools/valgrind-webrtc/memcheck/suppressions.txt
@@ -232,159 +232,6 @@
}
{
- bug_332_1
- Memcheck:Uninitialized
- ...
- fun:_ZN6webrtc11VoEBaseImpl16NeedMorePlayDataEjhhjPvRj
- fun:_ZN6webrtc17AudioDeviceBuffer18RequestPlayoutDataEj
- fun:_ZN6webrtc21AudioDeviceLinuxPulse17PlayThreadProcessEv
- fun:_ZN6webrtc21AudioDeviceLinuxPulse14PlayThreadFuncEPv
- fun:_ZN6webrtc11ThreadPosix3RunEv
- fun:StartThread
-}
-
-{
- bug_332_2
- Memcheck:Unaddressable
- fun:memcpy@@GLIBC_2.14
- fun:_ZN6webrtc21AudioDeviceLinuxPulse16ReadRecordedDataEPKvm
- fun:_ZN6webrtc21AudioDeviceLinuxPulse16RecThreadProcessEv
- fun:_ZN6webrtc21AudioDeviceLinuxPulse13RecThreadFuncEPv
- fun:_ZN6webrtc11ThreadPosix3RunEv
- fun:StartThread
-}
-
-{
- bug_332_3
- Memcheck:Uninitialized
- fun:_ZN6webrtc11RTCPUtility21RTCPParseCommonHeaderEPKhS2_RNS0_16RTCPCommonHeaderE
- ...
- fun:_ZN6webrtc12RTCPReceiver10HandleSDESERNS_11RTCPUtility12RTCPParserV2E
- fun:_ZN6webrtc12RTCPReceiver18IncomingRTCPPacketERNS_8RTCPHelp21RTCPPacketInformationEPNS_11RTCPUtility12RTCPParserV2E
- fun:_ZN6webrtc17ModuleRtpRtcpImpl14IncomingPacketEPKht
- fun:_ZN6webrtc3voe7Channel18IncomingRTCPPacketEPKaiPKct
- fun:_ZN6webrtc16UdpTransportImpl20IncomingRTCPFunctionEPKaiPKNS_13SocketAddressE
- fun:_ZN6webrtc16UdpTransportImpl20IncomingRTCPCallbackEPvPKaiPKNS_13SocketAddressE
- fun:_ZN6webrtc14UdpSocketPosix11HasIncomingEv
- fun:_ZN6webrtc25UdpSocketManagerPosixImpl7ProcessEv
- fun:_ZN6webrtc25UdpSocketManagerPosixImpl3RunEPv
- fun:_ZN6webrtc11ThreadPosix3RunEv
- fun:StartThread
-}
-
-{
- bug_332_4
- Memcheck:Uninitialized
- ...
- fun:_ZN6webrtc3voe10AudioLevel12ComputeLevelERKNS_10AudioFrameE
- fun:_ZN6webrtc3voe11OutputMixer28DoOperationsOnCombinedSignalEv
- fun:_ZN6webrtc11VoEBaseImpl16NeedMorePlayDataEjhhjPvRj
- fun:_ZN6webrtc17AudioDeviceBuffer18RequestPlayoutDataEj
- fun:_ZN6webrtc21AudioDeviceLinuxPulse17PlayThreadProcessEv
- fun:_ZN6webrtc21AudioDeviceLinuxPulse14PlayThreadFuncEPv
- fun:_ZN6webrtc11ThreadPosix3RunEv
- fun:StartThread
-}
-
-{
- bug_332_5
- Memcheck:Uninitialized
- fun:WebRtcSpl_UpBy2ShortToInt
- fun:WebRtcSpl_Resample8khzTo22khz
- fun:_ZN6webrtc9Resampler4PushEPKsiPsiRi
- fun:_ZN6webrtc3voe16RemixAndResampleERKNS_10AudioFrameEPNS_9ResamplerEPS1_
- fun:_ZN6webrtc3voe11OutputMixer13GetMixedAudioEiiPNS_10AudioFrameE
- fun:_ZN6webrtc11VoEBaseImpl16NeedMorePlayDataEjhhjPvRj
- fun:_ZN6webrtc17AudioDeviceBuffer18RequestPlayoutDataEj
- fun:_ZN6webrtc21AudioDeviceLinuxPulse17PlayThreadProcessEv
- fun:_ZN6webrtc21AudioDeviceLinuxPulse14PlayThreadFuncEPv
- fun:_ZN6webrtc11ThreadPosix3RunEv
- fun:StartThread
-}
-
-{
- bug 332_6
- Memcheck:Param
- socketcall.sendto(msg)
- obj:*libpthread-*.so
- fun:_ZN6webrtc14UdpSocketPosix6SendToEPKaiRKNS_13SocketAddressE
- fun:_ZN6webrtc16UdpTransportImpl14SendRTCPPacketEiPKvi
- fun:_ZN6webrtc3voe7Channel14SendRTCPPacketEiPKvi
- ...
- fun:_ZN6webrtc17ModuleRtpRtcpImpl14IncomingPacketEPKht
- fun:_ZN6webrtc3voe7Channel17IncomingRTPPacketEPKaiPKct
- fun:_ZN6webrtc16UdpTransportImpl19IncomingRTPFunctionEPKaiPKNS_13SocketAddressE
- fun:_ZN6webrtc16UdpTransportImpl19IncomingRTPCallbackEPvPKaiPKNS_13SocketAddressE
- fun:_ZN6webrtc14UdpSocketPosix11HasIncomingEv
- fun:_ZN6webrtc25UdpSocketManagerPosixImpl7ProcessEv
- fun:_ZN6webrtc25UdpSocketManagerPosixImpl3RunEPv
- fun:_ZN6webrtc11ThreadPosix3RunEv
- fun:StartThread
-}
-
-
-{
- bug_332_7
- Memcheck:Param
- socketcall.sendto(msg)
- obj:*libpthread-*.so
- fun:_ZN6webrtc14UdpSocketPosix6SendToEPKaiRKNS_13SocketAddressE
- fun:_ZN6webrtc16UdpTransportImpl10SendPacketEiPKvi
- ...
- fun:_ZN6webrtc21AudioDeviceLinuxPulse16ReadRecordedDataEPKvm
- fun:_ZN6webrtc21AudioDeviceLinuxPulse16RecThreadProcessEv
- fun:_ZN6webrtc21AudioDeviceLinuxPulse13RecThreadFuncEPv
- fun:_ZN6webrtc11ThreadPosix3RunEv
- fun:StartThread
-}
-
-{
- bug_332_8
- Memcheck:Param
- socketcall.sendto(msg)
- obj:*libpthread-*.so
- fun:_ZN6webrtc14UdpSocketPosix6SendToEPKaiRKNS_13SocketAddressE
- fun:_ZN6webrtc16UdpTransportImpl14SendRTCPPacketEiPKvi
- fun:_ZN6webrtc3voe7Channel14SendRTCPPacketEiPKvi
- fun:_ZN6webrtc10RTCPSender13SendToNetworkEPKht
- fun:_ZN6webrtc10RTCPSender8SendRTCPEjiPKtbm
- fun:_ZN6webrtc17ModuleRtpRtcpImpl7ProcessEv
- fun:_ZN6webrtc17ProcessThreadImpl7ProcessEv
- fun:_ZN6webrtc17ProcessThreadImpl3RunEPv
- fun:_ZN6webrtc11ThreadPosix3RunEv
- fun:StartThread
-}
-
-{
- bug_332_9
- Memcheck:Uninitialized
- ...
- fun:_ZN6webrtc17AudioDeviceBuffer19DeliverRecordedDataEv
- fun:_ZN6webrtc21AudioDeviceLinuxPulse19ProcessRecordedDataEPajj
- fun:_ZN6webrtc21AudioDeviceLinuxPulse16ReadRecordedDataEPKvm
- fun:_ZN6webrtc21AudioDeviceLinuxPulse16RecThreadProcessEv
- fun:_ZN6webrtc21AudioDeviceLinuxPulse13RecThreadFuncEPv
- fun:_ZN6webrtc11ThreadPosix3RunEv
- fun:StartThread
-}
-
-{
- bug_332_10
- Memcheck:Uninitialized
- ...
- fun:_ZN6webrtc12RTCPReceiver18IncomingRTCPPacketERNS_8RTCPHelp21RTCPPacketInformationEPNS_11RTCPUtility12RTCPParserV2E
- fun:_ZN6webrtc17ModuleRtpRtcpImpl14IncomingPacketEPKht
- fun:_ZN6webrtc3voe7Channel18IncomingRTCPPacketEPKaiPKct
- fun:_ZN6webrtc16UdpTransportImpl20IncomingRTCPFunctionEPKaiPKNS_13SocketAddressE
- fun:_ZN6webrtc16UdpTransportImpl20IncomingRTCPCallbackEPvPKaiPKNS_13SocketAddressE
- fun:_ZN6webrtc14UdpSocketPosix11HasIncomingEv
- fun:_ZN6webrtc25UdpSocketManagerPosixImpl7ProcessEv
- fun:_ZN6webrtc25UdpSocketManagerPosixImpl3RunEPv
- fun:_ZN6webrtc11ThreadPosix3RunEv
- fun:StartThread
-}
-
-{
bug_891
Memcheck:Unaddressable
fun:XShmPutImage
diff --git a/webrtc/.gitignore b/webrtc/.gitignore
index dade989c6e..89cb6dbe09 100644
--- a/webrtc/.gitignore
+++ b/webrtc/.gitignore
@@ -1 +1,27 @@
-# This file is for projects that checkout webrtc/ directly (e.g. Chromium).
+# This file is for projects that checkout webrtc/ directly (e.g. Chromium). It
+# is a truncated copy of the .gitignore file in the parent directory.
+*.DS_Store
+*.Makefile
+*.ncb
+*.ninja
+*.props
+*.pyc
+*.rules
+*.scons
+*.sdf
+*.sln
+*.suo
+*.targets
+*.user
+*.vcproj
+*.vcxproj
+*.vcxproj.filters
+*.vpj
+*.vpw
+*.vpwhistu
+*.vtg
+*.xcodeproj
+*_proto.xml
+*_proto_cpp.xml
+*~
+.*.sw?
diff --git a/webrtc/BUILD.gn b/webrtc/BUILD.gn
index ac14d7d528..be824b7bc7 100644
--- a/webrtc/BUILD.gn
+++ b/webrtc/BUILD.gn
@@ -8,8 +8,8 @@
# TODO(kjellander): Rebase this to webrtc/build/common.gypi changes after r6330.
-import("//build/config/crypto.gni")
import("//build/config/linux/pkg_config.gni")
+import("//build/config/sanitizers/sanitizers.gni")
import("build/webrtc.gni")
import("//third_party/protobuf/proto_library.gni")
@@ -178,8 +178,8 @@ source_set("webrtc") {
public_configs = [ ":common_inherited_config" ]
deps = [
- "audio",
":webrtc_common",
+ "audio",
"base:rtc_base",
"call",
"common_audio",
@@ -219,8 +219,8 @@ if (!build_with_chromium) {
testonly = true
deps = [
":webrtc",
- "modules/video_render:video_render_internal_impl",
"modules/video_capture:video_capture_internal_impl",
+ "modules/video_render:video_render_internal_impl",
"test",
]
}
@@ -238,6 +238,12 @@ source_set("webrtc_common") {
configs += [ ":common_config" ]
public_configs = [ ":common_inherited_config" ]
+
+ if (is_clang && !is_nacl) {
+ # Suppress warnings from Chrome's Clang plugins.
+ # See http://code.google.com/p/webrtc/issues/detail?id=163 for details.
+ configs -= [ "//build/config/clang:find_bad_constructs" ]
+ }
}
source_set("gtest_prod") {
@@ -279,3 +285,14 @@ source_set("rtc_event_log") {
configs -= [ "//build/config/clang:find_bad_constructs" ]
}
}
+
+if (use_libfuzzer || use_drfuzz) {
+ # This target is only here for gn to discover fuzzer build targets under
+ # webrtc/test/fuzzers/.
+ group("webrtc_fuzzers_dummy") {
+ testonly = true
+ deps = [
+ "test/fuzzers:webrtc_fuzzer_main",
+ ]
+ }
+}
diff --git a/webrtc/api/BUILD.gn b/webrtc/api/BUILD.gn
new file mode 100644
index 0000000000..7cfa083a6b
--- /dev/null
+++ b/webrtc/api/BUILD.gn
@@ -0,0 +1,76 @@
+# Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("../build/webrtc.gni")
+
+config("ios_config") {
+ libs = [
+ "CoreGraphics.framework",
+ "GLKit.framework",
+ "OpenGLES.framework",
+ "QuartzCore.framework",
+ ]
+}
+
+if (is_ios) {
+ source_set("rtc_api_objc") {
+ deps = [
+ "//webrtc/base:rtc_base_objc",
+ #"//talk/libjingle:libjingle_peerconnection",
+ ]
+ cflags = [
+ "-fobjc-arc",
+ "-Wobjc-missing-property-synthesis",
+ ]
+ sources = [
+ # Add these when there's a BUILD.gn for peer connection APIs
+ #"objc/RTCIceCandidate+Private.h",
+ #"objc/RTCIceCandidate.h",
+ #"objc/RTCIceCandidate.mm",
+ #"objc/RTCMediaSource+Private.h",
+ #"objc/RTCMediaSource.h",
+ #"objc/RTCMediaSource.mm",
+ #"objc/RTCMediaStreamTrack+Private.h",
+ #"objc/RTCMediaStreamTrack.h",
+ #"objc/RTCMediaStreamTrack.mm",
+ "objc/RTCIceServer+Private.h",
+ "objc/RTCIceServer.h",
+ "objc/RTCIceServer.mm",
+ "objc/RTCMediaConstraints+Private.h",
+ "objc/RTCMediaConstraints.h",
+ "objc/RTCMediaConstraints.mm",
+ "objc/RTCOpenGLVideoRenderer.h",
+ "objc/RTCOpenGLVideoRenderer.mm",
+ "objc/RTCSessionDescription+Private.h",
+ "objc/RTCSessionDescription.h",
+ "objc/RTCSessionDescription.mm",
+ "objc/RTCStatsReport+Private.h",
+ "objc/RTCStatsReport.h",
+ "objc/RTCStatsReport.mm",
+ "objc/RTCVideoFrame+Private.h",
+ "objc/RTCVideoFrame.h",
+ "objc/RTCVideoFrame.mm",
+ "objc/RTCVideoRenderer.h",
+ "objc/WebRTC-Prefix.pch",
+ ]
+
+ if (is_ios) {
+ sources += [
+ "objc/RTCEAGLVideoView.h",
+ "objc/RTCEAGLVideoView.m",
+ ]
+ }
+
+ if (is_mac) {
+ sources += [
+ "objc/RTCNSGLVideoView.h",
+ "objc/RTCNSGLVideoView.m",
+ ]
+ }
+ }
+}
diff --git a/webrtc/api/OWNERS b/webrtc/api/OWNERS
new file mode 100644
index 0000000000..cd06158b7f
--- /dev/null
+++ b/webrtc/api/OWNERS
@@ -0,0 +1 @@
+tkchin@webrtc.org
diff --git a/webrtc/api/api.gyp b/webrtc/api/api.gyp
new file mode 100644
index 0000000000..ba3fe8d0bd
--- /dev/null
+++ b/webrtc/api/api.gyp
@@ -0,0 +1,83 @@
+# Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+ 'includes': [ '../build/common.gypi', ],
+ 'conditions': [
+ ['OS=="ios"', {
+ 'targets': [
+ {
+ 'target_name': 'rtc_api_objc',
+ 'type': 'static_library',
+ 'dependencies': [
+ '<(webrtc_root)/base/base.gyp:rtc_base_objc',
+ '../../talk/libjingle.gyp:libjingle_peerconnection',
+ ],
+ 'sources': [
+ 'objc/RTCIceCandidate+Private.h',
+ 'objc/RTCIceCandidate.h',
+ 'objc/RTCIceCandidate.mm',
+ 'objc/RTCIceServer+Private.h',
+ 'objc/RTCIceServer.h',
+ 'objc/RTCIceServer.mm',
+ 'objc/RTCMediaConstraints+Private.h',
+ 'objc/RTCMediaConstraints.h',
+ 'objc/RTCMediaConstraints.mm',
+ 'objc/RTCMediaSource+Private.h',
+ 'objc/RTCMediaSource.h',
+ 'objc/RTCMediaSource.mm',
+ 'objc/RTCMediaStreamTrack+Private.h',
+ 'objc/RTCMediaStreamTrack.h',
+ 'objc/RTCMediaStreamTrack.mm',
+ 'objc/RTCOpenGLVideoRenderer.h',
+ 'objc/RTCOpenGLVideoRenderer.mm',
+ 'objc/RTCSessionDescription+Private.h',
+ 'objc/RTCSessionDescription.h',
+ 'objc/RTCSessionDescription.mm',
+ 'objc/RTCStatsReport+Private.h',
+ 'objc/RTCStatsReport.h',
+ 'objc/RTCStatsReport.mm',
+ 'objc/RTCVideoFrame+Private.h',
+ 'objc/RTCVideoFrame.h',
+ 'objc/RTCVideoFrame.mm',
+ 'objc/RTCVideoRenderer.h',
+ ],
+ 'conditions': [
+ ['OS=="ios"', {
+ 'sources': [
+ 'objc/RTCEAGLVideoView.h',
+ 'objc/RTCEAGLVideoView.m',
+ ],
+ 'all_dependent_settings': {
+ 'xcode_settings': {
+ 'OTHER_LDFLAGS': [
+ '-framework CoreGraphics',
+ '-framework GLKit',
+ '-framework OpenGLES',
+ '-framework QuartzCore',
+ ]
+ }
+ }
+ }],
+ ['OS=="mac"', {
+ 'sources': [
+ 'objc/RTCNSGLVideoView.h',
+ 'objc/RTCNSGLVideoView.m',
+ ],
+ }],
+ ],
+ 'xcode_settings': {
+ 'CLANG_ENABLE_OBJC_ARC': 'YES',
+ 'CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS': 'YES',
+ 'GCC_PREFIX_HEADER': 'objc/WebRTC-Prefix.pch',
+ },
+ }
+ ],
+ }], # OS=="ios"
+ ],
+}
diff --git a/webrtc/api/api_tests.gyp b/webrtc/api/api_tests.gyp
new file mode 100644
index 0000000000..c2c18bc693
--- /dev/null
+++ b/webrtc/api/api_tests.gyp
@@ -0,0 +1,40 @@
+# Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+ 'includes': [ '../build/common.gypi', ],
+ 'conditions': [
+ ['OS=="ios"', {
+ 'targets': [
+ {
+ 'target_name': 'rtc_api_objc_test',
+ 'type': 'executable',
+ 'dependencies': [
+ '<(webrtc_root)/api/api.gyp:rtc_api_objc',
+ '<(webrtc_root)/base/base_tests.gyp:rtc_base_tests_utils',
+ ],
+ 'sources': [
+ 'objctests/RTCIceCandidateTest.mm',
+ 'objctests/RTCIceServerTest.mm',
+ 'objctests/RTCMediaConstraintsTest.mm',
+ 'objctests/RTCSessionDescriptionTest.mm',
+ ],
+ 'xcode_settings': {
+ 'CLANG_ENABLE_OBJC_ARC': 'YES',
+ 'CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS': 'YES',
+ 'GCC_PREFIX_HEADER': 'objc/WebRTC-Prefix.pch',
+ # |-ObjC| flag needed to make sure category method implementations
+ # are included:
+ # https://developer.apple.com/library/mac/qa/qa1490/_index.html
+ 'OTHER_LDFLAGS': ['-ObjC'],
+ },
+ }
+ ],
+ }], # OS=="ios"
+ ],
+}
diff --git a/webrtc/api/objc/OWNERS b/webrtc/api/objc/OWNERS
new file mode 100644
index 0000000000..cd06158b7f
--- /dev/null
+++ b/webrtc/api/objc/OWNERS
@@ -0,0 +1 @@
+tkchin@webrtc.org
diff --git a/webrtc/api/objc/README b/webrtc/api/objc/README
new file mode 100644
index 0000000000..bd33e61921
--- /dev/null
+++ b/webrtc/api/objc/README
@@ -0,0 +1,3 @@
+This is a work-in-progress to update the Objective-C API according to the W3C
+specification. The Objective-C API located at talk/app/webrtc/objc is
+deprecated, but will remain for the time being.
diff --git a/webrtc/api/objc/RTCEAGLVideoView.h b/webrtc/api/objc/RTCEAGLVideoView.h
new file mode 100644
index 0000000000..1a57df76bb
--- /dev/null
+++ b/webrtc/api/objc/RTCEAGLVideoView.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <UIKit/UIKit.h>
+
+#import "RTCVideoRenderer.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTCEAGLVideoView;
+@protocol RTCEAGLVideoViewDelegate
+
+- (void)videoView:(RTCEAGLVideoView *)videoView didChangeVideoSize:(CGSize)size;
+
+@end
+
+/**
+ * RTCEAGLVideoView is an RTCVideoRenderer which renders video frames in its
+ * bounds using OpenGLES 2.0.
+ */
+@interface RTCEAGLVideoView : UIView <RTCVideoRenderer>
+
+@property(nonatomic, weak) id<RTCEAGLVideoViewDelegate> delegate;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/api/objc/RTCEAGLVideoView.m b/webrtc/api/objc/RTCEAGLVideoView.m
new file mode 100644
index 0000000000..e664ede455
--- /dev/null
+++ b/webrtc/api/objc/RTCEAGLVideoView.m
@@ -0,0 +1,259 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCEAGLVideoView.h"
+
+#import <GLKit/GLKit.h>
+
+#import "RTCVideoFrame.h"
+#import "RTCOpenGLVideoRenderer.h"
+
+// RTCDisplayLinkTimer wraps a CADisplayLink and is set to fire every two screen
+// refreshes, which should be 30fps. We wrap the display link in order to avoid
+// a retain cycle since CADisplayLink takes a strong reference onto its target.
+// The timer is paused by default.
+@interface RTCDisplayLinkTimer : NSObject
+
+@property(nonatomic) BOOL isPaused;
+
+- (instancetype)initWithTimerHandler:(void (^)(void))timerHandler;
+- (void)invalidate;
+
+@end
+
+@implementation RTCDisplayLinkTimer {
+ CADisplayLink *_displayLink;
+ void (^_timerHandler)(void);
+}
+
+- (instancetype)initWithTimerHandler:(void (^)(void))timerHandler {
+ NSParameterAssert(timerHandler);
+ if (self = [super init]) {
+ _timerHandler = timerHandler;
+ _displayLink =
+ [CADisplayLink displayLinkWithTarget:self
+ selector:@selector(displayLinkDidFire:)];
+ _displayLink.paused = YES;
+ // Set to half of screen refresh, which should be 30fps.
+ [_displayLink setFrameInterval:2];
+ [_displayLink addToRunLoop:[NSRunLoop currentRunLoop]
+ forMode:NSRunLoopCommonModes];
+ }
+ return self;
+}
+
+- (void)dealloc {
+ [self invalidate];
+}
+
+- (BOOL)isPaused {
+ return _displayLink.paused;
+}
+
+- (void)setIsPaused:(BOOL)isPaused {
+ _displayLink.paused = isPaused;
+}
+
+- (void)invalidate {
+ [_displayLink invalidate];
+}
+
+- (void)displayLinkDidFire:(CADisplayLink *)displayLink {
+ _timerHandler();
+}
+
+@end
+
+// RTCEAGLVideoView wraps a GLKView which is set up with
+// enableSetNeedsDisplay = NO for the purpose of gaining control of
+// exactly when to call -[GLKView display]. This extra control is
+// required to avoid triggering method calls on GLKView that result
+// in attempting to bind the underlying render buffer when the
+// drawable size would be empty, which would result in the error
+// GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT. -[GLKView display] is the
+// method that will trigger the binding of the render buffer. Because
+// the standard behaviour of -[UIView setNeedsDisplay] is disabled for
+// the reasons above, the RTCEAGLVideoView maintains its own
+// |isDirty| flag.
+
+@interface RTCEAGLVideoView () <GLKViewDelegate>
+// |videoFrame| is set when we receive a frame from a worker thread and is read
+// from the display link callback so atomicity is required.
+@property(atomic, strong) RTCVideoFrame *videoFrame;
+@property(nonatomic, readonly) GLKView *glkView;
+@property(nonatomic, readonly) RTCOpenGLVideoRenderer *glRenderer;
+@end
+
+@implementation RTCEAGLVideoView {
+ RTCDisplayLinkTimer *_timer;
+ // This flag should only be set and read on the main thread (e.g. by
+ // setNeedsDisplay)
+ BOOL _isDirty;
+}
+
+@synthesize delegate = _delegate;
+@synthesize videoFrame = _videoFrame;
+@synthesize glkView = _glkView;
+@synthesize glRenderer = _glRenderer;
+
+- (instancetype)initWithFrame:(CGRect)frame {
+ if (self = [super initWithFrame:frame]) {
+ [self configure];
+ }
+ return self;
+}
+
+- (instancetype)initWithCoder:(NSCoder *)aDecoder {
+ if (self = [super initWithCoder:aDecoder]) {
+ [self configure];
+ }
+ return self;
+}
+
+- (void)configure {
+ EAGLContext *glContext =
+ [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3];
+ if (!glContext) {
+ glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
+ }
+ _glRenderer = [[RTCOpenGLVideoRenderer alloc] initWithContext:glContext];
+
+ // GLKView manages a framebuffer for us.
+ _glkView = [[GLKView alloc] initWithFrame:CGRectZero
+ context:glContext];
+ _glkView.drawableColorFormat = GLKViewDrawableColorFormatRGBA8888;
+ _glkView.drawableDepthFormat = GLKViewDrawableDepthFormatNone;
+ _glkView.drawableStencilFormat = GLKViewDrawableStencilFormatNone;
+ _glkView.drawableMultisample = GLKViewDrawableMultisampleNone;
+ _glkView.delegate = self;
+ _glkView.layer.masksToBounds = YES;
+ _glkView.enableSetNeedsDisplay = NO;
+ [self addSubview:_glkView];
+
+ // Listen to application state in order to clean up OpenGL before app goes
+ // away.
+ NSNotificationCenter *notificationCenter =
+ [NSNotificationCenter defaultCenter];
+ [notificationCenter addObserver:self
+ selector:@selector(willResignActive)
+ name:UIApplicationWillResignActiveNotification
+ object:nil];
+ [notificationCenter addObserver:self
+ selector:@selector(didBecomeActive)
+ name:UIApplicationDidBecomeActiveNotification
+ object:nil];
+
+ // Frames are received on a separate thread, so we poll for current frame
+ // using a refresh rate proportional to screen refresh frequency. This
+ // occurs on the main thread.
+ __weak RTCEAGLVideoView *weakSelf = self;
+ _timer = [[RTCDisplayLinkTimer alloc] initWithTimerHandler:^{
+ RTCEAGLVideoView *strongSelf = weakSelf;
+ [strongSelf displayLinkTimerDidFire];
+ }];
+ [self setupGL];
+}
+
+- (void)dealloc {
+ [[NSNotificationCenter defaultCenter] removeObserver:self];
+ UIApplicationState appState =
+ [UIApplication sharedApplication].applicationState;
+ if (appState == UIApplicationStateActive) {
+ [self teardownGL];
+ }
+ [_timer invalidate];
+}
+
+#pragma mark - UIView
+
+- (void)setNeedsDisplay {
+ [super setNeedsDisplay];
+ _isDirty = YES;
+}
+
+- (void)setNeedsDisplayInRect:(CGRect)rect {
+ [super setNeedsDisplayInRect:rect];
+ _isDirty = YES;
+}
+
+- (void)layoutSubviews {
+ [super layoutSubviews];
+ _glkView.frame = self.bounds;
+}
+
+#pragma mark - GLKViewDelegate
+
+// This method is called when the GLKView's content is dirty and needs to be
+// redrawn. This occurs on main thread.
+- (void)glkView:(GLKView *)view drawInRect:(CGRect)rect {
+ // The renderer will draw the frame to the framebuffer corresponding to the
+ // one used by |view|.
+ [_glRenderer drawFrame:self.videoFrame];
+}
+
+#pragma mark - RTCVideoRenderer
+
+// These methods may be called on non-main thread.
+- (void)setSize:(CGSize)size {
+ __weak RTCEAGLVideoView *weakSelf = self;
+ dispatch_async(dispatch_get_main_queue(), ^{
+ RTCEAGLVideoView *strongSelf = weakSelf;
+ [strongSelf.delegate videoView:strongSelf didChangeVideoSize:size];
+ });
+}
+
+- (void)renderFrame:(RTCVideoFrame *)frame {
+ self.videoFrame = frame;
+}
+
+#pragma mark - Private
+
+- (void)displayLinkTimerDidFire {
+ // Don't render unless the video frame has changed or the view content
+ // has explicitly been marked dirty.
+ if (!_isDirty && _glRenderer.lastDrawnFrame == self.videoFrame) {
+ return;
+ }
+
+ // Always reset isDirty at this point, even if -[GLKView display]
+ // won't be called in the case the drawable size is empty.
+ _isDirty = NO;
+
+ // Only call -[GLKView display] if the drawable size is
+ // non-empty. Calling display will make the GLKView setup its
+ // render buffer if necessary, but that will fail with error
+ // GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT if size is empty.
+ if (self.bounds.size.width > 0 && self.bounds.size.height > 0) {
+ [_glkView display];
+ }
+}
+
+- (void)setupGL {
+ self.videoFrame = nil;
+ [_glRenderer setupGL];
+ _timer.isPaused = NO;
+}
+
+- (void)teardownGL {
+ self.videoFrame = nil;
+ _timer.isPaused = YES;
+ [_glkView deleteDrawable];
+ [_glRenderer teardownGL];
+}
+
+- (void)didBecomeActive {
+ [self setupGL];
+}
+
+- (void)willResignActive {
+ [self teardownGL];
+}
+
+@end
diff --git a/webrtc/api/objc/RTCIceCandidate+Private.h b/webrtc/api/objc/RTCIceCandidate+Private.h
new file mode 100644
index 0000000000..ca95a43e3a
--- /dev/null
+++ b/webrtc/api/objc/RTCIceCandidate+Private.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCIceCandidate.h"
+
+#include "talk/app/webrtc/jsep.h"
+#include "webrtc/base/scoped_ptr.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCIceCandidate ()
+
+/**
+ * The native IceCandidateInterface representation of this RTCIceCandidate
+ * object. This is needed to pass to the underlying C++ APIs.
+ */
+@property(nonatomic, readonly)
+ rtc::scoped_ptr<webrtc::IceCandidateInterface> nativeCandidate;
+
+/**
+ * Initialize an RTCIceCandidate from a native IceCandidateInterface. No
+ * ownership is taken of the native candidate.
+ */
+- (instancetype)initWithNativeCandidate:
+ (webrtc::IceCandidateInterface *)candidate;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/api/objc/RTCIceCandidate.h b/webrtc/api/objc/RTCIceCandidate.h
new file mode 100644
index 0000000000..41ea69e991
--- /dev/null
+++ b/webrtc/api/objc/RTCIceCandidate.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCIceCandidate : NSObject
+
+/**
+ * If present, this is the identifier of the "media stream identification"
+ * for the media component this candidate is associated with.
+ */
+@property(nonatomic, readonly, nullable) NSString *sdpMid;
+
+/**
+ * The index (starting at zero) of the media description this candidate is
+ * associated with in the SDP.
+ */
+@property(nonatomic, readonly) NSInteger sdpMLineIndex;
+
+/** The SDP string for this candidate. */
+@property(nonatomic, readonly) NSString *sdp;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+/**
+ * Initialize an RTCIceCandidate from SDP.
+ */
+- (instancetype)initWithSdp:(NSString *)sdp
+ sdpMLineIndex:(NSInteger)sdpMLineIndex
+ sdpMid:(nullable NSString *)sdpMid
+ NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
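
A usage sketch for the initializer above, rebuilding a remote candidate from a signaling message (|json| and its keys are illustrative, not part of this API):

// |json| is the parsed signaling payload (assumed).
RTCIceCandidate *candidate =
    [[RTCIceCandidate alloc] initWithSdp:json[@"candidate"]
                           sdpMLineIndex:[json[@"sdpMLineIndex"] integerValue]
                                  sdpMid:json[@"sdpMid"]];
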
diff --git a/webrtc/api/objc/RTCIceCandidate.mm b/webrtc/api/objc/RTCIceCandidate.mm
new file mode 100644
index 0000000000..9e094f6f06
--- /dev/null
+++ b/webrtc/api/objc/RTCIceCandidate.mm
@@ -0,0 +1,70 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCIceCandidate.h"
+
+#import "webrtc/api/objc/RTCIceCandidate+Private.h"
+#import "webrtc/base/objc/NSString+StdString.h"
+#import "webrtc/base/objc/RTCLogging.h"
+
+@implementation RTCIceCandidate
+
+@synthesize sdpMid = _sdpMid;
+@synthesize sdpMLineIndex = _sdpMLineIndex;
+@synthesize sdp = _sdp;
+
+- (instancetype)initWithSdp:(NSString *)sdp
+ sdpMLineIndex:(NSInteger)sdpMLineIndex
+ sdpMid:(NSString *)sdpMid {
+ NSParameterAssert(sdp.length);
+ if (self = [super init]) {
+ _sdpMid = [sdpMid copy];
+ _sdpMLineIndex = sdpMLineIndex;
+ _sdp = [sdp copy];
+ }
+ return self;
+}
+
+- (NSString *)description {
+ return [NSString stringWithFormat:@"RTCIceCandidate:\n%@\n%ld\n%@",
+ _sdpMid,
+ (long)_sdpMLineIndex,
+ _sdp];
+}
+
+#pragma mark - Private
+
+- (instancetype)initWithNativeCandidate:
+ (webrtc::IceCandidateInterface *)candidate {
+ NSParameterAssert(candidate);
+ std::string sdp;
+ candidate->ToString(&sdp);
+
+ return [self initWithSdp:[NSString stringForStdString:sdp]
+ sdpMLineIndex:candidate->sdp_mline_index()
+ sdpMid:[NSString stringForStdString:candidate->sdp_mid()]];
+}
+
+- (rtc::scoped_ptr<webrtc::IceCandidateInterface>)nativeCandidate {
+ webrtc::SdpParseError error;
+
+ webrtc::IceCandidateInterface *candidate = webrtc::CreateIceCandidate(
+ _sdpMid.stdString, _sdpMLineIndex, _sdp.stdString, &error);
+
+ if (!candidate) {
+ RTCLog(@"Failed to create ICE candidate: %s\nline: %s",
+ error.description.c_str(),
+ error.line.c_str());
+ }
+
+ return rtc::scoped_ptr<webrtc::IceCandidateInterface>(candidate);
+}
+
+@end
diff --git a/webrtc/api/objc/RTCIceServer+Private.h b/webrtc/api/objc/RTCIceServer+Private.h
new file mode 100644
index 0000000000..59f5a92dff
--- /dev/null
+++ b/webrtc/api/objc/RTCIceServer+Private.h
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCIceServer.h"
+
+#include "talk/app/webrtc/peerconnectioninterface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCIceServer ()
+
+/**
+ * IceServer struct representation of this RTCIceServer object's data.
+ * This is needed to pass to the underlying C++ APIs.
+ */
+@property(nonatomic, readonly)
+ webrtc::PeerConnectionInterface::IceServer iceServer;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/api/objc/RTCIceServer.h b/webrtc/api/objc/RTCIceServer.h
new file mode 100644
index 0000000000..be4e0d7b6e
--- /dev/null
+++ b/webrtc/api/objc/RTCIceServer.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCIceServer : NSObject
+
+/** URI(s) for this server represented as NSStrings. */
+@property(nonatomic, copy, readonly) NSArray<NSString *> *urlStrings;
+
+/** Username to use if this RTCIceServer object is a TURN server. */
+@property(nonatomic, copy, readonly, nullable) NSString *username;
+
+/** Credential to use if this RTCIceServer object is a TURN server. */
+@property(nonatomic, copy, readonly, nullable) NSString *credential;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+/** Convenience initializer for a server with no authentication (e.g. STUN). */
+- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings;
+
+/**
+ * Initialize an RTCIceServer with its associated URLs, an optional username,
+ * and an optional credential.
+ */
+- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
+ username:(nullable NSString *)username
+ credential:(nullable NSString *)credential
+ NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
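
A sketch of both initializers above; the server URLs are placeholders:

// STUN needs no credentials; TURN takes a username and credential.
RTCIceServer *stunServer =
    [[RTCIceServer alloc] initWithURLStrings:@[ @"stun:stun.example.org" ]];
RTCIceServer *turnServer =
    [[RTCIceServer alloc] initWithURLStrings:@[ @"turn:turn.example.org" ]
                                    username:@"user"
                                  credential:@"secret"];
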
diff --git a/webrtc/api/objc/RTCIceServer.mm b/webrtc/api/objc/RTCIceServer.mm
new file mode 100644
index 0000000000..7a898e06d5
--- /dev/null
+++ b/webrtc/api/objc/RTCIceServer.mm
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCIceServer.h"
+
+#import "webrtc/api/objc/RTCIceServer+Private.h"
+#import "webrtc/base/objc/NSString+StdString.h"
+
+@implementation RTCIceServer
+
+@synthesize urlStrings = _urlStrings;
+@synthesize username = _username;
+@synthesize credential = _credential;
+
+- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings {
+ NSParameterAssert(urlStrings.count);
+ return [self initWithURLStrings:urlStrings
+ username:nil
+ credential:nil];
+}
+
+- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
+ username:(NSString *)username
+ credential:(NSString *)credential {
+ NSParameterAssert(urlStrings.count);
+ if (self = [super init]) {
+ _urlStrings = [[NSArray alloc] initWithArray:urlStrings copyItems:YES];
+ _username = [username copy];
+ _credential = [credential copy];
+ }
+ return self;
+}
+
+- (NSString *)description {
+ return [NSString stringWithFormat:@"RTCIceServer:\n%@\n%@\n%@",
+ _urlStrings,
+ _username,
+ _credential];
+}
+
+#pragma mark - Private
+
+- (webrtc::PeerConnectionInterface::IceServer)iceServer {
+ __block webrtc::PeerConnectionInterface::IceServer iceServer;
+
+ iceServer.username = [NSString stdStringForString:_username];
+ iceServer.password = [NSString stdStringForString:_credential];
+
+ [_urlStrings enumerateObjectsUsingBlock:^(NSString *url,
+ NSUInteger idx,
+ BOOL *stop) {
+ iceServer.urls.push_back(url.stdString);
+ }];
+ return iceServer;
+}
+
+@end
diff --git a/webrtc/api/objc/RTCMediaConstraints+Private.h b/webrtc/api/objc/RTCMediaConstraints+Private.h
new file mode 100644
index 0000000000..2c4b722104
--- /dev/null
+++ b/webrtc/api/objc/RTCMediaConstraints+Private.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMediaConstraints.h"
+
+#include "talk/app/webrtc/mediaconstraintsinterface.h"
+#include "webrtc/base/scoped_ptr.h"
+
+namespace webrtc {
+
+class MediaConstraints : public MediaConstraintsInterface {
+ public:
+ virtual ~MediaConstraints();
+ MediaConstraints();
+ MediaConstraints(
+ const MediaConstraintsInterface::Constraints& mandatory,
+ const MediaConstraintsInterface::Constraints& optional);
+ virtual const Constraints& GetMandatory() const;
+ virtual const Constraints& GetOptional() const;
+
+ private:
+ MediaConstraintsInterface::Constraints mandatory_;
+ MediaConstraintsInterface::Constraints optional_;
+};
+
+} // namespace webrtc
+
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCMediaConstraints ()
+
+/**
+ * A MediaConstraints representation of this RTCMediaConstraints object. This is
+ * needed to pass to the underlying C++ APIs.
+ */
+- (rtc::scoped_ptr<webrtc::MediaConstraints>)nativeConstraints;
+
+/** Returns a native Constraints object representing these constraints. */
++ (webrtc::MediaConstraintsInterface::Constraints)
+ nativeConstraintsForConstraints:
+ (NSDictionary<NSString *, NSString *> *)constraints;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/api/objc/RTCMediaConstraints.h b/webrtc/api/objc/RTCMediaConstraints.h
new file mode 100644
index 0000000000..a8ad39142e
--- /dev/null
+++ b/webrtc/api/objc/RTCMediaConstraints.h
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCMediaConstraints : NSObject
+
+- (instancetype)init NS_UNAVAILABLE;
+
+/** Initialize with mandatory and/or optional constraints. */
+- (instancetype)initWithMandatoryConstraints:
+ (nullable NSDictionary<NSString *, NSString *> *)mandatory
+ optionalConstraints:
+ (nullable NSDictionary<NSString *, NSString *> *)optional
+ NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
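
A sketch of building constraints with the initializer above; whether a given key is honored depends on the underlying MediaConstraintsInterface, so the key shown is only illustrative:

RTCMediaConstraints *constraints = [[RTCMediaConstraints alloc]
    initWithMandatoryConstraints:@{ @"OfferToReceiveAudio" : @"true" }
             optionalConstraints:nil];
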
diff --git a/webrtc/api/objc/RTCMediaConstraints.mm b/webrtc/api/objc/RTCMediaConstraints.mm
new file mode 100644
index 0000000000..a53a517747
--- /dev/null
+++ b/webrtc/api/objc/RTCMediaConstraints.mm
@@ -0,0 +1,92 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMediaConstraints.h"
+
+#import "webrtc/api/objc/RTCMediaConstraints+Private.h"
+#import "webrtc/base/objc/NSString+StdString.h"
+
+namespace webrtc {
+
+MediaConstraints::~MediaConstraints() {}
+
+MediaConstraints::MediaConstraints() {}
+
+MediaConstraints::MediaConstraints(
+ const MediaConstraintsInterface::Constraints& mandatory,
+ const MediaConstraintsInterface::Constraints& optional)
+ : mandatory_(mandatory), optional_(optional) {}
+
+const MediaConstraintsInterface::Constraints&
+MediaConstraints::GetMandatory() const {
+ return mandatory_;
+}
+
+const MediaConstraintsInterface::Constraints&
+MediaConstraints::GetOptional() const {
+ return optional_;
+}
+
+} // namespace webrtc
+
+
+@implementation RTCMediaConstraints {
+ NSDictionary<NSString *, NSString *> *_mandatory;
+ NSDictionary<NSString *, NSString *> *_optional;
+}
+
+- (instancetype)initWithMandatoryConstraints:
+ (NSDictionary<NSString *, NSString *> *)mandatory
+ optionalConstraints:
+ (NSDictionary<NSString *, NSString *> *)optional {
+ if (self = [super init]) {
+ _mandatory = [[NSDictionary alloc] initWithDictionary:mandatory
+ copyItems:YES];
+ _optional = [[NSDictionary alloc] initWithDictionary:optional
+ copyItems:YES];
+ }
+ return self;
+}
+
+- (NSString *)description {
+ return [NSString stringWithFormat:@"RTCMediaConstraints:\n%@\n%@",
+ _mandatory,
+ _optional];
+}
+
+#pragma mark - Private
+
+- (rtc::scoped_ptr<webrtc::MediaConstraints>)nativeConstraints {
+ webrtc::MediaConstraintsInterface::Constraints mandatory =
+ [[self class] nativeConstraintsForConstraints:_mandatory];
+ webrtc::MediaConstraintsInterface::Constraints optional =
+ [[self class] nativeConstraintsForConstraints:_optional];
+
+ webrtc::MediaConstraints *nativeConstraints =
+ new webrtc::MediaConstraints(mandatory, optional);
+ return rtc::scoped_ptr<webrtc::MediaConstraints>(nativeConstraints);
+}
+
++ (webrtc::MediaConstraintsInterface::Constraints)
+ nativeConstraintsForConstraints:
+ (NSDictionary<NSString *, NSString *> *)constraints {
+ webrtc::MediaConstraintsInterface::Constraints nativeConstraints;
+ for (NSString *key in constraints) {
+ NSAssert([key isKindOfClass:[NSString class]],
+ @"%@ is not an NSString.", key);
+ NSAssert([constraints[key] isKindOfClass:[NSString class]],
+ @"%@ is not an NSString.", constraints[key]);
+ nativeConstraints.push_back(webrtc::MediaConstraintsInterface::Constraint(
+ key.stdString, constraints[key].stdString));
+ }
+ return nativeConstraints;
+}
+
+@end
diff --git a/webrtc/api/objc/RTCMediaSource+Private.h b/webrtc/api/objc/RTCMediaSource+Private.h
new file mode 100644
index 0000000000..fcbaad8e45
--- /dev/null
+++ b/webrtc/api/objc/RTCMediaSource+Private.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMediaSource.h"
+
+#include "talk/app/webrtc/mediastreaminterface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCMediaSource ()
+
+/**
+ * The MediaSourceInterface object passed to this RTCMediaSource during
+ * construction.
+ */
+@property(nonatomic, readonly)
+ rtc::scoped_refptr<webrtc::MediaSourceInterface> nativeMediaSource;
+
+/** Initialize an RTCMediaSource from a native MediaSourceInterface. */
+- (instancetype)initWithNativeMediaSource:
+ (rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
+ NS_DESIGNATED_INITIALIZER;
+
++ (webrtc::MediaSourceInterface::SourceState)nativeSourceStateForState:
+ (RTCSourceState)state;
+
++ (RTCSourceState)sourceStateForNativeState:
+ (webrtc::MediaSourceInterface::SourceState)nativeState;
+
++ (NSString *)stringForState:(RTCSourceState)state;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/api/objc/RTCMediaSource.h b/webrtc/api/objc/RTCMediaSource.h
new file mode 100644
index 0000000000..0b36b8d709
--- /dev/null
+++ b/webrtc/api/objc/RTCMediaSource.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+typedef NS_ENUM(NSInteger, RTCSourceState) {
+ RTCSourceStateInitializing,
+ RTCSourceStateLive,
+ RTCSourceStateEnded,
+ RTCSourceStateMuted,
+};
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCMediaSource : NSObject
+
+/** The current state of the RTCMediaSource. */
+@property(nonatomic, readonly) RTCSourceState state;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/api/objc/RTCMediaSource.mm b/webrtc/api/objc/RTCMediaSource.mm
new file mode 100644
index 0000000000..5f46ab8318
--- /dev/null
+++ b/webrtc/api/objc/RTCMediaSource.mm
@@ -0,0 +1,84 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMediaSource.h"
+
+#import "webrtc/api/objc/RTCMediaSource+Private.h"
+
+@implementation RTCMediaSource {
+ rtc::scoped_refptr<webrtc::MediaSourceInterface> _nativeMediaSource;
+}
+
+- (RTCSourceState)state {
+ return [[self class] sourceStateForNativeState:_nativeMediaSource->state()];
+}
+
+- (NSString *)description {
+ return [NSString stringWithFormat:@"RTCMediaSource:\n%@",
+ [[self class] stringForState:self.state]];
+}
+
+#pragma mark - Private
+
+- (rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource {
+ return _nativeMediaSource;
+}
+
+- (instancetype)initWithNativeMediaSource:
+ (rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource {
+ NSParameterAssert(nativeMediaSource);
+ if (self = [super init]) {
+ _nativeMediaSource = nativeMediaSource;
+ }
+ return self;
+}
+
++ (webrtc::MediaSourceInterface::SourceState)nativeSourceStateForState:
+ (RTCSourceState)state {
+ switch (state) {
+ case RTCSourceStateInitializing:
+ return webrtc::MediaSourceInterface::kInitializing;
+ case RTCSourceStateLive:
+ return webrtc::MediaSourceInterface::kLive;
+ case RTCSourceStateEnded:
+ return webrtc::MediaSourceInterface::kEnded;
+ case RTCSourceStateMuted:
+ return webrtc::MediaSourceInterface::kMuted;
+ }
+}
+
++ (RTCSourceState)sourceStateForNativeState:
+ (webrtc::MediaSourceInterface::SourceState)nativeState {
+ switch (nativeState) {
+ case webrtc::MediaSourceInterface::kInitializing:
+ return RTCSourceStateInitializing;
+ case webrtc::MediaSourceInterface::kLive:
+ return RTCSourceStateLive;
+ case webrtc::MediaSourceInterface::kEnded:
+ return RTCSourceStateEnded;
+ case webrtc::MediaSourceInterface::kMuted:
+ return RTCSourceStateMuted;
+ }
+}
+
++ (NSString *)stringForState:(RTCSourceState)state {
+ switch (state) {
+ case RTCSourceStateInitializing:
+ return @"Initializing";
+ case RTCSourceStateLive:
+ return @"Live";
+ case RTCSourceStateEnded:
+ return @"Ended";
+ case RTCSourceStateMuted:
+ return @"Muted";
+ }
+}
+
+@end
diff --git a/webrtc/api/objc/RTCMediaStreamTrack+Private.h b/webrtc/api/objc/RTCMediaStreamTrack+Private.h
new file mode 100644
index 0000000000..3e17e63cd3
--- /dev/null
+++ b/webrtc/api/objc/RTCMediaStreamTrack+Private.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMediaStreamTrack.h"
+
+#include "talk/app/webrtc/mediastreaminterface.h"
+#include "webrtc/base/scoped_ptr.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCMediaStreamTrack ()
+
+/**
+ * The native MediaStreamTrackInterface representation of this
+ * RTCMediaStreamTrack object. This is needed to pass to the underlying C++
+ * APIs.
+ */
+@property(nonatomic, readonly)
+ rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> nativeTrack;
+
+/**
+ * Initialize an RTCMediaStreamTrack from a native MediaStreamTrackInterface.
+ */
+- (instancetype)initWithNativeTrack:
+ (rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
+ NS_DESIGNATED_INITIALIZER;
+
++ (webrtc::MediaStreamTrackInterface::TrackState)nativeTrackStateForState:
+ (RTCMediaStreamTrackState)state;
+
++ (RTCMediaStreamTrackState)trackStateForNativeState:
+ (webrtc::MediaStreamTrackInterface::TrackState)nativeState;
+
++ (NSString *)stringForState:(RTCMediaStreamTrackState)state;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/api/objc/RTCMediaStreamTrack.h b/webrtc/api/objc/RTCMediaStreamTrack.h
new file mode 100644
index 0000000000..beb48d3b6f
--- /dev/null
+++ b/webrtc/api/objc/RTCMediaStreamTrack.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+/**
+ * Represents the state of the track. This exposes the same states as in C++,
+ * which include two more states than the W3C spec defines.
+ */
+typedef NS_ENUM(NSInteger, RTCMediaStreamTrackState) {
+ RTCMediaStreamTrackStateInitializing,
+ RTCMediaStreamTrackStateLive,
+ RTCMediaStreamTrackStateEnded,
+ RTCMediaStreamTrackStateFailed,
+};
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCMediaStreamTrack : NSObject
+
+/**
+ * The kind of track. For example, "audio" if this track represents an audio
+ * track and "video" if this track represents a video track.
+ */
+@property(nonatomic, readonly) NSString *kind;
+
+/** An identifier string. */
+@property(nonatomic, readonly) NSString *trackId;
+
+/** The enabled state of the track. */
+@property(nonatomic) BOOL isEnabled;
+
+/** The state of the track. */
+@property(nonatomic, readonly) RTCMediaStreamTrackState readyState;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
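
A sketch of the typical mute toggle using the properties above; |track| is any RTCMediaStreamTrack obtained elsewhere:

if ([track.kind isEqualToString:@"audio"] &&
    track.readyState == RTCMediaStreamTrackStateLive) {
  track.isEnabled = NO;  // Forwards to MediaStreamTrackInterface::set_enabled.
}
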
diff --git a/webrtc/api/objc/RTCMediaStreamTrack.mm b/webrtc/api/objc/RTCMediaStreamTrack.mm
new file mode 100644
index 0000000000..e5751b0746
--- /dev/null
+++ b/webrtc/api/objc/RTCMediaStreamTrack.mm
@@ -0,0 +1,105 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMediaStreamTrack.h"
+
+#import "webrtc/api/objc/RTCMediaStreamTrack+Private.h"
+#import "webrtc/base/objc/NSString+StdString.h"
+
+@implementation RTCMediaStreamTrack {
+ rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> _nativeTrack;
+}
+
+- (NSString *)kind {
+ return [NSString stringForStdString:_nativeTrack->kind()];
+}
+
+- (NSString *)trackId {
+ return [NSString stringForStdString:_nativeTrack->id()];
+}
+
+- (BOOL)isEnabled {
+ return _nativeTrack->enabled();
+}
+
+- (void)setIsEnabled:(BOOL)isEnabled {
+ _nativeTrack->set_enabled(isEnabled);
+}
+
+- (RTCMediaStreamTrackState)readyState {
+ return [[self class] trackStateForNativeState:_nativeTrack->state()];
+}
+
+- (NSString *)description {
+ NSString *readyState = [[self class] stringForState:self.readyState];
+ return [NSString stringWithFormat:@"RTCMediaStreamTrack:\n%@\n%@\n%@\n%@",
+ self.kind,
+ self.trackId,
+ self.isEnabled ? @"enabled" : @"disabled",
+ readyState];
+}
+
+#pragma mark - Private
+
+- (rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack {
+ return _nativeTrack;
+}
+
+- (instancetype)initWithNativeTrack:
+ (rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack {
+ NSParameterAssert(nativeTrack);
+ if (self = [super init]) {
+ _nativeTrack = nativeTrack;
+ }
+ return self;
+}
+
++ (webrtc::MediaStreamTrackInterface::TrackState)nativeTrackStateForState:
+ (RTCMediaStreamTrackState)state {
+ switch (state) {
+ case RTCMediaStreamTrackStateInitializing:
+ return webrtc::MediaStreamTrackInterface::kInitializing;
+ case RTCMediaStreamTrackStateLive:
+ return webrtc::MediaStreamTrackInterface::kLive;
+ case RTCMediaStreamTrackStateEnded:
+ return webrtc::MediaStreamTrackInterface::kEnded;
+ case RTCMediaStreamTrackStateFailed:
+ return webrtc::MediaStreamTrackInterface::kFailed;
+ }
+}
+
++ (RTCMediaStreamTrackState)trackStateForNativeState:
+ (webrtc::MediaStreamTrackInterface::TrackState)nativeState {
+ switch (nativeState) {
+ case webrtc::MediaStreamTrackInterface::kInitializing:
+ return RTCMediaStreamTrackStateInitializing;
+ case webrtc::MediaStreamTrackInterface::kLive:
+ return RTCMediaStreamTrackStateLive;
+ case webrtc::MediaStreamTrackInterface::kEnded:
+ return RTCMediaStreamTrackStateEnded;
+ case webrtc::MediaStreamTrackInterface::kFailed:
+ return RTCMediaStreamTrackStateFailed;
+ }
+}
+
++ (NSString *)stringForState:(RTCMediaStreamTrackState)state {
+ switch (state) {
+ case RTCMediaStreamTrackStateInitializing:
+ return @"Initializing";
+ case RTCMediaStreamTrackStateLive:
+ return @"Live";
+ case RTCMediaStreamTrackStateEnded:
+ return @"Ended";
+ case RTCMediaStreamTrackStateFailed:
+ return @"Failed";
+ }
+}
+
+@end
diff --git a/webrtc/api/objc/RTCNSGLVideoView.h b/webrtc/api/objc/RTCNSGLVideoView.h
new file mode 100644
index 0000000000..27eb31e9af
--- /dev/null
+++ b/webrtc/api/objc/RTCNSGLVideoView.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if TARGET_OS_IPHONE
+#error "This file targets OSX."
+#endif
+
+#import <AppKit/NSOpenGLView.h>
+
+#import "RTCVideoRenderer.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTCNSGLVideoView;
+@protocol RTCNSGLVideoViewDelegate
+
+- (void)videoView:(RTCNSGLVideoView *)videoView didChangeVideoSize:(CGSize)size;
+
+@end
+
+@interface RTCNSGLVideoView : NSOpenGLView <RTCVideoRenderer>
+
+@property(nonatomic, weak) id<RTCNSGLVideoViewDelegate> delegate;
+
+@end
+
+NS_ASSUME_NONNULL_END
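
A sketch of the delegate callback above on OS X; the resize helper is hypothetical:

- (void)videoView:(RTCNSGLVideoView *)videoView
    didChangeVideoSize:(CGSize)size {
  // Dispatched to the main queue by -setSize:.
  [self resizeWindowForVideoSize:size];  // Hypothetical helper.
}
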
diff --git a/webrtc/api/objc/RTCNSGLVideoView.m b/webrtc/api/objc/RTCNSGLVideoView.m
new file mode 100644
index 0000000000..063e6f1330
--- /dev/null
+++ b/webrtc/api/objc/RTCNSGLVideoView.m
@@ -0,0 +1,141 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCNSGLVideoView.h"
+
+#import <CoreVideo/CVDisplayLink.h>
+#import <OpenGL/gl3.h>
+#import "RTCVideoFrame.h"
+#import "RTCOpenGLVideoRenderer.h"
+
+@interface RTCNSGLVideoView ()
+// |videoFrame| is set when we receive a frame from a worker thread and is read
+// from the display link callback, so atomicity is required.
+@property(atomic, strong) RTCVideoFrame *videoFrame;
+@property(atomic, strong) RTCOpenGLVideoRenderer *glRenderer;
+- (void)drawFrame;
+@end
+
+static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink,
+ const CVTimeStamp *now,
+ const CVTimeStamp *outputTime,
+ CVOptionFlags flagsIn,
+ CVOptionFlags *flagsOut,
+ void *displayLinkContext) {
+ RTCNSGLVideoView *view = (__bridge RTCNSGLVideoView *)displayLinkContext;
+ [view drawFrame];
+ return kCVReturnSuccess;
+}
+
+@implementation RTCNSGLVideoView {
+ CVDisplayLinkRef _displayLink;
+}
+
+@synthesize delegate = _delegate;
+@synthesize videoFrame = _videoFrame;
+@synthesize glRenderer = _glRenderer;
+
+- (void)dealloc {
+ [self teardownDisplayLink];
+}
+
+- (void)drawRect:(NSRect)rect {
+ [self drawFrame];
+}
+
+- (void)reshape {
+ [super reshape];
+ NSRect frame = [self frame];
+ CGLLockContext([[self openGLContext] CGLContextObj]);
+ glViewport(0, 0, frame.size.width, frame.size.height);
+ CGLUnlockContext([[self openGLContext] CGLContextObj]);
+}
+
+- (void)lockFocus {
+ NSOpenGLContext *context = [self openGLContext];
+ [super lockFocus];
+ if ([context view] != self) {
+ [context setView:self];
+ }
+ [context makeCurrentContext];
+}
+
+- (void)prepareOpenGL {
+ [super prepareOpenGL];
+ if (!self.glRenderer) {
+ self.glRenderer =
+ [[RTCOpenGLVideoRenderer alloc] initWithContext:[self openGLContext]];
+ }
+ [self.glRenderer setupGL];
+ [self setupDisplayLink];
+}
+
+- (void)clearGLContext {
+ [self.glRenderer teardownGL];
+ self.glRenderer = nil;
+ [super clearGLContext];
+}
+
+#pragma mark - RTCVideoRenderer
+
+// These methods may be called on a non-main thread.
+- (void)setSize:(CGSize)size {
+ dispatch_async(dispatch_get_main_queue(), ^{
+ [self.delegate videoView:self didChangeVideoSize:size];
+ });
+}
+
+- (void)renderFrame:(RTCVideoFrame *)frame {
+ self.videoFrame = frame;
+}
+
+#pragma mark - Private
+
+- (void)drawFrame {
+ RTCVideoFrame *videoFrame = self.videoFrame;
+ if (self.glRenderer.lastDrawnFrame != videoFrame) {
+ // This method may be called from the CVDisplayLink callback, which isn't on
+ // the main thread, so we have to lock the GL context before drawing.
+ CGLLockContext([[self openGLContext] CGLContextObj]);
+ [self.glRenderer drawFrame:videoFrame];
+ CGLUnlockContext([[self openGLContext] CGLContextObj]);
+ }
+}
+
+- (void)setupDisplayLink {
+ if (_displayLink) {
+ return;
+ }
+ // Synchronize buffer swaps with vertical refresh rate.
+ GLint swapInt = 1;
+ [[self openGLContext] setValues:&swapInt forParameter:NSOpenGLCPSwapInterval];
+
+ // Create display link.
+ CVDisplayLinkCreateWithActiveCGDisplays(&_displayLink);
+ CVDisplayLinkSetOutputCallback(_displayLink,
+ &OnDisplayLinkFired,
+ (__bridge void *)self);
+ // Set the display link for the current renderer.
+ CGLContextObj cglContext = [[self openGLContext] CGLContextObj];
+ CGLPixelFormatObj cglPixelFormat = [[self pixelFormat] CGLPixelFormatObj];
+ CVDisplayLinkSetCurrentCGDisplayFromOpenGLContext(
+ _displayLink, cglContext, cglPixelFormat);
+ CVDisplayLinkStart(_displayLink);
+}
+
+- (void)teardownDisplayLink {
+ if (!_displayLink) {
+ return;
+ }
+ CVDisplayLinkRelease(_displayLink);
+ _displayLink = NULL;
+}
+
+@end
diff --git a/webrtc/api/objc/RTCOpenGLVideoRenderer.h b/webrtc/api/objc/RTCOpenGLVideoRenderer.h
new file mode 100644
index 0000000000..729839c6a3
--- /dev/null
+++ b/webrtc/api/objc/RTCOpenGLVideoRenderer.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#if TARGET_OS_IPHONE
+#import <GLKit/GLKit.h>
+#else
+#import <AppKit/NSOpenGL.h>
+#endif
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTCVideoFrame;
+
+// RTCOpenGLVideoRenderer issues appropriate OpenGL commands to draw a frame to
+// the currently bound framebuffer. Supports OpenGL 3.2 and OpenGLES 2.0. OpenGL
+// framebuffer creation and management should be handled elsewhere using the
+// same context used to initialize this class.
+@interface RTCOpenGLVideoRenderer : NSObject
+
+// The last successfully drawn frame. Used to avoid drawing frames
+// unnecessarily, which saves battery life by reducing load.
+@property(nonatomic, readonly) RTCVideoFrame *lastDrawnFrame;
+
+#if TARGET_OS_IPHONE
+- (instancetype)initWithContext:(EAGLContext *)context
+ NS_DESIGNATED_INITIALIZER;
+#else
+- (instancetype)initWithContext:(NSOpenGLContext *)context
+ NS_DESIGNATED_INITIALIZER;
+#endif
+
+// Draws |frame| onto the currently bound OpenGL framebuffer. |setupGL| must be
+// called before this function will succeed.
+- (BOOL)drawFrame:(RTCVideoFrame *)frame;
+
+// The following methods are used to manage OpenGL resources. On iOS,
+// applications should release these resources when moving to the background
+// so that they are available to the foreground application; attempting to
+// issue OpenGL ES commands while in the background will terminate the app.
+
+// Sets up the OpenGL state needed for rendering.
+- (void)setupGL;
+// Tears down the OpenGL state created by |setupGL|.
+- (void)teardownGL;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
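
A sketch of the intended lifecycle on iOS, assuming the caller owns the EAGLContext and binds a framebuffer (e.g. via GLKView) before drawing; |context| and |frame| come from elsewhere:

RTCOpenGLVideoRenderer *renderer =
    [[RTCOpenGLVideoRenderer alloc] initWithContext:context];
[renderer setupGL];  // Once, while the app is in the foreground.
if (![renderer drawFrame:frame]) {  // Per display-link tick.
  // Either not set up yet, or |frame| was already drawn.
}
[renderer teardownGL];  // On entering the background.
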
diff --git a/webrtc/api/objc/RTCOpenGLVideoRenderer.mm b/webrtc/api/objc/RTCOpenGLVideoRenderer.mm
new file mode 100644
index 0000000000..56a6431ffa
--- /dev/null
+++ b/webrtc/api/objc/RTCOpenGLVideoRenderer.mm
@@ -0,0 +1,485 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCOpenGLVideoRenderer.h"
+
+#include <string.h>
+
+#include "webrtc/base/scoped_ptr.h"
+
+#if TARGET_OS_IPHONE
+#import <OpenGLES/ES3/gl.h>
+#else
+#import <OpenGL/gl3.h>
+#endif
+
+#import "RTCVideoFrame.h"
+
+// TODO(tkchin): check and log OpenGL errors. Methods here return BOOLs in
+// anticipation of that happening in the future.
+
+#if TARGET_OS_IPHONE
+#define RTC_PIXEL_FORMAT GL_LUMINANCE
+#define SHADER_VERSION
+#define VERTEX_SHADER_IN "attribute"
+#define VERTEX_SHADER_OUT "varying"
+#define FRAGMENT_SHADER_IN "varying"
+#define FRAGMENT_SHADER_OUT
+#define FRAGMENT_SHADER_COLOR "gl_FragColor"
+#define FRAGMENT_SHADER_TEXTURE "texture2D"
+#else
+#define RTC_PIXEL_FORMAT GL_RED
+#define SHADER_VERSION "#version 150\n"
+#define VERTEX_SHADER_IN "in"
+#define VERTEX_SHADER_OUT "out"
+#define FRAGMENT_SHADER_IN "in"
+#define FRAGMENT_SHADER_OUT "out vec4 fragColor;\n"
+#define FRAGMENT_SHADER_COLOR "fragColor"
+#define FRAGMENT_SHADER_TEXTURE "texture"
+#endif
+
+// Vertex shader doesn't do anything except pass coordinates through.
+static const char kVertexShaderSource[] =
+ SHADER_VERSION
+ VERTEX_SHADER_IN " vec2 position;\n"
+ VERTEX_SHADER_IN " vec2 texcoord;\n"
+ VERTEX_SHADER_OUT " vec2 v_texcoord;\n"
+ "void main() {\n"
+ " gl_Position = vec4(position.x, position.y, 0.0, 1.0);\n"
+ " v_texcoord = texcoord;\n"
+ "}\n";
+
+// Fragment shader converts YUV values from input textures into a final RGB
+// pixel. The conversion formula is from http://www.fourcc.org/fccyvrgb.php.
+static const char kFragmentShaderSource[] =
+ SHADER_VERSION
+ "precision highp float;"
+ FRAGMENT_SHADER_IN " vec2 v_texcoord;\n"
+ "uniform lowp sampler2D s_textureY;\n"
+ "uniform lowp sampler2D s_textureU;\n"
+ "uniform lowp sampler2D s_textureV;\n"
+ FRAGMENT_SHADER_OUT
+ "void main() {\n"
+ " float y, u, v, r, g, b;\n"
+ " y = " FRAGMENT_SHADER_TEXTURE "(s_textureY, v_texcoord).r;\n"
+ " u = " FRAGMENT_SHADER_TEXTURE "(s_textureU, v_texcoord).r;\n"
+ " v = " FRAGMENT_SHADER_TEXTURE "(s_textureV, v_texcoord).r;\n"
+ " u = u - 0.5;\n"
+ " v = v - 0.5;\n"
+ " r = y + 1.403 * v;\n"
+ " g = y - 0.344 * u - 0.714 * v;\n"
+ " b = y + 1.770 * u;\n"
+ " " FRAGMENT_SHADER_COLOR " = vec4(r, g, b, 1.0);\n"
+ " }\n";
+
+// Compiles a shader of the given |type| with GLSL source |source| and returns
+// the shader handle or 0 on error.
+GLuint CreateShader(GLenum type, const GLchar *source) {
+ GLuint shader = glCreateShader(type);
+ if (!shader) {
+ return 0;
+ }
+ glShaderSource(shader, 1, &source, NULL);
+ glCompileShader(shader);
+ GLint compileStatus = GL_FALSE;
+ glGetShaderiv(shader, GL_COMPILE_STATUS, &compileStatus);
+ if (compileStatus == GL_FALSE) {
+ glDeleteShader(shader);
+ shader = 0;
+ }
+ return shader;
+}
+
+// Links a shader program with the given vertex and fragment shaders and
+// returns the program handle or 0 on error.
+GLuint CreateProgram(GLuint vertexShader, GLuint fragmentShader) {
+ if (vertexShader == 0 || fragmentShader == 0) {
+ return 0;
+ }
+ GLuint program = glCreateProgram();
+ if (!program) {
+ return 0;
+ }
+ glAttachShader(program, vertexShader);
+ glAttachShader(program, fragmentShader);
+ glLinkProgram(program);
+ GLint linkStatus = GL_FALSE;
+ glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
+ if (linkStatus == GL_FALSE) {
+ glDeleteProgram(program);
+ program = 0;
+ }
+ return program;
+}
+
+// When the modelview and projection matrices are identity (the default), the
+// world is contained in the square around the origin with unit size 2.
+// Drawing to these coordinates is equivalent to drawing to the entire screen.
+// The texture is stretched over that square using texture coordinates (u, v)
+// that range from (0, 0) to (1, 1) inclusive. Texture coordinates are flipped
+// vertically here because the incoming frame has its origin in the upper-left
+// corner, but OpenGL expects the origin in the bottom-left corner.
+const GLfloat gVertices[] = {
+ // X, Y, U, V.
+ -1, -1, 0, 1, // Bottom left.
+ 1, -1, 1, 1, // Bottom right.
+ 1, 1, 1, 0, // Top right.
+ -1, 1, 0, 0, // Top left.
+};
+
+// |kNumTextures| must not exceed 8, which is the limit in OpenGLES2. Two sets
+// of 3 textures are used here, one for each of the Y, U and V planes. Having
+// two sets alleviates CPU blockage in the event that the GPU is asked to render
+// to a texture that is already in use.
+static const GLsizei kNumTextureSets = 2;
+static const GLsizei kNumTextures = 3 * kNumTextureSets;
+
+@implementation RTCOpenGLVideoRenderer {
+#if TARGET_OS_IPHONE
+ EAGLContext *_context;
+#else
+ NSOpenGLContext *_context;
+#endif
+ BOOL _isInitialized;
+ NSUInteger _currentTextureSet;
+ // Handles for OpenGL constructs.
+ GLuint _textures[kNumTextures];
+ GLuint _program;
+#if !TARGET_OS_IPHONE
+ GLuint _vertexArray;
+#endif
+ GLuint _vertexBuffer;
+ GLint _position;
+ GLint _texcoord;
+ GLint _ySampler;
+ GLint _uSampler;
+ GLint _vSampler;
+ // Used to create a non-padded plane for GPU upload when we receive padded
+ // frames.
+ rtc::scoped_ptr<uint8_t[]> _planeBuffer;
+}
+
+@synthesize lastDrawnFrame = _lastDrawnFrame;
+
++ (void)initialize {
+ // Disable dithering for performance.
+ glDisable(GL_DITHER);
+}
+
+#if TARGET_OS_IPHONE
+- (instancetype)initWithContext:(EAGLContext *)context {
+#else
+- (instancetype)initWithContext:(NSOpenGLContext *)context {
+#endif
+ NSAssert(context != nil, @"context cannot be nil");
+ if (self = [super init]) {
+ _context = context;
+ }
+ return self;
+}
+
+- (BOOL)drawFrame:(RTCVideoFrame *)frame {
+ if (!_isInitialized) {
+ return NO;
+ }
+ if (_lastDrawnFrame == frame) {
+ return NO;
+ }
+ [self ensureGLContext];
+ glClear(GL_COLOR_BUFFER_BIT);
+ if (frame) {
+ if (![self updateTextureSizesForFrame:frame] ||
+ ![self updateTextureDataForFrame:frame]) {
+ return NO;
+ }
+#if !TARGET_OS_IPHONE
+ glBindVertexArray(_vertexArray);
+#endif
+ glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
+ glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
+ }
+#if !TARGET_OS_IPHONE
+ [_context flushBuffer];
+#endif
+ _lastDrawnFrame = frame;
+ return YES;
+}
+
+- (void)setupGL {
+ if (_isInitialized) {
+ return;
+ }
+ [self ensureGLContext];
+ if (![self setupProgram]) {
+ return;
+ }
+ if (![self setupTextures]) {
+ return;
+ }
+ if (![self setupVertices]) {
+ return;
+ }
+ glUseProgram(_program);
+ glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
+ _isInitialized = YES;
+}
+
+- (void)teardownGL {
+ if (!_isInitialized) {
+ return;
+ }
+ [self ensureGLContext];
+ glDeleteProgram(_program);
+ _program = 0;
+ glDeleteTextures(kNumTextures, _textures);
+ glDeleteBuffers(1, &_vertexBuffer);
+ _vertexBuffer = 0;
+#if !TARGET_OS_IPHONE
+ glDeleteVertexArrays(1, &_vertexArray);
+#endif
+ _isInitialized = NO;
+}
+
+#pragma mark - Private
+
+- (void)ensureGLContext {
+ NSAssert(_context, @"context shouldn't be nil");
+#if TARGET_OS_IPHONE
+ if ([EAGLContext currentContext] != _context) {
+ [EAGLContext setCurrentContext:_context];
+ }
+#else
+ if ([NSOpenGLContext currentContext] != _context) {
+ [_context makeCurrentContext];
+ }
+#endif
+}
+
+- (BOOL)setupProgram {
+ NSAssert(!_program, @"program already set up");
+ GLuint vertexShader = CreateShader(GL_VERTEX_SHADER, kVertexShaderSource);
+ NSAssert(vertexShader, @"failed to create vertex shader");
+ GLuint fragmentShader =
+ CreateShader(GL_FRAGMENT_SHADER, kFragmentShaderSource);
+ NSAssert(fragmentShader, @"failed to create fragment shader");
+ _program = CreateProgram(vertexShader, fragmentShader);
+ // Shaders are created only to generate program.
+ if (vertexShader) {
+ glDeleteShader(vertexShader);
+ }
+ if (fragmentShader) {
+ glDeleteShader(fragmentShader);
+ }
+ if (!_program) {
+ return NO;
+ }
+ _position = glGetAttribLocation(_program, "position");
+ _texcoord = glGetAttribLocation(_program, "texcoord");
+ _ySampler = glGetUniformLocation(_program, "s_textureY");
+ _uSampler = glGetUniformLocation(_program, "s_textureU");
+ _vSampler = glGetUniformLocation(_program, "s_textureV");
+ if (_position < 0 || _texcoord < 0 || _ySampler < 0 || _uSampler < 0 ||
+ _vSampler < 0) {
+ return NO;
+ }
+ return YES;
+}
+
+- (BOOL)setupTextures {
+ glGenTextures(kNumTextures, _textures);
+ // Set parameters for each of the textures we created.
+ for (GLsizei i = 0; i < kNumTextures; i++) {
+ glActiveTexture(GL_TEXTURE0 + i);
+ glBindTexture(GL_TEXTURE_2D, _textures[i]);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+ }
+ return YES;
+}
+
+- (BOOL)updateTextureSizesForFrame:(RTCVideoFrame *)frame {
+ if (frame.height == _lastDrawnFrame.height &&
+ frame.width == _lastDrawnFrame.width &&
+ frame.chromaWidth == _lastDrawnFrame.chromaWidth &&
+ frame.chromaHeight == _lastDrawnFrame.chromaHeight) {
+ return YES;
+ }
+ GLsizei lumaWidth = frame.width;
+ GLsizei lumaHeight = frame.height;
+ GLsizei chromaWidth = frame.chromaWidth;
+ GLsizei chromaHeight = frame.chromaHeight;
+ for (GLint i = 0; i < kNumTextureSets; i++) {
+ glActiveTexture(GL_TEXTURE0 + i * 3);
+ glTexImage2D(GL_TEXTURE_2D,
+ 0,
+ RTC_PIXEL_FORMAT,
+ lumaWidth,
+ lumaHeight,
+ 0,
+ RTC_PIXEL_FORMAT,
+ GL_UNSIGNED_BYTE,
+ 0);
+ glActiveTexture(GL_TEXTURE0 + i * 3 + 1);
+ glTexImage2D(GL_TEXTURE_2D,
+ 0,
+ RTC_PIXEL_FORMAT,
+ chromaWidth,
+ chromaHeight,
+ 0,
+ RTC_PIXEL_FORMAT,
+ GL_UNSIGNED_BYTE,
+ 0);
+ glActiveTexture(GL_TEXTURE0 + i * 3 + 2);
+ glTexImage2D(GL_TEXTURE_2D,
+ 0,
+ RTC_PIXEL_FORMAT,
+ chromaWidth,
+ chromaHeight,
+ 0,
+ RTC_PIXEL_FORMAT,
+ GL_UNSIGNED_BYTE,
+ 0);
+ }
+ if ((NSUInteger)frame.yPitch != frame.width ||
+ (NSUInteger)frame.uPitch != frame.chromaWidth ||
+ (NSUInteger)frame.vPitch != frame.chromaWidth) {
+ _planeBuffer.reset(new uint8_t[frame.width * frame.height]);
+ } else {
+ _planeBuffer.reset();
+ }
+ return YES;
+}
+
+- (void)uploadPlane:(const uint8_t *)plane
+ sampler:(GLint)sampler
+ offset:(NSUInteger)offset
+ width:(size_t)width
+ height:(size_t)height
+ stride:(int32_t)stride {
+ glActiveTexture(GL_TEXTURE0 + offset);
+ // When setting texture sampler uniforms, the texture index is used, not
+ // the texture handle.
+ glUniform1i(sampler, offset);
+#if TARGET_OS_IPHONE
+ BOOL hasUnpackRowLength = _context.API == kEAGLRenderingAPIOpenGLES3;
+#else
+ BOOL hasUnpackRowLength = YES;
+#endif
+ const uint8_t *uploadPlane = plane;
+ if ((size_t)stride != width) {
+ if (hasUnpackRowLength) {
+ // GLES3 allows us to specify stride.
+ glPixelStorei(GL_UNPACK_ROW_LENGTH, stride);
+ glTexImage2D(GL_TEXTURE_2D,
+ 0,
+ RTC_PIXEL_FORMAT,
+ width,
+ height,
+ 0,
+ RTC_PIXEL_FORMAT,
+ GL_UNSIGNED_BYTE,
+ uploadPlane);
+ glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);
+ return;
+ } else {
+ // Make an unpadded copy and upload that instead. Quick profiling showed
+ // that this is faster than uploading row by row using glTexSubImage2D.
+ uint8_t *unpaddedPlane = _planeBuffer.get();
+ for (size_t y = 0; y < height; ++y) {
+ memcpy(unpaddedPlane + y * width, plane + y * stride, width);
+ }
+ uploadPlane = unpaddedPlane;
+ }
+ }
+ glTexImage2D(GL_TEXTURE_2D,
+ 0,
+ RTC_PIXEL_FORMAT,
+ width,
+ height,
+ 0,
+ RTC_PIXEL_FORMAT,
+ GL_UNSIGNED_BYTE,
+ uploadPlane);
+}
+
+- (BOOL)updateTextureDataForFrame:(RTCVideoFrame *)frame {
+ NSUInteger textureOffset = _currentTextureSet * 3;
+ NSAssert(textureOffset + 3 <= kNumTextures, @"invalid offset");
+
+ [self uploadPlane:frame.yPlane
+ sampler:_ySampler
+ offset:textureOffset
+ width:frame.width
+ height:frame.height
+ stride:frame.yPitch];
+
+ [self uploadPlane:frame.uPlane
+ sampler:_uSampler
+ offset:textureOffset + 1
+ width:frame.chromaWidth
+ height:frame.chromaHeight
+ stride:frame.uPitch];
+
+ [self uploadPlane:frame.vPlane
+ sampler:_vSampler
+ offset:textureOffset + 2
+ width:frame.chromaWidth
+ height:frame.chromaHeight
+ stride:frame.vPitch];
+
+ _currentTextureSet = (_currentTextureSet + 1) % kNumTextureSets;
+ return YES;
+}
+
+- (BOOL)setupVertices {
+#if !TARGET_OS_IPHONE
+ NSAssert(!_vertexArray, @"vertex array already set up");
+ glGenVertexArrays(1, &_vertexArray);
+ if (!_vertexArray) {
+ return NO;
+ }
+ glBindVertexArray(_vertexArray);
+#endif
+ NSAssert(!_vertexBuffer, @"vertex buffer already set up");
+ glGenBuffers(1, &_vertexBuffer);
+ if (!_vertexBuffer) {
+#if !TARGET_OS_IPHONE
+ glDeleteVertexArrays(1, &_vertexArray);
+ _vertexArray = 0;
+#endif
+ return NO;
+ }
+ glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
+ glBufferData(GL_ARRAY_BUFFER, sizeof(gVertices), gVertices, GL_DYNAMIC_DRAW);
+
+ // Read position attribute from |gVertices| with size of 2 and stride of 4
+ // beginning at the start of the array. The last argument indicates offset
+ // of data within |gVertices| as supplied to the vertex buffer.
+ glVertexAttribPointer(
+ _position, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat), (void *)0);
+ glEnableVertexAttribArray(_position);
+
+ // Read texcoord attribute from |gVertices| with size of 2 and stride of 4
+ // beginning at the first texcoord in the array. The last argument indicates
+ // offset of data within |gVertices| as supplied to the vertex buffer.
+ glVertexAttribPointer(_texcoord,
+ 2,
+ GL_FLOAT,
+ GL_FALSE,
+ 4 * sizeof(GLfloat),
+ (void *)(2 * sizeof(GLfloat)));
+ glEnableVertexAttribArray(_texcoord);
+
+ return YES;
+}
+
+@end
diff --git a/webrtc/api/objc/RTCSessionDescription+Private.h b/webrtc/api/objc/RTCSessionDescription+Private.h
new file mode 100644
index 0000000000..aa0314d3f3
--- /dev/null
+++ b/webrtc/api/objc/RTCSessionDescription+Private.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCSessionDescription.h"
+
+#include "talk/app/webrtc/jsep.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCSessionDescription ()
+
+/**
+ * The native SessionDescriptionInterface representation of this
+ * RTCSessionDescription object. This is needed to pass to the underlying C++
+ * APIs.
+ */
+@property(nonatomic, readonly)
+ webrtc::SessionDescriptionInterface *nativeDescription;
+
+/**
+ * Initialize an RTCSessionDescription from a native
+ * SessionDescriptionInterface. No ownership is taken of the native session
+ * description.
+ */
+- (instancetype)initWithNativeDescription:
+ (webrtc::SessionDescriptionInterface *)nativeDescription;
+
++ (std::string)stringForType:(RTCSdpType)type;
+
++ (RTCSdpType)typeForString:(const std::string &)string;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/api/objc/RTCSessionDescription.h b/webrtc/api/objc/RTCSessionDescription.h
new file mode 100644
index 0000000000..5f00b1c9f4
--- /dev/null
+++ b/webrtc/api/objc/RTCSessionDescription.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+/**
+ * Represents the session description type. This exposes the same types that are
+ * in C++, which doesn't include the rollback type that is in the W3C spec.
+ */
+typedef NS_ENUM(NSInteger, RTCSdpType) {
+ RTCSdpTypeOffer,
+ RTCSdpTypePrAnswer,
+ RTCSdpTypeAnswer,
+};
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCSessionDescription : NSObject
+
+/** The type of session description. */
+@property(nonatomic, readonly) RTCSdpType type;
+
+/** The SDP string representation of this session description. */
+@property(nonatomic, readonly) NSString *sdp;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+/** Initialize a session description with a type and SDP string. */
+- (instancetype)initWithType:(RTCSdpType)type sdp:(NSString *)sdp
+ NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
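
A sketch of wrapping a remote offer received over signaling; |remoteSdp| is assumed:

RTCSessionDescription *offer =
    [[RTCSessionDescription alloc] initWithType:RTCSdpTypeOffer
                                            sdp:remoteSdp];
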
diff --git a/webrtc/api/objc/RTCSessionDescription.mm b/webrtc/api/objc/RTCSessionDescription.mm
new file mode 100644
index 0000000000..7ed0760158
--- /dev/null
+++ b/webrtc/api/objc/RTCSessionDescription.mm
@@ -0,0 +1,92 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCSessionDescription.h"
+
+#include "webrtc/base/checks.h"
+
+#import "webrtc/api/objc/RTCSessionDescription+Private.h"
+#import "webrtc/base/objc/NSString+StdString.h"
+#import "webrtc/base/objc/RTCLogging.h"
+
+@implementation RTCSessionDescription
+
+@synthesize type = _type;
+@synthesize sdp = _sdp;
+
+- (instancetype)initWithType:(RTCSdpType)type sdp:(NSString *)sdp {
+ NSParameterAssert(sdp.length);
+ if (self = [super init]) {
+ _type = type;
+ _sdp = [sdp copy];
+ }
+ return self;
+}
+
+- (NSString *)description {
+ return [NSString stringWithFormat:@"RTCSessionDescription:\n%s\n%@",
+ [[self class] stringForType:_type].c_str(),
+ _sdp];
+}
+
+#pragma mark - Private
+
+- (webrtc::SessionDescriptionInterface *)nativeDescription {
+ webrtc::SdpParseError error;
+
+ webrtc::SessionDescriptionInterface *description =
+ webrtc::CreateSessionDescription([[self class] stringForType:_type],
+ _sdp.stdString,
+ &error);
+
+ if (!description) {
+ RTCLogError(@"Failed to create session description: %s\nline: %s",
+ error.description.c_str(),
+ error.line.c_str());
+ }
+
+ return description;
+}
+
+- (instancetype)initWithNativeDescription:
+ (webrtc::SessionDescriptionInterface *)nativeDescription {
+ NSParameterAssert(nativeDescription);
+ std::string sdp;
+ nativeDescription->ToString(&sdp);
+ RTCSdpType type = [[self class] typeForString:nativeDescription->type()];
+
+ return [self initWithType:type
+ sdp:[NSString stringForStdString:sdp]];
+}
+
++ (std::string)stringForType:(RTCSdpType)type {
+ switch (type) {
+ case RTCSdpTypeOffer:
+ return webrtc::SessionDescriptionInterface::kOffer;
+ case RTCSdpTypePrAnswer:
+ return webrtc::SessionDescriptionInterface::kPrAnswer;
+ case RTCSdpTypeAnswer:
+ return webrtc::SessionDescriptionInterface::kAnswer;
+ }
+}
+
++ (RTCSdpType)typeForString:(const std::string &)string {
+ if (string == webrtc::SessionDescriptionInterface::kOffer) {
+ return RTCSdpTypeOffer;
+ } else if (string == webrtc::SessionDescriptionInterface::kPrAnswer) {
+ return RTCSdpTypePrAnswer;
+ } else if (string == webrtc::SessionDescriptionInterface::kAnswer) {
+ return RTCSdpTypeAnswer;
+ } else {
+ RTC_NOTREACHED();
+ // Unreachable, but returning keeps this non-void method well-defined in
+ // builds where RTC_NOTREACHED() compiles to a no-op.
+ return RTCSdpTypeOffer;
+ }
+}
+
+@end
diff --git a/webrtc/api/objc/RTCStatsReport+Private.h b/webrtc/api/objc/RTCStatsReport+Private.h
new file mode 100644
index 0000000000..5b7dc32a74
--- /dev/null
+++ b/webrtc/api/objc/RTCStatsReport+Private.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCStatsReport.h"
+
+#include "talk/app/webrtc/statstypes.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCStatsReport ()
+
+/** Initialize an RTCStatsReport object from a native StatsReport. */
+- (instancetype)initWithNativeReport:(const webrtc::StatsReport &)nativeReport;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/api/objc/RTCStatsReport.h b/webrtc/api/objc/RTCStatsReport.h
new file mode 100644
index 0000000000..fc66faf2cf
--- /dev/null
+++ b/webrtc/api/objc/RTCStatsReport.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** This does not currently conform to the spec. */
+@interface RTCStatsReport : NSObject
+
+/** Time since 1970-01-01T00:00:00Z in milliseconds. */
+@property(nonatomic, readonly) CFTimeInterval timestamp;
+
+/** The type of stats held by this object. */
+@property(nonatomic, readonly) NSString *type;
+
+/** The identifier for this object. */
+@property(nonatomic, readonly) NSString *statsId;
+
+/** A dictionary holding the actual stats. */
+@property(nonatomic, readonly) NSDictionary<NSString *, NSString *> *values;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
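
A sketch of reading a report delivered by a stats callback elsewhere in the API:

[report.values enumerateKeysAndObjectsUsingBlock:^(NSString *key,
                                                   NSString *value,
                                                   BOOL *stop) {
  NSLog(@"%@ (%@): %@ = %@", report.type, report.statsId, key, value);
}];
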
diff --git a/webrtc/api/objc/RTCStatsReport.mm b/webrtc/api/objc/RTCStatsReport.mm
new file mode 100644
index 0000000000..35a5229014
--- /dev/null
+++ b/webrtc/api/objc/RTCStatsReport.mm
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCStatsReport.h"
+
+#include "webrtc/base/checks.h"
+
+#import "webrtc/api/objc/RTCStatsReport+Private.h"
+#import "webrtc/base/objc/NSString+StdString.h"
+#import "webrtc/base/objc/RTCLogging.h"
+
+@implementation RTCStatsReport
+
+@synthesize timestamp = _timestamp;
+@synthesize type = _type;
+@synthesize statsId = _statsId;
+@synthesize values = _values;
+
+- (NSString *)description {
+ return [NSString stringWithFormat:@"RTCStatsReport:\n%@\n%@\n%f\n%@",
+ _statsId,
+ _type,
+ _timestamp,
+ _values];
+}
+
+#pragma mark - Private
+
+- (instancetype)initWithNativeReport:(const webrtc::StatsReport &)nativeReport {
+ if (self = [super init]) {
+ _timestamp = nativeReport.timestamp();
+ _type = [NSString stringForStdString:nativeReport.TypeToString()];
+ _statsId = [NSString stringForStdString:
+ nativeReport.id()->ToString()];
+
+ NSUInteger capacity = nativeReport.values().size();
+ NSMutableDictionary *values =
+ [NSMutableDictionary dictionaryWithCapacity:capacity];
+ for (auto const &valuePair : nativeReport.values()) {
+ NSString *key = [NSString stringForStdString:
+ valuePair.second->display_name()];
+ NSString *value = [NSString stringForStdString:
+ valuePair.second->ToString()];
+
+      // Duplicate keys are not expected, so the entry must not exist yet.
+      RTC_DCHECK(!values[key]);
+
+ values[key] = value;
+ }
+ _values = values;
+ }
+ return self;
+}
+
+@end
diff --git a/webrtc/api/objc/RTCVideoFrame+Private.h b/webrtc/api/objc/RTCVideoFrame+Private.h
new file mode 100644
index 0000000000..954344aee1
--- /dev/null
+++ b/webrtc/api/objc/RTCVideoFrame+Private.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoFrame.h"
+
+#include "talk/media/base/videoframe.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCVideoFrame ()
+
+- (instancetype)initWithNativeFrame:(const cricket::VideoFrame *)nativeFrame
+ NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/api/objc/RTCVideoFrame.h b/webrtc/api/objc/RTCVideoFrame.h
new file mode 100644
index 0000000000..8ed23ba82c
--- /dev/null
+++ b/webrtc/api/objc/RTCVideoFrame.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCVideoFrame : NSObject
+
+/** Width without rotation applied. */
+@property(nonatomic, readonly) size_t width;
+
+/** Height without rotation applied. */
+@property(nonatomic, readonly) size_t height;
+
+/** Width of the chroma planes. */
+@property(nonatomic, readonly) size_t chromaWidth;
+/** Height of the chroma planes. */
+@property(nonatomic, readonly) size_t chromaHeight;
+/** Size in bytes of a single chroma plane. */
+@property(nonatomic, readonly) size_t chromaSize;
+
+/** Raw plane data. These can return NULL if the object is not backed by a
+ *  buffer. */
+@property(nonatomic, readonly, nullable) const uint8_t *yPlane;
+@property(nonatomic, readonly, nullable) const uint8_t *uPlane;
+@property(nonatomic, readonly, nullable) const uint8_t *vPlane;
+
+/** Strides (bytes per row) of the corresponding planes. */
+@property(nonatomic, readonly) int32_t yPitch;
+@property(nonatomic, readonly) int32_t uPitch;
+@property(nonatomic, readonly) int32_t vPitch;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
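+
+/**
+ * A minimal sketch of reading the luma plane, assuming the frame is backed by
+ * a buffer (yPlane non-NULL). CopyLumaPlane is a hypothetical helper, not
+ * part of this API:
+ *
+ *   void CopyLumaPlane(RTCVideoFrame *frame, uint8_t *dst) {
+ *     const uint8_t *src = frame.yPlane;
+ *     for (size_t row = 0; row < frame.height; ++row) {
+ *       // Rows are yPitch bytes apart; only the first `width` bytes of a
+ *       // row are pixel data.
+ *       memcpy(dst + row * frame.width, src + row * frame.yPitch, frame.width);
+ *     }
+ *   }
+ */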
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/api/objc/RTCVideoFrame.mm b/webrtc/api/objc/RTCVideoFrame.mm
new file mode 100644
index 0000000000..db2d07ba31
--- /dev/null
+++ b/webrtc/api/objc/RTCVideoFrame.mm
@@ -0,0 +1,79 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoFrame.h"
+
+#include "webrtc/base/scoped_ptr.h"
+
+#import "webrtc/api/objc/RTCVideoFrame+Private.h"
+
+@implementation RTCVideoFrame {
+ rtc::scoped_ptr<cricket::VideoFrame> _videoFrame;
+}
+
+- (size_t)width {
+ return _videoFrame->GetWidth();
+}
+
+- (size_t)height {
+ return _videoFrame->GetHeight();
+}
+
+- (size_t)chromaWidth {
+ return _videoFrame->GetChromaWidth();
+}
+
+- (size_t)chromaHeight {
+ return _videoFrame->GetChromaHeight();
+}
+
+- (size_t)chromaSize {
+ return _videoFrame->GetChromaSize();
+}
+
+- (const uint8_t *)yPlane {
+ const cricket::VideoFrame *const_frame = _videoFrame.get();
+ return const_frame->GetYPlane();
+}
+
+- (const uint8_t *)uPlane {
+ const cricket::VideoFrame *const_frame = _videoFrame.get();
+ return const_frame->GetUPlane();
+}
+
+- (const uint8_t *)vPlane {
+ const cricket::VideoFrame *const_frame = _videoFrame.get();
+ return const_frame->GetVPlane();
+}
+
+- (int32_t)yPitch {
+ return _videoFrame->GetYPitch();
+}
+
+- (int32_t)uPitch {
+ return _videoFrame->GetUPitch();
+}
+
+- (int32_t)vPitch {
+ return _videoFrame->GetVPitch();
+}
+
+#pragma mark - Private
+
+- (instancetype)initWithNativeFrame:(const cricket::VideoFrame *)nativeFrame {
+ if (self = [super init]) {
+ // Keep a shallow copy of the video frame. The underlying frame buffer is
+ // not copied.
+ _videoFrame.reset(nativeFrame->Copy());
+ }
+ return self;
+}
+
+@end
diff --git a/webrtc/api/objc/RTCVideoRenderer.h b/webrtc/api/objc/RTCVideoRenderer.h
new file mode 100644
index 0000000000..a97456275a
--- /dev/null
+++ b/webrtc/api/objc/RTCVideoRenderer.h
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#if TARGET_OS_IPHONE
+#import <UIKit/UIKit.h>
+#endif
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTCVideoFrame;
+
+@protocol RTCVideoRenderer <NSObject>
+
+/** Called when the size of the frames to be rendered changes. */
+- (void)setSize:(CGSize)size;
+
+/** Called when a frame is ready to be rendered. */
+- (void)renderFrame:(RTCVideoFrame *)frame;
+
+@end
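+
+/**
+ * A minimal conforming renderer, shown only as a sketch; LoggingRenderer is a
+ * hypothetical class, not part of this API:
+ *
+ *   @interface LoggingRenderer : NSObject <RTCVideoRenderer>
+ *   @end
+ *
+ *   @implementation LoggingRenderer
+ *   - (void)setSize:(CGSize)size {
+ *     NSLog(@"Frame size is now %.0fx%.0f", size.width, size.height);
+ *   }
+ *   - (void)renderFrame:(RTCVideoFrame *)frame {
+ *     // Consume frame.yPlane / frame.uPlane / frame.vPlane here.
+ *   }
+ *   @end
+ */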
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/api/objc/WebRTC-Prefix.pch b/webrtc/api/objc/WebRTC-Prefix.pch
new file mode 100644
index 0000000000..990b1602da
--- /dev/null
+++ b/webrtc/api/objc/WebRTC-Prefix.pch
@@ -0,0 +1,13 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
diff --git a/webrtc/api/objctests/RTCIceCandidateTest.mm b/webrtc/api/objctests/RTCIceCandidateTest.mm
new file mode 100644
index 0000000000..391db44ae1
--- /dev/null
+++ b/webrtc/api/objctests/RTCIceCandidateTest.mm
@@ -0,0 +1,74 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#include "webrtc/base/gunit.h"
+
+#import "webrtc/api/objc/RTCIceCandidate.h"
+#import "webrtc/api/objc/RTCIceCandidate+Private.h"
+#import "webrtc/base/objc/NSString+StdString.h"
+
+@interface RTCIceCandidateTest : NSObject
+- (void)testCandidate;
+- (void)testInitFromNativeCandidate;
+@end
+
+@implementation RTCIceCandidateTest
+
+- (void)testCandidate {
+ NSString *sdp = @"candidate:4025901590 1 udp 2122265343 "
+ "fdff:2642:12a6:fe38:c001:beda:fcf9:51aa "
+ "59052 typ host generation 0";
+
+ RTCIceCandidate *candidate = [[RTCIceCandidate alloc] initWithSdp:sdp
+ sdpMLineIndex:0
+ sdpMid:@"audio"];
+
+ rtc::scoped_ptr<webrtc::IceCandidateInterface> nativeCandidate =
+ candidate.nativeCandidate;
+ EXPECT_EQ("audio", nativeCandidate->sdp_mid());
+ EXPECT_EQ(0, nativeCandidate->sdp_mline_index());
+
+ std::string sdpString;
+ nativeCandidate->ToString(&sdpString);
+ EXPECT_EQ(sdp.stdString, sdpString);
+}
+
+- (void)testInitFromNativeCandidate {
+ std::string sdp("candidate:4025901590 1 udp 2122265343 "
+ "fdff:2642:12a6:fe38:c001:beda:fcf9:51aa "
+ "59052 typ host generation 0");
+ webrtc::IceCandidateInterface *nativeCandidate =
+ webrtc::CreateIceCandidate("audio", 0, sdp, nullptr);
+
+ RTCIceCandidate *iceCandidate =
+ [[RTCIceCandidate alloc] initWithNativeCandidate:nativeCandidate];
+ EXPECT_TRUE([@"audio" isEqualToString:iceCandidate.sdpMid]);
+ EXPECT_EQ(0, iceCandidate.sdpMLineIndex);
+
+ EXPECT_EQ(sdp, iceCandidate.sdp.stdString);
+}
+
+@end
+
+TEST(RTCIceCandidateTest, CandidateTest) {
+ @autoreleasepool {
+ RTCIceCandidateTest *test = [[RTCIceCandidateTest alloc] init];
+ [test testCandidate];
+ }
+}
+
+TEST(RTCIceCandidateTest, InitFromCandidateTest) {
+ @autoreleasepool {
+ RTCIceCandidateTest *test = [[RTCIceCandidateTest alloc] init];
+ [test testInitFromNativeCandidate];
+ }
+}
diff --git a/webrtc/api/objctests/RTCIceServerTest.mm b/webrtc/api/objctests/RTCIceServerTest.mm
new file mode 100644
index 0000000000..5fa43f8447
--- /dev/null
+++ b/webrtc/api/objctests/RTCIceServerTest.mm
@@ -0,0 +1,84 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#include <vector>
+
+#include "webrtc/base/gunit.h"
+
+#import "webrtc/api/objc/RTCIceServer.h"
+#import "webrtc/api/objc/RTCIceServer+Private.h"
+
+@interface RTCIceServerTest : NSObject
+- (void)testOneURLServer;
+- (void)testTwoURLServer;
+- (void)testPasswordCredential;
+@end
+
+@implementation RTCIceServerTest
+
+- (void)testOneURLServer {
+ RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:@[
+ @"stun:stun1.example.net" ]];
+
+ webrtc::PeerConnectionInterface::IceServer iceStruct = server.iceServer;
+ EXPECT_EQ((size_t)1, iceStruct.urls.size());
+ EXPECT_EQ("stun:stun1.example.net", iceStruct.urls.front());
+ EXPECT_EQ("", iceStruct.username);
+ EXPECT_EQ("", iceStruct.password);
+}
+
+- (void)testTwoURLServer {
+ RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:@[
+ @"turn1:turn1.example.net", @"turn2:turn2.example.net" ]];
+
+ webrtc::PeerConnectionInterface::IceServer iceStruct = server.iceServer;
+ EXPECT_EQ((size_t)2, iceStruct.urls.size());
+ EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front());
+ EXPECT_EQ("turn2:turn2.example.net", iceStruct.urls.back());
+ EXPECT_EQ("", iceStruct.username);
+ EXPECT_EQ("", iceStruct.password);
+}
+
+- (void)testPasswordCredential {
+ RTCIceServer *server = [[RTCIceServer alloc]
+ initWithURLStrings:@[ @"turn1:turn1.example.net" ]
+ username:@"username"
+ credential:@"credential"];
+ webrtc::PeerConnectionInterface::IceServer iceStruct = server.iceServer;
+ EXPECT_EQ((size_t)1, iceStruct.urls.size());
+ EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front());
+ EXPECT_EQ("username", iceStruct.username);
+ EXPECT_EQ("credential", iceStruct.password);
+}
+
+@end
+
+TEST(RTCIceServerTest, OneURLTest) {
+ @autoreleasepool {
+ RTCIceServerTest *test = [[RTCIceServerTest alloc] init];
+ [test testOneURLServer];
+ }
+}
+
+TEST(RTCIceServerTest, TwoURLTest) {
+ @autoreleasepool {
+ RTCIceServerTest *test = [[RTCIceServerTest alloc] init];
+ [test testTwoURLServer];
+ }
+}
+
+TEST(RTCIceServerTest, PasswordCredentialTest) {
+ @autoreleasepool {
+ RTCIceServerTest *test = [[RTCIceServerTest alloc] init];
+ [test testPasswordCredential];
+ }
+}
diff --git a/webrtc/api/objctests/RTCMediaConstraintsTest.mm b/webrtc/api/objctests/RTCMediaConstraintsTest.mm
new file mode 100644
index 0000000000..44ffe3d033
--- /dev/null
+++ b/webrtc/api/objctests/RTCMediaConstraintsTest.mm
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#include "webrtc/base/gunit.h"
+
+#import "webrtc/api/objc/RTCMediaConstraints.h"
+#import "webrtc/api/objc/RTCMediaConstraints+Private.h"
+#import "webrtc/base/objc/NSString+StdString.h"
+
+@interface RTCMediaConstraintsTest : NSObject
+- (void)testMediaConstraints;
+@end
+
+@implementation RTCMediaConstraintsTest
+
+- (void)testMediaConstraints {
+ NSDictionary *mandatory = @{@"key1": @"value1", @"key2": @"value2"};
+ NSDictionary *optional = @{@"key3": @"value3", @"key4": @"value4"};
+
+ RTCMediaConstraints *constraints = [[RTCMediaConstraints alloc]
+ initWithMandatoryConstraints:mandatory
+ optionalConstraints:optional];
+ rtc::scoped_ptr<webrtc::MediaConstraints> nativeConstraints =
+ [constraints nativeConstraints];
+
+ webrtc::MediaConstraintsInterface::Constraints nativeMandatory =
+ nativeConstraints->GetMandatory();
+ [self expectConstraints:mandatory inNativeConstraints:nativeMandatory];
+
+ webrtc::MediaConstraintsInterface::Constraints nativeOptional =
+ nativeConstraints->GetOptional();
+ [self expectConstraints:optional inNativeConstraints:nativeOptional];
+}
+
+- (void)expectConstraints:(NSDictionary *)constraints
+ inNativeConstraints:
+ (webrtc::MediaConstraintsInterface::Constraints)nativeConstraints {
+ EXPECT_EQ(constraints.count, nativeConstraints.size());
+
+ for (NSString *key in constraints) {
+ NSString *value = constraints[key];
+
+ std::string nativeValue;
+ bool found = nativeConstraints.FindFirst(key.stdString, &nativeValue);
+ EXPECT_TRUE(found);
+ EXPECT_EQ(value.stdString, nativeValue);
+ }
+}
+
+@end
+
+TEST(RTCMediaConstraintsTest, MediaConstraintsTest) {
+ @autoreleasepool {
+ RTCMediaConstraintsTest *test = [[RTCMediaConstraintsTest alloc] init];
+ [test testMediaConstraints];
+ }
+}
diff --git a/webrtc/api/objctests/RTCSessionDescriptionTest.mm b/webrtc/api/objctests/RTCSessionDescriptionTest.mm
new file mode 100644
index 0000000000..2404dedd3a
--- /dev/null
+++ b/webrtc/api/objctests/RTCSessionDescriptionTest.mm
@@ -0,0 +1,144 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#include "webrtc/base/gunit.h"
+
+#import "webrtc/api/objc/RTCSessionDescription.h"
+#import "webrtc/api/objc/RTCSessionDescription+Private.h"
+#import "webrtc/base/objc/NSString+StdString.h"
+
+@interface RTCSessionDescriptionTest : NSObject
+- (void)testSessionDescriptionConversion;
+- (void)testInitFromNativeSessionDescription;
+@end
+
+@implementation RTCSessionDescriptionTest
+
+/**
+ * Tests conversion of an Objective-C RTCSessionDescription to a native
+ * SessionDescriptionInterface, verifying that the type and the SDP string are
+ * preserved.
+ */
+- (void)testSessionDescriptionConversion {
+ RTCSessionDescription *description =
+ [[RTCSessionDescription alloc] initWithType:RTCSdpTypeAnswer
+ sdp:[self sdp]];
+
+ webrtc::SessionDescriptionInterface *nativeDescription =
+ description.nativeDescription;
+
+ EXPECT_EQ(RTCSdpTypeAnswer,
+ [RTCSessionDescription typeForString:nativeDescription->type()]);
+
+ std::string sdp;
+ nativeDescription->ToString(&sdp);
+ EXPECT_EQ([self sdp].stdString, sdp);
+}
+
+- (void)testInitFromNativeSessionDescription {
+ webrtc::SessionDescriptionInterface *nativeDescription;
+
+ nativeDescription = webrtc::CreateSessionDescription(
+ webrtc::SessionDescriptionInterface::kAnswer,
+ [self sdp].stdString,
+ nullptr);
+
+ RTCSessionDescription *description =
+ [[RTCSessionDescription alloc] initWithNativeDescription:
+ nativeDescription];
+ EXPECT_EQ(webrtc::SessionDescriptionInterface::kAnswer,
+ [RTCSessionDescription stringForType:description.type]);
+ EXPECT_TRUE([[self sdp] isEqualToString:description.sdp]);
+}
+
+- (NSString *)sdp {
+ return @"v=0\r\n"
+ "o=- 5319989746393411314 2 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=group:BUNDLE audio video\r\n"
+ "a=msid-semantic: WMS ARDAMS\r\n"
+ "m=audio 9 UDP/TLS/RTP/SAVPF 111 103 9 0 8 126\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+ "a=ice-ufrag:f3o+0HG7l9nwIWFY\r\n"
+ "a=ice-pwd:VDctmJNCptR2TB7+meDpw7w5\r\n"
+ "a=fingerprint:sha-256 A9:D5:8D:A8:69:22:39:60:92:AD:94:1A:22:2D:5E:"
+ "A5:4A:A9:18:C2:35:5D:46:5E:59:BD:1C:AF:38:9F:E6:E1\r\n"
+ "a=setup:active\r\n"
+ "a=mid:audio\r\n"
+ "a=extmap:1 urn:ietf:params:rtp-hdrext:ssrc-audio-level\r\n"
+ "a=extmap:3 http://www.webrtc.org/experiments/rtp-hdrext/"
+ "abs-send-time\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n"
+ "a=fmtp:111 minptime=10; useinbandfec=1\r\n"
+ "a=rtpmap:103 ISAC/16000\r\n"
+ "a=rtpmap:9 G722/8000\r\n"
+ "a=rtpmap:0 PCMU/8000\r\n"
+ "a=rtpmap:8 PCMA/8000\r\n"
+ "a=rtpmap:126 telephone-event/8000\r\n"
+ "a=maxptime:60\r\n"
+ "a=ssrc:1504474588 cname:V+FdIC5AJpxLhdYQ\r\n"
+ "a=ssrc:1504474588 msid:ARDAMS ARDAMSa0\r\n"
+ "a=ssrc:1504474588 mslabel:ARDAMS\r\n"
+ "a=ssrc:1504474588 label:ARDAMSa0\r\n"
+ "m=video 9 UDP/TLS/RTP/SAVPF 100 116 117 96\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+ "a=ice-ufrag:f3o+0HG7l9nwIWFY\r\n"
+ "a=ice-pwd:VDctmJNCptR2TB7+meDpw7w5\r\n"
+ "a=fingerprint:sha-256 A9:D5:8D:A8:69:22:39:60:92:AD:94:1A:22:2D:5E:"
+ "A5:4A:A9:18:C2:35:5D:46:5E:59:BD:1C:AF:38:9F:E6:E1\r\n"
+ "a=setup:active\r\n"
+ "a=mid:video\r\n"
+ "a=extmap:2 urn:ietf:params:rtp-hdrext:toffset\r\n"
+ "a=extmap:3 http://www.webrtc.org/experiments/rtp-hdrext/"
+ "abs-send-time\r\n"
+ "a=extmap:4 urn:3gpp:video-orientation\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtpmap:100 VP8/90000\r\n"
+ "a=rtcp-fb:100 ccm fir\r\n"
+ "a=rtcp-fb:100 nack\r\n"
+ "a=rtcp-fb:100 nack pli\r\n"
+ "a=rtcp-fb:100 goog-remb\r\n"
+ "a=rtpmap:116 red/90000\r\n"
+ "a=rtpmap:117 ulpfec/90000\r\n"
+ "a=rtpmap:96 rtx/90000\r\n"
+ "a=fmtp:96 apt=100\r\n"
+ "a=ssrc-group:FID 498297514 1644357692\r\n"
+ "a=ssrc:498297514 cname:V+FdIC5AJpxLhdYQ\r\n"
+ "a=ssrc:498297514 msid:ARDAMS ARDAMSv0\r\n"
+ "a=ssrc:498297514 mslabel:ARDAMS\r\n"
+ "a=ssrc:498297514 label:ARDAMSv0\r\n"
+ "a=ssrc:1644357692 cname:V+FdIC5AJpxLhdYQ\r\n"
+ "a=ssrc:1644357692 msid:ARDAMS ARDAMSv0\r\n"
+ "a=ssrc:1644357692 mslabel:ARDAMS\r\n"
+ "a=ssrc:1644357692 label:ARDAMSv0\r\n";
+}
+
+@end
+
+TEST(RTCSessionDescriptionTest, SessionDescriptionConversionTest) {
+ @autoreleasepool {
+ RTCSessionDescriptionTest *test = [[RTCSessionDescriptionTest alloc] init];
+ [test testSessionDescriptionConversion];
+ }
+}
+
+TEST(RTCSessionDescriptionTest, InitFromSessionDescriptionTest) {
+ @autoreleasepool {
+ RTCSessionDescriptionTest *test = [[RTCSessionDescriptionTest alloc] init];
+ [test testInitFromNativeSessionDescription];
+ }
+}
diff --git a/webrtc/audio/BUILD.gn b/webrtc/audio/BUILD.gn
index d5061db9dc..5a9902eac1 100644
--- a/webrtc/audio/BUILD.gn
+++ b/webrtc/audio/BUILD.gn
@@ -14,6 +14,9 @@ source_set("audio") {
"audio_receive_stream.h",
"audio_send_stream.cc",
"audio_send_stream.h",
+ "audio_sink.h",
+ "audio_state.cc",
+ "audio_state.h",
"conversion.h",
"scoped_voe_interface.h",
]
@@ -29,7 +32,7 @@ source_set("audio") {
deps = [
"..:webrtc_common",
- "../voice_engine",
"../system_wrappers",
+ "../voice_engine",
]
}
diff --git a/webrtc/audio/audio_receive_stream.cc b/webrtc/audio/audio_receive_stream.cc
index 34197c3ff7..64d008326d 100644
--- a/webrtc/audio/audio_receive_stream.cc
+++ b/webrtc/audio/audio_receive_stream.cc
@@ -11,20 +11,41 @@
#include "webrtc/audio/audio_receive_stream.h"
#include <string>
+#include <utility>
+#include "webrtc/audio/audio_sink.h"
+#include "webrtc/audio/audio_state.h"
#include "webrtc/audio/conversion.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
+#include "webrtc/call/congestion_controller.h"
#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/voice_engine/channel_proxy.h"
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_codec.h"
#include "webrtc/voice_engine/include/voe_neteq_stats.h"
#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
#include "webrtc/voice_engine/include/voe_video_sync.h"
#include "webrtc/voice_engine/include/voe_volume_control.h"
+#include "webrtc/voice_engine/voice_engine_impl.h"
namespace webrtc {
+namespace {
+
+bool UseSendSideBwe(const webrtc::AudioReceiveStream::Config& config) {
+ if (!config.rtp.transport_cc) {
+ return false;
+ }
+ for (const auto& extension : config.rtp.extensions) {
+ if (extension.name == RtpExtension::kTransportSequenceNumber) {
+ return true;
+ }
+ }
+ return false;
+}
+} // namespace
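+
+// For reference, UseSendSideBwe() returns true only when the transport_cc
+// flag is set and the transport sequence number extension is registered. A
+// config sketch that satisfies both (extension id 5 is an arbitrary choice
+// for illustration):
+//
+//   webrtc::AudioReceiveStream::Config config;
+//   config.rtp.transport_cc = true;
+//   config.rtp.extensions.push_back(
+//       RtpExtension(RtpExtension::kTransportSequenceNumber, 5));
+//   RTC_DCHECK(UseSendSideBwe(config));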
+
std::string AudioReceiveStream::Config::Rtp::ToString() const {
std::stringstream ss;
ss << "{remote_ssrc: " << remote_ssrc;
@@ -60,120 +81,62 @@ std::string AudioReceiveStream::Config::ToString() const {
namespace internal {
AudioReceiveStream::AudioReceiveStream(
- RemoteBitrateEstimator* remote_bitrate_estimator,
- const webrtc::AudioReceiveStream::Config& config,
- VoiceEngine* voice_engine)
- : remote_bitrate_estimator_(remote_bitrate_estimator),
- config_(config),
- voice_engine_(voice_engine),
- voe_base_(voice_engine),
+ CongestionController* congestion_controller,
+ const webrtc::AudioReceiveStream::Config& config,
+ const rtc::scoped_refptr<webrtc::AudioState>& audio_state)
+ : config_(config),
+ audio_state_(audio_state),
rtp_header_parser_(RtpHeaderParser::Create()) {
LOG(LS_INFO) << "AudioReceiveStream: " << config_.ToString();
- RTC_DCHECK(config.voe_channel_id != -1);
- RTC_DCHECK(remote_bitrate_estimator_ != nullptr);
- RTC_DCHECK(voice_engine_ != nullptr);
- RTC_DCHECK(rtp_header_parser_ != nullptr);
- for (const auto& ext : config.rtp.extensions) {
- // One-byte-extension local identifiers are in the range 1-14 inclusive.
- RTC_DCHECK_GE(ext.id, 1);
- RTC_DCHECK_LE(ext.id, 14);
- if (ext.name == RtpExtension::kAudioLevel) {
- RTC_CHECK(rtp_header_parser_->RegisterRtpHeaderExtension(
- kRtpExtensionAudioLevel, ext.id));
- } else if (ext.name == RtpExtension::kAbsSendTime) {
- RTC_CHECK(rtp_header_parser_->RegisterRtpHeaderExtension(
- kRtpExtensionAbsoluteSendTime, ext.id));
- } else if (ext.name == RtpExtension::kTransportSequenceNumber) {
- RTC_CHECK(rtp_header_parser_->RegisterRtpHeaderExtension(
- kRtpExtensionTransportSequenceNumber, ext.id));
+ RTC_DCHECK_NE(config_.voe_channel_id, -1);
+ RTC_DCHECK(audio_state_.get());
+ RTC_DCHECK(congestion_controller);
+ RTC_DCHECK(rtp_header_parser_);
+
+ VoiceEngineImpl* voe_impl = static_cast<VoiceEngineImpl*>(voice_engine());
+ channel_proxy_ = voe_impl->GetChannelProxy(config_.voe_channel_id);
+ channel_proxy_->SetLocalSSRC(config.rtp.local_ssrc);
+ for (const auto& extension : config.rtp.extensions) {
+ if (extension.name == RtpExtension::kAudioLevel) {
+ channel_proxy_->SetReceiveAudioLevelIndicationStatus(true, extension.id);
+ bool registered = rtp_header_parser_->RegisterRtpHeaderExtension(
+ kRtpExtensionAudioLevel, extension.id);
+ RTC_DCHECK(registered);
+ } else if (extension.name == RtpExtension::kAbsSendTime) {
+ channel_proxy_->SetReceiveAbsoluteSenderTimeStatus(true, extension.id);
+ bool registered = rtp_header_parser_->RegisterRtpHeaderExtension(
+ kRtpExtensionAbsoluteSendTime, extension.id);
+ RTC_DCHECK(registered);
+ } else if (extension.name == RtpExtension::kTransportSequenceNumber) {
+ bool registered = rtp_header_parser_->RegisterRtpHeaderExtension(
+ kRtpExtensionTransportSequenceNumber, extension.id);
+ RTC_DCHECK(registered);
} else {
RTC_NOTREACHED() << "Unsupported RTP extension.";
}
}
+ // Configure bandwidth estimation.
+ channel_proxy_->SetCongestionControlObjects(
+ nullptr, nullptr, congestion_controller->packet_router());
+ if (config.combined_audio_video_bwe) {
+ if (UseSendSideBwe(config)) {
+ remote_bitrate_estimator_ =
+ congestion_controller->GetRemoteBitrateEstimator(true);
+ } else {
+ remote_bitrate_estimator_ =
+ congestion_controller->GetRemoteBitrateEstimator(false);
+ }
+ RTC_DCHECK(remote_bitrate_estimator_);
+ }
}
AudioReceiveStream::~AudioReceiveStream() {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
LOG(LS_INFO) << "~AudioReceiveStream: " << config_.ToString();
-}
-
-webrtc::AudioReceiveStream::Stats AudioReceiveStream::GetStats() const {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
- webrtc::AudioReceiveStream::Stats stats;
- stats.remote_ssrc = config_.rtp.remote_ssrc;
- ScopedVoEInterface<VoECodec> codec(voice_engine_);
- ScopedVoEInterface<VoENetEqStats> neteq(voice_engine_);
- ScopedVoEInterface<VoERTP_RTCP> rtp(voice_engine_);
- ScopedVoEInterface<VoEVideoSync> sync(voice_engine_);
- ScopedVoEInterface<VoEVolumeControl> volume(voice_engine_);
- unsigned int ssrc = 0;
- webrtc::CallStatistics call_stats = {0};
- webrtc::CodecInst codec_inst = {0};
- // Only collect stats if we have seen some traffic with the SSRC.
- if (rtp->GetRemoteSSRC(config_.voe_channel_id, ssrc) == -1 ||
- rtp->GetRTCPStatistics(config_.voe_channel_id, call_stats) == -1 ||
- codec->GetRecCodec(config_.voe_channel_id, codec_inst) == -1) {
- return stats;
+ channel_proxy_->SetCongestionControlObjects(nullptr, nullptr, nullptr);
+ if (remote_bitrate_estimator_) {
+ remote_bitrate_estimator_->RemoveStream(config_.rtp.remote_ssrc);
}
-
- stats.bytes_rcvd = call_stats.bytesReceived;
- stats.packets_rcvd = call_stats.packetsReceived;
- stats.packets_lost = call_stats.cumulativeLost;
- stats.fraction_lost = Q8ToFloat(call_stats.fractionLost);
- if (codec_inst.pltype != -1) {
- stats.codec_name = codec_inst.plname;
- }
- stats.ext_seqnum = call_stats.extendedMax;
- if (codec_inst.plfreq / 1000 > 0) {
- stats.jitter_ms = call_stats.jitterSamples / (codec_inst.plfreq / 1000);
- }
- {
- int jitter_buffer_delay_ms = 0;
- int playout_buffer_delay_ms = 0;
- sync->GetDelayEstimate(config_.voe_channel_id, &jitter_buffer_delay_ms,
- &playout_buffer_delay_ms);
- stats.delay_estimate_ms =
- jitter_buffer_delay_ms + playout_buffer_delay_ms;
- }
- {
- unsigned int level = 0;
- if (volume->GetSpeechOutputLevelFullRange(config_.voe_channel_id, level)
- != -1) {
- stats.audio_level = static_cast<int32_t>(level);
- }
- }
-
- webrtc::NetworkStatistics ns = {0};
- if (neteq->GetNetworkStatistics(config_.voe_channel_id, ns) != -1) {
- // Get jitter buffer and total delay (alg + jitter + playout) stats.
- stats.jitter_buffer_ms = ns.currentBufferSize;
- stats.jitter_buffer_preferred_ms = ns.preferredBufferSize;
- stats.expand_rate = Q14ToFloat(ns.currentExpandRate);
- stats.speech_expand_rate = Q14ToFloat(ns.currentSpeechExpandRate);
- stats.secondary_decoded_rate = Q14ToFloat(ns.currentSecondaryDecodedRate);
- stats.accelerate_rate = Q14ToFloat(ns.currentAccelerateRate);
- stats.preemptive_expand_rate = Q14ToFloat(ns.currentPreemptiveRate);
- }
-
- webrtc::AudioDecodingCallStats ds;
- if (neteq->GetDecodingCallStatistics(config_.voe_channel_id, &ds) != -1) {
- stats.decoding_calls_to_silence_generator =
- ds.calls_to_silence_generator;
- stats.decoding_calls_to_neteq = ds.calls_to_neteq;
- stats.decoding_normal = ds.decoded_normal;
- stats.decoding_plc = ds.decoded_plc;
- stats.decoding_cng = ds.decoded_cng;
- stats.decoding_plc_cng = ds.decoded_plc_cng;
- }
-
- stats.capture_start_ntp_time_ms = call_stats.capture_start_ntp_time_ms_;
-
- return stats;
-}
-
-const webrtc::AudioReceiveStream::Config& AudioReceiveStream::config() const {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
- return config_;
}
void AudioReceiveStream::Start() {
@@ -204,15 +167,16 @@ bool AudioReceiveStream::DeliverRtp(const uint8_t* packet,
// thread. Then this check can be enabled.
// RTC_DCHECK(!thread_checker_.CalledOnValidThread());
RTPHeader header;
-
if (!rtp_header_parser_->Parse(packet, length, &header)) {
return false;
}
- // Only forward if the parsed header has absolute sender time. RTP timestamps
- // may have different rates for audio and video and shouldn't be mixed.
- if (config_.combined_audio_video_bwe &&
- header.extension.hasAbsoluteSendTime) {
+  // Only forward if the parsed header has one of the extensions necessary for
+  // bandwidth estimation. RTP timestamps may have different rates for audio
+  // and video and shouldn't be mixed.
+ if (remote_bitrate_estimator_ &&
+ (header.extension.hasAbsoluteSendTime ||
+ header.extension.hasTransportSequenceNumber)) {
int64_t arrival_time_ms = TickTime::MillisecondTimestamp();
if (packet_time.timestamp >= 0)
arrival_time_ms = (packet_time.timestamp + 500) / 1000;
@@ -222,5 +186,71 @@ bool AudioReceiveStream::DeliverRtp(const uint8_t* packet,
}
return true;
}
+
+webrtc::AudioReceiveStream::Stats AudioReceiveStream::GetStats() const {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ webrtc::AudioReceiveStream::Stats stats;
+ stats.remote_ssrc = config_.rtp.remote_ssrc;
+ ScopedVoEInterface<VoECodec> codec(voice_engine());
+
+ webrtc::CallStatistics call_stats = channel_proxy_->GetRTCPStatistics();
+ webrtc::CodecInst codec_inst = {0};
+ if (codec->GetRecCodec(config_.voe_channel_id, codec_inst) == -1) {
+ return stats;
+ }
+
+ stats.bytes_rcvd = call_stats.bytesReceived;
+ stats.packets_rcvd = call_stats.packetsReceived;
+ stats.packets_lost = call_stats.cumulativeLost;
+ stats.fraction_lost = Q8ToFloat(call_stats.fractionLost);
+ stats.capture_start_ntp_time_ms = call_stats.capture_start_ntp_time_ms_;
+ if (codec_inst.pltype != -1) {
+ stats.codec_name = codec_inst.plname;
+ }
+ stats.ext_seqnum = call_stats.extendedMax;
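+  // Convert jitter from samples to milliseconds: e.g. at a 48000 Hz payload
+  // frequency, 480 samples of jitter map to 480 / (48000 / 1000) = 10 ms.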
+ if (codec_inst.plfreq / 1000 > 0) {
+ stats.jitter_ms = call_stats.jitterSamples / (codec_inst.plfreq / 1000);
+ }
+ stats.delay_estimate_ms = channel_proxy_->GetDelayEstimate();
+ stats.audio_level = channel_proxy_->GetSpeechOutputLevelFullRange();
+
+ // Get jitter buffer and total delay (alg + jitter + playout) stats.
+ auto ns = channel_proxy_->GetNetworkStatistics();
+ stats.jitter_buffer_ms = ns.currentBufferSize;
+ stats.jitter_buffer_preferred_ms = ns.preferredBufferSize;
+ stats.expand_rate = Q14ToFloat(ns.currentExpandRate);
+ stats.speech_expand_rate = Q14ToFloat(ns.currentSpeechExpandRate);
+ stats.secondary_decoded_rate = Q14ToFloat(ns.currentSecondaryDecodedRate);
+ stats.accelerate_rate = Q14ToFloat(ns.currentAccelerateRate);
+ stats.preemptive_expand_rate = Q14ToFloat(ns.currentPreemptiveRate);
+
+ auto ds = channel_proxy_->GetDecodingCallStatistics();
+ stats.decoding_calls_to_silence_generator = ds.calls_to_silence_generator;
+ stats.decoding_calls_to_neteq = ds.calls_to_neteq;
+ stats.decoding_normal = ds.decoded_normal;
+ stats.decoding_plc = ds.decoded_plc;
+ stats.decoding_cng = ds.decoded_cng;
+ stats.decoding_plc_cng = ds.decoded_plc_cng;
+
+ return stats;
+}
+
+void AudioReceiveStream::SetSink(rtc::scoped_ptr<AudioSinkInterface> sink) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ channel_proxy_->SetSink(std::move(sink));
+}
+
+const webrtc::AudioReceiveStream::Config& AudioReceiveStream::config() const {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ return config_;
+}
+
+VoiceEngine* AudioReceiveStream::voice_engine() const {
+ internal::AudioState* audio_state =
+ static_cast<internal::AudioState*>(audio_state_.get());
+ VoiceEngine* voice_engine = audio_state->voice_engine();
+ RTC_DCHECK(voice_engine);
+ return voice_engine;
+}
} // namespace internal
} // namespace webrtc
diff --git a/webrtc/audio/audio_receive_stream.h b/webrtc/audio/audio_receive_stream.h
index 5d02b0e2ae..4940c6a64c 100644
--- a/webrtc/audio/audio_receive_stream.h
+++ b/webrtc/audio/audio_receive_stream.h
@@ -12,23 +12,25 @@
#define WEBRTC_AUDIO_AUDIO_RECEIVE_STREAM_H_
#include "webrtc/audio_receive_stream.h"
-#include "webrtc/audio/scoped_voe_interface.h"
+#include "webrtc/audio_state.h"
#include "webrtc/base/thread_checker.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
-#include "webrtc/voice_engine/include/voe_base.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
namespace webrtc {
-
+class CongestionController;
class RemoteBitrateEstimator;
-class VoiceEngine;
+
+namespace voe {
+class ChannelProxy;
+} // namespace voe
namespace internal {
class AudioReceiveStream final : public webrtc::AudioReceiveStream {
public:
- AudioReceiveStream(RemoteBitrateEstimator* remote_bitrate_estimator,
+ AudioReceiveStream(CongestionController* congestion_controller,
const webrtc::AudioReceiveStream::Config& config,
- VoiceEngine* voice_engine);
+ const rtc::scoped_refptr<webrtc::AudioState>& audio_state);
~AudioReceiveStream() override;
// webrtc::ReceiveStream implementation.
@@ -43,16 +45,19 @@ class AudioReceiveStream final : public webrtc::AudioReceiveStream {
// webrtc::AudioReceiveStream implementation.
webrtc::AudioReceiveStream::Stats GetStats() const override;
+ void SetSink(rtc::scoped_ptr<AudioSinkInterface> sink) override;
+
const webrtc::AudioReceiveStream::Config& config() const;
private:
+ VoiceEngine* voice_engine() const;
+
rtc::ThreadChecker thread_checker_;
- RemoteBitrateEstimator* const remote_bitrate_estimator_;
+ RemoteBitrateEstimator* remote_bitrate_estimator_ = nullptr;
const webrtc::AudioReceiveStream::Config config_;
- VoiceEngine* voice_engine_;
- // We hold one interface pointer to the VoE to make sure it is kept alive.
- ScopedVoEInterface<VoEBase> voe_base_;
+ rtc::scoped_refptr<webrtc::AudioState> audio_state_;
rtc::scoped_ptr<RtpHeaderParser> rtp_header_parser_;
+ rtc::scoped_ptr<voe::ChannelProxy> channel_proxy_;
RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(AudioReceiveStream);
};
diff --git a/webrtc/audio/audio_receive_stream_unittest.cc b/webrtc/audio/audio_receive_stream_unittest.cc
index 4e267f1738..eb008b3045 100644
--- a/webrtc/audio/audio_receive_stream_unittest.cc
+++ b/webrtc/audio/audio_receive_stream_unittest.cc
@@ -8,154 +8,320 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <string>
+
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/audio/audio_receive_stream.h"
#include "webrtc/audio/conversion.h"
+#include "webrtc/call/mock/mock_congestion_controller.h"
+#include "webrtc/modules/bitrate_controller/include/mock/mock_bitrate_controller.h"
+#include "webrtc/modules/pacing/packet_router.h"
#include "webrtc/modules/remote_bitrate_estimator/include/mock/mock_remote_bitrate_estimator.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
-#include "webrtc/test/fake_voice_engine.h"
+#include "webrtc/modules/utility/include/mock/mock_process_thread.h"
+#include "webrtc/system_wrappers/include/clock.h"
+#include "webrtc/test/mock_voe_channel_proxy.h"
+#include "webrtc/test/mock_voice_engine.h"
+#include "webrtc/video/call_stats.h"
+namespace webrtc {
+namespace test {
namespace {
-using webrtc::ByteWriter;
+using testing::_;
+using testing::Return;
-const size_t kAbsoluteSendTimeLength = 4;
+AudioDecodingCallStats MakeAudioDecodeStatsForTest() {
+ AudioDecodingCallStats audio_decode_stats;
+ audio_decode_stats.calls_to_silence_generator = 234;
+ audio_decode_stats.calls_to_neteq = 567;
+ audio_decode_stats.decoded_normal = 890;
+ audio_decode_stats.decoded_plc = 123;
+ audio_decode_stats.decoded_cng = 456;
+ audio_decode_stats.decoded_plc_cng = 789;
+ return audio_decode_stats;
+}
-void BuildAbsoluteSendTimeExtension(uint8_t* buffer,
- int id,
- uint32_t abs_send_time) {
- const size_t kRtpOneByteHeaderLength = 4;
- const uint16_t kRtpOneByteHeaderExtensionId = 0xBEDE;
- ByteWriter<uint16_t>::WriteBigEndian(buffer, kRtpOneByteHeaderExtensionId);
+const int kChannelId = 2;
+const uint32_t kRemoteSsrc = 1234;
+const uint32_t kLocalSsrc = 5678;
+const size_t kOneByteExtensionHeaderLength = 4;
+const size_t kOneByteExtensionLength = 4;
+const int kAbsSendTimeId = 2;
+const int kAudioLevelId = 3;
+const int kTransportSequenceNumberId = 4;
+const int kJitterBufferDelay = -7;
+const int kPlayoutBufferDelay = 302;
+const unsigned int kSpeechOutputLevel = 99;
+const CallStatistics kCallStats = {
+ 345, 678, 901, 234, -12, 3456, 7890, 567, 890, 123};
+const CodecInst kCodecInst = {
+ 123, "codec_name_recv", 96000, -187, 0, -103};
+const NetworkStatistics kNetworkStats = {
+ 123, 456, false, 0, 0, 789, 12, 345, 678, 901, -1, -1, -1, -1, -1, 0};
+const AudioDecodingCallStats kAudioDecodeStats = MakeAudioDecodeStatsForTest();
+
+struct ConfigHelper {
+ ConfigHelper()
+ : simulated_clock_(123456),
+ call_stats_(&simulated_clock_),
+ congestion_controller_(&process_thread_,
+ &call_stats_,
+ &bitrate_observer_) {
+ using testing::Invoke;
+
+ EXPECT_CALL(voice_engine_,
+ RegisterVoiceEngineObserver(_)).WillOnce(Return(0));
+ EXPECT_CALL(voice_engine_,
+ DeRegisterVoiceEngineObserver()).WillOnce(Return(0));
+ AudioState::Config config;
+ config.voice_engine = &voice_engine_;
+ audio_state_ = AudioState::Create(config);
+
+ EXPECT_CALL(voice_engine_, ChannelProxyFactory(kChannelId))
+ .WillOnce(Invoke([this](int channel_id) {
+ EXPECT_FALSE(channel_proxy_);
+ channel_proxy_ = new testing::StrictMock<MockVoEChannelProxy>();
+ EXPECT_CALL(*channel_proxy_, SetLocalSSRC(kLocalSsrc)).Times(1);
+ EXPECT_CALL(*channel_proxy_,
+ SetReceiveAbsoluteSenderTimeStatus(true, kAbsSendTimeId))
+ .Times(1);
+ EXPECT_CALL(*channel_proxy_,
+ SetReceiveAudioLevelIndicationStatus(true, kAudioLevelId))
+ .Times(1);
+ EXPECT_CALL(*channel_proxy_, SetCongestionControlObjects(
+ nullptr, nullptr, &packet_router_))
+ .Times(1);
+ EXPECT_CALL(congestion_controller_, packet_router())
+ .WillOnce(Return(&packet_router_));
+ EXPECT_CALL(*channel_proxy_,
+ SetCongestionControlObjects(nullptr, nullptr, nullptr))
+ .Times(1);
+ return channel_proxy_;
+ }));
+ stream_config_.voe_channel_id = kChannelId;
+ stream_config_.rtp.local_ssrc = kLocalSsrc;
+ stream_config_.rtp.remote_ssrc = kRemoteSsrc;
+ stream_config_.rtp.extensions.push_back(
+ RtpExtension(RtpExtension::kAbsSendTime, kAbsSendTimeId));
+ stream_config_.rtp.extensions.push_back(
+ RtpExtension(RtpExtension::kAudioLevel, kAudioLevelId));
+ }
+
+ MockCongestionController* congestion_controller() {
+ return &congestion_controller_;
+ }
+ MockRemoteBitrateEstimator* remote_bitrate_estimator() {
+ return &remote_bitrate_estimator_;
+ }
+ AudioReceiveStream::Config& config() { return stream_config_; }
+ rtc::scoped_refptr<AudioState> audio_state() { return audio_state_; }
+ MockVoiceEngine& voice_engine() { return voice_engine_; }
- const uint32_t kPosLength = 2;
- ByteWriter<uint16_t>::WriteBigEndian(buffer + kPosLength,
- kAbsoluteSendTimeLength / 4);
+ void SetupMockForBweFeedback(bool send_side_bwe) {
+ EXPECT_CALL(congestion_controller_,
+ GetRemoteBitrateEstimator(send_side_bwe))
+ .WillOnce(Return(&remote_bitrate_estimator_));
+ EXPECT_CALL(remote_bitrate_estimator_,
+ RemoveStream(stream_config_.rtp.remote_ssrc));
+ }
- const uint8_t kLengthOfData = 3;
- buffer[kRtpOneByteHeaderLength] = (id << 4) + (kLengthOfData - 1);
- ByteWriter<uint32_t, kLengthOfData>::WriteBigEndian(
- buffer + kRtpOneByteHeaderLength + 1, abs_send_time);
+ void SetupMockForGetStats() {
+ using testing::DoAll;
+ using testing::SetArgReferee;
+
+ ASSERT_TRUE(channel_proxy_);
+ EXPECT_CALL(*channel_proxy_, GetRTCPStatistics())
+ .WillOnce(Return(kCallStats));
+ EXPECT_CALL(*channel_proxy_, GetDelayEstimate())
+ .WillOnce(Return(kJitterBufferDelay + kPlayoutBufferDelay));
+ EXPECT_CALL(*channel_proxy_, GetSpeechOutputLevelFullRange())
+ .WillOnce(Return(kSpeechOutputLevel));
+ EXPECT_CALL(*channel_proxy_, GetNetworkStatistics())
+ .WillOnce(Return(kNetworkStats));
+ EXPECT_CALL(*channel_proxy_, GetDecodingCallStatistics())
+ .WillOnce(Return(kAudioDecodeStats));
+
+ EXPECT_CALL(voice_engine_, GetRecCodec(kChannelId, _))
+ .WillOnce(DoAll(SetArgReferee<1>(kCodecInst), Return(0)));
+ }
+
+ private:
+ SimulatedClock simulated_clock_;
+ CallStats call_stats_;
+ PacketRouter packet_router_;
+ testing::NiceMock<MockBitrateObserver> bitrate_observer_;
+ testing::NiceMock<MockProcessThread> process_thread_;
+ MockCongestionController congestion_controller_;
+ MockRemoteBitrateEstimator remote_bitrate_estimator_;
+ testing::StrictMock<MockVoiceEngine> voice_engine_;
+ rtc::scoped_refptr<AudioState> audio_state_;
+ AudioReceiveStream::Config stream_config_;
+ testing::StrictMock<MockVoEChannelProxy>* channel_proxy_ = nullptr;
+};
+
+void BuildOneByteExtension(std::vector<uint8_t>::iterator it,
+ int id,
+ uint32_t extension_value,
+ size_t value_length) {
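+  // Per RFC 5285, a one-byte-header extension block starts with the 16-bit
+  // marker 0xBEDE and a 16-bit length in 32-bit words; each element is one
+  // byte of (id << 4) | (length - 1) followed by the extension data.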
+ const uint16_t kRtpOneByteHeaderExtensionId = 0xBEDE;
+ ByteWriter<uint16_t>::WriteBigEndian(&(*it), kRtpOneByteHeaderExtensionId);
+ it += 2;
+
+ ByteWriter<uint16_t>::WriteBigEndian(&(*it), kOneByteExtensionLength / 4);
+ it += 2;
+ const size_t kExtensionDataLength = kOneByteExtensionLength - 1;
+ uint32_t shifted_value = extension_value
+ << (8 * (kExtensionDataLength - value_length));
+ *it = (id << 4) + (value_length - 1);
+ ++it;
+ ByteWriter<uint32_t, kExtensionDataLength>::WriteBigEndian(&(*it),
+ shifted_value);
}
-size_t CreateRtpHeaderWithAbsSendTime(uint8_t* header,
- int extension_id,
- uint32_t abs_send_time) {
+std::vector<uint8_t> CreateRtpHeaderWithOneByteExtension(
+ int extension_id,
+ uint32_t extension_value,
+ size_t value_length) {
+ std::vector<uint8_t> header;
+ header.resize(webrtc::kRtpHeaderSize + kOneByteExtensionHeaderLength +
+ kOneByteExtensionLength);
header[0] = 0x80; // Version 2.
header[0] |= 0x10; // Set extension bit.
header[1] = 100; // Payload type.
header[1] |= 0x80; // Marker bit is set.
- ByteWriter<uint16_t>::WriteBigEndian(header + 2, 0x1234); // Sequence number.
- ByteWriter<uint32_t>::WriteBigEndian(header + 4, 0x5678); // Timestamp.
- ByteWriter<uint32_t>::WriteBigEndian(header + 8, 0x4321); // SSRC.
- int32_t rtp_header_length = webrtc::kRtpHeaderSize;
-
- BuildAbsoluteSendTimeExtension(header + rtp_header_length, extension_id,
- abs_send_time);
- rtp_header_length += kAbsoluteSendTimeLength;
- return rtp_header_length;
+ ByteWriter<uint16_t>::WriteBigEndian(&header[2], 0x1234); // Sequence number.
+ ByteWriter<uint32_t>::WriteBigEndian(&header[4], 0x5678); // Timestamp.
+ ByteWriter<uint32_t>::WriteBigEndian(&header[8], 0x4321); // SSRC.
+
+ BuildOneByteExtension(header.begin() + webrtc::kRtpHeaderSize, extension_id,
+ extension_value, value_length);
+ return header;
}
} // namespace
-namespace webrtc {
-namespace test {
-
TEST(AudioReceiveStreamTest, ConfigToString) {
- const int kAbsSendTimeId = 3;
AudioReceiveStream::Config config;
- config.rtp.remote_ssrc = 1234;
- config.rtp.local_ssrc = 5678;
+ config.rtp.remote_ssrc = kRemoteSsrc;
+ config.rtp.local_ssrc = kLocalSsrc;
config.rtp.extensions.push_back(
RtpExtension(RtpExtension::kAbsSendTime, kAbsSendTimeId));
- config.voe_channel_id = 1;
+ config.voe_channel_id = kChannelId;
config.combined_audio_video_bwe = true;
- EXPECT_EQ("{rtp: {remote_ssrc: 1234, local_ssrc: 5678, extensions: [{name: "
- "http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time, id: 3}]}, "
+ EXPECT_EQ(
+ "{rtp: {remote_ssrc: 1234, local_ssrc: 5678, extensions: [{name: "
+ "http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time, id: 2}]}, "
"receive_transport: nullptr, rtcp_send_transport: nullptr, "
- "voe_channel_id: 1, combined_audio_video_bwe: true}", config.ToString());
+ "voe_channel_id: 2, combined_audio_video_bwe: true}",
+ config.ToString());
}
TEST(AudioReceiveStreamTest, ConstructDestruct) {
- MockRemoteBitrateEstimator remote_bitrate_estimator;
- FakeVoiceEngine voice_engine;
- AudioReceiveStream::Config config;
- config.voe_channel_id = 1;
- internal::AudioReceiveStream recv_stream(&remote_bitrate_estimator, config,
- &voice_engine);
+ ConfigHelper helper;
+ internal::AudioReceiveStream recv_stream(
+ helper.congestion_controller(), helper.config(), helper.audio_state());
+}
+
+MATCHER_P(VerifyHeaderExtension, expected_extension, "") {
+ return arg.extension.hasAbsoluteSendTime ==
+ expected_extension.hasAbsoluteSendTime &&
+ arg.extension.absoluteSendTime ==
+ expected_extension.absoluteSendTime &&
+ arg.extension.hasTransportSequenceNumber ==
+ expected_extension.hasTransportSequenceNumber &&
+ arg.extension.transportSequenceNumber ==
+ expected_extension.transportSequenceNumber;
}
TEST(AudioReceiveStreamTest, AudioPacketUpdatesBweWithTimestamp) {
- MockRemoteBitrateEstimator remote_bitrate_estimator;
- FakeVoiceEngine voice_engine;
- AudioReceiveStream::Config config;
- config.combined_audio_video_bwe = true;
- config.voe_channel_id = FakeVoiceEngine::kRecvChannelId;
- const int kAbsSendTimeId = 3;
- config.rtp.extensions.push_back(
- RtpExtension(RtpExtension::kAbsSendTime, kAbsSendTimeId));
- internal::AudioReceiveStream recv_stream(&remote_bitrate_estimator, config,
- &voice_engine);
- uint8_t rtp_packet[30];
+ ConfigHelper helper;
+ helper.config().combined_audio_video_bwe = true;
+ helper.SetupMockForBweFeedback(false);
+ internal::AudioReceiveStream recv_stream(
+ helper.congestion_controller(), helper.config(), helper.audio_state());
const int kAbsSendTimeValue = 1234;
- CreateRtpHeaderWithAbsSendTime(rtp_packet, kAbsSendTimeId, kAbsSendTimeValue);
+ std::vector<uint8_t> rtp_packet =
+ CreateRtpHeaderWithOneByteExtension(kAbsSendTimeId, kAbsSendTimeValue, 3);
PacketTime packet_time(5678000, 0);
const size_t kExpectedHeaderLength = 20;
- EXPECT_CALL(remote_bitrate_estimator,
- IncomingPacket(packet_time.timestamp / 1000,
- sizeof(rtp_packet) - kExpectedHeaderLength, testing::_, false))
+ RTPHeaderExtension expected_extension;
+ expected_extension.hasAbsoluteSendTime = true;
+ expected_extension.absoluteSendTime = kAbsSendTimeValue;
+ EXPECT_CALL(*helper.remote_bitrate_estimator(),
+ IncomingPacket(packet_time.timestamp / 1000,
+ rtp_packet.size() - kExpectedHeaderLength,
+ VerifyHeaderExtension(expected_extension), false))
.Times(1);
EXPECT_TRUE(
- recv_stream.DeliverRtp(rtp_packet, sizeof(rtp_packet), packet_time));
+ recv_stream.DeliverRtp(&rtp_packet[0], rtp_packet.size(), packet_time));
}
-TEST(AudioReceiveStreamTest, GetStats) {
- MockRemoteBitrateEstimator remote_bitrate_estimator;
- FakeVoiceEngine voice_engine;
- AudioReceiveStream::Config config;
- config.rtp.remote_ssrc = FakeVoiceEngine::kRecvSsrc;
- config.voe_channel_id = FakeVoiceEngine::kRecvChannelId;
- internal::AudioReceiveStream recv_stream(&remote_bitrate_estimator, config,
- &voice_engine);
+TEST(AudioReceiveStreamTest, AudioPacketUpdatesBweFeedback) {
+ ConfigHelper helper;
+ helper.config().combined_audio_video_bwe = true;
+ helper.config().rtp.transport_cc = true;
+ helper.config().rtp.extensions.push_back(RtpExtension(
+ RtpExtension::kTransportSequenceNumber, kTransportSequenceNumberId));
+ helper.SetupMockForBweFeedback(true);
+ internal::AudioReceiveStream recv_stream(
+ helper.congestion_controller(), helper.config(), helper.audio_state());
+ const int kTransportSequenceNumberValue = 1234;
+ std::vector<uint8_t> rtp_packet = CreateRtpHeaderWithOneByteExtension(
+ kTransportSequenceNumberId, kTransportSequenceNumberValue, 2);
+ PacketTime packet_time(5678000, 0);
+ const size_t kExpectedHeaderLength = 20;
+ RTPHeaderExtension expected_extension;
+ expected_extension.hasTransportSequenceNumber = true;
+ expected_extension.transportSequenceNumber = kTransportSequenceNumberValue;
+ EXPECT_CALL(*helper.remote_bitrate_estimator(),
+ IncomingPacket(packet_time.timestamp / 1000,
+ rtp_packet.size() - kExpectedHeaderLength,
+ VerifyHeaderExtension(expected_extension), false))
+ .Times(1);
+ EXPECT_TRUE(
+ recv_stream.DeliverRtp(&rtp_packet[0], rtp_packet.size(), packet_time));
+}
+TEST(AudioReceiveStreamTest, GetStats) {
+ ConfigHelper helper;
+ internal::AudioReceiveStream recv_stream(
+ helper.congestion_controller(), helper.config(), helper.audio_state());
+ helper.SetupMockForGetStats();
AudioReceiveStream::Stats stats = recv_stream.GetStats();
- const CallStatistics& call_stats = FakeVoiceEngine::kRecvCallStats;
- const CodecInst& codec_inst = FakeVoiceEngine::kRecvCodecInst;
- const NetworkStatistics& net_stats = FakeVoiceEngine::kRecvNetworkStats;
- const AudioDecodingCallStats& decode_stats =
- FakeVoiceEngine::kRecvAudioDecodingCallStats;
- EXPECT_EQ(FakeVoiceEngine::kRecvSsrc, stats.remote_ssrc);
- EXPECT_EQ(static_cast<int64_t>(call_stats.bytesReceived), stats.bytes_rcvd);
- EXPECT_EQ(static_cast<uint32_t>(call_stats.packetsReceived),
+ EXPECT_EQ(kRemoteSsrc, stats.remote_ssrc);
+ EXPECT_EQ(static_cast<int64_t>(kCallStats.bytesReceived), stats.bytes_rcvd);
+ EXPECT_EQ(static_cast<uint32_t>(kCallStats.packetsReceived),
stats.packets_rcvd);
- EXPECT_EQ(call_stats.cumulativeLost, stats.packets_lost);
- EXPECT_EQ(Q8ToFloat(call_stats.fractionLost), stats.fraction_lost);
- EXPECT_EQ(std::string(codec_inst.plname), stats.codec_name);
- EXPECT_EQ(call_stats.extendedMax, stats.ext_seqnum);
- EXPECT_EQ(call_stats.jitterSamples / (codec_inst.plfreq / 1000),
+ EXPECT_EQ(kCallStats.cumulativeLost, stats.packets_lost);
+ EXPECT_EQ(Q8ToFloat(kCallStats.fractionLost), stats.fraction_lost);
+ EXPECT_EQ(std::string(kCodecInst.plname), stats.codec_name);
+ EXPECT_EQ(kCallStats.extendedMax, stats.ext_seqnum);
+ EXPECT_EQ(kCallStats.jitterSamples / (kCodecInst.plfreq / 1000),
stats.jitter_ms);
- EXPECT_EQ(net_stats.currentBufferSize, stats.jitter_buffer_ms);
- EXPECT_EQ(net_stats.preferredBufferSize, stats.jitter_buffer_preferred_ms);
- EXPECT_EQ(static_cast<uint32_t>(FakeVoiceEngine::kRecvJitterBufferDelay +
- FakeVoiceEngine::kRecvPlayoutBufferDelay), stats.delay_estimate_ms);
- EXPECT_EQ(static_cast<int32_t>(FakeVoiceEngine::kRecvSpeechOutputLevel),
- stats.audio_level);
- EXPECT_EQ(Q14ToFloat(net_stats.currentExpandRate), stats.expand_rate);
- EXPECT_EQ(Q14ToFloat(net_stats.currentSpeechExpandRate),
+ EXPECT_EQ(kNetworkStats.currentBufferSize, stats.jitter_buffer_ms);
+ EXPECT_EQ(kNetworkStats.preferredBufferSize,
+ stats.jitter_buffer_preferred_ms);
+ EXPECT_EQ(static_cast<uint32_t>(kJitterBufferDelay + kPlayoutBufferDelay),
+ stats.delay_estimate_ms);
+ EXPECT_EQ(static_cast<int32_t>(kSpeechOutputLevel), stats.audio_level);
+ EXPECT_EQ(Q14ToFloat(kNetworkStats.currentExpandRate), stats.expand_rate);
+ EXPECT_EQ(Q14ToFloat(kNetworkStats.currentSpeechExpandRate),
stats.speech_expand_rate);
- EXPECT_EQ(Q14ToFloat(net_stats.currentSecondaryDecodedRate),
+ EXPECT_EQ(Q14ToFloat(kNetworkStats.currentSecondaryDecodedRate),
stats.secondary_decoded_rate);
- EXPECT_EQ(Q14ToFloat(net_stats.currentAccelerateRate), stats.accelerate_rate);
- EXPECT_EQ(Q14ToFloat(net_stats.currentPreemptiveRate),
+ EXPECT_EQ(Q14ToFloat(kNetworkStats.currentAccelerateRate),
+ stats.accelerate_rate);
+ EXPECT_EQ(Q14ToFloat(kNetworkStats.currentPreemptiveRate),
stats.preemptive_expand_rate);
- EXPECT_EQ(decode_stats.calls_to_silence_generator,
+ EXPECT_EQ(kAudioDecodeStats.calls_to_silence_generator,
stats.decoding_calls_to_silence_generator);
- EXPECT_EQ(decode_stats.calls_to_neteq, stats.decoding_calls_to_neteq);
- EXPECT_EQ(decode_stats.decoded_normal, stats.decoding_normal);
- EXPECT_EQ(decode_stats.decoded_plc, stats.decoding_plc);
- EXPECT_EQ(decode_stats.decoded_cng, stats.decoding_cng);
- EXPECT_EQ(decode_stats.decoded_plc_cng, stats.decoding_plc_cng);
- EXPECT_EQ(call_stats.capture_start_ntp_time_ms_,
+ EXPECT_EQ(kAudioDecodeStats.calls_to_neteq, stats.decoding_calls_to_neteq);
+ EXPECT_EQ(kAudioDecodeStats.decoded_normal, stats.decoding_normal);
+ EXPECT_EQ(kAudioDecodeStats.decoded_plc, stats.decoding_plc);
+ EXPECT_EQ(kAudioDecodeStats.decoded_cng, stats.decoding_cng);
+ EXPECT_EQ(kAudioDecodeStats.decoded_plc_cng, stats.decoding_plc_cng);
+ EXPECT_EQ(kCallStats.capture_start_ntp_time_ms_,
stats.capture_start_ntp_time_ms);
}
} // namespace test
diff --git a/webrtc/audio/audio_send_stream.cc b/webrtc/audio/audio_send_stream.cc
index ccfdca546d..35a65521dd 100644
--- a/webrtc/audio/audio_send_stream.cc
+++ b/webrtc/audio/audio_send_stream.cc
@@ -12,13 +12,20 @@
#include <string>
+#include "webrtc/audio/audio_state.h"
#include "webrtc/audio/conversion.h"
+#include "webrtc/audio/scoped_voe_interface.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
+#include "webrtc/call/congestion_controller.h"
+#include "webrtc/modules/pacing/paced_sender.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "webrtc/voice_engine/channel_proxy.h"
#include "webrtc/voice_engine/include/voe_audio_processing.h"
#include "webrtc/voice_engine/include/voe_codec.h"
#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
#include "webrtc/voice_engine/include/voe_volume_control.h"
+#include "webrtc/voice_engine/voice_engine_impl.h"
namespace webrtc {
std::string AudioSendStream::Config::Rtp::ToString() const {
@@ -32,6 +39,7 @@ std::string AudioSendStream::Config::Rtp::ToString() const {
}
}
ss << ']';
+ ss << ", c_name: " << c_name;
ss << '}';
return ss.str();
}
@@ -48,38 +56,91 @@ std::string AudioSendStream::Config::ToString() const {
}
namespace internal {
-AudioSendStream::AudioSendStream(const webrtc::AudioSendStream::Config& config,
- VoiceEngine* voice_engine)
- : config_(config),
- voice_engine_(voice_engine),
- voe_base_(voice_engine) {
+AudioSendStream::AudioSendStream(
+ const webrtc::AudioSendStream::Config& config,
+ const rtc::scoped_refptr<webrtc::AudioState>& audio_state,
+ CongestionController* congestion_controller)
+ : config_(config), audio_state_(audio_state) {
LOG(LS_INFO) << "AudioSendStream: " << config_.ToString();
- RTC_DCHECK_NE(config.voe_channel_id, -1);
- RTC_DCHECK(voice_engine_);
+ RTC_DCHECK_NE(config_.voe_channel_id, -1);
+ RTC_DCHECK(audio_state_.get());
+ RTC_DCHECK(congestion_controller);
+
+ VoiceEngineImpl* voe_impl = static_cast<VoiceEngineImpl*>(voice_engine());
+ channel_proxy_ = voe_impl->GetChannelProxy(config_.voe_channel_id);
+ channel_proxy_->SetCongestionControlObjects(
+ congestion_controller->pacer(),
+ congestion_controller->GetTransportFeedbackObserver(),
+ congestion_controller->packet_router());
+ channel_proxy_->SetRTCPStatus(true);
+ channel_proxy_->SetLocalSSRC(config.rtp.ssrc);
+ channel_proxy_->SetRTCP_CNAME(config.rtp.c_name);
+
+ for (const auto& extension : config.rtp.extensions) {
+ if (extension.name == RtpExtension::kAbsSendTime) {
+ channel_proxy_->SetSendAbsoluteSenderTimeStatus(true, extension.id);
+ } else if (extension.name == RtpExtension::kAudioLevel) {
+ channel_proxy_->SetSendAudioLevelIndicationStatus(true, extension.id);
+ } else if (extension.name == RtpExtension::kTransportSequenceNumber) {
+ channel_proxy_->EnableSendTransportSequenceNumber(extension.id);
+ } else {
+ RTC_NOTREACHED() << "Registering unsupported RTP extension.";
+ }
+ }
}
AudioSendStream::~AudioSendStream() {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
LOG(LS_INFO) << "~AudioSendStream: " << config_.ToString();
+ channel_proxy_->SetCongestionControlObjects(nullptr, nullptr, nullptr);
+}
+
+void AudioSendStream::Start() {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+}
+
+void AudioSendStream::Stop() {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+}
+
+void AudioSendStream::SignalNetworkState(NetworkState state) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+}
+
+bool AudioSendStream::DeliverRtcp(const uint8_t* packet, size_t length) {
+ // TODO(solenberg): Tests call this function on a network thread, libjingle
+ // calls on the worker thread. We should move towards always using a network
+ // thread. Then this check can be enabled.
+ // RTC_DCHECK(!thread_checker_.CalledOnValidThread());
+ return false;
+}
+
+bool AudioSendStream::SendTelephoneEvent(int payload_type, uint8_t event,
+ uint32_t duration_ms) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ return channel_proxy_->SetSendTelephoneEventPayloadType(payload_type) &&
+ channel_proxy_->SendTelephoneEventOutband(event, duration_ms);
}
webrtc::AudioSendStream::Stats AudioSendStream::GetStats() const {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
webrtc::AudioSendStream::Stats stats;
stats.local_ssrc = config_.rtp.ssrc;
- ScopedVoEInterface<VoEAudioProcessing> processing(voice_engine_);
- ScopedVoEInterface<VoECodec> codec(voice_engine_);
- ScopedVoEInterface<VoERTP_RTCP> rtp(voice_engine_);
- ScopedVoEInterface<VoEVolumeControl> volume(voice_engine_);
- unsigned int ssrc = 0;
- webrtc::CallStatistics call_stats = {0};
- if (rtp->GetLocalSSRC(config_.voe_channel_id, ssrc) == -1 ||
- rtp->GetRTCPStatistics(config_.voe_channel_id, call_stats) == -1) {
- return stats;
- }
+ ScopedVoEInterface<VoEAudioProcessing> processing(voice_engine());
+ ScopedVoEInterface<VoECodec> codec(voice_engine());
+ ScopedVoEInterface<VoEVolumeControl> volume(voice_engine());
+ webrtc::CallStatistics call_stats = channel_proxy_->GetRTCPStatistics();
stats.bytes_sent = call_stats.bytesSent;
stats.packets_sent = call_stats.packetsSent;
+ // RTT isn't known until an RTCP report is received. Until then, VoiceEngine
+ // returns 0 to indicate an error value.
+ if (call_stats.rttMs > 0) {
+ stats.rtt_ms = call_stats.rttMs;
+ }
+ // TODO(solenberg): [was ajm]: Re-enable this metric once we have a reliable
+ // implementation.
+ stats.aec_quality_min = -1;
webrtc::CodecInst codec_inst = {0};
if (codec->GetSendCodec(config_.voe_channel_id, codec_inst) != -1) {
@@ -87,54 +148,43 @@ webrtc::AudioSendStream::Stats AudioSendStream::GetStats() const {
stats.codec_name = codec_inst.plname;
// Get data from the last remote RTCP report.
- std::vector<webrtc::ReportBlock> blocks;
- if (rtp->GetRemoteRTCPReportBlocks(config_.voe_channel_id, &blocks) != -1) {
- for (const webrtc::ReportBlock& block : blocks) {
- // Lookup report for send ssrc only.
- if (block.source_SSRC == stats.local_ssrc) {
- stats.packets_lost = block.cumulative_num_packets_lost;
- stats.fraction_lost = Q8ToFloat(block.fraction_lost);
- stats.ext_seqnum = block.extended_highest_sequence_number;
- // Convert samples to milliseconds.
- if (codec_inst.plfreq / 1000 > 0) {
- stats.jitter_ms =
- block.interarrival_jitter / (codec_inst.plfreq / 1000);
- }
- break;
+ for (const auto& block : channel_proxy_->GetRemoteRTCPReportBlocks()) {
+ // Look up the report for the send ssrc only.
+ if (block.source_SSRC == stats.local_ssrc) {
+ stats.packets_lost = block.cumulative_num_packets_lost;
+ stats.fraction_lost = Q8ToFloat(block.fraction_lost);
+ stats.ext_seqnum = block.extended_highest_sequence_number;
+ // Convert samples to milliseconds.
+ if (codec_inst.plfreq / 1000 > 0) {
+ stats.jitter_ms =
+ block.interarrival_jitter / (codec_inst.plfreq / 1000);
}
+ break;
}
}
}
- // RTT isn't known until a RTCP report is received. Until then, VoiceEngine
- // returns 0 to indicate an error value.
- if (call_stats.rttMs > 0) {
- stats.rtt_ms = call_stats.rttMs;
- }
-
// Local speech level.
{
unsigned int level = 0;
- if (volume->GetSpeechInputLevelFullRange(level) != -1) {
- stats.audio_level = static_cast<int32_t>(level);
- }
+ int error = volume->GetSpeechInputLevelFullRange(level);
+ RTC_DCHECK_EQ(0, error);
+ stats.audio_level = static_cast<int32_t>(level);
}
- // TODO(ajm): Re-enable this metric once we have a reliable implementation.
- stats.aec_quality_min = -1;
-
bool echo_metrics_on = false;
- if (processing->GetEcMetricsStatus(echo_metrics_on) != -1 &&
- echo_metrics_on) {
+ int error = processing->GetEcMetricsStatus(echo_metrics_on);
+ RTC_DCHECK_EQ(0, error);
+ if (echo_metrics_on) {
// These can also be negative, but in practice -1 is only used to signal
// insufficient data, since the resolution is limited to multiples of 4 ms.
int median = -1;
int std = -1;
float dummy = 0.0f;
- if (processing->GetEcDelayMetrics(median, std, dummy) != -1) {
- stats.echo_delay_median_ms = median;
- stats.echo_delay_std_ms = std;
- }
+ error = processing->GetEcDelayMetrics(median, std, dummy);
+ RTC_DCHECK_EQ(0, error);
+ stats.echo_delay_median_ms = median;
+ stats.echo_delay_std_ms = std;
// These can take on valid negative values, so use the lowest possible level
// as default rather than -1.
@@ -142,14 +192,15 @@ webrtc::AudioSendStream::Stats AudioSendStream::GetStats() const {
int erle = -100;
int dummy1 = 0;
int dummy2 = 0;
- if (processing->GetEchoMetrics(erl, erle, dummy1, dummy2) != -1) {
- stats.echo_return_loss = erl;
- stats.echo_return_loss_enhancement = erle;
- }
+ error = processing->GetEchoMetrics(erl, erle, dummy1, dummy2);
+ RTC_DCHECK_EQ(0, error);
+ stats.echo_return_loss = erl;
+ stats.echo_return_loss_enhancement = erle;
}
- // TODO(solenberg): Collect typing noise warnings here too!
- // bool typing_noise_detected = typing_noise_detected_;
+ internal::AudioState* audio_state =
+ static_cast<internal::AudioState*>(audio_state_.get());
+ stats.typing_noise_detected = audio_state->typing_noise_detected();
return stats;
}
@@ -159,24 +210,12 @@ const webrtc::AudioSendStream::Config& AudioSendStream::config() const {
return config_;
}
-void AudioSendStream::Start() {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
-}
-
-void AudioSendStream::Stop() {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
-}
-
-void AudioSendStream::SignalNetworkState(NetworkState state) {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
-}
-
-bool AudioSendStream::DeliverRtcp(const uint8_t* packet, size_t length) {
- // TODO(solenberg): Tests call this function on a network thread, libjingle
- // calls on the worker thread. We should move towards always using a network
- // thread. Then this check can be enabled.
- // RTC_DCHECK(!thread_checker_.CalledOnValidThread());
- return false;
+VoiceEngine* AudioSendStream::voice_engine() const {
+ internal::AudioState* audio_state =
+ static_cast<internal::AudioState*>(audio_state_.get());
+ VoiceEngine* voice_engine = audio_state->voice_engine();
+ RTC_DCHECK(voice_engine);
+ return voice_engine;
}
} // namespace internal
} // namespace webrtc
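
For readers tracing the jitter arithmetic in GetStats() above: RTCP report blocks carry interarrival jitter in RTP timestamp units (samples), so the code divides by samples-per-millisecond (plfreq / 1000). A minimal standalone sketch of that conversion; the helper name and the values are illustrative, not part of the patch:

    #include <cstdint>

    // Hypothetical helper mirroring the conversion in GetStats() above.
    int64_t JitterSamplesToMs(uint32_t interarrival_jitter, int plfreq) {
      // RTCP reports interarrival jitter in RTP timestamp units (samples);
      // dividing by samples-per-millisecond yields milliseconds.
      const int samples_per_ms = plfreq / 1000;
      return samples_per_ms > 0
                 ? static_cast<int64_t>(interarrival_jitter / samples_per_ms)
                 : 0;
    }

    // With a 48 kHz codec, 960 samples of jitter correspond to 20 ms:
    //   JitterSamplesToMs(960, 48000) == 20
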
diff --git a/webrtc/audio/audio_send_stream.h b/webrtc/audio/audio_send_stream.h
index ae81dfc8fc..8b96350590 100644
--- a/webrtc/audio/audio_send_stream.h
+++ b/webrtc/audio/audio_send_stream.h
@@ -12,20 +12,24 @@
#define WEBRTC_AUDIO_AUDIO_SEND_STREAM_H_
#include "webrtc/audio_send_stream.h"
-#include "webrtc/audio/scoped_voe_interface.h"
+#include "webrtc/audio_state.h"
#include "webrtc/base/thread_checker.h"
-#include "webrtc/voice_engine/include/voe_base.h"
+#include "webrtc/base/scoped_ptr.h"
namespace webrtc {
-
+class CongestionController;
class VoiceEngine;
-namespace internal {
+namespace voe {
+class ChannelProxy;
+} // namespace voe
+namespace internal {
class AudioSendStream final : public webrtc::AudioSendStream {
public:
AudioSendStream(const webrtc::AudioSendStream::Config& config,
- VoiceEngine* voice_engine);
+ const rtc::scoped_refptr<webrtc::AudioState>& audio_state,
+ CongestionController* congestion_controller);
~AudioSendStream() override;
// webrtc::SendStream implementation.
@@ -35,16 +39,19 @@ class AudioSendStream final : public webrtc::AudioSendStream {
bool DeliverRtcp(const uint8_t* packet, size_t length) override;
// webrtc::AudioSendStream implementation.
+ bool SendTelephoneEvent(int payload_type, uint8_t event,
+ uint32_t duration_ms) override;
webrtc::AudioSendStream::Stats GetStats() const override;
const webrtc::AudioSendStream::Config& config() const;
private:
+ VoiceEngine* voice_engine() const;
+
rtc::ThreadChecker thread_checker_;
const webrtc::AudioSendStream::Config config_;
- VoiceEngine* voice_engine_;
- // We hold one interface pointer to the VoE to make sure it is kept alive.
- ScopedVoEInterface<VoEBase> voe_base_;
+ rtc::scoped_refptr<webrtc::AudioState> audio_state_;
+ rtc::scoped_ptr<voe::ChannelProxy> channel_proxy_;
RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(AudioSendStream);
};
diff --git a/webrtc/audio/audio_send_stream_unittest.cc b/webrtc/audio/audio_send_stream_unittest.cc
index 227ec83799..466c1571ac 100644
--- a/webrtc/audio/audio_send_stream_unittest.cc
+++ b/webrtc/audio/audio_send_stream_unittest.cc
@@ -8,69 +8,238 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <string>
+#include <vector>
+
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/audio/audio_send_stream.h"
+#include "webrtc/audio/audio_state.h"
#include "webrtc/audio/conversion.h"
-#include "webrtc/test/fake_voice_engine.h"
+#include "webrtc/call/congestion_controller.h"
+#include "webrtc/modules/bitrate_controller/include/mock/mock_bitrate_controller.h"
+#include "webrtc/modules/pacing/paced_sender.h"
+#include "webrtc/test/mock_voe_channel_proxy.h"
+#include "webrtc/test/mock_voice_engine.h"
+#include "webrtc/video/call_stats.h"
namespace webrtc {
namespace test {
+namespace {
+
+using testing::_;
+using testing::Return;
+
+const int kChannelId = 1;
+const uint32_t kSsrc = 1234;
+const char* kCName = "foo_name";
+const int kAudioLevelId = 2;
+const int kAbsSendTimeId = 3;
+const int kTransportSequenceNumberId = 4;
+const int kEchoDelayMedian = 254;
+const int kEchoDelayStdDev = -3;
+const int kEchoReturnLoss = -65;
+const int kEchoReturnLossEnhancement = 101;
+const unsigned int kSpeechInputLevel = 96;
+const CallStatistics kCallStats = {
+ 1345, 1678, 1901, 1234, 112, 13456, 17890, 1567, -1890, -1123};
+const CodecInst kCodecInst = {-121, "codec_name_send", 48000, -231, 0, -671};
+const ReportBlock kReportBlock = {456, 780, 123, 567, 890, 132, 143, 13354};
+const int kTelephoneEventPayloadType = 123;
+const uint8_t kTelephoneEventCode = 45;
+const uint32_t kTelephoneEventDuration = 6789;
+
+struct ConfigHelper {
+ ConfigHelper()
+ : stream_config_(nullptr),
+ call_stats_(Clock::GetRealTimeClock()),
+ process_thread_(ProcessThread::Create("AudioTestThread")),
+ congestion_controller_(process_thread_.get(),
+ &call_stats_,
+ &bitrate_observer_) {
+ using testing::Invoke;
+ using testing::StrEq;
+
+ EXPECT_CALL(voice_engine_,
+ RegisterVoiceEngineObserver(_)).WillOnce(Return(0));
+ EXPECT_CALL(voice_engine_,
+ DeRegisterVoiceEngineObserver()).WillOnce(Return(0));
+ AudioState::Config config;
+ config.voice_engine = &voice_engine_;
+ audio_state_ = AudioState::Create(config);
+
+ EXPECT_CALL(voice_engine_, ChannelProxyFactory(kChannelId))
+ .WillOnce(Invoke([this](int channel_id) {
+ EXPECT_FALSE(channel_proxy_);
+ channel_proxy_ = new testing::StrictMock<MockVoEChannelProxy>();
+ EXPECT_CALL(*channel_proxy_, SetRTCPStatus(true)).Times(1);
+ EXPECT_CALL(*channel_proxy_, SetLocalSSRC(kSsrc)).Times(1);
+ EXPECT_CALL(*channel_proxy_, SetRTCP_CNAME(StrEq(kCName))).Times(1);
+ EXPECT_CALL(*channel_proxy_,
+ SetSendAbsoluteSenderTimeStatus(true, kAbsSendTimeId)).Times(1);
+ EXPECT_CALL(*channel_proxy_,
+ SetSendAudioLevelIndicationStatus(true, kAudioLevelId)).Times(1);
+ EXPECT_CALL(*channel_proxy_, EnableSendTransportSequenceNumber(
+ kTransportSequenceNumberId))
+ .Times(1);
+ EXPECT_CALL(*channel_proxy_,
+ SetCongestionControlObjects(
+ congestion_controller_.pacer(),
+ congestion_controller_.GetTransportFeedbackObserver(),
+ congestion_controller_.packet_router()))
+ .Times(1);
+ EXPECT_CALL(*channel_proxy_,
+ SetCongestionControlObjects(nullptr, nullptr, nullptr))
+ .Times(1);
+ return channel_proxy_;
+ }));
+ stream_config_.voe_channel_id = kChannelId;
+ stream_config_.rtp.ssrc = kSsrc;
+ stream_config_.rtp.c_name = kCName;
+ stream_config_.rtp.extensions.push_back(
+ RtpExtension(RtpExtension::kAudioLevel, kAudioLevelId));
+ stream_config_.rtp.extensions.push_back(
+ RtpExtension(RtpExtension::kAbsSendTime, kAbsSendTimeId));
+ stream_config_.rtp.extensions.push_back(RtpExtension(
+ RtpExtension::kTransportSequenceNumber, kTransportSequenceNumberId));
+ }
+
+ AudioSendStream::Config& config() { return stream_config_; }
+ rtc::scoped_refptr<AudioState> audio_state() { return audio_state_; }
+ CongestionController* congestion_controller() {
+ return &congestion_controller_;
+ }
+
+ void SetupMockForSendTelephoneEvent() {
+ EXPECT_TRUE(channel_proxy_);
+ EXPECT_CALL(*channel_proxy_,
+ SetSendTelephoneEventPayloadType(kTelephoneEventPayloadType))
+ .WillOnce(Return(true));
+ EXPECT_CALL(*channel_proxy_,
+ SendTelephoneEventOutband(kTelephoneEventCode, kTelephoneEventDuration))
+ .WillOnce(Return(true));
+ }
+
+ void SetupMockForGetStats() {
+ using testing::DoAll;
+ using testing::SetArgReferee;
+
+ std::vector<ReportBlock> report_blocks;
+ webrtc::ReportBlock block = kReportBlock;
+ report_blocks.push_back(block); // Has wrong SSRC.
+ block.source_SSRC = kSsrc;
+ report_blocks.push_back(block); // Correct block.
+ block.fraction_lost = 0;
+ report_blocks.push_back(block); // Duplicate SSRC, bad fraction_lost.
+
+ EXPECT_TRUE(channel_proxy_);
+ EXPECT_CALL(*channel_proxy_, GetRTCPStatistics())
+ .WillRepeatedly(Return(kCallStats));
+ EXPECT_CALL(*channel_proxy_, GetRemoteRTCPReportBlocks())
+ .WillRepeatedly(Return(report_blocks));
+
+ EXPECT_CALL(voice_engine_, GetSendCodec(kChannelId, _))
+ .WillRepeatedly(DoAll(SetArgReferee<1>(kCodecInst), Return(0)));
+ EXPECT_CALL(voice_engine_, GetSpeechInputLevelFullRange(_))
+ .WillRepeatedly(DoAll(SetArgReferee<0>(kSpeechInputLevel), Return(0)));
+ EXPECT_CALL(voice_engine_, GetEcMetricsStatus(_))
+ .WillRepeatedly(DoAll(SetArgReferee<0>(true), Return(0)));
+ EXPECT_CALL(voice_engine_, GetEchoMetrics(_, _, _, _))
+ .WillRepeatedly(DoAll(SetArgReferee<0>(kEchoReturnLoss),
+ SetArgReferee<1>(kEchoReturnLossEnhancement),
+ Return(0)));
+ EXPECT_CALL(voice_engine_, GetEcDelayMetrics(_, _, _))
+ .WillRepeatedly(DoAll(SetArgReferee<0>(kEchoDelayMedian),
+ SetArgReferee<1>(kEchoDelayStdDev), Return(0)));
+ }
+
+ private:
+ testing::StrictMock<MockVoiceEngine> voice_engine_;
+ rtc::scoped_refptr<AudioState> audio_state_;
+ AudioSendStream::Config stream_config_;
+ testing::StrictMock<MockVoEChannelProxy>* channel_proxy_ = nullptr;
+ CallStats call_stats_;
+ testing::NiceMock<MockBitrateObserver> bitrate_observer_;
+ rtc::scoped_ptr<ProcessThread> process_thread_;
+ CongestionController congestion_controller_;
+};
+} // namespace
TEST(AudioSendStreamTest, ConfigToString) {
- const int kAbsSendTimeId = 3;
AudioSendStream::Config config(nullptr);
- config.rtp.ssrc = 1234;
+ config.rtp.ssrc = kSsrc;
config.rtp.extensions.push_back(
RtpExtension(RtpExtension::kAbsSendTime, kAbsSendTimeId));
- config.voe_channel_id = 1;
+ config.rtp.c_name = kCName;
+ config.voe_channel_id = kChannelId;
config.cng_payload_type = 42;
config.red_payload_type = 17;
- EXPECT_EQ("{rtp: {ssrc: 1234, extensions: [{name: "
- "http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time, id: 3}]}, "
- "voe_channel_id: 1, cng_payload_type: 42, red_payload_type: 17}",
+ EXPECT_EQ(
+ "{rtp: {ssrc: 1234, extensions: [{name: "
+ "http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time, id: 3}], "
+ "c_name: foo_name}, voe_channel_id: 1, cng_payload_type: 42, "
+ "red_payload_type: 17}",
config.ToString());
}
TEST(AudioSendStreamTest, ConstructDestruct) {
- FakeVoiceEngine voice_engine;
- AudioSendStream::Config config(nullptr);
- config.voe_channel_id = 1;
- internal::AudioSendStream send_stream(config, &voice_engine);
+ ConfigHelper helper;
+ internal::AudioSendStream send_stream(helper.config(), helper.audio_state(),
+ helper.congestion_controller());
}
-TEST(AudioSendStreamTest, GetStats) {
- FakeVoiceEngine voice_engine;
- AudioSendStream::Config config(nullptr);
- config.rtp.ssrc = FakeVoiceEngine::kSendSsrc;
- config.voe_channel_id = FakeVoiceEngine::kSendChannelId;
- internal::AudioSendStream send_stream(config, &voice_engine);
+TEST(AudioSendStreamTest, SendTelephoneEvent) {
+ ConfigHelper helper;
+ internal::AudioSendStream send_stream(helper.config(), helper.audio_state(),
+ helper.congestion_controller());
+ helper.SetupMockForSendTelephoneEvent();
+ EXPECT_TRUE(send_stream.SendTelephoneEvent(kTelephoneEventPayloadType,
+ kTelephoneEventCode, kTelephoneEventDuration));
+}
+TEST(AudioSendStreamTest, GetStats) {
+ ConfigHelper helper;
+ internal::AudioSendStream send_stream(helper.config(), helper.audio_state(),
+ helper.congestion_controller());
+ helper.SetupMockForGetStats();
AudioSendStream::Stats stats = send_stream.GetStats();
- const CallStatistics& call_stats = FakeVoiceEngine::kSendCallStats;
- const CodecInst& codec_inst = FakeVoiceEngine::kSendCodecInst;
- const ReportBlock& report_block = FakeVoiceEngine::kSendReportBlock;
- EXPECT_EQ(FakeVoiceEngine::kSendSsrc, stats.local_ssrc);
- EXPECT_EQ(static_cast<int64_t>(call_stats.bytesSent), stats.bytes_sent);
- EXPECT_EQ(call_stats.packetsSent, stats.packets_sent);
- EXPECT_EQ(static_cast<int32_t>(report_block.cumulative_num_packets_lost),
+ EXPECT_EQ(kSsrc, stats.local_ssrc);
+ EXPECT_EQ(static_cast<int64_t>(kCallStats.bytesSent), stats.bytes_sent);
+ EXPECT_EQ(kCallStats.packetsSent, stats.packets_sent);
+ EXPECT_EQ(static_cast<int32_t>(kReportBlock.cumulative_num_packets_lost),
stats.packets_lost);
- EXPECT_EQ(Q8ToFloat(report_block.fraction_lost), stats.fraction_lost);
- EXPECT_EQ(std::string(codec_inst.plname), stats.codec_name);
- EXPECT_EQ(static_cast<int32_t>(report_block.extended_highest_sequence_number),
+ EXPECT_EQ(Q8ToFloat(kReportBlock.fraction_lost), stats.fraction_lost);
+ EXPECT_EQ(std::string(kCodecInst.plname), stats.codec_name);
+ EXPECT_EQ(static_cast<int32_t>(kReportBlock.extended_highest_sequence_number),
stats.ext_seqnum);
- EXPECT_EQ(static_cast<int32_t>(report_block.interarrival_jitter /
- (codec_inst.plfreq / 1000)), stats.jitter_ms);
- EXPECT_EQ(call_stats.rttMs, stats.rtt_ms);
- EXPECT_EQ(static_cast<int32_t>(FakeVoiceEngine::kSendSpeechInputLevel),
- stats.audio_level);
+ EXPECT_EQ(static_cast<int32_t>(kReportBlock.interarrival_jitter /
+ (kCodecInst.plfreq / 1000)),
+ stats.jitter_ms);
+ EXPECT_EQ(kCallStats.rttMs, stats.rtt_ms);
+ EXPECT_EQ(static_cast<int32_t>(kSpeechInputLevel), stats.audio_level);
EXPECT_EQ(-1, stats.aec_quality_min);
- EXPECT_EQ(FakeVoiceEngine::kSendEchoDelayMedian, stats.echo_delay_median_ms);
- EXPECT_EQ(FakeVoiceEngine::kSendEchoDelayStdDev, stats.echo_delay_std_ms);
- EXPECT_EQ(FakeVoiceEngine::kSendEchoReturnLoss, stats.echo_return_loss);
- EXPECT_EQ(FakeVoiceEngine::kSendEchoReturnLossEnhancement,
- stats.echo_return_loss_enhancement);
+ EXPECT_EQ(kEchoDelayMedian, stats.echo_delay_median_ms);
+ EXPECT_EQ(kEchoDelayStdDev, stats.echo_delay_std_ms);
+ EXPECT_EQ(kEchoReturnLoss, stats.echo_return_loss);
+ EXPECT_EQ(kEchoReturnLossEnhancement, stats.echo_return_loss_enhancement);
EXPECT_FALSE(stats.typing_noise_detected);
}
+
+TEST(AudioSendStreamTest, GetStatsTypingNoiseDetected) {
+ ConfigHelper helper;
+ internal::AudioSendStream send_stream(helper.config(), helper.audio_state(),
+ helper.congestion_controller());
+ helper.SetupMockForGetStats();
+ EXPECT_FALSE(send_stream.GetStats().typing_noise_detected);
+
+ internal::AudioState* internal_audio_state =
+ static_cast<internal::AudioState*>(helper.audio_state().get());
+ VoiceEngineObserver* voe_observer =
+ static_cast<VoiceEngineObserver*>(internal_audio_state);
+ voe_observer->CallbackOnError(-1, VE_TYPING_NOISE_WARNING);
+ EXPECT_TRUE(send_stream.GetStats().typing_noise_detected);
+ voe_observer->CallbackOnError(-1, VE_TYPING_NOISE_OFF_WARNING);
+ EXPECT_FALSE(send_stream.GetStats().typing_noise_detected);
+}
} // namespace test
} // namespace webrtc
diff --git a/webrtc/audio/audio_sink.h b/webrtc/audio/audio_sink.h
new file mode 100644
index 0000000000..999644f4ce
--- /dev/null
+++ b/webrtc/audio/audio_sink.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_AUDIO_SINK_H_
+#define WEBRTC_AUDIO_AUDIO_SINK_H_
+
+#if defined(WEBRTC_POSIX) && !defined(__STDC_FORMAT_MACROS)
+// Avoid conflict with format_macros.h.
+#define __STDC_FORMAT_MACROS
+#endif
+
+#include <inttypes.h>
+#include <stddef.h>
+
+namespace webrtc {
+
+// Represents a simple push audio sink.
+class AudioSinkInterface {
+ public:
+ virtual ~AudioSinkInterface() {}
+
+ struct Data {
+ Data(int16_t* data,
+ size_t samples_per_channel,
+ int sample_rate,
+ size_t channels,
+ uint32_t timestamp)
+ : data(data),
+ samples_per_channel(samples_per_channel),
+ sample_rate(sample_rate),
+ channels(channels),
+ timestamp(timestamp) {}
+
+ int16_t* data; // The actual 16bit audio data.
+ size_t samples_per_channel; // Number of frames in the buffer.
+ int sample_rate; // Sample rate in Hz.
+ size_t channels; // Number of channels in the audio data.
+ uint32_t timestamp; // The RTP timestamp of the first sample.
+ };
+
+ virtual void OnData(const Data& audio) = 0;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_AUDIO_AUDIO_SINK_H_
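
The new AudioSinkInterface is a pure push API: an implementation only has to override OnData(). As a sketch only, here is a hypothetical sink that tracks the peak amplitude of the unmixed audio it receives; the class name and logic are illustrative, and it assumes the webrtc/audio/audio_sink.h header added above is on the include path:

    #include <algorithm>
    #include <cstdlib>
    #include "webrtc/audio/audio_sink.h"

    class PeakLevelSink : public webrtc::AudioSinkInterface {
     public:
      void OnData(const Data& audio) override {
        // Samples are interleaved; scan them all for the largest amplitude.
        const size_t total = audio.samples_per_channel * audio.channels;
        for (size_t i = 0; i < total; ++i) {
          peak_ = std::max(peak_, std::abs(static_cast<int>(audio.data[i])));
        }
      }
      int peak() const { return peak_; }

     private:
      int peak_ = 0;
    };

Such a sink would be installed via AudioReceiveStream::SetSink(), declared later in this patch; per that declaration, passing a null sink clears the existing one.
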
diff --git a/webrtc/audio/audio_state.cc b/webrtc/audio/audio_state.cc
new file mode 100644
index 0000000000..e63f97af2d
--- /dev/null
+++ b/webrtc/audio/audio_state.cc
@@ -0,0 +1,79 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/audio/audio_state.h"
+
+#include "webrtc/base/atomicops.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/voice_engine/include/voe_errors.h"
+
+namespace webrtc {
+namespace internal {
+
+AudioState::AudioState(const AudioState::Config& config)
+ : config_(config), voe_base_(config.voice_engine) {
+ process_thread_checker_.DetachFromThread();
+ // Only one AudioState should be created per VoiceEngine.
+ RTC_CHECK(voe_base_->RegisterVoiceEngineObserver(*this) != -1);
+}
+
+AudioState::~AudioState() {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ voe_base_->DeRegisterVoiceEngineObserver();
+}
+
+VoiceEngine* AudioState::voice_engine() {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ return config_.voice_engine;
+}
+
+bool AudioState::typing_noise_detected() const {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ rtc::CritScope lock(&crit_sect_);
+ return typing_noise_detected_;
+}
+
+// Reference count; implementation copied from rtc::RefCountedObject.
+int AudioState::AddRef() const {
+ return rtc::AtomicOps::Increment(&ref_count_);
+}
+
+// Reference count; implementation copied from rtc::RefCountedObject.
+int AudioState::Release() const {
+ int count = rtc::AtomicOps::Decrement(&ref_count_);
+ if (!count) {
+ delete this;
+ }
+ return count;
+}
+
+void AudioState::CallbackOnError(int channel_id, int err_code) {
+ RTC_DCHECK(process_thread_checker_.CalledOnValidThread());
+
+ // All call sites in VoE, as of this writing, specify -1 as channel_id.
+ RTC_DCHECK(channel_id == -1);
+ LOG(LS_INFO) << "VoiceEngine error " << err_code << " reported on channel "
+ << channel_id << ".";
+ if (err_code == VE_TYPING_NOISE_WARNING) {
+ rtc::CritScope lock(&crit_sect_);
+ typing_noise_detected_ = true;
+ } else if (err_code == VE_TYPING_NOISE_OFF_WARNING) {
+ rtc::CritScope lock(&crit_sect_);
+ typing_noise_detected_ = false;
+ }
+}
+} // namespace internal
+
+rtc::scoped_refptr<AudioState> AudioState::Create(
+ const AudioState::Config& config) {
+ return rtc::scoped_refptr<AudioState>(new internal::AudioState(config));
+}
+} // namespace webrtc
diff --git a/webrtc/audio/audio_state.h b/webrtc/audio/audio_state.h
new file mode 100644
index 0000000000..2cb83e4989
--- /dev/null
+++ b/webrtc/audio/audio_state.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_AUDIO_STATE_H_
+#define WEBRTC_AUDIO_AUDIO_STATE_H_
+
+#include "webrtc/audio_state.h"
+#include "webrtc/audio/scoped_voe_interface.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/thread_checker.h"
+#include "webrtc/voice_engine/include/voe_base.h"
+
+namespace webrtc {
+namespace internal {
+
+class AudioState final : public webrtc::AudioState,
+ public webrtc::VoiceEngineObserver {
+ public:
+ explicit AudioState(const AudioState::Config& config);
+ ~AudioState() override;
+
+ VoiceEngine* voice_engine();
+ bool typing_noise_detected() const;
+
+ private:
+ // rtc::RefCountInterface implementation.
+ int AddRef() const override;
+ int Release() const override;
+
+ // webrtc::VoiceEngineObserver implementation.
+ void CallbackOnError(int channel_id, int err_code) override;
+
+ rtc::ThreadChecker thread_checker_;
+ rtc::ThreadChecker process_thread_checker_;
+ const webrtc::AudioState::Config config_;
+
+ // We hold one interface pointer to the VoE to make sure it is kept alive.
+ ScopedVoEInterface<VoEBase> voe_base_;
+
+ // The critical section isn't strictly needed in this case, but xSAN bots may
+ // trigger on unprotected cross-thread access.
+ mutable rtc::CriticalSection crit_sect_;
+ bool typing_noise_detected_ GUARDED_BY(crit_sect_) = false;
+
+ // Reference count; implementation copied from rtc::RefCountedObject.
+ mutable volatile int ref_count_ = 0;
+
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(AudioState);
+};
+} // namespace internal
+} // namespace webrtc
+
+#endif // WEBRTC_AUDIO_AUDIO_STATE_H_
diff --git a/webrtc/audio/audio_state_unittest.cc b/webrtc/audio/audio_state_unittest.cc
new file mode 100644
index 0000000000..11fbdb4a86
--- /dev/null
+++ b/webrtc/audio/audio_state_unittest.cc
@@ -0,0 +1,80 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+#include "webrtc/audio/audio_state.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/test/mock_voice_engine.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+
+struct ConfigHelper {
+ ConfigHelper() {
+ EXPECT_CALL(voice_engine_,
+ RegisterVoiceEngineObserver(testing::_)).WillOnce(testing::Return(0));
+ EXPECT_CALL(voice_engine_,
+ DeRegisterVoiceEngineObserver()).WillOnce(testing::Return(0));
+ config_.voice_engine = &voice_engine_;
+ }
+ AudioState::Config& config() { return config_; }
+ MockVoiceEngine& voice_engine() { return voice_engine_; }
+
+ private:
+ testing::StrictMock<MockVoiceEngine> voice_engine_;
+ AudioState::Config config_;
+};
+} // namespace
+
+TEST(AudioStateTest, Create) {
+ ConfigHelper helper;
+ rtc::scoped_refptr<AudioState> audio_state =
+ AudioState::Create(helper.config());
+ EXPECT_TRUE(audio_state.get());
+}
+
+TEST(AudioStateTest, ConstructDestruct) {
+ ConfigHelper helper;
+ rtc::scoped_ptr<internal::AudioState> audio_state(
+ new internal::AudioState(helper.config()));
+}
+
+TEST(AudioStateTest, GetVoiceEngine) {
+ ConfigHelper helper;
+ rtc::scoped_ptr<internal::AudioState> audio_state(
+ new internal::AudioState(helper.config()));
+ EXPECT_EQ(audio_state->voice_engine(), &helper.voice_engine());
+}
+
+TEST(AudioStateTest, TypingNoiseDetected) {
+ ConfigHelper helper;
+ rtc::scoped_ptr<internal::AudioState> audio_state(
+ new internal::AudioState(helper.config()));
+ VoiceEngineObserver* voe_observer =
+ static_cast<VoiceEngineObserver*>(audio_state.get());
+ EXPECT_FALSE(audio_state->typing_noise_detected());
+
+ voe_observer->CallbackOnError(-1, VE_NOT_INITED);
+ EXPECT_FALSE(audio_state->typing_noise_detected());
+
+ voe_observer->CallbackOnError(-1, VE_TYPING_NOISE_WARNING);
+ EXPECT_TRUE(audio_state->typing_noise_detected());
+ voe_observer->CallbackOnError(-1, VE_NOT_INITED);
+ EXPECT_TRUE(audio_state->typing_noise_detected());
+
+ voe_observer->CallbackOnError(-1, VE_TYPING_NOISE_OFF_WARNING);
+ EXPECT_FALSE(audio_state->typing_noise_detected());
+ voe_observer->CallbackOnError(-1, VE_NOT_INITED);
+ EXPECT_FALSE(audio_state->typing_noise_detected());
+}
+} // namespace test
+} // namespace webrtc
diff --git a/webrtc/audio/webrtc_audio.gypi b/webrtc/audio/webrtc_audio.gypi
index b9d45db56d..53b7d16b1a 100644
--- a/webrtc/audio/webrtc_audio.gypi
+++ b/webrtc/audio/webrtc_audio.gypi
@@ -18,6 +18,9 @@
'audio/audio_receive_stream.h',
'audio/audio_send_stream.cc',
'audio/audio_send_stream.h',
+ 'audio/audio_sink.h',
+ 'audio/audio_state.cc',
+ 'audio/audio_state.h',
'audio/conversion.h',
'audio/scoped_voe_interface.h',
],
diff --git a/webrtc/audio_receive_stream.h b/webrtc/audio_receive_stream.h
index 3e5a518a7d..8cab094f4b 100644
--- a/webrtc/audio_receive_stream.h
+++ b/webrtc/audio_receive_stream.h
@@ -15,6 +15,7 @@
#include <string>
#include <vector>
+#include "webrtc/base/scoped_ptr.h"
#include "webrtc/config.h"
#include "webrtc/stream.h"
#include "webrtc/transport.h"
@@ -23,6 +24,12 @@
namespace webrtc {
class AudioDecoder;
+class AudioSinkInterface;
+
+// WORK IN PROGRESS
+// This class is under development and is not yet intended for use outside
+// of WebRtc/Libjingle. Please use the VoiceEngine API instead.
+// See: https://bugs.chromium.org/p/webrtc/issues/detail?id=4690
class AudioReceiveStream : public ReceiveStream {
public:
@@ -66,6 +73,12 @@ class AudioReceiveStream : public ReceiveStream {
// Sender SSRC used for sending RTCP (such as receiver reports).
uint32_t local_ssrc = 0;
+ // Enable feedback for send side bandwidth estimation.
+ // See
+ // https://tools.ietf.org/html/draft-holmer-rmcat-transport-wide-cc-extensions
+ // for details.
+ bool transport_cc = false;
+
// RTP header extensions used for the received stream.
std::vector<RtpExtension> extensions;
} rtp;
@@ -95,6 +108,16 @@ class AudioReceiveStream : public ReceiveStream {
};
virtual Stats GetStats() const = 0;
+
+ // Sets an audio sink that receives unmixed audio from the receive stream.
+ // Ownership of the sink is passed to the stream, so the caller can use the
+ // sink's destruction as a lifetime signal. Only one sink can be set at a
+ // time; passing a null sink clears an existing one.
+ // NOTE: Audio must still somehow be pulled through AudioTransport for audio
+ // to stream through this sink. In practice, this happens if mixed audio
+ // is being pulled+rendered and/or if audio is being pulled for the purposes
+ // of feeding to the AEC.
+ virtual void SetSink(rtc::scoped_ptr<AudioSinkInterface> sink) = 0;
};
} // namespace webrtc
diff --git a/webrtc/audio_send_stream.h b/webrtc/audio_send_stream.h
index 89b73e6e3e..d1af9e0103 100644
--- a/webrtc/audio_send_stream.h
+++ b/webrtc/audio_send_stream.h
@@ -23,6 +23,11 @@
namespace webrtc {
+// WORK IN PROGRESS
+// This class is under development and is not yet intended for use outside
+// of WebRtc/Libjingle. Please use the VoiceEngine API instead.
+// See: https://bugs.chromium.org/p/webrtc/issues/detail?id=4690
+
class AudioSendStream : public SendStream {
public:
struct Stats {
@@ -59,8 +64,11 @@ class AudioSendStream : public SendStream {
// Sender SSRC.
uint32_t ssrc = 0;
- // RTP header extensions used for the received stream.
+ // RTP header extensions used for the sent stream.
std::vector<RtpExtension> extensions;
+
+ // RTCP CNAME, see RFC 3550.
+ std::string c_name;
} rtp;
// Transport for outgoing packets. The transport is expected to exist for
@@ -81,6 +89,9 @@ class AudioSendStream : public SendStream {
int red_payload_type = -1; // pt, or -1 to disable REDundant coding.
};
+ // TODO(solenberg): Make payload_type a config property instead.
+ virtual bool SendTelephoneEvent(int payload_type, uint8_t event,
+ uint32_t duration_ms) = 0;
virtual Stats GetStats() const = 0;
};
} // namespace webrtc
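
SendTelephoneEvent(), added to the public interface above, first sets the telephone-event payload type on the channel and then sends the event out-of-band, returning false if either step fails. A hypothetical caller-side sketch; the payload type, event code, and duration are illustrative (event codes follow RFC 4733, where 0-9 map to the DTMF digits):

    #include <cstdint>
    #include "webrtc/audio_send_stream.h"

    bool SendDtmfDigitFive(webrtc::AudioSendStream* stream) {
      const int kDtmfPayloadType = 126;  // Negotiated telephone-event pt.
      const uint8_t kDigitFive = 5;      // DTMF event code for the digit '5'.
      const uint32_t kDurationMs = 160;  // A typical per-digit duration.
      return stream->SendTelephoneEvent(kDtmfPayloadType, kDigitFive,
                                        kDurationMs);
    }
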
diff --git a/webrtc/audio_state.h b/webrtc/audio_state.h
new file mode 100644
index 0000000000..fa5784c844
--- /dev/null
+++ b/webrtc/audio_state.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef WEBRTC_AUDIO_STATE_H_
+#define WEBRTC_AUDIO_STATE_H_
+
+#include "webrtc/base/refcount.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+
+namespace webrtc {
+
+class AudioDeviceModule;
+class VoiceEngine;
+
+// WORK IN PROGRESS
+// This class is under development and is not yet intended for use outside
+// of WebRtc/Libjingle. Please use the VoiceEngine API instead.
+// See: https://bugs.chromium.org/p/webrtc/issues/detail?id=4690
+
+// AudioState holds the state which must be shared between multiple instances of
+// webrtc::Call for audio processing purposes.
+class AudioState : public rtc::RefCountInterface {
+ public:
+ struct Config {
+ // VoiceEngine used for audio streams and audio/video synchronization.
+ // AudioState will tickle the VoE refcount to keep it alive for as long as
+ // the AudioState itself lives.
+ VoiceEngine* voice_engine = nullptr;
+
+ // The AudioDeviceModule associated with the Calls.
+ AudioDeviceModule* audio_device_module = nullptr;
+ };
+
+ // TODO(solenberg): Replace scoped_refptr with shared_ptr once we can use it.
+ static rtc::scoped_refptr<AudioState> Create(
+ const AudioState::Config& config);
+
+ virtual ~AudioState() {}
+};
+} // namespace webrtc
+
+#endif // WEBRTC_AUDIO_STATE_H_
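
A wiring sketch under the assumptions of this patch: one AudioState is created per VoiceEngine (it registers itself as the sole VoiceEngineObserver) and is then shared by the audio streams that need it. The function name is hypothetical and error handling is omitted:

    #include "webrtc/audio_state.h"

    rtc::scoped_refptr<webrtc::AudioState> CreateSharedAudioState(
        webrtc::VoiceEngine* voice_engine) {
      webrtc::AudioState::Config config;
      config.voice_engine = voice_engine;  // Pre-created, application-owned.
      // One AudioState per VoiceEngine: it registers itself as the sole
      // VoiceEngineObserver (see internal::AudioState above).
      return webrtc::AudioState::Create(config);
    }

The result would then be handed to each internal::AudioSendStream alongside its Config and a CongestionController, as in the constructor earlier in this patch.
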
diff --git a/webrtc/base/Android.mk b/webrtc/base/Android.mk
index 6a786df972..f2aafada78 100644
--- a/webrtc/base/Android.mk
+++ b/webrtc/base/Android.mk
@@ -21,6 +21,7 @@ LOCAL_SRC_FILES := \
checks.cc \
criticalsection.cc \
event.cc \
+ event_tracer.cc \
logging.cc \
platform_file.cc \
platform_thread.cc \
diff --git a/webrtc/base/BUILD.gn b/webrtc/base/BUILD.gn
index 11a26646be..0f7a3f2fbe 100644
--- a/webrtc/base/BUILD.gn
+++ b/webrtc/base/BUILD.gn
@@ -10,6 +10,13 @@ import("//build/config/crypto.gni")
import("//build/config/ui.gni")
import("../build/webrtc.gni")
+# Enable OpenSSL (BoringSSL) for iOS. This is covered in webrtc/supplement.gypi
+# for the GYP build.
+import("//build_overrides/webrtc.gni")
+if (is_ios && !build_with_chromium) {
+ use_openssl = true
+}
+
config("rtc_base_config") {
include_dirs = [
"//third_party/jsoncpp/overrides/include",
@@ -41,6 +48,7 @@ config("openssl_config") {
config("ios_config") {
libs = [
+ "AVFoundation.framework",
"CFNetwork.framework",
#"Foundation.framework", # Already included in //build/config:default_libs.
@@ -90,6 +98,7 @@ if (rtc_build_ssl == 0) {
# The subset of rtc_base approved for use outside of libjingle.
static_library("rtc_base_approved") {
+ deps = []
configs += [ "..:common_config" ]
public_configs = [ "..:common_inherited_config" ]
@@ -107,23 +116,28 @@ static_library("rtc_base_approved") {
"byteorder.h",
"checks.cc",
"checks.h",
+ "constructormagic.h",
"criticalsection.cc",
"criticalsection.h",
+ "deprecation.h",
"event.cc",
"event.h",
"event_tracer.cc",
"event_tracer.h",
"exp_filter.cc",
"exp_filter.h",
- "maybe.h",
"md5.cc",
"md5.h",
"md5digest.cc",
"md5digest.h",
+ "optional.h",
"platform_file.cc",
"platform_file.h",
"platform_thread.cc",
"platform_thread.h",
+ "platform_thread_types.h",
+ "random.cc",
+ "random.h",
"safe_conversions.h",
"safe_conversions_impl.h",
"scoped_ptr.h",
@@ -143,10 +157,16 @@ static_library("rtc_base_approved") {
"trace_event.h",
]
- if (!build_with_chromium) {
+ if (build_with_chromium) {
+ # Dependency on chromium's logging (in //base).
+ deps += [ "//base:base" ]
+ sources += [
+ "../../webrtc_overrides/webrtc/base/logging.cc",
+ "../../webrtc_overrides/webrtc/base/logging.h",
+ ]
+ include_dirs = [ "../../webrtc_overrides" ]
+ } else {
sources += [
- "basictypes.h",
- "constructormagic.h",
"logging.cc",
"logging.h",
]
@@ -157,7 +177,8 @@ static_library("rtc_base") {
cflags = []
cflags_cc = []
libs = []
- deps = [
+ deps = []
+ public_deps = [
":rtc_base_approved",
]
@@ -194,7 +215,6 @@ static_library("rtc_base") {
"autodetectproxy.h",
"base64.cc",
"base64.h",
- "basicdefs.h",
"common.cc",
"common.h",
"crc32.cc",
@@ -312,19 +332,20 @@ static_library("rtc_base") {
if (is_posix) {
sources += [
+ "ifaddrs-android.h",
+ "ifaddrs_converter.cc",
"unixfilesystem.cc",
"unixfilesystem.h",
]
}
if (build_with_chromium) {
- sources += [
- "../../webrtc_overrides/webrtc/base/logging.cc",
- "../../webrtc_overrides/webrtc/base/logging.h",
- ]
-
deps += [ "..:webrtc_common" ]
+ if (is_mac) {
+ sources += [ "macifaddrs_converter.cc" ]
+ }
+
if (is_win) {
sources += [ "../../webrtc_overrides/webrtc/base/win32socketinit.cc" ]
}
@@ -592,5 +613,28 @@ static_library("rtc_base") {
if (is_nacl) {
deps += [ "//native_client_sdk/src/libraries/nacl_io" ]
defines += [ "timezone=_timezone" ]
+ sources -= [ "ifaddrs_converter.cc" ]
+ }
+}
+
+if (is_ios) {
+ source_set("rtc_base_objc") {
+ deps = [
+ ":rtc_base",
+ ]
+ cflags = [ "-fobjc-arc" ]
+ configs += [ "..:common_config" ]
+ public_configs = [ "..:common_inherited_config" ]
+
+ sources = [
+ "objc/NSString+StdString.h",
+ "objc/NSString+StdString.mm",
+ "objc/RTCCameraPreviewView.h",
+ "objc/RTCCameraPreviewView.m",
+ "objc/RTCDispatcher.h",
+ "objc/RTCDispatcher.m",
+ "objc/RTCLogging.h",
+ "objc/RTCLogging.mm",
+ ]
}
}
diff --git a/webrtc/base/OWNERS b/webrtc/base/OWNERS
index 9a527df143..2f400904c6 100644
--- a/webrtc/base/OWNERS
+++ b/webrtc/base/OWNERS
@@ -9,4 +9,10 @@ pthatcher@webrtc.org
sergeyu@chromium.org
tommi@webrtc.org
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
+
per-file BUILD.gn=kjellander@webrtc.org
+
diff --git a/webrtc/base/array_view.h b/webrtc/base/array_view.h
index 019bd8b6c6..a7ca66cc95 100644
--- a/webrtc/base/array_view.h
+++ b/webrtc/base/array_view.h
@@ -11,18 +11,63 @@
#ifndef WEBRTC_BASE_ARRAY_VIEW_H_
#define WEBRTC_BASE_ARRAY_VIEW_H_
-#include <vector>
-
#include "webrtc/base/checks.h"
namespace rtc {
-// Keeps track of an array (a pointer and a size) that it doesn't own.
-// ArrayView objects are immutable except for assignment, and small enough to
-// be cheaply passed by value.
+// Many functions read from or write to arrays. The obvious way to do this is
+// to use two arguments, a pointer to the first element and an element count:
+//
+// bool Contains17(const int* arr, size_t size) {
+// for (size_t i = 0; i < size; ++i) {
+// if (arr[i] == 17)
+// return true;
+// }
+// return false;
+// }
+//
+// This is flexible, since it doesn't matter how the array is stored (C array,
+// std::vector, rtc::Buffer, ...), but it's error-prone because the caller has
+// to correctly specify the array length:
+//
+// Contains17(arr, arraysize(arr)); // C array
+// Contains17(&arr[0], arr.size()); // std::vector
+// Contains17(arr, size); // pointer + size
+// ...
+//
+// It's also kind of messy to have two separate arguments for what is
+// conceptually a single thing.
+//
+// Enter rtc::ArrayView<T>. It contains a T pointer (to an array it doesn't
+// own) and a count, and supports the basic things you'd expect, such as
+// indexing and iteration. It allows us to write our function like this:
+//
+// bool Contains17(rtc::ArrayView<const int> arr) {
+// for (auto e : arr) {
+// if (e == 17)
+// return true;
+// }
+// return false;
+// }
//
-// Note that ArrayView<T> and ArrayView<const T> are distinct types; this is
-// how you would represent mutable and unmutable views of an array.
+// And even better, because a bunch of things will implicitly convert to
+// ArrayView, we can call it like this:
+//
+// Contains17(arr); // C array
+// Contains17(arr); // std::vector
+// Contains17(rtc::ArrayView<int>(arr, size)); // pointer + size
+// ...
+//
+// One important point is that ArrayView<T> and ArrayView<const T> are
+// different types, which allow and don't allow mutation of the array elements,
+// respectively. The implicit conversions work just like you'd hope, so that
+// e.g. vector<int> will convert to either ArrayView<int> or ArrayView<const
+// int>, but const vector<int> will convert only to ArrayView<const int>.
+// (ArrayView itself can be the source type in such conversions, so
+// ArrayView<int> will convert to ArrayView<const int>.)
+//
+// Note: ArrayView is tiny (just a pointer and a count) and trivially copyable,
+// so it's probably cheaper to pass it by value than by const reference.
template <typename T>
class ArrayView final {
public:
@@ -50,17 +95,12 @@ class ArrayView final {
// std::vector).
template <typename U>
ArrayView(U& u) : ArrayView(u.data(), u.size()) {}
- // TODO(kwiberg): Remove the special case for std::vector (and the include of
- // <vector>); it is handled by the general case in C++11, since std::vector
- // has a data() method there.
- template <typename U>
- ArrayView(std::vector<U>& u)
- : ArrayView(u.empty() ? nullptr : &u[0], u.size()) {}
// Indexing, size, and iteration. These allow mutation even if the ArrayView
// is const, because the ArrayView doesn't own the array. (To prevent
// mutation, use ArrayView<const T>.)
size_t size() const { return size_; }
+ bool empty() const { return size_ == 0; }
T* data() const { return data_; }
T& operator[](size_t idx) const {
RTC_DCHECK_LT(idx, size_);
@@ -72,6 +112,15 @@ class ArrayView final {
const T* cbegin() const { return data_; }
const T* cend() const { return data_ + size_; }
+ // Comparing two ArrayViews compares their (pointer,size) pairs; it does
+ // *not* dereference the pointers.
+ friend bool operator==(const ArrayView& a, const ArrayView& b) {
+ return a.data_ == b.data_ && a.size_ == b.size_;
+ }
+ friend bool operator!=(const ArrayView& a, const ArrayView& b) {
+ return !(a == b);
+ }
+
private:
// Invariant: !data_ iff size_ == 0.
void CheckInvariant() const { RTC_DCHECK_EQ(!data_, size_ == 0); }
diff --git a/webrtc/base/array_view_unittest.cc b/webrtc/base/array_view_unittest.cc
index 0d1bff03d1..8bb1bcc4c6 100644
--- a/webrtc/base/array_view_unittest.cc
+++ b/webrtc/base/array_view_unittest.cc
@@ -214,4 +214,20 @@ TEST(ArrayViewTest, TestIteration) {
}
}
+TEST(ArrayViewTest, TestEmpty) {
+ EXPECT_TRUE(ArrayView<int>().empty());
+ const int a[] = {1, 2, 3};
+ EXPECT_FALSE(ArrayView<const int>(a).empty());
+}
+
+TEST(ArrayViewTest, TestCompare) {
+ int a[] = {1, 2, 3};
+ int b[] = {1, 2, 3};
+ EXPECT_EQ(ArrayView<int>(a), ArrayView<int>(a));
+ EXPECT_EQ(ArrayView<int>(), ArrayView<int>());
+ EXPECT_NE(ArrayView<int>(a), ArrayView<int>(b));
+ EXPECT_NE(ArrayView<int>(a), ArrayView<int>());
+ EXPECT_NE(ArrayView<int>(a), ArrayView<int>(a, 2));
+}
+
} // namespace rtc
diff --git a/webrtc/base/atomicops.h b/webrtc/base/atomicops.h
index a863566a67..a286bf01cc 100644
--- a/webrtc/base/atomicops.h
+++ b/webrtc/base/atomicops.h
@@ -42,6 +42,16 @@ class AtomicOps {
new_value,
old_value);
}
+ // Pointer variants.
+ template <typename T>
+ static T* AcquireLoadPtr(T* volatile* ptr) {
+ return *ptr;
+ }
+ template <typename T>
+ static T* CompareAndSwapPtr(T* volatile* ptr, T* old_value, T* new_value) {
+ return static_cast<T*>(::InterlockedCompareExchangePointer(
+ reinterpret_cast<PVOID volatile*>(ptr), new_value, old_value));
+ }
#else
static int Increment(volatile int* i) {
return __sync_add_and_fetch(i, 1);
@@ -58,6 +68,15 @@ class AtomicOps {
static int CompareAndSwap(volatile int* i, int old_value, int new_value) {
return __sync_val_compare_and_swap(i, old_value, new_value);
}
+ // Pointer variants.
+ template <typename T>
+ static T* AcquireLoadPtr(T* volatile* ptr) {
+ return __atomic_load_n(ptr, __ATOMIC_ACQUIRE);
+ }
+ template <typename T>
+ static T* CompareAndSwapPtr(T* volatile* ptr, T* old_value, T* new_value) {
+ return __sync_val_compare_and_swap(ptr, old_value, new_value);
+ }
#endif
};
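
The new pointer variants support the classic publish-once pattern: a racy acquire-load on the fast path, a compare-and-swap to install the pointer, and cleanup by the losing thread. A sketch under those assumptions; Foo and GetFoo() are hypothetical, only AtomicOps is from this patch:

    #include "webrtc/base/atomicops.h"

    struct Foo {
      int value = 0;
    };

    Foo* volatile g_foo = nullptr;

    Foo* GetFoo() {
      Foo* instance = rtc::AtomicOps::AcquireLoadPtr(&g_foo);
      if (!instance) {
        Foo* fresh = new Foo();
        // Install 'fresh' only if no other thread got there first; the call
        // returns the previous value, i.e. nullptr on success.
        instance = rtc::AtomicOps::CompareAndSwapPtr(
            &g_foo, static_cast<Foo*>(nullptr), fresh);
        if (instance == nullptr) {
          instance = fresh;  // We won the race; our object is published.
        } else {
          delete fresh;  // Another thread won; use its object instead.
        }
      }
      return instance;
    }
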
diff --git a/webrtc/base/autodetectproxy_unittest.cc b/webrtc/base/autodetectproxy_unittest.cc
index bc57304c0a..2ae7a6aa25 100644
--- a/webrtc/base/autodetectproxy_unittest.cc
+++ b/webrtc/base/autodetectproxy_unittest.cc
@@ -12,7 +12,6 @@
#include "webrtc/base/gunit.h"
#include "webrtc/base/httpcommon.h"
#include "webrtc/base/httpcommon-inl.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
namespace rtc {
diff --git a/webrtc/base/base.gyp b/webrtc/base/base.gyp
index 4b6ad85362..c9182632ab 100644
--- a/webrtc/base/base.gyp
+++ b/webrtc/base/base.gyp
@@ -22,6 +22,46 @@
}],
],
}],
+ # TODO(tkchin): Mac support. There are a bunch of problems right now because
+ # of some settings pulled down from Chromium.
+ ['OS=="ios"', {
+ 'targets': [
+ {
+ 'target_name': 'rtc_base_objc',
+ 'type': 'static_library',
+ 'dependencies': [
+ 'rtc_base',
+ ],
+ 'sources': [
+ 'objc/NSString+StdString.h',
+ 'objc/NSString+StdString.mm',
+ 'objc/RTCDispatcher.h',
+ 'objc/RTCDispatcher.m',
+ 'objc/RTCLogging.h',
+ 'objc/RTCLogging.mm',
+ ],
+ 'conditions': [
+ ['OS=="ios"', {
+ 'sources': [
+ 'objc/RTCCameraPreviewView.h',
+ 'objc/RTCCameraPreviewView.m',
+ ],
+ 'all_dependent_settings': {
+ 'xcode_settings': {
+ 'OTHER_LDFLAGS': [
+ '-framework AVFoundation',
+ ],
+ },
+ },
+ }],
+ ],
+ 'xcode_settings': {
+ 'CLANG_ENABLE_OBJC_ARC': 'YES',
+ 'CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS': 'YES',
+ },
+ }
+ ],
+ }], # OS=="ios"
],
'targets': [
{
@@ -31,7 +71,6 @@
'sources': [
'array_view.h',
'atomicops.h',
- 'basictypes.h',
'bitbuffer.cc',
'bitbuffer.h',
'buffer.cc',
@@ -46,6 +85,7 @@
'constructormagic.h',
'criticalsection.cc',
'criticalsection.h',
+ 'deprecation.h',
'event.cc',
'event.h',
'event_tracer.cc',
@@ -54,15 +94,18 @@
'exp_filter.h',
'logging.cc',
'logging.h',
- 'maybe.h',
'md5.cc',
'md5.h',
'md5digest.cc',
'md5digest.h',
+ 'optional.h',
'platform_file.cc',
'platform_file.h',
'platform_thread.cc',
'platform_thread.h',
+ 'platform_thread_types.h',
+ 'random.cc',
+ 'random.h',
'ratetracker.cc',
'ratetracker.h',
'safe_conversions.h',
@@ -85,9 +128,16 @@
],
'conditions': [
['build_with_chromium==1', {
+ 'dependencies': [
+ '<(DEPTH)/base/base.gyp:base',
+ ],
'include_dirs': [
'../../webrtc_overrides',
],
+ 'sources': [
+ '../../webrtc_overrides/webrtc/base/logging.cc',
+ '../../webrtc_overrides/webrtc/base/logging.h',
+ ],
'sources!': [
'logging.cc',
'logging.h',
@@ -102,6 +152,9 @@
'<(webrtc_root)/common.gyp:webrtc_common',
'rtc_base_approved',
],
+ 'export_dependent_settings': [
+ 'rtc_base_approved',
+ ],
'defines': [
'FEATURE_ENABLE_SSL',
'SSL_USE_OPENSSL',
@@ -131,7 +184,6 @@
'bandwidthsmoother.h',
'base64.cc',
'base64.h',
- 'basicdefs.h',
'bind.h',
'callback.h',
'common.cc',
@@ -172,6 +224,9 @@
'httpserver.h',
'ifaddrs-android.cc',
'ifaddrs-android.h',
+ 'ifaddrs_converter.cc',
+ 'ifaddrs_converter.h',
+ 'macifaddrs_converter.cc',
'iosfilesystem.mm',
'ipaddress.cc',
'ipaddress.h',
@@ -380,8 +435,6 @@
'../../boringssl/src/include',
],
'sources': [
- '../../webrtc_overrides/webrtc/base/logging.cc',
- '../../webrtc_overrides/webrtc/base/logging.h',
'../../webrtc_overrides/webrtc/base/win32socketinit.cc',
],
'sources!': [
@@ -390,7 +443,6 @@
'bandwidthsmoother.h',
'bind.h',
'callback.h',
- 'constructormagic.h',
'dbus.cc',
'dbus.h',
'diskcache_win32.cc',
@@ -494,6 +546,17 @@
'WEBRTC_EXTERNAL_JSON',
],
}],
+ ['OS=="win" and clang==1', {
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'AdditionalOptions': [
+ # Disable warnings failing when compiling with Clang on Windows.
+ # https://bugs.chromium.org/p/webrtc/issues/detail?id=5366
+ '-Wno-missing-braces',
+ ],
+ },
+ },
+ }],
],
}],
['OS == "android"', {
@@ -602,6 +665,9 @@
],
}],
['OS=="win"', {
+ 'sources!': [
+ 'ifaddrs_converter.cc',
+ ],
'link_settings': {
'libraries': [
'-lcrypt32.lib',
@@ -653,6 +719,7 @@
}],
['OS!="ios" and OS!="mac"', {
'sources!': [
+ 'macifaddrs_converter.cc',
'scoped_autorelease_pool.mm',
],
}],
diff --git a/webrtc/base/base_tests.gyp b/webrtc/base/base_tests.gyp
index ee371f4a62..5d73d50756 100644
--- a/webrtc/base/base_tests.gyp
+++ b/webrtc/base/base_tests.gyp
@@ -71,22 +71,25 @@
'httpserver_unittest.cc',
'ipaddress_unittest.cc',
'logging_unittest.cc',
- 'maybe_unittest.cc',
'md5digest_unittest.cc',
'messagedigest_unittest.cc',
'messagequeue_unittest.cc',
'multipart_unittest.cc',
'nat_unittest.cc',
'network_unittest.cc',
+ 'optional_unittest.cc',
'optionsfile_unittest.cc',
'pathutils_unittest.cc',
+ 'platform_thread_unittest.cc',
'profiler_unittest.cc',
'proxy_unittest.cc',
'proxydetect_unittest.cc',
+ 'random_unittest.cc',
'ratelimiter_unittest.cc',
'ratetracker_unittest.cc',
'referencecountedsingletonfactory_unittest.cc',
'rollingaccumulator_unittest.cc',
+ 'rtccertificate_unittests.cc',
'scopedptrcollection_unittest.cc',
'sha1digest_unittest.cc',
'sharedexclusivelock_unittest.cc',
@@ -131,6 +134,18 @@
'win32windowpicker_unittest.cc',
],
}],
+ ['OS=="win" and clang==1', {
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'AdditionalOptions': [
+ # Disable warnings failing when compiling with Clang on Windows.
+ # https://bugs.chromium.org/p/webrtc/issues/detail?id=5366
+ '-Wno-missing-braces',
+ '-Wno-unused-const-variable',
+ ],
+ },
+ },
+ }],
['OS=="mac"', {
'sources': [
'macutils_unittest.cc',
diff --git a/webrtc/base/basicdefs.h b/webrtc/base/basicdefs.h
deleted file mode 100644
index 1dee2ae658..0000000000
--- a/webrtc/base/basicdefs.h
+++ /dev/null
@@ -1,20 +0,0 @@
-/*
- * Copyright 2004 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_BASE_BASICDEFS_H_
-#define WEBRTC_BASE_BASICDEFS_H_
-
-#if HAVE_CONFIG_H
-#include "config.h" // NOLINT
-#endif
-
-#define ARRAY_SIZE(x) (static_cast<int>(sizeof(x) / sizeof(x[0])))
-
-#endif // WEBRTC_BASE_BASICDEFS_H_
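
With basicdefs.h deleted, the call sites below migrate from the ARRAY_SIZE macro to arraysize() from webrtc/base/arraysize.h. The gain is type safety: the function-template form only accepts real arrays, whereas the macro silently accepts a pointer and yields garbage. A sketch of the idiom, in the form WebRTC inherited from Chromium:

    #include <cstddef>

    // The helper declaration (never defined) captures the array bound N in
    // its return type, so sizeof() recovers it at compile time.
    template <typename T, std::size_t N>
    char (&ArraySizeHelper(T (&array)[N]))[N];
    #define arraysize(array) (sizeof(ArraySizeHelper(array)))

    // Unlike ARRAY_SIZE(p), passing a pointer fails to compile instead of
    // silently computing sizeof(ptr) / sizeof(*ptr):
    //   int arr[3];
    //   static_assert(arraysize(arr) == 3, "bound known at compile time");
    //   int* p = arr;
    //   arraysize(p);  // Does not compile.
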
diff --git a/webrtc/base/bitbuffer_unittest.cc b/webrtc/base/bitbuffer_unittest.cc
index 99701f7cab..ce42257255 100644
--- a/webrtc/base/bitbuffer_unittest.cc
+++ b/webrtc/base/bitbuffer_unittest.cc
@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/bitbuffer.h"
#include "webrtc/base/bytebuffer.h"
#include "webrtc/base/common.h"
@@ -301,11 +302,11 @@ TEST(BitBufferWriterTest, SymmetricGolomb) {
char test_string[] = "my precious";
uint8_t bytes[64] = {0};
BitBufferWriter buffer(bytes, 64);
- for (size_t i = 0; i < ARRAY_SIZE(test_string); ++i) {
+ for (size_t i = 0; i < arraysize(test_string); ++i) {
EXPECT_TRUE(buffer.WriteExponentialGolomb(test_string[i]));
}
buffer.Seek(0, 0);
- for (size_t i = 0; i < ARRAY_SIZE(test_string); ++i) {
+ for (size_t i = 0; i < arraysize(test_string); ++i) {
uint32_t val;
EXPECT_TRUE(buffer.ReadExponentialGolomb(&val));
EXPECT_LE(val, std::numeric_limits<uint8_t>::max());
diff --git a/webrtc/base/buffer.cc b/webrtc/base/buffer.cc
index 90e687bbb6..62855f1620 100644
--- a/webrtc/base/buffer.cc
+++ b/webrtc/base/buffer.cc
@@ -11,6 +11,7 @@
#include "webrtc/base/buffer.h"
#include <cassert>
+#include <utility>
namespace rtc {
@@ -22,7 +23,9 @@ Buffer::Buffer(const Buffer& buf) : Buffer(buf.data(), buf.size()) {
}
Buffer::Buffer(Buffer&& buf)
- : size_(buf.size()), capacity_(buf.capacity()), data_(buf.data_.Pass()) {
+ : size_(buf.size()),
+ capacity_(buf.capacity()),
+ data_(std::move(buf.data_)) {
assert(IsConsistent());
buf.OnMovedFrom();
}
diff --git a/webrtc/base/buffer.h b/webrtc/base/buffer.h
index 7b9402b511..ff9bb73d3f 100644
--- a/webrtc/base/buffer.h
+++ b/webrtc/base/buffer.h
@@ -15,6 +15,8 @@
#include <cassert>
#include <cstring>
#include <utility> // std::swap (C++11 and later)
+
+#include "webrtc/base/deprecation.h"
#include "webrtc/base/scoped_ptr.h"
namespace rtc {
@@ -104,7 +106,7 @@ class Buffer {
assert(buf.IsConsistent());
size_ = buf.size_;
capacity_ = buf.capacity_;
- data_ = buf.data_.Pass();
+ data_ = std::move(buf.data_);
buf.OnMovedFrom();
return *this;
}
@@ -164,15 +166,17 @@ class Buffer {
return;
scoped_ptr<uint8_t[]> new_data(new uint8_t[capacity]);
std::memcpy(new_data.get(), data_.get(), size_);
- data_ = new_data.Pass();
+ data_ = std::move(new_data);
capacity_ = capacity;
assert(IsConsistent());
}
- // We can't call std::move(b), so call b.Pass() instead to do the same job.
- Buffer&& Pass() {
+ // b.Pass() does the same thing as std::move(b).
+ // Deprecated; remove in March 2016 (bug 5373).
+ RTC_DEPRECATED Buffer&& Pass() { return DEPRECATED_Pass(); }
+ Buffer&& DEPRECATED_Pass() {
assert(IsConsistent());
- return static_cast<Buffer&&>(*this);
+ return std::move(*this);
}
// Resets the buffer to zero size and capacity. Works even if the buffer has
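
With Pass() now deprecated in favor of std::move() (bug 5373 above), call sites migrate mechanically. A small sketch; MakeBuffer() is a hypothetical producer used only for illustration:

    #include <cstdint>
    #include <utility>
    #include "webrtc/base/buffer.h"

    rtc::Buffer MakeBuffer() {
      const uint8_t data[] = {1, 2, 3, 4};
      return rtc::Buffer(data, sizeof(data));  // Moved (or elided) on return.
    }

    void Migrate() {
      rtc::Buffer a = MakeBuffer();
      rtc::Buffer b = std::move(a);  // New style.
      // rtc::Buffer c = a.Pass();   // Old style; now warns via RTC_DEPRECATED.
    }
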
diff --git a/webrtc/base/buffer_unittest.cc b/webrtc/base/buffer_unittest.cc
index f1ae6b8676..0b93b9b56e 100644
--- a/webrtc/base/buffer_unittest.cc
+++ b/webrtc/base/buffer_unittest.cc
@@ -138,7 +138,7 @@ TEST(BufferTest, TestEnsureCapacityLarger) {
TEST(BufferTest, TestMoveConstruct) {
Buffer buf1(kTestData, 3, 40);
const uint8_t* data = buf1.data();
- Buffer buf2(buf1.Pass());
+ Buffer buf2(buf1.DEPRECATED_Pass());
EXPECT_EQ(buf2.size(), 3u);
EXPECT_EQ(buf2.capacity(), 40u);
EXPECT_EQ(buf2.data(), data);
@@ -152,7 +152,7 @@ TEST(BufferTest, TestMoveAssign) {
Buffer buf1(kTestData, 3, 40);
const uint8_t* data = buf1.data();
Buffer buf2(kTestData);
- buf2 = buf1.Pass();
+ buf2 = buf1.DEPRECATED_Pass();
EXPECT_EQ(buf2.size(), 3u);
EXPECT_EQ(buf2.capacity(), 40u);
EXPECT_EQ(buf2.data(), data);
diff --git a/webrtc/base/bufferqueue.cc b/webrtc/base/bufferqueue.cc
index 955af51f6b..1ac57abc0c 100644
--- a/webrtc/base/bufferqueue.cc
+++ b/webrtc/base/bufferqueue.cc
@@ -38,19 +38,19 @@ bool BufferQueue::ReadFront(void* buffer, size_t bytes, size_t* bytes_read) {
return false;
}
+ bool was_writable = queue_.size() < capacity_;
Buffer* packet = queue_.front();
queue_.pop_front();
- size_t next_packet_size = packet->size();
- if (bytes > next_packet_size) {
- bytes = next_packet_size;
- }
-
+ bytes = std::min(bytes, packet->size());
memcpy(buffer, packet->data(), bytes);
if (bytes_read) {
*bytes_read = bytes;
}
free_list_.push_back(packet);
+ if (!was_writable) {
+ NotifyWritableForTest();
+ }
return true;
}
@@ -61,6 +61,7 @@ bool BufferQueue::WriteBack(const void* buffer, size_t bytes,
return false;
}
+ bool was_readable = !queue_.empty();
Buffer* packet;
if (!free_list_.empty()) {
packet = free_list_.back();
@@ -74,6 +75,9 @@ bool BufferQueue::WriteBack(const void* buffer, size_t bytes,
*bytes_written = bytes;
}
queue_.push_back(packet);
+ if (!was_readable) {
+ NotifyReadableForTest();
+ }
return true;
}
diff --git a/webrtc/base/bufferqueue.h b/webrtc/base/bufferqueue.h
index 4941fccf2e..458f0189cd 100644
--- a/webrtc/base/bufferqueue.h
+++ b/webrtc/base/bufferqueue.h
@@ -21,26 +21,33 @@ namespace rtc {
class BufferQueue {
public:
- // Creates a buffer queue queue with a given capacity and default buffer size.
+ // Creates a buffer queue with a given capacity and default buffer size.
BufferQueue(size_t capacity, size_t default_size);
- ~BufferQueue();
+ virtual ~BufferQueue();
// Return number of queued buffers.
size_t size() const;
// ReadFront will only read one buffer at a time and will truncate buffers
// that don't fit in the passed memory.
+ // Returns true if a buffer was read; false if the queue was empty.
bool ReadFront(void* data, size_t bytes, size_t* bytes_read);
// WriteBack always writes either the complete memory or nothing.
+ // Returns true if the data was queued; false if the queue was full.
bool WriteBack(const void* data, size_t bytes, size_t* bytes_written);
+ protected:
+ // These methods are called when the state of the queue changes.
+ virtual void NotifyReadableForTest() {}
+ virtual void NotifyWritableForTest() {}
+
private:
size_t capacity_;
size_t default_size_;
- std::deque<Buffer*> queue_;
- std::vector<Buffer*> free_list_;
- mutable CriticalSection crit_; // object lock
+ mutable CriticalSection crit_;
+ std::deque<Buffer*> queue_ GUARDED_BY(crit_);
+ std::vector<Buffer*> free_list_ GUARDED_BY(crit_);
RTC_DISALLOW_COPY_AND_ASSIGN(BufferQueue);
};
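
The protected NotifyReadableForTest()/NotifyWritableForTest() hooks let a test subclass observe the empty-to-readable and full-to-writable transitions that ReadFront() and WriteBack() now signal. A sketch of such a subclass; the class name and counters are illustrative, not part of this patch:

    #include <cstddef>
    #include "webrtc/base/bufferqueue.h"

    class CountingBufferQueue : public rtc::BufferQueue {
     public:
      CountingBufferQueue(size_t capacity, size_t default_size)
          : rtc::BufferQueue(capacity, default_size) {}

      int readable_events() const { return readable_events_; }
      int writable_events() const { return writable_events_; }

     protected:
      // Called by WriteBack() when the queue goes from empty to non-empty.
      void NotifyReadableForTest() override { ++readable_events_; }
      // Called by ReadFront() when the queue goes from full to having room.
      void NotifyWritableForTest() override { ++writable_events_; }

     private:
      int readable_events_ = 0;
      int writable_events_ = 0;
    };
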
diff --git a/webrtc/base/bytebuffer_unittest.cc b/webrtc/base/bytebuffer_unittest.cc
index 56b0e055f5..0287d85e6f 100644
--- a/webrtc/base/bytebuffer_unittest.cc
+++ b/webrtc/base/bytebuffer_unittest.cc
@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/bytebuffer.h"
#include "webrtc/base/byteorder.h"
#include "webrtc/base/common.h"
@@ -114,7 +115,7 @@ TEST(ByteBufferTest, TestGetSetReadPosition) {
TEST(ByteBufferTest, TestReadWriteBuffer) {
ByteBuffer::ByteOrder orders[2] = { ByteBuffer::ORDER_HOST,
ByteBuffer::ORDER_NETWORK };
- for (size_t i = 0; i < ARRAY_SIZE(orders); i++) {
+ for (size_t i = 0; i < arraysize(orders); i++) {
ByteBuffer buffer(orders[i]);
EXPECT_EQ(orders[i], buffer.Order());
uint8_t ru8;
diff --git a/webrtc/base/callback_unittest.cc b/webrtc/base/callback_unittest.cc
index 66c939140e..db294cd96e 100644
--- a/webrtc/base/callback_unittest.cc
+++ b/webrtc/base/callback_unittest.cc
@@ -11,6 +11,8 @@
#include "webrtc/base/bind.h"
#include "webrtc/base/callback.h"
#include "webrtc/base/gunit.h"
+#include "webrtc/base/keep_ref_until_done.h"
+#include "webrtc/base/refcount.h"
namespace rtc {
@@ -26,6 +28,21 @@ struct BindTester {
int b(int x) const { return x * x; }
};
+class RefCountedBindTester : public RefCountInterface {
+ public:
+ RefCountedBindTester() : count_(0) {}
+ int AddRef() const override {
+ return ++count_;
+ }
+ int Release() const {
+ return --count_;
+ }
+ int RefCount() const { return count_; }
+
+ private:
+ mutable int count_;
+};
+
} // namespace
TEST(CallbackTest, VoidReturn) {
@@ -78,4 +95,46 @@ TEST(CallbackTest, WithBind) {
EXPECT_EQ(25, cb1());
}
+TEST(KeepRefUntilDoneTest, simple) {
+ RefCountedBindTester t;
+ EXPECT_EQ(0, t.RefCount());
+ {
+ Callback0<void> cb = KeepRefUntilDone(&t);
+ EXPECT_EQ(1, t.RefCount());
+ cb();
+ EXPECT_EQ(1, t.RefCount());
+ cb();
+ EXPECT_EQ(1, t.RefCount());
+ }
+ EXPECT_EQ(0, t.RefCount());
+}
+
+TEST(KeepRefUntilDoneTest, copy) {
+ RefCountedBindTester t;
+ EXPECT_EQ(0, t.RefCount());
+ Callback0<void> cb2;
+ {
+ Callback0<void> cb = KeepRefUntilDone(&t);
+ EXPECT_EQ(1, t.RefCount());
+ cb2 = cb;
+ }
+ EXPECT_EQ(1, t.RefCount());
+ cb2 = Callback0<void>();
+ EXPECT_EQ(0, t.RefCount());
+}
+
+TEST(KeepRefUntilDoneTest, scopedref) {
+ RefCountedBindTester t;
+ EXPECT_EQ(0, t.RefCount());
+ {
+ scoped_refptr<RefCountedBindTester> t_scoped_ref(&t);
+ Callback0<void> cb = KeepRefUntilDone(t_scoped_ref);
+ t_scoped_ref = nullptr;
+ EXPECT_EQ(1, t.RefCount());
+ cb();
+ EXPECT_EQ(1, t.RefCount());
+ }
+ EXPECT_EQ(0, t.RefCount());
+}
+
} // namespace rtc
diff --git a/webrtc/base/common.h b/webrtc/base/common.h
index e615c7669a..1b1dac64b0 100644
--- a/webrtc/base/common.h
+++ b/webrtc/base/common.h
@@ -54,14 +54,16 @@ inline void RtcUnused(const void*) {}
#endif // !defined(WEBRTC_WIN)
-#define ARRAY_SIZE(x) (static_cast<int>(sizeof(x) / sizeof(x[0])))
-
/////////////////////////////////////////////////////////////////////////////
// Assertions
/////////////////////////////////////////////////////////////////////////////
#ifndef ENABLE_DEBUG
-#define ENABLE_DEBUG _DEBUG
+#if !defined(NDEBUG)
+#define ENABLE_DEBUG 1
+#else
+#define ENABLE_DEBUG 0
+#endif
#endif // !defined(ENABLE_DEBUG)
// Even for release builds, allow for the override of LogAssert. Though no
@@ -176,7 +178,7 @@ inline bool ImplicitCastToBool(bool result) { return result; }
// Forces compiler to inline, even against its better judgement. Use wisely.
#if defined(__GNUC__)
-#define FORCE_INLINE __attribute__((always_inline))
+#define FORCE_INLINE __attribute__ ((__always_inline__))
#elif defined(WEBRTC_WIN)
#define FORCE_INLINE __forceinline
#else
@@ -191,7 +193,7 @@ inline bool ImplicitCastToBool(bool result) { return result; }
// libjingle are merged.
#if !defined(WARN_UNUSED_RESULT)
#if defined(__GNUC__) || defined(__clang__)
-#define WARN_UNUSED_RESULT __attribute__((warn_unused_result))
+#define WARN_UNUSED_RESULT __attribute__ ((__warn_unused_result__))
#else
#define WARN_UNUSED_RESULT
#endif
diff --git a/webrtc/base/cpumonitor_unittest.cc b/webrtc/base/cpumonitor_unittest.cc
deleted file mode 100644
index 379f62fd3c..0000000000
--- a/webrtc/base/cpumonitor_unittest.cc
+++ /dev/null
@@ -1,389 +0,0 @@
-/*
- * Copyright 2010 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <iomanip>
-#include <iostream>
-#include <vector>
-
-#if defined(WEBRTC_WIN)
-#include "webrtc/base/win32.h"
-#endif
-
-#include "webrtc/base/cpumonitor.h"
-#include "webrtc/base/flags.h"
-#include "webrtc/base/gunit.h"
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/base/thread.h"
-#include "webrtc/base/timeutils.h"
-#include "webrtc/base/timing.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
-
-namespace rtc {
-
-static const int kMaxCpus = 1024;
-static const int kSettleTime = 100; // Amount of time to wait between tests.
-static const int kIdleTime = 500; // Amount of time to be idle in ms.
-static const int kBusyTime = 1000; // Amount of time to be busy in ms.
-static const int kLongInterval = 2000; // Interval longer than busy times.
-
-class BusyThread : public rtc::Thread {
- public:
- BusyThread(double load, double duration, double interval) :
- load_(load), duration_(duration), interval_(interval) {
- }
- virtual ~BusyThread() {
- Stop();
- }
- void Run() {
- Timing time;
- double busy_time = interval_ * load_ / 100.0;
- for (;;) {
- time.BusyWait(busy_time);
- time.IdleWait(interval_ - busy_time);
- if (duration_) {
- duration_ -= interval_;
- if (duration_ <= 0) {
- break;
- }
- }
- }
- }
- private:
- double load_;
- double duration_;
- double interval_;
-};
-
-class CpuLoadListener : public sigslot::has_slots<> {
- public:
- CpuLoadListener()
- : current_cpus_(0),
- cpus_(0),
- process_load_(.0f),
- system_load_(.0f),
- count_(0) {
- }
-
- void OnCpuLoad(int current_cpus, int cpus, float proc_load, float sys_load) {
- current_cpus_ = current_cpus;
- cpus_ = cpus;
- process_load_ = proc_load;
- system_load_ = sys_load;
- ++count_;
- }
-
- int current_cpus() const { return current_cpus_; }
- int cpus() const { return cpus_; }
- float process_load() const { return process_load_; }
- float system_load() const { return system_load_; }
- int count() const { return count_; }
-
- private:
- int current_cpus_;
- int cpus_;
- float process_load_;
- float system_load_;
- int count_;
-};
-
-// Set affinity (which cpu to run on), but respecting FLAG_affinity:
-// -1 means no affinity - run on whatever cpu is available.
-// 0 .. N means run on specific cpu. The tool will create N threads and call
-// SetThreadAffinity on 0 to N - 1 as cpu. FLAG_affinity sets the first cpu
-// so the range becomes affinity to affinity + N - 1
-// Note that this function affects Windows scheduling, effectively giving
-// the thread with affinity for a specified CPU more priority on that CPU.
-bool SetThreadAffinity(BusyThread* t, int cpu, int affinity) {
-#if defined(WEBRTC_WIN)
- if (affinity >= 0) {
- return ::SetThreadAffinityMask(t->GetHandle(),
- 1 << (cpu + affinity)) != FALSE;
- }
-#endif
- return true;
-}
-
-bool SetThreadPriority(BusyThread* t, int prio) {
- if (!prio) {
- return true;
- }
- bool ok = t->SetPriority(static_cast<rtc::ThreadPriority>(prio));
- if (!ok) {
- std::cout << "Error setting thread priority." << std::endl;
- }
- return ok;
-}
-
-int CpuLoad(double cpuload, double duration, int numthreads,
- int priority, double interval, int affinity) {
- int ret = 0;
- std::vector<BusyThread*> threads;
- for (int i = 0; i < numthreads; ++i) {
- threads.push_back(new BusyThread(cpuload, duration, interval));
- // NOTE(fbarchard): Priority must be done before Start.
- if (!SetThreadPriority(threads[i], priority) ||
- !threads[i]->Start() ||
- !SetThreadAffinity(threads[i], i, affinity)) {
- ret = 1;
- break;
- }
- }
- // Wait on each thread
- if (ret == 0) {
- for (int i = 0; i < numthreads; ++i) {
- threads[i]->Stop();
- }
- }
-
- for (int i = 0; i < numthreads; ++i) {
- delete threads[i];
- }
- return ret;
-}
-
-// Make 2 CPUs busy
-static void CpuTwoBusyLoop(int busytime) {
- CpuLoad(100.0, busytime / 1000.0, 2, 1, 0.050, -1);
-}
-
-// Make 1 CPU busy.
-static void CpuBusyLoop(int busytime) {
- CpuLoad(100.0, busytime / 1000.0, 1, 1, 0.050, -1);
-}
-
-// Make 1 CPU half busy.
-static void CpuHalfBusyLoop(int busytime) {
- CpuLoad(50.0, busytime / 1000.0, 1, 1, 0.050, -1);
-}
-
-void TestCpuSampler(bool test_proc, bool test_sys, bool force_fallback) {
- CpuSampler sampler;
- sampler.set_force_fallback(force_fallback);
- EXPECT_TRUE(sampler.Init());
- sampler.set_load_interval(100);
- int cpus = sampler.GetMaxCpus();
-
- // Test1: CpuSampler under idle situation.
- Thread::SleepMs(kSettleTime);
- sampler.GetProcessLoad();
- sampler.GetSystemLoad();
-
- Thread::SleepMs(kIdleTime);
-
- float proc_idle = 0.f, sys_idle = 0.f;
- if (test_proc) {
- proc_idle = sampler.GetProcessLoad();
- }
- if (test_sys) {
- sys_idle = sampler.GetSystemLoad();
- }
- if (test_proc) {
- LOG(LS_INFO) << "ProcessLoad Idle: "
- << std::setiosflags(std::ios_base::fixed)
- << std::setprecision(2) << std::setw(6) << proc_idle;
- EXPECT_GE(proc_idle, 0.f);
- EXPECT_LE(proc_idle, static_cast<float>(cpus));
- }
- if (test_sys) {
- LOG(LS_INFO) << "SystemLoad Idle: "
- << std::setiosflags(std::ios_base::fixed)
- << std::setprecision(2) << std::setw(6) << sys_idle;
- EXPECT_GE(sys_idle, 0.f);
- EXPECT_LE(sys_idle, static_cast<float>(cpus));
- }
-
- // Test2: CpuSampler with main process at 50% busy.
- Thread::SleepMs(kSettleTime);
- sampler.GetProcessLoad();
- sampler.GetSystemLoad();
-
- CpuHalfBusyLoop(kBusyTime);
-
- float proc_halfbusy = 0.f, sys_halfbusy = 0.f;
- if (test_proc) {
- proc_halfbusy = sampler.GetProcessLoad();
- }
- if (test_sys) {
- sys_halfbusy = sampler.GetSystemLoad();
- }
- if (test_proc) {
- LOG(LS_INFO) << "ProcessLoad Halfbusy: "
- << std::setiosflags(std::ios_base::fixed)
- << std::setprecision(2) << std::setw(6) << proc_halfbusy;
- EXPECT_GE(proc_halfbusy, 0.f);
- EXPECT_LE(proc_halfbusy, static_cast<float>(cpus));
- }
- if (test_sys) {
- LOG(LS_INFO) << "SystemLoad Halfbusy: "
- << std::setiosflags(std::ios_base::fixed)
- << std::setprecision(2) << std::setw(6) << sys_halfbusy;
- EXPECT_GE(sys_halfbusy, 0.f);
- EXPECT_LE(sys_halfbusy, static_cast<float>(cpus));
- }
-
- // Test3: CpuSampler with main process busy.
- Thread::SleepMs(kSettleTime);
- sampler.GetProcessLoad();
- sampler.GetSystemLoad();
-
- CpuBusyLoop(kBusyTime);
-
- float proc_busy = 0.f, sys_busy = 0.f;
- if (test_proc) {
- proc_busy = sampler.GetProcessLoad();
- }
- if (test_sys) {
- sys_busy = sampler.GetSystemLoad();
- }
- if (test_proc) {
- LOG(LS_INFO) << "ProcessLoad Busy: "
- << std::setiosflags(std::ios_base::fixed)
- << std::setprecision(2) << std::setw(6) << proc_busy;
- EXPECT_GE(proc_busy, 0.f);
- EXPECT_LE(proc_busy, static_cast<float>(cpus));
- }
- if (test_sys) {
- LOG(LS_INFO) << "SystemLoad Busy: "
- << std::setiosflags(std::ios_base::fixed)
- << std::setprecision(2) << std::setw(6) << sys_busy;
- EXPECT_GE(sys_busy, 0.f);
- EXPECT_LE(sys_busy, static_cast<float>(cpus));
- }
-
- // Test4: CpuSampler with 2 cpus process busy.
- if (cpus >= 2) {
- Thread::SleepMs(kSettleTime);
- sampler.GetProcessLoad();
- sampler.GetSystemLoad();
-
- CpuTwoBusyLoop(kBusyTime);
-
- float proc_twobusy = 0.f, sys_twobusy = 0.f;
- if (test_proc) {
- proc_twobusy = sampler.GetProcessLoad();
- }
- if (test_sys) {
- sys_twobusy = sampler.GetSystemLoad();
- }
- if (test_proc) {
- LOG(LS_INFO) << "ProcessLoad 2 CPU Busy:"
- << std::setiosflags(std::ios_base::fixed)
- << std::setprecision(2) << std::setw(6) << proc_twobusy;
- EXPECT_GE(proc_twobusy, 0.f);
- EXPECT_LE(proc_twobusy, static_cast<float>(cpus));
- }
- if (test_sys) {
- LOG(LS_INFO) << "SystemLoad 2 CPU Busy: "
- << std::setiosflags(std::ios_base::fixed)
- << std::setprecision(2) << std::setw(6) << sys_twobusy;
- EXPECT_GE(sys_twobusy, 0.f);
- EXPECT_LE(sys_twobusy, static_cast<float>(cpus));
- }
- }
-
- // Test5: CpuSampler with idle process after being busy.
- Thread::SleepMs(kSettleTime);
- sampler.GetProcessLoad();
- sampler.GetSystemLoad();
-
- Thread::SleepMs(kIdleTime);
-
- if (test_proc) {
- proc_idle = sampler.GetProcessLoad();
- }
- if (test_sys) {
- sys_idle = sampler.GetSystemLoad();
- }
- if (test_proc) {
- LOG(LS_INFO) << "ProcessLoad Idle: "
- << std::setiosflags(std::ios_base::fixed)
- << std::setprecision(2) << std::setw(6) << proc_idle;
- EXPECT_GE(proc_idle, 0.f);
- EXPECT_LE(proc_idle, proc_busy);
- }
- if (test_sys) {
- LOG(LS_INFO) << "SystemLoad Idle: "
- << std::setiosflags(std::ios_base::fixed)
- << std::setprecision(2) << std::setw(6) << sys_idle;
- EXPECT_GE(sys_idle, 0.f);
- EXPECT_LE(sys_idle, static_cast<float>(cpus));
- }
-}
-
-TEST(CpuMonitorTest, TestCpus) {
- CpuSampler sampler;
- EXPECT_TRUE(sampler.Init());
- int current_cpus = sampler.GetCurrentCpus();
- int cpus = sampler.GetMaxCpus();
- LOG(LS_INFO) << "Current Cpus: " << std::setw(9) << current_cpus;
- LOG(LS_INFO) << "Maximum Cpus: " << std::setw(9) << cpus;
- EXPECT_GT(cpus, 0);
- EXPECT_LE(cpus, kMaxCpus);
- EXPECT_GT(current_cpus, 0);
- EXPECT_LE(current_cpus, cpus);
-}
-
-#if defined(WEBRTC_WIN)
-// Tests overall system CpuSampler using legacy OS fallback code if applicable.
-TEST(CpuMonitorTest, TestGetSystemLoadForceFallback) {
- TestCpuSampler(false, true, true);
-}
-#endif
-
-// Tests both process and system functions in use at same time.
-TEST(CpuMonitorTest, TestGetBothLoad) {
- TestCpuSampler(true, true, false);
-}
-
-// Tests a query less than the interval produces the same value.
-TEST(CpuMonitorTest, TestInterval) {
- CpuSampler sampler;
- EXPECT_TRUE(sampler.Init());
-
- // Test1: Set interval to large value so sampler will not update.
- sampler.set_load_interval(kLongInterval);
-
- sampler.GetProcessLoad();
- sampler.GetSystemLoad();
-
- float proc_orig = sampler.GetProcessLoad();
- float sys_orig = sampler.GetSystemLoad();
-
- Thread::SleepMs(kIdleTime);
-
- float proc_halftime = sampler.GetProcessLoad();
- float sys_halftime = sampler.GetSystemLoad();
-
- EXPECT_EQ(proc_orig, proc_halftime);
- EXPECT_EQ(sys_orig, sys_halftime);
-}
-
-TEST(CpuMonitorTest, TestCpuMonitor) {
- CpuMonitor monitor(Thread::Current());
- CpuLoadListener listener;
- monitor.SignalUpdate.connect(&listener, &CpuLoadListener::OnCpuLoad);
- EXPECT_TRUE(monitor.Start(10));
- // Wait until the CPU load has been checked more than twice.
- EXPECT_TRUE_WAIT(listener.count() > 2, 1000);
- EXPECT_GT(listener.current_cpus(), 0);
- EXPECT_GT(listener.cpus(), 0);
- EXPECT_GE(listener.process_load(), .0f);
- EXPECT_GE(listener.system_load(), .0f);
-
- monitor.Stop();
- // Wait 20 ms to make sure all signals are delivered.
- Thread::Current()->ProcessMessages(20);
- int old_count = listener.count();
- Thread::Current()->ProcessMessages(20);
- // Verify no more signals.
- EXPECT_EQ(old_count, listener.count());
-}
-
-} // namespace rtc
diff --git a/webrtc/base/crc32.cc b/webrtc/base/crc32.cc
index eae338ad16..97b82145d7 100644
--- a/webrtc/base/crc32.cc
+++ b/webrtc/base/crc32.cc
@@ -10,7 +10,7 @@
#include "webrtc/base/crc32.h"
-#include "webrtc/base/basicdefs.h"
+#include "webrtc/base/arraysize.h"
namespace rtc {
@@ -22,9 +22,9 @@ static const uint32_t kCrc32Polynomial = 0xEDB88320;
static uint32_t kCrc32Table[256] = {0};
static void EnsureCrc32TableInited() {
- if (kCrc32Table[ARRAY_SIZE(kCrc32Table) - 1])
+ if (kCrc32Table[arraysize(kCrc32Table) - 1])
return; // already inited
- for (uint32_t i = 0; i < ARRAY_SIZE(kCrc32Table); ++i) {
+ for (uint32_t i = 0; i < arraysize(kCrc32Table); ++i) {
uint32_t c = i;
for (size_t j = 0; j < 8; ++j) {
if (c & 1) {
diff --git a/webrtc/base/criticalsection.h b/webrtc/base/criticalsection.h
index ddbf857f2b..5b3eaf5684 100644
--- a/webrtc/base/criticalsection.h
+++ b/webrtc/base/criticalsection.h
@@ -89,7 +89,7 @@ class TryCritScope {
#if defined(WEBRTC_WIN)
_Check_return_ bool locked() const;
#else
- bool locked() const __attribute__((warn_unused_result));
+ bool locked() const __attribute__ ((__warn_unused_result__));
#endif
private:
CriticalSection* const cs_;
diff --git a/webrtc/base/criticalsection_unittest.cc b/webrtc/base/criticalsection_unittest.cc
index ff4fdef948..d6990c0023 100644
--- a/webrtc/base/criticalsection_unittest.cc
+++ b/webrtc/base/criticalsection_unittest.cc
@@ -14,9 +14,9 @@
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/event.h"
#include "webrtc/base/gunit.h"
+#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/scopedptrcollection.h"
#include "webrtc/base/thread.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
namespace rtc {
@@ -220,6 +220,28 @@ TEST(AtomicOpsTest, Simple) {
EXPECT_EQ(0, value);
}
+TEST(AtomicOpsTest, SimplePtr) {
+ class Foo {};
+ Foo* volatile foo = nullptr;
+ scoped_ptr<Foo> a(new Foo());
+ scoped_ptr<Foo> b(new Foo());
+ // Reading the initial value should work as expected.
+ EXPECT_TRUE(rtc::AtomicOps::AcquireLoadPtr(&foo) == nullptr);
+ // Setting using compare and swap should work.
+ EXPECT_TRUE(rtc::AtomicOps::CompareAndSwapPtr(
+ &foo, static_cast<Foo*>(nullptr), a.get()) == nullptr);
+ EXPECT_TRUE(rtc::AtomicOps::AcquireLoadPtr(&foo) == a.get());
+ // Setting another value but with the wrong previous pointer should fail
+ // (remain a).
+ EXPECT_TRUE(rtc::AtomicOps::CompareAndSwapPtr(
+ &foo, static_cast<Foo*>(nullptr), b.get()) == a.get());
+ EXPECT_TRUE(rtc::AtomicOps::AcquireLoadPtr(&foo) == a.get());
+ // Replacing a with b should work.
+ EXPECT_TRUE(rtc::AtomicOps::CompareAndSwapPtr(&foo, a.get(), b.get()) ==
+ a.get());
+ EXPECT_TRUE(rtc::AtomicOps::AcquireLoadPtr(&foo) == b.get());
+}
+
TEST(AtomicOpsTest, Increment) {
// Create and start lots of threads.
AtomicOpRunner<IncrementOp, UniqueValueVerifier> runner(0);
diff --git a/webrtc/base/deprecation.h b/webrtc/base/deprecation.h
new file mode 100644
index 0000000000..ce950f9b52
--- /dev/null
+++ b/webrtc/base/deprecation.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_DEPRECATION_H_
+#define WEBRTC_BASE_DEPRECATION_H_
+
+// Annotate the declarations of deprecated functions with this to cause a
+// compiler warning when they're used. Like so:
+//
+// RTC_DEPRECATED std::pony PonyPlz(const std::pony_spec& ps);
+//
+// NOTE 1: The annotation goes on the declaration in the .h file, not the
+// definition in the .cc file!
+//
+// NOTE 2: In order to keep unit testing the deprecated function without
+// getting warnings, do something like this:
+//
+// std::pony DEPRECATED_PonyPlz(const std::pony_spec& ps);
+// RTC_DEPRECATED inline std::pony PonyPlz(const std::pony_spec& ps) {
+// return DEPRECATED_PonyPlz(ps);
+// }
+//
+// In other words, rename the existing function, and provide an inline wrapper
+// using the original name that calls it. That way, callers who are willing to
+// call it using the DEPRECATED_-prefixed name don't get the warning.
+//
+// TODO(kwiberg): Remove this when we can use [[deprecated]] from C++14.
+#if defined(_MSC_VER)
+// Note: Deprecation warnings seem to fail to trigger on Windows
+// (https://bugs.chromium.org/p/webrtc/issues/detail?id=5368).
+#define RTC_DEPRECATED __declspec(deprecated)
+#elif defined(__GNUC__)
+#define RTC_DEPRECATED __attribute__ ((__deprecated__))
+#else
+#define RTC_DEPRECATED
+#endif
+
+#endif // WEBRTC_BASE_DEPRECATION_H_
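
This header is what drives the Buffer::Pass() -> DEPRECATED_Pass() renames seen earlier in this diff. A minimal sketch of the pattern the comments describe, assuming a hypothetical Widget class:

#include "webrtc/base/deprecation.h"

class Widget {
 public:
  // Renamed implementation; tests can call this without triggering warnings.
  int DEPRECATED_Area() const { return width_ * height_; }
  // Wrapper keeps the old name but warns at remaining call sites.
  RTC_DEPRECATED int Area() const { return DEPRECATED_Area(); }

 private:
  int width_ = 0;
  int height_ = 0;
};
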
diff --git a/webrtc/base/diskcache.cc b/webrtc/base/diskcache.cc
index 6bbc53eb13..a1fba6af9a 100644
--- a/webrtc/base/diskcache.cc
+++ b/webrtc/base/diskcache.cc
@@ -15,6 +15,7 @@
#endif
#include <algorithm>
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/common.h"
#include "webrtc/base/diskcache.h"
#include "webrtc/base/fileutils.h"
@@ -23,11 +24,11 @@
#include "webrtc/base/stringencode.h"
#include "webrtc/base/stringutils.h"
-#ifdef _DEBUG
+#if !defined(NDEBUG)
#define TRANSPARENT_CACHE_NAMES 1
-#else // !_DEBUG
+#else
#define TRANSPARENT_CACHE_NAMES 0
-#endif // !_DEBUG
+#endif
namespace rtc {
@@ -211,14 +212,14 @@ bool DiskCache::DeleteResource(const std::string& id) {
}
bool DiskCache::CheckLimit() {
-#ifdef _DEBUG
+#if !defined(NDEBUG)
// Temporary check to make sure everything is working correctly.
size_t cache_size = 0;
for (EntryMap::iterator it = map_.begin(); it != map_.end(); ++it) {
cache_size += it->second.size;
}
ASSERT(cache_size == total_size_);
-#endif // _DEBUG
+#endif
// TODO: Replace this with a non-brain-dead algorithm for clearing out the
// oldest resources... something that isn't O(n^2)
@@ -263,7 +264,7 @@ std::string DiskCache::IdToFilename(const std::string& id, size_t index) const {
#endif // !TRANSPARENT_CACHE_NAMES
char extension[32];
- sprintfn(extension, ARRAY_SIZE(extension), ".%u", index);
+ sprintfn(extension, arraysize(extension), ".%u", index);
Pathname pathname;
pathname.SetFolder(folder_);
diff --git a/webrtc/base/event_tracer.cc b/webrtc/base/event_tracer.cc
index 5c6d39f0a4..4174589d36 100644
--- a/webrtc/base/event_tracer.cc
+++ b/webrtc/base/event_tracer.cc
@@ -7,15 +7,26 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
-
#include "webrtc/base/event_tracer.h"
+#include <inttypes.h>
+
+#include <vector>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/event.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/platform_thread.h"
+#include "webrtc/base/timeutils.h"
+#include "webrtc/base/trace_event.h"
+
namespace webrtc {
namespace {
-GetCategoryEnabledPtr g_get_category_enabled_ptr = 0;
-AddTraceEventPtr g_add_trace_event_ptr = 0;
+GetCategoryEnabledPtr g_get_category_enabled_ptr = nullptr;
+AddTraceEventPtr g_add_trace_event_ptr = nullptr;
} // namespace
@@ -25,7 +36,6 @@ void SetupEventTracer(GetCategoryEnabledPtr get_category_enabled_ptr,
g_add_trace_event_ptr = add_trace_event_ptr;
}
-// static
const unsigned char* EventTracer::GetCategoryEnabled(const char* name) {
if (g_get_category_enabled_ptr)
return g_get_category_enabled_ptr(name);
@@ -34,7 +44,8 @@ const unsigned char* EventTracer::GetCategoryEnabled(const char* name) {
return reinterpret_cast<const unsigned char*>("\0");
}
-// static
+// Arguments to this function (phase, etc.) are as defined in
+// webrtc/base/trace_event.h.
void EventTracer::AddTraceEvent(char phase,
const unsigned char* category_enabled,
const char* name,
@@ -58,3 +69,202 @@ void EventTracer::AddTraceEvent(char phase,
}
} // namespace webrtc
+
+namespace rtc {
+namespace tracing {
+namespace {
+
+static bool EventTracingThreadFunc(void* params);
+
+// Atomic-int fast path for avoiding logging when disabled.
+static volatile int g_event_logging_active = 0;
+
+// TODO(pbos): Log metadata for all threads, etc.
+class EventLogger final {
+ public:
+ EventLogger()
+ : logging_thread_(EventTracingThreadFunc, this, "EventTracingThread"),
+ shutdown_event_(false, false) {}
+ ~EventLogger() { RTC_DCHECK(thread_checker_.CalledOnValidThread()); }
+
+ void AddTraceEvent(const char* name,
+ const unsigned char* category_enabled,
+ char phase,
+ uint64_t timestamp,
+ int pid,
+ rtc::PlatformThreadId thread_id) {
+ rtc::CritScope lock(&crit_);
+ trace_events_.push_back(
+ {name, category_enabled, phase, timestamp, 1, thread_id});
+ }
+
+ // The TraceEvent format is documented here:
+ // https://docs.google.com/document/d/1CvAClvFfyA5R-PhYUmn5OOQtYMH4h6I0nSsKchNAySU/preview
+ void Log() {
+ RTC_DCHECK(output_file_);
+ static const int kLoggingIntervalMs = 100;
+ fprintf(output_file_, "{ \"traceEvents\": [\n");
+ bool has_logged_event = false;
+ while (true) {
+ bool shutting_down = shutdown_event_.Wait(kLoggingIntervalMs);
+ std::vector<TraceEvent> events;
+ {
+ rtc::CritScope lock(&crit_);
+ trace_events_.swap(events);
+ }
+ for (const TraceEvent& e : events) {
+ fprintf(output_file_,
+ "%s{ \"name\": \"%s\""
+ ", \"cat\": \"%s\""
+ ", \"ph\": \"%c\""
+ ", \"ts\": %" PRIu64
+ ", \"pid\": %d"
+#if defined(WEBRTC_WIN)
+ ", \"tid\": %lu"
+#else
+ ", \"tid\": %d"
+#endif // defined(WEBRTC_WIN)
+ "}\n",
+ has_logged_event ? "," : " ", e.name, e.category_enabled,
+ e.phase, e.timestamp, e.pid, e.tid);
+ has_logged_event = true;
+ }
+ if (shutting_down)
+ break;
+ }
+ fprintf(output_file_, "]}\n");
+ if (output_file_owned_)
+ fclose(output_file_);
+ output_file_ = nullptr;
+ }
+
+ void Start(FILE* file, bool owned) {
+ RTC_DCHECK(file);
+ RTC_DCHECK(!output_file_);
+ output_file_ = file;
+ output_file_owned_ = owned;
+ {
+ rtc::CritScope lock(&crit_);
+ // Since the atomic fast-path for adding events to the queue can be
+ // bypassed while the logging thread is shutting down there may be some
+ // stale events in the queue, hence the vector needs to be cleared to not
+ // log events from a previous logging session (which may be days old).
+ trace_events_.clear();
+ }
+ // Enable event logging (fast-path). The flag must still be disabled at
+ // this point, since Start() must not be called twice.
+ RTC_CHECK_EQ(0,
+ rtc::AtomicOps::CompareAndSwap(&g_event_logging_active, 0, 1));
+
+ // Finally start, everything should be set up now.
+ logging_thread_.Start();
+ }
+
+ void Stop() {
+ // Try to stop. Abort if we're not currently logging.
+ if (rtc::AtomicOps::CompareAndSwap(&g_event_logging_active, 1, 0) == 0)
+ return;
+
+ // Wake up logging thread to finish writing.
+ shutdown_event_.Set();
+ // Join the logging thread.
+ logging_thread_.Stop();
+ }
+
+ private:
+ struct TraceEvent {
+ const char* name;
+ const unsigned char* category_enabled;
+ char phase;
+ uint64_t timestamp;
+ int pid;
+ rtc::PlatformThreadId tid;
+ };
+
+ rtc::CriticalSection crit_;
+ std::vector<TraceEvent> trace_events_ GUARDED_BY(crit_);
+ rtc::PlatformThread logging_thread_;
+ rtc::Event shutdown_event_;
+ rtc::ThreadChecker thread_checker_;
+ FILE* output_file_ = nullptr;
+ bool output_file_owned_ = false;
+};
+
+static bool EventTracingThreadFunc(void* params) {
+ static_cast<EventLogger*>(params)->Log();
+ return true;
+}
+
+static EventLogger* volatile g_event_logger = nullptr;
+static const char* const kDisabledTracePrefix = TRACE_DISABLED_BY_DEFAULT("");
+const unsigned char* InternalGetCategoryEnabled(const char* name) {
+ const char* prefix_ptr = &kDisabledTracePrefix[0];
+ const char* name_ptr = name;
+ // Check whether name contains the default-disabled prefix.
+ while (*prefix_ptr == *name_ptr && *prefix_ptr != '\0') {
+ ++prefix_ptr;
+ ++name_ptr;
+ }
+ return reinterpret_cast<const unsigned char*>(*prefix_ptr == '\0' ? ""
+ : name);
+}
+
+void InternalAddTraceEvent(char phase,
+ const unsigned char* category_enabled,
+ const char* name,
+ unsigned long long id,
+ int num_args,
+ const char** arg_names,
+ const unsigned char* arg_types,
+ const unsigned long long* arg_values,
+ unsigned char flags) {
+ // Fast path for when event tracing is inactive.
+ if (rtc::AtomicOps::AcquireLoad(&g_event_logging_active) == 0)
+ return;
+
+ g_event_logger->AddTraceEvent(name, category_enabled, phase,
+ rtc::TimeMicros(), 1, rtc::CurrentThreadId());
+}
+
+} // namespace
+
+void SetupInternalTracer() {
+ RTC_CHECK(rtc::AtomicOps::CompareAndSwapPtr(
+ &g_event_logger, static_cast<EventLogger*>(nullptr),
+ new EventLogger()) == nullptr);
+ webrtc::SetupEventTracer(InternalGetCategoryEnabled, InternalAddTraceEvent);
+}
+
+void StartInternalCaptureToFile(FILE* file) {
+ g_event_logger->Start(file, false);
+}
+
+bool StartInternalCapture(const char* filename) {
+ FILE* file = fopen(filename, "w");
+ if (!file) {
+ LOG(LS_ERROR) << "Failed to open trace file '" << filename
+ << "' for writing.";
+ return false;
+ }
+ g_event_logger->Start(file, true);
+ return true;
+}
+
+void StopInternalCapture() {
+ g_event_logger->Stop();
+}
+
+void ShutdownInternalTracer() {
+ StopInternalCapture();
+ EventLogger* old_logger = rtc::AtomicOps::AcquireLoadPtr(&g_event_logger);
+ RTC_DCHECK(old_logger);
+ RTC_CHECK(rtc::AtomicOps::CompareAndSwapPtr(
+ &g_event_logger, old_logger,
+ static_cast<EventLogger*>(nullptr)) == old_logger);
+ delete old_logger;
+ webrtc::SetupEventTracer(nullptr, nullptr);
+}
+
+} // namespace tracing
+} // namespace rtc
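
Given the fprintf format in Log() above, each recorded event becomes one object in a Chrome-loadable JSON trace file. A hypothetical two-event capture might look like the following (the event name, category, phases and timestamps are illustrative; the pid is hardcoded to 1 by AddTraceEvent):

{ "traceEvents": [
 { "name": "Encode", "cat": "webrtc", "ph": "B", "ts": 1453288000000, "pid": 1, "tid": 17}
,{ "name": "Encode", "cat": "webrtc", "ph": "E", "ts": 1453288000250, "pid": 1, "tid": 17}
]}
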
diff --git a/webrtc/base/event_tracer.h b/webrtc/base/event_tracer.h
index cfc6e9e472..51c8cfdc49 100644
--- a/webrtc/base/event_tracer.h
+++ b/webrtc/base/event_tracer.h
@@ -26,6 +26,8 @@
#ifndef WEBRTC_BASE_EVENT_TRACER_H_
#define WEBRTC_BASE_EVENT_TRACER_H_
+#include <stdio.h>
+
namespace webrtc {
typedef const unsigned char* (*GetCategoryEnabledPtr)(const char* name);
@@ -68,4 +70,16 @@ class EventTracer {
} // namespace webrtc
+namespace rtc {
+namespace tracing {
+// Set up internal event tracer.
+void SetupInternalTracer();
+bool StartInternalCapture(const char* filename);
+void StartInternalCaptureToFile(FILE* file);
+void StopInternalCapture();
+// Make sure to call this; it tears down the internal tracing.
+void ShutdownInternalTracer();
+} // namespace tracing
+} // namespace rtc
+
#endif // WEBRTC_BASE_EVENT_TRACER_H_
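
Taken together, the new rtc::tracing functions give a self-contained capture lifecycle. A minimal usage sketch based only on the declarations above (the trace file path is arbitrary):

#include "webrtc/base/event_tracer.h"

int main() {
  rtc::tracing::SetupInternalTracer();
  // Events are only recorded between Start and Stop.
  if (rtc::tracing::StartInternalCapture("/tmp/webrtc_trace.json")) {
    // ... run the code to be traced ...
    rtc::tracing::StopInternalCapture();
  }
  // Tears down the tracer (and stops capture if still running).
  rtc::tracing::ShutdownInternalTracer();
  return 0;
}
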
diff --git a/webrtc/base/fakenetwork.h b/webrtc/base/fakenetwork.h
index fb99d59e1b..e3996e6649 100644
--- a/webrtc/base/fakenetwork.h
+++ b/webrtc/base/fakenetwork.h
@@ -12,6 +12,7 @@
#define WEBRTC_BASE_FAKENETWORK_H_
#include <string>
+#include <utility>
#include <vector>
#include "webrtc/base/network.h"
@@ -31,7 +32,7 @@ class FakeNetworkManager : public NetworkManagerBase,
public:
FakeNetworkManager() : thread_(Thread::Current()) {}
- typedef std::vector<SocketAddress> IfaceList;
+ typedef std::vector<std::pair<SocketAddress, AdapterType>> IfaceList;
void AddInterface(const SocketAddress& iface) {
// Ensure a unique name for the interface if its name is not given.
@@ -39,16 +40,22 @@ class FakeNetworkManager : public NetworkManagerBase,
}
void AddInterface(const SocketAddress& iface, const std::string& if_name) {
+ AddInterface(iface, if_name, ADAPTER_TYPE_UNKNOWN);
+ }
+
+ void AddInterface(const SocketAddress& iface,
+ const std::string& if_name,
+ AdapterType type) {
SocketAddress address(if_name, 0);
address.SetResolvedIP(iface.ipaddr());
- ifaces_.push_back(address);
+ ifaces_.push_back(std::make_pair(address, type));
DoUpdateNetworks();
}
void RemoveInterface(const SocketAddress& iface) {
for (IfaceList::iterator it = ifaces_.begin();
it != ifaces_.end(); ++it) {
- if (it->EqualIPs(iface)) {
+ if (it->first.EqualIPs(iface)) {
ifaces_.erase(it);
break;
}
@@ -76,6 +83,7 @@ class FakeNetworkManager : public NetworkManagerBase,
}
using NetworkManagerBase::set_enumeration_permission;
+ using NetworkManagerBase::set_default_local_addresses;
private:
void DoUpdateNetworks() {
@@ -85,17 +93,17 @@ class FakeNetworkManager : public NetworkManagerBase,
for (IfaceList::iterator it = ifaces_.begin();
it != ifaces_.end(); ++it) {
int prefix_length = 0;
- if (it->ipaddr().family() == AF_INET) {
+ if (it->first.ipaddr().family() == AF_INET) {
prefix_length = kFakeIPv4NetworkPrefixLength;
- } else if (it->ipaddr().family() == AF_INET6) {
+ } else if (it->first.ipaddr().family() == AF_INET6) {
prefix_length = kFakeIPv6NetworkPrefixLength;
}
- IPAddress prefix = TruncateIP(it->ipaddr(), prefix_length);
- scoped_ptr<Network> net(new Network(it->hostname(),
- it->hostname(),
- prefix,
- prefix_length));
- net->AddIP(it->ipaddr());
+ IPAddress prefix = TruncateIP(it->first.ipaddr(), prefix_length);
+ scoped_ptr<Network> net(new Network(it->first.hostname(),
+ it->first.hostname(), prefix,
+ prefix_length, it->second));
+ net->set_default_local_address_provider(this);
+ net->AddIP(it->first.ipaddr());
networks.push_back(net.release());
}
bool changed;
@@ -111,6 +119,9 @@ class FakeNetworkManager : public NetworkManagerBase,
int next_index_ = 0;
int start_count_ = 0;
bool sent_first_update_ = false;
+
+ IPAddress default_local_ipv4_address_;
+ IPAddress default_local_ipv6_address_;
};
} // namespace rtc
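
A sketch of driving the extended FakeNetworkManager, assuming it is constructed on a thread wrapped as an rtc::Thread (the addresses and interface names are illustrative):

#include "webrtc/base/fakenetwork.h"
#include "webrtc/base/socketaddress.h"

void AddFakeInterfaces(rtc::FakeNetworkManager* manager) {
  // New overload: tag the fake interface with an adapter type.
  manager->AddInterface(rtc::SocketAddress("192.168.1.2", 0), "eth0",
                        rtc::ADAPTER_TYPE_ETHERNET);
  // Existing overloads still default to ADAPTER_TYPE_UNKNOWN.
  manager->AddInterface(rtc::SocketAddress("2401:fa00:4:1000:be30::1", 0));
}
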
diff --git a/webrtc/base/fakesslidentity.h b/webrtc/base/fakesslidentity.h
index 7926580e7b..ec603a541d 100644
--- a/webrtc/base/fakesslidentity.h
+++ b/webrtc/base/fakesslidentity.h
@@ -25,9 +25,11 @@ class FakeSSLCertificate : public rtc::SSLCertificate {
// SHA-1 is the default digest algorithm because it is available in all build
// configurations used for unit testing.
explicit FakeSSLCertificate(const std::string& data)
- : data_(data), digest_algorithm_(DIGEST_SHA_1) {}
+ : data_(data), digest_algorithm_(DIGEST_SHA_1), expiration_time_(-1) {}
explicit FakeSSLCertificate(const std::vector<std::string>& certs)
- : data_(certs.front()), digest_algorithm_(DIGEST_SHA_1) {
+ : data_(certs.front()),
+ digest_algorithm_(DIGEST_SHA_1),
+ expiration_time_(-1) {
std::vector<std::string>::const_iterator it;
// Skip certs[0].
for (it = certs.begin() + 1; it != certs.end(); ++it) {
@@ -45,6 +47,12 @@ class FakeSSLCertificate : public rtc::SSLCertificate {
VERIFY(SSLIdentity::PemToDer(kPemTypeCertificate, data_, &der_string));
der_buffer->SetData(der_string.c_str(), der_string.size());
}
+ int64_t CertificateExpirationTime() const override {
+ return expiration_time_;
+ }
+ void SetCertificateExpirationTime(int64_t expiration_time) {
+ expiration_time_ = expiration_time;
+ }
void set_digest_algorithm(const std::string& algorithm) {
digest_algorithm_ = algorithm;
}
@@ -78,6 +86,8 @@ class FakeSSLCertificate : public rtc::SSLCertificate {
std::string data_;
std::vector<FakeSSLCertificate> certs_;
std::string digest_algorithm_;
+ // Expiration time in seconds relative to epoch, 1970-01-01T00:00:00Z (UTC).
+ int64_t expiration_time_;
};
class FakeSSLIdentity : public rtc::SSLIdentity {
diff --git a/webrtc/base/filerotatingstream.cc b/webrtc/base/filerotatingstream.cc
index 65dfd6397f..080999476b 100644
--- a/webrtc/base/filerotatingstream.cc
+++ b/webrtc/base/filerotatingstream.cc
@@ -281,7 +281,7 @@ void FileRotatingStream::RotateFiles() {
// Rotates the files by deleting the file at |rotation_index_|, which is the
// oldest file and then renaming the newer files to have an incremented index.
// See header file comments for example.
- RTC_DCHECK_LE(rotation_index_, file_names_.size());
+ RTC_DCHECK_LT(rotation_index_, file_names_.size());
std::string file_to_delete = file_names_[rotation_index_];
if (Filesystem::IsFile(file_to_delete)) {
if (!Filesystem::DeleteFile(file_to_delete)) {
diff --git a/webrtc/base/fileutils.cc b/webrtc/base/fileutils.cc
index 6f385d72b7..cb23153de7 100644
--- a/webrtc/base/fileutils.cc
+++ b/webrtc/base/fileutils.cc
@@ -10,6 +10,7 @@
#include <assert.h>
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/pathutils.h"
#include "webrtc/base/fileutils.h"
#include "webrtc/base/stringutils.h"
@@ -273,8 +274,8 @@ bool CreateUniqueFile(Pathname& path, bool create_empty) {
}
version += 1;
char version_base[MAX_PATH];
- sprintfn(version_base, ARRAY_SIZE(version_base), "%s-%u",
- basename.c_str(), version);
+ sprintfn(version_base, arraysize(version_base), "%s-%u", basename.c_str(),
+ version);
path.SetBasename(version_base);
}
return true;
diff --git a/webrtc/base/format_macros.h b/webrtc/base/format_macros.h
index 5d7dcc36b9..90f86a686c 100644
--- a/webrtc/base/format_macros.h
+++ b/webrtc/base/format_macros.h
@@ -73,6 +73,8 @@
#else // WEBRTC_WIN
+#include <inttypes.h>
+
#if !defined(PRId64)
#define PRId64 "I64d"
#endif
diff --git a/webrtc/base/gunit.h b/webrtc/base/gunit.h
index c2bc844d5f..1a6c36374e 100644
--- a/webrtc/base/gunit.h
+++ b/webrtc/base/gunit.h
@@ -13,7 +13,7 @@
#include "webrtc/base/logging.h"
#include "webrtc/base/thread.h"
-#if defined(WEBRTC_ANDROID) || defined(GTEST_RELATIVE_PATH)
+#if defined(GTEST_RELATIVE_PATH)
#include "testing/gtest/include/gtest/gtest.h"
#else
#include "testing/base/public/gunit.h"
@@ -35,7 +35,7 @@
rtc::Thread::Current()->ProcessMessages(1); \
res = (ex); \
} \
- } while (0);
+ } while (0)
// The typical EXPECT_XXXX and ASSERT_XXXXs, but done until true or a timeout.
#define EXPECT_TRUE_WAIT(ex, timeout) \
@@ -43,28 +43,28 @@
bool res; \
WAIT_(ex, timeout, res); \
if (!res) EXPECT_TRUE(ex); \
- } while (0);
+ } while (0)
#define EXPECT_EQ_WAIT(v1, v2, timeout) \
do { \
bool res; \
WAIT_(v1 == v2, timeout, res); \
if (!res) EXPECT_EQ(v1, v2); \
- } while (0);
+ } while (0)
#define ASSERT_TRUE_WAIT(ex, timeout) \
do { \
bool res; \
WAIT_(ex, timeout, res); \
if (!res) ASSERT_TRUE(ex); \
- } while (0);
+ } while (0)
#define ASSERT_EQ_WAIT(v1, v2, timeout) \
do { \
bool res; \
WAIT_(v1 == v2, timeout, res); \
if (!res) ASSERT_EQ(v1, v2); \
- } while (0);
+ } while (0)
// Version with a "soft" timeout and a margin. This logs if the timeout is
// exceeded, but it only fails if the expression still isn't true after the
@@ -82,6 +82,6 @@
if (!res) { \
EXPECT_TRUE(ex); \
} \
- } while (0);
+ } while (0)
#endif // WEBRTC_BASE_GUNIT_H_
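
Dropping the trailing semicolons after while (0) is not cosmetic: with the semicolon inside the macro, an invocation followed by the caller's own ';' expands to two statements, which breaks if/else nesting. A minimal illustration (DoCheck, Skip and CHECK_IT are hypothetical):

void DoCheck();
void Skip();

// If this ended in "while (0);", the expansion plus the caller's ';'
// would terminate the if-branch early and 'else' would fail to compile.
#define CHECK_IT() do { DoCheck(); } while (0)

void Run(bool enabled) {
  if (enabled)
    CHECK_IT();  // The caller's ';' completes the statement.
  else
    Skip();
}
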
diff --git a/webrtc/base/helpers.cc b/webrtc/base/helpers.cc
index 8e59b6410c..1ad5d0e12b 100644
--- a/webrtc/base/helpers.cc
+++ b/webrtc/base/helpers.cc
@@ -164,16 +164,20 @@ class TestRandomGenerator : public RandomGenerator {
int seed_;
};
+namespace {
+
// TODO: Use Base64::Base64Table instead.
-static const char BASE64[64] = {
- 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M',
- 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z',
- 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm',
- 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z',
- '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '+', '/'
-};
+static const char kBase64[64] = {
+ 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M',
+ 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z',
+ 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm',
+ 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z',
+ '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '+', '/'};
-namespace {
+static const char kHex[16] = {'0', '1', '2', '3', '4', '5', '6', '7',
+ '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'};
+
+static const char kUuidDigit17[4] = {'8', '9', 'a', 'b'};
// This roundabout way of creating a global RNG is to safeguard against
// indeterminate static initialization order.
@@ -232,7 +236,7 @@ bool CreateRandomString(size_t len,
}
bool CreateRandomString(size_t len, std::string* str) {
- return CreateRandomString(len, BASE64, 64, str);
+ return CreateRandomString(len, kBase64, 64, str);
}
bool CreateRandomString(size_t len, const std::string& table,
@@ -241,6 +245,41 @@ bool CreateRandomString(size_t len, const std::string& table,
static_cast<int>(table.size()), str);
}
+// Version 4 UUID is of the form:
+// xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx
+// Where 'x' is a hex digit, and 'y' is 8, 9, a or b.
+std::string CreateRandomUuid() {
+ std::string str;
+ scoped_ptr<uint8_t[]> bytes(new uint8_t[31]);
+ if (!Rng().Generate(bytes.get(), 31)) {
+ LOG(LS_ERROR) << "Failed to generate random string!";
+ return str;
+ }
+ str.reserve(36);
+ for (size_t i = 0; i < 8; ++i) {
+ str.push_back(kHex[bytes[i] % 16]);
+ }
+ str.push_back('-');
+ for (size_t i = 8; i < 12; ++i) {
+ str.push_back(kHex[bytes[i] % 16]);
+ }
+ str.push_back('-');
+ str.push_back('4');
+ for (size_t i = 12; i < 15; ++i) {
+ str.push_back(kHex[bytes[i] % 16]);
+ }
+ str.push_back('-');
+ str.push_back(kUuidDigit17[bytes[15] % 4]);
+ for (size_t i = 16; i < 19; ++i) {
+ str.push_back(kHex[bytes[i] % 16]);
+ }
+ str.push_back('-');
+ for (size_t i = 19; i < 31; ++i) {
+ str.push_back(kHex[bytes[i] % 16]);
+ }
+ return str;
+}
+
uint32_t CreateRandomId() {
uint32_t id;
if (!Rng().Generate(&id, sizeof(id))) {
diff --git a/webrtc/base/helpers.h b/webrtc/base/helpers.h
index 102c08bd0d..0e7937362a 100644
--- a/webrtc/base/helpers.h
+++ b/webrtc/base/helpers.h
@@ -39,6 +39,9 @@ bool CreateRandomString(size_t length, std::string* str);
bool CreateRandomString(size_t length, const std::string& table,
std::string* str);
+// Generates a (cryptographically) random UUID version 4 string.
+std::string CreateRandomUuid();
+
// Generates a random id.
uint32_t CreateRandomId();
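
A minimal sketch of calling the new API; the output is random unless SetRandomTestMode(true) is active, as the unit test below demonstrates:

#include <iostream>
#include <string>

#include "webrtc/base/helpers.h"

int main() {
  // 36 characters of the form xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx.
  std::string uuid = rtc::CreateRandomUuid();
  std::cout << uuid << std::endl;
  return 0;
}
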
diff --git a/webrtc/base/helpers_unittest.cc b/webrtc/base/helpers_unittest.cc
index 6ea0167e98..83cc685919 100644
--- a/webrtc/base/helpers_unittest.cc
+++ b/webrtc/base/helpers_unittest.cc
@@ -43,16 +43,23 @@ TEST_F(RandomTest, TestCreateRandomString) {
EXPECT_EQ(256U, random2.size());
}
+TEST_F(RandomTest, TestCreateRandomUuid) {
+ std::string random = CreateRandomUuid();
+ EXPECT_EQ(36U, random.size());
+}
+
TEST_F(RandomTest, TestCreateRandomForTest) {
// Make sure we get the output we expect.
SetRandomTestMode(true);
EXPECT_EQ(2154761789U, CreateRandomId());
EXPECT_EQ("h0ISP4S5SJKH/9EY", CreateRandomString(16));
+ EXPECT_EQ("41706e92-cdd3-46d9-a22d-8ff1737ffb11", CreateRandomUuid());
// Reset and make sure we get the same output.
SetRandomTestMode(true);
EXPECT_EQ(2154761789U, CreateRandomId());
EXPECT_EQ("h0ISP4S5SJKH/9EY", CreateRandomString(16));
+ EXPECT_EQ("41706e92-cdd3-46d9-a22d-8ff1737ffb11", CreateRandomUuid());
// Test different character sets.
SetRandomTestMode(true);
diff --git a/webrtc/base/httpcommon-inl.h b/webrtc/base/httpcommon-inl.h
index d1c0bf01cf..188d9e6509 100644
--- a/webrtc/base/httpcommon-inl.h
+++ b/webrtc/base/httpcommon-inl.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_BASE_HTTPCOMMON_INL_H__
#define WEBRTC_BASE_HTTPCOMMON_INL_H__
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/common.h"
#include "webrtc/base/httpcommon.h"
@@ -80,7 +81,7 @@ void Url<CTYPE>::do_set_full_path(const CTYPE* val, size_t len) {
template<class CTYPE>
void Url<CTYPE>::do_get_url(string* val) const {
CTYPE protocol[9];
- asccpyn(protocol, ARRAY_SIZE(protocol), secure_ ? "https://" : "http://");
+ asccpyn(protocol, arraysize(protocol), secure_ ? "https://" : "http://");
val->append(protocol);
do_get_address(val);
do_get_full_path(val);
@@ -91,8 +92,8 @@ void Url<CTYPE>::do_get_address(string* val) const {
val->append(host_);
if (port_ != HttpDefaultPort(secure_)) {
CTYPE format[5], port[32];
- asccpyn(format, ARRAY_SIZE(format), ":%hu");
- sprintfn(port, ARRAY_SIZE(port), format, port_);
+ asccpyn(format, arraysize(format), ":%hu");
+ sprintfn(port, arraysize(port), format, port_);
val->append(port);
}
}
diff --git a/webrtc/base/httpcommon.cc b/webrtc/base/httpcommon.cc
index 0c3547e4e7..c90bea51cc 100644
--- a/webrtc/base/httpcommon.cc
+++ b/webrtc/base/httpcommon.cc
@@ -21,6 +21,7 @@
#include <algorithm>
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/base64.h"
#include "webrtc/base/common.h"
#include "webrtc/base/cryptstring.h"
@@ -377,7 +378,7 @@ bool HttpDateToSeconds(const std::string& date, time_t* seconds) {
gmt = non_gmt + ((zone[0] == '+') ? offset : -offset);
} else {
size_t zindex;
- if (!find_string(zindex, zone, kTimeZones, ARRAY_SIZE(kTimeZones))) {
+ if (!find_string(zindex, zone, kTimeZones, arraysize(kTimeZones))) {
return false;
}
gmt = non_gmt + kTimeZoneOffsets[zindex] * 60 * 60;
diff --git a/webrtc/base/ifaddrs_converter.cc b/webrtc/base/ifaddrs_converter.cc
new file mode 100644
index 0000000000..7dd35552f6
--- /dev/null
+++ b/webrtc/base/ifaddrs_converter.cc
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/ifaddrs_converter.h"
+
+namespace rtc {
+
+IfAddrsConverter::IfAddrsConverter() {}
+
+IfAddrsConverter::~IfAddrsConverter() {}
+
+bool IfAddrsConverter::ConvertIfAddrsToIPAddress(
+ const struct ifaddrs* interface,
+ InterfaceAddress* ip,
+ IPAddress* mask) {
+ switch (interface->ifa_addr->sa_family) {
+ case AF_INET: {
+ *ip = IPAddress(
+ reinterpret_cast<sockaddr_in*>(interface->ifa_addr)->sin_addr);
+ *mask = IPAddress(
+ reinterpret_cast<sockaddr_in*>(interface->ifa_netmask)->sin_addr);
+ return true;
+ }
+ case AF_INET6: {
+ int ip_attributes = IPV6_ADDRESS_FLAG_NONE;
+ if (!ConvertNativeAttributesToIPAttributes(interface, &ip_attributes)) {
+ return false;
+ }
+ *ip = InterfaceAddress(
+ reinterpret_cast<sockaddr_in6*>(interface->ifa_addr)->sin6_addr,
+ ip_attributes);
+ *mask = IPAddress(
+ reinterpret_cast<sockaddr_in6*>(interface->ifa_netmask)->sin6_addr);
+ return true;
+ }
+ default: { return false; }
+ }
+}
+
+bool IfAddrsConverter::ConvertNativeAttributesToIPAttributes(
+ const struct ifaddrs* interface,
+ int* ip_attributes) {
+ *ip_attributes = IPV6_ADDRESS_FLAG_NONE;
+ return true;
+}
+
+#if !defined(WEBRTC_MAC)
+// For Mac and iOS, it's defined in macifaddrs_converter.cc.
+IfAddrsConverter* CreateIfAddrsConverter() {
+ return new IfAddrsConverter();
+}
+#endif
+} // namespace rtc
diff --git a/webrtc/base/ifaddrs_converter.h b/webrtc/base/ifaddrs_converter.h
new file mode 100644
index 0000000000..0a1cdb9e41
--- /dev/null
+++ b/webrtc/base/ifaddrs_converter.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_IFADDRS_CONVERTER_H_
+#define WEBRTC_BASE_IFADDRS_CONVERTER_H_
+
+#if defined(WEBRTC_ANDROID)
+#include "webrtc/base/ifaddrs-android.h"
+#else
+#include <ifaddrs.h>
+#endif // WEBRTC_ANDROID
+
+#include "webrtc/base/ipaddress.h"
+
+namespace rtc {
+
+// This class converts native interface addresses to our internal IPAddress
+// class. Subclasses should override ConvertNativeAttributesToIPAttributes to
+// implement the different ways of retrieving IPv6 attributes for various
+// POSIX platforms.
+class IfAddrsConverter {
+ public:
+ IfAddrsConverter();
+ virtual ~IfAddrsConverter();
+ virtual bool ConvertIfAddrsToIPAddress(const struct ifaddrs* interface,
+ InterfaceAddress* ipaddress,
+ IPAddress* mask);
+
+ protected:
+ virtual bool ConvertNativeAttributesToIPAttributes(
+ const struct ifaddrs* interface,
+ int* ip_attributes);
+};
+
+IfAddrsConverter* CreateIfAddrsConverter();
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_IFADDRS_CONVERTER_H_
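
A POSIX sketch of driving the converter over a getifaddrs() list, based only on the declarations above (error handling elided):

#include "webrtc/base/ifaddrs_converter.h"
#include "webrtc/base/scoped_ptr.h"

void ConvertAllInterfaces() {
  struct ifaddrs* list = nullptr;
  if (getifaddrs(&list) != 0)
    return;
  rtc::scoped_ptr<rtc::IfAddrsConverter> converter(
      rtc::CreateIfAddrsConverter());
  for (struct ifaddrs* it = list; it != nullptr; it = it->ifa_next) {
    if (!it->ifa_addr || !it->ifa_netmask)
      continue;  // Entries without an address cannot be converted.
    rtc::InterfaceAddress ip;
    rtc::IPAddress mask;
    if (converter->ConvertIfAddrsToIPAddress(it, &ip, &mask)) {
      // ... use |ip| (carrying IPv6 attributes) and |mask| ...
    }
  }
  freeifaddrs(list);
}
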
diff --git a/webrtc/base/ipaddress.cc b/webrtc/base/ipaddress.cc
index 316207fe49..c92f33c74d 100644
--- a/webrtc/base/ipaddress.cc
+++ b/webrtc/base/ipaddress.cc
@@ -27,8 +27,10 @@
#include "webrtc/base/ipaddress.h"
#include "webrtc/base/byteorder.h"
-#include "webrtc/base/nethelpers.h"
+#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
+#include "webrtc/base/nethelpers.h"
+#include "webrtc/base/stringutils.h"
#include "webrtc/base/win32.h"
namespace rtc {
@@ -41,8 +43,6 @@ static const in6_addr kTeredoPrefix = {{{0x20, 0x01, 0x00, 0x00}}};
static const in6_addr kV4CompatibilityPrefix = {{{0}}};
static const in6_addr k6BonePrefix = {{{0x3f, 0xfe, 0}}};
-bool IPAddress::strip_sensitive_ = false;
-
static bool IsPrivateV4(uint32_t ip);
static in_addr ExtractMappedAddress(const in6_addr& addr);
@@ -144,9 +144,10 @@ std::string IPAddress::ToString() const {
}
std::string IPAddress::ToSensitiveString() const {
- if (!strip_sensitive_)
- return ToString();
-
+#if !defined(NDEBUG)
+ // Return the full, non-stripped address in debug builds.
+ return ToString();
+#else
switch (family_) {
case AF_INET: {
std::string address = ToString();
@@ -158,12 +159,20 @@ std::string IPAddress::ToSensitiveString() const {
return address;
}
case AF_INET6: {
- // TODO(grunell): Return a string of format 1:2:3:x:x:x:x:x or such
- // instead of zeroing out.
- return TruncateIP(*this, 128 - 80).ToString();
+ std::string result;
+ result.resize(INET6_ADDRSTRLEN);
+ in6_addr addr = ipv6_address();
+ size_t len =
+ rtc::sprintfn(&(result[0]), result.size(), "%x:%x:%x:x:x:x:x:x",
+ (addr.s6_addr[0] << 8) + addr.s6_addr[1],
+ (addr.s6_addr[2] << 8) + addr.s6_addr[3],
+ (addr.s6_addr[4] << 8) + addr.s6_addr[5]);
+ result.resize(len);
+ return result;
}
}
return std::string();
+#endif
}
IPAddress IPAddress::Normalized() const {
@@ -186,10 +195,6 @@ IPAddress IPAddress::AsIPv6Address() const {
return IPAddress(v6addr);
}
-void IPAddress::set_strip_sensitive(bool enable) {
- strip_sensitive_ = enable;
-}
-
bool InterfaceAddress::operator==(const InterfaceAddress &other) const {
return ipv6_flags_ == other.ipv6_flags() &&
static_cast<const IPAddress&>(*this) == other;
@@ -506,4 +511,15 @@ IPAddress GetLoopbackIP(int family) {
}
return rtc::IPAddress();
}
+
+IPAddress GetAnyIP(int family) {
+ if (family == AF_INET) {
+ return rtc::IPAddress(INADDR_ANY);
+ }
+ if (family == AF_INET6) {
+ return rtc::IPAddress(in6addr_any);
+ }
+ return rtc::IPAddress();
+}
+
} // namespace rtc
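
The new GetAnyIP() mirrors the existing GetLoopbackIP(); a small sketch of what it returns for each family:

#include "webrtc/base/ipaddress.h"

void Example() {
  rtc::IPAddress any4 = rtc::GetAnyIP(AF_INET);    // "0.0.0.0" (INADDR_ANY)
  rtc::IPAddress any6 = rtc::GetAnyIP(AF_INET6);   // "::" (in6addr_any)
  rtc::IPAddress none = rtc::GetAnyIP(AF_UNSPEC);  // default (nil) address
}
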
diff --git a/webrtc/base/ipaddress.h b/webrtc/base/ipaddress.h
index fe2d6e2c92..ef1e3d8170 100644
--- a/webrtc/base/ipaddress.h
+++ b/webrtc/base/ipaddress.h
@@ -112,16 +112,12 @@ class IPAddress {
// Whether this is an unspecified IP address.
bool IsNil() const;
- static void set_strip_sensitive(bool enable);
-
private:
int family_;
union {
in_addr ip4;
in6_addr ip6;
} u_;
-
- static bool strip_sensitive_;
};
// IP class which could represent IPv6 address flags which is only
@@ -180,6 +176,7 @@ int IPAddressPrecedence(const IPAddress& ip);
IPAddress TruncateIP(const IPAddress& ip, int length);
IPAddress GetLoopbackIP(int family);
+IPAddress GetAnyIP(int family);
// Returns the number of contiguously set bits, counting from the MSB in network
// byte order, in this IPAddress. Bits after the first 0 encountered are not
diff --git a/webrtc/base/ipaddress_unittest.cc b/webrtc/base/ipaddress_unittest.cc
index d5cb6f709c..62773c143a 100644
--- a/webrtc/base/ipaddress_unittest.cc
+++ b/webrtc/base/ipaddress_unittest.cc
@@ -25,6 +25,10 @@ static const in6_addr kIPv6PublicAddr = {{{0x24, 0x01, 0xfa, 0x00,
0x00, 0x04, 0x10, 0x00,
0xbe, 0x30, 0x5b, 0xff,
0xfe, 0xe5, 0x00, 0xc3}}};
+static const in6_addr kIPv6PublicAddr2 = {{{0x24, 0x01, 0x00, 0x00,
+ 0x00, 0x00, 0x10, 0x00,
+ 0xbe, 0x30, 0x5b, 0xff,
+ 0xfe, 0xe5, 0x00, 0xc3}}};
static const in6_addr kIPv4MappedAnyAddr = {{{0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0xff, 0xff,
@@ -52,7 +56,12 @@ static const std::string kIPv6TemporaryAddrString =
"2620:0:1008:1201:2089:6dda:385e:80c0";
static const std::string kIPv6PublicAddrString =
"2401:fa00:4:1000:be30:5bff:fee5:c3";
-static const std::string kIPv6PublicAddrAnonymizedString = "2401:fa00:4::";
+static const std::string kIPv6PublicAddr2String =
+ "2401::1000:be30:5bff:fee5:c3";
+static const std::string kIPv6PublicAddrAnonymizedString =
+ "2401:fa00:4:x:x:x:x:x";
+static const std::string kIPv6PublicAddr2AnonymizedString =
+ "2401:0:0:x:x:x:x:x";
static const std::string kIPv4MappedAnyAddrString = "::ffff:0:0";
static const std::string kIPv4MappedRFC1918AddrString = "::ffff:c0a8:701";
static const std::string kIPv4MappedLoopbackAddrString = "::ffff:7f00:1";
@@ -888,20 +897,20 @@ TEST(IPAddressTest, TestCategorizeIPv6) {
TEST(IPAddressTest, TestToSensitiveString) {
IPAddress addr_v4 = IPAddress(kIPv4PublicAddr);
- EXPECT_EQ(kIPv4PublicAddrString, addr_v4.ToString());
- EXPECT_EQ(kIPv4PublicAddrString, addr_v4.ToSensitiveString());
- IPAddress::set_strip_sensitive(true);
- EXPECT_EQ(kIPv4PublicAddrString, addr_v4.ToString());
- EXPECT_EQ(kIPv4PublicAddrAnonymizedString, addr_v4.ToSensitiveString());
- IPAddress::set_strip_sensitive(false);
-
IPAddress addr_v6 = IPAddress(kIPv6PublicAddr);
+ IPAddress addr_v6_2 = IPAddress(kIPv6PublicAddr2);
+ EXPECT_EQ(kIPv4PublicAddrString, addr_v4.ToString());
EXPECT_EQ(kIPv6PublicAddrString, addr_v6.ToString());
- EXPECT_EQ(kIPv6PublicAddrString, addr_v6.ToSensitiveString());
- IPAddress::set_strip_sensitive(true);
- EXPECT_EQ(kIPv6PublicAddrString, addr_v6.ToString());
+ EXPECT_EQ(kIPv6PublicAddr2String, addr_v6_2.ToString());
+#if defined(NDEBUG)
+ EXPECT_EQ(kIPv4PublicAddrAnonymizedString, addr_v4.ToSensitiveString());
EXPECT_EQ(kIPv6PublicAddrAnonymizedString, addr_v6.ToSensitiveString());
- IPAddress::set_strip_sensitive(false);
+ EXPECT_EQ(kIPv6PublicAddr2AnonymizedString, addr_v6_2.ToSensitiveString());
+#else
+ EXPECT_EQ(kIPv4PublicAddrString, addr_v4.ToSensitiveString());
+ EXPECT_EQ(kIPv6PublicAddrString, addr_v6.ToSensitiveString());
+ EXPECT_EQ(kIPv6PublicAddr2String, addr_v6_2.ToSensitiveString());
+#endif // defined(NDEBUG)
}
TEST(IPAddressTest, TestInterfaceAddress) {
diff --git a/webrtc/base/keep_ref_until_done.h b/webrtc/base/keep_ref_until_done.h
new file mode 100644
index 0000000000..269e1c8657
--- /dev/null
+++ b/webrtc/base/keep_ref_until_done.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_KEEP_REF_UNTIL_DONE_H_
+#define WEBRTC_BASE_KEEP_REF_UNTIL_DONE_H_
+
+#include "webrtc/base/bind.h"
+#include "webrtc/base/callback.h"
+#include "webrtc/base/refcount.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+
+namespace rtc {
+
+namespace impl {
+template <class T>
+static inline void DoNothing(const scoped_refptr<T>& object) {}
+} // namespace impl
+
+// KeepRefUntilDone keeps a reference to |object| until the returned
+// callback goes out of scope. If the returned callback is copied, the
+// reference will be released when the last callback goes out of scope.
+template <class ObjectT>
+static inline Callback0<void> KeepRefUntilDone(ObjectT* object) {
+ return rtc::Bind(&impl::DoNothing<ObjectT>, scoped_refptr<ObjectT>(object));
+}
+
+template <class ObjectT>
+static inline Callback0<void> KeepRefUntilDone(
+ const scoped_refptr<ObjectT>& object) {
+ return rtc::Bind(&impl::DoNothing<ObjectT>, object);
+}
+
+} // namespace rtc
+
+
+#endif // WEBRTC_BASE_KEEP_REF_UNTIL_DONE_H_
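
A sketch mirroring the unit tests added earlier in this diff; Frame is a hypothetical ref-counted resource (rtc::RefCountedObject supplies the AddRef/Release implementation):

#include "webrtc/base/callback.h"
#include "webrtc/base/keep_ref_until_done.h"
#include "webrtc/base/refcount.h"
#include "webrtc/base/scoped_ref_ptr.h"

class Frame : public rtc::RefCountInterface {
  // ... pixel buffers, metadata ...
};

void Example() {
  rtc::scoped_refptr<Frame> frame(new rtc::RefCountedObject<Frame>());
  // The callback holds one reference, so the Frame outlives this pointer.
  rtc::Callback0<void> done = rtc::KeepRefUntilDone(frame);
  frame = nullptr;  // The Frame is still alive; |done| keeps it.
  // When |done| and all copies of it are destroyed, the last reference
  // drops and the Frame is deleted.
}
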
diff --git a/webrtc/base/latebindingsymboltable_unittest.cc b/webrtc/base/latebindingsymboltable_unittest.cc
index 30ebd17cba..0079f20342 100644
--- a/webrtc/base/latebindingsymboltable_unittest.cc
+++ b/webrtc/base/latebindingsymboltable_unittest.cc
@@ -21,9 +21,10 @@ namespace rtc {
#define LIBM_SYMBOLS_CLASS_NAME LibmTestSymbolTable
#define LIBM_SYMBOLS_LIST \
- X(acos) \
- X(sin) \
- X(tan)
+ X(acosf) \
+ X(sinf) \
+ X(tanf)
+
#define LATE_BINDING_SYMBOL_TABLE_CLASS_NAME LIBM_SYMBOLS_CLASS_NAME
#define LATE_BINDING_SYMBOL_TABLE_SYMBOLS_LIST LIBM_SYMBOLS_LIST
@@ -39,9 +40,9 @@ TEST(LateBindingSymbolTable, libm) {
EXPECT_FALSE(table.IsLoaded());
ASSERT_TRUE(table.Load());
EXPECT_TRUE(table.IsLoaded());
- EXPECT_EQ(table.acos()(0.5), acos(0.5));
- EXPECT_EQ(table.sin()(0.5), sin(0.5));
- EXPECT_EQ(table.tan()(0.5), tan(0.5));
+ EXPECT_EQ(table.acosf()(0.5f), acosf(0.5f));
+ EXPECT_EQ(table.sinf()(0.5f), sinf(0.5f));
+ EXPECT_EQ(table.tanf()(0.5f), tanf(0.5f));
// It would be nice to check that the addresses are the same, but the nature
// of dynamic linking and relocation makes them actually be different.
table.Unload();
diff --git a/webrtc/base/linux.cc b/webrtc/base/linux.cc
index 1586b27b07..0894d39c77 100644
--- a/webrtc/base/linux.cc
+++ b/webrtc/base/linux.cc
@@ -233,33 +233,6 @@ bool ConfigParser::ParseLine(std::string* key, std::string* value) {
return true;
}
-#if !defined(WEBRTC_CHROMIUM_BUILD)
-static bool ExpectLineFromStream(FileStream* stream,
- std::string* out) {
- StreamResult res = stream->ReadLine(out);
- if (res != SR_SUCCESS) {
- if (res != SR_EOS) {
- LOG(LS_ERROR) << "Error when reading from stream";
- } else {
- LOG(LS_ERROR) << "Incorrect number of lines in stream";
- }
- return false;
- }
- return true;
-}
-
-static void ExpectEofFromStream(FileStream* stream) {
- std::string unused;
- StreamResult res = stream->ReadLine(&unused);
- if (res == SR_SUCCESS) {
- LOG(LS_WARNING) << "Ignoring unexpected extra lines from stream";
- } else if (res != SR_EOS) {
- LOG(LS_WARNING) << "Error when checking for extra lines from stream";
- }
-}
-
-#endif
-
std::string ReadLinuxUname() {
struct utsname buf;
if (uname(&buf) < 0) {
diff --git a/webrtc/base/logging.cc b/webrtc/base/logging.cc
index b02be27e33..686b9b2b02 100644
--- a/webrtc/base/logging.cc
+++ b/webrtc/base/logging.cc
@@ -92,13 +92,13 @@ std::string ErrorName(int err, const ConstantLabel* err_table) {
/////////////////////////////////////////////////////////////////////////////
// By default, release builds don't log, debug builds at info level
-#if _DEBUG
+#if !defined(NDEBUG)
LoggingSeverity LogMessage::min_sev_ = LS_INFO;
LoggingSeverity LogMessage::dbg_sev_ = LS_INFO;
-#else // !_DEBUG
+#else
LoggingSeverity LogMessage::min_sev_ = LS_NONE;
LoggingSeverity LogMessage::dbg_sev_ = LS_NONE;
-#endif // !_DEBUG
+#endif
bool LogMessage::log_to_stderr_ = true;
namespace {
@@ -340,7 +340,7 @@ void LogMessage::OutputToDebug(const std::string& str,
LoggingSeverity severity,
const std::string& tag) {
bool log_to_stderr = log_to_stderr_;
-#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS) && (!defined(_DEBUG) || defined(NDEBUG))
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS) && defined(NDEBUG)
// On the Mac, all stderr output goes to the Console log and causes clutter.
// So in opt builds, don't log to stderr unless the user specifically sets
// a preference to do so.
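
The switch from _DEBUG to NDEBUG matters for portability: _DEBUG is an MSVC convention, while NDEBUG is the standard macro that assert() also keys off, so !defined(NDEBUG) detects a debug build on every toolchain. The gating pattern, in isolation:

    #include <cassert>

    // NDEBUG is defined in optimized builds by convention; assert() becomes a
    // no-op when it is set, and the logging defaults follow the same signal.
    #if !defined(NDEBUG)
    static const bool kVerboseByDefault = true;   // debug build
    #else
    static const bool kVerboseByDefault = false;  // release build
    #endif
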
diff --git a/webrtc/base/logging.h b/webrtc/base/logging.h
index 1208275a34..e40ca4465f 100644
--- a/webrtc/base/logging.h
+++ b/webrtc/base/logging.h
@@ -285,7 +285,7 @@ class LogMessageVoidify {
rtc::LogMessage(__FILE__, __LINE__, sev).stream()
// The _F version prefixes the message with the current function name.
-#if (defined(__GNUC__) && defined(_DEBUG)) || defined(WANT_PRETTY_LOG_F)
+#if (defined(__GNUC__) && !defined(NDEBUG)) || defined(WANT_PRETTY_LOG_F)
#define LOG_F(sev) LOG(sev) << __PRETTY_FUNCTION__ << ": "
#define LOG_T_F(sev) LOG(sev) << this << ": " << __PRETTY_FUNCTION__ << ": "
#else
diff --git a/webrtc/base/logging_unittest.cc b/webrtc/base/logging_unittest.cc
index 3719cde4e9..6047361bf5 100644
--- a/webrtc/base/logging_unittest.cc
+++ b/webrtc/base/logging_unittest.cc
@@ -14,7 +14,6 @@
#include "webrtc/base/pathutils.h"
#include "webrtc/base/stream.h"
#include "webrtc/base/thread.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
namespace rtc {
diff --git a/webrtc/base/macasyncsocket.cc b/webrtc/base/macasyncsocket.cc
index 1f12500b73..8f811ea8b6 100644
--- a/webrtc/base/macasyncsocket.cc
+++ b/webrtc/base/macasyncsocket.cc
@@ -112,7 +112,7 @@ int MacAsyncSocket::Connect(const SocketAddress& addr) {
SetError(EALREADY);
return SOCKET_ERROR;
}
- if (addr.IsUnresolved()) {
+ if (addr.IsUnresolvedIP()) {
LOG(LS_VERBOSE) << "Resolving addr in MacAsyncSocket::Connect";
resolver_ = new AsyncResolver();
resolver_->SignalWorkDone.connect(this,
diff --git a/webrtc/base/macifaddrs_converter.cc b/webrtc/base/macifaddrs_converter.cc
new file mode 100644
index 0000000000..0916cb5ba2
--- /dev/null
+++ b/webrtc/base/macifaddrs_converter.cc
@@ -0,0 +1,281 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <net/if.h>
+#include <sys/ioctl.h>
+#include <unistd.h>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/ifaddrs_converter.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/scoped_ptr.h"
+
+#if !defined(WEBRTC_IOS)
+#include <net/if_media.h>
+#include <netinet/in_var.h>
+#else // WEBRTC_IOS
+#define SCOPE6_ID_MAX 16
+
+struct in6_addrlifetime {
+ time_t ia6t_expire; /* valid lifetime expiration time */
+ time_t ia6t_preferred; /* preferred lifetime expiration time */
+ u_int32_t ia6t_vltime; /* valid lifetime */
+ u_int32_t ia6t_pltime; /* prefix lifetime */
+};
+
+struct in6_ifstat {
+ u_quad_t ifs6_in_receive; /* # of total input datagram */
+ u_quad_t ifs6_in_hdrerr; /* # of datagrams with invalid hdr */
+ u_quad_t ifs6_in_toobig; /* # of datagrams exceeded MTU */
+ u_quad_t ifs6_in_noroute; /* # of datagrams with no route */
+ u_quad_t ifs6_in_addrerr; /* # of datagrams with invalid dst */
+ u_quad_t ifs6_in_protounknown; /* # of datagrams with unknown proto */
+ /* NOTE: increment on final dst if */
+ u_quad_t ifs6_in_truncated; /* # of truncated datagrams */
+ u_quad_t ifs6_in_discard; /* # of discarded datagrams */
+ /* NOTE: fragment timeout is not here */
+ u_quad_t ifs6_in_deliver; /* # of datagrams delivered to ULP */
+ /* NOTE: increment on final dst if */
+ u_quad_t ifs6_out_forward; /* # of datagrams forwarded */
+ /* NOTE: increment on outgoing if */
+ u_quad_t ifs6_out_request; /* # of outgoing datagrams from ULP */
+ /* NOTE: does not include forwrads */
+ u_quad_t ifs6_out_discard; /* # of discarded datagrams */
+ u_quad_t ifs6_out_fragok; /* # of datagrams fragmented */
+ u_quad_t ifs6_out_fragfail; /* # of datagrams failed on fragment */
+ u_quad_t ifs6_out_fragcreat; /* # of fragment datagrams */
+ /* NOTE: this is # after fragment */
+ u_quad_t ifs6_reass_reqd; /* # of incoming fragmented packets */
+ /* NOTE: increment on final dst if */
+ u_quad_t ifs6_reass_ok; /* # of reassembled packets */
+ /* NOTE: this is # after reass */
+ /* NOTE: increment on final dst if */
+ u_quad_t ifs6_reass_fail; /* # of reass failures */
+ /* NOTE: may not be packet count */
+ /* NOTE: increment on final dst if */
+ u_quad_t ifs6_in_mcast; /* # of inbound multicast datagrams */
+ u_quad_t ifs6_out_mcast; /* # of outbound multicast datagrams */
+};
+struct icmp6_ifstat {
+ /*
+ * Input statistics
+ */
+ /* ipv6IfIcmpInMsgs, total # of input messages */
+ u_quad_t ifs6_in_msg;
+ /* ipv6IfIcmpInErrors, # of input error messages */
+ u_quad_t ifs6_in_error;
+ /* ipv6IfIcmpInDestUnreachs, # of input dest unreach errors */
+ u_quad_t ifs6_in_dstunreach;
+ /* ipv6IfIcmpInAdminProhibs, # of input admin. prohibited errs */
+ u_quad_t ifs6_in_adminprohib;
+ /* ipv6IfIcmpInTimeExcds, # of input time exceeded errors */
+ u_quad_t ifs6_in_timeexceed;
+ /* ipv6IfIcmpInParmProblems, # of input parameter problem errors */
+ u_quad_t ifs6_in_paramprob;
+ /* ipv6IfIcmpInPktTooBigs, # of input packet too big errors */
+ u_quad_t ifs6_in_pkttoobig;
+ /* ipv6IfIcmpInEchos, # of input echo requests */
+ u_quad_t ifs6_in_echo;
+ /* ipv6IfIcmpInEchoReplies, # of input echo replies */
+ u_quad_t ifs6_in_echoreply;
+ /* ipv6IfIcmpInRouterSolicits, # of input router solicitations */
+ u_quad_t ifs6_in_routersolicit;
+ /* ipv6IfIcmpInRouterAdvertisements, # of input router advertisements */
+ u_quad_t ifs6_in_routeradvert;
+ /* ipv6IfIcmpInNeighborSolicits, # of input neighbor solicitations */
+ u_quad_t ifs6_in_neighborsolicit;
+ /* ipv6IfIcmpInNeighborAdvertisements, # of input neighbor advs. */
+ u_quad_t ifs6_in_neighboradvert;
+ /* ipv6IfIcmpInRedirects, # of input redirects */
+ u_quad_t ifs6_in_redirect;
+ /* ipv6IfIcmpInGroupMembQueries, # of input MLD queries */
+ u_quad_t ifs6_in_mldquery;
+ /* ipv6IfIcmpInGroupMembResponses, # of input MLD reports */
+ u_quad_t ifs6_in_mldreport;
+ /* ipv6IfIcmpInGroupMembReductions, # of input MLD done */
+ u_quad_t ifs6_in_mlddone;
+
+ /*
+ * Output statistics. We should solve unresolved routing problem...
+ */
+ /* ipv6IfIcmpOutMsgs, total # of output messages */
+ u_quad_t ifs6_out_msg;
+ /* ipv6IfIcmpOutErrors, # of output error messages */
+ u_quad_t ifs6_out_error;
+ /* ipv6IfIcmpOutDestUnreachs, # of output dest unreach errors */
+ u_quad_t ifs6_out_dstunreach;
+ /* ipv6IfIcmpOutAdminProhibs, # of output admin. prohibited errs */
+ u_quad_t ifs6_out_adminprohib;
+ /* ipv6IfIcmpOutTimeExcds, # of output time exceeded errors */
+ u_quad_t ifs6_out_timeexceed;
+ /* ipv6IfIcmpOutParmProblems, # of output parameter problem errors */
+ u_quad_t ifs6_out_paramprob;
+ /* ipv6IfIcmpOutPktTooBigs, # of output packet too big errors */
+ u_quad_t ifs6_out_pkttoobig;
+ /* ipv6IfIcmpOutEchos, # of output echo requests */
+ u_quad_t ifs6_out_echo;
+ /* ipv6IfIcmpOutEchoReplies, # of output echo replies */
+ u_quad_t ifs6_out_echoreply;
+ /* ipv6IfIcmpOutRouterSolicits, # of output router solicitations */
+ u_quad_t ifs6_out_routersolicit;
+ /* ipv6IfIcmpOutRouterAdvertisements, # of output router advs. */
+ u_quad_t ifs6_out_routeradvert;
+ /* ipv6IfIcmpOutNeighborSolicits, # of output neighbor solicitations */
+ u_quad_t ifs6_out_neighborsolicit;
+ /* ipv6IfIcmpOutNeighborAdvertisements, # of output neighbor advs. */
+ u_quad_t ifs6_out_neighboradvert;
+ /* ipv6IfIcmpOutRedirects, # of output redirects */
+ u_quad_t ifs6_out_redirect;
+ /* ipv6IfIcmpOutGroupMembQueries, # of output MLD queries */
+ u_quad_t ifs6_out_mldquery;
+ /* ipv6IfIcmpOutGroupMembResponses, # of output MLD reports */
+ u_quad_t ifs6_out_mldreport;
+ /* ipv6IfIcmpOutGroupMembReductions, # of output MLD done */
+ u_quad_t ifs6_out_mlddone;
+};
+
+struct in6_ifreq {
+ char ifr_name[IFNAMSIZ];
+ union {
+ struct sockaddr_in6 ifru_addr;
+ struct sockaddr_in6 ifru_dstaddr;
+ int ifru_flags;
+ int ifru_flags6;
+ int ifru_metric;
+ int ifru_intval;
+ caddr_t ifru_data;
+ struct in6_addrlifetime ifru_lifetime;
+ struct in6_ifstat ifru_stat;
+ struct icmp6_ifstat ifru_icmp6stat;
+ u_int32_t ifru_scope_id[SCOPE6_ID_MAX];
+ } ifr_ifru;
+};
+
+#define SIOCGIFAFLAG_IN6 _IOWR('i', 73, struct in6_ifreq)
+
+#define IN6_IFF_ANYCAST 0x0001 /* anycast address */
+#define IN6_IFF_TENTATIVE 0x0002 /* tentative address */
+#define IN6_IFF_DUPLICATED 0x0004 /* DAD detected duplicate */
+#define IN6_IFF_DETACHED 0x0008 /* may be detached from the link */
+#define IN6_IFF_DEPRECATED 0x0010 /* deprecated address */
+#define IN6_IFF_TEMPORARY 0x0080 /* temporary (anonymous) address. */
+
+#endif // WEBRTC_IOS
+
+namespace rtc {
+
+namespace {
+
+class IPv6AttributesGetter {
+ public:
+ IPv6AttributesGetter();
+ virtual ~IPv6AttributesGetter();
+ bool IsInitialized() const;
+ bool GetIPAttributes(const char* ifname,
+ const sockaddr* sock_addr,
+ int* native_attributes);
+
+ private:
+  // On Mac or iOS, we have to use ioctl() on a socket to query an IPv6
+  // interface's attributes.
+ int ioctl_socket_;
+};
+
+IPv6AttributesGetter::IPv6AttributesGetter()
+ : ioctl_socket_(
+ socket(AF_INET6, SOCK_DGRAM, 0 /* unspecified protocol */)) {
+ RTC_DCHECK_GE(ioctl_socket_, 0);
+}
+
+bool IPv6AttributesGetter::IsInitialized() const {
+ return ioctl_socket_ >= 0;
+}
+
+IPv6AttributesGetter::~IPv6AttributesGetter() {
+ if (!IsInitialized()) {
+ return;
+ }
+ close(ioctl_socket_);
+}
+
+bool IPv6AttributesGetter::GetIPAttributes(const char* ifname,
+ const sockaddr* sock_addr,
+ int* native_attributes) {
+ if (!IsInitialized()) {
+ return false;
+ }
+
+ struct in6_ifreq ifr = {};
+ strncpy(ifr.ifr_name, ifname, sizeof(ifr.ifr_name) - 1);
+ memcpy(&ifr.ifr_ifru.ifru_addr, sock_addr, sock_addr->sa_len);
+ int rv = ioctl(ioctl_socket_, SIOCGIFAFLAG_IN6, &ifr);
+ if (rv >= 0) {
+ *native_attributes = ifr.ifr_ifru.ifru_flags;
+ } else {
+    LOG(LS_ERROR) << "ioctl failed with errno " << errno;
+ }
+ return (rv >= 0);
+}
+
+// Converts native IPv6 address attributes to net IPv6 address attributes. If
+// this returns false, the IP address isn't suitable for one-to-one
+// communication applications and should be ignored.
+bool ConvertNativeToIPAttributes(int native_attributes, int* net_attributes) {
+  // On Mac OS X, we disallow addresses with the attributes IN6_IFF_ANYCAST,
+  // IN6_IFF_DUPLICATED, IN6_IFF_TENTATIVE, and IN6_IFF_DETACHED, as these are
+  // still progressing through duplicate address detection (DAD) or are not
+  // suitable for one-to-one communication applications.
+ if (native_attributes & (IN6_IFF_ANYCAST | IN6_IFF_DUPLICATED |
+ IN6_IFF_TENTATIVE | IN6_IFF_DETACHED)) {
+ return false;
+ }
+
+ if (native_attributes & IN6_IFF_TEMPORARY) {
+ *net_attributes |= IPV6_ADDRESS_FLAG_TEMPORARY;
+ }
+
+ if (native_attributes & IN6_IFF_DEPRECATED) {
+ *net_attributes |= IPV6_ADDRESS_FLAG_DEPRECATED;
+ }
+
+ return true;
+}
+
+class MacIfAddrsConverter : public IfAddrsConverter {
+ public:
+ MacIfAddrsConverter() : ip_attribute_getter_(new IPv6AttributesGetter()) {}
+ ~MacIfAddrsConverter() override {}
+
+ bool ConvertNativeAttributesToIPAttributes(const struct ifaddrs* interface,
+ int* ip_attributes) override {
+ int native_attributes;
+ if (!ip_attribute_getter_->GetIPAttributes(
+ interface->ifa_name, interface->ifa_addr, &native_attributes)) {
+ return false;
+ }
+
+ if (!ConvertNativeToIPAttributes(native_attributes, ip_attributes)) {
+ return false;
+ }
+
+ return true;
+ }
+
+ private:
+ rtc::scoped_ptr<IPv6AttributesGetter> ip_attribute_getter_;
+};
+
+} // namespace
+
+IfAddrsConverter* CreateIfAddrsConverter() {
+ return new MacIfAddrsConverter();
+}
+
+} // namespace rtc
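
Taken together, the converter slots into a getifaddrs() enumeration loop. A condensed sketch of the intended flow, with error handling elided (the factory and the override are the ones added above):

    #include <ifaddrs.h>

    struct ifaddrs* interfaces = nullptr;
    if (getifaddrs(&interfaces) == 0) {
      rtc::scoped_ptr<rtc::IfAddrsConverter> converter(
          rtc::CreateIfAddrsConverter());
      for (struct ifaddrs* cur = interfaces; cur != nullptr;
           cur = cur->ifa_next) {
        int ip_attributes = 0;  // receives IPV6_ADDRESS_FLAG_* bits
        if (!converter->ConvertNativeAttributesToIPAttributes(
                cur, &ip_attributes)) {
          continue;  // unsuitable for one-to-one communication; skip it
        }
        // ... build an rtc::Network from |cur| and |ip_attributes| ...
      }
      freeifaddrs(interfaces);
    }
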
diff --git a/webrtc/base/macutils.cc b/webrtc/base/macutils.cc
index 6e436d4a80..7b1ff47536 100644
--- a/webrtc/base/macutils.cc
+++ b/webrtc/base/macutils.cc
@@ -191,10 +191,10 @@ bool RunAppleScript(const std::string& script) {
AECreateDesc(typeNull, NULL, 0, &result_data);
OSAScriptError(component, kOSAErrorMessage, typeChar, &result_data);
int len = AEGetDescDataSize(&result_data);
- char* data = (char*) malloc(len);
+ char* data = (char*)malloc(len);
if (data != NULL) {
err = AEGetDescData(&result_data, data, len);
- LOG(LS_ERROR) << "Script error: " << data;
+ LOG(LS_ERROR) << "Script error: " << std::string(data, len);
}
AEDisposeDesc(&script_desc);
AEDisposeDesc(&result_data);
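
The logging change fixes a subtle out-of-bounds read: AEGetDescData fills a buffer that is not NUL-terminated, so streaming the raw char* could scan past |len|. Constructing the string with an explicit length bounds the read. The general pattern:

    char buf[4] = {'a', 'b', 'c', 'd'};  // no trailing '\0'
    // LOG(LS_ERROR) << buf;             // undefined: reads past the array
    LOG(LS_ERROR) << std::string(buf, sizeof(buf));  // safe: explicit length
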
diff --git a/webrtc/base/maybe.h b/webrtc/base/maybe.h
deleted file mode 100644
index df204366d5..0000000000
--- a/webrtc/base/maybe.h
+++ /dev/null
@@ -1,121 +0,0 @@
-/*
- * Copyright 2015 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_BASE_MAYBE_H_
-#define WEBRTC_BASE_MAYBE_H_
-
-#include <algorithm>
-#include <utility>
-
-#include "webrtc/base/checks.h"
-
-namespace rtc {
-
-// Simple std::experimental::optional-wannabe. It either contains a T or not.
-// In order to keep the implementation simple and portable, this implementation
-// actually contains a (default-constructed) T even when it supposedly doesn't
-// contain a value; use e.g. rtc::scoped_ptr<T> instead if that's too
-// expensive.
-//
-// A moved-from Maybe<T> may only be destroyed, and assigned to if T allows
-// being assigned to after having been moved from. Specifically, you may not
-// assume that it just doesn't contain a value anymore.
-//
-// TODO(kwiberg): Get rid of this class when the standard library has
-// std::optional (and we're allowed to use it).
-template <typename T>
-class Maybe final {
- public:
- // Construct an empty Maybe.
- Maybe() : has_value_(false) {}
-
- // Construct a Maybe that contains a value. Note: These are non-explicit, so
- // that a T will implicitly convert to Maybe<T>.
- Maybe(const T& val) : value_(val), has_value_(true) {}
- Maybe(T&& val) : value_(static_cast<T&&>(val)), has_value_(true) {}
-
- // Copy and move constructors.
- // TODO(kwiberg): =default the move constructor when MSVC supports it.
- Maybe(const Maybe&) = default;
- Maybe(Maybe&& m)
- : value_(static_cast<T&&>(m.value_)), has_value_(m.has_value_) {}
-
- // Assignment. Note that we allow assignment from either Maybe<T> or plain T.
- // TODO(kwiberg): =default the move assignment op when MSVC supports it.
- Maybe& operator=(const Maybe&) = default;
- Maybe& operator=(Maybe&& m) {
- value_ = static_cast<T&&>(m.value_);
- has_value_ = m.has_value_;
- return *this;
- }
- Maybe& operator=(const T& val) {
- value_ = val;
- has_value_ = true;
- return *this;
- }
- Maybe& operator=(T&& val) {
- value_ = static_cast<T&&>(val);
- has_value_ = true;
- return *this;
- }
-
- friend void swap(Maybe& m1, Maybe& m2) {
- using std::swap;
- swap(m1.value_, m2.value_);
- swap(m1.has_value_, m2.has_value_);
- }
-
- // Conversion to bool to test if we have a value.
- explicit operator bool() const { return has_value_; }
-
- // Dereferencing. Only allowed if we have a value.
- const T* operator->() const {
- RTC_DCHECK(has_value_);
- return &value_;
- }
- T* operator->() {
- RTC_DCHECK(has_value_);
- return &value_;
- }
- const T& operator*() const {
- RTC_DCHECK(has_value_);
- return value_;
- }
- T& operator*() {
- RTC_DCHECK(has_value_);
- return value_;
- }
-
- // Dereference with a default value in case we don't have a value.
- const T& value_or(const T& default_val) const {
- return has_value_ ? value_ : default_val;
- }
-
- // Equality tests. Two Maybes are equal if they contain equivalent values, or
- // if they're both empty.
- friend bool operator==(const Maybe& m1, const Maybe& m2) {
- return m1.has_value_ && m2.has_value_ ? m1.value_ == m2.value_
- : m1.has_value_ == m2.has_value_;
- }
- friend bool operator!=(const Maybe& m1, const Maybe& m2) {
- return m1.has_value_ && m2.has_value_ ? m1.value_ != m2.value_
- : m1.has_value_ != m2.has_value_;
- }
-
- private:
- // Invariant: Unless *this has been moved from, value_ is default-initialized
- // (or copied or moved from a default-initialized T) if !has_value_.
- T value_;
- bool has_value_;
-};
-
-} // namespace rtc
-
-#endif // WEBRTC_BASE_MAYBE_H_
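
For readers tracking the removal, this is the surface call sites must migrate away from, reconstructed from the deleted class above (the replacement, which appears to be rtc::Optional in later upstream revisions, is outside this diff):

    // Usage pattern of the removed rtc::Maybe<T>, for reference only.
    rtc::Maybe<int> HalveIfEven(int n) {
      if (n % 2 != 0)
        return rtc::Maybe<int>();  // empty
      return n / 2;                // implicit conversion from T
    }

    rtc::Maybe<int> half = HalveIfEven(4);
    if (half)                      // explicit operator bool: engaged?
      ++*half;                     // dereference only when engaged
    int w = HalveIfEven(3).value_or(0);  // falls back to 0 when empty
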
diff --git a/webrtc/base/maybe_unittest.cc b/webrtc/base/maybe_unittest.cc
deleted file mode 100644
index 73fdc90873..0000000000
--- a/webrtc/base/maybe_unittest.cc
+++ /dev/null
@@ -1,485 +0,0 @@
-/*
- * Copyright 2015 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <sstream>
-#include <string>
-#include <utility>
-#include <vector>
-
-#include "webrtc/base/gunit.h"
-#include "webrtc/base/maybe.h"
-
-namespace rtc {
-
-namespace {
-
-// Class whose instances logs various method calls (constructor, destructor,
-// etc.). Each instance has a unique ID (a simple global sequence number) and
-// an origin ID. When a copy is made, the new object gets a fresh ID but copies
-// the origin ID from the original. When a new Logger is created from scratch,
-// it gets a fresh ID, and the origin ID is the same as the ID (default
-// constructor) or given as an argument (explicit constructor).
-class Logger {
- public:
- Logger() : id_(next_id_++), origin_(id_) { Log("default constructor"); }
- explicit Logger(int origin) : id_(next_id_++), origin_(origin) {
- Log("explicit constructor");
- }
- Logger(const Logger& other) : id_(next_id_++), origin_(other.origin_) {
- LogFrom("copy constructor", other);
- }
- Logger(Logger&& other) : id_(next_id_++), origin_(other.origin_) {
- LogFrom("move constructor", other);
- }
- ~Logger() { Log("destructor"); }
- Logger& operator=(const Logger& other) {
- origin_ = other.origin_;
- LogFrom("operator= copy", other);
- return *this;
- }
- Logger& operator=(Logger&& other) {
- origin_ = other.origin_;
- LogFrom("operator= move", other);
- return *this;
- }
- friend void swap(Logger& a, Logger& b) {
- using std::swap;
- swap(a.origin_, b.origin_);
- Log2("swap", a, b);
- }
- friend bool operator==(const Logger& a, const Logger& b) {
- Log2("operator==", a, b);
- return a.origin_ == b.origin_;
- }
- friend bool operator!=(const Logger& a, const Logger& b) {
- Log2("operator!=", a, b);
- return a.origin_ != b.origin_;
- }
- void Foo() { Log("Foo()"); }
- void Foo() const { Log("Foo() const"); }
- static rtc::scoped_ptr<std::vector<std::string>> Setup() {
- auto s = rtc_make_scoped_ptr(new std::vector<std::string>);
- Logger::log_ = s.get();
- Logger::next_id_ = 0;
- return s;
- }
-
- private:
- int id_;
- int origin_;
- static std::vector<std::string>* log_;
- static int next_id_;
- void Log(const char* msg) const {
- std::ostringstream oss;
- oss << id_ << ':' << origin_ << ". " << msg;
- log_->push_back(oss.str());
- }
- void LogFrom(const char* msg, const Logger& other) const {
- std::ostringstream oss;
- oss << id_ << ':' << origin_ << ". " << msg << " (from " << other.id_ << ':'
- << other.origin_ << ")";
- log_->push_back(oss.str());
- }
- static void Log2(const char* msg, const Logger& a, const Logger& b) {
- std::ostringstream oss;
- oss << msg << ' ' << a.id_ << ':' << a.origin_ << ", " << b.id_ << ':'
- << b.origin_;
- log_->push_back(oss.str());
- }
-};
-
-std::vector<std::string>* Logger::log_ = nullptr;
-int Logger::next_id_ = 0;
-
-// Append all the other args to the vector pointed to by the first arg.
-template <typename T>
-void VectorAppend(std::vector<T>* v) {}
-template <typename T, typename... Ts>
-void VectorAppend(std::vector<T>* v, const T& e, Ts... es) {
- v->push_back(e);
- VectorAppend(v, es...);
-}
-
-// Create a vector of strings. Because we're not allowed to use
-// std::initializer_list.
-template <typename... Ts>
-std::vector<std::string> V(Ts... es) {
- std::vector<std::string> strings;
- VectorAppend(&strings, static_cast<std::string>(es)...);
- return strings;
-}
-
-} // namespace
-
-TEST(MaybeTest, TestConstructDefault) {
- auto log = Logger::Setup();
- {
- Maybe<Logger> x;
- EXPECT_FALSE(x);
- }
- EXPECT_EQ(V("0:0. default constructor", "0:0. destructor"), *log);
-}
-
-TEST(MaybeTest, TestConstructCopyEmpty) {
- auto log = Logger::Setup();
- {
- Maybe<Logger> x;
- EXPECT_FALSE(x);
- auto y = x;
- EXPECT_FALSE(y);
- }
- EXPECT_EQ(V("0:0. default constructor", "1:0. copy constructor (from 0:0)",
- "1:0. destructor", "0:0. destructor"),
- *log);
-}
-
-TEST(MaybeTest, TestConstructCopyFull) {
- auto log = Logger::Setup();
- {
- Logger a;
- Maybe<Logger> x = a;
- EXPECT_TRUE(x);
- log->push_back("---");
- auto y = x;
- EXPECT_TRUE(y);
- log->push_back("---");
- }
- EXPECT_EQ(V("0:0. default constructor", "1:0. copy constructor (from 0:0)",
- "---", "2:0. copy constructor (from 1:0)", "---",
- "2:0. destructor", "1:0. destructor", "0:0. destructor"),
- *log);
-}
-
-TEST(MaybeTest, TestConstructMoveEmpty) {
- auto log = Logger::Setup();
- {
- Maybe<Logger> x;
- EXPECT_FALSE(x);
- auto y = static_cast<Maybe<Logger>&&>(x);
- EXPECT_FALSE(y);
- }
- EXPECT_EQ(V("0:0. default constructor", "1:0. move constructor (from 0:0)",
- "1:0. destructor", "0:0. destructor"),
- *log);
-}
-
-TEST(MaybeTest, TestConstructMoveFull) {
- auto log = Logger::Setup();
- {
- Maybe<Logger> x = Logger(17);
- EXPECT_TRUE(x);
- log->push_back("---");
- auto y = static_cast<Maybe<Logger>&&>(x);
- EXPECT_TRUE(x);
- EXPECT_TRUE(y);
- log->push_back("---");
- }
- EXPECT_EQ(
- V("0:17. explicit constructor", "1:17. move constructor (from 0:17)",
- "0:17. destructor", "---", "2:17. move constructor (from 1:17)", "---",
- "2:17. destructor", "1:17. destructor"),
- *log);
-}
-
-TEST(MaybeTest, TestCopyAssignToEmptyFromEmpty) {
- auto log = Logger::Setup();
- {
- Maybe<Logger> x, y;
- x = y;
- }
- EXPECT_EQ(
- V("0:0. default constructor", "1:1. default constructor",
- "0:1. operator= copy (from 1:1)", "1:1. destructor", "0:1. destructor"),
- *log);
-}
-
-TEST(MaybeTest, TestCopyAssignToFullFromEmpty) {
- auto log = Logger::Setup();
- {
- Maybe<Logger> x = Logger(17);
- Maybe<Logger> y;
- log->push_back("---");
- x = y;
- log->push_back("---");
- }
- EXPECT_EQ(
- V("0:17. explicit constructor", "1:17. move constructor (from 0:17)",
- "0:17. destructor", "2:2. default constructor", "---",
- "1:2. operator= copy (from 2:2)", "---", "2:2. destructor",
- "1:2. destructor"),
- *log);
-}
-
-TEST(MaybeTest, TestCopyAssignToEmptyFromFull) {
- auto log = Logger::Setup();
- {
- Maybe<Logger> x;
- Maybe<Logger> y = Logger(17);
- log->push_back("---");
- x = y;
- log->push_back("---");
- }
- EXPECT_EQ(V("0:0. default constructor", "1:17. explicit constructor",
- "2:17. move constructor (from 1:17)", "1:17. destructor", "---",
- "0:17. operator= copy (from 2:17)", "---", "2:17. destructor",
- "0:17. destructor"),
- *log);
-}
-
-TEST(MaybeTest, TestCopyAssignToFullFromFull) {
- auto log = Logger::Setup();
- {
- Maybe<Logger> x = Logger(17);
- Maybe<Logger> y = Logger(42);
- log->push_back("---");
- x = y;
- log->push_back("---");
- }
- EXPECT_EQ(
- V("0:17. explicit constructor", "1:17. move constructor (from 0:17)",
- "0:17. destructor", "2:42. explicit constructor",
- "3:42. move constructor (from 2:42)", "2:42. destructor", "---",
- "1:42. operator= copy (from 3:42)", "---", "3:42. destructor",
- "1:42. destructor"),
- *log);
-}
-
-TEST(MaybeTest, TestCopyAssignToEmptyFromT) {
- auto log = Logger::Setup();
- {
- Maybe<Logger> x;
- Logger y(17);
- log->push_back("---");
- x = y;
- log->push_back("---");
- }
- EXPECT_EQ(V("0:0. default constructor", "1:17. explicit constructor", "---",
- "0:17. operator= copy (from 1:17)", "---", "1:17. destructor",
- "0:17. destructor"),
- *log);
-}
-
-TEST(MaybeTest, TestCopyAssignToFullFromT) {
- auto log = Logger::Setup();
- {
- Maybe<Logger> x = Logger(17);
- Logger y(42);
- log->push_back("---");
- x = y;
- log->push_back("---");
- }
- EXPECT_EQ(
- V("0:17. explicit constructor", "1:17. move constructor (from 0:17)",
- "0:17. destructor", "2:42. explicit constructor", "---",
- "1:42. operator= copy (from 2:42)", "---", "2:42. destructor",
- "1:42. destructor"),
- *log);
-}
-
-TEST(MaybeTest, TestMoveAssignToEmptyFromEmpty) {
- auto log = Logger::Setup();
- {
- Maybe<Logger> x, y;
- x = static_cast<Maybe<Logger>&&>(y);
- }
- EXPECT_EQ(
- V("0:0. default constructor", "1:1. default constructor",
- "0:1. operator= move (from 1:1)", "1:1. destructor", "0:1. destructor"),
- *log);
-}
-
-TEST(MaybeTest, TestMoveAssignToFullFromEmpty) {
- auto log = Logger::Setup();
- {
- Maybe<Logger> x = Logger(17);
- Maybe<Logger> y;
- log->push_back("---");
- x = static_cast<Maybe<Logger>&&>(y);
- log->push_back("---");
- }
- EXPECT_EQ(
- V("0:17. explicit constructor", "1:17. move constructor (from 0:17)",
- "0:17. destructor", "2:2. default constructor", "---",
- "1:2. operator= move (from 2:2)", "---", "2:2. destructor",
- "1:2. destructor"),
- *log);
-}
-
-TEST(MaybeTest, TestMoveAssignToEmptyFromFull) {
- auto log = Logger::Setup();
- {
- Maybe<Logger> x;
- Maybe<Logger> y = Logger(17);
- log->push_back("---");
- x = static_cast<Maybe<Logger>&&>(y);
- log->push_back("---");
- }
- EXPECT_EQ(V("0:0. default constructor", "1:17. explicit constructor",
- "2:17. move constructor (from 1:17)", "1:17. destructor", "---",
- "0:17. operator= move (from 2:17)", "---", "2:17. destructor",
- "0:17. destructor"),
- *log);
-}
-
-TEST(MaybeTest, TestMoveAssignToFullFromFull) {
- auto log = Logger::Setup();
- {
- Maybe<Logger> x = Logger(17);
- Maybe<Logger> y = Logger(42);
- log->push_back("---");
- x = static_cast<Maybe<Logger>&&>(y);
- log->push_back("---");
- }
- EXPECT_EQ(
- V("0:17. explicit constructor", "1:17. move constructor (from 0:17)",
- "0:17. destructor", "2:42. explicit constructor",
- "3:42. move constructor (from 2:42)", "2:42. destructor", "---",
- "1:42. operator= move (from 3:42)", "---", "3:42. destructor",
- "1:42. destructor"),
- *log);
-}
-
-TEST(MaybeTest, TestMoveAssignToEmptyFromT) {
- auto log = Logger::Setup();
- {
- Maybe<Logger> x;
- Logger y(17);
- log->push_back("---");
- x = static_cast<Logger&&>(y);
- log->push_back("---");
- }
- EXPECT_EQ(V("0:0. default constructor", "1:17. explicit constructor", "---",
- "0:17. operator= move (from 1:17)", "---", "1:17. destructor",
- "0:17. destructor"),
- *log);
-}
-
-TEST(MaybeTest, TestMoveAssignToFullFromT) {
- auto log = Logger::Setup();
- {
- Maybe<Logger> x = Logger(17);
- Logger y(42);
- log->push_back("---");
- x = static_cast<Logger&&>(y);
- log->push_back("---");
- }
- EXPECT_EQ(
- V("0:17. explicit constructor", "1:17. move constructor (from 0:17)",
- "0:17. destructor", "2:42. explicit constructor", "---",
- "1:42. operator= move (from 2:42)", "---", "2:42. destructor",
- "1:42. destructor"),
- *log);
-}
-
-TEST(MaybeTest, TestDereference) {
- auto log = Logger::Setup();
- {
- Maybe<Logger> x = Logger(42);
- const auto& y = x;
- log->push_back("---");
- x->Foo();
- y->Foo();
- static_cast<Maybe<Logger>&&>(x)->Foo();
- static_cast<const Maybe<Logger>&&>(y)->Foo();
- log->push_back("---");
- (*x).Foo();
- (*y).Foo();
- (*static_cast<Maybe<Logger>&&>(x)).Foo();
- (*static_cast<const Maybe<Logger>&&>(y)).Foo();
- log->push_back("---");
- }
- EXPECT_EQ(V("0:42. explicit constructor",
- "1:42. move constructor (from 0:42)", "0:42. destructor", "---",
- "1:42. Foo()", "1:42. Foo() const", "1:42. Foo()",
- "1:42. Foo() const", "---", "1:42. Foo()", "1:42. Foo() const",
- "1:42. Foo()", "1:42. Foo() const", "---", "1:42. destructor"),
- *log);
-}
-
-TEST(MaybeTest, TestDereferenceWithDefault) {
- auto log = Logger::Setup();
- {
- const Logger a(17), b(42);
- Maybe<Logger> x(a);
- Maybe<Logger> y;
- log->push_back("-1-");
- EXPECT_EQ(a, x.value_or(Logger(42)));
- log->push_back("-2-");
- EXPECT_EQ(b, y.value_or(Logger(42)));
- log->push_back("-3-");
- EXPECT_EQ(a, Maybe<Logger>(Logger(17)).value_or(b));
- log->push_back("-4-");
- EXPECT_EQ(b, Maybe<Logger>().value_or(b));
- log->push_back("-5-");
- }
- EXPECT_EQ(
- V("0:17. explicit constructor", "1:42. explicit constructor",
- "2:17. copy constructor (from 0:17)", "3:3. default constructor", "-1-",
- "4:42. explicit constructor", "operator== 0:17, 2:17",
- "4:42. destructor", "-2-", "5:42. explicit constructor",
- "operator== 1:42, 5:42", "5:42. destructor", "-3-",
- "6:17. explicit constructor", "7:17. move constructor (from 6:17)",
- "operator== 0:17, 7:17", "7:17. destructor", "6:17. destructor", "-4-",
- "8:8. default constructor", "operator== 1:42, 1:42", "8:8. destructor",
- "-5-", "3:3. destructor", "2:17. destructor", "1:42. destructor",
- "0:17. destructor"),
- *log);
-}
-
-TEST(MaybeTest, TestEquality) {
- auto log = Logger::Setup();
- {
- Logger a(17), b(42);
- Maybe<Logger> ma1(a), ma2(a), mb(b), me1, me2;
- log->push_back("---");
- EXPECT_EQ(ma1, ma1);
- EXPECT_EQ(ma1, ma2);
- EXPECT_NE(ma1, mb);
- EXPECT_NE(ma1, me1);
- EXPECT_EQ(me1, me1);
- EXPECT_EQ(me1, me2);
- log->push_back("---");
- }
- EXPECT_EQ(V("0:17. explicit constructor", "1:42. explicit constructor",
- "2:17. copy constructor (from 0:17)",
- "3:17. copy constructor (from 0:17)",
- "4:42. copy constructor (from 1:42)", "5:5. default constructor",
- "6:6. default constructor", "---", "operator== 2:17, 2:17",
- "operator== 2:17, 3:17", "operator!= 2:17, 4:42", "---",
- "6:6. destructor", "5:5. destructor", "4:42. destructor",
- "3:17. destructor", "2:17. destructor", "1:42. destructor",
- "0:17. destructor"),
- *log);
-}
-
-TEST(MaybeTest, TestSwap) {
- auto log = Logger::Setup();
- {
- Logger a(17), b(42);
- Maybe<Logger> x1(a), x2(b), y1(a), y2, z1, z2;
- log->push_back("---");
- swap(x1, x2); // Swap full <-> full.
- swap(y1, y2); // Swap full <-> empty.
- swap(z1, z2); // Swap empty <-> empty.
- log->push_back("---");
- }
- EXPECT_EQ(V("0:17. explicit constructor", "1:42. explicit constructor",
- "2:17. copy constructor (from 0:17)",
- "3:42. copy constructor (from 1:42)",
- "4:17. copy constructor (from 0:17)", "5:5. default constructor",
- "6:6. default constructor", "7:7. default constructor", "---",
- "swap 2:42, 3:17", "swap 4:5, 5:17", "swap 6:7, 7:6", "---",
- "7:6. destructor", "6:7. destructor", "5:17. destructor",
- "4:5. destructor", "3:17. destructor", "2:42. destructor",
- "1:42. destructor", "0:17. destructor"),
- *log);
-}
-
-} // namespace rtc
diff --git a/webrtc/base/messagehandler.h b/webrtc/base/messagehandler.h
index df82b4ea9a..b55b229a6d 100644
--- a/webrtc/base/messagehandler.h
+++ b/webrtc/base/messagehandler.h
@@ -11,6 +11,8 @@
#ifndef WEBRTC_BASE_MESSAGEHANDLER_H_
#define WEBRTC_BASE_MESSAGEHANDLER_H_
+#include <utility>
+
#include "webrtc/base/constructormagic.h"
#include "webrtc/base/scoped_ptr.h"
@@ -54,8 +56,8 @@ class FunctorMessageHandler<class rtc::scoped_ptr<ReturnT>, FunctorT>
: public MessageHandler {
public:
explicit FunctorMessageHandler(const FunctorT& functor) : functor_(functor) {}
- virtual void OnMessage(Message* msg) { result_ = functor_().Pass(); }
- rtc::scoped_ptr<ReturnT> result() { return result_.Pass(); }
+ virtual void OnMessage(Message* msg) { result_ = std::move(functor_()); }
+ rtc::scoped_ptr<ReturnT> result() { return std::move(result_); }
private:
FunctorT functor_;
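
Pass() was scoped_ptr's pre-C++11 stand-in for move semantics; with std::move from <utility> (now included above), the ownership transfer is spelled the standard way. A minimal sketch of the equivalence:

    rtc::scoped_ptr<int> a(new int(42));
    rtc::scoped_ptr<int> b = std::move(a);  // was: a.Pass(); |a| is now null
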
diff --git a/webrtc/base/messagequeue_unittest.cc b/webrtc/base/messagequeue_unittest.cc
index 871542df28..78024e0b2d 100644
--- a/webrtc/base/messagequeue_unittest.cc
+++ b/webrtc/base/messagequeue_unittest.cc
@@ -16,7 +16,6 @@
#include "webrtc/base/thread.h"
#include "webrtc/base/timeutils.h"
#include "webrtc/base/nullsocketserver.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
using namespace rtc;
diff --git a/webrtc/base/nat_unittest.cc b/webrtc/base/nat_unittest.cc
index e967b29b27..8be1be9f05 100644
--- a/webrtc/base/nat_unittest.cc
+++ b/webrtc/base/nat_unittest.cc
@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <algorithm>
#include <string>
#include "webrtc/base/gunit.h"
@@ -20,7 +21,6 @@
#include "webrtc/base/testclient.h"
#include "webrtc/base/asynctcpsocket.h"
#include "webrtc/base/virtualsocketserver.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
using namespace rtc;
@@ -207,6 +207,12 @@ void TestPhysicalInternal(const SocketAddress& int_addr) {
std::vector<Network*> networks;
network_manager.GetNetworks(&networks);
+ networks.erase(std::remove_if(networks.begin(), networks.end(),
+ [](rtc::Network* network) {
+ return rtc::kDefaultNetworkIgnoreMask &
+ network->type();
+ }),
+ networks.end());
if (networks.empty()) {
LOG(LS_WARNING) << "Not enough network adapters for test.";
return;
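
The added lines are the erase-remove idiom: std::remove_if shifts the elements to keep toward the front and returns the new logical end, and erase() trims the leftover tail. The same idiom on a plain vector:

    #include <algorithm>
    #include <vector>

    std::vector<int> v = {1, 2, 3, 4, 5};
    v.erase(std::remove_if(v.begin(), v.end(),
                           [](int n) { return n % 2 == 0; }),  // drop evens
            v.end());
    // v now holds {1, 3, 5}
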
diff --git a/webrtc/base/natsocketfactory.cc b/webrtc/base/natsocketfactory.cc
index 548a80caa8..0abd2a1b05 100644
--- a/webrtc/base/natsocketfactory.cc
+++ b/webrtc/base/natsocketfactory.cc
@@ -10,6 +10,7 @@
#include "webrtc/base/natsocketfactory.h"
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/natserver.h"
#include "webrtc/base/virtualsocketserver.h"
@@ -270,7 +271,7 @@ class NATSocket : public AsyncSocket, public sigslot::has_slots<> {
// Sends the destination address to the server to tell it to connect.
void SendConnectRequest() {
char buf[kNATEncodedIPv6AddressSize];
- size_t length = PackAddressForNAT(buf, ARRAY_SIZE(buf), remote_addr_);
+ size_t length = PackAddressForNAT(buf, arraysize(buf), remote_addr_);
socket_->Send(buf, length);
}
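
arraysize() is the type-safe successor to the ARRAY_SIZE macro: it deduces the element count from the array type at compile time and refuses to compile on pointers, which sizeof(a)/sizeof(a[0]) silently accepts. Its usual shape (see webrtc/base/arraysize.h for the real definition):

    #include <cstddef>

    // A function taking a reference to an array of N elements and returning a
    // reference to a char array of the same N; sizeof recovers the count.
    template <typename T, size_t N>
    char (&ArraySizeHelper(T (&array)[N]))[N];
    #define arraysize(array) (sizeof(ArraySizeHelper(array)))

    char buf[16];
    static_assert(arraysize(buf) == 16, "bound deduced from the type");
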
diff --git a/webrtc/base/network.cc b/webrtc/base/network.cc
index 879c1e4529..488c475137 100644
--- a/webrtc/base/network.cc
+++ b/webrtc/base/network.cc
@@ -24,23 +24,13 @@
#elif !defined(__native_client__)
#include <net/if.h>
#endif
-#include <sys/socket.h>
-#include <sys/utsname.h>
-#include <sys/ioctl.h>
-#include <unistd.h>
-#include <errno.h>
-
-#if defined(WEBRTC_ANDROID)
-#include "webrtc/base/ifaddrs-android.h"
-#elif !defined(__native_client__)
-#include <ifaddrs.h>
-#endif
-
#endif // WEBRTC_POSIX
#if defined(WEBRTC_WIN)
#include "webrtc/base/win32.h"
#include <Iphlpapi.h>
+#elif !defined(__native_client__)
+#include "webrtc/base/ifaddrs_converter.h"
#endif
#include <stdio.h>
@@ -129,7 +119,8 @@ std::string AdapterTypeToString(AdapterType type) {
}
}
-bool IsIgnoredIPv6(const IPAddress& ip) {
+#if !defined(__native_client__)
+bool IsIgnoredIPv6(const InterfaceAddress& ip) {
if (ip.family() != AF_INET6) {
return false;
}
@@ -146,11 +137,23 @@ bool IsIgnoredIPv6(const IPAddress& ip) {
return true;
}
+ // Ignore deprecated IPv6.
+ if (ip.ipv6_flags() & IPV6_ADDRESS_FLAG_DEPRECATED) {
+ return true;
+ }
+
return false;
}
+#endif // !defined(__native_client__)
} // namespace
+// These addresses are used as the targets to find out the default local address
+// on a multi-homed endpoint. They are actually DNS servers.
+const char kPublicIPv4Host[] = "8.8.8.8";
+const char kPublicIPv6Host[] = "2001:4860:4860::8888";
+const int kPublicPort = 53; // DNS port.
+
std::string MakeNetworkKey(const std::string& name, const IPAddress& prefix,
int prefix_length) {
std::ostringstream ost;
@@ -169,6 +172,10 @@ NetworkManager::EnumerationPermission NetworkManager::enumeration_permission()
return ENUMERATION_ALLOWED;
}
+bool NetworkManager::GetDefaultLocalAddress(int family, IPAddress* addr) const {
+ return false;
+}
+
NetworkManagerBase::NetworkManagerBase()
: enumeration_permission_(NetworkManager::ENUMERATION_ALLOWED),
max_ipv6_networks_(kMaxIPv6Networks),
@@ -191,6 +198,7 @@ void NetworkManagerBase::GetAnyAddressNetworks(NetworkList* networks) {
const rtc::IPAddress ipv4_any_address(INADDR_ANY);
ipv4_any_address_network_.reset(
new rtc::Network("any", "any", ipv4_any_address, 0));
+ ipv4_any_address_network_->set_default_local_address_provider(this);
ipv4_any_address_network_->AddIP(ipv4_any_address);
}
networks->push_back(ipv4_any_address_network_.get());
@@ -200,6 +208,7 @@ void NetworkManagerBase::GetAnyAddressNetworks(NetworkList* networks) {
const rtc::IPAddress ipv6_any_address(in6addr_any);
ipv6_any_address_network_.reset(
new rtc::Network("any", "any", ipv6_any_address, 0));
+ ipv6_any_address_network_->set_default_local_address_provider(this);
ipv6_any_address_network_->AddIP(ipv6_any_address);
}
networks->push_back(ipv6_any_address_network_.get());
@@ -230,20 +239,12 @@ void NetworkManagerBase::MergeNetworkList(const NetworkList& new_networks,
void NetworkManagerBase::MergeNetworkList(const NetworkList& new_networks,
bool* changed,
NetworkManager::Stats* stats) {
+ *changed = false;
// AddressList in this map will track IP addresses for all Networks
// with the same key.
std::map<std::string, AddressList> consolidated_address_list;
NetworkList list(new_networks);
-
- // Result of Network merge. Element in this list should have unique key.
- NetworkList merged_list;
std::sort(list.begin(), list.end(), CompareNetworks);
-
- *changed = false;
-
- if (networks_.size() != list.size())
- *changed = true;
-
// First, build a set of network-keys to the ipaddresses.
for (Network* network : list) {
bool might_add_to_merged_list = false;
@@ -275,6 +276,8 @@ void NetworkManagerBase::MergeNetworkList(const NetworkList& new_networks,
}
// Next, look for existing network objects to re-use.
+ // Result of Network merge. Element in this list should have unique key.
+ NetworkList merged_list;
for (const auto& kv : consolidated_address_list) {
const std::string& key = kv.first;
Network* net = kv.second.net;
@@ -289,17 +292,36 @@ void NetworkManagerBase::MergeNetworkList(const NetworkList& new_networks,
*changed = true;
} else {
// This network exists in the map already. Reset its IP addresses.
- *changed = existing->second->SetIPs(kv.second.ips, *changed);
- merged_list.push_back(existing->second);
- if (existing->second != net) {
+ Network* existing_net = existing->second;
+ *changed = existing_net->SetIPs(kv.second.ips, *changed);
+ merged_list.push_back(existing_net);
+ // If the existing network was not active, networks have changed.
+ if (!existing_net->active()) {
+ *changed = true;
+ }
+ ASSERT(net->active());
+ if (existing_net != net) {
delete net;
}
}
}
- networks_ = merged_list;
+ // It may still happen that the merged list is a subset of |networks_|.
+ // To detect this change, we compare their sizes.
+ if (merged_list.size() != networks_.size()) {
+ *changed = true;
+ }
- // If the network lists changes, we resort it.
+ // If the network list changes, we re-assign |networks_| to the merged list
+ // and re-sort it.
if (*changed) {
+ networks_ = merged_list;
+ // Reset the active states of all networks.
+ for (const auto& kv : networks_map_) {
+ kv.second->set_active(false);
+ }
+ for (Network* network : networks_) {
+ network->set_active(true);
+ }
std::sort(networks_.begin(), networks_.end(), SortNetworks);
// Now network interfaces are sorted, we should set the preference value
// for each of the interfaces we are planning to use.
@@ -321,9 +343,30 @@ void NetworkManagerBase::MergeNetworkList(const NetworkList& new_networks,
}
}
+void NetworkManagerBase::set_default_local_addresses(const IPAddress& ipv4,
+ const IPAddress& ipv6) {
+ if (ipv4.family() == AF_INET) {
+ default_local_ipv4_address_ = ipv4;
+ }
+ if (ipv6.family() == AF_INET6) {
+ default_local_ipv6_address_ = ipv6;
+ }
+}
+
+bool NetworkManagerBase::GetDefaultLocalAddress(int family,
+ IPAddress* ipaddr) const {
+ if (family == AF_INET && !default_local_ipv4_address_.IsNil()) {
+ *ipaddr = default_local_ipv4_address_;
+ return true;
+ } else if (family == AF_INET6 && !default_local_ipv6_address_.IsNil()) {
+ *ipaddr = default_local_ipv6_address_;
+ return true;
+ }
+ return false;
+}
+
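
A short sketch of how a caller consumes the new getter; the boolean return matters because a family may have no known default (for instance when IPv6 is disabled). |manager| is a hypothetical NetworkManager pointer:

    rtc::IPAddress default_v4;
    if (manager->GetDefaultLocalAddress(AF_INET, &default_v4)) {
      // Use |default_v4| when the socket was bound to 0.0.0.0.
    } else {
      // No IPv4 default known; fall back or skip the family.
    }
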
BasicNetworkManager::BasicNetworkManager()
: thread_(NULL), sent_first_update_(false), start_count_(0),
- network_ignore_mask_(kDefaultNetworkIgnoreMask),
ignore_non_default_routes_(false) {
}
@@ -346,49 +389,47 @@ bool BasicNetworkManager::CreateNetworks(bool include_ignored,
#elif defined(WEBRTC_POSIX)
void BasicNetworkManager::ConvertIfAddrs(struct ifaddrs* interfaces,
+ IfAddrsConverter* ifaddrs_converter,
bool include_ignored,
NetworkList* networks) const {
NetworkMap current_networks;
+
for (struct ifaddrs* cursor = interfaces;
cursor != NULL; cursor = cursor->ifa_next) {
IPAddress prefix;
IPAddress mask;
- IPAddress ip;
+ InterfaceAddress ip;
int scope_id = 0;
// Some interfaces may not have address assigned.
- if (!cursor->ifa_addr || !cursor->ifa_netmask)
+ if (!cursor->ifa_addr || !cursor->ifa_netmask) {
continue;
+ }
+ // Skip ones which are down.
+ if (!(cursor->ifa_flags & IFF_RUNNING)) {
+ continue;
+ }
+ // Skip unknown family.
+ if (cursor->ifa_addr->sa_family != AF_INET &&
+ cursor->ifa_addr->sa_family != AF_INET6) {
+ continue;
+ }
+ // Skip IPv6 if not enabled.
+ if (cursor->ifa_addr->sa_family == AF_INET6 && !ipv6_enabled()) {
+ continue;
+ }
+ // Convert to InterfaceAddress.
+ if (!ifaddrs_converter->ConvertIfAddrsToIPAddress(cursor, &ip, &mask)) {
+ continue;
+ }
- switch (cursor->ifa_addr->sa_family) {
- case AF_INET: {
- ip = IPAddress(
- reinterpret_cast<sockaddr_in*>(cursor->ifa_addr)->sin_addr);
- mask = IPAddress(
- reinterpret_cast<sockaddr_in*>(cursor->ifa_netmask)->sin_addr);
- break;
- }
- case AF_INET6: {
- if (ipv6_enabled()) {
- ip = IPAddress(
- reinterpret_cast<sockaddr_in6*>(cursor->ifa_addr)->sin6_addr);
-
- if (IsIgnoredIPv6(ip)) {
- continue;
- }
-
- mask = IPAddress(
- reinterpret_cast<sockaddr_in6*>(cursor->ifa_netmask)->sin6_addr);
- scope_id =
- reinterpret_cast<sockaddr_in6*>(cursor->ifa_addr)->sin6_scope_id;
- break;
- } else {
- continue;
- }
- }
- default: {
+ // Special case for IPv6 address.
+ if (cursor->ifa_addr->sa_family == AF_INET6) {
+ if (IsIgnoredIPv6(ip)) {
continue;
}
+ scope_id =
+ reinterpret_cast<sockaddr_in6*>(cursor->ifa_addr)->sin6_scope_id;
}
int prefix_length = CountIPMaskBits(mask);
@@ -409,14 +450,14 @@ void BasicNetworkManager::ConvertIfAddrs(struct ifaddrs* interfaces,
#endif
// TODO(phoglund): Need to recognize other types as well.
scoped_ptr<Network> network(new Network(cursor->ifa_name,
- cursor->ifa_name,
- prefix,
- prefix_length,
- adapter_type));
+ cursor->ifa_name, prefix,
+ prefix_length, adapter_type));
+ network->set_default_local_address_provider(this);
network->set_scope_id(scope_id);
network->AddIP(ip);
network->set_ignored(IsIgnoredNetwork(*network));
if (include_ignored || !network->ignored()) {
+ current_networks[key] = network.get();
networks->push_back(network.release());
}
} else {
@@ -434,7 +475,9 @@ bool BasicNetworkManager::CreateNetworks(bool include_ignored,
return false;
}
- ConvertIfAddrs(interfaces, include_ignored, networks);
+ rtc::scoped_ptr<IfAddrsConverter> ifaddrs_converter(CreateIfAddrsConverter());
+ ConvertIfAddrs(interfaces, ifaddrs_converter.get(), include_ignored,
+ networks);
freeifaddrs(interfaces);
return true;
@@ -511,14 +554,14 @@ bool BasicNetworkManager::CreateNetworks(bool include_ignored,
PIP_ADAPTER_PREFIX prefixlist = adapter_addrs->FirstPrefix;
std::string name;
std::string description;
-#ifdef _DEBUG
+#if !defined(NDEBUG)
name = ToUtf8(adapter_addrs->FriendlyName,
wcslen(adapter_addrs->FriendlyName));
#endif
description = ToUtf8(adapter_addrs->Description,
wcslen(adapter_addrs->Description));
for (; address; address = address->Next) {
-#ifndef _DEBUG
+#if defined(NDEBUG)
name = rtc::ToString(count);
#endif
@@ -563,16 +606,15 @@ bool BasicNetworkManager::CreateNetworks(bool include_ignored,
// TODO(phoglund): Need to recognize other types as well.
adapter_type = ADAPTER_TYPE_LOOPBACK;
}
- scoped_ptr<Network> network(new Network(name,
- description,
- prefix,
- prefix_length,
- adapter_type));
+ scoped_ptr<Network> network(new Network(name, description, prefix,
+ prefix_length, adapter_type));
+ network->set_default_local_address_provider(this);
network->set_scope_id(scope_id);
network->AddIP(ip);
bool ignored = IsIgnoredNetwork(*network);
network->set_ignored(ignored);
if (include_ignored || !network->ignored()) {
+ current_networks[key] = network.get();
networks->push_back(network.release());
}
} else {
@@ -624,9 +666,6 @@ bool BasicNetworkManager::IsIgnoredNetwork(const Network& network) const {
}
}
- if (network_ignore_mask_ & network.type()) {
- return true;
- }
#if defined(WEBRTC_POSIX)
// Filter out VMware/VirtualBox interfaces, typically named vmnet1,
// vmnet8, or vboxnet0.
@@ -724,6 +763,25 @@ void BasicNetworkManager::OnMessage(Message* msg) {
}
}
+IPAddress BasicNetworkManager::QueryDefaultLocalAddress(int family) const {
+ ASSERT(thread_ == Thread::Current());
+ ASSERT(thread_->socketserver() != nullptr);
+ ASSERT(family == AF_INET || family == AF_INET6);
+
+ scoped_ptr<AsyncSocket> socket(
+ thread_->socketserver()->CreateAsyncSocket(family, SOCK_DGRAM));
+ if (!socket) {
+ return IPAddress();
+ }
+
+ if (!socket->Connect(
+ SocketAddress(family == AF_INET ? kPublicIPv4Host : kPublicIPv6Host,
+ kPublicPort))) {
+ return IPAddress();
+ }
+ return socket->GetLocalAddress().ipaddr();
+}
+
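
QueryDefaultLocalAddress leans on a standard trick: connect() on a UDP socket sends nothing, but it makes the kernel pick a route and bind a local address, which getsockname() then reveals. The same idea in raw BSD sockets, error handling elided:

    #include <arpa/inet.h>
    #include <sys/socket.h>
    #include <unistd.h>

    // Learn the default local IPv4 address without sending any packets.
    int fd = socket(AF_INET, SOCK_DGRAM, 0);
    sockaddr_in remote = {};
    remote.sin_family = AF_INET;
    remote.sin_port = htons(53);  // DNS port, mirroring kPublicPort above
    inet_pton(AF_INET, "8.8.8.8", &remote.sin_addr);
    connect(fd, reinterpret_cast<sockaddr*>(&remote), sizeof(remote));
    sockaddr_in local = {};
    socklen_t len = sizeof(local);
    getsockname(fd, reinterpret_cast<sockaddr*>(&local), &len);
    // local.sin_addr now holds the default local address for that route.
    close(fd);
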
void BasicNetworkManager::UpdateNetworksOnce() {
if (!start_count_)
return;
@@ -735,7 +793,10 @@ void BasicNetworkManager::UpdateNetworksOnce() {
SignalError();
} else {
bool changed;
- MergeNetworkList(list, &changed);
+ NetworkManager::Stats stats;
+ MergeNetworkList(list, &changed, &stats);
+ set_default_local_addresses(QueryDefaultLocalAddress(AF_INET),
+ QueryDefaultLocalAddress(AF_INET6));
if (changed || !sent_first_update_) {
SignalNetworksChanged();
sent_first_update_ = true;
@@ -748,39 +809,45 @@ void BasicNetworkManager::UpdateNetworksContinually() {
thread_->PostDelayed(kNetworksUpdateIntervalMs, this, kUpdateNetworksMessage);
}
-void BasicNetworkManager::DumpNetworks(bool include_ignored) {
+void BasicNetworkManager::DumpNetworks() {
NetworkList list;
- CreateNetworks(include_ignored, &list);
+ GetNetworks(&list);
LOG(LS_INFO) << "NetworkManager detected " << list.size() << " networks:";
for (const Network* network : list) {
- if (!network->ignored() || include_ignored) {
- LOG(LS_INFO) << network->ToString() << ": "
- << network->description()
- << ((network->ignored()) ? ", Ignored" : "");
- }
- }
- // Release the network list created previously.
- // Do this in a seperated for loop for better readability.
- for (Network* network : list) {
- delete network;
+ LOG(LS_INFO) << network->ToString() << ": " << network->description()
+ << ", active ? " << network->active()
+ << ((network->ignored()) ? ", Ignored" : "");
}
}
-Network::Network(const std::string& name, const std::string& desc,
- const IPAddress& prefix, int prefix_length)
- : name_(name), description_(desc), prefix_(prefix),
+Network::Network(const std::string& name,
+ const std::string& desc,
+ const IPAddress& prefix,
+ int prefix_length)
+ : name_(name),
+ description_(desc),
+ prefix_(prefix),
prefix_length_(prefix_length),
- key_(MakeNetworkKey(name, prefix, prefix_length)), scope_id_(0),
- ignored_(false), type_(ADAPTER_TYPE_UNKNOWN), preference_(0) {
-}
-
-Network::Network(const std::string& name, const std::string& desc,
- const IPAddress& prefix, int prefix_length, AdapterType type)
- : name_(name), description_(desc), prefix_(prefix),
+ key_(MakeNetworkKey(name, prefix, prefix_length)),
+ scope_id_(0),
+ ignored_(false),
+ type_(ADAPTER_TYPE_UNKNOWN),
+ preference_(0) {}
+
+Network::Network(const std::string& name,
+ const std::string& desc,
+ const IPAddress& prefix,
+ int prefix_length,
+ AdapterType type)
+ : name_(name),
+ description_(desc),
+ prefix_(prefix),
prefix_length_(prefix_length),
- key_(MakeNetworkKey(name, prefix, prefix_length)), scope_id_(0),
- ignored_(false), type_(type), preference_(0) {
-}
+ key_(MakeNetworkKey(name, prefix, prefix_length)),
+ scope_id_(0),
+ ignored_(false),
+ type_(type),
+ preference_(0) {}
Network::~Network() = default;
diff --git a/webrtc/base/network.h b/webrtc/base/network.h
index ab3a88dc7d..2f2e1b3a45 100644
--- a/webrtc/base/network.h
+++ b/webrtc/base/network.h
@@ -28,6 +28,10 @@ struct ifaddrs;
namespace rtc {
+extern const char kPublicIPv4Host[];
+extern const char kPublicIPv6Host[];
+
+class IfAddrsConverter;
class Network;
class NetworkMonitorInterface;
class Thread;
@@ -51,9 +55,18 @@ const int kDefaultNetworkIgnoreMask = ADAPTER_TYPE_LOOPBACK;
std::string MakeNetworkKey(const std::string& name, const IPAddress& prefix,
int prefix_length);
+class DefaultLocalAddressProvider {
+ public:
+ virtual ~DefaultLocalAddressProvider() = default;
+  // The default local address is the local address used on a multi-homed
+  // endpoint when the any address (0.0.0.0 or ::) is used as the local
+  // address. It's important to check the return value, as an IP family may
+  // not be enabled.
+ virtual bool GetDefaultLocalAddress(int family, IPAddress* ipaddr) const = 0;
+};
+
// Generic network manager interface. It provides list of local
// networks.
-class NetworkManager {
+class NetworkManager : public DefaultLocalAddressProvider {
public:
typedef std::vector<Network*> NetworkList;
@@ -67,7 +80,7 @@ class NetworkManager {
};
NetworkManager();
- virtual ~NetworkManager();
+ ~NetworkManager() override;
// Called when network list is updated.
sigslot::signal0<> SignalNetworksChanged;
@@ -99,8 +112,9 @@ class NetworkManager {
// TODO(guoweis): remove this body when chromium implements this.
virtual void GetAnyAddressNetworks(NetworkList* networks) {}
- // Dumps a list of networks available to LS_INFO.
- virtual void DumpNetworks(bool include_ignored) {}
+ // Dumps the current list of networks in the network manager.
+ virtual void DumpNetworks() {}
+ bool GetDefaultLocalAddress(int family, IPAddress* ipaddr) const override;
struct Stats {
int ipv4_network_count;
@@ -128,6 +142,8 @@ class NetworkManagerBase : public NetworkManager {
EnumerationPermission enumeration_permission() const override;
+ bool GetDefaultLocalAddress(int family, IPAddress* ipaddr) const override;
+
protected:
typedef std::map<std::string, Network*> NetworkMap;
// Updates |networks_| with the networks listed in |list|. If
@@ -146,6 +162,9 @@ class NetworkManagerBase : public NetworkManager {
enumeration_permission_ = state;
}
+ void set_default_local_addresses(const IPAddress& ipv4,
+ const IPAddress& ipv6);
+
private:
friend class NetworkTest;
@@ -159,6 +178,9 @@ class NetworkManagerBase : public NetworkManager {
rtc::scoped_ptr<rtc::Network> ipv4_any_address_network_;
rtc::scoped_ptr<rtc::Network> ipv6_any_address_network_;
+
+ IPAddress default_local_ipv4_address_;
+ IPAddress default_local_ipv6_address_;
};
// Basic implementation of the NetworkManager interface that gets list
@@ -173,8 +195,7 @@ class BasicNetworkManager : public NetworkManagerBase,
void StartUpdating() override;
void StopUpdating() override;
- // Logs the available networks.
- void DumpNetworks(bool include_ignored) override;
+ void DumpNetworks() override;
// MessageHandler interface.
void OnMessage(Message* msg) override;
@@ -186,18 +207,6 @@ class BasicNetworkManager : public NetworkManagerBase,
network_ignore_list_ = list;
}
- // Sets the network types to ignore. For instance, calling this with
- // ADAPTER_TYPE_ETHERNET | ADAPTER_TYPE_LOOPBACK will ignore Ethernet and
- // loopback interfaces. Set to kDefaultNetworkIgnoreMask by default.
- void set_network_ignore_mask(int network_ignore_mask) {
- // TODO(phoglund): implement support for other types than loopback.
- // See https://code.google.com/p/webrtc/issues/detail?id=4288.
- // Then remove set_network_ignore_list.
- network_ignore_mask_ = network_ignore_mask;
- }
-
- int network_ignore_mask() const { return network_ignore_mask_; }
-
#if defined(WEBRTC_LINUX)
// Sets the flag for ignoring non-default routes.
void set_ignore_non_default_routes(bool value) {
@@ -209,6 +218,7 @@ class BasicNetworkManager : public NetworkManagerBase,
#if defined(WEBRTC_POSIX)
// Separated from CreateNetworks for tests.
void ConvertIfAddrs(ifaddrs* interfaces,
+ IfAddrsConverter* converter,
bool include_ignored,
NetworkList* networks) const;
#endif // defined(WEBRTC_POSIX)
@@ -220,6 +230,11 @@ class BasicNetworkManager : public NetworkManagerBase,
// based on the network's property instead of any individual IP.
bool IsIgnoredNetwork(const Network& network) const;
+  // This function connects a UDP socket to a public address and returns the
+  // local address associated with it. Since it binds to the "any" address
+  // internally, it returns the default local address on a multi-homed
+  // endpoint.
+ IPAddress QueryDefaultLocalAddress(int family) const;
+
private:
friend class NetworkTest;
@@ -239,7 +254,6 @@ class BasicNetworkManager : public NetworkManagerBase,
bool sent_first_update_;
int start_count_;
std::vector<std::string> network_ignore_list_;
- int network_ignore_mask_;
bool ignore_non_default_routes_;
scoped_ptr<NetworkMonitorInterface> network_monitor_;
};
@@ -247,13 +261,26 @@ class BasicNetworkManager : public NetworkManagerBase,
// Represents a Unix-type network interface, with a name and single address.
class Network {
public:
- Network(const std::string& name, const std::string& description,
- const IPAddress& prefix, int prefix_length);
-
- Network(const std::string& name, const std::string& description,
- const IPAddress& prefix, int prefix_length, AdapterType type);
+ Network(const std::string& name,
+ const std::string& description,
+ const IPAddress& prefix,
+ int prefix_length);
+
+ Network(const std::string& name,
+ const std::string& description,
+ const IPAddress& prefix,
+ int prefix_length,
+ AdapterType type);
~Network();
+ const DefaultLocalAddressProvider* default_local_address_provider() {
+ return default_local_address_provider_;
+ }
+ void set_default_local_address_provider(
+ const DefaultLocalAddressProvider* provider) {
+ default_local_address_provider_ = provider;
+ }
+
   // Returns the name of the interface this network is associated with.
const std::string& name() const { return name_; }
@@ -319,10 +346,17 @@ class Network {
int preference() const { return preference_; }
void set_preference(int preference) { preference_ = preference; }
+ // When we enumerate networks and find a previously-seen network is missing,
+ // we do not remove it (because it may be used elsewhere). Instead, we mark
+ // it inactive, so that we can detect network changes properly.
+ bool active() const { return active_; }
+ void set_active(bool active) { active_ = active; }
+
// Debugging description of this network
std::string ToString() const;
private:
+ const DefaultLocalAddressProvider* default_local_address_provider_ = nullptr;
std::string name_;
std::string description_;
IPAddress prefix_;
@@ -333,6 +367,7 @@ class Network {
bool ignored_;
AdapterType type_;
int preference_;
+ bool active_ = true;
friend class NetworkManager;
};
diff --git a/webrtc/base/network_unittest.cc b/webrtc/base/network_unittest.cc
index 436222127e..7dd400b996 100644
--- a/webrtc/base/network_unittest.cc
+++ b/webrtc/base/network_unittest.cc
@@ -10,16 +10,14 @@
#include "webrtc/base/network.h"
+#include "webrtc/base/nethelpers.h"
#include "webrtc/base/networkmonitor.h"
#include <vector>
#if defined(WEBRTC_POSIX)
#include <sys/types.h>
-#if !defined(WEBRTC_ANDROID)
-#include <ifaddrs.h>
-#else
-#include "webrtc/base/ifaddrs-android.h"
-#endif
-#endif
+#include <net/if.h>
+#include "webrtc/base/ifaddrs_converter.h"
+#endif // defined(WEBRTC_POSIX)
#include "webrtc/base/gunit.h"
#if defined(WEBRTC_WIN)
#include "webrtc/base/logging.h" // For LOG_GLE
@@ -27,6 +25,8 @@
namespace rtc {
+namespace {
+
class FakeNetworkMonitor : public NetworkMonitorBase {
public:
void Start() override {}
@@ -41,6 +41,8 @@ class FakeNetworkMonitorFactory : public NetworkMonitorFactory {
}
};
+} // namespace
+
class NetworkTest : public testing::Test, public sigslot::has_slots<> {
public:
NetworkTest() : callback_called_(false) {}
@@ -88,7 +90,51 @@ class NetworkTest : public testing::Test, public sigslot::has_slots<> {
struct ifaddrs* interfaces,
bool include_ignored,
NetworkManager::NetworkList* networks) {
- network_manager.ConvertIfAddrs(interfaces, include_ignored, networks);
+ // Use the base IfAddrsConverter for test cases.
+ rtc::scoped_ptr<IfAddrsConverter> ifaddrs_converter(new IfAddrsConverter());
+ network_manager.ConvertIfAddrs(interfaces, ifaddrs_converter.get(),
+ include_ignored, networks);
+ }
+
+ struct sockaddr_in6* CreateIpv6Addr(const std::string& ip_string,
+ uint32_t scope_id) {
+ struct sockaddr_in6* ipv6_addr = new struct sockaddr_in6;
+ memset(ipv6_addr, 0, sizeof(struct sockaddr_in6));
+ ipv6_addr->sin6_family = AF_INET6;
+ ipv6_addr->sin6_scope_id = scope_id;
+ IPAddress ip;
+ IPFromString(ip_string, &ip);
+ ipv6_addr->sin6_addr = ip.ipv6_address();
+ return ipv6_addr;
+ }
+
+ // Pointers created here need to be released via ReleaseIfAddrs.
+ struct ifaddrs* AddIpv6Address(struct ifaddrs* list,
+ char* if_name,
+ const std::string& ipv6_address,
+ const std::string& ipv6_netmask,
+ uint32_t scope_id) {
+ struct ifaddrs* if_addr = new struct ifaddrs;
+ memset(if_addr, 0, sizeof(struct ifaddrs));
+ if_addr->ifa_name = if_name;
+ if_addr->ifa_addr = reinterpret_cast<struct sockaddr*>(
+ CreateIpv6Addr(ipv6_address, scope_id));
+ if_addr->ifa_netmask =
+ reinterpret_cast<struct sockaddr*>(CreateIpv6Addr(ipv6_netmask, 0));
+ if_addr->ifa_next = list;
+ if_addr->ifa_flags = IFF_RUNNING;
+ return if_addr;
+ }
+
+ void ReleaseIfAddrs(struct ifaddrs* list) {
+ struct ifaddrs* if_addr = list;
+ while (if_addr != nullptr) {
+ struct ifaddrs* next_addr = if_addr->ifa_next;
+ delete if_addr->ifa_addr;
+ delete if_addr->ifa_netmask;
+ delete if_addr;
+ if_addr = next_addr;
+ }
}
#endif // defined(WEBRTC_POSIX)
@@ -96,6 +142,12 @@ class NetworkTest : public testing::Test, public sigslot::has_slots<> {
bool callback_called_;
};
+class TestBasicNetworkManager : public BasicNetworkManager {
+ public:
+ using BasicNetworkManager::QueryDefaultLocalAddress;
+ using BasicNetworkManager::set_default_local_addresses;
+};
+
// Test that the Network ctor works properly.
TEST_F(NetworkTest, TestNetworkConstruct) {
Network ipv4_network1("test_eth0", "Test Network Adapter 1",
@@ -107,26 +159,6 @@ TEST_F(NetworkTest, TestNetworkConstruct) {
EXPECT_FALSE(ipv4_network1.ignored());
}
-// Tests that our ignore function works properly.
-TEST_F(NetworkTest, TestIsIgnoredNetworkIgnoresOnlyLoopbackByDefault) {
- Network ipv4_network1("test_eth0", "Test Network Adapter 1",
- IPAddress(0x12345600U), 24, ADAPTER_TYPE_ETHERNET);
- Network ipv4_network2("test_wlan0", "Test Network Adapter 2",
- IPAddress(0x12345601U), 16, ADAPTER_TYPE_WIFI);
- Network ipv4_network3("test_cell0", "Test Network Adapter 3",
- IPAddress(0x12345602U), 16, ADAPTER_TYPE_CELLULAR);
- Network ipv4_network4("test_vpn0", "Test Network Adapter 4",
- IPAddress(0x12345603U), 16, ADAPTER_TYPE_VPN);
- Network ipv4_network5("test_lo", "Test Network Adapter 5",
- IPAddress(0x12345604U), 16, ADAPTER_TYPE_LOOPBACK);
- BasicNetworkManager network_manager;
- EXPECT_FALSE(IsIgnoredNetwork(network_manager, ipv4_network1));
- EXPECT_FALSE(IsIgnoredNetwork(network_manager, ipv4_network2));
- EXPECT_FALSE(IsIgnoredNetwork(network_manager, ipv4_network3));
- EXPECT_FALSE(IsIgnoredNetwork(network_manager, ipv4_network4));
- EXPECT_TRUE(IsIgnoredNetwork(network_manager, ipv4_network5));
-}
-
TEST_F(NetworkTest, TestIsIgnoredNetworkIgnoresIPsStartingWith0) {
Network ipv4_network1("test_eth0", "Test Network Adapter 1",
IPAddress(0x12345600U), 24, ADAPTER_TYPE_ETHERNET);
@@ -137,21 +169,6 @@ TEST_F(NetworkTest, TestIsIgnoredNetworkIgnoresIPsStartingWith0) {
EXPECT_TRUE(IsIgnoredNetwork(network_manager, ipv4_network2));
}
-TEST_F(NetworkTest, TestIsIgnoredNetworkIgnoresNetworksAccordingToIgnoreMask) {
- Network ipv4_network1("test_eth0", "Test Network Adapter 1",
- IPAddress(0x12345600U), 24, ADAPTER_TYPE_ETHERNET);
- Network ipv4_network2("test_wlan0", "Test Network Adapter 2",
- IPAddress(0x12345601U), 16, ADAPTER_TYPE_WIFI);
- Network ipv4_network3("test_cell0", "Test Network Adapter 3",
- IPAddress(0x12345602U), 16, ADAPTER_TYPE_CELLULAR);
- BasicNetworkManager network_manager;
- network_manager.set_network_ignore_mask(
- ADAPTER_TYPE_ETHERNET | ADAPTER_TYPE_LOOPBACK | ADAPTER_TYPE_WIFI);
- EXPECT_TRUE(IsIgnoredNetwork(network_manager, ipv4_network1));
- EXPECT_TRUE(IsIgnoredNetwork(network_manager, ipv4_network2));
- EXPECT_FALSE(IsIgnoredNetwork(network_manager, ipv4_network3));
-}
-
// TODO(phoglund): Remove when ignore list goes away.
TEST_F(NetworkTest, TestIgnoreList) {
Network ignore_me("ignore_me", "Ignore me please!",
@@ -583,10 +600,13 @@ TEST_F(NetworkTest, TestMultiplePublicNetworksOnOneInterfaceMerge) {
}
}
-// Test that DumpNetworks works.
-TEST_F(NetworkTest, TestDumpNetworks) {
+// Test that DumpNetworks does not crash.
+TEST_F(NetworkTest, TestCreateAndDumpNetworks) {
BasicNetworkManager manager;
- manager.DumpNetworks(true);
+ NetworkManager::NetworkList list = GetNetworks(manager, true);
+ bool changed;
+ MergeNetworkList(manager, list, &changed);
+ manager.DumpNetworks();
}
// Test that we can toggle IPv6 on and off.
@@ -693,6 +713,40 @@ TEST_F(NetworkTest, TestConvertIfAddrsNoAddress) {
CallConvertIfAddrs(manager, &list, true, &result);
EXPECT_TRUE(result.empty());
}
+
+// Verify that if there are two addresses on one interface, only one network
+// is generated.
+TEST_F(NetworkTest, TestConvertIfAddrsMultiAddressesOnOneInterface) {
+ char if_name[20] = "rmnet0";
+ ifaddrs* list = nullptr;
+ list = AddIpv6Address(list, if_name, "1000:2000:3000:4000:0:0:0:1",
+ "FFFF:FFFF:FFFF:FFFF::", 0);
+ list = AddIpv6Address(list, if_name, "1000:2000:3000:4000:0:0:0:2",
+ "FFFF:FFFF:FFFF:FFFF::", 0);
+ NetworkManager::NetworkList result;
+ BasicNetworkManager manager;
+ CallConvertIfAddrs(manager, list, true, &result);
+ EXPECT_EQ(1U, result.size());
+ bool changed;
+ // This ensures we release the objects created in CallConvertIfAddrs.
+ MergeNetworkList(manager, result, &changed);
+ ReleaseIfAddrs(list);
+}
+
+TEST_F(NetworkTest, TestConvertIfAddrsNotRunning) {
+ ifaddrs list;
+ memset(&list, 0, sizeof(list));
+ list.ifa_name = const_cast<char*>("test_iface");
+ sockaddr ifa_addr;
+ sockaddr ifa_netmask;
+ list.ifa_addr = &ifa_addr;
+ list.ifa_netmask = &ifa_netmask;
+
+ NetworkManager::NetworkList result;
+ BasicNetworkManager manager;
+ CallConvertIfAddrs(manager, &list, true, &result);
+ EXPECT_TRUE(result.empty());
+}
#endif // defined(WEBRTC_POSIX)
#if defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
@@ -776,6 +830,49 @@ TEST_F(NetworkTest, TestMergeNetworkList) {
EXPECT_EQ(list2[0]->GetIPs()[1], ip2);
}
+// Test that MergeNetworkList successfully detects the change if
+// a network becomes inactive and then active again.
+TEST_F(NetworkTest, TestMergeNetworkListWithInactiveNetworks) {
+ BasicNetworkManager manager;
+ Network network1("test_wifi", "Test Network Adapter 1",
+ IPAddress(0x12345600U), 24);
+ Network network2("test_eth0", "Test Network Adapter 2",
+ IPAddress(0x00010000U), 16);
+ network1.AddIP(IPAddress(0x12345678));
+ network2.AddIP(IPAddress(0x00010004));
+ NetworkManager::NetworkList list;
+ Network* net1 = new Network(network1);
+ list.push_back(net1);
+ bool changed;
+ MergeNetworkList(manager, list, &changed);
+ EXPECT_TRUE(changed);
+ list.clear();
+ manager.GetNetworks(&list);
+ ASSERT_EQ(1U, list.size());
+ EXPECT_EQ(net1, list[0]);
+
+ list.clear();
+ Network* net2 = new Network(network2);
+ list.push_back(net2);
+ MergeNetworkList(manager, list, &changed);
+ EXPECT_TRUE(changed);
+ list.clear();
+ manager.GetNetworks(&list);
+ ASSERT_EQ(1U, list.size());
+ EXPECT_EQ(net2, list[0]);
+
+ // Now network1 is inactive. Try to merge it again.
+ list.clear();
+ list.push_back(new Network(network1));
+ MergeNetworkList(manager, list, &changed);
+ EXPECT_TRUE(changed);
+ list.clear();
+ manager.GetNetworks(&list);
+ ASSERT_EQ(1U, list.size());
+ EXPECT_TRUE(list[0]->active());
+ EXPECT_EQ(net1, list[0]);
+}
+
// Test that the filtering logic follows the defined ruleset in network.h.
TEST_F(NetworkTest, TestIPv6Selection) {
InterfaceAddress ip;
@@ -842,4 +939,35 @@ TEST_F(NetworkTest, TestNetworkMonitoring) {
NetworkMonitorFactory::ReleaseFactory(factory);
}
+TEST_F(NetworkTest, DefaultLocalAddress) {
+ TestBasicNetworkManager manager;
+ manager.StartUpdating();
+ IPAddress ip;
+
+ // GetDefaultLocalAddress should return false when not set.
+ EXPECT_FALSE(manager.GetDefaultLocalAddress(AF_INET, &ip));
+ EXPECT_FALSE(manager.GetDefaultLocalAddress(AF_INET6, &ip));
+
+  // Make sure we can query the default local address when an address of
+  // that family exists.
+ std::vector<Network*> networks;
+ manager.GetNetworks(&networks);
+ for (auto& network : networks) {
+ if (network->GetBestIP().family() == AF_INET) {
+ EXPECT_TRUE(manager.QueryDefaultLocalAddress(AF_INET) != IPAddress());
+ } else if (network->GetBestIP().family() == AF_INET6) {
+ EXPECT_TRUE(manager.QueryDefaultLocalAddress(AF_INET6) != IPAddress());
+ }
+ }
+
+  // GetDefaultLocalAddress should return the valid default address once set.
+ manager.set_default_local_addresses(GetLoopbackIP(AF_INET),
+ GetLoopbackIP(AF_INET6));
+ EXPECT_TRUE(manager.GetDefaultLocalAddress(AF_INET, &ip));
+ EXPECT_EQ(ip, GetLoopbackIP(AF_INET));
+ EXPECT_TRUE(manager.GetDefaultLocalAddress(AF_INET6, &ip));
+ EXPECT_EQ(ip, GetLoopbackIP(AF_INET6));
+ manager.StopUpdating();
+}
+
} // namespace rtc
diff --git a/webrtc/base/nullsocketserver_unittest.cc b/webrtc/base/nullsocketserver_unittest.cc
index 2aa38b490d..4f22c382d8 100644
--- a/webrtc/base/nullsocketserver_unittest.cc
+++ b/webrtc/base/nullsocketserver_unittest.cc
@@ -10,7 +10,6 @@
#include "webrtc/base/gunit.h"
#include "webrtc/base/nullsocketserver.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
namespace rtc {
diff --git a/webrtc/base/objc/NSString+StdString.h b/webrtc/base/objc/NSString+StdString.h
new file mode 100644
index 0000000000..8bf6cc94be
--- /dev/null
+++ b/webrtc/base/objc/NSString+StdString.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#include <string>
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface NSString (StdString)
+
+@property(nonatomic, readonly) std::string stdString;
+
++ (std::string)stdStringForString:(NSString *)nsString;
++ (NSString *)stringForStdString:(const std::string&)stdString;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/base/objc/NSString+StdString.mm b/webrtc/base/objc/NSString+StdString.mm
new file mode 100644
index 0000000000..3210ff0b65
--- /dev/null
+++ b/webrtc/base/objc/NSString+StdString.mm
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "NSString+StdString.h"
+
+@implementation NSString (StdString)
+
+- (std::string)stdString {
+ return [NSString stdStringForString:self];
+}
+
++ (std::string)stdStringForString:(NSString *)nsString {
+ NSData *charData = [nsString dataUsingEncoding:NSUTF8StringEncoding];
+ return std::string(reinterpret_cast<const char *>(charData.bytes),
+ charData.length);
+}
+
++ (NSString *)stringForStdString:(const std::string&)stdString {
+  // std::string may contain a null termination character, so we construct
+  // the NSString from the raw bytes using the explicit length.
+ return [[NSString alloc] initWithBytes:stdString.data()
+ length:stdString.length()
+ encoding:NSUTF8StringEncoding];
+}
+
+@end
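The bytes+length construction above matters because std::string tracks its
size explicitly and may carry embedded NUL bytes that a c_str()-based
conversion would silently truncate. A standalone C++ illustration of the
difference (plain C++, not WebRTC code):

    #include <cassert>
    #include <string>

    int main() {
      // Five bytes, including an embedded '\0' in the middle.
      std::string s("ab\0cd", 5);
      assert(s.size() == 5);                       // Length is preserved.
      assert(std::string(s.c_str()).size() == 2);  // c_str() stops at '\0'.
      return 0;
    }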
diff --git a/webrtc/base/objc/OWNERS b/webrtc/base/objc/OWNERS
new file mode 100644
index 0000000000..cd06158b7f
--- /dev/null
+++ b/webrtc/base/objc/OWNERS
@@ -0,0 +1 @@
+tkchin@webrtc.org
diff --git a/webrtc/base/objc/RTCCameraPreviewView.h b/webrtc/base/objc/RTCCameraPreviewView.h
new file mode 100644
index 0000000000..03e94c29ae
--- /dev/null
+++ b/webrtc/base/objc/RTCCameraPreviewView.h
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <UIKit/UIKit.h>
+
+@class AVCaptureSession;
+@class RTCAVFoundationVideoSource;
+
+/** RTCCameraPreviewView is a view that renders local video from an
+ * AVCaptureSession.
+ */
+@interface RTCCameraPreviewView : UIView
+
+/** The capture session being rendered in the view. The capture session
+ * is assigned to the AVCaptureVideoPreviewLayer asynchronously on the
+ * same queue on which the AVCaptureSession is started and stopped.
+ */
+@property(nonatomic, strong) AVCaptureSession *captureSession;
+
+@end
diff --git a/webrtc/base/objc/RTCCameraPreviewView.m b/webrtc/base/objc/RTCCameraPreviewView.m
new file mode 100644
index 0000000000..5a57483676
--- /dev/null
+++ b/webrtc/base/objc/RTCCameraPreviewView.m
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+#import "webrtc/base/objc/RTCCameraPreviewView.h"
+
+#import <AVFoundation/AVFoundation.h>
+
+#import "webrtc/base/objc/RTCDispatcher.h"
+
+@implementation RTCCameraPreviewView
+
+@synthesize captureSession = _captureSession;
+
++ (Class)layerClass {
+ return [AVCaptureVideoPreviewLayer class];
+}
+
+- (void)setCaptureSession:(AVCaptureSession *)captureSession {
+ if (_captureSession == captureSession) {
+ return;
+ }
+ _captureSession = captureSession;
+ AVCaptureVideoPreviewLayer *previewLayer = [self previewLayer];
+ [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+ block:^{
+ previewLayer.session = captureSession;
+ }];
+}
+
+#pragma mark - Private
+
+- (AVCaptureVideoPreviewLayer *)previewLayer {
+ return (AVCaptureVideoPreviewLayer *)self.layer;
+}
+
+@end
diff --git a/webrtc/base/objc/RTCDispatcher.h b/webrtc/base/objc/RTCDispatcher.h
new file mode 100644
index 0000000000..c32b93d472
--- /dev/null
+++ b/webrtc/base/objc/RTCDispatcher.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+typedef NS_ENUM(NSInteger, RTCDispatcherQueueType) {
+ // Main dispatcher queue.
+ RTCDispatcherTypeMain,
+ // Used for starting/stopping AVCaptureSession, and assigning
+ // capture session to AVCaptureVideoPreviewLayer.
+ RTCDispatcherTypeCaptureSession,
+};
+
+/** Dispatcher that asynchronously dispatches blocks to a specific
+ * shared dispatch queue.
+ */
+@interface RTCDispatcher : NSObject
+
+- (instancetype)init NS_UNAVAILABLE;
+
+/** Dispatch the block asynchronously on the queue for dispatchType.
+ * @param dispatchType The queue type to dispatch on.
+ * @param block The block to dispatch asynchronously.
+ */
++ (void)dispatchAsyncOnType:(RTCDispatcherQueueType)dispatchType
+ block:(dispatch_block_t)block;
+
+@end
diff --git a/webrtc/base/objc/RTCDispatcher.m b/webrtc/base/objc/RTCDispatcher.m
new file mode 100644
index 0000000000..065705a4ae
--- /dev/null
+++ b/webrtc/base/objc/RTCDispatcher.m
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCDispatcher.h"
+
+static dispatch_queue_t kCaptureSessionQueue = nil;
+
+@implementation RTCDispatcher {
+ dispatch_queue_t _captureSessionQueue;
+}
+
++ (void)initialize {
+ static dispatch_once_t onceToken;
+ dispatch_once(&onceToken, ^{
+ kCaptureSessionQueue = dispatch_queue_create(
+ "org.webrtc.RTCDispatcherCaptureSession",
+ DISPATCH_QUEUE_SERIAL);
+ });
+}
+
++ (void)dispatchAsyncOnType:(RTCDispatcherQueueType)dispatchType
+ block:(dispatch_block_t)block {
+ dispatch_queue_t queue = [self dispatchQueueForType:dispatchType];
+ dispatch_async(queue, block);
+}
+
+#pragma mark - Private
+
++ (dispatch_queue_t)dispatchQueueForType:(RTCDispatcherQueueType)dispatchType {
+ switch (dispatchType) {
+ case RTCDispatcherTypeMain:
+ return dispatch_get_main_queue();
+ case RTCDispatcherTypeCaptureSession:
+ return kCaptureSessionQueue;
+ }
+}
+
+@end
+
diff --git a/webrtc/base/objc/RTCLogging.h b/webrtc/base/objc/RTCLogging.h
new file mode 100644
index 0000000000..19fade5cfc
--- /dev/null
+++ b/webrtc/base/objc/RTCLogging.h
@@ -0,0 +1,75 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+// Subset of rtc::LoggingSeverity.
+typedef NS_ENUM(NSInteger, RTCLoggingSeverity) {
+ kRTCLoggingSeverityVerbose,
+ kRTCLoggingSeverityInfo,
+ kRTCLoggingSeverityWarning,
+ kRTCLoggingSeverityError,
+};
+
+#if defined(__cplusplus)
+extern "C" void RTCLogEx(RTCLoggingSeverity severity, NSString* log_string);
+extern "C" void RTCSetMinDebugLogLevel(RTCLoggingSeverity severity);
+extern "C" NSString* RTCFileName(const char* filePath);
+#else
+
+// Wrapper for C++ LOG(sev) macros.
+// Logs the log string to the webrtc logstream for the given severity.
+extern void RTCLogEx(RTCLoggingSeverity severity, NSString* log_string);
+
+// Wrapper for rtc::LogMessage::LogToDebug.
+// Sets the minimum severity to be logged to console.
+extern void RTCSetMinDebugLogLevel(RTCLoggingSeverity severity);
+
+// Returns the filename with the path prefix removed.
+extern NSString* RTCFileName(const char* filePath);
+
+#endif
+
+// Some convenience macros.
+
+#define RTCLogString(format, ...) \
+ [NSString stringWithFormat:@"(%@:%d %s): " format, \
+ RTCFileName(__FILE__), \
+ __LINE__, \
+ __FUNCTION__, \
+ ##__VA_ARGS__]
+
+#define RTCLogFormat(severity, format, ...) \
+ do { \
+ NSString* log_string = RTCLogString(format, ##__VA_ARGS__); \
+ RTCLogEx(severity, log_string); \
+ } while (false)
+
+#define RTCLogVerbose(format, ...) \
+ RTCLogFormat(kRTCLoggingSeverityVerbose, format, ##__VA_ARGS__) \
+
+#define RTCLogInfo(format, ...) \
+ RTCLogFormat(kRTCLoggingSeverityInfo, format, ##__VA_ARGS__) \
+
+#define RTCLogWarning(format, ...) \
+ RTCLogFormat(kRTCLoggingSeverityWarning, format, ##__VA_ARGS__) \
+
+#define RTCLogError(format, ...) \
+ RTCLogFormat(kRTCLoggingSeverityError, format, ##__VA_ARGS__) \
+
+#if !defined(NDEBUG)
+#define RTCLogDebug(format, ...) RTCLogInfo(format, ##__VA_ARGS__)
+#else
+#define RTCLogDebug(format, ...) \
+ do { \
+ } while (false)
+#endif
+
+#define RTCLog(format, ...) RTCLogInfo(format, ##__VA_ARGS__)
diff --git a/webrtc/base/objc/RTCLogging.mm b/webrtc/base/objc/RTCLogging.mm
new file mode 100644
index 0000000000..e9afe725d1
--- /dev/null
+++ b/webrtc/base/objc/RTCLogging.mm
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCLogging.h"
+
+#include "webrtc/base/logging.h"
+
+rtc::LoggingSeverity RTCGetNativeLoggingSeverity(RTCLoggingSeverity severity) {
+ switch (severity) {
+ case kRTCLoggingSeverityVerbose:
+ return rtc::LS_VERBOSE;
+ case kRTCLoggingSeverityInfo:
+ return rtc::LS_INFO;
+ case kRTCLoggingSeverityWarning:
+ return rtc::LS_WARNING;
+ case kRTCLoggingSeverityError:
+ return rtc::LS_ERROR;
+ }
+}
+
+void RTCLogEx(RTCLoggingSeverity severity, NSString* log_string) {
+ if (log_string.length) {
+ const char* utf8_string = log_string.UTF8String;
+ LOG_V(RTCGetNativeLoggingSeverity(severity)) << utf8_string;
+ }
+}
+
+void RTCSetMinDebugLogLevel(RTCLoggingSeverity severity) {
+ rtc::LogMessage::LogToDebug(RTCGetNativeLoggingSeverity(severity));
+}
+
+NSString* RTCFileName(const char* file_path) {
+ NSString* ns_file_path =
+ [[NSString alloc] initWithBytesNoCopy:const_cast<char*>(file_path)
+ length:strlen(file_path)
+ encoding:NSUTF8StringEncoding
+ freeWhenDone:NO];
+ return ns_file_path.lastPathComponent;
+}
+
diff --git a/webrtc/base/openssladapter.cc b/webrtc/base/openssladapter.cc
index c906ebb4b4..1f5fbbc4d1 100644
--- a/webrtc/base/openssladapter.cc
+++ b/webrtc/base/openssladapter.cc
@@ -31,6 +31,7 @@
#include "config.h"
#endif // HAVE_CONFIG_H
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/common.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/openssl.h"
@@ -835,7 +836,7 @@ bool OpenSSLAdapter::SSLPostConnectionCheck(SSL* ssl, const char* host) {
return ok;
}
-#if _DEBUG
+#if !defined(NDEBUG)
// We only use this for tracing and so it is only needed in debug mode
@@ -864,11 +865,11 @@ OpenSSLAdapter::SSLInfoCallback(const SSL* s, int where, int ret) {
}
}
-#endif // _DEBUG
+#endif
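The recurring _DEBUG -> !defined(NDEBUG) change in this patch swaps an
MSVC-specific convention for the standard macro: NDEBUG is what release
builds define (it is also what disables assert()), while _DEBUG is set only
by MSVC's debug CRT. A small sketch of the portable pattern (TraceHandshake
is a hypothetical helper, not part of this code):

    #include <cstdio>

    // The body is active in debug builds on every toolchain, instead of
    // only under Visual C++.
    void TraceHandshake(const char* msg) {
      (void)msg;  // Avoid unused-parameter warnings in release builds.
    #if !defined(NDEBUG)
      std::fprintf(stderr, "ssl trace: %s\n", msg);
    #endif
    }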
int
OpenSSLAdapter::SSLVerifyCallback(int ok, X509_STORE_CTX* store) {
-#if _DEBUG
+#if !defined(NDEBUG)
if (!ok) {
char data[256];
X509* cert = X509_STORE_CTX_get_current_cert(store);
@@ -915,7 +916,7 @@ OpenSSLAdapter::SSLVerifyCallback(int ok, X509_STORE_CTX* store) {
bool OpenSSLAdapter::ConfigureTrustedRootCertificates(SSL_CTX* ctx) {
// Add the root cert that we care about to the SSL context
int count_of_added_certs = 0;
- for (int i = 0; i < ARRAY_SIZE(kSSLCertCertificateList); i++) {
+ for (size_t i = 0; i < arraysize(kSSLCertCertificateList); i++) {
const unsigned char* cert_buffer = kSSLCertCertificateList[i];
size_t cert_buffer_len = kSSLCertCertificateSizeList[i];
X509* cert = d2i_X509(NULL, &cert_buffer,
@@ -949,7 +950,7 @@ OpenSSLAdapter::SetupSSLContext() {
return NULL;
}
-#ifdef _DEBUG
+#if !defined(NDEBUG)
SSL_CTX_set_info_callback(ctx, SSLInfoCallback);
#endif
diff --git a/webrtc/base/openssladapter.h b/webrtc/base/openssladapter.h
index 3dcb1c5645..cdf45e603f 100644
--- a/webrtc/base/openssladapter.h
+++ b/webrtc/base/openssladapter.h
@@ -67,9 +67,9 @@ private:
static bool VerifyServerName(SSL* ssl, const char* host,
bool ignore_bad_cert);
bool SSLPostConnectionCheck(SSL* ssl, const char* host);
-#if _DEBUG
+#if !defined(NDEBUG)
static void SSLInfoCallback(const SSL* s, int where, int ret);
-#endif // !_DEBUG
+#endif
static int SSLVerifyCallback(int ok, X509_STORE_CTX* store);
static VerificationCallback custom_verify_callback_;
friend class OpenSSLStreamAdapter; // for custom_verify_callback_;
diff --git a/webrtc/base/opensslidentity.cc b/webrtc/base/opensslidentity.cc
index feda6744f0..7185571102 100644
--- a/webrtc/base/opensslidentity.cc
+++ b/webrtc/base/opensslidentity.cc
@@ -96,6 +96,7 @@ static X509* MakeCertificate(EVP_PKEY* pkey, const SSLIdentityParams& params) {
X509* x509 = NULL;
BIGNUM* serial_number = NULL;
X509_NAME* name = NULL;
+ time_t epoch_off = 0; // Time offset since epoch.
if ((x509=X509_new()) == NULL)
goto error;
@@ -130,8 +131,8 @@ static X509* MakeCertificate(EVP_PKEY* pkey, const SSLIdentityParams& params) {
!X509_set_issuer_name(x509, name))
goto error;
- if (!X509_gmtime_adj(X509_get_notBefore(x509), params.not_before) ||
- !X509_gmtime_adj(X509_get_notAfter(x509), params.not_after))
+ if (!X509_time_adj(X509_get_notBefore(x509), params.not_before, &epoch_off) ||
+ !X509_time_adj(X509_get_notAfter(x509), params.not_after, &epoch_off))
goto error;
if (!X509_sign(x509, pkey, EVP_sha256()))
@@ -186,7 +187,7 @@ void OpenSSLKeyPair::AddReference() {
#endif
}
-#ifdef _DEBUG
+#if !defined(NDEBUG)
// Print a certificate to the log, for debugging.
static void PrintCert(X509* x509) {
BIO* temp_memory_bio = BIO_new(BIO_s_mem());
@@ -215,7 +216,7 @@ OpenSSLCertificate* OpenSSLCertificate::Generate(
LogSSLErrors("Generating certificate");
return NULL;
}
-#ifdef _DEBUG
+#if !defined(NDEBUG)
PrintCert(x509);
#endif
OpenSSLCertificate* ret = new OpenSSLCertificate(x509);
@@ -373,6 +374,22 @@ void OpenSSLCertificate::AddReference() const {
#endif
}
+// Documented in sslidentity.h.
+int64_t OpenSSLCertificate::CertificateExpirationTime() const {
+ ASN1_TIME* expire_time = X509_get_notAfter(x509_);
+ bool long_format;
+
+ if (expire_time->type == V_ASN1_UTCTIME) {
+ long_format = false;
+ } else if (expire_time->type == V_ASN1_GENERALIZEDTIME) {
+ long_format = true;
+ } else {
+ return -1;
+ }
+
+ return ASN1TimeToSec(expire_time->data, expire_time->length, long_format);
+}
+
OpenSSLIdentity::OpenSSLIdentity(OpenSSLKeyPair* key_pair,
OpenSSLCertificate* certificate)
: key_pair_(key_pair), certificate_(certificate) {
@@ -401,8 +418,9 @@ OpenSSLIdentity* OpenSSLIdentity::Generate(const std::string& common_name,
SSLIdentityParams params;
params.key_params = key_params;
params.common_name = common_name;
- params.not_before = CERTIFICATE_WINDOW;
- params.not_after = CERTIFICATE_LIFETIME;
+ time_t now = time(NULL);
+ params.not_before = now + CERTIFICATE_WINDOW;
+ params.not_after = now + CERTIFICATE_LIFETIME;
return GenerateInternal(params);
}
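The X509_time_adj change above also changes the contract:
X509_gmtime_adj(f, adj) sets f to "now + adj", while X509_time_adj(f, adj, &t)
sets f to "t + adj", so with epoch_off fixed at 0 the not_before/not_after
parameters are now absolute times in seconds since the epoch. That is why
Generate() adds time(NULL) to CERTIFICATE_WINDOW and CERTIFICATE_LIFETIME. A
hedged sketch of the resulting call pattern (error handling omitted;
SetValidity is a hypothetical helper):

    #include <ctime>
    #include <openssl/x509.h>

    // not_before/not_after are absolute epoch times, e.g. time(NULL) + 60.
    void SetValidity(X509* x509, time_t not_before, time_t not_after) {
      time_t epoch_off = 0;  // Base time: the epoch itself.
      X509_time_adj(X509_get_notBefore(x509), not_before, &epoch_off);
      X509_time_adj(X509_get_notAfter(x509), not_after, &epoch_off);
    }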
diff --git a/webrtc/base/opensslidentity.h b/webrtc/base/opensslidentity.h
index f957ef2288..c8aa69a76e 100644
--- a/webrtc/base/opensslidentity.h
+++ b/webrtc/base/opensslidentity.h
@@ -87,6 +87,8 @@ class OpenSSLCertificate : public SSLCertificate {
bool GetSignatureDigestAlgorithm(std::string* algorithm) const override;
bool GetChain(SSLCertChain** chain) const override;
+ int64_t CertificateExpirationTime() const override;
+
private:
void AddReference() const;
diff --git a/webrtc/base/opensslstreamadapter.cc b/webrtc/base/opensslstreamadapter.cc
index 67ed5db4b5..7563f17c56 100644
--- a/webrtc/base/opensslstreamadapter.cc
+++ b/webrtc/base/opensslstreamadapter.cc
@@ -43,17 +43,19 @@ namespace rtc {
#endif
#ifdef HAVE_DTLS_SRTP
-// SRTP cipher suite table
+// SRTP cipher suite table. |internal_name| is used to construct a
+// colon-separated profile string, which is needed by
+// SSL_CTX_set_tlsext_use_srtp().
struct SrtpCipherMapEntry {
- const char* external_name;
const char* internal_name;
+ const int id;
};
// This isn't elegant, but it's better than an external reference
static SrtpCipherMapEntry SrtpCipherMap[] = {
- {CS_AES_CM_128_HMAC_SHA1_80, "SRTP_AES128_CM_SHA1_80"},
- {CS_AES_CM_128_HMAC_SHA1_32, "SRTP_AES128_CM_SHA1_32"},
- {NULL, NULL}};
+ {"SRTP_AES128_CM_SHA1_80", SRTP_AES128_CM_SHA1_80},
+ {"SRTP_AES128_CM_SHA1_32", SRTP_AES128_CM_SHA1_32},
+ {nullptr, 0}};
#endif
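The internal_name fields above end up joined with ':' to form the profile
string handed to SSL_CTX_set_tlsext_use_srtp(), as the rewritten
SetDtlsSrtpCryptoSuites() further down shows. A standalone sketch of that
join (BuildSrtpProfileString is a hypothetical helper):

    #include <string>
    #include <vector>

    struct Entry { const char* internal_name; int id; };

    // Produces e.g. "SRTP_AES128_CM_SHA1_80:SRTP_AES128_CM_SHA1_32" from the
    // requested suite ids, skipping ids with no table entry.
    std::string BuildSrtpProfileString(const Entry* table,
                                       const std::vector<int>& suites) {
      std::string result;
      for (int suite : suites) {
        for (const Entry* e = table; e->internal_name; ++e) {
          if (e->id == suite) {
            if (!result.empty())
              result += ":";
            result += e->internal_name;
            break;
          }
        }
      }
      return result;
    }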
#ifndef OPENSSL_IS_BORINGSSL
@@ -158,10 +160,12 @@ static int kDefaultSslCipher12 =
static int kDefaultSslEcCipher12 =
static_cast<uint16_t>(TLS1_CK_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256);
// Fallback cipher for DTLS 1.2 if hardware-accelerated AES-GCM is unavailable.
+// TODO(davidben): Switch to the standardized CHACHA20_POLY1305 variant when
+// available.
static int kDefaultSslCipher12NoAesGcm =
- static_cast<uint16_t>(TLS1_CK_ECDHE_RSA_CHACHA20_POLY1305);
+ static_cast<uint16_t>(TLS1_CK_ECDHE_RSA_CHACHA20_POLY1305_OLD);
static int kDefaultSslEcCipher12NoAesGcm =
- static_cast<uint16_t>(TLS1_CK_ECDHE_ECDSA_CHACHA20_POLY1305);
+ static_cast<uint16_t>(TLS1_CK_ECDHE_ECDSA_CHACHA20_POLY1305_OLD);
#else // !OPENSSL_IS_BORINGSSL
// OpenSSL sorts differently than BoringSSL, so the default cipher doesn't
// change between TLS 1.0 and TLS 1.2 with the current setup.
@@ -297,12 +301,13 @@ OpenSSLStreamAdapter::OpenSSLStreamAdapter(StreamInterface* stream)
: SSLStreamAdapter(stream),
state_(SSL_NONE),
role_(SSL_CLIENT),
- ssl_read_needs_write_(false), ssl_write_needs_read_(false),
- ssl_(NULL), ssl_ctx_(NULL),
+ ssl_read_needs_write_(false),
+ ssl_write_needs_read_(false),
+ ssl_(NULL),
+ ssl_ctx_(NULL),
custom_verification_succeeded_(false),
ssl_mode_(SSL_MODE_TLS),
- ssl_max_version_(SSL_PROTOCOL_TLS_11) {
-}
+ ssl_max_version_(SSL_PROTOCOL_TLS_12) {}
OpenSSLStreamAdapter::~OpenSSLStreamAdapter() {
Cleanup();
@@ -348,9 +353,9 @@ bool OpenSSLStreamAdapter::SetPeerCertificateDigest(const std::string
return true;
}
-std::string OpenSSLStreamAdapter::GetSslCipherSuiteName(int cipher) {
+std::string OpenSSLStreamAdapter::SslCipherSuiteToName(int cipher_suite) {
#ifdef OPENSSL_IS_BORINGSSL
- const SSL_CIPHER* ssl_cipher = SSL_get_cipher_by_value(cipher);
+ const SSL_CIPHER* ssl_cipher = SSL_get_cipher_by_value(cipher_suite);
if (!ssl_cipher) {
return std::string();
}
@@ -361,7 +366,7 @@ std::string OpenSSLStreamAdapter::GetSslCipherSuiteName(int cipher) {
#else
for (const SslCipherMapEntry* entry = kSslCipherMap; entry->rfc_name;
++entry) {
- if (cipher == entry->openssl_id) {
+ if (cipher_suite == static_cast<int>(entry->openssl_id)) {
return entry->rfc_name;
}
}
@@ -369,7 +374,7 @@ std::string OpenSSLStreamAdapter::GetSslCipherSuiteName(int cipher) {
#endif
}
-bool OpenSSLStreamAdapter::GetSslCipherSuite(int* cipher) {
+bool OpenSSLStreamAdapter::GetSslCipherSuite(int* cipher_suite) {
if (state_ != SSL_CONNECTED)
return false;
@@ -378,7 +383,7 @@ bool OpenSSLStreamAdapter::GetSslCipherSuite(int* cipher) {
return false;
}
- *cipher = static_cast<uint16_t>(SSL_CIPHER_get_id(current_cipher));
+ *cipher_suite = static_cast<uint16_t>(SSL_CIPHER_get_id(current_cipher));
return true;
}
@@ -405,20 +410,20 @@ bool OpenSSLStreamAdapter::ExportKeyingMaterial(const std::string& label,
#endif
}
-bool OpenSSLStreamAdapter::SetDtlsSrtpCiphers(
- const std::vector<std::string>& ciphers) {
+bool OpenSSLStreamAdapter::SetDtlsSrtpCryptoSuites(
+ const std::vector<int>& ciphers) {
#ifdef HAVE_DTLS_SRTP
std::string internal_ciphers;
if (state_ != SSL_NONE)
return false;
- for (std::vector<std::string>::const_iterator cipher = ciphers.begin();
+ for (std::vector<int>::const_iterator cipher = ciphers.begin();
cipher != ciphers.end(); ++cipher) {
bool found = false;
- for (SrtpCipherMapEntry *entry = SrtpCipherMap; entry->internal_name;
+ for (SrtpCipherMapEntry* entry = SrtpCipherMap; entry->internal_name;
++entry) {
- if (*cipher == entry->external_name) {
+ if (*cipher == entry->id) {
found = true;
if (!internal_ciphers.empty())
internal_ciphers += ":";
@@ -443,7 +448,7 @@ bool OpenSSLStreamAdapter::SetDtlsSrtpCiphers(
#endif
}
-bool OpenSSLStreamAdapter::GetDtlsSrtpCipher(std::string* cipher) {
+bool OpenSSLStreamAdapter::GetDtlsSrtpCryptoSuite(int* crypto_suite) {
#ifdef HAVE_DTLS_SRTP
ASSERT(state_ == SSL_CONNECTED);
if (state_ != SSL_CONNECTED)
@@ -455,17 +460,9 @@ bool OpenSSLStreamAdapter::GetDtlsSrtpCipher(std::string* cipher) {
if (!srtp_profile)
return false;
- for (SrtpCipherMapEntry *entry = SrtpCipherMap;
- entry->internal_name; ++entry) {
- if (!strcmp(entry->internal_name, srtp_profile->name)) {
- *cipher = entry->external_name;
- return true;
- }
- }
-
- ASSERT(false); // This should never happen
-
- return false;
+ *crypto_suite = srtp_profile->id;
+ ASSERT(!SrtpCryptoSuiteToName(*crypto_suite).empty());
+ return true;
#else
return false;
#endif
@@ -994,7 +991,7 @@ SSL_CTX* OpenSSLStreamAdapter::SetupSSLContext() {
return NULL;
}
-#ifdef _DEBUG
+#if !defined(NDEBUG)
SSL_CTX_set_info_callback(ctx, OpenSSLAdapter::SSLInfoCallback);
#endif
diff --git a/webrtc/base/opensslstreamadapter.h b/webrtc/base/opensslstreamadapter.h
index 0f3ded9cb4..e57b2a3293 100644
--- a/webrtc/base/opensslstreamadapter.h
+++ b/webrtc/base/opensslstreamadapter.h
@@ -88,7 +88,7 @@ class OpenSSLStreamAdapter : public SSLStreamAdapter {
StreamState GetState() const override;
// TODO(guoweis): Move this away from a static class method.
- static std::string GetSslCipherSuiteName(int cipher);
+ static std::string SslCipherSuiteToName(int crypto_suite);
bool GetSslCipherSuite(int* cipher) override;
@@ -101,8 +101,8 @@ class OpenSSLStreamAdapter : public SSLStreamAdapter {
size_t result_len) override;
// DTLS-SRTP interface
- bool SetDtlsSrtpCiphers(const std::vector<std::string>& ciphers) override;
- bool GetDtlsSrtpCipher(std::string* cipher) override;
+ bool SetDtlsSrtpCryptoSuites(const std::vector<int>& crypto_suites) override;
+ bool GetDtlsSrtpCryptoSuite(int* crypto_suite) override;
// Capabilities interfaces
static bool HaveDtls();
diff --git a/webrtc/base/optional.h b/webrtc/base/optional.h
new file mode 100644
index 0000000000..b8071e6358
--- /dev/null
+++ b/webrtc/base/optional.h
@@ -0,0 +1,139 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_OPTIONAL_H_
+#define WEBRTC_BASE_OPTIONAL_H_
+
+#include <algorithm>
+#include <utility>
+
+#include "webrtc/base/checks.h"
+
+namespace rtc {
+
+// Simple std::experimental::optional-wannabe. It either contains a T or not.
+// In order to keep the implementation simple and portable, this implementation
+// actually contains a (default-constructed) T even when it supposedly doesn't
+// contain a value; use e.g. rtc::scoped_ptr<T> instead if that's too
+// expensive.
+//
+// A moved-from Optional<T> may only be destroyed, and assigned to if T allows
+// being assigned to after having been moved from. Specifically, you may not
+// assume that it just doesn't contain a value anymore.
+//
+// Examples of good places to use Optional:
+//
+// - As a class or struct member, when the member doesn't always have a value:
+// struct Prisoner {
+// std::string name;
+// Optional<int> cell_number; // Empty if not currently incarcerated.
+// };
+//
+// - As a return value for functions that may fail to return a value on all
+// allowed inputs. For example, a function that searches an array might
+// return an Optional<size_t> (the index where it found the element, or
+// nothing if it didn't find it); and a function that parses numbers might
+// return Optional<double> (the parsed number, or nothing if parsing failed).
+//
+// Examples of bad places to use Optional:
+//
+// - As a return value for functions that may fail because of disallowed
+// inputs. For example, a string length function should not return
+// Optional<size_t> so that it can return nothing in case the caller passed
+// it a null pointer; the function should probably use RTC_[D]CHECK instead,
+// and return plain size_t.
+//
+// - As a return value for functions that may fail to return a value on all
+// allowed inputs, but need to tell the caller what went wrong. Returning
+// Optional<double> when parsing a single number as in the example above
+// might make sense, but any larger parse job is probably going to need to
+// tell the caller what the problem was, not just that there was one.
+//
+// TODO(kwiberg): Get rid of this class when the standard library has
+// std::optional (and we're allowed to use it).
+template <typename T>
+class Optional final {
+ public:
+ // Construct an empty Optional.
+ Optional() : has_value_(false) {}
+
+ // Construct an Optional that contains a value.
+ explicit Optional(const T& val) : value_(val), has_value_(true) {}
+ explicit Optional(T&& val) : value_(std::move(val)), has_value_(true) {}
+
+ // Copy and move constructors.
+ // TODO(kwiberg): =default the move constructor when MSVC supports it.
+ Optional(const Optional&) = default;
+ Optional(Optional&& m)
+ : value_(std::move(m.value_)), has_value_(m.has_value_) {}
+
+ // Assignment.
+ // TODO(kwiberg): =default the move assignment op when MSVC supports it.
+ Optional& operator=(const Optional&) = default;
+ Optional& operator=(Optional&& m) {
+ value_ = std::move(m.value_);
+ has_value_ = m.has_value_;
+ return *this;
+ }
+
+ friend void swap(Optional& m1, Optional& m2) {
+ using std::swap;
+ swap(m1.value_, m2.value_);
+ swap(m1.has_value_, m2.has_value_);
+ }
+
+ // Conversion to bool to test if we have a value.
+ explicit operator bool() const { return has_value_; }
+
+ // Dereferencing. Only allowed if we have a value.
+ const T* operator->() const {
+ RTC_DCHECK(has_value_);
+ return &value_;
+ }
+ T* operator->() {
+ RTC_DCHECK(has_value_);
+ return &value_;
+ }
+ const T& operator*() const {
+ RTC_DCHECK(has_value_);
+ return value_;
+ }
+ T& operator*() {
+ RTC_DCHECK(has_value_);
+ return value_;
+ }
+
+ // Dereference with a default value in case we don't have a value.
+ const T& value_or(const T& default_val) const {
+ return has_value_ ? value_ : default_val;
+ }
+
+  // Equality tests. Two Optionals are equal if they contain equivalent
+  // values, or if they're both empty.
+ friend bool operator==(const Optional& m1, const Optional& m2) {
+ return m1.has_value_ && m2.has_value_ ? m1.value_ == m2.value_
+ : m1.has_value_ == m2.has_value_;
+ }
+ friend bool operator!=(const Optional& m1, const Optional& m2) {
+ return m1.has_value_ && m2.has_value_ ? m1.value_ != m2.value_
+ : m1.has_value_ != m2.has_value_;
+ }
+
+ private:
+ // Invariant: Unless *this has been moved from, value_ is default-initialized
+ // (or copied or moved from a default-initialized T) if !has_value_.
+ T value_;
+ bool has_value_;
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_OPTIONAL_H_
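A minimal usage sketch in the "may fail on allowed inputs" style the header's
comment recommends (ParseDouble is a hypothetical example, not a WebRTC
function):

    #include <cstdlib>
    #include <string>

    #include "webrtc/base/optional.h"

    // Returns the parsed number, or an empty Optional if the whole string
    // isn't a valid number.
    rtc::Optional<double> ParseDouble(const std::string& s) {
      char* end = nullptr;
      double d = std::strtod(s.c_str(), &end);
      if (end == s.c_str() || *end != '\0')
        return rtc::Optional<double>();  // Empty: parsing failed.
      return rtc::Optional<double>(d);
    }

    // Callers test for a value before dereferencing:
    //   rtc::Optional<double> v = ParseDouble("3.14");
    //   if (v) { double d = *v; /* use d */ }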
diff --git a/webrtc/base/optional_unittest.cc b/webrtc/base/optional_unittest.cc
new file mode 100644
index 0000000000..eabf091e17
--- /dev/null
+++ b/webrtc/base/optional_unittest.cc
@@ -0,0 +1,489 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <sstream>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/optional.h"
+
+namespace rtc {
+
+namespace {
+
+// Class whose instances log various method calls (constructor, destructor,
+// etc.). Each instance has a unique ID (a simple global sequence number) and
+// an origin ID. When a copy is made, the new object gets a fresh ID but copies
+// the origin ID from the original. When a new Logger is created from scratch,
+// it gets a fresh ID, and the origin ID is the same as the ID (default
+// constructor) or given as an argument (explicit constructor).
+class Logger {
+ public:
+ Logger() : id_(next_id_++), origin_(id_) { Log("default constructor"); }
+ explicit Logger(int origin) : id_(next_id_++), origin_(origin) {
+ Log("explicit constructor");
+ }
+ Logger(const Logger& other) : id_(next_id_++), origin_(other.origin_) {
+ LogFrom("copy constructor", other);
+ }
+ Logger(Logger&& other) : id_(next_id_++), origin_(other.origin_) {
+ LogFrom("move constructor", other);
+ }
+ ~Logger() { Log("destructor"); }
+ Logger& operator=(const Logger& other) {
+ origin_ = other.origin_;
+ LogFrom("operator= copy", other);
+ return *this;
+ }
+ Logger& operator=(Logger&& other) {
+ origin_ = other.origin_;
+ LogFrom("operator= move", other);
+ return *this;
+ }
+ friend void swap(Logger& a, Logger& b) {
+ using std::swap;
+ swap(a.origin_, b.origin_);
+ Log2("swap", a, b);
+ }
+ friend bool operator==(const Logger& a, const Logger& b) {
+ Log2("operator==", a, b);
+ return a.origin_ == b.origin_;
+ }
+ friend bool operator!=(const Logger& a, const Logger& b) {
+ Log2("operator!=", a, b);
+ return a.origin_ != b.origin_;
+ }
+ void Foo() { Log("Foo()"); }
+ void Foo() const { Log("Foo() const"); }
+ static rtc::scoped_ptr<std::vector<std::string>> Setup() {
+ auto s = rtc_make_scoped_ptr(new std::vector<std::string>);
+ Logger::log_ = s.get();
+ Logger::next_id_ = 0;
+ return s;
+ }
+
+ private:
+ int id_;
+ int origin_;
+ static std::vector<std::string>* log_;
+ static int next_id_;
+ void Log(const char* msg) const {
+ std::ostringstream oss;
+ oss << id_ << ':' << origin_ << ". " << msg;
+ log_->push_back(oss.str());
+ }
+ void LogFrom(const char* msg, const Logger& other) const {
+ std::ostringstream oss;
+ oss << id_ << ':' << origin_ << ". " << msg << " (from " << other.id_ << ':'
+ << other.origin_ << ")";
+ log_->push_back(oss.str());
+ }
+ static void Log2(const char* msg, const Logger& a, const Logger& b) {
+ std::ostringstream oss;
+ oss << msg << ' ' << a.id_ << ':' << a.origin_ << ", " << b.id_ << ':'
+ << b.origin_;
+ log_->push_back(oss.str());
+ }
+};
+
+std::vector<std::string>* Logger::log_ = nullptr;
+int Logger::next_id_ = 0;
+
+// Append all the other args to the vector pointed to by the first arg.
+template <typename T>
+void VectorAppend(std::vector<T>* v) {}
+template <typename T, typename... Ts>
+void VectorAppend(std::vector<T>* v, const T& e, Ts... es) {
+ v->push_back(e);
+ VectorAppend(v, es...);
+}
+
+// Creates a vector of strings. Needed because we're not allowed to use
+// std::initializer_list.
+template <typename... Ts>
+std::vector<std::string> V(Ts... es) {
+ std::vector<std::string> strings;
+ VectorAppend(&strings, static_cast<std::string>(es)...);
+ return strings;
+}
+
+} // namespace
+
+TEST(OptionalTest, TestConstructDefault) {
+ auto log = Logger::Setup();
+ {
+ Optional<Logger> x;
+ EXPECT_FALSE(x);
+ }
+ EXPECT_EQ(V("0:0. default constructor", "0:0. destructor"), *log);
+}
+
+TEST(OptionalTest, TestConstructCopyEmpty) {
+ auto log = Logger::Setup();
+ {
+ Optional<Logger> x;
+ EXPECT_FALSE(x);
+ auto y = x;
+ EXPECT_FALSE(y);
+ }
+ EXPECT_EQ(V("0:0. default constructor", "1:0. copy constructor (from 0:0)",
+ "1:0. destructor", "0:0. destructor"),
+ *log);
+}
+
+TEST(OptionalTest, TestConstructCopyFull) {
+ auto log = Logger::Setup();
+ {
+ Logger a;
+ Optional<Logger> x(a);
+ EXPECT_TRUE(x);
+ log->push_back("---");
+ auto y = x;
+ EXPECT_TRUE(y);
+ log->push_back("---");
+ }
+ EXPECT_EQ(V("0:0. default constructor", "1:0. copy constructor (from 0:0)",
+ "---", "2:0. copy constructor (from 1:0)", "---",
+ "2:0. destructor", "1:0. destructor", "0:0. destructor"),
+ *log);
+}
+
+TEST(OptionalTest, TestConstructMoveEmpty) {
+ auto log = Logger::Setup();
+ {
+ Optional<Logger> x;
+ EXPECT_FALSE(x);
+ auto y = std::move(x);
+ EXPECT_FALSE(y);
+ }
+ EXPECT_EQ(V("0:0. default constructor", "1:0. move constructor (from 0:0)",
+ "1:0. destructor", "0:0. destructor"),
+ *log);
+}
+
+TEST(OptionalTest, TestConstructMoveFull) {
+ auto log = Logger::Setup();
+ {
+ Optional<Logger> x(Logger(17));
+ EXPECT_TRUE(x);
+ log->push_back("---");
+ auto y = std::move(x);
+ EXPECT_TRUE(x);
+ EXPECT_TRUE(y);
+ log->push_back("---");
+ }
+ EXPECT_EQ(
+ V("0:17. explicit constructor", "1:17. move constructor (from 0:17)",
+ "0:17. destructor", "---", "2:17. move constructor (from 1:17)", "---",
+ "2:17. destructor", "1:17. destructor"),
+ *log);
+}
+
+TEST(OptionalTest, TestCopyAssignToEmptyFromEmpty) {
+ auto log = Logger::Setup();
+ {
+ Optional<Logger> x, y;
+ x = y;
+ }
+ EXPECT_EQ(
+ V("0:0. default constructor", "1:1. default constructor",
+ "0:1. operator= copy (from 1:1)", "1:1. destructor", "0:1. destructor"),
+ *log);
+}
+
+TEST(OptionalTest, TestCopyAssignToFullFromEmpty) {
+ auto log = Logger::Setup();
+ {
+ Optional<Logger> x(Logger(17));
+ Optional<Logger> y;
+ log->push_back("---");
+ x = y;
+ log->push_back("---");
+ }
+ EXPECT_EQ(
+ V("0:17. explicit constructor", "1:17. move constructor (from 0:17)",
+ "0:17. destructor", "2:2. default constructor", "---",
+ "1:2. operator= copy (from 2:2)", "---", "2:2. destructor",
+ "1:2. destructor"),
+ *log);
+}
+
+TEST(OptionalTest, TestCopyAssignToEmptyFromFull) {
+ auto log = Logger::Setup();
+ {
+ Optional<Logger> x;
+ Optional<Logger> y(Logger(17));
+ log->push_back("---");
+ x = y;
+ log->push_back("---");
+ }
+ EXPECT_EQ(V("0:0. default constructor", "1:17. explicit constructor",
+ "2:17. move constructor (from 1:17)", "1:17. destructor", "---",
+ "0:17. operator= copy (from 2:17)", "---", "2:17. destructor",
+ "0:17. destructor"),
+ *log);
+}
+
+TEST(OptionalTest, TestCopyAssignToFullFromFull) {
+ auto log = Logger::Setup();
+ {
+ Optional<Logger> x(Logger(17));
+ Optional<Logger> y(Logger(42));
+ log->push_back("---");
+ x = y;
+ log->push_back("---");
+ }
+ EXPECT_EQ(
+ V("0:17. explicit constructor", "1:17. move constructor (from 0:17)",
+ "0:17. destructor", "2:42. explicit constructor",
+ "3:42. move constructor (from 2:42)", "2:42. destructor", "---",
+ "1:42. operator= copy (from 3:42)", "---", "3:42. destructor",
+ "1:42. destructor"),
+ *log);
+}
+
+TEST(OptionalTest, TestCopyAssignToEmptyFromT) {
+ auto log = Logger::Setup();
+ {
+ Optional<Logger> x;
+ Logger y(17);
+ log->push_back("---");
+ x = Optional<Logger>(y);
+ log->push_back("---");
+ }
+ EXPECT_EQ(V("0:0. default constructor", "1:17. explicit constructor", "---",
+ "2:17. copy constructor (from 1:17)",
+ "0:17. operator= move (from 2:17)", "2:17. destructor", "---",
+ "1:17. destructor", "0:17. destructor"),
+ *log);
+}
+
+TEST(OptionalTest, TestCopyAssignToFullFromT) {
+ auto log = Logger::Setup();
+ {
+ Optional<Logger> x(Logger(17));
+ Logger y(42);
+ log->push_back("---");
+ x = Optional<Logger>(y);
+ log->push_back("---");
+ }
+ EXPECT_EQ(
+ V("0:17. explicit constructor", "1:17. move constructor (from 0:17)",
+ "0:17. destructor", "2:42. explicit constructor", "---",
+ "3:42. copy constructor (from 2:42)",
+ "1:42. operator= move (from 3:42)", "3:42. destructor", "---",
+ "2:42. destructor", "1:42. destructor"),
+ *log);
+}
+
+TEST(OptionalTest, TestMoveAssignToEmptyFromEmpty) {
+ auto log = Logger::Setup();
+ {
+ Optional<Logger> x, y;
+ x = std::move(y);
+ }
+ EXPECT_EQ(
+ V("0:0. default constructor", "1:1. default constructor",
+ "0:1. operator= move (from 1:1)", "1:1. destructor", "0:1. destructor"),
+ *log);
+}
+
+TEST(OptionalTest, TestMoveAssignToFullFromEmpty) {
+ auto log = Logger::Setup();
+ {
+ Optional<Logger> x(Logger(17));
+ Optional<Logger> y;
+ log->push_back("---");
+ x = std::move(y);
+ log->push_back("---");
+ }
+ EXPECT_EQ(
+ V("0:17. explicit constructor", "1:17. move constructor (from 0:17)",
+ "0:17. destructor", "2:2. default constructor", "---",
+ "1:2. operator= move (from 2:2)", "---", "2:2. destructor",
+ "1:2. destructor"),
+ *log);
+}
+
+TEST(OptionalTest, TestMoveAssignToEmptyFromFull) {
+ auto log = Logger::Setup();
+ {
+ Optional<Logger> x;
+ Optional<Logger> y(Logger(17));
+ log->push_back("---");
+ x = std::move(y);
+ log->push_back("---");
+ }
+ EXPECT_EQ(V("0:0. default constructor", "1:17. explicit constructor",
+ "2:17. move constructor (from 1:17)", "1:17. destructor", "---",
+ "0:17. operator= move (from 2:17)", "---", "2:17. destructor",
+ "0:17. destructor"),
+ *log);
+}
+
+TEST(OptionalTest, TestMoveAssignToFullFromFull) {
+ auto log = Logger::Setup();
+ {
+ Optional<Logger> x(Logger(17));
+ Optional<Logger> y(Logger(42));
+ log->push_back("---");
+ x = std::move(y);
+ log->push_back("---");
+ }
+ EXPECT_EQ(
+ V("0:17. explicit constructor", "1:17. move constructor (from 0:17)",
+ "0:17. destructor", "2:42. explicit constructor",
+ "3:42. move constructor (from 2:42)", "2:42. destructor", "---",
+ "1:42. operator= move (from 3:42)", "---", "3:42. destructor",
+ "1:42. destructor"),
+ *log);
+}
+
+TEST(OptionalTest, TestMoveAssignToEmptyFromT) {
+ auto log = Logger::Setup();
+ {
+ Optional<Logger> x;
+ Logger y(17);
+ log->push_back("---");
+ x = Optional<Logger>(std::move(y));
+ log->push_back("---");
+ }
+ EXPECT_EQ(V("0:0. default constructor", "1:17. explicit constructor", "---",
+ "2:17. move constructor (from 1:17)",
+ "0:17. operator= move (from 2:17)", "2:17. destructor", "---",
+ "1:17. destructor", "0:17. destructor"),
+ *log);
+}
+
+TEST(OptionalTest, TestMoveAssignToFullFromT) {
+ auto log = Logger::Setup();
+ {
+ Optional<Logger> x(Logger(17));
+ Logger y(42);
+ log->push_back("---");
+ x = Optional<Logger>(std::move(y));
+ log->push_back("---");
+ }
+ EXPECT_EQ(
+ V("0:17. explicit constructor", "1:17. move constructor (from 0:17)",
+ "0:17. destructor", "2:42. explicit constructor", "---",
+ "3:42. move constructor (from 2:42)",
+ "1:42. operator= move (from 3:42)", "3:42. destructor", "---",
+ "2:42. destructor", "1:42. destructor"),
+ *log);
+}
+
+TEST(OptionalTest, TestDereference) {
+ auto log = Logger::Setup();
+ {
+ Optional<Logger> x(Logger(42));
+ const auto& y = x;
+ log->push_back("---");
+ x->Foo();
+ y->Foo();
+ std::move(x)->Foo();
+ std::move(y)->Foo();
+ log->push_back("---");
+ (*x).Foo();
+ (*y).Foo();
+ (*std::move(x)).Foo();
+ (*std::move(y)).Foo();
+ log->push_back("---");
+ }
+ EXPECT_EQ(V("0:42. explicit constructor",
+ "1:42. move constructor (from 0:42)", "0:42. destructor", "---",
+ "1:42. Foo()", "1:42. Foo() const", "1:42. Foo()",
+ "1:42. Foo() const", "---", "1:42. Foo()", "1:42. Foo() const",
+ "1:42. Foo()", "1:42. Foo() const", "---", "1:42. destructor"),
+ *log);
+}
+
+TEST(OptionalTest, TestDereferenceWithDefault) {
+ auto log = Logger::Setup();
+ {
+ const Logger a(17), b(42);
+ Optional<Logger> x(a);
+ Optional<Logger> y;
+ log->push_back("-1-");
+ EXPECT_EQ(a, x.value_or(Logger(42)));
+ log->push_back("-2-");
+ EXPECT_EQ(b, y.value_or(Logger(42)));
+ log->push_back("-3-");
+ EXPECT_EQ(a, Optional<Logger>(Logger(17)).value_or(b));
+ log->push_back("-4-");
+ EXPECT_EQ(b, Optional<Logger>().value_or(b));
+ log->push_back("-5-");
+ }
+ EXPECT_EQ(
+ V("0:17. explicit constructor", "1:42. explicit constructor",
+ "2:17. copy constructor (from 0:17)", "3:3. default constructor", "-1-",
+ "4:42. explicit constructor", "operator== 0:17, 2:17",
+ "4:42. destructor", "-2-", "5:42. explicit constructor",
+ "operator== 1:42, 5:42", "5:42. destructor", "-3-",
+ "6:17. explicit constructor", "7:17. move constructor (from 6:17)",
+ "operator== 0:17, 7:17", "7:17. destructor", "6:17. destructor", "-4-",
+ "8:8. default constructor", "operator== 1:42, 1:42", "8:8. destructor",
+ "-5-", "3:3. destructor", "2:17. destructor", "1:42. destructor",
+ "0:17. destructor"),
+ *log);
+}
+
+TEST(OptionalTest, TestEquality) {
+ auto log = Logger::Setup();
+ {
+ Logger a(17), b(42);
+ Optional<Logger> ma1(a), ma2(a), mb(b), me1, me2;
+ log->push_back("---");
+ EXPECT_EQ(ma1, ma1);
+ EXPECT_EQ(ma1, ma2);
+ EXPECT_NE(ma1, mb);
+ EXPECT_NE(ma1, me1);
+ EXPECT_EQ(me1, me1);
+ EXPECT_EQ(me1, me2);
+ log->push_back("---");
+ }
+ EXPECT_EQ(V("0:17. explicit constructor", "1:42. explicit constructor",
+ "2:17. copy constructor (from 0:17)",
+ "3:17. copy constructor (from 0:17)",
+ "4:42. copy constructor (from 1:42)", "5:5. default constructor",
+ "6:6. default constructor", "---", "operator== 2:17, 2:17",
+ "operator== 2:17, 3:17", "operator!= 2:17, 4:42", "---",
+ "6:6. destructor", "5:5. destructor", "4:42. destructor",
+ "3:17. destructor", "2:17. destructor", "1:42. destructor",
+ "0:17. destructor"),
+ *log);
+}
+
+TEST(OptionalTest, TestSwap) {
+ auto log = Logger::Setup();
+ {
+ Logger a(17), b(42);
+ Optional<Logger> x1(a), x2(b), y1(a), y2, z1, z2;
+ log->push_back("---");
+ swap(x1, x2); // Swap full <-> full.
+ swap(y1, y2); // Swap full <-> empty.
+ swap(z1, z2); // Swap empty <-> empty.
+ log->push_back("---");
+ }
+ EXPECT_EQ(V("0:17. explicit constructor", "1:42. explicit constructor",
+ "2:17. copy constructor (from 0:17)",
+ "3:42. copy constructor (from 1:42)",
+ "4:17. copy constructor (from 0:17)", "5:5. default constructor",
+ "6:6. default constructor", "7:7. default constructor", "---",
+ "swap 2:42, 3:17", "swap 4:5, 5:17", "swap 6:7, 7:6", "---",
+ "7:6. destructor", "6:7. destructor", "5:17. destructor",
+ "4:5. destructor", "3:17. destructor", "2:42. destructor",
+ "1:42. destructor", "0:17. destructor"),
+ *log);
+}
+
+} // namespace rtc
diff --git a/webrtc/base/physicalsocketserver.cc b/webrtc/base/physicalsocketserver.cc
index 86abcf279b..3e454527ca 100644
--- a/webrtc/base/physicalsocketserver.cc
+++ b/webrtc/base/physicalsocketserver.cc
@@ -39,11 +39,11 @@
#include <algorithm>
#include <map>
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/basictypes.h"
#include "webrtc/base/byteorder.h"
#include "webrtc/base/common.h"
#include "webrtc/base/logging.h"
-#include "webrtc/base/nethelpers.h"
#include "webrtc/base/physicalsocketserver.h"
#include "webrtc/base/timeutils.h"
#include "webrtc/base/winping.h"
@@ -96,463 +96,669 @@ static const int ICMP_HEADER_SIZE = 8u;
static const int ICMP_PING_TIMEOUT_MILLIS = 10000u;
#endif
-class PhysicalSocket : public AsyncSocket, public sigslot::has_slots<> {
- public:
- PhysicalSocket(PhysicalSocketServer* ss, SOCKET s = INVALID_SOCKET)
- : ss_(ss), s_(s), enabled_events_(0), error_(0),
- state_((s == INVALID_SOCKET) ? CS_CLOSED : CS_CONNECTED),
- resolver_(NULL) {
+PhysicalSocket::PhysicalSocket(PhysicalSocketServer* ss, SOCKET s)
+ : ss_(ss), s_(s), enabled_events_(0), error_(0),
+ state_((s == INVALID_SOCKET) ? CS_CLOSED : CS_CONNECTED),
+ resolver_(nullptr) {
#if defined(WEBRTC_WIN)
- // EnsureWinsockInit() ensures that winsock is initialized. The default
- // version of this function doesn't do anything because winsock is
-  // initialized by the constructor of a static object. If necessary, libjingle
- // users can link it with a different version of this function by replacing
- // win32socketinit.cc. See win32socketinit.cc for more details.
- EnsureWinsockInit();
+ // EnsureWinsockInit() ensures that winsock is initialized. The default
+ // version of this function doesn't do anything because winsock is
+  // initialized by the constructor of a static object. If necessary, libjingle
+ // users can link it with a different version of this function by replacing
+ // win32socketinit.cc. See win32socketinit.cc for more details.
+ EnsureWinsockInit();
#endif
- if (s_ != INVALID_SOCKET) {
- enabled_events_ = DE_READ | DE_WRITE;
+ if (s_ != INVALID_SOCKET) {
+ enabled_events_ = DE_READ | DE_WRITE;
- int type = SOCK_STREAM;
- socklen_t len = sizeof(type);
- VERIFY(0 == getsockopt(s_, SOL_SOCKET, SO_TYPE, (SockOptArg)&type, &len));
- udp_ = (SOCK_DGRAM == type);
- }
+ int type = SOCK_STREAM;
+ socklen_t len = sizeof(type);
+ VERIFY(0 == getsockopt(s_, SOL_SOCKET, SO_TYPE, (SockOptArg)&type, &len));
+ udp_ = (SOCK_DGRAM == type);
}
+}
- ~PhysicalSocket() override {
- Close();
- }
+PhysicalSocket::~PhysicalSocket() {
+ Close();
+}
- // Creates the underlying OS socket (same as the "socket" function).
- virtual bool Create(int family, int type) {
- Close();
- s_ = ::socket(family, type, 0);
- udp_ = (SOCK_DGRAM == type);
- UpdateLastError();
- if (udp_)
- enabled_events_ = DE_READ | DE_WRITE;
- return s_ != INVALID_SOCKET;
- }
-
- SocketAddress GetLocalAddress() const override {
- sockaddr_storage addr_storage = {0};
- socklen_t addrlen = sizeof(addr_storage);
- sockaddr* addr = reinterpret_cast<sockaddr*>(&addr_storage);
- int result = ::getsockname(s_, addr, &addrlen);
- SocketAddress address;
- if (result >= 0) {
- SocketAddressFromSockAddrStorage(addr_storage, &address);
- } else {
- LOG(LS_WARNING) << "GetLocalAddress: unable to get local addr, socket="
- << s_;
- }
- return address;
- }
+bool PhysicalSocket::Create(int family, int type) {
+ Close();
+ s_ = ::socket(family, type, 0);
+ udp_ = (SOCK_DGRAM == type);
+ UpdateLastError();
+ if (udp_)
+ enabled_events_ = DE_READ | DE_WRITE;
+ return s_ != INVALID_SOCKET;
+}
- SocketAddress GetRemoteAddress() const override {
- sockaddr_storage addr_storage = {0};
- socklen_t addrlen = sizeof(addr_storage);
- sockaddr* addr = reinterpret_cast<sockaddr*>(&addr_storage);
- int result = ::getpeername(s_, addr, &addrlen);
- SocketAddress address;
- if (result >= 0) {
- SocketAddressFromSockAddrStorage(addr_storage, &address);
- } else {
- LOG(LS_WARNING) << "GetRemoteAddress: unable to get remote addr, socket="
- << s_;
- }
- return address;
+SocketAddress PhysicalSocket::GetLocalAddress() const {
+ sockaddr_storage addr_storage = {0};
+ socklen_t addrlen = sizeof(addr_storage);
+ sockaddr* addr = reinterpret_cast<sockaddr*>(&addr_storage);
+ int result = ::getsockname(s_, addr, &addrlen);
+ SocketAddress address;
+ if (result >= 0) {
+ SocketAddressFromSockAddrStorage(addr_storage, &address);
+ } else {
+ LOG(LS_WARNING) << "GetLocalAddress: unable to get local addr, socket="
+ << s_;
}
+ return address;
+}
- int Bind(const SocketAddress& bind_addr) override {
- sockaddr_storage addr_storage;
- size_t len = bind_addr.ToSockAddrStorage(&addr_storage);
- sockaddr* addr = reinterpret_cast<sockaddr*>(&addr_storage);
- int err = ::bind(s_, addr, static_cast<int>(len));
- UpdateLastError();
-#ifdef _DEBUG
- if (0 == err) {
- dbg_addr_ = "Bound @ ";
- dbg_addr_.append(GetLocalAddress().ToString());
- }
-#endif // _DEBUG
- return err;
+SocketAddress PhysicalSocket::GetRemoteAddress() const {
+ sockaddr_storage addr_storage = {0};
+ socklen_t addrlen = sizeof(addr_storage);
+ sockaddr* addr = reinterpret_cast<sockaddr*>(&addr_storage);
+ int result = ::getpeername(s_, addr, &addrlen);
+ SocketAddress address;
+ if (result >= 0) {
+ SocketAddressFromSockAddrStorage(addr_storage, &address);
+ } else {
+ LOG(LS_WARNING) << "GetRemoteAddress: unable to get remote addr, socket="
+ << s_;
}
+ return address;
+}
- int Connect(const SocketAddress& addr) override {
- // TODO: Implicit creation is required to reconnect...
- // ...but should we make it more explicit?
- if (state_ != CS_CLOSED) {
- SetError(EALREADY);
- return SOCKET_ERROR;
- }
- if (addr.IsUnresolved()) {
- LOG(LS_VERBOSE) << "Resolving addr in PhysicalSocket::Connect";
- resolver_ = new AsyncResolver();
- resolver_->SignalDone.connect(this, &PhysicalSocket::OnResolveResult);
- resolver_->Start(addr);
- state_ = CS_CONNECTING;
- return 0;
- }
-
- return DoConnect(addr);
+int PhysicalSocket::Bind(const SocketAddress& bind_addr) {
+ sockaddr_storage addr_storage;
+ size_t len = bind_addr.ToSockAddrStorage(&addr_storage);
+ sockaddr* addr = reinterpret_cast<sockaddr*>(&addr_storage);
+ int err = ::bind(s_, addr, static_cast<int>(len));
+ UpdateLastError();
+#if !defined(NDEBUG)
+ if (0 == err) {
+ dbg_addr_ = "Bound @ ";
+ dbg_addr_.append(GetLocalAddress().ToString());
}
+#endif
+ return err;
+}
- int DoConnect(const SocketAddress& connect_addr) {
- if ((s_ == INVALID_SOCKET) &&
- !Create(connect_addr.family(), SOCK_STREAM)) {
- return SOCKET_ERROR;
- }
- sockaddr_storage addr_storage;
- size_t len = connect_addr.ToSockAddrStorage(&addr_storage);
- sockaddr* addr = reinterpret_cast<sockaddr*>(&addr_storage);
- int err = ::connect(s_, addr, static_cast<int>(len));
- UpdateLastError();
- if (err == 0) {
- state_ = CS_CONNECTED;
- } else if (IsBlockingError(GetError())) {
- state_ = CS_CONNECTING;
- enabled_events_ |= DE_CONNECT;
- } else {
- return SOCKET_ERROR;
- }
-
- enabled_events_ |= DE_READ | DE_WRITE;
+int PhysicalSocket::Connect(const SocketAddress& addr) {
+ // TODO(pthatcher): Implicit creation is required to reconnect...
+ // ...but should we make it more explicit?
+ if (state_ != CS_CLOSED) {
+ SetError(EALREADY);
+ return SOCKET_ERROR;
+ }
+ if (addr.IsUnresolvedIP()) {
+ LOG(LS_VERBOSE) << "Resolving addr in PhysicalSocket::Connect";
+ resolver_ = new AsyncResolver();
+ resolver_->SignalDone.connect(this, &PhysicalSocket::OnResolveResult);
+ resolver_->Start(addr);
+ state_ = CS_CONNECTING;
return 0;
}
- int GetError() const override {
- CritScope cs(&crit_);
- return error_;
- }
+ return DoConnect(addr);
+}
- void SetError(int error) override {
- CritScope cs(&crit_);
- error_ = error;
+int PhysicalSocket::DoConnect(const SocketAddress& connect_addr) {
+ if ((s_ == INVALID_SOCKET) &&
+ !Create(connect_addr.family(), SOCK_STREAM)) {
+ return SOCKET_ERROR;
+ }
+ sockaddr_storage addr_storage;
+ size_t len = connect_addr.ToSockAddrStorage(&addr_storage);
+ sockaddr* addr = reinterpret_cast<sockaddr*>(&addr_storage);
+ int err = ::connect(s_, addr, static_cast<int>(len));
+ UpdateLastError();
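+ // On a nonblocking socket, ::connect usually returns -1 with a blocking
+ // error (e.g. EINPROGRESS) while the handshake completes in the background;
+ // that case means "still connecting", not failure.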
+ if (err == 0) {
+ state_ = CS_CONNECTED;
+ } else if (IsBlockingError(GetError())) {
+ state_ = CS_CONNECTING;
+ enabled_events_ |= DE_CONNECT;
+ } else {
+ return SOCKET_ERROR;
}
- ConnState GetState() const override { return state_; }
+ enabled_events_ |= DE_READ | DE_WRITE;
+ return 0;
+}
- int GetOption(Option opt, int* value) override {
- int slevel;
- int sopt;
- if (TranslateOption(opt, &slevel, &sopt) == -1)
- return -1;
- socklen_t optlen = sizeof(*value);
- int ret = ::getsockopt(s_, slevel, sopt, (SockOptArg)value, &optlen);
- if (ret != -1 && opt == OPT_DONTFRAGMENT) {
+int PhysicalSocket::GetError() const {
+ CritScope cs(&crit_);
+ return error_;
+}
+
+void PhysicalSocket::SetError(int error) {
+ CritScope cs(&crit_);
+ error_ = error;
+}
+
+AsyncSocket::ConnState PhysicalSocket::GetState() const {
+ return state_;
+}
+
+int PhysicalSocket::GetOption(Option opt, int* value) {
+ int slevel;
+ int sopt;
+ if (TranslateOption(opt, &slevel, &sopt) == -1)
+ return -1;
+ socklen_t optlen = sizeof(*value);
+ int ret = ::getsockopt(s_, slevel, sopt, (SockOptArg)value, &optlen);
+ if (ret != -1 && opt == OPT_DONTFRAGMENT) {
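+ // Linux has no direct don't-fragment flag; OPT_DONTFRAGMENT is expressed
+ // via the path-MTU discovery mode, so translate IP_PMTUDISC_* back to a
+ // boolean here.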
#if defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
- *value = (*value != IP_PMTUDISC_DONT) ? 1 : 0;
+ *value = (*value != IP_PMTUDISC_DONT) ? 1 : 0;
#endif
- }
- return ret;
}
+ return ret;
+}
- int SetOption(Option opt, int value) override {
- int slevel;
- int sopt;
- if (TranslateOption(opt, &slevel, &sopt) == -1)
- return -1;
- if (opt == OPT_DONTFRAGMENT) {
+int PhysicalSocket::SetOption(Option opt, int value) {
+ int slevel;
+ int sopt;
+ if (TranslateOption(opt, &slevel, &sopt) == -1)
+ return -1;
+ if (opt == OPT_DONTFRAGMENT) {
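+ // Mirror of GetOption(): on Linux the boolean is expressed via the
+ // path-MTU discovery mode.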
#if defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
- value = (value) ? IP_PMTUDISC_DO : IP_PMTUDISC_DONT;
+ value = (value) ? IP_PMTUDISC_DO : IP_PMTUDISC_DONT;
#endif
- }
- return ::setsockopt(s_, slevel, sopt, (SockOptArg)&value, sizeof(value));
}
+ return ::setsockopt(s_, slevel, sopt, (SockOptArg)&value, sizeof(value));
+}
- int Send(const void* pv, size_t cb) override {
- int sent = ::send(s_, reinterpret_cast<const char *>(pv), (int)cb,
+int PhysicalSocket::Send(const void* pv, size_t cb) {
+ int sent = ::send(s_, reinterpret_cast<const char *>(pv), (int)cb,
#if defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
- // Suppress SIGPIPE. Without this, attempting to send on a socket whose
- // other end is closed will result in a SIGPIPE signal being raised to
- // our process, which by default will terminate the process, which we
- // don't want. By specifying this flag, we'll just get the error EPIPE
- // instead and can handle the error gracefully.
- MSG_NOSIGNAL
+ // Suppress SIGPIPE. Without this, attempting to send on a socket whose
+ // other end is closed will result in a SIGPIPE signal being raised to
+ // our process, which by default will terminate the process, which we
+ // don't want. By specifying this flag, we'll just get the error EPIPE
+ // instead and can handle the error gracefully.
+ MSG_NOSIGNAL
#else
- 0
+ 0
#endif
- );
- UpdateLastError();
- MaybeRemapSendError();
- // We have seen minidumps where this may be false.
- ASSERT(sent <= static_cast<int>(cb));
- if ((sent < 0) && IsBlockingError(GetError())) {
- enabled_events_ |= DE_WRITE;
- }
- return sent;
- }
+ );
+ UpdateLastError();
+ MaybeRemapSendError();
+ // We have seen minidumps where this may be false.
+ ASSERT(sent <= static_cast<int>(cb));
+ if ((sent < 0) && IsBlockingError(GetError())) {
+ enabled_events_ |= DE_WRITE;
+ }
+ return sent;
+}
- int SendTo(const void* buffer,
- size_t length,
- const SocketAddress& addr) override {
- sockaddr_storage saddr;
- size_t len = addr.ToSockAddrStorage(&saddr);
- int sent = ::sendto(
- s_, static_cast<const char *>(buffer), static_cast<int>(length),
+int PhysicalSocket::SendTo(const void* buffer,
+ size_t length,
+ const SocketAddress& addr) {
+ sockaddr_storage saddr;
+ size_t len = addr.ToSockAddrStorage(&saddr);
+ int sent = ::sendto(
+ s_, static_cast<const char *>(buffer), static_cast<int>(length),
#if defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
- // Suppress SIGPIPE. See above for explanation.
- MSG_NOSIGNAL,
+ // Suppress SIGPIPE. See above for explanation.
+ MSG_NOSIGNAL,
#else
- 0,
+ 0,
#endif
- reinterpret_cast<sockaddr*>(&saddr), static_cast<int>(len));
- UpdateLastError();
- MaybeRemapSendError();
- // We have seen minidumps where this may be false.
- ASSERT(sent <= static_cast<int>(length));
- if ((sent < 0) && IsBlockingError(GetError())) {
- enabled_events_ |= DE_WRITE;
- }
- return sent;
- }
-
- int Recv(void* buffer, size_t length) override {
- int received = ::recv(s_, static_cast<char*>(buffer),
- static_cast<int>(length), 0);
- if ((received == 0) && (length != 0)) {
- // Note: on graceful shutdown, recv can return 0. In this case, we
- // pretend it is blocking, and then signal close, so that simplifying
- // assumptions can be made about Recv.
- LOG(LS_WARNING) << "EOF from socket; deferring close event";
- // Must turn this back on so that the select() loop will notice the close
- // event.
- enabled_events_ |= DE_READ;
- SetError(EWOULDBLOCK);
- return SOCKET_ERROR;
- }
- UpdateLastError();
- int error = GetError();
- bool success = (received >= 0) || IsBlockingError(error);
- if (udp_ || success) {
- enabled_events_ |= DE_READ;
- }
- if (!success) {
- LOG_F(LS_VERBOSE) << "Error = " << error;
- }
- return received;
- }
+ reinterpret_cast<sockaddr*>(&saddr), static_cast<int>(len));
+ UpdateLastError();
+ MaybeRemapSendError();
+ // We have seen minidumps where this may be false.
+ ASSERT(sent <= static_cast<int>(length));
+ if ((sent < 0) && IsBlockingError(GetError())) {
+ enabled_events_ |= DE_WRITE;
+ }
+ return sent;
+}
- int RecvFrom(void* buffer, size_t length, SocketAddress* out_addr) override {
- sockaddr_storage addr_storage;
- socklen_t addr_len = sizeof(addr_storage);
- sockaddr* addr = reinterpret_cast<sockaddr*>(&addr_storage);
- int received = ::recvfrom(s_, static_cast<char*>(buffer),
- static_cast<int>(length), 0, addr, &addr_len);
- UpdateLastError();
- if ((received >= 0) && (out_addr != NULL))
- SocketAddressFromSockAddrStorage(addr_storage, out_addr);
- int error = GetError();
- bool success = (received >= 0) || IsBlockingError(error);
- if (udp_ || success) {
- enabled_events_ |= DE_READ;
- }
- if (!success) {
- LOG_F(LS_VERBOSE) << "Error = " << error;
- }
- return received;
- }
+int PhysicalSocket::Recv(void* buffer, size_t length) {
+ int received = ::recv(s_, static_cast<char*>(buffer),
+ static_cast<int>(length), 0);
+ if ((received == 0) && (length != 0)) {
+ // Note: on graceful shutdown, recv can return 0. In this case, we
+ // pretend it is blocking, and then signal close, so that simplifying
+ // assumptions can be made about Recv.
+ LOG(LS_WARNING) << "EOF from socket; deferring close event";
+ // Must turn this back on so that the select() loop will notice the close
+ // event.
+ enabled_events_ |= DE_READ;
+ SetError(EWOULDBLOCK);
+ return SOCKET_ERROR;
+ }
+ UpdateLastError();
+ int error = GetError();
+ bool success = (received >= 0) || IsBlockingError(error);
+ if (udp_ || success) {
+ enabled_events_ |= DE_READ;
+ }
+ if (!success) {
+ LOG_F(LS_VERBOSE) << "Error = " << error;
+ }
+ return received;
+}
- int Listen(int backlog) override {
- int err = ::listen(s_, backlog);
- UpdateLastError();
- if (err == 0) {
- state_ = CS_CONNECTING;
- enabled_events_ |= DE_ACCEPT;
-#ifdef _DEBUG
- dbg_addr_ = "Listening @ ";
- dbg_addr_.append(GetLocalAddress().ToString());
-#endif // _DEBUG
- }
- return err;
- }
+int PhysicalSocket::RecvFrom(void* buffer,
+ size_t length,
+ SocketAddress* out_addr) {
+ sockaddr_storage addr_storage;
+ socklen_t addr_len = sizeof(addr_storage);
+ sockaddr* addr = reinterpret_cast<sockaddr*>(&addr_storage);
+ int received = ::recvfrom(s_, static_cast<char*>(buffer),
+ static_cast<int>(length), 0, addr, &addr_len);
+ UpdateLastError();
+ if ((received >= 0) && (out_addr != nullptr))
+ SocketAddressFromSockAddrStorage(addr_storage, out_addr);
+ int error = GetError();
+ bool success = (received >= 0) || IsBlockingError(error);
+ if (udp_ || success) {
+ enabled_events_ |= DE_READ;
+ }
+ if (!success) {
+ LOG_F(LS_VERBOSE) << "Error = " << error;
+ }
+ return received;
+}
- AsyncSocket* Accept(SocketAddress* out_addr) override {
- sockaddr_storage addr_storage;
- socklen_t addr_len = sizeof(addr_storage);
- sockaddr* addr = reinterpret_cast<sockaddr*>(&addr_storage);
- SOCKET s = ::accept(s_, addr, &addr_len);
- UpdateLastError();
- if (s == INVALID_SOCKET)
- return NULL;
+int PhysicalSocket::Listen(int backlog) {
+ int err = ::listen(s_, backlog);
+ UpdateLastError();
+ if (err == 0) {
+ state_ = CS_CONNECTING;
enabled_events_ |= DE_ACCEPT;
- if (out_addr != NULL)
- SocketAddressFromSockAddrStorage(addr_storage, out_addr);
- return ss_->WrapSocket(s);
+#if !defined(NDEBUG)
+ dbg_addr_ = "Listening @ ";
+ dbg_addr_.append(GetLocalAddress().ToString());
+#endif
}
+ return err;
+}
- int Close() override {
- if (s_ == INVALID_SOCKET)
- return 0;
- int err = ::closesocket(s_);
- UpdateLastError();
- s_ = INVALID_SOCKET;
- state_ = CS_CLOSED;
- enabled_events_ = 0;
- if (resolver_) {
- resolver_->Destroy(false);
- resolver_ = NULL;
- }
- return err;
- }
+AsyncSocket* PhysicalSocket::Accept(SocketAddress* out_addr) {
+ // Always re-subscribe DE_ACCEPT to make sure new incoming connections will
+ // trigger an event even if DoAccept returns an error here.
+ enabled_events_ |= DE_ACCEPT;
+ sockaddr_storage addr_storage;
+ socklen_t addr_len = sizeof(addr_storage);
+ sockaddr* addr = reinterpret_cast<sockaddr*>(&addr_storage);
+ SOCKET s = DoAccept(s_, addr, &addr_len);
+ UpdateLastError();
+ if (s == INVALID_SOCKET)
+ return nullptr;
+ if (out_addr != nullptr)
+ SocketAddressFromSockAddrStorage(addr_storage, out_addr);
+ return ss_->WrapSocket(s);
+}
- int EstimateMTU(uint16_t* mtu) override {
- SocketAddress addr = GetRemoteAddress();
- if (addr.IsAnyIP()) {
- SetError(ENOTCONN);
- return -1;
- }
+int PhysicalSocket::Close() {
+ if (s_ == INVALID_SOCKET)
+ return 0;
+ int err = ::closesocket(s_);
+ UpdateLastError();
+ s_ = INVALID_SOCKET;
+ state_ = CS_CLOSED;
+ enabled_events_ = 0;
+ if (resolver_) {
+ resolver_->Destroy(false);
+ resolver_ = nullptr;
+ }
+ return err;
+}
+
+int PhysicalSocket::EstimateMTU(uint16_t* mtu) {
+ SocketAddress addr = GetRemoteAddress();
+ if (addr.IsAnyIP()) {
+ SetError(ENOTCONN);
+ return -1;
+ }
#if defined(WEBRTC_WIN)
- // Gets the interface MTU (TTL=1) for the interface used to reach |addr|.
- WinPing ping;
- if (!ping.IsValid()) {
+ // Gets the interface MTU (TTL=1) for the interface used to reach |addr|.
+ WinPing ping;
+ if (!ping.IsValid()) {
+ SetError(EINVAL); // can't think of a better error ID
+ return -1;
+ }
+ int header_size = ICMP_HEADER_SIZE;
+ if (addr.family() == AF_INET6) {
+ header_size += IPV6_HEADER_SIZE;
+ } else if (addr.family() == AF_INET) {
+ header_size += IP_HEADER_SIZE;
+ }
+
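+ // Probe the table of common MTU plateaus from largest to smallest; the
+ // first ping that is not rejected as too large reveals the path MTU.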
+ for (int level = 0; PACKET_MAXIMUMS[level + 1] > 0; ++level) {
+ int32_t size = PACKET_MAXIMUMS[level] - header_size;
+ WinPing::PingResult result = ping.Ping(addr.ipaddr(), size,
+ ICMP_PING_TIMEOUT_MILLIS,
+ 1, false);
+ if (result == WinPing::PING_FAIL) {
SetError(EINVAL); // can't think of a better error ID
return -1;
+ } else if (result != WinPing::PING_TOO_LARGE) {
+ *mtu = PACKET_MAXIMUMS[level];
+ return 0;
}
- int header_size = ICMP_HEADER_SIZE;
- if (addr.family() == AF_INET6) {
- header_size += IPV6_HEADER_SIZE;
- } else if (addr.family() == AF_INET) {
- header_size += IP_HEADER_SIZE;
- }
-
- for (int level = 0; PACKET_MAXIMUMS[level + 1] > 0; ++level) {
- int32_t size = PACKET_MAXIMUMS[level] - header_size;
- WinPing::PingResult result = ping.Ping(addr.ipaddr(), size,
- ICMP_PING_TIMEOUT_MILLIS,
- 1, false);
- if (result == WinPing::PING_FAIL) {
- SetError(EINVAL); // can't think of a better error ID
- return -1;
- } else if (result != WinPing::PING_TOO_LARGE) {
- *mtu = PACKET_MAXIMUMS[level];
- return 0;
- }
- }
+ }
- ASSERT(false);
- return -1;
+ ASSERT(false);
+ return -1;
#elif defined(WEBRTC_MAC)
- // No simple way to do this on Mac OS X.
- // SIOCGIFMTU would work if we knew which interface would be used, but
- // figuring that out is pretty complicated. For now we'll return an error
- // and let the caller pick a default MTU.
- SetError(EINVAL);
- return -1;
+ // No simple way to do this on Mac OS X.
+ // SIOCGIFMTU would work if we knew which interface would be used, but
+ // figuring that out is pretty complicated. For now we'll return an error
+ // and let the caller pick a default MTU.
+ SetError(EINVAL);
+ return -1;
#elif defined(WEBRTC_LINUX)
- // Gets the path MTU.
- int value;
- socklen_t vlen = sizeof(value);
- int err = getsockopt(s_, IPPROTO_IP, IP_MTU, &value, &vlen);
- if (err < 0) {
- UpdateLastError();
- return err;
- }
+ // Gets the path MTU.
+ int value;
+ socklen_t vlen = sizeof(value);
+ int err = getsockopt(s_, IPPROTO_IP, IP_MTU, &value, &vlen);
+ if (err < 0) {
+ UpdateLastError();
+ return err;
+ }
- ASSERT((0 <= value) && (value <= 65536));
- *mtu = value;
- return 0;
+ ASSERT((0 <= value) && (value <= 65536));
+ *mtu = value;
+ return 0;
#elif defined(__native_client__)
- // Most socket operations, including this, will fail in NaCl's sandbox.
- error_ = EACCES;
- return -1;
+ // Most socket operations, including this, will fail in NaCl's sandbox.
+ error_ = EACCES;
+ return -1;
#endif
- }
+}
- SocketServer* socketserver() { return ss_; }
- protected:
- void OnResolveResult(AsyncResolverInterface* resolver) {
- if (resolver != resolver_) {
- return;
- }
+SOCKET PhysicalSocket::DoAccept(SOCKET socket,
+ sockaddr* addr,
+ socklen_t* addrlen) {
+ return ::accept(socket, addr, addrlen);
+}
- int error = resolver_->GetError();
- if (error == 0) {
- error = DoConnect(resolver_->address());
- } else {
- Close();
- }
+void PhysicalSocket::OnResolveResult(AsyncResolverInterface* resolver) {
+ if (resolver != resolver_) {
+ return;
+ }
- if (error) {
- SetError(error);
- SignalCloseEvent(this, error);
- }
+ int error = resolver_->GetError();
+ if (error == 0) {
+ error = DoConnect(resolver_->address());
+ } else {
+ Close();
}
- void UpdateLastError() {
- SetError(LAST_SYSTEM_ERROR);
+ if (error) {
+ SetError(error);
+ SignalCloseEvent(this, error);
}
+}
- void MaybeRemapSendError() {
+void PhysicalSocket::UpdateLastError() {
+ SetError(LAST_SYSTEM_ERROR);
+}
+
+void PhysicalSocket::MaybeRemapSendError() {
#if defined(WEBRTC_MAC)
- // https://developer.apple.com/library/mac/documentation/Darwin/
- // Reference/ManPages/man2/sendto.2.html
- // ENOBUFS - The output queue for a network interface is full.
- // This generally indicates that the interface has stopped sending,
- // but may be caused by transient congestion.
- if (GetError() == ENOBUFS) {
- SetError(EWOULDBLOCK);
- }
-#endif
+ // https://developer.apple.com/library/mac/documentation/Darwin/
+ // Reference/ManPages/man2/sendto.2.html
+ // ENOBUFS - The output queue for a network interface is full.
+ // This generally indicates that the interface has stopped sending,
+ // but may be caused by transient congestion.
+ if (GetError() == ENOBUFS) {
+ SetError(EWOULDBLOCK);
}
+#endif
+}
- static int TranslateOption(Option opt, int* slevel, int* sopt) {
- switch (opt) {
- case OPT_DONTFRAGMENT:
+int PhysicalSocket::TranslateOption(Option opt, int* slevel, int* sopt) {
+ switch (opt) {
+ case OPT_DONTFRAGMENT:
#if defined(WEBRTC_WIN)
- *slevel = IPPROTO_IP;
- *sopt = IP_DONTFRAGMENT;
- break;
+ *slevel = IPPROTO_IP;
+ *sopt = IP_DONTFRAGMENT;
+ break;
#elif defined(WEBRTC_MAC) || defined(BSD) || defined(__native_client__)
- LOG(LS_WARNING) << "Socket::OPT_DONTFRAGMENT not supported.";
- return -1;
+ LOG(LS_WARNING) << "Socket::OPT_DONTFRAGMENT not supported.";
+ return -1;
+#elif defined(WEBRTC_POSIX)
+ *slevel = IPPROTO_IP;
+ *sopt = IP_MTU_DISCOVER;
+ break;
+#endif
+ case OPT_RCVBUF:
+ *slevel = SOL_SOCKET;
+ *sopt = SO_RCVBUF;
+ break;
+ case OPT_SNDBUF:
+ *slevel = SOL_SOCKET;
+ *sopt = SO_SNDBUF;
+ break;
+ case OPT_NODELAY:
+ *slevel = IPPROTO_TCP;
+ *sopt = TCP_NODELAY;
+ break;
+ case OPT_DSCP:
+ LOG(LS_WARNING) << "Socket::OPT_DSCP not supported.";
+ return -1;
+ case OPT_RTP_SENDTIME_EXTN_ID:
+ return -1; // No logging is necessary as this is not an OS socket option.
+ default:
+ ASSERT(false);
+ return -1;
+ }
+ return 0;
+}
+
+SocketDispatcher::SocketDispatcher(PhysicalSocketServer *ss)
+#if defined(WEBRTC_WIN)
+ : PhysicalSocket(ss), id_(0), signal_close_(false)
+#else
+ : PhysicalSocket(ss)
+#endif
+{
+}
+
+SocketDispatcher::SocketDispatcher(SOCKET s, PhysicalSocketServer *ss)
+#if defined(WEBRTC_WIN)
+ : PhysicalSocket(ss, s), id_(0), signal_close_(false)
+#else
+ : PhysicalSocket(ss, s)
+#endif
+{
+}
+
+SocketDispatcher::~SocketDispatcher() {
+ Close();
+}
+
+bool SocketDispatcher::Initialize() {
+ ASSERT(s_ != INVALID_SOCKET);
+ // Must be a non-blocking socket, so the dispatcher never blocks on I/O.
+#if defined(WEBRTC_WIN)
+ u_long argp = 1;
+ ioctlsocket(s_, FIONBIO, &argp);
#elif defined(WEBRTC_POSIX)
- *slevel = IPPROTO_IP;
- *sopt = IP_MTU_DISCOVER;
- break;
+ fcntl(s_, F_SETFL, fcntl(s_, F_GETFL, 0) | O_NONBLOCK);
+#endif
+ ss_->Add(this);
+ return true;
+}
+
+bool SocketDispatcher::Create(int type) {
+ return Create(AF_INET, type);
+}
+
+bool SocketDispatcher::Create(int family, int type) {
+ // Change the socket to be non-blocking.
+ if (!PhysicalSocket::Create(family, type))
+ return false;
+
+ if (!Initialize())
+ return false;
+
+#if defined(WEBRTC_WIN)
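+ // Assign a nonzero dispatcher id. Close() resets id_ to 0, which OnEvent()
+ // uses to drop events that arrive for an already-closed socket.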
+ do { id_ = ++next_id_; } while (id_ == 0);
#endif
- case OPT_RCVBUF:
- *slevel = SOL_SOCKET;
- *sopt = SO_RCVBUF;
- break;
- case OPT_SNDBUF:
- *slevel = SOL_SOCKET;
- *sopt = SO_SNDBUF;
- break;
- case OPT_NODELAY:
- *slevel = IPPROTO_TCP;
- *sopt = TCP_NODELAY;
- break;
- case OPT_DSCP:
- LOG(LS_WARNING) << "Socket::OPT_DSCP not supported.";
- return -1;
- case OPT_RTP_SENDTIME_EXTN_ID:
- return -1; // No logging is necessary as this not a OS socket option.
+ return true;
+}
+
+#if defined(WEBRTC_WIN)
+
+WSAEVENT SocketDispatcher::GetWSAEvent() {
+ return WSA_INVALID_EVENT;
+}
+
+SOCKET SocketDispatcher::GetSocket() {
+ return s_;
+}
+
+bool SocketDispatcher::CheckSignalClose() {
+ if (!signal_close_)
+ return false;
+
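+ // Peek for unread data: the deferred close is only signaled once the
+ // receive buffer has drained, so no incoming data is lost.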
+ char ch;
+ if (recv(s_, &ch, 1, MSG_PEEK) > 0)
+ return false;
+
+ state_ = CS_CLOSED;
+ signal_close_ = false;
+ SignalCloseEvent(this, signal_err_);
+ return true;
+}
+
+int SocketDispatcher::next_id_ = 0;
+
+#elif defined(WEBRTC_POSIX)
+
+int SocketDispatcher::GetDescriptor() {
+ return s_;
+}
+
+bool SocketDispatcher::IsDescriptorClosed() {
+ // We don't have a reliable way of distinguishing end-of-stream
+ // from readability. So test on each readable call. Is this
+ // inefficient? Probably.
+ char ch;
+ ssize_t res = ::recv(s_, &ch, 1, MSG_PEEK);
+ if (res > 0) {
+ // Data available, so not closed.
+ return false;
+ } else if (res == 0) {
+ // EOF, so closed.
+ return true;
+ } else { // error
+ switch (errno) {
+ // Returned if we've already closed s_.
+ case EBADF:
+ // Returned during ungraceful peer shutdown.
+ case ECONNRESET:
+ return true;
default:
- ASSERT(false);
- return -1;
+ // Assume that all other errors are just blocking errors, meaning the
+ // connection is still good but we just can't read from it right now.
+ // This should only happen when connecting (and at most once), because
+ // in all other cases this function is only called if the file
+ // descriptor is already known to be in the readable state. However,
+ // it's not necessarily a problem if we spuriously interpret a
+ // "connection lost"-type error as a blocking error, because typically
+ // the next recv() will get EOF, so we'll still eventually notice that
+ // the socket is closed.
+ LOG_ERR(LS_WARNING) << "Assuming benign blocking error";
+ return false;
}
- return 0;
}
+}
- PhysicalSocketServer* ss_;
- SOCKET s_;
- uint8_t enabled_events_;
- bool udp_;
- int error_;
- // Protects |error_| that is accessed from different threads.
- mutable CriticalSection crit_;
- ConnState state_;
- AsyncResolver* resolver_;
-
-#ifdef _DEBUG
- std::string dbg_addr_;
-#endif // _DEBUG;
-};
+#endif // WEBRTC_POSIX
+
+uint32_t SocketDispatcher::GetRequestedEvents() {
+ return enabled_events_;
+}
+
+void SocketDispatcher::OnPreEvent(uint32_t ff) {
+ if ((ff & DE_CONNECT) != 0)
+ state_ = CS_CONNECTED;
+
+#if defined(WEBRTC_WIN)
+ // We set CS_CLOSED from CheckSignalClose.
+#elif defined(WEBRTC_POSIX)
+ if ((ff & DE_CLOSE) != 0)
+ state_ = CS_CLOSED;
+#endif
+}
+
+#if defined(WEBRTC_WIN)
+
+void SocketDispatcher::OnEvent(uint32_t ff, int err) {
+ int cache_id = id_;
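+ // If an event handler closes the socket, Close() resets id_; comparing
+ // against the cached id lets us drop the remaining stale events.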
+ // Make sure we deliver connect/accept first. Otherwise, consumers may see
+ // something like a READ followed by a CONNECT, which would be odd.
+ if (((ff & DE_CONNECT) != 0) && (id_ == cache_id)) {
+ if (ff != DE_CONNECT)
+ LOG(LS_VERBOSE) << "Signalled with DE_CONNECT: " << ff;
+ enabled_events_ &= ~DE_CONNECT;
+#if !defined(NDEBUG)
+ dbg_addr_ = "Connected @ ";
+ dbg_addr_.append(GetRemoteAddress().ToString());
+#endif
+ SignalConnectEvent(this);
+ }
+ if (((ff & DE_ACCEPT) != 0) && (id_ == cache_id)) {
+ enabled_events_ &= ~DE_ACCEPT;
+ SignalReadEvent(this);
+ }
+ if ((ff & DE_READ) != 0) {
+ enabled_events_ &= ~DE_READ;
+ SignalReadEvent(this);
+ }
+ if (((ff & DE_WRITE) != 0) && (id_ == cache_id)) {
+ enabled_events_ &= ~DE_WRITE;
+ SignalWriteEvent(this);
+ }
+ if (((ff & DE_CLOSE) != 0) && (id_ == cache_id)) {
+ signal_close_ = true;
+ signal_err_ = err;
+ }
+}
+
+#elif defined(WEBRTC_POSIX)
+
+void SocketDispatcher::OnEvent(uint32_t ff, int err) {
+ // Make sure we deliver connect/accept first. Otherwise, consumers may see
+ // something like a READ followed by a CONNECT, which would be odd.
+ if ((ff & DE_CONNECT) != 0) {
+ enabled_events_ &= ~DE_CONNECT;
+ SignalConnectEvent(this);
+ }
+ if ((ff & DE_ACCEPT) != 0) {
+ enabled_events_ &= ~DE_ACCEPT;
+ SignalReadEvent(this);
+ }
+ if ((ff & DE_READ) != 0) {
+ enabled_events_ &= ~DE_READ;
+ SignalReadEvent(this);
+ }
+ if ((ff & DE_WRITE) != 0) {
+ enabled_events_ &= ~DE_WRITE;
+ SignalWriteEvent(this);
+ }
+ if ((ff & DE_CLOSE) != 0) {
+ // The socket is now dead to us, so stop checking it.
+ enabled_events_ = 0;
+ SignalCloseEvent(this, err);
+ }
+}
+
+#endif // WEBRTC_POSIX
+
+int SocketDispatcher::Close() {
+ if (s_ == INVALID_SOCKET)
+ return 0;
+
+#if defined(WEBRTC_WIN)
+ id_ = 0;
+ signal_close_ = false;
+#endif
+ ss_->Remove(this);
+ return PhysicalSocket::Close();
+}
#if defined(WEBRTC_POSIX)
class EventDispatcher : public Dispatcher {
@@ -628,8 +834,8 @@ class PosixSignalHandler {
// Returns true if the given signal number is set.
bool IsSignalSet(int signum) const {
- ASSERT(signum < ARRAY_SIZE(received_signal_));
- if (signum < ARRAY_SIZE(received_signal_)) {
+ ASSERT(signum < static_cast<int>(arraysize(received_signal_)));
+ if (signum < static_cast<int>(arraysize(received_signal_))) {
return received_signal_[signum];
} else {
return false;
@@ -638,8 +844,8 @@ class PosixSignalHandler {
// Clears the given signal number.
void ClearSignal(int signum) {
- ASSERT(signum < ARRAY_SIZE(received_signal_));
- if (signum < ARRAY_SIZE(received_signal_)) {
+ ASSERT(signum < static_cast<int>(arraysize(received_signal_)));
+ if (signum < static_cast<int>(arraysize(received_signal_))) {
received_signal_[signum] = false;
}
}
@@ -654,7 +860,7 @@ class PosixSignalHandler {
// user-level state of the process, since the handler could be executed at any
// time on any thread.
void OnPosixSignalReceived(int signum) {
- if (signum >= ARRAY_SIZE(received_signal_)) {
+ if (signum >= static_cast<int>(arraysize(received_signal_))) {
// We don't have space in our array for this.
return;
}
@@ -790,116 +996,6 @@ class PosixSignalDispatcher : public Dispatcher {
PhysicalSocketServer *owner_;
};
-class SocketDispatcher : public Dispatcher, public PhysicalSocket {
- public:
- explicit SocketDispatcher(PhysicalSocketServer *ss) : PhysicalSocket(ss) {
- }
- SocketDispatcher(SOCKET s, PhysicalSocketServer *ss) : PhysicalSocket(ss, s) {
- }
-
- ~SocketDispatcher() override {
- Close();
- }
-
- bool Initialize() {
- ss_->Add(this);
- fcntl(s_, F_SETFL, fcntl(s_, F_GETFL, 0) | O_NONBLOCK);
- return true;
- }
-
- virtual bool Create(int type) {
- return Create(AF_INET, type);
- }
-
- bool Create(int family, int type) override {
- // Change the socket to be non-blocking.
- if (!PhysicalSocket::Create(family, type))
- return false;
-
- return Initialize();
- }
-
- int GetDescriptor() override { return s_; }
-
- bool IsDescriptorClosed() override {
- // We don't have a reliable way of distinguishing end-of-stream
- // from readability. So test on each readable call. Is this
- // inefficient? Probably.
- char ch;
- ssize_t res = ::recv(s_, &ch, 1, MSG_PEEK);
- if (res > 0) {
- // Data available, so not closed.
- return false;
- } else if (res == 0) {
- // EOF, so closed.
- return true;
- } else { // error
- switch (errno) {
- // Returned if we've already closed s_.
- case EBADF:
- // Returned during ungraceful peer shutdown.
- case ECONNRESET:
- return true;
- default:
- // Assume that all other errors are just blocking errors, meaning the
- // connection is still good but we just can't read from it right now.
- // This should only happen when connecting (and at most once), because
- // in all other cases this function is only called if the file
- // descriptor is already known to be in the readable state. However,
- // it's not necessary a problem if we spuriously interpret a
- // "connection lost"-type error as a blocking error, because typically
- // the next recv() will get EOF, so we'll still eventually notice that
- // the socket is closed.
- LOG_ERR(LS_WARNING) << "Assuming benign blocking error";
- return false;
- }
- }
- }
-
- uint32_t GetRequestedEvents() override { return enabled_events_; }
-
- void OnPreEvent(uint32_t ff) override {
- if ((ff & DE_CONNECT) != 0)
- state_ = CS_CONNECTED;
- if ((ff & DE_CLOSE) != 0)
- state_ = CS_CLOSED;
- }
-
- void OnEvent(uint32_t ff, int err) override {
- // Make sure we deliver connect/accept first. Otherwise, consumers may see
- // something like a READ followed by a CONNECT, which would be odd.
- if ((ff & DE_CONNECT) != 0) {
- enabled_events_ &= ~DE_CONNECT;
- SignalConnectEvent(this);
- }
- if ((ff & DE_ACCEPT) != 0) {
- enabled_events_ &= ~DE_ACCEPT;
- SignalReadEvent(this);
- }
- if ((ff & DE_READ) != 0) {
- enabled_events_ &= ~DE_READ;
- SignalReadEvent(this);
- }
- if ((ff & DE_WRITE) != 0) {
- enabled_events_ &= ~DE_WRITE;
- SignalWriteEvent(this);
- }
- if ((ff & DE_CLOSE) != 0) {
- // The socket is now dead to us, so stop checking it.
- enabled_events_ = 0;
- SignalCloseEvent(this, err);
- }
- }
-
- int Close() override {
- if (s_ == INVALID_SOCKET)
- return 0;
-
- ss_->Remove(this);
- return PhysicalSocket::Close();
- }
-};
-
class FileDispatcher: public Dispatcher, public AsyncFile {
public:
FileDispatcher(int fd, PhysicalSocketServer *ss) : ss_(ss), fd_(fd) {
@@ -1013,130 +1109,6 @@ private:
PhysicalSocketServer* ss_;
WSAEVENT hev_;
};
-
-class SocketDispatcher : public Dispatcher, public PhysicalSocket {
- public:
- static int next_id_;
- int id_;
- bool signal_close_;
- int signal_err_;
-
- SocketDispatcher(PhysicalSocketServer* ss)
- : PhysicalSocket(ss),
- id_(0),
- signal_close_(false) {
- }
-
- SocketDispatcher(SOCKET s, PhysicalSocketServer* ss)
- : PhysicalSocket(ss, s),
- id_(0),
- signal_close_(false) {
- }
-
- virtual ~SocketDispatcher() {
- Close();
- }
-
- bool Initialize() {
- ASSERT(s_ != INVALID_SOCKET);
- // Must be a non-blocking
- u_long argp = 1;
- ioctlsocket(s_, FIONBIO, &argp);
- ss_->Add(this);
- return true;
- }
-
- virtual bool Create(int type) {
- return Create(AF_INET, type);
- }
-
- virtual bool Create(int family, int type) {
- // Create socket
- if (!PhysicalSocket::Create(family, type))
- return false;
-
- if (!Initialize())
- return false;
-
- do { id_ = ++next_id_; } while (id_ == 0);
- return true;
- }
-
- virtual int Close() {
- if (s_ == INVALID_SOCKET)
- return 0;
-
- id_ = 0;
- signal_close_ = false;
- ss_->Remove(this);
- return PhysicalSocket::Close();
- }
-
- virtual uint32_t GetRequestedEvents() { return enabled_events_; }
-
- virtual void OnPreEvent(uint32_t ff) {
- if ((ff & DE_CONNECT) != 0)
- state_ = CS_CONNECTED;
- // We set CS_CLOSED from CheckSignalClose.
- }
-
- virtual void OnEvent(uint32_t ff, int err) {
- int cache_id = id_;
- // Make sure we deliver connect/accept first. Otherwise, consumers may see
- // something like a READ followed by a CONNECT, which would be odd.
- if (((ff & DE_CONNECT) != 0) && (id_ == cache_id)) {
- if (ff != DE_CONNECT)
- LOG(LS_VERBOSE) << "Signalled with DE_CONNECT: " << ff;
- enabled_events_ &= ~DE_CONNECT;
-#ifdef _DEBUG
- dbg_addr_ = "Connected @ ";
- dbg_addr_.append(GetRemoteAddress().ToString());
-#endif // _DEBUG
- SignalConnectEvent(this);
- }
- if (((ff & DE_ACCEPT) != 0) && (id_ == cache_id)) {
- enabled_events_ &= ~DE_ACCEPT;
- SignalReadEvent(this);
- }
- if ((ff & DE_READ) != 0) {
- enabled_events_ &= ~DE_READ;
- SignalReadEvent(this);
- }
- if (((ff & DE_WRITE) != 0) && (id_ == cache_id)) {
- enabled_events_ &= ~DE_WRITE;
- SignalWriteEvent(this);
- }
- if (((ff & DE_CLOSE) != 0) && (id_ == cache_id)) {
- signal_close_ = true;
- signal_err_ = err;
- }
- }
-
- virtual WSAEVENT GetWSAEvent() {
- return WSA_INVALID_EVENT;
- }
-
- virtual SOCKET GetSocket() {
- return s_;
- }
-
- virtual bool CheckSignalClose() {
- if (!signal_close_)
- return false;
-
- char ch;
- if (recv(s_, &ch, 1, MSG_PEEK) > 0)
- return false;
-
- state_ = CS_CLOSED;
- signal_close_ = false;
- SignalCloseEvent(this, signal_err_);
- return true;
- }
-};
-
-int SocketDispatcher::next_id_ = 0;
-
#endif // WEBRTC_WIN
// Sets the value of a boolean value to false when signaled.
@@ -1189,7 +1161,7 @@ Socket* PhysicalSocketServer::CreateSocket(int family, int type) {
return socket;
} else {
delete socket;
- return 0;
+ return nullptr;
}
}
@@ -1203,7 +1175,7 @@ AsyncSocket* PhysicalSocketServer::CreateAsyncSocket(int family, int type) {
return dispatcher;
} else {
delete dispatcher;
- return 0;
+ return nullptr;
}
}
@@ -1213,7 +1185,7 @@ AsyncSocket* PhysicalSocketServer::WrapSocket(SOCKET s) {
return dispatcher;
} else {
delete dispatcher;
- return 0;
+ return nullptr;
}
}
@@ -1342,7 +1314,7 @@ bool PhysicalSocketServer::Wait(int cmsWait, bool process_io) {
int errcode = 0;
// Reap any error code, which can be signaled through reads or writes.
- // TODO: Should we set errcode if getsockopt fails?
+ // TODO(pthatcher): Should we set errcode if getsockopt fails?
if (FD_ISSET(fd, &fdsRead) || FD_ISSET(fd, &fdsWrite)) {
socklen_t len = sizeof(errcode);
::getsockopt(fd, SOL_SOCKET, SO_ERROR, &errcode, &len);
@@ -1351,7 +1323,7 @@ bool PhysicalSocketServer::Wait(int cmsWait, bool process_io) {
// Check readable descriptors. If we're waiting on an accept, signal
// that. Otherwise we're waiting for data, check to see if we're
// readable or really closed.
- // TODO: Only peek at TCP descriptors.
+ // TODO(pthatcher): Only peek at TCP descriptors.
if (FD_ISSET(fd, &fdsRead)) {
FD_CLR(fd, &fdsRead);
if (pdispatcher->GetRequestedEvents() & DE_ACCEPT) {
@@ -1525,7 +1497,7 @@ bool PhysicalSocketServer::Wait(int cmsWait, bool process_io) {
if (dw == WSA_WAIT_FAILED) {
// Failed?
- // TODO: need a better strategy than this!
+ // TODO(pthatcher): need a better strategy than this!
WSAGetLastError();
ASSERT(false);
return false;
diff --git a/webrtc/base/physicalsocketserver.h b/webrtc/base/physicalsocketserver.h
index af09e0b988..ae1f10f596 100644
--- a/webrtc/base/physicalsocketserver.h
+++ b/webrtc/base/physicalsocketserver.h
@@ -14,6 +14,7 @@
#include <vector>
#include "webrtc/base/asyncfile.h"
+#include "webrtc/base/nethelpers.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/socketserver.h"
#include "webrtc/base/criticalsection.h"
@@ -115,6 +116,107 @@ class PhysicalSocketServer : public SocketServer {
#endif
};
+class PhysicalSocket : public AsyncSocket, public sigslot::has_slots<> {
+ public:
+ PhysicalSocket(PhysicalSocketServer* ss, SOCKET s = INVALID_SOCKET);
+ ~PhysicalSocket() override;
+
+ // Creates the underlying OS socket (same as the "socket" function).
+ virtual bool Create(int family, int type);
+
+ SocketAddress GetLocalAddress() const override;
+ SocketAddress GetRemoteAddress() const override;
+
+ int Bind(const SocketAddress& bind_addr) override;
+ int Connect(const SocketAddress& addr) override;
+
+ int GetError() const override;
+ void SetError(int error) override;
+
+ ConnState GetState() const override;
+
+ int GetOption(Option opt, int* value) override;
+ int SetOption(Option opt, int value) override;
+
+ int Send(const void* pv, size_t cb) override;
+ int SendTo(const void* buffer,
+ size_t length,
+ const SocketAddress& addr) override;
+
+ int Recv(void* buffer, size_t length) override;
+ int RecvFrom(void* buffer, size_t length, SocketAddress* out_addr) override;
+
+ int Listen(int backlog) override;
+ AsyncSocket* Accept(SocketAddress* out_addr) override;
+
+ int Close() override;
+
+ int EstimateMTU(uint16_t* mtu) override;
+
+ SocketServer* socketserver() { return ss_; }
+
+ protected:
+ int DoConnect(const SocketAddress& connect_addr);
+
+ // Make virtual so ::accept can be overridden in tests.
+ virtual SOCKET DoAccept(SOCKET socket, sockaddr* addr, socklen_t* addrlen);
+
+ void OnResolveResult(AsyncResolverInterface* resolver);
+
+ void UpdateLastError();
+ void MaybeRemapSendError();
+
+ static int TranslateOption(Option opt, int* slevel, int* sopt);
+
+ PhysicalSocketServer* ss_;
+ SOCKET s_;
+ uint8_t enabled_events_;
+ bool udp_;
+ mutable CriticalSection crit_;
+ int error_ GUARDED_BY(crit_);
+ ConnState state_;
+ AsyncResolver* resolver_;
+
+#if !defined(NDEBUG)
+ std::string dbg_addr_;
+#endif
+};
+
+class SocketDispatcher : public Dispatcher, public PhysicalSocket {
+ public:
+ explicit SocketDispatcher(PhysicalSocketServer *ss);
+ SocketDispatcher(SOCKET s, PhysicalSocketServer *ss);
+ ~SocketDispatcher() override;
+
+ bool Initialize();
+
+ virtual bool Create(int type);
+ bool Create(int family, int type) override;
+
+#if defined(WEBRTC_WIN)
+ WSAEVENT GetWSAEvent() override;
+ SOCKET GetSocket() override;
+ bool CheckSignalClose() override;
+#elif defined(WEBRTC_POSIX)
+ int GetDescriptor() override;
+ bool IsDescriptorClosed() override;
+#endif
+
+ uint32_t GetRequestedEvents() override;
+ void OnPreEvent(uint32_t ff) override;
+ void OnEvent(uint32_t ff, int err) override;
+
+ int Close() override;
+
+#if defined(WEBRTC_WIN)
+ private:
+ static int next_id_;
+ int id_;
+ bool signal_close_;
+ int signal_err_;
+#endif // WEBRTC_WIN
+};
+
} // namespace rtc
#endif // WEBRTC_BASE_PHYSICALSOCKETSERVER_H__
diff --git a/webrtc/base/physicalsocketserver_unittest.cc b/webrtc/base/physicalsocketserver_unittest.cc
index 6c7be9f63c..a2fde80b42 100644
--- a/webrtc/base/physicalsocketserver_unittest.cc
+++ b/webrtc/base/physicalsocketserver_unittest.cc
@@ -18,13 +18,85 @@
#include "webrtc/base/socket_unittest.h"
#include "webrtc/base/testutils.h"
#include "webrtc/base/thread.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
namespace rtc {
+class PhysicalSocketTest;
+
+class FakeSocketDispatcher : public SocketDispatcher {
+ public:
+ explicit FakeSocketDispatcher(PhysicalSocketServer* ss)
+ : SocketDispatcher(ss) {
+ }
+
+ protected:
+ SOCKET DoAccept(SOCKET socket, sockaddr* addr, socklen_t* addrlen) override;
+};
+
+class FakePhysicalSocketServer : public PhysicalSocketServer {
+ public:
+ explicit FakePhysicalSocketServer(PhysicalSocketTest* test)
+ : test_(test) {
+ }
+
+ AsyncSocket* CreateAsyncSocket(int type) override {
+ SocketDispatcher* dispatcher = new FakeSocketDispatcher(this);
+ if (dispatcher->Create(type)) {
+ return dispatcher;
+ } else {
+ delete dispatcher;
+ return nullptr;
+ }
+ }
+
+ AsyncSocket* CreateAsyncSocket(int family, int type) override {
+ SocketDispatcher* dispatcher = new FakeSocketDispatcher(this);
+ if (dispatcher->Create(family, type)) {
+ return dispatcher;
+ } else {
+ delete dispatcher;
+ return nullptr;
+ }
+ }
+
+ PhysicalSocketTest* GetTest() const { return test_; }
+
+ private:
+ PhysicalSocketTest* test_;
+};
+
class PhysicalSocketTest : public SocketTest {
+ public:
+ // Set a flag to simulate failures when calling "::accept" on an AsyncSocket.
+ void SetFailAccept(bool fail) { fail_accept_ = fail; }
+ bool FailAccept() const { return fail_accept_; }
+
+ protected:
+ PhysicalSocketTest()
+ : server_(new FakePhysicalSocketServer(this)),
+ scope_(server_.get()),
+ fail_accept_(false) {
+ }
+
+ void ConnectInternalAcceptError(const IPAddress& loopback);
+
+ rtc::scoped_ptr<FakePhysicalSocketServer> server_;
+ SocketServerScope scope_;
+ bool fail_accept_;
};
+SOCKET FakeSocketDispatcher::DoAccept(SOCKET socket,
+ sockaddr* addr,
+ socklen_t* addrlen) {
+ FakePhysicalSocketServer* ss =
+ static_cast<FakePhysicalSocketServer*>(socketserver());
+ if (ss->GetTest()->FailAccept()) {
+ return INVALID_SOCKET;
+ }
+
+ return SocketDispatcher::DoAccept(socket, addr, addrlen);
+}
+
TEST_F(PhysicalSocketTest, TestConnectIPv4) {
SocketTest::TestConnectIPv4();
}
@@ -51,6 +123,92 @@ TEST_F(PhysicalSocketTest, TestConnectFailIPv4) {
SocketTest::TestConnectFailIPv4();
}
+void PhysicalSocketTest::ConnectInternalAcceptError(const IPAddress& loopback) {
+ testing::StreamSink sink;
+ SocketAddress accept_addr;
+
+ // Create two clients.
+ scoped_ptr<AsyncSocket> client1(server_->CreateAsyncSocket(loopback.family(),
+ SOCK_STREAM));
+ sink.Monitor(client1.get());
+ EXPECT_EQ(AsyncSocket::CS_CLOSED, client1->GetState());
+ EXPECT_PRED1(IsUnspecOrEmptyIP, client1->GetLocalAddress().ipaddr());
+
+ scoped_ptr<AsyncSocket> client2(server_->CreateAsyncSocket(loopback.family(),
+ SOCK_STREAM));
+ sink.Monitor(client2.get());
+ EXPECT_EQ(AsyncSocket::CS_CLOSED, client2->GetState());
+ EXPECT_PRED1(IsUnspecOrEmptyIP, client2->GetLocalAddress().ipaddr());
+
+ // Create server and listen.
+ scoped_ptr<AsyncSocket> server(
+ server_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
+ sink.Monitor(server.get());
+ EXPECT_EQ(0, server->Bind(SocketAddress(loopback, 0)));
+ EXPECT_EQ(0, server->Listen(5));
+ EXPECT_EQ(AsyncSocket::CS_CONNECTING, server->GetState());
+
+ // Ensure no pending server connections, since we haven't done anything yet.
+ EXPECT_FALSE(sink.Check(server.get(), testing::SSE_READ));
+ EXPECT_TRUE(nullptr == server->Accept(&accept_addr));
+ EXPECT_TRUE(accept_addr.IsNil());
+
+ // Attempt first connect to listening socket.
+ EXPECT_EQ(0, client1->Connect(server->GetLocalAddress()));
+ EXPECT_FALSE(client1->GetLocalAddress().IsNil());
+ EXPECT_NE(server->GetLocalAddress(), client1->GetLocalAddress());
+
+ // Client is connecting, outcome not yet determined.
+ EXPECT_EQ(AsyncSocket::CS_CONNECTING, client1->GetState());
+ EXPECT_FALSE(sink.Check(client1.get(), testing::SSE_OPEN));
+ EXPECT_FALSE(sink.Check(client1.get(), testing::SSE_CLOSE));
+
+ // Server has pending connection, try to accept it (will fail).
+ EXPECT_TRUE_WAIT((sink.Check(server.get(), testing::SSE_READ)), kTimeout);
+ // Simulate "::accept" returning an error.
+ SetFailAccept(true);
+ scoped_ptr<AsyncSocket> accepted(server->Accept(&accept_addr));
+ EXPECT_FALSE(accepted);
+ ASSERT_TRUE(accept_addr.IsNil());
+
+ // Ensure no more pending server connections.
+ EXPECT_FALSE(sink.Check(server.get(), testing::SSE_READ));
+ EXPECT_TRUE(nullptr == server->Accept(&accept_addr));
+ EXPECT_TRUE(accept_addr.IsNil());
+
+ // Attempt second connect to listening socket.
+ EXPECT_EQ(0, client2->Connect(server->GetLocalAddress()));
+ EXPECT_FALSE(client2->GetLocalAddress().IsNil());
+ EXPECT_NE(server->GetLocalAddress(), client2->GetLocalAddress());
+
+ // Client is connecting, outcome not yet determined.
+ EXPECT_EQ(AsyncSocket::CS_CONNECTING, client2->GetState());
+ EXPECT_FALSE(sink.Check(client2.get(), testing::SSE_OPEN));
+ EXPECT_FALSE(sink.Check(client2.get(), testing::SSE_CLOSE));
+
+ // Server has pending connection, try to accept it (will succeed).
+ EXPECT_TRUE_WAIT((sink.Check(server.get(), testing::SSE_READ)), kTimeout);
+ SetFailAccept(false);
+ scoped_ptr<AsyncSocket> accepted2(server->Accept(&accept_addr));
+ ASSERT_TRUE(accepted2);
+ EXPECT_FALSE(accept_addr.IsNil());
+ EXPECT_EQ(accepted2->GetRemoteAddress(), accept_addr);
+}
+
+TEST_F(PhysicalSocketTest, TestConnectAcceptErrorIPv4) {
+ ConnectInternalAcceptError(kIPv4Loopback);
+}
+
+// Crashes on Linux. See webrtc:4923.
+#if defined(WEBRTC_LINUX)
+#define MAYBE_TestConnectAcceptErrorIPv6 DISABLED_TestConnectAcceptErrorIPv6
+#else
+#define MAYBE_TestConnectAcceptErrorIPv6 TestConnectAcceptErrorIPv6
+#endif
+TEST_F(PhysicalSocketTest, MAYBE_TestConnectAcceptErrorIPv6) {
+ ConnectInternalAcceptError(kIPv6Loopback);
+}
+
// Crashes on Linux. See webrtc:4923.
#if defined(WEBRTC_LINUX)
#define MAYBE_TestConnectFailIPv6 DISABLED_TestConnectFailIPv6
@@ -215,8 +373,11 @@ TEST_F(PhysicalSocketTest, MAYBE_TestUdpIPv6) {
// https://code.google.com/p/webrtc/issues/detail?id=4958
// TODO(deadbeef): Enable again once test is reimplemented to be unflaky.
// Also disable for ASan.
+// Disabled on Android: https://code.google.com/p/webrtc/issues/detail?id=4364
+// Disabled on Linux: https://bugs.chromium.org/p/webrtc/issues/detail?id=5233
#if defined(THREAD_SANITIZER) || defined(MEMORY_SANITIZER) || \
- defined(ADDRESS_SANITIZER)
+ defined(ADDRESS_SANITIZER) || defined(WEBRTC_ANDROID) || \
+ defined(WEBRTC_LINUX)
#define MAYBE_TestUdpReadyToSendIPv4 DISABLED_TestUdpReadyToSendIPv4
#else
#define MAYBE_TestUdpReadyToSendIPv4 TestUdpReadyToSendIPv4
diff --git a/webrtc/base/platform_thread.cc b/webrtc/base/platform_thread.cc
index 4167392363..05b7a258c0 100644
--- a/webrtc/base/platform_thread.cc
+++ b/webrtc/base/platform_thread.cc
@@ -10,8 +10,6 @@
#include "webrtc/base/platform_thread.h"
-#include <string.h>
-
#include "webrtc/base/checks.h"
#if defined(WEBRTC_LINUX)
@@ -58,7 +56,6 @@ bool IsThreadRefEqual(const PlatformThreadRef& a, const PlatformThreadRef& b) {
}
void SetCurrentThreadName(const char* name) {
- RTC_DCHECK(strlen(name) < 64);
#if defined(WEBRTC_WIN)
struct {
DWORD dwType;
@@ -79,4 +76,175 @@ void SetCurrentThreadName(const char* name) {
#endif
}
+namespace {
+#if defined(WEBRTC_WIN)
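+// Runs as a user-mode APC on the worker thread; Stop() queues it and the
+// alertable SleepEx() in Run() delivers it.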
+void CALLBACK RaiseFlag(ULONG_PTR param) {
+ *reinterpret_cast<bool*>(param) = true;
+}
+#else
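+// RAII wrapper that pairs pthread_attr_init() with pthread_attr_destroy().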
+struct ThreadAttributes {
+ ThreadAttributes() { pthread_attr_init(&attr); }
+ ~ThreadAttributes() { pthread_attr_destroy(&attr); }
+ pthread_attr_t* operator&() { return &attr; }
+ pthread_attr_t attr;
+};
+#endif // defined(WEBRTC_WIN)
+}
+
+PlatformThread::PlatformThread(ThreadRunFunction func,
+ void* obj,
+ const char* thread_name)
+ : run_function_(func),
+ obj_(obj),
+ name_(thread_name ? thread_name : "webrtc"),
+#if defined(WEBRTC_WIN)
+ stop_(false),
+ thread_(NULL) {
+#else
+ stop_event_(false, false),
+ thread_(0) {
+#endif // defined(WEBRTC_WIN)
+ RTC_DCHECK(func);
+ RTC_DCHECK(name_.length() < 64);
+}
+
+PlatformThread::~PlatformThread() {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+#if defined(WEBRTC_WIN)
+ RTC_DCHECK(!thread_);
+#endif // defined(WEBRTC_WIN)
+}
+
+#if defined(WEBRTC_WIN)
+DWORD WINAPI PlatformThread::StartThread(void* param) {
+ static_cast<PlatformThread*>(param)->Run();
+ return 0;
+}
+#else
+void* PlatformThread::StartThread(void* param) {
+ static_cast<PlatformThread*>(param)->Run();
+ return 0;
+}
+#endif // defined(WEBRTC_WIN)
+
+void PlatformThread::Start() {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(!thread_) << "Thread already started?";
+#if defined(WEBRTC_WIN)
+ stop_ = false;
+
+ // See bug 2902 for background on STACK_SIZE_PARAM_IS_A_RESERVATION.
+ // Set the reserved stack size to 1M, which is the default on Windows
+ // and Linux.
+ DWORD thread_id;
+ thread_ = ::CreateThread(NULL, 1024 * 1024, &StartThread, this,
+ STACK_SIZE_PARAM_IS_A_RESERVATION, &thread_id);
+ RTC_CHECK(thread_) << "CreateThread failed";
+#else
+ ThreadAttributes attr;
+ // Set the stack size to 1M.
+ pthread_attr_setstacksize(&attr, 1024 * 1024);
+ RTC_CHECK_EQ(0, pthread_create(&thread_, &attr, &StartThread, this));
+#endif // defined(WEBRTC_WIN)
+}
+
+bool PlatformThread::IsRunning() const {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+#if defined(WEBRTC_WIN)
+ return thread_ != nullptr;
+#else
+ return thread_ != 0;
+#endif // defined(WEBRTC_WIN)
+}
+
+void PlatformThread::Stop() {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ if (!IsRunning())
+ return;
+
+#if defined(WEBRTC_WIN)
+ // Set stop_ to |true| on the worker thread.
+ QueueUserAPC(&RaiseFlag, thread_, reinterpret_cast<ULONG_PTR>(&stop_));
+ WaitForSingleObject(thread_, INFINITE);
+ CloseHandle(thread_);
+ thread_ = nullptr;
+#else
+ stop_event_.Set();
+ RTC_CHECK_EQ(0, pthread_join(thread_, nullptr));
+ thread_ = 0;
+#endif // defined(WEBRTC_WIN)
+}
+
+void PlatformThread::Run() {
+ if (!name_.empty())
+ rtc::SetCurrentThreadName(name_.c_str());
+ do {
+ // The interface contract of Start/Stop is that for a successful call to
+ // Start, there should be at least one call to the run function. So we
+ // call the function before checking |stop_|.
+ if (!run_function_(obj_))
+ break;
+#if defined(WEBRTC_WIN)
+ // Alertable sleep to permit RaiseFlag to run and update |stop_|.
+ SleepEx(0, true);
+ } while (!stop_);
+#else
+ } while (!stop_event_.Wait(0));
+#endif // defined(WEBRTC_WIN)
+}
+
+bool PlatformThread::SetPriority(ThreadPriority priority) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(IsRunning());
+#if defined(WEBRTC_WIN)
+ return SetThreadPriority(thread_, priority) != FALSE;
+#elif defined(__native_client__)
+ // Setting thread priorities is not supported in NaCl.
+ return true;
+#elif defined(WEBRTC_CHROMIUM_BUILD) && defined(WEBRTC_LINUX)
+ // TODO(tommi): Switch to the same mechanism as Chromium uses for changing
+ // thread priorities.
+ return true;
+#else
+#ifdef WEBRTC_THREAD_RR
+ const int policy = SCHED_RR;
+#else
+ const int policy = SCHED_FIFO;
+#endif
+ const int min_prio = sched_get_priority_min(policy);
+ const int max_prio = sched_get_priority_max(policy);
+ if (min_prio == -1 || max_prio == -1) {
+ return false;
+ }
+
+ if (max_prio - min_prio <= 2)
+ return false;
+
+ // Convert webrtc priority to system priorities:
+ sched_param param;
+ const int top_prio = max_prio - 1;
+ const int low_prio = min_prio + 1;
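+ // Stay one step inside the scheduler's [min, max] range so the mapped
+ // priorities never sit at the absolute extremes.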
+ switch (priority) {
+ case kLowPriority:
+ param.sched_priority = low_prio;
+ break;
+ case kNormalPriority:
+ // The -1 ensures that kHighPriority is always greater than or equal to
+ // kNormalPriority.
+ param.sched_priority = (low_prio + top_prio - 1) / 2;
+ break;
+ case kHighPriority:
+ param.sched_priority = std::max(top_prio - 2, low_prio);
+ break;
+ case kHighestPriority:
+ param.sched_priority = std::max(top_prio - 1, low_prio);
+ break;
+ case kRealtimePriority:
+ param.sched_priority = top_prio;
+ break;
+ }
+ return pthread_setschedparam(thread_, policy, &param) == 0;
+#endif // defined(WEBRTC_WIN)
+}
+
} // namespace rtc
diff --git a/webrtc/base/platform_thread.h b/webrtc/base/platform_thread.h
index 50033b3928..53465e4b17 100644
--- a/webrtc/base/platform_thread.h
+++ b/webrtc/base/platform_thread.h
@@ -11,23 +11,15 @@
#ifndef WEBRTC_BASE_PLATFORM_THREAD_H_
#define WEBRTC_BASE_PLATFORM_THREAD_H_
-#if defined(WEBRTC_WIN)
-#include <winsock2.h>
-#include <windows.h>
-#elif defined(WEBRTC_POSIX)
-#include <pthread.h>
-#include <unistd.h>
-#endif
+#include <string>
-namespace rtc {
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/event.h"
+#include "webrtc/base/platform_thread_types.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/thread_checker.h"
-#if defined(WEBRTC_WIN)
-typedef DWORD PlatformThreadId;
-typedef DWORD PlatformThreadRef;
-#elif defined(WEBRTC_POSIX)
-typedef pid_t PlatformThreadId;
-typedef pthread_t PlatformThreadRef;
-#endif
+namespace rtc {
PlatformThreadId CurrentThreadId();
PlatformThreadRef CurrentThreadRef();
@@ -38,6 +30,71 @@ bool IsThreadRefEqual(const PlatformThreadRef& a, const PlatformThreadRef& b);
// Sets the current thread name.
void SetCurrentThreadName(const char* name);
+// Callback function that the spawned thread enters once started.
+// A return value of false is interpreted to mean that the function has no
+// more work to do and that the thread can be released.
+typedef bool (*ThreadRunFunction)(void*);
+
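+// On Windows these constants are the native THREAD_PRIORITY_* values; on
+// other platforms they are abstract levels that SetPriority() translates
+// into sched_param priorities.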
+enum ThreadPriority {
+#ifdef WEBRTC_WIN
+ kLowPriority = THREAD_PRIORITY_BELOW_NORMAL,
+ kNormalPriority = THREAD_PRIORITY_NORMAL,
+ kHighPriority = THREAD_PRIORITY_ABOVE_NORMAL,
+ kHighestPriority = THREAD_PRIORITY_HIGHEST,
+ kRealtimePriority = THREAD_PRIORITY_TIME_CRITICAL
+#else
+ kLowPriority = 1,
+ kNormalPriority = 2,
+ kHighPriority = 3,
+ kHighestPriority = 4,
+ kRealtimePriority = 5
+#endif
+};
+
+// Represents a simple worker thread. The implementation must be assumed
+// to be single-threaded, meaning that all methods of the class must be
+// called from the same thread, including instantiation.
+class PlatformThread {
+ public:
+ PlatformThread(ThreadRunFunction func, void* obj, const char* thread_name);
+ virtual ~PlatformThread();
+
+ // Spawns a thread and starts executing the run function.
+ void Start();
+
+ bool IsRunning() const;
+
+ // Stops (joins) the spawned thread.
+ void Stop();
+
+ // Set the priority of the thread. Must be called when thread is running.
+ bool SetPriority(ThreadPriority priority);
+
+ private:
+ void Run();
+
+ ThreadRunFunction const run_function_;
+ void* const obj_;
+ // TODO(pbos): Make sure call sites use string literals and update to a const
+ // char* instead of a std::string.
+ const std::string name_;
+ rtc::ThreadChecker thread_checker_;
+#if defined(WEBRTC_WIN)
+ static DWORD WINAPI StartThread(void* param);
+
+ bool stop_;
+ HANDLE thread_;
+#else
+ static void* StartThread(void* param);
+
+ rtc::Event stop_event_;
+
+ pthread_t thread_;
+#endif // defined(WEBRTC_WIN)
+ RTC_DISALLOW_COPY_AND_ASSIGN(PlatformThread);
+};
+
} // namespace rtc
#endif // WEBRTC_BASE_PLATFORM_THREAD_H_
diff --git a/webrtc/base/platform_thread_types.h b/webrtc/base/platform_thread_types.h
new file mode 100644
index 0000000000..546fffd96d
--- /dev/null
+++ b/webrtc/base/platform_thread_types.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_PLATFORM_THREAD_TYPES_H_
+#define WEBRTC_BASE_PLATFORM_THREAD_TYPES_H_
+
+#if defined(WEBRTC_WIN)
+#include <winsock2.h>
+#include <windows.h>
+#elif defined(WEBRTC_POSIX)
+#include <pthread.h>
+#include <unistd.h>
+#endif
+
+namespace rtc {
+#if defined(WEBRTC_WIN)
+typedef DWORD PlatformThreadId;
+typedef DWORD PlatformThreadRef;
+#elif defined(WEBRTC_POSIX)
+typedef pid_t PlatformThreadId;
+typedef pthread_t PlatformThreadRef;
+#endif
+} // namespace rtc
+
+#endif // WEBRTC_BASE_PLATFORM_THREAD_TYPES_H_
diff --git a/webrtc/base/platform_thread_unittest.cc b/webrtc/base/platform_thread_unittest.cc
new file mode 100644
index 0000000000..f9db8e34a3
--- /dev/null
+++ b/webrtc/base/platform_thread_unittest.cc
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/platform_thread.h"
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/system_wrappers/include/sleep.h"
+
+namespace webrtc {
+
+// Function that does nothing, and reports success.
+bool NullRunFunction(void* obj) {
+ SleepMs(0); // Hand over the timeslice to prevent busy looping.
+ return true;
+}
+
+TEST(PlatformThreadTest, StartStop) {
+ rtc::PlatformThread thread(&NullRunFunction, nullptr, "PlatformThreadTest");
+ thread.Start();
+ thread.Stop();
+}
+
+// Function that sets a boolean.
+bool SetFlagRunFunction(void* obj) {
+ bool* obj_as_bool = static_cast<bool*>(obj);
+ *obj_as_bool = true;
+ SleepMs(0); // Hand over the timeslice to prevent busy looping.
+ return true;
+}
+
+TEST(PlatformThreadTest, RunFunctionIsCalled) {
+ bool flag = false;
+ rtc::PlatformThread thread(&SetFlagRunFunction, &flag, "RunFunctionIsCalled");
+ thread.Start();
+
+ // At this point, the flag may be either true or false.
+ thread.Stop();
+
+ // We expect the thread to have run at least once.
+ EXPECT_TRUE(flag);
+}
+
+} // namespace webrtc
diff --git a/webrtc/base/proxy_unittest.cc b/webrtc/base/proxy_unittest.cc
index 03dc154a6f..d8a523fe17 100644
--- a/webrtc/base/proxy_unittest.cc
+++ b/webrtc/base/proxy_unittest.cc
@@ -17,7 +17,6 @@
#include "webrtc/base/testclient.h"
#include "webrtc/base/testechoserver.h"
#include "webrtc/base/virtualsocketserver.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
using rtc::Socket;
using rtc::Thread;
diff --git a/webrtc/base/proxydetect.cc b/webrtc/base/proxydetect.cc
index b144d20a97..30959ca1d3 100644
--- a/webrtc/base/proxydetect.cc
+++ b/webrtc/base/proxydetect.cc
@@ -34,6 +34,7 @@
#include <map>
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/fileutils.h"
#include "webrtc/base/httpcommon.h"
#include "webrtc/base/httpcommon-inl.h"
@@ -222,7 +223,7 @@ bool ProxyItemMatch(const Url<char>& url, char * item, size_t len) {
uint32_t mask = (m == 0) ? 0 : (~0UL) << (32 - m);
SocketAddress addr(url.host(), 0);
// TODO: Support IPv6 proxyitems. This code block is IPv4 only anyway.
- return !addr.IsUnresolved() &&
+ return !addr.IsUnresolvedIP() &&
((addr.ipaddr().v4AddressAsHostOrderInteger() & mask) == (ip & mask));
}
@@ -398,7 +399,7 @@ bool GetFirefoxProfilePath(Pathname* path) {
}
char buffer[NAME_MAX + 1];
if (0 != FSRefMakePath(&fr, reinterpret_cast<uint8_t*>(buffer),
- ARRAY_SIZE(buffer))) {
+ arraysize(buffer))) {
LOG(LS_ERROR) << "FSRefMakePath failed";
return false;
}
diff --git a/webrtc/base/random.cc b/webrtc/base/random.cc
new file mode 100644
index 0000000000..14a9faf5b3
--- /dev/null
+++ b/webrtc/base/random.cc
@@ -0,0 +1,86 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "webrtc/base/random.h"
+
+#include <math.h>
+
+#include "webrtc/base/checks.h"
+
+namespace webrtc {
+
+Random::Random(uint64_t seed) {
+ RTC_DCHECK(seed != 0x0ull);
+ state_ = seed;
+}
+
+uint32_t Random::Rand(uint32_t t) {
+ // Casting the output to 32 bits will give an almost uniform number.
+ // Pr[x=0] = (2^32-1) / (2^64-1)
+ // Pr[x=k] = 2^32 / (2^64-1) for k!=0
+ // Uniform would be Pr[x=k] = 2^32 / 2^64 for all 32-bit integers k.
+ uint32_t x = NextOutput();
+ // If x / 2^32 is uniform on [0,1), then x / 2^32 * (t+1) is uniform on
+ // the interval [0,t+1), so the integer part is uniform on [0,t].
+ uint64_t result = x * (static_cast<uint64_t>(t) + 1);
+ result >>= 32;
+ return result;
+}
+
+uint32_t Random::Rand(uint32_t low, uint32_t high) {
+ RTC_DCHECK(low <= high);
+ return Rand(high - low) + low;
+}
+
+int32_t Random::Rand(int32_t low, int32_t high) {
+ RTC_DCHECK(low <= high);
+ // We rely on subtraction (and addition) to be the same for signed and
+ // unsigned numbers in two's-complement representation. Thus, although
+ // high - low might be negative as an int, it is the correct difference
+ // when interpreted as an unsigned.
+ return Rand(high - low) + low;
+}
+
+template <>
+float Random::Rand<float>() {
+ double result = NextOutput() - 1;
+ result = result / 0xFFFFFFFFFFFFFFFEull;
+ return static_cast<float>(result);
+}
+
+template <>
+double Random::Rand<double>() {
+ double result = NextOutput() - 1;
+ result = result / 0xFFFFFFFFFFFFFFFEull;
+ return result;
+}
+
+template <>
+bool Random::Rand<bool>() {
+ return Rand(0, 1) == 1;
+}
+
+double Random::Gaussian(double mean, double standard_deviation) {
+ // Creating a Normal distribution variable from two independent uniform
+ // variables based on the Box-Muller transform, which is defined on the
+ // interval (0, 1]. Note that we rely on NextOutput to generate integers
+ // in the range [1, 2^64-1]. Normally this behavior is a bit frustrating,
+ // but here it is exactly what we need.
+ const double kPi = 3.14159265358979323846;
+ double u1 = static_cast<double>(NextOutput()) / 0xFFFFFFFFFFFFFFFFull;
+ double u2 = static_cast<double>(NextOutput()) / 0xFFFFFFFFFFFFFFFFull;
+ return mean + standard_deviation * sqrt(-2 * log(u1)) * cos(2 * kPi * u2);
+}
+
+double Random::Exponential(double lambda) {
+ double uniform = Rand<double>();
+ return -log(uniform) / lambda;
+}
+
+} // namespace webrtc
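As a standalone check of the scaling trick used by Rand(uint32_t t) above (a sketch, not part of the patch): multiplying a uniform 32-bit sample x by t+1 and keeping the high 32 bits computes the integer part of x / 2^32 * (t+1), which by the reasoning in the comment lands uniformly in [0, t].

#include <cassert>
#include <cstdint>

// Same arithmetic as Random::Rand(uint32_t t): high 32 bits of x * (t+1).
uint32_t ScaleTo(uint32_t x, uint32_t t) {
  uint64_t result = static_cast<uint64_t>(x) * (static_cast<uint64_t>(t) + 1);
  return static_cast<uint32_t>(result >> 32);
}

int main() {
  assert(ScaleTo(0x00000000u, 9) == 0);  // Smallest sample maps to 0.
  assert(ScaleTo(0x80000000u, 9) == 5);  // Midpoint: 2^31 * 10 >> 32 == 5.
  assert(ScaleTo(0xFFFFFFFFu, 9) == 9);  // Largest sample maps to t.
  return 0;
}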
diff --git a/webrtc/base/random.h b/webrtc/base/random.h
new file mode 100644
index 0000000000..647b84c9c9
--- /dev/null
+++ b/webrtc/base/random.h
@@ -0,0 +1,82 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_RANDOM_H_
+#define WEBRTC_BASE_RANDOM_H_
+
+#include <limits>
+
+#include "webrtc/typedefs.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/checks.h"
+
+namespace webrtc {
+
+class Random {
+ public:
+ explicit Random(uint64_t seed);
+
+ // Return pseudo-random integer of the specified type.
+ // We need to limit the size to 32 bits to keep the output close to uniform.
+ template <typename T>
+ T Rand() {
+ static_assert(std::numeric_limits<T>::is_integer &&
+ std::numeric_limits<T>::radix == 2 &&
+ std::numeric_limits<T>::digits <= 32,
+ "Rand is only supported for built-in integer types that are "
+ "32 bits or smaller.");
+ return static_cast<T>(NextOutput());
+ }
+
+ // Uniformly distributed pseudo-random number in the interval [0, t].
+ uint32_t Rand(uint32_t t);
+
+ // Uniformly distributed pseudo-random number in the interval [low, high].
+ uint32_t Rand(uint32_t low, uint32_t high);
+
+ // Uniformly distributed pseudo-random number in the interval [low, high].
+ int32_t Rand(int32_t low, int32_t high);
+
+ // Normal Distribution.
+ double Gaussian(double mean, double standard_deviation);
+
+ // Exponential Distribution.
+ double Exponential(double lambda);
+
+ private:
+ // Outputs a nonzero 64-bit random number.
+ uint64_t NextOutput() {
+ state_ ^= state_ >> 12;
+ state_ ^= state_ << 25;
+ state_ ^= state_ >> 27;
+ RTC_DCHECK(state_ != 0x0ULL);
+ return state_ * 2685821657736338717ull;
+ }
+
+ uint64_t state_;
+
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(Random);
+};
+
+// Return pseudo-random number in the interval [0.0, 1.0).
+template <>
+float Random::Rand<float>();
+
+// Return pseudo-random number in the interval [0.0, 1.0).
+template <>
+double Random::Rand<double>();
+
+// Return pseudo-random boolean value.
+template <>
+bool Random::Rand<bool>();
+
+} // namespace webrtc
+
+#endif // WEBRTC_BASE_RANDOM_H_
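NextOutput above is an xorshift64*-style step (shift triple 12/25/27 followed by an odd 64-bit multiplier); the xorshift transform is invertible, so a nonzero state never collapses to zero, which is why the seed must be nonzero. A freestanding sketch (not part of the patch) mirroring it and checking determinism:

#include <cassert>
#include <cstdint>

// Freestanding copy of the generator step used by Random::NextOutput.
uint64_t XorShift64Star(uint64_t* state) {
  *state ^= *state >> 12;
  *state ^= *state << 25;
  *state ^= *state >> 27;
  return *state * 2685821657736338717ull;
}

int main() {
  uint64_t a = 7297352569824ull;  // Same seed as the bucket tests below.
  uint64_t b = 7297352569824ull;
  for (int i = 0; i < 1000; ++i) {
    assert(XorShift64Star(&a) == XorShift64Star(&b));  // Deterministic.
    assert(a != 0);  // The state never becomes zero.
  }
  return 0;
}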
diff --git a/webrtc/base/random_unittest.cc b/webrtc/base/random_unittest.cc
new file mode 100644
index 0000000000..febae1c28f
--- /dev/null
+++ b/webrtc/base/random_unittest.cc
@@ -0,0 +1,302 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <math.h>
+
+#include <limits>
+#include <vector>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/random.h"
+
+namespace webrtc {
+
+namespace {
+// Computes the positive remainder of x/n.
+template <typename T>
+T fdiv_remainder(T x, T n) {
+ RTC_CHECK_GE(n, static_cast<T>(0));
+ T remainder = x % n;
+ if (remainder < 0)
+ remainder += n;
+ return remainder;
+}
+} // namespace
+
+// Sample a number of random integers of type T. Divide them into buckets
+// based on the remainder when dividing by bucket_count and check that each
+// bucket gets roughly the expected number of elements.
+template <typename T>
+void UniformBucketTest(T bucket_count, int samples, Random* prng) {
+ std::vector<int> buckets(bucket_count, 0);
+
+ uint64_t total_values = 1ull << (std::numeric_limits<T>::digits +
+ std::numeric_limits<T>::is_signed);
+ T upper_limit =
+ std::numeric_limits<T>::max() -
+ static_cast<T>(total_values % static_cast<uint64_t>(bucket_count));
+ ASSERT_GT(upper_limit, std::numeric_limits<T>::max() / 2);
+
+ for (int i = 0; i < samples; i++) {
+ T sample;
+ do {
+ // We exclude a few numbers from the range so that it is divisible by
+ // the number of buckets. If we are unlucky and hit one of the excluded
+ // numbers we just resample. Note that if the number of buckets is a
+ // power of 2, then we don't have to exclude anything.
+ sample = prng->Rand<T>();
+ } while (sample > upper_limit);
+ buckets[fdiv_remainder(sample, bucket_count)]++;
+ }
+
+ for (T i = 0; i < bucket_count; i++) {
+ // Expect the result to be within 3 standard deviations of the mean.
+ EXPECT_NEAR(buckets[i], samples / bucket_count,
+ 3 * sqrt(samples / bucket_count));
+ }
+}
+
+TEST(RandomNumberGeneratorTest, BucketTestSignedChar) {
+ Random prng(7297352569824ull);
+ UniformBucketTest<signed char>(64, 640000, &prng);
+ UniformBucketTest<signed char>(11, 440000, &prng);
+ UniformBucketTest<signed char>(3, 270000, &prng);
+}
+
+TEST(RandomNumberGeneratorTest, BucketTestUnsignedChar) {
+ Random prng(7297352569824ull);
+ UniformBucketTest<unsigned char>(64, 640000, &prng);
+ UniformBucketTest<unsigned char>(11, 440000, &prng);
+ UniformBucketTest<unsigned char>(3, 270000, &prng);
+}
+
+TEST(RandomNumberGeneratorTest, BucketTestSignedShort) {
+ Random prng(7297352569824ull);
+ UniformBucketTest<int16_t>(64, 640000, &prng);
+ UniformBucketTest<int16_t>(11, 440000, &prng);
+ UniformBucketTest<int16_t>(3, 270000, &prng);
+}
+
+TEST(RandomNumberGeneratorTest, BucketTestUnsignedShort) {
+ Random prng(7297352569824ull);
+ UniformBucketTest<uint16_t>(64, 640000, &prng);
+ UniformBucketTest<uint16_t>(11, 440000, &prng);
+ UniformBucketTest<uint16_t>(3, 270000, &prng);
+}
+
+TEST(RandomNumberGeneratorTest, BucketTestSignedInt) {
+ Random prng(7297352569824ull);
+ UniformBucketTest<signed int>(64, 640000, &prng);
+ UniformBucketTest<signed int>(11, 440000, &prng);
+ UniformBucketTest<signed int>(3, 270000, &prng);
+}
+
+TEST(RandomNumberGeneratorTest, BucketTestUnsignedInt) {
+ Random prng(7297352569824ull);
+ UniformBucketTest<unsigned int>(64, 640000, &prng);
+ UniformBucketTest<unsigned int>(11, 440000, &prng);
+ UniformBucketTest<unsigned int>(3, 270000, &prng);
+}
+
+// The range of the random numbers is divided into bucket_count intervals
+// of consecutive numbers. Check that approximately equally many numbers
+// from each interval are generated.
+void BucketTestSignedInterval(unsigned int bucket_count,
+ unsigned int samples,
+ int32_t low,
+ int32_t high,
+ int sigma_level,
+ Random* prng) {
+ std::vector<unsigned int> buckets(bucket_count, 0);
+
+ ASSERT_GE(high, low);
+ ASSERT_GE(bucket_count, 2u);
+ uint32_t interval = static_cast<uint32_t>(high - low + 1);
+ uint32_t numbers_per_bucket;
+ if (interval == 0) {
+ // The computation high - low + 1 should be 2^32 but overflowed.
+ // Hence, bucket_count must be a power of 2.
+ ASSERT_EQ(bucket_count & (bucket_count - 1), 0u);
+ numbers_per_bucket = (0x80000000u / bucket_count) * 2;
+ } else {
+ ASSERT_EQ(interval % bucket_count, 0u);
+ numbers_per_bucket = interval / bucket_count;
+ }
+
+ for (unsigned int i = 0; i < samples; i++) {
+ int32_t sample = prng->Rand(low, high);
+ EXPECT_LE(low, sample);
+ EXPECT_GE(high, sample);
+ buckets[static_cast<uint32_t>(sample - low) / numbers_per_bucket]++;
+ }
+
+ for (unsigned int i = 0; i < bucket_count; i++) {
+ // Expect the result to be within 3 standard deviations of the mean,
+ // or more generally, within sigma_level standard deviations of the mean.
+ double mean = static_cast<double>(samples) / bucket_count;
+ EXPECT_NEAR(buckets[i], mean, sigma_level * sqrt(mean));
+ }
+}
+
+// The range of the random numbers is divided into bucket_count intervals
+// of consecutive numbers. Check that approximately equally many numbers
+// from each interval are generated.
+void BucketTestUnsignedInterval(unsigned int bucket_count,
+ unsigned int samples,
+ uint32_t low,
+ uint32_t high,
+ int sigma_level,
+ Random* prng) {
+ std::vector<unsigned int> buckets(bucket_count, 0);
+
+ ASSERT_GE(high, low);
+ ASSERT_GE(bucket_count, 2u);
+ uint32_t interval = static_cast<uint32_t>(high - low + 1);
+ uint32_t numbers_per_bucket;
+ if (interval == 0) {
+ // The computation high - low + 1 should be 2^32 but overflowed.
+ // Hence, bucket_count must be a power of 2.
+ ASSERT_EQ(bucket_count & (bucket_count - 1), 0u);
+ numbers_per_bucket = (0x80000000u / bucket_count) * 2;
+ } else {
+ ASSERT_EQ(interval % bucket_count, 0u);
+ numbers_per_bucket = interval / bucket_count;
+ }
+
+ for (unsigned int i = 0; i < samples; i++) {
+ uint32_t sample = prng->Rand(low, high);
+ EXPECT_LE(low, sample);
+ EXPECT_GE(high, sample);
+ buckets[static_cast<uint32_t>(sample - low) / numbers_per_bucket]++;
+ }
+
+ for (unsigned int i = 0; i < bucket_count; i++) {
+ // Expect the result to be within 3 standard deviations of the mean,
+ // or more generally, within sigma_level standard deviations of the mean.
+ double mean = static_cast<double>(samples) / bucket_count;
+ EXPECT_NEAR(buckets[i], mean, sigma_level * sqrt(mean));
+ }
+}
+
+TEST(RandomNumberGeneratorTest, UniformUnsignedInterval) {
+ Random prng(299792458ull);
+ BucketTestUnsignedInterval(2, 100000, 0, 1, 3, &prng);
+ BucketTestUnsignedInterval(7, 100000, 1, 14, 3, &prng);
+ BucketTestUnsignedInterval(11, 100000, 1000, 1010, 3, &prng);
+ BucketTestUnsignedInterval(100, 100000, 0, 99, 3, &prng);
+ BucketTestUnsignedInterval(2, 100000, 0, 4294967295, 3, &prng);
+ BucketTestUnsignedInterval(17, 100000, 455, 2147484110, 3, &prng);
+ // 99.7% of all samples will be within 3 standard deviations of the mean,
+ // but since we test 1000 buckets we allow an interval of 4 sigma.
+ BucketTestUnsignedInterval(1000, 1000000, 0, 2147483999, 4, &prng);
+}
+
+TEST(RandomNumberGeneratorTest, UniformSignedInterval) {
+ Random prng(66260695729ull);
+ BucketTestSignedInterval(2, 100000, 0, 1, 3, &prng);
+ BucketTestSignedInterval(7, 100000, -2, 4, 3, &prng);
+ BucketTestSignedInterval(11, 100000, 1000, 1010, 3, &prng);
+ BucketTestSignedInterval(100, 100000, 0, 99, 3, &prng);
+ BucketTestSignedInterval(2, 100000, std::numeric_limits<int32_t>::min(),
+ std::numeric_limits<int32_t>::max(), 3, &prng);
+ BucketTestSignedInterval(17, 100000, -1073741826, 1073741829, 3, &prng);
+ // 99.7% of all samples will be within 3 standard deviations of the mean,
+ // but since we test 1000 buckets we allow an interval of 4 sigma.
+ BucketTestSignedInterval(1000, 1000000, -352, 2147483647, 4, &prng);
+}
+
+// The range of the random numbers is divided into bucket_count intervals
+// of consecutive numbers. Check that approximately equally many numbers
+// from each interval are generated.
+void BucketTestFloat(unsigned int bucket_count,
+ unsigned int samples,
+ int sigma_level,
+ Random* prng) {
+ ASSERT_GE(bucket_count, 2u);
+ std::vector<unsigned int> buckets(bucket_count, 0);
+
+ for (unsigned int i = 0; i < samples; i++) {
+ uint32_t sample = bucket_count * prng->Rand<float>();
+ EXPECT_LE(0u, sample);
+ EXPECT_GE(bucket_count - 1, sample);
+ buckets[sample]++;
+ }
+
+ for (unsigned int i = 0; i < bucket_count; i++) {
+ // Expect the result to be within 3 standard deviations of the mean,
+ // or more generally, within sigma_level standard deviations of the mean.
+ double mean = static_cast<double>(samples) / bucket_count;
+ EXPECT_NEAR(buckets[i], mean, sigma_level * sqrt(mean));
+ }
+}
+
+TEST(RandomNumberGeneratorTest, UniformFloatInterval) {
+ Random prng(1380648813ull);
+ BucketTestFloat(100, 100000, 3, &prng);
+ // 99.7% of all samples will be within 3 standard deviations of the mean,
+ // but since we test 1000 buckets we allow an interval of 4 sigma.
+ // BucketTestSignedInterval(1000, 1000000, -352, 2147483647, 4, &prng);
+}
+
+TEST(RandomNumberGeneratorTest, SignedHasSameBitPattern) {
+ Random prng_signed(66738480ull), prng_unsigned(66738480ull);
+
+ for (int i = 0; i < 1000; i++) {
+ signed int s = prng_signed.Rand<signed int>();
+ unsigned int u = prng_unsigned.Rand<unsigned int>();
+ EXPECT_EQ(u, static_cast<unsigned int>(s));
+ }
+
+ for (int i = 0; i < 1000; i++) {
+ int16_t s = prng_signed.Rand<int16_t>();
+ uint16_t u = prng_unsigned.Rand<uint16_t>();
+ EXPECT_EQ(u, static_cast<uint16_t>(s));
+ }
+
+ for (int i = 0; i < 1000; i++) {
+ signed char s = prng_signed.Rand<signed char>();
+ unsigned char u = prng_unsigned.Rand<unsigned char>();
+ EXPECT_EQ(u, static_cast<unsigned char>(s));
+ }
+}
+
+TEST(RandomNumberGeneratorTest, Gaussian) {
+ const int kN = 100000;
+ const int kBuckets = 100;
+ const double kMean = 49;
+ const double kStddev = 10;
+
+ Random prng(1256637061);
+
+ std::vector<unsigned int> buckets(kBuckets, 0);
+ for (int i = 0; i < kN; i++) {
+ int index = prng.Gaussian(kMean, kStddev) + 0.5;
+ if (index >= 0 && index < kBuckets) {
+ buckets[index]++;
+ }
+ }
+
+ const double kPi = 3.14159265358979323846;
+ const double kScale = 1 / (kStddev * sqrt(2.0 * kPi));
+ const double kDiv = -2.0 * kStddev * kStddev;
+ for (int n = 0; n < kBuckets; ++n) {
+ // Use Simpson's rule to estimate the probability that a random Gaussian
+ // sample is in the interval [n-0.5, n+0.5].
+ double f_left = kScale * exp((n - kMean - 0.5) * (n - kMean - 0.5) / kDiv);
+ double f_mid = kScale * exp((n - kMean) * (n - kMean) / kDiv);
+ double f_right = kScale * exp((n - kMean + 0.5) * (n - kMean + 0.5) / kDiv);
+ double normal_dist = (f_left + 4 * f_mid + f_right) / 6;
+ // Expect the number of samples to be within 3 standard deviations
+ // (rounded up) of the expected number of samples in the bucket.
+ EXPECT_NEAR(buckets[n], kN * normal_dist, 3 * sqrt(kN * normal_dist) + 1);
+ }
+}
+
+} // namespace webrtc
diff --git a/webrtc/base/ratetracker.cc b/webrtc/base/ratetracker.cc
index 5cb449016e..35521a8d3d 100644
--- a/webrtc/base/ratetracker.cc
+++ b/webrtc/base/ratetracker.cc
@@ -73,8 +73,9 @@ double RateTracker::ComputeRateForInterval(
size_t start_bucket = NextBucketIndex(current_bucket_ + buckets_to_skip);
// Only count a portion of the first bucket according to how much of the
// first bucket is within the current interval.
- size_t total_samples = sample_buckets_[start_bucket] *
- (bucket_milliseconds_ - milliseconds_to_skip) /
+ size_t total_samples = ((sample_buckets_[start_bucket] *
+ (bucket_milliseconds_ - milliseconds_to_skip)) +
+ (bucket_milliseconds_ >> 1)) /
bucket_milliseconds_;
// All other buckets in the interval are counted in their entirety.
for (size_t i = NextBucketIndex(start_bucket);
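The RateTracker change above switches the first-bucket proration from truncating division to round-to-nearest by adding half the divisor before dividing. The idiom in isolation (a sketch, not part of the patch):

#include <cassert>
#include <cstdint>

// Round-to-nearest unsigned division: bias the numerator by divisor / 2.
uint64_t DivRoundNearest(uint64_t numerator, uint64_t divisor) {
  return (numerator + (divisor >> 1)) / divisor;
}

int main() {
  assert(7 / 2 == 3);                  // Plain division truncates 3.5 to 3.
  assert(DivRoundNearest(7, 2) == 4);  // 3.5 rounds to 4.
  assert(DivRoundNearest(9, 4) == 2);  // 2.25 rounds to 2.
  return 0;
}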
diff --git a/webrtc/base/rtccertificate.cc b/webrtc/base/rtccertificate.cc
index a176d9080b..7b764bd72e 100644
--- a/webrtc/base/rtccertificate.cc
+++ b/webrtc/base/rtccertificate.cc
@@ -11,7 +11,6 @@
#include "webrtc/base/rtccertificate.h"
#include "webrtc/base/checks.h"
-#include "webrtc/base/timeutils.h"
namespace rtc {
@@ -28,13 +27,16 @@ RTCCertificate::RTCCertificate(SSLIdentity* identity)
RTCCertificate::~RTCCertificate() {
}
-uint64_t RTCCertificate::expires_timestamp_ns() const {
- // TODO(hbos): Update once SSLIdentity/SSLCertificate supports expires field.
- return 0;
+uint64_t RTCCertificate::Expires() const {
+ int64_t expires = ssl_certificate().CertificateExpirationTime();
+ if (expires != -1)
+ return static_cast<uint64_t>(expires) * kNumMillisecsPerSec;
+ // If the expiration time could not be retrieved, return an expired timestamp.
+ return 0; // = 1970-01-01
}
-bool RTCCertificate::HasExpired() const {
- return expires_timestamp_ns() <= TimeNanos();
+bool RTCCertificate::HasExpired(uint64_t now) const {
+ return Expires() <= now;
}
const SSLCertificate& RTCCertificate::ssl_certificate() const {
diff --git a/webrtc/base/rtccertificate.h b/webrtc/base/rtccertificate.h
index d238938ae1..600739bc86 100644
--- a/webrtc/base/rtccertificate.h
+++ b/webrtc/base/rtccertificate.h
@@ -27,8 +27,11 @@ class RTCCertificate : public RefCountInterface {
// Takes ownership of |identity|.
static scoped_refptr<RTCCertificate> Create(scoped_ptr<SSLIdentity> identity);
- uint64_t expires_timestamp_ns() const;
- bool HasExpired() const;
+ // Returns the expiration time in ms relative to epoch, 1970-01-01T00:00:00Z.
+ uint64_t Expires() const;
+ // Checks if the certificate has expired, where |now| is expressed in ms
+ // relative to epoch, 1970-01-01T00:00:00Z.
+ bool HasExpired(uint64_t now) const;
const SSLCertificate& ssl_certificate() const;
// TODO(hbos): If possible, remove once RTCCertificate and its
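Given the new contract above, where Expires() and HasExpired(now) both work in milliseconds since the Unix epoch, a call site might look like this sketch (not part of the patch; it reuses the TimeNanos()/kNumNanosecsPerSec conversion the unit tests below rely on, so the check has seconds precision):

// |certificate| is a scoped_refptr<RTCCertificate>.
uint64_t now_ms =
    (rtc::TimeNanos() / rtc::kNumNanosecsPerSec) * rtc::kNumMillisecsPerSec;
if (certificate->HasExpired(now_ms)) {
  // Equivalent to: certificate->Expires() <= now_ms.
}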
diff --git a/webrtc/base/rtccertificate_unittests.cc b/webrtc/base/rtccertificate_unittests.cc
new file mode 100644
index 0000000000..84c854478b
--- /dev/null
+++ b/webrtc/base/rtccertificate_unittests.cc
@@ -0,0 +1,118 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <utility>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/fakesslidentity.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/rtccertificate.h"
+#include "webrtc/base/safe_conversions.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/sslidentity.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/base/timeutils.h"
+
+namespace rtc {
+
+namespace {
+
+static const char* kTestCertCommonName = "RTCCertificateTest's certificate";
+
+} // namespace
+
+class RTCCertificateTest : public testing::Test {
+ public:
+ RTCCertificateTest() {}
+ ~RTCCertificateTest() {}
+
+ protected:
+ // Timestamp note:
+ // All timestamps in this unittest are expressed in number of seconds since
+ // epoch, 1970-01-01T00:00:00Z (UTC). The RTCCertificate interface uses ms,
+ // but only seconds-precision is supported by SSLCertificate. To make the
+ // tests clearer we convert everything to seconds since the precision matters
+ // when generating certificates or comparing timestamps.
+ // As a result, ExpiresSeconds and HasExpiredSeconds are used instead of
+ // RTCCertificate::Expires and ::HasExpired for ms -> s conversion.
+
+ uint64_t NowSeconds() const {
+ return TimeNanos() / kNumNanosecsPerSec;
+ }
+
+ uint64_t ExpiresSeconds(const scoped_refptr<RTCCertificate>& cert) const {
+ uint64_t exp_ms = cert->Expires();
+ uint64_t exp_s = exp_ms / kNumMillisecsPerSec;
+ // Make sure this did not result in loss of precision.
+ RTC_CHECK_EQ(exp_s * kNumMillisecsPerSec, exp_ms);
+ return exp_s;
+ }
+
+ bool HasExpiredSeconds(const scoped_refptr<RTCCertificate>& cert,
+ uint64_t now_s) const {
+ return cert->HasExpired(now_s * kNumMillisecsPerSec);
+ }
+
+ // An RTC_CHECK ensures that |expires_s| is in the valid range of time_t as
+ // is required by SSLIdentityParams. On some 32-bit systems time_t is limited
+ // to < 2^31. On such systems this will fail for expiration times of year 2038
+ // or later.
+ scoped_refptr<RTCCertificate> GenerateCertificateWithExpires(
+ uint64_t expires_s) const {
+ RTC_CHECK(IsValueInRangeForNumericType<time_t>(expires_s));
+
+ SSLIdentityParams params;
+ params.common_name = kTestCertCommonName;
+ params.not_before = 0;
+ params.not_after = static_cast<time_t>(expires_s);
+ // Certificate type does not matter for our purposes, using ECDSA because it
+ // is fast to generate.
+ params.key_params = KeyParams::ECDSA();
+
+ scoped_ptr<SSLIdentity> identity(SSLIdentity::GenerateForTest(params));
+ return RTCCertificate::Create(std::move(identity));
+ }
+};
+
+TEST_F(RTCCertificateTest, NewCertificateNotExpired) {
+ // Generate a real certificate without specifying the expiration time.
+ // Certificate type doesn't matter, using ECDSA because it's fast to generate.
+ scoped_ptr<SSLIdentity> identity(
+ SSLIdentity::Generate(kTestCertCommonName, KeyParams::ECDSA()));
+ scoped_refptr<RTCCertificate> certificate =
+ RTCCertificate::Create(std::move(identity));
+
+ uint64_t now = NowSeconds();
+ EXPECT_FALSE(HasExpiredSeconds(certificate, now));
+ // Even without specifying the expiration time we would expect it to be valid
+ // for at least half an hour.
+ EXPECT_FALSE(HasExpiredSeconds(certificate, now + 30*60));
+}
+
+TEST_F(RTCCertificateTest, UsesExpiresAskedFor) {
+ uint64_t now = NowSeconds();
+ scoped_refptr<RTCCertificate> certificate =
+ GenerateCertificateWithExpires(now);
+ EXPECT_EQ(now, ExpiresSeconds(certificate));
+}
+
+TEST_F(RTCCertificateTest, ExpiresInOneSecond) {
+ // Generate a certificate that expires in 1s.
+ uint64_t now = NowSeconds();
+ scoped_refptr<RTCCertificate> certificate =
+ GenerateCertificateWithExpires(now + 1);
+ // Now it should not have expired.
+ EXPECT_FALSE(HasExpiredSeconds(certificate, now));
+ // In 2s it should have expired.
+ EXPECT_TRUE(HasExpiredSeconds(certificate, now + 2));
+}
+
+} // namespace rtc
diff --git a/webrtc/base/scoped_ptr.h b/webrtc/base/scoped_ptr.h
index 4266d052b1..c4603c3208 100644
--- a/webrtc/base/scoped_ptr.h
+++ b/webrtc/base/scoped_ptr.h
@@ -42,55 +42,39 @@
// }
//
// These scopers also implement part of the functionality of C++11 unique_ptr
-// in that they are "movable but not copyable." You can use the scopers in
-// the parameter and return types of functions to signify ownership transfer
-// in to and out of a function. When calling a function that has a scoper
-// as the argument type, it must be called with the result of an analogous
-// scoper's Pass() function or another function that generates a temporary;
-// passing by copy will NOT work. Here is an example using scoped_ptr:
+// in that they are "movable but not copyable." You can use the scopers in the
+// parameter and return types of functions to signify ownership transfer in to
+// and out of a function. When calling a function that has a scoper as the
+// argument type, it must be called with the result of calling std::move on an
+// analogous scoper, or another function that generates a temporary; passing by
+// copy will NOT work. Here is an example using scoped_ptr:
//
// void TakesOwnership(scoped_ptr<Foo> arg) {
// // Do something with arg
// }
// scoped_ptr<Foo> CreateFoo() {
-// // No need for calling Pass() because we are constructing a temporary
+// // No need for calling std::move because we are constructing a temporary
// // for the return value.
// return scoped_ptr<Foo>(new Foo("new"));
// }
// scoped_ptr<Foo> PassThru(scoped_ptr<Foo> arg) {
-// return arg.Pass();
+// return std::move(arg);
// }
//
// {
// scoped_ptr<Foo> ptr(new Foo("yay")); // ptr manages Foo("yay").
-// TakesOwnership(ptr.Pass()); // ptr no longer owns Foo("yay").
+// TakesOwnership(std::move(ptr)); // ptr no longer owns Foo("yay").
// scoped_ptr<Foo> ptr2 = CreateFoo(); // ptr2 owns the return Foo.
// scoped_ptr<Foo> ptr3 = // ptr3 now owns what was in ptr2.
-// PassThru(ptr2.Pass()); // ptr2 is correspondingly nullptr.
+// PassThru(std::move(ptr2)); // ptr2 is correspondingly nullptr.
// }
//
-// Notice that if you do not call Pass() when returning from PassThru(), or
+// Notice that if you do not call std::move when returning from PassThru(), or
// when invoking TakesOwnership(), the code will not compile because scopers
// are not copyable; they only implement move semantics which require calling
-// the Pass() function to signify a destructive transfer of state. CreateFoo()
-// is different though because we are constructing a temporary on the return
-// line and thus can avoid needing to call Pass().
-//
-// Pass() properly handles upcast in initialization, i.e. you can use a
-// scoped_ptr<Child> to initialize a scoped_ptr<Parent>:
-//
-// scoped_ptr<Foo> foo(new Foo());
-// scoped_ptr<FooParent> parent(foo.Pass());
-//
-// PassAs<>() should be used to upcast return value in return statement:
-//
-// scoped_ptr<Foo> CreateFoo() {
-// scoped_ptr<FooChild> result(new FooChild());
-// return result.PassAs<Foo>();
-// }
-//
-// Note that PassAs<>() is implemented only for scoped_ptr<T>, but not for
-// scoped_ptr<T[]>. This is because casting array pointers may not be safe.
+// std::move to signify a destructive transfer of state. CreateFoo() is
+// different though because we are constructing a temporary on the return line
+// and thus can avoid needing to call std::move.
#ifndef WEBRTC_BASE_SCOPED_PTR_H__
#define WEBRTC_BASE_SCOPED_PTR_H__
@@ -103,8 +87,10 @@
#include <stdlib.h>
#include <algorithm> // For std::swap().
+#include <cstddef>
#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/deprecation.h"
#include "webrtc/base/template_util.h"
#include "webrtc/typedefs.h"
@@ -342,7 +328,7 @@ class scoped_ptr {
scoped_ptr(element_type* p, const D& d) : impl_(p, d) {}
// Constructor. Allows construction from a nullptr.
- scoped_ptr(decltype(nullptr)) : impl_(nullptr) {}
+ scoped_ptr(std::nullptr_t) : impl_(nullptr) {}
// Constructor. Allows construction from a scoped_ptr rvalue for a
// convertible type and deleter.
@@ -379,7 +365,7 @@ class scoped_ptr {
// operator=. Allows assignment from a nullptr. Deletes the currently owned
// object, if any.
- scoped_ptr& operator=(decltype(nullptr)) {
+ scoped_ptr& operator=(std::nullptr_t) {
reset();
return *this;
}
@@ -389,7 +375,10 @@ class scoped_ptr {
scoped_ptr& operator=(const scoped_ptr& other) = delete;
// Get an rvalue reference. (sp.Pass() does the same thing as std::move(sp).)
- scoped_ptr&& Pass() { return static_cast<scoped_ptr&&>(*this); }
+ // Deprecated; remove in March 2016 (bug 5373).
+ RTC_DEPRECATED scoped_ptr&& Pass() {
+ return std::move(*this);
+ }
// Reset. Deletes the currently owned object, if any.
// Then takes ownership of a new object, if given.
@@ -499,7 +488,7 @@ class scoped_ptr<T[], D> {
explicit scoped_ptr(element_type* array) : impl_(array) {}
// Constructor. Allows construction from a nullptr.
- scoped_ptr(decltype(nullptr)) : impl_(nullptr) {}
+ scoped_ptr(std::nullptr_t) : impl_(nullptr) {}
// Constructor. Allows construction from a scoped_ptr rvalue.
scoped_ptr(scoped_ptr&& other) : impl_(&other.impl_) {}
@@ -512,7 +501,7 @@ class scoped_ptr<T[], D> {
// operator=. Allows assignment from a nullptr. Deletes the currently owned
// array, if any.
- scoped_ptr& operator=(decltype(nullptr)) {
+ scoped_ptr& operator=(std::nullptr_t) {
reset();
return *this;
}
@@ -522,7 +511,10 @@ class scoped_ptr<T[], D> {
scoped_ptr& operator=(const scoped_ptr& other) = delete;
// Get an rvalue reference. (sp.Pass() does the same thing as std::move(sp).)
- scoped_ptr&& Pass() { return static_cast<scoped_ptr&&>(*this); }
+ // Deprecated; remove in March 2016 (bug 5373).
+ RTC_DEPRECATED scoped_ptr&& Pass() {
+ return std::move(*this);
+ }
// Reset. Deletes the currently owned array, if any.
// Then takes ownership of a new object, if given.
diff --git a/webrtc/base/sec_buffer.h b/webrtc/base/sec_buffer.h
index d4cda00d46..e6ffea4eb7 100644
--- a/webrtc/base/sec_buffer.h
+++ b/webrtc/base/sec_buffer.h
@@ -119,7 +119,7 @@ class CSecBufferBundle : public SecBufferBundleBase {
}
// Accessor for the descriptor
- const PSecBufferDesc desc() const {
+ PSecBufferDesc desc() const {
return &desc_;
}
diff --git a/webrtc/base/sharedexclusivelock_unittest.cc b/webrtc/base/sharedexclusivelock_unittest.cc
index 2857e00449..9b64ed760a 100644
--- a/webrtc/base/sharedexclusivelock_unittest.cc
+++ b/webrtc/base/sharedexclusivelock_unittest.cc
@@ -16,7 +16,6 @@
#include "webrtc/base/sharedexclusivelock.h"
#include "webrtc/base/thread.h"
#include "webrtc/base/timeutils.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
namespace rtc {
diff --git a/webrtc/base/signalthread.cc b/webrtc/base/signalthread.cc
index d03f386416..75f7b77315 100644
--- a/webrtc/base/signalthread.cc
+++ b/webrtc/base/signalthread.cc
@@ -39,13 +39,6 @@ bool SignalThread::SetName(const std::string& name, const void* obj) {
return worker_.SetName(name, obj);
}
-bool SignalThread::SetPriority(ThreadPriority priority) {
- EnterExit ee(this);
- ASSERT(main_->IsCurrent());
- ASSERT(kInit == state_);
- return worker_.SetPriority(priority);
-}
-
void SignalThread::Start() {
EnterExit ee(this);
ASSERT(main_->IsCurrent());
diff --git a/webrtc/base/signalthread.h b/webrtc/base/signalthread.h
index 4dda88966c..ec250c6aad 100644
--- a/webrtc/base/signalthread.h
+++ b/webrtc/base/signalthread.h
@@ -45,9 +45,6 @@ class SignalThread
// Context: Main Thread. Call before Start to change the worker's name.
bool SetName(const std::string& name, const void* obj);
- // Context: Main Thread. Call before Start to change the worker's priority.
- bool SetPriority(ThreadPriority priority);
-
// Context: Main Thread. Call to begin the worker thread.
void Start();
diff --git a/webrtc/base/signalthread_unittest.cc b/webrtc/base/signalthread_unittest.cc
index fe6c6023a6..a583aefcb5 100644
--- a/webrtc/base/signalthread_unittest.cc
+++ b/webrtc/base/signalthread_unittest.cc
@@ -11,7 +11,6 @@
#include "webrtc/base/gunit.h"
#include "webrtc/base/signalthread.h"
#include "webrtc/base/thread.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
using namespace rtc;
diff --git a/webrtc/base/sigslot.h b/webrtc/base/sigslot.h
index d9b12b04bc..a5fd5f79af 100644
--- a/webrtc/base/sigslot.h
+++ b/webrtc/base/sigslot.h
@@ -532,7 +532,7 @@ namespace sigslot {
m_connected_slots.erase(m_connected_slots.begin(), m_connected_slots.end());
}
-#ifdef _DEBUG
+#if !defined(NDEBUG)
bool connected(has_slots_interface* pclass)
{
lock_block<mt_policy> lock(this);
@@ -686,7 +686,7 @@ namespace sigslot {
m_connected_slots.erase(m_connected_slots.begin(), m_connected_slots.end());
}
-#ifdef _DEBUG
+#if !defined(NDEBUG)
bool connected(has_slots_interface* pclass)
{
lock_block<mt_policy> lock(this);
@@ -825,7 +825,7 @@ namespace sigslot {
m_connected_slots.erase(m_connected_slots.begin(), m_connected_slots.end());
}
-#ifdef _DEBUG
+#if !defined(NDEBUG)
bool connected(has_slots_interface* pclass)
{
lock_block<mt_policy> lock(this);
@@ -963,7 +963,7 @@ namespace sigslot {
m_connected_slots.erase(m_connected_slots.begin(), m_connected_slots.end());
}
-#ifdef _DEBUG
+#if !defined(NDEBUG)
bool connected(has_slots_interface* pclass)
{
lock_block<mt_policy> lock(this);
@@ -1101,7 +1101,7 @@ namespace sigslot {
m_connected_slots.erase(m_connected_slots.begin(), m_connected_slots.end());
}
-#ifdef _DEBUG
+#if !defined(NDEBUG)
bool connected(has_slots_interface* pclass)
{
lock_block<mt_policy> lock(this);
@@ -1241,7 +1241,7 @@ namespace sigslot {
m_connected_slots.erase(m_connected_slots.begin(), m_connected_slots.end());
}
-#ifdef _DEBUG
+#if !defined(NDEBUG)
bool connected(has_slots_interface* pclass)
{
lock_block<mt_policy> lock(this);
@@ -1381,7 +1381,7 @@ namespace sigslot {
m_connected_slots.erase(m_connected_slots.begin(), m_connected_slots.end());
}
-#ifdef _DEBUG
+#if !defined(NDEBUG)
bool connected(has_slots_interface* pclass)
{
lock_block<mt_policy> lock(this);
@@ -1521,7 +1521,7 @@ namespace sigslot {
m_connected_slots.erase(m_connected_slots.begin(), m_connected_slots.end());
}
-#ifdef _DEBUG
+#if !defined(NDEBUG)
bool connected(has_slots_interface* pclass)
{
lock_block<mt_policy> lock(this);
@@ -1662,7 +1662,7 @@ namespace sigslot {
m_connected_slots.erase(m_connected_slots.begin(), m_connected_slots.end());
}
-#ifdef _DEBUG
+#if !defined(NDEBUG)
bool connected(has_slots_interface* pclass)
{
lock_block<mt_policy> lock(this);
diff --git a/webrtc/base/socket_unittest.cc b/webrtc/base/socket_unittest.cc
index d078d7cd17..8143823b86 100644
--- a/webrtc/base/socket_unittest.cc
+++ b/webrtc/base/socket_unittest.cc
@@ -10,6 +10,7 @@
#include "webrtc/base/socket_unittest.h"
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/asyncudpsocket.h"
#include "webrtc/base/gunit.h"
#include "webrtc/base/nethelpers.h"
@@ -827,7 +828,7 @@ void SocketTest::SingleFlowControlCallbackInternal(const IPAddress& loopback) {
// Fill the socket buffer.
char buf[1024 * 16] = {0};
int sends = 0;
- while (++sends && accepted->Send(&buf, ARRAY_SIZE(buf)) != -1) {}
+ while (++sends && accepted->Send(&buf, arraysize(buf)) != -1) {}
EXPECT_TRUE(accepted->IsBlocking());
// Wait until data is available.
@@ -835,7 +836,7 @@ void SocketTest::SingleFlowControlCallbackInternal(const IPAddress& loopback) {
// Pull data.
for (int i = 0; i < sends; ++i) {
- client->Recv(buf, ARRAY_SIZE(buf));
+ client->Recv(buf, arraysize(buf));
}
// Expect at least one additional writable callback.
@@ -845,7 +846,7 @@ void SocketTest::SingleFlowControlCallbackInternal(const IPAddress& loopback) {
// callbacks.
int extras = 0;
for (int i = 0; i < 100; ++i) {
- accepted->Send(&buf, ARRAY_SIZE(buf));
+ accepted->Send(&buf, arraysize(buf));
rtc::Thread::Current()->ProcessMessages(1);
if (sink.Check(accepted.get(), testing::SSE_WRITE)) {
extras++;
diff --git a/webrtc/base/socket_unittest.h b/webrtc/base/socket_unittest.h
index d368afb3f5..e4a6b32705 100644
--- a/webrtc/base/socket_unittest.h
+++ b/webrtc/base/socket_unittest.h
@@ -21,8 +21,9 @@ namespace rtc {
// socketserver, and call the SocketTest test methods.
class SocketTest : public testing::Test {
protected:
- SocketTest() : ss_(NULL), kIPv4Loopback(INADDR_LOOPBACK),
- kIPv6Loopback(in6addr_loopback) {}
+ SocketTest() : kIPv4Loopback(INADDR_LOOPBACK),
+ kIPv6Loopback(in6addr_loopback),
+ ss_(nullptr) {}
virtual void SetUp() { ss_ = Thread::Current()->socketserver(); }
void TestConnectIPv4();
void TestConnectIPv6();
@@ -57,6 +58,10 @@ class SocketTest : public testing::Test {
void TestGetSetOptionsIPv4();
void TestGetSetOptionsIPv6();
+ static const int kTimeout = 5000; // ms
+ const IPAddress kIPv4Loopback;
+ const IPAddress kIPv6Loopback;
+
private:
void ConnectInternal(const IPAddress& loopback);
void ConnectWithDnsLookupInternal(const IPAddress& loopback,
@@ -77,12 +82,13 @@ class SocketTest : public testing::Test {
void UdpReadyToSend(const IPAddress& loopback);
void GetSetOptionsInternal(const IPAddress& loopback);
- static const int kTimeout = 5000; // ms
SocketServer* ss_;
- const IPAddress kIPv4Loopback;
- const IPAddress kIPv6Loopback;
};
+// For unbound sockets, GetLocalAddress / GetRemoteAddress return AF_UNSPEC
+// values on Windows, but an empty address of the same family on Linux/MacOS X.
+bool IsUnspecOrEmptyIP(const IPAddress& address);
+
} // namespace rtc
#endif // WEBRTC_BASE_SOCKET_UNITTEST_H_
diff --git a/webrtc/base/socketadapters.cc b/webrtc/base/socketadapters.cc
index af2efb82c7..2b513dca63 100644
--- a/webrtc/base/socketadapters.cc
+++ b/webrtc/base/socketadapters.cc
@@ -688,7 +688,7 @@ void AsyncSocksProxySocket::SendConnect() {
request.WriteUInt8(5); // Socks Version
request.WriteUInt8(1); // CONNECT
request.WriteUInt8(0); // Reserved
- if (dest_.IsUnresolved()) {
+ if (dest_.IsUnresolvedIP()) {
std::string hostname = dest_.hostname();
request.WriteUInt8(3); // DOMAINNAME
request.WriteUInt8(static_cast<uint8_t>(hostname.size()));
diff --git a/webrtc/base/socketaddress.cc b/webrtc/base/socketaddress.cc
index 79ede80181..c5fd798cb1 100644
--- a/webrtc/base/socketaddress.cc
+++ b/webrtc/base/socketaddress.cc
@@ -307,39 +307,6 @@ size_t SocketAddress::ToSockAddrStorage(sockaddr_storage* addr) const {
return ToSockAddrStorageHelper(addr, ip_, port_, scope_id_);
}
-bool SocketAddress::StringToIP(const std::string& hostname, uint32_t* ip) {
- in_addr addr;
- if (rtc::inet_pton(AF_INET, hostname.c_str(), &addr) == 0)
- return false;
- *ip = NetworkToHost32(addr.s_addr);
- return true;
-}
-
-bool SocketAddress::StringToIP(const std::string& hostname, IPAddress* ip) {
- in_addr addr4;
- if (rtc::inet_pton(AF_INET, hostname.c_str(), &addr4) > 0) {
- if (ip) {
- *ip = IPAddress(addr4);
- }
- return true;
- }
-
- in6_addr addr6;
- if (rtc::inet_pton(AF_INET6, hostname.c_str(), &addr6) > 0) {
- if (ip) {
- *ip = IPAddress(addr6);
- }
- return true;
- }
- return false;
-}
-
-uint32_t SocketAddress::StringToIP(const std::string& hostname) {
- uint32_t ip = 0;
- StringToIP(hostname, &ip);
- return ip;
-}
-
bool SocketAddressFromSockAddrStorage(const sockaddr_storage& addr,
SocketAddress* out) {
if (!out) {
diff --git a/webrtc/base/socketaddress.h b/webrtc/base/socketaddress.h
index 1d975a1c0e..175d7a9d12 100644
--- a/webrtc/base/socketaddress.h
+++ b/webrtc/base/socketaddress.h
@@ -141,7 +141,6 @@ class SocketAddress {
// Determines whether the hostname has been resolved to an IP.
bool IsUnresolvedIP() const;
- inline bool IsUnresolved() const { return IsUnresolvedIP(); } // deprecated
// Determines whether this address is identical to the given one.
bool operator ==(const SocketAddress& addr) const;
@@ -177,16 +176,6 @@ class SocketAddress {
size_t ToDualStackSockAddrStorage(sockaddr_storage* saddr) const;
size_t ToSockAddrStorage(sockaddr_storage* saddr) const;
- // Converts the IP address given in dotted form into compact form.
- // Only dotted names (A.B.C.D) are converted.
- // Output integer is returned in host byte order.
- // TODO: Deprecate, replace wth agnostic versions.
- static bool StringToIP(const std::string& str, uint32_t* ip);
- static uint32_t StringToIP(const std::string& str);
-
- // Converts the IP address given in printable form into an IPAddress.
- static bool StringToIP(const std::string& str, IPAddress* ip);
-
private:
std::string hostname_;
IPAddress ip_;
diff --git a/webrtc/base/socketaddress_unittest.cc b/webrtc/base/socketaddress_unittest.cc
index 6e9f089561..e235447dc7 100644
--- a/webrtc/base/socketaddress_unittest.cc
+++ b/webrtc/base/socketaddress_unittest.cc
@@ -27,10 +27,11 @@ const in6_addr kMappedV4Addr = { { {0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0xFF, 0xFF,
0x01, 0x02, 0x03, 0x04} } };
const std::string kTestV6AddrString = "2001:db8:1020:3040:5060:7080:90a0:b0c0";
-const std::string kTestV6AddrAnonymizedString = "2001:db8:1020::";
+const std::string kTestV6AddrAnonymizedString = "2001:db8:1020:x:x:x:x:x";
const std::string kTestV6AddrFullString =
"[2001:db8:1020:3040:5060:7080:90a0:b0c0]:5678";
-const std::string kTestV6AddrFullAnonymizedString = "[2001:db8:1020::]:5678";
+const std::string kTestV6AddrFullAnonymizedString =
+ "[2001:db8:1020:x:x:x:x:x]:5678";
TEST(SocketAddressTest, TestDefaultCtor) {
SocketAddress addr;
@@ -325,23 +326,26 @@ TEST(SocketAddressTest, TestToSensitiveString) {
SocketAddress addr_v4("1.2.3.4", 5678);
EXPECT_EQ("1.2.3.4", addr_v4.HostAsURIString());
EXPECT_EQ("1.2.3.4:5678", addr_v4.ToString());
- EXPECT_EQ("1.2.3.4", addr_v4.HostAsSensitiveURIString());
- EXPECT_EQ("1.2.3.4:5678", addr_v4.ToSensitiveString());
- IPAddress::set_strip_sensitive(true);
+
+#if defined(NDEBUG)
EXPECT_EQ("1.2.3.x", addr_v4.HostAsSensitiveURIString());
EXPECT_EQ("1.2.3.x:5678", addr_v4.ToSensitiveString());
- IPAddress::set_strip_sensitive(false);
+#else
+ EXPECT_EQ("1.2.3.4", addr_v4.HostAsSensitiveURIString());
+ EXPECT_EQ("1.2.3.4:5678", addr_v4.ToSensitiveString());
+#endif // defined(NDEBUG)
SocketAddress addr_v6(kTestV6AddrString, 5678);
EXPECT_EQ("[" + kTestV6AddrString + "]", addr_v6.HostAsURIString());
EXPECT_EQ(kTestV6AddrFullString, addr_v6.ToString());
- EXPECT_EQ("[" + kTestV6AddrString + "]", addr_v6.HostAsSensitiveURIString());
- EXPECT_EQ(kTestV6AddrFullString, addr_v6.ToSensitiveString());
- IPAddress::set_strip_sensitive(true);
+#if defined(NDEBUG)
EXPECT_EQ("[" + kTestV6AddrAnonymizedString + "]",
addr_v6.HostAsSensitiveURIString());
EXPECT_EQ(kTestV6AddrFullAnonymizedString, addr_v6.ToSensitiveString());
- IPAddress::set_strip_sensitive(false);
+#else
+ EXPECT_EQ("[" + kTestV6AddrString + "]", addr_v6.HostAsSensitiveURIString());
+ EXPECT_EQ(kTestV6AddrFullString, addr_v6.ToSensitiveString());
+#endif // defined(NDEBUG)
}
} // namespace rtc
diff --git a/webrtc/base/sslidentity.cc b/webrtc/base/sslidentity.cc
index 180e60c58b..5f6b6869dd 100644
--- a/webrtc/base/sslidentity.cc
+++ b/webrtc/base/sslidentity.cc
@@ -15,6 +15,7 @@
#include "webrtc/base/sslidentity.h"
+#include <ctime>
#include <string>
#include "webrtc/base/base64.h"
@@ -177,4 +178,74 @@ SSLIdentity* SSLIdentity::FromPEMStrings(const std::string& private_key,
#endif // SSL_USE_OPENSSL
+// Read |n| bytes from ASN1 number string at *|pp| and return the numeric value.
+// Update *|pp| and *|np| to reflect number of read bytes.
+static inline int ASN1ReadInt(const unsigned char** pp, size_t* np, size_t n) {
+ const unsigned char* p = *pp;
+ int x = 0;
+ for (size_t i = 0; i < n; i++)
+ x = 10 * x + p[i] - '0';
+ *pp = p + n;
+ *np = *np - n;
+ return x;
+}
+
+int64_t ASN1TimeToSec(const unsigned char* s, size_t length, bool long_format) {
+ size_t bytes_left = length;
+
+ // Make sure the string ends with Z. Doing it here protects the strspn call
+ // from running off the end of the string in Z's absence.
+ if (length == 0 || s[length - 1] != 'Z')
+ return -1;
+
+ // Make sure we only have ASCII digits so that we don't need to clutter the
+ // code below and ASN1ReadInt with error checking.
+ size_t n = strspn(reinterpret_cast<const char*>(s), "0123456789");
+ if (n + 1 != length)
+ return -1;
+
+ int year;
+
+ // Read out ASN1 year, in either 2-char "UTCTIME" or 4-char "GENERALIZEDTIME"
+ // format. Both formats use UTC in this context.
+ if (long_format) {
+ // ASN1 format: yyyymmddhh[mm[ss[.fff]]]Z where the Z is literal, but
+ // RFC 5280 requires us to only support exactly yyyymmddhhmmssZ.
+
+ if (bytes_left < 11)
+ return -1;
+
+ year = ASN1ReadInt(&s, &bytes_left, 4);
+ year -= 1900;
+ } else {
+ // ASN1 format: yymmddhhmm[ss]Z where the Z is literal, but RFC 5280
+ // requires us to only support exactly yymmddhhmmssZ.
+
+ if (bytes_left < 9)
+ return -1;
+
+ year = ASN1ReadInt(&s, &bytes_left, 2);
+ if (year < 50) // Per RFC 5280 4.1.2.5.1
+ year += 100;
+ }
+
+ std::tm tm;
+ tm.tm_year = year;
+
+ // Read out remaining ASN1 time data and store it in |tm| in documented
+ // std::tm format.
+ tm.tm_mon = ASN1ReadInt(&s, &bytes_left, 2) - 1;
+ tm.tm_mday = ASN1ReadInt(&s, &bytes_left, 2);
+ tm.tm_hour = ASN1ReadInt(&s, &bytes_left, 2);
+ tm.tm_min = ASN1ReadInt(&s, &bytes_left, 2);
+ tm.tm_sec = ASN1ReadInt(&s, &bytes_left, 2);
+
+ if (bytes_left != 1) {
+ // Now just Z should remain. Its existence was asserted above.
+ return -1;
+ }
+
+ return TmToSeconds(tm);
+}
+
} // namespace rtc
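A usage sketch for the new parser (not part of the patch; the expected values come from the unit test table below, where both forms decode 2015-11-30T14:01:56Z):

#include <cassert>
#include <cstring>

void Asn1TimeExample() {
  // The parser does not require 0-termination; the length is passed
  // explicitly (here excluding the literal's trailing NUL).
  const char kLong[] = "20151130140156Z";  // GENERALIZEDTIME: yyyymmddhhmmssZ
  const char kShort[] = "151130140156Z";   // UTCTIME: yymmddhhmmssZ
  assert(rtc::ASN1TimeToSec(reinterpret_cast<const unsigned char*>(kLong),
                            strlen(kLong), true) == 1448892116);
  assert(rtc::ASN1TimeToSec(reinterpret_cast<const unsigned char*>(kShort),
                            strlen(kShort), false) == 1448892116);
}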
diff --git a/webrtc/base/sslidentity.h b/webrtc/base/sslidentity.h
index cf9942637e..a143ee4108 100644
--- a/webrtc/base/sslidentity.h
+++ b/webrtc/base/sslidentity.h
@@ -19,6 +19,7 @@
#include "webrtc/base/buffer.h"
#include "webrtc/base/messagedigest.h"
+#include "webrtc/base/timeutils.h"
namespace rtc {
@@ -68,6 +69,10 @@ class SSLCertificate {
unsigned char* digest,
size_t size,
size_t* length) const = 0;
+
+ // Returns the time in seconds relative to epoch, 1970-01-01T00:00:00Z (UTC),
+ // or -1 if an expiration time could not be retrieved.
+ virtual int64_t CertificateExpirationTime() const = 0;
};
// SSLCertChain is a simple wrapper for a vector of SSLCertificates. It serves
@@ -168,8 +173,8 @@ KeyType IntKeyTypeFamilyToKeyType(int key_type_family);
// random string will be used.
struct SSLIdentityParams {
std::string common_name;
- int not_before; // offset from current time in seconds.
- int not_after; // offset from current time in seconds.
+ time_t not_before; // Absolute time since epoch in seconds.
+ time_t not_after; // Absolute time since epoch in seconds.
KeyParams key_params;
};
@@ -217,6 +222,11 @@ class SSLIdentity {
size_t length);
};
+// Convert from ASN1 time as restricted by RFC 5280 to seconds from 1970-01-01
+// 00:00 ("epoch"). If the ASN1 time cannot be read, return -1. The data at
+// |s| is not 0-terminated; its char count is defined by |length|.
+int64_t ASN1TimeToSec(const unsigned char* s, size_t length, bool long_format);
+
extern const char kPemTypeCertificate[];
extern const char kPemTypeRsaPrivateKey[];
extern const char kPemTypeEcPrivateKey[];
diff --git a/webrtc/base/sslidentity_unittest.cc b/webrtc/base/sslidentity_unittest.cc
index e8df41506b..3582edb4a4 100644
--- a/webrtc/base/sslidentity_unittest.cc
+++ b/webrtc/base/sslidentity_unittest.cc
@@ -11,6 +11,7 @@
#include <string>
#include "webrtc/base/gunit.h"
+#include "webrtc/base/helpers.h"
#include "webrtc/base/ssladapter.h"
#include "webrtc/base/sslidentity.h"
@@ -295,3 +296,119 @@ TEST_F(SSLIdentityTest, PemDerConversion) {
TEST_F(SSLIdentityTest, GetSignatureDigestAlgorithm) {
TestGetSignatureDigestAlgorithm();
}
+
+class SSLIdentityExpirationTest : public testing::Test {
+ public:
+ SSLIdentityExpirationTest() {
+ // Set use of the test RNG to get deterministic expiration timestamp.
+ rtc::SetRandomTestMode(true);
+ }
+ ~SSLIdentityExpirationTest() {
+ // Put it back for the next test.
+ rtc::SetRandomTestMode(false);
+ }
+
+ void TestASN1TimeToSec() {
+ struct asn_example {
+ const char* string;
+ bool long_format;
+ int64_t want;
+ } static const data[] = {
+ // Valid examples.
+ {"19700101000000Z", true, 0},
+ {"700101000000Z", false, 0},
+ {"19700101000001Z", true, 1},
+ {"700101000001Z", false, 1},
+ {"19700101000100Z", true, 60},
+ {"19700101000101Z", true, 61},
+ {"19700101010000Z", true, 3600},
+ {"19700101010001Z", true, 3601},
+ {"19700101010100Z", true, 3660},
+ {"19700101010101Z", true, 3661},
+ {"710911012345Z", false, 53400225},
+ {"20000101000000Z", true, 946684800},
+ {"20000101000000Z", true, 946684800},
+ {"20151130140156Z", true, 1448892116},
+ {"151130140156Z", false, 1448892116},
+ {"20491231235959Z", true, 2524607999},
+ {"491231235959Z", false, 2524607999},
+ {"20500101000000Z", true, 2524607999+1},
+ {"20700101000000Z", true, 3155760000},
+ {"21000101000000Z", true, 4102444800},
+ {"24000101000000Z", true, 13569465600},
+
+ // Invalid examples.
+ {"19700101000000", true, -1}, // missing Z long format
+ {"19700101000000X", true, -1}, // X instead of Z long format
+ {"197001010000000", true, -1}, // 0 instead of Z long format
+ {"1970010100000000Z", true, -1}, // excess digits long format
+ {"700101000000", false, -1}, // missing Z short format
+ {"700101000000X", false, -1}, // X instead of Z short format
+ {"7001010000000", false, -1}, // 0 instead of Z short format
+ {"70010100000000Z", false, -1}, // excess digits short format
+ {":9700101000000Z", true, -1}, // invalid character
+ {"1:700101000001Z", true, -1}, // invalid character
+ {"19:00101000100Z", true, -1}, // invalid character
+ {"197:0101000101Z", true, -1}, // invalid character
+ {"1970:101010000Z", true, -1}, // invalid character
+ {"19700:01010001Z", true, -1}, // invalid character
+ {"197001:1010100Z", true, -1}, // invalid character
+ {"1970010:010101Z", true, -1}, // invalid character
+ {"70010100:000Z", false, -1}, // invalid character
+ {"700101000:01Z", false, -1}, // invalid character
+ {"2000010100:000Z", true, -1}, // invalid character
+ {"21000101000:00Z", true, -1}, // invalid character
+ {"240001010000:0Z", true, -1}, // invalid character
+ {"500101000000Z", false, -1}, // but too old for epoch
+ {"691231235959Z", false, -1}, // too old for epoch
+ {"19611118043000Z", false, -1}, // way too old for epoch
+ };
+
+ unsigned char buf[20];
+
+ // Run all examples and check for the expected result.
+ for (const auto& entry : data) {
+ size_t length = strlen(entry.string);
+ memcpy(buf, entry.string, length); // Copy the ASN1 string...
+ buf[length] = rtc::CreateRandomId(); // ...and terminate it with junk.
+ int64_t res = rtc::ASN1TimeToSec(buf, length, entry.long_format);
+ LOG(LS_VERBOSE) << entry.string;
+ ASSERT_EQ(entry.want, res);
+ }
+ // Run all examples again, but with an invalid length.
+ for (const auto& entry : data) {
+ size_t length = strlen(entry.string);
+ memcpy(buf, entry.string, length); // Copy the ASN1 string...
+ buf[length] = rtc::CreateRandomId(); // ...and terminate it with junk.
+ int64_t res = rtc::ASN1TimeToSec(buf, length - 1, entry.long_format);
+ LOG(LS_VERBOSE) << entry.string;
+ ASSERT_EQ(-1, res);
+ }
+ }
+
+ void TestExpireTime(int times) {
+ for (int i = 0; i < times; i++) {
+ rtc::SSLIdentityParams params;
+ params.common_name = "";
+ params.not_before = 0;
+ // We limit the time to < 2^31 here, i.e., we stay before 2038, since
+ // otherwise we hit time offset limitations in OpenSSL on some 32-bit systems.
+ params.not_after = rtc::CreateRandomId() % 0x80000000;
+ // We test just ECDSA here since what we're out to exercise is the
+ // code for expiration setting and reading.
+ params.key_params = rtc::KeyParams::ECDSA(rtc::EC_NIST_P256);
+ SSLIdentity* identity = rtc::SSLIdentity::GenerateForTest(params);
+ EXPECT_EQ(params.not_after,
+ identity->certificate().CertificateExpirationTime());
+ delete identity;
+ }
+ }
+};
+
+TEST_F(SSLIdentityExpirationTest, TestASN1TimeToSec) {
+ TestASN1TimeToSec();
+}
+
+TEST_F(SSLIdentityExpirationTest, TestExpireTime) {
+ TestExpireTime(500);
+}
diff --git a/webrtc/base/sslroots.h b/webrtc/base/sslroots.h
index 31d601c169..0464ac8339 100644
--- a/webrtc/base/sslroots.h
+++ b/webrtc/base/sslroots.h
@@ -2,83 +2,813 @@
// Google.
// It was generated with the following command line:
-// > python //depot/googleclient/talk/tools/generate_sslroots.py
-// //depot/google3/security/cacerts/for_connecting_to_google/roots.pem
+// > python tools/sslroots/generate_sslroots.py
+// https://pki.google.com/roots.pem
-/* subject:/C=SE/O=AddTrust AB/OU=AddTrust External TTP Network/CN=AddTrust External CA Root */
-/* issuer :/C=SE/O=AddTrust AB/OU=AddTrust External TTP Network/CN=AddTrust External CA Root */
+/* subject:/C=BE/O=GlobalSign nv-sa/OU=Root CA/CN=GlobalSign Root CA */
+/* issuer :/C=BE/O=GlobalSign nv-sa/OU=Root CA/CN=GlobalSign Root CA */
-namespace rtc {
-const unsigned char AddTrust_External_Root_certificate[1082]={
-0x30,0x82,0x04,0x36,0x30,0x82,0x03,0x1E,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01,
+const unsigned char GlobalSign_Root_CA_certificate[889]={
+0x30,0x82,0x03,0x75,0x30,0x82,0x02,0x5D,0xA0,0x03,0x02,0x01,0x02,0x02,0x0B,0x04,
+0x00,0x00,0x00,0x00,0x01,0x15,0x4B,0x5A,0xC3,0x94,0x30,0x0D,0x06,0x09,0x2A,0x86,
+0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x57,0x31,0x0B,0x30,0x09,0x06,
+0x03,0x55,0x04,0x06,0x13,0x02,0x42,0x45,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,
+0x0A,0x13,0x10,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x20,0x6E,0x76,
+0x2D,0x73,0x61,0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x0B,0x13,0x07,0x52,0x6F,
+0x6F,0x74,0x20,0x43,0x41,0x31,0x1B,0x30,0x19,0x06,0x03,0x55,0x04,0x03,0x13,0x12,
+0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x20,0x52,0x6F,0x6F,0x74,0x20,
+0x43,0x41,0x30,0x1E,0x17,0x0D,0x39,0x38,0x30,0x39,0x30,0x31,0x31,0x32,0x30,0x30,
+0x30,0x30,0x5A,0x17,0x0D,0x32,0x38,0x30,0x31,0x32,0x38,0x31,0x32,0x30,0x30,0x30,
+0x30,0x5A,0x30,0x57,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x42,
+0x45,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0A,0x13,0x10,0x47,0x6C,0x6F,0x62,
+0x61,0x6C,0x53,0x69,0x67,0x6E,0x20,0x6E,0x76,0x2D,0x73,0x61,0x31,0x10,0x30,0x0E,
+0x06,0x03,0x55,0x04,0x0B,0x13,0x07,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x31,0x1B,
+0x30,0x19,0x06,0x03,0x55,0x04,0x03,0x13,0x12,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,
+0x69,0x67,0x6E,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x30,0x82,0x01,0x22,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,
+0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xDA,0x0E,0xE6,0x99,
+0x8D,0xCE,0xA3,0xE3,0x4F,0x8A,0x7E,0xFB,0xF1,0x8B,0x83,0x25,0x6B,0xEA,0x48,0x1F,
+0xF1,0x2A,0xB0,0xB9,0x95,0x11,0x04,0xBD,0xF0,0x63,0xD1,0xE2,0x67,0x66,0xCF,0x1C,
+0xDD,0xCF,0x1B,0x48,0x2B,0xEE,0x8D,0x89,0x8E,0x9A,0xAF,0x29,0x80,0x65,0xAB,0xE9,
+0xC7,0x2D,0x12,0xCB,0xAB,0x1C,0x4C,0x70,0x07,0xA1,0x3D,0x0A,0x30,0xCD,0x15,0x8D,
+0x4F,0xF8,0xDD,0xD4,0x8C,0x50,0x15,0x1C,0xEF,0x50,0xEE,0xC4,0x2E,0xF7,0xFC,0xE9,
+0x52,0xF2,0x91,0x7D,0xE0,0x6D,0xD5,0x35,0x30,0x8E,0x5E,0x43,0x73,0xF2,0x41,0xE9,
+0xD5,0x6A,0xE3,0xB2,0x89,0x3A,0x56,0x39,0x38,0x6F,0x06,0x3C,0x88,0x69,0x5B,0x2A,
+0x4D,0xC5,0xA7,0x54,0xB8,0x6C,0x89,0xCC,0x9B,0xF9,0x3C,0xCA,0xE5,0xFD,0x89,0xF5,
+0x12,0x3C,0x92,0x78,0x96,0xD6,0xDC,0x74,0x6E,0x93,0x44,0x61,0xD1,0x8D,0xC7,0x46,
+0xB2,0x75,0x0E,0x86,0xE8,0x19,0x8A,0xD5,0x6D,0x6C,0xD5,0x78,0x16,0x95,0xA2,0xE9,
+0xC8,0x0A,0x38,0xEB,0xF2,0x24,0x13,0x4F,0x73,0x54,0x93,0x13,0x85,0x3A,0x1B,0xBC,
+0x1E,0x34,0xB5,0x8B,0x05,0x8C,0xB9,0x77,0x8B,0xB1,0xDB,0x1F,0x20,0x91,0xAB,0x09,
+0x53,0x6E,0x90,0xCE,0x7B,0x37,0x74,0xB9,0x70,0x47,0x91,0x22,0x51,0x63,0x16,0x79,
+0xAE,0xB1,0xAE,0x41,0x26,0x08,0xC8,0x19,0x2B,0xD1,0x46,0xAA,0x48,0xD6,0x64,0x2A,
+0xD7,0x83,0x34,0xFF,0x2C,0x2A,0xC1,0x6C,0x19,0x43,0x4A,0x07,0x85,0xE7,0xD3,0x7C,
+0xF6,0x21,0x68,0xEF,0xEA,0xF2,0x52,0x9F,0x7F,0x93,0x90,0xCF,0x02,0x03,0x01,0x00,
+0x01,0xA3,0x42,0x30,0x40,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,
+0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,
+0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,
+0x14,0x60,0x7B,0x66,0x1A,0x45,0x0D,0x97,0xCA,0x89,0x50,0x2F,0x7D,0x04,0xCD,0x34,
+0xA8,0xFF,0xFC,0xFD,0x4B,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,
+0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0xD6,0x73,0xE7,0x7C,0x4F,0x76,0xD0,
+0x8D,0xBF,0xEC,0xBA,0xA2,0xBE,0x34,0xC5,0x28,0x32,0xB5,0x7C,0xFC,0x6C,0x9C,0x2C,
+0x2B,0xBD,0x09,0x9E,0x53,0xBF,0x6B,0x5E,0xAA,0x11,0x48,0xB6,0xE5,0x08,0xA3,0xB3,
+0xCA,0x3D,0x61,0x4D,0xD3,0x46,0x09,0xB3,0x3E,0xC3,0xA0,0xE3,0x63,0x55,0x1B,0xF2,
+0xBA,0xEF,0xAD,0x39,0xE1,0x43,0xB9,0x38,0xA3,0xE6,0x2F,0x8A,0x26,0x3B,0xEF,0xA0,
+0x50,0x56,0xF9,0xC6,0x0A,0xFD,0x38,0xCD,0xC4,0x0B,0x70,0x51,0x94,0x97,0x98,0x04,
+0xDF,0xC3,0x5F,0x94,0xD5,0x15,0xC9,0x14,0x41,0x9C,0xC4,0x5D,0x75,0x64,0x15,0x0D,
+0xFF,0x55,0x30,0xEC,0x86,0x8F,0xFF,0x0D,0xEF,0x2C,0xB9,0x63,0x46,0xF6,0xAA,0xFC,
+0xDF,0xBC,0x69,0xFD,0x2E,0x12,0x48,0x64,0x9A,0xE0,0x95,0xF0,0xA6,0xEF,0x29,0x8F,
+0x01,0xB1,0x15,0xB5,0x0C,0x1D,0xA5,0xFE,0x69,0x2C,0x69,0x24,0x78,0x1E,0xB3,0xA7,
+0x1C,0x71,0x62,0xEE,0xCA,0xC8,0x97,0xAC,0x17,0x5D,0x8A,0xC2,0xF8,0x47,0x86,0x6E,
+0x2A,0xC4,0x56,0x31,0x95,0xD0,0x67,0x89,0x85,0x2B,0xF9,0x6C,0xA6,0x5D,0x46,0x9D,
+0x0C,0xAA,0x82,0xE4,0x99,0x51,0xDD,0x70,0xB7,0xDB,0x56,0x3D,0x61,0xE4,0x6A,0xE1,
+0x5C,0xD6,0xF6,0xFE,0x3D,0xDE,0x41,0xCC,0x07,0xAE,0x63,0x52,0xBF,0x53,0x53,0xF4,
+0x2B,0xE9,0xC7,0xFD,0xB6,0xF7,0x82,0x5F,0x85,0xD2,0x41,0x18,0xDB,0x81,0xB3,0x04,
+0x1C,0xC5,0x1F,0xA4,0x80,0x6F,0x15,0x20,0xC9,0xDE,0x0C,0x88,0x0A,0x1D,0xD6,0x66,
+0x55,0xE2,0xFC,0x48,0xC9,0x29,0x26,0x69,0xE0,
+};
+
+
+/* subject:/C=US/ST=New Jersey/L=Jersey City/O=The USERTRUST Network/CN=USERTrust RSA Certification Authority */
+/* issuer :/C=US/ST=New Jersey/L=Jersey City/O=The USERTRUST Network/CN=USERTrust RSA Certification Authority */
+
+
+const unsigned char USERTrust_RSA_Certification_Authority_certificate[1506]={
+0x30,0x82,0x05,0xDE,0x30,0x82,0x03,0xC6,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x01,
+0xFD,0x6D,0x30,0xFC,0xA3,0xCA,0x51,0xA8,0x1B,0xBC,0x64,0x0E,0x35,0x03,0x2D,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0C,0x05,0x00,0x30,0x81,
+0x88,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x13,
+0x30,0x11,0x06,0x03,0x55,0x04,0x08,0x13,0x0A,0x4E,0x65,0x77,0x20,0x4A,0x65,0x72,
+0x73,0x65,0x79,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x07,0x13,0x0B,0x4A,0x65,
+0x72,0x73,0x65,0x79,0x20,0x43,0x69,0x74,0x79,0x31,0x1E,0x30,0x1C,0x06,0x03,0x55,
+0x04,0x0A,0x13,0x15,0x54,0x68,0x65,0x20,0x55,0x53,0x45,0x52,0x54,0x52,0x55,0x53,
+0x54,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x2E,0x30,0x2C,0x06,0x03,0x55,
+0x04,0x03,0x13,0x25,0x55,0x53,0x45,0x52,0x54,0x72,0x75,0x73,0x74,0x20,0x52,0x53,
+0x41,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,
+0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x1E,0x17,0x0D,0x31,0x30,0x30,
+0x32,0x30,0x31,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x38,0x30,0x31,
+0x31,0x38,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0x88,0x31,0x0B,0x30,0x09,
+0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x13,0x30,0x11,0x06,0x03,0x55,
+0x04,0x08,0x13,0x0A,0x4E,0x65,0x77,0x20,0x4A,0x65,0x72,0x73,0x65,0x79,0x31,0x14,
+0x30,0x12,0x06,0x03,0x55,0x04,0x07,0x13,0x0B,0x4A,0x65,0x72,0x73,0x65,0x79,0x20,
+0x43,0x69,0x74,0x79,0x31,0x1E,0x30,0x1C,0x06,0x03,0x55,0x04,0x0A,0x13,0x15,0x54,
+0x68,0x65,0x20,0x55,0x53,0x45,0x52,0x54,0x52,0x55,0x53,0x54,0x20,0x4E,0x65,0x74,
+0x77,0x6F,0x72,0x6B,0x31,0x2E,0x30,0x2C,0x06,0x03,0x55,0x04,0x03,0x13,0x25,0x55,
+0x53,0x45,0x52,0x54,0x72,0x75,0x73,0x74,0x20,0x52,0x53,0x41,0x20,0x43,0x65,0x72,
+0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,
+0x72,0x69,0x74,0x79,0x30,0x82,0x02,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,
+0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x02,0x0F,0x00,0x30,0x82,0x02,0x0A,
+0x02,0x82,0x02,0x01,0x00,0x80,0x12,0x65,0x17,0x36,0x0E,0xC3,0xDB,0x08,0xB3,0xD0,
+0xAC,0x57,0x0D,0x76,0xED,0xCD,0x27,0xD3,0x4C,0xAD,0x50,0x83,0x61,0xE2,0xAA,0x20,
+0x4D,0x09,0x2D,0x64,0x09,0xDC,0xCE,0x89,0x9F,0xCC,0x3D,0xA9,0xEC,0xF6,0xCF,0xC1,
+0xDC,0xF1,0xD3,0xB1,0xD6,0x7B,0x37,0x28,0x11,0x2B,0x47,0xDA,0x39,0xC6,0xBC,0x3A,
+0x19,0xB4,0x5F,0xA6,0xBD,0x7D,0x9D,0xA3,0x63,0x42,0xB6,0x76,0xF2,0xA9,0x3B,0x2B,
+0x91,0xF8,0xE2,0x6F,0xD0,0xEC,0x16,0x20,0x90,0x09,0x3E,0xE2,0xE8,0x74,0xC9,0x18,
+0xB4,0x91,0xD4,0x62,0x64,0xDB,0x7F,0xA3,0x06,0xF1,0x88,0x18,0x6A,0x90,0x22,0x3C,
+0xBC,0xFE,0x13,0xF0,0x87,0x14,0x7B,0xF6,0xE4,0x1F,0x8E,0xD4,0xE4,0x51,0xC6,0x11,
+0x67,0x46,0x08,0x51,0xCB,0x86,0x14,0x54,0x3F,0xBC,0x33,0xFE,0x7E,0x6C,0x9C,0xFF,
+0x16,0x9D,0x18,0xBD,0x51,0x8E,0x35,0xA6,0xA7,0x66,0xC8,0x72,0x67,0xDB,0x21,0x66,
+0xB1,0xD4,0x9B,0x78,0x03,0xC0,0x50,0x3A,0xE8,0xCC,0xF0,0xDC,0xBC,0x9E,0x4C,0xFE,
+0xAF,0x05,0x96,0x35,0x1F,0x57,0x5A,0xB7,0xFF,0xCE,0xF9,0x3D,0xB7,0x2C,0xB6,0xF6,
+0x54,0xDD,0xC8,0xE7,0x12,0x3A,0x4D,0xAE,0x4C,0x8A,0xB7,0x5C,0x9A,0xB4,0xB7,0x20,
+0x3D,0xCA,0x7F,0x22,0x34,0xAE,0x7E,0x3B,0x68,0x66,0x01,0x44,0xE7,0x01,0x4E,0x46,
+0x53,0x9B,0x33,0x60,0xF7,0x94,0xBE,0x53,0x37,0x90,0x73,0x43,0xF3,0x32,0xC3,0x53,
+0xEF,0xDB,0xAA,0xFE,0x74,0x4E,0x69,0xC7,0x6B,0x8C,0x60,0x93,0xDE,0xC4,0xC7,0x0C,
+0xDF,0xE1,0x32,0xAE,0xCC,0x93,0x3B,0x51,0x78,0x95,0x67,0x8B,0xEE,0x3D,0x56,0xFE,
+0x0C,0xD0,0x69,0x0F,0x1B,0x0F,0xF3,0x25,0x26,0x6B,0x33,0x6D,0xF7,0x6E,0x47,0xFA,
+0x73,0x43,0xE5,0x7E,0x0E,0xA5,0x66,0xB1,0x29,0x7C,0x32,0x84,0x63,0x55,0x89,0xC4,
+0x0D,0xC1,0x93,0x54,0x30,0x19,0x13,0xAC,0xD3,0x7D,0x37,0xA7,0xEB,0x5D,0x3A,0x6C,
+0x35,0x5C,0xDB,0x41,0xD7,0x12,0xDA,0xA9,0x49,0x0B,0xDF,0xD8,0x80,0x8A,0x09,0x93,
+0x62,0x8E,0xB5,0x66,0xCF,0x25,0x88,0xCD,0x84,0xB8,0xB1,0x3F,0xA4,0x39,0x0F,0xD9,
+0x02,0x9E,0xEB,0x12,0x4C,0x95,0x7C,0xF3,0x6B,0x05,0xA9,0x5E,0x16,0x83,0xCC,0xB8,
+0x67,0xE2,0xE8,0x13,0x9D,0xCC,0x5B,0x82,0xD3,0x4C,0xB3,0xED,0x5B,0xFF,0xDE,0xE5,
+0x73,0xAC,0x23,0x3B,0x2D,0x00,0xBF,0x35,0x55,0x74,0x09,0x49,0xD8,0x49,0x58,0x1A,
+0x7F,0x92,0x36,0xE6,0x51,0x92,0x0E,0xF3,0x26,0x7D,0x1C,0x4D,0x17,0xBC,0xC9,0xEC,
+0x43,0x26,0xD0,0xBF,0x41,0x5F,0x40,0xA9,0x44,0x44,0xF4,0x99,0xE7,0x57,0x87,0x9E,
+0x50,0x1F,0x57,0x54,0xA8,0x3E,0xFD,0x74,0x63,0x2F,0xB1,0x50,0x65,0x09,0xE6,0x58,
+0x42,0x2E,0x43,0x1A,0x4C,0xB4,0xF0,0x25,0x47,0x59,0xFA,0x04,0x1E,0x93,0xD4,0x26,
+0x46,0x4A,0x50,0x81,0xB2,0xDE,0xBE,0x78,0xB7,0xFC,0x67,0x15,0xE1,0xC9,0x57,0x84,
+0x1E,0x0F,0x63,0xD6,0xE9,0x62,0xBA,0xD6,0x5F,0x55,0x2E,0xEA,0x5C,0xC6,0x28,0x08,
+0x04,0x25,0x39,0xB8,0x0E,0x2B,0xA9,0xF2,0x4C,0x97,0x1C,0x07,0x3F,0x0D,0x52,0xF5,
+0xED,0xEF,0x2F,0x82,0x0F,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x1D,
+0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x53,0x79,0xBF,0x5A,0xAA,0x2B,0x4A,
+0xCF,0x54,0x80,0xE1,0xD8,0x9B,0xC0,0x9D,0xF2,0xB2,0x03,0x66,0xCB,0x30,0x0E,0x06,
+0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,
+0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0D,
+0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0C,0x05,0x00,0x03,0x82,0x02,
+0x01,0x00,0x5C,0xD4,0x7C,0x0D,0xCF,0xF7,0x01,0x7D,0x41,0x99,0x65,0x0C,0x73,0xC5,
+0x52,0x9F,0xCB,0xF8,0xCF,0x99,0x06,0x7F,0x1B,0xDA,0x43,0x15,0x9F,0x9E,0x02,0x55,
+0x57,0x96,0x14,0xF1,0x52,0x3C,0x27,0x87,0x94,0x28,0xED,0x1F,0x3A,0x01,0x37,0xA2,
+0x76,0xFC,0x53,0x50,0xC0,0x84,0x9B,0xC6,0x6B,0x4E,0xBA,0x8C,0x21,0x4F,0xA2,0x8E,
+0x55,0x62,0x91,0xF3,0x69,0x15,0xD8,0xBC,0x88,0xE3,0xC4,0xAA,0x0B,0xFD,0xEF,0xA8,
+0xE9,0x4B,0x55,0x2A,0x06,0x20,0x6D,0x55,0x78,0x29,0x19,0xEE,0x5F,0x30,0x5C,0x4B,
+0x24,0x11,0x55,0xFF,0x24,0x9A,0x6E,0x5E,0x2A,0x2B,0xEE,0x0B,0x4D,0x9F,0x7F,0xF7,
+0x01,0x38,0x94,0x14,0x95,0x43,0x07,0x09,0xFB,0x60,0xA9,0xEE,0x1C,0xAB,0x12,0x8C,
+0xA0,0x9A,0x5E,0xA7,0x98,0x6A,0x59,0x6D,0x8B,0x3F,0x08,0xFB,0xC8,0xD1,0x45,0xAF,
+0x18,0x15,0x64,0x90,0x12,0x0F,0x73,0x28,0x2E,0xC5,0xE2,0x24,0x4E,0xFC,0x58,0xEC,
+0xF0,0xF4,0x45,0xFE,0x22,0xB3,0xEB,0x2F,0x8E,0xD2,0xD9,0x45,0x61,0x05,0xC1,0x97,
+0x6F,0xA8,0x76,0x72,0x8F,0x8B,0x8C,0x36,0xAF,0xBF,0x0D,0x05,0xCE,0x71,0x8D,0xE6,
+0xA6,0x6F,0x1F,0x6C,0xA6,0x71,0x62,0xC5,0xD8,0xD0,0x83,0x72,0x0C,0xF1,0x67,0x11,
+0x89,0x0C,0x9C,0x13,0x4C,0x72,0x34,0xDF,0xBC,0xD5,0x71,0xDF,0xAA,0x71,0xDD,0xE1,
+0xB9,0x6C,0x8C,0x3C,0x12,0x5D,0x65,0xDA,0xBD,0x57,0x12,0xB6,0x43,0x6B,0xFF,0xE5,
+0xDE,0x4D,0x66,0x11,0x51,0xCF,0x99,0xAE,0xEC,0x17,0xB6,0xE8,0x71,0x91,0x8C,0xDE,
+0x49,0xFE,0xDD,0x35,0x71,0xA2,0x15,0x27,0x94,0x1C,0xCF,0x61,0xE3,0x26,0xBB,0x6F,
+0xA3,0x67,0x25,0x21,0x5D,0xE6,0xDD,0x1D,0x0B,0x2E,0x68,0x1B,0x3B,0x82,0xAF,0xEC,
+0x83,0x67,0x85,0xD4,0x98,0x51,0x74,0xB1,0xB9,0x99,0x80,0x89,0xFF,0x7F,0x78,0x19,
+0x5C,0x79,0x4A,0x60,0x2E,0x92,0x40,0xAE,0x4C,0x37,0x2A,0x2C,0xC9,0xC7,0x62,0xC8,
+0x0E,0x5D,0xF7,0x36,0x5B,0xCA,0xE0,0x25,0x25,0x01,0xB4,0xDD,0x1A,0x07,0x9C,0x77,
+0x00,0x3F,0xD0,0xDC,0xD5,0xEC,0x3D,0xD4,0xFA,0xBB,0x3F,0xCC,0x85,0xD6,0x6F,0x7F,
+0xA9,0x2D,0xDF,0xB9,0x02,0xF7,0xF5,0x97,0x9A,0xB5,0x35,0xDA,0xC3,0x67,0xB0,0x87,
+0x4A,0xA9,0x28,0x9E,0x23,0x8E,0xFF,0x5C,0x27,0x6B,0xE1,0xB0,0x4F,0xF3,0x07,0xEE,
+0x00,0x2E,0xD4,0x59,0x87,0xCB,0x52,0x41,0x95,0xEA,0xF4,0x47,0xD7,0xEE,0x64,0x41,
+0x55,0x7C,0x8D,0x59,0x02,0x95,0xDD,0x62,0x9D,0xC2,0xB9,0xEE,0x5A,0x28,0x74,0x84,
+0xA5,0x9B,0xB7,0x90,0xC7,0x0C,0x07,0xDF,0xF5,0x89,0x36,0x74,0x32,0xD6,0x28,0xC1,
+0xB0,0xB0,0x0B,0xE0,0x9C,0x4C,0xC3,0x1C,0xD6,0xFC,0xE3,0x69,0xB5,0x47,0x46,0x81,
+0x2F,0xA2,0x82,0xAB,0xD3,0x63,0x44,0x70,0xC4,0x8D,0xFF,0x2D,0x33,0xBA,0xAD,0x8F,
+0x7B,0xB5,0x70,0x88,0xAE,0x3E,0x19,0xCF,0x40,0x28,0xD8,0xFC,0xC8,0x90,0xBB,0x5D,
+0x99,0x22,0xF5,0x52,0xE6,0x58,0xC5,0x1F,0x88,0x31,0x43,0xEE,0x88,0x1D,0xD7,0xC6,
+0x8E,0x3C,0x43,0x6A,0x1D,0xA7,0x18,0xDE,0x7D,0x3D,0x16,0xF1,0x62,0xF9,0xCA,0x90,
+0xA8,0xFD,
+};
+
+
+/* subject:/C=US/O=Starfield Technologies, Inc./OU=Starfield Class 2 Certification Authority */
+/* issuer :/C=US/O=Starfield Technologies, Inc./OU=Starfield Class 2 Certification Authority */
+
+
+const unsigned char Starfield_Class_2_CA_certificate[1043]={
+0x30,0x82,0x04,0x0F,0x30,0x82,0x02,0xF7,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x00,
0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,
-0x6F,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x53,0x45,0x31,0x14,
-0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x13,0x0B,0x41,0x64,0x64,0x54,0x72,0x75,0x73,
-0x74,0x20,0x41,0x42,0x31,0x26,0x30,0x24,0x06,0x03,0x55,0x04,0x0B,0x13,0x1D,0x41,
-0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x45,0x78,0x74,0x65,0x72,0x6E,0x61,0x6C,
-0x20,0x54,0x54,0x50,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x22,0x30,0x20,
-0x06,0x03,0x55,0x04,0x03,0x13,0x19,0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,
-0x45,0x78,0x74,0x65,0x72,0x6E,0x61,0x6C,0x20,0x43,0x41,0x20,0x52,0x6F,0x6F,0x74,
-0x30,0x1E,0x17,0x0D,0x30,0x30,0x30,0x35,0x33,0x30,0x31,0x30,0x34,0x38,0x33,0x38,
-0x5A,0x17,0x0D,0x32,0x30,0x30,0x35,0x33,0x30,0x31,0x30,0x34,0x38,0x33,0x38,0x5A,
-0x30,0x6F,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x53,0x45,0x31,
-0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x13,0x0B,0x41,0x64,0x64,0x54,0x72,0x75,
-0x73,0x74,0x20,0x41,0x42,0x31,0x26,0x30,0x24,0x06,0x03,0x55,0x04,0x0B,0x13,0x1D,
-0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x45,0x78,0x74,0x65,0x72,0x6E,0x61,
-0x6C,0x20,0x54,0x54,0x50,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x22,0x30,
-0x20,0x06,0x03,0x55,0x04,0x03,0x13,0x19,0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,
-0x20,0x45,0x78,0x74,0x65,0x72,0x6E,0x61,0x6C,0x20,0x43,0x41,0x20,0x52,0x6F,0x6F,
-0x74,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,
-0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,
-0x01,0x00,0xB7,0xF7,0x1A,0x33,0xE6,0xF2,0x00,0x04,0x2D,0x39,0xE0,0x4E,0x5B,0xED,
-0x1F,0xBC,0x6C,0x0F,0xCD,0xB5,0xFA,0x23,0xB6,0xCE,0xDE,0x9B,0x11,0x33,0x97,0xA4,
-0x29,0x4C,0x7D,0x93,0x9F,0xBD,0x4A,0xBC,0x93,0xED,0x03,0x1A,0xE3,0x8F,0xCF,0xE5,
-0x6D,0x50,0x5A,0xD6,0x97,0x29,0x94,0x5A,0x80,0xB0,0x49,0x7A,0xDB,0x2E,0x95,0xFD,
-0xB8,0xCA,0xBF,0x37,0x38,0x2D,0x1E,0x3E,0x91,0x41,0xAD,0x70,0x56,0xC7,0xF0,0x4F,
-0x3F,0xE8,0x32,0x9E,0x74,0xCA,0xC8,0x90,0x54,0xE9,0xC6,0x5F,0x0F,0x78,0x9D,0x9A,
-0x40,0x3C,0x0E,0xAC,0x61,0xAA,0x5E,0x14,0x8F,0x9E,0x87,0xA1,0x6A,0x50,0xDC,0xD7,
-0x9A,0x4E,0xAF,0x05,0xB3,0xA6,0x71,0x94,0x9C,0x71,0xB3,0x50,0x60,0x0A,0xC7,0x13,
-0x9D,0x38,0x07,0x86,0x02,0xA8,0xE9,0xA8,0x69,0x26,0x18,0x90,0xAB,0x4C,0xB0,0x4F,
-0x23,0xAB,0x3A,0x4F,0x84,0xD8,0xDF,0xCE,0x9F,0xE1,0x69,0x6F,0xBB,0xD7,0x42,0xD7,
-0x6B,0x44,0xE4,0xC7,0xAD,0xEE,0x6D,0x41,0x5F,0x72,0x5A,0x71,0x08,0x37,0xB3,0x79,
-0x65,0xA4,0x59,0xA0,0x94,0x37,0xF7,0x00,0x2F,0x0D,0xC2,0x92,0x72,0xDA,0xD0,0x38,
-0x72,0xDB,0x14,0xA8,0x45,0xC4,0x5D,0x2A,0x7D,0xB7,0xB4,0xD6,0xC4,0xEE,0xAC,0xCD,
-0x13,0x44,0xB7,0xC9,0x2B,0xDD,0x43,0x00,0x25,0xFA,0x61,0xB9,0x69,0x6A,0x58,0x23,
-0x11,0xB7,0xA7,0x33,0x8F,0x56,0x75,0x59,0xF5,0xCD,0x29,0xD7,0x46,0xB7,0x0A,0x2B,
-0x65,0xB6,0xD3,0x42,0x6F,0x15,0xB2,0xB8,0x7B,0xFB,0xEF,0xE9,0x5D,0x53,0xD5,0x34,
-0x5A,0x27,0x02,0x03,0x01,0x00,0x01,0xA3,0x81,0xDC,0x30,0x81,0xD9,0x30,0x1D,0x06,
-0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xAD,0xBD,0x98,0x7A,0x34,0xB4,0x26,0xF7,
-0xFA,0xC4,0x26,0x54,0xEF,0x03,0xBD,0xE0,0x24,0xCB,0x54,0x1A,0x30,0x0B,0x06,0x03,
-0x55,0x1D,0x0F,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,
-0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x81,0x99,0x06,0x03,0x55,
-0x1D,0x23,0x04,0x81,0x91,0x30,0x81,0x8E,0x80,0x14,0xAD,0xBD,0x98,0x7A,0x34,0xB4,
-0x26,0xF7,0xFA,0xC4,0x26,0x54,0xEF,0x03,0xBD,0xE0,0x24,0xCB,0x54,0x1A,0xA1,0x73,
-0xA4,0x71,0x30,0x6F,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x53,
-0x45,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x13,0x0B,0x41,0x64,0x64,0x54,
-0x72,0x75,0x73,0x74,0x20,0x41,0x42,0x31,0x26,0x30,0x24,0x06,0x03,0x55,0x04,0x0B,
-0x13,0x1D,0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x45,0x78,0x74,0x65,0x72,
-0x6E,0x61,0x6C,0x20,0x54,0x54,0x50,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,
-0x22,0x30,0x20,0x06,0x03,0x55,0x04,0x03,0x13,0x19,0x41,0x64,0x64,0x54,0x72,0x75,
-0x73,0x74,0x20,0x45,0x78,0x74,0x65,0x72,0x6E,0x61,0x6C,0x20,0x43,0x41,0x20,0x52,
-0x6F,0x6F,0x74,0x82,0x01,0x01,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,
-0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0xB0,0x9B,0xE0,0x85,0x25,0xC2,
-0xD6,0x23,0xE2,0x0F,0x96,0x06,0x92,0x9D,0x41,0x98,0x9C,0xD9,0x84,0x79,0x81,0xD9,
-0x1E,0x5B,0x14,0x07,0x23,0x36,0x65,0x8F,0xB0,0xD8,0x77,0xBB,0xAC,0x41,0x6C,0x47,
-0x60,0x83,0x51,0xB0,0xF9,0x32,0x3D,0xE7,0xFC,0xF6,0x26,0x13,0xC7,0x80,0x16,0xA5,
-0xBF,0x5A,0xFC,0x87,0xCF,0x78,0x79,0x89,0x21,0x9A,0xE2,0x4C,0x07,0x0A,0x86,0x35,
-0xBC,0xF2,0xDE,0x51,0xC4,0xD2,0x96,0xB7,0xDC,0x7E,0x4E,0xEE,0x70,0xFD,0x1C,0x39,
-0xEB,0x0C,0x02,0x51,0x14,0x2D,0x8E,0xBD,0x16,0xE0,0xC1,0xDF,0x46,0x75,0xE7,0x24,
-0xAD,0xEC,0xF4,0x42,0xB4,0x85,0x93,0x70,0x10,0x67,0xBA,0x9D,0x06,0x35,0x4A,0x18,
-0xD3,0x2B,0x7A,0xCC,0x51,0x42,0xA1,0x7A,0x63,0xD1,0xE6,0xBB,0xA1,0xC5,0x2B,0xC2,
-0x36,0xBE,0x13,0x0D,0xE6,0xBD,0x63,0x7E,0x79,0x7B,0xA7,0x09,0x0D,0x40,0xAB,0x6A,
-0xDD,0x8F,0x8A,0xC3,0xF6,0xF6,0x8C,0x1A,0x42,0x05,0x51,0xD4,0x45,0xF5,0x9F,0xA7,
-0x62,0x21,0x68,0x15,0x20,0x43,0x3C,0x99,0xE7,0x7C,0xBD,0x24,0xD8,0xA9,0x91,0x17,
-0x73,0x88,0x3F,0x56,0x1B,0x31,0x38,0x18,0xB4,0x71,0x0F,0x9A,0xCD,0xC8,0x0E,0x9E,
-0x8E,0x2E,0x1B,0xE1,0x8C,0x98,0x83,0xCB,0x1F,0x31,0xF1,0x44,0x4C,0xC6,0x04,0x73,
-0x49,0x76,0x60,0x0F,0xC7,0xF8,0xBD,0x17,0x80,0x6B,0x2E,0xE9,0xCC,0x4C,0x0E,0x5A,
-0x9A,0x79,0x0F,0x20,0x0A,0x2E,0xD5,0x9E,0x63,0x26,0x1E,0x55,0x92,0x94,0xD8,0x82,
-0x17,0x5A,0x7B,0xD0,0xBC,0xC7,0x8F,0x4E,0x86,0x04,
+0x68,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x25,
+0x30,0x23,0x06,0x03,0x55,0x04,0x0A,0x13,0x1C,0x53,0x74,0x61,0x72,0x66,0x69,0x65,
+0x6C,0x64,0x20,0x54,0x65,0x63,0x68,0x6E,0x6F,0x6C,0x6F,0x67,0x69,0x65,0x73,0x2C,
+0x20,0x49,0x6E,0x63,0x2E,0x31,0x32,0x30,0x30,0x06,0x03,0x55,0x04,0x0B,0x13,0x29,
+0x53,0x74,0x61,0x72,0x66,0x69,0x65,0x6C,0x64,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,
+0x32,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,
+0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x1E,0x17,0x0D,0x30,0x34,0x30,
+0x36,0x32,0x39,0x31,0x37,0x33,0x39,0x31,0x36,0x5A,0x17,0x0D,0x33,0x34,0x30,0x36,
+0x32,0x39,0x31,0x37,0x33,0x39,0x31,0x36,0x5A,0x30,0x68,0x31,0x0B,0x30,0x09,0x06,
+0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x25,0x30,0x23,0x06,0x03,0x55,0x04,
+0x0A,0x13,0x1C,0x53,0x74,0x61,0x72,0x66,0x69,0x65,0x6C,0x64,0x20,0x54,0x65,0x63,
+0x68,0x6E,0x6F,0x6C,0x6F,0x67,0x69,0x65,0x73,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,
+0x32,0x30,0x30,0x06,0x03,0x55,0x04,0x0B,0x13,0x29,0x53,0x74,0x61,0x72,0x66,0x69,
+0x65,0x6C,0x64,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x32,0x20,0x43,0x65,0x72,0x74,
+0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,
+0x69,0x74,0x79,0x30,0x82,0x01,0x20,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,
+0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0D,0x00,0x30,0x82,0x01,0x08,0x02,
+0x82,0x01,0x01,0x00,0xB7,0x32,0xC8,0xFE,0xE9,0x71,0xA6,0x04,0x85,0xAD,0x0C,0x11,
+0x64,0xDF,0xCE,0x4D,0xEF,0xC8,0x03,0x18,0x87,0x3F,0xA1,0xAB,0xFB,0x3C,0xA6,0x9F,
+0xF0,0xC3,0xA1,0xDA,0xD4,0xD8,0x6E,0x2B,0x53,0x90,0xFB,0x24,0xA4,0x3E,0x84,0xF0,
+0x9E,0xE8,0x5F,0xEC,0xE5,0x27,0x44,0xF5,0x28,0xA6,0x3F,0x7B,0xDE,0xE0,0x2A,0xF0,
+0xC8,0xAF,0x53,0x2F,0x9E,0xCA,0x05,0x01,0x93,0x1E,0x8F,0x66,0x1C,0x39,0xA7,0x4D,
+0xFA,0x5A,0xB6,0x73,0x04,0x25,0x66,0xEB,0x77,0x7F,0xE7,0x59,0xC6,0x4A,0x99,0x25,
+0x14,0x54,0xEB,0x26,0xC7,0xF3,0x7F,0x19,0xD5,0x30,0x70,0x8F,0xAF,0xB0,0x46,0x2A,
+0xFF,0xAD,0xEB,0x29,0xED,0xD7,0x9F,0xAA,0x04,0x87,0xA3,0xD4,0xF9,0x89,0xA5,0x34,
+0x5F,0xDB,0x43,0x91,0x82,0x36,0xD9,0x66,0x3C,0xB1,0xB8,0xB9,0x82,0xFD,0x9C,0x3A,
+0x3E,0x10,0xC8,0x3B,0xEF,0x06,0x65,0x66,0x7A,0x9B,0x19,0x18,0x3D,0xFF,0x71,0x51,
+0x3C,0x30,0x2E,0x5F,0xBE,0x3D,0x77,0x73,0xB2,0x5D,0x06,0x6C,0xC3,0x23,0x56,0x9A,
+0x2B,0x85,0x26,0x92,0x1C,0xA7,0x02,0xB3,0xE4,0x3F,0x0D,0xAF,0x08,0x79,0x82,0xB8,
+0x36,0x3D,0xEA,0x9C,0xD3,0x35,0xB3,0xBC,0x69,0xCA,0xF5,0xCC,0x9D,0xE8,0xFD,0x64,
+0x8D,0x17,0x80,0x33,0x6E,0x5E,0x4A,0x5D,0x99,0xC9,0x1E,0x87,0xB4,0x9D,0x1A,0xC0,
+0xD5,0x6E,0x13,0x35,0x23,0x5E,0xDF,0x9B,0x5F,0x3D,0xEF,0xD6,0xF7,0x76,0xC2,0xEA,
+0x3E,0xBB,0x78,0x0D,0x1C,0x42,0x67,0x6B,0x04,0xD8,0xF8,0xD6,0xDA,0x6F,0x8B,0xF2,
+0x44,0xA0,0x01,0xAB,0x02,0x01,0x03,0xA3,0x81,0xC5,0x30,0x81,0xC2,0x30,0x1D,0x06,
+0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xBF,0x5F,0xB7,0xD1,0xCE,0xDD,0x1F,0x86,
+0xF4,0x5B,0x55,0xAC,0xDC,0xD7,0x10,0xC2,0x0E,0xA9,0x88,0xE7,0x30,0x81,0x92,0x06,
+0x03,0x55,0x1D,0x23,0x04,0x81,0x8A,0x30,0x81,0x87,0x80,0x14,0xBF,0x5F,0xB7,0xD1,
+0xCE,0xDD,0x1F,0x86,0xF4,0x5B,0x55,0xAC,0xDC,0xD7,0x10,0xC2,0x0E,0xA9,0x88,0xE7,
+0xA1,0x6C,0xA4,0x6A,0x30,0x68,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,
+0x02,0x55,0x53,0x31,0x25,0x30,0x23,0x06,0x03,0x55,0x04,0x0A,0x13,0x1C,0x53,0x74,
+0x61,0x72,0x66,0x69,0x65,0x6C,0x64,0x20,0x54,0x65,0x63,0x68,0x6E,0x6F,0x6C,0x6F,
+0x67,0x69,0x65,0x73,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x32,0x30,0x30,0x06,0x03,
+0x55,0x04,0x0B,0x13,0x29,0x53,0x74,0x61,0x72,0x66,0x69,0x65,0x6C,0x64,0x20,0x43,
+0x6C,0x61,0x73,0x73,0x20,0x32,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,
+0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x82,0x01,
+0x00,0x30,0x0C,0x06,0x03,0x55,0x1D,0x13,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,
+0x01,0x01,0x00,0x05,0x9D,0x3F,0x88,0x9D,0xD1,0xC9,0x1A,0x55,0xA1,0xAC,0x69,0xF3,
+0xF3,0x59,0xDA,0x9B,0x01,0x87,0x1A,0x4F,0x57,0xA9,0xA1,0x79,0x09,0x2A,0xDB,0xF7,
+0x2F,0xB2,0x1E,0xCC,0xC7,0x5E,0x6A,0xD8,0x83,0x87,0xA1,0x97,0xEF,0x49,0x35,0x3E,
+0x77,0x06,0x41,0x58,0x62,0xBF,0x8E,0x58,0xB8,0x0A,0x67,0x3F,0xEC,0xB3,0xDD,0x21,
+0x66,0x1F,0xC9,0x54,0xFA,0x72,0xCC,0x3D,0x4C,0x40,0xD8,0x81,0xAF,0x77,0x9E,0x83,
+0x7A,0xBB,0xA2,0xC7,0xF5,0x34,0x17,0x8E,0xD9,0x11,0x40,0xF4,0xFC,0x2C,0x2A,0x4D,
+0x15,0x7F,0xA7,0x62,0x5D,0x2E,0x25,0xD3,0x00,0x0B,0x20,0x1A,0x1D,0x68,0xF9,0x17,
+0xB8,0xF4,0xBD,0x8B,0xED,0x28,0x59,0xDD,0x4D,0x16,0x8B,0x17,0x83,0xC8,0xB2,0x65,
+0xC7,0x2D,0x7A,0xA5,0xAA,0xBC,0x53,0x86,0x6D,0xDD,0x57,0xA4,0xCA,0xF8,0x20,0x41,
+0x0B,0x68,0xF0,0xF4,0xFB,0x74,0xBE,0x56,0x5D,0x7A,0x79,0xF5,0xF9,0x1D,0x85,0xE3,
+0x2D,0x95,0xBE,0xF5,0x71,0x90,0x43,0xCC,0x8D,0x1F,0x9A,0x00,0x0A,0x87,0x29,0xE9,
+0x55,0x22,0x58,0x00,0x23,0xEA,0xE3,0x12,0x43,0x29,0x5B,0x47,0x08,0xDD,0x8C,0x41,
+0x6A,0x65,0x06,0xA8,0xE5,0x21,0xAA,0x41,0xB4,0x95,0x21,0x95,0xB9,0x7D,0xD1,0x34,
+0xAB,0x13,0xD6,0xAD,0xBC,0xDC,0xE2,0x3D,0x39,0xCD,0xBD,0x3E,0x75,0x70,0xA1,0x18,
+0x59,0x03,0xC9,0x22,0xB4,0x8F,0x9C,0xD5,0x5E,0x2A,0xD7,0xA5,0xB6,0xD4,0x0A,0x6D,
+0xF8,0xB7,0x40,0x11,0x46,0x9A,0x1F,0x79,0x0E,0x62,0xBF,0x0F,0x97,0xEC,0xE0,0x2F,
+0x1F,0x17,0x94,
+};
+
+
+/* subject:/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 1999 VeriSign, Inc. - For authorized use only/CN=VeriSign Class 3 Public Primary Certification Authority - G3 */
+/* issuer :/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 1999 VeriSign, Inc. - For authorized use only/CN=VeriSign Class 3 Public Primary Certification Authority - G3 */
+
+
+const unsigned char Verisign_Class_3_Public_Primary_Certification_Authority___G3_certificate[1054]={
+0x30,0x82,0x04,0x1A,0x30,0x82,0x03,0x02,0x02,0x11,0x00,0x9B,0x7E,0x06,0x49,0xA3,
+0x3E,0x62,0xB9,0xD5,0xEE,0x90,0x48,0x71,0x29,0xEF,0x57,0x30,0x0D,0x06,0x09,0x2A,
+0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x81,0xCA,0x31,0x0B,0x30,
+0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,0x30,0x15,0x06,0x03,
+0x55,0x04,0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,
+0x6E,0x63,0x2E,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,0x13,0x16,0x56,0x65,
+0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,0x20,0x4E,0x65,0x74,
+0x77,0x6F,0x72,0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,0x0B,0x13,0x31,0x28,
+0x63,0x29,0x20,0x31,0x39,0x39,0x39,0x20,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,
+0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,
+0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,
+0x31,0x45,0x30,0x43,0x06,0x03,0x55,0x04,0x03,0x13,0x3C,0x56,0x65,0x72,0x69,0x53,
+0x69,0x67,0x6E,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x33,0x20,0x50,0x75,0x62,0x6C,
+0x69,0x63,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,
+0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,
+0x74,0x79,0x20,0x2D,0x20,0x47,0x33,0x30,0x1E,0x17,0x0D,0x39,0x39,0x31,0x30,0x30,
+0x31,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x36,0x30,0x37,0x31,0x36,
+0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0xCA,0x31,0x0B,0x30,0x09,0x06,0x03,
+0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,0x30,0x15,0x06,0x03,0x55,0x04,0x0A,
+0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E,
+0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,0x13,0x16,0x56,0x65,0x72,0x69,0x53,
+0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,
+0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,0x0B,0x13,0x31,0x28,0x63,0x29,0x20,
+0x31,0x39,0x39,0x39,0x20,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,
+0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72,
+0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x45,0x30,
+0x43,0x06,0x03,0x55,0x04,0x03,0x13,0x3C,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,
+0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x33,0x20,0x50,0x75,0x62,0x6C,0x69,0x63,0x20,
+0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,
+0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20,
+0x2D,0x20,0x47,0x33,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,
+0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,
+0x02,0x82,0x01,0x01,0x00,0xCB,0xBA,0x9C,0x52,0xFC,0x78,0x1F,0x1A,0x1E,0x6F,0x1B,
+0x37,0x73,0xBD,0xF8,0xC9,0x6B,0x94,0x12,0x30,0x4F,0xF0,0x36,0x47,0xF5,0xD0,0x91,
+0x0A,0xF5,0x17,0xC8,0xA5,0x61,0xC1,0x16,0x40,0x4D,0xFB,0x8A,0x61,0x90,0xE5,0x76,
+0x20,0xC1,0x11,0x06,0x7D,0xAB,0x2C,0x6E,0xA6,0xF5,0x11,0x41,0x8E,0xFA,0x2D,0xAD,
+0x2A,0x61,0x59,0xA4,0x67,0x26,0x4C,0xD0,0xE8,0xBC,0x52,0x5B,0x70,0x20,0x04,0x58,
+0xD1,0x7A,0xC9,0xA4,0x69,0xBC,0x83,0x17,0x64,0xAD,0x05,0x8B,0xBC,0xD0,0x58,0xCE,
+0x8D,0x8C,0xF5,0xEB,0xF0,0x42,0x49,0x0B,0x9D,0x97,0x27,0x67,0x32,0x6E,0xE1,0xAE,
+0x93,0x15,0x1C,0x70,0xBC,0x20,0x4D,0x2F,0x18,0xDE,0x92,0x88,0xE8,0x6C,0x85,0x57,
+0x11,0x1A,0xE9,0x7E,0xE3,0x26,0x11,0x54,0xA2,0x45,0x96,0x55,0x83,0xCA,0x30,0x89,
+0xE8,0xDC,0xD8,0xA3,0xED,0x2A,0x80,0x3F,0x7F,0x79,0x65,0x57,0x3E,0x15,0x20,0x66,
+0x08,0x2F,0x95,0x93,0xBF,0xAA,0x47,0x2F,0xA8,0x46,0x97,0xF0,0x12,0xE2,0xFE,0xC2,
+0x0A,0x2B,0x51,0xE6,0x76,0xE6,0xB7,0x46,0xB7,0xE2,0x0D,0xA6,0xCC,0xA8,0xC3,0x4C,
+0x59,0x55,0x89,0xE6,0xE8,0x53,0x5C,0x1C,0xEA,0x9D,0xF0,0x62,0x16,0x0B,0xA7,0xC9,
+0x5F,0x0C,0xF0,0xDE,0xC2,0x76,0xCE,0xAF,0xF7,0x6A,0xF2,0xFA,0x41,0xA6,0xA2,0x33,
+0x14,0xC9,0xE5,0x7A,0x63,0xD3,0x9E,0x62,0x37,0xD5,0x85,0x65,0x9E,0x0E,0xE6,0x53,
+0x24,0x74,0x1B,0x5E,0x1D,0x12,0x53,0x5B,0xC7,0x2C,0xE7,0x83,0x49,0x3B,0x15,0xAE,
+0x8A,0x68,0xB9,0x57,0x97,0x02,0x03,0x01,0x00,0x01,0x30,0x0D,0x06,0x09,0x2A,0x86,
+0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x11,0x14,
+0x96,0xC1,0xAB,0x92,0x08,0xF7,0x3F,0x2F,0xC9,0xB2,0xFE,0xE4,0x5A,0x9F,0x64,0xDE,
+0xDB,0x21,0x4F,0x86,0x99,0x34,0x76,0x36,0x57,0xDD,0xD0,0x15,0x2F,0xC5,0xAD,0x7F,
+0x15,0x1F,0x37,0x62,0x73,0x3E,0xD4,0xE7,0x5F,0xCE,0x17,0x03,0xDB,0x35,0xFA,0x2B,
+0xDB,0xAE,0x60,0x09,0x5F,0x1E,0x5F,0x8F,0x6E,0xBB,0x0B,0x3D,0xEA,0x5A,0x13,0x1E,
+0x0C,0x60,0x6F,0xB5,0xC0,0xB5,0x23,0x22,0x2E,0x07,0x0B,0xCB,0xA9,0x74,0xCB,0x47,
+0xBB,0x1D,0xC1,0xD7,0xA5,0x6B,0xCC,0x2F,0xD2,0x42,0xFD,0x49,0xDD,0xA7,0x89,0xCF,
+0x53,0xBA,0xDA,0x00,0x5A,0x28,0xBF,0x82,0xDF,0xF8,0xBA,0x13,0x1D,0x50,0x86,0x82,
+0xFD,0x8E,0x30,0x8F,0x29,0x46,0xB0,0x1E,0x3D,0x35,0xDA,0x38,0x62,0x16,0x18,0x4A,
+0xAD,0xE6,0xB6,0x51,0x6C,0xDE,0xAF,0x62,0xEB,0x01,0xD0,0x1E,0x24,0xFE,0x7A,0x8F,
+0x12,0x1A,0x12,0x68,0xB8,0xFB,0x66,0x99,0x14,0x14,0x45,0x5C,0xAE,0xE7,0xAE,0x69,
+0x17,0x81,0x2B,0x5A,0x37,0xC9,0x5E,0x2A,0xF4,0xC6,0xE2,0xA1,0x5C,0x54,0x9B,0xA6,
+0x54,0x00,0xCF,0xF0,0xF1,0xC1,0xC7,0x98,0x30,0x1A,0x3B,0x36,0x16,0xDB,0xA3,0x6E,
+0xEA,0xFD,0xAD,0xB2,0xC2,0xDA,0xEF,0x02,0x47,0x13,0x8A,0xC0,0xF1,0xB3,0x31,0xAD,
+0x4F,0x1C,0xE1,0x4F,0x9C,0xAF,0x0F,0x0C,0x9D,0xF7,0x78,0x0D,0xD8,0xF4,0x35,0x56,
+0x80,0xDA,0xB7,0x6D,0x17,0x8F,0x9D,0x1E,0x81,0x64,0xE1,0xFE,0xC5,0x45,0xBA,0xAD,
+0x6B,0xB9,0x0A,0x7A,0x4E,0x4F,0x4B,0x84,0xEE,0x4B,0xF1,0x7D,0xDD,0x11,
+};
+
+
+/* subject:/C=US/ST=New Jersey/L=Jersey City/O=The USERTRUST Network/CN=USERTrust ECC Certification Authority */
+/* issuer :/C=US/ST=New Jersey/L=Jersey City/O=The USERTRUST Network/CN=USERTrust ECC Certification Authority */
+
+
+const unsigned char USERTrust_ECC_Certification_Authority_certificate[659]={
+0x30,0x82,0x02,0x8F,0x30,0x82,0x02,0x15,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x5C,
+0x8B,0x99,0xC5,0x5A,0x94,0xC5,0xD2,0x71,0x56,0xDE,0xCD,0x89,0x80,0xCC,0x26,0x30,
+0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x30,0x81,0x88,0x31,0x0B,
+0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x13,0x30,0x11,0x06,
+0x03,0x55,0x04,0x08,0x13,0x0A,0x4E,0x65,0x77,0x20,0x4A,0x65,0x72,0x73,0x65,0x79,
+0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x07,0x13,0x0B,0x4A,0x65,0x72,0x73,0x65,
+0x79,0x20,0x43,0x69,0x74,0x79,0x31,0x1E,0x30,0x1C,0x06,0x03,0x55,0x04,0x0A,0x13,
+0x15,0x54,0x68,0x65,0x20,0x55,0x53,0x45,0x52,0x54,0x52,0x55,0x53,0x54,0x20,0x4E,
+0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x2E,0x30,0x2C,0x06,0x03,0x55,0x04,0x03,0x13,
+0x25,0x55,0x53,0x45,0x52,0x54,0x72,0x75,0x73,0x74,0x20,0x45,0x43,0x43,0x20,0x43,
+0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,
+0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x1E,0x17,0x0D,0x31,0x30,0x30,0x32,0x30,0x31,
+0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x38,0x30,0x31,0x31,0x38,0x32,
+0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0x88,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,
+0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x08,0x13,
+0x0A,0x4E,0x65,0x77,0x20,0x4A,0x65,0x72,0x73,0x65,0x79,0x31,0x14,0x30,0x12,0x06,
+0x03,0x55,0x04,0x07,0x13,0x0B,0x4A,0x65,0x72,0x73,0x65,0x79,0x20,0x43,0x69,0x74,
+0x79,0x31,0x1E,0x30,0x1C,0x06,0x03,0x55,0x04,0x0A,0x13,0x15,0x54,0x68,0x65,0x20,
+0x55,0x53,0x45,0x52,0x54,0x52,0x55,0x53,0x54,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,
+0x6B,0x31,0x2E,0x30,0x2C,0x06,0x03,0x55,0x04,0x03,0x13,0x25,0x55,0x53,0x45,0x52,
+0x54,0x72,0x75,0x73,0x74,0x20,0x45,0x43,0x43,0x20,0x43,0x65,0x72,0x74,0x69,0x66,
+0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,
+0x79,0x30,0x76,0x30,0x10,0x06,0x07,0x2A,0x86,0x48,0xCE,0x3D,0x02,0x01,0x06,0x05,
+0x2B,0x81,0x04,0x00,0x22,0x03,0x62,0x00,0x04,0x1A,0xAC,0x54,0x5A,0xA9,0xF9,0x68,
+0x23,0xE7,0x7A,0xD5,0x24,0x6F,0x53,0xC6,0x5A,0xD8,0x4B,0xAB,0xC6,0xD5,0xB6,0xD1,
+0xE6,0x73,0x71,0xAE,0xDD,0x9C,0xD6,0x0C,0x61,0xFD,0xDB,0xA0,0x89,0x03,0xB8,0x05,
+0x14,0xEC,0x57,0xCE,0xEE,0x5D,0x3F,0xE2,0x21,0xB3,0xCE,0xF7,0xD4,0x8A,0x79,0xE0,
+0xA3,0x83,0x7E,0x2D,0x97,0xD0,0x61,0xC4,0xF1,0x99,0xDC,0x25,0x91,0x63,0xAB,0x7F,
+0x30,0xA3,0xB4,0x70,0xE2,0xC7,0xA1,0x33,0x9C,0xF3,0xBF,0x2E,0x5C,0x53,0xB1,0x5F,
+0xB3,0x7D,0x32,0x7F,0x8A,0x34,0xE3,0x79,0x79,0xA3,0x42,0x30,0x40,0x30,0x1D,0x06,
+0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x3A,0xE1,0x09,0x86,0xD4,0xCF,0x19,0xC2,
+0x96,0x76,0x74,0x49,0x76,0xDC,0xE0,0x35,0xC6,0x63,0x63,0x9A,0x30,0x0E,0x06,0x03,
+0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,
+0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0A,0x06,
+0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x03,0x68,0x00,0x30,0x65,0x02,0x30,
+0x36,0x67,0xA1,0x16,0x08,0xDC,0xE4,0x97,0x00,0x41,0x1D,0x4E,0xBE,0xE1,0x63,0x01,
+0xCF,0x3B,0xAA,0x42,0x11,0x64,0xA0,0x9D,0x94,0x39,0x02,0x11,0x79,0x5C,0x7B,0x1D,
+0xFA,0x64,0xB9,0xEE,0x16,0x42,0xB3,0xBF,0x8A,0xC2,0x09,0xC4,0xEC,0xE4,0xB1,0x4D,
+0x02,0x31,0x00,0xE9,0x2A,0x61,0x47,0x8C,0x52,0x4A,0x4B,0x4E,0x18,0x70,0xF6,0xD6,
+0x44,0xD6,0x6E,0xF5,0x83,0xBA,0x6D,0x58,0xBD,0x24,0xD9,0x56,0x48,0xEA,0xEF,0xC4,
+0xA2,0x46,0x81,0x88,0x6A,0x3A,0x46,0xD1,0xA9,0x9B,0x4D,0xC9,0x61,0xDA,0xD1,0x5D,
+0x57,0x6A,0x18,
+};
+
+
+/* subject:/C=US/O=GeoTrust Inc./CN=GeoTrust Global CA */
+/* issuer :/C=US/O=GeoTrust Inc./CN=GeoTrust Global CA */
+
+
+const unsigned char GeoTrust_Global_CA_certificate[856]={
+0x30,0x82,0x03,0x54,0x30,0x82,0x02,0x3C,0xA0,0x03,0x02,0x01,0x02,0x02,0x03,0x02,
+0x34,0x56,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,
+0x00,0x30,0x42,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,
+0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x47,0x65,0x6F,0x54,0x72,
+0x75,0x73,0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,0x1B,0x30,0x19,0x06,0x03,0x55,0x04,
+0x03,0x13,0x12,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x47,0x6C,0x6F,0x62,
+0x61,0x6C,0x20,0x43,0x41,0x30,0x1E,0x17,0x0D,0x30,0x32,0x30,0x35,0x32,0x31,0x30,
+0x34,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32,0x32,0x30,0x35,0x32,0x31,0x30,0x34,
+0x30,0x30,0x30,0x30,0x5A,0x30,0x42,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,
+0x13,0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x47,
+0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,0x1B,0x30,0x19,
+0x06,0x03,0x55,0x04,0x03,0x13,0x12,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,
+0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x43,0x41,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,
+0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,
+0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xDA,0xCC,0x18,0x63,0x30,0xFD,
+0xF4,0x17,0x23,0x1A,0x56,0x7E,0x5B,0xDF,0x3C,0x6C,0x38,0xE4,0x71,0xB7,0x78,0x91,
+0xD4,0xBC,0xA1,0xD8,0x4C,0xF8,0xA8,0x43,0xB6,0x03,0xE9,0x4D,0x21,0x07,0x08,0x88,
+0xDA,0x58,0x2F,0x66,0x39,0x29,0xBD,0x05,0x78,0x8B,0x9D,0x38,0xE8,0x05,0xB7,0x6A,
+0x7E,0x71,0xA4,0xE6,0xC4,0x60,0xA6,0xB0,0xEF,0x80,0xE4,0x89,0x28,0x0F,0x9E,0x25,
+0xD6,0xED,0x83,0xF3,0xAD,0xA6,0x91,0xC7,0x98,0xC9,0x42,0x18,0x35,0x14,0x9D,0xAD,
+0x98,0x46,0x92,0x2E,0x4F,0xCA,0xF1,0x87,0x43,0xC1,0x16,0x95,0x57,0x2D,0x50,0xEF,
+0x89,0x2D,0x80,0x7A,0x57,0xAD,0xF2,0xEE,0x5F,0x6B,0xD2,0x00,0x8D,0xB9,0x14,0xF8,
+0x14,0x15,0x35,0xD9,0xC0,0x46,0xA3,0x7B,0x72,0xC8,0x91,0xBF,0xC9,0x55,0x2B,0xCD,
+0xD0,0x97,0x3E,0x9C,0x26,0x64,0xCC,0xDF,0xCE,0x83,0x19,0x71,0xCA,0x4E,0xE6,0xD4,
+0xD5,0x7B,0xA9,0x19,0xCD,0x55,0xDE,0xC8,0xEC,0xD2,0x5E,0x38,0x53,0xE5,0x5C,0x4F,
+0x8C,0x2D,0xFE,0x50,0x23,0x36,0xFC,0x66,0xE6,0xCB,0x8E,0xA4,0x39,0x19,0x00,0xB7,
+0x95,0x02,0x39,0x91,0x0B,0x0E,0xFE,0x38,0x2E,0xD1,0x1D,0x05,0x9A,0xF6,0x4D,0x3E,
+0x6F,0x0F,0x07,0x1D,0xAF,0x2C,0x1E,0x8F,0x60,0x39,0xE2,0xFA,0x36,0x53,0x13,0x39,
+0xD4,0x5E,0x26,0x2B,0xDB,0x3D,0xA8,0x14,0xBD,0x32,0xEB,0x18,0x03,0x28,0x52,0x04,
+0x71,0xE5,0xAB,0x33,0x3D,0xE1,0x38,0xBB,0x07,0x36,0x84,0x62,0x9C,0x79,0xEA,0x16,
+0x30,0xF4,0x5F,0xC0,0x2B,0xE8,0x71,0x6B,0xE4,0xF9,0x02,0x03,0x01,0x00,0x01,0xA3,
+0x53,0x30,0x51,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,
+0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xC0,
+0x7A,0x98,0x68,0x8D,0x89,0xFB,0xAB,0x05,0x64,0x0C,0x11,0x7D,0xAA,0x7D,0x65,0xB8,
+0xCA,0xCC,0x4E,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,
+0xC0,0x7A,0x98,0x68,0x8D,0x89,0xFB,0xAB,0x05,0x64,0x0C,0x11,0x7D,0xAA,0x7D,0x65,
+0xB8,0xCA,0xCC,0x4E,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,
+0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x35,0xE3,0x29,0x6A,0xE5,0x2F,0x5D,0x54,
+0x8E,0x29,0x50,0x94,0x9F,0x99,0x1A,0x14,0xE4,0x8F,0x78,0x2A,0x62,0x94,0xA2,0x27,
+0x67,0x9E,0xD0,0xCF,0x1A,0x5E,0x47,0xE9,0xC1,0xB2,0xA4,0xCF,0xDD,0x41,0x1A,0x05,
+0x4E,0x9B,0x4B,0xEE,0x4A,0x6F,0x55,0x52,0xB3,0x24,0xA1,0x37,0x0A,0xEB,0x64,0x76,
+0x2A,0x2E,0x2C,0xF3,0xFD,0x3B,0x75,0x90,0xBF,0xFA,0x71,0xD8,0xC7,0x3D,0x37,0xD2,
+0xB5,0x05,0x95,0x62,0xB9,0xA6,0xDE,0x89,0x3D,0x36,0x7B,0x38,0x77,0x48,0x97,0xAC,
+0xA6,0x20,0x8F,0x2E,0xA6,0xC9,0x0C,0xC2,0xB2,0x99,0x45,0x00,0xC7,0xCE,0x11,0x51,
+0x22,0x22,0xE0,0xA5,0xEA,0xB6,0x15,0x48,0x09,0x64,0xEA,0x5E,0x4F,0x74,0xF7,0x05,
+0x3E,0xC7,0x8A,0x52,0x0C,0xDB,0x15,0xB4,0xBD,0x6D,0x9B,0xE5,0xC6,0xB1,0x54,0x68,
+0xA9,0xE3,0x69,0x90,0xB6,0x9A,0xA5,0x0F,0xB8,0xB9,0x3F,0x20,0x7D,0xAE,0x4A,0xB5,
+0xB8,0x9C,0xE4,0x1D,0xB6,0xAB,0xE6,0x94,0xA5,0xC1,0xC7,0x83,0xAD,0xDB,0xF5,0x27,
+0x87,0x0E,0x04,0x6C,0xD5,0xFF,0xDD,0xA0,0x5D,0xED,0x87,0x52,0xB7,0x2B,0x15,0x02,
+0xAE,0x39,0xA6,0x6A,0x74,0xE9,0xDA,0xC4,0xE7,0xBC,0x4D,0x34,0x1E,0xA9,0x5C,0x4D,
+0x33,0x5F,0x92,0x09,0x2F,0x88,0x66,0x5D,0x77,0x97,0xC7,0x1D,0x76,0x13,0xA9,0xD5,
+0xE5,0xF1,0x16,0x09,0x11,0x35,0xD5,0xAC,0xDB,0x24,0x71,0x70,0x2C,0x98,0x56,0x0B,
+0xD9,0x17,0xB4,0xD1,0xE3,0x51,0x2B,0x5E,0x75,0xE8,0xD5,0xD0,0xDC,0x4F,0x34,0xED,
+0xC2,0x05,0x66,0x80,0xA1,0xCB,0xE6,0x33,
+};
+
+
+/* subject:/C=US/ST=Arizona/L=Scottsdale/O=Starfield Technologies, Inc./CN=Starfield Root Certificate Authority - G2 */
+/* issuer :/C=US/ST=Arizona/L=Scottsdale/O=Starfield Technologies, Inc./CN=Starfield Root Certificate Authority - G2 */
+
+
+const unsigned char Starfield_Root_Certificate_Authority___G2_certificate[993]={
+0x30,0x82,0x03,0xDD,0x30,0x82,0x02,0xC5,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x00,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x30,
+0x81,0x8F,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,
+0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x08,0x13,0x07,0x41,0x72,0x69,0x7A,0x6F,0x6E,
+0x61,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x07,0x13,0x0A,0x53,0x63,0x6F,0x74,
+0x74,0x73,0x64,0x61,0x6C,0x65,0x31,0x25,0x30,0x23,0x06,0x03,0x55,0x04,0x0A,0x13,
+0x1C,0x53,0x74,0x61,0x72,0x66,0x69,0x65,0x6C,0x64,0x20,0x54,0x65,0x63,0x68,0x6E,
+0x6F,0x6C,0x6F,0x67,0x69,0x65,0x73,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x32,0x30,
+0x30,0x06,0x03,0x55,0x04,0x03,0x13,0x29,0x53,0x74,0x61,0x72,0x66,0x69,0x65,0x6C,
+0x64,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,
+0x74,0x65,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20,0x2D,0x20,0x47,
+0x32,0x30,0x1E,0x17,0x0D,0x30,0x39,0x30,0x39,0x30,0x31,0x30,0x30,0x30,0x30,0x30,
+0x30,0x5A,0x17,0x0D,0x33,0x37,0x31,0x32,0x33,0x31,0x32,0x33,0x35,0x39,0x35,0x39,
+0x5A,0x30,0x81,0x8F,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,
+0x53,0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x08,0x13,0x07,0x41,0x72,0x69,0x7A,
+0x6F,0x6E,0x61,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x07,0x13,0x0A,0x53,0x63,
+0x6F,0x74,0x74,0x73,0x64,0x61,0x6C,0x65,0x31,0x25,0x30,0x23,0x06,0x03,0x55,0x04,
+0x0A,0x13,0x1C,0x53,0x74,0x61,0x72,0x66,0x69,0x65,0x6C,0x64,0x20,0x54,0x65,0x63,
+0x68,0x6E,0x6F,0x6C,0x6F,0x67,0x69,0x65,0x73,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,
+0x32,0x30,0x30,0x06,0x03,0x55,0x04,0x03,0x13,0x29,0x53,0x74,0x61,0x72,0x66,0x69,
+0x65,0x6C,0x64,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,
+0x63,0x61,0x74,0x65,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20,0x2D,
+0x20,0x47,0x32,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,
+0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,
+0x82,0x01,0x01,0x00,0xBD,0xED,0xC1,0x03,0xFC,0xF6,0x8F,0xFC,0x02,0xB1,0x6F,0x5B,
+0x9F,0x48,0xD9,0x9D,0x79,0xE2,0xA2,0xB7,0x03,0x61,0x56,0x18,0xC3,0x47,0xB6,0xD7,
+0xCA,0x3D,0x35,0x2E,0x89,0x43,0xF7,0xA1,0x69,0x9B,0xDE,0x8A,0x1A,0xFD,0x13,0x20,
+0x9C,0xB4,0x49,0x77,0x32,0x29,0x56,0xFD,0xB9,0xEC,0x8C,0xDD,0x22,0xFA,0x72,0xDC,
+0x27,0x61,0x97,0xEE,0xF6,0x5A,0x84,0xEC,0x6E,0x19,0xB9,0x89,0x2C,0xDC,0x84,0x5B,
+0xD5,0x74,0xFB,0x6B,0x5F,0xC5,0x89,0xA5,0x10,0x52,0x89,0x46,0x55,0xF4,0xB8,0x75,
+0x1C,0xE6,0x7F,0xE4,0x54,0xAE,0x4B,0xF8,0x55,0x72,0x57,0x02,0x19,0xF8,0x17,0x71,
+0x59,0xEB,0x1E,0x28,0x07,0x74,0xC5,0x9D,0x48,0xBE,0x6C,0xB4,0xF4,0xA4,0xB0,0xF3,
+0x64,0x37,0x79,0x92,0xC0,0xEC,0x46,0x5E,0x7F,0xE1,0x6D,0x53,0x4C,0x62,0xAF,0xCD,
+0x1F,0x0B,0x63,0xBB,0x3A,0x9D,0xFB,0xFC,0x79,0x00,0x98,0x61,0x74,0xCF,0x26,0x82,
+0x40,0x63,0xF3,0xB2,0x72,0x6A,0x19,0x0D,0x99,0xCA,0xD4,0x0E,0x75,0xCC,0x37,0xFB,
+0x8B,0x89,0xC1,0x59,0xF1,0x62,0x7F,0x5F,0xB3,0x5F,0x65,0x30,0xF8,0xA7,0xB7,0x4D,
+0x76,0x5A,0x1E,0x76,0x5E,0x34,0xC0,0xE8,0x96,0x56,0x99,0x8A,0xB3,0xF0,0x7F,0xA4,
+0xCD,0xBD,0xDC,0x32,0x31,0x7C,0x91,0xCF,0xE0,0x5F,0x11,0xF8,0x6B,0xAA,0x49,0x5C,
+0xD1,0x99,0x94,0xD1,0xA2,0xE3,0x63,0x5B,0x09,0x76,0xB5,0x56,0x62,0xE1,0x4B,0x74,
+0x1D,0x96,0xD4,0x26,0xD4,0x08,0x04,0x59,0xD0,0x98,0x0E,0x0E,0xE6,0xDE,0xFC,0xC3,
+0xEC,0x1F,0x90,0xF1,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x0F,0x06,
+0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,
+0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x1D,
+0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x7C,0x0C,0x32,0x1F,0xA7,0xD9,0x30,
+0x7F,0xC4,0x7D,0x68,0xA3,0x62,0xA8,0xA1,0xCE,0xAB,0x07,0x5B,0x27,0x30,0x0D,0x06,
+0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x03,0x82,0x01,0x01,
+0x00,0x11,0x59,0xFA,0x25,0x4F,0x03,0x6F,0x94,0x99,0x3B,0x9A,0x1F,0x82,0x85,0x39,
+0xD4,0x76,0x05,0x94,0x5E,0xE1,0x28,0x93,0x6D,0x62,0x5D,0x09,0xC2,0xA0,0xA8,0xD4,
+0xB0,0x75,0x38,0xF1,0x34,0x6A,0x9D,0xE4,0x9F,0x8A,0x86,0x26,0x51,0xE6,0x2C,0xD1,
+0xC6,0x2D,0x6E,0x95,0x20,0x4A,0x92,0x01,0xEC,0xB8,0x8A,0x67,0x7B,0x31,0xE2,0x67,
+0x2E,0x8C,0x95,0x03,0x26,0x2E,0x43,0x9D,0x4A,0x31,0xF6,0x0E,0xB5,0x0C,0xBB,0xB7,
+0xE2,0x37,0x7F,0x22,0xBA,0x00,0xA3,0x0E,0x7B,0x52,0xFB,0x6B,0xBB,0x3B,0xC4,0xD3,
+0x79,0x51,0x4E,0xCD,0x90,0xF4,0x67,0x07,0x19,0xC8,0x3C,0x46,0x7A,0x0D,0x01,0x7D,
+0xC5,0x58,0xE7,0x6D,0xE6,0x85,0x30,0x17,0x9A,0x24,0xC4,0x10,0xE0,0x04,0xF7,0xE0,
+0xF2,0x7F,0xD4,0xAA,0x0A,0xFF,0x42,0x1D,0x37,0xED,0x94,0xE5,0x64,0x59,0x12,0x20,
+0x77,0x38,0xD3,0x32,0x3E,0x38,0x81,0x75,0x96,0x73,0xFA,0x68,0x8F,0xB1,0xCB,0xCE,
+0x1F,0xC5,0xEC,0xFA,0x9C,0x7E,0xCF,0x7E,0xB1,0xF1,0x07,0x2D,0xB6,0xFC,0xBF,0xCA,
+0xA4,0xBF,0xD0,0x97,0x05,0x4A,0xBC,0xEA,0x18,0x28,0x02,0x90,0xBD,0x54,0x78,0x09,
+0x21,0x71,0xD3,0xD1,0x7D,0x1D,0xD9,0x16,0xB0,0xA9,0x61,0x3D,0xD0,0x0A,0x00,0x22,
+0xFC,0xC7,0x7B,0xCB,0x09,0x64,0x45,0x0B,0x3B,0x40,0x81,0xF7,0x7D,0x7C,0x32,0xF5,
+0x98,0xCA,0x58,0x8E,0x7D,0x2A,0xEE,0x90,0x59,0x73,0x64,0xF9,0x36,0x74,0x5E,0x25,
+0xA1,0xF5,0x66,0x05,0x2E,0x7F,0x39,0x15,0xA9,0x2A,0xFB,0x50,0x8B,0x8E,0x85,0x69,
+0xF4,
+};
+
+
+/* subject:/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Global Root G3 */
+/* issuer :/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Global Root G3 */
+
+
+const unsigned char DigiCert_Global_Root_G3_certificate[579]={
+0x30,0x82,0x02,0x3F,0x30,0x82,0x01,0xC5,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x05,
+0x55,0x56,0xBC,0xF2,0x5E,0xA4,0x35,0x35,0xC3,0xA4,0x0F,0xD5,0xAB,0x45,0x72,0x30,
+0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x30,0x61,0x31,0x0B,0x30,
+0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30,0x13,0x06,0x03,
+0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74,0x20,0x49,0x6E,
+0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0B,0x13,0x10,0x77,0x77,0x77,0x2E,
+0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31,0x20,0x30,0x1E,
+0x06,0x03,0x55,0x04,0x03,0x13,0x17,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74,0x20,
+0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x52,0x6F,0x6F,0x74,0x20,0x47,0x33,0x30,0x1E,
+0x17,0x0D,0x31,0x33,0x30,0x38,0x30,0x31,0x31,0x32,0x30,0x30,0x30,0x30,0x5A,0x17,
+0x0D,0x33,0x38,0x30,0x31,0x31,0x35,0x31,0x32,0x30,0x30,0x30,0x30,0x5A,0x30,0x61,
+0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30,
+0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74,
+0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0B,0x13,0x10,0x77,
+0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31,
+0x20,0x30,0x1E,0x06,0x03,0x55,0x04,0x03,0x13,0x17,0x44,0x69,0x67,0x69,0x43,0x65,
+0x72,0x74,0x20,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x52,0x6F,0x6F,0x74,0x20,0x47,
+0x33,0x30,0x76,0x30,0x10,0x06,0x07,0x2A,0x86,0x48,0xCE,0x3D,0x02,0x01,0x06,0x05,
+0x2B,0x81,0x04,0x00,0x22,0x03,0x62,0x00,0x04,0xDD,0xA7,0xD9,0xBB,0x8A,0xB8,0x0B,
+0xFB,0x0B,0x7F,0x21,0xD2,0xF0,0xBE,0xBE,0x73,0xF3,0x33,0x5D,0x1A,0xBC,0x34,0xEA,
+0xDE,0xC6,0x9B,0xBC,0xD0,0x95,0xF6,0xF0,0xCC,0xD0,0x0B,0xBA,0x61,0x5B,0x51,0x46,
+0x7E,0x9E,0x2D,0x9F,0xEE,0x8E,0x63,0x0C,0x17,0xEC,0x07,0x70,0xF5,0xCF,0x84,0x2E,
+0x40,0x83,0x9C,0xE8,0x3F,0x41,0x6D,0x3B,0xAD,0xD3,0xA4,0x14,0x59,0x36,0x78,0x9D,
+0x03,0x43,0xEE,0x10,0x13,0x6C,0x72,0xDE,0xAE,0x88,0xA7,0xA1,0x6B,0xB5,0x43,0xCE,
+0x67,0xDC,0x23,0xFF,0x03,0x1C,0xA3,0xE2,0x3E,0xA3,0x42,0x30,0x40,0x30,0x0F,0x06,
+0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,
+0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x86,0x30,0x1D,
+0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xB3,0xDB,0x48,0xA4,0xF9,0xA1,0xC5,
+0xD8,0xAE,0x36,0x41,0xCC,0x11,0x63,0x69,0x62,0x29,0xBC,0x4B,0xC6,0x30,0x0A,0x06,
+0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x03,0x68,0x00,0x30,0x65,0x02,0x31,
+0x00,0xAD,0xBC,0xF2,0x6C,0x3F,0x12,0x4A,0xD1,0x2D,0x39,0xC3,0x0A,0x09,0x97,0x73,
+0xF4,0x88,0x36,0x8C,0x88,0x27,0xBB,0xE6,0x88,0x8D,0x50,0x85,0xA7,0x63,0xF9,0x9E,
+0x32,0xDE,0x66,0x93,0x0F,0xF1,0xCC,0xB1,0x09,0x8F,0xDD,0x6C,0xAB,0xFA,0x6B,0x7F,
+0xA0,0x02,0x30,0x39,0x66,0x5B,0xC2,0x64,0x8D,0xB8,0x9E,0x50,0xDC,0xA8,0xD5,0x49,
+0xA2,0xED,0xC7,0xDC,0xD1,0x49,0x7F,0x17,0x01,0xB8,0xC8,0x86,0x8F,0x4E,0x8C,0x88,
+0x2B,0xA8,0x9A,0xA9,0x8A,0xC5,0xD1,0x00,0xBD,0xF8,0x54,0xE2,0x9A,0xE5,0x5B,0x7C,
+0xB3,0x27,0x17,
+};
+
+
+/* subject:/C=US/O=thawte, Inc./OU=(c) 2007 thawte, Inc. - For authorized use only/CN=thawte Primary Root CA - G2 */
+/* issuer :/C=US/O=thawte, Inc./OU=(c) 2007 thawte, Inc. - For authorized use only/CN=thawte Primary Root CA - G2 */
+
+
+const unsigned char thawte_Primary_Root_CA___G2_certificate[652]={
+0x30,0x82,0x02,0x88,0x30,0x82,0x02,0x0D,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x35,
+0xFC,0x26,0x5C,0xD9,0x84,0x4F,0xC9,0x3D,0x26,0x3D,0x57,0x9B,0xAE,0xD7,0x56,0x30,
+0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x30,0x81,0x84,0x31,0x0B,
+0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30,0x13,0x06,
+0x03,0x55,0x04,0x0A,0x13,0x0C,0x74,0x68,0x61,0x77,0x74,0x65,0x2C,0x20,0x49,0x6E,
+0x63,0x2E,0x31,0x38,0x30,0x36,0x06,0x03,0x55,0x04,0x0B,0x13,0x2F,0x28,0x63,0x29,
+0x20,0x32,0x30,0x30,0x37,0x20,0x74,0x68,0x61,0x77,0x74,0x65,0x2C,0x20,0x49,0x6E,
+0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,
+0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x24,0x30,0x22,
+0x06,0x03,0x55,0x04,0x03,0x13,0x1B,0x74,0x68,0x61,0x77,0x74,0x65,0x20,0x50,0x72,
+0x69,0x6D,0x61,0x72,0x79,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x20,0x2D,0x20,
+0x47,0x32,0x30,0x1E,0x17,0x0D,0x30,0x37,0x31,0x31,0x30,0x35,0x30,0x30,0x30,0x30,
+0x30,0x30,0x5A,0x17,0x0D,0x33,0x38,0x30,0x31,0x31,0x38,0x32,0x33,0x35,0x39,0x35,
+0x39,0x5A,0x30,0x81,0x84,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,
+0x55,0x53,0x31,0x15,0x30,0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x74,0x68,0x61,
+0x77,0x74,0x65,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x38,0x30,0x36,0x06,0x03,0x55,
+0x04,0x0B,0x13,0x2F,0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x37,0x20,0x74,0x68,0x61,
+0x77,0x74,0x65,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,
+0x61,0x75,0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,
+0x6E,0x6C,0x79,0x31,0x24,0x30,0x22,0x06,0x03,0x55,0x04,0x03,0x13,0x1B,0x74,0x68,
+0x61,0x77,0x74,0x65,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x52,0x6F,0x6F,
+0x74,0x20,0x43,0x41,0x20,0x2D,0x20,0x47,0x32,0x30,0x76,0x30,0x10,0x06,0x07,0x2A,
+0x86,0x48,0xCE,0x3D,0x02,0x01,0x06,0x05,0x2B,0x81,0x04,0x00,0x22,0x03,0x62,0x00,
+0x04,0xA2,0xD5,0x9C,0x82,0x7B,0x95,0x9D,0xF1,0x52,0x78,0x87,0xFE,0x8A,0x16,0xBF,
+0x05,0xE6,0xDF,0xA3,0x02,0x4F,0x0D,0x07,0xC6,0x00,0x51,0xBA,0x0C,0x02,0x52,0x2D,
+0x22,0xA4,0x42,0x39,0xC4,0xFE,0x8F,0xEA,0xC9,0xC1,0xBE,0xD4,0x4D,0xFF,0x9F,0x7A,
+0x9E,0xE2,0xB1,0x7C,0x9A,0xAD,0xA7,0x86,0x09,0x73,0x87,0xD1,0xE7,0x9A,0xE3,0x7A,
+0xA5,0xAA,0x6E,0xFB,0xBA,0xB3,0x70,0xC0,0x67,0x88,0xA2,0x35,0xD4,0xA3,0x9A,0xB1,
+0xFD,0xAD,0xC2,0xEF,0x31,0xFA,0xA8,0xB9,0xF3,0xFB,0x08,0xC6,0x91,0xD1,0xFB,0x29,
+0x95,0xA3,0x42,0x30,0x40,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,
+0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,
+0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,
+0x14,0x9A,0xD8,0x00,0x30,0x00,0xE7,0x6B,0x7F,0x85,0x18,0xEE,0x8B,0xB6,0xCE,0x8A,
+0x0C,0xF8,0x11,0xE1,0xBB,0x30,0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,
+0x03,0x03,0x69,0x00,0x30,0x66,0x02,0x31,0x00,0xDD,0xF8,0xE0,0x57,0x47,0x5B,0xA7,
+0xE6,0x0A,0xC3,0xBD,0xF5,0x80,0x8A,0x97,0x35,0x0D,0x1B,0x89,0x3C,0x54,0x86,0x77,
+0x28,0xCA,0xA1,0xF4,0x79,0xDE,0xB5,0xE6,0x38,0xB0,0xF0,0x65,0x70,0x8C,0x7F,0x02,
+0x54,0xC2,0xBF,0xFF,0xD8,0xA1,0x3E,0xD9,0xCF,0x02,0x31,0x00,0xC4,0x8D,0x94,0xFC,
+0xDC,0x53,0xD2,0xDC,0x9D,0x78,0x16,0x1F,0x15,0x33,0x23,0x53,0x52,0xE3,0x5A,0x31,
+0x5D,0x9D,0xCA,0xAE,0xBD,0x13,0x29,0x44,0x0D,0x27,0x5B,0xA8,0xE7,0x68,0x9C,0x12,
+0xF7,0x58,0x3F,0x2E,0x72,0x02,0x57,0xA3,0x8F,0xA1,0x14,0x2E,
+};
+
+
+/* subject:/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 2008 VeriSign, Inc. - For authorized use only/CN=VeriSign Universal Root Certification Authority */
+/* issuer :/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 2008 VeriSign, Inc. - For authorized use only/CN=VeriSign Universal Root Certification Authority */
+
+
+const unsigned char VeriSign_Universal_Root_Certification_Authority_certificate[1213]={
+0x30,0x82,0x04,0xB9,0x30,0x82,0x03,0xA1,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x40,
+0x1A,0xC4,0x64,0x21,0xB3,0x13,0x21,0x03,0x0E,0xBB,0xE4,0x12,0x1A,0xC5,0x1D,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x30,0x81,
+0xBD,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,
+0x30,0x15,0x06,0x03,0x55,0x04,0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,
+0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,
+0x13,0x16,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,
+0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,
+0x0B,0x13,0x31,0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x38,0x20,0x56,0x65,0x72,0x69,
+0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,
+0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,
+0x6F,0x6E,0x6C,0x79,0x31,0x38,0x30,0x36,0x06,0x03,0x55,0x04,0x03,0x13,0x2F,0x56,
+0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x55,0x6E,0x69,0x76,0x65,0x72,0x73,0x61,
+0x6C,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,
+0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x1E,
+0x17,0x0D,0x30,0x38,0x30,0x34,0x30,0x32,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,
+0x0D,0x33,0x37,0x31,0x32,0x30,0x31,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,
+0xBD,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,
+0x30,0x15,0x06,0x03,0x55,0x04,0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,
+0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,
+0x13,0x16,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,
+0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,
+0x0B,0x13,0x31,0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x38,0x20,0x56,0x65,0x72,0x69,
+0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,
+0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,
+0x6F,0x6E,0x6C,0x79,0x31,0x38,0x30,0x36,0x06,0x03,0x55,0x04,0x03,0x13,0x2F,0x56,
+0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x55,0x6E,0x69,0x76,0x65,0x72,0x73,0x61,
+0x6C,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,
+0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x82,
+0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,
+0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xC7,
+0x61,0x37,0x5E,0xB1,0x01,0x34,0xDB,0x62,0xD7,0x15,0x9B,0xFF,0x58,0x5A,0x8C,0x23,
+0x23,0xD6,0x60,0x8E,0x91,0xD7,0x90,0x98,0x83,0x7A,0xE6,0x58,0x19,0x38,0x8C,0xC5,
+0xF6,0xE5,0x64,0x85,0xB4,0xA2,0x71,0xFB,0xED,0xBD,0xB9,0xDA,0xCD,0x4D,0x00,0xB4,
+0xC8,0x2D,0x73,0xA5,0xC7,0x69,0x71,0x95,0x1F,0x39,0x3C,0xB2,0x44,0x07,0x9C,0xE8,
+0x0E,0xFA,0x4D,0x4A,0xC4,0x21,0xDF,0x29,0x61,0x8F,0x32,0x22,0x61,0x82,0xC5,0x87,
+0x1F,0x6E,0x8C,0x7C,0x5F,0x16,0x20,0x51,0x44,0xD1,0x70,0x4F,0x57,0xEA,0xE3,0x1C,
+0xE3,0xCC,0x79,0xEE,0x58,0xD8,0x0E,0xC2,0xB3,0x45,0x93,0xC0,0x2C,0xE7,0x9A,0x17,
+0x2B,0x7B,0x00,0x37,0x7A,0x41,0x33,0x78,0xE1,0x33,0xE2,0xF3,0x10,0x1A,0x7F,0x87,
+0x2C,0xBE,0xF6,0xF5,0xF7,0x42,0xE2,0xE5,0xBF,0x87,0x62,0x89,0x5F,0x00,0x4B,0xDF,
+0xC5,0xDD,0xE4,0x75,0x44,0x32,0x41,0x3A,0x1E,0x71,0x6E,0x69,0xCB,0x0B,0x75,0x46,
+0x08,0xD1,0xCA,0xD2,0x2B,0x95,0xD0,0xCF,0xFB,0xB9,0x40,0x6B,0x64,0x8C,0x57,0x4D,
+0xFC,0x13,0x11,0x79,0x84,0xED,0x5E,0x54,0xF6,0x34,0x9F,0x08,0x01,0xF3,0x10,0x25,
+0x06,0x17,0x4A,0xDA,0xF1,0x1D,0x7A,0x66,0x6B,0x98,0x60,0x66,0xA4,0xD9,0xEF,0xD2,
+0x2E,0x82,0xF1,0xF0,0xEF,0x09,0xEA,0x44,0xC9,0x15,0x6A,0xE2,0x03,0x6E,0x33,0xD3,
+0xAC,0x9F,0x55,0x00,0xC7,0xF6,0x08,0x6A,0x94,0xB9,0x5F,0xDC,0xE0,0x33,0xF1,0x84,
+0x60,0xF9,0x5B,0x27,0x11,0xB4,0xFC,0x16,0xF2,0xBB,0x56,0x6A,0x80,0x25,0x8D,0x02,
+0x03,0x01,0x00,0x01,0xA3,0x81,0xB2,0x30,0x81,0xAF,0x30,0x0F,0x06,0x03,0x55,0x1D,
+0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,
+0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x6D,0x06,0x08,0x2B,
+0x06,0x01,0x05,0x05,0x07,0x01,0x0C,0x04,0x61,0x30,0x5F,0xA1,0x5D,0xA0,0x5B,0x30,
+0x59,0x30,0x57,0x30,0x55,0x16,0x09,0x69,0x6D,0x61,0x67,0x65,0x2F,0x67,0x69,0x66,
+0x30,0x21,0x30,0x1F,0x30,0x07,0x06,0x05,0x2B,0x0E,0x03,0x02,0x1A,0x04,0x14,0x8F,
+0xE5,0xD3,0x1A,0x86,0xAC,0x8D,0x8E,0x6B,0xC3,0xCF,0x80,0x6A,0xD4,0x48,0x18,0x2C,
+0x7B,0x19,0x2E,0x30,0x25,0x16,0x23,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x6C,0x6F,
+0x67,0x6F,0x2E,0x76,0x65,0x72,0x69,0x73,0x69,0x67,0x6E,0x2E,0x63,0x6F,0x6D,0x2F,
+0x76,0x73,0x6C,0x6F,0x67,0x6F,0x2E,0x67,0x69,0x66,0x30,0x1D,0x06,0x03,0x55,0x1D,
+0x0E,0x04,0x16,0x04,0x14,0xB6,0x77,0xFA,0x69,0x48,0x47,0x9F,0x53,0x12,0xD5,0xC2,
+0xEA,0x07,0x32,0x76,0x07,0xD1,0x97,0x07,0x19,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,
+0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x4A,0xF8,0xF8,
+0xB0,0x03,0xE6,0x2C,0x67,0x7B,0xE4,0x94,0x77,0x63,0xCC,0x6E,0x4C,0xF9,0x7D,0x0E,
+0x0D,0xDC,0xC8,0xB9,0x35,0xB9,0x70,0x4F,0x63,0xFA,0x24,0xFA,0x6C,0x83,0x8C,0x47,
+0x9D,0x3B,0x63,0xF3,0x9A,0xF9,0x76,0x32,0x95,0x91,0xB1,0x77,0xBC,0xAC,0x9A,0xBE,
+0xB1,0xE4,0x31,0x21,0xC6,0x81,0x95,0x56,0x5A,0x0E,0xB1,0xC2,0xD4,0xB1,0xA6,0x59,
+0xAC,0xF1,0x63,0xCB,0xB8,0x4C,0x1D,0x59,0x90,0x4A,0xEF,0x90,0x16,0x28,0x1F,0x5A,
+0xAE,0x10,0xFB,0x81,0x50,0x38,0x0C,0x6C,0xCC,0xF1,0x3D,0xC3,0xF5,0x63,0xE3,0xB3,
+0xE3,0x21,0xC9,0x24,0x39,0xE9,0xFD,0x15,0x66,0x46,0xF4,0x1B,0x11,0xD0,0x4D,0x73,
+0xA3,0x7D,0x46,0xF9,0x3D,0xED,0xA8,0x5F,0x62,0xD4,0xF1,0x3F,0xF8,0xE0,0x74,0x57,
+0x2B,0x18,0x9D,0x81,0xB4,0xC4,0x28,0xDA,0x94,0x97,0xA5,0x70,0xEB,0xAC,0x1D,0xBE,
+0x07,0x11,0xF0,0xD5,0xDB,0xDD,0xE5,0x8C,0xF0,0xD5,0x32,0xB0,0x83,0xE6,0x57,0xE2,
+0x8F,0xBF,0xBE,0xA1,0xAA,0xBF,0x3D,0x1D,0xB5,0xD4,0x38,0xEA,0xD7,0xB0,0x5C,0x3A,
+0x4F,0x6A,0x3F,0x8F,0xC0,0x66,0x6C,0x63,0xAA,0xE9,0xD9,0xA4,0x16,0xF4,0x81,0xD1,
+0x95,0x14,0x0E,0x7D,0xCD,0x95,0x34,0xD9,0xD2,0x8F,0x70,0x73,0x81,0x7B,0x9C,0x7E,
+0xBD,0x98,0x61,0xD8,0x45,0x87,0x98,0x90,0xC5,0xEB,0x86,0x30,0xC6,0x35,0xBF,0xF0,
+0xFF,0xC3,0x55,0x88,0x83,0x4B,0xEF,0x05,0x92,0x06,0x71,0xF2,0xB8,0x98,0x93,0xB7,
+0xEC,0xCD,0x82,0x61,0xF1,0x38,0xE6,0x4F,0x97,0x98,0x2A,0x5A,0x8D,
+};
+
+
+/* subject:/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 2007 VeriSign, Inc. - For authorized use only/CN=VeriSign Class 3 Public Primary Certification Authority - G4 */
+/* issuer :/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 2007 VeriSign, Inc. - For authorized use only/CN=VeriSign Class 3 Public Primary Certification Authority - G4 */
+
+
+const unsigned char VeriSign_Class_3_Public_Primary_Certification_Authority___G4_certificate[904]={
+0x30,0x82,0x03,0x84,0x30,0x82,0x03,0x0A,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x2F,
+0x80,0xFE,0x23,0x8C,0x0E,0x22,0x0F,0x48,0x67,0x12,0x28,0x91,0x87,0xAC,0xB3,0x30,
+0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x30,0x81,0xCA,0x31,0x0B,
+0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,0x30,0x15,0x06,
+0x03,0x55,0x04,0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,
+0x49,0x6E,0x63,0x2E,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,0x13,0x16,0x56,
+0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,0x20,0x4E,0x65,
+0x74,0x77,0x6F,0x72,0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,0x0B,0x13,0x31,
+0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x37,0x20,0x56,0x65,0x72,0x69,0x53,0x69,0x67,
+0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,
+0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,
+0x79,0x31,0x45,0x30,0x43,0x06,0x03,0x55,0x04,0x03,0x13,0x3C,0x56,0x65,0x72,0x69,
+0x53,0x69,0x67,0x6E,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x33,0x20,0x50,0x75,0x62,
+0x6C,0x69,0x63,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,
+0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,
+0x69,0x74,0x79,0x20,0x2D,0x20,0x47,0x34,0x30,0x1E,0x17,0x0D,0x30,0x37,0x31,0x31,
+0x30,0x35,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x38,0x30,0x31,0x31,
+0x38,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0xCA,0x31,0x0B,0x30,0x09,0x06,
+0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,0x30,0x15,0x06,0x03,0x55,0x04,
+0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,0x6E,0x63,
+0x2E,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,0x13,0x16,0x56,0x65,0x72,0x69,
+0x53,0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,0x20,0x4E,0x65,0x74,0x77,0x6F,
+0x72,0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,0x0B,0x13,0x31,0x28,0x63,0x29,
+0x20,0x32,0x30,0x30,0x37,0x20,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,
+0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,
+0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x45,
+0x30,0x43,0x06,0x03,0x55,0x04,0x03,0x13,0x3C,0x56,0x65,0x72,0x69,0x53,0x69,0x67,
+0x6E,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x33,0x20,0x50,0x75,0x62,0x6C,0x69,0x63,
+0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,
+0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,
+0x20,0x2D,0x20,0x47,0x34,0x30,0x76,0x30,0x10,0x06,0x07,0x2A,0x86,0x48,0xCE,0x3D,
+0x02,0x01,0x06,0x05,0x2B,0x81,0x04,0x00,0x22,0x03,0x62,0x00,0x04,0xA7,0x56,0x7A,
+0x7C,0x52,0xDA,0x64,0x9B,0x0E,0x2D,0x5C,0xD8,0x5E,0xAC,0x92,0x3D,0xFE,0x01,0xE6,
+0x19,0x4A,0x3D,0x14,0x03,0x4B,0xFA,0x60,0x27,0x20,0xD9,0x83,0x89,0x69,0xFA,0x54,
+0xC6,0x9A,0x18,0x5E,0x55,0x2A,0x64,0xDE,0x06,0xF6,0x8D,0x4A,0x3B,0xAD,0x10,0x3C,
+0x65,0x3D,0x90,0x88,0x04,0x89,0xE0,0x30,0x61,0xB3,0xAE,0x5D,0x01,0xA7,0x7B,0xDE,
+0x7C,0xB2,0xBE,0xCA,0x65,0x61,0x00,0x86,0xAE,0xDA,0x8F,0x7B,0xD0,0x89,0xAD,0x4D,
+0x1D,0x59,0x9A,0x41,0xB1,0xBC,0x47,0x80,0xDC,0x9E,0x62,0xC3,0xF9,0xA3,0x81,0xB2,
+0x30,0x81,0xAF,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,
+0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,
+0x03,0x02,0x01,0x06,0x30,0x6D,0x06,0x08,0x2B,0x06,0x01,0x05,0x05,0x07,0x01,0x0C,
+0x04,0x61,0x30,0x5F,0xA1,0x5D,0xA0,0x5B,0x30,0x59,0x30,0x57,0x30,0x55,0x16,0x09,
+0x69,0x6D,0x61,0x67,0x65,0x2F,0x67,0x69,0x66,0x30,0x21,0x30,0x1F,0x30,0x07,0x06,
+0x05,0x2B,0x0E,0x03,0x02,0x1A,0x04,0x14,0x8F,0xE5,0xD3,0x1A,0x86,0xAC,0x8D,0x8E,
+0x6B,0xC3,0xCF,0x80,0x6A,0xD4,0x48,0x18,0x2C,0x7B,0x19,0x2E,0x30,0x25,0x16,0x23,
+0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x6C,0x6F,0x67,0x6F,0x2E,0x76,0x65,0x72,0x69,
+0x73,0x69,0x67,0x6E,0x2E,0x63,0x6F,0x6D,0x2F,0x76,0x73,0x6C,0x6F,0x67,0x6F,0x2E,
+0x67,0x69,0x66,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xB3,0x16,
+0x91,0xFD,0xEE,0xA6,0x6E,0xE4,0xB5,0x2E,0x49,0x8F,0x87,0x78,0x81,0x80,0xEC,0xE5,
+0xB1,0xB5,0x30,0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x03,0x68,
+0x00,0x30,0x65,0x02,0x30,0x66,0x21,0x0C,0x18,0x26,0x60,0x5A,0x38,0x7B,0x56,0x42,
+0xE0,0xA7,0xFC,0x36,0x84,0x51,0x91,0x20,0x2C,0x76,0x4D,0x43,0x3D,0xC4,0x1D,0x84,
+0x23,0xD0,0xAC,0xD6,0x7C,0x35,0x06,0xCE,0xCD,0x69,0xBD,0x90,0x0D,0xDB,0x6C,0x48,
+0x42,0x1D,0x0E,0xAA,0x42,0x02,0x31,0x00,0x9C,0x3D,0x48,0x39,0x23,0x39,0x58,0x1A,
+0x15,0x12,0x59,0x6A,0x9E,0xEF,0xD5,0x59,0xB2,0x1D,0x52,0x2C,0x99,0x71,0xCD,0xC7,
+0x29,0xDF,0x1B,0x2A,0x61,0x7B,0x71,0xD1,0xDE,0xF3,0xC0,0xE5,0x0D,0x3A,0x4A,0xAA,
+0x2D,0xA7,0xD8,0x86,0x2A,0xDD,0x2E,0x10,
+};
+
+
+/* subject:/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Global Root G2 */
+/* issuer :/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Global Root G2 */
+
+
+const unsigned char DigiCert_Global_Root_G2_certificate[914]={
+0x30,0x82,0x03,0x8E,0x30,0x82,0x02,0x76,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x03,
+0x3A,0xF1,0xE6,0xA7,0x11,0xA9,0xA0,0xBB,0x28,0x64,0xB1,0x1D,0x09,0xFA,0xE5,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x30,0x61,
+0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30,
+0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74,
+0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0B,0x13,0x10,0x77,
+0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31,
+0x20,0x30,0x1E,0x06,0x03,0x55,0x04,0x03,0x13,0x17,0x44,0x69,0x67,0x69,0x43,0x65,
+0x72,0x74,0x20,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x52,0x6F,0x6F,0x74,0x20,0x47,
+0x32,0x30,0x1E,0x17,0x0D,0x31,0x33,0x30,0x38,0x30,0x31,0x31,0x32,0x30,0x30,0x30,
+0x30,0x5A,0x17,0x0D,0x33,0x38,0x30,0x31,0x31,0x35,0x31,0x32,0x30,0x30,0x30,0x30,
+0x5A,0x30,0x61,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,
+0x31,0x15,0x30,0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,0x43,
+0x65,0x72,0x74,0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0B,
+0x13,0x10,0x77,0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,0x63,
+0x6F,0x6D,0x31,0x20,0x30,0x1E,0x06,0x03,0x55,0x04,0x03,0x13,0x17,0x44,0x69,0x67,
+0x69,0x43,0x65,0x72,0x74,0x20,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x52,0x6F,0x6F,
+0x74,0x20,0x47,0x32,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,
+0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,
+0x02,0x82,0x01,0x01,0x00,0xBB,0x37,0xCD,0x34,0xDC,0x7B,0x6B,0xC9,0xB2,0x68,0x90,
+0xAD,0x4A,0x75,0xFF,0x46,0xBA,0x21,0x0A,0x08,0x8D,0xF5,0x19,0x54,0xC9,0xFB,0x88,
+0xDB,0xF3,0xAE,0xF2,0x3A,0x89,0x91,0x3C,0x7A,0xE6,0xAB,0x06,0x1A,0x6B,0xCF,0xAC,
+0x2D,0xE8,0x5E,0x09,0x24,0x44,0xBA,0x62,0x9A,0x7E,0xD6,0xA3,0xA8,0x7E,0xE0,0x54,
+0x75,0x20,0x05,0xAC,0x50,0xB7,0x9C,0x63,0x1A,0x6C,0x30,0xDC,0xDA,0x1F,0x19,0xB1,
+0xD7,0x1E,0xDE,0xFD,0xD7,0xE0,0xCB,0x94,0x83,0x37,0xAE,0xEC,0x1F,0x43,0x4E,0xDD,
+0x7B,0x2C,0xD2,0xBD,0x2E,0xA5,0x2F,0xE4,0xA9,0xB8,0xAD,0x3A,0xD4,0x99,0xA4,0xB6,
+0x25,0xE9,0x9B,0x6B,0x00,0x60,0x92,0x60,0xFF,0x4F,0x21,0x49,0x18,0xF7,0x67,0x90,
+0xAB,0x61,0x06,0x9C,0x8F,0xF2,0xBA,0xE9,0xB4,0xE9,0x92,0x32,0x6B,0xB5,0xF3,0x57,
+0xE8,0x5D,0x1B,0xCD,0x8C,0x1D,0xAB,0x95,0x04,0x95,0x49,0xF3,0x35,0x2D,0x96,0xE3,
+0x49,0x6D,0xDD,0x77,0xE3,0xFB,0x49,0x4B,0xB4,0xAC,0x55,0x07,0xA9,0x8F,0x95,0xB3,
+0xB4,0x23,0xBB,0x4C,0x6D,0x45,0xF0,0xF6,0xA9,0xB2,0x95,0x30,0xB4,0xFD,0x4C,0x55,
+0x8C,0x27,0x4A,0x57,0x14,0x7C,0x82,0x9D,0xCD,0x73,0x92,0xD3,0x16,0x4A,0x06,0x0C,
+0x8C,0x50,0xD1,0x8F,0x1E,0x09,0xBE,0x17,0xA1,0xE6,0x21,0xCA,0xFD,0x83,0xE5,0x10,
+0xBC,0x83,0xA5,0x0A,0xC4,0x67,0x28,0xF6,0x73,0x14,0x14,0x3D,0x46,0x76,0xC3,0x87,
+0x14,0x89,0x21,0x34,0x4D,0xAF,0x0F,0x45,0x0C,0xA6,0x49,0xA1,0xBA,0xBB,0x9C,0xC5,
+0xB1,0x33,0x83,0x29,0x85,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x0F,
+0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,
+0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x86,0x30,
+0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x4E,0x22,0x54,0x20,0x18,0x95,
+0xE6,0xE3,0x6E,0xE6,0x0F,0xFA,0xFA,0xB9,0x12,0xED,0x06,0x17,0x8F,0x39,0x30,0x0D,
+0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x03,0x82,0x01,
+0x01,0x00,0x60,0x67,0x28,0x94,0x6F,0x0E,0x48,0x63,0xEB,0x31,0xDD,0xEA,0x67,0x18,
+0xD5,0x89,0x7D,0x3C,0xC5,0x8B,0x4A,0x7F,0xE9,0xBE,0xDB,0x2B,0x17,0xDF,0xB0,0x5F,
+0x73,0x77,0x2A,0x32,0x13,0x39,0x81,0x67,0x42,0x84,0x23,0xF2,0x45,0x67,0x35,0xEC,
+0x88,0xBF,0xF8,0x8F,0xB0,0x61,0x0C,0x34,0xA4,0xAE,0x20,0x4C,0x84,0xC6,0xDB,0xF8,
+0x35,0xE1,0x76,0xD9,0xDF,0xA6,0x42,0xBB,0xC7,0x44,0x08,0x86,0x7F,0x36,0x74,0x24,
+0x5A,0xDA,0x6C,0x0D,0x14,0x59,0x35,0xBD,0xF2,0x49,0xDD,0xB6,0x1F,0xC9,0xB3,0x0D,
+0x47,0x2A,0x3D,0x99,0x2F,0xBB,0x5C,0xBB,0xB5,0xD4,0x20,0xE1,0x99,0x5F,0x53,0x46,
+0x15,0xDB,0x68,0x9B,0xF0,0xF3,0x30,0xD5,0x3E,0x31,0xE2,0x8D,0x84,0x9E,0xE3,0x8A,
+0xDA,0xDA,0x96,0x3E,0x35,0x13,0xA5,0x5F,0xF0,0xF9,0x70,0x50,0x70,0x47,0x41,0x11,
+0x57,0x19,0x4E,0xC0,0x8F,0xAE,0x06,0xC4,0x95,0x13,0x17,0x2F,0x1B,0x25,0x9F,0x75,
+0xF2,0xB1,0x8E,0x99,0xA1,0x6F,0x13,0xB1,0x41,0x71,0xFE,0x88,0x2A,0xC8,0x4F,0x10,
+0x20,0x55,0xD7,0xF3,0x14,0x45,0xE5,0xE0,0x44,0xF4,0xEA,0x87,0x95,0x32,0x93,0x0E,
+0xFE,0x53,0x46,0xFA,0x2C,0x9D,0xFF,0x8B,0x22,0xB9,0x4B,0xD9,0x09,0x45,0xA4,0xDE,
+0xA4,0xB8,0x9A,0x58,0xDD,0x1B,0x7D,0x52,0x9F,0x8E,0x59,0x43,0x88,0x81,0xA4,0x9E,
+0x26,0xD5,0x6F,0xAD,0xDD,0x0D,0xC6,0x37,0x7D,0xED,0x03,0x92,0x1B,0xE5,0x77,0x5F,
+0x76,0xEE,0x3C,0x8D,0xC4,0x5D,0x56,0x5B,0xA2,0xD9,0x66,0x6E,0xB3,0x35,0x37,0xE5,
+0x32,0xB6,
};
@@ -156,6 +886,196 @@ const unsigned char AddTrust_Low_Value_Services_Root_certificate[1052]={
};
+/* subject:/C=US/O=AffirmTrust/CN=AffirmTrust Premium ECC */
+/* issuer :/C=US/O=AffirmTrust/CN=AffirmTrust Premium ECC */
+
+
+const unsigned char AffirmTrust_Premium_ECC_certificate[514]={
+0x30,0x82,0x01,0xFE,0x30,0x82,0x01,0x85,0xA0,0x03,0x02,0x01,0x02,0x02,0x08,0x74,
+0x97,0x25,0x8A,0xC7,0x3F,0x7A,0x54,0x30,0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,
+0x04,0x03,0x03,0x30,0x45,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,
+0x55,0x53,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x0C,0x0B,0x41,0x66,0x66,
+0x69,0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x31,0x20,0x30,0x1E,0x06,0x03,0x55,0x04,
+0x03,0x0C,0x17,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x20,0x50,
+0x72,0x65,0x6D,0x69,0x75,0x6D,0x20,0x45,0x43,0x43,0x30,0x1E,0x17,0x0D,0x31,0x30,
+0x30,0x31,0x32,0x39,0x31,0x34,0x32,0x30,0x32,0x34,0x5A,0x17,0x0D,0x34,0x30,0x31,
+0x32,0x33,0x31,0x31,0x34,0x32,0x30,0x32,0x34,0x5A,0x30,0x45,0x31,0x0B,0x30,0x09,
+0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x14,0x30,0x12,0x06,0x03,0x55,
+0x04,0x0A,0x0C,0x0B,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x31,
+0x20,0x30,0x1E,0x06,0x03,0x55,0x04,0x03,0x0C,0x17,0x41,0x66,0x66,0x69,0x72,0x6D,
+0x54,0x72,0x75,0x73,0x74,0x20,0x50,0x72,0x65,0x6D,0x69,0x75,0x6D,0x20,0x45,0x43,
+0x43,0x30,0x76,0x30,0x10,0x06,0x07,0x2A,0x86,0x48,0xCE,0x3D,0x02,0x01,0x06,0x05,
+0x2B,0x81,0x04,0x00,0x22,0x03,0x62,0x00,0x04,0x0D,0x30,0x5E,0x1B,0x15,0x9D,0x03,
+0xD0,0xA1,0x79,0x35,0xB7,0x3A,0x3C,0x92,0x7A,0xCA,0x15,0x1C,0xCD,0x62,0xF3,0x9C,
+0x26,0x5C,0x07,0x3D,0xE5,0x54,0xFA,0xA3,0xD6,0xCC,0x12,0xEA,0xF4,0x14,0x5F,0xE8,
+0x8E,0x19,0xAB,0x2F,0x2E,0x48,0xE6,0xAC,0x18,0x43,0x78,0xAC,0xD0,0x37,0xC3,0xBD,
+0xB2,0xCD,0x2C,0xE6,0x47,0xE2,0x1A,0xE6,0x63,0xB8,0x3D,0x2E,0x2F,0x78,0xC4,0x4F,
+0xDB,0xF4,0x0F,0xA4,0x68,0x4C,0x55,0x72,0x6B,0x95,0x1D,0x4E,0x18,0x42,0x95,0x78,
+0xCC,0x37,0x3C,0x91,0xE2,0x9B,0x65,0x2B,0x29,0xA3,0x42,0x30,0x40,0x30,0x1D,0x06,
+0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x9A,0xAF,0x29,0x7A,0xC0,0x11,0x35,0x35,
+0x26,0x51,0x30,0x00,0xC3,0x6A,0xFE,0x40,0xD5,0xAE,0xD6,0x3C,0x30,0x0F,0x06,0x03,
+0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,
+0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0A,0x06,
+0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x03,0x67,0x00,0x30,0x64,0x02,0x30,
+0x17,0x09,0xF3,0x87,0x88,0x50,0x5A,0xAF,0xC8,0xC0,0x42,0xBF,0x47,0x5F,0xF5,0x6C,
+0x6A,0x86,0xE0,0xC4,0x27,0x74,0xE4,0x38,0x53,0xD7,0x05,0x7F,0x1B,0x34,0xE3,0xC6,
+0x2F,0xB3,0xCA,0x09,0x3C,0x37,0x9D,0xD7,0xE7,0xB8,0x46,0xF1,0xFD,0xA1,0xE2,0x71,
+0x02,0x30,0x42,0x59,0x87,0x43,0xD4,0x51,0xDF,0xBA,0xD3,0x09,0x32,0x5A,0xCE,0x88,
+0x7E,0x57,0x3D,0x9C,0x5F,0x42,0x6B,0xF5,0x07,0x2D,0xB5,0xF0,0x82,0x93,0xF9,0x59,
+0x6F,0xAE,0x64,0xFA,0x58,0xE5,0x8B,0x1E,0xE3,0x63,0xBE,0xB5,0x81,0xCD,0x6F,0x02,
+0x8C,0x79,
+};
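+
+/*
+ * Illustrative sketch, not part of this change: each array in this file is
+ * a complete DER-encoded X.509 certificate, so it can be decoded directly
+ * with OpenSSL's d2i_X509(). The helper below is a hypothetical example of
+ * parsing one root and printing its subject name.
+ */
+#if 0 /* example only */
+#include <stdio.h>
+#include <openssl/x509.h>
+
+static void print_root_subject(const unsigned char* der, long len) {
+  const unsigned char* p = der; /* d2i_X509 advances this pointer */
+  X509* cert = d2i_X509(NULL, &p, len);
+  if (cert != NULL) {
+    char buf[256];
+    X509_NAME_oneline(X509_get_subject_name(cert), buf, sizeof(buf));
+    printf("root subject: %s\n", buf);
+    X509_free(cert);
+  }
+}
+#endif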
+
+
+/* subject:/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 1999 VeriSign, Inc. - For authorized use only/CN=VeriSign Class 4 Public Primary Certification Authority - G3 */
+/* issuer :/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 1999 VeriSign, Inc. - For authorized use only/CN=VeriSign Class 4 Public Primary Certification Authority - G3 */
+
+
+const unsigned char Verisign_Class_4_Public_Primary_Certification_Authority___G3_certificate[1054]={
+0x30,0x82,0x04,0x1A,0x30,0x82,0x03,0x02,0x02,0x11,0x00,0xEC,0xA0,0xA7,0x8B,0x6E,
+0x75,0x6A,0x01,0xCF,0xC4,0x7C,0xCC,0x2F,0x94,0x5E,0xD7,0x30,0x0D,0x06,0x09,0x2A,
+0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x81,0xCA,0x31,0x0B,0x30,
+0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,0x30,0x15,0x06,0x03,
+0x55,0x04,0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,
+0x6E,0x63,0x2E,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,0x13,0x16,0x56,0x65,
+0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,0x20,0x4E,0x65,0x74,
+0x77,0x6F,0x72,0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,0x0B,0x13,0x31,0x28,
+0x63,0x29,0x20,0x31,0x39,0x39,0x39,0x20,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,
+0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,
+0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,
+0x31,0x45,0x30,0x43,0x06,0x03,0x55,0x04,0x03,0x13,0x3C,0x56,0x65,0x72,0x69,0x53,
+0x69,0x67,0x6E,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x34,0x20,0x50,0x75,0x62,0x6C,
+0x69,0x63,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,
+0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,
+0x74,0x79,0x20,0x2D,0x20,0x47,0x33,0x30,0x1E,0x17,0x0D,0x39,0x39,0x31,0x30,0x30,
+0x31,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x36,0x30,0x37,0x31,0x36,
+0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0xCA,0x31,0x0B,0x30,0x09,0x06,0x03,
+0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,0x30,0x15,0x06,0x03,0x55,0x04,0x0A,
+0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E,
+0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,0x13,0x16,0x56,0x65,0x72,0x69,0x53,
+0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,
+0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,0x0B,0x13,0x31,0x28,0x63,0x29,0x20,
+0x31,0x39,0x39,0x39,0x20,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,
+0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72,
+0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x45,0x30,
+0x43,0x06,0x03,0x55,0x04,0x03,0x13,0x3C,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,
+0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x34,0x20,0x50,0x75,0x62,0x6C,0x69,0x63,0x20,
+0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,
+0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20,
+0x2D,0x20,0x47,0x33,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,
+0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,
+0x02,0x82,0x01,0x01,0x00,0xAD,0xCB,0xA5,0x11,0x69,0xC6,0x59,0xAB,0xF1,0x8F,0xB5,
+0x19,0x0F,0x56,0xCE,0xCC,0xB5,0x1F,0x20,0xE4,0x9E,0x26,0x25,0x4B,0xE0,0x73,0x65,
+0x89,0x59,0xDE,0xD0,0x83,0xE4,0xF5,0x0F,0xB5,0xBB,0xAD,0xF1,0x7C,0xE8,0x21,0xFC,
+0xE4,0xE8,0x0C,0xEE,0x7C,0x45,0x22,0x19,0x76,0x92,0xB4,0x13,0xB7,0x20,0x5B,0x09,
+0xFA,0x61,0xAE,0xA8,0xF2,0xA5,0x8D,0x85,0xC2,0x2A,0xD6,0xDE,0x66,0x36,0xD2,0x9B,
+0x02,0xF4,0xA8,0x92,0x60,0x7C,0x9C,0x69,0xB4,0x8F,0x24,0x1E,0xD0,0x86,0x52,0xF6,
+0x32,0x9C,0x41,0x58,0x1E,0x22,0xBD,0xCD,0x45,0x62,0x95,0x08,0x6E,0xD0,0x66,0xDD,
+0x53,0xA2,0xCC,0xF0,0x10,0xDC,0x54,0x73,0x8B,0x04,0xA1,0x46,0x33,0x33,0x5C,0x17,
+0x40,0xB9,0x9E,0x4D,0xD3,0xF3,0xBE,0x55,0x83,0xE8,0xB1,0x89,0x8E,0x5A,0x7C,0x9A,
+0x96,0x22,0x90,0x3B,0x88,0x25,0xF2,0xD2,0x53,0x88,0x02,0x0C,0x0B,0x78,0xF2,0xE6,
+0x37,0x17,0x4B,0x30,0x46,0x07,0xE4,0x80,0x6D,0xA6,0xD8,0x96,0x2E,0xE8,0x2C,0xF8,
+0x11,0xB3,0x38,0x0D,0x66,0xA6,0x9B,0xEA,0xC9,0x23,0x5B,0xDB,0x8E,0xE2,0xF3,0x13,
+0x8E,0x1A,0x59,0x2D,0xAA,0x02,0xF0,0xEC,0xA4,0x87,0x66,0xDC,0xC1,0x3F,0xF5,0xD8,
+0xB9,0xF4,0xEC,0x82,0xC6,0xD2,0x3D,0x95,0x1D,0xE5,0xC0,0x4F,0x84,0xC9,0xD9,0xA3,
+0x44,0x28,0x06,0x6A,0xD7,0x45,0xAC,0xF0,0x6B,0x6A,0xEF,0x4E,0x5F,0xF8,0x11,0x82,
+0x1E,0x38,0x63,0x34,0x66,0x50,0xD4,0x3E,0x93,0x73,0xFA,0x30,0xC3,0x66,0xAD,0xFF,
+0x93,0x2D,0x97,0xEF,0x03,0x02,0x03,0x01,0x00,0x01,0x30,0x0D,0x06,0x09,0x2A,0x86,
+0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x8F,0xFA,
+0x25,0x6B,0x4F,0x5B,0xE4,0xA4,0x4E,0x27,0x55,0xAB,0x22,0x15,0x59,0x3C,0xCA,0xB5,
+0x0A,0xD4,0x4A,0xDB,0xAB,0xDD,0xA1,0x5F,0x53,0xC5,0xA0,0x57,0x39,0xC2,0xCE,0x47,
+0x2B,0xBE,0x3A,0xC8,0x56,0xBF,0xC2,0xD9,0x27,0x10,0x3A,0xB1,0x05,0x3C,0xC0,0x77,
+0x31,0xBB,0x3A,0xD3,0x05,0x7B,0x6D,0x9A,0x1C,0x30,0x8C,0x80,0xCB,0x93,0x93,0x2A,
+0x83,0xAB,0x05,0x51,0x82,0x02,0x00,0x11,0x67,0x6B,0xF3,0x88,0x61,0x47,0x5F,0x03,
+0x93,0xD5,0x5B,0x0D,0xE0,0xF1,0xD4,0xA1,0x32,0x35,0x85,0xB2,0x3A,0xDB,0xB0,0x82,
+0xAB,0xD1,0xCB,0x0A,0xBC,0x4F,0x8C,0x5B,0xC5,0x4B,0x00,0x3B,0x1F,0x2A,0x82,0xA6,
+0x7E,0x36,0x85,0xDC,0x7E,0x3C,0x67,0x00,0xB5,0xE4,0x3B,0x52,0xE0,0xA8,0xEB,0x5D,
+0x15,0xF9,0xC6,0x6D,0xF0,0xAD,0x1D,0x0E,0x85,0xB7,0xA9,0x9A,0x73,0x14,0x5A,0x5B,
+0x8F,0x41,0x28,0xC0,0xD5,0xE8,0x2D,0x4D,0xA4,0x5E,0xCD,0xAA,0xD9,0xED,0xCE,0xDC,
+0xD8,0xD5,0x3C,0x42,0x1D,0x17,0xC1,0x12,0x5D,0x45,0x38,0xC3,0x38,0xF3,0xFC,0x85,
+0x2E,0x83,0x46,0x48,0xB2,0xD7,0x20,0x5F,0x92,0x36,0x8F,0xE7,0x79,0x0F,0x98,0x5E,
+0x99,0xE8,0xF0,0xD0,0xA4,0xBB,0xF5,0x53,0xBD,0x2A,0xCE,0x59,0xB0,0xAF,0x6E,0x7F,
+0x6C,0xBB,0xD2,0x1E,0x00,0xB0,0x21,0xED,0xF8,0x41,0x62,0x82,0xB9,0xD8,0xB2,0xC4,
+0xBB,0x46,0x50,0xF3,0x31,0xC5,0x8F,0x01,0xA8,0x74,0xEB,0xF5,0x78,0x27,0xDA,0xE7,
+0xF7,0x66,0x43,0xF3,0x9E,0x83,0x3E,0x20,0xAA,0xC3,0x35,0x60,0x91,0xCE,
+};
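+
+/*
+ * Note, illustrative and not part of this change: for every root shown in
+ * this table the subject and issuer comments are identical, i.e. the roots
+ * are self-signed. A hypothetical check over a decoded certificate:
+ */
+#if 0 /* example only */
+#include <openssl/evp.h>
+#include <openssl/x509.h>
+
+static int is_self_signed_root(X509* cert) {
+  /* Subject must equal issuer, and the signature must verify against
+   * the certificate's own public key. */
+  EVP_PKEY* key = X509_get_pubkey(cert);
+  int ok = key != NULL &&
+           X509_NAME_cmp(X509_get_subject_name(cert),
+                         X509_get_issuer_name(cert)) == 0 &&
+           X509_verify(cert, key) == 1;
+  EVP_PKEY_free(key); /* safe on NULL */
+  return ok;
+}
+#endif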
+
+
+/* subject:/C=US/O=thawte, Inc./OU=Certification Services Division/OU=(c) 2006 thawte, Inc. - For authorized use only/CN=thawte Primary Root CA */
+/* issuer :/C=US/O=thawte, Inc./OU=Certification Services Division/OU=(c) 2006 thawte, Inc. - For authorized use only/CN=thawte Primary Root CA */
+
+
+const unsigned char thawte_Primary_Root_CA_certificate[1060]={
+0x30,0x82,0x04,0x20,0x30,0x82,0x03,0x08,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x34,
+0x4E,0xD5,0x57,0x20,0xD5,0xED,0xEC,0x49,0xF4,0x2F,0xCE,0x37,0xDB,0x2B,0x6D,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x81,
+0xA9,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,
+0x30,0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x74,0x68,0x61,0x77,0x74,0x65,0x2C,
+0x20,0x49,0x6E,0x63,0x2E,0x31,0x28,0x30,0x26,0x06,0x03,0x55,0x04,0x0B,0x13,0x1F,
+0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x53,0x65,
+0x72,0x76,0x69,0x63,0x65,0x73,0x20,0x44,0x69,0x76,0x69,0x73,0x69,0x6F,0x6E,0x31,
+0x38,0x30,0x36,0x06,0x03,0x55,0x04,0x0B,0x13,0x2F,0x28,0x63,0x29,0x20,0x32,0x30,
+0x30,0x36,0x20,0x74,0x68,0x61,0x77,0x74,0x65,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,
+0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,
+0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,
+0x04,0x03,0x13,0x16,0x74,0x68,0x61,0x77,0x74,0x65,0x20,0x50,0x72,0x69,0x6D,0x61,
+0x72,0x79,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x30,0x1E,0x17,0x0D,0x30,0x36,
+0x31,0x31,0x31,0x37,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x36,0x30,
+0x37,0x31,0x36,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0xA9,0x31,0x0B,0x30,
+0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30,0x13,0x06,0x03,
+0x55,0x04,0x0A,0x13,0x0C,0x74,0x68,0x61,0x77,0x74,0x65,0x2C,0x20,0x49,0x6E,0x63,
+0x2E,0x31,0x28,0x30,0x26,0x06,0x03,0x55,0x04,0x0B,0x13,0x1F,0x43,0x65,0x72,0x74,
+0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x53,0x65,0x72,0x76,0x69,0x63,
+0x65,0x73,0x20,0x44,0x69,0x76,0x69,0x73,0x69,0x6F,0x6E,0x31,0x38,0x30,0x36,0x06,
+0x03,0x55,0x04,0x0B,0x13,0x2F,0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x36,0x20,0x74,
+0x68,0x61,0x77,0x74,0x65,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,
+0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,
+0x20,0x6F,0x6E,0x6C,0x79,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x03,0x13,0x16,
+0x74,0x68,0x61,0x77,0x74,0x65,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x52,
+0x6F,0x6F,0x74,0x20,0x43,0x41,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,
+0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,
+0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xAC,0xA0,0xF0,0xFB,0x80,0x59,0xD4,0x9C,0xC7,
+0xA4,0xCF,0x9D,0xA1,0x59,0x73,0x09,0x10,0x45,0x0C,0x0D,0x2C,0x6E,0x68,0xF1,0x6C,
+0x5B,0x48,0x68,0x49,0x59,0x37,0xFC,0x0B,0x33,0x19,0xC2,0x77,0x7F,0xCC,0x10,0x2D,
+0x95,0x34,0x1C,0xE6,0xEB,0x4D,0x09,0xA7,0x1C,0xD2,0xB8,0xC9,0x97,0x36,0x02,0xB7,
+0x89,0xD4,0x24,0x5F,0x06,0xC0,0xCC,0x44,0x94,0x94,0x8D,0x02,0x62,0x6F,0xEB,0x5A,
+0xDD,0x11,0x8D,0x28,0x9A,0x5C,0x84,0x90,0x10,0x7A,0x0D,0xBD,0x74,0x66,0x2F,0x6A,
+0x38,0xA0,0xE2,0xD5,0x54,0x44,0xEB,0x1D,0x07,0x9F,0x07,0xBA,0x6F,0xEE,0xE9,0xFD,
+0x4E,0x0B,0x29,0xF5,0x3E,0x84,0xA0,0x01,0xF1,0x9C,0xAB,0xF8,0x1C,0x7E,0x89,0xA4,
+0xE8,0xA1,0xD8,0x71,0x65,0x0D,0xA3,0x51,0x7B,0xEE,0xBC,0xD2,0x22,0x60,0x0D,0xB9,
+0x5B,0x9D,0xDF,0xBA,0xFC,0x51,0x5B,0x0B,0xAF,0x98,0xB2,0xE9,0x2E,0xE9,0x04,0xE8,
+0x62,0x87,0xDE,0x2B,0xC8,0xD7,0x4E,0xC1,0x4C,0x64,0x1E,0xDD,0xCF,0x87,0x58,0xBA,
+0x4A,0x4F,0xCA,0x68,0x07,0x1D,0x1C,0x9D,0x4A,0xC6,0xD5,0x2F,0x91,0xCC,0x7C,0x71,
+0x72,0x1C,0xC5,0xC0,0x67,0xEB,0x32,0xFD,0xC9,0x92,0x5C,0x94,0xDA,0x85,0xC0,0x9B,
+0xBF,0x53,0x7D,0x2B,0x09,0xF4,0x8C,0x9D,0x91,0x1F,0x97,0x6A,0x52,0xCB,0xDE,0x09,
+0x36,0xA4,0x77,0xD8,0x7B,0x87,0x50,0x44,0xD5,0x3E,0x6E,0x29,0x69,0xFB,0x39,0x49,
+0x26,0x1E,0x09,0xA5,0x80,0x7B,0x40,0x2D,0xEB,0xE8,0x27,0x85,0xC9,0xFE,0x61,0xFD,
+0x7E,0xE6,0x7C,0x97,0x1D,0xD5,0x9D,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,
+0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,
+0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,
+0x06,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x7B,0x5B,0x45,0xCF,
+0xAF,0xCE,0xCB,0x7A,0xFD,0x31,0x92,0x1A,0x6A,0xB6,0xF3,0x46,0xEB,0x57,0x48,0x50,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,
+0x82,0x01,0x01,0x00,0x79,0x11,0xC0,0x4B,0xB3,0x91,0xB6,0xFC,0xF0,0xE9,0x67,0xD4,
+0x0D,0x6E,0x45,0xBE,0x55,0xE8,0x93,0xD2,0xCE,0x03,0x3F,0xED,0xDA,0x25,0xB0,0x1D,
+0x57,0xCB,0x1E,0x3A,0x76,0xA0,0x4C,0xEC,0x50,0x76,0xE8,0x64,0x72,0x0C,0xA4,0xA9,
+0xF1,0xB8,0x8B,0xD6,0xD6,0x87,0x84,0xBB,0x32,0xE5,0x41,0x11,0xC0,0x77,0xD9,0xB3,
+0x60,0x9D,0xEB,0x1B,0xD5,0xD1,0x6E,0x44,0x44,0xA9,0xA6,0x01,0xEC,0x55,0x62,0x1D,
+0x77,0xB8,0x5C,0x8E,0x48,0x49,0x7C,0x9C,0x3B,0x57,0x11,0xAC,0xAD,0x73,0x37,0x8E,
+0x2F,0x78,0x5C,0x90,0x68,0x47,0xD9,0x60,0x60,0xE6,0xFC,0x07,0x3D,0x22,0x20,0x17,
+0xC4,0xF7,0x16,0xE9,0xC4,0xD8,0x72,0xF9,0xC8,0x73,0x7C,0xDF,0x16,0x2F,0x15,0xA9,
+0x3E,0xFD,0x6A,0x27,0xB6,0xA1,0xEB,0x5A,0xBA,0x98,0x1F,0xD5,0xE3,0x4D,0x64,0x0A,
+0x9D,0x13,0xC8,0x61,0xBA,0xF5,0x39,0x1C,0x87,0xBA,0xB8,0xBD,0x7B,0x22,0x7F,0xF6,
+0xFE,0xAC,0x40,0x79,0xE5,0xAC,0x10,0x6F,0x3D,0x8F,0x1B,0x79,0x76,0x8B,0xC4,0x37,
+0xB3,0x21,0x18,0x84,0xE5,0x36,0x00,0xEB,0x63,0x20,0x99,0xB9,0xE9,0xFE,0x33,0x04,
+0xBB,0x41,0xC8,0xC1,0x02,0xF9,0x44,0x63,0x20,0x9E,0x81,0xCE,0x42,0xD3,0xD6,0x3F,
+0x2C,0x76,0xD3,0x63,0x9C,0x59,0xDD,0x8F,0xA6,0xE1,0x0E,0xA0,0x2E,0x41,0xF7,0x2E,
+0x95,0x47,0xCF,0xBC,0xFD,0x33,0xF3,0xF6,0x0B,0x61,0x7E,0x7E,0x91,0x2B,0x81,0x47,
+0xC2,0x27,0x30,0xEE,0xA7,0x10,0x5D,0x37,0x8F,0x5C,0x39,0x2B,0xE4,0x04,0xF0,0x7B,
+0x8D,0x56,0x8C,0x68,
+};
+
+
/* subject:/C=SE/O=AddTrust AB/OU=AddTrust TTP Network/CN=AddTrust Public CA Root */
/* issuer :/C=SE/O=AddTrust AB/OU=AddTrust TTP Network/CN=AddTrust Public CA Root */
@@ -305,64 +1225,366 @@ const unsigned char AddTrust_Qualified_Certificates_Root_certificate[1058]={
};
-/* subject:/C=US/O=AffirmTrust/CN=AffirmTrust Commercial */
-/* issuer :/C=US/O=AffirmTrust/CN=AffirmTrust Commercial */
+/* subject:/C=US/O=GeoTrust Inc./OU=(c) 2008 GeoTrust Inc. - For authorized use only/CN=GeoTrust Primary Certification Authority - G3 */
+/* issuer :/C=US/O=GeoTrust Inc./OU=(c) 2008 GeoTrust Inc. - For authorized use only/CN=GeoTrust Primary Certification Authority - G3 */
-const unsigned char AffirmTrust_Commercial_certificate[848]={
-0x30,0x82,0x03,0x4C,0x30,0x82,0x02,0x34,0xA0,0x03,0x02,0x01,0x02,0x02,0x08,0x77,
-0x77,0x06,0x27,0x26,0xA9,0xB1,0x7C,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,
-0x0D,0x01,0x01,0x0B,0x05,0x00,0x30,0x44,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,
-0x06,0x13,0x02,0x55,0x53,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x0C,0x0B,
-0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x31,0x1F,0x30,0x1D,0x06,
-0x03,0x55,0x04,0x03,0x0C,0x16,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,
-0x74,0x20,0x43,0x6F,0x6D,0x6D,0x65,0x72,0x63,0x69,0x61,0x6C,0x30,0x1E,0x17,0x0D,
-0x31,0x30,0x30,0x31,0x32,0x39,0x31,0x34,0x30,0x36,0x30,0x36,0x5A,0x17,0x0D,0x33,
-0x30,0x31,0x32,0x33,0x31,0x31,0x34,0x30,0x36,0x30,0x36,0x5A,0x30,0x44,0x31,0x0B,
-0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x14,0x30,0x12,0x06,
-0x03,0x55,0x04,0x0A,0x0C,0x0B,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,
-0x74,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x03,0x0C,0x16,0x41,0x66,0x66,0x69,
-0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x20,0x43,0x6F,0x6D,0x6D,0x65,0x72,0x63,0x69,
-0x61,0x6C,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,
-0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,
-0x01,0x01,0x00,0xF6,0x1B,0x4F,0x67,0x07,0x2B,0xA1,0x15,0xF5,0x06,0x22,0xCB,0x1F,
-0x01,0xB2,0xE3,0x73,0x45,0x06,0x44,0x49,0x2C,0xBB,0x49,0x25,0x14,0xD6,0xCE,0xC3,
-0xB7,0xAB,0x2C,0x4F,0xC6,0x41,0x32,0x94,0x57,0xFA,0x12,0xA7,0x5B,0x0E,0xE2,0x8F,
-0x1F,0x1E,0x86,0x19,0xA7,0xAA,0xB5,0x2D,0xB9,0x5F,0x0D,0x8A,0xC2,0xAF,0x85,0x35,
-0x79,0x32,0x2D,0xBB,0x1C,0x62,0x37,0xF2,0xB1,0x5B,0x4A,0x3D,0xCA,0xCD,0x71,0x5F,
-0xE9,0x42,0xBE,0x94,0xE8,0xC8,0xDE,0xF9,0x22,0x48,0x64,0xC6,0xE5,0xAB,0xC6,0x2B,
-0x6D,0xAD,0x05,0xF0,0xFA,0xD5,0x0B,0xCF,0x9A,0xE5,0xF0,0x50,0xA4,0x8B,0x3B,0x47,
-0xA5,0x23,0x5B,0x7A,0x7A,0xF8,0x33,0x3F,0xB8,0xEF,0x99,0x97,0xE3,0x20,0xC1,0xD6,
-0x28,0x89,0xCF,0x94,0xFB,0xB9,0x45,0xED,0xE3,0x40,0x17,0x11,0xD4,0x74,0xF0,0x0B,
-0x31,0xE2,0x2B,0x26,0x6A,0x9B,0x4C,0x57,0xAE,0xAC,0x20,0x3E,0xBA,0x45,0x7A,0x05,
-0xF3,0xBD,0x9B,0x69,0x15,0xAE,0x7D,0x4E,0x20,0x63,0xC4,0x35,0x76,0x3A,0x07,0x02,
-0xC9,0x37,0xFD,0xC7,0x47,0xEE,0xE8,0xF1,0x76,0x1D,0x73,0x15,0xF2,0x97,0xA4,0xB5,
-0xC8,0x7A,0x79,0xD9,0x42,0xAA,0x2B,0x7F,0x5C,0xFE,0xCE,0x26,0x4F,0xA3,0x66,0x81,
-0x35,0xAF,0x44,0xBA,0x54,0x1E,0x1C,0x30,0x32,0x65,0x9D,0xE6,0x3C,0x93,0x5E,0x50,
-0x4E,0x7A,0xE3,0x3A,0xD4,0x6E,0xCC,0x1A,0xFB,0xF9,0xD2,0x37,0xAE,0x24,0x2A,0xAB,
-0x57,0x03,0x22,0x28,0x0D,0x49,0x75,0x7F,0xB7,0x28,0xDA,0x75,0xBF,0x8E,0xE3,0xDC,
-0x0E,0x79,0x31,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x1D,0x06,0x03,
-0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x9D,0x93,0xC6,0x53,0x8B,0x5E,0xCA,0xAF,0x3F,
-0x9F,0x1E,0x0F,0xE5,0x99,0x95,0xBC,0x24,0xF6,0x94,0x8F,0x30,0x0F,0x06,0x03,0x55,
-0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,
-0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0D,0x06,0x09,
-0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x03,0x82,0x01,0x01,0x00,
-0x58,0xAC,0xF4,0x04,0x0E,0xCD,0xC0,0x0D,0xFF,0x0A,0xFD,0xD4,0xBA,0x16,0x5F,0x29,
-0xBD,0x7B,0x68,0x99,0x58,0x49,0xD2,0xB4,0x1D,0x37,0x4D,0x7F,0x27,0x7D,0x46,0x06,
-0x5D,0x43,0xC6,0x86,0x2E,0x3E,0x73,0xB2,0x26,0x7D,0x4F,0x93,0xA9,0xB6,0xC4,0x2A,
-0x9A,0xAB,0x21,0x97,0x14,0xB1,0xDE,0x8C,0xD3,0xAB,0x89,0x15,0xD8,0x6B,0x24,0xD4,
-0xF1,0x16,0xAE,0xD8,0xA4,0x5C,0xD4,0x7F,0x51,0x8E,0xED,0x18,0x01,0xB1,0x93,0x63,
-0xBD,0xBC,0xF8,0x61,0x80,0x9A,0x9E,0xB1,0xCE,0x42,0x70,0xE2,0xA9,0x7D,0x06,0x25,
-0x7D,0x27,0xA1,0xFE,0x6F,0xEC,0xB3,0x1E,0x24,0xDA,0xE3,0x4B,0x55,0x1A,0x00,0x3B,
-0x35,0xB4,0x3B,0xD9,0xD7,0x5D,0x30,0xFD,0x81,0x13,0x89,0xF2,0xC2,0x06,0x2B,0xED,
-0x67,0xC4,0x8E,0xC9,0x43,0xB2,0x5C,0x6B,0x15,0x89,0x02,0xBC,0x62,0xFC,0x4E,0xF2,
-0xB5,0x33,0xAA,0xB2,0x6F,0xD3,0x0A,0xA2,0x50,0xE3,0xF6,0x3B,0xE8,0x2E,0x44,0xC2,
-0xDB,0x66,0x38,0xA9,0x33,0x56,0x48,0xF1,0x6D,0x1B,0x33,0x8D,0x0D,0x8C,0x3F,0x60,
-0x37,0x9D,0xD3,0xCA,0x6D,0x7E,0x34,0x7E,0x0D,0x9F,0x72,0x76,0x8B,0x1B,0x9F,0x72,
-0xFD,0x52,0x35,0x41,0x45,0x02,0x96,0x2F,0x1C,0xB2,0x9A,0x73,0x49,0x21,0xB1,0x49,
-0x47,0x45,0x47,0xB4,0xEF,0x6A,0x34,0x11,0xC9,0x4D,0x9A,0xCC,0x59,0xB7,0xD6,0x02,
-0x9E,0x5A,0x4E,0x65,0xB5,0x94,0xAE,0x1B,0xDF,0x29,0xB0,0x16,0xF1,0xBF,0x00,0x9E,
-0x07,0x3A,0x17,0x64,0xB5,0x04,0xB5,0x23,0x21,0x99,0x0A,0x95,0x3B,0x97,0x7C,0xEF,
+const unsigned char GeoTrust_Primary_Certification_Authority___G3_certificate[1026]={
+0x30,0x82,0x03,0xFE,0x30,0x82,0x02,0xE6,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x15,
+0xAC,0x6E,0x94,0x19,0xB2,0x79,0x4B,0x41,0xF6,0x27,0xA9,0xC3,0x18,0x0F,0x1F,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x30,0x81,
+0x98,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16,
+0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,
+0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,0x39,0x30,0x37,0x06,0x03,0x55,0x04,0x0B,0x13,
+0x30,0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x38,0x20,0x47,0x65,0x6F,0x54,0x72,0x75,
+0x73,0x74,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,
+0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,
+0x79,0x31,0x36,0x30,0x34,0x06,0x03,0x55,0x04,0x03,0x13,0x2D,0x47,0x65,0x6F,0x54,
+0x72,0x75,0x73,0x74,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,
+0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,
+0x72,0x69,0x74,0x79,0x20,0x2D,0x20,0x47,0x33,0x30,0x1E,0x17,0x0D,0x30,0x38,0x30,
+0x34,0x30,0x32,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x37,0x31,0x32,
+0x30,0x31,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0x98,0x31,0x0B,0x30,0x09,
+0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,
+0x04,0x0A,0x13,0x0D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49,0x6E,0x63,
+0x2E,0x31,0x39,0x30,0x37,0x06,0x03,0x55,0x04,0x0B,0x13,0x30,0x28,0x63,0x29,0x20,
+0x32,0x30,0x30,0x38,0x20,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49,0x6E,
+0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,
+0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x36,0x30,0x34,
+0x06,0x03,0x55,0x04,0x03,0x13,0x2D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,
+0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,
+0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20,
+0x2D,0x20,0x47,0x33,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,
+0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,
+0x02,0x82,0x01,0x01,0x00,0xDC,0xE2,0x5E,0x62,0x58,0x1D,0x33,0x57,0x39,0x32,0x33,
+0xFA,0xEB,0xCB,0x87,0x8C,0xA7,0xD4,0x4A,0xDD,0x06,0x88,0xEA,0x64,0x8E,0x31,0x98,
+0xA5,0x38,0x90,0x1E,0x98,0xCF,0x2E,0x63,0x2B,0xF0,0x46,0xBC,0x44,0xB2,0x89,0xA1,
+0xC0,0x28,0x0C,0x49,0x70,0x21,0x95,0x9F,0x64,0xC0,0xA6,0x93,0x12,0x02,0x65,0x26,
+0x86,0xC6,0xA5,0x89,0xF0,0xFA,0xD7,0x84,0xA0,0x70,0xAF,0x4F,0x1A,0x97,0x3F,0x06,
+0x44,0xD5,0xC9,0xEB,0x72,0x10,0x7D,0xE4,0x31,0x28,0xFB,0x1C,0x61,0xE6,0x28,0x07,
+0x44,0x73,0x92,0x22,0x69,0xA7,0x03,0x88,0x6C,0x9D,0x63,0xC8,0x52,0xDA,0x98,0x27,
+0xE7,0x08,0x4C,0x70,0x3E,0xB4,0xC9,0x12,0xC1,0xC5,0x67,0x83,0x5D,0x33,0xF3,0x03,
+0x11,0xEC,0x6A,0xD0,0x53,0xE2,0xD1,0xBA,0x36,0x60,0x94,0x80,0xBB,0x61,0x63,0x6C,
+0x5B,0x17,0x7E,0xDF,0x40,0x94,0x1E,0xAB,0x0D,0xC2,0x21,0x28,0x70,0x88,0xFF,0xD6,
+0x26,0x6C,0x6C,0x60,0x04,0x25,0x4E,0x55,0x7E,0x7D,0xEF,0xBF,0x94,0x48,0xDE,0xB7,
+0x1D,0xDD,0x70,0x8D,0x05,0x5F,0x88,0xA5,0x9B,0xF2,0xC2,0xEE,0xEA,0xD1,0x40,0x41,
+0x6D,0x62,0x38,0x1D,0x56,0x06,0xC5,0x03,0x47,0x51,0x20,0x19,0xFC,0x7B,0x10,0x0B,
+0x0E,0x62,0xAE,0x76,0x55,0xBF,0x5F,0x77,0xBE,0x3E,0x49,0x01,0x53,0x3D,0x98,0x25,
+0x03,0x76,0x24,0x5A,0x1D,0xB4,0xDB,0x89,0xEA,0x79,0xE5,0xB6,0xB3,0x3B,0x3F,0xBA,
+0x4C,0x28,0x41,0x7F,0x06,0xAC,0x6A,0x8E,0xC1,0xD0,0xF6,0x05,0x1D,0x7D,0xE6,0x42,
+0x86,0xE3,0xA5,0xD5,0x47,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x0F,
+0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,
+0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,
+0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xC4,0x79,0xCA,0x8E,0xA1,0x4E,
+0x03,0x1D,0x1C,0xDC,0x6B,0xDB,0x31,0x5B,0x94,0x3E,0x3F,0x30,0x7F,0x2D,0x30,0x0D,
+0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x03,0x82,0x01,
+0x01,0x00,0x2D,0xC5,0x13,0xCF,0x56,0x80,0x7B,0x7A,0x78,0xBD,0x9F,0xAE,0x2C,0x99,
+0xE7,0xEF,0xDA,0xDF,0x94,0x5E,0x09,0x69,0xA7,0xE7,0x6E,0x68,0x8C,0xBD,0x72,0xBE,
+0x47,0xA9,0x0E,0x97,0x12,0xB8,0x4A,0xF1,0x64,0xD3,0x39,0xDF,0x25,0x34,0xD4,0xC1,
+0xCD,0x4E,0x81,0xF0,0x0F,0x04,0xC4,0x24,0xB3,0x34,0x96,0xC6,0xA6,0xAA,0x30,0xDF,
+0x68,0x61,0x73,0xD7,0xF9,0x8E,0x85,0x89,0xEF,0x0E,0x5E,0x95,0x28,0x4A,0x2A,0x27,
+0x8F,0x10,0x8E,0x2E,0x7C,0x86,0xC4,0x02,0x9E,0xDA,0x0C,0x77,0x65,0x0E,0x44,0x0D,
+0x92,0xFD,0xFD,0xB3,0x16,0x36,0xFA,0x11,0x0D,0x1D,0x8C,0x0E,0x07,0x89,0x6A,0x29,
+0x56,0xF7,0x72,0xF4,0xDD,0x15,0x9C,0x77,0x35,0x66,0x57,0xAB,0x13,0x53,0xD8,0x8E,
+0xC1,0x40,0xC5,0xD7,0x13,0x16,0x5A,0x72,0xC7,0xB7,0x69,0x01,0xC4,0x7A,0xB1,0x83,
+0x01,0x68,0x7D,0x8D,0x41,0xA1,0x94,0x18,0xC1,0x25,0x5C,0xFC,0xF0,0xFE,0x83,0x02,
+0x87,0x7C,0x0D,0x0D,0xCF,0x2E,0x08,0x5C,0x4A,0x40,0x0D,0x3E,0xEC,0x81,0x61,0xE6,
+0x24,0xDB,0xCA,0xE0,0x0E,0x2D,0x07,0xB2,0x3E,0x56,0xDC,0x8D,0xF5,0x41,0x85,0x07,
+0x48,0x9B,0x0C,0x0B,0xCB,0x49,0x3F,0x7D,0xEC,0xB7,0xFD,0xCB,0x8D,0x67,0x89,0x1A,
+0xAB,0xED,0xBB,0x1E,0xA3,0x00,0x08,0x08,0x17,0x2A,0x82,0x5C,0x31,0x5D,0x46,0x8A,
+0x2D,0x0F,0x86,0x9B,0x74,0xD9,0x45,0xFB,0xD4,0x40,0xB1,0x7A,0xAA,0x68,0x2D,0x86,
+0xB2,0x99,0x22,0xE1,0xC1,0x2B,0xC7,0x9C,0xF8,0xF3,0x5F,0xA8,0x82,0x12,0xEB,0x19,
+0x11,0x2D,
+};
+
+
+/* subject:/C=US/O=GeoTrust Inc./CN=GeoTrust Universal CA 2 */
+/* issuer :/C=US/O=GeoTrust Inc./CN=GeoTrust Universal CA 2 */
+
+
+const unsigned char GeoTrust_Universal_CA_2_certificate[1392]={
+0x30,0x82,0x05,0x6C,0x30,0x82,0x03,0x54,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,
+0x47,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16,
+0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,
+0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,0x20,0x30,0x1E,0x06,0x03,0x55,0x04,0x03,0x13,
+0x17,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x55,0x6E,0x69,0x76,0x65,0x72,
+0x73,0x61,0x6C,0x20,0x43,0x41,0x20,0x32,0x30,0x1E,0x17,0x0D,0x30,0x34,0x30,0x33,
+0x30,0x34,0x30,0x35,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32,0x39,0x30,0x33,0x30,
+0x34,0x30,0x35,0x30,0x30,0x30,0x30,0x5A,0x30,0x47,0x31,0x0B,0x30,0x09,0x06,0x03,
+0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,
+0x13,0x0D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,
+0x20,0x30,0x1E,0x06,0x03,0x55,0x04,0x03,0x13,0x17,0x47,0x65,0x6F,0x54,0x72,0x75,
+0x73,0x74,0x20,0x55,0x6E,0x69,0x76,0x65,0x72,0x73,0x61,0x6C,0x20,0x43,0x41,0x20,
+0x32,0x30,0x82,0x02,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,
+0x01,0x01,0x05,0x00,0x03,0x82,0x02,0x0F,0x00,0x30,0x82,0x02,0x0A,0x02,0x82,0x02,
+0x01,0x00,0xB3,0x54,0x52,0xC1,0xC9,0x3E,0xF2,0xD9,0xDC,0xB1,0x53,0x1A,0x59,0x29,
+0xE7,0xB1,0xC3,0x45,0x28,0xE5,0xD7,0xD1,0xED,0xC5,0xC5,0x4B,0xA1,0xAA,0x74,0x7B,
+0x57,0xAF,0x4A,0x26,0xFC,0xD8,0xF5,0x5E,0xA7,0x6E,0x19,0xDB,0x74,0x0C,0x4F,0x35,
+0x5B,0x32,0x0B,0x01,0xE3,0xDB,0xEB,0x7A,0x77,0x35,0xEA,0xAA,0x5A,0xE0,0xD6,0xE8,
+0xA1,0x57,0x94,0xF0,0x90,0xA3,0x74,0x56,0x94,0x44,0x30,0x03,0x1E,0x5C,0x4E,0x2B,
+0x85,0x26,0x74,0x82,0x7A,0x0C,0x76,0xA0,0x6F,0x4D,0xCE,0x41,0x2D,0xA0,0x15,0x06,
+0x14,0x5F,0xB7,0x42,0xCD,0x7B,0x8F,0x58,0x61,0x34,0xDC,0x2A,0x08,0xF9,0x2E,0xC3,
+0x01,0xA6,0x22,0x44,0x1C,0x4C,0x07,0x82,0xE6,0x5B,0xCE,0xD0,0x4A,0x7C,0x04,0xD3,
+0x19,0x73,0x27,0xF0,0xAA,0x98,0x7F,0x2E,0xAF,0x4E,0xEB,0x87,0x1E,0x24,0x77,0x6A,
+0x5D,0xB6,0xE8,0x5B,0x45,0xBA,0xDC,0xC3,0xA1,0x05,0x6F,0x56,0x8E,0x8F,0x10,0x26,
+0xA5,0x49,0xC3,0x2E,0xD7,0x41,0x87,0x22,0xE0,0x4F,0x86,0xCA,0x60,0xB5,0xEA,0xA1,
+0x63,0xC0,0x01,0x97,0x10,0x79,0xBD,0x00,0x3C,0x12,0x6D,0x2B,0x15,0xB1,0xAC,0x4B,
+0xB1,0xEE,0x18,0xB9,0x4E,0x96,0xDC,0xDC,0x76,0xFF,0x3B,0xBE,0xCF,0x5F,0x03,0xC0,
+0xFC,0x3B,0xE8,0xBE,0x46,0x1B,0xFF,0xDA,0x40,0xC2,0x52,0xF7,0xFE,0xE3,0x3A,0xF7,
+0x6A,0x77,0x35,0xD0,0xDA,0x8D,0xEB,0x5E,0x18,0x6A,0x31,0xC7,0x1E,0xBA,0x3C,0x1B,
+0x28,0xD6,0x6B,0x54,0xC6,0xAA,0x5B,0xD7,0xA2,0x2C,0x1B,0x19,0xCC,0xA2,0x02,0xF6,
+0x9B,0x59,0xBD,0x37,0x6B,0x86,0xB5,0x6D,0x82,0xBA,0xD8,0xEA,0xC9,0x56,0xBC,0xA9,
+0x36,0x58,0xFD,0x3E,0x19,0xF3,0xED,0x0C,0x26,0xA9,0x93,0x38,0xF8,0x4F,0xC1,0x5D,
+0x22,0x06,0xD0,0x97,0xEA,0xE1,0xAD,0xC6,0x55,0xE0,0x81,0x2B,0x28,0x83,0x3A,0xFA,
+0xF4,0x7B,0x21,0x51,0x00,0xBE,0x52,0x38,0xCE,0xCD,0x66,0x79,0xA8,0xF4,0x81,0x56,
+0xE2,0xD0,0x83,0x09,0x47,0x51,0x5B,0x50,0x6A,0xCF,0xDB,0x48,0x1A,0x5D,0x3E,0xF7,
+0xCB,0xF6,0x65,0xF7,0x6C,0xF1,0x95,0xF8,0x02,0x3B,0x32,0x56,0x82,0x39,0x7A,0x5B,
+0xBD,0x2F,0x89,0x1B,0xBF,0xA1,0xB4,0xE8,0xFF,0x7F,0x8D,0x8C,0xDF,0x03,0xF1,0x60,
+0x4E,0x58,0x11,0x4C,0xEB,0xA3,0x3F,0x10,0x2B,0x83,0x9A,0x01,0x73,0xD9,0x94,0x6D,
+0x84,0x00,0x27,0x66,0xAC,0xF0,0x70,0x40,0x09,0x42,0x92,0xAD,0x4F,0x93,0x0D,0x61,
+0x09,0x51,0x24,0xD8,0x92,0xD5,0x0B,0x94,0x61,0xB2,0x87,0xB2,0xED,0xFF,0x9A,0x35,
+0xFF,0x85,0x54,0xCA,0xED,0x44,0x43,0xAC,0x1B,0x3C,0x16,0x6B,0x48,0x4A,0x0A,0x1C,
+0x40,0x88,0x1F,0x92,0xC2,0x0B,0x00,0x05,0xFF,0xF2,0xC8,0x02,0x4A,0xA4,0xAA,0xA9,
+0xCC,0x99,0x96,0x9C,0x2F,0x58,0xE0,0x7D,0xE1,0xBE,0xBB,0x07,0xDC,0x5F,0x04,0x72,
+0x5C,0x31,0x34,0xC3,0xEC,0x5F,0x2D,0xE0,0x3D,0x64,0x90,0x22,0xE6,0xD1,0xEC,0xB8,
+0x2E,0xDD,0x59,0xAE,0xD9,0xA1,0x37,0xBF,0x54,0x35,0xDC,0x73,0x32,0x4F,0x8C,0x04,
+0x1E,0x33,0xB2,0xC9,0x46,0xF1,0xD8,0x5C,0xC8,0x55,0x50,0xC9,0x68,0xBD,0xA8,0xBA,
+0x36,0x09,0x02,0x03,0x01,0x00,0x01,0xA3,0x63,0x30,0x61,0x30,0x0F,0x06,0x03,0x55,
+0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,
+0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x76,0xF3,0x55,0xE1,0xFA,0xA4,0x36,0xFB,0xF0,
+0x9F,0x5C,0x62,0x71,0xED,0x3C,0xF4,0x47,0x38,0x10,0x2B,0x30,0x1F,0x06,0x03,0x55,
+0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,0x76,0xF3,0x55,0xE1,0xFA,0xA4,0x36,0xFB,
+0xF0,0x9F,0x5C,0x62,0x71,0xED,0x3C,0xF4,0x47,0x38,0x10,0x2B,0x30,0x0E,0x06,0x03,
+0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x86,0x30,0x0D,0x06,0x09,
+0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x02,0x01,0x00,
+0x66,0xC1,0xC6,0x23,0xF3,0xD9,0xE0,0x2E,0x6E,0x5F,0xE8,0xCF,0xAE,0xB0,0xB0,0x25,
+0x4D,0x2B,0xF8,0x3B,0x58,0x9B,0x40,0x24,0x37,0x5A,0xCB,0xAB,0x16,0x49,0xFF,0xB3,
+0x75,0x79,0x33,0xA1,0x2F,0x6D,0x70,0x17,0x34,0x91,0xFE,0x67,0x7E,0x8F,0xEC,0x9B,
+0xE5,0x5E,0x82,0xA9,0x55,0x1F,0x2F,0xDC,0xD4,0x51,0x07,0x12,0xFE,0xAC,0x16,0x3E,
+0x2C,0x35,0xC6,0x63,0xFC,0xDC,0x10,0xEB,0x0D,0xA3,0xAA,0xD0,0x7C,0xCC,0xD1,0xD0,
+0x2F,0x51,0x2E,0xC4,0x14,0x5A,0xDE,0xE8,0x19,0xE1,0x3E,0xC6,0xCC,0xA4,0x29,0xE7,
+0x2E,0x84,0xAA,0x06,0x30,0x78,0x76,0x54,0x73,0x28,0x98,0x59,0x38,0xE0,0x00,0x0D,
+0x62,0xD3,0x42,0x7D,0x21,0x9F,0xAE,0x3D,0x3A,0x8C,0xD5,0xFA,0x77,0x0D,0x18,0x2B,
+0x16,0x0E,0x5F,0x36,0xE1,0xFC,0x2A,0xB5,0x30,0x24,0xCF,0xE0,0x63,0x0C,0x7B,0x58,
+0x1A,0xFE,0x99,0xBA,0x42,0x12,0xB1,0x91,0xF4,0x7C,0x68,0xE2,0xC8,0xE8,0xAF,0x2C,
+0xEA,0xC9,0x7E,0xAE,0xBB,0x2A,0x3D,0x0D,0x15,0xDC,0x34,0x95,0xB6,0x18,0x74,0xA8,
+0x6A,0x0F,0xC7,0xB4,0xF4,0x13,0xC4,0xE4,0x5B,0xED,0x0A,0xD2,0xA4,0x97,0x4C,0x2A,
+0xED,0x2F,0x6C,0x12,0x89,0x3D,0xF1,0x27,0x70,0xAA,0x6A,0x03,0x52,0x21,0x9F,0x40,
+0xA8,0x67,0x50,0xF2,0xF3,0x5A,0x1F,0xDF,0xDF,0x23,0xF6,0xDC,0x78,0x4E,0xE6,0x98,
+0x4F,0x55,0x3A,0x53,0xE3,0xEF,0xF2,0xF4,0x9F,0xC7,0x7C,0xD8,0x58,0xAF,0x29,0x22,
+0x97,0xB8,0xE0,0xBD,0x91,0x2E,0xB0,0x76,0xEC,0x57,0x11,0xCF,0xEF,0x29,0x44,0xF3,
+0xE9,0x85,0x7A,0x60,0x63,0xE4,0x5D,0x33,0x89,0x17,0xD9,0x31,0xAA,0xDA,0xD6,0xF3,
+0x18,0x35,0x72,0xCF,0x87,0x2B,0x2F,0x63,0x23,0x84,0x5D,0x84,0x8C,0x3F,0x57,0xA0,
+0x88,0xFC,0x99,0x91,0x28,0x26,0x69,0x99,0xD4,0x8F,0x97,0x44,0xBE,0x8E,0xD5,0x48,
+0xB1,0xA4,0x28,0x29,0xF1,0x15,0xB4,0xE1,0xE5,0x9E,0xDD,0xF8,0x8F,0xA6,0x6F,0x26,
+0xD7,0x09,0x3C,0x3A,0x1C,0x11,0x0E,0xA6,0x6C,0x37,0xF7,0xAD,0x44,0x87,0x2C,0x28,
+0xC7,0xD8,0x74,0x82,0xB3,0xD0,0x6F,0x4A,0x57,0xBB,0x35,0x29,0x27,0xA0,0x8B,0xE8,
+0x21,0xA7,0x87,0x64,0x36,0x5D,0xCC,0xD8,0x16,0xAC,0xC7,0xB2,0x27,0x40,0x92,0x55,
+0x38,0x28,0x8D,0x51,0x6E,0xDD,0x14,0x67,0x53,0x6C,0x71,0x5C,0x26,0x84,0x4D,0x75,
+0x5A,0xB6,0x7E,0x60,0x56,0xA9,0x4D,0xAD,0xFB,0x9B,0x1E,0x97,0xF3,0x0D,0xD9,0xD2,
+0x97,0x54,0x77,0xDA,0x3D,0x12,0xB7,0xE0,0x1E,0xEF,0x08,0x06,0xAC,0xF9,0x85,0x87,
+0xE9,0xA2,0xDC,0xAF,0x7E,0x18,0x12,0x83,0xFD,0x56,0x17,0x41,0x2E,0xD5,0x29,0x82,
+0x7D,0x99,0xF4,0x31,0xF6,0x71,0xA9,0xCF,0x2C,0x01,0x27,0xA5,0x05,0xB9,0xAA,0xB2,
+0x48,0x4E,0x2A,0xEF,0x9F,0x93,0x52,0x51,0x95,0x3C,0x52,0x73,0x8E,0x56,0x4C,0x17,
+0x40,0xC0,0x09,0x28,0xE4,0x8B,0x6A,0x48,0x53,0xDB,0xEC,0xCD,0x55,0x55,0xF1,0xC6,
+0xF8,0xE9,0xA2,0x2C,0x4C,0xA6,0xD1,0x26,0x5F,0x7E,0xAF,0x5A,0x4C,0xDA,0x1F,0xA6,
+0xF2,0x1C,0x2C,0x7E,0xAE,0x02,0x16,0xD2,0x56,0xD0,0x2F,0x57,0x53,0x47,0xE8,0x92,
+};
+
+
+/* subject:/C=IE/O=Baltimore/OU=CyberTrust/CN=Baltimore CyberTrust Root */
+/* issuer :/C=IE/O=Baltimore/OU=CyberTrust/CN=Baltimore CyberTrust Root */
+
+
+const unsigned char Baltimore_CyberTrust_Root_certificate[891]={
+0x30,0x82,0x03,0x77,0x30,0x82,0x02,0x5F,0xA0,0x03,0x02,0x01,0x02,0x02,0x04,0x02,
+0x00,0x00,0xB9,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,
+0x05,0x00,0x30,0x5A,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x49,
+0x45,0x31,0x12,0x30,0x10,0x06,0x03,0x55,0x04,0x0A,0x13,0x09,0x42,0x61,0x6C,0x74,
+0x69,0x6D,0x6F,0x72,0x65,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x0B,0x13,0x0A,
+0x43,0x79,0x62,0x65,0x72,0x54,0x72,0x75,0x73,0x74,0x31,0x22,0x30,0x20,0x06,0x03,
+0x55,0x04,0x03,0x13,0x19,0x42,0x61,0x6C,0x74,0x69,0x6D,0x6F,0x72,0x65,0x20,0x43,
+0x79,0x62,0x65,0x72,0x54,0x72,0x75,0x73,0x74,0x20,0x52,0x6F,0x6F,0x74,0x30,0x1E,
+0x17,0x0D,0x30,0x30,0x30,0x35,0x31,0x32,0x31,0x38,0x34,0x36,0x30,0x30,0x5A,0x17,
+0x0D,0x32,0x35,0x30,0x35,0x31,0x32,0x32,0x33,0x35,0x39,0x30,0x30,0x5A,0x30,0x5A,
+0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x49,0x45,0x31,0x12,0x30,
+0x10,0x06,0x03,0x55,0x04,0x0A,0x13,0x09,0x42,0x61,0x6C,0x74,0x69,0x6D,0x6F,0x72,
+0x65,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x0B,0x13,0x0A,0x43,0x79,0x62,0x65,
+0x72,0x54,0x72,0x75,0x73,0x74,0x31,0x22,0x30,0x20,0x06,0x03,0x55,0x04,0x03,0x13,
+0x19,0x42,0x61,0x6C,0x74,0x69,0x6D,0x6F,0x72,0x65,0x20,0x43,0x79,0x62,0x65,0x72,
+0x54,0x72,0x75,0x73,0x74,0x20,0x52,0x6F,0x6F,0x74,0x30,0x82,0x01,0x22,0x30,0x0D,
+0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,
+0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xA3,0x04,0xBB,0x22,0xAB,
+0x98,0x3D,0x57,0xE8,0x26,0x72,0x9A,0xB5,0x79,0xD4,0x29,0xE2,0xE1,0xE8,0x95,0x80,
+0xB1,0xB0,0xE3,0x5B,0x8E,0x2B,0x29,0x9A,0x64,0xDF,0xA1,0x5D,0xED,0xB0,0x09,0x05,
+0x6D,0xDB,0x28,0x2E,0xCE,0x62,0xA2,0x62,0xFE,0xB4,0x88,0xDA,0x12,0xEB,0x38,0xEB,
+0x21,0x9D,0xC0,0x41,0x2B,0x01,0x52,0x7B,0x88,0x77,0xD3,0x1C,0x8F,0xC7,0xBA,0xB9,
+0x88,0xB5,0x6A,0x09,0xE7,0x73,0xE8,0x11,0x40,0xA7,0xD1,0xCC,0xCA,0x62,0x8D,0x2D,
+0xE5,0x8F,0x0B,0xA6,0x50,0xD2,0xA8,0x50,0xC3,0x28,0xEA,0xF5,0xAB,0x25,0x87,0x8A,
+0x9A,0x96,0x1C,0xA9,0x67,0xB8,0x3F,0x0C,0xD5,0xF7,0xF9,0x52,0x13,0x2F,0xC2,0x1B,
+0xD5,0x70,0x70,0xF0,0x8F,0xC0,0x12,0xCA,0x06,0xCB,0x9A,0xE1,0xD9,0xCA,0x33,0x7A,
+0x77,0xD6,0xF8,0xEC,0xB9,0xF1,0x68,0x44,0x42,0x48,0x13,0xD2,0xC0,0xC2,0xA4,0xAE,
+0x5E,0x60,0xFE,0xB6,0xA6,0x05,0xFC,0xB4,0xDD,0x07,0x59,0x02,0xD4,0x59,0x18,0x98,
+0x63,0xF5,0xA5,0x63,0xE0,0x90,0x0C,0x7D,0x5D,0xB2,0x06,0x7A,0xF3,0x85,0xEA,0xEB,
+0xD4,0x03,0xAE,0x5E,0x84,0x3E,0x5F,0xFF,0x15,0xED,0x69,0xBC,0xF9,0x39,0x36,0x72,
+0x75,0xCF,0x77,0x52,0x4D,0xF3,0xC9,0x90,0x2C,0xB9,0x3D,0xE5,0xC9,0x23,0x53,0x3F,
+0x1F,0x24,0x98,0x21,0x5C,0x07,0x99,0x29,0xBD,0xC6,0x3A,0xEC,0xE7,0x6E,0x86,0x3A,
+0x6B,0x97,0x74,0x63,0x33,0xBD,0x68,0x18,0x31,0xF0,0x78,0x8D,0x76,0xBF,0xFC,0x9E,
+0x8E,0x5D,0x2A,0x86,0xA7,0x4D,0x90,0xDC,0x27,0x1A,0x39,0x02,0x03,0x01,0x00,0x01,
+0xA3,0x45,0x30,0x43,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xE5,
+0x9D,0x59,0x30,0x82,0x47,0x58,0xCC,0xAC,0xFA,0x08,0x54,0x36,0x86,0x7B,0x3A,0xB5,
+0x04,0x4D,0xF0,0x30,0x12,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x08,0x30,
+0x06,0x01,0x01,0xFF,0x02,0x01,0x03,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,
+0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,
+0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x85,0x0C,0x5D,0x8E,0xE4,
+0x6F,0x51,0x68,0x42,0x05,0xA0,0xDD,0xBB,0x4F,0x27,0x25,0x84,0x03,0xBD,0xF7,0x64,
+0xFD,0x2D,0xD7,0x30,0xE3,0xA4,0x10,0x17,0xEB,0xDA,0x29,0x29,0xB6,0x79,0x3F,0x76,
+0xF6,0x19,0x13,0x23,0xB8,0x10,0x0A,0xF9,0x58,0xA4,0xD4,0x61,0x70,0xBD,0x04,0x61,
+0x6A,0x12,0x8A,0x17,0xD5,0x0A,0xBD,0xC5,0xBC,0x30,0x7C,0xD6,0xE9,0x0C,0x25,0x8D,
+0x86,0x40,0x4F,0xEC,0xCC,0xA3,0x7E,0x38,0xC6,0x37,0x11,0x4F,0xED,0xDD,0x68,0x31,
+0x8E,0x4C,0xD2,0xB3,0x01,0x74,0xEE,0xBE,0x75,0x5E,0x07,0x48,0x1A,0x7F,0x70,0xFF,
+0x16,0x5C,0x84,0xC0,0x79,0x85,0xB8,0x05,0xFD,0x7F,0xBE,0x65,0x11,0xA3,0x0F,0xC0,
+0x02,0xB4,0xF8,0x52,0x37,0x39,0x04,0xD5,0xA9,0x31,0x7A,0x18,0xBF,0xA0,0x2A,0xF4,
+0x12,0x99,0xF7,0xA3,0x45,0x82,0xE3,0x3C,0x5E,0xF5,0x9D,0x9E,0xB5,0xC8,0x9E,0x7C,
+0x2E,0xC8,0xA4,0x9E,0x4E,0x08,0x14,0x4B,0x6D,0xFD,0x70,0x6D,0x6B,0x1A,0x63,0xBD,
+0x64,0xE6,0x1F,0xB7,0xCE,0xF0,0xF2,0x9F,0x2E,0xBB,0x1B,0xB7,0xF2,0x50,0x88,0x73,
+0x92,0xC2,0xE2,0xE3,0x16,0x8D,0x9A,0x32,0x02,0xAB,0x8E,0x18,0xDD,0xE9,0x10,0x11,
+0xEE,0x7E,0x35,0xAB,0x90,0xAF,0x3E,0x30,0x94,0x7A,0xD0,0x33,0x3D,0xA7,0x65,0x0F,
+0xF5,0xFC,0x8E,0x9E,0x62,0xCF,0x47,0x44,0x2C,0x01,0x5D,0xBB,0x1D,0xB5,0x32,0xD2,
+0x47,0xD2,0x38,0x2E,0xD0,0xFE,0x81,0xDC,0x32,0x6A,0x1E,0xB5,0xEE,0x3C,0xD5,0xFC,
+0xE7,0x81,0x1D,0x19,0xC3,0x24,0x42,0xEA,0x63,0x39,0xA9,
+};
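+
+/*
+ * Illustrative sketch, not part of this change: a trust store typically
+ * gathers DER blobs like the ones above into one table of pointer/length
+ * pairs that can be iterated at load time. The entry type and table name
+ * below are hypothetical.
+ */
+#if 0 /* example only */
+#include <stddef.h>
+
+typedef struct {
+  const unsigned char* data;
+  size_t len;
+} RootCertEntry;
+
+static const RootCertEntry kExampleRoots[] = {
+  {DigiCert_Global_Root_G2_certificate,
+   sizeof(DigiCert_Global_Root_G2_certificate)},
+  {Baltimore_CyberTrust_Root_certificate,
+   sizeof(Baltimore_CyberTrust_Root_certificate)},
+};
+#endif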
+
+
+/* subject:/OU=GlobalSign Root CA - R2/O=GlobalSign/CN=GlobalSign */
+/* issuer :/OU=GlobalSign Root CA - R2/O=GlobalSign/CN=GlobalSign */
+
+
+const unsigned char GlobalSign_Root_CA___R2_certificate[958]={
+0x30,0x82,0x03,0xBA,0x30,0x82,0x02,0xA2,0xA0,0x03,0x02,0x01,0x02,0x02,0x0B,0x04,
+0x00,0x00,0x00,0x00,0x01,0x0F,0x86,0x26,0xE6,0x0D,0x30,0x0D,0x06,0x09,0x2A,0x86,
+0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x4C,0x31,0x20,0x30,0x1E,0x06,
+0x03,0x55,0x04,0x0B,0x13,0x17,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,
+0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x20,0x2D,0x20,0x52,0x32,0x31,0x13,0x30,
+0x11,0x06,0x03,0x55,0x04,0x0A,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,
+0x67,0x6E,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x03,0x13,0x0A,0x47,0x6C,0x6F,
+0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x30,0x1E,0x17,0x0D,0x30,0x36,0x31,0x32,0x31,
+0x35,0x30,0x38,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32,0x31,0x31,0x32,0x31,0x35,
+0x30,0x38,0x30,0x30,0x30,0x30,0x5A,0x30,0x4C,0x31,0x20,0x30,0x1E,0x06,0x03,0x55,
+0x04,0x0B,0x13,0x17,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x20,0x52,
+0x6F,0x6F,0x74,0x20,0x43,0x41,0x20,0x2D,0x20,0x52,0x32,0x31,0x13,0x30,0x11,0x06,
+0x03,0x55,0x04,0x0A,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,
+0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x03,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,
+0x6C,0x53,0x69,0x67,0x6E,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,
+0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,
+0x0A,0x02,0x82,0x01,0x01,0x00,0xA6,0xCF,0x24,0x0E,0xBE,0x2E,0x6F,0x28,0x99,0x45,
+0x42,0xC4,0xAB,0x3E,0x21,0x54,0x9B,0x0B,0xD3,0x7F,0x84,0x70,0xFA,0x12,0xB3,0xCB,
+0xBF,0x87,0x5F,0xC6,0x7F,0x86,0xD3,0xB2,0x30,0x5C,0xD6,0xFD,0xAD,0xF1,0x7B,0xDC,
+0xE5,0xF8,0x60,0x96,0x09,0x92,0x10,0xF5,0xD0,0x53,0xDE,0xFB,0x7B,0x7E,0x73,0x88,
+0xAC,0x52,0x88,0x7B,0x4A,0xA6,0xCA,0x49,0xA6,0x5E,0xA8,0xA7,0x8C,0x5A,0x11,0xBC,
+0x7A,0x82,0xEB,0xBE,0x8C,0xE9,0xB3,0xAC,0x96,0x25,0x07,0x97,0x4A,0x99,0x2A,0x07,
+0x2F,0xB4,0x1E,0x77,0xBF,0x8A,0x0F,0xB5,0x02,0x7C,0x1B,0x96,0xB8,0xC5,0xB9,0x3A,
+0x2C,0xBC,0xD6,0x12,0xB9,0xEB,0x59,0x7D,0xE2,0xD0,0x06,0x86,0x5F,0x5E,0x49,0x6A,
+0xB5,0x39,0x5E,0x88,0x34,0xEC,0xBC,0x78,0x0C,0x08,0x98,0x84,0x6C,0xA8,0xCD,0x4B,
+0xB4,0xA0,0x7D,0x0C,0x79,0x4D,0xF0,0xB8,0x2D,0xCB,0x21,0xCA,0xD5,0x6C,0x5B,0x7D,
+0xE1,0xA0,0x29,0x84,0xA1,0xF9,0xD3,0x94,0x49,0xCB,0x24,0x62,0x91,0x20,0xBC,0xDD,
+0x0B,0xD5,0xD9,0xCC,0xF9,0xEA,0x27,0x0A,0x2B,0x73,0x91,0xC6,0x9D,0x1B,0xAC,0xC8,
+0xCB,0xE8,0xE0,0xA0,0xF4,0x2F,0x90,0x8B,0x4D,0xFB,0xB0,0x36,0x1B,0xF6,0x19,0x7A,
+0x85,0xE0,0x6D,0xF2,0x61,0x13,0x88,0x5C,0x9F,0xE0,0x93,0x0A,0x51,0x97,0x8A,0x5A,
+0xCE,0xAF,0xAB,0xD5,0xF7,0xAA,0x09,0xAA,0x60,0xBD,0xDC,0xD9,0x5F,0xDF,0x72,0xA9,
+0x60,0x13,0x5E,0x00,0x01,0xC9,0x4A,0xFA,0x3F,0xA4,0xEA,0x07,0x03,0x21,0x02,0x8E,
+0x82,0xCA,0x03,0xC2,0x9B,0x8F,0x02,0x03,0x01,0x00,0x01,0xA3,0x81,0x9C,0x30,0x81,
+0x99,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,
+0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,
+0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x9B,0xE2,0x07,
+0x57,0x67,0x1C,0x1E,0xC0,0x6A,0x06,0xDE,0x59,0xB4,0x9A,0x2D,0xDF,0xDC,0x19,0x86,
+0x2E,0x30,0x36,0x06,0x03,0x55,0x1D,0x1F,0x04,0x2F,0x30,0x2D,0x30,0x2B,0xA0,0x29,
+0xA0,0x27,0x86,0x25,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x63,0x72,0x6C,0x2E,0x67,
+0x6C,0x6F,0x62,0x61,0x6C,0x73,0x69,0x67,0x6E,0x2E,0x6E,0x65,0x74,0x2F,0x72,0x6F,
+0x6F,0x74,0x2D,0x72,0x32,0x2E,0x63,0x72,0x6C,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,
+0x04,0x18,0x30,0x16,0x80,0x14,0x9B,0xE2,0x07,0x57,0x67,0x1C,0x1E,0xC0,0x6A,0x06,
+0xDE,0x59,0xB4,0x9A,0x2D,0xDF,0xDC,0x19,0x86,0x2E,0x30,0x0D,0x06,0x09,0x2A,0x86,
+0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x99,0x81,
+0x53,0x87,0x1C,0x68,0x97,0x86,0x91,0xEC,0xE0,0x4A,0xB8,0x44,0x0B,0xAB,0x81,0xAC,
+0x27,0x4F,0xD6,0xC1,0xB8,0x1C,0x43,0x78,0xB3,0x0C,0x9A,0xFC,0xEA,0x2C,0x3C,0x6E,
+0x61,0x1B,0x4D,0x4B,0x29,0xF5,0x9F,0x05,0x1D,0x26,0xC1,0xB8,0xE9,0x83,0x00,0x62,
+0x45,0xB6,0xA9,0x08,0x93,0xB9,0xA9,0x33,0x4B,0x18,0x9A,0xC2,0xF8,0x87,0x88,0x4E,
+0xDB,0xDD,0x71,0x34,0x1A,0xC1,0x54,0xDA,0x46,0x3F,0xE0,0xD3,0x2A,0xAB,0x6D,0x54,
+0x22,0xF5,0x3A,0x62,0xCD,0x20,0x6F,0xBA,0x29,0x89,0xD7,0xDD,0x91,0xEE,0xD3,0x5C,
+0xA2,0x3E,0xA1,0x5B,0x41,0xF5,0xDF,0xE5,0x64,0x43,0x2D,0xE9,0xD5,0x39,0xAB,0xD2,
+0xA2,0xDF,0xB7,0x8B,0xD0,0xC0,0x80,0x19,0x1C,0x45,0xC0,0x2D,0x8C,0xE8,0xF8,0x2D,
+0xA4,0x74,0x56,0x49,0xC5,0x05,0xB5,0x4F,0x15,0xDE,0x6E,0x44,0x78,0x39,0x87,0xA8,
+0x7E,0xBB,0xF3,0x79,0x18,0x91,0xBB,0xF4,0x6F,0x9D,0xC1,0xF0,0x8C,0x35,0x8C,0x5D,
+0x01,0xFB,0xC3,0x6D,0xB9,0xEF,0x44,0x6D,0x79,0x46,0x31,0x7E,0x0A,0xFE,0xA9,0x82,
+0xC1,0xFF,0xEF,0xAB,0x6E,0x20,0xC4,0x50,0xC9,0x5F,0x9D,0x4D,0x9B,0x17,0x8C,0x0C,
+0xE5,0x01,0xC9,0xA0,0x41,0x6A,0x73,0x53,0xFA,0xA5,0x50,0xB4,0x6E,0x25,0x0F,0xFB,
+0x4C,0x18,0xF4,0xFD,0x52,0xD9,0x8E,0x69,0xB1,0xE8,0x11,0x0F,0xDE,0x88,0xD8,0xFB,
+0x1D,0x49,0xF7,0xAA,0xDE,0x95,0xCF,0x20,0x78,0xC2,0x60,0x12,0xDB,0x25,0x40,0x8C,
+0x6A,0xFC,0x7E,0x42,0x38,0x40,0x64,0x12,0xF7,0x9E,0x81,0xE1,0x93,0x2E,
+};
+
+
+/* subject:/OU=GlobalSign Root CA - R3/O=GlobalSign/CN=GlobalSign */
+/* issuer :/OU=GlobalSign Root CA - R3/O=GlobalSign/CN=GlobalSign */
+
+
+const unsigned char GlobalSign_Root_CA___R3_certificate[867]={
+0x30,0x82,0x03,0x5F,0x30,0x82,0x02,0x47,0xA0,0x03,0x02,0x01,0x02,0x02,0x0B,0x04,
+0x00,0x00,0x00,0x00,0x01,0x21,0x58,0x53,0x08,0xA2,0x30,0x0D,0x06,0x09,0x2A,0x86,
+0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x30,0x4C,0x31,0x20,0x30,0x1E,0x06,
+0x03,0x55,0x04,0x0B,0x13,0x17,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,
+0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x20,0x2D,0x20,0x52,0x33,0x31,0x13,0x30,
+0x11,0x06,0x03,0x55,0x04,0x0A,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,
+0x67,0x6E,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x03,0x13,0x0A,0x47,0x6C,0x6F,
+0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x30,0x1E,0x17,0x0D,0x30,0x39,0x30,0x33,0x31,
+0x38,0x31,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32,0x39,0x30,0x33,0x31,0x38,
+0x31,0x30,0x30,0x30,0x30,0x30,0x5A,0x30,0x4C,0x31,0x20,0x30,0x1E,0x06,0x03,0x55,
+0x04,0x0B,0x13,0x17,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x20,0x52,
+0x6F,0x6F,0x74,0x20,0x43,0x41,0x20,0x2D,0x20,0x52,0x33,0x31,0x13,0x30,0x11,0x06,
+0x03,0x55,0x04,0x0A,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,
+0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x03,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,
+0x6C,0x53,0x69,0x67,0x6E,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,
+0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,
+0x0A,0x02,0x82,0x01,0x01,0x00,0xCC,0x25,0x76,0x90,0x79,0x06,0x78,0x22,0x16,0xF5,
+0xC0,0x83,0xB6,0x84,0xCA,0x28,0x9E,0xFD,0x05,0x76,0x11,0xC5,0xAD,0x88,0x72,0xFC,
+0x46,0x02,0x43,0xC7,0xB2,0x8A,0x9D,0x04,0x5F,0x24,0xCB,0x2E,0x4B,0xE1,0x60,0x82,
+0x46,0xE1,0x52,0xAB,0x0C,0x81,0x47,0x70,0x6C,0xDD,0x64,0xD1,0xEB,0xF5,0x2C,0xA3,
+0x0F,0x82,0x3D,0x0C,0x2B,0xAE,0x97,0xD7,0xB6,0x14,0x86,0x10,0x79,0xBB,0x3B,0x13,
+0x80,0x77,0x8C,0x08,0xE1,0x49,0xD2,0x6A,0x62,0x2F,0x1F,0x5E,0xFA,0x96,0x68,0xDF,
+0x89,0x27,0x95,0x38,0x9F,0x06,0xD7,0x3E,0xC9,0xCB,0x26,0x59,0x0D,0x73,0xDE,0xB0,
+0xC8,0xE9,0x26,0x0E,0x83,0x15,0xC6,0xEF,0x5B,0x8B,0xD2,0x04,0x60,0xCA,0x49,0xA6,
+0x28,0xF6,0x69,0x3B,0xF6,0xCB,0xC8,0x28,0x91,0xE5,0x9D,0x8A,0x61,0x57,0x37,0xAC,
+0x74,0x14,0xDC,0x74,0xE0,0x3A,0xEE,0x72,0x2F,0x2E,0x9C,0xFB,0xD0,0xBB,0xBF,0xF5,
+0x3D,0x00,0xE1,0x06,0x33,0xE8,0x82,0x2B,0xAE,0x53,0xA6,0x3A,0x16,0x73,0x8C,0xDD,
+0x41,0x0E,0x20,0x3A,0xC0,0xB4,0xA7,0xA1,0xE9,0xB2,0x4F,0x90,0x2E,0x32,0x60,0xE9,
+0x57,0xCB,0xB9,0x04,0x92,0x68,0x68,0xE5,0x38,0x26,0x60,0x75,0xB2,0x9F,0x77,0xFF,
+0x91,0x14,0xEF,0xAE,0x20,0x49,0xFC,0xAD,0x40,0x15,0x48,0xD1,0x02,0x31,0x61,0x19,
+0x5E,0xB8,0x97,0xEF,0xAD,0x77,0xB7,0x64,0x9A,0x7A,0xBF,0x5F,0xC1,0x13,0xEF,0x9B,
+0x62,0xFB,0x0D,0x6C,0xE0,0x54,0x69,0x16,0xA9,0x03,0xDA,0x6E,0xE9,0x83,0x93,0x71,
+0x76,0xC6,0x69,0x85,0x82,0x17,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,
+0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,
+0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,
+0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x8F,0xF0,0x4B,0x7F,0xA8,
+0x2E,0x45,0x24,0xAE,0x4D,0x50,0xFA,0x63,0x9A,0x8B,0xDE,0xE2,0xDD,0x1B,0xBC,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x03,0x82,
+0x01,0x01,0x00,0x4B,0x40,0xDB,0xC0,0x50,0xAA,0xFE,0xC8,0x0C,0xEF,0xF7,0x96,0x54,
+0x45,0x49,0xBB,0x96,0x00,0x09,0x41,0xAC,0xB3,0x13,0x86,0x86,0x28,0x07,0x33,0xCA,
+0x6B,0xE6,0x74,0xB9,0xBA,0x00,0x2D,0xAE,0xA4,0x0A,0xD3,0xF5,0xF1,0xF1,0x0F,0x8A,
+0xBF,0x73,0x67,0x4A,0x83,0xC7,0x44,0x7B,0x78,0xE0,0xAF,0x6E,0x6C,0x6F,0x03,0x29,
+0x8E,0x33,0x39,0x45,0xC3,0x8E,0xE4,0xB9,0x57,0x6C,0xAA,0xFC,0x12,0x96,0xEC,0x53,
+0xC6,0x2D,0xE4,0x24,0x6C,0xB9,0x94,0x63,0xFB,0xDC,0x53,0x68,0x67,0x56,0x3E,0x83,
+0xB8,0xCF,0x35,0x21,0xC3,0xC9,0x68,0xFE,0xCE,0xDA,0xC2,0x53,0xAA,0xCC,0x90,0x8A,
+0xE9,0xF0,0x5D,0x46,0x8C,0x95,0xDD,0x7A,0x58,0x28,0x1A,0x2F,0x1D,0xDE,0xCD,0x00,
+0x37,0x41,0x8F,0xED,0x44,0x6D,0xD7,0x53,0x28,0x97,0x7E,0xF3,0x67,0x04,0x1E,0x15,
+0xD7,0x8A,0x96,0xB4,0xD3,0xDE,0x4C,0x27,0xA4,0x4C,0x1B,0x73,0x73,0x76,0xF4,0x17,
+0x99,0xC2,0x1F,0x7A,0x0E,0xE3,0x2D,0x08,0xAD,0x0A,0x1C,0x2C,0xFF,0x3C,0xAB,0x55,
+0x0E,0x0F,0x91,0x7E,0x36,0xEB,0xC3,0x57,0x49,0xBE,0xE1,0x2E,0x2D,0x7C,0x60,0x8B,
+0xC3,0x41,0x51,0x13,0x23,0x9D,0xCE,0xF7,0x32,0x6B,0x94,0x01,0xA8,0x99,0xE7,0x2C,
+0x33,0x1F,0x3A,0x3B,0x25,0xD2,0x86,0x40,0xCE,0x3B,0x2C,0x86,0x78,0xC9,0x61,0x2F,
+0x14,0xBA,0xEE,0xDB,0x55,0x6F,0xDF,0x84,0xEE,0x05,0x09,0x4D,0xBD,0x28,0xD8,0x72,
+0xCE,0xD3,0x62,0x50,0x65,0x1E,0xEB,0x92,0x97,0x83,0x31,0xD9,0xB3,0xB5,0xCA,0x47,
+0x58,0x3F,0x5F,
};
@@ -427,443 +1649,627 @@ const unsigned char AffirmTrust_Networking_certificate[848]={
};
-/* subject:/C=US/O=AffirmTrust/CN=AffirmTrust Premium */
-/* issuer :/C=US/O=AffirmTrust/CN=AffirmTrust Premium */
+/* subject:/C=SE/O=AddTrust AB/OU=AddTrust External TTP Network/CN=AddTrust External CA Root */
+/* issuer :/C=SE/O=AddTrust AB/OU=AddTrust External TTP Network/CN=AddTrust External CA Root */
-const unsigned char AffirmTrust_Premium_certificate[1354]={
-0x30,0x82,0x05,0x46,0x30,0x82,0x03,0x2E,0xA0,0x03,0x02,0x01,0x02,0x02,0x08,0x6D,
-0x8C,0x14,0x46,0xB1,0xA6,0x0A,0xEE,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,
-0x0D,0x01,0x01,0x0C,0x05,0x00,0x30,0x41,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,
-0x06,0x13,0x02,0x55,0x53,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x0C,0x0B,
-0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x31,0x1C,0x30,0x1A,0x06,
-0x03,0x55,0x04,0x03,0x0C,0x13,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,
-0x74,0x20,0x50,0x72,0x65,0x6D,0x69,0x75,0x6D,0x30,0x1E,0x17,0x0D,0x31,0x30,0x30,
-0x31,0x32,0x39,0x31,0x34,0x31,0x30,0x33,0x36,0x5A,0x17,0x0D,0x34,0x30,0x31,0x32,
-0x33,0x31,0x31,0x34,0x31,0x30,0x33,0x36,0x5A,0x30,0x41,0x31,0x0B,0x30,0x09,0x06,
-0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,
-0x0A,0x0C,0x0B,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x31,0x1C,
-0x30,0x1A,0x06,0x03,0x55,0x04,0x03,0x0C,0x13,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,
-0x72,0x75,0x73,0x74,0x20,0x50,0x72,0x65,0x6D,0x69,0x75,0x6D,0x30,0x82,0x02,0x22,
-0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,
-0x82,0x02,0x0F,0x00,0x30,0x82,0x02,0x0A,0x02,0x82,0x02,0x01,0x00,0xC4,0x12,0xDF,
-0xA9,0x5F,0xFE,0x41,0xDD,0xDD,0xF5,0x9F,0x8A,0xE3,0xF6,0xAC,0xE1,0x3C,0x78,0x9A,
-0xBC,0xD8,0xF0,0x7F,0x7A,0xA0,0x33,0x2A,0xDC,0x8D,0x20,0x5B,0xAE,0x2D,0x6F,0xE7,
-0x93,0xD9,0x36,0x70,0x6A,0x68,0xCF,0x8E,0x51,0xA3,0x85,0x5B,0x67,0x04,0xA0,0x10,
-0x24,0x6F,0x5D,0x28,0x82,0xC1,0x97,0x57,0xD8,0x48,0x29,0x13,0xB6,0xE1,0xBE,0x91,
-0x4D,0xDF,0x85,0x0C,0x53,0x18,0x9A,0x1E,0x24,0xA2,0x4F,0x8F,0xF0,0xA2,0x85,0x0B,
-0xCB,0xF4,0x29,0x7F,0xD2,0xA4,0x58,0xEE,0x26,0x4D,0xC9,0xAA,0xA8,0x7B,0x9A,0xD9,
-0xFA,0x38,0xDE,0x44,0x57,0x15,0xE5,0xF8,0x8C,0xC8,0xD9,0x48,0xE2,0x0D,0x16,0x27,
-0x1D,0x1E,0xC8,0x83,0x85,0x25,0xB7,0xBA,0xAA,0x55,0x41,0xCC,0x03,0x22,0x4B,0x2D,
-0x91,0x8D,0x8B,0xE6,0x89,0xAF,0x66,0xC7,0xE9,0xFF,0x2B,0xE9,0x3C,0xAC,0xDA,0xD2,
-0xB3,0xC3,0xE1,0x68,0x9C,0x89,0xF8,0x7A,0x00,0x56,0xDE,0xF4,0x55,0x95,0x6C,0xFB,
-0xBA,0x64,0xDD,0x62,0x8B,0xDF,0x0B,0x77,0x32,0xEB,0x62,0xCC,0x26,0x9A,0x9B,0xBB,
-0xAA,0x62,0x83,0x4C,0xB4,0x06,0x7A,0x30,0xC8,0x29,0xBF,0xED,0x06,0x4D,0x97,0xB9,
-0x1C,0xC4,0x31,0x2B,0xD5,0x5F,0xBC,0x53,0x12,0x17,0x9C,0x99,0x57,0x29,0x66,0x77,
-0x61,0x21,0x31,0x07,0x2E,0x25,0x49,0x9D,0x18,0xF2,0xEE,0xF3,0x2B,0x71,0x8C,0xB5,
-0xBA,0x39,0x07,0x49,0x77,0xFC,0xEF,0x2E,0x92,0x90,0x05,0x8D,0x2D,0x2F,0x77,0x7B,
-0xEF,0x43,0xBF,0x35,0xBB,0x9A,0xD8,0xF9,0x73,0xA7,0x2C,0xF2,0xD0,0x57,0xEE,0x28,
-0x4E,0x26,0x5F,0x8F,0x90,0x68,0x09,0x2F,0xB8,0xF8,0xDC,0x06,0xE9,0x2E,0x9A,0x3E,
-0x51,0xA7,0xD1,0x22,0xC4,0x0A,0xA7,0x38,0x48,0x6C,0xB3,0xF9,0xFF,0x7D,0xAB,0x86,
-0x57,0xE3,0xBA,0xD6,0x85,0x78,0x77,0xBA,0x43,0xEA,0x48,0x7F,0xF6,0xD8,0xBE,0x23,
-0x6D,0x1E,0xBF,0xD1,0x36,0x6C,0x58,0x5C,0xF1,0xEE,0xA4,0x19,0x54,0x1A,0xF5,0x03,
-0xD2,0x76,0xE6,0xE1,0x8C,0xBD,0x3C,0xB3,0xD3,0x48,0x4B,0xE2,0xC8,0xF8,0x7F,0x92,
-0xA8,0x76,0x46,0x9C,0x42,0x65,0x3E,0xA4,0x1E,0xC1,0x07,0x03,0x5A,0x46,0x2D,0xB8,
-0x97,0xF3,0xB7,0xD5,0xB2,0x55,0x21,0xEF,0xBA,0xDC,0x4C,0x00,0x97,0xFB,0x14,0x95,
-0x27,0x33,0xBF,0xE8,0x43,0x47,0x46,0xD2,0x08,0x99,0x16,0x60,0x3B,0x9A,0x7E,0xD2,
-0xE6,0xED,0x38,0xEA,0xEC,0x01,0x1E,0x3C,0x48,0x56,0x49,0x09,0xC7,0x4C,0x37,0x00,
-0x9E,0x88,0x0E,0xC0,0x73,0xE1,0x6F,0x66,0xE9,0x72,0x47,0x30,0x3E,0x10,0xE5,0x0B,
-0x03,0xC9,0x9A,0x42,0x00,0x6C,0xC5,0x94,0x7E,0x61,0xC4,0x8A,0xDF,0x7F,0x82,0x1A,
-0x0B,0x59,0xC4,0x59,0x32,0x77,0xB3,0xBC,0x60,0x69,0x56,0x39,0xFD,0xB4,0x06,0x7B,
-0x2C,0xD6,0x64,0x36,0xD9,0xBD,0x48,0xED,0x84,0x1F,0x7E,0xA5,0x22,0x8F,0x2A,0xB8,
-0x42,0xF4,0x82,0xB7,0xD4,0x53,0x90,0x78,0x4E,0x2D,0x1A,0xFD,0x81,0x6F,0x44,0xD7,
-0x3B,0x01,0x74,0x96,0x42,0xE0,0x00,0xE2,0x2E,0x6B,0xEA,0xC5,0xEE,0x72,0xAC,0xBB,
-0xBF,0xFE,0xEA,0xAA,0xA8,0xF8,0xDC,0xF6,0xB2,0x79,0x8A,0xB6,0x67,0x02,0x03,0x01,
-0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,
-0x14,0x9D,0xC0,0x67,0xA6,0x0C,0x22,0xD9,0x26,0xF5,0x45,0xAB,0xA6,0x65,0x52,0x11,
-0x27,0xD8,0x45,0xAC,0x63,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,
-0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,
-0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,
-0x01,0x01,0x0C,0x05,0x00,0x03,0x82,0x02,0x01,0x00,0xB3,0x57,0x4D,0x10,0x62,0x4E,
-0x3A,0xE4,0xAC,0xEA,0xB8,0x1C,0xAF,0x32,0x23,0xC8,0xB3,0x49,0x5A,0x51,0x9C,0x76,
-0x28,0x8D,0x79,0xAA,0x57,0x46,0x17,0xD5,0xF5,0x52,0xF6,0xB7,0x44,0xE8,0x08,0x44,
-0xBF,0x18,0x84,0xD2,0x0B,0x80,0xCD,0xC5,0x12,0xFD,0x00,0x55,0x05,0x61,0x87,0x41,
-0xDC,0xB5,0x24,0x9E,0x3C,0xC4,0xD8,0xC8,0xFB,0x70,0x9E,0x2F,0x78,0x96,0x83,0x20,
-0x36,0xDE,0x7C,0x0F,0x69,0x13,0x88,0xA5,0x75,0x36,0x98,0x08,0xA6,0xC6,0xDF,0xAC,
-0xCE,0xE3,0x58,0xD6,0xB7,0x3E,0xDE,0xBA,0xF3,0xEB,0x34,0x40,0xD8,0xA2,0x81,0xF5,
-0x78,0x3F,0x2F,0xD5,0xA5,0xFC,0xD9,0xA2,0xD4,0x5E,0x04,0x0E,0x17,0xAD,0xFE,0x41,
-0xF0,0xE5,0xB2,0x72,0xFA,0x44,0x82,0x33,0x42,0xE8,0x2D,0x58,0xF7,0x56,0x8C,0x62,
-0x3F,0xBA,0x42,0xB0,0x9C,0x0C,0x5C,0x7E,0x2E,0x65,0x26,0x5C,0x53,0x4F,0x00,0xB2,
-0x78,0x7E,0xA1,0x0D,0x99,0x2D,0x8D,0xB8,0x1D,0x8E,0xA2,0xC4,0xB0,0xFD,0x60,0xD0,
-0x30,0xA4,0x8E,0xC8,0x04,0x62,0xA9,0xC4,0xED,0x35,0xDE,0x7A,0x97,0xED,0x0E,0x38,
-0x5E,0x92,0x2F,0x93,0x70,0xA5,0xA9,0x9C,0x6F,0xA7,0x7D,0x13,0x1D,0x7E,0xC6,0x08,
-0x48,0xB1,0x5E,0x67,0xEB,0x51,0x08,0x25,0xE9,0xE6,0x25,0x6B,0x52,0x29,0x91,0x9C,
-0xD2,0x39,0x73,0x08,0x57,0xDE,0x99,0x06,0xB4,0x5B,0x9D,0x10,0x06,0xE1,0xC2,0x00,
-0xA8,0xB8,0x1C,0x4A,0x02,0x0A,0x14,0xD0,0xC1,0x41,0xCA,0xFB,0x8C,0x35,0x21,0x7D,
-0x82,0x38,0xF2,0xA9,0x54,0x91,0x19,0x35,0x93,0x94,0x6D,0x6A,0x3A,0xC5,0xB2,0xD0,
-0xBB,0x89,0x86,0x93,0xE8,0x9B,0xC9,0x0F,0x3A,0xA7,0x7A,0xB8,0xA1,0xF0,0x78,0x46,
-0xFA,0xFC,0x37,0x2F,0xE5,0x8A,0x84,0xF3,0xDF,0xFE,0x04,0xD9,0xA1,0x68,0xA0,0x2F,
-0x24,0xE2,0x09,0x95,0x06,0xD5,0x95,0xCA,0xE1,0x24,0x96,0xEB,0x7C,0xF6,0x93,0x05,
-0xBB,0xED,0x73,0xE9,0x2D,0xD1,0x75,0x39,0xD7,0xE7,0x24,0xDB,0xD8,0x4E,0x5F,0x43,
-0x8F,0x9E,0xD0,0x14,0x39,0xBF,0x55,0x70,0x48,0x99,0x57,0x31,0xB4,0x9C,0xEE,0x4A,
-0x98,0x03,0x96,0x30,0x1F,0x60,0x06,0xEE,0x1B,0x23,0xFE,0x81,0x60,0x23,0x1A,0x47,
-0x62,0x85,0xA5,0xCC,0x19,0x34,0x80,0x6F,0xB3,0xAC,0x1A,0xE3,0x9F,0xF0,0x7B,0x48,
-0xAD,0xD5,0x01,0xD9,0x67,0xB6,0xA9,0x72,0x93,0xEA,0x2D,0x66,0xB5,0xB2,0xB8,0xE4,
-0x3D,0x3C,0xB2,0xEF,0x4C,0x8C,0xEA,0xEB,0x07,0xBF,0xAB,0x35,0x9A,0x55,0x86,0xBC,
-0x18,0xA6,0xB5,0xA8,0x5E,0xB4,0x83,0x6C,0x6B,0x69,0x40,0xD3,0x9F,0xDC,0xF1,0xC3,
-0x69,0x6B,0xB9,0xE1,0x6D,0x09,0xF4,0xF1,0xAA,0x50,0x76,0x0A,0x7A,0x7D,0x7A,0x17,
-0xA1,0x55,0x96,0x42,0x99,0x31,0x09,0xDD,0x60,0x11,0x8D,0x05,0x30,0x7E,0xE6,0x8E,
-0x46,0xD1,0x9D,0x14,0xDA,0xC7,0x17,0xE4,0x05,0x96,0x8C,0xC4,0x24,0xB5,0x1B,0xCF,
-0x14,0x07,0xB2,0x40,0xF8,0xA3,0x9E,0x41,0x86,0xBC,0x04,0xD0,0x6B,0x96,0xC8,0x2A,
-0x80,0x34,0xFD,0xBF,0xEF,0x06,0xA3,0xDD,0x58,0xC5,0x85,0x3D,0x3E,0x8F,0xFE,0x9E,
-0x29,0xE0,0xB6,0xB8,0x09,0x68,0x19,0x1C,0x18,0x43,
+const unsigned char AddTrust_External_Root_certificate[1082]={
+0x30,0x82,0x04,0x36,0x30,0x82,0x03,0x1E,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,
+0x6F,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x53,0x45,0x31,0x14,
+0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x13,0x0B,0x41,0x64,0x64,0x54,0x72,0x75,0x73,
+0x74,0x20,0x41,0x42,0x31,0x26,0x30,0x24,0x06,0x03,0x55,0x04,0x0B,0x13,0x1D,0x41,
+0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x45,0x78,0x74,0x65,0x72,0x6E,0x61,0x6C,
+0x20,0x54,0x54,0x50,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x22,0x30,0x20,
+0x06,0x03,0x55,0x04,0x03,0x13,0x19,0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,
+0x45,0x78,0x74,0x65,0x72,0x6E,0x61,0x6C,0x20,0x43,0x41,0x20,0x52,0x6F,0x6F,0x74,
+0x30,0x1E,0x17,0x0D,0x30,0x30,0x30,0x35,0x33,0x30,0x31,0x30,0x34,0x38,0x33,0x38,
+0x5A,0x17,0x0D,0x32,0x30,0x30,0x35,0x33,0x30,0x31,0x30,0x34,0x38,0x33,0x38,0x5A,
+0x30,0x6F,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x53,0x45,0x31,
+0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x13,0x0B,0x41,0x64,0x64,0x54,0x72,0x75,
+0x73,0x74,0x20,0x41,0x42,0x31,0x26,0x30,0x24,0x06,0x03,0x55,0x04,0x0B,0x13,0x1D,
+0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x45,0x78,0x74,0x65,0x72,0x6E,0x61,
+0x6C,0x20,0x54,0x54,0x50,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x22,0x30,
+0x20,0x06,0x03,0x55,0x04,0x03,0x13,0x19,0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,
+0x20,0x45,0x78,0x74,0x65,0x72,0x6E,0x61,0x6C,0x20,0x43,0x41,0x20,0x52,0x6F,0x6F,
+0x74,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,
+0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,
+0x01,0x00,0xB7,0xF7,0x1A,0x33,0xE6,0xF2,0x00,0x04,0x2D,0x39,0xE0,0x4E,0x5B,0xED,
+0x1F,0xBC,0x6C,0x0F,0xCD,0xB5,0xFA,0x23,0xB6,0xCE,0xDE,0x9B,0x11,0x33,0x97,0xA4,
+0x29,0x4C,0x7D,0x93,0x9F,0xBD,0x4A,0xBC,0x93,0xED,0x03,0x1A,0xE3,0x8F,0xCF,0xE5,
+0x6D,0x50,0x5A,0xD6,0x97,0x29,0x94,0x5A,0x80,0xB0,0x49,0x7A,0xDB,0x2E,0x95,0xFD,
+0xB8,0xCA,0xBF,0x37,0x38,0x2D,0x1E,0x3E,0x91,0x41,0xAD,0x70,0x56,0xC7,0xF0,0x4F,
+0x3F,0xE8,0x32,0x9E,0x74,0xCA,0xC8,0x90,0x54,0xE9,0xC6,0x5F,0x0F,0x78,0x9D,0x9A,
+0x40,0x3C,0x0E,0xAC,0x61,0xAA,0x5E,0x14,0x8F,0x9E,0x87,0xA1,0x6A,0x50,0xDC,0xD7,
+0x9A,0x4E,0xAF,0x05,0xB3,0xA6,0x71,0x94,0x9C,0x71,0xB3,0x50,0x60,0x0A,0xC7,0x13,
+0x9D,0x38,0x07,0x86,0x02,0xA8,0xE9,0xA8,0x69,0x26,0x18,0x90,0xAB,0x4C,0xB0,0x4F,
+0x23,0xAB,0x3A,0x4F,0x84,0xD8,0xDF,0xCE,0x9F,0xE1,0x69,0x6F,0xBB,0xD7,0x42,0xD7,
+0x6B,0x44,0xE4,0xC7,0xAD,0xEE,0x6D,0x41,0x5F,0x72,0x5A,0x71,0x08,0x37,0xB3,0x79,
+0x65,0xA4,0x59,0xA0,0x94,0x37,0xF7,0x00,0x2F,0x0D,0xC2,0x92,0x72,0xDA,0xD0,0x38,
+0x72,0xDB,0x14,0xA8,0x45,0xC4,0x5D,0x2A,0x7D,0xB7,0xB4,0xD6,0xC4,0xEE,0xAC,0xCD,
+0x13,0x44,0xB7,0xC9,0x2B,0xDD,0x43,0x00,0x25,0xFA,0x61,0xB9,0x69,0x6A,0x58,0x23,
+0x11,0xB7,0xA7,0x33,0x8F,0x56,0x75,0x59,0xF5,0xCD,0x29,0xD7,0x46,0xB7,0x0A,0x2B,
+0x65,0xB6,0xD3,0x42,0x6F,0x15,0xB2,0xB8,0x7B,0xFB,0xEF,0xE9,0x5D,0x53,0xD5,0x34,
+0x5A,0x27,0x02,0x03,0x01,0x00,0x01,0xA3,0x81,0xDC,0x30,0x81,0xD9,0x30,0x1D,0x06,
+0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xAD,0xBD,0x98,0x7A,0x34,0xB4,0x26,0xF7,
+0xFA,0xC4,0x26,0x54,0xEF,0x03,0xBD,0xE0,0x24,0xCB,0x54,0x1A,0x30,0x0B,0x06,0x03,
+0x55,0x1D,0x0F,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,
+0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x81,0x99,0x06,0x03,0x55,
+0x1D,0x23,0x04,0x81,0x91,0x30,0x81,0x8E,0x80,0x14,0xAD,0xBD,0x98,0x7A,0x34,0xB4,
+0x26,0xF7,0xFA,0xC4,0x26,0x54,0xEF,0x03,0xBD,0xE0,0x24,0xCB,0x54,0x1A,0xA1,0x73,
+0xA4,0x71,0x30,0x6F,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x53,
+0x45,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x13,0x0B,0x41,0x64,0x64,0x54,
+0x72,0x75,0x73,0x74,0x20,0x41,0x42,0x31,0x26,0x30,0x24,0x06,0x03,0x55,0x04,0x0B,
+0x13,0x1D,0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x45,0x78,0x74,0x65,0x72,
+0x6E,0x61,0x6C,0x20,0x54,0x54,0x50,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,
+0x22,0x30,0x20,0x06,0x03,0x55,0x04,0x03,0x13,0x19,0x41,0x64,0x64,0x54,0x72,0x75,
+0x73,0x74,0x20,0x45,0x78,0x74,0x65,0x72,0x6E,0x61,0x6C,0x20,0x43,0x41,0x20,0x52,
+0x6F,0x6F,0x74,0x82,0x01,0x01,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,
+0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0xB0,0x9B,0xE0,0x85,0x25,0xC2,
+0xD6,0x23,0xE2,0x0F,0x96,0x06,0x92,0x9D,0x41,0x98,0x9C,0xD9,0x84,0x79,0x81,0xD9,
+0x1E,0x5B,0x14,0x07,0x23,0x36,0x65,0x8F,0xB0,0xD8,0x77,0xBB,0xAC,0x41,0x6C,0x47,
+0x60,0x83,0x51,0xB0,0xF9,0x32,0x3D,0xE7,0xFC,0xF6,0x26,0x13,0xC7,0x80,0x16,0xA5,
+0xBF,0x5A,0xFC,0x87,0xCF,0x78,0x79,0x89,0x21,0x9A,0xE2,0x4C,0x07,0x0A,0x86,0x35,
+0xBC,0xF2,0xDE,0x51,0xC4,0xD2,0x96,0xB7,0xDC,0x7E,0x4E,0xEE,0x70,0xFD,0x1C,0x39,
+0xEB,0x0C,0x02,0x51,0x14,0x2D,0x8E,0xBD,0x16,0xE0,0xC1,0xDF,0x46,0x75,0xE7,0x24,
+0xAD,0xEC,0xF4,0x42,0xB4,0x85,0x93,0x70,0x10,0x67,0xBA,0x9D,0x06,0x35,0x4A,0x18,
+0xD3,0x2B,0x7A,0xCC,0x51,0x42,0xA1,0x7A,0x63,0xD1,0xE6,0xBB,0xA1,0xC5,0x2B,0xC2,
+0x36,0xBE,0x13,0x0D,0xE6,0xBD,0x63,0x7E,0x79,0x7B,0xA7,0x09,0x0D,0x40,0xAB,0x6A,
+0xDD,0x8F,0x8A,0xC3,0xF6,0xF6,0x8C,0x1A,0x42,0x05,0x51,0xD4,0x45,0xF5,0x9F,0xA7,
+0x62,0x21,0x68,0x15,0x20,0x43,0x3C,0x99,0xE7,0x7C,0xBD,0x24,0xD8,0xA9,0x91,0x17,
+0x73,0x88,0x3F,0x56,0x1B,0x31,0x38,0x18,0xB4,0x71,0x0F,0x9A,0xCD,0xC8,0x0E,0x9E,
+0x8E,0x2E,0x1B,0xE1,0x8C,0x98,0x83,0xCB,0x1F,0x31,0xF1,0x44,0x4C,0xC6,0x04,0x73,
+0x49,0x76,0x60,0x0F,0xC7,0xF8,0xBD,0x17,0x80,0x6B,0x2E,0xE9,0xCC,0x4C,0x0E,0x5A,
+0x9A,0x79,0x0F,0x20,0x0A,0x2E,0xD5,0x9E,0x63,0x26,0x1E,0x55,0x92,0x94,0xD8,0x82,
+0x17,0x5A,0x7B,0xD0,0xBC,0xC7,0x8F,0x4E,0x86,0x04,
};
-/* subject:/C=US/O=AffirmTrust/CN=AffirmTrust Premium ECC */
-/* issuer :/C=US/O=AffirmTrust/CN=AffirmTrust Premium ECC */
+/* subject:/C=US/O=thawte, Inc./OU=Certification Services Division/OU=(c) 2008 thawte, Inc. - For authorized use only/CN=thawte Primary Root CA - G3 */
+/* issuer :/C=US/O=thawte, Inc./OU=Certification Services Division/OU=(c) 2008 thawte, Inc. - For authorized use only/CN=thawte Primary Root CA - G3 */
-const unsigned char AffirmTrust_Premium_ECC_certificate[514]={
-0x30,0x82,0x01,0xFE,0x30,0x82,0x01,0x85,0xA0,0x03,0x02,0x01,0x02,0x02,0x08,0x74,
-0x97,0x25,0x8A,0xC7,0x3F,0x7A,0x54,0x30,0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,
-0x04,0x03,0x03,0x30,0x45,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,
-0x55,0x53,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x0C,0x0B,0x41,0x66,0x66,
-0x69,0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x31,0x20,0x30,0x1E,0x06,0x03,0x55,0x04,
-0x03,0x0C,0x17,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x20,0x50,
-0x72,0x65,0x6D,0x69,0x75,0x6D,0x20,0x45,0x43,0x43,0x30,0x1E,0x17,0x0D,0x31,0x30,
-0x30,0x31,0x32,0x39,0x31,0x34,0x32,0x30,0x32,0x34,0x5A,0x17,0x0D,0x34,0x30,0x31,
-0x32,0x33,0x31,0x31,0x34,0x32,0x30,0x32,0x34,0x5A,0x30,0x45,0x31,0x0B,0x30,0x09,
-0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x14,0x30,0x12,0x06,0x03,0x55,
-0x04,0x0A,0x0C,0x0B,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x31,
-0x20,0x30,0x1E,0x06,0x03,0x55,0x04,0x03,0x0C,0x17,0x41,0x66,0x66,0x69,0x72,0x6D,
-0x54,0x72,0x75,0x73,0x74,0x20,0x50,0x72,0x65,0x6D,0x69,0x75,0x6D,0x20,0x45,0x43,
-0x43,0x30,0x76,0x30,0x10,0x06,0x07,0x2A,0x86,0x48,0xCE,0x3D,0x02,0x01,0x06,0x05,
-0x2B,0x81,0x04,0x00,0x22,0x03,0x62,0x00,0x04,0x0D,0x30,0x5E,0x1B,0x15,0x9D,0x03,
-0xD0,0xA1,0x79,0x35,0xB7,0x3A,0x3C,0x92,0x7A,0xCA,0x15,0x1C,0xCD,0x62,0xF3,0x9C,
-0x26,0x5C,0x07,0x3D,0xE5,0x54,0xFA,0xA3,0xD6,0xCC,0x12,0xEA,0xF4,0x14,0x5F,0xE8,
-0x8E,0x19,0xAB,0x2F,0x2E,0x48,0xE6,0xAC,0x18,0x43,0x78,0xAC,0xD0,0x37,0xC3,0xBD,
-0xB2,0xCD,0x2C,0xE6,0x47,0xE2,0x1A,0xE6,0x63,0xB8,0x3D,0x2E,0x2F,0x78,0xC4,0x4F,
-0xDB,0xF4,0x0F,0xA4,0x68,0x4C,0x55,0x72,0x6B,0x95,0x1D,0x4E,0x18,0x42,0x95,0x78,
-0xCC,0x37,0x3C,0x91,0xE2,0x9B,0x65,0x2B,0x29,0xA3,0x42,0x30,0x40,0x30,0x1D,0x06,
-0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x9A,0xAF,0x29,0x7A,0xC0,0x11,0x35,0x35,
-0x26,0x51,0x30,0x00,0xC3,0x6A,0xFE,0x40,0xD5,0xAE,0xD6,0x3C,0x30,0x0F,0x06,0x03,
-0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,
-0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0A,0x06,
-0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x03,0x67,0x00,0x30,0x64,0x02,0x30,
-0x17,0x09,0xF3,0x87,0x88,0x50,0x5A,0xAF,0xC8,0xC0,0x42,0xBF,0x47,0x5F,0xF5,0x6C,
-0x6A,0x86,0xE0,0xC4,0x27,0x74,0xE4,0x38,0x53,0xD7,0x05,0x7F,0x1B,0x34,0xE3,0xC6,
-0x2F,0xB3,0xCA,0x09,0x3C,0x37,0x9D,0xD7,0xE7,0xB8,0x46,0xF1,0xFD,0xA1,0xE2,0x71,
-0x02,0x30,0x42,0x59,0x87,0x43,0xD4,0x51,0xDF,0xBA,0xD3,0x09,0x32,0x5A,0xCE,0x88,
-0x7E,0x57,0x3D,0x9C,0x5F,0x42,0x6B,0xF5,0x07,0x2D,0xB5,0xF0,0x82,0x93,0xF9,0x59,
-0x6F,0xAE,0x64,0xFA,0x58,0xE5,0x8B,0x1E,0xE3,0x63,0xBE,0xB5,0x81,0xCD,0x6F,0x02,
-0x8C,0x79,
+const unsigned char thawte_Primary_Root_CA___G3_certificate[1070]={
+0x30,0x82,0x04,0x2A,0x30,0x82,0x03,0x12,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x60,
+0x01,0x97,0xB7,0x46,0xA7,0xEA,0xB4,0xB4,0x9A,0xD6,0x4B,0x2F,0xF7,0x90,0xFB,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x30,0x81,
+0xAE,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,
+0x30,0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x74,0x68,0x61,0x77,0x74,0x65,0x2C,
+0x20,0x49,0x6E,0x63,0x2E,0x31,0x28,0x30,0x26,0x06,0x03,0x55,0x04,0x0B,0x13,0x1F,
+0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x53,0x65,
+0x72,0x76,0x69,0x63,0x65,0x73,0x20,0x44,0x69,0x76,0x69,0x73,0x69,0x6F,0x6E,0x31,
+0x38,0x30,0x36,0x06,0x03,0x55,0x04,0x0B,0x13,0x2F,0x28,0x63,0x29,0x20,0x32,0x30,
+0x30,0x38,0x20,0x74,0x68,0x61,0x77,0x74,0x65,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,
+0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,
+0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x24,0x30,0x22,0x06,0x03,0x55,
+0x04,0x03,0x13,0x1B,0x74,0x68,0x61,0x77,0x74,0x65,0x20,0x50,0x72,0x69,0x6D,0x61,
+0x72,0x79,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x20,0x2D,0x20,0x47,0x33,0x30,
+0x1E,0x17,0x0D,0x30,0x38,0x30,0x34,0x30,0x32,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,
+0x17,0x0D,0x33,0x37,0x31,0x32,0x30,0x31,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,
+0x81,0xAE,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,
+0x15,0x30,0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x74,0x68,0x61,0x77,0x74,0x65,
+0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x28,0x30,0x26,0x06,0x03,0x55,0x04,0x0B,0x13,
+0x1F,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x53,
+0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x20,0x44,0x69,0x76,0x69,0x73,0x69,0x6F,0x6E,
+0x31,0x38,0x30,0x36,0x06,0x03,0x55,0x04,0x0B,0x13,0x2F,0x28,0x63,0x29,0x20,0x32,
+0x30,0x30,0x38,0x20,0x74,0x68,0x61,0x77,0x74,0x65,0x2C,0x20,0x49,0x6E,0x63,0x2E,
+0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,
+0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x24,0x30,0x22,0x06,0x03,
+0x55,0x04,0x03,0x13,0x1B,0x74,0x68,0x61,0x77,0x74,0x65,0x20,0x50,0x72,0x69,0x6D,
+0x61,0x72,0x79,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x20,0x2D,0x20,0x47,0x33,
+0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,
+0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,
+0x00,0xB2,0xBF,0x27,0x2C,0xFB,0xDB,0xD8,0x5B,0xDD,0x78,0x7B,0x1B,0x9E,0x77,0x66,
+0x81,0xCB,0x3E,0xBC,0x7C,0xAE,0xF3,0xA6,0x27,0x9A,0x34,0xA3,0x68,0x31,0x71,0x38,
+0x33,0x62,0xE4,0xF3,0x71,0x66,0x79,0xB1,0xA9,0x65,0xA3,0xA5,0x8B,0xD5,0x8F,0x60,
+0x2D,0x3F,0x42,0xCC,0xAA,0x6B,0x32,0xC0,0x23,0xCB,0x2C,0x41,0xDD,0xE4,0xDF,0xFC,
+0x61,0x9C,0xE2,0x73,0xB2,0x22,0x95,0x11,0x43,0x18,0x5F,0xC4,0xB6,0x1F,0x57,0x6C,
+0x0A,0x05,0x58,0x22,0xC8,0x36,0x4C,0x3A,0x7C,0xA5,0xD1,0xCF,0x86,0xAF,0x88,0xA7,
+0x44,0x02,0x13,0x74,0x71,0x73,0x0A,0x42,0x59,0x02,0xF8,0x1B,0x14,0x6B,0x42,0xDF,
+0x6F,0x5F,0xBA,0x6B,0x82,0xA2,0x9D,0x5B,0xE7,0x4A,0xBD,0x1E,0x01,0x72,0xDB,0x4B,
+0x74,0xE8,0x3B,0x7F,0x7F,0x7D,0x1F,0x04,0xB4,0x26,0x9B,0xE0,0xB4,0x5A,0xAC,0x47,
+0x3D,0x55,0xB8,0xD7,0xB0,0x26,0x52,0x28,0x01,0x31,0x40,0x66,0xD8,0xD9,0x24,0xBD,
+0xF6,0x2A,0xD8,0xEC,0x21,0x49,0x5C,0x9B,0xF6,0x7A,0xE9,0x7F,0x55,0x35,0x7E,0x96,
+0x6B,0x8D,0x93,0x93,0x27,0xCB,0x92,0xBB,0xEA,0xAC,0x40,0xC0,0x9F,0xC2,0xF8,0x80,
+0xCF,0x5D,0xF4,0x5A,0xDC,0xCE,0x74,0x86,0xA6,0x3E,0x6C,0x0B,0x53,0xCA,0xBD,0x92,
+0xCE,0x19,0x06,0x72,0xE6,0x0C,0x5C,0x38,0x69,0xC7,0x04,0xD6,0xBC,0x6C,0xCE,0x5B,
+0xF6,0xF7,0x68,0x9C,0xDC,0x25,0x15,0x48,0x88,0xA1,0xE9,0xA9,0xF8,0x98,0x9C,0xE0,
+0xF3,0xD5,0x31,0x28,0x61,0x11,0x6C,0x67,0x96,0x8D,0x39,0x99,0xCB,0xC2,0x45,0x24,
+0x39,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x0F,0x06,0x03,0x55,0x1D,
+0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,
+0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x1D,0x06,0x03,0x55,
+0x1D,0x0E,0x04,0x16,0x04,0x14,0xAD,0x6C,0xAA,0x94,0x60,0x9C,0xED,0xE4,0xFF,0xFA,
+0x3E,0x0A,0x74,0x2B,0x63,0x03,0xF7,0xB6,0x59,0xBF,0x30,0x0D,0x06,0x09,0x2A,0x86,
+0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x1A,0x40,
+0xD8,0x95,0x65,0xAC,0x09,0x92,0x89,0xC6,0x39,0xF4,0x10,0xE5,0xA9,0x0E,0x66,0x53,
+0x5D,0x78,0xDE,0xFA,0x24,0x91,0xBB,0xE7,0x44,0x51,0xDF,0xC6,0x16,0x34,0x0A,0xEF,
+0x6A,0x44,0x51,0xEA,0x2B,0x07,0x8A,0x03,0x7A,0xC3,0xEB,0x3F,0x0A,0x2C,0x52,0x16,
+0xA0,0x2B,0x43,0xB9,0x25,0x90,0x3F,0x70,0xA9,0x33,0x25,0x6D,0x45,0x1A,0x28,0x3B,
+0x27,0xCF,0xAA,0xC3,0x29,0x42,0x1B,0xDF,0x3B,0x4C,0xC0,0x33,0x34,0x5B,0x41,0x88,
+0xBF,0x6B,0x2B,0x65,0xAF,0x28,0xEF,0xB2,0xF5,0xC3,0xAA,0x66,0xCE,0x7B,0x56,0xEE,
+0xB7,0xC8,0xCB,0x67,0xC1,0xC9,0x9C,0x1A,0x18,0xB8,0xC4,0xC3,0x49,0x03,0xF1,0x60,
+0x0E,0x50,0xCD,0x46,0xC5,0xF3,0x77,0x79,0xF7,0xB6,0x15,0xE0,0x38,0xDB,0xC7,0x2F,
+0x28,0xA0,0x0C,0x3F,0x77,0x26,0x74,0xD9,0x25,0x12,0xDA,0x31,0xDA,0x1A,0x1E,0xDC,
+0x29,0x41,0x91,0x22,0x3C,0x69,0xA7,0xBB,0x02,0xF2,0xB6,0x5C,0x27,0x03,0x89,0xF4,
+0x06,0xEA,0x9B,0xE4,0x72,0x82,0xE3,0xA1,0x09,0xC1,0xE9,0x00,0x19,0xD3,0x3E,0xD4,
+0x70,0x6B,0xBA,0x71,0xA6,0xAA,0x58,0xAE,0xF4,0xBB,0xE9,0x6C,0xB6,0xEF,0x87,0xCC,
+0x9B,0xBB,0xFF,0x39,0xE6,0x56,0x61,0xD3,0x0A,0xA7,0xC4,0x5C,0x4C,0x60,0x7B,0x05,
+0x77,0x26,0x7A,0xBF,0xD8,0x07,0x52,0x2C,0x62,0xF7,0x70,0x63,0xD9,0x39,0xBC,0x6F,
+0x1C,0xC2,0x79,0xDC,0x76,0x29,0xAF,0xCE,0xC5,0x2C,0x64,0x04,0x5E,0x88,0x36,0x6E,
+0x31,0xD4,0x40,0x1A,0x62,0x34,0x36,0x3F,0x35,0x01,0xAE,0xAC,0x63,0xA0,
};
-/* subject:/C=US/O=America Online Inc./CN=America Online Root Certification Authority 1 */
-/* issuer :/C=US/O=America Online Inc./CN=America Online Root Certification Authority 1 */
+/* subject:/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Assured ID Root CA */
+/* issuer :/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Assured ID Root CA */
-const unsigned char America_Online_Root_Certification_Authority_1_certificate[936]={
-0x30,0x82,0x03,0xA4,0x30,0x82,0x02,0x8C,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01,
-0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,
-0x63,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x1C,
-0x30,0x1A,0x06,0x03,0x55,0x04,0x0A,0x13,0x13,0x41,0x6D,0x65,0x72,0x69,0x63,0x61,
-0x20,0x4F,0x6E,0x6C,0x69,0x6E,0x65,0x20,0x49,0x6E,0x63,0x2E,0x31,0x36,0x30,0x34,
-0x06,0x03,0x55,0x04,0x03,0x13,0x2D,0x41,0x6D,0x65,0x72,0x69,0x63,0x61,0x20,0x4F,
-0x6E,0x6C,0x69,0x6E,0x65,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x65,0x72,0x74,0x69,
-0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,
-0x74,0x79,0x20,0x31,0x30,0x1E,0x17,0x0D,0x30,0x32,0x30,0x35,0x32,0x38,0x30,0x36,
-0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x37,0x31,0x31,0x31,0x39,0x32,0x30,0x34,
-0x33,0x30,0x30,0x5A,0x30,0x63,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,
-0x02,0x55,0x53,0x31,0x1C,0x30,0x1A,0x06,0x03,0x55,0x04,0x0A,0x13,0x13,0x41,0x6D,
-0x65,0x72,0x69,0x63,0x61,0x20,0x4F,0x6E,0x6C,0x69,0x6E,0x65,0x20,0x49,0x6E,0x63,
-0x2E,0x31,0x36,0x30,0x34,0x06,0x03,0x55,0x04,0x03,0x13,0x2D,0x41,0x6D,0x65,0x72,
-0x69,0x63,0x61,0x20,0x4F,0x6E,0x6C,0x69,0x6E,0x65,0x20,0x52,0x6F,0x6F,0x74,0x20,
-0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,
-0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20,0x31,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,
-0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,
-0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xA8,0x2F,0xE8,0xA4,0x69,0x06,
-0x03,0x47,0xC3,0xE9,0x2A,0x98,0xFF,0x19,0xA2,0x70,0x9A,0xC6,0x50,0xB2,0x7E,0xA5,
-0xDF,0x68,0x4D,0x1B,0x7C,0x0F,0xB6,0x97,0x68,0x7D,0x2D,0xA6,0x8B,0x97,0xE9,0x64,
-0x86,0xC9,0xA3,0xEF,0xA0,0x86,0xBF,0x60,0x65,0x9C,0x4B,0x54,0x88,0xC2,0x48,0xC5,
-0x4A,0x39,0xBF,0x14,0xE3,0x59,0x55,0xE5,0x19,0xB4,0x74,0xC8,0xB4,0x05,0x39,0x5C,
-0x16,0xA5,0xE2,0x95,0x05,0xE0,0x12,0xAE,0x59,0x8B,0xA2,0x33,0x68,0x58,0x1C,0xA6,
-0xD4,0x15,0xB7,0xD8,0x9F,0xD7,0xDC,0x71,0xAB,0x7E,0x9A,0xBF,0x9B,0x8E,0x33,0x0F,
-0x22,0xFD,0x1F,0x2E,0xE7,0x07,0x36,0xEF,0x62,0x39,0xC5,0xDD,0xCB,0xBA,0x25,0x14,
-0x23,0xDE,0x0C,0xC6,0x3D,0x3C,0xCE,0x82,0x08,0xE6,0x66,0x3E,0xDA,0x51,0x3B,0x16,
-0x3A,0xA3,0x05,0x7F,0xA0,0xDC,0x87,0xD5,0x9C,0xFC,0x72,0xA9,0xA0,0x7D,0x78,0xE4,
-0xB7,0x31,0x55,0x1E,0x65,0xBB,0xD4,0x61,0xB0,0x21,0x60,0xED,0x10,0x32,0x72,0xC5,
-0x92,0x25,0x1E,0xF8,0x90,0x4A,0x18,0x78,0x47,0xDF,0x7E,0x30,0x37,0x3E,0x50,0x1B,
-0xDB,0x1C,0xD3,0x6B,0x9A,0x86,0x53,0x07,0xB0,0xEF,0xAC,0x06,0x78,0xF8,0x84,0x99,
-0xFE,0x21,0x8D,0x4C,0x80,0xB6,0x0C,0x82,0xF6,0x66,0x70,0x79,0x1A,0xD3,0x4F,0xA3,
-0xCF,0xF1,0xCF,0x46,0xB0,0x4B,0x0F,0x3E,0xDD,0x88,0x62,0xB8,0x8C,0xA9,0x09,0x28,
-0x3B,0x7A,0xC7,0x97,0xE1,0x1E,0xE5,0xF4,0x9F,0xC0,0xC0,0xAE,0x24,0xA0,0xC8,0xA1,
-0xD9,0x0F,0xD6,0x7B,0x26,0x82,0x69,0x32,0x3D,0xA7,0x02,0x03,0x01,0x00,0x01,0xA3,
-0x63,0x30,0x61,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,
-0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x00,
-0xAD,0xD9,0xA3,0xF6,0x79,0xF6,0x6E,0x74,0xA9,0x7F,0x33,0x3D,0x81,0x17,0xD7,0x4C,
-0xCF,0x33,0xDE,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,
-0x00,0xAD,0xD9,0xA3,0xF6,0x79,0xF6,0x6E,0x74,0xA9,0x7F,0x33,0x3D,0x81,0x17,0xD7,
-0x4C,0xCF,0x33,0xDE,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,
-0x03,0x02,0x01,0x86,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,
-0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x7C,0x8A,0xD1,0x1F,0x18,0x37,0x82,0xE0,
-0xB8,0xB0,0xA3,0xED,0x56,0x95,0xC8,0x62,0x61,0x9C,0x05,0xA2,0xCD,0xC2,0x62,0x26,
-0x61,0xCD,0x10,0x16,0xD7,0xCC,0xB4,0x65,0x34,0xD0,0x11,0x8A,0xAD,0xA8,0xA9,0x05,
-0x66,0xEF,0x74,0xF3,0x6D,0x5F,0x9D,0x99,0xAF,0xF6,0x8B,0xFB,0xEB,0x52,0xB2,0x05,
-0x98,0xA2,0x6F,0x2A,0xC5,0x54,0xBD,0x25,0xBD,0x5F,0xAE,0xC8,0x86,0xEA,0x46,0x2C,
-0xC1,0xB3,0xBD,0xC1,0xE9,0x49,0x70,0x18,0x16,0x97,0x08,0x13,0x8C,0x20,0xE0,0x1B,
-0x2E,0x3A,0x47,0xCB,0x1E,0xE4,0x00,0x30,0x95,0x5B,0xF4,0x45,0xA3,0xC0,0x1A,0xB0,
-0x01,0x4E,0xAB,0xBD,0xC0,0x23,0x6E,0x63,0x3F,0x80,0x4A,0xC5,0x07,0xED,0xDC,0xE2,
-0x6F,0xC7,0xC1,0x62,0xF1,0xE3,0x72,0xD6,0x04,0xC8,0x74,0x67,0x0B,0xFA,0x88,0xAB,
-0xA1,0x01,0xC8,0x6F,0xF0,0x14,0xAF,0xD2,0x99,0xCD,0x51,0x93,0x7E,0xED,0x2E,0x38,
-0xC7,0xBD,0xCE,0x46,0x50,0x3D,0x72,0xE3,0x79,0x25,0x9D,0x9B,0x88,0x2B,0x10,0x20,
-0xDD,0xA5,0xB8,0x32,0x9F,0x8D,0xE0,0x29,0xDF,0x21,0x74,0x86,0x82,0xDB,0x2F,0x82,
-0x30,0xC6,0xC7,0x35,0x86,0xB3,0xF9,0x96,0x5F,0x46,0xDB,0x0C,0x45,0xFD,0xF3,0x50,
-0xC3,0x6F,0xC6,0xC3,0x48,0xAD,0x46,0xA6,0xE1,0x27,0x47,0x0A,0x1D,0x0E,0x9B,0xB6,
-0xC2,0x77,0x7F,0x63,0xF2,0xE0,0x7D,0x1A,0xBE,0xFC,0xE0,0xDF,0xD7,0xC7,0xA7,0x6C,
-0xB0,0xF9,0xAE,0xBA,0x3C,0xFD,0x74,0xB4,0x11,0xE8,0x58,0x0D,0x80,0xBC,0xD3,0xA8,
-0x80,0x3A,0x99,0xED,0x75,0xCC,0x46,0x7B,
+const unsigned char DigiCert_Assured_ID_Root_CA_certificate[955]={
+0x30,0x82,0x03,0xB7,0x30,0x82,0x02,0x9F,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x0C,
+0xE7,0xE0,0xE5,0x17,0xD8,0x46,0xFE,0x8F,0xE5,0x60,0xFC,0x1B,0xF0,0x30,0x39,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x65,
+0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30,
+0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74,
+0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0B,0x13,0x10,0x77,
+0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31,
+0x24,0x30,0x22,0x06,0x03,0x55,0x04,0x03,0x13,0x1B,0x44,0x69,0x67,0x69,0x43,0x65,
+0x72,0x74,0x20,0x41,0x73,0x73,0x75,0x72,0x65,0x64,0x20,0x49,0x44,0x20,0x52,0x6F,
+0x6F,0x74,0x20,0x43,0x41,0x30,0x1E,0x17,0x0D,0x30,0x36,0x31,0x31,0x31,0x30,0x30,
+0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x31,0x31,0x31,0x31,0x30,0x30,0x30,
+0x30,0x30,0x30,0x30,0x5A,0x30,0x65,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,
+0x13,0x02,0x55,0x53,0x31,0x15,0x30,0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,
+0x69,0x67,0x69,0x43,0x65,0x72,0x74,0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,
+0x03,0x55,0x04,0x0B,0x13,0x10,0x77,0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,
+0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31,0x24,0x30,0x22,0x06,0x03,0x55,0x04,0x03,0x13,
+0x1B,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74,0x20,0x41,0x73,0x73,0x75,0x72,0x65,
+0x64,0x20,0x49,0x44,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x30,0x82,0x01,0x22,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,
+0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xAD,0x0E,0x15,
+0xCE,0xE4,0x43,0x80,0x5C,0xB1,0x87,0xF3,0xB7,0x60,0xF9,0x71,0x12,0xA5,0xAE,0xDC,
+0x26,0x94,0x88,0xAA,0xF4,0xCE,0xF5,0x20,0x39,0x28,0x58,0x60,0x0C,0xF8,0x80,0xDA,
+0xA9,0x15,0x95,0x32,0x61,0x3C,0xB5,0xB1,0x28,0x84,0x8A,0x8A,0xDC,0x9F,0x0A,0x0C,
+0x83,0x17,0x7A,0x8F,0x90,0xAC,0x8A,0xE7,0x79,0x53,0x5C,0x31,0x84,0x2A,0xF6,0x0F,
+0x98,0x32,0x36,0x76,0xCC,0xDE,0xDD,0x3C,0xA8,0xA2,0xEF,0x6A,0xFB,0x21,0xF2,0x52,
+0x61,0xDF,0x9F,0x20,0xD7,0x1F,0xE2,0xB1,0xD9,0xFE,0x18,0x64,0xD2,0x12,0x5B,0x5F,
+0xF9,0x58,0x18,0x35,0xBC,0x47,0xCD,0xA1,0x36,0xF9,0x6B,0x7F,0xD4,0xB0,0x38,0x3E,
+0xC1,0x1B,0xC3,0x8C,0x33,0xD9,0xD8,0x2F,0x18,0xFE,0x28,0x0F,0xB3,0xA7,0x83,0xD6,
+0xC3,0x6E,0x44,0xC0,0x61,0x35,0x96,0x16,0xFE,0x59,0x9C,0x8B,0x76,0x6D,0xD7,0xF1,
+0xA2,0x4B,0x0D,0x2B,0xFF,0x0B,0x72,0xDA,0x9E,0x60,0xD0,0x8E,0x90,0x35,0xC6,0x78,
+0x55,0x87,0x20,0xA1,0xCF,0xE5,0x6D,0x0A,0xC8,0x49,0x7C,0x31,0x98,0x33,0x6C,0x22,
+0xE9,0x87,0xD0,0x32,0x5A,0xA2,0xBA,0x13,0x82,0x11,0xED,0x39,0x17,0x9D,0x99,0x3A,
+0x72,0xA1,0xE6,0xFA,0xA4,0xD9,0xD5,0x17,0x31,0x75,0xAE,0x85,0x7D,0x22,0xAE,0x3F,
+0x01,0x46,0x86,0xF6,0x28,0x79,0xC8,0xB1,0xDA,0xE4,0x57,0x17,0xC4,0x7E,0x1C,0x0E,
+0xB0,0xB4,0x92,0xA6,0x56,0xB3,0xBD,0xB2,0x97,0xED,0xAA,0xA7,0xF0,0xB7,0xC5,0xA8,
+0x3F,0x95,0x16,0xD0,0xFF,0xA1,0x96,0xEB,0x08,0x5F,0x18,0x77,0x4F,0x02,0x03,0x01,
+0x00,0x01,0xA3,0x63,0x30,0x61,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,
+0x04,0x04,0x03,0x02,0x01,0x86,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,
+0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,
+0x04,0x14,0x45,0xEB,0xA2,0xAF,0xF4,0x92,0xCB,0x82,0x31,0x2D,0x51,0x8B,0xA7,0xA7,
+0x21,0x9D,0xF3,0x6D,0xC8,0x0F,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,
+0x16,0x80,0x14,0x45,0xEB,0xA2,0xAF,0xF4,0x92,0xCB,0x82,0x31,0x2D,0x51,0x8B,0xA7,
+0xA7,0x21,0x9D,0xF3,0x6D,0xC8,0x0F,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,
+0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0xA2,0x0E,0xBC,0xDF,0xE2,
+0xED,0xF0,0xE3,0x72,0x73,0x7A,0x64,0x94,0xBF,0xF7,0x72,0x66,0xD8,0x32,0xE4,0x42,
+0x75,0x62,0xAE,0x87,0xEB,0xF2,0xD5,0xD9,0xDE,0x56,0xB3,0x9F,0xCC,0xCE,0x14,0x28,
+0xB9,0x0D,0x97,0x60,0x5C,0x12,0x4C,0x58,0xE4,0xD3,0x3D,0x83,0x49,0x45,0x58,0x97,
+0x35,0x69,0x1A,0xA8,0x47,0xEA,0x56,0xC6,0x79,0xAB,0x12,0xD8,0x67,0x81,0x84,0xDF,
+0x7F,0x09,0x3C,0x94,0xE6,0xB8,0x26,0x2C,0x20,0xBD,0x3D,0xB3,0x28,0x89,0xF7,0x5F,
+0xFF,0x22,0xE2,0x97,0x84,0x1F,0xE9,0x65,0xEF,0x87,0xE0,0xDF,0xC1,0x67,0x49,0xB3,
+0x5D,0xEB,0xB2,0x09,0x2A,0xEB,0x26,0xED,0x78,0xBE,0x7D,0x3F,0x2B,0xF3,0xB7,0x26,
+0x35,0x6D,0x5F,0x89,0x01,0xB6,0x49,0x5B,0x9F,0x01,0x05,0x9B,0xAB,0x3D,0x25,0xC1,
+0xCC,0xB6,0x7F,0xC2,0xF1,0x6F,0x86,0xC6,0xFA,0x64,0x68,0xEB,0x81,0x2D,0x94,0xEB,
+0x42,0xB7,0xFA,0x8C,0x1E,0xDD,0x62,0xF1,0xBE,0x50,0x67,0xB7,0x6C,0xBD,0xF3,0xF1,
+0x1F,0x6B,0x0C,0x36,0x07,0x16,0x7F,0x37,0x7C,0xA9,0x5B,0x6D,0x7A,0xF1,0x12,0x46,
+0x60,0x83,0xD7,0x27,0x04,0xBE,0x4B,0xCE,0x97,0xBE,0xC3,0x67,0x2A,0x68,0x11,0xDF,
+0x80,0xE7,0x0C,0x33,0x66,0xBF,0x13,0x0D,0x14,0x6E,0xF3,0x7F,0x1F,0x63,0x10,0x1E,
+0xFA,0x8D,0x1B,0x25,0x6D,0x6C,0x8F,0xA5,0xB7,0x61,0x01,0xB1,0xD2,0xA3,0x26,0xA1,
+0x10,0x71,0x9D,0xAD,0xE2,0xC3,0xF9,0xC3,0x99,0x51,0xB7,0x2B,0x07,0x08,0xCE,0x2E,
+0xE6,0x50,0xB2,0xA7,0xFA,0x0A,0x45,0x2F,0xA2,0xF0,0xF2,
};
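Each entry in this file is one complete DER-encoded X.509 root certificate, stored verbatim as a C byte array and preceded by a comment giving its decoded subject and issuer. As a rough sketch of how such an array can be consumed (assuming OpenSSL is available; the function name is illustrative, not part of this change), the DigiCert array above decodes directly with d2i_X509:

#include <stdio.h>
#include <openssl/x509.h>

/* Sketch: decode the embedded DER bytes into an X509 object and
 * print the subject; it should match the comment above the array. */
int print_embedded_subject(void) {
    const unsigned char *p = DigiCert_Assured_ID_Root_CA_certificate;
    X509 *cert = d2i_X509(NULL, &p,
                          sizeof(DigiCert_Assured_ID_Root_CA_certificate));
    if (cert == NULL)
        return -1;                       /* not valid DER */
    char buf[256];
    X509_NAME_oneline(X509_get_subject_name(cert), buf, sizeof(buf));
    printf("subject: %s\n", buf);
    X509_free(cert);
    return 0;
}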
-/* subject:/C=US/O=America Online Inc./CN=America Online Root Certification Authority 2 */
-/* issuer :/C=US/O=America Online Inc./CN=America Online Root Certification Authority 2 */
+/* subject:/C=US/O=The Go Daddy Group, Inc./OU=Go Daddy Class 2 Certification Authority */
+/* issuer :/C=US/O=The Go Daddy Group, Inc./OU=Go Daddy Class 2 Certification Authority */
-const unsigned char America_Online_Root_Certification_Authority_2_certificate[1448]={
-0x30,0x82,0x05,0xA4,0x30,0x82,0x03,0x8C,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01,
+const unsigned char Go_Daddy_Class_2_CA_certificate[1028]={
+0x30,0x82,0x04,0x00,0x30,0x82,0x02,0xE8,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x00,
0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,
-0x63,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x1C,
-0x30,0x1A,0x06,0x03,0x55,0x04,0x0A,0x13,0x13,0x41,0x6D,0x65,0x72,0x69,0x63,0x61,
-0x20,0x4F,0x6E,0x6C,0x69,0x6E,0x65,0x20,0x49,0x6E,0x63,0x2E,0x31,0x36,0x30,0x34,
-0x06,0x03,0x55,0x04,0x03,0x13,0x2D,0x41,0x6D,0x65,0x72,0x69,0x63,0x61,0x20,0x4F,
-0x6E,0x6C,0x69,0x6E,0x65,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x65,0x72,0x74,0x69,
+0x63,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x21,
+0x30,0x1F,0x06,0x03,0x55,0x04,0x0A,0x13,0x18,0x54,0x68,0x65,0x20,0x47,0x6F,0x20,
+0x44,0x61,0x64,0x64,0x79,0x20,0x47,0x72,0x6F,0x75,0x70,0x2C,0x20,0x49,0x6E,0x63,
+0x2E,0x31,0x31,0x30,0x2F,0x06,0x03,0x55,0x04,0x0B,0x13,0x28,0x47,0x6F,0x20,0x44,
+0x61,0x64,0x64,0x79,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x32,0x20,0x43,0x65,0x72,
+0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,
+0x72,0x69,0x74,0x79,0x30,0x1E,0x17,0x0D,0x30,0x34,0x30,0x36,0x32,0x39,0x31,0x37,
+0x30,0x36,0x32,0x30,0x5A,0x17,0x0D,0x33,0x34,0x30,0x36,0x32,0x39,0x31,0x37,0x30,
+0x36,0x32,0x30,0x5A,0x30,0x63,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,
+0x02,0x55,0x53,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x0A,0x13,0x18,0x54,0x68,
+0x65,0x20,0x47,0x6F,0x20,0x44,0x61,0x64,0x64,0x79,0x20,0x47,0x72,0x6F,0x75,0x70,
+0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x31,0x30,0x2F,0x06,0x03,0x55,0x04,0x0B,0x13,
+0x28,0x47,0x6F,0x20,0x44,0x61,0x64,0x64,0x79,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,
+0x32,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,
+0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x82,0x01,0x20,0x30,0x0D,0x06,
+0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0D,
+0x00,0x30,0x82,0x01,0x08,0x02,0x82,0x01,0x01,0x00,0xDE,0x9D,0xD7,0xEA,0x57,0x18,
+0x49,0xA1,0x5B,0xEB,0xD7,0x5F,0x48,0x86,0xEA,0xBE,0xDD,0xFF,0xE4,0xEF,0x67,0x1C,
+0xF4,0x65,0x68,0xB3,0x57,0x71,0xA0,0x5E,0x77,0xBB,0xED,0x9B,0x49,0xE9,0x70,0x80,
+0x3D,0x56,0x18,0x63,0x08,0x6F,0xDA,0xF2,0xCC,0xD0,0x3F,0x7F,0x02,0x54,0x22,0x54,
+0x10,0xD8,0xB2,0x81,0xD4,0xC0,0x75,0x3D,0x4B,0x7F,0xC7,0x77,0xC3,0x3E,0x78,0xAB,
+0x1A,0x03,0xB5,0x20,0x6B,0x2F,0x6A,0x2B,0xB1,0xC5,0x88,0x7E,0xC4,0xBB,0x1E,0xB0,
+0xC1,0xD8,0x45,0x27,0x6F,0xAA,0x37,0x58,0xF7,0x87,0x26,0xD7,0xD8,0x2D,0xF6,0xA9,
+0x17,0xB7,0x1F,0x72,0x36,0x4E,0xA6,0x17,0x3F,0x65,0x98,0x92,0xDB,0x2A,0x6E,0x5D,
+0xA2,0xFE,0x88,0xE0,0x0B,0xDE,0x7F,0xE5,0x8D,0x15,0xE1,0xEB,0xCB,0x3A,0xD5,0xE2,
+0x12,0xA2,0x13,0x2D,0xD8,0x8E,0xAF,0x5F,0x12,0x3D,0xA0,0x08,0x05,0x08,0xB6,0x5C,
+0xA5,0x65,0x38,0x04,0x45,0x99,0x1E,0xA3,0x60,0x60,0x74,0xC5,0x41,0xA5,0x72,0x62,
+0x1B,0x62,0xC5,0x1F,0x6F,0x5F,0x1A,0x42,0xBE,0x02,0x51,0x65,0xA8,0xAE,0x23,0x18,
+0x6A,0xFC,0x78,0x03,0xA9,0x4D,0x7F,0x80,0xC3,0xFA,0xAB,0x5A,0xFC,0xA1,0x40,0xA4,
+0xCA,0x19,0x16,0xFE,0xB2,0xC8,0xEF,0x5E,0x73,0x0D,0xEE,0x77,0xBD,0x9A,0xF6,0x79,
+0x98,0xBC,0xB1,0x07,0x67,0xA2,0x15,0x0D,0xDD,0xA0,0x58,0xC6,0x44,0x7B,0x0A,0x3E,
+0x62,0x28,0x5F,0xBA,0x41,0x07,0x53,0x58,0xCF,0x11,0x7E,0x38,0x74,0xC5,0xF8,0xFF,
+0xB5,0x69,0x90,0x8F,0x84,0x74,0xEA,0x97,0x1B,0xAF,0x02,0x01,0x03,0xA3,0x81,0xC0,
+0x30,0x81,0xBD,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xD2,0xC4,
+0xB0,0xD2,0x91,0xD4,0x4C,0x11,0x71,0xB3,0x61,0xCB,0x3D,0xA1,0xFE,0xDD,0xA8,0x6A,
+0xD4,0xE3,0x30,0x81,0x8D,0x06,0x03,0x55,0x1D,0x23,0x04,0x81,0x85,0x30,0x81,0x82,
+0x80,0x14,0xD2,0xC4,0xB0,0xD2,0x91,0xD4,0x4C,0x11,0x71,0xB3,0x61,0xCB,0x3D,0xA1,
+0xFE,0xDD,0xA8,0x6A,0xD4,0xE3,0xA1,0x67,0xA4,0x65,0x30,0x63,0x31,0x0B,0x30,0x09,
+0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,
+0x04,0x0A,0x13,0x18,0x54,0x68,0x65,0x20,0x47,0x6F,0x20,0x44,0x61,0x64,0x64,0x79,
+0x20,0x47,0x72,0x6F,0x75,0x70,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x31,0x30,0x2F,
+0x06,0x03,0x55,0x04,0x0B,0x13,0x28,0x47,0x6F,0x20,0x44,0x61,0x64,0x64,0x79,0x20,
+0x43,0x6C,0x61,0x73,0x73,0x20,0x32,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,
+0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x82,
+0x01,0x00,0x30,0x0C,0x06,0x03,0x55,0x1D,0x13,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,
+0x82,0x01,0x01,0x00,0x32,0x4B,0xF3,0xB2,0xCA,0x3E,0x91,0xFC,0x12,0xC6,0xA1,0x07,
+0x8C,0x8E,0x77,0xA0,0x33,0x06,0x14,0x5C,0x90,0x1E,0x18,0xF7,0x08,0xA6,0x3D,0x0A,
+0x19,0xF9,0x87,0x80,0x11,0x6E,0x69,0xE4,0x96,0x17,0x30,0xFF,0x34,0x91,0x63,0x72,
+0x38,0xEE,0xCC,0x1C,0x01,0xA3,0x1D,0x94,0x28,0xA4,0x31,0xF6,0x7A,0xC4,0x54,0xD7,
+0xF6,0xE5,0x31,0x58,0x03,0xA2,0xCC,0xCE,0x62,0xDB,0x94,0x45,0x73,0xB5,0xBF,0x45,
+0xC9,0x24,0xB5,0xD5,0x82,0x02,0xAD,0x23,0x79,0x69,0x8D,0xB8,0xB6,0x4D,0xCE,0xCF,
+0x4C,0xCA,0x33,0x23,0xE8,0x1C,0x88,0xAA,0x9D,0x8B,0x41,0x6E,0x16,0xC9,0x20,0xE5,
+0x89,0x9E,0xCD,0x3B,0xDA,0x70,0xF7,0x7E,0x99,0x26,0x20,0x14,0x54,0x25,0xAB,0x6E,
+0x73,0x85,0xE6,0x9B,0x21,0x9D,0x0A,0x6C,0x82,0x0E,0xA8,0xF8,0xC2,0x0C,0xFA,0x10,
+0x1E,0x6C,0x96,0xEF,0x87,0x0D,0xC4,0x0F,0x61,0x8B,0xAD,0xEE,0x83,0x2B,0x95,0xF8,
+0x8E,0x92,0x84,0x72,0x39,0xEB,0x20,0xEA,0x83,0xED,0x83,0xCD,0x97,0x6E,0x08,0xBC,
+0xEB,0x4E,0x26,0xB6,0x73,0x2B,0xE4,0xD3,0xF6,0x4C,0xFE,0x26,0x71,0xE2,0x61,0x11,
+0x74,0x4A,0xFF,0x57,0x1A,0x87,0x0F,0x75,0x48,0x2E,0xCF,0x51,0x69,0x17,0xA0,0x02,
+0x12,0x61,0x95,0xD5,0xD1,0x40,0xB2,0x10,0x4C,0xEE,0xC4,0xAC,0x10,0x43,0xA6,0xA5,
+0x9E,0x0A,0xD5,0x95,0x62,0x9A,0x0D,0xCF,0x88,0x82,0xC5,0x32,0x0C,0xE4,0x2B,0x9F,
+0x45,0xE6,0x0D,0x9F,0x28,0x9C,0xB1,0xB9,0x2A,0x5A,0x57,0xAD,0x37,0x0F,0xAF,0x1D,
+0x7F,0xDB,0xBD,0x9F,
+};
+
+
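A quick consistency check on these tables: every array here begins with the ASN.1 SEQUENCE header 0x30,0x82 followed by a two-byte big-endian body length, so the declared C array size is always that length plus the four header bytes (the Go Daddy array above starts 0x30,0x82,0x04,0x00, i.e. 1024 + 4 = 1028, matching Go_Daddy_Class_2_CA_certificate[1028]). A minimal sketch of that check, with a hypothetical helper name:

#include <stddef.h>

/* Sketch: confirm a declared array size matches the DER SEQUENCE
 * header. All certificates in this file use the two-byte long-form
 * length encoding (0x30 0x82 len_hi len_lo), so total = body + 4. */
static int der_length_matches(const unsigned char *der, size_t declared)
{
    if (der[0] != 0x30 || der[1] != 0x82)
        return 0;
    size_t body = ((size_t)der[2] << 8) | (size_t)der[3];
    return body + 4 == declared;
}

For example, der_length_matches(Go_Daddy_Class_2_CA_certificate, 1028) returns 1.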
+/* subject:/C=US/O=GeoTrust Inc./CN=GeoTrust Primary Certification Authority */
+/* issuer :/C=US/O=GeoTrust Inc./CN=GeoTrust Primary Certification Authority */
+
+
+const unsigned char GeoTrust_Primary_Certification_Authority_certificate[896]={
+0x30,0x82,0x03,0x7C,0x30,0x82,0x02,0x64,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x18,
+0xAC,0xB5,0x6A,0xFD,0x69,0xB6,0x15,0x3A,0x63,0x6C,0xAF,0xDA,0xFA,0xC4,0xA1,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x58,
+0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16,0x30,
+0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,
+0x20,0x49,0x6E,0x63,0x2E,0x31,0x31,0x30,0x2F,0x06,0x03,0x55,0x04,0x03,0x13,0x28,
+0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,
+0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,
+0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x1E,0x17,0x0D,0x30,0x36,0x31,0x31,
+0x32,0x37,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x36,0x30,0x37,0x31,
+0x36,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x58,0x31,0x0B,0x30,0x09,0x06,0x03,
+0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,
+0x13,0x0D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,
+0x31,0x30,0x2F,0x06,0x03,0x55,0x04,0x03,0x13,0x28,0x47,0x65,0x6F,0x54,0x72,0x75,
+0x73,0x74,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,
0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,
-0x74,0x79,0x20,0x32,0x30,0x1E,0x17,0x0D,0x30,0x32,0x30,0x35,0x32,0x38,0x30,0x36,
-0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x37,0x30,0x39,0x32,0x39,0x31,0x34,0x30,
-0x38,0x30,0x30,0x5A,0x30,0x63,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,
-0x02,0x55,0x53,0x31,0x1C,0x30,0x1A,0x06,0x03,0x55,0x04,0x0A,0x13,0x13,0x41,0x6D,
-0x65,0x72,0x69,0x63,0x61,0x20,0x4F,0x6E,0x6C,0x69,0x6E,0x65,0x20,0x49,0x6E,0x63,
-0x2E,0x31,0x36,0x30,0x34,0x06,0x03,0x55,0x04,0x03,0x13,0x2D,0x41,0x6D,0x65,0x72,
-0x69,0x63,0x61,0x20,0x4F,0x6E,0x6C,0x69,0x6E,0x65,0x20,0x52,0x6F,0x6F,0x74,0x20,
-0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,
-0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20,0x32,0x30,0x82,0x02,0x22,0x30,0x0D,0x06,
-0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x02,0x0F,
-0x00,0x30,0x82,0x02,0x0A,0x02,0x82,0x02,0x01,0x00,0xCC,0x41,0x45,0x1D,0xE9,0x3D,
-0x4D,0x10,0xF6,0x8C,0xB1,0x41,0xC9,0xE0,0x5E,0xCB,0x0D,0xB7,0xBF,0x47,0x73,0xD3,
-0xF0,0x55,0x4D,0xDD,0xC6,0x0C,0xFA,0xB1,0x66,0x05,0x6A,0xCD,0x78,0xB4,0xDC,0x02,
-0xDB,0x4E,0x81,0xF3,0xD7,0xA7,0x7C,0x71,0xBC,0x75,0x63,0xA0,0x5D,0xE3,0x07,0x0C,
-0x48,0xEC,0x25,0xC4,0x03,0x20,0xF4,0xFF,0x0E,0x3B,0x12,0xFF,0x9B,0x8D,0xE1,0xC6,
-0xD5,0x1B,0xB4,0x6D,0x22,0xE3,0xB1,0xDB,0x7F,0x21,0x64,0xAF,0x86,0xBC,0x57,0x22,
-0x2A,0xD6,0x47,0x81,0x57,0x44,0x82,0x56,0x53,0xBD,0x86,0x14,0x01,0x0B,0xFC,0x7F,
-0x74,0xA4,0x5A,0xAE,0xF1,0xBA,0x11,0xB5,0x9B,0x58,0x5A,0x80,0xB4,0x37,0x78,0x09,
-0x33,0x7C,0x32,0x47,0x03,0x5C,0xC4,0xA5,0x83,0x48,0xF4,0x57,0x56,0x6E,0x81,0x36,
-0x27,0x18,0x4F,0xEC,0x9B,0x28,0xC2,0xD4,0xB4,0xD7,0x7C,0x0C,0x3E,0x0C,0x2B,0xDF,
-0xCA,0x04,0xD7,0xC6,0x8E,0xEA,0x58,0x4E,0xA8,0xA4,0xA5,0x18,0x1C,0x6C,0x45,0x98,
-0xA3,0x41,0xD1,0x2D,0xD2,0xC7,0x6D,0x8D,0x19,0xF1,0xAD,0x79,0xB7,0x81,0x3F,0xBD,
-0x06,0x82,0x27,0x2D,0x10,0x58,0x05,0xB5,0x78,0x05,0xB9,0x2F,0xDB,0x0C,0x6B,0x90,
-0x90,0x7E,0x14,0x59,0x38,0xBB,0x94,0x24,0x13,0xE5,0xD1,0x9D,0x14,0xDF,0xD3,0x82,
-0x4D,0x46,0xF0,0x80,0x39,0x52,0x32,0x0F,0xE3,0x84,0xB2,0x7A,0x43,0xF2,0x5E,0xDE,
-0x5F,0x3F,0x1D,0xDD,0xE3,0xB2,0x1B,0xA0,0xA1,0x2A,0x23,0x03,0x6E,0x2E,0x01,0x15,
-0x87,0x5C,0xA6,0x75,0x75,0xC7,0x97,0x61,0xBE,0xDE,0x86,0xDC,0xD4,0x48,0xDB,0xBD,
-0x2A,0xBF,0x4A,0x55,0xDA,0xE8,0x7D,0x50,0xFB,0xB4,0x80,0x17,0xB8,0x94,0xBF,0x01,
-0x3D,0xEA,0xDA,0xBA,0x7C,0xE0,0x58,0x67,0x17,0xB9,0x58,0xE0,0x88,0x86,0x46,0x67,
-0x6C,0x9D,0x10,0x47,0x58,0x32,0xD0,0x35,0x7C,0x79,0x2A,0x90,0xA2,0x5A,0x10,0x11,
-0x23,0x35,0xAD,0x2F,0xCC,0xE4,0x4A,0x5B,0xA7,0xC8,0x27,0xF2,0x83,0xDE,0x5E,0xBB,
-0x5E,0x77,0xE7,0xE8,0xA5,0x6E,0x63,0xC2,0x0D,0x5D,0x61,0xD0,0x8C,0xD2,0x6C,0x5A,
-0x21,0x0E,0xCA,0x28,0xA3,0xCE,0x2A,0xE9,0x95,0xC7,0x48,0xCF,0x96,0x6F,0x1D,0x92,
-0x25,0xC8,0xC6,0xC6,0xC1,0xC1,0x0C,0x05,0xAC,0x26,0xC4,0xD2,0x75,0xD2,0xE1,0x2A,
-0x67,0xC0,0x3D,0x5B,0xA5,0x9A,0xEB,0xCF,0x7B,0x1A,0xA8,0x9D,0x14,0x45,0xE5,0x0F,
-0xA0,0x9A,0x65,0xDE,0x2F,0x28,0xBD,0xCE,0x6F,0x94,0x66,0x83,0x48,0x29,0xD8,0xEA,
-0x65,0x8C,0xAF,0x93,0xD9,0x64,0x9F,0x55,0x57,0x26,0xBF,0x6F,0xCB,0x37,0x31,0x99,
-0xA3,0x60,0xBB,0x1C,0xAD,0x89,0x34,0x32,0x62,0xB8,0x43,0x21,0x06,0x72,0x0C,0xA1,
-0x5C,0x6D,0x46,0xC5,0xFA,0x29,0xCF,0x30,0xDE,0x89,0xDC,0x71,0x5B,0xDD,0xB6,0x37,
-0x3E,0xDF,0x50,0xF5,0xB8,0x07,0x25,0x26,0xE5,0xBC,0xB5,0xFE,0x3C,0x02,0xB3,0xB7,
-0xF8,0xBE,0x43,0xC1,0x87,0x11,0x94,0x9E,0x23,0x6C,0x17,0x8A,0xB8,0x8A,0x27,0x0C,
-0x54,0x47,0xF0,0xA9,0xB3,0xC0,0x80,0x8C,0xA0,0x27,0xEB,0x1D,0x19,0xE3,0x07,0x8E,
-0x77,0x70,0xCA,0x2B,0xF4,0x7D,0x76,0xE0,0x78,0x67,0x02,0x03,0x01,0x00,0x01,0xA3,
-0x63,0x30,0x61,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,
-0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x4D,
-0x45,0xC1,0x68,0x38,0xBB,0x73,0xA9,0x69,0xA1,0x20,0xE7,0xED,0xF5,0x22,0xA1,0x23,
-0x14,0xD7,0x9E,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,
-0x4D,0x45,0xC1,0x68,0x38,0xBB,0x73,0xA9,0x69,0xA1,0x20,0xE7,0xED,0xF5,0x22,0xA1,
-0x23,0x14,0xD7,0x9E,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,
-0x03,0x02,0x01,0x86,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,
-0x05,0x05,0x00,0x03,0x82,0x02,0x01,0x00,0x67,0x6B,0x06,0xB9,0x5F,0x45,0x3B,0x2A,
-0x4B,0x33,0xB3,0xE6,0x1B,0x6B,0x59,0x4E,0x22,0xCC,0xB9,0xB7,0xA4,0x25,0xC9,0xA7,
-0xC4,0xF0,0x54,0x96,0x0B,0x64,0xF3,0xB1,0x58,0x4F,0x5E,0x51,0xFC,0xB2,0x97,0x7B,
-0x27,0x65,0xC2,0xE5,0xCA,0xE7,0x0D,0x0C,0x25,0x7B,0x62,0xE3,0xFA,0x9F,0xB4,0x87,
-0xB7,0x45,0x46,0xAF,0x83,0xA5,0x97,0x48,0x8C,0xA5,0xBD,0xF1,0x16,0x2B,0x9B,0x76,
-0x2C,0x7A,0x35,0x60,0x6C,0x11,0x80,0x97,0xCC,0xA9,0x92,0x52,0xE6,0x2B,0xE6,0x69,
-0xED,0xA9,0xF8,0x36,0x2D,0x2C,0x77,0xBF,0x61,0x48,0xD1,0x63,0x0B,0xB9,0x5B,0x52,
-0xED,0x18,0xB0,0x43,0x42,0x22,0xA6,0xB1,0x77,0xAE,0xDE,0x69,0xC5,0xCD,0xC7,0x1C,
-0xA1,0xB1,0xA5,0x1C,0x10,0xFB,0x18,0xBE,0x1A,0x70,0xDD,0xC1,0x92,0x4B,0xBE,0x29,
-0x5A,0x9D,0x3F,0x35,0xBE,0xE5,0x7D,0x51,0xF8,0x55,0xE0,0x25,0x75,0x23,0x87,0x1E,
-0x5C,0xDC,0xBA,0x9D,0xB0,0xAC,0xB3,0x69,0xDB,0x17,0x83,0xC9,0xF7,0xDE,0x0C,0xBC,
-0x08,0xDC,0x91,0x9E,0xA8,0xD0,0xD7,0x15,0x37,0x73,0xA5,0x35,0xB8,0xFC,0x7E,0xC5,
-0x44,0x40,0x06,0xC3,0xEB,0xF8,0x22,0x80,0x5C,0x47,0xCE,0x02,0xE3,0x11,0x9F,0x44,
-0xFF,0xFD,0x9A,0x32,0xCC,0x7D,0x64,0x51,0x0E,0xEB,0x57,0x26,0x76,0x3A,0xE3,0x1E,
-0x22,0x3C,0xC2,0xA6,0x36,0xDD,0x19,0xEF,0xA7,0xFC,0x12,0xF3,0x26,0xC0,0x59,0x31,
-0x85,0x4C,0x9C,0xD8,0xCF,0xDF,0xA4,0xCC,0xCC,0x29,0x93,0xFF,0x94,0x6D,0x76,0x5C,
-0x13,0x08,0x97,0xF2,0xED,0xA5,0x0B,0x4D,0xDD,0xE8,0xC9,0x68,0x0E,0x66,0xD3,0x00,
-0x0E,0x33,0x12,0x5B,0xBC,0x95,0xE5,0x32,0x90,0xA8,0xB3,0xC6,0x6C,0x83,0xAD,0x77,
-0xEE,0x8B,0x7E,0x7E,0xB1,0xA9,0xAB,0xD3,0xE1,0xF1,0xB6,0xC0,0xB1,0xEA,0x88,0xC0,
-0xE7,0xD3,0x90,0xE9,0x28,0x92,0x94,0x7B,0x68,0x7B,0x97,0x2A,0x0A,0x67,0x2D,0x85,
-0x02,0x38,0x10,0xE4,0x03,0x61,0xD4,0xDA,0x25,0x36,0xC7,0x08,0x58,0x2D,0xA1,0xA7,
-0x51,0xAF,0x30,0x0A,0x49,0xF5,0xA6,0x69,0x87,0x07,0x2D,0x44,0x46,0x76,0x8E,0x2A,
-0xE5,0x9A,0x3B,0xD7,0x18,0xA2,0xFC,0x9C,0x38,0x10,0xCC,0xC6,0x3B,0xD2,0xB5,0x17,
-0x3A,0x6F,0xFD,0xAE,0x25,0xBD,0xF5,0x72,0x59,0x64,0xB1,0x74,0x2A,0x38,0x5F,0x18,
-0x4C,0xDF,0xCF,0x71,0x04,0x5A,0x36,0xD4,0xBF,0x2F,0x99,0x9C,0xE8,0xD9,0xBA,0xB1,
-0x95,0xE6,0x02,0x4B,0x21,0xA1,0x5B,0xD5,0xC1,0x4F,0x8F,0xAE,0x69,0x6D,0x53,0xDB,
-0x01,0x93,0xB5,0x5C,0x1E,0x18,0xDD,0x64,0x5A,0xCA,0x18,0x28,0x3E,0x63,0x04,0x11,
-0xFD,0x1C,0x8D,0x00,0x0F,0xB8,0x37,0xDF,0x67,0x8A,0x9D,0x66,0xA9,0x02,0x6A,0x91,
-0xFF,0x13,0xCA,0x2F,0x5D,0x83,0xBC,0x87,0x93,0x6C,0xDC,0x24,0x51,0x16,0x04,0x25,
-0x66,0xFA,0xB3,0xD9,0xC2,0xBA,0x29,0xBE,0x9A,0x48,0x38,0x82,0x99,0xF4,0xBF,0x3B,
-0x4A,0x31,0x19,0xF9,0xBF,0x8E,0x21,0x33,0x14,0xCA,0x4F,0x54,0x5F,0xFB,0xCE,0xFB,
-0x8F,0x71,0x7F,0xFD,0x5E,0x19,0xA0,0x0F,0x4B,0x91,0xB8,0xC4,0x54,0xBC,0x06,0xB0,
-0x45,0x8F,0x26,0x91,0xA2,0x8E,0xFE,0xA9,
+0x74,0x79,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,
+0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,
+0x01,0x01,0x00,0xBE,0xB8,0x15,0x7B,0xFF,0xD4,0x7C,0x7D,0x67,0xAD,0x83,0x64,0x7B,
+0xC8,0x42,0x53,0x2D,0xDF,0xF6,0x84,0x08,0x20,0x61,0xD6,0x01,0x59,0x6A,0x9C,0x44,
+0x11,0xAF,0xEF,0x76,0xFD,0x95,0x7E,0xCE,0x61,0x30,0xBB,0x7A,0x83,0x5F,0x02,0xBD,
+0x01,0x66,0xCA,0xEE,0x15,0x8D,0x6F,0xA1,0x30,0x9C,0xBD,0xA1,0x85,0x9E,0x94,0x3A,
+0xF3,0x56,0x88,0x00,0x31,0xCF,0xD8,0xEE,0x6A,0x96,0x02,0xD9,0xED,0x03,0x8C,0xFB,
+0x75,0x6D,0xE7,0xEA,0xB8,0x55,0x16,0x05,0x16,0x9A,0xF4,0xE0,0x5E,0xB1,0x88,0xC0,
+0x64,0x85,0x5C,0x15,0x4D,0x88,0xC7,0xB7,0xBA,0xE0,0x75,0xE9,0xAD,0x05,0x3D,0x9D,
+0xC7,0x89,0x48,0xE0,0xBB,0x28,0xC8,0x03,0xE1,0x30,0x93,0x64,0x5E,0x52,0xC0,0x59,
+0x70,0x22,0x35,0x57,0x88,0x8A,0xF1,0x95,0x0A,0x83,0xD7,0xBC,0x31,0x73,0x01,0x34,
+0xED,0xEF,0x46,0x71,0xE0,0x6B,0x02,0xA8,0x35,0x72,0x6B,0x97,0x9B,0x66,0xE0,0xCB,
+0x1C,0x79,0x5F,0xD8,0x1A,0x04,0x68,0x1E,0x47,0x02,0xE6,0x9D,0x60,0xE2,0x36,0x97,
+0x01,0xDF,0xCE,0x35,0x92,0xDF,0xBE,0x67,0xC7,0x6D,0x77,0x59,0x3B,0x8F,0x9D,0xD6,
+0x90,0x15,0x94,0xBC,0x42,0x34,0x10,0xC1,0x39,0xF9,0xB1,0x27,0x3E,0x7E,0xD6,0x8A,
+0x75,0xC5,0xB2,0xAF,0x96,0xD3,0xA2,0xDE,0x9B,0xE4,0x98,0xBE,0x7D,0xE1,0xE9,0x81,
+0xAD,0xB6,0x6F,0xFC,0xD7,0x0E,0xDA,0xE0,0x34,0xB0,0x0D,0x1A,0x77,0xE7,0xE3,0x08,
+0x98,0xEF,0x58,0xFA,0x9C,0x84,0xB7,0x36,0xAF,0xC2,0xDF,0xAC,0xD2,0xF4,0x10,0x06,
+0x70,0x71,0x35,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x0F,0x06,0x03,
+0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,
+0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x1D,0x06,
+0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x2C,0xD5,0x50,0x41,0x97,0x15,0x8B,0xF0,
+0x8F,0x36,0x61,0x5B,0x4A,0xFB,0x6B,0xD9,0x99,0xC9,0x33,0x92,0x30,0x0D,0x06,0x09,
+0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,
+0x5A,0x70,0x7F,0x2C,0xDD,0xB7,0x34,0x4F,0xF5,0x86,0x51,0xA9,0x26,0xBE,0x4B,0xB8,
+0xAA,0xF1,0x71,0x0D,0xDC,0x61,0xC7,0xA0,0xEA,0x34,0x1E,0x7A,0x77,0x0F,0x04,0x35,
+0xE8,0x27,0x8F,0x6C,0x90,0xBF,0x91,0x16,0x24,0x46,0x3E,0x4A,0x4E,0xCE,0x2B,0x16,
+0xD5,0x0B,0x52,0x1D,0xFC,0x1F,0x67,0xA2,0x02,0x45,0x31,0x4F,0xCE,0xF3,0xFA,0x03,
+0xA7,0x79,0x9D,0x53,0x6A,0xD9,0xDA,0x63,0x3A,0xF8,0x80,0xD7,0xD3,0x99,0xE1,0xA5,
+0xE1,0xBE,0xD4,0x55,0x71,0x98,0x35,0x3A,0xBE,0x93,0xEA,0xAE,0xAD,0x42,0xB2,0x90,
+0x6F,0xE0,0xFC,0x21,0x4D,0x35,0x63,0x33,0x89,0x49,0xD6,0x9B,0x4E,0xCA,0xC7,0xE7,
+0x4E,0x09,0x00,0xF7,0xDA,0xC7,0xEF,0x99,0x62,0x99,0x77,0xB6,0x95,0x22,0x5E,0x8A,
+0xA0,0xAB,0xF4,0xB8,0x78,0x98,0xCA,0x38,0x19,0x99,0xC9,0x72,0x9E,0x78,0xCD,0x4B,
+0xAC,0xAF,0x19,0xA0,0x73,0x12,0x2D,0xFC,0xC2,0x41,0xBA,0x81,0x91,0xDA,0x16,0x5A,
+0x31,0xB7,0xF9,0xB4,0x71,0x80,0x12,0x48,0x99,0x72,0x73,0x5A,0x59,0x53,0xC1,0x63,
+0x52,0x33,0xED,0xA7,0xC9,0xD2,0x39,0x02,0x70,0xFA,0xE0,0xB1,0x42,0x66,0x29,0xAA,
+0x9B,0x51,0xED,0x30,0x54,0x22,0x14,0x5F,0xD9,0xAB,0x1D,0xC1,0xE4,0x94,0xF0,0xF8,
+0xF5,0x2B,0xF7,0xEA,0xCA,0x78,0x46,0xD6,0xB8,0x91,0xFD,0xA6,0x0D,0x2B,0x1A,0x14,
+0x01,0x3E,0x80,0xF0,0x42,0xA0,0x95,0x07,0x5E,0x6D,0xCD,0xCC,0x4B,0xA4,0x45,0x8D,
+0xAB,0x12,0xE8,0xB3,0xDE,0x5A,0xE5,0xA0,0x7C,0xE8,0x0F,0x22,0x1D,0x5A,0xE9,0x59,
};
-/* subject:/C=IE/O=Baltimore/OU=CyberTrust/CN=Baltimore CyberTrust Root */
-/* issuer :/C=IE/O=Baltimore/OU=CyberTrust/CN=Baltimore CyberTrust Root */
+/* subject:/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 2006 VeriSign, Inc. - For authorized use only/CN=VeriSign Class 3 Public Primary Certification Authority - G5 */
+/* issuer :/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 2006 VeriSign, Inc. - For authorized use only/CN=VeriSign Class 3 Public Primary Certification Authority - G5 */
-const unsigned char Baltimore_CyberTrust_Root_certificate[891]={
-0x30,0x82,0x03,0x77,0x30,0x82,0x02,0x5F,0xA0,0x03,0x02,0x01,0x02,0x02,0x04,0x02,
-0x00,0x00,0xB9,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,
-0x05,0x00,0x30,0x5A,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x49,
-0x45,0x31,0x12,0x30,0x10,0x06,0x03,0x55,0x04,0x0A,0x13,0x09,0x42,0x61,0x6C,0x74,
-0x69,0x6D,0x6F,0x72,0x65,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x0B,0x13,0x0A,
-0x43,0x79,0x62,0x65,0x72,0x54,0x72,0x75,0x73,0x74,0x31,0x22,0x30,0x20,0x06,0x03,
-0x55,0x04,0x03,0x13,0x19,0x42,0x61,0x6C,0x74,0x69,0x6D,0x6F,0x72,0x65,0x20,0x43,
-0x79,0x62,0x65,0x72,0x54,0x72,0x75,0x73,0x74,0x20,0x52,0x6F,0x6F,0x74,0x30,0x1E,
-0x17,0x0D,0x30,0x30,0x30,0x35,0x31,0x32,0x31,0x38,0x34,0x36,0x30,0x30,0x5A,0x17,
-0x0D,0x32,0x35,0x30,0x35,0x31,0x32,0x32,0x33,0x35,0x39,0x30,0x30,0x5A,0x30,0x5A,
-0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x49,0x45,0x31,0x12,0x30,
-0x10,0x06,0x03,0x55,0x04,0x0A,0x13,0x09,0x42,0x61,0x6C,0x74,0x69,0x6D,0x6F,0x72,
-0x65,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x0B,0x13,0x0A,0x43,0x79,0x62,0x65,
-0x72,0x54,0x72,0x75,0x73,0x74,0x31,0x22,0x30,0x20,0x06,0x03,0x55,0x04,0x03,0x13,
-0x19,0x42,0x61,0x6C,0x74,0x69,0x6D,0x6F,0x72,0x65,0x20,0x43,0x79,0x62,0x65,0x72,
-0x54,0x72,0x75,0x73,0x74,0x20,0x52,0x6F,0x6F,0x74,0x30,0x82,0x01,0x22,0x30,0x0D,
-0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,
-0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xA3,0x04,0xBB,0x22,0xAB,
-0x98,0x3D,0x57,0xE8,0x26,0x72,0x9A,0xB5,0x79,0xD4,0x29,0xE2,0xE1,0xE8,0x95,0x80,
-0xB1,0xB0,0xE3,0x5B,0x8E,0x2B,0x29,0x9A,0x64,0xDF,0xA1,0x5D,0xED,0xB0,0x09,0x05,
-0x6D,0xDB,0x28,0x2E,0xCE,0x62,0xA2,0x62,0xFE,0xB4,0x88,0xDA,0x12,0xEB,0x38,0xEB,
-0x21,0x9D,0xC0,0x41,0x2B,0x01,0x52,0x7B,0x88,0x77,0xD3,0x1C,0x8F,0xC7,0xBA,0xB9,
-0x88,0xB5,0x6A,0x09,0xE7,0x73,0xE8,0x11,0x40,0xA7,0xD1,0xCC,0xCA,0x62,0x8D,0x2D,
-0xE5,0x8F,0x0B,0xA6,0x50,0xD2,0xA8,0x50,0xC3,0x28,0xEA,0xF5,0xAB,0x25,0x87,0x8A,
-0x9A,0x96,0x1C,0xA9,0x67,0xB8,0x3F,0x0C,0xD5,0xF7,0xF9,0x52,0x13,0x2F,0xC2,0x1B,
-0xD5,0x70,0x70,0xF0,0x8F,0xC0,0x12,0xCA,0x06,0xCB,0x9A,0xE1,0xD9,0xCA,0x33,0x7A,
-0x77,0xD6,0xF8,0xEC,0xB9,0xF1,0x68,0x44,0x42,0x48,0x13,0xD2,0xC0,0xC2,0xA4,0xAE,
-0x5E,0x60,0xFE,0xB6,0xA6,0x05,0xFC,0xB4,0xDD,0x07,0x59,0x02,0xD4,0x59,0x18,0x98,
-0x63,0xF5,0xA5,0x63,0xE0,0x90,0x0C,0x7D,0x5D,0xB2,0x06,0x7A,0xF3,0x85,0xEA,0xEB,
-0xD4,0x03,0xAE,0x5E,0x84,0x3E,0x5F,0xFF,0x15,0xED,0x69,0xBC,0xF9,0x39,0x36,0x72,
-0x75,0xCF,0x77,0x52,0x4D,0xF3,0xC9,0x90,0x2C,0xB9,0x3D,0xE5,0xC9,0x23,0x53,0x3F,
-0x1F,0x24,0x98,0x21,0x5C,0x07,0x99,0x29,0xBD,0xC6,0x3A,0xEC,0xE7,0x6E,0x86,0x3A,
-0x6B,0x97,0x74,0x63,0x33,0xBD,0x68,0x18,0x31,0xF0,0x78,0x8D,0x76,0xBF,0xFC,0x9E,
-0x8E,0x5D,0x2A,0x86,0xA7,0x4D,0x90,0xDC,0x27,0x1A,0x39,0x02,0x03,0x01,0x00,0x01,
-0xA3,0x45,0x30,0x43,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xE5,
-0x9D,0x59,0x30,0x82,0x47,0x58,0xCC,0xAC,0xFA,0x08,0x54,0x36,0x86,0x7B,0x3A,0xB5,
-0x04,0x4D,0xF0,0x30,0x12,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x08,0x30,
-0x06,0x01,0x01,0xFF,0x02,0x01,0x03,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,
-0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,
-0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x85,0x0C,0x5D,0x8E,0xE4,
-0x6F,0x51,0x68,0x42,0x05,0xA0,0xDD,0xBB,0x4F,0x27,0x25,0x84,0x03,0xBD,0xF7,0x64,
-0xFD,0x2D,0xD7,0x30,0xE3,0xA4,0x10,0x17,0xEB,0xDA,0x29,0x29,0xB6,0x79,0x3F,0x76,
-0xF6,0x19,0x13,0x23,0xB8,0x10,0x0A,0xF9,0x58,0xA4,0xD4,0x61,0x70,0xBD,0x04,0x61,
-0x6A,0x12,0x8A,0x17,0xD5,0x0A,0xBD,0xC5,0xBC,0x30,0x7C,0xD6,0xE9,0x0C,0x25,0x8D,
-0x86,0x40,0x4F,0xEC,0xCC,0xA3,0x7E,0x38,0xC6,0x37,0x11,0x4F,0xED,0xDD,0x68,0x31,
-0x8E,0x4C,0xD2,0xB3,0x01,0x74,0xEE,0xBE,0x75,0x5E,0x07,0x48,0x1A,0x7F,0x70,0xFF,
-0x16,0x5C,0x84,0xC0,0x79,0x85,0xB8,0x05,0xFD,0x7F,0xBE,0x65,0x11,0xA3,0x0F,0xC0,
-0x02,0xB4,0xF8,0x52,0x37,0x39,0x04,0xD5,0xA9,0x31,0x7A,0x18,0xBF,0xA0,0x2A,0xF4,
-0x12,0x99,0xF7,0xA3,0x45,0x82,0xE3,0x3C,0x5E,0xF5,0x9D,0x9E,0xB5,0xC8,0x9E,0x7C,
-0x2E,0xC8,0xA4,0x9E,0x4E,0x08,0x14,0x4B,0x6D,0xFD,0x70,0x6D,0x6B,0x1A,0x63,0xBD,
-0x64,0xE6,0x1F,0xB7,0xCE,0xF0,0xF2,0x9F,0x2E,0xBB,0x1B,0xB7,0xF2,0x50,0x88,0x73,
-0x92,0xC2,0xE2,0xE3,0x16,0x8D,0x9A,0x32,0x02,0xAB,0x8E,0x18,0xDD,0xE9,0x10,0x11,
-0xEE,0x7E,0x35,0xAB,0x90,0xAF,0x3E,0x30,0x94,0x7A,0xD0,0x33,0x3D,0xA7,0x65,0x0F,
-0xF5,0xFC,0x8E,0x9E,0x62,0xCF,0x47,0x44,0x2C,0x01,0x5D,0xBB,0x1D,0xB5,0x32,0xD2,
-0x47,0xD2,0x38,0x2E,0xD0,0xFE,0x81,0xDC,0x32,0x6A,0x1E,0xB5,0xEE,0x3C,0xD5,0xFC,
-0xE7,0x81,0x1D,0x19,0xC3,0x24,0x42,0xEA,0x63,0x39,0xA9,
+const unsigned char VeriSign_Class_3_Public_Primary_Certification_Authority___G5_certificate[1239]={
+0x30,0x82,0x04,0xD3,0x30,0x82,0x03,0xBB,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x18,
+0xDA,0xD1,0x9E,0x26,0x7D,0xE8,0xBB,0x4A,0x21,0x58,0xCD,0xCC,0x6B,0x3B,0x4A,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x81,
+0xCA,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,
+0x30,0x15,0x06,0x03,0x55,0x04,0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,
+0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,
+0x13,0x16,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,
+0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,
+0x0B,0x13,0x31,0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x36,0x20,0x56,0x65,0x72,0x69,
+0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,
+0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,
+0x6F,0x6E,0x6C,0x79,0x31,0x45,0x30,0x43,0x06,0x03,0x55,0x04,0x03,0x13,0x3C,0x56,
+0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x33,0x20,
+0x50,0x75,0x62,0x6C,0x69,0x63,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,
+0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,
+0x68,0x6F,0x72,0x69,0x74,0x79,0x20,0x2D,0x20,0x47,0x35,0x30,0x1E,0x17,0x0D,0x30,
+0x36,0x31,0x31,0x30,0x38,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x36,
+0x30,0x37,0x31,0x36,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0xCA,0x31,0x0B,
+0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,0x30,0x15,0x06,
+0x03,0x55,0x04,0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,
+0x49,0x6E,0x63,0x2E,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,0x13,0x16,0x56,
+0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,0x20,0x4E,0x65,
+0x74,0x77,0x6F,0x72,0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,0x0B,0x13,0x31,
+0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x36,0x20,0x56,0x65,0x72,0x69,0x53,0x69,0x67,
+0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,
+0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,
+0x79,0x31,0x45,0x30,0x43,0x06,0x03,0x55,0x04,0x03,0x13,0x3C,0x56,0x65,0x72,0x69,
+0x53,0x69,0x67,0x6E,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x33,0x20,0x50,0x75,0x62,
+0x6C,0x69,0x63,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,
+0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,
+0x69,0x74,0x79,0x20,0x2D,0x20,0x47,0x35,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,
+0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,
+0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xAF,0x24,0x08,0x08,0x29,0x7A,0x35,
+0x9E,0x60,0x0C,0xAA,0xE7,0x4B,0x3B,0x4E,0xDC,0x7C,0xBC,0x3C,0x45,0x1C,0xBB,0x2B,
+0xE0,0xFE,0x29,0x02,0xF9,0x57,0x08,0xA3,0x64,0x85,0x15,0x27,0xF5,0xF1,0xAD,0xC8,
+0x31,0x89,0x5D,0x22,0xE8,0x2A,0xAA,0xA6,0x42,0xB3,0x8F,0xF8,0xB9,0x55,0xB7,0xB1,
+0xB7,0x4B,0xB3,0xFE,0x8F,0x7E,0x07,0x57,0xEC,0xEF,0x43,0xDB,0x66,0x62,0x15,0x61,
+0xCF,0x60,0x0D,0xA4,0xD8,0xDE,0xF8,0xE0,0xC3,0x62,0x08,0x3D,0x54,0x13,0xEB,0x49,
+0xCA,0x59,0x54,0x85,0x26,0xE5,0x2B,0x8F,0x1B,0x9F,0xEB,0xF5,0xA1,0x91,0xC2,0x33,
+0x49,0xD8,0x43,0x63,0x6A,0x52,0x4B,0xD2,0x8F,0xE8,0x70,0x51,0x4D,0xD1,0x89,0x69,
+0x7B,0xC7,0x70,0xF6,0xB3,0xDC,0x12,0x74,0xDB,0x7B,0x5D,0x4B,0x56,0xD3,0x96,0xBF,
+0x15,0x77,0xA1,0xB0,0xF4,0xA2,0x25,0xF2,0xAF,0x1C,0x92,0x67,0x18,0xE5,0xF4,0x06,
+0x04,0xEF,0x90,0xB9,0xE4,0x00,0xE4,0xDD,0x3A,0xB5,0x19,0xFF,0x02,0xBA,0xF4,0x3C,
+0xEE,0xE0,0x8B,0xEB,0x37,0x8B,0xEC,0xF4,0xD7,0xAC,0xF2,0xF6,0xF0,0x3D,0xAF,0xDD,
+0x75,0x91,0x33,0x19,0x1D,0x1C,0x40,0xCB,0x74,0x24,0x19,0x21,0x93,0xD9,0x14,0xFE,
+0xAC,0x2A,0x52,0xC7,0x8F,0xD5,0x04,0x49,0xE4,0x8D,0x63,0x47,0x88,0x3C,0x69,0x83,
+0xCB,0xFE,0x47,0xBD,0x2B,0x7E,0x4F,0xC5,0x95,0xAE,0x0E,0x9D,0xD4,0xD1,0x43,0xC0,
+0x67,0x73,0xE3,0x14,0x08,0x7E,0xE5,0x3F,0x9F,0x73,0xB8,0x33,0x0A,0xCF,0x5D,0x3F,
+0x34,0x87,0x96,0x8A,0xEE,0x53,0xE8,0x25,0x15,0x02,0x03,0x01,0x00,0x01,0xA3,0x81,
+0xB2,0x30,0x81,0xAF,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,
+0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,
+0x04,0x03,0x02,0x01,0x06,0x30,0x6D,0x06,0x08,0x2B,0x06,0x01,0x05,0x05,0x07,0x01,
+0x0C,0x04,0x61,0x30,0x5F,0xA1,0x5D,0xA0,0x5B,0x30,0x59,0x30,0x57,0x30,0x55,0x16,
+0x09,0x69,0x6D,0x61,0x67,0x65,0x2F,0x67,0x69,0x66,0x30,0x21,0x30,0x1F,0x30,0x07,
+0x06,0x05,0x2B,0x0E,0x03,0x02,0x1A,0x04,0x14,0x8F,0xE5,0xD3,0x1A,0x86,0xAC,0x8D,
+0x8E,0x6B,0xC3,0xCF,0x80,0x6A,0xD4,0x48,0x18,0x2C,0x7B,0x19,0x2E,0x30,0x25,0x16,
+0x23,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x6C,0x6F,0x67,0x6F,0x2E,0x76,0x65,0x72,
+0x69,0x73,0x69,0x67,0x6E,0x2E,0x63,0x6F,0x6D,0x2F,0x76,0x73,0x6C,0x6F,0x67,0x6F,
+0x2E,0x67,0x69,0x66,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x7F,
+0xD3,0x65,0xA7,0xC2,0xDD,0xEC,0xBB,0xF0,0x30,0x09,0xF3,0x43,0x39,0xFA,0x02,0xAF,
+0x33,0x31,0x33,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,
+0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x93,0x24,0x4A,0x30,0x5F,0x62,0xCF,0xD8,0x1A,
+0x98,0x2F,0x3D,0xEA,0xDC,0x99,0x2D,0xBD,0x77,0xF6,0xA5,0x79,0x22,0x38,0xEC,0xC4,
+0xA7,0xA0,0x78,0x12,0xAD,0x62,0x0E,0x45,0x70,0x64,0xC5,0xE7,0x97,0x66,0x2D,0x98,
+0x09,0x7E,0x5F,0xAF,0xD6,0xCC,0x28,0x65,0xF2,0x01,0xAA,0x08,0x1A,0x47,0xDE,0xF9,
+0xF9,0x7C,0x92,0x5A,0x08,0x69,0x20,0x0D,0xD9,0x3E,0x6D,0x6E,0x3C,0x0D,0x6E,0xD8,
+0xE6,0x06,0x91,0x40,0x18,0xB9,0xF8,0xC1,0xED,0xDF,0xDB,0x41,0xAA,0xE0,0x96,0x20,
+0xC9,0xCD,0x64,0x15,0x38,0x81,0xC9,0x94,0xEE,0xA2,0x84,0x29,0x0B,0x13,0x6F,0x8E,
+0xDB,0x0C,0xDD,0x25,0x02,0xDB,0xA4,0x8B,0x19,0x44,0xD2,0x41,0x7A,0x05,0x69,0x4A,
+0x58,0x4F,0x60,0xCA,0x7E,0x82,0x6A,0x0B,0x02,0xAA,0x25,0x17,0x39,0xB5,0xDB,0x7F,
+0xE7,0x84,0x65,0x2A,0x95,0x8A,0xBD,0x86,0xDE,0x5E,0x81,0x16,0x83,0x2D,0x10,0xCC,
+0xDE,0xFD,0xA8,0x82,0x2A,0x6D,0x28,0x1F,0x0D,0x0B,0xC4,0xE5,0xE7,0x1A,0x26,0x19,
+0xE1,0xF4,0x11,0x6F,0x10,0xB5,0x95,0xFC,0xE7,0x42,0x05,0x32,0xDB,0xCE,0x9D,0x51,
+0x5E,0x28,0xB6,0x9E,0x85,0xD3,0x5B,0xEF,0xA5,0x7D,0x45,0x40,0x72,0x8E,0xB7,0x0E,
+0x6B,0x0E,0x06,0xFB,0x33,0x35,0x48,0x71,0xB8,0x9D,0x27,0x8B,0xC4,0x65,0x5F,0x0D,
+0x86,0x76,0x9C,0x44,0x7A,0xF6,0x95,0x5C,0xF6,0x5D,0x32,0x08,0x33,0xA4,0x54,0xB6,
+0x18,0x3F,0x68,0x5C,0xF2,0x42,0x4A,0x85,0x38,0x54,0x83,0x5F,0xD1,0xE8,0x2C,0xF2,
+0xAC,0x11,0xD6,0xA8,0xED,0x63,0x6A,
};
-/* subject:/C=GB/ST=Greater Manchester/L=Salford/O=Comodo CA Limited/CN=AAA Certificate Services */
-/* issuer :/C=GB/ST=Greater Manchester/L=Salford/O=Comodo CA Limited/CN=AAA Certificate Services */
-const unsigned char Comodo_AAA_Services_root_certificate[1078]={
-0x30,0x82,0x04,0x32,0x30,0x82,0x03,0x1A,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01,
-0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,
-0x7B,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x47,0x42,0x31,0x1B,
-0x30,0x19,0x06,0x03,0x55,0x04,0x08,0x0C,0x12,0x47,0x72,0x65,0x61,0x74,0x65,0x72,
-0x20,0x4D,0x61,0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31,0x10,0x30,0x0E,0x06,
-0x03,0x55,0x04,0x07,0x0C,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,0x30,
-0x18,0x06,0x03,0x55,0x04,0x0A,0x0C,0x11,0x43,0x6F,0x6D,0x6F,0x64,0x6F,0x20,0x43,
-0x41,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,
-0x04,0x03,0x0C,0x18,0x41,0x41,0x41,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,
-0x61,0x74,0x65,0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x30,0x1E,0x17,0x0D,
-0x30,0x34,0x30,0x31,0x30,0x31,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32,
-0x38,0x31,0x32,0x33,0x31,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x7B,0x31,0x0B,
-0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x47,0x42,0x31,0x1B,0x30,0x19,0x06,
-0x03,0x55,0x04,0x08,0x0C,0x12,0x47,0x72,0x65,0x61,0x74,0x65,0x72,0x20,0x4D,0x61,
-0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04,
-0x07,0x0C,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,0x30,0x18,0x06,0x03,
-0x55,0x04,0x0A,0x0C,0x11,0x43,0x6F,0x6D,0x6F,0x64,0x6F,0x20,0x43,0x41,0x20,0x4C,
-0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x03,0x0C,
-0x18,0x41,0x41,0x41,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,
-0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,
-0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,
-0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xBE,0x40,0x9D,0xF4,0x6E,0xE1,
-0xEA,0x76,0x87,0x1C,0x4D,0x45,0x44,0x8E,0xBE,0x46,0xC8,0x83,0x06,0x9D,0xC1,0x2A,
-0xFE,0x18,0x1F,0x8E,0xE4,0x02,0xFA,0xF3,0xAB,0x5D,0x50,0x8A,0x16,0x31,0x0B,0x9A,
-0x06,0xD0,0xC5,0x70,0x22,0xCD,0x49,0x2D,0x54,0x63,0xCC,0xB6,0x6E,0x68,0x46,0x0B,
-0x53,0xEA,0xCB,0x4C,0x24,0xC0,0xBC,0x72,0x4E,0xEA,0xF1,0x15,0xAE,0xF4,0x54,0x9A,
-0x12,0x0A,0xC3,0x7A,0xB2,0x33,0x60,0xE2,0xDA,0x89,0x55,0xF3,0x22,0x58,0xF3,0xDE,
-0xDC,0xCF,0xEF,0x83,0x86,0xA2,0x8C,0x94,0x4F,0x9F,0x68,0xF2,0x98,0x90,0x46,0x84,
-0x27,0xC7,0x76,0xBF,0xE3,0xCC,0x35,0x2C,0x8B,0x5E,0x07,0x64,0x65,0x82,0xC0,0x48,
-0xB0,0xA8,0x91,0xF9,0x61,0x9F,0x76,0x20,0x50,0xA8,0x91,0xC7,0x66,0xB5,0xEB,0x78,
-0x62,0x03,0x56,0xF0,0x8A,0x1A,0x13,0xEA,0x31,0xA3,0x1E,0xA0,0x99,0xFD,0x38,0xF6,
-0xF6,0x27,0x32,0x58,0x6F,0x07,0xF5,0x6B,0xB8,0xFB,0x14,0x2B,0xAF,0xB7,0xAA,0xCC,
-0xD6,0x63,0x5F,0x73,0x8C,0xDA,0x05,0x99,0xA8,0x38,0xA8,0xCB,0x17,0x78,0x36,0x51,
-0xAC,0xE9,0x9E,0xF4,0x78,0x3A,0x8D,0xCF,0x0F,0xD9,0x42,0xE2,0x98,0x0C,0xAB,0x2F,
-0x9F,0x0E,0x01,0xDE,0xEF,0x9F,0x99,0x49,0xF1,0x2D,0xDF,0xAC,0x74,0x4D,0x1B,0x98,
-0xB5,0x47,0xC5,0xE5,0x29,0xD1,0xF9,0x90,0x18,0xC7,0x62,0x9C,0xBE,0x83,0xC7,0x26,
-0x7B,0x3E,0x8A,0x25,0xC7,0xC0,0xDD,0x9D,0xE6,0x35,0x68,0x10,0x20,0x9D,0x8F,0xD8,
-0xDE,0xD2,0xC3,0x84,0x9C,0x0D,0x5E,0xE8,0x2F,0xC9,0x02,0x03,0x01,0x00,0x01,0xA3,
-0x81,0xC0,0x30,0x81,0xBD,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,
-0xA0,0x11,0x0A,0x23,0x3E,0x96,0xF1,0x07,0xEC,0xE2,0xAF,0x29,0xEF,0x82,0xA5,0x7F,
-0xD0,0x30,0xA4,0xB4,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,
-0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,
-0x30,0x03,0x01,0x01,0xFF,0x30,0x7B,0x06,0x03,0x55,0x1D,0x1F,0x04,0x74,0x30,0x72,
-0x30,0x38,0xA0,0x36,0xA0,0x34,0x86,0x32,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x63,
-0x72,0x6C,0x2E,0x63,0x6F,0x6D,0x6F,0x64,0x6F,0x63,0x61,0x2E,0x63,0x6F,0x6D,0x2F,
-0x41,0x41,0x41,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x53,0x65,
-0x72,0x76,0x69,0x63,0x65,0x73,0x2E,0x63,0x72,0x6C,0x30,0x36,0xA0,0x34,0xA0,0x32,
-0x86,0x30,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x63,0x72,0x6C,0x2E,0x63,0x6F,0x6D,
-0x6F,0x64,0x6F,0x2E,0x6E,0x65,0x74,0x2F,0x41,0x41,0x41,0x43,0x65,0x72,0x74,0x69,
-0x66,0x69,0x63,0x61,0x74,0x65,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x2E,0x63,
-0x72,0x6C,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,
-0x00,0x03,0x82,0x01,0x01,0x00,0x08,0x56,0xFC,0x02,0xF0,0x9B,0xE8,0xFF,0xA4,0xFA,
-0xD6,0x7B,0xC6,0x44,0x80,0xCE,0x4F,0xC4,0xC5,0xF6,0x00,0x58,0xCC,0xA6,0xB6,0xBC,
-0x14,0x49,0x68,0x04,0x76,0xE8,0xE6,0xEE,0x5D,0xEC,0x02,0x0F,0x60,0xD6,0x8D,0x50,
-0x18,0x4F,0x26,0x4E,0x01,0xE3,0xE6,0xB0,0xA5,0xEE,0xBF,0xBC,0x74,0x54,0x41,0xBF,
-0xFD,0xFC,0x12,0xB8,0xC7,0x4F,0x5A,0xF4,0x89,0x60,0x05,0x7F,0x60,0xB7,0x05,0x4A,
-0xF3,0xF6,0xF1,0xC2,0xBF,0xC4,0xB9,0x74,0x86,0xB6,0x2D,0x7D,0x6B,0xCC,0xD2,0xF3,
-0x46,0xDD,0x2F,0xC6,0xE0,0x6A,0xC3,0xC3,0x34,0x03,0x2C,0x7D,0x96,0xDD,0x5A,0xC2,
-0x0E,0xA7,0x0A,0x99,0xC1,0x05,0x8B,0xAB,0x0C,0x2F,0xF3,0x5C,0x3A,0xCF,0x6C,0x37,
-0x55,0x09,0x87,0xDE,0x53,0x40,0x6C,0x58,0xEF,0xFC,0xB6,0xAB,0x65,0x6E,0x04,0xF6,
-0x1B,0xDC,0x3C,0xE0,0x5A,0x15,0xC6,0x9E,0xD9,0xF1,0x59,0x48,0x30,0x21,0x65,0x03,
-0x6C,0xEC,0xE9,0x21,0x73,0xEC,0x9B,0x03,0xA1,0xE0,0x37,0xAD,0xA0,0x15,0x18,0x8F,
-0xFA,0xBA,0x02,0xCE,0xA7,0x2C,0xA9,0x10,0x13,0x2C,0xD4,0xE5,0x08,0x26,0xAB,0x22,
-0x97,0x60,0xF8,0x90,0x5E,0x74,0xD4,0xA2,0x9A,0x53,0xBD,0xF2,0xA9,0x68,0xE0,0xA2,
-0x6E,0xC2,0xD7,0x6C,0xB1,0xA3,0x0F,0x9E,0xBF,0xEB,0x68,0xE7,0x56,0xF2,0xAE,0xF2,
-0xE3,0x2B,0x38,0x3A,0x09,0x81,0xB5,0x6B,0x85,0xD7,0xBE,0x2D,0xED,0x3F,0x1A,0xB7,
-0xB2,0x63,0xE2,0xF5,0x62,0x2C,0x82,0xD4,0x6A,0x00,0x41,0x50,0xF1,0x39,0x83,0x9F,
-0x95,0xE9,0x36,0x96,0x98,0x6E,
+/* subject:/C=US/O=Equifax/OU=Equifax Secure Certificate Authority */
+/* issuer :/C=US/O=Equifax/OU=Equifax Secure Certificate Authority */
+const unsigned char Equifax_Secure_CA_certificate[804]={
+0x30,0x82,0x03,0x20,0x30,0x82,0x02,0x89,0xA0,0x03,0x02,0x01,0x02,0x02,0x04,0x35,
+0xDE,0xF4,0xCF,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,
+0x05,0x00,0x30,0x4E,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,
+0x53,0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x0A,0x13,0x07,0x45,0x71,0x75,0x69,
+0x66,0x61,0x78,0x31,0x2D,0x30,0x2B,0x06,0x03,0x55,0x04,0x0B,0x13,0x24,0x45,0x71,
+0x75,0x69,0x66,0x61,0x78,0x20,0x53,0x65,0x63,0x75,0x72,0x65,0x20,0x43,0x65,0x72,
+0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,
+0x74,0x79,0x30,0x1E,0x17,0x0D,0x39,0x38,0x30,0x38,0x32,0x32,0x31,0x36,0x34,0x31,
+0x35,0x31,0x5A,0x17,0x0D,0x31,0x38,0x30,0x38,0x32,0x32,0x31,0x36,0x34,0x31,0x35,
+0x31,0x5A,0x30,0x4E,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,
+0x53,0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x0A,0x13,0x07,0x45,0x71,0x75,0x69,
+0x66,0x61,0x78,0x31,0x2D,0x30,0x2B,0x06,0x03,0x55,0x04,0x0B,0x13,0x24,0x45,0x71,
+0x75,0x69,0x66,0x61,0x78,0x20,0x53,0x65,0x63,0x75,0x72,0x65,0x20,0x43,0x65,0x72,
+0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,
+0x74,0x79,0x30,0x81,0x9F,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,
+0x01,0x01,0x05,0x00,0x03,0x81,0x8D,0x00,0x30,0x81,0x89,0x02,0x81,0x81,0x00,0xC1,
+0x5D,0xB1,0x58,0x67,0x08,0x62,0xEE,0xA0,0x9A,0x2D,0x1F,0x08,0x6D,0x91,0x14,0x68,
+0x98,0x0A,0x1E,0xFE,0xDA,0x04,0x6F,0x13,0x84,0x62,0x21,0xC3,0xD1,0x7C,0xCE,0x9F,
+0x05,0xE0,0xB8,0x01,0xF0,0x4E,0x34,0xEC,0xE2,0x8A,0x95,0x04,0x64,0xAC,0xF1,0x6B,
+0x53,0x5F,0x05,0xB3,0xCB,0x67,0x80,0xBF,0x42,0x02,0x8E,0xFE,0xDD,0x01,0x09,0xEC,
+0xE1,0x00,0x14,0x4F,0xFC,0xFB,0xF0,0x0C,0xDD,0x43,0xBA,0x5B,0x2B,0xE1,0x1F,0x80,
+0x70,0x99,0x15,0x57,0x93,0x16,0xF1,0x0F,0x97,0x6A,0xB7,0xC2,0x68,0x23,0x1C,0xCC,
+0x4D,0x59,0x30,0xAC,0x51,0x1E,0x3B,0xAF,0x2B,0xD6,0xEE,0x63,0x45,0x7B,0xC5,0xD9,
+0x5F,0x50,0xD2,0xE3,0x50,0x0F,0x3A,0x88,0xE7,0xBF,0x14,0xFD,0xE0,0xC7,0xB9,0x02,
+0x03,0x01,0x00,0x01,0xA3,0x82,0x01,0x09,0x30,0x82,0x01,0x05,0x30,0x70,0x06,0x03,
+0x55,0x1D,0x1F,0x04,0x69,0x30,0x67,0x30,0x65,0xA0,0x63,0xA0,0x61,0xA4,0x5F,0x30,
+0x5D,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x10,
+0x30,0x0E,0x06,0x03,0x55,0x04,0x0A,0x13,0x07,0x45,0x71,0x75,0x69,0x66,0x61,0x78,
+0x31,0x2D,0x30,0x2B,0x06,0x03,0x55,0x04,0x0B,0x13,0x24,0x45,0x71,0x75,0x69,0x66,
+0x61,0x78,0x20,0x53,0x65,0x63,0x75,0x72,0x65,0x20,0x43,0x65,0x72,0x74,0x69,0x66,
+0x69,0x63,0x61,0x74,0x65,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x31,
+0x0D,0x30,0x0B,0x06,0x03,0x55,0x04,0x03,0x13,0x04,0x43,0x52,0x4C,0x31,0x30,0x1A,
+0x06,0x03,0x55,0x1D,0x10,0x04,0x13,0x30,0x11,0x81,0x0F,0x32,0x30,0x31,0x38,0x30,
+0x38,0x32,0x32,0x31,0x36,0x34,0x31,0x35,0x31,0x5A,0x30,0x0B,0x06,0x03,0x55,0x1D,
+0x0F,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,
+0x30,0x16,0x80,0x14,0x48,0xE6,0x68,0xF9,0x2B,0xD2,0xB2,0x95,0xD7,0x47,0xD8,0x23,
+0x20,0x10,0x4F,0x33,0x98,0x90,0x9F,0xD4,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,
+0x16,0x04,0x14,0x48,0xE6,0x68,0xF9,0x2B,0xD2,0xB2,0x95,0xD7,0x47,0xD8,0x23,0x20,
+0x10,0x4F,0x33,0x98,0x90,0x9F,0xD4,0x30,0x0C,0x06,0x03,0x55,0x1D,0x13,0x04,0x05,
+0x30,0x03,0x01,0x01,0xFF,0x30,0x1A,0x06,0x09,0x2A,0x86,0x48,0x86,0xF6,0x7D,0x07,
+0x41,0x00,0x04,0x0D,0x30,0x0B,0x1B,0x05,0x56,0x33,0x2E,0x30,0x63,0x03,0x02,0x06,
+0xC0,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,
+0x03,0x81,0x81,0x00,0x58,0xCE,0x29,0xEA,0xFC,0xF7,0xDE,0xB5,0xCE,0x02,0xB9,0x17,
+0xB5,0x85,0xD1,0xB9,0xE3,0xE0,0x95,0xCC,0x25,0x31,0x0D,0x00,0xA6,0x92,0x6E,0x7F,
+0xB6,0x92,0x63,0x9E,0x50,0x95,0xD1,0x9A,0x6F,0xE4,0x11,0xDE,0x63,0x85,0x6E,0x98,
+0xEE,0xA8,0xFF,0x5A,0xC8,0xD3,0x55,0xB2,0x66,0x71,0x57,0xDE,0xC0,0x21,0xEB,0x3D,
+0x2A,0xA7,0x23,0x49,0x01,0x04,0x86,0x42,0x7B,0xFC,0xEE,0x7F,0xA2,0x16,0x52,0xB5,
+0x67,0x67,0xD3,0x40,0xDB,0x3B,0x26,0x58,0xB2,0x28,0x77,0x3D,0xAE,0x14,0x77,0x61,
+0xD6,0xFA,0x2A,0x66,0x27,0xA0,0x0D,0xFA,0xA7,0x73,0x5C,0xEA,0x70,0xF1,0x94,0x21,
+0x65,0x44,0x5F,0xFA,0xFC,0xEF,0x29,0x68,0xA9,0xA2,0x87,0x79,0xEF,0x79,0xEF,0x4F,
+0xAC,0x07,0x77,0x38,
+};
+
+
+/* subject:/O=Entrust.net/OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/OU=(c) 1999 Entrust.net Limited/CN=Entrust.net Certification Authority (2048) */
+/* issuer :/O=Entrust.net/OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/OU=(c) 1999 Entrust.net Limited/CN=Entrust.net Certification Authority (2048) */
+
+
+const unsigned char Entrust_net_Premium_2048_Secure_Server_CA_certificate[1120]={
+0x30,0x82,0x04,0x5C,0x30,0x82,0x03,0x44,0xA0,0x03,0x02,0x01,0x02,0x02,0x04,0x38,
+0x63,0xB9,0x66,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,
+0x05,0x00,0x30,0x81,0xB4,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x13,0x0B,
+0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x31,0x40,0x30,0x3E,0x06,
+0x03,0x55,0x04,0x0B,0x14,0x37,0x77,0x77,0x77,0x2E,0x65,0x6E,0x74,0x72,0x75,0x73,
+0x74,0x2E,0x6E,0x65,0x74,0x2F,0x43,0x50,0x53,0x5F,0x32,0x30,0x34,0x38,0x20,0x69,
+0x6E,0x63,0x6F,0x72,0x70,0x2E,0x20,0x62,0x79,0x20,0x72,0x65,0x66,0x2E,0x20,0x28,
+0x6C,0x69,0x6D,0x69,0x74,0x73,0x20,0x6C,0x69,0x61,0x62,0x2E,0x29,0x31,0x25,0x30,
+0x23,0x06,0x03,0x55,0x04,0x0B,0x13,0x1C,0x28,0x63,0x29,0x20,0x31,0x39,0x39,0x39,
+0x20,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x20,0x4C,0x69,0x6D,
+0x69,0x74,0x65,0x64,0x31,0x33,0x30,0x31,0x06,0x03,0x55,0x04,0x03,0x13,0x2A,0x45,
+0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x20,0x43,0x65,0x72,0x74,0x69,
+0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,
+0x74,0x79,0x20,0x28,0x32,0x30,0x34,0x38,0x29,0x30,0x1E,0x17,0x0D,0x39,0x39,0x31,
+0x32,0x32,0x34,0x31,0x37,0x35,0x30,0x35,0x31,0x5A,0x17,0x0D,0x31,0x39,0x31,0x32,
+0x32,0x34,0x31,0x38,0x32,0x30,0x35,0x31,0x5A,0x30,0x81,0xB4,0x31,0x14,0x30,0x12,
+0x06,0x03,0x55,0x04,0x0A,0x13,0x0B,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,
+0x65,0x74,0x31,0x40,0x30,0x3E,0x06,0x03,0x55,0x04,0x0B,0x14,0x37,0x77,0x77,0x77,
+0x2E,0x65,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x2F,0x43,0x50,0x53,
+0x5F,0x32,0x30,0x34,0x38,0x20,0x69,0x6E,0x63,0x6F,0x72,0x70,0x2E,0x20,0x62,0x79,
+0x20,0x72,0x65,0x66,0x2E,0x20,0x28,0x6C,0x69,0x6D,0x69,0x74,0x73,0x20,0x6C,0x69,
+0x61,0x62,0x2E,0x29,0x31,0x25,0x30,0x23,0x06,0x03,0x55,0x04,0x0B,0x13,0x1C,0x28,
+0x63,0x29,0x20,0x31,0x39,0x39,0x39,0x20,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,
+0x6E,0x65,0x74,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x33,0x30,0x31,0x06,
+0x03,0x55,0x04,0x03,0x13,0x2A,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,
+0x74,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,
+0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20,0x28,0x32,0x30,0x34,0x38,0x29,
+0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,
+0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,
+0x00,0xAD,0x4D,0x4B,0xA9,0x12,0x86,0xB2,0xEA,0xA3,0x20,0x07,0x15,0x16,0x64,0x2A,
+0x2B,0x4B,0xD1,0xBF,0x0B,0x4A,0x4D,0x8E,0xED,0x80,0x76,0xA5,0x67,0xB7,0x78,0x40,
+0xC0,0x73,0x42,0xC8,0x68,0xC0,0xDB,0x53,0x2B,0xDD,0x5E,0xB8,0x76,0x98,0x35,0x93,
+0x8B,0x1A,0x9D,0x7C,0x13,0x3A,0x0E,0x1F,0x5B,0xB7,0x1E,0xCF,0xE5,0x24,0x14,0x1E,
+0xB1,0x81,0xA9,0x8D,0x7D,0xB8,0xCC,0x6B,0x4B,0x03,0xF1,0x02,0x0C,0xDC,0xAB,0xA5,
+0x40,0x24,0x00,0x7F,0x74,0x94,0xA1,0x9D,0x08,0x29,0xB3,0x88,0x0B,0xF5,0x87,0x77,
+0x9D,0x55,0xCD,0xE4,0xC3,0x7E,0xD7,0x6A,0x64,0xAB,0x85,0x14,0x86,0x95,0x5B,0x97,
+0x32,0x50,0x6F,0x3D,0xC8,0xBA,0x66,0x0C,0xE3,0xFC,0xBD,0xB8,0x49,0xC1,0x76,0x89,
+0x49,0x19,0xFD,0xC0,0xA8,0xBD,0x89,0xA3,0x67,0x2F,0xC6,0x9F,0xBC,0x71,0x19,0x60,
+0xB8,0x2D,0xE9,0x2C,0xC9,0x90,0x76,0x66,0x7B,0x94,0xE2,0xAF,0x78,0xD6,0x65,0x53,
+0x5D,0x3C,0xD6,0x9C,0xB2,0xCF,0x29,0x03,0xF9,0x2F,0xA4,0x50,0xB2,0xD4,0x48,0xCE,
+0x05,0x32,0x55,0x8A,0xFD,0xB2,0x64,0x4C,0x0E,0xE4,0x98,0x07,0x75,0xDB,0x7F,0xDF,
+0xB9,0x08,0x55,0x60,0x85,0x30,0x29,0xF9,0x7B,0x48,0xA4,0x69,0x86,0xE3,0x35,0x3F,
+0x1E,0x86,0x5D,0x7A,0x7A,0x15,0xBD,0xEF,0x00,0x8E,0x15,0x22,0x54,0x17,0x00,0x90,
+0x26,0x93,0xBC,0x0E,0x49,0x68,0x91,0xBF,0xF8,0x47,0xD3,0x9D,0x95,0x42,0xC1,0x0E,
+0x4D,0xDF,0x6F,0x26,0xCF,0xC3,0x18,0x21,0x62,0x66,0x43,0x70,0xD6,0xD5,0xC0,0x07,
+0xE1,0x02,0x03,0x01,0x00,0x01,0xA3,0x74,0x30,0x72,0x30,0x11,0x06,0x09,0x60,0x86,
+0x48,0x01,0x86,0xF8,0x42,0x01,0x01,0x04,0x04,0x03,0x02,0x00,0x07,0x30,0x1F,0x06,
+0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,0x55,0xE4,0x81,0xD1,0x11,0x80,
+0xBE,0xD8,0x89,0xB9,0x08,0xA3,0x31,0xF9,0xA1,0x24,0x09,0x16,0xB9,0x70,0x30,0x1D,
+0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x55,0xE4,0x81,0xD1,0x11,0x80,0xBE,
+0xD8,0x89,0xB9,0x08,0xA3,0x31,0xF9,0xA1,0x24,0x09,0x16,0xB9,0x70,0x30,0x1D,0x06,
+0x09,0x2A,0x86,0x48,0x86,0xF6,0x7D,0x07,0x41,0x00,0x04,0x10,0x30,0x0E,0x1B,0x08,
+0x56,0x35,0x2E,0x30,0x3A,0x34,0x2E,0x30,0x03,0x02,0x04,0x90,0x30,0x0D,0x06,0x09,
+0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,
+0x59,0x47,0xAC,0x21,0x84,0x8A,0x17,0xC9,0x9C,0x89,0x53,0x1E,0xBA,0x80,0x85,0x1A,
+0xC6,0x3C,0x4E,0x3E,0xB1,0x9C,0xB6,0x7C,0xC6,0x92,0x5D,0x18,0x64,0x02,0xE3,0xD3,
+0x06,0x08,0x11,0x61,0x7C,0x63,0xE3,0x2B,0x9D,0x31,0x03,0x70,0x76,0xD2,0xA3,0x28,
+0xA0,0xF4,0xBB,0x9A,0x63,0x73,0xED,0x6D,0xE5,0x2A,0xDB,0xED,0x14,0xA9,0x2B,0xC6,
+0x36,0x11,0xD0,0x2B,0xEB,0x07,0x8B,0xA5,0xDA,0x9E,0x5C,0x19,0x9D,0x56,0x12,0xF5,
+0x54,0x29,0xC8,0x05,0xED,0xB2,0x12,0x2A,0x8D,0xF4,0x03,0x1B,0xFF,0xE7,0x92,0x10,
+0x87,0xB0,0x3A,0xB5,0xC3,0x9D,0x05,0x37,0x12,0xA3,0xC7,0xF4,0x15,0xB9,0xD5,0xA4,
+0x39,0x16,0x9B,0x53,0x3A,0x23,0x91,0xF1,0xA8,0x82,0xA2,0x6A,0x88,0x68,0xC1,0x79,
+0x02,0x22,0xBC,0xAA,0xA6,0xD6,0xAE,0xDF,0xB0,0x14,0x5F,0xB8,0x87,0xD0,0xDD,0x7C,
+0x7F,0x7B,0xFF,0xAF,0x1C,0xCF,0xE6,0xDB,0x07,0xAD,0x5E,0xDB,0x85,0x9D,0xD0,0x2B,
+0x0D,0x33,0xDB,0x04,0xD1,0xE6,0x49,0x40,0x13,0x2B,0x76,0xFB,0x3E,0xE9,0x9C,0x89,
+0x0F,0x15,0xCE,0x18,0xB0,0x85,0x78,0x21,0x4F,0x6B,0x4F,0x0E,0xFA,0x36,0x67,0xCD,
+0x07,0xF2,0xFF,0x08,0xD0,0xE2,0xDE,0xD9,0xBF,0x2A,0xAF,0xB8,0x87,0x86,0x21,0x3C,
+0x04,0xCA,0xB7,0x94,0x68,0x7F,0xCF,0x3C,0xE9,0x98,0xD7,0x38,0xFF,0xEC,0xC0,0xD9,
+0x50,0xF0,0x2E,0x4B,0x58,0xAE,0x46,0x6F,0xD0,0x2E,0xC3,0x60,0xDA,0x72,0x55,0x72,
+0xBD,0x4C,0x45,0x9E,0x61,0xBA,0xBF,0x84,0x81,0x92,0x03,0xD1,0xD2,0x69,0x7C,0xC5,
+};
+
+
+/* subject:/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Assured ID Root G3 */
+/* issuer :/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Assured ID Root G3 */
+
+
+const unsigned char DigiCert_Assured_ID_Root_G3_certificate[586]={
+0x30,0x82,0x02,0x46,0x30,0x82,0x01,0xCD,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x0B,
+0xA1,0x5A,0xFA,0x1D,0xDF,0xA0,0xB5,0x49,0x44,0xAF,0xCD,0x24,0xA0,0x6C,0xEC,0x30,
+0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x30,0x65,0x31,0x0B,0x30,
+0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30,0x13,0x06,0x03,
+0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74,0x20,0x49,0x6E,
+0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0B,0x13,0x10,0x77,0x77,0x77,0x2E,
+0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31,0x24,0x30,0x22,
+0x06,0x03,0x55,0x04,0x03,0x13,0x1B,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74,0x20,
+0x41,0x73,0x73,0x75,0x72,0x65,0x64,0x20,0x49,0x44,0x20,0x52,0x6F,0x6F,0x74,0x20,
+0x47,0x33,0x30,0x1E,0x17,0x0D,0x31,0x33,0x30,0x38,0x30,0x31,0x31,0x32,0x30,0x30,
+0x30,0x30,0x5A,0x17,0x0D,0x33,0x38,0x30,0x31,0x31,0x35,0x31,0x32,0x30,0x30,0x30,
+0x30,0x5A,0x30,0x65,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,
+0x53,0x31,0x15,0x30,0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,
+0x43,0x65,0x72,0x74,0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,
+0x0B,0x13,0x10,0x77,0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,
+0x63,0x6F,0x6D,0x31,0x24,0x30,0x22,0x06,0x03,0x55,0x04,0x03,0x13,0x1B,0x44,0x69,
+0x67,0x69,0x43,0x65,0x72,0x74,0x20,0x41,0x73,0x73,0x75,0x72,0x65,0x64,0x20,0x49,
+0x44,0x20,0x52,0x6F,0x6F,0x74,0x20,0x47,0x33,0x30,0x76,0x30,0x10,0x06,0x07,0x2A,
+0x86,0x48,0xCE,0x3D,0x02,0x01,0x06,0x05,0x2B,0x81,0x04,0x00,0x22,0x03,0x62,0x00,
+0x04,0x19,0xE7,0xBC,0xAC,0x44,0x65,0xED,0xCD,0xB8,0x3F,0x58,0xFB,0x8D,0xB1,0x57,
+0xA9,0x44,0x2D,0x05,0x15,0xF2,0xEF,0x0B,0xFF,0x10,0x74,0x9F,0xB5,0x62,0x52,0x5F,
+0x66,0x7E,0x1F,0xE5,0xDC,0x1B,0x45,0x79,0x0B,0xCC,0xC6,0x53,0x0A,0x9D,0x8D,0x5D,
+0x02,0xD9,0xA9,0x59,0xDE,0x02,0x5A,0xF6,0x95,0x2A,0x0E,0x8D,0x38,0x4A,0x8A,0x49,
+0xC6,0xBC,0xC6,0x03,0x38,0x07,0x5F,0x55,0xDA,0x7E,0x09,0x6E,0xE2,0x7F,0x5E,0xD0,
+0x45,0x20,0x0F,0x59,0x76,0x10,0xD6,0xA0,0x24,0xF0,0x2D,0xDE,0x36,0xF2,0x6C,0x29,
+0x39,0xA3,0x42,0x30,0x40,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,
+0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,
+0x04,0x04,0x03,0x02,0x01,0x86,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,
+0x14,0xCB,0xD0,0xBD,0xA9,0xE1,0x98,0x05,0x51,0xA1,0x4D,0x37,0xA2,0x83,0x79,0xCE,
+0x8D,0x1D,0x2A,0xE4,0x84,0x30,0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,
+0x03,0x03,0x67,0x00,0x30,0x64,0x02,0x30,0x25,0xA4,0x81,0x45,0x02,0x6B,0x12,0x4B,
+0x75,0x74,0x4F,0xC8,0x23,0xE3,0x70,0xF2,0x75,0x72,0xDE,0x7C,0x89,0xF0,0xCF,0x91,
+0x72,0x61,0x9E,0x5E,0x10,0x92,0x59,0x56,0xB9,0x83,0xC7,0x10,0xE7,0x38,0xE9,0x58,
+0x26,0x36,0x7D,0xD5,0xE4,0x34,0x86,0x39,0x02,0x30,0x7C,0x36,0x53,0xF0,0x30,0xE5,
+0x62,0x63,0x3A,0x99,0xE2,0xB6,0xA3,0x3B,0x9B,0x34,0xFA,0x1E,0xDA,0x10,0x92,0x71,
+0x5E,0x91,0x13,0xA7,0xDD,0xA4,0x6E,0x92,0xCC,0x32,0xD6,0xF5,0x21,0x66,0xC7,0x2F,
+0xEA,0x96,0x63,0x6A,0x65,0x45,0x92,0x95,0x01,0xB4,
};
@@ -942,344 +2348,6 @@ const unsigned char COMODO_Certification_Authority_certificate[1057]={
};
-/* subject:/C=GB/ST=Greater Manchester/L=Salford/O=COMODO CA Limited/CN=COMODO ECC Certification Authority */
-/* issuer :/C=GB/ST=Greater Manchester/L=Salford/O=COMODO CA Limited/CN=COMODO ECC Certification Authority */
-
-
-const unsigned char COMODO_ECC_Certification_Authority_certificate[653]={
-0x30,0x82,0x02,0x89,0x30,0x82,0x02,0x0F,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x1F,
-0x47,0xAF,0xAA,0x62,0x00,0x70,0x50,0x54,0x4C,0x01,0x9E,0x9B,0x63,0x99,0x2A,0x30,
-0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x30,0x81,0x85,0x31,0x0B,
-0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x47,0x42,0x31,0x1B,0x30,0x19,0x06,
-0x03,0x55,0x04,0x08,0x13,0x12,0x47,0x72,0x65,0x61,0x74,0x65,0x72,0x20,0x4D,0x61,
-0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04,
-0x07,0x13,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,0x30,0x18,0x06,0x03,
-0x55,0x04,0x0A,0x13,0x11,0x43,0x4F,0x4D,0x4F,0x44,0x4F,0x20,0x43,0x41,0x20,0x4C,
-0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x2B,0x30,0x29,0x06,0x03,0x55,0x04,0x03,0x13,
-0x22,0x43,0x4F,0x4D,0x4F,0x44,0x4F,0x20,0x45,0x43,0x43,0x20,0x43,0x65,0x72,0x74,
-0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,
-0x69,0x74,0x79,0x30,0x1E,0x17,0x0D,0x30,0x38,0x30,0x33,0x30,0x36,0x30,0x30,0x30,
-0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x38,0x30,0x31,0x31,0x38,0x32,0x33,0x35,0x39,
-0x35,0x39,0x5A,0x30,0x81,0x85,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,
-0x02,0x47,0x42,0x31,0x1B,0x30,0x19,0x06,0x03,0x55,0x04,0x08,0x13,0x12,0x47,0x72,
-0x65,0x61,0x74,0x65,0x72,0x20,0x4D,0x61,0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,
-0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x07,0x13,0x07,0x53,0x61,0x6C,0x66,0x6F,
-0x72,0x64,0x31,0x1A,0x30,0x18,0x06,0x03,0x55,0x04,0x0A,0x13,0x11,0x43,0x4F,0x4D,
-0x4F,0x44,0x4F,0x20,0x43,0x41,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x2B,
-0x30,0x29,0x06,0x03,0x55,0x04,0x03,0x13,0x22,0x43,0x4F,0x4D,0x4F,0x44,0x4F,0x20,
-0x45,0x43,0x43,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,
-0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x76,0x30,0x10,0x06,
-0x07,0x2A,0x86,0x48,0xCE,0x3D,0x02,0x01,0x06,0x05,0x2B,0x81,0x04,0x00,0x22,0x03,
-0x62,0x00,0x04,0x03,0x47,0x7B,0x2F,0x75,0xC9,0x82,0x15,0x85,0xFB,0x75,0xE4,0x91,
-0x16,0xD4,0xAB,0x62,0x99,0xF5,0x3E,0x52,0x0B,0x06,0xCE,0x41,0x00,0x7F,0x97,0xE1,
-0x0A,0x24,0x3C,0x1D,0x01,0x04,0xEE,0x3D,0xD2,0x8D,0x09,0x97,0x0C,0xE0,0x75,0xE4,
-0xFA,0xFB,0x77,0x8A,0x2A,0xF5,0x03,0x60,0x4B,0x36,0x8B,0x16,0x23,0x16,0xAD,0x09,
-0x71,0xF4,0x4A,0xF4,0x28,0x50,0xB4,0xFE,0x88,0x1C,0x6E,0x3F,0x6C,0x2F,0x2F,0x09,
-0x59,0x5B,0xA5,0x5B,0x0B,0x33,0x99,0xE2,0xC3,0x3D,0x89,0xF9,0x6A,0x2C,0xEF,0xB2,
-0xD3,0x06,0xE9,0xA3,0x42,0x30,0x40,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,
-0x04,0x14,0x75,0x71,0xA7,0x19,0x48,0x19,0xBC,0x9D,0x9D,0xEA,0x41,0x47,0xDF,0x94,
-0xC4,0x48,0x77,0x99,0xD3,0x79,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,
-0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,
-0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,
-0x04,0x03,0x03,0x03,0x68,0x00,0x30,0x65,0x02,0x31,0x00,0xEF,0x03,0x5B,0x7A,0xAC,
-0xB7,0x78,0x0A,0x72,0xB7,0x88,0xDF,0xFF,0xB5,0x46,0x14,0x09,0x0A,0xFA,0xA0,0xE6,
-0x7D,0x08,0xC6,0x1A,0x87,0xBD,0x18,0xA8,0x73,0xBD,0x26,0xCA,0x60,0x0C,0x9D,0xCE,
-0x99,0x9F,0xCF,0x5C,0x0F,0x30,0xE1,0xBE,0x14,0x31,0xEA,0x02,0x30,0x14,0xF4,0x93,
-0x3C,0x49,0xA7,0x33,0x7A,0x90,0x46,0x47,0xB3,0x63,0x7D,0x13,0x9B,0x4E,0xB7,0x6F,
-0x18,0x37,0x80,0x53,0xFE,0xDD,0x20,0xE0,0x35,0x9A,0x36,0xD1,0xC7,0x01,0xB9,0xE6,
-0xDC,0xDD,0xF3,0xFF,0x1D,0x2C,0x3A,0x16,0x57,0xD9,0x92,0x39,0xD6,
-};
-
-
-/* subject:/C=GB/ST=Greater Manchester/L=Salford/O=Comodo CA Limited/CN=Secure Certificate Services */
-/* issuer :/C=GB/ST=Greater Manchester/L=Salford/O=Comodo CA Limited/CN=Secure Certificate Services */
-
-
-const unsigned char Comodo_Secure_Services_root_certificate[1091]={
-0x30,0x82,0x04,0x3F,0x30,0x82,0x03,0x27,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01,
-0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,
-0x7E,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x47,0x42,0x31,0x1B,
-0x30,0x19,0x06,0x03,0x55,0x04,0x08,0x0C,0x12,0x47,0x72,0x65,0x61,0x74,0x65,0x72,
-0x20,0x4D,0x61,0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31,0x10,0x30,0x0E,0x06,
-0x03,0x55,0x04,0x07,0x0C,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,0x30,
-0x18,0x06,0x03,0x55,0x04,0x0A,0x0C,0x11,0x43,0x6F,0x6D,0x6F,0x64,0x6F,0x20,0x43,
-0x41,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x24,0x30,0x22,0x06,0x03,0x55,
-0x04,0x03,0x0C,0x1B,0x53,0x65,0x63,0x75,0x72,0x65,0x20,0x43,0x65,0x72,0x74,0x69,
-0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x30,
-0x1E,0x17,0x0D,0x30,0x34,0x30,0x31,0x30,0x31,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,
-0x17,0x0D,0x32,0x38,0x31,0x32,0x33,0x31,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,
-0x7E,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x47,0x42,0x31,0x1B,
-0x30,0x19,0x06,0x03,0x55,0x04,0x08,0x0C,0x12,0x47,0x72,0x65,0x61,0x74,0x65,0x72,
-0x20,0x4D,0x61,0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31,0x10,0x30,0x0E,0x06,
-0x03,0x55,0x04,0x07,0x0C,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,0x30,
-0x18,0x06,0x03,0x55,0x04,0x0A,0x0C,0x11,0x43,0x6F,0x6D,0x6F,0x64,0x6F,0x20,0x43,
-0x41,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x24,0x30,0x22,0x06,0x03,0x55,
-0x04,0x03,0x0C,0x1B,0x53,0x65,0x63,0x75,0x72,0x65,0x20,0x43,0x65,0x72,0x74,0x69,
-0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x30,
-0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,
-0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,
-0xC0,0x71,0x33,0x82,0x8A,0xD0,0x70,0xEB,0x73,0x87,0x82,0x40,0xD5,0x1D,0xE4,0xCB,
-0xC9,0x0E,0x42,0x90,0xF9,0xDE,0x34,0xB9,0xA1,0xBA,0x11,0xF4,0x25,0x85,0xF3,0xCC,
-0x72,0x6D,0xF2,0x7B,0x97,0x6B,0xB3,0x07,0xF1,0x77,0x24,0x91,0x5F,0x25,0x8F,0xF6,
-0x74,0x3D,0xE4,0x80,0xC2,0xF8,0x3C,0x0D,0xF3,0xBF,0x40,0xEA,0xF7,0xC8,0x52,0xD1,
-0x72,0x6F,0xEF,0xC8,0xAB,0x41,0xB8,0x6E,0x2E,0x17,0x2A,0x95,0x69,0x0C,0xCD,0xD2,
-0x1E,0x94,0x7B,0x2D,0x94,0x1D,0xAA,0x75,0xD7,0xB3,0x98,0xCB,0xAC,0xBC,0x64,0x53,
-0x40,0xBC,0x8F,0xAC,0xAC,0x36,0xCB,0x5C,0xAD,0xBB,0xDD,0xE0,0x94,0x17,0xEC,0xD1,
-0x5C,0xD0,0xBF,0xEF,0xA5,0x95,0xC9,0x90,0xC5,0xB0,0xAC,0xFB,0x1B,0x43,0xDF,0x7A,
-0x08,0x5D,0xB7,0xB8,0xF2,0x40,0x1B,0x2B,0x27,0x9E,0x50,0xCE,0x5E,0x65,0x82,0x88,
-0x8C,0x5E,0xD3,0x4E,0x0C,0x7A,0xEA,0x08,0x91,0xB6,0x36,0xAA,0x2B,0x42,0xFB,0xEA,
-0xC2,0xA3,0x39,0xE5,0xDB,0x26,0x38,0xAD,0x8B,0x0A,0xEE,0x19,0x63,0xC7,0x1C,0x24,
-0xDF,0x03,0x78,0xDA,0xE6,0xEA,0xC1,0x47,0x1A,0x0B,0x0B,0x46,0x09,0xDD,0x02,0xFC,
-0xDE,0xCB,0x87,0x5F,0xD7,0x30,0x63,0x68,0xA1,0xAE,0xDC,0x32,0xA1,0xBA,0xBE,0xFE,
-0x44,0xAB,0x68,0xB6,0xA5,0x17,0x15,0xFD,0xBD,0xD5,0xA7,0xA7,0x9A,0xE4,0x44,0x33,
-0xE9,0x88,0x8E,0xFC,0xED,0x51,0xEB,0x93,0x71,0x4E,0xAD,0x01,0xE7,0x44,0x8E,0xAB,
-0x2D,0xCB,0xA8,0xFE,0x01,0x49,0x48,0xF0,0xC0,0xDD,0xC7,0x68,0xD8,0x92,0xFE,0x3D,
-0x02,0x03,0x01,0x00,0x01,0xA3,0x81,0xC7,0x30,0x81,0xC4,0x30,0x1D,0x06,0x03,0x55,
-0x1D,0x0E,0x04,0x16,0x04,0x14,0x3C,0xD8,0x93,0x88,0xC2,0xC0,0x82,0x09,0xCC,0x01,
-0x99,0x06,0x93,0x20,0xE9,0x9E,0x70,0x09,0x63,0x4F,0x30,0x0E,0x06,0x03,0x55,0x1D,
-0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,
-0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x81,0x81,0x06,0x03,
-0x55,0x1D,0x1F,0x04,0x7A,0x30,0x78,0x30,0x3B,0xA0,0x39,0xA0,0x37,0x86,0x35,0x68,
-0x74,0x74,0x70,0x3A,0x2F,0x2F,0x63,0x72,0x6C,0x2E,0x63,0x6F,0x6D,0x6F,0x64,0x6F,
-0x63,0x61,0x2E,0x63,0x6F,0x6D,0x2F,0x53,0x65,0x63,0x75,0x72,0x65,0x43,0x65,0x72,
-0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,
-0x2E,0x63,0x72,0x6C,0x30,0x39,0xA0,0x37,0xA0,0x35,0x86,0x33,0x68,0x74,0x74,0x70,
-0x3A,0x2F,0x2F,0x63,0x72,0x6C,0x2E,0x63,0x6F,0x6D,0x6F,0x64,0x6F,0x2E,0x6E,0x65,
-0x74,0x2F,0x53,0x65,0x63,0x75,0x72,0x65,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,
-0x61,0x74,0x65,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x2E,0x63,0x72,0x6C,0x30,
-0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,
-0x01,0x01,0x00,0x87,0x01,0x6D,0x23,0x1D,0x7E,0x5B,0x17,0x7D,0xC1,0x61,0x32,0xCF,
-0x8F,0xE7,0xF3,0x8A,0x94,0x59,0x66,0xE0,0x9E,0x28,0xA8,0x5E,0xD3,0xB7,0xF4,0x34,
-0xE6,0xAA,0x39,0xB2,0x97,0x16,0xC5,0x82,0x6F,0x32,0xA4,0xE9,0x8C,0xE7,0xAF,0xFD,
-0xEF,0xC2,0xE8,0xB9,0x4B,0xAA,0xA3,0xF4,0xE6,0xDA,0x8D,0x65,0x21,0xFB,0xBA,0x80,
-0xEB,0x26,0x28,0x85,0x1A,0xFE,0x39,0x8C,0xDE,0x5B,0x04,0x04,0xB4,0x54,0xF9,0xA3,
-0x67,0x9E,0x41,0xFA,0x09,0x52,0xCC,0x05,0x48,0xA8,0xC9,0x3F,0x21,0x04,0x1E,0xCE,
-0x48,0x6B,0xFC,0x85,0xE8,0xC2,0x7B,0xAF,0x7F,0xB7,0xCC,0xF8,0x5F,0x3A,0xFD,0x35,
-0xC6,0x0D,0xEF,0x97,0xDC,0x4C,0xAB,0x11,0xE1,0x6B,0xCB,0x31,0xD1,0x6C,0xFB,0x48,
-0x80,0xAB,0xDC,0x9C,0x37,0xB8,0x21,0x14,0x4B,0x0D,0x71,0x3D,0xEC,0x83,0x33,0x6E,
-0xD1,0x6E,0x32,0x16,0xEC,0x98,0xC7,0x16,0x8B,0x59,0xA6,0x34,0xAB,0x05,0x57,0x2D,
-0x93,0xF7,0xAA,0x13,0xCB,0xD2,0x13,0xE2,0xB7,0x2E,0x3B,0xCD,0x6B,0x50,0x17,0x09,
-0x68,0x3E,0xB5,0x26,0x57,0xEE,0xB6,0xE0,0xB6,0xDD,0xB9,0x29,0x80,0x79,0x7D,0x8F,
-0xA3,0xF0,0xA4,0x28,0xA4,0x15,0xC4,0x85,0xF4,0x27,0xD4,0x6B,0xBF,0xE5,0x5C,0xE4,
-0x65,0x02,0x76,0x54,0xB4,0xE3,0x37,0x66,0x24,0xD3,0x19,0x61,0xC8,0x52,0x10,0xE5,
-0x8B,0x37,0x9A,0xB9,0xA9,0xF9,0x1D,0xBF,0xEA,0x99,0x92,0x61,0x96,0xFF,0x01,0xCD,
-0xA1,0x5F,0x0D,0xBC,0x71,0xBC,0x0E,0xAC,0x0B,0x1D,0x47,0x45,0x1D,0xC1,0xEC,0x7C,
-0xEC,0xFD,0x29,
-};
-
-
-/* subject:/C=GB/ST=Greater Manchester/L=Salford/O=Comodo CA Limited/CN=Trusted Certificate Services */
-/* issuer :/C=GB/ST=Greater Manchester/L=Salford/O=Comodo CA Limited/CN=Trusted Certificate Services */
-
-
-const unsigned char Comodo_Trusted_Services_root_certificate[1095]={
-0x30,0x82,0x04,0x43,0x30,0x82,0x03,0x2B,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01,
-0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,
-0x7F,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x47,0x42,0x31,0x1B,
-0x30,0x19,0x06,0x03,0x55,0x04,0x08,0x0C,0x12,0x47,0x72,0x65,0x61,0x74,0x65,0x72,
-0x20,0x4D,0x61,0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31,0x10,0x30,0x0E,0x06,
-0x03,0x55,0x04,0x07,0x0C,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,0x30,
-0x18,0x06,0x03,0x55,0x04,0x0A,0x0C,0x11,0x43,0x6F,0x6D,0x6F,0x64,0x6F,0x20,0x43,
-0x41,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x25,0x30,0x23,0x06,0x03,0x55,
-0x04,0x03,0x0C,0x1C,0x54,0x72,0x75,0x73,0x74,0x65,0x64,0x20,0x43,0x65,0x72,0x74,
-0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,
-0x30,0x1E,0x17,0x0D,0x30,0x34,0x30,0x31,0x30,0x31,0x30,0x30,0x30,0x30,0x30,0x30,
-0x5A,0x17,0x0D,0x32,0x38,0x31,0x32,0x33,0x31,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,
-0x30,0x7F,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x47,0x42,0x31,
-0x1B,0x30,0x19,0x06,0x03,0x55,0x04,0x08,0x0C,0x12,0x47,0x72,0x65,0x61,0x74,0x65,
-0x72,0x20,0x4D,0x61,0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31,0x10,0x30,0x0E,
-0x06,0x03,0x55,0x04,0x07,0x0C,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,
-0x30,0x18,0x06,0x03,0x55,0x04,0x0A,0x0C,0x11,0x43,0x6F,0x6D,0x6F,0x64,0x6F,0x20,
-0x43,0x41,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x25,0x30,0x23,0x06,0x03,
-0x55,0x04,0x03,0x0C,0x1C,0x54,0x72,0x75,0x73,0x74,0x65,0x64,0x20,0x43,0x65,0x72,
-0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,
-0x73,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,
-0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,
-0x01,0x00,0xDF,0x71,0x6F,0x36,0x58,0x53,0x5A,0xF2,0x36,0x54,0x57,0x80,0xC4,0x74,
-0x08,0x20,0xED,0x18,0x7F,0x2A,0x1D,0xE6,0x35,0x9A,0x1E,0x25,0xAC,0x9C,0xE5,0x96,
-0x7E,0x72,0x52,0xA0,0x15,0x42,0xDB,0x59,0xDD,0x64,0x7A,0x1A,0xD0,0xB8,0x7B,0xDD,
-0x39,0x15,0xBC,0x55,0x48,0xC4,0xED,0x3A,0x00,0xEA,0x31,0x11,0xBA,0xF2,0x71,0x74,
-0x1A,0x67,0xB8,0xCF,0x33,0xCC,0xA8,0x31,0xAF,0xA3,0xE3,0xD7,0x7F,0xBF,0x33,0x2D,
-0x4C,0x6A,0x3C,0xEC,0x8B,0xC3,0x92,0xD2,0x53,0x77,0x24,0x74,0x9C,0x07,0x6E,0x70,
-0xFC,0xBD,0x0B,0x5B,0x76,0xBA,0x5F,0xF2,0xFF,0xD7,0x37,0x4B,0x4A,0x60,0x78,0xF7,
-0xF0,0xFA,0xCA,0x70,0xB4,0xEA,0x59,0xAA,0xA3,0xCE,0x48,0x2F,0xA9,0xC3,0xB2,0x0B,
-0x7E,0x17,0x72,0x16,0x0C,0xA6,0x07,0x0C,0x1B,0x38,0xCF,0xC9,0x62,0xB7,0x3F,0xA0,
-0x93,0xA5,0x87,0x41,0xF2,0xB7,0x70,0x40,0x77,0xD8,0xBE,0x14,0x7C,0xE3,0xA8,0xC0,
-0x7A,0x8E,0xE9,0x63,0x6A,0xD1,0x0F,0x9A,0xC6,0xD2,0xF4,0x8B,0x3A,0x14,0x04,0x56,
-0xD4,0xED,0xB8,0xCC,0x6E,0xF5,0xFB,0xE2,0x2C,0x58,0xBD,0x7F,0x4F,0x6B,0x2B,0xF7,
-0x60,0x24,0x58,0x24,0xCE,0x26,0xEF,0x34,0x91,0x3A,0xD5,0xE3,0x81,0xD0,0xB2,0xF0,
-0x04,0x02,0xD7,0x5B,0xB7,0x3E,0x92,0xAC,0x6B,0x12,0x8A,0xF9,0xE4,0x05,0xB0,0x3B,
-0x91,0x49,0x5C,0xB2,0xEB,0x53,0xEA,0xF8,0x9F,0x47,0x86,0xEE,0xBF,0x95,0xC0,0xC0,
-0x06,0x9F,0xD2,0x5B,0x5E,0x11,0x1B,0xF4,0xC7,0x04,0x35,0x29,0xD2,0x55,0x5C,0xE4,
-0xED,0xEB,0x02,0x03,0x01,0x00,0x01,0xA3,0x81,0xC9,0x30,0x81,0xC6,0x30,0x1D,0x06,
-0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xC5,0x7B,0x58,0xBD,0xED,0xDA,0x25,0x69,
-0xD2,0xF7,0x59,0x16,0xA8,0xB3,0x32,0xC0,0x7B,0x27,0x5B,0xF4,0x30,0x0E,0x06,0x03,
-0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,
-0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x81,0x83,
-0x06,0x03,0x55,0x1D,0x1F,0x04,0x7C,0x30,0x7A,0x30,0x3C,0xA0,0x3A,0xA0,0x38,0x86,
-0x36,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x63,0x72,0x6C,0x2E,0x63,0x6F,0x6D,0x6F,
-0x64,0x6F,0x63,0x61,0x2E,0x63,0x6F,0x6D,0x2F,0x54,0x72,0x75,0x73,0x74,0x65,0x64,
-0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x53,0x65,0x72,0x76,0x69,
-0x63,0x65,0x73,0x2E,0x63,0x72,0x6C,0x30,0x3A,0xA0,0x38,0xA0,0x36,0x86,0x34,0x68,
-0x74,0x74,0x70,0x3A,0x2F,0x2F,0x63,0x72,0x6C,0x2E,0x63,0x6F,0x6D,0x6F,0x64,0x6F,
-0x2E,0x6E,0x65,0x74,0x2F,0x54,0x72,0x75,0x73,0x74,0x65,0x64,0x43,0x65,0x72,0x74,
-0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x2E,
-0x63,0x72,0x6C,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,
-0x05,0x00,0x03,0x82,0x01,0x01,0x00,0xC8,0x93,0x81,0x3B,0x89,0xB4,0xAF,0xB8,0x84,
-0x12,0x4C,0x8D,0xD2,0xF0,0xDB,0x70,0xBA,0x57,0x86,0x15,0x34,0x10,0xB9,0x2F,0x7F,
-0x1E,0xB0,0xA8,0x89,0x60,0xA1,0x8A,0xC2,0x77,0x0C,0x50,0x4A,0x9B,0x00,0x8B,0xD8,
-0x8B,0xF4,0x41,0xE2,0xD0,0x83,0x8A,0x4A,0x1C,0x14,0x06,0xB0,0xA3,0x68,0x05,0x70,
-0x31,0x30,0xA7,0x53,0x9B,0x0E,0xE9,0x4A,0xA0,0x58,0x69,0x67,0x0E,0xAE,0x9D,0xF6,
-0xA5,0x2C,0x41,0xBF,0x3C,0x06,0x6B,0xE4,0x59,0xCC,0x6D,0x10,0xF1,0x96,0x6F,0x1F,
-0xDF,0xF4,0x04,0x02,0xA4,0x9F,0x45,0x3E,0xC8,0xD8,0xFA,0x36,0x46,0x44,0x50,0x3F,
-0x82,0x97,0x91,0x1F,0x28,0xDB,0x18,0x11,0x8C,0x2A,0xE4,0x65,0x83,0x57,0x12,0x12,
-0x8C,0x17,0x3F,0x94,0x36,0xFE,0x5D,0xB0,0xC0,0x04,0x77,0x13,0xB8,0xF4,0x15,0xD5,
-0x3F,0x38,0xCC,0x94,0x3A,0x55,0xD0,0xAC,0x98,0xF5,0xBA,0x00,0x5F,0xE0,0x86,0x19,
-0x81,0x78,0x2F,0x28,0xC0,0x7E,0xD3,0xCC,0x42,0x0A,0xF5,0xAE,0x50,0xA0,0xD1,0x3E,
-0xC6,0xA1,0x71,0xEC,0x3F,0xA0,0x20,0x8C,0x66,0x3A,0x89,0xB4,0x8E,0xD4,0xD8,0xB1,
-0x4D,0x25,0x47,0xEE,0x2F,0x88,0xC8,0xB5,0xE1,0x05,0x45,0xC0,0xBE,0x14,0x71,0xDE,
-0x7A,0xFD,0x8E,0x7B,0x7D,0x4D,0x08,0x96,0xA5,0x12,0x73,0xF0,0x2D,0xCA,0x37,0x27,
-0x74,0x12,0x27,0x4C,0xCB,0xB6,0x97,0xE9,0xD9,0xAE,0x08,0x6D,0x5A,0x39,0x40,0xDD,
-0x05,0x47,0x75,0x6A,0x5A,0x21,0xB3,0xA3,0x18,0xCF,0x4E,0xF7,0x2E,0x57,0xB7,0x98,
-0x70,0x5E,0xC8,0xC4,0x78,0xB0,0x62,
-};
-
-
-/* subject:/O=Cybertrust, Inc/CN=Cybertrust Global Root */
-/* issuer :/O=Cybertrust, Inc/CN=Cybertrust Global Root */
-
-
-const unsigned char Cybertrust_Global_Root_certificate[933]={
-0x30,0x82,0x03,0xA1,0x30,0x82,0x02,0x89,0xA0,0x03,0x02,0x01,0x02,0x02,0x0B,0x04,
-0x00,0x00,0x00,0x00,0x01,0x0F,0x85,0xAA,0x2D,0x48,0x30,0x0D,0x06,0x09,0x2A,0x86,
-0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x3B,0x31,0x18,0x30,0x16,0x06,
-0x03,0x55,0x04,0x0A,0x13,0x0F,0x43,0x79,0x62,0x65,0x72,0x74,0x72,0x75,0x73,0x74,
-0x2C,0x20,0x49,0x6E,0x63,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x03,0x13,0x16,
-0x43,0x79,0x62,0x65,0x72,0x74,0x72,0x75,0x73,0x74,0x20,0x47,0x6C,0x6F,0x62,0x61,
-0x6C,0x20,0x52,0x6F,0x6F,0x74,0x30,0x1E,0x17,0x0D,0x30,0x36,0x31,0x32,0x31,0x35,
-0x30,0x38,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32,0x31,0x31,0x32,0x31,0x35,0x30,
-0x38,0x30,0x30,0x30,0x30,0x5A,0x30,0x3B,0x31,0x18,0x30,0x16,0x06,0x03,0x55,0x04,
-0x0A,0x13,0x0F,0x43,0x79,0x62,0x65,0x72,0x74,0x72,0x75,0x73,0x74,0x2C,0x20,0x49,
-0x6E,0x63,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x03,0x13,0x16,0x43,0x79,0x62,
-0x65,0x72,0x74,0x72,0x75,0x73,0x74,0x20,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x52,
-0x6F,0x6F,0x74,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,
-0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,
-0x82,0x01,0x01,0x00,0xF8,0xC8,0xBC,0xBD,0x14,0x50,0x66,0x13,0xFF,0xF0,0xD3,0x79,
-0xEC,0x23,0xF2,0xB7,0x1A,0xC7,0x8E,0x85,0xF1,0x12,0x73,0xA6,0x19,0xAA,0x10,0xDB,
-0x9C,0xA2,0x65,0x74,0x5A,0x77,0x3E,0x51,0x7D,0x56,0xF6,0xDC,0x23,0xB6,0xD4,0xED,
-0x5F,0x58,0xB1,0x37,0x4D,0xD5,0x49,0x0E,0x6E,0xF5,0x6A,0x87,0xD6,0xD2,0x8C,0xD2,
-0x27,0xC6,0xE2,0xFF,0x36,0x9F,0x98,0x65,0xA0,0x13,0x4E,0xC6,0x2A,0x64,0x9B,0xD5,
-0x90,0x12,0xCF,0x14,0x06,0xF4,0x3B,0xE3,0xD4,0x28,0xBE,0xE8,0x0E,0xF8,0xAB,0x4E,
-0x48,0x94,0x6D,0x8E,0x95,0x31,0x10,0x5C,0xED,0xA2,0x2D,0xBD,0xD5,0x3A,0x6D,0xB2,
-0x1C,0xBB,0x60,0xC0,0x46,0x4B,0x01,0xF5,0x49,0xAE,0x7E,0x46,0x8A,0xD0,0x74,0x8D,
-0xA1,0x0C,0x02,0xCE,0xEE,0xFC,0xE7,0x8F,0xB8,0x6B,0x66,0xF3,0x7F,0x44,0x00,0xBF,
-0x66,0x25,0x14,0x2B,0xDD,0x10,0x30,0x1D,0x07,0x96,0x3F,0x4D,0xF6,0x6B,0xB8,0x8F,
-0xB7,0x7B,0x0C,0xA5,0x38,0xEB,0xDE,0x47,0xDB,0xD5,0x5D,0x39,0xFC,0x88,0xA7,0xF3,
-0xD7,0x2A,0x74,0xF1,0xE8,0x5A,0xA2,0x3B,0x9F,0x50,0xBA,0xA6,0x8C,0x45,0x35,0xC2,
-0x50,0x65,0x95,0xDC,0x63,0x82,0xEF,0xDD,0xBF,0x77,0x4D,0x9C,0x62,0xC9,0x63,0x73,
-0x16,0xD0,0x29,0x0F,0x49,0xA9,0x48,0xF0,0xB3,0xAA,0xB7,0x6C,0xC5,0xA7,0x30,0x39,
-0x40,0x5D,0xAE,0xC4,0xE2,0x5D,0x26,0x53,0xF0,0xCE,0x1C,0x23,0x08,0x61,0xA8,0x94,
-0x19,0xBA,0x04,0x62,0x40,0xEC,0x1F,0x38,0x70,0x77,0x12,0x06,0x71,0xA7,0x30,0x18,
-0x5D,0x25,0x27,0xA5,0x02,0x03,0x01,0x00,0x01,0xA3,0x81,0xA5,0x30,0x81,0xA2,0x30,
-0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,
-0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,
-0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xB6,0x08,0x7B,0x0D,0x7A,
-0xCC,0xAC,0x20,0x4C,0x86,0x56,0x32,0x5E,0xCF,0xAB,0x6E,0x85,0x2D,0x70,0x57,0x30,
-0x3F,0x06,0x03,0x55,0x1D,0x1F,0x04,0x38,0x30,0x36,0x30,0x34,0xA0,0x32,0xA0,0x30,
-0x86,0x2E,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x77,0x77,0x77,0x32,0x2E,0x70,0x75,
-0x62,0x6C,0x69,0x63,0x2D,0x74,0x72,0x75,0x73,0x74,0x2E,0x63,0x6F,0x6D,0x2F,0x63,
-0x72,0x6C,0x2F,0x63,0x74,0x2F,0x63,0x74,0x72,0x6F,0x6F,0x74,0x2E,0x63,0x72,0x6C,
-0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,0xB6,0x08,0x7B,
-0x0D,0x7A,0xCC,0xAC,0x20,0x4C,0x86,0x56,0x32,0x5E,0xCF,0xAB,0x6E,0x85,0x2D,0x70,
-0x57,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,
-0x03,0x82,0x01,0x01,0x00,0x56,0xEF,0x0A,0x23,0xA0,0x54,0x4E,0x95,0x97,0xC9,0xF8,
-0x89,0xDA,0x45,0xC1,0xD4,0xA3,0x00,0x25,0xF4,0x1F,0x13,0xAB,0xB7,0xA3,0x85,0x58,
-0x69,0xC2,0x30,0xAD,0xD8,0x15,0x8A,0x2D,0xE3,0xC9,0xCD,0x81,0x5A,0xF8,0x73,0x23,
-0x5A,0xA7,0x7C,0x05,0xF3,0xFD,0x22,0x3B,0x0E,0xD1,0x06,0xC4,0xDB,0x36,0x4C,0x73,
-0x04,0x8E,0xE5,0xB0,0x22,0xE4,0xC5,0xF3,0x2E,0xA5,0xD9,0x23,0xE3,0xB8,0x4E,0x4A,
-0x20,0xA7,0x6E,0x02,0x24,0x9F,0x22,0x60,0x67,0x7B,0x8B,0x1D,0x72,0x09,0xC5,0x31,
-0x5C,0xE9,0x79,0x9F,0x80,0x47,0x3D,0xAD,0xA1,0x0B,0x07,0x14,0x3D,0x47,0xFF,0x03,
-0x69,0x1A,0x0C,0x0B,0x44,0xE7,0x63,0x25,0xA7,0x7F,0xB2,0xC9,0xB8,0x76,0x84,0xED,
-0x23,0xF6,0x7D,0x07,0xAB,0x45,0x7E,0xD3,0xDF,0xB3,0xBF,0xE9,0x8A,0xB6,0xCD,0xA8,
-0xA2,0x67,0x2B,0x52,0xD5,0xB7,0x65,0xF0,0x39,0x4C,0x63,0xA0,0x91,0x79,0x93,0x52,
-0x0F,0x54,0xDD,0x83,0xBB,0x9F,0xD1,0x8F,0xA7,0x53,0x73,0xC3,0xCB,0xFF,0x30,0xEC,
-0x7C,0x04,0xB8,0xD8,0x44,0x1F,0x93,0x5F,0x71,0x09,0x22,0xB7,0x6E,0x3E,0xEA,0x1C,
-0x03,0x4E,0x9D,0x1A,0x20,0x61,0xFB,0x81,0x37,0xEC,0x5E,0xFC,0x0A,0x45,0xAB,0xD7,
-0xE7,0x17,0x55,0xD0,0xA0,0xEA,0x60,0x9B,0xA6,0xF6,0xE3,0x8C,0x5B,0x29,0xC2,0x06,
-0x60,0x14,0x9D,0x2D,0x97,0x4C,0xA9,0x93,0x15,0x9D,0x61,0xC4,0x01,0x5F,0x48,0xD6,
-0x58,0xBD,0x56,0x31,0x12,0x4E,0x11,0xC8,0x21,0xE0,0xB3,0x11,0x91,0x65,0xDB,0xB4,
-0xA6,0x88,0x38,0xCE,0x55,
-};
-
-
-/* subject:/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Assured ID Root CA */
-/* issuer :/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Assured ID Root CA */
-
-
-const unsigned char DigiCert_Assured_ID_Root_CA_certificate[955]={
-0x30,0x82,0x03,0xB7,0x30,0x82,0x02,0x9F,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x0C,
-0xE7,0xE0,0xE5,0x17,0xD8,0x46,0xFE,0x8F,0xE5,0x60,0xFC,0x1B,0xF0,0x30,0x39,0x30,
-0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x65,
-0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30,
-0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74,
-0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0B,0x13,0x10,0x77,
-0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31,
-0x24,0x30,0x22,0x06,0x03,0x55,0x04,0x03,0x13,0x1B,0x44,0x69,0x67,0x69,0x43,0x65,
-0x72,0x74,0x20,0x41,0x73,0x73,0x75,0x72,0x65,0x64,0x20,0x49,0x44,0x20,0x52,0x6F,
-0x6F,0x74,0x20,0x43,0x41,0x30,0x1E,0x17,0x0D,0x30,0x36,0x31,0x31,0x31,0x30,0x30,
-0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x31,0x31,0x31,0x31,0x30,0x30,0x30,
-0x30,0x30,0x30,0x30,0x5A,0x30,0x65,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,
-0x13,0x02,0x55,0x53,0x31,0x15,0x30,0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,
-0x69,0x67,0x69,0x43,0x65,0x72,0x74,0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,
-0x03,0x55,0x04,0x0B,0x13,0x10,0x77,0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,
-0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31,0x24,0x30,0x22,0x06,0x03,0x55,0x04,0x03,0x13,
-0x1B,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74,0x20,0x41,0x73,0x73,0x75,0x72,0x65,
-0x64,0x20,0x49,0x44,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x30,0x82,0x01,0x22,
-0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,
-0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xAD,0x0E,0x15,
-0xCE,0xE4,0x43,0x80,0x5C,0xB1,0x87,0xF3,0xB7,0x60,0xF9,0x71,0x12,0xA5,0xAE,0xDC,
-0x26,0x94,0x88,0xAA,0xF4,0xCE,0xF5,0x20,0x39,0x28,0x58,0x60,0x0C,0xF8,0x80,0xDA,
-0xA9,0x15,0x95,0x32,0x61,0x3C,0xB5,0xB1,0x28,0x84,0x8A,0x8A,0xDC,0x9F,0x0A,0x0C,
-0x83,0x17,0x7A,0x8F,0x90,0xAC,0x8A,0xE7,0x79,0x53,0x5C,0x31,0x84,0x2A,0xF6,0x0F,
-0x98,0x32,0x36,0x76,0xCC,0xDE,0xDD,0x3C,0xA8,0xA2,0xEF,0x6A,0xFB,0x21,0xF2,0x52,
-0x61,0xDF,0x9F,0x20,0xD7,0x1F,0xE2,0xB1,0xD9,0xFE,0x18,0x64,0xD2,0x12,0x5B,0x5F,
-0xF9,0x58,0x18,0x35,0xBC,0x47,0xCD,0xA1,0x36,0xF9,0x6B,0x7F,0xD4,0xB0,0x38,0x3E,
-0xC1,0x1B,0xC3,0x8C,0x33,0xD9,0xD8,0x2F,0x18,0xFE,0x28,0x0F,0xB3,0xA7,0x83,0xD6,
-0xC3,0x6E,0x44,0xC0,0x61,0x35,0x96,0x16,0xFE,0x59,0x9C,0x8B,0x76,0x6D,0xD7,0xF1,
-0xA2,0x4B,0x0D,0x2B,0xFF,0x0B,0x72,0xDA,0x9E,0x60,0xD0,0x8E,0x90,0x35,0xC6,0x78,
-0x55,0x87,0x20,0xA1,0xCF,0xE5,0x6D,0x0A,0xC8,0x49,0x7C,0x31,0x98,0x33,0x6C,0x22,
-0xE9,0x87,0xD0,0x32,0x5A,0xA2,0xBA,0x13,0x82,0x11,0xED,0x39,0x17,0x9D,0x99,0x3A,
-0x72,0xA1,0xE6,0xFA,0xA4,0xD9,0xD5,0x17,0x31,0x75,0xAE,0x85,0x7D,0x22,0xAE,0x3F,
-0x01,0x46,0x86,0xF6,0x28,0x79,0xC8,0xB1,0xDA,0xE4,0x57,0x17,0xC4,0x7E,0x1C,0x0E,
-0xB0,0xB4,0x92,0xA6,0x56,0xB3,0xBD,0xB2,0x97,0xED,0xAA,0xA7,0xF0,0xB7,0xC5,0xA8,
-0x3F,0x95,0x16,0xD0,0xFF,0xA1,0x96,0xEB,0x08,0x5F,0x18,0x77,0x4F,0x02,0x03,0x01,
-0x00,0x01,0xA3,0x63,0x30,0x61,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,
-0x04,0x04,0x03,0x02,0x01,0x86,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,
-0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,
-0x04,0x14,0x45,0xEB,0xA2,0xAF,0xF4,0x92,0xCB,0x82,0x31,0x2D,0x51,0x8B,0xA7,0xA7,
-0x21,0x9D,0xF3,0x6D,0xC8,0x0F,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,
-0x16,0x80,0x14,0x45,0xEB,0xA2,0xAF,0xF4,0x92,0xCB,0x82,0x31,0x2D,0x51,0x8B,0xA7,
-0xA7,0x21,0x9D,0xF3,0x6D,0xC8,0x0F,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,
-0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0xA2,0x0E,0xBC,0xDF,0xE2,
-0xED,0xF0,0xE3,0x72,0x73,0x7A,0x64,0x94,0xBF,0xF7,0x72,0x66,0xD8,0x32,0xE4,0x42,
-0x75,0x62,0xAE,0x87,0xEB,0xF2,0xD5,0xD9,0xDE,0x56,0xB3,0x9F,0xCC,0xCE,0x14,0x28,
-0xB9,0x0D,0x97,0x60,0x5C,0x12,0x4C,0x58,0xE4,0xD3,0x3D,0x83,0x49,0x45,0x58,0x97,
-0x35,0x69,0x1A,0xA8,0x47,0xEA,0x56,0xC6,0x79,0xAB,0x12,0xD8,0x67,0x81,0x84,0xDF,
-0x7F,0x09,0x3C,0x94,0xE6,0xB8,0x26,0x2C,0x20,0xBD,0x3D,0xB3,0x28,0x89,0xF7,0x5F,
-0xFF,0x22,0xE2,0x97,0x84,0x1F,0xE9,0x65,0xEF,0x87,0xE0,0xDF,0xC1,0x67,0x49,0xB3,
-0x5D,0xEB,0xB2,0x09,0x2A,0xEB,0x26,0xED,0x78,0xBE,0x7D,0x3F,0x2B,0xF3,0xB7,0x26,
-0x35,0x6D,0x5F,0x89,0x01,0xB6,0x49,0x5B,0x9F,0x01,0x05,0x9B,0xAB,0x3D,0x25,0xC1,
-0xCC,0xB6,0x7F,0xC2,0xF1,0x6F,0x86,0xC6,0xFA,0x64,0x68,0xEB,0x81,0x2D,0x94,0xEB,
-0x42,0xB7,0xFA,0x8C,0x1E,0xDD,0x62,0xF1,0xBE,0x50,0x67,0xB7,0x6C,0xBD,0xF3,0xF1,
-0x1F,0x6B,0x0C,0x36,0x07,0x16,0x7F,0x37,0x7C,0xA9,0x5B,0x6D,0x7A,0xF1,0x12,0x46,
-0x60,0x83,0xD7,0x27,0x04,0xBE,0x4B,0xCE,0x97,0xBE,0xC3,0x67,0x2A,0x68,0x11,0xDF,
-0x80,0xE7,0x0C,0x33,0x66,0xBF,0x13,0x0D,0x14,0x6E,0xF3,0x7F,0x1F,0x63,0x10,0x1E,
-0xFA,0x8D,0x1B,0x25,0x6D,0x6C,0x8F,0xA5,0xB7,0x61,0x01,0xB1,0xD2,0xA3,0x26,0xA1,
-0x10,0x71,0x9D,0xAD,0xE2,0xC3,0xF9,0xC3,0x99,0x51,0xB7,0x2B,0x07,0x08,0xCE,0x2E,
-0xE6,0x50,0xB2,0xA7,0xFA,0x0A,0x45,0x2F,0xA2,0xF0,0xF2,
-};
-
-
/* subject:/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Global Root CA */
/* issuer :/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Global Root CA */
@@ -1348,6 +2416,82 @@ const unsigned char DigiCert_Global_Root_CA_certificate[947]={
};
+/* subject:/C=GB/ST=Greater Manchester/L=Salford/O=Comodo CA Limited/CN=AAA Certificate Services */
+/* issuer :/C=GB/ST=Greater Manchester/L=Salford/O=Comodo CA Limited/CN=AAA Certificate Services */
+
+
+const unsigned char Comodo_AAA_Services_root_certificate[1078]={
+0x30,0x82,0x04,0x32,0x30,0x82,0x03,0x1A,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,
+0x7B,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x47,0x42,0x31,0x1B,
+0x30,0x19,0x06,0x03,0x55,0x04,0x08,0x0C,0x12,0x47,0x72,0x65,0x61,0x74,0x65,0x72,
+0x20,0x4D,0x61,0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31,0x10,0x30,0x0E,0x06,
+0x03,0x55,0x04,0x07,0x0C,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,0x30,
+0x18,0x06,0x03,0x55,0x04,0x0A,0x0C,0x11,0x43,0x6F,0x6D,0x6F,0x64,0x6F,0x20,0x43,
+0x41,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,
+0x04,0x03,0x0C,0x18,0x41,0x41,0x41,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,
+0x61,0x74,0x65,0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x30,0x1E,0x17,0x0D,
+0x30,0x34,0x30,0x31,0x30,0x31,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32,
+0x38,0x31,0x32,0x33,0x31,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x7B,0x31,0x0B,
+0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x47,0x42,0x31,0x1B,0x30,0x19,0x06,
+0x03,0x55,0x04,0x08,0x0C,0x12,0x47,0x72,0x65,0x61,0x74,0x65,0x72,0x20,0x4D,0x61,
+0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04,
+0x07,0x0C,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,0x30,0x18,0x06,0x03,
+0x55,0x04,0x0A,0x0C,0x11,0x43,0x6F,0x6D,0x6F,0x64,0x6F,0x20,0x43,0x41,0x20,0x4C,
+0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x03,0x0C,
+0x18,0x41,0x41,0x41,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,
+0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,
+0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,
+0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xBE,0x40,0x9D,0xF4,0x6E,0xE1,
+0xEA,0x76,0x87,0x1C,0x4D,0x45,0x44,0x8E,0xBE,0x46,0xC8,0x83,0x06,0x9D,0xC1,0x2A,
+0xFE,0x18,0x1F,0x8E,0xE4,0x02,0xFA,0xF3,0xAB,0x5D,0x50,0x8A,0x16,0x31,0x0B,0x9A,
+0x06,0xD0,0xC5,0x70,0x22,0xCD,0x49,0x2D,0x54,0x63,0xCC,0xB6,0x6E,0x68,0x46,0x0B,
+0x53,0xEA,0xCB,0x4C,0x24,0xC0,0xBC,0x72,0x4E,0xEA,0xF1,0x15,0xAE,0xF4,0x54,0x9A,
+0x12,0x0A,0xC3,0x7A,0xB2,0x33,0x60,0xE2,0xDA,0x89,0x55,0xF3,0x22,0x58,0xF3,0xDE,
+0xDC,0xCF,0xEF,0x83,0x86,0xA2,0x8C,0x94,0x4F,0x9F,0x68,0xF2,0x98,0x90,0x46,0x84,
+0x27,0xC7,0x76,0xBF,0xE3,0xCC,0x35,0x2C,0x8B,0x5E,0x07,0x64,0x65,0x82,0xC0,0x48,
+0xB0,0xA8,0x91,0xF9,0x61,0x9F,0x76,0x20,0x50,0xA8,0x91,0xC7,0x66,0xB5,0xEB,0x78,
+0x62,0x03,0x56,0xF0,0x8A,0x1A,0x13,0xEA,0x31,0xA3,0x1E,0xA0,0x99,0xFD,0x38,0xF6,
+0xF6,0x27,0x32,0x58,0x6F,0x07,0xF5,0x6B,0xB8,0xFB,0x14,0x2B,0xAF,0xB7,0xAA,0xCC,
+0xD6,0x63,0x5F,0x73,0x8C,0xDA,0x05,0x99,0xA8,0x38,0xA8,0xCB,0x17,0x78,0x36,0x51,
+0xAC,0xE9,0x9E,0xF4,0x78,0x3A,0x8D,0xCF,0x0F,0xD9,0x42,0xE2,0x98,0x0C,0xAB,0x2F,
+0x9F,0x0E,0x01,0xDE,0xEF,0x9F,0x99,0x49,0xF1,0x2D,0xDF,0xAC,0x74,0x4D,0x1B,0x98,
+0xB5,0x47,0xC5,0xE5,0x29,0xD1,0xF9,0x90,0x18,0xC7,0x62,0x9C,0xBE,0x83,0xC7,0x26,
+0x7B,0x3E,0x8A,0x25,0xC7,0xC0,0xDD,0x9D,0xE6,0x35,0x68,0x10,0x20,0x9D,0x8F,0xD8,
+0xDE,0xD2,0xC3,0x84,0x9C,0x0D,0x5E,0xE8,0x2F,0xC9,0x02,0x03,0x01,0x00,0x01,0xA3,
+0x81,0xC0,0x30,0x81,0xBD,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,
+0xA0,0x11,0x0A,0x23,0x3E,0x96,0xF1,0x07,0xEC,0xE2,0xAF,0x29,0xEF,0x82,0xA5,0x7F,
+0xD0,0x30,0xA4,0xB4,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,
+0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,
+0x30,0x03,0x01,0x01,0xFF,0x30,0x7B,0x06,0x03,0x55,0x1D,0x1F,0x04,0x74,0x30,0x72,
+0x30,0x38,0xA0,0x36,0xA0,0x34,0x86,0x32,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x63,
+0x72,0x6C,0x2E,0x63,0x6F,0x6D,0x6F,0x64,0x6F,0x63,0x61,0x2E,0x63,0x6F,0x6D,0x2F,
+0x41,0x41,0x41,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x53,0x65,
+0x72,0x76,0x69,0x63,0x65,0x73,0x2E,0x63,0x72,0x6C,0x30,0x36,0xA0,0x34,0xA0,0x32,
+0x86,0x30,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x63,0x72,0x6C,0x2E,0x63,0x6F,0x6D,
+0x6F,0x64,0x6F,0x2E,0x6E,0x65,0x74,0x2F,0x41,0x41,0x41,0x43,0x65,0x72,0x74,0x69,
+0x66,0x69,0x63,0x61,0x74,0x65,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x2E,0x63,
+0x72,0x6C,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,
+0x00,0x03,0x82,0x01,0x01,0x00,0x08,0x56,0xFC,0x02,0xF0,0x9B,0xE8,0xFF,0xA4,0xFA,
+0xD6,0x7B,0xC6,0x44,0x80,0xCE,0x4F,0xC4,0xC5,0xF6,0x00,0x58,0xCC,0xA6,0xB6,0xBC,
+0x14,0x49,0x68,0x04,0x76,0xE8,0xE6,0xEE,0x5D,0xEC,0x02,0x0F,0x60,0xD6,0x8D,0x50,
+0x18,0x4F,0x26,0x4E,0x01,0xE3,0xE6,0xB0,0xA5,0xEE,0xBF,0xBC,0x74,0x54,0x41,0xBF,
+0xFD,0xFC,0x12,0xB8,0xC7,0x4F,0x5A,0xF4,0x89,0x60,0x05,0x7F,0x60,0xB7,0x05,0x4A,
+0xF3,0xF6,0xF1,0xC2,0xBF,0xC4,0xB9,0x74,0x86,0xB6,0x2D,0x7D,0x6B,0xCC,0xD2,0xF3,
+0x46,0xDD,0x2F,0xC6,0xE0,0x6A,0xC3,0xC3,0x34,0x03,0x2C,0x7D,0x96,0xDD,0x5A,0xC2,
+0x0E,0xA7,0x0A,0x99,0xC1,0x05,0x8B,0xAB,0x0C,0x2F,0xF3,0x5C,0x3A,0xCF,0x6C,0x37,
+0x55,0x09,0x87,0xDE,0x53,0x40,0x6C,0x58,0xEF,0xFC,0xB6,0xAB,0x65,0x6E,0x04,0xF6,
+0x1B,0xDC,0x3C,0xE0,0x5A,0x15,0xC6,0x9E,0xD9,0xF1,0x59,0x48,0x30,0x21,0x65,0x03,
+0x6C,0xEC,0xE9,0x21,0x73,0xEC,0x9B,0x03,0xA1,0xE0,0x37,0xAD,0xA0,0x15,0x18,0x8F,
+0xFA,0xBA,0x02,0xCE,0xA7,0x2C,0xA9,0x10,0x13,0x2C,0xD4,0xE5,0x08,0x26,0xAB,0x22,
+0x97,0x60,0xF8,0x90,0x5E,0x74,0xD4,0xA2,0x9A,0x53,0xBD,0xF2,0xA9,0x68,0xE0,0xA2,
+0x6E,0xC2,0xD7,0x6C,0xB1,0xA3,0x0F,0x9E,0xBF,0xEB,0x68,0xE7,0x56,0xF2,0xAE,0xF2,
+0xE3,0x2B,0x38,0x3A,0x09,0x81,0xB5,0x6B,0x85,0xD7,0xBE,0x2D,0xED,0x3F,0x1A,0xB7,
+0xB2,0x63,0xE2,0xF5,0x62,0x2C,0x82,0xD4,0x6A,0x00,0x41,0x50,0xF1,0x39,0x83,0x9F,
+0x95,0xE9,0x36,0x96,0x98,0x6E,
+};
+
+
/* subject:/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert High Assurance EV Root CA */
/* issuer :/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert High Assurance EV Root CA */
@@ -1417,783 +2561,6 @@ const unsigned char DigiCert_High_Assurance_EV_Root_CA_certificate[969]={
};
-/* subject:/O=Entrust.net/OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/OU=(c) 1999 Entrust.net Limited/CN=Entrust.net Certification Authority (2048) */
-/* issuer :/O=Entrust.net/OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/OU=(c) 1999 Entrust.net Limited/CN=Entrust.net Certification Authority (2048) */
-
-
-const unsigned char Entrust_net_Premium_2048_Secure_Server_CA_certificate[1120]={
-0x30,0x82,0x04,0x5C,0x30,0x82,0x03,0x44,0xA0,0x03,0x02,0x01,0x02,0x02,0x04,0x38,
-0x63,0xB9,0x66,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,
-0x05,0x00,0x30,0x81,0xB4,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x13,0x0B,
-0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x31,0x40,0x30,0x3E,0x06,
-0x03,0x55,0x04,0x0B,0x14,0x37,0x77,0x77,0x77,0x2E,0x65,0x6E,0x74,0x72,0x75,0x73,
-0x74,0x2E,0x6E,0x65,0x74,0x2F,0x43,0x50,0x53,0x5F,0x32,0x30,0x34,0x38,0x20,0x69,
-0x6E,0x63,0x6F,0x72,0x70,0x2E,0x20,0x62,0x79,0x20,0x72,0x65,0x66,0x2E,0x20,0x28,
-0x6C,0x69,0x6D,0x69,0x74,0x73,0x20,0x6C,0x69,0x61,0x62,0x2E,0x29,0x31,0x25,0x30,
-0x23,0x06,0x03,0x55,0x04,0x0B,0x13,0x1C,0x28,0x63,0x29,0x20,0x31,0x39,0x39,0x39,
-0x20,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x20,0x4C,0x69,0x6D,
-0x69,0x74,0x65,0x64,0x31,0x33,0x30,0x31,0x06,0x03,0x55,0x04,0x03,0x13,0x2A,0x45,
-0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x20,0x43,0x65,0x72,0x74,0x69,
-0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,
-0x74,0x79,0x20,0x28,0x32,0x30,0x34,0x38,0x29,0x30,0x1E,0x17,0x0D,0x39,0x39,0x31,
-0x32,0x32,0x34,0x31,0x37,0x35,0x30,0x35,0x31,0x5A,0x17,0x0D,0x31,0x39,0x31,0x32,
-0x32,0x34,0x31,0x38,0x32,0x30,0x35,0x31,0x5A,0x30,0x81,0xB4,0x31,0x14,0x30,0x12,
-0x06,0x03,0x55,0x04,0x0A,0x13,0x0B,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,
-0x65,0x74,0x31,0x40,0x30,0x3E,0x06,0x03,0x55,0x04,0x0B,0x14,0x37,0x77,0x77,0x77,
-0x2E,0x65,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x2F,0x43,0x50,0x53,
-0x5F,0x32,0x30,0x34,0x38,0x20,0x69,0x6E,0x63,0x6F,0x72,0x70,0x2E,0x20,0x62,0x79,
-0x20,0x72,0x65,0x66,0x2E,0x20,0x28,0x6C,0x69,0x6D,0x69,0x74,0x73,0x20,0x6C,0x69,
-0x61,0x62,0x2E,0x29,0x31,0x25,0x30,0x23,0x06,0x03,0x55,0x04,0x0B,0x13,0x1C,0x28,
-0x63,0x29,0x20,0x31,0x39,0x39,0x39,0x20,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,
-0x6E,0x65,0x74,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x33,0x30,0x31,0x06,
-0x03,0x55,0x04,0x03,0x13,0x2A,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,
-0x74,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,
-0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20,0x28,0x32,0x30,0x34,0x38,0x29,
-0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,
-0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,
-0x00,0xAD,0x4D,0x4B,0xA9,0x12,0x86,0xB2,0xEA,0xA3,0x20,0x07,0x15,0x16,0x64,0x2A,
-0x2B,0x4B,0xD1,0xBF,0x0B,0x4A,0x4D,0x8E,0xED,0x80,0x76,0xA5,0x67,0xB7,0x78,0x40,
-0xC0,0x73,0x42,0xC8,0x68,0xC0,0xDB,0x53,0x2B,0xDD,0x5E,0xB8,0x76,0x98,0x35,0x93,
-0x8B,0x1A,0x9D,0x7C,0x13,0x3A,0x0E,0x1F,0x5B,0xB7,0x1E,0xCF,0xE5,0x24,0x14,0x1E,
-0xB1,0x81,0xA9,0x8D,0x7D,0xB8,0xCC,0x6B,0x4B,0x03,0xF1,0x02,0x0C,0xDC,0xAB,0xA5,
-0x40,0x24,0x00,0x7F,0x74,0x94,0xA1,0x9D,0x08,0x29,0xB3,0x88,0x0B,0xF5,0x87,0x77,
-0x9D,0x55,0xCD,0xE4,0xC3,0x7E,0xD7,0x6A,0x64,0xAB,0x85,0x14,0x86,0x95,0x5B,0x97,
-0x32,0x50,0x6F,0x3D,0xC8,0xBA,0x66,0x0C,0xE3,0xFC,0xBD,0xB8,0x49,0xC1,0x76,0x89,
-0x49,0x19,0xFD,0xC0,0xA8,0xBD,0x89,0xA3,0x67,0x2F,0xC6,0x9F,0xBC,0x71,0x19,0x60,
-0xB8,0x2D,0xE9,0x2C,0xC9,0x90,0x76,0x66,0x7B,0x94,0xE2,0xAF,0x78,0xD6,0x65,0x53,
-0x5D,0x3C,0xD6,0x9C,0xB2,0xCF,0x29,0x03,0xF9,0x2F,0xA4,0x50,0xB2,0xD4,0x48,0xCE,
-0x05,0x32,0x55,0x8A,0xFD,0xB2,0x64,0x4C,0x0E,0xE4,0x98,0x07,0x75,0xDB,0x7F,0xDF,
-0xB9,0x08,0x55,0x60,0x85,0x30,0x29,0xF9,0x7B,0x48,0xA4,0x69,0x86,0xE3,0x35,0x3F,
-0x1E,0x86,0x5D,0x7A,0x7A,0x15,0xBD,0xEF,0x00,0x8E,0x15,0x22,0x54,0x17,0x00,0x90,
-0x26,0x93,0xBC,0x0E,0x49,0x68,0x91,0xBF,0xF8,0x47,0xD3,0x9D,0x95,0x42,0xC1,0x0E,
-0x4D,0xDF,0x6F,0x26,0xCF,0xC3,0x18,0x21,0x62,0x66,0x43,0x70,0xD6,0xD5,0xC0,0x07,
-0xE1,0x02,0x03,0x01,0x00,0x01,0xA3,0x74,0x30,0x72,0x30,0x11,0x06,0x09,0x60,0x86,
-0x48,0x01,0x86,0xF8,0x42,0x01,0x01,0x04,0x04,0x03,0x02,0x00,0x07,0x30,0x1F,0x06,
-0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,0x55,0xE4,0x81,0xD1,0x11,0x80,
-0xBE,0xD8,0x89,0xB9,0x08,0xA3,0x31,0xF9,0xA1,0x24,0x09,0x16,0xB9,0x70,0x30,0x1D,
-0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x55,0xE4,0x81,0xD1,0x11,0x80,0xBE,
-0xD8,0x89,0xB9,0x08,0xA3,0x31,0xF9,0xA1,0x24,0x09,0x16,0xB9,0x70,0x30,0x1D,0x06,
-0x09,0x2A,0x86,0x48,0x86,0xF6,0x7D,0x07,0x41,0x00,0x04,0x10,0x30,0x0E,0x1B,0x08,
-0x56,0x35,0x2E,0x30,0x3A,0x34,0x2E,0x30,0x03,0x02,0x04,0x90,0x30,0x0D,0x06,0x09,
-0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,
-0x59,0x47,0xAC,0x21,0x84,0x8A,0x17,0xC9,0x9C,0x89,0x53,0x1E,0xBA,0x80,0x85,0x1A,
-0xC6,0x3C,0x4E,0x3E,0xB1,0x9C,0xB6,0x7C,0xC6,0x92,0x5D,0x18,0x64,0x02,0xE3,0xD3,
-0x06,0x08,0x11,0x61,0x7C,0x63,0xE3,0x2B,0x9D,0x31,0x03,0x70,0x76,0xD2,0xA3,0x28,
-0xA0,0xF4,0xBB,0x9A,0x63,0x73,0xED,0x6D,0xE5,0x2A,0xDB,0xED,0x14,0xA9,0x2B,0xC6,
-0x36,0x11,0xD0,0x2B,0xEB,0x07,0x8B,0xA5,0xDA,0x9E,0x5C,0x19,0x9D,0x56,0x12,0xF5,
-0x54,0x29,0xC8,0x05,0xED,0xB2,0x12,0x2A,0x8D,0xF4,0x03,0x1B,0xFF,0xE7,0x92,0x10,
-0x87,0xB0,0x3A,0xB5,0xC3,0x9D,0x05,0x37,0x12,0xA3,0xC7,0xF4,0x15,0xB9,0xD5,0xA4,
-0x39,0x16,0x9B,0x53,0x3A,0x23,0x91,0xF1,0xA8,0x82,0xA2,0x6A,0x88,0x68,0xC1,0x79,
-0x02,0x22,0xBC,0xAA,0xA6,0xD6,0xAE,0xDF,0xB0,0x14,0x5F,0xB8,0x87,0xD0,0xDD,0x7C,
-0x7F,0x7B,0xFF,0xAF,0x1C,0xCF,0xE6,0xDB,0x07,0xAD,0x5E,0xDB,0x85,0x9D,0xD0,0x2B,
-0x0D,0x33,0xDB,0x04,0xD1,0xE6,0x49,0x40,0x13,0x2B,0x76,0xFB,0x3E,0xE9,0x9C,0x89,
-0x0F,0x15,0xCE,0x18,0xB0,0x85,0x78,0x21,0x4F,0x6B,0x4F,0x0E,0xFA,0x36,0x67,0xCD,
-0x07,0xF2,0xFF,0x08,0xD0,0xE2,0xDE,0xD9,0xBF,0x2A,0xAF,0xB8,0x87,0x86,0x21,0x3C,
-0x04,0xCA,0xB7,0x94,0x68,0x7F,0xCF,0x3C,0xE9,0x98,0xD7,0x38,0xFF,0xEC,0xC0,0xD9,
-0x50,0xF0,0x2E,0x4B,0x58,0xAE,0x46,0x6F,0xD0,0x2E,0xC3,0x60,0xDA,0x72,0x55,0x72,
-0xBD,0x4C,0x45,0x9E,0x61,0xBA,0xBF,0x84,0x81,0x92,0x03,0xD1,0xD2,0x69,0x7C,0xC5,
-};
-
-
-/* subject:/C=US/O=Entrust.net/OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/OU=(c) 1999 Entrust.net Limited/CN=Entrust.net Secure Server Certification Authority */
-/* issuer :/C=US/O=Entrust.net/OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/OU=(c) 1999 Entrust.net Limited/CN=Entrust.net Secure Server Certification Authority */
-
-
-const unsigned char Entrust_net_Secure_Server_CA_certificate[1244]={
-0x30,0x82,0x04,0xD8,0x30,0x82,0x04,0x41,0xA0,0x03,0x02,0x01,0x02,0x02,0x04,0x37,
-0x4A,0xD2,0x43,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,
-0x05,0x00,0x30,0x81,0xC3,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,
-0x55,0x53,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x13,0x0B,0x45,0x6E,0x74,
-0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x31,0x3B,0x30,0x39,0x06,0x03,0x55,0x04,
-0x0B,0x13,0x32,0x77,0x77,0x77,0x2E,0x65,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,
-0x65,0x74,0x2F,0x43,0x50,0x53,0x20,0x69,0x6E,0x63,0x6F,0x72,0x70,0x2E,0x20,0x62,
-0x79,0x20,0x72,0x65,0x66,0x2E,0x20,0x28,0x6C,0x69,0x6D,0x69,0x74,0x73,0x20,0x6C,
-0x69,0x61,0x62,0x2E,0x29,0x31,0x25,0x30,0x23,0x06,0x03,0x55,0x04,0x0B,0x13,0x1C,
-0x28,0x63,0x29,0x20,0x31,0x39,0x39,0x39,0x20,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,
-0x2E,0x6E,0x65,0x74,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x3A,0x30,0x38,
-0x06,0x03,0x55,0x04,0x03,0x13,0x31,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,
-0x65,0x74,0x20,0x53,0x65,0x63,0x75,0x72,0x65,0x20,0x53,0x65,0x72,0x76,0x65,0x72,
-0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,
-0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x1E,0x17,0x0D,0x39,0x39,0x30,0x35,
-0x32,0x35,0x31,0x36,0x30,0x39,0x34,0x30,0x5A,0x17,0x0D,0x31,0x39,0x30,0x35,0x32,
-0x35,0x31,0x36,0x33,0x39,0x34,0x30,0x5A,0x30,0x81,0xC3,0x31,0x0B,0x30,0x09,0x06,
-0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,
-0x0A,0x13,0x0B,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x31,0x3B,
-0x30,0x39,0x06,0x03,0x55,0x04,0x0B,0x13,0x32,0x77,0x77,0x77,0x2E,0x65,0x6E,0x74,
-0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x2F,0x43,0x50,0x53,0x20,0x69,0x6E,0x63,
-0x6F,0x72,0x70,0x2E,0x20,0x62,0x79,0x20,0x72,0x65,0x66,0x2E,0x20,0x28,0x6C,0x69,
-0x6D,0x69,0x74,0x73,0x20,0x6C,0x69,0x61,0x62,0x2E,0x29,0x31,0x25,0x30,0x23,0x06,
-0x03,0x55,0x04,0x0B,0x13,0x1C,0x28,0x63,0x29,0x20,0x31,0x39,0x39,0x39,0x20,0x45,
-0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x20,0x4C,0x69,0x6D,0x69,0x74,
-0x65,0x64,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,0x03,0x13,0x31,0x45,0x6E,0x74,
-0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x20,0x53,0x65,0x63,0x75,0x72,0x65,0x20,
-0x53,0x65,0x72,0x76,0x65,0x72,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,
-0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x81,
-0x9D,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,
-0x03,0x81,0x8B,0x00,0x30,0x81,0x87,0x02,0x81,0x81,0x00,0xCD,0x28,0x83,0x34,0x54,
-0x1B,0x89,0xF3,0x0F,0xAF,0x37,0x91,0x31,0xFF,0xAF,0x31,0x60,0xC9,0xA8,0xE8,0xB2,
-0x10,0x68,0xED,0x9F,0xE7,0x93,0x36,0xF1,0x0A,0x64,0xBB,0x47,0xF5,0x04,0x17,0x3F,
-0x23,0x47,0x4D,0xC5,0x27,0x19,0x81,0x26,0x0C,0x54,0x72,0x0D,0x88,0x2D,0xD9,0x1F,
-0x9A,0x12,0x9F,0xBC,0xB3,0x71,0xD3,0x80,0x19,0x3F,0x47,0x66,0x7B,0x8C,0x35,0x28,
-0xD2,0xB9,0x0A,0xDF,0x24,0xDA,0x9C,0xD6,0x50,0x79,0x81,0x7A,0x5A,0xD3,0x37,0xF7,
-0xC2,0x4A,0xD8,0x29,0x92,0x26,0x64,0xD1,0xE4,0x98,0x6C,0x3A,0x00,0x8A,0xF5,0x34,
-0x9B,0x65,0xF8,0xED,0xE3,0x10,0xFF,0xFD,0xB8,0x49,0x58,0xDC,0xA0,0xDE,0x82,0x39,
-0x6B,0x81,0xB1,0x16,0x19,0x61,0xB9,0x54,0xB6,0xE6,0x43,0x02,0x01,0x03,0xA3,0x82,
-0x01,0xD7,0x30,0x82,0x01,0xD3,0x30,0x11,0x06,0x09,0x60,0x86,0x48,0x01,0x86,0xF8,
-0x42,0x01,0x01,0x04,0x04,0x03,0x02,0x00,0x07,0x30,0x82,0x01,0x19,0x06,0x03,0x55,
-0x1D,0x1F,0x04,0x82,0x01,0x10,0x30,0x82,0x01,0x0C,0x30,0x81,0xDE,0xA0,0x81,0xDB,
-0xA0,0x81,0xD8,0xA4,0x81,0xD5,0x30,0x81,0xD2,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,
-0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x13,
-0x0B,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x31,0x3B,0x30,0x39,
-0x06,0x03,0x55,0x04,0x0B,0x13,0x32,0x77,0x77,0x77,0x2E,0x65,0x6E,0x74,0x72,0x75,
-0x73,0x74,0x2E,0x6E,0x65,0x74,0x2F,0x43,0x50,0x53,0x20,0x69,0x6E,0x63,0x6F,0x72,
-0x70,0x2E,0x20,0x62,0x79,0x20,0x72,0x65,0x66,0x2E,0x20,0x28,0x6C,0x69,0x6D,0x69,
-0x74,0x73,0x20,0x6C,0x69,0x61,0x62,0x2E,0x29,0x31,0x25,0x30,0x23,0x06,0x03,0x55,
-0x04,0x0B,0x13,0x1C,0x28,0x63,0x29,0x20,0x31,0x39,0x39,0x39,0x20,0x45,0x6E,0x74,
-0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,
-0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,0x03,0x13,0x31,0x45,0x6E,0x74,0x72,0x75,
-0x73,0x74,0x2E,0x6E,0x65,0x74,0x20,0x53,0x65,0x63,0x75,0x72,0x65,0x20,0x53,0x65,
-0x72,0x76,0x65,0x72,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,
-0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x31,0x0D,0x30,0x0B,
-0x06,0x03,0x55,0x04,0x03,0x13,0x04,0x43,0x52,0x4C,0x31,0x30,0x29,0xA0,0x27,0xA0,
-0x25,0x86,0x23,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x77,0x77,0x77,0x2E,0x65,0x6E,
-0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x2F,0x43,0x52,0x4C,0x2F,0x6E,0x65,
-0x74,0x31,0x2E,0x63,0x72,0x6C,0x30,0x2B,0x06,0x03,0x55,0x1D,0x10,0x04,0x24,0x30,
-0x22,0x80,0x0F,0x31,0x39,0x39,0x39,0x30,0x35,0x32,0x35,0x31,0x36,0x30,0x39,0x34,
-0x30,0x5A,0x81,0x0F,0x32,0x30,0x31,0x39,0x30,0x35,0x32,0x35,0x31,0x36,0x30,0x39,
-0x34,0x30,0x5A,0x30,0x0B,0x06,0x03,0x55,0x1D,0x0F,0x04,0x04,0x03,0x02,0x01,0x06,
-0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,0xF0,0x17,0x62,
-0x13,0x55,0x3D,0xB3,0xFF,0x0A,0x00,0x6B,0xFB,0x50,0x84,0x97,0xF3,0xED,0x62,0xD0,
-0x1A,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xF0,0x17,0x62,0x13,
-0x55,0x3D,0xB3,0xFF,0x0A,0x00,0x6B,0xFB,0x50,0x84,0x97,0xF3,0xED,0x62,0xD0,0x1A,
-0x30,0x0C,0x06,0x03,0x55,0x1D,0x13,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x19,
-0x06,0x09,0x2A,0x86,0x48,0x86,0xF6,0x7D,0x07,0x41,0x00,0x04,0x0C,0x30,0x0A,0x1B,
-0x04,0x56,0x34,0x2E,0x30,0x03,0x02,0x04,0x90,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,
-0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x81,0x81,0x00,0x90,0xDC,0x30,0x02,
-0xFA,0x64,0x74,0xC2,0xA7,0x0A,0xA5,0x7C,0x21,0x8D,0x34,0x17,0xA8,0xFB,0x47,0x0E,
-0xFF,0x25,0x7C,0x8D,0x13,0x0A,0xFB,0xE4,0x98,0xB5,0xEF,0x8C,0xF8,0xC5,0x10,0x0D,
-0xF7,0x92,0xBE,0xF1,0xC3,0xD5,0xD5,0x95,0x6A,0x04,0xBB,0x2C,0xCE,0x26,0x36,0x65,
-0xC8,0x31,0xC6,0xE7,0xEE,0x3F,0xE3,0x57,0x75,0x84,0x7A,0x11,0xEF,0x46,0x4F,0x18,
-0xF4,0xD3,0x98,0xBB,0xA8,0x87,0x32,0xBA,0x72,0xF6,0x3C,0xE2,0x3D,0x9F,0xD7,0x1D,
-0xD9,0xC3,0x60,0x43,0x8C,0x58,0x0E,0x22,0x96,0x2F,0x62,0xA3,0x2C,0x1F,0xBA,0xAD,
-0x05,0xEF,0xAB,0x32,0x78,0x87,0xA0,0x54,0x73,0x19,0xB5,0x5C,0x05,0xF9,0x52,0x3E,
-0x6D,0x2D,0x45,0x0B,0xF7,0x0A,0x93,0xEA,0xED,0x06,0xF9,0xB2,
-};
-
-
-/* subject:/C=US/O=Entrust, Inc./OU=www.entrust.net/CPS is incorporated by reference/OU=(c) 2006 Entrust, Inc./CN=Entrust Root Certification Authority */
-/* issuer :/C=US/O=Entrust, Inc./OU=www.entrust.net/CPS is incorporated by reference/OU=(c) 2006 Entrust, Inc./CN=Entrust Root Certification Authority */
-
-
-const unsigned char Entrust_Root_Certification_Authority_certificate[1173]={
-0x30,0x82,0x04,0x91,0x30,0x82,0x03,0x79,0xA0,0x03,0x02,0x01,0x02,0x02,0x04,0x45,
-0x6B,0x50,0x54,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,
-0x05,0x00,0x30,0x81,0xB0,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,
-0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x45,0x6E,0x74,
-0x72,0x75,0x73,0x74,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x39,0x30,0x37,0x06,0x03,
-0x55,0x04,0x0B,0x13,0x30,0x77,0x77,0x77,0x2E,0x65,0x6E,0x74,0x72,0x75,0x73,0x74,
-0x2E,0x6E,0x65,0x74,0x2F,0x43,0x50,0x53,0x20,0x69,0x73,0x20,0x69,0x6E,0x63,0x6F,
-0x72,0x70,0x6F,0x72,0x61,0x74,0x65,0x64,0x20,0x62,0x79,0x20,0x72,0x65,0x66,0x65,
-0x72,0x65,0x6E,0x63,0x65,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,0x13,0x16,
-0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x36,0x20,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,
-0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x2D,0x30,0x2B,0x06,0x03,0x55,0x04,0x03,0x13,
-0x24,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x65,
-0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,
-0x6F,0x72,0x69,0x74,0x79,0x30,0x1E,0x17,0x0D,0x30,0x36,0x31,0x31,0x32,0x37,0x32,
-0x30,0x32,0x33,0x34,0x32,0x5A,0x17,0x0D,0x32,0x36,0x31,0x31,0x32,0x37,0x32,0x30,
-0x35,0x33,0x34,0x32,0x5A,0x30,0x81,0xB0,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,
-0x06,0x13,0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,
-0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x39,0x30,
-0x37,0x06,0x03,0x55,0x04,0x0B,0x13,0x30,0x77,0x77,0x77,0x2E,0x65,0x6E,0x74,0x72,
-0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x2F,0x43,0x50,0x53,0x20,0x69,0x73,0x20,0x69,
-0x6E,0x63,0x6F,0x72,0x70,0x6F,0x72,0x61,0x74,0x65,0x64,0x20,0x62,0x79,0x20,0x72,
-0x65,0x66,0x65,0x72,0x65,0x6E,0x63,0x65,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,
-0x0B,0x13,0x16,0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x36,0x20,0x45,0x6E,0x74,0x72,
-0x75,0x73,0x74,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x2D,0x30,0x2B,0x06,0x03,0x55,
-0x04,0x03,0x13,0x24,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x20,0x52,0x6F,0x6F,0x74,
-0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,
-0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,
-0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,
-0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xB6,0x95,0xB6,0x43,0x42,0xFA,0xC6,
-0x6D,0x2A,0x6F,0x48,0xDF,0x94,0x4C,0x39,0x57,0x05,0xEE,0xC3,0x79,0x11,0x41,0x68,
-0x36,0xED,0xEC,0xFE,0x9A,0x01,0x8F,0xA1,0x38,0x28,0xFC,0xF7,0x10,0x46,0x66,0x2E,
-0x4D,0x1E,0x1A,0xB1,0x1A,0x4E,0xC6,0xD1,0xC0,0x95,0x88,0xB0,0xC9,0xFF,0x31,0x8B,
-0x33,0x03,0xDB,0xB7,0x83,0x7B,0x3E,0x20,0x84,0x5E,0xED,0xB2,0x56,0x28,0xA7,0xF8,
-0xE0,0xB9,0x40,0x71,0x37,0xC5,0xCB,0x47,0x0E,0x97,0x2A,0x68,0xC0,0x22,0x95,0x62,
-0x15,0xDB,0x47,0xD9,0xF5,0xD0,0x2B,0xFF,0x82,0x4B,0xC9,0xAD,0x3E,0xDE,0x4C,0xDB,
-0x90,0x80,0x50,0x3F,0x09,0x8A,0x84,0x00,0xEC,0x30,0x0A,0x3D,0x18,0xCD,0xFB,0xFD,
-0x2A,0x59,0x9A,0x23,0x95,0x17,0x2C,0x45,0x9E,0x1F,0x6E,0x43,0x79,0x6D,0x0C,0x5C,
-0x98,0xFE,0x48,0xA7,0xC5,0x23,0x47,0x5C,0x5E,0xFD,0x6E,0xE7,0x1E,0xB4,0xF6,0x68,
-0x45,0xD1,0x86,0x83,0x5B,0xA2,0x8A,0x8D,0xB1,0xE3,0x29,0x80,0xFE,0x25,0x71,0x88,
-0xAD,0xBE,0xBC,0x8F,0xAC,0x52,0x96,0x4B,0xAA,0x51,0x8D,0xE4,0x13,0x31,0x19,0xE8,
-0x4E,0x4D,0x9F,0xDB,0xAC,0xB3,0x6A,0xD5,0xBC,0x39,0x54,0x71,0xCA,0x7A,0x7A,0x7F,
-0x90,0xDD,0x7D,0x1D,0x80,0xD9,0x81,0xBB,0x59,0x26,0xC2,0x11,0xFE,0xE6,0x93,0xE2,
-0xF7,0x80,0xE4,0x65,0xFB,0x34,0x37,0x0E,0x29,0x80,0x70,0x4D,0xAF,0x38,0x86,0x2E,
-0x9E,0x7F,0x57,0xAF,0x9E,0x17,0xAE,0xEB,0x1C,0xCB,0x28,0x21,0x5F,0xB6,0x1C,0xD8,
-0xE7,0xA2,0x04,0x22,0xF9,0xD3,0xDA,0xD8,0xCB,0x02,0x03,0x01,0x00,0x01,0xA3,0x81,
-0xB0,0x30,0x81,0xAD,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,
-0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,
-0x30,0x03,0x01,0x01,0xFF,0x30,0x2B,0x06,0x03,0x55,0x1D,0x10,0x04,0x24,0x30,0x22,
-0x80,0x0F,0x32,0x30,0x30,0x36,0x31,0x31,0x32,0x37,0x32,0x30,0x32,0x33,0x34,0x32,
-0x5A,0x81,0x0F,0x32,0x30,0x32,0x36,0x31,0x31,0x32,0x37,0x32,0x30,0x35,0x33,0x34,
-0x32,0x5A,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,0x68,
-0x90,0xE4,0x67,0xA4,0xA6,0x53,0x80,0xC7,0x86,0x66,0xA4,0xF1,0xF7,0x4B,0x43,0xFB,
-0x84,0xBD,0x6D,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x68,0x90,
-0xE4,0x67,0xA4,0xA6,0x53,0x80,0xC7,0x86,0x66,0xA4,0xF1,0xF7,0x4B,0x43,0xFB,0x84,
-0xBD,0x6D,0x30,0x1D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF6,0x7D,0x07,0x41,0x00,0x04,
-0x10,0x30,0x0E,0x1B,0x08,0x56,0x37,0x2E,0x31,0x3A,0x34,0x2E,0x30,0x03,0x02,0x04,
-0x90,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,
-0x03,0x82,0x01,0x01,0x00,0x93,0xD4,0x30,0xB0,0xD7,0x03,0x20,0x2A,0xD0,0xF9,0x63,
-0xE8,0x91,0x0C,0x05,0x20,0xA9,0x5F,0x19,0xCA,0x7B,0x72,0x4E,0xD4,0xB1,0xDB,0xD0,
-0x96,0xFB,0x54,0x5A,0x19,0x2C,0x0C,0x08,0xF7,0xB2,0xBC,0x85,0xA8,0x9D,0x7F,0x6D,
-0x3B,0x52,0xB3,0x2A,0xDB,0xE7,0xD4,0x84,0x8C,0x63,0xF6,0x0F,0xCB,0x26,0x01,0x91,
-0x50,0x6C,0xF4,0x5F,0x14,0xE2,0x93,0x74,0xC0,0x13,0x9E,0x30,0x3A,0x50,0xE3,0xB4,
-0x60,0xC5,0x1C,0xF0,0x22,0x44,0x8D,0x71,0x47,0xAC,0xC8,0x1A,0xC9,0xE9,0x9B,0x9A,
-0x00,0x60,0x13,0xFF,0x70,0x7E,0x5F,0x11,0x4D,0x49,0x1B,0xB3,0x15,0x52,0x7B,0xC9,
-0x54,0xDA,0xBF,0x9D,0x95,0xAF,0x6B,0x9A,0xD8,0x9E,0xE9,0xF1,0xE4,0x43,0x8D,0xE2,
-0x11,0x44,0x3A,0xBF,0xAF,0xBD,0x83,0x42,0x73,0x52,0x8B,0xAA,0xBB,0xA7,0x29,0xCF,
-0xF5,0x64,0x1C,0x0A,0x4D,0xD1,0xBC,0xAA,0xAC,0x9F,0x2A,0xD0,0xFF,0x7F,0x7F,0xDA,
-0x7D,0xEA,0xB1,0xED,0x30,0x25,0xC1,0x84,0xDA,0x34,0xD2,0x5B,0x78,0x83,0x56,0xEC,
-0x9C,0x36,0xC3,0x26,0xE2,0x11,0xF6,0x67,0x49,0x1D,0x92,0xAB,0x8C,0xFB,0xEB,0xFF,
-0x7A,0xEE,0x85,0x4A,0xA7,0x50,0x80,0xF0,0xA7,0x5C,0x4A,0x94,0x2E,0x5F,0x05,0x99,
-0x3C,0x52,0x41,0xE0,0xCD,0xB4,0x63,0xCF,0x01,0x43,0xBA,0x9C,0x83,0xDC,0x8F,0x60,
-0x3B,0xF3,0x5A,0xB4,0xB4,0x7B,0xAE,0xDA,0x0B,0x90,0x38,0x75,0xEF,0x81,0x1D,0x66,
-0xD2,0xF7,0x57,0x70,0x36,0xB3,0xBF,0xFC,0x28,0xAF,0x71,0x25,0x85,0x5B,0x13,0xFE,
-0x1E,0x7F,0x5A,0xB4,0x3C,
-};
-
-
-/* subject:/C=US/O=Equifax/OU=Equifax Secure Certificate Authority */
-/* issuer :/C=US/O=Equifax/OU=Equifax Secure Certificate Authority */
-
-
-const unsigned char Equifax_Secure_CA_certificate[804]={
-0x30,0x82,0x03,0x20,0x30,0x82,0x02,0x89,0xA0,0x03,0x02,0x01,0x02,0x02,0x04,0x35,
-0xDE,0xF4,0xCF,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,
-0x05,0x00,0x30,0x4E,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,
-0x53,0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x0A,0x13,0x07,0x45,0x71,0x75,0x69,
-0x66,0x61,0x78,0x31,0x2D,0x30,0x2B,0x06,0x03,0x55,0x04,0x0B,0x13,0x24,0x45,0x71,
-0x75,0x69,0x66,0x61,0x78,0x20,0x53,0x65,0x63,0x75,0x72,0x65,0x20,0x43,0x65,0x72,
-0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,
-0x74,0x79,0x30,0x1E,0x17,0x0D,0x39,0x38,0x30,0x38,0x32,0x32,0x31,0x36,0x34,0x31,
-0x35,0x31,0x5A,0x17,0x0D,0x31,0x38,0x30,0x38,0x32,0x32,0x31,0x36,0x34,0x31,0x35,
-0x31,0x5A,0x30,0x4E,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,
-0x53,0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x0A,0x13,0x07,0x45,0x71,0x75,0x69,
-0x66,0x61,0x78,0x31,0x2D,0x30,0x2B,0x06,0x03,0x55,0x04,0x0B,0x13,0x24,0x45,0x71,
-0x75,0x69,0x66,0x61,0x78,0x20,0x53,0x65,0x63,0x75,0x72,0x65,0x20,0x43,0x65,0x72,
-0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,
-0x74,0x79,0x30,0x81,0x9F,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,
-0x01,0x01,0x05,0x00,0x03,0x81,0x8D,0x00,0x30,0x81,0x89,0x02,0x81,0x81,0x00,0xC1,
-0x5D,0xB1,0x58,0x67,0x08,0x62,0xEE,0xA0,0x9A,0x2D,0x1F,0x08,0x6D,0x91,0x14,0x68,
-0x98,0x0A,0x1E,0xFE,0xDA,0x04,0x6F,0x13,0x84,0x62,0x21,0xC3,0xD1,0x7C,0xCE,0x9F,
-0x05,0xE0,0xB8,0x01,0xF0,0x4E,0x34,0xEC,0xE2,0x8A,0x95,0x04,0x64,0xAC,0xF1,0x6B,
-0x53,0x5F,0x05,0xB3,0xCB,0x67,0x80,0xBF,0x42,0x02,0x8E,0xFE,0xDD,0x01,0x09,0xEC,
-0xE1,0x00,0x14,0x4F,0xFC,0xFB,0xF0,0x0C,0xDD,0x43,0xBA,0x5B,0x2B,0xE1,0x1F,0x80,
-0x70,0x99,0x15,0x57,0x93,0x16,0xF1,0x0F,0x97,0x6A,0xB7,0xC2,0x68,0x23,0x1C,0xCC,
-0x4D,0x59,0x30,0xAC,0x51,0x1E,0x3B,0xAF,0x2B,0xD6,0xEE,0x63,0x45,0x7B,0xC5,0xD9,
-0x5F,0x50,0xD2,0xE3,0x50,0x0F,0x3A,0x88,0xE7,0xBF,0x14,0xFD,0xE0,0xC7,0xB9,0x02,
-0x03,0x01,0x00,0x01,0xA3,0x82,0x01,0x09,0x30,0x82,0x01,0x05,0x30,0x70,0x06,0x03,
-0x55,0x1D,0x1F,0x04,0x69,0x30,0x67,0x30,0x65,0xA0,0x63,0xA0,0x61,0xA4,0x5F,0x30,
-0x5D,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x10,
-0x30,0x0E,0x06,0x03,0x55,0x04,0x0A,0x13,0x07,0x45,0x71,0x75,0x69,0x66,0x61,0x78,
-0x31,0x2D,0x30,0x2B,0x06,0x03,0x55,0x04,0x0B,0x13,0x24,0x45,0x71,0x75,0x69,0x66,
-0x61,0x78,0x20,0x53,0x65,0x63,0x75,0x72,0x65,0x20,0x43,0x65,0x72,0x74,0x69,0x66,
-0x69,0x63,0x61,0x74,0x65,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x31,
-0x0D,0x30,0x0B,0x06,0x03,0x55,0x04,0x03,0x13,0x04,0x43,0x52,0x4C,0x31,0x30,0x1A,
-0x06,0x03,0x55,0x1D,0x10,0x04,0x13,0x30,0x11,0x81,0x0F,0x32,0x30,0x31,0x38,0x30,
-0x38,0x32,0x32,0x31,0x36,0x34,0x31,0x35,0x31,0x5A,0x30,0x0B,0x06,0x03,0x55,0x1D,
-0x0F,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,
-0x30,0x16,0x80,0x14,0x48,0xE6,0x68,0xF9,0x2B,0xD2,0xB2,0x95,0xD7,0x47,0xD8,0x23,
-0x20,0x10,0x4F,0x33,0x98,0x90,0x9F,0xD4,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,
-0x16,0x04,0x14,0x48,0xE6,0x68,0xF9,0x2B,0xD2,0xB2,0x95,0xD7,0x47,0xD8,0x23,0x20,
-0x10,0x4F,0x33,0x98,0x90,0x9F,0xD4,0x30,0x0C,0x06,0x03,0x55,0x1D,0x13,0x04,0x05,
-0x30,0x03,0x01,0x01,0xFF,0x30,0x1A,0x06,0x09,0x2A,0x86,0x48,0x86,0xF6,0x7D,0x07,
-0x41,0x00,0x04,0x0D,0x30,0x0B,0x1B,0x05,0x56,0x33,0x2E,0x30,0x63,0x03,0x02,0x06,
-0xC0,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,
-0x03,0x81,0x81,0x00,0x58,0xCE,0x29,0xEA,0xFC,0xF7,0xDE,0xB5,0xCE,0x02,0xB9,0x17,
-0xB5,0x85,0xD1,0xB9,0xE3,0xE0,0x95,0xCC,0x25,0x31,0x0D,0x00,0xA6,0x92,0x6E,0x7F,
-0xB6,0x92,0x63,0x9E,0x50,0x95,0xD1,0x9A,0x6F,0xE4,0x11,0xDE,0x63,0x85,0x6E,0x98,
-0xEE,0xA8,0xFF,0x5A,0xC8,0xD3,0x55,0xB2,0x66,0x71,0x57,0xDE,0xC0,0x21,0xEB,0x3D,
-0x2A,0xA7,0x23,0x49,0x01,0x04,0x86,0x42,0x7B,0xFC,0xEE,0x7F,0xA2,0x16,0x52,0xB5,
-0x67,0x67,0xD3,0x40,0xDB,0x3B,0x26,0x58,0xB2,0x28,0x77,0x3D,0xAE,0x14,0x77,0x61,
-0xD6,0xFA,0x2A,0x66,0x27,0xA0,0x0D,0xFA,0xA7,0x73,0x5C,0xEA,0x70,0xF1,0x94,0x21,
-0x65,0x44,0x5F,0xFA,0xFC,0xEF,0x29,0x68,0xA9,0xA2,0x87,0x79,0xEF,0x79,0xEF,0x4F,
-0xAC,0x07,0x77,0x38,
-};
-
-
-/* subject:/C=US/O=Equifax Secure Inc./CN=Equifax Secure eBusiness CA-1 */
-/* issuer :/C=US/O=Equifax Secure Inc./CN=Equifax Secure eBusiness CA-1 */
-
-
-const unsigned char Equifax_Secure_eBusiness_CA_1_certificate[646]={
-0x30,0x82,0x02,0x82,0x30,0x82,0x01,0xEB,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x04,
-0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x04,0x05,0x00,0x30,
-0x53,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x1C,
-0x30,0x1A,0x06,0x03,0x55,0x04,0x0A,0x13,0x13,0x45,0x71,0x75,0x69,0x66,0x61,0x78,
-0x20,0x53,0x65,0x63,0x75,0x72,0x65,0x20,0x49,0x6E,0x63,0x2E,0x31,0x26,0x30,0x24,
-0x06,0x03,0x55,0x04,0x03,0x13,0x1D,0x45,0x71,0x75,0x69,0x66,0x61,0x78,0x20,0x53,
-0x65,0x63,0x75,0x72,0x65,0x20,0x65,0x42,0x75,0x73,0x69,0x6E,0x65,0x73,0x73,0x20,
-0x43,0x41,0x2D,0x31,0x30,0x1E,0x17,0x0D,0x39,0x39,0x30,0x36,0x32,0x31,0x30,0x34,
-0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32,0x30,0x30,0x36,0x32,0x31,0x30,0x34,0x30,
-0x30,0x30,0x30,0x5A,0x30,0x53,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,
-0x02,0x55,0x53,0x31,0x1C,0x30,0x1A,0x06,0x03,0x55,0x04,0x0A,0x13,0x13,0x45,0x71,
-0x75,0x69,0x66,0x61,0x78,0x20,0x53,0x65,0x63,0x75,0x72,0x65,0x20,0x49,0x6E,0x63,
-0x2E,0x31,0x26,0x30,0x24,0x06,0x03,0x55,0x04,0x03,0x13,0x1D,0x45,0x71,0x75,0x69,
-0x66,0x61,0x78,0x20,0x53,0x65,0x63,0x75,0x72,0x65,0x20,0x65,0x42,0x75,0x73,0x69,
-0x6E,0x65,0x73,0x73,0x20,0x43,0x41,0x2D,0x31,0x30,0x81,0x9F,0x30,0x0D,0x06,0x09,
-0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x81,0x8D,0x00,0x30,
-0x81,0x89,0x02,0x81,0x81,0x00,0xCE,0x2F,0x19,0xBC,0x17,0xB7,0x77,0xDE,0x93,0xA9,
-0x5F,0x5A,0x0D,0x17,0x4F,0x34,0x1A,0x0C,0x98,0xF4,0x22,0xD9,0x59,0xD4,0xC4,0x68,
-0x46,0xF0,0xB4,0x35,0xC5,0x85,0x03,0x20,0xC6,0xAF,0x45,0xA5,0x21,0x51,0x45,0x41,
-0xEB,0x16,0x58,0x36,0x32,0x6F,0xE2,0x50,0x62,0x64,0xF9,0xFD,0x51,0x9C,0xAA,0x24,
-0xD9,0xF4,0x9D,0x83,0x2A,0x87,0x0A,0x21,0xD3,0x12,0x38,0x34,0x6C,0x8D,0x00,0x6E,
-0x5A,0xA0,0xD9,0x42,0xEE,0x1A,0x21,0x95,0xF9,0x52,0x4C,0x55,0x5A,0xC5,0x0F,0x38,
-0x4F,0x46,0xFA,0x6D,0xF8,0x2E,0x35,0xD6,0x1D,0x7C,0xEB,0xE2,0xF0,0xB0,0x75,0x80,
-0xC8,0xA9,0x13,0xAC,0xBE,0x88,0xEF,0x3A,0x6E,0xAB,0x5F,0x2A,0x38,0x62,0x02,0xB0,
-0x12,0x7B,0xFE,0x8F,0xA6,0x03,0x02,0x03,0x01,0x00,0x01,0xA3,0x66,0x30,0x64,0x30,
-0x11,0x06,0x09,0x60,0x86,0x48,0x01,0x86,0xF8,0x42,0x01,0x01,0x04,0x04,0x03,0x02,
-0x00,0x07,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,
-0x01,0x01,0xFF,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,
-0x4A,0x78,0x32,0x52,0x11,0xDB,0x59,0x16,0x36,0x5E,0xDF,0xC1,0x14,0x36,0x40,0x6A,
-0x47,0x7C,0x4C,0xA1,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x4A,
-0x78,0x32,0x52,0x11,0xDB,0x59,0x16,0x36,0x5E,0xDF,0xC1,0x14,0x36,0x40,0x6A,0x47,
-0x7C,0x4C,0xA1,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x04,
-0x05,0x00,0x03,0x81,0x81,0x00,0x75,0x5B,0xA8,0x9B,0x03,0x11,0xE6,0xE9,0x56,0x4C,
-0xCD,0xF9,0xA9,0x4C,0xC0,0x0D,0x9A,0xF3,0xCC,0x65,0x69,0xE6,0x25,0x76,0xCC,0x59,
-0xB7,0xD6,0x54,0xC3,0x1D,0xCD,0x99,0xAC,0x19,0xDD,0xB4,0x85,0xD5,0xE0,0x3D,0xFC,
-0x62,0x20,0xA7,0x84,0x4B,0x58,0x65,0xF1,0xE2,0xF9,0x95,0x21,0x3F,0xF5,0xD4,0x7E,
-0x58,0x1E,0x47,0x87,0x54,0x3E,0x58,0xA1,0xB5,0xB5,0xF8,0x2A,0xEF,0x71,0xE7,0xBC,
-0xC3,0xF6,0xB1,0x49,0x46,0xE2,0xD7,0xA0,0x6B,0xE5,0x56,0x7A,0x9A,0x27,0x98,0x7C,
-0x46,0x62,0x14,0xE7,0xC9,0xFC,0x6E,0x03,0x12,0x79,0x80,0x38,0x1D,0x48,0x82,0x8D,
-0xFC,0x17,0xFE,0x2A,0x96,0x2B,0xB5,0x62,0xA6,0xA6,0x3D,0xBD,0x7F,0x92,0x59,0xCD,
-0x5A,0x2A,0x82,0xB2,0x37,0x79,
-};
-
-
-/* subject:/C=US/O=Equifax Secure/OU=Equifax Secure eBusiness CA-2 */
-/* issuer :/C=US/O=Equifax Secure/OU=Equifax Secure eBusiness CA-2 */
-
-
-const unsigned char Equifax_Secure_eBusiness_CA_2_certificate[804]={
-0x30,0x82,0x03,0x20,0x30,0x82,0x02,0x89,0xA0,0x03,0x02,0x01,0x02,0x02,0x04,0x37,
-0x70,0xCF,0xB5,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,
-0x05,0x00,0x30,0x4E,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,
-0x53,0x31,0x17,0x30,0x15,0x06,0x03,0x55,0x04,0x0A,0x13,0x0E,0x45,0x71,0x75,0x69,
-0x66,0x61,0x78,0x20,0x53,0x65,0x63,0x75,0x72,0x65,0x31,0x26,0x30,0x24,0x06,0x03,
-0x55,0x04,0x0B,0x13,0x1D,0x45,0x71,0x75,0x69,0x66,0x61,0x78,0x20,0x53,0x65,0x63,
-0x75,0x72,0x65,0x20,0x65,0x42,0x75,0x73,0x69,0x6E,0x65,0x73,0x73,0x20,0x43,0x41,
-0x2D,0x32,0x30,0x1E,0x17,0x0D,0x39,0x39,0x30,0x36,0x32,0x33,0x31,0x32,0x31,0x34,
-0x34,0x35,0x5A,0x17,0x0D,0x31,0x39,0x30,0x36,0x32,0x33,0x31,0x32,0x31,0x34,0x34,
-0x35,0x5A,0x30,0x4E,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,
-0x53,0x31,0x17,0x30,0x15,0x06,0x03,0x55,0x04,0x0A,0x13,0x0E,0x45,0x71,0x75,0x69,
-0x66,0x61,0x78,0x20,0x53,0x65,0x63,0x75,0x72,0x65,0x31,0x26,0x30,0x24,0x06,0x03,
-0x55,0x04,0x0B,0x13,0x1D,0x45,0x71,0x75,0x69,0x66,0x61,0x78,0x20,0x53,0x65,0x63,
-0x75,0x72,0x65,0x20,0x65,0x42,0x75,0x73,0x69,0x6E,0x65,0x73,0x73,0x20,0x43,0x41,
-0x2D,0x32,0x30,0x81,0x9F,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,
-0x01,0x01,0x05,0x00,0x03,0x81,0x8D,0x00,0x30,0x81,0x89,0x02,0x81,0x81,0x00,0xE4,
-0x39,0x39,0x93,0x1E,0x52,0x06,0x1B,0x28,0x36,0xF8,0xB2,0xA3,0x29,0xC5,0xED,0x8E,
-0xB2,0x11,0xBD,0xFE,0xEB,0xE7,0xB4,0x74,0xC2,0x8F,0xFF,0x05,0xE7,0xD9,0x9D,0x06,
-0xBF,0x12,0xC8,0x3F,0x0E,0xF2,0xD6,0xD1,0x24,0xB2,0x11,0xDE,0xD1,0x73,0x09,0x8A,
-0xD4,0xB1,0x2C,0x98,0x09,0x0D,0x1E,0x50,0x46,0xB2,0x83,0xA6,0x45,0x8D,0x62,0x68,
-0xBB,0x85,0x1B,0x20,0x70,0x32,0xAA,0x40,0xCD,0xA6,0x96,0x5F,0xC4,0x71,0x37,0x3F,
-0x04,0xF3,0xB7,0x41,0x24,0x39,0x07,0x1A,0x1E,0x2E,0x61,0x58,0xA0,0x12,0x0B,0xE5,
-0xA5,0xDF,0xC5,0xAB,0xEA,0x37,0x71,0xCC,0x1C,0xC8,0x37,0x3A,0xB9,0x97,0x52,0xA7,
-0xAC,0xC5,0x6A,0x24,0x94,0x4E,0x9C,0x7B,0xCF,0xC0,0x6A,0xD6,0xDF,0x21,0xBD,0x02,
-0x03,0x01,0x00,0x01,0xA3,0x82,0x01,0x09,0x30,0x82,0x01,0x05,0x30,0x70,0x06,0x03,
-0x55,0x1D,0x1F,0x04,0x69,0x30,0x67,0x30,0x65,0xA0,0x63,0xA0,0x61,0xA4,0x5F,0x30,
-0x5D,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,
-0x30,0x15,0x06,0x03,0x55,0x04,0x0A,0x13,0x0E,0x45,0x71,0x75,0x69,0x66,0x61,0x78,
-0x20,0x53,0x65,0x63,0x75,0x72,0x65,0x31,0x26,0x30,0x24,0x06,0x03,0x55,0x04,0x0B,
-0x13,0x1D,0x45,0x71,0x75,0x69,0x66,0x61,0x78,0x20,0x53,0x65,0x63,0x75,0x72,0x65,
-0x20,0x65,0x42,0x75,0x73,0x69,0x6E,0x65,0x73,0x73,0x20,0x43,0x41,0x2D,0x32,0x31,
-0x0D,0x30,0x0B,0x06,0x03,0x55,0x04,0x03,0x13,0x04,0x43,0x52,0x4C,0x31,0x30,0x1A,
-0x06,0x03,0x55,0x1D,0x10,0x04,0x13,0x30,0x11,0x81,0x0F,0x32,0x30,0x31,0x39,0x30,
-0x36,0x32,0x33,0x31,0x32,0x31,0x34,0x34,0x35,0x5A,0x30,0x0B,0x06,0x03,0x55,0x1D,
-0x0F,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,
-0x30,0x16,0x80,0x14,0x50,0x9E,0x0B,0xEA,0xAF,0x5E,0xB9,0x20,0x48,0xA6,0x50,0x6A,
-0xCB,0xFD,0xD8,0x20,0x7A,0xA7,0x82,0x76,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,
-0x16,0x04,0x14,0x50,0x9E,0x0B,0xEA,0xAF,0x5E,0xB9,0x20,0x48,0xA6,0x50,0x6A,0xCB,
-0xFD,0xD8,0x20,0x7A,0xA7,0x82,0x76,0x30,0x0C,0x06,0x03,0x55,0x1D,0x13,0x04,0x05,
-0x30,0x03,0x01,0x01,0xFF,0x30,0x1A,0x06,0x09,0x2A,0x86,0x48,0x86,0xF6,0x7D,0x07,
-0x41,0x00,0x04,0x0D,0x30,0x0B,0x1B,0x05,0x56,0x33,0x2E,0x30,0x63,0x03,0x02,0x06,
-0xC0,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,
-0x03,0x81,0x81,0x00,0x0C,0x86,0x82,0xAD,0xE8,0x4E,0x1A,0xF5,0x8E,0x89,0x27,0xE2,
-0x35,0x58,0x3D,0x29,0xB4,0x07,0x8F,0x36,0x50,0x95,0xBF,0x6E,0xC1,0x9E,0xEB,0xC4,
-0x90,0xB2,0x85,0xA8,0xBB,0xB7,0x42,0xE0,0x0F,0x07,0x39,0xDF,0xFB,0x9E,0x90,0xB2,
-0xD1,0xC1,0x3E,0x53,0x9F,0x03,0x44,0xB0,0x7E,0x4B,0xF4,0x6F,0xE4,0x7C,0x1F,0xE7,
-0xE2,0xB1,0xE4,0xB8,0x9A,0xEF,0xC3,0xBD,0xCE,0xDE,0x0B,0x32,0x34,0xD9,0xDE,0x28,
-0xED,0x33,0x6B,0xC4,0xD4,0xD7,0x3D,0x12,0x58,0xAB,0x7D,0x09,0x2D,0xCB,0x70,0xF5,
-0x13,0x8A,0x94,0xA1,0x27,0xA4,0xD6,0x70,0xC5,0x6D,0x94,0xB5,0xC9,0x7D,0x9D,0xA0,
-0xD2,0xC6,0x08,0x49,0xD9,0x66,0x9B,0xA6,0xD3,0xF4,0x0B,0xDC,0xC5,0x26,0x57,0xE1,
-0x91,0x30,0xEA,0xCD,
-};
-
-
-/* subject:/C=US/O=Equifax Secure Inc./CN=Equifax Secure Global eBusiness CA-1 */
-/* issuer :/C=US/O=Equifax Secure Inc./CN=Equifax Secure Global eBusiness CA-1 */
-
-
-const unsigned char Equifax_Secure_Global_eBusiness_CA_certificate[660]={
-0x30,0x82,0x02,0x90,0x30,0x82,0x01,0xF9,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01,
-0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x04,0x05,0x00,0x30,
-0x5A,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x1C,
-0x30,0x1A,0x06,0x03,0x55,0x04,0x0A,0x13,0x13,0x45,0x71,0x75,0x69,0x66,0x61,0x78,
-0x20,0x53,0x65,0x63,0x75,0x72,0x65,0x20,0x49,0x6E,0x63,0x2E,0x31,0x2D,0x30,0x2B,
-0x06,0x03,0x55,0x04,0x03,0x13,0x24,0x45,0x71,0x75,0x69,0x66,0x61,0x78,0x20,0x53,
-0x65,0x63,0x75,0x72,0x65,0x20,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x65,0x42,0x75,
-0x73,0x69,0x6E,0x65,0x73,0x73,0x20,0x43,0x41,0x2D,0x31,0x30,0x1E,0x17,0x0D,0x39,
-0x39,0x30,0x36,0x32,0x31,0x30,0x34,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32,0x30,
-0x30,0x36,0x32,0x31,0x30,0x34,0x30,0x30,0x30,0x30,0x5A,0x30,0x5A,0x31,0x0B,0x30,
-0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x1C,0x30,0x1A,0x06,0x03,
-0x55,0x04,0x0A,0x13,0x13,0x45,0x71,0x75,0x69,0x66,0x61,0x78,0x20,0x53,0x65,0x63,
-0x75,0x72,0x65,0x20,0x49,0x6E,0x63,0x2E,0x31,0x2D,0x30,0x2B,0x06,0x03,0x55,0x04,
-0x03,0x13,0x24,0x45,0x71,0x75,0x69,0x66,0x61,0x78,0x20,0x53,0x65,0x63,0x75,0x72,
-0x65,0x20,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x65,0x42,0x75,0x73,0x69,0x6E,0x65,
-0x73,0x73,0x20,0x43,0x41,0x2D,0x31,0x30,0x81,0x9F,0x30,0x0D,0x06,0x09,0x2A,0x86,
-0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x81,0x8D,0x00,0x30,0x81,0x89,
-0x02,0x81,0x81,0x00,0xBA,0xE7,0x17,0x90,0x02,0x65,0xB1,0x34,0x55,0x3C,0x49,0xC2,
-0x51,0xD5,0xDF,0xA7,0xD1,0x37,0x8F,0xD1,0xE7,0x81,0x73,0x41,0x52,0x60,0x9B,0x9D,
-0xA1,0x17,0x26,0x78,0xAD,0xC7,0xB1,0xE8,0x26,0x94,0x32,0xB5,0xDE,0x33,0x8D,0x3A,
-0x2F,0xDB,0xF2,0x9A,0x7A,0x5A,0x73,0x98,0xA3,0x5C,0xE9,0xFB,0x8A,0x73,0x1B,0x5C,
-0xE7,0xC3,0xBF,0x80,0x6C,0xCD,0xA9,0xF4,0xD6,0x2B,0xC0,0xF7,0xF9,0x99,0xAA,0x63,
-0xA2,0xB1,0x47,0x02,0x0F,0xD4,0xE4,0x51,0x3A,0x12,0x3C,0x6C,0x8A,0x5A,0x54,0x84,
-0x70,0xDB,0xC1,0xC5,0x90,0xCF,0x72,0x45,0xCB,0xA8,0x59,0xC0,0xCD,0x33,0x9D,0x3F,
-0xA3,0x96,0xEB,0x85,0x33,0x21,0x1C,0x3E,0x1E,0x3E,0x60,0x6E,0x76,0x9C,0x67,0x85,
-0xC5,0xC8,0xC3,0x61,0x02,0x03,0x01,0x00,0x01,0xA3,0x66,0x30,0x64,0x30,0x11,0x06,
-0x09,0x60,0x86,0x48,0x01,0x86,0xF8,0x42,0x01,0x01,0x04,0x04,0x03,0x02,0x00,0x07,
-0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,
-0xFF,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,0xBE,0xA8,
-0xA0,0x74,0x72,0x50,0x6B,0x44,0xB7,0xC9,0x23,0xD8,0xFB,0xA8,0xFF,0xB3,0x57,0x6B,
-0x68,0x6C,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xBE,0xA8,0xA0,
-0x74,0x72,0x50,0x6B,0x44,0xB7,0xC9,0x23,0xD8,0xFB,0xA8,0xFF,0xB3,0x57,0x6B,0x68,
-0x6C,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x04,0x05,0x00,
-0x03,0x81,0x81,0x00,0x30,0xE2,0x01,0x51,0xAA,0xC7,0xEA,0x5F,0xDA,0xB9,0xD0,0x65,
-0x0F,0x30,0xD6,0x3E,0xDA,0x0D,0x14,0x49,0x6E,0x91,0x93,0x27,0x14,0x31,0xEF,0xC4,
-0xF7,0x2D,0x45,0xF8,0xEC,0xC7,0xBF,0xA2,0x41,0x0D,0x23,0xB4,0x92,0xF9,0x19,0x00,
-0x67,0xBD,0x01,0xAF,0xCD,0xE0,0x71,0xFC,0x5A,0xCF,0x64,0xC4,0xE0,0x96,0x98,0xD0,
-0xA3,0x40,0xE2,0x01,0x8A,0xEF,0x27,0x07,0xF1,0x65,0x01,0x8A,0x44,0x2D,0x06,0x65,
-0x75,0x52,0xC0,0x86,0x10,0x20,0x21,0x5F,0x6C,0x6B,0x0F,0x6C,0xAE,0x09,0x1C,0xAF,
-0xF2,0xA2,0x18,0x34,0xC4,0x75,0xA4,0x73,0x1C,0xF1,0x8D,0xDC,0xEF,0xAD,0xF9,0xB3,
-0x76,0xB4,0x92,0xBF,0xDC,0x95,0x10,0x1E,0xBE,0xCB,0xC8,0x3B,0x5A,0x84,0x60,0x19,
-0x56,0x94,0xA9,0x55,
-};
-
-
-/* subject:/C=US/O=GeoTrust Inc./CN=GeoTrust Global CA */
-/* issuer :/C=US/O=GeoTrust Inc./CN=GeoTrust Global CA */
-
-
-const unsigned char GeoTrust_Global_CA_certificate[856]={
-0x30,0x82,0x03,0x54,0x30,0x82,0x02,0x3C,0xA0,0x03,0x02,0x01,0x02,0x02,0x03,0x02,
-0x34,0x56,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,
-0x00,0x30,0x42,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,
-0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x47,0x65,0x6F,0x54,0x72,
-0x75,0x73,0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,0x1B,0x30,0x19,0x06,0x03,0x55,0x04,
-0x03,0x13,0x12,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x47,0x6C,0x6F,0x62,
-0x61,0x6C,0x20,0x43,0x41,0x30,0x1E,0x17,0x0D,0x30,0x32,0x30,0x35,0x32,0x31,0x30,
-0x34,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32,0x32,0x30,0x35,0x32,0x31,0x30,0x34,
-0x30,0x30,0x30,0x30,0x5A,0x30,0x42,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,
-0x13,0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x47,
-0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,0x1B,0x30,0x19,
-0x06,0x03,0x55,0x04,0x03,0x13,0x12,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,
-0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x43,0x41,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,
-0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,
-0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xDA,0xCC,0x18,0x63,0x30,0xFD,
-0xF4,0x17,0x23,0x1A,0x56,0x7E,0x5B,0xDF,0x3C,0x6C,0x38,0xE4,0x71,0xB7,0x78,0x91,
-0xD4,0xBC,0xA1,0xD8,0x4C,0xF8,0xA8,0x43,0xB6,0x03,0xE9,0x4D,0x21,0x07,0x08,0x88,
-0xDA,0x58,0x2F,0x66,0x39,0x29,0xBD,0x05,0x78,0x8B,0x9D,0x38,0xE8,0x05,0xB7,0x6A,
-0x7E,0x71,0xA4,0xE6,0xC4,0x60,0xA6,0xB0,0xEF,0x80,0xE4,0x89,0x28,0x0F,0x9E,0x25,
-0xD6,0xED,0x83,0xF3,0xAD,0xA6,0x91,0xC7,0x98,0xC9,0x42,0x18,0x35,0x14,0x9D,0xAD,
-0x98,0x46,0x92,0x2E,0x4F,0xCA,0xF1,0x87,0x43,0xC1,0x16,0x95,0x57,0x2D,0x50,0xEF,
-0x89,0x2D,0x80,0x7A,0x57,0xAD,0xF2,0xEE,0x5F,0x6B,0xD2,0x00,0x8D,0xB9,0x14,0xF8,
-0x14,0x15,0x35,0xD9,0xC0,0x46,0xA3,0x7B,0x72,0xC8,0x91,0xBF,0xC9,0x55,0x2B,0xCD,
-0xD0,0x97,0x3E,0x9C,0x26,0x64,0xCC,0xDF,0xCE,0x83,0x19,0x71,0xCA,0x4E,0xE6,0xD4,
-0xD5,0x7B,0xA9,0x19,0xCD,0x55,0xDE,0xC8,0xEC,0xD2,0x5E,0x38,0x53,0xE5,0x5C,0x4F,
-0x8C,0x2D,0xFE,0x50,0x23,0x36,0xFC,0x66,0xE6,0xCB,0x8E,0xA4,0x39,0x19,0x00,0xB7,
-0x95,0x02,0x39,0x91,0x0B,0x0E,0xFE,0x38,0x2E,0xD1,0x1D,0x05,0x9A,0xF6,0x4D,0x3E,
-0x6F,0x0F,0x07,0x1D,0xAF,0x2C,0x1E,0x8F,0x60,0x39,0xE2,0xFA,0x36,0x53,0x13,0x39,
-0xD4,0x5E,0x26,0x2B,0xDB,0x3D,0xA8,0x14,0xBD,0x32,0xEB,0x18,0x03,0x28,0x52,0x04,
-0x71,0xE5,0xAB,0x33,0x3D,0xE1,0x38,0xBB,0x07,0x36,0x84,0x62,0x9C,0x79,0xEA,0x16,
-0x30,0xF4,0x5F,0xC0,0x2B,0xE8,0x71,0x6B,0xE4,0xF9,0x02,0x03,0x01,0x00,0x01,0xA3,
-0x53,0x30,0x51,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,
-0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xC0,
-0x7A,0x98,0x68,0x8D,0x89,0xFB,0xAB,0x05,0x64,0x0C,0x11,0x7D,0xAA,0x7D,0x65,0xB8,
-0xCA,0xCC,0x4E,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,
-0xC0,0x7A,0x98,0x68,0x8D,0x89,0xFB,0xAB,0x05,0x64,0x0C,0x11,0x7D,0xAA,0x7D,0x65,
-0xB8,0xCA,0xCC,0x4E,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,
-0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x35,0xE3,0x29,0x6A,0xE5,0x2F,0x5D,0x54,
-0x8E,0x29,0x50,0x94,0x9F,0x99,0x1A,0x14,0xE4,0x8F,0x78,0x2A,0x62,0x94,0xA2,0x27,
-0x67,0x9E,0xD0,0xCF,0x1A,0x5E,0x47,0xE9,0xC1,0xB2,0xA4,0xCF,0xDD,0x41,0x1A,0x05,
-0x4E,0x9B,0x4B,0xEE,0x4A,0x6F,0x55,0x52,0xB3,0x24,0xA1,0x37,0x0A,0xEB,0x64,0x76,
-0x2A,0x2E,0x2C,0xF3,0xFD,0x3B,0x75,0x90,0xBF,0xFA,0x71,0xD8,0xC7,0x3D,0x37,0xD2,
-0xB5,0x05,0x95,0x62,0xB9,0xA6,0xDE,0x89,0x3D,0x36,0x7B,0x38,0x77,0x48,0x97,0xAC,
-0xA6,0x20,0x8F,0x2E,0xA6,0xC9,0x0C,0xC2,0xB2,0x99,0x45,0x00,0xC7,0xCE,0x11,0x51,
-0x22,0x22,0xE0,0xA5,0xEA,0xB6,0x15,0x48,0x09,0x64,0xEA,0x5E,0x4F,0x74,0xF7,0x05,
-0x3E,0xC7,0x8A,0x52,0x0C,0xDB,0x15,0xB4,0xBD,0x6D,0x9B,0xE5,0xC6,0xB1,0x54,0x68,
-0xA9,0xE3,0x69,0x90,0xB6,0x9A,0xA5,0x0F,0xB8,0xB9,0x3F,0x20,0x7D,0xAE,0x4A,0xB5,
-0xB8,0x9C,0xE4,0x1D,0xB6,0xAB,0xE6,0x94,0xA5,0xC1,0xC7,0x83,0xAD,0xDB,0xF5,0x27,
-0x87,0x0E,0x04,0x6C,0xD5,0xFF,0xDD,0xA0,0x5D,0xED,0x87,0x52,0xB7,0x2B,0x15,0x02,
-0xAE,0x39,0xA6,0x6A,0x74,0xE9,0xDA,0xC4,0xE7,0xBC,0x4D,0x34,0x1E,0xA9,0x5C,0x4D,
-0x33,0x5F,0x92,0x09,0x2F,0x88,0x66,0x5D,0x77,0x97,0xC7,0x1D,0x76,0x13,0xA9,0xD5,
-0xE5,0xF1,0x16,0x09,0x11,0x35,0xD5,0xAC,0xDB,0x24,0x71,0x70,0x2C,0x98,0x56,0x0B,
-0xD9,0x17,0xB4,0xD1,0xE3,0x51,0x2B,0x5E,0x75,0xE8,0xD5,0xD0,0xDC,0x4F,0x34,0xED,
-0xC2,0x05,0x66,0x80,0xA1,0xCB,0xE6,0x33,
-};
-
-
-/* subject:/C=US/O=GeoTrust Inc./CN=GeoTrust Global CA 2 */
-/* issuer :/C=US/O=GeoTrust Inc./CN=GeoTrust Global CA 2 */
-
-
-const unsigned char GeoTrust_Global_CA_2_certificate[874]={
-0x30,0x82,0x03,0x66,0x30,0x82,0x02,0x4E,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01,
-0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,
-0x44,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16,
-0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,
-0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,0x1D,0x30,0x1B,0x06,0x03,0x55,0x04,0x03,0x13,
-0x14,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x47,0x6C,0x6F,0x62,0x61,0x6C,
-0x20,0x43,0x41,0x20,0x32,0x30,0x1E,0x17,0x0D,0x30,0x34,0x30,0x33,0x30,0x34,0x30,
-0x35,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x31,0x39,0x30,0x33,0x30,0x34,0x30,0x35,
-0x30,0x30,0x30,0x30,0x5A,0x30,0x44,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,
-0x13,0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x47,
-0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,0x1D,0x30,0x1B,
-0x06,0x03,0x55,0x04,0x03,0x13,0x14,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,
-0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x43,0x41,0x20,0x32,0x30,0x82,0x01,0x22,0x30,
-0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,
-0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xEF,0x3C,0x4D,0x40,
-0x3D,0x10,0xDF,0x3B,0x53,0x00,0xE1,0x67,0xFE,0x94,0x60,0x15,0x3E,0x85,0x88,0xF1,
-0x89,0x0D,0x90,0xC8,0x28,0x23,0x99,0x05,0xE8,0x2B,0x20,0x9D,0xC6,0xF3,0x60,0x46,
-0xD8,0xC1,0xB2,0xD5,0x8C,0x31,0xD9,0xDC,0x20,0x79,0x24,0x81,0xBF,0x35,0x32,0xFC,
-0x63,0x69,0xDB,0xB1,0x2A,0x6B,0xEE,0x21,0x58,0xF2,0x08,0xE9,0x78,0xCB,0x6F,0xCB,
-0xFC,0x16,0x52,0xC8,0x91,0xC4,0xFF,0x3D,0x73,0xDE,0xB1,0x3E,0xA7,0xC2,0x7D,0x66,
-0xC1,0xF5,0x7E,0x52,0x24,0x1A,0xE2,0xD5,0x67,0x91,0xD0,0x82,0x10,0xD7,0x78,0x4B,
-0x4F,0x2B,0x42,0x39,0xBD,0x64,0x2D,0x40,0xA0,0xB0,0x10,0xD3,0x38,0x48,0x46,0x88,
-0xA1,0x0C,0xBB,0x3A,0x33,0x2A,0x62,0x98,0xFB,0x00,0x9D,0x13,0x59,0x7F,0x6F,0x3B,
-0x72,0xAA,0xEE,0xA6,0x0F,0x86,0xF9,0x05,0x61,0xEA,0x67,0x7F,0x0C,0x37,0x96,0x8B,
-0xE6,0x69,0x16,0x47,0x11,0xC2,0x27,0x59,0x03,0xB3,0xA6,0x60,0xC2,0x21,0x40,0x56,
-0xFA,0xA0,0xC7,0x7D,0x3A,0x13,0xE3,0xEC,0x57,0xC7,0xB3,0xD6,0xAE,0x9D,0x89,0x80,
-0xF7,0x01,0xE7,0x2C,0xF6,0x96,0x2B,0x13,0x0D,0x79,0x2C,0xD9,0xC0,0xE4,0x86,0x7B,
-0x4B,0x8C,0x0C,0x72,0x82,0x8A,0xFB,0x17,0xCD,0x00,0x6C,0x3A,0x13,0x3C,0xB0,0x84,
-0x87,0x4B,0x16,0x7A,0x29,0xB2,0x4F,0xDB,0x1D,0xD4,0x0B,0xF3,0x66,0x37,0xBD,0xD8,
-0xF6,0x57,0xBB,0x5E,0x24,0x7A,0xB8,0x3C,0x8B,0xB9,0xFA,0x92,0x1A,0x1A,0x84,0x9E,
-0xD8,0x74,0x8F,0xAA,0x1B,0x7F,0x5E,0xF4,0xFE,0x45,0x22,0x21,0x02,0x03,0x01,0x00,
-0x01,0xA3,0x63,0x30,0x61,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,
-0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,
-0x14,0x71,0x38,0x36,0xF2,0x02,0x31,0x53,0x47,0x2B,0x6E,0xBA,0x65,0x46,0xA9,0x10,
-0x15,0x58,0x20,0x05,0x09,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,
-0x80,0x14,0x71,0x38,0x36,0xF2,0x02,0x31,0x53,0x47,0x2B,0x6E,0xBA,0x65,0x46,0xA9,
-0x10,0x15,0x58,0x20,0x05,0x09,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,
-0x04,0x04,0x03,0x02,0x01,0x86,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,
-0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x03,0xF7,0xB5,0x2B,0xAB,0x5D,
-0x10,0xFC,0x7B,0xB2,0xB2,0x5E,0xAC,0x9B,0x0E,0x7E,0x53,0x78,0x59,0x3E,0x42,0x04,
-0xFE,0x75,0xA3,0xAD,0xAC,0x81,0x4E,0xD7,0x02,0x8B,0x5E,0xC4,0x2D,0xC8,0x52,0x76,
-0xC7,0x2C,0x1F,0xFC,0x81,0x32,0x98,0xD1,0x4B,0xC6,0x92,0x93,0x33,0x35,0x31,0x2F,
-0xFC,0xD8,0x1D,0x44,0xDD,0xE0,0x81,0x7F,0x9D,0xE9,0x8B,0xE1,0x64,0x91,0x62,0x0B,
-0x39,0x08,0x8C,0xAC,0x74,0x9D,0x59,0xD9,0x7A,0x59,0x52,0x97,0x11,0xB9,0x16,0x7B,
-0x6F,0x45,0xD3,0x96,0xD9,0x31,0x7D,0x02,0x36,0x0F,0x9C,0x3B,0x6E,0xCF,0x2C,0x0D,
-0x03,0x46,0x45,0xEB,0xA0,0xF4,0x7F,0x48,0x44,0xC6,0x08,0x40,0xCC,0xDE,0x1B,0x70,
-0xB5,0x29,0xAD,0xBA,0x8B,0x3B,0x34,0x65,0x75,0x1B,0x71,0x21,0x1D,0x2C,0x14,0x0A,
-0xB0,0x96,0x95,0xB8,0xD6,0xEA,0xF2,0x65,0xFB,0x29,0xBA,0x4F,0xEA,0x91,0x93,0x74,
-0x69,0xB6,0xF2,0xFF,0xE1,0x1A,0xD0,0x0C,0xD1,0x76,0x85,0xCB,0x8A,0x25,0xBD,0x97,
-0x5E,0x2C,0x6F,0x15,0x99,0x26,0xE7,0xB6,0x29,0xFF,0x22,0xEC,0xC9,0x02,0xC7,0x56,
-0x00,0xCD,0x49,0xB9,0xB3,0x6C,0x7B,0x53,0x04,0x1A,0xE2,0xA8,0xC9,0xAA,0x12,0x05,
-0x23,0xC2,0xCE,0xE7,0xBB,0x04,0x02,0xCC,0xC0,0x47,0xA2,0xE4,0xC4,0x29,0x2F,0x5B,
-0x45,0x57,0x89,0x51,0xEE,0x3C,0xEB,0x52,0x08,0xFF,0x07,0x35,0x1E,0x9F,0x35,0x6A,
-0x47,0x4A,0x56,0x98,0xD1,0x5A,0x85,0x1F,0x8C,0xF5,0x22,0xBF,0xAB,0xCE,0x83,0xF3,
-0xE2,0x22,0x29,0xAE,0x7D,0x83,0x40,0xA8,0xBA,0x6C,
-};
-
-
-/* subject:/C=US/O=GeoTrust Inc./CN=GeoTrust Primary Certification Authority */
-/* issuer :/C=US/O=GeoTrust Inc./CN=GeoTrust Primary Certification Authority */
-
-
-const unsigned char GeoTrust_Primary_Certification_Authority_certificate[896]={
-0x30,0x82,0x03,0x7C,0x30,0x82,0x02,0x64,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x18,
-0xAC,0xB5,0x6A,0xFD,0x69,0xB6,0x15,0x3A,0x63,0x6C,0xAF,0xDA,0xFA,0xC4,0xA1,0x30,
-0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x58,
-0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16,0x30,
-0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,
-0x20,0x49,0x6E,0x63,0x2E,0x31,0x31,0x30,0x2F,0x06,0x03,0x55,0x04,0x03,0x13,0x28,
-0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,
-0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,
-0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x1E,0x17,0x0D,0x30,0x36,0x31,0x31,
-0x32,0x37,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x36,0x30,0x37,0x31,
-0x36,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x58,0x31,0x0B,0x30,0x09,0x06,0x03,
-0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,
-0x13,0x0D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,
-0x31,0x30,0x2F,0x06,0x03,0x55,0x04,0x03,0x13,0x28,0x47,0x65,0x6F,0x54,0x72,0x75,
-0x73,0x74,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,
-0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,
-0x74,0x79,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,
-0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,
-0x01,0x01,0x00,0xBE,0xB8,0x15,0x7B,0xFF,0xD4,0x7C,0x7D,0x67,0xAD,0x83,0x64,0x7B,
-0xC8,0x42,0x53,0x2D,0xDF,0xF6,0x84,0x08,0x20,0x61,0xD6,0x01,0x59,0x6A,0x9C,0x44,
-0x11,0xAF,0xEF,0x76,0xFD,0x95,0x7E,0xCE,0x61,0x30,0xBB,0x7A,0x83,0x5F,0x02,0xBD,
-0x01,0x66,0xCA,0xEE,0x15,0x8D,0x6F,0xA1,0x30,0x9C,0xBD,0xA1,0x85,0x9E,0x94,0x3A,
-0xF3,0x56,0x88,0x00,0x31,0xCF,0xD8,0xEE,0x6A,0x96,0x02,0xD9,0xED,0x03,0x8C,0xFB,
-0x75,0x6D,0xE7,0xEA,0xB8,0x55,0x16,0x05,0x16,0x9A,0xF4,0xE0,0x5E,0xB1,0x88,0xC0,
-0x64,0x85,0x5C,0x15,0x4D,0x88,0xC7,0xB7,0xBA,0xE0,0x75,0xE9,0xAD,0x05,0x3D,0x9D,
-0xC7,0x89,0x48,0xE0,0xBB,0x28,0xC8,0x03,0xE1,0x30,0x93,0x64,0x5E,0x52,0xC0,0x59,
-0x70,0x22,0x35,0x57,0x88,0x8A,0xF1,0x95,0x0A,0x83,0xD7,0xBC,0x31,0x73,0x01,0x34,
-0xED,0xEF,0x46,0x71,0xE0,0x6B,0x02,0xA8,0x35,0x72,0x6B,0x97,0x9B,0x66,0xE0,0xCB,
-0x1C,0x79,0x5F,0xD8,0x1A,0x04,0x68,0x1E,0x47,0x02,0xE6,0x9D,0x60,0xE2,0x36,0x97,
-0x01,0xDF,0xCE,0x35,0x92,0xDF,0xBE,0x67,0xC7,0x6D,0x77,0x59,0x3B,0x8F,0x9D,0xD6,
-0x90,0x15,0x94,0xBC,0x42,0x34,0x10,0xC1,0x39,0xF9,0xB1,0x27,0x3E,0x7E,0xD6,0x8A,
-0x75,0xC5,0xB2,0xAF,0x96,0xD3,0xA2,0xDE,0x9B,0xE4,0x98,0xBE,0x7D,0xE1,0xE9,0x81,
-0xAD,0xB6,0x6F,0xFC,0xD7,0x0E,0xDA,0xE0,0x34,0xB0,0x0D,0x1A,0x77,0xE7,0xE3,0x08,
-0x98,0xEF,0x58,0xFA,0x9C,0x84,0xB7,0x36,0xAF,0xC2,0xDF,0xAC,0xD2,0xF4,0x10,0x06,
-0x70,0x71,0x35,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x0F,0x06,0x03,
-0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,
-0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x1D,0x06,
-0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x2C,0xD5,0x50,0x41,0x97,0x15,0x8B,0xF0,
-0x8F,0x36,0x61,0x5B,0x4A,0xFB,0x6B,0xD9,0x99,0xC9,0x33,0x92,0x30,0x0D,0x06,0x09,
-0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,
-0x5A,0x70,0x7F,0x2C,0xDD,0xB7,0x34,0x4F,0xF5,0x86,0x51,0xA9,0x26,0xBE,0x4B,0xB8,
-0xAA,0xF1,0x71,0x0D,0xDC,0x61,0xC7,0xA0,0xEA,0x34,0x1E,0x7A,0x77,0x0F,0x04,0x35,
-0xE8,0x27,0x8F,0x6C,0x90,0xBF,0x91,0x16,0x24,0x46,0x3E,0x4A,0x4E,0xCE,0x2B,0x16,
-0xD5,0x0B,0x52,0x1D,0xFC,0x1F,0x67,0xA2,0x02,0x45,0x31,0x4F,0xCE,0xF3,0xFA,0x03,
-0xA7,0x79,0x9D,0x53,0x6A,0xD9,0xDA,0x63,0x3A,0xF8,0x80,0xD7,0xD3,0x99,0xE1,0xA5,
-0xE1,0xBE,0xD4,0x55,0x71,0x98,0x35,0x3A,0xBE,0x93,0xEA,0xAE,0xAD,0x42,0xB2,0x90,
-0x6F,0xE0,0xFC,0x21,0x4D,0x35,0x63,0x33,0x89,0x49,0xD6,0x9B,0x4E,0xCA,0xC7,0xE7,
-0x4E,0x09,0x00,0xF7,0xDA,0xC7,0xEF,0x99,0x62,0x99,0x77,0xB6,0x95,0x22,0x5E,0x8A,
-0xA0,0xAB,0xF4,0xB8,0x78,0x98,0xCA,0x38,0x19,0x99,0xC9,0x72,0x9E,0x78,0xCD,0x4B,
-0xAC,0xAF,0x19,0xA0,0x73,0x12,0x2D,0xFC,0xC2,0x41,0xBA,0x81,0x91,0xDA,0x16,0x5A,
-0x31,0xB7,0xF9,0xB4,0x71,0x80,0x12,0x48,0x99,0x72,0x73,0x5A,0x59,0x53,0xC1,0x63,
-0x52,0x33,0xED,0xA7,0xC9,0xD2,0x39,0x02,0x70,0xFA,0xE0,0xB1,0x42,0x66,0x29,0xAA,
-0x9B,0x51,0xED,0x30,0x54,0x22,0x14,0x5F,0xD9,0xAB,0x1D,0xC1,0xE4,0x94,0xF0,0xF8,
-0xF5,0x2B,0xF7,0xEA,0xCA,0x78,0x46,0xD6,0xB8,0x91,0xFD,0xA6,0x0D,0x2B,0x1A,0x14,
-0x01,0x3E,0x80,0xF0,0x42,0xA0,0x95,0x07,0x5E,0x6D,0xCD,0xCC,0x4B,0xA4,0x45,0x8D,
-0xAB,0x12,0xE8,0xB3,0xDE,0x5A,0xE5,0xA0,0x7C,0xE8,0x0F,0x22,0x1D,0x5A,0xE9,0x59,
-};
-
-
-/* subject:/C=US/O=GeoTrust Inc./OU=(c) 2007 GeoTrust Inc. - For authorized use only/CN=GeoTrust Primary Certification Authority - G2 */
-/* issuer :/C=US/O=GeoTrust Inc./OU=(c) 2007 GeoTrust Inc. - For authorized use only/CN=GeoTrust Primary Certification Authority - G2 */
-
-
-const unsigned char GeoTrust_Primary_Certification_Authority___G2_certificate[690]={
-0x30,0x82,0x02,0xAE,0x30,0x82,0x02,0x35,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x3C,
-0xB2,0xF4,0x48,0x0A,0x00,0xE2,0xFE,0xEB,0x24,0x3B,0x5E,0x60,0x3E,0xC3,0x6B,0x30,
-0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x30,0x81,0x98,0x31,0x0B,
-0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06,
-0x03,0x55,0x04,0x0A,0x13,0x0D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49,
-0x6E,0x63,0x2E,0x31,0x39,0x30,0x37,0x06,0x03,0x55,0x04,0x0B,0x13,0x30,0x28,0x63,
-0x29,0x20,0x32,0x30,0x30,0x37,0x20,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,
-0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,
-0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x36,
-0x30,0x34,0x06,0x03,0x55,0x04,0x03,0x13,0x2D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,
-0x74,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,0x66,
-0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,
-0x79,0x20,0x2D,0x20,0x47,0x32,0x30,0x1E,0x17,0x0D,0x30,0x37,0x31,0x31,0x30,0x35,
-0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x38,0x30,0x31,0x31,0x38,0x32,
-0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0x98,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,
-0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,
-0x0D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,0x39,
-0x30,0x37,0x06,0x03,0x55,0x04,0x0B,0x13,0x30,0x28,0x63,0x29,0x20,0x32,0x30,0x30,
-0x37,0x20,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49,0x6E,0x63,0x2E,0x20,
-0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,
-0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x36,0x30,0x34,0x06,0x03,0x55,
-0x04,0x03,0x13,0x2D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x50,0x72,0x69,
-0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,
-0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20,0x2D,0x20,0x47,
-0x32,0x30,0x76,0x30,0x10,0x06,0x07,0x2A,0x86,0x48,0xCE,0x3D,0x02,0x01,0x06,0x05,
-0x2B,0x81,0x04,0x00,0x22,0x03,0x62,0x00,0x04,0x15,0xB1,0xE8,0xFD,0x03,0x15,0x43,
-0xE5,0xAC,0xEB,0x87,0x37,0x11,0x62,0xEF,0xD2,0x83,0x36,0x52,0x7D,0x45,0x57,0x0B,
-0x4A,0x8D,0x7B,0x54,0x3B,0x3A,0x6E,0x5F,0x15,0x02,0xC0,0x50,0xA6,0xCF,0x25,0x2F,
-0x7D,0xCA,0x48,0xB8,0xC7,0x50,0x63,0x1C,0x2A,0x21,0x08,0x7C,0x9A,0x36,0xD8,0x0B,
-0xFE,0xD1,0x26,0xC5,0x58,0x31,0x30,0x28,0x25,0xF3,0x5D,0x5D,0xA3,0xB8,0xB6,0xA5,
-0xB4,0x92,0xED,0x6C,0x2C,0x9F,0xEB,0xDD,0x43,0x89,0xA2,0x3C,0x4B,0x48,0x91,0x1D,
-0x50,0xEC,0x26,0xDF,0xD6,0x60,0x2E,0xBD,0x21,0xA3,0x42,0x30,0x40,0x30,0x0F,0x06,
-0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,
-0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x1D,
-0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x15,0x5F,0x35,0x57,0x51,0x55,0xFB,
-0x25,0xB2,0xAD,0x03,0x69,0xFC,0x01,0xA3,0xFA,0xBE,0x11,0x55,0xD5,0x30,0x0A,0x06,
-0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x03,0x67,0x00,0x30,0x64,0x02,0x30,
-0x64,0x96,0x59,0xA6,0xE8,0x09,0xDE,0x8B,0xBA,0xFA,0x5A,0x88,0x88,0xF0,0x1F,0x91,
-0xD3,0x46,0xA8,0xF2,0x4A,0x4C,0x02,0x63,0xFB,0x6C,0x5F,0x38,0xDB,0x2E,0x41,0x93,
-0xA9,0x0E,0xE6,0x9D,0xDC,0x31,0x1C,0xB2,0xA0,0xA7,0x18,0x1C,0x79,0xE1,0xC7,0x36,
-0x02,0x30,0x3A,0x56,0xAF,0x9A,0x74,0x6C,0xF6,0xFB,0x83,0xE0,0x33,0xD3,0x08,0x5F,
-0xA1,0x9C,0xC2,0x5B,0x9F,0x46,0xD6,0xB6,0xCB,0x91,0x06,0x63,0xA2,0x06,0xE7,0x33,
-0xAC,0x3E,0xA8,0x81,0x12,0xD0,0xCB,0xBA,0xD0,0x92,0x0B,0xB6,0x9E,0x96,0xAA,0x04,
-0x0F,0x8A,
-};
-
-
-/* subject:/C=US/O=GeoTrust Inc./OU=(c) 2008 GeoTrust Inc. - For authorized use only/CN=GeoTrust Primary Certification Authority - G3 */
-/* issuer :/C=US/O=GeoTrust Inc./OU=(c) 2008 GeoTrust Inc. - For authorized use only/CN=GeoTrust Primary Certification Authority - G3 */
-
-
-const unsigned char GeoTrust_Primary_Certification_Authority___G3_certificate[1026]={
-0x30,0x82,0x03,0xFE,0x30,0x82,0x02,0xE6,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x15,
-0xAC,0x6E,0x94,0x19,0xB2,0x79,0x4B,0x41,0xF6,0x27,0xA9,0xC3,0x18,0x0F,0x1F,0x30,
-0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x30,0x81,
-0x98,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16,
-0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,
-0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,0x39,0x30,0x37,0x06,0x03,0x55,0x04,0x0B,0x13,
-0x30,0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x38,0x20,0x47,0x65,0x6F,0x54,0x72,0x75,
-0x73,0x74,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,
-0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,
-0x79,0x31,0x36,0x30,0x34,0x06,0x03,0x55,0x04,0x03,0x13,0x2D,0x47,0x65,0x6F,0x54,
-0x72,0x75,0x73,0x74,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,
-0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,
-0x72,0x69,0x74,0x79,0x20,0x2D,0x20,0x47,0x33,0x30,0x1E,0x17,0x0D,0x30,0x38,0x30,
-0x34,0x30,0x32,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x37,0x31,0x32,
-0x30,0x31,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0x98,0x31,0x0B,0x30,0x09,
-0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,
-0x04,0x0A,0x13,0x0D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49,0x6E,0x63,
-0x2E,0x31,0x39,0x30,0x37,0x06,0x03,0x55,0x04,0x0B,0x13,0x30,0x28,0x63,0x29,0x20,
-0x32,0x30,0x30,0x38,0x20,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49,0x6E,
-0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,
-0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x36,0x30,0x34,
-0x06,0x03,0x55,0x04,0x03,0x13,0x2D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,
-0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,
-0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20,
-0x2D,0x20,0x47,0x33,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,
-0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,
-0x02,0x82,0x01,0x01,0x00,0xDC,0xE2,0x5E,0x62,0x58,0x1D,0x33,0x57,0x39,0x32,0x33,
-0xFA,0xEB,0xCB,0x87,0x8C,0xA7,0xD4,0x4A,0xDD,0x06,0x88,0xEA,0x64,0x8E,0x31,0x98,
-0xA5,0x38,0x90,0x1E,0x98,0xCF,0x2E,0x63,0x2B,0xF0,0x46,0xBC,0x44,0xB2,0x89,0xA1,
-0xC0,0x28,0x0C,0x49,0x70,0x21,0x95,0x9F,0x64,0xC0,0xA6,0x93,0x12,0x02,0x65,0x26,
-0x86,0xC6,0xA5,0x89,0xF0,0xFA,0xD7,0x84,0xA0,0x70,0xAF,0x4F,0x1A,0x97,0x3F,0x06,
-0x44,0xD5,0xC9,0xEB,0x72,0x10,0x7D,0xE4,0x31,0x28,0xFB,0x1C,0x61,0xE6,0x28,0x07,
-0x44,0x73,0x92,0x22,0x69,0xA7,0x03,0x88,0x6C,0x9D,0x63,0xC8,0x52,0xDA,0x98,0x27,
-0xE7,0x08,0x4C,0x70,0x3E,0xB4,0xC9,0x12,0xC1,0xC5,0x67,0x83,0x5D,0x33,0xF3,0x03,
-0x11,0xEC,0x6A,0xD0,0x53,0xE2,0xD1,0xBA,0x36,0x60,0x94,0x80,0xBB,0x61,0x63,0x6C,
-0x5B,0x17,0x7E,0xDF,0x40,0x94,0x1E,0xAB,0x0D,0xC2,0x21,0x28,0x70,0x88,0xFF,0xD6,
-0x26,0x6C,0x6C,0x60,0x04,0x25,0x4E,0x55,0x7E,0x7D,0xEF,0xBF,0x94,0x48,0xDE,0xB7,
-0x1D,0xDD,0x70,0x8D,0x05,0x5F,0x88,0xA5,0x9B,0xF2,0xC2,0xEE,0xEA,0xD1,0x40,0x41,
-0x6D,0x62,0x38,0x1D,0x56,0x06,0xC5,0x03,0x47,0x51,0x20,0x19,0xFC,0x7B,0x10,0x0B,
-0x0E,0x62,0xAE,0x76,0x55,0xBF,0x5F,0x77,0xBE,0x3E,0x49,0x01,0x53,0x3D,0x98,0x25,
-0x03,0x76,0x24,0x5A,0x1D,0xB4,0xDB,0x89,0xEA,0x79,0xE5,0xB6,0xB3,0x3B,0x3F,0xBA,
-0x4C,0x28,0x41,0x7F,0x06,0xAC,0x6A,0x8E,0xC1,0xD0,0xF6,0x05,0x1D,0x7D,0xE6,0x42,
-0x86,0xE3,0xA5,0xD5,0x47,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x0F,
-0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,
-0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,
-0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xC4,0x79,0xCA,0x8E,0xA1,0x4E,
-0x03,0x1D,0x1C,0xDC,0x6B,0xDB,0x31,0x5B,0x94,0x3E,0x3F,0x30,0x7F,0x2D,0x30,0x0D,
-0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x03,0x82,0x01,
-0x01,0x00,0x2D,0xC5,0x13,0xCF,0x56,0x80,0x7B,0x7A,0x78,0xBD,0x9F,0xAE,0x2C,0x99,
-0xE7,0xEF,0xDA,0xDF,0x94,0x5E,0x09,0x69,0xA7,0xE7,0x6E,0x68,0x8C,0xBD,0x72,0xBE,
-0x47,0xA9,0x0E,0x97,0x12,0xB8,0x4A,0xF1,0x64,0xD3,0x39,0xDF,0x25,0x34,0xD4,0xC1,
-0xCD,0x4E,0x81,0xF0,0x0F,0x04,0xC4,0x24,0xB3,0x34,0x96,0xC6,0xA6,0xAA,0x30,0xDF,
-0x68,0x61,0x73,0xD7,0xF9,0x8E,0x85,0x89,0xEF,0x0E,0x5E,0x95,0x28,0x4A,0x2A,0x27,
-0x8F,0x10,0x8E,0x2E,0x7C,0x86,0xC4,0x02,0x9E,0xDA,0x0C,0x77,0x65,0x0E,0x44,0x0D,
-0x92,0xFD,0xFD,0xB3,0x16,0x36,0xFA,0x11,0x0D,0x1D,0x8C,0x0E,0x07,0x89,0x6A,0x29,
-0x56,0xF7,0x72,0xF4,0xDD,0x15,0x9C,0x77,0x35,0x66,0x57,0xAB,0x13,0x53,0xD8,0x8E,
-0xC1,0x40,0xC5,0xD7,0x13,0x16,0x5A,0x72,0xC7,0xB7,0x69,0x01,0xC4,0x7A,0xB1,0x83,
-0x01,0x68,0x7D,0x8D,0x41,0xA1,0x94,0x18,0xC1,0x25,0x5C,0xFC,0xF0,0xFE,0x83,0x02,
-0x87,0x7C,0x0D,0x0D,0xCF,0x2E,0x08,0x5C,0x4A,0x40,0x0D,0x3E,0xEC,0x81,0x61,0xE6,
-0x24,0xDB,0xCA,0xE0,0x0E,0x2D,0x07,0xB2,0x3E,0x56,0xDC,0x8D,0xF5,0x41,0x85,0x07,
-0x48,0x9B,0x0C,0x0B,0xCB,0x49,0x3F,0x7D,0xEC,0xB7,0xFD,0xCB,0x8D,0x67,0x89,0x1A,
-0xAB,0xED,0xBB,0x1E,0xA3,0x00,0x08,0x08,0x17,0x2A,0x82,0x5C,0x31,0x5D,0x46,0x8A,
-0x2D,0x0F,0x86,0x9B,0x74,0xD9,0x45,0xFB,0xD4,0x40,0xB1,0x7A,0xAA,0x68,0x2D,0x86,
-0xB2,0x99,0x22,0xE1,0xC1,0x2B,0xC7,0x9C,0xF8,0xF3,0x5F,0xA8,0x82,0x12,0xEB,0x19,
-0x11,0x2D,
-};
-
-
/* subject:/C=US/O=GeoTrust Inc./CN=GeoTrust Universal CA */
/* issuer :/C=US/O=GeoTrust Inc./CN=GeoTrust Universal CA */
@@ -2289,366 +2656,349 @@ const unsigned char GeoTrust_Universal_CA_certificate[1388]={
};
-/* subject:/C=US/O=GeoTrust Inc./CN=GeoTrust Universal CA 2 */
-/* issuer :/C=US/O=GeoTrust Inc./CN=GeoTrust Universal CA 2 */
+/* subject:/C=GB/ST=Greater Manchester/L=Salford/O=COMODO CA Limited/CN=COMODO ECC Certification Authority */
+/* issuer :/C=GB/ST=Greater Manchester/L=Salford/O=COMODO CA Limited/CN=COMODO ECC Certification Authority */
-const unsigned char GeoTrust_Universal_CA_2_certificate[1392]={
-0x30,0x82,0x05,0x6C,0x30,0x82,0x03,0x54,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01,
-0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,
-0x47,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16,
-0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,
-0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,0x20,0x30,0x1E,0x06,0x03,0x55,0x04,0x03,0x13,
-0x17,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x55,0x6E,0x69,0x76,0x65,0x72,
-0x73,0x61,0x6C,0x20,0x43,0x41,0x20,0x32,0x30,0x1E,0x17,0x0D,0x30,0x34,0x30,0x33,
-0x30,0x34,0x30,0x35,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32,0x39,0x30,0x33,0x30,
-0x34,0x30,0x35,0x30,0x30,0x30,0x30,0x5A,0x30,0x47,0x31,0x0B,0x30,0x09,0x06,0x03,
-0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,
-0x13,0x0D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,
-0x20,0x30,0x1E,0x06,0x03,0x55,0x04,0x03,0x13,0x17,0x47,0x65,0x6F,0x54,0x72,0x75,
-0x73,0x74,0x20,0x55,0x6E,0x69,0x76,0x65,0x72,0x73,0x61,0x6C,0x20,0x43,0x41,0x20,
-0x32,0x30,0x82,0x02,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,
-0x01,0x01,0x05,0x00,0x03,0x82,0x02,0x0F,0x00,0x30,0x82,0x02,0x0A,0x02,0x82,0x02,
-0x01,0x00,0xB3,0x54,0x52,0xC1,0xC9,0x3E,0xF2,0xD9,0xDC,0xB1,0x53,0x1A,0x59,0x29,
-0xE7,0xB1,0xC3,0x45,0x28,0xE5,0xD7,0xD1,0xED,0xC5,0xC5,0x4B,0xA1,0xAA,0x74,0x7B,
-0x57,0xAF,0x4A,0x26,0xFC,0xD8,0xF5,0x5E,0xA7,0x6E,0x19,0xDB,0x74,0x0C,0x4F,0x35,
-0x5B,0x32,0x0B,0x01,0xE3,0xDB,0xEB,0x7A,0x77,0x35,0xEA,0xAA,0x5A,0xE0,0xD6,0xE8,
-0xA1,0x57,0x94,0xF0,0x90,0xA3,0x74,0x56,0x94,0x44,0x30,0x03,0x1E,0x5C,0x4E,0x2B,
-0x85,0x26,0x74,0x82,0x7A,0x0C,0x76,0xA0,0x6F,0x4D,0xCE,0x41,0x2D,0xA0,0x15,0x06,
-0x14,0x5F,0xB7,0x42,0xCD,0x7B,0x8F,0x58,0x61,0x34,0xDC,0x2A,0x08,0xF9,0x2E,0xC3,
-0x01,0xA6,0x22,0x44,0x1C,0x4C,0x07,0x82,0xE6,0x5B,0xCE,0xD0,0x4A,0x7C,0x04,0xD3,
-0x19,0x73,0x27,0xF0,0xAA,0x98,0x7F,0x2E,0xAF,0x4E,0xEB,0x87,0x1E,0x24,0x77,0x6A,
-0x5D,0xB6,0xE8,0x5B,0x45,0xBA,0xDC,0xC3,0xA1,0x05,0x6F,0x56,0x8E,0x8F,0x10,0x26,
-0xA5,0x49,0xC3,0x2E,0xD7,0x41,0x87,0x22,0xE0,0x4F,0x86,0xCA,0x60,0xB5,0xEA,0xA1,
-0x63,0xC0,0x01,0x97,0x10,0x79,0xBD,0x00,0x3C,0x12,0x6D,0x2B,0x15,0xB1,0xAC,0x4B,
-0xB1,0xEE,0x18,0xB9,0x4E,0x96,0xDC,0xDC,0x76,0xFF,0x3B,0xBE,0xCF,0x5F,0x03,0xC0,
-0xFC,0x3B,0xE8,0xBE,0x46,0x1B,0xFF,0xDA,0x40,0xC2,0x52,0xF7,0xFE,0xE3,0x3A,0xF7,
-0x6A,0x77,0x35,0xD0,0xDA,0x8D,0xEB,0x5E,0x18,0x6A,0x31,0xC7,0x1E,0xBA,0x3C,0x1B,
-0x28,0xD6,0x6B,0x54,0xC6,0xAA,0x5B,0xD7,0xA2,0x2C,0x1B,0x19,0xCC,0xA2,0x02,0xF6,
-0x9B,0x59,0xBD,0x37,0x6B,0x86,0xB5,0x6D,0x82,0xBA,0xD8,0xEA,0xC9,0x56,0xBC,0xA9,
-0x36,0x58,0xFD,0x3E,0x19,0xF3,0xED,0x0C,0x26,0xA9,0x93,0x38,0xF8,0x4F,0xC1,0x5D,
-0x22,0x06,0xD0,0x97,0xEA,0xE1,0xAD,0xC6,0x55,0xE0,0x81,0x2B,0x28,0x83,0x3A,0xFA,
-0xF4,0x7B,0x21,0x51,0x00,0xBE,0x52,0x38,0xCE,0xCD,0x66,0x79,0xA8,0xF4,0x81,0x56,
-0xE2,0xD0,0x83,0x09,0x47,0x51,0x5B,0x50,0x6A,0xCF,0xDB,0x48,0x1A,0x5D,0x3E,0xF7,
-0xCB,0xF6,0x65,0xF7,0x6C,0xF1,0x95,0xF8,0x02,0x3B,0x32,0x56,0x82,0x39,0x7A,0x5B,
-0xBD,0x2F,0x89,0x1B,0xBF,0xA1,0xB4,0xE8,0xFF,0x7F,0x8D,0x8C,0xDF,0x03,0xF1,0x60,
-0x4E,0x58,0x11,0x4C,0xEB,0xA3,0x3F,0x10,0x2B,0x83,0x9A,0x01,0x73,0xD9,0x94,0x6D,
-0x84,0x00,0x27,0x66,0xAC,0xF0,0x70,0x40,0x09,0x42,0x92,0xAD,0x4F,0x93,0x0D,0x61,
-0x09,0x51,0x24,0xD8,0x92,0xD5,0x0B,0x94,0x61,0xB2,0x87,0xB2,0xED,0xFF,0x9A,0x35,
-0xFF,0x85,0x54,0xCA,0xED,0x44,0x43,0xAC,0x1B,0x3C,0x16,0x6B,0x48,0x4A,0x0A,0x1C,
-0x40,0x88,0x1F,0x92,0xC2,0x0B,0x00,0x05,0xFF,0xF2,0xC8,0x02,0x4A,0xA4,0xAA,0xA9,
-0xCC,0x99,0x96,0x9C,0x2F,0x58,0xE0,0x7D,0xE1,0xBE,0xBB,0x07,0xDC,0x5F,0x04,0x72,
-0x5C,0x31,0x34,0xC3,0xEC,0x5F,0x2D,0xE0,0x3D,0x64,0x90,0x22,0xE6,0xD1,0xEC,0xB8,
-0x2E,0xDD,0x59,0xAE,0xD9,0xA1,0x37,0xBF,0x54,0x35,0xDC,0x73,0x32,0x4F,0x8C,0x04,
-0x1E,0x33,0xB2,0xC9,0x46,0xF1,0xD8,0x5C,0xC8,0x55,0x50,0xC9,0x68,0xBD,0xA8,0xBA,
-0x36,0x09,0x02,0x03,0x01,0x00,0x01,0xA3,0x63,0x30,0x61,0x30,0x0F,0x06,0x03,0x55,
-0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,
-0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x76,0xF3,0x55,0xE1,0xFA,0xA4,0x36,0xFB,0xF0,
-0x9F,0x5C,0x62,0x71,0xED,0x3C,0xF4,0x47,0x38,0x10,0x2B,0x30,0x1F,0x06,0x03,0x55,
-0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,0x76,0xF3,0x55,0xE1,0xFA,0xA4,0x36,0xFB,
-0xF0,0x9F,0x5C,0x62,0x71,0xED,0x3C,0xF4,0x47,0x38,0x10,0x2B,0x30,0x0E,0x06,0x03,
-0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x86,0x30,0x0D,0x06,0x09,
-0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x02,0x01,0x00,
-0x66,0xC1,0xC6,0x23,0xF3,0xD9,0xE0,0x2E,0x6E,0x5F,0xE8,0xCF,0xAE,0xB0,0xB0,0x25,
-0x4D,0x2B,0xF8,0x3B,0x58,0x9B,0x40,0x24,0x37,0x5A,0xCB,0xAB,0x16,0x49,0xFF,0xB3,
-0x75,0x79,0x33,0xA1,0x2F,0x6D,0x70,0x17,0x34,0x91,0xFE,0x67,0x7E,0x8F,0xEC,0x9B,
-0xE5,0x5E,0x82,0xA9,0x55,0x1F,0x2F,0xDC,0xD4,0x51,0x07,0x12,0xFE,0xAC,0x16,0x3E,
-0x2C,0x35,0xC6,0x63,0xFC,0xDC,0x10,0xEB,0x0D,0xA3,0xAA,0xD0,0x7C,0xCC,0xD1,0xD0,
-0x2F,0x51,0x2E,0xC4,0x14,0x5A,0xDE,0xE8,0x19,0xE1,0x3E,0xC6,0xCC,0xA4,0x29,0xE7,
-0x2E,0x84,0xAA,0x06,0x30,0x78,0x76,0x54,0x73,0x28,0x98,0x59,0x38,0xE0,0x00,0x0D,
-0x62,0xD3,0x42,0x7D,0x21,0x9F,0xAE,0x3D,0x3A,0x8C,0xD5,0xFA,0x77,0x0D,0x18,0x2B,
-0x16,0x0E,0x5F,0x36,0xE1,0xFC,0x2A,0xB5,0x30,0x24,0xCF,0xE0,0x63,0x0C,0x7B,0x58,
-0x1A,0xFE,0x99,0xBA,0x42,0x12,0xB1,0x91,0xF4,0x7C,0x68,0xE2,0xC8,0xE8,0xAF,0x2C,
-0xEA,0xC9,0x7E,0xAE,0xBB,0x2A,0x3D,0x0D,0x15,0xDC,0x34,0x95,0xB6,0x18,0x74,0xA8,
-0x6A,0x0F,0xC7,0xB4,0xF4,0x13,0xC4,0xE4,0x5B,0xED,0x0A,0xD2,0xA4,0x97,0x4C,0x2A,
-0xED,0x2F,0x6C,0x12,0x89,0x3D,0xF1,0x27,0x70,0xAA,0x6A,0x03,0x52,0x21,0x9F,0x40,
-0xA8,0x67,0x50,0xF2,0xF3,0x5A,0x1F,0xDF,0xDF,0x23,0xF6,0xDC,0x78,0x4E,0xE6,0x98,
-0x4F,0x55,0x3A,0x53,0xE3,0xEF,0xF2,0xF4,0x9F,0xC7,0x7C,0xD8,0x58,0xAF,0x29,0x22,
-0x97,0xB8,0xE0,0xBD,0x91,0x2E,0xB0,0x76,0xEC,0x57,0x11,0xCF,0xEF,0x29,0x44,0xF3,
-0xE9,0x85,0x7A,0x60,0x63,0xE4,0x5D,0x33,0x89,0x17,0xD9,0x31,0xAA,0xDA,0xD6,0xF3,
-0x18,0x35,0x72,0xCF,0x87,0x2B,0x2F,0x63,0x23,0x84,0x5D,0x84,0x8C,0x3F,0x57,0xA0,
-0x88,0xFC,0x99,0x91,0x28,0x26,0x69,0x99,0xD4,0x8F,0x97,0x44,0xBE,0x8E,0xD5,0x48,
-0xB1,0xA4,0x28,0x29,0xF1,0x15,0xB4,0xE1,0xE5,0x9E,0xDD,0xF8,0x8F,0xA6,0x6F,0x26,
-0xD7,0x09,0x3C,0x3A,0x1C,0x11,0x0E,0xA6,0x6C,0x37,0xF7,0xAD,0x44,0x87,0x2C,0x28,
-0xC7,0xD8,0x74,0x82,0xB3,0xD0,0x6F,0x4A,0x57,0xBB,0x35,0x29,0x27,0xA0,0x8B,0xE8,
-0x21,0xA7,0x87,0x64,0x36,0x5D,0xCC,0xD8,0x16,0xAC,0xC7,0xB2,0x27,0x40,0x92,0x55,
-0x38,0x28,0x8D,0x51,0x6E,0xDD,0x14,0x67,0x53,0x6C,0x71,0x5C,0x26,0x84,0x4D,0x75,
-0x5A,0xB6,0x7E,0x60,0x56,0xA9,0x4D,0xAD,0xFB,0x9B,0x1E,0x97,0xF3,0x0D,0xD9,0xD2,
-0x97,0x54,0x77,0xDA,0x3D,0x12,0xB7,0xE0,0x1E,0xEF,0x08,0x06,0xAC,0xF9,0x85,0x87,
-0xE9,0xA2,0xDC,0xAF,0x7E,0x18,0x12,0x83,0xFD,0x56,0x17,0x41,0x2E,0xD5,0x29,0x82,
-0x7D,0x99,0xF4,0x31,0xF6,0x71,0xA9,0xCF,0x2C,0x01,0x27,0xA5,0x05,0xB9,0xAA,0xB2,
-0x48,0x4E,0x2A,0xEF,0x9F,0x93,0x52,0x51,0x95,0x3C,0x52,0x73,0x8E,0x56,0x4C,0x17,
-0x40,0xC0,0x09,0x28,0xE4,0x8B,0x6A,0x48,0x53,0xDB,0xEC,0xCD,0x55,0x55,0xF1,0xC6,
-0xF8,0xE9,0xA2,0x2C,0x4C,0xA6,0xD1,0x26,0x5F,0x7E,0xAF,0x5A,0x4C,0xDA,0x1F,0xA6,
-0xF2,0x1C,0x2C,0x7E,0xAE,0x02,0x16,0xD2,0x56,0xD0,0x2F,0x57,0x53,0x47,0xE8,0x92,
+const unsigned char COMODO_ECC_Certification_Authority_certificate[653]={
+0x30,0x82,0x02,0x89,0x30,0x82,0x02,0x0F,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x1F,
+0x47,0xAF,0xAA,0x62,0x00,0x70,0x50,0x54,0x4C,0x01,0x9E,0x9B,0x63,0x99,0x2A,0x30,
+0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x30,0x81,0x85,0x31,0x0B,
+0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x47,0x42,0x31,0x1B,0x30,0x19,0x06,
+0x03,0x55,0x04,0x08,0x13,0x12,0x47,0x72,0x65,0x61,0x74,0x65,0x72,0x20,0x4D,0x61,
+0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04,
+0x07,0x13,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,0x30,0x18,0x06,0x03,
+0x55,0x04,0x0A,0x13,0x11,0x43,0x4F,0x4D,0x4F,0x44,0x4F,0x20,0x43,0x41,0x20,0x4C,
+0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x2B,0x30,0x29,0x06,0x03,0x55,0x04,0x03,0x13,
+0x22,0x43,0x4F,0x4D,0x4F,0x44,0x4F,0x20,0x45,0x43,0x43,0x20,0x43,0x65,0x72,0x74,
+0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,
+0x69,0x74,0x79,0x30,0x1E,0x17,0x0D,0x30,0x38,0x30,0x33,0x30,0x36,0x30,0x30,0x30,
+0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x38,0x30,0x31,0x31,0x38,0x32,0x33,0x35,0x39,
+0x35,0x39,0x5A,0x30,0x81,0x85,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,
+0x02,0x47,0x42,0x31,0x1B,0x30,0x19,0x06,0x03,0x55,0x04,0x08,0x13,0x12,0x47,0x72,
+0x65,0x61,0x74,0x65,0x72,0x20,0x4D,0x61,0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,
+0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x07,0x13,0x07,0x53,0x61,0x6C,0x66,0x6F,
+0x72,0x64,0x31,0x1A,0x30,0x18,0x06,0x03,0x55,0x04,0x0A,0x13,0x11,0x43,0x4F,0x4D,
+0x4F,0x44,0x4F,0x20,0x43,0x41,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x2B,
+0x30,0x29,0x06,0x03,0x55,0x04,0x03,0x13,0x22,0x43,0x4F,0x4D,0x4F,0x44,0x4F,0x20,
+0x45,0x43,0x43,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,
+0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x76,0x30,0x10,0x06,
+0x07,0x2A,0x86,0x48,0xCE,0x3D,0x02,0x01,0x06,0x05,0x2B,0x81,0x04,0x00,0x22,0x03,
+0x62,0x00,0x04,0x03,0x47,0x7B,0x2F,0x75,0xC9,0x82,0x15,0x85,0xFB,0x75,0xE4,0x91,
+0x16,0xD4,0xAB,0x62,0x99,0xF5,0x3E,0x52,0x0B,0x06,0xCE,0x41,0x00,0x7F,0x97,0xE1,
+0x0A,0x24,0x3C,0x1D,0x01,0x04,0xEE,0x3D,0xD2,0x8D,0x09,0x97,0x0C,0xE0,0x75,0xE4,
+0xFA,0xFB,0x77,0x8A,0x2A,0xF5,0x03,0x60,0x4B,0x36,0x8B,0x16,0x23,0x16,0xAD,0x09,
+0x71,0xF4,0x4A,0xF4,0x28,0x50,0xB4,0xFE,0x88,0x1C,0x6E,0x3F,0x6C,0x2F,0x2F,0x09,
+0x59,0x5B,0xA5,0x5B,0x0B,0x33,0x99,0xE2,0xC3,0x3D,0x89,0xF9,0x6A,0x2C,0xEF,0xB2,
+0xD3,0x06,0xE9,0xA3,0x42,0x30,0x40,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,
+0x04,0x14,0x75,0x71,0xA7,0x19,0x48,0x19,0xBC,0x9D,0x9D,0xEA,0x41,0x47,0xDF,0x94,
+0xC4,0x48,0x77,0x99,0xD3,0x79,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,
+0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,
+0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,
+0x04,0x03,0x03,0x03,0x68,0x00,0x30,0x65,0x02,0x31,0x00,0xEF,0x03,0x5B,0x7A,0xAC,
+0xB7,0x78,0x0A,0x72,0xB7,0x88,0xDF,0xFF,0xB5,0x46,0x14,0x09,0x0A,0xFA,0xA0,0xE6,
+0x7D,0x08,0xC6,0x1A,0x87,0xBD,0x18,0xA8,0x73,0xBD,0x26,0xCA,0x60,0x0C,0x9D,0xCE,
+0x99,0x9F,0xCF,0x5C,0x0F,0x30,0xE1,0xBE,0x14,0x31,0xEA,0x02,0x30,0x14,0xF4,0x93,
+0x3C,0x49,0xA7,0x33,0x7A,0x90,0x46,0x47,0xB3,0x63,0x7D,0x13,0x9B,0x4E,0xB7,0x6F,
+0x18,0x37,0x80,0x53,0xFE,0xDD,0x20,0xE0,0x35,0x9A,0x36,0xD1,0xC7,0x01,0xB9,0xE6,
+0xDC,0xDD,0xF3,0xFF,0x1D,0x2C,0x3A,0x16,0x57,0xD9,0x92,0x39,0xD6,
};
-/* subject:/C=BE/O=GlobalSign nv-sa/OU=Root CA/CN=GlobalSign Root CA */
-/* issuer :/C=BE/O=GlobalSign nv-sa/OU=Root CA/CN=GlobalSign Root CA */
+/* subject:/C=US/O=Entrust, Inc./OU=See www.entrust.net/legal-terms/OU=(c) 2009 Entrust, Inc. - for authorized use only/CN=Entrust Root Certification Authority - G2 */
+/* issuer :/C=US/O=Entrust, Inc./OU=See www.entrust.net/legal-terms/OU=(c) 2009 Entrust, Inc. - for authorized use only/CN=Entrust Root Certification Authority - G2 */
-const unsigned char GlobalSign_Root_CA_certificate[889]={
-0x30,0x82,0x03,0x75,0x30,0x82,0x02,0x5D,0xA0,0x03,0x02,0x01,0x02,0x02,0x0B,0x04,
-0x00,0x00,0x00,0x00,0x01,0x15,0x4B,0x5A,0xC3,0x94,0x30,0x0D,0x06,0x09,0x2A,0x86,
-0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x57,0x31,0x0B,0x30,0x09,0x06,
-0x03,0x55,0x04,0x06,0x13,0x02,0x42,0x45,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,
-0x0A,0x13,0x10,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x20,0x6E,0x76,
-0x2D,0x73,0x61,0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x0B,0x13,0x07,0x52,0x6F,
-0x6F,0x74,0x20,0x43,0x41,0x31,0x1B,0x30,0x19,0x06,0x03,0x55,0x04,0x03,0x13,0x12,
-0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x20,0x52,0x6F,0x6F,0x74,0x20,
-0x43,0x41,0x30,0x1E,0x17,0x0D,0x39,0x38,0x30,0x39,0x30,0x31,0x31,0x32,0x30,0x30,
-0x30,0x30,0x5A,0x17,0x0D,0x32,0x38,0x30,0x31,0x32,0x38,0x31,0x32,0x30,0x30,0x30,
-0x30,0x5A,0x30,0x57,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x42,
-0x45,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0A,0x13,0x10,0x47,0x6C,0x6F,0x62,
-0x61,0x6C,0x53,0x69,0x67,0x6E,0x20,0x6E,0x76,0x2D,0x73,0x61,0x31,0x10,0x30,0x0E,
-0x06,0x03,0x55,0x04,0x0B,0x13,0x07,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x31,0x1B,
-0x30,0x19,0x06,0x03,0x55,0x04,0x03,0x13,0x12,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,
-0x69,0x67,0x6E,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x30,0x82,0x01,0x22,0x30,
-0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,
-0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xDA,0x0E,0xE6,0x99,
-0x8D,0xCE,0xA3,0xE3,0x4F,0x8A,0x7E,0xFB,0xF1,0x8B,0x83,0x25,0x6B,0xEA,0x48,0x1F,
-0xF1,0x2A,0xB0,0xB9,0x95,0x11,0x04,0xBD,0xF0,0x63,0xD1,0xE2,0x67,0x66,0xCF,0x1C,
-0xDD,0xCF,0x1B,0x48,0x2B,0xEE,0x8D,0x89,0x8E,0x9A,0xAF,0x29,0x80,0x65,0xAB,0xE9,
-0xC7,0x2D,0x12,0xCB,0xAB,0x1C,0x4C,0x70,0x07,0xA1,0x3D,0x0A,0x30,0xCD,0x15,0x8D,
-0x4F,0xF8,0xDD,0xD4,0x8C,0x50,0x15,0x1C,0xEF,0x50,0xEE,0xC4,0x2E,0xF7,0xFC,0xE9,
-0x52,0xF2,0x91,0x7D,0xE0,0x6D,0xD5,0x35,0x30,0x8E,0x5E,0x43,0x73,0xF2,0x41,0xE9,
-0xD5,0x6A,0xE3,0xB2,0x89,0x3A,0x56,0x39,0x38,0x6F,0x06,0x3C,0x88,0x69,0x5B,0x2A,
-0x4D,0xC5,0xA7,0x54,0xB8,0x6C,0x89,0xCC,0x9B,0xF9,0x3C,0xCA,0xE5,0xFD,0x89,0xF5,
-0x12,0x3C,0x92,0x78,0x96,0xD6,0xDC,0x74,0x6E,0x93,0x44,0x61,0xD1,0x8D,0xC7,0x46,
-0xB2,0x75,0x0E,0x86,0xE8,0x19,0x8A,0xD5,0x6D,0x6C,0xD5,0x78,0x16,0x95,0xA2,0xE9,
-0xC8,0x0A,0x38,0xEB,0xF2,0x24,0x13,0x4F,0x73,0x54,0x93,0x13,0x85,0x3A,0x1B,0xBC,
-0x1E,0x34,0xB5,0x8B,0x05,0x8C,0xB9,0x77,0x8B,0xB1,0xDB,0x1F,0x20,0x91,0xAB,0x09,
-0x53,0x6E,0x90,0xCE,0x7B,0x37,0x74,0xB9,0x70,0x47,0x91,0x22,0x51,0x63,0x16,0x79,
-0xAE,0xB1,0xAE,0x41,0x26,0x08,0xC8,0x19,0x2B,0xD1,0x46,0xAA,0x48,0xD6,0x64,0x2A,
-0xD7,0x83,0x34,0xFF,0x2C,0x2A,0xC1,0x6C,0x19,0x43,0x4A,0x07,0x85,0xE7,0xD3,0x7C,
-0xF6,0x21,0x68,0xEF,0xEA,0xF2,0x52,0x9F,0x7F,0x93,0x90,0xCF,0x02,0x03,0x01,0x00,
-0x01,0xA3,0x42,0x30,0x40,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,
-0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,
-0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,
-0x14,0x60,0x7B,0x66,0x1A,0x45,0x0D,0x97,0xCA,0x89,0x50,0x2F,0x7D,0x04,0xCD,0x34,
-0xA8,0xFF,0xFC,0xFD,0x4B,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,
-0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0xD6,0x73,0xE7,0x7C,0x4F,0x76,0xD0,
-0x8D,0xBF,0xEC,0xBA,0xA2,0xBE,0x34,0xC5,0x28,0x32,0xB5,0x7C,0xFC,0x6C,0x9C,0x2C,
-0x2B,0xBD,0x09,0x9E,0x53,0xBF,0x6B,0x5E,0xAA,0x11,0x48,0xB6,0xE5,0x08,0xA3,0xB3,
-0xCA,0x3D,0x61,0x4D,0xD3,0x46,0x09,0xB3,0x3E,0xC3,0xA0,0xE3,0x63,0x55,0x1B,0xF2,
-0xBA,0xEF,0xAD,0x39,0xE1,0x43,0xB9,0x38,0xA3,0xE6,0x2F,0x8A,0x26,0x3B,0xEF,0xA0,
-0x50,0x56,0xF9,0xC6,0x0A,0xFD,0x38,0xCD,0xC4,0x0B,0x70,0x51,0x94,0x97,0x98,0x04,
-0xDF,0xC3,0x5F,0x94,0xD5,0x15,0xC9,0x14,0x41,0x9C,0xC4,0x5D,0x75,0x64,0x15,0x0D,
-0xFF,0x55,0x30,0xEC,0x86,0x8F,0xFF,0x0D,0xEF,0x2C,0xB9,0x63,0x46,0xF6,0xAA,0xFC,
-0xDF,0xBC,0x69,0xFD,0x2E,0x12,0x48,0x64,0x9A,0xE0,0x95,0xF0,0xA6,0xEF,0x29,0x8F,
-0x01,0xB1,0x15,0xB5,0x0C,0x1D,0xA5,0xFE,0x69,0x2C,0x69,0x24,0x78,0x1E,0xB3,0xA7,
-0x1C,0x71,0x62,0xEE,0xCA,0xC8,0x97,0xAC,0x17,0x5D,0x8A,0xC2,0xF8,0x47,0x86,0x6E,
-0x2A,0xC4,0x56,0x31,0x95,0xD0,0x67,0x89,0x85,0x2B,0xF9,0x6C,0xA6,0x5D,0x46,0x9D,
-0x0C,0xAA,0x82,0xE4,0x99,0x51,0xDD,0x70,0xB7,0xDB,0x56,0x3D,0x61,0xE4,0x6A,0xE1,
-0x5C,0xD6,0xF6,0xFE,0x3D,0xDE,0x41,0xCC,0x07,0xAE,0x63,0x52,0xBF,0x53,0x53,0xF4,
-0x2B,0xE9,0xC7,0xFD,0xB6,0xF7,0x82,0x5F,0x85,0xD2,0x41,0x18,0xDB,0x81,0xB3,0x04,
-0x1C,0xC5,0x1F,0xA4,0x80,0x6F,0x15,0x20,0xC9,0xDE,0x0C,0x88,0x0A,0x1D,0xD6,0x66,
-0x55,0xE2,0xFC,0x48,0xC9,0x29,0x26,0x69,0xE0,
+const unsigned char Entrust_Root_Certification_Authority___G2_certificate[1090]={
+0x30,0x82,0x04,0x3E,0x30,0x82,0x03,0x26,0xA0,0x03,0x02,0x01,0x02,0x02,0x04,0x4A,
+0x53,0x8C,0x28,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,
+0x05,0x00,0x30,0x81,0xBE,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,
+0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x45,0x6E,0x74,
+0x72,0x75,0x73,0x74,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x28,0x30,0x26,0x06,0x03,
+0x55,0x04,0x0B,0x13,0x1F,0x53,0x65,0x65,0x20,0x77,0x77,0x77,0x2E,0x65,0x6E,0x74,
+0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x2F,0x6C,0x65,0x67,0x61,0x6C,0x2D,0x74,
+0x65,0x72,0x6D,0x73,0x31,0x39,0x30,0x37,0x06,0x03,0x55,0x04,0x0B,0x13,0x30,0x28,
+0x63,0x29,0x20,0x32,0x30,0x30,0x39,0x20,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2C,
+0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x66,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,
+0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,
+0x32,0x30,0x30,0x06,0x03,0x55,0x04,0x03,0x13,0x29,0x45,0x6E,0x74,0x72,0x75,0x73,
+0x74,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,
+0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20,0x2D,
+0x20,0x47,0x32,0x30,0x1E,0x17,0x0D,0x30,0x39,0x30,0x37,0x30,0x37,0x31,0x37,0x32,
+0x35,0x35,0x34,0x5A,0x17,0x0D,0x33,0x30,0x31,0x32,0x30,0x37,0x31,0x37,0x35,0x35,
+0x35,0x34,0x5A,0x30,0x81,0xBE,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,
+0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x45,0x6E,
+0x74,0x72,0x75,0x73,0x74,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x28,0x30,0x26,0x06,
+0x03,0x55,0x04,0x0B,0x13,0x1F,0x53,0x65,0x65,0x20,0x77,0x77,0x77,0x2E,0x65,0x6E,
+0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x2F,0x6C,0x65,0x67,0x61,0x6C,0x2D,
+0x74,0x65,0x72,0x6D,0x73,0x31,0x39,0x30,0x37,0x06,0x03,0x55,0x04,0x0B,0x13,0x30,
+0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x39,0x20,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,
+0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x66,0x6F,0x72,0x20,0x61,0x75,0x74,
+0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,
+0x31,0x32,0x30,0x30,0x06,0x03,0x55,0x04,0x03,0x13,0x29,0x45,0x6E,0x74,0x72,0x75,
+0x73,0x74,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,
+0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20,
+0x2D,0x20,0x47,0x32,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,
+0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,
+0x02,0x82,0x01,0x01,0x00,0xBA,0x84,0xB6,0x72,0xDB,0x9E,0x0C,0x6B,0xE2,0x99,0xE9,
+0x30,0x01,0xA7,0x76,0xEA,0x32,0xB8,0x95,0x41,0x1A,0xC9,0xDA,0x61,0x4E,0x58,0x72,
+0xCF,0xFE,0xF6,0x82,0x79,0xBF,0x73,0x61,0x06,0x0A,0xA5,0x27,0xD8,0xB3,0x5F,0xD3,
+0x45,0x4E,0x1C,0x72,0xD6,0x4E,0x32,0xF2,0x72,0x8A,0x0F,0xF7,0x83,0x19,0xD0,0x6A,
+0x80,0x80,0x00,0x45,0x1E,0xB0,0xC7,0xE7,0x9A,0xBF,0x12,0x57,0x27,0x1C,0xA3,0x68,
+0x2F,0x0A,0x87,0xBD,0x6A,0x6B,0x0E,0x5E,0x65,0xF3,0x1C,0x77,0xD5,0xD4,0x85,0x8D,
+0x70,0x21,0xB4,0xB3,0x32,0xE7,0x8B,0xA2,0xD5,0x86,0x39,0x02,0xB1,0xB8,0xD2,0x47,
+0xCE,0xE4,0xC9,0x49,0xC4,0x3B,0xA7,0xDE,0xFB,0x54,0x7D,0x57,0xBE,0xF0,0xE8,0x6E,
+0xC2,0x79,0xB2,0x3A,0x0B,0x55,0xE2,0x50,0x98,0x16,0x32,0x13,0x5C,0x2F,0x78,0x56,
+0xC1,0xC2,0x94,0xB3,0xF2,0x5A,0xE4,0x27,0x9A,0x9F,0x24,0xD7,0xC6,0xEC,0xD0,0x9B,
+0x25,0x82,0xE3,0xCC,0xC2,0xC4,0x45,0xC5,0x8C,0x97,0x7A,0x06,0x6B,0x2A,0x11,0x9F,
+0xA9,0x0A,0x6E,0x48,0x3B,0x6F,0xDB,0xD4,0x11,0x19,0x42,0xF7,0x8F,0x07,0xBF,0xF5,
+0x53,0x5F,0x9C,0x3E,0xF4,0x17,0x2C,0xE6,0x69,0xAC,0x4E,0x32,0x4C,0x62,0x77,0xEA,
+0xB7,0xE8,0xE5,0xBB,0x34,0xBC,0x19,0x8B,0xAE,0x9C,0x51,0xE7,0xB7,0x7E,0xB5,0x53,
+0xB1,0x33,0x22,0xE5,0x6D,0xCF,0x70,0x3C,0x1A,0xFA,0xE2,0x9B,0x67,0xB6,0x83,0xF4,
+0x8D,0xA5,0xAF,0x62,0x4C,0x4D,0xE0,0x58,0xAC,0x64,0x34,0x12,0x03,0xF8,0xB6,0x8D,
+0x94,0x63,0x24,0xA4,0x71,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x0E,
+0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0F,
+0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,
+0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x6A,0x72,0x26,0x7A,0xD0,0x1E,
+0xEF,0x7D,0xE7,0x3B,0x69,0x51,0xD4,0x6C,0x8D,0x9F,0x90,0x12,0x66,0xAB,0x30,0x0D,
+0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x03,0x82,0x01,
+0x01,0x00,0x79,0x9F,0x1D,0x96,0xC6,0xB6,0x79,0x3F,0x22,0x8D,0x87,0xD3,0x87,0x03,
+0x04,0x60,0x6A,0x6B,0x9A,0x2E,0x59,0x89,0x73,0x11,0xAC,0x43,0xD1,0xF5,0x13,0xFF,
+0x8D,0x39,0x2B,0xC0,0xF2,0xBD,0x4F,0x70,0x8C,0xA9,0x2F,0xEA,0x17,0xC4,0x0B,0x54,
+0x9E,0xD4,0x1B,0x96,0x98,0x33,0x3C,0xA8,0xAD,0x62,0xA2,0x00,0x76,0xAB,0x59,0x69,
+0x6E,0x06,0x1D,0x7E,0xC4,0xB9,0x44,0x8D,0x98,0xAF,0x12,0xD4,0x61,0xDB,0x0A,0x19,
+0x46,0x47,0xF3,0xEB,0xF7,0x63,0xC1,0x40,0x05,0x40,0xA5,0xD2,0xB7,0xF4,0xB5,0x9A,
+0x36,0xBF,0xA9,0x88,0x76,0x88,0x04,0x55,0x04,0x2B,0x9C,0x87,0x7F,0x1A,0x37,0x3C,
+0x7E,0x2D,0xA5,0x1A,0xD8,0xD4,0x89,0x5E,0xCA,0xBD,0xAC,0x3D,0x6C,0xD8,0x6D,0xAF,
+0xD5,0xF3,0x76,0x0F,0xCD,0x3B,0x88,0x38,0x22,0x9D,0x6C,0x93,0x9A,0xC4,0x3D,0xBF,
+0x82,0x1B,0x65,0x3F,0xA6,0x0F,0x5D,0xAA,0xFC,0xE5,0xB2,0x15,0xCA,0xB5,0xAD,0xC6,
+0xBC,0x3D,0xD0,0x84,0xE8,0xEA,0x06,0x72,0xB0,0x4D,0x39,0x32,0x78,0xBF,0x3E,0x11,
+0x9C,0x0B,0xA4,0x9D,0x9A,0x21,0xF3,0xF0,0x9B,0x0B,0x30,0x78,0xDB,0xC1,0xDC,0x87,
+0x43,0xFE,0xBC,0x63,0x9A,0xCA,0xC5,0xC2,0x1C,0xC9,0xC7,0x8D,0xFF,0x3B,0x12,0x58,
+0x08,0xE6,0xB6,0x3D,0xEC,0x7A,0x2C,0x4E,0xFB,0x83,0x96,0xCE,0x0C,0x3C,0x69,0x87,
+0x54,0x73,0xA4,0x73,0xC2,0x93,0xFF,0x51,0x10,0xAC,0x15,0x54,0x01,0xD8,0xFC,0x05,
+0xB1,0x89,0xA1,0x7F,0x74,0x83,0x9A,0x49,0xD7,0xDC,0x4E,0x7B,0x8A,0x48,0x6F,0x8B,
+0x45,0xF6,
};
-/* subject:/OU=GlobalSign Root CA - R2/O=GlobalSign/CN=GlobalSign */
-/* issuer :/OU=GlobalSign Root CA - R2/O=GlobalSign/CN=GlobalSign */
+/* subject:/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Assured ID Root G2 */
+/* issuer :/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Assured ID Root G2 */
-const unsigned char GlobalSign_Root_CA___R2_certificate[958]={
-0x30,0x82,0x03,0xBA,0x30,0x82,0x02,0xA2,0xA0,0x03,0x02,0x01,0x02,0x02,0x0B,0x04,
-0x00,0x00,0x00,0x00,0x01,0x0F,0x86,0x26,0xE6,0x0D,0x30,0x0D,0x06,0x09,0x2A,0x86,
-0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x4C,0x31,0x20,0x30,0x1E,0x06,
-0x03,0x55,0x04,0x0B,0x13,0x17,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,
-0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x20,0x2D,0x20,0x52,0x32,0x31,0x13,0x30,
-0x11,0x06,0x03,0x55,0x04,0x0A,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,
-0x67,0x6E,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x03,0x13,0x0A,0x47,0x6C,0x6F,
-0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x30,0x1E,0x17,0x0D,0x30,0x36,0x31,0x32,0x31,
-0x35,0x30,0x38,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32,0x31,0x31,0x32,0x31,0x35,
-0x30,0x38,0x30,0x30,0x30,0x30,0x5A,0x30,0x4C,0x31,0x20,0x30,0x1E,0x06,0x03,0x55,
-0x04,0x0B,0x13,0x17,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x20,0x52,
-0x6F,0x6F,0x74,0x20,0x43,0x41,0x20,0x2D,0x20,0x52,0x32,0x31,0x13,0x30,0x11,0x06,
-0x03,0x55,0x04,0x0A,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,
-0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x03,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,
-0x6C,0x53,0x69,0x67,0x6E,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,
-0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,
-0x0A,0x02,0x82,0x01,0x01,0x00,0xA6,0xCF,0x24,0x0E,0xBE,0x2E,0x6F,0x28,0x99,0x45,
-0x42,0xC4,0xAB,0x3E,0x21,0x54,0x9B,0x0B,0xD3,0x7F,0x84,0x70,0xFA,0x12,0xB3,0xCB,
-0xBF,0x87,0x5F,0xC6,0x7F,0x86,0xD3,0xB2,0x30,0x5C,0xD6,0xFD,0xAD,0xF1,0x7B,0xDC,
-0xE5,0xF8,0x60,0x96,0x09,0x92,0x10,0xF5,0xD0,0x53,0xDE,0xFB,0x7B,0x7E,0x73,0x88,
-0xAC,0x52,0x88,0x7B,0x4A,0xA6,0xCA,0x49,0xA6,0x5E,0xA8,0xA7,0x8C,0x5A,0x11,0xBC,
-0x7A,0x82,0xEB,0xBE,0x8C,0xE9,0xB3,0xAC,0x96,0x25,0x07,0x97,0x4A,0x99,0x2A,0x07,
-0x2F,0xB4,0x1E,0x77,0xBF,0x8A,0x0F,0xB5,0x02,0x7C,0x1B,0x96,0xB8,0xC5,0xB9,0x3A,
-0x2C,0xBC,0xD6,0x12,0xB9,0xEB,0x59,0x7D,0xE2,0xD0,0x06,0x86,0x5F,0x5E,0x49,0x6A,
-0xB5,0x39,0x5E,0x88,0x34,0xEC,0xBC,0x78,0x0C,0x08,0x98,0x84,0x6C,0xA8,0xCD,0x4B,
-0xB4,0xA0,0x7D,0x0C,0x79,0x4D,0xF0,0xB8,0x2D,0xCB,0x21,0xCA,0xD5,0x6C,0x5B,0x7D,
-0xE1,0xA0,0x29,0x84,0xA1,0xF9,0xD3,0x94,0x49,0xCB,0x24,0x62,0x91,0x20,0xBC,0xDD,
-0x0B,0xD5,0xD9,0xCC,0xF9,0xEA,0x27,0x0A,0x2B,0x73,0x91,0xC6,0x9D,0x1B,0xAC,0xC8,
-0xCB,0xE8,0xE0,0xA0,0xF4,0x2F,0x90,0x8B,0x4D,0xFB,0xB0,0x36,0x1B,0xF6,0x19,0x7A,
-0x85,0xE0,0x6D,0xF2,0x61,0x13,0x88,0x5C,0x9F,0xE0,0x93,0x0A,0x51,0x97,0x8A,0x5A,
-0xCE,0xAF,0xAB,0xD5,0xF7,0xAA,0x09,0xAA,0x60,0xBD,0xDC,0xD9,0x5F,0xDF,0x72,0xA9,
-0x60,0x13,0x5E,0x00,0x01,0xC9,0x4A,0xFA,0x3F,0xA4,0xEA,0x07,0x03,0x21,0x02,0x8E,
-0x82,0xCA,0x03,0xC2,0x9B,0x8F,0x02,0x03,0x01,0x00,0x01,0xA3,0x81,0x9C,0x30,0x81,
-0x99,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,
-0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,
-0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x9B,0xE2,0x07,
-0x57,0x67,0x1C,0x1E,0xC0,0x6A,0x06,0xDE,0x59,0xB4,0x9A,0x2D,0xDF,0xDC,0x19,0x86,
-0x2E,0x30,0x36,0x06,0x03,0x55,0x1D,0x1F,0x04,0x2F,0x30,0x2D,0x30,0x2B,0xA0,0x29,
-0xA0,0x27,0x86,0x25,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x63,0x72,0x6C,0x2E,0x67,
-0x6C,0x6F,0x62,0x61,0x6C,0x73,0x69,0x67,0x6E,0x2E,0x6E,0x65,0x74,0x2F,0x72,0x6F,
-0x6F,0x74,0x2D,0x72,0x32,0x2E,0x63,0x72,0x6C,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,
-0x04,0x18,0x30,0x16,0x80,0x14,0x9B,0xE2,0x07,0x57,0x67,0x1C,0x1E,0xC0,0x6A,0x06,
-0xDE,0x59,0xB4,0x9A,0x2D,0xDF,0xDC,0x19,0x86,0x2E,0x30,0x0D,0x06,0x09,0x2A,0x86,
-0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x99,0x81,
-0x53,0x87,0x1C,0x68,0x97,0x86,0x91,0xEC,0xE0,0x4A,0xB8,0x44,0x0B,0xAB,0x81,0xAC,
-0x27,0x4F,0xD6,0xC1,0xB8,0x1C,0x43,0x78,0xB3,0x0C,0x9A,0xFC,0xEA,0x2C,0x3C,0x6E,
-0x61,0x1B,0x4D,0x4B,0x29,0xF5,0x9F,0x05,0x1D,0x26,0xC1,0xB8,0xE9,0x83,0x00,0x62,
-0x45,0xB6,0xA9,0x08,0x93,0xB9,0xA9,0x33,0x4B,0x18,0x9A,0xC2,0xF8,0x87,0x88,0x4E,
-0xDB,0xDD,0x71,0x34,0x1A,0xC1,0x54,0xDA,0x46,0x3F,0xE0,0xD3,0x2A,0xAB,0x6D,0x54,
-0x22,0xF5,0x3A,0x62,0xCD,0x20,0x6F,0xBA,0x29,0x89,0xD7,0xDD,0x91,0xEE,0xD3,0x5C,
-0xA2,0x3E,0xA1,0x5B,0x41,0xF5,0xDF,0xE5,0x64,0x43,0x2D,0xE9,0xD5,0x39,0xAB,0xD2,
-0xA2,0xDF,0xB7,0x8B,0xD0,0xC0,0x80,0x19,0x1C,0x45,0xC0,0x2D,0x8C,0xE8,0xF8,0x2D,
-0xA4,0x74,0x56,0x49,0xC5,0x05,0xB5,0x4F,0x15,0xDE,0x6E,0x44,0x78,0x39,0x87,0xA8,
-0x7E,0xBB,0xF3,0x79,0x18,0x91,0xBB,0xF4,0x6F,0x9D,0xC1,0xF0,0x8C,0x35,0x8C,0x5D,
-0x01,0xFB,0xC3,0x6D,0xB9,0xEF,0x44,0x6D,0x79,0x46,0x31,0x7E,0x0A,0xFE,0xA9,0x82,
-0xC1,0xFF,0xEF,0xAB,0x6E,0x20,0xC4,0x50,0xC9,0x5F,0x9D,0x4D,0x9B,0x17,0x8C,0x0C,
-0xE5,0x01,0xC9,0xA0,0x41,0x6A,0x73,0x53,0xFA,0xA5,0x50,0xB4,0x6E,0x25,0x0F,0xFB,
-0x4C,0x18,0xF4,0xFD,0x52,0xD9,0x8E,0x69,0xB1,0xE8,0x11,0x0F,0xDE,0x88,0xD8,0xFB,
-0x1D,0x49,0xF7,0xAA,0xDE,0x95,0xCF,0x20,0x78,0xC2,0x60,0x12,0xDB,0x25,0x40,0x8C,
-0x6A,0xFC,0x7E,0x42,0x38,0x40,0x64,0x12,0xF7,0x9E,0x81,0xE1,0x93,0x2E,
+const unsigned char DigiCert_Assured_ID_Root_G2_certificate[922]={
+0x30,0x82,0x03,0x96,0x30,0x82,0x02,0x7E,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x0B,
+0x93,0x1C,0x3A,0xD6,0x39,0x67,0xEA,0x67,0x23,0xBF,0xC3,0xAF,0x9A,0xF4,0x4B,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x30,0x65,
+0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30,
+0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74,
+0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0B,0x13,0x10,0x77,
+0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31,
+0x24,0x30,0x22,0x06,0x03,0x55,0x04,0x03,0x13,0x1B,0x44,0x69,0x67,0x69,0x43,0x65,
+0x72,0x74,0x20,0x41,0x73,0x73,0x75,0x72,0x65,0x64,0x20,0x49,0x44,0x20,0x52,0x6F,
+0x6F,0x74,0x20,0x47,0x32,0x30,0x1E,0x17,0x0D,0x31,0x33,0x30,0x38,0x30,0x31,0x31,
+0x32,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x38,0x30,0x31,0x31,0x35,0x31,0x32,
+0x30,0x30,0x30,0x30,0x5A,0x30,0x65,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,
+0x13,0x02,0x55,0x53,0x31,0x15,0x30,0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,
+0x69,0x67,0x69,0x43,0x65,0x72,0x74,0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,
+0x03,0x55,0x04,0x0B,0x13,0x10,0x77,0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,
+0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31,0x24,0x30,0x22,0x06,0x03,0x55,0x04,0x03,0x13,
+0x1B,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74,0x20,0x41,0x73,0x73,0x75,0x72,0x65,
+0x64,0x20,0x49,0x44,0x20,0x52,0x6F,0x6F,0x74,0x20,0x47,0x32,0x30,0x82,0x01,0x22,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,
+0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xD9,0xE7,0x28,
+0x2F,0x52,0x3F,0x36,0x72,0x49,0x88,0x93,0x34,0xF3,0xF8,0x6A,0x1E,0x31,0x54,0x80,
+0x9F,0xAD,0x54,0x41,0xB5,0x47,0xDF,0x96,0xA8,0xD4,0xAF,0x80,0x2D,0xB9,0x0A,0xCF,
+0x75,0xFD,0x89,0xA5,0x7D,0x24,0xFA,0xE3,0x22,0x0C,0x2B,0xBC,0x95,0x17,0x0B,0x33,
+0xBF,0x19,0x4D,0x41,0x06,0x90,0x00,0xBD,0x0C,0x4D,0x10,0xFE,0x07,0xB5,0xE7,0x1C,
+0x6E,0x22,0x55,0x31,0x65,0x97,0xBD,0xD3,0x17,0xD2,0x1E,0x62,0xF3,0xDB,0xEA,0x6C,
+0x50,0x8C,0x3F,0x84,0x0C,0x96,0xCF,0xB7,0xCB,0x03,0xE0,0xCA,0x6D,0xA1,0x14,0x4C,
+0x1B,0x89,0xDD,0xED,0x00,0xB0,0x52,0x7C,0xAF,0x91,0x6C,0xB1,0x38,0x13,0xD1,0xE9,
+0x12,0x08,0xC0,0x00,0xB0,0x1C,0x2B,0x11,0xDA,0x77,0x70,0x36,0x9B,0xAE,0xCE,0x79,
+0x87,0xDC,0x82,0x70,0xE6,0x09,0x74,0x70,0x55,0x69,0xAF,0xA3,0x68,0x9F,0xBF,0xDD,
+0xB6,0x79,0xB3,0xF2,0x9D,0x70,0x29,0x55,0xF4,0xAB,0xFF,0x95,0x61,0xF3,0xC9,0x40,
+0x6F,0x1D,0xD1,0xBE,0x93,0xBB,0xD3,0x88,0x2A,0xBB,0x9D,0xBF,0x72,0x5A,0x56,0x71,
+0x3B,0x3F,0xD4,0xF3,0xD1,0x0A,0xFE,0x28,0xEF,0xA3,0xEE,0xD9,0x99,0xAF,0x03,0xD3,
+0x8F,0x60,0xB7,0xF2,0x92,0xA1,0xB1,0xBD,0x89,0x89,0x1F,0x30,0xCD,0xC3,0xA6,0x2E,
+0x62,0x33,0xAE,0x16,0x02,0x77,0x44,0x5A,0xE7,0x81,0x0A,0x3C,0xA7,0x44,0x2E,0x79,
+0xB8,0x3F,0x04,0xBC,0x5C,0xA0,0x87,0xE1,0x1B,0xAF,0x51,0x8E,0xCD,0xEC,0x2C,0xFA,
+0xF8,0xFE,0x6D,0xF0,0x3A,0x7C,0xAA,0x8B,0xE4,0x67,0x95,0x31,0x8D,0x02,0x03,0x01,
+0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,
+0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,
+0xFF,0x04,0x04,0x03,0x02,0x01,0x86,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,
+0x04,0x14,0xCE,0xC3,0x4A,0xB9,0x99,0x55,0xF2,0xB8,0xDB,0x60,0xBF,0xA9,0x7E,0xBD,
+0x56,0xB5,0x97,0x36,0xA7,0xD6,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,
+0x01,0x01,0x0B,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0xCA,0xA5,0x55,0x8C,0xE3,0xC8,
+0x41,0x6E,0x69,0x27,0xA7,0x75,0x11,0xEF,0x3C,0x86,0x36,0x6F,0xD2,0x9D,0xC6,0x78,
+0x38,0x1D,0x69,0x96,0xA2,0x92,0x69,0x2E,0x38,0x6C,0x9B,0x7D,0x04,0xD4,0x89,0xA5,
+0xB1,0x31,0x37,0x8A,0xC9,0x21,0xCC,0xAB,0x6C,0xCD,0x8B,0x1C,0x9A,0xD6,0xBF,0x48,
+0xD2,0x32,0x66,0xC1,0x8A,0xC0,0xF3,0x2F,0x3A,0xEF,0xC0,0xE3,0xD4,0x91,0x86,0xD1,
+0x50,0xE3,0x03,0xDB,0x73,0x77,0x6F,0x4A,0x39,0x53,0xED,0xDE,0x26,0xC7,0xB5,0x7D,
+0xAF,0x2B,0x42,0xD1,0x75,0x62,0xE3,0x4A,0x2B,0x02,0xC7,0x50,0x4B,0xE0,0x69,0xE2,
+0x96,0x6C,0x0E,0x44,0x66,0x10,0x44,0x8F,0xAD,0x05,0xEB,0xF8,0x79,0xAC,0xA6,0x1B,
+0xE8,0x37,0x34,0x9D,0x53,0xC9,0x61,0xAA,0xA2,0x52,0xAF,0x4A,0x70,0x16,0x86,0xC2,
+0x3A,0xC8,0xB1,0x13,0x70,0x36,0xD8,0xCF,0xEE,0xF4,0x0A,0x34,0xD5,0x5B,0x4C,0xFD,
+0x07,0x9C,0xA2,0xBA,0xD9,0x01,0x72,0x5C,0xF3,0x4D,0xC1,0xDD,0x0E,0xB1,0x1C,0x0D,
+0xC4,0x63,0xBE,0xAD,0xF4,0x14,0xFB,0x89,0xEC,0xA2,0x41,0x0E,0x4C,0xCC,0xC8,0x57,
+0x40,0xD0,0x6E,0x03,0xAA,0xCD,0x0C,0x8E,0x89,0x99,0x99,0x6C,0xF0,0x3C,0x30,0xAF,
+0x38,0xDF,0x6F,0xBC,0xA3,0xBE,0x29,0x20,0x27,0xAB,0x74,0xFF,0x13,0x22,0x78,0xDE,
+0x97,0x52,0x55,0x1E,0x83,0xB5,0x54,0x20,0x03,0xEE,0xAE,0xC0,0x4F,0x56,0xDE,0x37,
+0xCC,0xC3,0x7F,0xAA,0x04,0x27,0xBB,0xD3,0x77,0xB8,0x62,0xDB,0x17,0x7C,0x9C,0x28,
+0x22,0x13,0x73,0x6C,0xCF,0x26,0xF5,0x8A,0x29,0xE7,
};
-/* subject:/OU=GlobalSign Root CA - R3/O=GlobalSign/CN=GlobalSign */
-/* issuer :/OU=GlobalSign Root CA - R3/O=GlobalSign/CN=GlobalSign */
+/* subject:/C=US/O=AffirmTrust/CN=AffirmTrust Commercial */
+/* issuer :/C=US/O=AffirmTrust/CN=AffirmTrust Commercial */
-const unsigned char GlobalSign_Root_CA___R3_certificate[867]={
-0x30,0x82,0x03,0x5F,0x30,0x82,0x02,0x47,0xA0,0x03,0x02,0x01,0x02,0x02,0x0B,0x04,
-0x00,0x00,0x00,0x00,0x01,0x21,0x58,0x53,0x08,0xA2,0x30,0x0D,0x06,0x09,0x2A,0x86,
-0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x30,0x4C,0x31,0x20,0x30,0x1E,0x06,
-0x03,0x55,0x04,0x0B,0x13,0x17,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,
-0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x20,0x2D,0x20,0x52,0x33,0x31,0x13,0x30,
-0x11,0x06,0x03,0x55,0x04,0x0A,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,
-0x67,0x6E,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x03,0x13,0x0A,0x47,0x6C,0x6F,
-0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x30,0x1E,0x17,0x0D,0x30,0x39,0x30,0x33,0x31,
-0x38,0x31,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32,0x39,0x30,0x33,0x31,0x38,
-0x31,0x30,0x30,0x30,0x30,0x30,0x5A,0x30,0x4C,0x31,0x20,0x30,0x1E,0x06,0x03,0x55,
-0x04,0x0B,0x13,0x17,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x20,0x52,
-0x6F,0x6F,0x74,0x20,0x43,0x41,0x20,0x2D,0x20,0x52,0x33,0x31,0x13,0x30,0x11,0x06,
-0x03,0x55,0x04,0x0A,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,
-0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x03,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,
-0x6C,0x53,0x69,0x67,0x6E,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,
-0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,
-0x0A,0x02,0x82,0x01,0x01,0x00,0xCC,0x25,0x76,0x90,0x79,0x06,0x78,0x22,0x16,0xF5,
-0xC0,0x83,0xB6,0x84,0xCA,0x28,0x9E,0xFD,0x05,0x76,0x11,0xC5,0xAD,0x88,0x72,0xFC,
-0x46,0x02,0x43,0xC7,0xB2,0x8A,0x9D,0x04,0x5F,0x24,0xCB,0x2E,0x4B,0xE1,0x60,0x82,
-0x46,0xE1,0x52,0xAB,0x0C,0x81,0x47,0x70,0x6C,0xDD,0x64,0xD1,0xEB,0xF5,0x2C,0xA3,
-0x0F,0x82,0x3D,0x0C,0x2B,0xAE,0x97,0xD7,0xB6,0x14,0x86,0x10,0x79,0xBB,0x3B,0x13,
-0x80,0x77,0x8C,0x08,0xE1,0x49,0xD2,0x6A,0x62,0x2F,0x1F,0x5E,0xFA,0x96,0x68,0xDF,
-0x89,0x27,0x95,0x38,0x9F,0x06,0xD7,0x3E,0xC9,0xCB,0x26,0x59,0x0D,0x73,0xDE,0xB0,
-0xC8,0xE9,0x26,0x0E,0x83,0x15,0xC6,0xEF,0x5B,0x8B,0xD2,0x04,0x60,0xCA,0x49,0xA6,
-0x28,0xF6,0x69,0x3B,0xF6,0xCB,0xC8,0x28,0x91,0xE5,0x9D,0x8A,0x61,0x57,0x37,0xAC,
-0x74,0x14,0xDC,0x74,0xE0,0x3A,0xEE,0x72,0x2F,0x2E,0x9C,0xFB,0xD0,0xBB,0xBF,0xF5,
-0x3D,0x00,0xE1,0x06,0x33,0xE8,0x82,0x2B,0xAE,0x53,0xA6,0x3A,0x16,0x73,0x8C,0xDD,
-0x41,0x0E,0x20,0x3A,0xC0,0xB4,0xA7,0xA1,0xE9,0xB2,0x4F,0x90,0x2E,0x32,0x60,0xE9,
-0x57,0xCB,0xB9,0x04,0x92,0x68,0x68,0xE5,0x38,0x26,0x60,0x75,0xB2,0x9F,0x77,0xFF,
-0x91,0x14,0xEF,0xAE,0x20,0x49,0xFC,0xAD,0x40,0x15,0x48,0xD1,0x02,0x31,0x61,0x19,
-0x5E,0xB8,0x97,0xEF,0xAD,0x77,0xB7,0x64,0x9A,0x7A,0xBF,0x5F,0xC1,0x13,0xEF,0x9B,
-0x62,0xFB,0x0D,0x6C,0xE0,0x54,0x69,0x16,0xA9,0x03,0xDA,0x6E,0xE9,0x83,0x93,0x71,
-0x76,0xC6,0x69,0x85,0x82,0x17,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,
-0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,
-0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,
-0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x8F,0xF0,0x4B,0x7F,0xA8,
-0x2E,0x45,0x24,0xAE,0x4D,0x50,0xFA,0x63,0x9A,0x8B,0xDE,0xE2,0xDD,0x1B,0xBC,0x30,
-0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x03,0x82,
-0x01,0x01,0x00,0x4B,0x40,0xDB,0xC0,0x50,0xAA,0xFE,0xC8,0x0C,0xEF,0xF7,0x96,0x54,
-0x45,0x49,0xBB,0x96,0x00,0x09,0x41,0xAC,0xB3,0x13,0x86,0x86,0x28,0x07,0x33,0xCA,
-0x6B,0xE6,0x74,0xB9,0xBA,0x00,0x2D,0xAE,0xA4,0x0A,0xD3,0xF5,0xF1,0xF1,0x0F,0x8A,
-0xBF,0x73,0x67,0x4A,0x83,0xC7,0x44,0x7B,0x78,0xE0,0xAF,0x6E,0x6C,0x6F,0x03,0x29,
-0x8E,0x33,0x39,0x45,0xC3,0x8E,0xE4,0xB9,0x57,0x6C,0xAA,0xFC,0x12,0x96,0xEC,0x53,
-0xC6,0x2D,0xE4,0x24,0x6C,0xB9,0x94,0x63,0xFB,0xDC,0x53,0x68,0x67,0x56,0x3E,0x83,
-0xB8,0xCF,0x35,0x21,0xC3,0xC9,0x68,0xFE,0xCE,0xDA,0xC2,0x53,0xAA,0xCC,0x90,0x8A,
-0xE9,0xF0,0x5D,0x46,0x8C,0x95,0xDD,0x7A,0x58,0x28,0x1A,0x2F,0x1D,0xDE,0xCD,0x00,
-0x37,0x41,0x8F,0xED,0x44,0x6D,0xD7,0x53,0x28,0x97,0x7E,0xF3,0x67,0x04,0x1E,0x15,
-0xD7,0x8A,0x96,0xB4,0xD3,0xDE,0x4C,0x27,0xA4,0x4C,0x1B,0x73,0x73,0x76,0xF4,0x17,
-0x99,0xC2,0x1F,0x7A,0x0E,0xE3,0x2D,0x08,0xAD,0x0A,0x1C,0x2C,0xFF,0x3C,0xAB,0x55,
-0x0E,0x0F,0x91,0x7E,0x36,0xEB,0xC3,0x57,0x49,0xBE,0xE1,0x2E,0x2D,0x7C,0x60,0x8B,
-0xC3,0x41,0x51,0x13,0x23,0x9D,0xCE,0xF7,0x32,0x6B,0x94,0x01,0xA8,0x99,0xE7,0x2C,
-0x33,0x1F,0x3A,0x3B,0x25,0xD2,0x86,0x40,0xCE,0x3B,0x2C,0x86,0x78,0xC9,0x61,0x2F,
-0x14,0xBA,0xEE,0xDB,0x55,0x6F,0xDF,0x84,0xEE,0x05,0x09,0x4D,0xBD,0x28,0xD8,0x72,
-0xCE,0xD3,0x62,0x50,0x65,0x1E,0xEB,0x92,0x97,0x83,0x31,0xD9,0xB3,0xB5,0xCA,0x47,
-0x58,0x3F,0x5F,
+const unsigned char AffirmTrust_Commercial_certificate[848]={
+0x30,0x82,0x03,0x4C,0x30,0x82,0x02,0x34,0xA0,0x03,0x02,0x01,0x02,0x02,0x08,0x77,
+0x77,0x06,0x27,0x26,0xA9,0xB1,0x7C,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,
+0x0D,0x01,0x01,0x0B,0x05,0x00,0x30,0x44,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,
+0x06,0x13,0x02,0x55,0x53,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x0C,0x0B,
+0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x31,0x1F,0x30,0x1D,0x06,
+0x03,0x55,0x04,0x03,0x0C,0x16,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,
+0x74,0x20,0x43,0x6F,0x6D,0x6D,0x65,0x72,0x63,0x69,0x61,0x6C,0x30,0x1E,0x17,0x0D,
+0x31,0x30,0x30,0x31,0x32,0x39,0x31,0x34,0x30,0x36,0x30,0x36,0x5A,0x17,0x0D,0x33,
+0x30,0x31,0x32,0x33,0x31,0x31,0x34,0x30,0x36,0x30,0x36,0x5A,0x30,0x44,0x31,0x0B,
+0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x14,0x30,0x12,0x06,
+0x03,0x55,0x04,0x0A,0x0C,0x0B,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,
+0x74,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x03,0x0C,0x16,0x41,0x66,0x66,0x69,
+0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x20,0x43,0x6F,0x6D,0x6D,0x65,0x72,0x63,0x69,
+0x61,0x6C,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,
+0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,
+0x01,0x01,0x00,0xF6,0x1B,0x4F,0x67,0x07,0x2B,0xA1,0x15,0xF5,0x06,0x22,0xCB,0x1F,
+0x01,0xB2,0xE3,0x73,0x45,0x06,0x44,0x49,0x2C,0xBB,0x49,0x25,0x14,0xD6,0xCE,0xC3,
+0xB7,0xAB,0x2C,0x4F,0xC6,0x41,0x32,0x94,0x57,0xFA,0x12,0xA7,0x5B,0x0E,0xE2,0x8F,
+0x1F,0x1E,0x86,0x19,0xA7,0xAA,0xB5,0x2D,0xB9,0x5F,0x0D,0x8A,0xC2,0xAF,0x85,0x35,
+0x79,0x32,0x2D,0xBB,0x1C,0x62,0x37,0xF2,0xB1,0x5B,0x4A,0x3D,0xCA,0xCD,0x71,0x5F,
+0xE9,0x42,0xBE,0x94,0xE8,0xC8,0xDE,0xF9,0x22,0x48,0x64,0xC6,0xE5,0xAB,0xC6,0x2B,
+0x6D,0xAD,0x05,0xF0,0xFA,0xD5,0x0B,0xCF,0x9A,0xE5,0xF0,0x50,0xA4,0x8B,0x3B,0x47,
+0xA5,0x23,0x5B,0x7A,0x7A,0xF8,0x33,0x3F,0xB8,0xEF,0x99,0x97,0xE3,0x20,0xC1,0xD6,
+0x28,0x89,0xCF,0x94,0xFB,0xB9,0x45,0xED,0xE3,0x40,0x17,0x11,0xD4,0x74,0xF0,0x0B,
+0x31,0xE2,0x2B,0x26,0x6A,0x9B,0x4C,0x57,0xAE,0xAC,0x20,0x3E,0xBA,0x45,0x7A,0x05,
+0xF3,0xBD,0x9B,0x69,0x15,0xAE,0x7D,0x4E,0x20,0x63,0xC4,0x35,0x76,0x3A,0x07,0x02,
+0xC9,0x37,0xFD,0xC7,0x47,0xEE,0xE8,0xF1,0x76,0x1D,0x73,0x15,0xF2,0x97,0xA4,0xB5,
+0xC8,0x7A,0x79,0xD9,0x42,0xAA,0x2B,0x7F,0x5C,0xFE,0xCE,0x26,0x4F,0xA3,0x66,0x81,
+0x35,0xAF,0x44,0xBA,0x54,0x1E,0x1C,0x30,0x32,0x65,0x9D,0xE6,0x3C,0x93,0x5E,0x50,
+0x4E,0x7A,0xE3,0x3A,0xD4,0x6E,0xCC,0x1A,0xFB,0xF9,0xD2,0x37,0xAE,0x24,0x2A,0xAB,
+0x57,0x03,0x22,0x28,0x0D,0x49,0x75,0x7F,0xB7,0x28,0xDA,0x75,0xBF,0x8E,0xE3,0xDC,
+0x0E,0x79,0x31,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x1D,0x06,0x03,
+0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x9D,0x93,0xC6,0x53,0x8B,0x5E,0xCA,0xAF,0x3F,
+0x9F,0x1E,0x0F,0xE5,0x99,0x95,0xBC,0x24,0xF6,0x94,0x8F,0x30,0x0F,0x06,0x03,0x55,
+0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,
+0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0D,0x06,0x09,
+0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x03,0x82,0x01,0x01,0x00,
+0x58,0xAC,0xF4,0x04,0x0E,0xCD,0xC0,0x0D,0xFF,0x0A,0xFD,0xD4,0xBA,0x16,0x5F,0x29,
+0xBD,0x7B,0x68,0x99,0x58,0x49,0xD2,0xB4,0x1D,0x37,0x4D,0x7F,0x27,0x7D,0x46,0x06,
+0x5D,0x43,0xC6,0x86,0x2E,0x3E,0x73,0xB2,0x26,0x7D,0x4F,0x93,0xA9,0xB6,0xC4,0x2A,
+0x9A,0xAB,0x21,0x97,0x14,0xB1,0xDE,0x8C,0xD3,0xAB,0x89,0x15,0xD8,0x6B,0x24,0xD4,
+0xF1,0x16,0xAE,0xD8,0xA4,0x5C,0xD4,0x7F,0x51,0x8E,0xED,0x18,0x01,0xB1,0x93,0x63,
+0xBD,0xBC,0xF8,0x61,0x80,0x9A,0x9E,0xB1,0xCE,0x42,0x70,0xE2,0xA9,0x7D,0x06,0x25,
+0x7D,0x27,0xA1,0xFE,0x6F,0xEC,0xB3,0x1E,0x24,0xDA,0xE3,0x4B,0x55,0x1A,0x00,0x3B,
+0x35,0xB4,0x3B,0xD9,0xD7,0x5D,0x30,0xFD,0x81,0x13,0x89,0xF2,0xC2,0x06,0x2B,0xED,
+0x67,0xC4,0x8E,0xC9,0x43,0xB2,0x5C,0x6B,0x15,0x89,0x02,0xBC,0x62,0xFC,0x4E,0xF2,
+0xB5,0x33,0xAA,0xB2,0x6F,0xD3,0x0A,0xA2,0x50,0xE3,0xF6,0x3B,0xE8,0x2E,0x44,0xC2,
+0xDB,0x66,0x38,0xA9,0x33,0x56,0x48,0xF1,0x6D,0x1B,0x33,0x8D,0x0D,0x8C,0x3F,0x60,
+0x37,0x9D,0xD3,0xCA,0x6D,0x7E,0x34,0x7E,0x0D,0x9F,0x72,0x76,0x8B,0x1B,0x9F,0x72,
+0xFD,0x52,0x35,0x41,0x45,0x02,0x96,0x2F,0x1C,0xB2,0x9A,0x73,0x49,0x21,0xB1,0x49,
+0x47,0x45,0x47,0xB4,0xEF,0x6A,0x34,0x11,0xC9,0x4D,0x9A,0xCC,0x59,0xB7,0xD6,0x02,
+0x9E,0x5A,0x4E,0x65,0xB5,0x94,0xAE,0x1B,0xDF,0x29,0xB0,0x16,0xF1,0xBF,0x00,0x9E,
+0x07,0x3A,0x17,0x64,0xB5,0x04,0xB5,0x23,0x21,0x99,0x0A,0x95,0x3B,0x97,0x7C,0xEF,
};
-/* subject:/C=US/O=The Go Daddy Group, Inc./OU=Go Daddy Class 2 Certification Authority */
-/* issuer :/C=US/O=The Go Daddy Group, Inc./OU=Go Daddy Class 2 Certification Authority */
+/* subject:/C=US/O=AffirmTrust/CN=AffirmTrust Premium */
+/* issuer :/C=US/O=AffirmTrust/CN=AffirmTrust Premium */
-const unsigned char Go_Daddy_Class_2_CA_certificate[1028]={
-0x30,0x82,0x04,0x00,0x30,0x82,0x02,0xE8,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x00,
-0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,
-0x63,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x21,
-0x30,0x1F,0x06,0x03,0x55,0x04,0x0A,0x13,0x18,0x54,0x68,0x65,0x20,0x47,0x6F,0x20,
-0x44,0x61,0x64,0x64,0x79,0x20,0x47,0x72,0x6F,0x75,0x70,0x2C,0x20,0x49,0x6E,0x63,
-0x2E,0x31,0x31,0x30,0x2F,0x06,0x03,0x55,0x04,0x0B,0x13,0x28,0x47,0x6F,0x20,0x44,
-0x61,0x64,0x64,0x79,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x32,0x20,0x43,0x65,0x72,
-0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,
-0x72,0x69,0x74,0x79,0x30,0x1E,0x17,0x0D,0x30,0x34,0x30,0x36,0x32,0x39,0x31,0x37,
-0x30,0x36,0x32,0x30,0x5A,0x17,0x0D,0x33,0x34,0x30,0x36,0x32,0x39,0x31,0x37,0x30,
-0x36,0x32,0x30,0x5A,0x30,0x63,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,
-0x02,0x55,0x53,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x0A,0x13,0x18,0x54,0x68,
-0x65,0x20,0x47,0x6F,0x20,0x44,0x61,0x64,0x64,0x79,0x20,0x47,0x72,0x6F,0x75,0x70,
-0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x31,0x30,0x2F,0x06,0x03,0x55,0x04,0x0B,0x13,
-0x28,0x47,0x6F,0x20,0x44,0x61,0x64,0x64,0x79,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,
-0x32,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,
-0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x82,0x01,0x20,0x30,0x0D,0x06,
-0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0D,
-0x00,0x30,0x82,0x01,0x08,0x02,0x82,0x01,0x01,0x00,0xDE,0x9D,0xD7,0xEA,0x57,0x18,
-0x49,0xA1,0x5B,0xEB,0xD7,0x5F,0x48,0x86,0xEA,0xBE,0xDD,0xFF,0xE4,0xEF,0x67,0x1C,
-0xF4,0x65,0x68,0xB3,0x57,0x71,0xA0,0x5E,0x77,0xBB,0xED,0x9B,0x49,0xE9,0x70,0x80,
-0x3D,0x56,0x18,0x63,0x08,0x6F,0xDA,0xF2,0xCC,0xD0,0x3F,0x7F,0x02,0x54,0x22,0x54,
-0x10,0xD8,0xB2,0x81,0xD4,0xC0,0x75,0x3D,0x4B,0x7F,0xC7,0x77,0xC3,0x3E,0x78,0xAB,
-0x1A,0x03,0xB5,0x20,0x6B,0x2F,0x6A,0x2B,0xB1,0xC5,0x88,0x7E,0xC4,0xBB,0x1E,0xB0,
-0xC1,0xD8,0x45,0x27,0x6F,0xAA,0x37,0x58,0xF7,0x87,0x26,0xD7,0xD8,0x2D,0xF6,0xA9,
-0x17,0xB7,0x1F,0x72,0x36,0x4E,0xA6,0x17,0x3F,0x65,0x98,0x92,0xDB,0x2A,0x6E,0x5D,
-0xA2,0xFE,0x88,0xE0,0x0B,0xDE,0x7F,0xE5,0x8D,0x15,0xE1,0xEB,0xCB,0x3A,0xD5,0xE2,
-0x12,0xA2,0x13,0x2D,0xD8,0x8E,0xAF,0x5F,0x12,0x3D,0xA0,0x08,0x05,0x08,0xB6,0x5C,
-0xA5,0x65,0x38,0x04,0x45,0x99,0x1E,0xA3,0x60,0x60,0x74,0xC5,0x41,0xA5,0x72,0x62,
-0x1B,0x62,0xC5,0x1F,0x6F,0x5F,0x1A,0x42,0xBE,0x02,0x51,0x65,0xA8,0xAE,0x23,0x18,
-0x6A,0xFC,0x78,0x03,0xA9,0x4D,0x7F,0x80,0xC3,0xFA,0xAB,0x5A,0xFC,0xA1,0x40,0xA4,
-0xCA,0x19,0x16,0xFE,0xB2,0xC8,0xEF,0x5E,0x73,0x0D,0xEE,0x77,0xBD,0x9A,0xF6,0x79,
-0x98,0xBC,0xB1,0x07,0x67,0xA2,0x15,0x0D,0xDD,0xA0,0x58,0xC6,0x44,0x7B,0x0A,0x3E,
-0x62,0x28,0x5F,0xBA,0x41,0x07,0x53,0x58,0xCF,0x11,0x7E,0x38,0x74,0xC5,0xF8,0xFF,
-0xB5,0x69,0x90,0x8F,0x84,0x74,0xEA,0x97,0x1B,0xAF,0x02,0x01,0x03,0xA3,0x81,0xC0,
-0x30,0x81,0xBD,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xD2,0xC4,
-0xB0,0xD2,0x91,0xD4,0x4C,0x11,0x71,0xB3,0x61,0xCB,0x3D,0xA1,0xFE,0xDD,0xA8,0x6A,
-0xD4,0xE3,0x30,0x81,0x8D,0x06,0x03,0x55,0x1D,0x23,0x04,0x81,0x85,0x30,0x81,0x82,
-0x80,0x14,0xD2,0xC4,0xB0,0xD2,0x91,0xD4,0x4C,0x11,0x71,0xB3,0x61,0xCB,0x3D,0xA1,
-0xFE,0xDD,0xA8,0x6A,0xD4,0xE3,0xA1,0x67,0xA4,0x65,0x30,0x63,0x31,0x0B,0x30,0x09,
-0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,
-0x04,0x0A,0x13,0x18,0x54,0x68,0x65,0x20,0x47,0x6F,0x20,0x44,0x61,0x64,0x64,0x79,
-0x20,0x47,0x72,0x6F,0x75,0x70,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x31,0x30,0x2F,
-0x06,0x03,0x55,0x04,0x0B,0x13,0x28,0x47,0x6F,0x20,0x44,0x61,0x64,0x64,0x79,0x20,
-0x43,0x6C,0x61,0x73,0x73,0x20,0x32,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,
-0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x82,
-0x01,0x00,0x30,0x0C,0x06,0x03,0x55,0x1D,0x13,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,
-0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,
-0x82,0x01,0x01,0x00,0x32,0x4B,0xF3,0xB2,0xCA,0x3E,0x91,0xFC,0x12,0xC6,0xA1,0x07,
-0x8C,0x8E,0x77,0xA0,0x33,0x06,0x14,0x5C,0x90,0x1E,0x18,0xF7,0x08,0xA6,0x3D,0x0A,
-0x19,0xF9,0x87,0x80,0x11,0x6E,0x69,0xE4,0x96,0x17,0x30,0xFF,0x34,0x91,0x63,0x72,
-0x38,0xEE,0xCC,0x1C,0x01,0xA3,0x1D,0x94,0x28,0xA4,0x31,0xF6,0x7A,0xC4,0x54,0xD7,
-0xF6,0xE5,0x31,0x58,0x03,0xA2,0xCC,0xCE,0x62,0xDB,0x94,0x45,0x73,0xB5,0xBF,0x45,
-0xC9,0x24,0xB5,0xD5,0x82,0x02,0xAD,0x23,0x79,0x69,0x8D,0xB8,0xB6,0x4D,0xCE,0xCF,
-0x4C,0xCA,0x33,0x23,0xE8,0x1C,0x88,0xAA,0x9D,0x8B,0x41,0x6E,0x16,0xC9,0x20,0xE5,
-0x89,0x9E,0xCD,0x3B,0xDA,0x70,0xF7,0x7E,0x99,0x26,0x20,0x14,0x54,0x25,0xAB,0x6E,
-0x73,0x85,0xE6,0x9B,0x21,0x9D,0x0A,0x6C,0x82,0x0E,0xA8,0xF8,0xC2,0x0C,0xFA,0x10,
-0x1E,0x6C,0x96,0xEF,0x87,0x0D,0xC4,0x0F,0x61,0x8B,0xAD,0xEE,0x83,0x2B,0x95,0xF8,
-0x8E,0x92,0x84,0x72,0x39,0xEB,0x20,0xEA,0x83,0xED,0x83,0xCD,0x97,0x6E,0x08,0xBC,
-0xEB,0x4E,0x26,0xB6,0x73,0x2B,0xE4,0xD3,0xF6,0x4C,0xFE,0x26,0x71,0xE2,0x61,0x11,
-0x74,0x4A,0xFF,0x57,0x1A,0x87,0x0F,0x75,0x48,0x2E,0xCF,0x51,0x69,0x17,0xA0,0x02,
-0x12,0x61,0x95,0xD5,0xD1,0x40,0xB2,0x10,0x4C,0xEE,0xC4,0xAC,0x10,0x43,0xA6,0xA5,
-0x9E,0x0A,0xD5,0x95,0x62,0x9A,0x0D,0xCF,0x88,0x82,0xC5,0x32,0x0C,0xE4,0x2B,0x9F,
-0x45,0xE6,0x0D,0x9F,0x28,0x9C,0xB1,0xB9,0x2A,0x5A,0x57,0xAD,0x37,0x0F,0xAF,0x1D,
-0x7F,0xDB,0xBD,0x9F,
+const unsigned char AffirmTrust_Premium_certificate[1354]={
+0x30,0x82,0x05,0x46,0x30,0x82,0x03,0x2E,0xA0,0x03,0x02,0x01,0x02,0x02,0x08,0x6D,
+0x8C,0x14,0x46,0xB1,0xA6,0x0A,0xEE,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,
+0x0D,0x01,0x01,0x0C,0x05,0x00,0x30,0x41,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,
+0x06,0x13,0x02,0x55,0x53,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x0C,0x0B,
+0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x31,0x1C,0x30,0x1A,0x06,
+0x03,0x55,0x04,0x03,0x0C,0x13,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,
+0x74,0x20,0x50,0x72,0x65,0x6D,0x69,0x75,0x6D,0x30,0x1E,0x17,0x0D,0x31,0x30,0x30,
+0x31,0x32,0x39,0x31,0x34,0x31,0x30,0x33,0x36,0x5A,0x17,0x0D,0x34,0x30,0x31,0x32,
+0x33,0x31,0x31,0x34,0x31,0x30,0x33,0x36,0x5A,0x30,0x41,0x31,0x0B,0x30,0x09,0x06,
+0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,
+0x0A,0x0C,0x0B,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x31,0x1C,
+0x30,0x1A,0x06,0x03,0x55,0x04,0x03,0x0C,0x13,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,
+0x72,0x75,0x73,0x74,0x20,0x50,0x72,0x65,0x6D,0x69,0x75,0x6D,0x30,0x82,0x02,0x22,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,
+0x82,0x02,0x0F,0x00,0x30,0x82,0x02,0x0A,0x02,0x82,0x02,0x01,0x00,0xC4,0x12,0xDF,
+0xA9,0x5F,0xFE,0x41,0xDD,0xDD,0xF5,0x9F,0x8A,0xE3,0xF6,0xAC,0xE1,0x3C,0x78,0x9A,
+0xBC,0xD8,0xF0,0x7F,0x7A,0xA0,0x33,0x2A,0xDC,0x8D,0x20,0x5B,0xAE,0x2D,0x6F,0xE7,
+0x93,0xD9,0x36,0x70,0x6A,0x68,0xCF,0x8E,0x51,0xA3,0x85,0x5B,0x67,0x04,0xA0,0x10,
+0x24,0x6F,0x5D,0x28,0x82,0xC1,0x97,0x57,0xD8,0x48,0x29,0x13,0xB6,0xE1,0xBE,0x91,
+0x4D,0xDF,0x85,0x0C,0x53,0x18,0x9A,0x1E,0x24,0xA2,0x4F,0x8F,0xF0,0xA2,0x85,0x0B,
+0xCB,0xF4,0x29,0x7F,0xD2,0xA4,0x58,0xEE,0x26,0x4D,0xC9,0xAA,0xA8,0x7B,0x9A,0xD9,
+0xFA,0x38,0xDE,0x44,0x57,0x15,0xE5,0xF8,0x8C,0xC8,0xD9,0x48,0xE2,0x0D,0x16,0x27,
+0x1D,0x1E,0xC8,0x83,0x85,0x25,0xB7,0xBA,0xAA,0x55,0x41,0xCC,0x03,0x22,0x4B,0x2D,
+0x91,0x8D,0x8B,0xE6,0x89,0xAF,0x66,0xC7,0xE9,0xFF,0x2B,0xE9,0x3C,0xAC,0xDA,0xD2,
+0xB3,0xC3,0xE1,0x68,0x9C,0x89,0xF8,0x7A,0x00,0x56,0xDE,0xF4,0x55,0x95,0x6C,0xFB,
+0xBA,0x64,0xDD,0x62,0x8B,0xDF,0x0B,0x77,0x32,0xEB,0x62,0xCC,0x26,0x9A,0x9B,0xBB,
+0xAA,0x62,0x83,0x4C,0xB4,0x06,0x7A,0x30,0xC8,0x29,0xBF,0xED,0x06,0x4D,0x97,0xB9,
+0x1C,0xC4,0x31,0x2B,0xD5,0x5F,0xBC,0x53,0x12,0x17,0x9C,0x99,0x57,0x29,0x66,0x77,
+0x61,0x21,0x31,0x07,0x2E,0x25,0x49,0x9D,0x18,0xF2,0xEE,0xF3,0x2B,0x71,0x8C,0xB5,
+0xBA,0x39,0x07,0x49,0x77,0xFC,0xEF,0x2E,0x92,0x90,0x05,0x8D,0x2D,0x2F,0x77,0x7B,
+0xEF,0x43,0xBF,0x35,0xBB,0x9A,0xD8,0xF9,0x73,0xA7,0x2C,0xF2,0xD0,0x57,0xEE,0x28,
+0x4E,0x26,0x5F,0x8F,0x90,0x68,0x09,0x2F,0xB8,0xF8,0xDC,0x06,0xE9,0x2E,0x9A,0x3E,
+0x51,0xA7,0xD1,0x22,0xC4,0x0A,0xA7,0x38,0x48,0x6C,0xB3,0xF9,0xFF,0x7D,0xAB,0x86,
+0x57,0xE3,0xBA,0xD6,0x85,0x78,0x77,0xBA,0x43,0xEA,0x48,0x7F,0xF6,0xD8,0xBE,0x23,
+0x6D,0x1E,0xBF,0xD1,0x36,0x6C,0x58,0x5C,0xF1,0xEE,0xA4,0x19,0x54,0x1A,0xF5,0x03,
+0xD2,0x76,0xE6,0xE1,0x8C,0xBD,0x3C,0xB3,0xD3,0x48,0x4B,0xE2,0xC8,0xF8,0x7F,0x92,
+0xA8,0x76,0x46,0x9C,0x42,0x65,0x3E,0xA4,0x1E,0xC1,0x07,0x03,0x5A,0x46,0x2D,0xB8,
+0x97,0xF3,0xB7,0xD5,0xB2,0x55,0x21,0xEF,0xBA,0xDC,0x4C,0x00,0x97,0xFB,0x14,0x95,
+0x27,0x33,0xBF,0xE8,0x43,0x47,0x46,0xD2,0x08,0x99,0x16,0x60,0x3B,0x9A,0x7E,0xD2,
+0xE6,0xED,0x38,0xEA,0xEC,0x01,0x1E,0x3C,0x48,0x56,0x49,0x09,0xC7,0x4C,0x37,0x00,
+0x9E,0x88,0x0E,0xC0,0x73,0xE1,0x6F,0x66,0xE9,0x72,0x47,0x30,0x3E,0x10,0xE5,0x0B,
+0x03,0xC9,0x9A,0x42,0x00,0x6C,0xC5,0x94,0x7E,0x61,0xC4,0x8A,0xDF,0x7F,0x82,0x1A,
+0x0B,0x59,0xC4,0x59,0x32,0x77,0xB3,0xBC,0x60,0x69,0x56,0x39,0xFD,0xB4,0x06,0x7B,
+0x2C,0xD6,0x64,0x36,0xD9,0xBD,0x48,0xED,0x84,0x1F,0x7E,0xA5,0x22,0x8F,0x2A,0xB8,
+0x42,0xF4,0x82,0xB7,0xD4,0x53,0x90,0x78,0x4E,0x2D,0x1A,0xFD,0x81,0x6F,0x44,0xD7,
+0x3B,0x01,0x74,0x96,0x42,0xE0,0x00,0xE2,0x2E,0x6B,0xEA,0xC5,0xEE,0x72,0xAC,0xBB,
+0xBF,0xFE,0xEA,0xAA,0xA8,0xF8,0xDC,0xF6,0xB2,0x79,0x8A,0xB6,0x67,0x02,0x03,0x01,
+0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,
+0x14,0x9D,0xC0,0x67,0xA6,0x0C,0x22,0xD9,0x26,0xF5,0x45,0xAB,0xA6,0x65,0x52,0x11,
+0x27,0xD8,0x45,0xAC,0x63,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,
+0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,
+0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,
+0x01,0x01,0x0C,0x05,0x00,0x03,0x82,0x02,0x01,0x00,0xB3,0x57,0x4D,0x10,0x62,0x4E,
+0x3A,0xE4,0xAC,0xEA,0xB8,0x1C,0xAF,0x32,0x23,0xC8,0xB3,0x49,0x5A,0x51,0x9C,0x76,
+0x28,0x8D,0x79,0xAA,0x57,0x46,0x17,0xD5,0xF5,0x52,0xF6,0xB7,0x44,0xE8,0x08,0x44,
+0xBF,0x18,0x84,0xD2,0x0B,0x80,0xCD,0xC5,0x12,0xFD,0x00,0x55,0x05,0x61,0x87,0x41,
+0xDC,0xB5,0x24,0x9E,0x3C,0xC4,0xD8,0xC8,0xFB,0x70,0x9E,0x2F,0x78,0x96,0x83,0x20,
+0x36,0xDE,0x7C,0x0F,0x69,0x13,0x88,0xA5,0x75,0x36,0x98,0x08,0xA6,0xC6,0xDF,0xAC,
+0xCE,0xE3,0x58,0xD6,0xB7,0x3E,0xDE,0xBA,0xF3,0xEB,0x34,0x40,0xD8,0xA2,0x81,0xF5,
+0x78,0x3F,0x2F,0xD5,0xA5,0xFC,0xD9,0xA2,0xD4,0x5E,0x04,0x0E,0x17,0xAD,0xFE,0x41,
+0xF0,0xE5,0xB2,0x72,0xFA,0x44,0x82,0x33,0x42,0xE8,0x2D,0x58,0xF7,0x56,0x8C,0x62,
+0x3F,0xBA,0x42,0xB0,0x9C,0x0C,0x5C,0x7E,0x2E,0x65,0x26,0x5C,0x53,0x4F,0x00,0xB2,
+0x78,0x7E,0xA1,0x0D,0x99,0x2D,0x8D,0xB8,0x1D,0x8E,0xA2,0xC4,0xB0,0xFD,0x60,0xD0,
+0x30,0xA4,0x8E,0xC8,0x04,0x62,0xA9,0xC4,0xED,0x35,0xDE,0x7A,0x97,0xED,0x0E,0x38,
+0x5E,0x92,0x2F,0x93,0x70,0xA5,0xA9,0x9C,0x6F,0xA7,0x7D,0x13,0x1D,0x7E,0xC6,0x08,
+0x48,0xB1,0x5E,0x67,0xEB,0x51,0x08,0x25,0xE9,0xE6,0x25,0x6B,0x52,0x29,0x91,0x9C,
+0xD2,0x39,0x73,0x08,0x57,0xDE,0x99,0x06,0xB4,0x5B,0x9D,0x10,0x06,0xE1,0xC2,0x00,
+0xA8,0xB8,0x1C,0x4A,0x02,0x0A,0x14,0xD0,0xC1,0x41,0xCA,0xFB,0x8C,0x35,0x21,0x7D,
+0x82,0x38,0xF2,0xA9,0x54,0x91,0x19,0x35,0x93,0x94,0x6D,0x6A,0x3A,0xC5,0xB2,0xD0,
+0xBB,0x89,0x86,0x93,0xE8,0x9B,0xC9,0x0F,0x3A,0xA7,0x7A,0xB8,0xA1,0xF0,0x78,0x46,
+0xFA,0xFC,0x37,0x2F,0xE5,0x8A,0x84,0xF3,0xDF,0xFE,0x04,0xD9,0xA1,0x68,0xA0,0x2F,
+0x24,0xE2,0x09,0x95,0x06,0xD5,0x95,0xCA,0xE1,0x24,0x96,0xEB,0x7C,0xF6,0x93,0x05,
+0xBB,0xED,0x73,0xE9,0x2D,0xD1,0x75,0x39,0xD7,0xE7,0x24,0xDB,0xD8,0x4E,0x5F,0x43,
+0x8F,0x9E,0xD0,0x14,0x39,0xBF,0x55,0x70,0x48,0x99,0x57,0x31,0xB4,0x9C,0xEE,0x4A,
+0x98,0x03,0x96,0x30,0x1F,0x60,0x06,0xEE,0x1B,0x23,0xFE,0x81,0x60,0x23,0x1A,0x47,
+0x62,0x85,0xA5,0xCC,0x19,0x34,0x80,0x6F,0xB3,0xAC,0x1A,0xE3,0x9F,0xF0,0x7B,0x48,
+0xAD,0xD5,0x01,0xD9,0x67,0xB6,0xA9,0x72,0x93,0xEA,0x2D,0x66,0xB5,0xB2,0xB8,0xE4,
+0x3D,0x3C,0xB2,0xEF,0x4C,0x8C,0xEA,0xEB,0x07,0xBF,0xAB,0x35,0x9A,0x55,0x86,0xBC,
+0x18,0xA6,0xB5,0xA8,0x5E,0xB4,0x83,0x6C,0x6B,0x69,0x40,0xD3,0x9F,0xDC,0xF1,0xC3,
+0x69,0x6B,0xB9,0xE1,0x6D,0x09,0xF4,0xF1,0xAA,0x50,0x76,0x0A,0x7A,0x7D,0x7A,0x17,
+0xA1,0x55,0x96,0x42,0x99,0x31,0x09,0xDD,0x60,0x11,0x8D,0x05,0x30,0x7E,0xE6,0x8E,
+0x46,0xD1,0x9D,0x14,0xDA,0xC7,0x17,0xE4,0x05,0x96,0x8C,0xC4,0x24,0xB5,0x1B,0xCF,
+0x14,0x07,0xB2,0x40,0xF8,0xA3,0x9E,0x41,0x86,0xBC,0x04,0xD0,0x6B,0x96,0xC8,0x2A,
+0x80,0x34,0xFD,0xBF,0xEF,0x06,0xA3,0xDD,0x58,0xC5,0x85,0x3D,0x3E,0x8F,0xFE,0x9E,
+0x29,0xE0,0xB6,0xB8,0x09,0x68,0x19,0x1C,0x18,0x43,
};
@@ -2721,616 +3071,570 @@ const unsigned char Go_Daddy_Root_Certificate_Authority___G2_certificate[969]={
};
-/* subject:/C=US/O=GTE Corporation/OU=GTE CyberTrust Solutions, Inc./CN=GTE CyberTrust Global Root */
-/* issuer :/C=US/O=GTE Corporation/OU=GTE CyberTrust Solutions, Inc./CN=GTE CyberTrust Global Root */
-
-
-const unsigned char GTE_CyberTrust_Global_Root_certificate[606]={
-0x30,0x82,0x02,0x5A,0x30,0x82,0x01,0xC3,0x02,0x02,0x01,0xA5,0x30,0x0D,0x06,0x09,
-0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x04,0x05,0x00,0x30,0x75,0x31,0x0B,0x30,
-0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x18,0x30,0x16,0x06,0x03,
-0x55,0x04,0x0A,0x13,0x0F,0x47,0x54,0x45,0x20,0x43,0x6F,0x72,0x70,0x6F,0x72,0x61,
-0x74,0x69,0x6F,0x6E,0x31,0x27,0x30,0x25,0x06,0x03,0x55,0x04,0x0B,0x13,0x1E,0x47,
-0x54,0x45,0x20,0x43,0x79,0x62,0x65,0x72,0x54,0x72,0x75,0x73,0x74,0x20,0x53,0x6F,
-0x6C,0x75,0x74,0x69,0x6F,0x6E,0x73,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x23,0x30,
-0x21,0x06,0x03,0x55,0x04,0x03,0x13,0x1A,0x47,0x54,0x45,0x20,0x43,0x79,0x62,0x65,
-0x72,0x54,0x72,0x75,0x73,0x74,0x20,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x52,0x6F,
-0x6F,0x74,0x30,0x1E,0x17,0x0D,0x39,0x38,0x30,0x38,0x31,0x33,0x30,0x30,0x32,0x39,
-0x30,0x30,0x5A,0x17,0x0D,0x31,0x38,0x30,0x38,0x31,0x33,0x32,0x33,0x35,0x39,0x30,
-0x30,0x5A,0x30,0x75,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,
-0x53,0x31,0x18,0x30,0x16,0x06,0x03,0x55,0x04,0x0A,0x13,0x0F,0x47,0x54,0x45,0x20,
-0x43,0x6F,0x72,0x70,0x6F,0x72,0x61,0x74,0x69,0x6F,0x6E,0x31,0x27,0x30,0x25,0x06,
-0x03,0x55,0x04,0x0B,0x13,0x1E,0x47,0x54,0x45,0x20,0x43,0x79,0x62,0x65,0x72,0x54,
-0x72,0x75,0x73,0x74,0x20,0x53,0x6F,0x6C,0x75,0x74,0x69,0x6F,0x6E,0x73,0x2C,0x20,
-0x49,0x6E,0x63,0x2E,0x31,0x23,0x30,0x21,0x06,0x03,0x55,0x04,0x03,0x13,0x1A,0x47,
-0x54,0x45,0x20,0x43,0x79,0x62,0x65,0x72,0x54,0x72,0x75,0x73,0x74,0x20,0x47,0x6C,
-0x6F,0x62,0x61,0x6C,0x20,0x52,0x6F,0x6F,0x74,0x30,0x81,0x9F,0x30,0x0D,0x06,0x09,
-0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x81,0x8D,0x00,0x30,
-0x81,0x89,0x02,0x81,0x81,0x00,0x95,0x0F,0xA0,0xB6,0xF0,0x50,0x9C,0xE8,0x7A,0xC7,
-0x88,0xCD,0xDD,0x17,0x0E,0x2E,0xB0,0x94,0xD0,0x1B,0x3D,0x0E,0xF6,0x94,0xC0,0x8A,
-0x94,0xC7,0x06,0xC8,0x90,0x97,0xC8,0xB8,0x64,0x1A,0x7A,0x7E,0x6C,0x3C,0x53,0xE1,
-0x37,0x28,0x73,0x60,0x7F,0xB2,0x97,0x53,0x07,0x9F,0x53,0xF9,0x6D,0x58,0x94,0xD2,
-0xAF,0x8D,0x6D,0x88,0x67,0x80,0xE6,0xED,0xB2,0x95,0xCF,0x72,0x31,0xCA,0xA5,0x1C,
-0x72,0xBA,0x5C,0x02,0xE7,0x64,0x42,0xE7,0xF9,0xA9,0x2C,0xD6,0x3A,0x0D,0xAC,0x8D,
-0x42,0xAA,0x24,0x01,0x39,0xE6,0x9C,0x3F,0x01,0x85,0x57,0x0D,0x58,0x87,0x45,0xF8,
-0xD3,0x85,0xAA,0x93,0x69,0x26,0x85,0x70,0x48,0x80,0x3F,0x12,0x15,0xC7,0x79,0xB4,
-0x1F,0x05,0x2F,0x3B,0x62,0x99,0x02,0x03,0x01,0x00,0x01,0x30,0x0D,0x06,0x09,0x2A,
-0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x04,0x05,0x00,0x03,0x81,0x81,0x00,0x6D,0xEB,
-0x1B,0x09,0xE9,0x5E,0xD9,0x51,0xDB,0x67,0x22,0x61,0xA4,0x2A,0x3C,0x48,0x77,0xE3,
-0xA0,0x7C,0xA6,0xDE,0x73,0xA2,0x14,0x03,0x85,0x3D,0xFB,0xAB,0x0E,0x30,0xC5,0x83,
-0x16,0x33,0x81,0x13,0x08,0x9E,0x7B,0x34,0x4E,0xDF,0x40,0xC8,0x74,0xD7,0xB9,0x7D,
-0xDC,0xF4,0x76,0x55,0x7D,0x9B,0x63,0x54,0x18,0xE9,0xF0,0xEA,0xF3,0x5C,0xB1,0xD9,
-0x8B,0x42,0x1E,0xB9,0xC0,0x95,0x4E,0xBA,0xFA,0xD5,0xE2,0x7C,0xF5,0x68,0x61,0xBF,
-0x8E,0xEC,0x05,0x97,0x5F,0x5B,0xB0,0xD7,0xA3,0x85,0x34,0xC4,0x24,0xA7,0x0D,0x0F,
-0x95,0x93,0xEF,0xCB,0x94,0xD8,0x9E,0x1F,0x9D,0x5C,0x85,0x6D,0xC7,0xAA,0xAE,0x4F,
-0x1F,0x22,0xB5,0xCD,0x95,0xAD,0xBA,0xA7,0xCC,0xF9,0xAB,0x0B,0x7A,0x7F,
+/* subject:/C=GB/ST=Greater Manchester/L=Salford/O=Comodo CA Limited/CN=Secure Certificate Services */
+/* issuer :/C=GB/ST=Greater Manchester/L=Salford/O=Comodo CA Limited/CN=Secure Certificate Services */
+
+
+const unsigned char Comodo_Secure_Services_root_certificate[1091]={
+0x30,0x82,0x04,0x3F,0x30,0x82,0x03,0x27,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,
+0x7E,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x47,0x42,0x31,0x1B,
+0x30,0x19,0x06,0x03,0x55,0x04,0x08,0x0C,0x12,0x47,0x72,0x65,0x61,0x74,0x65,0x72,
+0x20,0x4D,0x61,0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31,0x10,0x30,0x0E,0x06,
+0x03,0x55,0x04,0x07,0x0C,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,0x30,
+0x18,0x06,0x03,0x55,0x04,0x0A,0x0C,0x11,0x43,0x6F,0x6D,0x6F,0x64,0x6F,0x20,0x43,
+0x41,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x24,0x30,0x22,0x06,0x03,0x55,
+0x04,0x03,0x0C,0x1B,0x53,0x65,0x63,0x75,0x72,0x65,0x20,0x43,0x65,0x72,0x74,0x69,
+0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x30,
+0x1E,0x17,0x0D,0x30,0x34,0x30,0x31,0x30,0x31,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,
+0x17,0x0D,0x32,0x38,0x31,0x32,0x33,0x31,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,
+0x7E,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x47,0x42,0x31,0x1B,
+0x30,0x19,0x06,0x03,0x55,0x04,0x08,0x0C,0x12,0x47,0x72,0x65,0x61,0x74,0x65,0x72,
+0x20,0x4D,0x61,0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31,0x10,0x30,0x0E,0x06,
+0x03,0x55,0x04,0x07,0x0C,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,0x30,
+0x18,0x06,0x03,0x55,0x04,0x0A,0x0C,0x11,0x43,0x6F,0x6D,0x6F,0x64,0x6F,0x20,0x43,
+0x41,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x24,0x30,0x22,0x06,0x03,0x55,
+0x04,0x03,0x0C,0x1B,0x53,0x65,0x63,0x75,0x72,0x65,0x20,0x43,0x65,0x72,0x74,0x69,
+0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x30,
+0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,
+0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,
+0xC0,0x71,0x33,0x82,0x8A,0xD0,0x70,0xEB,0x73,0x87,0x82,0x40,0xD5,0x1D,0xE4,0xCB,
+0xC9,0x0E,0x42,0x90,0xF9,0xDE,0x34,0xB9,0xA1,0xBA,0x11,0xF4,0x25,0x85,0xF3,0xCC,
+0x72,0x6D,0xF2,0x7B,0x97,0x6B,0xB3,0x07,0xF1,0x77,0x24,0x91,0x5F,0x25,0x8F,0xF6,
+0x74,0x3D,0xE4,0x80,0xC2,0xF8,0x3C,0x0D,0xF3,0xBF,0x40,0xEA,0xF7,0xC8,0x52,0xD1,
+0x72,0x6F,0xEF,0xC8,0xAB,0x41,0xB8,0x6E,0x2E,0x17,0x2A,0x95,0x69,0x0C,0xCD,0xD2,
+0x1E,0x94,0x7B,0x2D,0x94,0x1D,0xAA,0x75,0xD7,0xB3,0x98,0xCB,0xAC,0xBC,0x64,0x53,
+0x40,0xBC,0x8F,0xAC,0xAC,0x36,0xCB,0x5C,0xAD,0xBB,0xDD,0xE0,0x94,0x17,0xEC,0xD1,
+0x5C,0xD0,0xBF,0xEF,0xA5,0x95,0xC9,0x90,0xC5,0xB0,0xAC,0xFB,0x1B,0x43,0xDF,0x7A,
+0x08,0x5D,0xB7,0xB8,0xF2,0x40,0x1B,0x2B,0x27,0x9E,0x50,0xCE,0x5E,0x65,0x82,0x88,
+0x8C,0x5E,0xD3,0x4E,0x0C,0x7A,0xEA,0x08,0x91,0xB6,0x36,0xAA,0x2B,0x42,0xFB,0xEA,
+0xC2,0xA3,0x39,0xE5,0xDB,0x26,0x38,0xAD,0x8B,0x0A,0xEE,0x19,0x63,0xC7,0x1C,0x24,
+0xDF,0x03,0x78,0xDA,0xE6,0xEA,0xC1,0x47,0x1A,0x0B,0x0B,0x46,0x09,0xDD,0x02,0xFC,
+0xDE,0xCB,0x87,0x5F,0xD7,0x30,0x63,0x68,0xA1,0xAE,0xDC,0x32,0xA1,0xBA,0xBE,0xFE,
+0x44,0xAB,0x68,0xB6,0xA5,0x17,0x15,0xFD,0xBD,0xD5,0xA7,0xA7,0x9A,0xE4,0x44,0x33,
+0xE9,0x88,0x8E,0xFC,0xED,0x51,0xEB,0x93,0x71,0x4E,0xAD,0x01,0xE7,0x44,0x8E,0xAB,
+0x2D,0xCB,0xA8,0xFE,0x01,0x49,0x48,0xF0,0xC0,0xDD,0xC7,0x68,0xD8,0x92,0xFE,0x3D,
+0x02,0x03,0x01,0x00,0x01,0xA3,0x81,0xC7,0x30,0x81,0xC4,0x30,0x1D,0x06,0x03,0x55,
+0x1D,0x0E,0x04,0x16,0x04,0x14,0x3C,0xD8,0x93,0x88,0xC2,0xC0,0x82,0x09,0xCC,0x01,
+0x99,0x06,0x93,0x20,0xE9,0x9E,0x70,0x09,0x63,0x4F,0x30,0x0E,0x06,0x03,0x55,0x1D,
+0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,
+0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x81,0x81,0x06,0x03,
+0x55,0x1D,0x1F,0x04,0x7A,0x30,0x78,0x30,0x3B,0xA0,0x39,0xA0,0x37,0x86,0x35,0x68,
+0x74,0x74,0x70,0x3A,0x2F,0x2F,0x63,0x72,0x6C,0x2E,0x63,0x6F,0x6D,0x6F,0x64,0x6F,
+0x63,0x61,0x2E,0x63,0x6F,0x6D,0x2F,0x53,0x65,0x63,0x75,0x72,0x65,0x43,0x65,0x72,
+0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,
+0x2E,0x63,0x72,0x6C,0x30,0x39,0xA0,0x37,0xA0,0x35,0x86,0x33,0x68,0x74,0x74,0x70,
+0x3A,0x2F,0x2F,0x63,0x72,0x6C,0x2E,0x63,0x6F,0x6D,0x6F,0x64,0x6F,0x2E,0x6E,0x65,
+0x74,0x2F,0x53,0x65,0x63,0x75,0x72,0x65,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,
+0x61,0x74,0x65,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x2E,0x63,0x72,0x6C,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,
+0x01,0x01,0x00,0x87,0x01,0x6D,0x23,0x1D,0x7E,0x5B,0x17,0x7D,0xC1,0x61,0x32,0xCF,
+0x8F,0xE7,0xF3,0x8A,0x94,0x59,0x66,0xE0,0x9E,0x28,0xA8,0x5E,0xD3,0xB7,0xF4,0x34,
+0xE6,0xAA,0x39,0xB2,0x97,0x16,0xC5,0x82,0x6F,0x32,0xA4,0xE9,0x8C,0xE7,0xAF,0xFD,
+0xEF,0xC2,0xE8,0xB9,0x4B,0xAA,0xA3,0xF4,0xE6,0xDA,0x8D,0x65,0x21,0xFB,0xBA,0x80,
+0xEB,0x26,0x28,0x85,0x1A,0xFE,0x39,0x8C,0xDE,0x5B,0x04,0x04,0xB4,0x54,0xF9,0xA3,
+0x67,0x9E,0x41,0xFA,0x09,0x52,0xCC,0x05,0x48,0xA8,0xC9,0x3F,0x21,0x04,0x1E,0xCE,
+0x48,0x6B,0xFC,0x85,0xE8,0xC2,0x7B,0xAF,0x7F,0xB7,0xCC,0xF8,0x5F,0x3A,0xFD,0x35,
+0xC6,0x0D,0xEF,0x97,0xDC,0x4C,0xAB,0x11,0xE1,0x6B,0xCB,0x31,0xD1,0x6C,0xFB,0x48,
+0x80,0xAB,0xDC,0x9C,0x37,0xB8,0x21,0x14,0x4B,0x0D,0x71,0x3D,0xEC,0x83,0x33,0x6E,
+0xD1,0x6E,0x32,0x16,0xEC,0x98,0xC7,0x16,0x8B,0x59,0xA6,0x34,0xAB,0x05,0x57,0x2D,
+0x93,0xF7,0xAA,0x13,0xCB,0xD2,0x13,0xE2,0xB7,0x2E,0x3B,0xCD,0x6B,0x50,0x17,0x09,
+0x68,0x3E,0xB5,0x26,0x57,0xEE,0xB6,0xE0,0xB6,0xDD,0xB9,0x29,0x80,0x79,0x7D,0x8F,
+0xA3,0xF0,0xA4,0x28,0xA4,0x15,0xC4,0x85,0xF4,0x27,0xD4,0x6B,0xBF,0xE5,0x5C,0xE4,
+0x65,0x02,0x76,0x54,0xB4,0xE3,0x37,0x66,0x24,0xD3,0x19,0x61,0xC8,0x52,0x10,0xE5,
+0x8B,0x37,0x9A,0xB9,0xA9,0xF9,0x1D,0xBF,0xEA,0x99,0x92,0x61,0x96,0xFF,0x01,0xCD,
+0xA1,0x5F,0x0D,0xBC,0x71,0xBC,0x0E,0xAC,0x0B,0x1D,0x47,0x45,0x1D,0xC1,0xEC,0x7C,
+0xEC,0xFD,0x29,
};
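/*
 * Aside (illustrative, not from the generated table): every array here is a
 * complete DER-encoded X.509 certificate, so the declared bound can be
 * checked against the outer ASN.1 SEQUENCE header by hand. The Comodo root
 * above begins 0x30,0x82,0x04,0x3F: tag 0x30 (SEQUENCE), 0x82 (long-form
 * length, two length octets follow), then 0x043F = 1087 content bytes;
 * 1087 + 4 header bytes = 1091, matching the declared [1091] bound.
 */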
-/* subject:/C=US/O=Network Solutions L.L.C./CN=Network Solutions Certificate Authority */
-/* issuer :/C=US/O=Network Solutions L.L.C./CN=Network Solutions Certificate Authority */
+/* subject:/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Trusted Root G4 */
+/* issuer :/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Trusted Root G4 */


-const unsigned char Network_Solutions_Certificate_Authority_certificate[1002]={
-0x30,0x82,0x03,0xE6,0x30,0x82,0x02,0xCE,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x57,
-0xCB,0x33,0x6F,0xC2,0x5C,0x16,0xE6,0x47,0x16,0x17,0xE3,0x90,0x31,0x68,0xE0,0x30,
-0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x62,
-0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x21,0x30,
-0x1F,0x06,0x03,0x55,0x04,0x0A,0x13,0x18,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x20,
-0x53,0x6F,0x6C,0x75,0x74,0x69,0x6F,0x6E,0x73,0x20,0x4C,0x2E,0x4C,0x2E,0x43,0x2E,
-0x31,0x30,0x30,0x2E,0x06,0x03,0x55,0x04,0x03,0x13,0x27,0x4E,0x65,0x74,0x77,0x6F,
-0x72,0x6B,0x20,0x53,0x6F,0x6C,0x75,0x74,0x69,0x6F,0x6E,0x73,0x20,0x43,0x65,0x72,
-0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,
-0x74,0x79,0x30,0x1E,0x17,0x0D,0x30,0x36,0x31,0x32,0x30,0x31,0x30,0x30,0x30,0x30,
-0x30,0x30,0x5A,0x17,0x0D,0x32,0x39,0x31,0x32,0x33,0x31,0x32,0x33,0x35,0x39,0x35,
-0x39,0x5A,0x30,0x62,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,
-0x53,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x0A,0x13,0x18,0x4E,0x65,0x74,0x77,
-0x6F,0x72,0x6B,0x20,0x53,0x6F,0x6C,0x75,0x74,0x69,0x6F,0x6E,0x73,0x20,0x4C,0x2E,
-0x4C,0x2E,0x43,0x2E,0x31,0x30,0x30,0x2E,0x06,0x03,0x55,0x04,0x03,0x13,0x27,0x4E,
-0x65,0x74,0x77,0x6F,0x72,0x6B,0x20,0x53,0x6F,0x6C,0x75,0x74,0x69,0x6F,0x6E,0x73,
-0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x41,0x75,0x74,
-0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,
-0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,
-0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xE4,0xBC,0x7E,0x92,0x30,0x6D,0xC6,0xD8,0x8E,
-0x2B,0x0B,0xBC,0x46,0xCE,0xE0,0x27,0x96,0xDE,0xDE,0xF9,0xFA,0x12,0xD3,0x3C,0x33,
-0x73,0xB3,0x04,0x2F,0xBC,0x71,0x8C,0xE5,0x9F,0xB6,0x22,0x60,0x3E,0x5F,0x5D,0xCE,
-0x09,0xFF,0x82,0x0C,0x1B,0x9A,0x51,0x50,0x1A,0x26,0x89,0xDD,0xD5,0x61,0x5D,0x19,
-0xDC,0x12,0x0F,0x2D,0x0A,0xA2,0x43,0x5D,0x17,0xD0,0x34,0x92,0x20,0xEA,0x73,0xCF,
-0x38,0x2C,0x06,0x26,0x09,0x7A,0x72,0xF7,0xFA,0x50,0x32,0xF8,0xC2,0x93,0xD3,0x69,
-0xA2,0x23,0xCE,0x41,0xB1,0xCC,0xE4,0xD5,0x1F,0x36,0xD1,0x8A,0x3A,0xF8,0x8C,0x63,
-0xE2,0x14,0x59,0x69,0xED,0x0D,0xD3,0x7F,0x6B,0xE8,0xB8,0x03,0xE5,0x4F,0x6A,0xE5,
-0x98,0x63,0x69,0x48,0x05,0xBE,0x2E,0xFF,0x33,0xB6,0xE9,0x97,0x59,0x69,0xF8,0x67,
-0x19,0xAE,0x93,0x61,0x96,0x44,0x15,0xD3,0x72,0xB0,0x3F,0xBC,0x6A,0x7D,0xEC,0x48,
-0x7F,0x8D,0xC3,0xAB,0xAA,0x71,0x2B,0x53,0x69,0x41,0x53,0x34,0xB5,0xB0,0xB9,0xC5,
-0x06,0x0A,0xC4,0xB0,0x45,0xF5,0x41,0x5D,0x6E,0x89,0x45,0x7B,0x3D,0x3B,0x26,0x8C,
-0x74,0xC2,0xE5,0xD2,0xD1,0x7D,0xB2,0x11,0xD4,0xFB,0x58,0x32,0x22,0x9A,0x80,0xC9,
-0xDC,0xFD,0x0C,0xE9,0x7F,0x5E,0x03,0x97,0xCE,0x3B,0x00,0x14,0x87,0x27,0x70,0x38,
-0xA9,0x8E,0x6E,0xB3,0x27,0x76,0x98,0x51,0xE0,0x05,0xE3,0x21,0xAB,0x1A,0xD5,0x85,
-0x22,0x3C,0x29,0xB5,0x9A,0x16,0xC5,0x80,0xA8,0xF4,0xBB,0x6B,0x30,0x8F,0x2F,0x46,
-0x02,0xA2,0xB1,0x0C,0x22,0xE0,0xD3,0x02,0x03,0x01,0x00,0x01,0xA3,0x81,0x97,0x30,
-0x81,0x94,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x21,0x30,0xC9,
-0xFB,0x00,0xD7,0x4E,0x98,0xDA,0x87,0xAA,0x2A,0xD0,0xA7,0x2E,0xB1,0x40,0x31,0xA7,
-0x4C,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,
-0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,
-0x01,0xFF,0x30,0x52,0x06,0x03,0x55,0x1D,0x1F,0x04,0x4B,0x30,0x49,0x30,0x47,0xA0,
-0x45,0xA0,0x43,0x86,0x41,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x63,0x72,0x6C,0x2E,
-0x6E,0x65,0x74,0x73,0x6F,0x6C,0x73,0x73,0x6C,0x2E,0x63,0x6F,0x6D,0x2F,0x4E,0x65,
-0x74,0x77,0x6F,0x72,0x6B,0x53,0x6F,0x6C,0x75,0x74,0x69,0x6F,0x6E,0x73,0x43,0x65,
-0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,
-0x74,0x79,0x2E,0x63,0x72,0x6C,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,
-0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0xBB,0xAE,0x4B,0xE7,0xB7,0x57,
-0xEB,0x7F,0xAA,0x2D,0xB7,0x73,0x47,0x85,0x6A,0xC1,0xE4,0xA5,0x1D,0xE4,0xE7,0x3C,
-0xE9,0xF4,0x59,0x65,0x77,0xB5,0x7A,0x5B,0x5A,0x8D,0x25,0x36,0xE0,0x7A,0x97,0x2E,
-0x38,0xC0,0x57,0x60,0x83,0x98,0x06,0x83,0x9F,0xB9,0x76,0x7A,0x6E,0x50,0xE0,0xBA,
-0x88,0x2C,0xFC,0x45,0xCC,0x18,0xB0,0x99,0x95,0x51,0x0E,0xEC,0x1D,0xB8,0x88,0xFF,
-0x87,0x50,0x1C,0x82,0xC2,0xE3,0xE0,0x32,0x80,0xBF,0xA0,0x0B,0x47,0xC8,0xC3,0x31,
-0xEF,0x99,0x67,0x32,0x80,0x4F,0x17,0x21,0x79,0x0C,0x69,0x5C,0xDE,0x5E,0x34,0xAE,
-0x02,0xB5,0x26,0xEA,0x50,0xDF,0x7F,0x18,0x65,0x2C,0xC9,0xF2,0x63,0xE1,0xA9,0x07,
-0xFE,0x7C,0x71,0x1F,0x6B,0x33,0x24,0x6A,0x1E,0x05,0xF7,0x05,0x68,0xC0,0x6A,0x12,
-0xCB,0x2E,0x5E,0x61,0xCB,0xAE,0x28,0xD3,0x7E,0xC2,0xB4,0x66,0x91,0x26,0x5F,0x3C,
-0x2E,0x24,0x5F,0xCB,0x58,0x0F,0xEB,0x28,0xEC,0xAF,0x11,0x96,0xF3,0xDC,0x7B,0x6F,
-0xC0,0xA7,0x88,0xF2,0x53,0x77,0xB3,0x60,0x5E,0xAE,0xAE,0x28,0xDA,0x35,0x2C,0x6F,
-0x34,0x45,0xD3,0x26,0xE1,0xDE,0xEC,0x5B,0x4F,0x27,0x6B,0x16,0x7C,0xBD,0x44,0x04,
-0x18,0x82,0xB3,0x89,0x79,0x17,0x10,0x71,0x3D,0x7A,0xA2,0x16,0x4E,0xF5,0x01,0xCD,
-0xA4,0x6C,0x65,0x68,0xA1,0x49,0x76,0x5C,0x43,0xC9,0xD8,0xBC,0x36,0x67,0x6C,0xA5,
-0x94,0xB5,0xD4,0xCC,0xB9,0xBD,0x6A,0x35,0x56,0x21,0xDE,0xD8,0xC3,0xEB,0xFB,0xCB,
-0xA4,0x60,0x4C,0xB0,0x55,0xA0,0xA0,0x7B,0x57,0xB2,
+const unsigned char DigiCert_Trusted_Root_G4_certificate[1428]={
+0x30,0x82,0x05,0x90,0x30,0x82,0x03,0x78,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x05,
+0x9B,0x1B,0x57,0x9E,0x8E,0x21,0x32,0xE2,0x39,0x07,0xBD,0xA7,0x77,0x75,0x5C,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0C,0x05,0x00,0x30,0x62,
+0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30,
+0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74,
+0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0B,0x13,0x10,0x77,
+0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31,
+0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x03,0x13,0x18,0x44,0x69,0x67,0x69,0x43,0x65,
+0x72,0x74,0x20,0x54,0x72,0x75,0x73,0x74,0x65,0x64,0x20,0x52,0x6F,0x6F,0x74,0x20,
+0x47,0x34,0x30,0x1E,0x17,0x0D,0x31,0x33,0x30,0x38,0x30,0x31,0x31,0x32,0x30,0x30,
+0x30,0x30,0x5A,0x17,0x0D,0x33,0x38,0x30,0x31,0x31,0x35,0x31,0x32,0x30,0x30,0x30,
+0x30,0x5A,0x30,0x62,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,
+0x53,0x31,0x15,0x30,0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,
+0x43,0x65,0x72,0x74,0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,
+0x0B,0x13,0x10,0x77,0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,
+0x63,0x6F,0x6D,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x03,0x13,0x18,0x44,0x69,
+0x67,0x69,0x43,0x65,0x72,0x74,0x20,0x54,0x72,0x75,0x73,0x74,0x65,0x64,0x20,0x52,
+0x6F,0x6F,0x74,0x20,0x47,0x34,0x30,0x82,0x02,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,
+0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x02,0x0F,0x00,0x30,0x82,
+0x02,0x0A,0x02,0x82,0x02,0x01,0x00,0xBF,0xE6,0x90,0x73,0x68,0xDE,0xBB,0xE4,0x5D,
+0x4A,0x3C,0x30,0x22,0x30,0x69,0x33,0xEC,0xC2,0xA7,0x25,0x2E,0xC9,0x21,0x3D,0xF2,
+0x8A,0xD8,0x59,0xC2,0xE1,0x29,0xA7,0x3D,0x58,0xAB,0x76,0x9A,0xCD,0xAE,0x7B,0x1B,
+0x84,0x0D,0xC4,0x30,0x1F,0xF3,0x1B,0xA4,0x38,0x16,0xEB,0x56,0xC6,0x97,0x6D,0x1D,
+0xAB,0xB2,0x79,0xF2,0xCA,0x11,0xD2,0xE4,0x5F,0xD6,0x05,0x3C,0x52,0x0F,0x52,0x1F,
+0xC6,0x9E,0x15,0xA5,0x7E,0xBE,0x9F,0xA9,0x57,0x16,0x59,0x55,0x72,0xAF,0x68,0x93,
+0x70,0xC2,0xB2,0xBA,0x75,0x99,0x6A,0x73,0x32,0x94,0xD1,0x10,0x44,0x10,0x2E,0xDF,
+0x82,0xF3,0x07,0x84,0xE6,0x74,0x3B,0x6D,0x71,0xE2,0x2D,0x0C,0x1B,0xEE,0x20,0xD5,
+0xC9,0x20,0x1D,0x63,0x29,0x2D,0xCE,0xEC,0x5E,0x4E,0xC8,0x93,0xF8,0x21,0x61,0x9B,
+0x34,0xEB,0x05,0xC6,0x5E,0xEC,0x5B,0x1A,0xBC,0xEB,0xC9,0xCF,0xCD,0xAC,0x34,0x40,
+0x5F,0xB1,0x7A,0x66,0xEE,0x77,0xC8,0x48,0xA8,0x66,0x57,0x57,0x9F,0x54,0x58,0x8E,
+0x0C,0x2B,0xB7,0x4F,0xA7,0x30,0xD9,0x56,0xEE,0xCA,0x7B,0x5D,0xE3,0xAD,0xC9,0x4F,
+0x5E,0xE5,0x35,0xE7,0x31,0xCB,0xDA,0x93,0x5E,0xDC,0x8E,0x8F,0x80,0xDA,0xB6,0x91,
+0x98,0x40,0x90,0x79,0xC3,0x78,0xC7,0xB6,0xB1,0xC4,0xB5,0x6A,0x18,0x38,0x03,0x10,
+0x8D,0xD8,0xD4,0x37,0xA4,0x2E,0x05,0x7D,0x88,0xF5,0x82,0x3E,0x10,0x91,0x70,0xAB,
+0x55,0x82,0x41,0x32,0xD7,0xDB,0x04,0x73,0x2A,0x6E,0x91,0x01,0x7C,0x21,0x4C,0xD4,
+0xBC,0xAE,0x1B,0x03,0x75,0x5D,0x78,0x66,0xD9,0x3A,0x31,0x44,0x9A,0x33,0x40,0xBF,
+0x08,0xD7,0x5A,0x49,0xA4,0xC2,0xE6,0xA9,0xA0,0x67,0xDD,0xA4,0x27,0xBC,0xA1,0x4F,
+0x39,0xB5,0x11,0x58,0x17,0xF7,0x24,0x5C,0x46,0x8F,0x64,0xF7,0xC1,0x69,0x88,0x76,
+0x98,0x76,0x3D,0x59,0x5D,0x42,0x76,0x87,0x89,0x97,0x69,0x7A,0x48,0xF0,0xE0,0xA2,
+0x12,0x1B,0x66,0x9A,0x74,0xCA,0xDE,0x4B,0x1E,0xE7,0x0E,0x63,0xAE,0xE6,0xD4,0xEF,
+0x92,0x92,0x3A,0x9E,0x3D,0xDC,0x00,0xE4,0x45,0x25,0x89,0xB6,0x9A,0x44,0x19,0x2B,
+0x7E,0xC0,0x94,0xB4,0xD2,0x61,0x6D,0xEB,0x33,0xD9,0xC5,0xDF,0x4B,0x04,0x00,0xCC,
+0x7D,0x1C,0x95,0xC3,0x8F,0xF7,0x21,0xB2,0xB2,0x11,0xB7,0xBB,0x7F,0xF2,0xD5,0x8C,
+0x70,0x2C,0x41,0x60,0xAA,0xB1,0x63,0x18,0x44,0x95,0x1A,0x76,0x62,0x7E,0xF6,0x80,
+0xB0,0xFB,0xE8,0x64,0xA6,0x33,0xD1,0x89,0x07,0xE1,0xBD,0xB7,0xE6,0x43,0xA4,0x18,
+0xB8,0xA6,0x77,0x01,0xE1,0x0F,0x94,0x0C,0x21,0x1D,0xB2,0x54,0x29,0x25,0x89,0x6C,
+0xE5,0x0E,0x52,0x51,0x47,0x74,0xBE,0x26,0xAC,0xB6,0x41,0x75,0xDE,0x7A,0xAC,0x5F,
+0x8D,0x3F,0xC9,0xBC,0xD3,0x41,0x11,0x12,0x5B,0xE5,0x10,0x50,0xEB,0x31,0xC5,0xCA,
+0x72,0x16,0x22,0x09,0xDF,0x7C,0x4C,0x75,0x3F,0x63,0xEC,0x21,0x5F,0xC4,0x20,0x51,
+0x6B,0x6F,0xB1,0xAB,0x86,0x8B,0x4F,0xC2,0xD6,0x45,0x5F,0x9D,0x20,0xFC,0xA1,0x1E,
+0xC5,0xC0,0x8F,0xA2,0xB1,0x7E,0x0A,0x26,0x99,0xF5,0xE4,0x69,0x2F,0x98,0x1D,0x2D,
+0xF5,0xD9,0xA9,0xB2,0x1D,0xE5,0x1B,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,
+0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,
+0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,
+0x86,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xEC,0xD7,0xE3,0x82,
+0xD2,0x71,0x5D,0x64,0x4C,0xDF,0x2E,0x67,0x3F,0xE7,0xBA,0x98,0xAE,0x1C,0x0F,0x4F,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0C,0x05,0x00,0x03,
+0x82,0x02,0x01,0x00,0xBB,0x61,0xD9,0x7D,0xA9,0x6C,0xBE,0x17,0xC4,0x91,0x1B,0xC3,
+0xA1,0xA2,0x00,0x8D,0xE3,0x64,0x68,0x0F,0x56,0xCF,0x77,0xAE,0x70,0xF9,0xFD,0x9A,
+0x4A,0x99,0xB9,0xC9,0x78,0x5C,0x0C,0x0C,0x5F,0xE4,0xE6,0x14,0x29,0x56,0x0B,0x36,
+0x49,0x5D,0x44,0x63,0xE0,0xAD,0x9C,0x96,0x18,0x66,0x1B,0x23,0x0D,0x3D,0x79,0xE9,
+0x6D,0x6B,0xD6,0x54,0xF8,0xD2,0x3C,0xC1,0x43,0x40,0xAE,0x1D,0x50,0xF5,0x52,0xFC,
+0x90,0x3B,0xBB,0x98,0x99,0x69,0x6B,0xC7,0xC1,0xA7,0xA8,0x68,0xA4,0x27,0xDC,0x9D,
+0xF9,0x27,0xAE,0x30,0x85,0xB9,0xF6,0x67,0x4D,0x3A,0x3E,0x8F,0x59,0x39,0x22,0x53,
+0x44,0xEB,0xC8,0x5D,0x03,0xCA,0xED,0x50,0x7A,0x7D,0x62,0x21,0x0A,0x80,0xC8,0x73,
+0x66,0xD1,0xA0,0x05,0x60,0x5F,0xE8,0xA5,0xB4,0xA7,0xAF,0xA8,0xF7,0x6D,0x35,0x9C,
+0x7C,0x5A,0x8A,0xD6,0xA2,0x38,0x99,0xF3,0x78,0x8B,0xF4,0x4D,0xD2,0x20,0x0B,0xDE,
+0x04,0xEE,0x8C,0x9B,0x47,0x81,0x72,0x0D,0xC0,0x14,0x32,0xEF,0x30,0x59,0x2E,0xAE,
+0xE0,0x71,0xF2,0x56,0xE4,0x6A,0x97,0x6F,0x92,0x50,0x6D,0x96,0x8D,0x68,0x7A,0x9A,
+0xB2,0x36,0x14,0x7A,0x06,0xF2,0x24,0xB9,0x09,0x11,0x50,0xD7,0x08,0xB1,0xB8,0x89,
+0x7A,0x84,0x23,0x61,0x42,0x29,0xE5,0xA3,0xCD,0xA2,0x20,0x41,0xD7,0xD1,0x9C,0x64,
+0xD9,0xEA,0x26,0xA1,0x8B,0x14,0xD7,0x4C,0x19,0xB2,0x50,0x41,0x71,0x3D,0x3F,0x4D,
+0x70,0x23,0x86,0x0C,0x4A,0xDC,0x81,0xD2,0xCC,0x32,0x94,0x84,0x0D,0x08,0x09,0x97,
+0x1C,0x4F,0xC0,0xEE,0x6B,0x20,0x74,0x30,0xD2,0xE0,0x39,0x34,0x10,0x85,0x21,0x15,
+0x01,0x08,0xE8,0x55,0x32,0xDE,0x71,0x49,0xD9,0x28,0x17,0x50,0x4D,0xE6,0xBE,0x4D,
+0xD1,0x75,0xAC,0xD0,0xCA,0xFB,0x41,0xB8,0x43,0xA5,0xAA,0xD3,0xC3,0x05,0x44,0x4F,
+0x2C,0x36,0x9B,0xE2,0xFA,0xE2,0x45,0xB8,0x23,0x53,0x6C,0x06,0x6F,0x67,0x55,0x7F,
+0x46,0xB5,0x4C,0x3F,0x6E,0x28,0x5A,0x79,0x26,0xD2,0xA4,0xA8,0x62,0x97,0xD2,0x1E,
+0xE2,0xED,0x4A,0x8B,0xBC,0x1B,0xFD,0x47,0x4A,0x0D,0xDF,0x67,0x66,0x7E,0xB2,0x5B,
+0x41,0xD0,0x3B,0xE4,0xF4,0x3B,0xF4,0x04,0x63,0xE9,0xEF,0xC2,0x54,0x00,0x51,0xA0,
+0x8A,0x2A,0xC9,0xCE,0x78,0xCC,0xD5,0xEA,0x87,0x04,0x18,0xB3,0xCE,0xAF,0x49,0x88,
+0xAF,0xF3,0x92,0x99,0xB6,0xB3,0xE6,0x61,0x0F,0xD2,0x85,0x00,0xE7,0x50,0x1A,0xE4,
+0x1B,0x95,0x9D,0x19,0xA1,0xB9,0x9C,0xB1,0x9B,0xB1,0x00,0x1E,0xEF,0xD0,0x0F,0x4F,
+0x42,0x6C,0xC9,0x0A,0xBC,0xEE,0x43,0xFA,0x3A,0x71,0xA5,0xC8,0x4D,0x26,0xA5,0x35,
+0xFD,0x89,0x5D,0xBC,0x85,0x62,0x1D,0x32,0xD2,0xA0,0x2B,0x54,0xED,0x9A,0x57,0xC1,
+0xDB,0xFA,0x10,0xCF,0x19,0xB7,0x8B,0x4A,0x1B,0x8F,0x01,0xB6,0x27,0x95,0x53,0xE8,
+0xB6,0x89,0x6D,0x5B,0xBC,0x68,0xD4,0x23,0xE8,0x8B,0x51,0xA2,0x56,0xF9,0xF0,0xA6,
+0x80,0xA0,0xD6,0x1E,0xB3,0xBC,0x0F,0x0F,0x53,0x75,0x29,0xAA,0xEA,0x13,0x77,0xE4,
+0xDE,0x8C,0x81,0x21,0xAD,0x07,0x10,0x47,0x11,0xAD,0x87,0x3D,0x07,0xD1,0x75,0xBC,
+0xCF,0xF3,0x66,0x7E,
};
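/*
 * Aside (illustrative, not from the generated table): the AlgorithmIdentifier
 * bytes in each blob record the signature algorithm. The removed Network
 * Solutions root carries OID 1.2.840.113549.1.1.5 (sha1WithRSAEncryption,
 * encoded 0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05), while the DigiCert
 * Trusted Root G4 added above carries 1.2.840.113549.1.1.12
 * (sha384WithRSAEncryption, the ...,0x01,0x01,0x0C sequence visible near its
 * start and end).
 */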
-/* subject:/L=ValiCert Validation Network/O=ValiCert, Inc./OU=ValiCert Class 3 Policy Validation Authority/CN=http://www.valicert.com//emailAddress=info@valicert.com */
-/* issuer :/L=ValiCert Validation Network/O=ValiCert, Inc./OU=ValiCert Class 3 Policy Validation Authority/CN=http://www.valicert.com//emailAddress=info@valicert.com */
-
-
-const unsigned char RSA_Root_Certificate_1_certificate[747]={
-0x30,0x82,0x02,0xE7,0x30,0x82,0x02,0x50,0x02,0x01,0x01,0x30,0x0D,0x06,0x09,0x2A,
-0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x81,0xBB,0x31,0x24,0x30,
-0x22,0x06,0x03,0x55,0x04,0x07,0x13,0x1B,0x56,0x61,0x6C,0x69,0x43,0x65,0x72,0x74,
-0x20,0x56,0x61,0x6C,0x69,0x64,0x61,0x74,0x69,0x6F,0x6E,0x20,0x4E,0x65,0x74,0x77,
-0x6F,0x72,0x6B,0x31,0x17,0x30,0x15,0x06,0x03,0x55,0x04,0x0A,0x13,0x0E,0x56,0x61,
-0x6C,0x69,0x43,0x65,0x72,0x74,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x35,0x30,0x33,
-0x06,0x03,0x55,0x04,0x0B,0x13,0x2C,0x56,0x61,0x6C,0x69,0x43,0x65,0x72,0x74,0x20,
-0x43,0x6C,0x61,0x73,0x73,0x20,0x33,0x20,0x50,0x6F,0x6C,0x69,0x63,0x79,0x20,0x56,
-0x61,0x6C,0x69,0x64,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,
-0x69,0x74,0x79,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x03,0x13,0x18,0x68,0x74,
-0x74,0x70,0x3A,0x2F,0x2F,0x77,0x77,0x77,0x2E,0x76,0x61,0x6C,0x69,0x63,0x65,0x72,
-0x74,0x2E,0x63,0x6F,0x6D,0x2F,0x31,0x20,0x30,0x1E,0x06,0x09,0x2A,0x86,0x48,0x86,
-0xF7,0x0D,0x01,0x09,0x01,0x16,0x11,0x69,0x6E,0x66,0x6F,0x40,0x76,0x61,0x6C,0x69,
-0x63,0x65,0x72,0x74,0x2E,0x63,0x6F,0x6D,0x30,0x1E,0x17,0x0D,0x39,0x39,0x30,0x36,
-0x32,0x36,0x30,0x30,0x32,0x32,0x33,0x33,0x5A,0x17,0x0D,0x31,0x39,0x30,0x36,0x32,
-0x36,0x30,0x30,0x32,0x32,0x33,0x33,0x5A,0x30,0x81,0xBB,0x31,0x24,0x30,0x22,0x06,
-0x03,0x55,0x04,0x07,0x13,0x1B,0x56,0x61,0x6C,0x69,0x43,0x65,0x72,0x74,0x20,0x56,
-0x61,0x6C,0x69,0x64,0x61,0x74,0x69,0x6F,0x6E,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,
-0x6B,0x31,0x17,0x30,0x15,0x06,0x03,0x55,0x04,0x0A,0x13,0x0E,0x56,0x61,0x6C,0x69,
-0x43,0x65,0x72,0x74,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x35,0x30,0x33,0x06,0x03,
-0x55,0x04,0x0B,0x13,0x2C,0x56,0x61,0x6C,0x69,0x43,0x65,0x72,0x74,0x20,0x43,0x6C,
-0x61,0x73,0x73,0x20,0x33,0x20,0x50,0x6F,0x6C,0x69,0x63,0x79,0x20,0x56,0x61,0x6C,
-0x69,0x64,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,
-0x79,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x03,0x13,0x18,0x68,0x74,0x74,0x70,
-0x3A,0x2F,0x2F,0x77,0x77,0x77,0x2E,0x76,0x61,0x6C,0x69,0x63,0x65,0x72,0x74,0x2E,
-0x63,0x6F,0x6D,0x2F,0x31,0x20,0x30,0x1E,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,
-0x01,0x09,0x01,0x16,0x11,0x69,0x6E,0x66,0x6F,0x40,0x76,0x61,0x6C,0x69,0x63,0x65,
-0x72,0x74,0x2E,0x63,0x6F,0x6D,0x30,0x81,0x9F,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,
-0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x81,0x8D,0x00,0x30,0x81,0x89,0x02,
-0x81,0x81,0x00,0xE3,0x98,0x51,0x96,0x1C,0xE8,0xD5,0xB1,0x06,0x81,0x6A,0x57,0xC3,
-0x72,0x75,0x93,0xAB,0xCF,0x9E,0xA6,0xFC,0xF3,0x16,0x52,0xD6,0x2D,0x4D,0x9F,0x35,
-0x44,0xA8,0x2E,0x04,0x4D,0x07,0x49,0x8A,0x38,0x29,0xF5,0x77,0x37,0xE7,0xB7,0xAB,
-0x5D,0xDF,0x36,0x71,0x14,0x99,0x8F,0xDC,0xC2,0x92,0xF1,0xE7,0x60,0x92,0x97,0xEC,
-0xD8,0x48,0xDC,0xBF,0xC1,0x02,0x20,0xC6,0x24,0xA4,0x28,0x4C,0x30,0x5A,0x76,0x6D,
-0xB1,0x5C,0xF3,0xDD,0xDE,0x9E,0x10,0x71,0xA1,0x88,0xC7,0x5B,0x9B,0x41,0x6D,0xCA,
-0xB0,0xB8,0x8E,0x15,0xEE,0xAD,0x33,0x2B,0xCF,0x47,0x04,0x5C,0x75,0x71,0x0A,0x98,
-0x24,0x98,0x29,0xA7,0x49,0x59,0xA5,0xDD,0xF8,0xB7,0x43,0x62,0x61,0xF3,0xD3,0xE2,
-0xD0,0x55,0x3F,0x02,0x03,0x01,0x00,0x01,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,
-0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x81,0x81,0x00,0x56,0xBB,0x02,0x58,0x84,
-0x67,0x08,0x2C,0xDF,0x1F,0xDB,0x7B,0x49,0x33,0xF5,0xD3,0x67,0x9D,0xF4,0xB4,0x0A,
-0x10,0xB3,0xC9,0xC5,0x2C,0xE2,0x92,0x6A,0x71,0x78,0x27,0xF2,0x70,0x83,0x42,0xD3,
-0x3E,0xCF,0xA9,0x54,0xF4,0xF1,0xD8,0x92,0x16,0x8C,0xD1,0x04,0xCB,0x4B,0xAB,0xC9,
-0x9F,0x45,0xAE,0x3C,0x8A,0xA9,0xB0,0x71,0x33,0x5D,0xC8,0xC5,0x57,0xDF,0xAF,0xA8,
-0x35,0xB3,0x7F,0x89,0x87,0xE9,0xE8,0x25,0x92,0xB8,0x7F,0x85,0x7A,0xAE,0xD6,0xBC,
-0x1E,0x37,0x58,0x2A,0x67,0xC9,0x91,0xCF,0x2A,0x81,0x3E,0xED,0xC6,0x39,0xDF,0xC0,
-0x3E,0x19,0x9C,0x19,0xCC,0x13,0x4D,0x82,0x41,0xB5,0x8C,0xDE,0xE0,0x3D,0x60,0x08,
-0x20,0x0F,0x45,0x7E,0x6B,0xA2,0x7F,0xA3,0x8C,0x15,0xEE,
+/* subject:/OU=GlobalSign ECC Root CA - R5/O=GlobalSign/CN=GlobalSign */
+/* issuer :/OU=GlobalSign ECC Root CA - R5/O=GlobalSign/CN=GlobalSign */
+
+
+const unsigned char GlobalSign_ECC_Root_CA___R5_certificate[546]={
+0x30,0x82,0x02,0x1E,0x30,0x82,0x01,0xA4,0xA0,0x03,0x02,0x01,0x02,0x02,0x11,0x60,
+0x59,0x49,0xE0,0x26,0x2E,0xBB,0x55,0xF9,0x0A,0x77,0x8A,0x71,0xF9,0x4A,0xD8,0x6C,
+0x30,0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x30,0x50,0x31,0x24,
+0x30,0x22,0x06,0x03,0x55,0x04,0x0B,0x13,0x1B,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,
+0x69,0x67,0x6E,0x20,0x45,0x43,0x43,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x20,
+0x2D,0x20,0x52,0x35,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x0A,0x13,0x0A,0x47,
+0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x31,0x13,0x30,0x11,0x06,0x03,0x55,
+0x04,0x03,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x30,0x1E,
+0x17,0x0D,0x31,0x32,0x31,0x31,0x31,0x33,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,
+0x0D,0x33,0x38,0x30,0x31,0x31,0x39,0x30,0x33,0x31,0x34,0x30,0x37,0x5A,0x30,0x50,
+0x31,0x24,0x30,0x22,0x06,0x03,0x55,0x04,0x0B,0x13,0x1B,0x47,0x6C,0x6F,0x62,0x61,
+0x6C,0x53,0x69,0x67,0x6E,0x20,0x45,0x43,0x43,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,
+0x41,0x20,0x2D,0x20,0x52,0x35,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x0A,0x13,
+0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x31,0x13,0x30,0x11,0x06,
+0x03,0x55,0x04,0x03,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,
+0x30,0x76,0x30,0x10,0x06,0x07,0x2A,0x86,0x48,0xCE,0x3D,0x02,0x01,0x06,0x05,0x2B,
+0x81,0x04,0x00,0x22,0x03,0x62,0x00,0x04,0x47,0x45,0x0E,0x96,0xFB,0x7D,0x5D,0xBF,
+0xE9,0x39,0xD1,0x21,0xF8,0x9F,0x0B,0xB6,0xD5,0x7B,0x1E,0x92,0x3A,0x48,0x59,0x1C,
+0xF0,0x62,0x31,0x2D,0xC0,0x7A,0x28,0xFE,0x1A,0xA7,0x5C,0xB3,0xB6,0xCC,0x97,0xE7,
+0x45,0xD4,0x58,0xFA,0xD1,0x77,0x6D,0x43,0xA2,0xC0,0x87,0x65,0x34,0x0A,0x1F,0x7A,
+0xDD,0xEB,0x3C,0x33,0xA1,0xC5,0x9D,0x4D,0xA4,0x6F,0x41,0x95,0x38,0x7F,0xC9,0x1E,
+0x84,0xEB,0xD1,0x9E,0x49,0x92,0x87,0x94,0x87,0x0C,0x3A,0x85,0x4A,0x66,0x9F,0x9D,
+0x59,0x93,0x4D,0x97,0x61,0x06,0x86,0x4A,0xA3,0x42,0x30,0x40,0x30,0x0E,0x06,0x03,
+0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,
+0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,
+0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x3D,0xE6,0x29,0x48,0x9B,0xEA,0x07,0xCA,
+0x21,0x44,0x4A,0x26,0xDE,0x6E,0xDE,0xD2,0x83,0xD0,0x9F,0x59,0x30,0x0A,0x06,0x08,
+0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x03,0x68,0x00,0x30,0x65,0x02,0x31,0x00,
+0xE5,0x69,0x12,0xC9,0x6E,0xDB,0xC6,0x31,0xBA,0x09,0x41,0xE1,0x97,0xF8,0xFB,0xFD,
+0x9A,0xE2,0x7D,0x12,0xC9,0xED,0x7C,0x64,0xD3,0xCB,0x05,0x25,0x8B,0x56,0xD9,0xA0,
+0xE7,0x5E,0x5D,0x4E,0x0B,0x83,0x9C,0x5B,0x76,0x29,0xA0,0x09,0x26,0x21,0x6A,0x62,
+0x02,0x30,0x71,0xD2,0xB5,0x8F,0x5C,0xEA,0x3B,0xE1,0x78,0x09,0x85,0xA8,0x75,0x92,
+0x3B,0xC8,0x5C,0xFD,0x48,0xEF,0x0D,0x74,0x22,0xA8,0x08,0xE2,0x6E,0xC5,0x49,0xCE,
+0xC7,0x0C,0xBC,0xA7,0x61,0x69,0xF1,0xF7,0x3B,0xE1,0x2A,0xCB,0xF9,0x2B,0xF3,0x66,
+0x90,0x37,
};
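/*
 * Illustrative sketch, not from the generated table: one way to decode an
 * entry, assuming OpenSSL's libcrypto is available. The helper name
 * print_cert_subject is hypothetical and exists only for this example.
 */
#include <stdio.h>
#include <openssl/x509.h>

static void print_cert_subject(const unsigned char *der, long der_len) {
    const unsigned char *p = der;             /* d2i_X509 advances this pointer */
    X509 *cert = d2i_X509(NULL, &p, der_len); /* parse the DER blob */
    if (cert != NULL) {
        char buf[256];
        /* Renders the subject DN in the same /C=.../O=... form as the comments. */
        printf("%s\n", X509_NAME_oneline(X509_get_subject_name(cert), buf, sizeof(buf)));
        X509_free(cert);
    }
}
/* Usage, e.g.:
 *   print_cert_subject(GlobalSign_ECC_Root_CA___R5_certificate,
 *                      sizeof(GlobalSign_ECC_Root_CA___R5_certificate));
 */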
-/* subject:/C=US/O=Starfield Technologies, Inc./OU=Starfield Class 2 Certification Authority */
-/* issuer :/C=US/O=Starfield Technologies, Inc./OU=Starfield Class 2 Certification Authority */
+/* subject:/C=US/ST=UT/L=Salt Lake City/O=The USERTRUST Network/OU=http://www.usertrust.com/CN=UTN-USERFirst-Hardware */
+/* issuer :/C=US/ST=UT/L=Salt Lake City/O=The USERTRUST Network/OU=http://www.usertrust.com/CN=UTN-USERFirst-Hardware */


-const unsigned char Starfield_Class_2_CA_certificate[1043]={
-0x30,0x82,0x04,0x0F,0x30,0x82,0x02,0xF7,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x00,
-0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,
-0x68,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x25,
-0x30,0x23,0x06,0x03,0x55,0x04,0x0A,0x13,0x1C,0x53,0x74,0x61,0x72,0x66,0x69,0x65,
-0x6C,0x64,0x20,0x54,0x65,0x63,0x68,0x6E,0x6F,0x6C,0x6F,0x67,0x69,0x65,0x73,0x2C,
-0x20,0x49,0x6E,0x63,0x2E,0x31,0x32,0x30,0x30,0x06,0x03,0x55,0x04,0x0B,0x13,0x29,
-0x53,0x74,0x61,0x72,0x66,0x69,0x65,0x6C,0x64,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,
-0x32,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,
-0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x1E,0x17,0x0D,0x30,0x34,0x30,
-0x36,0x32,0x39,0x31,0x37,0x33,0x39,0x31,0x36,0x5A,0x17,0x0D,0x33,0x34,0x30,0x36,
-0x32,0x39,0x31,0x37,0x33,0x39,0x31,0x36,0x5A,0x30,0x68,0x31,0x0B,0x30,0x09,0x06,
-0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x25,0x30,0x23,0x06,0x03,0x55,0x04,
-0x0A,0x13,0x1C,0x53,0x74,0x61,0x72,0x66,0x69,0x65,0x6C,0x64,0x20,0x54,0x65,0x63,
-0x68,0x6E,0x6F,0x6C,0x6F,0x67,0x69,0x65,0x73,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,
-0x32,0x30,0x30,0x06,0x03,0x55,0x04,0x0B,0x13,0x29,0x53,0x74,0x61,0x72,0x66,0x69,
-0x65,0x6C,0x64,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x32,0x20,0x43,0x65,0x72,0x74,
-0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,
-0x69,0x74,0x79,0x30,0x82,0x01,0x20,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,
-0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0D,0x00,0x30,0x82,0x01,0x08,0x02,
-0x82,0x01,0x01,0x00,0xB7,0x32,0xC8,0xFE,0xE9,0x71,0xA6,0x04,0x85,0xAD,0x0C,0x11,
-0x64,0xDF,0xCE,0x4D,0xEF,0xC8,0x03,0x18,0x87,0x3F,0xA1,0xAB,0xFB,0x3C,0xA6,0x9F,
-0xF0,0xC3,0xA1,0xDA,0xD4,0xD8,0x6E,0x2B,0x53,0x90,0xFB,0x24,0xA4,0x3E,0x84,0xF0,
-0x9E,0xE8,0x5F,0xEC,0xE5,0x27,0x44,0xF5,0x28,0xA6,0x3F,0x7B,0xDE,0xE0,0x2A,0xF0,
-0xC8,0xAF,0x53,0x2F,0x9E,0xCA,0x05,0x01,0x93,0x1E,0x8F,0x66,0x1C,0x39,0xA7,0x4D,
-0xFA,0x5A,0xB6,0x73,0x04,0x25,0x66,0xEB,0x77,0x7F,0xE7,0x59,0xC6,0x4A,0x99,0x25,
-0x14,0x54,0xEB,0x26,0xC7,0xF3,0x7F,0x19,0xD5,0x30,0x70,0x8F,0xAF,0xB0,0x46,0x2A,
-0xFF,0xAD,0xEB,0x29,0xED,0xD7,0x9F,0xAA,0x04,0x87,0xA3,0xD4,0xF9,0x89,0xA5,0x34,
-0x5F,0xDB,0x43,0x91,0x82,0x36,0xD9,0x66,0x3C,0xB1,0xB8,0xB9,0x82,0xFD,0x9C,0x3A,
-0x3E,0x10,0xC8,0x3B,0xEF,0x06,0x65,0x66,0x7A,0x9B,0x19,0x18,0x3D,0xFF,0x71,0x51,
-0x3C,0x30,0x2E,0x5F,0xBE,0x3D,0x77,0x73,0xB2,0x5D,0x06,0x6C,0xC3,0x23,0x56,0x9A,
-0x2B,0x85,0x26,0x92,0x1C,0xA7,0x02,0xB3,0xE4,0x3F,0x0D,0xAF,0x08,0x79,0x82,0xB8,
-0x36,0x3D,0xEA,0x9C,0xD3,0x35,0xB3,0xBC,0x69,0xCA,0xF5,0xCC,0x9D,0xE8,0xFD,0x64,
-0x8D,0x17,0x80,0x33,0x6E,0x5E,0x4A,0x5D,0x99,0xC9,0x1E,0x87,0xB4,0x9D,0x1A,0xC0,
-0xD5,0x6E,0x13,0x35,0x23,0x5E,0xDF,0x9B,0x5F,0x3D,0xEF,0xD6,0xF7,0x76,0xC2,0xEA,
-0x3E,0xBB,0x78,0x0D,0x1C,0x42,0x67,0x6B,0x04,0xD8,0xF8,0xD6,0xDA,0x6F,0x8B,0xF2,
-0x44,0xA0,0x01,0xAB,0x02,0x01,0x03,0xA3,0x81,0xC5,0x30,0x81,0xC2,0x30,0x1D,0x06,
-0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xBF,0x5F,0xB7,0xD1,0xCE,0xDD,0x1F,0x86,
-0xF4,0x5B,0x55,0xAC,0xDC,0xD7,0x10,0xC2,0x0E,0xA9,0x88,0xE7,0x30,0x81,0x92,0x06,
-0x03,0x55,0x1D,0x23,0x04,0x81,0x8A,0x30,0x81,0x87,0x80,0x14,0xBF,0x5F,0xB7,0xD1,
-0xCE,0xDD,0x1F,0x86,0xF4,0x5B,0x55,0xAC,0xDC,0xD7,0x10,0xC2,0x0E,0xA9,0x88,0xE7,
-0xA1,0x6C,0xA4,0x6A,0x30,0x68,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,
-0x02,0x55,0x53,0x31,0x25,0x30,0x23,0x06,0x03,0x55,0x04,0x0A,0x13,0x1C,0x53,0x74,
-0x61,0x72,0x66,0x69,0x65,0x6C,0x64,0x20,0x54,0x65,0x63,0x68,0x6E,0x6F,0x6C,0x6F,
-0x67,0x69,0x65,0x73,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x32,0x30,0x30,0x06,0x03,
-0x55,0x04,0x0B,0x13,0x29,0x53,0x74,0x61,0x72,0x66,0x69,0x65,0x6C,0x64,0x20,0x43,
-0x6C,0x61,0x73,0x73,0x20,0x32,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,
-0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x82,0x01,
-0x00,0x30,0x0C,0x06,0x03,0x55,0x1D,0x13,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,
-0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,
-0x01,0x01,0x00,0x05,0x9D,0x3F,0x88,0x9D,0xD1,0xC9,0x1A,0x55,0xA1,0xAC,0x69,0xF3,
-0xF3,0x59,0xDA,0x9B,0x01,0x87,0x1A,0x4F,0x57,0xA9,0xA1,0x79,0x09,0x2A,0xDB,0xF7,
-0x2F,0xB2,0x1E,0xCC,0xC7,0x5E,0x6A,0xD8,0x83,0x87,0xA1,0x97,0xEF,0x49,0x35,0x3E,
-0x77,0x06,0x41,0x58,0x62,0xBF,0x8E,0x58,0xB8,0x0A,0x67,0x3F,0xEC,0xB3,0xDD,0x21,
-0x66,0x1F,0xC9,0x54,0xFA,0x72,0xCC,0x3D,0x4C,0x40,0xD8,0x81,0xAF,0x77,0x9E,0x83,
-0x7A,0xBB,0xA2,0xC7,0xF5,0x34,0x17,0x8E,0xD9,0x11,0x40,0xF4,0xFC,0x2C,0x2A,0x4D,
-0x15,0x7F,0xA7,0x62,0x5D,0x2E,0x25,0xD3,0x00,0x0B,0x20,0x1A,0x1D,0x68,0xF9,0x17,
-0xB8,0xF4,0xBD,0x8B,0xED,0x28,0x59,0xDD,0x4D,0x16,0x8B,0x17,0x83,0xC8,0xB2,0x65,
-0xC7,0x2D,0x7A,0xA5,0xAA,0xBC,0x53,0x86,0x6D,0xDD,0x57,0xA4,0xCA,0xF8,0x20,0x41,
-0x0B,0x68,0xF0,0xF4,0xFB,0x74,0xBE,0x56,0x5D,0x7A,0x79,0xF5,0xF9,0x1D,0x85,0xE3,
-0x2D,0x95,0xBE,0xF5,0x71,0x90,0x43,0xCC,0x8D,0x1F,0x9A,0x00,0x0A,0x87,0x29,0xE9,
-0x55,0x22,0x58,0x00,0x23,0xEA,0xE3,0x12,0x43,0x29,0x5B,0x47,0x08,0xDD,0x8C,0x41,
-0x6A,0x65,0x06,0xA8,0xE5,0x21,0xAA,0x41,0xB4,0x95,0x21,0x95,0xB9,0x7D,0xD1,0x34,
-0xAB,0x13,0xD6,0xAD,0xBC,0xDC,0xE2,0x3D,0x39,0xCD,0xBD,0x3E,0x75,0x70,0xA1,0x18,
-0x59,0x03,0xC9,0x22,0xB4,0x8F,0x9C,0xD5,0x5E,0x2A,0xD7,0xA5,0xB6,0xD4,0x0A,0x6D,
-0xF8,0xB7,0x40,0x11,0x46,0x9A,0x1F,0x79,0x0E,0x62,0xBF,0x0F,0x97,0xEC,0xE0,0x2F,
-0x1F,0x17,0x94,
+const unsigned char UTN_USERFirst_Hardware_Root_CA_certificate[1144]={
+0x30,0x82,0x04,0x74,0x30,0x82,0x03,0x5C,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x44,
+0xBE,0x0C,0x8B,0x50,0x00,0x24,0xB4,0x11,0xD3,0x36,0x2A,0xFE,0x65,0x0A,0xFD,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x81,
+0x97,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x0B,
+0x30,0x09,0x06,0x03,0x55,0x04,0x08,0x13,0x02,0x55,0x54,0x31,0x17,0x30,0x15,0x06,
+0x03,0x55,0x04,0x07,0x13,0x0E,0x53,0x61,0x6C,0x74,0x20,0x4C,0x61,0x6B,0x65,0x20,
+0x43,0x69,0x74,0x79,0x31,0x1E,0x30,0x1C,0x06,0x03,0x55,0x04,0x0A,0x13,0x15,0x54,
+0x68,0x65,0x20,0x55,0x53,0x45,0x52,0x54,0x52,0x55,0x53,0x54,0x20,0x4E,0x65,0x74,
+0x77,0x6F,0x72,0x6B,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x0B,0x13,0x18,0x68,
+0x74,0x74,0x70,0x3A,0x2F,0x2F,0x77,0x77,0x77,0x2E,0x75,0x73,0x65,0x72,0x74,0x72,
+0x75,0x73,0x74,0x2E,0x63,0x6F,0x6D,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x03,
+0x13,0x16,0x55,0x54,0x4E,0x2D,0x55,0x53,0x45,0x52,0x46,0x69,0x72,0x73,0x74,0x2D,
+0x48,0x61,0x72,0x64,0x77,0x61,0x72,0x65,0x30,0x1E,0x17,0x0D,0x39,0x39,0x30,0x37,
+0x30,0x39,0x31,0x38,0x31,0x30,0x34,0x32,0x5A,0x17,0x0D,0x31,0x39,0x30,0x37,0x30,
+0x39,0x31,0x38,0x31,0x39,0x32,0x32,0x5A,0x30,0x81,0x97,0x31,0x0B,0x30,0x09,0x06,
+0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,
+0x08,0x13,0x02,0x55,0x54,0x31,0x17,0x30,0x15,0x06,0x03,0x55,0x04,0x07,0x13,0x0E,
+0x53,0x61,0x6C,0x74,0x20,0x4C,0x61,0x6B,0x65,0x20,0x43,0x69,0x74,0x79,0x31,0x1E,
+0x30,0x1C,0x06,0x03,0x55,0x04,0x0A,0x13,0x15,0x54,0x68,0x65,0x20,0x55,0x53,0x45,
+0x52,0x54,0x52,0x55,0x53,0x54,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x21,
+0x30,0x1F,0x06,0x03,0x55,0x04,0x0B,0x13,0x18,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,
+0x77,0x77,0x77,0x2E,0x75,0x73,0x65,0x72,0x74,0x72,0x75,0x73,0x74,0x2E,0x63,0x6F,
+0x6D,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x03,0x13,0x16,0x55,0x54,0x4E,0x2D,
+0x55,0x53,0x45,0x52,0x46,0x69,0x72,0x73,0x74,0x2D,0x48,0x61,0x72,0x64,0x77,0x61,
+0x72,0x65,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,
+0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,
+0x01,0x01,0x00,0xB1,0xF7,0xC3,0x38,0x3F,0xB4,0xA8,0x7F,0xCF,0x39,0x82,0x51,0x67,
+0xD0,0x6D,0x9F,0xD2,0xFF,0x58,0xF3,0xE7,0x9F,0x2B,0xEC,0x0D,0x89,0x54,0x99,0xB9,
+0x38,0x99,0x16,0xF7,0xE0,0x21,0x79,0x48,0xC2,0xBB,0x61,0x74,0x12,0x96,0x1D,0x3C,
+0x6A,0x72,0xD5,0x3C,0x10,0x67,0x3A,0x39,0xED,0x2B,0x13,0xCD,0x66,0xEB,0x95,0x09,
+0x33,0xA4,0x6C,0x97,0xB1,0xE8,0xC6,0xEC,0xC1,0x75,0x79,0x9C,0x46,0x5E,0x8D,0xAB,
+0xD0,0x6A,0xFD,0xB9,0x2A,0x55,0x17,0x10,0x54,0xB3,0x19,0xF0,0x9A,0xF6,0xF1,0xB1,
+0x5D,0xB6,0xA7,0x6D,0xFB,0xE0,0x71,0x17,0x6B,0xA2,0x88,0xFB,0x00,0xDF,0xFE,0x1A,
+0x31,0x77,0x0C,0x9A,0x01,0x7A,0xB1,0x32,0xE3,0x2B,0x01,0x07,0x38,0x6E,0xC3,0xA5,
+0x5E,0x23,0xBC,0x45,0x9B,0x7B,0x50,0xC1,0xC9,0x30,0x8F,0xDB,0xE5,0x2B,0x7A,0xD3,
+0x5B,0xFB,0x33,0x40,0x1E,0xA0,0xD5,0x98,0x17,0xBC,0x8B,0x87,0xC3,0x89,0xD3,0x5D,
+0xA0,0x8E,0xB2,0xAA,0xAA,0xF6,0x8E,0x69,0x88,0x06,0xC5,0xFA,0x89,0x21,0xF3,0x08,
+0x9D,0x69,0x2E,0x09,0x33,0x9B,0x29,0x0D,0x46,0x0F,0x8C,0xCC,0x49,0x34,0xB0,0x69,
+0x51,0xBD,0xF9,0x06,0xCD,0x68,0xAD,0x66,0x4C,0xBC,0x3E,0xAC,0x61,0xBD,0x0A,0x88,
+0x0E,0xC8,0xDF,0x3D,0xEE,0x7C,0x04,0x4C,0x9D,0x0A,0x5E,0x6B,0x91,0xD6,0xEE,0xC7,
+0xED,0x28,0x8D,0xAB,0x4D,0x87,0x89,0x73,0xD0,0x6E,0xA4,0xD0,0x1E,0x16,0x8B,0x14,
+0xE1,0x76,0x44,0x03,0x7F,0x63,0xAC,0xE4,0xCD,0x49,0x9C,0xC5,0x92,0xF4,0xAB,0x32,
+0xA1,0x48,0x5B,0x02,0x03,0x01,0x00,0x01,0xA3,0x81,0xB9,0x30,0x81,0xB6,0x30,0x0B,
+0x06,0x03,0x55,0x1D,0x0F,0x04,0x04,0x03,0x02,0x01,0xC6,0x30,0x0F,0x06,0x03,0x55,
+0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,
+0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xA1,0x72,0x5F,0x26,0x1B,0x28,0x98,0x43,0x95,
+0x5D,0x07,0x37,0xD5,0x85,0x96,0x9D,0x4B,0xD2,0xC3,0x45,0x30,0x44,0x06,0x03,0x55,
+0x1D,0x1F,0x04,0x3D,0x30,0x3B,0x30,0x39,0xA0,0x37,0xA0,0x35,0x86,0x33,0x68,0x74,
+0x74,0x70,0x3A,0x2F,0x2F,0x63,0x72,0x6C,0x2E,0x75,0x73,0x65,0x72,0x74,0x72,0x75,
+0x73,0x74,0x2E,0x63,0x6F,0x6D,0x2F,0x55,0x54,0x4E,0x2D,0x55,0x53,0x45,0x52,0x46,
+0x69,0x72,0x73,0x74,0x2D,0x48,0x61,0x72,0x64,0x77,0x61,0x72,0x65,0x2E,0x63,0x72,
+0x6C,0x30,0x31,0x06,0x03,0x55,0x1D,0x25,0x04,0x2A,0x30,0x28,0x06,0x08,0x2B,0x06,
+0x01,0x05,0x05,0x07,0x03,0x01,0x06,0x08,0x2B,0x06,0x01,0x05,0x05,0x07,0x03,0x05,
+0x06,0x08,0x2B,0x06,0x01,0x05,0x05,0x07,0x03,0x06,0x06,0x08,0x2B,0x06,0x01,0x05,
+0x05,0x07,0x03,0x07,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,
+0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x47,0x19,0x0F,0xDE,0x74,0xC6,0x99,0x97,
+0xAF,0xFC,0xAD,0x28,0x5E,0x75,0x8E,0xEB,0x2D,0x67,0xEE,0x4E,0x7B,0x2B,0xD7,0x0C,
+0xFF,0xF6,0xDE,0xCB,0x55,0xA2,0x0A,0xE1,0x4C,0x54,0x65,0x93,0x60,0x6B,0x9F,0x12,
+0x9C,0xAD,0x5E,0x83,0x2C,0xEB,0x5A,0xAE,0xC0,0xE4,0x2D,0xF4,0x00,0x63,0x1D,0xB8,
+0xC0,0x6C,0xF2,0xCF,0x49,0xBB,0x4D,0x93,0x6F,0x06,0xA6,0x0A,0x22,0xB2,0x49,0x62,
+0x08,0x4E,0xFF,0xC8,0xC8,0x14,0xB2,0x88,0x16,0x5D,0xE7,0x01,0xE4,0x12,0x95,0xE5,
+0x45,0x34,0xB3,0x8B,0x69,0xBD,0xCF,0xB4,0x85,0x8F,0x75,0x51,0x9E,0x7D,0x3A,0x38,
+0x3A,0x14,0x48,0x12,0xC6,0xFB,0xA7,0x3B,0x1A,0x8D,0x0D,0x82,0x40,0x07,0xE8,0x04,
+0x08,0x90,0xA1,0x89,0xCB,0x19,0x50,0xDF,0xCA,0x1C,0x01,0xBC,0x1D,0x04,0x19,0x7B,
+0x10,0x76,0x97,0x3B,0xEE,0x90,0x90,0xCA,0xC4,0x0E,0x1F,0x16,0x6E,0x75,0xEF,0x33,
+0xF8,0xD3,0x6F,0x5B,0x1E,0x96,0xE3,0xE0,0x74,0x77,0x74,0x7B,0x8A,0xA2,0x6E,0x2D,
+0xDD,0x76,0xD6,0x39,0x30,0x82,0xF0,0xAB,0x9C,0x52,0xF2,0x2A,0xC7,0xAF,0x49,0x5E,
+0x7E,0xC7,0x68,0xE5,0x82,0x81,0xC8,0x6A,0x27,0xF9,0x27,0x88,0x2A,0xD5,0x58,0x50,
+0x95,0x1F,0xF0,0x3B,0x1C,0x57,0xBB,0x7D,0x14,0x39,0x62,0x2B,0x9A,0xC9,0x94,0x92,
+0x2A,0xA3,0x22,0x0C,0xFF,0x89,0x26,0x7D,0x5F,0x23,0x2B,0x47,0xD7,0x15,0x1D,0xA9,
+0x6A,0x9E,0x51,0x0D,0x2A,0x51,0x9E,0x81,0xF9,0xD4,0x3B,0x5E,0x70,0x12,0x7F,0x10,
+0x32,0x9C,0x1E,0xBB,0x9D,0xF8,0x66,0xA8,
};
-/* subject:/C=US/ST=Arizona/L=Scottsdale/O=Starfield Technologies, Inc./CN=Starfield Root Certificate Authority - G2 */
-/* issuer :/C=US/ST=Arizona/L=Scottsdale/O=Starfield Technologies, Inc./CN=Starfield Root Certificate Authority - G2 */
-
-
-const unsigned char Starfield_Root_Certificate_Authority___G2_certificate[993]={
-0x30,0x82,0x03,0xDD,0x30,0x82,0x02,0xC5,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x00,
-0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x30,
-0x81,0x8F,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,
-0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x08,0x13,0x07,0x41,0x72,0x69,0x7A,0x6F,0x6E,
-0x61,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x07,0x13,0x0A,0x53,0x63,0x6F,0x74,
-0x74,0x73,0x64,0x61,0x6C,0x65,0x31,0x25,0x30,0x23,0x06,0x03,0x55,0x04,0x0A,0x13,
-0x1C,0x53,0x74,0x61,0x72,0x66,0x69,0x65,0x6C,0x64,0x20,0x54,0x65,0x63,0x68,0x6E,
-0x6F,0x6C,0x6F,0x67,0x69,0x65,0x73,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x32,0x30,
-0x30,0x06,0x03,0x55,0x04,0x03,0x13,0x29,0x53,0x74,0x61,0x72,0x66,0x69,0x65,0x6C,
-0x64,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,
-0x74,0x65,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20,0x2D,0x20,0x47,
-0x32,0x30,0x1E,0x17,0x0D,0x30,0x39,0x30,0x39,0x30,0x31,0x30,0x30,0x30,0x30,0x30,
-0x30,0x5A,0x17,0x0D,0x33,0x37,0x31,0x32,0x33,0x31,0x32,0x33,0x35,0x39,0x35,0x39,
-0x5A,0x30,0x81,0x8F,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,
-0x53,0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x08,0x13,0x07,0x41,0x72,0x69,0x7A,
-0x6F,0x6E,0x61,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x07,0x13,0x0A,0x53,0x63,
-0x6F,0x74,0x74,0x73,0x64,0x61,0x6C,0x65,0x31,0x25,0x30,0x23,0x06,0x03,0x55,0x04,
-0x0A,0x13,0x1C,0x53,0x74,0x61,0x72,0x66,0x69,0x65,0x6C,0x64,0x20,0x54,0x65,0x63,
-0x68,0x6E,0x6F,0x6C,0x6F,0x67,0x69,0x65,0x73,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,
-0x32,0x30,0x30,0x06,0x03,0x55,0x04,0x03,0x13,0x29,0x53,0x74,0x61,0x72,0x66,0x69,
-0x65,0x6C,0x64,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,
-0x63,0x61,0x74,0x65,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20,0x2D,
-0x20,0x47,0x32,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,
-0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,
-0x82,0x01,0x01,0x00,0xBD,0xED,0xC1,0x03,0xFC,0xF6,0x8F,0xFC,0x02,0xB1,0x6F,0x5B,
-0x9F,0x48,0xD9,0x9D,0x79,0xE2,0xA2,0xB7,0x03,0x61,0x56,0x18,0xC3,0x47,0xB6,0xD7,
-0xCA,0x3D,0x35,0x2E,0x89,0x43,0xF7,0xA1,0x69,0x9B,0xDE,0x8A,0x1A,0xFD,0x13,0x20,
-0x9C,0xB4,0x49,0x77,0x32,0x29,0x56,0xFD,0xB9,0xEC,0x8C,0xDD,0x22,0xFA,0x72,0xDC,
-0x27,0x61,0x97,0xEE,0xF6,0x5A,0x84,0xEC,0x6E,0x19,0xB9,0x89,0x2C,0xDC,0x84,0x5B,
-0xD5,0x74,0xFB,0x6B,0x5F,0xC5,0x89,0xA5,0x10,0x52,0x89,0x46,0x55,0xF4,0xB8,0x75,
-0x1C,0xE6,0x7F,0xE4,0x54,0xAE,0x4B,0xF8,0x55,0x72,0x57,0x02,0x19,0xF8,0x17,0x71,
-0x59,0xEB,0x1E,0x28,0x07,0x74,0xC5,0x9D,0x48,0xBE,0x6C,0xB4,0xF4,0xA4,0xB0,0xF3,
-0x64,0x37,0x79,0x92,0xC0,0xEC,0x46,0x5E,0x7F,0xE1,0x6D,0x53,0x4C,0x62,0xAF,0xCD,
-0x1F,0x0B,0x63,0xBB,0x3A,0x9D,0xFB,0xFC,0x79,0x00,0x98,0x61,0x74,0xCF,0x26,0x82,
-0x40,0x63,0xF3,0xB2,0x72,0x6A,0x19,0x0D,0x99,0xCA,0xD4,0x0E,0x75,0xCC,0x37,0xFB,
-0x8B,0x89,0xC1,0x59,0xF1,0x62,0x7F,0x5F,0xB3,0x5F,0x65,0x30,0xF8,0xA7,0xB7,0x4D,
-0x76,0x5A,0x1E,0x76,0x5E,0x34,0xC0,0xE8,0x96,0x56,0x99,0x8A,0xB3,0xF0,0x7F,0xA4,
-0xCD,0xBD,0xDC,0x32,0x31,0x7C,0x91,0xCF,0xE0,0x5F,0x11,0xF8,0x6B,0xAA,0x49,0x5C,
-0xD1,0x99,0x94,0xD1,0xA2,0xE3,0x63,0x5B,0x09,0x76,0xB5,0x56,0x62,0xE1,0x4B,0x74,
-0x1D,0x96,0xD4,0x26,0xD4,0x08,0x04,0x59,0xD0,0x98,0x0E,0x0E,0xE6,0xDE,0xFC,0xC3,
-0xEC,0x1F,0x90,0xF1,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x0F,0x06,
-0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,
-0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x1D,
-0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x7C,0x0C,0x32,0x1F,0xA7,0xD9,0x30,
-0x7F,0xC4,0x7D,0x68,0xA3,0x62,0xA8,0xA1,0xCE,0xAB,0x07,0x5B,0x27,0x30,0x0D,0x06,
-0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x03,0x82,0x01,0x01,
-0x00,0x11,0x59,0xFA,0x25,0x4F,0x03,0x6F,0x94,0x99,0x3B,0x9A,0x1F,0x82,0x85,0x39,
-0xD4,0x76,0x05,0x94,0x5E,0xE1,0x28,0x93,0x6D,0x62,0x5D,0x09,0xC2,0xA0,0xA8,0xD4,
-0xB0,0x75,0x38,0xF1,0x34,0x6A,0x9D,0xE4,0x9F,0x8A,0x86,0x26,0x51,0xE6,0x2C,0xD1,
-0xC6,0x2D,0x6E,0x95,0x20,0x4A,0x92,0x01,0xEC,0xB8,0x8A,0x67,0x7B,0x31,0xE2,0x67,
-0x2E,0x8C,0x95,0x03,0x26,0x2E,0x43,0x9D,0x4A,0x31,0xF6,0x0E,0xB5,0x0C,0xBB,0xB7,
-0xE2,0x37,0x7F,0x22,0xBA,0x00,0xA3,0x0E,0x7B,0x52,0xFB,0x6B,0xBB,0x3B,0xC4,0xD3,
-0x79,0x51,0x4E,0xCD,0x90,0xF4,0x67,0x07,0x19,0xC8,0x3C,0x46,0x7A,0x0D,0x01,0x7D,
-0xC5,0x58,0xE7,0x6D,0xE6,0x85,0x30,0x17,0x9A,0x24,0xC4,0x10,0xE0,0x04,0xF7,0xE0,
-0xF2,0x7F,0xD4,0xAA,0x0A,0xFF,0x42,0x1D,0x37,0xED,0x94,0xE5,0x64,0x59,0x12,0x20,
-0x77,0x38,0xD3,0x32,0x3E,0x38,0x81,0x75,0x96,0x73,0xFA,0x68,0x8F,0xB1,0xCB,0xCE,
-0x1F,0xC5,0xEC,0xFA,0x9C,0x7E,0xCF,0x7E,0xB1,0xF1,0x07,0x2D,0xB6,0xFC,0xBF,0xCA,
-0xA4,0xBF,0xD0,0x97,0x05,0x4A,0xBC,0xEA,0x18,0x28,0x02,0x90,0xBD,0x54,0x78,0x09,
-0x21,0x71,0xD3,0xD1,0x7D,0x1D,0xD9,0x16,0xB0,0xA9,0x61,0x3D,0xD0,0x0A,0x00,0x22,
-0xFC,0xC7,0x7B,0xCB,0x09,0x64,0x45,0x0B,0x3B,0x40,0x81,0xF7,0x7D,0x7C,0x32,0xF5,
-0x98,0xCA,0x58,0x8E,0x7D,0x2A,0xEE,0x90,0x59,0x73,0x64,0xF9,0x36,0x74,0x5E,0x25,
-0xA1,0xF5,0x66,0x05,0x2E,0x7F,0x39,0x15,0xA9,0x2A,0xFB,0x50,0x8B,0x8E,0x85,0x69,
-0xF4,
+/* subject:/OU=GlobalSign ECC Root CA - R4/O=GlobalSign/CN=GlobalSign */
+/* issuer :/OU=GlobalSign ECC Root CA - R4/O=GlobalSign/CN=GlobalSign */
+
+
+const unsigned char GlobalSign_ECC_Root_CA___R4_certificate[485]={
+0x30,0x82,0x01,0xE1,0x30,0x82,0x01,0x87,0xA0,0x03,0x02,0x01,0x02,0x02,0x11,0x2A,
+0x38,0xA4,0x1C,0x96,0x0A,0x04,0xDE,0x42,0xB2,0x28,0xA5,0x0B,0xE8,0x34,0x98,0x02,
+0x30,0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x02,0x30,0x50,0x31,0x24,
+0x30,0x22,0x06,0x03,0x55,0x04,0x0B,0x13,0x1B,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,
+0x69,0x67,0x6E,0x20,0x45,0x43,0x43,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x20,
+0x2D,0x20,0x52,0x34,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x0A,0x13,0x0A,0x47,
+0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x31,0x13,0x30,0x11,0x06,0x03,0x55,
+0x04,0x03,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x30,0x1E,
+0x17,0x0D,0x31,0x32,0x31,0x31,0x31,0x33,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,
+0x0D,0x33,0x38,0x30,0x31,0x31,0x39,0x30,0x33,0x31,0x34,0x30,0x37,0x5A,0x30,0x50,
+0x31,0x24,0x30,0x22,0x06,0x03,0x55,0x04,0x0B,0x13,0x1B,0x47,0x6C,0x6F,0x62,0x61,
+0x6C,0x53,0x69,0x67,0x6E,0x20,0x45,0x43,0x43,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,
+0x41,0x20,0x2D,0x20,0x52,0x34,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x0A,0x13,
+0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x31,0x13,0x30,0x11,0x06,
+0x03,0x55,0x04,0x03,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,
+0x30,0x59,0x30,0x13,0x06,0x07,0x2A,0x86,0x48,0xCE,0x3D,0x02,0x01,0x06,0x08,0x2A,
+0x86,0x48,0xCE,0x3D,0x03,0x01,0x07,0x03,0x42,0x00,0x04,0xB8,0xC6,0x79,0xD3,0x8F,
+0x6C,0x25,0x0E,0x9F,0x2E,0x39,0x19,0x1C,0x03,0xA4,0xAE,0x9A,0xE5,0x39,0x07,0x09,
+0x16,0xCA,0x63,0xB1,0xB9,0x86,0xF8,0x8A,0x57,0xC1,0x57,0xCE,0x42,0xFA,0x73,0xA1,
+0xF7,0x65,0x42,0xFF,0x1E,0xC1,0x00,0xB2,0x6E,0x73,0x0E,0xFF,0xC7,0x21,0xE5,0x18,
+0xA4,0xAA,0xD9,0x71,0x3F,0xA8,0xD4,0xB9,0xCE,0x8C,0x1D,0xA3,0x42,0x30,0x40,0x30,
+0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,
+0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,
+0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x54,0xB0,0x7B,0xAD,0x45,
+0xB8,0xE2,0x40,0x7F,0xFB,0x0A,0x6E,0xFB,0xBE,0x33,0xC9,0x3C,0xA3,0x84,0xD5,0x30,
+0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x02,0x03,0x48,0x00,0x30,0x45,
+0x02,0x21,0x00,0xDC,0x92,0xA1,0xA0,0x13,0xA6,0xCF,0x03,0xB0,0xE6,0xC4,0x21,0x97,
+0x90,0xFA,0x14,0x57,0x2D,0x03,0xEC,0xEE,0x3C,0xD3,0x6E,0xCA,0xA8,0x6C,0x76,0xBC,
+0xA2,0xDE,0xBB,0x02,0x20,0x27,0xA8,0x85,0x27,0x35,0x9B,0x56,0xC6,0xA3,0xF2,0x47,
+0xD2,0xB7,0x6E,0x1B,0x02,0x00,0x17,0xAA,0x67,0xA6,0x15,0x91,0xDE,0xFA,0x94,0xEC,
+0x7B,0x0B,0xF8,0x9F,0x84,
};
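/*
 * Aside (illustrative, not from the generated table): the two GlobalSign
 * roots above are ECC rather than RSA roots. Their signature OIDs are
 * visible in the bytes: 0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03
 * (1.2.840.10045.4.3.3, ecdsa-with-SHA384) for R5, and
 * 0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x02 (1.2.840.10045.4.3.2,
 * ecdsa-with-SHA256) for R4, which is also why these entries (546 and 485
 * bytes) are far smaller than the RSA entries in this table.
 */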
-/* subject:/C=US/ST=Arizona/L=Scottsdale/O=Starfield Technologies, Inc./CN=Starfield Services Root Certificate Authority - G2 */
-/* issuer :/C=US/ST=Arizona/L=Scottsdale/O=Starfield Technologies, Inc./CN=Starfield Services Root Certificate Authority - G2 */
+/* subject:/C=DE/O=TC TrustCenter GmbH/OU=TC TrustCenter Universal CA/CN=TC TrustCenter Universal CA I */
+/* issuer :/C=DE/O=TC TrustCenter GmbH/OU=TC TrustCenter Universal CA/CN=TC TrustCenter Universal CA I */


-const unsigned char Starfield_Services_Root_Certificate_Authority___G2_certificate[1011]={
-0x30,0x82,0x03,0xEF,0x30,0x82,0x02,0xD7,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x00,
-0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x30,
-0x81,0x98,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,
-0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x08,0x13,0x07,0x41,0x72,0x69,0x7A,0x6F,0x6E,
-0x61,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x07,0x13,0x0A,0x53,0x63,0x6F,0x74,
-0x74,0x73,0x64,0x61,0x6C,0x65,0x31,0x25,0x30,0x23,0x06,0x03,0x55,0x04,0x0A,0x13,
-0x1C,0x53,0x74,0x61,0x72,0x66,0x69,0x65,0x6C,0x64,0x20,0x54,0x65,0x63,0x68,0x6E,
-0x6F,0x6C,0x6F,0x67,0x69,0x65,0x73,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x3B,0x30,
-0x39,0x06,0x03,0x55,0x04,0x03,0x13,0x32,0x53,0x74,0x61,0x72,0x66,0x69,0x65,0x6C,
-0x64,0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x20,0x52,0x6F,0x6F,0x74,0x20,
-0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x41,0x75,0x74,0x68,
-0x6F,0x72,0x69,0x74,0x79,0x20,0x2D,0x20,0x47,0x32,0x30,0x1E,0x17,0x0D,0x30,0x39,
-0x30,0x39,0x30,0x31,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x37,0x31,
-0x32,0x33,0x31,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0x98,0x31,0x0B,0x30,
-0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x10,0x30,0x0E,0x06,0x03,
-0x55,0x04,0x08,0x13,0x07,0x41,0x72,0x69,0x7A,0x6F,0x6E,0x61,0x31,0x13,0x30,0x11,
-0x06,0x03,0x55,0x04,0x07,0x13,0x0A,0x53,0x63,0x6F,0x74,0x74,0x73,0x64,0x61,0x6C,
-0x65,0x31,0x25,0x30,0x23,0x06,0x03,0x55,0x04,0x0A,0x13,0x1C,0x53,0x74,0x61,0x72,
-0x66,0x69,0x65,0x6C,0x64,0x20,0x54,0x65,0x63,0x68,0x6E,0x6F,0x6C,0x6F,0x67,0x69,
-0x65,0x73,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x3B,0x30,0x39,0x06,0x03,0x55,0x04,
-0x03,0x13,0x32,0x53,0x74,0x61,0x72,0x66,0x69,0x65,0x6C,0x64,0x20,0x53,0x65,0x72,
-0x76,0x69,0x63,0x65,0x73,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x65,0x72,0x74,0x69,
-0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,
-0x20,0x2D,0x20,0x47,0x32,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,
-0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,
-0x0A,0x02,0x82,0x01,0x01,0x00,0xD5,0x0C,0x3A,0xC4,0x2A,0xF9,0x4E,0xE2,0xF5,0xBE,
-0x19,0x97,0x5F,0x8E,0x88,0x53,0xB1,0x1F,0x3F,0xCB,0xCF,0x9F,0x20,0x13,0x6D,0x29,
-0x3A,0xC8,0x0F,0x7D,0x3C,0xF7,0x6B,0x76,0x38,0x63,0xD9,0x36,0x60,0xA8,0x9B,0x5E,
-0x5C,0x00,0x80,0xB2,0x2F,0x59,0x7F,0xF6,0x87,0xF9,0x25,0x43,0x86,0xE7,0x69,0x1B,
-0x52,0x9A,0x90,0xE1,0x71,0xE3,0xD8,0x2D,0x0D,0x4E,0x6F,0xF6,0xC8,0x49,0xD9,0xB6,
-0xF3,0x1A,0x56,0xAE,0x2B,0xB6,0x74,0x14,0xEB,0xCF,0xFB,0x26,0xE3,0x1A,0xBA,0x1D,
-0x96,0x2E,0x6A,0x3B,0x58,0x94,0x89,0x47,0x56,0xFF,0x25,0xA0,0x93,0x70,0x53,0x83,
-0xDA,0x84,0x74,0x14,0xC3,0x67,0x9E,0x04,0x68,0x3A,0xDF,0x8E,0x40,0x5A,0x1D,0x4A,
-0x4E,0xCF,0x43,0x91,0x3B,0xE7,0x56,0xD6,0x00,0x70,0xCB,0x52,0xEE,0x7B,0x7D,0xAE,
-0x3A,0xE7,0xBC,0x31,0xF9,0x45,0xF6,0xC2,0x60,0xCF,0x13,0x59,0x02,0x2B,0x80,0xCC,
-0x34,0x47,0xDF,0xB9,0xDE,0x90,0x65,0x6D,0x02,0xCF,0x2C,0x91,0xA6,0xA6,0xE7,0xDE,
-0x85,0x18,0x49,0x7C,0x66,0x4E,0xA3,0x3A,0x6D,0xA9,0xB5,0xEE,0x34,0x2E,0xBA,0x0D,
-0x03,0xB8,0x33,0xDF,0x47,0xEB,0xB1,0x6B,0x8D,0x25,0xD9,0x9B,0xCE,0x81,0xD1,0x45,
-0x46,0x32,0x96,0x70,0x87,0xDE,0x02,0x0E,0x49,0x43,0x85,0xB6,0x6C,0x73,0xBB,0x64,
-0xEA,0x61,0x41,0xAC,0xC9,0xD4,0x54,0xDF,0x87,0x2F,0xC7,0x22,0xB2,0x26,0xCC,0x9F,
-0x59,0x54,0x68,0x9F,0xFC,0xBE,0x2A,0x2F,0xC4,0x55,0x1C,0x75,0x40,0x60,0x17,0x85,
-0x02,0x55,0x39,0x8B,0x7F,0x05,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,
-0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,
-0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,
-0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x9C,0x5F,0x00,0xDF,0xAA,
-0x01,0xD7,0x30,0x2B,0x38,0x88,0xA2,0xB8,0x6D,0x4A,0x9C,0xF2,0x11,0x91,0x83,0x30,
-0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x03,0x82,
-0x01,0x01,0x00,0x4B,0x36,0xA6,0x84,0x77,0x69,0xDD,0x3B,0x19,0x9F,0x67,0x23,0x08,
-0x6F,0x0E,0x61,0xC9,0xFD,0x84,0xDC,0x5F,0xD8,0x36,0x81,0xCD,0xD8,0x1B,0x41,0x2D,
-0x9F,0x60,0xDD,0xC7,0x1A,0x68,0xD9,0xD1,0x6E,0x86,0xE1,0x88,0x23,0xCF,0x13,0xDE,
-0x43,0xCF,0xE2,0x34,0xB3,0x04,0x9D,0x1F,0x29,0xD5,0xBF,0xF8,0x5E,0xC8,0xD5,0xC1,
-0xBD,0xEE,0x92,0x6F,0x32,0x74,0xF2,0x91,0x82,0x2F,0xBD,0x82,0x42,0x7A,0xAD,0x2A,
-0xB7,0x20,0x7D,0x4D,0xBC,0x7A,0x55,0x12,0xC2,0x15,0xEA,0xBD,0xF7,0x6A,0x95,0x2E,
-0x6C,0x74,0x9F,0xCF,0x1C,0xB4,0xF2,0xC5,0x01,0xA3,0x85,0xD0,0x72,0x3E,0xAD,0x73,
-0xAB,0x0B,0x9B,0x75,0x0C,0x6D,0x45,0xB7,0x8E,0x94,0xAC,0x96,0x37,0xB5,0xA0,0xD0,
-0x8F,0x15,0x47,0x0E,0xE3,0xE8,0x83,0xDD,0x8F,0xFD,0xEF,0x41,0x01,0x77,0xCC,0x27,
-0xA9,0x62,0x85,0x33,0xF2,0x37,0x08,0xEF,0x71,0xCF,0x77,0x06,0xDE,0xC8,0x19,0x1D,
-0x88,0x40,0xCF,0x7D,0x46,0x1D,0xFF,0x1E,0xC7,0xE1,0xCE,0xFF,0x23,0xDB,0xC6,0xFA,
-0x8D,0x55,0x4E,0xA9,0x02,0xE7,0x47,0x11,0x46,0x3E,0xF4,0xFD,0xBD,0x7B,0x29,0x26,
-0xBB,0xA9,0x61,0x62,0x37,0x28,0xB6,0x2D,0x2A,0xF6,0x10,0x86,0x64,0xC9,0x70,0xA7,
-0xD2,0xAD,0xB7,0x29,0x70,0x79,0xEA,0x3C,0xDA,0x63,0x25,0x9F,0xFD,0x68,0xB7,0x30,
-0xEC,0x70,0xFB,0x75,0x8A,0xB7,0x6D,0x60,0x67,0xB2,0x1E,0xC8,0xB9,0xE9,0xD8,0xA8,
-0x6F,0x02,0x8B,0x67,0x0D,0x4D,0x26,0x57,0x71,0xDA,0x20,0xFC,0xC1,0x4A,0x50,0x8D,
-0xB1,0x28,0xBA,
+const unsigned char TC_TrustCenter_Universal_CA_I_certificate[993]={
+0x30,0x82,0x03,0xDD,0x30,0x82,0x02,0xC5,0xA0,0x03,0x02,0x01,0x02,0x02,0x0E,0x1D,
+0xA2,0x00,0x01,0x00,0x02,0xEC,0xB7,0x60,0x80,0x78,0x8D,0xB6,0x06,0x30,0x0D,0x06,
+0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x79,0x31,0x0B,
+0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x44,0x45,0x31,0x1C,0x30,0x1A,0x06,
+0x03,0x55,0x04,0x0A,0x13,0x13,0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,
+0x6E,0x74,0x65,0x72,0x20,0x47,0x6D,0x62,0x48,0x31,0x24,0x30,0x22,0x06,0x03,0x55,
+0x04,0x0B,0x13,0x1B,0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,0x6E,0x74,
+0x65,0x72,0x20,0x55,0x6E,0x69,0x76,0x65,0x72,0x73,0x61,0x6C,0x20,0x43,0x41,0x31,
+0x26,0x30,0x24,0x06,0x03,0x55,0x04,0x03,0x13,0x1D,0x54,0x43,0x20,0x54,0x72,0x75,
+0x73,0x74,0x43,0x65,0x6E,0x74,0x65,0x72,0x20,0x55,0x6E,0x69,0x76,0x65,0x72,0x73,
+0x61,0x6C,0x20,0x43,0x41,0x20,0x49,0x30,0x1E,0x17,0x0D,0x30,0x36,0x30,0x33,0x32,
+0x32,0x31,0x35,0x35,0x34,0x32,0x38,0x5A,0x17,0x0D,0x32,0x35,0x31,0x32,0x33,0x31,
+0x32,0x32,0x35,0x39,0x35,0x39,0x5A,0x30,0x79,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,
+0x04,0x06,0x13,0x02,0x44,0x45,0x31,0x1C,0x30,0x1A,0x06,0x03,0x55,0x04,0x0A,0x13,
+0x13,0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,0x6E,0x74,0x65,0x72,0x20,
+0x47,0x6D,0x62,0x48,0x31,0x24,0x30,0x22,0x06,0x03,0x55,0x04,0x0B,0x13,0x1B,0x54,
+0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,0x6E,0x74,0x65,0x72,0x20,0x55,0x6E,
+0x69,0x76,0x65,0x72,0x73,0x61,0x6C,0x20,0x43,0x41,0x31,0x26,0x30,0x24,0x06,0x03,
+0x55,0x04,0x03,0x13,0x1D,0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,0x6E,
+0x74,0x65,0x72,0x20,0x55,0x6E,0x69,0x76,0x65,0x72,0x73,0x61,0x6C,0x20,0x43,0x41,
+0x20,0x49,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,
+0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,
+0x01,0x01,0x00,0xA4,0x77,0x23,0x96,0x44,0xAF,0x90,0xF4,0x31,0xA7,0x10,0xF4,0x26,
+0x87,0x9C,0xF3,0x38,0xD9,0x0F,0x5E,0xDE,0xCF,0x41,0xE8,0x31,0xAD,0xC6,0x74,0x91,
+0x24,0x96,0x78,0x1E,0x09,0xA0,0x9B,0x9A,0x95,0x4A,0x4A,0xF5,0x62,0x7C,0x02,0xA8,
+0xCA,0xAC,0xFB,0x5A,0x04,0x76,0x39,0xDE,0x5F,0xF1,0xF9,0xB3,0xBF,0xF3,0x03,0x58,
+0x55,0xD2,0xAA,0xB7,0xE3,0x04,0x22,0xD1,0xF8,0x94,0xDA,0x22,0x08,0x00,0x8D,0xD3,
+0x7C,0x26,0x5D,0xCC,0x77,0x79,0xE7,0x2C,0x78,0x39,0xA8,0x26,0x73,0x0E,0xA2,0x5D,
+0x25,0x69,0x85,0x4F,0x55,0x0E,0x9A,0xEF,0xC6,0xB9,0x44,0xE1,0x57,0x3D,0xDF,0x1F,
+0x54,0x22,0xE5,0x6F,0x65,0xAA,0x33,0x84,0x3A,0xF3,0xCE,0x7A,0xBE,0x55,0x97,0xAE,
+0x8D,0x12,0x0F,0x14,0x33,0xE2,0x50,0x70,0xC3,0x49,0x87,0x13,0xBC,0x51,0xDE,0xD7,
+0x98,0x12,0x5A,0xEF,0x3A,0x83,0x33,0x92,0x06,0x75,0x8B,0x92,0x7C,0x12,0x68,0x7B,
+0x70,0x6A,0x0F,0xB5,0x9B,0xB6,0x77,0x5B,0x48,0x59,0x9D,0xE4,0xEF,0x5A,0xAD,0xF3,
+0xC1,0x9E,0xD4,0xD7,0x45,0x4E,0xCA,0x56,0x34,0x21,0xBC,0x3E,0x17,0x5B,0x6F,0x77,
+0x0C,0x48,0x01,0x43,0x29,0xB0,0xDD,0x3F,0x96,0x6E,0xE6,0x95,0xAA,0x0C,0xC0,0x20,
+0xB6,0xFD,0x3E,0x36,0x27,0x9C,0xE3,0x5C,0xCF,0x4E,0x81,0xDC,0x19,0xBB,0x91,0x90,
+0x7D,0xEC,0xE6,0x97,0x04,0x1E,0x93,0xCC,0x22,0x49,0xD7,0x97,0x86,0xB6,0x13,0x0A,
+0x3C,0x43,0x23,0x77,0x7E,0xF0,0xDC,0xE6,0xCD,0x24,0x1F,0x3B,0x83,0x9B,0x34,0x3A,
+0x83,0x34,0xE3,0x02,0x03,0x01,0x00,0x01,0xA3,0x63,0x30,0x61,0x30,0x1F,0x06,0x03,
+0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,0x92,0xA4,0x75,0x2C,0xA4,0x9E,0xBE,
+0x81,0x44,0xEB,0x79,0xFC,0x8A,0xC5,0x95,0xA5,0xEB,0x10,0x75,0x73,0x30,0x0F,0x06,
+0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,
+0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x86,0x30,0x1D,
+0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x92,0xA4,0x75,0x2C,0xA4,0x9E,0xBE,
+0x81,0x44,0xEB,0x79,0xFC,0x8A,0xC5,0x95,0xA5,0xEB,0x10,0x75,0x73,0x30,0x0D,0x06,
+0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,
+0x00,0x28,0xD2,0xE0,0x86,0xD5,0xE6,0xF8,0x7B,0xF0,0x97,0xDC,0x22,0x6B,0x3B,0x95,
+0x14,0x56,0x0F,0x11,0x30,0xA5,0x9A,0x4F,0x3A,0xB0,0x3A,0xE0,0x06,0xCB,0x65,0xF5,
+0xED,0xC6,0x97,0x27,0xFE,0x25,0xF2,0x57,0xE6,0x5E,0x95,0x8C,0x3E,0x64,0x60,0x15,
+0x5A,0x7F,0x2F,0x0D,0x01,0xC5,0xB1,0x60,0xFD,0x45,0x35,0xCF,0xF0,0xB2,0xBF,0x06,
+0xD9,0xEF,0x5A,0xBE,0xB3,0x62,0x21,0xB4,0xD7,0xAB,0x35,0x7C,0x53,0x3E,0xA6,0x27,
+0xF1,0xA1,0x2D,0xDA,0x1A,0x23,0x9D,0xCC,0xDD,0xEC,0x3C,0x2D,0x9E,0x27,0x34,0x5D,
+0x0F,0xC2,0x36,0x79,0xBC,0xC9,0x4A,0x62,0x2D,0xED,0x6B,0xD9,0x7D,0x41,0x43,0x7C,
+0xB6,0xAA,0xCA,0xED,0x61,0xB1,0x37,0x82,0x15,0x09,0x1A,0x8A,0x16,0x30,0xD8,0xEC,
+0xC9,0xD6,0x47,0x72,0x78,0x4B,0x10,0x46,0x14,0x8E,0x5F,0x0E,0xAF,0xEC,0xC7,0x2F,
+0xAB,0x10,0xD7,0xB6,0xF1,0x6E,0xEC,0x86,0xB2,0xC2,0xE8,0x0D,0x92,0x73,0xDC,0xA2,
+0xF4,0x0F,0x3A,0xBF,0x61,0x23,0x10,0x89,0x9C,0x48,0x40,0x6E,0x70,0x00,0xB3,0xD3,
+0xBA,0x37,0x44,0x58,0x11,0x7A,0x02,0x6A,0x88,0xF0,0x37,0x34,0xF0,0x19,0xE9,0xAC,
+0xD4,0x65,0x73,0xF6,0x69,0x8C,0x64,0x94,0x3A,0x79,0x85,0x29,0xB0,0x16,0x2B,0x0C,
+0x82,0x3F,0x06,0x9C,0xC7,0xFD,0x10,0x2B,0x9E,0x0F,0x2C,0xB6,0x9E,0xE3,0x15,0xBF,
+0xD9,0x36,0x1C,0xBA,0x25,0x1A,0x52,0x3D,0x1A,0xEC,0x22,0x0C,0x1C,0xE0,0xA4,0xA2,
+0x3D,0xF0,0xE8,0x39,0xCF,0x81,0xC0,0x7B,0xED,0x5D,0x1F,0x6F,0xC5,0xD0,0x0B,0xD7,
+0x98,
};
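/*
 * Illustrative sketch, not from the generated table: because each DER length
 * is fixed, a C11 compile-time guard could catch a truncated entry. For the
 * TC TrustCenter root above, the header 0x30,0x82,0x03,0xDD declares
 * 0x03DD = 989 content bytes, so the whole blob is 989 + 4 = 993 bytes:
 */
_Static_assert(sizeof(TC_TrustCenter_Universal_CA_I_certificate) == 993,
               "DER SEQUENCE header and array bound disagree");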
-/* subject:/C=IL/O=StartCom Ltd./OU=Secure Digital Certificate Signing/CN=StartCom Certification Authority */
-/* issuer :/C=IL/O=StartCom Ltd./OU=Secure Digital Certificate Signing/CN=StartCom Certification Authority */
-const unsigned char StartCom_Certification_Authority_certificate[1931]={
-0x30,0x82,0x07,0x87,0x30,0x82,0x05,0x6F,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x2D,
-0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x30,
-0x7D,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x49,0x4C,0x31,0x16,
-0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x53,0x74,0x61,0x72,0x74,0x43,0x6F,
-0x6D,0x20,0x4C,0x74,0x64,0x2E,0x31,0x2B,0x30,0x29,0x06,0x03,0x55,0x04,0x0B,0x13,
-0x22,0x53,0x65,0x63,0x75,0x72,0x65,0x20,0x44,0x69,0x67,0x69,0x74,0x61,0x6C,0x20,
-0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x53,0x69,0x67,0x6E,
-0x69,0x6E,0x67,0x31,0x29,0x30,0x27,0x06,0x03,0x55,0x04,0x03,0x13,0x20,0x53,0x74,
-0x61,0x72,0x74,0x43,0x6F,0x6D,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,
-0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x1E,
-0x17,0x0D,0x30,0x36,0x30,0x39,0x31,0x37,0x31,0x39,0x34,0x36,0x33,0x37,0x5A,0x17,
-0x0D,0x33,0x36,0x30,0x39,0x31,0x37,0x31,0x39,0x34,0x36,0x33,0x36,0x5A,0x30,0x7D,
-0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x49,0x4C,0x31,0x16,0x30,
-0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x53,0x74,0x61,0x72,0x74,0x43,0x6F,0x6D,
-0x20,0x4C,0x74,0x64,0x2E,0x31,0x2B,0x30,0x29,0x06,0x03,0x55,0x04,0x0B,0x13,0x22,
-0x53,0x65,0x63,0x75,0x72,0x65,0x20,0x44,0x69,0x67,0x69,0x74,0x61,0x6C,0x20,0x43,
-0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x53,0x69,0x67,0x6E,0x69,
-0x6E,0x67,0x31,0x29,0x30,0x27,0x06,0x03,0x55,0x04,0x03,0x13,0x20,0x53,0x74,0x61,
-0x72,0x74,0x43,0x6F,0x6D,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,
-0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x82,0x02,
-0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,
-0x03,0x82,0x02,0x0F,0x00,0x30,0x82,0x02,0x0A,0x02,0x82,0x02,0x01,0x00,0xC1,0x88,
-0xDB,0x09,0xBC,0x6C,0x46,0x7C,0x78,0x9F,0x95,0x7B,0xB5,0x33,0x90,0xF2,0x72,0x62,
-0xD6,0xC1,0x36,0x20,0x22,0x24,0x5E,0xCE,0xE9,0x77,0xF2,0x43,0x0A,0xA2,0x06,0x64,
-0xA4,0xCC,0x8E,0x36,0xF8,0x38,0xE6,0x23,0xF0,0x6E,0x6D,0xB1,0x3C,0xDD,0x72,0xA3,
-0x85,0x1C,0xA1,0xD3,0x3D,0xB4,0x33,0x2B,0xD3,0x2F,0xAF,0xFE,0xEA,0xB0,0x41,0x59,
-0x67,0xB6,0xC4,0x06,0x7D,0x0A,0x9E,0x74,0x85,0xD6,0x79,0x4C,0x80,0x37,0x7A,0xDF,
-0x39,0x05,0x52,0x59,0xF7,0xF4,0x1B,0x46,0x43,0xA4,0xD2,0x85,0x85,0xD2,0xC3,0x71,
-0xF3,0x75,0x62,0x34,0xBA,0x2C,0x8A,0x7F,0x1E,0x8F,0xEE,0xED,0x34,0xD0,0x11,0xC7,
-0x96,0xCD,0x52,0x3D,0xBA,0x33,0xD6,0xDD,0x4D,0xDE,0x0B,0x3B,0x4A,0x4B,0x9F,0xC2,
-0x26,0x2F,0xFA,0xB5,0x16,0x1C,0x72,0x35,0x77,0xCA,0x3C,0x5D,0xE6,0xCA,0xE1,0x26,
-0x8B,0x1A,0x36,0x76,0x5C,0x01,0xDB,0x74,0x14,0x25,0xFE,0xED,0xB5,0xA0,0x88,0x0F,
-0xDD,0x78,0xCA,0x2D,0x1F,0x07,0x97,0x30,0x01,0x2D,0x72,0x79,0xFA,0x46,0xD6,0x13,
-0x2A,0xA8,0xB9,0xA6,0xAB,0x83,0x49,0x1D,0xE5,0xF2,0xEF,0xDD,0xE4,0x01,0x8E,0x18,
-0x0A,0x8F,0x63,0x53,0x16,0x85,0x62,0xA9,0x0E,0x19,0x3A,0xCC,0xB5,0x66,0xA6,0xC2,
-0x6B,0x74,0x07,0xE4,0x2B,0xE1,0x76,0x3E,0xB4,0x6D,0xD8,0xF6,0x44,0xE1,0x73,0x62,
-0x1F,0x3B,0xC4,0xBE,0xA0,0x53,0x56,0x25,0x6C,0x51,0x09,0xF7,0xAA,0xAB,0xCA,0xBF,
-0x76,0xFD,0x6D,0x9B,0xF3,0x9D,0xDB,0xBF,0x3D,0x66,0xBC,0x0C,0x56,0xAA,0xAF,0x98,
-0x48,0x95,0x3A,0x4B,0xDF,0xA7,0x58,0x50,0xD9,0x38,0x75,0xA9,0x5B,0xEA,0x43,0x0C,
-0x02,0xFF,0x99,0xEB,0xE8,0x6C,0x4D,0x70,0x5B,0x29,0x65,0x9C,0xDD,0xAA,0x5D,0xCC,
-0xAF,0x01,0x31,0xEC,0x0C,0xEB,0xD2,0x8D,0xE8,0xEA,0x9C,0x7B,0xE6,0x6E,0xF7,0x27,
-0x66,0x0C,0x1A,0x48,0xD7,0x6E,0x42,0xE3,0x3F,0xDE,0x21,0x3E,0x7B,0xE1,0x0D,0x70,
-0xFB,0x63,0xAA,0xA8,0x6C,0x1A,0x54,0xB4,0x5C,0x25,0x7A,0xC9,0xA2,0xC9,0x8B,0x16,
-0xA6,0xBB,0x2C,0x7E,0x17,0x5E,0x05,0x4D,0x58,0x6E,0x12,0x1D,0x01,0xEE,0x12,0x10,
-0x0D,0xC6,0x32,0x7F,0x18,0xFF,0xFC,0xF4,0xFA,0xCD,0x6E,0x91,0xE8,0x36,0x49,0xBE,
-0x1A,0x48,0x69,0x8B,0xC2,0x96,0x4D,0x1A,0x12,0xB2,0x69,0x17,0xC1,0x0A,0x90,0xD6,
-0xFA,0x79,0x22,0x48,0xBF,0xBA,0x7B,0x69,0xF8,0x70,0xC7,0xFA,0x7A,0x37,0xD8,0xD8,
-0x0D,0xD2,0x76,0x4F,0x57,0xFF,0x90,0xB7,0xE3,0x91,0xD2,0xDD,0xEF,0xC2,0x60,0xB7,
-0x67,0x3A,0xDD,0xFE,0xAA,0x9C,0xF0,0xD4,0x8B,0x7F,0x72,0x22,0xCE,0xC6,0x9F,0x97,
-0xB6,0xF8,0xAF,0x8A,0xA0,0x10,0xA8,0xD9,0xFB,0x18,0xC6,0xB6,0xB5,0x5C,0x52,0x3C,
-0x89,0xB6,0x19,0x2A,0x73,0x01,0x0A,0x0F,0x03,0xB3,0x12,0x60,0xF2,0x7A,0x2F,0x81,
-0xDB,0xA3,0x6E,0xFF,0x26,0x30,0x97,0xF5,0x8B,0xDD,0x89,0x57,0xB6,0xAD,0x3D,0xB3,
-0xAF,0x2B,0xC5,0xB7,0x76,0x02,0xF0,0xA5,0xD6,0x2B,0x9A,0x86,0x14,0x2A,0x72,0xF6,
-0xE3,0x33,0x8C,0x5D,0x09,0x4B,0x13,0xDF,0xBB,0x8C,0x74,0x13,0x52,0x4B,0x02,0x03,
-0x01,0x00,0x01,0xA3,0x82,0x02,0x10,0x30,0x82,0x02,0x0C,0x30,0x0F,0x06,0x03,0x55,
-0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,
-0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x1D,0x06,0x03,
-0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x4E,0x0B,0xEF,0x1A,0xA4,0x40,0x5B,0xA5,0x17,
-0x69,0x87,0x30,0xCA,0x34,0x68,0x43,0xD0,0x41,0xAE,0xF2,0x30,0x1F,0x06,0x03,0x55,
-0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,0x4E,0x0B,0xEF,0x1A,0xA4,0x40,0x5B,0xA5,
-0x17,0x69,0x87,0x30,0xCA,0x34,0x68,0x43,0xD0,0x41,0xAE,0xF2,0x30,0x82,0x01,0x5A,
-0x06,0x03,0x55,0x1D,0x20,0x04,0x82,0x01,0x51,0x30,0x82,0x01,0x4D,0x30,0x82,0x01,
-0x49,0x06,0x0B,0x2B,0x06,0x01,0x04,0x01,0x81,0xB5,0x37,0x01,0x01,0x01,0x30,0x82,
-0x01,0x38,0x30,0x2E,0x06,0x08,0x2B,0x06,0x01,0x05,0x05,0x07,0x02,0x01,0x16,0x22,
-0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x77,0x77,0x77,0x2E,0x73,0x74,0x61,0x72,0x74,
-0x73,0x73,0x6C,0x2E,0x63,0x6F,0x6D,0x2F,0x70,0x6F,0x6C,0x69,0x63,0x79,0x2E,0x70,
-0x64,0x66,0x30,0x34,0x06,0x08,0x2B,0x06,0x01,0x05,0x05,0x07,0x02,0x01,0x16,0x28,
-0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x77,0x77,0x77,0x2E,0x73,0x74,0x61,0x72,0x74,
-0x73,0x73,0x6C,0x2E,0x63,0x6F,0x6D,0x2F,0x69,0x6E,0x74,0x65,0x72,0x6D,0x65,0x64,
-0x69,0x61,0x74,0x65,0x2E,0x70,0x64,0x66,0x30,0x81,0xCF,0x06,0x08,0x2B,0x06,0x01,
-0x05,0x05,0x07,0x02,0x02,0x30,0x81,0xC2,0x30,0x27,0x16,0x20,0x53,0x74,0x61,0x72,
-0x74,0x20,0x43,0x6F,0x6D,0x6D,0x65,0x72,0x63,0x69,0x61,0x6C,0x20,0x28,0x53,0x74,
-0x61,0x72,0x74,0x43,0x6F,0x6D,0x29,0x20,0x4C,0x74,0x64,0x2E,0x30,0x03,0x02,0x01,
-0x01,0x1A,0x81,0x96,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x20,0x4C,0x69,0x61,0x62,
-0x69,0x6C,0x69,0x74,0x79,0x2C,0x20,0x72,0x65,0x61,0x64,0x20,0x74,0x68,0x65,0x20,
-0x73,0x65,0x63,0x74,0x69,0x6F,0x6E,0x20,0x2A,0x4C,0x65,0x67,0x61,0x6C,0x20,0x4C,
-0x69,0x6D,0x69,0x74,0x61,0x74,0x69,0x6F,0x6E,0x73,0x2A,0x20,0x6F,0x66,0x20,0x74,
-0x68,0x65,0x20,0x53,0x74,0x61,0x72,0x74,0x43,0x6F,0x6D,0x20,0x43,0x65,0x72,0x74,
-0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,
-0x69,0x74,0x79,0x20,0x50,0x6F,0x6C,0x69,0x63,0x79,0x20,0x61,0x76,0x61,0x69,0x6C,
-0x61,0x62,0x6C,0x65,0x20,0x61,0x74,0x20,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x77,
-0x77,0x77,0x2E,0x73,0x74,0x61,0x72,0x74,0x73,0x73,0x6C,0x2E,0x63,0x6F,0x6D,0x2F,
-0x70,0x6F,0x6C,0x69,0x63,0x79,0x2E,0x70,0x64,0x66,0x30,0x11,0x06,0x09,0x60,0x86,
-0x48,0x01,0x86,0xF8,0x42,0x01,0x01,0x04,0x04,0x03,0x02,0x00,0x07,0x30,0x38,0x06,
-0x09,0x60,0x86,0x48,0x01,0x86,0xF8,0x42,0x01,0x0D,0x04,0x2B,0x16,0x29,0x53,0x74,
-0x61,0x72,0x74,0x43,0x6F,0x6D,0x20,0x46,0x72,0x65,0x65,0x20,0x53,0x53,0x4C,0x20,
-0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,
-0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,
-0x0D,0x01,0x01,0x0B,0x05,0x00,0x03,0x82,0x02,0x01,0x00,0x8E,0x8F,0xE7,0xDC,0x94,
-0x79,0x7C,0xF1,0x85,0x7F,0x9F,0x49,0x6F,0x6B,0xCA,0x5D,0xFB,0x8C,0xFE,0x04,0xC5,
-0xC1,0x62,0xD1,0x7D,0x42,0x8A,0xBC,0x53,0xB7,0x94,0x03,0x66,0x30,0x3F,0xB1,0xE7,
-0x0A,0xA7,0x50,0x20,0x55,0x25,0x7F,0x76,0x7A,0x14,0x0D,0xEB,0x04,0x0E,0x40,0xE6,
-0x3E,0xD8,0x88,0xAB,0x07,0x27,0x83,0xA9,0x75,0xA6,0x37,0x73,0xC7,0xFD,0x4B,0xD2,
-0x4D,0xAD,0x17,0x40,0xC8,0x46,0xBE,0x3B,0x7F,0x51,0xFC,0xC3,0xB6,0x05,0x31,0xDC,
-0xCD,0x85,0x22,0x4E,0x71,0xB7,0xF2,0x71,0x5E,0xB0,0x1A,0xC6,0xBA,0x93,0x8B,0x78,
-0x92,0x4A,0x85,0xF8,0x78,0x0F,0x83,0xFE,0x2F,0xAD,0x2C,0xF7,0xE4,0xA4,0xBB,0x2D,
-0xD0,0xE7,0x0D,0x3A,0xB8,0x3E,0xCE,0xF6,0x78,0xF6,0xAE,0x47,0x24,0xCA,0xA3,0x35,
-0x36,0xCE,0xC7,0xC6,0x87,0x98,0xDA,0xEC,0xFB,0xE9,0xB2,0xCE,0x27,0x9B,0x88,0xC3,
-0x04,0xA1,0xF6,0x0B,0x59,0x68,0xAF,0xC9,0xDB,0x10,0x0F,0x4D,0xF6,0x64,0x63,0x5C,
-0xA5,0x12,0x6F,0x92,0xB2,0x93,0x94,0xC7,0x88,0x17,0x0E,0x93,0xB6,0x7E,0x62,0x8B,
-0x90,0x7F,0xAB,0x4E,0x9F,0xFC,0xE3,0x75,0x14,0x4F,0x2A,0x32,0xDF,0x5B,0x0D,0xE0,
-0xF5,0x7B,0x93,0x0D,0xAB,0xA1,0xCF,0x87,0xE1,0xA5,0x04,0x45,0xE8,0x3C,0x12,0xA5,
-0x09,0xC5,0xB0,0xD1,0xB7,0x53,0xF3,0x60,0x14,0xBA,0x85,0x69,0x6A,0x21,0x7C,0x1F,
-0x75,0x61,0x17,0x20,0x17,0x7B,0x6C,0x3B,0x41,0x29,0x5C,0xE1,0xAC,0x5A,0xD1,0xCD,
-0x8C,0x9B,0xEB,0x60,0x1D,0x19,0xEC,0xF7,0xE5,0xB0,0xDA,0xF9,0x79,0x18,0xA5,0x45,
-0x3F,0x49,0x43,0x57,0xD2,0xDD,0x24,0xD5,0x2C,0xA3,0xFD,0x91,0x8D,0x27,0xB5,0xE5,
-0xEB,0x14,0x06,0x9A,0x4C,0x7B,0x21,0xBB,0x3A,0xAD,0x30,0x06,0x18,0xC0,0xD8,0xC1,
-0x6B,0x2C,0x7F,0x59,0x5C,0x5D,0x91,0xB1,0x70,0x22,0x57,0xEB,0x8A,0x6B,0x48,0x4A,
-0xD5,0x0F,0x29,0xEC,0xC6,0x40,0xC0,0x2F,0x88,0x4C,0x68,0x01,0x17,0x77,0xF4,0x24,
-0x19,0x4F,0xBD,0xFA,0xE1,0xB2,0x20,0x21,0x4B,0xDD,0x1A,0xD8,0x29,0x7D,0xAA,0xB8,
-0xDE,0x54,0xEC,0x21,0x55,0x80,0x6C,0x1E,0xF5,0x30,0xC8,0xA3,0x10,0xE5,0xB2,0xE6,
-0x2A,0x14,0x31,0xC3,0x85,0x2D,0x8C,0x98,0xB1,0x86,0x5A,0x4F,0x89,0x59,0x2D,0xB9,
-0xC7,0xF7,0x1C,0xC8,0x8A,0x7F,0xC0,0x9D,0x05,0x4A,0xE6,0x42,0x4F,0x62,0xA3,0x6D,
-0x29,0xA4,0x1F,0x85,0xAB,0xDB,0xE5,0x81,0xC8,0xAD,0x2A,0x3D,0x4C,0x5D,0x5B,0x84,
-0x26,0x71,0xC4,0x85,0x5E,0x71,0x24,0xCA,0xA5,0x1B,0x6C,0xD8,0x61,0xD3,0x1A,0xE0,
-0x54,0xDB,0xCE,0xBA,0xA9,0x32,0xB5,0x22,0xF6,0x73,0x41,0x09,0x5D,0xB8,0x17,0x5D,
-0x0E,0x0F,0x99,0x90,0xD6,0x47,0xDA,0x6F,0x0A,0x3A,0x62,0x28,0x14,0x67,0x82,0xD9,
-0xF1,0xD0,0x80,0x59,0x9B,0xCB,0x31,0xD8,0x9B,0x0F,0x8C,0x77,0x4E,0xB5,0x68,0x8A,
-0xF2,0x6C,0xF6,0x24,0x0E,0x2D,0x6C,0x70,0xC5,0x73,0xD1,0xDE,0x14,0xD0,0x71,0x8F,
-0xB6,0xD3,0x7B,0x02,0xF6,0xE3,0xB8,0xD4,0x09,0x6E,0x6B,0x9E,0x75,0x84,0x39,0xE6,
-0x7F,0x25,0xA5,0xF2,0x48,0x00,0xC0,0xA4,0x01,0xDA,0x3F,
+/* subject:/C=GB/ST=Greater Manchester/L=Salford/O=Comodo CA Limited/CN=Trusted Certificate Services */
+/* issuer :/C=GB/ST=Greater Manchester/L=Salford/O=Comodo CA Limited/CN=Trusted Certificate Services */
+const unsigned char Comodo_Trusted_Services_root_certificate[1095]={
+0x30,0x82,0x04,0x43,0x30,0x82,0x03,0x2B,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,
+0x7F,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x47,0x42,0x31,0x1B,
+0x30,0x19,0x06,0x03,0x55,0x04,0x08,0x0C,0x12,0x47,0x72,0x65,0x61,0x74,0x65,0x72,
+0x20,0x4D,0x61,0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31,0x10,0x30,0x0E,0x06,
+0x03,0x55,0x04,0x07,0x0C,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,0x30,
+0x18,0x06,0x03,0x55,0x04,0x0A,0x0C,0x11,0x43,0x6F,0x6D,0x6F,0x64,0x6F,0x20,0x43,
+0x41,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x25,0x30,0x23,0x06,0x03,0x55,
+0x04,0x03,0x0C,0x1C,0x54,0x72,0x75,0x73,0x74,0x65,0x64,0x20,0x43,0x65,0x72,0x74,
+0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,
+0x30,0x1E,0x17,0x0D,0x30,0x34,0x30,0x31,0x30,0x31,0x30,0x30,0x30,0x30,0x30,0x30,
+0x5A,0x17,0x0D,0x32,0x38,0x31,0x32,0x33,0x31,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,
+0x30,0x7F,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x47,0x42,0x31,
+0x1B,0x30,0x19,0x06,0x03,0x55,0x04,0x08,0x0C,0x12,0x47,0x72,0x65,0x61,0x74,0x65,
+0x72,0x20,0x4D,0x61,0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31,0x10,0x30,0x0E,
+0x06,0x03,0x55,0x04,0x07,0x0C,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,
+0x30,0x18,0x06,0x03,0x55,0x04,0x0A,0x0C,0x11,0x43,0x6F,0x6D,0x6F,0x64,0x6F,0x20,
+0x43,0x41,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x25,0x30,0x23,0x06,0x03,
+0x55,0x04,0x03,0x0C,0x1C,0x54,0x72,0x75,0x73,0x74,0x65,0x64,0x20,0x43,0x65,0x72,
+0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,
+0x73,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,
+0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,
+0x01,0x00,0xDF,0x71,0x6F,0x36,0x58,0x53,0x5A,0xF2,0x36,0x54,0x57,0x80,0xC4,0x74,
+0x08,0x20,0xED,0x18,0x7F,0x2A,0x1D,0xE6,0x35,0x9A,0x1E,0x25,0xAC,0x9C,0xE5,0x96,
+0x7E,0x72,0x52,0xA0,0x15,0x42,0xDB,0x59,0xDD,0x64,0x7A,0x1A,0xD0,0xB8,0x7B,0xDD,
+0x39,0x15,0xBC,0x55,0x48,0xC4,0xED,0x3A,0x00,0xEA,0x31,0x11,0xBA,0xF2,0x71,0x74,
+0x1A,0x67,0xB8,0xCF,0x33,0xCC,0xA8,0x31,0xAF,0xA3,0xE3,0xD7,0x7F,0xBF,0x33,0x2D,
+0x4C,0x6A,0x3C,0xEC,0x8B,0xC3,0x92,0xD2,0x53,0x77,0x24,0x74,0x9C,0x07,0x6E,0x70,
+0xFC,0xBD,0x0B,0x5B,0x76,0xBA,0x5F,0xF2,0xFF,0xD7,0x37,0x4B,0x4A,0x60,0x78,0xF7,
+0xF0,0xFA,0xCA,0x70,0xB4,0xEA,0x59,0xAA,0xA3,0xCE,0x48,0x2F,0xA9,0xC3,0xB2,0x0B,
+0x7E,0x17,0x72,0x16,0x0C,0xA6,0x07,0x0C,0x1B,0x38,0xCF,0xC9,0x62,0xB7,0x3F,0xA0,
+0x93,0xA5,0x87,0x41,0xF2,0xB7,0x70,0x40,0x77,0xD8,0xBE,0x14,0x7C,0xE3,0xA8,0xC0,
+0x7A,0x8E,0xE9,0x63,0x6A,0xD1,0x0F,0x9A,0xC6,0xD2,0xF4,0x8B,0x3A,0x14,0x04,0x56,
+0xD4,0xED,0xB8,0xCC,0x6E,0xF5,0xFB,0xE2,0x2C,0x58,0xBD,0x7F,0x4F,0x6B,0x2B,0xF7,
+0x60,0x24,0x58,0x24,0xCE,0x26,0xEF,0x34,0x91,0x3A,0xD5,0xE3,0x81,0xD0,0xB2,0xF0,
+0x04,0x02,0xD7,0x5B,0xB7,0x3E,0x92,0xAC,0x6B,0x12,0x8A,0xF9,0xE4,0x05,0xB0,0x3B,
+0x91,0x49,0x5C,0xB2,0xEB,0x53,0xEA,0xF8,0x9F,0x47,0x86,0xEE,0xBF,0x95,0xC0,0xC0,
+0x06,0x9F,0xD2,0x5B,0x5E,0x11,0x1B,0xF4,0xC7,0x04,0x35,0x29,0xD2,0x55,0x5C,0xE4,
+0xED,0xEB,0x02,0x03,0x01,0x00,0x01,0xA3,0x81,0xC9,0x30,0x81,0xC6,0x30,0x1D,0x06,
+0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xC5,0x7B,0x58,0xBD,0xED,0xDA,0x25,0x69,
+0xD2,0xF7,0x59,0x16,0xA8,0xB3,0x32,0xC0,0x7B,0x27,0x5B,0xF4,0x30,0x0E,0x06,0x03,
+0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,
+0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x81,0x83,
+0x06,0x03,0x55,0x1D,0x1F,0x04,0x7C,0x30,0x7A,0x30,0x3C,0xA0,0x3A,0xA0,0x38,0x86,
+0x36,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x63,0x72,0x6C,0x2E,0x63,0x6F,0x6D,0x6F,
+0x64,0x6F,0x63,0x61,0x2E,0x63,0x6F,0x6D,0x2F,0x54,0x72,0x75,0x73,0x74,0x65,0x64,
+0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x53,0x65,0x72,0x76,0x69,
+0x63,0x65,0x73,0x2E,0x63,0x72,0x6C,0x30,0x3A,0xA0,0x38,0xA0,0x36,0x86,0x34,0x68,
+0x74,0x74,0x70,0x3A,0x2F,0x2F,0x63,0x72,0x6C,0x2E,0x63,0x6F,0x6D,0x6F,0x64,0x6F,
+0x2E,0x6E,0x65,0x74,0x2F,0x54,0x72,0x75,0x73,0x74,0x65,0x64,0x43,0x65,0x72,0x74,
+0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x2E,
+0x63,0x72,0x6C,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,
+0x05,0x00,0x03,0x82,0x01,0x01,0x00,0xC8,0x93,0x81,0x3B,0x89,0xB4,0xAF,0xB8,0x84,
+0x12,0x4C,0x8D,0xD2,0xF0,0xDB,0x70,0xBA,0x57,0x86,0x15,0x34,0x10,0xB9,0x2F,0x7F,
+0x1E,0xB0,0xA8,0x89,0x60,0xA1,0x8A,0xC2,0x77,0x0C,0x50,0x4A,0x9B,0x00,0x8B,0xD8,
+0x8B,0xF4,0x41,0xE2,0xD0,0x83,0x8A,0x4A,0x1C,0x14,0x06,0xB0,0xA3,0x68,0x05,0x70,
+0x31,0x30,0xA7,0x53,0x9B,0x0E,0xE9,0x4A,0xA0,0x58,0x69,0x67,0x0E,0xAE,0x9D,0xF6,
+0xA5,0x2C,0x41,0xBF,0x3C,0x06,0x6B,0xE4,0x59,0xCC,0x6D,0x10,0xF1,0x96,0x6F,0x1F,
+0xDF,0xF4,0x04,0x02,0xA4,0x9F,0x45,0x3E,0xC8,0xD8,0xFA,0x36,0x46,0x44,0x50,0x3F,
+0x82,0x97,0x91,0x1F,0x28,0xDB,0x18,0x11,0x8C,0x2A,0xE4,0x65,0x83,0x57,0x12,0x12,
+0x8C,0x17,0x3F,0x94,0x36,0xFE,0x5D,0xB0,0xC0,0x04,0x77,0x13,0xB8,0xF4,0x15,0xD5,
+0x3F,0x38,0xCC,0x94,0x3A,0x55,0xD0,0xAC,0x98,0xF5,0xBA,0x00,0x5F,0xE0,0x86,0x19,
+0x81,0x78,0x2F,0x28,0xC0,0x7E,0xD3,0xCC,0x42,0x0A,0xF5,0xAE,0x50,0xA0,0xD1,0x3E,
+0xC6,0xA1,0x71,0xEC,0x3F,0xA0,0x20,0x8C,0x66,0x3A,0x89,0xB4,0x8E,0xD4,0xD8,0xB1,
+0x4D,0x25,0x47,0xEE,0x2F,0x88,0xC8,0xB5,0xE1,0x05,0x45,0xC0,0xBE,0x14,0x71,0xDE,
+0x7A,0xFD,0x8E,0x7B,0x7D,0x4D,0x08,0x96,0xA5,0x12,0x73,0xF0,0x2D,0xCA,0x37,0x27,
+0x74,0x12,0x27,0x4C,0xCB,0xB6,0x97,0xE9,0xD9,0xAE,0x08,0x6D,0x5A,0x39,0x40,0xDD,
+0x05,0x47,0x75,0x6A,0x5A,0x21,0xB3,0xA3,0x18,0xCF,0x4E,0xF7,0x2E,0x57,0xB7,0x98,
+0x70,0x5E,0xC8,0xC4,0x78,0xB0,0x62,
};
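(Note: each entry in this table is a complete DER-encoded X.509 root certificate stored as a C byte array, with its decoded subject and issuer kept in the comment above it. As a minimal, illustrative sketch of how one of these arrays could be decoded, assuming OpenSSL is available; nothing below is part of this change:)

#include <stdio.h>
#include <openssl/x509.h>

extern const unsigned char Comodo_Trusted_Services_root_certificate[1095];

int main(void) {
  const unsigned char *p = Comodo_Trusted_Services_root_certificate;
  /* d2i_X509 parses one DER certificate and advances p past it. */
  X509 *cert = d2i_X509(NULL, &p,
                        sizeof(Comodo_Trusted_Services_root_certificate));
  if (cert == NULL) {
    fprintf(stderr, "failed to parse DER certificate\n");
    return 1;
  }
  char buf[256];
  /* Expected to match the subject comment above:
     /C=GB/ST=Greater Manchester/L=Salford/... */
  X509_NAME_oneline(X509_get_subject_name(cert), buf, sizeof(buf));
  printf("subject: %s\n", buf);
  X509_free(cert);
  return 0;
}

(The same pattern applies to every array in this file; only the symbol name and declared length differ.)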
-/* subject:/C=IL/O=StartCom Ltd./CN=StartCom Certification Authority G2 */
-/* issuer :/C=IL/O=StartCom Ltd./CN=StartCom Certification Authority G2 */
-const unsigned char StartCom_Certification_Authority_G2_certificate[1383]={
-0x30,0x82,0x05,0x63,0x30,0x82,0x03,0x4B,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x3B,
-0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x30,
-0x53,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x49,0x4C,0x31,0x16,
-0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x53,0x74,0x61,0x72,0x74,0x43,0x6F,
-0x6D,0x20,0x4C,0x74,0x64,0x2E,0x31,0x2C,0x30,0x2A,0x06,0x03,0x55,0x04,0x03,0x13,
-0x23,0x53,0x74,0x61,0x72,0x74,0x43,0x6F,0x6D,0x20,0x43,0x65,0x72,0x74,0x69,0x66,
-0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,
-0x79,0x20,0x47,0x32,0x30,0x1E,0x17,0x0D,0x31,0x30,0x30,0x31,0x30,0x31,0x30,0x31,
-0x30,0x30,0x30,0x31,0x5A,0x17,0x0D,0x33,0x39,0x31,0x32,0x33,0x31,0x32,0x33,0x35,
-0x39,0x30,0x31,0x5A,0x30,0x53,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,
-0x02,0x49,0x4C,0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x53,0x74,
-0x61,0x72,0x74,0x43,0x6F,0x6D,0x20,0x4C,0x74,0x64,0x2E,0x31,0x2C,0x30,0x2A,0x06,
-0x03,0x55,0x04,0x03,0x13,0x23,0x53,0x74,0x61,0x72,0x74,0x43,0x6F,0x6D,0x20,0x43,
-0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,
-0x68,0x6F,0x72,0x69,0x74,0x79,0x20,0x47,0x32,0x30,0x82,0x02,0x22,0x30,0x0D,0x06,
-0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x02,0x0F,
-0x00,0x30,0x82,0x02,0x0A,0x02,0x82,0x02,0x01,0x00,0xB6,0x89,0x36,0x5B,0x07,0xB7,
-0x20,0x36,0xBD,0x82,0xBB,0xE1,0x16,0x20,0x03,0x95,0x7A,0xAF,0x0E,0xA3,0x55,0xC9,
-0x25,0x99,0x4A,0xC5,0xD0,0x56,0x41,0x87,0x90,0x4D,0x21,0x60,0xA4,0x14,0x87,0x3B,
-0xCD,0xFD,0xB2,0x3E,0xB4,0x67,0x03,0x6A,0xED,0xE1,0x0F,0x4B,0xC0,0x91,0x85,0x70,
-0x45,0xE0,0x42,0x9E,0xDE,0x29,0x23,0xD4,0x01,0x0D,0xA0,0x10,0x79,0xB8,0xDB,0x03,
-0xBD,0xF3,0xA9,0x2F,0xD1,0xC6,0xE0,0x0F,0xCB,0x9E,0x8A,0x14,0x0A,0xB8,0xBD,0xF6,
-0x56,0x62,0xF1,0xC5,0x72,0xB6,0x32,0x25,0xD9,0xB2,0xF3,0xBD,0x65,0xC5,0x0D,0x2C,
-0x6E,0xD5,0x92,0x6F,0x18,0x8B,0x00,0x41,0x14,0x82,0x6F,0x40,0x20,0x26,0x7A,0x28,
-0x0F,0xF5,0x1E,0x7F,0x27,0xF7,0x94,0xB1,0x37,0x3D,0xB7,0xC7,0x91,0xF7,0xE2,0x01,
-0xEC,0xFD,0x94,0x89,0xE1,0xCC,0x6E,0xD3,0x36,0xD6,0x0A,0x19,0x79,0xAE,0xD7,0x34,
-0x82,0x65,0xFF,0x7C,0x42,0xBB,0xB6,0xDD,0x0B,0xA6,0x34,0xAF,0x4B,0x60,0xFE,0x7F,
-0x43,0x49,0x06,0x8B,0x8C,0x43,0xB8,0x56,0xF2,0xD9,0x7F,0x21,0x43,0x17,0xEA,0xA7,
-0x48,0x95,0x01,0x75,0x75,0xEA,0x2B,0xA5,0x43,0x95,0xEA,0x15,0x84,0x9D,0x08,0x8D,
-0x26,0x6E,0x55,0x9B,0xAB,0xDC,0xD2,0x39,0xD2,0x31,0x1D,0x60,0xE2,0xAC,0xCC,0x56,
-0x45,0x24,0xF5,0x1C,0x54,0xAB,0xEE,0x86,0xDD,0x96,0x32,0x85,0xF8,0x4C,0x4F,0xE8,
-0x95,0x76,0xB6,0x05,0xDD,0x36,0x23,0x67,0xBC,0xFF,0x15,0xE2,0xCA,0x3B,0xE6,0xA6,
-0xEC,0x3B,0xEC,0x26,0x11,0x34,0x48,0x8D,0xF6,0x80,0x2B,0x1A,0x23,0x02,0xEB,0x8A,
-0x1C,0x3A,0x76,0x2A,0x7B,0x56,0x16,0x1C,0x72,0x2A,0xB3,0xAA,0xE3,0x60,0xA5,0x00,
-0x9F,0x04,0x9B,0xE2,0x6F,0x1E,0x14,0x58,0x5B,0xA5,0x6C,0x8B,0x58,0x3C,0xC3,0xBA,
-0x4E,0x3A,0x5C,0xF7,0xE1,0x96,0x2B,0x3E,0xEF,0x07,0xBC,0xA4,0xE5,0x5D,0xCC,0x4D,
-0x9F,0x0D,0xE1,0xDC,0xAA,0xBB,0xE1,0x6E,0x1A,0xEC,0x8F,0xE1,0xB6,0x4C,0x4D,0x79,
-0x72,0x5D,0x17,0x35,0x0B,0x1D,0xD7,0xC1,0x47,0xDA,0x96,0x24,0xE0,0xD0,0x72,0xA8,
-0x5A,0x5F,0x66,0x2D,0x10,0xDC,0x2F,0x2A,0x13,0xAE,0x26,0xFE,0x0A,0x1C,0x19,0xCC,
-0xD0,0x3E,0x0B,0x9C,0xC8,0x09,0x2E,0xF9,0x5B,0x96,0x7A,0x47,0x9C,0xE9,0x7A,0xF3,
-0x05,0x50,0x74,0x95,0x73,0x9E,0x30,0x09,0xF3,0x97,0x82,0x5E,0xE6,0x8F,0x39,0x08,
-0x1E,0x59,0xE5,0x35,0x14,0x42,0x13,0xFF,0x00,0x9C,0xF7,0xBE,0xAA,0x50,0xCF,0xE2,
-0x51,0x48,0xD7,0xB8,0x6F,0xAF,0xF8,0x4E,0x7E,0x33,0x98,0x92,0x14,0x62,0x3A,0x75,
-0x63,0xCF,0x7B,0xFA,0xDE,0x82,0x3B,0xA9,0xBB,0x39,0xE2,0xC4,0xBD,0x2C,0x00,0x0E,
-0xC8,0x17,0xAC,0x13,0xEF,0x4D,0x25,0x8E,0xD8,0xB3,0x90,0x2F,0xA9,0xDA,0x29,0x7D,
-0x1D,0xAF,0x74,0x3A,0xB2,0x27,0xC0,0xC1,0x1E,0x3E,0x75,0xA3,0x16,0xA9,0xAF,0x7A,
-0x22,0x5D,0x9F,0x13,0x1A,0xCF,0xA7,0xA0,0xEB,0xE3,0x86,0x0A,0xD3,0xFD,0xE6,0x96,
-0x95,0xD7,0x23,0xC8,0x37,0xDD,0xC4,0x7C,0xAA,0x36,0xAC,0x98,0x1A,0x12,0xB1,0xE0,
-0x4E,0xE8,0xB1,0x3B,0xF5,0xD6,0x6F,0xF1,0x30,0xD7,0x02,0x03,0x01,0x00,0x01,0xA3,
-0x42,0x30,0x40,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,
-0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,
-0x03,0x02,0x01,0x06,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x4B,
-0xC5,0xB4,0x40,0x6B,0xAD,0x1C,0xB3,0xA5,0x1C,0x65,0x6E,0x46,0x36,0x89,0x87,0x05,
-0x0C,0x0E,0xB6,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,
-0x05,0x00,0x03,0x82,0x02,0x01,0x00,0x73,0x57,0x3F,0x2C,0xD5,0x95,0x32,0x7E,0x37,
-0xDB,0x96,0x92,0xEB,0x19,0x5E,0x7E,0x53,0xE7,0x41,0xEC,0x11,0xB6,0x47,0xEF,0xB5,
-0xDE,0xED,0x74,0x5C,0xC5,0xF1,0x8E,0x49,0xE0,0xFC,0x6E,0x99,0x13,0xCD,0x9F,0x8A,
-0xDA,0xCD,0x3A,0x0A,0xD8,0x3A,0x5A,0x09,0x3F,0x5F,0x34,0xD0,0x2F,0x03,0xD2,0x66,
-0x1D,0x1A,0xBD,0x9C,0x90,0x37,0xC8,0x0C,0x8E,0x07,0x5A,0x94,0x45,0x46,0x2A,0xE6,
-0xBE,0x7A,0xDA,0xA1,0xA9,0xA4,0x69,0x12,0x92,0xB0,0x7D,0x36,0xD4,0x44,0x87,0xD7,
-0x51,0xF1,0x29,0x63,0xD6,0x75,0xCD,0x16,0xE4,0x27,0x89,0x1D,0xF8,0xC2,0x32,0x48,
-0xFD,0xDB,0x99,0xD0,0x8F,0x5F,0x54,0x74,0xCC,0xAC,0x67,0x34,0x11,0x62,0xD9,0x0C,
-0x0A,0x37,0x87,0xD1,0xA3,0x17,0x48,0x8E,0xD2,0x17,0x1D,0xF6,0xD7,0xFD,0xDB,0x65,
-0xEB,0xFD,0xA8,0xD4,0xF5,0xD6,0x4F,0xA4,0x5B,0x75,0xE8,0xC5,0xD2,0x60,0xB2,0xDB,
-0x09,0x7E,0x25,0x8B,0x7B,0xBA,0x52,0x92,0x9E,0x3E,0xE8,0xC5,0x77,0xA1,0x3C,0xE0,
-0x4A,0x73,0x6B,0x61,0xCF,0x86,0xDC,0x43,0xFF,0xFF,0x21,0xFE,0x23,0x5D,0x24,0x4A,
-0xF5,0xD3,0x6D,0x0F,0x62,0x04,0x05,0x57,0x82,0xDA,0x6E,0xA4,0x33,0x25,0x79,0x4B,
-0x2E,0x54,0x19,0x8B,0xCC,0x2C,0x3D,0x30,0xE9,0xD1,0x06,0xFF,0xE8,0x32,0x46,0xBE,
-0xB5,0x33,0x76,0x77,0xA8,0x01,0x5D,0x96,0xC1,0xC1,0xD5,0xBE,0xAE,0x25,0xC0,0xC9,
-0x1E,0x0A,0x09,0x20,0x88,0xA1,0x0E,0xC9,0xF3,0x6F,0x4D,0x82,0x54,0x00,0x20,0xA7,
-0xD2,0x8F,0xE4,0x39,0x54,0x17,0x2E,0x8D,0x1E,0xB8,0x1B,0xBB,0x1B,0xBD,0x9A,0x4E,
-0x3B,0x10,0x34,0xDC,0x9C,0x88,0x53,0xEF,0xA2,0x31,0x5B,0x58,0x4F,0x91,0x62,0xC8,
-0xC2,0x9A,0x9A,0xCD,0x15,0x5D,0x38,0xA9,0xD6,0xBE,0xF8,0x13,0xB5,0x9F,0x12,0x69,
-0xF2,0x50,0x62,0xAC,0xFB,0x17,0x37,0xF4,0xEE,0xB8,0x75,0x67,0x60,0x10,0xFB,0x83,
-0x50,0xF9,0x44,0xB5,0x75,0x9C,0x40,0x17,0xB2,0xFE,0xFD,0x79,0x5D,0x6E,0x58,0x58,
-0x5F,0x30,0xFC,0x00,0xAE,0xAF,0x33,0xC1,0x0E,0x4E,0x6C,0xBA,0xA7,0xA6,0xA1,0x7F,
-0x32,0xDB,0x38,0xE0,0xB1,0x72,0x17,0x0A,0x2B,0x91,0xEC,0x6A,0x63,0x26,0xED,0x89,
-0xD4,0x78,0xCC,0x74,0x1E,0x05,0xF8,0x6B,0xFE,0x8C,0x6A,0x76,0x39,0x29,0xAE,0x65,
-0x23,0x12,0x95,0x08,0x22,0x1C,0x97,0xCE,0x5B,0x06,0xEE,0x0C,0xE2,0xBB,0xBC,0x1F,
-0x44,0x93,0xF6,0xD8,0x38,0x45,0x05,0x21,0xED,0xE4,0xAD,0xAB,0x12,0xB6,0x03,0xA4,
-0x42,0x2E,0x2D,0xC4,0x09,0x3A,0x03,0x67,0x69,0x84,0x9A,0xE1,0x59,0x90,0x8A,0x28,
-0x85,0xD5,0x5D,0x74,0xB1,0xD1,0x0E,0x20,0x58,0x9B,0x13,0xA5,0xB0,0x63,0xA6,0xED,
-0x7B,0x47,0xFD,0x45,0x55,0x30,0xA4,0xEE,0x9A,0xD4,0xE6,0xE2,0x87,0xEF,0x98,0xC9,
-0x32,0x82,0x11,0x29,0x22,0xBC,0x00,0x0A,0x31,0x5E,0x2D,0x0F,0xC0,0x8E,0xE9,0x6B,
-0xB2,0x8F,0x2E,0x06,0xD8,0xD1,0x91,0xC7,0xC6,0x12,0xF4,0x4C,0xFD,0x30,0x17,0xC3,
-0xC1,0xDA,0x38,0x5B,0xE3,0xA9,0xEA,0xE6,0xA1,0xBA,0x79,0xEF,0x73,0xD8,0xB6,0x53,
-0x57,0x2D,0xF6,0xD0,0xE1,0xD7,0x48,
+/* subject:/C=US/O=Entrust, Inc./OU=www.entrust.net/CPS is incorporated by reference/OU=(c) 2006 Entrust, Inc./CN=Entrust Root Certification Authority */
+/* issuer :/C=US/O=Entrust, Inc./OU=www.entrust.net/CPS is incorporated by reference/OU=(c) 2006 Entrust, Inc./CN=Entrust Root Certification Authority */
+const unsigned char Entrust_Root_Certification_Authority_certificate[1173]={
+0x30,0x82,0x04,0x91,0x30,0x82,0x03,0x79,0xA0,0x03,0x02,0x01,0x02,0x02,0x04,0x45,
+0x6B,0x50,0x54,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,
+0x05,0x00,0x30,0x81,0xB0,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,
+0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x45,0x6E,0x74,
+0x72,0x75,0x73,0x74,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x39,0x30,0x37,0x06,0x03,
+0x55,0x04,0x0B,0x13,0x30,0x77,0x77,0x77,0x2E,0x65,0x6E,0x74,0x72,0x75,0x73,0x74,
+0x2E,0x6E,0x65,0x74,0x2F,0x43,0x50,0x53,0x20,0x69,0x73,0x20,0x69,0x6E,0x63,0x6F,
+0x72,0x70,0x6F,0x72,0x61,0x74,0x65,0x64,0x20,0x62,0x79,0x20,0x72,0x65,0x66,0x65,
+0x72,0x65,0x6E,0x63,0x65,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,0x13,0x16,
+0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x36,0x20,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,
+0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x2D,0x30,0x2B,0x06,0x03,0x55,0x04,0x03,0x13,
+0x24,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x65,
+0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,
+0x6F,0x72,0x69,0x74,0x79,0x30,0x1E,0x17,0x0D,0x30,0x36,0x31,0x31,0x32,0x37,0x32,
+0x30,0x32,0x33,0x34,0x32,0x5A,0x17,0x0D,0x32,0x36,0x31,0x31,0x32,0x37,0x32,0x30,
+0x35,0x33,0x34,0x32,0x5A,0x30,0x81,0xB0,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,
+0x06,0x13,0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,
+0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x39,0x30,
+0x37,0x06,0x03,0x55,0x04,0x0B,0x13,0x30,0x77,0x77,0x77,0x2E,0x65,0x6E,0x74,0x72,
+0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x2F,0x43,0x50,0x53,0x20,0x69,0x73,0x20,0x69,
+0x6E,0x63,0x6F,0x72,0x70,0x6F,0x72,0x61,0x74,0x65,0x64,0x20,0x62,0x79,0x20,0x72,
+0x65,0x66,0x65,0x72,0x65,0x6E,0x63,0x65,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,
+0x0B,0x13,0x16,0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x36,0x20,0x45,0x6E,0x74,0x72,
+0x75,0x73,0x74,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x2D,0x30,0x2B,0x06,0x03,0x55,
+0x04,0x03,0x13,0x24,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x20,0x52,0x6F,0x6F,0x74,
+0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,
+0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,
+0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,
+0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xB6,0x95,0xB6,0x43,0x42,0xFA,0xC6,
+0x6D,0x2A,0x6F,0x48,0xDF,0x94,0x4C,0x39,0x57,0x05,0xEE,0xC3,0x79,0x11,0x41,0x68,
+0x36,0xED,0xEC,0xFE,0x9A,0x01,0x8F,0xA1,0x38,0x28,0xFC,0xF7,0x10,0x46,0x66,0x2E,
+0x4D,0x1E,0x1A,0xB1,0x1A,0x4E,0xC6,0xD1,0xC0,0x95,0x88,0xB0,0xC9,0xFF,0x31,0x8B,
+0x33,0x03,0xDB,0xB7,0x83,0x7B,0x3E,0x20,0x84,0x5E,0xED,0xB2,0x56,0x28,0xA7,0xF8,
+0xE0,0xB9,0x40,0x71,0x37,0xC5,0xCB,0x47,0x0E,0x97,0x2A,0x68,0xC0,0x22,0x95,0x62,
+0x15,0xDB,0x47,0xD9,0xF5,0xD0,0x2B,0xFF,0x82,0x4B,0xC9,0xAD,0x3E,0xDE,0x4C,0xDB,
+0x90,0x80,0x50,0x3F,0x09,0x8A,0x84,0x00,0xEC,0x30,0x0A,0x3D,0x18,0xCD,0xFB,0xFD,
+0x2A,0x59,0x9A,0x23,0x95,0x17,0x2C,0x45,0x9E,0x1F,0x6E,0x43,0x79,0x6D,0x0C,0x5C,
+0x98,0xFE,0x48,0xA7,0xC5,0x23,0x47,0x5C,0x5E,0xFD,0x6E,0xE7,0x1E,0xB4,0xF6,0x68,
+0x45,0xD1,0x86,0x83,0x5B,0xA2,0x8A,0x8D,0xB1,0xE3,0x29,0x80,0xFE,0x25,0x71,0x88,
+0xAD,0xBE,0xBC,0x8F,0xAC,0x52,0x96,0x4B,0xAA,0x51,0x8D,0xE4,0x13,0x31,0x19,0xE8,
+0x4E,0x4D,0x9F,0xDB,0xAC,0xB3,0x6A,0xD5,0xBC,0x39,0x54,0x71,0xCA,0x7A,0x7A,0x7F,
+0x90,0xDD,0x7D,0x1D,0x80,0xD9,0x81,0xBB,0x59,0x26,0xC2,0x11,0xFE,0xE6,0x93,0xE2,
+0xF7,0x80,0xE4,0x65,0xFB,0x34,0x37,0x0E,0x29,0x80,0x70,0x4D,0xAF,0x38,0x86,0x2E,
+0x9E,0x7F,0x57,0xAF,0x9E,0x17,0xAE,0xEB,0x1C,0xCB,0x28,0x21,0x5F,0xB6,0x1C,0xD8,
+0xE7,0xA2,0x04,0x22,0xF9,0xD3,0xDA,0xD8,0xCB,0x02,0x03,0x01,0x00,0x01,0xA3,0x81,
+0xB0,0x30,0x81,0xAD,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,
+0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,
+0x30,0x03,0x01,0x01,0xFF,0x30,0x2B,0x06,0x03,0x55,0x1D,0x10,0x04,0x24,0x30,0x22,
+0x80,0x0F,0x32,0x30,0x30,0x36,0x31,0x31,0x32,0x37,0x32,0x30,0x32,0x33,0x34,0x32,
+0x5A,0x81,0x0F,0x32,0x30,0x32,0x36,0x31,0x31,0x32,0x37,0x32,0x30,0x35,0x33,0x34,
+0x32,0x5A,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,0x68,
+0x90,0xE4,0x67,0xA4,0xA6,0x53,0x80,0xC7,0x86,0x66,0xA4,0xF1,0xF7,0x4B,0x43,0xFB,
+0x84,0xBD,0x6D,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x68,0x90,
+0xE4,0x67,0xA4,0xA6,0x53,0x80,0xC7,0x86,0x66,0xA4,0xF1,0xF7,0x4B,0x43,0xFB,0x84,
+0xBD,0x6D,0x30,0x1D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF6,0x7D,0x07,0x41,0x00,0x04,
+0x10,0x30,0x0E,0x1B,0x08,0x56,0x37,0x2E,0x31,0x3A,0x34,0x2E,0x30,0x03,0x02,0x04,
+0x90,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,
+0x03,0x82,0x01,0x01,0x00,0x93,0xD4,0x30,0xB0,0xD7,0x03,0x20,0x2A,0xD0,0xF9,0x63,
+0xE8,0x91,0x0C,0x05,0x20,0xA9,0x5F,0x19,0xCA,0x7B,0x72,0x4E,0xD4,0xB1,0xDB,0xD0,
+0x96,0xFB,0x54,0x5A,0x19,0x2C,0x0C,0x08,0xF7,0xB2,0xBC,0x85,0xA8,0x9D,0x7F,0x6D,
+0x3B,0x52,0xB3,0x2A,0xDB,0xE7,0xD4,0x84,0x8C,0x63,0xF6,0x0F,0xCB,0x26,0x01,0x91,
+0x50,0x6C,0xF4,0x5F,0x14,0xE2,0x93,0x74,0xC0,0x13,0x9E,0x30,0x3A,0x50,0xE3,0xB4,
+0x60,0xC5,0x1C,0xF0,0x22,0x44,0x8D,0x71,0x47,0xAC,0xC8,0x1A,0xC9,0xE9,0x9B,0x9A,
+0x00,0x60,0x13,0xFF,0x70,0x7E,0x5F,0x11,0x4D,0x49,0x1B,0xB3,0x15,0x52,0x7B,0xC9,
+0x54,0xDA,0xBF,0x9D,0x95,0xAF,0x6B,0x9A,0xD8,0x9E,0xE9,0xF1,0xE4,0x43,0x8D,0xE2,
+0x11,0x44,0x3A,0xBF,0xAF,0xBD,0x83,0x42,0x73,0x52,0x8B,0xAA,0xBB,0xA7,0x29,0xCF,
+0xF5,0x64,0x1C,0x0A,0x4D,0xD1,0xBC,0xAA,0xAC,0x9F,0x2A,0xD0,0xFF,0x7F,0x7F,0xDA,
+0x7D,0xEA,0xB1,0xED,0x30,0x25,0xC1,0x84,0xDA,0x34,0xD2,0x5B,0x78,0x83,0x56,0xEC,
+0x9C,0x36,0xC3,0x26,0xE2,0x11,0xF6,0x67,0x49,0x1D,0x92,0xAB,0x8C,0xFB,0xEB,0xFF,
+0x7A,0xEE,0x85,0x4A,0xA7,0x50,0x80,0xF0,0xA7,0x5C,0x4A,0x94,0x2E,0x5F,0x05,0x99,
+0x3C,0x52,0x41,0xE0,0xCD,0xB4,0x63,0xCF,0x01,0x43,0xBA,0x9C,0x83,0xDC,0x8F,0x60,
+0x3B,0xF3,0x5A,0xB4,0xB4,0x7B,0xAE,0xDA,0x0B,0x90,0x38,0x75,0xEF,0x81,0x1D,0x66,
+0xD2,0xF7,0x57,0x70,0x36,0xB3,0xBF,0xFC,0x28,0xAF,0x71,0x25,0x85,0x5B,0x13,0xFE,
+0x1E,0x7F,0x5A,0xB4,0x3C,
};
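(The declared array sizes can be sanity-checked against the DER header itself. The Entrust root above starts with 0x30,0x82,0x04,0x91: a SEQUENCE whose two-byte long-form length is 0x0491 = 1169 content bytes, so 4 header bytes + 1169 = 1173, matching Entrust_Root_Certification_Authority_certificate[1173]. A small illustrative helper, not part of this change:)

#include <stddef.h>

/* Total DER length (header + content) for a TLV using the two-byte
 * long form (0x30,0x82,hi,lo), the shape every root in this file uses.
 * Returns 0 if the buffer does not start that way. */
static size_t der_total_length(const unsigned char *der, size_t len) {
  if (len < 4 || der[0] != 0x30 || der[1] != 0x82)
    return 0;
  return 4u + (((size_t)der[2] << 8) | der[3]);
}

(For the Cybertrust root below, 0x30,0x82,0x03,0xA1 gives 4 + 0x03A1 = 4 + 929 = 933, again matching the declared size.)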
@@ -3417,544 +3721,343 @@ const unsigned char TC_TrustCenter_Class_2_CA_II_certificate[1198]={
};
-/* subject:/C=DE/O=TC TrustCenter GmbH/OU=TC TrustCenter Class 3 CA/CN=TC TrustCenter Class 3 CA II */
-/* issuer :/C=DE/O=TC TrustCenter GmbH/OU=TC TrustCenter Class 3 CA/CN=TC TrustCenter Class 3 CA II */
-
-
-const unsigned char TC_TrustCenter_Class_3_CA_II_certificate[1198]={
-0x30,0x82,0x04,0xAA,0x30,0x82,0x03,0x92,0xA0,0x03,0x02,0x01,0x02,0x02,0x0E,0x4A,
-0x47,0x00,0x01,0x00,0x02,0xE5,0xA0,0x5D,0xD6,0x3F,0x00,0x51,0xBF,0x30,0x0D,0x06,
-0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x76,0x31,0x0B,
-0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x44,0x45,0x31,0x1C,0x30,0x1A,0x06,
-0x03,0x55,0x04,0x0A,0x13,0x13,0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,
-0x6E,0x74,0x65,0x72,0x20,0x47,0x6D,0x62,0x48,0x31,0x22,0x30,0x20,0x06,0x03,0x55,
-0x04,0x0B,0x13,0x19,0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,0x6E,0x74,
-0x65,0x72,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x33,0x20,0x43,0x41,0x31,0x25,0x30,
-0x23,0x06,0x03,0x55,0x04,0x03,0x13,0x1C,0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,
-0x43,0x65,0x6E,0x74,0x65,0x72,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x33,0x20,0x43,
-0x41,0x20,0x49,0x49,0x30,0x1E,0x17,0x0D,0x30,0x36,0x30,0x31,0x31,0x32,0x31,0x34,
-0x34,0x31,0x35,0x37,0x5A,0x17,0x0D,0x32,0x35,0x31,0x32,0x33,0x31,0x32,0x32,0x35,
-0x39,0x35,0x39,0x5A,0x30,0x76,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,
-0x02,0x44,0x45,0x31,0x1C,0x30,0x1A,0x06,0x03,0x55,0x04,0x0A,0x13,0x13,0x54,0x43,
-0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,0x6E,0x74,0x65,0x72,0x20,0x47,0x6D,0x62,
-0x48,0x31,0x22,0x30,0x20,0x06,0x03,0x55,0x04,0x0B,0x13,0x19,0x54,0x43,0x20,0x54,
-0x72,0x75,0x73,0x74,0x43,0x65,0x6E,0x74,0x65,0x72,0x20,0x43,0x6C,0x61,0x73,0x73,
-0x20,0x33,0x20,0x43,0x41,0x31,0x25,0x30,0x23,0x06,0x03,0x55,0x04,0x03,0x13,0x1C,
-0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,0x6E,0x74,0x65,0x72,0x20,0x43,
-0x6C,0x61,0x73,0x73,0x20,0x33,0x20,0x43,0x41,0x20,0x49,0x49,0x30,0x82,0x01,0x22,
-0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,
-0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xB4,0xE0,0xBB,
-0x51,0xBB,0x39,0x5C,0x8B,0x04,0xC5,0x4C,0x79,0x1C,0x23,0x86,0x31,0x10,0x63,0x43,
-0x55,0x27,0x3F,0xC6,0x45,0xC7,0xA4,0x3D,0xEC,0x09,0x0D,0x1A,0x1E,0x20,0xC2,0x56,
-0x1E,0xDE,0x1B,0x37,0x07,0x30,0x22,0x2F,0x6F,0xF1,0x06,0xF1,0xAB,0xAD,0xD6,0xC8,
-0xAB,0x61,0xA3,0x2F,0x43,0xC4,0xB0,0xB2,0x2D,0xFC,0xC3,0x96,0x69,0x7B,0x7E,0x8A,
-0xE4,0xCC,0xC0,0x39,0x12,0x90,0x42,0x60,0xC9,0xCC,0x35,0x68,0xEE,0xDA,0x5F,0x90,
-0x56,0x5F,0xCD,0x1C,0x4D,0x5B,0x58,0x49,0xEB,0x0E,0x01,0x4F,0x64,0xFA,0x2C,0x3C,
-0x89,0x58,0xD8,0x2F,0x2E,0xE2,0xB0,0x68,0xE9,0x22,0x3B,0x75,0x89,0xD6,0x44,0x1A,
-0x65,0xF2,0x1B,0x97,0x26,0x1D,0x28,0x6D,0xAC,0xE8,0xBD,0x59,0x1D,0x2B,0x24,0xF6,
-0xD6,0x84,0x03,0x66,0x88,0x24,0x00,0x78,0x60,0xF1,0xF8,0xAB,0xFE,0x02,0xB2,0x6B,
-0xFB,0x22,0xFB,0x35,0xE6,0x16,0xD1,0xAD,0xF6,0x2E,0x12,0xE4,0xFA,0x35,0x6A,0xE5,
-0x19,0xB9,0x5D,0xDB,0x3B,0x1E,0x1A,0xFB,0xD3,0xFF,0x15,0x14,0x08,0xD8,0x09,0x6A,
-0xBA,0x45,0x9D,0x14,0x79,0x60,0x7D,0xAF,0x40,0x8A,0x07,0x73,0xB3,0x93,0x96,0xD3,
-0x74,0x34,0x8D,0x3A,0x37,0x29,0xDE,0x5C,0xEC,0xF5,0xEE,0x2E,0x31,0xC2,0x20,0xDC,
-0xBE,0xF1,0x4F,0x7F,0x23,0x52,0xD9,0x5B,0xE2,0x64,0xD9,0x9C,0xAA,0x07,0x08,0xB5,
-0x45,0xBD,0xD1,0xD0,0x31,0xC1,0xAB,0x54,0x9F,0xA9,0xD2,0xC3,0x62,0x60,0x03,0xF1,
-0xBB,0x39,0x4A,0x92,0x4A,0x3D,0x0A,0xB9,0x9D,0xC5,0xA0,0xFE,0x37,0x02,0x03,0x01,
-0x00,0x01,0xA3,0x82,0x01,0x34,0x30,0x82,0x01,0x30,0x30,0x0F,0x06,0x03,0x55,0x1D,
-0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,
-0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x1D,0x06,0x03,0x55,
-0x1D,0x0E,0x04,0x16,0x04,0x14,0xD4,0xA2,0xFC,0x9F,0xB3,0xC3,0xD8,0x03,0xD3,0x57,
-0x5C,0x07,0xA4,0xD0,0x24,0xA7,0xC0,0xF2,0x00,0xD4,0x30,0x81,0xED,0x06,0x03,0x55,
-0x1D,0x1F,0x04,0x81,0xE5,0x30,0x81,0xE2,0x30,0x81,0xDF,0xA0,0x81,0xDC,0xA0,0x81,
-0xD9,0x86,0x35,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x77,0x77,0x77,0x2E,0x74,0x72,
-0x75,0x73,0x74,0x63,0x65,0x6E,0x74,0x65,0x72,0x2E,0x64,0x65,0x2F,0x63,0x72,0x6C,
-0x2F,0x76,0x32,0x2F,0x74,0x63,0x5F,0x63,0x6C,0x61,0x73,0x73,0x5F,0x33,0x5F,0x63,
-0x61,0x5F,0x49,0x49,0x2E,0x63,0x72,0x6C,0x86,0x81,0x9F,0x6C,0x64,0x61,0x70,0x3A,
-0x2F,0x2F,0x77,0x77,0x77,0x2E,0x74,0x72,0x75,0x73,0x74,0x63,0x65,0x6E,0x74,0x65,
-0x72,0x2E,0x64,0x65,0x2F,0x43,0x4E,0x3D,0x54,0x43,0x25,0x32,0x30,0x54,0x72,0x75,
-0x73,0x74,0x43,0x65,0x6E,0x74,0x65,0x72,0x25,0x32,0x30,0x43,0x6C,0x61,0x73,0x73,
-0x25,0x32,0x30,0x33,0x25,0x32,0x30,0x43,0x41,0x25,0x32,0x30,0x49,0x49,0x2C,0x4F,
-0x3D,0x54,0x43,0x25,0x32,0x30,0x54,0x72,0x75,0x73,0x74,0x43,0x65,0x6E,0x74,0x65,
-0x72,0x25,0x32,0x30,0x47,0x6D,0x62,0x48,0x2C,0x4F,0x55,0x3D,0x72,0x6F,0x6F,0x74,
-0x63,0x65,0x72,0x74,0x73,0x2C,0x44,0x43,0x3D,0x74,0x72,0x75,0x73,0x74,0x63,0x65,
-0x6E,0x74,0x65,0x72,0x2C,0x44,0x43,0x3D,0x64,0x65,0x3F,0x63,0x65,0x72,0x74,0x69,
-0x66,0x69,0x63,0x61,0x74,0x65,0x52,0x65,0x76,0x6F,0x63,0x61,0x74,0x69,0x6F,0x6E,
-0x4C,0x69,0x73,0x74,0x3F,0x62,0x61,0x73,0x65,0x3F,0x30,0x0D,0x06,0x09,0x2A,0x86,
-0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x36,0x60,
-0xE4,0x70,0xF7,0x06,0x20,0x43,0xD9,0x23,0x1A,0x42,0xF2,0xF8,0xA3,0xB2,0xB9,0x4D,
-0x8A,0xB4,0xF3,0xC2,0x9A,0x55,0x31,0x7C,0xC4,0x3B,0x67,0x9A,0xB4,0xDF,0x4D,0x0E,
-0x8A,0x93,0x4A,0x17,0x8B,0x1B,0x8D,0xCA,0x89,0xE1,0xCF,0x3A,0x1E,0xAC,0x1D,0xF1,
-0x9C,0x32,0xB4,0x8E,0x59,0x76,0xA2,0x41,0x85,0x25,0x37,0xA0,0x13,0xD0,0xF5,0x7C,
-0x4E,0xD5,0xEA,0x96,0xE2,0x6E,0x72,0xC1,0xBB,0x2A,0xFE,0x6C,0x6E,0xF8,0x91,0x98,
-0x46,0xFC,0xC9,0x1B,0x57,0x5B,0xEA,0xC8,0x1A,0x3B,0x3F,0xB0,0x51,0x98,0x3C,0x07,
-0xDA,0x2C,0x59,0x01,0xDA,0x8B,0x44,0xE8,0xE1,0x74,0xFD,0xA7,0x68,0xDD,0x54,0xBA,
-0x83,0x46,0xEC,0xC8,0x46,0xB5,0xF8,0xAF,0x97,0xC0,0x3B,0x09,0x1C,0x8F,0xCE,0x72,
-0x96,0x3D,0x33,0x56,0x70,0xBC,0x96,0xCB,0xD8,0xD5,0x7D,0x20,0x9A,0x83,0x9F,0x1A,
-0xDC,0x39,0xF1,0xC5,0x72,0xA3,0x11,0x03,0xFD,0x3B,0x42,0x52,0x29,0xDB,0xE8,0x01,
-0xF7,0x9B,0x5E,0x8C,0xD6,0x8D,0x86,0x4E,0x19,0xFA,0xBC,0x1C,0xBE,0xC5,0x21,0xA5,
-0x87,0x9E,0x78,0x2E,0x36,0xDB,0x09,0x71,0xA3,0x72,0x34,0xF8,0x6C,0xE3,0x06,0x09,
-0xF2,0x5E,0x56,0xA5,0xD3,0xDD,0x98,0xFA,0xD4,0xE6,0x06,0xF4,0xF0,0xB6,0x20,0x63,
-0x4B,0xEA,0x29,0xBD,0xAA,0x82,0x66,0x1E,0xFB,0x81,0xAA,0xA7,0x37,0xAD,0x13,0x18,
-0xE6,0x92,0xC3,0x81,0xC1,0x33,0xBB,0x88,0x1E,0xA1,0xE7,0xE2,0xB4,0xBD,0x31,0x6C,
-0x0E,0x51,0x3D,0x6F,0xFB,0x96,0x56,0x80,0xE2,0x36,0x17,0xD1,0xDC,0xE4,
-};
-
-
-/* subject:/C=DE/O=TC TrustCenter GmbH/OU=TC TrustCenter Universal CA/CN=TC TrustCenter Universal CA I */
-/* issuer :/C=DE/O=TC TrustCenter GmbH/OU=TC TrustCenter Universal CA/CN=TC TrustCenter Universal CA I */
-const unsigned char TC_TrustCenter_Universal_CA_I_certificate[993]={
-0x30,0x82,0x03,0xDD,0x30,0x82,0x02,0xC5,0xA0,0x03,0x02,0x01,0x02,0x02,0x0E,0x1D,
-0xA2,0x00,0x01,0x00,0x02,0xEC,0xB7,0x60,0x80,0x78,0x8D,0xB6,0x06,0x30,0x0D,0x06,
-0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x79,0x31,0x0B,
-0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x44,0x45,0x31,0x1C,0x30,0x1A,0x06,
-0x03,0x55,0x04,0x0A,0x13,0x13,0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,
-0x6E,0x74,0x65,0x72,0x20,0x47,0x6D,0x62,0x48,0x31,0x24,0x30,0x22,0x06,0x03,0x55,
-0x04,0x0B,0x13,0x1B,0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,0x6E,0x74,
-0x65,0x72,0x20,0x55,0x6E,0x69,0x76,0x65,0x72,0x73,0x61,0x6C,0x20,0x43,0x41,0x31,
-0x26,0x30,0x24,0x06,0x03,0x55,0x04,0x03,0x13,0x1D,0x54,0x43,0x20,0x54,0x72,0x75,
-0x73,0x74,0x43,0x65,0x6E,0x74,0x65,0x72,0x20,0x55,0x6E,0x69,0x76,0x65,0x72,0x73,
-0x61,0x6C,0x20,0x43,0x41,0x20,0x49,0x30,0x1E,0x17,0x0D,0x30,0x36,0x30,0x33,0x32,
-0x32,0x31,0x35,0x35,0x34,0x32,0x38,0x5A,0x17,0x0D,0x32,0x35,0x31,0x32,0x33,0x31,
-0x32,0x32,0x35,0x39,0x35,0x39,0x5A,0x30,0x79,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,
-0x04,0x06,0x13,0x02,0x44,0x45,0x31,0x1C,0x30,0x1A,0x06,0x03,0x55,0x04,0x0A,0x13,
-0x13,0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,0x6E,0x74,0x65,0x72,0x20,
-0x47,0x6D,0x62,0x48,0x31,0x24,0x30,0x22,0x06,0x03,0x55,0x04,0x0B,0x13,0x1B,0x54,
-0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,0x6E,0x74,0x65,0x72,0x20,0x55,0x6E,
-0x69,0x76,0x65,0x72,0x73,0x61,0x6C,0x20,0x43,0x41,0x31,0x26,0x30,0x24,0x06,0x03,
-0x55,0x04,0x03,0x13,0x1D,0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,0x6E,
-0x74,0x65,0x72,0x20,0x55,0x6E,0x69,0x76,0x65,0x72,0x73,0x61,0x6C,0x20,0x43,0x41,
-0x20,0x49,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,
-0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,
-0x01,0x01,0x00,0xA4,0x77,0x23,0x96,0x44,0xAF,0x90,0xF4,0x31,0xA7,0x10,0xF4,0x26,
-0x87,0x9C,0xF3,0x38,0xD9,0x0F,0x5E,0xDE,0xCF,0x41,0xE8,0x31,0xAD,0xC6,0x74,0x91,
-0x24,0x96,0x78,0x1E,0x09,0xA0,0x9B,0x9A,0x95,0x4A,0x4A,0xF5,0x62,0x7C,0x02,0xA8,
-0xCA,0xAC,0xFB,0x5A,0x04,0x76,0x39,0xDE,0x5F,0xF1,0xF9,0xB3,0xBF,0xF3,0x03,0x58,
-0x55,0xD2,0xAA,0xB7,0xE3,0x04,0x22,0xD1,0xF8,0x94,0xDA,0x22,0x08,0x00,0x8D,0xD3,
-0x7C,0x26,0x5D,0xCC,0x77,0x79,0xE7,0x2C,0x78,0x39,0xA8,0x26,0x73,0x0E,0xA2,0x5D,
-0x25,0x69,0x85,0x4F,0x55,0x0E,0x9A,0xEF,0xC6,0xB9,0x44,0xE1,0x57,0x3D,0xDF,0x1F,
-0x54,0x22,0xE5,0x6F,0x65,0xAA,0x33,0x84,0x3A,0xF3,0xCE,0x7A,0xBE,0x55,0x97,0xAE,
-0x8D,0x12,0x0F,0x14,0x33,0xE2,0x50,0x70,0xC3,0x49,0x87,0x13,0xBC,0x51,0xDE,0xD7,
-0x98,0x12,0x5A,0xEF,0x3A,0x83,0x33,0x92,0x06,0x75,0x8B,0x92,0x7C,0x12,0x68,0x7B,
-0x70,0x6A,0x0F,0xB5,0x9B,0xB6,0x77,0x5B,0x48,0x59,0x9D,0xE4,0xEF,0x5A,0xAD,0xF3,
-0xC1,0x9E,0xD4,0xD7,0x45,0x4E,0xCA,0x56,0x34,0x21,0xBC,0x3E,0x17,0x5B,0x6F,0x77,
-0x0C,0x48,0x01,0x43,0x29,0xB0,0xDD,0x3F,0x96,0x6E,0xE6,0x95,0xAA,0x0C,0xC0,0x20,
-0xB6,0xFD,0x3E,0x36,0x27,0x9C,0xE3,0x5C,0xCF,0x4E,0x81,0xDC,0x19,0xBB,0x91,0x90,
-0x7D,0xEC,0xE6,0x97,0x04,0x1E,0x93,0xCC,0x22,0x49,0xD7,0x97,0x86,0xB6,0x13,0x0A,
-0x3C,0x43,0x23,0x77,0x7E,0xF0,0xDC,0xE6,0xCD,0x24,0x1F,0x3B,0x83,0x9B,0x34,0x3A,
-0x83,0x34,0xE3,0x02,0x03,0x01,0x00,0x01,0xA3,0x63,0x30,0x61,0x30,0x1F,0x06,0x03,
-0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,0x92,0xA4,0x75,0x2C,0xA4,0x9E,0xBE,
-0x81,0x44,0xEB,0x79,0xFC,0x8A,0xC5,0x95,0xA5,0xEB,0x10,0x75,0x73,0x30,0x0F,0x06,
-0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,
-0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x86,0x30,0x1D,
-0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x92,0xA4,0x75,0x2C,0xA4,0x9E,0xBE,
-0x81,0x44,0xEB,0x79,0xFC,0x8A,0xC5,0x95,0xA5,0xEB,0x10,0x75,0x73,0x30,0x0D,0x06,
-0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,
-0x00,0x28,0xD2,0xE0,0x86,0xD5,0xE6,0xF8,0x7B,0xF0,0x97,0xDC,0x22,0x6B,0x3B,0x95,
-0x14,0x56,0x0F,0x11,0x30,0xA5,0x9A,0x4F,0x3A,0xB0,0x3A,0xE0,0x06,0xCB,0x65,0xF5,
-0xED,0xC6,0x97,0x27,0xFE,0x25,0xF2,0x57,0xE6,0x5E,0x95,0x8C,0x3E,0x64,0x60,0x15,
-0x5A,0x7F,0x2F,0x0D,0x01,0xC5,0xB1,0x60,0xFD,0x45,0x35,0xCF,0xF0,0xB2,0xBF,0x06,
-0xD9,0xEF,0x5A,0xBE,0xB3,0x62,0x21,0xB4,0xD7,0xAB,0x35,0x7C,0x53,0x3E,0xA6,0x27,
-0xF1,0xA1,0x2D,0xDA,0x1A,0x23,0x9D,0xCC,0xDD,0xEC,0x3C,0x2D,0x9E,0x27,0x34,0x5D,
-0x0F,0xC2,0x36,0x79,0xBC,0xC9,0x4A,0x62,0x2D,0xED,0x6B,0xD9,0x7D,0x41,0x43,0x7C,
-0xB6,0xAA,0xCA,0xED,0x61,0xB1,0x37,0x82,0x15,0x09,0x1A,0x8A,0x16,0x30,0xD8,0xEC,
-0xC9,0xD6,0x47,0x72,0x78,0x4B,0x10,0x46,0x14,0x8E,0x5F,0x0E,0xAF,0xEC,0xC7,0x2F,
-0xAB,0x10,0xD7,0xB6,0xF1,0x6E,0xEC,0x86,0xB2,0xC2,0xE8,0x0D,0x92,0x73,0xDC,0xA2,
-0xF4,0x0F,0x3A,0xBF,0x61,0x23,0x10,0x89,0x9C,0x48,0x40,0x6E,0x70,0x00,0xB3,0xD3,
-0xBA,0x37,0x44,0x58,0x11,0x7A,0x02,0x6A,0x88,0xF0,0x37,0x34,0xF0,0x19,0xE9,0xAC,
-0xD4,0x65,0x73,0xF6,0x69,0x8C,0x64,0x94,0x3A,0x79,0x85,0x29,0xB0,0x16,0x2B,0x0C,
-0x82,0x3F,0x06,0x9C,0xC7,0xFD,0x10,0x2B,0x9E,0x0F,0x2C,0xB6,0x9E,0xE3,0x15,0xBF,
-0xD9,0x36,0x1C,0xBA,0x25,0x1A,0x52,0x3D,0x1A,0xEC,0x22,0x0C,0x1C,0xE0,0xA4,0xA2,
-0x3D,0xF0,0xE8,0x39,0xCF,0x81,0xC0,0x7B,0xED,0x5D,0x1F,0x6F,0xC5,0xD0,0x0B,0xD7,
-0x98,
+/* subject:/O=Cybertrust, Inc/CN=Cybertrust Global Root */
+/* issuer :/O=Cybertrust, Inc/CN=Cybertrust Global Root */
+const unsigned char Cybertrust_Global_Root_certificate[933]={
+0x30,0x82,0x03,0xA1,0x30,0x82,0x02,0x89,0xA0,0x03,0x02,0x01,0x02,0x02,0x0B,0x04,
+0x00,0x00,0x00,0x00,0x01,0x0F,0x85,0xAA,0x2D,0x48,0x30,0x0D,0x06,0x09,0x2A,0x86,
+0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x3B,0x31,0x18,0x30,0x16,0x06,
+0x03,0x55,0x04,0x0A,0x13,0x0F,0x43,0x79,0x62,0x65,0x72,0x74,0x72,0x75,0x73,0x74,
+0x2C,0x20,0x49,0x6E,0x63,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x03,0x13,0x16,
+0x43,0x79,0x62,0x65,0x72,0x74,0x72,0x75,0x73,0x74,0x20,0x47,0x6C,0x6F,0x62,0x61,
+0x6C,0x20,0x52,0x6F,0x6F,0x74,0x30,0x1E,0x17,0x0D,0x30,0x36,0x31,0x32,0x31,0x35,
+0x30,0x38,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32,0x31,0x31,0x32,0x31,0x35,0x30,
+0x38,0x30,0x30,0x30,0x30,0x5A,0x30,0x3B,0x31,0x18,0x30,0x16,0x06,0x03,0x55,0x04,
+0x0A,0x13,0x0F,0x43,0x79,0x62,0x65,0x72,0x74,0x72,0x75,0x73,0x74,0x2C,0x20,0x49,
+0x6E,0x63,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x03,0x13,0x16,0x43,0x79,0x62,
+0x65,0x72,0x74,0x72,0x75,0x73,0x74,0x20,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x52,
+0x6F,0x6F,0x74,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,
+0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,
+0x82,0x01,0x01,0x00,0xF8,0xC8,0xBC,0xBD,0x14,0x50,0x66,0x13,0xFF,0xF0,0xD3,0x79,
+0xEC,0x23,0xF2,0xB7,0x1A,0xC7,0x8E,0x85,0xF1,0x12,0x73,0xA6,0x19,0xAA,0x10,0xDB,
+0x9C,0xA2,0x65,0x74,0x5A,0x77,0x3E,0x51,0x7D,0x56,0xF6,0xDC,0x23,0xB6,0xD4,0xED,
+0x5F,0x58,0xB1,0x37,0x4D,0xD5,0x49,0x0E,0x6E,0xF5,0x6A,0x87,0xD6,0xD2,0x8C,0xD2,
+0x27,0xC6,0xE2,0xFF,0x36,0x9F,0x98,0x65,0xA0,0x13,0x4E,0xC6,0x2A,0x64,0x9B,0xD5,
+0x90,0x12,0xCF,0x14,0x06,0xF4,0x3B,0xE3,0xD4,0x28,0xBE,0xE8,0x0E,0xF8,0xAB,0x4E,
+0x48,0x94,0x6D,0x8E,0x95,0x31,0x10,0x5C,0xED,0xA2,0x2D,0xBD,0xD5,0x3A,0x6D,0xB2,
+0x1C,0xBB,0x60,0xC0,0x46,0x4B,0x01,0xF5,0x49,0xAE,0x7E,0x46,0x8A,0xD0,0x74,0x8D,
+0xA1,0x0C,0x02,0xCE,0xEE,0xFC,0xE7,0x8F,0xB8,0x6B,0x66,0xF3,0x7F,0x44,0x00,0xBF,
+0x66,0x25,0x14,0x2B,0xDD,0x10,0x30,0x1D,0x07,0x96,0x3F,0x4D,0xF6,0x6B,0xB8,0x8F,
+0xB7,0x7B,0x0C,0xA5,0x38,0xEB,0xDE,0x47,0xDB,0xD5,0x5D,0x39,0xFC,0x88,0xA7,0xF3,
+0xD7,0x2A,0x74,0xF1,0xE8,0x5A,0xA2,0x3B,0x9F,0x50,0xBA,0xA6,0x8C,0x45,0x35,0xC2,
+0x50,0x65,0x95,0xDC,0x63,0x82,0xEF,0xDD,0xBF,0x77,0x4D,0x9C,0x62,0xC9,0x63,0x73,
+0x16,0xD0,0x29,0x0F,0x49,0xA9,0x48,0xF0,0xB3,0xAA,0xB7,0x6C,0xC5,0xA7,0x30,0x39,
+0x40,0x5D,0xAE,0xC4,0xE2,0x5D,0x26,0x53,0xF0,0xCE,0x1C,0x23,0x08,0x61,0xA8,0x94,
+0x19,0xBA,0x04,0x62,0x40,0xEC,0x1F,0x38,0x70,0x77,0x12,0x06,0x71,0xA7,0x30,0x18,
+0x5D,0x25,0x27,0xA5,0x02,0x03,0x01,0x00,0x01,0xA3,0x81,0xA5,0x30,0x81,0xA2,0x30,
+0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,
+0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,
+0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xB6,0x08,0x7B,0x0D,0x7A,
+0xCC,0xAC,0x20,0x4C,0x86,0x56,0x32,0x5E,0xCF,0xAB,0x6E,0x85,0x2D,0x70,0x57,0x30,
+0x3F,0x06,0x03,0x55,0x1D,0x1F,0x04,0x38,0x30,0x36,0x30,0x34,0xA0,0x32,0xA0,0x30,
+0x86,0x2E,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x77,0x77,0x77,0x32,0x2E,0x70,0x75,
+0x62,0x6C,0x69,0x63,0x2D,0x74,0x72,0x75,0x73,0x74,0x2E,0x63,0x6F,0x6D,0x2F,0x63,
+0x72,0x6C,0x2F,0x63,0x74,0x2F,0x63,0x74,0x72,0x6F,0x6F,0x74,0x2E,0x63,0x72,0x6C,
+0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,0xB6,0x08,0x7B,
+0x0D,0x7A,0xCC,0xAC,0x20,0x4C,0x86,0x56,0x32,0x5E,0xCF,0xAB,0x6E,0x85,0x2D,0x70,
+0x57,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,
+0x03,0x82,0x01,0x01,0x00,0x56,0xEF,0x0A,0x23,0xA0,0x54,0x4E,0x95,0x97,0xC9,0xF8,
+0x89,0xDA,0x45,0xC1,0xD4,0xA3,0x00,0x25,0xF4,0x1F,0x13,0xAB,0xB7,0xA3,0x85,0x58,
+0x69,0xC2,0x30,0xAD,0xD8,0x15,0x8A,0x2D,0xE3,0xC9,0xCD,0x81,0x5A,0xF8,0x73,0x23,
+0x5A,0xA7,0x7C,0x05,0xF3,0xFD,0x22,0x3B,0x0E,0xD1,0x06,0xC4,0xDB,0x36,0x4C,0x73,
+0x04,0x8E,0xE5,0xB0,0x22,0xE4,0xC5,0xF3,0x2E,0xA5,0xD9,0x23,0xE3,0xB8,0x4E,0x4A,
+0x20,0xA7,0x6E,0x02,0x24,0x9F,0x22,0x60,0x67,0x7B,0x8B,0x1D,0x72,0x09,0xC5,0x31,
+0x5C,0xE9,0x79,0x9F,0x80,0x47,0x3D,0xAD,0xA1,0x0B,0x07,0x14,0x3D,0x47,0xFF,0x03,
+0x69,0x1A,0x0C,0x0B,0x44,0xE7,0x63,0x25,0xA7,0x7F,0xB2,0xC9,0xB8,0x76,0x84,0xED,
+0x23,0xF6,0x7D,0x07,0xAB,0x45,0x7E,0xD3,0xDF,0xB3,0xBF,0xE9,0x8A,0xB6,0xCD,0xA8,
+0xA2,0x67,0x2B,0x52,0xD5,0xB7,0x65,0xF0,0x39,0x4C,0x63,0xA0,0x91,0x79,0x93,0x52,
+0x0F,0x54,0xDD,0x83,0xBB,0x9F,0xD1,0x8F,0xA7,0x53,0x73,0xC3,0xCB,0xFF,0x30,0xEC,
+0x7C,0x04,0xB8,0xD8,0x44,0x1F,0x93,0x5F,0x71,0x09,0x22,0xB7,0x6E,0x3E,0xEA,0x1C,
+0x03,0x4E,0x9D,0x1A,0x20,0x61,0xFB,0x81,0x37,0xEC,0x5E,0xFC,0x0A,0x45,0xAB,0xD7,
+0xE7,0x17,0x55,0xD0,0xA0,0xEA,0x60,0x9B,0xA6,0xF6,0xE3,0x8C,0x5B,0x29,0xC2,0x06,
+0x60,0x14,0x9D,0x2D,0x97,0x4C,0xA9,0x93,0x15,0x9D,0x61,0xC4,0x01,0x5F,0x48,0xD6,
+0x58,0xBD,0x56,0x31,0x12,0x4E,0x11,0xC8,0x21,0xE0,0xB3,0x11,0x91,0x65,0xDB,0xB4,
+0xA6,0x88,0x38,0xCE,0x55,
};
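(Every root in this table is self-signed, as the matching subject and issuer comments indicate, so each certificate's signature can be checked against its own public key. A hedged OpenSSL sketch, again not part of this change:)

#include <openssl/evp.h>
#include <openssl/x509.h>

/* Returns 1 if the DER certificate verifies under its own public key. */
static int is_self_signed(const unsigned char *der, long len) {
  const unsigned char *p = der;
  X509 *cert = d2i_X509(NULL, &p, len);
  if (cert == NULL)
    return 0;
  EVP_PKEY *key = X509_get_pubkey(cert);
  int ok = (key != NULL && X509_verify(cert, key) == 1);
  EVP_PKEY_free(key);
  X509_free(cert);
  return ok;
}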
-/* subject:/C=DE/O=TC TrustCenter GmbH/OU=TC TrustCenter Universal CA/CN=TC TrustCenter Universal CA III */
-/* issuer :/C=DE/O=TC TrustCenter GmbH/OU=TC TrustCenter Universal CA/CN=TC TrustCenter Universal CA III */
-
-
-const unsigned char TC_TrustCenter_Universal_CA_III_certificate[997]={
-0x30,0x82,0x03,0xE1,0x30,0x82,0x02,0xC9,0xA0,0x03,0x02,0x01,0x02,0x02,0x0E,0x63,
-0x25,0x00,0x01,0x00,0x02,0x14,0x8D,0x33,0x15,0x02,0xE4,0x6C,0xF4,0x30,0x0D,0x06,
-0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x7B,0x31,0x0B,
-0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x44,0x45,0x31,0x1C,0x30,0x1A,0x06,
-0x03,0x55,0x04,0x0A,0x13,0x13,0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,
-0x6E,0x74,0x65,0x72,0x20,0x47,0x6D,0x62,0x48,0x31,0x24,0x30,0x22,0x06,0x03,0x55,
-0x04,0x0B,0x13,0x1B,0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,0x6E,0x74,
-0x65,0x72,0x20,0x55,0x6E,0x69,0x76,0x65,0x72,0x73,0x61,0x6C,0x20,0x43,0x41,0x31,
-0x28,0x30,0x26,0x06,0x03,0x55,0x04,0x03,0x13,0x1F,0x54,0x43,0x20,0x54,0x72,0x75,
-0x73,0x74,0x43,0x65,0x6E,0x74,0x65,0x72,0x20,0x55,0x6E,0x69,0x76,0x65,0x72,0x73,
-0x61,0x6C,0x20,0x43,0x41,0x20,0x49,0x49,0x49,0x30,0x1E,0x17,0x0D,0x30,0x39,0x30,
-0x39,0x30,0x39,0x30,0x38,0x31,0x35,0x32,0x37,0x5A,0x17,0x0D,0x32,0x39,0x31,0x32,
-0x33,0x31,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x7B,0x31,0x0B,0x30,0x09,0x06,
-0x03,0x55,0x04,0x06,0x13,0x02,0x44,0x45,0x31,0x1C,0x30,0x1A,0x06,0x03,0x55,0x04,
-0x0A,0x13,0x13,0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,0x6E,0x74,0x65,
-0x72,0x20,0x47,0x6D,0x62,0x48,0x31,0x24,0x30,0x22,0x06,0x03,0x55,0x04,0x0B,0x13,
-0x1B,0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,0x6E,0x74,0x65,0x72,0x20,
-0x55,0x6E,0x69,0x76,0x65,0x72,0x73,0x61,0x6C,0x20,0x43,0x41,0x31,0x28,0x30,0x26,
-0x06,0x03,0x55,0x04,0x03,0x13,0x1F,0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,
-0x65,0x6E,0x74,0x65,0x72,0x20,0x55,0x6E,0x69,0x76,0x65,0x72,0x73,0x61,0x6C,0x20,
-0x43,0x41,0x20,0x49,0x49,0x49,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,
-0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,
-0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xC2,0xDA,0x9C,0x62,0xB0,0xB9,0x71,0x12,0xB0,
-0x0B,0xC8,0x1A,0x57,0xB2,0xAE,0x83,0x14,0x99,0xB3,0x34,0x4B,0x9B,0x90,0xA2,0xC5,
-0xE7,0xE7,0x2F,0x02,0xA0,0x4D,0x2D,0xA4,0xFA,0x85,0xDA,0x9B,0x25,0x85,0x2D,0x40,
-0x28,0x20,0x6D,0xEA,0xE0,0xBD,0xB1,0x48,0x83,0x22,0x29,0x44,0x9F,0x4E,0x83,0xEE,
-0x35,0x51,0x13,0x73,0x74,0xD5,0xBC,0xF2,0x30,0x66,0x94,0x53,0xC0,0x40,0x36,0x2F,
-0x0C,0x84,0x65,0xCE,0x0F,0x6E,0xC2,0x58,0x93,0xE8,0x2C,0x0B,0x3A,0xE9,0xC1,0x8E,
-0xFB,0xF2,0x6B,0xCA,0x3C,0xE2,0x9C,0x4E,0x8E,0xE4,0xF9,0x7D,0xD3,0x27,0x9F,0x1B,
-0xD5,0x67,0x78,0x87,0x2D,0x7F,0x0B,0x47,0xB3,0xC7,0xE8,0xC9,0x48,0x7C,0xAF,0x2F,
-0xCC,0x0A,0xD9,0x41,0xEF,0x9F,0xFE,0x9A,0xE1,0xB2,0xAE,0xF9,0x53,0xB5,0xE5,0xE9,
-0x46,0x9F,0x60,0xE3,0xDF,0x8D,0xD3,0x7F,0xFB,0x96,0x7E,0xB3,0xB5,0x72,0xF8,0x4B,
-0xAD,0x08,0x79,0xCD,0x69,0x89,0x40,0x27,0xF5,0x2A,0xC1,0xAD,0x43,0xEC,0xA4,0x53,
-0xC8,0x61,0xB6,0xF7,0xD2,0x79,0x2A,0x67,0x18,0x76,0x48,0x6D,0x5B,0x25,0x01,0xD1,
-0x26,0xC5,0xB7,0x57,0x69,0x23,0x15,0x5B,0x61,0x8A,0xAD,0xF0,0x1B,0x2D,0xD9,0xAF,
-0x5C,0xF1,0x26,0x90,0x69,0xA9,0xD5,0x0C,0x40,0xF5,0x33,0x80,0x43,0x8F,0x9C,0xA3,
-0x76,0x2A,0x45,0xB4,0xAF,0xBF,0x7F,0x3E,0x87,0x3F,0x76,0xC5,0xCD,0x2A,0xDE,0x20,
-0xC5,0x16,0x58,0xCB,0xF9,0x1B,0xF5,0x0F,0xCB,0x0D,0x11,0x52,0x64,0xB8,0xD2,0x76,
-0x62,0x77,0x83,0xF1,0x58,0x9F,0xFF,0x02,0x03,0x01,0x00,0x01,0xA3,0x63,0x30,0x61,
-0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,0x56,0xE7,0xE1,
-0x5B,0x25,0x43,0x80,0xE0,0xF6,0x8C,0xE1,0x71,0xBC,0x8E,0xE5,0x80,0x2F,0xC4,0x48,
-0xE2,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,
-0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,
-0x01,0x06,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x56,0xE7,0xE1,
-0x5B,0x25,0x43,0x80,0xE0,0xF6,0x8C,0xE1,0x71,0xBC,0x8E,0xE5,0x80,0x2F,0xC4,0x48,
-0xE2,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,
-0x03,0x82,0x01,0x01,0x00,0x83,0xC7,0xAF,0xEA,0x7F,0x4D,0x0A,0x3C,0x39,0xB1,0x68,
-0xBE,0x7B,0x6D,0x89,0x2E,0xE9,0xB3,0x09,0xE7,0x18,0x57,0x8D,0x85,0x9A,0x17,0xF3,
-0x76,0x42,0x50,0x13,0x0F,0xC7,0x90,0x6F,0x33,0xAD,0xC5,0x49,0x60,0x2B,0x6C,0x49,
-0x58,0x19,0xD4,0xE2,0xBE,0xB7,0xBF,0xAB,0x49,0xBC,0x94,0xC8,0xAB,0xBE,0x28,0x6C,
-0x16,0x68,0xE0,0xC8,0x97,0x46,0x20,0xA0,0x68,0x67,0x60,0x88,0x39,0x20,0x51,0xD8,
-0x68,0x01,0x11,0xCE,0xA7,0xF6,0x11,0x07,0xF6,0xEC,0xEC,0xAC,0x1A,0x1F,0xB2,0x66,
-0x6E,0x56,0x67,0x60,0x7A,0x74,0x5E,0xC0,0x6D,0x97,0x36,0xAE,0xB5,0x0D,0x5D,0x66,
-0x73,0xC0,0x25,0x32,0x45,0xD8,0x4A,0x06,0x07,0x8F,0xC4,0xB7,0x07,0xB1,0x4D,0x06,
-0x0D,0xE1,0xA5,0xEB,0xF4,0x75,0xCA,0xBA,0x9C,0xD0,0xBD,0xB3,0xD3,0x32,0x24,0x4C,
-0xEE,0x7E,0xE2,0x76,0x04,0x4B,0x49,0x53,0xD8,0xF2,0xE9,0x54,0x33,0xFC,0xE5,0x71,
-0x1F,0x3D,0x14,0x5C,0x96,0x4B,0xF1,0x3A,0xF2,0x00,0xBB,0x6C,0xB4,0xFA,0x96,0x55,
-0x08,0x88,0x09,0xC1,0xCC,0x91,0x19,0x29,0xB0,0x20,0x2D,0xFF,0xCB,0x38,0xA4,0x40,
-0xE1,0x17,0xBE,0x79,0x61,0x80,0xFF,0x07,0x03,0x86,0x4C,0x4E,0x7B,0x06,0x9F,0x11,
-0x86,0x8D,0x89,0xEE,0x27,0xC4,0xDB,0xE2,0xBC,0x19,0x8E,0x0B,0xC3,0xC3,0x13,0xC7,
-0x2D,0x03,0x63,0x3B,0xD3,0xE8,0xE4,0xA2,0x2A,0xC2,0x82,0x08,0x94,0x16,0x54,0xF0,
-0xEF,0x1F,0x27,0x90,0x25,0xB8,0x0D,0x0E,0x28,0x1B,0x47,0x77,0x47,0xBD,0x1C,0xA8,
-0x25,0xF1,0x94,0xB4,0x66,
-};
-/* subject:/C=ZA/ST=Western Cape/L=Cape Town/O=Thawte Consulting cc/OU=Certification Services Division/CN=Thawte Premium Server CA/emailAddress=premium-server@thawte.com */
-/* issuer :/C=ZA/ST=Western Cape/L=Cape Town/O=Thawte Consulting cc/OU=Certification Services Division/CN=Thawte Premium Server CA/emailAddress=premium-server@thawte.com */
-
-
-const unsigned char Thawte_Premium_Server_CA_certificate[811]={
-0x30,0x82,0x03,0x27,0x30,0x82,0x02,0x90,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01,
-0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x04,0x05,0x00,0x30,
-0x81,0xCE,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x5A,0x41,0x31,
-0x15,0x30,0x13,0x06,0x03,0x55,0x04,0x08,0x13,0x0C,0x57,0x65,0x73,0x74,0x65,0x72,
-0x6E,0x20,0x43,0x61,0x70,0x65,0x31,0x12,0x30,0x10,0x06,0x03,0x55,0x04,0x07,0x13,
-0x09,0x43,0x61,0x70,0x65,0x20,0x54,0x6F,0x77,0x6E,0x31,0x1D,0x30,0x1B,0x06,0x03,
-0x55,0x04,0x0A,0x13,0x14,0x54,0x68,0x61,0x77,0x74,0x65,0x20,0x43,0x6F,0x6E,0x73,
-0x75,0x6C,0x74,0x69,0x6E,0x67,0x20,0x63,0x63,0x31,0x28,0x30,0x26,0x06,0x03,0x55,
-0x04,0x0B,0x13,0x1F,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,
-0x6E,0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x20,0x44,0x69,0x76,0x69,0x73,
-0x69,0x6F,0x6E,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x03,0x13,0x18,0x54,0x68,
-0x61,0x77,0x74,0x65,0x20,0x50,0x72,0x65,0x6D,0x69,0x75,0x6D,0x20,0x53,0x65,0x72,
-0x76,0x65,0x72,0x20,0x43,0x41,0x31,0x28,0x30,0x26,0x06,0x09,0x2A,0x86,0x48,0x86,
-0xF7,0x0D,0x01,0x09,0x01,0x16,0x19,0x70,0x72,0x65,0x6D,0x69,0x75,0x6D,0x2D,0x73,
-0x65,0x72,0x76,0x65,0x72,0x40,0x74,0x68,0x61,0x77,0x74,0x65,0x2E,0x63,0x6F,0x6D,
-0x30,0x1E,0x17,0x0D,0x39,0x36,0x30,0x38,0x30,0x31,0x30,0x30,0x30,0x30,0x30,0x30,
-0x5A,0x17,0x0D,0x32,0x30,0x31,0x32,0x33,0x31,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,
-0x30,0x81,0xCE,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x5A,0x41,
-0x31,0x15,0x30,0x13,0x06,0x03,0x55,0x04,0x08,0x13,0x0C,0x57,0x65,0x73,0x74,0x65,
-0x72,0x6E,0x20,0x43,0x61,0x70,0x65,0x31,0x12,0x30,0x10,0x06,0x03,0x55,0x04,0x07,
-0x13,0x09,0x43,0x61,0x70,0x65,0x20,0x54,0x6F,0x77,0x6E,0x31,0x1D,0x30,0x1B,0x06,
-0x03,0x55,0x04,0x0A,0x13,0x14,0x54,0x68,0x61,0x77,0x74,0x65,0x20,0x43,0x6F,0x6E,
-0x73,0x75,0x6C,0x74,0x69,0x6E,0x67,0x20,0x63,0x63,0x31,0x28,0x30,0x26,0x06,0x03,
-0x55,0x04,0x0B,0x13,0x1F,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,
-0x6F,0x6E,0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x20,0x44,0x69,0x76,0x69,
-0x73,0x69,0x6F,0x6E,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x03,0x13,0x18,0x54,
-0x68,0x61,0x77,0x74,0x65,0x20,0x50,0x72,0x65,0x6D,0x69,0x75,0x6D,0x20,0x53,0x65,
-0x72,0x76,0x65,0x72,0x20,0x43,0x41,0x31,0x28,0x30,0x26,0x06,0x09,0x2A,0x86,0x48,
-0x86,0xF7,0x0D,0x01,0x09,0x01,0x16,0x19,0x70,0x72,0x65,0x6D,0x69,0x75,0x6D,0x2D,
-0x73,0x65,0x72,0x76,0x65,0x72,0x40,0x74,0x68,0x61,0x77,0x74,0x65,0x2E,0x63,0x6F,
-0x6D,0x30,0x81,0x9F,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,
-0x01,0x05,0x00,0x03,0x81,0x8D,0x00,0x30,0x81,0x89,0x02,0x81,0x81,0x00,0xD2,0x36,
-0x36,0x6A,0x8B,0xD7,0xC2,0x5B,0x9E,0xDA,0x81,0x41,0x62,0x8F,0x38,0xEE,0x49,0x04,
-0x55,0xD6,0xD0,0xEF,0x1C,0x1B,0x95,0x16,0x47,0xEF,0x18,0x48,0x35,0x3A,0x52,0xF4,
-0x2B,0x6A,0x06,0x8F,0x3B,0x2F,0xEA,0x56,0xE3,0xAF,0x86,0x8D,0x9E,0x17,0xF7,0x9E,
-0xB4,0x65,0x75,0x02,0x4D,0xEF,0xCB,0x09,0xA2,0x21,0x51,0xD8,0x9B,0xD0,0x67,0xD0,
-0xBA,0x0D,0x92,0x06,0x14,0x73,0xD4,0x93,0xCB,0x97,0x2A,0x00,0x9C,0x5C,0x4E,0x0C,
-0xBC,0xFA,0x15,0x52,0xFC,0xF2,0x44,0x6E,0xDA,0x11,0x4A,0x6E,0x08,0x9F,0x2F,0x2D,
-0xE3,0xF9,0xAA,0x3A,0x86,0x73,0xB6,0x46,0x53,0x58,0xC8,0x89,0x05,0xBD,0x83,0x11,
-0xB8,0x73,0x3F,0xAA,0x07,0x8D,0xF4,0x42,0x4D,0xE7,0x40,0x9D,0x1C,0x37,0x02,0x03,
-0x01,0x00,0x01,0xA3,0x13,0x30,0x11,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,
-0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,
-0xF7,0x0D,0x01,0x01,0x04,0x05,0x00,0x03,0x81,0x81,0x00,0x26,0x48,0x2C,0x16,0xC2,
-0x58,0xFA,0xE8,0x16,0x74,0x0C,0xAA,0xAA,0x5F,0x54,0x3F,0xF2,0xD7,0xC9,0x78,0x60,
-0x5E,0x5E,0x6E,0x37,0x63,0x22,0x77,0x36,0x7E,0xB2,0x17,0xC4,0x34,0xB9,0xF5,0x08,
-0x85,0xFC,0xC9,0x01,0x38,0xFF,0x4D,0xBE,0xF2,0x16,0x42,0x43,0xE7,0xBB,0x5A,0x46,
-0xFB,0xC1,0xC6,0x11,0x1F,0xF1,0x4A,0xB0,0x28,0x46,0xC9,0xC3,0xC4,0x42,0x7D,0xBC,
-0xFA,0xAB,0x59,0x6E,0xD5,0xB7,0x51,0x88,0x11,0xE3,0xA4,0x85,0x19,0x6B,0x82,0x4C,
-0xA4,0x0C,0x12,0xAD,0xE9,0xA4,0xAE,0x3F,0xF1,0xC3,0x49,0x65,0x9A,0x8C,0xC5,0xC8,
-0x3E,0x25,0xB7,0x94,0x99,0xBB,0x92,0x32,0x71,0x07,0xF0,0x86,0x5E,0xED,0x50,0x27,
-0xA6,0x0D,0xA6,0x23,0xF9,0xBB,0xCB,0xA6,0x07,0x14,0x42,
+/* subject:/C=US/O=Entrust, Inc./OU=See www.entrust.net/legal-terms/OU=(c) 2012 Entrust, Inc. - for authorized use only/CN=Entrust Root Certification Authority - EC1 */
+/* issuer :/C=US/O=Entrust, Inc./OU=See www.entrust.net/legal-terms/OU=(c) 2012 Entrust, Inc. - for authorized use only/CN=Entrust Root Certification Authority - EC1 */
+const unsigned char Entrust_Root_Certification_Authority___EC1_certificate[765]={
+0x30,0x82,0x02,0xF9,0x30,0x82,0x02,0x80,0xA0,0x03,0x02,0x01,0x02,0x02,0x0D,0x00,
+0xA6,0x8B,0x79,0x29,0x00,0x00,0x00,0x00,0x50,0xD0,0x91,0xF9,0x30,0x0A,0x06,0x08,
+0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x30,0x81,0xBF,0x31,0x0B,0x30,0x09,0x06,
+0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,
+0x0A,0x13,0x0D,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2C,0x20,0x49,0x6E,0x63,0x2E,
+0x31,0x28,0x30,0x26,0x06,0x03,0x55,0x04,0x0B,0x13,0x1F,0x53,0x65,0x65,0x20,0x77,
+0x77,0x77,0x2E,0x65,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x2F,0x6C,
+0x65,0x67,0x61,0x6C,0x2D,0x74,0x65,0x72,0x6D,0x73,0x31,0x39,0x30,0x37,0x06,0x03,
+0x55,0x04,0x0B,0x13,0x30,0x28,0x63,0x29,0x20,0x32,0x30,0x31,0x32,0x20,0x45,0x6E,
+0x74,0x72,0x75,0x73,0x74,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x66,0x6F,
+0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,
+0x20,0x6F,0x6E,0x6C,0x79,0x31,0x33,0x30,0x31,0x06,0x03,0x55,0x04,0x03,0x13,0x2A,
+0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x65,0x72,
+0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,
+0x72,0x69,0x74,0x79,0x20,0x2D,0x20,0x45,0x43,0x31,0x30,0x1E,0x17,0x0D,0x31,0x32,
+0x31,0x32,0x31,0x38,0x31,0x35,0x32,0x35,0x33,0x36,0x5A,0x17,0x0D,0x33,0x37,0x31,
+0x32,0x31,0x38,0x31,0x35,0x35,0x35,0x33,0x36,0x5A,0x30,0x81,0xBF,0x31,0x0B,0x30,
+0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,
+0x55,0x04,0x0A,0x13,0x0D,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2C,0x20,0x49,0x6E,
+0x63,0x2E,0x31,0x28,0x30,0x26,0x06,0x03,0x55,0x04,0x0B,0x13,0x1F,0x53,0x65,0x65,
+0x20,0x77,0x77,0x77,0x2E,0x65,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,
+0x2F,0x6C,0x65,0x67,0x61,0x6C,0x2D,0x74,0x65,0x72,0x6D,0x73,0x31,0x39,0x30,0x37,
+0x06,0x03,0x55,0x04,0x0B,0x13,0x30,0x28,0x63,0x29,0x20,0x32,0x30,0x31,0x32,0x20,
+0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,
+0x66,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,
+0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x33,0x30,0x31,0x06,0x03,0x55,0x04,0x03,
+0x13,0x2A,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,
+0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,
+0x68,0x6F,0x72,0x69,0x74,0x79,0x20,0x2D,0x20,0x45,0x43,0x31,0x30,0x76,0x30,0x10,
+0x06,0x07,0x2A,0x86,0x48,0xCE,0x3D,0x02,0x01,0x06,0x05,0x2B,0x81,0x04,0x00,0x22,
+0x03,0x62,0x00,0x04,0x84,0x13,0xC9,0xD0,0xBA,0x6D,0x41,0x7B,0xE2,0x6C,0xD0,0xEB,
+0x55,0x5F,0x66,0x02,0x1A,0x24,0xF4,0x5B,0x89,0x69,0x47,0xE3,0xB8,0xC2,0x7D,0xF1,
+0xF2,0x02,0xC5,0x9F,0xA0,0xF6,0x5B,0xD5,0x8B,0x06,0x19,0x86,0x4F,0x53,0x10,0x6D,
+0x07,0x24,0x27,0xA1,0xA0,0xF8,0xD5,0x47,0x19,0x61,0x4C,0x7D,0xCA,0x93,0x27,0xEA,
+0x74,0x0C,0xEF,0x6F,0x96,0x09,0xFE,0x63,0xEC,0x70,0x5D,0x36,0xAD,0x67,0x77,0xAE,
+0xC9,0x9D,0x7C,0x55,0x44,0x3A,0xA2,0x63,0x51,0x1F,0xF5,0xE3,0x62,0xD4,0xA9,0x47,
+0x07,0x3E,0xCC,0x20,0xA3,0x42,0x30,0x40,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,
+0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,
+0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,
+0x04,0x16,0x04,0x14,0xB7,0x63,0xE7,0x1A,0xDD,0x8D,0xE9,0x08,0xA6,0x55,0x83,0xA4,
+0xE0,0x6A,0x50,0x41,0x65,0x11,0x42,0x49,0x30,0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,
+0x3D,0x04,0x03,0x03,0x03,0x67,0x00,0x30,0x64,0x02,0x30,0x61,0x79,0xD8,0xE5,0x42,
+0x47,0xDF,0x1C,0xAE,0x53,0x99,0x17,0xB6,0x6F,0x1C,0x7D,0xE1,0xBF,0x11,0x94,0xD1,
+0x03,0x88,0x75,0xE4,0x8D,0x89,0xA4,0x8A,0x77,0x46,0xDE,0x6D,0x61,0xEF,0x02,0xF5,
+0xFB,0xB5,0xDF,0xCC,0xFE,0x4E,0xFF,0xFE,0xA9,0xE6,0xA7,0x02,0x30,0x5B,0x99,0xD7,
+0x85,0x37,0x06,0xB5,0x7B,0x08,0xFD,0xEB,0x27,0x8B,0x4A,0x94,0xF9,0xE1,0xFA,0xA7,
+0x8E,0x26,0x08,0xE8,0x7C,0x92,0x68,0x6D,0x73,0xD8,0x6F,0x26,0xAC,0x21,0x02,0xB8,
+0x99,0xB7,0x26,0x41,0x5B,0x25,0x60,0xAE,0xD0,0x48,0x1A,0xEE,0x06,
};
-/* subject:/C=US/O=thawte, Inc./OU=Certification Services Division/OU=(c) 2006 thawte, Inc. - For authorized use only/CN=thawte Primary Root CA */
-/* issuer :/C=US/O=thawte, Inc./OU=Certification Services Division/OU=(c) 2006 thawte, Inc. - For authorized use only/CN=thawte Primary Root CA */
+/* subject:/C=US/O=GeoTrust Inc./OU=(c) 2007 GeoTrust Inc. - For authorized use only/CN=GeoTrust Primary Certification Authority - G2 */
+/* issuer :/C=US/O=GeoTrust Inc./OU=(c) 2007 GeoTrust Inc. - For authorized use only/CN=GeoTrust Primary Certification Authority - G2 */
-const unsigned char thawte_Primary_Root_CA_certificate[1060]={
-0x30,0x82,0x04,0x20,0x30,0x82,0x03,0x08,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x34,
-0x4E,0xD5,0x57,0x20,0xD5,0xED,0xEC,0x49,0xF4,0x2F,0xCE,0x37,0xDB,0x2B,0x6D,0x30,
-0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x81,
-0xA9,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,
-0x30,0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x74,0x68,0x61,0x77,0x74,0x65,0x2C,
-0x20,0x49,0x6E,0x63,0x2E,0x31,0x28,0x30,0x26,0x06,0x03,0x55,0x04,0x0B,0x13,0x1F,
-0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x53,0x65,
-0x72,0x76,0x69,0x63,0x65,0x73,0x20,0x44,0x69,0x76,0x69,0x73,0x69,0x6F,0x6E,0x31,
-0x38,0x30,0x36,0x06,0x03,0x55,0x04,0x0B,0x13,0x2F,0x28,0x63,0x29,0x20,0x32,0x30,
-0x30,0x36,0x20,0x74,0x68,0x61,0x77,0x74,0x65,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,
+const unsigned char GeoTrust_Primary_Certification_Authority___G2_certificate[690]={
+0x30,0x82,0x02,0xAE,0x30,0x82,0x02,0x35,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x3C,
+0xB2,0xF4,0x48,0x0A,0x00,0xE2,0xFE,0xEB,0x24,0x3B,0x5E,0x60,0x3E,0xC3,0x6B,0x30,
+0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x30,0x81,0x98,0x31,0x0B,
+0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06,
+0x03,0x55,0x04,0x0A,0x13,0x0D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49,
+0x6E,0x63,0x2E,0x31,0x39,0x30,0x37,0x06,0x03,0x55,0x04,0x0B,0x13,0x30,0x28,0x63,
+0x29,0x20,0x32,0x30,0x30,0x37,0x20,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,
+0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,
+0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x36,
+0x30,0x34,0x06,0x03,0x55,0x04,0x03,0x13,0x2D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,
+0x74,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,0x66,
+0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,
+0x79,0x20,0x2D,0x20,0x47,0x32,0x30,0x1E,0x17,0x0D,0x30,0x37,0x31,0x31,0x30,0x35,
+0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x38,0x30,0x31,0x31,0x38,0x32,
+0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0x98,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,
+0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,
+0x0D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,0x39,
+0x30,0x37,0x06,0x03,0x55,0x04,0x0B,0x13,0x30,0x28,0x63,0x29,0x20,0x32,0x30,0x30,
+0x37,0x20,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49,0x6E,0x63,0x2E,0x20,
0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,
-0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,
-0x04,0x03,0x13,0x16,0x74,0x68,0x61,0x77,0x74,0x65,0x20,0x50,0x72,0x69,0x6D,0x61,
-0x72,0x79,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x30,0x1E,0x17,0x0D,0x30,0x36,
-0x31,0x31,0x31,0x37,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x36,0x30,
-0x37,0x31,0x36,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0xA9,0x31,0x0B,0x30,
-0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30,0x13,0x06,0x03,
-0x55,0x04,0x0A,0x13,0x0C,0x74,0x68,0x61,0x77,0x74,0x65,0x2C,0x20,0x49,0x6E,0x63,
-0x2E,0x31,0x28,0x30,0x26,0x06,0x03,0x55,0x04,0x0B,0x13,0x1F,0x43,0x65,0x72,0x74,
-0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x53,0x65,0x72,0x76,0x69,0x63,
-0x65,0x73,0x20,0x44,0x69,0x76,0x69,0x73,0x69,0x6F,0x6E,0x31,0x38,0x30,0x36,0x06,
-0x03,0x55,0x04,0x0B,0x13,0x2F,0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x36,0x20,0x74,
-0x68,0x61,0x77,0x74,0x65,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,
-0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,
-0x20,0x6F,0x6E,0x6C,0x79,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x03,0x13,0x16,
-0x74,0x68,0x61,0x77,0x74,0x65,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x52,
-0x6F,0x6F,0x74,0x20,0x43,0x41,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,
-0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,
-0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xAC,0xA0,0xF0,0xFB,0x80,0x59,0xD4,0x9C,0xC7,
-0xA4,0xCF,0x9D,0xA1,0x59,0x73,0x09,0x10,0x45,0x0C,0x0D,0x2C,0x6E,0x68,0xF1,0x6C,
-0x5B,0x48,0x68,0x49,0x59,0x37,0xFC,0x0B,0x33,0x19,0xC2,0x77,0x7F,0xCC,0x10,0x2D,
-0x95,0x34,0x1C,0xE6,0xEB,0x4D,0x09,0xA7,0x1C,0xD2,0xB8,0xC9,0x97,0x36,0x02,0xB7,
-0x89,0xD4,0x24,0x5F,0x06,0xC0,0xCC,0x44,0x94,0x94,0x8D,0x02,0x62,0x6F,0xEB,0x5A,
-0xDD,0x11,0x8D,0x28,0x9A,0x5C,0x84,0x90,0x10,0x7A,0x0D,0xBD,0x74,0x66,0x2F,0x6A,
-0x38,0xA0,0xE2,0xD5,0x54,0x44,0xEB,0x1D,0x07,0x9F,0x07,0xBA,0x6F,0xEE,0xE9,0xFD,
-0x4E,0x0B,0x29,0xF5,0x3E,0x84,0xA0,0x01,0xF1,0x9C,0xAB,0xF8,0x1C,0x7E,0x89,0xA4,
-0xE8,0xA1,0xD8,0x71,0x65,0x0D,0xA3,0x51,0x7B,0xEE,0xBC,0xD2,0x22,0x60,0x0D,0xB9,
-0x5B,0x9D,0xDF,0xBA,0xFC,0x51,0x5B,0x0B,0xAF,0x98,0xB2,0xE9,0x2E,0xE9,0x04,0xE8,
-0x62,0x87,0xDE,0x2B,0xC8,0xD7,0x4E,0xC1,0x4C,0x64,0x1E,0xDD,0xCF,0x87,0x58,0xBA,
-0x4A,0x4F,0xCA,0x68,0x07,0x1D,0x1C,0x9D,0x4A,0xC6,0xD5,0x2F,0x91,0xCC,0x7C,0x71,
-0x72,0x1C,0xC5,0xC0,0x67,0xEB,0x32,0xFD,0xC9,0x92,0x5C,0x94,0xDA,0x85,0xC0,0x9B,
-0xBF,0x53,0x7D,0x2B,0x09,0xF4,0x8C,0x9D,0x91,0x1F,0x97,0x6A,0x52,0xCB,0xDE,0x09,
-0x36,0xA4,0x77,0xD8,0x7B,0x87,0x50,0x44,0xD5,0x3E,0x6E,0x29,0x69,0xFB,0x39,0x49,
-0x26,0x1E,0x09,0xA5,0x80,0x7B,0x40,0x2D,0xEB,0xE8,0x27,0x85,0xC9,0xFE,0x61,0xFD,
-0x7E,0xE6,0x7C,0x97,0x1D,0xD5,0x9D,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,
-0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,
-0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,
-0x06,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x7B,0x5B,0x45,0xCF,
-0xAF,0xCE,0xCB,0x7A,0xFD,0x31,0x92,0x1A,0x6A,0xB6,0xF3,0x46,0xEB,0x57,0x48,0x50,
-0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,
-0x82,0x01,0x01,0x00,0x79,0x11,0xC0,0x4B,0xB3,0x91,0xB6,0xFC,0xF0,0xE9,0x67,0xD4,
-0x0D,0x6E,0x45,0xBE,0x55,0xE8,0x93,0xD2,0xCE,0x03,0x3F,0xED,0xDA,0x25,0xB0,0x1D,
-0x57,0xCB,0x1E,0x3A,0x76,0xA0,0x4C,0xEC,0x50,0x76,0xE8,0x64,0x72,0x0C,0xA4,0xA9,
-0xF1,0xB8,0x8B,0xD6,0xD6,0x87,0x84,0xBB,0x32,0xE5,0x41,0x11,0xC0,0x77,0xD9,0xB3,
-0x60,0x9D,0xEB,0x1B,0xD5,0xD1,0x6E,0x44,0x44,0xA9,0xA6,0x01,0xEC,0x55,0x62,0x1D,
-0x77,0xB8,0x5C,0x8E,0x48,0x49,0x7C,0x9C,0x3B,0x57,0x11,0xAC,0xAD,0x73,0x37,0x8E,
-0x2F,0x78,0x5C,0x90,0x68,0x47,0xD9,0x60,0x60,0xE6,0xFC,0x07,0x3D,0x22,0x20,0x17,
-0xC4,0xF7,0x16,0xE9,0xC4,0xD8,0x72,0xF9,0xC8,0x73,0x7C,0xDF,0x16,0x2F,0x15,0xA9,
-0x3E,0xFD,0x6A,0x27,0xB6,0xA1,0xEB,0x5A,0xBA,0x98,0x1F,0xD5,0xE3,0x4D,0x64,0x0A,
-0x9D,0x13,0xC8,0x61,0xBA,0xF5,0x39,0x1C,0x87,0xBA,0xB8,0xBD,0x7B,0x22,0x7F,0xF6,
-0xFE,0xAC,0x40,0x79,0xE5,0xAC,0x10,0x6F,0x3D,0x8F,0x1B,0x79,0x76,0x8B,0xC4,0x37,
-0xB3,0x21,0x18,0x84,0xE5,0x36,0x00,0xEB,0x63,0x20,0x99,0xB9,0xE9,0xFE,0x33,0x04,
-0xBB,0x41,0xC8,0xC1,0x02,0xF9,0x44,0x63,0x20,0x9E,0x81,0xCE,0x42,0xD3,0xD6,0x3F,
-0x2C,0x76,0xD3,0x63,0x9C,0x59,0xDD,0x8F,0xA6,0xE1,0x0E,0xA0,0x2E,0x41,0xF7,0x2E,
-0x95,0x47,0xCF,0xBC,0xFD,0x33,0xF3,0xF6,0x0B,0x61,0x7E,0x7E,0x91,0x2B,0x81,0x47,
-0xC2,0x27,0x30,0xEE,0xA7,0x10,0x5D,0x37,0x8F,0x5C,0x39,0x2B,0xE4,0x04,0xF0,0x7B,
-0x8D,0x56,0x8C,0x68,
+0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x36,0x30,0x34,0x06,0x03,0x55,
+0x04,0x03,0x13,0x2D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x50,0x72,0x69,
+0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,
+0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20,0x2D,0x20,0x47,
+0x32,0x30,0x76,0x30,0x10,0x06,0x07,0x2A,0x86,0x48,0xCE,0x3D,0x02,0x01,0x06,0x05,
+0x2B,0x81,0x04,0x00,0x22,0x03,0x62,0x00,0x04,0x15,0xB1,0xE8,0xFD,0x03,0x15,0x43,
+0xE5,0xAC,0xEB,0x87,0x37,0x11,0x62,0xEF,0xD2,0x83,0x36,0x52,0x7D,0x45,0x57,0x0B,
+0x4A,0x8D,0x7B,0x54,0x3B,0x3A,0x6E,0x5F,0x15,0x02,0xC0,0x50,0xA6,0xCF,0x25,0x2F,
+0x7D,0xCA,0x48,0xB8,0xC7,0x50,0x63,0x1C,0x2A,0x21,0x08,0x7C,0x9A,0x36,0xD8,0x0B,
+0xFE,0xD1,0x26,0xC5,0x58,0x31,0x30,0x28,0x25,0xF3,0x5D,0x5D,0xA3,0xB8,0xB6,0xA5,
+0xB4,0x92,0xED,0x6C,0x2C,0x9F,0xEB,0xDD,0x43,0x89,0xA2,0x3C,0x4B,0x48,0x91,0x1D,
+0x50,0xEC,0x26,0xDF,0xD6,0x60,0x2E,0xBD,0x21,0xA3,0x42,0x30,0x40,0x30,0x0F,0x06,
+0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,
+0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x1D,
+0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x15,0x5F,0x35,0x57,0x51,0x55,0xFB,
+0x25,0xB2,0xAD,0x03,0x69,0xFC,0x01,0xA3,0xFA,0xBE,0x11,0x55,0xD5,0x30,0x0A,0x06,
+0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x03,0x67,0x00,0x30,0x64,0x02,0x30,
+0x64,0x96,0x59,0xA6,0xE8,0x09,0xDE,0x8B,0xBA,0xFA,0x5A,0x88,0x88,0xF0,0x1F,0x91,
+0xD3,0x46,0xA8,0xF2,0x4A,0x4C,0x02,0x63,0xFB,0x6C,0x5F,0x38,0xDB,0x2E,0x41,0x93,
+0xA9,0x0E,0xE6,0x9D,0xDC,0x31,0x1C,0xB2,0xA0,0xA7,0x18,0x1C,0x79,0xE1,0xC7,0x36,
+0x02,0x30,0x3A,0x56,0xAF,0x9A,0x74,0x6C,0xF6,0xFB,0x83,0xE0,0x33,0xD3,0x08,0x5F,
+0xA1,0x9C,0xC2,0x5B,0x9F,0x46,0xD6,0xB6,0xCB,0x91,0x06,0x63,0xA2,0x06,0xE7,0x33,
+0xAC,0x3E,0xA8,0x81,0x12,0xD0,0xCB,0xBA,0xD0,0x92,0x0B,0xB6,0x9E,0x96,0xAA,0x04,
+0x0F,0x8A,
};
-/* subject:/C=US/O=thawte, Inc./OU=(c) 2007 thawte, Inc. - For authorized use only/CN=thawte Primary Root CA - G2 */
-/* issuer :/C=US/O=thawte, Inc./OU=(c) 2007 thawte, Inc. - For authorized use only/CN=thawte Primary Root CA - G2 */
+/* subject:/C=US/O=GeoTrust Inc./CN=GeoTrust Global CA 2 */
+/* issuer :/C=US/O=GeoTrust Inc./CN=GeoTrust Global CA 2 */
-const unsigned char thawte_Primary_Root_CA___G2_certificate[652]={
-0x30,0x82,0x02,0x88,0x30,0x82,0x02,0x0D,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x35,
-0xFC,0x26,0x5C,0xD9,0x84,0x4F,0xC9,0x3D,0x26,0x3D,0x57,0x9B,0xAE,0xD7,0x56,0x30,
-0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x30,0x81,0x84,0x31,0x0B,
-0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30,0x13,0x06,
-0x03,0x55,0x04,0x0A,0x13,0x0C,0x74,0x68,0x61,0x77,0x74,0x65,0x2C,0x20,0x49,0x6E,
-0x63,0x2E,0x31,0x38,0x30,0x36,0x06,0x03,0x55,0x04,0x0B,0x13,0x2F,0x28,0x63,0x29,
-0x20,0x32,0x30,0x30,0x37,0x20,0x74,0x68,0x61,0x77,0x74,0x65,0x2C,0x20,0x49,0x6E,
-0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,
-0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x24,0x30,0x22,
-0x06,0x03,0x55,0x04,0x03,0x13,0x1B,0x74,0x68,0x61,0x77,0x74,0x65,0x20,0x50,0x72,
-0x69,0x6D,0x61,0x72,0x79,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x20,0x2D,0x20,
-0x47,0x32,0x30,0x1E,0x17,0x0D,0x30,0x37,0x31,0x31,0x30,0x35,0x30,0x30,0x30,0x30,
-0x30,0x30,0x5A,0x17,0x0D,0x33,0x38,0x30,0x31,0x31,0x38,0x32,0x33,0x35,0x39,0x35,
-0x39,0x5A,0x30,0x81,0x84,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,
-0x55,0x53,0x31,0x15,0x30,0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x74,0x68,0x61,
-0x77,0x74,0x65,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x38,0x30,0x36,0x06,0x03,0x55,
-0x04,0x0B,0x13,0x2F,0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x37,0x20,0x74,0x68,0x61,
-0x77,0x74,0x65,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,
-0x61,0x75,0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,
-0x6E,0x6C,0x79,0x31,0x24,0x30,0x22,0x06,0x03,0x55,0x04,0x03,0x13,0x1B,0x74,0x68,
-0x61,0x77,0x74,0x65,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x52,0x6F,0x6F,
-0x74,0x20,0x43,0x41,0x20,0x2D,0x20,0x47,0x32,0x30,0x76,0x30,0x10,0x06,0x07,0x2A,
-0x86,0x48,0xCE,0x3D,0x02,0x01,0x06,0x05,0x2B,0x81,0x04,0x00,0x22,0x03,0x62,0x00,
-0x04,0xA2,0xD5,0x9C,0x82,0x7B,0x95,0x9D,0xF1,0x52,0x78,0x87,0xFE,0x8A,0x16,0xBF,
-0x05,0xE6,0xDF,0xA3,0x02,0x4F,0x0D,0x07,0xC6,0x00,0x51,0xBA,0x0C,0x02,0x52,0x2D,
-0x22,0xA4,0x42,0x39,0xC4,0xFE,0x8F,0xEA,0xC9,0xC1,0xBE,0xD4,0x4D,0xFF,0x9F,0x7A,
-0x9E,0xE2,0xB1,0x7C,0x9A,0xAD,0xA7,0x86,0x09,0x73,0x87,0xD1,0xE7,0x9A,0xE3,0x7A,
-0xA5,0xAA,0x6E,0xFB,0xBA,0xB3,0x70,0xC0,0x67,0x88,0xA2,0x35,0xD4,0xA3,0x9A,0xB1,
-0xFD,0xAD,0xC2,0xEF,0x31,0xFA,0xA8,0xB9,0xF3,0xFB,0x08,0xC6,0x91,0xD1,0xFB,0x29,
-0x95,0xA3,0x42,0x30,0x40,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,
-0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,
-0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,
-0x14,0x9A,0xD8,0x00,0x30,0x00,0xE7,0x6B,0x7F,0x85,0x18,0xEE,0x8B,0xB6,0xCE,0x8A,
-0x0C,0xF8,0x11,0xE1,0xBB,0x30,0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,
-0x03,0x03,0x69,0x00,0x30,0x66,0x02,0x31,0x00,0xDD,0xF8,0xE0,0x57,0x47,0x5B,0xA7,
-0xE6,0x0A,0xC3,0xBD,0xF5,0x80,0x8A,0x97,0x35,0x0D,0x1B,0x89,0x3C,0x54,0x86,0x77,
-0x28,0xCA,0xA1,0xF4,0x79,0xDE,0xB5,0xE6,0x38,0xB0,0xF0,0x65,0x70,0x8C,0x7F,0x02,
-0x54,0xC2,0xBF,0xFF,0xD8,0xA1,0x3E,0xD9,0xCF,0x02,0x31,0x00,0xC4,0x8D,0x94,0xFC,
-0xDC,0x53,0xD2,0xDC,0x9D,0x78,0x16,0x1F,0x15,0x33,0x23,0x53,0x52,0xE3,0x5A,0x31,
-0x5D,0x9D,0xCA,0xAE,0xBD,0x13,0x29,0x44,0x0D,0x27,0x5B,0xA8,0xE7,0x68,0x9C,0x12,
-0xF7,0x58,0x3F,0x2E,0x72,0x02,0x57,0xA3,0x8F,0xA1,0x14,0x2E,
+const unsigned char GeoTrust_Global_CA_2_certificate[874]={
+0x30,0x82,0x03,0x66,0x30,0x82,0x02,0x4E,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,
+0x44,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16,
+0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,
+0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,0x1D,0x30,0x1B,0x06,0x03,0x55,0x04,0x03,0x13,
+0x14,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x47,0x6C,0x6F,0x62,0x61,0x6C,
+0x20,0x43,0x41,0x20,0x32,0x30,0x1E,0x17,0x0D,0x30,0x34,0x30,0x33,0x30,0x34,0x30,
+0x35,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x31,0x39,0x30,0x33,0x30,0x34,0x30,0x35,
+0x30,0x30,0x30,0x30,0x5A,0x30,0x44,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,
+0x13,0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x47,
+0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,0x1D,0x30,0x1B,
+0x06,0x03,0x55,0x04,0x03,0x13,0x14,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,
+0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x43,0x41,0x20,0x32,0x30,0x82,0x01,0x22,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,
+0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xEF,0x3C,0x4D,0x40,
+0x3D,0x10,0xDF,0x3B,0x53,0x00,0xE1,0x67,0xFE,0x94,0x60,0x15,0x3E,0x85,0x88,0xF1,
+0x89,0x0D,0x90,0xC8,0x28,0x23,0x99,0x05,0xE8,0x2B,0x20,0x9D,0xC6,0xF3,0x60,0x46,
+0xD8,0xC1,0xB2,0xD5,0x8C,0x31,0xD9,0xDC,0x20,0x79,0x24,0x81,0xBF,0x35,0x32,0xFC,
+0x63,0x69,0xDB,0xB1,0x2A,0x6B,0xEE,0x21,0x58,0xF2,0x08,0xE9,0x78,0xCB,0x6F,0xCB,
+0xFC,0x16,0x52,0xC8,0x91,0xC4,0xFF,0x3D,0x73,0xDE,0xB1,0x3E,0xA7,0xC2,0x7D,0x66,
+0xC1,0xF5,0x7E,0x52,0x24,0x1A,0xE2,0xD5,0x67,0x91,0xD0,0x82,0x10,0xD7,0x78,0x4B,
+0x4F,0x2B,0x42,0x39,0xBD,0x64,0x2D,0x40,0xA0,0xB0,0x10,0xD3,0x38,0x48,0x46,0x88,
+0xA1,0x0C,0xBB,0x3A,0x33,0x2A,0x62,0x98,0xFB,0x00,0x9D,0x13,0x59,0x7F,0x6F,0x3B,
+0x72,0xAA,0xEE,0xA6,0x0F,0x86,0xF9,0x05,0x61,0xEA,0x67,0x7F,0x0C,0x37,0x96,0x8B,
+0xE6,0x69,0x16,0x47,0x11,0xC2,0x27,0x59,0x03,0xB3,0xA6,0x60,0xC2,0x21,0x40,0x56,
+0xFA,0xA0,0xC7,0x7D,0x3A,0x13,0xE3,0xEC,0x57,0xC7,0xB3,0xD6,0xAE,0x9D,0x89,0x80,
+0xF7,0x01,0xE7,0x2C,0xF6,0x96,0x2B,0x13,0x0D,0x79,0x2C,0xD9,0xC0,0xE4,0x86,0x7B,
+0x4B,0x8C,0x0C,0x72,0x82,0x8A,0xFB,0x17,0xCD,0x00,0x6C,0x3A,0x13,0x3C,0xB0,0x84,
+0x87,0x4B,0x16,0x7A,0x29,0xB2,0x4F,0xDB,0x1D,0xD4,0x0B,0xF3,0x66,0x37,0xBD,0xD8,
+0xF6,0x57,0xBB,0x5E,0x24,0x7A,0xB8,0x3C,0x8B,0xB9,0xFA,0x92,0x1A,0x1A,0x84,0x9E,
+0xD8,0x74,0x8F,0xAA,0x1B,0x7F,0x5E,0xF4,0xFE,0x45,0x22,0x21,0x02,0x03,0x01,0x00,
+0x01,0xA3,0x63,0x30,0x61,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,
+0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,
+0x14,0x71,0x38,0x36,0xF2,0x02,0x31,0x53,0x47,0x2B,0x6E,0xBA,0x65,0x46,0xA9,0x10,
+0x15,0x58,0x20,0x05,0x09,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,
+0x80,0x14,0x71,0x38,0x36,0xF2,0x02,0x31,0x53,0x47,0x2B,0x6E,0xBA,0x65,0x46,0xA9,
+0x10,0x15,0x58,0x20,0x05,0x09,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,
+0x04,0x04,0x03,0x02,0x01,0x86,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,
+0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x03,0xF7,0xB5,0x2B,0xAB,0x5D,
+0x10,0xFC,0x7B,0xB2,0xB2,0x5E,0xAC,0x9B,0x0E,0x7E,0x53,0x78,0x59,0x3E,0x42,0x04,
+0xFE,0x75,0xA3,0xAD,0xAC,0x81,0x4E,0xD7,0x02,0x8B,0x5E,0xC4,0x2D,0xC8,0x52,0x76,
+0xC7,0x2C,0x1F,0xFC,0x81,0x32,0x98,0xD1,0x4B,0xC6,0x92,0x93,0x33,0x35,0x31,0x2F,
+0xFC,0xD8,0x1D,0x44,0xDD,0xE0,0x81,0x7F,0x9D,0xE9,0x8B,0xE1,0x64,0x91,0x62,0x0B,
+0x39,0x08,0x8C,0xAC,0x74,0x9D,0x59,0xD9,0x7A,0x59,0x52,0x97,0x11,0xB9,0x16,0x7B,
+0x6F,0x45,0xD3,0x96,0xD9,0x31,0x7D,0x02,0x36,0x0F,0x9C,0x3B,0x6E,0xCF,0x2C,0x0D,
+0x03,0x46,0x45,0xEB,0xA0,0xF4,0x7F,0x48,0x44,0xC6,0x08,0x40,0xCC,0xDE,0x1B,0x70,
+0xB5,0x29,0xAD,0xBA,0x8B,0x3B,0x34,0x65,0x75,0x1B,0x71,0x21,0x1D,0x2C,0x14,0x0A,
+0xB0,0x96,0x95,0xB8,0xD6,0xEA,0xF2,0x65,0xFB,0x29,0xBA,0x4F,0xEA,0x91,0x93,0x74,
+0x69,0xB6,0xF2,0xFF,0xE1,0x1A,0xD0,0x0C,0xD1,0x76,0x85,0xCB,0x8A,0x25,0xBD,0x97,
+0x5E,0x2C,0x6F,0x15,0x99,0x26,0xE7,0xB6,0x29,0xFF,0x22,0xEC,0xC9,0x02,0xC7,0x56,
+0x00,0xCD,0x49,0xB9,0xB3,0x6C,0x7B,0x53,0x04,0x1A,0xE2,0xA8,0xC9,0xAA,0x12,0x05,
+0x23,0xC2,0xCE,0xE7,0xBB,0x04,0x02,0xCC,0xC0,0x47,0xA2,0xE4,0xC4,0x29,0x2F,0x5B,
+0x45,0x57,0x89,0x51,0xEE,0x3C,0xEB,0x52,0x08,0xFF,0x07,0x35,0x1E,0x9F,0x35,0x6A,
+0x47,0x4A,0x56,0x98,0xD1,0x5A,0x85,0x1F,0x8C,0xF5,0x22,0xBF,0xAB,0xCE,0x83,0xF3,
+0xE2,0x22,0x29,0xAE,0x7D,0x83,0x40,0xA8,0xBA,0x6C,
};
-/* subject:/C=US/O=thawte, Inc./OU=Certification Services Division/OU=(c) 2008 thawte, Inc. - For authorized use only/CN=thawte Primary Root CA - G3 */
-/* issuer :/C=US/O=thawte, Inc./OU=Certification Services Division/OU=(c) 2008 thawte, Inc. - For authorized use only/CN=thawte Primary Root CA - G3 */
-
-
-const unsigned char thawte_Primary_Root_CA___G3_certificate[1070]={
-0x30,0x82,0x04,0x2A,0x30,0x82,0x03,0x12,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x60,
-0x01,0x97,0xB7,0x46,0xA7,0xEA,0xB4,0xB4,0x9A,0xD6,0x4B,0x2F,0xF7,0x90,0xFB,0x30,
-0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x30,0x81,
-0xAE,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,
-0x30,0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x74,0x68,0x61,0x77,0x74,0x65,0x2C,
-0x20,0x49,0x6E,0x63,0x2E,0x31,0x28,0x30,0x26,0x06,0x03,0x55,0x04,0x0B,0x13,0x1F,
-0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x53,0x65,
-0x72,0x76,0x69,0x63,0x65,0x73,0x20,0x44,0x69,0x76,0x69,0x73,0x69,0x6F,0x6E,0x31,
-0x38,0x30,0x36,0x06,0x03,0x55,0x04,0x0B,0x13,0x2F,0x28,0x63,0x29,0x20,0x32,0x30,
-0x30,0x38,0x20,0x74,0x68,0x61,0x77,0x74,0x65,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,
-0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,
-0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x24,0x30,0x22,0x06,0x03,0x55,
-0x04,0x03,0x13,0x1B,0x74,0x68,0x61,0x77,0x74,0x65,0x20,0x50,0x72,0x69,0x6D,0x61,
-0x72,0x79,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x20,0x2D,0x20,0x47,0x33,0x30,
-0x1E,0x17,0x0D,0x30,0x38,0x30,0x34,0x30,0x32,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,
-0x17,0x0D,0x33,0x37,0x31,0x32,0x30,0x31,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,
-0x81,0xAE,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,
-0x15,0x30,0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x74,0x68,0x61,0x77,0x74,0x65,
-0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x28,0x30,0x26,0x06,0x03,0x55,0x04,0x0B,0x13,
-0x1F,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x53,
-0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x20,0x44,0x69,0x76,0x69,0x73,0x69,0x6F,0x6E,
-0x31,0x38,0x30,0x36,0x06,0x03,0x55,0x04,0x0B,0x13,0x2F,0x28,0x63,0x29,0x20,0x32,
-0x30,0x30,0x38,0x20,0x74,0x68,0x61,0x77,0x74,0x65,0x2C,0x20,0x49,0x6E,0x63,0x2E,
-0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,
-0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x24,0x30,0x22,0x06,0x03,
-0x55,0x04,0x03,0x13,0x1B,0x74,0x68,0x61,0x77,0x74,0x65,0x20,0x50,0x72,0x69,0x6D,
-0x61,0x72,0x79,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x20,0x2D,0x20,0x47,0x33,
-0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,
-0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,
-0x00,0xB2,0xBF,0x27,0x2C,0xFB,0xDB,0xD8,0x5B,0xDD,0x78,0x7B,0x1B,0x9E,0x77,0x66,
-0x81,0xCB,0x3E,0xBC,0x7C,0xAE,0xF3,0xA6,0x27,0x9A,0x34,0xA3,0x68,0x31,0x71,0x38,
-0x33,0x62,0xE4,0xF3,0x71,0x66,0x79,0xB1,0xA9,0x65,0xA3,0xA5,0x8B,0xD5,0x8F,0x60,
-0x2D,0x3F,0x42,0xCC,0xAA,0x6B,0x32,0xC0,0x23,0xCB,0x2C,0x41,0xDD,0xE4,0xDF,0xFC,
-0x61,0x9C,0xE2,0x73,0xB2,0x22,0x95,0x11,0x43,0x18,0x5F,0xC4,0xB6,0x1F,0x57,0x6C,
-0x0A,0x05,0x58,0x22,0xC8,0x36,0x4C,0x3A,0x7C,0xA5,0xD1,0xCF,0x86,0xAF,0x88,0xA7,
-0x44,0x02,0x13,0x74,0x71,0x73,0x0A,0x42,0x59,0x02,0xF8,0x1B,0x14,0x6B,0x42,0xDF,
-0x6F,0x5F,0xBA,0x6B,0x82,0xA2,0x9D,0x5B,0xE7,0x4A,0xBD,0x1E,0x01,0x72,0xDB,0x4B,
-0x74,0xE8,0x3B,0x7F,0x7F,0x7D,0x1F,0x04,0xB4,0x26,0x9B,0xE0,0xB4,0x5A,0xAC,0x47,
-0x3D,0x55,0xB8,0xD7,0xB0,0x26,0x52,0x28,0x01,0x31,0x40,0x66,0xD8,0xD9,0x24,0xBD,
-0xF6,0x2A,0xD8,0xEC,0x21,0x49,0x5C,0x9B,0xF6,0x7A,0xE9,0x7F,0x55,0x35,0x7E,0x96,
-0x6B,0x8D,0x93,0x93,0x27,0xCB,0x92,0xBB,0xEA,0xAC,0x40,0xC0,0x9F,0xC2,0xF8,0x80,
-0xCF,0x5D,0xF4,0x5A,0xDC,0xCE,0x74,0x86,0xA6,0x3E,0x6C,0x0B,0x53,0xCA,0xBD,0x92,
-0xCE,0x19,0x06,0x72,0xE6,0x0C,0x5C,0x38,0x69,0xC7,0x04,0xD6,0xBC,0x6C,0xCE,0x5B,
-0xF6,0xF7,0x68,0x9C,0xDC,0x25,0x15,0x48,0x88,0xA1,0xE9,0xA9,0xF8,0x98,0x9C,0xE0,
-0xF3,0xD5,0x31,0x28,0x61,0x11,0x6C,0x67,0x96,0x8D,0x39,0x99,0xCB,0xC2,0x45,0x24,
-0x39,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x0F,0x06,0x03,0x55,0x1D,
-0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,
-0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x1D,0x06,0x03,0x55,
-0x1D,0x0E,0x04,0x16,0x04,0x14,0xAD,0x6C,0xAA,0x94,0x60,0x9C,0xED,0xE4,0xFF,0xFA,
-0x3E,0x0A,0x74,0x2B,0x63,0x03,0xF7,0xB6,0x59,0xBF,0x30,0x0D,0x06,0x09,0x2A,0x86,
-0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x1A,0x40,
-0xD8,0x95,0x65,0xAC,0x09,0x92,0x89,0xC6,0x39,0xF4,0x10,0xE5,0xA9,0x0E,0x66,0x53,
-0x5D,0x78,0xDE,0xFA,0x24,0x91,0xBB,0xE7,0x44,0x51,0xDF,0xC6,0x16,0x34,0x0A,0xEF,
-0x6A,0x44,0x51,0xEA,0x2B,0x07,0x8A,0x03,0x7A,0xC3,0xEB,0x3F,0x0A,0x2C,0x52,0x16,
-0xA0,0x2B,0x43,0xB9,0x25,0x90,0x3F,0x70,0xA9,0x33,0x25,0x6D,0x45,0x1A,0x28,0x3B,
-0x27,0xCF,0xAA,0xC3,0x29,0x42,0x1B,0xDF,0x3B,0x4C,0xC0,0x33,0x34,0x5B,0x41,0x88,
-0xBF,0x6B,0x2B,0x65,0xAF,0x28,0xEF,0xB2,0xF5,0xC3,0xAA,0x66,0xCE,0x7B,0x56,0xEE,
-0xB7,0xC8,0xCB,0x67,0xC1,0xC9,0x9C,0x1A,0x18,0xB8,0xC4,0xC3,0x49,0x03,0xF1,0x60,
-0x0E,0x50,0xCD,0x46,0xC5,0xF3,0x77,0x79,0xF7,0xB6,0x15,0xE0,0x38,0xDB,0xC7,0x2F,
-0x28,0xA0,0x0C,0x3F,0x77,0x26,0x74,0xD9,0x25,0x12,0xDA,0x31,0xDA,0x1A,0x1E,0xDC,
-0x29,0x41,0x91,0x22,0x3C,0x69,0xA7,0xBB,0x02,0xF2,0xB6,0x5C,0x27,0x03,0x89,0xF4,
-0x06,0xEA,0x9B,0xE4,0x72,0x82,0xE3,0xA1,0x09,0xC1,0xE9,0x00,0x19,0xD3,0x3E,0xD4,
-0x70,0x6B,0xBA,0x71,0xA6,0xAA,0x58,0xAE,0xF4,0xBB,0xE9,0x6C,0xB6,0xEF,0x87,0xCC,
-0x9B,0xBB,0xFF,0x39,0xE6,0x56,0x61,0xD3,0x0A,0xA7,0xC4,0x5C,0x4C,0x60,0x7B,0x05,
-0x77,0x26,0x7A,0xBF,0xD8,0x07,0x52,0x2C,0x62,0xF7,0x70,0x63,0xD9,0x39,0xBC,0x6F,
-0x1C,0xC2,0x79,0xDC,0x76,0x29,0xAF,0xCE,0xC5,0x2C,0x64,0x04,0x5E,0x88,0x36,0x6E,
-0x31,0xD4,0x40,0x1A,0x62,0x34,0x36,0x3F,0x35,0x01,0xAE,0xAC,0x63,0xA0,
-};
+/* subject:/C=GB/ST=Greater Manchester/L=Salford/O=COMODO CA Limited/CN=COMODO RSA Certification Authority */
+/* issuer :/C=GB/ST=Greater Manchester/L=Salford/O=COMODO CA Limited/CN=COMODO RSA Certification Authority */
-/* subject:/C=ZA/ST=Western Cape/L=Cape Town/O=Thawte Consulting cc/OU=Certification Services Division/CN=Thawte Server CA/emailAddress=server-certs@thawte.com */
-/* issuer :/C=ZA/ST=Western Cape/L=Cape Town/O=Thawte Consulting cc/OU=Certification Services Division/CN=Thawte Server CA/emailAddress=server-certs@thawte.com */
-
-
-const unsigned char Thawte_Server_CA_certificate[791]={
-0x30,0x82,0x03,0x13,0x30,0x82,0x02,0x7C,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01,
-0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x04,0x05,0x00,0x30,
-0x81,0xC4,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x5A,0x41,0x31,
-0x15,0x30,0x13,0x06,0x03,0x55,0x04,0x08,0x13,0x0C,0x57,0x65,0x73,0x74,0x65,0x72,
-0x6E,0x20,0x43,0x61,0x70,0x65,0x31,0x12,0x30,0x10,0x06,0x03,0x55,0x04,0x07,0x13,
-0x09,0x43,0x61,0x70,0x65,0x20,0x54,0x6F,0x77,0x6E,0x31,0x1D,0x30,0x1B,0x06,0x03,
-0x55,0x04,0x0A,0x13,0x14,0x54,0x68,0x61,0x77,0x74,0x65,0x20,0x43,0x6F,0x6E,0x73,
-0x75,0x6C,0x74,0x69,0x6E,0x67,0x20,0x63,0x63,0x31,0x28,0x30,0x26,0x06,0x03,0x55,
-0x04,0x0B,0x13,0x1F,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,
-0x6E,0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x20,0x44,0x69,0x76,0x69,0x73,
-0x69,0x6F,0x6E,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x03,0x13,0x10,0x54,0x68,
-0x61,0x77,0x74,0x65,0x20,0x53,0x65,0x72,0x76,0x65,0x72,0x20,0x43,0x41,0x31,0x26,
-0x30,0x24,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x09,0x01,0x16,0x17,0x73,
-0x65,0x72,0x76,0x65,0x72,0x2D,0x63,0x65,0x72,0x74,0x73,0x40,0x74,0x68,0x61,0x77,
-0x74,0x65,0x2E,0x63,0x6F,0x6D,0x30,0x1E,0x17,0x0D,0x39,0x36,0x30,0x38,0x30,0x31,
-0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32,0x30,0x31,0x32,0x33,0x31,0x32,
-0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0xC4,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,
-0x04,0x06,0x13,0x02,0x5A,0x41,0x31,0x15,0x30,0x13,0x06,0x03,0x55,0x04,0x08,0x13,
-0x0C,0x57,0x65,0x73,0x74,0x65,0x72,0x6E,0x20,0x43,0x61,0x70,0x65,0x31,0x12,0x30,
-0x10,0x06,0x03,0x55,0x04,0x07,0x13,0x09,0x43,0x61,0x70,0x65,0x20,0x54,0x6F,0x77,
-0x6E,0x31,0x1D,0x30,0x1B,0x06,0x03,0x55,0x04,0x0A,0x13,0x14,0x54,0x68,0x61,0x77,
-0x74,0x65,0x20,0x43,0x6F,0x6E,0x73,0x75,0x6C,0x74,0x69,0x6E,0x67,0x20,0x63,0x63,
-0x31,0x28,0x30,0x26,0x06,0x03,0x55,0x04,0x0B,0x13,0x1F,0x43,0x65,0x72,0x74,0x69,
-0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,
-0x73,0x20,0x44,0x69,0x76,0x69,0x73,0x69,0x6F,0x6E,0x31,0x19,0x30,0x17,0x06,0x03,
-0x55,0x04,0x03,0x13,0x10,0x54,0x68,0x61,0x77,0x74,0x65,0x20,0x53,0x65,0x72,0x76,
-0x65,0x72,0x20,0x43,0x41,0x31,0x26,0x30,0x24,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,
-0x0D,0x01,0x09,0x01,0x16,0x17,0x73,0x65,0x72,0x76,0x65,0x72,0x2D,0x63,0x65,0x72,
-0x74,0x73,0x40,0x74,0x68,0x61,0x77,0x74,0x65,0x2E,0x63,0x6F,0x6D,0x30,0x81,0x9F,
-0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,
-0x81,0x8D,0x00,0x30,0x81,0x89,0x02,0x81,0x81,0x00,0xD3,0xA4,0x50,0x6E,0xC8,0xFF,
-0x56,0x6B,0xE6,0xCF,0x5D,0xB6,0xEA,0x0C,0x68,0x75,0x47,0xA2,0xAA,0xC2,0xDA,0x84,
-0x25,0xFC,0xA8,0xF4,0x47,0x51,0xDA,0x85,0xB5,0x20,0x74,0x94,0x86,0x1E,0x0F,0x75,
-0xC9,0xE9,0x08,0x61,0xF5,0x06,0x6D,0x30,0x6E,0x15,0x19,0x02,0xE9,0x52,0xC0,0x62,
-0xDB,0x4D,0x99,0x9E,0xE2,0x6A,0x0C,0x44,0x38,0xCD,0xFE,0xBE,0xE3,0x64,0x09,0x70,
-0xC5,0xFE,0xB1,0x6B,0x29,0xB6,0x2F,0x49,0xC8,0x3B,0xD4,0x27,0x04,0x25,0x10,0x97,
-0x2F,0xE7,0x90,0x6D,0xC0,0x28,0x42,0x99,0xD7,0x4C,0x43,0xDE,0xC3,0xF5,0x21,0x6D,
-0x54,0x9F,0x5D,0xC3,0x58,0xE1,0xC0,0xE4,0xD9,0x5B,0xB0,0xB8,0xDC,0xB4,0x7B,0xDF,
-0x36,0x3A,0xC2,0xB5,0x66,0x22,0x12,0xD6,0x87,0x0D,0x02,0x03,0x01,0x00,0x01,0xA3,
-0x13,0x30,0x11,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,
-0x03,0x01,0x01,0xFF,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,
-0x04,0x05,0x00,0x03,0x81,0x81,0x00,0x07,0xFA,0x4C,0x69,0x5C,0xFB,0x95,0xCC,0x46,
-0xEE,0x85,0x83,0x4D,0x21,0x30,0x8E,0xCA,0xD9,0xA8,0x6F,0x49,0x1A,0xE6,0xDA,0x51,
-0xE3,0x60,0x70,0x6C,0x84,0x61,0x11,0xA1,0x1A,0xC8,0x48,0x3E,0x59,0x43,0x7D,0x4F,
-0x95,0x3D,0xA1,0x8B,0xB7,0x0B,0x62,0x98,0x7A,0x75,0x8A,0xDD,0x88,0x4E,0x4E,0x9E,
-0x40,0xDB,0xA8,0xCC,0x32,0x74,0xB9,0x6F,0x0D,0xC6,0xE3,0xB3,0x44,0x0B,0xD9,0x8A,
-0x6F,0x9A,0x29,0x9B,0x99,0x18,0x28,0x3B,0xD1,0xE3,0x40,0x28,0x9A,0x5A,0x3C,0xD5,
-0xB5,0xE7,0x20,0x1B,0x8B,0xCA,0xA4,0xAB,0x8D,0xE9,0x51,0xD9,0xE2,0x4C,0x2C,0x59,
-0xA9,0xDA,0xB9,0xB2,0x75,0x1B,0xF6,0x42,0xF2,0xEF,0xC7,0xF2,0x18,0xF9,0x89,0xBC,
-0xA3,0xFF,0x8A,0x23,0x2E,0x70,0x47,
+const unsigned char COMODO_RSA_Certification_Authority_certificate[1500]={
+0x30,0x82,0x05,0xD8,0x30,0x82,0x03,0xC0,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x4C,
+0xAA,0xF9,0xCA,0xDB,0x63,0x6F,0xE0,0x1F,0xF7,0x4E,0xD8,0x5B,0x03,0x86,0x9D,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0C,0x05,0x00,0x30,0x81,
+0x85,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x47,0x42,0x31,0x1B,
+0x30,0x19,0x06,0x03,0x55,0x04,0x08,0x13,0x12,0x47,0x72,0x65,0x61,0x74,0x65,0x72,
+0x20,0x4D,0x61,0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31,0x10,0x30,0x0E,0x06,
+0x03,0x55,0x04,0x07,0x13,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,0x30,
+0x18,0x06,0x03,0x55,0x04,0x0A,0x13,0x11,0x43,0x4F,0x4D,0x4F,0x44,0x4F,0x20,0x43,
+0x41,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x2B,0x30,0x29,0x06,0x03,0x55,
+0x04,0x03,0x13,0x22,0x43,0x4F,0x4D,0x4F,0x44,0x4F,0x20,0x52,0x53,0x41,0x20,0x43,
+0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,
+0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x1E,0x17,0x0D,0x31,0x30,0x30,0x31,0x31,0x39,
+0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x38,0x30,0x31,0x31,0x38,0x32,
+0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0x85,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,
+0x04,0x06,0x13,0x02,0x47,0x42,0x31,0x1B,0x30,0x19,0x06,0x03,0x55,0x04,0x08,0x13,
+0x12,0x47,0x72,0x65,0x61,0x74,0x65,0x72,0x20,0x4D,0x61,0x6E,0x63,0x68,0x65,0x73,
+0x74,0x65,0x72,0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x07,0x13,0x07,0x53,0x61,
+0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,0x30,0x18,0x06,0x03,0x55,0x04,0x0A,0x13,0x11,
+0x43,0x4F,0x4D,0x4F,0x44,0x4F,0x20,0x43,0x41,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,
+0x64,0x31,0x2B,0x30,0x29,0x06,0x03,0x55,0x04,0x03,0x13,0x22,0x43,0x4F,0x4D,0x4F,
+0x44,0x4F,0x20,0x52,0x53,0x41,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,
+0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x82,
+0x02,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,
+0x00,0x03,0x82,0x02,0x0F,0x00,0x30,0x82,0x02,0x0A,0x02,0x82,0x02,0x01,0x00,0x91,
+0xE8,0x54,0x92,0xD2,0x0A,0x56,0xB1,0xAC,0x0D,0x24,0xDD,0xC5,0xCF,0x44,0x67,0x74,
+0x99,0x2B,0x37,0xA3,0x7D,0x23,0x70,0x00,0x71,0xBC,0x53,0xDF,0xC4,0xFA,0x2A,0x12,
+0x8F,0x4B,0x7F,0x10,0x56,0xBD,0x9F,0x70,0x72,0xB7,0x61,0x7F,0xC9,0x4B,0x0F,0x17,
+0xA7,0x3D,0xE3,0xB0,0x04,0x61,0xEE,0xFF,0x11,0x97,0xC7,0xF4,0x86,0x3E,0x0A,0xFA,
+0x3E,0x5C,0xF9,0x93,0xE6,0x34,0x7A,0xD9,0x14,0x6B,0xE7,0x9C,0xB3,0x85,0xA0,0x82,
+0x7A,0x76,0xAF,0x71,0x90,0xD7,0xEC,0xFD,0x0D,0xFA,0x9C,0x6C,0xFA,0xDF,0xB0,0x82,
+0xF4,0x14,0x7E,0xF9,0xBE,0xC4,0xA6,0x2F,0x4F,0x7F,0x99,0x7F,0xB5,0xFC,0x67,0x43,
+0x72,0xBD,0x0C,0x00,0xD6,0x89,0xEB,0x6B,0x2C,0xD3,0xED,0x8F,0x98,0x1C,0x14,0xAB,
+0x7E,0xE5,0xE3,0x6E,0xFC,0xD8,0xA8,0xE4,0x92,0x24,0xDA,0x43,0x6B,0x62,0xB8,0x55,
+0xFD,0xEA,0xC1,0xBC,0x6C,0xB6,0x8B,0xF3,0x0E,0x8D,0x9A,0xE4,0x9B,0x6C,0x69,0x99,
+0xF8,0x78,0x48,0x30,0x45,0xD5,0xAD,0xE1,0x0D,0x3C,0x45,0x60,0xFC,0x32,0x96,0x51,
+0x27,0xBC,0x67,0xC3,0xCA,0x2E,0xB6,0x6B,0xEA,0x46,0xC7,0xC7,0x20,0xA0,0xB1,0x1F,
+0x65,0xDE,0x48,0x08,0xBA,0xA4,0x4E,0xA9,0xF2,0x83,0x46,0x37,0x84,0xEB,0xE8,0xCC,
+0x81,0x48,0x43,0x67,0x4E,0x72,0x2A,0x9B,0x5C,0xBD,0x4C,0x1B,0x28,0x8A,0x5C,0x22,
+0x7B,0xB4,0xAB,0x98,0xD9,0xEE,0xE0,0x51,0x83,0xC3,0x09,0x46,0x4E,0x6D,0x3E,0x99,
+0xFA,0x95,0x17,0xDA,0x7C,0x33,0x57,0x41,0x3C,0x8D,0x51,0xED,0x0B,0xB6,0x5C,0xAF,
+0x2C,0x63,0x1A,0xDF,0x57,0xC8,0x3F,0xBC,0xE9,0x5D,0xC4,0x9B,0xAF,0x45,0x99,0xE2,
+0xA3,0x5A,0x24,0xB4,0xBA,0xA9,0x56,0x3D,0xCF,0x6F,0xAA,0xFF,0x49,0x58,0xBE,0xF0,
+0xA8,0xFF,0xF4,0xB8,0xAD,0xE9,0x37,0xFB,0xBA,0xB8,0xF4,0x0B,0x3A,0xF9,0xE8,0x43,
+0x42,0x1E,0x89,0xD8,0x84,0xCB,0x13,0xF1,0xD9,0xBB,0xE1,0x89,0x60,0xB8,0x8C,0x28,
+0x56,0xAC,0x14,0x1D,0x9C,0x0A,0xE7,0x71,0xEB,0xCF,0x0E,0xDD,0x3D,0xA9,0x96,0xA1,
+0x48,0xBD,0x3C,0xF7,0xAF,0xB5,0x0D,0x22,0x4C,0xC0,0x11,0x81,0xEC,0x56,0x3B,0xF6,
+0xD3,0xA2,0xE2,0x5B,0xB7,0xB2,0x04,0x22,0x52,0x95,0x80,0x93,0x69,0xE8,0x8E,0x4C,
+0x65,0xF1,0x91,0x03,0x2D,0x70,0x74,0x02,0xEA,0x8B,0x67,0x15,0x29,0x69,0x52,0x02,
+0xBB,0xD7,0xDF,0x50,0x6A,0x55,0x46,0xBF,0xA0,0xA3,0x28,0x61,0x7F,0x70,0xD0,0xC3,
+0xA2,0xAA,0x2C,0x21,0xAA,0x47,0xCE,0x28,0x9C,0x06,0x45,0x76,0xBF,0x82,0x18,0x27,
+0xB4,0xD5,0xAE,0xB4,0xCB,0x50,0xE6,0x6B,0xF4,0x4C,0x86,0x71,0x30,0xE9,0xA6,0xDF,
+0x16,0x86,0xE0,0xD8,0xFF,0x40,0xDD,0xFB,0xD0,0x42,0x88,0x7F,0xA3,0x33,0x3A,0x2E,
+0x5C,0x1E,0x41,0x11,0x81,0x63,0xCE,0x18,0x71,0x6B,0x2B,0xEC,0xA6,0x8A,0xB7,0x31,
+0x5C,0x3A,0x6A,0x47,0xE0,0xC3,0x79,0x59,0xD6,0x20,0x1A,0xAF,0xF2,0x6A,0x98,0xAA,
+0x72,0xBC,0x57,0x4A,0xD2,0x4B,0x9D,0xBB,0x10,0xFC,0xB0,0x4C,0x41,0xE5,0xED,0x1D,
+0x3D,0x5E,0x28,0x9D,0x9C,0xCC,0xBF,0xB3,0x51,0xDA,0xA7,0x47,0xE5,0x84,0x53,0x02,
+0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,
+0x16,0x04,0x14,0xBB,0xAF,0x7E,0x02,0x3D,0xFA,0xA6,0xF1,0x3C,0x84,0x8E,0xAD,0xEE,
+0x38,0x98,0xEC,0xD9,0x32,0x32,0xD4,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,
+0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,
+0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,
+0xF7,0x0D,0x01,0x01,0x0C,0x05,0x00,0x03,0x82,0x02,0x01,0x00,0x0A,0xF1,0xD5,0x46,
+0x84,0xB7,0xAE,0x51,0xBB,0x6C,0xB2,0x4D,0x41,0x14,0x00,0x93,0x4C,0x9C,0xCB,0xE5,
+0xC0,0x54,0xCF,0xA0,0x25,0x8E,0x02,0xF9,0xFD,0xB0,0xA2,0x0D,0xF5,0x20,0x98,0x3C,
+0x13,0x2D,0xAC,0x56,0xA2,0xB0,0xD6,0x7E,0x11,0x92,0xE9,0x2E,0xBA,0x9E,0x2E,0x9A,
+0x72,0xB1,0xBD,0x19,0x44,0x6C,0x61,0x35,0xA2,0x9A,0xB4,0x16,0x12,0x69,0x5A,0x8C,
+0xE1,0xD7,0x3E,0xA4,0x1A,0xE8,0x2F,0x03,0xF4,0xAE,0x61,0x1D,0x10,0x1B,0x2A,0xA4,
+0x8B,0x7A,0xC5,0xFE,0x05,0xA6,0xE1,0xC0,0xD6,0xC8,0xFE,0x9E,0xAE,0x8F,0x2B,0xBA,
+0x3D,0x99,0xF8,0xD8,0x73,0x09,0x58,0x46,0x6E,0xA6,0x9C,0xF4,0xD7,0x27,0xD3,0x95,
+0xDA,0x37,0x83,0x72,0x1C,0xD3,0x73,0xE0,0xA2,0x47,0x99,0x03,0x38,0x5D,0xD5,0x49,
+0x79,0x00,0x29,0x1C,0xC7,0xEC,0x9B,0x20,0x1C,0x07,0x24,0x69,0x57,0x78,0xB2,0x39,
+0xFC,0x3A,0x84,0xA0,0xB5,0x9C,0x7C,0x8D,0xBF,0x2E,0x93,0x62,0x27,0xB7,0x39,0xDA,
+0x17,0x18,0xAE,0xBD,0x3C,0x09,0x68,0xFF,0x84,0x9B,0x3C,0xD5,0xD6,0x0B,0x03,0xE3,
+0x57,0x9E,0x14,0xF7,0xD1,0xEB,0x4F,0xC8,0xBD,0x87,0x23,0xB7,0xB6,0x49,0x43,0x79,
+0x85,0x5C,0xBA,0xEB,0x92,0x0B,0xA1,0xC6,0xE8,0x68,0xA8,0x4C,0x16,0xB1,0x1A,0x99,
+0x0A,0xE8,0x53,0x2C,0x92,0xBB,0xA1,0x09,0x18,0x75,0x0C,0x65,0xA8,0x7B,0xCB,0x23,
+0xB7,0x1A,0xC2,0x28,0x85,0xC3,0x1B,0xFF,0xD0,0x2B,0x62,0xEF,0xA4,0x7B,0x09,0x91,
+0x98,0x67,0x8C,0x14,0x01,0xCD,0x68,0x06,0x6A,0x63,0x21,0x75,0x03,0x80,0x88,0x8A,
+0x6E,0x81,0xC6,0x85,0xF2,0xA9,0xA4,0x2D,0xE7,0xF4,0xA5,0x24,0x10,0x47,0x83,0xCA,
+0xCD,0xF4,0x8D,0x79,0x58,0xB1,0x06,0x9B,0xE7,0x1A,0x2A,0xD9,0x9D,0x01,0xD7,0x94,
+0x7D,0xED,0x03,0x4A,0xCA,0xF0,0xDB,0xE8,0xA9,0x01,0x3E,0xF5,0x56,0x99,0xC9,0x1E,
+0x8E,0x49,0x3D,0xBB,0xE5,0x09,0xB9,0xE0,0x4F,0x49,0x92,0x3D,0x16,0x82,0x40,0xCC,
+0xCC,0x59,0xC6,0xE6,0x3A,0xED,0x12,0x2E,0x69,0x3C,0x6C,0x95,0xB1,0xFD,0xAA,0x1D,
+0x7B,0x7F,0x86,0xBE,0x1E,0x0E,0x32,0x46,0xFB,0xFB,0x13,0x8F,0x75,0x7F,0x4C,0x8B,
+0x4B,0x46,0x63,0xFE,0x00,0x34,0x40,0x70,0xC1,0xC3,0xB9,0xA1,0xDD,0xA6,0x70,0xE2,
+0x04,0xB3,0x41,0xBC,0xE9,0x80,0x91,0xEA,0x64,0x9C,0x7A,0xE1,0x22,0x03,0xA9,0x9C,
+0x6E,0x6F,0x0E,0x65,0x4F,0x6C,0x87,0x87,0x5E,0xF3,0x6E,0xA0,0xF9,0x75,0xA5,0x9B,
+0x40,0xE8,0x53,0xB2,0x27,0x9D,0x4A,0xB9,0xC0,0x77,0x21,0x8D,0xFF,0x87,0xF2,0xDE,
+0xBC,0x8C,0xEF,0x17,0xDF,0xB7,0x49,0x0B,0xD1,0xF2,0x6E,0x30,0x0B,0x1A,0x0E,0x4E,
+0x76,0xED,0x11,0xFC,0xF5,0xE9,0x56,0xB2,0x7D,0xBF,0xC7,0x6D,0x0A,0x93,0x8C,0xA5,
+0xD0,0xC0,0xB6,0x1D,0xBE,0x3A,0x4E,0x94,0xA2,0xD7,0x6E,0x6C,0x0B,0xC2,0x8A,0x7C,
+0xFA,0x20,0xF3,0xC4,0xE4,0xE5,0xCD,0x0D,0xA8,0xCB,0x91,0x92,0xB1,0x7C,0x85,0xEC,
+0xB5,0x14,0x69,0x66,0x0E,0x82,0xE7,0xCD,0xCE,0xC8,0x2D,0xA6,0x51,0x7F,0x21,0xC1,
+0x35,0x53,0x85,0x06,0x4A,0x5D,0x9F,0xAD,0xBB,0x1B,0x5F,0x74,
};
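Each array in this table is a complete DER-encoded X.509 root certificate, with the preceding comment pair recording its subject and issuer distinguished names. As a minimal sketch of how one of these blobs can be decoded, assuming OpenSSL and its d2i_X509 API are available (the main() harness below is illustrative and not part of this file):

/* Minimal sketch: decode one embedded DER certificate with OpenSSL
 * (assumed available) and print its subject name. */
#include <stdio.h>
#include <openssl/x509.h>

extern const unsigned char COMODO_RSA_Certification_Authority_certificate[1500];

int main(void) {
  const unsigned char *p = COMODO_RSA_Certification_Authority_certificate;
  /* d2i_X509 parses the DER bytes and advances p past the structure. */
  X509 *cert = d2i_X509(NULL, &p,
                        sizeof(COMODO_RSA_Certification_Authority_certificate));
  if (cert == NULL) {
    fprintf(stderr, "failed to parse DER certificate\n");
    return 1;
  }
  X509_NAME_print_ex_fp(stdout, X509_get_subject_name(cert), 0,
                        XN_FLAG_ONELINE);
  printf("\n");
  X509_free(cert);
  return 0;
}

The array length in each declaration (here 1500) matches the DER encoding exactly, so passing sizeof of the array to the parser consumes the whole certificate.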
@@ -4037,896 +4140,127 @@ const unsigned char UTN_DATACorp_SGC_Root_CA_certificate[1122]={
};
-/* subject:/C=US/ST=UT/L=Salt Lake City/O=The USERTRUST Network/OU=http://www.usertrust.com/CN=UTN-USERFirst-Hardware */
-/* issuer :/C=US/ST=UT/L=Salt Lake City/O=The USERTRUST Network/OU=http://www.usertrust.com/CN=UTN-USERFirst-Hardware */
-
-
-const unsigned char UTN_USERFirst_Hardware_Root_CA_certificate[1144]={
-0x30,0x82,0x04,0x74,0x30,0x82,0x03,0x5C,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x44,
-0xBE,0x0C,0x8B,0x50,0x00,0x24,0xB4,0x11,0xD3,0x36,0x2A,0xFE,0x65,0x0A,0xFD,0x30,
-0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x81,
-0x97,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x0B,
-0x30,0x09,0x06,0x03,0x55,0x04,0x08,0x13,0x02,0x55,0x54,0x31,0x17,0x30,0x15,0x06,
-0x03,0x55,0x04,0x07,0x13,0x0E,0x53,0x61,0x6C,0x74,0x20,0x4C,0x61,0x6B,0x65,0x20,
-0x43,0x69,0x74,0x79,0x31,0x1E,0x30,0x1C,0x06,0x03,0x55,0x04,0x0A,0x13,0x15,0x54,
-0x68,0x65,0x20,0x55,0x53,0x45,0x52,0x54,0x52,0x55,0x53,0x54,0x20,0x4E,0x65,0x74,
-0x77,0x6F,0x72,0x6B,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x0B,0x13,0x18,0x68,
-0x74,0x74,0x70,0x3A,0x2F,0x2F,0x77,0x77,0x77,0x2E,0x75,0x73,0x65,0x72,0x74,0x72,
-0x75,0x73,0x74,0x2E,0x63,0x6F,0x6D,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x03,
-0x13,0x16,0x55,0x54,0x4E,0x2D,0x55,0x53,0x45,0x52,0x46,0x69,0x72,0x73,0x74,0x2D,
-0x48,0x61,0x72,0x64,0x77,0x61,0x72,0x65,0x30,0x1E,0x17,0x0D,0x39,0x39,0x30,0x37,
-0x30,0x39,0x31,0x38,0x31,0x30,0x34,0x32,0x5A,0x17,0x0D,0x31,0x39,0x30,0x37,0x30,
-0x39,0x31,0x38,0x31,0x39,0x32,0x32,0x5A,0x30,0x81,0x97,0x31,0x0B,0x30,0x09,0x06,
-0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,
-0x08,0x13,0x02,0x55,0x54,0x31,0x17,0x30,0x15,0x06,0x03,0x55,0x04,0x07,0x13,0x0E,
-0x53,0x61,0x6C,0x74,0x20,0x4C,0x61,0x6B,0x65,0x20,0x43,0x69,0x74,0x79,0x31,0x1E,
-0x30,0x1C,0x06,0x03,0x55,0x04,0x0A,0x13,0x15,0x54,0x68,0x65,0x20,0x55,0x53,0x45,
-0x52,0x54,0x52,0x55,0x53,0x54,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x21,
-0x30,0x1F,0x06,0x03,0x55,0x04,0x0B,0x13,0x18,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,
-0x77,0x77,0x77,0x2E,0x75,0x73,0x65,0x72,0x74,0x72,0x75,0x73,0x74,0x2E,0x63,0x6F,
-0x6D,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x03,0x13,0x16,0x55,0x54,0x4E,0x2D,
-0x55,0x53,0x45,0x52,0x46,0x69,0x72,0x73,0x74,0x2D,0x48,0x61,0x72,0x64,0x77,0x61,
-0x72,0x65,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,
-0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,
-0x01,0x01,0x00,0xB1,0xF7,0xC3,0x38,0x3F,0xB4,0xA8,0x7F,0xCF,0x39,0x82,0x51,0x67,
-0xD0,0x6D,0x9F,0xD2,0xFF,0x58,0xF3,0xE7,0x9F,0x2B,0xEC,0x0D,0x89,0x54,0x99,0xB9,
-0x38,0x99,0x16,0xF7,0xE0,0x21,0x79,0x48,0xC2,0xBB,0x61,0x74,0x12,0x96,0x1D,0x3C,
-0x6A,0x72,0xD5,0x3C,0x10,0x67,0x3A,0x39,0xED,0x2B,0x13,0xCD,0x66,0xEB,0x95,0x09,
-0x33,0xA4,0x6C,0x97,0xB1,0xE8,0xC6,0xEC,0xC1,0x75,0x79,0x9C,0x46,0x5E,0x8D,0xAB,
-0xD0,0x6A,0xFD,0xB9,0x2A,0x55,0x17,0x10,0x54,0xB3,0x19,0xF0,0x9A,0xF6,0xF1,0xB1,
-0x5D,0xB6,0xA7,0x6D,0xFB,0xE0,0x71,0x17,0x6B,0xA2,0x88,0xFB,0x00,0xDF,0xFE,0x1A,
-0x31,0x77,0x0C,0x9A,0x01,0x7A,0xB1,0x32,0xE3,0x2B,0x01,0x07,0x38,0x6E,0xC3,0xA5,
-0x5E,0x23,0xBC,0x45,0x9B,0x7B,0x50,0xC1,0xC9,0x30,0x8F,0xDB,0xE5,0x2B,0x7A,0xD3,
-0x5B,0xFB,0x33,0x40,0x1E,0xA0,0xD5,0x98,0x17,0xBC,0x8B,0x87,0xC3,0x89,0xD3,0x5D,
-0xA0,0x8E,0xB2,0xAA,0xAA,0xF6,0x8E,0x69,0x88,0x06,0xC5,0xFA,0x89,0x21,0xF3,0x08,
-0x9D,0x69,0x2E,0x09,0x33,0x9B,0x29,0x0D,0x46,0x0F,0x8C,0xCC,0x49,0x34,0xB0,0x69,
-0x51,0xBD,0xF9,0x06,0xCD,0x68,0xAD,0x66,0x4C,0xBC,0x3E,0xAC,0x61,0xBD,0x0A,0x88,
-0x0E,0xC8,0xDF,0x3D,0xEE,0x7C,0x04,0x4C,0x9D,0x0A,0x5E,0x6B,0x91,0xD6,0xEE,0xC7,
-0xED,0x28,0x8D,0xAB,0x4D,0x87,0x89,0x73,0xD0,0x6E,0xA4,0xD0,0x1E,0x16,0x8B,0x14,
-0xE1,0x76,0x44,0x03,0x7F,0x63,0xAC,0xE4,0xCD,0x49,0x9C,0xC5,0x92,0xF4,0xAB,0x32,
-0xA1,0x48,0x5B,0x02,0x03,0x01,0x00,0x01,0xA3,0x81,0xB9,0x30,0x81,0xB6,0x30,0x0B,
-0x06,0x03,0x55,0x1D,0x0F,0x04,0x04,0x03,0x02,0x01,0xC6,0x30,0x0F,0x06,0x03,0x55,
-0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,
-0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xA1,0x72,0x5F,0x26,0x1B,0x28,0x98,0x43,0x95,
-0x5D,0x07,0x37,0xD5,0x85,0x96,0x9D,0x4B,0xD2,0xC3,0x45,0x30,0x44,0x06,0x03,0x55,
-0x1D,0x1F,0x04,0x3D,0x30,0x3B,0x30,0x39,0xA0,0x37,0xA0,0x35,0x86,0x33,0x68,0x74,
-0x74,0x70,0x3A,0x2F,0x2F,0x63,0x72,0x6C,0x2E,0x75,0x73,0x65,0x72,0x74,0x72,0x75,
-0x73,0x74,0x2E,0x63,0x6F,0x6D,0x2F,0x55,0x54,0x4E,0x2D,0x55,0x53,0x45,0x52,0x46,
-0x69,0x72,0x73,0x74,0x2D,0x48,0x61,0x72,0x64,0x77,0x61,0x72,0x65,0x2E,0x63,0x72,
-0x6C,0x30,0x31,0x06,0x03,0x55,0x1D,0x25,0x04,0x2A,0x30,0x28,0x06,0x08,0x2B,0x06,
-0x01,0x05,0x05,0x07,0x03,0x01,0x06,0x08,0x2B,0x06,0x01,0x05,0x05,0x07,0x03,0x05,
-0x06,0x08,0x2B,0x06,0x01,0x05,0x05,0x07,0x03,0x06,0x06,0x08,0x2B,0x06,0x01,0x05,
-0x05,0x07,0x03,0x07,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,
-0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x47,0x19,0x0F,0xDE,0x74,0xC6,0x99,0x97,
-0xAF,0xFC,0xAD,0x28,0x5E,0x75,0x8E,0xEB,0x2D,0x67,0xEE,0x4E,0x7B,0x2B,0xD7,0x0C,
-0xFF,0xF6,0xDE,0xCB,0x55,0xA2,0x0A,0xE1,0x4C,0x54,0x65,0x93,0x60,0x6B,0x9F,0x12,
-0x9C,0xAD,0x5E,0x83,0x2C,0xEB,0x5A,0xAE,0xC0,0xE4,0x2D,0xF4,0x00,0x63,0x1D,0xB8,
-0xC0,0x6C,0xF2,0xCF,0x49,0xBB,0x4D,0x93,0x6F,0x06,0xA6,0x0A,0x22,0xB2,0x49,0x62,
-0x08,0x4E,0xFF,0xC8,0xC8,0x14,0xB2,0x88,0x16,0x5D,0xE7,0x01,0xE4,0x12,0x95,0xE5,
-0x45,0x34,0xB3,0x8B,0x69,0xBD,0xCF,0xB4,0x85,0x8F,0x75,0x51,0x9E,0x7D,0x3A,0x38,
-0x3A,0x14,0x48,0x12,0xC6,0xFB,0xA7,0x3B,0x1A,0x8D,0x0D,0x82,0x40,0x07,0xE8,0x04,
-0x08,0x90,0xA1,0x89,0xCB,0x19,0x50,0xDF,0xCA,0x1C,0x01,0xBC,0x1D,0x04,0x19,0x7B,
-0x10,0x76,0x97,0x3B,0xEE,0x90,0x90,0xCA,0xC4,0x0E,0x1F,0x16,0x6E,0x75,0xEF,0x33,
-0xF8,0xD3,0x6F,0x5B,0x1E,0x96,0xE3,0xE0,0x74,0x77,0x74,0x7B,0x8A,0xA2,0x6E,0x2D,
-0xDD,0x76,0xD6,0x39,0x30,0x82,0xF0,0xAB,0x9C,0x52,0xF2,0x2A,0xC7,0xAF,0x49,0x5E,
-0x7E,0xC7,0x68,0xE5,0x82,0x81,0xC8,0x6A,0x27,0xF9,0x27,0x88,0x2A,0xD5,0x58,0x50,
-0x95,0x1F,0xF0,0x3B,0x1C,0x57,0xBB,0x7D,0x14,0x39,0x62,0x2B,0x9A,0xC9,0x94,0x92,
-0x2A,0xA3,0x22,0x0C,0xFF,0x89,0x26,0x7D,0x5F,0x23,0x2B,0x47,0xD7,0x15,0x1D,0xA9,
-0x6A,0x9E,0x51,0x0D,0x2A,0x51,0x9E,0x81,0xF9,0xD4,0x3B,0x5E,0x70,0x12,0x7F,0x10,
-0x32,0x9C,0x1E,0xBB,0x9D,0xF8,0x66,0xA8,
-};
-
-
-/* subject:/L=ValiCert Validation Network/O=ValiCert, Inc./OU=ValiCert Class 1 Policy Validation Authority/CN=http://www.valicert.com//emailAddress=info@valicert.com */
-/* issuer :/L=ValiCert Validation Network/O=ValiCert, Inc./OU=ValiCert Class 1 Policy Validation Authority/CN=http://www.valicert.com//emailAddress=info@valicert.com */
-
-
-const unsigned char ValiCert_Class_1_VA_certificate[747]={
-0x30,0x82,0x02,0xE7,0x30,0x82,0x02,0x50,0x02,0x01,0x01,0x30,0x0D,0x06,0x09,0x2A,
-0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x81,0xBB,0x31,0x24,0x30,
-0x22,0x06,0x03,0x55,0x04,0x07,0x13,0x1B,0x56,0x61,0x6C,0x69,0x43,0x65,0x72,0x74,
-0x20,0x56,0x61,0x6C,0x69,0x64,0x61,0x74,0x69,0x6F,0x6E,0x20,0x4E,0x65,0x74,0x77,
-0x6F,0x72,0x6B,0x31,0x17,0x30,0x15,0x06,0x03,0x55,0x04,0x0A,0x13,0x0E,0x56,0x61,
-0x6C,0x69,0x43,0x65,0x72,0x74,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x35,0x30,0x33,
-0x06,0x03,0x55,0x04,0x0B,0x13,0x2C,0x56,0x61,0x6C,0x69,0x43,0x65,0x72,0x74,0x20,
-0x43,0x6C,0x61,0x73,0x73,0x20,0x31,0x20,0x50,0x6F,0x6C,0x69,0x63,0x79,0x20,0x56,
-0x61,0x6C,0x69,0x64,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,
-0x69,0x74,0x79,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x03,0x13,0x18,0x68,0x74,
-0x74,0x70,0x3A,0x2F,0x2F,0x77,0x77,0x77,0x2E,0x76,0x61,0x6C,0x69,0x63,0x65,0x72,
-0x74,0x2E,0x63,0x6F,0x6D,0x2F,0x31,0x20,0x30,0x1E,0x06,0x09,0x2A,0x86,0x48,0x86,
-0xF7,0x0D,0x01,0x09,0x01,0x16,0x11,0x69,0x6E,0x66,0x6F,0x40,0x76,0x61,0x6C,0x69,
-0x63,0x65,0x72,0x74,0x2E,0x63,0x6F,0x6D,0x30,0x1E,0x17,0x0D,0x39,0x39,0x30,0x36,
-0x32,0x35,0x32,0x32,0x32,0x33,0x34,0x38,0x5A,0x17,0x0D,0x31,0x39,0x30,0x36,0x32,
-0x35,0x32,0x32,0x32,0x33,0x34,0x38,0x5A,0x30,0x81,0xBB,0x31,0x24,0x30,0x22,0x06,
-0x03,0x55,0x04,0x07,0x13,0x1B,0x56,0x61,0x6C,0x69,0x43,0x65,0x72,0x74,0x20,0x56,
-0x61,0x6C,0x69,0x64,0x61,0x74,0x69,0x6F,0x6E,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,
-0x6B,0x31,0x17,0x30,0x15,0x06,0x03,0x55,0x04,0x0A,0x13,0x0E,0x56,0x61,0x6C,0x69,
-0x43,0x65,0x72,0x74,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x35,0x30,0x33,0x06,0x03,
-0x55,0x04,0x0B,0x13,0x2C,0x56,0x61,0x6C,0x69,0x43,0x65,0x72,0x74,0x20,0x43,0x6C,
-0x61,0x73,0x73,0x20,0x31,0x20,0x50,0x6F,0x6C,0x69,0x63,0x79,0x20,0x56,0x61,0x6C,
-0x69,0x64,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,
-0x79,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x03,0x13,0x18,0x68,0x74,0x74,0x70,
-0x3A,0x2F,0x2F,0x77,0x77,0x77,0x2E,0x76,0x61,0x6C,0x69,0x63,0x65,0x72,0x74,0x2E,
-0x63,0x6F,0x6D,0x2F,0x31,0x20,0x30,0x1E,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,
-0x01,0x09,0x01,0x16,0x11,0x69,0x6E,0x66,0x6F,0x40,0x76,0x61,0x6C,0x69,0x63,0x65,
-0x72,0x74,0x2E,0x63,0x6F,0x6D,0x30,0x81,0x9F,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,
-0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x81,0x8D,0x00,0x30,0x81,0x89,0x02,
-0x81,0x81,0x00,0xD8,0x59,0x82,0x7A,0x89,0xB8,0x96,0xBA,0xA6,0x2F,0x68,0x6F,0x58,
-0x2E,0xA7,0x54,0x1C,0x06,0x6E,0xF4,0xEA,0x8D,0x48,0xBC,0x31,0x94,0x17,0xF0,0xF3,
-0x4E,0xBC,0xB2,0xB8,0x35,0x92,0x76,0xB0,0xD0,0xA5,0xA5,0x01,0xD7,0x00,0x03,0x12,
-0x22,0x19,0x08,0xF8,0xFF,0x11,0x23,0x9B,0xCE,0x07,0xF5,0xBF,0x69,0x1A,0x26,0xFE,
-0x4E,0xE9,0xD1,0x7F,0x9D,0x2C,0x40,0x1D,0x59,0x68,0x6E,0xA6,0xF8,0x58,0xB0,0x9D,
-0x1A,0x8F,0xD3,0x3F,0xF1,0xDC,0x19,0x06,0x81,0xA8,0x0E,0xE0,0x3A,0xDD,0xC8,0x53,
-0x45,0x09,0x06,0xE6,0x0F,0x70,0xC3,0xFA,0x40,0xA6,0x0E,0xE2,0x56,0x05,0x0F,0x18,
-0x4D,0xFC,0x20,0x82,0xD1,0x73,0x55,0x74,0x8D,0x76,0x72,0xA0,0x1D,0x9D,0x1D,0xC0,
-0xDD,0x3F,0x71,0x02,0x03,0x01,0x00,0x01,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,
-0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x81,0x81,0x00,0x50,0x68,0x3D,0x49,0xF4,
-0x2C,0x1C,0x06,0x94,0xDF,0x95,0x60,0x7F,0x96,0x7B,0x17,0xFE,0x4F,0x71,0xAD,0x64,
-0xC8,0xDD,0x77,0xD2,0xEF,0x59,0x55,0xE8,0x3F,0xE8,0x8E,0x05,0x2A,0x21,0xF2,0x07,
-0xD2,0xB5,0xA7,0x52,0xFE,0x9C,0xB1,0xB6,0xE2,0x5B,0x77,0x17,0x40,0xEA,0x72,0xD6,
-0x23,0xCB,0x28,0x81,0x32,0xC3,0x00,0x79,0x18,0xEC,0x59,0x17,0x89,0xC9,0xC6,0x6A,
-0x1E,0x71,0xC9,0xFD,0xB7,0x74,0xA5,0x25,0x45,0x69,0xC5,0x48,0xAB,0x19,0xE1,0x45,
-0x8A,0x25,0x6B,0x19,0xEE,0xE5,0xBB,0x12,0xF5,0x7F,0xF7,0xA6,0x8D,0x51,0xC3,0xF0,
-0x9D,0x74,0xB7,0xA9,0x3E,0xA0,0xA5,0xFF,0xB6,0x49,0x03,0x13,0xDA,0x22,0xCC,0xED,
-0x71,0x82,0x2B,0x99,0xCF,0x3A,0xB7,0xF5,0x2D,0x72,0xC8,
-};
-
-
-/* subject:/L=ValiCert Validation Network/O=ValiCert, Inc./OU=ValiCert Class 2 Policy Validation Authority/CN=http://www.valicert.com//emailAddress=info@valicert.com */
-/* issuer :/L=ValiCert Validation Network/O=ValiCert, Inc./OU=ValiCert Class 2 Policy Validation Authority/CN=http://www.valicert.com//emailAddress=info@valicert.com */
-
-
-const unsigned char ValiCert_Class_2_VA_certificate[747]={
-0x30,0x82,0x02,0xE7,0x30,0x82,0x02,0x50,0x02,0x01,0x01,0x30,0x0D,0x06,0x09,0x2A,
-0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x81,0xBB,0x31,0x24,0x30,
-0x22,0x06,0x03,0x55,0x04,0x07,0x13,0x1B,0x56,0x61,0x6C,0x69,0x43,0x65,0x72,0x74,
-0x20,0x56,0x61,0x6C,0x69,0x64,0x61,0x74,0x69,0x6F,0x6E,0x20,0x4E,0x65,0x74,0x77,
-0x6F,0x72,0x6B,0x31,0x17,0x30,0x15,0x06,0x03,0x55,0x04,0x0A,0x13,0x0E,0x56,0x61,
-0x6C,0x69,0x43,0x65,0x72,0x74,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x35,0x30,0x33,
-0x06,0x03,0x55,0x04,0x0B,0x13,0x2C,0x56,0x61,0x6C,0x69,0x43,0x65,0x72,0x74,0x20,
-0x43,0x6C,0x61,0x73,0x73,0x20,0x32,0x20,0x50,0x6F,0x6C,0x69,0x63,0x79,0x20,0x56,
-0x61,0x6C,0x69,0x64,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,
-0x69,0x74,0x79,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x03,0x13,0x18,0x68,0x74,
-0x74,0x70,0x3A,0x2F,0x2F,0x77,0x77,0x77,0x2E,0x76,0x61,0x6C,0x69,0x63,0x65,0x72,
-0x74,0x2E,0x63,0x6F,0x6D,0x2F,0x31,0x20,0x30,0x1E,0x06,0x09,0x2A,0x86,0x48,0x86,
-0xF7,0x0D,0x01,0x09,0x01,0x16,0x11,0x69,0x6E,0x66,0x6F,0x40,0x76,0x61,0x6C,0x69,
-0x63,0x65,0x72,0x74,0x2E,0x63,0x6F,0x6D,0x30,0x1E,0x17,0x0D,0x39,0x39,0x30,0x36,
-0x32,0x36,0x30,0x30,0x31,0x39,0x35,0x34,0x5A,0x17,0x0D,0x31,0x39,0x30,0x36,0x32,
-0x36,0x30,0x30,0x31,0x39,0x35,0x34,0x5A,0x30,0x81,0xBB,0x31,0x24,0x30,0x22,0x06,
-0x03,0x55,0x04,0x07,0x13,0x1B,0x56,0x61,0x6C,0x69,0x43,0x65,0x72,0x74,0x20,0x56,
-0x61,0x6C,0x69,0x64,0x61,0x74,0x69,0x6F,0x6E,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,
-0x6B,0x31,0x17,0x30,0x15,0x06,0x03,0x55,0x04,0x0A,0x13,0x0E,0x56,0x61,0x6C,0x69,
-0x43,0x65,0x72,0x74,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x35,0x30,0x33,0x06,0x03,
-0x55,0x04,0x0B,0x13,0x2C,0x56,0x61,0x6C,0x69,0x43,0x65,0x72,0x74,0x20,0x43,0x6C,
-0x61,0x73,0x73,0x20,0x32,0x20,0x50,0x6F,0x6C,0x69,0x63,0x79,0x20,0x56,0x61,0x6C,
-0x69,0x64,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,
-0x79,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x03,0x13,0x18,0x68,0x74,0x74,0x70,
-0x3A,0x2F,0x2F,0x77,0x77,0x77,0x2E,0x76,0x61,0x6C,0x69,0x63,0x65,0x72,0x74,0x2E,
-0x63,0x6F,0x6D,0x2F,0x31,0x20,0x30,0x1E,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,
-0x01,0x09,0x01,0x16,0x11,0x69,0x6E,0x66,0x6F,0x40,0x76,0x61,0x6C,0x69,0x63,0x65,
-0x72,0x74,0x2E,0x63,0x6F,0x6D,0x30,0x81,0x9F,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,
-0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x81,0x8D,0x00,0x30,0x81,0x89,0x02,
-0x81,0x81,0x00,0xCE,0x3A,0x71,0xCA,0xE5,0xAB,0xC8,0x59,0x92,0x55,0xD7,0xAB,0xD8,
-0x74,0x0E,0xF9,0xEE,0xD9,0xF6,0x55,0x47,0x59,0x65,0x47,0x0E,0x05,0x55,0xDC,0xEB,
-0x98,0x36,0x3C,0x5C,0x53,0x5D,0xD3,0x30,0xCF,0x38,0xEC,0xBD,0x41,0x89,0xED,0x25,
-0x42,0x09,0x24,0x6B,0x0A,0x5E,0xB3,0x7C,0xDD,0x52,0x2D,0x4C,0xE6,0xD4,0xD6,0x7D,
-0x5A,0x59,0xA9,0x65,0xD4,0x49,0x13,0x2D,0x24,0x4D,0x1C,0x50,0x6F,0xB5,0xC1,0x85,
-0x54,0x3B,0xFE,0x71,0xE4,0xD3,0x5C,0x42,0xF9,0x80,0xE0,0x91,0x1A,0x0A,0x5B,0x39,
-0x36,0x67,0xF3,0x3F,0x55,0x7C,0x1B,0x3F,0xB4,0x5F,0x64,0x73,0x34,0xE3,0xB4,0x12,
-0xBF,0x87,0x64,0xF8,0xDA,0x12,0xFF,0x37,0x27,0xC1,0xB3,0x43,0xBB,0xEF,0x7B,0x6E,
-0x2E,0x69,0xF7,0x02,0x03,0x01,0x00,0x01,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,
-0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x81,0x81,0x00,0x3B,0x7F,0x50,0x6F,0x6F,
-0x50,0x94,0x99,0x49,0x62,0x38,0x38,0x1F,0x4B,0xF8,0xA5,0xC8,0x3E,0xA7,0x82,0x81,
-0xF6,0x2B,0xC7,0xE8,0xC5,0xCE,0xE8,0x3A,0x10,0x82,0xCB,0x18,0x00,0x8E,0x4D,0xBD,
-0xA8,0x58,0x7F,0xA1,0x79,0x00,0xB5,0xBB,0xE9,0x8D,0xAF,0x41,0xD9,0x0F,0x34,0xEE,
-0x21,0x81,0x19,0xA0,0x32,0x49,0x28,0xF4,0xC4,0x8E,0x56,0xD5,0x52,0x33,0xFD,0x50,
-0xD5,0x7E,0x99,0x6C,0x03,0xE4,0xC9,0x4C,0xFC,0xCB,0x6C,0xAB,0x66,0xB3,0x4A,0x21,
-0x8C,0xE5,0xB5,0x0C,0x32,0x3E,0x10,0xB2,0xCC,0x6C,0xA1,0xDC,0x9A,0x98,0x4C,0x02,
-0x5B,0xF3,0xCE,0xB9,0x9E,0xA5,0x72,0x0E,0x4A,0xB7,0x3F,0x3C,0xE6,0x16,0x68,0xF8,
-0xBE,0xED,0x74,0x4C,0xBC,0x5B,0xD5,0x62,0x1F,0x43,0xDD,
-};
-
-
-/* subject:/C=US/O=VeriSign, Inc./OU=Class 3 Public Primary Certification Authority */
-/* issuer :/C=US/O=VeriSign, Inc./OU=Class 3 Public Primary Certification Authority */
-
-
-const unsigned char Verisign_Class_3_Public_Primary_Certification_Authority_certificate[576]={
-0x30,0x82,0x02,0x3C,0x30,0x82,0x01,0xA5,0x02,0x10,0x3C,0x91,0x31,0xCB,0x1F,0xF6,
-0xD0,0x1B,0x0E,0x9A,0xB8,0xD0,0x44,0xBF,0x12,0xBE,0x30,0x0D,0x06,0x09,0x2A,0x86,
-0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x5F,0x31,0x0B,0x30,0x09,0x06,
-0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,0x30,0x15,0x06,0x03,0x55,0x04,
-0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,0x6E,0x63,
-0x2E,0x31,0x37,0x30,0x35,0x06,0x03,0x55,0x04,0x0B,0x13,0x2E,0x43,0x6C,0x61,0x73,
-0x73,0x20,0x33,0x20,0x50,0x75,0x62,0x6C,0x69,0x63,0x20,0x50,0x72,0x69,0x6D,0x61,
-0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,
-0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x1E,0x17,0x0D,0x39,0x36,
-0x30,0x31,0x32,0x39,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32,0x38,0x30,
-0x38,0x30,0x32,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x5F,0x31,0x0B,0x30,0x09,
-0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,0x30,0x15,0x06,0x03,0x55,
-0x04,0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,0x6E,
-0x63,0x2E,0x31,0x37,0x30,0x35,0x06,0x03,0x55,0x04,0x0B,0x13,0x2E,0x43,0x6C,0x61,
-0x73,0x73,0x20,0x33,0x20,0x50,0x75,0x62,0x6C,0x69,0x63,0x20,0x50,0x72,0x69,0x6D,
-0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,
-0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x81,0x9F,0x30,0x0D,
-0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x81,0x8D,
-0x00,0x30,0x81,0x89,0x02,0x81,0x81,0x00,0xC9,0x5C,0x59,0x9E,0xF2,0x1B,0x8A,0x01,
-0x14,0xB4,0x10,0xDF,0x04,0x40,0xDB,0xE3,0x57,0xAF,0x6A,0x45,0x40,0x8F,0x84,0x0C,
-0x0B,0xD1,0x33,0xD9,0xD9,0x11,0xCF,0xEE,0x02,0x58,0x1F,0x25,0xF7,0x2A,0xA8,0x44,
-0x05,0xAA,0xEC,0x03,0x1F,0x78,0x7F,0x9E,0x93,0xB9,0x9A,0x00,0xAA,0x23,0x7D,0xD6,
-0xAC,0x85,0xA2,0x63,0x45,0xC7,0x72,0x27,0xCC,0xF4,0x4C,0xC6,0x75,0x71,0xD2,0x39,
-0xEF,0x4F,0x42,0xF0,0x75,0xDF,0x0A,0x90,0xC6,0x8E,0x20,0x6F,0x98,0x0F,0xF8,0xAC,
-0x23,0x5F,0x70,0x29,0x36,0xA4,0xC9,0x86,0xE7,0xB1,0x9A,0x20,0xCB,0x53,0xA5,0x85,
-0xE7,0x3D,0xBE,0x7D,0x9A,0xFE,0x24,0x45,0x33,0xDC,0x76,0x15,0xED,0x0F,0xA2,0x71,
-0x64,0x4C,0x65,0x2E,0x81,0x68,0x45,0xA7,0x02,0x03,0x01,0x00,0x01,0x30,0x0D,0x06,
-0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x81,0x81,0x00,
-0x10,0x72,0x52,0xA9,0x05,0x14,0x19,0x32,0x08,0x41,0xF0,0xC5,0x6B,0x0A,0xCC,0x7E,
-0x0F,0x21,0x19,0xCD,0xE4,0x67,0xDC,0x5F,0xA9,0x1B,0xE6,0xCA,0xE8,0x73,0x9D,0x22,
-0xD8,0x98,0x6E,0x73,0x03,0x61,0x91,0xC5,0x7C,0xB0,0x45,0x40,0x6E,0x44,0x9D,0x8D,
-0xB0,0xB1,0x96,0x74,0x61,0x2D,0x0D,0xA9,0x45,0xD2,0xA4,0x92,0x2A,0xD6,0x9A,0x75,
-0x97,0x6E,0x3F,0x53,0xFD,0x45,0x99,0x60,0x1D,0xA8,0x2B,0x4C,0xF9,0x5E,0xA7,0x09,
-0xD8,0x75,0x30,0xD7,0xD2,0x65,0x60,0x3D,0x67,0xD6,0x48,0x55,0x75,0x69,0x3F,0x91,
-0xF5,0x48,0x0B,0x47,0x69,0x22,0x69,0x82,0x96,0xBE,0xC9,0xC8,0x38,0x86,0x4A,0x7A,
-0x2C,0x73,0x19,0x48,0x69,0x4E,0x6B,0x7C,0x65,0xBF,0x0F,0xFC,0x70,0xCE,0x88,0x90,
-};
-
-
-/* subject:/C=US/O=VeriSign, Inc./OU=Class 3 Public Primary Certification Authority - G2/OU=(c) 1998 VeriSign, Inc. - For authorized use only/OU=VeriSign Trust Network */
-/* issuer :/C=US/O=VeriSign, Inc./OU=Class 3 Public Primary Certification Authority - G2/OU=(c) 1998 VeriSign, Inc. - For authorized use only/OU=VeriSign Trust Network */
-
-
-const unsigned char Verisign_Class_3_Public_Primary_Certification_Authority___G2_certificate[774]={
-0x30,0x82,0x03,0x02,0x30,0x82,0x02,0x6B,0x02,0x10,0x7D,0xD9,0xFE,0x07,0xCF,0xA8,
-0x1E,0xB7,0x10,0x79,0x67,0xFB,0xA7,0x89,0x34,0xC6,0x30,0x0D,0x06,0x09,0x2A,0x86,
-0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x81,0xC1,0x31,0x0B,0x30,0x09,
-0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,0x30,0x15,0x06,0x03,0x55,
-0x04,0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,0x6E,
-0x63,0x2E,0x31,0x3C,0x30,0x3A,0x06,0x03,0x55,0x04,0x0B,0x13,0x33,0x43,0x6C,0x61,
-0x73,0x73,0x20,0x33,0x20,0x50,0x75,0x62,0x6C,0x69,0x63,0x20,0x50,0x72,0x69,0x6D,
-0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,
-0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20,0x2D,0x20,0x47,0x32,
-0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,0x0B,0x13,0x31,0x28,0x63,0x29,0x20,0x31,
-0x39,0x39,0x38,0x20,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,0x6E,
-0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,
-0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x1F,0x30,0x1D,
-0x06,0x03,0x55,0x04,0x0B,0x13,0x16,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x20,
-0x54,0x72,0x75,0x73,0x74,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x30,0x1E,0x17,
-0x0D,0x39,0x38,0x30,0x35,0x31,0x38,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,
-0x32,0x38,0x30,0x38,0x30,0x31,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0xC1,
-0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,0x30,
-0x15,0x06,0x03,0x55,0x04,0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,
-0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x3C,0x30,0x3A,0x06,0x03,0x55,0x04,0x0B,0x13,
-0x33,0x43,0x6C,0x61,0x73,0x73,0x20,0x33,0x20,0x50,0x75,0x62,0x6C,0x69,0x63,0x20,
-0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,
-0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20,
-0x2D,0x20,0x47,0x32,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,0x0B,0x13,0x31,0x28,
-0x63,0x29,0x20,0x31,0x39,0x39,0x38,0x20,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,
-0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,
-0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,
-0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,0x13,0x16,0x56,0x65,0x72,0x69,0x53,
-0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,
-0x6B,0x30,0x81,0x9F,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,
-0x01,0x05,0x00,0x03,0x81,0x8D,0x00,0x30,0x81,0x89,0x02,0x81,0x81,0x00,0xCC,0x5E,
-0xD1,0x11,0x5D,0x5C,0x69,0xD0,0xAB,0xD3,0xB9,0x6A,0x4C,0x99,0x1F,0x59,0x98,0x30,
-0x8E,0x16,0x85,0x20,0x46,0x6D,0x47,0x3F,0xD4,0x85,0x20,0x84,0xE1,0x6D,0xB3,0xF8,
-0xA4,0xED,0x0C,0xF1,0x17,0x0F,0x3B,0xF9,0xA7,0xF9,0x25,0xD7,0xC1,0xCF,0x84,0x63,
-0xF2,0x7C,0x63,0xCF,0xA2,0x47,0xF2,0xC6,0x5B,0x33,0x8E,0x64,0x40,0x04,0x68,0xC1,
-0x80,0xB9,0x64,0x1C,0x45,0x77,0xC7,0xD8,0x6E,0xF5,0x95,0x29,0x3C,0x50,0xE8,0x34,
-0xD7,0x78,0x1F,0xA8,0xBA,0x6D,0x43,0x91,0x95,0x8F,0x45,0x57,0x5E,0x7E,0xC5,0xFB,
-0xCA,0xA4,0x04,0xEB,0xEA,0x97,0x37,0x54,0x30,0x6F,0xBB,0x01,0x47,0x32,0x33,0xCD,
-0xDC,0x57,0x9B,0x64,0x69,0x61,0xF8,0x9B,0x1D,0x1C,0x89,0x4F,0x5C,0x67,0x02,0x03,
-0x01,0x00,0x01,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,
-0x05,0x00,0x03,0x81,0x81,0x00,0x51,0x4D,0xCD,0xBE,0x5C,0xCB,0x98,0x19,0x9C,0x15,
-0xB2,0x01,0x39,0x78,0x2E,0x4D,0x0F,0x67,0x70,0x70,0x99,0xC6,0x10,0x5A,0x94,0xA4,
-0x53,0x4D,0x54,0x6D,0x2B,0xAF,0x0D,0x5D,0x40,0x8B,0x64,0xD3,0xD7,0xEE,0xDE,0x56,
-0x61,0x92,0x5F,0xA6,0xC4,0x1D,0x10,0x61,0x36,0xD3,0x2C,0x27,0x3C,0xE8,0x29,0x09,
-0xB9,0x11,0x64,0x74,0xCC,0xB5,0x73,0x9F,0x1C,0x48,0xA9,0xBC,0x61,0x01,0xEE,0xE2,
-0x17,0xA6,0x0C,0xE3,0x40,0x08,0x3B,0x0E,0xE7,0xEB,0x44,0x73,0x2A,0x9A,0xF1,0x69,
-0x92,0xEF,0x71,0x14,0xC3,0x39,0xAC,0x71,0xA7,0x91,0x09,0x6F,0xE4,0x71,0x06,0xB3,
-0xBA,0x59,0x57,0x26,0x79,0x00,0xF6,0xF8,0x0D,0xA2,0x33,0x30,0x28,0xD4,0xAA,0x58,
-0xA0,0x9D,0x9D,0x69,0x91,0xFD,
-};
-
-
-/* subject:/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 1999 VeriSign, Inc. - For authorized use only/CN=VeriSign Class 3 Public Primary Certification Authority - G3 */
-/* issuer :/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 1999 VeriSign, Inc. - For authorized use only/CN=VeriSign Class 3 Public Primary Certification Authority - G3 */
-
-
-const unsigned char Verisign_Class_3_Public_Primary_Certification_Authority___G3_certificate[1054]={
-0x30,0x82,0x04,0x1A,0x30,0x82,0x03,0x02,0x02,0x11,0x00,0x9B,0x7E,0x06,0x49,0xA3,
-0x3E,0x62,0xB9,0xD5,0xEE,0x90,0x48,0x71,0x29,0xEF,0x57,0x30,0x0D,0x06,0x09,0x2A,
-0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x81,0xCA,0x31,0x0B,0x30,
-0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,0x30,0x15,0x06,0x03,
-0x55,0x04,0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,
-0x6E,0x63,0x2E,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,0x13,0x16,0x56,0x65,
-0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,0x20,0x4E,0x65,0x74,
-0x77,0x6F,0x72,0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,0x0B,0x13,0x31,0x28,
-0x63,0x29,0x20,0x31,0x39,0x39,0x39,0x20,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,
-0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,
-0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,
-0x31,0x45,0x30,0x43,0x06,0x03,0x55,0x04,0x03,0x13,0x3C,0x56,0x65,0x72,0x69,0x53,
-0x69,0x67,0x6E,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x33,0x20,0x50,0x75,0x62,0x6C,
-0x69,0x63,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,
-0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,
-0x74,0x79,0x20,0x2D,0x20,0x47,0x33,0x30,0x1E,0x17,0x0D,0x39,0x39,0x31,0x30,0x30,
-0x31,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x36,0x30,0x37,0x31,0x36,
-0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0xCA,0x31,0x0B,0x30,0x09,0x06,0x03,
-0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,0x30,0x15,0x06,0x03,0x55,0x04,0x0A,
-0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E,
-0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,0x13,0x16,0x56,0x65,0x72,0x69,0x53,
-0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,
-0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,0x0B,0x13,0x31,0x28,0x63,0x29,0x20,
-0x31,0x39,0x39,0x39,0x20,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,
-0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72,
-0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x45,0x30,
-0x43,0x06,0x03,0x55,0x04,0x03,0x13,0x3C,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,
-0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x33,0x20,0x50,0x75,0x62,0x6C,0x69,0x63,0x20,
-0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,
-0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20,
-0x2D,0x20,0x47,0x33,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,
-0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,
-0x02,0x82,0x01,0x01,0x00,0xCB,0xBA,0x9C,0x52,0xFC,0x78,0x1F,0x1A,0x1E,0x6F,0x1B,
-0x37,0x73,0xBD,0xF8,0xC9,0x6B,0x94,0x12,0x30,0x4F,0xF0,0x36,0x47,0xF5,0xD0,0x91,
-0x0A,0xF5,0x17,0xC8,0xA5,0x61,0xC1,0x16,0x40,0x4D,0xFB,0x8A,0x61,0x90,0xE5,0x76,
-0x20,0xC1,0x11,0x06,0x7D,0xAB,0x2C,0x6E,0xA6,0xF5,0x11,0x41,0x8E,0xFA,0x2D,0xAD,
-0x2A,0x61,0x59,0xA4,0x67,0x26,0x4C,0xD0,0xE8,0xBC,0x52,0x5B,0x70,0x20,0x04,0x58,
-0xD1,0x7A,0xC9,0xA4,0x69,0xBC,0x83,0x17,0x64,0xAD,0x05,0x8B,0xBC,0xD0,0x58,0xCE,
-0x8D,0x8C,0xF5,0xEB,0xF0,0x42,0x49,0x0B,0x9D,0x97,0x27,0x67,0x32,0x6E,0xE1,0xAE,
-0x93,0x15,0x1C,0x70,0xBC,0x20,0x4D,0x2F,0x18,0xDE,0x92,0x88,0xE8,0x6C,0x85,0x57,
-0x11,0x1A,0xE9,0x7E,0xE3,0x26,0x11,0x54,0xA2,0x45,0x96,0x55,0x83,0xCA,0x30,0x89,
-0xE8,0xDC,0xD8,0xA3,0xED,0x2A,0x80,0x3F,0x7F,0x79,0x65,0x57,0x3E,0x15,0x20,0x66,
-0x08,0x2F,0x95,0x93,0xBF,0xAA,0x47,0x2F,0xA8,0x46,0x97,0xF0,0x12,0xE2,0xFE,0xC2,
-0x0A,0x2B,0x51,0xE6,0x76,0xE6,0xB7,0x46,0xB7,0xE2,0x0D,0xA6,0xCC,0xA8,0xC3,0x4C,
-0x59,0x55,0x89,0xE6,0xE8,0x53,0x5C,0x1C,0xEA,0x9D,0xF0,0x62,0x16,0x0B,0xA7,0xC9,
-0x5F,0x0C,0xF0,0xDE,0xC2,0x76,0xCE,0xAF,0xF7,0x6A,0xF2,0xFA,0x41,0xA6,0xA2,0x33,
-0x14,0xC9,0xE5,0x7A,0x63,0xD3,0x9E,0x62,0x37,0xD5,0x85,0x65,0x9E,0x0E,0xE6,0x53,
-0x24,0x74,0x1B,0x5E,0x1D,0x12,0x53,0x5B,0xC7,0x2C,0xE7,0x83,0x49,0x3B,0x15,0xAE,
-0x8A,0x68,0xB9,0x57,0x97,0x02,0x03,0x01,0x00,0x01,0x30,0x0D,0x06,0x09,0x2A,0x86,
-0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x11,0x14,
-0x96,0xC1,0xAB,0x92,0x08,0xF7,0x3F,0x2F,0xC9,0xB2,0xFE,0xE4,0x5A,0x9F,0x64,0xDE,
-0xDB,0x21,0x4F,0x86,0x99,0x34,0x76,0x36,0x57,0xDD,0xD0,0x15,0x2F,0xC5,0xAD,0x7F,
-0x15,0x1F,0x37,0x62,0x73,0x3E,0xD4,0xE7,0x5F,0xCE,0x17,0x03,0xDB,0x35,0xFA,0x2B,
-0xDB,0xAE,0x60,0x09,0x5F,0x1E,0x5F,0x8F,0x6E,0xBB,0x0B,0x3D,0xEA,0x5A,0x13,0x1E,
-0x0C,0x60,0x6F,0xB5,0xC0,0xB5,0x23,0x22,0x2E,0x07,0x0B,0xCB,0xA9,0x74,0xCB,0x47,
-0xBB,0x1D,0xC1,0xD7,0xA5,0x6B,0xCC,0x2F,0xD2,0x42,0xFD,0x49,0xDD,0xA7,0x89,0xCF,
-0x53,0xBA,0xDA,0x00,0x5A,0x28,0xBF,0x82,0xDF,0xF8,0xBA,0x13,0x1D,0x50,0x86,0x82,
-0xFD,0x8E,0x30,0x8F,0x29,0x46,0xB0,0x1E,0x3D,0x35,0xDA,0x38,0x62,0x16,0x18,0x4A,
-0xAD,0xE6,0xB6,0x51,0x6C,0xDE,0xAF,0x62,0xEB,0x01,0xD0,0x1E,0x24,0xFE,0x7A,0x8F,
-0x12,0x1A,0x12,0x68,0xB8,0xFB,0x66,0x99,0x14,0x14,0x45,0x5C,0xAE,0xE7,0xAE,0x69,
-0x17,0x81,0x2B,0x5A,0x37,0xC9,0x5E,0x2A,0xF4,0xC6,0xE2,0xA1,0x5C,0x54,0x9B,0xA6,
-0x54,0x00,0xCF,0xF0,0xF1,0xC1,0xC7,0x98,0x30,0x1A,0x3B,0x36,0x16,0xDB,0xA3,0x6E,
-0xEA,0xFD,0xAD,0xB2,0xC2,0xDA,0xEF,0x02,0x47,0x13,0x8A,0xC0,0xF1,0xB3,0x31,0xAD,
-0x4F,0x1C,0xE1,0x4F,0x9C,0xAF,0x0F,0x0C,0x9D,0xF7,0x78,0x0D,0xD8,0xF4,0x35,0x56,
-0x80,0xDA,0xB7,0x6D,0x17,0x8F,0x9D,0x1E,0x81,0x64,0xE1,0xFE,0xC5,0x45,0xBA,0xAD,
-0x6B,0xB9,0x0A,0x7A,0x4E,0x4F,0x4B,0x84,0xEE,0x4B,0xF1,0x7D,0xDD,0x11,
-};
-
-
-/* subject:/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 2007 VeriSign, Inc. - For authorized use only/CN=VeriSign Class 3 Public Primary Certification Authority - G4 */
-/* issuer :/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 2007 VeriSign, Inc. - For authorized use only/CN=VeriSign Class 3 Public Primary Certification Authority - G4 */
-
-
-const unsigned char VeriSign_Class_3_Public_Primary_Certification_Authority___G4_certificate[904]={
-0x30,0x82,0x03,0x84,0x30,0x82,0x03,0x0A,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x2F,
-0x80,0xFE,0x23,0x8C,0x0E,0x22,0x0F,0x48,0x67,0x12,0x28,0x91,0x87,0xAC,0xB3,0x30,
-0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x30,0x81,0xCA,0x31,0x0B,
-0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,0x30,0x15,0x06,
-0x03,0x55,0x04,0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,
-0x49,0x6E,0x63,0x2E,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,0x13,0x16,0x56,
-0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,0x20,0x4E,0x65,
-0x74,0x77,0x6F,0x72,0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,0x0B,0x13,0x31,
-0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x37,0x20,0x56,0x65,0x72,0x69,0x53,0x69,0x67,
-0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,
-0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,
-0x79,0x31,0x45,0x30,0x43,0x06,0x03,0x55,0x04,0x03,0x13,0x3C,0x56,0x65,0x72,0x69,
-0x53,0x69,0x67,0x6E,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x33,0x20,0x50,0x75,0x62,
-0x6C,0x69,0x63,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,
-0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,
-0x69,0x74,0x79,0x20,0x2D,0x20,0x47,0x34,0x30,0x1E,0x17,0x0D,0x30,0x37,0x31,0x31,
-0x30,0x35,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x38,0x30,0x31,0x31,
-0x38,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0xCA,0x31,0x0B,0x30,0x09,0x06,
-0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,0x30,0x15,0x06,0x03,0x55,0x04,
-0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,0x6E,0x63,
-0x2E,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,0x13,0x16,0x56,0x65,0x72,0x69,
-0x53,0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,0x20,0x4E,0x65,0x74,0x77,0x6F,
-0x72,0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,0x0B,0x13,0x31,0x28,0x63,0x29,
-0x20,0x32,0x30,0x30,0x37,0x20,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,
-0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,
-0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x45,
-0x30,0x43,0x06,0x03,0x55,0x04,0x03,0x13,0x3C,0x56,0x65,0x72,0x69,0x53,0x69,0x67,
-0x6E,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x33,0x20,0x50,0x75,0x62,0x6C,0x69,0x63,
-0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,
-0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,
-0x20,0x2D,0x20,0x47,0x34,0x30,0x76,0x30,0x10,0x06,0x07,0x2A,0x86,0x48,0xCE,0x3D,
-0x02,0x01,0x06,0x05,0x2B,0x81,0x04,0x00,0x22,0x03,0x62,0x00,0x04,0xA7,0x56,0x7A,
-0x7C,0x52,0xDA,0x64,0x9B,0x0E,0x2D,0x5C,0xD8,0x5E,0xAC,0x92,0x3D,0xFE,0x01,0xE6,
-0x19,0x4A,0x3D,0x14,0x03,0x4B,0xFA,0x60,0x27,0x20,0xD9,0x83,0x89,0x69,0xFA,0x54,
-0xC6,0x9A,0x18,0x5E,0x55,0x2A,0x64,0xDE,0x06,0xF6,0x8D,0x4A,0x3B,0xAD,0x10,0x3C,
-0x65,0x3D,0x90,0x88,0x04,0x89,0xE0,0x30,0x61,0xB3,0xAE,0x5D,0x01,0xA7,0x7B,0xDE,
-0x7C,0xB2,0xBE,0xCA,0x65,0x61,0x00,0x86,0xAE,0xDA,0x8F,0x7B,0xD0,0x89,0xAD,0x4D,
-0x1D,0x59,0x9A,0x41,0xB1,0xBC,0x47,0x80,0xDC,0x9E,0x62,0xC3,0xF9,0xA3,0x81,0xB2,
-0x30,0x81,0xAF,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,
-0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,
-0x03,0x02,0x01,0x06,0x30,0x6D,0x06,0x08,0x2B,0x06,0x01,0x05,0x05,0x07,0x01,0x0C,
-0x04,0x61,0x30,0x5F,0xA1,0x5D,0xA0,0x5B,0x30,0x59,0x30,0x57,0x30,0x55,0x16,0x09,
-0x69,0x6D,0x61,0x67,0x65,0x2F,0x67,0x69,0x66,0x30,0x21,0x30,0x1F,0x30,0x07,0x06,
-0x05,0x2B,0x0E,0x03,0x02,0x1A,0x04,0x14,0x8F,0xE5,0xD3,0x1A,0x86,0xAC,0x8D,0x8E,
-0x6B,0xC3,0xCF,0x80,0x6A,0xD4,0x48,0x18,0x2C,0x7B,0x19,0x2E,0x30,0x25,0x16,0x23,
-0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x6C,0x6F,0x67,0x6F,0x2E,0x76,0x65,0x72,0x69,
-0x73,0x69,0x67,0x6E,0x2E,0x63,0x6F,0x6D,0x2F,0x76,0x73,0x6C,0x6F,0x67,0x6F,0x2E,
-0x67,0x69,0x66,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xB3,0x16,
-0x91,0xFD,0xEE,0xA6,0x6E,0xE4,0xB5,0x2E,0x49,0x8F,0x87,0x78,0x81,0x80,0xEC,0xE5,
-0xB1,0xB5,0x30,0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x03,0x68,
-0x00,0x30,0x65,0x02,0x30,0x66,0x21,0x0C,0x18,0x26,0x60,0x5A,0x38,0x7B,0x56,0x42,
-0xE0,0xA7,0xFC,0x36,0x84,0x51,0x91,0x20,0x2C,0x76,0x4D,0x43,0x3D,0xC4,0x1D,0x84,
-0x23,0xD0,0xAC,0xD6,0x7C,0x35,0x06,0xCE,0xCD,0x69,0xBD,0x90,0x0D,0xDB,0x6C,0x48,
-0x42,0x1D,0x0E,0xAA,0x42,0x02,0x31,0x00,0x9C,0x3D,0x48,0x39,0x23,0x39,0x58,0x1A,
-0x15,0x12,0x59,0x6A,0x9E,0xEF,0xD5,0x59,0xB2,0x1D,0x52,0x2C,0x99,0x71,0xCD,0xC7,
-0x29,0xDF,0x1B,0x2A,0x61,0x7B,0x71,0xD1,0xDE,0xF3,0xC0,0xE5,0x0D,0x3A,0x4A,0xAA,
-0x2D,0xA7,0xD8,0x86,0x2A,0xDD,0x2E,0x10,
-};
-
-
-/* subject:/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 2006 VeriSign, Inc. - For authorized use only/CN=VeriSign Class 3 Public Primary Certification Authority - G5 */
-/* issuer :/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 2006 VeriSign, Inc. - For authorized use only/CN=VeriSign Class 3 Public Primary Certification Authority - G5 */
-
-
-const unsigned char VeriSign_Class_3_Public_Primary_Certification_Authority___G5_certificate[1239]={
-0x30,0x82,0x04,0xD3,0x30,0x82,0x03,0xBB,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x18,
-0xDA,0xD1,0x9E,0x26,0x7D,0xE8,0xBB,0x4A,0x21,0x58,0xCD,0xCC,0x6B,0x3B,0x4A,0x30,
-0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x81,
-0xCA,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,
-0x30,0x15,0x06,0x03,0x55,0x04,0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,
-0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,
-0x13,0x16,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,
-0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,
-0x0B,0x13,0x31,0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x36,0x20,0x56,0x65,0x72,0x69,
-0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,
-0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,
-0x6F,0x6E,0x6C,0x79,0x31,0x45,0x30,0x43,0x06,0x03,0x55,0x04,0x03,0x13,0x3C,0x56,
-0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x33,0x20,
-0x50,0x75,0x62,0x6C,0x69,0x63,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,
-0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,
-0x68,0x6F,0x72,0x69,0x74,0x79,0x20,0x2D,0x20,0x47,0x35,0x30,0x1E,0x17,0x0D,0x30,
-0x36,0x31,0x31,0x30,0x38,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x36,
-0x30,0x37,0x31,0x36,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0xCA,0x31,0x0B,
-0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,0x30,0x15,0x06,
-0x03,0x55,0x04,0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,
-0x49,0x6E,0x63,0x2E,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,0x13,0x16,0x56,
-0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,0x20,0x4E,0x65,
-0x74,0x77,0x6F,0x72,0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,0x0B,0x13,0x31,
-0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x36,0x20,0x56,0x65,0x72,0x69,0x53,0x69,0x67,
-0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,
-0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,
-0x79,0x31,0x45,0x30,0x43,0x06,0x03,0x55,0x04,0x03,0x13,0x3C,0x56,0x65,0x72,0x69,
-0x53,0x69,0x67,0x6E,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x33,0x20,0x50,0x75,0x62,
-0x6C,0x69,0x63,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,
-0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,
-0x69,0x74,0x79,0x20,0x2D,0x20,0x47,0x35,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,
-0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,
-0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xAF,0x24,0x08,0x08,0x29,0x7A,0x35,
-0x9E,0x60,0x0C,0xAA,0xE7,0x4B,0x3B,0x4E,0xDC,0x7C,0xBC,0x3C,0x45,0x1C,0xBB,0x2B,
-0xE0,0xFE,0x29,0x02,0xF9,0x57,0x08,0xA3,0x64,0x85,0x15,0x27,0xF5,0xF1,0xAD,0xC8,
-0x31,0x89,0x5D,0x22,0xE8,0x2A,0xAA,0xA6,0x42,0xB3,0x8F,0xF8,0xB9,0x55,0xB7,0xB1,
-0xB7,0x4B,0xB3,0xFE,0x8F,0x7E,0x07,0x57,0xEC,0xEF,0x43,0xDB,0x66,0x62,0x15,0x61,
-0xCF,0x60,0x0D,0xA4,0xD8,0xDE,0xF8,0xE0,0xC3,0x62,0x08,0x3D,0x54,0x13,0xEB,0x49,
-0xCA,0x59,0x54,0x85,0x26,0xE5,0x2B,0x8F,0x1B,0x9F,0xEB,0xF5,0xA1,0x91,0xC2,0x33,
-0x49,0xD8,0x43,0x63,0x6A,0x52,0x4B,0xD2,0x8F,0xE8,0x70,0x51,0x4D,0xD1,0x89,0x69,
-0x7B,0xC7,0x70,0xF6,0xB3,0xDC,0x12,0x74,0xDB,0x7B,0x5D,0x4B,0x56,0xD3,0x96,0xBF,
-0x15,0x77,0xA1,0xB0,0xF4,0xA2,0x25,0xF2,0xAF,0x1C,0x92,0x67,0x18,0xE5,0xF4,0x06,
-0x04,0xEF,0x90,0xB9,0xE4,0x00,0xE4,0xDD,0x3A,0xB5,0x19,0xFF,0x02,0xBA,0xF4,0x3C,
-0xEE,0xE0,0x8B,0xEB,0x37,0x8B,0xEC,0xF4,0xD7,0xAC,0xF2,0xF6,0xF0,0x3D,0xAF,0xDD,
-0x75,0x91,0x33,0x19,0x1D,0x1C,0x40,0xCB,0x74,0x24,0x19,0x21,0x93,0xD9,0x14,0xFE,
-0xAC,0x2A,0x52,0xC7,0x8F,0xD5,0x04,0x49,0xE4,0x8D,0x63,0x47,0x88,0x3C,0x69,0x83,
-0xCB,0xFE,0x47,0xBD,0x2B,0x7E,0x4F,0xC5,0x95,0xAE,0x0E,0x9D,0xD4,0xD1,0x43,0xC0,
-0x67,0x73,0xE3,0x14,0x08,0x7E,0xE5,0x3F,0x9F,0x73,0xB8,0x33,0x0A,0xCF,0x5D,0x3F,
-0x34,0x87,0x96,0x8A,0xEE,0x53,0xE8,0x25,0x15,0x02,0x03,0x01,0x00,0x01,0xA3,0x81,
-0xB2,0x30,0x81,0xAF,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,
-0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,
-0x04,0x03,0x02,0x01,0x06,0x30,0x6D,0x06,0x08,0x2B,0x06,0x01,0x05,0x05,0x07,0x01,
-0x0C,0x04,0x61,0x30,0x5F,0xA1,0x5D,0xA0,0x5B,0x30,0x59,0x30,0x57,0x30,0x55,0x16,
-0x09,0x69,0x6D,0x61,0x67,0x65,0x2F,0x67,0x69,0x66,0x30,0x21,0x30,0x1F,0x30,0x07,
-0x06,0x05,0x2B,0x0E,0x03,0x02,0x1A,0x04,0x14,0x8F,0xE5,0xD3,0x1A,0x86,0xAC,0x8D,
-0x8E,0x6B,0xC3,0xCF,0x80,0x6A,0xD4,0x48,0x18,0x2C,0x7B,0x19,0x2E,0x30,0x25,0x16,
-0x23,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x6C,0x6F,0x67,0x6F,0x2E,0x76,0x65,0x72,
-0x69,0x73,0x69,0x67,0x6E,0x2E,0x63,0x6F,0x6D,0x2F,0x76,0x73,0x6C,0x6F,0x67,0x6F,
-0x2E,0x67,0x69,0x66,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x7F,
-0xD3,0x65,0xA7,0xC2,0xDD,0xEC,0xBB,0xF0,0x30,0x09,0xF3,0x43,0x39,0xFA,0x02,0xAF,
-0x33,0x31,0x33,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,
-0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x93,0x24,0x4A,0x30,0x5F,0x62,0xCF,0xD8,0x1A,
-0x98,0x2F,0x3D,0xEA,0xDC,0x99,0x2D,0xBD,0x77,0xF6,0xA5,0x79,0x22,0x38,0xEC,0xC4,
-0xA7,0xA0,0x78,0x12,0xAD,0x62,0x0E,0x45,0x70,0x64,0xC5,0xE7,0x97,0x66,0x2D,0x98,
-0x09,0x7E,0x5F,0xAF,0xD6,0xCC,0x28,0x65,0xF2,0x01,0xAA,0x08,0x1A,0x47,0xDE,0xF9,
-0xF9,0x7C,0x92,0x5A,0x08,0x69,0x20,0x0D,0xD9,0x3E,0x6D,0x6E,0x3C,0x0D,0x6E,0xD8,
-0xE6,0x06,0x91,0x40,0x18,0xB9,0xF8,0xC1,0xED,0xDF,0xDB,0x41,0xAA,0xE0,0x96,0x20,
-0xC9,0xCD,0x64,0x15,0x38,0x81,0xC9,0x94,0xEE,0xA2,0x84,0x29,0x0B,0x13,0x6F,0x8E,
-0xDB,0x0C,0xDD,0x25,0x02,0xDB,0xA4,0x8B,0x19,0x44,0xD2,0x41,0x7A,0x05,0x69,0x4A,
-0x58,0x4F,0x60,0xCA,0x7E,0x82,0x6A,0x0B,0x02,0xAA,0x25,0x17,0x39,0xB5,0xDB,0x7F,
-0xE7,0x84,0x65,0x2A,0x95,0x8A,0xBD,0x86,0xDE,0x5E,0x81,0x16,0x83,0x2D,0x10,0xCC,
-0xDE,0xFD,0xA8,0x82,0x2A,0x6D,0x28,0x1F,0x0D,0x0B,0xC4,0xE5,0xE7,0x1A,0x26,0x19,
-0xE1,0xF4,0x11,0x6F,0x10,0xB5,0x95,0xFC,0xE7,0x42,0x05,0x32,0xDB,0xCE,0x9D,0x51,
-0x5E,0x28,0xB6,0x9E,0x85,0xD3,0x5B,0xEF,0xA5,0x7D,0x45,0x40,0x72,0x8E,0xB7,0x0E,
-0x6B,0x0E,0x06,0xFB,0x33,0x35,0x48,0x71,0xB8,0x9D,0x27,0x8B,0xC4,0x65,0x5F,0x0D,
-0x86,0x76,0x9C,0x44,0x7A,0xF6,0x95,0x5C,0xF6,0x5D,0x32,0x08,0x33,0xA4,0x54,0xB6,
-0x18,0x3F,0x68,0x5C,0xF2,0x42,0x4A,0x85,0x38,0x54,0x83,0x5F,0xD1,0xE8,0x2C,0xF2,
-0xAC,0x11,0xD6,0xA8,0xED,0x63,0x6A,
-};
-
-
-/* subject:/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 1999 VeriSign, Inc. - For authorized use only/CN=VeriSign Class 4 Public Primary Certification Authority - G3 */
-/* issuer :/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 1999 VeriSign, Inc. - For authorized use only/CN=VeriSign Class 4 Public Primary Certification Authority - G3 */
-
-
-const unsigned char Verisign_Class_4_Public_Primary_Certification_Authority___G3_certificate[1054]={
-0x30,0x82,0x04,0x1A,0x30,0x82,0x03,0x02,0x02,0x11,0x00,0xEC,0xA0,0xA7,0x8B,0x6E,
-0x75,0x6A,0x01,0xCF,0xC4,0x7C,0xCC,0x2F,0x94,0x5E,0xD7,0x30,0x0D,0x06,0x09,0x2A,
-0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x81,0xCA,0x31,0x0B,0x30,
-0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,0x30,0x15,0x06,0x03,
-0x55,0x04,0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,
-0x6E,0x63,0x2E,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,0x13,0x16,0x56,0x65,
-0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,0x20,0x4E,0x65,0x74,
-0x77,0x6F,0x72,0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,0x0B,0x13,0x31,0x28,
-0x63,0x29,0x20,0x31,0x39,0x39,0x39,0x20,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,
-0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,
-0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,
-0x31,0x45,0x30,0x43,0x06,0x03,0x55,0x04,0x03,0x13,0x3C,0x56,0x65,0x72,0x69,0x53,
-0x69,0x67,0x6E,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x34,0x20,0x50,0x75,0x62,0x6C,
-0x69,0x63,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,
-0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,
-0x74,0x79,0x20,0x2D,0x20,0x47,0x33,0x30,0x1E,0x17,0x0D,0x39,0x39,0x31,0x30,0x30,
-0x31,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x36,0x30,0x37,0x31,0x36,
-0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0xCA,0x31,0x0B,0x30,0x09,0x06,0x03,
-0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,0x30,0x15,0x06,0x03,0x55,0x04,0x0A,
-0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E,
-0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,0x13,0x16,0x56,0x65,0x72,0x69,0x53,
-0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,
-0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,0x0B,0x13,0x31,0x28,0x63,0x29,0x20,
-0x31,0x39,0x39,0x39,0x20,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,
-0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72,
-0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x45,0x30,
-0x43,0x06,0x03,0x55,0x04,0x03,0x13,0x3C,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,
-0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x34,0x20,0x50,0x75,0x62,0x6C,0x69,0x63,0x20,
-0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,
-0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20,
-0x2D,0x20,0x47,0x33,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,
-0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,
-0x02,0x82,0x01,0x01,0x00,0xAD,0xCB,0xA5,0x11,0x69,0xC6,0x59,0xAB,0xF1,0x8F,0xB5,
-0x19,0x0F,0x56,0xCE,0xCC,0xB5,0x1F,0x20,0xE4,0x9E,0x26,0x25,0x4B,0xE0,0x73,0x65,
-0x89,0x59,0xDE,0xD0,0x83,0xE4,0xF5,0x0F,0xB5,0xBB,0xAD,0xF1,0x7C,0xE8,0x21,0xFC,
-0xE4,0xE8,0x0C,0xEE,0x7C,0x45,0x22,0x19,0x76,0x92,0xB4,0x13,0xB7,0x20,0x5B,0x09,
-0xFA,0x61,0xAE,0xA8,0xF2,0xA5,0x8D,0x85,0xC2,0x2A,0xD6,0xDE,0x66,0x36,0xD2,0x9B,
-0x02,0xF4,0xA8,0x92,0x60,0x7C,0x9C,0x69,0xB4,0x8F,0x24,0x1E,0xD0,0x86,0x52,0xF6,
-0x32,0x9C,0x41,0x58,0x1E,0x22,0xBD,0xCD,0x45,0x62,0x95,0x08,0x6E,0xD0,0x66,0xDD,
-0x53,0xA2,0xCC,0xF0,0x10,0xDC,0x54,0x73,0x8B,0x04,0xA1,0x46,0x33,0x33,0x5C,0x17,
-0x40,0xB9,0x9E,0x4D,0xD3,0xF3,0xBE,0x55,0x83,0xE8,0xB1,0x89,0x8E,0x5A,0x7C,0x9A,
-0x96,0x22,0x90,0x3B,0x88,0x25,0xF2,0xD2,0x53,0x88,0x02,0x0C,0x0B,0x78,0xF2,0xE6,
-0x37,0x17,0x4B,0x30,0x46,0x07,0xE4,0x80,0x6D,0xA6,0xD8,0x96,0x2E,0xE8,0x2C,0xF8,
-0x11,0xB3,0x38,0x0D,0x66,0xA6,0x9B,0xEA,0xC9,0x23,0x5B,0xDB,0x8E,0xE2,0xF3,0x13,
-0x8E,0x1A,0x59,0x2D,0xAA,0x02,0xF0,0xEC,0xA4,0x87,0x66,0xDC,0xC1,0x3F,0xF5,0xD8,
-0xB9,0xF4,0xEC,0x82,0xC6,0xD2,0x3D,0x95,0x1D,0xE5,0xC0,0x4F,0x84,0xC9,0xD9,0xA3,
-0x44,0x28,0x06,0x6A,0xD7,0x45,0xAC,0xF0,0x6B,0x6A,0xEF,0x4E,0x5F,0xF8,0x11,0x82,
-0x1E,0x38,0x63,0x34,0x66,0x50,0xD4,0x3E,0x93,0x73,0xFA,0x30,0xC3,0x66,0xAD,0xFF,
-0x93,0x2D,0x97,0xEF,0x03,0x02,0x03,0x01,0x00,0x01,0x30,0x0D,0x06,0x09,0x2A,0x86,
-0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x8F,0xFA,
-0x25,0x6B,0x4F,0x5B,0xE4,0xA4,0x4E,0x27,0x55,0xAB,0x22,0x15,0x59,0x3C,0xCA,0xB5,
-0x0A,0xD4,0x4A,0xDB,0xAB,0xDD,0xA1,0x5F,0x53,0xC5,0xA0,0x57,0x39,0xC2,0xCE,0x47,
-0x2B,0xBE,0x3A,0xC8,0x56,0xBF,0xC2,0xD9,0x27,0x10,0x3A,0xB1,0x05,0x3C,0xC0,0x77,
-0x31,0xBB,0x3A,0xD3,0x05,0x7B,0x6D,0x9A,0x1C,0x30,0x8C,0x80,0xCB,0x93,0x93,0x2A,
-0x83,0xAB,0x05,0x51,0x82,0x02,0x00,0x11,0x67,0x6B,0xF3,0x88,0x61,0x47,0x5F,0x03,
-0x93,0xD5,0x5B,0x0D,0xE0,0xF1,0xD4,0xA1,0x32,0x35,0x85,0xB2,0x3A,0xDB,0xB0,0x82,
-0xAB,0xD1,0xCB,0x0A,0xBC,0x4F,0x8C,0x5B,0xC5,0x4B,0x00,0x3B,0x1F,0x2A,0x82,0xA6,
-0x7E,0x36,0x85,0xDC,0x7E,0x3C,0x67,0x00,0xB5,0xE4,0x3B,0x52,0xE0,0xA8,0xEB,0x5D,
-0x15,0xF9,0xC6,0x6D,0xF0,0xAD,0x1D,0x0E,0x85,0xB7,0xA9,0x9A,0x73,0x14,0x5A,0x5B,
-0x8F,0x41,0x28,0xC0,0xD5,0xE8,0x2D,0x4D,0xA4,0x5E,0xCD,0xAA,0xD9,0xED,0xCE,0xDC,
-0xD8,0xD5,0x3C,0x42,0x1D,0x17,0xC1,0x12,0x5D,0x45,0x38,0xC3,0x38,0xF3,0xFC,0x85,
-0x2E,0x83,0x46,0x48,0xB2,0xD7,0x20,0x5F,0x92,0x36,0x8F,0xE7,0x79,0x0F,0x98,0x5E,
-0x99,0xE8,0xF0,0xD0,0xA4,0xBB,0xF5,0x53,0xBD,0x2A,0xCE,0x59,0xB0,0xAF,0x6E,0x7F,
-0x6C,0xBB,0xD2,0x1E,0x00,0xB0,0x21,0xED,0xF8,0x41,0x62,0x82,0xB9,0xD8,0xB2,0xC4,
-0xBB,0x46,0x50,0xF3,0x31,0xC5,0x8F,0x01,0xA8,0x74,0xEB,0xF5,0x78,0x27,0xDA,0xE7,
-0xF7,0x66,0x43,0xF3,0x9E,0x83,0x3E,0x20,0xAA,0xC3,0x35,0x60,0x91,0xCE,
-};
-
-
-/* subject:/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 2008 VeriSign, Inc. - For authorized use only/CN=VeriSign Universal Root Certification Authority */
-/* issuer :/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 2008 VeriSign, Inc. - For authorized use only/CN=VeriSign Universal Root Certification Authority */
-
-
-const unsigned char VeriSign_Universal_Root_Certification_Authority_certificate[1213]={
-0x30,0x82,0x04,0xB9,0x30,0x82,0x03,0xA1,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x40,
-0x1A,0xC4,0x64,0x21,0xB3,0x13,0x21,0x03,0x0E,0xBB,0xE4,0x12,0x1A,0xC5,0x1D,0x30,
-0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x30,0x81,
-0xBD,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,
-0x30,0x15,0x06,0x03,0x55,0x04,0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,
-0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,
-0x13,0x16,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,
-0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,
-0x0B,0x13,0x31,0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x38,0x20,0x56,0x65,0x72,0x69,
-0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,
-0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,
-0x6F,0x6E,0x6C,0x79,0x31,0x38,0x30,0x36,0x06,0x03,0x55,0x04,0x03,0x13,0x2F,0x56,
-0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x55,0x6E,0x69,0x76,0x65,0x72,0x73,0x61,
-0x6C,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,
-0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x1E,
-0x17,0x0D,0x30,0x38,0x30,0x34,0x30,0x32,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,
-0x0D,0x33,0x37,0x31,0x32,0x30,0x31,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,
-0xBD,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,
-0x30,0x15,0x06,0x03,0x55,0x04,0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,
-0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,
-0x13,0x16,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,
-0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,
-0x0B,0x13,0x31,0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x38,0x20,0x56,0x65,0x72,0x69,
-0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,
-0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,
-0x6F,0x6E,0x6C,0x79,0x31,0x38,0x30,0x36,0x06,0x03,0x55,0x04,0x03,0x13,0x2F,0x56,
-0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x55,0x6E,0x69,0x76,0x65,0x72,0x73,0x61,
-0x6C,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,
-0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x82,
-0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,
-0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xC7,
-0x61,0x37,0x5E,0xB1,0x01,0x34,0xDB,0x62,0xD7,0x15,0x9B,0xFF,0x58,0x5A,0x8C,0x23,
-0x23,0xD6,0x60,0x8E,0x91,0xD7,0x90,0x98,0x83,0x7A,0xE6,0x58,0x19,0x38,0x8C,0xC5,
-0xF6,0xE5,0x64,0x85,0xB4,0xA2,0x71,0xFB,0xED,0xBD,0xB9,0xDA,0xCD,0x4D,0x00,0xB4,
-0xC8,0x2D,0x73,0xA5,0xC7,0x69,0x71,0x95,0x1F,0x39,0x3C,0xB2,0x44,0x07,0x9C,0xE8,
-0x0E,0xFA,0x4D,0x4A,0xC4,0x21,0xDF,0x29,0x61,0x8F,0x32,0x22,0x61,0x82,0xC5,0x87,
-0x1F,0x6E,0x8C,0x7C,0x5F,0x16,0x20,0x51,0x44,0xD1,0x70,0x4F,0x57,0xEA,0xE3,0x1C,
-0xE3,0xCC,0x79,0xEE,0x58,0xD8,0x0E,0xC2,0xB3,0x45,0x93,0xC0,0x2C,0xE7,0x9A,0x17,
-0x2B,0x7B,0x00,0x37,0x7A,0x41,0x33,0x78,0xE1,0x33,0xE2,0xF3,0x10,0x1A,0x7F,0x87,
-0x2C,0xBE,0xF6,0xF5,0xF7,0x42,0xE2,0xE5,0xBF,0x87,0x62,0x89,0x5F,0x00,0x4B,0xDF,
-0xC5,0xDD,0xE4,0x75,0x44,0x32,0x41,0x3A,0x1E,0x71,0x6E,0x69,0xCB,0x0B,0x75,0x46,
-0x08,0xD1,0xCA,0xD2,0x2B,0x95,0xD0,0xCF,0xFB,0xB9,0x40,0x6B,0x64,0x8C,0x57,0x4D,
-0xFC,0x13,0x11,0x79,0x84,0xED,0x5E,0x54,0xF6,0x34,0x9F,0x08,0x01,0xF3,0x10,0x25,
-0x06,0x17,0x4A,0xDA,0xF1,0x1D,0x7A,0x66,0x6B,0x98,0x60,0x66,0xA4,0xD9,0xEF,0xD2,
-0x2E,0x82,0xF1,0xF0,0xEF,0x09,0xEA,0x44,0xC9,0x15,0x6A,0xE2,0x03,0x6E,0x33,0xD3,
-0xAC,0x9F,0x55,0x00,0xC7,0xF6,0x08,0x6A,0x94,0xB9,0x5F,0xDC,0xE0,0x33,0xF1,0x84,
-0x60,0xF9,0x5B,0x27,0x11,0xB4,0xFC,0x16,0xF2,0xBB,0x56,0x6A,0x80,0x25,0x8D,0x02,
-0x03,0x01,0x00,0x01,0xA3,0x81,0xB2,0x30,0x81,0xAF,0x30,0x0F,0x06,0x03,0x55,0x1D,
-0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,
-0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x6D,0x06,0x08,0x2B,
-0x06,0x01,0x05,0x05,0x07,0x01,0x0C,0x04,0x61,0x30,0x5F,0xA1,0x5D,0xA0,0x5B,0x30,
-0x59,0x30,0x57,0x30,0x55,0x16,0x09,0x69,0x6D,0x61,0x67,0x65,0x2F,0x67,0x69,0x66,
-0x30,0x21,0x30,0x1F,0x30,0x07,0x06,0x05,0x2B,0x0E,0x03,0x02,0x1A,0x04,0x14,0x8F,
-0xE5,0xD3,0x1A,0x86,0xAC,0x8D,0x8E,0x6B,0xC3,0xCF,0x80,0x6A,0xD4,0x48,0x18,0x2C,
-0x7B,0x19,0x2E,0x30,0x25,0x16,0x23,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x6C,0x6F,
-0x67,0x6F,0x2E,0x76,0x65,0x72,0x69,0x73,0x69,0x67,0x6E,0x2E,0x63,0x6F,0x6D,0x2F,
-0x76,0x73,0x6C,0x6F,0x67,0x6F,0x2E,0x67,0x69,0x66,0x30,0x1D,0x06,0x03,0x55,0x1D,
-0x0E,0x04,0x16,0x04,0x14,0xB6,0x77,0xFA,0x69,0x48,0x47,0x9F,0x53,0x12,0xD5,0xC2,
-0xEA,0x07,0x32,0x76,0x07,0xD1,0x97,0x07,0x19,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,
-0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x4A,0xF8,0xF8,
-0xB0,0x03,0xE6,0x2C,0x67,0x7B,0xE4,0x94,0x77,0x63,0xCC,0x6E,0x4C,0xF9,0x7D,0x0E,
-0x0D,0xDC,0xC8,0xB9,0x35,0xB9,0x70,0x4F,0x63,0xFA,0x24,0xFA,0x6C,0x83,0x8C,0x47,
-0x9D,0x3B,0x63,0xF3,0x9A,0xF9,0x76,0x32,0x95,0x91,0xB1,0x77,0xBC,0xAC,0x9A,0xBE,
-0xB1,0xE4,0x31,0x21,0xC6,0x81,0x95,0x56,0x5A,0x0E,0xB1,0xC2,0xD4,0xB1,0xA6,0x59,
-0xAC,0xF1,0x63,0xCB,0xB8,0x4C,0x1D,0x59,0x90,0x4A,0xEF,0x90,0x16,0x28,0x1F,0x5A,
-0xAE,0x10,0xFB,0x81,0x50,0x38,0x0C,0x6C,0xCC,0xF1,0x3D,0xC3,0xF5,0x63,0xE3,0xB3,
-0xE3,0x21,0xC9,0x24,0x39,0xE9,0xFD,0x15,0x66,0x46,0xF4,0x1B,0x11,0xD0,0x4D,0x73,
-0xA3,0x7D,0x46,0xF9,0x3D,0xED,0xA8,0x5F,0x62,0xD4,0xF1,0x3F,0xF8,0xE0,0x74,0x57,
-0x2B,0x18,0x9D,0x81,0xB4,0xC4,0x28,0xDA,0x94,0x97,0xA5,0x70,0xEB,0xAC,0x1D,0xBE,
-0x07,0x11,0xF0,0xD5,0xDB,0xDD,0xE5,0x8C,0xF0,0xD5,0x32,0xB0,0x83,0xE6,0x57,0xE2,
-0x8F,0xBF,0xBE,0xA1,0xAA,0xBF,0x3D,0x1D,0xB5,0xD4,0x38,0xEA,0xD7,0xB0,0x5C,0x3A,
-0x4F,0x6A,0x3F,0x8F,0xC0,0x66,0x6C,0x63,0xAA,0xE9,0xD9,0xA4,0x16,0xF4,0x81,0xD1,
-0x95,0x14,0x0E,0x7D,0xCD,0x95,0x34,0xD9,0xD2,0x8F,0x70,0x73,0x81,0x7B,0x9C,0x7E,
-0xBD,0x98,0x61,0xD8,0x45,0x87,0x98,0x90,0xC5,0xEB,0x86,0x30,0xC6,0x35,0xBF,0xF0,
-0xFF,0xC3,0x55,0x88,0x83,0x4B,0xEF,0x05,0x92,0x06,0x71,0xF2,0xB8,0x98,0x93,0xB7,
-0xEC,0xCD,0x82,0x61,0xF1,0x38,0xE6,0x4F,0x97,0x98,0x2A,0x5A,0x8D,
-};
-
-
-/* subject:/C=US/OU=www.xrampsecurity.com/O=XRamp Security Services Inc/CN=XRamp Global Certification Authority */
-/* issuer :/C=US/OU=www.xrampsecurity.com/O=XRamp Security Services Inc/CN=XRamp Global Certification Authority */
-
-
-const unsigned char XRamp_Global_CA_Root_certificate[1076]={
-0x30,0x82,0x04,0x30,0x30,0x82,0x03,0x18,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x50,
-0x94,0x6C,0xEC,0x18,0xEA,0xD5,0x9C,0x4D,0xD5,0x97,0xEF,0x75,0x8F,0xA0,0xAD,0x30,
-0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x81,
-0x82,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x1E,
-0x30,0x1C,0x06,0x03,0x55,0x04,0x0B,0x13,0x15,0x77,0x77,0x77,0x2E,0x78,0x72,0x61,
-0x6D,0x70,0x73,0x65,0x63,0x75,0x72,0x69,0x74,0x79,0x2E,0x63,0x6F,0x6D,0x31,0x24,
-0x30,0x22,0x06,0x03,0x55,0x04,0x0A,0x13,0x1B,0x58,0x52,0x61,0x6D,0x70,0x20,0x53,
-0x65,0x63,0x75,0x72,0x69,0x74,0x79,0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,
-0x20,0x49,0x6E,0x63,0x31,0x2D,0x30,0x2B,0x06,0x03,0x55,0x04,0x03,0x13,0x24,0x58,
-0x52,0x61,0x6D,0x70,0x20,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x43,0x65,0x72,0x74,
-0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,
-0x69,0x74,0x79,0x30,0x1E,0x17,0x0D,0x30,0x34,0x31,0x31,0x30,0x31,0x31,0x37,0x31,
-0x34,0x30,0x34,0x5A,0x17,0x0D,0x33,0x35,0x30,0x31,0x30,0x31,0x30,0x35,0x33,0x37,
-0x31,0x39,0x5A,0x30,0x81,0x82,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,
-0x02,0x55,0x53,0x31,0x1E,0x30,0x1C,0x06,0x03,0x55,0x04,0x0B,0x13,0x15,0x77,0x77,
-0x77,0x2E,0x78,0x72,0x61,0x6D,0x70,0x73,0x65,0x63,0x75,0x72,0x69,0x74,0x79,0x2E,
-0x63,0x6F,0x6D,0x31,0x24,0x30,0x22,0x06,0x03,0x55,0x04,0x0A,0x13,0x1B,0x58,0x52,
-0x61,0x6D,0x70,0x20,0x53,0x65,0x63,0x75,0x72,0x69,0x74,0x79,0x20,0x53,0x65,0x72,
-0x76,0x69,0x63,0x65,0x73,0x20,0x49,0x6E,0x63,0x31,0x2D,0x30,0x2B,0x06,0x03,0x55,
-0x04,0x03,0x13,0x24,0x58,0x52,0x61,0x6D,0x70,0x20,0x47,0x6C,0x6F,0x62,0x61,0x6C,
-0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,
-0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,
-0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,
-0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0x98,0x24,0x1E,0xBD,0x15,0xB4,0xBA,
-0xDF,0xC7,0x8C,0xA5,0x27,0xB6,0x38,0x0B,0x69,0xF3,0xB6,0x4E,0xA8,0x2C,0x2E,0x21,
-0x1D,0x5C,0x44,0xDF,0x21,0x5D,0x7E,0x23,0x74,0xFE,0x5E,0x7E,0xB4,0x4A,0xB7,0xA6,
-0xAD,0x1F,0xAE,0xE0,0x06,0x16,0xE2,0x9B,0x5B,0xD9,0x67,0x74,0x6B,0x5D,0x80,0x8F,
-0x29,0x9D,0x86,0x1B,0xD9,0x9C,0x0D,0x98,0x6D,0x76,0x10,0x28,0x58,0xE4,0x65,0xB0,
-0x7F,0x4A,0x98,0x79,0x9F,0xE0,0xC3,0x31,0x7E,0x80,0x2B,0xB5,0x8C,0xC0,0x40,0x3B,
-0x11,0x86,0xD0,0xCB,0xA2,0x86,0x36,0x60,0xA4,0xD5,0x30,0x82,0x6D,0xD9,0x6E,0xD0,
-0x0F,0x12,0x04,0x33,0x97,0x5F,0x4F,0x61,0x5A,0xF0,0xE4,0xF9,0x91,0xAB,0xE7,0x1D,
-0x3B,0xBC,0xE8,0xCF,0xF4,0x6B,0x2D,0x34,0x7C,0xE2,0x48,0x61,0x1C,0x8E,0xF3,0x61,
-0x44,0xCC,0x6F,0xA0,0x4A,0xA9,0x94,0xB0,0x4D,0xDA,0xE7,0xA9,0x34,0x7A,0x72,0x38,
-0xA8,0x41,0xCC,0x3C,0x94,0x11,0x7D,0xEB,0xC8,0xA6,0x8C,0xB7,0x86,0xCB,0xCA,0x33,
-0x3B,0xD9,0x3D,0x37,0x8B,0xFB,0x7A,0x3E,0x86,0x2C,0xE7,0x73,0xD7,0x0A,0x57,0xAC,
-0x64,0x9B,0x19,0xEB,0xF4,0x0F,0x04,0x08,0x8A,0xAC,0x03,0x17,0x19,0x64,0xF4,0x5A,
-0x25,0x22,0x8D,0x34,0x2C,0xB2,0xF6,0x68,0x1D,0x12,0x6D,0xD3,0x8A,0x1E,0x14,0xDA,
-0xC4,0x8F,0xA6,0xE2,0x23,0x85,0xD5,0x7A,0x0D,0xBD,0x6A,0xE0,0xE9,0xEC,0xEC,0x17,
-0xBB,0x42,0x1B,0x67,0xAA,0x25,0xED,0x45,0x83,0x21,0xFC,0xC1,0xC9,0x7C,0xD5,0x62,
-0x3E,0xFA,0xF2,0xC5,0x2D,0xD3,0xFD,0xD4,0x65,0x02,0x03,0x01,0x00,0x01,0xA3,0x81,
-0x9F,0x30,0x81,0x9C,0x30,0x13,0x06,0x09,0x2B,0x06,0x01,0x04,0x01,0x82,0x37,0x14,
-0x02,0x04,0x06,0x1E,0x04,0x00,0x43,0x00,0x41,0x30,0x0B,0x06,0x03,0x55,0x1D,0x0F,
-0x04,0x04,0x03,0x02,0x01,0x86,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,
-0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,
-0x04,0x14,0xC6,0x4F,0xA2,0x3D,0x06,0x63,0x84,0x09,0x9C,0xCE,0x62,0xE4,0x04,0xAC,
-0x8D,0x5C,0xB5,0xE9,0xB6,0x1B,0x30,0x36,0x06,0x03,0x55,0x1D,0x1F,0x04,0x2F,0x30,
-0x2D,0x30,0x2B,0xA0,0x29,0xA0,0x27,0x86,0x25,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,
-0x63,0x72,0x6C,0x2E,0x78,0x72,0x61,0x6D,0x70,0x73,0x65,0x63,0x75,0x72,0x69,0x74,
-0x79,0x2E,0x63,0x6F,0x6D,0x2F,0x58,0x47,0x43,0x41,0x2E,0x63,0x72,0x6C,0x30,0x10,
-0x06,0x09,0x2B,0x06,0x01,0x04,0x01,0x82,0x37,0x15,0x01,0x04,0x03,0x02,0x01,0x01,
-0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,
-0x82,0x01,0x01,0x00,0x91,0x15,0x39,0x03,0x01,0x1B,0x67,0xFB,0x4A,0x1C,0xF9,0x0A,
-0x60,0x5B,0xA1,0xDA,0x4D,0x97,0x62,0xF9,0x24,0x53,0x27,0xD7,0x82,0x64,0x4E,0x90,
-0x2E,0xC3,0x49,0x1B,0x2B,0x9A,0xDC,0xFC,0xA8,0x78,0x67,0x35,0xF1,0x1D,0xF0,0x11,
-0xBD,0xB7,0x48,0xE3,0x10,0xF6,0x0D,0xDF,0x3F,0xD2,0xC9,0xB6,0xAA,0x55,0xA4,0x48,
-0xBA,0x02,0xDB,0xDE,0x59,0x2E,0x15,0x5B,0x3B,0x9D,0x16,0x7D,0x47,0xD7,0x37,0xEA,
-0x5F,0x4D,0x76,0x12,0x36,0xBB,0x1F,0xD7,0xA1,0x81,0x04,0x46,0x20,0xA3,0x2C,0x6D,
-0xA9,0x9E,0x01,0x7E,0x3F,0x29,0xCE,0x00,0x93,0xDF,0xFD,0xC9,0x92,0x73,0x89,0x89,
-0x64,0x9E,0xE7,0x2B,0xE4,0x1C,0x91,0x2C,0xD2,0xB9,0xCE,0x7D,0xCE,0x6F,0x31,0x99,
-0xD3,0xE6,0xBE,0xD2,0x1E,0x90,0xF0,0x09,0x14,0x79,0x5C,0x23,0xAB,0x4D,0xD2,0xDA,
-0x21,0x1F,0x4D,0x99,0x79,0x9D,0xE1,0xCF,0x27,0x9F,0x10,0x9B,0x1C,0x88,0x0D,0xB0,
-0x8A,0x64,0x41,0x31,0xB8,0x0E,0x6C,0x90,0x24,0xA4,0x9B,0x5C,0x71,0x8F,0xBA,0xBB,
-0x7E,0x1C,0x1B,0xDB,0x6A,0x80,0x0F,0x21,0xBC,0xE9,0xDB,0xA6,0xB7,0x40,0xF4,0xB2,
-0x8B,0xA9,0xB1,0xE4,0xEF,0x9A,0x1A,0xD0,0x3D,0x69,0x99,0xEE,0xA8,0x28,0xA3,0xE1,
-0x3C,0xB3,0xF0,0xB2,0x11,0x9C,0xCF,0x7C,0x40,0xE6,0xDD,0xE7,0x43,0x7D,0xA2,0xD8,
-0x3A,0xB5,0xA9,0x8D,0xF2,0x34,0x99,0xC4,0xD4,0x10,0xE1,0x06,0xFD,0x09,0x84,0x10,
-0x3B,0xEE,0xC4,0x4C,0xF4,0xEC,0x27,0x7C,0x42,0xC2,0x74,0x7C,0x82,0x8A,0x09,0xC9,
-0xB4,0x03,0x25,0xBC,
-};
-
-
const unsigned char* kSSLCertCertificateList[] = {
- AddTrust_External_Root_certificate,
+ GlobalSign_Root_CA_certificate,
+ USERTrust_RSA_Certification_Authority_certificate,
+ Starfield_Class_2_CA_certificate,
+ Verisign_Class_3_Public_Primary_Certification_Authority___G3_certificate,
+ USERTrust_ECC_Certification_Authority_certificate,
+ GeoTrust_Global_CA_certificate,
+ Starfield_Root_Certificate_Authority___G2_certificate,
+ DigiCert_Global_Root_G3_certificate,
+ thawte_Primary_Root_CA___G2_certificate,
+ VeriSign_Universal_Root_Certification_Authority_certificate,
+ VeriSign_Class_3_Public_Primary_Certification_Authority___G4_certificate,
+ DigiCert_Global_Root_G2_certificate,
AddTrust_Low_Value_Services_Root_certificate,
+ AffirmTrust_Premium_ECC_certificate,
+ Verisign_Class_4_Public_Primary_Certification_Authority___G3_certificate,
+ thawte_Primary_Root_CA_certificate,
AddTrust_Public_Services_Root_certificate,
AddTrust_Qualified_Certificates_Root_certificate,
- AffirmTrust_Commercial_certificate,
- AffirmTrust_Networking_certificate,
- AffirmTrust_Premium_certificate,
- AffirmTrust_Premium_ECC_certificate,
- America_Online_Root_Certification_Authority_1_certificate,
- America_Online_Root_Certification_Authority_2_certificate,
- Baltimore_CyberTrust_Root_certificate,
- Comodo_AAA_Services_root_certificate,
- COMODO_Certification_Authority_certificate,
- COMODO_ECC_Certification_Authority_certificate,
- Comodo_Secure_Services_root_certificate,
- Comodo_Trusted_Services_root_certificate,
- Cybertrust_Global_Root_certificate,
- DigiCert_Assured_ID_Root_CA_certificate,
- DigiCert_Global_Root_CA_certificate,
- DigiCert_High_Assurance_EV_Root_CA_certificate,
- Entrust_net_Premium_2048_Secure_Server_CA_certificate,
- Entrust_net_Secure_Server_CA_certificate,
- Entrust_Root_Certification_Authority_certificate,
- Equifax_Secure_CA_certificate,
- Equifax_Secure_eBusiness_CA_1_certificate,
- Equifax_Secure_eBusiness_CA_2_certificate,
- Equifax_Secure_Global_eBusiness_CA_certificate,
- GeoTrust_Global_CA_certificate,
- GeoTrust_Global_CA_2_certificate,
- GeoTrust_Primary_Certification_Authority_certificate,
- GeoTrust_Primary_Certification_Authority___G2_certificate,
GeoTrust_Primary_Certification_Authority___G3_certificate,
- GeoTrust_Universal_CA_certificate,
GeoTrust_Universal_CA_2_certificate,
- GlobalSign_Root_CA_certificate,
+ Baltimore_CyberTrust_Root_certificate,
GlobalSign_Root_CA___R2_certificate,
GlobalSign_Root_CA___R3_certificate,
+ AffirmTrust_Networking_certificate,
+ AddTrust_External_Root_certificate,
+ thawte_Primary_Root_CA___G3_certificate,
+ DigiCert_Assured_ID_Root_CA_certificate,
Go_Daddy_Class_2_CA_certificate,
+ GeoTrust_Primary_Certification_Authority_certificate,
+ VeriSign_Class_3_Public_Primary_Certification_Authority___G5_certificate,
+ Equifax_Secure_CA_certificate,
+ Entrust_net_Premium_2048_Secure_Server_CA_certificate,
+ DigiCert_Assured_ID_Root_G3_certificate,
+ COMODO_Certification_Authority_certificate,
+ DigiCert_Global_Root_CA_certificate,
+ Comodo_AAA_Services_root_certificate,
+ DigiCert_High_Assurance_EV_Root_CA_certificate,
+ GeoTrust_Universal_CA_certificate,
+ COMODO_ECC_Certification_Authority_certificate,
+ Entrust_Root_Certification_Authority___G2_certificate,
+ DigiCert_Assured_ID_Root_G2_certificate,
+ AffirmTrust_Commercial_certificate,
+ AffirmTrust_Premium_certificate,
Go_Daddy_Root_Certificate_Authority___G2_certificate,
- GTE_CyberTrust_Global_Root_certificate,
- Network_Solutions_Certificate_Authority_certificate,
- RSA_Root_Certificate_1_certificate,
- Starfield_Class_2_CA_certificate,
- Starfield_Root_Certificate_Authority___G2_certificate,
- Starfield_Services_Root_Certificate_Authority___G2_certificate,
- StartCom_Certification_Authority_certificate,
- StartCom_Certification_Authority_G2_certificate,
- TC_TrustCenter_Class_2_CA_II_certificate,
- TC_TrustCenter_Class_3_CA_II_certificate,
+ Comodo_Secure_Services_root_certificate,
+ DigiCert_Trusted_Root_G4_certificate,
+ GlobalSign_ECC_Root_CA___R5_certificate,
+ UTN_USERFirst_Hardware_Root_CA_certificate,
+ GlobalSign_ECC_Root_CA___R4_certificate,
TC_TrustCenter_Universal_CA_I_certificate,
- TC_TrustCenter_Universal_CA_III_certificate,
- Thawte_Premium_Server_CA_certificate,
- thawte_Primary_Root_CA_certificate,
- thawte_Primary_Root_CA___G2_certificate,
- thawte_Primary_Root_CA___G3_certificate,
- Thawte_Server_CA_certificate,
+ Comodo_Trusted_Services_root_certificate,
+ Entrust_Root_Certification_Authority_certificate,
+ TC_TrustCenter_Class_2_CA_II_certificate,
+ Cybertrust_Global_Root_certificate,
+ Entrust_Root_Certification_Authority___EC1_certificate,
+ GeoTrust_Primary_Certification_Authority___G2_certificate,
+ GeoTrust_Global_CA_2_certificate,
+ COMODO_RSA_Certification_Authority_certificate,
UTN_DATACorp_SGC_Root_CA_certificate,
- UTN_USERFirst_Hardware_Root_CA_certificate,
- ValiCert_Class_1_VA_certificate,
- ValiCert_Class_2_VA_certificate,
- Verisign_Class_3_Public_Primary_Certification_Authority_certificate,
- Verisign_Class_3_Public_Primary_Certification_Authority___G2_certificate,
- Verisign_Class_3_Public_Primary_Certification_Authority___G3_certificate,
- VeriSign_Class_3_Public_Primary_Certification_Authority___G4_certificate,
- VeriSign_Class_3_Public_Primary_Certification_Authority___G5_certificate,
- Verisign_Class_4_Public_Primary_Certification_Authority___G3_certificate,
- VeriSign_Universal_Root_Certification_Authority_certificate,
- XRamp_Global_CA_Root_certificate,
};
const size_t kSSLCertCertificateSizeList[] = {
- 1082,
+ 889,
+ 1506,
+ 1043,
+ 1054,
+ 659,
+ 856,
+ 993,
+ 579,
+ 652,
+ 1213,
+ 904,
+ 914,
1052,
+ 514,
+ 1054,
+ 1060,
1049,
1058,
- 848,
- 848,
- 1354,
- 514,
- 936,
- 1448,
- 891,
- 1078,
- 1057,
- 653,
- 1091,
- 1095,
- 933,
- 955,
- 947,
- 969,
- 1120,
- 1244,
- 1173,
- 804,
- 646,
- 804,
- 660,
- 856,
- 874,
- 896,
- 690,
1026,
- 1388,
1392,
- 889,
+ 891,
958,
867,
+ 848,
+ 1082,
+ 1070,
+ 955,
1028,
+ 896,
+ 1239,
+ 804,
+ 1120,
+ 586,
+ 1057,
+ 947,
+ 1078,
969,
- 606,
- 1002,
- 747,
- 1043,
+ 1388,
+ 653,
+ 1090,
+ 922,
+ 848,
+ 1354,
+ 969,
+ 1091,
+ 1428,
+ 546,
+ 1144,
+ 485,
993,
- 1011,
- 1931,
- 1383,
- 1198,
+ 1095,
+ 1173,
1198,
- 993,
- 997,
- 811,
- 1060,
- 652,
- 1070,
- 791,
+ 933,
+ 765,
+ 690,
+ 874,
+ 1500,
1122,
- 1144,
- 747,
- 747,
- 576,
- 774,
- 1054,
- 904,
- 1239,
- 1054,
- 1213,
- 1076,
};
-} // namespace rtc
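
The two tables above are parallel arrays: kSSLCertCertificateList[i] points at a DER-encoded root certificate and kSSLCertCertificateSizeList[i] gives that certificate's length in bytes, so the reordered pointer entries and the reordered sizes must stay in lockstep. A minimal sketch of how a consumer could walk the pair and parse each blob; the LoadRootCerts helper is hypothetical, and it assumes OpenSSL's d2i_X509/X509_STORE API, which this tree already builds against when SSL_USE_OPENSSL is set:

    // Sketch: iterate the parallel certificate/size arrays and add each
    // parsed root to an OpenSSL trust store. Hypothetical helper.
    #include <openssl/x509.h>

    static void LoadRootCerts(X509_STORE* store) {
      const size_t num_certs = sizeof(kSSLCertCertificateList) /
                               sizeof(kSSLCertCertificateList[0]);
      for (size_t i = 0; i < num_certs; ++i) {
        // d2i_X509 advances its input pointer, so use a local copy.
        const unsigned char* der = kSSLCertCertificateList[i];
        X509* cert = d2i_X509(
            NULL, &der, static_cast<long>(kSSLCertCertificateSizeList[i]));
        if (cert != NULL) {
          X509_STORE_add_cert(store, cert);
          X509_free(cert);  // The store keeps its own reference.
        }
      }
    }
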
diff --git a/webrtc/base/sslstreamadapter.cc b/webrtc/base/sslstreamadapter.cc
index b9ba9d3f2a..a2cff3e448 100644
--- a/webrtc/base/sslstreamadapter.cc
+++ b/webrtc/base/sslstreamadapter.cc
@@ -30,12 +30,20 @@ namespace rtc {
const char CS_AES_CM_128_HMAC_SHA1_80[] = "AES_CM_128_HMAC_SHA1_80";
const char CS_AES_CM_128_HMAC_SHA1_32[] = "AES_CM_128_HMAC_SHA1_32";
-int GetSrtpCryptoSuiteFromName(const std::string& cipher) {
- if (cipher == CS_AES_CM_128_HMAC_SHA1_32)
+std::string SrtpCryptoSuiteToName(int crypto_suite) {
+ if (crypto_suite == SRTP_AES128_CM_SHA1_32)
+ return CS_AES_CM_128_HMAC_SHA1_32;
+ if (crypto_suite == SRTP_AES128_CM_SHA1_80)
+ return CS_AES_CM_128_HMAC_SHA1_80;
+ return std::string();
+}
+
+int SrtpCryptoSuiteFromName(const std::string& crypto_suite) {
+ if (crypto_suite == CS_AES_CM_128_HMAC_SHA1_32)
return SRTP_AES128_CM_SHA1_32;
- if (cipher == CS_AES_CM_128_HMAC_SHA1_80)
+ if (crypto_suite == CS_AES_CM_128_HMAC_SHA1_80)
return SRTP_AES128_CM_SHA1_80;
- return 0;
+ return SRTP_INVALID_CRYPTO_SUITE;
}
SSLStreamAdapter* SSLStreamAdapter::Create(StreamInterface* stream) {
@@ -46,7 +54,7 @@ SSLStreamAdapter* SSLStreamAdapter::Create(StreamInterface* stream) {
#endif // SSL_USE_OPENSSL
}
-bool SSLStreamAdapter::GetSslCipherSuite(int* cipher) {
+bool SSLStreamAdapter::GetSslCipherSuite(int* cipher_suite) {
return false;
}
@@ -59,12 +67,12 @@ bool SSLStreamAdapter::ExportKeyingMaterial(const std::string& label,
return false; // Default is unsupported
}
-bool SSLStreamAdapter::SetDtlsSrtpCiphers(
- const std::vector<std::string>& ciphers) {
+bool SSLStreamAdapter::SetDtlsSrtpCryptoSuites(
+ const std::vector<int>& crypto_suites) {
return false;
}
-bool SSLStreamAdapter::GetDtlsSrtpCipher(std::string* cipher) {
+bool SSLStreamAdapter::GetDtlsSrtpCryptoSuite(int* crypto_suite) {
return false;
}
@@ -83,8 +91,8 @@ int SSLStreamAdapter::GetDefaultSslCipherForTest(SSLProtocolVersion version,
return OpenSSLStreamAdapter::GetDefaultSslCipherForTest(version, key_type);
}
-std::string SSLStreamAdapter::GetSslCipherSuiteName(int cipher) {
- return OpenSSLStreamAdapter::GetSslCipherSuiteName(cipher);
+std::string SSLStreamAdapter::SslCipherSuiteToName(int cipher_suite) {
+ return OpenSSLStreamAdapter::SslCipherSuiteToName(cipher_suite);
}
#endif // SSL_USE_OPENSSL
diff --git a/webrtc/base/sslstreamadapter.h b/webrtc/base/sslstreamadapter.h
index 65a7729d16..c57056b14a 100644
--- a/webrtc/base/sslstreamadapter.h
+++ b/webrtc/base/sslstreamadapter.h
@@ -19,7 +19,11 @@
namespace rtc {
+// Constants for SSL profile.
+const int TLS_NULL_WITH_NULL_NULL = 0;
+
// Constants for SRTP profiles.
+const int SRTP_INVALID_CRYPTO_SUITE = 0;
const int SRTP_AES128_CM_SHA1_80 = 0x0001;
const int SRTP_AES128_CM_SHA1_32 = 0x0002;
@@ -31,10 +35,13 @@ extern const char CS_AES_CM_128_HMAC_SHA1_80[];
// 128-bit AES with 32-bit SHA-1 HMAC.
extern const char CS_AES_CM_128_HMAC_SHA1_32[];
-// Returns the DTLS-SRTP protection profile ID, as defined in
-// https://tools.ietf.org/html/rfc5764#section-4.1.2, for the given SRTP
-// Crypto-suite, as defined in https://tools.ietf.org/html/rfc4568#section-6.2
-int GetSrtpCryptoSuiteFromName(const std::string& cipher_rfc_name);
+// Given the DTLS-SRTP protection profile ID, as defined in
+// https://tools.ietf.org/html/rfc5764#section-4.1.2, return the SRTP
+// crypto-suite name, as defined in
+// https://tools.ietf.org/html/rfc4568#section-6.2.
+std::string SrtpCryptoSuiteToName(int crypto_suite);
+
+// The reverse of the above conversion.
+int SrtpCryptoSuiteFromName(const std::string& crypto_suite);
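
A short round-trip sketch of the two helpers, using only the constants and declarations shown in this header:

    // Sketch: round-trip between the RFC 5764 profile ID and the RFC 4568
    // crypto-suite name using the helpers declared above.
    int suite = rtc::SrtpCryptoSuiteFromName(rtc::CS_AES_CM_128_HMAC_SHA1_80);
    // suite == rtc::SRTP_AES128_CM_SHA1_80 (0x0001).
    std::string name = rtc::SrtpCryptoSuiteToName(suite);
    // name == "AES_CM_128_HMAC_SHA1_80".
    int bad = rtc::SrtpCryptoSuiteFromName("NOT_A_SUITE");
    // bad == rtc::SRTP_INVALID_CRYPTO_SUITE; unknown IDs likewise map to "".
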
// SSLStreamAdapter : A StreamInterfaceAdapter that does SSL/TLS.
// After SSL has been started, the stream will only open on successful
@@ -152,7 +159,7 @@ class SSLStreamAdapter : public StreamAdapterInterface {
// Retrieves the IANA registration id of the cipher suite used for the
// connection (e.g. 0x2F for "TLS_RSA_WITH_AES_128_CBC_SHA").
- virtual bool GetSslCipherSuite(int* cipher);
+ virtual bool GetSslCipherSuite(int* cipher_suite);
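
Combined with SslCipherSuiteToName further down, the renamed accessor lets callers log a readable suite name; a brief sketch, where ssl_stream is assumed to be an established rtc::SSLStreamAdapter*:

    // Sketch: fetch the negotiated IANA cipher suite ID and log its name,
    // e.g. 0x2F -> "TLS_RSA_WITH_AES_128_CBC_SHA".
    int cipher_suite = 0;
    if (ssl_stream->GetSslCipherSuite(&cipher_suite)) {
      LOG(LS_INFO) << "Cipher suite: "
                   << rtc::SSLStreamAdapter::SslCipherSuiteToName(cipher_suite);
    }
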
// Key Exporter interface from RFC 5705
// Arguments are:
@@ -174,8 +181,8 @@ class SSLStreamAdapter : public StreamAdapterInterface {
size_t result_len);
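
The hunk elides the exporter's full parameter list; the sketch below assumes a (label, context, context_len, use_context, result, result_len) ordering, suggested by the kExporterLabel/kExporterContext constants in the unit test, so treat it as illustrative only:

    // Sketch: derive 16 bytes of keying material per RFC 5705. The argument
    // ordering here is an assumption; the declaration is truncated above.
    uint8_t exported[16];
    bool ok = ssl_stream->ExportKeyingMaterial(
        kExporterLabel, kExporterContext, kExporterContextLen,
        true /* use_context */, exported, sizeof(exported));
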
// DTLS-SRTP interface
- virtual bool SetDtlsSrtpCiphers(const std::vector<std::string>& ciphers);
- virtual bool GetDtlsSrtpCipher(std::string* cipher);
+ virtual bool SetDtlsSrtpCryptoSuites(const std::vector<int>& crypto_suites);
+ virtual bool GetDtlsSrtpCryptoSuite(int* crypto_suite);
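
With the string-based setters gone, callers pass the integer constants directly; a minimal sketch of the renamed DTLS-SRTP interface, again assuming an ssl_stream adapter:

    // Sketch: offer both SRTP crypto suites before the DTLS handshake, then
    // read back whichever one was negotiated.
    std::vector<int> suites;
    suites.push_back(rtc::SRTP_AES128_CM_SHA1_80);
    suites.push_back(rtc::SRTP_AES128_CM_SHA1_32);
    ssl_stream->SetDtlsSrtpCryptoSuites(suites);
    // ... complete the DTLS handshake ...
    int selected = rtc::SRTP_INVALID_CRYPTO_SUITE;
    if (ssl_stream->GetDtlsSrtpCryptoSuite(&selected)) {
      LOG(LS_INFO) << "DTLS-SRTP: " << rtc::SrtpCryptoSuiteToName(selected);
    }
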
// Capabilities testing
static bool HaveDtls();
@@ -191,7 +198,7 @@ class SSLStreamAdapter : public StreamAdapterInterface {
// TODO(guoweis): Move this away from a static class method. Currently this is
// introduced such that any caller could depend on sslstreamadapter.h without
// depending on specific SSL implementation.
- static std::string GetSslCipherSuiteName(int cipher);
+ static std::string SslCipherSuiteToName(int cipher_suite);
private:
// If true, the server certificate need not match the configured
diff --git a/webrtc/base/sslstreamadapter_unittest.cc b/webrtc/base/sslstreamadapter_unittest.cc
index a3e8d9c637..1ed06c3154 100644
--- a/webrtc/base/sslstreamadapter_unittest.cc
+++ b/webrtc/base/sslstreamadapter_unittest.cc
@@ -13,6 +13,7 @@
#include <set>
#include <string>
+#include "webrtc/base/bufferqueue.h"
#include "webrtc/base/gunit.h"
#include "webrtc/base/helpers.h"
#include "webrtc/base/scoped_ptr.h"
@@ -21,7 +22,6 @@
#include "webrtc/base/sslidentity.h"
#include "webrtc/base/sslstreamadapter.h"
#include "webrtc/base/stream.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
using ::testing::WithParamInterface;
using ::testing::Values;
@@ -29,8 +29,6 @@ using ::testing::Combine;
using ::testing::tuple;
static const int kBlockSize = 4096;
-static const char kAES_CM_HMAC_SHA1_80[] = "AES_CM_128_HMAC_SHA1_80";
-static const char kAES_CM_HMAC_SHA1_32[] = "AES_CM_128_HMAC_SHA1_32";
static const char kExporterLabel[] = "label";
static const unsigned char kExporterContext[] = "context";
static int kExporterContextLen = sizeof(kExporterContext);
@@ -74,26 +72,26 @@ static const char kCERT_PEM[] =
class SSLStreamAdapterTestBase;
-class SSLDummyStream : public rtc::StreamInterface,
- public sigslot::has_slots<> {
+class SSLDummyStreamBase : public rtc::StreamInterface,
+ public sigslot::has_slots<> {
public:
- explicit SSLDummyStream(SSLStreamAdapterTestBase *test,
- const std::string &side,
- rtc::FifoBuffer *in,
- rtc::FifoBuffer *out) :
- test_(test),
+ SSLDummyStreamBase(SSLStreamAdapterTestBase* test,
+ const std::string &side,
+ rtc::StreamInterface* in,
+ rtc::StreamInterface* out) :
+ test_base_(test),
side_(side),
in_(in),
out_(out),
first_packet_(true) {
- in_->SignalEvent.connect(this, &SSLDummyStream::OnEventIn);
- out_->SignalEvent.connect(this, &SSLDummyStream::OnEventOut);
+ in_->SignalEvent.connect(this, &SSLDummyStreamBase::OnEventIn);
+ out_->SignalEvent.connect(this, &SSLDummyStreamBase::OnEventOut);
}
- virtual rtc::StreamState GetState() const { return rtc::SS_OPEN; }
+ rtc::StreamState GetState() const override { return rtc::SS_OPEN; }
- virtual rtc::StreamResult Read(void* buffer, size_t buffer_len,
- size_t* read, int* error) {
+ rtc::StreamResult Read(void* buffer, size_t buffer_len,
+ size_t* read, int* error) override {
rtc::StreamResult r;
r = in_->Read(buffer, buffer_len, read, error);
@@ -111,22 +109,20 @@ class SSLDummyStream : public rtc::StreamInterface,
}
// Catch readability events on in and pass them up.
- virtual void OnEventIn(rtc::StreamInterface *stream, int sig,
- int err) {
+ void OnEventIn(rtc::StreamInterface* stream, int sig, int err) {
int mask = (rtc::SE_READ | rtc::SE_CLOSE);
if (sig & mask) {
- LOG(LS_INFO) << "SSLDummyStream::OnEvent side=" << side_ << " sig="
+ LOG(LS_INFO) << "SSLDummyStreamBase::OnEvent side=" << side_ << " sig="
<< sig << " forwarding upward";
PostEvent(sig & mask, 0);
}
}
// Catch writeability events on out and pass them up.
- virtual void OnEventOut(rtc::StreamInterface *stream, int sig,
- int err) {
+ void OnEventOut(rtc::StreamInterface* stream, int sig, int err) {
if (sig & rtc::SE_WRITE) {
- LOG(LS_INFO) << "SSLDummyStream::OnEvent side=" << side_ << " sig="
+ LOG(LS_INFO) << "SSLDummyStreamBase::OnEvent side=" << side_ << " sig="
<< sig << " forwarding upward";
PostEvent(sig & rtc::SE_WRITE, 0);
@@ -135,28 +131,92 @@ class SSLDummyStream : public rtc::StreamInterface,
// Write to the outgoing FifoBuffer
rtc::StreamResult WriteData(const void* data, size_t data_len,
- size_t* written, int* error) {
+ size_t* written, int* error) {
return out_->Write(data, data_len, written, error);
}
- // Defined later
- virtual rtc::StreamResult Write(const void* data, size_t data_len,
- size_t* written, int* error);
+ rtc::StreamResult Write(const void* data, size_t data_len,
+ size_t* written, int* error) override;
- virtual void Close() {
+ void Close() override {
LOG(LS_INFO) << "Closing outbound stream";
out_->Close();
}
- private:
- SSLStreamAdapterTestBase *test_;
+ protected:
+ SSLStreamAdapterTestBase* test_base_;
const std::string side_;
- rtc::FifoBuffer *in_;
- rtc::FifoBuffer *out_;
+ rtc::StreamInterface* in_;
+ rtc::StreamInterface* out_;
bool first_packet_;
};
+class SSLDummyStreamTLS : public SSLDummyStreamBase {
+ public:
+ SSLDummyStreamTLS(SSLStreamAdapterTestBase* test,
+ const std::string& side,
+ rtc::FifoBuffer* in,
+ rtc::FifoBuffer* out) :
+ SSLDummyStreamBase(test, side, in, out) {
+ }
+};
+
+class BufferQueueStream : public rtc::BufferQueue,
+ public rtc::StreamInterface {
+ public:
+ BufferQueueStream(size_t capacity, size_t default_size)
+ : rtc::BufferQueue(capacity, default_size) {
+ }
+
+ // Implementation of abstract StreamInterface methods.
+
+ // A buffer queue stream is always "open".
+ rtc::StreamState GetState() const override { return rtc::SS_OPEN; }
+
+ // Reading a buffer queue stream will either succeed or block.
+ rtc::StreamResult Read(void* buffer, size_t buffer_len,
+ size_t* read, int* error) override {
+ if (!ReadFront(buffer, buffer_len, read)) {
+ return rtc::SR_BLOCK;
+ }
+ return rtc::SR_SUCCESS;
+ }
+
+ // Writing to a buffer queue stream will either succeed or block.
+ rtc::StreamResult Write(const void* data, size_t data_len,
+ size_t* written, int* error) override {
+ if (!WriteBack(data, data_len, written)) {
+ return rtc::SR_BLOCK;
+ }
+ return rtc::SR_SUCCESS;
+ }
+
+  // A buffer queue stream cannot be closed.
+ void Close() override {}
+
+ protected:
+ void NotifyReadableForTest() override {
+ PostEvent(rtc::SE_READ, 0);
+ }
+
+ void NotifyWritableForTest() override {
+ PostEvent(rtc::SE_WRITE, 0);
+ }
+};
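BufferQueueStream adapts rtc::BufferQueue's packet queue to the StreamInterface contract: a read or write either completes or returns rtc::SR_BLOCK, and the Notify*ForTest overrides raise SE_READ/SE_WRITE so a blocked peer knows to retry. A minimal usage sketch, assuming capacity counts whole queued packets (as the kBufferCapacity constant below suggests); the helper name is hypothetical:

void DemoBufferQueueStream() {
  BufferQueueStream stream(1, 2048);  // one queued packet, 2048-byte default
  const char packet[] = "datagram";
  char buffer[2048];
  size_t written = 0, read = 0;
  int error = 0;
  stream.Write(packet, sizeof(packet), &written, &error);  // SR_SUCCESS
  stream.Write(packet, sizeof(packet), &written, &error);  // SR_BLOCK: queue full
  stream.Read(buffer, sizeof(buffer), &read, &error);      // SR_SUCCESS, one packet
  stream.Read(buffer, sizeof(buffer), &read, &error);      // SR_BLOCK: queue empty
}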
+
+class SSLDummyStreamDTLS : public SSLDummyStreamBase {
+ public:
+ SSLDummyStreamDTLS(SSLStreamAdapterTestBase* test,
+ const std::string& side,
+ BufferQueueStream* in,
+ BufferQueueStream* out) :
+ SSLDummyStreamBase(test, side, in, out) {
+ }
+};
+
static const int kFifoBufferSize = 4096;
+static const int kBufferCapacity = 1;
+static const size_t kDefaultBufferSize = 2048;
class SSLStreamAdapterTestBase : public testing::Test,
public sigslot::has_slots<> {
@@ -167,14 +227,12 @@ class SSLStreamAdapterTestBase : public testing::Test,
bool dtls,
rtc::KeyParams client_key_type = rtc::KeyParams(rtc::KT_DEFAULT),
rtc::KeyParams server_key_type = rtc::KeyParams(rtc::KT_DEFAULT))
- : client_buffer_(kFifoBufferSize),
- server_buffer_(kFifoBufferSize),
- client_stream_(
- new SSLDummyStream(this, "c2s", &client_buffer_, &server_buffer_)),
- server_stream_(
- new SSLDummyStream(this, "s2c", &server_buffer_, &client_buffer_)),
- client_ssl_(rtc::SSLStreamAdapter::Create(client_stream_)),
- server_ssl_(rtc::SSLStreamAdapter::Create(server_stream_)),
+ : client_cert_pem_(client_cert_pem),
+ client_private_key_pem_(client_private_key_pem),
+ client_key_type_(client_key_type),
+ server_key_type_(server_key_type),
+ client_stream_(NULL),
+ server_stream_(NULL),
client_identity_(NULL),
server_identity_(NULL),
delay_(0),
@@ -187,36 +245,47 @@ class SSLStreamAdapterTestBase : public testing::Test,
identities_set_(false) {
// Set use of the test RNG to get predictable loss patterns.
rtc::SetRandomTestMode(true);
+ }
+
+ ~SSLStreamAdapterTestBase() {
+ // Put it back for the next test.
+ rtc::SetRandomTestMode(false);
+ }
+
+ void SetUp() override {
+ CreateStreams();
+
+ client_ssl_.reset(rtc::SSLStreamAdapter::Create(client_stream_));
+ server_ssl_.reset(rtc::SSLStreamAdapter::Create(server_stream_));
// Set up the slots
client_ssl_->SignalEvent.connect(this, &SSLStreamAdapterTestBase::OnEvent);
server_ssl_->SignalEvent.connect(this, &SSLStreamAdapterTestBase::OnEvent);
- if (!client_cert_pem.empty() && !client_private_key_pem.empty()) {
+ if (!client_cert_pem_.empty() && !client_private_key_pem_.empty()) {
client_identity_ = rtc::SSLIdentity::FromPEMStrings(
- client_private_key_pem, client_cert_pem);
+ client_private_key_pem_, client_cert_pem_);
} else {
- client_identity_ = rtc::SSLIdentity::Generate("client", client_key_type);
+ client_identity_ = rtc::SSLIdentity::Generate("client", client_key_type_);
}
- server_identity_ = rtc::SSLIdentity::Generate("server", server_key_type);
+ server_identity_ = rtc::SSLIdentity::Generate("server", server_key_type_);
client_ssl_->SetIdentity(client_identity_);
server_ssl_->SetIdentity(server_identity_);
}
- ~SSLStreamAdapterTestBase() {
- // Put it back for the next test.
- rtc::SetRandomTestMode(false);
+ void TearDown() override {
+ client_ssl_.reset(nullptr);
+ server_ssl_.reset(nullptr);
}
+ virtual void CreateStreams() = 0;
+
// Recreate the client/server identities with the specified validity period.
// |not_before| and |not_after| are offsets from the current time in number
// of seconds.
void ResetIdentitiesWithValidity(int not_before, int not_after) {
- client_stream_ =
- new SSLDummyStream(this, "c2s", &client_buffer_, &server_buffer_);
- server_stream_ =
- new SSLDummyStream(this, "s2c", &server_buffer_, &client_buffer_);
+ CreateStreams();
client_ssl_.reset(rtc::SSLStreamAdapter::Create(client_stream_));
server_ssl_.reset(rtc::SSLStreamAdapter::Create(server_stream_));
@@ -224,18 +293,20 @@ class SSLStreamAdapterTestBase : public testing::Test,
client_ssl_->SignalEvent.connect(this, &SSLStreamAdapterTestBase::OnEvent);
server_ssl_->SignalEvent.connect(this, &SSLStreamAdapterTestBase::OnEvent);
+ time_t now = time(nullptr);
+
rtc::SSLIdentityParams client_params;
client_params.key_params = rtc::KeyParams(rtc::KT_DEFAULT);
client_params.common_name = "client";
- client_params.not_before = not_before;
- client_params.not_after = not_after;
+ client_params.not_before = now + not_before;
+ client_params.not_after = now + not_after;
client_identity_ = rtc::SSLIdentity::GenerateForTest(client_params);
rtc::SSLIdentityParams server_params;
server_params.key_params = rtc::KeyParams(rtc::KT_DEFAULT);
server_params.common_name = "server";
- server_params.not_before = not_before;
- server_params.not_after = not_after;
+ server_params.not_before = now + not_before;
+ server_params.not_after = now + not_after;
server_identity_ = rtc::SSLIdentity::GenerateForTest(server_params);
client_ssl_->SetIdentity(client_identity_);
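The validity offsets are now applied relative to the current wall-clock time rather than passed through verbatim, so callers describe a window around "now". A sketch of the same arithmetic in isolation, with a hypothetical helper name:

// Build params for a certificate that became valid a minute ago and expires
// in one hour. Offsets are in seconds, mirroring the code above.
rtc::SSLIdentityParams MakeShortLivedParams() {
  time_t now = time(nullptr);
  rtc::SSLIdentityParams params;
  params.key_params = rtc::KeyParams(rtc::KT_DEFAULT);
  params.common_name = "example";
  params.not_before = now - 60;
  params.not_after = now + 60 * 60;
  return params;
}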
@@ -331,9 +402,9 @@ class SSLStreamAdapterTestBase : public testing::Test,
}
}
- rtc::StreamResult DataWritten(SSLDummyStream *from, const void *data,
- size_t data_len, size_t *written,
- int *error) {
+ rtc::StreamResult DataWritten(SSLDummyStreamBase *from, const void *data,
+ size_t data_len, size_t *written,
+ int *error) {
// Randomly drop loss_ percent of packets
if (rtc::CreateRandomId() % 100 < static_cast<uint32_t>(loss_)) {
LOG(LS_INFO) << "Randomly dropping packet, size=" << data_len;
@@ -389,19 +460,18 @@ class SSLStreamAdapterTestBase : public testing::Test,
handshake_wait_ = wait;
}
- void SetDtlsSrtpCiphers(const std::vector<std::string> &ciphers,
- bool client) {
+ void SetDtlsSrtpCryptoSuites(const std::vector<int>& ciphers, bool client) {
if (client)
- client_ssl_->SetDtlsSrtpCiphers(ciphers);
+ client_ssl_->SetDtlsSrtpCryptoSuites(ciphers);
else
- server_ssl_->SetDtlsSrtpCiphers(ciphers);
+ server_ssl_->SetDtlsSrtpCryptoSuites(ciphers);
}
- bool GetDtlsSrtpCipher(bool client, std::string *retval) {
+ bool GetDtlsSrtpCryptoSuite(bool client, int* retval) {
if (client)
- return client_ssl_->GetDtlsSrtpCipher(retval);
+ return client_ssl_->GetDtlsSrtpCryptoSuite(retval);
else
- return server_ssl_->GetDtlsSrtpCipher(retval);
+ return server_ssl_->GetDtlsSrtpCryptoSuite(retval);
}
bool GetPeerCertificate(bool client, rtc::SSLCertificate** cert) {
@@ -443,10 +513,12 @@ class SSLStreamAdapterTestBase : public testing::Test,
virtual void TestTransfer(int size) = 0;
protected:
- rtc::FifoBuffer client_buffer_;
- rtc::FifoBuffer server_buffer_;
- SSLDummyStream *client_stream_; // freed by client_ssl_ destructor
- SSLDummyStream *server_stream_; // freed by server_ssl_ destructor
+ std::string client_cert_pem_;
+ std::string client_private_key_pem_;
+ rtc::KeyParams client_key_type_;
+ rtc::KeyParams server_key_type_;
+ SSLDummyStreamBase *client_stream_; // freed by client_ssl_ destructor
+ SSLDummyStreamBase *server_stream_; // freed by server_ssl_ destructor
rtc::scoped_ptr<rtc::SSLStreamAdapter> client_ssl_;
rtc::scoped_ptr<rtc::SSLStreamAdapter> server_ssl_;
rtc::SSLIdentity *client_identity_; // freed by client_ssl_ destructor
@@ -470,7 +542,17 @@ class SSLStreamAdapterTestTLS
"",
false,
::testing::get<0>(GetParam()),
- ::testing::get<1>(GetParam())){};
+ ::testing::get<1>(GetParam())),
+ client_buffer_(kFifoBufferSize),
+ server_buffer_(kFifoBufferSize) {
+ }
+
+ void CreateStreams() override {
+ client_stream_ =
+ new SSLDummyStreamTLS(this, "c2s", &client_buffer_, &server_buffer_);
+ server_stream_ =
+ new SSLDummyStreamTLS(this, "s2c", &server_buffer_, &client_buffer_);
+ }
// Test data transfer for TLS
virtual void TestTransfer(int size) {
@@ -549,7 +631,7 @@ class SSLStreamAdapterTestTLS
if (r == rtc::SR_ERROR || r == rtc::SR_EOS) {
// Unfortunately, errors are the way that the stream adapter
- // signals close in OpenSSL
+ // signals close in OpenSSL.
stream->Close();
return;
}
@@ -565,6 +647,8 @@ class SSLStreamAdapterTestTLS
}
private:
+ rtc::FifoBuffer client_buffer_;
+ rtc::FifoBuffer server_buffer_;
rtc::MemoryStream send_stream_;
rtc::MemoryStream recv_stream_;
};
@@ -579,6 +663,8 @@ class SSLStreamAdapterTestDTLS
true,
::testing::get<0>(GetParam()),
::testing::get<1>(GetParam())),
+ client_buffer_(kBufferCapacity, kDefaultBufferSize),
+ server_buffer_(kBufferCapacity, kDefaultBufferSize),
packet_size_(1000),
count_(0),
sent_(0) {}
@@ -586,18 +672,32 @@ class SSLStreamAdapterTestDTLS
SSLStreamAdapterTestDTLS(const std::string& cert_pem,
const std::string& private_key_pem) :
SSLStreamAdapterTestBase(cert_pem, private_key_pem, true),
+ client_buffer_(kBufferCapacity, kDefaultBufferSize),
+ server_buffer_(kBufferCapacity, kDefaultBufferSize),
packet_size_(1000), count_(0), sent_(0) {
}
+ void CreateStreams() override {
+ client_stream_ =
+ new SSLDummyStreamDTLS(this, "c2s", &client_buffer_, &server_buffer_);
+ server_stream_ =
+ new SSLDummyStreamDTLS(this, "s2c", &server_buffer_, &client_buffer_);
+ }
+
virtual void WriteData() {
unsigned char *packet = new unsigned char[1600];
- do {
- memset(packet, sent_ & 0xff, packet_size_);
- *(reinterpret_cast<uint32_t *>(packet)) = sent_;
+ while (sent_ < count_) {
+ unsigned int rand_state = sent_;
+ packet[0] = sent_;
+ for (size_t i = 1; i < packet_size_; i++) {
+        // This is a simple LC PRNG. Keep in sync with the identical code below.
+ rand_state = (rand_state * 251 + 19937) >> 7;
+ packet[i] = rand_state & 0xff;
+ }
size_t sent;
- int rv = client_ssl_->Write(packet, packet_size_, &sent, 0);
+ rtc::StreamResult rv = client_ssl_->Write(packet, packet_size_, &sent, 0);
if (rv == rtc::SR_SUCCESS) {
LOG(LS_VERBOSE) << "Sent: " << sent_;
sent_++;
@@ -608,7 +708,7 @@ class SSLStreamAdapterTestDTLS
ADD_FAILURE();
break;
}
- } while (sent_ < count_);
+ }
delete [] packet;
}
@@ -637,11 +737,13 @@ class SSLStreamAdapterTestDTLS
// Now parse the datagram
ASSERT_EQ(packet_size_, bread);
- unsigned char* ptr_to_buffer = buffer;
- uint32_t packet_num = *(reinterpret_cast<uint32_t *>(ptr_to_buffer));
+ unsigned char packet_num = buffer[0];
- for (size_t i = 4; i < packet_size_; i++) {
- ASSERT_EQ((packet_num & 0xff), buffer[i]);
+ unsigned int rand_state = packet_num;
+ for (size_t i = 1; i < packet_size_; i++) {
+      // This is a simple LC PRNG. Keep in sync with the identical code above.
+ rand_state = (rand_state * 251 + 19937) >> 7;
+ ASSERT_EQ(rand_state & 0xff, buffer[i]);
}
received_.insert(packet_num);
}
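The write and read sides above share one payload scheme: byte 0 carries the packet number, and every later byte comes from a small linear-congruential recurrence seeded with that number, so the receiver can regenerate the whole payload from the first byte and detect damage anywhere in the datagram. The same pattern restated as standalone helpers (hypothetical names):

#include <cstddef>

void FillPacket(unsigned char* packet, size_t size, unsigned char number) {
  unsigned int rand_state = number;
  packet[0] = number;
  for (size_t i = 1; i < size; i++) {
    rand_state = (rand_state * 251 + 19937) >> 7;  // same LCG as the test
    packet[i] = rand_state & 0xff;
  }
}

bool VerifyPacket(const unsigned char* packet, size_t size) {
  unsigned int rand_state = packet[0];
  for (size_t i = 1; i < size; i++) {
    rand_state = (rand_state * 251 + 19937) >> 7;
    if (packet[i] != (rand_state & 0xff))
      return false;  // payload damaged in transit
  }
  return true;
}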
@@ -667,6 +769,8 @@ class SSLStreamAdapterTestDTLS
};
private:
+ BufferQueueStream client_buffer_;
+ BufferQueueStream server_buffer_;
size_t packet_size_;
int count_;
int sent_;
@@ -674,23 +778,20 @@ class SSLStreamAdapterTestDTLS
};
-rtc::StreamResult SSLDummyStream::Write(const void* data, size_t data_len,
+rtc::StreamResult SSLDummyStreamBase::Write(const void* data, size_t data_len,
size_t* written, int* error) {
- *written = data_len;
-
LOG(LS_INFO) << "Writing to loopback " << data_len;
if (first_packet_) {
first_packet_ = false;
- if (test_->GetLoseFirstPacket()) {
+ if (test_base_->GetLoseFirstPacket()) {
LOG(LS_INFO) << "Losing initial packet of length " << data_len;
+      *written = data_len;  // Pretend to the writer that the write succeeded.
return rtc::SR_SUCCESS;
}
}
- return test_->DataWritten(this, data, data_len, written, error);
-
- return rtc::SR_SUCCESS;
+ return test_base_->DataWritten(this, data, data_len, written, error);
};
class SSLStreamAdapterTestDTLSFromPEMStrings : public SSLStreamAdapterTestDTLS {
@@ -782,23 +883,20 @@ TEST_P(SSLStreamAdapterTestDTLS, DISABLED_TestDTLSConnectWithSmallMtu) {
};
// Test transfer -- trivial
-// Disabled due to https://code.google.com/p/webrtc/issues/detail?id=5005
-TEST_P(SSLStreamAdapterTestDTLS, DISABLED_TestDTLSTransfer) {
+TEST_P(SSLStreamAdapterTestDTLS, TestDTLSTransfer) {
MAYBE_SKIP_TEST(HaveDtls);
TestHandshake();
TestTransfer(100);
};
-// Disabled due to https://code.google.com/p/webrtc/issues/detail?id=5005
-TEST_P(SSLStreamAdapterTestDTLS, DISABLED_TestDTLSTransferWithLoss) {
+TEST_P(SSLStreamAdapterTestDTLS, TestDTLSTransferWithLoss) {
MAYBE_SKIP_TEST(HaveDtls);
TestHandshake();
SetLoss(10);
TestTransfer(100);
};
-// Disabled due to https://code.google.com/p/webrtc/issues/detail?id=5005
-TEST_P(SSLStreamAdapterTestDTLS, DISABLED_TestDTLSTransferWithDamage) {
+TEST_P(SSLStreamAdapterTestDTLS, TestDTLSTransferWithDamage) {
MAYBE_SKIP_TEST(HaveDtls);
SetDamage(); // Must be called first because first packet
// write happens at end of handshake.
@@ -809,74 +907,74 @@ TEST_P(SSLStreamAdapterTestDTLS, DISABLED_TestDTLSTransferWithDamage) {
// Test DTLS-SRTP with all high ciphers
TEST_P(SSLStreamAdapterTestDTLS, TestDTLSSrtpHigh) {
MAYBE_SKIP_TEST(HaveDtlsSrtp);
- std::vector<std::string> high;
- high.push_back(kAES_CM_HMAC_SHA1_80);
- SetDtlsSrtpCiphers(high, true);
- SetDtlsSrtpCiphers(high, false);
+ std::vector<int> high;
+ high.push_back(rtc::SRTP_AES128_CM_SHA1_80);
+ SetDtlsSrtpCryptoSuites(high, true);
+ SetDtlsSrtpCryptoSuites(high, false);
TestHandshake();
- std::string client_cipher;
- ASSERT_TRUE(GetDtlsSrtpCipher(true, &client_cipher));
- std::string server_cipher;
- ASSERT_TRUE(GetDtlsSrtpCipher(false, &server_cipher));
+ int client_cipher;
+ ASSERT_TRUE(GetDtlsSrtpCryptoSuite(true, &client_cipher));
+ int server_cipher;
+ ASSERT_TRUE(GetDtlsSrtpCryptoSuite(false, &server_cipher));
ASSERT_EQ(client_cipher, server_cipher);
- ASSERT_EQ(client_cipher, kAES_CM_HMAC_SHA1_80);
+ ASSERT_EQ(client_cipher, rtc::SRTP_AES128_CM_SHA1_80);
};
// Test DTLS-SRTP with all low ciphers
TEST_P(SSLStreamAdapterTestDTLS, TestDTLSSrtpLow) {
MAYBE_SKIP_TEST(HaveDtlsSrtp);
- std::vector<std::string> low;
- low.push_back(kAES_CM_HMAC_SHA1_32);
- SetDtlsSrtpCiphers(low, true);
- SetDtlsSrtpCiphers(low, false);
+ std::vector<int> low;
+ low.push_back(rtc::SRTP_AES128_CM_SHA1_32);
+ SetDtlsSrtpCryptoSuites(low, true);
+ SetDtlsSrtpCryptoSuites(low, false);
TestHandshake();
- std::string client_cipher;
- ASSERT_TRUE(GetDtlsSrtpCipher(true, &client_cipher));
- std::string server_cipher;
- ASSERT_TRUE(GetDtlsSrtpCipher(false, &server_cipher));
+ int client_cipher;
+ ASSERT_TRUE(GetDtlsSrtpCryptoSuite(true, &client_cipher));
+ int server_cipher;
+ ASSERT_TRUE(GetDtlsSrtpCryptoSuite(false, &server_cipher));
ASSERT_EQ(client_cipher, server_cipher);
- ASSERT_EQ(client_cipher, kAES_CM_HMAC_SHA1_32);
+ ASSERT_EQ(client_cipher, rtc::SRTP_AES128_CM_SHA1_32);
};
// Test DTLS-SRTP with a mismatch -- should not converge
TEST_P(SSLStreamAdapterTestDTLS, TestDTLSSrtpHighLow) {
MAYBE_SKIP_TEST(HaveDtlsSrtp);
- std::vector<std::string> high;
- high.push_back(kAES_CM_HMAC_SHA1_80);
- std::vector<std::string> low;
- low.push_back(kAES_CM_HMAC_SHA1_32);
- SetDtlsSrtpCiphers(high, true);
- SetDtlsSrtpCiphers(low, false);
+ std::vector<int> high;
+ high.push_back(rtc::SRTP_AES128_CM_SHA1_80);
+ std::vector<int> low;
+ low.push_back(rtc::SRTP_AES128_CM_SHA1_32);
+ SetDtlsSrtpCryptoSuites(high, true);
+ SetDtlsSrtpCryptoSuites(low, false);
TestHandshake();
- std::string client_cipher;
- ASSERT_FALSE(GetDtlsSrtpCipher(true, &client_cipher));
- std::string server_cipher;
- ASSERT_FALSE(GetDtlsSrtpCipher(false, &server_cipher));
+ int client_cipher;
+ ASSERT_FALSE(GetDtlsSrtpCryptoSuite(true, &client_cipher));
+ int server_cipher;
+ ASSERT_FALSE(GetDtlsSrtpCryptoSuite(false, &server_cipher));
};
// Test DTLS-SRTP with each side being mixed -- should select high
TEST_P(SSLStreamAdapterTestDTLS, TestDTLSSrtpMixed) {
MAYBE_SKIP_TEST(HaveDtlsSrtp);
- std::vector<std::string> mixed;
- mixed.push_back(kAES_CM_HMAC_SHA1_80);
- mixed.push_back(kAES_CM_HMAC_SHA1_32);
- SetDtlsSrtpCiphers(mixed, true);
- SetDtlsSrtpCiphers(mixed, false);
+ std::vector<int> mixed;
+ mixed.push_back(rtc::SRTP_AES128_CM_SHA1_80);
+ mixed.push_back(rtc::SRTP_AES128_CM_SHA1_32);
+ SetDtlsSrtpCryptoSuites(mixed, true);
+ SetDtlsSrtpCryptoSuites(mixed, false);
TestHandshake();
- std::string client_cipher;
- ASSERT_TRUE(GetDtlsSrtpCipher(true, &client_cipher));
- std::string server_cipher;
- ASSERT_TRUE(GetDtlsSrtpCipher(false, &server_cipher));
+ int client_cipher;
+ ASSERT_TRUE(GetDtlsSrtpCryptoSuite(true, &client_cipher));
+ int server_cipher;
+ ASSERT_TRUE(GetDtlsSrtpCryptoSuite(false, &server_cipher));
ASSERT_EQ(client_cipher, server_cipher);
- ASSERT_EQ(client_cipher, kAES_CM_HMAC_SHA1_80);
+ ASSERT_EQ(client_cipher, rtc::SRTP_AES128_CM_SHA1_80);
};
// Test an exporter
@@ -921,14 +1019,14 @@ TEST_P(SSLStreamAdapterTestDTLS, TestCertExpired) {
}
// Test data transfer using certs created from strings.
-TEST_P(SSLStreamAdapterTestDTLSFromPEMStrings, TestTransfer) {
+TEST_F(SSLStreamAdapterTestDTLSFromPEMStrings, TestTransfer) {
MAYBE_SKIP_TEST(HaveDtls);
TestHandshake();
TestTransfer(100);
}
// Test getting the remote certificate.
-TEST_P(SSLStreamAdapterTestDTLSFromPEMStrings, TestDTLSGetPeerCertificate) {
+TEST_F(SSLStreamAdapterTestDTLSFromPEMStrings, TestDTLSGetPeerCertificate) {
MAYBE_SKIP_TEST(HaveDtls);
// Peer certificates haven't been received yet.
@@ -1052,6 +1150,10 @@ INSTANTIATE_TEST_CASE_P(
Values(rtc::KeyParams::RSA(1024, 65537),
rtc::KeyParams::RSA(1152, 65537),
rtc::KeyParams::ECDSA(rtc::EC_NIST_P256))));
+
+#if !defined(MEMORY_SANITIZER)
+// Fails under MemorySanitizer:
+// See https://code.google.com/p/webrtc/issues/detail?id=5381.
INSTANTIATE_TEST_CASE_P(
SSLStreamAdapterTestsDTLS,
SSLStreamAdapterTestDTLS,
@@ -1061,3 +1163,4 @@ INSTANTIATE_TEST_CASE_P(
Values(rtc::KeyParams::RSA(1024, 65537),
rtc::KeyParams::RSA(1152, 65537),
rtc::KeyParams::ECDSA(rtc::EC_NIST_P256))));
+#endif
diff --git a/webrtc/base/sslstreamadapterhelper.cc b/webrtc/base/sslstreamadapterhelper.cc
index c3be4ea684..61c0e43ff7 100644
--- a/webrtc/base/sslstreamadapterhelper.cc
+++ b/webrtc/base/sslstreamadapterhelper.cc
@@ -29,8 +29,7 @@ SSLStreamAdapterHelper::SSLStreamAdapterHelper(StreamInterface* stream)
role_(SSL_CLIENT),
ssl_error_code_(0), // Not meaningful yet
ssl_mode_(SSL_MODE_TLS),
- ssl_max_version_(SSL_PROTOCOL_TLS_11) {
-}
+ ssl_max_version_(SSL_PROTOCOL_TLS_12) {}
SSLStreamAdapterHelper::~SSLStreamAdapterHelper() = default;
diff --git a/webrtc/base/stream_unittest.cc b/webrtc/base/stream_unittest.cc
index 4172a9726c..8cfd052fe5 100644
--- a/webrtc/base/stream_unittest.cc
+++ b/webrtc/base/stream_unittest.cc
@@ -12,7 +12,6 @@
#include "webrtc/base/gunit.h"
#include "webrtc/base/pathutils.h"
#include "webrtc/base/stream.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
namespace rtc {
diff --git a/webrtc/base/stringencode_unittest.cc b/webrtc/base/stringencode_unittest.cc
index 406d9c7d4a..588e9d8ff5 100644
--- a/webrtc/base/stringencode_unittest.cc
+++ b/webrtc/base/stringencode_unittest.cc
@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/common.h"
#include "webrtc/base/gunit.h"
#include "webrtc/base/stringencode.h"
@@ -48,7 +49,7 @@ TEST(Utf8EncodeTest, EncodeDecode) {
}
char buffer[5];
- memset(buffer, 0x01, ARRAY_SIZE(buffer));
+ memset(buffer, 0x01, arraysize(buffer));
ASSERT_EQ(kTests[i].enclen, utf8_encode(buffer,
kTests[i].encsize,
kTests[i].decoded));
@@ -56,7 +57,7 @@ TEST(Utf8EncodeTest, EncodeDecode) {
// Make sure remainder of buffer is unchanged
ASSERT_TRUE(memory_check(buffer + kTests[i].enclen,
0x1,
- ARRAY_SIZE(buffer) - kTests[i].enclen));
+ arraysize(buffer) - kTests[i].enclen));
}
}
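The ARRAY_SIZE macro being replaced throughout this patch was a plain sizeof division; arraysize() is a function template that only matches true arrays, so passing a pointer becomes a compile error instead of a silent miscount. A sketch of the usual definition, assumed to match webrtc/base/arraysize.h:

#include <cstddef>

// Returns a reference to a char array whose length equals the argument's
// element count; sizeof() of that reference yields N at compile time.
template <typename T, size_t N>
char (&ArraySizeHelper(T (&array)[N]))[N];

#define arraysize(array) (sizeof(ArraySizeHelper(array)))

// char buf[64];  size_t n = arraysize(buf);  // 64
// char* p = buf; size_t m = arraysize(p);    // does not compile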
diff --git a/webrtc/base/stringutils.cc b/webrtc/base/stringutils.cc
index 868e475f2d..9580253d1b 100644
--- a/webrtc/base/stringutils.cc
+++ b/webrtc/base/stringutils.cc
@@ -77,11 +77,11 @@ size_t asccpyn(wchar_t* buffer, size_t buflen,
} else if (srclen >= buflen) {
srclen = buflen - 1;
}
-#if _DEBUG
+#if !defined(NDEBUG)
// Double check that characters are not UTF-8
for (size_t pos = 0; pos < srclen; ++pos)
RTC_DCHECK_LT(static_cast<unsigned char>(source[pos]), 128);
-#endif // _DEBUG
+#endif
std::copy(source, source + srclen, buffer);
buffer[srclen] = 0;
return srclen;
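The _DEBUG to !defined(NDEBUG) rewrite, repeated through the rest of this patch, swaps an MSVC-specific convention (set by /MDd and /MTd) for the standard macro that release builds define to strip assert(), so the debug-only blocks now take effect in debug builds under gcc and clang as well. A minimal illustration with a hypothetical function:

#include <cassert>

int CheckedIncrement(int value) {
#if !defined(NDEBUG)
  // Runs in debug builds on every toolchain, not just MSVC.
  assert(value >= 0);
#endif
  return value + 1;
}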
diff --git a/webrtc/base/systeminfo.cc b/webrtc/base/systeminfo.cc
index bfc96b3763..b400aa08a8 100644
--- a/webrtc/base/systeminfo.cc
+++ b/webrtc/base/systeminfo.cc
@@ -110,7 +110,7 @@ int SystemInfo::GetCurCpus() {
DWORD_PTR process_mask = 0;
DWORD_PTR system_mask = 0;
::GetProcessAffinityMask(::GetCurrentProcess(), &process_mask, &system_mask);
- for (int i = 0; i < sizeof(DWORD_PTR) * 8; ++i) {
+ for (size_t i = 0; i < sizeof(DWORD_PTR) * 8; ++i) {
if (process_mask & 1)
++cur_cpus;
process_mask >>= 1;
diff --git a/webrtc/base/task.cc b/webrtc/base/task.cc
index bdf8f1df03..b09ced12b4 100644
--- a/webrtc/base/task.cc
+++ b/webrtc/base/task.cc
@@ -68,7 +68,7 @@ void Task::Start() {
void Task::Step() {
if (done_) {
-#ifdef _DEBUG
+#if !defined(NDEBUG)
// We do not know how !blocked_ can happen when done_ is set; it should be
// impossible. But it causes problems, so in retail builds we force blocked_,
// and in debug builds we assert.
@@ -88,7 +88,7 @@ void Task::Step() {
// SignalDone();
Stop();
-#ifdef _DEBUG
+#if !defined(NDEBUG)
// verify that stop removed this from its parent
ASSERT(!parent()->IsChildTask(this));
#endif
@@ -125,7 +125,7 @@ void Task::Step() {
// SignalDone();
Stop();
-#if _DEBUG
+#if !defined(NDEBUG)
// verify that stop removed this from its parent
ASSERT(!parent()->IsChildTask(this));
#endif
@@ -150,7 +150,7 @@ void Task::Abort(bool nowake) {
// "done_" is set before calling "Stop()" to ensure that this code
// doesn't execute more than once (recursively) for the same task.
Stop();
-#ifdef _DEBUG
+#if !defined(NDEBUG)
// verify that stop removed this from its parent
ASSERT(!parent()->IsChildTask(this));
#endif
diff --git a/webrtc/base/task_unittest.cc b/webrtc/base/task_unittest.cc
index 7f67841641..7492436a5d 100644
--- a/webrtc/base/task_unittest.cc
+++ b/webrtc/base/task_unittest.cc
@@ -20,6 +20,7 @@
#include "webrtc/base/win32.h"
#endif // WEBRTC_WIN
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/common.h"
#include "webrtc/base/gunit.h"
#include "webrtc/base/logging.h"
@@ -27,7 +28,6 @@
#include "webrtc/base/taskrunner.h"
#include "webrtc/base/thread.h"
#include "webrtc/base/timeutils.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
namespace rtc {
@@ -408,7 +408,7 @@ TEST(start_task_test, AbortShouldWake) {
class TimeoutChangeTest : public sigslot::has_slots<> {
public:
TimeoutChangeTest()
- : task_count_(ARRAY_SIZE(stuck_tasks_)) {}
+ : task_count_(arraysize(stuck_tasks_)) {}
// no need to delete any tasks; the task runner owns them
~TimeoutChangeTest() {}
@@ -463,7 +463,7 @@ class TimeoutChangeTest : public sigslot::has_slots<> {
private:
void OnTimeoutId(const int id) {
- for (int i = 0; i < ARRAY_SIZE(stuck_tasks_); ++i) {
+ for (size_t i = 0; i < arraysize(stuck_tasks_); ++i) {
if (stuck_tasks_[i] && stuck_tasks_[i]->unique_id() == id) {
task_count_--;
stuck_tasks_[i] = NULL;
diff --git a/webrtc/base/taskparent.cc b/webrtc/base/taskparent.cc
index db6db37029..14d236dc42 100644
--- a/webrtc/base/taskparent.cc
+++ b/webrtc/base/taskparent.cc
@@ -46,7 +46,7 @@ void TaskParent::AddChild(Task *child) {
children_->insert(child);
}
-#ifdef _DEBUG
+#if !defined(NDEBUG)
bool TaskParent::IsChildTask(Task *task) {
ASSERT(task != NULL);
return task->parent_ == this && children_->find(task) != children_->end();
@@ -69,7 +69,7 @@ bool TaskParent::AnyChildError() {
void TaskParent::AbortAllChildren() {
if (children_->size() > 0) {
-#ifdef _DEBUG
+#if !defined(NDEBUG)
runner_->IncrementAbortCount();
#endif
@@ -78,7 +78,7 @@ void TaskParent::AbortAllChildren() {
(*it)->Abort(true); // Note we do not wake
}
-#ifdef _DEBUG
+#if !defined(NDEBUG)
runner_->DecrementAbortCount();
#endif
}
diff --git a/webrtc/base/taskparent.h b/webrtc/base/taskparent.h
index e9342c1c52..41008fa98e 100644
--- a/webrtc/base/taskparent.h
+++ b/webrtc/base/taskparent.h
@@ -32,7 +32,7 @@ class TaskParent {
bool AllChildrenDone();
bool AnyChildError();
-#ifdef _DEBUG
+#if !defined(NDEBUG)
bool IsChildTask(Task *task);
#endif
diff --git a/webrtc/base/taskrunner.cc b/webrtc/base/taskrunner.cc
index e7278f10a1..c50c9f833e 100644
--- a/webrtc/base/taskrunner.cc
+++ b/webrtc/base/taskrunner.cc
@@ -23,7 +23,7 @@ TaskRunner::TaskRunner()
: TaskParent(this),
next_timeout_task_(NULL),
tasks_running_(false)
-#ifdef _DEBUG
+#if !defined(NDEBUG)
, abort_count_(0),
deleting_task_(NULL)
#endif
@@ -88,11 +88,11 @@ void TaskRunner::InternalRunTasks(bool in_destructor) {
need_timeout_recalc = true;
}
-#ifdef _DEBUG
+#if !defined(NDEBUG)
deleting_task_ = task;
#endif
delete task;
-#ifdef _DEBUG
+#if !defined(NDEBUG)
deleting_task_ = NULL;
#endif
tasks_[i] = NULL;
diff --git a/webrtc/base/taskrunner.h b/webrtc/base/taskrunner.h
index 9a43aac068..e0cf17513a 100644
--- a/webrtc/base/taskrunner.h
+++ b/webrtc/base/taskrunner.h
@@ -44,7 +44,7 @@ class TaskRunner : public TaskParent, public sigslot::has_slots<> {
void UpdateTaskTimeout(Task* task, int64_t previous_task_timeout_time);
-#ifdef _DEBUG
+#if !defined(NDEBUG)
bool is_ok_to_delete(Task* task) {
return task == deleting_task_;
}
@@ -87,7 +87,7 @@ class TaskRunner : public TaskParent, public sigslot::has_slots<> {
std::vector<Task *> tasks_;
Task *next_timeout_task_;
bool tasks_running_;
-#ifdef _DEBUG
+#if !defined(NDEBUG)
int abort_count_;
Task* deleting_task_;
#endif
diff --git a/webrtc/base/testclient_unittest.cc b/webrtc/base/testclient_unittest.cc
index 17bf4e6c46..bdd06b329a 100644
--- a/webrtc/base/testclient_unittest.cc
+++ b/webrtc/base/testclient_unittest.cc
@@ -14,7 +14,6 @@
#include "webrtc/base/testclient.h"
#include "webrtc/base/testechoserver.h"
#include "webrtc/base/thread.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
using namespace rtc;
diff --git a/webrtc/base/testutils.h b/webrtc/base/testutils.h
index e56895d7ea..6e7e22a928 100644
--- a/webrtc/base/testutils.h
+++ b/webrtc/base/testutils.h
@@ -25,6 +25,7 @@
#include <algorithm>
#include <map>
#include <vector>
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/asyncsocket.h"
#include "webrtc/base/common.h"
#include "webrtc/base/gunit.h"
@@ -357,7 +358,7 @@ private:
}
void OnReadEvent(AsyncSocket* socket) {
char data[64 * 1024];
- int result = socket_->Recv(data, ARRAY_SIZE(data));
+ int result = socket_->Recv(data, arraysize(data));
if (result > 0) {
recv_buffer_.insert(recv_buffer_.end(), data, data + result);
}
diff --git a/webrtc/base/thread.cc b/webrtc/base/thread.cc
index 8ab381f4df..4197d28175 100644
--- a/webrtc/base/thread.cc
+++ b/webrtc/base/thread.cc
@@ -140,7 +140,6 @@ Thread::ScopedDisallowBlockingCalls::~ScopedDisallowBlockingCalls() {
Thread::Thread(SocketServer* ss)
: MessageQueue(ss),
- priority_(PRIORITY_NORMAL),
running_(true, false),
#if defined(WEBRTC_WIN)
thread_(NULL),
@@ -188,34 +187,6 @@ bool Thread::SetName(const std::string& name, const void* obj) {
return true;
}
-bool Thread::SetPriority(ThreadPriority priority) {
-#if defined(WEBRTC_WIN)
- if (running()) {
- ASSERT(thread_ != NULL);
- BOOL ret = FALSE;
- if (priority == PRIORITY_NORMAL) {
- ret = ::SetThreadPriority(thread_, THREAD_PRIORITY_NORMAL);
- } else if (priority == PRIORITY_HIGH) {
- ret = ::SetThreadPriority(thread_, THREAD_PRIORITY_HIGHEST);
- } else if (priority == PRIORITY_ABOVE_NORMAL) {
- ret = ::SetThreadPriority(thread_, THREAD_PRIORITY_ABOVE_NORMAL);
- } else if (priority == PRIORITY_IDLE) {
- ret = ::SetThreadPriority(thread_, THREAD_PRIORITY_IDLE);
- }
- if (!ret) {
- return false;
- }
- }
- priority_ = priority;
- return true;
-#else
- // TODO: Implement for Linux/Mac if possible.
- if (running()) return false;
- priority_ = priority;
- return true;
-#endif
-}
-
bool Thread::Start(Runnable* runnable) {
ASSERT(owned_);
if (!owned_) return false;
@@ -232,18 +203,10 @@ bool Thread::Start(Runnable* runnable) {
init->thread = this;
init->runnable = runnable;
#if defined(WEBRTC_WIN)
- DWORD flags = 0;
- if (priority_ != PRIORITY_NORMAL) {
- flags = CREATE_SUSPENDED;
- }
- thread_ = CreateThread(NULL, 0, (LPTHREAD_START_ROUTINE)PreRun, init, flags,
+ thread_ = CreateThread(NULL, 0, (LPTHREAD_START_ROUTINE)PreRun, init, 0,
&thread_id_);
if (thread_) {
running_.Set();
- if (priority_ != PRIORITY_NORMAL) {
- SetPriority(priority_);
- ::ResumeThread(thread_);
- }
} else {
return false;
}
@@ -251,37 +214,6 @@ bool Thread::Start(Runnable* runnable) {
pthread_attr_t attr;
pthread_attr_init(&attr);
- // Thread priorities are not supported in NaCl.
-#if !defined(__native_client__)
- if (priority_ != PRIORITY_NORMAL) {
- if (priority_ == PRIORITY_IDLE) {
- // There is no POSIX-standard way to set a below-normal priority for an
- // individual thread (only whole process), so let's not support it.
- LOG(LS_WARNING) << "PRIORITY_IDLE not supported";
- } else {
- // Set real-time round-robin policy.
- if (pthread_attr_setschedpolicy(&attr, SCHED_RR) != 0) {
- LOG(LS_ERROR) << "pthread_attr_setschedpolicy";
- }
- struct sched_param param;
- if (pthread_attr_getschedparam(&attr, &param) != 0) {
- LOG(LS_ERROR) << "pthread_attr_getschedparam";
- } else {
- // The numbers here are arbitrary.
- if (priority_ == PRIORITY_HIGH) {
- param.sched_priority = 6; // 6 = HIGH
- } else {
- ASSERT(priority_ == PRIORITY_ABOVE_NORMAL);
- param.sched_priority = 4; // 4 = ABOVE_NORMAL
- }
- if (pthread_attr_setschedparam(&attr, &param) != 0) {
- LOG(LS_ERROR) << "pthread_attr_setschedparam";
- }
- }
- }
- }
-#endif // !defined(__native_client__)
-
int error_code = pthread_create(&thread_, &attr, PreRun, init);
if (0 != error_code) {
LOG(LS_ERROR) << "Unable to create pthread, error " << error_code;
@@ -345,7 +277,7 @@ bool Thread::SetAllowBlockingCalls(bool allow) {
// static
void Thread::AssertBlockingIsAllowedOnCurrentThread() {
-#ifdef _DEBUG
+#if !defined(NDEBUG)
Thread* current = Thread::Current();
ASSERT(!current || current->blocking_calls_allowed_);
#endif
diff --git a/webrtc/base/thread.h b/webrtc/base/thread.h
index 9cbe8ec4dc..f91aa56733 100644
--- a/webrtc/base/thread.h
+++ b/webrtc/base/thread.h
@@ -78,13 +78,6 @@ struct _SendMessage {
bool *ready;
};
-enum ThreadPriority {
- PRIORITY_IDLE = -1,
- PRIORITY_NORMAL = 0,
- PRIORITY_ABOVE_NORMAL = 1,
- PRIORITY_HIGH = 2,
-};
-
class Runnable {
public:
virtual ~Runnable() {}
@@ -137,10 +130,6 @@ class Thread : public MessageQueue {
const std::string& name() const { return name_; }
bool SetName(const std::string& name, const void* obj);
- // Sets the thread's priority. Must be called before Start().
- ThreadPriority priority() const { return priority_; }
- bool SetPriority(ThreadPriority priority);
-
// Starts the execution of the thread.
bool Start(Runnable* runnable = NULL);
@@ -271,7 +260,6 @@ class Thread : public MessageQueue {
std::list<_SendMessage> sendlist_;
std::string name_;
- ThreadPriority priority_;
Event running_; // Signalled means running.
#if defined(WEBRTC_POSIX)
diff --git a/webrtc/base/thread_checker_impl.cc b/webrtc/base/thread_checker_impl.cc
index ea88308772..79be606445 100644
--- a/webrtc/base/thread_checker_impl.cc
+++ b/webrtc/base/thread_checker_impl.cc
@@ -12,6 +12,8 @@
#include "webrtc/base/thread_checker_impl.h"
+#include "webrtc/base/platform_thread.h"
+
namespace rtc {
ThreadCheckerImpl::ThreadCheckerImpl() : valid_thread_(CurrentThreadRef()) {
diff --git a/webrtc/base/thread_checker_impl.h b/webrtc/base/thread_checker_impl.h
index 7b39ada0ca..045583591d 100644
--- a/webrtc/base/thread_checker_impl.h
+++ b/webrtc/base/thread_checker_impl.h
@@ -14,7 +14,7 @@
#define WEBRTC_BASE_THREAD_CHECKER_IMPL_H_
#include "webrtc/base/criticalsection.h"
-#include "webrtc/base/platform_thread.h"
+#include "webrtc/base/platform_thread_types.h"
namespace rtc {
diff --git a/webrtc/base/thread_checker_unittest.cc b/webrtc/base/thread_checker_unittest.cc
index bcffb523ab..338190093d 100644
--- a/webrtc/base/thread_checker_unittest.cc
+++ b/webrtc/base/thread_checker_unittest.cc
@@ -15,7 +15,6 @@
#include "webrtc/base/thread.h"
#include "webrtc/base/thread_checker.h"
#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
// Duplicated from base/threading/thread_checker.h so that we can be
// good citizens there and undef the macro.
diff --git a/webrtc/base/thread_unittest.cc b/webrtc/base/thread_unittest.cc
index e50e45cd8a..7ed4326724 100644
--- a/webrtc/base/thread_unittest.cc
+++ b/webrtc/base/thread_unittest.cc
@@ -15,7 +15,6 @@
#include "webrtc/base/physicalsocketserver.h"
#include "webrtc/base/socketaddress.h"
#include "webrtc/base/thread.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
#if defined(WEBRTC_WIN)
#include <comdef.h> // NOLINT
@@ -137,16 +136,48 @@ class SignalWhenDestroyedThread : public Thread {
Event* event_;
};
+// A bool wrapped in a mutex, to avoid data races. A volatile bool is
+// not enough: in C++, volatile guarantees neither atomicity nor
+// ordering, there is no way to tell the compiler that the accesses are
+// synchronized, and tsan reports the unsynchronized accesses as a data
+// race.
+
+// See also discussion at
+// http://stackoverflow.com/questions/7223164/is-mutex-needed-to-synchronize-a-simple-flag-between-pthreads
+
+// Using std::atomic<bool> or std::atomic_flag in C++11 is probably
+// the right thing to do, but those features are not yet allowed. Or
+// rtc::AtomicInt, if/when that is added. Since the use isn't
+// performance critical, use a plain critical section for the time
+// being.
+
+class AtomicBool {
+ public:
+ explicit AtomicBool(bool value = false) : flag_(value) {}
+ AtomicBool& operator=(bool value) {
+ CritScope scoped_lock(&cs_);
+ flag_ = value;
+ return *this;
+ }
+ bool get() const {
+ CritScope scoped_lock(&cs_);
+ return flag_;
+ }
+
+ private:
+ mutable CriticalSection cs_;
+ bool flag_;
+};
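A usage sketch, mirroring how the tests below hand the flag to FunctorB: the worker thread writes through operator=, the test thread reads through get(), and both paths take the lock. The helper is hypothetical:

void DemoAtomicBool(rtc::Thread* worker) {
  AtomicBool called;           // starts false
  FunctorB functor(&called);   // sets the flag when invoked
  worker->Invoke<void>(functor);
  bool result = called.get();  // safe cross-thread read; true after Invoke
}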
+
// Function objects to test Thread::Invoke.
struct FunctorA {
int operator()() { return 42; }
};
class FunctorB {
public:
- explicit FunctorB(bool* flag) : flag_(flag) {}
+ explicit FunctorB(AtomicBool* flag) : flag_(flag) {}
void operator()() { if (flag_) *flag_ = true; }
private:
- bool* flag_;
+ AtomicBool* flag_;
};
struct FunctorC {
int operator()() {
@@ -220,33 +251,6 @@ TEST(ThreadTest, Names) {
delete thread;
}
-// Test that setting thread priorities doesn't cause a malfunction.
-// There's no easy way to verify the priority was set properly at this time.
-TEST(ThreadTest, Priorities) {
- Thread *thread;
- thread = new Thread();
- EXPECT_TRUE(thread->SetPriority(PRIORITY_HIGH));
- EXPECT_TRUE(thread->Start());
- thread->Stop();
- delete thread;
- thread = new Thread();
- EXPECT_TRUE(thread->SetPriority(PRIORITY_ABOVE_NORMAL));
- EXPECT_TRUE(thread->Start());
- thread->Stop();
- delete thread;
-
- thread = new Thread();
- EXPECT_TRUE(thread->Start());
-#if defined(WEBRTC_WIN)
- EXPECT_TRUE(thread->SetPriority(PRIORITY_ABOVE_NORMAL));
-#else
- EXPECT_FALSE(thread->SetPriority(PRIORITY_ABOVE_NORMAL));
-#endif
- thread->Stop();
- delete thread;
-
-}
-
TEST(ThreadTest, Wrap) {
Thread* current_thread = Thread::Current();
current_thread->UnwrapCurrent();
@@ -266,10 +270,10 @@ TEST(ThreadTest, Invoke) {
thread.Start();
// Try calling functors.
EXPECT_EQ(42, thread.Invoke<int>(FunctorA()));
- bool called = false;
+ AtomicBool called;
FunctorB f2(&called);
thread.Invoke<void>(f2);
- EXPECT_TRUE(called);
+ EXPECT_TRUE(called.get());
// Try calling bare functions.
struct LocalFuncs {
static int Func1() { return 999; }
@@ -408,9 +412,9 @@ TEST_F(AsyncInvokeTest, FireAndForget) {
Thread thread;
thread.Start();
// Try calling functor.
- bool called = false;
+ AtomicBool called;
invoker.AsyncInvoke<void>(&thread, FunctorB(&called));
- EXPECT_TRUE_WAIT(called, kWaitTimeout);
+ EXPECT_TRUE_WAIT(called.get(), kWaitTimeout);
}
TEST_F(AsyncInvokeTest, WithCallback) {
@@ -478,26 +482,26 @@ TEST_F(AsyncInvokeTest, KillInvokerBeforeExecute) {
TEST_F(AsyncInvokeTest, Flush) {
AsyncInvoker invoker;
- bool flag1 = false;
- bool flag2 = false;
+ AtomicBool flag1;
+ AtomicBool flag2;
// Queue two async calls to the current thread.
invoker.AsyncInvoke<void>(Thread::Current(),
FunctorB(&flag1));
invoker.AsyncInvoke<void>(Thread::Current(),
FunctorB(&flag2));
// Because we haven't pumped messages, these should not have run yet.
- EXPECT_FALSE(flag1);
- EXPECT_FALSE(flag2);
+ EXPECT_FALSE(flag1.get());
+ EXPECT_FALSE(flag2.get());
// Force them to run now.
invoker.Flush(Thread::Current());
- EXPECT_TRUE(flag1);
- EXPECT_TRUE(flag2);
+ EXPECT_TRUE(flag1.get());
+ EXPECT_TRUE(flag2.get());
}
TEST_F(AsyncInvokeTest, FlushWithIds) {
AsyncInvoker invoker;
- bool flag1 = false;
- bool flag2 = false;
+ AtomicBool flag1;
+ AtomicBool flag2;
// Queue two async calls to the current thread, one with a message id.
invoker.AsyncInvoke<void>(Thread::Current(),
FunctorB(&flag1),
@@ -505,17 +509,17 @@ TEST_F(AsyncInvokeTest, FlushWithIds) {
invoker.AsyncInvoke<void>(Thread::Current(),
FunctorB(&flag2));
// Because we haven't pumped messages, these should not have run yet.
- EXPECT_FALSE(flag1);
- EXPECT_FALSE(flag2);
+ EXPECT_FALSE(flag1.get());
+ EXPECT_FALSE(flag2.get());
// Execute pending calls with id == 5.
invoker.Flush(Thread::Current(), 5);
- EXPECT_TRUE(flag1);
- EXPECT_FALSE(flag2);
+ EXPECT_TRUE(flag1.get());
+ EXPECT_FALSE(flag2.get());
flag1 = false;
// Execute all pending calls. The id == 5 call should not execute again.
invoker.Flush(Thread::Current());
- EXPECT_FALSE(flag1);
- EXPECT_TRUE(flag2);
+ EXPECT_FALSE(flag1.get());
+ EXPECT_TRUE(flag2.get());
}
class GuardedAsyncInvokeTest : public testing::Test {
@@ -564,11 +568,11 @@ TEST_F(GuardedAsyncInvokeTest, KillThreadFireAndForget) {
// Kill |thread|.
thread = nullptr;
// Try calling functor.
- bool called = false;
+ AtomicBool called;
EXPECT_FALSE(invoker->AsyncInvoke<void>(FunctorB(&called)));
// With thread gone, nothing should happen.
- WAIT(called, kWaitTimeout);
- EXPECT_FALSE(called);
+ WAIT(called.get(), kWaitTimeout);
+ EXPECT_FALSE(called.get());
}
// Test that we can call AsyncInvoke with callback after the thread died.
@@ -595,9 +599,9 @@ TEST_F(GuardedAsyncInvokeTest, KillThreadWithCallback) {
TEST_F(GuardedAsyncInvokeTest, FireAndForget) {
GuardedAsyncInvoker invoker;
// Try calling functor.
- bool called = false;
+ AtomicBool called;
EXPECT_TRUE(invoker.AsyncInvoke<void>(FunctorB(&called)));
- EXPECT_TRUE_WAIT(called, kWaitTimeout);
+ EXPECT_TRUE_WAIT(called.get(), kWaitTimeout);
}
TEST_F(GuardedAsyncInvokeTest, WithCallback) {
@@ -660,39 +664,39 @@ TEST_F(GuardedAsyncInvokeTest, KillInvokerBeforeExecute) {
TEST_F(GuardedAsyncInvokeTest, Flush) {
GuardedAsyncInvoker invoker;
- bool flag1 = false;
- bool flag2 = false;
+ AtomicBool flag1;
+ AtomicBool flag2;
// Queue two async calls to the current thread.
EXPECT_TRUE(invoker.AsyncInvoke<void>(FunctorB(&flag1)));
EXPECT_TRUE(invoker.AsyncInvoke<void>(FunctorB(&flag2)));
// Because we haven't pumped messages, these should not have run yet.
- EXPECT_FALSE(flag1);
- EXPECT_FALSE(flag2);
+ EXPECT_FALSE(flag1.get());
+ EXPECT_FALSE(flag2.get());
// Force them to run now.
EXPECT_TRUE(invoker.Flush());
- EXPECT_TRUE(flag1);
- EXPECT_TRUE(flag2);
+ EXPECT_TRUE(flag1.get());
+ EXPECT_TRUE(flag2.get());
}
TEST_F(GuardedAsyncInvokeTest, FlushWithIds) {
GuardedAsyncInvoker invoker;
- bool flag1 = false;
- bool flag2 = false;
+ AtomicBool flag1;
+ AtomicBool flag2;
// Queue two async calls to the current thread, one with a message id.
EXPECT_TRUE(invoker.AsyncInvoke<void>(FunctorB(&flag1), 5));
EXPECT_TRUE(invoker.AsyncInvoke<void>(FunctorB(&flag2)));
// Because we haven't pumped messages, these should not have run yet.
- EXPECT_FALSE(flag1);
- EXPECT_FALSE(flag2);
+ EXPECT_FALSE(flag1.get());
+ EXPECT_FALSE(flag2.get());
// Execute pending calls with id == 5.
EXPECT_TRUE(invoker.Flush(5));
- EXPECT_TRUE(flag1);
- EXPECT_FALSE(flag2);
+ EXPECT_TRUE(flag1.get());
+ EXPECT_FALSE(flag2.get());
flag1 = false;
// Execute all pending calls. The id == 5 call should not execute again.
EXPECT_TRUE(invoker.Flush());
- EXPECT_FALSE(flag1);
- EXPECT_TRUE(flag2);
+ EXPECT_FALSE(flag1.get());
+ EXPECT_TRUE(flag2.get());
}
#if defined(WEBRTC_WIN)
diff --git a/webrtc/base/timeutils.cc b/webrtc/base/timeutils.cc
index fac5b66c7e..05e9ad8243 100644
--- a/webrtc/base/timeutils.cc
+++ b/webrtc/base/timeutils.cc
@@ -204,4 +204,48 @@ int64_t TimestampWrapAroundHandler::Unwrap(uint32_t ts) {
return unwrapped_ts;
}
+int64_t TmToSeconds(const std::tm& tm) {
+ static short int mdays[12] = {31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31};
+ static short int cumul_mdays[12] = {0, 31, 59, 90, 120, 151,
+ 181, 212, 243, 273, 304, 334};
+ int year = tm.tm_year + 1900;
+ int month = tm.tm_mon;
+ int day = tm.tm_mday - 1; // Make 0-based like the rest.
+ int hour = tm.tm_hour;
+ int min = tm.tm_min;
+ int sec = tm.tm_sec;
+
+ bool expiry_in_leap_year = (year % 4 == 0 &&
+ (year % 100 != 0 || year % 400 == 0));
+
+ if (year < 1970)
+ return -1;
+ if (month < 0 || month > 11)
+ return -1;
+ if (day < 0 || day >= mdays[month] + (expiry_in_leap_year && month == 2 - 1))
+ return -1;
+ if (hour < 0 || hour > 23)
+ return -1;
+ if (min < 0 || min > 59)
+ return -1;
+ if (sec < 0 || sec > 59)
+ return -1;
+
+ day += cumul_mdays[month];
+
+ // Add number of leap days between 1970 and the expiration year, inclusive.
+ day += ((year / 4 - 1970 / 4) - (year / 100 - 1970 / 100) +
+ (year / 400 - 1970 / 400));
+
+  // We will have added one day too many above if the expiration falls in
+  // January or February of a leap year, before that year's leap day.
+ if (expiry_in_leap_year && month <= 2 - 1) // |month| is zero based.
+ day -= 1;
+
+ // Combine all variables into seconds from 1970-01-01 00:00 (except |month|
+ // which was accumulated into |day| above).
+ return (((static_cast<int64_t>
+ (year - 1970) * 365 + day) * 24 + hour) * 60 + min) * 60 + sec;
+}
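A worked example of the conversion, with a hypothetical helper: 2000-03-01 12:00:00 UTC is 10957 days to 2000-01-01 (30 x 365 plus 7 leap days), plus 60 days for January and leap-year February, times 86400 seconds, plus 43200 seconds for the half day:

int64_t DemoTmToSeconds() {
  std::tm tm = {};              // zero minutes and seconds
  tm.tm_year = 2000 - 1900;     // std::tm years are 1900-based
  tm.tm_mon = 3 - 1;            // March; tm_mon is zero-based
  tm.tm_mday = 1;
  tm.tm_hour = 12;
  return rtc::TmToSeconds(tm);  // 951912000 seconds since the epoch
}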
+
} // namespace rtc
diff --git a/webrtc/base/timeutils.h b/webrtc/base/timeutils.h
index bdeccc3739..3ade430947 100644
--- a/webrtc/base/timeutils.h
+++ b/webrtc/base/timeutils.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_BASE_TIMEUTILS_H_
#define WEBRTC_BASE_TIMEUTILS_H_
+#include <ctime>
#include <time.h>
#include "webrtc/base/basictypes.h"
@@ -93,6 +94,11 @@ class TimestampWrapAroundHandler {
int64_t num_wrap_;
};
+// Convert from std::tm, which is relative to 1900-01-01 00:00, to the number
+// of seconds since 1970-01-01 00:00 (the "epoch"). Don't return time_t, since
+// that is still 32 bits on many systems.
+int64_t TmToSeconds(const std::tm& tm);
+
} // namespace rtc
#endif // WEBRTC_BASE_TIMEUTILS_H_
diff --git a/webrtc/base/timeutils_unittest.cc b/webrtc/base/timeutils_unittest.cc
index d1b9ad4f96..688658b32f 100644
--- a/webrtc/base/timeutils_unittest.cc
+++ b/webrtc/base/timeutils_unittest.cc
@@ -10,6 +10,7 @@
#include "webrtc/base/common.h"
#include "webrtc/base/gunit.h"
+#include "webrtc/base/helpers.h"
#include "webrtc/base/thread.h"
#include "webrtc/base/timeutils.h"
@@ -166,4 +167,99 @@ TEST_F(TimestampWrapAroundHandlerTest, Unwrap) {
EXPECT_EQ(unwrapped_ts, wraparound_handler_.Unwrap(ts));
}
+class TmToSeconds : public testing::Test {
+ public:
+ TmToSeconds() {
+ // Set use of the test RNG to get deterministic expiration timestamp.
+ rtc::SetRandomTestMode(true);
+ }
+ ~TmToSeconds() {
+ // Put it back for the next test.
+ rtc::SetRandomTestMode(false);
+ }
+
+ void TestTmToSeconds(int times) {
+ static char mdays[12] = {31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31};
+ for (int i = 0; i < times; i++) {
+
+ // First generate something correct and check that TmToSeconds is happy.
+ int year = rtc::CreateRandomId() % 400 + 1970;
+
+ bool leap_year = false;
+ if (year % 4 == 0)
+ leap_year = true;
+ if (year % 100 == 0)
+ leap_year = false;
+ if (year % 400 == 0)
+ leap_year = true;
+
+ std::tm tm;
+ tm.tm_year = year - 1900; // std::tm is year 1900 based.
+ tm.tm_mon = rtc::CreateRandomId() % 12;
+ tm.tm_mday = rtc::CreateRandomId() % mdays[tm.tm_mon] + 1;
+ tm.tm_hour = rtc::CreateRandomId() % 24;
+ tm.tm_min = rtc::CreateRandomId() % 60;
+ tm.tm_sec = rtc::CreateRandomId() % 60;
+ int64_t t = rtc::TmToSeconds(tm);
+ EXPECT_TRUE(t >= 0);
+
+ // Now damage a random field and check that TmToSeconds is unhappy.
+ switch (rtc::CreateRandomId() % 11) {
+ case 0:
+ tm.tm_year = 1969 - 1900;
+ break;
+ case 1:
+ tm.tm_mon = -1;
+ break;
+ case 2:
+ tm.tm_mon = 12;
+ break;
+ case 3:
+ tm.tm_mday = 0;
+ break;
+ case 4:
+ tm.tm_mday = mdays[tm.tm_mon] + (leap_year && tm.tm_mon == 1) + 1;
+ break;
+ case 5:
+ tm.tm_hour = -1;
+ break;
+ case 6:
+ tm.tm_hour = 24;
+ break;
+ case 7:
+ tm.tm_min = -1;
+ break;
+ case 8:
+ tm.tm_min = 60;
+ break;
+ case 9:
+ tm.tm_sec = -1;
+ break;
+ case 10:
+ tm.tm_sec = 60;
+ break;
+ }
+ EXPECT_EQ(rtc::TmToSeconds(tm), -1);
+ }
+    // Check consistency with the system gmtime_r. With time_t, we can only
+    // portably test dates up to 2038, hence the % 0x80000000 on the input.
+ for (int i = 0; i < times; i++) {
+ time_t t = rtc::CreateRandomId() % 0x80000000;
+#if defined(WEBRTC_WIN)
+ std::tm* tm = std::gmtime(&t);
+ EXPECT_TRUE(tm);
+ EXPECT_TRUE(rtc::TmToSeconds(*tm) == t);
+#else
+ std::tm tm;
+ EXPECT_TRUE(gmtime_r(&t, &tm));
+ EXPECT_TRUE(rtc::TmToSeconds(tm) == t);
+#endif
+ }
+ }
+};
+
+TEST_F(TmToSeconds, TestTmToSeconds) {
+ TestTmToSeconds(100000);
+}
+
} // namespace rtc
diff --git a/webrtc/base/trace_event.h b/webrtc/base/trace_event.h
index c14cbff030..3916af4fb6 100644
--- a/webrtc/base/trace_event.h
+++ b/webrtc/base/trace_event.h
@@ -701,7 +701,7 @@ class TraceID {
explicit TraceID(const void* id, unsigned char* flags)
: data_(static_cast<unsigned long long>(
- reinterpret_cast<unsigned long>(id))) {
+ reinterpret_cast<uintptr_t>(id))) {
*flags |= TRACE_EVENT_FLAG_MANGLE_ID;
}
explicit TraceID(ForceMangle id, unsigned char* flags) : data_(id.data()) {
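The cast change matters on LLP64 Windows, where unsigned long is 32 bits while pointers are 64: routing the pointer through unsigned long truncated the id. uintptr_t is guaranteed wide enough to round-trip an object pointer. Sketched in isolation, with a hypothetical helper:

#include <cstdint>

unsigned long long PointerAsId(const void* id) {
  // uintptr_t preserves all pointer bits on both LP64 and LLP64 targets.
  return static_cast<unsigned long long>(reinterpret_cast<uintptr_t>(id));
}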
diff --git a/webrtc/base/unittest_main.cc b/webrtc/base/unittest_main.cc
index f952b2d547..167570d449 100644
--- a/webrtc/base/unittest_main.cc
+++ b/webrtc/base/unittest_main.cc
@@ -78,12 +78,12 @@ int main(int argc, char** argv) {
_CrtSetReportHook2(_CRT_RPTHOOK_INSTALL, TestCrtReportHandler);
}
-#ifdef _DEBUG // Turn on memory leak checking on Windows.
+#if !defined(NDEBUG) // Turn on memory leak checking on Windows.
_CrtSetDbgFlag(_CRTDBG_ALLOC_MEM_DF |_CRTDBG_LEAK_CHECK_DF);
if (FLAG_crt_break_alloc >= 0) {
_crtBreakAlloc = FLAG_crt_break_alloc;
}
-#endif // _DEBUG
+#endif
#endif // WEBRTC_WIN
rtc::Filesystem::SetOrganizationName("google");
@@ -93,6 +93,10 @@ int main(int argc, char** argv) {
rtc::LogMessage::LogTimestamps();
if (*FLAG_log != '\0') {
rtc::LogMessage::ConfigureLogging(FLAG_log);
+ } else if (rtc::LogMessage::GetLogToDebug() > rtc::LS_INFO) {
+    // Default to LS_INFO, even for release builds, to provide better test
+    // logging.
+ rtc::LogMessage::LogToDebug(rtc::LS_INFO);
}
// Initialize SSL which are used by several tests.
diff --git a/webrtc/base/unixfilesystem.cc b/webrtc/base/unixfilesystem.cc
index 30d6e7872e..b474324192 100644
--- a/webrtc/base/unixfilesystem.cc
+++ b/webrtc/base/unixfilesystem.cc
@@ -44,6 +44,7 @@
#include <sys/syslimits.h>
#endif
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/fileutils.h"
#include "webrtc/base/pathutils.h"
#include "webrtc/base/stream.h"
@@ -176,7 +177,7 @@ bool UnixFilesystem::GetTemporaryFolder(Pathname &pathname, bool create,
kCreateFolder, &fr))
return false;
unsigned char buffer[NAME_MAX+1];
- if (0 != FSRefMakePath(&fr, buffer, ARRAY_SIZE(buffer)))
+ if (0 != FSRefMakePath(&fr, buffer, arraysize(buffer)))
return false;
pathname.SetPathname(reinterpret_cast<char*>(buffer), "");
#elif defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS)
@@ -303,7 +304,7 @@ bool UnixFilesystem::IsTemporaryPath(const Pathname& pathname) {
#endif // WEBRTC_MAC && !defined(WEBRTC_IOS)
#endif // WEBRTC_ANDROID || WEBRTC_IOS
};
- for (size_t i = 0; i < ARRAY_SIZE(kTempPrefixes); ++i) {
+ for (size_t i = 0; i < arraysize(kTempPrefixes); ++i) {
if (0 == strncmp(pathname.pathname().c_str(), kTempPrefixes[i],
strlen(kTempPrefixes[i])))
return true;
@@ -372,12 +373,12 @@ bool UnixFilesystem::GetAppPathname(Pathname* path) {
return success;
#elif defined(__native_client__)
return false;
-#elif IOS
+#elif WEBRTC_IOS
IOSAppName(path);
return true;
#else // WEBRTC_MAC && !defined(WEBRTC_IOS)
char buffer[PATH_MAX + 2];
- ssize_t len = readlink("/proc/self/exe", buffer, ARRAY_SIZE(buffer) - 1);
+ ssize_t len = readlink("/proc/self/exe", buffer, arraysize(buffer) - 1);
if ((len <= 0) || (len == PATH_MAX + 1))
return false;
buffer[len] = '\0';
@@ -399,7 +400,7 @@ bool UnixFilesystem::GetAppDataFolder(Pathname* path, bool per_user) {
kCreateFolder, &fr))
return false;
unsigned char buffer[NAME_MAX+1];
- if (0 != FSRefMakePath(&fr, buffer, ARRAY_SIZE(buffer)))
+ if (0 != FSRefMakePath(&fr, buffer, arraysize(buffer)))
return false;
path->SetPathname(reinterpret_cast<char*>(buffer), "");
} else {
@@ -487,7 +488,7 @@ bool UnixFilesystem::GetAppTempFolder(Pathname* path) {
// Create a random directory as /tmp/<appname>-<pid>-<timestamp>
char buffer[128];
- sprintfn(buffer, ARRAY_SIZE(buffer), "-%d-%d",
+ sprintfn(buffer, arraysize(buffer), "-%d-%d",
static_cast<int>(getpid()),
static_cast<int>(time(0)));
std::string folder(application_name_);
diff --git a/webrtc/base/urlencode_unittest.cc b/webrtc/base/urlencode_unittest.cc
index 52169132e2..6a61db3ae3 100644
--- a/webrtc/base/urlencode_unittest.cc
+++ b/webrtc/base/urlencode_unittest.cc
@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/common.h"
#include "webrtc/base/gunit.h"
#include "webrtc/base/thread.h"
@@ -19,7 +20,7 @@ TEST(Urlencode, SourceTooLong) {
char source[] = "^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^"
"^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^";
char dest[1];
- ASSERT_EQ(0, UrlEncode(source, dest, ARRAY_SIZE(dest)));
+ ASSERT_EQ(0, UrlEncode(source, dest, arraysize(dest)));
ASSERT_EQ('\0', dest[0]);
dest[0] = 'a';
@@ -30,7 +31,7 @@ TEST(Urlencode, SourceTooLong) {
TEST(Urlencode, OneCharacterConversion) {
char source[] = "^";
char dest[4];
- ASSERT_EQ(3, UrlEncode(source, dest, ARRAY_SIZE(dest)));
+ ASSERT_EQ(3, UrlEncode(source, dest, arraysize(dest)));
ASSERT_STREQ("%5E", dest);
}
@@ -40,7 +41,7 @@ TEST(Urlencode, ShortDestinationNoEncoding) {
// hold the text given.
char source[] = "aa";
char dest[3];
- ASSERT_EQ(2, UrlEncode(source, dest, ARRAY_SIZE(dest)));
+ ASSERT_EQ(2, UrlEncode(source, dest, arraysize(dest)));
ASSERT_STREQ("aa", dest);
}
@@ -49,14 +50,14 @@ TEST(Urlencode, ShortDestinationEncoding) {
// big enough to hold the encoding.
char source[] = "&";
char dest[3];
- ASSERT_EQ(0, UrlEncode(source, dest, ARRAY_SIZE(dest)));
+ ASSERT_EQ(0, UrlEncode(source, dest, arraysize(dest)));
ASSERT_EQ('\0', dest[0]);
}
TEST(Urlencode, Encoding1) {
char source[] = "A^ ";
char dest[8];
- ASSERT_EQ(5, UrlEncode(source, dest, ARRAY_SIZE(dest)));
+ ASSERT_EQ(5, UrlEncode(source, dest, arraysize(dest)));
ASSERT_STREQ("A%5E+", dest);
}
@@ -64,7 +65,7 @@ TEST(Urlencode, Encoding2) {
char source[] = "A^ ";
char dest[8];
ASSERT_EQ(7, rtc::UrlEncodeWithoutEncodingSpaceAsPlus(source, dest,
- ARRAY_SIZE(dest)));
+ arraysize(dest)));
ASSERT_STREQ("A%5E%20", dest);
}
diff --git a/webrtc/base/virtualsocket_unittest.cc b/webrtc/base/virtualsocket_unittest.cc
index 694b154a4d..2cd2b5e4de 100644
--- a/webrtc/base/virtualsocket_unittest.cc
+++ b/webrtc/base/virtualsocket_unittest.cc
@@ -14,6 +14,7 @@
#include <netinet/in.h>
#endif
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/gunit.h"
#include "webrtc/base/testclient.h"
@@ -21,7 +22,6 @@
#include "webrtc/base/thread.h"
#include "webrtc/base/timeutils.h"
#include "webrtc/base/virtualsocketserver.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
using namespace rtc;
@@ -1022,9 +1022,9 @@ TEST_F(VirtualSocketServerTest, CreatesStandardDistribution) {
const double kTestDev[] = { 0.25, 0.1, 0.01 };
// TODO: The current code only works for 1000 data points or more.
const uint32_t kTestSamples[] = {/*10, 100,*/ 1000};
- for (size_t midx = 0; midx < ARRAY_SIZE(kTestMean); ++midx) {
- for (size_t didx = 0; didx < ARRAY_SIZE(kTestDev); ++didx) {
- for (size_t sidx = 0; sidx < ARRAY_SIZE(kTestSamples); ++sidx) {
+ for (size_t midx = 0; midx < arraysize(kTestMean); ++midx) {
+ for (size_t didx = 0; didx < arraysize(kTestDev); ++didx) {
+ for (size_t sidx = 0; sidx < arraysize(kTestSamples); ++sidx) {
ASSERT_LT(0u, kTestSamples[sidx]);
const uint32_t kStdDev =
static_cast<uint32_t>(kTestDev[didx] * kTestMean[midx]);
diff --git a/webrtc/base/win32.cc b/webrtc/base/win32.cc
index 6e09829153..182b84f482 100644
--- a/webrtc/base/win32.cc
+++ b/webrtc/base/win32.cc
@@ -14,6 +14,7 @@
#include <ws2tcpip.h>
#include <algorithm>
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/basictypes.h"
#include "webrtc/base/byteorder.h"
#include "webrtc/base/common.h"
@@ -87,7 +88,7 @@ const char* inet_ntop_v6(const void* src, char* dst, socklen_t size) {
int current = 1;
int max = 0;
int maxpos = -1;
- int run_array_size = ARRAY_SIZE(runpos);
+ int run_array_size = arraysize(runpos);
// Run over the address marking runs of 0s.
for (int i = 0; i < run_array_size; ++i) {
if (as_shorts[i] == 0) {
diff --git a/webrtc/base/win32filesystem.cc b/webrtc/base/win32filesystem.cc
index 8ac918ff83..b731974bac 100644
--- a/webrtc/base/win32filesystem.cc
+++ b/webrtc/base/win32filesystem.cc
@@ -15,6 +15,7 @@
#include <shlobj.h>
#include <tchar.h>
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/fileutils.h"
#include "webrtc/base/pathutils.h"
#include "webrtc/base/scoped_ptr.h"
@@ -197,16 +198,16 @@ bool Win32Filesystem::DeleteEmptyFolder(const Pathname &folder) {
bool Win32Filesystem::GetTemporaryFolder(Pathname &pathname, bool create,
const std::string *append) {
wchar_t buffer[MAX_PATH + 1];
- if (!::GetTempPath(ARRAY_SIZE(buffer), buffer))
+ if (!::GetTempPath(arraysize(buffer), buffer))
return false;
if (!IsCurrentProcessLowIntegrity() &&
- !::GetLongPathName(buffer, buffer, ARRAY_SIZE(buffer)))
+ !::GetLongPathName(buffer, buffer, arraysize(buffer)))
return false;
size_t len = strlen(buffer);
if ((len > 0) && (buffer[len-1] != '\\')) {
- len += strcpyn(buffer + len, ARRAY_SIZE(buffer) - len, L"\\");
+ len += strcpyn(buffer + len, arraysize(buffer) - len, L"\\");
}
- if (len >= ARRAY_SIZE(buffer) - 1)
+ if (len >= arraysize(buffer) - 1)
return false;
pathname.clear();
pathname.SetFolder(ToUtf8(buffer));
@@ -295,10 +296,10 @@ bool Win32Filesystem::CopyFile(const Pathname &old_path,
bool Win32Filesystem::IsTemporaryPath(const Pathname& pathname) {
TCHAR buffer[MAX_PATH + 1];
- if (!::GetTempPath(ARRAY_SIZE(buffer), buffer))
+ if (!::GetTempPath(arraysize(buffer), buffer))
return false;
if (!IsCurrentProcessLowIntegrity() &&
- !::GetLongPathName(buffer, buffer, ARRAY_SIZE(buffer)))
+ !::GetLongPathName(buffer, buffer, arraysize(buffer)))
return false;
return (::strnicmp(ToUtf16(pathname.pathname()).c_str(),
buffer, strlen(buffer)) == 0);
@@ -337,7 +338,7 @@ bool Win32Filesystem::GetFileTime(const Pathname& path, FileTimeType which,
bool Win32Filesystem::GetAppPathname(Pathname* path) {
TCHAR buffer[MAX_PATH + 1];
- if (0 == ::GetModuleFileName(NULL, buffer, ARRAY_SIZE(buffer)))
+ if (0 == ::GetModuleFileName(NULL, buffer, arraysize(buffer)))
return false;
path->SetPathname(ToUtf8(buffer));
return true;
@@ -351,20 +352,20 @@ bool Win32Filesystem::GetAppDataFolder(Pathname* path, bool per_user) {
if (!::SHGetSpecialFolderPath(NULL, buffer, csidl, TRUE))
return false;
if (!IsCurrentProcessLowIntegrity() &&
- !::GetLongPathName(buffer, buffer, ARRAY_SIZE(buffer)))
+ !::GetLongPathName(buffer, buffer, arraysize(buffer)))
return false;
- size_t len = strcatn(buffer, ARRAY_SIZE(buffer), __T("\\"));
- len += strcpyn(buffer + len, ARRAY_SIZE(buffer) - len,
+ size_t len = strcatn(buffer, arraysize(buffer), __T("\\"));
+ len += strcpyn(buffer + len, arraysize(buffer) - len,
ToUtf16(organization_name_).c_str());
if ((len > 0) && (buffer[len-1] != __T('\\'))) {
- len += strcpyn(buffer + len, ARRAY_SIZE(buffer) - len, __T("\\"));
+ len += strcpyn(buffer + len, arraysize(buffer) - len, __T("\\"));
}
- len += strcpyn(buffer + len, ARRAY_SIZE(buffer) - len,
+ len += strcpyn(buffer + len, arraysize(buffer) - len,
ToUtf16(application_name_).c_str());
if ((len > 0) && (buffer[len-1] != __T('\\'))) {
- len += strcpyn(buffer + len, ARRAY_SIZE(buffer) - len, __T("\\"));
+ len += strcpyn(buffer + len, arraysize(buffer) - len, __T("\\"));
}
- if (len >= ARRAY_SIZE(buffer) - 1)
+ if (len >= arraysize(buffer) - 1)
return false;
path->clear();
path->SetFolder(ToUtf8(buffer));
diff --git a/webrtc/base/win32regkey_unittest.cc b/webrtc/base/win32regkey_unittest.cc
index 389c3a243c..1702ef741d 100644
--- a/webrtc/base/win32regkey_unittest.cc
+++ b/webrtc/base/win32regkey_unittest.cc
@@ -10,6 +10,7 @@
// Unittest for registry access API
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/gunit.h"
#include "webrtc/base/common.h"
#include "webrtc/base/win32regkey.h"
@@ -564,8 +565,8 @@ void RegKeyStaticFunctionsTest() {
#ifdef IS_PRIVATE_BUILD
// get a temp file name
wchar_t temp_path[MAX_PATH] = {0};
- EXPECT_LT(::GetTempPath(ARRAY_SIZE(temp_path), temp_path),
- static_cast<DWORD>(ARRAY_SIZE(temp_path)));
+ EXPECT_LT(::GetTempPath(arraysize(temp_path), temp_path),
+ static_cast<DWORD>(arraysize(temp_path)));
wchar_t temp_file[MAX_PATH] = {0};
EXPECT_NE(::GetTempFileName(temp_path, L"rkut_",
::GetTickCount(), temp_file), 0);
diff --git a/webrtc/base/win32socketserver.cc b/webrtc/base/win32socketserver.cc
index f466bf10cb..72ce4ebb7c 100644
--- a/webrtc/base/win32socketserver.cc
+++ b/webrtc/base/win32socketserver.cc
@@ -55,7 +55,7 @@ static const int ICMP_HEADER_SIZE = 8u;
static const int ICMP_PING_TIMEOUT_MILLIS = 10000u;
// TODO: Enable for production builds also? Use FormatMessage?
-#ifdef _DEBUG
+#if !defined(NDEBUG)
LPCSTR WSAErrorToString(int error, LPCSTR *description_result) {
LPCSTR string = "Unspecified";
LPCSTR description = "Unspecified description";
@@ -626,7 +626,7 @@ void Win32Socket::OnSocketNotify(SOCKET socket, int event, int error) {
case FD_CONNECT:
if (error != ERROR_SUCCESS) {
ReportWSAError("WSAAsync:connect notify", error, addr_);
-#ifdef _DEBUG
+#if !defined(NDEBUG)
int32_t duration = TimeSince(connect_time_);
LOG(LS_INFO) << "WSAAsync:connect error (" << duration
<< " ms), faking close";
@@ -639,7 +639,7 @@ void Win32Socket::OnSocketNotify(SOCKET socket, int event, int error) {
// though the connect event never did occur.
SignalCloseEvent(this, error);
} else {
-#ifdef _DEBUG
+#if !defined(NDEBUG)
int32_t duration = TimeSince(connect_time_);
LOG(LS_INFO) << "WSAAsync:connect (" << duration << " ms)";
#endif
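
The _DEBUG macro being replaced here is MSVC-specific, while NDEBUG is the standard macro that release builds define (it is what <cassert> keys off), so !defined(NDEBUG) selects debug-only code consistently across toolchains. A small sketch of the pattern; DEBUG_LOG is a hypothetical helper, not part of WebRTC:

#include <iostream>

// NDEBUG is defined by release builds on every toolchain; _DEBUG is only
// defined by MSVC debug runtimes, so gcc/clang debug builds would miss
// code guarded by #ifdef _DEBUG.
#if !defined(NDEBUG)
#define DEBUG_LOG(msg) (std::cerr << "debug: " << (msg) << '\n')
#else
#define DEBUG_LOG(msg) ((void)0)
#endif

int main() {
  DEBUG_LOG("emitted only when NDEBUG is not defined");
  return 0;
}
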
diff --git a/webrtc/base/win32windowpicker.cc b/webrtc/base/win32windowpicker.cc
index b4550ae4a4..da05a5c65c 100644
--- a/webrtc/base/win32windowpicker.cc
+++ b/webrtc/base/win32windowpicker.cc
@@ -12,6 +12,7 @@
#include <string>
#include <vector>
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/common.h"
#include "webrtc/base/logging.h"
@@ -58,7 +59,7 @@ BOOL CALLBACK Win32WindowPicker::EnumProc(HWND hwnd, LPARAM l_param) {
}
TCHAR window_title[500];
- GetWindowText(hwnd, window_title, ARRAY_SIZE(window_title));
+ GetWindowText(hwnd, window_title, arraysize(window_title));
std::string title = ToUtf8(window_title);
WindowId id(hwnd);
diff --git a/webrtc/base/win32windowpicker_unittest.cc b/webrtc/base/win32windowpicker_unittest.cc
index 71e8af6bf2..701bb27d42 100644
--- a/webrtc/base/win32windowpicker_unittest.cc
+++ b/webrtc/base/win32windowpicker_unittest.cc
@@ -7,6 +7,7 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/gunit.h"
#include "webrtc/base/common.h"
#include "webrtc/base/logging.h"
@@ -71,7 +72,7 @@ TEST(Win32WindowPickerTest, TestGetWindowList) {
EXPECT_EQ(window_picker.visible_window()->handle(), desc.id().id());
TCHAR window_title[500];
GetWindowText(window_picker.visible_window()->handle(), window_title,
- ARRAY_SIZE(window_title));
+ arraysize(window_title));
EXPECT_EQ(0, wcscmp(window_title, kVisibleWindowTitle));
}
diff --git a/webrtc/build/android/AndroidManifest.xml b/webrtc/build/android/AndroidManifest.xml
new file mode 100644
index 0000000000..bb6d354fef
--- /dev/null
+++ b/webrtc/build/android/AndroidManifest.xml
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+ This is a dummy manifest which is required by:
+ 1. aapt when generating R.java in java.gypi:
+ Nothing in the manifest is used, but it is still required by aapt.
+ 2. lint: [min|target]SdkVersion are required by lint and should
+ be kept up-to-date.
+-->
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ package="dummy.package">
+
+ <uses-sdk android:minSdkVersion="16" android:targetSdkVersion="23" />
+
+</manifest>
diff --git a/webrtc/build/android/suppressions.xml b/webrtc/build/android/suppressions.xml
new file mode 100644
index 0000000000..0fc22e0813
--- /dev/null
+++ b/webrtc/build/android/suppressions.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="utf-8"?>
+<lint>
+  <!-- These lint settings are for the Android linter that gets run by
+ lint_action.gypi on compile of WebRTC java code. All WebRTC java code
+ should lint cleanly for the issues below. -->
+ <!-- TODO(phoglund): make work with suppress.py or remove printout referring
+ to suppress.py. -->
+ <issue id="NewApi"></issue>
+
+ <issue id="Locale" severity="ignore"/>
+ <issue id="SdCardPath" severity="ignore"/>
+ <issue id="UseValueOf" severity="ignore"/>
+ <issue id="InlinedApi" severity="ignore"/>
+ <issue id="DefaultLocale" severity="ignore"/>
+ <issue id="Assert" severity="ignore"/>
+ <issue id="UseSparseArrays" severity="ignore"/>
+
+ <!-- These are just from the dummy AndroidManifest.xml we use for linting.
+ It's in the same directory as this file. -->
+ <issue id="MissingApplicationIcon" severity="ignore"/>
+ <issue id="AllowBackup" severity="ignore"/>
+ <issue id="MissingVersion" severity="ignore"/>
+</lint>
diff --git a/webrtc/build/android/test_runner.py b/webrtc/build/android/test_runner.py
index 799698201b..78a7a190b2 100755
--- a/webrtc/build/android/test_runner.py
+++ b/webrtc/build/android/test_runner.py
@@ -38,6 +38,8 @@ def main():
'webrtc/common_audio/common_audio_unittests.isolate',
'common_video_unittests':
'webrtc/common_video/common_video_unittests.isolate',
+ 'libjingle_peerconnection_unittest':
+ 'talk/libjingle_peerconnection_unittest.isolate',
'modules_tests': 'webrtc/modules/modules_tests.isolate',
'modules_unittests': 'webrtc/modules/modules_unittests.isolate',
'rtc_unittests': 'webrtc/rtc_unittests.isolate',
@@ -48,8 +50,6 @@ def main():
'video_capture_tests':
'webrtc/modules/video_capture/video_capture_tests.isolate',
'video_engine_tests': 'webrtc/video_engine_tests.isolate',
- 'video_engine_core_unittests':
- 'webrtc/video_engine/video_engine_core_unittests.isolate',
'voice_engine_unittests':
'webrtc/voice_engine/voice_engine_unittests.isolate',
'webrtc_nonparallel_tests': 'webrtc/webrtc_nonparallel_tests.isolate',
diff --git a/webrtc/build/apk_test.gypi b/webrtc/build/apk_test.gypi
new file mode 100644
index 0000000000..a41e436a48
--- /dev/null
+++ b/webrtc/build/apk_test.gypi
@@ -0,0 +1,40 @@
+# Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# This is an almost identical copy of src/build/apk_test.gypi with minor
+# modifications to allow test executables starting with "lib".
+# See http://crbug.com/543820 for more details.
+
+{
+ 'dependencies': [
+ '<(DEPTH)/base/base.gyp:base_java',
+ '<(DEPTH)/build/android/pylib/device/commands/commands.gyp:chromium_commands',
+ '<(DEPTH)/build/android/pylib/remote/device/dummy/dummy.gyp:remote_device_dummy_apk',
+ '<(DEPTH)/testing/android/appurify_support.gyp:appurify_support_java',
+ '<(DEPTH)/testing/android/on_device_instrumentation.gyp:reporter_java',
+ '<(DEPTH)/tools/android/android_tools.gyp:android_tools',
+ ],
+ 'conditions': [
+ ['OS == "android"', {
+ 'variables': {
+ # These are used to configure java_apk.gypi included below.
+ 'test_type': 'gtest',
+ 'apk_name': '<(test_suite_name)',
+ 'intermediate_dir': '<(PRODUCT_DIR)/<(test_suite_name)_apk',
+ 'final_apk_path': '<(intermediate_dir)/<(test_suite_name)-debug.apk',
+ 'java_in_dir': '<(DEPTH)/testing/android/native_test/java',
+ 'native_lib_target': '<(test_suite_name)',
+ 'gyp_managed_install': 0,
+ },
+ 'includes': [
+ '../../build/java_apk.gypi',
+ '../../build/android/test_runner.gypi',
+ ],
+      }],  # 'OS == "android"'
+ ], # conditions
+}
diff --git a/webrtc/build/apk_tests.gyp b/webrtc/build/apk_tests.gyp
index a3833ff524..02a13421f9 100644
--- a/webrtc/build/apk_tests.gyp
+++ b/webrtc/build/apk_tests.gyp
@@ -61,6 +61,23 @@
],
},
{
+ 'target_name': 'libjingle_peerconnection_unittest_apk',
+ 'type': 'none',
+ 'variables': {
+ 'test_suite_name': 'libjingle_peerconnection_unittest',
+ 'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)libjingle_peerconnection_unittest<(SHARED_LIB_SUFFIX)',
+ },
+ 'dependencies': [
+ '<(DEPTH)/talk/libjingle_tests.gyp:libjingle_peerconnection_unittest',
+ '<(DEPTH)/talk/libjingle.gyp:libjingle_peerconnection_java',
+ ],
+ 'includes': [
+ # Use webrtc copy of apk_test.gypi to allow test executables starting
+ # with "lib". See http://crbug.com/543820 for more details.
+ '../build/apk_test.gypi',
+ ],
+ },
+ {
'target_name': 'modules_tests_apk',
'type': 'none',
'variables': {
@@ -146,20 +163,6 @@
],
},
{
- 'target_name': 'video_engine_core_unittests_apk',
- 'type': 'none',
- 'variables': {
- 'test_suite_name': 'video_engine_core_unittests',
- 'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)video_engine_core_unittests<(SHARED_LIB_SUFFIX)',
- },
- 'dependencies': [
- '<(webrtc_root)/video_engine/video_engine_core_unittests.gyp:video_engine_core_unittests',
- ],
- 'includes': [
- '../../build/apk_test.gypi',
- ],
- },
- {
'target_name': 'video_engine_tests_apk',
'type': 'none',
'variables': {
diff --git a/webrtc/build/apk_tests_noop.gyp b/webrtc/build/apk_tests_noop.gyp
index 719bddb854..ed9249aec5 100644
--- a/webrtc/build/apk_tests_noop.gyp
+++ b/webrtc/build/apk_tests_noop.gyp
@@ -22,6 +22,10 @@
'type': 'none',
},
{
+ 'target_name': 'libjingle_peerconnection_unittest_apk',
+ 'type': 'none',
+ },
+ {
'target_name': 'modules_tests_apk',
'type': 'none',
},
@@ -46,10 +50,6 @@
'type': 'none',
},
{
- 'target_name': 'video_engine_core_unittests_apk',
- 'type': 'none',
- },
- {
'target_name': 'video_engine_tests_apk',
'type': 'none',
},
diff --git a/webrtc/build/common.gypi b/webrtc/build/common.gypi
index 2b0516863e..8d8583f9a4 100644
--- a/webrtc/build/common.gypi
+++ b/webrtc/build/common.gypi
@@ -21,12 +21,10 @@
'conditions': [
['build_with_chromium==1', {
- 'build_with_libjingle': 1,
'webrtc_root%': '<(DEPTH)/third_party/webrtc',
'apk_tests_path%': '<(DEPTH)/third_party/webrtc/build/apk_tests_noop.gyp',
'modules_java_gyp_path%': '<(DEPTH)/third_party/webrtc/modules/modules_java_chromium.gyp',
}, {
- 'build_with_libjingle%': 0,
'webrtc_root%': '<(DEPTH)/webrtc',
'apk_tests_path%': '<(DEPTH)/webrtc/build/apk_tests.gyp',
'modules_java_gyp_path%': '<(DEPTH)/webrtc/modules/modules_java.gyp',
@@ -34,7 +32,6 @@
],
},
'build_with_chromium%': '<(build_with_chromium)',
- 'build_with_libjingle%': '<(build_with_libjingle)',
'webrtc_root%': '<(webrtc_root)',
'apk_tests_path%': '<(apk_tests_path)',
'modules_java_gyp_path%': '<(modules_java_gyp_path)',
@@ -47,7 +44,6 @@
'build_with_mozilla%': 0,
},
'build_with_chromium%': '<(build_with_chromium)',
- 'build_with_libjingle%': '<(build_with_libjingle)',
'build_with_mozilla%': '<(build_with_mozilla)',
'webrtc_root%': '<(webrtc_root)',
'apk_tests_path%': '<(apk_tests_path)',
@@ -89,15 +85,14 @@
# Disable these to not build components which can be externally provided.
'build_expat%': 1,
- 'build_icu%': 1,
'build_json%': 1,
'build_libjpeg%': 1,
'build_libvpx%': 1,
'build_libyuv%': 1,
'build_openmax_dl%': 1,
'build_opus%': 1,
+ 'build_protobuf%': 1,
'build_ssl%': 1,
- 'build_vp9%': 1,
# Disable by default
'have_dbus_glib%': 0,
@@ -129,6 +124,17 @@
# Enabling this may break interop with Android clients that support H264.
'use_objc_h264%': 0,
+ # Enable this to build H.264 encoder/decoder using third party libraries.
+ # Encoding uses OpenH264 and decoding uses FFmpeg. Because of this, OpenH264
+ # and FFmpeg have to be correctly enabled separately.
+ # - use_openh264=1 is required for OpenH264 targets to be defined.
+ # - ffmpeg_branding=Chrome is one way to support H.264 decoding in FFmpeg.
+ # FFmpeg can be built with/without H.264 support, see 'ffmpeg_branding'.
+  #   Without H.264 support in FFmpeg, the code compiles but H264DecoderImpl fails to initialize.
+ # CHECK THE OPENH264, FFMPEG AND H.264 LICENSES/PATENTS BEFORE BUILDING.
+ # http://www.openh264.org, https://www.ffmpeg.org/
+ 'use_third_party_h264%': 0, # TODO(hbos): To be used in follow-up CL(s).
+
'conditions': [
['build_with_chromium==1', {
# Exclude pulse audio on Chromium since its prerequisites don't require
@@ -137,6 +143,10 @@
# Exclude internal ADM since Chromium uses its own IO handling.
'include_internal_audio_device%': 0,
+
+ # Remove tests for Chromium to avoid slowing down GYP generation.
+ 'include_tests%': 0,
+ 'restrict_webrtc_logging%': 1,
}, { # Settings for the standalone (not-in-Chromium) build.
# TODO(andrew): For now, disable the Chrome plugins, which causes a
# flood of chromium-style warnings. Investigate enabling them:
@@ -145,17 +155,11 @@
'include_pulse_audio%': 1,
'include_internal_audio_device%': 1,
- }],
- ['build_with_libjingle==1', {
- 'include_tests%': 0,
- 'restrict_webrtc_logging%': 1,
- }, {
'include_tests%': 1,
'restrict_webrtc_logging%': 0,
}],
['OS=="ios"', {
'build_libjpeg%': 0,
- 'enable_protobuf%': 0,
}],
['target_arch=="arm" or target_arch=="arm64"', {
'prefer_fixed_point%': 1,
diff --git a/webrtc/build/protoc.gypi b/webrtc/build/protoc.gypi
index 5e486f16c2..682bc22cc5 100644
--- a/webrtc/build/protoc.gypi
+++ b/webrtc/build/protoc.gypi
@@ -109,10 +109,6 @@
'process_outputs_as_sources': 1,
},
],
- 'dependencies': [
- '<(DEPTH)/third_party/protobuf/protobuf.gyp:protoc#host',
- '<(DEPTH)/third_party/protobuf/protobuf.gyp:protobuf_lite',
- ],
'include_dirs': [
'<(SHARED_INTERMEDIATE_DIR)/protoc_out',
'<(DEPTH)',
@@ -123,12 +119,20 @@
'<(DEPTH)',
]
},
- 'export_dependent_settings': [
- # The generated headers reference headers within protobuf_lite,
- # so dependencies must be able to find those headers too.
- '<(DEPTH)/third_party/protobuf/protobuf.gyp:protobuf_lite',
- ],
# This target exports a hard dependency because it generates header
# files.
'hard_dependency': 1,
+ 'conditions': [
+ ['build_protobuf==1', {
+ 'dependencies': [
+ '<(DEPTH)/third_party/protobuf/protobuf.gyp:protoc#host',
+ '<(DEPTH)/third_party/protobuf/protobuf.gyp:protobuf_lite',
+ ],
+ 'export_dependent_settings': [
+ # The generated headers reference headers within protobuf_lite,
+ # so dependencies must be able to find those headers too.
+ '<(DEPTH)/third_party/protobuf/protobuf.gyp:protobuf_lite',
+ ],
+ }],
+ ],
}
diff --git a/webrtc/build/sanitizers/tsan_suppressions_webrtc.cc b/webrtc/build/sanitizers/tsan_suppressions_webrtc.cc
index 4022160b52..115099099f 100644
--- a/webrtc/build/sanitizers/tsan_suppressions_webrtc.cc
+++ b/webrtc/build/sanitizers/tsan_suppressions_webrtc.cc
@@ -42,6 +42,10 @@ char kTSanDefaultSuppressions[] =
"race:webrtc/modules/audio_processing/aec/aec_core.c\n"
"race:webrtc/modules/audio_processing/aec/aec_rdft.c\n"
+// Race in pulse initialization.
+// https://code.google.com/p/webrtc/issues/detail?id=5152
+"race:webrtc::AudioDeviceLinuxPulse::Init\n"
+
// rtc_unittest
// https://code.google.com/p/webrtc/issues/detail?id=3911 for details.
"race:rtc::AsyncInvoker::OnMessage\n"
@@ -68,6 +72,9 @@ char kTSanDefaultSuppressions[] =
// TODO(jiayl): https://code.google.com/p/webrtc/issues/detail?id=3492
"race:user_sctp_timer_iterate\n"
+// https://code.google.com/p/webrtc/issues/detail?id=5151
+"race:sctp_close\n"
+
// Potential deadlocks detected after roll in r6516.
// https://code.google.com/p/webrtc/issues/detail?id=3509
"deadlock:webrtc::RTCPReceiver::SetSsrcs\n"
@@ -85,7 +92,7 @@ char kTSanDefaultSuppressions[] =
// Race between InitCpuFlags and TestCpuFlag in libyuv.
// https://code.google.com/p/libyuv/issues/detail?id=508
-"race:libyuv::TestCpuFlag\n"
+"race:InitCpuFlags\n"
// End of suppressions.
; // Please keep this semicolon.
diff --git a/webrtc/build/webrtc.gni b/webrtc/build/webrtc.gni
index 1d33e892d7..c55f4230bd 100644
--- a/webrtc/build/webrtc.gni
+++ b/webrtc/build/webrtc.gni
@@ -36,7 +36,6 @@ declare_args() {
# Disable these to not build components which can be externally provided.
rtc_build_expat = true
- rtc_build_icu = true
rtc_build_json = true
rtc_build_libjpeg = true
rtc_build_libvpx = true
@@ -44,7 +43,6 @@ declare_args() {
rtc_build_openmax_dl = true
rtc_build_opus = true
rtc_build_ssl = true
- rtc_build_vp9 = true
# Disable by default.
rtc_have_dbus_glib = false
@@ -92,6 +90,17 @@ declare_args() {
# Enable this to use HW H.264 encoder/decoder on iOS PeerConnections.
# Enabling this may break interop with Android clients that support H264.
rtc_use_objc_h264 = false
+
+ # Enable this to build H.264 encoder/decoder using third party libraries.
+ # Encoding uses OpenH264 and decoding uses FFmpeg. Because of this, OpenH264
+ # and FFmpeg have to be correctly enabled separately.
+ # - use_openh264=true is required for OpenH264 targets to be defined.
+ # - ffmpeg_branding="Chrome" is one way to support H.264 decoding in FFmpeg.
+ # FFmpeg can be built with/without H.264 support, see 'ffmpeg_branding'.
+  #   Without H.264 support in FFmpeg, the code compiles but H264DecoderImpl fails to initialize.
+ # CHECK THE OPENH264, FFMPEG AND H.264 LICENSES/PATENTS BEFORE BUILDING.
+ # http://www.openh264.org, https://www.ffmpeg.org/
+ use_third_party_h264 = false # TODO(hbos): To be used in follow-up CL(s).
}
# Make it possible to provide custom locations for some libraries (move these
diff --git a/webrtc/call.h b/webrtc/call.h
index e6e8cdee0b..313c5e58c1 100644
--- a/webrtc/call.h
+++ b/webrtc/call.h
@@ -16,16 +16,14 @@
#include "webrtc/common_types.h"
#include "webrtc/audio_receive_stream.h"
#include "webrtc/audio_send_stream.h"
+#include "webrtc/audio_state.h"
#include "webrtc/base/socket.h"
#include "webrtc/video_receive_stream.h"
#include "webrtc/video_send_stream.h"
namespace webrtc {
-class AudioDeviceModule;
class AudioProcessing;
-class VoiceEngine;
-class VoiceEngineObserver;
const char* Version();
@@ -74,9 +72,6 @@ class Call {
struct Config {
static const int kDefaultStartBitrateBps;
- // VoiceEngine used for audio/video synchronization for this Call.
- VoiceEngine* voice_engine = nullptr;
-
// Bitrate config used until valid bitrate estimates are calculated. Also
// used to cap total bitrate used.
struct BitrateConfig {
@@ -85,11 +80,13 @@ class Call {
int max_bitrate_bps = -1;
} bitrate_config;
- struct AudioConfig {
- AudioDeviceModule* audio_device_module = nullptr;
- AudioProcessing* audio_processing = nullptr;
- VoiceEngineObserver* voice_engine_observer = nullptr;
- } audio_config;
+ // AudioState which is possibly shared between multiple calls.
+ // TODO(solenberg): Change this to a shared_ptr once we can use C++11.
+ rtc::scoped_refptr<AudioState> audio_state;
+
+ // Audio Processing Module to be used in this call.
+ // TODO(solenberg): Change this to a shared_ptr once we can use C++11.
+ AudioProcessing* audio_processing = nullptr;
};
struct Stats {
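
The Config change above replaces the raw VoiceEngine pointer with a ref-counted AudioState that several Call instances can share. A minimal sketch of constructing a Call under the new API, mirroring the SetUp() code in bitrate_estimator_tests.cc later in this diff (error handling omitted; |voice_engine| is assumed to be created and initialized elsewhere):

#include "webrtc/audio_state.h"
#include "webrtc/call.h"

// Sketch only: |voice_engine| is owned and initialized by the caller.
webrtc::Call* CreateCallWithSharedAudio(webrtc::VoiceEngine* voice_engine) {
  webrtc::AudioState::Config audio_state_config;
  audio_state_config.voice_engine = voice_engine;

  webrtc::Call::Config config;
  // AudioState is ref-counted, so the same instance may be passed to the
  // configs of multiple calls.
  config.audio_state = webrtc::AudioState::Create(audio_state_config);
  return webrtc::Call::Create(config);
}
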
diff --git a/webrtc/call/BUILD.gn b/webrtc/call/BUILD.gn
index 3abc762a77..498c724900 100644
--- a/webrtc/call/BUILD.gn
+++ b/webrtc/call/BUILD.gn
@@ -10,6 +10,7 @@ import("../build/webrtc.gni")
source_set("call") {
sources = [
+ "bitrate_allocator.cc",
"call.cc",
"congestion_controller.cc",
"transport_adapter.cc",
diff --git a/webrtc/call/bitrate_allocator.cc b/webrtc/call/bitrate_allocator.cc
new file mode 100644
index 0000000000..b3789d3bb6
--- /dev/null
+++ b/webrtc/call/bitrate_allocator.cc
@@ -0,0 +1,194 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#include "webrtc/call/bitrate_allocator.h"
+
+#include <algorithm>
+#include <utility>
+
+#include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"
+
+namespace webrtc {
+
+// Allow packets to be transmitted at up to 2 times the max video bitrate if
+// the bandwidth estimate allows it.
+const int kTransmissionMaxBitrateMultiplier = 2;
+const int kDefaultBitrateBps = 300000;
+
+BitrateAllocator::BitrateAllocator()
+ : crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
+ bitrate_observers_(),
+ bitrate_observers_modified_(false),
+ enforce_min_bitrate_(true),
+ last_bitrate_bps_(kDefaultBitrateBps),
+ last_fraction_loss_(0),
+ last_rtt_(0) {}
+
+uint32_t BitrateAllocator::OnNetworkChanged(uint32_t bitrate,
+ uint8_t fraction_loss,
+ int64_t rtt) {
+ CriticalSectionScoped lock(crit_sect_.get());
+ last_bitrate_bps_ = bitrate;
+ last_fraction_loss_ = fraction_loss;
+ last_rtt_ = rtt;
+ uint32_t allocated_bitrate_bps = 0;
+ ObserverBitrateMap allocation = AllocateBitrates();
+ for (const auto& kv : allocation) {
+ kv.first->OnNetworkChanged(kv.second, last_fraction_loss_, last_rtt_);
+ allocated_bitrate_bps += kv.second;
+ }
+ return allocated_bitrate_bps;
+}
+
+BitrateAllocator::ObserverBitrateMap BitrateAllocator::AllocateBitrates() {
+ if (bitrate_observers_.empty())
+ return ObserverBitrateMap();
+
+ uint32_t sum_min_bitrates = 0;
+ for (const auto& observer : bitrate_observers_)
+ sum_min_bitrates += observer.second.min_bitrate;
+ if (last_bitrate_bps_ <= sum_min_bitrates)
+ return LowRateAllocation(last_bitrate_bps_);
+ else
+ return NormalRateAllocation(last_bitrate_bps_, sum_min_bitrates);
+}
+
+int BitrateAllocator::AddBitrateObserver(BitrateObserver* observer,
+ uint32_t min_bitrate_bps,
+ uint32_t max_bitrate_bps) {
+ CriticalSectionScoped lock(crit_sect_.get());
+
+ BitrateObserverConfList::iterator it =
+ FindObserverConfigurationPair(observer);
+
+ // Allow the max bitrate to be exceeded for FEC and retransmissions.
+ // TODO(holmer): We have to get rid of this hack as it makes it difficult to
+ // properly allocate bitrate. The allocator should instead distribute any
+ // extra bitrate after all streams have maxed out.
+ max_bitrate_bps *= kTransmissionMaxBitrateMultiplier;
+ if (it != bitrate_observers_.end()) {
+ // Update current configuration.
+ it->second.min_bitrate = min_bitrate_bps;
+ it->second.max_bitrate = max_bitrate_bps;
+ } else {
+ // Add new settings.
+ bitrate_observers_.push_back(BitrateObserverConfiguration(
+ observer, BitrateConfiguration(min_bitrate_bps, max_bitrate_bps)));
+ bitrate_observers_modified_ = true;
+ }
+
+ ObserverBitrateMap allocation = AllocateBitrates();
+ int new_observer_bitrate_bps = 0;
+ for (auto& kv : allocation) {
+ kv.first->OnNetworkChanged(kv.second, last_fraction_loss_, last_rtt_);
+ if (kv.first == observer)
+ new_observer_bitrate_bps = kv.second;
+ }
+ return new_observer_bitrate_bps;
+}
+
+void BitrateAllocator::RemoveBitrateObserver(BitrateObserver* observer) {
+ CriticalSectionScoped lock(crit_sect_.get());
+ BitrateObserverConfList::iterator it =
+ FindObserverConfigurationPair(observer);
+ if (it != bitrate_observers_.end()) {
+ bitrate_observers_.erase(it);
+ bitrate_observers_modified_ = true;
+ }
+}
+
+void BitrateAllocator::GetMinMaxBitrateSumBps(int* min_bitrate_sum_bps,
+ int* max_bitrate_sum_bps) const {
+ *min_bitrate_sum_bps = 0;
+ *max_bitrate_sum_bps = 0;
+
+ CriticalSectionScoped lock(crit_sect_.get());
+ for (const auto& observer : bitrate_observers_) {
+ *min_bitrate_sum_bps += observer.second.min_bitrate;
+ *max_bitrate_sum_bps += observer.second.max_bitrate;
+ }
+}
+
+BitrateAllocator::BitrateObserverConfList::iterator
+BitrateAllocator::FindObserverConfigurationPair(
+ const BitrateObserver* observer) {
+ for (auto it = bitrate_observers_.begin(); it != bitrate_observers_.end();
+ ++it) {
+ if (it->first == observer)
+ return it;
+ }
+ return bitrate_observers_.end();
+}
+
+void BitrateAllocator::EnforceMinBitrate(bool enforce_min_bitrate) {
+ CriticalSectionScoped lock(crit_sect_.get());
+ enforce_min_bitrate_ = enforce_min_bitrate;
+}
+
+BitrateAllocator::ObserverBitrateMap BitrateAllocator::NormalRateAllocation(
+ uint32_t bitrate,
+ uint32_t sum_min_bitrates) {
+ uint32_t number_of_observers =
+ static_cast<uint32_t>(bitrate_observers_.size());
+ uint32_t bitrate_per_observer =
+ (bitrate - sum_min_bitrates) / number_of_observers;
+ // Use map to sort list based on max bitrate.
+ ObserverSortingMap list_max_bitrates;
+ for (const auto& observer : bitrate_observers_) {
+ list_max_bitrates.insert(std::pair<uint32_t, ObserverConfiguration>(
+ observer.second.max_bitrate,
+ ObserverConfiguration(observer.first, observer.second.min_bitrate)));
+ }
+ ObserverBitrateMap allocation;
+ ObserverSortingMap::iterator max_it = list_max_bitrates.begin();
+ while (max_it != list_max_bitrates.end()) {
+ number_of_observers--;
+ uint32_t observer_allowance =
+ max_it->second.min_bitrate + bitrate_per_observer;
+ if (max_it->first < observer_allowance) {
+ // We have more than enough for this observer.
+ // Carry the remainder forward.
+ uint32_t remainder = observer_allowance - max_it->first;
+ if (number_of_observers != 0) {
+ bitrate_per_observer += remainder / number_of_observers;
+ }
+ allocation[max_it->second.observer] = max_it->first;
+ } else {
+ allocation[max_it->second.observer] = observer_allowance;
+ }
+ list_max_bitrates.erase(max_it);
+ // Prepare next iteration.
+ max_it = list_max_bitrates.begin();
+ }
+ return allocation;
+}
+
+BitrateAllocator::ObserverBitrateMap BitrateAllocator::LowRateAllocation(
+ uint32_t bitrate) {
+ ObserverBitrateMap allocation;
+ if (enforce_min_bitrate_) {
+ // Min bitrate to all observers.
+ for (const auto& observer : bitrate_observers_)
+ allocation[observer.first] = observer.second.min_bitrate;
+ } else {
+ // Allocate up to |min_bitrate| to one observer at a time, until
+ // |bitrate| is depleted.
+ uint32_t remainder = bitrate;
+ for (const auto& observer : bitrate_observers_) {
+ uint32_t allocated_bitrate =
+ std::min(remainder, observer.second.min_bitrate);
+ allocation[observer.first] = allocated_bitrate;
+ remainder -= allocated_bitrate;
+ }
+ }
+ return allocation;
+}
+} // namespace webrtc
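
The two allocation paths above can be checked by hand. A standalone sketch of the LowRateAllocation() waterfall (with enforce_min_bitrate_ false), using the same numbers as the ThreeBitrateObservers test later in this diff: min bitrates of 100/200/300 kbps with 500 kbps available:

#include <algorithm>
#include <cstdint>
#include <cstdio>

int main() {
  // Observers' min bitrates, in registration order.
  const uint32_t kMinBitrates[] = {100000, 200000, 300000};
  uint32_t remainder = 500000;  // available, below the 600000 min sum
  for (uint32_t min_bitrate : kMinBitrates) {
    // Each observer gets up to its min bitrate until nothing remains.
    uint32_t allocated = std::min(remainder, min_bitrate);
    remainder -= allocated;
    std::printf("allocated %u bps\n", static_cast<unsigned>(allocated));
  }
  // Prints 100000, 200000, 200000: earlier observers are satisfied first,
  // matching the 500000 bps expectations in the unit test.
  return 0;
}
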
diff --git a/webrtc/call/bitrate_allocator.h b/webrtc/call/bitrate_allocator.h
new file mode 100644
index 0000000000..4a3fd59d49
--- /dev/null
+++ b/webrtc/call/bitrate_allocator.h
@@ -0,0 +1,102 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ * Usage: this class registers multiple RtcpBitrateObservers, one at each
+ * RTCP module. It aggregates the results, runs one bandwidth estimation,
+ * and pushes the result to the encoders via BitrateObserver(s).
+ */
+
+#ifndef WEBRTC_CALL_BITRATE_ALLOCATOR_H_
+#define WEBRTC_CALL_BITRATE_ALLOCATOR_H_
+
+#include <list>
+#include <map>
+#include <utility>
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/thread_annotations.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+
+namespace webrtc {
+
+class BitrateObserver;
+
+class BitrateAllocator {
+ public:
+ BitrateAllocator();
+
+ // Allocate target_bitrate across the registered BitrateObservers.
+  // Returns the actual bitrate allocated (which might be higher than
+  // target_bitrate if, for instance, EnforceMinBitrate() is enabled).
+ uint32_t OnNetworkChanged(uint32_t target_bitrate,
+ uint8_t fraction_loss,
+ int64_t rtt);
+
+  // Set the min and max send bitrate bounds used by bandwidth management.
+  //
+  // If |observer| is already registered, its bitrate bounds are updated.
+  // |min_bitrate_bps| = 0 equals no min bitrate.
+  // |max_bitrate_bps| = 0 equals no max bitrate.
+  // Returns the bitrate allocated for the bitrate observer.
+ int AddBitrateObserver(BitrateObserver* observer,
+ uint32_t min_bitrate_bps,
+ uint32_t max_bitrate_bps);
+
+ void RemoveBitrateObserver(BitrateObserver* observer);
+
+ void GetMinMaxBitrateSumBps(int* min_bitrate_sum_bps,
+ int* max_bitrate_sum_bps) const;
+
+ // This method controls the behavior when the available bitrate is lower than
+ // the minimum bitrate, or the sum of minimum bitrates.
+ // When true, the bitrate will never be set lower than the minimum bitrate(s).
+ // When false, the bitrate observers will be allocated rates up to their
+ // respective minimum bitrate, satisfying one observer after the other.
+ void EnforceMinBitrate(bool enforce_min_bitrate);
+
+ private:
+ struct BitrateConfiguration {
+ BitrateConfiguration(uint32_t min_bitrate, uint32_t max_bitrate)
+ : min_bitrate(min_bitrate), max_bitrate(max_bitrate) {}
+ uint32_t min_bitrate;
+ uint32_t max_bitrate;
+ };
+ struct ObserverConfiguration {
+ ObserverConfiguration(BitrateObserver* observer, uint32_t bitrate)
+ : observer(observer), min_bitrate(bitrate) {}
+ BitrateObserver* const observer;
+ uint32_t min_bitrate;
+ };
+ typedef std::pair<BitrateObserver*, BitrateConfiguration>
+ BitrateObserverConfiguration;
+ typedef std::list<BitrateObserverConfiguration> BitrateObserverConfList;
+ typedef std::multimap<uint32_t, ObserverConfiguration> ObserverSortingMap;
+ typedef std::map<BitrateObserver*, int> ObserverBitrateMap;
+
+ BitrateObserverConfList::iterator FindObserverConfigurationPair(
+ const BitrateObserver* observer) EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ ObserverBitrateMap AllocateBitrates() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ ObserverBitrateMap NormalRateAllocation(uint32_t bitrate,
+ uint32_t sum_min_bitrates)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ ObserverBitrateMap LowRateAllocation(uint32_t bitrate)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ rtc::scoped_ptr<CriticalSectionWrapper> crit_sect_;
+ // Stored in a list to keep track of the insertion order.
+ BitrateObserverConfList bitrate_observers_ GUARDED_BY(crit_sect_);
+ bool bitrate_observers_modified_ GUARDED_BY(crit_sect_);
+ bool enforce_min_bitrate_ GUARDED_BY(crit_sect_);
+ uint32_t last_bitrate_bps_ GUARDED_BY(crit_sect_);
+ uint8_t last_fraction_loss_ GUARDED_BY(crit_sect_);
+ int64_t last_rtt_ GUARDED_BY(crit_sect_);
+};
+} // namespace webrtc
+#endif // WEBRTC_CALL_BITRATE_ALLOCATOR_H_
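
A short usage sketch of the public API declared above, assuming |observer| is some BitrateObserver implementation owned by the caller (the unit tests that follow exercise the same flow in more detail):

#include "webrtc/call/bitrate_allocator.h"

void ExampleUsage(webrtc::BitrateAllocator* allocator,
                  webrtc::BitrateObserver* observer) {
  // Register with a 100 kbps floor and a 1 Mbps ceiling; the return value
  // is the bitrate currently allocated to this observer.
  int current_bps = allocator->AddBitrateObserver(observer, 100000, 1000000);
  (void)current_bps;

  // A new bandwidth estimate fans out to every registered observer via
  // its OnNetworkChanged() callback.
  allocator->OnNetworkChanged(600000, 0 /* fraction_loss */, 50 /* rtt ms */);

  // Deregister before the observer is destroyed.
  allocator->RemoveBitrateObserver(observer);
}
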
diff --git a/webrtc/call/bitrate_allocator_unittest.cc b/webrtc/call/bitrate_allocator_unittest.cc
new file mode 100644
index 0000000000..86f75a4380
--- /dev/null
+++ b/webrtc/call/bitrate_allocator_unittest.cc
@@ -0,0 +1,212 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <algorithm>
+#include <vector>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/call/bitrate_allocator.h"
+#include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"
+
+namespace webrtc {
+
+class TestBitrateObserver : public BitrateObserver {
+ public:
+ TestBitrateObserver()
+ : last_bitrate_(0), last_fraction_loss_(0), last_rtt_(0) {}
+
+ virtual void OnNetworkChanged(uint32_t bitrate,
+ uint8_t fraction_loss,
+ int64_t rtt) {
+ last_bitrate_ = bitrate;
+ last_fraction_loss_ = fraction_loss;
+ last_rtt_ = rtt;
+ }
+ uint32_t last_bitrate_;
+ uint8_t last_fraction_loss_;
+ int64_t last_rtt_;
+};
+
+class BitrateAllocatorTest : public ::testing::Test {
+ protected:
+ BitrateAllocatorTest() : allocator_(new BitrateAllocator()) {
+ allocator_->OnNetworkChanged(300000u, 0, 0);
+ }
+ ~BitrateAllocatorTest() {}
+
+ rtc::scoped_ptr<BitrateAllocator> allocator_;
+};
+
+TEST_F(BitrateAllocatorTest, UpdatingBitrateObserver) {
+ TestBitrateObserver bitrate_observer;
+ int start_bitrate =
+ allocator_->AddBitrateObserver(&bitrate_observer, 100000, 1500000);
+ EXPECT_EQ(300000, start_bitrate);
+ allocator_->OnNetworkChanged(200000, 0, 0);
+ EXPECT_EQ(200000u, bitrate_observer.last_bitrate_);
+
+ // TODO(pbos): Expect capping to 1.5M instead of 3M when not boosting the max
+ // bitrate for FEC/retransmissions (see todo in BitrateAllocator).
+ allocator_->OnNetworkChanged(4000000, 0, 0);
+ EXPECT_EQ(3000000u, bitrate_observer.last_bitrate_);
+ start_bitrate =
+ allocator_->AddBitrateObserver(&bitrate_observer, 100000, 4000000);
+ EXPECT_EQ(4000000, start_bitrate);
+
+ start_bitrate =
+ allocator_->AddBitrateObserver(&bitrate_observer, 100000, 1500000);
+ EXPECT_EQ(3000000, start_bitrate);
+ EXPECT_EQ(3000000u, bitrate_observer.last_bitrate_);
+ allocator_->OnNetworkChanged(1500000, 0, 0);
+ EXPECT_EQ(1500000u, bitrate_observer.last_bitrate_);
+}
+
+TEST_F(BitrateAllocatorTest, TwoBitrateObserversOneRtcpObserver) {
+ TestBitrateObserver bitrate_observer_1;
+ TestBitrateObserver bitrate_observer_2;
+ int start_bitrate =
+ allocator_->AddBitrateObserver(&bitrate_observer_1, 100000, 300000);
+ EXPECT_EQ(300000, start_bitrate);
+ start_bitrate =
+ allocator_->AddBitrateObserver(&bitrate_observer_2, 200000, 300000);
+ EXPECT_EQ(200000, start_bitrate);
+
+  // Test a start bitrate lower than the sum of the min bitrates; min
+  // bitrates will be allocated to all observers.
+ allocator_->OnNetworkChanged(200000, 0, 50);
+ EXPECT_EQ(100000u, bitrate_observer_1.last_bitrate_);
+ EXPECT_EQ(0, bitrate_observer_1.last_fraction_loss_);
+ EXPECT_EQ(50, bitrate_observer_1.last_rtt_);
+ EXPECT_EQ(200000u, bitrate_observer_2.last_bitrate_);
+ EXPECT_EQ(0, bitrate_observer_2.last_fraction_loss_);
+ EXPECT_EQ(50, bitrate_observer_2.last_rtt_);
+
+ // Test a bitrate which should be distributed equally.
+ allocator_->OnNetworkChanged(500000, 0, 50);
+ const uint32_t kBitrateToShare = 500000 - 200000 - 100000;
+ EXPECT_EQ(100000u + kBitrateToShare / 2, bitrate_observer_1.last_bitrate_);
+ EXPECT_EQ(200000u + kBitrateToShare / 2, bitrate_observer_2.last_bitrate_);
+
+ // Limited by 2x max bitrates since we leave room for FEC and retransmissions.
+ allocator_->OnNetworkChanged(1500000, 0, 50);
+ EXPECT_EQ(600000u, bitrate_observer_1.last_bitrate_);
+ EXPECT_EQ(600000u, bitrate_observer_2.last_bitrate_);
+}
+
+class BitrateAllocatorTestNoEnforceMin : public ::testing::Test {
+ protected:
+ BitrateAllocatorTestNoEnforceMin() : allocator_(new BitrateAllocator()) {
+ allocator_->EnforceMinBitrate(false);
+ allocator_->OnNetworkChanged(300000u, 0, 0);
+ }
+ ~BitrateAllocatorTestNoEnforceMin() {}
+
+ rtc::scoped_ptr<BitrateAllocator> allocator_;
+};
+
+// The following three tests verify that the EnforceMinBitrate() method works
+// as intended.
+TEST_F(BitrateAllocatorTestNoEnforceMin, OneBitrateObserver) {
+ TestBitrateObserver bitrate_observer_1;
+ int start_bitrate =
+ allocator_->AddBitrateObserver(&bitrate_observer_1, 100000, 400000);
+ EXPECT_EQ(300000, start_bitrate);
+
+ // High REMB.
+ allocator_->OnNetworkChanged(150000, 0, 0);
+ EXPECT_EQ(150000u, bitrate_observer_1.last_bitrate_);
+
+ // Low REMB.
+ allocator_->OnNetworkChanged(10000, 0, 0);
+ EXPECT_EQ(10000u, bitrate_observer_1.last_bitrate_);
+
+ allocator_->RemoveBitrateObserver(&bitrate_observer_1);
+}
+
+TEST_F(BitrateAllocatorTestNoEnforceMin, ThreeBitrateObservers) {
+ TestBitrateObserver bitrate_observer_1;
+ TestBitrateObserver bitrate_observer_2;
+ TestBitrateObserver bitrate_observer_3;
+ // Set up the observers with min bitrates at 100000, 200000, and 300000.
+ int start_bitrate =
+ allocator_->AddBitrateObserver(&bitrate_observer_1, 100000, 400000);
+ EXPECT_EQ(300000, start_bitrate);
+
+ start_bitrate =
+ allocator_->AddBitrateObserver(&bitrate_observer_2, 200000, 400000);
+ EXPECT_EQ(200000, start_bitrate);
+ EXPECT_EQ(100000u, bitrate_observer_1.last_bitrate_);
+
+ start_bitrate =
+ allocator_->AddBitrateObserver(&bitrate_observer_3, 300000, 400000);
+ EXPECT_EQ(0, start_bitrate);
+ EXPECT_EQ(100000u, bitrate_observer_1.last_bitrate_);
+ EXPECT_EQ(200000u, bitrate_observer_2.last_bitrate_);
+
+ // High REMB. Make sure the controllers get a fair share of the surplus
+ // (i.e., what is left after each controller gets its min rate).
+ allocator_->OnNetworkChanged(690000, 0, 0);
+ // Verify that each observer gets its min rate (sum of min rates is 600000),
+ // and that the remaining 90000 is divided equally among the three.
+ uint32_t bitrate_to_share = 690000u - 100000u - 200000u - 300000u;
+ EXPECT_EQ(100000u + bitrate_to_share / 3, bitrate_observer_1.last_bitrate_);
+ EXPECT_EQ(200000u + bitrate_to_share / 3, bitrate_observer_2.last_bitrate_);
+ EXPECT_EQ(300000u + bitrate_to_share / 3, bitrate_observer_3.last_bitrate_);
+
+ // High REMB, but below the sum of min bitrates.
+ allocator_->OnNetworkChanged(500000, 0, 0);
+ // Verify that the first and second observers get their min bitrates, and the
+ // third gets the remainder.
+ EXPECT_EQ(100000u, bitrate_observer_1.last_bitrate_); // Min bitrate.
+ EXPECT_EQ(200000u, bitrate_observer_2.last_bitrate_); // Min bitrate.
+ EXPECT_EQ(200000u, bitrate_observer_3.last_bitrate_); // Remainder.
+
+ // Low REMB.
+ allocator_->OnNetworkChanged(10000, 0, 0);
+ // Verify that the first observer gets all the rate, and the rest get zero.
+ EXPECT_EQ(10000u, bitrate_observer_1.last_bitrate_);
+ EXPECT_EQ(0u, bitrate_observer_2.last_bitrate_);
+ EXPECT_EQ(0u, bitrate_observer_3.last_bitrate_);
+
+ allocator_->RemoveBitrateObserver(&bitrate_observer_1);
+ allocator_->RemoveBitrateObserver(&bitrate_observer_2);
+ allocator_->RemoveBitrateObserver(&bitrate_observer_3);
+}
+
+TEST_F(BitrateAllocatorTest, ThreeBitrateObserversLowRembEnforceMin) {
+ TestBitrateObserver bitrate_observer_1;
+ TestBitrateObserver bitrate_observer_2;
+ TestBitrateObserver bitrate_observer_3;
+ int start_bitrate =
+ allocator_->AddBitrateObserver(&bitrate_observer_1, 100000, 400000);
+ EXPECT_EQ(300000, start_bitrate);
+
+ start_bitrate =
+ allocator_->AddBitrateObserver(&bitrate_observer_2, 200000, 400000);
+ EXPECT_EQ(200000, start_bitrate);
+ EXPECT_EQ(100000u, bitrate_observer_1.last_bitrate_);
+
+ start_bitrate =
+ allocator_->AddBitrateObserver(&bitrate_observer_3, 300000, 400000);
+ EXPECT_EQ(300000, start_bitrate);
+ EXPECT_EQ(100000, static_cast<int>(bitrate_observer_1.last_bitrate_));
+ EXPECT_EQ(200000, static_cast<int>(bitrate_observer_2.last_bitrate_));
+
+ // Low REMB. Verify that all observers still get their respective min bitrate.
+ allocator_->OnNetworkChanged(1000, 0, 0);
+ EXPECT_EQ(100000u, bitrate_observer_1.last_bitrate_); // Min cap.
+ EXPECT_EQ(200000u, bitrate_observer_2.last_bitrate_); // Min cap.
+ EXPECT_EQ(300000u, bitrate_observer_3.last_bitrate_); // Min cap.
+
+ allocator_->RemoveBitrateObserver(&bitrate_observer_1);
+ allocator_->RemoveBitrateObserver(&bitrate_observer_2);
+ allocator_->RemoveBitrateObserver(&bitrate_observer_3);
+}
+} // namespace webrtc
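
The expectations in TwoBitrateObserversOneRtcpObserver above can be reproduced by hand from the NormalRateAllocation() arithmetic: the surplus above the summed min bitrates is split evenly. A sketch of the 500 kbps case:

#include <cstdio>

int main() {
  const unsigned kBitrate = 500000;
  const unsigned kMin1 = 100000;  // observer 1 min bitrate
  const unsigned kMin2 = 200000;  // observer 2 min bitrate
  // Surplus above the summed mins, shared equally by the two observers.
  const unsigned surplus = kBitrate - kMin1 - kMin2;  // 200000
  std::printf("observer 1: %u bps\n", kMin1 + surplus / 2);  // 200000
  std::printf("observer 2: %u bps\n", kMin2 + surplus / 2);  // 300000
  return 0;
}
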
diff --git a/webrtc/call/bitrate_estimator_tests.cc b/webrtc/call/bitrate_estimator_tests.cc
index 685f3fd665..4b24bbd5ef 100644
--- a/webrtc/call/bitrate_estimator_tests.cc
+++ b/webrtc/call/bitrate_estimator_tests.cc
@@ -13,66 +13,54 @@
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/audio_state.h"
#include "webrtc/base/checks.h"
+#include "webrtc/base/event.h"
+#include "webrtc/base/logging.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/call.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/event_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/test/call_test.h"
#include "webrtc/test/direct_transport.h"
#include "webrtc/test/encoder_settings.h"
#include "webrtc/test/fake_decoder.h"
#include "webrtc/test/fake_encoder.h"
-#include "webrtc/test/fake_voice_engine.h"
+#include "webrtc/test/mock_voice_engine.h"
#include "webrtc/test/frame_generator_capturer.h"
namespace webrtc {
namespace {
// Note: If you consider re-using this class, think twice and instead consider
-// writing tests that don't depend on the trace system.
-class TraceObserver {
+// writing tests that don't depend on the logging system.
+class LogObserver {
public:
- TraceObserver() {
- Trace::set_level_filter(kTraceTerseInfo);
-
- Trace::CreateTrace();
- Trace::SetTraceCallback(&callback_);
-
- // Call webrtc trace to initialize the tracer that would otherwise trigger a
- // data-race if left to be initialized by multiple threads (i.e. threads
- // spawned by test::DirectTransport members in BitrateEstimatorTest).
- WEBRTC_TRACE(kTraceStateInfo,
- kTraceUtility,
- -1,
- "Instantiate without data races.");
- }
+ LogObserver() { rtc::LogMessage::AddLogToStream(&callback_, rtc::LS_INFO); }
- ~TraceObserver() {
- Trace::SetTraceCallback(nullptr);
- Trace::ReturnTrace();
- }
+ ~LogObserver() { rtc::LogMessage::RemoveLogToStream(&callback_); }
void PushExpectedLogLine(const std::string& expected_log_line) {
callback_.PushExpectedLogLine(expected_log_line);
}
- EventTypeWrapper Wait() {
- return callback_.Wait();
- }
+ bool Wait() { return callback_.Wait(); }
private:
- class Callback : public TraceCallback {
+ class Callback : public rtc::LogSink {
public:
- Callback() : done_(EventWrapper::Create()) {}
+ Callback() : done_(false, false) {}
- void Print(TraceLevel level, const char* message, int length) override {
+ void OnLogMessage(const std::string& message) override {
rtc::CritScope lock(&crit_sect_);
- std::string msg(message);
- if (msg.find("BitrateEstimator") != std::string::npos) {
- received_log_lines_.push_back(msg);
+      // Ignore log lines that are due to missing AST extensions; these are
+ // logged when we switch back from AST to TOF until the wrapping bitrate
+ // estimator gives up on using AST.
+ if (message.find("BitrateEstimator") != std::string::npos &&
+ message.find("packet is missing") == std::string::npos) {
+ received_log_lines_.push_back(message);
}
+
int num_popped = 0;
while (!received_log_lines_.empty() && !expected_log_lines_.empty()) {
std::string a = received_log_lines_.front();
@@ -80,19 +68,17 @@ class TraceObserver {
received_log_lines_.pop_front();
expected_log_lines_.pop_front();
num_popped++;
- EXPECT_TRUE(a.find(b) != std::string::npos);
+ EXPECT_TRUE(a.find(b) != std::string::npos) << a << " != " << b;
}
if (expected_log_lines_.size() <= 0) {
if (num_popped > 0) {
- done_->Set();
+ done_.Set();
}
return;
}
}
- EventTypeWrapper Wait() {
- return done_->Wait(test::CallTest::kDefaultTimeoutMs);
- }
+ bool Wait() { return done_.Wait(test::CallTest::kDefaultTimeoutMs); }
void PushExpectedLogLine(const std::string& expected_log_line) {
rtc::CritScope lock(&crit_sect_);
@@ -104,7 +90,7 @@ class TraceObserver {
rtc::CriticalSection crit_sect_;
Strings received_log_lines_ GUARDED_BY(crit_sect_);
Strings expected_log_lines_ GUARDED_BY(crit_sect_);
- rtc::scoped_ptr<EventWrapper> done_;
+ rtc::Event done_;
};
Callback callback_;
@@ -118,13 +104,13 @@ class BitrateEstimatorTest : public test::CallTest {
public:
BitrateEstimatorTest() : receive_config_(nullptr) {}
- virtual ~BitrateEstimatorTest() {
- EXPECT_TRUE(streams_.empty());
- }
+ virtual ~BitrateEstimatorTest() { EXPECT_TRUE(streams_.empty()); }
virtual void SetUp() {
+ AudioState::Config audio_state_config;
+ audio_state_config.voice_engine = &mock_voice_engine_;
Call::Config config;
- config.voice_engine = &fake_voice_engine_;
+ config.audio_state = AudioState::Create(audio_state_config);
receiver_call_.reset(Call::Create(config));
sender_call_.reset(Call::Create(config));
@@ -133,18 +119,19 @@ class BitrateEstimatorTest : public test::CallTest {
receive_transport_.reset(new test::DirectTransport(receiver_call_.get()));
receive_transport_->SetReceiver(sender_call_->Receiver());
- send_config_ = VideoSendStream::Config(send_transport_.get());
- send_config_.rtp.ssrcs.push_back(kSendSsrcs[0]);
+ video_send_config_ = VideoSendStream::Config(send_transport_.get());
+ video_send_config_.rtp.ssrcs.push_back(kVideoSendSsrcs[0]);
// Encoders will be set separately per stream.
- send_config_.encoder_settings.encoder = nullptr;
- send_config_.encoder_settings.payload_name = "FAKE";
- send_config_.encoder_settings.payload_type = kFakeSendPayloadType;
- encoder_config_.streams = test::CreateVideoStreams(1);
+ video_send_config_.encoder_settings.encoder = nullptr;
+ video_send_config_.encoder_settings.payload_name = "FAKE";
+ video_send_config_.encoder_settings.payload_type =
+ kFakeVideoSendPayloadType;
+ video_encoder_config_.streams = test::CreateVideoStreams(1);
receive_config_ = VideoReceiveStream::Config(receive_transport_.get());
// receive_config_.decoders will be set by every stream separately.
- receive_config_.rtp.remote_ssrc = send_config_.rtp.ssrcs[0];
- receive_config_.rtp.local_ssrc = kReceiverLocalSsrc;
+ receive_config_.rtp.remote_ssrc = video_send_config_.rtp.ssrcs[0];
+ receive_config_.rtp.local_ssrc = kReceiverLocalVideoSsrc;
receive_config_.rtp.remb = true;
receive_config_.rtp.extensions.push_back(
RtpExtension(RtpExtension::kTOffset, kTOFExtensionId));
@@ -154,7 +141,7 @@ class BitrateEstimatorTest : public test::CallTest {
virtual void TearDown() {
std::for_each(streams_.begin(), streams_.end(),
- std::mem_fun(&Stream::StopSending));
+ std::mem_fun(&Stream::StopSending));
send_transport_->StopSending();
receive_transport_->StopSending();
@@ -165,6 +152,7 @@ class BitrateEstimatorTest : public test::CallTest {
}
receiver_call_.reset();
+ sender_call_.reset();
}
protected:
@@ -181,23 +169,21 @@ class BitrateEstimatorTest : public test::CallTest {
frame_generator_capturer_(),
fake_encoder_(Clock::GetRealTimeClock()),
fake_decoder_() {
- test_->send_config_.rtp.ssrcs[0]++;
- test_->send_config_.encoder_settings.encoder = &fake_encoder_;
+ test_->video_send_config_.rtp.ssrcs[0]++;
+ test_->video_send_config_.encoder_settings.encoder = &fake_encoder_;
send_stream_ = test_->sender_call_->CreateVideoSendStream(
- test_->send_config_, test_->encoder_config_);
- RTC_DCHECK_EQ(1u, test_->encoder_config_.streams.size());
+ test_->video_send_config_, test_->video_encoder_config_);
+ RTC_DCHECK_EQ(1u, test_->video_encoder_config_.streams.size());
frame_generator_capturer_.reset(test::FrameGeneratorCapturer::Create(
- send_stream_->Input(),
- test_->encoder_config_.streams[0].width,
- test_->encoder_config_.streams[0].height,
- 30,
+ send_stream_->Input(), test_->video_encoder_config_.streams[0].width,
+ test_->video_encoder_config_.streams[0].height, 30,
Clock::GetRealTimeClock()));
send_stream_->Start();
frame_generator_capturer_->Start();
if (receive_audio) {
AudioReceiveStream::Config receive_config;
- receive_config.rtp.remote_ssrc = test_->send_config_.rtp.ssrcs[0];
+ receive_config.rtp.remote_ssrc = test_->video_send_config_.rtp.ssrcs[0];
// Bogus non-default id to prevent hitting a RTC_DCHECK when creating
// the AudioReceiveStream. Every receive stream has to correspond to
// an underlying channel id.
@@ -211,12 +197,13 @@ class BitrateEstimatorTest : public test::CallTest {
VideoReceiveStream::Decoder decoder;
decoder.decoder = &fake_decoder_;
decoder.payload_type =
- test_->send_config_.encoder_settings.payload_type;
+ test_->video_send_config_.encoder_settings.payload_type;
decoder.payload_name =
- test_->send_config_.encoder_settings.payload_name;
+ test_->video_send_config_.encoder_settings.payload_name;
+ test_->receive_config_.decoders.clear();
test_->receive_config_.decoders.push_back(decoder);
test_->receive_config_.rtp.remote_ssrc =
- test_->send_config_.rtp.ssrcs[0];
+ test_->video_send_config_.rtp.ssrcs[0];
test_->receive_config_.rtp.local_ssrc++;
video_receive_stream_ = test_->receiver_call_->CreateVideoReceiveStream(
test_->receive_config_);
@@ -262,8 +249,8 @@ class BitrateEstimatorTest : public test::CallTest {
test::FakeDecoder fake_decoder_;
};
- test::FakeVoiceEngine fake_voice_engine_;
- TraceObserver receiver_trace_;
+ testing::NiceMock<test::MockVoiceEngine> mock_voice_engine_;
+ LogObserver receiver_log_;
rtc::scoped_ptr<test::DirectTransport> send_transport_;
rtc::scoped_ptr<test::DirectTransport> receive_transport_;
rtc::scoped_ptr<Call> sender_call_;
@@ -278,89 +265,89 @@ static const char* kSingleStreamLog =
"RemoteBitrateEstimatorSingleStream: Instantiating.";
TEST_F(BitrateEstimatorTest, InstantiatesTOFPerDefaultForVideo) {
- send_config_.rtp.extensions.push_back(
+ video_send_config_.rtp.extensions.push_back(
RtpExtension(RtpExtension::kTOffset, kTOFExtensionId));
- receiver_trace_.PushExpectedLogLine(kSingleStreamLog);
- receiver_trace_.PushExpectedLogLine(kSingleStreamLog);
+ receiver_log_.PushExpectedLogLine(kSingleStreamLog);
+ receiver_log_.PushExpectedLogLine(kSingleStreamLog);
streams_.push_back(new Stream(this, false));
- EXPECT_EQ(kEventSignaled, receiver_trace_.Wait());
+ EXPECT_TRUE(receiver_log_.Wait());
}
TEST_F(BitrateEstimatorTest, ImmediatelySwitchToASTForAudio) {
- send_config_.rtp.extensions.push_back(
+ video_send_config_.rtp.extensions.push_back(
RtpExtension(RtpExtension::kAbsSendTime, kASTExtensionId));
- receiver_trace_.PushExpectedLogLine(kSingleStreamLog);
- receiver_trace_.PushExpectedLogLine(kSingleStreamLog);
- receiver_trace_.PushExpectedLogLine("Switching to absolute send time RBE.");
- receiver_trace_.PushExpectedLogLine(kAbsSendTimeLog);
+ receiver_log_.PushExpectedLogLine(kSingleStreamLog);
+ receiver_log_.PushExpectedLogLine(kSingleStreamLog);
+ receiver_log_.PushExpectedLogLine("Switching to absolute send time RBE.");
+ receiver_log_.PushExpectedLogLine(kAbsSendTimeLog);
streams_.push_back(new Stream(this, true));
- EXPECT_EQ(kEventSignaled, receiver_trace_.Wait());
+ EXPECT_TRUE(receiver_log_.Wait());
}
TEST_F(BitrateEstimatorTest, ImmediatelySwitchToASTForVideo) {
- send_config_.rtp.extensions.push_back(
+ video_send_config_.rtp.extensions.push_back(
RtpExtension(RtpExtension::kAbsSendTime, kASTExtensionId));
- receiver_trace_.PushExpectedLogLine(kSingleStreamLog);
- receiver_trace_.PushExpectedLogLine(kSingleStreamLog);
- receiver_trace_.PushExpectedLogLine("Switching to absolute send time RBE.");
- receiver_trace_.PushExpectedLogLine(kAbsSendTimeLog);
+ receiver_log_.PushExpectedLogLine(kSingleStreamLog);
+ receiver_log_.PushExpectedLogLine(kSingleStreamLog);
+ receiver_log_.PushExpectedLogLine("Switching to absolute send time RBE.");
+ receiver_log_.PushExpectedLogLine(kAbsSendTimeLog);
streams_.push_back(new Stream(this, false));
- EXPECT_EQ(kEventSignaled, receiver_trace_.Wait());
+ EXPECT_TRUE(receiver_log_.Wait());
}
TEST_F(BitrateEstimatorTest, SwitchesToASTForAudio) {
- receiver_trace_.PushExpectedLogLine(kSingleStreamLog);
- receiver_trace_.PushExpectedLogLine(kSingleStreamLog);
+ receiver_log_.PushExpectedLogLine(kSingleStreamLog);
+ receiver_log_.PushExpectedLogLine(kSingleStreamLog);
streams_.push_back(new Stream(this, true));
- EXPECT_EQ(kEventSignaled, receiver_trace_.Wait());
+ EXPECT_TRUE(receiver_log_.Wait());
- send_config_.rtp.extensions.push_back(
+ video_send_config_.rtp.extensions.push_back(
RtpExtension(RtpExtension::kAbsSendTime, kASTExtensionId));
- receiver_trace_.PushExpectedLogLine("Switching to absolute send time RBE.");
- receiver_trace_.PushExpectedLogLine(kAbsSendTimeLog);
+ receiver_log_.PushExpectedLogLine("Switching to absolute send time RBE.");
+ receiver_log_.PushExpectedLogLine(kAbsSendTimeLog);
streams_.push_back(new Stream(this, true));
- EXPECT_EQ(kEventSignaled, receiver_trace_.Wait());
+ EXPECT_TRUE(receiver_log_.Wait());
}
TEST_F(BitrateEstimatorTest, SwitchesToASTForVideo) {
- send_config_.rtp.extensions.push_back(
+ video_send_config_.rtp.extensions.push_back(
RtpExtension(RtpExtension::kTOffset, kTOFExtensionId));
- receiver_trace_.PushExpectedLogLine(kSingleStreamLog);
- receiver_trace_.PushExpectedLogLine(kSingleStreamLog);
+ receiver_log_.PushExpectedLogLine(kSingleStreamLog);
+ receiver_log_.PushExpectedLogLine(kSingleStreamLog);
streams_.push_back(new Stream(this, false));
- EXPECT_EQ(kEventSignaled, receiver_trace_.Wait());
+ EXPECT_TRUE(receiver_log_.Wait());
- send_config_.rtp.extensions[0] =
+ video_send_config_.rtp.extensions[0] =
RtpExtension(RtpExtension::kAbsSendTime, kASTExtensionId);
- receiver_trace_.PushExpectedLogLine("Switching to absolute send time RBE.");
- receiver_trace_.PushExpectedLogLine(kAbsSendTimeLog);
+ receiver_log_.PushExpectedLogLine("Switching to absolute send time RBE.");
+ receiver_log_.PushExpectedLogLine(kAbsSendTimeLog);
streams_.push_back(new Stream(this, false));
- EXPECT_EQ(kEventSignaled, receiver_trace_.Wait());
+ EXPECT_TRUE(receiver_log_.Wait());
}
TEST_F(BitrateEstimatorTest, SwitchesToASTThenBackToTOFForVideo) {
- send_config_.rtp.extensions.push_back(
+ video_send_config_.rtp.extensions.push_back(
RtpExtension(RtpExtension::kTOffset, kTOFExtensionId));
- receiver_trace_.PushExpectedLogLine(kSingleStreamLog);
- receiver_trace_.PushExpectedLogLine(kSingleStreamLog);
+ receiver_log_.PushExpectedLogLine(kSingleStreamLog);
+ receiver_log_.PushExpectedLogLine(kSingleStreamLog);
streams_.push_back(new Stream(this, false));
- EXPECT_EQ(kEventSignaled, receiver_trace_.Wait());
+ EXPECT_TRUE(receiver_log_.Wait());
- send_config_.rtp.extensions[0] =
+ video_send_config_.rtp.extensions[0] =
RtpExtension(RtpExtension::kAbsSendTime, kASTExtensionId);
- receiver_trace_.PushExpectedLogLine("Switching to absolute send time RBE.");
- receiver_trace_.PushExpectedLogLine(kAbsSendTimeLog);
+ receiver_log_.PushExpectedLogLine("Switching to absolute send time RBE.");
+ receiver_log_.PushExpectedLogLine(kAbsSendTimeLog);
streams_.push_back(new Stream(this, false));
- EXPECT_EQ(kEventSignaled, receiver_trace_.Wait());
+ EXPECT_TRUE(receiver_log_.Wait());
- send_config_.rtp.extensions[0] =
+ video_send_config_.rtp.extensions[0] =
RtpExtension(RtpExtension::kTOffset, kTOFExtensionId);
- receiver_trace_.PushExpectedLogLine(
+ receiver_log_.PushExpectedLogLine(
"WrappingBitrateEstimator: Switching to transmission time offset RBE.");
- receiver_trace_.PushExpectedLogLine(kSingleStreamLog);
+ receiver_log_.PushExpectedLogLine(kSingleStreamLog);
streams_.push_back(new Stream(this, false));
streams_[0]->StopSending();
streams_[1]->StopSending();
- EXPECT_EQ(kEventSignaled, receiver_trace_.Wait());
+ EXPECT_TRUE(receiver_log_.Wait());
}
} // namespace webrtc
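
This file's move from the deprecated trace system to rtc::LogMessage is representative of the wider logging migration in this merge. A minimal sketch of a capturing rtc::LogSink in the style of LogObserver::Callback above (the real callback additionally guards its state with a critical section, omitted here):

#include <string>
#include <vector>

#include "webrtc/base/logging.h"

class CapturingLogSink : public rtc::LogSink {
 public:
  CapturingLogSink() { rtc::LogMessage::AddLogToStream(this, rtc::LS_INFO); }
  ~CapturingLogSink() { rtc::LogMessage::RemoveLogToStream(this); }

  // Called by the logging system for every message at LS_INFO or above.
  void OnLogMessage(const std::string& message) override {
    lines_.push_back(message);
  }

  const std::vector<std::string>& lines() const { return lines_; }

 private:
  std::vector<std::string> lines_;
};
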
diff --git a/webrtc/call/call.cc b/webrtc/call/call.cc
index 594ddf5c97..5c46a48f14 100644
--- a/webrtc/call/call.cc
+++ b/webrtc/call/call.cc
@@ -15,27 +15,33 @@
#include "webrtc/audio/audio_receive_stream.h"
#include "webrtc/audio/audio_send_stream.h"
+#include "webrtc/audio/audio_state.h"
+#include "webrtc/audio/scoped_voe_interface.h"
#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/base/thread_checker.h"
#include "webrtc/base/trace_event.h"
#include "webrtc/call.h"
+#include "webrtc/call/bitrate_allocator.h"
#include "webrtc/call/congestion_controller.h"
#include "webrtc/call/rtc_event_log.h"
#include "webrtc/common.h"
#include "webrtc/config.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
+#include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"
+#include "webrtc/modules/pacing/paced_sender.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
-#include "webrtc/modules/utility/interface/process_thread.h"
+#include "webrtc/modules/utility/include/process_thread.h"
#include "webrtc/system_wrappers/include/cpu_info.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/logging.h"
+#include "webrtc/system_wrappers/include/metrics.h"
#include "webrtc/system_wrappers/include/rw_lock_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h"
+#include "webrtc/video/call_stats.h"
#include "webrtc/video/video_receive_stream.h"
#include "webrtc/video/video_send_stream.h"
-#include "webrtc/video_engine/call_stats.h"
#include "webrtc/voice_engine/include/voe_codec.h"
namespace webrtc {
@@ -44,7 +50,8 @@ const int Call::Config::kDefaultStartBitrateBps = 300000;
namespace internal {
-class Call : public webrtc::Call, public PacketReceiver {
+class Call : public webrtc::Call, public PacketReceiver,
+ public BitrateObserver {
public:
explicit Call(const Call::Config& config);
virtual ~Call();
@@ -83,6 +90,10 @@ class Call : public webrtc::Call, public PacketReceiver {
void OnSentPacket(const rtc::SentPacket& sent_packet) override;
+ // Implements BitrateObserver.
+ void OnNetworkChanged(uint32_t bitrate_bps, uint8_t fraction_loss,
+ int64_t rtt_ms) override;
+
private:
DeliveryStatus DeliverRtcp(MediaType media_type, const uint8_t* packet,
size_t length);
@@ -94,14 +105,28 @@ class Call : public webrtc::Call, public PacketReceiver {
void ConfigureSync(const std::string& sync_group)
EXCLUSIVE_LOCKS_REQUIRED(receive_crit_);
+ VoiceEngine* voice_engine() {
+ internal::AudioState* audio_state =
+ static_cast<internal::AudioState*>(config_.audio_state.get());
+ if (audio_state)
+ return audio_state->voice_engine();
+ else
+ return nullptr;
+ }
+
+ void UpdateSendHistograms() EXCLUSIVE_LOCKS_REQUIRED(&bitrate_crit_);
+ void UpdateReceiveHistograms();
+
+ Clock* const clock_;
+
const int num_cpu_cores_;
const rtc::scoped_ptr<ProcessThread> module_process_thread_;
const rtc::scoped_ptr<CallStats> call_stats_;
- const rtc::scoped_ptr<CongestionController> congestion_controller_;
+ const rtc::scoped_ptr<BitrateAllocator> bitrate_allocator_;
Call::Config config_;
rtc::ThreadChecker configuration_thread_checker_;
- bool network_enabled_;
+ bool network_enabled_;
rtc::scoped_ptr<RWLockWrapper> receive_crit_;
// Audio and Video receive streams are owned by the client that creates them.
@@ -123,7 +148,25 @@ class Call : public webrtc::Call, public PacketReceiver {
VideoSendStream::RtpStateMap suspended_video_send_ssrcs_;
RtcEventLog* event_log_ = nullptr;
- VoECodec* voe_codec_ = nullptr;
+
+ // The following members are only accessed (exclusively) from one thread
+ // and from the destructor, and therefore don't need any explicit
+ // synchronization.
+ int64_t received_video_bytes_;
+ int64_t received_audio_bytes_;
+ int64_t received_rtcp_bytes_;
+ int64_t first_rtp_packet_received_ms_;
+ int64_t last_rtp_packet_received_ms_;
+ int64_t first_packet_sent_ms_;
+
+ // TODO(holmer): Remove this lock once BitrateController no longer calls
+ // OnNetworkChanged from multiple threads.
+ rtc::CriticalSection bitrate_crit_;
+ int64_t estimated_send_bitrate_sum_kbits_ GUARDED_BY(&bitrate_crit_);
+ int64_t pacer_bitrate_sum_kbits_ GUARDED_BY(&bitrate_crit_);
+ int64_t num_bitrate_updates_ GUARDED_BY(&bitrate_crit_);
+
+ const rtc::scoped_ptr<CongestionController> congestion_controller_;
RTC_DISALLOW_COPY_AND_ASSIGN(Call);
};
@@ -136,15 +179,29 @@ Call* Call::Create(const Call::Config& config) {
namespace internal {
Call::Call(const Call::Config& config)
- : num_cpu_cores_(CpuInfo::DetectNumberOfCores()),
+ : clock_(Clock::GetRealTimeClock()),
+ num_cpu_cores_(CpuInfo::DetectNumberOfCores()),
module_process_thread_(ProcessThread::Create("ModuleProcessThread")),
- call_stats_(new CallStats()),
- congestion_controller_(new CongestionController(
- module_process_thread_.get(), call_stats_.get())),
+ call_stats_(new CallStats(clock_)),
+ bitrate_allocator_(new BitrateAllocator()),
config_(config),
network_enabled_(true),
receive_crit_(RWLockWrapper::CreateRWLock()),
- send_crit_(RWLockWrapper::CreateRWLock()) {
+ send_crit_(RWLockWrapper::CreateRWLock()),
+ received_video_bytes_(0),
+ received_audio_bytes_(0),
+ received_rtcp_bytes_(0),
+ first_rtp_packet_received_ms_(-1),
+ last_rtp_packet_received_ms_(-1),
+ first_packet_sent_ms_(-1),
+ estimated_send_bitrate_sum_kbits_(0),
+ pacer_bitrate_sum_kbits_(0),
+ num_bitrate_updates_(0),
+ congestion_controller_(
+ new CongestionController(module_process_thread_.get(),
+ call_stats_.get(),
+ this)) {
+ RTC_DCHECK(configuration_thread_checker_.CalledOnValidThread());
RTC_DCHECK_GE(config.bitrate_config.min_bitrate_bps, 0);
RTC_DCHECK_GE(config.bitrate_config.start_bitrate_bps,
config.bitrate_config.min_bitrate_bps);
@@ -152,12 +209,9 @@ Call::Call(const Call::Config& config)
RTC_DCHECK_GE(config.bitrate_config.max_bitrate_bps,
config.bitrate_config.start_bitrate_bps);
}
- if (config.voice_engine) {
- // Keep a reference to VoECodec, so we're sure the VoiceEngine lives for the
- // duration of the call.
- voe_codec_ = VoECodec::GetInterface(config.voice_engine);
- if (voe_codec_)
- event_log_ = voe_codec_->GetEventLog();
+ if (config.audio_state.get()) {
+ ScopedVoEInterface<VoECodec> voe_codec(voice_engine());
+ event_log_ = voe_codec->GetEventLog();
}
Trace::CreateTrace();
@@ -168,10 +222,14 @@ Call::Call(const Call::Config& config)
config_.bitrate_config.min_bitrate_bps,
config_.bitrate_config.start_bitrate_bps,
config_.bitrate_config.max_bitrate_bps);
+
+ congestion_controller_->GetBitrateController()->SetEventLog(event_log_);
}
Call::~Call() {
RTC_DCHECK(configuration_thread_checker_.CalledOnValidThread());
+ UpdateSendHistograms();
+ UpdateReceiveHistograms();
RTC_CHECK(audio_send_ssrcs_.empty());
RTC_CHECK(video_send_ssrcs_.empty());
RTC_CHECK(video_send_streams_.empty());
@@ -182,9 +240,53 @@ Call::~Call() {
module_process_thread_->DeRegisterModule(call_stats_.get());
module_process_thread_->Stop();
Trace::ReturnTrace();
+}
+
+void Call::UpdateSendHistograms() {
+ if (num_bitrate_updates_ == 0 || first_packet_sent_ms_ == -1)
+ return;
+ int64_t elapsed_sec =
+ (clock_->TimeInMilliseconds() - first_packet_sent_ms_) / 1000;
+ if (elapsed_sec < metrics::kMinRunTimeInSeconds)
+ return;
+ int send_bitrate_kbps =
+ estimated_send_bitrate_sum_kbits_ / num_bitrate_updates_;
+ int pacer_bitrate_kbps = pacer_bitrate_sum_kbits_ / num_bitrate_updates_;
+ if (send_bitrate_kbps > 0) {
+ RTC_HISTOGRAM_COUNTS_SPARSE_100000("WebRTC.Call.EstimatedSendBitrateInKbps",
+ send_bitrate_kbps);
+ }
+ if (pacer_bitrate_kbps > 0) {
+ RTC_HISTOGRAM_COUNTS_SPARSE_100000("WebRTC.Call.PacerBitrateInKbps",
+ pacer_bitrate_kbps);
+ }
+}
- if (voe_codec_)
- voe_codec_->Release();
+void Call::UpdateReceiveHistograms() {
+ if (first_rtp_packet_received_ms_ == -1)
+ return;
+ int64_t elapsed_sec =
+ (last_rtp_packet_received_ms_ - first_rtp_packet_received_ms_) / 1000;
+ if (elapsed_sec < metrics::kMinRunTimeInSeconds)
+ return;
+ int audio_bitrate_kbps = received_audio_bytes_ * 8 / elapsed_sec / 1000;
+ int video_bitrate_kbps = received_video_bytes_ * 8 / elapsed_sec / 1000;
+ int rtcp_bitrate_bps = received_rtcp_bytes_ * 8 / elapsed_sec;
+ if (video_bitrate_kbps > 0) {
+ RTC_HISTOGRAM_COUNTS_SPARSE_100000("WebRTC.Call.VideoBitrateReceivedInKbps",
+ video_bitrate_kbps);
+ }
+ if (audio_bitrate_kbps > 0) {
+ RTC_HISTOGRAM_COUNTS_SPARSE_100000("WebRTC.Call.AudioBitrateReceivedInKbps",
+ audio_bitrate_kbps);
+ }
+ if (rtcp_bitrate_bps > 0) {
+ RTC_HISTOGRAM_COUNTS_SPARSE_100000("WebRTC.Call.RtcpBitrateReceivedInBps",
+ rtcp_bitrate_bps);
+ }
+ RTC_HISTOGRAM_COUNTS_SPARSE_100000(
+ "WebRTC.Call.BitrateReceivedInKbps",
+ audio_bitrate_kbps + video_bitrate_kbps + rtcp_bitrate_bps / 1000);
}
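The arithmetic in UpdateReceiveHistograms() converts accumulated byte counts into average rates: bytes * 8 / elapsed seconds gives bits per second, and a further division by 1000 gives kbps (RTCP is kept in bps since its rate is much lower). A worked example with illustrative numbers:

#include <cstdint>
#include <cstdio>

int main() {
  int64_t received_video_bytes = 3750000;  // bytes received over the call
  int64_t elapsed_sec = 30;                // last minus first RTP packet, s
  int video_bitrate_kbps =
      static_cast<int>(received_video_bytes * 8 / elapsed_sec / 1000);
  std::printf("WebRTC.Call.VideoBitrateReceivedInKbps sample: %d\n",
              video_bitrate_kbps);  // 3750000 * 8 / 30 / 1000 = 1000
}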
PacketReceiver* Call::Receiver() {
@@ -198,8 +300,8 @@ webrtc::AudioSendStream* Call::CreateAudioSendStream(
const webrtc::AudioSendStream::Config& config) {
TRACE_EVENT0("webrtc", "Call::CreateAudioSendStream");
RTC_DCHECK(configuration_thread_checker_.CalledOnValidThread());
- AudioSendStream* send_stream =
- new AudioSendStream(config, config_.voice_engine);
+ AudioSendStream* send_stream = new AudioSendStream(
+ config, config_.audio_state, congestion_controller_.get());
if (!network_enabled_)
send_stream->SignalNetworkState(kNetworkDown);
{
@@ -234,8 +336,7 @@ webrtc::AudioReceiveStream* Call::CreateAudioReceiveStream(
TRACE_EVENT0("webrtc", "Call::CreateAudioReceiveStream");
RTC_DCHECK(configuration_thread_checker_.CalledOnValidThread());
AudioReceiveStream* receive_stream = new AudioReceiveStream(
- congestion_controller_->GetRemoteBitrateEstimator(false), config,
- config_.voice_engine);
+ congestion_controller_.get(), config, config_.audio_state);
{
WriteLockScoped write_lock(*receive_crit_);
RTC_DCHECK(audio_receive_ssrcs_.find(config.rtp.remote_ssrc) ==
@@ -279,8 +380,8 @@ webrtc::VideoSendStream* Call::CreateVideoSendStream(
// the call has already started.
VideoSendStream* send_stream = new VideoSendStream(
num_cpu_cores_, module_process_thread_.get(), call_stats_.get(),
- congestion_controller_.get(), config, encoder_config,
- suspended_video_send_ssrcs_);
+ congestion_controller_.get(), bitrate_allocator_.get(), config,
+ encoder_config, suspended_video_send_ssrcs_);
if (!network_enabled_)
send_stream->SignalNetworkState(kNetworkDown);
@@ -338,7 +439,7 @@ webrtc::VideoReceiveStream* Call::CreateVideoReceiveStream(
RTC_DCHECK(configuration_thread_checker_.CalledOnValidThread());
VideoReceiveStream* receive_stream = new VideoReceiveStream(
num_cpu_cores_, congestion_controller_.get(), config,
- config_.voice_engine, module_process_thread_.get(), call_stats_.get());
+ voice_engine(), module_process_thread_.get(), call_stats_.get());
WriteLockScoped write_lock(*receive_crit_);
RTC_DCHECK(video_receive_ssrcs_.find(config.rtp.remote_ssrc) ==
@@ -463,12 +564,48 @@ void Call::SignalNetworkState(NetworkState state) {
}
void Call::OnSentPacket(const rtc::SentPacket& sent_packet) {
+ if (first_packet_sent_ms_ == -1)
+ first_packet_sent_ms_ = clock_->TimeInMilliseconds();
congestion_controller_->OnSentPacket(sent_packet);
}
+void Call::OnNetworkChanged(uint32_t target_bitrate_bps, uint8_t fraction_loss,
+ int64_t rtt_ms) {
+ uint32_t allocated_bitrate_bps = bitrate_allocator_->OnNetworkChanged(
+ target_bitrate_bps, fraction_loss, rtt_ms);
+
+ int pad_up_to_bitrate_bps = 0;
+ {
+ ReadLockScoped read_lock(*send_crit_);
+ // No need to update as long as we're not sending.
+ if (video_send_streams_.empty())
+ return;
+
+ for (VideoSendStream* stream : video_send_streams_)
+ pad_up_to_bitrate_bps += stream->GetPaddingNeededBps();
+ }
+ // The allocated bitrate might be higher than the estimate if a min bitrate
+ // is enforced, or lower if the estimate exceeds the sum of the max
+ // bitrates, so set the pacer bitrate to the maximum of the two.
+ uint32_t pacer_bitrate_bps =
+ std::max(target_bitrate_bps, allocated_bitrate_bps);
+ {
+ rtc::CritScope lock(&bitrate_crit_);
+ // We only update these stats if we have send streams, and assume that
+ // OnNetworkChanged is called at a roughly fixed frequency.
+ estimated_send_bitrate_sum_kbits_ += target_bitrate_bps / 1000;
+ pacer_bitrate_sum_kbits_ += pacer_bitrate_bps / 1000;
+ ++num_bitrate_updates_;
+ }
+ congestion_controller_->UpdatePacerBitrate(
+ target_bitrate_bps / 1000,
+ PacedSender::kDefaultPaceMultiplier * pacer_bitrate_bps / 1000,
+ pad_up_to_bitrate_bps / 1000);
+}
+
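To make the pacer-bitrate comment above concrete: if the estimate is 500 kbps but enforced min bitrates push the allocation to 600 kbps, the pacer is configured from the larger value. A small self-contained sketch of that selection; the 2.5 multiplier is an assumption standing in for PacedSender::kDefaultPaceMultiplier, not a value confirmed by this patch:

#include <algorithm>
#include <cstdint>
#include <cstdio>

int main() {
  uint32_t target_bitrate_bps = 500000;     // estimate from the controller
  uint32_t allocated_bitrate_bps = 600000;  // sum after min-bitrate flooring
  uint32_t pacer_bitrate_bps =
      std::max(target_bitrate_bps, allocated_bitrate_bps);
  const double kPaceMultiplier = 2.5;  // assumption: illustrative value only
  std::printf("pacer max bitrate: %.0f kbps\n",
              kPaceMultiplier * pacer_bitrate_bps / 1000);  // 1500 kbps
}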
void Call::ConfigureSync(const std::string& sync_group) {
// Set sync only if there was no previous one.
- if (config_.voice_engine == nullptr || sync_group.empty())
+ if (voice_engine() == nullptr || sync_group.empty())
return;
AudioReceiveStream* sync_audio_stream = nullptr;
@@ -506,10 +643,10 @@ void Call::ConfigureSync(const std::string& sync_group) {
}
// Only sync the first A/V pair within this sync group.
if (sync_audio_stream != nullptr && num_synced_streams == 1) {
- video_stream->SetSyncChannel(config_.voice_engine,
+ video_stream->SetSyncChannel(voice_engine(),
sync_audio_stream->config().voe_channel_id);
} else {
- video_stream->SetSyncChannel(config_.voice_engine, -1);
+ video_stream->SetSyncChannel(voice_engine(), -1);
}
}
}
@@ -517,10 +654,12 @@ void Call::ConfigureSync(const std::string& sync_group) {
PacketReceiver::DeliveryStatus Call::DeliverRtcp(MediaType media_type,
const uint8_t* packet,
size_t length) {
+ TRACE_EVENT0("webrtc", "Call::DeliverRtcp");
// TODO(pbos): Figure out which channel actually needs it.
// Do NOT broadcast! Also make sure it's a valid packet.
// Return DELIVERY_UNKNOWN_SSRC if it can be determined that
// there's no receiver of the packet.
+ received_rtcp_bytes_ += length;
bool rtcp_delivered = false;
if (media_type == MediaType::ANY || media_type == MediaType::VIDEO) {
ReadLockScoped read_lock(*receive_crit_);
@@ -549,16 +688,21 @@ PacketReceiver::DeliveryStatus Call::DeliverRtp(MediaType media_type,
const uint8_t* packet,
size_t length,
const PacketTime& packet_time) {
+ TRACE_EVENT0("webrtc", "Call::DeliverRtp");
// Minimum RTP header size.
if (length < 12)
return DELIVERY_PACKET_ERROR;
- uint32_t ssrc = ByteReader<uint32_t>::ReadBigEndian(&packet[8]);
+ last_rtp_packet_received_ms_ = clock_->TimeInMilliseconds();
+ if (first_rtp_packet_received_ms_ == -1)
+ first_rtp_packet_received_ms_ = last_rtp_packet_received_ms_;
+ uint32_t ssrc = ByteReader<uint32_t>::ReadBigEndian(&packet[8]);
ReadLockScoped read_lock(*receive_crit_);
if (media_type == MediaType::ANY || media_type == MediaType::AUDIO) {
auto it = audio_receive_ssrcs_.find(ssrc);
if (it != audio_receive_ssrcs_.end()) {
+ received_audio_bytes_ += length;
auto status = it->second->DeliverRtp(packet, length, packet_time)
? DELIVERY_OK
: DELIVERY_PACKET_ERROR;
@@ -570,6 +714,7 @@ PacketReceiver::DeliveryStatus Call::DeliverRtp(MediaType media_type,
if (media_type == MediaType::ANY || media_type == MediaType::VIDEO) {
auto it = video_receive_ssrcs_.find(ssrc);
if (it != video_receive_ssrcs_.end()) {
+ received_video_bytes_ += length;
auto status = it->second->DeliverRtp(packet, length, packet_time)
? DELIVERY_OK
: DELIVERY_PACKET_ERROR;
diff --git a/webrtc/call/call_perf_tests.cc b/webrtc/call/call_perf_tests.cc
index c37b83bab4..3adcb10b09 100644
--- a/webrtc/call/call_perf_tests.cc
+++ b/webrtc/call/call_perf_tests.cc
@@ -18,8 +18,10 @@
#include "webrtc/base/thread_annotations.h"
#include "webrtc/call.h"
#include "webrtc/call/transport_adapter.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
+#include "webrtc/common.h"
+#include "webrtc/config.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/rtp_to_ntp.h"
@@ -172,7 +174,7 @@ class VideoRtcpAndSyncObserver : public SyncRtcpObserver, public VideoRenderer {
false);
}
if (time_since_creation > kMinRunTimeMs)
- observation_complete_->Set();
+ observation_complete_.Set();
}
}
@@ -189,6 +191,8 @@ class VideoRtcpAndSyncObserver : public SyncRtcpObserver, public VideoRenderer {
void CallPerfTest::TestAudioVideoSync(bool fec, bool create_audio_first) {
const char* kSyncGroup = "av_sync";
+ const uint32_t kAudioSendSsrc = 1234;
+ const uint32_t kAudioRecvSsrc = 5678;
class AudioPacketReceiver : public PacketReceiver {
public:
AudioPacketReceiver(int channel, VoENetwork* voe_network)
@@ -228,35 +232,45 @@ void CallPerfTest::TestAudioVideoSync(bool fec, bool create_audio_first) {
test::FakeAudioDevice fake_audio_device(Clock::GetRealTimeClock(),
audio_filename);
EXPECT_EQ(0, voe_base->Init(&fake_audio_device, nullptr));
- int channel = voe_base->CreateChannel();
+ Config voe_config;
+ voe_config.Set<VoicePacing>(new VoicePacing(true));
+ int send_channel_id = voe_base->CreateChannel(voe_config);
+ int recv_channel_id = voe_base->CreateChannel();
SyncRtcpObserver audio_observer;
+ AudioState::Config send_audio_state_config;
+ send_audio_state_config.voice_engine = voice_engine;
+ Call::Config sender_config;
+ sender_config.audio_state = AudioState::Create(send_audio_state_config);
Call::Config receiver_config;
- receiver_config.voice_engine = voice_engine;
- CreateCalls(Call::Config(), receiver_config);
+ receiver_config.audio_state = sender_config.audio_state;
+ CreateCalls(sender_config, receiver_config);
- CodecInst isac = {103, "ISAC", 16000, 480, 1, 32000};
- EXPECT_EQ(0, voe_codec->SetSendCodec(channel, isac));
-
- AudioPacketReceiver voe_packet_receiver(channel, voe_network);
+ AudioPacketReceiver voe_send_packet_receiver(send_channel_id, voe_network);
+ AudioPacketReceiver voe_recv_packet_receiver(recv_channel_id, voe_network);
FakeNetworkPipe::Config net_config;
net_config.queue_delay_ms = 500;
net_config.loss_percent = 5;
test::PacketTransport audio_send_transport(
nullptr, &audio_observer, test::PacketTransport::kSender, net_config);
- audio_send_transport.SetReceiver(&voe_packet_receiver);
+ audio_send_transport.SetReceiver(&voe_recv_packet_receiver);
test::PacketTransport audio_receive_transport(
nullptr, &audio_observer, test::PacketTransport::kReceiver, net_config);
- audio_receive_transport.SetReceiver(&voe_packet_receiver);
+ audio_receive_transport.SetReceiver(&voe_send_packet_receiver);
+
+ internal::TransportAdapter send_transport_adapter(&audio_send_transport);
+ send_transport_adapter.Enable();
+ EXPECT_EQ(0, voe_network->RegisterExternalTransport(send_channel_id,
+ send_transport_adapter));
- internal::TransportAdapter transport_adapter(&audio_send_transport);
- transport_adapter.Enable();
- EXPECT_EQ(0,
- voe_network->RegisterExternalTransport(channel, transport_adapter));
+ internal::TransportAdapter recv_transport_adapter(&audio_receive_transport);
+ recv_transport_adapter.Enable();
+ EXPECT_EQ(0, voe_network->RegisterExternalTransport(recv_channel_id,
+ recv_transport_adapter));
- VideoRtcpAndSyncObserver observer(Clock::GetRealTimeClock(), channel,
+ VideoRtcpAndSyncObserver observer(Clock::GetRealTimeClock(), recv_channel_id,
voe_sync, &audio_observer);
test::PacketTransport sync_send_transport(sender_call_.get(), &observer,
@@ -270,34 +284,45 @@ void CallPerfTest::TestAudioVideoSync(bool fec, bool create_audio_first) {
test::FakeDecoder fake_decoder;
- CreateSendConfig(1, &sync_send_transport);
+ CreateSendConfig(1, 0, &sync_send_transport);
CreateMatchingReceiveConfigs(&sync_receive_transport);
- send_config_.rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
+ AudioSendStream::Config audio_send_config(&audio_send_transport);
+ audio_send_config.voe_channel_id = send_channel_id;
+ audio_send_config.rtp.ssrc = kAudioSendSsrc;
+ AudioSendStream* audio_send_stream =
+ sender_call_->CreateAudioSendStream(audio_send_config);
+
+ CodecInst isac = {103, "ISAC", 16000, 480, 1, 32000};
+ EXPECT_EQ(0, voe_codec->SetSendCodec(send_channel_id, isac));
+
+ video_send_config_.rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
if (fec) {
- send_config_.rtp.fec.red_payload_type = kRedPayloadType;
- send_config_.rtp.fec.ulpfec_payload_type = kUlpfecPayloadType;
- receive_configs_[0].rtp.fec.red_payload_type = kRedPayloadType;
- receive_configs_[0].rtp.fec.ulpfec_payload_type = kUlpfecPayloadType;
+ video_send_config_.rtp.fec.red_payload_type = kRedPayloadType;
+ video_send_config_.rtp.fec.ulpfec_payload_type = kUlpfecPayloadType;
+ video_receive_configs_[0].rtp.fec.red_payload_type = kRedPayloadType;
+ video_receive_configs_[0].rtp.fec.ulpfec_payload_type = kUlpfecPayloadType;
}
- receive_configs_[0].rtp.nack.rtp_history_ms = 1000;
- receive_configs_[0].renderer = &observer;
- receive_configs_[0].sync_group = kSyncGroup;
+ video_receive_configs_[0].rtp.nack.rtp_history_ms = 1000;
+ video_receive_configs_[0].renderer = &observer;
+ video_receive_configs_[0].sync_group = kSyncGroup;
- AudioReceiveStream::Config audio_config;
- audio_config.voe_channel_id = channel;
- audio_config.sync_group = kSyncGroup;
+ AudioReceiveStream::Config audio_recv_config;
+ audio_recv_config.rtp.remote_ssrc = kAudioSendSsrc;
+ audio_recv_config.rtp.local_ssrc = kAudioRecvSsrc;
+ audio_recv_config.voe_channel_id = recv_channel_id;
+ audio_recv_config.sync_group = kSyncGroup;
- AudioReceiveStream* audio_receive_stream = nullptr;
+ AudioReceiveStream* audio_receive_stream;
if (create_audio_first) {
audio_receive_stream =
- receiver_call_->CreateAudioReceiveStream(audio_config);
- CreateStreams();
+ receiver_call_->CreateAudioReceiveStream(audio_recv_config);
+ CreateVideoStreams();
} else {
- CreateStreams();
+ CreateVideoStreams();
audio_receive_stream =
- receiver_call_->CreateAudioReceiveStream(audio_config);
+ receiver_call_->CreateAudioReceiveStream(audio_recv_config);
}
CreateFrameGeneratorCapturer();
@@ -305,16 +330,16 @@ void CallPerfTest::TestAudioVideoSync(bool fec, bool create_audio_first) {
Start();
fake_audio_device.Start();
- EXPECT_EQ(0, voe_base->StartPlayout(channel));
- EXPECT_EQ(0, voe_base->StartReceive(channel));
- EXPECT_EQ(0, voe_base->StartSend(channel));
+ EXPECT_EQ(0, voe_base->StartPlayout(recv_channel_id));
+ EXPECT_EQ(0, voe_base->StartReceive(recv_channel_id));
+ EXPECT_EQ(0, voe_base->StartSend(send_channel_id));
- EXPECT_EQ(kEventSignaled, observer.Wait())
+ EXPECT_TRUE(observer.Wait())
<< "Timed out while waiting for audio and video to be synchronized.";
- EXPECT_EQ(0, voe_base->StopSend(channel));
- EXPECT_EQ(0, voe_base->StopReceive(channel));
- EXPECT_EQ(0, voe_base->StopPlayout(channel));
+ EXPECT_EQ(0, voe_base->StopSend(send_channel_id));
+ EXPECT_EQ(0, voe_base->StopReceive(recv_channel_id));
+ EXPECT_EQ(0, voe_base->StopPlayout(recv_channel_id));
fake_audio_device.Stop();
Stop();
@@ -323,16 +348,18 @@ void CallPerfTest::TestAudioVideoSync(bool fec, bool create_audio_first) {
audio_send_transport.StopSending();
audio_receive_transport.StopSending();
- voe_base->DeleteChannel(channel);
+ DestroyStreams();
+
+ sender_call_->DestroyAudioSendStream(audio_send_stream);
+ receiver_call_->DestroyAudioReceiveStream(audio_receive_stream);
+
+ voe_base->DeleteChannel(send_channel_id);
+ voe_base->DeleteChannel(recv_channel_id);
voe_base->Release();
voe_codec->Release();
voe_network->Release();
voe_sync->Release();
- DestroyStreams();
-
- receiver_call_->DestroyAudioReceiveStream(audio_receive_stream);
-
DestroyCalls();
VoiceEngine::Delete(voice_engine);
@@ -357,8 +384,12 @@ void CallPerfTest::TestCaptureNtpTime(const FakeNetworkPipe::Config& net_config,
class CaptureNtpTimeObserver : public test::EndToEndTest,
public VideoRenderer {
public:
- CaptureNtpTimeObserver(int threshold_ms, int start_time_ms, int run_time_ms)
+ CaptureNtpTimeObserver(const FakeNetworkPipe::Config& net_config,
+ int threshold_ms,
+ int start_time_ms,
+ int run_time_ms)
: EndToEndTest(kLongTimeoutMs),
+ net_config_(net_config),
clock_(Clock::GetRealTimeClock()),
threshold_ms_(threshold_ms),
start_time_ms_(start_time_ms),
@@ -369,6 +400,16 @@ void CallPerfTest::TestCaptureNtpTime(const FakeNetworkPipe::Config& net_config,
rtp_start_timestamp_(0) {}
private:
+ test::PacketTransport* CreateSendTransport(Call* sender_call) override {
+ return new test::PacketTransport(
+ sender_call, this, test::PacketTransport::kSender, net_config_);
+ }
+
+ test::PacketTransport* CreateReceiveTransport() override {
+ return new test::PacketTransport(
+ nullptr, this, test::PacketTransport::kReceiver, net_config_);
+ }
+
void RenderFrame(const VideoFrame& video_frame,
int time_to_render_ms) override {
rtc::CritScope lock(&crit_);
@@ -386,7 +427,7 @@ void CallPerfTest::TestCaptureNtpTime(const FakeNetworkPipe::Config& net_config,
}
if (time_since_creation > run_time_ms_) {
- observation_complete_->Set();
+ observation_complete_.Set();
}
FrameCaptureTimeList::iterator iter =
@@ -437,21 +478,23 @@ void CallPerfTest::TestCaptureNtpTime(const FakeNetworkPipe::Config& net_config,
capturer_ = frame_generator_capturer;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
(*receive_configs)[0].renderer = this;
// Enable the receiver side rtt calculation.
(*receive_configs)[0].rtp.rtcp_xr.receiver_reference_time_report = true;
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait()) << "Timed out while waiting for "
- "estimated capture NTP time to be "
- "within bounds.";
+ EXPECT_TRUE(Wait()) << "Timed out while waiting for "
+ "estimated capture NTP time to be "
+ "within bounds.";
}
rtc::CriticalSection crit_;
+ const FakeNetworkPipe::Config net_config_;
Clock* const clock_;
int threshold_ms_;
int start_time_ms_;
@@ -462,9 +505,9 @@ void CallPerfTest::TestCaptureNtpTime(const FakeNetworkPipe::Config& net_config,
uint32_t rtp_start_timestamp_;
typedef std::map<uint32_t, uint32_t> FrameCaptureTimeList;
FrameCaptureTimeList capture_time_list_ GUARDED_BY(&crit_);
- } test(threshold_ms, start_time_ms, run_time_ms);
+ } test(net_config, threshold_ms, start_time_ms, run_time_ms);
- RunBaseTest(&test, net_config);
+ RunBaseTest(&test);
}
TEST_F(CallPerfTest, CaptureNtpTimeWithNetworkDelay) {
@@ -501,26 +544,26 @@ void CallPerfTest::TestCpuOveruse(LoadObserver::Load tested_load,
void OnLoadUpdate(Load load) override {
if (load == tested_load_)
- observation_complete_->Set();
+ observation_complete_.Set();
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->overuse_callback = this;
send_config->encoder_settings.encoder = &encoder_;
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
- << "Timed out before receiving an overuse callback.";
+ EXPECT_TRUE(Wait()) << "Timed out before receiving an overuse callback.";
}
LoadObserver::Load tested_load_;
test::DelayedEncoder encoder_;
} test(tested_load, encode_delay_ms);
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(CallPerfTest, ReceivesCpuUnderuse) {
@@ -581,21 +624,22 @@ void CallPerfTest::TestMinTransmitBitrate(bool pad_to_min_bitrate) {
}
if (num_bitrate_observations_in_range_ ==
kNumBitrateObservationsInRange)
- observation_complete_->Set();
+ observation_complete_.Set();
}
}
return SEND_PACKET;
}
- void OnStreamsCreated(
+ void OnVideoStreamsCreated(
VideoSendStream* send_stream,
const std::vector<VideoReceiveStream*>& receive_streams) override {
send_stream_ = send_stream;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
if (pad_to_min_bitrate_) {
encoder_config->min_transmit_bitrate_bps = kMinTransmitBitrateBps;
} else {
@@ -604,8 +648,7 @@ void CallPerfTest::TestMinTransmitBitrate(bool pad_to_min_bitrate) {
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
- << "Timeout while waiting for send-bitrate stats.";
+ EXPECT_TRUE(Wait()) << "Timeout while waiting for send-bitrate stats.";
}
VideoSendStream* send_stream_;
@@ -614,7 +657,7 @@ void CallPerfTest::TestMinTransmitBitrate(bool pad_to_min_bitrate) {
} test(pad_to_min_bitrate);
fake_encoder_.SetMaxBitrate(kMaxEncodeBitrateKbps);
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(CallPerfTest, PadsToMinTransmitBitrate) { TestMinTransmitBitrate(true); }
@@ -633,7 +676,7 @@ TEST_F(CallPerfTest, KeepsHighBitrateWhenReconfiguringSender) {
BitrateObserver()
: EndToEndTest(kDefaultTimeoutMs),
FakeEncoder(Clock::GetRealTimeClock()),
- time_to_reconfigure_(webrtc::EventWrapper::Create()),
+ time_to_reconfigure_(false, false),
encoder_inits_(0),
last_set_bitrate_(0),
send_stream_(nullptr) {}
@@ -652,7 +695,7 @@ TEST_F(CallPerfTest, KeepsHighBitrateWhenReconfiguringSender) {
last_set_bitrate_,
kPermittedReconfiguredBitrateDiffKbps)
<< "Encoder reconfigured with bitrate too far away from last set.";
- observation_complete_->Set();
+ observation_complete_.Set();
}
return FakeEncoder::InitEncode(config, number_of_cores, max_payload_size);
}
@@ -662,7 +705,7 @@ TEST_F(CallPerfTest, KeepsHighBitrateWhenReconfiguringSender) {
last_set_bitrate_ = new_target_bitrate_kbps;
if (encoder_inits_ == 1 &&
new_target_bitrate_kbps > kReconfigureThresholdKbps) {
- time_to_reconfigure_->Set();
+ time_to_reconfigure_.Set();
}
return FakeEncoder::SetRates(new_target_bitrate_kbps, framerate);
}
@@ -673,9 +716,10 @@ TEST_F(CallPerfTest, KeepsHighBitrateWhenReconfiguringSender) {
return config;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->encoder_settings.encoder = this;
encoder_config->streams[0].min_bitrate_bps = 50000;
encoder_config->streams[0].target_bitrate_bps =
@@ -684,32 +728,32 @@ TEST_F(CallPerfTest, KeepsHighBitrateWhenReconfiguringSender) {
encoder_config_ = *encoder_config;
}
- void OnStreamsCreated(
+ void OnVideoStreamsCreated(
VideoSendStream* send_stream,
const std::vector<VideoReceiveStream*>& receive_streams) override {
send_stream_ = send_stream;
}
void PerformTest() override {
- ASSERT_EQ(kEventSignaled, time_to_reconfigure_->Wait(kDefaultTimeoutMs))
+ ASSERT_TRUE(time_to_reconfigure_.Wait(kDefaultTimeoutMs))
<< "Timed out before receiving an initial high bitrate.";
encoder_config_.streams[0].width *= 2;
encoder_config_.streams[0].height *= 2;
EXPECT_TRUE(send_stream_->ReconfigureVideoEncoder(encoder_config_));
- EXPECT_EQ(kEventSignaled, Wait())
+ EXPECT_TRUE(Wait())
<< "Timed out while waiting for a couple of high bitrate estimates "
"after reconfiguring the send stream.";
}
private:
- rtc::scoped_ptr<webrtc::EventWrapper> time_to_reconfigure_;
+ rtc::Event time_to_reconfigure_;
int encoder_inits_;
uint32_t last_set_bitrate_;
VideoSendStream* send_stream_;
VideoEncoderConfig encoder_config_;
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
} // namespace webrtc
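A recurring mechanical change in this patch is the migration from a heap-allocated EventWrapper, waited on with EXPECT_EQ(kEventSignaled, ...), to a stack-allocated rtc::Event whose Wait() returns bool. A minimal sketch of the new pattern, assuming only the rtc::Event API visible in the diff above (a constructor taking manual_reset and initially_signaled, Set(), and bool Wait(timeout_ms)):

#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/event.h"

TEST(EventMigrationSketch, WaitReturnsTrueOnceSet) {
  rtc::Event done(false /* manual_reset */, false /* initially_signaled */);
  done.Set();  // Normally called from an observer or encoder callback.
  EXPECT_TRUE(done.Wait(1000 /* give_up_after_ms */));
}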
diff --git a/webrtc/call/call_unittest.cc b/webrtc/call/call_unittest.cc
index 9819b538f8..75c8238a5b 100644
--- a/webrtc/call/call_unittest.cc
+++ b/webrtc/call/call_unittest.cc
@@ -12,22 +12,25 @@
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/audio_state.h"
#include "webrtc/call.h"
-#include "webrtc/test/fake_voice_engine.h"
+#include "webrtc/test/mock_voice_engine.h"
namespace {
struct CallHelper {
- CallHelper() : voice_engine_(new webrtc::test::FakeVoiceEngine()) {
+ CallHelper() {
+ webrtc::AudioState::Config audio_state_config;
+ audio_state_config.voice_engine = &voice_engine_;
webrtc::Call::Config config;
- config.voice_engine = voice_engine_.get();
+ config.audio_state = webrtc::AudioState::Create(audio_state_config);
call_.reset(webrtc::Call::Create(config));
}
webrtc::Call* operator->() { return call_.get(); }
private:
- rtc::scoped_ptr<webrtc::test::FakeVoiceEngine> voice_engine_;
+ testing::NiceMock<webrtc::test::MockVoiceEngine> voice_engine_;
rtc::scoped_ptr<webrtc::Call> call_;
};
} // namespace
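The switch from FakeVoiceEngine to testing::NiceMock<MockVoiceEngine> is worth a note: a NiceMock silently allows calls that have no explicit expectation, which suits a fixture like CallHelper where the voice engine is incidental to what each test asserts. A self-contained illustration of that behavior (Logger and MockLogger are made-up names, not part of WebRTC):

#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"

class Logger {
 public:
  virtual ~Logger() {}
  virtual void Log(int level) = 0;
};

class MockLogger : public Logger {
 public:
  MOCK_METHOD1(Log, void(int level));
};

TEST(NiceMockSketch, UninterestingCallsAreSilent) {
  testing::NiceMock<MockLogger> logger;
  logger.Log(1);  // No EXPECT_CALL set up; a NiceMock stays quiet.
}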
diff --git a/webrtc/call/congestion_controller.cc b/webrtc/call/congestion_controller.cc
index 1ec361e898..c442667ae0 100644
--- a/webrtc/call/congestion_controller.cc
+++ b/webrtc/call/congestion_controller.cc
@@ -11,23 +11,24 @@
#include "webrtc/call/congestion_controller.h"
#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/common.h"
-#include "webrtc/modules/pacing/include/paced_sender.h"
-#include "webrtc/modules/pacing/include/packet_router.h"
+#include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"
+#include "webrtc/modules/pacing/paced_sender.h"
+#include "webrtc/modules/pacing/packet_router.h"
#include "webrtc/modules/remote_bitrate_estimator/include/send_time_history.h"
#include "webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h"
#include "webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h"
#include "webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.h"
#include "webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
-#include "webrtc/modules/utility/interface/process_thread.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "webrtc/modules/utility/include/process_thread.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/logging.h"
-#include "webrtc/video_engine/call_stats.h"
-#include "webrtc/video_engine/payload_router.h"
-#include "webrtc/video_engine/vie_encoder.h"
-#include "webrtc/video_engine/vie_remb.h"
+#include "webrtc/video/call_stats.h"
+#include "webrtc/video/payload_router.h"
+#include "webrtc/video/vie_encoder.h"
+#include "webrtc/video/vie_remb.h"
#include "webrtc/voice_engine/include/voe_video_sync.h"
namespace webrtc {
@@ -144,9 +145,9 @@ class WrappingBitrateEstimator : public RemoteBitrateEstimator {
} // namespace
CongestionController::CongestionController(ProcessThread* process_thread,
- CallStats* call_stats)
- : remb_(new VieRemb()),
- bitrate_allocator_(new BitrateAllocator()),
+ CallStats* call_stats,
+ BitrateObserver* bitrate_observer)
+ : remb_(new VieRemb(Clock::GetRealTimeClock())),
packet_router_(new PacketRouter()),
pacer_(new PacedSender(Clock::GetRealTimeClock(),
packet_router_.get(),
@@ -166,7 +167,7 @@ CongestionController::CongestionController(ProcessThread* process_thread,
// construction.
bitrate_controller_(
BitrateController::CreateBitrateController(Clock::GetRealTimeClock(),
- this)),
+ bitrate_observer)),
min_bitrate_bps_(RemoteBitrateEstimator::kDefaultMinBitrateBps) {
call_stats_->RegisterStatsObserver(remote_bitrate_estimator_.get());
@@ -249,6 +250,12 @@ CongestionController::GetTransportFeedbackObserver() {
return transport_feedback_adapter_.get();
}
+void CongestionController::UpdatePacerBitrate(int bitrate_kbps,
+ int max_bitrate_kbps,
+ int min_bitrate_kbps) {
+ pacer_->UpdateBitrate(bitrate_kbps, max_bitrate_kbps, min_bitrate_kbps);
+}
+
int64_t CongestionController::GetPacerQueuingDelayMs() const {
return pacer_->QueueInMs();
}
@@ -278,23 +285,6 @@ void CongestionController::SignalNetworkState(NetworkState state) {
}
}
-// TODO(mflodman): Move this logic out from CongestionController.
-void CongestionController::OnNetworkChanged(uint32_t target_bitrate_bps,
- uint8_t fraction_loss,
- int64_t rtt) {
- bitrate_allocator_->OnNetworkChanged(target_bitrate_bps, fraction_loss, rtt);
- int pad_up_to_bitrate_bps = 0;
- {
- rtc::CritScope lock(&encoder_crit_);
- for (const auto& encoder : encoders_)
- pad_up_to_bitrate_bps += encoder->GetPaddingNeededBps();
- }
- pacer_->UpdateBitrate(
- target_bitrate_bps / 1000,
- PacedSender::kDefaultPaceMultiplier * target_bitrate_bps / 1000,
- pad_up_to_bitrate_bps / 1000);
-}
-
void CongestionController::OnSentPacket(const rtc::SentPacket& sent_packet) {
if (transport_feedback_adapter_) {
transport_feedback_adapter_->OnSentPacket(sent_packet.packet_id,
diff --git a/webrtc/call/congestion_controller.h b/webrtc/call/congestion_controller.h
index b424234123..b77c46faa3 100644
--- a/webrtc/call/congestion_controller.h
+++ b/webrtc/call/congestion_controller.h
@@ -16,12 +16,12 @@
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/socket.h"
-#include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"
#include "webrtc/stream.h"
namespace webrtc {
-class BitrateAllocator;
+class BitrateController;
+class BitrateObserver;
class CallStats;
class Config;
class PacedSender;
@@ -32,43 +32,43 @@ class RemoteEstimatorProxy;
class RtpRtcp;
class SendStatisticsProxy;
class TransportFeedbackAdapter;
+class TransportFeedbackObserver;
class ViEEncoder;
class VieRemb;
-class CongestionController : public BitrateObserver {
+class CongestionController {
public:
- CongestionController(ProcessThread* process_thread, CallStats* call_stats);
- ~CongestionController();
- void AddEncoder(ViEEncoder* encoder);
- void RemoveEncoder(ViEEncoder* encoder);
- void SetBweBitrates(int min_bitrate_bps,
- int start_bitrate_bps,
- int max_bitrate_bps);
-
- void SetChannelRembStatus(bool sender, bool receiver, RtpRtcp* rtp_module);
-
- void SignalNetworkState(NetworkState state);
-
- BitrateController* GetBitrateController() const;
- RemoteBitrateEstimator* GetRemoteBitrateEstimator(bool send_side_bwe) const;
- int64_t GetPacerQueuingDelayMs() const;
- PacedSender* pacer() const { return pacer_.get(); }
- PacketRouter* packet_router() const { return packet_router_.get(); }
- BitrateAllocator* bitrate_allocator() const {
- return bitrate_allocator_.get(); }
- TransportFeedbackObserver* GetTransportFeedbackObserver();
-
- // Implements BitrateObserver.
- void OnNetworkChanged(uint32_t target_bitrate_bps,
- uint8_t fraction_loss,
- int64_t rtt) override;
-
- void OnSentPacket(const rtc::SentPacket& sent_packet);
+ CongestionController(ProcessThread* process_thread, CallStats* call_stats,
+ BitrateObserver* bitrate_observer);
+ virtual ~CongestionController();
+ virtual void AddEncoder(ViEEncoder* encoder);
+ virtual void RemoveEncoder(ViEEncoder* encoder);
+ virtual void SetBweBitrates(int min_bitrate_bps,
+ int start_bitrate_bps,
+ int max_bitrate_bps);
+
+ virtual void SetChannelRembStatus(bool sender,
+ bool receiver,
+ RtpRtcp* rtp_module);
+
+ virtual void SignalNetworkState(NetworkState state);
+
+ virtual BitrateController* GetBitrateController() const;
+ virtual RemoteBitrateEstimator* GetRemoteBitrateEstimator(
+ bool send_side_bwe) const;
+ virtual int64_t GetPacerQueuingDelayMs() const;
+ virtual PacedSender* pacer() const { return pacer_.get(); }
+ virtual PacketRouter* packet_router() const { return packet_router_.get(); }
+ virtual TransportFeedbackObserver* GetTransportFeedbackObserver();
+
+ virtual void UpdatePacerBitrate(int bitrate_kbps,
+ int max_bitrate_kbps,
+ int min_bitrate_kbps);
+
+ virtual void OnSentPacket(const rtc::SentPacket& sent_packet);
private:
rtc::scoped_ptr<VieRemb> remb_;
- // TODO(mflodman): Move bitrate_allocator_ to Call.
- rtc::scoped_ptr<BitrateAllocator> bitrate_allocator_;
rtc::scoped_ptr<PacketRouter> packet_router_;
rtc::scoped_ptr<PacedSender> pacer_;
rtc::scoped_ptr<RemoteBitrateEstimator> remote_bitrate_estimator_;
@@ -86,6 +86,8 @@ class CongestionController : public BitrateObserver {
rtc::scoped_ptr<BitrateController> bitrate_controller_;
rtc::scoped_ptr<TransportFeedbackAdapter> transport_feedback_adapter_;
int min_bitrate_bps_;
+
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(CongestionController);
};
} // namespace webrtc
diff --git a/webrtc/call/mock/mock_congestion_controller.h b/webrtc/call/mock/mock_congestion_controller.h
new file mode 100644
index 0000000000..54014da339
--- /dev/null
+++ b/webrtc/call/mock/mock_congestion_controller.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_CALL_MOCK_MOCK_CONGESTION_CONTROLLER_H_
+#define WEBRTC_CALL_MOCK_MOCK_CONGESTION_CONTROLLER_H_
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "webrtc/call/congestion_controller.h"
+
+namespace webrtc {
+namespace test {
+
+class MockCongestionController : public CongestionController {
+ public:
+ MockCongestionController(ProcessThread* process_thread,
+ CallStats* call_stats,
+ BitrateObserver* bitrate_observer)
+ : CongestionController(process_thread, call_stats, bitrate_observer) {}
+ MOCK_METHOD1(AddEncoder, void(ViEEncoder* encoder));
+ MOCK_METHOD1(RemoveEncoder, void(ViEEncoder* encoder));
+ MOCK_METHOD3(SetBweBitrates,
+ void(int min_bitrate_bps,
+ int start_bitrate_bps,
+ int max_bitrate_bps));
+ MOCK_METHOD3(SetChannelRembStatus,
+ void(bool sender, bool receiver, RtpRtcp* rtp_module));
+ MOCK_METHOD1(SignalNetworkState, void(NetworkState state));
+ MOCK_CONST_METHOD0(GetBitrateController, BitrateController*());
+ MOCK_CONST_METHOD1(GetRemoteBitrateEstimator,
+ RemoteBitrateEstimator*(bool send_side_bwe));
+ MOCK_CONST_METHOD0(GetPacerQueuingDelayMs, int64_t());
+ MOCK_CONST_METHOD0(pacer, PacedSender*());
+ MOCK_CONST_METHOD0(packet_router, PacketRouter*());
+ MOCK_METHOD0(GetTransportFeedbackObserver, TransportFeedbackObserver*());
+ MOCK_METHOD3(UpdatePacerBitrate,
+ void(int bitrate_kbps,
+ int max_bitrate_kbps,
+ int min_bitrate_kbps));
+ MOCK_METHOD1(OnSentPacket, void(const rtc::SentPacket& sent_packet));
+
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(MockCongestionController);
+};
+} // namespace test
+} // namespace webrtc
+#endif // WEBRTC_CALL_MOCK_MOCK_CONGESTION_CONTROLLER_H_
diff --git a/webrtc/call/packet_injection_tests.cc b/webrtc/call/packet_injection_tests.cc
index 18ca0581d1..277cd3e4df 100644
--- a/webrtc/call/packet_injection_tests.cc
+++ b/webrtc/call/packet_injection_tests.cc
@@ -40,22 +40,22 @@ void PacketInjectionTest::InjectIncorrectPacket(CodecType codec_type,
CreateReceiverCall(Call::Config());
test::NullTransport null_transport;
- CreateSendConfig(1, &null_transport);
+ CreateSendConfig(1, 0, &null_transport);
CreateMatchingReceiveConfigs(&null_transport);
- receive_configs_[0].decoders[0].payload_type = payload_type;
+ video_receive_configs_[0].decoders[0].payload_type = payload_type;
switch (codec_type) {
case CodecType::kVp8:
- receive_configs_[0].decoders[0].payload_name = "VP8";
+ video_receive_configs_[0].decoders[0].payload_name = "VP8";
break;
case CodecType::kH264:
- receive_configs_[0].decoders[0].payload_name = "H264";
+ video_receive_configs_[0].decoders[0].payload_name = "H264";
break;
}
- CreateStreams();
+ CreateVideoStreams();
RTPHeader header;
EXPECT_TRUE(rtp_header_parser_->Parse(packet, length, &header));
- EXPECT_EQ(kSendSsrcs[0], header.ssrc)
+ EXPECT_EQ(kVideoSendSsrcs[0], header.ssrc)
<< "Packet should have configured SSRC to not be dropped early.";
EXPECT_EQ(payload_type, header.payloadType);
Start();
diff --git a/webrtc/call/rampup_tests.cc b/webrtc/call/rampup_tests.cc
new file mode 100644
index 0000000000..81f1e81c68
--- /dev/null
+++ b/webrtc/call/rampup_tests.cc
@@ -0,0 +1,587 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/call/rampup_tests.h"
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/platform_thread.h"
+#include "webrtc/test/testsupport/perf_test.h"
+
+namespace webrtc {
+namespace {
+
+static const int64_t kPollIntervalMs = 20;
+
+std::vector<uint32_t> GenerateSsrcs(size_t num_streams, uint32_t ssrc_offset) {
+ std::vector<uint32_t> ssrcs;
+ for (size_t i = 0; i != num_streams; ++i)
+ ssrcs.push_back(static_cast<uint32_t>(ssrc_offset + i));
+ return ssrcs;
+}
+} // namespace
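GenerateSsrcs() simply produces consecutive SSRCs from an offset, which is how the constructor below keeps the video (offset 100), RTX (200), and audio (300) ranges disjoint. A self-contained usage example, with the helper repeated verbatim:

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <vector>

std::vector<uint32_t> GenerateSsrcs(size_t num_streams, uint32_t ssrc_offset) {
  std::vector<uint32_t> ssrcs;
  for (size_t i = 0; i != num_streams; ++i)
    ssrcs.push_back(static_cast<uint32_t>(ssrc_offset + i));
  return ssrcs;
}

int main() {
  for (uint32_t ssrc : GenerateSsrcs(3, 100))
    std::printf("%u ", static_cast<unsigned>(ssrc));  // 100 101 102
}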
+
+RampUpTester::RampUpTester(size_t num_video_streams,
+ size_t num_audio_streams,
+ unsigned int start_bitrate_bps,
+ const std::string& extension_type,
+ bool rtx,
+ bool red)
+ : EndToEndTest(test::CallTest::kLongTimeoutMs),
+ event_(false, false),
+ clock_(Clock::GetRealTimeClock()),
+ num_video_streams_(num_video_streams),
+ num_audio_streams_(num_audio_streams),
+ rtx_(rtx),
+ red_(red),
+ send_stream_(nullptr),
+ start_bitrate_bps_(start_bitrate_bps),
+ start_bitrate_verified_(false),
+ expected_bitrate_bps_(0),
+ test_start_ms_(-1),
+ ramp_up_finished_ms_(-1),
+ extension_type_(extension_type),
+ video_ssrcs_(GenerateSsrcs(num_video_streams_, 100)),
+ video_rtx_ssrcs_(GenerateSsrcs(num_video_streams_, 200)),
+ audio_ssrcs_(GenerateSsrcs(num_audio_streams_, 300)),
+ poller_thread_(&BitrateStatsPollingThread,
+ this,
+ "BitrateStatsPollingThread"),
+ sender_call_(nullptr) {
+ EXPECT_LE(num_audio_streams_, 1u);
+ if (rtx_) {
+ for (size_t i = 0; i < video_ssrcs_.size(); ++i)
+ rtx_ssrc_map_[video_rtx_ssrcs_[i]] = video_ssrcs_[i];
+ }
+}
+
+RampUpTester::~RampUpTester() {
+ event_.Set();
+}
+
+Call::Config RampUpTester::GetSenderCallConfig() {
+ Call::Config call_config;
+ if (start_bitrate_bps_ != 0) {
+ call_config.bitrate_config.start_bitrate_bps = start_bitrate_bps_;
+ }
+ call_config.bitrate_config.min_bitrate_bps = 10000;
+ return call_config;
+}
+
+void RampUpTester::OnVideoStreamsCreated(
+ VideoSendStream* send_stream,
+ const std::vector<VideoReceiveStream*>& receive_streams) {
+ send_stream_ = send_stream;
+}
+
+test::PacketTransport* RampUpTester::CreateSendTransport(Call* sender_call) {
+ send_transport_ = new test::PacketTransport(sender_call, this,
+ test::PacketTransport::kSender,
+ forward_transport_config_);
+ return send_transport_;
+}
+
+size_t RampUpTester::GetNumVideoStreams() const {
+ return num_video_streams_;
+}
+
+size_t RampUpTester::GetNumAudioStreams() const {
+ return num_audio_streams_;
+}
+
+void RampUpTester::ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) {
+ send_config->suspend_below_min_bitrate = true;
+
+ if (num_video_streams_ == 1) {
+ encoder_config->streams[0].target_bitrate_bps =
+ encoder_config->streams[0].max_bitrate_bps = 2000000;
+ // For a single stream, ramp up until 1 Mbps.
+ expected_bitrate_bps_ = kSingleStreamTargetBps;
+ } else {
+ // For multiple streams, ramp up until all streams are being sent. That
+ // means enough bitrate to send all the target streams plus the min
+ // bitrate of the last one.
+ expected_bitrate_bps_ = encoder_config->streams.back().min_bitrate_bps;
+ for (size_t i = 0; i < encoder_config->streams.size() - 1; ++i) {
+ expected_bitrate_bps_ += encoder_config->streams[i].target_bitrate_bps;
+ }
+ }
+
+ send_config->rtp.extensions.clear();
+
+ bool remb;
+ bool transport_cc;
+ if (extension_type_ == RtpExtension::kAbsSendTime) {
+ remb = true;
+ transport_cc = false;
+ send_config->rtp.extensions.push_back(
+ RtpExtension(extension_type_.c_str(), kAbsSendTimeExtensionId));
+ } else if (extension_type_ == RtpExtension::kTransportSequenceNumber) {
+ remb = false;
+ transport_cc = true;
+ send_config->rtp.extensions.push_back(RtpExtension(
+ extension_type_.c_str(), kTransportSequenceNumberExtensionId));
+ } else {
+ remb = true;
+ transport_cc = false;
+ send_config->rtp.extensions.push_back(RtpExtension(
+ extension_type_.c_str(), kTransmissionTimeOffsetExtensionId));
+ }
+
+ send_config->rtp.nack.rtp_history_ms = test::CallTest::kNackRtpHistoryMs;
+ send_config->rtp.ssrcs = video_ssrcs_;
+ if (rtx_) {
+ send_config->rtp.rtx.payload_type = test::CallTest::kSendRtxPayloadType;
+ send_config->rtp.rtx.ssrcs = video_rtx_ssrcs_;
+ }
+ if (red_) {
+ send_config->rtp.fec.ulpfec_payload_type =
+ test::CallTest::kUlpfecPayloadType;
+ send_config->rtp.fec.red_payload_type = test::CallTest::kRedPayloadType;
+ }
+
+ size_t i = 0;
+ for (VideoReceiveStream::Config& recv_config : *receive_configs) {
+ recv_config.rtp.remb = remb;
+ recv_config.rtp.transport_cc = transport_cc;
+ recv_config.rtp.extensions = send_config->rtp.extensions;
+
+ recv_config.rtp.remote_ssrc = video_ssrcs_[i];
+ recv_config.rtp.nack.rtp_history_ms = send_config->rtp.nack.rtp_history_ms;
+
+ if (red_) {
+ recv_config.rtp.fec.red_payload_type =
+ send_config->rtp.fec.red_payload_type;
+ recv_config.rtp.fec.ulpfec_payload_type =
+ send_config->rtp.fec.ulpfec_payload_type;
+ }
+
+ if (rtx_) {
+ recv_config.rtp.rtx[send_config->encoder_settings.payload_type].ssrc =
+ video_rtx_ssrcs_[i];
+ recv_config.rtp.rtx[send_config->encoder_settings.payload_type]
+ .payload_type = send_config->rtp.rtx.payload_type;
+ }
+ ++i;
+ }
+}
+
+void RampUpTester::ModifyAudioConfigs(
+ AudioSendStream::Config* send_config,
+ std::vector<AudioReceiveStream::Config>* receive_configs) {
+ if (num_audio_streams_ == 0)
+ return;
+
+ EXPECT_NE(RtpExtension::kTOffset, extension_type_)
+ << "Audio BWE not supported with toffset.";
+
+ send_config->rtp.ssrc = audio_ssrcs_[0];
+ send_config->rtp.extensions.clear();
+
+ bool transport_cc = false;
+ if (extension_type_ == RtpExtension::kAbsSendTime) {
+ transport_cc = false;
+ send_config->rtp.extensions.push_back(
+ RtpExtension(extension_type_.c_str(), kAbsSendTimeExtensionId));
+ } else if (extension_type_ == RtpExtension::kTransportSequenceNumber) {
+ transport_cc = true;
+ send_config->rtp.extensions.push_back(RtpExtension(
+ extension_type_.c_str(), kTransportSequenceNumberExtensionId));
+ }
+
+ for (AudioReceiveStream::Config& recv_config : *receive_configs) {
+ recv_config.combined_audio_video_bwe = true;
+ recv_config.rtp.transport_cc = transport_cc;
+ recv_config.rtp.extensions = send_config->rtp.extensions;
+ recv_config.rtp.remote_ssrc = send_config->rtp.ssrc;
+ }
+}
+
+void RampUpTester::OnCallsCreated(Call* sender_call, Call* receiver_call) {
+ sender_call_ = sender_call;
+}
+
+bool RampUpTester::BitrateStatsPollingThread(void* obj) {
+ return static_cast<RampUpTester*>(obj)->PollStats();
+}
+
+bool RampUpTester::PollStats() {
+ if (sender_call_) {
+ Call::Stats stats = sender_call_->GetStats();
+
+ RTC_DCHECK_GT(expected_bitrate_bps_, 0);
+ if (!start_bitrate_verified_ && start_bitrate_bps_ != 0) {
+ // For tests with an explicitly set start bitrate, verify that the first
+ // bitrate estimate is close to the start bitrate and lower than the
+ // test target bitrate. This checks that a call respects the configured
+ // start bitrate; due to the BWE implementation we can't guarantee that
+ // the first estimate really is as high as the start bitrate.
+ EXPECT_GT(stats.send_bandwidth_bps, 0.9 * start_bitrate_bps_);
+ start_bitrate_verified_ = true;
+ }
+ if (stats.send_bandwidth_bps >= expected_bitrate_bps_) {
+ ramp_up_finished_ms_ = clock_->TimeInMilliseconds();
+ observation_complete_.Set();
+ }
+ }
+
+ return !event_.Wait(kPollIntervalMs);
+}
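The polling idiom in PollStats() above is compact: event_.Wait(kPollIntervalMs) doubles as the poll-interval sleep, and once the event is set (from ~RampUpTester()) Wait() returns true, so `return !event_.Wait(...)` tells the polling thread to stop. A std-only, runnable analogue of the same idiom, with made-up values:

#include <chrono>
#include <condition_variable>
#include <cstdio>
#include <mutex>
#include <thread>

int main() {
  std::mutex m;
  std::condition_variable cv;
  bool quit = false;

  std::thread poller([&] {
    std::unique_lock<std::mutex> lock(m);
    int polls = 0;
    // Equivalent of looping on `!event_.Wait(kPollIntervalMs)`.
    while (!cv.wait_for(lock, std::chrono::milliseconds(20),
                        [&] { return quit; })) {
      ++polls;  // ... gather stats here ...
    }
    std::printf("polled %d times before quit\n", polls);
  });

  std::this_thread::sleep_for(std::chrono::milliseconds(100));
  {
    std::lock_guard<std::mutex> lock(m);
    quit = true;  // Like event_.Set() in the destructor.
  }
  cv.notify_one();
  poller.join();
}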
+
+void RampUpTester::ReportResult(const std::string& measurement,
+ size_t value,
+ const std::string& units) const {
+ webrtc::test::PrintResult(
+ measurement, "",
+ ::testing::UnitTest::GetInstance()->current_test_info()->name(), value,
+ units, false);
+}
+
+void RampUpTester::AccumulateStats(const VideoSendStream::StreamStats& stream,
+ size_t* total_packets_sent,
+ size_t* total_sent,
+ size_t* padding_sent,
+ size_t* media_sent) const {
+ *total_packets_sent += stream.rtp_stats.transmitted.packets +
+ stream.rtp_stats.retransmitted.packets +
+ stream.rtp_stats.fec.packets;
+ *total_sent += stream.rtp_stats.transmitted.TotalBytes() +
+ stream.rtp_stats.retransmitted.TotalBytes() +
+ stream.rtp_stats.fec.TotalBytes();
+ *padding_sent += stream.rtp_stats.transmitted.padding_bytes +
+ stream.rtp_stats.retransmitted.padding_bytes +
+ stream.rtp_stats.fec.padding_bytes;
+ *media_sent += stream.rtp_stats.MediaPayloadBytes();
+}
+
+void RampUpTester::TriggerTestDone() {
+ RTC_DCHECK_GE(test_start_ms_, 0);
+
+ // TODO(holmer): Add audio send stats here too when those APIs are available.
+ VideoSendStream::Stats send_stats = send_stream_->GetStats();
+
+ size_t total_packets_sent = 0;
+ size_t total_sent = 0;
+ size_t padding_sent = 0;
+ size_t media_sent = 0;
+ for (uint32_t ssrc : video_ssrcs_) {
+ AccumulateStats(send_stats.substreams[ssrc], &total_packets_sent,
+ &total_sent, &padding_sent, &media_sent);
+ }
+
+ size_t rtx_total_packets_sent = 0;
+ size_t rtx_total_sent = 0;
+ size_t rtx_padding_sent = 0;
+ size_t rtx_media_sent = 0;
+ for (uint32_t rtx_ssrc : video_rtx_ssrcs_) {
+ AccumulateStats(send_stats.substreams[rtx_ssrc], &rtx_total_packets_sent,
+ &rtx_total_sent, &rtx_padding_sent, &rtx_media_sent);
+ }
+
+ ReportResult("ramp-up-total-packets-sent", total_packets_sent, "packets");
+ ReportResult("ramp-up-total-sent", total_sent, "bytes");
+ ReportResult("ramp-up-media-sent", media_sent, "bytes");
+ ReportResult("ramp-up-padding-sent", padding_sent, "bytes");
+ ReportResult("ramp-up-rtx-total-packets-sent", rtx_total_packets_sent,
+ "packets");
+ ReportResult("ramp-up-rtx-total-sent", rtx_total_sent, "bytes");
+ ReportResult("ramp-up-rtx-media-sent", rtx_media_sent, "bytes");
+ ReportResult("ramp-up-rtx-padding-sent", rtx_padding_sent, "bytes");
+ if (ramp_up_finished_ms_ >= 0) {
+ ReportResult("ramp-up-time", ramp_up_finished_ms_ - test_start_ms_,
+ "milliseconds");
+ }
+ ReportResult("ramp-up-average-network-latency",
+ send_transport_->GetAverageDelayMs(), "milliseconds");
+}
+
+void RampUpTester::PerformTest() {
+ test_start_ms_ = clock_->TimeInMilliseconds();
+ poller_thread_.Start();
+ EXPECT_TRUE(Wait()) << "Timed out while waiting for ramp-up to complete.";
+ TriggerTestDone();
+ poller_thread_.Stop();
+}
+
+RampUpDownUpTester::RampUpDownUpTester(size_t num_video_streams,
+ size_t num_audio_streams,
+ unsigned int start_bitrate_bps,
+ const std::string& extension_type,
+ bool rtx,
+ bool red)
+ : RampUpTester(num_video_streams,
+ num_audio_streams,
+ start_bitrate_bps,
+ extension_type,
+ rtx,
+ red),
+ test_state_(kFirstRampup),
+ state_start_ms_(clock_->TimeInMilliseconds()),
+ interval_start_ms_(clock_->TimeInMilliseconds()),
+ sent_bytes_(0) {
+ forward_transport_config_.link_capacity_kbps = kHighBandwidthLimitBps / 1000;
+}
+
+RampUpDownUpTester::~RampUpDownUpTester() {}
+
+bool RampUpDownUpTester::PollStats() {
+ if (send_stream_) {
+ webrtc::VideoSendStream::Stats stats = send_stream_->GetStats();
+ int transmit_bitrate_bps = 0;
+ for (auto it : stats.substreams) {
+ transmit_bitrate_bps += it.second.total_bitrate_bps;
+ }
+
+ EvolveTestState(transmit_bitrate_bps, stats.suspended);
+ }
+
+ return !event_.Wait(kPollIntervalMs);
+}
+
+Call::Config RampUpDownUpTester::GetReceiverCallConfig() {
+ Call::Config config;
+ config.bitrate_config.min_bitrate_bps = 10000;
+ return config;
+}
+
+std::string RampUpDownUpTester::GetModifierString() const {
+ std::string str("_");
+ if (num_video_streams_ > 0) {
+ std::ostringstream s;
+ s << num_video_streams_;
+ str += s.str();
+ str += "stream";
+ str += (num_video_streams_ > 1 ? "s" : "");
+ str += "_";
+ }
+ if (num_audio_streams_ > 0) {
+ std::ostringstream s;
+ s << num_audio_streams_;
+ str += s.str();
+ str += "stream";
+ str += (num_audio_streams_ > 1 ? "s" : "");
+ str += "_";
+ }
+ str += (rtx_ ? "" : "no");
+ str += "rtx";
+ return str;
+}
+
+void RampUpDownUpTester::EvolveTestState(int bitrate_bps, bool suspended) {
+ int64_t now = clock_->TimeInMilliseconds();
+ switch (test_state_) {
+ case kFirstRampup: {
+ EXPECT_FALSE(suspended);
+ if (bitrate_bps > kExpectedHighBitrateBps) {
+ // The first ramp-up has reached the target bitrate. Change the
+ // channel limit, and move to the next test state.
+ forward_transport_config_.link_capacity_kbps =
+ kLowBandwidthLimitBps / 1000;
+ send_transport_->SetConfig(forward_transport_config_);
+ test_state_ = kLowRate;
+ webrtc::test::PrintResult("ramp_up_down_up", GetModifierString(),
+ "first_rampup", now - state_start_ms_, "ms",
+ false);
+ state_start_ms_ = now;
+ interval_start_ms_ = now;
+ sent_bytes_ = 0;
+ }
+ break;
+ }
+ case kLowRate: {
+ if (bitrate_bps < kExpectedLowBitrateBps && suspended) {
+ // The ramp-down was successful. Change the channel limit back to a
+ // high value, and move to the next test state.
+ forward_transport_config_.link_capacity_kbps =
+ kHighBandwidthLimitBps / 1000;
+ send_transport_->SetConfig(forward_transport_config_);
+ test_state_ = kSecondRampup;
+ webrtc::test::PrintResult("ramp_up_down_up", GetModifierString(),
+ "rampdown", now - state_start_ms_, "ms",
+ false);
+ state_start_ms_ = now;
+ interval_start_ms_ = now;
+ sent_bytes_ = 0;
+ }
+ break;
+ }
+ case kSecondRampup: {
+ if (bitrate_bps > kExpectedHighBitrateBps && !suspended) {
+ webrtc::test::PrintResult("ramp_up_down_up", GetModifierString(),
+ "second_rampup", now - state_start_ms_, "ms",
+ false);
+ ReportResult("ramp-up-down-up-average-network-latency",
+ send_transport_->GetAverageDelayMs(), "milliseconds");
+ observation_complete_.Set();
+ }
+ break;
+ }
+ }
+}
+
+class RampUpTest : public test::CallTest {
+ public:
+ RampUpTest() {}
+
+ virtual ~RampUpTest() {
+ EXPECT_EQ(nullptr, video_send_stream_);
+ EXPECT_TRUE(video_receive_streams_.empty());
+ }
+};
+
+TEST_F(RampUpTest, SingleStream) {
+ RampUpTester test(1, 0, 0, RtpExtension::kTOffset, false, false);
+ RunBaseTest(&test);
+}
+
+TEST_F(RampUpTest, Simulcast) {
+ RampUpTester test(3, 0, 0, RtpExtension::kTOffset, false, false);
+ RunBaseTest(&test);
+}
+
+TEST_F(RampUpTest, SimulcastWithRtx) {
+ RampUpTester test(3, 0, 0, RtpExtension::kTOffset, true, false);
+ RunBaseTest(&test);
+}
+
+TEST_F(RampUpTest, SimulcastByRedWithRtx) {
+ RampUpTester test(3, 0, 0, RtpExtension::kTOffset, true, true);
+ RunBaseTest(&test);
+}
+
+TEST_F(RampUpTest, SingleStreamWithHighStartBitrate) {
+ RampUpTester test(1, 0, 0.9 * kSingleStreamTargetBps, RtpExtension::kTOffset,
+ false, false);
+ RunBaseTest(&test);
+}
+
+// Disabled on Mac due to flakiness, see
+// https://bugs.chromium.org/p/webrtc/issues/detail?id=5407
+#ifndef WEBRTC_MAC
+
+static const uint32_t kStartBitrateBps = 60000;
+
+TEST_F(RampUpTest, UpDownUpOneStream) {
+ RampUpDownUpTester test(1, 0, kStartBitrateBps, RtpExtension::kAbsSendTime,
+ false, false);
+ RunBaseTest(&test);
+}
+
+TEST_F(RampUpTest, UpDownUpThreeStreams) {
+ RampUpDownUpTester test(3, 0, kStartBitrateBps, RtpExtension::kAbsSendTime,
+ false, false);
+ RunBaseTest(&test);
+}
+
+TEST_F(RampUpTest, UpDownUpOneStreamRtx) {
+ RampUpDownUpTester test(1, 0, kStartBitrateBps, RtpExtension::kAbsSendTime,
+ true, false);
+ RunBaseTest(&test);
+}
+
+TEST_F(RampUpTest, UpDownUpThreeStreamsRtx) {
+ RampUpDownUpTester test(3, 0, kStartBitrateBps, RtpExtension::kAbsSendTime,
+ true, false);
+ RunBaseTest(&test);
+}
+
+TEST_F(RampUpTest, UpDownUpOneStreamByRedRtx) {
+ RampUpDownUpTester test(1, 0, kStartBitrateBps, RtpExtension::kAbsSendTime,
+ true, true);
+ RunBaseTest(&test);
+}
+
+TEST_F(RampUpTest, UpDownUpThreeStreamsByRedRtx) {
+ RampUpDownUpTester test(3, 0, kStartBitrateBps, RtpExtension::kAbsSendTime,
+ true, true);
+ RunBaseTest(&test);
+}
+
+TEST_F(RampUpTest, SendSideVideoUpDownUpRtx) {
+ RampUpDownUpTester test(3, 0, kStartBitrateBps,
+ RtpExtension::kTransportSequenceNumber, true, false);
+ RunBaseTest(&test);
+}
+
+// TODO(holmer): Enable when audio bitrates are included in the bitrate
+// allocation.
+TEST_F(RampUpTest, DISABLED_SendSideAudioVideoUpDownUpRtx) {
+ RampUpDownUpTester test(3, 1, kStartBitrateBps,
+ RtpExtension::kTransportSequenceNumber, true, false);
+ RunBaseTest(&test);
+}
+
+#endif
+
+TEST_F(RampUpTest, AbsSendTimeSingleStream) {
+ RampUpTester test(1, 0, 0, RtpExtension::kAbsSendTime, false, false);
+ RunBaseTest(&test);
+}
+
+TEST_F(RampUpTest, AbsSendTimeSimulcast) {
+ RampUpTester test(3, 0, 0, RtpExtension::kAbsSendTime, false, false);
+ RunBaseTest(&test);
+}
+
+TEST_F(RampUpTest, AbsSendTimeSimulcastWithRtx) {
+ RampUpTester test(3, 0, 0, RtpExtension::kAbsSendTime, true, false);
+ RunBaseTest(&test);
+}
+
+TEST_F(RampUpTest, AbsSendTimeSimulcastByRedWithRtx) {
+ RampUpTester test(3, 0, 0, RtpExtension::kAbsSendTime, true, true);
+ RunBaseTest(&test);
+}
+
+TEST_F(RampUpTest, AbsSendTimeSingleStreamWithHighStartBitrate) {
+ RampUpTester test(1, 0, 0.9 * kSingleStreamTargetBps,
+ RtpExtension::kAbsSendTime, false, false);
+ RunBaseTest(&test);
+}
+
+TEST_F(RampUpTest, TransportSequenceNumberSingleStream) {
+ RampUpTester test(1, 0, 0, RtpExtension::kTransportSequenceNumber, false,
+ false);
+ RunBaseTest(&test);
+}
+
+TEST_F(RampUpTest, TransportSequenceNumberSimulcast) {
+ RampUpTester test(3, 0, 0, RtpExtension::kTransportSequenceNumber, false,
+ false);
+ RunBaseTest(&test);
+}
+
+TEST_F(RampUpTest, TransportSequenceNumberSimulcastWithRtx) {
+ RampUpTester test(3, 0, 0, RtpExtension::kTransportSequenceNumber, true,
+ false);
+ RunBaseTest(&test);
+}
+
+TEST_F(RampUpTest, AudioVideoTransportSequenceNumberSimulcastWithRtx) {
+ RampUpTester test(3, 1, 0, RtpExtension::kTransportSequenceNumber, true,
+ false);
+ RunBaseTest(&test);
+}
+
+TEST_F(RampUpTest, TransportSequenceNumberSimulcastByRedWithRtx) {
+ RampUpTester test(3, 0, 0, RtpExtension::kTransportSequenceNumber, true,
+ true);
+ RunBaseTest(&test);
+}
+
+TEST_F(RampUpTest, TransportSequenceNumberSingleStreamWithHighStartBitrate) {
+ RampUpTester test(1, 0, 0.9 * kSingleStreamTargetBps,
+ RtpExtension::kTransportSequenceNumber, false, false);
+ RunBaseTest(&test);
+}
+} // namespace webrtc
diff --git a/webrtc/call/rampup_tests.h b/webrtc/call/rampup_tests.h
new file mode 100644
index 0000000000..31a0a0296e
--- /dev/null
+++ b/webrtc/call/rampup_tests.h
@@ -0,0 +1,137 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_CALL_RAMPUP_TESTS_H_
+#define WEBRTC_CALL_RAMPUP_TESTS_H_
+
+#include <map>
+#include <string>
+#include <vector>
+
+#include "webrtc/base/event.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/call.h"
+#include "webrtc/test/call_test.h"
+
+namespace webrtc {
+
+static const int kTransmissionTimeOffsetExtensionId = 6;
+static const int kAbsSendTimeExtensionId = 7;
+static const int kTransportSequenceNumberExtensionId = 8;
+static const unsigned int kSingleStreamTargetBps = 1000000;
+
+class Clock;
+
+class RampUpTester : public test::EndToEndTest {
+ public:
+ RampUpTester(size_t num_video_streams,
+ size_t num_audio_streams,
+ unsigned int start_bitrate_bps,
+ const std::string& extension_type,
+ bool rtx,
+ bool red);
+ ~RampUpTester() override;
+
+ size_t GetNumVideoStreams() const override;
+ size_t GetNumAudioStreams() const override;
+
+ void PerformTest() override;
+
+ protected:
+ virtual bool PollStats();
+
+ void AccumulateStats(const VideoSendStream::StreamStats& stream,
+ size_t* total_packets_sent,
+ size_t* total_sent,
+ size_t* padding_sent,
+ size_t* media_sent) const;
+
+ void ReportResult(const std::string& measurement,
+ size_t value,
+ const std::string& units) const;
+ void TriggerTestDone();
+
+ rtc::Event event_;
+ Clock* const clock_;
+ FakeNetworkPipe::Config forward_transport_config_;
+ const size_t num_video_streams_;
+ const size_t num_audio_streams_;
+ const bool rtx_;
+ const bool red_;
+ VideoSendStream* send_stream_;
+ test::PacketTransport* send_transport_;
+
+ private:
+ typedef std::map<uint32_t, uint32_t> SsrcMap;
+
+ Call::Config GetSenderCallConfig() override;
+ void OnVideoStreamsCreated(
+ VideoSendStream* send_stream,
+ const std::vector<VideoReceiveStream*>& receive_streams) override;
+ test::PacketTransport* CreateSendTransport(Call* sender_call) override;
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override;
+ void ModifyAudioConfigs(
+ AudioSendStream::Config* send_config,
+ std::vector<AudioReceiveStream::Config>* receive_configs) override;
+ void OnCallsCreated(Call* sender_call, Call* receiver_call) override;
+
+ static bool BitrateStatsPollingThread(void* obj);
+
+ const int start_bitrate_bps_;
+ bool start_bitrate_verified_;
+ int expected_bitrate_bps_;
+ int64_t test_start_ms_;
+ int64_t ramp_up_finished_ms_;
+
+ const std::string extension_type_;
+ std::vector<uint32_t> video_ssrcs_;
+ std::vector<uint32_t> video_rtx_ssrcs_;
+ std::vector<uint32_t> audio_ssrcs_;
+ SsrcMap rtx_ssrc_map_;
+
+ rtc::PlatformThread poller_thread_;
+ Call* sender_call_;
+};
+
+class RampUpDownUpTester : public RampUpTester {
+ public:
+ RampUpDownUpTester(size_t num_video_streams,
+ size_t num_audio_streams,
+ unsigned int start_bitrate_bps,
+ const std::string& extension_type,
+ bool rtx,
+ bool red);
+ ~RampUpDownUpTester() override;
+
+ protected:
+ bool PollStats() override;
+
+ private:
+ static const int kHighBandwidthLimitBps = 80000;
+ static const int kExpectedHighBitrateBps = 60000;
+ static const int kLowBandwidthLimitBps = 20000;
+ static const int kExpectedLowBitrateBps = 20000;
+ enum TestStates { kFirstRampup, kLowRate, kSecondRampup };
+
+ Call::Config GetReceiverCallConfig() override;
+
+ std::string GetModifierString() const;
+ void EvolveTestState(int bitrate_bps, bool suspended);
+
+ TestStates test_state_;
+ int64_t state_start_ms_;
+ int64_t interval_start_ms_;
+ int sent_bytes_;
+};
+} // namespace webrtc
+#endif // WEBRTC_CALL_RAMPUP_TESTS_H_
diff --git a/webrtc/call/rtc_event_log.cc b/webrtc/call/rtc_event_log.cc
index 550b556e80..9f592ce479 100644
--- a/webrtc/call/rtc_event_log.cc
+++ b/webrtc/call/rtc_event_log.cc
@@ -17,7 +17,9 @@
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/call.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
#include "webrtc/system_wrappers/include/clock.h"
#include "webrtc/system_wrappers/include/file_wrapper.h"
@@ -54,6 +56,9 @@ class RtcEventLogImpl final : public RtcEventLog {
const uint8_t* packet,
size_t length) override {}
void LogAudioPlayout(uint32_t ssrc) override {}
+ void LogBwePacketLossEvent(int32_t bitrate,
+ uint8_t fraction_loss,
+ int32_t total_packets) override {}
};
#else // ENABLE_RTC_EVENT_LOG is defined
@@ -78,6 +83,9 @@ class RtcEventLogImpl final : public RtcEventLog {
const uint8_t* packet,
size_t length) override;
void LogAudioPlayout(uint32_t ssrc) override;
+ void LogBwePacketLossEvent(int32_t bitrate,
+ uint8_t fraction_loss,
+ int32_t total_packets) override;
private:
// Starts logging. This function assumes the file_ has been opened successfully
@@ -254,8 +262,7 @@ void RtcEventLogImpl::LogVideoReceiveStreamConfig(
rtc::CritScope lock(&crit_);
rtclog::Event event;
- const int64_t timestamp = clock_->TimeInMicroseconds();
- event.set_timestamp_us(timestamp);
+ event.set_timestamp_us(clock_->TimeInMicroseconds());
event.set_type(rtclog::Event::VIDEO_RECEIVER_CONFIG_EVENT);
rtclog::VideoReceiveConfig* receiver_config =
@@ -264,9 +271,6 @@ void RtcEventLogImpl::LogVideoReceiveStreamConfig(
receiver_config->set_local_ssrc(config.rtp.local_ssrc);
receiver_config->set_rtcp_mode(ConvertRtcpMode(config.rtp.rtcp_mode));
-
- receiver_config->set_receiver_reference_time_report(
- config.rtp.rtcp_xr.receiver_reference_time_report);
receiver_config->set_remb(config.rtp.remb);
for (const auto& kv : config.rtp.rtx) {
@@ -296,8 +300,7 @@ void RtcEventLogImpl::LogVideoSendStreamConfig(
rtc::CritScope lock(&crit_);
rtclog::Event event;
- const int64_t timestamp = clock_->TimeInMicroseconds();
- event.set_timestamp_us(timestamp);
+ event.set_timestamp_us(clock_->TimeInMicroseconds());
event.set_type(rtclog::Event::VIDEO_SENDER_CONFIG_EVENT);
rtclog::VideoSendConfig* sender_config = event.mutable_video_sender_config();
@@ -318,8 +321,6 @@ void RtcEventLogImpl::LogVideoSendStreamConfig(
}
sender_config->set_rtx_payload_type(config.rtp.rtx.payload_type);
- sender_config->set_c_name(config.rtp.c_name);
-
rtclog::EncoderConfig* encoder = sender_config->mutable_encoder();
encoder->set_name(config.encoder_settings.payload_name);
encoder->set_payload_type(config.encoder_settings.payload_type);
@@ -348,8 +349,7 @@ void RtcEventLogImpl::LogRtpHeader(bool incoming,
rtc::CritScope lock(&crit_);
rtclog::Event rtp_event;
- const int64_t timestamp = clock_->TimeInMicroseconds();
- rtp_event.set_timestamp_us(timestamp);
+ rtp_event.set_timestamp_us(clock_->TimeInMicroseconds());
rtp_event.set_type(rtclog::Event::RTP_EVENT);
rtp_event.mutable_rtp_packet()->set_incoming(incoming);
rtp_event.mutable_rtp_packet()->set_type(ConvertMediaType(media_type));
@@ -364,33 +364,89 @@ void RtcEventLogImpl::LogRtcpPacket(bool incoming,
size_t length) {
rtc::CritScope lock(&crit_);
rtclog::Event rtcp_event;
- const int64_t timestamp = clock_->TimeInMicroseconds();
- rtcp_event.set_timestamp_us(timestamp);
+ rtcp_event.set_timestamp_us(clock_->TimeInMicroseconds());
rtcp_event.set_type(rtclog::Event::RTCP_EVENT);
rtcp_event.mutable_rtcp_packet()->set_incoming(incoming);
rtcp_event.mutable_rtcp_packet()->set_type(ConvertMediaType(media_type));
- rtcp_event.mutable_rtcp_packet()->set_packet_data(packet, length);
+
+ RTCPUtility::RtcpCommonHeader header;
+ const uint8_t* block_begin = packet;
+ const uint8_t* packet_end = packet + length;
+  RTC_DCHECK_LE(length, IP_PACKET_SIZE);
+ uint8_t buffer[IP_PACKET_SIZE];
+ uint32_t buffer_length = 0;
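+  // Walk the compound RTCP packet block by block, copying into |buffer| only
+  // the block types we want to log; SDES, APP and unknown blocks are skipped.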
+ while (block_begin < packet_end) {
+ if (!RtcpParseCommonHeader(block_begin, packet_end - block_begin,
+ &header)) {
+ break; // Incorrect message header.
+ }
+ uint32_t block_size = header.BlockSize();
+ switch (header.packet_type) {
+ case RTCPUtility::PT_SR:
+ FALLTHROUGH();
+ case RTCPUtility::PT_RR:
+ FALLTHROUGH();
+ case RTCPUtility::PT_BYE:
+ FALLTHROUGH();
+ case RTCPUtility::PT_IJ:
+ FALLTHROUGH();
+ case RTCPUtility::PT_RTPFB:
+ FALLTHROUGH();
+ case RTCPUtility::PT_PSFB:
+ FALLTHROUGH();
+ case RTCPUtility::PT_XR:
+        // We log sender reports, receiver reports, bye messages,
+ // inter-arrival jitter, third-party loss reports, payload-specific
+ // feedback and extended reports.
+ memcpy(buffer + buffer_length, block_begin, block_size);
+ buffer_length += block_size;
+ break;
+ case RTCPUtility::PT_SDES:
+ FALLTHROUGH();
+ case RTCPUtility::PT_APP:
+ FALLTHROUGH();
+ default:
+        // We don't log sender descriptions, application-defined messages,
+ // or message blocks of unknown type.
+ break;
+ }
+
+ block_begin += block_size;
+ }
+ rtcp_event.mutable_rtcp_packet()->set_packet_data(buffer, buffer_length);
HandleEvent(&rtcp_event);
}
void RtcEventLogImpl::LogAudioPlayout(uint32_t ssrc) {
rtc::CritScope lock(&crit_);
rtclog::Event event;
- const int64_t timestamp = clock_->TimeInMicroseconds();
- event.set_timestamp_us(timestamp);
+ event.set_timestamp_us(clock_->TimeInMicroseconds());
event.set_type(rtclog::Event::AUDIO_PLAYOUT_EVENT);
auto playout_event = event.mutable_audio_playout_event();
playout_event->set_local_ssrc(ssrc);
HandleEvent(&event);
}
+void RtcEventLogImpl::LogBwePacketLossEvent(int32_t bitrate,
+ uint8_t fraction_loss,
+ int32_t total_packets) {
+ rtc::CritScope lock(&crit_);
+ rtclog::Event event;
+ event.set_timestamp_us(clock_->TimeInMicroseconds());
+ event.set_type(rtclog::Event::BWE_PACKET_LOSS_EVENT);
+ auto bwe_event = event.mutable_bwe_packet_loss_event();
+ bwe_event->set_bitrate(bitrate);
+ bwe_event->set_fraction_loss(fraction_loss);
+ bwe_event->set_total_packets(total_packets);
+ HandleEvent(&event);
+}
+
void RtcEventLogImpl::StopLoggingLocked() {
if (currently_logging_) {
currently_logging_ = false;
// Create a LogEnd event
rtclog::Event event;
- int64_t timestamp = clock_->TimeInMicroseconds();
- event.set_timestamp_us(timestamp);
+ event.set_timestamp_us(clock_->TimeInMicroseconds());
event.set_type(rtclog::Event::LOG_END);
// Store the event and close the file
RTC_DCHECK(file_->Open());
diff --git a/webrtc/call/rtc_event_log.h b/webrtc/call/rtc_event_log.h
index 85d7525752..489687a195 100644
--- a/webrtc/call/rtc_event_log.h
+++ b/webrtc/call/rtc_event_log.h
@@ -77,6 +77,11 @@ class RtcEventLog {
// Logs an audio playout event
virtual void LogAudioPlayout(uint32_t ssrc) = 0;
+ // Logs a bitrate update from the bandwidth estimator based on packet loss.
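+  // |fraction_loss| follows the RTCP convention, floor(256 * lost / total),
+  // so the value range is [0, 255] (see rtc_event_log.proto).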
+ virtual void LogBwePacketLossEvent(int32_t bitrate,
+ uint8_t fraction_loss,
+ int32_t total_packets) = 0;
+
// Reads an RtcEventLog file and returns true when reading was successful.
// The result is stored in the given EventStream object.
static bool ParseRtcEventLog(const std::string& file_name,
diff --git a/webrtc/call/rtc_event_log.proto b/webrtc/call/rtc_event_log.proto
index 6bdea7bd2f..b14306e362 100644
--- a/webrtc/call/rtc_event_log.proto
+++ b/webrtc/call/rtc_event_log.proto
@@ -34,10 +34,12 @@ message Event {
RTP_EVENT = 3;
RTCP_EVENT = 4;
AUDIO_PLAYOUT_EVENT = 5;
- VIDEO_RECEIVER_CONFIG_EVENT = 6;
- VIDEO_SENDER_CONFIG_EVENT = 7;
- AUDIO_RECEIVER_CONFIG_EVENT = 8;
- AUDIO_SENDER_CONFIG_EVENT = 9;
+ BWE_PACKET_LOSS_EVENT = 6;
+ BWE_PACKET_DELAY_EVENT = 7;
+ VIDEO_RECEIVER_CONFIG_EVENT = 8;
+ VIDEO_SENDER_CONFIG_EVENT = 9;
+ AUDIO_RECEIVER_CONFIG_EVENT = 10;
+ AUDIO_SENDER_CONFIG_EVENT = 11;
}
// required - Indicates the type of this event
@@ -52,17 +54,20 @@ message Event {
// optional - but required if type == AUDIO_PLAYOUT_EVENT
optional AudioPlayoutEvent audio_playout_event = 5;
+ // optional - but required if type == BWE_PACKET_LOSS_EVENT
+ optional BwePacketLossEvent bwe_packet_loss_event = 6;
+
// optional - but required if type == VIDEO_RECEIVER_CONFIG_EVENT
- optional VideoReceiveConfig video_receiver_config = 6;
+ optional VideoReceiveConfig video_receiver_config = 8;
// optional - but required if type == VIDEO_SENDER_CONFIG_EVENT
- optional VideoSendConfig video_sender_config = 7;
+ optional VideoSendConfig video_sender_config = 9;
// optional - but required if type == AUDIO_RECEIVER_CONFIG_EVENT
- optional AudioReceiveConfig audio_receiver_config = 8;
+ optional AudioReceiveConfig audio_receiver_config = 10;
// optional - but required if type == AUDIO_SENDER_CONFIG_EVENT
- optional AudioSendConfig audio_sender_config = 9;
+ optional AudioSendConfig audio_sender_config = 11;
}
@@ -99,6 +104,19 @@ message AudioPlayoutEvent {
optional uint32 local_ssrc = 2;
}
+message BwePacketLossEvent {
+ // required - Bandwidth estimate (in bps) after the update.
+ optional int32 bitrate = 1;
+
+ // required - Fraction of lost packets since last receiver report
+ // computed as floor( 256 * (#lost_packets / #total_packets) ).
+ // The possible values range from 0 to 255.
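+  // For example, 10 lost packets out of 100 total gives
+  // floor(256 * 10 / 100) = 25.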
+ optional uint32 fraction_loss = 2;
+
+ // TODO(terelius): Is this really needed? Remove or make optional?
+ // required - Total number of packets that the BWE update is based on.
+ optional int32 total_packets = 3;
+}
// TODO(terelius): Video and audio streams could in principle share SSRC,
// so identifying a stream based only on SSRC might not work.
@@ -119,20 +137,17 @@ message VideoReceiveConfig {
// required - RTCP mode to use.
optional RtcpMode rtcp_mode = 3;
- // required - Extended RTCP settings.
- optional bool receiver_reference_time_report = 4;
-
// required - Receiver estimated maximum bandwidth.
- optional bool remb = 5;
+ optional bool remb = 4;
// Map from video RTP payload type -> RTX config.
- repeated RtxMap rtx_map = 6;
+ repeated RtxMap rtx_map = 5;
// RTP header extensions used for the received stream.
- repeated RtpHeaderExtension header_extensions = 7;
+ repeated RtpHeaderExtension header_extensions = 6;
// List of decoders associated with the stream.
- repeated DecoderConfig decoders = 8;
+ repeated DecoderConfig decoders = 7;
}
@@ -142,7 +157,7 @@ message DecoderConfig {
optional string name = 1;
// required
- optional sint32 payload_type = 2;
+ optional int32 payload_type = 2;
}
@@ -152,7 +167,7 @@ message RtpHeaderExtension {
optional string name = 1;
// required
- optional sint32 id = 2;
+ optional int32 id = 2;
}
@@ -163,13 +178,13 @@ message RtxConfig {
optional uint32 rtx_ssrc = 1;
// required - Payload type to use for the RTX stream.
- optional sint32 rtx_payload_type = 2;
+ optional int32 rtx_payload_type = 2;
}
message RtxMap {
// required
- optional sint32 payload_type = 1;
+ optional int32 payload_type = 1;
// required
optional RtxConfig config = 2;
@@ -189,13 +204,10 @@ message VideoSendConfig {
repeated uint32 rtx_ssrcs = 3;
// required if rtx_ssrcs is used - Payload type for retransmitted packets.
- optional sint32 rtx_payload_type = 4;
-
- // required - Canonical end-point identifier.
- optional string c_name = 5;
+ optional int32 rtx_payload_type = 4;
// required - Encoder associated with the stream.
- optional EncoderConfig encoder = 6;
+ optional EncoderConfig encoder = 5;
}
@@ -205,7 +217,7 @@ message EncoderConfig {
optional string name = 1;
// required
- optional sint32 payload_type = 2;
+ optional int32 payload_type = 2;
}
diff --git a/webrtc/call/rtc_event_log_unittest.cc b/webrtc/call/rtc_event_log_unittest.cc
index a4fdd13512..f590f669a2 100644
--- a/webrtc/call/rtc_event_log_unittest.cc
+++ b/webrtc/call/rtc_event_log_unittest.cc
@@ -10,22 +10,23 @@
#ifdef ENABLE_RTC_EVENT_LOG
-#include <stdio.h>
#include <string>
+#include <utility>
#include <vector>
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/buffer.h"
#include "webrtc/base/checks.h"
+#include "webrtc/base/random.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/thread.h"
#include "webrtc/call.h"
#include "webrtc/call/rtc_event_log.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_sender.h"
#include "webrtc/system_wrappers/include/clock.h"
#include "webrtc/test/test_suite.h"
#include "webrtc/test/testsupport/fileutils.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
// Files generated at build-time by the protobuf compiler.
#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
@@ -138,9 +139,6 @@ void VerifyReceiveStreamConfig(const rtclog::Event& event,
else
EXPECT_EQ(rtclog::VideoReceiveConfig::RTCP_REDUCEDSIZE,
receiver_config.rtcp_mode());
- ASSERT_TRUE(receiver_config.has_receiver_reference_time_report());
- EXPECT_EQ(config.rtp.rtcp_xr.receiver_reference_time_report,
- receiver_config.receiver_reference_time_report());
ASSERT_TRUE(receiver_config.has_remb());
EXPECT_EQ(config.rtp.remb, receiver_config.remb());
// Check RTX map.
@@ -214,9 +212,6 @@ void VerifySendStreamConfig(const rtclog::Event& event,
ASSERT_TRUE(sender_config.has_rtx_payload_type());
EXPECT_EQ(config.rtp.rtx.payload_type, sender_config.rtx_payload_type());
}
- // Check CNAME.
- ASSERT_TRUE(sender_config.has_c_name());
- EXPECT_EQ(config.rtp.c_name, sender_config.c_name());
// Check encoder.
ASSERT_TRUE(sender_config.has_encoder());
ASSERT_TRUE(sender_config.encoder().has_name());
@@ -230,7 +225,7 @@ void VerifySendStreamConfig(const rtclog::Event& event,
void VerifyRtpEvent(const rtclog::Event& event,
bool incoming,
MediaType media_type,
- uint8_t* header,
+ const uint8_t* header,
size_t header_size,
size_t total_size) {
ASSERT_TRUE(IsValidBasicEvent(event));
@@ -252,7 +247,7 @@ void VerifyRtpEvent(const rtclog::Event& event,
void VerifyRtcpEvent(const rtclog::Event& event,
bool incoming,
MediaType media_type,
- uint8_t* packet,
+ const uint8_t* packet,
size_t total_size) {
ASSERT_TRUE(IsValidBasicEvent(event));
ASSERT_EQ(rtclog::Event::RTCP_EVENT, event.type());
@@ -276,6 +271,21 @@ void VerifyPlayoutEvent(const rtclog::Event& event, uint32_t ssrc) {
EXPECT_EQ(ssrc, playout_event.local_ssrc());
}
+void VerifyBweLossEvent(const rtclog::Event& event,
+ int32_t bitrate,
+ uint8_t fraction_loss,
+ int32_t total_packets) {
+ ASSERT_TRUE(IsValidBasicEvent(event));
+ ASSERT_EQ(rtclog::Event::BWE_PACKET_LOSS_EVENT, event.type());
+ const rtclog::BwePacketLossEvent& bwe_event = event.bwe_packet_loss_event();
+ ASSERT_TRUE(bwe_event.has_bitrate());
+ EXPECT_EQ(bitrate, bwe_event.bitrate());
+ ASSERT_TRUE(bwe_event.has_fraction_loss());
+ EXPECT_EQ(fraction_loss, bwe_event.fraction_loss());
+ ASSERT_TRUE(bwe_event.has_total_packets());
+ EXPECT_EQ(total_packets, bwe_event.total_packets());
+}
+
void VerifyLogStartEvent(const rtclog::Event& event) {
ASSERT_TRUE(IsValidBasicEvent(event));
EXPECT_EQ(rtclog::Event::LOG_START, event.type());
@@ -289,7 +299,8 @@ void VerifyLogStartEvent(const rtclog::Event& event) {
size_t GenerateRtpPacket(uint32_t extensions_bitvector,
uint32_t csrcs_count,
uint8_t* packet,
- size_t packet_size) {
+ size_t packet_size,
+ Random* prng) {
RTC_CHECK_GE(packet_size, 16 + 4 * csrcs_count + 4 * kNumExtensions);
Clock* clock = Clock::GetRealTimeClock();
@@ -306,12 +317,12 @@ size_t GenerateRtpPacket(uint32_t extensions_bitvector,
std::vector<uint32_t> csrcs;
for (unsigned i = 0; i < csrcs_count; i++) {
- csrcs.push_back(rand());
+ csrcs.push_back(prng->Rand<uint32_t>());
}
rtp_sender.SetCsrcs(csrcs);
- rtp_sender.SetSSRC(rand());
- rtp_sender.SetStartTimestamp(rand(), true);
- rtp_sender.SetSequenceNumber(rand());
+ rtp_sender.SetSSRC(prng->Rand<uint32_t>());
+ rtp_sender.SetStartTimestamp(prng->Rand<uint32_t>(), true);
+ rtp_sender.SetSequenceNumber(prng->Rand<uint16_t>());
for (unsigned i = 0; i < kNumExtensions; i++) {
if (extensions_bitvector & (1u << i)) {
@@ -319,76 +330,84 @@ size_t GenerateRtpPacket(uint32_t extensions_bitvector,
}
}
- int8_t payload_type = rand() % 128;
- bool marker_bit = (rand() % 2 == 1);
- uint32_t capture_timestamp = rand();
- int64_t capture_time_ms = rand();
- bool timestamp_provided = (rand() % 2 == 1);
- bool inc_sequence_number = (rand() % 2 == 1);
+ int8_t payload_type = prng->Rand(0, 127);
+ bool marker_bit = prng->Rand<bool>();
+ uint32_t capture_timestamp = prng->Rand<uint32_t>();
+ int64_t capture_time_ms = prng->Rand<uint32_t>();
+ bool timestamp_provided = prng->Rand<bool>();
+ bool inc_sequence_number = prng->Rand<bool>();
size_t header_size = rtp_sender.BuildRTPheader(
packet, payload_type, marker_bit, capture_timestamp, capture_time_ms,
timestamp_provided, inc_sequence_number);
for (size_t i = header_size; i < packet_size; i++) {
- packet[i] = rand();
+ packet[i] = prng->Rand<uint8_t>();
}
return header_size;
}
-void GenerateRtcpPacket(uint8_t* packet, size_t packet_size) {
- for (size_t i = 0; i < packet_size; i++) {
- packet[i] = rand();
- }
+rtc::scoped_ptr<rtcp::RawPacket> GenerateRtcpPacket(Random* prng) {
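+  // Hand-build a parsable sender report with one report block rather than
+  // filling a buffer with random bytes, since LogRtcpPacket now drops any
+  // block it cannot parse.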
+ rtcp::ReportBlock report_block;
+ report_block.To(prng->Rand<uint32_t>()); // Remote SSRC.
+ report_block.WithFractionLost(prng->Rand(50));
+
+ rtcp::SenderReport sender_report;
+ sender_report.From(prng->Rand<uint32_t>()); // Sender SSRC.
+ sender_report.WithNtpSec(prng->Rand<uint32_t>());
+ sender_report.WithNtpFrac(prng->Rand<uint32_t>());
+ sender_report.WithPacketCount(prng->Rand<uint32_t>());
+ sender_report.WithReportBlock(report_block);
+
+ return sender_report.Build();
}
void GenerateVideoReceiveConfig(uint32_t extensions_bitvector,
- VideoReceiveStream::Config* config) {
+ VideoReceiveStream::Config* config,
+ Random* prng) {
// Create a map from a payload type to an encoder name.
VideoReceiveStream::Decoder decoder;
- decoder.payload_type = rand();
- decoder.payload_name = (rand() % 2 ? "VP8" : "H264");
+ decoder.payload_type = prng->Rand(0, 127);
+ decoder.payload_name = (prng->Rand<bool>() ? "VP8" : "H264");
config->decoders.push_back(decoder);
// Add SSRCs for the stream.
- config->rtp.remote_ssrc = rand();
- config->rtp.local_ssrc = rand();
+ config->rtp.remote_ssrc = prng->Rand<uint32_t>();
+ config->rtp.local_ssrc = prng->Rand<uint32_t>();
// Add extensions and settings for RTCP.
config->rtp.rtcp_mode =
- rand() % 2 ? RtcpMode::kCompound : RtcpMode::kReducedSize;
- config->rtp.rtcp_xr.receiver_reference_time_report = (rand() % 2 == 1);
- config->rtp.remb = (rand() % 2 == 1);
+ prng->Rand<bool>() ? RtcpMode::kCompound : RtcpMode::kReducedSize;
+ config->rtp.remb = prng->Rand<bool>();
// Add a map from a payload type to a new ssrc and a new payload type for RTX.
VideoReceiveStream::Config::Rtp::Rtx rtx_pair;
- rtx_pair.ssrc = rand();
- rtx_pair.payload_type = rand();
- config->rtp.rtx.insert(std::make_pair(rand(), rtx_pair));
+ rtx_pair.ssrc = prng->Rand<uint32_t>();
+ rtx_pair.payload_type = prng->Rand(0, 127);
+ config->rtp.rtx.insert(std::make_pair(prng->Rand(0, 127), rtx_pair));
// Add header extensions.
for (unsigned i = 0; i < kNumExtensions; i++) {
if (extensions_bitvector & (1u << i)) {
config->rtp.extensions.push_back(
- RtpExtension(kExtensionNames[i], rand()));
+ RtpExtension(kExtensionNames[i], prng->Rand<int>()));
}
}
}
void GenerateVideoSendConfig(uint32_t extensions_bitvector,
- VideoSendStream::Config* config) {
+ VideoSendStream::Config* config,
+ Random* prng) {
// Create a map from a payload type to an encoder name.
- config->encoder_settings.payload_type = rand();
- config->encoder_settings.payload_name = (rand() % 2 ? "VP8" : "H264");
+ config->encoder_settings.payload_type = prng->Rand(0, 127);
+ config->encoder_settings.payload_name = (prng->Rand<bool>() ? "VP8" : "H264");
// Add SSRCs for the stream.
- config->rtp.ssrcs.push_back(rand());
+ config->rtp.ssrcs.push_back(prng->Rand<uint32_t>());
// Add a map from a payload type to new ssrcs and a new payload type for RTX.
- config->rtp.rtx.ssrcs.push_back(rand());
- config->rtp.rtx.payload_type = rand();
- // Add a CNAME.
- config->rtp.c_name = "some.user@some.host";
+ config->rtp.rtx.ssrcs.push_back(prng->Rand<uint32_t>());
+ config->rtp.rtx.payload_type = prng->Rand(0, 127);
// Add header extensions.
for (unsigned i = 0; i < kNumExtensions; i++) {
if (extensions_bitvector & (1u << i)) {
config->rtp.extensions.push_back(
- RtpExtension(kExtensionNames[i], rand()));
+ RtpExtension(kExtensionNames[i], prng->Rand<int>()));
}
}
}
@@ -398,42 +417,49 @@ void GenerateVideoSendConfig(uint32_t extensions_bitvector,
void LogSessionAndReadBack(size_t rtp_count,
size_t rtcp_count,
size_t playout_count,
+ size_t bwe_loss_count,
uint32_t extensions_bitvector,
uint32_t csrcs_count,
unsigned int random_seed) {
ASSERT_LE(rtcp_count, rtp_count);
ASSERT_LE(playout_count, rtp_count);
+ ASSERT_LE(bwe_loss_count, rtp_count);
std::vector<rtc::Buffer> rtp_packets;
- std::vector<rtc::Buffer> rtcp_packets;
+ std::vector<rtc::scoped_ptr<rtcp::RawPacket> > rtcp_packets;
std::vector<size_t> rtp_header_sizes;
std::vector<uint32_t> playout_ssrcs;
+ std::vector<std::pair<int32_t, uint8_t> > bwe_loss_updates;
VideoReceiveStream::Config receiver_config(nullptr);
VideoSendStream::Config sender_config(nullptr);
- srand(random_seed);
+ Random prng(random_seed);
// Create rtp_count RTP packets containing random data.
for (size_t i = 0; i < rtp_count; i++) {
- size_t packet_size = 1000 + rand() % 64;
+ size_t packet_size = prng.Rand(1000, 1100);
rtp_packets.push_back(rtc::Buffer(packet_size));
- size_t header_size = GenerateRtpPacket(extensions_bitvector, csrcs_count,
- rtp_packets[i].data(), packet_size);
+ size_t header_size =
+ GenerateRtpPacket(extensions_bitvector, csrcs_count,
+ rtp_packets[i].data(), packet_size, &prng);
rtp_header_sizes.push_back(header_size);
}
// Create rtcp_count RTCP packets containing random data.
for (size_t i = 0; i < rtcp_count; i++) {
- size_t packet_size = 1000 + rand() % 64;
- rtcp_packets.push_back(rtc::Buffer(packet_size));
- GenerateRtcpPacket(rtcp_packets[i].data(), packet_size);
+ rtcp_packets.push_back(GenerateRtcpPacket(&prng));
}
// Create playout_count random SSRCs to use when logging AudioPlayout events.
for (size_t i = 0; i < playout_count; i++) {
- playout_ssrcs.push_back(static_cast<uint32_t>(rand()));
+ playout_ssrcs.push_back(prng.Rand<uint32_t>());
+ }
+ // Create bwe_loss_count random bitrate updates for BwePacketLoss.
+ for (size_t i = 0; i < bwe_loss_count; i++) {
+ bwe_loss_updates.push_back(
+ std::make_pair(prng.Rand<int32_t>(), prng.Rand<uint8_t>()));
}
// Create configurations for the video streams.
- GenerateVideoReceiveConfig(extensions_bitvector, &receiver_config);
- GenerateVideoSendConfig(extensions_bitvector, &sender_config);
+ GenerateVideoReceiveConfig(extensions_bitvector, &receiver_config, &prng);
+ GenerateVideoSendConfig(extensions_bitvector, &sender_config, &prng);
const int config_count = 2;
// Find the name of the current test, in order to use it as a temporary
@@ -448,7 +474,9 @@ void LogSessionAndReadBack(size_t rtp_count,
rtc::scoped_ptr<RtcEventLog> log_dumper(RtcEventLog::Create());
log_dumper->LogVideoReceiveStreamConfig(receiver_config);
log_dumper->LogVideoSendStreamConfig(sender_config);
- size_t rtcp_index = 1, playout_index = 1;
+ size_t rtcp_index = 1;
+ size_t playout_index = 1;
+ size_t bwe_loss_index = 1;
for (size_t i = 1; i <= rtp_count; i++) {
log_dumper->LogRtpHeader(
(i % 2 == 0), // Every second packet is incoming.
@@ -458,14 +486,20 @@ void LogSessionAndReadBack(size_t rtp_count,
log_dumper->LogRtcpPacket(
rtcp_index % 2 == 0, // Every second packet is incoming
rtcp_index % 3 == 0 ? MediaType::AUDIO : MediaType::VIDEO,
- rtcp_packets[rtcp_index - 1].data(),
- rtcp_packets[rtcp_index - 1].size());
+ rtcp_packets[rtcp_index - 1]->Buffer(),
+ rtcp_packets[rtcp_index - 1]->Length());
rtcp_index++;
}
if (i * playout_count >= playout_index * rtp_count) {
log_dumper->LogAudioPlayout(playout_ssrcs[playout_index - 1]);
playout_index++;
}
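+    // Spread the BWE loss events evenly over the RTP events, mirroring the
+    // playout-event scheduling above.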
+ if (i * bwe_loss_count >= bwe_loss_index * rtp_count) {
+ log_dumper->LogBwePacketLossEvent(
+ bwe_loss_updates[bwe_loss_index - 1].first,
+ bwe_loss_updates[bwe_loss_index - 1].second, i);
+ bwe_loss_index++;
+ }
if (i == rtp_count / 2) {
log_dumper->StartLogging(temp_filename, 10000000);
}
@@ -480,12 +514,15 @@ void LogSessionAndReadBack(size_t rtp_count,
// Verify that what we read back from the event log is the same as
// what we wrote down. For RTCP we log the full packets, but for
// RTP we should only log the header.
- const int event_count =
- config_count + playout_count + rtcp_count + rtp_count + 1;
+ const int event_count = config_count + playout_count + bwe_loss_count +
+ rtcp_count + rtp_count + 1;
EXPECT_EQ(event_count, parsed_stream.stream_size());
VerifyReceiveStreamConfig(parsed_stream.stream(0), receiver_config);
VerifySendStreamConfig(parsed_stream.stream(1), sender_config);
- size_t event_index = config_count, rtcp_index = 1, playout_index = 1;
+ size_t event_index = config_count;
+ size_t rtcp_index = 1;
+ size_t playout_index = 1;
+ size_t bwe_loss_index = 1;
for (size_t i = 1; i <= rtp_count; i++) {
VerifyRtpEvent(parsed_stream.stream(event_index),
(i % 2 == 0), // Every second packet is incoming.
@@ -497,8 +534,8 @@ void LogSessionAndReadBack(size_t rtp_count,
VerifyRtcpEvent(parsed_stream.stream(event_index),
rtcp_index % 2 == 0, // Every second packet is incoming.
rtcp_index % 3 == 0 ? MediaType::AUDIO : MediaType::VIDEO,
- rtcp_packets[rtcp_index - 1].data(),
- rtcp_packets[rtcp_index - 1].size());
+ rtcp_packets[rtcp_index - 1]->Buffer(),
+ rtcp_packets[rtcp_index - 1]->Length());
event_index++;
rtcp_index++;
}
@@ -508,6 +545,13 @@ void LogSessionAndReadBack(size_t rtp_count,
event_index++;
playout_index++;
}
+ if (i * bwe_loss_count >= bwe_loss_index * rtp_count) {
+ VerifyBweLossEvent(parsed_stream.stream(event_index),
+ bwe_loss_updates[bwe_loss_index - 1].first,
+ bwe_loss_updates[bwe_loss_index - 1].second, i);
+ event_index++;
+ bwe_loss_index++;
+ }
if (i == rtp_count / 2) {
VerifyLogStartEvent(parsed_stream.stream(event_index));
event_index++;
@@ -519,10 +563,11 @@ void LogSessionAndReadBack(size_t rtp_count,
}
TEST(RtcEventLogTest, LogSessionAndReadBack) {
- // Log 5 RTP, 2 RTCP, and 0 playout events with no header extensions or CSRCS.
- LogSessionAndReadBack(5, 2, 0, 0, 0, 321);
+ // Log 5 RTP, 2 RTCP, 0 playout events and 0 BWE events
+  // with no header extensions or CSRCs.
+ LogSessionAndReadBack(5, 2, 0, 0, 0, 0, 321);
- // Enable AbsSendTime and TransportSequenceNumbers
+ // Enable AbsSendTime and TransportSequenceNumbers.
uint32_t extensions = 0;
for (uint32_t i = 0; i < kNumExtensions; i++) {
if (kExtensionTypes[i] == RTPExtensionType::kRtpExtensionAbsoluteSendTime ||
@@ -531,20 +576,21 @@ TEST(RtcEventLogTest, LogSessionAndReadBack) {
extensions |= 1u << i;
}
}
- LogSessionAndReadBack(8, 2, 0, extensions, 0, 3141592653u);
+ LogSessionAndReadBack(8, 2, 0, 0, extensions, 0, 3141592653u);
- extensions = (1u << kNumExtensions) - 1; // Enable all header extensions
- LogSessionAndReadBack(9, 2, 3, extensions, 2, 2718281828u);
+ extensions = (1u << kNumExtensions) - 1; // Enable all header extensions.
+ LogSessionAndReadBack(9, 2, 3, 2, extensions, 2, 2718281828u);
  // Try all combinations of header extensions and up to 2 CSRCs.
for (extensions = 0; extensions < (1u << kNumExtensions); extensions++) {
for (uint32_t csrcs_count = 0; csrcs_count < 3; csrcs_count++) {
LogSessionAndReadBack(5 + extensions, // Number of RTP packets.
2 + csrcs_count, // Number of RTCP packets.
- 3 + csrcs_count, // Number of playout events
- extensions, // Bit vector choosing extensions
- csrcs_count, // Number of contributing sources
- rand());
+ 3 + csrcs_count, // Number of playout events.
+ 1 + csrcs_count, // Number of BWE loss events.
+ extensions, // Bit vector choosing extensions.
+ csrcs_count, // Number of contributing sources.
+ extensions * 3 + csrcs_count + 1); // Random seed.
}
}
}
@@ -556,35 +602,32 @@ void DropOldEvents(uint32_t extensions_bitvector,
unsigned int random_seed) {
rtc::Buffer old_rtp_packet;
rtc::Buffer recent_rtp_packet;
- rtc::Buffer old_rtcp_packet;
- rtc::Buffer recent_rtcp_packet;
+ rtc::scoped_ptr<rtcp::RawPacket> old_rtcp_packet;
+ rtc::scoped_ptr<rtcp::RawPacket> recent_rtcp_packet;
VideoReceiveStream::Config receiver_config(nullptr);
VideoSendStream::Config sender_config(nullptr);
- srand(random_seed);
+ Random prng(random_seed);
// Create two RTP packets containing random data.
- size_t packet_size = 1000 + rand() % 64;
+ size_t packet_size = prng.Rand(1000, 1100);
old_rtp_packet.SetSize(packet_size);
GenerateRtpPacket(extensions_bitvector, csrcs_count, old_rtp_packet.data(),
- packet_size);
- packet_size = 1000 + rand() % 64;
+ packet_size, &prng);
+ packet_size = prng.Rand(1000, 1100);
recent_rtp_packet.SetSize(packet_size);
- size_t recent_header_size = GenerateRtpPacket(
- extensions_bitvector, csrcs_count, recent_rtp_packet.data(), packet_size);
+ size_t recent_header_size =
+ GenerateRtpPacket(extensions_bitvector, csrcs_count,
+ recent_rtp_packet.data(), packet_size, &prng);
// Create two RTCP packets containing random data.
- packet_size = 1000 + rand() % 64;
- old_rtcp_packet.SetSize(packet_size);
- GenerateRtcpPacket(old_rtcp_packet.data(), packet_size);
- packet_size = 1000 + rand() % 64;
- recent_rtcp_packet.SetSize(packet_size);
- GenerateRtcpPacket(recent_rtcp_packet.data(), packet_size);
+ old_rtcp_packet = GenerateRtcpPacket(&prng);
+ recent_rtcp_packet = GenerateRtcpPacket(&prng);
// Create configurations for the video streams.
- GenerateVideoReceiveConfig(extensions_bitvector, &receiver_config);
- GenerateVideoSendConfig(extensions_bitvector, &sender_config);
+ GenerateVideoReceiveConfig(extensions_bitvector, &receiver_config, &prng);
+ GenerateVideoSendConfig(extensions_bitvector, &sender_config, &prng);
// Find the name of the current test, in order to use it as a temporary
// filename.
@@ -601,16 +644,16 @@ void DropOldEvents(uint32_t extensions_bitvector,
log_dumper->LogVideoSendStreamConfig(sender_config);
log_dumper->LogRtpHeader(false, MediaType::AUDIO, old_rtp_packet.data(),
old_rtp_packet.size());
- log_dumper->LogRtcpPacket(true, MediaType::AUDIO, old_rtcp_packet.data(),
- old_rtcp_packet.size());
+ log_dumper->LogRtcpPacket(true, MediaType::AUDIO, old_rtcp_packet->Buffer(),
+ old_rtcp_packet->Length());
// Sleep 55 ms to let old events be removed from the queue.
rtc::Thread::SleepMs(55);
log_dumper->StartLogging(temp_filename, 10000000);
log_dumper->LogRtpHeader(true, MediaType::VIDEO, recent_rtp_packet.data(),
recent_rtp_packet.size());
log_dumper->LogRtcpPacket(false, MediaType::VIDEO,
- recent_rtcp_packet.data(),
- recent_rtcp_packet.size());
+ recent_rtcp_packet->Buffer(),
+ recent_rtcp_packet->Length());
}
// Read the generated file from disk.
@@ -628,7 +671,7 @@ void DropOldEvents(uint32_t extensions_bitvector,
recent_rtp_packet.data(), recent_header_size,
recent_rtp_packet.size());
VerifyRtcpEvent(parsed_stream.stream(4), false, MediaType::VIDEO,
- recent_rtcp_packet.data(), recent_rtcp_packet.size());
+ recent_rtcp_packet->Buffer(), recent_rtcp_packet->Length());
// Clean up temporary file - can be pretty slow.
remove(temp_filename.c_str());
diff --git a/webrtc/call/webrtc_call.gypi b/webrtc/call/webrtc_call.gypi
index fd70ae81f4..0c3efff43a 100644
--- a/webrtc/call/webrtc_call.gypi
+++ b/webrtc/call/webrtc_call.gypi
@@ -14,6 +14,7 @@
'<(webrtc_root)/webrtc.gyp:rtc_event_log',
],
'webrtc_call_sources': [
+ 'call/bitrate_allocator.cc',
'call/call.cc',
'call/congestion_controller.cc',
'call/transport_adapter.cc',
diff --git a/webrtc/codereview.settings b/webrtc/codereview.settings
index 97bee14549..c441cc61bc 100644
--- a/webrtc/codereview.settings
+++ b/webrtc/codereview.settings
@@ -1,4 +1,5 @@
-Creating CLs from this location is not supported!
-Please create a full WebRTC checkout using 'fetch webrtc'
-or by cloning https://chromium.googlesource.com/external/webrtc
+Creating CLs from this location is not supported! Please make sure the current
+working directory is the parent directory of this directory.
+If you're working with a Chromium checkout, you'll have to create a full WebRTC
+checkout and upload a CL from that. See http://www.webrtc.org for instructions.
diff --git a/webrtc/common.h b/webrtc/common.h
index dda045ee6c..d705d4b8ed 100644
--- a/webrtc/common.h
+++ b/webrtc/common.h
@@ -17,6 +17,23 @@
namespace webrtc {
+// Only add new values to the end of the enumeration and never remove (only
+// deprecate) to maintain binary compatibility.
+enum class ConfigOptionID {
+ kMyExperimentForTest,
+ kAlgo1CostFunctionForTest,
+ kTemporalLayersFactory,
+ kNetEqCapacityConfig,
+ kNetEqFastAccelerate,
+ kVoicePacing,
+ kExtendedFilter,
+ kDelayAgnostic,
+ kExperimentalAgc,
+ kExperimentalNs,
+ kBeamforming,
+ kIntelligibility
+};
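+
+// Illustrative sketch (hypothetical option type): identifier() below assumes
+// each stored option type T exposes a static member matching this enum, e.g.
+//   struct MyExperimentForTest {
+//     static const ConfigOptionID identifier =
+//         ConfigOptionID::kMyExperimentForTest;
+//   };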
+
// Class Config is designed to ease passing a set of options across webrtc code.
// Options are identified by typename in order to avoid incorrect casts.
//
@@ -61,8 +78,6 @@ class Config {
}
private:
- typedef void* OptionIdentifier;
-
struct BaseOption {
virtual ~BaseOption() {}
};
@@ -76,11 +91,9 @@ class Config {
T* value;
};
- // Own implementation of rtti-subset to avoid depending on rtti and its costs.
template<typename T>
- static OptionIdentifier identifier() {
- static char id_placeholder;
- return &id_placeholder;
+ static ConfigOptionID identifier() {
+ return T::identifier;
}
// Used to instantiate a default-constructed object that doesn't need to be
@@ -92,7 +105,7 @@ class Config {
return def;
}
- typedef std::map<OptionIdentifier, BaseOption*> OptionMap;
+ typedef std::map<ConfigOptionID, BaseOption*> OptionMap;
OptionMap options_;
// RTC_DISALLOW_COPY_AND_ASSIGN
diff --git a/webrtc/common_audio/BUILD.gn b/webrtc/common_audio/BUILD.gn
index b01b31816b..b4ec1d71ef 100644
--- a/webrtc/common_audio/BUILD.gn
+++ b/webrtc/common_audio/BUILD.gn
@@ -87,6 +87,7 @@ source_set("common_audio") {
"signal_processing/vector_scaling_operations.c",
"sparse_fir_filter.cc",
"sparse_fir_filter.h",
+ "swap_queue.h",
"vad/include/vad.h",
"vad/include/webrtc_vad.h",
"vad/vad.cc",
diff --git a/webrtc/common_audio/OWNERS b/webrtc/common_audio/OWNERS
index 20f640041e..208a7c5635 100644
--- a/webrtc/common_audio/OWNERS
+++ b/webrtc/common_audio/OWNERS
@@ -1,4 +1,3 @@
-andrew@webrtc.org
henrik.lundin@webrtc.org
jan.skoglund@webrtc.org
kwiberg@webrtc.org
diff --git a/webrtc/common_audio/audio_converter.cc b/webrtc/common_audio/audio_converter.cc
index f1709ae653..9ebfabc286 100644
--- a/webrtc/common_audio/audio_converter.cc
+++ b/webrtc/common_audio/audio_converter.cc
@@ -11,6 +11,7 @@
#include "webrtc/common_audio/audio_converter.h"
#include <cstring>
+#include <utility>
#include "webrtc/base/checks.h"
#include "webrtc/base/safe_conversions.h"
@@ -24,7 +25,7 @@ namespace webrtc {
class CopyConverter : public AudioConverter {
public:
- CopyConverter(int src_channels, size_t src_frames, int dst_channels,
+ CopyConverter(size_t src_channels, size_t src_frames, size_t dst_channels,
size_t dst_frames)
: AudioConverter(src_channels, src_frames, dst_channels, dst_frames) {}
~CopyConverter() override {};
@@ -33,7 +34,7 @@ class CopyConverter : public AudioConverter {
size_t dst_capacity) override {
CheckSizes(src_size, dst_capacity);
if (src != dst) {
- for (int i = 0; i < src_channels(); ++i)
+ for (size_t i = 0; i < src_channels(); ++i)
std::memcpy(dst[i], src[i], dst_frames() * sizeof(*dst[i]));
}
}
@@ -41,7 +42,7 @@ class CopyConverter : public AudioConverter {
class UpmixConverter : public AudioConverter {
public:
- UpmixConverter(int src_channels, size_t src_frames, int dst_channels,
+ UpmixConverter(size_t src_channels, size_t src_frames, size_t dst_channels,
size_t dst_frames)
: AudioConverter(src_channels, src_frames, dst_channels, dst_frames) {}
~UpmixConverter() override {};
@@ -51,7 +52,7 @@ class UpmixConverter : public AudioConverter {
CheckSizes(src_size, dst_capacity);
for (size_t i = 0; i < dst_frames(); ++i) {
const float value = src[0][i];
- for (int j = 0; j < dst_channels(); ++j)
+ for (size_t j = 0; j < dst_channels(); ++j)
dst[j][i] = value;
}
}
@@ -59,7 +60,7 @@ class UpmixConverter : public AudioConverter {
class DownmixConverter : public AudioConverter {
public:
- DownmixConverter(int src_channels, size_t src_frames, int dst_channels,
+ DownmixConverter(size_t src_channels, size_t src_frames, size_t dst_channels,
size_t dst_frames)
: AudioConverter(src_channels, src_frames, dst_channels, dst_frames) {
}
@@ -71,7 +72,7 @@ class DownmixConverter : public AudioConverter {
float* dst_mono = dst[0];
for (size_t i = 0; i < src_frames(); ++i) {
float sum = 0;
- for (int j = 0; j < src_channels(); ++j)
+ for (size_t j = 0; j < src_channels(); ++j)
sum += src[j][i];
dst_mono[i] = sum / src_channels();
}
@@ -80,11 +81,11 @@ class DownmixConverter : public AudioConverter {
class ResampleConverter : public AudioConverter {
public:
- ResampleConverter(int src_channels, size_t src_frames, int dst_channels,
+ ResampleConverter(size_t src_channels, size_t src_frames, size_t dst_channels,
size_t dst_frames)
: AudioConverter(src_channels, src_frames, dst_channels, dst_frames) {
resamplers_.reserve(src_channels);
- for (int i = 0; i < src_channels; ++i)
+ for (size_t i = 0; i < src_channels; ++i)
resamplers_.push_back(new PushSincResampler(src_frames, dst_frames));
}
~ResampleConverter() override {};
@@ -105,7 +106,7 @@ class ResampleConverter : public AudioConverter {
class CompositionConverter : public AudioConverter {
public:
CompositionConverter(ScopedVector<AudioConverter> converters)
- : converters_(converters.Pass()) {
+ : converters_(std::move(converters)) {
RTC_CHECK_GE(converters_.size(), 2u);
// We need an intermediate buffer after every converter.
for (auto it = converters_.begin(); it != converters_.end() - 1; ++it)
@@ -135,9 +136,9 @@ class CompositionConverter : public AudioConverter {
ScopedVector<ChannelBuffer<float>> buffers_;
};
-rtc::scoped_ptr<AudioConverter> AudioConverter::Create(int src_channels,
+rtc::scoped_ptr<AudioConverter> AudioConverter::Create(size_t src_channels,
size_t src_frames,
- int dst_channels,
+ size_t dst_channels,
size_t dst_frames) {
rtc::scoped_ptr<AudioConverter> sp;
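+  // Composition order, inferred from the branches below: resampling always
+  // runs on the smaller channel count, so we downmix before resampling and
+  // resample before upmixing.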
if (src_channels > dst_channels) {
@@ -147,7 +148,7 @@ rtc::scoped_ptr<AudioConverter> AudioConverter::Create(int src_channels,
dst_channels, src_frames));
converters.push_back(new ResampleConverter(dst_channels, src_frames,
dst_channels, dst_frames));
- sp.reset(new CompositionConverter(converters.Pass()));
+ sp.reset(new CompositionConverter(std::move(converters)));
} else {
sp.reset(new DownmixConverter(src_channels, src_frames, dst_channels,
dst_frames));
@@ -159,7 +160,7 @@ rtc::scoped_ptr<AudioConverter> AudioConverter::Create(int src_channels,
src_channels, dst_frames));
converters.push_back(new UpmixConverter(src_channels, dst_frames,
dst_channels, dst_frames));
- sp.reset(new CompositionConverter(converters.Pass()));
+ sp.reset(new CompositionConverter(std::move(converters)));
} else {
sp.reset(new UpmixConverter(src_channels, src_frames, dst_channels,
dst_frames));
@@ -172,7 +173,7 @@ rtc::scoped_ptr<AudioConverter> AudioConverter::Create(int src_channels,
dst_frames));
}
- return sp.Pass();
+ return sp;
}
// For CompositionConverter.
@@ -182,8 +183,8 @@ AudioConverter::AudioConverter()
dst_channels_(0),
dst_frames_(0) {}
-AudioConverter::AudioConverter(int src_channels, size_t src_frames,
- int dst_channels, size_t dst_frames)
+AudioConverter::AudioConverter(size_t src_channels, size_t src_frames,
+ size_t dst_channels, size_t dst_frames)
: src_channels_(src_channels),
src_frames_(src_frames),
dst_channels_(dst_channels),
diff --git a/webrtc/common_audio/audio_converter.h b/webrtc/common_audio/audio_converter.h
index 7d1513bc02..c5f08c1d9b 100644
--- a/webrtc/common_audio/audio_converter.h
+++ b/webrtc/common_audio/audio_converter.h
@@ -26,9 +26,9 @@ class AudioConverter {
public:
// Returns a new AudioConverter, which will use the supplied format for its
// lifetime. Caller is responsible for the memory.
- static rtc::scoped_ptr<AudioConverter> Create(int src_channels,
+ static rtc::scoped_ptr<AudioConverter> Create(size_t src_channels,
size_t src_frames,
- int dst_channels,
+ size_t dst_channels,
size_t dst_frames);
virtual ~AudioConverter() {};
@@ -39,23 +39,23 @@ class AudioConverter {
virtual void Convert(const float* const* src, size_t src_size,
float* const* dst, size_t dst_capacity) = 0;
- int src_channels() const { return src_channels_; }
+ size_t src_channels() const { return src_channels_; }
size_t src_frames() const { return src_frames_; }
- int dst_channels() const { return dst_channels_; }
+ size_t dst_channels() const { return dst_channels_; }
size_t dst_frames() const { return dst_frames_; }
protected:
AudioConverter();
- AudioConverter(int src_channels, size_t src_frames, int dst_channels,
+ AudioConverter(size_t src_channels, size_t src_frames, size_t dst_channels,
size_t dst_frames);
// Helper to RTC_CHECK that inputs are correctly sized.
void CheckSizes(size_t src_size, size_t dst_capacity) const;
private:
- const int src_channels_;
+ const size_t src_channels_;
const size_t src_frames_;
- const int dst_channels_;
+ const size_t dst_channels_;
const size_t dst_frames_;
RTC_DISALLOW_COPY_AND_ASSIGN(AudioConverter);
diff --git a/webrtc/common_audio/audio_converter_unittest.cc b/webrtc/common_audio/audio_converter_unittest.cc
index c85b96e285..dace0bdccf 100644
--- a/webrtc/common_audio/audio_converter_unittest.cc
+++ b/webrtc/common_audio/audio_converter_unittest.cc
@@ -13,6 +13,7 @@
#include <vector>
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/format_macros.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/common_audio/audio_converter.h"
@@ -24,11 +25,11 @@ namespace webrtc {
typedef rtc::scoped_ptr<ChannelBuffer<float>> ScopedBuffer;
// Sets the signal value to increase by |data| with every sample.
-ScopedBuffer CreateBuffer(const std::vector<float>& data, int frames) {
- const int num_channels = static_cast<int>(data.size());
+ScopedBuffer CreateBuffer(const std::vector<float>& data, size_t frames) {
+ const size_t num_channels = data.size();
ScopedBuffer sb(new ChannelBuffer<float>(frames, num_channels));
- for (int i = 0; i < num_channels; ++i)
- for (int j = 0; j < frames; ++j)
+ for (size_t i = 0; i < num_channels; ++i)
+ for (size_t j = 0; j < frames; ++j)
sb->channels()[i][j] = data[i] * j;
return sb;
}
@@ -56,7 +57,7 @@ float ComputeSNR(const ChannelBuffer<float>& ref,
float mse = 0;
float variance = 0;
float mean = 0;
- for (int i = 0; i < ref.num_channels(); ++i) {
+ for (size_t i = 0; i < ref.num_channels(); ++i) {
for (size_t j = 0; j < ref.num_frames() - delay; ++j) {
float error = ref.channels()[i][j] - test.channels()[i][j + delay];
mse += error * error;
@@ -85,9 +86,9 @@ float ComputeSNR(const ChannelBuffer<float>& ref,
// Sets the source to a linearly increasing signal for which we can easily
// generate a reference. Runs the AudioConverter and ensures the output has
// sufficiently high SNR relative to the reference.
-void RunAudioConverterTest(int src_channels,
+void RunAudioConverterTest(size_t src_channels,
int src_sample_rate_hz,
- int dst_channels,
+ size_t dst_channels,
int dst_sample_rate_hz) {
const float kSrcLeft = 0.0002f;
const float kSrcRight = 0.0001f;
@@ -96,8 +97,8 @@ void RunAudioConverterTest(int src_channels,
const float dst_left = resampling_factor * kSrcLeft;
const float dst_right = resampling_factor * kSrcRight;
const float dst_mono = (dst_left + dst_right) / 2;
- const int src_frames = src_sample_rate_hz / 100;
- const int dst_frames = dst_sample_rate_hz / 100;
+ const size_t src_frames = static_cast<size_t>(src_sample_rate_hz / 100);
+ const size_t dst_frames = static_cast<size_t>(dst_sample_rate_hz / 100);
std::vector<float> src_data(1, kSrcLeft);
if (src_channels == 2)
@@ -127,8 +128,9 @@ void RunAudioConverterTest(int src_channels,
static_cast<size_t>(
PushSincResampler::AlgorithmicDelaySeconds(src_sample_rate_hz) *
dst_sample_rate_hz);
- printf("(%d, %d Hz) -> (%d, %d Hz) ", // SNR reported on the same line later.
- src_channels, src_sample_rate_hz, dst_channels, dst_sample_rate_hz);
+ // SNR reported on the same line later.
+ printf("(%" PRIuS ", %d Hz) -> (%" PRIuS ", %d Hz) ",
+ src_channels, src_sample_rate_hz, dst_channels, dst_sample_rate_hz);
rtc::scoped_ptr<AudioConverter> converter = AudioConverter::Create(
src_channels, src_frames, dst_channels, dst_frames);
@@ -141,13 +143,13 @@ void RunAudioConverterTest(int src_channels,
TEST(AudioConverterTest, ConversionsPassSNRThreshold) {
const int kSampleRates[] = {8000, 16000, 32000, 44100, 48000};
- const int kSampleRatesSize = sizeof(kSampleRates) / sizeof(*kSampleRates);
- const int kChannels[] = {1, 2};
- const int kChannelsSize = sizeof(kChannels) / sizeof(*kChannels);
- for (int src_rate = 0; src_rate < kSampleRatesSize; ++src_rate) {
- for (int dst_rate = 0; dst_rate < kSampleRatesSize; ++dst_rate) {
- for (int src_channel = 0; src_channel < kChannelsSize; ++src_channel) {
- for (int dst_channel = 0; dst_channel < kChannelsSize; ++dst_channel) {
+ const size_t kChannels[] = {1, 2};
+ for (size_t src_rate = 0; src_rate < arraysize(kSampleRates); ++src_rate) {
+ for (size_t dst_rate = 0; dst_rate < arraysize(kSampleRates); ++dst_rate) {
+ for (size_t src_channel = 0; src_channel < arraysize(kChannels);
+ ++src_channel) {
+ for (size_t dst_channel = 0; dst_channel < arraysize(kChannels);
+ ++dst_channel) {
RunAudioConverterTest(kChannels[src_channel], kSampleRates[src_rate],
kChannels[dst_channel], kSampleRates[dst_rate]);
}
diff --git a/webrtc/common_audio/blocker.cc b/webrtc/common_audio/blocker.cc
index 0133550beb..13432f2e7a 100644
--- a/webrtc/common_audio/blocker.cc
+++ b/webrtc/common_audio/blocker.cc
@@ -22,10 +22,10 @@ void AddFrames(const float* const* a,
const float* const* b,
int b_start_index,
size_t num_frames,
- int num_channels,
+ size_t num_channels,
float* const* result,
size_t result_start_index) {
- for (int i = 0; i < num_channels; ++i) {
+ for (size_t i = 0; i < num_channels; ++i) {
for (size_t j = 0; j < num_frames; ++j) {
result[i][j + result_start_index] =
a[i][j + a_start_index] + b[i][j + b_start_index];
@@ -37,10 +37,10 @@ void AddFrames(const float* const* a,
void CopyFrames(const float* const* src,
size_t src_start_index,
size_t num_frames,
- int num_channels,
+ size_t num_channels,
float* const* dst,
size_t dst_start_index) {
- for (int i = 0; i < num_channels; ++i) {
+ for (size_t i = 0; i < num_channels; ++i) {
memcpy(&dst[i][dst_start_index],
&src[i][src_start_index],
num_frames * sizeof(dst[i][dst_start_index]));
@@ -51,10 +51,10 @@ void CopyFrames(const float* const* src,
void MoveFrames(const float* const* src,
size_t src_start_index,
size_t num_frames,
- int num_channels,
+ size_t num_channels,
float* const* dst,
size_t dst_start_index) {
- for (int i = 0; i < num_channels; ++i) {
+ for (size_t i = 0; i < num_channels; ++i) {
memmove(&dst[i][dst_start_index],
&src[i][src_start_index],
num_frames * sizeof(dst[i][dst_start_index]));
@@ -64,8 +64,8 @@ void MoveFrames(const float* const* src,
void ZeroOut(float* const* buffer,
size_t starting_idx,
size_t num_frames,
- int num_channels) {
- for (int i = 0; i < num_channels; ++i) {
+ size_t num_channels) {
+ for (size_t i = 0; i < num_channels; ++i) {
memset(&buffer[i][starting_idx], 0,
num_frames * sizeof(buffer[i][starting_idx]));
}
@@ -75,9 +75,9 @@ void ZeroOut(float* const* buffer,
// stored in |frames|.
void ApplyWindow(const float* window,
size_t num_frames,
- int num_channels,
+ size_t num_channels,
float* const* frames) {
- for (int i = 0; i < num_channels; ++i) {
+ for (size_t i = 0; i < num_channels; ++i) {
for (size_t j = 0; j < num_frames; ++j) {
frames[i][j] = frames[i][j] * window[j];
}
@@ -100,8 +100,8 @@ namespace webrtc {
Blocker::Blocker(size_t chunk_size,
size_t block_size,
- int num_input_channels,
- int num_output_channels,
+ size_t num_input_channels,
+ size_t num_output_channels,
const float* window,
size_t shift_amount,
BlockerCallback* callback)
@@ -166,8 +166,8 @@ Blocker::Blocker(size_t chunk_size,
// TODO(claguna): Look at using ring buffers to eliminate some copies.
void Blocker::ProcessChunk(const float* const* input,
size_t chunk_size,
- int num_input_channels,
- int num_output_channels,
+ size_t num_input_channels,
+ size_t num_output_channels,
float* const* output) {
RTC_CHECK_EQ(chunk_size, chunk_size_);
RTC_CHECK_EQ(num_input_channels, num_input_channels_);
diff --git a/webrtc/common_audio/blocker.h b/webrtc/common_audio/blocker.h
index 025638ae8c..3a67c134d0 100644
--- a/webrtc/common_audio/blocker.h
+++ b/webrtc/common_audio/blocker.h
@@ -26,8 +26,8 @@ class BlockerCallback {
virtual void ProcessBlock(const float* const* input,
size_t num_frames,
- int num_input_channels,
- int num_output_channels,
+ size_t num_input_channels,
+ size_t num_output_channels,
float* const* output) = 0;
};
@@ -65,23 +65,23 @@ class Blocker {
public:
Blocker(size_t chunk_size,
size_t block_size,
- int num_input_channels,
- int num_output_channels,
+ size_t num_input_channels,
+ size_t num_output_channels,
const float* window,
size_t shift_amount,
BlockerCallback* callback);
void ProcessChunk(const float* const* input,
size_t chunk_size,
- int num_input_channels,
- int num_output_channels,
+ size_t num_input_channels,
+ size_t num_output_channels,
float* const* output);
private:
const size_t chunk_size_;
const size_t block_size_;
- const int num_input_channels_;
- const int num_output_channels_;
+ const size_t num_input_channels_;
+ const size_t num_output_channels_;
// The number of frames of delay to add at the beginning of the first chunk.
const size_t initial_delay_;
diff --git a/webrtc/common_audio/blocker_unittest.cc b/webrtc/common_audio/blocker_unittest.cc
index 397e269239..a5a7b56282 100644
--- a/webrtc/common_audio/blocker_unittest.cc
+++ b/webrtc/common_audio/blocker_unittest.cc
@@ -11,6 +11,7 @@
#include "webrtc/common_audio/blocker.h"
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/arraysize.h"
namespace {
@@ -19,10 +20,10 @@ class PlusThreeBlockerCallback : public webrtc::BlockerCallback {
public:
void ProcessBlock(const float* const* input,
size_t num_frames,
- int num_input_channels,
- int num_output_channels,
+ size_t num_input_channels,
+ size_t num_output_channels,
float* const* output) override {
- for (int i = 0; i < num_output_channels; ++i) {
+ for (size_t i = 0; i < num_output_channels; ++i) {
for (size_t j = 0; j < num_frames; ++j) {
output[i][j] = input[i][j] + 3;
}
@@ -35,10 +36,10 @@ class CopyBlockerCallback : public webrtc::BlockerCallback {
public:
void ProcessBlock(const float* const* input,
size_t num_frames,
- int num_input_channels,
- int num_output_channels,
+ size_t num_input_channels,
+ size_t num_output_channels,
float* const* output) override {
- for (int i = 0; i < num_output_channels; ++i) {
+ for (size_t i = 0; i < num_output_channels; ++i) {
for (size_t j = 0; j < num_frames; ++j) {
output[i][j] = input[i][j];
}
@@ -56,16 +57,16 @@ namespace webrtc {
class BlockerTest : public ::testing::Test {
protected:
void RunTest(Blocker* blocker,
- int chunk_size,
- int num_frames,
+ size_t chunk_size,
+ size_t num_frames,
const float* const* input,
float* const* input_chunk,
float* const* output,
float* const* output_chunk,
- int num_input_channels,
- int num_output_channels) {
- int start = 0;
- int end = chunk_size - 1;
+ size_t num_input_channels,
+ size_t num_output_channels) {
+ size_t start = 0;
+ size_t end = chunk_size - 1;
while (end < num_frames) {
CopyTo(input_chunk, 0, start, num_input_channels, chunk_size, input);
blocker->ProcessChunk(input_chunk,
@@ -75,28 +76,28 @@ class BlockerTest : public ::testing::Test {
output_chunk);
CopyTo(output, start, 0, num_output_channels, chunk_size, output_chunk);
- start = start + chunk_size;
- end = end + chunk_size;
+ start += chunk_size;
+ end += chunk_size;
}
}
void ValidateSignalEquality(const float* const* expected,
const float* const* actual,
- int num_channels,
- int num_frames) {
- for (int i = 0; i < num_channels; ++i) {
- for (int j = 0; j < num_frames; ++j) {
+ size_t num_channels,
+ size_t num_frames) {
+ for (size_t i = 0; i < num_channels; ++i) {
+ for (size_t j = 0; j < num_frames; ++j) {
EXPECT_FLOAT_EQ(expected[i][j], actual[i][j]);
}
}
}
void ValidateInitialDelay(const float* const* output,
- int num_channels,
- int num_frames,
- int initial_delay) {
- for (int i = 0; i < num_channels; ++i) {
- for (int j = 0; j < num_frames; ++j) {
+ size_t num_channels,
+ size_t num_frames,
+ size_t initial_delay) {
+ for (size_t i = 0; i < num_channels; ++i) {
+ for (size_t j = 0; j < num_frames; ++j) {
if (j < initial_delay) {
EXPECT_FLOAT_EQ(output[i][j], 0.f);
} else {
@@ -107,12 +108,12 @@ class BlockerTest : public ::testing::Test {
}
static void CopyTo(float* const* dst,
- int start_index_dst,
- int start_index_src,
- int num_channels,
- int num_frames,
+ size_t start_index_dst,
+ size_t start_index_src,
+ size_t num_channels,
+ size_t num_frames,
const float* const* src) {
- for (int i = 0; i < num_channels; ++i) {
+ for (size_t i = 0; i < num_channels; ++i) {
memcpy(&dst[i][start_index_dst],
&src[i][start_index_src],
num_frames * sizeof(float));
@@ -121,12 +122,12 @@ class BlockerTest : public ::testing::Test {
};
TEST_F(BlockerTest, TestBlockerMutuallyPrimeChunkandBlockSize) {
- const int kNumInputChannels = 3;
- const int kNumOutputChannels = 2;
- const int kNumFrames = 10;
- const int kBlockSize = 4;
- const int kChunkSize = 5;
- const int kShiftAmount = 2;
+ const size_t kNumInputChannels = 3;
+ const size_t kNumOutputChannels = 2;
+ const size_t kNumFrames = 10;
+ const size_t kBlockSize = 4;
+ const size_t kChunkSize = 5;
+ const size_t kShiftAmount = 2;
const float kInput[kNumInputChannels][kNumFrames] = {
{1, 1, 1, 1, 1, 1, 1, 1, 1, 1},
@@ -174,12 +175,12 @@ TEST_F(BlockerTest, TestBlockerMutuallyPrimeChunkandBlockSize) {
}
TEST_F(BlockerTest, TestBlockerMutuallyPrimeShiftAndBlockSize) {
- const int kNumInputChannels = 3;
- const int kNumOutputChannels = 2;
- const int kNumFrames = 12;
- const int kBlockSize = 4;
- const int kChunkSize = 6;
- const int kShiftAmount = 3;
+ const size_t kNumInputChannels = 3;
+ const size_t kNumOutputChannels = 2;
+ const size_t kNumFrames = 12;
+ const size_t kBlockSize = 4;
+ const size_t kChunkSize = 6;
+ const size_t kShiftAmount = 3;
const float kInput[kNumInputChannels][kNumFrames] = {
{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1},
@@ -227,12 +228,12 @@ TEST_F(BlockerTest, TestBlockerMutuallyPrimeShiftAndBlockSize) {
}
TEST_F(BlockerTest, TestBlockerNoOverlap) {
- const int kNumInputChannels = 3;
- const int kNumOutputChannels = 2;
- const int kNumFrames = 12;
- const int kBlockSize = 4;
- const int kChunkSize = 4;
- const int kShiftAmount = 4;
+ const size_t kNumInputChannels = 3;
+ const size_t kNumOutputChannels = 2;
+ const size_t kNumFrames = 12;
+ const size_t kBlockSize = 4;
+ const size_t kChunkSize = 4;
+ const size_t kShiftAmount = 4;
const float kInput[kNumInputChannels][kNumFrames] = {
{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1},
@@ -280,21 +281,21 @@ TEST_F(BlockerTest, TestBlockerNoOverlap) {
}
TEST_F(BlockerTest, InitialDelaysAreMinimum) {
- const int kNumInputChannels = 3;
- const int kNumOutputChannels = 2;
- const int kNumFrames = 1280;
- const int kChunkSize[] =
+ const size_t kNumInputChannels = 3;
+ const size_t kNumOutputChannels = 2;
+ const size_t kNumFrames = 1280;
+ const size_t kChunkSize[] =
{80, 80, 80, 80, 80, 80, 160, 160, 160, 160, 160, 160};
- const int kBlockSize[] =
+ const size_t kBlockSize[] =
{64, 64, 64, 128, 128, 128, 128, 128, 128, 256, 256, 256};
- const int kShiftAmount[] =
+ const size_t kShiftAmount[] =
{16, 32, 64, 32, 64, 128, 32, 64, 128, 64, 128, 256};
- const int kInitialDelay[] =
+ const size_t kInitialDelay[] =
{48, 48, 48, 112, 112, 112, 96, 96, 96, 224, 224, 224};
float input[kNumInputChannels][kNumFrames];
- for (int i = 0; i < kNumInputChannels; ++i) {
- for (int j = 0; j < kNumFrames; ++j) {
+ for (size_t i = 0; i < kNumInputChannels; ++i) {
+ for (size_t j = 0; j < kNumFrames; ++j) {
input[i][j] = i + 1;
}
}
@@ -305,9 +306,9 @@ TEST_F(BlockerTest, InitialDelaysAreMinimum) {
CopyBlockerCallback callback;
- for (size_t i = 0; i < (sizeof(kChunkSize) / sizeof(*kChunkSize)); ++i) {
+ for (size_t i = 0; i < arraysize(kChunkSize); ++i) {
rtc::scoped_ptr<float[]> window(new float[kBlockSize[i]]);
- for (int j = 0; j < kBlockSize[i]; ++j) {
+ for (size_t j = 0; j < kBlockSize[i]; ++j) {
window[j] = 1.f;
}
diff --git a/webrtc/common_audio/channel_buffer.cc b/webrtc/common_audio/channel_buffer.cc
index d3dc7c04f7..44520c6100 100644
--- a/webrtc/common_audio/channel_buffer.cc
+++ b/webrtc/common_audio/channel_buffer.cc
@@ -13,7 +13,7 @@
namespace webrtc {
IFChannelBuffer::IFChannelBuffer(size_t num_frames,
- int num_channels,
+ size_t num_channels,
size_t num_bands)
: ivalid_(true),
ibuf_(num_frames, num_channels, num_bands),
@@ -47,7 +47,7 @@ void IFChannelBuffer::RefreshF() const {
assert(ivalid_);
const int16_t* const* int_channels = ibuf_.channels();
float* const* float_channels = fbuf_.channels();
- for (int i = 0; i < ibuf_.num_channels(); ++i) {
+ for (size_t i = 0; i < ibuf_.num_channels(); ++i) {
for (size_t j = 0; j < ibuf_.num_frames(); ++j) {
float_channels[i][j] = int_channels[i][j];
}
@@ -61,7 +61,7 @@ void IFChannelBuffer::RefreshI() const {
assert(fvalid_);
int16_t* const* int_channels = ibuf_.channels();
const float* const* float_channels = fbuf_.channels();
- for (int i = 0; i < ibuf_.num_channels(); ++i) {
+ for (size_t i = 0; i < ibuf_.num_channels(); ++i) {
FloatS16ToS16(float_channels[i],
ibuf_.num_frames(),
int_channels[i]);
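RefreshF() and RefreshI() keep the int16 and float views mirrored, converting lazily only when the other representation is requested. FloatS16ToS16() is declared in audio_util.h; a plausible scalar equivalent of what it applies element-wise, assuming the usual round-and-saturate semantics on the int16 scale:

#include <algorithm>
#include <cstdint>

// Assumed behavior of the FloatS16ToS16() conversion used above: the float
// samples already live on the int16 scale ([-32768.0, 32767.0]), so each
// sample only needs to be clamped and rounded away from zero.
static int16_t FloatS16SampleToS16(float v) {
  v = std::min(v, 32767.f);
  v = std::max(v, -32768.f);
  return static_cast<int16_t>(v > 0 ? v + 0.5f : v - 0.5f);
}

The name FloatS16SampleToS16 is ours; the real function in audio_util.h operates on whole channels, as the three-argument call in RefreshI() shows.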
diff --git a/webrtc/common_audio/channel_buffer.h b/webrtc/common_audio/channel_buffer.h
index 6050090876..d9069163fa 100644
--- a/webrtc/common_audio/channel_buffer.h
+++ b/webrtc/common_audio/channel_buffer.h
@@ -40,7 +40,7 @@ template <typename T>
class ChannelBuffer {
public:
ChannelBuffer(size_t num_frames,
- int num_channels,
+ size_t num_channels,
size_t num_bands = 1)
: data_(new T[num_frames * num_channels]()),
channels_(new T*[num_channels * num_bands]),
@@ -49,7 +49,7 @@ class ChannelBuffer {
num_frames_per_band_(num_frames / num_bands),
num_channels_(num_channels),
num_bands_(num_bands) {
- for (int i = 0; i < num_channels_; ++i) {
+ for (size_t i = 0; i < num_channels_; ++i) {
for (size_t j = 0; j < num_bands_; ++j) {
channels_[j * num_channels_ + i] =
&data_[i * num_frames_ + j * num_frames_per_band_];
@@ -90,12 +90,12 @@ class ChannelBuffer {
// 0 <= channel < |num_channels_|
// 0 <= band < |num_bands_|
// 0 <= sample < |num_frames_per_band_|
- const T* const* bands(int channel) const {
+ const T* const* bands(size_t channel) const {
RTC_DCHECK_LT(channel, num_channels_);
- RTC_DCHECK_GE(channel, 0);
+ RTC_DCHECK_GE(channel, 0u);
return &bands_[channel * num_bands_];
}
- T* const* bands(int channel) {
+ T* const* bands(size_t channel) {
const ChannelBuffer<T>* t = this;
return const_cast<T* const*>(t->bands(channel));
}
@@ -104,7 +104,7 @@ class ChannelBuffer {
// Returns |slice| for convenience.
const T* const* Slice(T** slice, size_t start_frame) const {
RTC_DCHECK_LT(start_frame, num_frames_);
- for (int i = 0; i < num_channels_; ++i)
+ for (size_t i = 0; i < num_channels_; ++i)
slice[i] = &channels_[i][start_frame];
return slice;
}
@@ -115,7 +115,7 @@ class ChannelBuffer {
size_t num_frames() const { return num_frames_; }
size_t num_frames_per_band() const { return num_frames_per_band_; }
- int num_channels() const { return num_channels_; }
+ size_t num_channels() const { return num_channels_; }
size_t num_bands() const { return num_bands_; }
size_t size() const {return num_frames_ * num_channels_; }
@@ -130,7 +130,7 @@ class ChannelBuffer {
rtc::scoped_ptr<T* []> bands_;
const size_t num_frames_;
const size_t num_frames_per_band_;
- const int num_channels_;
+ const size_t num_channels_;
const size_t num_bands_;
};
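The constructor above lays the samples out channel-major in one flat allocation while grouping the channel pointers band-major, so the per-channel pointers of one band sit contiguously in channels_. A standalone sketch of the same index math:

#include <cstddef>
#include <cstdio>
#include <vector>

// Mirror of the pointer table built in the ChannelBuffer constructor:
// channels[band * num_channels + channel] points into the flat data array
// at offset channel * num_frames + band * num_frames_per_band.
int main() {
  const size_t num_frames = 8, num_channels = 2, num_bands = 2;
  const size_t num_frames_per_band = num_frames / num_bands;
  std::vector<float> data(num_frames * num_channels);
  std::vector<float*> channels(num_channels * num_bands);
  for (size_t i = 0; i < num_channels; ++i) {
    for (size_t j = 0; j < num_bands; ++j) {
      channels[j * num_channels + i] =
          &data[i * num_frames + j * num_frames_per_band];
    }
  }
  // Channel 1, band 1 starts 1 * 8 + 1 * 4 = 12 floats into the allocation.
  printf("offset = %td\n", channels[1 * num_channels + 1] - data.data());
  return 0;
}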
@@ -142,7 +142,7 @@ class ChannelBuffer {
// fbuf() until the next call to any of the other functions.
class IFChannelBuffer {
public:
- IFChannelBuffer(size_t num_frames, int num_channels, size_t num_bands = 1);
+ IFChannelBuffer(size_t num_frames, size_t num_channels, size_t num_bands = 1);
ChannelBuffer<int16_t>* ibuf();
ChannelBuffer<float>* fbuf();
@@ -151,7 +151,7 @@ class IFChannelBuffer {
size_t num_frames() const { return ibuf_.num_frames(); }
size_t num_frames_per_band() const { return ibuf_.num_frames_per_band(); }
- int num_channels() const { return ibuf_.num_channels(); }
+ size_t num_channels() const { return ibuf_.num_channels(); }
size_t num_bands() const { return ibuf_.num_bands(); }
private:
diff --git a/webrtc/common_audio/common_audio.gyp b/webrtc/common_audio/common_audio.gyp
index 884a8afcf8..f0a6fc9473 100644
--- a/webrtc/common_audio/common_audio.gyp
+++ b/webrtc/common_audio/common_audio.gyp
@@ -101,6 +101,7 @@
'signal_processing/vector_scaling_operations.c',
'sparse_fir_filter.cc',
'sparse_fir_filter.h',
+ 'swap_queue.h',
'vad/include/vad.h',
'vad/include/webrtc_vad.h',
'vad/vad.cc',
@@ -227,9 +228,10 @@
},
], # targets
}],
- ['include_tests==1', {
+ ['include_tests==1 and OS!="ios"', {
'targets' : [
{
+ # Does not compile on iOS: webrtc:4755.
'target_name': 'common_audio_unittests',
'type': '<(gtest_target_type)',
'dependencies': [
@@ -256,6 +258,7 @@
'signal_processing/real_fft_unittest.cc',
'signal_processing/signal_processing_unittest.cc',
'sparse_fir_filter_unittest.cc',
+ 'swap_queue_unittest.cc',
'vad/vad_core_unittest.cc',
'vad/vad_filterbank_unittest.cc',
'vad/vad_gmm_unittest.cc',
diff --git a/webrtc/common_audio/include/audio_util.h b/webrtc/common_audio/include/audio_util.h
index 2c0028ce90..55dfc06a31 100644
--- a/webrtc/common_audio/include/audio_util.h
+++ b/webrtc/common_audio/include/audio_util.h
@@ -87,11 +87,11 @@ void CopyAudioIfNeeded(const T* const* src,
template <typename T>
void Deinterleave(const T* interleaved,
size_t samples_per_channel,
- int num_channels,
+ size_t num_channels,
T* const* deinterleaved) {
- for (int i = 0; i < num_channels; ++i) {
+ for (size_t i = 0; i < num_channels; ++i) {
T* channel = deinterleaved[i];
- int interleaved_idx = i;
+ size_t interleaved_idx = i;
for (size_t j = 0; j < samples_per_channel; ++j) {
channel[j] = interleaved[interleaved_idx];
interleaved_idx += num_channels;
@@ -105,11 +105,11 @@ void Deinterleave(const T* interleaved,
template <typename T>
void Interleave(const T* const* deinterleaved,
size_t samples_per_channel,
- int num_channels,
+ size_t num_channels,
T* interleaved) {
- for (int i = 0; i < num_channels; ++i) {
+ for (size_t i = 0; i < num_channels; ++i) {
const T* channel = deinterleaved[i];
- int interleaved_idx = i;
+ size_t interleaved_idx = i;
for (size_t j = 0; j < samples_per_channel; ++j) {
interleaved[interleaved_idx] = channel[j];
interleaved_idx += num_channels;
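Both templates walk the interleaved buffer with a stride of num_channels, starting at the channel's own offset; the size_t change just keeps the index type consistent with the loop bounds. A self-contained run of the same logic on a stereo buffer:

#include <cstddef>
#include <cstdio>

// Copy of the Deinterleave() loop above, reproduced only to show the index
// math: channel i occupies positions i, i + num_channels, i + 2 * num_channels, ...
template <typename T>
void DeinterleaveSketch(const T* interleaved, size_t samples_per_channel,
                        size_t num_channels, T* const* deinterleaved) {
  for (size_t i = 0; i < num_channels; ++i) {
    T* channel = deinterleaved[i];
    size_t interleaved_idx = i;
    for (size_t j = 0; j < samples_per_channel; ++j) {
      channel[j] = interleaved[interleaved_idx];
      interleaved_idx += num_channels;
    }
  }
}

int main() {
  const float interleaved[] = {0.f, 100.f, 1.f, 101.f, 2.f, 102.f};  // L R L R L R
  float left[3], right[3];
  float* channels[] = {left, right};
  DeinterleaveSketch(interleaved, 3, 2, channels);
  printf("L: %g %g %g  R: %g %g %g\n", left[0], left[1], left[2],
         right[0], right[1], right[2]);  // L: 0 1 2  R: 100 101 102
  return 0;
}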
diff --git a/webrtc/common_audio/lapped_transform.cc b/webrtc/common_audio/lapped_transform.cc
index c01f1d9d8c..5ab1db1b25 100644
--- a/webrtc/common_audio/lapped_transform.cc
+++ b/webrtc/common_audio/lapped_transform.cc
@@ -21,14 +21,14 @@ namespace webrtc {
void LappedTransform::BlockThunk::ProcessBlock(const float* const* input,
size_t num_frames,
- int num_input_channels,
- int num_output_channels,
+ size_t num_input_channels,
+ size_t num_output_channels,
float* const* output) {
RTC_CHECK_EQ(num_input_channels, parent_->num_in_channels_);
RTC_CHECK_EQ(num_output_channels, parent_->num_out_channels_);
RTC_CHECK_EQ(parent_->block_length_, num_frames);
- for (int i = 0; i < num_input_channels; ++i) {
+ for (size_t i = 0; i < num_input_channels; ++i) {
memcpy(parent_->real_buf_.Row(i), input[i],
num_frames * sizeof(*input[0]));
parent_->fft_->Forward(parent_->real_buf_.Row(i),
@@ -44,7 +44,7 @@ void LappedTransform::BlockThunk::ProcessBlock(const float* const* input,
num_output_channels,
parent_->cplx_post_.Array());
- for (int i = 0; i < num_output_channels; ++i) {
+ for (size_t i = 0; i < num_output_channels; ++i) {
parent_->fft_->Inverse(parent_->cplx_post_.Row(i),
parent_->real_buf_.Row(i));
memcpy(output[i], parent_->real_buf_.Row(i),
@@ -52,8 +52,8 @@ void LappedTransform::BlockThunk::ProcessBlock(const float* const* input,
}
}
-LappedTransform::LappedTransform(int num_in_channels,
- int num_out_channels,
+LappedTransform::LappedTransform(size_t num_in_channels,
+ size_t num_out_channels,
size_t chunk_length,
const float* window,
size_t block_length,
diff --git a/webrtc/common_audio/lapped_transform.h b/webrtc/common_audio/lapped_transform.h
index 21e10e3911..1373ca10e1 100644
--- a/webrtc/common_audio/lapped_transform.h
+++ b/webrtc/common_audio/lapped_transform.h
@@ -35,8 +35,8 @@ class LappedTransform {
virtual ~Callback() {}
virtual void ProcessAudioBlock(const std::complex<float>* const* in_block,
- int num_in_channels, size_t frames,
- int num_out_channels,
+ size_t num_in_channels, size_t frames,
+ size_t num_out_channels,
std::complex<float>* const* out_block) = 0;
};
@@ -46,8 +46,8 @@ class LappedTransform {
// |block_length| defines the length of a block, in samples.
// |shift_amount| is in samples. |callback| is the caller-owned audio
// processing function called for each block of the input chunk.
- LappedTransform(int num_in_channels,
- int num_out_channels,
+ LappedTransform(size_t num_in_channels,
+ size_t num_out_channels,
size_t chunk_length,
const float* window,
size_t block_length,
@@ -75,7 +75,7 @@ class LappedTransform {
// in_chunk.
//
// Returns the same num_in_channels passed to the LappedTransform constructor.
- int num_in_channels() const { return num_in_channels_; }
+ size_t num_in_channels() const { return num_in_channels_; }
// Get the number of output channels.
//
@@ -84,7 +84,7 @@ class LappedTransform {
//
// Returns the same num_out_channels passed to the LappedTransform
// constructor.
- int num_out_channels() const { return num_out_channels_; }
+ size_t num_out_channels() const { return num_out_channels_; }
private:
// Internal middleware callback, given to the blocker. Transforms each block
@@ -93,16 +93,18 @@ class LappedTransform {
public:
explicit BlockThunk(LappedTransform* parent) : parent_(parent) {}
- virtual void ProcessBlock(const float* const* input, size_t num_frames,
- int num_input_channels, int num_output_channels,
+ virtual void ProcessBlock(const float* const* input,
+ size_t num_frames,
+ size_t num_input_channels,
+ size_t num_output_channels,
float* const* output);
private:
LappedTransform* const parent_;
} blocker_callback_;
- const int num_in_channels_;
- const int num_out_channels_;
+ const size_t num_in_channels_;
+ const size_t num_out_channels_;
const size_t block_length_;
const size_t chunk_length_;
diff --git a/webrtc/common_audio/lapped_transform_unittest.cc b/webrtc/common_audio/lapped_transform_unittest.cc
index f688cc240a..a78488e326 100644
--- a/webrtc/common_audio/lapped_transform_unittest.cc
+++ b/webrtc/common_audio/lapped_transform_unittest.cc
@@ -25,23 +25,23 @@ class NoopCallback : public webrtc::LappedTransform::Callback {
NoopCallback() : block_num_(0) {}
virtual void ProcessAudioBlock(const complex<float>* const* in_block,
- int in_channels,
+ size_t in_channels,
size_t frames,
- int out_channels,
+ size_t out_channels,
complex<float>* const* out_block) {
RTC_CHECK_EQ(in_channels, out_channels);
- for (int i = 0; i < out_channels; ++i) {
+ for (size_t i = 0; i < out_channels; ++i) {
memcpy(out_block[i], in_block[i], sizeof(**in_block) * frames);
}
++block_num_;
}
- int block_num() {
+ size_t block_num() {
return block_num_;
}
private:
- int block_num_;
+ size_t block_num_;
};
class FftCheckerCallback : public webrtc::LappedTransform::Callback {
@@ -49,9 +49,9 @@ class FftCheckerCallback : public webrtc::LappedTransform::Callback {
FftCheckerCallback() : block_num_(0) {}
virtual void ProcessAudioBlock(const complex<float>* const* in_block,
- int in_channels,
+ size_t in_channels,
size_t frames,
- int out_channels,
+ size_t out_channels,
complex<float>* const* out_block) {
RTC_CHECK_EQ(in_channels, out_channels);
@@ -69,12 +69,12 @@ class FftCheckerCallback : public webrtc::LappedTransform::Callback {
}
}
- int block_num() {
+ size_t block_num() {
return block_num_;
}
private:
- int block_num_;
+ size_t block_num_;
};
void SetFloatArray(float value, int rows, int cols, float* const* array) {
@@ -90,10 +90,10 @@ void SetFloatArray(float value, int rows, int cols, float* const* array) {
namespace webrtc {
TEST(LappedTransformTest, Windowless) {
- const int kChannels = 3;
- const int kChunkLength = 512;
- const int kBlockLength = 64;
- const int kShiftAmount = 64;
+ const size_t kChannels = 3;
+ const size_t kChunkLength = 512;
+ const size_t kBlockLength = 64;
+ const size_t kShiftAmount = 64;
NoopCallback noop;
// Rectangular window.
@@ -118,8 +118,8 @@ TEST(LappedTransformTest, Windowless) {
trans.ProcessChunk(in_chunk, out_chunk);
- for (int i = 0; i < kChannels; ++i) {
- for (int j = 0; j < kChunkLength; ++j) {
+ for (size_t i = 0; i < kChannels; ++i) {
+ for (size_t j = 0; j < kChunkLength; ++j) {
ASSERT_NEAR(out_chunk[i][j], 2.0f, 1e-5f);
}
}
@@ -128,9 +128,9 @@ TEST(LappedTransformTest, Windowless) {
}
TEST(LappedTransformTest, IdentityProcessor) {
- const int kChunkLength = 512;
- const int kBlockLength = 64;
- const int kShiftAmount = 32;
+ const size_t kChunkLength = 512;
+ const size_t kBlockLength = 64;
+ const size_t kShiftAmount = 32;
NoopCallback noop;
// Identity window for |overlap = block_size / 2|.
@@ -149,7 +149,7 @@ TEST(LappedTransformTest, IdentityProcessor) {
trans.ProcessChunk(&in_chunk, &out_chunk);
- for (int i = 0; i < kChunkLength; ++i) {
+ for (size_t i = 0; i < kChunkLength; ++i) {
ASSERT_NEAR(out_chunk[i],
(i < kBlockLength - kShiftAmount) ? 0.0f : 2.0f,
1e-5f);
@@ -159,8 +159,8 @@ TEST(LappedTransformTest, IdentityProcessor) {
}
TEST(LappedTransformTest, Callbacks) {
- const int kChunkLength = 512;
- const int kBlockLength = 64;
+ const size_t kChunkLength = 512;
+ const size_t kBlockLength = 64;
FftCheckerCallback call;
// Rectangular window.
@@ -183,7 +183,7 @@ TEST(LappedTransformTest, Callbacks) {
}
TEST(LappedTransformTest, chunk_length) {
- const int kBlockLength = 64;
+ const size_t kBlockLength = 64;
FftCheckerCallback call;
const float window[kBlockLength] = {};
diff --git a/webrtc/common_audio/real_fourier.cc b/webrtc/common_audio/real_fourier.cc
index fef3c60c4c..55ec49cba2 100644
--- a/webrtc/common_audio/real_fourier.cc
+++ b/webrtc/common_audio/real_fourier.cc
@@ -19,7 +19,7 @@ namespace webrtc {
using std::complex;
-const int RealFourier::kFftBufferAlignment = 32;
+const size_t RealFourier::kFftBufferAlignment = 32;
rtc::scoped_ptr<RealFourier> RealFourier::Create(int fft_order) {
#if defined(RTC_USE_OPENMAX_DL)
diff --git a/webrtc/common_audio/real_fourier.h b/webrtc/common_audio/real_fourier.h
index ce3bbff679..0be56a58b0 100644
--- a/webrtc/common_audio/real_fourier.h
+++ b/webrtc/common_audio/real_fourier.h
@@ -30,7 +30,7 @@ class RealFourier {
fft_cplx_scoper;
// The alignment required for all input and output buffers, in bytes.
- static const int kFftBufferAlignment;
+ static const size_t kFftBufferAlignment;
// Construct a wrapper instance for the given input order, which must be
// between 1 and kMaxFftOrder, inclusively.
diff --git a/webrtc/common_audio/real_fourier_unittest.cc b/webrtc/common_audio/real_fourier_unittest.cc
index 5c8542138b..eb5880ee8a 100644
--- a/webrtc/common_audio/real_fourier_unittest.cc
+++ b/webrtc/common_audio/real_fourier_unittest.cc
@@ -26,15 +26,15 @@ TEST(RealFourierStaticsTest, AllocatorAlignment) {
RealFourier::fft_real_scoper real;
real = RealFourier::AllocRealBuffer(3);
ASSERT_TRUE(real.get() != nullptr);
- int64_t ptr_value = reinterpret_cast<int64_t>(real.get());
- EXPECT_EQ(0, ptr_value % RealFourier::kFftBufferAlignment);
+ uintptr_t ptr_value = reinterpret_cast<uintptr_t>(real.get());
+ EXPECT_EQ(0u, ptr_value % RealFourier::kFftBufferAlignment);
}
{
RealFourier::fft_cplx_scoper cplx;
cplx = RealFourier::AllocCplxBuffer(3);
ASSERT_TRUE(cplx.get() != nullptr);
- int64_t ptr_value = reinterpret_cast<int64_t>(cplx.get());
- EXPECT_EQ(0, ptr_value % RealFourier::kFftBufferAlignment);
+ uintptr_t ptr_value = reinterpret_cast<uintptr_t>(cplx.get());
+ EXPECT_EQ(0u, ptr_value % RealFourier::kFftBufferAlignment);
}
}
diff --git a/webrtc/common_audio/resampler/include/push_resampler.h b/webrtc/common_audio/resampler/include/push_resampler.h
index b5c0003615..eeda790497 100644
--- a/webrtc/common_audio/resampler/include/push_resampler.h
+++ b/webrtc/common_audio/resampler/include/push_resampler.h
@@ -29,7 +29,7 @@ class PushResampler {
// Must be called whenever the parameters change. Free to be called at any
// time as it is a no-op if parameters have not changed since the last call.
int InitializeIfNeeded(int src_sample_rate_hz, int dst_sample_rate_hz,
- int num_channels);
+ size_t num_channels);
// Returns the total number of samples provided in destination (e.g. 32 kHz,
// 2 channel audio gives 640 samples).
@@ -40,7 +40,7 @@ class PushResampler {
rtc::scoped_ptr<PushSincResampler> sinc_resampler_right_;
int src_sample_rate_hz_;
int dst_sample_rate_hz_;
- int num_channels_;
+ size_t num_channels_;
rtc::scoped_ptr<T[]> src_left_;
rtc::scoped_ptr<T[]> src_right_;
rtc::scoped_ptr<T[]> dst_left_;
diff --git a/webrtc/common_audio/resampler/include/resampler.h b/webrtc/common_audio/resampler/include/resampler.h
index 0d4c1afe4e..e26ac904c0 100644
--- a/webrtc/common_audio/resampler/include/resampler.h
+++ b/webrtc/common_audio/resampler/include/resampler.h
@@ -28,14 +28,14 @@ class Resampler
public:
Resampler();
- Resampler(int inFreq, int outFreq, int num_channels);
+ Resampler(int inFreq, int outFreq, size_t num_channels);
~Resampler();
// Reset all states
- int Reset(int inFreq, int outFreq, int num_channels);
+ int Reset(int inFreq, int outFreq, size_t num_channels);
// Reset all states if any parameter has changed
- int ResetIfNeeded(int inFreq, int outFreq, int num_channels);
+ int ResetIfNeeded(int inFreq, int outFreq, size_t num_channels);
// Resample samplesIn to samplesOut.
int Push(const int16_t* samplesIn, size_t lengthIn, int16_t* samplesOut,
@@ -83,7 +83,7 @@ private:
int my_in_frequency_khz_;
int my_out_frequency_khz_;
ResamplerMode my_mode_;
- int num_channels_;
+ size_t num_channels_;
// Extra instance for stereo
Resampler* slave_left_;
diff --git a/webrtc/common_audio/resampler/push_resampler.cc b/webrtc/common_audio/resampler/push_resampler.cc
index 566acdeaa3..f654e9a397 100644
--- a/webrtc/common_audio/resampler/push_resampler.cc
+++ b/webrtc/common_audio/resampler/push_resampler.cc
@@ -32,7 +32,7 @@ PushResampler<T>::~PushResampler() {
template <typename T>
int PushResampler<T>::InitializeIfNeeded(int src_sample_rate_hz,
int dst_sample_rate_hz,
- int num_channels) {
+ size_t num_channels) {
if (src_sample_rate_hz == src_sample_rate_hz_ &&
dst_sample_rate_hz == dst_sample_rate_hz_ &&
num_channels == num_channels_)
@@ -68,10 +68,8 @@ int PushResampler<T>::InitializeIfNeeded(int src_sample_rate_hz,
template <typename T>
int PushResampler<T>::Resample(const T* src, size_t src_length, T* dst,
size_t dst_capacity) {
- const size_t src_size_10ms =
- static_cast<size_t>(src_sample_rate_hz_ * num_channels_ / 100);
- const size_t dst_size_10ms =
- static_cast<size_t>(dst_sample_rate_hz_ * num_channels_ / 100);
+ const size_t src_size_10ms = src_sample_rate_hz_ * num_channels_ / 100;
+ const size_t dst_size_10ms = dst_sample_rate_hz_ * num_channels_ / 100;
if (src_length != src_size_10ms || dst_capacity < dst_size_10ms)
return -1;
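As a concrete check of the simplified expression: 32 kHz stereo gives src_size_10ms = 32000 * 2 / 100 = 640 samples per 10 ms chunk, matching the "32 kHz, 2 channel audio gives 640 samples" example in push_resampler.h. The static_casts can go because once num_channels_ is a size_t, the usual arithmetic conversions already promote the int sample rate and the whole product to size_t.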
diff --git a/webrtc/common_audio/resampler/resampler.cc b/webrtc/common_audio/resampler/resampler.cc
index c9e7a1fb96..7c690fc47a 100644
--- a/webrtc/common_audio/resampler/resampler.cc
+++ b/webrtc/common_audio/resampler/resampler.cc
@@ -39,7 +39,7 @@ Resampler::Resampler()
slave_right_(nullptr) {
}
-Resampler::Resampler(int inFreq, int outFreq, int num_channels)
+Resampler::Resampler(int inFreq, int outFreq, size_t num_channels)
: Resampler() {
Reset(inFreq, outFreq, num_channels);
}
@@ -76,7 +76,7 @@ Resampler::~Resampler()
}
}
-int Resampler::ResetIfNeeded(int inFreq, int outFreq, int num_channels)
+int Resampler::ResetIfNeeded(int inFreq, int outFreq, size_t num_channels)
{
int tmpInFreq_kHz = inFreq / 1000;
int tmpOutFreq_kHz = outFreq / 1000;
@@ -91,7 +91,7 @@ int Resampler::ResetIfNeeded(int inFreq, int outFreq, int num_channels)
}
}
-int Resampler::Reset(int inFreq, int outFreq, int num_channels)
+int Resampler::Reset(int inFreq, int outFreq, size_t num_channels)
{
if (num_channels != 1 && num_channels != 2) {
return -1;
diff --git a/webrtc/common_audio/signal_processing/real_fft_unittest.cc b/webrtc/common_audio/signal_processing/real_fft_unittest.cc
index 9bd35cd68b..fa98836b9a 100644
--- a/webrtc/common_audio/signal_processing/real_fft_unittest.cc
+++ b/webrtc/common_audio/signal_processing/real_fft_unittest.cc
@@ -10,7 +10,6 @@
#include "webrtc/common_audio/signal_processing/include/real_fft.h"
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
#include "webrtc/typedefs.h"
#include "testing/gtest/include/gtest/gtest.h"
diff --git a/webrtc/common_audio/swap_queue.h b/webrtc/common_audio/swap_queue.h
new file mode 100644
index 0000000000..d8bb5c024e
--- /dev/null
+++ b/webrtc/common_audio/swap_queue.h
@@ -0,0 +1,210 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_COMMON_AUDIO_SWAP_QUEUE_H_
+#define WEBRTC_COMMON_AUDIO_SWAP_QUEUE_H_
+
+#include <algorithm>
+#include <utility>
+#include <vector>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/criticalsection.h"
+
+namespace webrtc {
+
+namespace internal {
+
+// (Internal; please don't use outside this file.)
+template <typename T>
+bool NoopSwapQueueItemVerifierFunction(const T&) {
+ return true;
+}
+
+} // namespace internal
+
+// Functor to use when supplying a verifier function for the queue.
+template <typename T,
+ bool (*QueueItemVerifierFunction)(const T&) =
+ internal::NoopSwapQueueItemVerifierFunction>
+class SwapQueueItemVerifier {
+ public:
+ bool operator()(const T& t) const { return QueueItemVerifierFunction(t); }
+};
+
+// This class is a fixed-size queue. A producer calls Insert() to insert
+// an element of type T at the back of the queue, and a consumer calls
+// Remove() to remove an element from the front of the queue. It's safe
+// for the producer(s) and the consumer(s) to access the queue
+// concurrently, from different threads.
+//
+// To avoid the construction, copying, and destruction of Ts that a naive
+// queue implementation would require, for each "full" T passed from
+// producer to consumer, SwapQueue<T> passes an "empty" T in the other
+// direction (an "empty" T is one that contains nothing of value for the
+// consumer). This bidirectional movement is implemented with swap().
+//
+// // Create queue:
+// Bottle proto(568); // Prepare an empty Bottle. Heap allocates space for
+// // 568 ml.
+// SwapQueue<Bottle> q(N, proto); // Init queue with N copies of proto.
+// // Each copy allocates on the heap.
+// // Producer pseudo-code:
+// Bottle b(568); // Prepare an empty Bottle. Heap allocates space for 568 ml.
+// loop {
+// b.Fill(amount); // Where amount <= 568 ml.
+// q.Insert(&b); // Swap our full Bottle for an empty one from q.
+// }
+//
+// // Consumer pseudo-code:
+// Bottle b(568); // Prepare an empty Bottle. Heap allocates space for 568 ml.
+// loop {
+// q.Remove(&b); // Swap our empty Bottle for the next-in-line full Bottle.
+// Drink(&b);
+// }
+//
+// For a well-behaved Bottle class, there are no allocations in the
+// producer, since it just fills an empty Bottle that's already large
+// enough; no deallocations in the consumer, since it returns each empty
+// Bottle to the queue after having drunk it; and no copies along the
+// way, since the queue uses swap() everywhere to move full Bottles in
+// one direction and empty ones in the other.
+template <typename T, typename QueueItemVerifier = SwapQueueItemVerifier<T>>
+class SwapQueue {
+ public:
+ // Creates a queue of size |size| and fills it with default-constructed Ts.
+ explicit SwapQueue(size_t size) : queue_(size) {
+ RTC_DCHECK(VerifyQueueSlots());
+ }
+
+ // Same as above and accepts an item verification functor.
+ SwapQueue(size_t size, const QueueItemVerifier& queue_item_verifier)
+ : queue_item_verifier_(queue_item_verifier), queue_(size) {
+ RTC_DCHECK(VerifyQueueSlots());
+ }
+
+ // Creates a queue of size |size| and fills it with copies of |prototype|.
+ SwapQueue(size_t size, const T& prototype) : queue_(size, prototype) {
+ RTC_DCHECK(VerifyQueueSlots());
+ }
+
+ // Same as above and accepts an item verification functor.
+ SwapQueue(size_t size,
+ const T& prototype,
+ const QueueItemVerifier& queue_item_verifier)
+ : queue_item_verifier_(queue_item_verifier), queue_(size, prototype) {
+ RTC_DCHECK(VerifyQueueSlots());
+ }
+
+ // Resets the queue to have zero content while maintaining the queue size.
+ void Clear() {
+ rtc::CritScope cs(&crit_queue_);
+ next_write_index_ = 0;
+ next_read_index_ = 0;
+ num_elements_ = 0;
+ }
+
+ // Inserts a "full" T at the back of the queue by swapping *input with an
+ // "empty" T from the queue.
+ // Returns true if the item was inserted or false if not (the queue was full).
+ // When specified, the T given in *input must pass the ItemVerifier() test.
+ // The contents of *input after the call are then also guaranteed to pass the
+ // ItemVerifier() test.
+ bool Insert(T* input) WARN_UNUSED_RESULT {
+ RTC_DCHECK(input);
+
+ rtc::CritScope cs(&crit_queue_);
+
+ RTC_DCHECK(queue_item_verifier_(*input));
+
+ if (num_elements_ == queue_.size()) {
+ return false;
+ }
+
+ using std::swap;
+ swap(*input, queue_[next_write_index_]);
+
+ ++next_write_index_;
+ if (next_write_index_ == queue_.size()) {
+ next_write_index_ = 0;
+ }
+
+ ++num_elements_;
+
+ RTC_DCHECK_LT(next_write_index_, queue_.size());
+ RTC_DCHECK_LE(num_elements_, queue_.size());
+
+ return true;
+ }
+
+ // Removes the frontmost "full" T from the queue by swapping it with
+ // the "empty" T in *output.
+ // Returns true if an item could be removed or false if not (the queue was
+ // empty). When specified, the T given in *output must pass the ItemVerifier()
+ // test and the contents of *output after the call are then also guaranteed to
+ // pass the ItemVerifier() test.
+ bool Remove(T* output) WARN_UNUSED_RESULT {
+ RTC_DCHECK(output);
+
+ rtc::CritScope cs(&crit_queue_);
+
+ RTC_DCHECK(queue_item_verifier_(*output));
+
+ if (num_elements_ == 0) {
+ return false;
+ }
+
+ using std::swap;
+ swap(*output, queue_[next_read_index_]);
+
+ ++next_read_index_;
+ if (next_read_index_ == queue_.size()) {
+ next_read_index_ = 0;
+ }
+
+ --num_elements_;
+
+ RTC_DCHECK_LT(next_read_index_, queue_.size());
+ RTC_DCHECK_LE(num_elements_, queue_.size());
+
+ return true;
+ }
+
+ private:
+ // Verify that the queue slots comply with the ItemVerifier test.
+ bool VerifyQueueSlots() {
+ rtc::CritScope cs(&crit_queue_);
+ for (const auto& v : queue_) {
+ RTC_DCHECK(queue_item_verifier_(v));
+ }
+ return true;
+ }
+
+ rtc::CriticalSection crit_queue_;
+
+ // TODO(peah): Change this to use std::function() once we can use C++11 std
+ // lib.
+ QueueItemVerifier queue_item_verifier_ GUARDED_BY(crit_queue_);
+
+ // (next_read_index_ + num_elements_) % queue_.size() =
+ // next_write_index_
+ size_t next_write_index_ GUARDED_BY(crit_queue_) = 0;
+ size_t next_read_index_ GUARDED_BY(crit_queue_) = 0;
+ size_t num_elements_ GUARDED_BY(crit_queue_) = 0;
+
+ // queue_.size() is constant.
+ std::vector<T> queue_ GUARDED_BY(crit_queue_);
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(SwapQueue);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_COMMON_AUDIO_SWAP_QUEUE_H_
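To make the Bottle pseudo-code above concrete, here is a hedged two-thread sketch against the interface as added in this file. It uses std::thread purely for brevity (the surrounding codebase still avoids the C++11 standard library, per the TODO above), and the spin loops stand in for whatever scheduling a real producer and consumer would use:

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <thread>
#include <vector>

#include "webrtc/common_audio/swap_queue.h"

int main() {
  const size_t kFrameLength = 160;
  // Priming the queue with ten full-size vectors means no later allocation:
  // every Insert()/Remove() swaps buffers instead of copying samples.
  webrtc::SwapQueue<std::vector<int16_t>> queue(
      10, std::vector<int16_t>(kFrameLength));

  std::thread producer([&] {
    std::vector<int16_t> frame(kFrameLength);
    for (int n = 0; n < 100; ++n) {
      std::fill(frame.begin(), frame.end(), static_cast<int16_t>(n));
      while (!queue.Insert(&frame)) {}  // Queue full; wait for the consumer.
    }
  });

  std::vector<int16_t> frame(kFrameLength);
  int frames_consumed = 0;
  while (frames_consumed < 100) {
    if (queue.Remove(&frame)) {
      ++frames_consumed;  // frame now holds the oldest produced buffer.
    }
  }
  producer.join();
  printf("consumed %d frames\n", frames_consumed);
  return 0;
}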
diff --git a/webrtc/common_audio/swap_queue_unittest.cc b/webrtc/common_audio/swap_queue_unittest.cc
new file mode 100644
index 0000000000..104e494bc6
--- /dev/null
+++ b/webrtc/common_audio/swap_queue_unittest.cc
@@ -0,0 +1,225 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/common_audio/swap_queue.h"
+
+#include <vector>
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace webrtc {
+
+namespace {
+
+// Test parameter for the basic sample-based SwapQueue tests.
+const size_t kChunkSize = 3;
+
+// Queue item verification function for the vector test.
+bool LengthVerifierFunction(const std::vector<int>& v) {
+ return v.size() == kChunkSize;
+}
+
+// Queue item verifier for the vector test.
+class LengthVerifierFunctor {
+ public:
+ explicit LengthVerifierFunctor(size_t length) : length_(length) {}
+
+ bool operator()(const std::vector<int>& v) const {
+ return v.size() == length_;
+ }
+
+ private:
+ size_t length_;
+};
+
+} // anonymous namespace
+
+TEST(SwapQueueTest, BasicOperation) {
+ std::vector<int> i(kChunkSize, 0);
+ SwapQueue<std::vector<int>> queue(2, i);
+
+ EXPECT_TRUE(queue.Insert(&i));
+ EXPECT_EQ(i.size(), kChunkSize);
+ EXPECT_TRUE(queue.Insert(&i));
+ EXPECT_EQ(i.size(), kChunkSize);
+ EXPECT_TRUE(queue.Remove(&i));
+ EXPECT_EQ(i.size(), kChunkSize);
+ EXPECT_TRUE(queue.Remove(&i));
+ EXPECT_EQ(i.size(), kChunkSize);
+}
+
+TEST(SwapQueueTest, FullQueue) {
+ SwapQueue<int> queue(2);
+
+ // Fill the queue.
+ int i = 0;
+ EXPECT_TRUE(queue.Insert(&i));
+ i = 1;
+ EXPECT_TRUE(queue.Insert(&i));
+
+ // Ensure that the value is not swapped when doing an Insert
+ // on a full queue.
+ i = 2;
+ EXPECT_FALSE(queue.Insert(&i));
+ EXPECT_EQ(i, 2);
+
+ // Ensure that the Insert didn't overwrite anything in the queue.
+ EXPECT_TRUE(queue.Remove(&i));
+ EXPECT_EQ(i, 0);
+ EXPECT_TRUE(queue.Remove(&i));
+ EXPECT_EQ(i, 1);
+}
+
+TEST(SwapQueueTest, EmptyQueue) {
+ SwapQueue<int> queue(2);
+ int i = 0;
+ EXPECT_FALSE(queue.Remove(&i));
+ EXPECT_TRUE(queue.Insert(&i));
+ EXPECT_TRUE(queue.Remove(&i));
+ EXPECT_FALSE(queue.Remove(&i));
+}
+
+TEST(SwapQueueTest, Clear) {
+ SwapQueue<int> queue(2);
+ int i = 0;
+
+ // Fill the queue.
+ EXPECT_TRUE(queue.Insert(&i));
+ EXPECT_TRUE(queue.Insert(&i));
+
+ // Ensure full queue.
+ EXPECT_FALSE(queue.Insert(&i));
+
+ // Empty the queue.
+ queue.Clear();
+
+ // Ensure that the queue is empty.
+ EXPECT_FALSE(queue.Remove(&i));
+
+ // Ensure that the queue is no longer full.
+ EXPECT_TRUE(queue.Insert(&i));
+}
+
+TEST(SwapQueueTest, SuccessfulItemVerifyFunction) {
+ std::vector<int> template_element(kChunkSize);
+ SwapQueue<std::vector<int>,
+ SwapQueueItemVerifier<std::vector<int>, LengthVerifierFunction>>
+ queue(2, template_element);
+ std::vector<int> valid_chunk(kChunkSize, 0);
+
+ EXPECT_TRUE(queue.Insert(&valid_chunk));
+ EXPECT_EQ(valid_chunk.size(), kChunkSize);
+ EXPECT_TRUE(queue.Remove(&valid_chunk));
+ EXPECT_EQ(valid_chunk.size(), kChunkSize);
+}
+
+TEST(SwapQueueTest, SuccessfulItemVerifyFunctor) {
+ std::vector<int> template_element(kChunkSize);
+ LengthVerifierFunctor verifier(kChunkSize);
+ SwapQueue<std::vector<int>, LengthVerifierFunctor> queue(2, template_element,
+ verifier);
+ std::vector<int> valid_chunk(kChunkSize, 0);
+
+ EXPECT_TRUE(queue.Insert(&valid_chunk));
+ EXPECT_EQ(valid_chunk.size(), kChunkSize);
+ EXPECT_TRUE(queue.Remove(&valid_chunk));
+ EXPECT_EQ(valid_chunk.size(), kChunkSize);
+}
+
+#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
+TEST(SwapQueueTest, UnsuccessfulItemVerifyFunctor) {
+ // Queue item verifier for the test.
+ auto minus_2_verifier = [](const int& i) { return i > -2; };
+ SwapQueue<int, decltype(minus_2_verifier)> queue(2, minus_2_verifier);
+
+ int valid_value = 1;
+ int invalid_value = -4;
+ EXPECT_TRUE(queue.Insert(&valid_value));
+ EXPECT_TRUE(queue.Remove(&valid_value));
+ bool result;
+ EXPECT_DEATH(result = queue.Insert(&invalid_value), "");
+}
+
+TEST(SwapQueueTest, UnSuccessfulItemVerifyInsert) {
+ std::vector<int> template_element(kChunkSize);
+ SwapQueue<std::vector<int>,
+ SwapQueueItemVerifier<std::vector<int>, &LengthVerifierFunction>>
+ queue(2, template_element);
+ std::vector<int> invalid_chunk(kChunkSize - 1, 0);
+ bool result;
+ EXPECT_DEATH(result = queue.Insert(&invalid_chunk), "");
+}
+
+TEST(SwapQueueTest, UnSuccessfulItemVerifyRemove) {
+ std::vector<int> template_element(kChunkSize);
+ SwapQueue<std::vector<int>,
+ SwapQueueItemVerifier<std::vector<int>, &LengthVerifierFunction>>
+ queue(2, template_element);
+ std::vector<int> invalid_chunk(kChunkSize - 1, 0);
+ std::vector<int> valid_chunk(kChunkSize, 0);
+ EXPECT_TRUE(queue.Insert(&valid_chunk));
+ EXPECT_EQ(valid_chunk.size(), kChunkSize);
+ bool result;
+ EXPECT_DEATH(result = queue.Remove(&invalid_chunk), "");
+}
+#endif
+
+TEST(SwapQueueTest, VectorContentTest) {
+ const size_t kQueueSize = 10;
+ const size_t kFrameLength = 160;
+ const size_t kDataLength = kQueueSize * kFrameLength;
+ std::vector<int16_t> buffer_reader(kFrameLength, 0);
+ std::vector<int16_t> buffer_writer(kFrameLength, 0);
+ SwapQueue<std::vector<int16_t>> queue(kQueueSize,
+ std::vector<int16_t>(kFrameLength));
+ std::vector<int16_t> samples(kDataLength);
+
+ for (size_t k = 0; k < kDataLength; k++) {
+ samples[k] = k % 9;
+ }
+
+ for (size_t k = 0; k < kQueueSize; k++) {
+ buffer_writer.clear();
+ buffer_writer.insert(buffer_writer.end(), &samples[0] + k * kFrameLength,
+ &samples[0] + (k + 1) * kFrameLength);
+
+ EXPECT_TRUE(queue.Insert(&buffer_writer));
+ }
+
+ for (size_t k = 0; k < kQueueSize; k++) {
+ EXPECT_TRUE(queue.Remove(&buffer_reader));
+
+ for (size_t j = 0; j < buffer_reader.size(); j++) {
+ EXPECT_EQ(buffer_reader[j], samples[k * kFrameLength + j]);
+ }
+ }
+}
+
+TEST(SwapQueueTest, ZeroSlotQueue) {
+ SwapQueue<int> queue(0);
+ int i = 42;
+ EXPECT_FALSE(queue.Insert(&i));
+ EXPECT_FALSE(queue.Remove(&i));
+ EXPECT_EQ(i, 42);
+}
+
+TEST(SwapQueueTest, OneSlotQueue) {
+ SwapQueue<int> queue(1);
+ int i = 42;
+ EXPECT_TRUE(queue.Insert(&i));
+ i = 43;
+ EXPECT_FALSE(queue.Insert(&i));
+ EXPECT_EQ(i, 43);
+ EXPECT_TRUE(queue.Remove(&i));
+ EXPECT_EQ(i, 42);
+ EXPECT_FALSE(queue.Remove(&i));
+}
+
+} // namespace webrtc
diff --git a/webrtc/common_audio/wav_file.cc b/webrtc/common_audio/wav_file.cc
index 8dae7d6e98..94b7a3c2c0 100644
--- a/webrtc/common_audio/wav_file.cc
+++ b/webrtc/common_audio/wav_file.cc
@@ -13,6 +13,7 @@
#include <algorithm>
#include <cstdio>
#include <limits>
+#include <sstream>
#include "webrtc/base/checks.h"
#include "webrtc/base/safe_conversions.h"
@@ -23,7 +24,7 @@ namespace webrtc {
// We write 16-bit PCM WAV files.
static const WavFormat kWavFormat = kWavFormatPcm;
-static const int kBytesPerSample = 2;
+static const size_t kBytesPerSample = 2;
// Doesn't take ownership of the file handle and won't close it.
class ReadableWavFile : public ReadableWav {
@@ -37,13 +38,21 @@ class ReadableWavFile : public ReadableWav {
FILE* file_;
};
+std::string WavFile::FormatAsString() const {
+ std::ostringstream s;
+ s << "Sample rate: " << sample_rate() << " Hz, Channels: " << num_channels()
+ << ", Duration: "
+ << (1.f * num_samples()) / (num_channels() * sample_rate()) << " s";
+ return s.str();
+}
+
WavReader::WavReader(const std::string& filename)
: file_handle_(fopen(filename.c_str(), "rb")) {
- RTC_CHECK(file_handle_ && "Could not open wav file for reading.");
+ RTC_CHECK(file_handle_) << "Could not open wav file for reading.";
ReadableWavFile readable(file_handle_);
WavFormat format;
- int bytes_per_sample;
+ size_t bytes_per_sample;
RTC_CHECK(ReadWavHeader(&readable, &num_channels_, &sample_rate_, &format,
&bytes_per_sample, &num_samples_));
num_samples_remaining_ = num_samples_;
@@ -60,14 +69,13 @@ size_t WavReader::ReadSamples(size_t num_samples, int16_t* samples) {
#error "Need to convert samples to big-endian when reading from WAV file"
#endif
// There could be metadata after the audio; ensure we don't read it.
- num_samples = std::min(rtc::checked_cast<uint32_t>(num_samples),
- num_samples_remaining_);
+ num_samples = std::min(num_samples, num_samples_remaining_);
const size_t read =
fread(samples, sizeof(*samples), num_samples, file_handle_);
// If we didn't read what was requested, ensure we've reached the EOF.
RTC_CHECK(read == num_samples || feof(file_handle_));
RTC_CHECK_LE(read, num_samples_remaining_);
- num_samples_remaining_ -= rtc::checked_cast<uint32_t>(read);
+ num_samples_remaining_ -= read;
return read;
}
@@ -91,12 +99,12 @@ void WavReader::Close() {
}
WavWriter::WavWriter(const std::string& filename, int sample_rate,
- int num_channels)
+ size_t num_channels)
: sample_rate_(sample_rate),
num_channels_(num_channels),
num_samples_(0),
file_handle_(fopen(filename.c_str(), "wb")) {
- RTC_CHECK(file_handle_ && "Could not open wav file for writing.");
+ RTC_CHECK(file_handle_) << "Could not open wav file for writing.";
RTC_CHECK(CheckWavParameters(num_channels_, sample_rate_, kWavFormat,
kBytesPerSample, num_samples_));
@@ -117,9 +125,8 @@ void WavWriter::WriteSamples(const int16_t* samples, size_t num_samples) {
const size_t written =
fwrite(samples, sizeof(*samples), num_samples, file_handle_);
RTC_CHECK_EQ(num_samples, written);
- num_samples_ += static_cast<uint32_t>(written);
- RTC_CHECK(written <= std::numeric_limits<uint32_t>::max() ||
- num_samples_ >= written); // detect uint32_t overflow
+ num_samples_ += written;
+ RTC_CHECK(num_samples_ >= written); // detect size_t overflow
}
void WavWriter::WriteSamples(const float* samples, size_t num_samples) {
@@ -146,7 +153,7 @@ void WavWriter::Close() {
rtc_WavWriter* rtc_WavOpen(const char* filename,
int sample_rate,
- int num_channels) {
+ size_t num_channels) {
return reinterpret_cast<rtc_WavWriter*>(
new webrtc::WavWriter(filename, sample_rate, num_channels));
}
@@ -165,10 +172,10 @@ int rtc_WavSampleRate(const rtc_WavWriter* wf) {
return reinterpret_cast<const webrtc::WavWriter*>(wf)->sample_rate();
}
-int rtc_WavNumChannels(const rtc_WavWriter* wf) {
+size_t rtc_WavNumChannels(const rtc_WavWriter* wf) {
return reinterpret_cast<const webrtc::WavWriter*>(wf)->num_channels();
}
-uint32_t rtc_WavNumSamples(const rtc_WavWriter* wf) {
+size_t rtc_WavNumSamples(const rtc_WavWriter* wf) {
return reinterpret_cast<const webrtc::WavWriter*>(wf)->num_samples();
}
diff --git a/webrtc/common_audio/wav_file.h b/webrtc/common_audio/wav_file.h
index 2eadd3f775..e656eb8643 100644
--- a/webrtc/common_audio/wav_file.h
+++ b/webrtc/common_audio/wav_file.h
@@ -27,8 +27,11 @@ class WavFile {
virtual ~WavFile() {}
virtual int sample_rate() const = 0;
- virtual int num_channels() const = 0;
- virtual uint32_t num_samples() const = 0;
+ virtual size_t num_channels() const = 0;
+ virtual size_t num_samples() const = 0;
+
+ // Returns a human-readable string containing the audio format.
+ std::string FormatAsString() const;
};
// Simple C++ class for writing 16-bit PCM WAV files. All error handling is
@@ -36,7 +39,7 @@ class WavFile {
class WavWriter final : public WavFile {
public:
// Open a new WAV file for writing.
- WavWriter(const std::string& filename, int sample_rate, int num_channels);
+ WavWriter(const std::string& filename, int sample_rate, size_t num_channels);
// Close the WAV file, after writing its header.
~WavWriter();
@@ -48,14 +51,14 @@ class WavWriter final : public WavFile {
void WriteSamples(const int16_t* samples, size_t num_samples);
int sample_rate() const override { return sample_rate_; }
- int num_channels() const override { return num_channels_; }
- uint32_t num_samples() const override { return num_samples_; }
+ size_t num_channels() const override { return num_channels_; }
+ size_t num_samples() const override { return num_samples_; }
private:
void Close();
const int sample_rate_;
- const int num_channels_;
- uint32_t num_samples_; // Total number of samples written to file.
+ const size_t num_channels_;
+ size_t num_samples_; // Total number of samples written to file.
FILE* file_handle_; // Output file, owned by this class
RTC_DISALLOW_COPY_AND_ASSIGN(WavWriter);
@@ -76,15 +79,15 @@ class WavReader final : public WavFile {
size_t ReadSamples(size_t num_samples, int16_t* samples);
int sample_rate() const override { return sample_rate_; }
- int num_channels() const override { return num_channels_; }
- uint32_t num_samples() const override { return num_samples_; }
+ size_t num_channels() const override { return num_channels_; }
+ size_t num_samples() const override { return num_samples_; }
private:
void Close();
int sample_rate_;
- int num_channels_;
- uint32_t num_samples_; // Total number of samples in the file.
- uint32_t num_samples_remaining_;
+ size_t num_channels_;
+ size_t num_samples_; // Total number of samples in the file.
+ size_t num_samples_remaining_;
FILE* file_handle_; // Input file, owned by this class.
RTC_DISALLOW_COPY_AND_ASSIGN(WavReader);
@@ -99,14 +102,14 @@ extern "C" {
typedef struct rtc_WavWriter rtc_WavWriter;
rtc_WavWriter* rtc_WavOpen(const char* filename,
int sample_rate,
- int num_channels);
+ size_t num_channels);
void rtc_WavClose(rtc_WavWriter* wf);
void rtc_WavWriteSamples(rtc_WavWriter* wf,
const float* samples,
size_t num_samples);
int rtc_WavSampleRate(const rtc_WavWriter* wf);
-int rtc_WavNumChannels(const rtc_WavWriter* wf);
-uint32_t rtc_WavNumSamples(const rtc_WavWriter* wf);
+size_t rtc_WavNumChannels(const rtc_WavWriter* wf);
+size_t rtc_WavNumSamples(const rtc_WavWriter* wf);
#ifdef __cplusplus
} // extern "C"
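A short round-trip sketch against the C++ interface above (the path and sizes are arbitrary). Note that num_samples counts samples across all channels, so 480 samples at 48 kHz stereo is 5 ms of audio, and the new FormatAsString() would describe the reader below as "Sample rate: 48000 Hz, Channels: 2, Duration: 0.005 s":

#include <cstddef>
#include <cstdint>
#include <vector>

#include "webrtc/common_audio/wav_file.h"

int main() {
  const size_t kNumChannels = 2;
  const std::vector<int16_t> samples(480, 0);  // 5 ms of stereo silence.
  {
    webrtc::WavWriter writer("/tmp/roundtrip.wav", 48000, kNumChannels);
    writer.WriteSamples(samples.data(), samples.size());
  }  // WavWriter's destructor closes the file and finalizes the header.

  webrtc::WavReader reader("/tmp/roundtrip.wav");
  std::vector<int16_t> read_back(reader.num_samples());
  reader.ReadSamples(read_back.size(), read_back.data());
  return 0;
}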
diff --git a/webrtc/common_audio/wav_file_unittest.cc b/webrtc/common_audio/wav_file_unittest.cc
index 78b0a34de9..ba1db1c296 100644
--- a/webrtc/common_audio/wav_file_unittest.cc
+++ b/webrtc/common_audio/wav_file_unittest.cc
@@ -26,11 +26,11 @@ static const float kSamples[] = {0.0, 10.0, 4e4, -1e9};
// Write a tiny WAV file with the C++ interface and verify the result.
TEST(WavWriterTest, CPP) {
const std::string outfile = test::OutputPath() + "wavtest1.wav";
- static const uint32_t kNumSamples = 3;
+ static const size_t kNumSamples = 3;
{
WavWriter w(outfile, 14099, 1);
EXPECT_EQ(14099, w.sample_rate());
- EXPECT_EQ(1, w.num_channels());
+ EXPECT_EQ(1u, w.num_channels());
EXPECT_EQ(0u, w.num_samples());
w.WriteSamples(kSamples, kNumSamples);
EXPECT_EQ(kNumSamples, w.num_samples());
@@ -64,10 +64,10 @@ TEST(WavWriterTest, CPP) {
0xff, 0x7f, // third sample: 4e4 (saturated)
kMetadata[0], kMetadata[1],
};
- static const int kContentSize =
+ static const size_t kContentSize =
kWavHeaderSize + kNumSamples * sizeof(int16_t) + sizeof(kMetadata);
static_assert(sizeof(kExpectedContents) == kContentSize, "content size");
- EXPECT_EQ(size_t(kContentSize), test::GetFileSize(outfile));
+ EXPECT_EQ(kContentSize, test::GetFileSize(outfile));
FILE* f = fopen(outfile.c_str(), "rb");
ASSERT_TRUE(f);
uint8_t contents[kContentSize];
@@ -78,7 +78,7 @@ TEST(WavWriterTest, CPP) {
{
WavReader r(outfile);
EXPECT_EQ(14099, r.sample_rate());
- EXPECT_EQ(1, r.num_channels());
+ EXPECT_EQ(1u, r.num_channels());
EXPECT_EQ(kNumSamples, r.num_samples());
static const float kTruncatedSamples[] = {0.0, 10.0, 32767.0};
float samples[kNumSamples];
@@ -93,9 +93,9 @@ TEST(WavWriterTest, C) {
const std::string outfile = test::OutputPath() + "wavtest2.wav";
rtc_WavWriter* w = rtc_WavOpen(outfile.c_str(), 11904, 2);
EXPECT_EQ(11904, rtc_WavSampleRate(w));
- EXPECT_EQ(2, rtc_WavNumChannels(w));
+ EXPECT_EQ(2u, rtc_WavNumChannels(w));
EXPECT_EQ(0u, rtc_WavNumSamples(w));
- static const uint32_t kNumSamples = 4;
+ static const size_t kNumSamples = 4;
rtc_WavWriteSamples(w, &kSamples[0], 2);
EXPECT_EQ(2u, rtc_WavNumSamples(w));
rtc_WavWriteSamples(w, &kSamples[2], kNumSamples - 2);
@@ -120,10 +120,10 @@ TEST(WavWriterTest, C) {
0xff, 0x7f, // third sample: 4e4 (saturated)
0, 0x80, // fourth sample: -1e9 (saturated)
};
- static const int kContentSize =
+ static const size_t kContentSize =
kWavHeaderSize + kNumSamples * sizeof(int16_t);
static_assert(sizeof(kExpectedContents) == kContentSize, "content size");
- EXPECT_EQ(size_t(kContentSize), test::GetFileSize(outfile));
+ EXPECT_EQ(kContentSize, test::GetFileSize(outfile));
FILE* f = fopen(outfile.c_str(), "rb");
ASSERT_TRUE(f);
uint8_t contents[kContentSize];
@@ -136,10 +136,10 @@ TEST(WavWriterTest, C) {
TEST(WavWriterTest, LargeFile) {
std::string outfile = test::OutputPath() + "wavtest3.wav";
static const int kSampleRate = 8000;
- static const int kNumChannels = 2;
- static const uint32_t kNumSamples = 3 * kSampleRate * kNumChannels;
+ static const size_t kNumChannels = 2;
+ static const size_t kNumSamples = 3 * kSampleRate * kNumChannels;
float samples[kNumSamples];
- for (uint32_t i = 0; i < kNumSamples; i += kNumChannels) {
+ for (size_t i = 0; i < kNumSamples; i += kNumChannels) {
// A nice periodic beeping sound.
static const double kToneHz = 440;
const double t = static_cast<double>(i) / (kNumChannels * kSampleRate);
diff --git a/webrtc/common_audio/wav_header.cc b/webrtc/common_audio/wav_header.cc
index 61cfffe62c..402ea17916 100644
--- a/webrtc/common_audio/wav_header.cc
+++ b/webrtc/common_audio/wav_header.cc
@@ -59,20 +59,19 @@ static_assert(sizeof(WavHeader) == kWavHeaderSize, "no padding in header");
} // namespace
-bool CheckWavParameters(int num_channels,
+bool CheckWavParameters(size_t num_channels,
int sample_rate,
WavFormat format,
- int bytes_per_sample,
- uint32_t num_samples) {
+ size_t bytes_per_sample,
+ size_t num_samples) {
// num_channels, sample_rate, and bytes_per_sample must be positive, must fit
// in their respective fields, and their product must fit in the 32-bit
// ByteRate field.
- if (num_channels <= 0 || sample_rate <= 0 || bytes_per_sample <= 0)
+ if (num_channels == 0 || sample_rate <= 0 || bytes_per_sample == 0)
return false;
if (static_cast<uint64_t>(sample_rate) > std::numeric_limits<uint32_t>::max())
return false;
- if (static_cast<uint64_t>(num_channels) >
- std::numeric_limits<uint16_t>::max())
+ if (num_channels > std::numeric_limits<uint16_t>::max())
return false;
if (static_cast<uint64_t>(bytes_per_sample) * 8 >
std::numeric_limits<uint16_t>::max())
@@ -99,10 +98,9 @@ bool CheckWavParameters(int num_channels,
// The number of bytes in the file, not counting the first ChunkHeader, must
// be less than 2^32; otherwise, the ChunkSize field overflows.
- const uint32_t max_samples =
- (std::numeric_limits<uint32_t>::max()
- - (kWavHeaderSize - sizeof(ChunkHeader))) /
- bytes_per_sample;
+ const size_t header_size = kWavHeaderSize - sizeof(ChunkHeader);
+ const size_t max_samples =
+ (std::numeric_limits<uint32_t>::max() - header_size) / bytes_per_sample;
if (num_samples > max_samples)
return false;
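For scale: assuming kWavHeaderSize is the canonical 44 bytes and ChunkHeader is 8, header_size is 36, so with 16-bit samples max_samples = (2^32 - 1 - 36) / 2 = 2,147,483,629. At 48 kHz stereo (96,000 samples per second across both channels) that is roughly 22,370 seconds, a little over six hours of audio, before the 32-bit RIFF ChunkSize field would overflow.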
@@ -132,30 +130,32 @@ static inline std::string ReadFourCC(uint32_t x) {
#error "Write be-to-le conversion functions"
#endif
-static inline uint32_t RiffChunkSize(uint32_t bytes_in_payload) {
- return bytes_in_payload + kWavHeaderSize - sizeof(ChunkHeader);
+static inline uint32_t RiffChunkSize(size_t bytes_in_payload) {
+ return static_cast<uint32_t>(
+ bytes_in_payload + kWavHeaderSize - sizeof(ChunkHeader));
}
-static inline uint32_t ByteRate(int num_channels, int sample_rate,
- int bytes_per_sample) {
- return static_cast<uint32_t>(num_channels) * sample_rate * bytes_per_sample;
+static inline uint32_t ByteRate(size_t num_channels, int sample_rate,
+ size_t bytes_per_sample) {
+ return static_cast<uint32_t>(num_channels * sample_rate * bytes_per_sample);
}
-static inline uint16_t BlockAlign(int num_channels, int bytes_per_sample) {
- return num_channels * bytes_per_sample;
+static inline uint16_t BlockAlign(size_t num_channels,
+ size_t bytes_per_sample) {
+ return static_cast<uint16_t>(num_channels * bytes_per_sample);
}
void WriteWavHeader(uint8_t* buf,
- int num_channels,
+ size_t num_channels,
int sample_rate,
WavFormat format,
- int bytes_per_sample,
- uint32_t num_samples) {
+ size_t bytes_per_sample,
+ size_t num_samples) {
RTC_CHECK(CheckWavParameters(num_channels, sample_rate, format,
bytes_per_sample, num_samples));
WavHeader header;
- const uint32_t bytes_in_payload = bytes_per_sample * num_samples;
+ const size_t bytes_in_payload = bytes_per_sample * num_samples;
WriteFourCC(&header.riff.header.ID, 'R', 'I', 'F', 'F');
WriteLE32(&header.riff.header.Size, RiffChunkSize(bytes_in_payload));
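As a sanity check on the ByteRate() helper above, the standard CD-audio numbers; this sketch just restates the cast-and-multiply from the hunk.

#include <cstddef>
#include <cstdint>

// 2 channels * 44100 Hz * 2 bytes per sample = 176400 bytes/s, well inside
// the 32-bit ByteRate field that CheckWavParameters() guards.
uint32_t CdByteRateSketch() {
  const size_t num_channels = 2;
  const int sample_rate = 44100;
  const size_t bytes_per_sample = 2;
  return static_cast<uint32_t>(num_channels * sample_rate * bytes_per_sample);
}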
@@ -164,15 +164,16 @@ void WriteWavHeader(uint8_t* buf,
WriteFourCC(&header.fmt.header.ID, 'f', 'm', 't', ' ');
WriteLE32(&header.fmt.header.Size, kFmtSubchunkSize);
WriteLE16(&header.fmt.AudioFormat, format);
- WriteLE16(&header.fmt.NumChannels, num_channels);
+ WriteLE16(&header.fmt.NumChannels, static_cast<uint16_t>(num_channels));
WriteLE32(&header.fmt.SampleRate, sample_rate);
WriteLE32(&header.fmt.ByteRate, ByteRate(num_channels, sample_rate,
bytes_per_sample));
WriteLE16(&header.fmt.BlockAlign, BlockAlign(num_channels, bytes_per_sample));
- WriteLE16(&header.fmt.BitsPerSample, 8 * bytes_per_sample);
+ WriteLE16(&header.fmt.BitsPerSample,
+ static_cast<uint16_t>(8 * bytes_per_sample));
WriteFourCC(&header.data.header.ID, 'd', 'a', 't', 'a');
- WriteLE32(&header.data.header.Size, bytes_in_payload);
+ WriteLE32(&header.data.header.Size, static_cast<uint32_t>(bytes_in_payload));
// Do an extra copy rather than writing everything to buf directly, since buf
// might not be correctly aligned.
@@ -180,11 +181,11 @@ void WriteWavHeader(uint8_t* buf,
}
bool ReadWavHeader(ReadableWav* readable,
- int* num_channels,
+ size_t* num_channels,
int* sample_rate,
WavFormat* format,
- int* bytes_per_sample,
- uint32_t* num_samples) {
+ size_t* bytes_per_sample,
+ size_t* num_samples) {
WavHeader header;
if (readable->Read(&header, kWavHeaderSize - sizeof(header.data)) !=
kWavHeaderSize - sizeof(header.data))
@@ -210,8 +211,8 @@ bool ReadWavHeader(ReadableWav* readable,
*num_channels = ReadLE16(header.fmt.NumChannels);
*sample_rate = ReadLE32(header.fmt.SampleRate);
*bytes_per_sample = ReadLE16(header.fmt.BitsPerSample) / 8;
- const uint32_t bytes_in_payload = ReadLE32(header.data.header.Size);
- if (*bytes_per_sample <= 0)
+ const size_t bytes_in_payload = ReadLE32(header.data.header.Size);
+ if (*bytes_per_sample == 0)
return false;
*num_samples = bytes_in_payload / *bytes_per_sample;
diff --git a/webrtc/common_audio/wav_header.h b/webrtc/common_audio/wav_header.h
index 1a0fd7c81d..6844306941 100644
--- a/webrtc/common_audio/wav_header.h
+++ b/webrtc/common_audio/wav_header.h
@@ -32,32 +32,32 @@ enum WavFormat {
};
// Return true if the given parameters will make a well-formed WAV header.
-bool CheckWavParameters(int num_channels,
+bool CheckWavParameters(size_t num_channels,
int sample_rate,
WavFormat format,
- int bytes_per_sample,
- uint32_t num_samples);
+ size_t bytes_per_sample,
+ size_t num_samples);
// Write a kWavHeaderSize bytes long WAV header to buf. The payload that
// follows the header is supposed to have the specified number of interleaved
// channels and contain the specified total number of samples of the specified
// type. CHECKs the input parameters for validity.
void WriteWavHeader(uint8_t* buf,
- int num_channels,
+ size_t num_channels,
int sample_rate,
WavFormat format,
- int bytes_per_sample,
- uint32_t num_samples);
+ size_t bytes_per_sample,
+ size_t num_samples);
// Read a WAV header from an implemented ReadableWav and parse the values into
// the provided output parameters. ReadableWav is used because the header can
// be variably sized. Returns false if the header is invalid.
bool ReadWavHeader(ReadableWav* readable,
- int* num_channels,
+ size_t* num_channels,
int* sample_rate,
WavFormat* format,
- int* bytes_per_sample,
- uint32_t* num_samples);
+ size_t* bytes_per_sample,
+ size_t* num_samples);
} // namespace webrtc
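A minimal usage sketch of the rewritten size_t API; it assumes kWavHeaderSize is the buffer-size constant declared alongside these functions, and omits error handling.

#include <cstddef>
#include <cstdint>
#include "webrtc/common_audio/wav_header.h"

void WriteHeaderSketch() {
  const size_t kNumChannels = 2;
  const int kSampleRate = 48000;
  const size_t kBytesPerSample = 2;      // 16-bit PCM.
  const size_t kNumSamples = 2 * 48000;  // One second, interleaved.
  uint8_t buf[webrtc::kWavHeaderSize];
  // WriteWavHeader() CHECKs its inputs, so untrusted values should go
  // through CheckWavParameters() first.
  if (webrtc::CheckWavParameters(kNumChannels, kSampleRate,
                                 webrtc::kWavFormatPcm, kBytesPerSample,
                                 kNumSamples)) {
    webrtc::WriteWavHeader(buf, kNumChannels, kSampleRate,
                           webrtc::kWavFormatPcm, kBytesPerSample,
                           kNumSamples);
  }
}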
diff --git a/webrtc/common_audio/wav_header_unittest.cc b/webrtc/common_audio/wav_header_unittest.cc
index e03cb303aa..8527939eac 100644
--- a/webrtc/common_audio/wav_header_unittest.cc
+++ b/webrtc/common_audio/wav_header_unittest.cc
@@ -70,7 +70,7 @@ TEST(WavHeaderTest, CheckWavParameters) {
// Try some really stupid values for one parameter at a time.
EXPECT_TRUE(CheckWavParameters(1, 8000, kWavFormatPcm, 1, 0));
EXPECT_FALSE(CheckWavParameters(0, 8000, kWavFormatPcm, 1, 0));
- EXPECT_FALSE(CheckWavParameters(-1, 8000, kWavFormatPcm, 1, 0));
+ EXPECT_FALSE(CheckWavParameters(0x10000, 8000, kWavFormatPcm, 1, 0));
EXPECT_FALSE(CheckWavParameters(1, 0, kWavFormatPcm, 1, 0));
EXPECT_FALSE(CheckWavParameters(1, 8000, WavFormat(0), 1, 0));
EXPECT_FALSE(CheckWavParameters(1, 8000, kWavFormatPcm, 0, 0));
@@ -91,11 +91,11 @@ TEST(WavHeaderTest, CheckWavParameters) {
}
TEST(WavHeaderTest, ReadWavHeaderWithErrors) {
- int num_channels = 0;
+ size_t num_channels = 0;
int sample_rate = 0;
WavFormat format = kWavFormatPcm;
- int bytes_per_sample = 0;
- uint32_t num_samples = 0;
+ size_t bytes_per_sample = 0;
+ size_t num_samples = 0;
// Test a few ways the header can be invalid. We start with the valid header
// used in WriteAndReadWavHeader, and invalidate one field per test. The
@@ -268,19 +268,19 @@ TEST(WavHeaderTest, WriteAndReadWavHeader) {
static_assert(sizeof(kExpectedBuf) == kSize, "buffer size");
EXPECT_EQ(0, memcmp(kExpectedBuf, buf, kSize));
- int num_channels = 0;
+ size_t num_channels = 0;
int sample_rate = 0;
WavFormat format = kWavFormatPcm;
- int bytes_per_sample = 0;
- uint32_t num_samples = 0;
+ size_t bytes_per_sample = 0;
+ size_t num_samples = 0;
ReadableWavBuffer r(buf + 4, sizeof(buf) - 8);
EXPECT_TRUE(
ReadWavHeader(&r, &num_channels, &sample_rate, &format,
&bytes_per_sample, &num_samples));
- EXPECT_EQ(17, num_channels);
+ EXPECT_EQ(17u, num_channels);
EXPECT_EQ(12345, sample_rate);
EXPECT_EQ(kWavFormatALaw, format);
- EXPECT_EQ(1, bytes_per_sample);
+ EXPECT_EQ(1u, bytes_per_sample);
EXPECT_EQ(123457689u, num_samples);
}
@@ -304,19 +304,19 @@ TEST(WavHeaderTest, ReadAtypicalWavHeader) {
0x99, 0xd0, 0x5b, 0x07, // size of payload: 123457689
};
- int num_channels = 0;
+ size_t num_channels = 0;
int sample_rate = 0;
WavFormat format = kWavFormatPcm;
- int bytes_per_sample = 0;
- uint32_t num_samples = 0;
+ size_t bytes_per_sample = 0;
+ size_t num_samples = 0;
ReadableWavBuffer r(kBuf, sizeof(kBuf));
EXPECT_TRUE(
ReadWavHeader(&r, &num_channels, &sample_rate, &format,
&bytes_per_sample, &num_samples));
- EXPECT_EQ(17, num_channels);
+ EXPECT_EQ(17u, num_channels);
EXPECT_EQ(12345, sample_rate);
EXPECT_EQ(kWavFormatALaw, format);
- EXPECT_EQ(1, bytes_per_sample);
+ EXPECT_EQ(1u, bytes_per_sample);
EXPECT_EQ(123457689u, num_samples);
}
diff --git a/webrtc/common_types.h b/webrtc/common_types.h
index 07faf6aefc..444ef928d9 100644
--- a/webrtc/common_types.h
+++ b/webrtc/common_types.h
@@ -291,7 +291,7 @@ struct CodecInst {
char plname[RTP_PAYLOAD_NAME_SIZE];
int plfreq;
int pacsize;
- int channels;
+ size_t channels;
int rate; // bits/sec unlike {start,min,max}Bitrate elsewhere in this file!
bool operator==(const CodecInst& other) const {
@@ -311,12 +311,6 @@ struct CodecInst {
// RTP
enum {kRtpCsrcSize = 15}; // RFC 3550 page 13
-enum RTPDirections
-{
- kRtpIncoming = 0,
- kRtpOutgoing
-};
-
enum PayloadFrequencies
{
kFreq8000Hz = 8000,
@@ -547,6 +541,7 @@ enum RawVideoType
enum { kConfigParameterSize = 128};
enum { kPayloadNameSize = 32};
enum { kMaxSimulcastStreams = 4};
+enum { kMaxSpatialLayers = 5 };
enum { kMaxTemporalStreams = 4};
enum VideoCodecComplexity
@@ -676,6 +671,13 @@ struct SimulcastStream {
}
};
+struct SpatialLayer {
+ int scaling_factor_num;
+ int scaling_factor_den;
+ int target_bitrate_bps;
+ // TODO(ivica): Add max_quantizer and min_quantizer?
+};
+
enum VideoCodecMode {
kRealtimeVideo,
kScreensharing
@@ -702,6 +704,7 @@ struct VideoCodec {
unsigned int qpMax;
unsigned char numberOfSimulcastStreams;
SimulcastStream simulcastStream[kMaxSimulcastStreams];
+ SpatialLayer spatialLayers[kMaxSpatialLayers];
VideoCodecMode mode;
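For illustration only: how the new SpatialLayer fields might compose for a hypothetical two-layer 720p setup. The field names match the struct added above; the layer semantics and bitrate values are assumptions, not part of this patch.

#include "webrtc/common_types.h"

// Lower layer at half resolution (e.g. 640x360 from 1280x720), upper layer
// at full resolution. kMaxSpatialLayers (5) bounds the array.
void ConfigureTwoSpatialLayersSketch(webrtc::VideoCodec* codec) {
  codec->spatialLayers[0].scaling_factor_num = 1;
  codec->spatialLayers[0].scaling_factor_den = 2;
  codec->spatialLayers[0].target_bitrate_bps = 300000;
  codec->spatialLayers[1].scaling_factor_num = 1;
  codec->spatialLayers[1].scaling_factor_den = 1;
  codec->spatialLayers[1].target_bitrate_bps = 1200000;
}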
diff --git a/webrtc/common_video/BUILD.gn b/webrtc/common_video/BUILD.gn
index 473c757a75..4ef968d60f 100644
--- a/webrtc/common_video/BUILD.gn
+++ b/webrtc/common_video/BUILD.gn
@@ -10,7 +10,7 @@ import("../build/webrtc.gni")
config("common_video_config") {
include_dirs = [
- "interface",
+ "include",
"libyuv/include",
]
}
@@ -18,10 +18,10 @@ config("common_video_config") {
source_set("common_video") {
sources = [
"i420_buffer_pool.cc",
+ "include/i420_buffer_pool.h",
+ "include/incoming_video_stream.h",
+ "include/video_frame_buffer.h",
"incoming_video_stream.cc",
- "interface/i420_buffer_pool.h",
- "interface/incoming_video_stream.h",
- "interface/video_frame_buffer.h",
"libyuv/include/scaler.h",
"libyuv/include/webrtc_libyuv.h",
"libyuv/scaler.cc",
diff --git a/webrtc/common_video/common_video.gyp b/webrtc/common_video/common_video.gyp
index 5c0ecb8208..fe14da1d2e 100644
--- a/webrtc/common_video/common_video.gyp
+++ b/webrtc/common_video/common_video.gyp
@@ -14,7 +14,7 @@
'type': 'static_library',
'include_dirs': [
'<(webrtc_root)/modules/interface/',
- 'interface',
+ 'include',
'libyuv/include',
],
'dependencies': [
@@ -23,7 +23,7 @@
],
'direct_dependent_settings': {
'include_dirs': [
- 'interface',
+ 'include',
'libyuv/include',
],
},
@@ -42,9 +42,9 @@
'i420_buffer_pool.cc',
'video_frame.cc',
'incoming_video_stream.cc',
- 'interface/i420_buffer_pool.h',
- 'interface/incoming_video_stream.h',
- 'interface/video_frame_buffer.h',
+ 'include/i420_buffer_pool.h',
+ 'include/incoming_video_stream.h',
+ 'include/video_frame_buffer.h',
'libyuv/include/scaler.h',
'libyuv/include/webrtc_libyuv.h',
'libyuv/scaler.cc',
diff --git a/webrtc/common_video/common_video_unittests.gyp b/webrtc/common_video/common_video_unittests.gyp
index beeab5ddca..b5e892caf0 100644
--- a/webrtc/common_video/common_video_unittests.gyp
+++ b/webrtc/common_video/common_video_unittests.gyp
@@ -17,6 +17,7 @@
'<(DEPTH)/testing/gtest.gyp:gtest',
'<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
'<(webrtc_root)/test/test.gyp:test_support_main',
+ '<(webrtc_root)/test/test.gyp:fake_video_frames',
],
'sources': [
'i420_buffer_pool_unittest.cc',
diff --git a/webrtc/common_video/i420_buffer_pool.cc b/webrtc/common_video/i420_buffer_pool.cc
index c746666a16..98daec99f6 100644
--- a/webrtc/common_video/i420_buffer_pool.cc
+++ b/webrtc/common_video/i420_buffer_pool.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/common_video/interface/i420_buffer_pool.h"
+#include "webrtc/common_video/include/i420_buffer_pool.h"
#include "webrtc/base/checks.h"
diff --git a/webrtc/common_video/i420_buffer_pool_unittest.cc b/webrtc/common_video/i420_buffer_pool_unittest.cc
index a1596ebb09..b030ee774a 100644
--- a/webrtc/common_video/i420_buffer_pool_unittest.cc
+++ b/webrtc/common_video/i420_buffer_pool_unittest.cc
@@ -11,7 +11,7 @@
#include <string>
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/common_video/interface/i420_buffer_pool.h"
+#include "webrtc/common_video/include/i420_buffer_pool.h"
namespace webrtc {
diff --git a/webrtc/common_video/i420_video_frame_unittest.cc b/webrtc/common_video/i420_video_frame_unittest.cc
index da3996b9bd..1ec451cb79 100644
--- a/webrtc/common_video/i420_video_frame_unittest.cc
+++ b/webrtc/common_video/i420_video_frame_unittest.cc
@@ -8,8 +8,6 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/video_frame.h"
-
#include <math.h>
#include <string.h>
@@ -17,6 +15,7 @@
#include "webrtc/base/bind.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/test/fake_texture_frame.h"
+#include "webrtc/video_frame.h"
namespace webrtc {
@@ -25,7 +24,6 @@ bool EqualPlane(const uint8_t* data1,
int stride,
int width,
int height);
-bool EqualFrames(const VideoFrame& frame1, const VideoFrame& frame2);
int ExpectedSize(int plane_stride, int image_height, PlaneType type);
TEST(TestVideoFrame, InitialValues) {
@@ -103,7 +101,7 @@ TEST(TestVideoFrame, CopyFrame) {
stride_u, stride_v, kRotation));
// Frame of smaller dimensions.
EXPECT_EQ(0, small_frame.CopyFrame(big_frame));
- EXPECT_TRUE(EqualFrames(small_frame, big_frame));
+ EXPECT_TRUE(small_frame.EqualsFrame(big_frame));
EXPECT_EQ(kRotation, small_frame.rotation());
// Frame of larger dimensions.
@@ -113,7 +111,7 @@ TEST(TestVideoFrame, CopyFrame) {
memset(small_frame.buffer(kUPlane), 2, small_frame.allocated_size(kUPlane));
memset(small_frame.buffer(kVPlane), 3, small_frame.allocated_size(kVPlane));
EXPECT_EQ(0, big_frame.CopyFrame(small_frame));
- EXPECT_TRUE(EqualFrames(small_frame, big_frame));
+ EXPECT_TRUE(small_frame.EqualsFrame(big_frame));
}
TEST(TestVideoFrame, ShallowCopy) {
@@ -174,7 +172,7 @@ TEST(TestVideoFrame, ShallowCopy) {
TEST(TestVideoFrame, Reset) {
VideoFrame frame;
- ASSERT_TRUE(frame.CreateEmptyFrame(5, 5, 5, 5, 5) == 0);
+ ASSERT_EQ(frame.CreateEmptyFrame(5, 5, 5, 5, 5), 0);
frame.set_ntp_time_ms(1);
frame.set_timestamp(2);
frame.set_render_time_ms(3);
@@ -244,7 +242,7 @@ TEST(TestVideoFrame, FailToReuseAllocation) {
TEST(TestVideoFrame, TextureInitialValues) {
test::FakeNativeHandle* handle = new test::FakeNativeHandle();
- VideoFrame frame = test::CreateFakeNativeHandleFrame(
+ VideoFrame frame = test::FakeNativeHandle::CreateFrame(
handle, 640, 480, 100, 10, webrtc::kVideoRotation_0);
EXPECT_EQ(640, frame.width());
EXPECT_EQ(480, frame.height());
@@ -258,48 +256,4 @@ TEST(TestVideoFrame, TextureInitialValues) {
EXPECT_EQ(20, frame.render_time_ms());
}
-bool EqualPlane(const uint8_t* data1,
- const uint8_t* data2,
- int stride,
- int width,
- int height) {
- for (int y = 0; y < height; ++y) {
- if (memcmp(data1, data2, width) != 0)
- return false;
- data1 += stride;
- data2 += stride;
- }
- return true;
-}
-
-bool EqualFrames(const VideoFrame& frame1, const VideoFrame& frame2) {
- if ((frame1.width() != frame2.width()) ||
- (frame1.height() != frame2.height()) ||
- (frame1.stride(kYPlane) != frame2.stride(kYPlane)) ||
- (frame1.stride(kUPlane) != frame2.stride(kUPlane)) ||
- (frame1.stride(kVPlane) != frame2.stride(kVPlane)) ||
- (frame1.timestamp() != frame2.timestamp()) ||
- (frame1.ntp_time_ms() != frame2.ntp_time_ms()) ||
- (frame1.render_time_ms() != frame2.render_time_ms())) {
- return false;
- }
- const int half_width = (frame1.width() + 1) / 2;
- const int half_height = (frame1.height() + 1) / 2;
- return EqualPlane(frame1.buffer(kYPlane), frame2.buffer(kYPlane),
- frame1.stride(kYPlane), frame1.width(), frame1.height()) &&
- EqualPlane(frame1.buffer(kUPlane), frame2.buffer(kUPlane),
- frame1.stride(kUPlane), half_width, half_height) &&
- EqualPlane(frame1.buffer(kVPlane), frame2.buffer(kVPlane),
- frame1.stride(kVPlane), half_width, half_height);
-}
-
-int ExpectedSize(int plane_stride, int image_height, PlaneType type) {
- if (type == kYPlane) {
- return (plane_stride * image_height);
- } else {
- int half_height = (image_height + 1) / 2;
- return (plane_stride * half_height);
- }
-}
-
} // namespace webrtc
diff --git a/webrtc/common_video/include/i420_buffer_pool.h b/webrtc/common_video/include/i420_buffer_pool.h
new file mode 100644
index 0000000000..5ab1510689
--- /dev/null
+++ b/webrtc/common_video/include/i420_buffer_pool.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_COMMON_VIDEO_INCLUDE_I420_BUFFER_POOL_H_
+#define WEBRTC_COMMON_VIDEO_INCLUDE_I420_BUFFER_POOL_H_
+
+#include <list>
+
+#include "webrtc/base/thread_checker.h"
+#include "webrtc/common_video/include/video_frame_buffer.h"
+
+namespace webrtc {
+
+// Simple buffer pool to avoid unnecessary allocations of I420Buffer objects.
+// The pool manages the memory of the I420Buffer returned from CreateBuffer.
+// When the I420Buffer is destructed, the memory is returned to the pool for use
+// by subsequent calls to CreateBuffer. If the resolution passed to CreateBuffer
+// changes, old buffers will be purged from the pool.
+class I420BufferPool {
+ public:
+ I420BufferPool();
+ // Returns a buffer from the pool, or creates a new buffer if no suitable
+ // buffer exists in the pool.
+ rtc::scoped_refptr<VideoFrameBuffer> CreateBuffer(int width, int height);
+ // Clears buffers_ and detaches the thread checker so that it can be reused
+ // later from another thread.
+ void Release();
+
+ private:
+ rtc::ThreadChecker thread_checker_;
+ std::list<rtc::scoped_refptr<I420Buffer>> buffers_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_COMMON_VIDEO_INCLUDE_I420_BUFFER_POOL_H_
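A short usage sketch for the pool declared above; the scoped_refptr returned by CreateBuffer keeps the memory alive, and dropping it returns the buffer to the pool.

#include "webrtc/common_video/include/i420_buffer_pool.h"

void DecodeLoopSketch(webrtc::I420BufferPool* pool) {
  for (int i = 0; i < 3; ++i) {
    // Reuses a free pooled buffer of matching size, allocating otherwise;
    // a resolution change purges the old buffers.
    rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
        pool->CreateBuffer(640, 480);
    // ... write pixels via buffer->MutableData(webrtc::kYPlane) while this
    // reference is exclusive, then hand the buffer off ...
  }
}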
diff --git a/webrtc/common_video/include/incoming_video_stream.h b/webrtc/common_video/include/incoming_video_stream.h
new file mode 100644
index 0000000000..e3147eb871
--- /dev/null
+++ b/webrtc/common_video/include/incoming_video_stream.h
@@ -0,0 +1,107 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_COMMON_VIDEO_INCLUDE_INCOMING_VIDEO_STREAM_H_
+#define WEBRTC_COMMON_VIDEO_INCLUDE_INCOMING_VIDEO_STREAM_H_
+
+#include "webrtc/base/platform_thread.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/thread_annotations.h"
+#include "webrtc/common_video/video_render_frames.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+class EventTimerWrapper;
+
+class VideoRenderCallback {
+ public:
+ virtual int32_t RenderFrame(const uint32_t streamId,
+ const VideoFrame& videoFrame) = 0;
+
+ protected:
+ virtual ~VideoRenderCallback() {}
+};
+
+class IncomingVideoStream : public VideoRenderCallback {
+ public:
+ IncomingVideoStream(uint32_t stream_id, bool disable_prerenderer_smoothing);
+ ~IncomingVideoStream();
+
+ // Get callback to deliver frames to the module.
+ VideoRenderCallback* ModuleCallback();
+ virtual int32_t RenderFrame(const uint32_t stream_id,
+ const VideoFrame& video_frame);
+
+ // Set callback to the platform dependent code.
+ void SetRenderCallback(VideoRenderCallback* render_callback);
+
+ // Callback for file recording, snapshot, ...
+ void SetExternalCallback(VideoRenderCallback* render_object);
+
+ // Start/Stop.
+ int32_t Start();
+ int32_t Stop();
+
+ // Clear all buffers.
+ int32_t Reset();
+
+ // Properties.
+ uint32_t StreamId() const;
+ uint32_t IncomingRate() const;
+
+ int32_t SetStartImage(const VideoFrame& video_frame);
+
+ int32_t SetTimeoutImage(const VideoFrame& video_frame,
+ const uint32_t timeout);
+
+ int32_t SetExpectedRenderDelay(int32_t delay_ms);
+
+ protected:
+ static bool IncomingVideoStreamThreadFun(void* obj);
+ bool IncomingVideoStreamProcess();
+
+ private:
+ enum { kEventStartupTimeMs = 10 };
+ enum { kEventMaxWaitTimeMs = 100 };
+ enum { kFrameRatePeriodMs = 1000 };
+
+ void DeliverFrame(const VideoFrame& video_frame);
+
+ uint32_t const stream_id_;
+ const bool disable_prerenderer_smoothing_;
+  // Critical sections, listed in the order in which they may be entered.
+ const rtc::scoped_ptr<CriticalSectionWrapper> stream_critsect_;
+ const rtc::scoped_ptr<CriticalSectionWrapper> thread_critsect_;
+ const rtc::scoped_ptr<CriticalSectionWrapper> buffer_critsect_;
+  // TODO(pbos): Make this a plain member and stop resetting this thread;
+  // just starting/stopping it is enough.
+ rtc::scoped_ptr<rtc::PlatformThread> incoming_render_thread_
+ GUARDED_BY(thread_critsect_);
+ rtc::scoped_ptr<EventTimerWrapper> deliver_buffer_event_;
+
+ bool running_ GUARDED_BY(stream_critsect_);
+ VideoRenderCallback* external_callback_ GUARDED_BY(thread_critsect_);
+ VideoRenderCallback* render_callback_ GUARDED_BY(thread_critsect_);
+ const rtc::scoped_ptr<VideoRenderFrames> render_buffers_
+ GUARDED_BY(buffer_critsect_);
+
+ uint32_t incoming_rate_ GUARDED_BY(stream_critsect_);
+ int64_t last_rate_calculation_time_ms_ GUARDED_BY(stream_critsect_);
+ uint16_t num_frames_since_last_calculation_ GUARDED_BY(stream_critsect_);
+ int64_t last_render_time_ms_ GUARDED_BY(thread_critsect_);
+ VideoFrame temp_frame_ GUARDED_BY(thread_critsect_);
+ VideoFrame start_image_ GUARDED_BY(thread_critsect_);
+ VideoFrame timeout_image_ GUARDED_BY(thread_critsect_);
+ uint32_t timeout_time_ GUARDED_BY(thread_critsect_);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_COMMON_VIDEO_INCLUDE_INCOMING_VIDEO_STREAM_H_
diff --git a/webrtc/common_video/include/video_frame_buffer.h b/webrtc/common_video/include/video_frame_buffer.h
new file mode 100644
index 0000000000..710d2862f0
--- /dev/null
+++ b/webrtc/common_video/include/video_frame_buffer.h
@@ -0,0 +1,157 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_COMMON_VIDEO_INCLUDE_VIDEO_FRAME_BUFFER_H_
+#define WEBRTC_COMMON_VIDEO_INCLUDE_VIDEO_FRAME_BUFFER_H_
+
+#include "webrtc/base/callback.h"
+#include "webrtc/base/refcount.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/system_wrappers/include/aligned_malloc.h"
+
+namespace webrtc {
+
+enum PlaneType {
+ kYPlane = 0,
+ kUPlane = 1,
+ kVPlane = 2,
+ kNumOfPlanes = 3,
+};
+
+// Interface of a simple frame buffer containing pixel data. This interface does
+// not contain any frame metadata such as rotation, timestamp, pixel_width, etc.
+class VideoFrameBuffer : public rtc::RefCountInterface {
+ public:
+ // Returns true if this buffer has a single exclusive owner.
+ virtual bool HasOneRef() const = 0;
+
+ // The resolution of the frame in pixels. For formats where some planes are
+ // subsampled, this is the highest-resolution plane.
+ virtual int width() const = 0;
+ virtual int height() const = 0;
+
+ // Returns pointer to the pixel data for a given plane. The memory is owned by
+ // the VideoFrameBuffer object and must not be freed by the caller.
+ virtual const uint8_t* data(PlaneType type) const = 0;
+
+ // Non-const data access is disallowed by default. You need to make sure you
+ // have exclusive access and a writable buffer before calling this function.
+ virtual uint8_t* MutableData(PlaneType type);
+
+ // Returns the number of bytes between successive rows for a given plane.
+ virtual int stride(PlaneType type) const = 0;
+
+ // Return the handle of the underlying video frame. This is used when the
+ // frame is backed by a texture.
+ virtual void* native_handle() const = 0;
+
+ // Returns a new memory-backed frame buffer converted from this buffer's
+ // native handle.
+ virtual rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() = 0;
+
+ protected:
+ virtual ~VideoFrameBuffer();
+};
+
+// Plain I420 buffer in standard memory.
+class I420Buffer : public VideoFrameBuffer {
+ public:
+ I420Buffer(int width, int height);
+ I420Buffer(int width, int height, int stride_y, int stride_u, int stride_v);
+
+ int width() const override;
+ int height() const override;
+ const uint8_t* data(PlaneType type) const override;
+ // Non-const data access is only allowed if HasOneRef() is true to protect
+ // against unexpected overwrites.
+ uint8_t* MutableData(PlaneType type) override;
+ int stride(PlaneType type) const override;
+ void* native_handle() const override;
+ rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override;
+
+ protected:
+ ~I420Buffer() override;
+
+ private:
+ const int width_;
+ const int height_;
+ const int stride_y_;
+ const int stride_u_;
+ const int stride_v_;
+ const rtc::scoped_ptr<uint8_t, AlignedFreeDeleter> data_;
+};
+
+// Base class for native-handle buffers: a thin wrapper around a
+// |native_handle|. It exists for convenience, since most native-handle
+// implementations can share many VideoFrame implementations but need to
+// supply a few of their own (such as their destructors or conversion methods
+// back to software I420).
+class NativeHandleBuffer : public VideoFrameBuffer {
+ public:
+ NativeHandleBuffer(void* native_handle, int width, int height);
+
+ int width() const override;
+ int height() const override;
+ const uint8_t* data(PlaneType type) const override;
+ int stride(PlaneType type) const override;
+ void* native_handle() const override;
+
+ protected:
+ void* native_handle_;
+ const int width_;
+ const int height_;
+};
+
+class WrappedI420Buffer : public webrtc::VideoFrameBuffer {
+ public:
+ WrappedI420Buffer(int width,
+ int height,
+ const uint8_t* y_plane,
+ int y_stride,
+ const uint8_t* u_plane,
+ int u_stride,
+ const uint8_t* v_plane,
+ int v_stride,
+ const rtc::Callback0<void>& no_longer_used);
+ int width() const override;
+ int height() const override;
+
+ const uint8_t* data(PlaneType type) const override;
+
+ int stride(PlaneType type) const override;
+ void* native_handle() const override;
+
+ rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override;
+
+ private:
+ friend class rtc::RefCountedObject<WrappedI420Buffer>;
+ ~WrappedI420Buffer() override;
+
+ const int width_;
+ const int height_;
+ const uint8_t* const y_plane_;
+ const uint8_t* const u_plane_;
+ const uint8_t* const v_plane_;
+ const int y_stride_;
+ const int u_stride_;
+ const int v_stride_;
+ rtc::Callback0<void> no_longer_used_cb_;
+};
+
+// Helper function to crop |buffer| without making a deep copy. May only be used
+// for non-native frames.
+rtc::scoped_refptr<VideoFrameBuffer> ShallowCenterCrop(
+ const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
+ int cropped_width,
+ int cropped_height);
+
+} // namespace webrtc
+
+#endif // WEBRTC_COMMON_VIDEO_INCLUDE_VIDEO_FRAME_BUFFER_H_
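To illustrate WrappedI420Buffer, a sketch that exposes externally owned planes without copying; rtc::Bind (from webrtc/base/bind.h) builds the release callback, and every name outside this header is an assumed example name.

#include "webrtc/base/bind.h"
#include "webrtc/common_video/include/video_frame_buffer.h"

class ExternalFrameSourceSketch {
 public:
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> WrapCurrentFrame() {
    // The wrapper holds no copy; OnBufferReleased() fires when the last
    // reference goes away, and only then may the planes be recycled.
    return new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
        width_, height_, y_, y_stride_, u_, uv_stride_, v_, uv_stride_,
        rtc::Bind(&ExternalFrameSourceSketch::OnBufferReleased, this));
  }

 private:
  void OnBufferReleased() { /* Recycle plane memory here. */ }

  int width_ = 640;
  int height_ = 480;
  int y_stride_ = 640;
  int uv_stride_ = 320;
  const uint8_t* y_ = nullptr;
  const uint8_t* u_ = nullptr;
  const uint8_t* v_ = nullptr;
};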
diff --git a/webrtc/common_video/include/video_image.h b/webrtc/common_video/include/video_image.h
new file mode 100644
index 0000000000..4a6e451c0f
--- /dev/null
+++ b/webrtc/common_video/include/video_image.h
@@ -0,0 +1,17 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_COMMON_VIDEO_INCLUDE_VIDEO_IMAGE_H_
+#define WEBRTC_COMMON_VIDEO_INCLUDE_VIDEO_IMAGE_H_
+
+// TODO(pbos): Remove this file and include webrtc/video_frame.h instead.
+#include "webrtc/video_frame.h"
+
+#endif // WEBRTC_COMMON_VIDEO_INCLUDE_VIDEO_IMAGE_H_
diff --git a/webrtc/common_video/incoming_video_stream.cc b/webrtc/common_video/incoming_video_stream.cc
index 79bbb8a7b6..1272ecc5bb 100644
--- a/webrtc/common_video/incoming_video_stream.cc
+++ b/webrtc/common_video/incoming_video_stream.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/common_video/interface/incoming_video_stream.h"
+#include "webrtc/common_video/include/incoming_video_stream.h"
#include <assert.h>
@@ -21,18 +21,21 @@
#include <sys/time.h>
#endif
+#include "webrtc/base/platform_thread.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/common_video/video_render_frames.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/system_wrappers/include/trace.h"
+#include "webrtc/video_renderer.h"
namespace webrtc {
-IncomingVideoStream::IncomingVideoStream(uint32_t stream_id)
+IncomingVideoStream::IncomingVideoStream(uint32_t stream_id,
+ bool disable_prerenderer_smoothing)
: stream_id_(stream_id),
+ disable_prerenderer_smoothing_(disable_prerenderer_smoothing),
stream_critsect_(CriticalSectionWrapper::CreateCriticalSection()),
thread_critsect_(CriticalSectionWrapper::CreateCriticalSection()),
buffer_critsect_(CriticalSectionWrapper::CreateCriticalSection()),
@@ -49,8 +52,7 @@ IncomingVideoStream::IncomingVideoStream(uint32_t stream_id)
temp_frame_(),
start_image_(),
timeout_image_(),
- timeout_time_() {
-}
+ timeout_time_() {}
IncomingVideoStream::~IncomingVideoStream() {
Stop();
@@ -80,11 +82,15 @@ int32_t IncomingVideoStream::RenderFrame(const uint32_t stream_id,
last_rate_calculation_time_ms_ = now_ms;
}
- // Insert frame.
- CriticalSectionScoped csB(buffer_critsect_.get());
- if (render_buffers_->AddFrame(video_frame) == 1)
- deliver_buffer_event_->Set();
-
+ // Hand over or insert frame.
+ if (disable_prerenderer_smoothing_) {
+ DeliverFrame(video_frame);
+ } else {
+ CriticalSectionScoped csB(buffer_critsect_.get());
+ if (render_buffers_->AddFrame(video_frame) == 1) {
+ deliver_buffer_event_->Set();
+ }
+ }
return 0;
}
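A sketch of the two delivery modes introduced by this change; constructor and method names are as in the new header above.

#include "webrtc/common_video/include/incoming_video_stream.h"

void CreateStreamsSketch() {
  // With smoothing enabled, RenderFrame() queues the frame and the render
  // thread started by Start() paces delivery; with it disabled, RenderFrame()
  // calls DeliverFrame() synchronously on the caller's thread.
  webrtc::IncomingVideoStream smoothed(1, false);
  webrtc::IncomingVideoStream direct(2, true);
  smoothed.Start();  // Spawns the rtc::PlatformThread and its event timer.
  direct.Start();    // No thread is created in this mode.
  smoothed.Stop();
  direct.Stop();
}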
@@ -128,21 +134,20 @@ int32_t IncomingVideoStream::Start() {
return 0;
}
- CriticalSectionScoped csT(thread_critsect_.get());
- assert(incoming_render_thread_ == NULL);
+ if (!disable_prerenderer_smoothing_) {
+ CriticalSectionScoped csT(thread_critsect_.get());
+ assert(incoming_render_thread_ == NULL);
- incoming_render_thread_ = ThreadWrapper::CreateThread(
- IncomingVideoStreamThreadFun, this, "IncomingVideoStreamThread");
- if (!incoming_render_thread_) {
- return -1;
- }
+ incoming_render_thread_.reset(new rtc::PlatformThread(
+ IncomingVideoStreamThreadFun, this, "IncomingVideoStreamThread"));
+ if (!incoming_render_thread_) {
+ return -1;
+ }
- if (incoming_render_thread_->Start()) {
- } else {
- return -1;
+ incoming_render_thread_->Start();
+ incoming_render_thread_->SetPriority(rtc::kRealtimePriority);
+ deliver_buffer_event_->StartTimer(false, kEventStartupTimeMs);
}
- incoming_render_thread_->SetPriority(kRealtimePriority);
- deliver_buffer_event_->StartTimer(false, kEventStartupTimeMs);
running_ = true;
return 0;
@@ -155,7 +160,7 @@ int32_t IncomingVideoStream::Stop() {
return 0;
}
- ThreadWrapper* thread = NULL;
+ rtc::PlatformThread* thread = NULL;
{
CriticalSectionScoped cs_thread(thread_critsect_.get());
if (incoming_render_thread_) {
@@ -169,11 +174,8 @@ int32_t IncomingVideoStream::Stop() {
}
}
if (thread) {
- if (thread->Stop()) {
- delete thread;
- } else {
- assert(false);
- }
+ thread->Stop();
+ delete thread;
}
running_ = false;
return 0;
@@ -205,6 +207,7 @@ bool IncomingVideoStream::IncomingVideoStreamProcess() {
// Terminating
return false;
}
+
// Get a new frame to render and the time for the frame after this one.
VideoFrame frame_to_render;
uint32_t wait_time;
@@ -220,37 +223,41 @@ bool IncomingVideoStream::IncomingVideoStreamProcess() {
}
deliver_buffer_event_->StartTimer(false, wait_time);
- if (frame_to_render.IsZeroSize()) {
- if (render_callback_) {
- if (last_render_time_ms_ == 0 && !start_image_.IsZeroSize()) {
- // We have not rendered anything and have a start image.
- temp_frame_.CopyFrame(start_image_);
- render_callback_->RenderFrame(stream_id_, temp_frame_);
- } else if (!timeout_image_.IsZeroSize() &&
- last_render_time_ms_ + timeout_time_ <
- TickTime::MillisecondTimestamp()) {
- // Render a timeout image.
- temp_frame_.CopyFrame(timeout_image_);
- render_callback_->RenderFrame(stream_id_, temp_frame_);
- }
- }
+ DeliverFrame(frame_to_render);
+ }
+ return true;
+}
- // No frame.
- return true;
+void IncomingVideoStream::DeliverFrame(const VideoFrame& video_frame) {
+ CriticalSectionScoped cs(thread_critsect_.get());
+ if (video_frame.IsZeroSize()) {
+ if (render_callback_) {
+ if (last_render_time_ms_ == 0 && !start_image_.IsZeroSize()) {
+ // We have not rendered anything and have a start image.
+ temp_frame_.CopyFrame(start_image_);
+ render_callback_->RenderFrame(stream_id_, temp_frame_);
+ } else if (!timeout_image_.IsZeroSize() &&
+ last_render_time_ms_ + timeout_time_ <
+ TickTime::MillisecondTimestamp()) {
+ // Render a timeout image.
+ temp_frame_.CopyFrame(timeout_image_);
+ render_callback_->RenderFrame(stream_id_, temp_frame_);
+ }
}
- // Send frame for rendering.
- if (external_callback_) {
- external_callback_->RenderFrame(stream_id_, frame_to_render);
- } else if (render_callback_) {
- render_callback_->RenderFrame(stream_id_, frame_to_render);
- }
+ // No frame.
+ return;
+ }
- // We're done with this frame.
- if (!frame_to_render.IsZeroSize())
- last_render_time_ms_ = frame_to_render.render_time_ms();
+ // Send frame for rendering.
+ if (external_callback_) {
+ external_callback_->RenderFrame(stream_id_, video_frame);
+ } else if (render_callback_) {
+ render_callback_->RenderFrame(stream_id_, video_frame);
}
- return true;
+
+ // We're done with this frame.
+ last_render_time_ms_ = video_frame.render_time_ms();
}
} // namespace webrtc
diff --git a/webrtc/common_video/interface/i420_buffer_pool.h b/webrtc/common_video/interface/i420_buffer_pool.h
deleted file mode 100644
index df862cdba5..0000000000
--- a/webrtc/common_video/interface/i420_buffer_pool.h
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_COMMON_VIDEO_INTERFACE_I420_BUFFER_POOL_H_
-#define WEBRTC_COMMON_VIDEO_INTERFACE_I420_BUFFER_POOL_H_
-
-#include <list>
-
-#include "webrtc/base/thread_checker.h"
-#include "webrtc/common_video/interface/video_frame_buffer.h"
-
-namespace webrtc {
-
-// Simple buffer pool to avoid unnecessary allocations of I420Buffer objects.
-// The pool manages the memory of the I420Buffer returned from CreateBuffer.
-// When the I420Buffer is destructed, the memory is returned to the pool for use
-// by subsequent calls to CreateBuffer. If the resolution passed to CreateBuffer
-// changes, old buffers will be purged from the pool.
-class I420BufferPool {
- public:
- I420BufferPool();
- // Returns a buffer from the pool, or creates a new buffer if no suitable
- // buffer exists in the pool.
- rtc::scoped_refptr<VideoFrameBuffer> CreateBuffer(int width, int height);
- // Clears buffers_ and detaches the thread checker so that it can be reused
- // later from another thread.
- void Release();
-
- private:
- rtc::ThreadChecker thread_checker_;
- std::list<rtc::scoped_refptr<I420Buffer>> buffers_;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_COMMON_VIDEO_INTERFACE_I420_BUFFER_POOL_H_
diff --git a/webrtc/common_video/interface/incoming_video_stream.h b/webrtc/common_video/interface/incoming_video_stream.h
deleted file mode 100644
index 74ecc4e837..0000000000
--- a/webrtc/common_video/interface/incoming_video_stream.h
+++ /dev/null
@@ -1,102 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_COMMON_VIDEO_INTERFACE_INCOMING_VIDEO_STREAM_H_
-#define WEBRTC_COMMON_VIDEO_INTERFACE_INCOMING_VIDEO_STREAM_H_
-
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/base/thread_annotations.h"
-#include "webrtc/common_video/video_render_frames.h"
-
-namespace webrtc {
-class CriticalSectionWrapper;
-class EventTimerWrapper;
-class ThreadWrapper;
-
-class VideoRenderCallback {
- public:
- virtual int32_t RenderFrame(const uint32_t streamId,
- const VideoFrame& videoFrame) = 0;
-
- protected:
- virtual ~VideoRenderCallback() {}
-};
-
-class IncomingVideoStream : public VideoRenderCallback {
- public:
- explicit IncomingVideoStream(uint32_t stream_id);
- ~IncomingVideoStream();
-
- // Get callback to deliver frames to the module.
- VideoRenderCallback* ModuleCallback();
- virtual int32_t RenderFrame(const uint32_t stream_id,
- const VideoFrame& video_frame);
-
- // Set callback to the platform dependent code.
- void SetRenderCallback(VideoRenderCallback* render_callback);
-
- // Callback for file recording, snapshot, ...
- void SetExternalCallback(VideoRenderCallback* render_object);
-
- // Start/Stop.
- int32_t Start();
- int32_t Stop();
-
- // Clear all buffers.
- int32_t Reset();
-
- // Properties.
- uint32_t StreamId() const;
- uint32_t IncomingRate() const;
-
- int32_t SetStartImage(const VideoFrame& video_frame);
-
- int32_t SetTimeoutImage(const VideoFrame& video_frame,
- const uint32_t timeout);
-
- int32_t SetExpectedRenderDelay(int32_t delay_ms);
-
- protected:
- static bool IncomingVideoStreamThreadFun(void* obj);
- bool IncomingVideoStreamProcess();
-
- private:
- enum { kEventStartupTimeMs = 10 };
- enum { kEventMaxWaitTimeMs = 100 };
- enum { kFrameRatePeriodMs = 1000 };
-
- uint32_t const stream_id_;
- // Critsects in allowed to enter order.
- const rtc::scoped_ptr<CriticalSectionWrapper> stream_critsect_;
- const rtc::scoped_ptr<CriticalSectionWrapper> thread_critsect_;
- const rtc::scoped_ptr<CriticalSectionWrapper> buffer_critsect_;
- rtc::scoped_ptr<ThreadWrapper> incoming_render_thread_
- GUARDED_BY(thread_critsect_);
- rtc::scoped_ptr<EventTimerWrapper> deliver_buffer_event_;
-
- bool running_ GUARDED_BY(stream_critsect_);
- VideoRenderCallback* external_callback_ GUARDED_BY(thread_critsect_);
- VideoRenderCallback* render_callback_ GUARDED_BY(thread_critsect_);
- const rtc::scoped_ptr<VideoRenderFrames> render_buffers_
- GUARDED_BY(buffer_critsect_);
-
- uint32_t incoming_rate_ GUARDED_BY(stream_critsect_);
- int64_t last_rate_calculation_time_ms_ GUARDED_BY(stream_critsect_);
- uint16_t num_frames_since_last_calculation_ GUARDED_BY(stream_critsect_);
- int64_t last_render_time_ms_ GUARDED_BY(thread_critsect_);
- VideoFrame temp_frame_ GUARDED_BY(thread_critsect_);
- VideoFrame start_image_ GUARDED_BY(thread_critsect_);
- VideoFrame timeout_image_ GUARDED_BY(thread_critsect_);
- uint32_t timeout_time_ GUARDED_BY(thread_critsect_);
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_COMMON_VIDEO_INTERFACE_INCOMING_VIDEO_STREAM_H_
diff --git a/webrtc/common_video/interface/video_frame_buffer.h b/webrtc/common_video/interface/video_frame_buffer.h
deleted file mode 100644
index 1062165b81..0000000000
--- a/webrtc/common_video/interface/video_frame_buffer.h
+++ /dev/null
@@ -1,157 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_VIDEO_FRAME_BUFFER_H_
-#define WEBRTC_VIDEO_FRAME_BUFFER_H_
-
-#include "webrtc/base/callback.h"
-#include "webrtc/base/refcount.h"
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/base/scoped_ref_ptr.h"
-#include "webrtc/system_wrappers/include/aligned_malloc.h"
-
-namespace webrtc {
-
-enum PlaneType {
- kYPlane = 0,
- kUPlane = 1,
- kVPlane = 2,
- kNumOfPlanes = 3,
-};
-
-// Interface of a simple frame buffer containing pixel data. This interface does
-// not contain any frame metadata such as rotation, timestamp, pixel_width, etc.
-class VideoFrameBuffer : public rtc::RefCountInterface {
- public:
- // Returns true if this buffer has a single exclusive owner.
- virtual bool HasOneRef() const = 0;
-
- // The resolution of the frame in pixels. For formats where some planes are
- // subsampled, this is the highest-resolution plane.
- virtual int width() const = 0;
- virtual int height() const = 0;
-
- // Returns pointer to the pixel data for a given plane. The memory is owned by
- // the VideoFrameBuffer object and must not be freed by the caller.
- virtual const uint8_t* data(PlaneType type) const = 0;
-
- // Non-const data access is disallowed by default. You need to make sure you
- // have exclusive access and a writable buffer before calling this function.
- virtual uint8_t* MutableData(PlaneType type);
-
- // Returns the number of bytes between successive rows for a given plane.
- virtual int stride(PlaneType type) const = 0;
-
- // Return the handle of the underlying video frame. This is used when the
- // frame is backed by a texture.
- virtual void* native_handle() const = 0;
-
- // Returns a new memory-backed frame buffer converted from this buffer's
- // native handle.
- virtual rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() = 0;
-
- protected:
- virtual ~VideoFrameBuffer();
-};
-
-// Plain I420 buffer in standard memory.
-class I420Buffer : public VideoFrameBuffer {
- public:
- I420Buffer(int width, int height);
- I420Buffer(int width, int height, int stride_y, int stride_u, int stride_v);
-
- int width() const override;
- int height() const override;
- const uint8_t* data(PlaneType type) const override;
- // Non-const data access is only allowed if HasOneRef() is true to protect
- // against unexpected overwrites.
- uint8_t* MutableData(PlaneType type) override;
- int stride(PlaneType type) const override;
- void* native_handle() const override;
- rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override;
-
- protected:
- ~I420Buffer() override;
-
- private:
- const int width_;
- const int height_;
- const int stride_y_;
- const int stride_u_;
- const int stride_v_;
- const rtc::scoped_ptr<uint8_t, AlignedFreeDeleter> data_;
-};
-
-// Base class for native-handle buffer is a wrapper around a |native_handle|.
-// This is used for convenience as most native-handle implementations can share
-// many VideoFrame implementations, but need to implement a few others (such
-// as their own destructors or conversion methods back to software I420).
-class NativeHandleBuffer : public VideoFrameBuffer {
- public:
- NativeHandleBuffer(void* native_handle, int width, int height);
-
- int width() const override;
- int height() const override;
- const uint8_t* data(PlaneType type) const override;
- int stride(PlaneType type) const override;
- void* native_handle() const override;
-
- protected:
- void* native_handle_;
- const int width_;
- const int height_;
-};
-
-class WrappedI420Buffer : public webrtc::VideoFrameBuffer {
- public:
- WrappedI420Buffer(int width,
- int height,
- const uint8_t* y_plane,
- int y_stride,
- const uint8_t* u_plane,
- int u_stride,
- const uint8_t* v_plane,
- int v_stride,
- const rtc::Callback0<void>& no_longer_used);
- int width() const override;
- int height() const override;
-
- const uint8_t* data(PlaneType type) const override;
-
- int stride(PlaneType type) const override;
- void* native_handle() const override;
-
- rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override;
-
- private:
- friend class rtc::RefCountedObject<WrappedI420Buffer>;
- ~WrappedI420Buffer() override;
-
- const int width_;
- const int height_;
- const uint8_t* const y_plane_;
- const uint8_t* const u_plane_;
- const uint8_t* const v_plane_;
- const int y_stride_;
- const int u_stride_;
- const int v_stride_;
- rtc::Callback0<void> no_longer_used_cb_;
-};
-
-// Helper function to crop |buffer| without making a deep copy. May only be used
-// for non-native frames.
-rtc::scoped_refptr<VideoFrameBuffer> ShallowCenterCrop(
- const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
- int cropped_width,
- int cropped_height);
-
-} // namespace webrtc
-
-#endif // WEBRTC_VIDEO_FRAME_BUFFER_H_
diff --git a/webrtc/common_video/interface/video_image.h b/webrtc/common_video/interface/video_image.h
deleted file mode 100644
index 4cbf23f1a1..0000000000
--- a/webrtc/common_video/interface/video_image.h
+++ /dev/null
@@ -1,17 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef COMMON_VIDEO_INTERFACE_VIDEO_IMAGE_H
-#define COMMON_VIDEO_INTERFACE_VIDEO_IMAGE_H
-
-// TODO(pbos): Remove this file and include webrtc/video_frame.h instead.
-#include "webrtc/video_frame.h"
-
-#endif // COMMON_VIDEO_INTERFACE_VIDEO_IMAGE_H
diff --git a/webrtc/common_video/libyuv/include/scaler.h b/webrtc/common_video/libyuv/include/scaler.h
index c04d01f33b..2b92f8148b 100644
--- a/webrtc/common_video/libyuv/include/scaler.h
+++ b/webrtc/common_video/libyuv/include/scaler.h
@@ -15,7 +15,7 @@
#ifndef WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_SCALER_H_
#define WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_SCALER_H_
-#include "webrtc/common_video/interface/i420_buffer_pool.h"
+#include "webrtc/common_video/include/i420_buffer_pool.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/typedefs.h"
#include "webrtc/video_frame.h"
diff --git a/webrtc/common_video/libyuv/include/webrtc_libyuv.h b/webrtc/common_video/libyuv/include/webrtc_libyuv.h
index baf8c171ad..d66736fb24 100644
--- a/webrtc/common_video/libyuv/include/webrtc_libyuv.h
+++ b/webrtc/common_video/libyuv/include/webrtc_libyuv.h
@@ -152,6 +152,7 @@ int ConvertNV12ToRGB565(const uint8_t* src_frame,
double I420PSNR(const VideoFrame* ref_frame, const VideoFrame* test_frame);
// Compute SSIM for an I420 frame (all planes).
double I420SSIM(const VideoFrame* ref_frame, const VideoFrame* test_frame);
-}
+
+} // namespace webrtc
#endif // WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_WEBRTC_LIBYUV_H_
diff --git a/webrtc/common_video/libyuv/libyuv_unittest.cc b/webrtc/common_video/libyuv/libyuv_unittest.cc
index b674b51b67..e7cf0759f6 100644
--- a/webrtc/common_video/libyuv/libyuv_unittest.cc
+++ b/webrtc/common_video/libyuv/libyuv_unittest.cc
@@ -97,7 +97,7 @@ TestLibYuv::TestLibYuv()
width_(352),
height_(288),
size_y_(width_ * height_),
- size_uv_(((width_ + 1 ) / 2) * ((height_ + 1) / 2)),
+ size_uv_(((width_ + 1) / 2) * ((height_ + 1) / 2)),
frame_length_(CalcBufferSize(kI420, 352, 288)) {
orig_buffer_.reset(new uint8_t[frame_length_]);
}
@@ -142,9 +142,9 @@ TEST_F(TestLibYuv, ConvertTest) {
double psnr = 0.0;
VideoFrame res_i420_frame;
- EXPECT_EQ(0,res_i420_frame.CreateEmptyFrame(width_, height_, width_,
- (width_ + 1) / 2,
- (width_ + 1) / 2));
+ EXPECT_EQ(0, res_i420_frame.CreateEmptyFrame(width_, height_, width_,
+ (width_ + 1) / 2,
+ (width_ + 1) / 2));
printf("\nConvert #%d I420 <-> I420 \n", j);
rtc::scoped_ptr<uint8_t[]> out_i420_buffer(new uint8_t[frame_length_]);
EXPECT_EQ(0, ConvertFromI420(orig_frame_, kI420, 0,
@@ -281,8 +281,8 @@ TEST_F(TestLibYuv, ConvertAlignedFrame) {
int stride_y = 0;
int stride_uv = 0;
Calc16ByteAlignedStride(width_, &stride_y, &stride_uv);
- EXPECT_EQ(0,res_i420_frame.CreateEmptyFrame(width_, height_,
- stride_y, stride_uv, stride_uv));
+ EXPECT_EQ(0, res_i420_frame.CreateEmptyFrame(width_, height_,
+ stride_y, stride_uv, stride_uv));
rtc::scoped_ptr<uint8_t[]> out_i420_buffer(new uint8_t[frame_length_]);
EXPECT_EQ(0, ConvertFromI420(orig_frame_, kI420, 0,
out_i420_buffer.get()));
@@ -303,27 +303,27 @@ TEST_F(TestLibYuv, RotateTest) {
VideoFrame rotated_res_i420_frame;
int rotated_width = height_;
int rotated_height = width_;
- int stride_y ;
+ int stride_y;
int stride_uv;
Calc16ByteAlignedStride(rotated_width, &stride_y, &stride_uv);
- EXPECT_EQ(0,rotated_res_i420_frame.CreateEmptyFrame(rotated_width,
- rotated_height,
- stride_y,
- stride_uv,
- stride_uv));
+ EXPECT_EQ(0, rotated_res_i420_frame.CreateEmptyFrame(rotated_width,
+ rotated_height,
+ stride_y,
+ stride_uv,
+ stride_uv));
EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0, width_, height_,
0, kVideoRotation_90, &rotated_res_i420_frame));
EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0, width_, height_,
0, kVideoRotation_270, &rotated_res_i420_frame));
- EXPECT_EQ(0,rotated_res_i420_frame.CreateEmptyFrame(width_, height_,
- width_, (width_ + 1) / 2,
- (width_ + 1) / 2));
+ EXPECT_EQ(0, rotated_res_i420_frame.CreateEmptyFrame(width_, height_,
+ width_, (width_ + 1) / 2,
+ (width_ + 1) / 2));
EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0, width_, height_,
0, kVideoRotation_180, &rotated_res_i420_frame));
}
TEST_F(TestLibYuv, alignment) {
- int value = 0x3FF; // 1023
+ int value = 0x3FF; // 1023
EXPECT_EQ(0x400, AlignInt(value, 128)); // Low 7 bits are zero.
EXPECT_EQ(0x400, AlignInt(value, 64)); // Low 6 bits are zero.
EXPECT_EQ(0x400, AlignInt(value, 32)); // Low 5 bits are zero.
@@ -346,4 +346,4 @@ TEST_F(TestLibYuv, StrideAlignment) {
EXPECT_EQ(64, stride_uv);
}
-} // namespace
+} // namespace webrtc
diff --git a/webrtc/common_video/libyuv/scaler_unittest.cc b/webrtc/common_video/libyuv/scaler_unittest.cc
index 568311bc2e..6d026383a2 100644
--- a/webrtc/common_video/libyuv/scaler_unittest.cc
+++ b/webrtc/common_video/libyuv/scaler_unittest.cc
@@ -15,7 +15,6 @@
#include "webrtc/common_video/libyuv/include/scaler.h"
#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/test/testsupport/fileutils.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
namespace webrtc {
@@ -114,8 +113,13 @@ TEST_F(TestScaler, ScaleSendingBufferTooSmall) {
EXPECT_EQ(half_height_, test_frame2.height());
}
-//TODO (mikhal): Converge the test into one function that accepts the method.
-TEST_F(TestScaler, DISABLED_ON_ANDROID(PointScaleTest)) {
+// TODO(mikhal): Converge the test into one function that accepts the method.
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_PointScaleTest DISABLED_PointScaleTest
+#else
+#define MAYBE_PointScaleTest PointScaleTest
+#endif
+TEST_F(TestScaler, MAYBE_PointScaleTest) {
double avg_psnr;
FILE* source_file2;
ScaleMethod method = kScalePoint;
@@ -182,7 +186,12 @@ TEST_F(TestScaler, DISABLED_ON_ANDROID(PointScaleTest)) {
ASSERT_EQ(0, fclose(source_file2));
}
-TEST_F(TestScaler, DISABLED_ON_ANDROID(BiLinearScaleTest)) {
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_BiLinearScaleTest DISABLED_BiLinearScaleTest
+#else
+#define MAYBE_BiLinearScaleTest BiLinearScaleTest
+#endif
+TEST_F(TestScaler, MAYBE_BiLinearScaleTest) {
double avg_psnr;
FILE* source_file2;
ScaleMethod method = kScaleBilinear;
@@ -234,7 +243,12 @@ TEST_F(TestScaler, DISABLED_ON_ANDROID(BiLinearScaleTest)) {
400, 300);
}
-TEST_F(TestScaler, DISABLED_ON_ANDROID(BoxScaleTest)) {
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_BoxScaleTest DISABLED_BoxScaleTest
+#else
+#define MAYBE_BoxScaleTest BoxScaleTest
+#endif
+TEST_F(TestScaler, MAYBE_BoxScaleTest) {
double avg_psnr;
FILE* source_file2;
ScaleMethod method = kScaleBox;
@@ -322,7 +336,7 @@ double TestScaler::ComputeAvgSequencePSNR(FILE* input_file,
return avg_psnr;
}
-// TODO (mikhal): Move part to a separate scale test.
+// TODO(mikhal): Move part to a separate scale test.
void TestScaler::ScaleSequence(ScaleMethod method,
FILE* source_file, std::string out_name,
int src_width, int src_height,
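The removed gtest_disable.h helper is replaced throughout this file by gtest's plain name-prefix convention; the generic pattern, with a hypothetical test name, is:

// gtest skips any test whose name starts with "DISABLED_" (it still compiles
// and can be run with --gtest_also_run_disabled_tests). Routing the name
// through a MAYBE_ macro keeps a single TEST_F definition per test.
#if defined(WEBRTC_ANDROID)
#define MAYBE_SomeScaleTest DISABLED_SomeScaleTest
#else
#define MAYBE_SomeScaleTest SomeScaleTest
#endif
TEST_F(TestScaler, MAYBE_SomeScaleTest) {
  // Body runs on all platforms except Android.
}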
diff --git a/webrtc/common_video/libyuv/webrtc_libyuv.cc b/webrtc/common_video/libyuv/webrtc_libyuv.cc
index bf95624769..48f5c2036b 100644
--- a/webrtc/common_video/libyuv/webrtc_libyuv.cc
+++ b/webrtc/common_video/libyuv/webrtc_libyuv.cc
@@ -58,7 +58,7 @@ VideoType RawVideoTypeToCommonVideoVideoType(RawVideoType type) {
int AlignInt(int value, int alignment) {
assert(!((alignment - 1) & alignment));
- return ((value + alignment - 1) & ~ (alignment - 1));
+ return ((value + alignment - 1) & ~(alignment - 1));
}
void Calc16ByteAlignedStride(int width, int* stride_y, int* stride_uv) {
@@ -119,8 +119,8 @@ int PrintVideoFrame(const VideoFrame& frame, FILE* file) {
}
plane_buffer += frame.stride(plane_type);
}
- }
- return 0;
+ }
+ return 0;
}
int ExtractBuffer(const VideoFrame& input_frame, size_t size, uint8_t* buffer) {
@@ -176,7 +176,7 @@ int ConvertRGB24ToARGB(const uint8_t* src_frame, uint8_t* dst_frame,
}
libyuv::RotationMode ConvertRotationMode(VideoRotation rotation) {
- switch(rotation) {
+ switch (rotation) {
case kVideoRotation_0:
return libyuv::kRotate0;
case kVideoRotation_90:
@@ -191,7 +191,7 @@ libyuv::RotationMode ConvertRotationMode(VideoRotation rotation) {
}
int ConvertVideoType(VideoType video_type) {
- switch(video_type) {
+ switch (video_type) {
case kUnknown:
return libyuv::FOURCC_ANY;
case kI420:
@@ -243,7 +243,7 @@ int ConvertToI420(VideoType src_video_type,
// Stride values should correspond to the destination values.
if (rotation == kVideoRotation_90 || rotation == kVideoRotation_270) {
dst_width = dst_frame->height();
- dst_height =dst_frame->width();
+ dst_height = dst_frame->width();
}
return libyuv::ConvertToI420(src_frame, sample_size,
dst_frame->buffer(kYPlane),
diff --git a/webrtc/common_video/plane.cc b/webrtc/common_video/plane.cc
deleted file mode 100644
index e0bbba10ba..0000000000
--- a/webrtc/common_video/plane.cc
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/common_video/plane.h"
-
-#include <string.h> // memcpy
-
-#include <algorithm> // swap
-
-namespace webrtc {
-
-// Aligning pointer to 64 bytes for improved performance, e.g. use SIMD.
-static const int kBufferAlignment = 64;
-
-Plane::Plane()
- : allocated_size_(0),
- plane_size_(0),
- stride_(0) {}
-
-Plane::~Plane() {}
-
-int Plane::CreateEmptyPlane(int allocated_size, int stride, int plane_size) {
- if (allocated_size < 1 || stride < 1 || plane_size < 1)
- return -1;
- stride_ = stride;
- if (MaybeResize(allocated_size) < 0)
- return -1;
- plane_size_ = plane_size;
- return 0;
-}
-
-int Plane::MaybeResize(int new_size) {
- if (new_size <= 0)
- return -1;
- if (new_size <= allocated_size_)
- return 0;
- rtc::scoped_ptr<uint8_t, AlignedFreeDeleter> new_buffer(
- static_cast<uint8_t*>(AlignedMalloc(new_size, kBufferAlignment)));
- if (buffer_.get()) {
- memcpy(new_buffer.get(), buffer_.get(), plane_size_);
- }
- buffer_.reset(new_buffer.release());
- allocated_size_ = new_size;
- return 0;
-}
-
-int Plane::Copy(const Plane& plane) {
- if (MaybeResize(plane.allocated_size_) < 0)
- return -1;
- if (plane.buffer_.get())
- memcpy(buffer_.get(), plane.buffer_.get(), plane.plane_size_);
- stride_ = plane.stride_;
- plane_size_ = plane.plane_size_;
- return 0;
-}
-
-int Plane::Copy(int size, int stride, const uint8_t* buffer) {
- if (MaybeResize(size) < 0)
- return -1;
- memcpy(buffer_.get(), buffer, size);
- plane_size_ = size;
- stride_ = stride;
- return 0;
-}
-
-void Plane::Swap(Plane& plane) {
- std::swap(stride_, plane.stride_);
- std::swap(allocated_size_, plane.allocated_size_);
- std::swap(plane_size_, plane.plane_size_);
- buffer_.swap(plane.buffer_);
-}
-
-} // namespace webrtc
diff --git a/webrtc/common_video/plane.h b/webrtc/common_video/plane.h
deleted file mode 100644
index 3ef949adf0..0000000000
--- a/webrtc/common_video/plane.h
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef COMMON_VIDEO_PLANE_H
-#define COMMON_VIDEO_PLANE_H
-
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/system_wrappers/include/aligned_malloc.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-// Helper class for VideoFrame: Store plane data and perform basic plane
-// operations.
-class Plane {
- public:
- Plane();
- ~Plane();
- // CreateEmptyPlane - set allocated size, actual plane size and stride:
- // If the currently allocated size is smaller than the requested size, a
- // buffer of sufficient size will be allocated.
- // Return value: 0 on success, -1 on error.
- int CreateEmptyPlane(int allocated_size, int stride, int plane_size);
-
- // Copy the entire plane data.
- // Return value: 0 on success, -1 on error.
- int Copy(const Plane& plane);
-
- // Copy buffer: If the currently allocated size is smaller than the source
- // size, a buffer of sufficient size will be allocated.
- // Return value: 0 on success, -1 on error.
- int Copy(int size, int stride, const uint8_t* buffer);
-
- // Swap plane data.
- void Swap(Plane& plane);
-
- // Get allocated size.
- int allocated_size() const {return allocated_size_;}
-
- // Set actual size.
- void ResetSize() {plane_size_ = 0;}
-
- // Return true if the plane size is zero, false if not.
- bool IsZeroSize() const {return plane_size_ == 0;}
-
- // Get stride value.
- int stride() const {return stride_;}
-
- // Return data pointer.
- const uint8_t* buffer() const {return buffer_.get();}
- // Overloading with non-const.
- uint8_t* buffer() {return buffer_.get();}
-
- private:
- // Resize when needed: If the currently allocated size is less than new_size,
- // the buffer is reallocated and the old data is copied to the new buffer.
- // Return value: 0 on success, -1 on error.
- int MaybeResize(int new_size);
-
- rtc::scoped_ptr<uint8_t, AlignedFreeDeleter> buffer_;
- int allocated_size_;
- int plane_size_;
- int stride_;
-}; // Plane
-
-} // namespace webrtc
-
-#endif // COMMON_VIDEO_PLANE_H
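For context, the Plane class removed above managed a grow-only buffer aligned to 64 bytes via AlignedMalloc. A standalone sketch of that allocation idiom, using std::aligned_alloc (C++17) as a stand-in for WebRTC's AlignedMalloc, which is not shown in this diff:

#include <cstdint>
#include <cstdlib>
#include <cstring>

class AlignedGrowBuffer {
 public:
  ~AlignedGrowBuffer() { std::free(data_); }

  // Grow-only, mirroring Plane::MaybeResize(): reallocate only when the
  // request exceeds the current allocation, and preserve the old contents.
  bool MaybeResize(size_t new_size) {
    if (new_size == 0) return false;
    if (new_size <= allocated_) return true;
    // std::aligned_alloc requires the size to be a multiple of the alignment.
    const size_t rounded = (new_size + kAlign - 1) / kAlign * kAlign;
    uint8_t* fresh = static_cast<uint8_t*>(std::aligned_alloc(kAlign, rounded));
    if (fresh == nullptr) return false;
    if (data_ != nullptr) std::memcpy(fresh, data_, allocated_);
    std::free(data_);
    data_ = fresh;
    allocated_ = rounded;
    return true;
  }

 private:
  static constexpr size_t kAlign = 64;  // SIMD-friendly, as in the deleted code
  uint8_t* data_ = nullptr;
  size_t allocated_ = 0;
};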
diff --git a/webrtc/common_video/video_frame.cc b/webrtc/common_video/video_frame.cc
index 7cdbd53f9d..8ccd821d09 100644
--- a/webrtc/common_video/video_frame.cc
+++ b/webrtc/common_video/video_frame.cc
@@ -19,6 +19,26 @@
namespace webrtc {
+bool EqualPlane(const uint8_t* data1,
+ const uint8_t* data2,
+ int stride,
+ int width,
+ int height) {
+ for (int y = 0; y < height; ++y) {
+ if (memcmp(data1, data2, width) != 0)
+ return false;
+ data1 += stride;
+ data2 += stride;
+ }
+ return true;
+}
+
+int ExpectedSize(int plane_stride, int image_height, PlaneType type) {
+ if (type == kYPlane)
+ return plane_stride * image_height;
+ return plane_stride * ((image_height + 1) / 2);
+}
+
VideoFrame::VideoFrame() {
// Intentionally using Reset instead of initializer list so that any missed
// fields in Reset will be caught by memory checkers.
@@ -202,4 +222,24 @@ VideoFrame VideoFrame::ConvertNativeToI420Frame() const {
return frame;
}
+bool VideoFrame::EqualsFrame(const VideoFrame& frame) const {
+ if (width() != frame.width() || height() != frame.height() ||
+ stride(kYPlane) != frame.stride(kYPlane) ||
+ stride(kUPlane) != frame.stride(kUPlane) ||
+ stride(kVPlane) != frame.stride(kVPlane) ||
+ timestamp() != frame.timestamp() ||
+ ntp_time_ms() != frame.ntp_time_ms() ||
+ render_time_ms() != frame.render_time_ms()) {
+ return false;
+ }
+ const int half_width = (width() + 1) / 2;
+ const int half_height = (height() + 1) / 2;
+ return EqualPlane(buffer(kYPlane), frame.buffer(kYPlane),
+ stride(kYPlane), width(), height()) &&
+ EqualPlane(buffer(kUPlane), frame.buffer(kUPlane),
+ stride(kUPlane), half_width, half_height) &&
+ EqualPlane(buffer(kVPlane), frame.buffer(kVPlane),
+ stride(kVPlane), half_width, half_height);
+}
+
} // namespace webrtc
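The helpers added above encode two I420 facts worth noting: only the first |width| bytes of each row are significant (EqualPlane skips stride padding), and the chroma planes are subsampled, so EqualsFrame compares the U and V planes at ((width + 1) / 2, (height + 1) / 2), matching the stride * ((image_height + 1) / 2) size that ExpectedSize() reports for them. A self-contained sketch of the same stride-aware comparison:

#include <cstdint>
#include <cstring>

// Compare |width| significant bytes per row; bytes between |width| and
// |stride| are padding and are deliberately skipped.
bool EqualPlaneSketch(const uint8_t* a, const uint8_t* b,
                      int stride, int width, int height) {
  for (int y = 0; y < height; ++y) {
    if (std::memcmp(a, b, width) != 0)
      return false;
    a += stride;
    b += stride;
  }
  return true;
}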
diff --git a/webrtc/common_video/video_frame_buffer.cc b/webrtc/common_video/video_frame_buffer.cc
index 36ee14a17f..492bc49587 100644
--- a/webrtc/common_video/video_frame_buffer.cc
+++ b/webrtc/common_video/video_frame_buffer.cc
@@ -8,21 +8,15 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/common_video/interface/video_frame_buffer.h"
+#include "webrtc/common_video/include/video_frame_buffer.h"
-#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
+#include "webrtc/base/keep_ref_until_done.h"
// Aligning pointer to 64 bytes for improved performance, e.g., to use SIMD.
static const int kBufferAlignment = 64;
namespace webrtc {
-namespace {
-
-// Used in rtc::Bind to keep a buffer alive until destructor is called.
-static void NoLongerUsedCallback(rtc::scoped_refptr<VideoFrameBuffer> dummy) {}
-
-} // anonymous namespace
uint8_t* VideoFrameBuffer::MutableData(PlaneType type) {
RTC_NOTREACHED();
@@ -238,7 +232,7 @@ rtc::scoped_refptr<VideoFrameBuffer> ShallowCenterCrop(
y_plane, buffer->stride(kYPlane),
u_plane, buffer->stride(kUPlane),
v_plane, buffer->stride(kVPlane),
- rtc::Bind(&NoLongerUsedCallback, buffer));
+ rtc::KeepRefUntilDone(buffer));
}
} // namespace webrtc
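Replacing the hand-rolled NoLongerUsedCallback with rtc::KeepRefUntilDone expresses the same keep-alive idiom more directly: the "done" callback exists only to hold a reference to the backing buffer until every wrapped view of it has been destroyed. A sketch of the idiom with standard library types (std::shared_ptr standing in for rtc::scoped_refptr; the names below are illustrative, not WebRTC API):

#include <functional>
#include <memory>

struct PixelBuffer { /* backing storage */ };

// Returns a callback that does nothing when invoked; its captured
// shared_ptr keeps |buffer| alive until the callback itself is destroyed.
std::function<void()> KeepRefUntilDoneSketch(std::shared_ptr<PixelBuffer> buffer) {
  return [buffer]() {};
}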
diff --git a/webrtc/common_video/video_render_frames.cc b/webrtc/common_video/video_render_frames.cc
index f4ece5e8e9..8b447cb10f 100644
--- a/webrtc/common_video/video_render_frames.cc
+++ b/webrtc/common_video/video_render_frames.cc
@@ -12,7 +12,7 @@
#include <assert.h>
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/system_wrappers/include/trace.h"
@@ -20,7 +20,7 @@ namespace webrtc {
const uint32_t KEventMaxWaitTimeMs = 200;
const uint32_t kMinRenderDelayMs = 10;
-const uint32_t kMaxRenderDelayMs= 500;
+const uint32_t kMaxRenderDelayMs = 500;
VideoRenderFrames::VideoRenderFrames()
: render_delay_ms_(10) {
diff --git a/webrtc/config.h b/webrtc/config.h
index 52711636ac..06460ae359 100644
--- a/webrtc/config.h
+++ b/webrtc/config.h
@@ -16,6 +16,7 @@
#include <string>
#include <vector>
+#include "webrtc/common.h"
#include "webrtc/common_types.h"
#include "webrtc/typedefs.h"
@@ -49,10 +50,13 @@ struct FecConfig {
int red_rtx_payload_type;
};
-// RTP header extension to use for the video stream, see RFC 5285.
+// RTP header extension, see RFC 5285.
struct RtpExtension {
RtpExtension(const std::string& name, int id) : name(name), id(id) {}
std::string ToString() const;
+ bool operator==(const RtpExtension& rhs) const {
+ return name == rhs.name && id == rhs.id;
+ }
static bool IsSupportedForAudio(const std::string& name);
static bool IsSupportedForVideo(const std::string& name);
@@ -104,6 +108,7 @@ struct VideoEncoderConfig {
std::string ToString() const;
std::vector<VideoStream> streams;
+ std::vector<SpatialLayer> spatial_layers;
ContentType content_type;
void* encoder_specific_settings;
@@ -124,6 +129,7 @@ struct VideoEncoderConfig {
struct NetEqCapacityConfig {
NetEqCapacityConfig() : enabled(false), capacity(0) {}
explicit NetEqCapacityConfig(int value) : enabled(true), capacity(value) {}
+ static const ConfigOptionID identifier = ConfigOptionID::kNetEqCapacityConfig;
bool enabled;
int capacity;
};
@@ -131,6 +137,14 @@ struct NetEqCapacityConfig {
struct NetEqFastAccelerate {
NetEqFastAccelerate() : enabled(false) {}
explicit NetEqFastAccelerate(bool value) : enabled(value) {}
+ static const ConfigOptionID identifier = ConfigOptionID::kNetEqFastAccelerate;
+ bool enabled;
+};
+
+struct VoicePacing {
+ VoicePacing() : enabled(false) {}
+ explicit VoicePacing(bool value) : enabled(value) {}
+ static const ConfigOptionID identifier = ConfigOptionID::kVoicePacing;
bool enabled;
};
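Two small API changes above are related: RtpExtension gains a structural operator== so configured extensions can be compared by value, and each Config option struct now advertises a compile-time ConfigOptionID via a static |identifier| member, which lets a heterogeneous option store be keyed without RTTI. A hedged sketch of that lookup pattern (webrtc::Config's real implementation is not shown in this diff; ownership handling is elided):

#include <map>

enum class ConfigOptionID { kNetEqCapacityConfig, kNetEqFastAccelerate, kVoicePacing };

struct VoicePacingSketch {
  static const ConfigOptionID identifier = ConfigOptionID::kVoicePacing;
  bool enabled = false;
};

class ConfigSketch {
 public:
  template <typename T>
  void Set(T* option) {
    const ConfigOptionID id = T::identifier;  // local copy avoids pre-C++17 odr-use
    options_[id] = option;
  }
  template <typename T>
  T* Get() const {
    const ConfigOptionID id = T::identifier;
    auto it = options_.find(id);
    return it == options_.end() ? nullptr : static_cast<T*>(it->second);
  }
 private:
  std::map<ConfigOptionID, void*> options_;  // sketch only: no ownership
};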
diff --git a/webrtc/engine_configurations.h b/webrtc/engine_configurations.h
index c832d9acb3..42b181691c 100644
--- a/webrtc/engine_configurations.h
+++ b/webrtc/engine_configurations.h
@@ -14,19 +14,6 @@
#include "webrtc/typedefs.h"
// ============================================================================
-// Voice and Video
-// ============================================================================
-
-// ----------------------------------------------------------------------------
-// [Video] Codec settings
-// ----------------------------------------------------------------------------
-
-#define VIDEOCODEC_I420
-#define VIDEOCODEC_VP8
-#define VIDEOCODEC_VP9
-#define VIDEOCODEC_H264
-
-// ============================================================================
// VoiceEngine
// ============================================================================
diff --git a/webrtc/examples/android/media_demo/AndroidManifest.xml b/webrtc/examples/android/media_demo/AndroidManifest.xml
deleted file mode 100644
index 62bf46076f..0000000000
--- a/webrtc/examples/android/media_demo/AndroidManifest.xml
+++ /dev/null
@@ -1,29 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<manifest xmlns:android="http://schemas.android.com/apk/res/android"
- android:versionCode="1" package="org.webrtc.webrtcdemo" android:versionName="1.07">
- <application android:icon="@drawable/logo"
- android:label="@string/appName"
- android:debuggable="true">
- <activity android:name=".WebRTCDemo"
- android:theme="@android:style/Theme.Holo"
- android:label="@string/appName"
- android:screenOrientation="landscape"
- >
- <intent-filter>
- <action android:name="android.intent.action.MAIN" />
- <category android:name="android.intent.category.LAUNCHER" />
- <action android:name="android.intent.action.HEADSET_PLUG"/>
- </intent-filter>
- </activity>
- </application>
-
- <uses-sdk android:minSdkVersion="14" />
- <uses-permission android:name="android.permission.CAMERA"></uses-permission>
- <uses-feature android:name="android.hardware.camera" />
- <uses-feature android:name="android.hardware.camera.autofocus" />
- <uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
- <uses-permission android:name="android.permission.RECORD_AUDIO" />
- <uses-permission android:name="android.permission.INTERNET" />
- <uses-permission android:name="android.permission.WAKE_LOCK" />
- <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
-</manifest> \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/README b/webrtc/examples/android/media_demo/README
deleted file mode 100644
index af71f38f46..0000000000
--- a/webrtc/examples/android/media_demo/README
+++ /dev/null
@@ -1,24 +0,0 @@
-This directory contains a sample app for sending and receiving audio
-on Android. It further lets you enable and disable some call quality
-enhancements such as echo cancellation, noise suppression, etc.
-
-Prerequisites:
-- Make sure gclient is checking out tools necessary to target Android: your
- .gclient file should contain a line like:
- target_os = ['android']
- Make sure to re-run gclient sync after adding this to download the tools.
-- Env vars need to be set up to target Android; easiest way to do this is to run
- (from the libjingle trunk directory):
- . ./build/android/envsetup.sh
- Note that this clobbers any previously-set $GYP_DEFINES so it must be done
- before the next item.
-- Set up webrtc-related GYP variables:
- export GYP_DEFINES="$GYP_DEFINES java_home=</path/to/JDK>"
-- Finally, run "gclient runhooks" to generate Android-targeting .ninja files.
-
-Example of building the app:
-cd <path/to/repository>/trunk
-ninja -C out/Debug WebRTCDemo
-
-It can then be installed and run on the device:
-adb install -r out/Debug/WebRTCDemo-debug.apk
diff --git a/webrtc/examples/android/media_demo/build.xml b/webrtc/examples/android/media_demo/build.xml
deleted file mode 100644
index 17734886d9..0000000000
--- a/webrtc/examples/android/media_demo/build.xml
+++ /dev/null
@@ -1,92 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project name="WebRTCDemo" default="help">
-
- <!-- The local.properties file is created and updated by the 'android' tool.
- It contains the path to the SDK. It should *NOT* be checked into
- Version Control Systems. -->
- <property file="local.properties" />
-
- <!-- The ant.properties file can be created by you. It is only edited by the
- 'android' tool to add properties to it.
- This is the place to change some Ant specific build properties.
- Here are some properties you may want to change/update:
-
- source.dir
- The name of the source directory. Default is 'src'.
- out.dir
- The name of the output directory. Default is 'bin'.
-
- For other overridable properties, look at the beginning of the rules
- files in the SDK, at tools/ant/build.xml
-
- Properties related to the SDK location or the project target should
- be updated using the 'android' tool with the 'update' action.
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems.
-
- -->
- <property file="ant.properties" />
-
- <!-- if sdk.dir was not set from one of the property file, then
- get it from the ANDROID_HOME env var.
- This must be done before we load project.properties since
- the proguard config can use sdk.dir -->
- <property environment="env" />
- <condition property="sdk.dir" value="${env.ANDROID_SDK_ROOT}">
- <isset property="env.ANDROID_SDK_ROOT" />
- </condition>
-
- <!-- The project.properties file is created and updated by the 'android'
- tool, as well as ADT.
-
- This contains project specific properties such as project target, and library
- dependencies. Lower level build properties are stored in ant.properties
- (or in .classpath for Eclipse projects).
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems. -->
- <loadproperties srcFile="project.properties" />
-
- <!-- quick check on sdk.dir -->
- <fail
- message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_SDK_ROOT environment variable."
- unless="sdk.dir"
- />
-
- <!--
- Import per project custom build rules if present at the root of the project.
- This is the place to put custom intermediary targets such as:
- -pre-build
- -pre-compile
- -post-compile (This is typically used for code obfuscation.
- Compiled code location: ${out.classes.absolute.dir}
- If this is not done in place, override ${out.dex.input.absolute.dir})
- -post-package
- -post-build
- -pre-clean
- -->
- <import file="custom_rules.xml" optional="true" />
-
- <!-- Import the actual build file.
-
- To customize existing targets, there are two options:
- - Customize only one target:
- - copy/paste the target into this file, *before* the
- <import> task.
- - customize it to your needs.
- - Customize the whole content of build.xml
- - copy/paste the content of the rules files (minus the top node)
- into this file, replacing the <import> task.
- - customize to your needs.
-
- ***********************
- ****** IMPORTANT ******
- ***********************
- In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
- in order to avoid having your file be overridden by tools such as "android update project"
- -->
- <!-- version-tag: 1 -->
- <import file="${sdk.dir}/tools/ant/build.xml" />
-
-</project>
diff --git a/webrtc/examples/android/media_demo/jni/jni_helpers.cc b/webrtc/examples/android/media_demo/jni/jni_helpers.cc
deleted file mode 100644
index b0d1a7425c..0000000000
--- a/webrtc/examples/android/media_demo/jni/jni_helpers.cc
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/examples/android/media_demo/jni/jni_helpers.h"
-
-#include <limits>
-
-#include "unicode/unistr.h"
-
-using icu::UnicodeString;
-
-jmethodID GetMethodID(JNIEnv* jni, jclass c, const std::string& name,
- const char* signature) {
- jmethodID m = jni->GetMethodID(c, name.c_str(), signature);
- CHECK_JNI_EXCEPTION(jni, "error during GetMethodID");
- return m;
-}
-
-jlong jlongFromPointer(void* ptr) {
- CHECK(sizeof(intptr_t) <= sizeof(jlong), "Time to rethink the use of jlongs");
- // Going through intptr_t to be obvious about the definedness of the
- // conversion from pointer to integral type. intptr_t to jlong is a standard
- // widening, guarded by the CHECK above.
- jlong ret = reinterpret_cast<intptr_t>(ptr);
- CHECK(reinterpret_cast<void*>(ret) == ptr,
- "jlong does not convert back to pointer");
- return ret;
-}
-
-// Given a (UTF-16) jstring return a new UTF-8 native string.
-std::string JavaToStdString(JNIEnv* jni, const jstring& j_string) {
- const jchar* jchars = jni->GetStringChars(j_string, NULL);
- CHECK_JNI_EXCEPTION(jni, "Error during GetStringChars");
- UnicodeString ustr(jchars, jni->GetStringLength(j_string));
- CHECK_JNI_EXCEPTION(jni, "Error during GetStringLength");
- jni->ReleaseStringChars(j_string, jchars);
- CHECK_JNI_EXCEPTION(jni, "Error during ReleaseStringChars");
- std::string ret;
- return ustr.toUTF8String(ret);
-}
-
-ClassReferenceHolder::ClassReferenceHolder(JNIEnv* jni, const char** classes,
- int size) {
- for (int i = 0; i < size; ++i) {
- LoadClass(jni, classes[i]);
- }
-}
-ClassReferenceHolder::~ClassReferenceHolder() {
- CHECK(classes_.empty(), "Must call FreeReferences() before dtor!");
-}
-
-void ClassReferenceHolder::FreeReferences(JNIEnv* jni) {
- for (std::map<std::string, jclass>::const_iterator it = classes_.begin();
- it != classes_.end(); ++it) {
- jni->DeleteGlobalRef(it->second);
- }
- classes_.clear();
-}
-
-jclass ClassReferenceHolder::GetClass(const std::string& name) {
- std::map<std::string, jclass>::iterator it = classes_.find(name);
- CHECK(it != classes_.end(), "Could not find class");
- return it->second;
-}
-
-void ClassReferenceHolder::LoadClass(JNIEnv* jni, const std::string& name) {
- jclass localRef = jni->FindClass(name.c_str());
- CHECK_JNI_EXCEPTION(jni, "Could not load class");
- CHECK(localRef, name.c_str());
- jclass globalRef = reinterpret_cast<jclass>(jni->NewGlobalRef(localRef));
- CHECK_JNI_EXCEPTION(jni, "error during NewGlobalRef");
- CHECK(globalRef, name.c_str());
- bool inserted = classes_.insert(std::make_pair(name, globalRef)).second;
- CHECK(inserted, "Duplicate class name");
-}
diff --git a/webrtc/examples/android/media_demo/jni/jni_helpers.h b/webrtc/examples/android/media_demo/jni/jni_helpers.h
deleted file mode 100644
index 3d8ff48111..0000000000
--- a/webrtc/examples/android/media_demo/jni/jni_helpers.h
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_JNI_HELPERS_H_
-#define WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_JNI_HELPERS_H_
-
-// TODO(henrike): this file contains duplication with regards to
-// talk/app/webrtc/java/jni/peerconnection_jni.cc. When/if code can be shared
-// between trunk/talk and trunk/webrtc remove the duplication.
-
-#include <android/log.h>
-#include <jni.h>
-
-#include <assert.h>
-#include <map>
-#include <string>
-
-#define TAG "WEBRTC-NATIVE"
-
-// Abort the process if |x| is false, emitting |msg| to logcat.
-#define CHECK(x, msg) \
- if (x) { \
- } else { \
- __android_log_print(ANDROID_LOG_ERROR, TAG, "%s:%d: %s", __FILE__, \
- __LINE__, msg); \
- assert(false); \
- }
-
-// Abort the process if |jni| has a Java exception pending, emitting |msg| to
-// logcat.
-#define CHECK_JNI_EXCEPTION(jni, msg) \
- if (0) { \
- } else { \
- if (jni->ExceptionCheck()) { \
- jni->ExceptionDescribe(); \
- jni->ExceptionClear(); \
- CHECK(0, msg); \
- } \
- }
-
-// JNIEnv-helper methods that CHECK success: no Java exception thrown and found
-// object/class/method/field is non-null.
-jmethodID GetMethodID(JNIEnv* jni, jclass c, const std::string& name,
- const char* signature);
-
-// Return a |jlong| that will automatically convert back to |ptr| when assigned
-// to a |uint64_t|
-jlong jlongFromPointer(void* ptr);
-
-// Given a (UTF-16) jstring return a new UTF-8 native string.
-std::string JavaToStdString(JNIEnv* jni, const jstring& j_string);
-
-// Android's FindClass() is trickier than usual because the app-specific
-// ClassLoader is not consulted when there is no app-specific frame on the
-// stack. Consequently, we only look up classes once in JNI_OnLoad.
-// http://developer.android.com/training/articles/perf-jni.html#faq_FindClass
-class ClassReferenceHolder {
- public:
- ClassReferenceHolder(JNIEnv* jni, const char** classes, int size);
- ~ClassReferenceHolder();
-
- void FreeReferences(JNIEnv* jni);
-
- jclass GetClass(const std::string& name);
-
- private:
- void LoadClass(JNIEnv* jni, const std::string& name);
-
- std::map<std::string, jclass> classes_;
-};
-
-#endif // WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_JNI_HELPERS_H_
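The ClassReferenceHolder comment above describes a standard JNI constraint: FindClass only consults the application class loader when an app frame is on the stack, so classes must be resolved once in JNI_OnLoad and cached as global references for later use from arbitrary threads. A minimal sketch of that caching step (standard JNI calls from <jni.h>; error handling elided):

#include <jni.h>

#include <map>
#include <string>

static std::map<std::string, jclass> g_cached_classes;

// Call from JNI_OnLoad, where the application class loader is available.
void CacheClass(JNIEnv* jni, const char* name) {
  jclass local_ref = jni->FindClass(name);
  // Promote to a global ref so the class survives beyond this JNI frame.
  g_cached_classes[name] = static_cast<jclass>(jni->NewGlobalRef(local_ref));
  jni->DeleteLocalRef(local_ref);
}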
diff --git a/webrtc/examples/android/media_demo/jni/on_load.cc b/webrtc/examples/android/media_demo/jni/on_load.cc
deleted file mode 100644
index 5827ee8a30..0000000000
--- a/webrtc/examples/android/media_demo/jni/on_load.cc
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <jni.h>
-
-#include <assert.h>
-
-#include "webrtc/examples/android/media_demo/jni/jni_helpers.h"
-#include "webrtc/examples/android/media_demo/jni/voice_engine_jni.h"
-#include "webrtc/voice_engine/include/voe_base.h"
-
-// Macro for native functions that can be found by way of jni-auto discovery.
-// Note extern "C" is needed for "discovery" of native methods to work.
-#define JOWW(rettype, name) \
- extern "C" rettype JNIEXPORT JNICALL Java_org_webrtc_webrtcdemo_##name
-
-static JavaVM* g_vm = NULL;
-
-extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM* vm, void* reserved) {
- // Only called once.
- CHECK(!g_vm, "OnLoad called more than once");
- g_vm = vm;
- return JNI_VERSION_1_4;
-}
-
-JOWW(void, NativeWebRtcContextRegistry_register)(
- JNIEnv* jni,
- jclass,
- jobject context) {
- webrtc_examples::SetVoeDeviceObjects(g_vm);
- CHECK(webrtc::VoiceEngine::SetAndroidObjects(g_vm, context) == 0,
- "Failed to register android objects to voice engine");
-}
-
-JOWW(void, NativeWebRtcContextRegistry_unRegister)(
- JNIEnv* jni,
- jclass) {
- CHECK(webrtc::VoiceEngine::SetAndroidObjects(NULL, NULL) == 0,
- "Failed to unregister android objects from voice engine");
- webrtc_examples::ClearVoeDeviceObjects();
-}
diff --git a/webrtc/examples/android/media_demo/jni/voice_engine_jni.cc b/webrtc/examples/android/media_demo/jni/voice_engine_jni.cc
deleted file mode 100644
index 79d6cbc4b7..0000000000
--- a/webrtc/examples/android/media_demo/jni/voice_engine_jni.cc
+++ /dev/null
@@ -1,423 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// This file contains JNI for the voice engine interfaces.
-// The native functions are found using jni's auto discovery.
-
-#include "webrtc/examples/android/media_demo/jni/voice_engine_jni.h"
-
-#include <map>
-#include <string>
-
-#include "webrtc/base/arraysize.h"
-#include "webrtc/examples/android/media_demo/jni/jni_helpers.h"
-#include "webrtc/modules/utility/interface/helpers_android.h"
-#include "webrtc/test/channel_transport/include/channel_transport.h"
-#include "webrtc/voice_engine/include/voe_audio_processing.h"
-#include "webrtc/voice_engine/include/voe_base.h"
-#include "webrtc/voice_engine/include/voe_codec.h"
-#include "webrtc/voice_engine/include/voe_file.h"
-#include "webrtc/voice_engine/include/voe_hardware.h"
-#include "webrtc/voice_engine/include/voe_network.h"
-#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
-#include "webrtc/voice_engine/include/voe_volume_control.h"
-
-// Macro for native functions that can be found by way of jni-auto discovery.
-// Note extern "C" is needed for "discovery" of native methods to work.
-#define JOWW(rettype, name) \
- extern "C" rettype JNIEXPORT JNICALL Java_org_webrtc_webrtcdemo_##name
-
-namespace {
-
-static JavaVM* g_vm = NULL;
-static ClassReferenceHolder* g_class_reference_holder = NULL;
-
-jclass GetClass(JNIEnv* jni, const char* name) {
- CHECK(g_class_reference_holder, "Class reference holder NULL");
- return g_class_reference_holder->GetClass(name);
-}
-
-static const char* g_classes[] = {"org/webrtc/webrtcdemo/CodecInst"};
-
-template<typename T>
-void ReleaseSubApi(T instance) {
- CHECK(instance->Release() >= 0, "failed to release instance")
-}
-
-class VoiceEngineData {
- public:
- VoiceEngineData()
- : ve(webrtc::VoiceEngine::Create()),
- base(webrtc::VoEBase::GetInterface(ve)),
- codec(webrtc::VoECodec::GetInterface(ve)),
- file(webrtc::VoEFile::GetInterface(ve)),
- netw(webrtc::VoENetwork::GetInterface(ve)),
- apm(webrtc::VoEAudioProcessing::GetInterface(ve)),
- volume(webrtc::VoEVolumeControl::GetInterface(ve)),
- hardware(webrtc::VoEHardware::GetInterface(ve)),
- rtp(webrtc::VoERTP_RTCP::GetInterface(ve)) {
- CHECK(ve != NULL, "Voice engine instance failed to be created");
- CHECK(base != NULL, "Failed to acquire base interface");
- CHECK(codec != NULL, "Failed to acquire codec interface");
- CHECK(file != NULL, "Failed to acquire file interface");
- CHECK(netw != NULL, "Failed to acquire netw interface");
- CHECK(apm != NULL, "Failed to acquire apm interface");
- CHECK(volume != NULL, "Failed to acquire volume interface");
- CHECK(hardware != NULL, "Failed to acquire hardware interface");
- CHECK(rtp != NULL, "Failed to acquire rtp interface");
- }
-
- ~VoiceEngineData() {
- CHECK(channel_transports_.empty(),
- "VoE transports must be deleted before terminating");
- CHECK(base->Terminate() == 0, "VoE failed to terminate");
- ReleaseSubApi(base);
- ReleaseSubApi(codec);
- ReleaseSubApi(file);
- ReleaseSubApi(netw);
- ReleaseSubApi(apm);
- ReleaseSubApi(volume);
- ReleaseSubApi(hardware);
- ReleaseSubApi(rtp);
- webrtc::VoiceEngine* ve_instance = ve;
- CHECK(webrtc::VoiceEngine::Delete(ve_instance), "VoE failed to be deleted");
- }
-
- int CreateChannel() {
- int channel = base->CreateChannel();
- if (channel == -1) {
- return -1;
- }
- CreateTransport(channel);
- return channel;
- }
-
- int DeleteChannel(int channel) {
- if (base->DeleteChannel(channel) != 0) {
- return -1;
- }
- DeleteTransport(channel);
- return 0;
- }
-
- webrtc::test::VoiceChannelTransport* GetTransport(int channel) {
- ChannelTransports::iterator found = channel_transports_.find(channel);
- if (found == channel_transports_.end()) {
- return NULL;
- }
- return found->second;
- }
-
- webrtc::VoiceEngine* const ve;
- webrtc::VoEBase* const base;
- webrtc::VoECodec* const codec;
- webrtc::VoEFile* const file;
- webrtc::VoENetwork* const netw;
- webrtc::VoEAudioProcessing* const apm;
- webrtc::VoEVolumeControl* const volume;
- webrtc::VoEHardware* const hardware;
- webrtc::VoERTP_RTCP* const rtp;
-
- private:
- // Voice engine no longer provides a socket implementation. There is,
- // however, a socket implementation in webrtc::test.
- typedef std::map<int, webrtc::test::VoiceChannelTransport*>
- ChannelTransports;
-
- void CreateTransport(int channel) {
- CHECK(GetTransport(channel) == NULL,
- "Transport already created for VoE channel, inconsistent state");
- channel_transports_[channel] =
- new webrtc::test::VoiceChannelTransport(netw, channel);
- }
- void DeleteTransport(int channel) {
- CHECK(GetTransport(channel) != NULL,
- "VoE channel missing transport, inconsistent state");
- delete channel_transports_[channel];
- channel_transports_.erase(channel);
- }
-
- ChannelTransports channel_transports_;
-};
-
-webrtc::CodecInst* GetCodecInst(JNIEnv* jni, jobject j_codec) {
- jclass j_codec_class = jni->GetObjectClass(j_codec);
- jfieldID native_codec_id =
- jni->GetFieldID(j_codec_class, "nativeCodecInst", "J");
- jlong j_p = jni->GetLongField(j_codec, native_codec_id);
- return reinterpret_cast<webrtc::CodecInst*>(j_p);
-}
-
-} // namespace
-
-namespace webrtc_examples {
-
-void SetVoeDeviceObjects(JavaVM* vm) {
- CHECK(vm, "Trying to register NULL vm");
- g_vm = vm;
- webrtc::AttachThreadScoped ats(g_vm);
- JNIEnv* jni = ats.env();
- g_class_reference_holder = new ClassReferenceHolder(
- jni, g_classes, arraysize(g_classes));
-}
-
-void ClearVoeDeviceObjects() {
- CHECK(g_vm, "Clearing vm without it being set");
- {
- webrtc::AttachThreadScoped ats(g_vm);
- g_class_reference_holder->FreeReferences(ats.env());
- }
- g_vm = NULL;
- delete g_class_reference_holder;
- g_class_reference_holder = NULL;
-}
-
-} // namespace webrtc_examples
-
-VoiceEngineData* GetVoiceEngineData(JNIEnv* jni, jobject j_voe) {
- jclass j_voe_class = jni->GetObjectClass(j_voe);
- jfieldID native_voe_id =
- jni->GetFieldID(j_voe_class, "nativeVoiceEngine", "J");
- jlong j_p = jni->GetLongField(j_voe, native_voe_id);
- return reinterpret_cast<VoiceEngineData*>(j_p);
-}
-
-webrtc::VoiceEngine* GetVoiceEngine(JNIEnv* jni, jobject j_voe) {
- return GetVoiceEngineData(jni, j_voe)->ve;
-}
-
-JOWW(jlong, VoiceEngine_create)(JNIEnv* jni, jclass) {
- VoiceEngineData* voe_data = new VoiceEngineData();
- return jlongFromPointer(voe_data);
-}
-
-JOWW(void, VoiceEngine_dispose)(JNIEnv* jni, jobject j_voe) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- delete voe_data;
-}
-
-JOWW(jint, VoiceEngine_init)(JNIEnv* jni, jobject j_voe) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- return voe_data->base->Init();
-}
-
-JOWW(jint, VoiceEngine_createChannel)(JNIEnv* jni, jobject j_voe) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- return voe_data->CreateChannel();
-}
-
-JOWW(jint, VoiceEngine_deleteChannel)(JNIEnv* jni, jobject j_voe,
- jint channel) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- return voe_data->DeleteChannel(channel);
-}
-
-JOWW(jint, VoiceEngine_setLocalReceiver)(JNIEnv* jni, jobject j_voe,
- jint channel, jint port) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- webrtc::test::VoiceChannelTransport* transport =
- voe_data->GetTransport(channel);
- return transport->SetLocalReceiver(port);
-}
-
-JOWW(jint, VoiceEngine_setSendDestination)(JNIEnv* jni, jobject j_voe,
- jint channel, jint port,
- jstring j_addr) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- std::string addr = JavaToStdString(jni, j_addr);
- webrtc::test::VoiceChannelTransport* transport =
- voe_data->GetTransport(channel);
- return transport->SetSendDestination(addr.c_str(), port);
-}
-
-JOWW(jint, VoiceEngine_startListen)(JNIEnv* jni, jobject j_voe, jint channel) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- return voe_data->base->StartReceive(channel);
-}
-
-JOWW(jint, VoiceEngine_startPlayout)(JNIEnv* jni, jobject j_voe, jint channel) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- return voe_data->base->StartPlayout(channel);
-}
-
-JOWW(jint, VoiceEngine_startSend)(JNIEnv* jni, jobject j_voe, jint channel) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- return voe_data->base->StartSend(channel);
-}
-
-JOWW(jint, VoiceEngine_stopListen)(JNIEnv* jni, jobject j_voe, jint channel) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- return voe_data->base->StartReceive(channel);
-}
-
-JOWW(jint, VoiceEngine_stopPlayout)(JNIEnv* jni, jobject j_voe, jint channel) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- return voe_data->base->StopPlayout(channel);
-}
-
-JOWW(jint, VoiceEngine_stopSend)(JNIEnv* jni, jobject j_voe, jint channel) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- return voe_data->base->StopSend(channel);
-}
-
-JOWW(jint, VoiceEngine_setSpeakerVolume)(JNIEnv* jni, jobject j_voe,
- jint level) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- return voe_data->volume->SetSpeakerVolume(level);
-}
-
-JOWW(jint, VoiceEngine_startPlayingFileLocally)(JNIEnv* jni, jobject j_voe,
- jint channel,
- jstring j_filename,
- jboolean loop) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- std::string filename = JavaToStdString(jni, j_filename);
- return voe_data->file->StartPlayingFileLocally(channel,
- filename.c_str(),
- loop);
-}
-
-JOWW(jint, VoiceEngine_stopPlayingFileLocally)(JNIEnv* jni, jobject j_voe,
- jint channel) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- return voe_data->file->StopPlayingFileLocally(channel);
-}
-
-JOWW(jint, VoiceEngine_startPlayingFileAsMicrophone)(JNIEnv* jni, jobject j_voe,
- jint channel,
- jstring j_filename,
- jboolean loop) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- std::string filename = JavaToStdString(jni, j_filename);
- return voe_data->file->StartPlayingFileAsMicrophone(channel,
- filename.c_str(),
- loop);
-}
-
-JOWW(jint, VoiceEngine_stopPlayingFileAsMicrophone)(JNIEnv* jni, jobject j_voe,
- jint channel) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- return voe_data->file->StopPlayingFileAsMicrophone(channel);
-}
-
-JOWW(jint, VoiceEngine_numOfCodecs)(JNIEnv* jni, jobject j_voe) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- return voe_data->codec->NumOfCodecs();
-}
-
-JOWW(jobject, VoiceEngine_getCodec)(JNIEnv* jni, jobject j_voe, jint index) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- webrtc::CodecInst* codec = new webrtc::CodecInst();
- CHECK(voe_data->codec->GetCodec(index, *codec) == 0,
- "getCodec must be called with valid index");
- jclass j_codec_class = GetClass(jni, "org/webrtc/webrtcdemo/CodecInst");
- jmethodID j_codec_ctor = GetMethodID(jni, j_codec_class, "<init>", "(J)V");
- jobject j_codec =
- jni->NewObject(j_codec_class, j_codec_ctor, jlongFromPointer(codec));
- CHECK_JNI_EXCEPTION(jni, "error during NewObject");
- return j_codec;
-}
-
-JOWW(jint, VoiceEngine_setSendCodec)(JNIEnv* jni, jobject j_voe, jint channel,
- jobject j_codec) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- webrtc::CodecInst* inst = GetCodecInst(jni, j_codec);
- return voe_data->codec->SetSendCodec(channel, *inst);
-}
-
-JOWW(jint, VoiceEngine_setEcStatus)(JNIEnv* jni, jobject j_voe, jboolean enable,
- jint ec_mode) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- return voe_data->apm->SetEcStatus(enable,
- static_cast<webrtc::EcModes>(ec_mode));
-}
-
-JOWW(jint, VoiceEngine_setAecmMode)(JNIEnv* jni, jobject j_voe, jint aecm_mode,
- jboolean cng) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- return voe_data->apm->SetAecmMode(static_cast<webrtc::AecmModes>(aecm_mode),
- cng);
-}
-
-JOWW(jint, VoiceEngine_setAgcStatus)(JNIEnv* jni, jobject j_voe,
- jboolean enable, jint agc_mode) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- return voe_data->apm->SetAgcStatus(enable,
- static_cast<webrtc::AgcModes>(agc_mode));
-}
-
-// Returns the native AgcConfig object associated with the Java object
-// |j_codec|.
-void GetNativeAgcConfig(JNIEnv* jni, jobject j_codec,
- webrtc::AgcConfig* agc_config) {
- jclass j_codec_class = jni->GetObjectClass(j_codec);
- jfieldID dBOv_id = jni->GetFieldID(j_codec_class, "targetLevelDbOv", "I");
- agc_config->targetLeveldBOv = jni->GetIntField(j_codec, dBOv_id);
- jfieldID gain_id =
- jni->GetFieldID(j_codec_class, "digitalCompressionGaindB", "I");
- agc_config->digitalCompressionGaindB = jni->GetIntField(j_codec, gain_id);
- jfieldID limiter_id = jni->GetFieldID(j_codec_class, "limiterEnable", "Z");
- agc_config->limiterEnable = jni->GetBooleanField(j_codec, limiter_id);
-}
-
-JOWW(jint, VoiceEngine_setAgcConfig)(JNIEnv* jni, jobject j_voe,
- jobject j_config) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- webrtc::AgcConfig config;
- GetNativeAgcConfig(jni, j_config, &config);
- return voe_data->apm->SetAgcConfig(config);
-}
-
-JOWW(jint, VoiceEngine_setNsStatus)(JNIEnv* jni, jobject j_voe, jboolean enable,
- jint ns_mode) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- return voe_data->apm->SetNsStatus(enable,
- static_cast<webrtc::NsModes>(ns_mode));
-}
-
-JOWW(jint, VoiceEngine_startDebugRecording)(JNIEnv* jni, jobject j_voe,
- jstring j_filename) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- std::string filename = JavaToStdString(jni, j_filename);
- return voe_data->apm->StartDebugRecording(filename.c_str());
-}
-
-JOWW(jint, VoiceEngine_stopDebugRecording)(JNIEnv* jni, jobject j_voe) {
- VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
- return voe_data->apm->StopDebugRecording();
-}
-
-JOWW(void, CodecInst_dispose)(JNIEnv* jni, jobject j_codec) {
- delete GetCodecInst(jni, j_codec);
-}
-
-JOWW(jint, CodecInst_plType)(JNIEnv* jni, jobject j_codec) {
- return GetCodecInst(jni, j_codec)->pltype;
-}
-
-JOWW(jstring, CodecInst_name)(JNIEnv* jni, jobject j_codec) {
- return jni->NewStringUTF(GetCodecInst(jni, j_codec)->plname);
-}
-
-JOWW(jint, CodecInst_plFrequency)(JNIEnv* jni, jobject j_codec) {
- return GetCodecInst(jni, j_codec)->plfreq;
-}
-
-JOWW(jint, CodecInst_pacSize)(JNIEnv* jni, jobject j_codec) {
- return GetCodecInst(jni, j_codec)->pacsize;
-}
-
-JOWW(jint, CodecInst_channels)(JNIEnv* jni, jobject j_codec) {
- return GetCodecInst(jni, j_codec)->channels;
-}
-
-JOWW(jint, CodecInst_rate)(JNIEnv* jni, jobject j_codec) {
- return GetCodecInst(jni, j_codec)->rate;
-}
diff --git a/webrtc/examples/android/media_demo/jni/voice_engine_jni.h b/webrtc/examples/android/media_demo/jni/voice_engine_jni.h
deleted file mode 100644
index 57ef507653..0000000000
--- a/webrtc/examples/android/media_demo/jni/voice_engine_jni.h
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_VOICE_ENGINE_H_
-#define WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_VOICE_ENGINE_H_
-
-#include <jni.h>
-
-namespace webrtc {
-
-class VoiceEngine;
-
-} // namespace webrtc
-
-namespace webrtc_examples {
-
-void SetVoeDeviceObjects(JavaVM* vm);
-void ClearVoeDeviceObjects();
-
-} // namespace webrtc_examples
-
-webrtc::VoiceEngine* GetVoiceEngine(JNIEnv* jni, jobject j_voe);
-
-#endif // WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_VOICE_ENGINE_H_
diff --git a/webrtc/examples/android/media_demo/project.properties b/webrtc/examples/android/media_demo/project.properties
deleted file mode 100644
index 69eb2d039b..0000000000
--- a/webrtc/examples/android/media_demo/project.properties
+++ /dev/null
@@ -1,14 +0,0 @@
-# This file is automatically generated by Android Tools.
-# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
-#
-# This file must be checked in Version Control Systems.
-#
-# To customize properties used by the Ant build system use,
-# "ant.properties", and override values to adapt the script to your
-# project structure.
-
-# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
-#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
-
-# Project target.
-target=android-23
diff --git a/webrtc/examples/android/media_demo/res/drawable/logo.png b/webrtc/examples/android/media_demo/res/drawable/logo.png
deleted file mode 100644
index 1ff07d1102..0000000000
--- a/webrtc/examples/android/media_demo/res/drawable/logo.png
+++ /dev/null
Binary files differ
diff --git a/webrtc/examples/android/media_demo/res/layout/audiomenu.xml b/webrtc/examples/android/media_demo/res/layout/audiomenu.xml
deleted file mode 100644
index f35547062a..0000000000
--- a/webrtc/examples/android/media_demo/res/layout/audiomenu.xml
+++ /dev/null
@@ -1,80 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
- android:layout_width="fill_parent"
- android:layout_height="fill_parent"
- android:orientation="vertical">
- <TextView android:layout_width="wrap_content"
- android:layout_height="wrap_content"
- android:textStyle="bold"
- android:textSize="24dip"
- android:text="Audio Settings">
- </TextView>
- <TextView android:layout_height="wrap_content"
- android:layout_gravity="bottom"
- android:layout_width="wrap_content"
- android:text="@string/codecType">
- </TextView>
- <Spinner android:id="@+id/spAudioCodecType"
- android:layout_height="wrap_content"
- android:layout_width="fill_parent">
- </Spinner>
- <LinearLayout android:layout_height="wrap_content"
- android:layout_width="fill_parent">
- <TextView android:layout_width="wrap_content"
- android:layout_height="wrap_content"
- android:text="@string/aTxPort">
- </TextView>
- <EditText android:id="@+id/etATxPort"
- android:layout_width="wrap_content"
- android:layout_height="wrap_content"
- android:imeOptions="actionDone"
- android:inputType="number">
- </EditText>
- <TextView android:layout_width="wrap_content"
- android:layout_height="wrap_content"
- android:text="@string/aRxPort">
- </TextView>
- <EditText android:id="@+id/etARxPort"
- android:layout_height="wrap_content"
- android:layout_width="wrap_content"
- android:imeOptions="actionDone"
- android:inputType="number">
- </EditText>
- </LinearLayout>
- <LinearLayout android:layout_height="wrap_content"
- android:layout_width="fill_parent">
- <CheckBox android:id="@+id/cbAecm"
- android:layout_width="wrap_content"
- android:layout_height="wrap_content"
- android:text="@string/aecm">
- </CheckBox>
- <CheckBox android:id="@+id/cbNoiseSuppression"
- android:layout_width="wrap_content"
- android:layout_height="wrap_content"
- android:text="@string/noiseSuppression">
- </CheckBox>
- <CheckBox android:id="@+id/cbAutoGainControl"
- android:layout_width="wrap_content"
- android:layout_height="wrap_content"
- android:text="@string/autoGainControl">
- </CheckBox>
- </LinearLayout>
- <LinearLayout android:layout_height="wrap_content"
- android:layout_width="fill_parent">
- <CheckBox android:id="@+id/cbSpeaker"
- android:layout_width="wrap_content"
- android:layout_height="wrap_content"
- android:text="@string/speaker">
- </CheckBox>
- <CheckBox android:id="@+id/cbDebugRecording"
- android:layout_width="wrap_content"
- android:layout_height="wrap_content"
- android:text="@string/debugRecording">
- </CheckBox>
- <CheckBox android:id="@+id/cbAudioRTPDump"
- android:layout_width="wrap_content"
- android:layout_height="wrap_content"
- android:text="@string/rtpDump">
- </CheckBox>
- </LinearLayout>
-</LinearLayout> \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/res/layout/dropdownitems.xml b/webrtc/examples/android/media_demo/res/layout/dropdownitems.xml
deleted file mode 100644
index 1014612000..0000000000
--- a/webrtc/examples/android/media_demo/res/layout/dropdownitems.xml
+++ /dev/null
@@ -1,17 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
- android:layout_width="fill_parent"
- android:layout_height="wrap_content"
- android:orientation="vertical"
- android:padding="3dip">
- <TextView android:id="@+id/spinner_row"
- android:layout_toRightOf="@+id/image"
- android:padding="3dip"
- android:layout_marginTop="2dip"
- android:textColor="#FFF"
- android:textStyle="bold"
- android:text="description"
- android:layout_marginLeft="5dip"
- android:layout_width="wrap_content"
- android:layout_height="wrap_content"/>
-</RelativeLayout> \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/res/layout/mainmenu.xml b/webrtc/examples/android/media_demo/res/layout/mainmenu.xml
deleted file mode 100644
index 89f5399df7..0000000000
--- a/webrtc/examples/android/media_demo/res/layout/mainmenu.xml
+++ /dev/null
@@ -1,26 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<LinearLayout
- xmlns:android="http://schemas.android.com/apk/res/android"
- android:orientation="horizontal"
- android:layout_width="fill_parent"
- android:layout_height="fill_parent">
- <LinearLayout
- android:orientation="vertical"
- android:layout_width="120dip"
- android:layout_height="fill_parent">
- <TextView android:id="@+id/tvStats"
- android:layout_width="fill_parent"
- android:layout_height="60dip"
- android:textSize="6sp"
- android:text=""/>
- <Button android:id="@+id/btStats"
- android:layout_width="fill_parent"
- android:layout_height="wrap_content"
- android:layout_gravity="bottom"
- android:text="@string/stats"/>
- <Button android:id="@+id/btStartStopCall"
- android:layout_width="fill_parent"
- android:layout_height="wrap_content"
- android:layout_gravity="bottom"/>
- </LinearLayout>
-</LinearLayout> \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/res/layout/settingsmenu.xml b/webrtc/examples/android/media_demo/res/layout/settingsmenu.xml
deleted file mode 100644
index 4fba57eadc..0000000000
--- a/webrtc/examples/android/media_demo/res/layout/settingsmenu.xml
+++ /dev/null
@@ -1,36 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
- android:layout_width="fill_parent"
- android:layout_height="fill_parent"
- android:layout_gravity="right"
- android:orientation="vertical">
- <TextView android:layout_width="wrap_content"
- android:layout_height="wrap_content"
- android:textStyle="bold"
- android:textSize="24dip"
- android:text="@string/gSettings">
- </TextView>
- <LinearLayout android:orientation="horizontal"
- android:layout_height="wrap_content"
- android:layout_width="fill_parent">
- <CheckBox android:id="@+id/cbAudio"
- android:layout_width="wrap_content"
- android:layout_height="wrap_content"
- android:text="@string/enableAudio">
- </CheckBox>
- <CheckBox android:id="@+id/cbLoopback"
- android:layout_width="wrap_content"
- android:layout_height="wrap_content"
- android:text="@string/loopback">
- </CheckBox>
- </LinearLayout>
- <TextView android:layout_width="wrap_content"
- android:layout_height="wrap_content"
- android:text="@string/remoteIp">
- </TextView>
- <EditText android:id="@+id/etRemoteIp"
- android:layout_height="wrap_content"
- android:layout_width="fill_parent"
- android:imeOptions="actionDone">
- </EditText>
-</LinearLayout> \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/res/menu/main_activity_actions.xml b/webrtc/examples/android/media_demo/res/menu/main_activity_actions.xml
deleted file mode 100644
index a4921a6bbe..0000000000
--- a/webrtc/examples/android/media_demo/res/menu/main_activity_actions.xml
+++ /dev/null
@@ -1,5 +0,0 @@
-<menu xmlns:android="http://schemas.android.com/apk/res/android" >
- <item android:id="@+id/action_exit"
- android:icon="@drawable/logo"
- android:title="Exit"/>
-</menu> \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/res/values/bools.xml b/webrtc/examples/android/media_demo/res/values/bools.xml
deleted file mode 100644
index d4f3fc0e95..0000000000
--- a/webrtc/examples/android/media_demo/res/values/bools.xml
+++ /dev/null
@@ -1,13 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<resources>
- <bool name="apm_debug_enabled_default">false</bool>
- <bool name="audio_enabled_default">true</bool>
- <bool name="loopback_enabled_default">true</bool>
- <bool name="nack_enabled_default">true</bool>
- <bool name="opengl_enabled_default">true</bool>
- <bool name="speaker_enabled_default">false</bool>
- <bool name="stats_enabled_default">true</bool>
- <bool name="trace_enabled_default">true</bool>
- <bool name="video_receive_enabled_default">true</bool>
- <bool name="video_send_enabled_default">true</bool>
-</resources> \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/res/values/integers.xml b/webrtc/examples/android/media_demo/res/values/integers.xml
deleted file mode 100644
index 562643b5f2..0000000000
--- a/webrtc/examples/android/media_demo/res/values/integers.xml
+++ /dev/null
@@ -1,13 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<resources>
- <integer name="aRxPortDefault">11113</integer>
- <integer name="aTxPortDefault">11113</integer>
- <integer name="openGl">0</integer>
- <integer name="surfaceView">1</integer>
- <integer name="mediaCodec">2</integer>
- <integer name="defaultView">0</integer>
- <integer name="call_restart_periodicity_ms">0</integer>
- <integer name="video_codec_default">0</integer>
- <integer name="vRxPortDefault">11111</integer>
- <integer name="vTxPortDefault">11111</integer>
-</resources> \ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/res/values/strings.xml b/webrtc/examples/android/media_demo/res/values/strings.xml
deleted file mode 100644
index 297d289b0c..0000000000
--- a/webrtc/examples/android/media_demo/res/values/strings.xml
+++ /dev/null
@@ -1,41 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<resources>
- <string name="aecm">AECM</string>
- <string name="appName">WebRTC Engine Demo</string>
- <string name="aRxPort">Audio Rx Port</string>
- <string name="aTxPort">Audio Tx Port</string>
- <string name="autoGainControl">AGC</string>
- <string name="backCamera">SwitchToBack</string>
- <string name="codecSize">Codec Size</string>
- <string name="codecType">Codec Type</string>
- <string name="debugRecording">APMRecord</string>
- <string name="demoTitle">Video Engine Android Demo</string>
- <string name="enableVideoReceive">Video Receive</string>
- <string name="enableVideoSend">Video Send</string>
- <string name="enableAudio">Audio</string>
- <string name="error">Error</string>
- <string name="errorCamera">Camera Error</string>
- <string name="exit">Exit</string>
- <string name="frontCamera">SwitchToFront</string>
- <string name="gSettings">Global Settings</string>
- <string name="loopback">Loopback</string>
- <string name="loopbackIp">127.0.0.1</string>
- <string name="nack">NACK</string>
- <string name="noiseSuppression">NS</string>
- <string name="remoteIp">Remote IP address</string>
- <string name="rtpDump">rtpdump</string>
- <string name="speaker">Speaker</string>
- <string name="startBoth">Start Both</string>
- <string name="startCall">StartCall</string>
- <string name="startListen">Start Listen</string>
- <string name="startSend">Start Send</string>
- <string name="stats">Stats</string>
- <string name="statsOn">Stats on</string>
- <string name="statsOff">Stats off</string>
- <string name="stopCall">StopCall</string>
- <string name="surfaceView">SurfaceView</string>
- <string name="tag">WEBRTC</string>
- <string name="vRxPort">Video Rx Port</string>
- <string name="vSettings">Video Settings</string>
- <string name="vTxPort">Video Tx Port</string>
-</resources>
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/AudioMenuFragment.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/AudioMenuFragment.java
deleted file mode 100644
index 94e23c2465..0000000000
--- a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/AudioMenuFragment.java
+++ /dev/null
@@ -1,156 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc.webrtcdemo;
-
-import android.app.Activity;
-import android.app.Fragment;
-import android.os.Bundle;
-import android.util.Log;
-import android.view.LayoutInflater;
-import android.view.View;
-import android.view.ViewGroup;
-import android.widget.AdapterView;
-import android.widget.AdapterView.OnItemSelectedListener;
-import android.widget.CheckBox;
-import android.widget.EditText;
-import android.widget.Spinner;
-import android.widget.TextView;
-import java.lang.Integer;
-
-public class AudioMenuFragment extends Fragment {
-
- private String TAG;
- private MenuStateProvider stateProvider;
-
- @Override
- public View onCreateView(LayoutInflater inflater, ViewGroup container,
- Bundle savedInstanceState) {
- View v = inflater.inflate(R.layout.audiomenu, container, false);
-
- TAG = getResources().getString(R.string.tag);
-
- String[] audioCodecsStrings = getEngine().audioCodecsAsString();
- Spinner spAudioCodecType = (Spinner) v.findViewById(R.id.spAudioCodecType);
- spAudioCodecType.setAdapter(new SpinnerAdapter(getActivity(),
- R.layout.dropdownitems,
- audioCodecsStrings,
- inflater));
- spAudioCodecType.setSelection(getEngine().audioCodecIndex());
- spAudioCodecType.setOnItemSelectedListener(new OnItemSelectedListener() {
- public void onItemSelected(AdapterView<?> adapterView, View view,
- int position, long id) {
- getEngine().setAudioCodec(position);
- }
- public void onNothingSelected(AdapterView<?> arg0) {
- Log.d(TAG, "No setting selected");
- }
- });
-
- EditText etATxPort = (EditText) v.findViewById(R.id.etATxPort);
- etATxPort.setText(Integer.toString(getEngine().audioTxPort()));
- etATxPort.setOnClickListener(new View.OnClickListener() {
- public void onClick(View editText) {
- EditText etATxPort = (EditText) editText;
- getEngine()
- .setAudioTxPort(Integer.parseInt(etATxPort.getText().toString()));
- etATxPort.setText(Integer.toString(getEngine().audioTxPort()));
- }
- });
- EditText etARxPort = (EditText) v.findViewById(R.id.etARxPort);
- etARxPort.setText(Integer.toString(getEngine().audioRxPort()));
- etARxPort.setOnClickListener(new View.OnClickListener() {
- public void onClick(View editText) {
- EditText etARxPort = (EditText) editText;
- getEngine()
- .setAudioRxPort(Integer.parseInt(etARxPort.getText().toString()));
- etARxPort.setText(Integer.toString(getEngine().audioRxPort()));
-
- }
- });
-
- CheckBox cbEnableAecm = (CheckBox) v.findViewById(R.id.cbAecm);
- cbEnableAecm.setChecked(getEngine().aecmEnabled());
- cbEnableAecm.setOnClickListener(new View.OnClickListener() {
- public void onClick(View checkBox) {
- CheckBox cbEnableAecm = (CheckBox) checkBox;
- getEngine().setEc(cbEnableAecm.isChecked());
- cbEnableAecm.setChecked(getEngine().aecmEnabled());
- }
- });
- CheckBox cbEnableNs = (CheckBox) v.findViewById(R.id.cbNoiseSuppression);
- cbEnableNs.setChecked(getEngine().nsEnabled());
- cbEnableNs.setOnClickListener(new View.OnClickListener() {
- public void onClick(View checkBox) {
- CheckBox cbEnableNs = (CheckBox) checkBox;
- getEngine().setNs(cbEnableNs.isChecked());
- cbEnableNs.setChecked(getEngine().nsEnabled());
- }
- });
- CheckBox cbEnableAgc = (CheckBox) v.findViewById(R.id.cbAutoGainControl);
- cbEnableAgc.setChecked(getEngine().agcEnabled());
- cbEnableAgc.setOnClickListener(new View.OnClickListener() {
- public void onClick(View checkBox) {
- CheckBox cbEnableAgc = (CheckBox) checkBox;
- getEngine().setAgc(cbEnableAgc.isChecked());
- cbEnableAgc.setChecked(getEngine().agcEnabled());
- }
- });
- CheckBox cbEnableSpeaker = (CheckBox) v.findViewById(R.id.cbSpeaker);
- cbEnableSpeaker.setChecked(getEngine().speakerEnabled());
- cbEnableSpeaker.setOnClickListener(new View.OnClickListener() {
- public void onClick(View checkBox) {
- CheckBox cbEnableSpeaker = (CheckBox) checkBox;
- getEngine().setSpeaker(cbEnableSpeaker.isChecked());
- cbEnableSpeaker.setChecked(getEngine().speakerEnabled());
- }
- });
- CheckBox cbEnableDebugAPM =
- (CheckBox) v.findViewById(R.id.cbDebugRecording);
- cbEnableDebugAPM.setChecked(getEngine().apmRecord());
- cbEnableDebugAPM.setOnClickListener(new View.OnClickListener() {
- public void onClick(View checkBox) {
- CheckBox cbEnableDebugAPM = (CheckBox) checkBox;
- getEngine().setDebuging(cbEnableDebugAPM.isChecked());
- cbEnableDebugAPM.setChecked(getEngine().apmRecord());
- }
- });
- CheckBox cbEnableAudioRTPDump =
- (CheckBox) v.findViewById(R.id.cbAudioRTPDump);
- cbEnableAudioRTPDump.setChecked(getEngine().audioRtpDump());
- cbEnableAudioRTPDump.setOnClickListener(new View.OnClickListener() {
- public void onClick(View checkBox) {
- CheckBox cbEnableAudioRTPDump = (CheckBox) checkBox;
- getEngine().setIncomingVoeRtpDump(cbEnableAudioRTPDump.isChecked());
- cbEnableAudioRTPDump.setChecked(getEngine().audioRtpDump());
- }
- });
- return v;
- }
-
- @Override
- public void onAttach(Activity activity) {
- super.onAttach(activity);
-
- // This makes sure that the container activity has implemented
- // the callback interface. If not, it throws an exception.
- try {
- stateProvider = (MenuStateProvider) activity;
- } catch (ClassCastException e) {
- throw new ClassCastException(activity +
- " must implement MenuStateProvider");
- }
- }
-
- private MediaEngine getEngine() {
- return stateProvider.getEngine();
- }
-
-}
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/CodecInst.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/CodecInst.java
deleted file mode 100644
index 133d63926b..0000000000
--- a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/CodecInst.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc.webrtcdemo;
-
-public class CodecInst {
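- // Wraps the address of a native codec instance; released by dispose().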
- private final long nativeCodecInst;
-
- // CodecInst can only be created from the native layer.
- private CodecInst(long nativeCodecInst) {
- this.nativeCodecInst = nativeCodecInst;
- }
-
- @Override
- public String toString() {
- return name() + " " +
- "PlType: " + plType() + " " +
- "PlFreq: " + plFrequency() + " " +
- "Size: " + pacSize() + " " +
- "Channels: " + channels() + " " +
- "Rate: " + rate();
- }
-
- // Dispose must be called before all references to CodecInst are lost as it
- // will free memory allocated in the native layer.
- public native void dispose();
- public native int plType();
- public native String name();
- public native int plFrequency();
- public native int pacSize();
- public native int channels();
- public native int rate();
-}
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MainMenuFragment.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MainMenuFragment.java
deleted file mode 100644
index 793d784043..0000000000
--- a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MainMenuFragment.java
+++ /dev/null
@@ -1,123 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc.webrtcdemo;
-
-import android.app.Activity;
-import android.app.Fragment;
-import android.os.Bundle;
-import android.util.Log;
-import android.view.LayoutInflater;
-import android.view.SurfaceView;
-import android.view.View;
-import android.view.ViewGroup;
-import android.widget.Button;
-import android.widget.LinearLayout;
-import android.widget.TextView;
-
-public class MainMenuFragment extends Fragment implements MediaEngineObserver {
-
- private String TAG;
- private MenuStateProvider stateProvider;
-
- private Button btStartStopCall;
- private TextView tvStats;
-
- @Override
- public View onCreateView(LayoutInflater inflater, ViewGroup container,
- Bundle savedInstanceState) {
- View v = inflater.inflate(R.layout.mainmenu, container, false);
-
- TAG = getResources().getString(R.string.tag);
-
- Button btStats = (Button) v.findViewById(R.id.btStats);
- boolean stats = getResources().getBoolean(R.bool.stats_enabled_default);
- enableStats(btStats, stats);
- btStats.setOnClickListener(new View.OnClickListener() {
- public void onClick(View button) {
- boolean turnOnStats = ((Button) button).getText().equals(
- getResources().getString(R.string.statsOn));
- enableStats((Button) button, turnOnStats);
- }
- });
- tvStats = (TextView) v.findViewById(R.id.tvStats);
-
- btStartStopCall = (Button) v.findViewById(R.id.btStartStopCall);
- btStartStopCall.setText(getEngine().isRunning() ?
- R.string.stopCall :
- R.string.startCall);
- btStartStopCall.setOnClickListener(new View.OnClickListener() {
- public void onClick(View button) {
- toggleStart();
- }
- });
- return v;
- }
-
- @Override
- public void onAttach(Activity activity) {
- super.onAttach(activity);
-
- // This makes sure that the container activity has implemented
- // the callback interface. If not, it throws an exception.
- try {
- stateProvider = (MenuStateProvider) activity;
- } catch (ClassCastException e) {
- throw new ClassCastException(activity +
- " must implement MenuStateProvider");
- }
- }
-
- // tvStats needs to be updated on the UI thread.
- public void newStats(final String stats) {
- getActivity().runOnUiThread(new Runnable() {
- public void run() {
- tvStats.setText(stats);
- }
- });
- }
-
- private MediaEngine getEngine() {
- return stateProvider.getEngine();
- }
-
- private void enableStats(Button btStats, boolean enable) {
- if (enable) {
- getEngine().setObserver(this);
- } else {
- getEngine().setObserver(null);
- // Clear old stats text by posting empty stats.
- newStats("");
- }
- // If enable is true, stats were just turned on, so clicking the
- // button again should turn them off.
- btStats.setText(enable ? R.string.statsOff : R.string.statsOn);
- }
-
- public void toggleStart() {
- if (getEngine().isRunning()) {
- stopAll();
- } else {
- startCall();
- }
- btStartStopCall.setText(getEngine().isRunning() ?
- R.string.stopCall :
- R.string.startCall);
- }
-
- public void stopAll() {
- getEngine().stop();
- }
-
- private void startCall() {
- getEngine().start();
- }
-}
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngine.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngine.java
deleted file mode 100644
index a7036914ff..0000000000
--- a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngine.java
+++ /dev/null
@@ -1,321 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc.webrtcdemo;
-
-import android.app.AlertDialog;
-import android.content.BroadcastReceiver;
-import android.content.Context;
-import android.content.DialogInterface;
-import android.content.Intent;
-import android.content.IntentFilter;
-import android.media.AudioManager;
-import android.os.Environment;
-import android.util.Log;
-import android.view.OrientationEventListener;
-import java.io.File;
-
-public class MediaEngine {
- private static final String LOG_DIR = "webrtc";
-
- // Checks for and communicates failures to the user (logcat and popup).
- private void check(boolean value, String message) {
- if (value) {
- return;
- }
- Log.e("WEBRTC-CHECK", message);
- AlertDialog alertDialog = new AlertDialog.Builder(context).create();
- alertDialog.setTitle("WebRTC Error");
- alertDialog.setMessage(message);
- alertDialog.setButton(DialogInterface.BUTTON_POSITIVE,
- "OK",
- new DialogInterface.OnClickListener() {
- public void onClick(DialogInterface dialog, int which) {
- dialog.dismiss();
- return;
- }
- }
- );
- alertDialog.show();
- }
-
- // Shared Audio/Video members.
- private final Context context;
- private String remoteIp;
- private boolean enableTrace;
-
- // Audio
- private VoiceEngine voe;
- private int audioChannel;
- private boolean audioEnabled;
- private boolean voeRunning;
- private int audioCodecIndex;
- private int audioTxPort;
- private int audioRxPort;
-
- private boolean speakerEnabled;
- private boolean headsetPluggedIn;
- private boolean enableAgc;
- private boolean enableNs;
- private boolean enableAecm;
-
- private BroadcastReceiver headsetListener;
-
- private boolean audioRtpDump;
- private boolean apmRecord;
-
- private int inFps;
- private int inKbps;
- private int outFps;
- private int outKbps;
- private int inWidth;
- private int inHeight;
-
- public MediaEngine(Context context) {
- this.context = context;
- voe = new VoiceEngine();
- check(voe.init() == 0, "Failed voe Init");
- audioChannel = voe.createChannel();
- check(audioChannel >= 0, "Failed voe CreateChannel");
-
- check(voe.setAecmMode(VoiceEngine.AecmModes.SPEAKERPHONE, false) == 0,
- "VoE set Aecm speakerphone mode failed");
-
- // Set audio mode to communication
- AudioManager audioManager =
- ((AudioManager) context.getSystemService(Context.AUDIO_SERVICE));
- audioManager.setMode(AudioManager.MODE_IN_COMMUNICATION);
- // Listen to headset being plugged in/out.
- IntentFilter receiverFilter = new IntentFilter(Intent.ACTION_HEADSET_PLUG);
- headsetListener = new BroadcastReceiver() {
- @Override
- public void onReceive(Context context, Intent intent) {
- if (Intent.ACTION_HEADSET_PLUG.equals(intent.getAction())) {
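- // The "state" extra is 1 when a headset is plugged in and 0 otherwise.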
- headsetPluggedIn = intent.getIntExtra("state", 0) == 1;
- updateAudioOutput();
- }
- }
- };
- context.registerReceiver(headsetListener, receiverFilter);
- }
-
- public void dispose() {
- check(!voeRunning, "Engine must be stopped before dispose");
- context.unregisterReceiver(headsetListener);
- check(voe.deleteChannel(audioChannel) == 0, "VoE delete channel failed");
- voe.dispose();
- }
-
- public void start() {
- if (audioEnabled) {
- startVoE();
- }
- }
-
- public void stop() {
- stopVoe();
- }
-
- public boolean isRunning() {
- return voeRunning;
- }
-
- public void setRemoteIp(String remoteIp) {
- this.remoteIp = remoteIp;
- updateSendDestination();
- }
-
- public String remoteIp() { return remoteIp; }
-
- private String getDebugDirectory() {
- // Path of the log folder in external storage, e.g. /sdcard/|LOG_DIR|.
- return Environment.getExternalStorageDirectory().toString() + "/" +
- LOG_DIR;
- }
-
- private boolean createDebugDirectory() {
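- // Returns true if the debug directory exists or was created successfully.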
- File webrtc_dir = new File(getDebugDirectory());
- if (!webrtc_dir.exists()) {
- return webrtc_dir.mkdir();
- }
- return webrtc_dir.isDirectory();
- }
-
- public void startVoE() {
- check(!voeRunning, "VoE already started");
- check(voe.startListen(audioChannel) == 0, "Failed StartListen");
- check(voe.startPlayout(audioChannel) == 0, "VoE start playout failed");
- check(voe.startSend(audioChannel) == 0, "VoE start send failed");
- voeRunning = true;
- }
-
- private void stopVoe() {
- check(voeRunning, "VoE not started");
- check(voe.stopSend(audioChannel) == 0, "VoE stop send failed");
- check(voe.stopPlayout(audioChannel) == 0, "VoE stop playout failed");
- check(voe.stopListen(audioChannel) == 0, "VoE stop listen failed");
- voeRunning = false;
- }
-
- public void setAudio(boolean audioEnabled) {
- this.audioEnabled = audioEnabled;
- }
-
- public boolean audioEnabled() { return audioEnabled; }
-
- public int audioCodecIndex() { return audioCodecIndex; }
-
- public void setAudioCodec(int codecNumber) {
- audioCodecIndex = codecNumber;
- CodecInst codec = voe.getCodec(codecNumber);
- check(voe.setSendCodec(audioChannel, codec) == 0, "Failed setSendCodec");
- codec.dispose();
- }
-
- public String[] audioCodecsAsString() {
- String[] retVal = new String[voe.numOfCodecs()];
- for (int i = 0; i < voe.numOfCodecs(); ++i) {
- CodecInst codec = voe.getCodec(i);
- retVal[i] = codec.toString();
- codec.dispose();
- }
- return retVal;
- }
-
- private CodecInst[] defaultAudioCodecs() {
- CodecInst[] retVal = new CodecInst[voe.numOfCodecs()];
- for (int i = 0; i < voe.numOfCodecs(); ++i) {
- retVal[i] = voe.getCodec(i);
- }
- return retVal;
- }
-
- public int getIsacIndex() {
- CodecInst[] codecs = defaultAudioCodecs();
- int isacIndex = 0;
- for (int i = 0; i < codecs.length; ++i) {
- if (codecs[i].name().contains("ISAC")) {
- isacIndex = i;
- break;
- }
- }
- // CodecInst wraps native memory, so release every instance (see the
- // dispose() contract in CodecInst).
- for (CodecInst codec : codecs) {
- codec.dispose();
- }
- return isacIndex;
- }
-
- public void setAudioTxPort(int audioTxPort) {
- this.audioTxPort = audioTxPort;
- updateSendDestination();
- }
-
- public int audioTxPort() { return audioTxPort; }
-
- public void setAudioRxPort(int audioRxPort) {
- check(voe.setLocalReceiver(audioChannel, audioRxPort) == 0,
- "Failed setLocalReceiver");
- this.audioRxPort = audioRxPort;
- }
-
- public int audioRxPort() { return audioRxPort; }
-
- public boolean agcEnabled() { return enableAgc; }
-
- public void setAgc(boolean enable) {
- enableAgc = enable;
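- // Target level 3 dBOv, 9 dB compression gain, limiter enabled
- // (see VoiceEngine.AgcConfig).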
- VoiceEngine.AgcConfig agc_config =
- new VoiceEngine.AgcConfig(3, 9, true);
- check(voe.setAgcConfig(agc_config) == 0, "VoE set AGC Config failed");
- check(voe.setAgcStatus(enableAgc, VoiceEngine.AgcModes.FIXED_DIGITAL) == 0,
- "VoE set AGC Status failed");
- }
-
- public boolean nsEnabled() { return enableNs; }
-
- public void setNs(boolean enable) {
- enableNs = enable;
- check(voe.setNsStatus(enableNs,
- VoiceEngine.NsModes.MODERATE_SUPPRESSION) == 0,
- "VoE set NS Status failed");
- }
-
- public boolean aecmEnabled() { return enableAecm; }
-
- public void setEc(boolean enable) {
- enableAecm = enable;
- check(voe.setEcStatus(enable, VoiceEngine.EcModes.AECM) == 0,
- "voe setEcStatus");
- }
-
- public boolean speakerEnabled() {
- return speakerEnabled;
- }
-
- public void setSpeaker(boolean enable) {
- speakerEnabled = enable;
- updateAudioOutput();
- }
-
- // Debug helpers.
- public boolean apmRecord() { return apmRecord; }
-
- public boolean audioRtpDump() { return audioRtpDump; }
-
- public void setDebugging(boolean enable) {
- apmRecord = enable;
- if (!enable) {
- check(voe.stopDebugRecording() == 0, "Failed stopping debug");
- return;
- }
- if (!createDebugDirectory()) {
- check(false, "Unable to create debug directory.");
- return;
- }
- String debugDirectory = getDebugDirectory();
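- // Writes a timestamped file, e.g. /sdcard/webrtc/apm_1453309813000.dat.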
- check(voe.startDebugRecording(debugDirectory + String.format("/apm_%d.dat",
- System.currentTimeMillis())) == 0,
- "Failed starting debug");
- }
-
- public void setIncomingVoeRtpDump(boolean enable) {
- audioRtpDump = enable;
- if (!enable) {
- check(voe.stopRtpDump(audioChannel,
- VoiceEngine.RtpDirections.INCOMING) == 0,
- "voe stopping rtp dump");
- return;
- }
- String debugDirectory = getDebugDirectory();
- check(voe.startRtpDump(audioChannel, debugDirectory +
- String.format("/voe_%d.rtp", System.currentTimeMillis()),
- VoiceEngine.RtpDirections.INCOMING) == 0,
- "voe starting rtp dump");
- }
-
- private void updateAudioOutput() {
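- // Route audio to the loudspeaker only when no wired headset is plugged in.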
- boolean useSpeaker = !headsetPluggedIn && speakerEnabled;
- AudioManager audioManager =
- ((AudioManager) context.getSystemService(Context.AUDIO_SERVICE));
- audioManager.setSpeakerphoneOn(useSpeaker);
- }
-
- private void updateSendDestination() {
- if (remoteIp == null) {
- return;
- }
- if (audioTxPort != 0) {
- check(voe.setSendDestination(audioChannel, audioTxPort,
- remoteIp) == 0, "VoE set send destination failed");
- }
- }
-
- MediaEngineObserver observer;
- public void setObserver(MediaEngineObserver observer) {
- this.observer = observer;
- }
-}
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngineObserver.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngineObserver.java
deleted file mode 100644
index 3ea91b5e92..0000000000
--- a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngineObserver.java
+++ /dev/null
@@ -1,15 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc.webrtcdemo;
-
-public interface MediaEngineObserver {
- void newStats(String stats);
-}
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MenuStateProvider.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MenuStateProvider.java
deleted file mode 100644
index 08cb508667..0000000000
--- a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MenuStateProvider.java
+++ /dev/null
@@ -1,15 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc.webrtcdemo;
-
-public interface MenuStateProvider {
- public MediaEngine getEngine();
-}
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/NativeWebRtcContextRegistry.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/NativeWebRtcContextRegistry.java
deleted file mode 100644
index 3d4f00a4f6..0000000000
--- a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/NativeWebRtcContextRegistry.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc.webrtcdemo;
-
-import android.content.Context;
-
-public class NativeWebRtcContextRegistry {
- static {
- System.loadLibrary("webrtcdemo-jni");
- }
-
- public native void register(Context context);
- public native void unRegister();
-}
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/RtcpStatistics.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/RtcpStatistics.java
deleted file mode 100644
index dbe817b1af..0000000000
--- a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/RtcpStatistics.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc.webrtcdemo;
-
-public class RtcpStatistics {
- // Definition of fraction lost can be found in RFC3550.
- // It is equivalent to taking the integer part after multiplying the loss
- // fraction by 256.
- public final int fractionLost;
- public final int cumulativeLost;
- public final int extendedMax;
- public final int jitter;
- public final int rttMs;
-
- // Only allowed to be created by the native layer.
- private RtcpStatistics(int fractionLost, int cumulativeLost, int extendedMax,
- int jitter, int rttMs) {
- this.fractionLost = fractionLost;
- this.cumulativeLost = cumulativeLost;
- this.extendedMax = extendedMax;
- this.jitter = jitter;
- this.rttMs = rttMs;
- }
-}
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/SettingsMenuFragment.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/SettingsMenuFragment.java
deleted file mode 100644
index 761f96ce29..0000000000
--- a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/SettingsMenuFragment.java
+++ /dev/null
@@ -1,129 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc.webrtcdemo;
-
-import android.app.Activity;
-import android.app.Fragment;
-import android.os.Bundle;
-import android.util.Log;
-import android.view.LayoutInflater;
-import android.view.View;
-import android.view.ViewGroup;
-import android.widget.CheckBox;
-import android.widget.EditText;
-import android.widget.RadioGroup;
-import android.widget.TextView;
-import java.net.InetAddress;
-import java.net.NetworkInterface;
-import java.net.SocketException;
-import java.util.Enumeration;
-
-public class SettingsMenuFragment extends Fragment
- implements RadioGroup.OnCheckedChangeListener {
-
- private String TAG;
- private MenuStateProvider stateProvider;
-
- EditText etRemoteIp;
-
- @Override
- public View onCreateView(LayoutInflater inflater, ViewGroup container,
- Bundle savedInstanceState) {
- View v = inflater.inflate(R.layout.settingsmenu, container, false);
-
- TAG = getResources().getString(R.string.tag);
-
- CheckBox cbAudio = (CheckBox) v.findViewById(R.id.cbAudio);
- cbAudio.setChecked(getEngine().audioEnabled());
- cbAudio.setOnClickListener(new View.OnClickListener() {
- public void onClick(View checkBox) {
- CheckBox cbAudio = (CheckBox) checkBox;
- getEngine().setAudio(cbAudio.isChecked());
- cbAudio.setChecked(getEngine().audioEnabled());
- }
- });
- boolean loopback =
- getResources().getBoolean(R.bool.loopback_enabled_default);
- CheckBox cbLoopback = (CheckBox) v.findViewById(R.id.cbLoopback);
- cbLoopback.setChecked(loopback);
- cbLoopback.setOnClickListener(new View.OnClickListener() {
- public void onClick(View checkBox) {
- loopbackChanged((CheckBox) checkBox);
- }
- });
- etRemoteIp = (EditText) v.findViewById(R.id.etRemoteIp);
- etRemoteIp.setOnFocusChangeListener(new View.OnFocusChangeListener() {
- public void onFocusChange(View editText, boolean hasFocus) {
- if (!hasFocus) {
- getEngine().setRemoteIp(etRemoteIp.getText().toString());
- }
- }
- });
- // Has to be after remote IP as loopback changes it.
- loopbackChanged(cbLoopback);
- return v;
- }
-
- @Override
- public void onAttach(Activity activity) {
- super.onAttach(activity);
-
- // This makes sure that the container activity has implemented
- // the callback interface. If not, it throws an exception.
- try {
- stateProvider = (MenuStateProvider) activity;
- } catch (ClassCastException e) {
- throw new ClassCastException(activity +
- " must implement MenuStateProvider");
- }
- }
-
- private void loopbackChanged(CheckBox cbLoopback) {
- boolean loopback = cbLoopback.isChecked();
- etRemoteIp.setText(loopback ? getLoopbackIPString() : getLocalIpAddress());
- getEngine().setRemoteIp(etRemoteIp.getText().toString());
- }
-
- private String getLoopbackIPString() {
- return getResources().getString(R.string.loopbackIp);
- }
-
- private String getLocalIpAddress() {
- String localIp = "";
- try {
- for (Enumeration<NetworkInterface> en = NetworkInterface
- .getNetworkInterfaces(); en.hasMoreElements();) {
- NetworkInterface intf = en.nextElement();
- for (Enumeration<InetAddress> enumIpAddr =
- intf.getInetAddresses();
- enumIpAddr.hasMoreElements(); ) {
- InetAddress inetAddress = enumIpAddr.nextElement();
- if (!inetAddress.isLoopbackAddress()) {
- // Keep the local IP address; the last non-loopback
- // interface enumerated wins and is used as the remote IP.
- localIp = inetAddress.getHostAddress().toString();
- }
- }
- }
- } catch (SocketException e) {
- Log.e(TAG, "Unable to get local IP address. Not the end of the world", e);
- }
- return localIp;
- }
-
- private MediaEngine getEngine() {
- return stateProvider.getEngine();
- }
-
- @Override
- public void onCheckedChanged(RadioGroup group, int checkedId) {
- }
-}
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/SpinnerAdapter.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/SpinnerAdapter.java
deleted file mode 100644
index fb04a7aac3..0000000000
--- a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/SpinnerAdapter.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc.webrtcdemo;
-
-import android.widget.ArrayAdapter;
-import android.content.Context;
-import android.widget.TextView;
-import android.view.View;
-import android.view.ViewGroup;
-import android.view.LayoutInflater;
-
-public class SpinnerAdapter extends ArrayAdapter<String> {
- private String[] menuItems;
- LayoutInflater inflater;
- int textViewResourceId;
-
- public SpinnerAdapter(Context context, int textViewResourceId,
- String[] objects, LayoutInflater inflater) {
- super(context, textViewResourceId, objects);
- menuItems = objects;
- this.inflater = inflater;
- this.textViewResourceId = textViewResourceId;
- }
-
- @Override public View getDropDownView(int position, View convertView,
- ViewGroup parent) {
- return getCustomView(position, convertView, parent);
- }
-
- @Override public View getView(int position, View convertView,
- ViewGroup parent) {
- return getCustomView(position, convertView, parent);
- }
-
- private View getCustomView(int position, View v, ViewGroup parent) {
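- // Always inflates a new row; the convertView recycling parameter is unused.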
- View row = inflater.inflate(textViewResourceId, parent, false);
- TextView label = (TextView) row.findViewById(R.id.spinner_row);
- label.setText(menuItems[position]);
- return row;
- }
-}
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/VoiceEngine.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/VoiceEngine.java
deleted file mode 100644
index 900355ad8e..0000000000
--- a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/VoiceEngine.java
+++ /dev/null
@@ -1,117 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc.webrtcdemo;
-
-public class VoiceEngine {
- private final long nativeVoiceEngine;
-
- // Keep in sync (including this comment) with
- // webrtc/common_types.h:NsModes
- public enum NsModes {
- UNCHANGED, DEFAULT, CONFERENCE, LOW_SUPPRESSION,
- MODERATE_SUPPRESSION, HIGH_SUPPRESSION, VERY_HIGH_SUPPRESSION
- }
-
- // Keep in sync (including this comment) with
- // webrtc/common_types.h:AgcModes
- public enum AgcModes {
- UNCHANGED, DEFAULT, ADAPTIVE_ANALOG, ADAPTIVE_DIGITAL,
- FIXED_DIGITAL
- }
-
- // Keep in sync (including this comment) with
- // webrtc/common_types.h:AecmModes
- public enum AecmModes {
- QUIET_EARPIECE_OR_HEADSET, EARPIECE, LOUD_EARPIECE,
- SPEAKERPHONE, LOUD_SPEAKERPHONE
- }
-
- // Keep in sync (including this comment) with
- // webrtc/common_types.h:EcModes
- public enum EcModes { UNCHANGED, DEFAULT, CONFERENCE, AEC, AECM }
-
- // Keep in sync (including this comment) with
- // webrtc/common_types.h:RtpDirections
- public enum RtpDirections { INCOMING, OUTGOING }
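- // The wrappers below map these enums to native ints via ordinal(), which
- // is why each enum must stay in the same order as its native counterpart.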
-
- public static class AgcConfig {
- AgcConfig(int targetLevelDbOv, int digitalCompressionGaindB,
- boolean limiterEnable) {
- this.targetLevelDbOv = targetLevelDbOv;
- this.digitalCompressionGaindB = digitalCompressionGaindB;
- this.limiterEnable = limiterEnable;
- }
- private final int targetLevelDbOv;
- private final int digitalCompressionGaindB;
- private final boolean limiterEnable;
- }
-
- public VoiceEngine() {
- nativeVoiceEngine = create();
- }
- private static native long create();
- public native int init();
- public native void dispose();
- public native int createChannel();
- public native int deleteChannel(int channel);
- public native int setLocalReceiver(int channel, int port);
- public native int setSendDestination(int channel, int port, String ipaddr);
- public native int startListen(int channel);
- public native int startPlayout(int channel);
- public native int startSend(int channel);
- public native int stopListen(int channel);
- public native int stopPlayout(int channel);
- public native int stopSend(int channel);
- public native int setSpeakerVolume(int volume);
- public native int setLoudspeakerStatus(boolean enable);
- public native int startPlayingFileLocally(
- int channel,
- String fileName,
- boolean loop);
- public native int stopPlayingFileLocally(int channel);
- public native int startPlayingFileAsMicrophone(
- int channel,
- String fileName,
- boolean loop);
- public native int stopPlayingFileAsMicrophone(int channel);
- public native int numOfCodecs();
- public native CodecInst getCodec(int index);
- public native int setSendCodec(int channel, CodecInst codec);
- public int setEcStatus(boolean enable, EcModes mode) {
- return setEcStatus(enable, mode.ordinal());
- }
- private native int setEcStatus(boolean enable, int ec_mode);
- public int setAecmMode(AecmModes aecm_mode, boolean cng) {
- return setAecmMode(aecm_mode.ordinal(), cng);
- }
- private native int setAecmMode(int aecm_mode, boolean cng);
- public int setAgcStatus(boolean enable, AgcModes agc_mode) {
- return setAgcStatus(enable, agc_mode.ordinal());
- }
- private native int setAgcStatus(boolean enable, int agc_mode);
- public native int setAgcConfig(AgcConfig agc_config);
- public int setNsStatus(boolean enable, NsModes ns_mode) {
- return setNsStatus(enable, ns_mode.ordinal());
- }
- private native int setNsStatus(boolean enable, int ns_mode);
- public native int startDebugRecording(String file);
- public native int stopDebugRecording();
- public int startRtpDump(int channel, String file,
- RtpDirections direction) {
- return startRtpDump(channel, file, direction.ordinal());
- }
- private native int startRtpDump(int channel, String file,
- int direction);
- public int stopRtpDump(int channel, RtpDirections direction) {
- return stopRtpDump(channel, direction.ordinal());
- }
- private native int stopRtpDump(int channel, int direction);
-}
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/WebRTCDemo.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/WebRTCDemo.java
deleted file mode 100644
index 3b972cf126..0000000000
--- a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/WebRTCDemo.java
+++ /dev/null
@@ -1,210 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc.webrtcdemo;
-
-import android.app.ActionBar.Tab;
-import android.app.ActionBar;
-import android.app.Activity;
-import android.app.Fragment;
-import android.app.FragmentTransaction;
-import android.content.pm.ActivityInfo;
-import android.media.AudioManager;
-import android.os.Bundle;
-import android.os.Handler;
-import android.view.KeyEvent;
-import android.view.Menu;
-import android.view.MenuInflater;
-import android.view.MenuItem;
-import android.view.WindowManager;
-
-public class WebRTCDemo extends Activity implements MenuStateProvider {
-
- // From http://developer.android.com/guide/topics/ui/actionbar.html
- public static class TabListener<T extends Fragment>
- implements ActionBar.TabListener {
- private Fragment fragment;
- private final Activity activity;
- private final String tag;
- private final Class<T> instance;
- private final Bundle args;
-
- public TabListener(Activity activity, String tag, Class<T> clz) {
- this(activity, tag, clz, null);
- }
-
- public TabListener(Activity activity, String tag, Class<T> clz,
- Bundle args) {
- this.activity = activity;
- this.tag = tag;
- this.instance = clz;
- this.args = args;
- }
-
- public void onTabSelected(Tab tab, FragmentTransaction ft) {
- // Check if the fragment is already initialized
- if (fragment == null) {
- // If not, instantiate and add it to the activity
- fragment = Fragment.instantiate(activity, instance.getName(), args);
- ft.add(android.R.id.content, fragment, tag);
- } else {
- // If it exists, simply attach it in order to show it
- ft.attach(fragment);
- }
- }
-
- public void onTabUnselected(Tab tab, FragmentTransaction ft) {
- if (fragment != null) {
- // Detach the fragment, because another one is being attached
- ft.detach(fragment);
- }
- }
-
- public void onTabReselected(Tab tab, FragmentTransaction ft) {
- // User selected the already selected tab. Do nothing.
- }
- }
-
- private NativeWebRtcContextRegistry contextRegistry = null;
- private MediaEngine mediaEngine = null;
- private Handler handler;
- public MediaEngine getEngine() { return mediaEngine; }
-
- @Override
- public void onCreate(Bundle savedInstanceState) {
- super.onCreate(savedInstanceState);
-
- // Global settings.
- getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
- getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
-
- // State.
- // Must be instantiated before MediaEngine.
- contextRegistry = new NativeWebRtcContextRegistry();
- contextRegistry.register(this);
-
- // Load all settings dictated in xml.
- mediaEngine = new MediaEngine(this);
- mediaEngine.setRemoteIp(getResources().getString(R.string.loopbackIp));
-
- mediaEngine.setAudio(getResources().getBoolean(
- R.bool.audio_enabled_default));
- mediaEngine.setAudioCodec(mediaEngine.getIsacIndex());
- mediaEngine.setAudioRxPort(getResources().getInteger(
- R.integer.aRxPortDefault));
- mediaEngine.setAudioTxPort(getResources().getInteger(
- R.integer.aTxPortDefault));
- mediaEngine.setSpeaker(getResources().getBoolean(
- R.bool.speaker_enabled_default));
- mediaEngine.setDebugging(getResources().getBoolean(
- R.bool.apm_debug_enabled_default));
-
- // Create action bar with all tabs.
- ActionBar actionBar = getActionBar();
- actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_TABS);
- actionBar.setDisplayShowTitleEnabled(false);
-
- Tab tab = actionBar.newTab()
- .setText("Main")
- .setTabListener(new TabListener<MainMenuFragment>(
- this, "main", MainMenuFragment.class));
- actionBar.addTab(tab);
-
- tab = actionBar.newTab()
- .setText("Settings")
- .setTabListener(new TabListener<SettingsMenuFragment>(
- this, "Settings", SettingsMenuFragment.class));
- actionBar.addTab(tab);
-
- tab = actionBar.newTab()
- .setText("Audio")
- .setTabListener(new TabListener<AudioMenuFragment>(
- this, "Audio", AudioMenuFragment.class));
- actionBar.addTab(tab);
-
- enableTimedStartStop();
-
- // Hint that voice call audio stream should be used for hardware volume
- // controls.
- setVolumeControlStream(AudioManager.STREAM_VOICE_CALL);
- }
-
- @Override
- public boolean onCreateOptionsMenu(Menu menu) {
- MenuInflater inflater = getMenuInflater();
- inflater.inflate(R.menu.main_activity_actions, menu);
- return super.onCreateOptionsMenu(menu);
- }
-
- @Override
- public boolean onOptionsItemSelected(MenuItem item) {
- // Handle presses on the action bar items
- switch (item.getItemId()) {
- case R.id.action_exit:
- MainMenuFragment main = (MainMenuFragment)getFragmentManager()
- .findFragmentByTag("main");
- main.stopAll();
- finish();
- return true;
- default:
- return super.onOptionsItemSelected(item);
- }
- }
-
- @Override
- public void onDestroy() {
- disableTimedStartStop();
- mediaEngine.dispose();
- contextRegistry.unRegister();
- super.onDestroy();
- }
-
- @Override
- public boolean onKeyDown(int keyCode, KeyEvent event) {
- if (keyCode == KeyEvent.KEYCODE_BACK) {
- // Prevent app from running in the background.
- MainMenuFragment main = (MainMenuFragment)getFragmentManager()
- .findFragmentByTag("main");
- main.stopAll();
- finish();
- return true;
- }
- return super.onKeyDown(keyCode, event);
- }
-
- private int getCallRestartPeriodicity() {
- return getResources().getInteger(R.integer.call_restart_periodicity_ms);
- }
-
- // Thread repeatedly calling start/stop.
- void enableTimedStartStop() {
- // Periodicity == 0 <-> Disabled.
- if (getCallRestartPeriodicity() > 0) {
- handler = new Handler();
- handler.postDelayed(startOrStopCallback, getCallRestartPeriodicity());
- }
- }
-
- void disableTimedStartStop() {
- if (handler != null) {
- handler.removeCallbacks(startOrStopCallback);
- }
- }
-
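- // Toggles the call and re-arms itself, so start/stop repeats indefinitely.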
- private Runnable startOrStopCallback = new Runnable() {
- public void run() {
- MainMenuFragment main = (MainMenuFragment)getFragmentManager()
- .findFragmentByTag("main");
- main.toggleStart();
- handler.postDelayed(startOrStopCallback, getCallRestartPeriodicity());
- }
- };
-}
diff --git a/webrtc/examples/androidapp/AndroidManifest.xml b/webrtc/examples/androidapp/AndroidManifest.xml
index 6a91cfdeed..bd0dee821a 100644
--- a/webrtc/examples/androidapp/AndroidManifest.xml
+++ b/webrtc/examples/androidapp/AndroidManifest.xml
@@ -7,7 +7,7 @@
<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="android.hardware.camera.autofocus" />
<uses-feature android:glEsVersion="0x00020000" android:required="true" />
- <uses-sdk android:minSdkVersion="14" android:targetSdkVersion="21" />
+ <uses-sdk android:minSdkVersion="16" android:targetSdkVersion="21" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
diff --git a/webrtc/examples/androidapp/res/values/strings.xml b/webrtc/examples/androidapp/res/values/strings.xml
index b3c55b438e..a5f64bad5b 100644
--- a/webrtc/examples/androidapp/res/values/strings.xml
+++ b/webrtc/examples/androidapp/res/values/strings.xml
@@ -71,6 +71,11 @@
<string name="pref_hwcodec_dlg">Use hardware accelerated video codec (if available).</string>
<string name="pref_hwcodec_default">true</string>
+ <string name="pref_capturetotexture_key">capturetotexture_preference</string>
+ <string name="pref_capturetotexture_title">Video capture to surface texture.</string>
+ <string name="pref_capturetotexture_dlg">Capture video to textures (if available).</string>
+ <string name="pref_capturetotexture_default">false</string>
+
<string name="pref_value_enabled">Enabled</string>
<string name="pref_value_disabled">Disabled</string>
@@ -97,14 +102,19 @@
<string name="pref_noaudioprocessing_dlg">Disable audio processing pipeline.</string>
<string name="pref_noaudioprocessing_default">false</string>
+ <string name="pref_aecdump_key">aecdump_preference</string>
+ <string name="pref_aecdump_title">Create aecdump.</string>
+ <string name="pref_aecdump_dlg">Enable diagnostic audio recordings.</string>
+ <string name="pref_aecdump_default">false</string>
+
+ <string name="pref_opensles_key">opensles_preference</string>
+ <string name="pref_opensles_title">Use OpenSL ES for audio playback.</string>
+ <string name="pref_opensles_dlg">Use OpenSL ES for audio playback.</string>
+ <string name="pref_opensles_default">false</string>
+
<string name="pref_miscsettings_key">misc_settings_key</string>
<string name="pref_miscsettings_title">Miscellaneous settings.</string>
- <string name="pref_cpu_usage_detection_key">cpu_usage_detection</string>
- <string name="pref_cpu_usage_detection_title">CPU overuse detection.</string>
- <string name="pref_cpu_usage_detection_dlg">Adapt transmission to CPU status.</string>
- <string name="pref_cpu_usage_detection_default" translatable="false">true</string>
-
<string name="pref_room_server_url_key">room_server_url_preference</string>
<string name="pref_room_server_url_title">Room server URL.</string>
<string name="pref_room_server_url_dlg">Enter a room server URL.</string>
@@ -115,4 +125,9 @@
<string name="pref_displayhud_dlg">Display call statistics.</string>
<string name="pref_displayhud_default" translatable="false">false</string>
+ <string name="pref_tracing_key">tracing_preference</string>
+ <string name="pref_tracing_title">Debug performance tracing.</string>
+ <string name="pref_tracing_dlg">Debug performance tracing.</string>
+ <string name="pref_tracing_default" translatable="false">false</string>
+
</resources>
diff --git a/webrtc/examples/androidapp/res/xml/preferences.xml b/webrtc/examples/androidapp/res/xml/preferences.xml
index c580e0cb77..0c6f916a2d 100644
--- a/webrtc/examples/androidapp/res/xml/preferences.xml
+++ b/webrtc/examples/androidapp/res/xml/preferences.xml
@@ -60,6 +60,12 @@
android:title="@string/pref_hwcodec_title"
android:dialogTitle="@string/pref_hwcodec_dlg"
android:defaultValue="@string/pref_hwcodec_default" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_capturetotexture_key"
+ android:title="@string/pref_capturetotexture_title"
+ android:dialogTitle="@string/pref_capturetotexture_dlg"
+ android:defaultValue="@string/pref_capturetotexture_default" />
</PreferenceCategory>
<PreferenceCategory
@@ -94,18 +100,24 @@
android:title="@string/pref_noaudioprocessing_title"
android:dialogTitle="@string/pref_noaudioprocessing_dlg"
android:defaultValue="@string/pref_noaudioprocessing_default" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_aecdump_key"
+ android:title="@string/pref_aecdump_title"
+ android:dialogTitle="@string/pref_aecdump_dlg"
+ android:defaultValue="@string/pref_aecdump_default" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_opensles_key"
+ android:title="@string/pref_opensles_title"
+ android:dialogTitle="@string/pref_opensles_dlg"
+ android:defaultValue="@string/pref_opensles_default" />
</PreferenceCategory>
<PreferenceCategory
android:key="@string/pref_miscsettings_key"
android:title="@string/pref_miscsettings_title">
- <CheckBoxPreference
- android:key="@string/pref_cpu_usage_detection_key"
- android:title="@string/pref_cpu_usage_detection_title"
- android:dialogTitle="@string/pref_cpu_usage_detection_dlg"
- android:defaultValue="@string/pref_cpu_usage_detection_default" />
-
<EditTextPreference
android:key="@string/pref_room_server_url_key"
android:title="@string/pref_room_server_url_title"
@@ -118,6 +130,12 @@
android:title="@string/pref_displayhud_title"
android:dialogTitle="@string/pref_displayhud_dlg"
android:defaultValue="@string/pref_displayhud_default" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_tracing_key"
+ android:title="@string/pref_tracing_title"
+ android:dialogTitle="@string/pref_tracing_dlg"
+ android:defaultValue="@string/pref_tracing_default" />
</PreferenceCategory>
</PreferenceScreen>
diff --git a/webrtc/examples/androidapp/src/org/appspot/apprtc/CallActivity.java b/webrtc/examples/androidapp/src/org/appspot/apprtc/CallActivity.java
index 8ae7981e36..b9abf11eb3 100644
--- a/webrtc/examples/androidapp/src/org/appspot/apprtc/CallActivity.java
+++ b/webrtc/examples/androidapp/src/org/appspot/apprtc/CallActivity.java
@@ -22,7 +22,6 @@ import android.content.DialogInterface;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.net.Uri;
-import android.opengl.GLSurfaceView;
import android.os.Bundle;
import android.os.Handler;
import android.util.Log;
@@ -67,16 +66,21 @@ public class CallActivity extends Activity
"org.appspot.apprtc.VIDEOCODEC";
public static final String EXTRA_HWCODEC_ENABLED =
"org.appspot.apprtc.HWCODEC";
+ public static final String EXTRA_CAPTURETOTEXTURE_ENABLED =
+ "org.appspot.apprtc.CAPTURETOTEXTURE";
public static final String EXTRA_AUDIO_BITRATE =
"org.appspot.apprtc.AUDIO_BITRATE";
public static final String EXTRA_AUDIOCODEC =
"org.appspot.apprtc.AUDIOCODEC";
public static final String EXTRA_NOAUDIOPROCESSING_ENABLED =
"org.appspot.apprtc.NOAUDIOPROCESSING";
- public static final String EXTRA_CPUOVERUSE_DETECTION =
- "org.appspot.apprtc.CPUOVERUSE_DETECTION";
+ public static final String EXTRA_AECDUMP_ENABLED =
+ "org.appspot.apprtc.AECDUMP";
+ public static final String EXTRA_OPENSLES_ENABLED =
+ "org.appspot.apprtc.OPENSLES";
public static final String EXTRA_DISPLAY_HUD =
"org.appspot.apprtc.DISPLAY_HUD";
+ public static final String EXTRA_TRACING = "org.appspot.apprtc.TRACING";
public static final String EXTRA_CMDLINE =
"org.appspot.apprtc.CMDLINE";
public static final String EXTRA_RUNTIME =
@@ -177,9 +181,9 @@ public class CallActivity extends Activity
remoteRender.setOnClickListener(listener);
// Create video renderers.
- rootEglBase = new EglBase();
- localRender.init(rootEglBase.getContext(), null);
- remoteRender.init(rootEglBase.getContext(), null);
+ rootEglBase = EglBase.create();
+ localRender.init(rootEglBase.getEglBaseContext(), null);
+ remoteRender.init(rootEglBase.getEglBaseContext(), null);
localRender.setZOrderMediaOverlay(true);
updateVideoView();
@@ -212,19 +216,23 @@ public class CallActivity extends Activity
return;
}
boolean loopback = intent.getBooleanExtra(EXTRA_LOOPBACK, false);
+ boolean tracing = intent.getBooleanExtra(EXTRA_TRACING, false);
peerConnectionParameters = new PeerConnectionParameters(
intent.getBooleanExtra(EXTRA_VIDEO_CALL, true),
loopback,
+ tracing,
intent.getIntExtra(EXTRA_VIDEO_WIDTH, 0),
intent.getIntExtra(EXTRA_VIDEO_HEIGHT, 0),
intent.getIntExtra(EXTRA_VIDEO_FPS, 0),
intent.getIntExtra(EXTRA_VIDEO_BITRATE, 0),
intent.getStringExtra(EXTRA_VIDEOCODEC),
intent.getBooleanExtra(EXTRA_HWCODEC_ENABLED, true),
+ intent.getBooleanExtra(EXTRA_CAPTURETOTEXTURE_ENABLED, false),
intent.getIntExtra(EXTRA_AUDIO_BITRATE, 0),
intent.getStringExtra(EXTRA_AUDIOCODEC),
intent.getBooleanExtra(EXTRA_NOAUDIOPROCESSING_ENABLED, false),
- intent.getBooleanExtra(EXTRA_CPUOVERUSE_DETECTION, true));
+ intent.getBooleanExtra(EXTRA_AECDUMP_ENABLED, false),
+ intent.getBooleanExtra(EXTRA_OPENSLES_ENABLED, false));
commandLineRun = intent.getBooleanExtra(EXTRA_CMDLINE, false);
runTimeMs = intent.getIntExtra(EXTRA_RUNTIME, 0);
@@ -246,6 +254,7 @@ public class CallActivity extends Activity
// For command line execution run connection for <runTimeMs> and exit.
if (commandLineRun && runTimeMs > 0) {
(new Handler()).postDelayed(new Runnable() {
+ @Override
public void run() {
disconnect();
}
@@ -480,7 +489,7 @@ public class CallActivity extends Activity
signalingParameters = params;
logAndToast("Creating peer connection, delay=" + delta + "ms");
- peerConnectionClient.createPeerConnection(rootEglBase.getContext(),
+ peerConnectionClient.createPeerConnection(rootEglBase.getEglBaseContext(),
localRender, remoteRender, signalingParameters);
if (signalingParameters.initiator) {
diff --git a/webrtc/examples/androidapp/src/org/appspot/apprtc/ConnectActivity.java b/webrtc/examples/androidapp/src/org/appspot/apprtc/ConnectActivity.java
index 0bdaebb5b0..e55dba0cba 100644
--- a/webrtc/examples/androidapp/src/org/appspot/apprtc/ConnectActivity.java
+++ b/webrtc/examples/androidapp/src/org/appspot/apprtc/ConnectActivity.java
@@ -65,9 +65,12 @@ public class ConnectActivity extends Activity {
private String keyprefAudioBitrateValue;
private String keyprefAudioCodec;
private String keyprefHwCodecAcceleration;
+ private String keyprefCaptureToTexture;
private String keyprefNoAudioProcessingPipeline;
- private String keyprefCpuUsageDetection;
+ private String keyprefAecDump;
+ private String keyprefOpenSLES;
private String keyprefDisplayHud;
+ private String keyprefTracing;
private String keyprefRoomServerUrl;
private String keyprefRoom;
private String keyprefRoomList;
@@ -89,12 +92,15 @@ public class ConnectActivity extends Activity {
keyprefVideoBitrateValue = getString(R.string.pref_startvideobitratevalue_key);
keyprefVideoCodec = getString(R.string.pref_videocodec_key);
keyprefHwCodecAcceleration = getString(R.string.pref_hwcodec_key);
+ keyprefCaptureToTexture = getString(R.string.pref_capturetotexture_key);
keyprefAudioBitrateType = getString(R.string.pref_startaudiobitrate_key);
keyprefAudioBitrateValue = getString(R.string.pref_startaudiobitratevalue_key);
keyprefAudioCodec = getString(R.string.pref_audiocodec_key);
keyprefNoAudioProcessingPipeline = getString(R.string.pref_noaudioprocessing_key);
- keyprefCpuUsageDetection = getString(R.string.pref_cpu_usage_detection_key);
+ keyprefAecDump = getString(R.string.pref_aecdump_key);
+ keyprefOpenSLES = getString(R.string.pref_opensles_key);
keyprefDisplayHud = getString(R.string.pref_displayhud_key);
+ keyprefTracing = getString(R.string.pref_tracing_key);
keyprefRoomServerUrl = getString(R.string.pref_room_server_url_key);
keyprefRoom = getString(R.string.pref_room_key);
keyprefRoomList = getString(R.string.pref_room_list_key);
@@ -253,11 +259,25 @@ public class ConnectActivity extends Activity {
boolean hwCodec = sharedPref.getBoolean(keyprefHwCodecAcceleration,
Boolean.valueOf(getString(R.string.pref_hwcodec_default)));
+ // Check Capture to texture.
+ boolean captureToTexture = sharedPref.getBoolean(keyprefCaptureToTexture,
+ Boolean.valueOf(getString(R.string.pref_capturetotexture_default)));
+
// Check Disable Audio Processing flag.
boolean noAudioProcessing = sharedPref.getBoolean(
keyprefNoAudioProcessingPipeline,
Boolean.valueOf(getString(R.string.pref_noaudioprocessing_default)));
+ // Check aecdump (diagnostic audio recording) flag.
+ boolean aecDump = sharedPref.getBoolean(
+ keyprefAecDump,
+ Boolean.valueOf(getString(R.string.pref_aecdump_default)));
+
+ // Check OpenSL ES enabled flag.
+ boolean useOpenSLES = sharedPref.getBoolean(
+ keyprefOpenSLES,
+ Boolean.valueOf(getString(R.string.pref_opensles_default)));
+
// Get video resolution from settings.
int videoWidth = 0;
int videoHeight = 0;
@@ -313,16 +333,13 @@ public class ConnectActivity extends Activity {
audioStartBitrate = Integer.parseInt(bitrateValue);
}
- // Test if CpuOveruseDetection should be disabled. By default it is on.
- boolean cpuOveruseDetection = sharedPref.getBoolean(
- keyprefCpuUsageDetection,
- Boolean.valueOf(
- getString(R.string.pref_cpu_usage_detection_default)));
-
// Check statistics display option.
boolean displayHud = sharedPref.getBoolean(keyprefDisplayHud,
Boolean.valueOf(getString(R.string.pref_displayhud_default)));
+ boolean tracing = sharedPref.getBoolean(
+ keyprefTracing, Boolean.valueOf(getString(R.string.pref_tracing_default)));
+
// Start AppRTCDemo activity.
Log.d(TAG, "Connecting to room " + roomId + " at URL " + roomUrl);
if (validateUrl(roomUrl)) {
@@ -340,13 +357,15 @@ public class ConnectActivity extends Activity {
intent.putExtra(CallActivity.EXTRA_VIDEO_BITRATE, videoStartBitrate);
intent.putExtra(CallActivity.EXTRA_VIDEOCODEC, videoCodec);
intent.putExtra(CallActivity.EXTRA_HWCODEC_ENABLED, hwCodec);
+ intent.putExtra(CallActivity.EXTRA_CAPTURETOTEXTURE_ENABLED, captureToTexture);
intent.putExtra(CallActivity.EXTRA_NOAUDIOPROCESSING_ENABLED,
noAudioProcessing);
+ intent.putExtra(CallActivity.EXTRA_AECDUMP_ENABLED, aecDump);
+ intent.putExtra(CallActivity.EXTRA_OPENSLES_ENABLED, useOpenSLES);
intent.putExtra(CallActivity.EXTRA_AUDIO_BITRATE, audioStartBitrate);
intent.putExtra(CallActivity.EXTRA_AUDIOCODEC, audioCodec);
- intent.putExtra(CallActivity.EXTRA_CPUOVERUSE_DETECTION,
- cpuOveruseDetection);
intent.putExtra(CallActivity.EXTRA_DISPLAY_HUD, displayHud);
+ intent.putExtra(CallActivity.EXTRA_TRACING, tracing);
intent.putExtra(CallActivity.EXTRA_CMDLINE, commandLineRun);
intent.putExtra(CallActivity.EXTRA_RUNTIME, runTimeMs);
diff --git a/webrtc/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java b/webrtc/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java
index 263046b2e9..c41dd66345 100644
--- a/webrtc/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java
+++ b/webrtc/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java
@@ -11,12 +11,15 @@
package org.appspot.apprtc;
import android.content.Context;
+import android.os.ParcelFileDescriptor;
+import android.os.Environment;
import android.util.Log;
import org.appspot.apprtc.AppRTCClient.SignalingParameters;
import org.appspot.apprtc.util.LooperExecutor;
import org.webrtc.CameraEnumerationAndroid;
import org.webrtc.DataChannel;
+import org.webrtc.EglBase;
import org.webrtc.IceCandidate;
import org.webrtc.Logging;
import org.webrtc.MediaCodecVideoEncoder;
@@ -34,7 +37,10 @@ import org.webrtc.VideoCapturerAndroid;
import org.webrtc.VideoRenderer;
import org.webrtc.VideoSource;
import org.webrtc.VideoTrack;
+import org.webrtc.voiceengine.WebRtcAudioManager;
+import java.io.File;
+import java.io.IOException;
import java.util.EnumSet;
import java.util.LinkedList;
import java.util.Timer;
@@ -42,8 +48,6 @@ import java.util.TimerTask;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
-import javax.microedition.khronos.egl.EGLContext;
-
/**
* Peer connection client implementation.
*
@@ -55,7 +59,6 @@ public class PeerConnectionClient {
public static final String VIDEO_TRACK_ID = "ARDAMSv0";
public static final String AUDIO_TRACK_ID = "ARDAMSa0";
private static final String TAG = "PCRTCClient";
- private static final String FIELD_TRIAL_VP9 = "WebRTC-SupportVP9/Enabled/";
private static final String FIELD_TRIAL_AUTOMATIC_RESIZE =
"WebRTC-MediaCodecVideoEncoder-AutomaticResize/Enabled/";
private static final String VIDEO_CODEC_VP8 = "VP8";
@@ -94,7 +97,7 @@ public class PeerConnectionClient {
private VideoSource videoSource;
private boolean videoCallEnabled;
private boolean preferIsac;
- private boolean preferH264;
+ private String preferredVideoCodec;
private boolean videoSourceStopped;
private boolean isError;
private Timer statsTimer;
@@ -104,6 +107,7 @@ public class PeerConnectionClient {
private MediaConstraints pcConstraints;
private MediaConstraints videoConstraints;
private MediaConstraints audioConstraints;
+ private ParcelFileDescriptor aecDumpFileDescriptor;
private MediaConstraints sdpMediaConstraints;
private PeerConnectionParameters peerConnectionParameters;
// Queued remote ICE candidates are consumed only after both local and
@@ -127,35 +131,41 @@ public class PeerConnectionClient {
public static class PeerConnectionParameters {
public final boolean videoCallEnabled;
public final boolean loopback;
+ public final boolean tracing;
public final int videoWidth;
public final int videoHeight;
public final int videoFps;
public final int videoStartBitrate;
public final String videoCodec;
public final boolean videoCodecHwAcceleration;
+ public final boolean captureToTexture;
public final int audioStartBitrate;
public final String audioCodec;
public final boolean noAudioProcessing;
- public final boolean cpuOveruseDetection;
+ public final boolean aecDump;
+ public final boolean useOpenSLES;
public PeerConnectionParameters(
- boolean videoCallEnabled, boolean loopback,
+ boolean videoCallEnabled, boolean loopback, boolean tracing,
int videoWidth, int videoHeight, int videoFps, int videoStartBitrate,
- String videoCodec, boolean videoCodecHwAcceleration,
+ String videoCodec, boolean videoCodecHwAcceleration, boolean captureToTexture,
int audioStartBitrate, String audioCodec,
- boolean noAudioProcessing, boolean cpuOveruseDetection) {
+ boolean noAudioProcessing, boolean aecDump, boolean useOpenSLES) {
this.videoCallEnabled = videoCallEnabled;
this.loopback = loopback;
+ this.tracing = tracing;
this.videoWidth = videoWidth;
this.videoHeight = videoHeight;
this.videoFps = videoFps;
this.videoStartBitrate = videoStartBitrate;
this.videoCodec = videoCodec;
this.videoCodecHwAcceleration = videoCodecHwAcceleration;
+ this.captureToTexture = captureToTexture;
this.audioStartBitrate = audioStartBitrate;
this.audioCodec = audioCodec;
this.noAudioProcessing = noAudioProcessing;
- this.cpuOveruseDetection = cpuOveruseDetection;
+ this.aecDump = aecDump;
+ this.useOpenSLES = useOpenSLES;
}
}
@@ -228,7 +238,6 @@ public class PeerConnectionClient {
factory = null;
peerConnection = null;
preferIsac = false;
- preferH264 = false;
videoSourceStopped = false;
isError = false;
queuedRemoteCandidates = null;
@@ -249,7 +258,7 @@ public class PeerConnectionClient {
}
public void createPeerConnection(
- final EGLContext renderEGLContext,
+ final EglBase.Context renderEGLContext,
final VideoRenderer.Callbacks localRender,
final VideoRenderer.Callbacks remoteRender,
final SignalingParameters signalingParameters) {
@@ -283,31 +292,47 @@ public class PeerConnectionClient {
}
private void createPeerConnectionFactoryInternal(Context context) {
+ PeerConnectionFactory.initializeInternalTracer();
+ if (peerConnectionParameters.tracing) {
+ PeerConnectionFactory.startInternalTracingCapture(
+ Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator
+ + "webrtc-trace.txt");
+ }
Log.d(TAG, "Create peer connection factory. Use video: " +
peerConnectionParameters.videoCallEnabled);
isError = false;
// Initialize field trials.
- String field_trials = FIELD_TRIAL_AUTOMATIC_RESIZE;
- // Check if VP9 is used by default.
- if (videoCallEnabled && peerConnectionParameters.videoCodec != null
- && peerConnectionParameters.videoCodec.equals(VIDEO_CODEC_VP9)) {
- field_trials += FIELD_TRIAL_VP9;
+ PeerConnectionFactory.initializeFieldTrials(FIELD_TRIAL_AUTOMATIC_RESIZE);
+
+ // Check preferred video codec.
+ preferredVideoCodec = VIDEO_CODEC_VP8;
+ if (videoCallEnabled && peerConnectionParameters.videoCodec != null) {
+ if (peerConnectionParameters.videoCodec.equals(VIDEO_CODEC_VP9)) {
+ preferredVideoCodec = VIDEO_CODEC_VP9;
+ } else if (peerConnectionParameters.videoCodec.equals(VIDEO_CODEC_H264)) {
+ preferredVideoCodec = VIDEO_CODEC_H264;
+ }
}
- PeerConnectionFactory.initializeFieldTrials(field_trials);
+ Log.d(TAG, "Pereferred video codec: " + preferredVideoCodec);
- // Check if H.264 is used by default.
- preferH264 = false;
- if (videoCallEnabled && peerConnectionParameters.videoCodec != null
- && peerConnectionParameters.videoCodec.equals(VIDEO_CODEC_H264)) {
- preferH264 = true;
- }
// Check if ISAC is used by default.
preferIsac = false;
if (peerConnectionParameters.audioCodec != null
&& peerConnectionParameters.audioCodec.equals(AUDIO_CODEC_ISAC)) {
preferIsac = true;
}
+
+ // Enable/disable OpenSL ES playback.
+ if (!peerConnectionParameters.useOpenSLES) {
+ Log.d(TAG, "Disable OpenSL ES audio even if device supports it");
+ WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(true /* enable */);
+ } else {
+ Log.d(TAG, "Allow OpenSL ES audio if device supports it");
+ WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(false);
+ }
+
+ // Create peer connection factory.
if (!PeerConnectionFactory.initializeAndroidGlobals(context, true, true,
peerConnectionParameters.videoCodecHwAcceleration)) {
events.onPeerConnectionError("Failed to initializeAndroidGlobals");
@@ -405,7 +430,7 @@ public class PeerConnectionClient {
}
}
- private void createPeerConnectionInternal(EGLContext renderEGLContext) {
+ private void createPeerConnectionInternal(EglBase.Context renderEGLContext) {
if (factory == null || isError) {
Log.e(TAG, "Peerconnection factory is not created");
return;
@@ -420,7 +445,7 @@ public class PeerConnectionClient {
if (videoCallEnabled) {
Log.d(TAG, "EGLContext: " + renderEGLContext);
- factory.setVideoHwAccelerationOptions(renderEGLContext);
+ factory.setVideoHwAccelerationOptions(renderEGLContext, renderEGLContext);
}
PeerConnection.RTCConfiguration rtcConfig =
@@ -453,7 +478,8 @@ public class PeerConnectionClient {
cameraDeviceName = frontCameraDeviceName;
}
Log.d(TAG, "Opening camera: " + cameraDeviceName);
- videoCapturer = VideoCapturerAndroid.create(cameraDeviceName, null);
+ videoCapturer = VideoCapturerAndroid.create(cameraDeviceName, null,
+ peerConnectionParameters.captureToTexture ? renderEGLContext : null);
if (videoCapturer == null) {
reportError("Failed to open camera");
return;
@@ -466,10 +492,26 @@ public class PeerConnectionClient {
factory.createAudioSource(audioConstraints)));
peerConnection.addStream(mediaStream);
+ if (peerConnectionParameters.aecDump) {
+ try {
+ aecDumpFileDescriptor = ParcelFileDescriptor.open(
+ new File("/sdcard/Download/audio.aecdump"),
+ ParcelFileDescriptor.MODE_READ_WRITE |
+ ParcelFileDescriptor.MODE_CREATE |
+ ParcelFileDescriptor.MODE_TRUNCATE);
+ factory.startAecDump(aecDumpFileDescriptor.getFd());
+ } catch (IOException e) {
+ Log.e(TAG, "Cannot open aecdump file", e);
+ }
+ }
+
Log.d(TAG, "Peer connection created.");
}
private void closeInternal() {
+ if (factory != null && peerConnectionParameters.aecDump) {
+ factory.stopAecDump();
+ }
Log.d(TAG, "Closing peer connection.");
statsTimer.cancel();
if (peerConnection != null) {
@@ -489,6 +531,8 @@ public class PeerConnectionClient {
options = null;
Log.d(TAG, "Closing peer connection done.");
events.onPeerConnectionClosed();
+ PeerConnectionFactory.stopInternalTracingCapture();
+ PeerConnectionFactory.shutdownInternalTracer();
}
public boolean isHDVideo() {
@@ -623,8 +667,8 @@ public class PeerConnectionClient {
if (preferIsac) {
sdpDescription = preferCodec(sdpDescription, AUDIO_CODEC_ISAC, true);
}
- if (videoCallEnabled && preferH264) {
- sdpDescription = preferCodec(sdpDescription, VIDEO_CODEC_H264, false);
+ if (videoCallEnabled) {
+ sdpDescription = preferCodec(sdpDescription, preferredVideoCodec, false);
}
if (videoCallEnabled && peerConnectionParameters.videoStartBitrate > 0) {
sdpDescription = setStartBitrate(VIDEO_CODEC_VP8, true,
@@ -972,8 +1016,8 @@ public class PeerConnectionClient {
if (preferIsac) {
sdpDescription = preferCodec(sdpDescription, AUDIO_CODEC_ISAC, true);
}
- if (videoCallEnabled && preferH264) {
- sdpDescription = preferCodec(sdpDescription, VIDEO_CODEC_H264, false);
+ if (videoCallEnabled) {
+ sdpDescription = preferCodec(sdpDescription, preferredVideoCodec, false);
}
final SessionDescription sdp = new SessionDescription(
origSdp.type, sdpDescription);
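
The changes above ripple out to every consumer of PeerConnectionClient: the PeerConnectionParameters constructor grows from 12 to 15 arguments (tracing, captureToTexture, aecDump, and useOpenSLES in; cpuOveruseDetection out), and createPeerConnection() now takes an EglBase.Context rather than a javax.microedition EGLContext. A minimal sketch of an adapted caller (a hypothetical CallActivity-style call site, not part of this change; it assumes import org.webrtc.EglBase, and every flag value is illustrative rather than a default defined by this patch):

  // Sketch only: adapting a caller to the widened constructor.
  PeerConnectionClient.PeerConnectionParameters peerConnectionParameters =
      new PeerConnectionClient.PeerConnectionParameters(
          true,                 // videoCallEnabled
          false,                // loopback
          false,                // tracing (new)
          1280, 720, 30, 1700,  // videoWidth, videoHeight, videoFps, videoStartBitrate
          "VP8", true,          // videoCodec, videoCodecHwAcceleration
          false,                // captureToTexture (new)
          32, "OPUS", false,    // audioStartBitrate, audioCodec, noAudioProcessing
          false,                // aecDump (new; cpuOveruseDetection is gone)
          false);               // useOpenSLES (new)

  EglBase rootEglBase = EglBase.create();  // replaces "new EglBase()"
  peerConnectionClient.createPeerConnection(
      rootEglBase.getEglBaseContext(),     // EglBase.Context, not an EGLContext
      localRender, remoteRender, signalingParameters);
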
diff --git a/webrtc/examples/androidapp/src/org/appspot/apprtc/SettingsActivity.java b/webrtc/examples/androidapp/src/org/appspot/apprtc/SettingsActivity.java
index 9ad6e4d8e4..06a2d06802 100644
--- a/webrtc/examples/androidapp/src/org/appspot/apprtc/SettingsActivity.java
+++ b/webrtc/examples/androidapp/src/org/appspot/apprtc/SettingsActivity.java
@@ -30,15 +30,18 @@ public class SettingsActivity extends Activity
private String keyprefStartVideoBitrateValue;
private String keyPrefVideoCodec;
private String keyprefHwCodec;
+ private String keyprefCaptureToTexture;
private String keyprefStartAudioBitrateType;
private String keyprefStartAudioBitrateValue;
private String keyPrefAudioCodec;
private String keyprefNoAudioProcessing;
+ private String keyprefAecDump;
+ private String keyprefOpenSLES;
- private String keyprefCpuUsageDetection;
private String keyPrefRoomServerUrl;
private String keyPrefDisplayHud;
+ private String keyPrefTracing;
@Override
protected void onCreate(Bundle savedInstanceState) {
@@ -51,15 +54,18 @@ public class SettingsActivity extends Activity
keyprefStartVideoBitrateValue = getString(R.string.pref_startvideobitratevalue_key);
keyPrefVideoCodec = getString(R.string.pref_videocodec_key);
keyprefHwCodec = getString(R.string.pref_hwcodec_key);
+ keyprefCaptureToTexture = getString(R.string.pref_capturetotexture_key);
keyprefStartAudioBitrateType = getString(R.string.pref_startaudiobitrate_key);
keyprefStartAudioBitrateValue = getString(R.string.pref_startaudiobitratevalue_key);
keyPrefAudioCodec = getString(R.string.pref_audiocodec_key);
keyprefNoAudioProcessing = getString(R.string.pref_noaudioprocessing_key);
+ keyprefAecDump = getString(R.string.pref_aecdump_key);
+ keyprefOpenSLES = getString(R.string.pref_opensles_key);
- keyprefCpuUsageDetection = getString(R.string.pref_cpu_usage_detection_key);
keyPrefRoomServerUrl = getString(R.string.pref_room_server_url_key);
keyPrefDisplayHud = getString(R.string.pref_displayhud_key);
+ keyPrefTracing = getString(R.string.pref_tracing_key);
// Display the fragment as the main content.
settingsFragment = new SettingsFragment();
@@ -84,16 +90,19 @@ public class SettingsActivity extends Activity
setVideoBitrateEnable(sharedPreferences);
updateSummary(sharedPreferences, keyPrefVideoCodec);
updateSummaryB(sharedPreferences, keyprefHwCodec);
+ updateSummaryB(sharedPreferences, keyprefCaptureToTexture);
updateSummary(sharedPreferences, keyprefStartAudioBitrateType);
updateSummaryBitrate(sharedPreferences, keyprefStartAudioBitrateValue);
setAudioBitrateEnable(sharedPreferences);
updateSummary(sharedPreferences, keyPrefAudioCodec);
updateSummaryB(sharedPreferences, keyprefNoAudioProcessing);
+ updateSummaryB(sharedPreferences, keyprefAecDump);
+ updateSummaryB(sharedPreferences, keyprefOpenSLES);
- updateSummaryB(sharedPreferences, keyprefCpuUsageDetection);
updateSummary(sharedPreferences, keyPrefRoomServerUrl);
updateSummaryB(sharedPreferences, keyPrefDisplayHud);
+ updateSummaryB(sharedPreferences, keyPrefTracing);
}
@Override
@@ -119,10 +128,13 @@ public class SettingsActivity extends Activity
|| key.equals(keyprefStartAudioBitrateValue)) {
updateSummaryBitrate(sharedPreferences, key);
} else if (key.equals(keyprefVideoCall)
+ || key.equals(keyPrefTracing)
|| key.equals(keyprefCaptureQualitySlider)
|| key.equals(keyprefHwCodec)
+ || key.equals(keyprefCaptureToTexture)
|| key.equals(keyprefNoAudioProcessing)
- || key.equals(keyprefCpuUsageDetection)
+ || key.equals(keyprefAecDump)
+ || key.equals(keyprefOpenSLES)
|| key.equals(keyPrefDisplayHud)) {
updateSummaryB(sharedPreferences, key);
}
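
SettingsActivity only plumbs the new keys through for summary display; whoever launches the call still has to read them back. A hedged sketch of that read-back (the "false" defaults are assumptions, not values this patch defines here):

  // Sketch: reading the four new boolean preferences before starting a call.
  SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
  boolean tracing = prefs.getBoolean(getString(R.string.pref_tracing_key), false);
  boolean captureToTexture =
      prefs.getBoolean(getString(R.string.pref_capturetotexture_key), false);
  boolean aecDump = prefs.getBoolean(getString(R.string.pref_aecdump_key), false);
  boolean useOpenSLES = prefs.getBoolean(getString(R.string.pref_opensles_key), false);
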
diff --git a/webrtc/examples/androidtests/src/org/appspot/apprtc/test/PeerConnectionClientTest.java b/webrtc/examples/androidtests/src/org/appspot/apprtc/test/PeerConnectionClientTest.java
index 5a5034b340..16a9fb3612 100644
--- a/webrtc/examples/androidtests/src/org/appspot/apprtc/test/PeerConnectionClientTest.java
+++ b/webrtc/examples/androidtests/src/org/appspot/apprtc/test/PeerConnectionClientTest.java
@@ -22,7 +22,7 @@ import org.appspot.apprtc.PeerConnectionClient.PeerConnectionParameters;
import org.appspot.apprtc.util.LooperExecutor;
import org.webrtc.EglBase;
import org.webrtc.IceCandidate;
-import org.webrtc.MediaConstraints;
+import org.webrtc.MediaCodecVideoEncoder;
import org.webrtc.PeerConnection;
import org.webrtc.PeerConnectionFactory;
import org.webrtc.SessionDescription;
@@ -225,7 +225,7 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
PeerConnectionClient createPeerConnectionClient(
MockRenderer localRenderer, MockRenderer remoteRenderer,
- PeerConnectionParameters peerConnectionParameters, boolean decodeToTexture) {
+ PeerConnectionParameters peerConnectionParameters, boolean useTextures) {
List<PeerConnection.IceServer> iceServers =
new LinkedList<PeerConnection.IceServer>();
SignalingParameters signalingParameters = new SignalingParameters(
@@ -240,19 +240,28 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
client.setPeerConnectionFactoryOptions(options);
client.createPeerConnectionFactory(
getInstrumentation().getContext(), peerConnectionParameters, this);
- client.createPeerConnection(decodeToTexture ? eglBase.getContext() : null,
+ client.createPeerConnection(useTextures ? eglBase.getEglBaseContext() : null,
localRenderer, remoteRenderer, signalingParameters);
client.createOffer();
return client;
}
- private PeerConnectionParameters createParameters(boolean enableVideo,
- String videoCodec) {
+ private PeerConnectionParameters createParametersForAudioCall() {
PeerConnectionParameters peerConnectionParameters =
new PeerConnectionParameters(
- enableVideo, true, // videoCallEnabled, loopback.
- 0, 0, 0, 0, videoCodec, true, // video codec parameters.
- 0, "OPUS", false, true); // audio codec parameters.
+ false, true, false, // videoCallEnabled, loopback, tracing.
+ 0, 0, 0, 0, "", true, false, // video codec parameters.
+ 0, "OPUS", false, false, false); // audio codec parameters.
+ return peerConnectionParameters;
+ }
+
+ private PeerConnectionParameters createParametersForVideoCall(
+ String videoCodec, boolean captureToTexture) {
+ PeerConnectionParameters peerConnectionParameters =
+ new PeerConnectionParameters(
+ true, true, false, // videoCallEnabled, loopback, tracing.
+ 0, 0, 0, 0, videoCodec, true, captureToTexture, // video codec parameters.
+ 0, "OPUS", false, false, false); // audio codec parameters.
return peerConnectionParameters;
}
@@ -261,7 +270,7 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
signalingExecutor = new LooperExecutor();
signalingExecutor.requestStart();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
- eglBase = new EglBase();
+ eglBase = EglBase.create();
}
}
@@ -278,7 +287,8 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally");
MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
pcClient = createPeerConnectionClient(
- localRenderer, new MockRenderer(0, null), createParameters(true, VIDEO_CODEC_VP8), false);
+ localRenderer, new MockRenderer(0, null),
+ createParametersForVideoCall(VIDEO_CODEC_VP8, false), false);
// Wait for local SDP and ice candidates set events.
assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
@@ -338,46 +348,74 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
}
public void testLoopbackAudio() throws InterruptedException {
- doLoopbackTest(createParameters(false, VIDEO_CODEC_VP8), false);
+ doLoopbackTest(createParametersForAudioCall(), false);
}
public void testLoopbackVp8() throws InterruptedException {
- doLoopbackTest(createParameters(true, VIDEO_CODEC_VP8), false);
+ doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, false), false);
}
public void DISABLED_testLoopbackVp9() throws InterruptedException {
- doLoopbackTest(createParameters(true, VIDEO_CODEC_VP9), false);
+ doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9, false), false);
}
public void testLoopbackH264() throws InterruptedException {
- doLoopbackTest(createParameters(true, VIDEO_CODEC_H264), false);
+ doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264, false), false);
}
public void testLoopbackVp8DecodeToTexture() throws InterruptedException {
- if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR1) {
- Log.i(TAG, "Decode to textures is not supported, requires EGL14.");
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
+ Log.i(TAG, "Decode to textures is not supported, requires SDK version 19.");
return;
}
-
- doLoopbackTest(createParameters(true, VIDEO_CODEC_VP8), true);
+ doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, false), true);
}
public void DISABLED_testLoopbackVp9DecodeToTexture() throws InterruptedException {
- if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR1) {
- Log.i(TAG, "Decode to textures is not supported, requires EGL14.");
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
+ Log.i(TAG, "Decode to textures is not supported, requires SDK version 19.");
return;
}
- doLoopbackTest(createParameters(true, VIDEO_CODEC_VP9), true);
+ doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9, false), true);
}
public void testLoopbackH264DecodeToTexture() throws InterruptedException {
- if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR1) {
- Log.i(TAG, "Decode to textures is not supported, requires EGL14.");
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
+ Log.i(TAG, "Decode to textures is not supported, requires SDK version 19.");
return;
}
- doLoopbackTest(createParameters(true, VIDEO_CODEC_H264), true);
+ doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264, false), true);
}
+ public void testLoopbackVp8CaptureToTexture() throws InterruptedException {
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
+ Log.i(TAG, "Encode to textures is not supported. Requires SDK version 19");
+ return;
+ }
+ // TODO(perkj): If we can always capture to textures, there is no need to check if the
+ // hardware encoder supports to encode from a texture.
+ if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) {
+ Log.i(TAG, "VP8 encode to textures is not supported.");
+ return;
+ }
+ doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, true), true);
+ }
+
+ public void testLoopbackH264CaptureToTexture() throws InterruptedException {
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
+ Log.i(TAG, "Encode to textures is not supported. Requires KITKAT");
+ return;
+ }
+ // TODO(perkj): If we can always capture to textures, there is no need to check if the
+ // hardware encoder supports to encode from a texture.
+ if (!MediaCodecVideoEncoder.isH264HwSupportedUsingTextures()) {
+ Log.i(TAG, "H264 encode to textures is not supported.");
+ return;
+ }
+ doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264, true), true);
+ }
+
// Checks if default front camera can be switched to back camera and then
// again to front camera.
public void testCameraSwitch() throws InterruptedException {
@@ -388,7 +426,7 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);
pcClient = createPeerConnectionClient(
- localRenderer, remoteRenderer, createParameters(true, VIDEO_CODEC_VP8), false);
+ localRenderer, remoteRenderer, createParametersForVideoCall(VIDEO_CODEC_VP8, false), false);
// Wait for local SDP, rename it to answer and set as remote SDP.
assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
@@ -434,7 +472,7 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);
pcClient = createPeerConnectionClient(
- localRenderer, remoteRenderer, createParameters(true, VIDEO_CODEC_VP8), false);
+ localRenderer, remoteRenderer, createParametersForVideoCall(VIDEO_CODEC_VP8, false), false);
// Wait for local SDP, rename it to answer and set as remote SDP.
assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
diff --git a/webrtc/examples/objc/AppRTCDemo/ios/ARDAppDelegate.m b/webrtc/examples/objc/AppRTCDemo/ios/ARDAppDelegate.m
index 0f4165ebac..9568b94e77 100644
--- a/webrtc/examples/objc/AppRTCDemo/ios/ARDAppDelegate.m
+++ b/webrtc/examples/objc/AppRTCDemo/ios/ARDAppDelegate.m
@@ -29,7 +29,7 @@
ARDMainViewController *viewController = [[ARDMainViewController alloc] init];
_window.rootViewController = viewController;
-#ifndef _DEBUG
+#if defined(NDEBUG)
// In debug builds the default level is LS_INFO and in non-debug builds it is
// disabled. Continue to log to console in non-debug builds, but only
// warnings and errors.
diff --git a/webrtc/examples/objc/AppRTCDemo/ios/ARDMainView.m b/webrtc/examples/objc/AppRTCDemo/ios/ARDMainView.m
index 3c9e46e148..e809cb3027 100644
--- a/webrtc/examples/objc/AppRTCDemo/ios/ARDMainView.m
+++ b/webrtc/examples/objc/AppRTCDemo/ios/ARDMainView.m
@@ -21,15 +21,8 @@ static CGFloat const kRoomTextFieldMargin = 8;
static CGFloat const kCallControlMargin = 8;
static CGFloat const kAppLabelHeight = 20;
-@class ARDRoomTextField;
-@protocol ARDRoomTextFieldDelegate <NSObject>
-- (void)roomTextField:(ARDRoomTextField *)roomTextField
- didInputRoom:(NSString *)room;
-@end
-
// Helper view that contains a text field and a clear button.
@interface ARDRoomTextField : UIView <UITextFieldDelegate>
-@property(nonatomic, weak) id<ARDRoomTextFieldDelegate> delegate;
@property(nonatomic, readonly) NSString *roomText;
@end
@@ -38,14 +31,14 @@ static CGFloat const kAppLabelHeight = 20;
UIButton *_clearButton;
}
-@synthesize delegate = _delegate;
-
- (instancetype)initWithFrame:(CGRect)frame {
if (self = [super initWithFrame:frame]) {
_roomText = [[UITextField alloc] initWithFrame:CGRectZero];
_roomText.borderStyle = UITextBorderStyleNone;
_roomText.font = [UIFont fontWithName:@"Roboto" size:12];
_roomText.placeholder = @"Room name";
+ _roomText.autocorrectionType = UITextAutocorrectionTypeNo;
+ _roomText.autocapitalizationType = UITextAutocapitalizationTypeNone;
_roomText.delegate = self;
[_roomText addTarget:self
action:@selector(textFieldDidChange:)
@@ -96,10 +89,6 @@ static CGFloat const kAppLabelHeight = 20;
#pragma mark - UITextFieldDelegate
-- (void)textFieldDidEndEditing:(UITextField *)textField {
- [_delegate roomTextField:self didInputRoom:textField.text];
-}
-
- (BOOL)textFieldShouldReturn:(UITextField *)textField {
// There is no other control that can take focus, so manually resign focus
// when return (Join) is pressed to trigger |textFieldDidEndEditing|.
@@ -125,9 +114,6 @@ static CGFloat const kAppLabelHeight = 20;
@end
-@interface ARDMainView () <ARDRoomTextFieldDelegate>
-@end
-
@implementation ARDMainView {
UILabel *_appLabel;
ARDRoomTextField *_roomText;
@@ -151,7 +137,6 @@ static CGFloat const kAppLabelHeight = 20;
[self addSubview:_appLabel];
_roomText = [[ARDRoomTextField alloc] initWithFrame:CGRectZero];
- _roomText.delegate = self;
[self addSubview:_roomText];
UIFont *controlFont = [UIFont fontWithName:@"Roboto" size:20];
@@ -260,16 +245,6 @@ static CGFloat const kAppLabelHeight = 20;
_startCallButton.frame.size.height);
}
-#pragma mark - ARDRoomTextFieldDelegate
-
-- (void)roomTextField:(ARDRoomTextField *)roomTextField
- didInputRoom:(NSString *)room {
- [_delegate mainView:self
- didInputRoom:room
- isLoopback:NO
- isAudioOnly:_audioOnlySwitch.isOn];
-}
-
#pragma mark - Private
- (void)onStartCall:(id)sender {
diff --git a/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallView.h b/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallView.h
index 209bcd462c..378281d005 100644
--- a/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallView.h
+++ b/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallView.h
@@ -10,6 +10,7 @@
#import <UIKit/UIKit.h>
+#import "webrtc/base/objc/RTCCameraPreviewView.h"
#import "RTCEAGLVideoView.h"
#import "ARDStatsView.h"
@@ -33,7 +34,7 @@
@interface ARDVideoCallView : UIView
@property(nonatomic, readonly) UILabel *statusLabel;
-@property(nonatomic, readonly) RTCEAGLVideoView *localVideoView;
+@property(nonatomic, readonly) RTCCameraPreviewView *localVideoView;
@property(nonatomic, readonly) RTCEAGLVideoView *remoteVideoView;
@property(nonatomic, readonly) ARDStatsView *statsView;
@property(nonatomic, weak) id<ARDVideoCallViewDelegate> delegate;
diff --git a/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallView.m b/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallView.m
index 4048b84bb2..4c9c9d284e 100644
--- a/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallView.m
+++ b/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallView.m
@@ -25,7 +25,6 @@ static CGFloat const kStatusBarHeight = 20;
@implementation ARDVideoCallView {
UIButton *_cameraSwitchButton;
UIButton *_hangupButton;
- CGSize _localVideoSize;
CGSize _remoteVideoSize;
BOOL _useRearCamera;
}
@@ -42,10 +41,7 @@ static CGFloat const kStatusBarHeight = 20;
_remoteVideoView.delegate = self;
[self addSubview:_remoteVideoView];
- // TODO(tkchin): replace this with a view that renders layer from
- // AVCaptureSession.
- _localVideoView = [[RTCEAGLVideoView alloc] initWithFrame:CGRectZero];
- _localVideoView.delegate = self;
+ _localVideoView = [[RTCCameraPreviewView alloc] initWithFrame:CGRectZero];
[self addSubview:_localVideoView];
_statsView = [[ARDStatsView alloc] initWithFrame:CGRectZero];
@@ -114,22 +110,15 @@ static CGFloat const kStatusBarHeight = 20;
_remoteVideoView.frame = bounds;
}
- if (_localVideoSize.width && _localVideoSize.height > 0) {
- // Aspect fit local video view into a square box.
- CGRect localVideoFrame =
- CGRectMake(0, 0, kLocalVideoViewSize, kLocalVideoViewSize);
- localVideoFrame =
- AVMakeRectWithAspectRatioInsideRect(_localVideoSize, localVideoFrame);
-
- // Place the view in the bottom right.
- localVideoFrame.origin.x = CGRectGetMaxX(bounds)
- - localVideoFrame.size.width - kLocalVideoViewPadding;
- localVideoFrame.origin.y = CGRectGetMaxY(bounds)
- - localVideoFrame.size.height - kLocalVideoViewPadding;
- _localVideoView.frame = localVideoFrame;
- } else {
- _localVideoView.frame = bounds;
- }
+ // Aspect fit local video view into a square box.
+ CGRect localVideoFrame =
+ CGRectMake(0, 0, kLocalVideoViewSize, kLocalVideoViewSize);
+ // Place the view in the bottom right.
+ localVideoFrame.origin.x = CGRectGetMaxX(bounds)
+ - localVideoFrame.size.width - kLocalVideoViewPadding;
+ localVideoFrame.origin.y = CGRectGetMaxY(bounds)
+ - localVideoFrame.size.height - kLocalVideoViewPadding;
+ _localVideoView.frame = localVideoFrame;
// Place stats at the top.
CGSize statsSize = [_statsView sizeThatFits:bounds.size];
@@ -159,10 +148,7 @@ static CGFloat const kStatusBarHeight = 20;
#pragma mark - RTCEAGLVideoViewDelegate
- (void)videoView:(RTCEAGLVideoView*)videoView didChangeVideoSize:(CGSize)size {
- if (videoView == _localVideoView) {
- _localVideoSize = size;
- _localVideoView.hidden = CGSizeEqualToSize(CGSizeZero, _localVideoSize);
- } else if (videoView == _remoteVideoView) {
+ if (videoView == _remoteVideoView) {
_remoteVideoSize = size;
}
[self setNeedsLayout];
diff --git a/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.m b/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.m
index 8de6b959f0..51290a05b5 100644
--- a/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.m
+++ b/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.m
@@ -128,18 +128,21 @@
if (_localVideoTrack == localVideoTrack) {
return;
}
- [_localVideoTrack removeRenderer:_videoCallView.localVideoView];
_localVideoTrack = nil;
- [_videoCallView.localVideoView renderFrame:nil];
_localVideoTrack = localVideoTrack;
- [_localVideoTrack addRenderer:_videoCallView.localVideoView];
+ RTCAVFoundationVideoSource *source = nil;
+ if ([localVideoTrack.source
+ isKindOfClass:[RTCAVFoundationVideoSource class]]) {
+ source = (RTCAVFoundationVideoSource*)localVideoTrack.source;
+ }
+ _videoCallView.localVideoView.captureSession = source.captureSession;
}
- (void)setRemoteVideoTrack:(RTCVideoTrack *)remoteVideoTrack {
if (_remoteVideoTrack == remoteVideoTrack) {
return;
}
- [_remoteVideoTrack removeRenderer:_videoCallView.localVideoView];
+ [_remoteVideoTrack removeRenderer:_videoCallView.remoteVideoView];
_remoteVideoTrack = nil;
[_videoCallView.remoteVideoView renderFrame:nil];
_remoteVideoTrack = remoteVideoTrack;
diff --git a/webrtc/examples/peerconnection/client/conductor.cc b/webrtc/examples/peerconnection/client/conductor.cc
index e3def9955f..883f44a77b 100644
--- a/webrtc/examples/peerconnection/client/conductor.cc
+++ b/webrtc/examples/peerconnection/client/conductor.cc
@@ -113,28 +113,22 @@ bool Conductor::CreatePeerConnection(bool dtls) {
ASSERT(peer_connection_factory_.get() != NULL);
ASSERT(peer_connection_.get() == NULL);
- webrtc::PeerConnectionInterface::IceServers servers;
+ webrtc::PeerConnectionInterface::RTCConfiguration config;
webrtc::PeerConnectionInterface::IceServer server;
server.uri = GetPeerConnectionString();
- servers.push_back(server);
+ config.servers.push_back(server);
webrtc::FakeConstraints constraints;
if (dtls) {
constraints.AddOptional(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
"true");
- }
- else
- {
+ } else {
constraints.AddOptional(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
"false");
}
- peer_connection_ =
- peer_connection_factory_->CreatePeerConnection(servers,
- &constraints,
- NULL,
- NULL,
- this);
+ peer_connection_ = peer_connection_factory_->CreatePeerConnection(
+ config, &constraints, NULL, NULL, this);
return peer_connection_.get() != NULL;
}
diff --git a/webrtc/examples/peerconnection/client/conductor.h b/webrtc/examples/peerconnection/client/conductor.h
index f5f16a3d10..e5ee170299 100644
--- a/webrtc/examples/peerconnection/client/conductor.h
+++ b/webrtc/examples/peerconnection/client/conductor.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef TALK_EXAMPLES_PEERCONNECTION_CLIENT_CONDUCTOR_H_
-#define TALK_EXAMPLES_PEERCONNECTION_CLIENT_CONDUCTOR_H_
+#ifndef WEBRTC_EXAMPLES_PEERCONNECTION_CLIENT_CONDUCTOR_H_
+#define WEBRTC_EXAMPLES_PEERCONNECTION_CLIENT_CONDUCTOR_H_
#pragma once
#include <deque>
@@ -126,4 +126,4 @@ class Conductor
std::string server_;
};
-#endif // TALK_EXAMPLES_PEERCONNECTION_CLIENT_CONDUCTOR_H_
+#endif // WEBRTC_EXAMPLES_PEERCONNECTION_CLIENT_CONDUCTOR_H_
diff --git a/webrtc/examples/peerconnection/client/defaults.cc b/webrtc/examples/peerconnection/client/defaults.cc
index 3090c15ca1..a2501c718f 100644
--- a/webrtc/examples/peerconnection/client/defaults.cc
+++ b/webrtc/examples/peerconnection/client/defaults.cc
@@ -19,6 +19,7 @@
#include <unistd.h>
#endif
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/common.h"
const char kAudioLabel[] = "audio_label";
@@ -49,10 +50,12 @@ std::string GetDefaultServerName() {
std::string GetPeerName() {
char computer_name[256];
- if (gethostname(computer_name, ARRAY_SIZE(computer_name)) != 0)
- strcpy(computer_name, "host");
std::string ret(GetEnvVarOrDefault("USERNAME", "user"));
ret += '@';
- ret += computer_name;
+ if (gethostname(computer_name, arraysize(computer_name)) == 0) {
+ ret += computer_name;
+ } else {
+ ret += "host";
+ }
return ret;
}
diff --git a/webrtc/examples/peerconnection/client/defaults.h b/webrtc/examples/peerconnection/client/defaults.h
index 7b503974e5..f4d3bf52f7 100644
--- a/webrtc/examples/peerconnection/client/defaults.h
+++ b/webrtc/examples/peerconnection/client/defaults.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef PEERCONNECTION_SAMPLES_CLIENT_DEFAULTS_H_
-#define PEERCONNECTION_SAMPLES_CLIENT_DEFAULTS_H_
+#ifndef WEBRTC_EXAMPLES_PEERCONNECTION_CLIENT_DEFAULTS_H_
+#define WEBRTC_EXAMPLES_PEERCONNECTION_CLIENT_DEFAULTS_H_
#pragma once
#include <string>
@@ -27,4 +27,4 @@ std::string GetPeerConnectionString();
std::string GetDefaultServerName();
std::string GetPeerName();
-#endif // PEERCONNECTION_SAMPLES_CLIENT_DEFAULTS_H_
+#endif // WEBRTC_EXAMPLES_PEERCONNECTION_CLIENT_DEFAULTS_H_
diff --git a/webrtc/examples/peerconnection/client/flagdefs.h b/webrtc/examples/peerconnection/client/flagdefs.h
index 0cffffb135..92e2773166 100644
--- a/webrtc/examples/peerconnection/client/flagdefs.h
+++ b/webrtc/examples/peerconnection/client/flagdefs.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef TALK_EXAMPLES_PEERCONNECTION_CLIENT_FLAGDEFS_H_
-#define TALK_EXAMPLES_PEERCONNECTION_CLIENT_FLAGDEFS_H_
+#ifndef WEBRTC_EXAMPLES_PEERCONNECTION_CLIENT_FLAGDEFS_H_
+#define WEBRTC_EXAMPLES_PEERCONNECTION_CLIENT_FLAGDEFS_H_
#pragma once
#include "webrtc/base/flags.h"
@@ -30,4 +30,4 @@ DEFINE_bool(autocall, false, "Call the first available other client on "
"the server without user intervention. Note: this flag should only be set "
"to true on one of the two clients.");
-#endif // TALK_EXAMPLES_PEERCONNECTION_CLIENT_FLAGDEFS_H_
+#endif // WEBRTC_EXAMPLES_PEERCONNECTION_CLIENT_FLAGDEFS_H_
diff --git a/webrtc/examples/peerconnection/client/linux/main.cc b/webrtc/examples/peerconnection/client/linux/main.cc
index cf88c36fbb..4db929c82e 100644
--- a/webrtc/examples/peerconnection/client/linux/main.cc
+++ b/webrtc/examples/peerconnection/client/linux/main.cc
@@ -30,7 +30,7 @@ class CustomSocketServer : public rtc::PhysicalSocketServer {
// Override so that we can also pump the GTK message loop.
virtual bool Wait(int cms, bool process_io) {
// Pump GTK events.
- // TODO: We really should move either the socket server or UI to a
+ // TODO(henrike): We really should move either the socket server or UI to a
// different thread. Alternatively we could look at merging the two loops
// by implementing a dispatcher for the socket server and/or use
// g_main_context_set_poll_func.
@@ -96,10 +96,12 @@ int main(int argc, char* argv[]) {
wnd.Destroy();
thread->set_socketserver(NULL);
- // TODO: Run the Gtk main loop to tear down the connection.
- //while (gtk_events_pending()) {
- // gtk_main_iteration();
- //}
+ // TODO(henrike): Run the Gtk main loop to tear down the connection.
+ /*
+ while (gtk_events_pending()) {
+ gtk_main_iteration();
+ }
+ */
rtc::CleanupSSL();
return 0;
}
diff --git a/webrtc/examples/peerconnection/client/linux/main_wnd.cc b/webrtc/examples/peerconnection/client/linux/main_wnd.cc
index 254fb946f9..cf98c1cac7 100644
--- a/webrtc/examples/peerconnection/client/linux/main_wnd.cc
+++ b/webrtc/examples/peerconnection/client/linux/main_wnd.cc
@@ -116,7 +116,8 @@ gboolean Redraw(gpointer data) {
wnd->OnRedraw();
return false;
}
-} // end anonymous
+
+} // namespace
//
// GtkMainWnd implementation.
@@ -174,7 +175,8 @@ void GtkMainWnd::StopLocalRenderer() {
local_renderer_.reset();
}
-void GtkMainWnd::StartRemoteRenderer(webrtc::VideoTrackInterface* remote_video) {
+void GtkMainWnd::StartRemoteRenderer(
+ webrtc::VideoTrackInterface* remote_video) {
remote_renderer_.reset(new VideoRenderer(this, remote_video));
}
@@ -488,7 +490,7 @@ void GtkMainWnd::VideoRenderer::RenderFrame(
static_cast<int>(frame->GetHeight()));
int size = width_ * height_ * 4;
- // TODO: Convert directly to RGBA
+ // TODO(henrike): Convert directly to RGBA
frame->ConvertToRgbBuffer(cricket::FOURCC_ARGB,
image_.get(),
size,
diff --git a/webrtc/examples/peerconnection/client/linux/main_wnd.h b/webrtc/examples/peerconnection/client/linux/main_wnd.h
index 1a91082768..e35d4dd8fa 100644
--- a/webrtc/examples/peerconnection/client/linux/main_wnd.h
+++ b/webrtc/examples/peerconnection/client/linux/main_wnd.h
@@ -8,8 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef PEERCONNECTION_SAMPLES_CLIENT_LINUX_MAIN_WND_H_
-#define PEERCONNECTION_SAMPLES_CLIENT_LINUX_MAIN_WND_H_
+#ifndef WEBRTC_EXAMPLES_PEERCONNECTION_CLIENT_LINUX_MAIN_WND_H_
+#define WEBRTC_EXAMPLES_PEERCONNECTION_CLIENT_LINUX_MAIN_WND_H_
+
+#include <string>
#include "webrtc/examples/peerconnection/client/main_wnd.h"
#include "webrtc/examples/peerconnection/client/peer_connection_client.h"
@@ -115,4 +117,4 @@ class GtkMainWnd : public MainWindow {
int draw_buffer_size_;
};
-#endif // PEERCONNECTION_SAMPLES_CLIENT_LINUX_MAIN_WND_H_
+#endif // WEBRTC_EXAMPLES_PEERCONNECTION_CLIENT_LINUX_MAIN_WND_H_
diff --git a/webrtc/examples/peerconnection/client/main_wnd.cc b/webrtc/examples/peerconnection/client/main_wnd.cc
index 30b12a8511..72f85b9eb2 100644
--- a/webrtc/examples/peerconnection/client/main_wnd.cc
+++ b/webrtc/examples/peerconnection/client/main_wnd.cc
@@ -13,6 +13,7 @@
#include <math.h>
#include "webrtc/examples/peerconnection/client/defaults.h"
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/common.h"
#include "webrtc/base/logging.h"
@@ -241,7 +242,7 @@ void MainWnd::OnPaint() {
// Set the map mode so that the ratio will be maintained for us.
HDC all_dc[] = { ps.hdc, dc_mem };
- for (int i = 0; i < ARRAY_SIZE(all_dc); ++i) {
+ for (int i = 0; i < arraysize(all_dc); ++i) {
SetMapMode(all_dc[i], MM_ISOTROPIC);
SetWindowExtEx(all_dc[i], width, height, NULL);
SetViewportExtEx(all_dc[i], rc.right, rc.bottom, NULL);
diff --git a/webrtc/examples/peerconnection/client/main_wnd.h b/webrtc/examples/peerconnection/client/main_wnd.h
index 9f61a568fd..ac4fd8a9b9 100644
--- a/webrtc/examples/peerconnection/client/main_wnd.h
+++ b/webrtc/examples/peerconnection/client/main_wnd.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef PEERCONNECTION_SAMPLES_CLIENT_MAIN_WND_H_
-#define PEERCONNECTION_SAMPLES_CLIENT_MAIN_WND_H_
+#ifndef WEBRTC_EXAMPLES_PEERCONNECTION_CLIENT_MAIN_WND_H_
+#define WEBRTC_EXAMPLES_PEERCONNECTION_CLIENT_MAIN_WND_H_
#pragma once
#include <map>
@@ -60,7 +60,8 @@ class MainWindow {
virtual void StartLocalRenderer(webrtc::VideoTrackInterface* local_video) = 0;
virtual void StopLocalRenderer() = 0;
- virtual void StartRemoteRenderer(webrtc::VideoTrackInterface* remote_video) = 0;
+ virtual void StartRemoteRenderer(
+ webrtc::VideoTrackInterface* remote_video) = 0;
virtual void StopRemoteRenderer() = 0;
virtual void QueueUIThreadCallback(int msg_id, void* data) = 0;
@@ -197,4 +198,4 @@ class MainWnd : public MainWindow {
};
#endif // WIN32
-#endif // PEERCONNECTION_SAMPLES_CLIENT_MAIN_WND_H_
+#endif // WEBRTC_EXAMPLES_PEERCONNECTION_CLIENT_MAIN_WND_H_
diff --git a/webrtc/examples/peerconnection/client/peer_connection_client.cc b/webrtc/examples/peerconnection/client/peer_connection_client.cc
index d49ce35060..9875115c4b 100644
--- a/webrtc/examples/peerconnection/client/peer_connection_client.cc
+++ b/webrtc/examples/peerconnection/client/peer_connection_client.cc
@@ -43,7 +43,7 @@ rtc::AsyncSocket* CreateClientSocket(int family) {
#endif
}
-}
+} // namespace
PeerConnectionClient::PeerConnectionClient()
: callback_(NULL),
@@ -114,7 +114,7 @@ void PeerConnectionClient::Connect(const std::string& server, int port,
server_address_.SetPort(port);
client_name_ = client_name;
- if (server_address_.IsUnresolved()) {
+ if (server_address_.IsUnresolvedIP()) {
state_ = RESOLVING;
resolver_ = new rtc::AsyncResolver();
resolver_->SignalDone.connect(this, &PeerConnectionClient::OnResolveResult);
diff --git a/webrtc/examples/peerconnection/client/peer_connection_client.h b/webrtc/examples/peerconnection/client/peer_connection_client.h
index 5b5787bc14..b7abfdfe18 100644
--- a/webrtc/examples/peerconnection/client/peer_connection_client.h
+++ b/webrtc/examples/peerconnection/client/peer_connection_client.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef PEERCONNECTION_SAMPLES_CLIENT_PEER_CONNECTION_CLIENT_H_
-#define PEERCONNECTION_SAMPLES_CLIENT_PEER_CONNECTION_CLIENT_H_
+#ifndef WEBRTC_EXAMPLES_PEERCONNECTION_CLIENT_PEER_CONNECTION_CLIENT_H_
+#define WEBRTC_EXAMPLES_PEERCONNECTION_CLIENT_PEER_CONNECTION_CLIENT_H_
#pragma once
#include <map>
@@ -120,4 +120,4 @@ class PeerConnectionClient : public sigslot::has_slots<>,
int my_id_;
};
-#endif // PEERCONNECTION_SAMPLES_CLIENT_PEER_CONNECTION_CLIENT_H_
+#endif // WEBRTC_EXAMPLES_PEERCONNECTION_CLIENT_PEER_CONNECTION_CLIENT_H_
diff --git a/webrtc/examples/peerconnection/server/data_socket.h b/webrtc/examples/peerconnection/server/data_socket.h
index 454ad3978a..0ef61ea6aa 100644
--- a/webrtc/examples/peerconnection/server/data_socket.h
+++ b/webrtc/examples/peerconnection/server/data_socket.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef TALK_EXAMPLES_PEERCONNECTION_SERVER_DATA_SOCKET_H_
-#define TALK_EXAMPLES_PEERCONNECTION_SERVER_DATA_SOCKET_H_
+#ifndef WEBRTC_EXAMPLES_PEERCONNECTION_SERVER_DATA_SOCKET_H_
+#define WEBRTC_EXAMPLES_PEERCONNECTION_SERVER_DATA_SOCKET_H_
#pragma once
#ifdef WIN32
@@ -150,4 +150,4 @@ class ListeningSocket : public SocketBase {
DataSocket* Accept() const;
};
-#endif // TALK_EXAMPLES_PEERCONNECTION_SERVER_DATA_SOCKET_H_
+#endif // WEBRTC_EXAMPLES_PEERCONNECTION_SERVER_DATA_SOCKET_H_
diff --git a/webrtc/examples/peerconnection/server/peer_channel.cc b/webrtc/examples/peerconnection/server/peer_channel.cc
index 150e5dec97..5e173cd460 100644
--- a/webrtc/examples/peerconnection/server/peer_channel.cc
+++ b/webrtc/examples/peerconnection/server/peer_channel.cc
@@ -19,6 +19,7 @@
#include "webrtc/examples/peerconnection/server/data_socket.h"
#include "webrtc/examples/peerconnection/server/utils.h"
#include "webrtc/base/stringutils.h"
+#include "webrtc/base/urlencode.h"
using rtc::sprintfn;
@@ -59,7 +60,7 @@ ChannelMember::ChannelMember(DataSocket* socket)
assert(socket);
assert(socket->method() == DataSocket::GET);
assert(socket->PathEquals("/sign_in"));
- name_ = socket->request_arguments(); // TODO: urldecode
+ name_ = rtc::UrlDecodeString(socket->request_arguments());
if (name_.empty())
name_ = "peer_" + int2str(id_);
else if (name_.length() > kMaxNameLength)
diff --git a/webrtc/examples/peerconnection/server/peer_channel.h b/webrtc/examples/peerconnection/server/peer_channel.h
index 263f17dfa8..6fd740d2f9 100644
--- a/webrtc/examples/peerconnection/server/peer_channel.h
+++ b/webrtc/examples/peerconnection/server/peer_channel.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef TALK_EXAMPLES_PEERCONNECTION_SERVER_PEER_CHANNEL_H_
-#define TALK_EXAMPLES_PEERCONNECTION_SERVER_PEER_CHANNEL_H_
+#ifndef WEBRTC_EXAMPLES_PEERCONNECTION_SERVER_PEER_CHANNEL_H_
+#define WEBRTC_EXAMPLES_PEERCONNECTION_SERVER_PEER_CHANNEL_H_
#pragma once
#include <time.h>
@@ -117,4 +117,4 @@ class PeerChannel {
Members members_;
};
-#endif // TALK_EXAMPLES_PEERCONNECTION_SERVER_PEER_CHANNEL_H_
+#endif // WEBRTC_EXAMPLES_PEERCONNECTION_SERVER_PEER_CHANNEL_H_
diff --git a/webrtc/examples/peerconnection/server/utils.h b/webrtc/examples/peerconnection/server/utils.h
index e70968b875..e1c8729c0b 100644
--- a/webrtc/examples/peerconnection/server/utils.h
+++ b/webrtc/examples/peerconnection/server/utils.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef TALK_EXAMPLES_PEERCONNECTION_SERVER_UTILS_H_
-#define TALK_EXAMPLES_PEERCONNECTION_SERVER_UTILS_H_
+#ifndef WEBRTC_EXAMPLES_PEERCONNECTION_SERVER_UTILS_H_
+#define WEBRTC_EXAMPLES_PEERCONNECTION_SERVER_UTILS_H_
#pragma once
#include <assert.h>
@@ -22,4 +22,4 @@
std::string int2str(int i);
std::string size_t2str(size_t i);
-#endif // TALK_EXAMPLES_PEERCONNECTION_SERVER_UTILS_H_
+#endif // WEBRTC_EXAMPLES_PEERCONNECTION_SERVER_UTILS_H_
diff --git a/webrtc/examples/stunserver/stunserver_main.cc b/webrtc/examples/stunserver/stunserver_main.cc
index 9cbd6156da..9bdf58ac3f 100644
--- a/webrtc/examples/stunserver/stunserver_main.cc
+++ b/webrtc/examples/stunserver/stunserver_main.cc
@@ -17,7 +17,7 @@
#include "webrtc/p2p/base/stunserver.h"
#include "webrtc/base/thread.h"
-using namespace cricket;
+using cricket::StunServer;
int main(int argc, char* argv[]) {
if (argc != 2) {
diff --git a/webrtc/libjingle/xmllite/xmlelement_unittest.cc b/webrtc/libjingle/xmllite/xmlelement_unittest.cc
index 257899aba1..df8faedbf0 100644
--- a/webrtc/libjingle/xmllite/xmlelement_unittest.cc
+++ b/webrtc/libjingle/xmllite/xmlelement_unittest.cc
@@ -15,7 +15,6 @@
#include "webrtc/base/common.h"
#include "webrtc/base/gunit.h"
#include "webrtc/base/thread.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
using buzz::QName;
using buzz::XmlAttr;
diff --git a/webrtc/libjingle/xmpp/chatroommoduleimpl.cc b/webrtc/libjingle/xmpp/chatroommoduleimpl.cc
index 546aa75f92..52fba4cbae 100644
--- a/webrtc/libjingle/xmpp/chatroommoduleimpl.cc
+++ b/webrtc/libjingle/xmpp/chatroommoduleimpl.cc
@@ -17,6 +17,7 @@
#include "webrtc/libjingle/xmpp/chatroommodule.h"
#include "webrtc/libjingle/xmpp/constants.h"
#include "webrtc/libjingle/xmpp/moduleimpl.h"
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/common.h"
namespace buzz {
@@ -535,7 +536,7 @@ XmppChatroomModuleImpl::ChangePresence(XmppChatroomState new_state,
// find the right transition description
StateTransitionDescription* transition_desc = NULL;
- for (int i=0; i < ARRAY_SIZE(Transitions); i++) {
+ for (size_t i = 0; i < arraysize(Transitions); i++) {
if (Transitions[i].old_state == old_state &&
Transitions[i].new_state == new_state) {
transition_desc = &Transitions[i];
diff --git a/webrtc/libjingle/xmpp/constants.cc b/webrtc/libjingle/xmpp/constants.cc
index 38e0cec48d..6ad2a5aa37 100644
--- a/webrtc/libjingle/xmpp/constants.cc
+++ b/webrtc/libjingle/xmpp/constants.cc
@@ -16,7 +16,6 @@
#include "webrtc/libjingle/xmllite/xmlconstants.h"
#include "webrtc/libjingle/xmllite/xmlelement.h"
#include "webrtc/libjingle/xmpp/jid.h"
-#include "webrtc/base/basicdefs.h"
namespace buzz {
diff --git a/webrtc/libjingle/xmpp/presenceouttask.cc b/webrtc/libjingle/xmpp/presenceouttask.cc
index aa19c9dd77..5519a4fd4a 100644
--- a/webrtc/libjingle/xmpp/presenceouttask.cc
+++ b/webrtc/libjingle/xmpp/presenceouttask.cc
@@ -13,6 +13,7 @@
#include "webrtc/libjingle/xmpp/constants.h"
#include "webrtc/libjingle/xmpp/presenceouttask.h"
#include "webrtc/libjingle/xmpp/xmppclient.h"
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/stringencode.h"
namespace buzz {
@@ -128,7 +129,7 @@ PresenceOutTask::TranslateStatus(const PresenceStatus & s) {
time(&current_time_seconds);
struct tm* current_time = gmtime(&current_time_seconds);
char output[256];
- strftime(output, ARRAY_SIZE(output), "%Y%m%dT%H:%M:%S", current_time);
+ strftime(output, arraysize(output), "%Y%m%dT%H:%M:%S", current_time);
result->AddAttr(kQnStamp, output, 1);
}
}
diff --git a/webrtc/libjingle/xmpp/xmppclient.cc b/webrtc/libjingle/xmpp/xmppclient.cc
index 7c2a5e693c..f7e88c3927 100644
--- a/webrtc/libjingle/xmpp/xmppclient.cc
+++ b/webrtc/libjingle/xmpp/xmppclient.cc
@@ -362,7 +362,7 @@ void XmppClient::Private::OnSocketRead() {
if (bytes_read == 0)
return;
-//#ifdef _DEBUG
+//#if !defined(NDEBUG)
client_->SignalLogInput(bytes, static_cast<int>(bytes_read));
//#endif
@@ -386,7 +386,7 @@ void XmppClient::Private::OnStateChange(int state) {
}
void XmppClient::Private::WriteOutput(const char* bytes, size_t len) {
-//#ifdef _DEBUG
+//#if !defined(NDEBUG)
client_->SignalLogOutput(bytes, static_cast<int>(len));
//#endif
diff --git a/webrtc/libjingle/xmpp/xmppclient.h b/webrtc/libjingle/xmpp/xmppclient.h
index 7b9eb7ab74..84ca6c1418 100644
--- a/webrtc/libjingle/xmpp/xmppclient.h
+++ b/webrtc/libjingle/xmpp/xmppclient.h
@@ -16,7 +16,6 @@
#include "webrtc/libjingle/xmpp/xmppclientsettings.h"
#include "webrtc/libjingle/xmpp/xmppengine.h"
#include "webrtc/libjingle/xmpp/xmpptask.h"
-#include "webrtc/base/basicdefs.h"
#include "webrtc/base/sigslot.h"
#include "webrtc/base/task.h"
diff --git a/webrtc/libjingle/xmpp/xmpplogintask.cc b/webrtc/libjingle/xmpp/xmpplogintask.cc
index f5745cd979..e39713d463 100644
--- a/webrtc/libjingle/xmpp/xmpplogintask.cc
+++ b/webrtc/libjingle/xmpp/xmpplogintask.cc
@@ -25,7 +25,7 @@ using rtc::ConstantLabel;
namespace buzz {
-#ifdef _DEBUG
+#if !defined(NDEBUG)
const ConstantLabel XmppLoginTask::LOGINTASK_STATES[] = {
KLABEL(LOGINSTATE_INIT),
KLABEL(LOGINSTATE_STREAMSTART_SENT),
@@ -40,7 +40,7 @@ const ConstantLabel XmppLoginTask::LOGINTASK_STATES[] = {
KLABEL(LOGINSTATE_DONE),
LASTLABEL
};
-#endif // _DEBUG
+#endif
XmppLoginTask::XmppLoginTask(XmppEngineImpl * pctx) :
pctx_(pctx),
authNeeded_(true),
@@ -84,10 +84,10 @@ XmppLoginTask::Advance() {
const XmlElement * element = NULL;
-#if _DEBUG
+#if !defined(NDEBUG)
LOG(LS_VERBOSE) << "XmppLoginTask::Advance - "
<< rtc::ErrorName(state_, LOGINTASK_STATES);
-#endif // _DEBUG
+#endif
switch (state_) {
diff --git a/webrtc/libjingle/xmpp/xmpplogintask.h b/webrtc/libjingle/xmpp/xmpplogintask.h
index 58e0a2f3fa..f69a648394 100644
--- a/webrtc/libjingle/xmpp/xmpplogintask.h
+++ b/webrtc/libjingle/xmpp/xmpplogintask.h
@@ -77,9 +77,9 @@ private:
rtc::scoped_ptr<SaslMechanism> sasl_mech_;
-#ifdef _DEBUG
+#if !defined(NDEBUG)
static const rtc::ConstantLabel LOGINTASK_STATES[];
-#endif // _DEBUG
+#endif
};
}
diff --git a/webrtc/libjingle/xmpp/xmppsocket.cc b/webrtc/libjingle/xmpp/xmppsocket.cc
index d67a71fe43..25e03efbe9 100644
--- a/webrtc/libjingle/xmpp/xmppsocket.cc
+++ b/webrtc/libjingle/xmpp/xmppsocket.cc
@@ -15,7 +15,6 @@
#endif
#include <errno.h>
-#include "webrtc/base/basicdefs.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/thread.h"
#ifdef FEATURE_ENABLE_SSL
diff --git a/webrtc/libjingle/xmpp/xmpptask.cc b/webrtc/libjingle/xmpp/xmpptask.cc
index 09067058ae..84f9ba6b92 100644
--- a/webrtc/libjingle/xmpp/xmpptask.cc
+++ b/webrtc/libjingle/xmpp/xmpptask.cc
@@ -24,7 +24,7 @@ XmppClientInterface::~XmppClientInterface() {
XmppTask::XmppTask(XmppTaskParentInterface* parent,
XmppEngine::HandlerLevel level)
: XmppTaskBase(parent), stopped_(false) {
-#ifdef _DEBUG
+#if !defined(NDEBUG)
debug_force_timeout_ = false;
#endif
@@ -70,7 +70,7 @@ void XmppTask::OnDisconnect() {
}
void XmppTask::QueueStanza(const XmlElement* stanza) {
-#ifdef _DEBUG
+#if !defined(NDEBUG)
if (debug_force_timeout_)
return;
#endif
diff --git a/webrtc/libjingle/xmpp/xmpptask.h b/webrtc/libjingle/xmpp/xmpptask.h
index 5b97e89c97..36351b7fa8 100644
--- a/webrtc/libjingle/xmpp/xmpptask.h
+++ b/webrtc/libjingle/xmpp/xmpptask.h
@@ -119,7 +119,7 @@ class XmppTask : public XmppTaskBase,
std::string task_id() const { return id_; }
void set_task_id(std::string id) { id_ = id; }
-#ifdef _DEBUG
+#if !defined(NDEBUG)
void set_debug_force_timeout(const bool f) { debug_force_timeout_ = f; }
#endif
@@ -162,7 +162,7 @@ private:
rtc::scoped_ptr<XmlElement> next_stanza_;
std::string id_;
-#ifdef _DEBUG
+#if !defined(NDEBUG)
bool debug_force_timeout_;
#endif
};
diff --git a/webrtc/libjingle_examples.gyp b/webrtc/libjingle_examples.gyp
deleted file mode 100755
index ab888184f6..0000000000
--- a/webrtc/libjingle_examples.gyp
+++ /dev/null
@@ -1,409 +0,0 @@
-#
-# Copyright 2012 The WebRTC Project Authors. All rights reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS. All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-
-{
- 'includes': [
- '../talk/build/common.gypi',
- ],
- 'targets': [
- {
- 'target_name': 'relayserver',
- 'type': 'executable',
- 'dependencies': [
- '../talk/libjingle.gyp:libjingle',
- '../talk/libjingle.gyp:libjingle_p2p',
- ],
- 'sources': [
- 'examples/relayserver/relayserver_main.cc',
- ],
- }, # target relayserver
- {
- 'target_name': 'stunserver',
- 'type': 'executable',
- 'dependencies': [
- '../talk/libjingle.gyp:libjingle',
- '../talk/libjingle.gyp:libjingle_p2p',
- ],
- 'sources': [
- 'examples/stunserver/stunserver_main.cc',
- ],
- }, # target stunserver
- {
- 'target_name': 'turnserver',
- 'type': 'executable',
- 'dependencies': [
- '../talk/libjingle.gyp:libjingle',
- '../talk/libjingle.gyp:libjingle_p2p',
- ],
- 'sources': [
- 'examples/turnserver/turnserver_main.cc',
- ],
- }, # target turnserver
- {
- 'target_name': 'peerconnection_server',
- 'type': 'executable',
- 'sources': [
- 'examples/peerconnection/server/data_socket.cc',
- 'examples/peerconnection/server/data_socket.h',
- 'examples/peerconnection/server/main.cc',
- 'examples/peerconnection/server/peer_channel.cc',
- 'examples/peerconnection/server/peer_channel.h',
- 'examples/peerconnection/server/utils.cc',
- 'examples/peerconnection/server/utils.h',
- ],
- 'dependencies': [
- '<(webrtc_root)/common.gyp:webrtc_common',
- '../talk/libjingle.gyp:libjingle',
- ],
- # TODO(ronghuawu): crbug.com/167187 fix size_t to int truncations.
- 'msvs_disabled_warnings': [ 4309, ],
- }, # target peerconnection_server
- ],
- 'conditions': [
- ['OS=="linux" or OS=="win"', {
- 'targets': [
- {
- 'target_name': 'peerconnection_client',
- 'type': 'executable',
- 'sources': [
- 'examples/peerconnection/client/conductor.cc',
- 'examples/peerconnection/client/conductor.h',
- 'examples/peerconnection/client/defaults.cc',
- 'examples/peerconnection/client/defaults.h',
- 'examples/peerconnection/client/peer_connection_client.cc',
- 'examples/peerconnection/client/peer_connection_client.h',
- ],
- 'dependencies': [
- '../talk/libjingle.gyp:libjingle_peerconnection',
- '<(webrtc_root)/system_wrappers/system_wrappers.gyp:field_trial_default',
- '<@(libjingle_tests_additional_deps)',
- ],
- 'conditions': [
- ['build_json==1', {
- 'dependencies': [
- '<(DEPTH)/third_party/jsoncpp/jsoncpp.gyp:jsoncpp',
- ],
- }],
- # TODO(ronghuawu): Move these files to a win/ directory then they
- # can be excluded automatically.
- ['OS=="win"', {
- 'sources': [
- 'examples/peerconnection/client/flagdefs.h',
- 'examples/peerconnection/client/main.cc',
- 'examples/peerconnection/client/main_wnd.cc',
- 'examples/peerconnection/client/main_wnd.h',
- ],
- 'msvs_settings': {
- 'VCLinkerTool': {
- 'SubSystem': '2', # Windows
- },
- },
- }], # OS=="win"
- ['OS=="linux"', {
- 'sources': [
- 'examples/peerconnection/client/linux/main.cc',
- 'examples/peerconnection/client/linux/main_wnd.cc',
- 'examples/peerconnection/client/linux/main_wnd.h',
- ],
- 'cflags': [
- '<!@(pkg-config --cflags glib-2.0 gobject-2.0 gtk+-2.0)',
- ],
- 'link_settings': {
- 'ldflags': [
- '<!@(pkg-config --libs-only-L --libs-only-other glib-2.0'
- ' gobject-2.0 gthread-2.0 gtk+-2.0)',
- ],
- 'libraries': [
- '<!@(pkg-config --libs-only-l glib-2.0 gobject-2.0'
- ' gthread-2.0 gtk+-2.0)',
- '-lX11',
- '-lXcomposite',
- '-lXext',
- '-lXrender',
- ],
- },
- }], # OS=="linux"
- ], # conditions
- }, # target peerconnection_client
- ], # targets
- }], # OS=="linux" or OS=="win"
-
- ['OS=="ios" or (OS=="mac" and target_arch!="ia32" and mac_sdk>="10.8")', {
- 'targets': [
- {
- 'target_name': 'apprtc_common',
- 'type': 'static_library',
- 'dependencies': [
- '<(webrtc_root)/system_wrappers/system_wrappers.gyp:field_trial_default',
- '../talk/libjingle.gyp:libjingle_peerconnection_objc',
- ],
- 'sources': [
- 'examples/objc/AppRTCDemo/common/ARDUtilities.h',
- 'examples/objc/AppRTCDemo/common/ARDUtilities.m',
- ],
- 'include_dirs': [
- 'examples/objc/AppRTCDemo/common',
- ],
- 'direct_dependent_settings': {
- 'include_dirs': [
- 'examples/objc/AppRTCDemo/common',
- ],
- },
- 'conditions': [
- ['OS=="mac"', {
- 'xcode_settings': {
- 'MACOSX_DEPLOYMENT_TARGET' : '10.8',
- },
- }],
- ],
- 'link_settings': {
- 'xcode_settings': {
- 'OTHER_LDFLAGS': [
- '-framework QuartzCore',
- ],
- },
- },
- },
- {
- 'target_name': 'apprtc_signaling',
- 'type': 'static_library',
- 'dependencies': [
- 'apprtc_common',
- '../talk/libjingle.gyp:libjingle_peerconnection_objc',
- 'socketrocket',
- ],
- 'sources': [
- 'examples/objc/AppRTCDemo/ARDAppClient.h',
- 'examples/objc/AppRTCDemo/ARDAppClient.m',
- 'examples/objc/AppRTCDemo/ARDAppClient+Internal.h',
- 'examples/objc/AppRTCDemo/ARDAppEngineClient.h',
- 'examples/objc/AppRTCDemo/ARDAppEngineClient.m',
- 'examples/objc/AppRTCDemo/ARDBitrateTracker.h',
- 'examples/objc/AppRTCDemo/ARDBitrateTracker.m',
- 'examples/objc/AppRTCDemo/ARDCEODTURNClient.h',
- 'examples/objc/AppRTCDemo/ARDCEODTURNClient.m',
- 'examples/objc/AppRTCDemo/ARDJoinResponse.h',
- 'examples/objc/AppRTCDemo/ARDJoinResponse.m',
- 'examples/objc/AppRTCDemo/ARDJoinResponse+Internal.h',
- 'examples/objc/AppRTCDemo/ARDMessageResponse.h',
- 'examples/objc/AppRTCDemo/ARDMessageResponse.m',
- 'examples/objc/AppRTCDemo/ARDMessageResponse+Internal.h',
- 'examples/objc/AppRTCDemo/ARDRoomServerClient.h',
- 'examples/objc/AppRTCDemo/ARDSDPUtils.h',
- 'examples/objc/AppRTCDemo/ARDSDPUtils.m',
- 'examples/objc/AppRTCDemo/ARDSignalingChannel.h',
- 'examples/objc/AppRTCDemo/ARDSignalingMessage.h',
- 'examples/objc/AppRTCDemo/ARDSignalingMessage.m',
- 'examples/objc/AppRTCDemo/ARDStatsBuilder.h',
- 'examples/objc/AppRTCDemo/ARDStatsBuilder.m',
- 'examples/objc/AppRTCDemo/ARDTURNClient.h',
- 'examples/objc/AppRTCDemo/ARDWebSocketChannel.h',
- 'examples/objc/AppRTCDemo/ARDWebSocketChannel.m',
- 'examples/objc/AppRTCDemo/RTCICECandidate+JSON.h',
- 'examples/objc/AppRTCDemo/RTCICECandidate+JSON.m',
- 'examples/objc/AppRTCDemo/RTCICEServer+JSON.h',
- 'examples/objc/AppRTCDemo/RTCICEServer+JSON.m',
- 'examples/objc/AppRTCDemo/RTCMediaConstraints+JSON.h',
- 'examples/objc/AppRTCDemo/RTCMediaConstraints+JSON.m',
- 'examples/objc/AppRTCDemo/RTCSessionDescription+JSON.h',
- 'examples/objc/AppRTCDemo/RTCSessionDescription+JSON.m',
- ],
- 'include_dirs': [
- 'examples/objc/AppRTCDemo',
- ],
- 'direct_dependent_settings': {
- 'include_dirs': [
- 'examples/objc/AppRTCDemo',
- ],
- },
- 'export_dependent_settings': [
- '../talk/libjingle.gyp:libjingle_peerconnection_objc',
- ],
- 'conditions': [
- ['OS=="mac"', {
- 'xcode_settings': {
- 'MACOSX_DEPLOYMENT_TARGET' : '10.8',
- },
- }],
- ],
- },
- {
- 'target_name': 'AppRTCDemo',
- 'type': 'executable',
- 'product_name': 'AppRTCDemo',
- 'mac_bundle': 1,
- 'dependencies': [
- 'apprtc_common',
- 'apprtc_signaling',
- ],
- 'conditions': [
- ['OS=="ios"', {
- 'mac_bundle_resources': [
- 'examples/objc/AppRTCDemo/ios/resources/iPhone5@2x.png',
- 'examples/objc/AppRTCDemo/ios/resources/iPhone6@2x.png',
- 'examples/objc/AppRTCDemo/ios/resources/iPhone6p@3x.png',
- 'examples/objc/AppRTCDemo/ios/resources/Roboto-Regular.ttf',
- 'examples/objc/AppRTCDemo/ios/resources/ic_call_end_black_24dp.png',
- 'examples/objc/AppRTCDemo/ios/resources/ic_call_end_black_24dp@2x.png',
- 'examples/objc/AppRTCDemo/ios/resources/ic_clear_black_24dp.png',
- 'examples/objc/AppRTCDemo/ios/resources/ic_clear_black_24dp@2x.png',
- 'examples/objc/AppRTCDemo/ios/resources/ic_switch_video_black_24dp.png',
- 'examples/objc/AppRTCDemo/ios/resources/ic_switch_video_black_24dp@2x.png',
- 'examples/objc/Icon.png',
- ],
- 'sources': [
- 'examples/objc/AppRTCDemo/ios/ARDAppDelegate.h',
- 'examples/objc/AppRTCDemo/ios/ARDAppDelegate.m',
- 'examples/objc/AppRTCDemo/ios/ARDMainView.h',
- 'examples/objc/AppRTCDemo/ios/ARDMainView.m',
- 'examples/objc/AppRTCDemo/ios/ARDMainViewController.h',
- 'examples/objc/AppRTCDemo/ios/ARDMainViewController.m',
- 'examples/objc/AppRTCDemo/ios/ARDStatsView.h',
- 'examples/objc/AppRTCDemo/ios/ARDStatsView.m',
- 'examples/objc/AppRTCDemo/ios/ARDVideoCallView.h',
- 'examples/objc/AppRTCDemo/ios/ARDVideoCallView.m',
- 'examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.h',
- 'examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.m',
- 'examples/objc/AppRTCDemo/ios/AppRTCDemo-Prefix.pch',
- 'examples/objc/AppRTCDemo/ios/UIImage+ARDUtilities.h',
- 'examples/objc/AppRTCDemo/ios/UIImage+ARDUtilities.m',
- 'examples/objc/AppRTCDemo/ios/main.m',
- ],
- 'xcode_settings': {
- 'INFOPLIST_FILE': 'examples/objc/AppRTCDemo/ios/Info.plist',
- },
- }],
- ['OS=="mac"', {
- 'sources': [
- 'examples/objc/AppRTCDemo/mac/APPRTCAppDelegate.h',
- 'examples/objc/AppRTCDemo/mac/APPRTCAppDelegate.m',
- 'examples/objc/AppRTCDemo/mac/APPRTCViewController.h',
- 'examples/objc/AppRTCDemo/mac/APPRTCViewController.m',
- 'examples/objc/AppRTCDemo/mac/main.m',
- ],
- 'xcode_settings': {
- 'CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS': 'NO',
- 'INFOPLIST_FILE': 'examples/objc/AppRTCDemo/mac/Info.plist',
- 'MACOSX_DEPLOYMENT_TARGET' : '10.8',
- 'OTHER_LDFLAGS': [
- '-framework AVFoundation',
- ],
- },
- }],
- ['target_arch=="ia32"', {
- 'dependencies' : [
- '<(DEPTH)/testing/iossim/iossim.gyp:iossim#host',
- ],
- }],
- ],
- }, # target AppRTCDemo
- {
- # TODO(tkchin): move this into the real third party location and
- # have it mirrored on chrome infra.
- 'target_name': 'socketrocket',
- 'type': 'static_library',
- 'sources': [
- 'examples/objc/AppRTCDemo/third_party/SocketRocket/SRWebSocket.h',
- 'examples/objc/AppRTCDemo/third_party/SocketRocket/SRWebSocket.m',
- ],
- 'conditions': [
- ['OS=="mac"', {
- 'xcode_settings': {
- # SocketRocket autosynthesizes some properties. Disable the
- # warning so we can compile successfully.
- 'CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS': 'NO',
- 'MACOSX_DEPLOYMENT_TARGET' : '10.8',
- # SRWebSocket.m uses code with partial availability.
- # https://code.google.com/p/webrtc/issues/detail?id=4695
- 'WARNING_CFLAGS!': ['-Wpartial-availability'],
- },
- }],
- ],
- 'direct_dependent_settings': {
- 'include_dirs': [
- 'examples/objc/AppRTCDemo/third_party/SocketRocket',
- ],
- },
- 'xcode_settings': {
- 'CLANG_ENABLE_OBJC_ARC': 'YES',
- 'WARNING_CFLAGS': [
- '-Wno-deprecated-declarations',
- ],
- },
- 'link_settings': {
- 'xcode_settings': {
- 'OTHER_LDFLAGS': [
- '-framework CFNetwork',
- '-licucore',
- ],
- },
- }
- }, # target socketrocket
- ], # targets
- }], # OS=="ios" or (OS=="mac" and target_arch!="ia32" and mac_sdk>="10.8")
-
- ['OS=="android"', {
- 'targets': [
- {
- 'target_name': 'AppRTCDemo',
- 'type': 'none',
- 'dependencies': [
- '../talk/libjingle.gyp:libjingle_peerconnection_java',
- ],
- 'variables': {
- 'apk_name': 'AppRTCDemo',
- 'java_in_dir': 'examples/androidapp',
- 'has_java_resources': 1,
- 'resource_dir': 'examples/androidapp/res',
- 'R_package': 'org.appspot.apprtc',
- 'R_package_relpath': 'org/appspot/apprtc',
- 'input_jars_paths': [
- 'examples/androidapp/third_party/autobanh/autobanh.jar',
- ],
- 'library_dexed_jars_paths': [
- 'examples/androidapp/third_party/autobanh/autobanh.jar',
- ],
- 'native_lib_target': 'libjingle_peerconnection_so',
- 'add_to_dependents_classpaths':1,
- },
- 'includes': [ '../build/java_apk.gypi' ],
- }, # target AppRTCDemo
-
- {
- # AppRTCDemo creates a .jar as a side effect. Any java targets
- # that need that .jar in their classpath should depend on this target,
- # AppRTCDemo_apk. Dependents of AppRTCDemo_apk receive its
- # jar path in the variable 'apk_output_jar_path'.
- # This target should only be used by targets which instrument
- # AppRTCDemo_apk.
- 'target_name': 'AppRTCDemo_apk',
- 'type': 'none',
- 'dependencies': [
- 'AppRTCDemo',
- ],
- 'includes': [ '../build/apk_fake_jar.gypi' ],
- }, # target AppRTCDemo_apk
-
- {
- 'target_name': 'AppRTCDemoTest',
- 'type': 'none',
- 'dependencies': [
- 'AppRTCDemo_apk',
- ],
- 'variables': {
- 'apk_name': 'AppRTCDemoTest',
- 'java_in_dir': 'examples/androidtests',
- 'is_test_apk': 1,
- },
- 'includes': [ '../build/java_apk.gypi' ],
- },
- ], # targets
- }], # OS=="android"
- ],
-}
diff --git a/webrtc/modules/audio_coding/BUILD.gn b/webrtc/modules/audio_coding/BUILD.gn
index 839a1439e6..000dd394df 100644
--- a/webrtc/modules/audio_coding/BUILD.gn
+++ b/webrtc/modules/audio_coding/BUILD.gn
@@ -11,10 +11,10 @@ import("../../build/webrtc.gni")
source_set("rent_a_codec") {
sources = [
- "main/acm2/acm_codec_database.cc",
- "main/acm2/acm_codec_database.h",
- "main/acm2/rent_a_codec.cc",
- "main/acm2/rent_a_codec.h",
+ "acm2/acm_codec_database.cc",
+ "acm2/acm_codec_database.h",
+ "acm2/rent_a_codec.cc",
+ "acm2/rent_a_codec.h",
]
configs += [ "../..:common_config" ]
public_configs = [ "../..:common_inherited_config" ]
@@ -44,31 +44,29 @@ source_set("rent_a_codec") {
config("audio_coding_config") {
include_dirs = [
- "main/include",
- "../interface",
+ "include",
+ "../include",
]
}
source_set("audio_coding") {
sources = [
- "main/acm2/acm_common_defs.h",
- "main/acm2/acm_receiver.cc",
- "main/acm2/acm_receiver.h",
- "main/acm2/acm_resampler.cc",
- "main/acm2/acm_resampler.h",
- "main/acm2/audio_coding_module.cc",
- "main/acm2/audio_coding_module_impl.cc",
- "main/acm2/audio_coding_module_impl.h",
- "main/acm2/call_statistics.cc",
- "main/acm2/call_statistics.h",
- "main/acm2/codec_manager.cc",
- "main/acm2/codec_manager.h",
- "main/acm2/codec_owner.cc",
- "main/acm2/codec_owner.h",
- "main/acm2/initial_delay_manager.cc",
- "main/acm2/initial_delay_manager.h",
- "main/include/audio_coding_module.h",
- "main/include/audio_coding_module_typedefs.h",
+ "acm2/acm_common_defs.h",
+ "acm2/acm_receiver.cc",
+ "acm2/acm_receiver.h",
+ "acm2/acm_resampler.cc",
+ "acm2/acm_resampler.h",
+ "acm2/audio_coding_module.cc",
+ "acm2/audio_coding_module_impl.cc",
+ "acm2/audio_coding_module_impl.h",
+ "acm2/call_statistics.cc",
+ "acm2/call_statistics.h",
+ "acm2/codec_manager.cc",
+ "acm2/codec_manager.h",
+ "acm2/initial_delay_manager.cc",
+ "acm2/initial_delay_manager.h",
+ "include/audio_coding_module.h",
+ "include/audio_coding_module_typedefs.h",
]
defines = []
@@ -166,11 +164,11 @@ config("cng_config") {
source_set("cng") {
sources = [
"codecs/cng/audio_encoder_cng.cc",
+ "codecs/cng/audio_encoder_cng.h",
"codecs/cng/cng_helpfuns.c",
"codecs/cng/cng_helpfuns.h",
- "codecs/cng/include/audio_encoder_cng.h",
- "codecs/cng/include/webrtc_cng.h",
"codecs/cng/webrtc_cng.c",
+ "codecs/cng/webrtc_cng.h",
]
configs += [ "../..:common_config" ]
@@ -181,8 +179,8 @@ source_set("cng") {
]
deps = [
- "../../common_audio",
":audio_encoder_interface",
+ "../../common_audio",
]
}
@@ -204,8 +202,8 @@ source_set("red") {
]
deps = [
- "../../common_audio",
":audio_encoder_interface",
+ "../../common_audio",
]
}
@@ -219,13 +217,13 @@ config("g711_config") {
source_set("g711") {
sources = [
"codecs/g711/audio_decoder_pcm.cc",
+ "codecs/g711/audio_decoder_pcm.h",
"codecs/g711/audio_encoder_pcm.cc",
+ "codecs/g711/audio_encoder_pcm.h",
"codecs/g711/g711.c",
"codecs/g711/g711.h",
"codecs/g711/g711_interface.c",
- "codecs/g711/include/audio_decoder_pcm.h",
- "codecs/g711/include/audio_encoder_pcm.h",
- "codecs/g711/include/g711_interface.h",
+ "codecs/g711/g711_interface.h",
]
configs += [ "../..:common_config" ]
@@ -236,6 +234,7 @@ source_set("g711") {
]
deps = [
+ ":audio_decoder_interface",
":audio_encoder_interface",
]
}
@@ -250,14 +249,14 @@ config("g722_config") {
source_set("g722") {
sources = [
"codecs/g722/audio_decoder_g722.cc",
+ "codecs/g722/audio_decoder_g722.h",
"codecs/g722/audio_encoder_g722.cc",
+ "codecs/g722/audio_encoder_g722.h",
"codecs/g722/g722_decode.c",
"codecs/g722/g722_enc_dec.h",
"codecs/g722/g722_encode.c",
"codecs/g722/g722_interface.c",
- "codecs/g722/include/audio_decoder_g722.h",
- "codecs/g722/include/audio_encoder_g722.h",
- "codecs/g722/include/g722_interface.h",
+ "codecs/g722/g722_interface.h",
]
configs += [ "../..:common_config" ]
@@ -268,6 +267,7 @@ source_set("g722") {
]
deps = [
+ ":audio_decoder_interface",
":audio_encoder_interface",
]
}
@@ -286,7 +286,9 @@ source_set("ilbc") {
"codecs/ilbc/abs_quant_loop.c",
"codecs/ilbc/abs_quant_loop.h",
"codecs/ilbc/audio_decoder_ilbc.cc",
+ "codecs/ilbc/audio_decoder_ilbc.h",
"codecs/ilbc/audio_encoder_ilbc.cc",
+ "codecs/ilbc/audio_encoder_ilbc.h",
"codecs/ilbc/augmented_cb_corr.c",
"codecs/ilbc/augmented_cb_corr.h",
"codecs/ilbc/bw_expand.c",
@@ -351,9 +353,7 @@ source_set("ilbc") {
"codecs/ilbc/hp_output.c",
"codecs/ilbc/hp_output.h",
"codecs/ilbc/ilbc.c",
- "codecs/ilbc/include/audio_decoder_ilbc.h",
- "codecs/ilbc/include/audio_encoder_ilbc.h",
- "codecs/ilbc/include/ilbc.h",
+ "codecs/ilbc/ilbc.h",
"codecs/ilbc/index_conv_dec.c",
"codecs/ilbc/index_conv_dec.h",
"codecs/ilbc/index_conv_enc.c",
@@ -434,8 +434,9 @@ source_set("ilbc") {
]
deps = [
- "../../common_audio",
+ ":audio_decoder_interface",
":audio_encoder_interface",
+ "../../common_audio",
]
}
@@ -604,6 +605,7 @@ source_set("isac_fix") {
]
deps = [
+ ":audio_decoder_interface",
":audio_encoder_interface",
":isac_common",
"../../common_audio",
@@ -696,14 +698,15 @@ config("pcm16b_config") {
source_set("pcm16b") {
sources = [
"codecs/pcm16b/audio_decoder_pcm16b.cc",
+ "codecs/pcm16b/audio_decoder_pcm16b.h",
"codecs/pcm16b/audio_encoder_pcm16b.cc",
- "codecs/pcm16b/include/audio_decoder_pcm16b.h",
- "codecs/pcm16b/include/audio_encoder_pcm16b.h",
- "codecs/pcm16b/include/pcm16b.h",
+ "codecs/pcm16b/audio_encoder_pcm16b.h",
"codecs/pcm16b/pcm16b.c",
+ "codecs/pcm16b/pcm16b.h",
]
deps = [
+ ":audio_decoder_interface",
":audio_encoder_interface",
":g711",
]
@@ -723,16 +726,18 @@ config("opus_config") {
source_set("webrtc_opus") {
sources = [
"codecs/opus/audio_decoder_opus.cc",
+ "codecs/opus/audio_decoder_opus.h",
"codecs/opus/audio_encoder_opus.cc",
- "codecs/opus/include/audio_decoder_opus.h",
- "codecs/opus/include/audio_encoder_opus.h",
- "codecs/opus/include/opus_interface.h",
+ "codecs/opus/audio_encoder_opus.h",
"codecs/opus/opus_inst.h",
"codecs/opus/opus_interface.c",
+ "codecs/opus/opus_interface.h",
]
deps = [
+ ":audio_decoder_interface",
":audio_encoder_interface",
+ "../../base:rtc_base_approved",
]
if (rtc_build_opus) {
diff --git a/webrtc/modules/audio_coding/OWNERS b/webrtc/modules/audio_coding/OWNERS
index f43fb1f3ee..77db17d1c5 100644
--- a/webrtc/modules/audio_coding/OWNERS
+++ b/webrtc/modules/audio_coding/OWNERS
@@ -5,8 +5,6 @@ kwiberg@webrtc.org
minyue@webrtc.org
jan.skoglund@webrtc.org
-per-file *.isolate=kjellander@webrtc.org
-
# These are for the common case of adding or renaming files. If you're doing
# structural changes, please get a review from a reviewer in this file.
per-file *.gyp=*
diff --git a/webrtc/modules/audio_coding/acm2/acm_codec_database.cc b/webrtc/modules/audio_coding/acm2/acm_codec_database.cc
new file mode 100644
index 0000000000..5f3c07802b
--- /dev/null
+++ b/webrtc/modules/audio_coding/acm2/acm_codec_database.cc
@@ -0,0 +1,333 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file generates databases with information about all supported audio
+ * codecs.
+ */
+
+// TODO(tlegrand): Change constant input pointers in all functions to constant
+// references, where appropriate.
+#include "webrtc/modules/audio_coding/acm2/acm_codec_database.h"
+
+#include <assert.h>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/modules/audio_coding/acm2/acm_common_defs.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+namespace webrtc {
+
+namespace acm2 {
+
+namespace {
+
+// Checks if the bitrate is valid for iSAC.
+bool IsISACRateValid(int rate) {
+ return (rate == -1) || ((rate <= 56000) && (rate >= 10000));
+}
+
+// Checks if the bitrate is valid for iLBC.
+bool IsILBCRateValid(int rate, int frame_size_samples) {
+ if (((frame_size_samples == 240) || (frame_size_samples == 480)) &&
+ (rate == 13300)) {
+ return true;
+ } else if (((frame_size_samples == 160) || (frame_size_samples == 320)) &&
+ (rate == 15200)) {
+ return true;
+ } else {
+ return false;
+ }
+}
+
+// Checks if the bitrate is valid for Opus.
+bool IsOpusRateValid(int rate) {
+ return (rate >= 6000) && (rate <= 510000);
+}
+
+} // namespace
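+
+// For example, IsILBCRateValid(13300, 240) and IsILBCRateValid(15200, 320)
+// both hold: 13.3 kbps corresponds to 30 ms iLBC frames (240 or 480 samples
+// at 8 kHz) and 15.2 kbps to 20 ms frames (160 or 320 samples); any other
+// rate/frame-size pairing is rejected.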
+
+// Payload types not yet in use:
+// 83, 82, 81, 80, 79, 78, 77, 76, 75, 74, 73, 72, 71, 70, 69, 68,
+// 67, 66, 65
+
+const CodecInst ACMCodecDB::database_[] = {
+#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
+ {103, "ISAC", 16000, kIsacPacSize480, 1, kIsacWbDefaultRate},
+# if (defined(WEBRTC_CODEC_ISAC))
+ {104, "ISAC", 32000, kIsacPacSize960, 1, kIsacSwbDefaultRate},
+# endif
+#endif
+ // Mono
+ {107, "L16", 8000, 80, 1, 128000},
+ {108, "L16", 16000, 160, 1, 256000},
+ {109, "L16", 32000, 320, 1, 512000},
+ // Stereo
+ {111, "L16", 8000, 80, 2, 128000},
+ {112, "L16", 16000, 160, 2, 256000},
+ {113, "L16", 32000, 320, 2, 512000},
+ // G.711, PCM mu-law and A-law.
+ // Mono
+ {0, "PCMU", 8000, 160, 1, 64000},
+ {8, "PCMA", 8000, 160, 1, 64000},
+ // Stereo
+ {110, "PCMU", 8000, 160, 2, 64000},
+ {118, "PCMA", 8000, 160, 2, 64000},
+#ifdef WEBRTC_CODEC_ILBC
+ {102, "ILBC", 8000, 240, 1, 13300},
+#endif
+#ifdef WEBRTC_CODEC_G722
+ // Mono
+ {9, "G722", 16000, 320, 1, 64000},
+ // Stereo
+ {119, "G722", 16000, 320, 2, 64000},
+#endif
+#ifdef WEBRTC_CODEC_OPUS
+ // Opus internally supports 48, 24, 16, 12, 8 kHz.
+ // Mono and stereo.
+ {120, "opus", 48000, 960, 2, 64000},
+#endif
+ // Comfort noise for four different sampling frequencies.
+ {13, "CN", 8000, 240, 1, 0},
+ {98, "CN", 16000, 480, 1, 0},
+ {99, "CN", 32000, 960, 1, 0},
+#ifdef ENABLE_48000_HZ
+ {100, "CN", 48000, 1440, 1, 0},
+#endif
+ {106, "telephone-event", 8000, 240, 1, 0},
+#ifdef WEBRTC_CODEC_RED
+ {127, "red", 8000, 0, 1, 0},
+#endif
+ // To prevent compile errors due to trailing commas.
+ {-1, "Null", -1, -1, 0, -1}
+};
+
+// Create database with all codec settings at compile time.
+// Each entry needs the following parameters in the given order:
+// number of allowed packet sizes, a vector with the allowed packet sizes,
+// basic block samples, and the max number of channels that are supported.
+const ACMCodecDB::CodecSettings ACMCodecDB::codec_settings_[] = {
+#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
+ {2, {kIsacPacSize480, kIsacPacSize960}, 0, 1},
+# if (defined(WEBRTC_CODEC_ISAC))
+ {1, {kIsacPacSize960}, 0, 1},
+# endif
+#endif
+ // Mono
+ {4, {80, 160, 240, 320}, 0, 2},
+ {4, {160, 320, 480, 640}, 0, 2},
+ {2, {320, 640}, 0, 2},
+ // Stereo
+ {4, {80, 160, 240, 320}, 0, 2},
+ {4, {160, 320, 480, 640}, 0, 2},
+ {2, {320, 640}, 0, 2},
+ // G.711, PCM mu-law and A-law.
+ // Mono
+ {6, {80, 160, 240, 320, 400, 480}, 0, 2},
+ {6, {80, 160, 240, 320, 400, 480}, 0, 2},
+ // Stereo
+ {6, {80, 160, 240, 320, 400, 480}, 0, 2},
+ {6, {80, 160, 240, 320, 400, 480}, 0, 2},
+#ifdef WEBRTC_CODEC_ILBC
+ {4, {160, 240, 320, 480}, 0, 1},
+#endif
+#ifdef WEBRTC_CODEC_G722
+ // Mono
+ {6, {160, 320, 480, 640, 800, 960}, 0, 2},
+ // Stereo
+ {6, {160, 320, 480, 640, 800, 960}, 0, 2},
+#endif
+#ifdef WEBRTC_CODEC_OPUS
+ // Opus supports frames shorter than 10 ms, but there is no benefit in
+ // using them.
+ // Mono and stereo.
+ {4, {480, 960, 1920, 2880}, 0, 2},
+#endif
+ // Comfort noise for three different sampling frequencies.
+ {1, {240}, 240, 1},
+ {1, {480}, 480, 1},
+ {1, {960}, 960, 1},
+#ifdef ENABLE_48000_HZ
+ {1, {1440}, 1440, 1},
+#endif
+ {1, {240}, 240, 1},
+#ifdef WEBRTC_CODEC_RED
+ {1, {0}, 0, 1},
+#endif
+ // To prevent compile errors due to trailing commas.
+ {-1, {-1}, -1, 0}
+};
+
+// Create a database of all NetEQ decoders at compile time.
+const NetEqDecoder ACMCodecDB::neteq_decoders_[] = {
+#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
+ NetEqDecoder::kDecoderISAC,
+# if (defined(WEBRTC_CODEC_ISAC))
+ NetEqDecoder::kDecoderISACswb,
+# endif
+#endif
+ // Mono
+ NetEqDecoder::kDecoderPCM16B, NetEqDecoder::kDecoderPCM16Bwb,
+ NetEqDecoder::kDecoderPCM16Bswb32kHz,
+ // Stereo
+ NetEqDecoder::kDecoderPCM16B_2ch, NetEqDecoder::kDecoderPCM16Bwb_2ch,
+ NetEqDecoder::kDecoderPCM16Bswb32kHz_2ch,
+ // G.711, PCM mu-law and A-law.
+ // Mono
+ NetEqDecoder::kDecoderPCMu, NetEqDecoder::kDecoderPCMa,
+ // Stereo
+ NetEqDecoder::kDecoderPCMu_2ch, NetEqDecoder::kDecoderPCMa_2ch,
+#ifdef WEBRTC_CODEC_ILBC
+ NetEqDecoder::kDecoderILBC,
+#endif
+#ifdef WEBRTC_CODEC_G722
+ // Mono
+ NetEqDecoder::kDecoderG722,
+ // Stereo
+ NetEqDecoder::kDecoderG722_2ch,
+#endif
+#ifdef WEBRTC_CODEC_OPUS
+ // Mono and stereo.
+ NetEqDecoder::kDecoderOpus,
+#endif
+ // Comfort noise for three different sampling frequencies.
+ NetEqDecoder::kDecoderCNGnb, NetEqDecoder::kDecoderCNGwb,
+ NetEqDecoder::kDecoderCNGswb32kHz,
+#ifdef ENABLE_48000_HZ
+ NetEqDecoder::kDecoderCNGswb48kHz,
+#endif
+ NetEqDecoder::kDecoderAVT,
+#ifdef WEBRTC_CODEC_RED
+ NetEqDecoder::kDecoderRED,
+#endif
+};
+
+// Enumeration of error codes used when asking for a codec database id.
+enum {
+ kInvalidCodec = -10,
+ kInvalidPayloadtype = -30,
+ kInvalidPacketSize = -40,
+ kInvalidRate = -50
+};
+
+// Gets the codec id number from the database. If there is some mismatch in
+// the codec settings, the function will return an error code.
+// NOTE: the first mismatch found determines the return value.
+int ACMCodecDB::CodecNumber(const CodecInst& codec_inst) {
+ // Look for a matching codec in the database.
+ int codec_id = CodecId(codec_inst);
+
+ // Checks if we found a matching codec.
+ if (codec_id == -1) {
+ return kInvalidCodec;
+ }
+
+ // Checks the validity of payload type
+ if (!RentACodec::IsPayloadTypeValid(codec_inst.pltype)) {
+ return kInvalidPayloadtype;
+ }
+
+ // Comfort noise is a special case; packet size and rate are not checked.
+ if (STR_CASE_CMP(database_[codec_id].plname, "CN") == 0) {
+ return codec_id;
+ }
+
+ // RED is a special case; packet size and rate are not checked.
+ if (STR_CASE_CMP(database_[codec_id].plname, "red") == 0) {
+ return codec_id;
+ }
+
+ // Checks the validity of packet size.
+ if (codec_settings_[codec_id].num_packet_sizes > 0) {
+ bool packet_size_ok = false;
+ for (int i = 0; i < codec_settings_[codec_id].num_packet_sizes; i++) {
+ const int packet_size_samples =
+ codec_settings_[codec_id].packet_sizes_samples[i];
+ if (codec_inst.pacsize == packet_size_samples) {
+ packet_size_ok = true;
+ break;
+ }
+ }
+
+ if (!packet_size_ok) {
+ return kInvalidPacketSize;
+ }
+ }
+
+ if (codec_inst.pacsize < 1) {
+ return kInvalidPacketSize;
+ }
+
+ // Check the validity of rate. Codecs with multiple rates have their own
+ // function for this.
+ if (STR_CASE_CMP("isac", codec_inst.plname) == 0) {
+ return IsISACRateValid(codec_inst.rate) ? codec_id : kInvalidRate;
+ } else if (STR_CASE_CMP("ilbc", codec_inst.plname) == 0) {
+ return IsILBCRateValid(codec_inst.rate, codec_inst.pacsize)
+ ? codec_id : kInvalidRate;
+ } else if (STR_CASE_CMP("opus", codec_inst.plname) == 0) {
+ return IsOpusRateValid(codec_inst.rate)
+ ? codec_id : kInvalidRate;
+ }
+
+ return database_[codec_id].rate == codec_inst.rate ? codec_id : kInvalidRate;
+}
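+
+// A minimal sketch of the validation cascade above; the CodecInst literal is
+// hypothetical and requires Opus to be compiled in (WEBRTC_CODEC_OPUS):
+//
+//   CodecInst ci = {120, "opus", 48000, 960, 2, 5000};  // Rate below 6000.
+//   int id = ACMCodecDB::CodecNumber(ci);  // kInvalidRate (-50): bad rate.
+//   ci.rate = 64000;
+//   id = ACMCodecDB::CodecNumber(ci);      // >= 0: index into database_.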
+
+// Looks for a matching payload name, frequency, and channels in the
+// codec list. Need to check all three since some codecs have several codec
+// entries with different frequencies and/or channels.
+// Does not check other codec settings, such as payload type and packet size.
+// Returns the id of the codec, or -1 if no match is found.
+int ACMCodecDB::CodecId(const CodecInst& codec_inst) {
+ return (CodecId(codec_inst.plname, codec_inst.plfreq,
+ codec_inst.channels));
+}
+
+int ACMCodecDB::CodecId(const char* payload_name,
+ int frequency,
+ size_t channels) {
+ for (const CodecInst& ci : RentACodec::Database()) {
+ bool name_match = false;
+ bool frequency_match = false;
+ bool channels_match = false;
+
+ // Payload name, sampling frequency and number of channels need to match.
+ // NOTE: if |frequency| is -1, the frequency is not applicable and the
+ // frequency match is always treated as true, as for RED.
+ name_match = (STR_CASE_CMP(ci.plname, payload_name) == 0);
+ frequency_match = (frequency == ci.plfreq) || (frequency == -1);
+ // The number of channels must match for all codecs but Opus.
+ if (STR_CASE_CMP(payload_name, "opus") != 0) {
+ channels_match = (channels == ci.channels);
+ } else {
+ // For opus we just check that number of channels is valid.
+ channels_match = (channels == 1 || channels == 2);
+ }
+
+ if (name_match && frequency_match && channels_match) {
+ // We have found a matching codec in the list.
+ return &ci - RentACodec::Database().data();
+ }
+ }
+
+ // We didn't find a matching codec.
+ return -1;
+}
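+
+// The index above comes from pointer arithmetic (&ci - Database().data()),
+// which is well defined because the range-based loop walks the same
+// contiguous array that backs Database(). A hypothetical lookup:
+//
+//   int id = ACMCodecDB::CodecId("PCMU", 8000, 1);  // Index of mono PCMU.
+//   int red = ACMCodecDB::CodecId("red", -1, 1);    // -1 matches any plfreq
+//                                                   // (needs WEBRTC_CODEC_RED).
+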
+// Gets codec id number from database for the receiver.
+int ACMCodecDB::ReceiverCodecNumber(const CodecInst& codec_inst) {
+ // Look for a matching codec in the database.
+ return CodecId(codec_inst);
+}
+
+} // namespace acm2
+
+} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/acm2/acm_codec_database.h b/webrtc/modules/audio_coding/acm2/acm_codec_database.h
new file mode 100644
index 0000000000..6c2db9cfc8
--- /dev/null
+++ b/webrtc/modules/audio_coding/acm2/acm_codec_database.h
@@ -0,0 +1,83 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file generates databases with information about all supported audio
+ * codecs.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_ACM2_ACM_CODEC_DATABASE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_ACM2_ACM_CODEC_DATABASE_H_
+
+#include "webrtc/common_types.h"
+#include "webrtc/engine_configurations.h"
+#include "webrtc/modules/audio_coding/acm2/rent_a_codec.h"
+#include "webrtc/modules/audio_coding/neteq/include/neteq.h"
+
+namespace webrtc {
+
+namespace acm2 {
+
+// TODO(tlegrand): replace class ACMCodecDB with a namespace.
+class ACMCodecDB {
+ public:
+ // kMaxNumCodecs - Maximum number of codecs that can be activated in one
+ // build.
+ // kMaxNumPacketSize - Maximum number of allowed packet sizes for one codec.
+ // These might need to be increased when adding a new codec to the database.
+ static const int kMaxNumCodecs = 50;
+ static const int kMaxNumPacketSize = 6;
+
+ // Codec specific settings
+ //
+ // num_packet_sizes - number of allowed packet sizes.
+ // packet_sizes_samples - list of the allowed packet sizes.
+ // basic_block_samples - assigned a value different from 0 if the codec
+ // requires to be fed with a specific number of samples
+ // that can be different from packet size.
+ // channel_support - number of channels supported to encode;
+ // 1 = mono, 2 = stereo, etc.
+ struct CodecSettings {
+ int num_packet_sizes;
+ int packet_sizes_samples[kMaxNumPacketSize];
+ int basic_block_samples;
+ size_t channel_support;
+ };
+
+ // Returns codec id from database, given the information received in the input
+ // [codec_inst].
+ // Input:
+ // [codec_inst] - Information about the codec for which we require the
+ // database id.
+ // Return:
+ // codec id if successful, otherwise < 0.
+ static int CodecNumber(const CodecInst& codec_inst);
+ static int CodecId(const CodecInst& codec_inst);
+ static int CodecId(const char* payload_name, int frequency, size_t channels);
+ static int ReceiverCodecNumber(const CodecInst& codec_inst);
+
+ // Databases with information about the supported codecs
+ // database_ - stored information about all codecs: payload type, name,
+ // sampling frequency, packet size in samples, default channel
+ // support, and default rate.
+ // codec_settings_ - stored codec settings: number of allowed packet sizes,
+ // a vector with the allowed packet sizes, basic block
+ // samples, and max number of channels that are supported.
+ // neteq_decoders_ - list of supported decoders in NetEQ.
+ static const CodecInst database_[kMaxNumCodecs];
+ static const CodecSettings codec_settings_[kMaxNumCodecs];
+ static const NetEqDecoder neteq_decoders_[kMaxNumCodecs];
+};
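+
+// A typical lookup goes through the static helpers; for instance,
+// ACMCodecDB::CodecId("L16", 16000, 2) yields the index of the stereo
+// 16 kHz linear-PCM entry in database_, or -1 when nothing matches.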
+
+} // namespace acm2
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_ACM2_ACM_CODEC_DATABASE_H_
diff --git a/webrtc/modules/audio_coding/acm2/acm_common_defs.h b/webrtc/modules/audio_coding/acm2/acm_common_defs.h
new file mode 100644
index 0000000000..483bdd93f1
--- /dev/null
+++ b/webrtc/modules/audio_coding/acm2/acm_common_defs.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_ACM2_ACM_COMMON_DEFS_H_
+#define WEBRTC_MODULES_AUDIO_CODING_ACM2_ACM_COMMON_DEFS_H_
+
+#include "webrtc/engine_configurations.h"
+
+// Checks for enabled codecs; incompatible codecs must not be enabled at the
+// same time.
+#if ((defined WEBRTC_CODEC_ISAC) && (defined WEBRTC_CODEC_ISACFX))
+#error iSAC and iSACFX codecs cannot be enabled at the same time
+#endif
+
+namespace webrtc {
+
+// General codec specific defines
+const int kIsacWbDefaultRate = 32000;
+const int kIsacSwbDefaultRate = 56000;
+const int kIsacPacSize480 = 480;
+const int kIsacPacSize960 = 960;
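+
+// Both packet sizes correspond to 30 ms frames: 480 samples at 16 kHz
+// (wideband iSAC) and 960 samples at 32 kHz (super-wideband iSAC).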
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_ACM2_ACM_COMMON_DEFS_H_
diff --git a/webrtc/modules/audio_coding/main/acm2/acm_neteq_unittest.cc b/webrtc/modules/audio_coding/acm2/acm_neteq_unittest.cc
index 607b933deb..607b933deb 100644
--- a/webrtc/modules/audio_coding/main/acm2/acm_neteq_unittest.cc
+++ b/webrtc/modules/audio_coding/acm2/acm_neteq_unittest.cc
diff --git a/webrtc/modules/audio_coding/acm2/acm_receive_test_oldapi.cc b/webrtc/modules/audio_coding/acm2/acm_receive_test_oldapi.cc
new file mode 100644
index 0000000000..855a39e675
--- /dev/null
+++ b/webrtc/modules/audio_coding/acm2/acm_receive_test_oldapi.cc
@@ -0,0 +1,222 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/acm2/acm_receive_test_oldapi.h"
+
+#include <assert.h>
+#include <stdio.h>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module.h"
+#include "webrtc/modules/audio_coding/neteq/tools/audio_sink.h"
+#include "webrtc/modules/audio_coding/neteq/tools/packet.h"
+#include "webrtc/modules/audio_coding/neteq/tools/packet_source.h"
+
+namespace webrtc {
+namespace test {
+
+namespace {
+// Returns true if the codec should be registered, otherwise false. Skips
+// 48 kHz comfort noise and telephone-event.
+bool ModifyAndUseThisCodec(CodecInst* codec_param) {
+ if (STR_CASE_CMP(codec_param->plname, "CN") == 0 &&
+ codec_param->plfreq == 48000)
+ return false; // Skip 48 kHz comfort noise.
+
+ if (STR_CASE_CMP(codec_param->plname, "telephone-event") == 0)
+ return false; // Skip DTMF.
+
+ return true;
+}
+
+// Remaps payload types from ACM's default to those used in the resource file
+// neteq_universal_new.rtp. Returns true if the codec should be registered,
+// otherwise false. The payload types are set as follows (all are mono codecs):
+// PCMu = 0;
+// PCMa = 8;
+// Comfort noise 8 kHz = 13
+// Comfort noise 16 kHz = 98
+// Comfort noise 32 kHz = 99
+// iLBC = 102
+// iSAC wideband = 103
+// iSAC super-wideband = 104
+// AVT/DTMF = 106
+// RED = 117
+// PCM16b 8 kHz = 93
+// PCM16b 16 kHz = 94
+// PCM16b 32 kHz = 95
+// G.722 = 9
+bool RemapPltypeAndUseThisCodec(const char* plname,
+ int plfreq,
+ size_t channels,
+ int* pltype) {
+ if (channels != 1)
+ return false; // Don't use non-mono codecs.
+
+ // Re-map pltypes to those used in the NetEq test files.
+ if (STR_CASE_CMP(plname, "PCMU") == 0 && plfreq == 8000) {
+ *pltype = 0;
+ } else if (STR_CASE_CMP(plname, "PCMA") == 0 && plfreq == 8000) {
+ *pltype = 8;
+ } else if (STR_CASE_CMP(plname, "CN") == 0 && plfreq == 8000) {
+ *pltype = 13;
+ } else if (STR_CASE_CMP(plname, "CN") == 0 && plfreq == 16000) {
+ *pltype = 98;
+ } else if (STR_CASE_CMP(plname, "CN") == 0 && plfreq == 32000) {
+ *pltype = 99;
+ } else if (STR_CASE_CMP(plname, "ILBC") == 0) {
+ *pltype = 102;
+ } else if (STR_CASE_CMP(plname, "ISAC") == 0 && plfreq == 16000) {
+ *pltype = 103;
+ } else if (STR_CASE_CMP(plname, "ISAC") == 0 && plfreq == 32000) {
+ *pltype = 104;
+ } else if (STR_CASE_CMP(plname, "telephone-event") == 0) {
+ *pltype = 106;
+ } else if (STR_CASE_CMP(plname, "red") == 0) {
+ *pltype = 117;
+ } else if (STR_CASE_CMP(plname, "L16") == 0 && plfreq == 8000) {
+ *pltype = 93;
+ } else if (STR_CASE_CMP(plname, "L16") == 0 && plfreq == 16000) {
+ *pltype = 94;
+ } else if (STR_CASE_CMP(plname, "L16") == 0 && plfreq == 32000) {
+ *pltype = 95;
+ } else if (STR_CASE_CMP(plname, "G722") == 0) {
+ *pltype = 9;
+ } else {
+ // Don't use any other codecs.
+ return false;
+ }
+ return true;
+}
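+
+// A short usage sketch of the remapping above; mono entries are remapped,
+// stereo ones rejected before any remapping happens:
+//
+//   int pltype = -1;
+//   RemapPltypeAndUseThisCodec("PCMU", 8000, 1, &pltype);  // true, pltype 0.
+//   RemapPltypeAndUseThisCodec("PCMU", 8000, 2, &pltype);  // false.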
+} // namespace
+
+AcmReceiveTestOldApi::AcmReceiveTestOldApi(
+ PacketSource* packet_source,
+ AudioSink* audio_sink,
+ int output_freq_hz,
+ NumOutputChannels exptected_output_channels)
+ : clock_(0),
+ acm_(webrtc::AudioCodingModule::Create(0, &clock_)),
+ packet_source_(packet_source),
+ audio_sink_(audio_sink),
+ output_freq_hz_(output_freq_hz),
+ exptected_output_channels_(exptected_output_channels) {
+}
+
+void AcmReceiveTestOldApi::RegisterDefaultCodecs() {
+ CodecInst my_codec_param;
+ for (int n = 0; n < acm_->NumberOfCodecs(); n++) {
+ ASSERT_EQ(0, acm_->Codec(n, &my_codec_param)) << "Failed to get codec.";
+ if (ModifyAndUseThisCodec(&my_codec_param)) {
+ ASSERT_EQ(0, acm_->RegisterReceiveCodec(my_codec_param))
+ << "Couldn't register receive codec.\n";
+ }
+ }
+}
+
+void AcmReceiveTestOldApi::RegisterNetEqTestCodecs() {
+ CodecInst my_codec_param;
+ for (int n = 0; n < acm_->NumberOfCodecs(); n++) {
+ ASSERT_EQ(0, acm_->Codec(n, &my_codec_param)) << "Failed to get codec.";
+ if (!ModifyAndUseThisCodec(&my_codec_param)) {
+ // Skip this codec.
+ continue;
+ }
+
+ if (RemapPltypeAndUseThisCodec(my_codec_param.plname,
+ my_codec_param.plfreq,
+ my_codec_param.channels,
+ &my_codec_param.pltype)) {
+ ASSERT_EQ(0, acm_->RegisterReceiveCodec(my_codec_param))
+ << "Couldn't register receive codec.\n";
+ }
+ }
+}
+
+int AcmReceiveTestOldApi::RegisterExternalReceiveCodec(
+ int rtp_payload_type,
+ AudioDecoder* external_decoder,
+ int sample_rate_hz,
+ int num_channels,
+ const std::string& name) {
+ return acm_->RegisterExternalReceiveCodec(rtp_payload_type, external_decoder,
+ sample_rate_hz, num_channels, name);
+}
+
+void AcmReceiveTestOldApi::Run() {
+ for (rtc::scoped_ptr<Packet> packet(packet_source_->NextPacket()); packet;
+ packet.reset(packet_source_->NextPacket())) {
+ // Pull audio until time to insert packet.
+ while (clock_.TimeInMilliseconds() < packet->time_ms()) {
+ AudioFrame output_frame;
+ EXPECT_EQ(0, acm_->PlayoutData10Ms(output_freq_hz_, &output_frame));
+ EXPECT_EQ(output_freq_hz_, output_frame.sample_rate_hz_);
+ const size_t samples_per_block =
+ static_cast<size_t>(output_freq_hz_ * 10 / 1000);
+ EXPECT_EQ(samples_per_block, output_frame.samples_per_channel_);
+ if (exptected_output_channels_ != kArbitraryChannels) {
+ if (output_frame.speech_type_ == webrtc::AudioFrame::kPLC) {
+ // Don't check number of channels for PLC output, since each test run
+ // usually starts with a short period of mono PLC before decoding the
+ // first packet.
+ } else {
+ EXPECT_EQ(exptected_output_channels_, output_frame.num_channels_);
+ }
+ }
+ ASSERT_TRUE(audio_sink_->WriteAudioFrame(output_frame));
+ clock_.AdvanceTimeMilliseconds(10);
+ AfterGetAudio();
+ }
+
+ // Insert packet after converting from RTPHeader to WebRtcRTPHeader.
+ WebRtcRTPHeader header;
+ header.header = packet->header();
+ header.frameType = kAudioFrameSpeech;
+ memset(&header.type.Audio, 0, sizeof(RTPAudioHeader));
+ EXPECT_EQ(0,
+ acm_->IncomingPacket(
+ packet->payload(),
+ static_cast<int32_t>(packet->payload_length_bytes()),
+ header))
+ << "Failure when inserting packet:" << std::endl
+ << " PT = " << static_cast<int>(header.header.payloadType) << std::endl
+ << " TS = " << header.header.timestamp << std::endl
+ << " SN = " << header.header.sequenceNumber;
+ }
+}
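+
+// A minimal driver sketch, assuming the RtpFileSource and OutputAudioFile
+// helpers from the NetEq test tools:
+//
+//   rtc::scoped_ptr<test::RtpFileSource> source(
+//       test::RtpFileSource::Create("neteq_universal_new.rtp"));
+//   test::OutputAudioFile sink("out.pcm");
+//   AcmReceiveTestOldApi test(source.get(), &sink, 32000,
+//                             AcmReceiveTestOldApi::kArbitraryChannels);
+//   test.RegisterNetEqTestCodecs();
+//   test.Run();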
+
+AcmReceiveTestToggleOutputFreqOldApi::AcmReceiveTestToggleOutputFreqOldApi(
+ PacketSource* packet_source,
+ AudioSink* audio_sink,
+ int output_freq_hz_1,
+ int output_freq_hz_2,
+ int toggle_period_ms,
+ NumOutputChannels exptected_output_channels)
+ : AcmReceiveTestOldApi(packet_source,
+ audio_sink,
+ output_freq_hz_1,
+ exptected_output_channels),
+ output_freq_hz_1_(output_freq_hz_1),
+ output_freq_hz_2_(output_freq_hz_2),
+ toggle_period_ms_(toggle_period_ms),
+ last_toggle_time_ms_(clock_.TimeInMilliseconds()) {
+}
+
+void AcmReceiveTestToggleOutputFreqOldApi::AfterGetAudio() {
+ if (clock_.TimeInMilliseconds() >= last_toggle_time_ms_ + toggle_period_ms_) {
+ output_freq_hz_ = (output_freq_hz_ == output_freq_hz_1_)
+ ? output_freq_hz_2_
+ : output_freq_hz_1_;
+ last_toggle_time_ms_ = clock_.TimeInMilliseconds();
+ }
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/acm2/acm_receive_test_oldapi.h b/webrtc/modules/audio_coding/acm2/acm_receive_test_oldapi.h
new file mode 100644
index 0000000000..3010ec72b1
--- /dev/null
+++ b/webrtc/modules/audio_coding/acm2/acm_receive_test_oldapi.h
@@ -0,0 +1,97 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_ACM2_ACM_RECEIVE_TEST_OLDAPI_H_
+#define WEBRTC_MODULES_AUDIO_CODING_ACM2_ACM_RECEIVE_TEST_OLDAPI_H_
+
+#include <string>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/system_wrappers/include/clock.h"
+
+namespace webrtc {
+class AudioCodingModule;
+class AudioDecoder;
+struct CodecInst;
+
+namespace test {
+class AudioSink;
+class PacketSource;
+
+class AcmReceiveTestOldApi {
+ public:
+ enum NumOutputChannels {
+ kArbitraryChannels = 0,
+ kMonoOutput = 1,
+ kStereoOutput = 2
+ };
+
+ AcmReceiveTestOldApi(PacketSource* packet_source,
+ AudioSink* audio_sink,
+ int output_freq_hz,
+ NumOutputChannels exptected_output_channels);
+ virtual ~AcmReceiveTestOldApi() {}
+
+ // Registers the codecs with default parameters from ACM.
+ void RegisterDefaultCodecs();
+
+ // Registers codecs with payload types matching the pre-encoded NetEq test
+ // files.
+ void RegisterNetEqTestCodecs();
+
+ int RegisterExternalReceiveCodec(int rtp_payload_type,
+ AudioDecoder* external_decoder,
+ int sample_rate_hz,
+ int num_channels,
+ const std::string& name);
+
+ // Runs the test. Failures are reported through gtest assertions.
+ void Run();
+
+ protected:
+ // Method is called after each block of output audio is received from ACM.
+ virtual void AfterGetAudio() {}
+
+ SimulatedClock clock_;
+ rtc::scoped_ptr<AudioCodingModule> acm_;
+ PacketSource* packet_source_;
+ AudioSink* audio_sink_;
+ int output_freq_hz_;
+ NumOutputChannels exptected_output_channels_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(AcmReceiveTestOldApi);
+};
+
+// This test toggles the output frequency every |toggle_period_ms|. The test
+// starts with |output_freq_hz_1|. Except for the toggling, it does the same
+// thing as AcmReceiveTestOldApi.
+class AcmReceiveTestToggleOutputFreqOldApi : public AcmReceiveTestOldApi {
+ public:
+ AcmReceiveTestToggleOutputFreqOldApi(
+ PacketSource* packet_source,
+ AudioSink* audio_sink,
+ int output_freq_hz_1,
+ int output_freq_hz_2,
+ int toggle_period_ms,
+ NumOutputChannels exptected_output_channels);
+
+ protected:
+ void AfterGetAudio() override;
+
+ const int output_freq_hz_1_;
+ const int output_freq_hz_2_;
+ const int toggle_period_ms_;
+ int64_t last_toggle_time_ms_;
+};
+
+} // namespace test
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_ACM2_ACM_RECEIVE_TEST_OLDAPI_H_
diff --git a/webrtc/modules/audio_coding/acm2/acm_receiver.cc b/webrtc/modules/audio_coding/acm2/acm_receiver.cc
new file mode 100644
index 0000000000..f45d5d3414
--- /dev/null
+++ b/webrtc/modules/audio_coding/acm2/acm_receiver.cc
@@ -0,0 +1,541 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/acm2/acm_receiver.h"
+
+#include <stdlib.h> // malloc
+
+#include <algorithm> // sort
+#include <vector>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/format_macros.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
+#include "webrtc/common_types.h"
+#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
+#include "webrtc/modules/audio_coding/acm2/acm_resampler.h"
+#include "webrtc/modules/audio_coding/acm2/call_statistics.h"
+#include "webrtc/modules/audio_coding/neteq/include/neteq.h"
+#include "webrtc/system_wrappers/include/clock.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+namespace webrtc {
+
+namespace acm2 {
+
+namespace {
+
+// |vad_activity_| field of |audio_frame| is set to |previous_audio_activity_|
+// before the call to this function.
+void SetAudioFrameActivityAndType(bool vad_enabled,
+ NetEqOutputType type,
+ AudioFrame* audio_frame) {
+ if (vad_enabled) {
+ switch (type) {
+ case kOutputNormal: {
+ audio_frame->vad_activity_ = AudioFrame::kVadActive;
+ audio_frame->speech_type_ = AudioFrame::kNormalSpeech;
+ break;
+ }
+ case kOutputVADPassive: {
+ audio_frame->vad_activity_ = AudioFrame::kVadPassive;
+ audio_frame->speech_type_ = AudioFrame::kNormalSpeech;
+ break;
+ }
+ case kOutputCNG: {
+ audio_frame->vad_activity_ = AudioFrame::kVadPassive;
+ audio_frame->speech_type_ = AudioFrame::kCNG;
+ break;
+ }
+ case kOutputPLC: {
+ // Don't change |audio_frame->vad_activity_|, it should be the same as
+ // |previous_audio_activity_|.
+ audio_frame->speech_type_ = AudioFrame::kPLC;
+ break;
+ }
+ case kOutputPLCtoCNG: {
+ audio_frame->vad_activity_ = AudioFrame::kVadPassive;
+ audio_frame->speech_type_ = AudioFrame::kPLCCNG;
+ break;
+ }
+ default:
+ assert(false);
+ }
+ } else {
+ // Always return kVadUnknown when receive VAD is inactive.
+ audio_frame->vad_activity_ = AudioFrame::kVadUnknown;
+ switch (type) {
+ case kOutputNormal: {
+ audio_frame->speech_type_ = AudioFrame::kNormalSpeech;
+ break;
+ }
+ case kOutputCNG: {
+ audio_frame->speech_type_ = AudioFrame::kCNG;
+ break;
+ }
+ case kOutputPLC: {
+ audio_frame->speech_type_ = AudioFrame::kPLC;
+ break;
+ }
+ case kOutputPLCtoCNG: {
+ audio_frame->speech_type_ = AudioFrame::kPLCCNG;
+ break;
+ }
+ case kOutputVADPassive: {
+ // Normally, we should not get any VAD decision if post-decoding VAD is
+ // not active. However, if post-decoding VAD has been active and then
+ // disabled, we might end up here for a couple of frames.
+ audio_frame->speech_type_ = AudioFrame::kNormalSpeech;
+ LOG(WARNING) << "Post-decoding VAD is disabled but output is "
+ << "labeled VAD-passive";
+ break;
+ }
+ default:
+ assert(false);
+ }
+ }
+}
+
+// Is the given codec a CNG codec?
+// TODO(kwiberg): Move to RentACodec.
+bool IsCng(int codec_id) {
+ auto i = RentACodec::CodecIdFromIndex(codec_id);
+ return (i && (*i == RentACodec::CodecId::kCNNB ||
+ *i == RentACodec::CodecId::kCNWB ||
+ *i == RentACodec::CodecId::kCNSWB ||
+ *i == RentACodec::CodecId::kCNFB));
+}
+
+} // namespace
+
+AcmReceiver::AcmReceiver(const AudioCodingModule::Config& config)
+ : crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
+ id_(config.id),
+ last_audio_decoder_(nullptr),
+ previous_audio_activity_(AudioFrame::kVadPassive),
+ audio_buffer_(new int16_t[AudioFrame::kMaxDataSizeSamples]),
+ last_audio_buffer_(new int16_t[AudioFrame::kMaxDataSizeSamples]),
+ neteq_(NetEq::Create(config.neteq_config)),
+ vad_enabled_(config.neteq_config.enable_post_decode_vad),
+ clock_(config.clock),
+ resampled_last_output_frame_(true) {
+ assert(clock_);
+ memset(audio_buffer_.get(), 0, AudioFrame::kMaxDataSizeSamples);
+ memset(last_audio_buffer_.get(), 0, AudioFrame::kMaxDataSizeSamples);
+}
+
+AcmReceiver::~AcmReceiver() {
+ delete neteq_;
+}
+
+int AcmReceiver::SetMinimumDelay(int delay_ms) {
+ if (neteq_->SetMinimumDelay(delay_ms))
+ return 0;
+ LOG(LERROR) << "AcmReceiver::SetExtraDelay " << delay_ms;
+ return -1;
+}
+
+int AcmReceiver::SetMaximumDelay(int delay_ms) {
+ if (neteq_->SetMaximumDelay(delay_ms))
+ return 0;
+ LOG(LERROR) << "AcmReceiver::SetExtraDelay " << delay_ms;
+ return -1;
+}
+
+int AcmReceiver::LeastRequiredDelayMs() const {
+ return neteq_->LeastRequiredDelayMs();
+}
+
+rtc::Optional<int> AcmReceiver::last_packet_sample_rate_hz() const {
+ CriticalSectionScoped lock(crit_sect_.get());
+ return last_packet_sample_rate_hz_;
+}
+
+int AcmReceiver::last_output_sample_rate_hz() const {
+ return neteq_->last_output_sample_rate_hz();
+}
+
+int AcmReceiver::InsertPacket(const WebRtcRTPHeader& rtp_header,
+ rtc::ArrayView<const uint8_t> incoming_payload) {
+ uint32_t receive_timestamp = 0;
+ const RTPHeader* header = &rtp_header.header; // Just a shorthand.
+
+ {
+ CriticalSectionScoped lock(crit_sect_.get());
+
+ const Decoder* decoder = RtpHeaderToDecoder(*header, incoming_payload[0]);
+ if (!decoder) {
+ LOG_F(LS_ERROR) << "Payload-type "
+ << static_cast<int>(header->payloadType)
+ << " is not registered.";
+ return -1;
+ }
+ const int sample_rate_hz = [&decoder] {
+ const auto ci = RentACodec::CodecIdFromIndex(decoder->acm_codec_id);
+ return ci ? RentACodec::CodecInstById(*ci)->plfreq : -1;
+ }();
+ receive_timestamp = NowInTimestamp(sample_rate_hz);
+
+ // If this is CNG while the audio codec is not mono, skip pushing the
+ // packet into NetEq.
+ if (IsCng(decoder->acm_codec_id) && last_audio_decoder_ &&
+ last_audio_decoder_->channels > 1)
+ return 0;
+ if (!IsCng(decoder->acm_codec_id) &&
+ decoder->acm_codec_id !=
+ *RentACodec::CodecIndexFromId(RentACodec::CodecId::kAVT)) {
+ last_audio_decoder_ = decoder;
+ last_packet_sample_rate_hz_ = rtc::Optional<int>(decoder->sample_rate_hz);
+ }
+
+ } // |crit_sect_| is released.
+
+ if (neteq_->InsertPacket(rtp_header, incoming_payload, receive_timestamp) <
+ 0) {
+ LOG(LERROR) << "AcmReceiver::InsertPacket "
+ << static_cast<int>(header->payloadType)
+ << " Failed to insert packet";
+ return -1;
+ }
+ return 0;
+}
+
+int AcmReceiver::GetAudio(int desired_freq_hz, AudioFrame* audio_frame) {
+ enum NetEqOutputType type;
+ size_t samples_per_channel;
+ size_t num_channels;
+
+ // Accessing members, take the lock.
+ CriticalSectionScoped lock(crit_sect_.get());
+
+ // Always write the output to |audio_buffer_| first.
+ if (neteq_->GetAudio(AudioFrame::kMaxDataSizeSamples,
+ audio_buffer_.get(),
+ &samples_per_channel,
+ &num_channels,
+ &type) != NetEq::kOK) {
+ LOG(LERROR) << "AcmReceiver::GetAudio - NetEq Failed.";
+ return -1;
+ }
+
+ const int current_sample_rate_hz = neteq_->last_output_sample_rate_hz();
+
+ // Update if resampling is required.
+ const bool need_resampling =
+ (desired_freq_hz != -1) && (current_sample_rate_hz != desired_freq_hz);
+
+ if (need_resampling && !resampled_last_output_frame_) {
+ // Prime the resampler with the last frame.
+ int16_t temp_output[AudioFrame::kMaxDataSizeSamples];
+ int samples_per_channel_int = resampler_.Resample10Msec(
+ last_audio_buffer_.get(), current_sample_rate_hz, desired_freq_hz,
+ num_channels, AudioFrame::kMaxDataSizeSamples, temp_output);
+ if (samples_per_channel_int < 0) {
+ LOG(LERROR) << "AcmReceiver::GetAudio - "
+ "Resampling last_audio_buffer_ failed.";
+ return -1;
+ }
+ samples_per_channel = static_cast<size_t>(samples_per_channel_int);
+ }
+
+ // The audio in |audio_buffer_| is transferred to |audio_frame| below, either
+ // through resampling or through a straight memcpy.
+ // TODO(henrik.lundin) Glitches in the output may appear if the output rate
+ // from NetEq changes. See WebRTC issue 3923.
+ if (need_resampling) {
+ int samples_per_channel_int = resampler_.Resample10Msec(
+ audio_buffer_.get(), current_sample_rate_hz, desired_freq_hz,
+ num_channels, AudioFrame::kMaxDataSizeSamples, audio_frame->data_);
+ if (samples_per_channel_int < 0) {
+ LOG(LERROR) << "AcmReceiver::GetAudio - Resampling audio_buffer_ failed.";
+ return -1;
+ }
+ samples_per_channel = static_cast<size_t>(samples_per_channel_int);
+ resampled_last_output_frame_ = true;
+ } else {
+ resampled_last_output_frame_ = false;
+ // We might end up here ONLY if the codec is changed.
+ memcpy(audio_frame->data_,
+ audio_buffer_.get(),
+ samples_per_channel * num_channels * sizeof(int16_t));
+ }
+
+ // Swap buffers, so that the current audio is stored in |last_audio_buffer_|
+ // for next time.
+ audio_buffer_.swap(last_audio_buffer_);
+
+ audio_frame->num_channels_ = num_channels;
+ audio_frame->samples_per_channel_ = samples_per_channel;
+ audio_frame->sample_rate_hz_ = static_cast<int>(samples_per_channel * 100);
+
+ // Should set |vad_activity_| before calling SetAudioFrameActivityAndType().
+ audio_frame->vad_activity_ = previous_audio_activity_;
+ SetAudioFrameActivityAndType(vad_enabled_, type, audio_frame);
+ previous_audio_activity_ = audio_frame->vad_activity_;
+ call_stats_.DecodedByNetEq(audio_frame->speech_type_);
+
+ // Computes the RTP timestamp of the first sample in |audio_frame| from
+ // |GetPlayoutTimestamp|, which is the timestamp of the last sample of
+ // |audio_frame|.
+ uint32_t playout_timestamp = 0;
+ if (GetPlayoutTimestamp(&playout_timestamp)) {
+ audio_frame->timestamp_ = playout_timestamp -
+ static_cast<uint32_t>(audio_frame->samples_per_channel_);
+ } else {
+ // Remains 0 until we have a valid |playout_timestamp|.
+ audio_frame->timestamp_ = 0;
+ }
+
+ return 0;
+}
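+
+// Note on the rate computation above: each GetAudio() call produces exactly
+// 10 ms of audio, so samples_per_channel * 100 recovers the sample rate in
+// Hz (e.g. 480 samples per channel -> 48000 Hz).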
+
+int32_t AcmReceiver::AddCodec(int acm_codec_id,
+ uint8_t payload_type,
+ size_t channels,
+ int sample_rate_hz,
+ AudioDecoder* audio_decoder,
+ const std::string& name) {
+ const auto neteq_decoder = [acm_codec_id, channels]() -> NetEqDecoder {
+ if (acm_codec_id == -1)
+ return NetEqDecoder::kDecoderArbitrary; // External decoder.
+ const rtc::Optional<RentACodec::CodecId> cid =
+ RentACodec::CodecIdFromIndex(acm_codec_id);
+ RTC_DCHECK(cid) << "Invalid codec index: " << acm_codec_id;
+ const rtc::Optional<NetEqDecoder> ned =
+ RentACodec::NetEqDecoderFromCodecId(*cid, channels);
+ RTC_DCHECK(ned) << "Invalid codec ID: " << static_cast<int>(*cid);
+ return *ned;
+ }();
+
+ CriticalSectionScoped lock(crit_sect_.get());
+
+ // Check whether this codec has been registered before.
+ auto it = decoders_.find(payload_type);
+ if (it != decoders_.end()) {
+ const Decoder& decoder = it->second;
+ if (acm_codec_id != -1 && decoder.acm_codec_id == acm_codec_id &&
+ decoder.channels == channels &&
+ decoder.sample_rate_hz == sample_rate_hz) {
+ // Re-registering the same codec. Do nothing and return.
+ return 0;
+ }
+
+ // Changing codec. First unregister the old codec, then register the new
+ // one.
+ if (neteq_->RemovePayloadType(payload_type) != NetEq::kOK) {
+ LOG(LERROR) << "Cannot remove payload " << static_cast<int>(payload_type);
+ return -1;
+ }
+
+ decoders_.erase(it);
+ }
+
+ int ret_val;
+ if (!audio_decoder) {
+ ret_val = neteq_->RegisterPayloadType(neteq_decoder, name, payload_type);
+ } else {
+ ret_val = neteq_->RegisterExternalDecoder(
+ audio_decoder, neteq_decoder, name, payload_type, sample_rate_hz);
+ }
+ if (ret_val != NetEq::kOK) {
+ LOG(LERROR) << "AcmReceiver::AddCodec " << acm_codec_id
+ << static_cast<int>(payload_type)
+ << " channels: " << channels;
+ return -1;
+ }
+
+ Decoder decoder;
+ decoder.acm_codec_id = acm_codec_id;
+ decoder.payload_type = payload_type;
+ decoder.channels = channels;
+ decoder.sample_rate_hz = sample_rate_hz;
+ decoders_[payload_type] = decoder;
+ return 0;
+}
+
+void AcmReceiver::EnableVad() {
+ neteq_->EnableVad();
+ CriticalSectionScoped lock(crit_sect_.get());
+ vad_enabled_ = true;
+}
+
+void AcmReceiver::DisableVad() {
+ neteq_->DisableVad();
+ CriticalSectionScoped lock(crit_sect_.get());
+ vad_enabled_ = false;
+}
+
+void AcmReceiver::FlushBuffers() {
+ neteq_->FlushBuffers();
+}
+
+// If failed in removing one of the codecs, this method continues to remove as
+// many as it can.
+int AcmReceiver::RemoveAllCodecs() {
+ int ret_val = 0;
+ CriticalSectionScoped lock(crit_sect_.get());
+ for (auto it = decoders_.begin(); it != decoders_.end(); ) {
+ auto cur = it;
+ ++it; // it will be valid even if we erase cur
+ if (neteq_->RemovePayloadType(cur->second.payload_type) == 0) {
+ decoders_.erase(cur);
+ } else {
+ LOG_F(LS_ERROR) << "Cannot remove payload "
+ << static_cast<int>(cur->second.payload_type);
+ ret_val = -1;
+ }
+ }
+
+ // No codec is registered, invalidate last audio decoder.
+ last_audio_decoder_ = nullptr;
+ last_packet_sample_rate_hz_ = rtc::Optional<int>();
+ return ret_val;
+}
+
+int AcmReceiver::RemoveCodec(uint8_t payload_type) {
+ CriticalSectionScoped lock(crit_sect_.get());
+ auto it = decoders_.find(payload_type);
+ if (it == decoders_.end()) { // Such a payload-type is not registered.
+ return 0;
+ }
+ if (neteq_->RemovePayloadType(payload_type) != NetEq::kOK) {
+ LOG(LERROR) << "AcmReceiver::RemoveCodec" << static_cast<int>(payload_type);
+ return -1;
+ }
+ if (last_audio_decoder_ == &it->second) {
+ last_audio_decoder_ = nullptr;
+ last_packet_sample_rate_hz_ = rtc::Optional<int>();
+ }
+ decoders_.erase(it);
+ return 0;
+}
+
+void AcmReceiver::set_id(int id) {
+ CriticalSectionScoped lock(crit_sect_.get());
+ id_ = id;
+}
+
+bool AcmReceiver::GetPlayoutTimestamp(uint32_t* timestamp) {
+ return neteq_->GetPlayoutTimestamp(timestamp);
+}
+
+int AcmReceiver::LastAudioCodec(CodecInst* codec) const {
+ CriticalSectionScoped lock(crit_sect_.get());
+ if (!last_audio_decoder_) {
+ return -1;
+ }
+ *codec = *RentACodec::CodecInstById(
+ *RentACodec::CodecIdFromIndex(last_audio_decoder_->acm_codec_id));
+ codec->pltype = last_audio_decoder_->payload_type;
+ codec->channels = last_audio_decoder_->channels;
+ codec->plfreq = last_audio_decoder_->sample_rate_hz;
+ return 0;
+}
+
+void AcmReceiver::GetNetworkStatistics(NetworkStatistics* acm_stat) {
+ NetEqNetworkStatistics neteq_stat;
+ // The NetEq function always returns zero, so we don't check the return value.
+ neteq_->NetworkStatistics(&neteq_stat);
+
+ acm_stat->currentBufferSize = neteq_stat.current_buffer_size_ms;
+ acm_stat->preferredBufferSize = neteq_stat.preferred_buffer_size_ms;
+ acm_stat->jitterPeaksFound = neteq_stat.jitter_peaks_found;
+ acm_stat->currentPacketLossRate = neteq_stat.packet_loss_rate;
+ acm_stat->currentDiscardRate = neteq_stat.packet_discard_rate;
+ acm_stat->currentExpandRate = neteq_stat.expand_rate;
+ acm_stat->currentSpeechExpandRate = neteq_stat.speech_expand_rate;
+ acm_stat->currentPreemptiveRate = neteq_stat.preemptive_rate;
+ acm_stat->currentAccelerateRate = neteq_stat.accelerate_rate;
+ acm_stat->currentSecondaryDecodedRate = neteq_stat.secondary_decoded_rate;
+ acm_stat->clockDriftPPM = neteq_stat.clockdrift_ppm;
+ acm_stat->addedSamples = neteq_stat.added_zero_samples;
+ acm_stat->meanWaitingTimeMs = neteq_stat.mean_waiting_time_ms;
+ acm_stat->medianWaitingTimeMs = neteq_stat.median_waiting_time_ms;
+ acm_stat->minWaitingTimeMs = neteq_stat.min_waiting_time_ms;
+ acm_stat->maxWaitingTimeMs = neteq_stat.max_waiting_time_ms;
+}
+
+int AcmReceiver::DecoderByPayloadType(uint8_t payload_type,
+ CodecInst* codec) const {
+ CriticalSectionScoped lock(crit_sect_.get());
+ auto it = decoders_.find(payload_type);
+ if (it == decoders_.end()) {
+ LOG(LERROR) << "AcmReceiver::DecoderByPayloadType "
+ << static_cast<int>(payload_type);
+ return -1;
+ }
+ const Decoder& decoder = it->second;
+ *codec = *RentACodec::CodecInstById(
+ *RentACodec::CodecIdFromIndex(decoder.acm_codec_id));
+ codec->pltype = decoder.payload_type;
+ codec->channels = decoder.channels;
+ codec->plfreq = decoder.sample_rate_hz;
+ return 0;
+}
+
+int AcmReceiver::EnableNack(size_t max_nack_list_size) {
+ neteq_->EnableNack(max_nack_list_size);
+ return 0;
+}
+
+void AcmReceiver::DisableNack() {
+ neteq_->DisableNack();
+}
+
+std::vector<uint16_t> AcmReceiver::GetNackList(
+ int64_t round_trip_time_ms) const {
+ return neteq_->GetNackList(round_trip_time_ms);
+}
+
+void AcmReceiver::ResetInitialDelay() {
+ neteq_->SetMinimumDelay(0);
+ // TODO(turajs): Should NetEq Buffer be flushed?
+}
+
+const AcmReceiver::Decoder* AcmReceiver::RtpHeaderToDecoder(
+ const RTPHeader& rtp_header,
+ uint8_t payload_type) const {
+ auto it = decoders_.find(rtp_header.payloadType);
+ const auto red_index =
+ RentACodec::CodecIndexFromId(RentACodec::CodecId::kRED);
+ if (red_index && // This ensures that RED is defined in WebRTC.
+ it != decoders_.end() && it->second.acm_codec_id == *red_index) {
+ // This is a RED packet, get the payload of the audio codec.
+ it = decoders_.find(payload_type & 0x7F);
+ }
+
+ // Check if the payload is registered.
+ return it != decoders_.end() ? &it->second : nullptr;
+}
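+
+// Example of the RED unwrapping above: per RFC 2198, the first payload byte
+// of a RED block carries the follow bit in its MSB, so a first byte of
+// 0x80 | 103 maps back to payload type 103 after the & 0x7F mask.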
+
+uint32_t AcmReceiver::NowInTimestamp(int decoder_sampling_rate) const {
+ // Down-cast the time to (32-6) bits since we only care about the least
+ // significant bits; (32-6) bits cover 2^(32-6) = 67108864 ms. Masking off
+ // the 6 most significant bits of the 32-bit value ensures that the
+ // conversion from milliseconds to timestamp does not overflow.
+ const uint32_t now_in_ms = static_cast<uint32_t>(
+ clock_->TimeInMilliseconds() & 0x03ffffff);
+ return static_cast<uint32_t>(
+ (decoder_sampling_rate / 1000) * now_in_ms);
+}
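+
+// Worked example for the masking above: with a 48 kHz decoder there are 48
+// timestamp ticks per millisecond, and the masked clock value is at most
+// 0x03ffffff (about 2^26 ms), so the product stays below 48 * 2^26 < 2^32
+// and cannot overflow the uint32_t result.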
+
+void AcmReceiver::GetDecodingCallStatistics(
+ AudioDecodingCallStats* stats) const {
+ CriticalSectionScoped lock(crit_sect_.get());
+ *stats = call_stats_.GetDecodingStatistics();
+}
+
+} // namespace acm2
+
+} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/acm2/acm_receiver.h b/webrtc/modules/audio_coding/acm2/acm_receiver.h
new file mode 100644
index 0000000000..b150612f69
--- /dev/null
+++ b/webrtc/modules/audio_coding/acm2/acm_receiver.h
@@ -0,0 +1,307 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_ACM2_ACM_RECEIVER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_ACM2_ACM_RECEIVER_H_
+
+#include <map>
+#include <string>
+#include <vector>
+
+#include "webrtc/base/array_view.h"
+#include "webrtc/base/optional.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/thread_annotations.h"
+#include "webrtc/common_audio/vad/include/webrtc_vad.h"
+#include "webrtc/engine_configurations.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module.h"
+#include "webrtc/modules/audio_coding/acm2/acm_resampler.h"
+#include "webrtc/modules/audio_coding/acm2/call_statistics.h"
+#include "webrtc/modules/audio_coding/acm2/initial_delay_manager.h"
+#include "webrtc/modules/audio_coding/neteq/include/neteq.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+struct CodecInst;
+class CriticalSectionWrapper;
+class NetEq;
+
+namespace acm2 {
+
+class AcmReceiver {
+ public:
+ struct Decoder {
+ int acm_codec_id;
+ uint8_t payload_type;
+ // This field is meaningful for codecs where both mono and
+ // stereo versions are registered under the same ID.
+ size_t channels;
+ int sample_rate_hz;
+ };
+
+  // Constructor of the class.
+ explicit AcmReceiver(const AudioCodingModule::Config& config);
+
+ // Destructor of the class.
+ ~AcmReceiver();
+
+ //
+ // Inserts a payload with its associated RTP-header into NetEq.
+ //
+ // Input:
+ // - rtp_header : RTP header for the incoming payload containing
+ // information about payload type, sequence number,
+ // timestamp, SSRC and marker bit.
+  //   - incoming_payload   : Incoming audio payload; its size gives the
+  //                          length in bytes.
+ //
+ // Return value : 0 if OK.
+ // <0 if NetEq returned an error.
+ //
+ int InsertPacket(const WebRtcRTPHeader& rtp_header,
+ rtc::ArrayView<const uint8_t> incoming_payload);
+
+ //
+ // Asks NetEq for 10 milliseconds of decoded audio.
+ //
+ // Input:
+  //   -desired_freq_hz    : specifies the sampling rate [Hz] of the output
+  //                         audio. If set to -1, no resampling is performed
+  //                         and the audio is returned at the sampling rate
+  //                         of the decoder.
+ //
+ // Output:
+  //   -audio_frame        : an audio frame where the output data and
+ // associated parameters are written to.
+ //
+ // Return value : 0 if OK.
+ // -1 if NetEq returned an error.
+ //
+ int GetAudio(int desired_freq_hz, AudioFrame* audio_frame);
+
+ //
+ // Adds a new codec to the NetEq codec database.
+ //
+ // Input:
+ // - acm_codec_id : ACM codec ID; -1 means external decoder.
+ // - payload_type : payload type.
+ // - sample_rate_hz : sample rate.
+ // - audio_decoder : pointer to a decoder object. If it's null, then
+ // NetEq will internally create a decoder object
+ // based on the value of |acm_codec_id| (which
+ // mustn't be -1). Otherwise, NetEq will use the
+ // given decoder for the given payload type. NetEq
+ // won't take ownership of the decoder; it's up to
+ // the caller to delete it when it's no longer
+ // needed.
+ //
+ // Providing an existing decoder object here is
+ // necessary for external decoders, but may also be
+ // used for built-in decoders if NetEq doesn't have
+ // all the info it needs to construct them properly
+ // (e.g. iSAC, where the decoder needs to be paired
+ // with an encoder).
+ //
+ // Return value : 0 if OK.
+ // <0 if NetEq returned an error.
+ //
+ int AddCodec(int acm_codec_id,
+ uint8_t payload_type,
+ size_t channels,
+ int sample_rate_hz,
+ AudioDecoder* audio_decoder,
+ const std::string& name);
+
+ //
+  // Sets a minimum delay for the packet buffer. The given delay is
+  // maintained, unless channel conditions dictate a higher delay.
+ //
+ // Input:
+ // - delay_ms : minimum delay in milliseconds.
+ //
+ // Return value : 0 if OK.
+ // <0 if NetEq returned an error.
+ //
+ int SetMinimumDelay(int delay_ms);
+
+ //
+  // Sets a maximum delay [ms] for the packet buffer. The target delay does
+  // not exceed the given value, even if channel conditions require a longer
+  // delay.
+ //
+ // Input:
+ // - delay_ms : maximum delay in milliseconds.
+ //
+ // Return value : 0 if OK.
+ // <0 if NetEq returned an error.
+ //
+ int SetMaximumDelay(int delay_ms);
+
+ //
+  // Gets the least required delay, computed based on channel conditions.
+  // Note that this is before applying any user-defined limits (specified by
+  // calling SetMinimumDelay() and/or SetMaximumDelay()).
+ //
+ int LeastRequiredDelayMs() const;
+
+ //
+ // Resets the initial delay to zero.
+ //
+ void ResetInitialDelay();
+
+ // Returns the sample rate of the decoder associated with the last incoming
+ // packet. If no packet of a registered non-CNG codec has been received, the
+ // return value is empty. Also, if the decoder was unregistered since the last
+ // packet was inserted, the return value is empty.
+ rtc::Optional<int> last_packet_sample_rate_hz() const;
+
+ // Returns last_output_sample_rate_hz from the NetEq instance.
+ int last_output_sample_rate_hz() const;
+
+ //
+ // Get the current network statistics from NetEq.
+ //
+ // Output:
+ // - statistics : The current network statistics.
+ //
+ void GetNetworkStatistics(NetworkStatistics* statistics);
+
+ //
+ // Enable post-decoding VAD.
+ //
+ void EnableVad();
+
+ //
+ // Disable post-decoding VAD.
+ //
+ void DisableVad();
+
+ //
+ // Returns whether post-decoding VAD is enabled (true) or disabled (false).
+ //
+ bool vad_enabled() const { return vad_enabled_; }
+
+ //
+ // Flushes the NetEq packet and speech buffers.
+ //
+ void FlushBuffers();
+
+ //
+ // Removes a payload-type from the NetEq codec database.
+ //
+ // Input:
+ // - payload_type : the payload-type to be removed.
+ //
+ // Return value : 0 if OK.
+ // -1 if an error occurred.
+ //
+ int RemoveCodec(uint8_t payload_type);
+
+ //
+ // Remove all registered codecs.
+ //
+ int RemoveAllCodecs();
+
+ //
+ // Set ID.
+ //
+ void set_id(int id); // TODO(turajs): can be inline.
+
+ //
+ // Gets the RTP timestamp of the last sample delivered by GetAudio().
+ // Returns true if the RTP timestamp is valid, otherwise false.
+ //
+ bool GetPlayoutTimestamp(uint32_t* timestamp);
+
+ //
+ // Get the audio codec associated with the last non-CNG/non-DTMF received
+  // payload. Returns -1 if no non-CNG/non-DTMF packet has been received;
+  // otherwise returns 0.
+ //
+ int LastAudioCodec(CodecInst* codec) const;
+
+ //
+ // Get a decoder given its registered payload-type.
+ //
+ // Input:
+ // -payload_type : the payload-type of the codec to be retrieved.
+ //
+ // Output:
+ // -codec : codec associated with the given payload-type.
+ //
+ // Return value : 0 if succeeded.
+ // -1 if failed, e.g. given payload-type is not
+ // registered.
+ //
+ int DecoderByPayloadType(uint8_t payload_type,
+ CodecInst* codec) const;
+
+ //
+ // Enable NACK and set the maximum size of the NACK list. If NACK is already
+ // enabled then the maximum NACK list size is modified accordingly.
+ //
+ // Input:
+ // -max_nack_list_size : maximum NACK list size
+  //                       must be positive (non-zero) and less than or
+ // equal to |Nack::kNackListSizeLimit|
+ // Return value
+ // : 0 if succeeded.
+ // -1 if failed
+ //
+ int EnableNack(size_t max_nack_list_size);
+
+ // Disable NACK.
+ void DisableNack();
+
+ //
+ // Get a list of packets to be retransmitted.
+ //
+ // Input:
+ // -round_trip_time_ms : estimate of the round-trip-time (in milliseconds).
+ // Return value : list of packets to be retransmitted.
+ //
+ std::vector<uint16_t> GetNackList(int64_t round_trip_time_ms) const;
+
+ //
+ // Get statistics of calls to GetAudio().
+ void GetDecodingCallStatistics(AudioDecodingCallStats* stats) const;
+
+ private:
+ const Decoder* RtpHeaderToDecoder(const RTPHeader& rtp_header,
+ uint8_t payload_type) const
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ uint32_t NowInTimestamp(int decoder_sampling_rate) const;
+
+ rtc::scoped_ptr<CriticalSectionWrapper> crit_sect_;
+ int id_; // TODO(henrik.lundin) Make const.
+ const Decoder* last_audio_decoder_ GUARDED_BY(crit_sect_);
+ AudioFrame::VADActivity previous_audio_activity_ GUARDED_BY(crit_sect_);
+ ACMResampler resampler_ GUARDED_BY(crit_sect_);
+ // Used in GetAudio, declared as member to avoid allocating every 10ms.
+ // TODO(henrik.lundin) Stack-allocate in GetAudio instead?
+ rtc::scoped_ptr<int16_t[]> audio_buffer_ GUARDED_BY(crit_sect_);
+ rtc::scoped_ptr<int16_t[]> last_audio_buffer_ GUARDED_BY(crit_sect_);
+ CallStatistics call_stats_ GUARDED_BY(crit_sect_);
+ NetEq* neteq_;
+  // The decoders map is keyed by payload type.
+ std::map<uint8_t, Decoder> decoders_ GUARDED_BY(crit_sect_);
+ bool vad_enabled_;
+ Clock* clock_; // TODO(henrik.lundin) Make const if possible.
+ bool resampled_last_output_frame_ GUARDED_BY(crit_sect_);
+ rtc::Optional<int> last_packet_sample_rate_hz_ GUARDED_BY(crit_sect_);
+};
+
+} // namespace acm2
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_ACM2_ACM_RECEIVER_H_
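To make the receive path above concrete, here is a minimal usage sketch (illustrative only; the 16 kHz output rate is an assumption, and error handling is elided):

#include "webrtc/modules/audio_coding/acm2/acm_receiver.h"

namespace {

// Push one RTP packet into NetEq and pull 10 ms of audio back out.
void ReceiveAndPlayOut(webrtc::acm2::AcmReceiver* receiver,
                       const webrtc::WebRtcRTPHeader& rtp_header,
                       rtc::ArrayView<const uint8_t> payload) {
  if (receiver->InsertPacket(rtp_header, payload) < 0)
    return;  // NetEq rejected the packet.
  webrtc::AudioFrame frame;
  // 16000 requests resampled output; -1 would keep the decoder's rate.
  receiver->GetAudio(16000, &frame);
}

}  // namespace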
diff --git a/webrtc/modules/audio_coding/acm2/acm_receiver_unittest_oldapi.cc b/webrtc/modules/audio_coding/acm2/acm_receiver_unittest_oldapi.cc
new file mode 100644
index 0000000000..24ecc694ff
--- /dev/null
+++ b/webrtc/modules/audio_coding/acm2/acm_receiver_unittest_oldapi.cc
@@ -0,0 +1,398 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/acm2/acm_receiver.h"
+
+#include <algorithm> // std::min
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module.h"
+#include "webrtc/modules/audio_coding/acm2/audio_coding_module_impl.h"
+#include "webrtc/modules/audio_coding/neteq/tools/rtp_generator.h"
+#include "webrtc/system_wrappers/include/clock.h"
+#include "webrtc/test/test_suite.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+namespace webrtc {
+
+namespace acm2 {
+namespace {
+
+bool CodecsEqual(const CodecInst& codec_a, const CodecInst& codec_b) {
+ if (strcmp(codec_a.plname, codec_b.plname) != 0 ||
+ codec_a.plfreq != codec_b.plfreq ||
+ codec_a.pltype != codec_b.pltype ||
+ codec_b.channels != codec_a.channels)
+ return false;
+ return true;
+}
+
+struct CodecIdInst {
+ explicit CodecIdInst(RentACodec::CodecId codec_id) {
+ const auto codec_ix = RentACodec::CodecIndexFromId(codec_id);
+ EXPECT_TRUE(codec_ix);
+ id = *codec_ix;
+ const auto codec_inst = RentACodec::CodecInstById(codec_id);
+ EXPECT_TRUE(codec_inst);
+ inst = *codec_inst;
+ }
+ int id;
+ CodecInst inst;
+};
+
+} // namespace
+
+class AcmReceiverTestOldApi : public AudioPacketizationCallback,
+ public ::testing::Test {
+ protected:
+ AcmReceiverTestOldApi()
+ : timestamp_(0),
+ packet_sent_(false),
+ last_packet_send_timestamp_(timestamp_),
+ last_frame_type_(kEmptyFrame) {
+ AudioCodingModule::Config config;
+ acm_.reset(new AudioCodingModuleImpl(config));
+ receiver_.reset(new AcmReceiver(config));
+ }
+
+ ~AcmReceiverTestOldApi() {}
+
+ void SetUp() override {
+ ASSERT_TRUE(receiver_.get() != NULL);
+ ASSERT_TRUE(acm_.get() != NULL);
+ codecs_ = RentACodec::Database();
+
+ acm_->InitializeReceiver();
+ acm_->RegisterTransportCallback(this);
+
+ rtp_header_.header.sequenceNumber = 0;
+ rtp_header_.header.timestamp = 0;
+ rtp_header_.header.markerBit = false;
+ rtp_header_.header.ssrc = 0x12345678; // Arbitrary.
+ rtp_header_.header.numCSRCs = 0;
+ rtp_header_.header.payloadType = 0;
+ rtp_header_.frameType = kAudioFrameSpeech;
+ rtp_header_.type.Audio.isCNG = false;
+ }
+
+ void TearDown() override {}
+
+ void InsertOnePacketOfSilence(int codec_id) {
+ CodecInst codec =
+ *RentACodec::CodecInstById(*RentACodec::CodecIdFromIndex(codec_id));
+ if (timestamp_ == 0) { // This is the first time inserting audio.
+ ASSERT_EQ(0, acm_->RegisterSendCodec(codec));
+ } else {
+ auto current_codec = acm_->SendCodec();
+ ASSERT_TRUE(current_codec);
+ if (!CodecsEqual(codec, *current_codec))
+ ASSERT_EQ(0, acm_->RegisterSendCodec(codec));
+ }
+ AudioFrame frame;
+ // Frame setup according to the codec.
+ frame.sample_rate_hz_ = codec.plfreq;
+ frame.samples_per_channel_ = codec.plfreq / 100; // 10 ms.
+ frame.num_channels_ = codec.channels;
+ memset(frame.data_, 0, frame.samples_per_channel_ * frame.num_channels_ *
+ sizeof(int16_t));
+ packet_sent_ = false;
+ last_packet_send_timestamp_ = timestamp_;
+ while (!packet_sent_) {
+ frame.timestamp_ = timestamp_;
+ timestamp_ += frame.samples_per_channel_;
+ ASSERT_GE(acm_->Add10MsData(frame), 0);
+ }
+ }
+
+ template <size_t N>
+ void AddSetOfCodecs(const RentACodec::CodecId(&ids)[N]) {
+ for (auto id : ids) {
+ const auto i = RentACodec::CodecIndexFromId(id);
+ ASSERT_TRUE(i);
+ ASSERT_EQ(
+ 0, receiver_->AddCodec(*i, codecs_[*i].pltype, codecs_[*i].channels,
+ codecs_[*i].plfreq, nullptr, ""));
+ }
+ }
+
+ int SendData(FrameType frame_type,
+ uint8_t payload_type,
+ uint32_t timestamp,
+ const uint8_t* payload_data,
+ size_t payload_len_bytes,
+ const RTPFragmentationHeader* fragmentation) override {
+ if (frame_type == kEmptyFrame)
+ return 0;
+
+ rtp_header_.header.payloadType = payload_type;
+ rtp_header_.frameType = frame_type;
+ if (frame_type == kAudioFrameSpeech)
+ rtp_header_.type.Audio.isCNG = false;
+ else
+ rtp_header_.type.Audio.isCNG = true;
+ rtp_header_.header.timestamp = timestamp;
+
+ int ret_val = receiver_->InsertPacket(
+ rtp_header_,
+ rtc::ArrayView<const uint8_t>(payload_data, payload_len_bytes));
+ if (ret_val < 0) {
+ assert(false);
+ return -1;
+ }
+ rtp_header_.header.sequenceNumber++;
+ packet_sent_ = true;
+ last_frame_type_ = frame_type;
+ return 0;
+ }
+
+ rtc::scoped_ptr<AcmReceiver> receiver_;
+ rtc::ArrayView<const CodecInst> codecs_;
+ rtc::scoped_ptr<AudioCodingModule> acm_;
+ WebRtcRTPHeader rtp_header_;
+ uint32_t timestamp_;
+  bool packet_sent_;  // Set when SendData is called; reset when inserting audio.
+ uint32_t last_packet_send_timestamp_;
+ FrameType last_frame_type_;
+};
+
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_AddCodecGetCodec DISABLED_AddCodecGetCodec
+#else
+#define MAYBE_AddCodecGetCodec AddCodecGetCodec
+#endif
+TEST_F(AcmReceiverTestOldApi, MAYBE_AddCodecGetCodec) {
+ // Add codec.
+ for (size_t n = 0; n < codecs_.size(); ++n) {
+ if (n & 0x1) // Just add codecs with odd index.
+ EXPECT_EQ(0,
+ receiver_->AddCodec(n, codecs_[n].pltype, codecs_[n].channels,
+ codecs_[n].plfreq, NULL, ""));
+ }
+ // Get codec and compare.
+ for (size_t n = 0; n < codecs_.size(); ++n) {
+ CodecInst my_codec;
+ if (n & 0x1) {
+ // Codecs with odd index should match the reference.
+ EXPECT_EQ(0, receiver_->DecoderByPayloadType(codecs_[n].pltype,
+ &my_codec));
+ EXPECT_TRUE(CodecsEqual(codecs_[n], my_codec));
+ } else {
+ // Codecs with even index are not registered.
+ EXPECT_EQ(-1, receiver_->DecoderByPayloadType(codecs_[n].pltype,
+ &my_codec));
+ }
+ }
+}
+
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_AddCodecChangePayloadType DISABLED_AddCodecChangePayloadType
+#else
+#define MAYBE_AddCodecChangePayloadType AddCodecChangePayloadType
+#endif
+TEST_F(AcmReceiverTestOldApi, MAYBE_AddCodecChangePayloadType) {
+ const CodecIdInst codec1(RentACodec::CodecId::kPCMA);
+ CodecInst codec2 = codec1.inst;
+ ++codec2.pltype;
+ CodecInst test_codec;
+
+ // Register the same codec with different payloads.
+ EXPECT_EQ(0, receiver_->AddCodec(codec1.id, codec1.inst.pltype,
+ codec1.inst.channels, codec1.inst.plfreq,
+ nullptr, ""));
+ EXPECT_EQ(0, receiver_->AddCodec(codec1.id, codec2.pltype, codec2.channels,
+ codec2.plfreq, NULL, ""));
+
+ // Both payload types should exist.
+ EXPECT_EQ(0,
+ receiver_->DecoderByPayloadType(codec1.inst.pltype, &test_codec));
+ EXPECT_EQ(true, CodecsEqual(codec1.inst, test_codec));
+ EXPECT_EQ(0, receiver_->DecoderByPayloadType(codec2.pltype, &test_codec));
+ EXPECT_EQ(true, CodecsEqual(codec2, test_codec));
+}
+
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_AddCodecChangeCodecId DISABLED_AddCodecChangeCodecId
+#else
+#define MAYBE_AddCodecChangeCodecId AddCodecChangeCodecId
+#endif
+TEST_F(AcmReceiverTestOldApi, MAYBE_AddCodecChangeCodecId) {
+ const CodecIdInst codec1(RentACodec::CodecId::kPCMU);
+ CodecIdInst codec2(RentACodec::CodecId::kPCMA);
+ codec2.inst.pltype = codec1.inst.pltype;
+ CodecInst test_codec;
+
+ // Register the same payload type with different codec ID.
+ EXPECT_EQ(0, receiver_->AddCodec(codec1.id, codec1.inst.pltype,
+ codec1.inst.channels, codec1.inst.plfreq,
+ nullptr, ""));
+ EXPECT_EQ(0, receiver_->AddCodec(codec2.id, codec2.inst.pltype,
+ codec2.inst.channels, codec2.inst.plfreq,
+ nullptr, ""));
+
+ // Make sure that the last codec is used.
+ EXPECT_EQ(0,
+ receiver_->DecoderByPayloadType(codec2.inst.pltype, &test_codec));
+ EXPECT_EQ(true, CodecsEqual(codec2.inst, test_codec));
+}
+
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_AddCodecRemoveCodec DISABLED_AddCodecRemoveCodec
+#else
+#define MAYBE_AddCodecRemoveCodec AddCodecRemoveCodec
+#endif
+TEST_F(AcmReceiverTestOldApi, MAYBE_AddCodecRemoveCodec) {
+ const CodecIdInst codec(RentACodec::CodecId::kPCMA);
+ const int payload_type = codec.inst.pltype;
+ EXPECT_EQ(
+ 0, receiver_->AddCodec(codec.id, codec.inst.pltype, codec.inst.channels,
+ codec.inst.plfreq, nullptr, ""));
+
+  // Removing a non-existing codec should not fail (ACM1 legacy behavior).
+ EXPECT_EQ(0, receiver_->RemoveCodec(payload_type + 1));
+
+ // Remove an existing codec.
+ EXPECT_EQ(0, receiver_->RemoveCodec(payload_type));
+
+  // Asking for the removed codec must fail.
+ CodecInst ci;
+ EXPECT_EQ(-1, receiver_->DecoderByPayloadType(payload_type, &ci));
+}
+
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_SampleRate DISABLED_SampleRate
+#else
+#define MAYBE_SampleRate SampleRate
+#endif
+TEST_F(AcmReceiverTestOldApi, MAYBE_SampleRate) {
+ const RentACodec::CodecId kCodecId[] = {RentACodec::CodecId::kISAC,
+ RentACodec::CodecId::kISACSWB};
+ AddSetOfCodecs(kCodecId);
+
+ AudioFrame frame;
+  const int kOutSampleRateHz = 8000;  // Different from the codec sample rate.
+ for (const auto codec_id : kCodecId) {
+ const CodecIdInst codec(codec_id);
+ const int num_10ms_frames = codec.inst.pacsize / (codec.inst.plfreq / 100);
+ InsertOnePacketOfSilence(codec.id);
+ for (int k = 0; k < num_10ms_frames; ++k) {
+ EXPECT_EQ(0, receiver_->GetAudio(kOutSampleRateHz, &frame));
+ }
+ EXPECT_EQ(codec.inst.plfreq, receiver_->last_output_sample_rate_hz());
+ }
+}
+
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_PostdecodingVad DISABLED_PostdecodingVad
+#else
+#define MAYBE_PostdecodingVad PostdecodingVad
+#endif
+TEST_F(AcmReceiverTestOldApi, MAYBE_PostdecodingVad) {
+ receiver_->EnableVad();
+ EXPECT_TRUE(receiver_->vad_enabled());
+ const CodecIdInst codec(RentACodec::CodecId::kPCM16Bwb);
+ ASSERT_EQ(
+ 0, receiver_->AddCodec(codec.id, codec.inst.pltype, codec.inst.channels,
+ codec.inst.plfreq, nullptr, ""));
+ const int kNumPackets = 5;
+ const int num_10ms_frames = codec.inst.pacsize / (codec.inst.plfreq / 100);
+ AudioFrame frame;
+ for (int n = 0; n < kNumPackets; ++n) {
+ InsertOnePacketOfSilence(codec.id);
+ for (int k = 0; k < num_10ms_frames; ++k)
+ ASSERT_EQ(0, receiver_->GetAudio(codec.inst.plfreq, &frame));
+ }
+ EXPECT_EQ(AudioFrame::kVadPassive, frame.vad_activity_);
+
+ receiver_->DisableVad();
+ EXPECT_FALSE(receiver_->vad_enabled());
+
+ for (int n = 0; n < kNumPackets; ++n) {
+ InsertOnePacketOfSilence(codec.id);
+ for (int k = 0; k < num_10ms_frames; ++k)
+ ASSERT_EQ(0, receiver_->GetAudio(codec.inst.plfreq, &frame));
+ }
+ EXPECT_EQ(AudioFrame::kVadUnknown, frame.vad_activity_);
+}
+
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_LastAudioCodec DISABLED_LastAudioCodec
+#else
+#define MAYBE_LastAudioCodec LastAudioCodec
+#endif
+#if defined(WEBRTC_CODEC_ISAC)
+TEST_F(AcmReceiverTestOldApi, MAYBE_LastAudioCodec) {
+ const RentACodec::CodecId kCodecId[] = {
+ RentACodec::CodecId::kISAC, RentACodec::CodecId::kPCMA,
+ RentACodec::CodecId::kISACSWB, RentACodec::CodecId::kPCM16Bswb32kHz};
+ AddSetOfCodecs(kCodecId);
+
+ const RentACodec::CodecId kCngId[] = {
+ // Not including full-band.
+ RentACodec::CodecId::kCNNB, RentACodec::CodecId::kCNWB,
+ RentACodec::CodecId::kCNSWB};
+ AddSetOfCodecs(kCngId);
+
+ // Register CNG at sender side.
+ for (auto id : kCngId)
+ ASSERT_EQ(0, acm_->RegisterSendCodec(CodecIdInst(id).inst));
+
+ CodecInst codec;
+ // No audio payload is received.
+ EXPECT_EQ(-1, receiver_->LastAudioCodec(&codec));
+
+ // Start with sending DTX.
+ ASSERT_EQ(0, acm_->SetVAD(true, true, VADVeryAggr));
+ packet_sent_ = false;
+  // One codec is enough for this test.
+  InsertOnePacketOfSilence(CodecIdInst(kCodecId[0]).id);
+ ASSERT_TRUE(packet_sent_);
+ EXPECT_EQ(kAudioFrameCN, last_frame_type_);
+
+  // Only DTX has been received; the last audio codec is undefined.
+ EXPECT_EQ(-1, receiver_->LastAudioCodec(&codec));
+ EXPECT_FALSE(receiver_->last_packet_sample_rate_hz());
+
+ for (auto id : kCodecId) {
+ const CodecIdInst c(id);
+
+ // Set DTX off to send audio payload.
+ acm_->SetVAD(false, false, VADAggr);
+ packet_sent_ = false;
+ InsertOnePacketOfSilence(c.id);
+
+    // Sanity check that an audio payload was actually received, and that it
+    // is of type "speech."
+ ASSERT_TRUE(packet_sent_);
+ ASSERT_EQ(kAudioFrameSpeech, last_frame_type_);
+ EXPECT_EQ(rtc::Optional<int>(c.inst.plfreq),
+ receiver_->last_packet_sample_rate_hz());
+
+ // Set VAD on to send DTX. Then check if the "Last Audio codec" returns
+ // the expected codec.
+ acm_->SetVAD(true, true, VADAggr);
+
+    // Encode as many frames as needed until a DTX packet is sent.
+ while (last_frame_type_ != kAudioFrameCN) {
+ packet_sent_ = false;
+ InsertOnePacketOfSilence(c.id);
+ ASSERT_TRUE(packet_sent_);
+ }
+ EXPECT_EQ(rtc::Optional<int>(c.inst.plfreq),
+ receiver_->last_packet_sample_rate_hz());
+ EXPECT_EQ(0, receiver_->LastAudioCodec(&codec));
+ EXPECT_TRUE(CodecsEqual(c.inst, codec));
+ }
+}
+#endif
+
+} // namespace acm2
+
+} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/acm2/acm_resampler.cc b/webrtc/modules/audio_coding/acm2/acm_resampler.cc
new file mode 100644
index 0000000000..dfc3ef7e27
--- /dev/null
+++ b/webrtc/modules/audio_coding/acm2/acm_resampler.cc
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/acm2/acm_resampler.h"
+
+#include <assert.h>
+#include <string.h>
+
+#include "webrtc/common_audio/resampler/include/resampler.h"
+#include "webrtc/system_wrappers/include/logging.h"
+
+namespace webrtc {
+namespace acm2 {
+
+ACMResampler::ACMResampler() {
+}
+
+ACMResampler::~ACMResampler() {
+}
+
+int ACMResampler::Resample10Msec(const int16_t* in_audio,
+ int in_freq_hz,
+ int out_freq_hz,
+ size_t num_audio_channels,
+ size_t out_capacity_samples,
+ int16_t* out_audio) {
+ size_t in_length = in_freq_hz * num_audio_channels / 100;
+ if (in_freq_hz == out_freq_hz) {
+ if (out_capacity_samples < in_length) {
+ assert(false);
+ return -1;
+ }
+ memcpy(out_audio, in_audio, in_length * sizeof(int16_t));
+ return static_cast<int>(in_length / num_audio_channels);
+ }
+
+ if (resampler_.InitializeIfNeeded(in_freq_hz, out_freq_hz,
+ num_audio_channels) != 0) {
+ LOG(LS_ERROR) << "InitializeIfNeeded(" << in_freq_hz << ", " << out_freq_hz
+ << ", " << num_audio_channels << ") failed.";
+ return -1;
+ }
+
+ int out_length =
+ resampler_.Resample(in_audio, in_length, out_audio, out_capacity_samples);
+ if (out_length == -1) {
+ LOG(LS_ERROR) << "Resample(" << in_audio << ", " << in_length << ", "
+ << out_audio << ", " << out_capacity_samples << ") failed.";
+ return -1;
+ }
+
+ return static_cast<int>(out_length / num_audio_channels);
+}
+
+} // namespace acm2
+} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/acm2/acm_resampler.h b/webrtc/modules/audio_coding/acm2/acm_resampler.h
new file mode 100644
index 0000000000..268db8b752
--- /dev/null
+++ b/webrtc/modules/audio_coding/acm2/acm_resampler.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_ACM2_ACM_RESAMPLER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_ACM2_ACM_RESAMPLER_H_
+
+#include "webrtc/common_audio/resampler/include/push_resampler.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+namespace acm2 {
+
+class ACMResampler {
+ public:
+ ACMResampler();
+ ~ACMResampler();
+
+ int Resample10Msec(const int16_t* in_audio,
+ int in_freq_hz,
+ int out_freq_hz,
+ size_t num_audio_channels,
+ size_t out_capacity_samples,
+ int16_t* out_audio);
+
+ private:
+ PushResampler<int16_t> resampler_;
+};
+
+} // namespace acm2
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_ACM2_ACM_RESAMPLER_H_
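A hypothetical caller of Resample10Msec(), to pin down the buffer-size contract (a sketch, not part of the patch): the input holds in_freq_hz/100 samples per channel, and out_capacity_samples must cover out_freq_hz/100 per channel.

#include "webrtc/modules/audio_coding/acm2/acm_resampler.h"

int ResampleExample() {
  webrtc::acm2::ACMResampler resampler;
  int16_t in[160] = {0};  // 10 ms of mono audio at 16 kHz.
  int16_t out[480];       // Room for 10 ms of mono audio at 48 kHz.
  // Returns the samples per channel produced (480 here), or -1 on failure.
  return resampler.Resample10Msec(in, 16000, 48000, 1, 480, out);
}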
diff --git a/webrtc/modules/audio_coding/acm2/acm_send_test_oldapi.cc b/webrtc/modules/audio_coding/acm2/acm_send_test_oldapi.cc
new file mode 100644
index 0000000000..3a89a77487
--- /dev/null
+++ b/webrtc/modules/audio_coding/acm2/acm_send_test_oldapi.cc
@@ -0,0 +1,158 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/acm2/acm_send_test_oldapi.h"
+
+#include <assert.h>
+#include <stdio.h>
+#include <string.h>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/modules/audio_coding/codecs/audio_encoder.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module.h"
+#include "webrtc/modules/audio_coding/neteq/tools/input_audio_file.h"
+#include "webrtc/modules/audio_coding/neteq/tools/packet.h"
+
+namespace webrtc {
+namespace test {
+
+AcmSendTestOldApi::AcmSendTestOldApi(InputAudioFile* audio_source,
+ int source_rate_hz,
+ int test_duration_ms)
+ : clock_(0),
+ acm_(webrtc::AudioCodingModule::Create(0, &clock_)),
+ audio_source_(audio_source),
+ source_rate_hz_(source_rate_hz),
+ input_block_size_samples_(
+ static_cast<size_t>(source_rate_hz_ * kBlockSizeMs / 1000)),
+ codec_registered_(false),
+ test_duration_ms_(test_duration_ms),
+ frame_type_(kAudioFrameSpeech),
+ payload_type_(0),
+ timestamp_(0),
+ sequence_number_(0) {
+ input_frame_.sample_rate_hz_ = source_rate_hz_;
+ input_frame_.num_channels_ = 1;
+ input_frame_.samples_per_channel_ = input_block_size_samples_;
+ assert(input_block_size_samples_ * input_frame_.num_channels_ <=
+ AudioFrame::kMaxDataSizeSamples);
+ acm_->RegisterTransportCallback(this);
+}
+
+bool AcmSendTestOldApi::RegisterCodec(const char* payload_name,
+ int sampling_freq_hz,
+ int channels,
+ int payload_type,
+ int frame_size_samples) {
+ CodecInst codec;
+ RTC_CHECK_EQ(0, AudioCodingModule::Codec(payload_name, &codec,
+ sampling_freq_hz, channels));
+ codec.pltype = payload_type;
+ codec.pacsize = frame_size_samples;
+ codec_registered_ = (acm_->RegisterSendCodec(codec) == 0);
+ input_frame_.num_channels_ = channels;
+ assert(input_block_size_samples_ * input_frame_.num_channels_ <=
+ AudioFrame::kMaxDataSizeSamples);
+ return codec_registered_;
+}
+
+bool AcmSendTestOldApi::RegisterExternalCodec(
+ AudioEncoder* external_speech_encoder) {
+ acm_->RegisterExternalSendCodec(external_speech_encoder);
+ input_frame_.num_channels_ = external_speech_encoder->NumChannels();
+ assert(input_block_size_samples_ * input_frame_.num_channels_ <=
+ AudioFrame::kMaxDataSizeSamples);
+ return codec_registered_ = true;
+}
+
+Packet* AcmSendTestOldApi::NextPacket() {
+ assert(codec_registered_);
+ if (filter_.test(static_cast<size_t>(payload_type_))) {
+ // This payload type should be filtered out. Since the payload type is the
+ // same throughout the whole test run, no packet at all will be delivered.
+ // We can just as well signal that the test is over by returning NULL.
+ return NULL;
+ }
+ // Insert audio and process until one packet is produced.
+ while (clock_.TimeInMilliseconds() < test_duration_ms_) {
+ clock_.AdvanceTimeMilliseconds(kBlockSizeMs);
+ RTC_CHECK(
+ audio_source_->Read(input_block_size_samples_, input_frame_.data_));
+ if (input_frame_.num_channels_ > 1) {
+ InputAudioFile::DuplicateInterleaved(input_frame_.data_,
+ input_block_size_samples_,
+ input_frame_.num_channels_,
+ input_frame_.data_);
+ }
+ data_to_send_ = false;
+ RTC_CHECK_GE(acm_->Add10MsData(input_frame_), 0);
+ input_frame_.timestamp_ += static_cast<uint32_t>(input_block_size_samples_);
+ if (data_to_send_) {
+ // Encoded packet received.
+ return CreatePacket();
+ }
+ }
+ // Test ended.
+ return NULL;
+}
+
+// This method receives the callback from ACM when a new packet is produced.
+int32_t AcmSendTestOldApi::SendData(
+ FrameType frame_type,
+ uint8_t payload_type,
+ uint32_t timestamp,
+ const uint8_t* payload_data,
+ size_t payload_len_bytes,
+ const RTPFragmentationHeader* fragmentation) {
+ // Store the packet locally.
+ frame_type_ = frame_type;
+ payload_type_ = payload_type;
+ timestamp_ = timestamp;
+ last_payload_vec_.assign(payload_data, payload_data + payload_len_bytes);
+ assert(last_payload_vec_.size() == payload_len_bytes);
+ data_to_send_ = true;
+ return 0;
+}
+
+Packet* AcmSendTestOldApi::CreatePacket() {
+ const size_t kRtpHeaderSize = 12;
+ size_t allocated_bytes = last_payload_vec_.size() + kRtpHeaderSize;
+ uint8_t* packet_memory = new uint8_t[allocated_bytes];
+ // Populate the header bytes.
+ packet_memory[0] = 0x80;
+ packet_memory[1] = static_cast<uint8_t>(payload_type_);
+ packet_memory[2] = (sequence_number_ >> 8) & 0xFF;
+ packet_memory[3] = (sequence_number_) & 0xFF;
+ packet_memory[4] = (timestamp_ >> 24) & 0xFF;
+ packet_memory[5] = (timestamp_ >> 16) & 0xFF;
+ packet_memory[6] = (timestamp_ >> 8) & 0xFF;
+ packet_memory[7] = timestamp_ & 0xFF;
+ // Set SSRC to 0x12345678.
+ packet_memory[8] = 0x12;
+ packet_memory[9] = 0x34;
+ packet_memory[10] = 0x56;
+ packet_memory[11] = 0x78;
+
+ ++sequence_number_;
+
+ // Copy the payload data.
+ memcpy(packet_memory + kRtpHeaderSize,
+ &last_payload_vec_[0],
+ last_payload_vec_.size());
+ Packet* packet =
+ new Packet(packet_memory, allocated_bytes, clock_.TimeInMilliseconds());
+ assert(packet);
+ assert(packet->valid_header());
+ return packet;
+}
+
+} // namespace test
+} // namespace webrtc
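As a companion to CreatePacket() above, this sketch decodes the 12 header bytes it writes, making the big-endian field packing explicit (an illustrative helper, not part of the patch):

#include <cstdint>

struct MiniRtpHeader {
  uint8_t payload_type;
  uint16_t sequence_number;
  uint32_t timestamp;
  uint32_t ssrc;
};

MiniRtpHeader ParseMiniRtpHeader(const uint8_t* p) {
  MiniRtpHeader h;
  h.payload_type = p[1] & 0x7F;  // Strip the marker bit.
  h.sequence_number = static_cast<uint16_t>((p[2] << 8) | p[3]);
  h.timestamp = (static_cast<uint32_t>(p[4]) << 24) |
                (static_cast<uint32_t>(p[5]) << 16) |
                (static_cast<uint32_t>(p[6]) << 8) | p[7];
  h.ssrc = (static_cast<uint32_t>(p[8]) << 24) |
           (static_cast<uint32_t>(p[9]) << 16) |
           (static_cast<uint32_t>(p[10]) << 8) | p[11];
  return h;  // For packets from CreatePacket(), h.ssrc == 0x12345678.
}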
diff --git a/webrtc/modules/audio_coding/acm2/acm_send_test_oldapi.h b/webrtc/modules/audio_coding/acm2/acm_send_test_oldapi.h
new file mode 100644
index 0000000000..ce68196a3f
--- /dev/null
+++ b/webrtc/modules/audio_coding/acm2/acm_send_test_oldapi.h
@@ -0,0 +1,91 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_ACM2_ACM_SEND_TEST_OLDAPI_H_
+#define WEBRTC_MODULES_AUDIO_CODING_ACM2_ACM_SEND_TEST_OLDAPI_H_
+
+#include <vector>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module.h"
+#include "webrtc/modules/audio_coding/neteq/tools/packet_source.h"
+#include "webrtc/system_wrappers/include/clock.h"
+
+namespace webrtc {
+class AudioEncoder;
+
+namespace test {
+class InputAudioFile;
+class Packet;
+
+class AcmSendTestOldApi : public AudioPacketizationCallback,
+ public PacketSource {
+ public:
+ AcmSendTestOldApi(InputAudioFile* audio_source,
+ int source_rate_hz,
+ int test_duration_ms);
+ virtual ~AcmSendTestOldApi() {}
+
+ // Registers the send codec. Returns true on success, false otherwise.
+ bool RegisterCodec(const char* payload_name,
+ int sampling_freq_hz,
+ int channels,
+ int payload_type,
+ int frame_size_samples);
+
+ // Registers an external send codec. Returns true on success, false otherwise.
+ bool RegisterExternalCodec(AudioEncoder* external_speech_encoder);
+
+ // Returns the next encoded packet. Returns NULL if the test duration was
+ // exceeded. Ownership of the packet is handed over to the caller.
+ // Inherited from PacketSource.
+ Packet* NextPacket();
+
+ // Inherited from AudioPacketizationCallback.
+ int32_t SendData(FrameType frame_type,
+ uint8_t payload_type,
+ uint32_t timestamp,
+ const uint8_t* payload_data,
+ size_t payload_len_bytes,
+ const RTPFragmentationHeader* fragmentation) override;
+
+ AudioCodingModule* acm() { return acm_.get(); }
+
+ private:
+ static const int kBlockSizeMs = 10;
+
+ // Creates a Packet object from the last packet produced by ACM (and received
+ // through the SendData method as a callback). Ownership of the new Packet
+ // object is transferred to the caller.
+ Packet* CreatePacket();
+
+ SimulatedClock clock_;
+ rtc::scoped_ptr<AudioCodingModule> acm_;
+ InputAudioFile* audio_source_;
+ int source_rate_hz_;
+ const size_t input_block_size_samples_;
+ AudioFrame input_frame_;
+ bool codec_registered_;
+ int test_duration_ms_;
+ // The following member variables are set whenever SendData() is called.
+ FrameType frame_type_;
+ int payload_type_;
+ uint32_t timestamp_;
+ uint16_t sequence_number_;
+ std::vector<uint8_t> last_payload_vec_;
+ bool data_to_send_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(AcmSendTestOldApi);
+};
+
+} // namespace test
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_ACM2_ACM_SEND_TEST_OLDAPI_H_
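Typical driver code for the fixture above might look as follows (a sketch; the file name, L16 codec parameters and payload type 107 are assumptions, not values from the patch):

#include "webrtc/modules/audio_coding/acm2/acm_send_test_oldapi.h"
#include "webrtc/modules/audio_coding/neteq/tools/input_audio_file.h"
#include "webrtc/modules/audio_coding/neteq/tools/packet.h"

void RunSendTest() {
  webrtc::test::InputAudioFile audio("input.pcm");  // Hypothetical file.
  // 16 kHz source, 10 s test duration.
  webrtc::test::AcmSendTestOldApi send_test(&audio, 16000, 10000);
  if (!send_test.RegisterCodec("L16", 16000, 1, 107, 160))
    return;
  // NextPacket() returns NULL once the duration is exceeded; the caller
  // owns each returned packet.
  while (webrtc::test::Packet* packet = send_test.NextPacket()) {
    delete packet;
  }
}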
diff --git a/webrtc/modules/audio_coding/acm2/audio_coding_module.cc b/webrtc/modules/audio_coding/acm2/audio_coding_module.cc
new file mode 100644
index 0000000000..c4dd349cc4
--- /dev/null
+++ b/webrtc/modules/audio_coding/acm2/audio_coding_module.cc
@@ -0,0 +1,98 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/include/audio_coding_module.h"
+
+#include "webrtc/base/checks.h"
+#include "webrtc/common_types.h"
+#include "webrtc/modules/audio_coding/acm2/audio_coding_module_impl.h"
+#include "webrtc/modules/audio_coding/acm2/rent_a_codec.h"
+#include "webrtc/system_wrappers/include/clock.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+namespace webrtc {
+
+// Create module
+AudioCodingModule* AudioCodingModule::Create(int id) {
+ Config config;
+ config.id = id;
+ config.clock = Clock::GetRealTimeClock();
+ return Create(config);
+}
+
+AudioCodingModule* AudioCodingModule::Create(int id, Clock* clock) {
+ Config config;
+ config.id = id;
+ config.clock = clock;
+ return Create(config);
+}
+
+AudioCodingModule* AudioCodingModule::Create(const Config& config) {
+ return new acm2::AudioCodingModuleImpl(config);
+}
+
+int AudioCodingModule::NumberOfCodecs() {
+ return static_cast<int>(acm2::RentACodec::NumberOfCodecs());
+}
+
+int AudioCodingModule::Codec(int list_id, CodecInst* codec) {
+ auto codec_id = acm2::RentACodec::CodecIdFromIndex(list_id);
+ if (!codec_id)
+ return -1;
+ auto ci = acm2::RentACodec::CodecInstById(*codec_id);
+ if (!ci)
+ return -1;
+ *codec = *ci;
+ return 0;
+}
+
+int AudioCodingModule::Codec(const char* payload_name,
+ CodecInst* codec,
+ int sampling_freq_hz,
+ size_t channels) {
+ rtc::Optional<CodecInst> ci = acm2::RentACodec::CodecInstByParams(
+ payload_name, sampling_freq_hz, channels);
+ if (ci) {
+ *codec = *ci;
+ return 0;
+ } else {
+ // We couldn't find a matching codec, so set the parameters to unacceptable
+ // values and return.
+ codec->plname[0] = '\0';
+ codec->pltype = -1;
+ codec->pacsize = 0;
+ codec->rate = 0;
+ codec->plfreq = 0;
+ return -1;
+ }
+}
+
+int AudioCodingModule::Codec(const char* payload_name,
+ int sampling_freq_hz,
+ size_t channels) {
+ rtc::Optional<acm2::RentACodec::CodecId> ci =
+ acm2::RentACodec::CodecIdByParams(payload_name, sampling_freq_hz,
+ channels);
+ if (!ci)
+ return -1;
+ rtc::Optional<int> i = acm2::RentACodec::CodecIndexFromId(*ci);
+ return i ? *i : -1;
+}
+
+// Checks the validity of the parameters of the given codec
+bool AudioCodingModule::IsCodecValid(const CodecInst& codec) {
+ bool valid = acm2::RentACodec::IsCodecValid(codec);
+ if (!valid)
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, -1,
+ "Invalid codec setting");
+ return valid;
+}
+
+} // namespace webrtc
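A factory-usage sketch for the functions defined above (illustrative only):

#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/audio_coding/include/audio_coding_module.h"

void CreateAcmAndLookUpCodec() {
  rtc::scoped_ptr<webrtc::AudioCodingModule> acm(
      webrtc::AudioCodingModule::Create(0));
  webrtc::CodecInst codec;
  // Fills |codec| with the database entry for mono PCMU at 8 kHz; returns
  // -1 and resets |codec| to unacceptable values if no match is found.
  if (webrtc::AudioCodingModule::Codec("PCMU", &codec, 8000, 1) == 0) {
    // codec.pltype, codec.plfreq etc. are now valid.
  }
}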
diff --git a/webrtc/modules/audio_coding/acm2/audio_coding_module_impl.cc b/webrtc/modules/audio_coding/acm2/audio_coding_module_impl.cc
new file mode 100644
index 0000000000..ac302f0fe3
--- /dev/null
+++ b/webrtc/modules/audio_coding/acm2/audio_coding_module_impl.cc
@@ -0,0 +1,828 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/acm2/audio_coding_module_impl.h"
+
+#include <assert.h>
+#include <stdlib.h>
+#include <vector>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/safe_conversions.h"
+#include "webrtc/engine_configurations.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module_typedefs.h"
+#include "webrtc/modules/audio_coding/acm2/acm_common_defs.h"
+#include "webrtc/modules/audio_coding/acm2/acm_resampler.h"
+#include "webrtc/modules/audio_coding/acm2/call_statistics.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/logging.h"
+#include "webrtc/system_wrappers/include/metrics.h"
+#include "webrtc/system_wrappers/include/rw_lock_wrapper.h"
+#include "webrtc/system_wrappers/include/trace.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+namespace acm2 {
+
+namespace {
+
+// TODO(turajs): the same functionality is used in NetEq. If both classes
+// need them, make it a static function in ACMCodecDB.
+bool IsCodecRED(const CodecInst& codec) {
+ return (STR_CASE_CMP(codec.plname, "RED") == 0);
+}
+
+bool IsCodecCN(const CodecInst& codec) {
+ return (STR_CASE_CMP(codec.plname, "CN") == 0);
+}
+
+// Stereo-to-mono can be used as in-place.
+int DownMix(const AudioFrame& frame,
+ size_t length_out_buff,
+ int16_t* out_buff) {
+ if (length_out_buff < frame.samples_per_channel_) {
+ return -1;
+ }
+ for (size_t n = 0; n < frame.samples_per_channel_; ++n)
+ out_buff[n] = (frame.data_[2 * n] + frame.data_[2 * n + 1]) >> 1;
+ return 0;
+}
+
+// Mono-to-stereo can be used as in-place.
+int UpMix(const AudioFrame& frame, size_t length_out_buff, int16_t* out_buff) {
+ if (length_out_buff < frame.samples_per_channel_) {
+ return -1;
+ }
+ for (size_t n = frame.samples_per_channel_; n != 0; --n) {
+ size_t i = n - 1;
+ int16_t sample = frame.data_[i];
+ out_buff[2 * i + 1] = sample;
+ out_buff[2 * i] = sample;
+ }
+ return 0;
+}
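Since DownMix() and UpMix() live in an unnamed namespace, here is a standalone mirror of the down-mix arithmetic for reference (a sketch, not part of the patch): each interleaved L/R pair is averaged into one mono sample with an arithmetic shift.

#include <cstddef>
#include <cstdint>

void DownMixInterleaved(const int16_t* stereo, size_t samples_per_channel,
                        int16_t* mono) {
  for (size_t n = 0; n < samples_per_channel; ++n)
    mono[n] = (stereo[2 * n] + stereo[2 * n + 1]) >> 1;
}
// {100, 200, 300, 400} -> {150, 350}.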
+
+void ConvertEncodedInfoToFragmentationHeader(
+ const AudioEncoder::EncodedInfo& info,
+ RTPFragmentationHeader* frag) {
+ if (info.redundant.empty()) {
+ frag->fragmentationVectorSize = 0;
+ return;
+ }
+
+ frag->VerifyAndAllocateFragmentationHeader(
+ static_cast<uint16_t>(info.redundant.size()));
+ frag->fragmentationVectorSize = static_cast<uint16_t>(info.redundant.size());
+ size_t offset = 0;
+ for (size_t i = 0; i < info.redundant.size(); ++i) {
+ frag->fragmentationOffset[i] = offset;
+ offset += info.redundant[i].encoded_bytes;
+ frag->fragmentationLength[i] = info.redundant[i].encoded_bytes;
+ frag->fragmentationTimeDiff[i] = rtc::checked_cast<uint16_t>(
+ info.encoded_timestamp - info.redundant[i].encoded_timestamp);
+ frag->fragmentationPlType[i] = info.redundant[i].payload_type;
+ }
+}
+} // namespace
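The offset bookkeeping in ConvertEncodedInfoToFragmentationHeader() is worth a worked example. The function itself is file-local, so this sketch re-derives the numbers with plain containers (hypothetical values, not from the patch): redundant blocks of 10 and 20 bytes start at offsets 0 and 10.

#include <cstddef>
#include <vector>

std::vector<size_t> FragmentationOffsets(const std::vector<size_t>& lengths) {
  std::vector<size_t> offsets;
  size_t offset = 0;
  for (size_t len : lengths) {
    offsets.push_back(offset);  // Each block starts where the previous ended.
    offset += len;
  }
  return offsets;
}
// FragmentationOffsets({10, 20}) == {0, 10}.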
+
+void AudioCodingModuleImpl::ChangeLogger::MaybeLog(int value) {
+ if (value != last_value_ || first_time_) {
+ first_time_ = false;
+ last_value_ = value;
+ RTC_HISTOGRAM_COUNTS_SPARSE_100(histogram_name_, value);
+ }
+}
+
+AudioCodingModuleImpl::AudioCodingModuleImpl(
+ const AudioCodingModule::Config& config)
+ : acm_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
+ id_(config.id),
+ expected_codec_ts_(0xD87F3F9F),
+ expected_in_ts_(0xD87F3F9F),
+ receiver_(config),
+ bitrate_logger_("WebRTC.Audio.TargetBitrateInKbps"),
+ previous_pltype_(255),
+ receiver_initialized_(false),
+ first_10ms_data_(false),
+ first_frame_(true),
+ callback_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
+ packetization_callback_(NULL),
+ vad_callback_(NULL) {
+ if (InitializeReceiverSafe() < 0) {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
+ "Cannot initialize receiver");
+ }
+ WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceAudioCoding, id_, "Created");
+}
+
+AudioCodingModuleImpl::~AudioCodingModuleImpl() = default;
+
+int32_t AudioCodingModuleImpl::Encode(const InputData& input_data) {
+ AudioEncoder::EncodedInfo encoded_info;
+ uint8_t previous_pltype;
+
+ // Check if there is an encoder before.
+ if (!HaveValidEncoder("Process"))
+ return -1;
+
+ AudioEncoder* audio_encoder = rent_a_codec_.GetEncoderStack();
+ // Scale the timestamp to the codec's RTP timestamp rate.
+ uint32_t rtp_timestamp =
+ first_frame_ ? input_data.input_timestamp
+ : last_rtp_timestamp_ +
+ rtc::CheckedDivExact(
+ input_data.input_timestamp - last_timestamp_,
+ static_cast<uint32_t>(rtc::CheckedDivExact(
+ audio_encoder->SampleRateHz(),
+ audio_encoder->RtpTimestampRateHz())));
+ last_timestamp_ = input_data.input_timestamp;
+ last_rtp_timestamp_ = rtp_timestamp;
+ first_frame_ = false;
+
+ encode_buffer_.SetSize(audio_encoder->MaxEncodedBytes());
+ encoded_info = audio_encoder->Encode(
+ rtp_timestamp, rtc::ArrayView<const int16_t>(
+ input_data.audio, input_data.audio_channel *
+ input_data.length_per_channel),
+ encode_buffer_.size(), encode_buffer_.data());
+ encode_buffer_.SetSize(encoded_info.encoded_bytes);
+ bitrate_logger_.MaybeLog(audio_encoder->GetTargetBitrate() / 1000);
+ if (encode_buffer_.size() == 0 && !encoded_info.send_even_if_empty) {
+ // Not enough data.
+ return 0;
+ }
+ previous_pltype = previous_pltype_; // Read it while we have the critsect.
+
+ RTPFragmentationHeader my_fragmentation;
+ ConvertEncodedInfoToFragmentationHeader(encoded_info, &my_fragmentation);
+ FrameType frame_type;
+ if (encode_buffer_.size() == 0 && encoded_info.send_even_if_empty) {
+ frame_type = kEmptyFrame;
+ encoded_info.payload_type = previous_pltype;
+ } else {
+ RTC_DCHECK_GT(encode_buffer_.size(), 0u);
+ frame_type = encoded_info.speech ? kAudioFrameSpeech : kAudioFrameCN;
+ }
+
+ {
+ CriticalSectionScoped lock(callback_crit_sect_.get());
+ if (packetization_callback_) {
+ packetization_callback_->SendData(
+ frame_type, encoded_info.payload_type, encoded_info.encoded_timestamp,
+ encode_buffer_.data(), encode_buffer_.size(),
+ my_fragmentation.fragmentationVectorSize > 0 ? &my_fragmentation
+ : nullptr);
+ }
+
+ if (vad_callback_) {
+ // Callback with VAD decision.
+ vad_callback_->InFrameType(frame_type);
+ }
+ }
+ previous_pltype_ = encoded_info.payload_type;
+ return static_cast<int32_t>(encode_buffer_.size());
+}
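The RTP timestamp scaling at the top of Encode() can be counter-intuitive, so here is a standalone mirror of the arithmetic (a sketch under the assumption of a G.722-style codec, whose 8 kHz RTP clock runs at half its 16 kHz sample rate):

#include <cstdint>

uint32_t NextRtpTimestamp(uint32_t last_rtp_timestamp,
                          uint32_t input_timestamp,
                          uint32_t last_input_timestamp,
                          int sample_rate_hz,
                          int rtp_timestamp_rate_hz) {
  // Mirrors the CheckedDivExact() chain above with plain division.
  const uint32_t ratio =
      static_cast<uint32_t>(sample_rate_hz / rtp_timestamp_rate_hz);
  return last_rtp_timestamp +
         (input_timestamp - last_input_timestamp) / ratio;
}
// A 320-sample step at 16 kHz advances the 8 kHz RTP clock by 160 ticks:
// NextRtpTimestamp(0, 320, 0, 16000, 8000) == 160.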
+
+/////////////////////////////////////////
+// Sender
+//
+
+// Can be called multiple times for Codec, CNG, RED.
+int AudioCodingModuleImpl::RegisterSendCodec(const CodecInst& send_codec) {
+ CriticalSectionScoped lock(acm_crit_sect_.get());
+ if (!codec_manager_.RegisterEncoder(send_codec)) {
+ return -1;
+ }
+ auto* sp = codec_manager_.GetStackParams();
+ if (!sp->speech_encoder && codec_manager_.GetCodecInst()) {
+ // We have no speech encoder, but we have a specification for making one.
+ AudioEncoder* enc =
+ rent_a_codec_.RentEncoder(*codec_manager_.GetCodecInst());
+ if (!enc)
+ return -1;
+ sp->speech_encoder = enc;
+ }
+ if (sp->speech_encoder)
+ rent_a_codec_.RentEncoderStack(sp);
+ return 0;
+}
+
+void AudioCodingModuleImpl::RegisterExternalSendCodec(
+ AudioEncoder* external_speech_encoder) {
+ CriticalSectionScoped lock(acm_crit_sect_.get());
+ auto* sp = codec_manager_.GetStackParams();
+ sp->speech_encoder = external_speech_encoder;
+ rent_a_codec_.RentEncoderStack(sp);
+}
+
+// Get current send codec.
+rtc::Optional<CodecInst> AudioCodingModuleImpl::SendCodec() const {
+ CriticalSectionScoped lock(acm_crit_sect_.get());
+ auto* ci = codec_manager_.GetCodecInst();
+ if (ci) {
+ return rtc::Optional<CodecInst>(*ci);
+ }
+ auto* enc = codec_manager_.GetStackParams()->speech_encoder;
+ if (enc) {
+ return rtc::Optional<CodecInst>(CodecManager::ForgeCodecInst(enc));
+ }
+ return rtc::Optional<CodecInst>();
+}
+
+// Get current send frequency.
+int AudioCodingModuleImpl::SendFrequency() const {
+ WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, id_,
+ "SendFrequency()");
+ CriticalSectionScoped lock(acm_crit_sect_.get());
+
+ const auto* enc = rent_a_codec_.GetEncoderStack();
+ if (!enc) {
+ WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, id_,
+ "SendFrequency Failed, no codec is registered");
+ return -1;
+ }
+
+ return enc->SampleRateHz();
+}
+
+void AudioCodingModuleImpl::SetBitRate(int bitrate_bps) {
+ CriticalSectionScoped lock(acm_crit_sect_.get());
+ auto* enc = rent_a_codec_.GetEncoderStack();
+ if (enc) {
+ enc->SetTargetBitrate(bitrate_bps);
+ }
+}
+
+// Register a transport callback which will be called to deliver
+// the encoded buffers.
+int AudioCodingModuleImpl::RegisterTransportCallback(
+ AudioPacketizationCallback* transport) {
+ CriticalSectionScoped lock(callback_crit_sect_.get());
+ packetization_callback_ = transport;
+ return 0;
+}
+
+// Add 10 ms of raw (PCM) audio data to the encoder.
+int AudioCodingModuleImpl::Add10MsData(const AudioFrame& audio_frame) {
+ InputData input_data;
+ CriticalSectionScoped lock(acm_crit_sect_.get());
+ int r = Add10MsDataInternal(audio_frame, &input_data);
+ return r < 0 ? r : Encode(input_data);
+}
+
+int AudioCodingModuleImpl::Add10MsDataInternal(const AudioFrame& audio_frame,
+ InputData* input_data) {
+ if (audio_frame.samples_per_channel_ == 0) {
+ assert(false);
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
+ "Cannot Add 10 ms audio, payload length is zero");
+ return -1;
+ }
+
+ if (audio_frame.sample_rate_hz_ > 48000) {
+ assert(false);
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
+ "Cannot Add 10 ms audio, input frequency not valid");
+ return -1;
+ }
+
+  // Check that the length and frequency match. We currently only support raw
+  // PCM.
+ if (static_cast<size_t>(audio_frame.sample_rate_hz_ / 100) !=
+ audio_frame.samples_per_channel_) {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
+ "Cannot Add 10 ms audio, input frequency and length doesn't"
+ " match");
+ return -1;
+ }
+
+ if (audio_frame.num_channels_ != 1 && audio_frame.num_channels_ != 2) {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
+ "Cannot Add 10 ms audio, invalid number of channels.");
+ return -1;
+ }
+
+ // Do we have a codec registered?
+ if (!HaveValidEncoder("Add10MsData")) {
+ return -1;
+ }
+
+ const AudioFrame* ptr_frame;
+ // Perform a resampling, also down-mix if it is required and can be
+ // performed before resampling (a down mix prior to resampling will take
+ // place if both primary and secondary encoders are mono and input is in
+ // stereo).
+ if (PreprocessToAddData(audio_frame, &ptr_frame) < 0) {
+ return -1;
+ }
+
+  // Check whether we need an up-mix or a down-mix.
+ const size_t current_num_channels =
+ rent_a_codec_.GetEncoderStack()->NumChannels();
+ const bool same_num_channels =
+ ptr_frame->num_channels_ == current_num_channels;
+
+ if (!same_num_channels) {
+ if (ptr_frame->num_channels_ == 1) {
+ if (UpMix(*ptr_frame, WEBRTC_10MS_PCM_AUDIO, input_data->buffer) < 0)
+ return -1;
+ } else {
+ if (DownMix(*ptr_frame, WEBRTC_10MS_PCM_AUDIO, input_data->buffer) < 0)
+ return -1;
+ }
+ }
+
+  // When adding data to encoders, this pointer points to an audio buffer
+  // with the correct number of channels.
+ const int16_t* ptr_audio = ptr_frame->data_;
+
+  // For pushing data to the primary encoder, point |ptr_audio| to the
+  // correct buffer.
+ if (!same_num_channels)
+ ptr_audio = input_data->buffer;
+
+ input_data->input_timestamp = ptr_frame->timestamp_;
+ input_data->audio = ptr_audio;
+ input_data->length_per_channel = ptr_frame->samples_per_channel_;
+ input_data->audio_channel = current_num_channels;
+
+ return 0;
+}
+
+// Perform a resampling and down-mix if required. We down-mix only if the
+// encoder is mono and the input is stereo. In the case of dual-streaming,
+// both encoders have to be mono for the down-mix to take place.
+// |*ptr_out| will point to the pre-processed audio-frame. If no pre-processing
+// is required, |*ptr_out| points to |in_frame|.
+int AudioCodingModuleImpl::PreprocessToAddData(const AudioFrame& in_frame,
+ const AudioFrame** ptr_out) {
+ const auto* enc = rent_a_codec_.GetEncoderStack();
+ const bool resample = in_frame.sample_rate_hz_ != enc->SampleRateHz();
+
+  // This variable is true if the primary codec and the secondary codec (if
+  // it exists) are both mono and the input is stereo.
+ // TODO(henrik.lundin): This condition should probably be
+ // in_frame.num_channels_ > enc->NumChannels()
+ const bool down_mix = in_frame.num_channels_ == 2 && enc->NumChannels() == 1;
+
+ if (!first_10ms_data_) {
+ expected_in_ts_ = in_frame.timestamp_;
+ expected_codec_ts_ = in_frame.timestamp_;
+ first_10ms_data_ = true;
+ } else if (in_frame.timestamp_ != expected_in_ts_) {
+    // TODO(turajs): Do we need a warning here?
+ expected_codec_ts_ +=
+ (in_frame.timestamp_ - expected_in_ts_) *
+ static_cast<uint32_t>(static_cast<double>(enc->SampleRateHz()) /
+ static_cast<double>(in_frame.sample_rate_hz_));
+ expected_in_ts_ = in_frame.timestamp_;
+ }
+
+ if (!down_mix && !resample) {
+ // No pre-processing is required.
+ expected_in_ts_ += static_cast<uint32_t>(in_frame.samples_per_channel_);
+ expected_codec_ts_ += static_cast<uint32_t>(in_frame.samples_per_channel_);
+ *ptr_out = &in_frame;
+ return 0;
+ }
+
+ *ptr_out = &preprocess_frame_;
+ preprocess_frame_.num_channels_ = in_frame.num_channels_;
+ int16_t audio[WEBRTC_10MS_PCM_AUDIO];
+ const int16_t* src_ptr_audio = in_frame.data_;
+ int16_t* dest_ptr_audio = preprocess_frame_.data_;
+ if (down_mix) {
+ // If a resampling is required the output of a down-mix is written into a
+ // local buffer, otherwise, it will be written to the output frame.
+ if (resample)
+ dest_ptr_audio = audio;
+ if (DownMix(in_frame, WEBRTC_10MS_PCM_AUDIO, dest_ptr_audio) < 0)
+ return -1;
+ preprocess_frame_.num_channels_ = 1;
+    // The input of the resampler is now the down-mixed signal.
+ src_ptr_audio = audio;
+ }
+
+ preprocess_frame_.timestamp_ = expected_codec_ts_;
+ preprocess_frame_.samples_per_channel_ = in_frame.samples_per_channel_;
+ preprocess_frame_.sample_rate_hz_ = in_frame.sample_rate_hz_;
+  // Perform resampling if required.
+ if (resample) {
+ // The result of the resampler is written to output frame.
+ dest_ptr_audio = preprocess_frame_.data_;
+
+ int samples_per_channel = resampler_.Resample10Msec(
+ src_ptr_audio, in_frame.sample_rate_hz_, enc->SampleRateHz(),
+ preprocess_frame_.num_channels_, AudioFrame::kMaxDataSizeSamples,
+ dest_ptr_audio);
+
+ if (samples_per_channel < 0) {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
+ "Cannot add 10 ms audio, resampling failed");
+ return -1;
+ }
+ preprocess_frame_.samples_per_channel_ =
+ static_cast<size_t>(samples_per_channel);
+ preprocess_frame_.sample_rate_hz_ = enc->SampleRateHz();
+ }
+
+ expected_codec_ts_ +=
+ static_cast<uint32_t>(preprocess_frame_.samples_per_channel_);
+ expected_in_ts_ += static_cast<uint32_t>(in_frame.samples_per_channel_);
+
+ return 0;
+}
+
+/////////////////////////////////////////
+// (RED) Redundant Coding
+//
+
+bool AudioCodingModuleImpl::REDStatus() const {
+ CriticalSectionScoped lock(acm_crit_sect_.get());
+ return codec_manager_.GetStackParams()->use_red;
+}
+
+// Configure RED status, i.e., on/off.
+int AudioCodingModuleImpl::SetREDStatus(bool enable_red) {
+#ifdef WEBRTC_CODEC_RED
+ CriticalSectionScoped lock(acm_crit_sect_.get());
+ if (!codec_manager_.SetCopyRed(enable_red)) {
+ return -1;
+ }
+ auto* sp = codec_manager_.GetStackParams();
+ if (sp->speech_encoder)
+ rent_a_codec_.RentEncoderStack(sp);
+ return 0;
+#else
+ WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, id_,
+ " WEBRTC_CODEC_RED is undefined");
+ return -1;
+#endif
+}
+
+/////////////////////////////////////////
+// (FEC) Forward Error Correction (codec internal)
+//
+
+bool AudioCodingModuleImpl::CodecFEC() const {
+ CriticalSectionScoped lock(acm_crit_sect_.get());
+ return codec_manager_.GetStackParams()->use_codec_fec;
+}
+
+int AudioCodingModuleImpl::SetCodecFEC(bool enable_codec_fec) {
+ CriticalSectionScoped lock(acm_crit_sect_.get());
+ if (!codec_manager_.SetCodecFEC(enable_codec_fec)) {
+ return -1;
+ }
+ auto* sp = codec_manager_.GetStackParams();
+ if (sp->speech_encoder)
+ rent_a_codec_.RentEncoderStack(sp);
+ if (enable_codec_fec) {
+ return sp->use_codec_fec ? 0 : -1;
+ } else {
+ RTC_DCHECK(!sp->use_codec_fec);
+ return 0;
+ }
+}
+
+int AudioCodingModuleImpl::SetPacketLossRate(int loss_rate) {
+ CriticalSectionScoped lock(acm_crit_sect_.get());
+ if (HaveValidEncoder("SetPacketLossRate")) {
+ rent_a_codec_.GetEncoderStack()->SetProjectedPacketLossRate(loss_rate /
+ 100.0);
+ }
+ return 0;
+}
+
+/////////////////////////////////////////
+// (VAD) Voice Activity Detection
+//
+int AudioCodingModuleImpl::SetVAD(bool enable_dtx,
+ bool enable_vad,
+ ACMVADMode mode) {
+ // Note: |enable_vad| is not used; VAD is enabled based on the DTX setting.
+ RTC_DCHECK_EQ(enable_dtx, enable_vad);
+ CriticalSectionScoped lock(acm_crit_sect_.get());
+ if (!codec_manager_.SetVAD(enable_dtx, mode)) {
+ return -1;
+ }
+ auto* sp = codec_manager_.GetStackParams();
+ if (sp->speech_encoder)
+ rent_a_codec_.RentEncoderStack(sp);
+ return 0;
+}
+
+// Get VAD/DTX settings.
+int AudioCodingModuleImpl::VAD(bool* dtx_enabled, bool* vad_enabled,
+ ACMVADMode* mode) const {
+ CriticalSectionScoped lock(acm_crit_sect_.get());
+ const auto* sp = codec_manager_.GetStackParams();
+ *dtx_enabled = *vad_enabled = sp->use_cng;
+ *mode = sp->vad_mode;
+ return 0;
+}
+
+/////////////////////////////////////////
+// Receiver
+//
+
+int AudioCodingModuleImpl::InitializeReceiver() {
+ CriticalSectionScoped lock(acm_crit_sect_.get());
+ return InitializeReceiverSafe();
+}
+
+// Initialize receiver, resets codec database etc.
+int AudioCodingModuleImpl::InitializeReceiverSafe() {
+ // If the receiver is already initialized then we want to destroy any
+ // existing decoders. After a call to this function, we should have a clean
+ // start-up.
+ if (receiver_initialized_) {
+ if (receiver_.RemoveAllCodecs() < 0)
+ return -1;
+ }
+ receiver_.set_id(id_);
+ receiver_.ResetInitialDelay();
+ receiver_.SetMinimumDelay(0);
+ receiver_.SetMaximumDelay(0);
+ receiver_.FlushBuffers();
+
+ // Register RED and CN.
+ auto db = RentACodec::Database();
+ for (size_t i = 0; i < db.size(); i++) {
+ if (IsCodecRED(db[i]) || IsCodecCN(db[i])) {
+ if (receiver_.AddCodec(static_cast<int>(i),
+ static_cast<uint8_t>(db[i].pltype), 1,
+ db[i].plfreq, nullptr, db[i].plname) < 0) {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
+ "Cannot register master codec.");
+ return -1;
+ }
+ }
+ }
+ receiver_initialized_ = true;
+ return 0;
+}
+
+// Get current receive frequency.
+int AudioCodingModuleImpl::ReceiveFrequency() const {
+ const auto last_packet_sample_rate = receiver_.last_packet_sample_rate_hz();
+ return last_packet_sample_rate ? *last_packet_sample_rate
+ : receiver_.last_output_sample_rate_hz();
+}
+
+// Get current playout frequency.
+int AudioCodingModuleImpl::PlayoutFrequency() const {
+ WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, id_,
+ "PlayoutFrequency()");
+ return receiver_.last_output_sample_rate_hz();
+}
+
+// Registers possible receive codecs. Can be called multiple times for
+// codecs, CNG (NB, WB and SWB), DTMF and RED.
+int AudioCodingModuleImpl::RegisterReceiveCodec(const CodecInst& codec) {
+ CriticalSectionScoped lock(acm_crit_sect_.get());
+ RTC_DCHECK(receiver_initialized_);
+ if (codec.channels > 2) {
+ LOG_F(LS_ERROR) << "Unsupported number of channels: " << codec.channels;
+ return -1;
+ }
+
+ auto codec_id =
+ RentACodec::CodecIdByParams(codec.plname, codec.plfreq, codec.channels);
+ if (!codec_id) {
+ LOG_F(LS_ERROR) << "Wrong codec params to be registered as receive codec";
+ return -1;
+ }
+ auto codec_index = RentACodec::CodecIndexFromId(*codec_id);
+ RTC_CHECK(codec_index) << "Invalid codec ID: " << static_cast<int>(*codec_id);
+
+ // Check if the payload-type is valid.
+ if (!RentACodec::IsPayloadTypeValid(codec.pltype)) {
+ LOG_F(LS_ERROR) << "Invalid payload type " << codec.pltype << " for "
+ << codec.plname;
+ return -1;
+ }
+
+ // Get |decoder| associated with |codec|. |decoder| is NULL if |codec| does
+ // not own its decoder.
+ return receiver_.AddCodec(
+ *codec_index, codec.pltype, codec.channels, codec.plfreq,
+ STR_CASE_CMP(codec.plname, "isac") == 0 ? rent_a_codec_.RentIsacDecoder()
+ : nullptr,
+ codec.plname);
+}
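+
+// A minimal usage sketch, assuming a configured |acm|; the Codec() helper and
+// the payload type value mirror the unit tests further down:
+//
+//   CodecInst codec;
+//   if (AudioCodingModule::Codec("L16", &codec, 16000, 1) == 0) {
+//     codec.pltype = 111;  // Example payload type.
+//     acm->RegisterReceiveCodec(codec);  // 0 on success, -1 on error.
+//   }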
+
+int AudioCodingModuleImpl::RegisterExternalReceiveCodec(
+ int rtp_payload_type,
+ AudioDecoder* external_decoder,
+ int sample_rate_hz,
+ int num_channels,
+ const std::string& name) {
+ CriticalSectionScoped lock(acm_crit_sect_.get());
+ RTC_DCHECK(receiver_initialized_);
+ if (num_channels > 2 || num_channels < 0) {
+ LOG_F(LS_ERROR) << "Unsupported number of channels: " << num_channels;
+ return -1;
+ }
+
+ // Check if the payload-type is valid.
+ if (!RentACodec::IsPayloadTypeValid(rtp_payload_type)) {
+ LOG_F(LS_ERROR) << "Invalid payload-type " << rtp_payload_type
+ << " for external decoder.";
+ return -1;
+ }
+
+ return receiver_.AddCodec(-1 /* external */, rtp_payload_type, num_channels,
+ sample_rate_hz, external_decoder, name);
+}
+
+// Get current received codec.
+int AudioCodingModuleImpl::ReceiveCodec(CodecInst* current_codec) const {
+ CriticalSectionScoped lock(acm_crit_sect_.get());
+ return receiver_.LastAudioCodec(current_codec);
+}
+
+// Incoming packet from network parsed and ready for decode.
+int AudioCodingModuleImpl::IncomingPacket(const uint8_t* incoming_payload,
+ const size_t payload_length,
+ const WebRtcRTPHeader& rtp_header) {
+ return receiver_.InsertPacket(
+ rtp_header,
+ rtc::ArrayView<const uint8_t>(incoming_payload, payload_length));
+}
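+
+// A minimal header-setup sketch for IncomingPacket(), assuming a configured
+// |acm|; the fields mirror RtpUtility::Populate() in the unit tests, and all
+// values are examples:
+//
+//   WebRtcRTPHeader rtp_header;
+//   rtp_header.header.payloadType = 111;
+//   rtp_header.header.sequenceNumber = 0xABCD;
+//   rtp_header.header.timestamp = 0xABCDEF01;
+//   rtp_header.header.ssrc = 0x1234;
+//   rtp_header.header.markerBit = false;
+//   acm->IncomingPacket(payload, payload_length, rtp_header);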
+
+// Minimum playout delay (Used for lip-sync).
+int AudioCodingModuleImpl::SetMinimumPlayoutDelay(int time_ms) {
+ if ((time_ms < 0) || (time_ms > 10000)) {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
+ "Delay must be in the range of 0-1000 milliseconds.");
+ return -1;
+ }
+ return receiver_.SetMinimumDelay(time_ms);
+}
+
+int AudioCodingModuleImpl::SetMaximumPlayoutDelay(int time_ms) {
+ if ((time_ms < 0) || (time_ms > 10000)) {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
+ "Delay must be in the range of 0-1000 milliseconds.");
+ return -1;
+ }
+ return receiver_.SetMaximumDelay(time_ms);
+}
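+
+// A minimal usage sketch; values must lie in [0, 10000] ms per the checks
+// above, and the numbers here are examples only:
+//
+//   acm->SetMinimumPlayoutDelay(80);   // Never play out earlier than 80 ms.
+//   acm->SetMaximumPlayoutDelay(500);  // Cap buffering at half a second.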
+
+// Get 10 milliseconds of raw audio data to play out.
+// Automatically resamples to the requested frequency.
+int AudioCodingModuleImpl::PlayoutData10Ms(int desired_freq_hz,
+ AudioFrame* audio_frame) {
+ // GetAudio always returns 10 ms, at the requested sample rate.
+ if (receiver_.GetAudio(desired_freq_hz, audio_frame) != 0) {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
+ "PlayoutData failed, RecOut Failed");
+ return -1;
+ }
+ audio_frame->id_ = id_;
+ return 0;
+}
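+
+// A minimal playout-loop sketch, assuming a configured |acm| and an example
+// 16 kHz output rate; |playing| is the caller's own flag:
+//
+//   AudioFrame frame;
+//   while (playing) {
+//     if (acm->PlayoutData10Ms(16000, &frame) == 0) {
+//       // |frame| now holds 160 samples per channel (16000 / 100).
+//     }
+//   }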
+
+/////////////////////////////////////////
+// Statistics
+//
+
+// TODO(turajs) change the return value to void. Also change the corresponding
+// NetEq function.
+int AudioCodingModuleImpl::GetNetworkStatistics(NetworkStatistics* statistics) {
+ receiver_.GetNetworkStatistics(statistics);
+ return 0;
+}
+
+int AudioCodingModuleImpl::RegisterVADCallback(ACMVADCallback* vad_callback) {
+ WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, id_,
+ "RegisterVADCallback()");
+ CriticalSectionScoped lock(callback_crit_sect_.get());
+ vad_callback_ = vad_callback;
+ return 0;
+}
+
+// TODO(kwiberg): Remove this method, and have callers call IncomingPacket
+// instead. The translation logic and state belong with them, not with
+// AudioCodingModuleImpl.
+int AudioCodingModuleImpl::IncomingPayload(const uint8_t* incoming_payload,
+ size_t payload_length,
+ uint8_t payload_type,
+ uint32_t timestamp) {
+ // We are not acquiring any lock when interacting with |aux_rtp_header_|,
+ // since no other method uses this member variable.
+ if (!aux_rtp_header_) {
+ // This is the first time that we are using |aux_rtp_header_|,
+ // so we have to create it.
+ aux_rtp_header_.reset(new WebRtcRTPHeader);
+ aux_rtp_header_->header.payloadType = payload_type;
+ // These don't matter in this case.
+ aux_rtp_header_->header.ssrc = 0;
+ aux_rtp_header_->header.markerBit = false;
+ // Start with an arbitrary sequence number.
+ aux_rtp_header_->header.sequenceNumber = 0x1234;
+ aux_rtp_header_->type.Audio.channel = 1;
+ }
+
+ aux_rtp_header_->header.timestamp = timestamp;
+ IncomingPacket(incoming_payload, payload_length, *aux_rtp_header_);
+ // Get ready for the next payload.
+ aux_rtp_header_->header.sequenceNumber++;
+ return 0;
+}
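+
+// A minimal usage sketch, assuming a configured |acm|; ReadNextFrame() is a
+// hypothetical file reader, and the timestamp must advance by one frame in
+// RTP ticks per call:
+//
+//   uint32_t ts = 0;
+//   while (ReadNextFrame(&buf, &len)) {
+//     acm->IncomingPayload(buf, len, 111 /* payload type */, ts);
+//     ts += 160;  // E.g. 10 ms at 16 kHz.
+//   }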
+
+int AudioCodingModuleImpl::SetOpusApplication(OpusApplicationMode application) {
+ CriticalSectionScoped lock(acm_crit_sect_.get());
+ if (!HaveValidEncoder("SetOpusApplication")) {
+ return -1;
+ }
+ AudioEncoder::Application app;
+ switch (application) {
+ case kVoip:
+ app = AudioEncoder::Application::kSpeech;
+ break;
+ case kAudio:
+ app = AudioEncoder::Application::kAudio;
+ break;
+ default:
+ FATAL();
+ return 0;
+ }
+ return rent_a_codec_.GetEncoderStack()->SetApplication(app) ? 0 : -1;
+}
+
+// Informs the Opus encoder of the maximum playback rate the receiver will
+// render.
+int AudioCodingModuleImpl::SetOpusMaxPlaybackRate(int frequency_hz) {
+ CriticalSectionScoped lock(acm_crit_sect_.get());
+ if (!HaveValidEncoder("SetOpusMaxPlaybackRate")) {
+ return -1;
+ }
+ rent_a_codec_.GetEncoderStack()->SetMaxPlaybackRate(frequency_hz);
+ return 0;
+}
+
+int AudioCodingModuleImpl::EnableOpusDtx() {
+ CriticalSectionScoped lock(acm_crit_sect_.get());
+ if (!HaveValidEncoder("EnableOpusDtx")) {
+ return -1;
+ }
+ return rent_a_codec_.GetEncoderStack()->SetDtx(true) ? 0 : -1;
+}
+
+int AudioCodingModuleImpl::DisableOpusDtx() {
+ CriticalSectionScoped lock(acm_crit_sect_.get());
+ if (!HaveValidEncoder("DisableOpusDtx")) {
+ return -1;
+ }
+ return rent_a_codec_.GetEncoderStack()->SetDtx(false) ? 0 : -1;
+}
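+
+// A minimal usage sketch, assuming an Opus send codec is registered; each
+// call returns -1 if no valid encoder is registered:
+//
+//   acm->SetOpusApplication(kVoip);      // Optimize for speech.
+//   acm->SetOpusMaxPlaybackRate(16000);  // Receiver renders at most 16 kHz.
+//   acm->EnableOpusDtx();                // Stop sending during silence.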
+
+int AudioCodingModuleImpl::PlayoutTimestamp(uint32_t* timestamp) {
+ return receiver_.GetPlayoutTimestamp(timestamp) ? 0 : -1;
+}
+
+bool AudioCodingModuleImpl::HaveValidEncoder(const char* caller_name) const {
+ if (!rent_a_codec_.GetEncoderStack()) {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
+ "%s failed: No send codec is registered.", caller_name);
+ return false;
+ }
+ return true;
+}
+
+int AudioCodingModuleImpl::UnregisterReceiveCodec(uint8_t payload_type) {
+ return receiver_.RemoveCodec(payload_type);
+}
+
+int AudioCodingModuleImpl::EnableNack(size_t max_nack_list_size) {
+ return receiver_.EnableNack(max_nack_list_size);
+}
+
+void AudioCodingModuleImpl::DisableNack() {
+ receiver_.DisableNack();
+}
+
+std::vector<uint16_t> AudioCodingModuleImpl::GetNackList(
+ int64_t round_trip_time_ms) const {
+ return receiver_.GetNackList(round_trip_time_ms);
+}
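+
+// A minimal NACK round-trip sketch, assuming a configured |acm|; the list
+// size and RTT are example values, and |max_nack_list_size| bounds how many
+// sequence numbers the receiver tracks:
+//
+//   acm->EnableNack(250);
+//   std::vector<uint16_t> nack_list = acm->GetNackList(100 /* RTT in ms */);
+//   // Ask the sender to retransmit each sequence number in |nack_list|.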
+
+int AudioCodingModuleImpl::LeastRequiredDelayMs() const {
+ return receiver_.LeastRequiredDelayMs();
+}
+
+void AudioCodingModuleImpl::GetDecodingCallStatistics(
+ AudioDecodingCallStats* call_stats) const {
+ receiver_.GetDecodingCallStatistics(call_stats);
+}
+
+} // namespace acm2
+} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/acm2/audio_coding_module_impl.h b/webrtc/modules/audio_coding/acm2/audio_coding_module_impl.h
new file mode 100644
index 0000000000..926671f199
--- /dev/null
+++ b/webrtc/modules/audio_coding/acm2/audio_coding_module_impl.h
@@ -0,0 +1,283 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_ACM2_AUDIO_CODING_MODULE_IMPL_H_
+#define WEBRTC_MODULES_AUDIO_CODING_ACM2_AUDIO_CODING_MODULE_IMPL_H_
+
+#include <string>
+#include <vector>
+
+#include "webrtc/base/buffer.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/thread_annotations.h"
+#include "webrtc/common_types.h"
+#include "webrtc/engine_configurations.h"
+#include "webrtc/modules/audio_coding/acm2/acm_receiver.h"
+#include "webrtc/modules/audio_coding/acm2/acm_resampler.h"
+#include "webrtc/modules/audio_coding/acm2/codec_manager.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class AudioCodingImpl;
+
+namespace acm2 {
+
+class AudioCodingModuleImpl final : public AudioCodingModule {
+ public:
+ friend webrtc::AudioCodingImpl;
+
+ explicit AudioCodingModuleImpl(const AudioCodingModule::Config& config);
+ ~AudioCodingModuleImpl() override;
+
+ /////////////////////////////////////////
+ // Sender
+ //
+
+ // Can be called multiple times for Codec, CNG, RED.
+ int RegisterSendCodec(const CodecInst& send_codec) override;
+
+ void RegisterExternalSendCodec(
+ AudioEncoder* external_speech_encoder) override;
+
+ // Get current send codec.
+ rtc::Optional<CodecInst> SendCodec() const override;
+
+ // Get current send frequency.
+ int SendFrequency() const override;
+
+ // Sets the bitrate to the specified value in bits/sec. If the codec does
+ // not support the requested value, it will choose an appropriate value
+ // instead.
+ void SetBitRate(int bitrate_bps) override;
+
+ // Register a transport callback which will be
+ // called to deliver the encoded buffers.
+ int RegisterTransportCallback(AudioPacketizationCallback* transport) override;
+
+ // Add 10 ms of raw (PCM) audio data to the encoder.
+ int Add10MsData(const AudioFrame& audio_frame) override;
+
+ /////////////////////////////////////////
+ // (RED) Redundant Coding
+ //
+
+ // Configure RED status i.e. on/off.
+ int SetREDStatus(bool enable_red) override;
+
+ // Get RED status.
+ bool REDStatus() const override;
+
+ /////////////////////////////////////////
+ // (FEC) Forward Error Correction (codec internal)
+ //
+
+ // Configure FEC status i.e. on/off.
+ int SetCodecFEC(bool enable_codec_fec) override;
+
+ // Get FEC status.
+ bool CodecFEC() const override;
+
+ // Set target packet loss rate.
+ int SetPacketLossRate(int loss_rate) override;
+
+ /////////////////////////////////////////
+ // (VAD) Voice Activity Detection
+ // and
+ // (CNG) Comfort Noise Generation
+ //
+
+ int SetVAD(bool enable_dtx = true,
+ bool enable_vad = false,
+ ACMVADMode mode = VADNormal) override;
+
+ int VAD(bool* dtx_enabled,
+ bool* vad_enabled,
+ ACMVADMode* mode) const override;
+
+ int RegisterVADCallback(ACMVADCallback* vad_callback) override;
+
+ /////////////////////////////////////////
+ // Receiver
+ //
+
+ // Initialize the receiver; resets the codec database, etc.
+ int InitializeReceiver() override;
+
+ // Get current receive frequency.
+ int ReceiveFrequency() const override;
+
+ // Get current playout frequency.
+ int PlayoutFrequency() const override;
+
+ // Register possible receive codecs; can be called multiple times
+ // for codecs, CNG, DTMF and RED.
+ int RegisterReceiveCodec(const CodecInst& receive_codec) override;
+
+ int RegisterExternalReceiveCodec(int rtp_payload_type,
+ AudioDecoder* external_decoder,
+ int sample_rate_hz,
+ int num_channels,
+ const std::string& name) override;
+
+ // Get current received codec.
+ int ReceiveCodec(CodecInst* current_codec) const override;
+
+ // Incoming packet from network parsed and ready for decode.
+ int IncomingPacket(const uint8_t* incoming_payload,
+ const size_t payload_length,
+ const WebRtcRTPHeader& rtp_info) override;
+
+ // Incoming payloads without RTP info; the RTP info will be created in ACM.
+ // One use for this API is when pre-encoded files are pushed into ACM.
+ int IncomingPayload(const uint8_t* incoming_payload,
+ const size_t payload_length,
+ uint8_t payload_type,
+ uint32_t timestamp) override;
+
+ // Minimum playout delay.
+ int SetMinimumPlayoutDelay(int time_ms) override;
+
+ // Maximum playout delay.
+ int SetMaximumPlayoutDelay(int time_ms) override;
+
+ // Smallest latency NetEq will maintain.
+ int LeastRequiredDelayMs() const override;
+
+ // Get playout timestamp.
+ int PlayoutTimestamp(uint32_t* timestamp) override;
+
+ // Get 10 milliseconds of raw audio data to play out, and
+ // automatically resample to the requested frequency if > 0.
+ int PlayoutData10Ms(int desired_freq_hz, AudioFrame* audio_frame) override;
+
+ /////////////////////////////////////////
+ // Statistics
+ //
+
+ int GetNetworkStatistics(NetworkStatistics* statistics) override;
+
+ int SetOpusApplication(OpusApplicationMode application) override;
+
+ // If current send codec is Opus, informs it about the maximum playback rate
+ // the receiver will render.
+ int SetOpusMaxPlaybackRate(int frequency_hz) override;
+
+ int EnableOpusDtx() override;
+
+ int DisableOpusDtx() override;
+
+ int UnregisterReceiveCodec(uint8_t payload_type) override;
+
+ int EnableNack(size_t max_nack_list_size) override;
+
+ void DisableNack() override;
+
+ std::vector<uint16_t> GetNackList(int64_t round_trip_time_ms) const override;
+
+ void GetDecodingCallStatistics(AudioDecodingCallStats* stats) const override;
+
+ private:
+ struct InputData {
+ uint32_t input_timestamp;
+ const int16_t* audio;
+ size_t length_per_channel;
+ size_t audio_channel;
+ // If a re-mix is required (up or down), this buffer will store a re-mixed
+ // version of the input.
+ int16_t buffer[WEBRTC_10MS_PCM_AUDIO];
+ };
+
+ // This member class writes values to the named UMA histogram, but only if
+ // the value has changed since the last time (and always for the first call).
+ class ChangeLogger {
+ public:
+ explicit ChangeLogger(const std::string& histogram_name)
+ : histogram_name_(histogram_name) {}
+ // Logs the new value if it is different from the last logged value, or if
+ // this is the first call.
+ void MaybeLog(int value);
+
+ private:
+ int last_value_ = 0;
+ bool first_time_ = true;
+ const std::string histogram_name_;
+ };
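+
+ // A sketch of the intended MaybeLog() behavior; the real body lives in the
+ // .cc file, and the histogram macro below is an assumption, not the actual
+ // call:
+ //
+ //   void ChangeLogger::MaybeLog(int value) {
+ //     if (value != last_value_ || first_time_) {
+ //       first_time_ = false;
+ //       last_value_ = value;
+ //       RTC_HISTOGRAM_COUNTS_100(histogram_name_, value);  // Hypothetical.
+ //     }
+ //   }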
+
+ int Add10MsDataInternal(const AudioFrame& audio_frame, InputData* input_data)
+ EXCLUSIVE_LOCKS_REQUIRED(acm_crit_sect_);
+ int Encode(const InputData& input_data)
+ EXCLUSIVE_LOCKS_REQUIRED(acm_crit_sect_);
+
+ int InitializeReceiverSafe() EXCLUSIVE_LOCKS_REQUIRED(acm_crit_sect_);
+
+ bool HaveValidEncoder(const char* caller_name) const
+ EXCLUSIVE_LOCKS_REQUIRED(acm_crit_sect_);
+
+ // Preprocessing of input audio, including resampling and down-mixing if
+ // required, before pushing audio into the encoder's buffer.
+ //
+ // in_frame: input audio-frame
+ // ptr_out: pointer to output audio_frame. If no preprocessing is required
+ // |ptr_out| will be pointing to |in_frame|, otherwise pointing to
+ // |preprocess_frame_|.
+ //
+ // Return value:
+ // -1: if encountering an error.
+ // 0: otherwise.
+ int PreprocessToAddData(const AudioFrame& in_frame,
+ const AudioFrame** ptr_out)
+ EXCLUSIVE_LOCKS_REQUIRED(acm_crit_sect_);
+
+ // Change required states after starting to receive the codec corresponding
+ // to |index|.
+ int UpdateUponReceivingCodec(int index);
+
+ const rtc::scoped_ptr<CriticalSectionWrapper> acm_crit_sect_;
+ rtc::Buffer encode_buffer_ GUARDED_BY(acm_crit_sect_);
+ int id_; // TODO(henrik.lundin) Make const.
+ uint32_t expected_codec_ts_ GUARDED_BY(acm_crit_sect_);
+ uint32_t expected_in_ts_ GUARDED_BY(acm_crit_sect_);
+ ACMResampler resampler_ GUARDED_BY(acm_crit_sect_);
+ AcmReceiver receiver_; // AcmReceiver has its own internal lock.
+ ChangeLogger bitrate_logger_ GUARDED_BY(acm_crit_sect_);
+ CodecManager codec_manager_ GUARDED_BY(acm_crit_sect_);
+ RentACodec rent_a_codec_ GUARDED_BY(acm_crit_sect_);
+
+ // This is to keep track of CN instances where we can send DTMFs.
+ uint8_t previous_pltype_ GUARDED_BY(acm_crit_sect_);
+
+ // Used when payloads are pushed into ACM without any RTP info.
+ // One example is when a pre-encoded bit-stream is pushed from
+ // a file.
+ // IMPORTANT: this variable is only used in IncomingPayload(), therefore,
+ // no lock acquired when interacting with this variable. If it is going to
+ // be used in other methods, locks need to be taken.
+ rtc::scoped_ptr<WebRtcRTPHeader> aux_rtp_header_;
+
+ bool receiver_initialized_ GUARDED_BY(acm_crit_sect_);
+
+ AudioFrame preprocess_frame_ GUARDED_BY(acm_crit_sect_);
+ bool first_10ms_data_ GUARDED_BY(acm_crit_sect_);
+
+ bool first_frame_ GUARDED_BY(acm_crit_sect_);
+ uint32_t last_timestamp_ GUARDED_BY(acm_crit_sect_);
+ uint32_t last_rtp_timestamp_ GUARDED_BY(acm_crit_sect_);
+
+ const rtc::scoped_ptr<CriticalSectionWrapper> callback_crit_sect_;
+ AudioPacketizationCallback* packetization_callback_
+ GUARDED_BY(callback_crit_sect_);
+ ACMVADCallback* vad_callback_ GUARDED_BY(callback_crit_sect_);
+};
+
+} // namespace acm2
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_ACM2_AUDIO_CODING_MODULE_IMPL_H_
diff --git a/webrtc/modules/audio_coding/acm2/audio_coding_module_unittest_oldapi.cc b/webrtc/modules/audio_coding/acm2/audio_coding_module_unittest_oldapi.cc
new file mode 100644
index 0000000000..6f82a96ee5
--- /dev/null
+++ b/webrtc/modules/audio_coding/acm2/audio_coding_module_unittest_oldapi.cc
@@ -0,0 +1,1789 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <string.h>
+#include <vector>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/md5digest.h"
+#include "webrtc/base/platform_thread.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/thread_annotations.h"
+#include "webrtc/modules/audio_coding/codecs/audio_encoder.h"
+#include "webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.h"
+#include "webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.h"
+#include "webrtc/modules/audio_coding/codecs/isac/main/include/audio_encoder_isac.h"
+#include "webrtc/modules/audio_coding/codecs/mock/mock_audio_encoder.h"
+#include "webrtc/modules/audio_coding/acm2/acm_receive_test_oldapi.h"
+#include "webrtc/modules/audio_coding/acm2/acm_send_test_oldapi.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module_typedefs.h"
+#include "webrtc/modules/audio_coding/neteq/audio_decoder_impl.h"
+#include "webrtc/modules/audio_coding/neteq/mock/mock_audio_decoder.h"
+#include "webrtc/modules/audio_coding/neteq/tools/audio_checksum.h"
+#include "webrtc/modules/audio_coding/neteq/tools/audio_loop.h"
+#include "webrtc/modules/audio_coding/neteq/tools/constant_pcm_packet_source.h"
+#include "webrtc/modules/audio_coding/neteq/tools/input_audio_file.h"
+#include "webrtc/modules/audio_coding/neteq/tools/output_audio_file.h"
+#include "webrtc/modules/audio_coding/neteq/tools/packet.h"
+#include "webrtc/modules/audio_coding/neteq/tools/rtp_file_source.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/system_wrappers/include/clock.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/event_wrapper.h"
+#include "webrtc/system_wrappers/include/sleep.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+using ::testing::AtLeast;
+using ::testing::Invoke;
+using ::testing::_;
+
+namespace webrtc {
+
+namespace {
+const int kSampleRateHz = 16000;
+const int kNumSamples10ms = kSampleRateHz / 100;
+const int kFrameSizeMs = 10; // Multiple of 10.
+const int kFrameSizeSamples = kFrameSizeMs / 10 * kNumSamples10ms;
+const int kPayloadSizeBytes = kFrameSizeSamples * sizeof(int16_t);
+const uint8_t kPayloadType = 111;
+} // namespace
+
+class RtpUtility {
+ public:
+ RtpUtility(int samples_per_packet, uint8_t payload_type)
+ : samples_per_packet_(samples_per_packet), payload_type_(payload_type) {}
+
+ virtual ~RtpUtility() {}
+
+ void Populate(WebRtcRTPHeader* rtp_header) {
+ rtp_header->header.sequenceNumber = 0xABCD;
+ rtp_header->header.timestamp = 0xABCDEF01;
+ rtp_header->header.payloadType = payload_type_;
+ rtp_header->header.markerBit = false;
+ rtp_header->header.ssrc = 0x1234;
+ rtp_header->header.numCSRCs = 0;
+ rtp_header->frameType = kAudioFrameSpeech;
+
+ rtp_header->header.payload_type_frequency = kSampleRateHz;
+ rtp_header->type.Audio.channel = 1;
+ rtp_header->type.Audio.isCNG = false;
+ }
+
+ void Forward(WebRtcRTPHeader* rtp_header) {
+ ++rtp_header->header.sequenceNumber;
+ rtp_header->header.timestamp += samples_per_packet_;
+ }
+
+ private:
+ int samples_per_packet_;
+ uint8_t payload_type_;
+};
+
+class PacketizationCallbackStubOldApi : public AudioPacketizationCallback {
+ public:
+ PacketizationCallbackStubOldApi()
+ : num_calls_(0),
+ last_frame_type_(kEmptyFrame),
+ last_payload_type_(-1),
+ last_timestamp_(0),
+ crit_sect_(CriticalSectionWrapper::CreateCriticalSection()) {}
+
+ int32_t SendData(FrameType frame_type,
+ uint8_t payload_type,
+ uint32_t timestamp,
+ const uint8_t* payload_data,
+ size_t payload_len_bytes,
+ const RTPFragmentationHeader* fragmentation) override {
+ CriticalSectionScoped lock(crit_sect_.get());
+ ++num_calls_;
+ last_frame_type_ = frame_type;
+ last_payload_type_ = payload_type;
+ last_timestamp_ = timestamp;
+ last_payload_vec_.assign(payload_data, payload_data + payload_len_bytes);
+ return 0;
+ }
+
+ int num_calls() const {
+ CriticalSectionScoped lock(crit_sect_.get());
+ return num_calls_;
+ }
+
+ int last_payload_len_bytes() const {
+ CriticalSectionScoped lock(crit_sect_.get());
+ return last_payload_vec_.size();
+ }
+
+ FrameType last_frame_type() const {
+ CriticalSectionScoped lock(crit_sect_.get());
+ return last_frame_type_;
+ }
+
+ int last_payload_type() const {
+ CriticalSectionScoped lock(crit_sect_.get());
+ return last_payload_type_;
+ }
+
+ uint32_t last_timestamp() const {
+ CriticalSectionScoped lock(crit_sect_.get());
+ return last_timestamp_;
+ }
+
+ void SwapBuffers(std::vector<uint8_t>* payload) {
+ CriticalSectionScoped lock(crit_sect_.get());
+ last_payload_vec_.swap(*payload);
+ }
+
+ private:
+ int num_calls_ GUARDED_BY(crit_sect_);
+ FrameType last_frame_type_ GUARDED_BY(crit_sect_);
+ int last_payload_type_ GUARDED_BY(crit_sect_);
+ uint32_t last_timestamp_ GUARDED_BY(crit_sect_);
+ std::vector<uint8_t> last_payload_vec_ GUARDED_BY(crit_sect_);
+ const rtc::scoped_ptr<CriticalSectionWrapper> crit_sect_;
+};
+
+class AudioCodingModuleTestOldApi : public ::testing::Test {
+ protected:
+ AudioCodingModuleTestOldApi()
+ : id_(1),
+ rtp_utility_(new RtpUtility(kFrameSizeSamples, kPayloadType)),
+ clock_(Clock::GetRealTimeClock()) {}
+
+ ~AudioCodingModuleTestOldApi() {}
+
+ void TearDown() {}
+
+ void SetUp() {
+ acm_.reset(AudioCodingModule::Create(id_, clock_));
+
+ rtp_utility_->Populate(&rtp_header_);
+
+ input_frame_.sample_rate_hz_ = kSampleRateHz;
+ input_frame_.num_channels_ = 1;
+ input_frame_.samples_per_channel_ = kSampleRateHz * 10 / 1000; // 10 ms.
+ static_assert(kSampleRateHz * 10 / 1000 <= AudioFrame::kMaxDataSizeSamples,
+ "audio frame too small");
+ memset(input_frame_.data_,
+ 0,
+ input_frame_.samples_per_channel_ * sizeof(input_frame_.data_[0]));
+
+ ASSERT_EQ(0, acm_->RegisterTransportCallback(&packet_cb_));
+
+ SetUpL16Codec();
+ }
+
+ // Set up L16 codec.
+ virtual void SetUpL16Codec() {
+ ASSERT_EQ(0, AudioCodingModule::Codec("L16", &codec_, kSampleRateHz, 1));
+ codec_.pltype = kPayloadType;
+ }
+
+ virtual void RegisterCodec() {
+ ASSERT_EQ(0, acm_->RegisterReceiveCodec(codec_));
+ ASSERT_EQ(0, acm_->RegisterSendCodec(codec_));
+ }
+
+ virtual void InsertPacketAndPullAudio() {
+ InsertPacket();
+ PullAudio();
+ }
+
+ virtual void InsertPacket() {
+ const uint8_t kPayload[kPayloadSizeBytes] = {0};
+ ASSERT_EQ(0,
+ acm_->IncomingPacket(kPayload, kPayloadSizeBytes, rtp_header_));
+ rtp_utility_->Forward(&rtp_header_);
+ }
+
+ virtual void PullAudio() {
+ AudioFrame audio_frame;
+ ASSERT_EQ(0, acm_->PlayoutData10Ms(-1, &audio_frame));
+ }
+
+ virtual void InsertAudio() {
+ ASSERT_GE(acm_->Add10MsData(input_frame_), 0);
+ input_frame_.timestamp_ += kNumSamples10ms;
+ }
+
+ virtual void VerifyEncoding() {
+ int last_length = packet_cb_.last_payload_len_bytes();
+ EXPECT_TRUE(last_length == 2 * codec_.pacsize || last_length == 0)
+ << "Last encoded packet was " << last_length << " bytes.";
+ }
+
+ virtual void InsertAudioAndVerifyEncoding() {
+ InsertAudio();
+ VerifyEncoding();
+ }
+
+ const int id_;
+ rtc::scoped_ptr<RtpUtility> rtp_utility_;
+ rtc::scoped_ptr<AudioCodingModule> acm_;
+ PacketizationCallbackStubOldApi packet_cb_;
+ WebRtcRTPHeader rtp_header_;
+ AudioFrame input_frame_;
+ CodecInst codec_;
+ Clock* clock_;
+};
+
+// Check if the statistics are initialized correctly. Before any call to ACM
+// all fields have to be zero.
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_InitializedToZero DISABLED_InitializedToZero
+#else
+#define MAYBE_InitializedToZero InitializedToZero
+#endif
+TEST_F(AudioCodingModuleTestOldApi, MAYBE_InitializedToZero) {
+ RegisterCodec();
+ AudioDecodingCallStats stats;
+ acm_->GetDecodingCallStatistics(&stats);
+ EXPECT_EQ(0, stats.calls_to_neteq);
+ EXPECT_EQ(0, stats.calls_to_silence_generator);
+ EXPECT_EQ(0, stats.decoded_normal);
+ EXPECT_EQ(0, stats.decoded_cng);
+ EXPECT_EQ(0, stats.decoded_plc);
+ EXPECT_EQ(0, stats.decoded_plc_cng);
+}
+
+// Insert some packets and pull audio. Check statistics are valid. Then,
+// simulate packet loss and check if PLC and PLC-to-CNG statistics are
+// correctly updated.
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_NetEqCalls DISABLED_NetEqCalls
+#else
+#define MAYBE_NetEqCalls NetEqCalls
+#endif
+TEST_F(AudioCodingModuleTestOldApi, MAYBE_NetEqCalls) {
+ RegisterCodec();
+ AudioDecodingCallStats stats;
+ const int kNumNormalCalls = 10;
+
+ for (int num_calls = 0; num_calls < kNumNormalCalls; ++num_calls) {
+ InsertPacketAndPullAudio();
+ }
+ acm_->GetDecodingCallStatistics(&stats);
+ EXPECT_EQ(kNumNormalCalls, stats.calls_to_neteq);
+ EXPECT_EQ(0, stats.calls_to_silence_generator);
+ EXPECT_EQ(kNumNormalCalls, stats.decoded_normal);
+ EXPECT_EQ(0, stats.decoded_cng);
+ EXPECT_EQ(0, stats.decoded_plc);
+ EXPECT_EQ(0, stats.decoded_plc_cng);
+
+ const int kNumPlc = 3;
+ const int kNumPlcCng = 5;
+
+ // Simulate packet-loss. NetEq first performs PLC then PLC fades to CNG.
+ for (int n = 0; n < kNumPlc + kNumPlcCng; ++n) {
+ PullAudio();
+ }
+ acm_->GetDecodingCallStatistics(&stats);
+ EXPECT_EQ(kNumNormalCalls + kNumPlc + kNumPlcCng, stats.calls_to_neteq);
+ EXPECT_EQ(0, stats.calls_to_silence_generator);
+ EXPECT_EQ(kNumNormalCalls, stats.decoded_normal);
+ EXPECT_EQ(0, stats.decoded_cng);
+ EXPECT_EQ(kNumPlc, stats.decoded_plc);
+ EXPECT_EQ(kNumPlcCng, stats.decoded_plc_cng);
+}
+
+TEST_F(AudioCodingModuleTestOldApi, VerifyOutputFrame) {
+ AudioFrame audio_frame;
+ const int kSampleRateHz = 32000;
+ EXPECT_EQ(0, acm_->PlayoutData10Ms(kSampleRateHz, &audio_frame));
+ EXPECT_EQ(id_, audio_frame.id_);
+ EXPECT_EQ(0u, audio_frame.timestamp_);
+ EXPECT_GT(audio_frame.num_channels_, 0u);
+ EXPECT_EQ(static_cast<size_t>(kSampleRateHz / 100),
+ audio_frame.samples_per_channel_);
+ EXPECT_EQ(kSampleRateHz, audio_frame.sample_rate_hz_);
+}
+
+TEST_F(AudioCodingModuleTestOldApi, FailOnZeroDesiredFrequency) {
+ AudioFrame audio_frame;
+ EXPECT_EQ(-1, acm_->PlayoutData10Ms(0, &audio_frame));
+}
+
+// Checks that the transport callback is invoked once for each speech packet.
+// Also checks that the frame type is kAudioFrameSpeech.
+TEST_F(AudioCodingModuleTestOldApi, TransportCallbackIsInvokedForEachPacket) {
+ const int k10MsBlocksPerPacket = 3;
+ codec_.pacsize = k10MsBlocksPerPacket * kSampleRateHz / 100;
+ RegisterCodec();
+ const int kLoops = 10;
+ for (int i = 0; i < kLoops; ++i) {
+ EXPECT_EQ(i / k10MsBlocksPerPacket, packet_cb_.num_calls());
+ if (packet_cb_.num_calls() > 0)
+ EXPECT_EQ(kAudioFrameSpeech, packet_cb_.last_frame_type());
+ InsertAudioAndVerifyEncoding();
+ }
+ EXPECT_EQ(kLoops / k10MsBlocksPerPacket, packet_cb_.num_calls());
+ EXPECT_EQ(kAudioFrameSpeech, packet_cb_.last_frame_type());
+}
+
+#if defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX)
+// Verifies that the RTP timestamp series is not reset when the codec is
+// changed.
+TEST_F(AudioCodingModuleTestOldApi, TimestampSeriesContinuesWhenCodecChanges) {
+ RegisterCodec(); // This registers the default codec.
+ uint32_t expected_ts = input_frame_.timestamp_;
+ int blocks_per_packet = codec_.pacsize / (kSampleRateHz / 100);
+ // Encode 5 packets of the first codec type.
+ const int kNumPackets1 = 5;
+ for (int j = 0; j < kNumPackets1; ++j) {
+ for (int i = 0; i < blocks_per_packet; ++i) {
+ EXPECT_EQ(j, packet_cb_.num_calls());
+ InsertAudio();
+ }
+ EXPECT_EQ(j + 1, packet_cb_.num_calls());
+ EXPECT_EQ(expected_ts, packet_cb_.last_timestamp());
+ expected_ts += codec_.pacsize;
+ }
+
+ // Change codec.
+ ASSERT_EQ(0, AudioCodingModule::Codec("ISAC", &codec_, kSampleRateHz, 1));
+ RegisterCodec();
+ blocks_per_packet = codec_.pacsize / (kSampleRateHz / 100);
+ // Encode another 5 packets.
+ const int kNumPackets2 = 5;
+ for (int j = 0; j < kNumPackets2; ++j) {
+ for (int i = 0; i < blocks_per_packet; ++i) {
+ EXPECT_EQ(kNumPackets1 + j, packet_cb_.num_calls());
+ InsertAudio();
+ }
+ EXPECT_EQ(kNumPackets1 + j + 1, packet_cb_.num_calls());
+ EXPECT_EQ(expected_ts, packet_cb_.last_timestamp());
+ expected_ts += codec_.pacsize;
+ }
+}
+#endif
+
+// Introduce this class to set different expectations on the number of encoded
+// bytes. This class expects all encoded packets to be 9 bytes (matching one
+// CNG SID frame) or 0 bytes. This test depends on |input_frame_| containing
+// (near-)zero values. It also introduces a way to register comfort noise with
+// a custom payload type.
+class AudioCodingModuleTestWithComfortNoiseOldApi
+ : public AudioCodingModuleTestOldApi {
+ protected:
+ void RegisterCngCodec(int rtp_payload_type) {
+ CodecInst codec;
+ AudioCodingModule::Codec("CN", &codec, kSampleRateHz, 1);
+ codec.pltype = rtp_payload_type;
+ ASSERT_EQ(0, acm_->RegisterReceiveCodec(codec));
+ ASSERT_EQ(0, acm_->RegisterSendCodec(codec));
+ }
+
+ void VerifyEncoding() override {
+ int last_length = packet_cb_.last_payload_len_bytes();
+ EXPECT_TRUE(last_length == 9 || last_length == 0)
+ << "Last encoded packet was " << last_length << " bytes.";
+ }
+
+ void DoTest(int blocks_per_packet, int cng_pt) {
+ const int kLoops = 40;
+ // This array defines the expected frame types and when they should arrive.
+ // We expect a frame to arrive each time the speech encoder would have
+ // produced a packet, and once every 100 ms the frame should be non-empty,
+ // that is, contain comfort noise.
+ const struct {
+ int ix;
+ FrameType type;
+ } expectation[] = {{2, kAudioFrameCN},
+ {5, kEmptyFrame},
+ {8, kEmptyFrame},
+ {11, kAudioFrameCN},
+ {14, kEmptyFrame},
+ {17, kEmptyFrame},
+ {20, kAudioFrameCN},
+ {23, kEmptyFrame},
+ {26, kEmptyFrame},
+ {29, kEmptyFrame},
+ {32, kAudioFrameCN},
+ {35, kEmptyFrame},
+ {38, kEmptyFrame}};
+ for (int i = 0; i < kLoops; ++i) {
+ int num_calls_before = packet_cb_.num_calls();
+ EXPECT_EQ(i / blocks_per_packet, num_calls_before);
+ InsertAudioAndVerifyEncoding();
+ int num_calls = packet_cb_.num_calls();
+ if (num_calls == num_calls_before + 1) {
+ EXPECT_EQ(expectation[num_calls - 1].ix, i);
+ EXPECT_EQ(expectation[num_calls - 1].type, packet_cb_.last_frame_type())
+ << "Wrong frame type for lap " << i;
+ EXPECT_EQ(cng_pt, packet_cb_.last_payload_type());
+ } else {
+ EXPECT_EQ(num_calls, num_calls_before);
+ }
+ }
+ }
+};
+
+// Checks that the transport callback is invoked once per frame period of the
+// underlying speech encoder, even when comfort noise is produced.
+// Also checks that the frame type is kAudioFrameCN or kEmptyFrame.
+// This test and the next check the same thing, but differ in the order of
+// speech codec and CNG registration.
+TEST_F(AudioCodingModuleTestWithComfortNoiseOldApi,
+ TransportCallbackTestForComfortNoiseRegisterCngLast) {
+ const int k10MsBlocksPerPacket = 3;
+ codec_.pacsize = k10MsBlocksPerPacket * kSampleRateHz / 100;
+ RegisterCodec();
+ const int kCngPayloadType = 105;
+ RegisterCngCodec(kCngPayloadType);
+ ASSERT_EQ(0, acm_->SetVAD(true, true));
+ DoTest(k10MsBlocksPerPacket, kCngPayloadType);
+}
+
+TEST_F(AudioCodingModuleTestWithComfortNoiseOldApi,
+ TransportCallbackTestForComfortNoiseRegisterCngFirst) {
+ const int k10MsBlocksPerPacket = 3;
+ codec_.pacsize = k10MsBlocksPerPacket * kSampleRateHz / 100;
+ const int kCngPayloadType = 105;
+ RegisterCngCodec(kCngPayloadType);
+ RegisterCodec();
+ ASSERT_EQ(0, acm_->SetVAD(true, true));
+ DoTest(k10MsBlocksPerPacket, kCngPayloadType);
+}
+
+// A multi-threaded test for ACM. This base class uses the PCM16b 16 kHz
+// codec, while the derived class AcmIsacMtTestOldApi uses iSAC.
+class AudioCodingModuleMtTestOldApi : public AudioCodingModuleTestOldApi {
+ protected:
+ static const int kNumPackets = 500;
+ static const int kNumPullCalls = 500;
+
+ AudioCodingModuleMtTestOldApi()
+ : AudioCodingModuleTestOldApi(),
+ send_thread_(CbSendThread, this, "send"),
+ insert_packet_thread_(CbInsertPacketThread, this, "insert_packet"),
+ pull_audio_thread_(CbPullAudioThread, this, "pull_audio"),
+ test_complete_(EventWrapper::Create()),
+ send_count_(0),
+ insert_packet_count_(0),
+ pull_audio_count_(0),
+ crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
+ next_insert_packet_time_ms_(0),
+ fake_clock_(new SimulatedClock(0)) {
+ clock_ = fake_clock_.get();
+ }
+
+ void SetUp() {
+ AudioCodingModuleTestOldApi::SetUp();
+ RegisterCodec(); // Must be called before the threads start below.
+ StartThreads();
+ }
+
+ void StartThreads() {
+ send_thread_.Start();
+ send_thread_.SetPriority(rtc::kRealtimePriority);
+ insert_packet_thread_.Start();
+ insert_packet_thread_.SetPriority(rtc::kRealtimePriority);
+ pull_audio_thread_.Start();
+ pull_audio_thread_.SetPriority(rtc::kRealtimePriority);
+ }
+
+ void TearDown() {
+ AudioCodingModuleTestOldApi::TearDown();
+ pull_audio_thread_.Stop();
+ send_thread_.Stop();
+ insert_packet_thread_.Stop();
+ }
+
+ EventTypeWrapper RunTest() {
+ return test_complete_->Wait(10 * 60 * 1000); // 10 minutes' timeout.
+ }
+
+ virtual bool TestDone() {
+ if (packet_cb_.num_calls() > kNumPackets) {
+ CriticalSectionScoped lock(crit_sect_.get());
+ if (pull_audio_count_ > kNumPullCalls) {
+ // Both conditions for completion are met. End the test.
+ return true;
+ }
+ }
+ return false;
+ }
+
+ static bool CbSendThread(void* context) {
+ return reinterpret_cast<AudioCodingModuleMtTestOldApi*>(context)
+ ->CbSendImpl();
+ }
+
+ // The send thread doesn't have to care about the current simulated time,
+ // since only the AcmReceiver is using the clock.
+ bool CbSendImpl() {
+ SleepMs(1);
+ if (HasFatalFailure()) {
+ // End the test early if a fatal failure (ASSERT_*) has occurred.
+ test_complete_->Set();
+ }
+ ++send_count_;
+ InsertAudioAndVerifyEncoding();
+ if (TestDone()) {
+ test_complete_->Set();
+ }
+ return true;
+ }
+
+ static bool CbInsertPacketThread(void* context) {
+ return reinterpret_cast<AudioCodingModuleMtTestOldApi*>(context)
+ ->CbInsertPacketImpl();
+ }
+
+ bool CbInsertPacketImpl() {
+ SleepMs(1);
+ {
+ CriticalSectionScoped lock(crit_sect_.get());
+ if (clock_->TimeInMilliseconds() < next_insert_packet_time_ms_) {
+ return true;
+ }
+ next_insert_packet_time_ms_ += 10;
+ }
+ // Now we're not holding the crit sect when calling ACM.
+ ++insert_packet_count_;
+ InsertPacket();
+ return true;
+ }
+
+ static bool CbPullAudioThread(void* context) {
+ return reinterpret_cast<AudioCodingModuleMtTestOldApi*>(context)
+ ->CbPullAudioImpl();
+ }
+
+ bool CbPullAudioImpl() {
+ SleepMs(1);
+ {
+ CriticalSectionScoped lock(crit_sect_.get());
+ // Don't let the insert thread fall behind.
+ if (next_insert_packet_time_ms_ < clock_->TimeInMilliseconds()) {
+ return true;
+ }
+ ++pull_audio_count_;
+ }
+ // Now we're not holding the crit sect when calling ACM.
+ PullAudio();
+ fake_clock_->AdvanceTimeMilliseconds(10);
+ return true;
+ }
+
+ rtc::PlatformThread send_thread_;
+ rtc::PlatformThread insert_packet_thread_;
+ rtc::PlatformThread pull_audio_thread_;
+ const rtc::scoped_ptr<EventWrapper> test_complete_;
+ int send_count_;
+ int insert_packet_count_;
+ int pull_audio_count_ GUARDED_BY(crit_sect_);
+ const rtc::scoped_ptr<CriticalSectionWrapper> crit_sect_;
+ int64_t next_insert_packet_time_ms_ GUARDED_BY(crit_sect_);
+ rtc::scoped_ptr<SimulatedClock> fake_clock_;
+};
+
+#if defined(WEBRTC_IOS)
+#define MAYBE_DoTest DISABLED_DoTest
+#else
+#define MAYBE_DoTest DoTest
+#endif
+TEST_F(AudioCodingModuleMtTestOldApi, MAYBE_DoTest) {
+ EXPECT_EQ(kEventSignaled, RunTest());
+}
+
+// This is a multi-threaded ACM test using iSAC. The test encodes audio
+// from a PCM file. The most recent encoded frame is used as input to the
+// receiving part. Depending on timing, it may happen that the same RTP packet
+// is inserted into the receiver multiple times, but this is a valid use-case,
+// and simplifies the test code a lot.
+class AcmIsacMtTestOldApi : public AudioCodingModuleMtTestOldApi {
+ protected:
+ static const int kNumPackets = 500;
+ static const int kNumPullCalls = 500;
+
+ AcmIsacMtTestOldApi()
+ : AudioCodingModuleMtTestOldApi(), last_packet_number_(0) {}
+
+ ~AcmIsacMtTestOldApi() {}
+
+ void SetUp() {
+ AudioCodingModuleTestOldApi::SetUp();
+ RegisterCodec(); // Must be called before the threads start below.
+
+ // Set up input audio source to read from specified file, loop after 5
+ // seconds, and deliver blocks of 10 ms.
+ const std::string input_file_name =
+ webrtc::test::ResourcePath("audio_coding/speech_mono_16kHz", "pcm");
+ audio_loop_.Init(input_file_name, 5 * kSampleRateHz, kNumSamples10ms);
+
+ // Generate one packet to have something to insert.
+ int loop_counter = 0;
+ while (packet_cb_.last_payload_len_bytes() == 0) {
+ InsertAudio();
+ ASSERT_LT(loop_counter++, 10);
+ }
+ // Set |last_packet_number_| to one less than |num_calls| so that the packet
+ // will be fetched in the next InsertPacket() call.
+ last_packet_number_ = packet_cb_.num_calls() - 1;
+
+ StartThreads();
+ }
+
+ void RegisterCodec() override {
+ static_assert(kSampleRateHz == 16000, "test designed for iSAC 16 kHz");
+ AudioCodingModule::Codec("ISAC", &codec_, kSampleRateHz, 1);
+ codec_.pltype = kPayloadType;
+
+ // Register iSAC codec in ACM, effectively unregistering the PCM16B codec
+ // registered in AudioCodingModuleTestOldApi::SetUp().
+ ASSERT_EQ(0, acm_->RegisterReceiveCodec(codec_));
+ ASSERT_EQ(0, acm_->RegisterSendCodec(codec_));
+ }
+
+ void InsertPacket() {
+ int num_calls = packet_cb_.num_calls(); // Store locally for thread safety.
+ if (num_calls > last_packet_number_) {
+ // Get the new payload out from the callback handler.
+ // Note that since we swap buffers here instead of directly inserting
+ // a pointer to the data in |packet_cb_|, we avoid locking the callback
+ // for the duration of the IncomingPacket() call.
+ packet_cb_.SwapBuffers(&last_payload_vec_);
+ ASSERT_GT(last_payload_vec_.size(), 0u);
+ rtp_utility_->Forward(&rtp_header_);
+ last_packet_number_ = num_calls;
+ }
+ ASSERT_GT(last_payload_vec_.size(), 0u);
+ ASSERT_EQ(
+ 0,
+ acm_->IncomingPacket(
+ &last_payload_vec_[0], last_payload_vec_.size(), rtp_header_));
+ }
+
+ void InsertAudio() {
+ // TODO(kwiberg): Use std::copy here. Might be complications because AFAICS
+ // this call confuses the number of samples with the number of bytes, and
+ // ends up copying only half of what it should.
+ memcpy(input_frame_.data_, audio_loop_.GetNextBlock().data(),
+ kNumSamples10ms);
+ AudioCodingModuleTestOldApi::InsertAudio();
+ }
+
+ // Override the verification function with a no-op, since iSAC produces
+ // variable payload sizes.
+ void VerifyEncoding() override {}
+
+ // This method is the same as AudioCodingModuleMtTestOldApi::TestDone(), but
+ // here it is using the constants defined in this class (i.e., shorter test
+ // run).
+ virtual bool TestDone() {
+ if (packet_cb_.num_calls() > kNumPackets) {
+ CriticalSectionScoped lock(crit_sect_.get());
+ if (pull_audio_count_ > kNumPullCalls) {
+ // Both conditions for completion are met. End the test.
+ return true;
+ }
+ }
+ return false;
+ }
+
+ int last_packet_number_;
+ std::vector<uint8_t> last_payload_vec_;
+ test::AudioLoop audio_loop_;
+};
+
+#if defined(WEBRTC_IOS)
+#define MAYBE_DoTest DISABLED_DoTest
+#else
+#define MAYBE_DoTest DoTest
+#endif
+#if defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX)
+TEST_F(AcmIsacMtTestOldApi, MAYBE_DoTest) {
+ EXPECT_EQ(kEventSignaled, RunTest());
+}
+#endif
+
+class AcmReRegisterIsacMtTestOldApi : public AudioCodingModuleTestOldApi {
+ protected:
+ static const int kRegisterAfterNumPackets = 5;
+ static const int kNumPackets = 10;
+ static const int kPacketSizeMs = 30;
+ static const int kPacketSizeSamples = kPacketSizeMs * 16;
+
+ AcmReRegisterIsacMtTestOldApi()
+ : AudioCodingModuleTestOldApi(),
+ receive_thread_(CbReceiveThread, this, "receive"),
+ codec_registration_thread_(CbCodecRegistrationThread,
+ this,
+ "codec_registration"),
+ test_complete_(EventWrapper::Create()),
+ crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
+ codec_registered_(false),
+ receive_packet_count_(0),
+ next_insert_packet_time_ms_(0),
+ fake_clock_(new SimulatedClock(0)) {
+ AudioEncoderIsac::Config config;
+ config.payload_type = kPayloadType;
+ isac_encoder_.reset(new AudioEncoderIsac(config));
+ clock_ = fake_clock_.get();
+ }
+
+ void SetUp() {
+ AudioCodingModuleTestOldApi::SetUp();
+ // Set up input audio source to read from specified file, loop after 5
+ // seconds, and deliver blocks of 10 ms.
+ const std::string input_file_name =
+ webrtc::test::ResourcePath("audio_coding/speech_mono_16kHz", "pcm");
+ audio_loop_.Init(input_file_name, 5 * kSampleRateHz, kNumSamples10ms);
+ RegisterCodec(); // Must be called before the threads start below.
+ StartThreads();
+ }
+
+ void RegisterCodec() override {
+ static_assert(kSampleRateHz == 16000, "test designed for iSAC 16 kHz");
+ AudioCodingModule::Codec("ISAC", &codec_, kSampleRateHz, 1);
+ codec_.pltype = kPayloadType;
+
+ // Register iSAC codec in ACM, effectively unregistering the PCM16B codec
+ // registered in AudioCodingModuleTestOldApi::SetUp().
+ // Only register the decoder for now. The encoder is registered later.
+ ASSERT_EQ(0, acm_->RegisterReceiveCodec(codec_));
+ }
+
+ void StartThreads() {
+ receive_thread_.Start();
+ receive_thread_.SetPriority(rtc::kRealtimePriority);
+ codec_registration_thread_.Start();
+ codec_registration_thread_.SetPriority(rtc::kRealtimePriority);
+ }
+
+ void TearDown() {
+ AudioCodingModuleTestOldApi::TearDown();
+ receive_thread_.Stop();
+ codec_registration_thread_.Stop();
+ }
+
+ EventTypeWrapper RunTest() {
+ return test_complete_->Wait(10 * 60 * 1000); // 10 minutes' timeout.
+ }
+
+ static bool CbReceiveThread(void* context) {
+ return reinterpret_cast<AcmReRegisterIsacMtTestOldApi*>(context)
+ ->CbReceiveImpl();
+ }
+
+ bool CbReceiveImpl() {
+ SleepMs(1);
+ const size_t max_encoded_bytes = isac_encoder_->MaxEncodedBytes();
+ rtc::scoped_ptr<uint8_t[]> encoded(new uint8_t[max_encoded_bytes]);
+ AudioEncoder::EncodedInfo info;
+ {
+ CriticalSectionScoped lock(crit_sect_.get());
+ if (clock_->TimeInMilliseconds() < next_insert_packet_time_ms_) {
+ return true;
+ }
+ next_insert_packet_time_ms_ += kPacketSizeMs;
+ ++receive_packet_count_;
+
+ // Encode new frame.
+ uint32_t input_timestamp = rtp_header_.header.timestamp;
+ while (info.encoded_bytes == 0) {
+ info =
+ isac_encoder_->Encode(input_timestamp, audio_loop_.GetNextBlock(),
+ max_encoded_bytes, encoded.get());
+ input_timestamp += 160; // 10 ms at 16 kHz.
+ }
+ EXPECT_EQ(rtp_header_.header.timestamp + kPacketSizeSamples,
+ input_timestamp);
+ EXPECT_EQ(rtp_header_.header.timestamp, info.encoded_timestamp);
+ EXPECT_EQ(rtp_header_.header.payloadType, info.payload_type);
+ }
+ // Now we're not holding the crit sect when calling ACM.
+
+ // Insert into ACM.
+ EXPECT_EQ(0, acm_->IncomingPacket(encoded.get(), info.encoded_bytes,
+ rtp_header_));
+
+ // Pull audio.
+ for (int i = 0; i < rtc::CheckedDivExact(kPacketSizeMs, 10); ++i) {
+ AudioFrame audio_frame;
+ EXPECT_EQ(0, acm_->PlayoutData10Ms(-1 /* default output frequency */,
+ &audio_frame));
+ fake_clock_->AdvanceTimeMilliseconds(10);
+ }
+ rtp_utility_->Forward(&rtp_header_);
+ return true;
+ }
+
+ static bool CbCodecRegistrationThread(void* context) {
+ return reinterpret_cast<AcmReRegisterIsacMtTestOldApi*>(context)
+ ->CbCodecRegistrationImpl();
+ }
+
+ bool CbCodecRegistrationImpl() {
+ SleepMs(1);
+ if (HasFatalFailure()) {
+ // End the test early if a fatal failure (ASSERT_*) has occurred.
+ test_complete_->Set();
+ }
+ CriticalSectionScoped lock(crit_sect_.get());
+ if (!codec_registered_ &&
+ receive_packet_count_ > kRegisterAfterNumPackets) {
+ // Register the iSAC encoder.
+ EXPECT_EQ(0, acm_->RegisterSendCodec(codec_));
+ codec_registered_ = true;
+ }
+ if (codec_registered_ && receive_packet_count_ > kNumPackets) {
+ test_complete_->Set();
+ }
+ return true;
+ }
+
+ rtc::PlatformThread receive_thread_;
+ rtc::PlatformThread codec_registration_thread_;
+ const rtc::scoped_ptr<EventWrapper> test_complete_;
+ const rtc::scoped_ptr<CriticalSectionWrapper> crit_sect_;
+ bool codec_registered_ GUARDED_BY(crit_sect_);
+ int receive_packet_count_ GUARDED_BY(crit_sect_);
+ int64_t next_insert_packet_time_ms_ GUARDED_BY(crit_sect_);
+ rtc::scoped_ptr<AudioEncoderIsac> isac_encoder_;
+ rtc::scoped_ptr<SimulatedClock> fake_clock_;
+ test::AudioLoop audio_loop_;
+};
+
+#if defined(WEBRTC_IOS)
+#define MAYBE_DoTest DISABLED_DoTest
+#else
+#define MAYBE_DoTest DoTest
+#endif
+#if defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX)
+TEST_F(AcmReRegisterIsacMtTestOldApi, MAYBE_DoTest) {
+ EXPECT_EQ(kEventSignaled, RunTest());
+}
+#endif
+
+// Disabling all of these tests on iOS until file support has been added.
+// See https://code.google.com/p/webrtc/issues/detail?id=4752 for details.
+#if !defined(WEBRTC_IOS)
+
+class AcmReceiverBitExactnessOldApi : public ::testing::Test {
+ public:
+ static std::string PlatformChecksum(std::string others,
+ std::string win64,
+ std::string android_arm32,
+ std::string android_arm64) {
+#if defined(_WIN32) && defined(WEBRTC_ARCH_64_BITS)
+ return win64;
+#elif defined(WEBRTC_ANDROID) && defined(WEBRTC_ARCH_ARM)
+ return android_arm32;
+#elif defined(WEBRTC_ANDROID) && defined(WEBRTC_ARCH_ARM64)
+ return android_arm64;
+#else
+ return others;
+#endif
+ }
+
+ protected:
+ struct ExternalDecoder {
+ int rtp_payload_type;
+ AudioDecoder* external_decoder;
+ int sample_rate_hz;
+ int num_channels;
+ std::string name;
+ };
+
+ void Run(int output_freq_hz,
+ const std::string& checksum_ref,
+ const std::vector<ExternalDecoder>& external_decoders) {
+ const std::string input_file_name =
+ webrtc::test::ResourcePath("audio_coding/neteq_universal_new", "rtp");
+ rtc::scoped_ptr<test::RtpFileSource> packet_source(
+ test::RtpFileSource::Create(input_file_name));
+#ifdef WEBRTC_ANDROID
+ // Filter out iLBC and iSAC-swb since they are not supported on Android.
+ packet_source->FilterOutPayloadType(102); // iLBC.
+ packet_source->FilterOutPayloadType(104); // iSAC-swb.
+#endif
+
+ test::AudioChecksum checksum;
+ const std::string output_file_name =
+ webrtc::test::OutputPath() +
+ ::testing::UnitTest::GetInstance()
+ ->current_test_info()
+ ->test_case_name() +
+ "_" + ::testing::UnitTest::GetInstance()->current_test_info()->name() +
+ "_output.pcm";
+ test::OutputAudioFile output_file(output_file_name);
+ test::AudioSinkFork output(&checksum, &output_file);
+
+ test::AcmReceiveTestOldApi test(
+ packet_source.get(),
+ &output,
+ output_freq_hz,
+ test::AcmReceiveTestOldApi::kArbitraryChannels);
+ ASSERT_NO_FATAL_FAILURE(test.RegisterNetEqTestCodecs());
+ for (const auto& ed : external_decoders) {
+ ASSERT_EQ(0, test.RegisterExternalReceiveCodec(
+ ed.rtp_payload_type, ed.external_decoder,
+ ed.sample_rate_hz, ed.num_channels, ed.name));
+ }
+ test.Run();
+
+ std::string checksum_string = checksum.Finish();
+ EXPECT_EQ(checksum_ref, checksum_string);
+
+ // Delete the output file.
+ remove(output_file_name.c_str());
+ }
+};
+
+#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX)) && \
+ defined(WEBRTC_CODEC_ILBC) && defined(WEBRTC_CODEC_G722)
+TEST_F(AcmReceiverBitExactnessOldApi, 8kHzOutput) {
+ Run(8000, PlatformChecksum("908002dc01fc4eb1d2be24eb1d3f354b",
+ "dcee98c623b147ebe1b40dd30efa896e",
+ "adc92e173f908f93b96ba5844209815a",
+ "ba16137d3a5a1e637252289c57522bfe"),
+ std::vector<ExternalDecoder>());
+}
+
+TEST_F(AcmReceiverBitExactnessOldApi, 16kHzOutput) {
+ Run(16000, PlatformChecksum("a909560b5ca49fa472b17b7b277195e9",
+ "f790e7a8cce4e2c8b7bb5e0e4c5dac0d",
+ "8cffa6abcb3e18e33b9d857666dff66a",
+ "66ee001e23534d4dcf5d0f81f916c93b"),
+ std::vector<ExternalDecoder>());
+}
+
+TEST_F(AcmReceiverBitExactnessOldApi, 32kHzOutput) {
+ Run(32000, PlatformChecksum("441aab4b347fb3db4e9244337aca8d8e",
+ "306e0d990ee6e92de3fbecc0123ece37",
+ "3e126fe894720c3f85edadcc91964ba5",
+ "9c6ff204b14152c48fe41d5ab757943b"),
+ std::vector<ExternalDecoder>());
+}
+
+TEST_F(AcmReceiverBitExactnessOldApi, 48kHzOutput) {
+ Run(48000, PlatformChecksum("4ee2730fa1daae755e8a8fd3abd779ec",
+ "aa7c232f63a67b2a72703593bdd172e0",
+ "0155665e93067c4e89256b944dd11999",
+ "fc4f0da8844cd808d822bbddf3b9c285"),
+ std::vector<ExternalDecoder>());
+}
+
+TEST_F(AcmReceiverBitExactnessOldApi, 48kHzOutputExternalDecoder) {
+ // Class intended to forward a call from a mock DecodeInternal to the real
+ // decoder's Decode, since DecodeInternal on the real decoder isn't public.
+ class DecodeForwarder {
+ public:
+ DecodeForwarder(AudioDecoder* decoder) : decoder_(decoder) {}
+ int Decode(const uint8_t* encoded,
+ size_t encoded_len,
+ int sample_rate_hz,
+ int16_t* decoded,
+ AudioDecoder::SpeechType* speech_type) {
+ return decoder_->Decode(encoded, encoded_len, sample_rate_hz,
+ decoder_->PacketDuration(encoded, encoded_len) *
+ decoder_->Channels() * sizeof(int16_t),
+ decoded, speech_type);
+ }
+
+ private:
+ AudioDecoder* const decoder_;
+ };
+
+ AudioDecoderPcmU decoder(1);
+ DecodeForwarder decode_forwarder(&decoder);
+ MockAudioDecoder mock_decoder;
+ // Set expectations on the mock decoder and also delegate the calls to the
+ // real decoder.
+ EXPECT_CALL(mock_decoder, IncomingPacket(_, _, _, _, _))
+ .Times(AtLeast(1))
+ .WillRepeatedly(Invoke(&decoder, &AudioDecoderPcmU::IncomingPacket));
+ EXPECT_CALL(mock_decoder, Channels())
+ .Times(AtLeast(1))
+ .WillRepeatedly(Invoke(&decoder, &AudioDecoderPcmU::Channels));
+ EXPECT_CALL(mock_decoder, DecodeInternal(_, _, _, _, _))
+ .Times(AtLeast(1))
+ .WillRepeatedly(Invoke(&decode_forwarder, &DecodeForwarder::Decode));
+ EXPECT_CALL(mock_decoder, HasDecodePlc())
+ .Times(AtLeast(1))
+ .WillRepeatedly(Invoke(&decoder, &AudioDecoderPcmU::HasDecodePlc));
+ EXPECT_CALL(mock_decoder, PacketDuration(_, _))
+ .Times(AtLeast(1))
+ .WillRepeatedly(Invoke(&decoder, &AudioDecoderPcmU::PacketDuration));
+ ExternalDecoder ed;
+ ed.rtp_payload_type = 0;
+ ed.external_decoder = &mock_decoder;
+ ed.sample_rate_hz = 8000;
+ ed.num_channels = 1;
+ ed.name = "MockPCMU";
+ std::vector<ExternalDecoder> external_decoders;
+ external_decoders.push_back(ed);
+
+ Run(48000, PlatformChecksum("4ee2730fa1daae755e8a8fd3abd779ec",
+ "aa7c232f63a67b2a72703593bdd172e0",
+ "0155665e93067c4e89256b944dd11999",
+ "fc4f0da8844cd808d822bbddf3b9c285"),
+ external_decoders);
+
+ EXPECT_CALL(mock_decoder, Die());
+}
+#endif
+
+// This test verifies bit exactness for the send-side of ACM. The test setup is
+// a chain of three different test classes:
+//
+// test::AcmSendTest -> AcmSenderBitExactness -> test::AcmReceiveTest
+//
+// The receiver side is driving the test by requesting new packets from
+// AcmSenderBitExactness::NextPacket(). This method, in turn, asks for the
+// packet from test::AcmSendTest::NextPacket, which inserts audio from the
+// input file until one packet is produced. (The input file loops indefinitely.)
+// Before passing the packet to the receiver, this test class verifies the
+// packet header and updates a payload checksum with the new payload. The
+// decoded output from the receiver is also verified with a (separate) checksum.
+class AcmSenderBitExactnessOldApi : public ::testing::Test,
+ public test::PacketSource {
+ protected:
+ static const int kTestDurationMs = 1000;
+
+ AcmSenderBitExactnessOldApi()
+ : frame_size_rtp_timestamps_(0),
+ packet_count_(0),
+ payload_type_(0),
+ last_sequence_number_(0),
+ last_timestamp_(0) {}
+
+ // Sets up the test::AcmSendTest object. Returns true on success, otherwise
+ // false.
+ bool SetUpSender() {
+ const std::string input_file_name =
+ webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm");
+ // Note that |audio_source_| will loop forever. The test duration is set
+ // explicitly by |kTestDurationMs|.
+ audio_source_.reset(new test::InputAudioFile(input_file_name));
+ static const int kSourceRateHz = 32000;
+ send_test_.reset(new test::AcmSendTestOldApi(
+ audio_source_.get(), kSourceRateHz, kTestDurationMs));
+ return send_test_.get() != NULL;
+ }
+
+ // Registers a send codec in the test::AcmSendTest object. Returns true on
+ // success, false on failure.
+ bool RegisterSendCodec(const char* payload_name,
+ int sampling_freq_hz,
+ int channels,
+ int payload_type,
+ int frame_size_samples,
+ int frame_size_rtp_timestamps) {
+ payload_type_ = payload_type;
+ frame_size_rtp_timestamps_ = frame_size_rtp_timestamps;
+ return send_test_->RegisterCodec(payload_name,
+ sampling_freq_hz,
+ channels,
+ payload_type,
+ frame_size_samples);
+ }
+
+ bool RegisterExternalSendCodec(AudioEncoder* external_speech_encoder,
+ int payload_type) {
+ payload_type_ = payload_type;
+ frame_size_rtp_timestamps_ =
+ external_speech_encoder->Num10MsFramesInNextPacket() *
+ external_speech_encoder->RtpTimestampRateHz() / 100;
+ return send_test_->RegisterExternalCodec(external_speech_encoder);
+ }
+
+ // Runs the test. SetUpSender() and RegisterSendCodec() must have been called
+ // before calling this method.
+ void Run(const std::string& audio_checksum_ref,
+ const std::string& payload_checksum_ref,
+ int expected_packets,
+ test::AcmReceiveTestOldApi::NumOutputChannels expected_channels) {
+ // Set up the receiver used to decode the packets and verify the decoded
+ // output.
+ test::AudioChecksum audio_checksum;
+ const std::string output_file_name =
+ webrtc::test::OutputPath() +
+ ::testing::UnitTest::GetInstance()
+ ->current_test_info()
+ ->test_case_name() +
+ "_" + ::testing::UnitTest::GetInstance()->current_test_info()->name() +
+ "_output.pcm";
+ test::OutputAudioFile output_file(output_file_name);
+ // Have the output audio sent both to file and to the checksum calculator.
+ test::AudioSinkFork output(&audio_checksum, &output_file);
+ const int kOutputFreqHz = 8000;
+ test::AcmReceiveTestOldApi receive_test(
+ this, &output, kOutputFreqHz, expected_channels);
+ ASSERT_NO_FATAL_FAILURE(receive_test.RegisterDefaultCodecs());
+
+ // This is where the actual test is executed.
+ receive_test.Run();
+
+ // Extract and verify the audio checksum.
+ std::string checksum_string = audio_checksum.Finish();
+ EXPECT_EQ(audio_checksum_ref, checksum_string);
+
+ // Extract and verify the payload checksum.
+ char checksum_result[rtc::Md5Digest::kSize];
+ payload_checksum_.Finish(checksum_result, rtc::Md5Digest::kSize);
+ checksum_string = rtc::hex_encode(checksum_result, rtc::Md5Digest::kSize);
+ EXPECT_EQ(payload_checksum_ref, checksum_string);
+
+ // Verify number of packets produced.
+ EXPECT_EQ(expected_packets, packet_count_);
+
+ // Delete the output file.
+ remove(output_file_name.c_str());
+ }
+
+ // Returns a pointer to the next packet. Returns NULL if the source is
+ // depleted (i.e., the test duration is exceeded), or if an error occurred.
+ // Inherited from test::PacketSource.
+ test::Packet* NextPacket() override {
+ // Get the next packet from AcmSendTest. Ownership of |packet| is
+ // transferred to this method.
+ test::Packet* packet = send_test_->NextPacket();
+ if (!packet)
+ return NULL;
+
+ VerifyPacket(packet);
+ // TODO(henrik.lundin) Save the packet to file as well.
+
+ // Pass it on to the caller. The caller becomes the owner of |packet|.
+ return packet;
+ }
+
+ // Verifies the packet.
+ void VerifyPacket(const test::Packet* packet) {
+ EXPECT_TRUE(packet->valid_header());
+    // (The header fields can still be inspected even if valid_header() is
+    // false.)
+ EXPECT_EQ(payload_type_, packet->header().payloadType);
+ if (packet_count_ > 0) {
+ // This is not the first packet.
+ uint16_t sequence_number_diff =
+ packet->header().sequenceNumber - last_sequence_number_;
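+      // The uint16_t subtraction wraps, so this check also holds across the
+      // 0xFFFF -> 0x0000 sequence-number rollover.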
+ EXPECT_EQ(1, sequence_number_diff);
+ uint32_t timestamp_diff = packet->header().timestamp - last_timestamp_;
+ EXPECT_EQ(frame_size_rtp_timestamps_, timestamp_diff);
+ }
+ ++packet_count_;
+ last_sequence_number_ = packet->header().sequenceNumber;
+ last_timestamp_ = packet->header().timestamp;
+ // Update the checksum.
+ payload_checksum_.Update(packet->payload(), packet->payload_length_bytes());
+ }
+
+ void SetUpTest(const char* codec_name,
+ int codec_sample_rate_hz,
+ int channels,
+ int payload_type,
+ int codec_frame_size_samples,
+ int codec_frame_size_rtp_timestamps) {
+ ASSERT_TRUE(SetUpSender());
+ ASSERT_TRUE(RegisterSendCodec(codec_name,
+ codec_sample_rate_hz,
+ channels,
+ payload_type,
+ codec_frame_size_samples,
+ codec_frame_size_rtp_timestamps));
+ }
+
+ void SetUpTestExternalEncoder(AudioEncoder* external_speech_encoder,
+ int payload_type) {
+ ASSERT_TRUE(SetUpSender());
+ ASSERT_TRUE(
+ RegisterExternalSendCodec(external_speech_encoder, payload_type));
+ }
+
+ rtc::scoped_ptr<test::AcmSendTestOldApi> send_test_;
+ rtc::scoped_ptr<test::InputAudioFile> audio_source_;
+ uint32_t frame_size_rtp_timestamps_;
+ int packet_count_;
+ uint8_t payload_type_;
+ uint16_t last_sequence_number_;
+ uint32_t last_timestamp_;
+ rtc::Md5Digest payload_checksum_;
+};
+
+#if defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX)
+TEST_F(AcmSenderBitExactnessOldApi, IsacWb30ms) {
+ ASSERT_NO_FATAL_FAILURE(SetUpTest("ISAC", 16000, 1, 103, 480, 480));
+ Run(AcmReceiverBitExactnessOldApi::PlatformChecksum(
+ "0b58f9eeee43d5891f5f6c75e77984a3",
+ "c7e5bdadfa2871df95639fcc297cf23d",
+ "0499ca260390769b3172136faad925b9",
+ "866abf524acd2807efbe65e133c23f95"),
+ AcmReceiverBitExactnessOldApi::PlatformChecksum(
+ "3c79f16f34218271f3dca4e2b1dfe1bb",
+ "d42cb5195463da26c8129bbfe73a22e6",
+ "83de248aea9c3c2bd680b6952401b4ca",
+ "3c79f16f34218271f3dca4e2b1dfe1bb"),
+ 33, test::AcmReceiveTestOldApi::kMonoOutput);
+}
+
+TEST_F(AcmSenderBitExactnessOldApi, IsacWb60ms) {
+ ASSERT_NO_FATAL_FAILURE(SetUpTest("ISAC", 16000, 1, 103, 960, 960));
+ Run(AcmReceiverBitExactnessOldApi::PlatformChecksum(
+ "1ad29139a04782a33daad8c2b9b35875",
+ "14d63c5f08127d280e722e3191b73bdd",
+ "8da003e16c5371af2dc2be79a50f9076",
+ "ef75e900e6f375e3061163c53fd09a63"),
+ AcmReceiverBitExactnessOldApi::PlatformChecksum(
+ "9e0a0ab743ad987b55b8e14802769c56",
+ "ebe04a819d3a9d83a83a17f271e1139a",
+ "97aeef98553b5a4b5a68f8b716e8eaf0",
+ "9e0a0ab743ad987b55b8e14802769c56"),
+ 16, test::AcmReceiveTestOldApi::kMonoOutput);
+}
+#endif
+
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_IsacSwb30ms DISABLED_IsacSwb30ms
+#else
+#define MAYBE_IsacSwb30ms IsacSwb30ms
+#endif
+#if defined(WEBRTC_CODEC_ISAC)
+TEST_F(AcmSenderBitExactnessOldApi, MAYBE_IsacSwb30ms) {
+ ASSERT_NO_FATAL_FAILURE(SetUpTest("ISAC", 32000, 1, 104, 960, 960));
+ Run(AcmReceiverBitExactnessOldApi::PlatformChecksum(
+ "5683b58da0fbf2063c7adc2e6bfb3fb8",
+ "2b3c387d06f00b7b7aad4c9be56fb83d", "android_arm32_audio",
+ "android_arm64_audio"),
+ AcmReceiverBitExactnessOldApi::PlatformChecksum(
+ "ce86106a93419aefb063097108ec94ab",
+ "bcc2041e7744c7ebd9f701866856849c", "android_arm32_payload",
+ "android_arm64_payload"),
+ 33, test::AcmReceiveTestOldApi::kMonoOutput);
+}
+#endif
+
+TEST_F(AcmSenderBitExactnessOldApi, Pcm16_8000khz_10ms) {
+ ASSERT_NO_FATAL_FAILURE(SetUpTest("L16", 8000, 1, 107, 80, 80));
+ Run("de4a98e1406f8b798d99cd0704e862e2",
+ "c1edd36339ce0326cc4550041ad719a0",
+ 100,
+ test::AcmReceiveTestOldApi::kMonoOutput);
+}
+
+TEST_F(AcmSenderBitExactnessOldApi, Pcm16_16000khz_10ms) {
+ ASSERT_NO_FATAL_FAILURE(SetUpTest("L16", 16000, 1, 108, 160, 160));
+ Run("ae646d7b68384a1269cc080dd4501916",
+ "ad786526383178b08d80d6eee06e9bad",
+ 100,
+ test::AcmReceiveTestOldApi::kMonoOutput);
+}
+
+TEST_F(AcmSenderBitExactnessOldApi, Pcm16_32000khz_10ms) {
+ ASSERT_NO_FATAL_FAILURE(SetUpTest("L16", 32000, 1, 109, 320, 320));
+ Run("7fe325e8fbaf755e3c5df0b11a4774fb",
+ "5ef82ea885e922263606c6fdbc49f651",
+ 100,
+ test::AcmReceiveTestOldApi::kMonoOutput);
+}
+
+TEST_F(AcmSenderBitExactnessOldApi, Pcm16_stereo_8000khz_10ms) {
+ ASSERT_NO_FATAL_FAILURE(SetUpTest("L16", 8000, 2, 111, 80, 80));
+ Run("fb263b74e7ac3de915474d77e4744ceb",
+ "62ce5adb0d4965d0a52ec98ae7f98974",
+ 100,
+ test::AcmReceiveTestOldApi::kStereoOutput);
+}
+
+TEST_F(AcmSenderBitExactnessOldApi, Pcm16_stereo_16000khz_10ms) {
+ ASSERT_NO_FATAL_FAILURE(SetUpTest("L16", 16000, 2, 112, 160, 160));
+ Run("d09e9239553649d7ac93e19d304281fd",
+ "41ca8edac4b8c71cd54fd9f25ec14870",
+ 100,
+ test::AcmReceiveTestOldApi::kStereoOutput);
+}
+
+TEST_F(AcmSenderBitExactnessOldApi, Pcm16_stereo_32000khz_10ms) {
+ ASSERT_NO_FATAL_FAILURE(SetUpTest("L16", 32000, 2, 113, 320, 320));
+ Run("5f025d4f390982cc26b3d92fe02e3044",
+ "50e58502fb04421bf5b857dda4c96879",
+ 100,
+ test::AcmReceiveTestOldApi::kStereoOutput);
+}
+
+TEST_F(AcmSenderBitExactnessOldApi, Pcmu_20ms) {
+ ASSERT_NO_FATAL_FAILURE(SetUpTest("PCMU", 8000, 1, 0, 160, 160));
+ Run("81a9d4c0bb72e9becc43aef124c981e9",
+ "8f9b8750bd80fe26b6cbf6659b89f0f9",
+ 50,
+ test::AcmReceiveTestOldApi::kMonoOutput);
+}
+
+TEST_F(AcmSenderBitExactnessOldApi, Pcma_20ms) {
+ ASSERT_NO_FATAL_FAILURE(SetUpTest("PCMA", 8000, 1, 8, 160, 160));
+ Run("39611f798969053925a49dc06d08de29",
+ "6ad745e55aa48981bfc790d0eeef2dd1",
+ 50,
+ test::AcmReceiveTestOldApi::kMonoOutput);
+}
+
+TEST_F(AcmSenderBitExactnessOldApi, Pcmu_stereo_20ms) {
+ ASSERT_NO_FATAL_FAILURE(SetUpTest("PCMU", 8000, 2, 110, 160, 160));
+ Run("437bec032fdc5cbaa0d5175430af7b18",
+ "60b6f25e8d1e74cb679cfe756dd9bca5",
+ 50,
+ test::AcmReceiveTestOldApi::kStereoOutput);
+}
+
+TEST_F(AcmSenderBitExactnessOldApi, Pcma_stereo_20ms) {
+ ASSERT_NO_FATAL_FAILURE(SetUpTest("PCMA", 8000, 2, 118, 160, 160));
+ Run("a5c6d83c5b7cedbeff734238220a4b0c",
+ "92b282c83efd20e7eeef52ba40842cf7",
+ 50,
+ test::AcmReceiveTestOldApi::kStereoOutput);
+}
+
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_Ilbc_30ms DISABLED_Ilbc_30ms
+#else
+#define MAYBE_Ilbc_30ms Ilbc_30ms
+#endif
+#if defined(WEBRTC_CODEC_ILBC)
+TEST_F(AcmSenderBitExactnessOldApi, MAYBE_Ilbc_30ms) {
+ ASSERT_NO_FATAL_FAILURE(SetUpTest("ILBC", 8000, 1, 102, 240, 240));
+ Run(AcmReceiverBitExactnessOldApi::PlatformChecksum(
+ "7b6ec10910debd9af08011d3ed5249f7",
+ "7b6ec10910debd9af08011d3ed5249f7", "android_arm32_audio",
+ "android_arm64_audio"),
+ AcmReceiverBitExactnessOldApi::PlatformChecksum(
+ "cfae2e9f6aba96e145f2bcdd5050ce78",
+ "cfae2e9f6aba96e145f2bcdd5050ce78", "android_arm32_payload",
+ "android_arm64_payload"),
+ 33, test::AcmReceiveTestOldApi::kMonoOutput);
+}
+#endif
+
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_G722_20ms DISABLED_G722_20ms
+#else
+#define MAYBE_G722_20ms G722_20ms
+#endif
+#if defined(WEBRTC_CODEC_G722)
+TEST_F(AcmSenderBitExactnessOldApi, MAYBE_G722_20ms) {
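+  // Note: G.722 uses an 8 kHz RTP clock even though it samples at 16 kHz
+  // (RFC 3551), so a 320-sample frame advances the RTP timestamp by 160.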
+ ASSERT_NO_FATAL_FAILURE(SetUpTest("G722", 16000, 1, 9, 320, 160));
+ Run(AcmReceiverBitExactnessOldApi::PlatformChecksum(
+ "7d759436f2533582950d148b5161a36c",
+ "7d759436f2533582950d148b5161a36c", "android_arm32_audio",
+ "android_arm64_audio"),
+ AcmReceiverBitExactnessOldApi::PlatformChecksum(
+ "fc68a87e1380614e658087cb35d5ca10",
+ "fc68a87e1380614e658087cb35d5ca10", "android_arm32_payload",
+ "android_arm64_payload"),
+ 50, test::AcmReceiveTestOldApi::kMonoOutput);
+}
+#endif
+
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_G722_stereo_20ms DISABLED_G722_stereo_20ms
+#else
+#define MAYBE_G722_stereo_20ms G722_stereo_20ms
+#endif
+#if defined(WEBRTC_CODEC_G722)
+TEST_F(AcmSenderBitExactnessOldApi, MAYBE_G722_stereo_20ms) {
+ ASSERT_NO_FATAL_FAILURE(SetUpTest("G722", 16000, 2, 119, 320, 160));
+ Run(AcmReceiverBitExactnessOldApi::PlatformChecksum(
+ "7190ee718ab3d80eca181e5f7140c210",
+ "7190ee718ab3d80eca181e5f7140c210", "android_arm32_audio",
+ "android_arm64_audio"),
+ AcmReceiverBitExactnessOldApi::PlatformChecksum(
+ "66516152eeaa1e650ad94ff85f668dac",
+ "66516152eeaa1e650ad94ff85f668dac", "android_arm32_payload",
+ "android_arm64_payload"),
+ 50, test::AcmReceiveTestOldApi::kStereoOutput);
+}
+#endif
+
+TEST_F(AcmSenderBitExactnessOldApi, Opus_stereo_20ms) {
+ ASSERT_NO_FATAL_FAILURE(SetUpTest("opus", 48000, 2, 120, 960, 960));
+ Run(AcmReceiverBitExactnessOldApi::PlatformChecksum(
+ "855041f2490b887302bce9d544731849",
+ "855041f2490b887302bce9d544731849",
+ "1e1a0fce893fef2d66886a7f09e2ebce",
+ "7417a66c28be42d5d9b2d64e0c191585"),
+ AcmReceiverBitExactnessOldApi::PlatformChecksum(
+ "d781cce1ab986b618d0da87226cdde30",
+ "d781cce1ab986b618d0da87226cdde30",
+ "1a1fe04dd12e755949987c8d729fb3e0",
+ "47b0b04f1d03076b857c86c72c2c298b"),
+ 50, test::AcmReceiveTestOldApi::kStereoOutput);
+}
+
+TEST_F(AcmSenderBitExactnessOldApi, Opus_stereo_20ms_voip) {
+ ASSERT_NO_FATAL_FAILURE(SetUpTest("opus", 48000, 2, 120, 960, 960));
+  // If not set, the default will be kAudio in the case of stereo.
+ EXPECT_EQ(0, send_test_->acm()->SetOpusApplication(kVoip));
+ Run(AcmReceiverBitExactnessOldApi::PlatformChecksum(
+ "9b9e12bc3cc793740966e11cbfa8b35b",
+ "9b9e12bc3cc793740966e11cbfa8b35b",
+ "57412a4b5771d19ff03ec35deffe7067",
+ "7ad0bbefcaa87e23187bf4a56d2f3513"),
+ AcmReceiverBitExactnessOldApi::PlatformChecksum(
+ "c7340b1189652ab6b5e80dade7390cb4",
+ "c7340b1189652ab6b5e80dade7390cb4",
+ "cdfe85939c411d12b61701c566e22d26",
+ "7a678fbe46df5bf0c67e88264a2d9275"),
+ 50, test::AcmReceiveTestOldApi::kStereoOutput);
+}
+
+// This test is for verifying the SetBitRate function. The bitrate is changed
+// at the beginning, and the number of generated bytes is checked.
+class AcmSetBitRateOldApi : public ::testing::Test {
+ protected:
+ static const int kTestDurationMs = 1000;
+
+ // Sets up the test::AcmSendTest object. Returns true on success, otherwise
+ // false.
+ bool SetUpSender() {
+ const std::string input_file_name =
+ webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm");
+ // Note that |audio_source_| will loop forever. The test duration is set
+ // explicitly by |kTestDurationMs|.
+ audio_source_.reset(new test::InputAudioFile(input_file_name));
+ static const int kSourceRateHz = 32000;
+ send_test_.reset(new test::AcmSendTestOldApi(
+ audio_source_.get(), kSourceRateHz, kTestDurationMs));
+    return send_test_.get() != NULL;
+ }
+
+ // Registers a send codec in the test::AcmSendTest object. Returns true on
+ // success, false on failure.
+ virtual bool RegisterSendCodec(const char* payload_name,
+ int sampling_freq_hz,
+ int channels,
+ int payload_type,
+ int frame_size_samples,
+ int frame_size_rtp_timestamps) {
+ return send_test_->RegisterCodec(payload_name, sampling_freq_hz, channels,
+ payload_type, frame_size_samples);
+ }
+
+ // Runs the test. SetUpSender() and RegisterSendCodec() must have been called
+ // before calling this method.
+ void Run(int target_bitrate_bps, int expected_total_bits) {
+ ASSERT_TRUE(send_test_->acm());
+ send_test_->acm()->SetBitRate(target_bitrate_bps);
+ int nr_bytes = 0;
+ while (test::Packet* next_packet = send_test_->NextPacket()) {
+ nr_bytes += next_packet->payload_length_bytes();
+ delete next_packet;
+ }
+ EXPECT_EQ(expected_total_bits, nr_bytes * 8);
+ }
+
+ void SetUpTest(const char* codec_name,
+ int codec_sample_rate_hz,
+ int channels,
+ int payload_type,
+ int codec_frame_size_samples,
+ int codec_frame_size_rtp_timestamps) {
+ ASSERT_TRUE(SetUpSender());
+ ASSERT_TRUE(RegisterSendCodec(codec_name, codec_sample_rate_hz, channels,
+ payload_type, codec_frame_size_samples,
+ codec_frame_size_rtp_timestamps));
+ }
+
+ rtc::scoped_ptr<test::AcmSendTestOldApi> send_test_;
+ rtc::scoped_ptr<test::InputAudioFile> audio_source_;
+};
+
+TEST_F(AcmSetBitRateOldApi, Opus_48khz_20ms_10kbps) {
+ ASSERT_NO_FATAL_FAILURE(SetUpTest("opus", 48000, 1, 107, 960, 960));
+#if defined(WEBRTC_ANDROID)
+ Run(10000, 9328);
+#else
+ Run(10000, 9072);
+#endif // WEBRTC_ANDROID
+}
+
+TEST_F(AcmSetBitRateOldApi, Opus_48khz_20ms_50kbps) {
+ ASSERT_NO_FATAL_FAILURE(SetUpTest("opus", 48000, 1, 107, 960, 960));
+#if defined(WEBRTC_ANDROID)
+ Run(50000, 47952);
+#else
+ Run(50000, 49600);
+#endif // WEBRTC_ANDROID
+}
+
+// The result on the Android platforms is inconsistent for this test case: on
+// android_rel the result differs from the android and android arm64 rel bots.
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_Opus_48khz_20ms_100kbps DISABLED_Opus_48khz_20ms_100kbps
+#else
+#define MAYBE_Opus_48khz_20ms_100kbps Opus_48khz_20ms_100kbps
+#endif
+TEST_F(AcmSetBitRateOldApi, MAYBE_Opus_48khz_20ms_100kbps) {
+ ASSERT_NO_FATAL_FAILURE(SetUpTest("opus", 48000, 1, 107, 960, 960));
+ Run(100000, 100888);
+}
+
+// These next two tests ensure that the SetBitRate function has no effect on
+// PCM.
+TEST_F(AcmSetBitRateOldApi, Pcm16_8khz_10ms_8kbps) {
+ ASSERT_NO_FATAL_FAILURE(SetUpTest("L16", 8000, 1, 107, 80, 80));
+ Run(8000, 128000);
+}
+
+TEST_F(AcmSetBitRateOldApi, Pcm16_8khz_10ms_32kbps) {
+ ASSERT_NO_FATAL_FAILURE(SetUpTest("L16", 8000, 1, 107, 80, 80));
+ Run(32000, 128000);
+}
+
+// This test is for verifying the SetBitRate function. The bitrate is changed
+// in the middle of the test, and the numbers of bytes generated before and
+// after the change are checked.
+class AcmChangeBitRateOldApi : public AcmSetBitRateOldApi {
+ protected:
+ AcmChangeBitRateOldApi() : sampling_freq_hz_(0), frame_size_samples_(0) {}
+
+ // Registers a send codec in the test::AcmSendTest object. Returns true on
+ // success, false on failure.
+ bool RegisterSendCodec(const char* payload_name,
+ int sampling_freq_hz,
+ int channels,
+ int payload_type,
+ int frame_size_samples,
+ int frame_size_rtp_timestamps) override {
+ frame_size_samples_ = frame_size_samples;
+ sampling_freq_hz_ = sampling_freq_hz;
+ return AcmSetBitRateOldApi::RegisterSendCodec(
+ payload_name, sampling_freq_hz, channels, payload_type,
+ frame_size_samples, frame_size_rtp_timestamps);
+ }
+
+ // Runs the test. SetUpSender() and RegisterSendCodec() must have been called
+ // before calling this method.
+ void Run(int target_bitrate_bps,
+ int expected_before_switch_bits,
+ int expected_after_switch_bits) {
+ ASSERT_TRUE(send_test_->acm());
+ int nr_packets =
+ sampling_freq_hz_ * kTestDurationMs / (frame_size_samples_ * 1000);
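+    // For example, with Opus at 48 kHz and 960-sample frames over a 1000 ms
+    // test, this gives 48000 * 1000 / (960 * 1000) = 50 packets, so the rate
+    // switch below happens after packet 25.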
+ int nr_bytes_before = 0, nr_bytes_after = 0;
+ int packet_counter = 0;
+ while (test::Packet* next_packet = send_test_->NextPacket()) {
+ if (packet_counter == nr_packets / 2)
+ send_test_->acm()->SetBitRate(target_bitrate_bps);
+ if (packet_counter < nr_packets / 2)
+ nr_bytes_before += next_packet->payload_length_bytes();
+ else
+ nr_bytes_after += next_packet->payload_length_bytes();
+ packet_counter++;
+ delete next_packet;
+ }
+ EXPECT_EQ(expected_before_switch_bits, nr_bytes_before * 8);
+ EXPECT_EQ(expected_after_switch_bits, nr_bytes_after * 8);
+ }
+
+ uint32_t sampling_freq_hz_;
+ uint32_t frame_size_samples_;
+};
+
+TEST_F(AcmChangeBitRateOldApi, Opus_48khz_20ms_10kbps) {
+ ASSERT_NO_FATAL_FAILURE(SetUpTest("opus", 48000, 1, 107, 960, 960));
+#if defined(WEBRTC_ANDROID)
+ Run(10000, 32200, 5496);
+#else
+ Run(10000, 32200, 5432);
+#endif // WEBRTC_ANDROID
+}
+
+TEST_F(AcmChangeBitRateOldApi, Opus_48khz_20ms_50kbps) {
+ ASSERT_NO_FATAL_FAILURE(SetUpTest("opus", 48000, 1, 107, 960, 960));
+#if defined(WEBRTC_ANDROID)
+ Run(50000, 32200, 24912);
+#else
+ Run(50000, 32200, 24792);
+#endif // WEBRTC_ANDROID
+}
+
+TEST_F(AcmChangeBitRateOldApi, Opus_48khz_20ms_100kbps) {
+ ASSERT_NO_FATAL_FAILURE(SetUpTest("opus", 48000, 1, 107, 960, 960));
+#if defined(WEBRTC_ANDROID)
+ Run(100000, 32200, 51480);
+#else
+ Run(100000, 32200, 50584);
+#endif // WEBRTC_ANDROID
+}
+
+// These next two tests ensure that the SetBitRate function has no effect on
+// PCM.
+TEST_F(AcmChangeBitRateOldApi, Pcm16_8khz_10ms_8kbps) {
+ ASSERT_NO_FATAL_FAILURE(SetUpTest("L16", 8000, 1, 107, 80, 80));
+ Run(8000, 64000, 64000);
+}
+
+TEST_F(AcmChangeBitRateOldApi, Pcm16_8khz_10ms_32kbps) {
+ ASSERT_NO_FATAL_FAILURE(SetUpTest("L16", 8000, 1, 107, 80, 80));
+ Run(32000, 64000, 64000);
+}
+
+TEST_F(AcmSenderBitExactnessOldApi, External_Pcmu_20ms) {
+ CodecInst codec_inst;
+ codec_inst.channels = 1;
+ codec_inst.pacsize = 160;
+ codec_inst.pltype = 0;
+ AudioEncoderPcmU encoder(codec_inst);
+ MockAudioEncoder mock_encoder;
+ // Set expectations on the mock encoder and also delegate the calls to the
+ // real encoder.
+ EXPECT_CALL(mock_encoder, MaxEncodedBytes())
+ .Times(AtLeast(1))
+ .WillRepeatedly(Invoke(&encoder, &AudioEncoderPcmU::MaxEncodedBytes));
+ EXPECT_CALL(mock_encoder, SampleRateHz())
+ .Times(AtLeast(1))
+ .WillRepeatedly(Invoke(&encoder, &AudioEncoderPcmU::SampleRateHz));
+ EXPECT_CALL(mock_encoder, NumChannels())
+ .Times(AtLeast(1))
+ .WillRepeatedly(Invoke(&encoder, &AudioEncoderPcmU::NumChannels));
+ EXPECT_CALL(mock_encoder, RtpTimestampRateHz())
+ .Times(AtLeast(1))
+ .WillRepeatedly(Invoke(&encoder, &AudioEncoderPcmU::RtpTimestampRateHz));
+ EXPECT_CALL(mock_encoder, Num10MsFramesInNextPacket())
+ .Times(AtLeast(1))
+ .WillRepeatedly(
+ Invoke(&encoder, &AudioEncoderPcmU::Num10MsFramesInNextPacket));
+ EXPECT_CALL(mock_encoder, GetTargetBitrate())
+ .Times(AtLeast(1))
+ .WillRepeatedly(Invoke(&encoder, &AudioEncoderPcmU::GetTargetBitrate));
+ EXPECT_CALL(mock_encoder, EncodeInternal(_, _, _, _))
+ .Times(AtLeast(1))
+ .WillRepeatedly(Invoke(&encoder, &AudioEncoderPcmU::EncodeInternal));
+ EXPECT_CALL(mock_encoder, SetFec(_))
+ .Times(AtLeast(1))
+ .WillRepeatedly(Invoke(&encoder, &AudioEncoderPcmU::SetFec));
+ ASSERT_NO_FATAL_FAILURE(
+ SetUpTestExternalEncoder(&mock_encoder, codec_inst.pltype));
+ Run("81a9d4c0bb72e9becc43aef124c981e9", "8f9b8750bd80fe26b6cbf6659b89f0f9",
+ 50, test::AcmReceiveTestOldApi::kMonoOutput);
+}
+
+// This test fixture is implemented to run ACM and change the desired output
+// frequency during the call. The input packets are simply PCM16b-wb encoded
+// payloads with a constant value of |kSampleValue|. The test fixture itself
+// acts as a PacketSource between the receive test class and the constant-
+// payload packet source class. The output is both written to file and
+// analyzed in this test fixture.
+class AcmSwitchingOutputFrequencyOldApi : public ::testing::Test,
+ public test::PacketSource,
+ public test::AudioSink {
+ protected:
+ static const size_t kTestNumPackets = 50;
+ static const int kEncodedSampleRateHz = 16000;
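+  // 30 ms of audio per payload; at 16 kHz this is 480 samples.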
+ static const size_t kPayloadLenSamples = 30 * kEncodedSampleRateHz / 1000;
+ static const int kPayloadType = 108; // Default payload type for PCM16b-wb.
+
+ AcmSwitchingOutputFrequencyOldApi()
+ : first_output_(true),
+ num_packets_(0),
+ packet_source_(kPayloadLenSamples,
+ kSampleValue,
+ kEncodedSampleRateHz,
+ kPayloadType),
+ output_freq_2_(0),
+ has_toggled_(false) {}
+
+ void Run(int output_freq_1, int output_freq_2, int toggle_period_ms) {
+ // Set up the receiver used to decode the packets and verify the decoded
+ // output.
+ const std::string output_file_name =
+ webrtc::test::OutputPath() +
+ ::testing::UnitTest::GetInstance()
+ ->current_test_info()
+ ->test_case_name() +
+ "_" + ::testing::UnitTest::GetInstance()->current_test_info()->name() +
+ "_output.pcm";
+ test::OutputAudioFile output_file(output_file_name);
+ // Have the output audio sent both to file and to the WriteArray method in
+ // this class.
+ test::AudioSinkFork output(this, &output_file);
+ test::AcmReceiveTestToggleOutputFreqOldApi receive_test(
+ this,
+ &output,
+ output_freq_1,
+ output_freq_2,
+ toggle_period_ms,
+ test::AcmReceiveTestOldApi::kMonoOutput);
+ ASSERT_NO_FATAL_FAILURE(receive_test.RegisterDefaultCodecs());
+ output_freq_2_ = output_freq_2;
+
+ // This is where the actual test is executed.
+ receive_test.Run();
+
+ // Delete output file.
+ remove(output_file_name.c_str());
+ }
+
+ // Inherited from test::PacketSource.
+ test::Packet* NextPacket() override {
+ // Check if it is time to terminate the test. The packet source is of type
+ // ConstantPcmPacketSource, which is infinite, so we must end the test
+ // "manually".
+ if (num_packets_++ > kTestNumPackets) {
+ EXPECT_TRUE(has_toggled_);
+ return NULL; // Test ended.
+ }
+
+ // Get the next packet from the source.
+ return packet_source_.NextPacket();
+ }
+
+ // Inherited from test::AudioSink.
+  bool WriteArray(const int16_t* audio, size_t num_samples) override {
+ // Skip checking the first output frame, since it has a number of zeros
+ // due to how NetEq is initialized.
+ if (first_output_) {
+ first_output_ = false;
+ return true;
+ }
+ for (size_t i = 0; i < num_samples; ++i) {
+ EXPECT_EQ(kSampleValue, audio[i]);
+ }
+ if (num_samples ==
+ static_cast<size_t>(output_freq_2_ / 100)) // Size of 10 ms frame.
+ has_toggled_ = true;
+ // The return value does not say if the values match the expectation, just
+ // that the method could process the samples.
+ return true;
+ }
+
+ const int16_t kSampleValue = 1000;
+ bool first_output_;
+ size_t num_packets_;
+ test::ConstantPcmPacketSource packet_source_;
+ int output_freq_2_;
+ bool has_toggled_;
+};
+
+TEST_F(AcmSwitchingOutputFrequencyOldApi, TestWithoutToggling) {
+ Run(16000, 16000, 1000);
+}
+
+TEST_F(AcmSwitchingOutputFrequencyOldApi, Toggle16KhzTo32Khz) {
+ Run(16000, 32000, 1000);
+}
+
+TEST_F(AcmSwitchingOutputFrequencyOldApi, Toggle32KhzTo16Khz) {
+ Run(32000, 16000, 1000);
+}
+
+TEST_F(AcmSwitchingOutputFrequencyOldApi, Toggle16KhzTo8Khz) {
+ Run(16000, 8000, 1000);
+}
+
+TEST_F(AcmSwitchingOutputFrequencyOldApi, Toggle8KhzTo16Khz) {
+ Run(8000, 16000, 1000);
+}
+
+#endif
+
+} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/acm2/call_statistics.cc b/webrtc/modules/audio_coding/acm2/call_statistics.cc
new file mode 100644
index 0000000000..4441932c8c
--- /dev/null
+++ b/webrtc/modules/audio_coding/acm2/call_statistics.cc
@@ -0,0 +1,55 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/acm2/call_statistics.h"
+
+#include <assert.h>
+
+namespace webrtc {
+
+namespace acm2 {
+
+void CallStatistics::DecodedByNetEq(AudioFrame::SpeechType speech_type) {
+ ++decoding_stat_.calls_to_neteq;
+ switch (speech_type) {
+ case AudioFrame::kNormalSpeech: {
+ ++decoding_stat_.decoded_normal;
+ break;
+ }
+ case AudioFrame::kPLC: {
+ ++decoding_stat_.decoded_plc;
+ break;
+ }
+ case AudioFrame::kCNG: {
+ ++decoding_stat_.decoded_cng;
+ break;
+ }
+ case AudioFrame::kPLCCNG: {
+ ++decoding_stat_.decoded_plc_cng;
+ break;
+ }
+ case AudioFrame::kUndefined: {
+ // If the audio is decoded by NetEq, |kUndefined| is not an option.
+ assert(false);
+ }
+ }
+}
+
+void CallStatistics::DecodedBySilenceGenerator() {
+ ++decoding_stat_.calls_to_silence_generator;
+}
+
+const AudioDecodingCallStats& CallStatistics::GetDecodingStatistics() const {
+ return decoding_stat_;
+}
+
+} // namespace acm2
+
+} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/acm2/call_statistics.h b/webrtc/modules/audio_coding/acm2/call_statistics.h
new file mode 100644
index 0000000000..888afea0a7
--- /dev/null
+++ b/webrtc/modules/audio_coding/acm2/call_statistics.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_ACM2_CALL_STATISTICS_H_
+#define WEBRTC_MODULES_AUDIO_CODING_ACM2_CALL_STATISTICS_H_
+
+#include "webrtc/common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
+
+//
+// This class is for bookkeeping of calls to ACM. It is not useful to log each
+// individual API call that is expected to occur every 10 ms, e.g.
+// PlayoutData10Ms(); however, it is useful to know the number of such calls
+// in a given time interval. The current implementation covers calls to
+// PlayoutData10Ms() with detailed accounting of the decoded speech type.
+//
+// Thread Safety
+// =============
+// Please note that this class is not thread safe. The class must be protected
+// if different APIs are called from different threads.
+//
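+// A minimal usage sketch (hypothetical caller; the real call sites live in
+// the ACM receive path):
+//
+//   acm2::CallStatistics call_stats;
+//   call_stats.DecodedByNetEq(AudioFrame::kNormalSpeech);  // NetEq decoded.
+//   call_stats.DecodedBySilenceGenerator();  // NetEq was bypassed.
+//   const AudioDecodingCallStats& stats =
+//       call_stats.GetDecodingStatistics();
+//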
+
+namespace webrtc {
+
+namespace acm2 {
+
+class CallStatistics {
+ public:
+ CallStatistics() {}
+ ~CallStatistics() {}
+
+  // Call this method to indicate that NetEq engaged in decoding. |speech_type|
+  // is the audio type according to NetEq.
+ void DecodedByNetEq(AudioFrame::SpeechType speech_type);
+
+  // Call this method to indicate that a decoding call resulted in generating
+  // silence, i.e., the call to NetEq is bypassed and the output audio is zero.
+ void DecodedBySilenceGenerator();
+
+  // Get statistics for decoding. The statistics include the number of calls to
+  // NetEq and to the silence generator, as well as the type of speech pulled
+  // off NetEq; cf. the declaration of AudioDecodingCallStats for a detailed
+  // description.
+ const AudioDecodingCallStats& GetDecodingStatistics() const;
+
+ private:
+ // Reset the decoding statistics.
+ void ResetDecodingStatistics();
+
+ AudioDecodingCallStats decoding_stat_;
+};
+
+} // namespace acm2
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_ACM2_CALL_STATISTICS_H_
diff --git a/webrtc/modules/audio_coding/acm2/call_statistics_unittest.cc b/webrtc/modules/audio_coding/acm2/call_statistics_unittest.cc
new file mode 100644
index 0000000000..9ba0774ce1
--- /dev/null
+++ b/webrtc/modules/audio_coding/acm2/call_statistics_unittest.cc
@@ -0,0 +1,55 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/audio_coding/acm2/call_statistics.h"
+
+namespace webrtc {
+
+namespace acm2 {
+
+TEST(CallStatisticsTest, InitializedZero) {
+ CallStatistics call_stats;
+ AudioDecodingCallStats stats;
+
+ stats = call_stats.GetDecodingStatistics();
+ EXPECT_EQ(0, stats.calls_to_neteq);
+ EXPECT_EQ(0, stats.calls_to_silence_generator);
+ EXPECT_EQ(0, stats.decoded_normal);
+ EXPECT_EQ(0, stats.decoded_cng);
+ EXPECT_EQ(0, stats.decoded_plc);
+ EXPECT_EQ(0, stats.decoded_plc_cng);
+}
+
+TEST(CallStatisticsTest, AllCalls) {
+ CallStatistics call_stats;
+ AudioDecodingCallStats stats;
+
+ call_stats.DecodedBySilenceGenerator();
+ call_stats.DecodedByNetEq(AudioFrame::kNormalSpeech);
+ call_stats.DecodedByNetEq(AudioFrame::kPLC);
+ call_stats.DecodedByNetEq(AudioFrame::kPLCCNG);
+ call_stats.DecodedByNetEq(AudioFrame::kCNG);
+
+ stats = call_stats.GetDecodingStatistics();
+ EXPECT_EQ(4, stats.calls_to_neteq);
+ EXPECT_EQ(1, stats.calls_to_silence_generator);
+ EXPECT_EQ(1, stats.decoded_normal);
+ EXPECT_EQ(1, stats.decoded_cng);
+ EXPECT_EQ(1, stats.decoded_plc);
+ EXPECT_EQ(1, stats.decoded_plc_cng);
+}
+
+} // namespace acm2
+
+} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/acm2/codec_manager.cc b/webrtc/modules/audio_coding/acm2/codec_manager.cc
new file mode 100644
index 0000000000..ad67377d42
--- /dev/null
+++ b/webrtc/modules/audio_coding/acm2/codec_manager.cc
@@ -0,0 +1,194 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/acm2/codec_manager.h"
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/format_macros.h"
+#include "webrtc/engine_configurations.h"
+#include "webrtc/modules/audio_coding/acm2/rent_a_codec.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+namespace webrtc {
+namespace acm2 {
+
+namespace {
+
+// Checks whether the given codec is valid to be registered as a send codec.
+// Returns the codec's index on success and -1 on failure.
+int IsValidSendCodec(const CodecInst& send_codec) {
+ int dummy_id = 0;
+ if ((send_codec.channels != 1) && (send_codec.channels != 2)) {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, dummy_id,
+ "Wrong number of channels (%" PRIuS ", only mono and stereo "
+ "are supported)",
+ send_codec.channels);
+ return -1;
+ }
+
+ auto maybe_codec_id = RentACodec::CodecIdByInst(send_codec);
+ if (!maybe_codec_id) {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, dummy_id,
+ "Invalid codec setting for the send codec.");
+ return -1;
+ }
+
+ // Telephone-event cannot be a send codec.
+ if (!STR_CASE_CMP(send_codec.plname, "telephone-event")) {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, dummy_id,
+ "telephone-event cannot be a send codec");
+ return -1;
+ }
+
+ if (!RentACodec::IsSupportedNumChannels(*maybe_codec_id, send_codec.channels)
+ .value_or(false)) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, dummy_id,
+                 "%" PRIuS " number of channels not supported for %s.",
+                 send_codec.channels, send_codec.plname);
+ return -1;
+ }
+ return RentACodec::CodecIndexFromId(*maybe_codec_id).value_or(-1);
+}
+
+bool IsOpus(const CodecInst& codec) {
+ return
+#ifdef WEBRTC_CODEC_OPUS
+ !STR_CASE_CMP(codec.plname, "opus") ||
+#endif
+ false;
+}
+
+} // namespace
+
+CodecManager::CodecManager() {
+ thread_checker_.DetachFromThread();
+}
+
+CodecManager::~CodecManager() = default;
+
+bool CodecManager::RegisterEncoder(const CodecInst& send_codec) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ int codec_id = IsValidSendCodec(send_codec);
+
+ // Check for reported errors from function IsValidSendCodec().
+ if (codec_id < 0) {
+ return false;
+ }
+
+ int dummy_id = 0;
+ switch (RentACodec::RegisterRedPayloadType(
+ &codec_stack_params_.red_payload_types, send_codec)) {
+ case RentACodec::RegistrationResult::kOk:
+ return true;
+ case RentACodec::RegistrationResult::kBadFreq:
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, dummy_id,
+ "RegisterSendCodec() failed, invalid frequency for RED"
+ " registration");
+ return false;
+ case RentACodec::RegistrationResult::kSkip:
+ break;
+ }
+ switch (RentACodec::RegisterCngPayloadType(
+ &codec_stack_params_.cng_payload_types, send_codec)) {
+ case RentACodec::RegistrationResult::kOk:
+ return true;
+ case RentACodec::RegistrationResult::kBadFreq:
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, dummy_id,
+ "RegisterSendCodec() failed, invalid frequency for CNG"
+ " registration");
+ return false;
+ case RentACodec::RegistrationResult::kSkip:
+ break;
+ }
+
+ if (IsOpus(send_codec)) {
+ // VAD/DTX not supported.
+ codec_stack_params_.use_cng = false;
+ }
+
+ send_codec_inst_ = rtc::Optional<CodecInst>(send_codec);
+ codec_stack_params_.speech_encoder = nullptr; // Caller must recreate it.
+ return true;
+}
+
+CodecInst CodecManager::ForgeCodecInst(
+ const AudioEncoder* external_speech_encoder) {
+ CodecInst ci;
+ ci.channels = external_speech_encoder->NumChannels();
+ ci.plfreq = external_speech_encoder->SampleRateHz();
+ ci.pacsize = rtc::CheckedDivExact(
+ static_cast<int>(external_speech_encoder->Max10MsFramesInAPacket() *
+ ci.plfreq),
+ 100);
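+  // E.g., an encoder that packs two 10 ms frames per packet at 8000 Hz gets a
+  // pacsize of 2 * 8000 / 100 = 160 samples.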
+ ci.pltype = -1; // Not valid.
+ ci.rate = -1; // Not valid.
+ static const char kName[] = "external";
+ memcpy(ci.plname, kName, sizeof(kName));
+ return ci;
+}
+
+bool CodecManager::SetCopyRed(bool enable) {
+ if (enable && codec_stack_params_.use_codec_fec) {
+ WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, 0,
+ "Codec internal FEC and RED cannot be co-enabled.");
+ return false;
+ }
+ if (enable && send_codec_inst_ &&
+ codec_stack_params_.red_payload_types.count(send_codec_inst_->plfreq) <
+ 1) {
+ WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, 0,
+ "Cannot enable RED at %i Hz.", send_codec_inst_->plfreq);
+ return false;
+ }
+ codec_stack_params_.use_red = enable;
+ return true;
+}
+
+bool CodecManager::SetVAD(bool enable, ACMVADMode mode) {
+ // Sanity check of the mode.
+ RTC_DCHECK(mode == VADNormal || mode == VADLowBitrate || mode == VADAggr ||
+ mode == VADVeryAggr);
+
+ // Check that the send codec is mono. We don't support VAD/DTX for stereo
+ // sending.
+ const bool stereo_send =
+ codec_stack_params_.speech_encoder
+ ? (codec_stack_params_.speech_encoder->NumChannels() != 1)
+ : false;
+ if (enable && stereo_send) {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, 0,
+ "VAD/DTX not supported for stereo sending");
+ return false;
+ }
+
+ // TODO(kwiberg): This doesn't protect Opus when injected as an external
+ // encoder.
+ if (send_codec_inst_ && IsOpus(*send_codec_inst_)) {
+ // VAD/DTX not supported, but don't fail.
+ enable = false;
+ }
+
+ codec_stack_params_.use_cng = enable;
+ codec_stack_params_.vad_mode = mode;
+ return true;
+}
+
+bool CodecManager::SetCodecFEC(bool enable_codec_fec) {
+ if (enable_codec_fec && codec_stack_params_.use_red) {
+ WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, 0,
+ "Codec internal FEC and RED cannot be co-enabled.");
+ return false;
+ }
+
+ codec_stack_params_.use_codec_fec = enable_codec_fec;
+ return true;
+}
+
+} // namespace acm2
+} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/acm2/codec_manager.h b/webrtc/modules/audio_coding/acm2/codec_manager.h
new file mode 100644
index 0000000000..9227e13f09
--- /dev/null
+++ b/webrtc/modules/audio_coding/acm2/codec_manager.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_ACM2_CODEC_MANAGER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_ACM2_CODEC_MANAGER_H_
+
+#include <map>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/optional.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/thread_checker.h"
+#include "webrtc/modules/audio_coding/acm2/rent_a_codec.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module_typedefs.h"
+#include "webrtc/common_types.h"
+
+namespace webrtc {
+
+class AudioDecoder;
+class AudioEncoder;
+
+namespace acm2 {
+
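+// A minimal usage sketch (hypothetical caller code; RentEncoderStack() is the
+// RentACodec member exercised by the unit tests):
+//
+//   acm2::CodecManager cm;
+//   acm2::RentACodec rac;
+//   if (cm.RegisterEncoder(send_codec)) {  // |send_codec| is a CodecInst.
+//     cm.SetVAD(true, VADNormal);
+//     rac.RentEncoderStack(cm.GetStackParams());
+//   }
+//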
+class CodecManager final {
+ public:
+ CodecManager();
+ ~CodecManager();
+
+ // Parses the given specification. On success, returns true and updates the
+ // stored CodecInst and stack parameters; on error, returns false.
+ bool RegisterEncoder(const CodecInst& send_codec);
+
+ static CodecInst ForgeCodecInst(const AudioEncoder* external_speech_encoder);
+
+ const CodecInst* GetCodecInst() const {
+ return send_codec_inst_ ? &*send_codec_inst_ : nullptr;
+ }
+ const RentACodec::StackParameters* GetStackParams() const {
+ return &codec_stack_params_;
+ }
+ RentACodec::StackParameters* GetStackParams() { return &codec_stack_params_; }
+
+ bool SetCopyRed(bool enable);
+
+ bool SetVAD(bool enable, ACMVADMode mode);
+
+ bool SetCodecFEC(bool enable_codec_fec);
+
+ private:
+ rtc::ThreadChecker thread_checker_;
+ rtc::Optional<CodecInst> send_codec_inst_;
+ RentACodec::StackParameters codec_stack_params_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(CodecManager);
+};
+
+} // namespace acm2
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_ACM2_CODEC_MANAGER_H_
diff --git a/webrtc/modules/audio_coding/acm2/codec_manager_unittest.cc b/webrtc/modules/audio_coding/acm2/codec_manager_unittest.cc
new file mode 100644
index 0000000000..dce8f38842
--- /dev/null
+++ b/webrtc/modules/audio_coding/acm2/codec_manager_unittest.cc
@@ -0,0 +1,73 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/audio_coding/codecs/mock/mock_audio_encoder.h"
+#include "webrtc/modules/audio_coding/acm2/codec_manager.h"
+#include "webrtc/modules/audio_coding/acm2/rent_a_codec.h"
+
+namespace webrtc {
+namespace acm2 {
+
+using ::testing::Return;
+
+namespace {
+
+// Create a MockAudioEncoder with some reasonable default behavior.
+rtc::scoped_ptr<MockAudioEncoder> CreateMockEncoder() {
+ auto enc = rtc_make_scoped_ptr(new MockAudioEncoder);
+ EXPECT_CALL(*enc, SampleRateHz()).WillRepeatedly(Return(8000));
+ EXPECT_CALL(*enc, NumChannels()).WillRepeatedly(Return(1));
+ EXPECT_CALL(*enc, Max10MsFramesInAPacket()).WillRepeatedly(Return(1));
+ EXPECT_CALL(*enc, Die());
+ return enc;
+}
+
+} // namespace
+
+TEST(CodecManagerTest, ExternalEncoderFec) {
+ auto enc0 = CreateMockEncoder();
+ auto enc1 = CreateMockEncoder();
+ {
+ ::testing::InSequence s;
+ EXPECT_CALL(*enc0, SetFec(false)).WillOnce(Return(true));
+ EXPECT_CALL(*enc0, Mark("A"));
+ EXPECT_CALL(*enc0, SetFec(true)).WillOnce(Return(true));
+ EXPECT_CALL(*enc1, SetFec(true)).WillOnce(Return(true));
+ EXPECT_CALL(*enc1, SetFec(false)).WillOnce(Return(true));
+ EXPECT_CALL(*enc0, Mark("B"));
+ EXPECT_CALL(*enc0, SetFec(false)).WillOnce(Return(true));
+ }
+
+ CodecManager cm;
+ RentACodec rac;
+ EXPECT_FALSE(cm.GetStackParams()->use_codec_fec);
+ cm.GetStackParams()->speech_encoder = enc0.get();
+ EXPECT_TRUE(rac.RentEncoderStack(cm.GetStackParams()));
+ EXPECT_FALSE(cm.GetStackParams()->use_codec_fec);
+ enc0->Mark("A");
+ EXPECT_EQ(true, cm.SetCodecFEC(true));
+ EXPECT_TRUE(rac.RentEncoderStack(cm.GetStackParams()));
+ EXPECT_TRUE(cm.GetStackParams()->use_codec_fec);
+ cm.GetStackParams()->speech_encoder = enc1.get();
+ EXPECT_TRUE(rac.RentEncoderStack(cm.GetStackParams()));
+ EXPECT_TRUE(cm.GetStackParams()->use_codec_fec);
+
+ EXPECT_EQ(true, cm.SetCodecFEC(false));
+ EXPECT_TRUE(rac.RentEncoderStack(cm.GetStackParams()));
+ enc0->Mark("B");
+ EXPECT_FALSE(cm.GetStackParams()->use_codec_fec);
+ cm.GetStackParams()->speech_encoder = enc0.get();
+ EXPECT_TRUE(rac.RentEncoderStack(cm.GetStackParams()));
+ EXPECT_FALSE(cm.GetStackParams()->use_codec_fec);
+}
+
+} // namespace acm2
+} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/acm2/initial_delay_manager.cc b/webrtc/modules/audio_coding/acm2/initial_delay_manager.cc
new file mode 100644
index 0000000000..0c31b83eb3
--- /dev/null
+++ b/webrtc/modules/audio_coding/acm2/initial_delay_manager.cc
@@ -0,0 +1,242 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/acm2/initial_delay_manager.h"
+
+namespace webrtc {
+
+namespace acm2 {
+
+InitialDelayManager::InitialDelayManager(int initial_delay_ms,
+ int late_packet_threshold)
+ : last_packet_type_(kUndefinedPacket),
+ last_receive_timestamp_(0),
+ timestamp_step_(0),
+ audio_payload_type_(kInvalidPayloadType),
+ initial_delay_ms_(initial_delay_ms),
+ buffered_audio_ms_(0),
+ buffering_(true),
+ playout_timestamp_(0),
+ late_packet_threshold_(late_packet_threshold) {
+ last_packet_rtp_info_.header.payloadType = kInvalidPayloadType;
+ last_packet_rtp_info_.header.ssrc = 0;
+ last_packet_rtp_info_.header.sequenceNumber = 0;
+ last_packet_rtp_info_.header.timestamp = 0;
+}
+
+void InitialDelayManager::UpdateLastReceivedPacket(
+ const WebRtcRTPHeader& rtp_info,
+ uint32_t receive_timestamp,
+ PacketType type,
+ bool new_codec,
+ int sample_rate_hz,
+ SyncStream* sync_stream) {
+ assert(sync_stream);
+
+  // If the payload type of audio packets is changing, |new_codec| has to be
+  // true.
+ assert(!(!new_codec && type == kAudioPacket &&
+ rtp_info.header.payloadType != audio_payload_type_));
+
+ // Just shorthands.
+ const RTPHeader* current_header = &rtp_info.header;
+ RTPHeader* last_header = &last_packet_rtp_info_.header;
+
+  // Don't do anything if receiving DTMF. The chance of DTMF in applications
+  // where initial delay is required is very low (we don't know of any). This
+  // avoids a lot of corner cases. The effect of ignoring DTMF packets is
+  // minimal. Note that DTMF packets are still inserted into NetEq; they are
+  // just not accounted for here.
+ if (type == kAvtPacket ||
+ (last_packet_type_ != kUndefinedPacket &&
+ !IsNewerSequenceNumber(current_header->sequenceNumber,
+ last_header->sequenceNumber))) {
+ sync_stream->num_sync_packets = 0;
+ return;
+ }
+
+  // If this is a new codec or the very first packet, record it and set up the
+  // state variables.
+ if (new_codec ||
+ last_packet_rtp_info_.header.payloadType == kInvalidPayloadType) {
+ timestamp_step_ = 0;
+ if (type == kAudioPacket)
+ audio_payload_type_ = rtp_info.header.payloadType;
+ else
+ audio_payload_type_ = kInvalidPayloadType; // Invalid.
+
+ RecordLastPacket(rtp_info, receive_timestamp, type);
+ sync_stream->num_sync_packets = 0;
+ buffered_audio_ms_ = 0;
+ buffering_ = true;
+
+    // If |buffering_| is set, then |playout_timestamp_| should have the
+    // correct value.
+ UpdatePlayoutTimestamp(*current_header, sample_rate_hz);
+ return;
+ }
+
+ uint32_t timestamp_increase = current_header->timestamp -
+ last_header->timestamp;
+
+ // |timestamp_increase| is invalid if this is the first packet. The effect is
+ // that |buffered_audio_ms_| is not increased.
+ if (last_packet_type_ == kUndefinedPacket) {
+ timestamp_increase = 0;
+ }
+
+ if (buffering_) {
+ buffered_audio_ms_ += timestamp_increase * 1000 / sample_rate_hz;
+
+ // A timestamp that reflects the initial delay, while buffering.
+ UpdatePlayoutTimestamp(*current_header, sample_rate_hz);
+
+ if (buffered_audio_ms_ >= initial_delay_ms_)
+ buffering_ = false;
+ }
+
+ if (current_header->sequenceNumber == last_header->sequenceNumber + 1) {
+    // Two consecutive packets; if the previous packet was also audio, we can
+    // update |timestamp_step_|.
+ if (last_packet_type_ == kAudioPacket)
+ timestamp_step_ = timestamp_increase;
+ RecordLastPacket(rtp_info, receive_timestamp, type);
+ sync_stream->num_sync_packets = 0;
+ return;
+ }
+
+ uint16_t packet_gap = current_header->sequenceNumber -
+ last_header->sequenceNumber - 1;
+
+ // For smooth transitions leave a gap between audio and sync packets.
+ sync_stream->num_sync_packets = last_packet_type_ == kSyncPacket ?
+ packet_gap - 1 : packet_gap - 2;
+
+ // Do nothing if we haven't received any audio packet.
+ if (sync_stream->num_sync_packets > 0 &&
+ audio_payload_type_ != kInvalidPayloadType) {
+ if (timestamp_step_ == 0) {
+      // Make an estimate for |timestamp_step_| if it has not been updated yet.
+ assert(packet_gap > 0);
+ timestamp_step_ = timestamp_increase / (packet_gap + 1);
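+      // E.g., a timestamp increase of 1600 across a gap of 4 missing packets
+      // gives an estimated step of 1600 / 5 = 320 timestamp units per packet.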
+ }
+ sync_stream->timestamp_step = timestamp_step_;
+
+ // Build the first sync-packet based on the current received packet.
+ memcpy(&sync_stream->rtp_info, &rtp_info, sizeof(rtp_info));
+ sync_stream->rtp_info.header.payloadType = audio_payload_type_;
+
+ uint16_t sequence_number_update = sync_stream->num_sync_packets + 1;
+ uint32_t timestamp_update = timestamp_step_ * sequence_number_update;
+
+ // Rewind sequence number and timestamps. This will give a more accurate
+ // description of the missing packets.
+ //
+ // Note that we leave a gap between the last packet in sync-stream and the
+ // current received packet, so it should be compensated for in the following
+ // computation of timestamps and sequence number.
+ sync_stream->rtp_info.header.sequenceNumber -= sequence_number_update;
+ sync_stream->receive_timestamp = receive_timestamp - timestamp_update;
+ sync_stream->rtp_info.header.timestamp -= timestamp_update;
+ sync_stream->rtp_info.header.payloadType = audio_payload_type_;
+ } else {
+ sync_stream->num_sync_packets = 0;
+ }
+
+ RecordLastPacket(rtp_info, receive_timestamp, type);
+ return;
+}
+
+void InitialDelayManager::RecordLastPacket(const WebRtcRTPHeader& rtp_info,
+ uint32_t receive_timestamp,
+ PacketType type) {
+ last_packet_type_ = type;
+ last_receive_timestamp_ = receive_timestamp;
+ memcpy(&last_packet_rtp_info_, &rtp_info, sizeof(rtp_info));
+}
+
+void InitialDelayManager::LatePackets(
+ uint32_t timestamp_now, SyncStream* sync_stream) {
+ assert(sync_stream);
+ sync_stream->num_sync_packets = 0;
+
+  // If there is no estimate of the timestamp increment, |timestamp_step_|,
+  // then we cannot estimate the number of late packets. If the last packet
+  // was a CNG packet, estimating late packets is not meaningful either, as a
+  // CNG packet is of unknown length. We could set a higher threshold if the
+  // last packet is CNG and continue execution, but this is how the ACM1 code
+  // was written.
+ if (timestamp_step_ <= 0 ||
+ last_packet_type_ == kCngPacket ||
+ last_packet_type_ == kUndefinedPacket ||
+ audio_payload_type_ == kInvalidPayloadType) // No audio packet received.
+ return;
+
+ int num_late_packets = (timestamp_now - last_receive_timestamp_) /
+ timestamp_step_;
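+  // For example (hypothetical numbers), with a timestamp step of 320 (20 ms
+  // frames at 16 kHz), a gap of 1600 timestamp units corresponds to five late
+  // packets.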
+
+ if (num_late_packets < late_packet_threshold_)
+ return;
+
+ int sync_offset = 1; // One gap at the end of the sync-stream.
+ if (last_packet_type_ != kSyncPacket) {
+ ++sync_offset; // One more gap at the beginning of the sync-stream.
+ --num_late_packets;
+ }
+ uint32_t timestamp_update = sync_offset * timestamp_step_;
+
+ sync_stream->num_sync_packets = num_late_packets;
+ if (num_late_packets == 0)
+ return;
+
+ // Build the first sync-packet in the sync-stream.
+ memcpy(&sync_stream->rtp_info, &last_packet_rtp_info_,
+ sizeof(last_packet_rtp_info_));
+
+ // Increase sequence number and timestamps.
+ sync_stream->rtp_info.header.sequenceNumber += sync_offset;
+ sync_stream->rtp_info.header.timestamp += timestamp_update;
+ sync_stream->receive_timestamp = last_receive_timestamp_ + timestamp_update;
+ sync_stream->timestamp_step = timestamp_step_;
+
+ // Sync-packets have audio payload-type.
+ sync_stream->rtp_info.header.payloadType = audio_payload_type_;
+
+ uint16_t sequence_number_update = num_late_packets + sync_offset - 1;
+ timestamp_update = sequence_number_update * timestamp_step_;
+
+  // Fake the last RTP header, assuming the caller will inject the whole
+  // sync-stream.
+ last_packet_rtp_info_.header.timestamp += timestamp_update;
+ last_packet_rtp_info_.header.sequenceNumber += sequence_number_update;
+ last_packet_rtp_info_.header.payloadType = audio_payload_type_;
+ last_receive_timestamp_ += timestamp_update;
+
+ last_packet_type_ = kSyncPacket;
+ return;
+}
+
+bool InitialDelayManager::GetPlayoutTimestamp(uint32_t* playout_timestamp) {
+ if (!buffering_) {
+ return false;
+ }
+ *playout_timestamp = playout_timestamp_;
+ return true;
+}
+
+void InitialDelayManager::DisableBuffering() {
+ buffering_ = false;
+}
+
+void InitialDelayManager::UpdatePlayoutTimestamp(
+ const RTPHeader& current_header, int sample_rate_hz) {
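+  // E.g., with an initial delay of 200 ms at 16 kHz, the playout timestamp
+  // trails the latest received timestamp by 200 * 16000 / 1000 = 3200
+  // timestamp units.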
+ playout_timestamp_ = current_header.timestamp - static_cast<uint32_t>(
+ initial_delay_ms_ * sample_rate_hz / 1000);
+}
+
+} // namespace acm2
+
+} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/acm2/initial_delay_manager.h b/webrtc/modules/audio_coding/acm2/initial_delay_manager.h
new file mode 100644
index 0000000000..32dd1260f1
--- /dev/null
+++ b/webrtc/modules/audio_coding/acm2/initial_delay_manager.h
@@ -0,0 +1,120 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_ACM2_INITIAL_DELAY_MANAGER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_ACM2_INITIAL_DELAY_MANAGER_H_
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/include/module_common_types.h"
+
+namespace webrtc {
+
+namespace acm2 {
+
+class InitialDelayManager {
+ public:
+ enum PacketType {
+ kUndefinedPacket, kCngPacket, kAvtPacket, kAudioPacket, kSyncPacket };
+
+ // Specifies a stream of sync-packets.
+ struct SyncStream {
+ SyncStream()
+ : num_sync_packets(0),
+ receive_timestamp(0),
+ timestamp_step(0) {
+ memset(&rtp_info, 0, sizeof(rtp_info));
+ }
+
+ int num_sync_packets;
+
+ // RTP header of the first sync-packet in the sequence.
+ WebRtcRTPHeader rtp_info;
+
+ // Received timestamp of the first sync-packet in the sequence.
+ uint32_t receive_timestamp;
+
+ // Samples per packet.
+ uint32_t timestamp_step;
+ };
+
+ InitialDelayManager(int initial_delay_ms, int late_packet_threshold);
+
+  // Update with the last received RTP header, |header|, and receive timestamp,
+  // |receive_timestamp|. |type| indicates the packet type. If the codec has
+  // changed since the last call, |new_codec| should be true. |sample_rate_hz|
+  // is the decoder's sampling rate in Hz. |header| has a field to store the
+  // sampling rate, but we are not sure it is properly set at the send side,
+  // and |header| is declared constant in the caller of this function
+  // (AcmReceiver::InsertPacket()). |sync_stream| contains information required
+  // to generate a stream of sync packets.
+ void UpdateLastReceivedPacket(const WebRtcRTPHeader& header,
+ uint32_t receive_timestamp,
+ PacketType type,
+ bool new_codec,
+ int sample_rate_hz,
+ SyncStream* sync_stream);
+
+  // Based on the last received timestamp and the given current timestamp, a
+  // sequence of late (or perhaps missing) packets is computed.
+ void LatePackets(uint32_t timestamp_now, SyncStream* sync_stream);
+
+ // Get playout timestamp.
+ // Returns true if the timestamp is valid (when buffering), otherwise false.
+ bool GetPlayoutTimestamp(uint32_t* playout_timestamp);
+
+  // True if the buffered audio is less than the given initial delay (specified
+  // in the constructor). Buffering might be disabled by the client of this
+  // class.
+ bool buffering() { return buffering_; }
+
+ // Disable buffering in the class.
+ void DisableBuffering();
+
+  // True if any packet has been received for buffering.
+ bool PacketBuffered() { return last_packet_type_ != kUndefinedPacket; }
+
+ private:
+ static const uint8_t kInvalidPayloadType = 0xFF;
+
+  // Updates the playout timestamp. While buffering, this is about
+  // |initial_delay_ms_| milliseconds behind the latest received timestamp.
+ void UpdatePlayoutTimestamp(const RTPHeader& current_header,
+ int sample_rate_hz);
+
+  // Records an RTP header and related parameters.
+ void RecordLastPacket(const WebRtcRTPHeader& rtp_info,
+ uint32_t receive_timestamp,
+ PacketType type);
+
+ PacketType last_packet_type_;
+ WebRtcRTPHeader last_packet_rtp_info_;
+ uint32_t last_receive_timestamp_;
+ uint32_t timestamp_step_;
+ uint8_t audio_payload_type_;
+ const int initial_delay_ms_;
+ int buffered_audio_ms_;
+ bool buffering_;
+
+  // During the initial phase, where packets are being accumulated and silence
+  // is played out, |playout_timestamp_| is a timestamp equal to
+  // |initial_delay_ms_| milliseconds earlier than the most recently received
+  // RTP timestamp.
+ uint32_t playout_timestamp_;
+
+  // If the number of late packets (computed based on the current timestamp and
+  // the last received timestamp) exceeds this value, a sequence of
+  // sync-packets is specified.
+ const int late_packet_threshold_;
+};
+
+} // namespace acm2
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_ACM2_INITIAL_DELAY_MANAGER_H_
diff --git a/webrtc/modules/audio_coding/acm2/initial_delay_manager_unittest.cc b/webrtc/modules/audio_coding/acm2/initial_delay_manager_unittest.cc
new file mode 100644
index 0000000000..d86d221851
--- /dev/null
+++ b/webrtc/modules/audio_coding/acm2/initial_delay_manager_unittest.cc
@@ -0,0 +1,376 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string.h>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/audio_coding/acm2/initial_delay_manager.h"
+
+namespace webrtc {
+
+namespace acm2 {
+
+namespace {
+
+const uint8_t kAudioPayloadType = 0;
+const uint8_t kCngPayloadType = 1;
+const uint8_t kAvtPayloadType = 2;
+
+const int kSamplingRateHz = 16000;
+const int kInitDelayMs = 200;
+const int kFrameSizeMs = 20;
+const uint32_t kTimestampStep = kFrameSizeMs * kSamplingRateHz / 1000;
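+// With 20 ms frames at 16 kHz, kTimestampStep is 320 timestamp units.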
+const int kLatePacketThreshold = 5;
+
+void InitRtpInfo(WebRtcRTPHeader* rtp_info) {
+ memset(rtp_info, 0, sizeof(*rtp_info));
+ rtp_info->header.markerBit = false;
+ rtp_info->header.payloadType = kAudioPayloadType;
+ rtp_info->header.sequenceNumber = 1234;
+ rtp_info->header.timestamp = 0xFFFFFFFD; // Close to wrap around.
+ rtp_info->header.ssrc = 0x87654321; // Arbitrary.
+ rtp_info->header.numCSRCs = 0; // Arbitrary.
+ rtp_info->header.paddingLength = 0;
+ rtp_info->header.headerLength = sizeof(RTPHeader);
+ rtp_info->header.payload_type_frequency = kSamplingRateHz;
+ rtp_info->header.extension.absoluteSendTime = 0;
+ rtp_info->header.extension.transmissionTimeOffset = 0;
+ rtp_info->frameType = kAudioFrameSpeech;
+}
+
+void ForwardRtpHeader(int n,
+ WebRtcRTPHeader* rtp_info,
+ uint32_t* rtp_receive_timestamp) {
+ rtp_info->header.sequenceNumber += n;
+ rtp_info->header.timestamp += n * kTimestampStep;
+ *rtp_receive_timestamp += n * kTimestampStep;
+}
+
+void NextRtpHeader(WebRtcRTPHeader* rtp_info,
+ uint32_t* rtp_receive_timestamp) {
+ ForwardRtpHeader(1, rtp_info, rtp_receive_timestamp);
+}
+
+} // namespace
+
+class InitialDelayManagerTest : public ::testing::Test {
+ protected:
+ InitialDelayManagerTest()
+ : manager_(new InitialDelayManager(kInitDelayMs, kLatePacketThreshold)),
+ rtp_receive_timestamp_(1111) { } // Arbitrary starting point.
+
+ virtual void SetUp() {
+ ASSERT_TRUE(manager_.get() != NULL);
+ InitRtpInfo(&rtp_info_);
+ }
+
+ void GetNextRtpHeader(WebRtcRTPHeader* rtp_info,
+ uint32_t* rtp_receive_timestamp) const {
+ memcpy(rtp_info, &rtp_info_, sizeof(*rtp_info));
+ *rtp_receive_timestamp = rtp_receive_timestamp_;
+ NextRtpHeader(rtp_info, rtp_receive_timestamp);
+ }
+
+ rtc::scoped_ptr<InitialDelayManager> manager_;
+ WebRtcRTPHeader rtp_info_;
+ uint32_t rtp_receive_timestamp_;
+};
+
+TEST_F(InitialDelayManagerTest, Init) {
+ EXPECT_TRUE(manager_->buffering());
+ EXPECT_FALSE(manager_->PacketBuffered());
+ manager_->DisableBuffering();
+ EXPECT_FALSE(manager_->buffering());
+ InitialDelayManager::SyncStream sync_stream;
+
+ // Call before any packet inserted.
+ manager_->LatePackets(0x6789ABCD, &sync_stream); // Arbitrary but large
+ // receive timestamp.
+ EXPECT_EQ(0, sync_stream.num_sync_packets);
+
+ // Insert non-audio packets, a CNG and DTMF.
+ rtp_info_.header.payloadType = kCngPayloadType;
+ manager_->UpdateLastReceivedPacket(rtp_info_, rtp_receive_timestamp_,
+ InitialDelayManager::kCngPacket, false,
+ kSamplingRateHz, &sync_stream);
+ EXPECT_EQ(0, sync_stream.num_sync_packets);
+ ForwardRtpHeader(5, &rtp_info_, &rtp_receive_timestamp_);
+ rtp_info_.header.payloadType = kAvtPayloadType;
+ manager_->UpdateLastReceivedPacket(rtp_info_, rtp_receive_timestamp_,
+ InitialDelayManager::kAvtPacket, false,
+ kSamplingRateHz, &sync_stream);
+  // Gap in sequence numbers, but no audio received; sync-stream should be empty.
+ EXPECT_EQ(0, sync_stream.num_sync_packets);
+ manager_->LatePackets(0x45678987, &sync_stream); // Large arbitrary receive
+ // timestamp.
+ // |manager_| has no estimate of timestamp-step and has not received any
+ // audio packet.
+ EXPECT_EQ(0, sync_stream.num_sync_packets);
+
+
+ NextRtpHeader(&rtp_info_, &rtp_receive_timestamp_);
+ rtp_info_.header.payloadType = kAudioPayloadType;
+ // First packet.
+ manager_->UpdateLastReceivedPacket(rtp_info_, rtp_receive_timestamp_,
+ InitialDelayManager::kAudioPacket, true,
+ kSamplingRateHz, &sync_stream);
+ EXPECT_EQ(0, sync_stream.num_sync_packets);
+
+  // Call LatePackets() after only one packet has been inserted.
+ manager_->LatePackets(0x6789ABCD, &sync_stream); // Arbitrary but large
+ // receive timestamp.
+ EXPECT_EQ(0, sync_stream.num_sync_packets);
+
+  // Gap in timestamps, but this packet is also flagged as "new"; therefore,
+  // we expect an empty sync-stream.
+ ForwardRtpHeader(5, &rtp_info_, &rtp_receive_timestamp_);
+ manager_->UpdateLastReceivedPacket(rtp_info_, rtp_receive_timestamp_,
+ InitialDelayManager::kAudioPacket, true,
+ kSamplingRateHz, &sync_stream);
+}
+
+TEST_F(InitialDelayManagerTest, MissingPacket) {
+ InitialDelayManager::SyncStream sync_stream;
+ // First packet.
+ manager_->UpdateLastReceivedPacket(rtp_info_, rtp_receive_timestamp_,
+ InitialDelayManager::kAudioPacket, true,
+ kSamplingRateHz, &sync_stream);
+ ASSERT_EQ(0, sync_stream.num_sync_packets);
+
+ // Second packet.
+ NextRtpHeader(&rtp_info_, &rtp_receive_timestamp_);
+ manager_->UpdateLastReceivedPacket(rtp_info_, rtp_receive_timestamp_,
+ InitialDelayManager::kAudioPacket, false,
+ kSamplingRateHz, &sync_stream);
+ ASSERT_EQ(0, sync_stream.num_sync_packets);
+
+ // Third packet, missing packets start from here.
+ NextRtpHeader(&rtp_info_, &rtp_receive_timestamp_);
+
+  // First sync-packet in the sync-stream is the one after the above packet.
+ WebRtcRTPHeader expected_rtp_info;
+ uint32_t expected_receive_timestamp;
+ GetNextRtpHeader(&expected_rtp_info, &expected_receive_timestamp);
+
+ const int kNumMissingPackets = 10;
+ ForwardRtpHeader(kNumMissingPackets, &rtp_info_, &rtp_receive_timestamp_);
+ manager_->UpdateLastReceivedPacket(rtp_info_, rtp_receive_timestamp_,
+ InitialDelayManager::kAudioPacket, false,
+ kSamplingRateHz, &sync_stream);
+ EXPECT_EQ(kNumMissingPackets - 2, sync_stream.num_sync_packets);
+ EXPECT_EQ(0, memcmp(&expected_rtp_info, &sync_stream.rtp_info,
+ sizeof(expected_rtp_info)));
+ EXPECT_EQ(kTimestampStep, sync_stream.timestamp_step);
+ EXPECT_EQ(expected_receive_timestamp, sync_stream.receive_timestamp);
+}
+
+// There haven't been any consecutive packets to estimate the timestamp-step.
+TEST_F(InitialDelayManagerTest, MissingPacketEstimateTimestamp) {
+ InitialDelayManager::SyncStream sync_stream;
+ // First packet.
+ manager_->UpdateLastReceivedPacket(rtp_info_, rtp_receive_timestamp_,
+ InitialDelayManager::kAudioPacket, true,
+ kSamplingRateHz, &sync_stream);
+ ASSERT_EQ(0, sync_stream.num_sync_packets);
+
+ // Second packet, missing packets start here.
+ NextRtpHeader(&rtp_info_, &rtp_receive_timestamp_);
+
+  // First sync-packet in the sync-stream is the one after the above packet.
+ WebRtcRTPHeader expected_rtp_info;
+ uint32_t expected_receive_timestamp;
+ GetNextRtpHeader(&expected_rtp_info, &expected_receive_timestamp);
+
+ const int kNumMissingPackets = 10;
+ ForwardRtpHeader(kNumMissingPackets, &rtp_info_, &rtp_receive_timestamp_);
+ manager_->UpdateLastReceivedPacket(rtp_info_, rtp_receive_timestamp_,
+ InitialDelayManager::kAudioPacket, false,
+ kSamplingRateHz, &sync_stream);
+ EXPECT_EQ(kNumMissingPackets - 2, sync_stream.num_sync_packets);
+ EXPECT_EQ(0, memcmp(&expected_rtp_info, &sync_stream.rtp_info,
+ sizeof(expected_rtp_info)));
+}
+
+TEST_F(InitialDelayManagerTest, MissingPacketWithCng) {
+ InitialDelayManager::SyncStream sync_stream;
+
+ // First packet.
+ manager_->UpdateLastReceivedPacket(rtp_info_, rtp_receive_timestamp_,
+ InitialDelayManager::kAudioPacket, true,
+ kSamplingRateHz, &sync_stream);
+ ASSERT_EQ(0, sync_stream.num_sync_packets);
+
+ // Second packet as CNG.
+ NextRtpHeader(&rtp_info_, &rtp_receive_timestamp_);
+ rtp_info_.header.payloadType = kCngPayloadType;
+ manager_->UpdateLastReceivedPacket(rtp_info_, rtp_receive_timestamp_,
+ InitialDelayManager::kCngPacket, false,
+ kSamplingRateHz, &sync_stream);
+ ASSERT_EQ(0, sync_stream.num_sync_packets);
+
+ // Audio packet after CNG. Missing packets start from this packet.
+ rtp_info_.header.payloadType = kAudioPayloadType;
+ NextRtpHeader(&rtp_info_, &rtp_receive_timestamp_);
+
+  // Timestamps are advanced by more than a regular packet step.
+ const uint32_t kCngTimestampStep = 5 * kTimestampStep;
+ rtp_info_.header.timestamp += kCngTimestampStep;
+ rtp_receive_timestamp_ += kCngTimestampStep;
+
+  // First sync-packet in the sync-stream is the one after the above packet.
+ WebRtcRTPHeader expected_rtp_info;
+ uint32_t expected_receive_timestamp;
+ GetNextRtpHeader(&expected_rtp_info, &expected_receive_timestamp);
+
+ const int kNumMissingPackets = 10;
+ ForwardRtpHeader(kNumMissingPackets, &rtp_info_, &rtp_receive_timestamp_);
+ manager_->UpdateLastReceivedPacket(rtp_info_, rtp_receive_timestamp_,
+ InitialDelayManager::kAudioPacket, false,
+ kSamplingRateHz, &sync_stream);
+ EXPECT_EQ(kNumMissingPackets - 2, sync_stream.num_sync_packets);
+ EXPECT_EQ(0, memcmp(&expected_rtp_info, &sync_stream.rtp_info,
+ sizeof(expected_rtp_info)));
+ EXPECT_EQ(kTimestampStep, sync_stream.timestamp_step);
+ EXPECT_EQ(expected_receive_timestamp, sync_stream.receive_timestamp);
+}
+
+TEST_F(InitialDelayManagerTest, LatePacket) {
+ InitialDelayManager::SyncStream sync_stream;
+ // First packet.
+ manager_->UpdateLastReceivedPacket(rtp_info_, rtp_receive_timestamp_,
+ InitialDelayManager::kAudioPacket, true,
+ kSamplingRateHz, &sync_stream);
+ ASSERT_EQ(0, sync_stream.num_sync_packets);
+
+ // Second packet.
+ NextRtpHeader(&rtp_info_, &rtp_receive_timestamp_);
+ manager_->UpdateLastReceivedPacket(rtp_info_, rtp_receive_timestamp_,
+ InitialDelayManager::kAudioPacket, false,
+ kSamplingRateHz, &sync_stream);
+ ASSERT_EQ(0, sync_stream.num_sync_packets);
+
+  // Timestamp increment for 10 ms.
+ const uint32_t kTimestampStep10Ms = kSamplingRateHz / 100;
+
+ // 10 ms after the second packet is inserted.
+ uint32_t timestamp_now = rtp_receive_timestamp_ + kTimestampStep10Ms;
+
+ // Third packet, late packets start from this packet.
+ NextRtpHeader(&rtp_info_, &rtp_receive_timestamp_);
+
+  // First sync-packet in the sync-stream is the one after the above packet.
+ WebRtcRTPHeader expected_rtp_info;
+ uint32_t expected_receive_timestamp;
+ GetNextRtpHeader(&expected_rtp_info, &expected_receive_timestamp);
+
+ const int kLatePacketThreshold = 5;
+
+ int expected_num_late_packets = kLatePacketThreshold - 1;
+ for (int k = 0; k < 2; ++k) {
+ for (int n = 1; n < kLatePacketThreshold * kFrameSizeMs / 10; ++n) {
+ manager_->LatePackets(timestamp_now, &sync_stream);
+ EXPECT_EQ(0, sync_stream.num_sync_packets) <<
+ "try " << k << " loop number " << n;
+ timestamp_now += kTimestampStep10Ms;
+ }
+ manager_->LatePackets(timestamp_now, &sync_stream);
+
+ EXPECT_EQ(expected_num_late_packets, sync_stream.num_sync_packets) <<
+ "try " << k;
+ EXPECT_EQ(kTimestampStep, sync_stream.timestamp_step) <<
+ "try " << k;
+ EXPECT_EQ(expected_receive_timestamp, sync_stream.receive_timestamp) <<
+ "try " << k;
+ EXPECT_EQ(0, memcmp(&expected_rtp_info, &sync_stream.rtp_info,
+ sizeof(expected_rtp_info)));
+
+ timestamp_now += kTimestampStep10Ms;
+
+    // |manager_| assumes the |sync_stream| obtained by LatePackets() is fully
+    // injected. The last injected packet is a sync-packet; therefore, there
+    // will be no gap between the sync-streams of this iteration and the next.
+ ForwardRtpHeader(sync_stream.num_sync_packets, &expected_rtp_info,
+ &expected_receive_timestamp);
+ expected_num_late_packets = kLatePacketThreshold;
+ }
+
+ // Test "no-gap" for missing packet after late packet.
+ // |expected_rtp_info| is the expected sync-packet if any packet is missing.
+ memcpy(&rtp_info_, &expected_rtp_info, sizeof(rtp_info_));
+ rtp_receive_timestamp_ = expected_receive_timestamp;
+
+ int kNumMissingPackets = 3; // Arbitrary.
+ ForwardRtpHeader(kNumMissingPackets, &rtp_info_, &rtp_receive_timestamp_);
+ manager_->UpdateLastReceivedPacket(rtp_info_, rtp_receive_timestamp_,
+ InitialDelayManager::kAudioPacket, false,
+ kSamplingRateHz, &sync_stream);
+
+ // Note that there is one packet gap between the last sync-packet and the
+ // latest inserted packet.
+ EXPECT_EQ(kNumMissingPackets - 1, sync_stream.num_sync_packets);
+ EXPECT_EQ(kTimestampStep, sync_stream.timestamp_step);
+ EXPECT_EQ(expected_receive_timestamp, sync_stream.receive_timestamp);
+ EXPECT_EQ(0, memcmp(&expected_rtp_info, &sync_stream.rtp_info,
+ sizeof(expected_rtp_info)));
+}
+
+TEST_F(InitialDelayManagerTest, NoLatePacketAfterCng) {
+ InitialDelayManager::SyncStream sync_stream;
+
+ // First packet.
+ manager_->UpdateLastReceivedPacket(rtp_info_, rtp_receive_timestamp_,
+ InitialDelayManager::kAudioPacket, true,
+ kSamplingRateHz, &sync_stream);
+ ASSERT_EQ(0, sync_stream.num_sync_packets);
+
+ // Second packet as CNG.
+ NextRtpHeader(&rtp_info_, &rtp_receive_timestamp_);
+ rtp_info_.header.payloadType = kCngPayloadType;
+ manager_->UpdateLastReceivedPacket(rtp_info_, rtp_receive_timestamp_,
+ InitialDelayManager::kCngPacket, false,
+ kSamplingRateHz, &sync_stream);
+ ASSERT_EQ(0, sync_stream.num_sync_packets);
+
+  // Forward the time by more than |kLatePacketThreshold| packets.
+ uint32_t timestamp_now = rtp_receive_timestamp_ + kTimestampStep * (3 +
+ kLatePacketThreshold);
+
+ manager_->LatePackets(timestamp_now, &sync_stream);
+ EXPECT_EQ(0, sync_stream.num_sync_packets);
+}
+
+TEST_F(InitialDelayManagerTest, BufferingAudio) {
+ InitialDelayManager::SyncStream sync_stream;
+
+  // The very first packet is not counted in the calculation of buffered audio.
+ for (int n = 0; n < kInitDelayMs / kFrameSizeMs; ++n) {
+ manager_->UpdateLastReceivedPacket(rtp_info_, rtp_receive_timestamp_,
+ InitialDelayManager::kAudioPacket,
+ n == 0, kSamplingRateHz, &sync_stream);
+ EXPECT_EQ(0, sync_stream.num_sync_packets);
+ EXPECT_TRUE(manager_->buffering());
+ const uint32_t expected_playout_timestamp = rtp_info_.header.timestamp -
+ kInitDelayMs * kSamplingRateHz / 1000;
+ uint32_t actual_playout_timestamp = 0;
+ EXPECT_TRUE(manager_->GetPlayoutTimestamp(&actual_playout_timestamp));
+ EXPECT_EQ(expected_playout_timestamp, actual_playout_timestamp);
+ NextRtpHeader(&rtp_info_, &rtp_receive_timestamp_);
+ }
+
+ manager_->UpdateLastReceivedPacket(rtp_info_, rtp_receive_timestamp_,
+ InitialDelayManager::kAudioPacket,
+ false, kSamplingRateHz, &sync_stream);
+ EXPECT_EQ(0, sync_stream.num_sync_packets);
+ EXPECT_FALSE(manager_->buffering());
+}
+
+} // namespace acm2
+
+} // namespace webrtc
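
For reference, the timestamp arithmetic the tests above lean on is worth spelling out: at 16 kHz with 20 ms frames, each packet advances the RTP timestamp by 320 ticks, and the CNG test uses a step five times larger. A minimal standalone C++ sketch (constants mirror the test fixture; this snippet is illustrative and not part of the patch):

    #include <cassert>
    #include <cstdint>

    int main() {
      const int kSamplingRateHz = 16000;
      const int kFrameSizeMs = 20;
      // One frame of RTP timestamp ticks: 20 ms at 16 samples per ms.
      const uint32_t kTimestampStep = kFrameSizeMs * kSamplingRateHz / 1000;
      assert(kTimestampStep == 320u);
      // MissingPacketWithCng uses a CNG step five times larger.
      const uint32_t kCngTimestampStep = 5 * kTimestampStep;
      assert(kCngTimestampStep == 1600u);
      return 0;
    }
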
diff --git a/webrtc/modules/audio_coding/acm2/rent_a_codec.cc b/webrtc/modules/audio_coding/acm2/rent_a_codec.cc
new file mode 100644
index 0000000000..5695fd6e08
--- /dev/null
+++ b/webrtc/modules/audio_coding/acm2/rent_a_codec.cc
@@ -0,0 +1,307 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/acm2/rent_a_codec.h"
+
+#include <utility>
+
+#include "webrtc/base/logging.h"
+#include "webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.h"
+#include "webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.h"
+#ifdef WEBRTC_CODEC_G722
+#include "webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.h"
+#endif
+#ifdef WEBRTC_CODEC_ILBC
+#include "webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h"
+#endif
+#ifdef WEBRTC_CODEC_ISACFX
+#include "webrtc/modules/audio_coding/codecs/isac/fix/include/audio_decoder_isacfix.h"
+#include "webrtc/modules/audio_coding/codecs/isac/fix/include/audio_encoder_isacfix.h"
+#endif
+#ifdef WEBRTC_CODEC_ISAC
+#include "webrtc/modules/audio_coding/codecs/isac/main/include/audio_decoder_isac.h"
+#include "webrtc/modules/audio_coding/codecs/isac/main/include/audio_encoder_isac.h"
+#endif
+#ifdef WEBRTC_CODEC_OPUS
+#include "webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h"
+#endif
+#include "webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.h"
+#ifdef WEBRTC_CODEC_RED
+#include "webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.h"
+#endif
+#include "webrtc/modules/audio_coding/acm2/acm_codec_database.h"
+#include "webrtc/modules/audio_coding/acm2/acm_common_defs.h"
+
+namespace webrtc {
+namespace acm2 {
+
+rtc::Optional<RentACodec::CodecId> RentACodec::CodecIdByParams(
+ const char* payload_name,
+ int sampling_freq_hz,
+ size_t channels) {
+ return CodecIdFromIndex(
+ ACMCodecDB::CodecId(payload_name, sampling_freq_hz, channels));
+}
+
+rtc::Optional<CodecInst> RentACodec::CodecInstById(CodecId codec_id) {
+ rtc::Optional<int> mi = CodecIndexFromId(codec_id);
+ return mi ? rtc::Optional<CodecInst>(Database()[*mi])
+ : rtc::Optional<CodecInst>();
+}
+
+rtc::Optional<RentACodec::CodecId> RentACodec::CodecIdByInst(
+ const CodecInst& codec_inst) {
+ return CodecIdFromIndex(ACMCodecDB::CodecNumber(codec_inst));
+}
+
+rtc::Optional<CodecInst> RentACodec::CodecInstByParams(const char* payload_name,
+ int sampling_freq_hz,
+ size_t channels) {
+ rtc::Optional<CodecId> codec_id =
+ CodecIdByParams(payload_name, sampling_freq_hz, channels);
+ if (!codec_id)
+ return rtc::Optional<CodecInst>();
+ rtc::Optional<CodecInst> ci = CodecInstById(*codec_id);
+ RTC_DCHECK(ci);
+
+ // Keep the number of channels from the function call. For most codecs it
+ // will be the same value as in default codec settings, but not for all.
+ ci->channels = channels;
+
+ return ci;
+}
+
+bool RentACodec::IsCodecValid(const CodecInst& codec_inst) {
+ return ACMCodecDB::CodecNumber(codec_inst) >= 0;
+}
+
+rtc::Optional<bool> RentACodec::IsSupportedNumChannels(CodecId codec_id,
+ size_t num_channels) {
+ auto i = CodecIndexFromId(codec_id);
+ return i ? rtc::Optional<bool>(
+ ACMCodecDB::codec_settings_[*i].channel_support >=
+ num_channels)
+ : rtc::Optional<bool>();
+}
+
+rtc::ArrayView<const CodecInst> RentACodec::Database() {
+ return rtc::ArrayView<const CodecInst>(ACMCodecDB::database_,
+ NumberOfCodecs());
+}
+
+rtc::Optional<NetEqDecoder> RentACodec::NetEqDecoderFromCodecId(
+ CodecId codec_id,
+ size_t num_channels) {
+ rtc::Optional<int> i = CodecIndexFromId(codec_id);
+ if (!i)
+ return rtc::Optional<NetEqDecoder>();
+ const NetEqDecoder ned = ACMCodecDB::neteq_decoders_[*i];
+ return rtc::Optional<NetEqDecoder>(
+ (ned == NetEqDecoder::kDecoderOpus && num_channels == 2)
+ ? NetEqDecoder::kDecoderOpus_2ch
+ : ned);
+}
+
+RentACodec::RegistrationResult RentACodec::RegisterCngPayloadType(
+ std::map<int, int>* pt_map,
+ const CodecInst& codec_inst) {
+ if (STR_CASE_CMP(codec_inst.plname, "CN") != 0)
+ return RegistrationResult::kSkip;
+ switch (codec_inst.plfreq) {
+ case 8000:
+ case 16000:
+ case 32000:
+ case 48000:
+ (*pt_map)[codec_inst.plfreq] = codec_inst.pltype;
+ return RegistrationResult::kOk;
+ default:
+ return RegistrationResult::kBadFreq;
+ }
+}
+
+RentACodec::RegistrationResult RentACodec::RegisterRedPayloadType(
+ std::map<int, int>* pt_map,
+ const CodecInst& codec_inst) {
+ if (STR_CASE_CMP(codec_inst.plname, "RED") != 0)
+ return RegistrationResult::kSkip;
+ switch (codec_inst.plfreq) {
+ case 8000:
+ (*pt_map)[codec_inst.plfreq] = codec_inst.pltype;
+ return RegistrationResult::kOk;
+ default:
+ return RegistrationResult::kBadFreq;
+ }
+}
+
+namespace {
+
+// Returns a new speech encoder, or null on error.
+// TODO(kwiberg): Don't handle errors here (bug 5033)
+rtc::scoped_ptr<AudioEncoder> CreateEncoder(
+ const CodecInst& speech_inst,
+ LockedIsacBandwidthInfo* bwinfo) {
+#if defined(WEBRTC_CODEC_ISACFX)
+ if (STR_CASE_CMP(speech_inst.plname, "isac") == 0)
+ return rtc_make_scoped_ptr(new AudioEncoderIsacFix(speech_inst, bwinfo));
+#endif
+#if defined(WEBRTC_CODEC_ISAC)
+ if (STR_CASE_CMP(speech_inst.plname, "isac") == 0)
+ return rtc_make_scoped_ptr(new AudioEncoderIsac(speech_inst, bwinfo));
+#endif
+#ifdef WEBRTC_CODEC_OPUS
+ if (STR_CASE_CMP(speech_inst.plname, "opus") == 0)
+ return rtc_make_scoped_ptr(new AudioEncoderOpus(speech_inst));
+#endif
+ if (STR_CASE_CMP(speech_inst.plname, "pcmu") == 0)
+ return rtc_make_scoped_ptr(new AudioEncoderPcmU(speech_inst));
+ if (STR_CASE_CMP(speech_inst.plname, "pcma") == 0)
+ return rtc_make_scoped_ptr(new AudioEncoderPcmA(speech_inst));
+ if (STR_CASE_CMP(speech_inst.plname, "l16") == 0)
+ return rtc_make_scoped_ptr(new AudioEncoderPcm16B(speech_inst));
+#ifdef WEBRTC_CODEC_ILBC
+ if (STR_CASE_CMP(speech_inst.plname, "ilbc") == 0)
+ return rtc_make_scoped_ptr(new AudioEncoderIlbc(speech_inst));
+#endif
+#ifdef WEBRTC_CODEC_G722
+ if (STR_CASE_CMP(speech_inst.plname, "g722") == 0)
+ return rtc_make_scoped_ptr(new AudioEncoderG722(speech_inst));
+#endif
+ LOG_F(LS_ERROR) << "Could not create encoder of type " << speech_inst.plname;
+ return rtc::scoped_ptr<AudioEncoder>();
+}
+
+rtc::scoped_ptr<AudioEncoder> CreateRedEncoder(AudioEncoder* encoder,
+ int red_payload_type) {
+#ifdef WEBRTC_CODEC_RED
+ AudioEncoderCopyRed::Config config;
+ config.payload_type = red_payload_type;
+ config.speech_encoder = encoder;
+ return rtc::scoped_ptr<AudioEncoder>(new AudioEncoderCopyRed(config));
+#else
+ return rtc::scoped_ptr<AudioEncoder>();
+#endif
+}
+
+rtc::scoped_ptr<AudioEncoder> CreateCngEncoder(AudioEncoder* encoder,
+ int payload_type,
+ ACMVADMode vad_mode) {
+ AudioEncoderCng::Config config;
+ config.num_channels = encoder->NumChannels();
+ config.payload_type = payload_type;
+ config.speech_encoder = encoder;
+ switch (vad_mode) {
+ case VADNormal:
+ config.vad_mode = Vad::kVadNormal;
+ break;
+ case VADLowBitrate:
+ config.vad_mode = Vad::kVadLowBitrate;
+ break;
+ case VADAggr:
+ config.vad_mode = Vad::kVadAggressive;
+ break;
+ case VADVeryAggr:
+ config.vad_mode = Vad::kVadVeryAggressive;
+ break;
+ default:
+ FATAL();
+ }
+ return rtc::scoped_ptr<AudioEncoder>(new AudioEncoderCng(config));
+}
+
+rtc::scoped_ptr<AudioDecoder> CreateIsacDecoder(
+ LockedIsacBandwidthInfo* bwinfo) {
+#if defined(WEBRTC_CODEC_ISACFX)
+ return rtc_make_scoped_ptr(new AudioDecoderIsacFix(bwinfo));
+#elif defined(WEBRTC_CODEC_ISAC)
+ return rtc_make_scoped_ptr(new AudioDecoderIsac(bwinfo));
+#else
+ FATAL() << "iSAC is not supported.";
+ return rtc::scoped_ptr<AudioDecoder>();
+#endif
+}
+
+} // namespace
+
+RentACodec::RentACodec() = default;
+RentACodec::~RentACodec() = default;
+
+AudioEncoder* RentACodec::RentEncoder(const CodecInst& codec_inst) {
+ rtc::scoped_ptr<AudioEncoder> enc =
+ CreateEncoder(codec_inst, &isac_bandwidth_info_);
+ if (!enc)
+ return nullptr;
+ speech_encoder_ = std::move(enc);
+ return speech_encoder_.get();
+}
+
+RentACodec::StackParameters::StackParameters() {
+ // Register the default payload types for RED and CNG.
+ for (const CodecInst& ci : RentACodec::Database()) {
+ RentACodec::RegisterCngPayloadType(&cng_payload_types, ci);
+ RentACodec::RegisterRedPayloadType(&red_payload_types, ci);
+ }
+}
+
+RentACodec::StackParameters::~StackParameters() = default;
+
+AudioEncoder* RentACodec::RentEncoderStack(StackParameters* param) {
+ RTC_DCHECK(param->speech_encoder);
+
+ if (param->use_codec_fec) {
+ // Switch FEC on. On failure, remember that FEC is off.
+ if (!param->speech_encoder->SetFec(true))
+ param->use_codec_fec = false;
+ } else {
+ // Switch FEC off. This shouldn't fail.
+ const bool success = param->speech_encoder->SetFec(false);
+ RTC_DCHECK(success);
+ }
+
+ auto pt = [&param](const std::map<int, int>& m) {
+ auto it = m.find(param->speech_encoder->SampleRateHz());
+ return it == m.end() ? rtc::Optional<int>()
+ : rtc::Optional<int>(it->second);
+ };
+ auto cng_pt = pt(param->cng_payload_types);
+ param->use_cng =
+ param->use_cng && cng_pt && param->speech_encoder->NumChannels() == 1;
+ auto red_pt = pt(param->red_payload_types);
+ param->use_red = param->use_red && red_pt;
+
+ if (param->use_cng || param->use_red) {
+ // The RED and CNG encoders need to be in sync with the speech encoder, so
+ // reset the latter to ensure its buffer is empty.
+ param->speech_encoder->Reset();
+ }
+ encoder_stack_ = param->speech_encoder;
+ if (param->use_red) {
+ red_encoder_ = CreateRedEncoder(encoder_stack_, *red_pt);
+ if (red_encoder_)
+ encoder_stack_ = red_encoder_.get();
+ } else {
+ red_encoder_.reset();
+ }
+ if (param->use_cng) {
+ cng_encoder_ = CreateCngEncoder(encoder_stack_, *cng_pt, param->vad_mode);
+ encoder_stack_ = cng_encoder_.get();
+ } else {
+ cng_encoder_.reset();
+ }
+ return encoder_stack_;
+}
+
+AudioDecoder* RentACodec::RentIsacDecoder() {
+ if (!isac_decoder_)
+ isac_decoder_ = CreateIsacDecoder(&isac_bandwidth_info_);
+ return isac_decoder_.get();
+}
+
+} // namespace acm2
+} // namespace webrtc
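
The intended call pattern for the new class is easiest to see end to end. A hedged usage sketch, mirroring the fixture in rent_a_codec_unittest.cc below (the CodecInst values and the function name are illustrative, not mandated by the patch):

    #include "webrtc/modules/audio_coding/acm2/rent_a_codec.h"

    void RentPcmuWithCng() {
      webrtc::acm2::RentACodec rac;
      // {pltype, plname, plfreq, pacsize, channels, rate} -- illustrative.
      const webrtc::CodecInst inst = {0, "pcmu", 8000, 160, 1, 64000};
      webrtc::AudioEncoder* speech = rac.RentEncoder(inst);
      if (!speech)
        return;  // RentEncoder() returns null on error.
      webrtc::acm2::RentACodec::StackParameters param;
      param.speech_encoder = speech;
      param.use_cng = true;  // Wrap the speech encoder in a CNG encoder.
      webrtc::AudioEncoder* stack = rac.RentEncoderStack(&param);
      // |stack| is live until the next RentEncoderStack() call or until
      // |rac| is destroyed.
      static_cast<void>(stack);
    }
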
diff --git a/webrtc/modules/audio_coding/acm2/rent_a_codec.h b/webrtc/modules/audio_coding/acm2/rent_a_codec.h
new file mode 100644
index 0000000000..b1dcc9196c
--- /dev/null
+++ b/webrtc/modules/audio_coding/acm2/rent_a_codec.h
@@ -0,0 +1,249 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_ACM2_RENT_A_CODEC_H_
+#define WEBRTC_MODULES_AUDIO_CODING_ACM2_RENT_A_CODEC_H_
+
+#include <stddef.h>
+#include <map>
+
+#include "webrtc/base/array_view.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/optional.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
+#include "webrtc/modules/audio_coding/codecs/audio_encoder.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module_typedefs.h"
+#include "webrtc/typedefs.h"
+
+#if defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX)
+#include "webrtc/modules/audio_coding/codecs/isac/locked_bandwidth_info.h"
+#else
+// Dummy implementation, for when we don't have iSAC.
+namespace webrtc {
+class LockedIsacBandwidthInfo {};
+}
+#endif
+
+namespace webrtc {
+
+struct CodecInst;
+
+namespace acm2 {
+
+class RentACodec {
+ public:
+ enum class CodecId {
+#if defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX)
+ kISAC,
+#endif
+#ifdef WEBRTC_CODEC_ISAC
+ kISACSWB,
+#endif
+ // Mono
+ kPCM16B,
+ kPCM16Bwb,
+ kPCM16Bswb32kHz,
+ // Stereo
+ kPCM16B_2ch,
+ kPCM16Bwb_2ch,
+ kPCM16Bswb32kHz_2ch,
+ // Mono
+ kPCMU,
+ kPCMA,
+ // Stereo
+ kPCMU_2ch,
+ kPCMA_2ch,
+#ifdef WEBRTC_CODEC_ILBC
+ kILBC,
+#endif
+#ifdef WEBRTC_CODEC_G722
+ kG722, // Mono
+ kG722_2ch, // Stereo
+#endif
+#ifdef WEBRTC_CODEC_OPUS
+ kOpus, // Mono and stereo
+#endif
+ kCNNB,
+ kCNWB,
+ kCNSWB,
+#ifdef ENABLE_48000_HZ
+ kCNFB,
+#endif
+ kAVT,
+#ifdef WEBRTC_CODEC_RED
+ kRED,
+#endif
+ kNumCodecs, // Implementation detail. Don't use.
+
+// Set unsupported codecs to -1.
+#if !defined(WEBRTC_CODEC_ISAC) && !defined(WEBRTC_CODEC_ISACFX)
+ kISAC = -1,
+#endif
+#ifndef WEBRTC_CODEC_ISAC
+ kISACSWB = -1,
+#endif
+ // 48 kHz not supported, always set to -1.
+ kPCM16Bswb48kHz = -1,
+#ifndef WEBRTC_CODEC_ILBC
+ kILBC = -1,
+#endif
+#ifndef WEBRTC_CODEC_G722
+ kG722 = -1, // Mono
+ kG722_2ch = -1, // Stereo
+#endif
+#ifndef WEBRTC_CODEC_OPUS
+ kOpus = -1, // Mono and stereo
+#endif
+#ifndef WEBRTC_CODEC_RED
+ kRED = -1,
+#endif
+#ifndef ENABLE_48000_HZ
+ kCNFB = -1,
+#endif
+
+ kNone = -1
+ };
+
+ enum class NetEqDecoder {
+ kDecoderPCMu,
+ kDecoderPCMa,
+ kDecoderPCMu_2ch,
+ kDecoderPCMa_2ch,
+ kDecoderILBC,
+ kDecoderISAC,
+ kDecoderISACswb,
+ kDecoderPCM16B,
+ kDecoderPCM16Bwb,
+ kDecoderPCM16Bswb32kHz,
+ kDecoderPCM16Bswb48kHz,
+ kDecoderPCM16B_2ch,
+ kDecoderPCM16Bwb_2ch,
+ kDecoderPCM16Bswb32kHz_2ch,
+ kDecoderPCM16Bswb48kHz_2ch,
+ kDecoderPCM16B_5ch,
+ kDecoderG722,
+ kDecoderG722_2ch,
+ kDecoderRED,
+ kDecoderAVT,
+ kDecoderCNGnb,
+ kDecoderCNGwb,
+ kDecoderCNGswb32kHz,
+ kDecoderCNGswb48kHz,
+ kDecoderArbitrary,
+ kDecoderOpus,
+ kDecoderOpus_2ch,
+ };
+
+ static inline size_t NumberOfCodecs() {
+ return static_cast<size_t>(CodecId::kNumCodecs);
+ }
+
+ static inline rtc::Optional<int> CodecIndexFromId(CodecId codec_id) {
+ const int i = static_cast<int>(codec_id);
+ return i >= 0 && i < static_cast<int>(NumberOfCodecs())
+ ? rtc::Optional<int>(i)
+ : rtc::Optional<int>();
+ }
+
+ static inline rtc::Optional<CodecId> CodecIdFromIndex(int codec_index) {
+ return static_cast<size_t>(codec_index) < NumberOfCodecs()
+ ? rtc::Optional<RentACodec::CodecId>(
+ static_cast<RentACodec::CodecId>(codec_index))
+ : rtc::Optional<RentACodec::CodecId>();
+ }
+
+ static rtc::Optional<CodecId> CodecIdByParams(const char* payload_name,
+ int sampling_freq_hz,
+ size_t channels);
+ static rtc::Optional<CodecInst> CodecInstById(CodecId codec_id);
+ static rtc::Optional<CodecId> CodecIdByInst(const CodecInst& codec_inst);
+ static rtc::Optional<CodecInst> CodecInstByParams(const char* payload_name,
+ int sampling_freq_hz,
+ size_t channels);
+ static bool IsCodecValid(const CodecInst& codec_inst);
+
+ static inline bool IsPayloadTypeValid(int payload_type) {
+ return payload_type >= 0 && payload_type <= 127;
+ }
+
+ static rtc::ArrayView<const CodecInst> Database();
+
+ static rtc::Optional<bool> IsSupportedNumChannels(CodecId codec_id,
+ size_t num_channels);
+
+ static rtc::Optional<NetEqDecoder> NetEqDecoderFromCodecId(
+ CodecId codec_id,
+ size_t num_channels);
+
+ // Parse codec_inst and extract payload types. If the given CodecInst was for
+ // the wrong sort of codec, return kSkip; otherwise, if the rate was illegal,
+ // return kBadFreq; otherwise, update the given RTP timestamp rate (Hz) ->
+ // payload type map and return kOk.
+ enum class RegistrationResult { kOk, kSkip, kBadFreq };
+ static RegistrationResult RegisterCngPayloadType(std::map<int, int>* pt_map,
+ const CodecInst& codec_inst);
+ static RegistrationResult RegisterRedPayloadType(std::map<int, int>* pt_map,
+ const CodecInst& codec_inst);
+
+ RentACodec();
+ ~RentACodec();
+
+ // Creates and returns an audio encoder built to the given specification.
+ // Returns null in case of error. The returned encoder is live until the next
+ // successful call to this function, or until the Rent-A-Codec is destroyed.
+ AudioEncoder* RentEncoder(const CodecInst& codec_inst);
+
+ struct StackParameters {
+ StackParameters();
+ ~StackParameters();
+
+ AudioEncoder* speech_encoder = nullptr;
+ bool use_codec_fec = false;
+ bool use_red = false;
+ bool use_cng = false;
+ ACMVADMode vad_mode = VADNormal;
+
+ // Maps from RTP timestamp rate (in Hz) to payload type.
+ std::map<int, int> cng_payload_types;
+ std::map<int, int> red_payload_types;
+ };
+
+ // Creates and returns an audio encoder stack constructed to the given
+ // specification. If the specification isn't compatible with the encoder, it
+ // will be changed to match (things will be switched off). The returned
+ // encoder is live until the next successful call to this function, or until
+ // the Rent-A-Codec is destroyed.
+ AudioEncoder* RentEncoderStack(StackParameters* param);
+
+ // The last return value of RentEncoderStack, or null if it hasn't been
+ // called.
+ AudioEncoder* GetEncoderStack() const { return encoder_stack_; }
+
+ // Creates and returns an iSAC decoder, which will remain live until the
+ // Rent-A-Codec is destroyed. Subsequent calls will simply return the same
+ // object.
+ AudioDecoder* RentIsacDecoder();
+
+ private:
+ rtc::scoped_ptr<AudioEncoder> speech_encoder_;
+ rtc::scoped_ptr<AudioEncoder> cng_encoder_;
+ rtc::scoped_ptr<AudioEncoder> red_encoder_;
+ rtc::scoped_ptr<AudioDecoder> isac_decoder_;
+ AudioEncoder* encoder_stack_ = nullptr;
+ LockedIsacBandwidthInfo isac_bandwidth_info_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(RentACodec);
+};
+
+} // namespace acm2
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_ACM2_RENT_A_CODEC_H_
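
The inline CodecIndexFromId()/CodecIdFromIndex() helpers above form a partial bijection: valid indices round-trip, and everything else maps to an empty Optional. A small sketch under that assumption (the function name is ours, not from the patch):

    #include "webrtc/modules/audio_coding/acm2/rent_a_codec.h"

    bool RoundTripsCleanly(int index) {
      using webrtc::acm2::RentACodec;
      rtc::Optional<RentACodec::CodecId> id =
          RentACodec::CodecIdFromIndex(index);
      if (!id)
        return false;  // Out-of-range indices yield an empty Optional.
      rtc::Optional<int> back = RentACodec::CodecIndexFromId(*id);
      return back && *back == index;
    }
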
diff --git a/webrtc/modules/audio_coding/acm2/rent_a_codec_unittest.cc b/webrtc/modules/audio_coding/acm2/rent_a_codec_unittest.cc
new file mode 100644
index 0000000000..e838488e53
--- /dev/null
+++ b/webrtc/modules/audio_coding/acm2/rent_a_codec_unittest.cc
@@ -0,0 +1,222 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/arraysize.h"
+#include "webrtc/modules/audio_coding/codecs/mock/mock_audio_encoder.h"
+#include "webrtc/modules/audio_coding/acm2/rent_a_codec.h"
+
+namespace webrtc {
+namespace acm2 {
+
+using ::testing::Return;
+
+namespace {
+const int kDataLengthSamples = 80;
+const int kPacketSizeSamples = 2 * kDataLengthSamples;
+const int16_t kZeroData[kDataLengthSamples] = {0};
+const CodecInst kDefaultCodecInst = {0, "pcmu", 8000, kPacketSizeSamples,
+ 1, 64000};
+const int kCngPt = 13;
+} // namespace
+
+class RentACodecTestF : public ::testing::Test {
+ protected:
+ void CreateCodec() {
+ speech_encoder_ = rent_a_codec_.RentEncoder(kDefaultCodecInst);
+ ASSERT_TRUE(speech_encoder_);
+ RentACodec::StackParameters param;
+ param.use_cng = true;
+ param.speech_encoder = speech_encoder_;
+ encoder_ = rent_a_codec_.RentEncoderStack(&param);
+ }
+
+ void EncodeAndVerify(size_t expected_out_length,
+ uint32_t expected_timestamp,
+ int expected_payload_type,
+ int expected_send_even_if_empty) {
+ uint8_t out[kPacketSizeSamples];
+ AudioEncoder::EncodedInfo encoded_info;
+ encoded_info =
+ encoder_->Encode(timestamp_, kZeroData, kPacketSizeSamples, out);
+ timestamp_ += kDataLengthSamples;
+ EXPECT_TRUE(encoded_info.redundant.empty());
+ EXPECT_EQ(expected_out_length, encoded_info.encoded_bytes);
+ EXPECT_EQ(expected_timestamp, encoded_info.encoded_timestamp);
+ if (expected_payload_type >= 0)
+ EXPECT_EQ(expected_payload_type, encoded_info.payload_type);
+ if (expected_send_even_if_empty >= 0)
+ EXPECT_EQ(static_cast<bool>(expected_send_even_if_empty),
+ encoded_info.send_even_if_empty);
+ }
+
+ RentACodec rent_a_codec_;
+ AudioEncoder* speech_encoder_ = nullptr;
+ AudioEncoder* encoder_ = nullptr;
+ uint32_t timestamp_ = 0;
+};
+
+// This test verifies that CNG frames are delivered as expected. Since the frame
+// size is set to 20 ms, we expect the first encode call to produce no output
+// (which is signaled as 0 bytes output of type kNoEncoding). The next encode
+// call should produce one SID frame of 9 bytes. The third call should not
+// result in any output (just like the first one). The fourth and final encode
+// call should produce an "empty frame", which is like no output, but with
+// AudioEncoder::EncodedInfo::send_even_if_empty set to true. (The reason to
+// produce an empty frame is to drive sending of DTMF packets in the RTP/RTCP
+// module.)
+TEST_F(RentACodecTestF, VerifyCngFrames) {
+ CreateCodec();
+ uint32_t expected_timestamp = timestamp_;
+ // Verify no frame.
+ {
+ SCOPED_TRACE("First encoding");
+ EncodeAndVerify(0, expected_timestamp, -1, -1);
+ }
+
+ // Verify SID frame delivered.
+ {
+ SCOPED_TRACE("Second encoding");
+ EncodeAndVerify(9, expected_timestamp, kCngPt, 1);
+ }
+
+ // Verify no frame.
+ {
+ SCOPED_TRACE("Third encoding");
+ EncodeAndVerify(0, expected_timestamp, -1, -1);
+ }
+
+  // Verify the "empty frame" (NoEncoding, but with send_even_if_empty set).
+ expected_timestamp += 2 * kDataLengthSamples;
+ {
+ SCOPED_TRACE("Fourth encoding");
+ EncodeAndVerify(0, expected_timestamp, kCngPt, 1);
+ }
+}
+
+TEST(RentACodecTest, ExternalEncoder) {
+ const int kSampleRateHz = 8000;
+ MockAudioEncoder external_encoder;
+ EXPECT_CALL(external_encoder, SampleRateHz())
+ .WillRepeatedly(Return(kSampleRateHz));
+ EXPECT_CALL(external_encoder, NumChannels()).WillRepeatedly(Return(1));
+ EXPECT_CALL(external_encoder, SetFec(false)).WillRepeatedly(Return(true));
+
+ RentACodec rac;
+ RentACodec::StackParameters param;
+ param.speech_encoder = &external_encoder;
+ EXPECT_EQ(&external_encoder, rac.RentEncoderStack(&param));
+ const int kPacketSizeSamples = kSampleRateHz / 100;
+ int16_t audio[kPacketSizeSamples] = {0};
+ uint8_t encoded[kPacketSizeSamples];
+ AudioEncoder::EncodedInfo info;
+
+ {
+ ::testing::InSequence s;
+ info.encoded_timestamp = 0;
+ EXPECT_CALL(external_encoder,
+ EncodeInternal(0, rtc::ArrayView<const int16_t>(audio),
+ arraysize(encoded), encoded))
+ .WillOnce(Return(info));
+ EXPECT_CALL(external_encoder, Mark("A"));
+ EXPECT_CALL(external_encoder, Mark("B"));
+ info.encoded_timestamp = 2;
+ EXPECT_CALL(external_encoder,
+ EncodeInternal(2, rtc::ArrayView<const int16_t>(audio),
+ arraysize(encoded), encoded))
+ .WillOnce(Return(info));
+ EXPECT_CALL(external_encoder, Die());
+ }
+
+ info = rac.GetEncoderStack()->Encode(0, audio, arraysize(encoded), encoded);
+ EXPECT_EQ(0u, info.encoded_timestamp);
+ external_encoder.Mark("A");
+
+ // Change to internal encoder.
+ CodecInst codec_inst = kDefaultCodecInst;
+ codec_inst.pacsize = kPacketSizeSamples;
+ param.speech_encoder = rac.RentEncoder(codec_inst);
+ ASSERT_TRUE(param.speech_encoder);
+ EXPECT_EQ(param.speech_encoder, rac.RentEncoderStack(&param));
+
+ // Don't expect any more calls to the external encoder.
+ info = rac.GetEncoderStack()->Encode(1, audio, arraysize(encoded), encoded);
+ external_encoder.Mark("B");
+
+ // Change back to external encoder again.
+ param.speech_encoder = &external_encoder;
+ EXPECT_EQ(&external_encoder, rac.RentEncoderStack(&param));
+ info = rac.GetEncoderStack()->Encode(2, audio, arraysize(encoded), encoded);
+ EXPECT_EQ(2u, info.encoded_timestamp);
+}
+
+// Verify that the speech encoder's Reset method is called when CNG or RED
+// (or both) are switched on, but not when they're switched off.
+void TestCngAndRedResetSpeechEncoder(bool use_cng, bool use_red) {
+ MockAudioEncoder speech_encoder;
+ EXPECT_CALL(speech_encoder, NumChannels()).WillRepeatedly(Return(1));
+ EXPECT_CALL(speech_encoder, Max10MsFramesInAPacket())
+ .WillRepeatedly(Return(2));
+ EXPECT_CALL(speech_encoder, SampleRateHz()).WillRepeatedly(Return(8000));
+ EXPECT_CALL(speech_encoder, SetFec(false)).WillRepeatedly(Return(true));
+ {
+ ::testing::InSequence s;
+ EXPECT_CALL(speech_encoder, Mark("disabled"));
+ EXPECT_CALL(speech_encoder, Mark("enabled"));
+ if (use_cng || use_red)
+ EXPECT_CALL(speech_encoder, Reset());
+ EXPECT_CALL(speech_encoder, Die());
+ }
+
+ RentACodec::StackParameters param1, param2;
+ param1.speech_encoder = &speech_encoder;
+ param2.speech_encoder = &speech_encoder;
+ param2.use_cng = use_cng;
+ param2.use_red = use_red;
+ speech_encoder.Mark("disabled");
+ RentACodec rac;
+ rac.RentEncoderStack(&param1);
+ speech_encoder.Mark("enabled");
+ rac.RentEncoderStack(&param2);
+}
+
+TEST(RentACodecTest, CngResetsSpeechEncoder) {
+ TestCngAndRedResetSpeechEncoder(true, false);
+}
+
+TEST(RentACodecTest, RedResetsSpeechEncoder) {
+ TestCngAndRedResetSpeechEncoder(false, true);
+}
+
+TEST(RentACodecTest, CngAndRedResetsSpeechEncoder) {
+ TestCngAndRedResetSpeechEncoder(true, true);
+}
+
+TEST(RentACodecTest, NoCngAndRedNoSpeechEncoderReset) {
+ TestCngAndRedResetSpeechEncoder(false, false);
+}
+
+TEST(RentACodecTest, RentEncoderError) {
+ const CodecInst codec_inst = {
+ 0, "Robert'); DROP TABLE Students;", 8000, 160, 1, 64000};
+ RentACodec rent_a_codec;
+ EXPECT_FALSE(rent_a_codec.RentEncoder(codec_inst));
+}
+
+#if GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
+TEST(RentACodecTest, RentEncoderStackWithoutSpeechEncoder) {
+ RentACodec::StackParameters sp;
+ EXPECT_EQ(nullptr, sp.speech_encoder);
+ EXPECT_DEATH(RentACodec().RentEncoderStack(&sp), "");
+}
+#endif
+
+} // namespace acm2
+} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/audio_coding.gypi b/webrtc/modules/audio_coding/audio_coding.gypi
index bc3c48d075..abdb1915c3 100644
--- a/webrtc/modules/audio_coding/audio_coding.gypi
+++ b/webrtc/modules/audio_coding/audio_coding.gypi
@@ -19,12 +19,195 @@
'codecs/isac/isacfix.gypi',
'codecs/pcm16b/pcm16b.gypi',
'codecs/red/red.gypi',
- 'main/audio_coding_module.gypi',
'neteq/neteq.gypi',
],
+ 'variables': {
+ 'audio_coding_dependencies': [
+ 'cng',
+ 'g711',
+ 'pcm16b',
+ '<(webrtc_root)/common.gyp:webrtc_common',
+ '<(webrtc_root)/common_audio/common_audio.gyp:common_audio',
+ '<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
+ ],
+ 'audio_coding_defines': [],
+ 'conditions': [
+ ['include_opus==1', {
+ 'audio_coding_dependencies': ['webrtc_opus',],
+ 'audio_coding_defines': ['WEBRTC_CODEC_OPUS',],
+ }],
+ ['build_with_mozilla==0', {
+ 'conditions': [
+ ['target_arch=="arm"', {
+ 'audio_coding_dependencies': ['isac_fix',],
+ 'audio_coding_defines': ['WEBRTC_CODEC_ISACFX',],
+ }, {
+ 'audio_coding_dependencies': ['isac',],
+ 'audio_coding_defines': ['WEBRTC_CODEC_ISAC',],
+ }],
+ ],
+ 'audio_coding_dependencies': ['g722',],
+ 'audio_coding_defines': ['WEBRTC_CODEC_G722',],
+ }],
+ ['build_with_mozilla==0 and build_with_chromium==0', {
+ 'audio_coding_dependencies': ['ilbc', 'red',],
+ 'audio_coding_defines': ['WEBRTC_CODEC_ILBC', 'WEBRTC_CODEC_RED',],
+ }],
+ ],
+ },
+ 'targets': [
+ {
+ 'target_name': 'rent_a_codec',
+ 'type': 'static_library',
+ 'defines': [
+ '<@(audio_coding_defines)',
+ ],
+ 'dependencies': [
+ '<(webrtc_root)/common.gyp:webrtc_common',
+ ],
+ 'include_dirs': [
+ '<(webrtc_root)',
+ ],
+ 'direct_dependent_settings': {
+ 'include_dirs': [
+ '<(webrtc_root)',
+ ],
+ },
+ 'sources': [
+ 'acm2/acm_codec_database.cc',
+ 'acm2/acm_codec_database.h',
+ 'acm2/rent_a_codec.cc',
+ 'acm2/rent_a_codec.h',
+ ],
+ },
+ {
+ 'target_name': 'audio_coding_module',
+ 'type': 'static_library',
+ 'defines': [
+ '<@(audio_coding_defines)',
+ ],
+ 'dependencies': [
+ '<@(audio_coding_dependencies)',
+ '<(webrtc_root)/common.gyp:webrtc_common',
+ '<(webrtc_root)/webrtc.gyp:rtc_event_log',
+ 'neteq',
+ 'rent_a_codec',
+ ],
+ 'include_dirs': [
+ 'include',
+ '../include',
+ '<(webrtc_root)',
+ ],
+ 'direct_dependent_settings': {
+ 'include_dirs': [
+ 'include',
+ '../include',
+ '<(webrtc_root)',
+ ],
+ },
+ 'conditions': [
+ ['include_opus==1', {
+ 'export_dependent_settings': ['webrtc_opus'],
+ }],
+ ],
+ 'sources': [
+ 'acm2/acm_common_defs.h',
+ 'acm2/acm_receiver.cc',
+ 'acm2/acm_receiver.h',
+ 'acm2/acm_resampler.cc',
+ 'acm2/acm_resampler.h',
+ 'acm2/audio_coding_module.cc',
+ 'acm2/audio_coding_module_impl.cc',
+ 'acm2/audio_coding_module_impl.h',
+ 'acm2/call_statistics.cc',
+ 'acm2/call_statistics.h',
+ 'acm2/codec_manager.cc',
+ 'acm2/codec_manager.h',
+ 'acm2/initial_delay_manager.cc',
+ 'acm2/initial_delay_manager.h',
+ 'include/audio_coding_module.h',
+ 'include/audio_coding_module_typedefs.h',
+ ],
+ },
+ ],
'conditions': [
['include_opus==1', {
'includes': ['codecs/opus/opus.gypi',],
}],
+ ['include_tests==1', {
+ 'targets': [
+ {
+ 'target_name': 'acm_receive_test',
+ 'type': 'static_library',
+ 'defines': [
+ '<@(audio_coding_defines)',
+ ],
+ 'dependencies': [
+ '<@(audio_coding_dependencies)',
+ 'audio_coding_module',
+ 'neteq_unittest_tools',
+ '<(DEPTH)/testing/gtest.gyp:gtest',
+ ],
+ 'sources': [
+ 'acm2/acm_receive_test_oldapi.cc',
+ 'acm2/acm_receive_test_oldapi.h',
+ ],
+ }, # acm_receive_test
+ {
+ 'target_name': 'acm_send_test',
+ 'type': 'static_library',
+ 'defines': [
+ '<@(audio_coding_defines)',
+ ],
+ 'dependencies': [
+ '<@(audio_coding_dependencies)',
+ 'audio_coding_module',
+ 'neteq_unittest_tools',
+ '<(DEPTH)/testing/gtest.gyp:gtest',
+ ],
+ 'sources': [
+ 'acm2/acm_send_test_oldapi.cc',
+ 'acm2/acm_send_test_oldapi.h',
+ ],
+ }, # acm_send_test
+ {
+ 'target_name': 'delay_test',
+ 'type': 'executable',
+ 'dependencies': [
+ 'audio_coding_module',
+ '<(DEPTH)/testing/gtest.gyp:gtest',
+ '<(webrtc_root)/common.gyp:webrtc_common',
+ '<(webrtc_root)/test/test.gyp:test_support',
+ '<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
+ '<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers_default',
+ '<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
+ ],
+ 'sources': [
+ 'test/delay_test.cc',
+ 'test/Channel.cc',
+ 'test/PCMFile.cc',
+ 'test/utility.cc',
+ ],
+ }, # delay_test
+ {
+ 'target_name': 'insert_packet_with_timing',
+ 'type': 'executable',
+ 'dependencies': [
+ 'audio_coding_module',
+ '<(DEPTH)/testing/gtest.gyp:gtest',
+ '<(webrtc_root)/common.gyp:webrtc_common',
+ '<(webrtc_root)/test/test.gyp:test_support',
+ '<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
+ '<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers_default',
+ '<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
+ ],
+ 'sources': [
+ 'test/insert_packet_with_timing.cc',
+ 'test/Channel.cc',
+ 'test/PCMFile.cc',
+ ],
+      }, # insert_packet_with_timing
+ ],
+ }],
],
}
diff --git a/webrtc/modules/audio_coding/codecs/audio_decoder.cc b/webrtc/modules/audio_coding/codecs/audio_decoder.cc
index 08d101c5ae..d2984b97b0 100644
--- a/webrtc/modules/audio_coding/codecs/audio_decoder.cc
+++ b/webrtc/modules/audio_coding/codecs/audio_decoder.cc
@@ -13,12 +13,14 @@
#include <assert.h>
#include "webrtc/base/checks.h"
+#include "webrtc/base/trace_event.h"
namespace webrtc {
int AudioDecoder::Decode(const uint8_t* encoded, size_t encoded_len,
int sample_rate_hz, size_t max_decoded_bytes,
int16_t* decoded, SpeechType* speech_type) {
+ TRACE_EVENT0("webrtc", "AudioDecoder::Decode");
int duration = PacketDuration(encoded, encoded_len);
if (duration >= 0 &&
duration * Channels() * sizeof(int16_t) > max_decoded_bytes) {
@@ -31,6 +33,7 @@ int AudioDecoder::Decode(const uint8_t* encoded, size_t encoded_len,
int AudioDecoder::DecodeRedundant(const uint8_t* encoded, size_t encoded_len,
int sample_rate_hz, size_t max_decoded_bytes,
int16_t* decoded, SpeechType* speech_type) {
+ TRACE_EVENT0("webrtc", "AudioDecoder::DecodeRedundant");
int duration = PacketDurationRedundant(encoded, encoded_len);
if (duration >= 0 &&
duration * Channels() * sizeof(int16_t) > max_decoded_bytes) {
@@ -40,12 +43,6 @@ int AudioDecoder::DecodeRedundant(const uint8_t* encoded, size_t encoded_len,
speech_type);
}
-int AudioDecoder::DecodeInternal(const uint8_t* encoded, size_t encoded_len,
- int sample_rate_hz, int16_t* decoded,
- SpeechType* speech_type) {
- return kNotImplemented;
-}
-
int AudioDecoder::DecodeRedundantInternal(const uint8_t* encoded,
size_t encoded_len,
int sample_rate_hz, int16_t* decoded,
diff --git a/webrtc/modules/audio_coding/codecs/audio_decoder.h b/webrtc/modules/audio_coding/codecs/audio_decoder.h
index 6189be098d..81ac873183 100644
--- a/webrtc/modules/audio_coding/codecs/audio_decoder.h
+++ b/webrtc/modules/audio_coding/codecs/audio_decoder.h
@@ -14,7 +14,7 @@
#include <stdlib.h> // NULL
#include "webrtc/base/constructormagic.h"
-#include "webrtc/modules/audio_coding/codecs/cng/include/webrtc_cng.h"
+#include "webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -41,21 +41,21 @@ class AudioDecoder {
// is set to kComfortNoise, otherwise it is kSpeech. The desired output
// sample rate is provided in |sample_rate_hz|, which must be valid for the
// codec at hand.
- virtual int Decode(const uint8_t* encoded,
- size_t encoded_len,
- int sample_rate_hz,
- size_t max_decoded_bytes,
- int16_t* decoded,
- SpeechType* speech_type);
+ int Decode(const uint8_t* encoded,
+ size_t encoded_len,
+ int sample_rate_hz,
+ size_t max_decoded_bytes,
+ int16_t* decoded,
+ SpeechType* speech_type);
// Same as Decode(), but interfaces to the decoders redundant decode function.
// The default implementation simply calls the regular Decode() method.
- virtual int DecodeRedundant(const uint8_t* encoded,
- size_t encoded_len,
- int sample_rate_hz,
- size_t max_decoded_bytes,
- int16_t* decoded,
- SpeechType* speech_type);
+ int DecodeRedundant(const uint8_t* encoded,
+ size_t encoded_len,
+ int sample_rate_hz,
+ size_t max_decoded_bytes,
+ int16_t* decoded,
+ SpeechType* speech_type);
// Indicates if the decoder implements the DecodePlc method.
virtual bool HasDecodePlc() const;
@@ -107,7 +107,7 @@ class AudioDecoder {
size_t encoded_len,
int sample_rate_hz,
int16_t* decoded,
- SpeechType* speech_type);
+ SpeechType* speech_type) = 0;
virtual int DecodeRedundantInternal(const uint8_t* encoded,
size_t encoded_len,
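
This hunk turns Decode()/DecodeRedundant() into non-virtual entry points and makes DecodeInternal() pure virtual — the classic non-virtual-interface idiom, where the base class does the checking and subclasses supply only the hook. A standalone, simplified mirror of that pattern (not the actual WebRTC classes):

    #include <cstddef>
    #include <cstdint>

    class Decoder {
     public:
      virtual ~Decoder() {}
      // Non-virtual entry point: validates, then dispatches to the hook.
      int Decode(const uint8_t* encoded, size_t encoded_len,
                 int16_t* decoded) {
        if (encoded == nullptr || encoded_len == 0)
          return -1;
        return DecodeInternal(encoded, encoded_len, decoded);
      }

     protected:
      // Pure virtual hook: every concrete decoder must implement this.
      virtual int DecodeInternal(const uint8_t* encoded, size_t encoded_len,
                                 int16_t* decoded) = 0;
    };
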
diff --git a/webrtc/modules/audio_coding/codecs/audio_encoder.cc b/webrtc/modules/audio_coding/codecs/audio_encoder.cc
index 6d763005ac..e99fc30995 100644
--- a/webrtc/modules/audio_coding/codecs/audio_encoder.cc
+++ b/webrtc/modules/audio_coding/codecs/audio_encoder.cc
@@ -9,7 +9,9 @@
*/
#include "webrtc/modules/audio_coding/codecs/audio_encoder.h"
+
#include "webrtc/base/checks.h"
+#include "webrtc/base/trace_event.h"
namespace webrtc {
@@ -21,13 +23,14 @@ int AudioEncoder::RtpTimestampRateHz() const {
return SampleRateHz();
}
-AudioEncoder::EncodedInfo AudioEncoder::Encode(uint32_t rtp_timestamp,
- const int16_t* audio,
- size_t num_samples_per_channel,
- size_t max_encoded_bytes,
- uint8_t* encoded) {
- RTC_CHECK_EQ(num_samples_per_channel,
- static_cast<size_t>(SampleRateHz() / 100));
+AudioEncoder::EncodedInfo AudioEncoder::Encode(
+ uint32_t rtp_timestamp,
+ rtc::ArrayView<const int16_t> audio,
+ size_t max_encoded_bytes,
+ uint8_t* encoded) {
+ TRACE_EVENT0("webrtc", "AudioEncoder::Encode");
+ RTC_CHECK_EQ(audio.size(),
+ static_cast<size_t>(NumChannels() * SampleRateHz() / 100));
EncodedInfo info =
EncodeInternal(rtp_timestamp, audio, max_encoded_bytes, encoded);
RTC_CHECK_LE(info.encoded_bytes, max_encoded_bytes);
diff --git a/webrtc/modules/audio_coding/codecs/audio_encoder.h b/webrtc/modules/audio_coding/codecs/audio_encoder.h
index cda9d86f2e..a46b0e86a7 100644
--- a/webrtc/modules/audio_coding/codecs/audio_encoder.h
+++ b/webrtc/modules/audio_coding/codecs/audio_encoder.h
@@ -14,6 +14,7 @@
#include <algorithm>
#include <vector>
+#include "webrtc/base/array_view.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -60,7 +61,7 @@ class AudioEncoder {
// Returns the input sample rate in Hz and the number of input channels.
// These are constants set at instantiation time.
virtual int SampleRateHz() const = 0;
- virtual int NumChannels() const = 0;
+ virtual size_t NumChannels() const = 0;
// Returns the rate at which the RTP timestamps are updated. The default
// implementation returns SampleRateHz().
@@ -91,13 +92,12 @@ class AudioEncoder {
// Encode() checks some preconditions, calls EncodeInternal() which does the
// actual work, and then checks some postconditions.
EncodedInfo Encode(uint32_t rtp_timestamp,
- const int16_t* audio,
- size_t num_samples_per_channel,
+ rtc::ArrayView<const int16_t> audio,
size_t max_encoded_bytes,
uint8_t* encoded);
virtual EncodedInfo EncodeInternal(uint32_t rtp_timestamp,
- const int16_t* audio,
+ rtc::ArrayView<const int16_t> audio,
size_t max_encoded_bytes,
uint8_t* encoded) = 0;
@@ -125,7 +125,7 @@ class AudioEncoder {
// Tells the encoder about the highest sample rate the decoder is expected to
// use when decoding the bitstream. The encoder would typically use this
// information to adjust the quality of the encoding. The default
- // implementation just returns true.
+ // implementation does nothing.
virtual void SetMaxPlaybackRate(int frequency_hz);
// Tells the encoder what the projected packet loss rate is. The rate is in
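
With the new ArrayView signature, callers hand Encode() exactly one 10 ms slice of interleaved audio; the view's length is CHECKed against NumChannels() * SampleRateHz() / 100, per the hunk in audio_encoder.cc above. A hedged call-site sketch (the function and buffer names are assumptions):

    #include "webrtc/base/array_view.h"
    #include "webrtc/modules/audio_coding/codecs/audio_encoder.h"

    void EncodeOneFrame(webrtc::AudioEncoder* encoder,
                        const int16_t* pcm,  // 10 ms of interleaved audio.
                        uint32_t rtp_timestamp,
                        uint8_t* out,
                        size_t out_size) {
      // Encode() CHECKs that the view holds exactly one 10 ms frame.
      const size_t samples =
          encoder->NumChannels() * encoder->SampleRateHz() / 100;
      webrtc::AudioEncoder::EncodedInfo info = encoder->Encode(
          rtp_timestamp, rtc::ArrayView<const int16_t>(pcm, samples),
          out_size, out);
      static_cast<void>(info);  // info.encoded_bytes etc. describe the packet.
    }
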
diff --git a/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.cc b/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.cc
index 121524633c..180166c40c 100644
--- a/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.cc
+++ b/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/codecs/cng/include/audio_encoder_cng.h"
+#include "webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.h"
#include <algorithm>
#include <limits>
@@ -75,7 +75,7 @@ int AudioEncoderCng::SampleRateHz() const {
return speech_encoder_->SampleRateHz();
}
-int AudioEncoderCng::NumChannels() const {
+size_t AudioEncoderCng::NumChannels() const {
return 1;
}
@@ -97,7 +97,7 @@ int AudioEncoderCng::GetTargetBitrate() const {
AudioEncoder::EncodedInfo AudioEncoderCng::EncodeInternal(
uint32_t rtp_timestamp,
- const int16_t* audio,
+ rtc::ArrayView<const int16_t> audio,
size_t max_encoded_bytes,
uint8_t* encoded) {
RTC_CHECK_GE(max_encoded_bytes,
@@ -106,9 +106,8 @@ AudioEncoder::EncodedInfo AudioEncoderCng::EncodeInternal(
RTC_CHECK_EQ(speech_buffer_.size(),
rtp_timestamps_.size() * samples_per_10ms_frame);
rtp_timestamps_.push_back(rtp_timestamp);
- for (size_t i = 0; i < samples_per_10ms_frame; ++i) {
- speech_buffer_.push_back(audio[i]);
- }
+ RTC_DCHECK_EQ(samples_per_10ms_frame, audio.size());
+ speech_buffer_.insert(speech_buffer_.end(), audio.cbegin(), audio.cend());
const size_t frames_to_encode = speech_encoder_->Num10MsFramesInNextPacket();
if (rtp_timestamps_.size() < frames_to_encode) {
return EncodedInfo();
@@ -242,9 +241,12 @@ AudioEncoder::EncodedInfo AudioEncoderCng::EncodeActive(
const size_t samples_per_10ms_frame = SamplesPer10msFrame();
AudioEncoder::EncodedInfo info;
for (size_t i = 0; i < frames_to_encode; ++i) {
- info = speech_encoder_->Encode(
- rtp_timestamps_.front(), &speech_buffer_[i * samples_per_10ms_frame],
- samples_per_10ms_frame, max_encoded_bytes, encoded);
+ info =
+ speech_encoder_->Encode(rtp_timestamps_.front(),
+ rtc::ArrayView<const int16_t>(
+ &speech_buffer_[i * samples_per_10ms_frame],
+ samples_per_10ms_frame),
+ max_encoded_bytes, encoded);
if (i + 1 == frames_to_encode) {
RTC_CHECK_GT(info.encoded_bytes, 0u) << "Encoder didn't deliver data.";
} else {
diff --git a/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.h b/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.h
new file mode 100644
index 0000000000..87383e2ac5
--- /dev/null
+++ b/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.h
@@ -0,0 +1,95 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_AUDIO_ENCODER_CNG_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_AUDIO_ENCODER_CNG_H_
+
+#include <vector>
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/common_audio/vad/include/vad.h"
+#include "webrtc/modules/audio_coding/codecs/audio_encoder.h"
+#include "webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h"
+
+namespace webrtc {
+
+// Deleter for use with scoped_ptr.
+struct CngInstDeleter {
+ void operator()(CNG_enc_inst* ptr) const { WebRtcCng_FreeEnc(ptr); }
+};
+
+class Vad;
+
+class AudioEncoderCng final : public AudioEncoder {
+ public:
+ struct Config {
+ bool IsOk() const;
+
+ size_t num_channels = 1;
+ int payload_type = 13;
+ // Caller keeps ownership of the AudioEncoder object.
+ AudioEncoder* speech_encoder = nullptr;
+ Vad::Aggressiveness vad_mode = Vad::kVadNormal;
+ int sid_frame_interval_ms = 100;
+ int num_cng_coefficients = 8;
+ // The Vad pointer is mainly for testing. If a NULL pointer is passed, the
+ // AudioEncoderCng creates (and destroys) a Vad object internally. If an
+ // object is passed, the AudioEncoderCng assumes ownership of the Vad
+ // object.
+ Vad* vad = nullptr;
+ };
+
+ explicit AudioEncoderCng(const Config& config);
+ ~AudioEncoderCng() override;
+
+ size_t MaxEncodedBytes() const override;
+ int SampleRateHz() const override;
+ size_t NumChannels() const override;
+ int RtpTimestampRateHz() const override;
+ size_t Num10MsFramesInNextPacket() const override;
+ size_t Max10MsFramesInAPacket() const override;
+ int GetTargetBitrate() const override;
+ EncodedInfo EncodeInternal(uint32_t rtp_timestamp,
+ rtc::ArrayView<const int16_t> audio,
+ size_t max_encoded_bytes,
+ uint8_t* encoded) override;
+ void Reset() override;
+ bool SetFec(bool enable) override;
+ bool SetDtx(bool enable) override;
+ bool SetApplication(Application application) override;
+ void SetMaxPlaybackRate(int frequency_hz) override;
+ void SetProjectedPacketLossRate(double fraction) override;
+ void SetTargetBitrate(int target_bps) override;
+
+ private:
+ EncodedInfo EncodePassive(size_t frames_to_encode,
+ size_t max_encoded_bytes,
+ uint8_t* encoded);
+ EncodedInfo EncodeActive(size_t frames_to_encode,
+ size_t max_encoded_bytes,
+ uint8_t* encoded);
+ size_t SamplesPer10msFrame() const;
+
+ AudioEncoder* speech_encoder_;
+ const int cng_payload_type_;
+ const int num_cng_coefficients_;
+ const int sid_frame_interval_ms_;
+ std::vector<int16_t> speech_buffer_;
+ std::vector<uint32_t> rtp_timestamps_;
+ bool last_frame_active_;
+ rtc::scoped_ptr<Vad> vad_;
+ rtc::scoped_ptr<CNG_enc_inst, CngInstDeleter> cng_inst_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderCng);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_AUDIO_ENCODER_CNG_H_
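
Wiring a speech encoder into AudioEncoderCng follows directly from the Config struct above; this sketch mirrors CreateCngEncoder() in rent_a_codec.cc earlier in the patch (the wrapper function name is ours):

    #include "webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.h"

    rtc::scoped_ptr<webrtc::AudioEncoder> WrapInCng(
        webrtc::AudioEncoder* speech_encoder, int cng_payload_type) {
      webrtc::AudioEncoderCng::Config config;
      config.num_channels = speech_encoder->NumChannels();
      config.payload_type = cng_payload_type;
      config.speech_encoder = speech_encoder;  // Caller keeps ownership.
      config.vad_mode = webrtc::Vad::kVadNormal;
      return rtc::scoped_ptr<webrtc::AudioEncoder>(
          new webrtc::AudioEncoderCng(config));
    }
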
diff --git a/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng_unittest.cc b/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng_unittest.cc
index 0b837a0f12..feb3ed1f0a 100644
--- a/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng_unittest.cc
+++ b/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng_unittest.cc
@@ -13,7 +13,7 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/common_audio/vad/mock/mock_vad.h"
-#include "webrtc/modules/audio_coding/codecs/cng/include/audio_encoder_cng.h"
+#include "webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.h"
#include "webrtc/modules/audio_coding/codecs/mock/mock_audio_encoder.h"
using ::testing::Return;
@@ -75,8 +75,10 @@ class AudioEncoderCngTest : public ::testing::Test {
void Encode() {
ASSERT_TRUE(cng_) << "Must call CreateCng() first.";
- encoded_info_ = cng_->Encode(timestamp_, audio_, num_audio_samples_10ms_,
- encoded_.size(), &encoded_[0]);
+ encoded_info_ = cng_->Encode(
+ timestamp_,
+ rtc::ArrayView<const int16_t>(audio_, num_audio_samples_10ms_),
+ encoded_.size(), &encoded_[0]);
timestamp_ += static_cast<uint32_t>(num_audio_samples_10ms_);
}
diff --git a/webrtc/modules/audio_coding/codecs/cng/cng.gypi b/webrtc/modules/audio_coding/codecs/cng/cng.gypi
index 78dc41a94f..c020f4740d 100644
--- a/webrtc/modules/audio_coding/codecs/cng/cng.gypi
+++ b/webrtc/modules/audio_coding/codecs/cng/cng.gypi
@@ -15,23 +15,13 @@
'<(webrtc_root)/common_audio/common_audio.gyp:common_audio',
'audio_encoder_interface',
],
- 'include_dirs': [
- 'include',
- '<(webrtc_root)',
- ],
- 'direct_dependent_settings': {
- 'include_dirs': [
- 'include',
- '<(webrtc_root)',
- ],
- },
'sources': [
- 'include/audio_encoder_cng.h',
- 'include/webrtc_cng.h',
'audio_encoder_cng.cc',
- 'webrtc_cng.c',
+ 'audio_encoder_cng.h',
'cng_helpfuns.c',
'cng_helpfuns.h',
+ 'webrtc_cng.c',
+ 'webrtc_cng.h',
],
},
], # targets
diff --git a/webrtc/modules/audio_coding/codecs/cng/include/audio_encoder_cng.h b/webrtc/modules/audio_coding/codecs/cng/include/audio_encoder_cng.h
deleted file mode 100644
index 3ca9eb60f3..0000000000
--- a/webrtc/modules/audio_coding/codecs/cng/include/audio_encoder_cng.h
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_INCLUDE_AUDIO_ENCODER_CNG_H_
-#define WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_INCLUDE_AUDIO_ENCODER_CNG_H_
-
-#include <vector>
-
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/common_audio/vad/include/vad.h"
-#include "webrtc/modules/audio_coding/codecs/audio_encoder.h"
-#include "webrtc/modules/audio_coding/codecs/cng/include/webrtc_cng.h"
-
-namespace webrtc {
-
-// Deleter for use with scoped_ptr.
-struct CngInstDeleter {
- void operator()(CNG_enc_inst* ptr) const { WebRtcCng_FreeEnc(ptr); }
-};
-
-class Vad;
-
-class AudioEncoderCng final : public AudioEncoder {
- public:
- struct Config {
- bool IsOk() const;
-
- int num_channels = 1;
- int payload_type = 13;
- // Caller keeps ownership of the AudioEncoder object.
- AudioEncoder* speech_encoder = nullptr;
- Vad::Aggressiveness vad_mode = Vad::kVadNormal;
- int sid_frame_interval_ms = 100;
- int num_cng_coefficients = 8;
- // The Vad pointer is mainly for testing. If a NULL pointer is passed, the
- // AudioEncoderCng creates (and destroys) a Vad object internally. If an
- // object is passed, the AudioEncoderCng assumes ownership of the Vad
- // object.
- Vad* vad = nullptr;
- };
-
- explicit AudioEncoderCng(const Config& config);
- ~AudioEncoderCng() override;
-
- size_t MaxEncodedBytes() const override;
- int SampleRateHz() const override;
- int NumChannels() const override;
- int RtpTimestampRateHz() const override;
- size_t Num10MsFramesInNextPacket() const override;
- size_t Max10MsFramesInAPacket() const override;
- int GetTargetBitrate() const override;
- EncodedInfo EncodeInternal(uint32_t rtp_timestamp,
- const int16_t* audio,
- size_t max_encoded_bytes,
- uint8_t* encoded) override;
- void Reset() override;
- bool SetFec(bool enable) override;
- bool SetDtx(bool enable) override;
- bool SetApplication(Application application) override;
- void SetMaxPlaybackRate(int frequency_hz) override;
- void SetProjectedPacketLossRate(double fraction) override;
- void SetTargetBitrate(int target_bps) override;
-
- private:
- EncodedInfo EncodePassive(size_t frames_to_encode,
- size_t max_encoded_bytes,
- uint8_t* encoded);
- EncodedInfo EncodeActive(size_t frames_to_encode,
- size_t max_encoded_bytes,
- uint8_t* encoded);
- size_t SamplesPer10msFrame() const;
-
- AudioEncoder* speech_encoder_;
- const int cng_payload_type_;
- const int num_cng_coefficients_;
- const int sid_frame_interval_ms_;
- std::vector<int16_t> speech_buffer_;
- std::vector<uint32_t> rtp_timestamps_;
- bool last_frame_active_;
- rtc::scoped_ptr<Vad> vad_;
- rtc::scoped_ptr<CNG_enc_inst, CngInstDeleter> cng_inst_;
-
- RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderCng);
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_INCLUDE_AUDIO_ENCODER_CNG_H_
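The header above is deleted because it moves, unchanged, to the new location
added earlier in this patch; the Config struct therefore remains the way to
set the encoder up. A sketch, where speech_encoder is a hypothetical
caller-owned AudioEncoder:

    AudioEncoderCng::Config config;
    config.speech_encoder = speech_encoder;  // caller keeps ownership
    config.vad_mode = Vad::kVadNormal;       // the default, spelled out
    if (config.IsOk()) {
      AudioEncoderCng cng_encoder(config);
      // Passive (non-speech) input now yields periodic SID frames instead
      // of regular speech packets.
    }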
diff --git a/webrtc/modules/audio_coding/codecs/cng/include/webrtc_cng.h b/webrtc/modules/audio_coding/codecs/cng/include/webrtc_cng.h
deleted file mode 100644
index 35660c4c3c..0000000000
--- a/webrtc/modules/audio_coding/codecs/cng/include/webrtc_cng.h
+++ /dev/null
@@ -1,163 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_MAIN_INCLUDE_WEBRTC_CNG_H_
-#define WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_MAIN_INCLUDE_WEBRTC_CNG_H_
-
-#include <stddef.h>
-#include "webrtc/typedefs.h"
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-#define WEBRTC_CNG_MAX_LPC_ORDER 12
-#define WEBRTC_CNG_MAX_OUTSIZE_ORDER 640
-
-/* Define Error codes. */
-
-/* 6100 Encoder */
-#define CNG_ENCODER_NOT_INITIATED 6120
-#define CNG_DISALLOWED_LPC_ORDER 6130
-#define CNG_DISALLOWED_FRAME_SIZE 6140
-#define CNG_DISALLOWED_SAMPLING_FREQUENCY 6150
-/* 6200 Decoder */
-#define CNG_DECODER_NOT_INITIATED 6220
-
-typedef struct WebRtcCngEncInst CNG_enc_inst;
-typedef struct WebRtcCngDecInst CNG_dec_inst;
-
-/****************************************************************************
- * WebRtcCng_CreateEnc/Dec(...)
- *
- * These functions create an instance to the specified structure
- *
- * Input:
- * - XXX_inst : Pointer to created instance that should be created
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int16_t WebRtcCng_CreateEnc(CNG_enc_inst** cng_inst);
-int16_t WebRtcCng_CreateDec(CNG_dec_inst** cng_inst);
-
-/****************************************************************************
- * WebRtcCng_InitEnc/Dec(...)
- *
- * This function initializes a instance
- *
- * Input:
- * - cng_inst : Instance that should be initialized
- *
- * - fs : 8000 for narrowband and 16000 for wideband
- * - interval : generate SID data every interval ms
- * - quality : Number of refl. coefs, maximum allowed is 12
- *
- * Output:
- * - cng_inst : Initialized instance
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcCng_InitEnc(CNG_enc_inst* cng_inst, int fs, int16_t interval,
- int16_t quality);
-void WebRtcCng_InitDec(CNG_dec_inst* cng_inst);
-
-/****************************************************************************
- * WebRtcCng_FreeEnc/Dec(...)
- *
- * These functions frees the dynamic memory of a specified instance
- *
- * Input:
- * - cng_inst : Pointer to created instance that should be freed
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int16_t WebRtcCng_FreeEnc(CNG_enc_inst* cng_inst);
-int16_t WebRtcCng_FreeDec(CNG_dec_inst* cng_inst);
-
-/****************************************************************************
- * WebRtcCng_Encode(...)
- *
- * These functions analyzes background noise
- *
- * Input:
- * - cng_inst : Pointer to created instance
- * - speech : Signal to be analyzed
- * - nrOfSamples : Size of speech vector
- * - forceSID : not zero to force SID frame and reset
- *
- * Output:
- * - bytesOut : Nr of bytes to transmit, might be 0
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int WebRtcCng_Encode(CNG_enc_inst* cng_inst, int16_t* speech,
- size_t nrOfSamples, uint8_t* SIDdata,
- size_t* bytesOut, int16_t forceSID);
-
-/****************************************************************************
- * WebRtcCng_UpdateSid(...)
- *
- * These functions updates the CN state, when a new SID packet arrives
- *
- * Input:
- * - cng_inst : Pointer to created instance that should be freed
- * - SID : SID packet, all headers removed
- * - length : Length in bytes of SID packet
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int16_t WebRtcCng_UpdateSid(CNG_dec_inst* cng_inst, uint8_t* SID,
- size_t length);
-
-/****************************************************************************
- * WebRtcCng_Generate(...)
- *
- * These functions generates CN data when needed
- *
- * Input:
- * - cng_inst : Pointer to created instance that should be freed
- * - outData : pointer to area to write CN data
- * - nrOfSamples : How much data to generate
- * - new_period : >0 if a new period of CNG, will reset history
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int16_t WebRtcCng_Generate(CNG_dec_inst* cng_inst, int16_t* outData,
- size_t nrOfSamples, int16_t new_period);
-
-/*****************************************************************************
- * WebRtcCng_GetErrorCodeEnc/Dec(...)
- *
- * This functions can be used to check the error code of a CNG instance. When
- * a function returns -1 a error code will be set for that instance. The
- * function below extract the code of the last error that occurred in the
- * specified instance.
- *
- * Input:
- * - CNG_inst : CNG enc/dec instance
- *
- * Return value : Error code
- */
-int16_t WebRtcCng_GetErrorCodeEnc(CNG_enc_inst* cng_inst);
-int16_t WebRtcCng_GetErrorCodeDec(CNG_dec_inst* cng_inst);
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_MAIN_INCLUDE_WEBRTC_CNG_H_
diff --git a/webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h b/webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h
new file mode 100644
index 0000000000..64bea1e26f
--- /dev/null
+++ b/webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h
@@ -0,0 +1,163 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_WEBRTC_CNG_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_WEBRTC_CNG_H_
+
+#include <stddef.h>
+#include "webrtc/typedefs.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#define WEBRTC_CNG_MAX_LPC_ORDER 12
+#define WEBRTC_CNG_MAX_OUTSIZE_ORDER 640
+
+/* Define Error codes. */
+
+/* 6100 Encoder */
+#define CNG_ENCODER_NOT_INITIATED 6120
+#define CNG_DISALLOWED_LPC_ORDER 6130
+#define CNG_DISALLOWED_FRAME_SIZE 6140
+#define CNG_DISALLOWED_SAMPLING_FREQUENCY 6150
+/* 6200 Decoder */
+#define CNG_DECODER_NOT_INITIATED 6220
+
+typedef struct WebRtcCngEncInst CNG_enc_inst;
+typedef struct WebRtcCngDecInst CNG_dec_inst;
+
+/****************************************************************************
+ * WebRtcCng_CreateEnc/Dec(...)
+ *
+ * These functions create an instance of the specified structure
+ *
+ * Input:
+ * - XXX_inst      : Pointer to the instance to be created
+ *
+ * Return value : 0 - Ok
+ * -1 - Error
+ */
+int16_t WebRtcCng_CreateEnc(CNG_enc_inst** cng_inst);
+int16_t WebRtcCng_CreateDec(CNG_dec_inst** cng_inst);
+
+/****************************************************************************
+ * WebRtcCng_InitEnc/Dec(...)
+ *
+ * These functions initialize an instance
+ *
+ * Input:
+ * - cng_inst : Instance that should be initialized
+ *
+ * - fs : 8000 for narrowband and 16000 for wideband
+ * - interval : generate SID data every interval ms
+ * - quality : Number of refl. coefs, maximum allowed is 12
+ *
+ * Output:
+ * - cng_inst : Initialized instance
+ *
+ * Return value : 0 - Ok
+ * -1 - Error
+ */
+
+int WebRtcCng_InitEnc(CNG_enc_inst* cng_inst, int fs, int16_t interval,
+ int16_t quality);
+void WebRtcCng_InitDec(CNG_dec_inst* cng_inst);
+
+/****************************************************************************
+ * WebRtcCng_FreeEnc/Dec(...)
+ *
+ * These functions free the dynamic memory of a specified instance
+ *
+ * Input:
+ * - cng_inst : Pointer to created instance that should be freed
+ *
+ * Return value : 0 - Ok
+ * -1 - Error
+ */
+int16_t WebRtcCng_FreeEnc(CNG_enc_inst* cng_inst);
+int16_t WebRtcCng_FreeDec(CNG_dec_inst* cng_inst);
+
+/****************************************************************************
+ * WebRtcCng_Encode(...)
+ *
+ * This function analyzes background noise
+ *
+ * Input:
+ * - cng_inst : Pointer to created instance
+ * - speech : Signal to be analyzed
+ * - nrOfSamples : Size of speech vector
+ * - forceSID : not zero to force SID frame and reset
+ *
+ * Output:
+ * - bytesOut : Nr of bytes to transmit, might be 0
+ *
+ * Return value : 0 - Ok
+ * -1 - Error
+ */
+int WebRtcCng_Encode(CNG_enc_inst* cng_inst, int16_t* speech,
+ size_t nrOfSamples, uint8_t* SIDdata,
+ size_t* bytesOut, int16_t forceSID);
+
+/****************************************************************************
+ * WebRtcCng_UpdateSid(...)
+ *
+ * This function updates the CN state when a new SID packet arrives
+ *
+ * Input:
+ * - cng_inst      : Pointer to the created instance
+ * - SID : SID packet, all headers removed
+ * - length : Length in bytes of SID packet
+ *
+ * Return value : 0 - Ok
+ * -1 - Error
+ */
+int16_t WebRtcCng_UpdateSid(CNG_dec_inst* cng_inst, uint8_t* SID,
+ size_t length);
+
+/****************************************************************************
+ * WebRtcCng_Generate(...)
+ *
+ * This function generates CN data when needed
+ *
+ * Input:
+ * - cng_inst      : Pointer to the created instance
+ * - outData : pointer to area to write CN data
+ * - nrOfSamples : How much data to generate
+ * - new_period : >0 if a new period of CNG, will reset history
+ *
+ * Return value : 0 - Ok
+ * -1 - Error
+ */
+int16_t WebRtcCng_Generate(CNG_dec_inst* cng_inst, int16_t* outData,
+ size_t nrOfSamples, int16_t new_period);
+
+/*****************************************************************************
+ * WebRtcCng_GetErrorCodeEnc/Dec(...)
+ *
+ * These functions can be used to check the error code of a CNG instance.
+ * When a function returns -1, an error code is set for that instance. The
+ * functions below extract the code of the last error that occurred in the
+ * specified instance.
+ *
+ * Input:
+ * - CNG_inst : CNG enc/dec instance
+ *
+ * Return value : Error code
+ */
+int16_t WebRtcCng_GetErrorCodeEnc(CNG_enc_inst* cng_inst);
+int16_t WebRtcCng_GetErrorCodeDec(CNG_dec_inst* cng_inst);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_WEBRTC_CNG_H_
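For reference, an encoder-side sketch of the C API declared above, assuming
8 kHz input and the default quality of 8 reflection coefficients:

    CNG_enc_inst* enc = NULL;
    if (WebRtcCng_CreateEnc(&enc) == 0) {
      WebRtcCng_InitEnc(enc, 8000, 100 /* SID interval, ms */, 8);
      int16_t speech[80] = {0};                   // one 10 ms frame at 8 kHz
      uint8_t sid[WEBRTC_CNG_MAX_LPC_ORDER + 1];  // coefficients + energy
      size_t bytes_out = 0;
      WebRtcCng_Encode(enc, speech, 80, sid, &bytes_out, 0 /* forceSID */);
      // bytes_out stays 0 unless a SID frame is due (or forced).
      WebRtcCng_FreeEnc(enc);
    }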
diff --git a/webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.cc b/webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.cc
index 12306d9167..9757b4a010 100644
--- a/webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.cc
+++ b/webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.cc
@@ -8,9 +8,9 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/codecs/g711/include/audio_decoder_pcm.h"
+#include "webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.h"
-#include "webrtc/modules/audio_coding/codecs/g711/include/g711_interface.h"
+#include "webrtc/modules/audio_coding/codecs/g711/g711_interface.h"
namespace webrtc {
diff --git a/webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.h b/webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.h
new file mode 100644
index 0000000000..9dc3a6fd7a
--- /dev/null
+++ b/webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_G711_AUDIO_DECODER_PCM_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_G711_AUDIO_DECODER_PCM_H_
+
+#include "webrtc/base/checks.h"
+#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
+
+namespace webrtc {
+
+class AudioDecoderPcmU final : public AudioDecoder {
+ public:
+ explicit AudioDecoderPcmU(size_t num_channels) : num_channels_(num_channels) {
+ RTC_DCHECK_GE(num_channels, 1u);
+ }
+ void Reset() override;
+ int PacketDuration(const uint8_t* encoded, size_t encoded_len) const override;
+ size_t Channels() const override;
+
+ protected:
+ int DecodeInternal(const uint8_t* encoded,
+ size_t encoded_len,
+ int sample_rate_hz,
+ int16_t* decoded,
+ SpeechType* speech_type) override;
+
+ private:
+ const size_t num_channels_;
+ RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoderPcmU);
+};
+
+class AudioDecoderPcmA final : public AudioDecoder {
+ public:
+ explicit AudioDecoderPcmA(size_t num_channels) : num_channels_(num_channels) {
+ RTC_DCHECK_GE(num_channels, 1u);
+ }
+ void Reset() override;
+ int PacketDuration(const uint8_t* encoded, size_t encoded_len) const override;
+ size_t Channels() const override;
+
+ protected:
+ int DecodeInternal(const uint8_t* encoded,
+ size_t encoded_len,
+ int sample_rate_hz,
+ int16_t* decoded,
+ SpeechType* speech_type) override;
+
+ private:
+ const size_t num_channels_;
+ RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoderPcmA);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_G711_AUDIO_DECODER_PCM_H_
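The channel count is the only construction-time choice these decoders take;
a minimal sketch:

    AudioDecoderPcmU pcmu(1);  // mono u-law; Channels() will report 1
    AudioDecoderPcmA pcma(2);  // stereo A-law
    // Decoding runs through the public AudioDecoder base interface, which
    // dispatches to the protected DecodeInternal() declared above.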
diff --git a/webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.cc b/webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.cc
index dde3cc6799..ff61db8e8d 100644
--- a/webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.cc
+++ b/webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.cc
@@ -8,27 +8,18 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/codecs/g711/include/audio_encoder_pcm.h"
+#include "webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.h"
#include <limits>
#include "webrtc/base/checks.h"
#include "webrtc/common_types.h"
-#include "webrtc/modules/audio_coding/codecs/g711/include/g711_interface.h"
+#include "webrtc/modules/audio_coding/codecs/g711/g711_interface.h"
namespace webrtc {
namespace {
-int16_t NumSamplesPerFrame(int num_channels,
- int frame_size_ms,
- int sample_rate_hz) {
- int samples_per_frame = num_channels * frame_size_ms * sample_rate_hz / 1000;
- RTC_CHECK_LE(samples_per_frame, std::numeric_limits<int16_t>::max())
- << "Frame size too large.";
- return static_cast<int16_t>(samples_per_frame);
-}
-
template <typename T>
typename T::Config CreateConfig(const CodecInst& codec_inst) {
typename T::Config config;
@@ -50,9 +41,8 @@ AudioEncoderPcm::AudioEncoderPcm(const Config& config, int sample_rate_hz)
payload_type_(config.payload_type),
num_10ms_frames_per_packet_(
static_cast<size_t>(config.frame_size_ms / 10)),
- full_frame_samples_(NumSamplesPerFrame(config.num_channels,
- config.frame_size_ms,
- sample_rate_hz_)),
+ full_frame_samples_(
+ config.num_channels * config.frame_size_ms * sample_rate_hz / 1000),
first_timestamp_in_buffer_(0) {
RTC_CHECK_GT(sample_rate_hz, 0) << "Sample rate must be larger than 0 Hz";
RTC_CHECK_EQ(config.frame_size_ms % 10, 0)
@@ -70,7 +60,7 @@ int AudioEncoderPcm::SampleRateHz() const {
return sample_rate_hz_;
}
-int AudioEncoderPcm::NumChannels() const {
+size_t AudioEncoderPcm::NumChannels() const {
return num_channels_;
}
@@ -83,21 +73,19 @@ size_t AudioEncoderPcm::Max10MsFramesInAPacket() const {
}
int AudioEncoderPcm::GetTargetBitrate() const {
- return 8 * BytesPerSample() * SampleRateHz() * NumChannels();
+ return static_cast<int>(
+ 8 * BytesPerSample() * SampleRateHz() * NumChannels());
}
AudioEncoder::EncodedInfo AudioEncoderPcm::EncodeInternal(
uint32_t rtp_timestamp,
- const int16_t* audio,
+ rtc::ArrayView<const int16_t> audio,
size_t max_encoded_bytes,
uint8_t* encoded) {
- const int num_samples = SampleRateHz() / 100 * NumChannels();
if (speech_buffer_.empty()) {
first_timestamp_in_buffer_ = rtp_timestamp;
}
- for (int i = 0; i < num_samples; ++i) {
- speech_buffer_.push_back(audio[i]);
- }
+ speech_buffer_.insert(speech_buffer_.end(), audio.begin(), audio.end());
if (speech_buffer_.size() < full_frame_samples_) {
return EncodedInfo();
}
@@ -125,7 +113,7 @@ size_t AudioEncoderPcmA::EncodeCall(const int16_t* audio,
return WebRtcG711_EncodeA(audio, input_len, encoded);
}
-int AudioEncoderPcmA::BytesPerSample() const {
+size_t AudioEncoderPcmA::BytesPerSample() const {
return 1;
}
@@ -138,7 +126,7 @@ size_t AudioEncoderPcmU::EncodeCall(const int16_t* audio,
return WebRtcG711_EncodeU(audio, input_len, encoded);
}
-int AudioEncoderPcmU::BytesPerSample() const {
+size_t AudioEncoderPcmU::BytesPerSample() const {
return 1;
}
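With the NumSamplesPerFrame() helper gone, full_frame_samples_ is computed
inline. With the Config defaults (20 ms frames, mono) at G.711's 8000 Hz that
is 1 * 20 * 8000 / 1000 = 160 samples per packet, and since BytesPerSample()
is 1 for both variants, GetTargetBitrate() comes out to
8 * 1 * 8000 * 1 = 64000 bps, the standard G.711 rate.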
diff --git a/webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.h b/webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.h
new file mode 100644
index 0000000000..b839488628
--- /dev/null
+++ b/webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.h
@@ -0,0 +1,117 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_G711_AUDIO_ENCODER_PCM_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_G711_AUDIO_ENCODER_PCM_H_
+
+#include <vector>
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/audio_coding/codecs/audio_encoder.h"
+
+namespace webrtc {
+
+class AudioEncoderPcm : public AudioEncoder {
+ public:
+ struct Config {
+ public:
+ bool IsOk() const;
+
+ int frame_size_ms;
+ size_t num_channels;
+ int payload_type;
+
+ protected:
+ explicit Config(int pt)
+ : frame_size_ms(20), num_channels(1), payload_type(pt) {}
+ };
+
+ ~AudioEncoderPcm() override;
+
+ size_t MaxEncodedBytes() const override;
+ int SampleRateHz() const override;
+ size_t NumChannels() const override;
+ size_t Num10MsFramesInNextPacket() const override;
+ size_t Max10MsFramesInAPacket() const override;
+ int GetTargetBitrate() const override;
+ EncodedInfo EncodeInternal(uint32_t rtp_timestamp,
+ rtc::ArrayView<const int16_t> audio,
+ size_t max_encoded_bytes,
+ uint8_t* encoded) override;
+ void Reset() override;
+
+ protected:
+ AudioEncoderPcm(const Config& config, int sample_rate_hz);
+
+ virtual size_t EncodeCall(const int16_t* audio,
+ size_t input_len,
+ uint8_t* encoded) = 0;
+
+ virtual size_t BytesPerSample() const = 0;
+
+ private:
+ const int sample_rate_hz_;
+ const size_t num_channels_;
+ const int payload_type_;
+ const size_t num_10ms_frames_per_packet_;
+ const size_t full_frame_samples_;
+ std::vector<int16_t> speech_buffer_;
+ uint32_t first_timestamp_in_buffer_;
+};
+
+struct CodecInst;
+
+class AudioEncoderPcmA final : public AudioEncoderPcm {
+ public:
+ struct Config : public AudioEncoderPcm::Config {
+ Config() : AudioEncoderPcm::Config(8) {}
+ };
+
+ explicit AudioEncoderPcmA(const Config& config)
+ : AudioEncoderPcm(config, kSampleRateHz) {}
+ explicit AudioEncoderPcmA(const CodecInst& codec_inst);
+
+ protected:
+ size_t EncodeCall(const int16_t* audio,
+ size_t input_len,
+ uint8_t* encoded) override;
+
+ size_t BytesPerSample() const override;
+
+ private:
+ static const int kSampleRateHz = 8000;
+ RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderPcmA);
+};
+
+class AudioEncoderPcmU final : public AudioEncoderPcm {
+ public:
+ struct Config : public AudioEncoderPcm::Config {
+ Config() : AudioEncoderPcm::Config(0) {}
+ };
+
+ explicit AudioEncoderPcmU(const Config& config)
+ : AudioEncoderPcm(config, kSampleRateHz) {}
+ explicit AudioEncoderPcmU(const CodecInst& codec_inst);
+
+ protected:
+ size_t EncodeCall(const int16_t* audio,
+ size_t input_len,
+ uint8_t* encoded) override;
+
+ size_t BytesPerSample() const override;
+
+ private:
+ static const int kSampleRateHz = 8000;
+ RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderPcmU);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_G711_AUDIO_ENCODER_PCM_H_
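A construction sketch for the A-law variant, using the defaults baked into
its Config (20 ms frames, mono, payload type 8):

    AudioEncoderPcmA::Config config;
    if (config.IsOk()) {
      AudioEncoderPcmA encoder(config);
      // encoder.SampleRateHz() == 8000, encoder.NumChannels() == 1
    }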
diff --git a/webrtc/modules/audio_coding/codecs/g711/g711.gypi b/webrtc/modules/audio_coding/codecs/g711/g711.gypi
index d35d7874e7..4b902809ea 100644
--- a/webrtc/modules/audio_coding/codecs/g711/g711.gypi
+++ b/webrtc/modules/audio_coding/codecs/g711/g711.gypi
@@ -14,25 +14,15 @@
'dependencies': [
'audio_encoder_interface',
],
- 'include_dirs': [
- 'include',
- '<(webrtc_root)',
- ],
- 'direct_dependent_settings': {
- 'include_dirs': [
- 'include',
- '<(webrtc_root)',
- ],
- },
'sources': [
- 'include/g711_interface.h',
- 'include/audio_decoder_pcm.h',
- 'include/audio_encoder_pcm.h',
+ 'audio_decoder_pcm.cc',
+ 'audio_decoder_pcm.h',
+ 'audio_encoder_pcm.cc',
+ 'audio_encoder_pcm.h',
'g711_interface.c',
+ 'g711_interface.h',
'g711.c',
'g711.h',
- 'audio_decoder_pcm.cc',
- 'audio_encoder_pcm.cc',
],
},
], # targets
diff --git a/webrtc/modules/audio_coding/codecs/g711/g711_interface.h b/webrtc/modules/audio_coding/codecs/g711/g711_interface.h
new file mode 100644
index 0000000000..00854bbb2c
--- /dev/null
+++ b/webrtc/modules/audio_coding/codecs/g711/g711_interface.h
@@ -0,0 +1,135 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_G711_G711_INTERFACE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_G711_G711_INTERFACE_H_
+
+#include "webrtc/typedefs.h"
+
+// Comfort noise constants
+#define G711_WEBRTC_SPEECH 1
+#define G711_WEBRTC_CNG 2
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/****************************************************************************
+ * WebRtcG711_EncodeA(...)
+ *
+ * This function encodes a G711 A-law frame and inserts it into a packet.
+ * The input speech may be of any length.
+ *
+ * Input:
+ * - speechIn : Input speech vector
+ * - len : Samples in speechIn
+ *
+ * Output:
+ * - encoded : The encoded data vector
+ *
+ * Return value : Length (in bytes) of coded data.
+ * Always equal to len input parameter.
+ */
+
+size_t WebRtcG711_EncodeA(const int16_t* speechIn,
+ size_t len,
+ uint8_t* encoded);
+
+/****************************************************************************
+ * WebRtcG711_EncodeU(...)
+ *
+ * This function encodes a G711 U-law frame and inserts it into a packet.
+ * The input speech may be of any length.
+ *
+ * Input:
+ * - speechIn : Input speech vector
+ * - len : Samples in speechIn
+ *
+ * Output:
+ * - encoded : The encoded data vector
+ *
+ * Return value : Length (in bytes) of coded data.
+ * Always equal to len input parameter.
+ */
+
+size_t WebRtcG711_EncodeU(const int16_t* speechIn,
+ size_t len,
+ uint8_t* encoded);
+
+/****************************************************************************
+ * WebRtcG711_DecodeA(...)
+ *
+ * This function decodes a G711 A-law encoded packet.
+ *
+ * Input:
+ * - encoded : Encoded data
+ * - len : Bytes in encoded vector
+ *
+ * Output:
+ * - decoded : The decoded vector
+ * - speechType : 1 normal, 2 CNG (for G711 it should
+ * always return 1 since G711 does not have a
+ * built-in DTX/CNG scheme)
+ *
+ * Return value : >0 - Samples in decoded vector
+ * -1 - Error
+ */
+
+size_t WebRtcG711_DecodeA(const uint8_t* encoded,
+ size_t len,
+ int16_t* decoded,
+ int16_t* speechType);
+
+/****************************************************************************
+ * WebRtcG711_DecodeU(...)
+ *
+ * This function decodes a G711 U-law encoded packet.
+ *
+ * Input:
+ * - encoded : Encoded data
+ * - len : Bytes in encoded vector
+ *
+ * Output:
+ * - decoded : The decoded vector
+ * - speechType : 1 normal, 2 CNG (for G711 it should
+ * always return 1 since G711 does not have a
+ * built-in DTX/CNG scheme)
+ *
+ * Return value : >0 - Samples in decoded vector
+ * -1 - Error
+ */
+
+size_t WebRtcG711_DecodeU(const uint8_t* encoded,
+ size_t len,
+ int16_t* decoded,
+ int16_t* speechType);
+
+/**********************************************************************
+* WebRtcG711_Version(...)
+*
+* This function gives the version string of the G.711 codec.
+*
+* Input:
+*      - lenBytes:     the size of the allocated space (in bytes) where
+*                      the version number is written (in string format).
+*
+* Output:
+* - version: Pointer to a buffer where the version number is
+* written to.
+*
+*/
+
+int16_t WebRtcG711_Version(char* version, int16_t lenBytes);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_G711_G711_INTERFACE_H_
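A round-trip sketch of the A-law pair declared above, over one 10 ms frame
(80 samples at 8 kHz); per the comments, the encoded length always equals
the input length:

    int16_t speech[80] = {0};
    uint8_t payload[80];
    int16_t decoded[80];
    int16_t speech_type;
    size_t encoded_len = WebRtcG711_EncodeA(speech, 80, payload);  // == 80
    size_t decoded_len =
        WebRtcG711_DecodeA(payload, encoded_len, decoded, &speech_type);
    // speech_type is always 1: G711 has no built-in DTX/CNG scheme.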
diff --git a/webrtc/modules/audio_coding/codecs/g711/include/audio_decoder_pcm.h b/webrtc/modules/audio_coding/codecs/g711/include/audio_decoder_pcm.h
deleted file mode 100644
index 7bc37d3b7a..0000000000
--- a/webrtc/modules/audio_coding/codecs/g711/include/audio_decoder_pcm.h
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_G711_INCLUDE_AUDIO_DECODER_PCM_H_
-#define WEBRTC_MODULES_AUDIO_CODING_CODECS_G711_INCLUDE_AUDIO_DECODER_PCM_H_
-
-#include "webrtc/base/checks.h"
-#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
-
-namespace webrtc {
-
-class AudioDecoderPcmU final : public AudioDecoder {
- public:
- explicit AudioDecoderPcmU(size_t num_channels) : num_channels_(num_channels) {
- RTC_DCHECK_GE(num_channels, 1u);
- }
- void Reset() override;
- int PacketDuration(const uint8_t* encoded, size_t encoded_len) const override;
- size_t Channels() const override;
-
- protected:
- int DecodeInternal(const uint8_t* encoded,
- size_t encoded_len,
- int sample_rate_hz,
- int16_t* decoded,
- SpeechType* speech_type) override;
-
- private:
- const size_t num_channels_;
- RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoderPcmU);
-};
-
-class AudioDecoderPcmA final : public AudioDecoder {
- public:
- explicit AudioDecoderPcmA(size_t num_channels) : num_channels_(num_channels) {
- RTC_DCHECK_GE(num_channels, 1u);
- }
- void Reset() override;
- int PacketDuration(const uint8_t* encoded, size_t encoded_len) const override;
- size_t Channels() const override;
-
- protected:
- int DecodeInternal(const uint8_t* encoded,
- size_t encoded_len,
- int sample_rate_hz,
- int16_t* decoded,
- SpeechType* speech_type) override;
-
- private:
- const size_t num_channels_;
- RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoderPcmA);
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_G711_INCLUDE_AUDIO_DECODER_PCM_H_
diff --git a/webrtc/modules/audio_coding/codecs/g711/include/audio_encoder_pcm.h b/webrtc/modules/audio_coding/codecs/g711/include/audio_encoder_pcm.h
deleted file mode 100644
index e532f9b1bc..0000000000
--- a/webrtc/modules/audio_coding/codecs/g711/include/audio_encoder_pcm.h
+++ /dev/null
@@ -1,117 +0,0 @@
-/*
- * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_G711_INCLUDE_AUDIO_ENCODER_PCM_H_
-#define WEBRTC_MODULES_AUDIO_CODING_CODECS_G711_INCLUDE_AUDIO_ENCODER_PCM_H_
-
-#include <vector>
-
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/audio_coding/codecs/audio_encoder.h"
-
-namespace webrtc {
-
-class AudioEncoderPcm : public AudioEncoder {
- public:
- struct Config {
- public:
- bool IsOk() const;
-
- int frame_size_ms;
- int num_channels;
- int payload_type;
-
- protected:
- explicit Config(int pt)
- : frame_size_ms(20), num_channels(1), payload_type(pt) {}
- };
-
- ~AudioEncoderPcm() override;
-
- size_t MaxEncodedBytes() const override;
- int SampleRateHz() const override;
- int NumChannels() const override;
- size_t Num10MsFramesInNextPacket() const override;
- size_t Max10MsFramesInAPacket() const override;
- int GetTargetBitrate() const override;
- EncodedInfo EncodeInternal(uint32_t rtp_timestamp,
- const int16_t* audio,
- size_t max_encoded_bytes,
- uint8_t* encoded) override;
- void Reset() override;
-
- protected:
- AudioEncoderPcm(const Config& config, int sample_rate_hz);
-
- virtual size_t EncodeCall(const int16_t* audio,
- size_t input_len,
- uint8_t* encoded) = 0;
-
- virtual int BytesPerSample() const = 0;
-
- private:
- const int sample_rate_hz_;
- const int num_channels_;
- const int payload_type_;
- const size_t num_10ms_frames_per_packet_;
- const size_t full_frame_samples_;
- std::vector<int16_t> speech_buffer_;
- uint32_t first_timestamp_in_buffer_;
-};
-
-struct CodecInst;
-
-class AudioEncoderPcmA final : public AudioEncoderPcm {
- public:
- struct Config : public AudioEncoderPcm::Config {
- Config() : AudioEncoderPcm::Config(8) {}
- };
-
- explicit AudioEncoderPcmA(const Config& config)
- : AudioEncoderPcm(config, kSampleRateHz) {}
- explicit AudioEncoderPcmA(const CodecInst& codec_inst);
-
- protected:
- size_t EncodeCall(const int16_t* audio,
- size_t input_len,
- uint8_t* encoded) override;
-
- int BytesPerSample() const override;
-
- private:
- static const int kSampleRateHz = 8000;
- RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderPcmA);
-};
-
-class AudioEncoderPcmU final : public AudioEncoderPcm {
- public:
- struct Config : public AudioEncoderPcm::Config {
- Config() : AudioEncoderPcm::Config(0) {}
- };
-
- explicit AudioEncoderPcmU(const Config& config)
- : AudioEncoderPcm(config, kSampleRateHz) {}
- explicit AudioEncoderPcmU(const CodecInst& codec_inst);
-
- protected:
- size_t EncodeCall(const int16_t* audio,
- size_t input_len,
- uint8_t* encoded) override;
-
- int BytesPerSample() const override;
-
- private:
- static const int kSampleRateHz = 8000;
- RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderPcmU);
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_G711_INCLUDE_AUDIO_ENCODER_PCM_H_
diff --git a/webrtc/modules/audio_coding/codecs/g711/include/g711_interface.h b/webrtc/modules/audio_coding/codecs/g711/include/g711_interface.h
deleted file mode 100644
index f9867f4504..0000000000
--- a/webrtc/modules/audio_coding/codecs/g711/include/g711_interface.h
+++ /dev/null
@@ -1,135 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef MODULES_AUDIO_CODING_CODECS_G711_MAIN_INCLUDE_G711_INTERFACE_H_
-#define MODULES_AUDIO_CODING_CODECS_G711_MAIN_INCLUDE_G711_INTERFACE_H_
-
-#include "webrtc/typedefs.h"
-
-// Comfort noise constants
-#define G711_WEBRTC_SPEECH 1
-#define G711_WEBRTC_CNG 2
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-/****************************************************************************
- * WebRtcG711_EncodeA(...)
- *
- * This function encodes a G711 A-law frame and inserts it into a packet.
- * Input speech length has be of any length.
- *
- * Input:
- * - speechIn : Input speech vector
- * - len : Samples in speechIn
- *
- * Output:
- * - encoded : The encoded data vector
- *
- * Return value : Length (in bytes) of coded data.
- * Always equal to len input parameter.
- */
-
-size_t WebRtcG711_EncodeA(const int16_t* speechIn,
- size_t len,
- uint8_t* encoded);
-
-/****************************************************************************
- * WebRtcG711_EncodeU(...)
- *
- * This function encodes a G711 U-law frame and inserts it into a packet.
- * Input speech length has be of any length.
- *
- * Input:
- * - speechIn : Input speech vector
- * - len : Samples in speechIn
- *
- * Output:
- * - encoded : The encoded data vector
- *
- * Return value : Length (in bytes) of coded data.
- * Always equal to len input parameter.
- */
-
-size_t WebRtcG711_EncodeU(const int16_t* speechIn,
- size_t len,
- uint8_t* encoded);
-
-/****************************************************************************
- * WebRtcG711_DecodeA(...)
- *
- * This function decodes a packet G711 A-law frame.
- *
- * Input:
- * - encoded : Encoded data
- * - len : Bytes in encoded vector
- *
- * Output:
- * - decoded : The decoded vector
- * - speechType : 1 normal, 2 CNG (for G711 it should
- * always return 1 since G711 does not have a
- * built-in DTX/CNG scheme)
- *
- * Return value : >0 - Samples in decoded vector
- * -1 - Error
- */
-
-size_t WebRtcG711_DecodeA(const uint8_t* encoded,
- size_t len,
- int16_t* decoded,
- int16_t* speechType);
-
-/****************************************************************************
- * WebRtcG711_DecodeU(...)
- *
- * This function decodes a packet G711 U-law frame.
- *
- * Input:
- * - encoded : Encoded data
- * - len : Bytes in encoded vector
- *
- * Output:
- * - decoded : The decoded vector
- * - speechType : 1 normal, 2 CNG (for G711 it should
- * always return 1 since G711 does not have a
- * built-in DTX/CNG scheme)
- *
- * Return value : >0 - Samples in decoded vector
- * -1 - Error
- */
-
-size_t WebRtcG711_DecodeU(const uint8_t* encoded,
- size_t len,
- int16_t* decoded,
- int16_t* speechType);
-
-/**********************************************************************
-* WebRtcG711_Version(...)
-*
-* This function gives the version string of the G.711 codec.
-*
-* Input:
-* - lenBytes: the size of Allocated space (in Bytes) where
-* the version number is written to (in string format).
-*
-* Output:
-* - version: Pointer to a buffer where the version number is
-* written to.
-*
-*/
-
-int16_t WebRtcG711_Version(char* version, int16_t lenBytes);
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* MODULES_AUDIO_CODING_CODECS_G711_MAIN_INCLUDE_G711_INCLUDE_H_ */
diff --git a/webrtc/modules/audio_coding/codecs/g711/test/testG711.cc b/webrtc/modules/audio_coding/codecs/g711/test/testG711.cc
index 94248f7a66..5675b1f8b0 100644
--- a/webrtc/modules/audio_coding/codecs/g711/test/testG711.cc
+++ b/webrtc/modules/audio_coding/codecs/g711/test/testG711.cc
@@ -17,7 +17,7 @@
#include <string.h>
/* include API */
-#include "g711_interface.h"
+#include "webrtc/modules/audio_coding/codecs/g711/g711_interface.h"
/* Runtime statistics */
#include <time.h>
diff --git a/webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.cc b/webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.cc
index 55ebe7a315..7676e90d9e 100644
--- a/webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.cc
+++ b/webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.cc
@@ -8,12 +8,12 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/codecs/g722/include/audio_decoder_g722.h"
+#include "webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.h"
#include <string.h>
#include "webrtc/base/checks.h"
-#include "webrtc/modules/audio_coding/codecs/g722/include/g722_interface.h"
+#include "webrtc/modules/audio_coding/codecs/g722/g722_interface.h"
namespace webrtc {
diff --git a/webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.h b/webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.h
new file mode 100644
index 0000000000..7cc2ea9877
--- /dev/null
+++ b/webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_G722_AUDIO_DECODER_G722_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_G722_AUDIO_DECODER_G722_H_
+
+#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
+
+typedef struct WebRtcG722DecInst G722DecInst;
+
+namespace webrtc {
+
+class AudioDecoderG722 final : public AudioDecoder {
+ public:
+ AudioDecoderG722();
+ ~AudioDecoderG722() override;
+ bool HasDecodePlc() const override;
+ void Reset() override;
+ int PacketDuration(const uint8_t* encoded, size_t encoded_len) const override;
+ size_t Channels() const override;
+
+ protected:
+ int DecodeInternal(const uint8_t* encoded,
+ size_t encoded_len,
+ int sample_rate_hz,
+ int16_t* decoded,
+ SpeechType* speech_type) override;
+
+ private:
+ G722DecInst* dec_state_;
+ RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoderG722);
+};
+
+class AudioDecoderG722Stereo final : public AudioDecoder {
+ public:
+ AudioDecoderG722Stereo();
+ ~AudioDecoderG722Stereo() override;
+ void Reset() override;
+
+ protected:
+ int DecodeInternal(const uint8_t* encoded,
+ size_t encoded_len,
+ int sample_rate_hz,
+ int16_t* decoded,
+ SpeechType* speech_type) override;
+ size_t Channels() const override;
+
+ private:
+ // Splits the stereo-interleaved payload in |encoded| into separate payloads
+ // for left and right channels. The separated payloads are written to
+  // |encoded_deinterleaved|, which must hold at least |encoded_len| bytes.
+ // The left channel starts at offset 0, while the right channel starts at
+ // offset encoded_len / 2 into |encoded_deinterleaved|.
+ void SplitStereoPacket(const uint8_t* encoded,
+ size_t encoded_len,
+ uint8_t* encoded_deinterleaved);
+
+ G722DecInst* dec_state_left_;
+ G722DecInst* dec_state_right_;
+ RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoderG722Stereo);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_G722_AUDIO_DECODER_G722_H_
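The SplitStereoPacket() comment pins down the output layout: left-channel
bytes first, right-channel bytes starting at encoded_len / 2. An illustrative
sketch consistent with that contract, assuming the input is byte-interleaved;
the actual implementation lives in the .cc file:

    void SplitStereoSketch(const uint8_t* encoded, size_t encoded_len,
                           uint8_t* encoded_deinterleaved) {
      for (size_t i = 0; i < encoded_len / 2; ++i) {
        encoded_deinterleaved[i] = encoded[2 * i];                       // left
        encoded_deinterleaved[encoded_len / 2 + i] = encoded[2 * i + 1]; // right
      }
    }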
diff --git a/webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.cc b/webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.cc
index 43b097fa0e..d7203b9da3 100644
--- a/webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.cc
+++ b/webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.cc
@@ -8,12 +8,12 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/codecs/g722/include/audio_encoder_g722.h"
+#include "webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.h"
#include <limits>
#include "webrtc/base/checks.h"
#include "webrtc/common_types.h"
-#include "webrtc/modules/audio_coding/codecs/g722/include/g722_interface.h"
+#include "webrtc/modules/audio_coding/codecs/g722/g722_interface.h"
namespace webrtc {
@@ -48,7 +48,7 @@ AudioEncoderG722::AudioEncoderG722(const Config& config)
RTC_CHECK(config.IsOk());
const size_t samples_per_channel =
kSampleRateHz / 100 * num_10ms_frames_per_packet_;
- for (int i = 0; i < num_channels_; ++i) {
+ for (size_t i = 0; i < num_channels_; ++i) {
encoders_[i].speech_buffer.reset(new int16_t[samples_per_channel]);
encoders_[i].encoded_buffer.SetSize(samples_per_channel / 2);
}
@@ -68,7 +68,7 @@ int AudioEncoderG722::SampleRateHz() const {
return kSampleRateHz;
}
-int AudioEncoderG722::NumChannels() const {
+size_t AudioEncoderG722::NumChannels() const {
return num_channels_;
}
@@ -88,12 +88,12 @@ size_t AudioEncoderG722::Max10MsFramesInAPacket() const {
int AudioEncoderG722::GetTargetBitrate() const {
// 4 bits/sample, 16000 samples/s/channel.
- return 64000 * NumChannels();
+ return static_cast<int>(64000 * NumChannels());
}
AudioEncoder::EncodedInfo AudioEncoderG722::EncodeInternal(
uint32_t rtp_timestamp,
- const int16_t* audio,
+ rtc::ArrayView<const int16_t> audio,
size_t max_encoded_bytes,
uint8_t* encoded) {
RTC_CHECK_GE(max_encoded_bytes, MaxEncodedBytes());
@@ -104,7 +104,7 @@ AudioEncoder::EncodedInfo AudioEncoderG722::EncodeInternal(
// Deinterleave samples and save them in each channel's buffer.
const size_t start = kSampleRateHz / 100 * num_10ms_frames_buffered_;
for (size_t i = 0; i < kSampleRateHz / 100; ++i)
- for (int j = 0; j < num_channels_; ++j)
+ for (size_t j = 0; j < num_channels_; ++j)
encoders_[j].speech_buffer[start + i] = audio[i * num_channels_ + j];
// If we don't yet have enough samples for a packet, we're done for now.
@@ -116,7 +116,7 @@ AudioEncoder::EncodedInfo AudioEncoderG722::EncodeInternal(
RTC_CHECK_EQ(num_10ms_frames_buffered_, num_10ms_frames_per_packet_);
num_10ms_frames_buffered_ = 0;
const size_t samples_per_channel = SamplesPerChannel();
- for (int i = 0; i < num_channels_; ++i) {
+ for (size_t i = 0; i < num_channels_; ++i) {
const size_t encoded = WebRtcG722_Encode(
encoders_[i].encoder, encoders_[i].speech_buffer.get(),
samples_per_channel, encoders_[i].encoded_buffer.data());
@@ -127,12 +127,12 @@ AudioEncoder::EncodedInfo AudioEncoderG722::EncodeInternal(
// channel and the interleaved stream encodes two samples per byte, most
// significant half first.
for (size_t i = 0; i < samples_per_channel / 2; ++i) {
- for (int j = 0; j < num_channels_; ++j) {
+ for (size_t j = 0; j < num_channels_; ++j) {
uint8_t two_samples = encoders_[j].encoded_buffer.data()[i];
interleave_buffer_.data()[j] = two_samples >> 4;
interleave_buffer_.data()[num_channels_ + j] = two_samples & 0xf;
}
- for (int j = 0; j < num_channels_; ++j)
+ for (size_t j = 0; j < num_channels_; ++j)
encoded[i * num_channels_ + j] = interleave_buffer_.data()[2 * j] << 4 |
interleave_buffer_.data()[2 * j + 1];
}
@@ -145,7 +145,7 @@ AudioEncoder::EncodedInfo AudioEncoderG722::EncodeInternal(
void AudioEncoderG722::Reset() {
num_10ms_frames_buffered_ = 0;
- for (int i = 0; i < num_channels_; ++i)
+ for (size_t i = 0; i < num_channels_; ++i)
RTC_CHECK_EQ(0, WebRtcG722_EncoderInit(encoders_[i].encoder));
}
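The nibble re-packing in EncodeInternal() above is easiest to follow with
one encoded byte per channel. A worked stereo example:

    // Left byte 0xAB holds samples L0 = 0xA then L1 = 0xB (most significant
    // half first); right byte 0xCD holds R0 = 0xC then R1 = 0xD.
    // interleave_buffer_ becomes [L0, R0, L1, R1], so the output bytes are
    //   encoded[0] = L0 << 4 | R0 = 0xAC
    //   encoded[1] = L1 << 4 | R1 = 0xBD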
diff --git a/webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.h b/webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.h
new file mode 100644
index 0000000000..07d767e778
--- /dev/null
+++ b/webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_G722_AUDIO_ENCODER_G722_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_G722_AUDIO_ENCODER_G722_H_
+
+#include "webrtc/base/buffer.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/audio_coding/codecs/audio_encoder.h"
+#include "webrtc/modules/audio_coding/codecs/g722/g722_interface.h"
+
+namespace webrtc {
+
+struct CodecInst;
+
+class AudioEncoderG722 final : public AudioEncoder {
+ public:
+ struct Config {
+ bool IsOk() const;
+
+ int payload_type = 9;
+ int frame_size_ms = 20;
+ size_t num_channels = 1;
+ };
+
+ explicit AudioEncoderG722(const Config& config);
+ explicit AudioEncoderG722(const CodecInst& codec_inst);
+ ~AudioEncoderG722() override;
+
+ size_t MaxEncodedBytes() const override;
+ int SampleRateHz() const override;
+ size_t NumChannels() const override;
+ int RtpTimestampRateHz() const override;
+ size_t Num10MsFramesInNextPacket() const override;
+ size_t Max10MsFramesInAPacket() const override;
+ int GetTargetBitrate() const override;
+ EncodedInfo EncodeInternal(uint32_t rtp_timestamp,
+ rtc::ArrayView<const int16_t> audio,
+ size_t max_encoded_bytes,
+ uint8_t* encoded) override;
+ void Reset() override;
+
+ private:
+ // The encoder state for one channel.
+ struct EncoderState {
+ G722EncInst* encoder;
+ rtc::scoped_ptr<int16_t[]> speech_buffer; // Queued up for encoding.
+ rtc::Buffer encoded_buffer; // Already encoded.
+ EncoderState();
+ ~EncoderState();
+ };
+
+ size_t SamplesPerChannel() const;
+
+ const size_t num_channels_;
+ const int payload_type_;
+ const size_t num_10ms_frames_per_packet_;
+ size_t num_10ms_frames_buffered_;
+ uint32_t first_timestamp_in_buffer_;
+ const rtc::scoped_ptr<EncoderState[]> encoders_;
+ rtc::Buffer interleave_buffer_;
+ RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderG722);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_G722_AUDIO_ENCODER_G722_H_
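A stereo configuration sketch; note num_channels is now a size_t:

    AudioEncoderG722::Config config;
    config.num_channels = 2;
    if (config.IsOk()) {
      AudioEncoderG722 encoder(config);
      // Per the bitrate comment in the .cc above, GetTargetBitrate() is
      // 64000 * 2 = 128000 bps.
    }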
diff --git a/webrtc/modules/audio_coding/codecs/g722/g722.gypi b/webrtc/modules/audio_coding/codecs/g722/g722.gypi
index aad11e3685..756fabe345 100644
--- a/webrtc/modules/audio_coding/codecs/g722/g722.gypi
+++ b/webrtc/modules/audio_coding/codecs/g722/g722.gypi
@@ -13,26 +13,16 @@
'dependencies': [
'audio_encoder_interface',
],
- 'include_dirs': [
- 'include',
- '<(webrtc_root)',
- ],
- 'direct_dependent_settings': {
- 'include_dirs': [
- 'include',
- '<(webrtc_root)',
- ],
- },
'sources': [
'audio_decoder_g722.cc',
+ 'audio_decoder_g722.h',
'audio_encoder_g722.cc',
- 'include/audio_decoder_g722.h',
- 'include/audio_encoder_g722.h',
- 'include/g722_interface.h',
+ 'audio_encoder_g722.h',
'g722_interface.c',
- 'g722_encode.c',
+ 'g722_interface.h',
'g722_decode.c',
'g722_enc_dec.h',
+ 'g722_encode.c',
],
},
], # targets
diff --git a/webrtc/modules/audio_coding/codecs/g722/g722_interface.h b/webrtc/modules/audio_coding/codecs/g722/g722_interface.h
new file mode 100644
index 0000000000..b411ef0e8e
--- /dev/null
+++ b/webrtc/modules/audio_coding/codecs/g722/g722_interface.h
@@ -0,0 +1,182 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_G722_G722_INTERFACE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_G722_G722_INTERFACE_H_
+
+#include "webrtc/typedefs.h"
+
+/*
+ * Solution to support multiple instances
+ */
+
+typedef struct WebRtcG722EncInst G722EncInst;
+typedef struct WebRtcG722DecInst G722DecInst;
+
+/*
+ * Comfort noise constants
+ */
+
+#define G722_WEBRTC_SPEECH 1
+#define G722_WEBRTC_CNG 2
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+/****************************************************************************
+ * WebRtcG722_CreateEncoder(...)
+ *
+ * Create memory used for G722 encoder
+ *
+ * Input:
+ * - G722enc_inst : G722 instance for encoder
+ *
+ * Return value : 0 - Ok
+ * -1 - Error
+ */
+int16_t WebRtcG722_CreateEncoder(G722EncInst **G722enc_inst);
+
+
+/****************************************************************************
+ * WebRtcG722_EncoderInit(...)
+ *
+ * This function initializes a G722 instance
+ *
+ * Input:
+ *      - G722enc_inst         : G722 instance to be initialized
+ *
+ * Return value : 0 - Ok
+ * -1 - Error
+ */
+
+int16_t WebRtcG722_EncoderInit(G722EncInst *G722enc_inst);
+
+
+/****************************************************************************
+ * WebRtcG722_FreeEncoder(...)
+ *
+ * Free the memory used for G722 encoder
+ *
+ * Input:
+ * - G722enc_inst : G722 instance for encoder
+ *
+ * Return value : 0 - Ok
+ * -1 - Error
+ */
+int WebRtcG722_FreeEncoder(G722EncInst *G722enc_inst);
+
+
+
+/****************************************************************************
+ * WebRtcG722_Encode(...)
+ *
+ * This function encodes input speech as G722 data.
+ *
+ * Input:
+ * - G722enc_inst : G722 instance, i.e. the user that should encode
+ * a packet
+ * - speechIn : Input speech vector
+ * - len : Samples in speechIn
+ *
+ * Output:
+ * - encoded : The encoded data vector
+ *
+ * Return value : Length (in bytes) of coded data
+ */
+
+size_t WebRtcG722_Encode(G722EncInst* G722enc_inst,
+ const int16_t* speechIn,
+ size_t len,
+ uint8_t* encoded);
+
+
+/****************************************************************************
+ * WebRtcG722_CreateDecoder(...)
+ *
+ * Create memory used for G722 decoder
+ *
+ * Input:
+ * - G722dec_inst : G722 instance for decoder
+ *
+ * Return value : 0 - Ok
+ * -1 - Error
+ */
+int16_t WebRtcG722_CreateDecoder(G722DecInst **G722dec_inst);
+
+/****************************************************************************
+ * WebRtcG722_DecoderInit(...)
+ *
+ * This function initializes a G722 instance
+ *
+ * Input:
+ * - inst : G722 instance
+ */
+
+void WebRtcG722_DecoderInit(G722DecInst* inst);
+
+/****************************************************************************
+ * WebRtcG722_FreeDecoder(...)
+ *
+ * Free the memory used for G722 decoder
+ *
+ * Input:
+ * - G722dec_inst : G722 instance for decoder
+ *
+ * Return value : 0 - Ok
+ * -1 - Error
+ */
+
+int WebRtcG722_FreeDecoder(G722DecInst *G722dec_inst);
+
+
+/****************************************************************************
+ * WebRtcG722_Decode(...)
+ *
+ * This function decodes a packet with G722 frame(s). Output speech length
+ * will be a multiple of 80 samples (80*frames/packet).
+ *
+ * Input:
+ * - G722dec_inst : G722 instance, i.e. the user that should decode
+ * a packet
+ * - encoded : Encoded G722 frame(s)
+ * - len : Bytes in encoded vector
+ *
+ * Output:
+ * - decoded : The decoded vector
+ * - speechType : 1 normal, 2 CNG (Since G722 does not have its own
+ * DTX/CNG scheme it should always return 1)
+ *
+ * Return value : Samples in decoded vector
+ */
+
+size_t WebRtcG722_Decode(G722DecInst *G722dec_inst,
+ const uint8_t* encoded,
+ size_t len,
+ int16_t *decoded,
+ int16_t *speechType);
+
+/****************************************************************************
+ * WebRtcG722_Version(...)
+ *
+ * Get a string with the current version of the codec
+ */
+
+int16_t WebRtcG722_Version(char *versionStr, short len);
+
+
+#ifdef __cplusplus
+}
+#endif
+
+
+#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_G722_G722_INTERFACE_H_ */
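An encoder-side sketch of the C API declared above, for one 10 ms mono frame
(160 samples at 16 kHz; G722 emits half a byte per sample, so 80 bytes out):

    G722EncInst* enc = NULL;
    if (WebRtcG722_CreateEncoder(&enc) == 0) {
      WebRtcG722_EncoderInit(enc);
      int16_t speech[160] = {0};
      uint8_t payload[80];
      size_t bytes = WebRtcG722_Encode(enc, speech, 160, payload);  // == 80
      WebRtcG722_FreeEncoder(enc);
    }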
diff --git a/webrtc/modules/audio_coding/codecs/g722/include/audio_decoder_g722.h b/webrtc/modules/audio_coding/codecs/g722/include/audio_decoder_g722.h
deleted file mode 100644
index b9fa68fc48..0000000000
--- a/webrtc/modules/audio_coding/codecs/g722/include/audio_decoder_g722.h
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_G722_INCLUDE_AUDIO_DECODER_G722_H_
-#define WEBRTC_MODULES_AUDIO_CODING_CODECS_G722_INCLUDE_AUDIO_DECODER_G722_H_
-
-#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
-
-typedef struct WebRtcG722DecInst G722DecInst;
-
-namespace webrtc {
-
-class AudioDecoderG722 final : public AudioDecoder {
- public:
- AudioDecoderG722();
- ~AudioDecoderG722() override;
- bool HasDecodePlc() const override;
- void Reset() override;
- int PacketDuration(const uint8_t* encoded, size_t encoded_len) const override;
- size_t Channels() const override;
-
- protected:
- int DecodeInternal(const uint8_t* encoded,
- size_t encoded_len,
- int sample_rate_hz,
- int16_t* decoded,
- SpeechType* speech_type) override;
-
- private:
- G722DecInst* dec_state_;
- RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoderG722);
-};
-
-class AudioDecoderG722Stereo final : public AudioDecoder {
- public:
- AudioDecoderG722Stereo();
- ~AudioDecoderG722Stereo() override;
- void Reset() override;
-
- protected:
- int DecodeInternal(const uint8_t* encoded,
- size_t encoded_len,
- int sample_rate_hz,
- int16_t* decoded,
- SpeechType* speech_type) override;
- size_t Channels() const override;
-
- private:
- // Splits the stereo-interleaved payload in |encoded| into separate payloads
- // for left and right channels. The separated payloads are written to
- // |encoded_deinterleaved|, which must hold at least |encoded_len| samples.
- // The left channel starts at offset 0, while the right channel starts at
- // offset encoded_len / 2 into |encoded_deinterleaved|.
- void SplitStereoPacket(const uint8_t* encoded,
- size_t encoded_len,
- uint8_t* encoded_deinterleaved);
-
- G722DecInst* dec_state_left_;
- G722DecInst* dec_state_right_;
- RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoderG722Stereo);
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_G722_INCLUDE_AUDIO_DECODER_G722_H_
diff --git a/webrtc/modules/audio_coding/codecs/g722/include/audio_encoder_g722.h b/webrtc/modules/audio_coding/codecs/g722/include/audio_encoder_g722.h
deleted file mode 100644
index 12495c5f48..0000000000
--- a/webrtc/modules/audio_coding/codecs/g722/include/audio_encoder_g722.h
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_G722_INCLUDE_AUDIO_ENCODER_G722_H_
-#define WEBRTC_MODULES_AUDIO_CODING_CODECS_G722_INCLUDE_AUDIO_ENCODER_G722_H_
-
-#include "webrtc/base/buffer.h"
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/audio_coding/codecs/audio_encoder.h"
-#include "webrtc/modules/audio_coding/codecs/g722/include/g722_interface.h"
-
-namespace webrtc {
-
-struct CodecInst;
-
-class AudioEncoderG722 final : public AudioEncoder {
- public:
- struct Config {
- bool IsOk() const;
-
- int payload_type = 9;
- int frame_size_ms = 20;
- int num_channels = 1;
- };
-
- explicit AudioEncoderG722(const Config& config);
- explicit AudioEncoderG722(const CodecInst& codec_inst);
- ~AudioEncoderG722() override;
-
- size_t MaxEncodedBytes() const override;
- int SampleRateHz() const override;
- int NumChannels() const override;
- int RtpTimestampRateHz() const override;
- size_t Num10MsFramesInNextPacket() const override;
- size_t Max10MsFramesInAPacket() const override;
- int GetTargetBitrate() const override;
- EncodedInfo EncodeInternal(uint32_t rtp_timestamp,
- const int16_t* audio,
- size_t max_encoded_bytes,
- uint8_t* encoded) override;
- void Reset() override;
-
- private:
- // The encoder state for one channel.
- struct EncoderState {
- G722EncInst* encoder;
- rtc::scoped_ptr<int16_t[]> speech_buffer; // Queued up for encoding.
- rtc::Buffer encoded_buffer; // Already encoded.
- EncoderState();
- ~EncoderState();
- };
-
- size_t SamplesPerChannel() const;
-
- const int num_channels_;
- const int payload_type_;
- const size_t num_10ms_frames_per_packet_;
- size_t num_10ms_frames_buffered_;
- uint32_t first_timestamp_in_buffer_;
- const rtc::scoped_ptr<EncoderState[]> encoders_;
- rtc::Buffer interleave_buffer_;
- RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderG722);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_G722_INCLUDE_AUDIO_ENCODER_G722_H_
diff --git a/webrtc/modules/audio_coding/codecs/g722/include/g722_interface.h b/webrtc/modules/audio_coding/codecs/g722/include/g722_interface.h
deleted file mode 100644
index 5a46ef2ad5..0000000000
--- a/webrtc/modules/audio_coding/codecs/g722/include/g722_interface.h
+++ /dev/null
@@ -1,182 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef MODULES_AUDIO_CODING_CODECS_G722_MAIN_INCLUDE_G722_INTERFACE_H_
-#define MODULES_AUDIO_CODING_CODECS_G722_MAIN_INCLUDE_G722_INTERFACE_H_
-
-#include "webrtc/typedefs.h"
-
-/*
- * Solution to support multiple instances
- */
-
-typedef struct WebRtcG722EncInst G722EncInst;
-typedef struct WebRtcG722DecInst G722DecInst;
-
-/*
- * Comfort noise constants
- */
-
-#define G722_WEBRTC_SPEECH 1
-#define G722_WEBRTC_CNG 2
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-
-/****************************************************************************
- * WebRtcG722_CreateEncoder(...)
- *
- * Create memory used for G722 encoder
- *
- * Input:
- * - G722enc_inst : G722 instance for encoder
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int16_t WebRtcG722_CreateEncoder(G722EncInst **G722enc_inst);
-
-
-/****************************************************************************
- * WebRtcG722_EncoderInit(...)
- *
- * This function initializes a G722 instance
- *
- * Input:
- * - G722enc_inst : G722 instance, i.e. the user that should receive
- * be initialized
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int16_t WebRtcG722_EncoderInit(G722EncInst *G722enc_inst);
-
-
-/****************************************************************************
- * WebRtcG722_FreeEncoder(...)
- *
- * Free the memory used for G722 encoder
- *
- * Input:
- * - G722enc_inst : G722 instance for encoder
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int WebRtcG722_FreeEncoder(G722EncInst *G722enc_inst);
-
-
-
-/****************************************************************************
- * WebRtcG722_Encode(...)
- *
- * This function encodes G722 encoded data.
- *
- * Input:
- * - G722enc_inst : G722 instance, i.e. the user that should encode
- * a packet
- * - speechIn : Input speech vector
- * - len : Samples in speechIn
- *
- * Output:
- * - encoded : The encoded data vector
- *
- * Return value : Length (in bytes) of coded data
- */
-
-size_t WebRtcG722_Encode(G722EncInst* G722enc_inst,
- const int16_t* speechIn,
- size_t len,
- uint8_t* encoded);
-
-
-/****************************************************************************
- * WebRtcG722_CreateDecoder(...)
- *
- * Create memory used for G722 encoder
- *
- * Input:
- * - G722dec_inst : G722 instance for decoder
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int16_t WebRtcG722_CreateDecoder(G722DecInst **G722dec_inst);
-
-/****************************************************************************
- * WebRtcG722_DecoderInit(...)
- *
- * This function initializes a G722 instance
- *
- * Input:
- * - inst : G722 instance
- */
-
-void WebRtcG722_DecoderInit(G722DecInst* inst);
-
-/****************************************************************************
- * WebRtcG722_FreeDecoder(...)
- *
- * Free the memory used for G722 decoder
- *
- * Input:
- * - G722dec_inst : G722 instance for decoder
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcG722_FreeDecoder(G722DecInst *G722dec_inst);
-
-
-/****************************************************************************
- * WebRtcG722_Decode(...)
- *
- * This function decodes a packet with G729 frame(s). Output speech length
- * will be a multiple of 80 samples (80*frames/packet).
- *
- * Input:
- * - G722dec_inst : G722 instance, i.e. the user that should decode
- * a packet
- * - encoded : Encoded G722 frame(s)
- * - len : Bytes in encoded vector
- *
- * Output:
- * - decoded : The decoded vector
- * - speechType : 1 normal, 2 CNG (Since G722 does not have its own
- * DTX/CNG scheme it should always return 1)
- *
- * Return value : Samples in decoded vector
- */
-
-size_t WebRtcG722_Decode(G722DecInst *G722dec_inst,
- const uint8_t* encoded,
- size_t len,
- int16_t *decoded,
- int16_t *speechType);
-
-/****************************************************************************
- * WebRtcG722_Version(...)
- *
- * Get a string with the current version of the codec
- */
-
-int16_t WebRtcG722_Version(char *versionStr, short len);
-
-
-#ifdef __cplusplus
-}
-#endif
-
-
-#endif /* MODULES_AUDIO_CODING_CODECS_G722_MAIN_INCLUDE_G722_INCLUDE_H_ */
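The deleted header above documents the G722 C API that now lives at the non-"include/" path. Below is a minimal round-trip sketch of that API, assuming one 10 ms frame at 16 kHz (160 samples in, 80 bytes out at 64 kbit/s); the helper name and buffer sizes are illustrative, not part of this patch.

#include <stdint.h>

#include "webrtc/modules/audio_coding/codecs/g722/g722_interface.h"

// Encode and decode one 10 ms frame; returns 0 on success.
int G722RoundTripSketch(const int16_t speech[160], int16_t out[160]) {
  G722EncInst* enc = NULL;
  G722DecInst* dec = NULL;
  if (WebRtcG722_CreateEncoder(&enc) != 0 ||
      WebRtcG722_CreateDecoder(&dec) != 0)
    return -1;  // Sketch: a real caller would also free on partial failure.
  WebRtcG722_EncoderInit(enc);
  WebRtcG722_DecoderInit(dec);

  uint8_t encoded[80];  // G722 halves the payload: 160 samples -> 80 bytes.
  size_t bytes = WebRtcG722_Encode(enc, speech, 160, encoded);

  // speechType is always 1 (speech): G722 has no DTX/CNG scheme of its own.
  int16_t speech_type = 0;
  size_t samples = WebRtcG722_Decode(dec, encoded, bytes, out, &speech_type);

  WebRtcG722_FreeEncoder(enc);
  WebRtcG722_FreeDecoder(dec);
  return samples == 160 ? 0 : -1;
}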
diff --git a/webrtc/modules/audio_coding/codecs/g722/test/testG722.cc b/webrtc/modules/audio_coding/codecs/g722/test/testG722.cc
index b473c138c6..c55a2eb357 100644
--- a/webrtc/modules/audio_coding/codecs/g722/test/testG722.cc
+++ b/webrtc/modules/audio_coding/codecs/g722/test/testG722.cc
@@ -18,7 +18,7 @@
#include "webrtc/typedefs.h"
/* include API */
-#include "g722_interface.h"
+#include "webrtc/modules/audio_coding/codecs/g722/g722_interface.h"
/* Runtime statistics */
#include <time.h>
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.cc b/webrtc/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.cc
index ba6284f33d..9ae0e1a95e 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.cc
+++ b/webrtc/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.cc
@@ -8,10 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/codecs/ilbc/include/audio_decoder_ilbc.h"
+#include "webrtc/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h"
#include "webrtc/base/checks.h"
-#include "webrtc/modules/audio_coding/codecs/ilbc/include/ilbc.h"
+#include "webrtc/modules/audio_coding/codecs/ilbc/ilbc.h"
namespace webrtc {
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h b/webrtc/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h
new file mode 100644
index 0000000000..e890635da0
--- /dev/null
+++ b/webrtc/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_AUDIO_DECODER_ILBC_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_AUDIO_DECODER_ILBC_H_
+
+#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
+
+typedef struct iLBC_decinst_t_ IlbcDecoderInstance;
+
+namespace webrtc {
+
+class AudioDecoderIlbc final : public AudioDecoder {
+ public:
+ AudioDecoderIlbc();
+ ~AudioDecoderIlbc() override;
+ bool HasDecodePlc() const override;
+ size_t DecodePlc(size_t num_frames, int16_t* decoded) override;
+ void Reset() override;
+ size_t Channels() const override;
+
+ protected:
+ int DecodeInternal(const uint8_t* encoded,
+ size_t encoded_len,
+ int sample_rate_hz,
+ int16_t* decoded,
+ SpeechType* speech_type) override;
+
+ private:
+ IlbcDecoderInstance* dec_state_;
+ RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoderIlbc);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_AUDIO_DECODER_ILBC_H_
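Of the decoders touched by this patch, iLBC implements codec-level packet-loss concealment, so HasDecodePlc() returns true and DecodePlc() can synthesize audio for lost frames. A hedged sketch against the public interface above; the helper and the 240-sample buffer (one 30 ms frame at 8 kHz) are illustrative.

#include "webrtc/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h"

void ConcealOneLostFrameSketch(webrtc::AudioDecoderIlbc* decoder) {
  int16_t concealed[240];  // Room for one 30 ms frame at 8 kHz.
  if (decoder->HasDecodePlc()) {
    // Returns the number of synthesized samples (160 or 240 per frame,
    // depending on the configured frame size).
    size_t samples = decoder->DecodePlc(1, concealed);
    (void)samples;
  }
}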
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.cc b/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.cc
index 065dc06817..ddd6dde31c 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.cc
+++ b/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.cc
@@ -8,13 +8,13 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/codecs/ilbc/include/audio_encoder_ilbc.h"
+#include "webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h"
-#include <cstring>
+#include <algorithm>
#include <limits>
#include "webrtc/base/checks.h"
#include "webrtc/common_types.h"
-#include "webrtc/modules/audio_coding/codecs/ilbc/include/ilbc.h"
+#include "webrtc/modules/audio_coding/codecs/ilbc/ilbc.h"
namespace webrtc {
@@ -64,7 +64,7 @@ int AudioEncoderIlbc::SampleRateHz() const {
return kSampleRateHz;
}
-int AudioEncoderIlbc::NumChannels() const {
+size_t AudioEncoderIlbc::NumChannels() const {
return 1;
}
@@ -91,7 +91,7 @@ int AudioEncoderIlbc::GetTargetBitrate() const {
AudioEncoder::EncodedInfo AudioEncoderIlbc::EncodeInternal(
uint32_t rtp_timestamp,
- const int16_t* audio,
+ rtc::ArrayView<const int16_t> audio,
size_t max_encoded_bytes,
uint8_t* encoded) {
RTC_DCHECK_GE(max_encoded_bytes, RequiredOutputSizeBytes());
@@ -101,9 +101,9 @@ AudioEncoder::EncodedInfo AudioEncoderIlbc::EncodeInternal(
first_timestamp_in_buffer_ = rtp_timestamp;
// Buffer input.
- std::memcpy(input_buffer_ + kSampleRateHz / 100 * num_10ms_frames_buffered_,
- audio,
- kSampleRateHz / 100 * sizeof(audio[0]));
+ RTC_DCHECK_EQ(static_cast<size_t>(kSampleRateHz / 100), audio.size());
+ std::copy(audio.cbegin(), audio.cend(),
+ input_buffer_ + kSampleRateHz / 100 * num_10ms_frames_buffered_);
// If we don't yet have enough buffered input for a whole packet, we're done
// for now.
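The rewritten buffering step works because rtc::ArrayView carries its own length, so the fixed 10 ms frame size becomes a checkable invariant instead of an implicit memcpy argument. A standalone sketch of the same pattern; the function name is illustrative and a boolean return stands in for the DCHECK.

#include <cstddef>
#include <cstdint>
#include <vector>

// Append one 10 ms frame to a flat sample buffer; the view knows its size.
template <typename View>
bool AppendFrameSketch(const View& audio, size_t samples_per_10ms,
                       std::vector<int16_t>* buffer) {
  if (audio.size() != samples_per_10ms)
    return false;  // Stand-in for RTC_DCHECK_EQ in the real code.
  buffer->insert(buffer->end(), audio.cbegin(), audio.cend());
  return true;
}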
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h b/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h
new file mode 100644
index 0000000000..102a274642
--- /dev/null
+++ b/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_AUDIO_ENCODER_ILBC_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_AUDIO_ENCODER_ILBC_H_
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/audio_coding/codecs/audio_encoder.h"
+#include "webrtc/modules/audio_coding/codecs/ilbc/ilbc.h"
+
+namespace webrtc {
+
+struct CodecInst;
+
+class AudioEncoderIlbc final : public AudioEncoder {
+ public:
+ struct Config {
+ bool IsOk() const;
+
+ int payload_type = 102;
+ int frame_size_ms = 30; // Valid values are 20, 30, 40, and 60 ms.
+ // Note that frame size 40 ms produces encodings with two 20 ms frames in
+ // them, and frame size 60 ms consists of two 30 ms frames.
+ };
+
+ explicit AudioEncoderIlbc(const Config& config);
+ explicit AudioEncoderIlbc(const CodecInst& codec_inst);
+ ~AudioEncoderIlbc() override;
+
+ size_t MaxEncodedBytes() const override;
+ int SampleRateHz() const override;
+ size_t NumChannels() const override;
+ size_t Num10MsFramesInNextPacket() const override;
+ size_t Max10MsFramesInAPacket() const override;
+ int GetTargetBitrate() const override;
+ EncodedInfo EncodeInternal(uint32_t rtp_timestamp,
+ rtc::ArrayView<const int16_t> audio,
+ size_t max_encoded_bytes,
+ uint8_t* encoded) override;
+ void Reset() override;
+
+ private:
+ size_t RequiredOutputSizeBytes() const;
+
+ static const size_t kMaxSamplesPerPacket = 480;
+ const Config config_;
+ const size_t num_10ms_frames_per_packet_;
+ size_t num_10ms_frames_buffered_;
+ uint32_t first_timestamp_in_buffer_;
+ int16_t input_buffer_[kMaxSamplesPerPacket];
+ IlbcEncoderInstance* encoder_;
+ RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderIlbc);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_AUDIO_ENCODER_ILBC_H_
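A hedged construction sketch for the header above: Config carries usable defaults, so a caller sets only the fields it changes and gates on IsOk(). The factory function is illustrative, not part of the patch.

#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h"

rtc::scoped_ptr<webrtc::AudioEncoderIlbc> MakeIlbcEncoderSketch() {
  webrtc::AudioEncoderIlbc::Config config;
  config.frame_size_ms = 60;  // Packs two 30 ms iLBC frames per packet.
  if (!config.IsOk())         // Valid sizes: 20, 30, 40, and 60 ms.
    return rtc::scoped_ptr<webrtc::AudioEncoderIlbc>();
  return rtc::scoped_ptr<webrtc::AudioEncoderIlbc>(
      new webrtc::AudioEncoderIlbc(config));
}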
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/ilbc.gypi b/webrtc/modules/audio_coding/codecs/ilbc/ilbc.gypi
index ac9f2e7b39..ffb0574588 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/ilbc.gypi
+++ b/webrtc/modules/audio_coding/codecs/ilbc/ilbc.gypi
@@ -15,24 +15,13 @@
'<(webrtc_root)/common_audio/common_audio.gyp:common_audio',
'audio_encoder_interface',
],
- 'include_dirs': [
- 'include',
- '<(webrtc_root)',
- ],
- 'direct_dependent_settings': {
- 'include_dirs': [
- 'include',
- '<(webrtc_root)',
- ],
- },
'sources': [
- 'include/audio_decoder_ilbc.h',
- 'include/audio_encoder_ilbc.h',
- 'include/ilbc.h',
'abs_quant.c',
'abs_quant_loop.c',
'audio_decoder_ilbc.cc',
+ 'audio_decoder_ilbc.h',
'audio_encoder_ilbc.cc',
+ 'audio_encoder_ilbc.h',
'augmented_cb_corr.c',
'bw_expand.c',
'cb_construct.c',
@@ -65,6 +54,7 @@
'hp_input.c',
'hp_output.c',
'ilbc.c',
+ 'ilbc.h',
'index_conv_dec.c',
'index_conv_enc.c',
'init_decode.c',
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/ilbc.h b/webrtc/modules/audio_coding/codecs/ilbc/ilbc.h
new file mode 100644
index 0000000000..c021f5be52
--- /dev/null
+++ b/webrtc/modules/audio_coding/codecs/ilbc/ilbc.h
@@ -0,0 +1,258 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * ilbc.h
+ *
+ * This header file contains all of the APIs for iLBC.

+ *
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_ILBC_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_ILBC_H_
+
+#include <stddef.h>
+
+/*
+ * Define the fixpoint numeric formats
+ */
+
+#include "webrtc/typedefs.h"
+
+/*
+ * Solution to support multiple instances
+ * The caller has to cast the instance to the proper type
+ */
+
+typedef struct iLBC_encinst_t_ IlbcEncoderInstance;
+
+typedef struct iLBC_decinst_t_ IlbcDecoderInstance;
+
+/*
+ * Comfort noise constants
+ */
+
+#define ILBC_SPEECH 1
+#define ILBC_CNG 2
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+ /****************************************************************************
+ * WebRtcIlbcfix_XxxAssign(...)
+ *
+ * These functions assign the encoder/decoder instance to the specified
+ * memory location
+ *
+ * Input:
+ * - XXX_xxxinst : Pointer to created instance that should be
+ * assigned
+ * - ILBCXXX_inst_Addr : Pointer to the desired memory space
+ * - size : The size that this structure occupies (in Word16)
+ *
+ * Return value : 0 - Ok
+ * -1 - Error
+ */
+
+ int16_t WebRtcIlbcfix_EncoderAssign(IlbcEncoderInstance **iLBC_encinst,
+ int16_t *ILBCENC_inst_Addr,
+ int16_t *size);
+ int16_t WebRtcIlbcfix_DecoderAssign(IlbcDecoderInstance **iLBC_decinst,
+ int16_t *ILBCDEC_inst_Addr,
+ int16_t *size);
+
+
+ /****************************************************************************
+ * WebRtcIlbcfix_XxxCreate(...)
+ *
+ * These functions create an instance of the specified structure
+ *
+ * Input:
+ * - XXX_inst : Pointer to the instance that should be created
+ *
+ * Return value : 0 - Ok
+ * -1 - Error
+ */
+
+ int16_t WebRtcIlbcfix_EncoderCreate(IlbcEncoderInstance **iLBC_encinst);
+ int16_t WebRtcIlbcfix_DecoderCreate(IlbcDecoderInstance **iLBC_decinst);
+
+ /****************************************************************************
+ * WebRtcIlbcfix_XxxFree(...)
+ *
+ * These functions free the dynamic memory of a specified instance
+ *
+ * Input:
+ * - XXX_inst : Pointer to created instance that should be freed
+ *
+ * Return value : 0 - Ok
+ * -1 - Error
+ */
+
+ int16_t WebRtcIlbcfix_EncoderFree(IlbcEncoderInstance *iLBC_encinst);
+ int16_t WebRtcIlbcfix_DecoderFree(IlbcDecoderInstance *iLBC_decinst);
+
+
+ /****************************************************************************
+ * WebRtcIlbcfix_EncoderInit(...)
+ *
+ * This function initializes an iLBC encoder instance
+ *
+ * Input:
+ * - iLBCenc_inst : iLBC encoder instance that should be initialized
+ * - frameLen : The frame length of the codec 20/30 (ms)
+ *
+ * Return value : 0 - Ok
+ * -1 - Error
+ */
+
+ int16_t WebRtcIlbcfix_EncoderInit(IlbcEncoderInstance *iLBCenc_inst,
+ int16_t frameLen);
+
+ /****************************************************************************
+ * WebRtcIlbcfix_Encode(...)
+ *
+ * This function encodes one iLBC frame. The input speech length has to
+ * be a multiple of the frame length.
+ *
+ * Input:
+ * - iLBCenc_inst : iLBC instance, i.e. the user that should encode
+ * a packet
+ * - speechIn : Input speech vector
+ * - len : Samples in speechIn (160, 240, 320 or 480)
+ *
+ * Output:
+ * - encoded : The encoded data vector
+ *
+ * Return value : >0 - Length (in bytes) of coded data
+ * -1 - Error
+ */
+
+ int WebRtcIlbcfix_Encode(IlbcEncoderInstance *iLBCenc_inst,
+ const int16_t *speechIn,
+ size_t len,
+ uint8_t* encoded);
+
+ /****************************************************************************
+ * WebRtcIlbcfix_DecoderInit(...)
+ *
+ * This function initializes an iLBC decoder instance with either 20 or 30 ms
+ * frames. Alternatively, the WebRtcIlbcfix_DecoderInit_XXms variants below can
+ * be used; they do not require passing the frame length as a variable.
+ *
+ * Input:
+ * - iLBCdec_inst : iLBC decoder instance
+ * - frameLen : The frame length of the codec 20/30 (ms)
+ *
+ * Return value : 0 - Ok
+ * -1 - Error
+ */
+
+ int16_t WebRtcIlbcfix_DecoderInit(IlbcDecoderInstance *iLBCdec_inst,
+ int16_t frameLen);
+ void WebRtcIlbcfix_DecoderInit20Ms(IlbcDecoderInstance* iLBCdec_inst);
+ void WebRtcIlbcfix_Decoderinit30Ms(IlbcDecoderInstance* iLBCdec_inst);
+
+ /****************************************************************************
+ * WebRtcIlbcfix_Decode(...)
+ *
+ * This function decodes a packet with iLBC frame(s). Output speech length
+ * will be a multiple of 160 or 240 samples ((160 or 240)*frames/packet).
+ *
+ * Input:
+ * - iLBCdec_inst : iLBC instance, i.e. the user that should decode
+ * a packet
+ * - encoded : Encoded iLBC frame(s)
+ * - len : Bytes in encoded vector
+ *
+ * Output:
+ * - decoded : The decoded vector
+ * - speechType : 1 normal, 2 CNG
+ *
+ * Return value : >0 - Samples in decoded vector
+ * -1 - Error
+ */
+
+ int WebRtcIlbcfix_Decode(IlbcDecoderInstance* iLBCdec_inst,
+ const uint8_t* encoded,
+ size_t len,
+ int16_t* decoded,
+ int16_t* speechType);
+ int WebRtcIlbcfix_Decode20Ms(IlbcDecoderInstance* iLBCdec_inst,
+ const uint8_t* encoded,
+ size_t len,
+ int16_t* decoded,
+ int16_t* speechType);
+ int WebRtcIlbcfix_Decode30Ms(IlbcDecoderInstance* iLBCdec_inst,
+ const uint8_t* encoded,
+ size_t len,
+ int16_t* decoded,
+ int16_t* speechType);
+
+ /****************************************************************************
+ * WebRtcIlbcfix_DecodePlc(...)
+ *
+ * This function conducts PLC for iLBC frame(s). Output speech length
+ * will be a multiple of 160 or 240 samples.
+ *
+ * Input:
+ * - iLBCdec_inst : iLBC instance, i.e. the user that should perform
+ * a PLC
+ * - noOfLostFrames : Number of PLC frames to produce
+ *
+ * Output:
+ * - decoded : The "decoded" vector
+ *
+ * Return value : Samples in decoded PLC vector
+ */
+
+ size_t WebRtcIlbcfix_DecodePlc(IlbcDecoderInstance *iLBCdec_inst,
+ int16_t *decoded,
+ size_t noOfLostFrames);
+
+ /****************************************************************************
+ * WebRtcIlbcfix_NetEqPlc(...)
+ *
+ * This function updates the decoder when a packet loss has occurred, but it
+ * does not produce any PLC data. The function can be used if another PLC
+ * method is used (e.g. NetEq).
+ *
+ * Input:
+ * - iLBCdec_inst : iLBC instance that should be updated
+ * - noOfLostFrames : Number of lost frames
+ *
+ * Output:
+ * - decoded : The "decoded" vector (nothing in this case)
+ *
+ * Return value : Samples in decoded PLC vector
+ */
+
+ size_t WebRtcIlbcfix_NetEqPlc(IlbcDecoderInstance *iLBCdec_inst,
+ int16_t *decoded,
+ size_t noOfLostFrames);
+
+ /****************************************************************************
+ * WebRtcIlbcfix_version(...)
+ *
+ * This function returns the version number of iLBC
+ *
+ * Output:
+ * - version : Version number of iLBC (maximum 20 characters)
+ */
+
+ void WebRtcIlbcfix_version(char *version);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_ILBC_H_
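A minimal round-trip sketch of the C API above, assuming the 20 ms mode at 8 kHz (160 samples in, 38 bytes out at 15.2 kbit/s); the helper name and buffer sizes are illustrative, not from this patch.

#include <stdint.h>

#include "webrtc/modules/audio_coding/codecs/ilbc/ilbc.h"

// Encode and decode one 20 ms frame; returns 0 on success.
int IlbcRoundTripSketch(const int16_t speech[160], int16_t out[160]) {
  IlbcEncoderInstance* enc = NULL;
  IlbcDecoderInstance* dec = NULL;
  if (WebRtcIlbcfix_EncoderCreate(&enc) != 0 ||
      WebRtcIlbcfix_DecoderCreate(&dec) != 0)
    return -1;
  WebRtcIlbcfix_EncoderInit(enc, 20);  // 20 ms frames.
  WebRtcIlbcfix_DecoderInit(dec, 20);

  uint8_t encoded[50];  // Large enough for the 30 ms mode as well.
  int bytes = WebRtcIlbcfix_Encode(enc, speech, 160, encoded);
  if (bytes < 0)
    return -1;

  int16_t speech_type = 0;  // ILBC_SPEECH (1) or ILBC_CNG (2) on return.
  int samples = WebRtcIlbcfix_Decode(dec, encoded, static_cast<size_t>(bytes),
                                     out, &speech_type);

  WebRtcIlbcfix_EncoderFree(enc);
  WebRtcIlbcfix_DecoderFree(dec);
  return samples == 160 ? 0 : -1;
}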
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/include/audio_decoder_ilbc.h b/webrtc/modules/audio_coding/codecs/ilbc/include/audio_decoder_ilbc.h
deleted file mode 100644
index fd52da7986..0000000000
--- a/webrtc/modules/audio_coding/codecs/ilbc/include/audio_decoder_ilbc.h
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_INCLUDE_AUDIO_DECODER_ILBC_H_
-#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_INCLUDE_AUDIO_DECODER_ILBC_H_
-
-#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
-
-typedef struct iLBC_decinst_t_ IlbcDecoderInstance;
-
-namespace webrtc {
-
-class AudioDecoderIlbc final : public AudioDecoder {
- public:
- AudioDecoderIlbc();
- ~AudioDecoderIlbc() override;
- bool HasDecodePlc() const override;
- size_t DecodePlc(size_t num_frames, int16_t* decoded) override;
- void Reset() override;
- size_t Channels() const override;
-
- protected:
- int DecodeInternal(const uint8_t* encoded,
- size_t encoded_len,
- int sample_rate_hz,
- int16_t* decoded,
- SpeechType* speech_type) override;
-
- private:
- IlbcDecoderInstance* dec_state_;
- RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoderIlbc);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_INCLUDE_AUDIO_DECODER_ILBC_H_
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/include/audio_encoder_ilbc.h b/webrtc/modules/audio_coding/codecs/ilbc/include/audio_encoder_ilbc.h
deleted file mode 100644
index 2bb3101fd4..0000000000
--- a/webrtc/modules/audio_coding/codecs/ilbc/include/audio_encoder_ilbc.h
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_INCLUDE_AUDIO_ENCODER_ILBC_H_
-#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_INCLUDE_AUDIO_ENCODER_ILBC_H_
-
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/audio_coding/codecs/audio_encoder.h"
-#include "webrtc/modules/audio_coding/codecs/ilbc/include/ilbc.h"
-
-namespace webrtc {
-
-struct CodecInst;
-
-class AudioEncoderIlbc final : public AudioEncoder {
- public:
- struct Config {
- bool IsOk() const;
-
- int payload_type = 102;
- int frame_size_ms = 30; // Valid values are 20, 30, 40, and 60 ms.
- // Note that frame size 40 ms produces encodings with two 20 ms frames in
- // them, and frame size 60 ms consists of two 30 ms frames.
- };
-
- explicit AudioEncoderIlbc(const Config& config);
- explicit AudioEncoderIlbc(const CodecInst& codec_inst);
- ~AudioEncoderIlbc() override;
-
- size_t MaxEncodedBytes() const override;
- int SampleRateHz() const override;
- int NumChannels() const override;
- size_t Num10MsFramesInNextPacket() const override;
- size_t Max10MsFramesInAPacket() const override;
- int GetTargetBitrate() const override;
- EncodedInfo EncodeInternal(uint32_t rtp_timestamp,
- const int16_t* audio,
- size_t max_encoded_bytes,
- uint8_t* encoded) override;
- void Reset() override;
-
- private:
- size_t RequiredOutputSizeBytes() const;
-
- static const size_t kMaxSamplesPerPacket = 480;
- const Config config_;
- const size_t num_10ms_frames_per_packet_;
- size_t num_10ms_frames_buffered_;
- uint32_t first_timestamp_in_buffer_;
- int16_t input_buffer_[kMaxSamplesPerPacket];
- IlbcEncoderInstance* encoder_;
- RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderIlbc);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_INCLUDE_AUDIO_ENCODER_ILBC_H_
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/include/ilbc.h b/webrtc/modules/audio_coding/codecs/ilbc/include/ilbc.h
deleted file mode 100644
index 3be9142c8c..0000000000
--- a/webrtc/modules/audio_coding/codecs/ilbc/include/ilbc.h
+++ /dev/null
@@ -1,258 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * ilbc.h
- *
- * This header file contains all of the API's for iLBC.
- *
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_INCLUDE_ILBC_H_
-#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_INCLUDE_ILBC_H_
-
-#include <stddef.h>
-
-/*
- * Define the fixpoint numeric formats
- */
-
-#include "webrtc/typedefs.h"
-
-/*
- * Solution to support multiple instances
- * Customer has to cast instance to proper type
- */
-
-typedef struct iLBC_encinst_t_ IlbcEncoderInstance;
-
-typedef struct iLBC_decinst_t_ IlbcDecoderInstance;
-
-/*
- * Comfort noise constants
- */
-
-#define ILBC_SPEECH 1
-#define ILBC_CNG 2
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
- /****************************************************************************
- * WebRtcIlbcfix_XxxAssign(...)
- *
- * These functions assigns the encoder/decoder instance to the specified
- * memory location
- *
- * Input:
- * - XXX_xxxinst : Pointer to created instance that should be
- * assigned
- * - ILBCXXX_inst_Addr : Pointer to the desired memory space
- * - size : The size that this structure occupies (in Word16)
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
- int16_t WebRtcIlbcfix_EncoderAssign(IlbcEncoderInstance **iLBC_encinst,
- int16_t *ILBCENC_inst_Addr,
- int16_t *size);
- int16_t WebRtcIlbcfix_DecoderAssign(IlbcDecoderInstance **iLBC_decinst,
- int16_t *ILBCDEC_inst_Addr,
- int16_t *size);
-
-
- /****************************************************************************
- * WebRtcIlbcfix_XxxAssign(...)
- *
- * These functions create a instance to the specified structure
- *
- * Input:
- * - XXX_inst : Pointer to created instance that should be created
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
- int16_t WebRtcIlbcfix_EncoderCreate(IlbcEncoderInstance **iLBC_encinst);
- int16_t WebRtcIlbcfix_DecoderCreate(IlbcDecoderInstance **iLBC_decinst);
-
- /****************************************************************************
- * WebRtcIlbcfix_XxxFree(...)
- *
- * These functions frees the dynamic memory of a specified instance
- *
- * Input:
- * - XXX_inst : Pointer to created instance that should be freed
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
- int16_t WebRtcIlbcfix_EncoderFree(IlbcEncoderInstance *iLBC_encinst);
- int16_t WebRtcIlbcfix_DecoderFree(IlbcDecoderInstance *iLBC_decinst);
-
-
- /****************************************************************************
- * WebRtcIlbcfix_EncoderInit(...)
- *
- * This function initializes a iLBC instance
- *
- * Input:
- * - iLBCenc_inst : iLBC instance, i.e. the user that should receive
- * be initialized
- * - frameLen : The frame length of the codec 20/30 (ms)
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
- int16_t WebRtcIlbcfix_EncoderInit(IlbcEncoderInstance *iLBCenc_inst,
- int16_t frameLen);
-
- /****************************************************************************
- * WebRtcIlbcfix_Encode(...)
- *
- * This function encodes one iLBC frame. Input speech length has be a
- * multiple of the frame length.
- *
- * Input:
- * - iLBCenc_inst : iLBC instance, i.e. the user that should encode
- * a package
- * - speechIn : Input speech vector
- * - len : Samples in speechIn (160, 240, 320 or 480)
- *
- * Output:
- * - encoded : The encoded data vector
- *
- * Return value : >0 - Length (in bytes) of coded data
- * -1 - Error
- */
-
- int WebRtcIlbcfix_Encode(IlbcEncoderInstance *iLBCenc_inst,
- const int16_t *speechIn,
- size_t len,
- uint8_t* encoded);
-
- /****************************************************************************
- * WebRtcIlbcfix_DecoderInit(...)
- *
- * This function initializes a iLBC instance with either 20 or 30 ms frames
- * Alternatively the WebRtcIlbcfix_DecoderInit_XXms can be used. Then it's
- * not needed to specify the frame length with a variable.
- *
- * Input:
- * - IlbcDecoderInstance : iLBC decoder instance
- * - frameLen : The frame length of the codec 20/30 (ms)
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
- int16_t WebRtcIlbcfix_DecoderInit(IlbcDecoderInstance *iLBCdec_inst,
- int16_t frameLen);
- void WebRtcIlbcfix_DecoderInit20Ms(IlbcDecoderInstance* iLBCdec_inst);
- void WebRtcIlbcfix_Decoderinit30Ms(IlbcDecoderInstance* iLBCdec_inst);
-
- /****************************************************************************
- * WebRtcIlbcfix_Decode(...)
- *
- * This function decodes a packet with iLBC frame(s). Output speech length
- * will be a multiple of 160 or 240 samples ((160 or 240)*frames/packet).
- *
- * Input:
- * - iLBCdec_inst : iLBC instance, i.e. the user that should decode
- * a packet
- * - encoded : Encoded iLBC frame(s)
- * - len : Bytes in encoded vector
- *
- * Output:
- * - decoded : The decoded vector
- * - speechType : 1 normal, 2 CNG
- *
- * Return value : >0 - Samples in decoded vector
- * -1 - Error
- */
-
- int WebRtcIlbcfix_Decode(IlbcDecoderInstance* iLBCdec_inst,
- const uint8_t* encoded,
- size_t len,
- int16_t* decoded,
- int16_t* speechType);
- int WebRtcIlbcfix_Decode20Ms(IlbcDecoderInstance* iLBCdec_inst,
- const uint8_t* encoded,
- size_t len,
- int16_t* decoded,
- int16_t* speechType);
- int WebRtcIlbcfix_Decode30Ms(IlbcDecoderInstance* iLBCdec_inst,
- const uint8_t* encoded,
- size_t len,
- int16_t* decoded,
- int16_t* speechType);
-
- /****************************************************************************
- * WebRtcIlbcfix_DecodePlc(...)
- *
- * This function conducts PLC for iLBC frame(s). Output speech length
- * will be a multiple of 160 or 240 samples.
- *
- * Input:
- * - iLBCdec_inst : iLBC instance, i.e. the user that should perform
- * a PLC
- * - noOfLostFrames : Number of PLC frames to produce
- *
- * Output:
- * - decoded : The "decoded" vector
- *
- * Return value : Samples in decoded PLC vector
- */
-
- size_t WebRtcIlbcfix_DecodePlc(IlbcDecoderInstance *iLBCdec_inst,
- int16_t *decoded,
- size_t noOfLostFrames);
-
- /****************************************************************************
- * WebRtcIlbcfix_NetEqPlc(...)
- *
- * This function updates the decoder when a packet loss has occured, but it
- * does not produce any PLC data. Function can be used if another PLC method
- * is used (i.e NetEq).
- *
- * Input:
- * - iLBCdec_inst : iLBC instance that should be updated
- * - noOfLostFrames : Number of lost frames
- *
- * Output:
- * - decoded : The "decoded" vector (nothing in this case)
- *
- * Return value : Samples in decoded PLC vector
- */
-
- size_t WebRtcIlbcfix_NetEqPlc(IlbcDecoderInstance *iLBCdec_inst,
- int16_t *decoded,
- size_t noOfLostFrames);
-
- /****************************************************************************
- * WebRtcIlbcfix_version(...)
- *
- * This function returns the version number of iLBC
- *
- * Output:
- * - version : Version number of iLBC (maximum 20 char)
- */
-
- void WebRtcIlbcfix_version(char *version);
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/test/iLBC_test.c b/webrtc/modules/audio_coding/codecs/ilbc/test/iLBC_test.c
index 1199c816d8..b440c7a45f 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/test/iLBC_test.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/test/iLBC_test.c
@@ -19,7 +19,7 @@
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
-#include "ilbc.h"
+#include "webrtc/modules/audio_coding/codecs/ilbc/ilbc.h"
/*---------------------------------------------------------------*
* Main program to test iLBC encoding and decoding
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/test/iLBC_testLib.c b/webrtc/modules/audio_coding/codecs/ilbc/test/iLBC_testLib.c
index f14192c2ae..7ffa4a7d0e 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/test/iLBC_testLib.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/test/iLBC_testLib.c
@@ -21,7 +21,7 @@ iLBC_test.c
#include <stdio.h>
#include <string.h>
#include <time.h>
-#include "ilbc.h"
+#include "webrtc/modules/audio_coding/codecs/ilbc/ilbc.h"
//#define JUNK_DATA
#ifdef JUNK_DATA
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/test/iLBC_testprogram.c b/webrtc/modules/audio_coding/codecs/ilbc/test/iLBC_testprogram.c
index 303ede3e63..5454948287 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/test/iLBC_testprogram.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/test/iLBC_testprogram.c
@@ -21,13 +21,13 @@
#include <stdio.h>
#include <string.h>
-#include "defines.h"
-#include "nit_encode.h"
-#include "encode.h"
-#include "init_decode.h"
-#include "decode.h"
-#include "constants.h"
-#include "ilbc.h"
+#include "webrtc/modules/audio_coding/codecs/ilbc/defines.h"
+#include "webrtc/modules/audio_coding/codecs/ilbc/nit_encode.h"
+#include "webrtc/modules/audio_coding/codecs/ilbc/encode.h"
+#include "webrtc/modules/audio_coding/codecs/ilbc/init_decode.h"
+#include "webrtc/modules/audio_coding/codecs/ilbc/decode.h"
+#include "webrtc/modules/audio_coding/codecs/ilbc/constants.h"
+#include "webrtc/modules/audio_coding/codecs/ilbc/ilbc.h"
#define ILBCNOOFWORDS_MAX (NO_OF_BYTES_30MS)/2
diff --git a/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h b/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h
index b15ad942df..321dac3567 100644
--- a/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h
+++ b/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h
@@ -56,12 +56,12 @@ class AudioEncoderIsacT final : public AudioEncoder {
size_t MaxEncodedBytes() const override;
int SampleRateHz() const override;
- int NumChannels() const override;
+ size_t NumChannels() const override;
size_t Num10MsFramesInNextPacket() const override;
size_t Max10MsFramesInAPacket() const override;
int GetTargetBitrate() const override;
EncodedInfo EncodeInternal(uint32_t rtp_timestamp,
- const int16_t* audio,
+ rtc::ArrayView<const int16_t> audio,
size_t max_encoded_bytes,
uint8_t* encoded) override;
void Reset() override;
diff --git a/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h b/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h
index 279f80d6fc..d4438cc775 100644
--- a/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h
+++ b/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h
@@ -88,7 +88,7 @@ int AudioEncoderIsacT<T>::SampleRateHz() const {
}
template <typename T>
-int AudioEncoderIsacT<T>::NumChannels() const {
+size_t AudioEncoderIsacT<T>::NumChannels() const {
return 1;
}
@@ -115,7 +115,7 @@ int AudioEncoderIsacT<T>::GetTargetBitrate() const {
template <typename T>
AudioEncoder::EncodedInfo AudioEncoderIsacT<T>::EncodeInternal(
uint32_t rtp_timestamp,
- const int16_t* audio,
+ rtc::ArrayView<const int16_t> audio,
size_t max_encoded_bytes,
uint8_t* encoded) {
if (!packet_in_progress_) {
@@ -127,7 +127,7 @@ AudioEncoder::EncodedInfo AudioEncoderIsacT<T>::EncodeInternal(
IsacBandwidthInfo bwinfo = bwinfo_->Get();
T::SetBandwidthInfo(isac_state_, &bwinfo);
}
- int r = T::Encode(isac_state_, audio, encoded);
+ int r = T::Encode(isac_state_, audio.data(), encoded);
RTC_CHECK_GE(r, 0) << "Encode failed (error code "
<< T::GetErrorCode(isac_state_) << ")";
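The signature change from const int16_t* to rtc::ArrayView<const int16_t> means EncodeInternal now receives the buffer length alongside the pointer; the underlying C entry point still takes a bare pointer, hence audio.data() at the call site. A hypothetical stand-in showing the shape of such a view (not WebRTC's actual ArrayView implementation):

#include <cstddef>

template <typename T>
class ArrayViewSketch {
 public:
  ArrayViewSketch(T* data, size_t size) : data_(data), size_(size) {}
  T* data() const { return data_; }      // For C APIs that take a pointer.
  size_t size() const { return size_; }  // The length travels with it.
  T* begin() const { return data_; }
  T* end() const { return data_ + size_; }

 private:
  T* data_;
  size_t size_;
};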
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/test/isac_speed_test.cc b/webrtc/modules/audio_coding/codecs/isac/fix/test/isac_speed_test.cc
index 632a4fe825..32f36c5261 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/test/isac_speed_test.cc
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/test/isac_speed_test.cc
@@ -92,7 +92,7 @@ float IsacSpeedTest::DecodeABlock(const uint8_t* bit_stream,
value = WebRtcIsacfix_Decode(ISACFIX_main_inst_, bit_stream, encoded_bytes,
out_data, &audio_type);
clocks = clock() - clocks;
- EXPECT_EQ(output_length_sample_, value);
+ EXPECT_EQ(output_length_sample_, static_cast<size_t>(value));
return 1000.0 * clocks / CLOCKS_PER_SEC;
}
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/test/test_iSACfixfloat.c b/webrtc/modules/audio_coding/codecs/isac/fix/test/test_iSACfixfloat.c
index b82af1c059..ac0fa350c9 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/test/test_iSACfixfloat.c
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/test/test_iSACfixfloat.c
@@ -112,12 +112,12 @@ int main(int argc, char* argv[]) {
char version_number[20];
int mode = -1, tmp, nbTest = 0; /*,sss;*/
-#ifdef _DEBUG
+#if !defined(NDEBUG)
FILE* fy;
double kbps;
size_t totalbits = 0;
int totalsmpls = 0;
-#endif /* _DEBUG */
+#endif
/* only one structure used for ISAC encoder */
ISAC_MainStruct* ISAC_main_inst;
@@ -126,12 +126,12 @@ int main(int argc, char* argv[]) {
BottleNeckModel BN_data;
f_bn = NULL;
-#ifdef _DEBUG
+#if !defined(NDEBUG)
fy = fopen("bit_rate.dat", "w");
fclose(fy);
fy = fopen("bytes_frames.dat", "w");
fclose(fy);
-#endif /* _DEBUG */
+#endif
// histfile = fopen("histo.dat", "ab");
// ratefile = fopen("rates.dat", "ab");
@@ -589,7 +589,7 @@ int main(int argc, char* argv[]) {
fprintf(stderr, " \rframe = %d", framecnt);
framecnt++;
-#ifdef _DEBUG
+#if !defined(NDEBUG)
totalsmpls += declen;
totalbits += 8 * stream_len;
@@ -598,15 +598,15 @@ int main(int argc, char* argv[]) {
fprintf(fy, "Frame %i = %0.14f\n", framecnt, kbps);
fclose(fy);
-#endif /* _DEBUG */
+#endif
}
-#ifdef _DEBUG
+#if !defined(NDEBUG)
printf("\n\ntotal bits = %" PRIuS " bits", totalbits);
printf("\nmeasured average bitrate = %0.3f kbits/s",
(double)totalbits * (FS / 1000) / totalsmpls);
printf("\n");
-#endif /* _DEBUG */
+#endif
/* Runtime statistics */
runtime = (double)(clock() / (double)CLOCKS_PER_SEC - starttime);
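The #ifdef _DEBUG blocks above were rewritten because _DEBUG is only defined by MSVC debug runtimes, so the statistics silently vanished on other toolchains; !defined(NDEBUG) follows the standard assert() convention everywhere. A tiny illustration:

#include <stdio.h>

int main(void) {
#if !defined(NDEBUG)
  /* Compiled into debug builds on every toolchain, not just MSVC. */
  fprintf(stderr, "debug statistics enabled\n");
#endif
  return 0;
}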
diff --git a/webrtc/modules/audio_coding/codecs/isac/isac_test.gypi b/webrtc/modules/audio_coding/codecs/isac/isac_test.gypi
index 47944b7f42..54cedb4e18 100644
--- a/webrtc/modules/audio_coding/codecs/isac/isac_test.gypi
+++ b/webrtc/modules/audio_coding/codecs/isac/isac_test.gypi
@@ -25,6 +25,19 @@
'./main/test/simpleKenny.c',
'./main/util/utility.c',
],
+ 'conditions': [
+ ['OS=="win" and clang==1', {
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'AdditionalOptions': [
# Disable warnings that fail the build when compiling with Clang on Windows.
+ # https://bugs.chromium.org/p/webrtc/issues/detail?id=5366
+ '-Wno-format',
+ ],
+ },
+ },
+ }],
+ ], # conditions.
},
# ReleaseTest-API
{
@@ -63,6 +76,5 @@
'./main/util/utility.c',
],
},
-
],
}
diff --git a/webrtc/modules/audio_coding/codecs/isac/isacfix.gypi b/webrtc/modules/audio_coding/codecs/isac/isacfix.gypi
index f10de56c5a..7730d16dc9 100644
--- a/webrtc/modules/audio_coding/codecs/isac/isacfix.gypi
+++ b/webrtc/modules/audio_coding/codecs/isac/isacfix.gypi
@@ -77,11 +77,6 @@
'fix/source/structs.h',
],
'conditions': [
- ['OS!="win"', {
- 'defines': [
- 'WEBRTC_LINUX',
- ],
- }],
['target_arch=="arm" and arm_version>=7', {
'sources': [
'fix/source/lattice_armv7.S',
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/test/ReleaseTest-API/ReleaseTest-API.cc b/webrtc/modules/audio_coding/codecs/isac/main/test/ReleaseTest-API/ReleaseTest-API.cc
index 2e5badd82c..4cef8f7b3b 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/test/ReleaseTest-API/ReleaseTest-API.cc
+++ b/webrtc/modules/audio_coding/codecs/isac/main/test/ReleaseTest-API/ReleaseTest-API.cc
@@ -73,10 +73,10 @@ int main(int argc, char* argv[]) {
FILE* plFile;
int32_t sendBN;
-#ifdef _DEBUG
+#if !defined(NDEBUG)
FILE* fy;
double kbps;
-#endif /* _DEBUG */
+#endif
size_t totalbits = 0;
int totalsmpls = 0;
@@ -103,12 +103,12 @@ int main(int argc, char* argv[]) {
BottleNeckModel BN_data;
-#ifdef _DEBUG
+#if !defined(NDEBUG)
fy = fopen("bit_rate.dat", "w");
fclose(fy);
fy = fopen("bytes_frames.dat", "w");
fclose(fy);
-#endif /* _DEBUG */
+#endif
/* Handling wrong input arguments in the command line */
if ((argc < 3) || (argc > 17)) {
@@ -885,14 +885,14 @@ int main(int argc, char* argv[]) {
totalsmpls += declen;
totalbits += 8 * stream_len;
-#ifdef _DEBUG
+#if !defined(NDEBUG)
kbps = ((double)sampFreqKHz * 1000.) / ((double)cur_framesmpls) * 8.0 *
stream_len / 1000.0; // kbits/s
fy = fopen("bit_rate.dat", "a");
fprintf(fy, "Frame %i = %0.14f\n", framecnt, kbps);
fclose(fy);
-#endif /* _DEBUG */
+#endif
}
printf("\n");
printf("total bits = %" PRIuS " bits\n", totalbits);
diff --git a/webrtc/modules/audio_coding/codecs/mock/mock_audio_encoder.h b/webrtc/modules/audio_coding/codecs/mock/mock_audio_encoder.h
index 95426d89e1..66adde4be1 100644
--- a/webrtc/modules/audio_coding/codecs/mock/mock_audio_encoder.h
+++ b/webrtc/modules/audio_coding/codecs/mock/mock_audio_encoder.h
@@ -24,7 +24,7 @@ class MockAudioEncoder final : public AudioEncoder {
MOCK_METHOD1(Mark, void(std::string desc));
MOCK_CONST_METHOD0(MaxEncodedBytes, size_t());
MOCK_CONST_METHOD0(SampleRateHz, int());
- MOCK_CONST_METHOD0(NumChannels, int());
+ MOCK_CONST_METHOD0(NumChannels, size_t());
MOCK_CONST_METHOD0(RtpTimestampRateHz, int());
MOCK_CONST_METHOD0(Num10MsFramesInNextPacket, size_t());
MOCK_CONST_METHOD0(Max10MsFramesInAPacket, size_t());
@@ -32,7 +32,7 @@ class MockAudioEncoder final : public AudioEncoder {
// Note, we explicitly chose not to create a mock for the Encode method.
MOCK_METHOD4(EncodeInternal,
EncodedInfo(uint32_t timestamp,
- const int16_t* audio,
+ rtc::ArrayView<const int16_t> audio,
size_t max_encoded_bytes,
uint8_t* encoded));
MOCK_METHOD0(Reset, void());
diff --git a/webrtc/modules/audio_coding/codecs/opus/audio_decoder_opus.cc b/webrtc/modules/audio_coding/codecs/opus/audio_decoder_opus.cc
index d1390e2ca4..f64e811afe 100644
--- a/webrtc/modules/audio_coding/codecs/opus/audio_decoder_opus.cc
+++ b/webrtc/modules/audio_coding/codecs/opus/audio_decoder_opus.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/codecs/opus/include/audio_decoder_opus.h"
+#include "webrtc/modules/audio_coding/codecs/opus/audio_decoder_opus.h"
#include "webrtc/base/checks.h"
@@ -17,7 +17,7 @@ namespace webrtc {
AudioDecoderOpus::AudioDecoderOpus(size_t num_channels)
: channels_(num_channels) {
RTC_DCHECK(num_channels == 1 || num_channels == 2);
- WebRtcOpus_DecoderCreate(&dec_state_, static_cast<int>(channels_));
+ WebRtcOpus_DecoderCreate(&dec_state_, channels_);
WebRtcOpus_DecoderInit(dec_state_);
}
diff --git a/webrtc/modules/audio_coding/codecs/opus/audio_decoder_opus.h b/webrtc/modules/audio_coding/codecs/opus/audio_decoder_opus.h
new file mode 100644
index 0000000000..af32a84512
--- /dev/null
+++ b/webrtc/modules/audio_coding/codecs/opus/audio_decoder_opus.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_OPUS_AUDIO_DECODER_OPUS_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_OPUS_AUDIO_DECODER_OPUS_H_
+
+#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
+#include "webrtc/modules/audio_coding/codecs/opus/opus_interface.h"
+
+namespace webrtc {
+
+class AudioDecoderOpus final : public AudioDecoder {
+ public:
+ explicit AudioDecoderOpus(size_t num_channels);
+ ~AudioDecoderOpus() override;
+
+ void Reset() override;
+ int PacketDuration(const uint8_t* encoded, size_t encoded_len) const override;
+ int PacketDurationRedundant(const uint8_t* encoded,
+ size_t encoded_len) const override;
+ bool PacketHasFec(const uint8_t* encoded, size_t encoded_len) const override;
+ size_t Channels() const override;
+
+ protected:
+ int DecodeInternal(const uint8_t* encoded,
+ size_t encoded_len,
+ int sample_rate_hz,
+ int16_t* decoded,
+ SpeechType* speech_type) override;
+ int DecodeRedundantInternal(const uint8_t* encoded,
+ size_t encoded_len,
+ int sample_rate_hz,
+ int16_t* decoded,
+ SpeechType* speech_type) override;
+
+ private:
+ OpusDecInst* dec_state_;
+ const size_t channels_;
+ RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoderOpus);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_OPUS_AUDIO_DECODER_OPUS_H_
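A brief usage sketch for the header above: Opus can piggyback forward error correction for the previous packet inside the current one, which PacketHasFec() detects before the caller decides how to conceal a loss. The helper is illustrative.

#include "webrtc/modules/audio_coding/codecs/opus/audio_decoder_opus.h"

// True if |payload| carries in-band FEC that can stand in for the
// previous (lost) packet.
bool CanRecoverPreviousPacketSketch(const webrtc::AudioDecoderOpus& decoder,
                                    const uint8_t* payload,
                                    size_t payload_len) {
  return decoder.PacketHasFec(payload, payload_len);
}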
diff --git a/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.cc b/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.cc
index eac7412178..707d6c2488 100644
--- a/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.cc
+++ b/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.cc
@@ -8,12 +8,12 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/codecs/opus/include/audio_encoder_opus.h"
+#include "webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/safe_conversions.h"
#include "webrtc/common_types.h"
-#include "webrtc/modules/audio_coding/codecs/opus/include/opus_interface.h"
+#include "webrtc/modules/audio_coding/codecs/opus/opus_interface.h"
namespace webrtc {
@@ -114,7 +114,7 @@ int AudioEncoderOpus::SampleRateHz() const {
return kSampleRateHz;
}
-int AudioEncoderOpus::NumChannels() const {
+size_t AudioEncoderOpus::NumChannels() const {
return config_.num_channels;
}
@@ -132,24 +132,22 @@ int AudioEncoderOpus::GetTargetBitrate() const {
AudioEncoder::EncodedInfo AudioEncoderOpus::EncodeInternal(
uint32_t rtp_timestamp,
- const int16_t* audio,
+ rtc::ArrayView<const int16_t> audio,
size_t max_encoded_bytes,
uint8_t* encoded) {
if (input_buffer_.empty())
first_timestamp_in_buffer_ = rtp_timestamp;
- input_buffer_.insert(input_buffer_.end(), audio,
- audio + SamplesPer10msFrame());
+ RTC_DCHECK_EQ(SamplesPer10msFrame(), audio.size());
+ input_buffer_.insert(input_buffer_.end(), audio.cbegin(), audio.cend());
if (input_buffer_.size() <
- (static_cast<size_t>(Num10msFramesPerPacket()) * SamplesPer10msFrame())) {
+ (Num10msFramesPerPacket() * SamplesPer10msFrame())) {
return EncodedInfo();
}
- RTC_CHECK_EQ(
- input_buffer_.size(),
- static_cast<size_t>(Num10msFramesPerPacket()) * SamplesPer10msFrame());
+ RTC_CHECK_EQ(input_buffer_.size(),
+ Num10msFramesPerPacket() * SamplesPer10msFrame());
int status = WebRtcOpus_Encode(
inst_, &input_buffer_[0],
- rtc::CheckedDivExact(input_buffer_.size(),
- static_cast<size_t>(config_.num_channels)),
+ rtc::CheckedDivExact(input_buffer_.size(), config_.num_channels),
rtc::saturated_cast<int16_t>(max_encoded_bytes), encoded);
RTC_CHECK_GE(status, 0); // Fails only if fed invalid data.
input_buffer_.clear();
@@ -214,11 +212,11 @@ void AudioEncoderOpus::SetTargetBitrate(int bits_per_second) {
RTC_CHECK_EQ(0, WebRtcOpus_SetBitRate(inst_, config_.bitrate_bps));
}
-int AudioEncoderOpus::Num10msFramesPerPacket() const {
- return rtc::CheckedDivExact(config_.frame_size_ms, 10);
+size_t AudioEncoderOpus::Num10msFramesPerPacket() const {
+ return static_cast<size_t>(rtc::CheckedDivExact(config_.frame_size_ms, 10));
}
-int AudioEncoderOpus::SamplesPer10msFrame() const {
+size_t AudioEncoderOpus::SamplesPer10msFrame() const {
return rtc::CheckedDivExact(kSampleRateHz, 100) * config_.num_channels;
}
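Worked numbers for the two helpers above, assuming Opus's fixed 48 kHz clock, the default 20 ms frame size, and stereo input; the standalone program below just re-derives the packet buffer size.

#include <cstddef>

int main() {
  const int kSampleRateHz = 48000;  // Opus always runs at 48 kHz.
  const int frame_size_ms = 20;     // Config default.
  const size_t num_channels = 2;    // Stereo.
  const size_t frames_per_packet = frame_size_ms / 10;                  // 2
  const size_t samples_per_10ms = kSampleRateHz / 100 * num_channels;   // 960
  // EncodeInternal() buffers 2 * 960 = 1920 samples before encoding.
  return frames_per_packet * samples_per_10ms == 1920 ? 0 : 1;
}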
diff --git a/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h b/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h
new file mode 100644
index 0000000000..59c8f796ee
--- /dev/null
+++ b/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h
@@ -0,0 +1,102 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_OPUS_AUDIO_ENCODER_OPUS_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_OPUS_AUDIO_ENCODER_OPUS_H_
+
+#include <vector>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/codecs/opus/opus_interface.h"
+#include "webrtc/modules/audio_coding/codecs/audio_encoder.h"
+
+namespace webrtc {
+
+struct CodecInst;
+
+class AudioEncoderOpus final : public AudioEncoder {
+ public:
+ enum ApplicationMode {
+ kVoip = 0,
+ kAudio = 1,
+ };
+
+ struct Config {
+ bool IsOk() const;
+ int frame_size_ms = 20;
+ size_t num_channels = 1;
+ int payload_type = 120;
+ ApplicationMode application = kVoip;
+ int bitrate_bps = 64000;
+ bool fec_enabled = false;
+ int max_playback_rate_hz = 48000;
+ int complexity = kDefaultComplexity;
+ bool dtx_enabled = false;
+
+ private:
+#if defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS) || defined(WEBRTC_ARCH_ARM)
+ // On Android, iOS, and ARM, default to a lower complexity setting to
+ // reduce encoder CPU load.
+ static const int kDefaultComplexity = 5;
+#else
+ static const int kDefaultComplexity = 9;
+#endif
+ };
+
+ explicit AudioEncoderOpus(const Config& config);
+ explicit AudioEncoderOpus(const CodecInst& codec_inst);
+ ~AudioEncoderOpus() override;
+
+ size_t MaxEncodedBytes() const override;
+ int SampleRateHz() const override;
+ size_t NumChannels() const override;
+ size_t Num10MsFramesInNextPacket() const override;
+ size_t Max10MsFramesInAPacket() const override;
+ int GetTargetBitrate() const override;
+
+ EncodedInfo EncodeInternal(uint32_t rtp_timestamp,
+ rtc::ArrayView<const int16_t> audio,
+ size_t max_encoded_bytes,
+ uint8_t* encoded) override;
+
+ void Reset() override;
+ bool SetFec(bool enable) override;
+
+ // Set Opus DTX. Once enabled, Opus stops transmission when it detects
+ // voice inactivity; while inactive, it still sends two packets (one for
+ // content, one for signaling) about every 400 ms.
+ bool SetDtx(bool enable) override;
+
+ bool SetApplication(Application application) override;
+ void SetMaxPlaybackRate(int frequency_hz) override;
+ void SetProjectedPacketLossRate(double fraction) override;
+ void SetTargetBitrate(int target_bps) override;
+
+ // Getters for testing.
+ double packet_loss_rate() const { return packet_loss_rate_; }
+ ApplicationMode application() const { return config_.application; }
+ bool dtx_enabled() const { return config_.dtx_enabled; }
+
+ private:
+ size_t Num10msFramesPerPacket() const;
+ size_t SamplesPer10msFrame() const;
+ bool RecreateEncoderInstance(const Config& config);
+
+ Config config_;
+ double packet_loss_rate_;
+ std::vector<int16_t> input_buffer_;
+ OpusEncInst* inst_;
+ uint32_t first_timestamp_in_buffer_;
+ RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderOpus);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_OPUS_AUDIO_ENCODER_OPUS_H_
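A hedged configuration sketch against the header above, enabling the optional FEC and DTX features on top of the defaults; the factory function is illustrative, not part of the patch.

#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h"

rtc::scoped_ptr<webrtc::AudioEncoderOpus> MakeOpusEncoderSketch() {
  webrtc::AudioEncoderOpus::Config config;
  config.num_channels = 2;  // Now a size_t, matching NumChannels() above.
  config.application = webrtc::AudioEncoderOpus::kAudio;
  config.fec_enabled = true;  // In-band FEC for lossy links.
  config.dtx_enabled = true;  // See the SetDtx() comment above.
  if (!config.IsOk())
    return rtc::scoped_ptr<webrtc::AudioEncoderOpus>();
  return rtc::scoped_ptr<webrtc::AudioEncoderOpus>(
      new webrtc::AudioEncoderOpus(config));
}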
diff --git a/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus_unittest.cc b/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus_unittest.cc
index e69f259554..441e807b4f 100644
--- a/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus_unittest.cc
+++ b/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus_unittest.cc
@@ -12,7 +12,7 @@
#include "webrtc/base/checks.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/common_types.h"
-#include "webrtc/modules/audio_coding/codecs/opus/include/audio_encoder_opus.h"
+#include "webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h"
namespace webrtc {
diff --git a/webrtc/modules/audio_coding/codecs/opus/include/audio_decoder_opus.h b/webrtc/modules/audio_coding/codecs/opus/include/audio_decoder_opus.h
deleted file mode 100644
index 6b0a88ae97..0000000000
--- a/webrtc/modules/audio_coding/codecs/opus/include/audio_decoder_opus.h
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_OPUS_INCLUDE_AUDIO_DECODER_OPUS_H
-#define WEBRTC_MODULES_AUDIO_CODING_CODECS_OPUS_INCLUDE_AUDIO_DECODER_OPUS_H
-
-#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
-#include "webrtc/modules/audio_coding/codecs/opus/include/opus_interface.h"
-
-namespace webrtc {
-
-class AudioDecoderOpus final : public AudioDecoder {
- public:
- explicit AudioDecoderOpus(size_t num_channels);
- ~AudioDecoderOpus() override;
-
- void Reset() override;
- int PacketDuration(const uint8_t* encoded, size_t encoded_len) const override;
- int PacketDurationRedundant(const uint8_t* encoded,
- size_t encoded_len) const override;
- bool PacketHasFec(const uint8_t* encoded, size_t encoded_len) const override;
- size_t Channels() const override;
-
- protected:
- int DecodeInternal(const uint8_t* encoded,
- size_t encoded_len,
- int sample_rate_hz,
- int16_t* decoded,
- SpeechType* speech_type) override;
- int DecodeRedundantInternal(const uint8_t* encoded,
- size_t encoded_len,
- int sample_rate_hz,
- int16_t* decoded,
- SpeechType* speech_type) override;
-
- private:
- OpusDecInst* dec_state_;
- const size_t channels_;
- RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoderOpus);
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_OPUS_INCLUDE_AUDIO_DECODER_OPUS_H
diff --git a/webrtc/modules/audio_coding/codecs/opus/include/audio_encoder_opus.h b/webrtc/modules/audio_coding/codecs/opus/include/audio_encoder_opus.h
deleted file mode 100644
index 7f2b563fd9..0000000000
--- a/webrtc/modules/audio_coding/codecs/opus/include/audio_encoder_opus.h
+++ /dev/null
@@ -1,102 +0,0 @@
-/*
- * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_OPUS_INCLUDE_AUDIO_ENCODER_OPUS_H_
-#define WEBRTC_MODULES_AUDIO_CODING_CODECS_OPUS_INCLUDE_AUDIO_ENCODER_OPUS_H_
-
-#include <vector>
-
-#include "webrtc/base/constructormagic.h"
-#include "webrtc/modules/audio_coding/codecs/opus/include/opus_interface.h"
-#include "webrtc/modules/audio_coding/codecs/audio_encoder.h"
-
-namespace webrtc {
-
-struct CodecInst;
-
-class AudioEncoderOpus final : public AudioEncoder {
- public:
- enum ApplicationMode {
- kVoip = 0,
- kAudio = 1,
- };
-
- struct Config {
- bool IsOk() const;
- int frame_size_ms = 20;
- int num_channels = 1;
- int payload_type = 120;
- ApplicationMode application = kVoip;
- int bitrate_bps = 64000;
- bool fec_enabled = false;
- int max_playback_rate_hz = 48000;
- int complexity = kDefaultComplexity;
- bool dtx_enabled = false;
-
- private:
-#if defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS) || defined(WEBRTC_ARCH_ARM)
- // If we are on Android, iOS and/or ARM, use a lower complexity setting as
- // default, to save encoder complexity.
- static const int kDefaultComplexity = 5;
-#else
- static const int kDefaultComplexity = 9;
-#endif
- };
-
- explicit AudioEncoderOpus(const Config& config);
- explicit AudioEncoderOpus(const CodecInst& codec_inst);
- ~AudioEncoderOpus() override;
-
- size_t MaxEncodedBytes() const override;
- int SampleRateHz() const override;
- int NumChannels() const override;
- size_t Num10MsFramesInNextPacket() const override;
- size_t Max10MsFramesInAPacket() const override;
- int GetTargetBitrate() const override;
-
- EncodedInfo EncodeInternal(uint32_t rtp_timestamp,
- const int16_t* audio,
- size_t max_encoded_bytes,
- uint8_t* encoded) override;
-
- void Reset() override;
- bool SetFec(bool enable) override;
-
- // Set Opus DTX. Once enabled, Opus stops transmission, when it detects voice
- // being inactive. During that, it still sends 2 packets (one for content, one
- // for signaling) about every 400 ms.
- bool SetDtx(bool enable) override;
-
- bool SetApplication(Application application) override;
- void SetMaxPlaybackRate(int frequency_hz) override;
- void SetProjectedPacketLossRate(double fraction) override;
- void SetTargetBitrate(int target_bps) override;
-
- // Getters for testing.
- double packet_loss_rate() const { return packet_loss_rate_; }
- ApplicationMode application() const { return config_.application; }
- bool dtx_enabled() const { return config_.dtx_enabled; }
-
- private:
- int Num10msFramesPerPacket() const;
- int SamplesPer10msFrame() const;
- bool RecreateEncoderInstance(const Config& config);
-
- Config config_;
- double packet_loss_rate_;
- std::vector<int16_t> input_buffer_;
- OpusEncInst* inst_;
- uint32_t first_timestamp_in_buffer_;
- RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderOpus);
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_OPUS_INCLUDE_AUDIO_ENCODER_OPUS_H_
diff --git a/webrtc/modules/audio_coding/codecs/opus/include/opus_interface.h b/webrtc/modules/audio_coding/codecs/opus/include/opus_interface.h
deleted file mode 100644
index 50b2338ab5..0000000000
--- a/webrtc/modules/audio_coding/codecs/opus/include/opus_interface.h
+++ /dev/null
@@ -1,349 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_OPUS_INCLUDE_OPUS_INTERFACE_H_
-#define WEBRTC_MODULES_AUDIO_CODING_CODECS_OPUS_INCLUDE_OPUS_INTERFACE_H_
-
-#include <stddef.h>
-
-#include "webrtc/typedefs.h"
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-// Opaque wrapper types for the codec state.
-typedef struct WebRtcOpusEncInst OpusEncInst;
-typedef struct WebRtcOpusDecInst OpusDecInst;
-
-/****************************************************************************
- * WebRtcOpus_EncoderCreate(...)
- *
- * This function create an Opus encoder.
- *
- * Input:
- * - channels : number of channels.
- * - application : 0 - VOIP applications.
- * Favor speech intelligibility.
- * 1 - Audio applications.
- * Favor faithfulness to the original input.
- *
- * Output:
- * - inst : a pointer to Encoder context that is created
- * if success.
- *
- * Return value : 0 - Success
- * -1 - Error
- */
-int16_t WebRtcOpus_EncoderCreate(OpusEncInst** inst,
- int32_t channels,
- int32_t application);
-
-int16_t WebRtcOpus_EncoderFree(OpusEncInst* inst);
-
-/****************************************************************************
- * WebRtcOpus_Encode(...)
- *
- * This function encodes audio as a series of Opus frames and inserts
- * it into a packet. Input buffer can be any length.
- *
- * Input:
- * - inst : Encoder context
- * - audio_in : Input speech data buffer
- * - samples : Samples per channel in audio_in
- * - length_encoded_buffer : Output buffer size
- *
- * Output:
- * - encoded : Output compressed data buffer
- *
- * Return value : >=0 - Length (in bytes) of coded data
- * -1 - Error
- */
-int WebRtcOpus_Encode(OpusEncInst* inst,
- const int16_t* audio_in,
- size_t samples,
- size_t length_encoded_buffer,
- uint8_t* encoded);
-
-/****************************************************************************
- * WebRtcOpus_SetBitRate(...)
- *
- * This function adjusts the target bitrate of the encoder.
- *
- * Input:
- * - inst : Encoder context
- * - rate : New target bitrate
- *
- * Return value : 0 - Success
- * -1 - Error
- */
-int16_t WebRtcOpus_SetBitRate(OpusEncInst* inst, int32_t rate);
-
-/****************************************************************************
- * WebRtcOpus_SetPacketLossRate(...)
- *
- * This function configures the encoder's expected packet loss percentage.
- *
- * Input:
- * - inst : Encoder context
- * - loss_rate : loss percentage in the range 0-100, inclusive.
- * Return value : 0 - Success
- * -1 - Error
- */
-int16_t WebRtcOpus_SetPacketLossRate(OpusEncInst* inst, int32_t loss_rate);
-
-/****************************************************************************
- * WebRtcOpus_SetMaxPlaybackRate(...)
- *
- * Configures the maximum playback rate for encoding. Due to hardware
- * limitations, the receiver may render audio up to a playback rate. Opus
- * encoder can use this information to optimize for network usage and encoding
- * complexity. This will affect the audio bandwidth in the coded audio. However,
- * the input/output sample rate is not affected.
- *
- * Input:
- * - inst : Encoder context
- * - frequency_hz : Maximum playback rate in Hz.
- * This parameter can take any value. The relation
- * between the value and the Opus internal mode is
- * as following:
- * frequency_hz <= 8000 narrow band
- * 8000 < frequency_hz <= 12000 medium band
- * 12000 < frequency_hz <= 16000 wide band
- * 16000 < frequency_hz <= 24000 super wide band
- * frequency_hz > 24000 full band
- * Return value : 0 - Success
- * -1 - Error
- */
-int16_t WebRtcOpus_SetMaxPlaybackRate(OpusEncInst* inst, int32_t frequency_hz);
-
-/* TODO(minyue): Check whether an API to check the FEC and the packet loss rate
- * is needed. It might not be very useful since there are not many use cases and
- * the caller can always maintain the states. */
-
-/****************************************************************************
- * WebRtcOpus_EnableFec()
- *
- * This function enables FEC for encoding.
- *
- * Input:
- * - inst : Encoder context
- *
- * Return value : 0 - Success
- * -1 - Error
- */
-int16_t WebRtcOpus_EnableFec(OpusEncInst* inst);
-
-/****************************************************************************
- * WebRtcOpus_DisableFec()
- *
- * This function disables FEC for encoding.
- *
- * Input:
- * - inst : Encoder context
- *
- * Return value : 0 - Success
- * -1 - Error
- */
-int16_t WebRtcOpus_DisableFec(OpusEncInst* inst);
-
-/****************************************************************************
- * WebRtcOpus_EnableDtx()
- *
- * This function enables Opus internal DTX for encoding.
- *
- * Input:
- * - inst : Encoder context
- *
- * Return value : 0 - Success
- * -1 - Error
- */
-int16_t WebRtcOpus_EnableDtx(OpusEncInst* inst);
-
-/****************************************************************************
- * WebRtcOpus_DisableDtx()
- *
- * This function disables Opus internal DTX for encoding.
- *
- * Input:
- * - inst : Encoder context
- *
- * Return value : 0 - Success
- * -1 - Error
- */
-int16_t WebRtcOpus_DisableDtx(OpusEncInst* inst);
-
-/*
- * WebRtcOpus_SetComplexity(...)
- *
- * This function adjusts the computational complexity. The effect is the same as
- * calling the complexity setting of Opus as an Opus encoder related CTL.
- *
- * Input:
- * - inst : Encoder context
- * - complexity : New target complexity (0-10, inclusive)
- *
- * Return value : 0 - Success
- * -1 - Error
- */
-int16_t WebRtcOpus_SetComplexity(OpusEncInst* inst, int32_t complexity);
-
-int16_t WebRtcOpus_DecoderCreate(OpusDecInst** inst, int channels);
-int16_t WebRtcOpus_DecoderFree(OpusDecInst* inst);
-
-/****************************************************************************
- * WebRtcOpus_DecoderChannels(...)
- *
- * This function returns the number of channels created for Opus decoder.
- */
-int WebRtcOpus_DecoderChannels(OpusDecInst* inst);
-
-/****************************************************************************
- * WebRtcOpus_DecoderInit(...)
- *
- * This function resets state of the decoder.
- *
- * Input:
- * - inst : Decoder context
- */
-void WebRtcOpus_DecoderInit(OpusDecInst* inst);
-
-/****************************************************************************
- * WebRtcOpus_Decode(...)
- *
- * This function decodes an Opus packet into one or more audio frames at the
- * ACM interface's sampling rate (32 kHz).
- *
- * Input:
- * - inst : Decoder context
- * - encoded : Encoded data
- * - encoded_bytes : Bytes in encoded vector
- *
- * Output:
- * - decoded : The decoded vector
- * - audio_type : 1 normal, 2 CNG (for Opus it should
- * always return 1 since we're not using Opus's
- * built-in DTX/CNG scheme)
- *
- * Return value : >0 - Samples per channel in decoded vector
- * -1 - Error
- */
-int WebRtcOpus_Decode(OpusDecInst* inst, const uint8_t* encoded,
- size_t encoded_bytes, int16_t* decoded,
- int16_t* audio_type);
-
-/****************************************************************************
- * WebRtcOpus_DecodePlc(...)
- *
- * This function processes PLC for opus frame(s).
- * Input:
- * - inst : Decoder context
- * - number_of_lost_frames : Number of PLC frames to produce
- *
- * Output:
- * - decoded : The decoded vector
- *
- * Return value : >0 - number of samples in decoded PLC vector
- * -1 - Error
- */
-int WebRtcOpus_DecodePlc(OpusDecInst* inst, int16_t* decoded,
- int number_of_lost_frames);
-
-/****************************************************************************
- * WebRtcOpus_DecodeFec(...)
- *
- * This function decodes the FEC data from an Opus packet into one or more audio
- * frames at the ACM interface's sampling rate (32 kHz).
- *
- * Input:
- * - inst : Decoder context
- * - encoded : Encoded data
- * - encoded_bytes : Bytes in encoded vector
- *
- * Output:
- * - decoded : The decoded vector (previous frame)
- *
- * Return value : >0 - Samples per channel in decoded vector
- * 0 - No FEC data in the packet
- * -1 - Error
- */
-int WebRtcOpus_DecodeFec(OpusDecInst* inst, const uint8_t* encoded,
- size_t encoded_bytes, int16_t* decoded,
- int16_t* audio_type);
-
-/****************************************************************************
- * WebRtcOpus_DurationEst(...)
- *
- * This function calculates the duration of an opus packet.
- * Input:
- * - inst : Decoder context
- * - payload : Encoded data pointer
- * - payload_length_bytes : Bytes of encoded data
- *
- * Return value : The duration of the packet, in samples per
- * channel.
- */
-int WebRtcOpus_DurationEst(OpusDecInst* inst,
- const uint8_t* payload,
- size_t payload_length_bytes);
-
-/****************************************************************************
- * WebRtcOpus_PlcDuration(...)
- *
- * This function calculates the duration of a frame returned by packet loss
- * concealment (PLC).
- *
- * Input:
- * - inst : Decoder context
- *
- * Return value : The duration of a frame returned by PLC, in
- * samples per channel.
- */
-int WebRtcOpus_PlcDuration(OpusDecInst* inst);
-
-/* TODO(minyue): Check whether it is needed to add a decoder context to the
- * arguments, like WebRtcOpus_DurationEst(...). In fact, the packet itself tells
- * the duration. The decoder context in WebRtcOpus_DurationEst(...) is not used.
- * So it may be advisable to remove it from WebRtcOpus_DurationEst(...). */
-
-/****************************************************************************
- * WebRtcOpus_FecDurationEst(...)
- *
- * This function calculates the duration of the FEC data within an opus packet.
- * Input:
- * - payload : Encoded data pointer
- * - payload_length_bytes : Bytes of encoded data
- *
- * Return value : >0 - The duration of the FEC data in the
- * packet in samples per channel.
- * 0 - No FEC data in the packet.
- */
-int WebRtcOpus_FecDurationEst(const uint8_t* payload,
- size_t payload_length_bytes);
-
-/****************************************************************************
- * WebRtcOpus_PacketHasFec(...)
- *
- * This function detects if an opus packet has FEC.
- * Input:
- * - payload : Encoded data pointer
- * - payload_length_bytes : Bytes of encoded data
- *
- * Return value : 0 - the packet does NOT contain FEC.
- * 1 - the packet contains FEC.
- */
-int WebRtcOpus_PacketHasFec(const uint8_t* payload,
- size_t payload_length_bytes);
-
-#ifdef __cplusplus
-} // extern "C"
-#endif
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_OPUS_INCLUDE_OPUS_INCLUDE_H_
diff --git a/webrtc/modules/audio_coding/codecs/opus/opus.gypi b/webrtc/modules/audio_coding/codecs/opus/opus.gypi
index 05da3e5e47..d7454d632d 100644
--- a/webrtc/modules/audio_coding/codecs/opus/opus.gypi
+++ b/webrtc/modules/audio_coding/codecs/opus/opus.gypi
@@ -39,17 +39,14 @@
'dependencies': [
'audio_encoder_interface',
],
- 'include_dirs': [
- '<(webrtc_root)',
- ],
'sources': [
'audio_decoder_opus.cc',
+ 'audio_decoder_opus.h',
'audio_encoder_opus.cc',
- 'include/audio_decoder_opus.h',
- 'include/audio_encoder_opus.h',
- 'include/opus_interface.h',
+ 'audio_encoder_opus.h',
'opus_inst.h',
'opus_interface.c',
+ 'opus_interface.h',
],
},
],
@@ -65,9 +62,6 @@
'<(webrtc_root)/test/test.gyp:test_support_main',
'<(DEPTH)/testing/gtest.gyp:gtest',
],
- 'include_dirs': [
- '<(webrtc_root)',
- ],
'sources': [
'opus_fec_test.cc',
],
diff --git a/webrtc/modules/audio_coding/codecs/opus/opus_fec_test.cc b/webrtc/modules/audio_coding/codecs/opus/opus_fec_test.cc
index f257210431..4f9f7ff7bb 100644
--- a/webrtc/modules/audio_coding/codecs/opus/opus_fec_test.cc
+++ b/webrtc/modules/audio_coding/codecs/opus/opus_fec_test.cc
@@ -9,8 +9,9 @@
*/
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/format_macros.h"
#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/audio_coding/codecs/opus/include/opus_interface.h"
+#include "webrtc/modules/audio_coding/codecs/opus/opus_interface.h"
#include "webrtc/test/testsupport/fileutils.h"
using ::std::string;
@@ -21,7 +22,7 @@ using ::testing::TestWithParam;
namespace webrtc {
// Define coding parameter as <channels, bit_rate, filename, extension>.
-typedef tuple<int, int, string, string> coding_param;
+typedef tuple<size_t, int, string, string> coding_param;
typedef struct mode mode;
struct mode {
@@ -47,7 +48,7 @@ class OpusFecTest : public TestWithParam<coding_param> {
int sampling_khz_;
size_t block_length_sample_;
- int channels_;
+ size_t channels_;
int bit_rate_;
size_t data_pointer_;
@@ -68,7 +69,7 @@ class OpusFecTest : public TestWithParam<coding_param> {
void OpusFecTest::SetUp() {
channels_ = get<0>(GetParam());
bit_rate_ = get<1>(GetParam());
- printf("Coding %d channel signal at %d bps.\n", channels_, bit_rate_);
+ printf("Coding %" PRIuS " channel signal at %d bps.\n", channels_, bit_rate_);
in_filename_ = test::ResourcePath(get<2>(GetParam()), get<3>(GetParam()));
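The hunk above turns |channels_| into a size_t and prints it with the PRIuS macro from webrtc/base/format_macros.h instead of %d, keeping the printf portable across toolchains where %zu is unreliable. A standalone sketch of the same pattern (the function name is illustrative):

#include <stdio.h>

#include "webrtc/base/format_macros.h"  // Provides PRIuS for size_t.

// Sketch: print a size_t portably, as the test above now does.
void PrintCodingParams(size_t channels, int bit_rate) {
  printf("Coding %" PRIuS " channel signal at %d bps.\n", channels, bit_rate);
}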
diff --git a/webrtc/modules/audio_coding/codecs/opus/opus_inst.h b/webrtc/modules/audio_coding/codecs/opus/opus_inst.h
index 373db392a6..8d032baf35 100644
--- a/webrtc/modules/audio_coding/codecs/opus/opus_inst.h
+++ b/webrtc/modules/audio_coding/codecs/opus/opus_inst.h
@@ -11,17 +11,26 @@
#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_OPUS_OPUS_INST_H_
#define WEBRTC_MODULES_AUDIO_CODING_CODECS_OPUS_OPUS_INST_H_
+#include <stddef.h>
+
#include "opus.h"
struct WebRtcOpusEncInst {
OpusEncoder* encoder;
+ size_t channels;
int in_dtx_mode;
+ // When Opus is in DTX mode, we use |zero_counts| to count consecutive zeros
+ // and break up long zero segments, so as to prevent DTX from going wrong. We
+ // use one counter for each channel. After each encoding, |zero_counts| holds
+ // the zero counts remaining from the last frame.
+ // TODO(minyue): remove this when Opus gets an internal fix to DTX.
+ size_t* zero_counts;
};
struct WebRtcOpusDecInst {
OpusDecoder* decoder;
int prev_decoded_samples;
- int channels;
+ size_t channels;
int in_dtx_mode;
};
diff --git a/webrtc/modules/audio_coding/codecs/opus/opus_interface.c b/webrtc/modules/audio_coding/codecs/opus/opus_interface.c
index 1a632422c5..9dc7ef95fe 100644
--- a/webrtc/modules/audio_coding/codecs/opus/opus_interface.c
+++ b/webrtc/modules/audio_coding/codecs/opus/opus_interface.c
@@ -8,9 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/codecs/opus/include/opus_interface.h"
+#include "webrtc/modules/audio_coding/codecs/opus/opus_interface.h"
#include "webrtc/modules/audio_coding/codecs/opus/opus_inst.h"
+#include <assert.h>
#include <stdlib.h>
#include <string.h>
@@ -29,48 +30,61 @@ enum {
/* Default frame size, 20 ms @ 48 kHz, in samples (for one channel). */
kWebRtcOpusDefaultFrameSize = 960,
+
+ // Maximum number of consecutive zeros, at or beyond which DTX can fail.
+ kZeroBreakCount = 157,
+
+#if defined(OPUS_FIXED_POINT)
+ kZeroBreakValue = 10,
+#else
+ kZeroBreakValue = 1,
+#endif
};
int16_t WebRtcOpus_EncoderCreate(OpusEncInst** inst,
- int32_t channels,
+ size_t channels,
int32_t application) {
- OpusEncInst* state;
- if (inst != NULL) {
- state = (OpusEncInst*) calloc(1, sizeof(OpusEncInst));
- if (state) {
- int opus_app;
- switch (application) {
- case 0: {
- opus_app = OPUS_APPLICATION_VOIP;
- break;
- }
- case 1: {
- opus_app = OPUS_APPLICATION_AUDIO;
- break;
- }
- default: {
- free(state);
- return -1;
- }
- }
+ int opus_app;
+ if (!inst)
+ return -1;
- int error;
- state->encoder = opus_encoder_create(48000, channels, opus_app,
- &error);
- state->in_dtx_mode = 0;
- if (error == OPUS_OK && state->encoder != NULL) {
- *inst = state;
- return 0;
- }
- free(state);
- }
+ switch (application) {
+ case 0:
+ opus_app = OPUS_APPLICATION_VOIP;
+ break;
+ case 1:
+ opus_app = OPUS_APPLICATION_AUDIO;
+ break;
+ default:
+ return -1;
}
- return -1;
+
+ OpusEncInst* state = calloc(1, sizeof(OpusEncInst));
+ assert(state);
+
+ // Allocate zero counters.
+ state->zero_counts = calloc(channels, sizeof(size_t));
+ assert(state->zero_counts);
+
+ int error;
+ state->encoder = opus_encoder_create(48000, (int)channels, opus_app,
+ &error);
+ if (error != OPUS_OK || !state->encoder) {
+ WebRtcOpus_EncoderFree(state);
+ return -1;
+ }
+
+ state->in_dtx_mode = 0;
+ state->channels = channels;
+
+ *inst = state;
+ return 0;
}
int16_t WebRtcOpus_EncoderFree(OpusEncInst* inst) {
if (inst) {
opus_encoder_destroy(inst->encoder);
+ free(inst->zero_counts);
free(inst);
return 0;
} else {
@@ -84,13 +98,42 @@ int WebRtcOpus_Encode(OpusEncInst* inst,
size_t length_encoded_buffer,
uint8_t* encoded) {
int res;
+ size_t i;
+ size_t c;
+
+ int16_t buffer[2 * 48 * kWebRtcOpusMaxEncodeFrameSizeMs];
if (samples > 48 * kWebRtcOpusMaxEncodeFrameSizeMs) {
return -1;
}
+ const size_t channels = inst->channels;
+ int use_buffer = 0;
+
+ // Break long runs of consecutive zeros by forcing a |kZeroBreakValue| every
+ // |kZeroBreakCount| zero samples.
+ if (inst->in_dtx_mode) {
+ for (i = 0; i < samples; ++i) {
+ for (c = 0; c < channels; ++c) {
+ if (audio_in[i * channels + c] == 0) {
+ ++inst->zero_counts[c];
+ if (inst->zero_counts[c] == kZeroBreakCount) {
+ if (!use_buffer) {
+ memcpy(buffer, audio_in, samples * channels * sizeof(int16_t));
+ use_buffer = 1;
+ }
+ buffer[i * channels + c] = kZeroBreakValue;
+ inst->zero_counts[c] = 0;
+ }
+ } else {
+ inst->zero_counts[c] = 0;
+ }
+ }
+ }
+ }
+
res = opus_encode(inst->encoder,
- (const opus_int16*)audio_in,
+ use_buffer ? buffer : audio_in,
(int)samples,
encoded,
(opus_int32)length_encoded_buffer);
@@ -205,7 +248,7 @@ int16_t WebRtcOpus_SetComplexity(OpusEncInst* inst, int32_t complexity) {
}
}
-int16_t WebRtcOpus_DecoderCreate(OpusDecInst** inst, int channels) {
+int16_t WebRtcOpus_DecoderCreate(OpusDecInst** inst, size_t channels) {
int error;
OpusDecInst* state;
@@ -217,7 +260,7 @@ int16_t WebRtcOpus_DecoderCreate(OpusDecInst** inst, int channels) {
}
/* Create new memory, always at 48000 Hz. */
- state->decoder = opus_decoder_create(48000, channels, &error);
+ state->decoder = opus_decoder_create(48000, (int)channels, &error);
if (error == OPUS_OK && state->decoder != NULL) {
/* Creation of memory all ok. */
state->channels = channels;
@@ -246,7 +289,7 @@ int16_t WebRtcOpus_DecoderFree(OpusDecInst* inst) {
}
}
-int WebRtcOpus_DecoderChannels(OpusDecInst* inst) {
+size_t WebRtcOpus_DecoderChannels(OpusDecInst* inst) {
return inst->channels;
}
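For readers skimming the opus_interface.c hunks: the zero-breaking logic added to WebRtcOpus_Encode can be read in isolation as the sketch below. Unlike the patch, which copies the const input into a scratch buffer before patching it, this version modifies the buffer in place for brevity; the function name and the in-place mutation are illustrative, while the constants and the counting scheme mirror the patch.

#include <stddef.h>
#include <stdint.h>

static const size_t kZeroBreakCount = 157;  // As in the patch.
static const int16_t kZeroBreakValue = 1;   // 10 in OPUS_FIXED_POINT builds.

// While the encoder is in DTX, count consecutive zero samples per channel
// and force a small nonzero value every kZeroBreakCount zeros, so that an
// all-zero input cannot keep Opus's DTX machinery in a bad state. The
// counters persist across calls, carrying zero runs over frame boundaries.
void BreakZeroRuns(int16_t* audio, size_t samples, size_t channels,
                   size_t* zero_counts /* one counter per channel */) {
  for (size_t i = 0; i < samples; ++i) {
    for (size_t c = 0; c < channels; ++c) {
      if (audio[i * channels + c] == 0) {
        if (++zero_counts[c] == kZeroBreakCount) {
          audio[i * channels + c] = kZeroBreakValue;
          zero_counts[c] = 0;
        }
      } else {
        zero_counts[c] = 0;
      }
    }
  }
}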
diff --git a/webrtc/modules/audio_coding/codecs/opus/opus_interface.h b/webrtc/modules/audio_coding/codecs/opus/opus_interface.h
new file mode 100644
index 0000000000..754b49c808
--- /dev/null
+++ b/webrtc/modules/audio_coding/codecs/opus/opus_interface.h
@@ -0,0 +1,349 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_OPUS_OPUS_INTERFACE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_OPUS_OPUS_INTERFACE_H_
+
+#include <stddef.h>
+
+#include "webrtc/typedefs.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+// Opaque wrapper types for the codec state.
+typedef struct WebRtcOpusEncInst OpusEncInst;
+typedef struct WebRtcOpusDecInst OpusDecInst;
+
+/****************************************************************************
+ * WebRtcOpus_EncoderCreate(...)
+ *
+ * This function creates an Opus encoder.
+ *
+ * Input:
+ * - channels : number of channels.
+ * - application : 0 - VOIP applications.
+ * Favor speech intelligibility.
+ * 1 - Audio applications.
+ * Favor faithfulness to the original input.
+ *
+ * Output:
+ * - inst : a pointer to the encoder context that is created
+ * on success.
+ *
+ * Return value : 0 - Success
+ * -1 - Error
+ */
+int16_t WebRtcOpus_EncoderCreate(OpusEncInst** inst,
+ size_t channels,
+ int32_t application);
+
+int16_t WebRtcOpus_EncoderFree(OpusEncInst* inst);
+
+/****************************************************************************
+ * WebRtcOpus_Encode(...)
+ *
+ * This function encodes audio as a series of Opus frames and inserts
+ * them into a packet. The input buffer can be of any length.
+ *
+ * Input:
+ * - inst : Encoder context
+ * - audio_in : Input speech data buffer
+ * - samples : Samples per channel in audio_in
+ * - length_encoded_buffer : Output buffer size
+ *
+ * Output:
+ * - encoded : Output compressed data buffer
+ *
+ * Return value : >=0 - Length (in bytes) of coded data
+ * -1 - Error
+ */
+int WebRtcOpus_Encode(OpusEncInst* inst,
+ const int16_t* audio_in,
+ size_t samples,
+ size_t length_encoded_buffer,
+ uint8_t* encoded);
+
+/****************************************************************************
+ * WebRtcOpus_SetBitRate(...)
+ *
+ * This function adjusts the target bitrate of the encoder.
+ *
+ * Input:
+ * - inst : Encoder context
+ * - rate : New target bitrate
+ *
+ * Return value : 0 - Success
+ * -1 - Error
+ */
+int16_t WebRtcOpus_SetBitRate(OpusEncInst* inst, int32_t rate);
+
+/****************************************************************************
+ * WebRtcOpus_SetPacketLossRate(...)
+ *
+ * This function configures the encoder's expected packet loss percentage.
+ *
+ * Input:
+ * - inst : Encoder context
+ * - loss_rate : loss percentage in the range 0-100, inclusive.
+ * Return value : 0 - Success
+ * -1 - Error
+ */
+int16_t WebRtcOpus_SetPacketLossRate(OpusEncInst* inst, int32_t loss_rate);
+
+/****************************************************************************
+ * WebRtcOpus_SetMaxPlaybackRate(...)
+ *
+ * Configures the maximum playback rate for encoding. Due to hardware
+ * limitations, the receiver may only be able to render audio up to a certain
+ * playback rate. The Opus encoder can use this information to optimize for
+ * network usage and encoding complexity. This affects the audio bandwidth of
+ * the coded audio; the input/output sample rate is not affected.
+ *
+ * Input:
+ * - inst : Encoder context
+ * - frequency_hz : Maximum playback rate in Hz.
+ * This parameter can take any value. The relation
+ * between the value and the Opus internal mode is
+ * as follows:
+ * frequency_hz <= 8000 narrow band
+ * 8000 < frequency_hz <= 12000 medium band
+ * 12000 < frequency_hz <= 16000 wide band
+ * 16000 < frequency_hz <= 24000 super wide band
+ * frequency_hz > 24000 full band
+ * Return value : 0 - Success
+ * -1 - Error
+ */
+int16_t WebRtcOpus_SetMaxPlaybackRate(OpusEncInst* inst, int32_t frequency_hz);
+
+/* TODO(minyue): Check whether an API to check the FEC and the packet loss rate
+ * is needed. It might not be very useful since there are not many use cases and
+ * the caller can always maintain the states. */
+
+/****************************************************************************
+ * WebRtcOpus_EnableFec()
+ *
+ * This function enables FEC for encoding.
+ *
+ * Input:
+ * - inst : Encoder context
+ *
+ * Return value : 0 - Success
+ * -1 - Error
+ */
+int16_t WebRtcOpus_EnableFec(OpusEncInst* inst);
+
+/****************************************************************************
+ * WebRtcOpus_DisableFec()
+ *
+ * This function disables FEC for encoding.
+ *
+ * Input:
+ * - inst : Encoder context
+ *
+ * Return value : 0 - Success
+ * -1 - Error
+ */
+int16_t WebRtcOpus_DisableFec(OpusEncInst* inst);
+
+/****************************************************************************
+ * WebRtcOpus_EnableDtx()
+ *
+ * This function enables Opus internal DTX for encoding.
+ *
+ * Input:
+ * - inst : Encoder context
+ *
+ * Return value : 0 - Success
+ * -1 - Error
+ */
+int16_t WebRtcOpus_EnableDtx(OpusEncInst* inst);
+
+/****************************************************************************
+ * WebRtcOpus_DisableDtx()
+ *
+ * This function disables Opus internal DTX for encoding.
+ *
+ * Input:
+ * - inst : Encoder context
+ *
+ * Return value : 0 - Success
+ * -1 - Error
+ */
+int16_t WebRtcOpus_DisableDtx(OpusEncInst* inst);
+
+/*
+ * WebRtcOpus_SetComplexity(...)
+ *
+ * This function adjusts the computational complexity. The effect is the same as
+ * calling the complexity setting of Opus as an Opus encoder related CTL.
+ *
+ * Input:
+ * - inst : Encoder context
+ * - complexity : New target complexity (0-10, inclusive)
+ *
+ * Return value : 0 - Success
+ * -1 - Error
+ */
+int16_t WebRtcOpus_SetComplexity(OpusEncInst* inst, int32_t complexity);
+
+int16_t WebRtcOpus_DecoderCreate(OpusDecInst** inst, size_t channels);
+int16_t WebRtcOpus_DecoderFree(OpusDecInst* inst);
+
+/****************************************************************************
+ * WebRtcOpus_DecoderChannels(...)
+ *
+ * This function returns the number of channels created for the Opus decoder.
+ */
+size_t WebRtcOpus_DecoderChannels(OpusDecInst* inst);
+
+/****************************************************************************
+ * WebRtcOpus_DecoderInit(...)
+ *
+ * This function resets the state of the decoder.
+ *
+ * Input:
+ * - inst : Decoder context
+ */
+void WebRtcOpus_DecoderInit(OpusDecInst* inst);
+
+/****************************************************************************
+ * WebRtcOpus_Decode(...)
+ *
+ * This function decodes an Opus packet into one or more audio frames at the
+ * ACM interface's sampling rate (32 kHz).
+ *
+ * Input:
+ * - inst : Decoder context
+ * - encoded : Encoded data
+ * - encoded_bytes : Bytes in encoded vector
+ *
+ * Output:
+ * - decoded : The decoded vector
+ * - audio_type : 1 normal, 2 CNG (Opus returns 2 only
+ * when its built-in DTX is enabled and the
+ * decoder is producing comfort noise)
+ *
+ * Return value : >0 - Samples per channel in decoded vector
+ * -1 - Error
+ */
+int WebRtcOpus_Decode(OpusDecInst* inst, const uint8_t* encoded,
+ size_t encoded_bytes, int16_t* decoded,
+ int16_t* audio_type);
+
+/****************************************************************************
+ * WebRtcOpus_DecodePlc(...)
+ *
+ * This function processes PLC for Opus frame(s).
+ * Input:
+ * - inst : Decoder context
+ * - number_of_lost_frames : Number of PLC frames to produce
+ *
+ * Output:
+ * - decoded : The decoded vector
+ *
+ * Return value : >0 - number of samples in decoded PLC vector
+ * -1 - Error
+ */
+int WebRtcOpus_DecodePlc(OpusDecInst* inst, int16_t* decoded,
+ int number_of_lost_frames);
+
+/****************************************************************************
+ * WebRtcOpus_DecodeFec(...)
+ *
+ * This function decodes the FEC data from an Opus packet into one or more audio
+ * frames at the ACM interface's sampling rate (32 kHz).
+ *
+ * Input:
+ * - inst : Decoder context
+ * - encoded : Encoded data
+ * - encoded_bytes : Bytes in encoded vector
+ *
+ * Output:
+ * - decoded : The decoded vector (previous frame)
+ *
+ * Return value : >0 - Samples per channel in decoded vector
+ * 0 - No FEC data in the packet
+ * -1 - Error
+ */
+int WebRtcOpus_DecodeFec(OpusDecInst* inst, const uint8_t* encoded,
+ size_t encoded_bytes, int16_t* decoded,
+ int16_t* audio_type);
+
+/****************************************************************************
+ * WebRtcOpus_DurationEst(...)
+ *
+ * This function calculates the duration of an Opus packet.
+ * Input:
+ * - inst : Decoder context
+ * - payload : Encoded data pointer
+ * - payload_length_bytes : Bytes of encoded data
+ *
+ * Return value : The duration of the packet, in samples per
+ * channel.
+ */
+int WebRtcOpus_DurationEst(OpusDecInst* inst,
+ const uint8_t* payload,
+ size_t payload_length_bytes);
+
+/****************************************************************************
+ * WebRtcOpus_PlcDuration(...)
+ *
+ * This function calculates the duration of a frame returned by packet loss
+ * concealment (PLC).
+ *
+ * Input:
+ * - inst : Decoder context
+ *
+ * Return value : The duration of a frame returned by PLC, in
+ * samples per channel.
+ */
+int WebRtcOpus_PlcDuration(OpusDecInst* inst);
+
+/* TODO(minyue): Check whether it is needed to add a decoder context to the
+ * arguments, like WebRtcOpus_DurationEst(...). In fact, the packet itself tells
+ * the duration. The decoder context in WebRtcOpus_DurationEst(...) is not used.
+ * So it may be advisable to remove it from WebRtcOpus_DurationEst(...). */
+
+/****************************************************************************
+ * WebRtcOpus_FecDurationEst(...)
+ *
+ * This function calculates the duration of the FEC data within an Opus packet.
+ * Input:
+ * - payload : Encoded data pointer
+ * - payload_length_bytes : Bytes of encoded data
+ *
+ * Return value : >0 - The duration of the FEC data in the
+ * packet in samples per channel.
+ * 0 - No FEC data in the packet.
+ */
+int WebRtcOpus_FecDurationEst(const uint8_t* payload,
+ size_t payload_length_bytes);
+
+/****************************************************************************
+ * WebRtcOpus_PacketHasFec(...)
+ *
+ * This function detects whether an Opus packet has FEC.
+ * Input:
+ * - payload : Encoded data pointer
+ * - payload_length_bytes : Bytes of encoded data
+ *
+ * Return value : 0 - the packet does NOT contain FEC.
+ * 1 - the packet contains FEC.
+ */
+int WebRtcOpus_PacketHasFec(const uint8_t* payload,
+ size_t payload_length_bytes);
+
+#ifdef __cplusplus
+} // extern "C"
+#endif
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_OPUS_OPUS_INTERFACE_H_
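A minimal round-trip through the C API declared in the new header above, using only functions whose signatures appear in this patch. The helper name, buffer sizes, and the 32 kbps bitrate are illustrative; the sketch assumes one 20 ms mono frame at 48 kHz (960 samples per channel).

#include <stdint.h>

#include "webrtc/modules/audio_coding/codecs/opus/opus_interface.h"

// Sketch: encode one frame and decode it back. Returns samples per channel
// decoded, or -1 on error.
int RoundTrip(const int16_t* pcm /* 960 mono samples */) {
  OpusEncInst* enc = NULL;
  OpusDecInst* dec = NULL;
  if (WebRtcOpus_EncoderCreate(&enc, 1, 0 /* 0 = VOIP */) != 0)
    return -1;
  if (WebRtcOpus_DecoderCreate(&dec, 1) != 0) {
    WebRtcOpus_EncoderFree(enc);
    return -1;
  }
  WebRtcOpus_SetBitRate(enc, 32000);

  uint8_t packet[1500];  // Generous for one frame at this bitrate.
  int bytes = WebRtcOpus_Encode(enc, pcm, 960, sizeof(packet), packet);

  int16_t decoded[960 * 6];  // Generous output buffer.
  int16_t audio_type;
  int samples_per_channel = -1;
  if (bytes > 0) {
    samples_per_channel = WebRtcOpus_Decode(dec, packet, (size_t)bytes,
                                            decoded, &audio_type);
  }
  WebRtcOpus_DecoderFree(dec);
  WebRtcOpus_EncoderFree(enc);
  return samples_per_channel;
}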
diff --git a/webrtc/modules/audio_coding/codecs/opus/opus_speed_test.cc b/webrtc/modules/audio_coding/codecs/opus/opus_speed_test.cc
index 29def14bf8..4d1aa42c89 100644
--- a/webrtc/modules/audio_coding/codecs/opus/opus_speed_test.cc
+++ b/webrtc/modules/audio_coding/codecs/opus/opus_speed_test.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/codecs/opus/include/opus_interface.h"
+#include "webrtc/modules/audio_coding/codecs/opus/opus_interface.h"
#include "webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.h"
using ::std::string;
@@ -77,7 +77,7 @@ float OpusSpeedTest::DecodeABlock(const uint8_t* bit_stream,
value = WebRtcOpus_Decode(opus_decoder_, bit_stream, encoded_bytes, out_data,
&audio_type);
clocks = clock() - clocks;
- EXPECT_EQ(output_length_sample_, value);
+ EXPECT_EQ(output_length_sample_, static_cast<size_t>(value));
return 1000.0 * clocks / CLOCKS_PER_SEC;
}
diff --git a/webrtc/modules/audio_coding/codecs/opus/opus_unittest.cc b/webrtc/modules/audio_coding/codecs/opus/opus_unittest.cc
index 4630e44807..c82b184b38 100644
--- a/webrtc/modules/audio_coding/codecs/opus/opus_unittest.cc
+++ b/webrtc/modules/audio_coding/codecs/opus/opus_unittest.cc
@@ -10,7 +10,8 @@
#include <string>
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/audio_coding/codecs/opus/include/opus_interface.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/modules/audio_coding/codecs/opus/opus_interface.h"
#include "webrtc/modules/audio_coding/codecs/opus/opus_inst.h"
#include "webrtc/modules/audio_coding/neteq/tools/audio_loop.h"
#include "webrtc/test/testsupport/fileutils.h"
@@ -35,17 +36,18 @@ class OpusTest : public TestWithParam<::testing::tuple<int, int>> {
protected:
OpusTest();
- void TestDtxEffect(bool dtx);
+ void TestDtxEffect(bool dtx, int block_length_ms);
// Prepare |speech_data_| for encoding, read from a hard-coded file.
// After preparation, |speech_data_.GetNextBlock()| returns a pointer to a
// block of |block_length_ms| milliseconds. The data is looped every
// |loop_length_ms| milliseconds.
- void PrepareSpeechData(int channel, int block_length_ms, int loop_length_ms);
+ void PrepareSpeechData(size_t channel,
+ int block_length_ms,
+ int loop_length_ms);
int EncodeDecode(WebRtcOpusEncInst* encoder,
- const int16_t* input_audio,
- size_t input_samples,
+ rtc::ArrayView<const int16_t> input_audio,
WebRtcOpusDecInst* decoder,
int16_t* output_audio,
int16_t* audio_type);
@@ -53,13 +55,16 @@ class OpusTest : public TestWithParam<::testing::tuple<int, int>> {
void SetMaxPlaybackRate(WebRtcOpusEncInst* encoder,
opus_int32 expect, int32_t set);
+ void CheckAudioBounded(const int16_t* audio, size_t samples, size_t channels,
+ uint16_t bound) const;
+
WebRtcOpusEncInst* opus_encoder_;
WebRtcOpusDecInst* opus_decoder_;
AudioLoop speech_data_;
uint8_t bitstream_[kMaxBytes];
size_t encoded_bytes_;
- int channels_;
+ size_t channels_;
int application_;
};
@@ -67,11 +72,11 @@ OpusTest::OpusTest()
: opus_encoder_(NULL),
opus_decoder_(NULL),
encoded_bytes_(0),
- channels_(::testing::get<0>(GetParam())),
+ channels_(static_cast<size_t>(::testing::get<0>(GetParam()))),
application_(::testing::get<1>(GetParam())) {
}
-void OpusTest::PrepareSpeechData(int channel, int block_length_ms,
+void OpusTest::PrepareSpeechData(size_t channel, int block_length_ms,
int loop_length_ms) {
const std::string file_name =
webrtc::test::ResourcePath((channel == 1) ?
@@ -95,14 +100,25 @@ void OpusTest::SetMaxPlaybackRate(WebRtcOpusEncInst* encoder,
EXPECT_EQ(expect, bandwidth);
}
+void OpusTest::CheckAudioBounded(const int16_t* audio, size_t samples,
+ size_t channels, uint16_t bound) const {
+ for (size_t i = 0; i < samples; ++i) {
+ for (size_t c = 0; c < channels; ++c) {
+ ASSERT_GE(audio[i * channels + c], -bound);
+ ASSERT_LE(audio[i * channels + c], bound);
+ }
+ }
+}
+
int OpusTest::EncodeDecode(WebRtcOpusEncInst* encoder,
- const int16_t* input_audio,
- size_t input_samples,
+ rtc::ArrayView<const int16_t> input_audio,
WebRtcOpusDecInst* decoder,
int16_t* output_audio,
int16_t* audio_type) {
- int encoded_bytes_int = WebRtcOpus_Encode(encoder, input_audio, input_samples,
- kMaxBytes, bitstream_);
+ int encoded_bytes_int = WebRtcOpus_Encode(
+ encoder, input_audio.data(),
+ rtc::CheckedDivExact(input_audio.size(), channels_),
+ kMaxBytes, bitstream_);
EXPECT_GE(encoded_bytes_int, 0);
encoded_bytes_ = static_cast<size_t>(encoded_bytes_int);
int est_len = WebRtcOpus_DurationEst(decoder, bitstream_, encoded_bytes_);
@@ -115,8 +131,9 @@ int OpusTest::EncodeDecode(WebRtcOpusEncInst* encoder,
// Test if encoder/decoder can enter DTX mode properly and do not enter DTX when
// they should not. This test is signal dependent.
-void OpusTest::TestDtxEffect(bool dtx) {
- PrepareSpeechData(channels_, 20, 2000);
+void OpusTest::TestDtxEffect(bool dtx, int block_length_ms) {
+ PrepareSpeechData(channels_, block_length_ms, 2000);
+ const size_t samples = kOpusRateKhz * block_length_ms;
// Create encoder memory.
EXPECT_EQ(0, WebRtcOpus_EncoderCreate(&opus_encoder_,
@@ -129,22 +146,20 @@ void OpusTest::TestDtxEffect(bool dtx) {
channels_ == 1 ? 32000 : 64000));
// Set input audio as silence.
- int16_t* silence = new int16_t[kOpus20msFrameSamples * channels_];
- memset(silence, 0, sizeof(int16_t) * kOpus20msFrameSamples * channels_);
+ std::vector<int16_t> silence(samples * channels_, 0);
// Setting DTX.
EXPECT_EQ(0, dtx ? WebRtcOpus_EnableDtx(opus_encoder_) :
WebRtcOpus_DisableDtx(opus_encoder_));
int16_t audio_type;
- int16_t* output_data_decode = new int16_t[kOpus20msFrameSamples * channels_];
+ int16_t* output_data_decode = new int16_t[samples * channels_];
for (int i = 0; i < 100; ++i) {
- EXPECT_EQ(kOpus20msFrameSamples,
+ EXPECT_EQ(samples,
static_cast<size_t>(EncodeDecode(
- opus_encoder_, speech_data_.GetNextBlock(),
- kOpus20msFrameSamples, opus_decoder_, output_data_decode,
- &audio_type)));
+ opus_encoder_, speech_data_.GetNextBlock(), opus_decoder_,
+ output_data_decode, &audio_type)));
// If not DTX, it should never enter DTX mode. If DTX, we do not care since
// whether it enters DTX depends on the signal type.
if (!dtx) {
@@ -158,10 +173,10 @@ void OpusTest::TestDtxEffect(bool dtx) {
// We input some silent segments. In DTX mode, the encoder will stop sending.
// However, DTX may happen after a while.
for (int i = 0; i < 30; ++i) {
- EXPECT_EQ(kOpus20msFrameSamples,
+ EXPECT_EQ(samples,
static_cast<size_t>(EncodeDecode(
- opus_encoder_, silence, kOpus20msFrameSamples, opus_decoder_,
- output_data_decode, &audio_type)));
+ opus_encoder_, silence, opus_decoder_, output_data_decode,
+ &audio_type)));
if (!dtx) {
EXPECT_GT(encoded_bytes_, 1U);
EXPECT_EQ(0, opus_encoder_->in_dtx_mode);
@@ -177,21 +192,47 @@ void OpusTest::TestDtxEffect(bool dtx) {
// When Opus is in DTX, it wakes up on a regular basis. It sends two packets,
// one of arbitrary size and the other of 1 byte, then stops sending for
- // 19 frames.
- const int cycles = 5;
- for (int j = 0; j < cycles; ++j) {
- // DTX mode is maintained 19 frames.
- for (int i = 0; i < 19; ++i) {
- EXPECT_EQ(kOpus20msFrameSamples,
+ // a certain number of frames.
+
+ // |max_dtx_frames| is the maximum number of frames Opus can stay in DTX.
+ const int max_dtx_frames = 400 / block_length_ms + 1;
+
+ // We run |kRunTimeMs| milliseconds of pure silence.
+ const int kRunTimeMs = 2000;
+
+ // We check that, after |kCheckTimeMs| milliseconds (given that the CNG in
+ // Opus needs time to adapt), the absolute values of the DTX-decoded signal
+ // are bounded by |kOutputValueBound|.
+ const int kCheckTimeMs = 1500;
+
+#if defined(OPUS_FIXED_POINT)
+ const uint16_t kOutputValueBound = 20;
+#else
+ const uint16_t kOutputValueBound = 2;
+#endif
+
+ int time = 0;
+ while (time < kRunTimeMs) {
+ // DTX mode is maintained for at most |max_dtx_frames| frames.
+ int i = 0;
+ for (; i < max_dtx_frames; ++i) {
+ time += block_length_ms;
+ EXPECT_EQ(samples,
static_cast<size_t>(EncodeDecode(
- opus_encoder_, silence, kOpus20msFrameSamples,
- opus_decoder_, output_data_decode, &audio_type)));
+ opus_encoder_, silence, opus_decoder_, output_data_decode,
+ &audio_type)));
if (dtx) {
+ if (encoded_bytes_ > 1)
+ break;
EXPECT_EQ(0U, encoded_bytes_) // Send 0 byte.
<< "Opus should have entered DTX mode.";
EXPECT_EQ(1, opus_encoder_->in_dtx_mode);
EXPECT_EQ(1, opus_decoder_->in_dtx_mode);
EXPECT_EQ(2, audio_type); // Comfort noise.
+ if (time >= kCheckTimeMs) {
+ CheckAudioBounded(output_data_decode, samples, channels_,
+ kOutputValueBound);
+ }
} else {
EXPECT_GT(encoded_bytes_, 1U);
EXPECT_EQ(0, opus_encoder_->in_dtx_mode);
@@ -200,27 +241,31 @@ void OpusTest::TestDtxEffect(bool dtx) {
}
}
- // Quit DTX after 19 frames.
- EXPECT_EQ(kOpus20msFrameSamples,
- static_cast<size_t>(EncodeDecode(
- opus_encoder_, silence, kOpus20msFrameSamples, opus_decoder_,
- output_data_decode, &audio_type)));
+ if (dtx) {
+ // With DTX, Opus must stop transmission for some time.
+ EXPECT_GT(i, 1);
+ }
- EXPECT_GT(encoded_bytes_, 1U);
+ // We expect a normal payload.
EXPECT_EQ(0, opus_encoder_->in_dtx_mode);
EXPECT_EQ(0, opus_decoder_->in_dtx_mode);
EXPECT_EQ(0, audio_type); // Speech.
// Enters DTX again immediately.
- EXPECT_EQ(kOpus20msFrameSamples,
+ time += block_length_ms;
+ EXPECT_EQ(samples,
static_cast<size_t>(EncodeDecode(
- opus_encoder_, silence, kOpus20msFrameSamples, opus_decoder_,
- output_data_decode, &audio_type)));
+ opus_encoder_, silence, opus_decoder_, output_data_decode,
+ &audio_type)));
if (dtx) {
EXPECT_EQ(1U, encoded_bytes_); // Send 1 byte.
EXPECT_EQ(1, opus_encoder_->in_dtx_mode);
EXPECT_EQ(1, opus_decoder_->in_dtx_mode);
EXPECT_EQ(2, audio_type); // Comfort noise.
+ if (time >= kCheckTimeMs) {
+ CheckAudioBounded(output_data_decode, samples, channels_,
+ kOutputValueBound);
+ }
} else {
EXPECT_GT(encoded_bytes_, 1U);
EXPECT_EQ(0, opus_encoder_->in_dtx_mode);
@@ -232,10 +277,10 @@ void OpusTest::TestDtxEffect(bool dtx) {
silence[0] = 10000;
if (dtx) {
// Verify that encoder/decoder can jump out from DTX mode.
- EXPECT_EQ(kOpus20msFrameSamples,
+ EXPECT_EQ(samples,
static_cast<size_t>(EncodeDecode(
- opus_encoder_, silence, kOpus20msFrameSamples, opus_decoder_,
- output_data_decode, &audio_type)));
+ opus_encoder_, silence, opus_decoder_, output_data_decode,
+ &audio_type)));
EXPECT_GT(encoded_bytes_, 1U);
EXPECT_EQ(0, opus_encoder_->in_dtx_mode);
EXPECT_EQ(0, opus_decoder_->in_dtx_mode);
@@ -244,7 +289,6 @@ void OpusTest::TestDtxEffect(bool dtx) {
// Free memory.
delete[] output_data_decode;
- delete[] silence;
EXPECT_EQ(0, WebRtcOpus_EncoderFree(opus_encoder_));
EXPECT_EQ(0, WebRtcOpus_DecoderFree(opus_decoder_));
}
@@ -314,10 +358,9 @@ TEST_P(OpusTest, OpusEncodeDecode) {
int16_t audio_type;
int16_t* output_data_decode = new int16_t[kOpus20msFrameSamples * channels_];
EXPECT_EQ(kOpus20msFrameSamples,
- static_cast<size_t>(EncodeDecode(
- opus_encoder_, speech_data_.GetNextBlock(),
- kOpus20msFrameSamples, opus_decoder_, output_data_decode,
- &audio_type)));
+ static_cast<size_t>(
+ EncodeDecode(opus_encoder_, speech_data_.GetNextBlock(),
+ opus_decoder_, output_data_decode, &audio_type)));
// Free memory.
delete[] output_data_decode;
@@ -374,10 +417,9 @@ TEST_P(OpusTest, OpusDecodeInit) {
int16_t audio_type;
int16_t* output_data_decode = new int16_t[kOpus20msFrameSamples * channels_];
EXPECT_EQ(kOpus20msFrameSamples,
- static_cast<size_t>(EncodeDecode(
- opus_encoder_, speech_data_.GetNextBlock(),
- kOpus20msFrameSamples, opus_decoder_, output_data_decode,
- &audio_type)));
+ static_cast<size_t>(
+ EncodeDecode(opus_encoder_, speech_data_.GetNextBlock(),
+ opus_decoder_, output_data_decode, &audio_type)));
WebRtcOpus_DecoderInit(opus_decoder_);
@@ -444,11 +486,15 @@ TEST_P(OpusTest, OpusEnableDisableDtx) {
}
TEST_P(OpusTest, OpusDtxOff) {
- TestDtxEffect(false);
+ TestDtxEffect(false, 10);
+ TestDtxEffect(false, 20);
+ TestDtxEffect(false, 40);
}
TEST_P(OpusTest, OpusDtxOn) {
- TestDtxEffect(true);
+ TestDtxEffect(true, 10);
+ TestDtxEffect(true, 20);
+ TestDtxEffect(true, 40);
}
TEST_P(OpusTest, OpusSetPacketLossRate) {
@@ -513,10 +559,9 @@ TEST_P(OpusTest, OpusDecodePlc) {
int16_t audio_type;
int16_t* output_data_decode = new int16_t[kOpus20msFrameSamples * channels_];
EXPECT_EQ(kOpus20msFrameSamples,
- static_cast<size_t>(EncodeDecode(
- opus_encoder_, speech_data_.GetNextBlock(),
- kOpus20msFrameSamples, opus_decoder_, output_data_decode,
- &audio_type)));
+ static_cast<size_t>(
+ EncodeDecode(opus_encoder_, speech_data_.GetNextBlock(),
+ opus_decoder_, output_data_decode, &audio_type)));
// Call decoder PLC.
int16_t* plc_buffer = new int16_t[kOpus20msFrameSamples * channels_];
@@ -542,10 +587,11 @@ TEST_P(OpusTest, OpusDurationEstimation) {
EXPECT_EQ(0, WebRtcOpus_DecoderCreate(&opus_decoder_, channels_));
// 10 ms. We use only first 10 ms of a 20 ms block.
- int encoded_bytes_int = WebRtcOpus_Encode(opus_encoder_,
- speech_data_.GetNextBlock(),
- kOpus10msFrameSamples,
- kMaxBytes, bitstream_);
+ auto speech_block = speech_data_.GetNextBlock();
+ int encoded_bytes_int = WebRtcOpus_Encode(
+ opus_encoder_, speech_block.data(),
+ rtc::CheckedDivExact(speech_block.size(), 2 * channels_),
+ kMaxBytes, bitstream_);
EXPECT_GE(encoded_bytes_int, 0);
EXPECT_EQ(kOpus10msFrameSamples,
static_cast<size_t>(WebRtcOpus_DurationEst(
@@ -553,10 +599,11 @@ TEST_P(OpusTest, OpusDurationEstimation) {
static_cast<size_t>(encoded_bytes_int))));
// 20 ms
- encoded_bytes_int = WebRtcOpus_Encode(opus_encoder_,
- speech_data_.GetNextBlock(),
- kOpus20msFrameSamples,
- kMaxBytes, bitstream_);
+ speech_block = speech_data_.GetNextBlock();
+ encoded_bytes_int = WebRtcOpus_Encode(
+ opus_encoder_, speech_block.data(),
+ rtc::CheckedDivExact(speech_block.size(), channels_),
+ kMaxBytes, bitstream_);
EXPECT_GE(encoded_bytes_int, 0);
EXPECT_EQ(kOpus20msFrameSamples,
static_cast<size_t>(WebRtcOpus_DurationEst(
@@ -594,10 +641,11 @@ TEST_P(OpusTest, OpusDecodeRepacketized) {
OpusRepacketizer* rp = opus_repacketizer_create();
for (int idx = 0; idx < kPackets; idx++) {
- encoded_bytes_ = WebRtcOpus_Encode(opus_encoder_,
- speech_data_.GetNextBlock(),
- kOpus20msFrameSamples, kMaxBytes,
- bitstream_);
+ auto speech_block = speech_data_.GetNextBlock();
+ encoded_bytes_ =
+ WebRtcOpus_Encode(opus_encoder_, speech_block.data(),
+ rtc::CheckedDivExact(speech_block.size(), channels_),
+ kMaxBytes, bitstream_);
EXPECT_EQ(OPUS_OK, opus_repacketizer_cat(rp, bitstream_, encoded_bytes_));
}
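The test refactor above passes interleaved audio as an rtc::ArrayView and recovers the per-channel sample count with rtc::CheckedDivExact, which CHECK-fails if the division leaves a remainder. A sketch of that arithmetic, assuming the webrtc/base helpers used in the hunks (the array_view.h include path is an assumption):

#include "webrtc/base/array_view.h"
#include "webrtc/base/checks.h"

// Sketch: an ArrayView over interleaved audio has
// size() == samples_per_channel * channels, so the count handed to
// WebRtcOpus_Encode is an exact division of the two.
size_t SamplesPerChannel(rtc::ArrayView<const int16_t> interleaved,
                         size_t channels) {
  return rtc::CheckedDivExact(interleaved.size(), channels);
}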
diff --git a/webrtc/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.cc b/webrtc/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.cc
index 7d07b23a3c..834c070073 100644
--- a/webrtc/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.cc
+++ b/webrtc/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.cc
@@ -8,10 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/codecs/pcm16b/include/audio_decoder_pcm16b.h"
+#include "webrtc/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.h"
#include "webrtc/base/checks.h"
-#include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h"
+#include "webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.h"
namespace webrtc {
diff --git a/webrtc/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.h b/webrtc/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.h
new file mode 100644
index 0000000000..692cb94282
--- /dev/null
+++ b/webrtc/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.h
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_PCM16B_AUDIO_DECODER_PCM16B_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_PCM16B_AUDIO_DECODER_PCM16B_H_
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
+
+namespace webrtc {
+
+class AudioDecoderPcm16B final : public AudioDecoder {
+ public:
+ explicit AudioDecoderPcm16B(size_t num_channels);
+ void Reset() override;
+ int PacketDuration(const uint8_t* encoded, size_t encoded_len) const override;
+ size_t Channels() const override;
+
+ protected:
+ int DecodeInternal(const uint8_t* encoded,
+ size_t encoded_len,
+ int sample_rate_hz,
+ int16_t* decoded,
+ SpeechType* speech_type) override;
+
+ private:
+ const size_t num_channels_;
+ RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoderPcm16B);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_PCM16B_AUDIO_DECODER_PCM16B_H_
diff --git a/webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.cc b/webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.cc
index 6c30c7ff62..f4d4022302 100644
--- a/webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.cc
+++ b/webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.cc
@@ -8,11 +8,11 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/codecs/pcm16b/include/audio_encoder_pcm16b.h"
+#include "webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.h"
#include "webrtc/base/checks.h"
#include "webrtc/common_types.h"
-#include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h"
+#include "webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.h"
namespace webrtc {
@@ -22,7 +22,7 @@ size_t AudioEncoderPcm16B::EncodeCall(const int16_t* audio,
return WebRtcPcm16b_Encode(audio, input_len, encoded);
}
-int AudioEncoderPcm16B::BytesPerSample() const {
+size_t AudioEncoderPcm16B::BytesPerSample() const {
return 2;
}
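The return type of BytesPerSample() becomes size_t above; for PCM16B the value is always 2, because each 16-bit sample is stored as two big-endian bytes and the encoded length is exactly twice the sample count (see the pcm16b.h documentation later in this patch). The function below is an illustrative restatement of that contract, not the library's implementation:

#include <stddef.h>
#include <stdint.h>

// Sketch: big-endian PCM16B packing, yielding 2 * len output bytes.
size_t EncodePcm16BigEndian(const int16_t* speech, size_t len,
                            uint8_t* encoded) {
  for (size_t i = 0; i < len; ++i) {
    uint16_t s = (uint16_t)speech[i];
    encoded[2 * i] = (uint8_t)(s >> 8);        // High byte first.
    encoded[2 * i + 1] = (uint8_t)(s & 0xFF);  // Then low byte.
  }
  return 2 * len;
}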
diff --git a/webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.h b/webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.h
new file mode 100644
index 0000000000..68ca2da77e
--- /dev/null
+++ b/webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_PCM16B_AUDIO_ENCODER_PCM16B_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_PCM16B_AUDIO_ENCODER_PCM16B_H_
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.h"
+
+namespace webrtc {
+
+struct CodecInst;
+
+class AudioEncoderPcm16B final : public AudioEncoderPcm {
+ public:
+ struct Config : public AudioEncoderPcm::Config {
+ public:
+ Config() : AudioEncoderPcm::Config(107), sample_rate_hz(8000) {}
+ bool IsOk() const;
+
+ int sample_rate_hz;
+ };
+
+ explicit AudioEncoderPcm16B(const Config& config)
+ : AudioEncoderPcm(config, config.sample_rate_hz) {}
+ explicit AudioEncoderPcm16B(const CodecInst& codec_inst);
+
+ protected:
+ size_t EncodeCall(const int16_t* audio,
+ size_t input_len,
+ uint8_t* encoded) override;
+
+ size_t BytesPerSample() const override;
+
+ private:
+ RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderPcm16B);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_PCM16B_AUDIO_ENCODER_PCM16B_H_
diff --git a/webrtc/modules/audio_coding/codecs/pcm16b/include/audio_decoder_pcm16b.h b/webrtc/modules/audio_coding/codecs/pcm16b/include/audio_decoder_pcm16b.h
deleted file mode 100644
index 96131c4d21..0000000000
--- a/webrtc/modules/audio_coding/codecs/pcm16b/include/audio_decoder_pcm16b.h
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_PCM16B_INCLUDE_AUDIO_DECODER_PCM16B_H_
-#define WEBRTC_MODULES_AUDIO_CODING_CODECS_PCM16B_INCLUDE_AUDIO_DECODER_PCM16B_H_
-
-#include "webrtc/base/constructormagic.h"
-#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
-
-namespace webrtc {
-
-class AudioDecoderPcm16B final : public AudioDecoder {
- public:
- explicit AudioDecoderPcm16B(size_t num_channels);
- void Reset() override;
- int PacketDuration(const uint8_t* encoded, size_t encoded_len) const override;
- size_t Channels() const override;
-
- protected:
- int DecodeInternal(const uint8_t* encoded,
- size_t encoded_len,
- int sample_rate_hz,
- int16_t* decoded,
- SpeechType* speech_type) override;
-
- private:
- const size_t num_channels_;
- RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoderPcm16B);
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_PCM16B_INCLUDE_AUDIO_DECODER_PCM16B_H_
diff --git a/webrtc/modules/audio_coding/codecs/pcm16b/include/audio_encoder_pcm16b.h b/webrtc/modules/audio_coding/codecs/pcm16b/include/audio_encoder_pcm16b.h
deleted file mode 100644
index e03da213df..0000000000
--- a/webrtc/modules/audio_coding/codecs/pcm16b/include/audio_encoder_pcm16b.h
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_PCM16B_INCLUDE_AUDIO_ENCODER_PCM16B_H_
-#define WEBRTC_MODULES_AUDIO_CODING_CODECS_PCM16B_INCLUDE_AUDIO_ENCODER_PCM16B_H_
-
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/audio_coding/codecs/g711/include/audio_encoder_pcm.h"
-
-namespace webrtc {
-
-struct CodecInst;
-
-class AudioEncoderPcm16B final : public AudioEncoderPcm {
- public:
- struct Config : public AudioEncoderPcm::Config {
- public:
- Config() : AudioEncoderPcm::Config(107), sample_rate_hz(8000) {}
- bool IsOk() const;
-
- int sample_rate_hz;
- };
-
- explicit AudioEncoderPcm16B(const Config& config)
- : AudioEncoderPcm(config, config.sample_rate_hz) {}
- explicit AudioEncoderPcm16B(const CodecInst& codec_inst);
-
- protected:
- size_t EncodeCall(const int16_t* audio,
- size_t input_len,
- uint8_t* encoded) override;
-
- int BytesPerSample() const override;
-
-private:
- RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderPcm16B);
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_PCM16B_INCLUDE_AUDIO_ENCODER_PCM16B_H_
diff --git a/webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h b/webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h
deleted file mode 100644
index d86a65db49..0000000000
--- a/webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_PCM16B_MAIN_INCLUDE_PCM16B_H_
-#define WEBRTC_MODULES_AUDIO_CODING_CODECS_PCM16B_MAIN_INCLUDE_PCM16B_H_
-/*
- * Define the fixpoint numeric formats
- */
-
-#include <stddef.h>
-
-#include "webrtc/typedefs.h"
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-/****************************************************************************
- * WebRtcPcm16b_Encode(...)
- *
- * "Encode" a sample vector to 16 bit linear (Encoded standard is big endian)
- *
- * Input:
- * - speech : Input speech vector
- * - len : Number of samples in speech vector
- *
- * Output:
- * - encoded : Encoded data vector (big endian 16 bit)
- *
- * Returned value : Length (in bytes) of coded data.
- * Always equal to twice the len input parameter.
- */
-
-size_t WebRtcPcm16b_Encode(const int16_t* speech,
- size_t len,
- uint8_t* encoded);
-
-/****************************************************************************
- * WebRtcPcm16b_Decode(...)
- *
- * "Decode" a vector to 16 bit linear (Encoded standard is big endian)
- *
- * Input:
- * - encoded : Encoded data vector (big endian 16 bit)
- * - len : Number of bytes in encoded
- *
- * Output:
- * - speech : Decoded speech vector
- *
- * Returned value : Samples in speech
- */
-
-size_t WebRtcPcm16b_Decode(const uint8_t* encoded,
- size_t len,
- int16_t* speech);
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* PCM16B */
diff --git a/webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.gypi b/webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.gypi
index 3dc2f772c1..d0dd21bb60 100644
--- a/webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.gypi
+++ b/webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.gypi
@@ -15,23 +15,13 @@
'audio_encoder_interface',
'g711',
],
- 'include_dirs': [
- 'include',
- '<(webrtc_root)',
- ],
- 'direct_dependent_settings': {
- 'include_dirs': [
- 'include',
- '<(webrtc_root)',
- ],
- },
'sources': [
- 'include/audio_decoder_pcm16b.h',
- 'include/audio_encoder_pcm16b.h',
- 'include/pcm16b.h',
'audio_decoder_pcm16b.cc',
+ 'audio_decoder_pcm16b.h',
'audio_encoder_pcm16b.cc',
+ 'audio_encoder_pcm16b.h',
'pcm16b.c',
+ 'pcm16b.h',
],
},
], # targets
diff --git a/webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.h b/webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.h
new file mode 100644
index 0000000000..f96e741c46
--- /dev/null
+++ b/webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_PCM16B_PCM16B_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_PCM16B_PCM16B_H_
+/*
+ * Define the fixpoint numeric formats
+ */
+
+#include <stddef.h>
+
+#include "webrtc/typedefs.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/****************************************************************************
+ * WebRtcPcm16b_Encode(...)
+ *
+ * "Encode" a sample vector to 16 bit linear (Encoded standard is big endian)
+ *
+ * Input:
+ * - speech : Input speech vector
+ * - len : Number of samples in speech vector
+ *
+ * Output:
+ * - encoded : Encoded data vector (big endian 16 bit)
+ *
+ * Returned value : Length (in bytes) of coded data.
+ * Always equal to twice the len input parameter.
+ */
+
+size_t WebRtcPcm16b_Encode(const int16_t* speech,
+ size_t len,
+ uint8_t* encoded);
+
+/****************************************************************************
+ * WebRtcPcm16b_Decode(...)
+ *
+ * "Decode" a vector to 16 bit linear (Encoded standard is big endian)
+ *
+ * Input:
+ * - encoded : Encoded data vector (big endian 16 bit)
+ * - len : Number of bytes in encoded
+ *
+ * Output:
+ * - speech : Decoded speech vector
+ *
+ * Returned value : Samples in speech
+ */
+
+size_t WebRtcPcm16b_Decode(const uint8_t* encoded,
+ size_t len,
+ int16_t* speech);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_PCM16B_PCM16B_H_ */
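A minimal round-trip sketch for the two functions declared above; the buffer sizes are illustrative and the include path follows the header's new location:

    #include <stddef.h>
    #include <stdint.h>
    #include "webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.h"

    void Pcm16bRoundTrip() {
      int16_t speech[160] = {0};   // 10 ms of 16 kHz mono audio
      uint8_t encoded[2 * 160];    // encoding always yields 2 * len bytes
      int16_t decoded[160];

      size_t bytes = WebRtcPcm16b_Encode(speech, 160, encoded);       // 320
      size_t samples = WebRtcPcm16b_Decode(encoded, bytes, decoded);  // 160
      (void)samples;
    }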
diff --git a/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc b/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc
index a19d194e59..7ef1ce096b 100644
--- a/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc
+++ b/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc
@@ -32,7 +32,7 @@ int AudioEncoderCopyRed::SampleRateHz() const {
return speech_encoder_->SampleRateHz();
}
-int AudioEncoderCopyRed::NumChannels() const {
+size_t AudioEncoderCopyRed::NumChannels() const {
return speech_encoder_->NumChannels();
}
@@ -54,12 +54,11 @@ int AudioEncoderCopyRed::GetTargetBitrate() const {
AudioEncoder::EncodedInfo AudioEncoderCopyRed::EncodeInternal(
uint32_t rtp_timestamp,
- const int16_t* audio,
+ rtc::ArrayView<const int16_t> audio,
size_t max_encoded_bytes,
uint8_t* encoded) {
- EncodedInfo info = speech_encoder_->Encode(
- rtp_timestamp, audio, static_cast<size_t>(SampleRateHz() / 100),
- max_encoded_bytes, encoded);
+ EncodedInfo info =
+ speech_encoder_->Encode(rtp_timestamp, audio, max_encoded_bytes, encoded);
RTC_CHECK_GE(max_encoded_bytes,
info.encoded_bytes + secondary_info_.encoded_bytes);
RTC_CHECK(info.redundant.empty()) << "Cannot use nested redundant encoders.";
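The hunk above replaces the (pointer, sample count) calling convention with rtc::ArrayView, so the sample count travels inside the view instead of being derived from SampleRateHz(). A hedged call-site sketch; |enc|, |out|, and |max_bytes| are assumed locals, and rtc::ArrayView is assumed to come from webrtc/base/array_view.h:

    AudioEncoder::EncodedInfo EncodeOnce(AudioEncoder* enc,
                                         uint32_t rtp_timestamp,
                                         const int16_t* audio,
                                         size_t num_samples,
                                         size_t max_bytes,
                                         uint8_t* out) {
      // Old call: enc->Encode(rtp_timestamp, audio, num_samples, max_bytes, out);
      rtc::ArrayView<const int16_t> view(audio, num_samples);
      return enc->Encode(rtp_timestamp, view, max_bytes, out);
    }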
diff --git a/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.h b/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.h
index 7837010605..2f53765389 100644
--- a/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.h
+++ b/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.h
@@ -38,13 +38,13 @@ class AudioEncoderCopyRed final : public AudioEncoder {
size_t MaxEncodedBytes() const override;
int SampleRateHz() const override;
- int NumChannels() const override;
+ size_t NumChannels() const override;
int RtpTimestampRateHz() const override;
size_t Num10MsFramesInNextPacket() const override;
size_t Max10MsFramesInAPacket() const override;
int GetTargetBitrate() const override;
EncodedInfo EncodeInternal(uint32_t rtp_timestamp,
- const int16_t* audio,
+ rtc::ArrayView<const int16_t> audio,
size_t max_encoded_bytes,
uint8_t* encoded) override;
void Reset() override;
diff --git a/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc b/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc
index cb50652183..22601b6597 100644
--- a/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc
+++ b/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc
@@ -42,7 +42,7 @@ class AudioEncoderCopyRedTest : public ::testing::Test {
config.speech_encoder = &mock_encoder_;
red_.reset(new AudioEncoderCopyRed(config));
memset(audio_, 0, sizeof(audio_));
- EXPECT_CALL(mock_encoder_, NumChannels()).WillRepeatedly(Return(1));
+ EXPECT_CALL(mock_encoder_, NumChannels()).WillRepeatedly(Return(1U));
EXPECT_CALL(mock_encoder_, SampleRateHz())
.WillRepeatedly(Return(sample_rate_hz_));
EXPECT_CALL(mock_encoder_, MaxEncodedBytes())
@@ -60,8 +60,10 @@ class AudioEncoderCopyRedTest : public ::testing::Test {
void Encode() {
ASSERT_TRUE(red_.get() != NULL);
- encoded_info_ = red_->Encode(timestamp_, audio_, num_audio_samples_10ms,
- encoded_.size(), &encoded_[0]);
+ encoded_info_ = red_->Encode(
+ timestamp_,
+ rtc::ArrayView<const int16_t>(audio_, num_audio_samples_10ms),
+ encoded_.size(), &encoded_[0]);
timestamp_ += num_audio_samples_10ms;
}
@@ -83,7 +85,7 @@ class MockEncodeHelper {
}
AudioEncoder::EncodedInfo Encode(uint32_t timestamp,
- const int16_t* audio,
+ rtc::ArrayView<const int16_t> audio,
size_t max_encoded_bytes,
uint8_t* encoded) {
if (write_payload_) {
@@ -108,8 +110,8 @@ TEST_F(AudioEncoderCopyRedTest, CheckSampleRatePropagation) {
}
TEST_F(AudioEncoderCopyRedTest, CheckNumChannelsPropagation) {
- EXPECT_CALL(mock_encoder_, NumChannels()).WillOnce(Return(17));
- EXPECT_EQ(17, red_->NumChannels());
+ EXPECT_CALL(mock_encoder_, NumChannels()).WillOnce(Return(17U));
+ EXPECT_EQ(17U, red_->NumChannels());
}
TEST_F(AudioEncoderCopyRedTest, CheckFrameSizePropagation) {
diff --git a/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.cc b/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.cc
index 3395721f8b..3dc665482a 100644
--- a/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.cc
+++ b/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.cc
@@ -11,6 +11,7 @@
#include "webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.h"
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/format_macros.h"
#include "webrtc/test/testsupport/fileutils.h"
using ::std::tr1::get;
@@ -23,8 +24,10 @@ AudioCodecSpeedTest::AudioCodecSpeedTest(int block_duration_ms,
: block_duration_ms_(block_duration_ms),
input_sampling_khz_(input_sampling_khz),
output_sampling_khz_(output_sampling_khz),
- input_length_sample_(block_duration_ms_ * input_sampling_khz_),
- output_length_sample_(block_duration_ms_ * output_sampling_khz_),
+ input_length_sample_(
+ static_cast<size_t>(block_duration_ms_ * input_sampling_khz_)),
+ output_length_sample_(
+ static_cast<size_t>(block_duration_ms_ * output_sampling_khz_)),
data_pointer_(0),
loop_length_samples_(0),
max_bytes_(0),
@@ -65,8 +68,7 @@ void AudioCodecSpeedTest::SetUp() {
memcpy(&in_data_[loop_length_samples_], &in_data_[0],
input_length_sample_ * channels_ * sizeof(int16_t));
- max_bytes_ =
- static_cast<size_t>(input_length_sample_ * channels_ * sizeof(int16_t));
+ max_bytes_ = input_length_sample_ * channels_ * sizeof(int16_t);
out_data_.reset(new int16_t[output_length_sample_ * channels_]);
bit_stream_.reset(new uint8_t[max_bytes_]);
@@ -98,7 +100,7 @@ void AudioCodecSpeedTest::EncodeDecode(size_t audio_duration_sec) {
size_t time_now_ms = 0;
float time_ms;
- printf("Coding %d kHz-sampled %d-channel audio at %d bps ...\n",
+ printf("Coding %d kHz-sampled %" PRIuS "-channel audio at %d bps ...\n",
input_sampling_khz_, channels_, bit_rate_);
while (time_now_ms < audio_duration_sec * 1000) {
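PRIuS, pulled in above via webrtc/base/format_macros.h, expands to the printf length specifier for size_t on each platform; plain %d would be wrong on LP64 targets. A minimal sketch:

    #include <cstdio>
    #include "webrtc/base/format_macros.h"

    void PrintChannels(size_t channels) {
      printf("Coding %" PRIuS "-channel audio\n", channels);
    }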
diff --git a/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.h b/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.h
index 2736c2912e..fb7b3e5b1e 100644
--- a/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.h
+++ b/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.h
@@ -20,7 +20,8 @@ namespace webrtc {
// Define coding parameter as
// <channels, bit_rate, file_name, extension, if_save_output>.
-typedef std::tr1::tuple<int, int, std::string, std::string, bool> coding_param;
+typedef std::tr1::tuple<size_t, int, std::string, std::string, bool>
+ coding_param;
class AudioCodecSpeedTest : public testing::TestWithParam<coding_param> {
protected:
@@ -55,10 +56,10 @@ class AudioCodecSpeedTest : public testing::TestWithParam<coding_param> {
int output_sampling_khz_;
// Number of samples-per-channel in a frame.
- int input_length_sample_;
+ size_t input_length_sample_;
// Expected output number of samples-per-channel in a frame.
- int output_length_sample_;
+ size_t output_length_sample_;
rtc::scoped_ptr<int16_t[]> in_data_;
rtc::scoped_ptr<int16_t[]> out_data_;
@@ -74,7 +75,7 @@ class AudioCodecSpeedTest : public testing::TestWithParam<coding_param> {
float decoding_time_ms_;
FILE* out_file_;
- int channels_;
+ size_t channels_;
// Bit rate is in bit-per-second.
int bit_rate_;
diff --git a/webrtc/modules/audio_coding/include/audio_coding_module.h b/webrtc/modules/audio_coding/include/audio_coding_module.h
new file mode 100644
index 0000000000..9e7991f22f
--- /dev/null
+++ b/webrtc/modules/audio_coding/include/audio_coding_module.h
@@ -0,0 +1,746 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_INCLUDE_AUDIO_CODING_MODULE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_INCLUDE_AUDIO_CODING_MODULE_H_
+
+#include <string>
+#include <vector>
+
+#include "webrtc/base/optional.h"
+#include "webrtc/common_types.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module_typedefs.h"
+#include "webrtc/modules/audio_coding/neteq/include/neteq.h"
+#include "webrtc/modules/include/module.h"
+#include "webrtc/system_wrappers/include/clock.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// forward declarations
+struct CodecInst;
+struct WebRtcRTPHeader;
+class AudioDecoder;
+class AudioEncoder;
+class AudioFrame;
+class RTPFragmentationHeader;
+
+#define WEBRTC_10MS_PCM_AUDIO 960 // 16 bits super wideband 48 kHz
+
+// Callback class used for sending data ready to be packetized
+class AudioPacketizationCallback {
+ public:
+ virtual ~AudioPacketizationCallback() {}
+
+ virtual int32_t SendData(FrameType frame_type,
+ uint8_t payload_type,
+ uint32_t timestamp,
+ const uint8_t* payload_data,
+ size_t payload_len_bytes,
+ const RTPFragmentationHeader* fragmentation) = 0;
+};
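A sketch of a concrete packetization callback matching the pure-virtual declaration above; the class name and the RTP hand-off are hypothetical:

    class RtpPacketizer : public AudioPacketizationCallback {
     public:
      int32_t SendData(FrameType frame_type,
                       uint8_t payload_type,
                       uint32_t timestamp,
                       const uint8_t* payload_data,
                       size_t payload_len_bytes,
                       const RTPFragmentationHeader* fragmentation) override {
        // Hand |payload_data| to the RTP stack here (hypothetical).
        return 0;  // 0 for success; a negative value signals an error
      }
    };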
+
+// Callback class used for reporting VAD decision
+class ACMVADCallback {
+ public:
+ virtual ~ACMVADCallback() {}
+
+ virtual int32_t InFrameType(FrameType frame_type) = 0;
+};
+
+class AudioCodingModule {
+ protected:
+ AudioCodingModule() {}
+
+ public:
+ struct Config {
+ Config() : id(0), neteq_config(), clock(Clock::GetRealTimeClock()) {
+ // Post-decode VAD is disabled by default in NetEq, however, Audio
+ // Conference Mixer relies on VAD decisions and fails without them.
+ neteq_config.enable_post_decode_vad = true;
+ }
+
+ int id;
+ NetEq::Config neteq_config;
+ Clock* clock;
+ };
+
+ ///////////////////////////////////////////////////////////////////////////
+ // Creation and destruction of an ACM.
+ //
+ // The second method is used for testing where a simulated clock can be
+ // injected into ACM. ACM takes ownership of the clock object and
+ // deletes it when destroyed.
+ //
+ static AudioCodingModule* Create(int id);
+ static AudioCodingModule* Create(int id, Clock* clock);
+ static AudioCodingModule* Create(const Config& config);
+ virtual ~AudioCodingModule() = default;
+
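A minimal creation sketch using the Config-based factory; the id is illustrative, and rtc::scoped_ptr is assumed from webrtc/base/scoped_ptr.h:

    AudioCodingModule::Config config;
    config.id = 1;  // arbitrary module id, used for tracing
    rtc::scoped_ptr<AudioCodingModule> acm(AudioCodingModule::Create(config));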
+ ///////////////////////////////////////////////////////////////////////////
+ // Utility functions
+ //
+
+ ///////////////////////////////////////////////////////////////////////////
+ // int NumberOfCodecs()
+ // Returns number of supported codecs.
+ //
+ // Return value:
+ // number of supported codecs.
+ //
+ static int NumberOfCodecs();
+
+ ///////////////////////////////////////////////////////////////////////////
+ // int32_t Codec()
+ // Get the supported codec with the given list number.
+ //
+ // Input:
+ // -list_id : list number.
+ //
+ // Output:
+ // -codec : a structure where the parameters of the codec,
+ // given by the list number, are written to.
+ //
+ // Return value:
+ // -1 if the list number (list_id) is invalid.
+ // 0 if succeeded.
+ //
+ static int Codec(int list_id, CodecInst* codec);
+
+ ///////////////////////////////////////////////////////////////////////////
+ // int32_t Codec()
+ // Get supported codec with the given codec name, sampling frequency, and
+ // a given number of channels.
+ //
+ // Input:
+ // -payload_name : name of the codec.
+ // -sampling_freq_hz : sampling frequency of the codec. Note! for RED
+ // a sampling frequency of -1 is a valid input.
+ // -channels : number of channels (1 - mono, 2 - stereo).
+ //
+ // Output:
+ // -codec : a structure where the function returns the
+ // default parameters of the codec.
+ //
+ // Return value:
+ // -1 if no codec matches the given parameters.
+ // 0 if succeeded.
+ //
+ static int Codec(const char* payload_name, CodecInst* codec,
+ int sampling_freq_hz, size_t channels);
+
+ ///////////////////////////////////////////////////////////////////////////
+ // int32_t Codec()
+ //
+ // Returns the list number of the given codec name, sampling frequency, and
+ // a given number of channels.
+ //
+ // Input:
+ // -payload_name : name of the codec.
+ // -sampling_freq_hz : sampling frequency of the codec. Note! for RED
+ // a sampling frequency of -1 is a valid input.
+ // -channels : number of channels (1 - mono, 2 - stereo).
+ //
+ // Return value:
+ // if the codec is found, the index of the codec in the list,
+ // -1 if the codec is not found.
+ //
+ static int Codec(const char* payload_name, int sampling_freq_hz,
+ size_t channels);
+
+ ///////////////////////////////////////////////////////////////////////////
+ // bool IsCodecValid()
+ // Checks the validity of the parameters of the given codec.
+ //
+ // Input:
+ // -codec : the structure which keeps the parameters of the
+ // codec.
+ //
+ // Return value:
+ // true if the parameters are valid,
+ // false if any parameter is not valid.
+ //
+ static bool IsCodecValid(const CodecInst& codec);
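A lookup sketch combining the static helpers above; the Opus parameters are illustrative:

    CodecInst codec;
    if (AudioCodingModule::Codec("opus", &codec, 48000, 2) == 0 &&
        AudioCodingModule::IsCodecValid(codec)) {
      // |codec| now holds the default Opus parameters.
    }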
+
+ ///////////////////////////////////////////////////////////////////////////
+ // Sender
+ //
+
+ ///////////////////////////////////////////////////////////////////////////
+ // int32_t RegisterSendCodec()
+ // Registers a codec, specified by |send_codec|, as sending codec.
+ // This API can be called multiple times to register codecs. The last codec
+ // registered overwrites the previous ones.
+ // The API can also be used to change payload type for CNG and RED, which are
+ // initially registered with default payload types.
+ // Note that registering CNG and RED won't overwrite speech codecs.
+ // This API can be called to set/change the send payload-type, frame-size
+ // or encoding rate (if applicable for the codec).
+ //
+ // Note: If a stereo codec is registered as send codec, VAD/DTX will
+ // automatically be turned off, since it is not supported for stereo sending.
+ //
+ // Note: If a secondary encoder is already registered, and the new send-codec
+ // has a sampling rate that does not match the secondary encoder, the
+ // secondary encoder will be unregistered.
+ //
+ // Input:
+ // -send_codec : Parameters of the codec to be registered, c.f.
+ // common_types.h for the definition of
+ // CodecInst.
+ //
+ // Return value:
+ // -1 if failed to initialize,
+ // 0 if succeeded.
+ //
+ virtual int32_t RegisterSendCodec(const CodecInst& send_codec) = 0;
+
+ // Registers |external_speech_encoder| as encoder. The new encoder will
+ // replace any previously registered speech encoder (internal or external).
+ virtual void RegisterExternalSendCodec(
+ AudioEncoder* external_speech_encoder) = 0;
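A sketch registering a send codec, assuming |acm| was created as shown earlier:

    CodecInst send_codec;
    if (AudioCodingModule::Codec("opus", &send_codec, 48000, 2) == 0) {
      if (acm->RegisterSendCodec(send_codec) != 0) {
        // Handle registration failure.
      }
    }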
+
+ ///////////////////////////////////////////////////////////////////////////
+ // int32_t SendCodec()
+ // Get parameters for the codec currently registered as send codec.
+ //
+ // Return value:
+ // The send codec, or nothing if we don't have one
+ //
+ virtual rtc::Optional<CodecInst> SendCodec() const = 0;
+
+ ///////////////////////////////////////////////////////////////////////////
+ // int32_t SendFrequency()
+ // Get the sampling frequency of the current encoder in Hertz.
+ //
+ // Return value:
+ // positive; sampling frequency [Hz] of the current encoder.
+ // -1 if an error has happened.
+ //
+ virtual int32_t SendFrequency() const = 0;
+
+ ///////////////////////////////////////////////////////////////////////////
+ // Sets the bitrate to the specified value in bits/sec. If the value is not
+ // supported by the codec, it will choose another appropriate value.
+ virtual void SetBitRate(int bitrate_bps) = 0;
+
+ // int32_t RegisterTransportCallback()
+ // Register a transport callback which will be called to deliver
+ // the encoded buffers whenever Process() is called and a
+ // bit-stream is ready.
+ //
+ // Input:
+ // -transport : pointer to the callback class
+ // transport->SendData() is called whenever
+ // Process() is called and bit-stream is ready
+ // to deliver.
+ //
+ // Return value:
+ // -1 if the transport callback could not be registered
+ // 0 if registration is successful.
+ //
+ virtual int32_t RegisterTransportCallback(
+ AudioPacketizationCallback* transport) = 0;
+
+ ///////////////////////////////////////////////////////////////////////////
+ // int32_t Add10MsData()
+ // Add 10 ms of raw (PCM) audio data and encode it. If the sampling
+ // frequency of the audio does not match the sampling frequency of the
+ // current encoder ACM will resample the audio. If an encoded packet was
+ // produced, it will be delivered via the callback object registered using
+ // RegisterTransportCallback, and the return value from this function will
+ // be the number of bytes encoded.
+ //
+ // Input:
+ // -audio_frame : the input audio frame, containing raw audio
+ // sampling frequency etc.,
+ // c.f. module_common_types.h for definition of
+ // AudioFrame.
+ //
+ // Return value:
+ // >= 0 number of bytes encoded.
+ // -1 some error occurred.
+ //
+ virtual int32_t Add10MsData(const AudioFrame& audio_frame) = 0;
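A capture-side feed sketch, assuming |acm| from earlier; the AudioFrame field names are assumed from module_common_types.h:

    AudioFrame frame;
    frame.sample_rate_hz_ = 16000;
    frame.num_channels_ = 1;
    frame.samples_per_channel_ = 160;  // exactly 10 ms at 16 kHz
    // ... copy 160 captured samples into frame.data_ ...
    if (acm->Add10MsData(frame) < 0) {
      // Encoding error.
    }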
+
+ ///////////////////////////////////////////////////////////////////////////
+ // (RED) Redundant Coding
+ //
+
+ ///////////////////////////////////////////////////////////////////////////
+ // int32_t SetREDStatus()
+ // Configure RED status, i.e. on/off.
+ //
+ // RFC 2198 describes a solution which has a single payload type which
+ // signifies a packet with redundancy. That packet then becomes a container,
+ // encapsulating multiple payloads into a single RTP packet.
+ // Such a scheme is flexible, since any amount of redundancy may be
+ // encapsulated within a single packet. There is, however, a small overhead
+ // since each encapsulated payload must be preceded by a header indicating
+ // the type of data enclosed.
+ //
+ // Input:
+ // -enable_red : if true RED is enabled, otherwise RED is
+ // disabled.
+ //
+ // Return value:
+ // -1 if failed to set RED status,
+ // 0 if succeeded.
+ //
+ virtual int32_t SetREDStatus(bool enable_red) = 0;
+
+ ///////////////////////////////////////////////////////////////////////////
+ // bool REDStatus()
+ // Get RED status
+ //
+ // Return value:
+ // true if RED is enabled,
+ // false if RED is disabled.
+ //
+ virtual bool REDStatus() const = 0;
+
+ ///////////////////////////////////////////////////////////////////////////
+ // (FEC) Forward Error Correction (codec internal)
+ //
+
+ ///////////////////////////////////////////////////////////////////////////
+ // int32_t SetCodecFEC()
+ // Configures codec-internal FEC status, i.e. on/off. Has no effect on codecs
+ // that do not provide internal FEC.
+ //
+ // Input:
+ // -enable_fec : if true FEC will be enabled otherwise the FEC is
+ // disabled.
+ //
+ // Return value:
+ // -1 if failed, or the codec does not support FEC
+ // 0 if succeeded.
+ //
+ virtual int SetCodecFEC(bool enable_codec_fec) = 0;
+
+ ///////////////////////////////////////////////////////////////////////////
+ // bool CodecFEC()
+ // Gets status of codec internal FEC.
+ //
+ // Return value:
+ // true if FEC is enabled,
+ // false if FEC is disabled.
+ //
+ virtual bool CodecFEC() const = 0;
+
+ ///////////////////////////////////////////////////////////////////////////
+ // int SetPacketLossRate()
+ // Sets the expected packet loss rate for encoding. Some encoders adapt their
+ // encoding to the expected loss rate to make the stream less sensitive to
+ // packet losses, e.g. through FEC. Has no effect on codecs that do not
+ // provide such encoding.
+ //
+ // Input:
+ // -packet_loss_rate : expected packet loss rate (0 -- 100 inclusive).
+ //
+ // Return value
+ // -1 if failed to set packet loss rate,
+ // 0 if succeeded.
+ //
+ virtual int SetPacketLossRate(int packet_loss_rate) = 0;
+
+ ///////////////////////////////////////////////////////////////////////////
+ // (VAD) Voice Activity Detection
+ //
+
+ ///////////////////////////////////////////////////////////////////////////
+ // int32_t SetVAD()
+ // If DTX is enabled and the codec does not have internal DTX/VAD,
+ // WebRtc VAD will be automatically enabled and |enable_vad| is ignored.
+ //
+ // If DTX is disabled but VAD is enabled, no DTX packets are sent,
+ // regardless of whether the codec has internal DTX/VAD or not. In this
+ // case, WebRtc VAD is running to label frames as active/inactive.
+ //
+ // NOTE! VAD/DTX is not supported when sending stereo.
+ //
+ // Inputs:
+ // -enable_dtx : if true DTX is enabled,
+ // otherwise DTX is disabled.
+ // -enable_vad : if true VAD is enabled,
+ // otherwise VAD is disabled.
+ // -vad_mode : determines the aggressiveness of VAD. A more
+ // aggressive mode results in more frames labeled
+ // as inactive, c.f. definition of
+ // ACMVADMode in audio_coding_module_typedefs.h
+ // for valid values.
+ //
+ // Return value:
+ // -1 if failed to set up VAD/DTX,
+ // 0 if succeeded.
+ //
+ virtual int32_t SetVAD(const bool enable_dtx = true,
+ const bool enable_vad = false,
+ const ACMVADMode vad_mode = VADNormal) = 0;
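A configuration sketch enabling DTX with an aggressive VAD mode; VADVeryAggr comes from audio_coding_module_typedefs.h, added later in this patch:

    if (acm->SetVAD(true /* enable_dtx */, true /* enable_vad */,
                    VADVeryAggr) != 0) {
      // Failed to configure VAD/DTX (e.g. a stereo send codec is registered).
    }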
+
+ ///////////////////////////////////////////////////////////////////////////
+ // int32_t VAD()
+ // Get VAD status.
+ //
+ // Outputs:
+ // -dtx_enabled : is set to true if DTX is enabled, otherwise
+ // is set to false.
+ // -vad_enabled : is set to true if VAD is enabled, otherwise
+ // is set to false.
+ // -vad_mode : is set to the current aggressiveness of VAD.
+ //
+ // Return value:
+ // -1 if fails to retrieve the setting of DTX/VAD,
+ // 0 if succeeded.
+ //
+ virtual int32_t VAD(bool* dtx_enabled, bool* vad_enabled,
+ ACMVADMode* vad_mode) const = 0;
+
+ ///////////////////////////////////////////////////////////////////////////
+ // int32_t RegisterVADCallback()
+ // Call this method to register a callback function which is called
+ // any time that ACM encounters an empty frame, that is, a frame recognized
+ // as inactive. Depending on the codec, WebRtc VAD or the codec's internal
+ // VAD is employed to identify a frame as active/inactive.
+ //
+ // Input:
+ // -vad_callback : pointer to a callback function.
+ //
+ // Return value:
+ // -1 if failed to register the callback function.
+ // 0 if the callback function is registered successfully.
+ //
+ virtual int32_t RegisterVADCallback(ACMVADCallback* vad_callback) = 0;
+
+ ///////////////////////////////////////////////////////////////////////////
+ // Receiver
+ //
+
+ ///////////////////////////////////////////////////////////////////////////
+ // int32_t InitializeReceiver()
+ // Resets any decoder-related state of ACM to the state it had when
+ // ACM was created. This will not interrupt or affect the encoding
+ // functionality of ACM. Calling this function discards all
+ // decoding-related settings; for instance, all registered codecs
+ // are deleted and have to be registered again.
+ //
+ // Return value:
+ // -1 if failed to initialize,
+ // 0 if succeeded.
+ //
+ virtual int32_t InitializeReceiver() = 0;
+
+ ///////////////////////////////////////////////////////////////////////////
+ // int32_t ReceiveFrequency()
+ // Get sampling frequency of the last received payload.
+ //
+ // Return value:
+ // non-negative the sampling frequency in Hertz.
+ // -1 if an error has occurred.
+ //
+ virtual int32_t ReceiveFrequency() const = 0;
+
+ ///////////////////////////////////////////////////////////////////////////
+ // int32_t PlayoutFrequency()
+ // Get sampling frequency of audio played out.
+ //
+ // Return value:
+ // the sampling frequency in Hertz.
+ //
+ virtual int32_t PlayoutFrequency() const = 0;
+
+ ///////////////////////////////////////////////////////////////////////////
+ // int32_t RegisterReceiveCodec()
+ // Register possible decoders, can be called multiple times for
+ // codecs, CNG-NB, CNG-WB, CNG-SWB, AVT and RED.
+ //
+ // Input:
+ // -receive_codec : parameters of the codec to be registered, c.f.
+ // common_types.h for the definition of
+ // CodecInst.
+ //
+ // Return value:
+ // -1 if failed to register the codec
+ // 0 if the codec registered successfully.
+ //
+ virtual int RegisterReceiveCodec(const CodecInst& receive_codec) = 0;
+
+ // Registers an external decoder. The name is only used to provide information
+ // back to the caller about the decoder. Hence, the name is arbitrary, and may
+ // be empty.
+ virtual int RegisterExternalReceiveCodec(int rtp_payload_type,
+ AudioDecoder* external_decoder,
+ int sample_rate_hz,
+ int num_channels,
+ const std::string& name) = 0;
+
+ ///////////////////////////////////////////////////////////////////////////
+ // int32_t UnregisterReceiveCodec()
+ // Unregister the codec currently registered with a specific payload type
+ // from the list of possible receive codecs.
+ //
+ // Input:
+ // -payload_type : The number representing the payload type to
+ // unregister.
+ //
+ // Return value:
+ // -1 if fails to unregister.
+ // 0 if the given codec is successfully unregistered.
+ //
+ virtual int UnregisterReceiveCodec(uint8_t payload_type) = 0;
+
+ ///////////////////////////////////////////////////////////////////////////
+ // int32_t ReceiveCodec()
+ // Get the codec associated with last received payload.
+ //
+ // Output:
+ // -curr_receive_codec : parameters of the codec associated with the last
+ // received payload, c.f. common_types.h for
+ // the definition of CodecInst.
+ //
+ // Return value:
+ // -1 if failed to retrieve the codec,
+ // 0 if the codec is successfully retrieved.
+ //
+ virtual int32_t ReceiveCodec(CodecInst* curr_receive_codec) const = 0;
+
+ ///////////////////////////////////////////////////////////////////////////
+ // int32_t IncomingPacket()
+ // Call this function to insert a parsed RTP packet into ACM.
+ //
+ // Inputs:
+ // -incoming_payload : received payload.
+ // -payload_len_bytes : the length of payload in bytes.
+ // -rtp_info : the relevant information retrieved from RTP
+ // header.
+ //
+ // Return value:
+ // -1 if failed to push in the payload
+ // 0 if payload is successfully pushed in.
+ //
+ virtual int32_t IncomingPacket(const uint8_t* incoming_payload,
+ const size_t payload_len_bytes,
+ const WebRtcRTPHeader& rtp_info) = 0;
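A receive-side sketch; the payload pointer, length, and parsed header are assumed to come from an RTP parser:

    void OnRtpPayload(AudioCodingModule* acm,
                      const uint8_t* payload,
                      size_t payload_len,
                      const WebRtcRTPHeader& rtp_info) {
      if (acm->IncomingPacket(payload, payload_len, rtp_info) != 0) {
        // Packet could not be inserted into the receive pipeline.
      }
    }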
+
+ ///////////////////////////////////////////////////////////////////////////
+ // int32_t IncomingPayload()
+ // Call this API to push incoming payloads when there is no rtp-info.
+ // The rtp-info will be created in ACM. One usage for this API is when
+ // pre-encoded files are pushed into ACM.
+ //
+ // Inputs:
+ // -incoming_payload : received payload.
+ // -payload_len_byte : the length, in bytes, of the received payload.
+ // -payload_type : the payload-type. This specifies which codec has
+ // to be used to decode the payload.
+ // -timestamp : send timestamp of the payload. ACM starts with
+ // a random value and increments it by the
+ // packet-size, which is given when the codec in
+ // question is registered by RegisterReceiveCodec().
+ // Therefore, it is essential to have the timestamp
+ // if the frame-size differs from the registered
+ // value or if the incoming payload contains DTX
+ // packets.
+ //
+ // Return value:
+ // -1 if failed to push in the payload
+ // 0 if payload is successfully pushed in.
+ //
+ virtual int32_t IncomingPayload(const uint8_t* incoming_payload,
+ const size_t payload_len_byte,
+ const uint8_t payload_type,
+ const uint32_t timestamp = 0) = 0;
+
+ ///////////////////////////////////////////////////////////////////////////
+ // int SetMinimumPlayoutDelay()
+ // Set a minimum for the playout delay, used for lip-sync. NetEq maintains
+ // such a delay unless channel conditions call for a higher delay.
+ //
+ // Input:
+ // -time_ms : minimum delay in milliseconds.
+ //
+ // Return value:
+ // -1 if failed to set the delay,
+ // 0 if the minimum delay is set.
+ //
+ virtual int SetMinimumPlayoutDelay(int time_ms) = 0;
+
+ ///////////////////////////////////////////////////////////////////////////
+ // int SetMaximumPlayoutDelay()
+ // Set a maximum for the playout delay
+ //
+ // Input:
+ // -time_ms : maximum delay in milliseconds.
+ //
+ // Return value:
+ // -1 if failed to set the delay,
+ // 0 if the maximum delay is set.
+ //
+ virtual int SetMaximumPlayoutDelay(int time_ms) = 0;
+
+ //
+ // The shortest latency, in milliseconds, required by the jitter buffer.
+ // This is computed based on inter-arrival times and the playout mode of
+ // NetEq. The actual delay is the maximum of the least-required delay and
+ // the minimum delay specified by the SetMinimumPlayoutDelay() API.
+ //
+ virtual int LeastRequiredDelayMs() const = 0;
+
+ ///////////////////////////////////////////////////////////////////////////
+ // int32_t PlayoutTimestamp()
+ // The send timestamp of an RTP packet is associated with the decoded
+ // audio of the packet in question. This function returns the timestamp of
+ // the latest audio obtained by calling PlayoutData10ms().
+ //
+ // Input:
+ // -timestamp : a reference to a uint32_t to receive the
+ // timestamp.
+ // Return value:
+ // 0 if the output is a correct timestamp.
+ // -1 if failed to output the correct timestamp.
+ //
+ // TODO(tlegrand): Change function to return the timestamp.
+ virtual int32_t PlayoutTimestamp(uint32_t* timestamp) = 0;
+
+ ///////////////////////////////////////////////////////////////////////////
+ // int32_t PlayoutData10Ms()
+ // Get 10 milliseconds of raw audio data for playout, at the given sampling
+ // frequency. ACM will perform a resampling if required.
+ //
+ // Input:
+ // -desired_freq_hz : the desired sampling frequency, in Hertz, of the
+ // output audio. If set to -1, the function returns
+ // the audio at the current sampling frequency.
+ //
+ // Output:
+ // -audio_frame : output audio frame which contains raw audio data
+ // and other relevant parameters, c.f.
+ // module_common_types.h for the definition of
+ // AudioFrame.
+ //
+ // Return value:
+ // -1 if the function fails,
+ // 0 if the function succeeds.
+ //
+ virtual int32_t PlayoutData10Ms(int32_t desired_freq_hz,
+ AudioFrame* audio_frame) = 0;
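A playout sketch pulling 10 ms of audio at a fixed rate; passing -1 instead would return audio at the current sampling frequency:

    AudioFrame output;
    if (acm->PlayoutData10Ms(48000 /* desired_freq_hz */, &output) == 0) {
      // |output| holds 10 ms of audio, resampled to 48 kHz if needed.
    }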
+
+ ///////////////////////////////////////////////////////////////////////////
+ // Codec specific
+ //
+
+ ///////////////////////////////////////////////////////////////////////////
+ // int SetOpusApplication()
+ // Sets the intended application if current send codec is Opus. Opus uses this
+ // to optimize the encoding for applications like VoIP and music. Currently,
+ // two modes are supported: kVoip and kAudio.
+ //
+ // Input:
+ // - application : intended application.
+ //
+ // Return value:
+ // -1 if current send codec is not Opus or error occurred in setting the
+ // Opus application mode.
+ // 0 if the Opus application mode is successfully set.
+ //
+ virtual int SetOpusApplication(OpusApplicationMode application) = 0;
+
+ ///////////////////////////////////////////////////////////////////////////
+ // int SetOpusMaxPlaybackRate()
+ // If current send codec is Opus, informs it of the maximum playback rate the
+ // receiver will render. Opus can use this information to optimize the bit
+ // rate and increase the computation efficiency.
+ //
+ // Input:
+ // -frequency_hz : maximum playback rate in Hz.
+ //
+ // Return value:
+ // -1 if current send codec is not Opus or
+ // error occurred in setting the maximum playback rate,
+ // 0 if maximum bandwidth is set successfully.
+ //
+ virtual int SetOpusMaxPlaybackRate(int frequency_hz) = 0;
+
+ ///////////////////////////////////////////////////////////////////////////
+ // EnableOpusDtx()
+ // Enables DTX, if the current send codec is Opus.
+ //
+ // Return value:
+ // -1 if current send codec is not Opus or error occurred in enabling the
+ // Opus DTX.
+ // 0 if Opus DTX is enabled successfully.
+ //
+ virtual int EnableOpusDtx() = 0;
+
+ ///////////////////////////////////////////////////////////////////////////
+ // int DisableOpusDtx()
+ // If current send codec is Opus, disables its internal DTX.
+ //
+ // Return value:
+ // -1 if current send codec is not Opus or error occurred in disabling DTX.
+ // 0 if Opus DTX is disabled successfully.
+ //
+ virtual int DisableOpusDtx() = 0;
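A sketch chaining the Opus-specific setters above; each call fails unless Opus is the current send codec, and kVoip comes from audio_coding_module_typedefs.h:

    if (acm->SetOpusApplication(kVoip) == 0 &&
        acm->SetOpusMaxPlaybackRate(16000) == 0) {
      acm->EnableOpusDtx();  // DTX pairs naturally with the kVoip mode
    }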
+
+ ///////////////////////////////////////////////////////////////////////////
+ // statistics
+ //
+
+ ///////////////////////////////////////////////////////////////////////////
+ // int32_t GetNetworkStatistics()
+ // Get network statistics. Note that the internal statistics of NetEq are
+ // reset by this call.
+ //
+ // Output:
+ // -network_statistics : a structure that is filled with the network
+ // statistics.
+ //
+ // Return value:
+ // -1 if failed to retrieve the statistics,
+ // 0 if statistics were retrieved successfully.
+ //
+ virtual int32_t GetNetworkStatistics(
+ NetworkStatistics* network_statistics) = 0;
+
+ //
+ // Enable NACK and set the maximum size of the NACK list. If NACK is already
+ // enabled, the maximum NACK list size is modified accordingly.
+ //
+ // If the sequence number of last received packet is N, the sequence numbers
+ // of NACK list are in the range of [N - |max_nack_list_size|, N).
+ //
+ // |max_nack_list_size| should be positive (non-zero) and less than or
+ // equal to |Nack::kNackListSizeLimit|. Otherwise, no change is applied and -1
+ // is returned; 0 is returned on success.
+ //
+ virtual int EnableNack(size_t max_nack_list_size) = 0;
+
+ // Disable NACK.
+ virtual void DisableNack() = 0;
+
+ //
+ // Get a list of packets to be retransmitted. |round_trip_time_ms| is an
+ // estimate of the round-trip-time (in milliseconds). Missing packets which
+ // will be played out in a shorter time than the round-trip-time (with respect
+ // to the time this API is called) will not be included in the list.
+ //
+ // A negative |round_trip_time_ms| results in an error, and an empty list
+ // is returned.
+ //
+ virtual std::vector<uint16_t> GetNackList(
+ int64_t round_trip_time_ms) const = 0;
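A NACK sketch; |rtt_ms| is an assumed round-trip estimate, and 250 is an illustrative list size presumed to be within Nack::kNackListSizeLimit:

    if (acm->EnableNack(250 /* max_nack_list_size */) == 0) {
      std::vector<uint16_t> nack_list = acm->GetNackList(rtt_ms);
      // Request retransmission of the sequence numbers in |nack_list|.
    }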
+
+ virtual void GetDecodingCallStatistics(
+ AudioDecodingCallStats* call_stats) const = 0;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_INCLUDE_AUDIO_CODING_MODULE_H_
diff --git a/webrtc/modules/audio_coding/include/audio_coding_module_typedefs.h b/webrtc/modules/audio_coding/include/audio_coding_module_typedefs.h
new file mode 100644
index 0000000000..280d6bffa2
--- /dev/null
+++ b/webrtc/modules/audio_coding/include/audio_coding_module_typedefs.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_INCLUDE_AUDIO_CODING_MODULE_TYPEDEFS_H_
+#define WEBRTC_MODULES_AUDIO_CODING_INCLUDE_AUDIO_CODING_MODULE_TYPEDEFS_H_
+
+#include <map>
+
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+///////////////////////////////////////////////////////////////////////////
+// enum ACMVADMode
+// An enumerator for aggressiveness of VAD
+// -VADNormal : least aggressive mode.
+// -VADLowBitrate : more aggressive than "VADNormal" to save on
+// bit-rate.
+// -VADAggr : an aggressive mode.
+// -VADVeryAggr : the most aggressive mode.
+//
+enum ACMVADMode {
+ VADNormal = 0,
+ VADLowBitrate = 1,
+ VADAggr = 2,
+ VADVeryAggr = 3
+};
+
+///////////////////////////////////////////////////////////////////////////
+//
+// Enumeration of Opus mode for intended application.
+//
+// kVoip : optimized for voice signals.
+// kAudio : optimized for non-voice signals like music.
+//
+enum OpusApplicationMode {
+ kVoip = 0,
+ kAudio = 1,
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_INCLUDE_AUDIO_CODING_MODULE_TYPEDEFS_H_
diff --git a/webrtc/modules/audio_coding/main/acm2/OWNERS b/webrtc/modules/audio_coding/main/acm2/OWNERS
deleted file mode 100644
index 3ee6b4bf5f..0000000000
--- a/webrtc/modules/audio_coding/main/acm2/OWNERS
+++ /dev/null
@@ -1,5 +0,0 @@
-
-# These are for the common case of adding or renaming files. If you're doing
-# structural changes, please get a review from a reviewer in this file.
-per-file *.gyp=*
-per-file *.gypi=*
diff --git a/webrtc/modules/audio_coding/main/acm2/acm_codec_database.cc b/webrtc/modules/audio_coding/main/acm2/acm_codec_database.cc
deleted file mode 100644
index f7842ce5b1..0000000000
--- a/webrtc/modules/audio_coding/main/acm2/acm_codec_database.cc
+++ /dev/null
@@ -1,364 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file generates databases with information about all supported audio
- * codecs.
- */
-
-// TODO(tlegrand): Change constant input pointers in all functions to constant
-// references, where appropriate.
-#include "webrtc/modules/audio_coding/main/acm2/acm_codec_database.h"
-
-#include <assert.h>
-
-#include "webrtc/base/checks.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-namespace webrtc {
-
-namespace acm2 {
-
-namespace {
-
-// Checks if the bitrate is valid for the codec.
-bool IsRateValid(int codec_id, int rate) {
- return ACMCodecDB::database_[codec_id].rate == rate;
-}
-
-// Checks if the bitrate is valid for iSAC.
-bool IsISACRateValid(int rate) {
- return (rate == -1) || ((rate <= 56000) && (rate >= 10000));
-}
-
-// Checks if the bitrate is valid for iLBC.
-bool IsILBCRateValid(int rate, int frame_size_samples) {
- if (((frame_size_samples == 240) || (frame_size_samples == 480)) &&
- (rate == 13300)) {
- return true;
- } else if (((frame_size_samples == 160) || (frame_size_samples == 320)) &&
- (rate == 15200)) {
- return true;
- } else {
- return false;
- }
-}
-
-// Checks if the bitrate is valid for Opus.
-bool IsOpusRateValid(int rate) {
- return (rate >= 6000) && (rate <= 510000);
-}
-
-} // namespace
-
-// Not yet used payload-types.
-// 83, 82, 81, 80, 79, 78, 77, 76, 75, 74, 73, 72, 71, 70, 69, 68,
-// 67, 66, 65
-
-const CodecInst ACMCodecDB::database_[] = {
-#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
- {103, "ISAC", 16000, kIsacPacSize480, 1, kIsacWbDefaultRate},
-# if (defined(WEBRTC_CODEC_ISAC))
- {104, "ISAC", 32000, kIsacPacSize960, 1, kIsacSwbDefaultRate},
-# endif
-#endif
- // Mono
- {107, "L16", 8000, 80, 1, 128000},
- {108, "L16", 16000, 160, 1, 256000},
- {109, "L16", 32000, 320, 1, 512000},
- // Stereo
- {111, "L16", 8000, 80, 2, 128000},
- {112, "L16", 16000, 160, 2, 256000},
- {113, "L16", 32000, 320, 2, 512000},
- // G.711, PCM mu-law and A-law.
- // Mono
- {0, "PCMU", 8000, 160, 1, 64000},
- {8, "PCMA", 8000, 160, 1, 64000},
- // Stereo
- {110, "PCMU", 8000, 160, 2, 64000},
- {118, "PCMA", 8000, 160, 2, 64000},
-#ifdef WEBRTC_CODEC_ILBC
- {102, "ILBC", 8000, 240, 1, 13300},
-#endif
-#ifdef WEBRTC_CODEC_G722
- // Mono
- {9, "G722", 16000, 320, 1, 64000},
- // Stereo
- {119, "G722", 16000, 320, 2, 64000},
-#endif
-#ifdef WEBRTC_CODEC_OPUS
- // Opus internally supports 48, 24, 16, 12, 8 kHz.
- // Mono and stereo.
- {120, "opus", 48000, 960, 2, 64000},
-#endif
- // Comfort noise for four different sampling frequencies.
- {13, "CN", 8000, 240, 1, 0},
- {98, "CN", 16000, 480, 1, 0},
- {99, "CN", 32000, 960, 1, 0},
-#ifdef ENABLE_48000_HZ
- {100, "CN", 48000, 1440, 1, 0},
-#endif
- {106, "telephone-event", 8000, 240, 1, 0},
-#ifdef WEBRTC_CODEC_RED
- {127, "red", 8000, 0, 1, 0},
-#endif
- // To prevent compile errors due to trailing commas.
- {-1, "Null", -1, -1, -1, -1}
-};
-
-// Create database with all codec settings at compile time.
-// Each entry needs the following parameters in the given order:
-// Number of allowed packet sizes, a vector with the allowed packet sizes,
-// Basic block samples, max number of channels that are supported.
-const ACMCodecDB::CodecSettings ACMCodecDB::codec_settings_[] = {
-#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
- {2, {kIsacPacSize480, kIsacPacSize960}, 0, 1},
-# if (defined(WEBRTC_CODEC_ISAC))
- {1, {kIsacPacSize960}, 0, 1},
-# endif
-#endif
- // Mono
- {4, {80, 160, 240, 320}, 0, 2},
- {4, {160, 320, 480, 640}, 0, 2},
- {2, {320, 640}, 0, 2},
- // Stereo
- {4, {80, 160, 240, 320}, 0, 2},
- {4, {160, 320, 480, 640}, 0, 2},
- {2, {320, 640}, 0, 2},
- // G.711, PCM mu-law and A-law.
- // Mono
- {6, {80, 160, 240, 320, 400, 480}, 0, 2},
- {6, {80, 160, 240, 320, 400, 480}, 0, 2},
- // Stereo
- {6, {80, 160, 240, 320, 400, 480}, 0, 2},
- {6, {80, 160, 240, 320, 400, 480}, 0, 2},
-#ifdef WEBRTC_CODEC_ILBC
- {4, {160, 240, 320, 480}, 0, 1},
-#endif
-#ifdef WEBRTC_CODEC_G722
- // Mono
- {6, {160, 320, 480, 640, 800, 960}, 0, 2},
- // Stereo
- {6, {160, 320, 480, 640, 800, 960}, 0, 2},
-#endif
-#ifdef WEBRTC_CODEC_OPUS
- // Opus supports frames shorter than 10ms,
- // but it doesn't help us to use them.
- // Mono and stereo.
- {4, {480, 960, 1920, 2880}, 0, 2},
-#endif
- // Comfort noise for three different sampling frequencies.
- {1, {240}, 240, 1},
- {1, {480}, 480, 1},
- {1, {960}, 960, 1},
-#ifdef ENABLE_48000_HZ
- {1, {1440}, 1440, 1},
-#endif
- {1, {240}, 240, 1},
-#ifdef WEBRTC_CODEC_RED
- {1, {0}, 0, 1},
-#endif
- // To prevent compile errors due to trailing commas.
- {-1, {-1}, -1, -1}
-};
-
-// Create a database of all NetEQ decoders at compile time.
-const NetEqDecoder ACMCodecDB::neteq_decoders_[] = {
-#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
- NetEqDecoder::kDecoderISAC,
-# if (defined(WEBRTC_CODEC_ISAC))
- NetEqDecoder::kDecoderISACswb,
-# endif
-#endif
- // Mono
- NetEqDecoder::kDecoderPCM16B, NetEqDecoder::kDecoderPCM16Bwb,
- NetEqDecoder::kDecoderPCM16Bswb32kHz,
- // Stereo
- NetEqDecoder::kDecoderPCM16B_2ch, NetEqDecoder::kDecoderPCM16Bwb_2ch,
- NetEqDecoder::kDecoderPCM16Bswb32kHz_2ch,
- // G.711, PCM mu-las and A-law.
- // Mono
- NetEqDecoder::kDecoderPCMu, NetEqDecoder::kDecoderPCMa,
- // Stereo
- NetEqDecoder::kDecoderPCMu_2ch, NetEqDecoder::kDecoderPCMa_2ch,
-#ifdef WEBRTC_CODEC_ILBC
- NetEqDecoder::kDecoderILBC,
-#endif
-#ifdef WEBRTC_CODEC_G722
- // Mono
- NetEqDecoder::kDecoderG722,
- // Stereo
- NetEqDecoder::kDecoderG722_2ch,
-#endif
-#ifdef WEBRTC_CODEC_OPUS
- // Mono and stereo.
- NetEqDecoder::kDecoderOpus,
-#endif
- // Comfort noise for three different sampling frequencies.
- NetEqDecoder::kDecoderCNGnb, NetEqDecoder::kDecoderCNGwb,
- NetEqDecoder::kDecoderCNGswb32kHz,
-#ifdef ENABLE_48000_HZ
- NetEqDecoder::kDecoderCNGswb48kHz,
-#endif
- NetEqDecoder::kDecoderAVT,
-#ifdef WEBRTC_CODEC_RED
- NetEqDecoder::kDecoderRED,
-#endif
-};
-
-// Get codec information from database.
-// TODO(tlegrand): replace memcpy with a pointer to the data base memory.
-int ACMCodecDB::Codec(int codec_id, CodecInst* codec_inst) {
- // Error check to see that codec_id is not out of bounds.
- if (static_cast<size_t>(codec_id) >= RentACodec::NumberOfCodecs()) {
- return -1;
- }
-
- // Copy database information for the codec to the output.
- memcpy(codec_inst, &database_[codec_id], sizeof(CodecInst));
-
- return 0;
-}
-
-// Enumerator for error codes when asking for codec database id.
-enum {
- kInvalidCodec = -10,
- kInvalidPayloadtype = -30,
- kInvalidPacketSize = -40,
- kInvalidRate = -50
-};
-
-// Gets the codec id number from the database. If there is some mismatch in
-// the codec settings, the function will return an error code.
-// NOTE! The first mismatch found will generate the return value.
-int ACMCodecDB::CodecNumber(const CodecInst& codec_inst) {
- // Look for a matching codec in the database.
- int codec_id = CodecId(codec_inst);
-
- // Checks if we found a matching codec.
- if (codec_id == -1) {
- return kInvalidCodec;
- }
-
- // Checks the validity of payload type
- if (!ValidPayloadType(codec_inst.pltype)) {
- return kInvalidPayloadtype;
- }
-
- // Comfort Noise is special case, packet-size & rate is not checked.
- if (STR_CASE_CMP(database_[codec_id].plname, "CN") == 0) {
- return codec_id;
- }
-
- // RED is special case, packet-size & rate is not checked.
- if (STR_CASE_CMP(database_[codec_id].plname, "red") == 0) {
- return codec_id;
- }
-
- // Checks the validity of packet size.
- if (codec_settings_[codec_id].num_packet_sizes > 0) {
- bool packet_size_ok = false;
- int i;
- int packet_size_samples;
- for (i = 0; i < codec_settings_[codec_id].num_packet_sizes; i++) {
- packet_size_samples =
- codec_settings_[codec_id].packet_sizes_samples[i];
- if (codec_inst.pacsize == packet_size_samples) {
- packet_size_ok = true;
- break;
- }
- }
-
- if (!packet_size_ok) {
- return kInvalidPacketSize;
- }
- }
-
- if (codec_inst.pacsize < 1) {
- return kInvalidPacketSize;
- }
-
- // Check the validity of rate. Codecs with multiple rates have their own
- // function for this.
- if (STR_CASE_CMP("isac", codec_inst.plname) == 0) {
- return IsISACRateValid(codec_inst.rate) ? codec_id : kInvalidRate;
- } else if (STR_CASE_CMP("ilbc", codec_inst.plname) == 0) {
- return IsILBCRateValid(codec_inst.rate, codec_inst.pacsize)
- ? codec_id : kInvalidRate;
- } else if (STR_CASE_CMP("opus", codec_inst.plname) == 0) {
- return IsOpusRateValid(codec_inst.rate)
- ? codec_id : kInvalidRate;
- }
-
- return IsRateValid(codec_id, codec_inst.rate) ?
- codec_id : kInvalidRate;
-}
-
-// Looks for a matching payload name, frequency, and channels in the
-// codec list. Need to check all three since some codecs have several codec
-// entries with different frequencies and/or channels.
-// Does not check other codec settings, such as payload type and packet size.
-// Returns the id of the codec, or -1 if no match is found.
-int ACMCodecDB::CodecId(const CodecInst& codec_inst) {
- return (CodecId(codec_inst.plname, codec_inst.plfreq,
- codec_inst.channels));
-}
-
-int ACMCodecDB::CodecId(const char* payload_name, int frequency, int channels) {
- for (const CodecInst& ci : RentACodec::Database()) {
- bool name_match = false;
- bool frequency_match = false;
- bool channels_match = false;
-
- // Payload name, sampling frequency and number of channels need to match.
- // NOTE! If |frequency| is -1, the frequency is not applicable, and is
- // always treated as true, like for RED.
- name_match = (STR_CASE_CMP(ci.plname, payload_name) == 0);
- frequency_match = (frequency == ci.plfreq) || (frequency == -1);
- // The number of channels must match for all codecs but Opus.
- if (STR_CASE_CMP(payload_name, "opus") != 0) {
- channels_match = (channels == ci.channels);
- } else {
- // For opus we just check that number of channels is valid.
- channels_match = (channels == 1 || channels == 2);
- }
-
- if (name_match && frequency_match && channels_match) {
- // We have found a matching codec in the list.
- return &ci - RentACodec::Database().data();
- }
- }
-
- // We didn't find a matching codec.
- return -1;
-}
-// Gets codec id number from database for the receiver.
-int ACMCodecDB::ReceiverCodecNumber(const CodecInst& codec_inst) {
- // Look for a matching codec in the database.
- return CodecId(codec_inst);
-}
-
-// Returns the codec sampling frequency for codec with id = "codec_id" in
-// database.
-int ACMCodecDB::CodecFreq(int codec_id) {
- const size_t i = static_cast<size_t>(codec_id);
- const auto db = RentACodec::Database();
- return i < db.size() ? db[i].plfreq : -1;
-}
-
-// Checks if the payload type is in the valid range.
-bool ACMCodecDB::ValidPayloadType(int payload_type) {
- return (payload_type >= 0) && (payload_type <= 127);
-}
-
-} // namespace acm2
-
-} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/main/acm2/acm_codec_database.h b/webrtc/modules/audio_coding/main/acm2/acm_codec_database.h
deleted file mode 100644
index 84c8846a57..0000000000
--- a/webrtc/modules/audio_coding/main/acm2/acm_codec_database.h
+++ /dev/null
@@ -1,115 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file generates databases with information about all supported audio
- * codecs.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_CODEC_DATABASE_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_CODEC_DATABASE_H_
-
-#include "webrtc/common_types.h"
-#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/audio_coding/main/acm2/rent_a_codec.h"
-#include "webrtc/modules/audio_coding/neteq/include/neteq.h"
-
-namespace webrtc {
-
-namespace acm2 {
-
-// TODO(tlegrand): replace class ACMCodecDB with a namespace.
-class ACMCodecDB {
- public:
- // kMaxNumCodecs - Maximum number of codecs that can be activated in one
- // build.
- // kMaxNumPacketSize - Maximum number of allowed packet sizes for one codec.
- // These might need to be increased if adding a new codec to the database
- static const int kMaxNumCodecs = 50;
- static const int kMaxNumPacketSize = 6;
-
- // Codec specific settings
- //
- // num_packet_sizes - number of allowed packet sizes.
- // packet_sizes_samples - list of the allowed packet sizes.
- // basic_block_samples - assigned a value different from 0 if the codec
- // requires to be fed with a specific number of samples
- // that can be different from packet size.
- // channel_support - number of channels supported to encode;
- // 1 = mono, 2 = stereo, etc.
- struct CodecSettings {
- int num_packet_sizes;
- int packet_sizes_samples[kMaxNumPacketSize];
- int basic_block_samples;
- int channel_support;
- };
-
- // Gets codec information from database at the position in database given by
- // [codec_id].
- // Input:
- // [codec_id] - number that specifies at what position in the database to
- // get the information.
- // Output:
- // [codec_inst] - filled with information about the codec.
- // Return:
- // 0 if successful, otherwise -1.
- static int Codec(int codec_id, CodecInst* codec_inst);
-
- // Returns codec id from database, given the information received in the input
- // [codec_inst].
- // Input:
- // [codec_inst] - Information about the codec for which we require the
- // database id.
- // Return:
- // codec id if successful, otherwise < 0.
- static int CodecNumber(const CodecInst& codec_inst);
- static int CodecId(const CodecInst& codec_inst);
- static int CodecId(const char* payload_name, int frequency, int channels);
- static int ReceiverCodecNumber(const CodecInst& codec_inst);
-
- // Returns the codec sampling frequency for codec with id = "codec_id" in
- // database.
- // TODO(tlegrand): Check if function is needed, or if we can change
- // to access database directly.
- // Input:
- // [codec_id] - number that specifies at what position in the database to
- // get the information.
- // Return:
- // codec sampling frequency if successful, otherwise -1.
- static int CodecFreq(int codec_id);
-
- // Check if the payload type is valid, meaning that it is in the valid range
- // of 0 to 127.
- // Input:
- // [payload_type] - payload type.
- static bool ValidPayloadType(int payload_type);
-
- // Databases with information about the supported codecs
- // database_ - stored information about all codecs: payload type, name,
- // sampling frequency, packet size in samples, default channel
- // support, and default rate.
- // codec_settings_ - stored codec settings: number of allowed packet sizes,
- // a vector with the allowed packet sizes, basic block
- // samples, and max number of channels that are supported.
- // neteq_decoders_ - list of supported decoders in NetEQ.
- static const CodecInst database_[kMaxNumCodecs];
- static const CodecSettings codec_settings_[kMaxNumCodecs];
-
- private:
- static const NetEqDecoder neteq_decoders_[kMaxNumCodecs];
-
- friend class RentACodec;
-};
-
-} // namespace acm2
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_CODEC_DATABASE_H_
diff --git a/webrtc/modules/audio_coding/main/acm2/acm_common_defs.h b/webrtc/modules/audio_coding/main/acm2/acm_common_defs.h
deleted file mode 100644
index 23e3519ed0..0000000000
--- a/webrtc/modules/audio_coding/main/acm2/acm_common_defs.h
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_COMMON_DEFS_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_COMMON_DEFS_H_
-
-#include "webrtc/engine_configurations.h"
-
-// Checks the enabled codecs; incompatible codecs must not be enabled at the
-// same time.
-#if ((defined WEBRTC_CODEC_ISAC) && (defined WEBRTC_CODEC_ISACFX))
-#error iSAC and iSACFX codecs cannot be enabled at the same time
-#endif
-
-namespace webrtc {
-
-// General codec specific defines
-const int kIsacWbDefaultRate = 32000;
-const int kIsacSwbDefaultRate = 56000;
-const int kIsacPacSize480 = 480;
-const int kIsacPacSize960 = 960;
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_COMMON_DEFS_H_
diff --git a/webrtc/modules/audio_coding/main/acm2/acm_receive_test_oldapi.cc b/webrtc/modules/audio_coding/main/acm2/acm_receive_test_oldapi.cc
deleted file mode 100644
index fdcfdfc22d..0000000000
--- a/webrtc/modules/audio_coding/main/acm2/acm_receive_test_oldapi.cc
+++ /dev/null
@@ -1,221 +0,0 @@
-/*
- * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/acm2/acm_receive_test_oldapi.h"
-
-#include <assert.h>
-#include <stdio.h>
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module.h"
-#include "webrtc/modules/audio_coding/neteq/tools/audio_sink.h"
-#include "webrtc/modules/audio_coding/neteq/tools/packet.h"
-#include "webrtc/modules/audio_coding/neteq/tools/packet_source.h"
-
-namespace webrtc {
-namespace test {
-
-namespace {
-// Returns true if the codec should be registered, otherwise false. Skips
-// 48 kHz comfort noise and telephone-event (DTMF).
-bool ModifyAndUseThisCodec(CodecInst* codec_param) {
- if (STR_CASE_CMP(codec_param->plname, "CN") == 0 &&
- codec_param->plfreq == 48000)
- return false; // Skip 48 kHz comfort noise.
-
- if (STR_CASE_CMP(codec_param->plname, "telephone-event") == 0)
-    return false;  // Skip DTMF.
-
- return true;
-}
-
-// Remaps payload types from ACM's default to those used in the resource file
-// neteq_universal_new.rtp. Returns true if the codec should be registered,
-// otherwise false. The payload types are set as follows (all are mono codecs):
-// PCMu = 0;
-// PCMa = 8;
-// Comfort noise 8 kHz = 13
-// Comfort noise 16 kHz = 98
-// Comfort noise 32 kHz = 99
-// iLBC = 102
-// iSAC wideband = 103
-// iSAC super-wideband = 104
-// AVT/DTMF = 106
-// RED = 117
-// PCM16b 8 kHz = 93
-// PCM16b 16 kHz = 94
-// PCM16b 32 kHz = 95
-// G.722 = 9
-bool RemapPltypeAndUseThisCodec(const char* plname,
- int plfreq,
- int channels,
- int* pltype) {
- if (channels != 1)
- return false; // Don't use non-mono codecs.
-
- // Re-map pltypes to those used in the NetEq test files.
- if (STR_CASE_CMP(plname, "PCMU") == 0 && plfreq == 8000) {
- *pltype = 0;
- } else if (STR_CASE_CMP(plname, "PCMA") == 0 && plfreq == 8000) {
- *pltype = 8;
- } else if (STR_CASE_CMP(plname, "CN") == 0 && plfreq == 8000) {
- *pltype = 13;
- } else if (STR_CASE_CMP(plname, "CN") == 0 && plfreq == 16000) {
- *pltype = 98;
- } else if (STR_CASE_CMP(plname, "CN") == 0 && plfreq == 32000) {
- *pltype = 99;
- } else if (STR_CASE_CMP(plname, "ILBC") == 0) {
- *pltype = 102;
- } else if (STR_CASE_CMP(plname, "ISAC") == 0 && plfreq == 16000) {
- *pltype = 103;
- } else if (STR_CASE_CMP(plname, "ISAC") == 0 && plfreq == 32000) {
- *pltype = 104;
- } else if (STR_CASE_CMP(plname, "telephone-event") == 0) {
- *pltype = 106;
- } else if (STR_CASE_CMP(plname, "red") == 0) {
- *pltype = 117;
- } else if (STR_CASE_CMP(plname, "L16") == 0 && plfreq == 8000) {
- *pltype = 93;
- } else if (STR_CASE_CMP(plname, "L16") == 0 && plfreq == 16000) {
- *pltype = 94;
- } else if (STR_CASE_CMP(plname, "L16") == 0 && plfreq == 32000) {
- *pltype = 95;
- } else if (STR_CASE_CMP(plname, "G722") == 0) {
- *pltype = 9;
- } else {
- // Don't use any other codecs.
- return false;
- }
- return true;
-}
-} // namespace
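
A minimal usage sketch for the helper above (the surrounding variables are
hypothetical; RegisterNetEqTestCodecs() below is the real caller):

  int pltype = 96;  // Some dynamic payload type assigned by ACM.
  if (RemapPltypeAndUseThisCodec("PCMU", 8000, 1, &pltype)) {
    // pltype is now 0, matching neteq_universal_new.rtp.
  }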
-
-AcmReceiveTestOldApi::AcmReceiveTestOldApi(
- PacketSource* packet_source,
- AudioSink* audio_sink,
- int output_freq_hz,
- NumOutputChannels exptected_output_channels)
- : clock_(0),
- acm_(webrtc::AudioCodingModule::Create(0, &clock_)),
- packet_source_(packet_source),
- audio_sink_(audio_sink),
- output_freq_hz_(output_freq_hz),
- exptected_output_channels_(exptected_output_channels) {
-}
-
-void AcmReceiveTestOldApi::RegisterDefaultCodecs() {
- CodecInst my_codec_param;
- for (int n = 0; n < acm_->NumberOfCodecs(); n++) {
- ASSERT_EQ(0, acm_->Codec(n, &my_codec_param)) << "Failed to get codec.";
- if (ModifyAndUseThisCodec(&my_codec_param)) {
- ASSERT_EQ(0, acm_->RegisterReceiveCodec(my_codec_param))
- << "Couldn't register receive codec.\n";
- }
- }
-}
-
-void AcmReceiveTestOldApi::RegisterNetEqTestCodecs() {
- CodecInst my_codec_param;
- for (int n = 0; n < acm_->NumberOfCodecs(); n++) {
- ASSERT_EQ(0, acm_->Codec(n, &my_codec_param)) << "Failed to get codec.";
- if (!ModifyAndUseThisCodec(&my_codec_param)) {
- // Skip this codec.
- continue;
- }
-
- if (RemapPltypeAndUseThisCodec(my_codec_param.plname,
- my_codec_param.plfreq,
- my_codec_param.channels,
- &my_codec_param.pltype)) {
- ASSERT_EQ(0, acm_->RegisterReceiveCodec(my_codec_param))
- << "Couldn't register receive codec.\n";
- }
- }
-}
-
-int AcmReceiveTestOldApi::RegisterExternalReceiveCodec(
- int rtp_payload_type,
- AudioDecoder* external_decoder,
- int sample_rate_hz,
- int num_channels) {
- return acm_->RegisterExternalReceiveCodec(rtp_payload_type, external_decoder,
- sample_rate_hz, num_channels);
-}
-
-void AcmReceiveTestOldApi::Run() {
- for (rtc::scoped_ptr<Packet> packet(packet_source_->NextPacket()); packet;
- packet.reset(packet_source_->NextPacket())) {
- // Pull audio until time to insert packet.
- while (clock_.TimeInMilliseconds() < packet->time_ms()) {
- AudioFrame output_frame;
- EXPECT_EQ(0, acm_->PlayoutData10Ms(output_freq_hz_, &output_frame));
- EXPECT_EQ(output_freq_hz_, output_frame.sample_rate_hz_);
- const size_t samples_per_block =
- static_cast<size_t>(output_freq_hz_ * 10 / 1000);
- EXPECT_EQ(samples_per_block, output_frame.samples_per_channel_);
- if (exptected_output_channels_ != kArbitraryChannels) {
- if (output_frame.speech_type_ == webrtc::AudioFrame::kPLC) {
- // Don't check number of channels for PLC output, since each test run
- // usually starts with a short period of mono PLC before decoding the
- // first packet.
- } else {
- EXPECT_EQ(exptected_output_channels_, output_frame.num_channels_);
- }
- }
- ASSERT_TRUE(audio_sink_->WriteAudioFrame(output_frame));
- clock_.AdvanceTimeMilliseconds(10);
- AfterGetAudio();
- }
-
- // Insert packet after converting from RTPHeader to WebRtcRTPHeader.
- WebRtcRTPHeader header;
- header.header = packet->header();
- header.frameType = kAudioFrameSpeech;
- memset(&header.type.Audio, 0, sizeof(RTPAudioHeader));
- EXPECT_EQ(0,
- acm_->IncomingPacket(
- packet->payload(),
- static_cast<int32_t>(packet->payload_length_bytes()),
- header))
- << "Failure when inserting packet:" << std::endl
- << " PT = " << static_cast<int>(header.header.payloadType) << std::endl
- << " TS = " << header.header.timestamp << std::endl
- << " SN = " << header.header.sequenceNumber;
- }
-}
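
As a rough sketch, a hypothetical driver for this harness could look as
follows; RtpFileSource and OutputAudioFile are assumed to be the helpers from
webrtc/modules/audio_coding/neteq/tools:

  // Feed an RTP dump through ACM and write the decoded audio to a file.
  rtc::scoped_ptr<test::PacketSource> source(
      test::RtpFileSource::Create("neteq_universal_new.rtp"));
  test::OutputAudioFile sink("out.pcm");
  test::AcmReceiveTestOldApi test(
      source.get(), &sink, 32000,
      test::AcmReceiveTestOldApi::kArbitraryChannels);
  test.RegisterNetEqTestCodecs();
  test.Run();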
-
-AcmReceiveTestToggleOutputFreqOldApi::AcmReceiveTestToggleOutputFreqOldApi(
- PacketSource* packet_source,
- AudioSink* audio_sink,
- int output_freq_hz_1,
- int output_freq_hz_2,
- int toggle_period_ms,
- NumOutputChannels exptected_output_channels)
- : AcmReceiveTestOldApi(packet_source,
- audio_sink,
- output_freq_hz_1,
- exptected_output_channels),
- output_freq_hz_1_(output_freq_hz_1),
- output_freq_hz_2_(output_freq_hz_2),
- toggle_period_ms_(toggle_period_ms),
- last_toggle_time_ms_(clock_.TimeInMilliseconds()) {
-}
-
-void AcmReceiveTestToggleOutputFreqOldApi::AfterGetAudio() {
- if (clock_.TimeInMilliseconds() >= last_toggle_time_ms_ + toggle_period_ms_) {
- output_freq_hz_ = (output_freq_hz_ == output_freq_hz_1_)
- ? output_freq_hz_2_
- : output_freq_hz_1_;
- last_toggle_time_ms_ = clock_.TimeInMilliseconds();
- }
-}
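
Reusing the hypothetical source and sink from the sketch above, an instance
that alternates between 16 kHz and 32 kHz output every second would be
constructed as

  test::AcmReceiveTestToggleOutputFreqOldApi test(
      source.get(), &sink, 16000 /* output_freq_hz_1 */,
      32000 /* output_freq_hz_2 */, 1000 /* toggle_period_ms */,
      test::AcmReceiveTestOldApi::kArbitraryChannels);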
-
-} // namespace test
-} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/main/acm2/acm_receive_test_oldapi.h b/webrtc/modules/audio_coding/main/acm2/acm_receive_test_oldapi.h
deleted file mode 100644
index 0b5671fe8c..0000000000
--- a/webrtc/modules/audio_coding/main/acm2/acm_receive_test_oldapi.h
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
- * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_RECEIVE_TEST_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_RECEIVE_TEST_H_
-
-#include "webrtc/base/constructormagic.h"
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/system_wrappers/include/clock.h"
-
-namespace webrtc {
-class AudioCodingModule;
-class AudioDecoder;
-struct CodecInst;
-
-namespace test {
-class AudioSink;
-class PacketSource;
-
-class AcmReceiveTestOldApi {
- public:
- enum NumOutputChannels {
- kArbitraryChannels = 0,
- kMonoOutput = 1,
- kStereoOutput = 2
- };
-
- AcmReceiveTestOldApi(PacketSource* packet_source,
- AudioSink* audio_sink,
- int output_freq_hz,
- NumOutputChannels exptected_output_channels);
- virtual ~AcmReceiveTestOldApi() {}
-
- // Registers the codecs with default parameters from ACM.
- void RegisterDefaultCodecs();
-
- // Registers codecs with payload types matching the pre-encoded NetEq test
- // files.
- void RegisterNetEqTestCodecs();
-
- int RegisterExternalReceiveCodec(int rtp_payload_type,
- AudioDecoder* external_decoder,
- int sample_rate_hz,
- int num_channels);
-
-  // Runs the test. Failures are reported through gtest assertions.
- void Run();
-
- protected:
- // Method is called after each block of output audio is received from ACM.
- virtual void AfterGetAudio() {}
-
- SimulatedClock clock_;
- rtc::scoped_ptr<AudioCodingModule> acm_;
- PacketSource* packet_source_;
- AudioSink* audio_sink_;
- int output_freq_hz_;
- NumOutputChannels exptected_output_channels_;
-
- RTC_DISALLOW_COPY_AND_ASSIGN(AcmReceiveTestOldApi);
-};
-
-// This test toggles the output frequency every |toggle_period_ms|. The test
-// starts with |output_freq_hz_1|. Except for the toggling, it does the same
-// thing as AcmReceiveTestOldApi.
-class AcmReceiveTestToggleOutputFreqOldApi : public AcmReceiveTestOldApi {
- public:
- AcmReceiveTestToggleOutputFreqOldApi(
- PacketSource* packet_source,
- AudioSink* audio_sink,
- int output_freq_hz_1,
- int output_freq_hz_2,
- int toggle_period_ms,
- NumOutputChannels exptected_output_channels);
-
- protected:
- void AfterGetAudio() override;
-
- const int output_freq_hz_1_;
- const int output_freq_hz_2_;
- const int toggle_period_ms_;
- int64_t last_toggle_time_ms_;
-};
-
-} // namespace test
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_RECEIVE_TEST_H_
diff --git a/webrtc/modules/audio_coding/main/acm2/acm_receiver.cc b/webrtc/modules/audio_coding/main/acm2/acm_receiver.cc
deleted file mode 100644
index cf486ce06a..0000000000
--- a/webrtc/modules/audio_coding/main/acm2/acm_receiver.cc
+++ /dev/null
@@ -1,739 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/acm2/acm_receiver.h"
-
-#include <stdlib.h> // malloc
-
-#include <algorithm> // sort
-#include <vector>
-
-#include "webrtc/base/checks.h"
-#include "webrtc/base/format_macros.h"
-#include "webrtc/base/logging.h"
-#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
-#include "webrtc/common_types.h"
-#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_resampler.h"
-#include "webrtc/modules/audio_coding/main/acm2/call_statistics.h"
-#include "webrtc/modules/audio_coding/neteq/include/neteq.h"
-#include "webrtc/system_wrappers/include/clock.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-namespace webrtc {
-
-namespace acm2 {
-
-namespace {
-
-// |vad_activity_| field of |audio_frame| is set to |previous_audio_activity_|
-// before the call to this function.
-void SetAudioFrameActivityAndType(bool vad_enabled,
- NetEqOutputType type,
- AudioFrame* audio_frame) {
- if (vad_enabled) {
- switch (type) {
- case kOutputNormal: {
- audio_frame->vad_activity_ = AudioFrame::kVadActive;
- audio_frame->speech_type_ = AudioFrame::kNormalSpeech;
- break;
- }
- case kOutputVADPassive: {
- audio_frame->vad_activity_ = AudioFrame::kVadPassive;
- audio_frame->speech_type_ = AudioFrame::kNormalSpeech;
- break;
- }
- case kOutputCNG: {
- audio_frame->vad_activity_ = AudioFrame::kVadPassive;
- audio_frame->speech_type_ = AudioFrame::kCNG;
- break;
- }
- case kOutputPLC: {
- // Don't change |audio_frame->vad_activity_|, it should be the same as
- // |previous_audio_activity_|.
- audio_frame->speech_type_ = AudioFrame::kPLC;
- break;
- }
- case kOutputPLCtoCNG: {
- audio_frame->vad_activity_ = AudioFrame::kVadPassive;
- audio_frame->speech_type_ = AudioFrame::kPLCCNG;
- break;
- }
- default:
- assert(false);
- }
- } else {
-    // Always return kVadUnknown when receive-side VAD is inactive.
- audio_frame->vad_activity_ = AudioFrame::kVadUnknown;
- switch (type) {
- case kOutputNormal: {
- audio_frame->speech_type_ = AudioFrame::kNormalSpeech;
- break;
- }
- case kOutputCNG: {
- audio_frame->speech_type_ = AudioFrame::kCNG;
- break;
- }
- case kOutputPLC: {
- audio_frame->speech_type_ = AudioFrame::kPLC;
- break;
- }
- case kOutputPLCtoCNG: {
- audio_frame->speech_type_ = AudioFrame::kPLCCNG;
- break;
- }
- case kOutputVADPassive: {
-        // Normally, we should not get any VAD decision if post-decoding VAD
-        // is not active. However, if post-decoding VAD has been active and
-        // then disabled, we might end up here for a couple of frames.
- audio_frame->speech_type_ = AudioFrame::kNormalSpeech;
- LOG(WARNING) << "Post-decoding VAD is disabled but output is "
- << "labeled VAD-passive";
- break;
- }
- default:
- assert(false);
- }
- }
-}
-
-// Is the given codec a CNG codec?
-// TODO(kwiberg): Move to RentACodec.
-bool IsCng(int codec_id) {
- auto i = RentACodec::CodecIdFromIndex(codec_id);
- return (i && (*i == RentACodec::CodecId::kCNNB ||
- *i == RentACodec::CodecId::kCNWB ||
- *i == RentACodec::CodecId::kCNSWB ||
- *i == RentACodec::CodecId::kCNFB));
-}
-
-} // namespace
-
-AcmReceiver::AcmReceiver(const AudioCodingModule::Config& config)
- : crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- id_(config.id),
- last_audio_decoder_(nullptr),
- previous_audio_activity_(AudioFrame::kVadPassive),
- current_sample_rate_hz_(config.neteq_config.sample_rate_hz),
- audio_buffer_(new int16_t[AudioFrame::kMaxDataSizeSamples]),
- last_audio_buffer_(new int16_t[AudioFrame::kMaxDataSizeSamples]),
- neteq_(NetEq::Create(config.neteq_config)),
- vad_enabled_(true),
- clock_(config.clock),
- resampled_last_output_frame_(true),
- av_sync_(false),
- initial_delay_manager_(),
- missing_packets_sync_stream_(),
- late_packets_sync_stream_() {
- assert(clock_);
-
-  // Make sure we are on the same page as NetEq. Post-decode VAD is disabled
-  // by default in NetEq4; however, the Audio Conference Mixer relies on the
-  // VAD decision and fails if it is not provided.
- if (vad_enabled_)
- neteq_->EnableVad();
- else
- neteq_->DisableVad();
-
- memset(audio_buffer_.get(), 0, AudioFrame::kMaxDataSizeSamples);
- memset(last_audio_buffer_.get(), 0, AudioFrame::kMaxDataSizeSamples);
-}
-
-AcmReceiver::~AcmReceiver() {
- delete neteq_;
-}
-
-int AcmReceiver::SetMinimumDelay(int delay_ms) {
- if (neteq_->SetMinimumDelay(delay_ms))
- return 0;
- LOG(LERROR) << "AcmReceiver::SetExtraDelay " << delay_ms;
- return -1;
-}
-
-int AcmReceiver::SetInitialDelay(int delay_ms) {
- if (delay_ms < 0 || delay_ms > 10000) {
- return -1;
- }
- CriticalSectionScoped lock(crit_sect_.get());
-
- if (delay_ms == 0) {
- av_sync_ = false;
- initial_delay_manager_.reset();
- missing_packets_sync_stream_.reset();
- late_packets_sync_stream_.reset();
- neteq_->SetMinimumDelay(0);
- return 0;
- }
-
- if (av_sync_ && initial_delay_manager_->PacketBuffered()) {
- // Too late for this API. Only works before a call is started.
- return -1;
- }
-
-  // In most places, NetEq calls are kept outside AcmReceiver's critical
-  // section to improve performance. Here, the call has to be placed before
-  // the following block, so we keep it inside the critical section;
-  // otherwise we would have to release |neteq_crit_sect_| and acquire it
-  // again, which seems like overkill.
- if (!neteq_->SetMinimumDelay(delay_ms))
- return -1;
-
- const int kLatePacketThreshold = 5;
- av_sync_ = true;
- initial_delay_manager_.reset(new InitialDelayManager(delay_ms,
- kLatePacketThreshold));
- missing_packets_sync_stream_.reset(new InitialDelayManager::SyncStream);
- late_packets_sync_stream_.reset(new InitialDelayManager::SyncStream);
- return 0;
-}
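
A hedged usage sketch (the receiver object is assumed to exist): requesting
200 ms of initial playout delay before any packet is buffered, and later
clearing it again, would look like

  if (receiver->SetInitialDelay(200) != 0) {
    // Rejected: out of the 0-10000 ms range, or packets already buffered.
  }
  receiver->SetInitialDelay(0);  // Clears the delay and leaves AV-sync mode.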
-
-int AcmReceiver::SetMaximumDelay(int delay_ms) {
- if (neteq_->SetMaximumDelay(delay_ms))
- return 0;
- LOG(LERROR) << "AcmReceiver::SetExtraDelay " << delay_ms;
- return -1;
-}
-
-int AcmReceiver::LeastRequiredDelayMs() const {
- return neteq_->LeastRequiredDelayMs();
-}
-
-int AcmReceiver::current_sample_rate_hz() const {
- CriticalSectionScoped lock(crit_sect_.get());
- return current_sample_rate_hz_;
-}
-
-int AcmReceiver::InsertPacket(const WebRtcRTPHeader& rtp_header,
- const uint8_t* incoming_payload,
- size_t length_payload) {
- uint32_t receive_timestamp = 0;
- InitialDelayManager::PacketType packet_type =
- InitialDelayManager::kUndefinedPacket;
- bool new_codec = false;
- const RTPHeader* header = &rtp_header.header; // Just a shorthand.
-
- {
- CriticalSectionScoped lock(crit_sect_.get());
-
- const Decoder* decoder = RtpHeaderToDecoder(*header, incoming_payload);
- if (!decoder) {
- LOG_F(LS_ERROR) << "Payload-type "
- << static_cast<int>(header->payloadType)
- << " is not registered.";
- return -1;
- }
- const int sample_rate_hz = ACMCodecDB::CodecFreq(decoder->acm_codec_id);
- receive_timestamp = NowInTimestamp(sample_rate_hz);
-
- if (IsCng(decoder->acm_codec_id)) {
-      // If this is CNG while the current audio codec is not mono, skip
-      // pushing the packet into NetEq.
- if (last_audio_decoder_ && last_audio_decoder_->channels > 1)
- return 0;
- packet_type = InitialDelayManager::kCngPacket;
- } else if (decoder->acm_codec_id ==
- *RentACodec::CodecIndexFromId(RentACodec::CodecId::kAVT)) {
- packet_type = InitialDelayManager::kAvtPacket;
- } else {
- if (decoder != last_audio_decoder_) {
-        // This is either the first audio packet, or the send codec has
-        // changed. Therefore, the NetEq buffer is either empty or will be
-        // flushed when this packet is inserted.
- new_codec = true;
- last_audio_decoder_ = decoder;
- }
- packet_type = InitialDelayManager::kAudioPacket;
- }
-
- if (av_sync_) {
- assert(initial_delay_manager_.get());
- assert(missing_packets_sync_stream_.get());
-      // This updates |initial_delay_manager_| and specifies a stream of
-      // sync-packets to insert, if required. We insert the sync-packets once
-      // the AcmReceiver lock is released and |decoder_lock_| is acquired.
- initial_delay_manager_->UpdateLastReceivedPacket(
- rtp_header, receive_timestamp, packet_type, new_codec, sample_rate_hz,
- missing_packets_sync_stream_.get());
- }
- } // |crit_sect_| is released.
-
- // If |missing_packets_sync_stream_| is allocated then we are in AV-sync and
- // we may need to insert sync-packets. We don't check |av_sync_| as we are
- // outside AcmReceiver's critical section.
- if (missing_packets_sync_stream_.get()) {
- InsertStreamOfSyncPackets(missing_packets_sync_stream_.get());
- }
-
- if (neteq_->InsertPacket(rtp_header, incoming_payload, length_payload,
- receive_timestamp) < 0) {
- LOG(LERROR) << "AcmReceiver::InsertPacket "
- << static_cast<int>(header->payloadType)
- << " Failed to insert packet";
- return -1;
- }
- return 0;
-}
-
-int AcmReceiver::GetAudio(int desired_freq_hz, AudioFrame* audio_frame) {
- enum NetEqOutputType type;
- size_t samples_per_channel;
- int num_channels;
- bool return_silence = false;
-
- {
- // Accessing members, take the lock.
- CriticalSectionScoped lock(crit_sect_.get());
-
- if (av_sync_) {
- assert(initial_delay_manager_.get());
- assert(late_packets_sync_stream_.get());
- return_silence = GetSilence(desired_freq_hz, audio_frame);
- uint32_t timestamp_now = NowInTimestamp(current_sample_rate_hz_);
- initial_delay_manager_->LatePackets(timestamp_now,
- late_packets_sync_stream_.get());
- }
- }
-
- // If |late_packets_sync_stream_| is allocated then we have been in AV-sync
- // mode and we might have to insert sync-packets.
- if (late_packets_sync_stream_.get()) {
- InsertStreamOfSyncPackets(late_packets_sync_stream_.get());
- if (return_silence) // Silence generated, don't pull from NetEq.
- return 0;
- }
-
- // Accessing members, take the lock.
- CriticalSectionScoped lock(crit_sect_.get());
-
- // Always write the output to |audio_buffer_| first.
- if (neteq_->GetAudio(AudioFrame::kMaxDataSizeSamples,
- audio_buffer_.get(),
- &samples_per_channel,
- &num_channels,
- &type) != NetEq::kOK) {
- LOG(LERROR) << "AcmReceiver::GetAudio - NetEq Failed.";
- return -1;
- }
-
- // NetEq always returns 10 ms of audio.
- current_sample_rate_hz_ = static_cast<int>(samples_per_channel * 100);
-
- // Update if resampling is required.
- bool need_resampling = (desired_freq_hz != -1) &&
- (current_sample_rate_hz_ != desired_freq_hz);
-
- if (need_resampling && !resampled_last_output_frame_) {
- // Prime the resampler with the last frame.
- int16_t temp_output[AudioFrame::kMaxDataSizeSamples];
- int samples_per_channel_int =
- resampler_.Resample10Msec(last_audio_buffer_.get(),
- current_sample_rate_hz_,
- desired_freq_hz,
- num_channels,
- AudioFrame::kMaxDataSizeSamples,
- temp_output);
- if (samples_per_channel_int < 0) {
- LOG(LERROR) << "AcmReceiver::GetAudio - "
- "Resampling last_audio_buffer_ failed.";
- return -1;
- }
- samples_per_channel = static_cast<size_t>(samples_per_channel_int);
- }
-
-  // The audio in |audio_buffer_| is transferred to |audio_frame| below,
-  // either through resampling or through a straight memcpy.
- // TODO(henrik.lundin) Glitches in the output may appear if the output rate
- // from NetEq changes. See WebRTC issue 3923.
- if (need_resampling) {
- int samples_per_channel_int =
- resampler_.Resample10Msec(audio_buffer_.get(),
- current_sample_rate_hz_,
- desired_freq_hz,
- num_channels,
- AudioFrame::kMaxDataSizeSamples,
- audio_frame->data_);
- if (samples_per_channel_int < 0) {
- LOG(LERROR) << "AcmReceiver::GetAudio - Resampling audio_buffer_ failed.";
- return -1;
- }
- samples_per_channel = static_cast<size_t>(samples_per_channel_int);
- resampled_last_output_frame_ = true;
- } else {
- resampled_last_output_frame_ = false;
-    // We might end up here ONLY if the codec has changed.
- memcpy(audio_frame->data_,
- audio_buffer_.get(),
- samples_per_channel * num_channels * sizeof(int16_t));
- }
-
- // Swap buffers, so that the current audio is stored in |last_audio_buffer_|
- // for next time.
- audio_buffer_.swap(last_audio_buffer_);
-
- audio_frame->num_channels_ = num_channels;
- audio_frame->samples_per_channel_ = samples_per_channel;
- audio_frame->sample_rate_hz_ = static_cast<int>(samples_per_channel * 100);
-
-  // |vad_activity_| must be set before calling SetAudioFrameActivityAndType().
- audio_frame->vad_activity_ = previous_audio_activity_;
- SetAudioFrameActivityAndType(vad_enabled_, type, audio_frame);
- previous_audio_activity_ = audio_frame->vad_activity_;
- call_stats_.DecodedByNetEq(audio_frame->speech_type_);
-
-  // Computes the RTP timestamp of the first sample in |audio_frame| from
-  // GetPlayoutTimestamp(), which returns the timestamp of the last sample in
-  // |audio_frame|.
- uint32_t playout_timestamp = 0;
- if (GetPlayoutTimestamp(&playout_timestamp)) {
- audio_frame->timestamp_ = playout_timestamp -
- static_cast<uint32_t>(audio_frame->samples_per_channel_);
- } else {
-    // Remains 0 until we have a valid |playout_timestamp|.
- audio_frame->timestamp_ = 0;
- }
-
- return 0;
-}
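
A minimal sketch of the expected calling pattern (the driver code is
hypothetical):

  // Pull decoded audio every 10 ms at a fixed 16 kHz output rate.
  AudioFrame frame;
  if (receiver->GetAudio(16000 /* desired_freq_hz */, &frame) == 0) {
    // frame holds 160 samples per channel (10 ms at 16 kHz), resampled if
    // the decoder rate differs from 16 kHz.
  }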
-
-int32_t AcmReceiver::AddCodec(int acm_codec_id,
- uint8_t payload_type,
- int channels,
- int sample_rate_hz,
- AudioDecoder* audio_decoder) {
- const auto neteq_decoder = [acm_codec_id, channels]() -> NetEqDecoder {
- if (acm_codec_id == -1)
- return NetEqDecoder::kDecoderArbitrary; // External decoder.
- const rtc::Maybe<RentACodec::CodecId> cid =
- RentACodec::CodecIdFromIndex(acm_codec_id);
- RTC_DCHECK(cid) << "Invalid codec index: " << acm_codec_id;
- const rtc::Maybe<NetEqDecoder> ned =
- RentACodec::NetEqDecoderFromCodecId(*cid, channels);
- RTC_DCHECK(ned) << "Invalid codec ID: " << static_cast<int>(*cid);
- return *ned;
- }();
-
- CriticalSectionScoped lock(crit_sect_.get());
-
-  // Check if this payload type is already registered, and if so, whether the
-  // registration needs to change.
- auto it = decoders_.find(payload_type);
- if (it != decoders_.end()) {
- const Decoder& decoder = it->second;
- if (acm_codec_id != -1 && decoder.acm_codec_id == acm_codec_id &&
- decoder.channels == channels &&
- decoder.sample_rate_hz == sample_rate_hz) {
- // Re-registering the same codec. Do nothing and return.
- return 0;
- }
-
- // Changing codec. First unregister the old codec, then register the new
- // one.
- if (neteq_->RemovePayloadType(payload_type) != NetEq::kOK) {
- LOG(LERROR) << "Cannot remove payload " << static_cast<int>(payload_type);
- return -1;
- }
-
- decoders_.erase(it);
- }
-
- int ret_val;
- if (!audio_decoder) {
- ret_val = neteq_->RegisterPayloadType(neteq_decoder, payload_type);
- } else {
- ret_val = neteq_->RegisterExternalDecoder(audio_decoder, neteq_decoder,
- payload_type, sample_rate_hz);
- }
- if (ret_val != NetEq::kOK) {
- LOG(LERROR) << "AcmReceiver::AddCodec " << acm_codec_id
- << static_cast<int>(payload_type)
- << " channels: " << channels;
- return -1;
- }
-
- Decoder decoder;
- decoder.acm_codec_id = acm_codec_id;
- decoder.payload_type = payload_type;
- decoder.channels = channels;
- decoder.sample_rate_hz = sample_rate_hz;
- decoders_[payload_type] = decoder;
- return 0;
-}
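
Two hedged usage sketches (payload types, the codec index, and the decoder
object are made up for illustration):

  // Built-in decoder: NetEq constructs the decoder from the codec index.
  receiver->AddCodec(acm_codec_index, 103 /* payload type */, 1 /* mono */,
                     16000, nullptr);

  // External decoder: NetEq uses the caller-owned |my_decoder| and does not
  // take ownership of it.
  receiver->AddCodec(-1, 110 /* payload type */, 1, 48000, my_decoder);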
-
-void AcmReceiver::EnableVad() {
- neteq_->EnableVad();
- CriticalSectionScoped lock(crit_sect_.get());
- vad_enabled_ = true;
-}
-
-void AcmReceiver::DisableVad() {
- neteq_->DisableVad();
- CriticalSectionScoped lock(crit_sect_.get());
- vad_enabled_ = false;
-}
-
-void AcmReceiver::FlushBuffers() {
- neteq_->FlushBuffers();
-}
-
-// If failed in removing one of the codecs, this method continues to remove as
-// many as it can.
-int AcmReceiver::RemoveAllCodecs() {
- int ret_val = 0;
- CriticalSectionScoped lock(crit_sect_.get());
- for (auto it = decoders_.begin(); it != decoders_.end(); ) {
- auto cur = it;
- ++it; // it will be valid even if we erase cur
- if (neteq_->RemovePayloadType(cur->second.payload_type) == 0) {
- decoders_.erase(cur);
- } else {
- LOG_F(LS_ERROR) << "Cannot remove payload "
- << static_cast<int>(cur->second.payload_type);
- ret_val = -1;
- }
- }
-
-  // No codecs are registered; invalidate the last audio decoder.
- last_audio_decoder_ = nullptr;
- return ret_val;
-}
-
-int AcmReceiver::RemoveCodec(uint8_t payload_type) {
- CriticalSectionScoped lock(crit_sect_.get());
- auto it = decoders_.find(payload_type);
- if (it == decoders_.end()) { // Such a payload-type is not registered.
- return 0;
- }
- if (neteq_->RemovePayloadType(payload_type) != NetEq::kOK) {
- LOG(LERROR) << "AcmReceiver::RemoveCodec" << static_cast<int>(payload_type);
- return -1;
- }
- if (last_audio_decoder_ == &it->second)
- last_audio_decoder_ = nullptr;
- decoders_.erase(it);
- return 0;
-}
-
-void AcmReceiver::set_id(int id) {
- CriticalSectionScoped lock(crit_sect_.get());
- id_ = id;
-}
-
-bool AcmReceiver::GetPlayoutTimestamp(uint32_t* timestamp) {
- if (av_sync_) {
- assert(initial_delay_manager_.get());
- if (initial_delay_manager_->buffering()) {
- return initial_delay_manager_->GetPlayoutTimestamp(timestamp);
- }
- }
- return neteq_->GetPlayoutTimestamp(timestamp);
-}
-
-int AcmReceiver::last_audio_codec_id() const {
- CriticalSectionScoped lock(crit_sect_.get());
- return last_audio_decoder_ ? last_audio_decoder_->acm_codec_id : -1;
-}
-
-int AcmReceiver::RedPayloadType() const {
- const auto red_index =
- RentACodec::CodecIndexFromId(RentACodec::CodecId::kRED);
- if (red_index) {
- CriticalSectionScoped lock(crit_sect_.get());
- for (const auto& decoder_pair : decoders_) {
- const Decoder& decoder = decoder_pair.second;
- if (decoder.acm_codec_id == *red_index)
- return decoder.payload_type;
- }
- }
- LOG(WARNING) << "RED is not registered.";
- return -1;
-}
-
-int AcmReceiver::LastAudioCodec(CodecInst* codec) const {
- CriticalSectionScoped lock(crit_sect_.get());
- if (!last_audio_decoder_) {
- return -1;
- }
- memcpy(codec, &ACMCodecDB::database_[last_audio_decoder_->acm_codec_id],
- sizeof(CodecInst));
- codec->pltype = last_audio_decoder_->payload_type;
- codec->channels = last_audio_decoder_->channels;
- codec->plfreq = last_audio_decoder_->sample_rate_hz;
- return 0;
-}
-
-void AcmReceiver::GetNetworkStatistics(NetworkStatistics* acm_stat) {
- NetEqNetworkStatistics neteq_stat;
- // NetEq function always returns zero, so we don't check the return value.
- neteq_->NetworkStatistics(&neteq_stat);
-
- acm_stat->currentBufferSize = neteq_stat.current_buffer_size_ms;
- acm_stat->preferredBufferSize = neteq_stat.preferred_buffer_size_ms;
- acm_stat->jitterPeaksFound = neteq_stat.jitter_peaks_found ? true : false;
- acm_stat->currentPacketLossRate = neteq_stat.packet_loss_rate;
- acm_stat->currentDiscardRate = neteq_stat.packet_discard_rate;
- acm_stat->currentExpandRate = neteq_stat.expand_rate;
- acm_stat->currentSpeechExpandRate = neteq_stat.speech_expand_rate;
- acm_stat->currentPreemptiveRate = neteq_stat.preemptive_rate;
- acm_stat->currentAccelerateRate = neteq_stat.accelerate_rate;
- acm_stat->currentSecondaryDecodedRate = neteq_stat.secondary_decoded_rate;
- acm_stat->clockDriftPPM = neteq_stat.clockdrift_ppm;
- acm_stat->addedSamples = neteq_stat.added_zero_samples;
- acm_stat->meanWaitingTimeMs = neteq_stat.mean_waiting_time_ms;
- acm_stat->medianWaitingTimeMs = neteq_stat.median_waiting_time_ms;
- acm_stat->minWaitingTimeMs = neteq_stat.min_waiting_time_ms;
- acm_stat->maxWaitingTimeMs = neteq_stat.max_waiting_time_ms;
-}
-
-int AcmReceiver::DecoderByPayloadType(uint8_t payload_type,
- CodecInst* codec) const {
- CriticalSectionScoped lock(crit_sect_.get());
- auto it = decoders_.find(payload_type);
- if (it == decoders_.end()) {
- LOG(LERROR) << "AcmReceiver::DecoderByPayloadType "
- << static_cast<int>(payload_type);
- return -1;
- }
- const Decoder& decoder = it->second;
- memcpy(codec, &ACMCodecDB::database_[decoder.acm_codec_id],
- sizeof(CodecInst));
- codec->pltype = decoder.payload_type;
- codec->channels = decoder.channels;
- codec->plfreq = decoder.sample_rate_hz;
- return 0;
-}
-
-int AcmReceiver::EnableNack(size_t max_nack_list_size) {
- neteq_->EnableNack(max_nack_list_size);
- return 0;
-}
-
-void AcmReceiver::DisableNack() {
- neteq_->DisableNack();
-}
-
-std::vector<uint16_t> AcmReceiver::GetNackList(
- int64_t round_trip_time_ms) const {
- return neteq_->GetNackList(round_trip_time_ms);
-}
-
-void AcmReceiver::ResetInitialDelay() {
- {
- CriticalSectionScoped lock(crit_sect_.get());
- av_sync_ = false;
- initial_delay_manager_.reset(NULL);
- missing_packets_sync_stream_.reset(NULL);
- late_packets_sync_stream_.reset(NULL);
- }
- neteq_->SetMinimumDelay(0);
- // TODO(turajs): Should NetEq Buffer be flushed?
-}
-
-// This function is called within critical section, no need to acquire a lock.
-bool AcmReceiver::GetSilence(int desired_sample_rate_hz, AudioFrame* frame) {
- assert(av_sync_);
- assert(initial_delay_manager_.get());
- if (!initial_delay_manager_->buffering()) {
- return false;
- }
-
-  // We stop accumulating packets if the number of packets in the buffer
-  // exceeds a threshold (90% of the maximum).
- int num_packets;
- int max_num_packets;
- const float kBufferingThresholdScale = 0.9f;
- neteq_->PacketBufferStatistics(&num_packets, &max_num_packets);
- if (num_packets > max_num_packets * kBufferingThresholdScale) {
- initial_delay_manager_->DisableBuffering();
- return false;
- }
-
- // Update statistics.
- call_stats_.DecodedBySilenceGenerator();
-
-  // Set the values if we have already received a packet; otherwise use defaults.
- if (last_audio_decoder_) {
- current_sample_rate_hz_ =
- ACMCodecDB::database_[last_audio_decoder_->acm_codec_id].plfreq;
- frame->num_channels_ = last_audio_decoder_->channels;
- } else {
- frame->num_channels_ = 1;
- }
-
- // Set the audio frame's sampling frequency.
- if (desired_sample_rate_hz > 0) {
- frame->sample_rate_hz_ = desired_sample_rate_hz;
- } else {
- frame->sample_rate_hz_ = current_sample_rate_hz_;
- }
-
- frame->samples_per_channel_ =
- static_cast<size_t>(frame->sample_rate_hz_ / 100); // Always 10 ms.
- frame->speech_type_ = AudioFrame::kCNG;
- frame->vad_activity_ = AudioFrame::kVadPassive;
- size_t samples = frame->samples_per_channel_ * frame->num_channels_;
- memset(frame->data_, 0, samples * sizeof(int16_t));
- return true;
-}
-
-const AcmReceiver::Decoder* AcmReceiver::RtpHeaderToDecoder(
- const RTPHeader& rtp_header,
- const uint8_t* payload) const {
- auto it = decoders_.find(rtp_header.payloadType);
- const auto red_index =
- RentACodec::CodecIndexFromId(RentACodec::CodecId::kRED);
- if (red_index && // This ensures that RED is defined in WebRTC.
- it != decoders_.end() && it->second.acm_codec_id == *red_index) {
- // This is a RED packet, get the payload of the audio codec.
- it = decoders_.find(payload[0] & 0x7F);
- }
-
- // Check if the payload is registered.
- return it != decoders_.end() ? &it->second : nullptr;
-}
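
To make the RED branch above concrete: the first byte of a RED payload (RFC
2198) carries an F bit in the MSB and a payload type in the low 7 bits, so
masking with 0x7F extracts that payload type. For example, if
payload[0] == 0xE7 (0x80 | 103), the code looks up the decoder registered for
payload type 103.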
-
-uint32_t AcmReceiver::NowInTimestamp(int decoder_sampling_rate) const {
-  // Truncate the time to (32-6) bits, since we only care about the least
-  // significant bits. (32-6) bits cover 2^(32-6) = 67108864 ms. Masking off
-  // the 6 most significant bits of the 32-bit value ensures there is no
-  // overflow in the conversion from milliseconds to timestamp.
- const uint32_t now_in_ms = static_cast<uint32_t>(
- clock_->TimeInMilliseconds() & 0x03ffffff);
- return static_cast<uint32_t>(
- (decoder_sampling_rate / 1000) * now_in_ms);
-}
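
A worked example of the conversion above: with a 16 kHz decoder, the returned
value is 16 * now_in_ms. Since now_in_ms < 2^26 after masking and 16 = 2^4,
the product stays below 2^30 and cannot overflow the 32-bit result.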
-
-// This function only interacts with |neteq_|; therefore, it does not have to
-// be within AcmReceiver's critical section. It inserts packets into NetEq, so
-// we call it while |decode_lock_| is acquired. However, this is not
-// essential, as sync-packets do not interact with codecs (especially BWE).
-void AcmReceiver::InsertStreamOfSyncPackets(
- InitialDelayManager::SyncStream* sync_stream) {
- assert(sync_stream);
- assert(av_sync_);
- for (int n = 0; n < sync_stream->num_sync_packets; ++n) {
- neteq_->InsertSyncPacket(sync_stream->rtp_info,
- sync_stream->receive_timestamp);
- ++sync_stream->rtp_info.header.sequenceNumber;
- sync_stream->rtp_info.header.timestamp += sync_stream->timestamp_step;
- sync_stream->receive_timestamp += sync_stream->timestamp_step;
- }
-}
-
-void AcmReceiver::GetDecodingCallStatistics(
- AudioDecodingCallStats* stats) const {
- CriticalSectionScoped lock(crit_sect_.get());
- *stats = call_stats_.GetDecodingStatistics();
-}
-
-} // namespace acm2
-
-} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/main/acm2/acm_receiver.h b/webrtc/modules/audio_coding/main/acm2/acm_receiver.h
deleted file mode 100644
index 4775b8c6d9..0000000000
--- a/webrtc/modules/audio_coding/main/acm2/acm_receiver.h
+++ /dev/null
@@ -1,348 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_RECEIVER_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_RECEIVER_H_
-
-#include <map>
-#include <vector>
-
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/base/thread_annotations.h"
-#include "webrtc/common_audio/vad/include/webrtc_vad.h"
-#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_codec_database.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_resampler.h"
-#include "webrtc/modules/audio_coding/main/acm2/call_statistics.h"
-#include "webrtc/modules/audio_coding/main/acm2/initial_delay_manager.h"
-#include "webrtc/modules/audio_coding/neteq/include/neteq.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-struct CodecInst;
-class CriticalSectionWrapper;
-class NetEq;
-
-namespace acm2 {
-
-class AcmReceiver {
- public:
- struct Decoder {
- int acm_codec_id;
- uint8_t payload_type;
- // This field is meaningful for codecs where both mono and
- // stereo versions are registered under the same ID.
- int channels;
- int sample_rate_hz;
- };
-
- // Constructor of the class
- explicit AcmReceiver(const AudioCodingModule::Config& config);
-
- // Destructor of the class.
- ~AcmReceiver();
-
- //
- // Inserts a payload with its associated RTP-header into NetEq.
- //
- // Input:
- // - rtp_header : RTP header for the incoming payload containing
- // information about payload type, sequence number,
- // timestamp, SSRC and marker bit.
- // - incoming_payload : Incoming audio payload.
- // - length_payload : Length of incoming audio payload in bytes.
- //
- // Return value : 0 if OK.
- // <0 if NetEq returned an error.
- //
- int InsertPacket(const WebRtcRTPHeader& rtp_header,
- const uint8_t* incoming_payload,
- size_t length_payload);
-
- //
- // Asks NetEq for 10 milliseconds of decoded audio.
- //
- // Input:
-  //   -desired_freq_hz    : specifies the sampling rate [Hz] of the output
-  //                         audio. If set to -1, no resampling is performed
-  //                         and the audio is returned at the sampling rate
-  //                         of the decoder.
- //
- // Output:
-  //   -audio_frame        : an audio frame where the output data and
-  //                         associated parameters are written to.
- //
- // Return value : 0 if OK.
- // -1 if NetEq returned an error.
- //
- int GetAudio(int desired_freq_hz, AudioFrame* audio_frame);
-
- //
- // Adds a new codec to the NetEq codec database.
- //
- // Input:
- // - acm_codec_id : ACM codec ID; -1 means external decoder.
- // - payload_type : payload type.
- // - sample_rate_hz : sample rate.
- // - audio_decoder : pointer to a decoder object. If it's null, then
- // NetEq will internally create a decoder object
- // based on the value of |acm_codec_id| (which
- // mustn't be -1). Otherwise, NetEq will use the
- // given decoder for the given payload type. NetEq
- // won't take ownership of the decoder; it's up to
- // the caller to delete it when it's no longer
- // needed.
- //
- // Providing an existing decoder object here is
- // necessary for external decoders, but may also be
- // used for built-in decoders if NetEq doesn't have
- // all the info it needs to construct them properly
- // (e.g. iSAC, where the decoder needs to be paired
- // with an encoder).
- //
- // Return value : 0 if OK.
- // <0 if NetEq returned an error.
- //
- int AddCodec(int acm_codec_id,
- uint8_t payload_type,
- int channels,
- int sample_rate_hz,
- AudioDecoder* audio_decoder);
-
- //
- // Sets a minimum delay for packet buffer. The given delay is maintained,
- // unless channel condition dictates a higher delay.
- //
- // Input:
- // - delay_ms : minimum delay in milliseconds.
- //
- // Return value : 0 if OK.
- // <0 if NetEq returned an error.
- //
- int SetMinimumDelay(int delay_ms);
-
- //
-  // Sets a maximum delay [ms] for the packet buffer. The target delay does
-  // not exceed the given value, even if channel conditions require a higher
-  // delay.
- //
- // Input:
- // - delay_ms : maximum delay in milliseconds.
- //
- // Return value : 0 if OK.
- // <0 if NetEq returned an error.
- //
- int SetMaximumDelay(int delay_ms);
-
- //
-  // Gets the least required delay, computed based on channel conditions. Note
-  // that this is before applying any user-defined limits (specified by calling
-  // SetMinimumDelay() and/or SetMaximumDelay()).
- //
- int LeastRequiredDelayMs() const;
-
- //
-  // Sets an initial delay of |delay_ms| milliseconds. This introduces a
-  // playout delay. Silence (zero signal) is played out until the equivalent
-  // of |delay_ms| milliseconds of audio is buffered. Then, NetEq maintains
-  // the delay.
- //
- // Input:
- // - delay_ms : initial delay in milliseconds.
- //
- // Return value : 0 if OK.
- // <0 if NetEq returned an error.
- //
- int SetInitialDelay(int delay_ms);
-
- //
- // Resets the initial delay to zero.
- //
- void ResetInitialDelay();
-
- //
- // Get the current sampling frequency in Hz.
- //
- // Return value : Sampling frequency in Hz.
- //
- int current_sample_rate_hz() const;
-
- //
- // Get the current network statistics from NetEq.
- //
- // Output:
- // - statistics : The current network statistics.
- //
- void GetNetworkStatistics(NetworkStatistics* statistics);
-
- //
- // Enable post-decoding VAD.
- //
- void EnableVad();
-
- //
- // Disable post-decoding VAD.
- //
- void DisableVad();
-
- //
- // Returns whether post-decoding VAD is enabled (true) or disabled (false).
- //
- bool vad_enabled() const { return vad_enabled_; }
-
- //
- // Flushes the NetEq packet and speech buffers.
- //
- void FlushBuffers();
-
- //
- // Removes a payload-type from the NetEq codec database.
- //
- // Input:
- // - payload_type : the payload-type to be removed.
- //
- // Return value : 0 if OK.
- // -1 if an error occurred.
- //
- int RemoveCodec(uint8_t payload_type);
-
- //
- // Remove all registered codecs.
- //
- int RemoveAllCodecs();
-
- //
- // Set ID.
- //
- void set_id(int id); // TODO(turajs): can be inline.
-
- //
- // Gets the RTP timestamp of the last sample delivered by GetAudio().
- // Returns true if the RTP timestamp is valid, otherwise false.
- //
- bool GetPlayoutTimestamp(uint32_t* timestamp);
-
- //
-  // Returns the index of the codec associated with the last received
-  // non-CNG/non-DTMF payload. If no such payload has been received, -1 is
-  // returned.
- //
- int last_audio_codec_id() const; // TODO(turajs): can be inline.
-
- //
-  // Gets the audio codec associated with the last received non-CNG/non-DTMF
-  // payload. If no such packet has been received, -1 is returned; otherwise
-  // returns 0.
- //
- int LastAudioCodec(CodecInst* codec) const;
-
- //
-  // Returns the payload type of RED if it is registered, otherwise -1.
- //
- int RedPayloadType() const;
-
- //
- // Get a decoder given its registered payload-type.
- //
- // Input:
- // -payload_type : the payload-type of the codec to be retrieved.
- //
- // Output:
- // -codec : codec associated with the given payload-type.
- //
- // Return value : 0 if succeeded.
- // -1 if failed, e.g. given payload-type is not
- // registered.
- //
- int DecoderByPayloadType(uint8_t payload_type,
- CodecInst* codec) const;
-
- //
- // Enable NACK and set the maximum size of the NACK list. If NACK is already
- // enabled then the maximum NACK list size is modified accordingly.
- //
- // Input:
-  //   -max_nack_list_size  : maximum NACK list size; should be positive
-  //                          (non-zero) and less than or equal to
-  //                          |Nack::kNackListSizeLimit|.
- // Return value
- // : 0 if succeeded.
- // -1 if failed
- //
- int EnableNack(size_t max_nack_list_size);
-
- // Disable NACK.
- void DisableNack();
-
- //
- // Get a list of packets to be retransmitted.
- //
- // Input:
- // -round_trip_time_ms : estimate of the round-trip-time (in milliseconds).
- // Return value : list of packets to be retransmitted.
- //
- std::vector<uint16_t> GetNackList(int64_t round_trip_time_ms) const;
-
- //
- // Get statistics of calls to GetAudio().
- void GetDecodingCallStatistics(AudioDecodingCallStats* stats) const;
-
- private:
- bool GetSilence(int desired_sample_rate_hz, AudioFrame* frame)
- EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
-
-  int GetNumSyncPacketToInsert(uint16_t received_sequence_number);
-
- const Decoder* RtpHeaderToDecoder(const RTPHeader& rtp_header,
- const uint8_t* payload) const
- EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
-
- uint32_t NowInTimestamp(int decoder_sampling_rate) const;
-
- void InsertStreamOfSyncPackets(InitialDelayManager::SyncStream* sync_stream);
-
- rtc::scoped_ptr<CriticalSectionWrapper> crit_sect_;
- int id_; // TODO(henrik.lundin) Make const.
- const Decoder* last_audio_decoder_ GUARDED_BY(crit_sect_);
- AudioFrame::VADActivity previous_audio_activity_ GUARDED_BY(crit_sect_);
- int current_sample_rate_hz_ GUARDED_BY(crit_sect_);
- ACMResampler resampler_ GUARDED_BY(crit_sect_);
-  // Used in GetAudio; declared as a member to avoid allocating every 10 ms.
- // TODO(henrik.lundin) Stack-allocate in GetAudio instead?
- rtc::scoped_ptr<int16_t[]> audio_buffer_ GUARDED_BY(crit_sect_);
- rtc::scoped_ptr<int16_t[]> last_audio_buffer_ GUARDED_BY(crit_sect_);
- CallStatistics call_stats_ GUARDED_BY(crit_sect_);
- NetEq* neteq_;
-  // The decoders map is keyed by payload type.
- std::map<uint8_t, Decoder> decoders_ GUARDED_BY(crit_sect_);
- bool vad_enabled_;
- Clock* clock_; // TODO(henrik.lundin) Make const if possible.
- bool resampled_last_output_frame_ GUARDED_BY(crit_sect_);
-
- // Indicates if a non-zero initial delay is set, and the receiver is in
- // AV-sync mode.
- bool av_sync_;
- rtc::scoped_ptr<InitialDelayManager> initial_delay_manager_;
-
-  // The following are defined as members to avoid creating them in every
-  // iteration. |missing_packets_sync_stream_| is *ONLY* used in
-  // InsertPacket(). |late_packets_sync_stream_| is only used in GetAudio().
-  // Both of these member variables are allocated only when AV-sync is
-  // enabled, i.e. when an initial delay is set.
- rtc::scoped_ptr<InitialDelayManager::SyncStream> missing_packets_sync_stream_;
- rtc::scoped_ptr<InitialDelayManager::SyncStream> late_packets_sync_stream_;
-};
-
-} // namespace acm2
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_RECEIVER_H_
diff --git a/webrtc/modules/audio_coding/main/acm2/acm_receiver_unittest_oldapi.cc b/webrtc/modules/audio_coding/main/acm2/acm_receiver_unittest_oldapi.cc
deleted file mode 100644
index f0caacce10..0000000000
--- a/webrtc/modules/audio_coding/main/acm2/acm_receiver_unittest_oldapi.cc
+++ /dev/null
@@ -1,368 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/acm2/acm_receiver.h"
-
-#include <algorithm> // std::min
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module.h"
-#include "webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_codec_database.h"
-#include "webrtc/modules/audio_coding/neteq/tools/rtp_generator.h"
-#include "webrtc/system_wrappers/include/clock.h"
-#include "webrtc/test/test_suite.h"
-#include "webrtc/test/testsupport/fileutils.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
-
-namespace webrtc {
-
-namespace acm2 {
-namespace {
-
-bool CodecsEqual(const CodecInst& codec_a, const CodecInst& codec_b) {
- if (strcmp(codec_a.plname, codec_b.plname) != 0 ||
- codec_a.plfreq != codec_b.plfreq ||
- codec_a.pltype != codec_b.pltype ||
- codec_b.channels != codec_a.channels)
- return false;
- return true;
-}
-
-struct CodecIdInst {
- explicit CodecIdInst(RentACodec::CodecId codec_id) {
- const auto codec_ix = RentACodec::CodecIndexFromId(codec_id);
- EXPECT_TRUE(codec_ix);
- id = *codec_ix;
- const auto codec_inst = RentACodec::CodecInstById(codec_id);
- EXPECT_TRUE(codec_inst);
- inst = *codec_inst;
- }
- int id;
- CodecInst inst;
-};
-
-} // namespace
-
-class AcmReceiverTestOldApi : public AudioPacketizationCallback,
- public ::testing::Test {
- protected:
- AcmReceiverTestOldApi()
- : timestamp_(0),
- packet_sent_(false),
- last_packet_send_timestamp_(timestamp_),
- last_frame_type_(kEmptyFrame) {
- AudioCodingModule::Config config;
- acm_.reset(new AudioCodingModuleImpl(config));
- receiver_.reset(new AcmReceiver(config));
- }
-
- ~AcmReceiverTestOldApi() {}
-
- void SetUp() override {
- ASSERT_TRUE(receiver_.get() != NULL);
- ASSERT_TRUE(acm_.get() != NULL);
- codecs_ = RentACodec::Database();
-
- acm_->InitializeReceiver();
- acm_->RegisterTransportCallback(this);
-
- rtp_header_.header.sequenceNumber = 0;
- rtp_header_.header.timestamp = 0;
- rtp_header_.header.markerBit = false;
- rtp_header_.header.ssrc = 0x12345678; // Arbitrary.
- rtp_header_.header.numCSRCs = 0;
- rtp_header_.header.payloadType = 0;
- rtp_header_.frameType = kAudioFrameSpeech;
- rtp_header_.type.Audio.isCNG = false;
- }
-
- void TearDown() override {}
-
- void InsertOnePacketOfSilence(int codec_id) {
- CodecInst codec;
- ACMCodecDB::Codec(codec_id, &codec);
- if (timestamp_ == 0) { // This is the first time inserting audio.
- ASSERT_EQ(0, acm_->RegisterSendCodec(codec));
- } else {
- CodecInst current_codec;
- ASSERT_EQ(0, acm_->SendCodec(&current_codec));
- if (!CodecsEqual(codec, current_codec))
- ASSERT_EQ(0, acm_->RegisterSendCodec(codec));
- }
- AudioFrame frame;
- // Frame setup according to the codec.
- frame.sample_rate_hz_ = codec.plfreq;
- frame.samples_per_channel_ = codec.plfreq / 100; // 10 ms.
- frame.num_channels_ = codec.channels;
- memset(frame.data_, 0, frame.samples_per_channel_ * frame.num_channels_ *
- sizeof(int16_t));
- packet_sent_ = false;
- last_packet_send_timestamp_ = timestamp_;
- while (!packet_sent_) {
- frame.timestamp_ = timestamp_;
- timestamp_ += frame.samples_per_channel_;
- ASSERT_GE(acm_->Add10MsData(frame), 0);
- }
- }
-
- template <size_t N>
- void AddSetOfCodecs(const RentACodec::CodecId(&ids)[N]) {
- for (auto id : ids) {
- const auto i = RentACodec::CodecIndexFromId(id);
- ASSERT_TRUE(i);
- ASSERT_EQ(
- 0, receiver_->AddCodec(*i, codecs_[*i].pltype, codecs_[*i].channels,
- codecs_[*i].plfreq, nullptr));
- }
- }
-
- int SendData(FrameType frame_type,
- uint8_t payload_type,
- uint32_t timestamp,
- const uint8_t* payload_data,
- size_t payload_len_bytes,
- const RTPFragmentationHeader* fragmentation) override {
- if (frame_type == kEmptyFrame)
- return 0;
-
- rtp_header_.header.payloadType = payload_type;
- rtp_header_.frameType = frame_type;
- if (frame_type == kAudioFrameSpeech)
- rtp_header_.type.Audio.isCNG = false;
- else
- rtp_header_.type.Audio.isCNG = true;
- rtp_header_.header.timestamp = timestamp;
-
- int ret_val = receiver_->InsertPacket(rtp_header_, payload_data,
- payload_len_bytes);
- if (ret_val < 0) {
- assert(false);
- return -1;
- }
- rtp_header_.header.sequenceNumber++;
- packet_sent_ = true;
- last_frame_type_ = frame_type;
- return 0;
- }
-
- rtc::scoped_ptr<AcmReceiver> receiver_;
- rtc::ArrayView<const CodecInst> codecs_;
- rtc::scoped_ptr<AudioCodingModule> acm_;
- WebRtcRTPHeader rtp_header_;
- uint32_t timestamp_;
-  bool packet_sent_;  // Set when SendData is called; reset when inserting audio.
- uint32_t last_packet_send_timestamp_;
- FrameType last_frame_type_;
-};
-
-TEST_F(AcmReceiverTestOldApi, DISABLED_ON_ANDROID(AddCodecGetCodec)) {
- // Add codec.
- for (size_t n = 0; n < codecs_.size(); ++n) {
- if (n & 0x1) // Just add codecs with odd index.
- EXPECT_EQ(0,
- receiver_->AddCodec(n, codecs_[n].pltype, codecs_[n].channels,
- codecs_[n].plfreq, NULL));
- }
- // Get codec and compare.
- for (size_t n = 0; n < codecs_.size(); ++n) {
- CodecInst my_codec;
- if (n & 0x1) {
- // Codecs with odd index should match the reference.
- EXPECT_EQ(0, receiver_->DecoderByPayloadType(codecs_[n].pltype,
- &my_codec));
- EXPECT_TRUE(CodecsEqual(codecs_[n], my_codec));
- } else {
- // Codecs with even index are not registered.
- EXPECT_EQ(-1, receiver_->DecoderByPayloadType(codecs_[n].pltype,
- &my_codec));
- }
- }
-}
-
-TEST_F(AcmReceiverTestOldApi, DISABLED_ON_ANDROID(AddCodecChangePayloadType)) {
- const CodecIdInst codec1(RentACodec::CodecId::kPCMA);
- CodecInst codec2 = codec1.inst;
- ++codec2.pltype;
- CodecInst test_codec;
-
- // Register the same codec with different payloads.
- EXPECT_EQ(0, receiver_->AddCodec(codec1.id, codec1.inst.pltype,
- codec1.inst.channels, codec1.inst.plfreq,
- nullptr));
- EXPECT_EQ(0, receiver_->AddCodec(codec1.id, codec2.pltype, codec2.channels,
- codec2.plfreq, NULL));
-
- // Both payload types should exist.
- EXPECT_EQ(0,
- receiver_->DecoderByPayloadType(codec1.inst.pltype, &test_codec));
- EXPECT_EQ(true, CodecsEqual(codec1.inst, test_codec));
- EXPECT_EQ(0, receiver_->DecoderByPayloadType(codec2.pltype, &test_codec));
- EXPECT_EQ(true, CodecsEqual(codec2, test_codec));
-}
-
-TEST_F(AcmReceiverTestOldApi, DISABLED_ON_ANDROID(AddCodecChangeCodecId)) {
- const CodecIdInst codec1(RentACodec::CodecId::kPCMU);
- CodecIdInst codec2(RentACodec::CodecId::kPCMA);
- codec2.inst.pltype = codec1.inst.pltype;
- CodecInst test_codec;
-
- // Register the same payload type with different codec ID.
- EXPECT_EQ(0, receiver_->AddCodec(codec1.id, codec1.inst.pltype,
- codec1.inst.channels, codec1.inst.plfreq,
- nullptr));
- EXPECT_EQ(0, receiver_->AddCodec(codec2.id, codec2.inst.pltype,
- codec2.inst.channels, codec2.inst.plfreq,
- nullptr));
-
- // Make sure that the last codec is used.
- EXPECT_EQ(0,
- receiver_->DecoderByPayloadType(codec2.inst.pltype, &test_codec));
- EXPECT_EQ(true, CodecsEqual(codec2.inst, test_codec));
-}
-
-TEST_F(AcmReceiverTestOldApi, DISABLED_ON_ANDROID(AddCodecRemoveCodec)) {
- const CodecIdInst codec(RentACodec::CodecId::kPCMA);
- const int payload_type = codec.inst.pltype;
- EXPECT_EQ(
- 0, receiver_->AddCodec(codec.id, codec.inst.pltype, codec.inst.channels,
- codec.inst.plfreq, nullptr));
-
-  // Removing a non-existing codec should not fail (ACM1 legacy behavior).
- EXPECT_EQ(0, receiver_->RemoveCodec(payload_type + 1));
-
- // Remove an existing codec.
- EXPECT_EQ(0, receiver_->RemoveCodec(payload_type));
-
-  // Asking for the removed codec must fail.
- CodecInst ci;
- EXPECT_EQ(-1, receiver_->DecoderByPayloadType(payload_type, &ci));
-}
-
-TEST_F(AcmReceiverTestOldApi, DISABLED_ON_ANDROID(SampleRate)) {
- const RentACodec::CodecId kCodecId[] = {RentACodec::CodecId::kISAC,
- RentACodec::CodecId::kISACSWB};
- AddSetOfCodecs(kCodecId);
-
- AudioFrame frame;
-  const int kOutSampleRateHz = 8000;  // Different from the codec sample rate.
- for (const auto codec_id : kCodecId) {
- const CodecIdInst codec(codec_id);
- const int num_10ms_frames = codec.inst.pacsize / (codec.inst.plfreq / 100);
- InsertOnePacketOfSilence(codec.id);
- for (int k = 0; k < num_10ms_frames; ++k) {
- EXPECT_EQ(0, receiver_->GetAudio(kOutSampleRateHz, &frame));
- }
- EXPECT_EQ(std::min(32000, codec.inst.plfreq),
- receiver_->current_sample_rate_hz());
- }
-}
-
-TEST_F(AcmReceiverTestOldApi, DISABLED_ON_ANDROID(PostdecodingVad)) {
- receiver_->EnableVad();
- EXPECT_TRUE(receiver_->vad_enabled());
- const CodecIdInst codec(RentACodec::CodecId::kPCM16Bwb);
- ASSERT_EQ(
- 0, receiver_->AddCodec(codec.id, codec.inst.pltype, codec.inst.channels,
- codec.inst.plfreq, nullptr));
- const int kNumPackets = 5;
- const int num_10ms_frames = codec.inst.pacsize / (codec.inst.plfreq / 100);
- AudioFrame frame;
- for (int n = 0; n < kNumPackets; ++n) {
- InsertOnePacketOfSilence(codec.id);
- for (int k = 0; k < num_10ms_frames; ++k)
- ASSERT_EQ(0, receiver_->GetAudio(codec.inst.plfreq, &frame));
- }
- EXPECT_EQ(AudioFrame::kVadPassive, frame.vad_activity_);
-
- receiver_->DisableVad();
- EXPECT_FALSE(receiver_->vad_enabled());
-
- for (int n = 0; n < kNumPackets; ++n) {
- InsertOnePacketOfSilence(codec.id);
- for (int k = 0; k < num_10ms_frames; ++k)
- ASSERT_EQ(0, receiver_->GetAudio(codec.inst.plfreq, &frame));
- }
- EXPECT_EQ(AudioFrame::kVadUnknown, frame.vad_activity_);
-}
-
-#ifdef WEBRTC_CODEC_ISAC
-#define IF_ISAC_FLOAT(x) x
-#else
-#define IF_ISAC_FLOAT(x) DISABLED_##x
-#endif
-
-TEST_F(AcmReceiverTestOldApi,
- DISABLED_ON_ANDROID(IF_ISAC_FLOAT(LastAudioCodec))) {
- const RentACodec::CodecId kCodecId[] = {
- RentACodec::CodecId::kISAC, RentACodec::CodecId::kPCMA,
- RentACodec::CodecId::kISACSWB, RentACodec::CodecId::kPCM16Bswb32kHz};
- AddSetOfCodecs(kCodecId);
-
- const RentACodec::CodecId kCngId[] = {
- // Not including full-band.
- RentACodec::CodecId::kCNNB, RentACodec::CodecId::kCNWB,
- RentACodec::CodecId::kCNSWB};
- AddSetOfCodecs(kCngId);
-
- // Register CNG at sender side.
- for (auto id : kCngId)
- ASSERT_EQ(0, acm_->RegisterSendCodec(CodecIdInst(id).inst));
-
- CodecInst codec;
- // No audio payload is received.
- EXPECT_EQ(-1, receiver_->LastAudioCodec(&codec));
-
- // Start by sending DTX.
- ASSERT_EQ(0, acm_->SetVAD(true, true, VADVeryAggr));
- packet_sent_ = false;
- // Testing with a single codec is enough.
- InsertOnePacketOfSilence(CodecIdInst(kCodecId[0]).id);
- ASSERT_TRUE(packet_sent_);
- EXPECT_EQ(kAudioFrameCN, last_frame_type_);
-
- // Only DTX has been received; the last audio codec is undefined.
- EXPECT_EQ(-1, receiver_->LastAudioCodec(&codec));
- EXPECT_EQ(-1, receiver_->last_audio_codec_id());
-
- for (auto id : kCodecId) {
- const CodecIdInst c(id);
-
- // Set DTX off to send audio payload.
- acm_->SetVAD(false, false, VADAggr);
- packet_sent_ = false;
- InsertOnePacketOfSilence(c.id);
-
- // Sanity check that an audio payload was actually received, and that it
- // is of type "speech."
- ASSERT_TRUE(packet_sent_);
- ASSERT_EQ(kAudioFrameSpeech, last_frame_type_);
- EXPECT_EQ(c.id, receiver_->last_audio_codec_id());
-
- // Set VAD on to send DTX. Then check that the last audio codec query
- // returns the expected codec.
- acm_->SetVAD(true, true, VADAggr);
-
- // Encode as many packets as needed until a DTX packet is sent.
- while (last_frame_type_ != kAudioFrameCN) {
- packet_sent_ = false;
- InsertOnePacketOfSilence(c.id);
- ASSERT_TRUE(packet_sent_);
- }
- EXPECT_EQ(c.id, receiver_->last_audio_codec_id());
- EXPECT_EQ(0, receiver_->LastAudioCodec(&codec));
- EXPECT_TRUE(CodecsEqual(c.inst, codec));
- }
-}
-
-} // namespace acm2
-
-} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/main/acm2/acm_resampler.cc b/webrtc/modules/audio_coding/main/acm2/acm_resampler.cc
deleted file mode 100644
index cbcad85f5b..0000000000
--- a/webrtc/modules/audio_coding/main/acm2/acm_resampler.cc
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/acm2/acm_resampler.h"
-
-#include <assert.h>
-#include <string.h>
-
-#include "webrtc/common_audio/resampler/include/resampler.h"
-#include "webrtc/system_wrappers/include/logging.h"
-
-namespace webrtc {
-namespace acm2 {
-
-ACMResampler::ACMResampler() {
-}
-
-ACMResampler::~ACMResampler() {
-}
-
-int ACMResampler::Resample10Msec(const int16_t* in_audio,
- int in_freq_hz,
- int out_freq_hz,
- int num_audio_channels,
- size_t out_capacity_samples,
- int16_t* out_audio) {
- size_t in_length = static_cast<size_t>(in_freq_hz * num_audio_channels / 100);
- int out_length = out_freq_hz * num_audio_channels / 100;
- if (in_freq_hz == out_freq_hz) {
- if (out_capacity_samples < in_length) {
- assert(false);
- return -1;
- }
- memcpy(out_audio, in_audio, in_length * sizeof(int16_t));
- return static_cast<int>(in_length / num_audio_channels);
- }
-
- if (resampler_.InitializeIfNeeded(in_freq_hz, out_freq_hz,
- num_audio_channels) != 0) {
- LOG_FERR3(LS_ERROR, InitializeIfNeeded, in_freq_hz, out_freq_hz,
- num_audio_channels);
- return -1;
- }
-
- out_length =
- resampler_.Resample(in_audio, in_length, out_audio, out_capacity_samples);
- if (out_length == -1) {
- LOG_FERR4(LS_ERROR,
- Resample,
- in_audio,
- in_length,
- out_audio,
- out_capacity_samples);
- return -1;
- }
-
- return out_length / num_audio_channels;
-}
-
-} // namespace acm2
-} // namespace webrtc
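
Note on usage: Resample10Msec operates on exactly one 10 ms block, so callers
size their buffers by the freq/100 convention visible above. A minimal sketch
(rates, channel count, and buffer names are illustrative, not from this tree):

  #include "webrtc/modules/audio_coding/main/acm2/acm_resampler.h"

  webrtc::acm2::ACMResampler resampler;
  const int kInHz = 48000;
  const int kOutHz = 16000;
  const int kChannels = 2;
  int16_t in[48000 / 100 * 2] = {0};  // 10 ms in: in_freq_hz / 100 per channel.
  int16_t out[16000 / 100 * 2];       // Room for 10 ms out at the target rate.
  const int samples_per_channel = resampler.Resample10Msec(
      in, kInHz, kOutHz, kChannels, sizeof(out) / sizeof(out[0]), out);
  // On success this returns kOutHz / 100 (here 160); -1 signals failure.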
diff --git a/webrtc/modules/audio_coding/main/acm2/acm_resampler.h b/webrtc/modules/audio_coding/main/acm2/acm_resampler.h
deleted file mode 100644
index a19b0c4569..0000000000
--- a/webrtc/modules/audio_coding/main/acm2/acm_resampler.h
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_RESAMPLER_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_RESAMPLER_H_
-
-#include "webrtc/common_audio/resampler/include/push_resampler.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-namespace acm2 {
-
-class ACMResampler {
- public:
- ACMResampler();
- ~ACMResampler();
-
- int Resample10Msec(const int16_t* in_audio,
- int in_freq_hz,
- int out_freq_hz,
- int num_audio_channels,
- size_t out_capacity_samples,
- int16_t* out_audio);
-
- private:
- PushResampler<int16_t> resampler_;
-};
-
-} // namespace acm2
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_RESAMPLER_H_
diff --git a/webrtc/modules/audio_coding/main/acm2/acm_send_test_oldapi.cc b/webrtc/modules/audio_coding/main/acm2/acm_send_test_oldapi.cc
deleted file mode 100644
index ac38dc011d..0000000000
--- a/webrtc/modules/audio_coding/main/acm2/acm_send_test_oldapi.cc
+++ /dev/null
@@ -1,158 +0,0 @@
-/*
- * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/acm2/acm_send_test_oldapi.h"
-
-#include <assert.h>
-#include <stdio.h>
-#include <string.h>
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/checks.h"
-#include "webrtc/modules/audio_coding/codecs/audio_encoder.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module.h"
-#include "webrtc/modules/audio_coding/neteq/tools/input_audio_file.h"
-#include "webrtc/modules/audio_coding/neteq/tools/packet.h"
-
-namespace webrtc {
-namespace test {
-
-AcmSendTestOldApi::AcmSendTestOldApi(InputAudioFile* audio_source,
- int source_rate_hz,
- int test_duration_ms)
- : clock_(0),
- acm_(webrtc::AudioCodingModule::Create(0, &clock_)),
- audio_source_(audio_source),
- source_rate_hz_(source_rate_hz),
- input_block_size_samples_(
- static_cast<size_t>(source_rate_hz_ * kBlockSizeMs / 1000)),
- codec_registered_(false),
- test_duration_ms_(test_duration_ms),
- frame_type_(kAudioFrameSpeech),
- payload_type_(0),
- timestamp_(0),
- sequence_number_(0) {
- input_frame_.sample_rate_hz_ = source_rate_hz_;
- input_frame_.num_channels_ = 1;
- input_frame_.samples_per_channel_ = input_block_size_samples_;
- assert(input_block_size_samples_ * input_frame_.num_channels_ <=
- AudioFrame::kMaxDataSizeSamples);
- acm_->RegisterTransportCallback(this);
-}
-
-bool AcmSendTestOldApi::RegisterCodec(const char* payload_name,
- int sampling_freq_hz,
- int channels,
- int payload_type,
- int frame_size_samples) {
- CodecInst codec;
- RTC_CHECK_EQ(0, AudioCodingModule::Codec(payload_name, &codec,
- sampling_freq_hz, channels));
- codec.pltype = payload_type;
- codec.pacsize = frame_size_samples;
- codec_registered_ = (acm_->RegisterSendCodec(codec) == 0);
- input_frame_.num_channels_ = channels;
- assert(input_block_size_samples_ * input_frame_.num_channels_ <=
- AudioFrame::kMaxDataSizeSamples);
- return codec_registered_;
-}
-
-bool AcmSendTestOldApi::RegisterExternalCodec(
- AudioEncoder* external_speech_encoder) {
- acm_->RegisterExternalSendCodec(external_speech_encoder);
- input_frame_.num_channels_ = external_speech_encoder->NumChannels();
- assert(input_block_size_samples_ * input_frame_.num_channels_ <=
- AudioFrame::kMaxDataSizeSamples);
- return codec_registered_ = true;
-}
-
-Packet* AcmSendTestOldApi::NextPacket() {
- assert(codec_registered_);
- if (filter_.test(static_cast<size_t>(payload_type_))) {
- // This payload type should be filtered out. Since the payload type is the
- // same throughout the whole test run, no packet at all will be delivered.
- // We can just as well signal that the test is over by returning NULL.
- return NULL;
- }
- // Insert audio and process until one packet is produced.
- while (clock_.TimeInMilliseconds() < test_duration_ms_) {
- clock_.AdvanceTimeMilliseconds(kBlockSizeMs);
- RTC_CHECK(
- audio_source_->Read(input_block_size_samples_, input_frame_.data_));
- if (input_frame_.num_channels_ > 1) {
- InputAudioFile::DuplicateInterleaved(input_frame_.data_,
- input_block_size_samples_,
- input_frame_.num_channels_,
- input_frame_.data_);
- }
- data_to_send_ = false;
- RTC_CHECK_GE(acm_->Add10MsData(input_frame_), 0);
- input_frame_.timestamp_ += static_cast<uint32_t>(input_block_size_samples_);
- if (data_to_send_) {
- // Encoded packet received.
- return CreatePacket();
- }
- }
- // Test ended.
- return NULL;
-}
-
-// This method receives the callback from ACM when a new packet is produced.
-int32_t AcmSendTestOldApi::SendData(
- FrameType frame_type,
- uint8_t payload_type,
- uint32_t timestamp,
- const uint8_t* payload_data,
- size_t payload_len_bytes,
- const RTPFragmentationHeader* fragmentation) {
- // Store the packet locally.
- frame_type_ = frame_type;
- payload_type_ = payload_type;
- timestamp_ = timestamp;
- last_payload_vec_.assign(payload_data, payload_data + payload_len_bytes);
- assert(last_payload_vec_.size() == payload_len_bytes);
- data_to_send_ = true;
- return 0;
-}
-
-Packet* AcmSendTestOldApi::CreatePacket() {
- const size_t kRtpHeaderSize = 12;
- size_t allocated_bytes = last_payload_vec_.size() + kRtpHeaderSize;
- uint8_t* packet_memory = new uint8_t[allocated_bytes];
- // Populate the header bytes.
- packet_memory[0] = 0x80;
- packet_memory[1] = static_cast<uint8_t>(payload_type_);
- packet_memory[2] = (sequence_number_ >> 8) & 0xFF;
- packet_memory[3] = (sequence_number_) & 0xFF;
- packet_memory[4] = (timestamp_ >> 24) & 0xFF;
- packet_memory[5] = (timestamp_ >> 16) & 0xFF;
- packet_memory[6] = (timestamp_ >> 8) & 0xFF;
- packet_memory[7] = timestamp_ & 0xFF;
- // Set SSRC to 0x12345678.
- packet_memory[8] = 0x12;
- packet_memory[9] = 0x34;
- packet_memory[10] = 0x56;
- packet_memory[11] = 0x78;
-
- ++sequence_number_;
-
- // Copy the payload data.
- memcpy(packet_memory + kRtpHeaderSize,
- &last_payload_vec_[0],
- last_payload_vec_.size());
- Packet* packet =
- new Packet(packet_memory, allocated_bytes, clock_.TimeInMilliseconds());
- assert(packet);
- assert(packet->valid_header());
- return packet;
-}
-
-} // namespace test
-} // namespace webrtc
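
The 12 header bytes written in CreatePacket() form a fixed RTP header:
version 2, no padding/extension/CSRCs, marker bit cleared, and all multi-byte
fields in network byte order. A hedged sketch of the same packing (the values
below are illustrative):

  #include <stdint.h>

  const uint8_t payload_type = 111;
  const uint16_t seq = 1;
  const uint32_t ts = 160;
  uint8_t hdr[12];
  hdr[0] = 0x80;               // V=2, P=0, X=0, CC=0.
  hdr[1] = payload_type;       // M=0.
  hdr[2] = (seq >> 8) & 0xFF;  // Sequence number, big-endian.
  hdr[3] = seq & 0xFF;
  hdr[4] = (ts >> 24) & 0xFF;  // Timestamp, big-endian.
  hdr[5] = (ts >> 16) & 0xFF;
  hdr[6] = (ts >> 8) & 0xFF;
  hdr[7] = ts & 0xFF;
  hdr[8] = 0x12;               // SSRC 0x12345678, matching the test code.
  hdr[9] = 0x34;
  hdr[10] = 0x56;
  hdr[11] = 0x78;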
diff --git a/webrtc/modules/audio_coding/main/acm2/acm_send_test_oldapi.h b/webrtc/modules/audio_coding/main/acm2/acm_send_test_oldapi.h
deleted file mode 100644
index 3e65ec6c2d..0000000000
--- a/webrtc/modules/audio_coding/main/acm2/acm_send_test_oldapi.h
+++ /dev/null
@@ -1,91 +0,0 @@
-/*
- * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_SEND_TEST_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_SEND_TEST_H_
-
-#include <vector>
-
-#include "webrtc/base/constructormagic.h"
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module.h"
-#include "webrtc/modules/audio_coding/neteq/tools/packet_source.h"
-#include "webrtc/system_wrappers/include/clock.h"
-
-namespace webrtc {
-class AudioEncoder;
-
-namespace test {
-class InputAudioFile;
-class Packet;
-
-class AcmSendTestOldApi : public AudioPacketizationCallback,
- public PacketSource {
- public:
- AcmSendTestOldApi(InputAudioFile* audio_source,
- int source_rate_hz,
- int test_duration_ms);
- virtual ~AcmSendTestOldApi() {}
-
- // Registers the send codec. Returns true on success, false otherwise.
- bool RegisterCodec(const char* payload_name,
- int sampling_freq_hz,
- int channels,
- int payload_type,
- int frame_size_samples);
-
- // Registers an external send codec. Returns true on success, false otherwise.
- bool RegisterExternalCodec(AudioEncoder* external_speech_encoder);
-
- // Returns the next encoded packet. Returns NULL if the test duration was
- // exceeded. Ownership of the packet is handed over to the caller.
- // Inherited from PacketSource.
- Packet* NextPacket();
-
- // Inherited from AudioPacketizationCallback.
- int32_t SendData(FrameType frame_type,
- uint8_t payload_type,
- uint32_t timestamp,
- const uint8_t* payload_data,
- size_t payload_len_bytes,
- const RTPFragmentationHeader* fragmentation) override;
-
- AudioCodingModule* acm() { return acm_.get(); }
-
- private:
- static const int kBlockSizeMs = 10;
-
- // Creates a Packet object from the last packet produced by ACM (and received
- // through the SendData method as a callback). Ownership of the new Packet
- // object is transferred to the caller.
- Packet* CreatePacket();
-
- SimulatedClock clock_;
- rtc::scoped_ptr<AudioCodingModule> acm_;
- InputAudioFile* audio_source_;
- int source_rate_hz_;
- const size_t input_block_size_samples_;
- AudioFrame input_frame_;
- bool codec_registered_;
- int test_duration_ms_;
- // The following member variables are set whenever SendData() is called.
- FrameType frame_type_;
- int payload_type_;
- uint32_t timestamp_;
- uint16_t sequence_number_;
- std::vector<uint8_t> last_payload_vec_;
- bool data_to_send_;
-
- RTC_DISALLOW_COPY_AND_ASSIGN(AcmSendTestOldApi);
-};
-
-} // namespace test
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_SEND_TEST_H_
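
A typical driver loop for this class, sketched under the assumption that an
InputAudioFile named |audio| is available (the codec parameters below are
illustrative):

  // 16 kHz source, 10-second test.
  webrtc::test::AcmSendTestOldApi send_test(&audio, 16000, 10000);
  // PCMU, 8 kHz, mono, payload type 0, 20 ms frames (160 samples).
  if (!send_test.RegisterCodec("PCMU", 8000, 1, 0, 160))
    return;  // Registration failed; abort.
  while (webrtc::test::Packet* packet = send_test.NextPacket()) {
    // Consume the packet; NextPacket() transfers ownership to the caller.
    delete packet;
  }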
diff --git a/webrtc/modules/audio_coding/main/acm2/audio_coding_module.cc b/webrtc/modules/audio_coding/main/acm2/audio_coding_module.cc
deleted file mode 100644
index 77ee0f2789..0000000000
--- a/webrtc/modules/audio_coding/main/acm2/audio_coding_module.cc
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module.h"
-
-#include "webrtc/base/checks.h"
-#include "webrtc/common_types.h"
-#include "webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.h"
-#include "webrtc/modules/audio_coding/main/acm2/rent_a_codec.h"
-#include "webrtc/system_wrappers/include/clock.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-namespace webrtc {
-
-// Create module
-AudioCodingModule* AudioCodingModule::Create(int id) {
- Config config;
- config.id = id;
- config.clock = Clock::GetRealTimeClock();
- return Create(config);
-}
-
-AudioCodingModule* AudioCodingModule::Create(int id, Clock* clock) {
- Config config;
- config.id = id;
- config.clock = clock;
- return Create(config);
-}
-
-AudioCodingModule* AudioCodingModule::Create(const Config& config) {
- return new acm2::AudioCodingModuleImpl(config);
-}
-
-int AudioCodingModule::NumberOfCodecs() {
- return static_cast<int>(acm2::RentACodec::NumberOfCodecs());
-}
-
-int AudioCodingModule::Codec(int list_id, CodecInst* codec) {
- auto codec_id = acm2::RentACodec::CodecIdFromIndex(list_id);
- if (!codec_id)
- return -1;
- auto ci = acm2::RentACodec::CodecInstById(*codec_id);
- if (!ci)
- return -1;
- *codec = *ci;
- return 0;
-}
-
-int AudioCodingModule::Codec(const char* payload_name,
- CodecInst* codec,
- int sampling_freq_hz,
- int channels) {
- rtc::Maybe<CodecInst> ci = acm2::RentACodec::CodecInstByParams(
- payload_name, sampling_freq_hz, channels);
- if (ci) {
- *codec = *ci;
- return 0;
- } else {
- // We couldn't find a matching codec, so set the parameters to unacceptable
- // values and return.
- codec->plname[0] = '\0';
- codec->pltype = -1;
- codec->pacsize = 0;
- codec->rate = 0;
- codec->plfreq = 0;
- return -1;
- }
-}
-
-int AudioCodingModule::Codec(const char* payload_name,
- int sampling_freq_hz,
- int channels) {
- rtc::Maybe<acm2::RentACodec::CodecId> ci = acm2::RentACodec::CodecIdByParams(
- payload_name, sampling_freq_hz, channels);
- if (!ci)
- return -1;
- rtc::Maybe<int> i = acm2::RentACodec::CodecIndexFromId(*ci);
- return i ? *i : -1;
-}
-
-// Checks the validity of the parameters of the given codec
-bool AudioCodingModule::IsCodecValid(const CodecInst& codec) {
- bool valid = acm2::RentACodec::IsCodecValid(codec);
- if (!valid)
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, -1,
- "Invalid codec setting");
- return valid;
-}
-
-} // namespace webrtc
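
A small sketch of the lookup these static helpers provide (the parameters
are illustrative):

  webrtc::CodecInst codec;
  // Fills |codec| from the RentACodec database; on failure it returns -1
  // and resets the fields to unacceptable values, as shown above.
  if (webrtc::AudioCodingModule::Codec("PCMU", &codec, 8000, 1) == 0) {
    // codec.pltype, codec.plfreq and codec.pacsize now hold the defaults.
  }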
diff --git a/webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.cc b/webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.cc
deleted file mode 100644
index b36c064800..0000000000
--- a/webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.cc
+++ /dev/null
@@ -1,802 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.h"
-
-#include <assert.h>
-#include <stdlib.h>
-#include <vector>
-
-#include "webrtc/base/checks.h"
-#include "webrtc/base/safe_conversions.h"
-#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module_typedefs.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_resampler.h"
-#include "webrtc/modules/audio_coding/main/acm2/call_statistics.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/logging.h"
-#include "webrtc/system_wrappers/include/metrics.h"
-#include "webrtc/system_wrappers/include/rw_lock_wrapper.h"
-#include "webrtc/system_wrappers/include/trace.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-namespace acm2 {
-
-namespace {
-
-// TODO(turajs): the same functionality is used in NetEq. If both classes
-// need it, make it a static function in ACMCodecDB.
-bool IsCodecRED(const CodecInst& codec) {
- return (STR_CASE_CMP(codec.plname, "RED") == 0);
-}
-
-bool IsCodecCN(const CodecInst& codec) {
- return (STR_CASE_CMP(codec.plname, "CN") == 0);
-}
-
-// Stereo-to-mono down-mix; can be used in-place.
-int DownMix(const AudioFrame& frame,
- size_t length_out_buff,
- int16_t* out_buff) {
- if (length_out_buff < frame.samples_per_channel_) {
- return -1;
- }
- for (size_t n = 0; n < frame.samples_per_channel_; ++n)
- out_buff[n] = (frame.data_[2 * n] + frame.data_[2 * n + 1]) >> 1;
- return 0;
-}
-
-// Mono-to-stereo up-mix; can be used in-place.
-int UpMix(const AudioFrame& frame, size_t length_out_buff, int16_t* out_buff) {
- if (length_out_buff < frame.samples_per_channel_) {
- return -1;
- }
- for (size_t n = frame.samples_per_channel_; n != 0; --n) {
- size_t i = n - 1;
- int16_t sample = frame.data_[i];
- out_buff[2 * i + 1] = sample;
- out_buff[2 * i] = sample;
- }
- return 0;
-}
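-
-// Illustration of the two helpers above: DownMix averages interleaved
-// stereo pairs, e.g. {10, 20, 30, 50} -> {15, 40}; UpMix duplicates each
-// mono sample and fills the buffer back-to-front, which is why both can
-// run in-place without overwriting a sample before it is read:
-//   const int16_t stereo[4] = {10, 20, 30, 50};  // L0, R0, L1, R1.
-//   int16_t mono[2];
-//   for (size_t n = 0; n < 2; ++n)
-//     mono[n] = (stereo[2 * n] + stereo[2 * n + 1]) >> 1;  // {15, 40}.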
-
-void ConvertEncodedInfoToFragmentationHeader(
- const AudioEncoder::EncodedInfo& info,
- RTPFragmentationHeader* frag) {
- if (info.redundant.empty()) {
- frag->fragmentationVectorSize = 0;
- return;
- }
-
- frag->VerifyAndAllocateFragmentationHeader(
- static_cast<uint16_t>(info.redundant.size()));
- frag->fragmentationVectorSize = static_cast<uint16_t>(info.redundant.size());
- size_t offset = 0;
- for (size_t i = 0; i < info.redundant.size(); ++i) {
- frag->fragmentationOffset[i] = offset;
- offset += info.redundant[i].encoded_bytes;
- frag->fragmentationLength[i] = info.redundant[i].encoded_bytes;
- frag->fragmentationTimeDiff[i] = rtc::checked_cast<uint16_t>(
- info.encoded_timestamp - info.redundant[i].encoded_timestamp);
- frag->fragmentationPlType[i] = info.redundant[i].payload_type;
- }
-}
-} // namespace
-
-void AudioCodingModuleImpl::ChangeLogger::MaybeLog(int value) {
- if (value != last_value_ || first_time_) {
- first_time_ = false;
- last_value_ = value;
- RTC_HISTOGRAM_COUNTS_100(histogram_name_, value);
- }
-}
-
-AudioCodingModuleImpl::AudioCodingModuleImpl(
- const AudioCodingModule::Config& config)
- : acm_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- id_(config.id),
- expected_codec_ts_(0xD87F3F9F),
- expected_in_ts_(0xD87F3F9F),
- receiver_(config),
- bitrate_logger_("WebRTC.Audio.TargetBitrateInKbps"),
- previous_pltype_(255),
- receiver_initialized_(false),
- first_10ms_data_(false),
- first_frame_(true),
- callback_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- packetization_callback_(NULL),
- vad_callback_(NULL) {
- if (InitializeReceiverSafe() < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot initialize receiver");
- }
- WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceAudioCoding, id_, "Created");
-}
-
-AudioCodingModuleImpl::~AudioCodingModuleImpl() = default;
-
-int32_t AudioCodingModuleImpl::Encode(const InputData& input_data) {
- AudioEncoder::EncodedInfo encoded_info;
- uint8_t previous_pltype;
-
- // Check that an encoder is registered before proceeding.
- if (!HaveValidEncoder("Process"))
- return -1;
-
- AudioEncoder* audio_encoder = codec_manager_.CurrentEncoder();
- // Scale the timestamp to the codec's RTP timestamp rate.
- uint32_t rtp_timestamp =
- first_frame_ ? input_data.input_timestamp
- : last_rtp_timestamp_ +
- rtc::CheckedDivExact(
- input_data.input_timestamp - last_timestamp_,
- static_cast<uint32_t>(rtc::CheckedDivExact(
- audio_encoder->SampleRateHz(),
- audio_encoder->RtpTimestampRateHz())));
- last_timestamp_ = input_data.input_timestamp;
- last_rtp_timestamp_ = rtp_timestamp;
- first_frame_ = false;
-
- encode_buffer_.SetSize(audio_encoder->MaxEncodedBytes());
- encoded_info = audio_encoder->Encode(
- rtp_timestamp, input_data.audio, input_data.length_per_channel,
- encode_buffer_.size(), encode_buffer_.data());
- encode_buffer_.SetSize(encoded_info.encoded_bytes);
- bitrate_logger_.MaybeLog(audio_encoder->GetTargetBitrate() / 1000);
- if (encode_buffer_.size() == 0 && !encoded_info.send_even_if_empty) {
- // Not enough data.
- return 0;
- }
- previous_pltype = previous_pltype_; // Read it while we have the critsect.
-
- RTPFragmentationHeader my_fragmentation;
- ConvertEncodedInfoToFragmentationHeader(encoded_info, &my_fragmentation);
- FrameType frame_type;
- if (encode_buffer_.size() == 0 && encoded_info.send_even_if_empty) {
- frame_type = kEmptyFrame;
- encoded_info.payload_type = previous_pltype;
- } else {
- RTC_DCHECK_GT(encode_buffer_.size(), 0u);
- frame_type = encoded_info.speech ? kAudioFrameSpeech : kAudioFrameCN;
- }
-
- {
- CriticalSectionScoped lock(callback_crit_sect_.get());
- if (packetization_callback_) {
- packetization_callback_->SendData(
- frame_type, encoded_info.payload_type, encoded_info.encoded_timestamp,
- encode_buffer_.data(), encode_buffer_.size(),
- my_fragmentation.fragmentationVectorSize > 0 ? &my_fragmentation
- : nullptr);
- }
-
- if (vad_callback_) {
- // Callback with VAD decision.
- vad_callback_->InFrameType(frame_type);
- }
- }
- previous_pltype_ = encoded_info.payload_type;
- return static_cast<int32_t>(encode_buffer_.size());
-}
-
-/////////////////////////////////////////
-// Sender
-//
-
-// Can be called multiple times for Codec, CNG, RED.
-int AudioCodingModuleImpl::RegisterSendCodec(const CodecInst& send_codec) {
- CriticalSectionScoped lock(acm_crit_sect_.get());
- return codec_manager_.RegisterEncoder(send_codec);
-}
-
-void AudioCodingModuleImpl::RegisterExternalSendCodec(
- AudioEncoder* external_speech_encoder) {
- CriticalSectionScoped lock(acm_crit_sect_.get());
- codec_manager_.RegisterEncoder(external_speech_encoder);
-}
-
-// Get current send codec.
-int AudioCodingModuleImpl::SendCodec(CodecInst* current_codec) const {
- CriticalSectionScoped lock(acm_crit_sect_.get());
- return codec_manager_.GetCodecInst(current_codec);
-}
-
-// Get current send frequency.
-int AudioCodingModuleImpl::SendFrequency() const {
- WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, id_,
- "SendFrequency()");
- CriticalSectionScoped lock(acm_crit_sect_.get());
-
- if (!codec_manager_.CurrentEncoder()) {
- WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, id_,
- "SendFrequency Failed, no codec is registered");
- return -1;
- }
-
- return codec_manager_.CurrentEncoder()->SampleRateHz();
-}
-
-void AudioCodingModuleImpl::SetBitRate(int bitrate_bps) {
- CriticalSectionScoped lock(acm_crit_sect_.get());
- if (codec_manager_.CurrentEncoder()) {
- codec_manager_.CurrentEncoder()->SetTargetBitrate(bitrate_bps);
- }
-}
-
-// Register a transport callback which will be called to deliver
-// the encoded buffers.
-int AudioCodingModuleImpl::RegisterTransportCallback(
- AudioPacketizationCallback* transport) {
- CriticalSectionScoped lock(callback_crit_sect_.get());
- packetization_callback_ = transport;
- return 0;
-}
-
-// Add 10 ms of raw (PCM) audio data to the encoder.
-int AudioCodingModuleImpl::Add10MsData(const AudioFrame& audio_frame) {
- InputData input_data;
- CriticalSectionScoped lock(acm_crit_sect_.get());
- int r = Add10MsDataInternal(audio_frame, &input_data);
- return r < 0 ? r : Encode(input_data);
-}
-
-int AudioCodingModuleImpl::Add10MsDataInternal(const AudioFrame& audio_frame,
- InputData* input_data) {
- if (audio_frame.samples_per_channel_ == 0) {
- assert(false);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot Add 10 ms audio, payload length is zero");
- return -1;
- }
-
- if (audio_frame.sample_rate_hz_ > 48000) {
- assert(false);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot Add 10 ms audio, input frequency not valid");
- return -1;
- }
-
- // Check that length and frequency match. We currently only support raw PCM.
- if (static_cast<size_t>(audio_frame.sample_rate_hz_ / 100) !=
- audio_frame.samples_per_channel_) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot Add 10 ms audio, input frequency and length doesn't"
- " match");
- return -1;
- }
-
- if (audio_frame.num_channels_ != 1 && audio_frame.num_channels_ != 2) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot Add 10 ms audio, invalid number of channels.");
- return -1;
- }
-
- // Do we have a codec registered?
- if (!HaveValidEncoder("Add10MsData")) {
- return -1;
- }
-
- const AudioFrame* ptr_frame;
- // Perform resampling, and also down-mix if it is required and can be
- // performed before resampling (a down-mix prior to resampling will take
- // place if both primary and secondary encoders are mono and the input is
- // stereo).
- if (PreprocessToAddData(audio_frame, &ptr_frame) < 0) {
- return -1;
- }
-
- // Check whether we need an up-mix or a down-mix.
- bool remix = ptr_frame->num_channels_ !=
- codec_manager_.CurrentEncoder()->NumChannels();
-
- if (remix) {
- if (ptr_frame->num_channels_ == 1) {
- if (UpMix(*ptr_frame, WEBRTC_10MS_PCM_AUDIO, input_data->buffer) < 0)
- return -1;
- } else {
- if (DownMix(*ptr_frame, WEBRTC_10MS_PCM_AUDIO, input_data->buffer) < 0)
- return -1;
- }
- }
-
- // When adding data to encoders, this pointer points to an audio buffer
- // with the correct number of channels.
- const int16_t* ptr_audio = ptr_frame->data_;
-
- // For pushing data to the primary encoder, point |ptr_audio| to the correct buffer.
- if (codec_manager_.CurrentEncoder()->NumChannels() !=
- ptr_frame->num_channels_)
- ptr_audio = input_data->buffer;
-
- input_data->input_timestamp = ptr_frame->timestamp_;
- input_data->audio = ptr_audio;
- input_data->length_per_channel = ptr_frame->samples_per_channel_;
- input_data->audio_channel = codec_manager_.CurrentEncoder()->NumChannels();
-
- return 0;
-}
-
-// Perform resampling and a down-mix if required. We down-mix only if the
-// encoder is mono and the input is stereo. In the case of dual-streaming,
-// both encoders have to be mono for the down-mix to take place.
-// |*ptr_out| will point to the pre-processed audio-frame. If no pre-processing
-// is required, |*ptr_out| points to |in_frame|.
-int AudioCodingModuleImpl::PreprocessToAddData(const AudioFrame& in_frame,
- const AudioFrame** ptr_out) {
- bool resample = (in_frame.sample_rate_hz_ !=
- codec_manager_.CurrentEncoder()->SampleRateHz());
-
- // This variable is true if the primary codec and the secondary codec (if
- // it exists) are both mono and the input is stereo.
- bool down_mix = (in_frame.num_channels_ == 2) &&
- (codec_manager_.CurrentEncoder()->NumChannels() == 1);
-
- if (!first_10ms_data_) {
- expected_in_ts_ = in_frame.timestamp_;
- expected_codec_ts_ = in_frame.timestamp_;
- first_10ms_data_ = true;
- } else if (in_frame.timestamp_ != expected_in_ts_) {
- // TODO(turajs): Do we need a warning here?
- expected_codec_ts_ +=
- (in_frame.timestamp_ - expected_in_ts_) *
- static_cast<uint32_t>(
- (static_cast<double>(
- codec_manager_.CurrentEncoder()->SampleRateHz()) /
- static_cast<double>(in_frame.sample_rate_hz_)));
- expected_in_ts_ = in_frame.timestamp_;
- }
-
-
- if (!down_mix && !resample) {
- // No pre-processing is required.
- expected_in_ts_ += static_cast<uint32_t>(in_frame.samples_per_channel_);
- expected_codec_ts_ += static_cast<uint32_t>(in_frame.samples_per_channel_);
- *ptr_out = &in_frame;
- return 0;
- }
-
- *ptr_out = &preprocess_frame_;
- preprocess_frame_.num_channels_ = in_frame.num_channels_;
- int16_t audio[WEBRTC_10MS_PCM_AUDIO];
- const int16_t* src_ptr_audio = in_frame.data_;
- int16_t* dest_ptr_audio = preprocess_frame_.data_;
- if (down_mix) {
- // If resampling is required, the output of the down-mix is written into a
- // local buffer; otherwise, it is written to the output frame.
- if (resample)
- dest_ptr_audio = audio;
- if (DownMix(in_frame, WEBRTC_10MS_PCM_AUDIO, dest_ptr_audio) < 0)
- return -1;
- preprocess_frame_.num_channels_ = 1;
- // Set the input of the resampler to the down-mixed signal.
- src_ptr_audio = audio;
- }
-
- preprocess_frame_.timestamp_ = expected_codec_ts_;
- preprocess_frame_.samples_per_channel_ = in_frame.samples_per_channel_;
- preprocess_frame_.sample_rate_hz_ = in_frame.sample_rate_hz_;
- // Resample if required.
- if (resample) {
- // The result of the resampler is written to output frame.
- dest_ptr_audio = preprocess_frame_.data_;
-
- int samples_per_channel = resampler_.Resample10Msec(
- src_ptr_audio, in_frame.sample_rate_hz_,
- codec_manager_.CurrentEncoder()->SampleRateHz(),
- preprocess_frame_.num_channels_, AudioFrame::kMaxDataSizeSamples,
- dest_ptr_audio);
-
- if (samples_per_channel < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot add 10 ms audio, resampling failed");
- return -1;
- }
- preprocess_frame_.samples_per_channel_ =
- static_cast<size_t>(samples_per_channel);
- preprocess_frame_.sample_rate_hz_ =
- codec_manager_.CurrentEncoder()->SampleRateHz();
- }
-
- expected_codec_ts_ +=
- static_cast<uint32_t>(preprocess_frame_.samples_per_channel_);
- expected_in_ts_ += static_cast<uint32_t>(in_frame.samples_per_channel_);
-
- return 0;
-}
-
-/////////////////////////////////////////
-// (RED) Redundant Coding
-//
-
-bool AudioCodingModuleImpl::REDStatus() const {
- CriticalSectionScoped lock(acm_crit_sect_.get());
- return codec_manager_.red_enabled();
-}
-
-// Configure RED status, i.e., on/off.
-int AudioCodingModuleImpl::SetREDStatus(
-#ifdef WEBRTC_CODEC_RED
- bool enable_red) {
- CriticalSectionScoped lock(acm_crit_sect_.get());
- return codec_manager_.SetCopyRed(enable_red) ? 0 : -1;
-#else
- bool /* enable_red */) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, id_,
- " WEBRTC_CODEC_RED is undefined");
- return -1;
-#endif
-}
-
-/////////////////////////////////////////
-// (FEC) Forward Error Correction (codec internal)
-//
-
-bool AudioCodingModuleImpl::CodecFEC() const {
- CriticalSectionScoped lock(acm_crit_sect_.get());
- return codec_manager_.codec_fec_enabled();
-}
-
-int AudioCodingModuleImpl::SetCodecFEC(bool enable_codec_fec) {
- CriticalSectionScoped lock(acm_crit_sect_.get());
- return codec_manager_.SetCodecFEC(enable_codec_fec);
-}
-
-int AudioCodingModuleImpl::SetPacketLossRate(int loss_rate) {
- CriticalSectionScoped lock(acm_crit_sect_.get());
- if (HaveValidEncoder("SetPacketLossRate")) {
- codec_manager_.CurrentEncoder()->SetProjectedPacketLossRate(loss_rate /
- 100.0);
- }
- return 0;
-}
-
-/////////////////////////////////////////
-// (VAD) Voice Activity Detection
-//
-int AudioCodingModuleImpl::SetVAD(bool enable_dtx,
- bool enable_vad,
- ACMVADMode mode) {
- // Note: |enable_vad| is not used; VAD is enabled based on the DTX setting.
- RTC_DCHECK_EQ(enable_dtx, enable_vad);
- CriticalSectionScoped lock(acm_crit_sect_.get());
- return codec_manager_.SetVAD(enable_dtx, mode);
-}
-
-// Get VAD/DTX settings.
-int AudioCodingModuleImpl::VAD(bool* dtx_enabled, bool* vad_enabled,
- ACMVADMode* mode) const {
- CriticalSectionScoped lock(acm_crit_sect_.get());
- codec_manager_.VAD(dtx_enabled, vad_enabled, mode);
- return 0;
-}
-
-/////////////////////////////////////////
-// Receiver
-//
-
-int AudioCodingModuleImpl::InitializeReceiver() {
- CriticalSectionScoped lock(acm_crit_sect_.get());
- return InitializeReceiverSafe();
-}
-
-// Initialize receiver, resets codec database etc.
-int AudioCodingModuleImpl::InitializeReceiverSafe() {
- // If the receiver is already initialized then we want to destroy any
- // existing decoders. After a call to this function, we should have a clean
- // start-up.
- if (receiver_initialized_) {
- if (receiver_.RemoveAllCodecs() < 0)
- return -1;
- }
- receiver_.set_id(id_);
- receiver_.ResetInitialDelay();
- receiver_.SetMinimumDelay(0);
- receiver_.SetMaximumDelay(0);
- receiver_.FlushBuffers();
-
- // Register RED and CN.
- auto db = RentACodec::Database();
- for (size_t i = 0; i < db.size(); i++) {
- if (IsCodecRED(db[i]) || IsCodecCN(db[i])) {
- if (receiver_.AddCodec(static_cast<int>(i),
- static_cast<uint8_t>(db[i].pltype), 1,
- db[i].plfreq, nullptr) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot register master codec.");
- return -1;
- }
- }
- }
- receiver_initialized_ = true;
- return 0;
-}
-
-// Get current receive frequency.
-int AudioCodingModuleImpl::ReceiveFrequency() const {
- WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, id_,
- "ReceiveFrequency()");
-
- CriticalSectionScoped lock(acm_crit_sect_.get());
-
- int codec_id = receiver_.last_audio_codec_id();
-
- return codec_id < 0 ? receiver_.current_sample_rate_hz() :
- ACMCodecDB::database_[codec_id].plfreq;
-}
-
-// Get current playout frequency.
-int AudioCodingModuleImpl::PlayoutFrequency() const {
- WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, id_,
- "PlayoutFrequency()");
-
- CriticalSectionScoped lock(acm_crit_sect_.get());
-
- return receiver_.current_sample_rate_hz();
-}
-
-// Register possible receive codecs, can be called multiple times,
-// for codecs, CNG (NB, WB and SWB), DTMF, RED.
-int AudioCodingModuleImpl::RegisterReceiveCodec(const CodecInst& codec) {
- CriticalSectionScoped lock(acm_crit_sect_.get());
- RTC_DCHECK(receiver_initialized_);
- if (codec.channels > 2 || codec.channels < 0) {
- LOG_F(LS_ERROR) << "Unsupported number of channels: " << codec.channels;
- return -1;
- }
-
- auto codec_id =
- RentACodec::CodecIdByParams(codec.plname, codec.plfreq, codec.channels);
- if (!codec_id) {
- LOG_F(LS_ERROR) << "Wrong codec params to be registered as receive codec";
- return -1;
- }
- auto codec_index = RentACodec::CodecIndexFromId(*codec_id);
- RTC_CHECK(codec_index) << "Invalid codec ID: " << static_cast<int>(*codec_id);
-
- // Check if the payload-type is valid.
- if (!ACMCodecDB::ValidPayloadType(codec.pltype)) {
- LOG_F(LS_ERROR) << "Invalid payload type " << codec.pltype << " for "
- << codec.plname;
- return -1;
- }
-
- // Get |decoder| associated with |codec|. |decoder| is NULL if |codec| does
- // not own its decoder.
- return receiver_.AddCodec(*codec_index, codec.pltype, codec.channels,
- codec.plfreq,
- codec_manager_.GetAudioDecoder(codec));
-}
-
-int AudioCodingModuleImpl::RegisterExternalReceiveCodec(
- int rtp_payload_type,
- AudioDecoder* external_decoder,
- int sample_rate_hz,
- int num_channels) {
- CriticalSectionScoped lock(acm_crit_sect_.get());
- RTC_DCHECK(receiver_initialized_);
- if (num_channels > 2 || num_channels < 0) {
- LOG_F(LS_ERROR) << "Unsupported number of channels: " << num_channels;
- return -1;
- }
-
- // Check if the payload-type is valid.
- if (!ACMCodecDB::ValidPayloadType(rtp_payload_type)) {
- LOG_F(LS_ERROR) << "Invalid payload-type " << rtp_payload_type
- << " for external decoder.";
- return -1;
- }
-
- return receiver_.AddCodec(-1 /* external */, rtp_payload_type, num_channels,
- sample_rate_hz, external_decoder);
-}
-
-// Get current received codec.
-int AudioCodingModuleImpl::ReceiveCodec(CodecInst* current_codec) const {
- CriticalSectionScoped lock(acm_crit_sect_.get());
- return receiver_.LastAudioCodec(current_codec);
-}
-
-// Incoming packet from network parsed and ready for decode.
-int AudioCodingModuleImpl::IncomingPacket(const uint8_t* incoming_payload,
- const size_t payload_length,
- const WebRtcRTPHeader& rtp_header) {
- return receiver_.InsertPacket(rtp_header, incoming_payload, payload_length);
-}
-
-// Minimum playout delay (Used for lip-sync).
-int AudioCodingModuleImpl::SetMinimumPlayoutDelay(int time_ms) {
- if ((time_ms < 0) || (time_ms > 10000)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Delay must be in the range of 0-1000 milliseconds.");
- return -1;
- }
- return receiver_.SetMinimumDelay(time_ms);
-}
-
-int AudioCodingModuleImpl::SetMaximumPlayoutDelay(int time_ms) {
- if ((time_ms < 0) || (time_ms > 10000)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Delay must be in the range of 0-1000 milliseconds.");
- return -1;
- }
- return receiver_.SetMaximumDelay(time_ms);
-}
-
-// Get 10 milliseconds of raw audio data to play out.
-// Automatically resamples to the requested frequency.
-int AudioCodingModuleImpl::PlayoutData10Ms(int desired_freq_hz,
- AudioFrame* audio_frame) {
- // GetAudio always returns 10 ms, at the requested sample rate.
- if (receiver_.GetAudio(desired_freq_hz, audio_frame) != 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "PlayoutData failed, RecOut Failed");
- return -1;
- }
- audio_frame->id_ = id_;
- return 0;
-}
-
-/////////////////////////////////////////
-// Statistics
-//
-
-// TODO(turajs) change the return value to void. Also change the corresponding
-// NetEq function.
-int AudioCodingModuleImpl::GetNetworkStatistics(NetworkStatistics* statistics) {
- receiver_.GetNetworkStatistics(statistics);
- return 0;
-}
-
-int AudioCodingModuleImpl::RegisterVADCallback(ACMVADCallback* vad_callback) {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, id_,
- "RegisterVADCallback()");
- CriticalSectionScoped lock(callback_crit_sect_.get());
- vad_callback_ = vad_callback;
- return 0;
-}
-
-// TODO(kwiberg): Remove this method, and have callers call IncomingPacket
-// instead. The translation logic and state belong with them, not with
-// AudioCodingModuleImpl.
-int AudioCodingModuleImpl::IncomingPayload(const uint8_t* incoming_payload,
- size_t payload_length,
- uint8_t payload_type,
- uint32_t timestamp) {
- // We do not acquire any lock when interacting with |aux_rtp_header_|, since
- // no other method uses this member variable.
- if (!aux_rtp_header_) {
- // This is the first time that we are using |aux_rtp_header_|,
- // so we have to create it.
- aux_rtp_header_.reset(new WebRtcRTPHeader);
- aux_rtp_header_->header.payloadType = payload_type;
- // Doesn't matter in this case.
- aux_rtp_header_->header.ssrc = 0;
- aux_rtp_header_->header.markerBit = false;
- // Start with an arbitrary sequence number.
- aux_rtp_header_->header.sequenceNumber = 0x1234; // Arbitrary.
- aux_rtp_header_->type.Audio.channel = 1;
- }
-
- aux_rtp_header_->header.timestamp = timestamp;
- IncomingPacket(incoming_payload, payload_length, *aux_rtp_header_);
- // Get ready for the next payload.
- aux_rtp_header_->header.sequenceNumber++;
- return 0;
-}
-
-int AudioCodingModuleImpl::SetOpusApplication(OpusApplicationMode application) {
- CriticalSectionScoped lock(acm_crit_sect_.get());
- if (!HaveValidEncoder("SetOpusApplication")) {
- return -1;
- }
- if (!codec_manager_.CurrentEncoderIsOpus())
- return -1;
- AudioEncoder::Application app;
- switch (application) {
- case kVoip:
- app = AudioEncoder::Application::kSpeech;
- break;
- case kAudio:
- app = AudioEncoder::Application::kAudio;
- break;
- default:
- FATAL();
- return 0;
- }
- return codec_manager_.CurrentEncoder()->SetApplication(app) ? 0 : -1;
-}
-
-// Informs Opus encoder of the maximum playback rate the receiver will render.
-int AudioCodingModuleImpl::SetOpusMaxPlaybackRate(int frequency_hz) {
- CriticalSectionScoped lock(acm_crit_sect_.get());
- if (!HaveValidEncoder("SetOpusMaxPlaybackRate")) {
- return -1;
- }
- if (!codec_manager_.CurrentEncoderIsOpus())
- return -1;
- codec_manager_.CurrentEncoder()->SetMaxPlaybackRate(frequency_hz);
- return 0;
-}
-
-int AudioCodingModuleImpl::EnableOpusDtx() {
- CriticalSectionScoped lock(acm_crit_sect_.get());
- if (!HaveValidEncoder("EnableOpusDtx")) {
- return -1;
- }
- if (!codec_manager_.CurrentEncoderIsOpus())
- return -1;
- return codec_manager_.CurrentEncoder()->SetDtx(true) ? 0 : -1;
-}
-
-int AudioCodingModuleImpl::DisableOpusDtx() {
- CriticalSectionScoped lock(acm_crit_sect_.get());
- if (!HaveValidEncoder("DisableOpusDtx")) {
- return -1;
- }
- if (!codec_manager_.CurrentEncoderIsOpus())
- return -1;
- return codec_manager_.CurrentEncoder()->SetDtx(false) ? 0 : -1;
-}
-
-int AudioCodingModuleImpl::PlayoutTimestamp(uint32_t* timestamp) {
- return receiver_.GetPlayoutTimestamp(timestamp) ? 0 : -1;
-}
-
-bool AudioCodingModuleImpl::HaveValidEncoder(const char* caller_name) const {
- if (!codec_manager_.CurrentEncoder()) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "%s failed: No send codec is registered.", caller_name);
- return false;
- }
- return true;
-}
-
-int AudioCodingModuleImpl::UnregisterReceiveCodec(uint8_t payload_type) {
- return receiver_.RemoveCodec(payload_type);
-}
-
-int AudioCodingModuleImpl::SetInitialPlayoutDelay(int delay_ms) {
- {
- CriticalSectionScoped lock(acm_crit_sect_.get());
- // Initialize receiver, if it is not initialized. Otherwise, initial delay
- // is reset upon initialization of the receiver.
- if (!receiver_initialized_)
- InitializeReceiverSafe();
- }
- return receiver_.SetInitialDelay(delay_ms);
-}
-
-int AudioCodingModuleImpl::EnableNack(size_t max_nack_list_size) {
- return receiver_.EnableNack(max_nack_list_size);
-}
-
-void AudioCodingModuleImpl::DisableNack() {
- receiver_.DisableNack();
-}
-
-std::vector<uint16_t> AudioCodingModuleImpl::GetNackList(
- int64_t round_trip_time_ms) const {
- return receiver_.GetNackList(round_trip_time_ms);
-}
-
-int AudioCodingModuleImpl::LeastRequiredDelayMs() const {
- return receiver_.LeastRequiredDelayMs();
-}
-
-void AudioCodingModuleImpl::GetDecodingCallStatistics(
- AudioDecodingCallStats* call_stats) const {
- receiver_.GetDecodingCallStatistics(call_stats);
-}
-
-} // namespace acm2
-} // namespace webrtc
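
The RTP timestamp scaling in Encode() is worth a worked example: the RTP
clock advances by the input timestamp delta divided by the ratio of the
encoder's sample rate to its RTP timestamp rate. Assuming a hypothetical
encoder with a 16000 Hz sample rate but an 8000 Hz RTP clock (as with
G.722, where the two rates differ):

  const uint32_t sample_rate_hz = 16000;
  const uint32_t rtp_rate_hz = 8000;
  const uint32_t last_rtp_ts = 0;
  const uint32_t input_delta = 320;  // Two 10 ms frames of input samples.
  const uint32_t rtp_ts =
      last_rtp_ts + input_delta / (sample_rate_hz / rtp_rate_hz);
  // rtp_ts == 160: the RTP clock runs at half the input sample rate.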
diff --git a/webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.h b/webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.h
deleted file mode 100644
index f20861398b..0000000000
--- a/webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.h
+++ /dev/null
@@ -1,285 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_AUDIO_CODING_MODULE_IMPL_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_AUDIO_CODING_MODULE_IMPL_H_
-
-#include <vector>
-
-#include "webrtc/base/buffer.h"
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/base/thread_annotations.h"
-#include "webrtc/common_types.h"
-#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_codec_database.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_receiver.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_resampler.h"
-#include "webrtc/modules/audio_coding/main/acm2/codec_manager.h"
-
-namespace webrtc {
-
-class CriticalSectionWrapper;
-class AudioCodingImpl;
-
-namespace acm2 {
-
-class AudioCodingModuleImpl final : public AudioCodingModule {
- public:
- friend webrtc::AudioCodingImpl;
-
- explicit AudioCodingModuleImpl(const AudioCodingModule::Config& config);
- ~AudioCodingModuleImpl() override;
-
- /////////////////////////////////////////
- // Sender
- //
-
- // Can be called multiple times for Codec, CNG, RED.
- int RegisterSendCodec(const CodecInst& send_codec) override;
-
- void RegisterExternalSendCodec(
- AudioEncoder* external_speech_encoder) override;
-
- // Get current send codec.
- int SendCodec(CodecInst* current_codec) const override;
-
- // Get current send frequency.
- int SendFrequency() const override;
-
- // Sets the bitrate to the specified value in bits/sec. In case the codec does
- // not support the requested value, it will choose an appropriate value
- // instead.
- void SetBitRate(int bitrate_bps) override;
-
- // Register a transport callback which will be
- // called to deliver the encoded buffers.
- int RegisterTransportCallback(AudioPacketizationCallback* transport) override;
-
- // Add 10 ms of raw (PCM) audio data to the encoder.
- int Add10MsData(const AudioFrame& audio_frame) override;
-
- /////////////////////////////////////////
- // (RED) Redundant Coding
- //
-
- // Configure RED status i.e. on/off.
- int SetREDStatus(bool enable_red) override;
-
- // Get RED status.
- bool REDStatus() const override;
-
- /////////////////////////////////////////
- // (FEC) Forward Error Correction (codec internal)
- //
-
- // Configure FEC status i.e. on/off.
- int SetCodecFEC(bool enabled_codec_fec) override;
-
- // Get FEC status.
- bool CodecFEC() const override;
-
- // Set target packet loss rate
- int SetPacketLossRate(int loss_rate) override;
-
- /////////////////////////////////////////
- // (VAD) Voice Activity Detection
- // and
- // (CNG) Comfort Noise Generation
- //
-
- int SetVAD(bool enable_dtx = true,
- bool enable_vad = false,
- ACMVADMode mode = VADNormal) override;
-
- int VAD(bool* dtx_enabled,
- bool* vad_enabled,
- ACMVADMode* mode) const override;
-
- int RegisterVADCallback(ACMVADCallback* vad_callback) override;
-
- /////////////////////////////////////////
- // Receiver
- //
-
- // Initialize receiver, resets codec database etc.
- int InitializeReceiver() override;
-
- // Get current receive frequency.
- int ReceiveFrequency() const override;
-
- // Get current playout frequency.
- int PlayoutFrequency() const override;
-
- // Register possible receive codecs, can be called multiple times,
- // for codecs, CNG, DTMF, RED.
- int RegisterReceiveCodec(const CodecInst& receive_codec) override;
-
- int RegisterExternalReceiveCodec(int rtp_payload_type,
- AudioDecoder* external_decoder,
- int sample_rate_hz,
- int num_channels) override;
-
- // Get current received codec.
- int ReceiveCodec(CodecInst* current_codec) const override;
-
- // Incoming packet from network parsed and ready for decode.
- int IncomingPacket(const uint8_t* incoming_payload,
- const size_t payload_length,
- const WebRtcRTPHeader& rtp_info) override;
-
- // Incoming payloads without RTP info; the RTP info will be created in ACM.
- // One use for this API is when pre-encoded files are pushed into ACM.
- int IncomingPayload(const uint8_t* incoming_payload,
- const size_t payload_length,
- uint8_t payload_type,
- uint32_t timestamp) override;
-
- // Minimum playout delay.
- int SetMinimumPlayoutDelay(int time_ms) override;
-
- // Maximum playout delay.
- int SetMaximumPlayoutDelay(int time_ms) override;
-
- // Smallest latency NetEq will maintain.
- int LeastRequiredDelayMs() const override;
-
- // Impose an initial delay on playout. ACM plays silence until |delay_ms| of
- // audio is accumulated in the NetEq buffer, then starts decoding payloads.
- int SetInitialPlayoutDelay(int delay_ms) override;
-
- // Get playout timestamp.
- int PlayoutTimestamp(uint32_t* timestamp) override;
-
- // Get 10 milliseconds of raw audio data to play out, and
- // automatic resample to the requested frequency if > 0.
- int PlayoutData10Ms(int desired_freq_hz, AudioFrame* audio_frame) override;
-
- /////////////////////////////////////////
- // Statistics
- //
-
- int GetNetworkStatistics(NetworkStatistics* statistics) override;
-
- int SetOpusApplication(OpusApplicationMode application) override;
-
- // If current send codec is Opus, informs it about the maximum playback rate
- // the receiver will render.
- int SetOpusMaxPlaybackRate(int frequency_hz) override;
-
- int EnableOpusDtx() override;
-
- int DisableOpusDtx() override;
-
- int UnregisterReceiveCodec(uint8_t payload_type) override;
-
- int EnableNack(size_t max_nack_list_size) override;
-
- void DisableNack() override;
-
- std::vector<uint16_t> GetNackList(int64_t round_trip_time_ms) const override;
-
- void GetDecodingCallStatistics(AudioDecodingCallStats* stats) const override;
-
- private:
- struct InputData {
- uint32_t input_timestamp;
- const int16_t* audio;
- size_t length_per_channel;
- uint8_t audio_channel;
- // If a re-mix is required (up or down), this buffer will store a re-mixed
- // version of the input.
- int16_t buffer[WEBRTC_10MS_PCM_AUDIO];
- };
-
- // This member class writes values to the named UMA histogram, but only if
- // the value has changed since the last time (and always for the first call).
- class ChangeLogger {
- public:
- explicit ChangeLogger(const std::string& histogram_name)
- : histogram_name_(histogram_name) {}
- // Logs the new value if it is different from the last logged value, or if
- // this is the first call.
- void MaybeLog(int value);
-
- private:
- int last_value_ = 0;
- bool first_time_ = true;
- const std::string histogram_name_;
- };
-
- int Add10MsDataInternal(const AudioFrame& audio_frame, InputData* input_data)
- EXCLUSIVE_LOCKS_REQUIRED(acm_crit_sect_);
- int Encode(const InputData& input_data)
- EXCLUSIVE_LOCKS_REQUIRED(acm_crit_sect_);
-
- int InitializeReceiverSafe() EXCLUSIVE_LOCKS_REQUIRED(acm_crit_sect_);
-
- bool HaveValidEncoder(const char* caller_name) const
- EXCLUSIVE_LOCKS_REQUIRED(acm_crit_sect_);
-
- // Preprocessing of input audio, including resampling and down-mixing if
- // required, before pushing audio into encoder's buffer.
- //
- // in_frame: input audio-frame
- // ptr_out: pointer to output audio_frame. If no preprocessing is required
- // |ptr_out| will be pointing to |in_frame|, otherwise pointing to
- // |preprocess_frame_|.
- //
- // Return value:
- // -1: if encountering an error.
- // 0: otherwise.
- int PreprocessToAddData(const AudioFrame& in_frame,
- const AudioFrame** ptr_out)
- EXCLUSIVE_LOCKS_REQUIRED(acm_crit_sect_);
-
- // Change required states after starting to receive the codec corresponding
- // to |index|.
- int UpdateUponReceivingCodec(int index);
-
- const rtc::scoped_ptr<CriticalSectionWrapper> acm_crit_sect_;
- rtc::Buffer encode_buffer_ GUARDED_BY(acm_crit_sect_);
- int id_; // TODO(henrik.lundin) Make const.
- uint32_t expected_codec_ts_ GUARDED_BY(acm_crit_sect_);
- uint32_t expected_in_ts_ GUARDED_BY(acm_crit_sect_);
- ACMResampler resampler_ GUARDED_BY(acm_crit_sect_);
- AcmReceiver receiver_; // AcmReceiver has its own internal lock.
- ChangeLogger bitrate_logger_ GUARDED_BY(acm_crit_sect_);
- CodecManager codec_manager_ GUARDED_BY(acm_crit_sect_);
-
- // This is to keep track of CN instances where we can send DTMFs.
- uint8_t previous_pltype_ GUARDED_BY(acm_crit_sect_);
-
- // Used when payloads are pushed into ACM without any RTP info.
- // One example is when a pre-encoded bit-stream is pushed from
- // a file.
- // IMPORTANT: this variable is only used in IncomingPayload(); therefore,
- // no lock is acquired when interacting with this variable. If it is going to
- // be used in other methods, locks need to be taken.
- rtc::scoped_ptr<WebRtcRTPHeader> aux_rtp_header_;
-
- bool receiver_initialized_ GUARDED_BY(acm_crit_sect_);
-
- AudioFrame preprocess_frame_ GUARDED_BY(acm_crit_sect_);
- bool first_10ms_data_ GUARDED_BY(acm_crit_sect_);
-
- bool first_frame_ GUARDED_BY(acm_crit_sect_);
- uint32_t last_timestamp_ GUARDED_BY(acm_crit_sect_);
- uint32_t last_rtp_timestamp_ GUARDED_BY(acm_crit_sect_);
-
- const rtc::scoped_ptr<CriticalSectionWrapper> callback_crit_sect_;
- AudioPacketizationCallback* packetization_callback_
- GUARDED_BY(callback_crit_sect_);
- ACMVADCallback* vad_callback_ GUARDED_BY(callback_crit_sect_);
-};
-
-} // namespace acm2
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_AUDIO_CODING_MODULE_IMPL_H_
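
ChangeLogger's contract is small but easy to misread: a value is written to
the UMA histogram on the first call and thereafter only when it differs from
the previously logged value. A standalone re-statement of that logic (the
class name here is hypothetical and the histogram write is stubbed out):

  struct ChangeOnlyLogger {
    int last_value = 0;
    bool first_time = true;
    void MaybeLog(int value) {
      if (value != last_value || first_time) {
        first_time = false;
        last_value = value;
        // The real class calls RTC_HISTOGRAM_COUNTS_100(name, value) here.
      }
    }
  };
  // Usage: MaybeLog(32), MaybeLog(32), MaybeLog(40) logs twice (32, then 40).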
diff --git a/webrtc/modules/audio_coding/main/acm2/audio_coding_module_unittest_oldapi.cc b/webrtc/modules/audio_coding/main/acm2/audio_coding_module_unittest_oldapi.cc
deleted file mode 100644
index 879fb839fe..0000000000
--- a/webrtc/modules/audio_coding/main/acm2/audio_coding_module_unittest_oldapi.cc
+++ /dev/null
@@ -1,1792 +0,0 @@
-/*
- * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <string.h>
-#include <vector>
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/md5digest.h"
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/base/thread_annotations.h"
-#include "webrtc/modules/audio_coding/codecs/audio_encoder.h"
-#include "webrtc/modules/audio_coding/codecs/g711/include/audio_decoder_pcm.h"
-#include "webrtc/modules/audio_coding/codecs/g711/include/audio_encoder_pcm.h"
-#include "webrtc/modules/audio_coding/codecs/isac/main/include/audio_encoder_isac.h"
-#include "webrtc/modules/audio_coding/codecs/mock/mock_audio_encoder.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_receive_test_oldapi.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_send_test_oldapi.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module_typedefs.h"
-#include "webrtc/modules/audio_coding/neteq/audio_decoder_impl.h"
-#include "webrtc/modules/audio_coding/neteq/mock/mock_audio_decoder.h"
-#include "webrtc/modules/audio_coding/neteq/tools/audio_checksum.h"
-#include "webrtc/modules/audio_coding/neteq/tools/audio_loop.h"
-#include "webrtc/modules/audio_coding/neteq/tools/constant_pcm_packet_source.h"
-#include "webrtc/modules/audio_coding/neteq/tools/input_audio_file.h"
-#include "webrtc/modules/audio_coding/neteq/tools/output_audio_file.h"
-#include "webrtc/modules/audio_coding/neteq/tools/packet.h"
-#include "webrtc/modules/audio_coding/neteq/tools/rtp_file_source.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/system_wrappers/include/clock.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/system_wrappers/include/sleep.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
-#include "webrtc/test/testsupport/fileutils.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
-
-using ::testing::AtLeast;
-using ::testing::Invoke;
-using ::testing::_;
-
-namespace webrtc {
-
-namespace {
-const int kSampleRateHz = 16000;
-const int kNumSamples10ms = kSampleRateHz / 100;
-const int kFrameSizeMs = 10; // Multiple of 10.
-const int kFrameSizeSamples = kFrameSizeMs / 10 * kNumSamples10ms;
-const int kPayloadSizeBytes = kFrameSizeSamples * sizeof(int16_t);
-const uint8_t kPayloadType = 111;
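-// For reference, with kSampleRateHz = 16000 the constants above work out to
-// kNumSamples10ms = 160 samples, kFrameSizeSamples = 160 samples and
-// kPayloadSizeBytes = 320 bytes (16-bit samples).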
-} // namespace
-
-class RtpUtility {
- public:
- RtpUtility(int samples_per_packet, uint8_t payload_type)
- : samples_per_packet_(samples_per_packet), payload_type_(payload_type) {}
-
- virtual ~RtpUtility() {}
-
- void Populate(WebRtcRTPHeader* rtp_header) {
- rtp_header->header.sequenceNumber = 0xABCD;
- rtp_header->header.timestamp = 0xABCDEF01;
- rtp_header->header.payloadType = payload_type_;
- rtp_header->header.markerBit = false;
- rtp_header->header.ssrc = 0x1234;
- rtp_header->header.numCSRCs = 0;
- rtp_header->frameType = kAudioFrameSpeech;
-
- rtp_header->header.payload_type_frequency = kSampleRateHz;
- rtp_header->type.Audio.channel = 1;
- rtp_header->type.Audio.isCNG = false;
- }
-
- void Forward(WebRtcRTPHeader* rtp_header) {
- ++rtp_header->header.sequenceNumber;
- rtp_header->header.timestamp += samples_per_packet_;
- }
-
- private:
- int samples_per_packet_;
- uint8_t payload_type_;
-};
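-
-// Illustrative use of RtpUtility (a sketch; |have_packets| below is a
-// stand-in for the test's own loop condition, not a real variable):
-//
-//   WebRtcRTPHeader header;
-//   RtpUtility util(kFrameSizeSamples, kPayloadType);
-//   util.Populate(&header);   // Fill in the static fields once.
-//   while (have_packets) {
-//     // ... hand |header| to AudioCodingModule::IncomingPacket() ...
-//     util.Forward(&header);  // Advance sequence number and timestamp.
-//   }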
-
-class PacketizationCallbackStubOldApi : public AudioPacketizationCallback {
- public:
- PacketizationCallbackStubOldApi()
- : num_calls_(0),
- last_frame_type_(kEmptyFrame),
- last_payload_type_(-1),
- last_timestamp_(0),
- crit_sect_(CriticalSectionWrapper::CreateCriticalSection()) {}
-
- int32_t SendData(FrameType frame_type,
- uint8_t payload_type,
- uint32_t timestamp,
- const uint8_t* payload_data,
- size_t payload_len_bytes,
- const RTPFragmentationHeader* fragmentation) override {
- CriticalSectionScoped lock(crit_sect_.get());
- ++num_calls_;
- last_frame_type_ = frame_type;
- last_payload_type_ = payload_type;
- last_timestamp_ = timestamp;
- last_payload_vec_.assign(payload_data, payload_data + payload_len_bytes);
- return 0;
- }
-
- int num_calls() const {
- CriticalSectionScoped lock(crit_sect_.get());
- return num_calls_;
- }
-
- int last_payload_len_bytes() const {
- CriticalSectionScoped lock(crit_sect_.get());
- return last_payload_vec_.size();
- }
-
- FrameType last_frame_type() const {
- CriticalSectionScoped lock(crit_sect_.get());
- return last_frame_type_;
- }
-
- int last_payload_type() const {
- CriticalSectionScoped lock(crit_sect_.get());
- return last_payload_type_;
- }
-
- uint32_t last_timestamp() const {
- CriticalSectionScoped lock(crit_sect_.get());
- return last_timestamp_;
- }
-
- void SwapBuffers(std::vector<uint8_t>* payload) {
- CriticalSectionScoped lock(crit_sect_.get());
- last_payload_vec_.swap(*payload);
- }
-
- private:
- int num_calls_ GUARDED_BY(crit_sect_);
- FrameType last_frame_type_ GUARDED_BY(crit_sect_);
- int last_payload_type_ GUARDED_BY(crit_sect_);
- uint32_t last_timestamp_ GUARDED_BY(crit_sect_);
- std::vector<uint8_t> last_payload_vec_ GUARDED_BY(crit_sect_);
- const rtc::scoped_ptr<CriticalSectionWrapper> crit_sect_;
-};
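-
-// The stub above is handed to the ACM via RegisterTransportCallback() (see
-// AudioCodingModuleTestOldApi::SetUp() below); every encoded packet produced
-// by the ACM is then delivered to SendData() and recorded for inspection.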
-
-class AudioCodingModuleTestOldApi : public ::testing::Test {
- protected:
- AudioCodingModuleTestOldApi()
- : id_(1),
- rtp_utility_(new RtpUtility(kFrameSizeSamples, kPayloadType)),
- clock_(Clock::GetRealTimeClock()) {}
-
- ~AudioCodingModuleTestOldApi() {}
-
- void TearDown() {}
-
- void SetUp() {
- acm_.reset(AudioCodingModule::Create(id_, clock_));
-
- rtp_utility_->Populate(&rtp_header_);
-
- input_frame_.sample_rate_hz_ = kSampleRateHz;
- input_frame_.num_channels_ = 1;
- input_frame_.samples_per_channel_ = kSampleRateHz * 10 / 1000; // 10 ms.
- static_assert(kSampleRateHz * 10 / 1000 <= AudioFrame::kMaxDataSizeSamples,
- "audio frame too small");
- memset(input_frame_.data_,
- 0,
- input_frame_.samples_per_channel_ * sizeof(input_frame_.data_[0]));
-
- ASSERT_EQ(0, acm_->RegisterTransportCallback(&packet_cb_));
-
- SetUpL16Codec();
- }
-
- // Set up L16 codec.
- virtual void SetUpL16Codec() {
- ASSERT_EQ(0, AudioCodingModule::Codec("L16", &codec_, kSampleRateHz, 1));
- codec_.pltype = kPayloadType;
- }
-
- virtual void RegisterCodec() {
- ASSERT_EQ(0, acm_->RegisterReceiveCodec(codec_));
- ASSERT_EQ(0, acm_->RegisterSendCodec(codec_));
- }
-
- virtual void InsertPacketAndPullAudio() {
- InsertPacket();
- PullAudio();
- }
-
- virtual void InsertPacket() {
- const uint8_t kPayload[kPayloadSizeBytes] = {0};
- ASSERT_EQ(0,
- acm_->IncomingPacket(kPayload, kPayloadSizeBytes, rtp_header_));
- rtp_utility_->Forward(&rtp_header_);
- }
-
- virtual void PullAudio() {
- AudioFrame audio_frame;
- ASSERT_EQ(0, acm_->PlayoutData10Ms(-1, &audio_frame));
- }
-
- virtual void InsertAudio() {
- ASSERT_GE(acm_->Add10MsData(input_frame_), 0);
- input_frame_.timestamp_ += kNumSamples10ms;
- }
-
- virtual void VerifyEncoding() {
- int last_length = packet_cb_.last_payload_len_bytes();
- EXPECT_TRUE(last_length == 2 * codec_.pacsize || last_length == 0)
- << "Last encoded packet was " << last_length << " bytes.";
- }
-
- virtual void InsertAudioAndVerifyEncoding() {
- InsertAudio();
- VerifyEncoding();
- }
-
- const int id_;
- rtc::scoped_ptr<RtpUtility> rtp_utility_;
- rtc::scoped_ptr<AudioCodingModule> acm_;
- PacketizationCallbackStubOldApi packet_cb_;
- WebRtcRTPHeader rtp_header_;
- AudioFrame input_frame_;
- CodecInst codec_;
- Clock* clock_;
-};
-
-// Check that the statistics are initialized correctly. Before any call to
-// ACM, all fields have to be zero.
-TEST_F(AudioCodingModuleTestOldApi, DISABLED_ON_ANDROID(InitializedToZero)) {
- RegisterCodec();
- AudioDecodingCallStats stats;
- acm_->GetDecodingCallStatistics(&stats);
- EXPECT_EQ(0, stats.calls_to_neteq);
- EXPECT_EQ(0, stats.calls_to_silence_generator);
- EXPECT_EQ(0, stats.decoded_normal);
- EXPECT_EQ(0, stats.decoded_cng);
- EXPECT_EQ(0, stats.decoded_plc);
- EXPECT_EQ(0, stats.decoded_plc_cng);
-}
-
-// Apply an initial playout delay. Calls to AudioCodingModule::PlayoutData10ms()
-// should result in generating silence, check the associated field.
-TEST_F(AudioCodingModuleTestOldApi,
- DISABLED_ON_ANDROID(SilenceGeneratorCalled)) {
- RegisterCodec();
- AudioDecodingCallStats stats;
- const int kInitialDelay = 100;
-
- acm_->SetInitialPlayoutDelay(kInitialDelay);
-
- int num_calls = 0;
- for (int time_ms = 0; time_ms < kInitialDelay;
- time_ms += kFrameSizeMs, ++num_calls) {
- InsertPacketAndPullAudio();
- }
- acm_->GetDecodingCallStatistics(&stats);
- EXPECT_EQ(0, stats.calls_to_neteq);
- EXPECT_EQ(num_calls, stats.calls_to_silence_generator);
- EXPECT_EQ(0, stats.decoded_normal);
- EXPECT_EQ(0, stats.decoded_cng);
- EXPECT_EQ(0, stats.decoded_plc);
- EXPECT_EQ(0, stats.decoded_plc_cng);
-}
-
-// Insert some packets and pull audio. Check statistics are valid. Then,
-// simulate packet loss and check if PLC and PLC-to-CNG statistics are
-// correctly updated.
-TEST_F(AudioCodingModuleTestOldApi, DISABLED_ON_ANDROID(NetEqCalls)) {
- RegisterCodec();
- AudioDecodingCallStats stats;
- const int kNumNormalCalls = 10;
-
- for (int num_calls = 0; num_calls < kNumNormalCalls; ++num_calls) {
- InsertPacketAndPullAudio();
- }
- acm_->GetDecodingCallStatistics(&stats);
- EXPECT_EQ(kNumNormalCalls, stats.calls_to_neteq);
- EXPECT_EQ(0, stats.calls_to_silence_generator);
- EXPECT_EQ(kNumNormalCalls, stats.decoded_normal);
- EXPECT_EQ(0, stats.decoded_cng);
- EXPECT_EQ(0, stats.decoded_plc);
- EXPECT_EQ(0, stats.decoded_plc_cng);
-
- const int kNumPlc = 3;
- const int kNumPlcCng = 5;
-
- // Simulate packet-loss. NetEq first performs PLC then PLC fades to CNG.
- for (int n = 0; n < kNumPlc + kNumPlcCng; ++n) {
- PullAudio();
- }
- acm_->GetDecodingCallStatistics(&stats);
- EXPECT_EQ(kNumNormalCalls + kNumPlc + kNumPlcCng, stats.calls_to_neteq);
- EXPECT_EQ(0, stats.calls_to_silence_generator);
- EXPECT_EQ(kNumNormalCalls, stats.decoded_normal);
- EXPECT_EQ(0, stats.decoded_cng);
- EXPECT_EQ(kNumPlc, stats.decoded_plc);
- EXPECT_EQ(kNumPlcCng, stats.decoded_plc_cng);
-}
-
-TEST_F(AudioCodingModuleTestOldApi, VerifyOutputFrame) {
- AudioFrame audio_frame;
- const int kSampleRateHz = 32000;
- EXPECT_EQ(0, acm_->PlayoutData10Ms(kSampleRateHz, &audio_frame));
- EXPECT_EQ(id_, audio_frame.id_);
- EXPECT_EQ(0u, audio_frame.timestamp_);
- EXPECT_GT(audio_frame.num_channels_, 0);
- EXPECT_EQ(static_cast<size_t>(kSampleRateHz / 100),
- audio_frame.samples_per_channel_);
- EXPECT_EQ(kSampleRateHz, audio_frame.sample_rate_hz_);
-}
-
-TEST_F(AudioCodingModuleTestOldApi, FailOnZeroDesiredFrequency) {
- AudioFrame audio_frame;
- EXPECT_EQ(-1, acm_->PlayoutData10Ms(0, &audio_frame));
-}
-
-// Checks that the transport callback is invoked once for each speech packet.
-// Also checks that the frame type is kAudioFrameSpeech.
-TEST_F(AudioCodingModuleTestOldApi, TransportCallbackIsInvokedForEachPacket) {
- const int k10MsBlocksPerPacket = 3;
- codec_.pacsize = k10MsBlocksPerPacket * kSampleRateHz / 100;
- RegisterCodec();
- const int kLoops = 10;
- for (int i = 0; i < kLoops; ++i) {
- EXPECT_EQ(i / k10MsBlocksPerPacket, packet_cb_.num_calls());
- if (packet_cb_.num_calls() > 0)
- EXPECT_EQ(kAudioFrameSpeech, packet_cb_.last_frame_type());
- InsertAudioAndVerifyEncoding();
- }
- EXPECT_EQ(kLoops / k10MsBlocksPerPacket, packet_cb_.num_calls());
- EXPECT_EQ(kAudioFrameSpeech, packet_cb_.last_frame_type());
-}
-
-#if defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX)
-#define IF_ISAC(x) x
-#else
-#define IF_ISAC(x) DISABLED_##x
-#endif
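-
-// E.g. without iSAC support, IF_ISAC(NetEqCalls) expands to
-// DISABLED_NetEqCalls, which gtest compiles but skips by default (it can
-// still be run with --gtest_also_run_disabled_tests).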
-
-// Verifies that the RTP timestamp series is not reset when the codec is
-// changed.
-TEST_F(AudioCodingModuleTestOldApi,
- IF_ISAC(TimestampSeriesContinuesWhenCodecChanges)) {
- RegisterCodec(); // This registers the default codec.
- uint32_t expected_ts = input_frame_.timestamp_;
- int blocks_per_packet = codec_.pacsize / (kSampleRateHz / 100);
- // Encode 5 packets of the first codec type.
- const int kNumPackets1 = 5;
- for (int j = 0; j < kNumPackets1; ++j) {
- for (int i = 0; i < blocks_per_packet; ++i) {
- EXPECT_EQ(j, packet_cb_.num_calls());
- InsertAudio();
- }
- EXPECT_EQ(j + 1, packet_cb_.num_calls());
- EXPECT_EQ(expected_ts, packet_cb_.last_timestamp());
- expected_ts += codec_.pacsize;
- }
-
- // Change codec.
- ASSERT_EQ(0, AudioCodingModule::Codec("ISAC", &codec_, kSampleRateHz, 1));
- RegisterCodec();
- blocks_per_packet = codec_.pacsize / (kSampleRateHz / 100);
- // Encode another 5 packets.
- const int kNumPackets2 = 5;
- for (int j = 0; j < kNumPackets2; ++j) {
- for (int i = 0; i < blocks_per_packet; ++i) {
- EXPECT_EQ(kNumPackets1 + j, packet_cb_.num_calls());
- InsertAudio();
- }
- EXPECT_EQ(kNumPackets1 + j + 1, packet_cb_.num_calls());
- EXPECT_EQ(expected_ts, packet_cb_.last_timestamp());
- expected_ts += codec_.pacsize;
- }
-}
-
-// This class sets different expectations on the number of encoded bytes: all
-// encoded packets are expected to be 9 bytes (matching one CNG SID frame) or
-// 0 bytes. The test depends on |input_frame_| containing (near-)zero values.
-// The class also introduces a way to register comfort noise with a custom
-// payload type.
-class AudioCodingModuleTestWithComfortNoiseOldApi
- : public AudioCodingModuleTestOldApi {
- protected:
- void RegisterCngCodec(int rtp_payload_type) {
- CodecInst codec;
- AudioCodingModule::Codec("CN", &codec, kSampleRateHz, 1);
- codec.pltype = rtp_payload_type;
- ASSERT_EQ(0, acm_->RegisterReceiveCodec(codec));
- ASSERT_EQ(0, acm_->RegisterSendCodec(codec));
- }
-
- void VerifyEncoding() override {
- int last_length = packet_cb_.last_payload_len_bytes();
- EXPECT_TRUE(last_length == 9 || last_length == 0)
- << "Last encoded packet was " << last_length << " bytes.";
- }
-
- void DoTest(int blocks_per_packet, int cng_pt) {
- const int kLoops = 40;
- // This array defines the expected frame types, and when they should arrive.
- // We expect a frame to arrive each time the speech encoder would have
- // produced a packet, and once every 100 ms the frame should be non-empty,
-    // that is, contain comfort noise.
- const struct {
- int ix;
- FrameType type;
- } expectation[] = {{2, kAudioFrameCN},
- {5, kEmptyFrame},
- {8, kEmptyFrame},
- {11, kAudioFrameCN},
- {14, kEmptyFrame},
- {17, kEmptyFrame},
- {20, kAudioFrameCN},
- {23, kEmptyFrame},
- {26, kEmptyFrame},
- {29, kEmptyFrame},
- {32, kAudioFrameCN},
- {35, kEmptyFrame},
- {38, kEmptyFrame}};
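-    // (With 30 ms packets, a packet slot occurs on every third 10 ms block,
-    // at i = 2, 5, 8, ...; the kAudioFrameCN entries are spaced roughly
-    // 100 ms apart, matching the SID update interval.)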
- for (int i = 0; i < kLoops; ++i) {
- int num_calls_before = packet_cb_.num_calls();
- EXPECT_EQ(i / blocks_per_packet, num_calls_before);
- InsertAudioAndVerifyEncoding();
- int num_calls = packet_cb_.num_calls();
- if (num_calls == num_calls_before + 1) {
- EXPECT_EQ(expectation[num_calls - 1].ix, i);
- EXPECT_EQ(expectation[num_calls - 1].type, packet_cb_.last_frame_type())
- << "Wrong frame type for lap " << i;
- EXPECT_EQ(cng_pt, packet_cb_.last_payload_type());
- } else {
- EXPECT_EQ(num_calls, num_calls_before);
- }
- }
- }
-};
-
-// Checks that the transport callback is invoked once per frame period of the
-// underlying speech encoder, even when comfort noise is produced.
-// Also checks that the frame type is kAudioFrameCN or kEmptyFrame.
-// This test and the next check the same thing, but differ in the order of
-// speech codec and CNG registration.
-TEST_F(AudioCodingModuleTestWithComfortNoiseOldApi,
- TransportCallbackTestForComfortNoiseRegisterCngLast) {
- const int k10MsBlocksPerPacket = 3;
- codec_.pacsize = k10MsBlocksPerPacket * kSampleRateHz / 100;
- RegisterCodec();
- const int kCngPayloadType = 105;
- RegisterCngCodec(kCngPayloadType);
- ASSERT_EQ(0, acm_->SetVAD(true, true));
- DoTest(k10MsBlocksPerPacket, kCngPayloadType);
-}
-
-TEST_F(AudioCodingModuleTestWithComfortNoiseOldApi,
- TransportCallbackTestForComfortNoiseRegisterCngFirst) {
- const int k10MsBlocksPerPacket = 3;
- codec_.pacsize = k10MsBlocksPerPacket * kSampleRateHz / 100;
- const int kCngPayloadType = 105;
- RegisterCngCodec(kCngPayloadType);
- RegisterCodec();
- ASSERT_EQ(0, acm_->SetVAD(true, true));
- DoTest(k10MsBlocksPerPacket, kCngPayloadType);
-}
-
-// A multi-threaded test for ACM. This base class uses the PCM16b 16 kHz
-// codec, while the derived class AcmIsacMtTestOldApi uses iSAC.
-class AudioCodingModuleMtTestOldApi : public AudioCodingModuleTestOldApi {
- protected:
- static const int kNumPackets = 500;
- static const int kNumPullCalls = 500;
-
- AudioCodingModuleMtTestOldApi()
- : AudioCodingModuleTestOldApi(),
- send_thread_(ThreadWrapper::CreateThread(CbSendThread, this, "send")),
- insert_packet_thread_(ThreadWrapper::CreateThread(
- CbInsertPacketThread, this, "insert_packet")),
- pull_audio_thread_(ThreadWrapper::CreateThread(
- CbPullAudioThread, this, "pull_audio")),
- test_complete_(EventWrapper::Create()),
- send_count_(0),
- insert_packet_count_(0),
- pull_audio_count_(0),
- crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- next_insert_packet_time_ms_(0),
- fake_clock_(new SimulatedClock(0)) {
- clock_ = fake_clock_.get();
- }
-
- void SetUp() {
- AudioCodingModuleTestOldApi::SetUp();
- RegisterCodec(); // Must be called before the threads start below.
- StartThreads();
- }
-
- void StartThreads() {
- ASSERT_TRUE(send_thread_->Start());
- send_thread_->SetPriority(kRealtimePriority);
- ASSERT_TRUE(insert_packet_thread_->Start());
- insert_packet_thread_->SetPriority(kRealtimePriority);
- ASSERT_TRUE(pull_audio_thread_->Start());
- pull_audio_thread_->SetPriority(kRealtimePriority);
- }
-
- void TearDown() {
- AudioCodingModuleTestOldApi::TearDown();
- pull_audio_thread_->Stop();
- send_thread_->Stop();
- insert_packet_thread_->Stop();
- }
-
- EventTypeWrapper RunTest() {
- return test_complete_->Wait(10 * 60 * 1000); // 10 minutes' timeout.
- }
-
- virtual bool TestDone() {
- if (packet_cb_.num_calls() > kNumPackets) {
- CriticalSectionScoped lock(crit_sect_.get());
- if (pull_audio_count_ > kNumPullCalls) {
- // Both conditions for completion are met. End the test.
- return true;
- }
- }
- return false;
- }
-
- static bool CbSendThread(void* context) {
- return reinterpret_cast<AudioCodingModuleMtTestOldApi*>(context)
- ->CbSendImpl();
- }
-
- // The send thread doesn't have to care about the current simulated time,
- // since only the AcmReceiver is using the clock.
- bool CbSendImpl() {
- SleepMs(1);
- if (HasFatalFailure()) {
- // End the test early if a fatal failure (ASSERT_*) has occurred.
- test_complete_->Set();
- }
- ++send_count_;
- InsertAudioAndVerifyEncoding();
- if (TestDone()) {
- test_complete_->Set();
- }
- return true;
- }
-
- static bool CbInsertPacketThread(void* context) {
- return reinterpret_cast<AudioCodingModuleMtTestOldApi*>(context)
- ->CbInsertPacketImpl();
- }
-
- bool CbInsertPacketImpl() {
- SleepMs(1);
- {
- CriticalSectionScoped lock(crit_sect_.get());
- if (clock_->TimeInMilliseconds() < next_insert_packet_time_ms_) {
- return true;
- }
- next_insert_packet_time_ms_ += 10;
- }
- // Now we're not holding the crit sect when calling ACM.
- ++insert_packet_count_;
- InsertPacket();
- return true;
- }
-
- static bool CbPullAudioThread(void* context) {
- return reinterpret_cast<AudioCodingModuleMtTestOldApi*>(context)
- ->CbPullAudioImpl();
- }
-
- bool CbPullAudioImpl() {
- SleepMs(1);
- {
- CriticalSectionScoped lock(crit_sect_.get());
- // Don't let the insert thread fall behind.
- if (next_insert_packet_time_ms_ < clock_->TimeInMilliseconds()) {
- return true;
- }
- ++pull_audio_count_;
- }
- // Now we're not holding the crit sect when calling ACM.
- PullAudio();
- fake_clock_->AdvanceTimeMilliseconds(10);
- return true;
- }
-
- rtc::scoped_ptr<ThreadWrapper> send_thread_;
- rtc::scoped_ptr<ThreadWrapper> insert_packet_thread_;
- rtc::scoped_ptr<ThreadWrapper> pull_audio_thread_;
- const rtc::scoped_ptr<EventWrapper> test_complete_;
- int send_count_;
- int insert_packet_count_;
- int pull_audio_count_ GUARDED_BY(crit_sect_);
- const rtc::scoped_ptr<CriticalSectionWrapper> crit_sect_;
- int64_t next_insert_packet_time_ms_ GUARDED_BY(crit_sect_);
- rtc::scoped_ptr<SimulatedClock> fake_clock_;
-};
-
-TEST_F(AudioCodingModuleMtTestOldApi, DISABLED_ON_IOS(DoTest)) {
- EXPECT_EQ(kEventSignaled, RunTest());
-}
-
-// This is a multi-threaded ACM test using iSAC. The test encodes audio
-// from a PCM file. The most recent encoded frame is used as input to the
-// receiving part. Depending on timing, the same RTP packet may be inserted
-// into the receiver multiple times, but this is a valid use case and
-// simplifies the test code considerably.
-class AcmIsacMtTestOldApi : public AudioCodingModuleMtTestOldApi {
- protected:
- static const int kNumPackets = 500;
- static const int kNumPullCalls = 500;
-
- AcmIsacMtTestOldApi()
- : AudioCodingModuleMtTestOldApi(), last_packet_number_(0) {}
-
- ~AcmIsacMtTestOldApi() {}
-
- void SetUp() {
- AudioCodingModuleTestOldApi::SetUp();
- RegisterCodec(); // Must be called before the threads start below.
-
- // Set up input audio source to read from specified file, loop after 5
- // seconds, and deliver blocks of 10 ms.
- const std::string input_file_name =
- webrtc::test::ResourcePath("audio_coding/speech_mono_16kHz", "pcm");
- audio_loop_.Init(input_file_name, 5 * kSampleRateHz, kNumSamples10ms);
-
- // Generate one packet to have something to insert.
- int loop_counter = 0;
- while (packet_cb_.last_payload_len_bytes() == 0) {
- InsertAudio();
- ASSERT_LT(loop_counter++, 10);
- }
-    // Set |last_packet_number_| to one less than |num_calls| so that the
-    // packet will be fetched in the next InsertPacket() call.
- last_packet_number_ = packet_cb_.num_calls() - 1;
-
- StartThreads();
- }
-
- void RegisterCodec() override {
- static_assert(kSampleRateHz == 16000, "test designed for iSAC 16 kHz");
- AudioCodingModule::Codec("ISAC", &codec_, kSampleRateHz, 1);
- codec_.pltype = kPayloadType;
-
-    // Register the iSAC codec in ACM, effectively unregistering the PCM16B
-    // codec registered in AudioCodingModuleTestOldApi::SetUp().
- ASSERT_EQ(0, acm_->RegisterReceiveCodec(codec_));
- ASSERT_EQ(0, acm_->RegisterSendCodec(codec_));
- }
-
- void InsertPacket() {
- int num_calls = packet_cb_.num_calls(); // Store locally for thread safety.
- if (num_calls > last_packet_number_) {
- // Get the new payload out from the callback handler.
- // Note that since we swap buffers here instead of directly inserting
- // a pointer to the data in |packet_cb_|, we avoid locking the callback
- // for the duration of the IncomingPacket() call.
- packet_cb_.SwapBuffers(&last_payload_vec_);
- ASSERT_GT(last_payload_vec_.size(), 0u);
- rtp_utility_->Forward(&rtp_header_);
- last_packet_number_ = num_calls;
- }
- ASSERT_GT(last_payload_vec_.size(), 0u);
- ASSERT_EQ(
- 0,
- acm_->IncomingPacket(
- &last_payload_vec_[0], last_payload_vec_.size(), rtp_header_));
- }
-
- void InsertAudio() {
-    // Note that the length argument to memcpy() is in bytes, so the sample
-    // count must be scaled by the sample size.
-    memcpy(input_frame_.data_, audio_loop_.GetNextBlock(),
-           kNumSamples10ms * sizeof(int16_t));
- AudioCodingModuleTestOldApi::InsertAudio();
- }
-
-  // Override the verification function with a no-op, since iSAC produces
-  // variable payload sizes.
- void VerifyEncoding() override {}
-
-  // This method is the same as AudioCodingModuleMtTestOldApi::TestDone(), but
-  // here it uses the constants defined in this class (i.e., a shorter test
-  // run).
- virtual bool TestDone() {
- if (packet_cb_.num_calls() > kNumPackets) {
- CriticalSectionScoped lock(crit_sect_.get());
- if (pull_audio_count_ > kNumPullCalls) {
- // Both conditions for completion are met. End the test.
- return true;
- }
- }
- return false;
- }
-
- int last_packet_number_;
- std::vector<uint8_t> last_payload_vec_;
- test::AudioLoop audio_loop_;
-};
-
-TEST_F(AcmIsacMtTestOldApi, DISABLED_ON_IOS(IF_ISAC(DoTest))) {
- EXPECT_EQ(kEventSignaled, RunTest());
-}
-
-class AcmReRegisterIsacMtTestOldApi : public AudioCodingModuleTestOldApi {
- protected:
- static const int kRegisterAfterNumPackets = 5;
- static const int kNumPackets = 10;
- static const int kPacketSizeMs = 30;
- static const int kPacketSizeSamples = kPacketSizeMs * 16;
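-  // (kPacketSizeSamples works out to 30 ms * 16 samples/ms = 480 samples at
-  // 16 kHz.)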
-
- AcmReRegisterIsacMtTestOldApi()
- : AudioCodingModuleTestOldApi(),
- receive_thread_(
- ThreadWrapper::CreateThread(CbReceiveThread, this, "receive")),
- codec_registration_thread_(
- ThreadWrapper::CreateThread(CbCodecRegistrationThread,
- this,
- "codec_registration")),
- test_complete_(EventWrapper::Create()),
- crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- codec_registered_(false),
- receive_packet_count_(0),
- next_insert_packet_time_ms_(0),
- fake_clock_(new SimulatedClock(0)) {
- AudioEncoderIsac::Config config;
- config.payload_type = kPayloadType;
- isac_encoder_.reset(new AudioEncoderIsac(config));
- clock_ = fake_clock_.get();
- }
-
- void SetUp() {
- AudioCodingModuleTestOldApi::SetUp();
- // Set up input audio source to read from specified file, loop after 5
- // seconds, and deliver blocks of 10 ms.
- const std::string input_file_name =
- webrtc::test::ResourcePath("audio_coding/speech_mono_16kHz", "pcm");
- audio_loop_.Init(input_file_name, 5 * kSampleRateHz, kNumSamples10ms);
- RegisterCodec(); // Must be called before the threads start below.
- StartThreads();
- }
-
- void RegisterCodec() override {
- static_assert(kSampleRateHz == 16000, "test designed for iSAC 16 kHz");
- AudioCodingModule::Codec("ISAC", &codec_, kSampleRateHz, 1);
- codec_.pltype = kPayloadType;
-
-    // Register the iSAC codec in ACM, effectively unregistering the PCM16B
-    // codec registered in AudioCodingModuleTestOldApi::SetUp().
-    // Only register the decoder for now. The encoder is registered later.
- ASSERT_EQ(0, acm_->RegisterReceiveCodec(codec_));
- }
-
- void StartThreads() {
- ASSERT_TRUE(receive_thread_->Start());
- receive_thread_->SetPriority(kRealtimePriority);
- ASSERT_TRUE(codec_registration_thread_->Start());
- codec_registration_thread_->SetPriority(kRealtimePriority);
- }
-
- void TearDown() {
- AudioCodingModuleTestOldApi::TearDown();
- receive_thread_->Stop();
- codec_registration_thread_->Stop();
- }
-
- EventTypeWrapper RunTest() {
- return test_complete_->Wait(10 * 60 * 1000); // 10 minutes' timeout.
- }
-
- static bool CbReceiveThread(void* context) {
- return reinterpret_cast<AcmReRegisterIsacMtTestOldApi*>(context)
- ->CbReceiveImpl();
- }
-
- bool CbReceiveImpl() {
- SleepMs(1);
- const size_t max_encoded_bytes = isac_encoder_->MaxEncodedBytes();
- rtc::scoped_ptr<uint8_t[]> encoded(new uint8_t[max_encoded_bytes]);
- AudioEncoder::EncodedInfo info;
- {
- CriticalSectionScoped lock(crit_sect_.get());
- if (clock_->TimeInMilliseconds() < next_insert_packet_time_ms_) {
- return true;
- }
- next_insert_packet_time_ms_ += kPacketSizeMs;
- ++receive_packet_count_;
-
- // Encode new frame.
- uint32_t input_timestamp = rtp_header_.header.timestamp;
- while (info.encoded_bytes == 0) {
- info = isac_encoder_->Encode(
- input_timestamp, audio_loop_.GetNextBlock(), kNumSamples10ms,
- max_encoded_bytes, encoded.get());
- input_timestamp += 160; // 10 ms at 16 kHz.
- }
- EXPECT_EQ(rtp_header_.header.timestamp + kPacketSizeSamples,
- input_timestamp);
- EXPECT_EQ(rtp_header_.header.timestamp, info.encoded_timestamp);
- EXPECT_EQ(rtp_header_.header.payloadType, info.payload_type);
- }
- // Now we're not holding the crit sect when calling ACM.
-
- // Insert into ACM.
- EXPECT_EQ(0, acm_->IncomingPacket(encoded.get(), info.encoded_bytes,
- rtp_header_));
-
- // Pull audio.
- for (int i = 0; i < rtc::CheckedDivExact(kPacketSizeMs, 10); ++i) {
- AudioFrame audio_frame;
- EXPECT_EQ(0, acm_->PlayoutData10Ms(-1 /* default output frequency */,
- &audio_frame));
- fake_clock_->AdvanceTimeMilliseconds(10);
- }
- rtp_utility_->Forward(&rtp_header_);
- return true;
- }
-
- static bool CbCodecRegistrationThread(void* context) {
- return reinterpret_cast<AcmReRegisterIsacMtTestOldApi*>(context)
- ->CbCodecRegistrationImpl();
- }
-
- bool CbCodecRegistrationImpl() {
- SleepMs(1);
- if (HasFatalFailure()) {
- // End the test early if a fatal failure (ASSERT_*) has occurred.
- test_complete_->Set();
- }
- CriticalSectionScoped lock(crit_sect_.get());
- if (!codec_registered_ &&
- receive_packet_count_ > kRegisterAfterNumPackets) {
- // Register the iSAC encoder.
- EXPECT_EQ(0, acm_->RegisterSendCodec(codec_));
- codec_registered_ = true;
- }
- if (codec_registered_ && receive_packet_count_ > kNumPackets) {
- test_complete_->Set();
- }
- return true;
- }
-
- rtc::scoped_ptr<ThreadWrapper> receive_thread_;
- rtc::scoped_ptr<ThreadWrapper> codec_registration_thread_;
- const rtc::scoped_ptr<EventWrapper> test_complete_;
- const rtc::scoped_ptr<CriticalSectionWrapper> crit_sect_;
- bool codec_registered_ GUARDED_BY(crit_sect_);
- int receive_packet_count_ GUARDED_BY(crit_sect_);
- int64_t next_insert_packet_time_ms_ GUARDED_BY(crit_sect_);
- rtc::scoped_ptr<AudioEncoderIsac> isac_encoder_;
- rtc::scoped_ptr<SimulatedClock> fake_clock_;
- test::AudioLoop audio_loop_;
-};
-
-TEST_F(AcmReRegisterIsacMtTestOldApi, DISABLED_ON_IOS(IF_ISAC(DoTest))) {
- EXPECT_EQ(kEventSignaled, RunTest());
-}
-
-// Disabling all of these tests on iOS until file support has been added.
-// See https://code.google.com/p/webrtc/issues/detail?id=4752 for details.
-#if !defined(WEBRTC_IOS)
-
-class AcmReceiverBitExactnessOldApi : public ::testing::Test {
- public:
- static std::string PlatformChecksum(std::string win64,
- std::string android,
- std::string others) {
-#if defined(_WIN32) && defined(WEBRTC_ARCH_64_BITS)
- return win64;
-#elif defined(WEBRTC_ANDROID)
- return android;
-#else
- return others;
-#endif
- }
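-
-  // (Per-platform reference checksums are needed because codecs with
-  // floating-point signal paths are not bit-exact across architectures.)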
-
- protected:
- struct ExternalDecoder {
- int rtp_payload_type;
- AudioDecoder* external_decoder;
- int sample_rate_hz;
- int num_channels;
- };
-
- void Run(int output_freq_hz,
- const std::string& checksum_ref,
- const std::vector<ExternalDecoder>& external_decoders) {
- const std::string input_file_name =
- webrtc::test::ResourcePath("audio_coding/neteq_universal_new", "rtp");
- rtc::scoped_ptr<test::RtpFileSource> packet_source(
- test::RtpFileSource::Create(input_file_name));
-#ifdef WEBRTC_ANDROID
- // Filter out iLBC and iSAC-swb since they are not supported on Android.
- packet_source->FilterOutPayloadType(102); // iLBC.
- packet_source->FilterOutPayloadType(104); // iSAC-swb.
-#endif
-
- test::AudioChecksum checksum;
- const std::string output_file_name =
- webrtc::test::OutputPath() +
- ::testing::UnitTest::GetInstance()
- ->current_test_info()
- ->test_case_name() +
- "_" + ::testing::UnitTest::GetInstance()->current_test_info()->name() +
- "_output.pcm";
- test::OutputAudioFile output_file(output_file_name);
- test::AudioSinkFork output(&checksum, &output_file);
-
- test::AcmReceiveTestOldApi test(
- packet_source.get(),
- &output,
- output_freq_hz,
- test::AcmReceiveTestOldApi::kArbitraryChannels);
- ASSERT_NO_FATAL_FAILURE(test.RegisterNetEqTestCodecs());
- for (const auto& ed : external_decoders) {
- ASSERT_EQ(0, test.RegisterExternalReceiveCodec(
- ed.rtp_payload_type, ed.external_decoder,
- ed.sample_rate_hz, ed.num_channels));
- }
- test.Run();
-
- std::string checksum_string = checksum.Finish();
- EXPECT_EQ(checksum_ref, checksum_string);
- }
-};
-
-#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX)) && \
-    defined(WEBRTC_CODEC_ILBC) && defined(WEBRTC_CODEC_G722)
-#define IF_ALL_CODECS(x) x
-#else
-#define IF_ALL_CODECS(x) DISABLED_##x
-#endif
-
-// Fails Android ARM64. https://code.google.com/p/webrtc/issues/detail?id=4199
-#if defined(WEBRTC_ANDROID) && defined(WEBRTC_ARCH_ARM64)
-#define MAYBE_8kHzOutput DISABLED_8kHzOutput
-#else
-#define MAYBE_8kHzOutput 8kHzOutput
-#endif
-TEST_F(AcmReceiverBitExactnessOldApi, IF_ALL_CODECS(MAYBE_8kHzOutput)) {
- Run(8000, PlatformChecksum("dcee98c623b147ebe1b40dd30efa896e",
- "adc92e173f908f93b96ba5844209815a",
- "908002dc01fc4eb1d2be24eb1d3f354b"),
- std::vector<ExternalDecoder>());
-}
-
-// Fails Android ARM64. https://code.google.com/p/webrtc/issues/detail?id=4199
-#if defined(WEBRTC_ANDROID) && defined(WEBRTC_ARCH_ARM64)
-#define MAYBE_16kHzOutput DISABLED_16kHzOutput
-#else
-#define MAYBE_16kHzOutput 16kHzOutput
-#endif
-TEST_F(AcmReceiverBitExactnessOldApi, IF_ALL_CODECS(MAYBE_16kHzOutput)) {
- Run(16000, PlatformChecksum("f790e7a8cce4e2c8b7bb5e0e4c5dac0d",
- "8cffa6abcb3e18e33b9d857666dff66a",
- "a909560b5ca49fa472b17b7b277195e9"),
- std::vector<ExternalDecoder>());
-}
-
-// Fails Android ARM64. https://code.google.com/p/webrtc/issues/detail?id=4199
-#if defined(WEBRTC_ANDROID) && defined(WEBRTC_ARCH_ARM64)
-#define MAYBE_32kHzOutput DISABLED_32kHzOutput
-#else
-#define MAYBE_32kHzOutput 32kHzOutput
-#endif
-TEST_F(AcmReceiverBitExactnessOldApi, IF_ALL_CODECS(MAYBE_32kHzOutput)) {
- Run(32000, PlatformChecksum("306e0d990ee6e92de3fbecc0123ece37",
- "3e126fe894720c3f85edadcc91964ba5",
- "441aab4b347fb3db4e9244337aca8d8e"),
- std::vector<ExternalDecoder>());
-}
-
-// Fails Android ARM64. https://code.google.com/p/webrtc/issues/detail?id=4199
-#if defined(WEBRTC_ANDROID) && defined(WEBRTC_ARCH_ARM64)
-#define MAYBE_48kHzOutput DISABLED_48kHzOutput
-#else
-#define MAYBE_48kHzOutput 48kHzOutput
-#endif
-TEST_F(AcmReceiverBitExactnessOldApi, IF_ALL_CODECS(MAYBE_48kHzOutput)) {
- Run(48000, PlatformChecksum("aa7c232f63a67b2a72703593bdd172e0",
- "0155665e93067c4e89256b944dd11999",
- "4ee2730fa1daae755e8a8fd3abd779ec"),
- std::vector<ExternalDecoder>());
-}
-
-// Fails Android ARM64. https://code.google.com/p/webrtc/issues/detail?id=4199
-#if defined(WEBRTC_ANDROID) && defined(WEBRTC_ARCH_ARM64)
-#define MAYBE_48kHzOutputExternalDecoder DISABLED_48kHzOutputExternalDecoder
-#else
-#define MAYBE_48kHzOutputExternalDecoder 48kHzOutputExternalDecoder
-#endif
-TEST_F(AcmReceiverBitExactnessOldApi,
- IF_ALL_CODECS(MAYBE_48kHzOutputExternalDecoder)) {
- AudioDecoderPcmU decoder(1);
- MockAudioDecoder mock_decoder;
- // Set expectations on the mock decoder and also delegate the calls to the
- // real decoder.
- EXPECT_CALL(mock_decoder, IncomingPacket(_, _, _, _, _))
- .Times(AtLeast(1))
- .WillRepeatedly(Invoke(&decoder, &AudioDecoderPcmU::IncomingPacket));
- EXPECT_CALL(mock_decoder, Channels())
- .Times(AtLeast(1))
- .WillRepeatedly(Invoke(&decoder, &AudioDecoderPcmU::Channels));
- EXPECT_CALL(mock_decoder, Decode(_, _, _, _, _, _))
- .Times(AtLeast(1))
- .WillRepeatedly(Invoke(&decoder, &AudioDecoderPcmU::Decode));
- EXPECT_CALL(mock_decoder, HasDecodePlc())
- .Times(AtLeast(1))
- .WillRepeatedly(Invoke(&decoder, &AudioDecoderPcmU::HasDecodePlc));
- EXPECT_CALL(mock_decoder, PacketDuration(_, _))
- .Times(AtLeast(1))
- .WillRepeatedly(Invoke(&decoder, &AudioDecoderPcmU::PacketDuration));
- ExternalDecoder ed;
- ed.rtp_payload_type = 0;
- ed.external_decoder = &mock_decoder;
- ed.sample_rate_hz = 8000;
- ed.num_channels = 1;
- std::vector<ExternalDecoder> external_decoders;
- external_decoders.push_back(ed);
-
- Run(48000, PlatformChecksum("aa7c232f63a67b2a72703593bdd172e0",
- "0155665e93067c4e89256b944dd11999",
- "4ee2730fa1daae755e8a8fd3abd779ec"),
- external_decoders);
-
- EXPECT_CALL(mock_decoder, Die());
-}
-
-// This test verifies bit exactness for the send side of ACM. The test setup
-// is a chain of three different test classes:
-//
-// test::AcmSendTestOldApi -> AcmSenderBitExactnessOldApi
-//     -> test::AcmReceiveTestOldApi
-//
-// The receiver side drives the test by requesting new packets from
-// AcmSenderBitExactnessOldApi::NextPacket(). This method, in turn, asks for
-// a packet from test::AcmSendTestOldApi::NextPacket(), which inserts audio
-// from the input file until one packet is produced. (The input file loops
-// indefinitely.) Before passing the packet to the receiver, this test class
-// verifies the packet header and updates a payload checksum with the new
-// payload. The decoded output from the receiver is also verified with a
-// (separate) checksum.
-class AcmSenderBitExactnessOldApi : public ::testing::Test,
- public test::PacketSource {
- protected:
- static const int kTestDurationMs = 1000;
-
- AcmSenderBitExactnessOldApi()
- : frame_size_rtp_timestamps_(0),
- packet_count_(0),
- payload_type_(0),
- last_sequence_number_(0),
- last_timestamp_(0) {}
-
- // Sets up the test::AcmSendTest object. Returns true on success, otherwise
- // false.
- bool SetUpSender() {
- const std::string input_file_name =
- webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm");
- // Note that |audio_source_| will loop forever. The test duration is set
- // explicitly by |kTestDurationMs|.
- audio_source_.reset(new test::InputAudioFile(input_file_name));
- static const int kSourceRateHz = 32000;
- send_test_.reset(new test::AcmSendTestOldApi(
- audio_source_.get(), kSourceRateHz, kTestDurationMs));
- return send_test_.get() != NULL;
- }
-
- // Registers a send codec in the test::AcmSendTest object. Returns true on
- // success, false on failure.
- bool RegisterSendCodec(const char* payload_name,
- int sampling_freq_hz,
- int channels,
- int payload_type,
- int frame_size_samples,
- int frame_size_rtp_timestamps) {
- payload_type_ = payload_type;
- frame_size_rtp_timestamps_ = frame_size_rtp_timestamps;
- return send_test_->RegisterCodec(payload_name,
- sampling_freq_hz,
- channels,
- payload_type,
- frame_size_samples);
- }
-
- bool RegisterExternalSendCodec(AudioEncoder* external_speech_encoder,
- int payload_type) {
- payload_type_ = payload_type;
- frame_size_rtp_timestamps_ =
- external_speech_encoder->Num10MsFramesInNextPacket() *
- external_speech_encoder->RtpTimestampRateHz() / 100;
- return send_test_->RegisterExternalCodec(external_speech_encoder);
- }
-
- // Runs the test. SetUpSender() and RegisterSendCodec() must have been called
- // before calling this method.
- void Run(const std::string& audio_checksum_ref,
- const std::string& payload_checksum_ref,
- int expected_packets,
- test::AcmReceiveTestOldApi::NumOutputChannels expected_channels) {
- // Set up the receiver used to decode the packets and verify the decoded
- // output.
- test::AudioChecksum audio_checksum;
- const std::string output_file_name =
- webrtc::test::OutputPath() +
- ::testing::UnitTest::GetInstance()
- ->current_test_info()
- ->test_case_name() +
- "_" + ::testing::UnitTest::GetInstance()->current_test_info()->name() +
- "_output.pcm";
- test::OutputAudioFile output_file(output_file_name);
- // Have the output audio sent both to file and to the checksum calculator.
- test::AudioSinkFork output(&audio_checksum, &output_file);
- const int kOutputFreqHz = 8000;
- test::AcmReceiveTestOldApi receive_test(
- this, &output, kOutputFreqHz, expected_channels);
- ASSERT_NO_FATAL_FAILURE(receive_test.RegisterDefaultCodecs());
-
- // This is where the actual test is executed.
- receive_test.Run();
-
- // Extract and verify the audio checksum.
- std::string checksum_string = audio_checksum.Finish();
- EXPECT_EQ(audio_checksum_ref, checksum_string);
-
- // Extract and verify the payload checksum.
- char checksum_result[rtc::Md5Digest::kSize];
- payload_checksum_.Finish(checksum_result, rtc::Md5Digest::kSize);
- checksum_string = rtc::hex_encode(checksum_result, rtc::Md5Digest::kSize);
- EXPECT_EQ(payload_checksum_ref, checksum_string);
-
- // Verify number of packets produced.
- EXPECT_EQ(expected_packets, packet_count_);
- }
-
- // Returns a pointer to the next packet. Returns NULL if the source is
- // depleted (i.e., the test duration is exceeded), or if an error occurred.
- // Inherited from test::PacketSource.
- test::Packet* NextPacket() override {
- // Get the next packet from AcmSendTest. Ownership of |packet| is
- // transferred to this method.
- test::Packet* packet = send_test_->NextPacket();
- if (!packet)
- return NULL;
-
- VerifyPacket(packet);
- // TODO(henrik.lundin) Save the packet to file as well.
-
- // Pass it on to the caller. The caller becomes the owner of |packet|.
- return packet;
- }
-
- // Verifies the packet.
- void VerifyPacket(const test::Packet* packet) {
- EXPECT_TRUE(packet->valid_header());
- // (We can check the header fields even if valid_header() is false.)
- EXPECT_EQ(payload_type_, packet->header().payloadType);
- if (packet_count_ > 0) {
- // This is not the first packet.
- uint16_t sequence_number_diff =
- packet->header().sequenceNumber - last_sequence_number_;
- EXPECT_EQ(1, sequence_number_diff);
- uint32_t timestamp_diff = packet->header().timestamp - last_timestamp_;
- EXPECT_EQ(frame_size_rtp_timestamps_, timestamp_diff);
- }
- ++packet_count_;
- last_sequence_number_ = packet->header().sequenceNumber;
- last_timestamp_ = packet->header().timestamp;
- // Update the checksum.
- payload_checksum_.Update(packet->payload(), packet->payload_length_bytes());
- }
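-
-  // For example, with the iSAC WB 30 ms setup below (480 samples per frame
-  // at a 16 kHz RTP clock), consecutive packets must differ by exactly 1 in
-  // sequence number and by 480 in RTP timestamp.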
-
- void SetUpTest(const char* codec_name,
- int codec_sample_rate_hz,
- int channels,
- int payload_type,
- int codec_frame_size_samples,
- int codec_frame_size_rtp_timestamps) {
- ASSERT_TRUE(SetUpSender());
- ASSERT_TRUE(RegisterSendCodec(codec_name,
- codec_sample_rate_hz,
- channels,
- payload_type,
- codec_frame_size_samples,
- codec_frame_size_rtp_timestamps));
- }
-
- void SetUpTestExternalEncoder(AudioEncoder* external_speech_encoder,
- int payload_type) {
- ASSERT_TRUE(SetUpSender());
- ASSERT_TRUE(
- RegisterExternalSendCodec(external_speech_encoder, payload_type));
- }
-
- rtc::scoped_ptr<test::AcmSendTestOldApi> send_test_;
- rtc::scoped_ptr<test::InputAudioFile> audio_source_;
- uint32_t frame_size_rtp_timestamps_;
- int packet_count_;
- uint8_t payload_type_;
- uint16_t last_sequence_number_;
- uint32_t last_timestamp_;
- rtc::Md5Digest payload_checksum_;
-};
-
-// Fails Android ARM64. https://code.google.com/p/webrtc/issues/detail?id=4199
-#if defined(WEBRTC_ANDROID) && defined(WEBRTC_ARCH_ARM64)
-#define MAYBE_IsacWb30ms DISABLED_IsacWb30ms
-#else
-#define MAYBE_IsacWb30ms IsacWb30ms
-#endif
-TEST_F(AcmSenderBitExactnessOldApi, IF_ISAC(MAYBE_IsacWb30ms)) {
- ASSERT_NO_FATAL_FAILURE(SetUpTest("ISAC", 16000, 1, 103, 480, 480));
- Run(AcmReceiverBitExactnessOldApi::PlatformChecksum(
- "c7e5bdadfa2871df95639fcc297cf23d",
- "0499ca260390769b3172136faad925b9",
- "0b58f9eeee43d5891f5f6c75e77984a3"),
- AcmReceiverBitExactnessOldApi::PlatformChecksum(
- "d42cb5195463da26c8129bbfe73a22e6",
- "83de248aea9c3c2bd680b6952401b4ca",
- "3c79f16f34218271f3dca4e2b1dfe1bb"),
- 33,
- test::AcmReceiveTestOldApi::kMonoOutput);
-}
-
-// Fails Android ARM64. https://code.google.com/p/webrtc/issues/detail?id=4199
-#if defined(WEBRTC_ANDROID) && defined(WEBRTC_ARCH_ARM64)
-#define MAYBE_IsacWb60ms DISABLED_IsacWb60ms
-#else
-#define MAYBE_IsacWb60ms IsacWb60ms
-#endif
-TEST_F(AcmSenderBitExactnessOldApi, IF_ISAC(MAYBE_IsacWb60ms)) {
- ASSERT_NO_FATAL_FAILURE(SetUpTest("ISAC", 16000, 1, 103, 960, 960));
- Run(AcmReceiverBitExactnessOldApi::PlatformChecksum(
- "14d63c5f08127d280e722e3191b73bdd",
- "8da003e16c5371af2dc2be79a50f9076",
- "1ad29139a04782a33daad8c2b9b35875"),
- AcmReceiverBitExactnessOldApi::PlatformChecksum(
- "ebe04a819d3a9d83a83a17f271e1139a",
- "97aeef98553b5a4b5a68f8b716e8eaf0",
- "9e0a0ab743ad987b55b8e14802769c56"),
- 16,
- test::AcmReceiveTestOldApi::kMonoOutput);
-}
-
-#ifdef WEBRTC_CODEC_ISAC
-#define IF_ISAC_FLOAT(x) x
-#else
-#define IF_ISAC_FLOAT(x) DISABLED_##x
-#endif
-
-TEST_F(AcmSenderBitExactnessOldApi,
- DISABLED_ON_ANDROID(IF_ISAC_FLOAT(IsacSwb30ms))) {
- ASSERT_NO_FATAL_FAILURE(SetUpTest("ISAC", 32000, 1, 104, 960, 960));
- Run(AcmReceiverBitExactnessOldApi::PlatformChecksum(
- "2b3c387d06f00b7b7aad4c9be56fb83d",
- "",
- "5683b58da0fbf2063c7adc2e6bfb3fb8"),
- AcmReceiverBitExactnessOldApi::PlatformChecksum(
- "bcc2041e7744c7ebd9f701866856849c",
- "",
- "ce86106a93419aefb063097108ec94ab"),
- 33, test::AcmReceiveTestOldApi::kMonoOutput);
-}
-
-TEST_F(AcmSenderBitExactnessOldApi, Pcm16_8000khz_10ms) {
- ASSERT_NO_FATAL_FAILURE(SetUpTest("L16", 8000, 1, 107, 80, 80));
- Run("de4a98e1406f8b798d99cd0704e862e2",
- "c1edd36339ce0326cc4550041ad719a0",
- 100,
- test::AcmReceiveTestOldApi::kMonoOutput);
-}
-
-TEST_F(AcmSenderBitExactnessOldApi, Pcm16_16000khz_10ms) {
- ASSERT_NO_FATAL_FAILURE(SetUpTest("L16", 16000, 1, 108, 160, 160));
- Run("ae646d7b68384a1269cc080dd4501916",
- "ad786526383178b08d80d6eee06e9bad",
- 100,
- test::AcmReceiveTestOldApi::kMonoOutput);
-}
-
-TEST_F(AcmSenderBitExactnessOldApi, Pcm16_32000khz_10ms) {
- ASSERT_NO_FATAL_FAILURE(SetUpTest("L16", 32000, 1, 109, 320, 320));
- Run("7fe325e8fbaf755e3c5df0b11a4774fb",
- "5ef82ea885e922263606c6fdbc49f651",
- 100,
- test::AcmReceiveTestOldApi::kMonoOutput);
-}
-
-TEST_F(AcmSenderBitExactnessOldApi, Pcm16_stereo_8000khz_10ms) {
- ASSERT_NO_FATAL_FAILURE(SetUpTest("L16", 8000, 2, 111, 80, 80));
- Run("fb263b74e7ac3de915474d77e4744ceb",
- "62ce5adb0d4965d0a52ec98ae7f98974",
- 100,
- test::AcmReceiveTestOldApi::kStereoOutput);
-}
-
-TEST_F(AcmSenderBitExactnessOldApi, Pcm16_stereo_16000khz_10ms) {
- ASSERT_NO_FATAL_FAILURE(SetUpTest("L16", 16000, 2, 112, 160, 160));
- Run("d09e9239553649d7ac93e19d304281fd",
- "41ca8edac4b8c71cd54fd9f25ec14870",
- 100,
- test::AcmReceiveTestOldApi::kStereoOutput);
-}
-
-TEST_F(AcmSenderBitExactnessOldApi, Pcm16_stereo_32000khz_10ms) {
- ASSERT_NO_FATAL_FAILURE(SetUpTest("L16", 32000, 2, 113, 320, 320));
- Run("5f025d4f390982cc26b3d92fe02e3044",
- "50e58502fb04421bf5b857dda4c96879",
- 100,
- test::AcmReceiveTestOldApi::kStereoOutput);
-}
-
-TEST_F(AcmSenderBitExactnessOldApi, Pcmu_20ms) {
- ASSERT_NO_FATAL_FAILURE(SetUpTest("PCMU", 8000, 1, 0, 160, 160));
- Run("81a9d4c0bb72e9becc43aef124c981e9",
- "8f9b8750bd80fe26b6cbf6659b89f0f9",
- 50,
- test::AcmReceiveTestOldApi::kMonoOutput);
-}
-
-TEST_F(AcmSenderBitExactnessOldApi, Pcma_20ms) {
- ASSERT_NO_FATAL_FAILURE(SetUpTest("PCMA", 8000, 1, 8, 160, 160));
- Run("39611f798969053925a49dc06d08de29",
- "6ad745e55aa48981bfc790d0eeef2dd1",
- 50,
- test::AcmReceiveTestOldApi::kMonoOutput);
-}
-
-TEST_F(AcmSenderBitExactnessOldApi, Pcmu_stereo_20ms) {
- ASSERT_NO_FATAL_FAILURE(SetUpTest("PCMU", 8000, 2, 110, 160, 160));
- Run("437bec032fdc5cbaa0d5175430af7b18",
- "60b6f25e8d1e74cb679cfe756dd9bca5",
- 50,
- test::AcmReceiveTestOldApi::kStereoOutput);
-}
-
-TEST_F(AcmSenderBitExactnessOldApi, Pcma_stereo_20ms) {
- ASSERT_NO_FATAL_FAILURE(SetUpTest("PCMA", 8000, 2, 118, 160, 160));
- Run("a5c6d83c5b7cedbeff734238220a4b0c",
- "92b282c83efd20e7eeef52ba40842cf7",
- 50,
- test::AcmReceiveTestOldApi::kStereoOutput);
-}
-
-#ifdef WEBRTC_CODEC_ILBC
-#define IF_ILBC(x) x
-#else
-#define IF_ILBC(x) DISABLED_##x
-#endif
-
-TEST_F(AcmSenderBitExactnessOldApi, DISABLED_ON_ANDROID(IF_ILBC(Ilbc_30ms))) {
- ASSERT_NO_FATAL_FAILURE(SetUpTest("ILBC", 8000, 1, 102, 240, 240));
- Run(AcmReceiverBitExactnessOldApi::PlatformChecksum(
- "7b6ec10910debd9af08011d3ed5249f7",
- "android_audio",
- "7b6ec10910debd9af08011d3ed5249f7"),
- AcmReceiverBitExactnessOldApi::PlatformChecksum(
- "cfae2e9f6aba96e145f2bcdd5050ce78",
- "android_payload",
- "cfae2e9f6aba96e145f2bcdd5050ce78"),
- 33,
- test::AcmReceiveTestOldApi::kMonoOutput);
-}
-
-#ifdef WEBRTC_CODEC_G722
-#define IF_G722(x) x
-#else
-#define IF_G722(x) DISABLED_##x
-#endif
-
-TEST_F(AcmSenderBitExactnessOldApi, DISABLED_ON_ANDROID(IF_G722(G722_20ms))) {
- ASSERT_NO_FATAL_FAILURE(SetUpTest("G722", 16000, 1, 9, 320, 160));
- Run(AcmReceiverBitExactnessOldApi::PlatformChecksum(
- "7d759436f2533582950d148b5161a36c",
- "android_audio",
- "7d759436f2533582950d148b5161a36c"),
- AcmReceiverBitExactnessOldApi::PlatformChecksum(
- "fc68a87e1380614e658087cb35d5ca10",
- "android_payload",
- "fc68a87e1380614e658087cb35d5ca10"),
- 50,
- test::AcmReceiveTestOldApi::kMonoOutput);
-}
-
-TEST_F(AcmSenderBitExactnessOldApi,
- DISABLED_ON_ANDROID(IF_G722(G722_stereo_20ms))) {
- ASSERT_NO_FATAL_FAILURE(SetUpTest("G722", 16000, 2, 119, 320, 160));
- Run(AcmReceiverBitExactnessOldApi::PlatformChecksum(
- "7190ee718ab3d80eca181e5f7140c210",
- "android_audio",
- "7190ee718ab3d80eca181e5f7140c210"),
- AcmReceiverBitExactnessOldApi::PlatformChecksum(
- "66516152eeaa1e650ad94ff85f668dac",
- "android_payload",
- "66516152eeaa1e650ad94ff85f668dac"),
- 50,
- test::AcmReceiveTestOldApi::kStereoOutput);
-}
-
-// Fails Android ARM64. https://code.google.com/p/webrtc/issues/detail?id=4199
-#if defined(WEBRTC_ANDROID) && defined(WEBRTC_ARCH_ARM64)
-#define MAYBE_Opus_stereo_20ms DISABLED_Opus_stereo_20ms
-#else
-#define MAYBE_Opus_stereo_20ms Opus_stereo_20ms
-#endif
-TEST_F(AcmSenderBitExactnessOldApi, MAYBE_Opus_stereo_20ms) {
- ASSERT_NO_FATAL_FAILURE(SetUpTest("opus", 48000, 2, 120, 960, 960));
- Run(AcmReceiverBitExactnessOldApi::PlatformChecksum(
- "855041f2490b887302bce9d544731849",
- "1e1a0fce893fef2d66886a7f09e2ebce",
- "855041f2490b887302bce9d544731849"),
- AcmReceiverBitExactnessOldApi::PlatformChecksum(
- "d781cce1ab986b618d0da87226cdde30",
- "1a1fe04dd12e755949987c8d729fb3e0",
- "d781cce1ab986b618d0da87226cdde30"),
- 50,
- test::AcmReceiveTestOldApi::kStereoOutput);
-}
-
-// Fails Android ARM64. https://code.google.com/p/webrtc/issues/detail?id=4199
-#if defined(WEBRTC_ANDROID) && defined(WEBRTC_ARCH_ARM64)
-#define MAYBE_Opus_stereo_20ms_voip DISABLED_Opus_stereo_20ms_voip
-#else
-#define MAYBE_Opus_stereo_20ms_voip Opus_stereo_20ms_voip
-#endif
-TEST_F(AcmSenderBitExactnessOldApi, MAYBE_Opus_stereo_20ms_voip) {
- ASSERT_NO_FATAL_FAILURE(SetUpTest("opus", 48000, 2, 120, 960, 960));
- // If not set, default will be kAudio in case of stereo.
- EXPECT_EQ(0, send_test_->acm()->SetOpusApplication(kVoip));
- Run(AcmReceiverBitExactnessOldApi::PlatformChecksum(
- "9b9e12bc3cc793740966e11cbfa8b35b",
- "57412a4b5771d19ff03ec35deffe7067",
- "9b9e12bc3cc793740966e11cbfa8b35b"),
- AcmReceiverBitExactnessOldApi::PlatformChecksum(
- "c7340b1189652ab6b5e80dade7390cb4",
- "cdfe85939c411d12b61701c566e22d26",
- "c7340b1189652ab6b5e80dade7390cb4"),
- 50,
- test::AcmReceiveTestOldApi::kStereoOutput);
-}
-
-// This test verifies the SetBitRate function. The bitrate is changed at the
-// beginning of the run, and the number of generated bytes is checked.
-class AcmSetBitRateOldApi : public ::testing::Test {
- protected:
- static const int kTestDurationMs = 1000;
-
- // Sets up the test::AcmSendTest object. Returns true on success, otherwise
- // false.
- bool SetUpSender() {
- const std::string input_file_name =
- webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm");
- // Note that |audio_source_| will loop forever. The test duration is set
- // explicitly by |kTestDurationMs|.
- audio_source_.reset(new test::InputAudioFile(input_file_name));
- static const int kSourceRateHz = 32000;
- send_test_.reset(new test::AcmSendTestOldApi(
- audio_source_.get(), kSourceRateHz, kTestDurationMs));
- return send_test_.get();
- }
-
- // Registers a send codec in the test::AcmSendTest object. Returns true on
- // success, false on failure.
- virtual bool RegisterSendCodec(const char* payload_name,
- int sampling_freq_hz,
- int channels,
- int payload_type,
- int frame_size_samples,
- int frame_size_rtp_timestamps) {
- return send_test_->RegisterCodec(payload_name, sampling_freq_hz, channels,
- payload_type, frame_size_samples);
- }
-
- // Runs the test. SetUpSender() and RegisterSendCodec() must have been called
- // before calling this method.
- void Run(int target_bitrate_bps, int expected_total_bits) {
- ASSERT_TRUE(send_test_->acm());
- send_test_->acm()->SetBitRate(target_bitrate_bps);
- int nr_bytes = 0;
- while (test::Packet* next_packet = send_test_->NextPacket()) {
- nr_bytes += next_packet->payload_length_bytes();
- delete next_packet;
- }
- EXPECT_EQ(expected_total_bits, nr_bytes * 8);
- }
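-
-  // (For a 1 s test, the expected total is roughly
-  // target_bitrate_bps * kTestDurationMs / 1000 bits; e.g. a 50 kbps target
-  // gives on the order of 50000 bits, with the exact count depending on the
-  // codec's rate control.)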
-
- void SetUpTest(const char* codec_name,
- int codec_sample_rate_hz,
- int channels,
- int payload_type,
- int codec_frame_size_samples,
- int codec_frame_size_rtp_timestamps) {
- ASSERT_TRUE(SetUpSender());
- ASSERT_TRUE(RegisterSendCodec(codec_name, codec_sample_rate_hz, channels,
- payload_type, codec_frame_size_samples,
- codec_frame_size_rtp_timestamps));
- }
-
- rtc::scoped_ptr<test::AcmSendTestOldApi> send_test_;
- rtc::scoped_ptr<test::InputAudioFile> audio_source_;
-};
-
-TEST_F(AcmSetBitRateOldApi, Opus_48khz_20ms_10kbps) {
- ASSERT_NO_FATAL_FAILURE(SetUpTest("opus", 48000, 1, 107, 960, 960));
-#if defined(WEBRTC_ANDROID)
- Run(10000, 9328);
-#else
- Run(10000, 9072);
-#endif // WEBRTC_ANDROID
-}
-
-TEST_F(AcmSetBitRateOldApi, Opus_48khz_20ms_50kbps) {
- ASSERT_NO_FATAL_FAILURE(SetUpTest("opus", 48000, 1, 107, 960, 960));
-#if defined(WEBRTC_ANDROID)
- Run(50000, 47952);
-#else
- Run(50000, 49600);
-#endif // WEBRTC_ANDROID
-}
-
-// The result on the Android platforms is inconsistent for this test case:
-// android_rel yields a different byte count than android and
-// android_arm64_rel.
-TEST_F(AcmSetBitRateOldApi, DISABLED_ON_ANDROID(Opus_48khz_20ms_100kbps)) {
- ASSERT_NO_FATAL_FAILURE(SetUpTest("opus", 48000, 1, 107, 960, 960));
- Run(100000, 100888);
-}
-
-// These next two tests ensure that the SetBitRate function has no effect on
-// PCM: L16 at 8 kHz mono always encodes at 8000 samples/s * 16 bits =
-// 128000 bits/s, regardless of the requested rate.
-TEST_F(AcmSetBitRateOldApi, Pcm16_8khz_10ms_8kbps) {
- ASSERT_NO_FATAL_FAILURE(SetUpTest("L16", 8000, 1, 107, 80, 80));
- Run(8000, 128000);
-}
-
-TEST_F(AcmSetBitRateOldApi, Pcm16_8khz_10ms_32kbps) {
- ASSERT_NO_FATAL_FAILURE(SetUpTest("L16", 8000, 1, 107, 80, 80));
- Run(32000, 128000);
-}
-
-// This test verifies the SetBitRate function. The bitrate is changed in the
-// middle of the run, and the numbers of generated bytes before and after the
-// change are checked.
-class AcmChangeBitRateOldApi : public AcmSetBitRateOldApi {
- protected:
- AcmChangeBitRateOldApi() : sampling_freq_hz_(0), frame_size_samples_(0) {}
-
- // Registers a send codec in the test::AcmSendTest object. Returns true on
- // success, false on failure.
- bool RegisterSendCodec(const char* payload_name,
- int sampling_freq_hz,
- int channels,
- int payload_type,
- int frame_size_samples,
- int frame_size_rtp_timestamps) override {
- frame_size_samples_ = frame_size_samples;
- sampling_freq_hz_ = sampling_freq_hz;
- return AcmSetBitRateOldApi::RegisterSendCodec(
- payload_name, sampling_freq_hz, channels, payload_type,
- frame_size_samples, frame_size_rtp_timestamps);
- }
-
- // Runs the test. SetUpSender() and RegisterSendCodec() must have been called
- // before calling this method.
- void Run(int target_bitrate_bps,
- int expected_before_switch_bits,
- int expected_after_switch_bits) {
- ASSERT_TRUE(send_test_->acm());
- int nr_packets =
- sampling_freq_hz_ * kTestDurationMs / (frame_size_samples_ * 1000);
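-    // (E.g. for the Opus tests below: 48000 * 1000 / (960 * 1000) = 50
-    // packets, so the rate switch happens after packet 25.)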
- int nr_bytes_before = 0, nr_bytes_after = 0;
- int packet_counter = 0;
- while (test::Packet* next_packet = send_test_->NextPacket()) {
- if (packet_counter == nr_packets / 2)
- send_test_->acm()->SetBitRate(target_bitrate_bps);
- if (packet_counter < nr_packets / 2)
- nr_bytes_before += next_packet->payload_length_bytes();
- else
- nr_bytes_after += next_packet->payload_length_bytes();
- packet_counter++;
- delete next_packet;
- }
- EXPECT_EQ(expected_before_switch_bits, nr_bytes_before * 8);
- EXPECT_EQ(expected_after_switch_bits, nr_bytes_after * 8);
- }
-
- uint32_t sampling_freq_hz_;
- uint32_t frame_size_samples_;
-};
-
-TEST_F(AcmChangeBitRateOldApi, Opus_48khz_20ms_10kbps) {
- ASSERT_NO_FATAL_FAILURE(SetUpTest("opus", 48000, 1, 107, 960, 960));
-#if defined(WEBRTC_ANDROID)
- Run(10000, 32200, 5496);
-#else
- Run(10000, 32200, 5432);
-#endif // WEBRTC_ANDROID
-}
-
-TEST_F(AcmChangeBitRateOldApi, Opus_48khz_20ms_50kbps) {
- ASSERT_NO_FATAL_FAILURE(SetUpTest("opus", 48000, 1, 107, 960, 960));
-#if defined(WEBRTC_ANDROID)
- Run(50000, 32200, 24912);
-#else
- Run(50000, 32200, 24792);
-#endif // WEBRTC_ANDROID
-}
-
-TEST_F(AcmChangeBitRateOldApi, Opus_48khz_20ms_100kbps) {
- ASSERT_NO_FATAL_FAILURE(SetUpTest("opus", 48000, 1, 107, 960, 960));
-#if defined(WEBRTC_ANDROID)
- Run(100000, 32200, 51480);
-#else
- Run(100000, 32200, 50584);
-#endif // WEBRTC_ANDROID
-}
-
-// These next two tests ensure that the SetBitRate function has no effect on
-// PCM: each half of the 1 s run yields 128000 bits/s * 0.5 s = 64000 bits.
-TEST_F(AcmChangeBitRateOldApi, Pcm16_8khz_10ms_8kbps) {
- ASSERT_NO_FATAL_FAILURE(SetUpTest("L16", 8000, 1, 107, 80, 80));
- Run(8000, 64000, 64000);
-}
-
-TEST_F(AcmChangeBitRateOldApi, Pcm16_8khz_10ms_32kbps) {
- ASSERT_NO_FATAL_FAILURE(SetUpTest("L16", 8000, 1, 107, 80, 80));
- Run(32000, 64000, 64000);
-}
-
-TEST_F(AcmSenderBitExactnessOldApi, External_Pcmu_20ms) {
- CodecInst codec_inst;
- codec_inst.channels = 1;
- codec_inst.pacsize = 160;
- codec_inst.pltype = 0;
- AudioEncoderPcmU encoder(codec_inst);
- MockAudioEncoder mock_encoder;
- // Set expectations on the mock encoder and also delegate the calls to the
- // real encoder.
- EXPECT_CALL(mock_encoder, MaxEncodedBytes())
- .Times(AtLeast(1))
- .WillRepeatedly(Invoke(&encoder, &AudioEncoderPcmU::MaxEncodedBytes));
- EXPECT_CALL(mock_encoder, SampleRateHz())
- .Times(AtLeast(1))
- .WillRepeatedly(Invoke(&encoder, &AudioEncoderPcmU::SampleRateHz));
- EXPECT_CALL(mock_encoder, NumChannels())
- .Times(AtLeast(1))
- .WillRepeatedly(Invoke(&encoder, &AudioEncoderPcmU::NumChannels));
- EXPECT_CALL(mock_encoder, RtpTimestampRateHz())
- .Times(AtLeast(1))
- .WillRepeatedly(Invoke(&encoder, &AudioEncoderPcmU::RtpTimestampRateHz));
- EXPECT_CALL(mock_encoder, Num10MsFramesInNextPacket())
- .Times(AtLeast(1))
- .WillRepeatedly(
- Invoke(&encoder, &AudioEncoderPcmU::Num10MsFramesInNextPacket));
- EXPECT_CALL(mock_encoder, Max10MsFramesInAPacket())
- .Times(AtLeast(1))
- .WillRepeatedly(
- Invoke(&encoder, &AudioEncoderPcmU::Max10MsFramesInAPacket));
- EXPECT_CALL(mock_encoder, GetTargetBitrate())
- .Times(AtLeast(1))
- .WillRepeatedly(Invoke(&encoder, &AudioEncoderPcmU::GetTargetBitrate));
- EXPECT_CALL(mock_encoder, EncodeInternal(_, _, _, _))
- .Times(AtLeast(1))
- .WillRepeatedly(Invoke(&encoder, &AudioEncoderPcmU::EncodeInternal));
- ASSERT_NO_FATAL_FAILURE(
- SetUpTestExternalEncoder(&mock_encoder, codec_inst.pltype));
- Run("81a9d4c0bb72e9becc43aef124c981e9", "8f9b8750bd80fe26b6cbf6659b89f0f9",
- 50, test::AcmReceiveTestOldApi::kMonoOutput);
-}
-
-// This test fixture is implemented to run ACM and change the desired output
-// frequency during the call. The input packets are simply PCM16b-wb encoded
-// payloads with a constant value of |kSampleValue|. The test fixture itself
-// acts as a PacketSource between the receive test class and the
-// constant-payload packet source class. The output is both written to file
-// and analyzed in this test fixture.
-class AcmSwitchingOutputFrequencyOldApi : public ::testing::Test,
- public test::PacketSource,
- public test::AudioSink {
- protected:
- static const size_t kTestNumPackets = 50;
- static const int kEncodedSampleRateHz = 16000;
- static const size_t kPayloadLenSamples = 30 * kEncodedSampleRateHz / 1000;
- static const int kPayloadType = 108; // Default payload type for PCM16b-wb.
-
- AcmSwitchingOutputFrequencyOldApi()
- : first_output_(true),
- num_packets_(0),
- packet_source_(kPayloadLenSamples,
- kSampleValue,
- kEncodedSampleRateHz,
- kPayloadType),
- output_freq_2_(0),
- has_toggled_(false) {}
-
- void Run(int output_freq_1, int output_freq_2, int toggle_period_ms) {
- // Set up the receiver used to decode the packets and verify the decoded
- // output.
- const std::string output_file_name =
- webrtc::test::OutputPath() +
- ::testing::UnitTest::GetInstance()
- ->current_test_info()
- ->test_case_name() +
- "_" + ::testing::UnitTest::GetInstance()->current_test_info()->name() +
- "_output.pcm";
- test::OutputAudioFile output_file(output_file_name);
- // Have the output audio sent both to file and to the WriteArray method in
- // this class.
- test::AudioSinkFork output(this, &output_file);
- test::AcmReceiveTestToggleOutputFreqOldApi receive_test(
- this,
- &output,
- output_freq_1,
- output_freq_2,
- toggle_period_ms,
- test::AcmReceiveTestOldApi::kMonoOutput);
- ASSERT_NO_FATAL_FAILURE(receive_test.RegisterDefaultCodecs());
- output_freq_2_ = output_freq_2;
-
- // This is where the actual test is executed.
- receive_test.Run();
- }
-
- // Inherited from test::PacketSource.
- test::Packet* NextPacket() override {
- // Check if it is time to terminate the test. The packet source is of type
- // ConstantPcmPacketSource, which is infinite, so we must end the test
- // "manually".
- if (num_packets_++ > kTestNumPackets) {
- EXPECT_TRUE(has_toggled_);
- return NULL; // Test ended.
- }
-
- // Get the next packet from the source.
- return packet_source_.NextPacket();
- }
-
- // Inherited from test::AudioSink.
- bool WriteArray(const int16_t* audio, size_t num_samples) {
- // Skip checking the first output frame, since it has a number of zeros
- // due to how NetEq is initialized.
- if (first_output_) {
- first_output_ = false;
- return true;
- }
- for (size_t i = 0; i < num_samples; ++i) {
- EXPECT_EQ(kSampleValue, audio[i]);
- }
- if (num_samples ==
- static_cast<size_t>(output_freq_2_ / 100)) // Size of 10 ms frame.
- has_toggled_ = true;
- // The return value does not say if the values match the expectation, just
- // that the method could process the samples.
- return true;
- }
-
- const int16_t kSampleValue = 1000;
- bool first_output_;
- size_t num_packets_;
- test::ConstantPcmPacketSource packet_source_;
- int output_freq_2_;
- bool has_toggled_;
-};
-
-TEST_F(AcmSwitchingOutputFrequencyOldApi, TestWithoutToggling) {
- Run(16000, 16000, 1000);
-}
-
-TEST_F(AcmSwitchingOutputFrequencyOldApi, Toggle16KhzTo32Khz) {
- Run(16000, 32000, 1000);
-}
-
-TEST_F(AcmSwitchingOutputFrequencyOldApi, Toggle32KhzTo16Khz) {
- Run(32000, 16000, 1000);
-}
-
-TEST_F(AcmSwitchingOutputFrequencyOldApi, Toggle16KhzTo8Khz) {
- Run(16000, 8000, 1000);
-}
-
-TEST_F(AcmSwitchingOutputFrequencyOldApi, Toggle8KhzTo16Khz) {
- Run(8000, 16000, 1000);
-}
-
-#endif
-
-} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/main/acm2/call_statistics.cc b/webrtc/modules/audio_coding/main/acm2/call_statistics.cc
deleted file mode 100644
index 4c3e9fc393..0000000000
--- a/webrtc/modules/audio_coding/main/acm2/call_statistics.cc
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/acm2/call_statistics.h"
-
-#include <assert.h>
-
-namespace webrtc {
-
-namespace acm2 {
-
-void CallStatistics::DecodedByNetEq(AudioFrame::SpeechType speech_type) {
- ++decoding_stat_.calls_to_neteq;
- switch (speech_type) {
- case AudioFrame::kNormalSpeech: {
- ++decoding_stat_.decoded_normal;
- break;
- }
- case AudioFrame::kPLC: {
- ++decoding_stat_.decoded_plc;
- break;
- }
- case AudioFrame::kCNG: {
- ++decoding_stat_.decoded_cng;
- break;
- }
- case AudioFrame::kPLCCNG: {
- ++decoding_stat_.decoded_plc_cng;
- break;
- }
- case AudioFrame::kUndefined: {
- // If the audio is decoded by NetEq, |kUndefined| is not an option.
- assert(false);
- }
- }
-}
-
-void CallStatistics::DecodedBySilenceGenerator() {
- ++decoding_stat_.calls_to_silence_generator;
-}
-
-const AudioDecodingCallStats& CallStatistics::GetDecodingStatistics() const {
- return decoding_stat_;
-}
-
-} // namespace acm2
-
-} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/main/acm2/call_statistics.h b/webrtc/modules/audio_coding/main/acm2/call_statistics.h
deleted file mode 100644
index 2aece0ff40..0000000000
--- a/webrtc/modules/audio_coding/main/acm2/call_statistics.h
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_CALL_STATISTICS_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_CALL_STATISTICS_H_
-
-#include "webrtc/common_types.h"
-#include "webrtc/modules/interface/module_common_types.h"
-
-//
-// This class does bookkeeping of calls to ACM. It is not useful to log API
-// calls which are expected every 10 ms, e.g. PlayoutData10Ms(); however, it
-// is useful to know the number of such calls in a given time interval. The
-// current implementation covers calls to PlayoutData10Ms() with detailed
-// accounting of the decoded speech type.
-//
-// Thread Safety
-// =============
-// Please note that this class is not thread safe. The class must be protected
-// if different APIs are called from different threads.
-//
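-// A minimal usage sketch (hypothetical caller, for illustration only):
-//
-//   CallStatistics call_stats;
-//   call_stats.DecodedByNetEq(AudioFrame::kNormalSpeech);  // Per decode call.
-//   call_stats.DecodedBySilenceGenerator();  // When NetEq is bypassed.
-//   const AudioDecodingCallStats& stats = call_stats.GetDecodingStatistics();
-//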
-
-namespace webrtc {
-
-namespace acm2 {
-
-class CallStatistics {
- public:
- CallStatistics() {}
- ~CallStatistics() {}
-
- // Call this method to indicate that NetEq engaged in decoding. |speech_type|
- // is the audio-type according to NetEq.
- void DecodedByNetEq(AudioFrame::SpeechType speech_type);
-
- // Call this method to indicate that a decoding call resulted in generating
- // silence, i.e., the call to NetEq is bypassed and the output audio is zero.
- void DecodedBySilenceGenerator();
-
- // Get statistics for decoding. The statistics include the number of calls
- // to NetEq and to the silence generator, as well as the type of speech
- // pulled off of NetEq; cf. the declaration of AudioDecodingCallStats for a
- // detailed description.
- const AudioDecodingCallStats& GetDecodingStatistics() const;
-
- private:
- // Reset the decoding statistics.
- void ResetDecodingStatistics();
-
- AudioDecodingCallStats decoding_stat_;
-};
-
-} // namespace acm2
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_CALL_STATISTICS_H_
diff --git a/webrtc/modules/audio_coding/main/acm2/call_statistics_unittest.cc b/webrtc/modules/audio_coding/main/acm2/call_statistics_unittest.cc
deleted file mode 100644
index 2bee96465d..0000000000
--- a/webrtc/modules/audio_coding/main/acm2/call_statistics_unittest.cc
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/audio_coding/main/acm2/call_statistics.h"
-
-namespace webrtc {
-
-namespace acm2 {
-
-TEST(CallStatisticsTest, InitializedZero) {
- CallStatistics call_stats;
- AudioDecodingCallStats stats;
-
- stats = call_stats.GetDecodingStatistics();
- EXPECT_EQ(0, stats.calls_to_neteq);
- EXPECT_EQ(0, stats.calls_to_silence_generator);
- EXPECT_EQ(0, stats.decoded_normal);
- EXPECT_EQ(0, stats.decoded_cng);
- EXPECT_EQ(0, stats.decoded_plc);
- EXPECT_EQ(0, stats.decoded_plc_cng);
-}
-
-TEST(CallStatisticsTest, AllCalls) {
- CallStatistics call_stats;
- AudioDecodingCallStats stats;
-
- call_stats.DecodedBySilenceGenerator();
- call_stats.DecodedByNetEq(AudioFrame::kNormalSpeech);
- call_stats.DecodedByNetEq(AudioFrame::kPLC);
- call_stats.DecodedByNetEq(AudioFrame::kPLCCNG);
- call_stats.DecodedByNetEq(AudioFrame::kCNG);
-
- stats = call_stats.GetDecodingStatistics();
- EXPECT_EQ(4, stats.calls_to_neteq);
- EXPECT_EQ(1, stats.calls_to_silence_generator);
- EXPECT_EQ(1, stats.decoded_normal);
- EXPECT_EQ(1, stats.decoded_cng);
- EXPECT_EQ(1, stats.decoded_plc);
- EXPECT_EQ(1, stats.decoded_plc_cng);
-}
-
-} // namespace acm2
-
-} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/main/acm2/codec_manager.cc b/webrtc/modules/audio_coding/main/acm2/codec_manager.cc
deleted file mode 100644
index f9b77e8985..0000000000
--- a/webrtc/modules/audio_coding/main/acm2/codec_manager.cc
+++ /dev/null
@@ -1,465 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/acm2/codec_manager.h"
-
-#include "webrtc/base/checks.h"
-#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_codec_database.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-namespace webrtc {
-namespace acm2 {
-
-namespace {
-bool IsCodecRED(const CodecInst& codec) {
- return (STR_CASE_CMP(codec.plname, "RED") == 0);
-}
-
-bool IsCodecCN(const CodecInst& codec) {
- return (STR_CASE_CMP(codec.plname, "CN") == 0);
-}
-
-// Check if the given codec is valid to be registered as a send codec.
-int IsValidSendCodec(const CodecInst& send_codec, bool is_primary_encoder) {
- int dummy_id = 0;
- if ((send_codec.channels != 1) && (send_codec.channels != 2)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, dummy_id,
- "Wrong number of channels (%d, only mono and stereo are "
- "supported) for %s encoder",
- send_codec.channels,
- is_primary_encoder ? "primary" : "secondary");
- return -1;
- }
-
- int codec_id = ACMCodecDB::CodecNumber(send_codec);
- if (codec_id < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, dummy_id,
- "Invalid codec setting for the send codec.");
- return -1;
- }
-
- // TODO(tlegrand): Remove this check. Already taken care of in
- // ACMCodecDB::CodecNumber().
- // Check if the payload-type is valid
- if (!ACMCodecDB::ValidPayloadType(send_codec.pltype)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, dummy_id,
- "Invalid payload-type %d for %s.", send_codec.pltype,
- send_codec.plname);
- return -1;
- }
-
- // Telephone-event cannot be a send codec.
- if (!STR_CASE_CMP(send_codec.plname, "telephone-event")) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, dummy_id,
- "telephone-event cannot be a send codec");
- return -1;
- }
-
- if (ACMCodecDB::codec_settings_[codec_id].channel_support <
- send_codec.channels) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, dummy_id,
- "%d number of channels not supportedn for %s.",
- send_codec.channels, send_codec.plname);
- return -1;
- }
-
- if (!is_primary_encoder) {
- // When registering the secondary encoder, RED and CN are not valid
- // encoder choices.
- if (IsCodecRED(send_codec)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, dummy_id,
- "RED cannot be secondary codec");
- return -1;
- }
-
- if (IsCodecCN(send_codec)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, dummy_id,
- "DTX cannot be secondary codec");
- return -1;
- }
- }
- return codec_id;
-}
-
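-// The helpers below use a small preprocessor idiom: when a codec is compiled
-// in, the guarded string comparison (ending in ||) is emitted; otherwise the
-// function body reduces to a plain |return false;|.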
-bool IsIsac(const CodecInst& codec) {
- return
-#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
- !STR_CASE_CMP(codec.plname, "isac") ||
-#endif
- false;
-}
-
-bool IsOpus(const CodecInst& codec) {
- return
-#ifdef WEBRTC_CODEC_OPUS
- !STR_CASE_CMP(codec.plname, "opus") ||
-#endif
- false;
-}
-
-bool IsPcmU(const CodecInst& codec) {
- return !STR_CASE_CMP(codec.plname, "pcmu");
-}
-
-bool IsPcmA(const CodecInst& codec) {
- return !STR_CASE_CMP(codec.plname, "pcma");
-}
-
-bool IsPcm16B(const CodecInst& codec) {
- return !STR_CASE_CMP(codec.plname, "l16");
-}
-
-bool IsIlbc(const CodecInst& codec) {
- return
-#ifdef WEBRTC_CODEC_ILBC
- !STR_CASE_CMP(codec.plname, "ilbc") ||
-#endif
- false;
-}
-
-bool IsG722(const CodecInst& codec) {
- return
-#ifdef WEBRTC_CODEC_G722
- !STR_CASE_CMP(codec.plname, "g722") ||
-#endif
- false;
-}
-
-bool CodecSupported(const CodecInst& codec) {
- return IsOpus(codec) || IsPcmU(codec) || IsPcmA(codec) || IsPcm16B(codec) ||
- IsIlbc(codec) || IsG722(codec) || IsIsac(codec);
-}
-
-const CodecInst kEmptyCodecInst = {-1, "noCodecRegistered", 0, 0, 0, 0};
-} // namespace
-
-CodecManager::CodecManager()
- : cng_nb_pltype_(255),
- cng_wb_pltype_(255),
- cng_swb_pltype_(255),
- cng_fb_pltype_(255),
- red_nb_pltype_(255),
- stereo_send_(false),
- dtx_enabled_(false),
- vad_mode_(VADNormal),
- send_codec_inst_(kEmptyCodecInst),
- red_enabled_(false),
- codec_fec_enabled_(false),
- encoder_is_opus_(false) {
- // Register the default payload type for RED and for CNG at sampling rates of
- // 8, 16, 32 and 48 kHz.
- for (const CodecInst& ci : RentACodec::Database()) {
- if (IsCodecRED(ci) && ci.plfreq == 8000) {
- red_nb_pltype_ = static_cast<uint8_t>(ci.pltype);
- } else if (IsCodecCN(ci)) {
- if (ci.plfreq == 8000) {
- cng_nb_pltype_ = static_cast<uint8_t>(ci.pltype);
- } else if (ci.plfreq == 16000) {
- cng_wb_pltype_ = static_cast<uint8_t>(ci.pltype);
- } else if (ci.plfreq == 32000) {
- cng_swb_pltype_ = static_cast<uint8_t>(ci.pltype);
- } else if (ci.plfreq == 48000) {
- cng_fb_pltype_ = static_cast<uint8_t>(ci.pltype);
- }
- }
- }
- thread_checker_.DetachFromThread();
-}
-
-CodecManager::~CodecManager() = default;
-
-int CodecManager::RegisterEncoder(const CodecInst& send_codec) {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
- int codec_id = IsValidSendCodec(send_codec, true);
-
- // Check for reported errors from function IsValidSendCodec().
- if (codec_id < 0) {
- return -1;
- }
-
- int dummy_id = 0;
- // RED can be registered with a different payload type. If not registered, a
- // default payload type is used.
- if (IsCodecRED(send_codec)) {
- // TODO(tlegrand): Remove this check. Already taken care of in
- // ACMCodecDB::CodecNumber().
- // Check if the payload-type is valid
- if (!ACMCodecDB::ValidPayloadType(send_codec.pltype)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, dummy_id,
- "Invalid payload-type %d for %s.", send_codec.pltype,
- send_codec.plname);
- return -1;
- }
- // Set RED payload type.
- if (send_codec.plfreq == 8000) {
- red_nb_pltype_ = static_cast<uint8_t>(send_codec.pltype);
- } else {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, dummy_id,
- "RegisterSendCodec() failed, invalid frequency for RED "
- "registration");
- return -1;
- }
- return 0;
- }
-
- // CNG can be registered with a different payload type. If not registered,
- // the default payload types from the codec database will be used.
- if (IsCodecCN(send_codec)) {
- // CNG is registered.
- switch (send_codec.plfreq) {
- case 8000: {
- cng_nb_pltype_ = static_cast<uint8_t>(send_codec.pltype);
- return 0;
- }
- case 16000: {
- cng_wb_pltype_ = static_cast<uint8_t>(send_codec.pltype);
- return 0;
- }
- case 32000: {
- cng_swb_pltype_ = static_cast<uint8_t>(send_codec.pltype);
- return 0;
- }
- case 48000: {
- cng_fb_pltype_ = static_cast<uint8_t>(send_codec.pltype);
- return 0;
- }
- default: {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, dummy_id,
- "RegisterSendCodec() failed, invalid frequency for CNG "
- "registration");
- return -1;
- }
- }
- }
-
- // Set stereo, and make sure VAD and DTX are turned off.
- if (send_codec.channels == 2) {
- stereo_send_ = true;
- if (dtx_enabled_) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, dummy_id,
- "VAD/DTX is turned off, not supported when sending stereo.");
- }
- dtx_enabled_ = false;
- } else {
- stereo_send_ = false;
- }
-
- // Check if the codec is already registered as the send codec.
- bool new_codec = true;
- if (codec_owner_.Encoder()) {
- int new_codec_id = ACMCodecDB::CodecNumber(send_codec_inst_);
- RTC_DCHECK_GE(new_codec_id, 0);
- new_codec = new_codec_id != codec_id;
- }
-
- if (RedPayloadType(send_codec.plfreq) == -1) {
- red_enabled_ = false;
- }
-
- encoder_is_opus_ = IsOpus(send_codec);
-
- if (new_codec) {
- // This is a new codec. Register it and return.
- RTC_DCHECK(CodecSupported(send_codec));
- if (IsOpus(send_codec)) {
- // VAD/DTX not supported.
- dtx_enabled_ = false;
- }
- if (!codec_owner_.SetEncoders(
- send_codec, dtx_enabled_ ? CngPayloadType(send_codec.plfreq) : -1,
- vad_mode_, red_enabled_ ? RedPayloadType(send_codec.plfreq) : -1))
- return -1;
- RTC_DCHECK(codec_owner_.Encoder());
-
- codec_fec_enabled_ = codec_fec_enabled_ &&
- codec_owner_.Encoder()->SetFec(codec_fec_enabled_);
-
- send_codec_inst_ = send_codec;
- return 0;
- }
-
- // This is an existing codec; re-create it if any parameters have changed.
- if (send_codec_inst_.plfreq != send_codec.plfreq ||
- send_codec_inst_.pacsize != send_codec.pacsize ||
- send_codec_inst_.channels != send_codec.channels) {
- if (!codec_owner_.SetEncoders(
- send_codec, dtx_enabled_ ? CngPayloadType(send_codec.plfreq) : -1,
- vad_mode_, red_enabled_ ? RedPayloadType(send_codec.plfreq) : -1))
- return -1;
- RTC_DCHECK(codec_owner_.Encoder());
- }
- send_codec_inst_.plfreq = send_codec.plfreq;
- send_codec_inst_.pacsize = send_codec.pacsize;
- send_codec_inst_.channels = send_codec.channels;
- send_codec_inst_.pltype = send_codec.pltype;
-
- // Check if a change in rate is required.
- if (send_codec.rate != send_codec_inst_.rate) {
- codec_owner_.Encoder()->SetTargetBitrate(send_codec.rate);
- send_codec_inst_.rate = send_codec.rate;
- }
-
- codec_fec_enabled_ =
- codec_fec_enabled_ && codec_owner_.Encoder()->SetFec(codec_fec_enabled_);
-
- return 0;
-}
-
-void CodecManager::RegisterEncoder(AudioEncoder* external_speech_encoder) {
- // Make up a CodecInst.
- send_codec_inst_.channels = external_speech_encoder->NumChannels();
- send_codec_inst_.plfreq = external_speech_encoder->SampleRateHz();
- send_codec_inst_.pacsize = rtc::CheckedDivExact(
- static_cast<int>(external_speech_encoder->Max10MsFramesInAPacket() *
- send_codec_inst_.plfreq),
- 100);
- send_codec_inst_.pltype = -1; // Not valid.
- send_codec_inst_.rate = -1; // Not valid.
- static const char kName[] = "external";
- memcpy(send_codec_inst_.plname, kName, sizeof(kName));
-
- if (stereo_send_)
- dtx_enabled_ = false;
- codec_fec_enabled_ =
- codec_fec_enabled_ && codec_owner_.Encoder()->SetFec(codec_fec_enabled_);
- int cng_pt = dtx_enabled_
- ? CngPayloadType(external_speech_encoder->SampleRateHz())
- : -1;
- int red_pt = red_enabled_ ? RedPayloadType(send_codec_inst_.plfreq) : -1;
- codec_owner_.SetEncoders(external_speech_encoder, cng_pt, vad_mode_, red_pt);
-}
-
-int CodecManager::GetCodecInst(CodecInst* current_codec) const {
- int dummy_id = 0;
- WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, dummy_id,
- "SendCodec()");
-
- if (!codec_owner_.Encoder()) {
- WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, dummy_id,
- "SendCodec Failed, no codec is registered");
- return -1;
- }
- *current_codec = send_codec_inst_;
- return 0;
-}
-
-bool CodecManager::SetCopyRed(bool enable) {
- if (enable && codec_fec_enabled_) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, 0,
- "Codec internal FEC and RED cannot be co-enabled.");
- return false;
- }
- if (enable && RedPayloadType(send_codec_inst_.plfreq) == -1) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, 0,
- "Cannot enable RED at %i Hz.", send_codec_inst_.plfreq);
- return false;
- }
- if (red_enabled_ != enable) {
- red_enabled_ = enable;
- if (codec_owner_.Encoder()) {
- int cng_pt = dtx_enabled_ ? CngPayloadType(send_codec_inst_.plfreq) : -1;
- int red_pt = red_enabled_ ? RedPayloadType(send_codec_inst_.plfreq) : -1;
- codec_owner_.ChangeCngAndRed(cng_pt, vad_mode_, red_pt);
- }
- }
- return true;
-}
-
-int CodecManager::SetVAD(bool enable, ACMVADMode mode) {
- // Sanity check of the mode.
- RTC_DCHECK(mode == VADNormal || mode == VADLowBitrate || mode == VADAggr ||
- mode == VADVeryAggr);
-
- // Check that the send codec is mono. We don't support VAD/DTX for stereo
- // sending.
- if (enable && stereo_send_) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, 0,
- "VAD/DTX not supported for stereo sending");
- dtx_enabled_ = false;
- return -1;
- }
-
- // If a send codec is registered, set VAD/DTX for the codec.
- if (IsOpus(send_codec_inst_)) {
- // VAD/DTX not supported.
- dtx_enabled_ = false;
- return 0;
- }
-
- if (dtx_enabled_ != enable || vad_mode_ != mode) {
- dtx_enabled_ = enable;
- vad_mode_ = mode;
- if (codec_owner_.Encoder()) {
- int cng_pt = dtx_enabled_ ? CngPayloadType(send_codec_inst_.plfreq) : -1;
- int red_pt = red_enabled_ ? RedPayloadType(send_codec_inst_.plfreq) : -1;
- codec_owner_.ChangeCngAndRed(cng_pt, vad_mode_, red_pt);
- }
- }
- return 0;
-}
-
-void CodecManager::VAD(bool* dtx_enabled,
- bool* vad_enabled,
- ACMVADMode* mode) const {
- *dtx_enabled = dtx_enabled_;
- *vad_enabled = dtx_enabled_;
- *mode = vad_mode_;
-}
-
-int CodecManager::SetCodecFEC(bool enable_codec_fec) {
- if (enable_codec_fec && red_enabled_) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, 0,
- "Codec internal FEC and RED cannot be co-enabled.");
- return -1;
- }
-
- RTC_CHECK(codec_owner_.Encoder());
- codec_fec_enabled_ =
- codec_owner_.Encoder()->SetFec(enable_codec_fec) && enable_codec_fec;
- return codec_fec_enabled_ == enable_codec_fec ? 0 : -1;
-}
-
-AudioDecoder* CodecManager::GetAudioDecoder(const CodecInst& codec) {
- return IsIsac(codec) ? codec_owner_.GetIsacDecoder() : nullptr;
-}
-
-int CodecManager::CngPayloadType(int sample_rate_hz) const {
- switch (sample_rate_hz) {
- case 8000:
- return cng_nb_pltype_;
- case 16000:
- return cng_wb_pltype_;
- case 32000:
- return cng_swb_pltype_;
- case 48000:
- return cng_fb_pltype_;
- default:
- FATAL() << sample_rate_hz << " Hz is not supported";
- return -1;
- }
-}
-
-int CodecManager::RedPayloadType(int sample_rate_hz) const {
- switch (sample_rate_hz) {
- case 8000:
- return red_nb_pltype_;
- case 16000:
- case 32000:
- case 48000:
- return -1;
- default:
- FATAL() << sample_rate_hz << " Hz is not supported";
- return -1;
- }
-}
-
-} // namespace acm2
-} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/main/acm2/codec_manager.h b/webrtc/modules/audio_coding/main/acm2/codec_manager.h
deleted file mode 100644
index c6c262ea26..0000000000
--- a/webrtc/modules/audio_coding/main/acm2/codec_manager.h
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_CODEC_MANAGER_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_CODEC_MANAGER_H_
-
-#include "webrtc/base/constructormagic.h"
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/base/thread_checker.h"
-#include "webrtc/modules/audio_coding/main/acm2/codec_owner.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module_typedefs.h"
-#include "webrtc/common_types.h"
-
-namespace webrtc {
-
-class AudioDecoder;
-class AudioEncoder;
-
-namespace acm2 {
-
-class CodecManager final {
- public:
- CodecManager();
- ~CodecManager();
-
- int RegisterEncoder(const CodecInst& send_codec);
-
- void RegisterEncoder(AudioEncoder* external_speech_encoder);
-
- int GetCodecInst(CodecInst* current_codec) const;
-
- bool SetCopyRed(bool enable);
-
- int SetVAD(bool enable, ACMVADMode mode);
-
- void VAD(bool* dtx_enabled, bool* vad_enabled, ACMVADMode* mode) const;
-
- int SetCodecFEC(bool enable_codec_fec);
-
- // Returns a pointer to the AudioDecoder for the given codec. For iSAC,
- // encoding and decoding have to be performed on a shared codec instance. By
- // calling this method, we get the codec instance that ACM owns. If |codec|
- // does not share an instance between encoder and decoder, returns null.
- AudioDecoder* GetAudioDecoder(const CodecInst& codec);
-
- bool stereo_send() const { return stereo_send_; }
-
- bool red_enabled() const { return red_enabled_; }
-
- bool codec_fec_enabled() const { return codec_fec_enabled_; }
-
- AudioEncoder* CurrentEncoder() { return codec_owner_.Encoder(); }
- const AudioEncoder* CurrentEncoder() const { return codec_owner_.Encoder(); }
-
- bool CurrentEncoderIsOpus() const { return encoder_is_opus_; }
-
- private:
- int CngPayloadType(int sample_rate_hz) const;
-
- int RedPayloadType(int sample_rate_hz) const;
-
- rtc::ThreadChecker thread_checker_;
- uint8_t cng_nb_pltype_;
- uint8_t cng_wb_pltype_;
- uint8_t cng_swb_pltype_;
- uint8_t cng_fb_pltype_;
- uint8_t red_nb_pltype_;
- bool stereo_send_;
- bool dtx_enabled_;
- ACMVADMode vad_mode_;
- CodecInst send_codec_inst_;
- bool red_enabled_;
- bool codec_fec_enabled_;
- CodecOwner codec_owner_;
- bool encoder_is_opus_;
-
- RTC_DISALLOW_COPY_AND_ASSIGN(CodecManager);
-};
-
-} // namespace acm2
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_CODEC_MANAGER_H_
diff --git a/webrtc/modules/audio_coding/main/acm2/codec_owner.cc b/webrtc/modules/audio_coding/main/acm2/codec_owner.cc
deleted file mode 100644
index df9a992dac..0000000000
--- a/webrtc/modules/audio_coding/main/acm2/codec_owner.cc
+++ /dev/null
@@ -1,213 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/acm2/codec_owner.h"
-
-#include "webrtc/base/checks.h"
-#include "webrtc/base/logging.h"
-#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/audio_coding/codecs/cng/include/audio_encoder_cng.h"
-#include "webrtc/modules/audio_coding/codecs/g711/include/audio_encoder_pcm.h"
-#ifdef WEBRTC_CODEC_G722
-#include "webrtc/modules/audio_coding/codecs/g722/include/audio_encoder_g722.h"
-#endif
-#ifdef WEBRTC_CODEC_ILBC
-#include "webrtc/modules/audio_coding/codecs/ilbc/include/audio_encoder_ilbc.h"
-#endif
-#ifdef WEBRTC_CODEC_ISACFX
-#include "webrtc/modules/audio_coding/codecs/isac/fix/include/audio_decoder_isacfix.h"
-#include "webrtc/modules/audio_coding/codecs/isac/fix/include/audio_encoder_isacfix.h"
-#endif
-#ifdef WEBRTC_CODEC_ISAC
-#include "webrtc/modules/audio_coding/codecs/isac/main/include/audio_decoder_isac.h"
-#include "webrtc/modules/audio_coding/codecs/isac/main/include/audio_encoder_isac.h"
-#endif
-#ifdef WEBRTC_CODEC_OPUS
-#include "webrtc/modules/audio_coding/codecs/opus/include/audio_encoder_opus.h"
-#endif
-#include "webrtc/modules/audio_coding/codecs/pcm16b/include/audio_encoder_pcm16b.h"
-#ifdef WEBRTC_CODEC_RED
-#include "webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.h"
-#endif
-
-namespace webrtc {
-namespace acm2 {
-
-CodecOwner::CodecOwner() : external_speech_encoder_(nullptr) {
-}
-
-CodecOwner::~CodecOwner() = default;
-
-namespace {
-
-rtc::scoped_ptr<AudioDecoder> CreateIsacDecoder(
- LockedIsacBandwidthInfo* bwinfo) {
-#if defined(WEBRTC_CODEC_ISACFX)
- return rtc_make_scoped_ptr(new AudioDecoderIsacFix(bwinfo));
-#elif defined(WEBRTC_CODEC_ISAC)
- return rtc_make_scoped_ptr(new AudioDecoderIsac(bwinfo));
-#else
- FATAL() << "iSAC is not supported.";
- return rtc::scoped_ptr<AudioDecoder>();
-#endif
-}
-
-// Returns a new speech encoder, or null on error.
-// TODO(kwiberg): Don't handle errors here (bug 5033)
-rtc::scoped_ptr<AudioEncoder> CreateSpeechEncoder(
- const CodecInst& speech_inst,
- LockedIsacBandwidthInfo* bwinfo) {
-#if defined(WEBRTC_CODEC_ISACFX)
- if (STR_CASE_CMP(speech_inst.plname, "isac") == 0)
- return rtc_make_scoped_ptr(new AudioEncoderIsacFix(speech_inst, bwinfo));
-#endif
-#if defined(WEBRTC_CODEC_ISAC)
- if (STR_CASE_CMP(speech_inst.plname, "isac") == 0)
- return rtc_make_scoped_ptr(new AudioEncoderIsac(speech_inst, bwinfo));
-#endif
-#ifdef WEBRTC_CODEC_OPUS
- if (STR_CASE_CMP(speech_inst.plname, "opus") == 0)
- return rtc_make_scoped_ptr(new AudioEncoderOpus(speech_inst));
-#endif
- if (STR_CASE_CMP(speech_inst.plname, "pcmu") == 0)
- return rtc_make_scoped_ptr(new AudioEncoderPcmU(speech_inst));
- if (STR_CASE_CMP(speech_inst.plname, "pcma") == 0)
- return rtc_make_scoped_ptr(new AudioEncoderPcmA(speech_inst));
- if (STR_CASE_CMP(speech_inst.plname, "l16") == 0)
- return rtc_make_scoped_ptr(new AudioEncoderPcm16B(speech_inst));
-#ifdef WEBRTC_CODEC_ILBC
- if (STR_CASE_CMP(speech_inst.plname, "ilbc") == 0)
- return rtc_make_scoped_ptr(new AudioEncoderIlbc(speech_inst));
-#endif
-#ifdef WEBRTC_CODEC_G722
- if (STR_CASE_CMP(speech_inst.plname, "g722") == 0)
- return rtc_make_scoped_ptr(new AudioEncoderG722(speech_inst));
-#endif
- LOG_F(LS_ERROR) << "Could not create encoder of type " << speech_inst.plname;
- return rtc::scoped_ptr<AudioEncoder>();
-}
-
-AudioEncoder* CreateRedEncoder(int red_payload_type,
- AudioEncoder* encoder,
- rtc::scoped_ptr<AudioEncoder>* red_encoder) {
-#ifdef WEBRTC_CODEC_RED
- if (red_payload_type != -1) {
- AudioEncoderCopyRed::Config config;
- config.payload_type = red_payload_type;
- config.speech_encoder = encoder;
- red_encoder->reset(new AudioEncoderCopyRed(config));
- return red_encoder->get();
- }
-#endif
-
- red_encoder->reset();
- return encoder;
-}
-
-void CreateCngEncoder(int cng_payload_type,
- ACMVADMode vad_mode,
- AudioEncoder* encoder,
- rtc::scoped_ptr<AudioEncoder>* cng_encoder) {
- if (cng_payload_type == -1) {
- cng_encoder->reset();
- return;
- }
- AudioEncoderCng::Config config;
- config.num_channels = encoder->NumChannels();
- config.payload_type = cng_payload_type;
- config.speech_encoder = encoder;
- switch (vad_mode) {
- case VADNormal:
- config.vad_mode = Vad::kVadNormal;
- break;
- case VADLowBitrate:
- config.vad_mode = Vad::kVadLowBitrate;
- break;
- case VADAggr:
- config.vad_mode = Vad::kVadAggressive;
- break;
- case VADVeryAggr:
- config.vad_mode = Vad::kVadVeryAggressive;
- break;
- default:
- FATAL();
- }
- cng_encoder->reset(new AudioEncoderCng(config));
-}
-} // namespace
-
-bool CodecOwner::SetEncoders(const CodecInst& speech_inst,
- int cng_payload_type,
- ACMVADMode vad_mode,
- int red_payload_type) {
- speech_encoder_ = CreateSpeechEncoder(speech_inst, &isac_bandwidth_info_);
- if (!speech_encoder_)
- return false;
- external_speech_encoder_ = nullptr;
- ChangeCngAndRed(cng_payload_type, vad_mode, red_payload_type);
- return true;
-}
-
-void CodecOwner::SetEncoders(AudioEncoder* external_speech_encoder,
- int cng_payload_type,
- ACMVADMode vad_mode,
- int red_payload_type) {
- external_speech_encoder_ = external_speech_encoder;
- speech_encoder_.reset();
- ChangeCngAndRed(cng_payload_type, vad_mode, red_payload_type);
-}
-
-void CodecOwner::ChangeCngAndRed(int cng_payload_type,
- ACMVADMode vad_mode,
- int red_payload_type) {
- AudioEncoder* speech_encoder = SpeechEncoder();
- if (cng_payload_type != -1 || red_payload_type != -1) {
- // The RED and CNG encoders need to be in sync with the speech encoder, so
- // reset the latter to ensure its buffer is empty.
- speech_encoder->Reset();
- }
- AudioEncoder* encoder =
- CreateRedEncoder(red_payload_type, speech_encoder, &red_encoder_);
- CreateCngEncoder(cng_payload_type, vad_mode, encoder, &cng_encoder_);
- RTC_DCHECK_EQ(!!speech_encoder_ + !!external_speech_encoder_, 1);
-}
-
-AudioDecoder* CodecOwner::GetIsacDecoder() {
- if (!isac_decoder_)
- isac_decoder_ = CreateIsacDecoder(&isac_bandwidth_info_);
- return isac_decoder_.get();
-}
-
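-// The non-const Encoder() delegates to the const overload (casting const away
-// on the result) so that the encoder-selection logic exists in one place.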
-AudioEncoder* CodecOwner::Encoder() {
- const auto& const_this = *this;
- return const_cast<AudioEncoder*>(const_this.Encoder());
-}
-
-const AudioEncoder* CodecOwner::Encoder() const {
- if (cng_encoder_)
- return cng_encoder_.get();
- if (red_encoder_)
- return red_encoder_.get();
- return SpeechEncoder();
-}
-
-AudioEncoder* CodecOwner::SpeechEncoder() {
- const auto* const_this = this;
- return const_cast<AudioEncoder*>(const_this->SpeechEncoder());
-}
-
-const AudioEncoder* CodecOwner::SpeechEncoder() const {
- RTC_DCHECK(!speech_encoder_ || !external_speech_encoder_);
- return external_speech_encoder_ ? external_speech_encoder_
- : speech_encoder_.get();
-}
-
-} // namespace acm2
-} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/main/acm2/codec_owner.h b/webrtc/modules/audio_coding/main/acm2/codec_owner.h
deleted file mode 100644
index d0fb4f760e..0000000000
--- a/webrtc/modules/audio_coding/main/acm2/codec_owner.h
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_CODEC_OWNER_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_CODEC_OWNER_H_
-
-#include "webrtc/base/constructormagic.h"
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/common_types.h"
-#include "webrtc/modules/audio_coding/codecs/audio_encoder.h"
-#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module_typedefs.h"
-
-#if defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX)
-#include "webrtc/modules/audio_coding/codecs/isac/locked_bandwidth_info.h"
-#else
-// Dummy implementation, for when we don't have iSAC.
-namespace webrtc {
-class LockedIsacBandwidthInfo {};
-}
-#endif
-
-namespace webrtc {
-namespace acm2 {
-
-class CodecOwner {
- public:
- CodecOwner();
- ~CodecOwner();
-
- // Start using the specified encoder. Returns false on error.
- // TODO(kwiberg): Don't handle errors here (bug 5033)
- bool SetEncoders(const CodecInst& speech_inst,
- int cng_payload_type,
- ACMVADMode vad_mode,
- int red_payload_type) WARN_UNUSED_RESULT;
-
- void SetEncoders(AudioEncoder* external_speech_encoder,
- int cng_payload_type,
- ACMVADMode vad_mode,
- int red_payload_type);
-
- void ChangeCngAndRed(int cng_payload_type,
- ACMVADMode vad_mode,
- int red_payload_type);
-
- // Returns a pointer to an iSAC decoder owned by the CodecOwner. The decoder
- // will live as long as the CodecOwner exists.
- AudioDecoder* GetIsacDecoder();
-
- AudioEncoder* Encoder();
- const AudioEncoder* Encoder() const;
-
- private:
- AudioEncoder* SpeechEncoder();
- const AudioEncoder* SpeechEncoder() const;
-
- // At most one of these is non-null:
- rtc::scoped_ptr<AudioEncoder> speech_encoder_;
- AudioEncoder* external_speech_encoder_;
-
- // If we've created an iSAC decoder because someone called GetIsacDecoder,
- // store it here.
- rtc::scoped_ptr<AudioDecoder> isac_decoder_;
-
- // iSAC bandwidth estimation info, for use with iSAC encoders and decoders.
- LockedIsacBandwidthInfo isac_bandwidth_info_;
-
- // |cng_encoder_| and |red_encoder_| are valid iff CNG or RED, respectively,
- // are active.
- rtc::scoped_ptr<AudioEncoder> cng_encoder_;
- rtc::scoped_ptr<AudioEncoder> red_encoder_;
-
- RTC_DISALLOW_COPY_AND_ASSIGN(CodecOwner);
-};
-
-} // namespace acm2
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_CODEC_OWNER_H_
diff --git a/webrtc/modules/audio_coding/main/acm2/codec_owner_unittest.cc b/webrtc/modules/audio_coding/main/acm2/codec_owner_unittest.cc
deleted file mode 100644
index 6c232615a7..0000000000
--- a/webrtc/modules/audio_coding/main/acm2/codec_owner_unittest.cc
+++ /dev/null
@@ -1,210 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <cstring>
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/arraysize.h"
-#include "webrtc/base/safe_conversions.h"
-#include "webrtc/modules/audio_coding/codecs/mock/mock_audio_encoder.h"
-#include "webrtc/modules/audio_coding/main/acm2/codec_owner.h"
-
-namespace webrtc {
-namespace acm2 {
-
-using ::testing::Return;
-using ::testing::InSequence;
-
-namespace {
-const int kDataLengthSamples = 80;
-const int kPacketSizeSamples = 2 * kDataLengthSamples;
-const int16_t kZeroData[kDataLengthSamples] = {0};
-const CodecInst kDefaultCodecInst =
- {0, "pcmu", 8000, kPacketSizeSamples, 1, 64000};
-const int kCngPt = 13;
-} // namespace
-
-class CodecOwnerTest : public ::testing::Test {
- protected:
- CodecOwnerTest() : timestamp_(0) {}
-
- void CreateCodec() {
- ASSERT_TRUE(
- codec_owner_.SetEncoders(kDefaultCodecInst, kCngPt, VADNormal, -1));
- }
-
- void EncodeAndVerify(size_t expected_out_length,
- uint32_t expected_timestamp,
- int expected_payload_type,
- int expected_send_even_if_empty) {
- uint8_t out[kPacketSizeSamples];
- AudioEncoder::EncodedInfo encoded_info;
- encoded_info = codec_owner_.Encoder()->Encode(
- timestamp_, kZeroData, kDataLengthSamples, kPacketSizeSamples, out);
- timestamp_ += kDataLengthSamples;
- EXPECT_TRUE(encoded_info.redundant.empty());
- EXPECT_EQ(expected_out_length, encoded_info.encoded_bytes);
- EXPECT_EQ(expected_timestamp, encoded_info.encoded_timestamp);
- if (expected_payload_type >= 0)
- EXPECT_EQ(expected_payload_type, encoded_info.payload_type);
- if (expected_send_even_if_empty >= 0)
- EXPECT_EQ(static_cast<bool>(expected_send_even_if_empty),
- encoded_info.send_even_if_empty);
- }
-
- // Verify that the speech encoder's Reset method is called when CNG or RED
- // (or both) are switched on, but not when they're switched off.
- void TestCngAndRedResetSpeechEncoder(bool use_cng, bool use_red) {
- MockAudioEncoder speech_encoder;
- EXPECT_CALL(speech_encoder, NumChannels())
- .WillRepeatedly(Return(1));
- EXPECT_CALL(speech_encoder, Max10MsFramesInAPacket())
- .WillRepeatedly(Return(2));
- EXPECT_CALL(speech_encoder, SampleRateHz())
- .WillRepeatedly(Return(8000));
- {
- InSequence s;
- EXPECT_CALL(speech_encoder, Mark("start off"));
- EXPECT_CALL(speech_encoder, Mark("switch on"));
- if (use_cng || use_red)
- EXPECT_CALL(speech_encoder, Reset());
- EXPECT_CALL(speech_encoder, Mark("start on"));
- if (use_cng || use_red)
- EXPECT_CALL(speech_encoder, Reset());
- EXPECT_CALL(speech_encoder, Mark("switch off"));
- EXPECT_CALL(speech_encoder, Die());
- }
-
- int cng_pt = use_cng ? 17 : -1;
- int red_pt = use_red ? 19 : -1;
- speech_encoder.Mark("start off");
- codec_owner_.SetEncoders(&speech_encoder, -1, VADNormal, -1);
- speech_encoder.Mark("switch on");
- codec_owner_.ChangeCngAndRed(cng_pt, VADNormal, red_pt);
- speech_encoder.Mark("start on");
- codec_owner_.SetEncoders(&speech_encoder, cng_pt, VADNormal, red_pt);
- speech_encoder.Mark("switch off");
- codec_owner_.ChangeCngAndRed(-1, VADNormal, -1);
- }
-
- CodecOwner codec_owner_;
- uint32_t timestamp_;
-};
-
-// This test verifies that CNG frames are delivered as expected. Since the frame
-// size is set to 20 ms, we expect the first encode call to produce no output
-// (which is signaled as 0 bytes output of type kNoEncoding). The next encode
-// call should produce one SID frame of 9 bytes. The third call should not
-// result in any output (just like the first one). The fourth and final encode
-// call should produce an "empty frame", which is like no output, but with
-// AudioEncoder::EncodedInfo::send_even_if_empty set to true. (The reason to
-// produce an empty frame is to drive sending of DTMF packets in the RTP/RTCP
-// module.)
-TEST_F(CodecOwnerTest, VerifyCngFrames) {
- CreateCodec();
- uint32_t expected_timestamp = timestamp_;
- // Verify no frame.
- {
- SCOPED_TRACE("First encoding");
- EncodeAndVerify(0, expected_timestamp, -1, -1);
- }
-
- // Verify SID frame delivered.
- {
- SCOPED_TRACE("Second encoding");
- EncodeAndVerify(9, expected_timestamp, kCngPt, 1);
- }
-
- // Verify no frame.
- {
- SCOPED_TRACE("Third encoding");
- EncodeAndVerify(0, expected_timestamp, -1, -1);
- }
-
- // Verify NoEncoding.
- expected_timestamp += 2 * kDataLengthSamples;
- {
- SCOPED_TRACE("Fourth encoding");
- EncodeAndVerify(0, expected_timestamp, kCngPt, 1);
- }
-}
-
-TEST_F(CodecOwnerTest, ExternalEncoder) {
- MockAudioEncoder external_encoder;
- codec_owner_.SetEncoders(&external_encoder, -1, VADNormal, -1);
- const int kSampleRateHz = 8000;
- const int kPacketSizeSamples = kSampleRateHz / 100;
- int16_t audio[kPacketSizeSamples] = {0};
- uint8_t encoded[kPacketSizeSamples];
- AudioEncoder::EncodedInfo info;
- EXPECT_CALL(external_encoder, SampleRateHz())
- .WillRepeatedly(Return(kSampleRateHz));
-
- {
- InSequence s;
- info.encoded_timestamp = 0;
- EXPECT_CALL(external_encoder,
- EncodeInternal(0, audio, arraysize(encoded), encoded))
- .WillOnce(Return(info));
- EXPECT_CALL(external_encoder, Mark("A"));
- EXPECT_CALL(external_encoder, Mark("B"));
- info.encoded_timestamp = 2;
- EXPECT_CALL(external_encoder,
- EncodeInternal(2, audio, arraysize(encoded), encoded))
- .WillOnce(Return(info));
- EXPECT_CALL(external_encoder, Die());
- }
-
- info = codec_owner_.Encoder()->Encode(0, audio, arraysize(audio),
- arraysize(encoded), encoded);
- EXPECT_EQ(0u, info.encoded_timestamp);
- external_encoder.Mark("A");
-
- // Change to internal encoder.
- CodecInst codec_inst = kDefaultCodecInst;
- codec_inst.pacsize = kPacketSizeSamples;
- ASSERT_TRUE(codec_owner_.SetEncoders(codec_inst, -1, VADNormal, -1));
- // Don't expect any more calls to the external encoder.
- info = codec_owner_.Encoder()->Encode(1, audio, arraysize(audio),
- arraysize(encoded), encoded);
- external_encoder.Mark("B");
-
- // Change back to external encoder again.
- codec_owner_.SetEncoders(&external_encoder, -1, VADNormal, -1);
- info = codec_owner_.Encoder()->Encode(2, audio, arraysize(audio),
- arraysize(encoded), encoded);
- EXPECT_EQ(2u, info.encoded_timestamp);
-}
-
-TEST_F(CodecOwnerTest, CngResetsSpeechEncoder) {
- TestCngAndRedResetSpeechEncoder(true, false);
-}
-
-TEST_F(CodecOwnerTest, RedResetsSpeechEncoder) {
- TestCngAndRedResetSpeechEncoder(false, true);
-}
-
-TEST_F(CodecOwnerTest, CngAndRedResetsSpeechEncoder) {
- TestCngAndRedResetSpeechEncoder(true, true);
-}
-
-TEST_F(CodecOwnerTest, NoCngAndRedNoSpeechEncoderReset) {
- TestCngAndRedResetSpeechEncoder(false, false);
-}
-
-TEST_F(CodecOwnerTest, SetEncodersError) {
- CodecInst codec_inst = kDefaultCodecInst;
- static const char bad_name[] = "Robert'); DROP TABLE Students;";
- std::memcpy(codec_inst.plname, bad_name, sizeof bad_name);
- EXPECT_FALSE(codec_owner_.SetEncoders(codec_inst, -1, VADNormal, -1));
-}
-
-} // namespace acm2
-} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/main/acm2/initial_delay_manager.cc b/webrtc/modules/audio_coding/main/acm2/initial_delay_manager.cc
deleted file mode 100644
index 786fb2e527..0000000000
--- a/webrtc/modules/audio_coding/main/acm2/initial_delay_manager.cc
+++ /dev/null
@@ -1,242 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/acm2/initial_delay_manager.h"
-
-namespace webrtc {
-
-namespace acm2 {
-
-InitialDelayManager::InitialDelayManager(int initial_delay_ms,
- int late_packet_threshold)
- : last_packet_type_(kUndefinedPacket),
- last_receive_timestamp_(0),
- timestamp_step_(0),
- audio_payload_type_(kInvalidPayloadType),
- initial_delay_ms_(initial_delay_ms),
- buffered_audio_ms_(0),
- buffering_(true),
- playout_timestamp_(0),
- late_packet_threshold_(late_packet_threshold) {
- last_packet_rtp_info_.header.payloadType = kInvalidPayloadType;
- last_packet_rtp_info_.header.ssrc = 0;
- last_packet_rtp_info_.header.sequenceNumber = 0;
- last_packet_rtp_info_.header.timestamp = 0;
-}
-
-void InitialDelayManager::UpdateLastReceivedPacket(
- const WebRtcRTPHeader& rtp_info,
- uint32_t receive_timestamp,
- PacketType type,
- bool new_codec,
- int sample_rate_hz,
- SyncStream* sync_stream) {
- assert(sync_stream);
-
- // If the payload type of audio packets is changing, |new_codec| has to be
- // true.
- assert(!(!new_codec && type == kAudioPacket &&
- rtp_info.header.payloadType != audio_payload_type_));
-
- // Just shorthands.
- const RTPHeader* current_header = &rtp_info.header;
- RTPHeader* last_header = &last_packet_rtp_info_.header;
-
- // Don't do anything when getting DTMF. The chance of DTMF in applications
- // where an initial delay is required is very low (we don't know of any). This
- // avoids a lot of corner cases. The effect of ignoring DTMF packets is
- // minimal. Note that DTMFs are still inserted into NetEq, just not accounted
- // for here.
- if (type == kAvtPacket ||
- (last_packet_type_ != kUndefinedPacket &&
- !IsNewerSequenceNumber(current_header->sequenceNumber,
- last_header->sequenceNumber))) {
- sync_stream->num_sync_packets = 0;
- return;
- }
-
- // If this is a new codec, or the very first packet, record it and set the
- // state variables.
- if (new_codec ||
- last_packet_rtp_info_.header.payloadType == kInvalidPayloadType) {
- timestamp_step_ = 0;
- if (type == kAudioPacket)
- audio_payload_type_ = rtp_info.header.payloadType;
- else
- audio_payload_type_ = kInvalidPayloadType; // Invalid.
-
- RecordLastPacket(rtp_info, receive_timestamp, type);
- sync_stream->num_sync_packets = 0;
- buffered_audio_ms_ = 0;
- buffering_ = true;
-
- // If |buffering_| is set, then |playout_timestamp_| should have a correct
- // value.
- UpdatePlayoutTimestamp(*current_header, sample_rate_hz);
- return;
- }
-
- uint32_t timestamp_increase = current_header->timestamp -
- last_header->timestamp;
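- // Unsigned 32-bit arithmetic keeps this difference correct even when the
- // RTP timestamp wraps around.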
-
- // |timestamp_increase| is invalid if this is the first packet. The effect is
- // that |buffered_audio_ms_| is not increased.
- if (last_packet_type_ == kUndefinedPacket) {
- timestamp_increase = 0;
- }
-
- if (buffering_) {
- buffered_audio_ms_ += timestamp_increase * 1000 / sample_rate_hz;
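- // E.g. an increase of 480 timestamp units at 16 kHz adds 30 ms.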
-
- // A timestamp that reflects the initial delay, while buffering.
- UpdatePlayoutTimestamp(*current_header, sample_rate_hz);
-
- if (buffered_audio_ms_ >= initial_delay_ms_)
- buffering_ = false;
- }
-
- if (current_header->sequenceNumber == last_header->sequenceNumber + 1) {
- // Two consecutive packets; if the previous packet type is audio, we can
- // update |timestamp_step_|.
- if (last_packet_type_ == kAudioPacket)
- timestamp_step_ = timestamp_increase;
- RecordLastPacket(rtp_info, receive_timestamp, type);
- sync_stream->num_sync_packets = 0;
- return;
- }
-
- uint16_t packet_gap = current_header->sequenceNumber -
- last_header->sequenceNumber - 1;
-
- // For smooth transitions leave a gap between audio and sync packets.
- sync_stream->num_sync_packets = last_packet_type_ == kSyncPacket ?
- packet_gap - 1 : packet_gap - 2;
-
- // Do nothing if we haven't received any audio packet.
- if (sync_stream->num_sync_packets > 0 &&
- audio_payload_type_ != kInvalidPayloadType) {
- if (timestamp_step_ == 0) {
- // Make an estimate for |timestamp_step_| if it has not been updated yet.
- assert(packet_gap > 0);
- timestamp_step_ = timestamp_increase / (packet_gap + 1);
- }
- sync_stream->timestamp_step = timestamp_step_;
-
- // Build the first sync-packet based on the current received packet.
- memcpy(&sync_stream->rtp_info, &rtp_info, sizeof(rtp_info));
- sync_stream->rtp_info.header.payloadType = audio_payload_type_;
-
- uint16_t sequence_number_update = sync_stream->num_sync_packets + 1;
- uint32_t timestamp_update = timestamp_step_ * sequence_number_update;
-
- // Rewind sequence number and timestamps. This will give a more accurate
- // description of the missing packets.
- //
- // Note that we leave a gap between the last packet in sync-stream and the
- // current received packet, so it should be compensated for in the following
- // computation of timestamps and sequence number.
- sync_stream->rtp_info.header.sequenceNumber -= sequence_number_update;
- sync_stream->receive_timestamp = receive_timestamp - timestamp_update;
- sync_stream->rtp_info.header.timestamp -= timestamp_update;
- sync_stream->rtp_info.header.payloadType = audio_payload_type_;
- } else {
- sync_stream->num_sync_packets = 0;
- }
-
- RecordLastPacket(rtp_info, receive_timestamp, type);
- return;
-}
-
-void InitialDelayManager::RecordLastPacket(const WebRtcRTPHeader& rtp_info,
- uint32_t receive_timestamp,
- PacketType type) {
- last_packet_type_ = type;
- last_receive_timestamp_ = receive_timestamp;
- memcpy(&last_packet_rtp_info_, &rtp_info, sizeof(rtp_info));
-}
-
-void InitialDelayManager::LatePackets(
- uint32_t timestamp_now, SyncStream* sync_stream) {
- assert(sync_stream);
- sync_stream->num_sync_packets = 0;
-
- // If there is no estimate of timestamp increment, |timestamp_step_|, then
- // we cannot estimate the number of late packets.
- // If the last packet was CNG, estimating late packets is not meaningful, as
- // a CNG packet is of unknown length. We could set a higher threshold when
- // the last packet is CNG and continue execution, but this is how the ACM1
- // code was written.
- if (timestamp_step_ <= 0 ||
- last_packet_type_ == kCngPacket ||
- last_packet_type_ == kUndefinedPacket ||
- audio_payload_type_ == kInvalidPayloadType) // No audio packet received.
- return;
-
- int num_late_packets = (timestamp_now - last_receive_timestamp_) /
- timestamp_step_;
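- // Worked example (illustrative numbers): with 20 ms packets at 16 kHz,
- // |timestamp_step_| is 320, so a gap of 3200 timestamp units counts as 10
- // late packets.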
-
- if (num_late_packets < late_packet_threshold_)
- return;
-
- int sync_offset = 1; // One gap at the end of the sync-stream.
- if (last_packet_type_ != kSyncPacket) {
- ++sync_offset; // One more gap at the beginning of the sync-stream.
- --num_late_packets;
- }
- uint32_t timestamp_update = sync_offset * timestamp_step_;
-
- sync_stream->num_sync_packets = num_late_packets;
- if (num_late_packets == 0)
- return;
-
- // Build the first sync-packet in the sync-stream.
- memcpy(&sync_stream->rtp_info, &last_packet_rtp_info_,
- sizeof(last_packet_rtp_info_));
-
- // Increase sequence number and timestamps.
- sync_stream->rtp_info.header.sequenceNumber += sync_offset;
- sync_stream->rtp_info.header.timestamp += timestamp_update;
- sync_stream->receive_timestamp = last_receive_timestamp_ + timestamp_update;
- sync_stream->timestamp_step = timestamp_step_;
-
- // Sync-packets have audio payload-type.
- sync_stream->rtp_info.header.payloadType = audio_payload_type_;
-
- uint16_t sequence_number_update = num_late_packets + sync_offset - 1;
- timestamp_update = sequence_number_update * timestamp_step_;
-
- // Fake the last RTP, assuming the caller will inject the whole sync-stream.
- last_packet_rtp_info_.header.timestamp += timestamp_update;
- last_packet_rtp_info_.header.sequenceNumber += sequence_number_update;
- last_packet_rtp_info_.header.payloadType = audio_payload_type_;
- last_receive_timestamp_ += timestamp_update;
-
- last_packet_type_ = kSyncPacket;
- return;
-}
-
-bool InitialDelayManager::GetPlayoutTimestamp(uint32_t* playout_timestamp) {
- if (!buffering_) {
- return false;
- }
- *playout_timestamp = playout_timestamp_;
- return true;
-}
-
-void InitialDelayManager::DisableBuffering() {
- buffering_ = false;
-}
-
-void InitialDelayManager::UpdatePlayoutTimestamp(
- const RTPHeader& current_header, int sample_rate_hz) {
- playout_timestamp_ = current_header.timestamp - static_cast<uint32_t>(
- initial_delay_ms_ * sample_rate_hz / 1000);
-}
-
-} // namespace acm2
-
-} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/main/acm2/initial_delay_manager.h b/webrtc/modules/audio_coding/main/acm2/initial_delay_manager.h
deleted file mode 100644
index c6942ec285..0000000000
--- a/webrtc/modules/audio_coding/main/acm2/initial_delay_manager.h
+++ /dev/null
@@ -1,120 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_INITIAL_DELAY_MANAGER_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_INITIAL_DELAY_MANAGER_H_
-
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/interface/module_common_types.h"
-
-namespace webrtc {
-
-namespace acm2 {
-
-class InitialDelayManager {
- public:
- enum PacketType {
- kUndefinedPacket, kCngPacket, kAvtPacket, kAudioPacket, kSyncPacket };
-
- // Specifies a stream of sync-packets.
- struct SyncStream {
- SyncStream()
- : num_sync_packets(0),
- receive_timestamp(0),
- timestamp_step(0) {
- memset(&rtp_info, 0, sizeof(rtp_info));
- }
-
- int num_sync_packets;
-
- // RTP header of the first sync-packet in the sequence.
- WebRtcRTPHeader rtp_info;
-
- // Received timestamp of the first sync-packet in the sequence.
- uint32_t receive_timestamp;
-
- // Samples per packet.
- uint32_t timestamp_step;
- };
-
- InitialDelayManager(int initial_delay_ms, int late_packet_threshold);
-
- // Update with the last received RTP header, |header|, and received timestamp,
- // |receive_timestamp|. |type| indicates the packet type. If the codec has
- // changed since the last call, |new_codec| should be true. |sample_rate_hz|
- // is the decoder's sampling rate in Hz. |header| has a field to store the
- // sampling rate, but we are not sure it is properly set at the send side, and
- // |header| is declared constant in the caller of this function
- // (AcmReceiver::InsertPacket()). |sync_stream| contains information required
- // to generate a stream of sync packets.
- void UpdateLastReceivedPacket(const WebRtcRTPHeader& header,
- uint32_t receive_timestamp,
- PacketType type,
- bool new_codec,
- int sample_rate_hz,
- SyncStream* sync_stream);
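- //
- // A minimal usage sketch (hypothetical caller, mirroring how
- // AcmReceiver::InsertPacket() is expected to drive this class; the sample
- // rate is illustrative):
- //
- // InitialDelayManager::SyncStream sync_stream;
- // manager.UpdateLastReceivedPacket(header, receive_timestamp,
- // InitialDelayManager::kAudioPacket,
- // false /* new_codec */, 16000,
- // &sync_stream);
- // if (sync_stream.num_sync_packets > 0) {
- // // Inject that many sync-packets, starting at |sync_stream.rtp_info|
- // // and advancing by |sync_stream.timestamp_step| per packet.
- // }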
-
- // Based on the last received timestamp, and given the current timestamp,
- // a sequence of late (or perhaps missing) packets is computed.
- void LatePackets(uint32_t timestamp_now, SyncStream* sync_stream);
-
- // Get playout timestamp.
- // Returns true if the timestamp is valid (when buffering), otherwise false.
- bool GetPlayoutTimestamp(uint32_t* playout_timestamp);
-
- // True if buffered audio is less than the given initial delay (specified in
- // the constructor). Buffering might be disabled by the client of this class.
- bool buffering() { return buffering_; }
-
- // Disable buffering in the class.
- void DisableBuffering();
-
- // True if any packet has been received for buffering.
- bool PacketBuffered() { return last_packet_type_ != kUndefinedPacket; }
-
- private:
- static const uint8_t kInvalidPayloadType = 0xFF;
-
- // Update playout timestamps. While buffering, this is about
- // |initial_delay_ms_| milliseconds behind the latest received timestamp.
- void UpdatePlayoutTimestamp(const RTPHeader& current_header,
- int sample_rate_hz);
-
- // Record an RTP header and related parameters.
- void RecordLastPacket(const WebRtcRTPHeader& rtp_info,
- uint32_t receive_timestamp,
- PacketType type);
-
- PacketType last_packet_type_;
- WebRtcRTPHeader last_packet_rtp_info_;
- uint32_t last_receive_timestamp_;
- uint32_t timestamp_step_;
- uint8_t audio_payload_type_;
- const int initial_delay_ms_;
- int buffered_audio_ms_;
- bool buffering_;
-
- // During the initial phase, where packets are being accumulated and silence
- // is played out, |playout_timestamp_| is a timestamp |initial_delay_ms_|
- // milliseconds earlier than the most recently received RTP timestamp.
- uint32_t playout_timestamp_;
-
- // If the number of late packets (computed from the current timestamp and the
- // last received timestamp) exceeds this value, a sequence of sync-packets is
- // generated.
- const int late_packet_threshold_;
-};
-
-} // namespace acm2
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_INITIAL_DELAY_MANAGER_H_
diff --git a/webrtc/modules/audio_coding/main/acm2/initial_delay_manager_unittest.cc b/webrtc/modules/audio_coding/main/acm2/initial_delay_manager_unittest.cc
deleted file mode 100644
index e973593eb4..0000000000
--- a/webrtc/modules/audio_coding/main/acm2/initial_delay_manager_unittest.cc
+++ /dev/null
@@ -1,376 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <string.h>
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/audio_coding/main/acm2/initial_delay_manager.h"
-
-namespace webrtc {
-
-namespace acm2 {
-
-namespace {
-
-const uint8_t kAudioPayloadType = 0;
-const uint8_t kCngPayloadType = 1;
-const uint8_t kAvtPayloadType = 2;
-
-const int kSamplingRateHz = 16000;
-const int kInitDelayMs = 200;
-const int kFrameSizeMs = 20;
-const uint32_t kTimestampStep = kFrameSizeMs * kSamplingRateHz / 1000;
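-// With 20 ms frames at 16 kHz, this evaluates to 320 samples per packet.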
-const int kLatePacketThreshold = 5;
-
-void InitRtpInfo(WebRtcRTPHeader* rtp_info) {
- memset(rtp_info, 0, sizeof(*rtp_info));
- rtp_info->header.markerBit = false;
- rtp_info->header.payloadType = kAudioPayloadType;
- rtp_info->header.sequenceNumber = 1234;
- rtp_info->header.timestamp = 0xFFFFFFFD; // Close to wrap around.
- rtp_info->header.ssrc = 0x87654321; // Arbitrary.
- rtp_info->header.numCSRCs = 0; // Arbitrary.
- rtp_info->header.paddingLength = 0;
- rtp_info->header.headerLength = sizeof(RTPHeader);
- rtp_info->header.payload_type_frequency = kSamplingRateHz;
- rtp_info->header.extension.absoluteSendTime = 0;
- rtp_info->header.extension.transmissionTimeOffset = 0;
- rtp_info->frameType = kAudioFrameSpeech;
-}
-
-void ForwardRtpHeader(int n,
- WebRtcRTPHeader* rtp_info,
- uint32_t* rtp_receive_timestamp) {
- rtp_info->header.sequenceNumber += n;
- rtp_info->header.timestamp += n * kTimestampStep;
- *rtp_receive_timestamp += n * kTimestampStep;
-}
-
-void NextRtpHeader(WebRtcRTPHeader* rtp_info,
- uint32_t* rtp_receive_timestamp) {
- ForwardRtpHeader(1, rtp_info, rtp_receive_timestamp);
-}
-
-} // namespace
-
-class InitialDelayManagerTest : public ::testing::Test {
- protected:
- InitialDelayManagerTest()
- : manager_(new InitialDelayManager(kInitDelayMs, kLatePacketThreshold)),
- rtp_receive_timestamp_(1111) { } // Arbitrary starting point.
-
- virtual void SetUp() {
- ASSERT_TRUE(manager_.get() != NULL);
- InitRtpInfo(&rtp_info_);
- }
-
- void GetNextRtpHeader(WebRtcRTPHeader* rtp_info,
- uint32_t* rtp_receive_timestamp) const {
- memcpy(rtp_info, &rtp_info_, sizeof(*rtp_info));
- *rtp_receive_timestamp = rtp_receive_timestamp_;
- NextRtpHeader(rtp_info, rtp_receive_timestamp);
- }
-
- rtc::scoped_ptr<InitialDelayManager> manager_;
- WebRtcRTPHeader rtp_info_;
- uint32_t rtp_receive_timestamp_;
-};
-
-TEST_F(InitialDelayManagerTest, Init) {
- EXPECT_TRUE(manager_->buffering());
- EXPECT_FALSE(manager_->PacketBuffered());
- manager_->DisableBuffering();
- EXPECT_FALSE(manager_->buffering());
- InitialDelayManager::SyncStream sync_stream;
-
- // Call before any packet inserted.
- manager_->LatePackets(0x6789ABCD, &sync_stream); // Arbitrary but large
- // receive timestamp.
- EXPECT_EQ(0, sync_stream.num_sync_packets);
-
- // Insert non-audio packets, a CNG and DTMF.
- rtp_info_.header.payloadType = kCngPayloadType;
- manager_->UpdateLastReceivedPacket(rtp_info_, rtp_receive_timestamp_,
- InitialDelayManager::kCngPacket, false,
- kSamplingRateHz, &sync_stream);
- EXPECT_EQ(0, sync_stream.num_sync_packets);
- ForwardRtpHeader(5, &rtp_info_, &rtp_receive_timestamp_);
- rtp_info_.header.payloadType = kAvtPayloadType;
- manager_->UpdateLastReceivedPacket(rtp_info_, rtp_receive_timestamp_,
- InitialDelayManager::kAvtPacket, false,
- kSamplingRateHz, &sync_stream);
- // Gap in sequence numbers, but no audio received; sync-stream should be empty.
- EXPECT_EQ(0, sync_stream.num_sync_packets);
- manager_->LatePackets(0x45678987, &sync_stream); // Large arbitrary receive
- // timestamp.
- // |manager_| has no estimate of timestamp-step and has not received any
- // audio packet.
- EXPECT_EQ(0, sync_stream.num_sync_packets);
-
-
- NextRtpHeader(&rtp_info_, &rtp_receive_timestamp_);
- rtp_info_.header.payloadType = kAudioPayloadType;
- // First packet.
- manager_->UpdateLastReceivedPacket(rtp_info_, rtp_receive_timestamp_,
- InitialDelayManager::kAudioPacket, true,
- kSamplingRateHz, &sync_stream);
- EXPECT_EQ(0, sync_stream.num_sync_packets);
-
- // Call LatePackets() after only one packet is inserted.
- manager_->LatePackets(0x6789ABCD, &sync_stream); // Arbitrary but large
- // receive timestamp.
- EXPECT_EQ(0, sync_stream.num_sync_packets);
-
- // Gap in timestamps, but this packet is also flagged as "new"; therefore,
- // expect an empty sync-stream.
- ForwardRtpHeader(5, &rtp_info_, &rtp_receive_timestamp_);
- manager_->UpdateLastReceivedPacket(rtp_info_, rtp_receive_timestamp_,
- InitialDelayManager::kAudioPacket, true,
- kSamplingRateHz, &sync_stream);
-}
-
-TEST_F(InitialDelayManagerTest, MissingPacket) {
- InitialDelayManager::SyncStream sync_stream;
- // First packet.
- manager_->UpdateLastReceivedPacket(rtp_info_, rtp_receive_timestamp_,
- InitialDelayManager::kAudioPacket, true,
- kSamplingRateHz, &sync_stream);
- ASSERT_EQ(0, sync_stream.num_sync_packets);
-
- // Second packet.
- NextRtpHeader(&rtp_info_, &rtp_receive_timestamp_);
- manager_->UpdateLastReceivedPacket(rtp_info_, rtp_receive_timestamp_,
- InitialDelayManager::kAudioPacket, false,
- kSamplingRateHz, &sync_stream);
- ASSERT_EQ(0, sync_stream.num_sync_packets);
-
- // Third packet, missing packets start from here.
- NextRtpHeader(&rtp_info_, &rtp_receive_timestamp_);
-
- // First sync-packet in sync-stream is one after the above packet.
- WebRtcRTPHeader expected_rtp_info;
- uint32_t expected_receive_timestamp;
- GetNextRtpHeader(&expected_rtp_info, &expected_receive_timestamp);
-
- const int kNumMissingPackets = 10;
- ForwardRtpHeader(kNumMissingPackets, &rtp_info_, &rtp_receive_timestamp_);
- manager_->UpdateLastReceivedPacket(rtp_info_, rtp_receive_timestamp_,
- InitialDelayManager::kAudioPacket, false,
- kSamplingRateHz, &sync_stream);
- EXPECT_EQ(kNumMissingPackets - 2, sync_stream.num_sync_packets);
- EXPECT_EQ(0, memcmp(&expected_rtp_info, &sync_stream.rtp_info,
- sizeof(expected_rtp_info)));
- EXPECT_EQ(kTimestampStep, sync_stream.timestamp_step);
- EXPECT_EQ(expected_receive_timestamp, sync_stream.receive_timestamp);
-}
-
-// There hasn't been any consecutive packets to estimate timestamp-step.
-TEST_F(InitialDelayManagerTest, MissingPacketEstimateTimestamp) {
- InitialDelayManager::SyncStream sync_stream;
- // First packet.
- manager_->UpdateLastReceivedPacket(rtp_info_, rtp_receive_timestamp_,
- InitialDelayManager::kAudioPacket, true,
- kSamplingRateHz, &sync_stream);
- ASSERT_EQ(0, sync_stream.num_sync_packets);
-
- // Second packet, missing packets start here.
- NextRtpHeader(&rtp_info_, &rtp_receive_timestamp_);
-
- // First sync-packet in sync-stream is one after the above.
- WebRtcRTPHeader expected_rtp_info;
- uint32_t expected_receive_timestamp;
- GetNextRtpHeader(&expected_rtp_info, &expected_receive_timestamp);
-
- const int kNumMissingPackets = 10;
- ForwardRtpHeader(kNumMissingPackets, &rtp_info_, &rtp_receive_timestamp_);
- manager_->UpdateLastReceivedPacket(rtp_info_, rtp_receive_timestamp_,
- InitialDelayManager::kAudioPacket, false,
- kSamplingRateHz, &sync_stream);
- EXPECT_EQ(kNumMissingPackets - 2, sync_stream.num_sync_packets);
- EXPECT_EQ(0, memcmp(&expected_rtp_info, &sync_stream.rtp_info,
- sizeof(expected_rtp_info)));
-}
-
-TEST_F(InitialDelayManagerTest, MissingPacketWithCng) {
- InitialDelayManager::SyncStream sync_stream;
-
- // First packet.
- manager_->UpdateLastReceivedPacket(rtp_info_, rtp_receive_timestamp_,
- InitialDelayManager::kAudioPacket, true,
- kSamplingRateHz, &sync_stream);
- ASSERT_EQ(0, sync_stream.num_sync_packets);
-
- // Second packet as CNG.
- NextRtpHeader(&rtp_info_, &rtp_receive_timestamp_);
- rtp_info_.header.payloadType = kCngPayloadType;
- manager_->UpdateLastReceivedPacket(rtp_info_, rtp_receive_timestamp_,
- InitialDelayManager::kCngPacket, false,
- kSamplingRateHz, &sync_stream);
- ASSERT_EQ(0, sync_stream.num_sync_packets);
-
- // Audio packet after CNG. Missing packets start from this packet.
- rtp_info_.header.payloadType = kAudioPayloadType;
- NextRtpHeader(&rtp_info_, &rtp_receive_timestamp_);
-
- // Timestamps are increased by more than a regular packet step.
- const uint32_t kCngTimestampStep = 5 * kTimestampStep;
- rtp_info_.header.timestamp += kCngTimestampStep;
- rtp_receive_timestamp_ += kCngTimestampStep;
-
- // First sync-packet in sync-stream is the one after the above packet.
- WebRtcRTPHeader expected_rtp_info;
- uint32_t expected_receive_timestamp;
- GetNextRtpHeader(&expected_rtp_info, &expected_receive_timestamp);
-
- const int kNumMissingPackets = 10;
- ForwardRtpHeader(kNumMissingPackets, &rtp_info_, &rtp_receive_timestamp_);
- manager_->UpdateLastReceivedPacket(rtp_info_, rtp_receive_timestamp_,
- InitialDelayManager::kAudioPacket, false,
- kSamplingRateHz, &sync_stream);
- EXPECT_EQ(kNumMissingPackets - 2, sync_stream.num_sync_packets);
- EXPECT_EQ(0, memcmp(&expected_rtp_info, &sync_stream.rtp_info,
- sizeof(expected_rtp_info)));
- EXPECT_EQ(kTimestampStep, sync_stream.timestamp_step);
- EXPECT_EQ(expected_receive_timestamp, sync_stream.receive_timestamp);
-}
-
-TEST_F(InitialDelayManagerTest, LatePacket) {
- InitialDelayManager::SyncStream sync_stream;
- // First packet.
- manager_->UpdateLastReceivedPacket(rtp_info_, rtp_receive_timestamp_,
- InitialDelayManager::kAudioPacket, true,
- kSamplingRateHz, &sync_stream);
- ASSERT_EQ(0, sync_stream.num_sync_packets);
-
- // Second packet.
- NextRtpHeader(&rtp_info_, &rtp_receive_timestamp_);
- manager_->UpdateLastReceivedPacket(rtp_info_, rtp_receive_timestamp_,
- InitialDelayManager::kAudioPacket, false,
- kSamplingRateHz, &sync_stream);
- ASSERT_EQ(0, sync_stream.num_sync_packets);
-
- // Timestamp increment for 10 ms.
- const uint32_t kTimestampStep10Ms = kSamplingRateHz / 100;
-
- // 10 ms after the second packet is inserted.
- uint32_t timestamp_now = rtp_receive_timestamp_ + kTimestampStep10Ms;
-
- // Third packet, late packets start from this packet.
- NextRtpHeader(&rtp_info_, &rtp_receive_timestamp_);
-
- // First sync-packet in sync-stream, which is one after the above packet.
- WebRtcRTPHeader expected_rtp_info;
- uint32_t expected_receive_timestamp;
- GetNextRtpHeader(&expected_rtp_info, &expected_receive_timestamp);
-
- const int kLatePacketThreshold = 5;
-
- int expected_num_late_packets = kLatePacketThreshold - 1;
- for (int k = 0; k < 2; ++k) {
- for (int n = 1; n < kLatePacketThreshold * kFrameSizeMs / 10; ++n) {
- manager_->LatePackets(timestamp_now, &sync_stream);
- EXPECT_EQ(0, sync_stream.num_sync_packets) <<
- "try " << k << " loop number " << n;
- timestamp_now += kTimestampStep10Ms;
- }
- manager_->LatePackets(timestamp_now, &sync_stream);
-
- EXPECT_EQ(expected_num_late_packets, sync_stream.num_sync_packets) <<
- "try " << k;
- EXPECT_EQ(kTimestampStep, sync_stream.timestamp_step) <<
- "try " << k;
- EXPECT_EQ(expected_receive_timestamp, sync_stream.receive_timestamp) <<
- "try " << k;
- EXPECT_EQ(0, memcmp(&expected_rtp_info, &sync_stream.rtp_info,
- sizeof(expected_rtp_info)));
-
- timestamp_now += kTimestampStep10Ms;
-
- // |manager_| assumes the |sync_stream| obtained by LatePackets() is fully
- // injected. The last injected packet is a sync-packet; therefore, there will
- // not be any gap between the sync-streams of this and the next iteration.
- ForwardRtpHeader(sync_stream.num_sync_packets, &expected_rtp_info,
- &expected_receive_timestamp);
- expected_num_late_packets = kLatePacketThreshold;
- }
-
- // Test "no-gap" for missing packet after late packet.
- // |expected_rtp_info| is the expected sync-packet if any packet is missing.
- memcpy(&rtp_info_, &expected_rtp_info, sizeof(rtp_info_));
- rtp_receive_timestamp_ = expected_receive_timestamp;
-
- const int kNumMissingPackets = 3; // Arbitrary.
- ForwardRtpHeader(kNumMissingPackets, &rtp_info_, &rtp_receive_timestamp_);
- manager_->UpdateLastReceivedPacket(rtp_info_, rtp_receive_timestamp_,
- InitialDelayManager::kAudioPacket, false,
- kSamplingRateHz, &sync_stream);
-
- // Note that there is one packet gap between the last sync-packet and the
- // latest inserted packet.
- EXPECT_EQ(kNumMissingPackets - 1, sync_stream.num_sync_packets);
- EXPECT_EQ(kTimestampStep, sync_stream.timestamp_step);
- EXPECT_EQ(expected_receive_timestamp, sync_stream.receive_timestamp);
- EXPECT_EQ(0, memcmp(&expected_rtp_info, &sync_stream.rtp_info,
- sizeof(expected_rtp_info)));
-}
-
-TEST_F(InitialDelayManagerTest, NoLatePacketAfterCng) {
- InitialDelayManager::SyncStream sync_stream;
-
- // First packet.
- manager_->UpdateLastReceivedPacket(rtp_info_, rtp_receive_timestamp_,
- InitialDelayManager::kAudioPacket, true,
- kSamplingRateHz, &sync_stream);
- ASSERT_EQ(0, sync_stream.num_sync_packets);
-
- // Second packet as CNG.
- NextRtpHeader(&rtp_info_, &rtp_receive_timestamp_);
- rtp_info_.header.payloadType = kCngPayloadType;
- manager_->UpdateLastReceivedPacket(rtp_info_, rtp_receive_timestamp_,
- InitialDelayManager::kCngPacket, false,
- kSamplingRateHz, &sync_stream);
- ASSERT_EQ(0, sync_stream.num_sync_packets);
-
- // Forward the time by more than |kLatePacketThreshold| packets.
- uint32_t timestamp_now = rtp_receive_timestamp_ + kTimestampStep * (3 +
- kLatePacketThreshold);
-
- manager_->LatePackets(timestamp_now, &sync_stream);
- EXPECT_EQ(0, sync_stream.num_sync_packets);
-}
-
-TEST_F(InitialDelayManagerTest, BufferingAudio) {
- InitialDelayManager::SyncStream sync_stream;
-
- // The very first packet is not counted in the calculation of buffered audio.
- for (int n = 0; n < kInitDelayMs / kFrameSizeMs; ++n) {
- manager_->UpdateLastReceivedPacket(rtp_info_, rtp_receive_timestamp_,
- InitialDelayManager::kAudioPacket,
- n == 0, kSamplingRateHz, &sync_stream);
- EXPECT_EQ(0, sync_stream.num_sync_packets);
- EXPECT_TRUE(manager_->buffering());
- const uint32_t expected_playout_timestamp = rtp_info_.header.timestamp -
- kInitDelayMs * kSamplingRateHz / 1000;
- uint32_t actual_playout_timestamp = 0;
- EXPECT_TRUE(manager_->GetPlayoutTimestamp(&actual_playout_timestamp));
- EXPECT_EQ(expected_playout_timestamp, actual_playout_timestamp);
- NextRtpHeader(&rtp_info_, &rtp_receive_timestamp_);
- }
-
- manager_->UpdateLastReceivedPacket(rtp_info_, rtp_receive_timestamp_,
- InitialDelayManager::kAudioPacket,
- false, kSamplingRateHz, &sync_stream);
- EXPECT_EQ(0, sync_stream.num_sync_packets);
- EXPECT_FALSE(manager_->buffering());
-}
-
-} // namespace acm2
-
-} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/main/acm2/rent_a_codec.cc b/webrtc/modules/audio_coding/main/acm2/rent_a_codec.cc
deleted file mode 100644
index 42f0a4c7db..0000000000
--- a/webrtc/modules/audio_coding/main/acm2/rent_a_codec.cc
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/acm2/rent_a_codec.h"
-
-#include "webrtc/modules/audio_coding/main/acm2/acm_codec_database.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-
-namespace webrtc {
-namespace acm2 {
-
-rtc::Maybe<RentACodec::CodecId> RentACodec::CodecIdByParams(
- const char* payload_name,
- int sampling_freq_hz,
- int channels) {
- return CodecIdFromIndex(
- ACMCodecDB::CodecId(payload_name, sampling_freq_hz, channels));
-}
-
-rtc::Maybe<CodecInst> RentACodec::CodecInstById(CodecId codec_id) {
- rtc::Maybe<int> mi = CodecIndexFromId(codec_id);
- return mi ? rtc::Maybe<CodecInst>(Database()[*mi]) : rtc::Maybe<CodecInst>();
-}
-
-rtc::Maybe<CodecInst> RentACodec::CodecInstByParams(const char* payload_name,
- int sampling_freq_hz,
- int channels) {
- rtc::Maybe<CodecId> codec_id =
- CodecIdByParams(payload_name, sampling_freq_hz, channels);
- if (!codec_id)
- return rtc::Maybe<CodecInst>();
- rtc::Maybe<CodecInst> ci = CodecInstById(*codec_id);
- RTC_DCHECK(ci);
-
- // Keep the number of channels from the function call. For most codecs it
- // will be the same value as in default codec settings, but not for all.
- ci->channels = channels;
-
- return ci;
-}
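-
-// A usage sketch (hypothetical; the payload name and parameters are
-// illustrative):
-//
-// rtc::Maybe<CodecInst> ci = RentACodec::CodecInstByParams("PCMU", 8000, 1);
-// if (ci) {
-// // |ci->pltype|, |ci->plfreq| and |ci->channels| are now usable.
-// }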
-
-bool RentACodec::IsCodecValid(const CodecInst& codec_inst) {
- return ACMCodecDB::CodecNumber(codec_inst) >= 0;
-}
-
-rtc::ArrayView<const CodecInst> RentACodec::Database() {
- return rtc::ArrayView<const CodecInst>(ACMCodecDB::database_,
- NumberOfCodecs());
-}
-
-rtc::Maybe<NetEqDecoder> RentACodec::NetEqDecoderFromCodecId(CodecId codec_id,
- int num_channels) {
- rtc::Maybe<int> i = CodecIndexFromId(codec_id);
- if (!i)
- return rtc::Maybe<NetEqDecoder>();
- const NetEqDecoder ned = ACMCodecDB::neteq_decoders_[*i];
- return (ned == NetEqDecoder::kDecoderOpus && num_channels == 2)
- ? NetEqDecoder::kDecoderOpus_2ch
- : ned;
-}
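-
-// For example (hypothetical call), a stereo Opus codec id maps to the
-// two-channel decoder:
-// NetEqDecoderFromCodecId(CodecId::kOpus, 2) yields kDecoderOpus_2ch.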
-
-} // namespace acm2
-} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/main/acm2/rent_a_codec.h b/webrtc/modules/audio_coding/main/acm2/rent_a_codec.h
deleted file mode 100644
index 55a5d0361a..0000000000
--- a/webrtc/modules/audio_coding/main/acm2/rent_a_codec.h
+++ /dev/null
@@ -1,162 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_RENT_A_CODEC_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_RENT_A_CODEC_H_
-
-#include <stddef.h>
-
-#include "webrtc/base/array_view.h"
-#include "webrtc/base/maybe.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-struct CodecInst;
-
-namespace acm2 {
-
-class RentACodec {
- public:
- enum class CodecId {
-#if defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX)
- kISAC,
-#endif
-#ifdef WEBRTC_CODEC_ISAC
- kISACSWB,
-#endif
- // Mono
- kPCM16B,
- kPCM16Bwb,
- kPCM16Bswb32kHz,
- // Stereo
- kPCM16B_2ch,
- kPCM16Bwb_2ch,
- kPCM16Bswb32kHz_2ch,
- // Mono
- kPCMU,
- kPCMA,
- // Stereo
- kPCMU_2ch,
- kPCMA_2ch,
-#ifdef WEBRTC_CODEC_ILBC
- kILBC,
-#endif
-#ifdef WEBRTC_CODEC_G722
- kG722, // Mono
- kG722_2ch, // Stereo
-#endif
-#ifdef WEBRTC_CODEC_OPUS
- kOpus, // Mono and stereo
-#endif
- kCNNB,
- kCNWB,
- kCNSWB,
-#ifdef ENABLE_48000_HZ
- kCNFB,
-#endif
- kAVT,
-#ifdef WEBRTC_CODEC_RED
- kRED,
-#endif
- kNumCodecs, // Implementation detail. Don't use.
-
-// Set unsupported codecs to -1.
-#if !defined(WEBRTC_CODEC_ISAC) && !defined(WEBRTC_CODEC_ISACFX)
- kISAC = -1,
-#endif
-#ifndef WEBRTC_CODEC_ISAC
- kISACSWB = -1,
-#endif
- // 48 kHz not supported, always set to -1.
- kPCM16Bswb48kHz = -1,
-#ifndef WEBRTC_CODEC_ILBC
- kILBC = -1,
-#endif
-#ifndef WEBRTC_CODEC_G722
- kG722 = -1, // Mono
- kG722_2ch = -1, // Stereo
-#endif
-#ifndef WEBRTC_CODEC_OPUS
- kOpus = -1, // Mono and stereo
-#endif
-#ifndef WEBRTC_CODEC_RED
- kRED = -1,
-#endif
-#ifndef ENABLE_48000_HZ
- kCNFB = -1,
-#endif
-
- kNone = -1
- };
-
- enum class NetEqDecoder {
- kDecoderPCMu,
- kDecoderPCMa,
- kDecoderPCMu_2ch,
- kDecoderPCMa_2ch,
- kDecoderILBC,
- kDecoderISAC,
- kDecoderISACswb,
- kDecoderPCM16B,
- kDecoderPCM16Bwb,
- kDecoderPCM16Bswb32kHz,
- kDecoderPCM16Bswb48kHz,
- kDecoderPCM16B_2ch,
- kDecoderPCM16Bwb_2ch,
- kDecoderPCM16Bswb32kHz_2ch,
- kDecoderPCM16Bswb48kHz_2ch,
- kDecoderPCM16B_5ch,
- kDecoderG722,
- kDecoderG722_2ch,
- kDecoderRED,
- kDecoderAVT,
- kDecoderCNGnb,
- kDecoderCNGwb,
- kDecoderCNGswb32kHz,
- kDecoderCNGswb48kHz,
- kDecoderArbitrary,
- kDecoderOpus,
- kDecoderOpus_2ch,
- };
-
- static inline size_t NumberOfCodecs() {
- return static_cast<size_t>(CodecId::kNumCodecs);
- }
-
- static inline rtc::Maybe<int> CodecIndexFromId(CodecId codec_id) {
- const int i = static_cast<int>(codec_id);
- return i < static_cast<int>(NumberOfCodecs()) ? i : rtc::Maybe<int>();
- }
-
- static inline rtc::Maybe<CodecId> CodecIdFromIndex(int codec_index) {
- return static_cast<size_t>(codec_index) < NumberOfCodecs()
- ? static_cast<RentACodec::CodecId>(codec_index)
- : rtc::Maybe<RentACodec::CodecId>();
- }
-
- static rtc::Maybe<CodecId> CodecIdByParams(const char* payload_name,
- int sampling_freq_hz,
- int channels);
- static rtc::Maybe<CodecInst> CodecInstById(CodecId codec_id);
- static rtc::Maybe<CodecInst> CodecInstByParams(const char* payload_name,
- int sampling_freq_hz,
- int channels);
- static bool IsCodecValid(const CodecInst& codec_inst);
- static rtc::ArrayView<const CodecInst> Database();
-
- static rtc::Maybe<NetEqDecoder> NetEqDecoderFromCodecId(CodecId codec_id,
- int num_channels);
-};
-
-} // namespace acm2
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_RENT_A_CODEC_H_
diff --git a/webrtc/modules/audio_coding/main/audio_coding_module.gypi b/webrtc/modules/audio_coding/main/audio_coding_module.gypi
deleted file mode 100644
index 6fb37d25fa..0000000000
--- a/webrtc/modules/audio_coding/main/audio_coding_module.gypi
+++ /dev/null
@@ -1,193 +0,0 @@
-# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS. All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-
-{
- 'variables': {
- 'audio_coding_dependencies': [
- 'cng',
- 'g711',
- 'pcm16b',
- '<(webrtc_root)/common.gyp:webrtc_common',
- '<(webrtc_root)/common_audio/common_audio.gyp:common_audio',
- '<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
- ],
- 'audio_coding_defines': [],
- 'conditions': [
- ['include_opus==1', {
- 'audio_coding_dependencies': ['webrtc_opus',],
- 'audio_coding_defines': ['WEBRTC_CODEC_OPUS',],
- }],
- ['build_with_mozilla==0', {
- 'conditions': [
- ['target_arch=="arm"', {
- 'audio_coding_dependencies': ['isac_fix',],
- 'audio_coding_defines': ['WEBRTC_CODEC_ISACFX',],
- }, {
- 'audio_coding_dependencies': ['isac',],
- 'audio_coding_defines': ['WEBRTC_CODEC_ISAC',],
- }],
- ],
- 'audio_coding_dependencies': ['g722',],
- 'audio_coding_defines': ['WEBRTC_CODEC_G722',],
- }],
- ['build_with_mozilla==0 and build_with_chromium==0', {
- 'audio_coding_dependencies': ['ilbc', 'red',],
- 'audio_coding_defines': ['WEBRTC_CODEC_ILBC', 'WEBRTC_CODEC_RED',],
- }],
- ],
- },
- 'targets': [
- {
- 'target_name': 'rent_a_codec',
- 'type': 'static_library',
- 'defines': [
- '<@(audio_coding_defines)',
- ],
- 'dependencies': [
- '<(webrtc_root)/common.gyp:webrtc_common',
- ],
- 'include_dirs': [
- '<(webrtc_root)',
- ],
- 'direct_dependent_settings': {
- 'include_dirs': [
- '<(webrtc_root)',
- ],
- },
- 'sources': [
- 'acm2/acm_codec_database.cc',
- 'acm2/acm_codec_database.h',
- 'acm2/rent_a_codec.cc',
- 'acm2/rent_a_codec.h',
- ],
- },
- {
- 'target_name': 'audio_coding_module',
- 'type': 'static_library',
- 'defines': [
- '<@(audio_coding_defines)',
- ],
- 'dependencies': [
- '<@(audio_coding_dependencies)',
- '<(webrtc_root)/common.gyp:webrtc_common',
- '<(webrtc_root)/webrtc.gyp:rtc_event_log',
- 'neteq',
- 'rent_a_codec',
- ],
- 'include_dirs': [
- 'include',
- '../../interface',
- '<(webrtc_root)',
- ],
- 'direct_dependent_settings': {
- 'include_dirs': [
- 'include',
- '../../interface',
- '<(webrtc_root)',
- ],
- },
- 'sources': [
- 'acm2/acm_common_defs.h',
- 'acm2/acm_receiver.cc',
- 'acm2/acm_receiver.h',
- 'acm2/acm_resampler.cc',
- 'acm2/acm_resampler.h',
- 'acm2/audio_coding_module.cc',
- 'acm2/audio_coding_module_impl.cc',
- 'acm2/audio_coding_module_impl.h',
- 'acm2/call_statistics.cc',
- 'acm2/call_statistics.h',
- 'acm2/codec_manager.cc',
- 'acm2/codec_manager.h',
- 'acm2/codec_owner.cc',
- 'acm2/codec_owner.h',
- 'acm2/initial_delay_manager.cc',
- 'acm2/initial_delay_manager.h',
- 'include/audio_coding_module.h',
- 'include/audio_coding_module_typedefs.h',
- ],
- },
- ],
- 'conditions': [
- ['include_tests==1', {
- 'targets': [
- {
- 'target_name': 'acm_receive_test',
- 'type': 'static_library',
- 'defines': [
- '<@(audio_coding_defines)',
- ],
- 'dependencies': [
- '<@(audio_coding_dependencies)',
- 'audio_coding_module',
- 'neteq_unittest_tools',
- '<(DEPTH)/testing/gtest.gyp:gtest',
- ],
- 'sources': [
- 'acm2/acm_receive_test_oldapi.cc',
- 'acm2/acm_receive_test_oldapi.h',
- ],
- }, # acm_receive_test
- {
- 'target_name': 'acm_send_test',
- 'type': 'static_library',
- 'defines': [
- '<@(audio_coding_defines)',
- ],
- 'dependencies': [
- '<@(audio_coding_dependencies)',
- 'audio_coding_module',
- 'neteq_unittest_tools',
- '<(DEPTH)/testing/gtest.gyp:gtest',
- ],
- 'sources': [
- 'acm2/acm_send_test_oldapi.cc',
- 'acm2/acm_send_test_oldapi.h',
- ],
- }, # acm_send_test
- {
- 'target_name': 'delay_test',
- 'type': 'executable',
- 'dependencies': [
- 'audio_coding_module',
- '<(DEPTH)/testing/gtest.gyp:gtest',
- '<(webrtc_root)/common.gyp:webrtc_common',
- '<(webrtc_root)/test/test.gyp:test_support',
- '<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
- '<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers_default',
- '<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
- ],
- 'sources': [
- 'test/delay_test.cc',
- 'test/Channel.cc',
- 'test/PCMFile.cc',
- 'test/utility.cc',
- ],
- }, # delay_test
- {
- 'target_name': 'insert_packet_with_timing',
- 'type': 'executable',
- 'dependencies': [
- 'audio_coding_module',
- '<(DEPTH)/testing/gtest.gyp:gtest',
- '<(webrtc_root)/common.gyp:webrtc_common',
- '<(webrtc_root)/test/test.gyp:test_support',
- '<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
- '<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers_default',
- '<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
- ],
- 'sources': [
- 'test/insert_packet_with_timing.cc',
- 'test/Channel.cc',
- 'test/PCMFile.cc',
- ],
- }, # insert_packet_with_timing
- ],
- }],
- ],
-}
diff --git a/webrtc/modules/audio_coding/main/include/audio_coding_module.h b/webrtc/modules/audio_coding/main/include/audio_coding_module.h
deleted file mode 100644
index b145cf423e..0000000000
--- a/webrtc/modules/audio_coding/main/include/audio_coding_module.h
+++ /dev/null
@@ -1,758 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_INCLUDE_AUDIO_CODING_MODULE_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_INCLUDE_AUDIO_CODING_MODULE_H_
-
-#include <vector>
-
-#include "webrtc/common_types.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_codec_database.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module_typedefs.h"
-#include "webrtc/modules/audio_coding/neteq/include/neteq.h"
-#include "webrtc/modules/interface/module.h"
-#include "webrtc/system_wrappers/include/clock.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-// forward declarations
-struct CodecInst;
-struct WebRtcRTPHeader;
-class AudioDecoder;
-class AudioEncoder;
-class AudioFrame;
-class RTPFragmentationHeader;
-
-#define WEBRTC_10MS_PCM_AUDIO 960 // 16 bits super wideband 48 kHz
-
-// Callback class used for sending data ready to be packetized
-class AudioPacketizationCallback {
- public:
- virtual ~AudioPacketizationCallback() {}
-
- virtual int32_t SendData(FrameType frame_type,
- uint8_t payload_type,
- uint32_t timestamp,
- const uint8_t* payload_data,
- size_t payload_len_bytes,
- const RTPFragmentationHeader* fragmentation) = 0;
-};
-
-// Callback class used for reporting VAD decision
-class ACMVADCallback {
- public:
- virtual ~ACMVADCallback() {}
-
- virtual int32_t InFrameType(FrameType frame_type) = 0;
-};
-
-class AudioCodingModule {
- protected:
- AudioCodingModule() {}
-
- public:
- struct Config {
- Config() : id(0), neteq_config(), clock(Clock::GetRealTimeClock()) {}
-
- int id;
- NetEq::Config neteq_config;
- Clock* clock;
- };
-
- ///////////////////////////////////////////////////////////////////////////
- // Creation and destruction of an ACM.
- //
- // The second method is used for testing, where a simulated clock can be
- // injected into ACM. ACM will take ownership of the clock object and
- // delete it when destroyed.
- //
- static AudioCodingModule* Create(int id);
- static AudioCodingModule* Create(int id, Clock* clock);
- static AudioCodingModule* Create(const Config& config);
- virtual ~AudioCodingModule() = default;
-
- ///////////////////////////////////////////////////////////////////////////
- // Utility functions
- //
-
- ///////////////////////////////////////////////////////////////////////////
- // uint8_t NumberOfCodecs()
- // Returns number of supported codecs.
- //
- // Return value:
- // number of supported codecs.
- //
- static int NumberOfCodecs();
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t Codec()
- // Get supported codec with list number.
- //
- // Input:
- // -list_id : list number.
- //
- // Output:
- // -codec : a structure to which the parameters of the
- // codec, given by the list number, are written.
- //
- // Return value:
- // -1 if the list number (list_id) is invalid.
- // 0 if succeeded.
- //
- static int Codec(int list_id, CodecInst* codec);
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t Codec()
- // Get supported codec with the given codec name, sampling frequency, and
- // a given number of channels.
- //
- // Input:
- // -payload_name : name of the codec.
- // -sampling_freq_hz : sampling frequency of the codec. Note! for RED
- // a sampling frequency of -1 is a valid input.
- // -channels : number of channels (1 - mono, 2 - stereo).
- //
- // Output:
- // -codec : a structure where the function returns the
- // default parameters of the codec.
- //
- // Return value:
- // -1 if no codec matches the given parameters.
- // 0 if succeeded.
- //
- static int Codec(const char* payload_name, CodecInst* codec,
- int sampling_freq_hz, int channels);
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t Codec()
- //
- // Returns the list number of the given codec name, sampling frequency, and
- // a given number of channels.
- //
- // Input:
- // -payload_name : name of the codec.
- // -sampling_freq_hz : sampling frequency of the codec. Note! for RED
- // a sampling frequency of -1 is a valid input.
- // -channels : number of channels (1 - mono, 2 - stereo).
- //
- // Return value:
- // if the codec is found, the index of the codec in the list,
- // -1 if the codec is not found.
- //
- static int Codec(const char* payload_name, int sampling_freq_hz,
- int channels);
-
- ///////////////////////////////////////////////////////////////////////////
- // bool IsCodecValid()
- // Checks the validity of the parameters of the given codec.
- //
- // Input:
- // -codec : the structure which keeps the parameters of the
- // codec.
- //
- // Return value:
- // true if the parameters are valid,
- // false if any parameter is not valid.
- //
- static bool IsCodecValid(const CodecInst& codec);
-
- ///////////////////////////////////////////////////////////////////////////
- // Sender
- //
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t RegisterSendCodec()
- // Registers a codec, specified by |send_codec|, as sending codec.
- // This API can be called multiple times to register codecs. The last codec
- // registered overwrites the previous ones.
- // The API can also be used to change the payload type for CNG and RED,
- // which are registered with default payload types.
- // Note that registering CNG and RED won't overwrite speech codecs.
- // This API can be called to set/change the send payload-type, frame-size
- // or encoding rate (if applicable for the codec).
- //
- // Note: If a stereo codec is registered as send codec, VAD/DTX will
- // automatically be turned off, since it is not supported for stereo sending.
- //
- // Note: If a secondary encoder is already registered, and the new send-codec
- // has a sampling rate that does not match the secondary encoder, the
- // secondary encoder will be unregistered.
- //
- // Input:
- // -send_codec : Parameters of the codec to be registered, c.f.
- // common_types.h for the definition of
- // CodecInst.
- //
- // Return value:
- // -1 if failed to initialize,
- // 0 if succeeded.
- //
- virtual int32_t RegisterSendCodec(const CodecInst& send_codec) = 0;
-
- // Registers |external_speech_encoder| as encoder. The new encoder will
- // replace any previously registered speech encoder (internal or external).
- virtual void RegisterExternalSendCodec(
- AudioEncoder* external_speech_encoder) = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t SendCodec()
- // Get parameters for the codec currently registered as send codec.
- //
- // Output:
- // -current_send_codec : parameters of the send codec.
- //
- // Return value:
- // -1 if failed to get send codec,
- // 0 if succeeded.
- //
- virtual int32_t SendCodec(CodecInst* current_send_codec) const = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t SendFrequency()
- // Get the sampling frequency of the current encoder in Hertz.
- //
- // Return value:
- // positive; sampling frequency [Hz] of the current encoder.
- // -1 if an error has happened.
- //
- virtual int32_t SendFrequency() const = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // Sets the bitrate to the specified value in bits/sec. If the value is not
- // supported by the codec, it will choose another appropriate value.
- virtual void SetBitRate(int bitrate_bps) = 0;
-
- // int32_t RegisterTransportCallback()
- // Register a transport callback which will be called to deliver
- // the encoded buffers whenever Process() is called and a
- // bit-stream is ready.
- //
- // Input:
- // -transport : pointer to the callback class
- // transport->SendData() is called whenever
- // Process() is called and bit-stream is ready
- // to deliver.
- //
- // Return value:
- // -1 if the transport callback could not be registered
- // 0 if registration is successful.
- //
- virtual int32_t RegisterTransportCallback(
- AudioPacketizationCallback* transport) = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t Add10MsData()
- // Add 10 ms of raw (PCM) audio data and encode it. If the sampling
- // frequency of the audio does not match the sampling frequency of the
- // current encoder ACM will resample the audio. If an encoded packet was
- // produced, it will be delivered via the callback object registered using
- // RegisterTransportCallback, and the return value from this function will
- // be the number of bytes encoded.
- //
- // Input:
- // -audio_frame : the input audio frame, containing raw audio
- // sampling frequency etc.,
- // c.f. module_common_types.h for definition of
- // AudioFrame.
- //
- // Return value:
- // >= 0 number of bytes encoded.
- // -1 some error occurred.
- //
- virtual int32_t Add10MsData(const AudioFrame& audio_frame) = 0;
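- //
- // A minimal send-side sketch (hypothetical; error handling is omitted and
- // the codec name, clock rate, and callback/helper names are illustrative):
- //
- // CodecInst codec;
- // AudioCodingModule::Codec("PCMU", &codec, 8000, 1);
- // acm->RegisterSendCodec(codec);
- // acm->RegisterTransportCallback(&my_packetization_callback);
- // // Every 10 ms:
- // acm->Add10MsData(Next10MsOfCapturedAudio());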
-
- ///////////////////////////////////////////////////////////////////////////
- // (RED) Redundant Coding
- //
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t SetREDStatus()
- // Configure RED status, i.e., on/off.
- //
- // RFC 2198 describes a solution which has a single payload type which
- // signifies a packet with redundancy. That packet then becomes a container,
- // encapsulating multiple payloads into a single RTP packet.
- // Such a scheme is flexible, since any amount of redundancy may be
- // encapsulated within a single packet. There is, however, a small overhead
- // since each encapsulated payload must be preceded by a header indicating
- // the type of data enclosed.
- //
- // Input:
- // -enable_red : if true RED is enabled, otherwise RED is
- // disabled.
- //
- // Return value:
- // -1 if failed to set RED status,
- // 0 if succeeded.
- //
- virtual int32_t SetREDStatus(bool enable_red) = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // bool REDStatus()
- // Get RED status
- //
- // Return value:
- // true if RED is enabled,
- // false if RED is disabled.
- //
- virtual bool REDStatus() const = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // (FEC) Forward Error Correction (codec internal)
- //
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t SetCodecFEC()
- // Configures codec-internal FEC status, i.e., on/off. Has no effect on
- // codecs that do not provide internal FEC.
- //
- // Input:
- // -enable_fec : if true FEC will be enabled, otherwise FEC is
- // disabled.
- //
- // Return value:
- // -1 if failed, or the codec does not support FEC
- // 0 if succeeded.
- //
- virtual int SetCodecFEC(bool enable_codec_fec) = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // bool CodecFEC()
- // Gets status of codec internal FEC.
- //
- // Return value:
- // true if FEC is enabled,
- // false if FEC is disabled.
- //
- virtual bool CodecFEC() const = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // int SetPacketLossRate()
- // Sets the expected packet loss rate for encoding. Some encoders provide
- // packet-loss-aware encoding to make the stream less sensitive to packet
- // losses, e.g. through FEC. Has no effect on codecs that do not provide
- // such encoding.
- //
- // Input:
- // -packet_loss_rate : expected packet loss rate (0 -- 100 inclusive).
- //
- // Return value
- // -1 if failed to set packet loss rate,
- // 0 if succeeded.
- //
- virtual int SetPacketLossRate(int packet_loss_rate) = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // (VAD) Voice Activity Detection
- //
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t SetVAD()
- // If DTX is enabled and the codec does not have internal DTX/VAD,
- // WebRtc VAD will be automatically enabled and |enable_vad| is ignored.
- //
- // If DTX is disabled but VAD is enabled, no DTX packets are sent,
- // regardless of whether the codec has internal DTX/VAD or not. In this
- // case, WebRtc VAD runs to label frames as active/inactive.
- //
- // NOTE! VAD/DTX is not supported when sending stereo.
- //
- // Inputs:
- // -enable_dtx : if true DTX is enabled,
- // otherwise DTX is disabled.
- // -enable_vad : if true VAD is enabled,
- // otherwise VAD is disabled.
- // -vad_mode : determines the aggressiveness of VAD. A more
- // aggressive mode results in more frames labeled
- // as in-active, c.f. definition of
- // ACMVADMode in audio_coding_module_typedefs.h
- // for valid values.
- //
- // Return value:
- // -1 if failed to set up VAD/DTX,
- // 0 if succeeded.
- //
- virtual int32_t SetVAD(const bool enable_dtx = true,
- const bool enable_vad = false,
- const ACMVADMode vad_mode = VADNormal) = 0;
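- //
- // For example (hypothetical call; the chosen mode is illustrative), enable
- // DTX together with WebRtc VAD in the bit-rate-saving mode:
- //
- // acm->SetVAD(true /* enable_dtx */, true /* enable_vad */, VADLowBitrate);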
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t VAD()
- // Get VAD status.
- //
- // Outputs:
- // -dtx_enabled : is set to true if DTX is enabled, otherwise
- // is set to false.
- // -vad_enabled : is set to true if VAD is enabled, otherwise
- // is set to false.
- // -vad_mode : is set to the current aggressiveness of VAD.
- //
- // Return value:
- // -1 if fails to retrieve the setting of DTX/VAD,
- // 0 if succeeded.
- //
- virtual int32_t VAD(bool* dtx_enabled, bool* vad_enabled,
- ACMVADMode* vad_mode) const = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t RegisterVADCallback()
- // Call this method to register a callback function which is called
- // any time that ACM encounters an empty frame, that is, a frame recognized
- // as inactive. Depending on the codec, WebRtc VAD or the codec's internal
- // VAD is employed to identify a frame as active/inactive.
- //
- // Input:
- // -vad_callback : pointer to a callback function.
- //
- // Return value:
- // -1 if failed to register the callback function.
- // 0 if the callback function is registered successfully.
- //
- virtual int32_t RegisterVADCallback(ACMVADCallback* vad_callback) = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // Receiver
- //
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t InitializeReceiver()
- // Any decoder-related state of ACM will be reset to the
- // same state as when ACM was created. This will not interrupt or
- // affect the encoding functionality of ACM. ACM loses all
- // decoding-related settings by calling this function.
- // For instance, all registered codecs are deleted and have to be
- // registered again.
- //
- // Return value:
- // -1 if failed to initialize,
- // 0 if succeeded.
- //
- virtual int32_t InitializeReceiver() = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t ReceiveFrequency()
- // Get sampling frequency of the last received payload.
- //
- // Return value:
- // non-negative the sampling frequency in Hertz.
- // -1 if an error has occurred.
- //
- virtual int32_t ReceiveFrequency() const = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t PlayoutFrequency()
- // Get sampling frequency of audio played out.
- //
- // Return value:
- // the sampling frequency in Hertz.
- //
- virtual int32_t PlayoutFrequency() const = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t RegisterReceiveCodec()
- // Register possible decoders; this can be called multiple times for
- // codecs, CNG-NB, CNG-WB, CNG-SWB, AVT and RED.
- //
- // Input:
- // -receive_codec : parameters of the codec to be registered, c.f.
- // common_types.h for the definition of
- // CodecInst.
- //
- // Return value:
- // -1 if failed to register the codec
- // 0 if the codec registered successfully.
- //
- virtual int RegisterReceiveCodec(const CodecInst& receive_codec) = 0;
-
- virtual int RegisterExternalReceiveCodec(int rtp_payload_type,
- AudioDecoder* external_decoder,
- int sample_rate_hz,
- int num_channels) = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t UnregisterReceiveCodec()
- // Unregister the codec currently registered with a specific payload type
- // from the list of possible receive codecs.
- //
- // Input:
- // -payload_type : The number representing the payload type to
- // unregister.
- //
- // Return value:
- // -1 if it fails to unregister.
- // 0 if the given codec is successfully unregistered.
- //
- virtual int UnregisterReceiveCodec(
- uint8_t payload_type) = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t ReceiveCodec()
- // Get the codec associated with last received payload.
- //
- // Output:
- // -curr_receive_codec : parameters of the codec associated with the last
- // received payload, c.f. common_types.h for
- // the definition of CodecInst.
- //
- // Return value:
- // -1 if failed to retrieve the codec,
- // 0 if the codec is successfully retrieved.
- //
- virtual int32_t ReceiveCodec(CodecInst* curr_receive_codec) const = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t IncomingPacket()
- // Call this function to insert a parsed RTP packet into ACM.
- //
- // Inputs:
- // -incoming_payload : received payload.
- // -payload_len_bytes : the length of payload in bytes.
- // -rtp_info : the relevant information retrieved from RTP
- // header.
- //
- // Return value:
- // -1 if failed to push in the payload
- // 0 if payload is successfully pushed in.
- //
- virtual int32_t IncomingPacket(const uint8_t* incoming_payload,
- const size_t payload_len_bytes,
- const WebRtcRTPHeader& rtp_info) = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t IncomingPayload()
- // Call this API to push incoming payloads when there is no rtp-info.
- // The rtp-info will be created in ACM. One usage of this API is when
- // pre-encoded files are pushed into ACM.
- //
- // Inputs:
- // -incoming_payload : received payload.
- // -payload_len_byte : the length, in bytes, of the received payload.
- // -payload_type : the payload-type. This specifies which codec has
- // to be used to decode the payload.
- // -timestamp : send timestamp of the payload. ACM starts with
- // a random value and increments it by the
- // packet-size, which is given when the codec in
- // question is registered by RegisterReceiveCodec().
- // Therefore, it is essential to have the timestamp
- // if the frame-size differs from the registered
- // value or if the incoming payload contains DTX
- // packets.
- //
- // Return value:
- // -1 if failed to push in the payload
- // 0 if payload is successfully pushed in.
- //
- virtual int32_t IncomingPayload(const uint8_t* incoming_payload,
- const size_t payload_len_byte,
- const uint8_t payload_type,
- const uint32_t timestamp = 0) = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // int SetMinimumPlayoutDelay()
- // Set a minimum for the playout delay, used for lip-sync. NetEq maintains
- // such a delay unless channel conditions yield a higher delay.
- //
- // Input:
- // -time_ms : minimum delay in milliseconds.
- //
- // Return value:
- // -1 if failed to set the delay,
- // 0 if the minimum delay is set.
- //
- virtual int SetMinimumPlayoutDelay(int time_ms) = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // int SetMaximumPlayoutDelay()
- // Set a maximum for the playout delay
- //
- // Input:
- // -time_ms : maximum delay in milliseconds.
- //
- // Return value:
- // -1 if failed to set the delay,
- // 0 if the maximum delay is set.
- //
- virtual int SetMaximumPlayoutDelay(int time_ms) = 0;
-
- //
- // The shortest latency, in milliseconds, required by the jitter buffer.
- // This is computed based on inter-arrival times and the playout mode of
- // NetEq. The actual delay is the maximum of the least-required delay and
- // the minimum delay specified by the SetMinimumPlayoutDelay() API.
- //
- virtual int LeastRequiredDelayMs() const = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t PlayoutTimestamp()
- // The send timestamp of an RTP packet is associated with the decoded
- // audio of the packet in question. This function returns the timestamp of
- // the latest audio obtained by calling PlayoutData10ms().
- //
- // Input:
- // -timestamp : a reference to a uint32_t to receive the
- // timestamp.
- // Return value:
- // 0 if the output is a correct timestamp.
- // -1 if failed to output the correct timestamp.
- //
- // TODO(tlegrand): Change function to return the timestamp.
- virtual int32_t PlayoutTimestamp(uint32_t* timestamp) = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t PlayoutData10Ms(
- // Get 10 milliseconds of raw audio data for playout, at the given sampling
- // frequency. ACM will perform a resampling if required.
- //
- // Input:
- // -desired_freq_hz : the desired sampling frequency, in Hertz, of the
- // output audio. If set to -1, the function returns
- // the audio at the current sampling frequency.
- //
- // Output:
- // -audio_frame : output audio frame which contains raw audio data
- // and other relevant parameters, c.f.
- // module_common_types.h for the definition of
- // AudioFrame.
- //
- // Return value:
- // -1 if the function fails,
- // 0 if the function succeeds.
- //
- virtual int32_t PlayoutData10Ms(int32_t desired_freq_hz,
- AudioFrame* audio_frame) = 0;
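- //
- // A minimal receive-side sketch (hypothetical; error handling is omitted):
- //
- // acm->InitializeReceiver();
- // acm->RegisterReceiveCodec(codec);
- // // For each received RTP packet:
- // acm->IncomingPacket(payload, payload_len_bytes, rtp_info);
- // // Every 10 ms, pull decoded audio for playout:
- // AudioFrame frame;
- // acm->PlayoutData10Ms(-1 /* current rate */, &frame);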
-
- ///////////////////////////////////////////////////////////////////////////
- // Codec specific
- //
-
- ///////////////////////////////////////////////////////////////////////////
- // int SetOpusApplication()
- // Sets the intended application if the current send codec is Opus. Opus
- // uses this to optimize the encoding for applications like VoIP and music.
- // Currently, two modes are supported: kVoip and kAudio.
- //
- // Input:
- // - application : intended application.
- //
- // Return value:
- // -1 if current send codec is not Opus or error occurred in setting the
- // Opus application mode.
- // 0 if the Opus application mode is successfully set.
- //
- virtual int SetOpusApplication(OpusApplicationMode application) = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // int SetOpusMaxPlaybackRate()
- // If the current send codec is Opus, informs it about the maximum playback
- // rate the receiver will render. Opus can use this information to optimize
- // the bit rate and increase the computational efficiency.
- //
- // Input:
- // -frequency_hz : maximum playback rate in Hz.
- //
- // Return value:
- // -1 if current send codec is not Opus or
- // error occurred in setting the maximum playback rate,
- // 0 if maximum bandwidth is set successfully.
- //
- virtual int SetOpusMaxPlaybackRate(int frequency_hz) = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // EnableOpusDtx()
- // Enables DTX if the current send codec is Opus.
- //
- // Return value:
- // -1 if current send codec is not Opus or error occurred in enabling the
- // Opus DTX.
- // 0 if Opus DTX is enabled successfully.
- //
- virtual int EnableOpusDtx() = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // int DisableOpusDtx()
- // If current send codec is Opus, disables its internal DTX.
- //
- // Return value:
- // -1 if current send codec is not Opus or error occurred in disabling DTX.
- // 0 if Opus DTX is disabled successfully.
- //
- virtual int DisableOpusDtx() = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // statistics
- //
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t GetNetworkStatistics()
- // Get network statistics. Note that the internal statistics of NetEq are
- // reset by this call.
- //
- // Input:
- // -network_statistics : a structure that contains network statistics.
- //
- // Return value:
- // -1 if failed to get the network statistics,
- // 0 if statistics are retrieved successfully.
- //
- virtual int32_t GetNetworkStatistics(
- NetworkStatistics* network_statistics) = 0;
-
- //
- // Set an initial delay for playout.
- // An initial delay makes ACM play out silence until the equivalent of
- // |delay_ms| of audio payload is accumulated in the NetEq jitter buffer.
- // Thereafter, ACM pulls audio from NetEq in its regular fashion, and the
- // given delay is maintained throughout the call, unless channel conditions
- // yield a higher jitter buffer delay.
- //
- // Input:
- // -delay_ms : delay in milliseconds.
- //
- // Return values:
- // -1 if failed to set the delay.
- // 0 if delay is set successfully.
- //
- virtual int SetInitialPlayoutDelay(int delay_ms) = 0;
-
- //
- // Enable NACK and set the maximum size of the NACK list. If NACK is already
- // enabled, then the maximum NACK list size is modified accordingly.
- //
- // If the sequence number of last received packet is N, the sequence numbers
- // of NACK list are in the range of [N - |max_nack_list_size|, N).
- //
- // |max_nack_list_size| should be positive (non-zero) and less than or
- // equal to |Nack::kNackListSizeLimit|. Otherwise, no change is applied and
- // -1 is returned; 0 is returned on success.
- //
- virtual int EnableNack(size_t max_nack_list_size) = 0;
-
- // Disable NACK.
- virtual void DisableNack() = 0;
-
- //
- // Get a list of packets to be retransmitted. |round_trip_time_ms| is an
- // estimate of the round-trip time (in milliseconds). Missing packets which
- // will be played out in a shorter time than the round-trip time (with
- // respect to the time this API is called) will not be included in the list.
- //
- // A negative |round_trip_time_ms| results in an error and an empty list
- // is returned.
- //
- virtual std::vector<uint16_t> GetNackList(
- int64_t round_trip_time_ms) const = 0;
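- //
- // Hypothetical sketch (the list size and RTT estimate are illustrative):
- //
- // acm->EnableNack(250);
- // std::vector<uint16_t> nack_list = acm->GetNackList(100);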
-
- virtual void GetDecodingCallStatistics(
- AudioDecodingCallStats* call_stats) const = 0;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_INCLUDE_AUDIO_CODING_MODULE_H_
diff --git a/webrtc/modules/audio_coding/main/include/audio_coding_module_typedefs.h b/webrtc/modules/audio_coding/main/include/audio_coding_module_typedefs.h
deleted file mode 100644
index 489df406f4..0000000000
--- a/webrtc/modules/audio_coding/main/include/audio_coding_module_typedefs.h
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_INCLUDE_AUDIO_CODING_MODULE_TYPEDEFS_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_INCLUDE_AUDIO_CODING_MODULE_TYPEDEFS_H_
-
-#include <map>
-
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-///////////////////////////////////////////////////////////////////////////
-// enum ACMVADMode
-// An enumerator for aggressiveness of VAD
-// -VADNormal : least aggressive mode.
-// -VADLowBitrate : more aggressive than "VADNormal" to save on
-// bit-rate.
-// -VADAggr : an aggressive mode.
-//   -VADVeryAggr          : the most aggressive mode.
-//
-enum ACMVADMode {
- VADNormal = 0,
- VADLowBitrate = 1,
- VADAggr = 2,
- VADVeryAggr = 3
-};
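In practice the choice trades bit-rate against the risk of clipping quiet speech; a sketch of selecting a mode through SetVAD, whose (DTX, VAD, mode) parameter order follows its use in APITest later in this diff:

  // Hedged sketch, assuming an initialized |acm|: more aggressive modes
  // classify more frames as non-speech, saving bit-rate at some cost in
  // speech quality.
  bool dtx_enabled = true;
  bool vad_enabled = true;
  acm->SetVAD(dtx_enabled, vad_enabled, VADLowBitrate);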
-
-///////////////////////////////////////////////////////////////////////////
-//
-// Enumeration of Opus mode for intended application.
-//
-// kVoip : optimized for voice signals.
-// kAudio : optimized for non-voice signals like music.
-//
-enum OpusApplicationMode {
- kVoip = 0,
- kAudio = 1,
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_INCLUDE_AUDIO_CODING_MODULE_TYPEDEFS_H_
diff --git a/webrtc/modules/audio_coding/main/test/ACMTest.h b/webrtc/modules/audio_coding/main/test/ACMTest.h
deleted file mode 100644
index f73961f5e5..0000000000
--- a/webrtc/modules/audio_coding/main/test/ACMTest.h
+++ /dev/null
@@ -1,21 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_ACMTEST_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_ACMTEST_H_
-
-class ACMTest {
- public:
- ACMTest() {}
- virtual ~ACMTest() {}
- virtual void Perform() = 0;
-};
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_ACMTEST_H_
diff --git a/webrtc/modules/audio_coding/main/test/APITest.cc b/webrtc/modules/audio_coding/main/test/APITest.cc
deleted file mode 100644
index 1313f35332..0000000000
--- a/webrtc/modules/audio_coding/main/test/APITest.cc
+++ /dev/null
@@ -1,1117 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/test/APITest.h"
-
-#include <ctype.h>
-#include <stdio.h>
-#include <stdlib.h>
-#include <string.h>
-
-#include <iostream>
-#include <ostream>
-#include <string>
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/common.h"
-#include "webrtc/common_types.h"
-#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/test/utility.h"
-#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
-#include "webrtc/system_wrappers/include/trace.h"
-#include "webrtc/test/testsupport/fileutils.h"
-
-namespace webrtc {
-
-#define TEST_DURATION_SEC 600
-#define NUMBER_OF_SENDER_TESTS 6
-#define MAX_FILE_NAME_LENGTH_BYTE 500
-#define CHECK_THREAD_NULLITY(myThread, S) \
- if(myThread != NULL) { \
- (myThread)->Start(); \
- } else { \
- ADD_FAILURE() << S; \
- }
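The macro above expands to a bare if/else, which is safe at its call sites here but can splice surprisingly into a surrounding if/else chain; the conventional hardening (a sketch only, not what this test uses) wraps the body in do { } while (0) and parenthesizes the arguments:

  #define CHECK_THREAD_NULLITY_SAFE(myThread, S) \
    do {                                         \
      if ((myThread) != NULL) {                  \
        (myThread)->Start();                     \
      } else {                                   \
        ADD_FAILURE() << (S);                    \
      }                                          \
    } while (0)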
-
-void APITest::Wait(uint32_t waitLengthMs) {
- if (_randomTest) {
- return;
- } else {
- EventWrapper* myEvent = EventWrapper::Create();
- myEvent->Wait(waitLengthMs);
- delete myEvent;
- return;
- }
-}
-
-APITest::APITest(const Config& config)
- : _acmA(AudioCodingModule::Create(1)),
- _acmB(AudioCodingModule::Create(2)),
- _channel_A2B(NULL),
- _channel_B2A(NULL),
- _writeToFile(true),
- _pullEventA(NULL),
- _pushEventA(NULL),
- _processEventA(NULL),
- _apiEventA(NULL),
- _pullEventB(NULL),
- _pushEventB(NULL),
- _processEventB(NULL),
- _apiEventB(NULL),
- _codecCntrA(0),
- _codecCntrB(0),
- _thereIsEncoderA(false),
- _thereIsEncoderB(false),
- _thereIsDecoderA(false),
- _thereIsDecoderB(false),
- _sendVADA(false),
- _sendDTXA(false),
- _sendVADModeA(VADNormal),
- _sendVADB(false),
- _sendDTXB(false),
- _sendVADModeB(VADNormal),
- _minDelayA(0),
- _minDelayB(0),
- _dotPositionA(0),
- _dotMoveDirectionA(1),
- _dotPositionB(39),
- _dotMoveDirectionB(-1),
- _vadCallbackA(NULL),
- _vadCallbackB(NULL),
- _apiTestRWLock(*RWLockWrapper::CreateRWLock()),
- _randomTest(false),
- _testNumA(0),
- _testNumB(1) {
- int n;
- for (n = 0; n < 32; n++) {
- _payloadUsed[n] = false;
- }
-
- _movingDot[40] = '\0';
-
- for (int n = 0; n < 40; n++) {
- _movingDot[n] = ' ';
- }
-}
-
-APITest::~APITest() {
- DELETE_POINTER(_channel_A2B);
- DELETE_POINTER(_channel_B2A);
-
- DELETE_POINTER(_pushEventA);
- DELETE_POINTER(_pullEventA);
- DELETE_POINTER(_processEventA);
- DELETE_POINTER(_apiEventA);
-
- DELETE_POINTER(_pushEventB);
- DELETE_POINTER(_pullEventB);
- DELETE_POINTER(_processEventB);
- DELETE_POINTER(_apiEventB);
-
- _inFileA.Close();
- _outFileA.Close();
-
- _inFileB.Close();
- _outFileB.Close();
-
- DELETE_POINTER(_vadCallbackA);
- DELETE_POINTER(_vadCallbackB);
-
- delete &_apiTestRWLock;
-}
-
-int16_t APITest::SetUp() {
- CodecInst dummyCodec;
- int lastPayloadType = 0;
-
- int16_t numCodecs = _acmA->NumberOfCodecs();
- for (uint8_t n = 0; n < numCodecs; n++) {
- AudioCodingModule::Codec(n, &dummyCodec);
- if ((STR_CASE_CMP(dummyCodec.plname, "CN") == 0)
- && (dummyCodec.plfreq == 32000)) {
- continue;
- }
-
- printf("Register Receive Codec %s ", dummyCodec.plname);
-
- if ((n != 0) && !FixedPayloadTypeCodec(dummyCodec.plname)) {
- // Check registration with an already occupied payload type
- int currentPayloadType = dummyCodec.pltype;
- dummyCodec.pltype = 97; //lastPayloadType;
- CHECK_ERROR(_acmB->RegisterReceiveCodec(dummyCodec));
- dummyCodec.pltype = currentPayloadType;
- }
-
- if ((n < numCodecs - 1) && !FixedPayloadTypeCodec(dummyCodec.plname)) {
- // test if re-registration works;
- CodecInst nextCodec;
- int currentPayloadType = dummyCodec.pltype;
- AudioCodingModule::Codec(n + 1, &nextCodec);
- dummyCodec.pltype = nextCodec.pltype;
- if (!FixedPayloadTypeCodec(nextCodec.plname)) {
- _acmB->RegisterReceiveCodec(dummyCodec);
- }
- dummyCodec.pltype = currentPayloadType;
- }
-
- if ((n < numCodecs - 1) && !FixedPayloadTypeCodec(dummyCodec.plname)) {
- // test if un-registration works;
- CodecInst nextCodec;
- AudioCodingModule::Codec(n + 1, &nextCodec);
- nextCodec.pltype = dummyCodec.pltype;
- if (!FixedPayloadTypeCodec(nextCodec.plname)) {
- CHECK_ERROR_MT(_acmA->RegisterReceiveCodec(nextCodec));
- CHECK_ERROR_MT(_acmA->UnregisterReceiveCodec(nextCodec.pltype));
- }
- }
-
- CHECK_ERROR_MT(_acmA->RegisterReceiveCodec(dummyCodec));
- printf(" side A done!");
- CHECK_ERROR_MT(_acmB->RegisterReceiveCodec(dummyCodec));
- printf(" side B done!\n");
-
- if (!strcmp(dummyCodec.plname, "CN")) {
- CHECK_ERROR_MT(_acmA->RegisterSendCodec(dummyCodec));
- CHECK_ERROR_MT(_acmB->RegisterSendCodec(dummyCodec));
- }
- lastPayloadType = dummyCodec.pltype;
- if ((lastPayloadType >= 96) && (lastPayloadType <= 127)) {
- _payloadUsed[lastPayloadType - 96] = true;
- }
- }
- _thereIsDecoderA = true;
- _thereIsDecoderB = true;
-
- // Register Send Codec
- AudioCodingModule::Codec((uint8_t) _codecCntrA, &dummyCodec);
- CHECK_ERROR_MT(_acmA->RegisterSendCodec(dummyCodec));
- _thereIsEncoderA = true;
- //
- AudioCodingModule::Codec((uint8_t) _codecCntrB, &dummyCodec);
- CHECK_ERROR_MT(_acmB->RegisterSendCodec(dummyCodec));
- _thereIsEncoderB = true;
-
- uint16_t frequencyHz;
-
- printf("\n\nAPI Test\n");
- printf("========\n");
- printf("Hit enter to accept the default values indicated in []\n\n");
-
- //--- Input A
- std::string file_name = webrtc::test::ResourcePath(
- "audio_coding/testfile32kHz", "pcm");
- frequencyHz = 32000;
- printf("Enter input file at side A [%s]: ", file_name.c_str());
- PCMFile::ChooseFile(&file_name, 499, &frequencyHz);
- _inFileA.Open(file_name, frequencyHz, "rb", true);
-
- //--- Output A
- std::string out_file_a = webrtc::test::OutputPath() + "outA.pcm";
- printf("Enter output file at side A [%s]: ", out_file_a.c_str());
- PCMFile::ChooseFile(&out_file_a, 499, &frequencyHz);
- _outFileA.Open(out_file_a, frequencyHz, "wb");
-
- //--- Input B
- file_name = webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm");
- printf("\n\nEnter input file at side B [%s]: ", file_name.c_str());
- PCMFile::ChooseFile(&file_name, 499, &frequencyHz);
- _inFileB.Open(file_name, frequencyHz, "rb", true);
-
- //--- Output B
- std::string out_file_b = webrtc::test::OutputPath() + "outB.pcm";
- printf("Enter output file at side B [%s]: ", out_file_b.c_str());
- PCMFile::ChooseFile(&out_file_b, 499, &frequencyHz);
- _outFileB.Open(out_file_b, frequencyHz, "wb");
-
- //--- Set A-to-B channel
- _channel_A2B = new Channel(2);
- CHECK_ERROR_MT(_acmA->RegisterTransportCallback(_channel_A2B));
- _channel_A2B->RegisterReceiverACM(_acmB.get());
-
- //--- Set B-to-A channel
- _channel_B2A = new Channel(1);
- CHECK_ERROR_MT(_acmB->RegisterTransportCallback(_channel_B2A));
- _channel_B2A->RegisterReceiverACM(_acmA.get());
-
- //--- EVENT TIMERS
- // A
- _pullEventA = EventTimerWrapper::Create();
- _pushEventA = EventTimerWrapper::Create();
- _processEventA = EventTimerWrapper::Create();
- _apiEventA = EventWrapper::Create();
- // B
- _pullEventB = EventTimerWrapper::Create();
- _pushEventB = EventTimerWrapper::Create();
- _processEventB = EventTimerWrapper::Create();
- _apiEventB = EventWrapper::Create();
-
- //--- I/O params
- // A
- _outFreqHzA = _outFileA.SamplingFrequency();
- // B
- _outFreqHzB = _outFileB.SamplingFrequency();
-
- //Trace::SetEncryptedTraceFile("ACMAPITestEncrypted.txt");
-
- char print[11];
-
- // Create a trace file.
- Trace::CreateTrace();
- Trace::SetTraceFile(
- (webrtc::test::OutputPath() + "acm_api_trace.txt").c_str());
-
- printf("\nRandom Test (y/n)?");
- EXPECT_TRUE(fgets(print, 10, stdin) != NULL);
- print[10] = '\0';
- if (strstr(print, "y") != NULL) {
- _randomTest = true;
- _verbose = false;
- _writeToFile = false;
- } else {
- _randomTest = false;
- printf("\nPrint Tests (y/n)? ");
- EXPECT_TRUE(fgets(print, 10, stdin) != NULL);
- print[10] = '\0';
- if (strstr(print, "y") == NULL) {
- EXPECT_TRUE(freopen("APITest_log.txt", "w", stdout) != 0);
- _verbose = false;
- }
- }
-
- _vadCallbackA = new VADCallback;
- _vadCallbackB = new VADCallback;
-
- return 0;
-}
-
-bool APITest::PushAudioThreadA(void* obj) {
- return static_cast<APITest*>(obj)->PushAudioRunA();
-}
-
-bool APITest::PushAudioThreadB(void* obj) {
- return static_cast<APITest*>(obj)->PushAudioRunB();
-}
-
-bool APITest::PullAudioThreadA(void* obj) {
- return static_cast<APITest*>(obj)->PullAudioRunA();
-}
-
-bool APITest::PullAudioThreadB(void* obj) {
- return static_cast<APITest*>(obj)->PullAudioRunB();
-}
-
-bool APITest::ProcessThreadA(void* obj) {
- return static_cast<APITest*>(obj)->ProcessRunA();
-}
-
-bool APITest::ProcessThreadB(void* obj) {
- return static_cast<APITest*>(obj)->ProcessRunB();
-}
-
-bool APITest::APIThreadA(void* obj) {
- return static_cast<APITest*>(obj)->APIRunA();
-}
-
-bool APITest::APIThreadB(void* obj) {
- return static_cast<APITest*>(obj)->APIRunB();
-}
-
-bool APITest::PullAudioRunA() {
- _pullEventA->Wait(100);
- AudioFrame audioFrame;
- if (_acmA->PlayoutData10Ms(_outFreqHzA, &audioFrame) < 0) {
- bool thereIsDecoder;
- {
- ReadLockScoped rl(_apiTestRWLock);
- thereIsDecoder = _thereIsDecoderA;
- }
- if (thereIsDecoder) {
- fprintf(stderr, "\n>>>>>> cannot pull audio A <<<<<<<< \n");
- }
- } else {
- if (_writeToFile) {
- _outFileA.Write10MsData(audioFrame);
- }
- }
- return true;
-}
-
-bool APITest::PullAudioRunB() {
- _pullEventB->Wait(100);
- AudioFrame audioFrame;
- if (_acmB->PlayoutData10Ms(_outFreqHzB, &audioFrame) < 0) {
- bool thereIsDecoder;
- {
- ReadLockScoped rl(_apiTestRWLock);
- thereIsDecoder = _thereIsDecoderB;
- }
- if (thereIsDecoder) {
- fprintf(stderr, "\n>>>>>> cannot pull audio B <<<<<<<< \n");
- fprintf(stderr, "%d %d\n", _testNumA, _testNumB);
- }
- } else {
- if (_writeToFile) {
- _outFileB.Write10MsData(audioFrame);
- }
- }
- return true;
-}
-
-bool APITest::PushAudioRunA() {
- _pushEventA->Wait(100);
- AudioFrame audioFrame;
- _inFileA.Read10MsData(audioFrame);
- if (_acmA->Add10MsData(audioFrame) < 0) {
- bool thereIsEncoder;
- {
- ReadLockScoped rl(_apiTestRWLock);
- thereIsEncoder = _thereIsEncoderA;
- }
- if (thereIsEncoder) {
- fprintf(stderr, "\n>>>> add10MsData at A failed <<<<\n");
- }
- }
- return true;
-}
-
-bool APITest::PushAudioRunB() {
- _pushEventB->Wait(100);
- AudioFrame audioFrame;
- _inFileB.Read10MsData(audioFrame);
- if (_acmB->Add10MsData(audioFrame) < 0) {
- bool thereIsEncoder;
- {
- ReadLockScoped rl(_apiTestRWLock);
- thereIsEncoder = _thereIsEncoderB;
- }
-
- if (thereIsEncoder) {
- fprintf(stderr, "\n>>>> cannot add audio to B <<<<");
- }
- }
-
- return true;
-}
-
-bool APITest::ProcessRunA() {
- _processEventA->Wait(100);
- return true;
-}
-
-bool APITest::ProcessRunB() {
- _processEventB->Wait(100);
- return true;
-}
-
-/*
- * On side A we test the APIs related to the sender side.
- */
-
-void APITest::RunTest(char thread) {
- int testNum;
- {
- WriteLockScoped cs(_apiTestRWLock);
- if (thread == 'A') {
- _testNumA = (_testNumB + 1 + (rand() % 3)) % 4;
- testNum = _testNumA;
-
- _movingDot[_dotPositionA] = ' ';
- if (_dotPositionA == 0) {
- _dotMoveDirectionA = 1;
- }
- if (_dotPositionA == 19) {
- _dotMoveDirectionA = -1;
- }
- _dotPositionA += _dotMoveDirectionA;
- _movingDot[_dotPositionA] = (_dotMoveDirectionA > 0) ? '>' : '<';
- } else {
- _testNumB = (_testNumA + 1 + (rand() % 3)) % 4;
- testNum = _testNumB;
-
- _movingDot[_dotPositionB] = ' ';
- if (_dotPositionB == 20) {
- _dotMoveDirectionB = 1;
- }
- if (_dotPositionB == 39) {
- _dotMoveDirectionB = -1;
- }
- _dotPositionB += _dotMoveDirectionB;
- _movingDot[_dotPositionB] = (_dotMoveDirectionB > 0) ? '>' : '<';
- }
- //fprintf(stderr, "%c: %d \n", thread, testNum);
- //fflush(stderr);
- }
- switch (testNum) {
- case 0:
- CurrentCodec('A');
- ChangeCodec('A');
- break;
- case 1:
- if (!_randomTest) {
- fprintf(stdout, "\nTesting Delay ...\n");
- }
- TestDelay('A');
- break;
- case 2:
- TestSendVAD('A');
- break;
- case 3:
- TestRegisteration('A');
- break;
- default:
- fprintf(stderr, "Wrong Test Number\n");
- getc(stdin);
- exit(1);
- }
-}
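A worked example of the test-selection arithmetic above: the offset 1 + (rand() % 3) lies in {1, 2, 3}, so the result modulo 4 can be any test number except the one the other thread picked last.

  // Sketch (uses assert from <cassert>): with _testNumB == 2, thread A
  // draws from {3, 0, 1} and can never repeat 2.
  int testNumB = 2;
  for (int offset = 1; offset <= 3; ++offset) {
    int candidate = (testNumB + offset) % 4;  // 3, 0, 1
    assert(candidate != testNumB);
  }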
-
-bool APITest::APIRunA() {
- _apiEventA->Wait(50);
-
- bool randomTest;
- {
- ReadLockScoped rl(_apiTestRWLock);
- randomTest = _randomTest;
- }
- if (randomTest) {
- RunTest('A');
- } else {
- CurrentCodec('A');
- ChangeCodec('A');
- if (_codecCntrA == 0) {
- fprintf(stdout, "\nTesting Delay ...\n");
- TestDelay('A');
- }
- // VAD TEST
- TestSendVAD('A');
- TestRegisteration('A');
- }
- return true;
-}
-
-bool APITest::APIRunB() {
- _apiEventB->Wait(50);
- bool randomTest;
- {
- ReadLockScoped rl(_apiTestRWLock);
- randomTest = _randomTest;
- }
- //_apiEventB->Wait(2000);
- if (randomTest) {
- RunTest('B');
- }
-
- return true;
-}
-
-void APITest::Perform() {
- SetUp();
-
- //--- THREADS
- // A
- // PUSH
- rtc::scoped_ptr<ThreadWrapper> myPushAudioThreadA =
- ThreadWrapper::CreateThread(PushAudioThreadA, this, "PushAudioThreadA");
- CHECK_THREAD_NULLITY(myPushAudioThreadA, "Unable to start A::PUSH thread");
- // PULL
- rtc::scoped_ptr<ThreadWrapper> myPullAudioThreadA =
- ThreadWrapper::CreateThread(PullAudioThreadA, this, "PullAudioThreadA");
- CHECK_THREAD_NULLITY(myPullAudioThreadA, "Unable to start A::PULL thread");
- // Process
- rtc::scoped_ptr<ThreadWrapper> myProcessThreadA = ThreadWrapper::CreateThread(
- ProcessThreadA, this, "ProcessThreadA");
- CHECK_THREAD_NULLITY(myProcessThreadA, "Unable to start A::Process thread");
- // API
- rtc::scoped_ptr<ThreadWrapper> myAPIThreadA = ThreadWrapper::CreateThread(
- APIThreadA, this, "APIThreadA");
- CHECK_THREAD_NULLITY(myAPIThreadA, "Unable to start A::API thread");
- // B
- // PUSH
- rtc::scoped_ptr<ThreadWrapper> myPushAudioThreadB =
- ThreadWrapper::CreateThread(PushAudioThreadB, this, "PushAudioThreadB");
- CHECK_THREAD_NULLITY(myPushAudioThreadB, "Unable to start B::PUSH thread");
- // PULL
- rtc::scoped_ptr<ThreadWrapper> myPullAudioThreadB =
- ThreadWrapper::CreateThread(PullAudioThreadB, this, "PullAudioThreadB");
- CHECK_THREAD_NULLITY(myPullAudioThreadB, "Unable to start B::PULL thread");
- // Process
- rtc::scoped_ptr<ThreadWrapper> myProcessThreadB = ThreadWrapper::CreateThread(
- ProcessThreadB, this, "ProcessThreadB");
- CHECK_THREAD_NULLITY(myProcessThreadB, "Unable to start B::Process thread");
- // API
- rtc::scoped_ptr<ThreadWrapper> myAPIThreadB = ThreadWrapper::CreateThread(
- APIThreadB, this, "APIThreadB");
- CHECK_THREAD_NULLITY(myAPIThreadB, "Unable to start B::API thread");
-
- //_apiEventA->StartTimer(true, 5000);
- //_apiEventB->StartTimer(true, 5000);
-
- _processEventA->StartTimer(true, 10);
- _processEventB->StartTimer(true, 10);
-
- _pullEventA->StartTimer(true, 10);
- _pullEventB->StartTimer(true, 10);
-
- _pushEventA->StartTimer(true, 10);
- _pushEventB->StartTimer(true, 10);
-
- // Keep main thread waiting for sender/receiver
- // threads to complete
- EventWrapper* completeEvent = EventWrapper::Create();
- uint64_t startTime = TickTime::MillisecondTimestamp();
- uint64_t currentTime;
-  // Run the test for 2 minutes (120000 ms).
- do {
- {
- //ReadLockScoped rl(_apiTestRWLock);
- //fprintf(stderr, "\r%s", _movingDot);
- }
- //fflush(stderr);
- completeEvent->Wait(50);
- currentTime = TickTime::MillisecondTimestamp();
- } while ((currentTime - startTime) < 120000);
-
- //completeEvent->Wait(0xFFFFFFFF);
- //(unsigned long)((unsigned long)TEST_DURATION_SEC * (unsigned long)1000));
- delete completeEvent;
-
- myPushAudioThreadA->Stop();
- myPullAudioThreadA->Stop();
- myProcessThreadA->Stop();
- myAPIThreadA->Stop();
-
- myPushAudioThreadB->Stop();
- myPullAudioThreadB->Stop();
- myProcessThreadB->Stop();
- myAPIThreadB->Stop();
-}
-
-void APITest::CheckVADStatus(char side) {
-
- bool dtxEnabled;
- bool vadEnabled;
- ACMVADMode vadMode;
-
- if (side == 'A') {
- _acmA->VAD(&dtxEnabled, &vadEnabled, &vadMode);
- _acmA->RegisterVADCallback(NULL);
- _vadCallbackA->Reset();
- _acmA->RegisterVADCallback(_vadCallbackA);
-
- if (!_randomTest) {
- if (_verbose) {
- fprintf(stdout, "DTX %3s, VAD %3s, Mode %d", dtxEnabled ? "ON" : "OFF",
- vadEnabled ? "ON" : "OFF", (int) vadMode);
- Wait(5000);
- fprintf(stdout, " => bit-rate %3.0f kbps\n", _channel_A2B->BitRate());
- } else {
- Wait(5000);
- fprintf(stdout, "DTX %3s, VAD %3s, Mode %d => bit-rate %3.0f kbps\n",
- dtxEnabled ? "ON" : "OFF", vadEnabled ? "ON" : "OFF",
- (int) vadMode, _channel_A2B->BitRate());
- }
- _vadCallbackA->PrintFrameTypes();
- }
-
- if (dtxEnabled != _sendDTXA) {
- fprintf(stderr, ">>> Error Enabling DTX <<<\n");
- }
- if ((vadEnabled != _sendVADA) && (!dtxEnabled)) {
- fprintf(stderr, ">>> Error Enabling VAD <<<\n");
- }
- if ((vadMode != _sendVADModeA) && vadEnabled) {
- fprintf(stderr, ">>> Error setting VAD-mode <<<\n");
- }
- } else {
- _acmB->VAD(&dtxEnabled, &vadEnabled, &vadMode);
-
- _acmB->RegisterVADCallback(NULL);
- _vadCallbackB->Reset();
- _acmB->RegisterVADCallback(_vadCallbackB);
-
- if (!_randomTest) {
- if (_verbose) {
- fprintf(stdout, "DTX %3s, VAD %3s, Mode %d", dtxEnabled ? "ON" : "OFF",
- vadEnabled ? "ON" : "OFF", (int) vadMode);
- Wait(5000);
- fprintf(stdout, " => bit-rate %3.0f kbps\n", _channel_B2A->BitRate());
- } else {
- Wait(5000);
- fprintf(stdout, "DTX %3s, VAD %3s, Mode %d => bit-rate %3.0f kbps\n",
- dtxEnabled ? "ON" : "OFF", vadEnabled ? "ON" : "OFF",
- (int) vadMode, _channel_B2A->BitRate());
- }
- _vadCallbackB->PrintFrameTypes();
- }
-
- if (dtxEnabled != _sendDTXB) {
- fprintf(stderr, ">>> Error Enabling DTX <<<\n");
- }
- if ((vadEnabled != _sendVADB) && (!dtxEnabled)) {
- fprintf(stderr, ">>> Error Enabling VAD <<<\n");
- }
- if ((vadMode != _sendVADModeB) && vadEnabled) {
- fprintf(stderr, ">>> Error setting VAD-mode <<<\n");
- }
- }
-}
-
-// Set Min delay, get delay, playout timestamp
-void APITest::TestDelay(char side) {
- AudioCodingModule* myACM;
- Channel* myChannel;
- int32_t* myMinDelay;
- EventTimerWrapper* myEvent = EventTimerWrapper::Create();
-
- uint32_t inTimestamp = 0;
- uint32_t outTimestamp = 0;
- double estimDelay = 0;
-
- double averageEstimDelay = 0;
- double averageDelay = 0;
-
- CircularBuffer estimDelayCB(100);
- estimDelayCB.SetArithMean(true);
-
- if (side == 'A') {
- myACM = _acmA.get();
- myChannel = _channel_B2A;
- myMinDelay = &_minDelayA;
- } else {
- myACM = _acmB.get();
- myChannel = _channel_A2B;
- myMinDelay = &_minDelayB;
- }
-
- CHECK_ERROR_MT(myACM->SetMinimumPlayoutDelay(*myMinDelay));
-
- inTimestamp = myChannel->LastInTimestamp();
- CHECK_ERROR_MT(myACM->PlayoutTimestamp(&outTimestamp));
-
- if (!_randomTest) {
- myEvent->StartTimer(true, 30);
- int n = 0;
- int settlePoint = 5000;
- while (n < settlePoint + 400) {
- myEvent->Wait(1000);
-
- inTimestamp = myChannel->LastInTimestamp();
- CHECK_ERROR_MT(myACM->PlayoutTimestamp(&outTimestamp));
-
- //std::cout << outTimestamp << std::endl << std::flush;
- estimDelay = (double) ((uint32_t)(inTimestamp - outTimestamp))
- / ((double) myACM->ReceiveFrequency() / 1000.0);
-
- estimDelayCB.Update(estimDelay);
-
- estimDelayCB.ArithMean(averageEstimDelay);
- //printf("\n %6.1f \n", estimDelay);
- //std::cout << " " << std::flush;
-
- if (_verbose) {
- fprintf(stdout,
-                "\rExpected: %4d, retrieved: %6.1f, measured: %6.1f",
- *myMinDelay, averageDelay, averageEstimDelay);
- std::cout << " " << std::flush;
- }
- if ((averageDelay > *myMinDelay) && (n < settlePoint)) {
- settlePoint = n;
- }
- n++;
- }
- myEvent->StopTimer();
- }
-
- if ((!_verbose) && (!_randomTest)) {
-    fprintf(stdout, "\nExpected: %4d, retrieved: %6.1f, measured: %6.1f",
- *myMinDelay, averageDelay, averageEstimDelay);
- }
-
- *myMinDelay = (rand() % 1000) + 1;
-
- NetworkStatistics networkStat;
- CHECK_ERROR_MT(myACM->GetNetworkStatistics(&networkStat));
-
- if (!_randomTest) {
- fprintf(stdout, "\n\nJitter Statistics at Side %c\n", side);
- fprintf(stdout, "--------------------------------------\n");
- fprintf(stdout, "buffer-size............. %d\n",
- networkStat.currentBufferSize);
- fprintf(stdout, "Preferred buffer-size... %d\n",
- networkStat.preferredBufferSize);
- fprintf(stdout, "Peaky jitter mode........%d\n",
- networkStat.jitterPeaksFound);
- fprintf(stdout, "packet-size rate........ %d\n",
- networkStat.currentPacketLossRate);
- fprintf(stdout, "discard rate............ %d\n",
- networkStat.currentDiscardRate);
- fprintf(stdout, "expand rate............. %d\n",
- networkStat.currentExpandRate);
- fprintf(stdout, "speech expand rate...... %d\n",
- networkStat.currentSpeechExpandRate);
- fprintf(stdout, "Preemptive rate......... %d\n",
- networkStat.currentPreemptiveRate);
- fprintf(stdout, "Accelerate rate......... %d\n",
- networkStat.currentAccelerateRate);
- fprintf(stdout, "Secondary decoded rate.. %d\n",
- networkStat.currentSecondaryDecodedRate);
- fprintf(stdout, "Clock-drift............. %d\n", networkStat.clockDriftPPM);
- fprintf(stdout, "Mean waiting time....... %d\n",
- networkStat.meanWaitingTimeMs);
- fprintf(stdout, "Median waiting time..... %d\n",
- networkStat.medianWaitingTimeMs);
- fprintf(stdout, "Min waiting time........ %d\n",
- networkStat.minWaitingTimeMs);
- fprintf(stdout, "Max waiting time........ %d\n",
- networkStat.maxWaitingTimeMs);
- }
-
- CHECK_ERROR_MT(myACM->SetMinimumPlayoutDelay(*myMinDelay));
-
- if (!_randomTest) {
- myEvent->Wait(500);
- fprintf(stdout, "\n");
- fprintf(stdout, "\n");
- }
- delete myEvent;
-}
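The delay estimate above converts a timestamp gap into milliseconds: RTP timestamps advance in sample ticks, so dividing by (receive frequency / 1000) gives ms. A worked example with made-up values:

  // Sketch: at 16 kHz, 16 timestamp ticks correspond to 1 ms of audio.
  uint32_t in_timestamp = 48000;   // last timestamp pushed into the channel
  uint32_t out_timestamp = 46400;  // timestamp currently being played out
  int receive_frequency_hz = 16000;
  double delay_ms = static_cast<double>(in_timestamp - out_timestamp) /
                    (receive_frequency_hz / 1000.0);  // 1600 / 16 = 100 ms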
-
-// Unregister a codec & register again.
-void APITest::TestRegisteration(char sendSide) {
- AudioCodingModule* sendACM;
- AudioCodingModule* receiveACM;
- bool* thereIsDecoder;
- EventWrapper* myEvent = EventWrapper::Create();
-
- if (!_randomTest) {
- fprintf(stdout, "\n\n");
- fprintf(stdout,
- "---------------------------------------------------------\n");
- fprintf(stdout, " Unregister/register Receive Codec\n");
- fprintf(stdout,
- "---------------------------------------------------------\n");
- }
-
- switch (sendSide) {
- case 'A': {
- sendACM = _acmA.get();
- receiveACM = _acmB.get();
- thereIsDecoder = &_thereIsDecoderB;
- break;
- }
- case 'B': {
- sendACM = _acmB.get();
- receiveACM = _acmA.get();
- thereIsDecoder = &_thereIsDecoderA;
- break;
- }
- default:
- fprintf(stderr, "Invalid sender-side in TestRegistration(%c)\n",
- sendSide);
- exit(-1);
- }
-
- CodecInst myCodec;
- if (sendACM->SendCodec(&myCodec) < 0) {
- AudioCodingModule::Codec(_codecCntrA, &myCodec);
- }
-
- if (!_randomTest) {
-    fprintf(stdout, "Unregistering receive codec, NO AUDIO.\n");
- fflush (stdout);
- }
- {
- WriteLockScoped wl(_apiTestRWLock);
- *thereIsDecoder = false;
- }
- //myEvent->Wait(20);
- CHECK_ERROR_MT(receiveACM->UnregisterReceiveCodec(myCodec.pltype));
- Wait(1000);
-
- int currentPayload = myCodec.pltype;
-
- if (!FixedPayloadTypeCodec(myCodec.plname)) {
- int32_t i;
- for (i = 0; i < 32; i++) {
- if (!_payloadUsed[i]) {
- if (!_randomTest) {
- fprintf(stdout,
- "Register receive codec with new Payload, AUDIO BACK.\n");
- }
- //myCodec.pltype = i + 96;
- //CHECK_ERROR_MT(receiveACM->RegisterReceiveCodec(myCodec));
- //CHECK_ERROR_MT(sendACM->RegisterSendCodec(myCodec));
- //myEvent->Wait(20);
- //{
- // WriteLockScoped wl(_apiTestRWLock);
- // *thereIsDecoder = true;
- //}
- Wait(1000);
-
- if (!_randomTest) {
-          fprintf(stdout, "Unregistering receive codec, NO AUDIO.\n");
- }
- //{
- // WriteLockScoped wl(_apiTestRWLock);
- // *thereIsDecoder = false;
- //}
- //myEvent->Wait(20);
- //CHECK_ERROR_MT(receiveACM->UnregisterReceiveCodec(myCodec.pltype));
- Wait(1000);
-
- myCodec.pltype = currentPayload;
- if (!_randomTest) {
- fprintf(stdout,
- "Register receive codec with default Payload, AUDIO BACK.\n");
- fflush (stdout);
- }
- CHECK_ERROR_MT(receiveACM->RegisterReceiveCodec(myCodec));
- //CHECK_ERROR_MT(sendACM->RegisterSendCodec(myCodec));
- myEvent->Wait(20);
- {
- WriteLockScoped wl(_apiTestRWLock);
- *thereIsDecoder = true;
- }
- Wait(1000);
-
- break;
- }
- }
- if (i == 32) {
- CHECK_ERROR_MT(receiveACM->RegisterReceiveCodec(myCodec));
- {
- WriteLockScoped wl(_apiTestRWLock);
- *thereIsDecoder = true;
- }
- }
- } else {
- if (!_randomTest) {
- fprintf(stdout,
- "Register receive codec with fixed Payload, AUDIO BACK.\n");
- fflush (stdout);
- }
- CHECK_ERROR_MT(receiveACM->RegisterReceiveCodec(myCodec));
- //CHECK_ERROR_MT(receiveACM->UnregisterReceiveCodec(myCodec.pltype));
- //CHECK_ERROR_MT(receiveACM->RegisterReceiveCodec(myCodec));
- myEvent->Wait(20);
- {
- WriteLockScoped wl(_apiTestRWLock);
- *thereIsDecoder = true;
- }
- }
- delete myEvent;
- if (!_randomTest) {
- fprintf(stdout,
- "---------------------------------------------------------\n");
- }
-}
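Condensed, the essential pattern in the function above (a restatement for clarity, not new test code): flip the shared decoder flag under the write lock before touching the codec registry, so the concurrently running pull thread, which reads the flag under a read lock, does not report the expected decode failures as errors.

  {
    WriteLockScoped wl(_apiTestRWLock);
    *thereIsDecoder = false;  // pull thread now tolerates decode failures
  }
  CHECK_ERROR_MT(receiveACM->UnregisterReceiveCodec(myCodec.pltype));
  // ... re-register, then set *thereIsDecoder back to true under the lock.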
-
-void APITest::TestSendVAD(char side) {
- if (_randomTest) {
- return;
- }
-
- bool* vad;
- bool* dtx;
- ACMVADMode* mode;
- Channel* myChannel;
- AudioCodingModule* myACM;
-
- CodecInst myCodec;
- if (!_randomTest) {
- fprintf(stdout, "\n\n");
- fprintf(stdout, "-----------------------------------------------\n");
- fprintf(stdout, " Test VAD API\n");
- fprintf(stdout, "-----------------------------------------------\n");
- }
-
- if (side == 'A') {
- AudioCodingModule::Codec(_codecCntrA, &myCodec);
- vad = &_sendVADA;
- dtx = &_sendDTXA;
- mode = &_sendVADModeA;
- myChannel = _channel_A2B;
- myACM = _acmA.get();
- } else {
- AudioCodingModule::Codec(_codecCntrB, &myCodec);
- vad = &_sendVADB;
- dtx = &_sendDTXB;
- mode = &_sendVADModeB;
- myChannel = _channel_B2A;
- myACM = _acmB.get();
- }
-
- CheckVADStatus(side);
- if (!_randomTest) {
- fprintf(stdout, "\n\n");
- }
-
- switch (*mode) {
- case VADNormal:
- *vad = true;
- *dtx = true;
- *mode = VADAggr;
- break;
- case VADLowBitrate:
- *vad = true;
- *dtx = true;
- *mode = VADVeryAggr;
- break;
- case VADAggr:
- *vad = true;
- *dtx = true;
- *mode = VADLowBitrate;
- break;
- case VADVeryAggr:
- *vad = false;
- *dtx = false;
- *mode = VADNormal;
- break;
- default:
- *mode = VADNormal;
- }
-
- *dtx = (myCodec.plfreq == 32000) ? false : *dtx;
-
- CHECK_ERROR_MT(myACM->SetVAD(*dtx, *vad, *mode));
- myChannel->ResetStats();
-
- CheckVADStatus(side);
- if (!_randomTest) {
- fprintf(stdout, "\n");
- fprintf(stdout, "-----------------------------------------------\n");
- }
-
- // Fault Test
-  CHECK_PROTECTED_MT(myACM->SetVAD(false, true, (ACMVADMode)-1));
-  CHECK_PROTECTED_MT(myACM->SetVAD(false, true, (ACMVADMode)4));
-
-}
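The switch above walks through every VAD mode while toggling VAD/DTX off once per cycle; a hypothetical helper that documents the same order (no such function exists in the test):

  ACMVADMode NextVadMode(ACMVADMode mode) {
    switch (mode) {
      case VADNormal:     return VADAggr;
      case VADLowBitrate: return VADVeryAggr;
      case VADAggr:       return VADLowBitrate;
      case VADVeryAggr:   return VADNormal;  // VAD and DTX go off here.
      default:            return VADNormal;
    }
  }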
-
-void APITest::CurrentCodec(char side) {
- CodecInst myCodec;
- if (side == 'A') {
- _acmA->SendCodec(&myCodec);
- } else {
- _acmB->SendCodec(&myCodec);
- }
-
- if (!_randomTest) {
- fprintf(stdout, "\n\n");
- fprintf(stdout, "Send codec in Side A\n");
- fprintf(stdout, "----------------------------\n");
- fprintf(stdout, "Name................. %s\n", myCodec.plname);
- fprintf(stdout, "Sampling Frequency... %d\n", myCodec.plfreq);
- fprintf(stdout, "Rate................. %d\n", myCodec.rate);
- fprintf(stdout, "Payload-type......... %d\n", myCodec.pltype);
- fprintf(stdout, "Packet-size.......... %d\n", myCodec.pacsize);
- }
-
- Wait(100);
-}
-
-void APITest::ChangeCodec(char side) {
- CodecInst myCodec;
- AudioCodingModule* myACM;
- uint8_t* codecCntr;
- bool* thereIsEncoder;
- bool* vad;
- bool* dtx;
- ACMVADMode* mode;
- Channel* myChannel;
- // Reset and Wait
- if (!_randomTest) {
- fprintf(stdout, "Reset Encoder Side A \n");
- }
- if (side == 'A') {
- myACM = _acmA.get();
- codecCntr = &_codecCntrA;
- {
- WriteLockScoped wl(_apiTestRWLock);
- thereIsEncoder = &_thereIsEncoderA;
- }
- vad = &_sendVADA;
- dtx = &_sendDTXA;
- mode = &_sendVADModeA;
- myChannel = _channel_A2B;
- } else {
- myACM = _acmB.get();
- codecCntr = &_codecCntrB;
- {
- WriteLockScoped wl(_apiTestRWLock);
- thereIsEncoder = &_thereIsEncoderB;
- }
- vad = &_sendVADB;
- dtx = &_sendDTXB;
- mode = &_sendVADModeB;
- myChannel = _channel_B2A;
- }
-
- Wait(100);
-
- // Register the next codec
- do {
- *codecCntr =
- (*codecCntr < AudioCodingModule::NumberOfCodecs() - 1) ?
- (*codecCntr + 1) : 0;
-
- if (*codecCntr == 0) {
- //printf("Initialize Sender Side A \n");
- {
- WriteLockScoped wl(_apiTestRWLock);
- *thereIsEncoder = false;
- }
-      // After initialization the CN codecs are lost; re-register them.
- if (AudioCodingModule::Codec("CN", &myCodec, 8000, 1) >= 0) {
- CHECK_ERROR_MT(myACM->RegisterSendCodec(myCodec));
- }
- if (AudioCodingModule::Codec("CN", &myCodec, 16000, 1) >= 0) {
- CHECK_ERROR_MT(myACM->RegisterSendCodec(myCodec));
- }
- // VAD & DTX are disabled after initialization
- *vad = false;
- *dtx = false;
- _writeToFile = false;
- }
-
- AudioCodingModule::Codec(*codecCntr, &myCodec);
- } while (!STR_CASE_CMP(myCodec.plname, "CN")
- || !STR_CASE_CMP(myCodec.plname, "telephone-event")
- || !STR_CASE_CMP(myCodec.plname, "RED"));
-
- if (!_randomTest) {
- fprintf(stdout,"\n=====================================================\n");
- fprintf(stdout, " Registering New Codec %s, %d kHz, %d kbps\n",
- myCodec.plname, myCodec.plfreq / 1000, myCodec.rate / 1000);
- }
- //std::cout<< std::flush;
-
-  // No DTX for super-wideband codecs at this point.
- if (myCodec.plfreq == 32000) {
- *dtx = false;
- CHECK_ERROR_MT(myACM->SetVAD(*dtx, *vad, *mode));
-
- }
-
- CHECK_ERROR_MT(myACM->RegisterSendCodec(myCodec));
- myChannel->ResetStats();
- {
- WriteLockScoped wl(_apiTestRWLock);
- *thereIsEncoder = true;
- }
- Wait(500);
-}
-
-} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/main/test/APITest.h b/webrtc/modules/audio_coding/main/test/APITest.h
deleted file mode 100644
index d4c5b1ecdd..0000000000
--- a/webrtc/modules/audio_coding/main/test/APITest.h
+++ /dev/null
@@ -1,163 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_APITEST_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_APITEST_H_
-
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module.h"
-#include "webrtc/modules/audio_coding/main/test/ACMTest.h"
-#include "webrtc/modules/audio_coding/main/test/Channel.h"
-#include "webrtc/modules/audio_coding/main/test/PCMFile.h"
-#include "webrtc/modules/audio_coding/main/test/utility.h"
-#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/system_wrappers/include/rw_lock_wrapper.h"
-
-namespace webrtc {
-
-class Config;
-
-enum APITESTAction {
- TEST_CHANGE_CODEC_ONLY = 0,
- DTX_TEST = 1
-};
-
-class APITest : public ACMTest {
- public:
- explicit APITest(const Config& config);
- ~APITest();
-
- void Perform();
- private:
- int16_t SetUp();
-
- static bool PushAudioThreadA(void* obj);
- static bool PullAudioThreadA(void* obj);
- static bool ProcessThreadA(void* obj);
- static bool APIThreadA(void* obj);
-
- static bool PushAudioThreadB(void* obj);
- static bool PullAudioThreadB(void* obj);
- static bool ProcessThreadB(void* obj);
- static bool APIThreadB(void* obj);
-
- void CheckVADStatus(char side);
-
- // Set Min delay, get delay, playout timestamp
- void TestDelay(char side);
-
- // Unregister a codec & register again.
- void TestRegisteration(char side);
-
- // Playout Mode, background noise mode.
- // Receiver Frequency, playout frequency.
- void TestPlayout(char receiveSide);
-
- //
- void TestSendVAD(char side);
-
- void CurrentCodec(char side);
-
- void ChangeCodec(char side);
-
- void Wait(uint32_t waitLengthMs);
-
- void RunTest(char thread);
-
- bool PushAudioRunA();
- bool PullAudioRunA();
- bool ProcessRunA();
- bool APIRunA();
-
- bool PullAudioRunB();
- bool PushAudioRunB();
- bool ProcessRunB();
- bool APIRunB();
-
- //--- ACMs
- rtc::scoped_ptr<AudioCodingModule> _acmA;
- rtc::scoped_ptr<AudioCodingModule> _acmB;
-
- //--- Channels
- Channel* _channel_A2B;
- Channel* _channel_B2A;
-
- //--- I/O files
- // A
- PCMFile _inFileA;
- PCMFile _outFileA;
- // B
- PCMFile _outFileB;
- PCMFile _inFileB;
-
- //--- I/O params
- // A
- int32_t _outFreqHzA;
- // B
- int32_t _outFreqHzB;
-
-  // Should we write to file?
-  // We might skip writing to file if we run the test for a long time.
- bool _writeToFile;
- //--- Events
- // A
- EventTimerWrapper* _pullEventA; // pulling data from ACM
- EventTimerWrapper* _pushEventA; // pushing data to ACM
- EventTimerWrapper* _processEventA; // process
- EventWrapper* _apiEventA; // API calls
- // B
- EventTimerWrapper* _pullEventB; // pulling data from ACM
- EventTimerWrapper* _pushEventB; // pushing data to ACM
- EventTimerWrapper* _processEventB; // process
- EventWrapper* _apiEventB; // API calls
-
-  // Keep track of the current codec on each side.
- uint8_t _codecCntrA;
- uint8_t _codecCntrB;
-
-  // Set to true when an encoder/decoder exists on the corresponding side.
- bool _thereIsEncoderA;
- bool _thereIsEncoderB;
- bool _thereIsDecoderA;
- bool _thereIsDecoderB;
-
- bool _sendVADA;
- bool _sendDTXA;
- ACMVADMode _sendVADModeA;
-
- bool _sendVADB;
- bool _sendDTXB;
- ACMVADMode _sendVADModeB;
-
- int32_t _minDelayA;
- int32_t _minDelayB;
- bool _payloadUsed[32];
-
- bool _verbose;
-
- int _dotPositionA;
- int _dotMoveDirectionA;
- int _dotPositionB;
- int _dotMoveDirectionB;
-
- char _movingDot[41];
-
- VADCallback* _vadCallbackA;
- VADCallback* _vadCallbackB;
- RWLockWrapper& _apiTestRWLock;
- bool _randomTest;
- int _testNumA;
- int _testNumB;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_APITEST_H_
diff --git a/webrtc/modules/audio_coding/main/test/Channel.cc b/webrtc/modules/audio_coding/main/test/Channel.cc
deleted file mode 100644
index 02bd783a38..0000000000
--- a/webrtc/modules/audio_coding/main/test/Channel.cc
+++ /dev/null
@@ -1,424 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/test/Channel.h"
-
-#include <assert.h>
-#include <iostream>
-
-#include "webrtc/base/format_macros.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-
-namespace webrtc {
-
-int32_t Channel::SendData(FrameType frameType,
- uint8_t payloadType,
- uint32_t timeStamp,
- const uint8_t* payloadData,
- size_t payloadSize,
- const RTPFragmentationHeader* fragmentation) {
- WebRtcRTPHeader rtpInfo;
- int32_t status;
- size_t payloadDataSize = payloadSize;
-
- rtpInfo.header.markerBit = false;
- rtpInfo.header.ssrc = 0;
- rtpInfo.header.sequenceNumber = (external_sequence_number_ < 0) ?
- _seqNo++ : static_cast<uint16_t>(external_sequence_number_);
- rtpInfo.header.payloadType = payloadType;
- rtpInfo.header.timestamp = (external_send_timestamp_ < 0) ? timeStamp :
- static_cast<uint32_t>(external_send_timestamp_);
-
- if (frameType == kAudioFrameCN) {
- rtpInfo.type.Audio.isCNG = true;
- } else {
- rtpInfo.type.Audio.isCNG = false;
- }
- if (frameType == kEmptyFrame) {
- // When frame is empty, we should not transmit it. The frame size of the
- // next non-empty frame will be based on the previous frame size.
- _useLastFrameSize = _lastFrameSizeSample > 0;
- return 0;
- }
-
- rtpInfo.type.Audio.channel = 1;
- // Treat fragmentation separately
- if (fragmentation != NULL) {
- // If silence for too long, send only new data.
- if ((fragmentation->fragmentationVectorSize == 2) &&
- (fragmentation->fragmentationTimeDiff[1] <= 0x3fff)) {
- // only 0x80 if we have multiple blocks
- _payloadData[0] = 0x80 + fragmentation->fragmentationPlType[1];
- size_t REDheader = (fragmentation->fragmentationTimeDiff[1] << 10) +
- fragmentation->fragmentationLength[1];
- _payloadData[1] = uint8_t((REDheader >> 16) & 0x000000FF);
- _payloadData[2] = uint8_t((REDheader >> 8) & 0x000000FF);
- _payloadData[3] = uint8_t(REDheader & 0x000000FF);
-
- _payloadData[4] = fragmentation->fragmentationPlType[0];
- // copy the RED data
- memcpy(_payloadData + 5,
- payloadData + fragmentation->fragmentationOffset[1],
- fragmentation->fragmentationLength[1]);
- // copy the normal data
- memcpy(_payloadData + 5 + fragmentation->fragmentationLength[1],
- payloadData + fragmentation->fragmentationOffset[0],
- fragmentation->fragmentationLength[0]);
- payloadDataSize += 5;
- } else {
- // single block (newest one)
- memcpy(_payloadData, payloadData + fragmentation->fragmentationOffset[0],
- fragmentation->fragmentationLength[0]);
- payloadDataSize = fragmentation->fragmentationLength[0];
- rtpInfo.header.payloadType = fragmentation->fragmentationPlType[0];
- }
- } else {
- memcpy(_payloadData, payloadData, payloadDataSize);
- if (_isStereo) {
- if (_leftChannel) {
- memcpy(&_rtpInfo, &rtpInfo, sizeof(WebRtcRTPHeader));
- _leftChannel = false;
- rtpInfo.type.Audio.channel = 1;
- } else {
- memcpy(&rtpInfo, &_rtpInfo, sizeof(WebRtcRTPHeader));
- _leftChannel = true;
- rtpInfo.type.Audio.channel = 2;
- }
- }
- }
-
- _channelCritSect->Enter();
- if (_saveBitStream) {
- //fwrite(payloadData, sizeof(uint8_t), payloadSize, _bitStreamFile);
- }
-
- if (!_isStereo) {
- CalcStatistics(rtpInfo, payloadSize);
- }
- _useLastFrameSize = false;
- _lastInTimestamp = timeStamp;
- _totalBytes += payloadDataSize;
- _channelCritSect->Leave();
-
- if (_useFECTestWithPacketLoss) {
- _packetLoss += 1;
- if (_packetLoss == 3) {
- _packetLoss = 0;
- return 0;
- }
- }
-
- if (num_packets_to_drop_ > 0) {
- num_packets_to_drop_--;
- return 0;
- }
-
- status = _receiverACM->IncomingPacket(_payloadData, payloadDataSize, rtpInfo);
-
- return status;
-}
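A worked example of the RED header packing in SendData above, which matches the RFC 2198 layout (F bit plus payload type in byte 0, then a 14-bit timestamp offset and a 10-bit block length across bytes 1-3); the numeric values are made up:

  uint8_t red_pt = 13;       // payload type of the redundant (older) block
  uint16_t time_diff = 960;  // timestamp offset, must fit in 14 bits
  size_t block_len = 40;     // redundant block length, must fit in 10 bits
  uint8_t header[4];
  header[0] = 0x80 + red_pt;                        // F=1: another block follows
  uint32_t packed = (time_diff << 10) + block_len;  // 14 + 10 = 24 bits
  header[1] = (packed >> 16) & 0xFF;
  header[2] = (packed >> 8) & 0xFF;
  header[3] = packed & 0xFF;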
-
-// TODO(turajs): rewrite this method.
-void Channel::CalcStatistics(WebRtcRTPHeader& rtpInfo, size_t payloadSize) {
- int n;
- if ((rtpInfo.header.payloadType != _lastPayloadType)
- && (_lastPayloadType != -1)) {
-    // The payload type has changed. We have to terminate the calculations
-    // on the previous payload type. We ignore the last packet of that
-    // payload type just to make things easier.
- for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
- if (_lastPayloadType == _payloadStats[n].payloadType) {
- _payloadStats[n].newPacket = true;
- break;
- }
- }
- }
- _lastPayloadType = rtpInfo.header.payloadType;
-
- bool newPayload = true;
- ACMTestPayloadStats* currentPayloadStr = NULL;
- for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
- if (rtpInfo.header.payloadType == _payloadStats[n].payloadType) {
- newPayload = false;
- currentPayloadStr = &_payloadStats[n];
- break;
- }
- }
-
- if (!newPayload) {
- if (!currentPayloadStr->newPacket) {
- if (!_useLastFrameSize) {
- _lastFrameSizeSample = (uint32_t) ((uint32_t) rtpInfo.header.timestamp -
- (uint32_t) currentPayloadStr->lastTimestamp);
- }
- assert(_lastFrameSizeSample > 0);
- int k = 0;
- for (; k < MAX_NUM_FRAMESIZES; ++k) {
- if ((currentPayloadStr->frameSizeStats[k].frameSizeSample ==
- _lastFrameSizeSample) ||
- (currentPayloadStr->frameSizeStats[k].frameSizeSample == 0)) {
- break;
- }
- }
- if (k == MAX_NUM_FRAMESIZES) {
- // New frame size found but no space to count statistics on it. Skip it.
- printf("No memory to store statistics for payload %d : frame size %d\n",
- _lastPayloadType, _lastFrameSizeSample);
- return;
- }
- ACMTestFrameSizeStats* currentFrameSizeStats = &(currentPayloadStr
- ->frameSizeStats[k]);
- currentFrameSizeStats->frameSizeSample = (int16_t) _lastFrameSizeSample;
-
- // increment the number of encoded samples.
- currentFrameSizeStats->totalEncodedSamples += _lastFrameSizeSample;
-      // Increment the number of received packets.
- currentFrameSizeStats->numPackets++;
-      // Increment the total number of bytes (this is based on the previous
-      // payload; we don't know the frame size of the current payload).
- currentFrameSizeStats->totalPayloadLenByte += currentPayloadStr
- ->lastPayloadLenByte;
-      // Store the maximum payload size (this is based on the previous
-      // payload; we don't know the frame size of the current payload).
- if (currentFrameSizeStats->maxPayloadLen
- < currentPayloadStr->lastPayloadLenByte) {
- currentFrameSizeStats->maxPayloadLen = currentPayloadStr
- ->lastPayloadLenByte;
- }
- // store the current values for the next time
- currentPayloadStr->lastTimestamp = rtpInfo.header.timestamp;
- currentPayloadStr->lastPayloadLenByte = payloadSize;
- } else {
- currentPayloadStr->newPacket = false;
- currentPayloadStr->lastPayloadLenByte = payloadSize;
- currentPayloadStr->lastTimestamp = rtpInfo.header.timestamp;
- currentPayloadStr->payloadType = rtpInfo.header.payloadType;
- memset(currentPayloadStr->frameSizeStats, 0, MAX_NUM_FRAMESIZES *
- sizeof(ACMTestFrameSizeStats));
- }
- } else {
- n = 0;
- while (_payloadStats[n].payloadType != -1) {
- n++;
- }
- // first packet
- _payloadStats[n].newPacket = false;
- _payloadStats[n].lastPayloadLenByte = payloadSize;
- _payloadStats[n].lastTimestamp = rtpInfo.header.timestamp;
- _payloadStats[n].payloadType = rtpInfo.header.payloadType;
- memset(_payloadStats[n].frameSizeStats, 0, MAX_NUM_FRAMESIZES *
- sizeof(ACMTestFrameSizeStats));
- }
-}
-
-Channel::Channel(int16_t chID)
- : _receiverACM(NULL),
- _seqNo(0),
- _channelCritSect(CriticalSectionWrapper::CreateCriticalSection()),
- _bitStreamFile(NULL),
- _saveBitStream(false),
- _lastPayloadType(-1),
- _isStereo(false),
- _leftChannel(true),
- _lastInTimestamp(0),
- _useLastFrameSize(false),
- _lastFrameSizeSample(0),
- _packetLoss(0),
- _useFECTestWithPacketLoss(false),
- _beginTime(TickTime::MillisecondTimestamp()),
- _totalBytes(0),
- external_send_timestamp_(-1),
- external_sequence_number_(-1),
- num_packets_to_drop_(0) {
- int n;
- int k;
- for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
- _payloadStats[n].payloadType = -1;
- _payloadStats[n].newPacket = true;
- for (k = 0; k < MAX_NUM_FRAMESIZES; k++) {
- _payloadStats[n].frameSizeStats[k].frameSizeSample = 0;
- _payloadStats[n].frameSizeStats[k].maxPayloadLen = 0;
- _payloadStats[n].frameSizeStats[k].numPackets = 0;
- _payloadStats[n].frameSizeStats[k].totalPayloadLenByte = 0;
- _payloadStats[n].frameSizeStats[k].totalEncodedSamples = 0;
- }
- }
- if (chID >= 0) {
- _saveBitStream = true;
- char bitStreamFileName[500];
- sprintf(bitStreamFileName, "bitStream_%d.dat", chID);
- _bitStreamFile = fopen(bitStreamFileName, "wb");
- } else {
- _saveBitStream = false;
- }
-}
-
-Channel::~Channel() {
- delete _channelCritSect;
-}
-
-void Channel::RegisterReceiverACM(AudioCodingModule* acm) {
- _receiverACM = acm;
- return;
-}
-
-void Channel::ResetStats() {
- int n;
- int k;
- _channelCritSect->Enter();
- _lastPayloadType = -1;
- for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
- _payloadStats[n].payloadType = -1;
- _payloadStats[n].newPacket = true;
- for (k = 0; k < MAX_NUM_FRAMESIZES; k++) {
- _payloadStats[n].frameSizeStats[k].frameSizeSample = 0;
- _payloadStats[n].frameSizeStats[k].maxPayloadLen = 0;
- _payloadStats[n].frameSizeStats[k].numPackets = 0;
- _payloadStats[n].frameSizeStats[k].totalPayloadLenByte = 0;
- _payloadStats[n].frameSizeStats[k].totalEncodedSamples = 0;
- }
- }
- _beginTime = TickTime::MillisecondTimestamp();
- _totalBytes = 0;
- _channelCritSect->Leave();
-}
-
-int16_t Channel::Stats(CodecInst& codecInst,
- ACMTestPayloadStats& payloadStats) {
- _channelCritSect->Enter();
- int n;
- payloadStats.payloadType = -1;
- for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
- if (_payloadStats[n].payloadType == codecInst.pltype) {
- memcpy(&payloadStats, &_payloadStats[n], sizeof(ACMTestPayloadStats));
- break;
- }
- }
- if (payloadStats.payloadType == -1) {
- _channelCritSect->Leave();
- return -1;
- }
- for (n = 0; n < MAX_NUM_FRAMESIZES; n++) {
- if (payloadStats.frameSizeStats[n].frameSizeSample == 0) {
- _channelCritSect->Leave();
- return 0;
- }
- payloadStats.frameSizeStats[n].usageLenSec = (double) payloadStats
- .frameSizeStats[n].totalEncodedSamples / (double) codecInst.plfreq;
-
- payloadStats.frameSizeStats[n].rateBitPerSec =
- payloadStats.frameSizeStats[n].totalPayloadLenByte * 8
- / payloadStats.frameSizeStats[n].usageLenSec;
-
- }
- _channelCritSect->Leave();
- return 0;
-}
-
-void Channel::Stats(uint32_t* numPackets) {
- _channelCritSect->Enter();
- int k;
- int n;
- memset(numPackets, 0, MAX_NUM_PAYLOADS * sizeof(uint32_t));
- for (k = 0; k < MAX_NUM_PAYLOADS; k++) {
- if (_payloadStats[k].payloadType == -1) {
- break;
- }
- numPackets[k] = 0;
- for (n = 0; n < MAX_NUM_FRAMESIZES; n++) {
- if (_payloadStats[k].frameSizeStats[n].frameSizeSample == 0) {
- break;
- }
- numPackets[k] += _payloadStats[k].frameSizeStats[n].numPackets;
- }
- }
- _channelCritSect->Leave();
-}
-
-void Channel::Stats(uint8_t* payloadType, uint32_t* payloadLenByte) {
- _channelCritSect->Enter();
-
- int k;
- int n;
- memset(payloadLenByte, 0, MAX_NUM_PAYLOADS * sizeof(uint32_t));
- for (k = 0; k < MAX_NUM_PAYLOADS; k++) {
- if (_payloadStats[k].payloadType == -1) {
- break;
- }
- payloadType[k] = (uint8_t) _payloadStats[k].payloadType;
- payloadLenByte[k] = 0;
- for (n = 0; n < MAX_NUM_FRAMESIZES; n++) {
- if (_payloadStats[k].frameSizeStats[n].frameSizeSample == 0) {
- break;
- }
- payloadLenByte[k] += (uint16_t) _payloadStats[k].frameSizeStats[n]
- .totalPayloadLenByte;
- }
- }
-
- _channelCritSect->Leave();
-}
-
-void Channel::PrintStats(CodecInst& codecInst) {
- ACMTestPayloadStats payloadStats;
- Stats(codecInst, payloadStats);
- printf("%s %d kHz\n", codecInst.plname, codecInst.plfreq / 1000);
- printf("=====================================================\n");
- if (payloadStats.payloadType == -1) {
-    printf("No packets were sent with payload-type %d (%s)\n\n",
- codecInst.pltype, codecInst.plname);
- return;
- }
- for (int k = 0; k < MAX_NUM_FRAMESIZES; k++) {
- if (payloadStats.frameSizeStats[k].frameSizeSample == 0) {
- break;
- }
- printf("Frame-size.................... %d samples\n",
- payloadStats.frameSizeStats[k].frameSizeSample);
- printf("Average Rate.................. %.0f bits/sec\n",
- payloadStats.frameSizeStats[k].rateBitPerSec);
- printf("Maximum Payload-Size.......... %" PRIuS " Bytes\n",
- payloadStats.frameSizeStats[k].maxPayloadLen);
- printf(
- "Maximum Instantaneous Rate.... %.0f bits/sec\n",
- ((double) payloadStats.frameSizeStats[k].maxPayloadLen * 8.0
- * (double) codecInst.plfreq)
- / (double) payloadStats.frameSizeStats[k].frameSizeSample);
- printf("Number of Packets............. %u\n",
- (unsigned int) payloadStats.frameSizeStats[k].numPackets);
- printf("Duration...................... %0.3f sec\n\n",
- payloadStats.frameSizeStats[k].usageLenSec);
-
- }
-
-}
-
-uint32_t Channel::LastInTimestamp() {
- uint32_t timestamp;
- _channelCritSect->Enter();
- timestamp = _lastInTimestamp;
- _channelCritSect->Leave();
- return timestamp;
-}
-
-double Channel::BitRate() {
- double rate;
- uint64_t currTime = TickTime::MillisecondTimestamp();
- _channelCritSect->Enter();
- rate = ((double) _totalBytes * 8.0) / (double) (currTime - _beginTime);
- _channelCritSect->Leave();
- return rate;
-}
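A units check for BitRate() above: (bytes * 8) / elapsed-milliseconds yields bits per millisecond, which is numerically kilobits per second, hence the plain "kbps" labels where APITest prints the result.

  uint64_t total_bytes = 30000;  // sent over the measurement window
  uint64_t elapsed_ms = 10000;   // a 10-second window
  double kbps = (total_bytes * 8.0) / elapsed_ms;  // 240000 / 10000 = 24 kbps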
-
-} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/main/test/Channel.h b/webrtc/modules/audio_coding/main/test/Channel.h
deleted file mode 100644
index 39d4dabd98..0000000000
--- a/webrtc/modules/audio_coding/main/test/Channel.h
+++ /dev/null
@@ -1,130 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_CHANNEL_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_CHANNEL_H_
-
-#include <stdio.h>
-
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-class CriticalSectionWrapper;
-
-#define MAX_NUM_PAYLOADS 50
-#define MAX_NUM_FRAMESIZES 6
-
-// TODO(turajs): Write constructor for this structure.
-struct ACMTestFrameSizeStats {
- uint16_t frameSizeSample;
- size_t maxPayloadLen;
- uint32_t numPackets;
- uint64_t totalPayloadLenByte;
- uint64_t totalEncodedSamples;
- double rateBitPerSec;
- double usageLenSec;
-};
-
-// TODO(turajs): Write constructor for this structure.
-struct ACMTestPayloadStats {
- bool newPacket;
- int16_t payloadType;
- size_t lastPayloadLenByte;
- uint32_t lastTimestamp;
- ACMTestFrameSizeStats frameSizeStats[MAX_NUM_FRAMESIZES];
-};
-
-class Channel : public AudioPacketizationCallback {
- public:
-
- Channel(int16_t chID = -1);
- ~Channel();
-
- int32_t SendData(FrameType frameType,
- uint8_t payloadType,
- uint32_t timeStamp,
- const uint8_t* payloadData,
- size_t payloadSize,
- const RTPFragmentationHeader* fragmentation) override;
-
- void RegisterReceiverACM(AudioCodingModule *acm);
-
- void ResetStats();
-
- int16_t Stats(CodecInst& codecInst, ACMTestPayloadStats& payloadStats);
-
- void Stats(uint32_t* numPackets);
-
- void Stats(uint8_t* payloadType, uint32_t* payloadLenByte);
-
- void PrintStats(CodecInst& codecInst);
-
- void SetIsStereo(bool isStereo) {
- _isStereo = isStereo;
- }
-
- uint32_t LastInTimestamp();
-
- void SetFECTestWithPacketLoss(bool usePacketLoss) {
- _useFECTestWithPacketLoss = usePacketLoss;
- }
-
- double BitRate();
-
- void set_send_timestamp(uint32_t new_send_ts) {
- external_send_timestamp_ = new_send_ts;
- }
-
- void set_sequence_number(uint16_t new_sequence_number) {
- external_sequence_number_ = new_sequence_number;
- }
-
- void set_num_packets_to_drop(int new_num_packets_to_drop) {
- num_packets_to_drop_ = new_num_packets_to_drop;
- }
-
- private:
- void CalcStatistics(WebRtcRTPHeader& rtpInfo, size_t payloadSize);
-
- AudioCodingModule* _receiverACM;
- uint16_t _seqNo;
-  // 60 ms * 32 samples(max)/ms * 2 descriptions (maybe) * 2 bytes/sample
-  // = 7680 bytes.
- uint8_t _payloadData[60 * 32 * 2 * 2];
-
- CriticalSectionWrapper* _channelCritSect;
- FILE* _bitStreamFile;
- bool _saveBitStream;
- int16_t _lastPayloadType;
- ACMTestPayloadStats _payloadStats[MAX_NUM_PAYLOADS];
- bool _isStereo;
- WebRtcRTPHeader _rtpInfo;
- bool _leftChannel;
- uint32_t _lastInTimestamp;
- bool _useLastFrameSize;
- uint32_t _lastFrameSizeSample;
- // FEC Test variables
- int16_t _packetLoss;
- bool _useFECTestWithPacketLoss;
- uint64_t _beginTime;
- uint64_t _totalBytes;
-
- // External timing info, defaulted to -1. Only used if they are
- // non-negative.
- int64_t external_send_timestamp_;
- int32_t external_sequence_number_;
- int num_packets_to_drop_;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_CHANNEL_H_
diff --git a/webrtc/modules/audio_coding/main/test/EncodeDecodeTest.cc b/webrtc/modules/audio_coding/main/test/EncodeDecodeTest.cc
deleted file mode 100644
index d062af0fb9..0000000000
--- a/webrtc/modules/audio_coding/main/test/EncodeDecodeTest.cc
+++ /dev/null
@@ -1,352 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/test/EncodeDecodeTest.h"
-
-#include <sstream>
-#include <stdio.h>
-#include <stdlib.h>
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/common_types.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/test/utility.h"
-#include "webrtc/system_wrappers/include/trace.h"
-#include "webrtc/test/testsupport/fileutils.h"
-
-namespace webrtc {
-
-TestPacketization::TestPacketization(RTPStream *rtpStream, uint16_t frequency)
- : _rtpStream(rtpStream),
- _frequency(frequency),
- _seqNo(0) {
-}
-
-TestPacketization::~TestPacketization() {
-}
-
-int32_t TestPacketization::SendData(
- const FrameType /* frameType */, const uint8_t payloadType,
- const uint32_t timeStamp, const uint8_t* payloadData,
- const size_t payloadSize,
- const RTPFragmentationHeader* /* fragmentation */) {
- _rtpStream->Write(payloadType, timeStamp, _seqNo++, payloadData, payloadSize,
- _frequency);
- return 1;
-}
-
-Sender::Sender()
- : _acm(NULL),
- _pcmFile(),
- _audioFrame(),
- _packetization(NULL) {
-}
-
-void Sender::Setup(AudioCodingModule *acm, RTPStream *rtpStream,
- std::string in_file_name, int sample_rate, int channels) {
- struct CodecInst sendCodec;
- int noOfCodecs = acm->NumberOfCodecs();
- int codecNo;
-
- // Open input file
- const std::string file_name = webrtc::test::ResourcePath(in_file_name, "pcm");
- _pcmFile.Open(file_name, sample_rate, "rb");
- if (channels == 2) {
- _pcmFile.ReadStereo(true);
- }
-
- // Set the codec for the current test.
- if ((testMode == 0) || (testMode == 1)) {
- // Set the codec id.
- codecNo = codeId;
- } else {
- // Choose codec on command line.
-    printf("List of supported codecs.\n");
- for (int n = 0; n < noOfCodecs; n++) {
- EXPECT_EQ(0, acm->Codec(n, &sendCodec));
- printf("%d %s\n", n, sendCodec.plname);
- }
- printf("Choose your codec:");
- ASSERT_GT(scanf("%d", &codecNo), 0);
- }
-
- EXPECT_EQ(0, acm->Codec(codecNo, &sendCodec));
-
- sendCodec.channels = channels;
-
- EXPECT_EQ(0, acm->RegisterSendCodec(sendCodec));
- _packetization = new TestPacketization(rtpStream, sendCodec.plfreq);
- EXPECT_EQ(0, acm->RegisterTransportCallback(_packetization));
-
- _acm = acm;
-}
-
-void Sender::Teardown() {
- _pcmFile.Close();
- delete _packetization;
-}
-
-bool Sender::Add10MsData() {
- if (!_pcmFile.EndOfFile()) {
- EXPECT_GT(_pcmFile.Read10MsData(_audioFrame), 0);
- int32_t ok = _acm->Add10MsData(_audioFrame);
- EXPECT_GE(ok, 0);
-    return ok >= 0;
- }
- return false;
-}
-
-void Sender::Run() {
- while (true) {
- if (!Add10MsData()) {
- break;
- }
- }
-}
-
-Receiver::Receiver()
- : _playoutLengthSmpls(WEBRTC_10MS_PCM_AUDIO),
- _payloadSizeBytes(MAX_INCOMING_PAYLOAD) {
-}
-
-void Receiver::Setup(AudioCodingModule *acm, RTPStream *rtpStream,
- std::string out_file_name, int channels) {
- struct CodecInst recvCodec = CodecInst();
- int noOfCodecs;
- EXPECT_EQ(0, acm->InitializeReceiver());
-
- noOfCodecs = acm->NumberOfCodecs();
- for (int i = 0; i < noOfCodecs; i++) {
- EXPECT_EQ(0, acm->Codec(i, &recvCodec));
- if (recvCodec.channels == channels)
- EXPECT_EQ(0, acm->RegisterReceiveCodec(recvCodec));
- // Forces mono/stereo for Opus.
- if (!strcmp(recvCodec.plname, "opus")) {
- recvCodec.channels = channels;
- EXPECT_EQ(0, acm->RegisterReceiveCodec(recvCodec));
- }
- }
-
- int playSampFreq;
- std::string file_name;
- std::stringstream file_stream;
- file_stream << webrtc::test::OutputPath() << out_file_name
- << static_cast<int>(codeId) << ".pcm";
- file_name = file_stream.str();
- _rtpStream = rtpStream;
-
- if (testMode == 1) {
- playSampFreq = recvCodec.plfreq;
- _pcmFile.Open(file_name, recvCodec.plfreq, "wb+");
- } else if (testMode == 0) {
- playSampFreq = 32000;
- _pcmFile.Open(file_name, 32000, "wb+");
- } else {
- printf("\nValid output frequencies:\n");
- printf("8000\n16000\n32000\n-1,");
- printf("which means output frequency equal to received signal frequency");
- printf("\n\nChoose output sampling frequency: ");
- ASSERT_GT(scanf("%d", &playSampFreq), 0);
- file_name = webrtc::test::OutputPath() + out_file_name + ".pcm";
- _pcmFile.Open(file_name, playSampFreq, "wb+");
- }
-
- _realPayloadSizeBytes = 0;
- _playoutBuffer = new int16_t[WEBRTC_10MS_PCM_AUDIO];
- _frequency = playSampFreq;
- _acm = acm;
- _firstTime = true;
-}
-
-void Receiver::Teardown() {
- delete[] _playoutBuffer;
- _pcmFile.Close();
- if (testMode > 1) {
- Trace::ReturnTrace();
- }
-}
-
-bool Receiver::IncomingPacket() {
- if (!_rtpStream->EndOfFile()) {
- if (_firstTime) {
- _firstTime = false;
- _realPayloadSizeBytes = _rtpStream->Read(&_rtpInfo, _incomingPayload,
- _payloadSizeBytes, &_nextTime);
- if (_realPayloadSizeBytes == 0) {
- if (_rtpStream->EndOfFile()) {
- _firstTime = true;
- return true;
- } else {
- return false;
- }
- }
- }
-
- EXPECT_EQ(0, _acm->IncomingPacket(_incomingPayload, _realPayloadSizeBytes,
- _rtpInfo));
- _realPayloadSizeBytes = _rtpStream->Read(&_rtpInfo, _incomingPayload,
- _payloadSizeBytes, &_nextTime);
- if (_realPayloadSizeBytes == 0 && _rtpStream->EndOfFile()) {
- _firstTime = true;
- }
- }
- return true;
-}
-
-bool Receiver::PlayoutData() {
- AudioFrame audioFrame;
-
-  int32_t ok = _acm->PlayoutData10Ms(_frequency, &audioFrame);
-  EXPECT_EQ(0, ok);
-  if (ok < 0) {
- return false;
- }
- if (_playoutLengthSmpls == 0) {
- return false;
- }
- _pcmFile.Write10MsData(audioFrame.data_,
- audioFrame.samples_per_channel_ * audioFrame.num_channels_);
- return true;
-}
-
-void Receiver::Run() {
- uint8_t counter500Ms = 50;
- uint32_t clock = 0;
-
- while (counter500Ms > 0) {
- if (clock == 0 || clock >= _nextTime) {
- EXPECT_TRUE(IncomingPacket());
- if (clock == 0) {
- clock = _nextTime;
- }
- }
- if ((clock % 10) == 0) {
- if (!PlayoutData()) {
- clock++;
- continue;
- }
- }
- if (_rtpStream->EndOfFile()) {
- counter500Ms--;
- }
- clock++;
- }
-}
-
-EncodeDecodeTest::EncodeDecodeTest() {
- _testMode = 2;
- Trace::CreateTrace();
- Trace::SetTraceFile(
- (webrtc::test::OutputPath() + "acm_encdec_trace.txt").c_str());
-}
-
-EncodeDecodeTest::EncodeDecodeTest(int testMode) {
-  // testMode == 0 for autotest
-  // testMode == 1 for testing all codecs/parameters
-  // testMode > 1 for specific user-input test (as it was used before)
- _testMode = testMode;
- if (_testMode != 0) {
- Trace::CreateTrace();
- Trace::SetTraceFile(
- (webrtc::test::OutputPath() + "acm_encdec_trace.txt").c_str());
- }
-}
-
-void EncodeDecodeTest::Perform() {
- int numCodecs = 1;
- int codePars[3]; // Frequency, packet size, rate.
-  int numPars[52];  // Number of codec parameter sets (freq, pacsize, rate)
- // to test, for a given codec.
-
- codePars[0] = 0;
- codePars[1] = 0;
- codePars[2] = 0;
-
- rtc::scoped_ptr<AudioCodingModule> acm(AudioCodingModule::Create(0));
- struct CodecInst sendCodecTmp;
- numCodecs = acm->NumberOfCodecs();
-
- if (_testMode != 2) {
- for (int n = 0; n < numCodecs; n++) {
- EXPECT_EQ(0, acm->Codec(n, &sendCodecTmp));
- if (STR_CASE_CMP(sendCodecTmp.plname, "telephone-event") == 0) {
- numPars[n] = 0;
- } else if (STR_CASE_CMP(sendCodecTmp.plname, "cn") == 0) {
- numPars[n] = 0;
- } else if (STR_CASE_CMP(sendCodecTmp.plname, "red") == 0) {
- numPars[n] = 0;
- } else if (sendCodecTmp.channels == 2) {
- numPars[n] = 0;
- } else {
- numPars[n] = 1;
- }
- }
- } else {
- numCodecs = 1;
- numPars[0] = 1;
- }
-
- _receiver.testMode = _testMode;
-
- // Loop over all mono codecs:
- for (int codeId = 0; codeId < numCodecs; codeId++) {
- // Only encode using real mono encoders, not telephone-event and cng.
- for (int loopPars = 1; loopPars <= numPars[codeId]; loopPars++) {
- // Encode all data to file.
- std::string fileName = EncodeToFile(1, codeId, codePars, _testMode);
-
- RTPFile rtpFile;
- rtpFile.Open(fileName.c_str(), "rb");
-
- _receiver.codeId = codeId;
-
- rtpFile.ReadHeader();
- _receiver.Setup(acm.get(), &rtpFile, "encodeDecode_out", 1);
- _receiver.Run();
- _receiver.Teardown();
- rtpFile.Close();
- }
- }
-
- // End tracing.
- if (_testMode == 1) {
- Trace::ReturnTrace();
- }
-}
-
-std::string EncodeDecodeTest::EncodeToFile(int fileType,
- int codeId,
- int* codePars,
- int testMode) {
- rtc::scoped_ptr<AudioCodingModule> acm(AudioCodingModule::Create(1));
- RTPFile rtpFile;
- std::string fileName = webrtc::test::TempFilename(webrtc::test::OutputPath(),
- "encode_decode_rtp");
- rtpFile.Open(fileName.c_str(), "wb+");
- rtpFile.WriteHeader();
-
- // Store for auto_test and logging.
- _sender.testMode = testMode;
- _sender.codeId = codeId;
-
- _sender.Setup(acm.get(), &rtpFile, "audio_coding/testfile32kHz", 32000, 1);
- struct CodecInst sendCodecInst;
- if (acm->SendCodec(&sendCodecInst) >= 0) {
- _sender.Run();
- }
- _sender.Teardown();
- rtpFile.Close();
-
- return fileName;
-}
-
-} // namespace webrtc
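Receiver::Run() in the file deleted above drives decoding from a simulated clock: packets are fed to the ACM whenever the stream's next due time is reached, one 10 ms frame is played out every tenth tick, and a countdown keeps the loop alive after end-of-file so buffered audio can drain. A stripped-down sketch of that loop shape, with generic callbacks standing in for the WebRTC calls:

    #include <cstdint>
    #include <functional>

    // Sketch of the simulated-clock pump in the deleted Receiver::Run().
    // The callbacks are placeholders for RTP reading and 10 ms playout.
    void RunSimulatedClock(const std::function<void(uint32_t*)>& feed_due_packets,
                           const std::function<void()>& playout_10ms,
                           const std::function<bool()>& end_of_stream) {
      uint32_t clock = 0;
      uint32_t next_packet_time = 0;
      int drain_ticks = 50;  // Mirrors counter500Ms in the deleted code.
      while (drain_ticks > 0) {
        if (clock == 0 || clock >= next_packet_time) {
          feed_due_packets(&next_packet_time);  // Updates the next due time.
        }
        if (clock % 10 == 0) {
          playout_10ms();  // Pull one 10 ms frame from the jitter buffer.
        }
        if (end_of_stream()) {
          --drain_ticks;
        }
        ++clock;
      }
    }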
diff --git a/webrtc/modules/audio_coding/main/test/EncodeDecodeTest.h b/webrtc/modules/audio_coding/main/test/EncodeDecodeTest.h
deleted file mode 100644
index 4ad92cec15..0000000000
--- a/webrtc/modules/audio_coding/main/test/EncodeDecodeTest.h
+++ /dev/null
@@ -1,123 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_ENCODEDECODETEST_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_ENCODEDECODETEST_H_
-
-#include <stdio.h>
-#include <string.h>
-
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module.h"
-#include "webrtc/modules/audio_coding/main/test/ACMTest.h"
-#include "webrtc/modules/audio_coding/main/test/PCMFile.h"
-#include "webrtc/modules/audio_coding/main/test/RTPFile.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-#define MAX_INCOMING_PAYLOAD 8096
-
-// TestPacketization callback which writes the encoded payloads to file
-class TestPacketization : public AudioPacketizationCallback {
- public:
- TestPacketization(RTPStream *rtpStream, uint16_t frequency);
- ~TestPacketization();
- int32_t SendData(const FrameType frameType,
- const uint8_t payloadType,
- const uint32_t timeStamp,
- const uint8_t* payloadData,
- const size_t payloadSize,
- const RTPFragmentationHeader* fragmentation) override;
-
- private:
- static void MakeRTPheader(uint8_t* rtpHeader, uint8_t payloadType,
- int16_t seqNo, uint32_t timeStamp, uint32_t ssrc);
- RTPStream* _rtpStream;
- int32_t _frequency;
- int16_t _seqNo;
-};
-
-class Sender {
- public:
- Sender();
- void Setup(AudioCodingModule *acm, RTPStream *rtpStream,
- std::string in_file_name, int sample_rate, int channels);
- void Teardown();
- void Run();
- bool Add10MsData();
-
-  // For auto_test and logging.
- uint8_t testMode;
- uint8_t codeId;
-
- protected:
- AudioCodingModule* _acm;
-
- private:
- PCMFile _pcmFile;
- AudioFrame _audioFrame;
- TestPacketization* _packetization;
-};
-
-class Receiver {
- public:
- Receiver();
-  virtual ~Receiver() {}
- void Setup(AudioCodingModule *acm, RTPStream *rtpStream,
- std::string out_file_name, int channels);
- void Teardown();
- void Run();
- virtual bool IncomingPacket();
- bool PlayoutData();
-
-  // For auto_test and logging.
- uint8_t codeId;
- uint8_t testMode;
-
- private:
- PCMFile _pcmFile;
- int16_t* _playoutBuffer;
- uint16_t _playoutLengthSmpls;
- int32_t _frequency;
- bool _firstTime;
-
- protected:
- AudioCodingModule* _acm;
- uint8_t _incomingPayload[MAX_INCOMING_PAYLOAD];
- RTPStream* _rtpStream;
- WebRtcRTPHeader _rtpInfo;
- size_t _realPayloadSizeBytes;
- size_t _payloadSizeBytes;
- uint32_t _nextTime;
-};
-
-class EncodeDecodeTest : public ACMTest {
- public:
- EncodeDecodeTest();
- explicit EncodeDecodeTest(int testMode);
- void Perform() override;
-
- uint16_t _playoutFreq;
- uint8_t _testMode;
-
- private:
- std::string EncodeToFile(int fileType,
- int codeId,
- int* codePars,
- int testMode);
-
- protected:
- Sender _sender;
- Receiver _receiver;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_ENCODEDECODETEST_H_
diff --git a/webrtc/modules/audio_coding/main/test/PCMFile.cc b/webrtc/modules/audio_coding/main/test/PCMFile.cc
deleted file mode 100644
index d0ae7830de..0000000000
--- a/webrtc/modules/audio_coding/main/test/PCMFile.cc
+++ /dev/null
@@ -1,204 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "PCMFile.h"
-
-#include <ctype.h>
-#include <stdio.h>
-#include <string.h>
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/interface/module_common_types.h"
-
-namespace webrtc {
-
-#define MAX_FILE_NAME_LENGTH_BYTE 500
-
-PCMFile::PCMFile()
- : pcm_file_(NULL),
- samples_10ms_(160),
- frequency_(16000),
- end_of_file_(false),
- auto_rewind_(false),
- rewinded_(false),
- read_stereo_(false),
- save_stereo_(false) {
-  // Compose a random 32-bit timestamp from two 16-bit halves; RAND_MAX is
-  // only guaranteed to be at least 32767.
-  timestamp_ = (((uint32_t) rand() & 0x0000FFFF) << 16) |
-      ((uint32_t) rand() & 0x0000FFFF);
-}
-
-PCMFile::PCMFile(uint32_t timestamp)
- : pcm_file_(NULL),
- samples_10ms_(160),
- frequency_(16000),
- end_of_file_(false),
- auto_rewind_(false),
- rewinded_(false),
- read_stereo_(false),
- save_stereo_(false) {
- timestamp_ = timestamp;
-}
-
-int16_t PCMFile::ChooseFile(std::string* file_name, int16_t max_len,
- uint16_t* frequency_hz) {
- char tmp_name[MAX_FILE_NAME_LENGTH_BYTE];
-
- EXPECT_TRUE(fgets(tmp_name, MAX_FILE_NAME_LENGTH_BYTE, stdin) != NULL);
- tmp_name[MAX_FILE_NAME_LENGTH_BYTE - 1] = '\0';
- int16_t n = 0;
-
-  // Remove leading whitespace and control characters.
- while ((isspace(tmp_name[n]) || iscntrl(tmp_name[n])) && (tmp_name[n] != 0)
- && (n < MAX_FILE_NAME_LENGTH_BYTE)) {
- n++;
- }
- if (n > 0) {
- memmove(tmp_name, &tmp_name[n], MAX_FILE_NAME_LENGTH_BYTE - n);
- }
-
-  // Remove trailing whitespace and control characters.
- n = (int16_t)(strlen(tmp_name) - 1);
- if (n >= 0) {
- while ((isspace(tmp_name[n]) || iscntrl(tmp_name[n])) && (n >= 0)) {
- n--;
- }
- }
- if (n >= 0) {
- tmp_name[n + 1] = '\0';
- }
-
- int16_t len = (int16_t) strlen(tmp_name);
- if (len > max_len) {
- return -1;
- }
-  if (len > 0) {
-    // Copy exactly |len| characters; do not include the terminating null.
-    std::string tmp_string(tmp_name, len);
-    *file_name = tmp_string;
-  }
- printf("Enter the sampling frequency (in Hz) of the above file [%u]: ",
- *frequency_hz);
- EXPECT_TRUE(fgets(tmp_name, 10, stdin) != NULL);
- uint16_t tmp_frequency = (uint16_t) atoi(tmp_name);
- if (tmp_frequency > 0) {
- *frequency_hz = tmp_frequency;
- }
- return 0;
-}
-
-void PCMFile::Open(const std::string& file_name, uint16_t frequency,
- const char* mode, bool auto_rewind) {
- if ((pcm_file_ = fopen(file_name.c_str(), mode)) == NULL) {
- printf("Cannot open file %s.\n", file_name.c_str());
- ADD_FAILURE() << "Unable to read file";
- }
- frequency_ = frequency;
- samples_10ms_ = (uint16_t)(frequency_ / 100);
- auto_rewind_ = auto_rewind;
- end_of_file_ = false;
- rewinded_ = false;
-}
-
-int32_t PCMFile::SamplingFrequency() const {
- return frequency_;
-}
-
-uint16_t PCMFile::PayloadLength10Ms() const {
- return samples_10ms_;
-}
-
-int32_t PCMFile::Read10MsData(AudioFrame& audio_frame) {
- uint16_t channels = 1;
- if (read_stereo_) {
- channels = 2;
- }
-
- int32_t payload_size = (int32_t) fread(audio_frame.data_, sizeof(uint16_t),
- samples_10ms_ * channels, pcm_file_);
- if (payload_size < samples_10ms_ * channels) {
- for (int k = payload_size; k < samples_10ms_ * channels; k++) {
- audio_frame.data_[k] = 0;
- }
- if (auto_rewind_) {
- rewind(pcm_file_);
- rewinded_ = true;
- } else {
- end_of_file_ = true;
- }
- }
- audio_frame.samples_per_channel_ = samples_10ms_;
- audio_frame.sample_rate_hz_ = frequency_;
- audio_frame.num_channels_ = channels;
- audio_frame.timestamp_ = timestamp_;
- timestamp_ += samples_10ms_;
- return samples_10ms_;
-}
-
-void PCMFile::Write10MsData(AudioFrame& audio_frame) {
- if (audio_frame.num_channels_ == 1) {
- if (!save_stereo_) {
- if (fwrite(audio_frame.data_, sizeof(uint16_t),
- audio_frame.samples_per_channel_, pcm_file_) !=
- static_cast<size_t>(audio_frame.samples_per_channel_)) {
- return;
- }
-    } else {
-      int16_t* stereo_audio = new int16_t[2 * audio_frame.samples_per_channel_];
-      for (size_t k = 0; k < audio_frame.samples_per_channel_; k++) {
-        stereo_audio[k << 1] = audio_frame.data_[k];
-        stereo_audio[(k << 1) + 1] = audio_frame.data_[k];
-      }
-      // Free the temporary buffer before any early return to avoid a leak.
-      size_t written = fwrite(stereo_audio, sizeof(int16_t),
-                              2 * audio_frame.samples_per_channel_, pcm_file_);
-      delete[] stereo_audio;
-      if (written != 2 * audio_frame.samples_per_channel_) {
-        return;
-      }
-    }
- } else {
- if (fwrite(audio_frame.data_, sizeof(int16_t),
- audio_frame.num_channels_ * audio_frame.samples_per_channel_,
- pcm_file_) !=
- static_cast<size_t>(audio_frame.num_channels_ *
- audio_frame.samples_per_channel_)) {
- return;
- }
- }
-}
-
-void PCMFile::Write10MsData(int16_t* playout_buffer, size_t length_smpls) {
- if (fwrite(playout_buffer, sizeof(uint16_t), length_smpls, pcm_file_) !=
- length_smpls) {
- return;
- }
-}
-
-void PCMFile::Close() {
- fclose(pcm_file_);
- pcm_file_ = NULL;
-}
-
-void PCMFile::Rewind() {
- rewind(pcm_file_);
- end_of_file_ = false;
-}
-
-bool PCMFile::Rewinded() {
- return rewinded_;
-}
-
-void PCMFile::SaveStereo(bool is_stereo) {
- save_stereo_ = is_stereo;
-}
-
-void PCMFile::ReadStereo(bool is_stereo) {
- read_stereo_ = is_stereo;
-}
-
-} // namespace webrtc
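When save_stereo_ is set, PCMFile::Write10MsData() above up-mixes mono input by duplicating every sample into an interleaved left/right pair. The same duplication in isolation, as a small compilable sketch:

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Duplicate a mono 16-bit PCM buffer into interleaved stereo (L R L R ...),
    // as the deleted PCMFile::Write10MsData() does before writing to disk.
    std::vector<int16_t> MonoToInterleavedStereo(const std::vector<int16_t>& mono) {
      std::vector<int16_t> stereo(2 * mono.size());
      for (std::size_t k = 0; k < mono.size(); ++k) {
        stereo[2 * k] = mono[k];      // Left channel.
        stereo[2 * k + 1] = mono[k];  // Right channel.
      }
      return stereo;
    }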
diff --git a/webrtc/modules/audio_coding/main/test/PCMFile.h b/webrtc/modules/audio_coding/main/test/PCMFile.h
deleted file mode 100644
index 8353898f03..0000000000
--- a/webrtc/modules/audio_coding/main/test/PCMFile.h
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_PCMFILE_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_PCMFILE_H_
-
-#include <stdio.h>
-#include <stdlib.h>
-
-#include <string>
-
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-class PCMFile {
- public:
- PCMFile();
- PCMFile(uint32_t timestamp);
- ~PCMFile() {
- if (pcm_file_ != NULL) {
- fclose(pcm_file_);
- }
- }
-
- void Open(const std::string& filename, uint16_t frequency, const char* mode,
- bool auto_rewind = false);
-
- int32_t Read10MsData(AudioFrame& audio_frame);
-
- void Write10MsData(int16_t *playout_buffer, size_t length_smpls);
- void Write10MsData(AudioFrame& audio_frame);
-
- uint16_t PayloadLength10Ms() const;
- int32_t SamplingFrequency() const;
- void Close();
- bool EndOfFile() const {
- return end_of_file_;
- }
- void Rewind();
- static int16_t ChooseFile(std::string* file_name, int16_t max_len,
- uint16_t* frequency_hz);
- bool Rewinded();
- void SaveStereo(bool is_stereo = true);
- void ReadStereo(bool is_stereo = true);
- private:
- FILE* pcm_file_;
- uint16_t samples_10ms_;
- int32_t frequency_;
- bool end_of_file_;
- bool auto_rewind_;
- bool rewinded_;
- uint32_t timestamp_;
- bool read_stereo_;
- bool save_stereo_;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_PCMFILE_H_
diff --git a/webrtc/modules/audio_coding/main/test/PacketLossTest.cc b/webrtc/modules/audio_coding/main/test/PacketLossTest.cc
deleted file mode 100644
index f19d491d2d..0000000000
--- a/webrtc/modules/audio_coding/main/test/PacketLossTest.cc
+++ /dev/null
@@ -1,168 +0,0 @@
-/*
- * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/test/PacketLossTest.h"
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/common.h"
-#include "webrtc/test/testsupport/fileutils.h"
-
-namespace webrtc {
-
-ReceiverWithPacketLoss::ReceiverWithPacketLoss()
- : loss_rate_(0),
- burst_length_(1),
- packet_counter_(0),
- lost_packet_counter_(0),
- burst_lost_counter_(burst_length_) {
-}
-
-void ReceiverWithPacketLoss::Setup(AudioCodingModule *acm,
- RTPStream *rtpStream,
- std::string out_file_name,
- int channels,
- int loss_rate,
- int burst_length) {
- loss_rate_ = loss_rate;
- burst_length_ = burst_length;
-  burst_lost_counter_ = burst_length_;  // So the first packet is not lost.
- std::stringstream ss;
- ss << out_file_name << "_" << loss_rate_ << "_" << burst_length_ << "_";
- Receiver::Setup(acm, rtpStream, ss.str(), channels);
-}
-
-bool ReceiverWithPacketLoss::IncomingPacket() {
- if (!_rtpStream->EndOfFile()) {
- if (packet_counter_ == 0) {
- _realPayloadSizeBytes = _rtpStream->Read(&_rtpInfo, _incomingPayload,
- _payloadSizeBytes, &_nextTime);
- if (_realPayloadSizeBytes == 0) {
- if (_rtpStream->EndOfFile()) {
- packet_counter_ = 0;
- return true;
- } else {
- return false;
- }
- }
- }
-
- if (!PacketLost()) {
- _acm->IncomingPacket(_incomingPayload, _realPayloadSizeBytes, _rtpInfo);
- }
- packet_counter_++;
- _realPayloadSizeBytes = _rtpStream->Read(&_rtpInfo, _incomingPayload,
- _payloadSizeBytes, &_nextTime);
- if (_realPayloadSizeBytes == 0 && _rtpStream->EndOfFile()) {
- packet_counter_ = 0;
- lost_packet_counter_ = 0;
- }
- }
- return true;
-}
-
-bool ReceiverWithPacketLoss::PacketLost() {
- if (burst_lost_counter_ < burst_length_) {
- lost_packet_counter_++;
- burst_lost_counter_++;
- return true;
- }
-
- if (lost_packet_counter_ * 100 < loss_rate_ * packet_counter_) {
- lost_packet_counter_++;
- burst_lost_counter_ = 1;
- return true;
- }
- return false;
-}
-
-SenderWithFEC::SenderWithFEC()
- : expected_loss_rate_(0) {
-}
-
-void SenderWithFEC::Setup(AudioCodingModule *acm, RTPStream *rtpStream,
- std::string in_file_name, int sample_rate,
- int channels, int expected_loss_rate) {
- Sender::Setup(acm, rtpStream, in_file_name, sample_rate, channels);
- EXPECT_TRUE(SetFEC(true));
- EXPECT_TRUE(SetPacketLossRate(expected_loss_rate));
-}
-
-bool SenderWithFEC::SetFEC(bool enable_fec) {
-  return _acm->SetCodecFEC(enable_fec) == 0;
-}
-
-bool SenderWithFEC::SetPacketLossRate(int expected_loss_rate) {
- if (_acm->SetPacketLossRate(expected_loss_rate) == 0) {
- expected_loss_rate_ = expected_loss_rate;
- return true;
- }
- return false;
-}
-
-PacketLossTest::PacketLossTest(int channels, int expected_loss_rate,
- int actual_loss_rate, int burst_length)
- : channels_(channels),
- in_file_name_(channels_ == 1 ? "audio_coding/testfile32kHz" :
- "audio_coding/teststereo32kHz"),
- sample_rate_hz_(32000),
- sender_(new SenderWithFEC),
- receiver_(new ReceiverWithPacketLoss),
- expected_loss_rate_(expected_loss_rate),
- actual_loss_rate_(actual_loss_rate),
- burst_length_(burst_length) {
-}
-
-void PacketLossTest::Perform() {
-#ifndef WEBRTC_CODEC_OPUS
- return;
-#else
- rtc::scoped_ptr<AudioCodingModule> acm(AudioCodingModule::Create(0));
-
- int codec_id = acm->Codec("opus", 48000, channels_);
-
- RTPFile rtpFile;
- std::string fileName = webrtc::test::TempFilename(webrtc::test::OutputPath(),
- "packet_loss_test");
-
- // Encode to file
- rtpFile.Open(fileName.c_str(), "wb+");
- rtpFile.WriteHeader();
-
- sender_->testMode = 0;
- sender_->codeId = codec_id;
-
- sender_->Setup(acm.get(), &rtpFile, in_file_name_, sample_rate_hz_, channels_,
- expected_loss_rate_);
- struct CodecInst sendCodecInst;
- if (acm->SendCodec(&sendCodecInst) >= 0) {
- sender_->Run();
- }
- sender_->Teardown();
- rtpFile.Close();
-
- // Decode to file
- rtpFile.Open(fileName.c_str(), "rb");
- rtpFile.ReadHeader();
-
- receiver_->testMode = 0;
- receiver_->codeId = codec_id;
-
- receiver_->Setup(acm.get(), &rtpFile, "packetLoss_out", channels_,
- actual_loss_rate_, burst_length_);
- receiver_->Run();
- receiver_->Teardown();
- rtpFile.Close();
-#endif
-}
-
-} // namespace webrtc
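PacketLost() above implements a simple bursty loss model: once a burst starts, the following burst_length_ - 1 packets are dropped too, and a new burst begins whenever the running loss ratio falls below the target (lost * 100 < loss_rate * received). With loss_rate = 10 and burst_length = 2, for instance, losses arrive in pairs roughly every twenty packets. A standalone restatement of the same decision logic:

    // Standalone version of the burst-loss decision in the deleted
    // ReceiverWithPacketLoss::PacketLost(). loss_rate is in percent.
    struct BurstLossModel {
      int loss_rate;
      int burst_length;
      int packet_counter = 0;
      int lost_packet_counter = 0;
      int burst_lost_counter;  // Starts at burst_length so packet 0 survives.

      BurstLossModel(int rate, int burst)
          : loss_rate(rate), burst_length(burst), burst_lost_counter(burst) {}

      bool NextPacketLost() {
        bool lost = false;
        if (burst_lost_counter < burst_length) {  // Continue an ongoing burst.
          ++lost_packet_counter;
          ++burst_lost_counter;
          lost = true;
        } else if (lost_packet_counter * 100 < loss_rate * packet_counter) {
          ++lost_packet_counter;  // Start a new burst.
          burst_lost_counter = 1;
          lost = true;
        }
        ++packet_counter;  // Counted after the decision, as in the original.
        return lost;
      }
    };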
diff --git a/webrtc/modules/audio_coding/main/test/PacketLossTest.h b/webrtc/modules/audio_coding/main/test/PacketLossTest.h
deleted file mode 100644
index d25dea264f..0000000000
--- a/webrtc/modules/audio_coding/main/test/PacketLossTest.h
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_PACKETLOSSTEST_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_PACKETLOSSTEST_H_
-
-#include <string>
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/audio_coding/main/test/EncodeDecodeTest.h"
-
-namespace webrtc {
-
-class ReceiverWithPacketLoss : public Receiver {
- public:
- ReceiverWithPacketLoss();
- void Setup(AudioCodingModule *acm, RTPStream *rtpStream,
- std::string out_file_name, int channels, int loss_rate,
- int burst_length);
- bool IncomingPacket() override;
-
- protected:
- bool PacketLost();
- int loss_rate_;
- int burst_length_;
- int packet_counter_;
- int lost_packet_counter_;
- int burst_lost_counter_;
-};
-
-class SenderWithFEC : public Sender {
- public:
- SenderWithFEC();
- void Setup(AudioCodingModule *acm, RTPStream *rtpStream,
- std::string in_file_name, int sample_rate, int channels,
- int expected_loss_rate);
- bool SetPacketLossRate(int expected_loss_rate);
- bool SetFEC(bool enable_fec);
- protected:
- int expected_loss_rate_;
-};
-
-class PacketLossTest : public ACMTest {
- public:
-  PacketLossTest(int channels, int expected_loss_rate, int actual_loss_rate,
- int burst_length);
- void Perform();
- protected:
- int channels_;
- std::string in_file_name_;
- int sample_rate_hz_;
- rtc::scoped_ptr<SenderWithFEC> sender_;
- rtc::scoped_ptr<ReceiverWithPacketLoss> receiver_;
- int expected_loss_rate_;
- int actual_loss_rate_;
- int burst_length_;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_PACKETLOSSTEST_H_
diff --git a/webrtc/modules/audio_coding/main/test/RTPFile.h b/webrtc/modules/audio_coding/main/test/RTPFile.h
deleted file mode 100644
index c79b63e164..0000000000
--- a/webrtc/modules/audio_coding/main/test/RTPFile.h
+++ /dev/null
@@ -1,126 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_RTPFILE_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_RTPFILE_H_
-
-#include <stdio.h>
-#include <queue>
-
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/system_wrappers/include/rw_lock_wrapper.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-class RTPStream {
- public:
- virtual ~RTPStream() {
- }
-
- virtual void Write(const uint8_t payloadType, const uint32_t timeStamp,
- const int16_t seqNo, const uint8_t* payloadData,
- const size_t payloadSize, uint32_t frequency) = 0;
-
-  // Returns the packet's payload size. Zero should be treated as
-  // end-of-stream (when EndOfFile() is true) or as an error.
- virtual size_t Read(WebRtcRTPHeader* rtpInfo, uint8_t* payloadData,
- size_t payloadSize, uint32_t* offset) = 0;
- virtual bool EndOfFile() const = 0;
-
- protected:
- void MakeRTPheader(uint8_t* rtpHeader, uint8_t payloadType, int16_t seqNo,
- uint32_t timeStamp, uint32_t ssrc);
-
- void ParseRTPHeader(WebRtcRTPHeader* rtpInfo, const uint8_t* rtpHeader);
-};
-
-class RTPPacket {
- public:
- RTPPacket(uint8_t payloadType, uint32_t timeStamp, int16_t seqNo,
- const uint8_t* payloadData, size_t payloadSize,
- uint32_t frequency);
-
- ~RTPPacket();
-
- uint8_t payloadType;
- uint32_t timeStamp;
- int16_t seqNo;
- uint8_t* payloadData;
- size_t payloadSize;
- uint32_t frequency;
-};
-
-class RTPBuffer : public RTPStream {
- public:
- RTPBuffer();
-
- ~RTPBuffer();
-
- void Write(const uint8_t payloadType,
- const uint32_t timeStamp,
- const int16_t seqNo,
- const uint8_t* payloadData,
- const size_t payloadSize,
- uint32_t frequency) override;
-
- size_t Read(WebRtcRTPHeader* rtpInfo,
- uint8_t* payloadData,
- size_t payloadSize,
- uint32_t* offset) override;
-
- bool EndOfFile() const override;
-
- private:
- RWLockWrapper* _queueRWLock;
- std::queue<RTPPacket *> _rtpQueue;
-};
-
-class RTPFile : public RTPStream {
- public:
- ~RTPFile() {
- }
-
- RTPFile()
- : _rtpFile(NULL),
- _rtpEOF(false) {
- }
-
- void Open(const char *outFilename, const char *mode);
-
- void Close();
-
- void WriteHeader();
-
- void ReadHeader();
-
- void Write(const uint8_t payloadType,
- const uint32_t timeStamp,
- const int16_t seqNo,
- const uint8_t* payloadData,
- const size_t payloadSize,
- uint32_t frequency) override;
-
- size_t Read(WebRtcRTPHeader* rtpInfo,
- uint8_t* payloadData,
- size_t payloadSize,
- uint32_t* offset) override;
-
- bool EndOfFile() const override { return _rtpEOF; }
-
- private:
- FILE* _rtpFile;
- bool _rtpEOF;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_RTPFILE_H_
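RTPStream::MakeRTPheader(), declared above, serializes the fixed 12-byte RTP header by hand; its implementation lives in RTPFile.cc, which is not part of this hunk, so the sketch below follows the RFC 3550 layout rather than the deleted code:

    #include <cstdint>

    // Minimal RTP fixed-header writer (RFC 3550, 12 bytes, no CSRC list):
    //   byte 0:     V=2, P=0, X=0, CC=0  -> 0x80
    //   byte 1:     M=0 plus the 7-bit payload type
    //   bytes 2-3:  sequence number  (network byte order)
    //   bytes 4-7:  timestamp        (network byte order)
    //   bytes 8-11: SSRC             (network byte order)
    void MakeRtpHeader(uint8_t* h, uint8_t payload_type, uint16_t seq_no,
                       uint32_t timestamp, uint32_t ssrc) {
      h[0] = 0x80;
      h[1] = payload_type & 0x7F;
      h[2] = static_cast<uint8_t>(seq_no >> 8);
      h[3] = static_cast<uint8_t>(seq_no);
      h[4] = static_cast<uint8_t>(timestamp >> 24);
      h[5] = static_cast<uint8_t>(timestamp >> 16);
      h[6] = static_cast<uint8_t>(timestamp >> 8);
      h[7] = static_cast<uint8_t>(timestamp);
      h[8] = static_cast<uint8_t>(ssrc >> 24);
      h[9] = static_cast<uint8_t>(ssrc >> 16);
      h[10] = static_cast<uint8_t>(ssrc >> 8);
      h[11] = static_cast<uint8_t>(ssrc);
    }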
diff --git a/webrtc/modules/audio_coding/main/test/SpatialAudio.cc b/webrtc/modules/audio_coding/main/test/SpatialAudio.cc
deleted file mode 100644
index 17d4fc88b2..0000000000
--- a/webrtc/modules/audio_coding/main/test/SpatialAudio.cc
+++ /dev/null
@@ -1,196 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <math.h>
-#include <stdio.h>
-#include <string.h>
-
-#include "webrtc/common_types.h"
-#include "webrtc/modules/audio_coding/main/test/SpatialAudio.h"
-#include "webrtc/system_wrappers/include/trace.h"
-#include "webrtc/system_wrappers/include/trace.h"
-#include "webrtc/test/testsupport/fileutils.h"
-
-namespace webrtc {
-
-#define NUM_PANN_COEFFS 10
-
-SpatialAudio::SpatialAudio(int testMode)
- : _acmLeft(AudioCodingModule::Create(1)),
- _acmRight(AudioCodingModule::Create(2)),
- _acmReceiver(AudioCodingModule::Create(3)),
- _testMode(testMode) {
-}
-
-SpatialAudio::~SpatialAudio() {
- delete _channel;
- _inFile.Close();
- _outFile.Close();
-}
-
-int16_t SpatialAudio::Setup() {
- _channel = new Channel;
-
- // Register callback for the sender side.
- CHECK_ERROR(_acmLeft->RegisterTransportCallback(_channel));
- CHECK_ERROR(_acmRight->RegisterTransportCallback(_channel));
- // Register the receiver ACM in channel
- _channel->RegisterReceiverACM(_acmReceiver.get());
-
- uint16_t sampFreqHz = 32000;
-
- const std::string file_name = webrtc::test::ResourcePath(
- "audio_coding/testfile32kHz", "pcm");
- _inFile.Open(file_name, sampFreqHz, "rb", false);
-
- std::string output_file = webrtc::test::OutputPath()
- + "out_spatial_autotest.pcm";
- if (_testMode == 1) {
- output_file = webrtc::test::OutputPath() + "testspatial_out.pcm";
- printf("\n");
- printf("Enter the output file [%s]: ", output_file.c_str());
- PCMFile::ChooseFile(&output_file, MAX_FILE_NAME_LENGTH_BYTE, &sampFreqHz);
- } else {
- output_file = webrtc::test::OutputPath() + "testspatial_out.pcm";
- }
- _outFile.Open(output_file, sampFreqHz, "wb", false);
- _outFile.SaveStereo(true);
-
-  // Register all available codecs as receiving codecs.
-  CodecInst codecInst;
-  int status;
-  uint8_t num_encoders = _acmReceiver->NumberOfCodecs();
- for (uint8_t n = 0; n < num_encoders; n++) {
- status = _acmReceiver->Codec(n, &codecInst);
- if (status < 0) {
- printf("Error in Codec(), no matching codec found");
- }
- status = _acmReceiver->RegisterReceiveCodec(codecInst);
- if (status < 0) {
- printf("Error in RegisterReceiveCodec() for payload type %d",
- codecInst.pltype);
- }
- }
-
- return 0;
-}
-
-void SpatialAudio::Perform() {
- if (_testMode == 0) {
- printf("Running SpatialAudio Test");
- WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceAudioCoding, -1,
- "---------- SpatialAudio ----------");
- }
-
- Setup();
-
- CodecInst codecInst;
- _acmLeft->Codec((uint8_t) 1, &codecInst);
- CHECK_ERROR(_acmLeft->RegisterSendCodec(codecInst));
- EncodeDecode();
-
- int16_t pannCntr = 0;
-
- double leftPanning[NUM_PANN_COEFFS] = { 1.00, 0.95, 0.90, 0.85, 0.80, 0.75,
- 0.70, 0.60, 0.55, 0.50 };
- double rightPanning[NUM_PANN_COEFFS] = { 0.50, 0.55, 0.60, 0.70, 0.75, 0.80,
- 0.85, 0.90, 0.95, 1.00 };
-
- while ((pannCntr + 1) < NUM_PANN_COEFFS) {
- _acmLeft->Codec((uint8_t) 0, &codecInst);
- codecInst.pacsize = 480;
- CHECK_ERROR(_acmLeft->RegisterSendCodec(codecInst));
- CHECK_ERROR(_acmRight->RegisterSendCodec(codecInst));
-
- EncodeDecode(leftPanning[pannCntr], rightPanning[pannCntr]);
- pannCntr++;
-
- // Change codec
- _acmLeft->Codec((uint8_t) 3, &codecInst);
- codecInst.pacsize = 320;
- CHECK_ERROR(_acmLeft->RegisterSendCodec(codecInst));
- CHECK_ERROR(_acmRight->RegisterSendCodec(codecInst));
-
- EncodeDecode(leftPanning[pannCntr], rightPanning[pannCntr]);
- pannCntr++;
- if (_testMode == 0) {
- printf(".");
- }
- }
-
- _acmLeft->Codec((uint8_t) 4, &codecInst);
- CHECK_ERROR(_acmLeft->RegisterSendCodec(codecInst));
- EncodeDecode();
-
- _acmLeft->Codec((uint8_t) 0, &codecInst);
- codecInst.pacsize = 480;
- CHECK_ERROR(_acmLeft->RegisterSendCodec(codecInst));
- CHECK_ERROR(_acmRight->RegisterSendCodec(codecInst));
- pannCntr = NUM_PANN_COEFFS - 1;
- while (pannCntr >= 0) {
- EncodeDecode(leftPanning[pannCntr], rightPanning[pannCntr]);
- pannCntr--;
- if (_testMode == 0) {
- printf(".");
- }
- }
- if (_testMode == 0) {
- printf("Done!\n");
- }
-}
-
-void SpatialAudio::EncodeDecode(const double leftPanning,
- const double rightPanning) {
- AudioFrame audioFrame;
- int32_t outFileSampFreq = _outFile.SamplingFrequency();
-
- const double rightToLeftRatio = rightPanning / leftPanning;
-
- _channel->SetIsStereo(true);
-
- while (!_inFile.EndOfFile()) {
- _inFile.Read10MsData(audioFrame);
- for (size_t n = 0; n < audioFrame.samples_per_channel_; n++) {
- audioFrame.data_[n] = (int16_t) floor(
- audioFrame.data_[n] * leftPanning + 0.5);
- }
- CHECK_ERROR(_acmLeft->Add10MsData(audioFrame));
-
- for (size_t n = 0; n < audioFrame.samples_per_channel_; n++) {
- audioFrame.data_[n] = (int16_t) floor(
- audioFrame.data_[n] * rightToLeftRatio + 0.5);
- }
- CHECK_ERROR(_acmRight->Add10MsData(audioFrame));
-
- CHECK_ERROR(_acmReceiver->PlayoutData10Ms(outFileSampFreq, &audioFrame));
- _outFile.Write10MsData(audioFrame);
- }
- _inFile.Rewind();
-}
-
-void SpatialAudio::EncodeDecode() {
- AudioFrame audioFrame;
- int32_t outFileSampFreq = _outFile.SamplingFrequency();
-
- _channel->SetIsStereo(false);
-
- while (!_inFile.EndOfFile()) {
- _inFile.Read10MsData(audioFrame);
- CHECK_ERROR(_acmLeft->Add10MsData(audioFrame));
-
- CHECK_ERROR(_acmReceiver->PlayoutData10Ms(outFileSampFreq, &audioFrame));
- _outFile.Write10MsData(audioFrame);
- }
- _inFile.Rewind();
-}
-
-} // namespace webrtc
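SpatialAudio::EncodeDecode(left, right) above pans without keeping a second copy of the frame: it scales the buffer once by leftPanning for the left-channel encoder, then rescales the already-scaled buffer by rightPanning / leftPanning for the right-channel encoder, so the net right gain is x * gL * (gR / gL) = x * gR, up to rounding twice. The same two-stage scaling as a minimal sketch:

    #include <cmath>
    #include <cstdint>
    #include <vector>

    // Two-stage panning as in the deleted SpatialAudio::EncodeDecode():
    // one buffer is scaled in place for the left feed, then rescaled by the
    // right/left ratio so the net right gain equals right_gain.
    void PanInPlace(std::vector<int16_t>* samples, double left_gain,
                    double right_gain, std::vector<int16_t>* left_out,
                    std::vector<int16_t>* right_out) {
      const double right_to_left = right_gain / left_gain;
      for (int16_t& s : *samples)
        s = static_cast<int16_t>(std::floor(s * left_gain + 0.5));
      *left_out = *samples;  // x * left_gain
      for (int16_t& s : *samples)
        s = static_cast<int16_t>(std::floor(s * right_to_left + 0.5));
      *right_out = *samples;  // ~ x * right_gain, with two rounding steps
    }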
diff --git a/webrtc/modules/audio_coding/main/test/SpatialAudio.h b/webrtc/modules/audio_coding/main/test/SpatialAudio.h
deleted file mode 100644
index fc258977f3..0000000000
--- a/webrtc/modules/audio_coding/main/test/SpatialAudio.h
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_SPATIALAUDIO_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_SPATIALAUDIO_H_
-
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module.h"
-#include "webrtc/modules/audio_coding/main/test/ACMTest.h"
-#include "webrtc/modules/audio_coding/main/test/Channel.h"
-#include "webrtc/modules/audio_coding/main/test/PCMFile.h"
-#include "webrtc/modules/audio_coding/main/test/utility.h"
-
-#define MAX_FILE_NAME_LENGTH_BYTE 500
-
-namespace webrtc {
-
-class SpatialAudio : public ACMTest {
- public:
- SpatialAudio(int testMode);
- ~SpatialAudio();
-
- void Perform();
- private:
- int16_t Setup();
- void EncodeDecode(double leftPanning, double rightPanning);
- void EncodeDecode();
-
- rtc::scoped_ptr<AudioCodingModule> _acmLeft;
- rtc::scoped_ptr<AudioCodingModule> _acmRight;
- rtc::scoped_ptr<AudioCodingModule> _acmReceiver;
- Channel* _channel;
- PCMFile _inFile;
- PCMFile _outFile;
- int _testMode;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_SPATIALAUDIO_H_
diff --git a/webrtc/modules/audio_coding/main/test/TestAllCodecs.cc b/webrtc/modules/audio_coding/main/test/TestAllCodecs.cc
deleted file mode 100644
index 19189b6b8f..0000000000
--- a/webrtc/modules/audio_coding/main/test/TestAllCodecs.cc
+++ /dev/null
@@ -1,486 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/test/TestAllCodecs.h"
-
-#include <cstdio>
-#include <limits>
-#include <string>
-
-#include "testing/gtest/include/gtest/gtest.h"
-
-#include "webrtc/common_types.h"
-#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module_typedefs.h"
-#include "webrtc/modules/audio_coding/main/test/utility.h"
-#include "webrtc/system_wrappers/include/trace.h"
-#include "webrtc/test/testsupport/fileutils.h"
-#include "webrtc/typedefs.h"
-
-// Description of the test:
-// In this test we set up a one-way communication channel from a participant
-// called "a" to a participant called "b".
-// a -> channel_a_to_b -> b
-//
-// The test loops through all available mono codecs: it encodes at "a", sends
-// over the channel, and decodes at "b".
-
-namespace {
-const size_t kVariableSize = std::numeric_limits<size_t>::max();
-}
-
-namespace webrtc {
-
-// Class for simulating packet handling.
-TestPack::TestPack()
- : receiver_acm_(NULL),
- sequence_number_(0),
- timestamp_diff_(0),
- last_in_timestamp_(0),
- total_bytes_(0),
- payload_size_(0) {
-}
-
-TestPack::~TestPack() {
-}
-
-void TestPack::RegisterReceiverACM(AudioCodingModule* acm) {
-  receiver_acm_ = acm;
-}
-
-int32_t TestPack::SendData(FrameType frame_type, uint8_t payload_type,
- uint32_t timestamp, const uint8_t* payload_data,
- size_t payload_size,
- const RTPFragmentationHeader* fragmentation) {
- WebRtcRTPHeader rtp_info;
- int32_t status;
-
- rtp_info.header.markerBit = false;
- rtp_info.header.ssrc = 0;
- rtp_info.header.sequenceNumber = sequence_number_++;
- rtp_info.header.payloadType = payload_type;
- rtp_info.header.timestamp = timestamp;
- if (frame_type == kAudioFrameCN) {
- rtp_info.type.Audio.isCNG = true;
- } else {
- rtp_info.type.Audio.isCNG = false;
- }
- if (frame_type == kEmptyFrame) {
- // Skip this frame.
- return 0;
- }
-
- // Only run mono for all test cases.
- rtp_info.type.Audio.channel = 1;
- memcpy(payload_data_, payload_data, payload_size);
-
- status = receiver_acm_->IncomingPacket(payload_data_, payload_size, rtp_info);
-
- payload_size_ = payload_size;
- timestamp_diff_ = timestamp - last_in_timestamp_;
- last_in_timestamp_ = timestamp;
- total_bytes_ += payload_size;
- return status;
-}
-
-size_t TestPack::payload_size() {
- return payload_size_;
-}
-
-uint32_t TestPack::timestamp_diff() {
- return timestamp_diff_;
-}
-
-void TestPack::reset_payload_size() {
- payload_size_ = 0;
-}
-
-TestAllCodecs::TestAllCodecs(int test_mode)
- : acm_a_(AudioCodingModule::Create(0)),
- acm_b_(AudioCodingModule::Create(1)),
- channel_a_to_b_(NULL),
- test_count_(0),
- packet_size_samples_(0),
- packet_size_bytes_(0) {
- // test_mode = 0 for silent test (auto test)
- test_mode_ = test_mode;
-}
-
-TestAllCodecs::~TestAllCodecs() {
- if (channel_a_to_b_ != NULL) {
- delete channel_a_to_b_;
- channel_a_to_b_ = NULL;
- }
-}
-
-void TestAllCodecs::Perform() {
- const std::string file_name = webrtc::test::ResourcePath(
- "audio_coding/testfile32kHz", "pcm");
- infile_a_.Open(file_name, 32000, "rb");
-
- if (test_mode_ == 0) {
- WEBRTC_TRACE(kTraceStateInfo, kTraceAudioCoding, -1,
- "---------- TestAllCodecs ----------");
- }
-
- acm_a_->InitializeReceiver();
- acm_b_->InitializeReceiver();
-
- uint8_t num_encoders = acm_a_->NumberOfCodecs();
- CodecInst my_codec_param;
- for (uint8_t n = 0; n < num_encoders; n++) {
- acm_b_->Codec(n, &my_codec_param);
- if (!strcmp(my_codec_param.plname, "opus")) {
- my_codec_param.channels = 1;
- }
- acm_b_->RegisterReceiveCodec(my_codec_param);
- }
-
- // Create and connect the channel
- channel_a_to_b_ = new TestPack;
- acm_a_->RegisterTransportCallback(channel_a_to_b_);
- channel_a_to_b_->RegisterReceiverACM(acm_b_.get());
-
- // All codecs are tested for all allowed sampling frequencies, rates and
- // packet sizes.
-#ifdef WEBRTC_CODEC_G722
- if (test_mode_ != 0) {
- printf("===============================================================\n");
- }
- test_count_++;
- OpenOutFile(test_count_);
- char codec_g722[] = "G722";
- RegisterSendCodec('A', codec_g722, 16000, 64000, 160, 0);
- Run(channel_a_to_b_);
- RegisterSendCodec('A', codec_g722, 16000, 64000, 320, 0);
- Run(channel_a_to_b_);
- RegisterSendCodec('A', codec_g722, 16000, 64000, 480, 0);
- Run(channel_a_to_b_);
- RegisterSendCodec('A', codec_g722, 16000, 64000, 640, 0);
- Run(channel_a_to_b_);
- RegisterSendCodec('A', codec_g722, 16000, 64000, 800, 0);
- Run(channel_a_to_b_);
- RegisterSendCodec('A', codec_g722, 16000, 64000, 960, 0);
- Run(channel_a_to_b_);
- outfile_b_.Close();
-#endif
-#ifdef WEBRTC_CODEC_ILBC
- if (test_mode_ != 0) {
- printf("===============================================================\n");
- }
- test_count_++;
- OpenOutFile(test_count_);
- char codec_ilbc[] = "ILBC";
- RegisterSendCodec('A', codec_ilbc, 8000, 13300, 240, 0);
- Run(channel_a_to_b_);
- RegisterSendCodec('A', codec_ilbc, 8000, 13300, 480, 0);
- Run(channel_a_to_b_);
- RegisterSendCodec('A', codec_ilbc, 8000, 15200, 160, 0);
- Run(channel_a_to_b_);
- RegisterSendCodec('A', codec_ilbc, 8000, 15200, 320, 0);
- Run(channel_a_to_b_);
- outfile_b_.Close();
-#endif
-#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
- if (test_mode_ != 0) {
- printf("===============================================================\n");
- }
- test_count_++;
- OpenOutFile(test_count_);
- char codec_isac[] = "ISAC";
- RegisterSendCodec('A', codec_isac, 16000, -1, 480, kVariableSize);
- Run(channel_a_to_b_);
- RegisterSendCodec('A', codec_isac, 16000, -1, 960, kVariableSize);
- Run(channel_a_to_b_);
- RegisterSendCodec('A', codec_isac, 16000, 15000, 480, kVariableSize);
- Run(channel_a_to_b_);
- RegisterSendCodec('A', codec_isac, 16000, 32000, 960, kVariableSize);
- Run(channel_a_to_b_);
- outfile_b_.Close();
-#endif
-#ifdef WEBRTC_CODEC_ISAC
- if (test_mode_ != 0) {
- printf("===============================================================\n");
- }
- test_count_++;
- OpenOutFile(test_count_);
- RegisterSendCodec('A', codec_isac, 32000, -1, 960, kVariableSize);
- Run(channel_a_to_b_);
- RegisterSendCodec('A', codec_isac, 32000, 56000, 960, kVariableSize);
- Run(channel_a_to_b_);
- RegisterSendCodec('A', codec_isac, 32000, 37000, 960, kVariableSize);
- Run(channel_a_to_b_);
- RegisterSendCodec('A', codec_isac, 32000, 32000, 960, kVariableSize);
- Run(channel_a_to_b_);
- outfile_b_.Close();
-#endif
- if (test_mode_ != 0) {
- printf("===============================================================\n");
- }
- test_count_++;
- OpenOutFile(test_count_);
- char codec_l16[] = "L16";
- RegisterSendCodec('A', codec_l16, 8000, 128000, 80, 0);
- Run(channel_a_to_b_);
- RegisterSendCodec('A', codec_l16, 8000, 128000, 160, 0);
- Run(channel_a_to_b_);
- RegisterSendCodec('A', codec_l16, 8000, 128000, 240, 0);
- Run(channel_a_to_b_);
- RegisterSendCodec('A', codec_l16, 8000, 128000, 320, 0);
- Run(channel_a_to_b_);
- outfile_b_.Close();
- if (test_mode_ != 0) {
- printf("===============================================================\n");
- }
- test_count_++;
- OpenOutFile(test_count_);
- RegisterSendCodec('A', codec_l16, 16000, 256000, 160, 0);
- Run(channel_a_to_b_);
- RegisterSendCodec('A', codec_l16, 16000, 256000, 320, 0);
- Run(channel_a_to_b_);
- RegisterSendCodec('A', codec_l16, 16000, 256000, 480, 0);
- Run(channel_a_to_b_);
- RegisterSendCodec('A', codec_l16, 16000, 256000, 640, 0);
- Run(channel_a_to_b_);
- outfile_b_.Close();
- if (test_mode_ != 0) {
- printf("===============================================================\n");
- }
- test_count_++;
- OpenOutFile(test_count_);
- RegisterSendCodec('A', codec_l16, 32000, 512000, 320, 0);
- Run(channel_a_to_b_);
- RegisterSendCodec('A', codec_l16, 32000, 512000, 640, 0);
- Run(channel_a_to_b_);
- outfile_b_.Close();
- if (test_mode_ != 0) {
- printf("===============================================================\n");
- }
- test_count_++;
- OpenOutFile(test_count_);
- char codec_pcma[] = "PCMA";
- RegisterSendCodec('A', codec_pcma, 8000, 64000, 80, 0);
- Run(channel_a_to_b_);
- RegisterSendCodec('A', codec_pcma, 8000, 64000, 160, 0);
- Run(channel_a_to_b_);
- RegisterSendCodec('A', codec_pcma, 8000, 64000, 240, 0);
- Run(channel_a_to_b_);
- RegisterSendCodec('A', codec_pcma, 8000, 64000, 320, 0);
- Run(channel_a_to_b_);
- RegisterSendCodec('A', codec_pcma, 8000, 64000, 400, 0);
- Run(channel_a_to_b_);
- RegisterSendCodec('A', codec_pcma, 8000, 64000, 480, 0);
- Run(channel_a_to_b_);
- if (test_mode_ != 0) {
- printf("===============================================================\n");
- }
- char codec_pcmu[] = "PCMU";
- RegisterSendCodec('A', codec_pcmu, 8000, 64000, 80, 0);
- Run(channel_a_to_b_);
- RegisterSendCodec('A', codec_pcmu, 8000, 64000, 160, 0);
- Run(channel_a_to_b_);
- RegisterSendCodec('A', codec_pcmu, 8000, 64000, 240, 0);
- Run(channel_a_to_b_);
- RegisterSendCodec('A', codec_pcmu, 8000, 64000, 320, 0);
- Run(channel_a_to_b_);
- RegisterSendCodec('A', codec_pcmu, 8000, 64000, 400, 0);
- Run(channel_a_to_b_);
- RegisterSendCodec('A', codec_pcmu, 8000, 64000, 480, 0);
- Run(channel_a_to_b_);
- outfile_b_.Close();
-#ifdef WEBRTC_CODEC_OPUS
- if (test_mode_ != 0) {
- printf("===============================================================\n");
- }
- test_count_++;
- OpenOutFile(test_count_);
- char codec_opus[] = "OPUS";
- RegisterSendCodec('A', codec_opus, 48000, 6000, 480, kVariableSize);
- Run(channel_a_to_b_);
- RegisterSendCodec('A', codec_opus, 48000, 20000, 480*2, kVariableSize);
- Run(channel_a_to_b_);
- RegisterSendCodec('A', codec_opus, 48000, 32000, 480*4, kVariableSize);
- Run(channel_a_to_b_);
- RegisterSendCodec('A', codec_opus, 48000, 48000, 480, kVariableSize);
- Run(channel_a_to_b_);
- RegisterSendCodec('A', codec_opus, 48000, 64000, 480*4, kVariableSize);
- Run(channel_a_to_b_);
- RegisterSendCodec('A', codec_opus, 48000, 96000, 480*6, kVariableSize);
- Run(channel_a_to_b_);
- RegisterSendCodec('A', codec_opus, 48000, 500000, 480*2, kVariableSize);
- Run(channel_a_to_b_);
- outfile_b_.Close();
-#endif
- if (test_mode_ != 0) {
- printf("===============================================================\n");
-
- /* Print out all codecs that were not tested in the run */
- printf("The following codecs was not included in the test:\n");
-#ifndef WEBRTC_CODEC_G722
- printf(" G.722\n");
-#endif
-#ifndef WEBRTC_CODEC_ILBC
- printf(" iLBC\n");
-#endif
-#ifndef WEBRTC_CODEC_ISAC
- printf(" ISAC float\n");
-#endif
-#ifndef WEBRTC_CODEC_ISACFX
- printf(" ISAC fix\n");
-#endif
-
- printf("\nTo complete the test, listen to the %d number of output files.\n",
- test_count_);
- }
-}
-
-// Register Codec to use in the test
-//
-// Input: side - which ACM to use, 'A' or 'B'
-//          codec_name       - name to use when registering the codec
-//          sampling_freq_hz - sampling frequency in Hertz
-//          rate             - bitrate in bits per second
-//          packet_size      - packet size in samples
-//          extra_byte       - if extra bytes are needed compared to the
-//                             bitrate used when registering (e.g. an internal
-//                             header); set to kVariableSize if the codec has
-//                             a variable rate
-void TestAllCodecs::RegisterSendCodec(char side, char* codec_name,
- int32_t sampling_freq_hz, int rate,
- int packet_size, size_t extra_byte) {
- if (test_mode_ != 0) {
- // Print out codec and settings.
- printf("codec: %s Freq: %d Rate: %d PackSize: %d\n", codec_name,
- sampling_freq_hz, rate, packet_size);
- }
-
- // Store packet-size in samples, used to validate the received packet.
- // If G.722, store half the size to compensate for the timestamp bug in the
- // RFC for G.722.
- // If iSAC runs in adaptive mode, packet size in samples can change on the
- // fly, so we exclude this test by setting |packet_size_samples_| to -1.
- if (!strcmp(codec_name, "G722")) {
- packet_size_samples_ = packet_size / 2;
- } else if (!strcmp(codec_name, "ISAC") && (rate == -1)) {
- packet_size_samples_ = -1;
- } else {
- packet_size_samples_ = packet_size;
- }
-
-  // Store the expected packet size in bytes, used to validate the received
-  // packet. If the codec has variable rate (extra_byte == kVariableSize), set
-  // the expected size to kVariableSize as well.
- if (extra_byte != kVariableSize) {
- // Add 0.875 to always round up to a whole byte
- packet_size_bytes_ = static_cast<size_t>(
- static_cast<float>(packet_size * rate) /
- static_cast<float>(sampling_freq_hz * 8) + 0.875) + extra_byte;
- } else {
- // Packets will have a variable size.
- packet_size_bytes_ = kVariableSize;
- }
-
- // Set pointer to the ACM where to register the codec.
- AudioCodingModule* my_acm = NULL;
- switch (side) {
- case 'A': {
- my_acm = acm_a_.get();
- break;
- }
- case 'B': {
- my_acm = acm_b_.get();
- break;
- }
- default: {
- break;
- }
- }
- ASSERT_TRUE(my_acm != NULL);
-
- // Get all codec parameters before registering
- CodecInst my_codec_param;
- CHECK_ERROR(AudioCodingModule::Codec(codec_name, &my_codec_param,
- sampling_freq_hz, 1));
- my_codec_param.rate = rate;
- my_codec_param.pacsize = packet_size;
- CHECK_ERROR(my_acm->RegisterSendCodec(my_codec_param));
-}
-
-void TestAllCodecs::Run(TestPack* channel) {
- AudioFrame audio_frame;
-
- int32_t out_freq_hz = outfile_b_.SamplingFrequency();
- size_t receive_size;
- uint32_t timestamp_diff;
- channel->reset_payload_size();
- int error_count = 0;
-
- int counter = 0;
- while (!infile_a_.EndOfFile()) {
- // Add 10 msec to ACM.
- infile_a_.Read10MsData(audio_frame);
- CHECK_ERROR(acm_a_->Add10MsData(audio_frame));
-
- // Verify that the received packet size matches the settings.
- receive_size = channel->payload_size();
- if (receive_size) {
- if ((receive_size != packet_size_bytes_) &&
- (packet_size_bytes_ != kVariableSize)) {
- error_count++;
- }
-
- // Verify that the timestamp is updated with expected length. The counter
- // is used to avoid problems when switching codec or frame size in the
- // test.
- timestamp_diff = channel->timestamp_diff();
- if ((counter > 10) &&
- (static_cast<int>(timestamp_diff) != packet_size_samples_) &&
- (packet_size_samples_ > -1))
- error_count++;
- }
-
- // Run received side of ACM.
- CHECK_ERROR(acm_b_->PlayoutData10Ms(out_freq_hz, &audio_frame));
-
- // Write output speech to file.
- outfile_b_.Write10MsData(audio_frame.data_,
- audio_frame.samples_per_channel_);
-
- // Update loop counter
- counter++;
- }
-
- EXPECT_EQ(0, error_count);
-
- if (infile_a_.EndOfFile()) {
- infile_a_.Rewind();
- }
-}
-
-void TestAllCodecs::OpenOutFile(int test_number) {
- std::string filename = webrtc::test::OutputPath();
- std::ostringstream test_number_str;
- test_number_str << test_number;
- filename += "testallcodecs_out_";
- filename += test_number_str.str();
- filename += ".pcm";
- outfile_b_.Open(filename, 32000, "wb");
-}
-
-void TestAllCodecs::DisplaySendReceiveCodec() {
- CodecInst my_codec_param;
- acm_a_->SendCodec(&my_codec_param);
- printf("%s -> ", my_codec_param.plname);
- acm_b_->ReceiveCodec(&my_codec_param);
- printf("%s\n", my_codec_param.plname);
-}
-
-} // namespace webrtc
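RegisterSendCodec() above predicts the payload size of a constant-bitrate packet as samples * bitrate / (sample_rate * 8) bytes, adding 0.875 before the truncating cast: any nonzero remainder is at least one bit (1/8 byte), so adding 7/8 rounds it up to a whole byte without disturbing exact counts. For G.722 at 64000 bit/s, 160 samples at 16 kHz give 160 * 64000 / (16000 * 8) = 80 bytes per packet. A standalone version of the computation:

    #include <cstddef>

    // Expected payload bytes for a constant-bitrate codec, as computed in the
    // deleted TestAllCodecs::RegisterSendCodec(). Adding 0.875 before the
    // truncating cast rounds any fractional byte (>= 1 bit) up.
    std::size_t ExpectedPacketBytes(int packet_size_samples, int rate_bps,
                                    int sampling_freq_hz,
                                    std::size_t extra_bytes) {
      return static_cast<std::size_t>(
                 static_cast<float>(packet_size_samples) * rate_bps /
                     (static_cast<float>(sampling_freq_hz) * 8) + 0.875f) +
             extra_bytes;
    }

    // Example: ExpectedPacketBytes(160, 64000, 16000, 0) == 80 (G.722, 10 ms).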
diff --git a/webrtc/modules/audio_coding/main/test/TestAllCodecs.h b/webrtc/modules/audio_coding/main/test/TestAllCodecs.h
deleted file mode 100644
index 1cdc0cba98..0000000000
--- a/webrtc/modules/audio_coding/main/test/TestAllCodecs.h
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_TESTALLCODECS_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_TESTALLCODECS_H_
-
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/audio_coding/main/test/ACMTest.h"
-#include "webrtc/modules/audio_coding/main/test/Channel.h"
-#include "webrtc/modules/audio_coding/main/test/PCMFile.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-class Config;
-
-class TestPack : public AudioPacketizationCallback {
- public:
- TestPack();
- ~TestPack();
-
- void RegisterReceiverACM(AudioCodingModule* acm);
-
- int32_t SendData(FrameType frame_type,
- uint8_t payload_type,
- uint32_t timestamp,
- const uint8_t* payload_data,
- size_t payload_size,
- const RTPFragmentationHeader* fragmentation) override;
-
- size_t payload_size();
- uint32_t timestamp_diff();
- void reset_payload_size();
-
- private:
- AudioCodingModule* receiver_acm_;
- uint16_t sequence_number_;
- uint8_t payload_data_[60 * 32 * 2 * 2];
- uint32_t timestamp_diff_;
- uint32_t last_in_timestamp_;
- uint64_t total_bytes_;
- size_t payload_size_;
-};
-
-class TestAllCodecs : public ACMTest {
- public:
- explicit TestAllCodecs(int test_mode);
- ~TestAllCodecs();
-
- void Perform() override;
-
- private:
-  // The value '-1' indicates that the registration is based only on the
-  // codec name; a matching sampling frequency is not required.
-  // This is useful for codecs which support several sampling frequencies.
- // Note! Only mono mode is tested in this test.
- void RegisterSendCodec(char side, char* codec_name, int32_t sampling_freq_hz,
- int rate, int packet_size, size_t extra_byte);
-
- void Run(TestPack* channel);
- void OpenOutFile(int test_number);
- void DisplaySendReceiveCodec();
-
- int test_mode_;
- rtc::scoped_ptr<AudioCodingModule> acm_a_;
- rtc::scoped_ptr<AudioCodingModule> acm_b_;
- TestPack* channel_a_to_b_;
- PCMFile infile_a_;
- PCMFile outfile_b_;
- int test_count_;
- int packet_size_samples_;
- size_t packet_size_bytes_;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_TESTALLCODECS_H_
diff --git a/webrtc/modules/audio_coding/main/test/TestRedFec.cc b/webrtc/modules/audio_coding/main/test/TestRedFec.cc
deleted file mode 100644
index 0627ae2d74..0000000000
--- a/webrtc/modules/audio_coding/main/test/TestRedFec.cc
+++ /dev/null
@@ -1,476 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/test/TestRedFec.h"
-
-#include <assert.h>
-
-#include "webrtc/common.h"
-#include "webrtc/common_types.h"
-#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module_typedefs.h"
-#include "webrtc/modules/audio_coding/main/test/utility.h"
-#include "webrtc/system_wrappers/include/trace.h"
-#include "webrtc/test/testsupport/fileutils.h"
-
-#ifdef SUPPORT_RED_WB
-#undef SUPPORT_RED_WB
-#endif
-
-#ifdef SUPPORT_RED_SWB
-#undef SUPPORT_RED_SWB
-#endif
-
-#ifdef SUPPORT_RED_FB
-#undef SUPPORT_RED_FB
-#endif
-
-namespace webrtc {
-
-namespace {
- const char kNameL16[] = "L16";
- const char kNamePCMU[] = "PCMU";
- const char kNameCN[] = "CN";
- const char kNameRED[] = "RED";
-
- // These three are only used by code #ifdeffed on WEBRTC_CODEC_G722.
-#ifdef WEBRTC_CODEC_G722
- const char kNameISAC[] = "ISAC";
- const char kNameG722[] = "G722";
- const char kNameOPUS[] = "opus";
-#endif
-}
-
-TestRedFec::TestRedFec()
- : _acmA(AudioCodingModule::Create(0)),
- _acmB(AudioCodingModule::Create(1)),
- _channelA2B(NULL),
- _testCntr(0) {
-}
-
-TestRedFec::~TestRedFec() {
- if (_channelA2B != NULL) {
- delete _channelA2B;
- _channelA2B = NULL;
- }
-}
-
-void TestRedFec::Perform() {
- const std::string file_name = webrtc::test::ResourcePath(
- "audio_coding/testfile32kHz", "pcm");
- _inFileA.Open(file_name, 32000, "rb");
-
- ASSERT_EQ(0, _acmA->InitializeReceiver());
- ASSERT_EQ(0, _acmB->InitializeReceiver());
-
- uint8_t numEncoders = _acmA->NumberOfCodecs();
- CodecInst myCodecParam;
- for (uint8_t n = 0; n < numEncoders; n++) {
- EXPECT_EQ(0, _acmB->Codec(n, &myCodecParam));
- // The default number of channels is 2 for Opus; change to 1 in this test.
- if (!strcmp(myCodecParam.plname, "opus")) {
- myCodecParam.channels = 1;
- }
- EXPECT_EQ(0, _acmB->RegisterReceiveCodec(myCodecParam));
- }
-
- // Create and connect the channel
- _channelA2B = new Channel;
- _acmA->RegisterTransportCallback(_channelA2B);
- _channelA2B->RegisterReceiverACM(_acmB.get());
-
- EXPECT_EQ(0, RegisterSendCodec('A', kNameL16, 8000));
- EXPECT_EQ(0, RegisterSendCodec('A', kNameCN, 8000));
- EXPECT_EQ(0, RegisterSendCodec('A', kNameRED));
- EXPECT_EQ(0, SetVAD(true, true, VADAggr));
- EXPECT_EQ(0, _acmA->SetREDStatus(true));
- EXPECT_TRUE(_acmA->REDStatus());
-
- OpenOutFile(_testCntr);
- Run();
- _outFileB.Close();
-
- RegisterSendCodec('A', kNamePCMU, 8000);
- // Switch to another 8 kHz codec, RED should remain switched on.
- EXPECT_TRUE(_acmA->REDStatus());
- OpenOutFile(_testCntr);
- Run();
- _outFileB.Close();
-
-#ifndef WEBRTC_CODEC_G722
- EXPECT_TRUE(false);
- printf("G722 needs to be activated to run this test\n");
- return;
-#else
- EXPECT_EQ(0, RegisterSendCodec('A', kNameG722, 16000));
- EXPECT_EQ(0, RegisterSendCodec('A', kNameCN, 16000));
-
-#ifdef SUPPORT_RED_WB
- // Switch codec, RED should remain.
- EXPECT_TRUE(_acmA->REDStatus());
-#else
- // Switch to a 16 kHz codec, RED should have been switched off.
- EXPECT_FALSE(_acmA->REDStatus());
-#endif
-
- OpenOutFile(_testCntr);
- EXPECT_EQ(0, SetVAD(true, true, VADAggr));
- EXPECT_EQ(0, _acmA->SetREDStatus(false));
- EXPECT_FALSE(_acmA->REDStatus());
- Run();
-#ifdef SUPPORT_RED_WB
- EXPECT_EQ(0, _acmA->SetREDStatus(true));
- EXPECT_TRUE(_acmA->REDStatus());
-#else
- EXPECT_EQ(-1, _acmA->SetREDStatus(true));
- EXPECT_FALSE(_acmA->REDStatus());
-#endif
- Run();
- _outFileB.Close();
-
- RegisterSendCodec('A', kNameISAC, 16000);
-
-#ifdef SUPPORT_RED_WB
- // Switch codec, RED should remain.
- EXPECT_TRUE(_acmA->REDStatus());
-#else
- EXPECT_FALSE(_acmA->REDStatus());
-#endif
-
- OpenOutFile(_testCntr);
- EXPECT_EQ(0, SetVAD(true, true, VADVeryAggr));
- EXPECT_EQ(0, _acmA->SetREDStatus(false));
- EXPECT_FALSE(_acmA->REDStatus());
- Run();
- _outFileB.Close();
-
-#ifdef SUPPORT_RED_WB
- EXPECT_EQ(0, _acmA->SetREDStatus(true));
- EXPECT_TRUE(_acmA->REDStatus());
-#else
- EXPECT_EQ(-1, _acmA->SetREDStatus(true));
- EXPECT_FALSE(_acmA->REDStatus());
-#endif
- OpenOutFile(_testCntr);
- Run();
- _outFileB.Close();
-
- RegisterSendCodec('A', kNameISAC, 32000);
-
-#if defined(SUPPORT_RED_SWB) && defined(SUPPORT_RED_WB)
- // Switch codec, RED should remain.
- EXPECT_TRUE(_acmA->REDStatus());
-#else
- // Switch to a 32 kHz codec, RED should have been switched off.
- EXPECT_FALSE(_acmA->REDStatus());
-#endif
-
- OpenOutFile(_testCntr);
- EXPECT_EQ(0, SetVAD(true, true, VADVeryAggr));
- EXPECT_EQ(0, _acmA->SetREDStatus(false));
- EXPECT_FALSE(_acmA->REDStatus());
- Run();
- _outFileB.Close();
-
-#ifdef SUPPORT_RED_SWB
- EXPECT_EQ(0, _acmA->SetREDStatus(true));
- EXPECT_TRUE(_acmA->REDStatus());
-#else
- EXPECT_EQ(-1, _acmA->SetREDStatus(true));
- EXPECT_FALSE(_acmA->REDStatus());
-#endif
- OpenOutFile(_testCntr);
- Run();
- _outFileB.Close();
-
- RegisterSendCodec('A', kNameISAC, 32000);
- EXPECT_EQ(0, SetVAD(false, false, VADNormal));
-
-#if defined(SUPPORT_RED_SWB) && defined(SUPPORT_RED_WB)
- OpenOutFile(_testCntr);
- EXPECT_EQ(0, _acmA->SetREDStatus(true));
- EXPECT_TRUE(_acmA->REDStatus());
- Run();
-
- RegisterSendCodec('A', kNameISAC, 16000);
- EXPECT_TRUE(_acmA->REDStatus());
- Run();
-
- RegisterSendCodec('A', kNameISAC, 32000);
- EXPECT_TRUE(_acmA->REDStatus());
- Run();
-
- RegisterSendCodec('A', kNameISAC, 16000);
- EXPECT_TRUE(_acmA->REDStatus());
- Run();
- _outFileB.Close();
-#else
- EXPECT_EQ(-1, _acmA->SetREDStatus(true));
- EXPECT_FALSE(_acmA->REDStatus());
-#endif
-
- _channelA2B->SetFECTestWithPacketLoss(true);
- // The following tests are run under packet loss.
-
- EXPECT_EQ(0, RegisterSendCodec('A', kNameG722));
- EXPECT_EQ(0, RegisterSendCodec('A', kNameCN, 16000));
-
-#if defined(SUPPORT_RED_WB) && defined(SUPPORT_RED_SWB)
- // Switch codec, RED should remain.
- EXPECT_TRUE(_acmA->REDStatus());
-#else
- // Switch to a 16 kHz codec, RED should have been switched off.
- EXPECT_FALSE(_acmA->REDStatus());
-#endif
-
- OpenOutFile(_testCntr);
- EXPECT_EQ(0, SetVAD(true, true, VADAggr));
- EXPECT_EQ(0, _acmA->SetREDStatus(false));
- EXPECT_FALSE(_acmA->REDStatus());
- Run();
- _outFileB.Close();
-
-#ifdef SUPPORT_RED_WB
- EXPECT_EQ(0, _acmA->SetREDStatus(true));
- EXPECT_TRUE(_acmA->REDStatus());
-#else
- EXPECT_EQ(-1, _acmA->SetREDStatus(true));
- EXPECT_FALSE(_acmA->REDStatus());
-#endif
- OpenOutFile(_testCntr);
- Run();
- _outFileB.Close();
-
- RegisterSendCodec('A', kNameISAC, 16000);
-
-#ifdef SUPPORT_RED_WB
- // Switch codec, RED should remain.
- EXPECT_TRUE(_acmA->REDStatus());
-#else
- // Switch to a 16 kHz codec, RED should have been switched off.
- EXPECT_FALSE(_acmA->REDStatus());
-#endif
-
- OpenOutFile(_testCntr);
- EXPECT_EQ(0, SetVAD(true, true, VADVeryAggr));
- EXPECT_EQ(0, _acmA->SetREDStatus(false));
- EXPECT_FALSE(_acmA->REDStatus());
- Run();
- _outFileB.Close();
-#ifdef SUPPORT_RED_WB
- EXPECT_EQ(0, _acmA->SetREDStatus(true));
- EXPECT_TRUE(_acmA->REDStatus());
-#else
- EXPECT_EQ(-1, _acmA->SetREDStatus(true));
- EXPECT_FALSE(_acmA->REDStatus());
-#endif
- OpenOutFile(_testCntr);
- Run();
- _outFileB.Close();
-
- RegisterSendCodec('A', kNameISAC, 32000);
-
-#if defined(SUPPORT_RED_SWB) && defined(SUPPORT_RED_WB)
- // Switch codec, RED should remain.
- EXPECT_TRUE(_acmA->REDStatus());
-#else
- // Switch to a 32 kHz codec, RED should have been switched off.
- EXPECT_FALSE(_acmA->REDStatus());
-#endif
-
- OpenOutFile(_testCntr);
- EXPECT_EQ(0, SetVAD(true, true, VADVeryAggr));
- EXPECT_EQ(0, _acmA->SetREDStatus(false));
- EXPECT_FALSE(_acmA->REDStatus());
-#ifdef SUPPORT_RED_SWB
- EXPECT_EQ(0, _acmA->SetREDStatus(true));
- EXPECT_TRUE(_acmA->REDStatus());
-#else
- EXPECT_EQ(-1, _acmA->SetREDStatus(true));
- EXPECT_FALSE(_acmA->REDStatus());
-#endif
- OpenOutFile(_testCntr);
- Run();
- _outFileB.Close();
-
- RegisterSendCodec('A', kNameISAC, 32000);
- EXPECT_EQ(0, SetVAD(false, false, VADNormal));
-#if defined(SUPPORT_RED_SWB) && defined(SUPPORT_RED_WB)
- OpenOutFile(_testCntr);
- EXPECT_EQ(0, _acmA->SetREDStatus(true));
- EXPECT_TRUE(_acmA->REDStatus());
- Run();
-
- RegisterSendCodec('A', kNameISAC, 16000);
- EXPECT_TRUE(_acmA->REDStatus());
- Run();
-
- RegisterSendCodec('A', kNameISAC, 32000);
- EXPECT_TRUE(_acmA->REDStatus());
- Run();
-
- RegisterSendCodec('A', kNameISAC, 16000);
- EXPECT_TRUE(_acmA->REDStatus());
- Run();
- _outFileB.Close();
-#else
- EXPECT_EQ(-1, _acmA->SetREDStatus(true));
- EXPECT_FALSE(_acmA->REDStatus());
-#endif
-
-#ifndef WEBRTC_CODEC_OPUS
- EXPECT_TRUE(false);
- printf("Opus needs to be activated to run this test\n");
- return;
-#endif
-
- RegisterSendCodec('A', kNameOPUS, 48000);
-
-#if defined(SUPPORT_RED_FB) && defined(SUPPORT_RED_SWB) &&\
- defined(SUPPORT_RED_WB)
- // After the codec switch, RED should remain switched on.
- EXPECT_TRUE(_acmA->REDStatus());
-#else
- EXPECT_FALSE(_acmA->REDStatus());
-#endif
-
- // _channelA2B imposes 25% packet loss rate.
- EXPECT_EQ(0, _acmA->SetPacketLossRate(25));
-
-#ifdef SUPPORT_RED_FB
- EXPECT_EQ(0, _acmA->SetREDStatus(true));
- EXPECT_TRUE(_acmA->REDStatus());
- // Codec FEC and RED are mutually exclusive.
- EXPECT_EQ(-1, _acmA->SetCodecFEC(true));
-
- EXPECT_EQ(0, _acmA->SetREDStatus(false));
- EXPECT_EQ(0, _acmA->SetCodecFEC(true));
-
- // Codec FEC and RED are mutually exclusive.
- EXPECT_EQ(-1, _acmA->SetREDStatus(true));
-#else
- EXPECT_EQ(-1, _acmA->SetREDStatus(true));
- EXPECT_FALSE(_acmA->REDStatus());
- EXPECT_EQ(0, _acmA->SetCodecFEC(true));
-#endif
-
- EXPECT_TRUE(_acmA->CodecFEC());
- OpenOutFile(_testCntr);
- Run();
-
- // Switch to L16 with RED.
- RegisterSendCodec('A', kNameL16, 8000);
- EXPECT_EQ(0, SetVAD(false, false, VADNormal));
-
- // L16 does not support FEC, so FEC should be turned off automatically.
- EXPECT_FALSE(_acmA->CodecFEC());
-
- EXPECT_EQ(0, _acmA->SetREDStatus(true));
- EXPECT_TRUE(_acmA->REDStatus());
- Run();
-
- // Switch to Opus again.
- RegisterSendCodec('A', kNameOPUS, 48000);
-#ifdef SUPPORT_RED_FB
- // After the codec switch, RED should remain switched on.
- EXPECT_TRUE(_acmA->REDStatus());
-#else
- EXPECT_FALSE(_acmA->REDStatus());
-#endif
- EXPECT_EQ(0, _acmA->SetREDStatus(false));
- EXPECT_EQ(0, _acmA->SetCodecFEC(false));
- Run();
-
- EXPECT_EQ(0, _acmA->SetCodecFEC(true));
- _outFileB.Close();
-
- // These codecs do not support internal FEC, so enabling FEC must fail.
- RegisterSendCodec('A', kNameG722, 16000);
- EXPECT_FALSE(_acmA->REDStatus());
- EXPECT_EQ(-1, _acmA->SetCodecFEC(true));
- EXPECT_FALSE(_acmA->CodecFEC());
-
- RegisterSendCodec('A', kNameISAC, 16000);
- EXPECT_FALSE(_acmA->REDStatus());
- EXPECT_EQ(-1, _acmA->SetCodecFEC(true));
- EXPECT_FALSE(_acmA->CodecFEC());
-
- // These codecs do not support internal FEC; disabling FEC is not a failure.
- RegisterSendCodec('A', kNameG722, 16000);
- EXPECT_FALSE(_acmA->REDStatus());
- EXPECT_EQ(0, _acmA->SetCodecFEC(false));
- EXPECT_FALSE(_acmA->CodecFEC());
-
- RegisterSendCodec('A', kNameISAC, 16000);
- EXPECT_FALSE(_acmA->REDStatus());
- EXPECT_EQ(0, _acmA->SetCodecFEC(false));
- EXPECT_FALSE(_acmA->CodecFEC());
-
-#endif // defined(WEBRTC_CODEC_G722)
-}
-
-int32_t TestRedFec::SetVAD(bool enableDTX, bool enableVAD, ACMVADMode vadMode) {
- return _acmA->SetVAD(enableDTX, enableVAD, vadMode);
-}
-
-int16_t TestRedFec::RegisterSendCodec(char side, const char* codecName,
- int32_t samplingFreqHz) {
- std::cout << std::flush;
- AudioCodingModule* myACM;
- switch (side) {
- case 'A': {
- myACM = _acmA.get();
- break;
- }
- case 'B': {
- myACM = _acmB.get();
- break;
- }
- default:
- return -1;
- }
-
- if (myACM == NULL) {
- assert(false);
- return -1;
- }
- CodecInst myCodecParam;
- EXPECT_GT(AudioCodingModule::Codec(codecName, &myCodecParam,
- samplingFreqHz, 1), -1);
- EXPECT_GT(myACM->RegisterSendCodec(myCodecParam), -1);
-
- // Initialization was successful.
- return 0;
-}
-
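-// A sketch of the data pump below: each iteration reads 10 ms of audio on
-// side A, encodes and sends it through the channel to side B, and writes
-// B's 10 ms playout to the output file until the input file is exhausted.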
-void TestRedFec::Run() {
- AudioFrame audioFrame;
- int32_t outFreqHzB = _outFileB.SamplingFrequency();
-
- while (!_inFileA.EndOfFile()) {
- EXPECT_GT(_inFileA.Read10MsData(audioFrame), 0);
- EXPECT_GE(_acmA->Add10MsData(audioFrame), 0);
- EXPECT_EQ(0, _acmB->PlayoutData10Ms(outFreqHzB, &audioFrame));
- _outFileB.Write10MsData(audioFrame.data_, audioFrame.samples_per_channel_);
- }
- _inFileA.Rewind();
-}
-
-void TestRedFec::OpenOutFile(int16_t test_number) {
- std::string file_name;
- std::stringstream file_stream;
- file_stream << webrtc::test::OutputPath();
- file_stream << "TestRedFec_outFile_";
- file_stream << test_number << ".pcm";
- file_name = file_stream.str();
- _outFileB.Open(file_name, 16000, "wb");
-}
-
-} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/main/test/TestRedFec.h b/webrtc/modules/audio_coding/main/test/TestRedFec.h
deleted file mode 100644
index ac0b6cdfc7..0000000000
--- a/webrtc/modules/audio_coding/main/test/TestRedFec.h
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_TESTREDFEC_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_TESTREDFEC_H_
-
-#include <string>
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/audio_coding/main/test/ACMTest.h"
-#include "webrtc/modules/audio_coding/main/test/Channel.h"
-#include "webrtc/modules/audio_coding/main/test/PCMFile.h"
-
-namespace webrtc {
-
-class Config;
-
-class TestRedFec : public ACMTest {
- public:
- TestRedFec();
- ~TestRedFec();
-
- void Perform() override;
- private:
- // The default value of '-1' indicates that the registration is based only
- // on the codec name, and that sampling-frequency matching is not required.
- // This is useful for codecs that support several sampling frequencies.
- int16_t RegisterSendCodec(char side, const char* codecName,
- int32_t sampFreqHz = -1);
- void Run();
- void OpenOutFile(int16_t testNumber);
- int32_t SetVAD(bool enableDTX, bool enableVAD, ACMVADMode vadMode);
- rtc::scoped_ptr<AudioCodingModule> _acmA;
- rtc::scoped_ptr<AudioCodingModule> _acmB;
-
- Channel* _channelA2B;
-
- PCMFile _inFileA;
- PCMFile _outFileB;
- int16_t _testCntr;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_TESTREDFEC_H_
diff --git a/webrtc/modules/audio_coding/main/test/TestStereo.cc b/webrtc/modules/audio_coding/main/test/TestStereo.cc
deleted file mode 100644
index 69cc3272bb..0000000000
--- a/webrtc/modules/audio_coding/main/test/TestStereo.cc
+++ /dev/null
@@ -1,837 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/test/TestStereo.h"
-
-#include <assert.h>
-
-#include <string>
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/common_types.h"
-#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module_typedefs.h"
-#include "webrtc/modules/audio_coding/main/test/utility.h"
-#include "webrtc/system_wrappers/include/trace.h"
-#include "webrtc/test/testsupport/fileutils.h"
-
-namespace webrtc {
-
-// Class for simulating packet handling
-TestPackStereo::TestPackStereo()
- : receiver_acm_(NULL),
- seq_no_(0),
- timestamp_diff_(0),
- last_in_timestamp_(0),
- total_bytes_(0),
- payload_size_(0),
- codec_mode_(kNotSet),
- lost_packet_(false) {
-}
-
-TestPackStereo::~TestPackStereo() {
-}
-
-void TestPackStereo::RegisterReceiverACM(AudioCodingModule* acm) {
- receiver_acm_ = acm;
- return;
-}
-
-int32_t TestPackStereo::SendData(const FrameType frame_type,
- const uint8_t payload_type,
- const uint32_t timestamp,
- const uint8_t* payload_data,
- const size_t payload_size,
- const RTPFragmentationHeader* fragmentation) {
- WebRtcRTPHeader rtp_info;
- int32_t status = 0;
-
- rtp_info.header.markerBit = false;
- rtp_info.header.ssrc = 0;
- rtp_info.header.sequenceNumber = seq_no_++;
- rtp_info.header.payloadType = payload_type;
- rtp_info.header.timestamp = timestamp;
- if (frame_type == kEmptyFrame) {
- // Skip this frame
- return 0;
- }
-
- if (lost_packet_ == false) {
- if (frame_type != kAudioFrameCN) {
- rtp_info.type.Audio.isCNG = false;
- rtp_info.type.Audio.channel = static_cast<int>(codec_mode_);
- } else {
- rtp_info.type.Audio.isCNG = true;
- rtp_info.type.Audio.channel = static_cast<int>(kMono);
- }
- status = receiver_acm_->IncomingPacket(payload_data, payload_size,
- rtp_info);
-
- if (frame_type != kAudioFrameCN) {
- payload_size_ = static_cast<int>(payload_size);
- } else {
- payload_size_ = -1;
- }
-
- timestamp_diff_ = timestamp - last_in_timestamp_;
- last_in_timestamp_ = timestamp;
- total_bytes_ += payload_size;
- }
- return status;
-}
-
-uint16_t TestPackStereo::payload_size() {
- return static_cast<uint16_t>(payload_size_);
-}
-
-uint32_t TestPackStereo::timestamp_diff() {
- return timestamp_diff_;
-}
-
-void TestPackStereo::reset_payload_size() {
- payload_size_ = 0;
-}
-
-void TestPackStereo::set_codec_mode(enum StereoMonoMode mode) {
- codec_mode_ = mode;
-}
-
-void TestPackStereo::set_lost_packet(bool lost) {
- lost_packet_ = lost;
-}
-
-TestStereo::TestStereo(int test_mode)
- : acm_a_(AudioCodingModule::Create(0)),
- acm_b_(AudioCodingModule::Create(1)),
- channel_a2b_(NULL),
- test_cntr_(0),
- pack_size_samp_(0),
- pack_size_bytes_(0),
- counter_(0)
-#ifdef WEBRTC_CODEC_G722
- , g722_pltype_(0)
-#endif
- , l16_8khz_pltype_(-1)
- , l16_16khz_pltype_(-1)
- , l16_32khz_pltype_(-1)
-#ifdef PCMA_AND_PCMU
- , pcma_pltype_(-1)
- , pcmu_pltype_(-1)
-#endif
-#ifdef WEBRTC_CODEC_OPUS
- , opus_pltype_(-1)
-#endif
- {
- // test_mode = 0 for silent test (auto test)
- test_mode_ = test_mode;
-}
-
-TestStereo::~TestStereo() {
- if (channel_a2b_ != NULL) {
- delete channel_a2b_;
- channel_a2b_ = NULL;
- }
-}
-
-void TestStereo::Perform() {
- uint16_t frequency_hz;
- int audio_channels;
- int codec_channels;
- bool dtx;
- bool vad;
- ACMVADMode vad_mode;
-
- // Open both mono and stereo test files in 32 kHz.
- const std::string file_name_stereo = webrtc::test::ResourcePath(
- "audio_coding/teststereo32kHz", "pcm");
- const std::string file_name_mono = webrtc::test::ResourcePath(
- "audio_coding/testfile32kHz", "pcm");
- frequency_hz = 32000;
- in_file_stereo_ = new PCMFile();
- in_file_mono_ = new PCMFile();
- in_file_stereo_->Open(file_name_stereo, frequency_hz, "rb");
- in_file_stereo_->ReadStereo(true);
- in_file_mono_->Open(file_name_mono, frequency_hz, "rb");
- in_file_mono_->ReadStereo(false);
-
- // Create and initialize two ACMs, one for each side of a one-to-one call.
- ASSERT_TRUE((acm_a_.get() != NULL) && (acm_b_.get() != NULL));
- EXPECT_EQ(0, acm_a_->InitializeReceiver());
- EXPECT_EQ(0, acm_b_->InitializeReceiver());
-
- // Register all available codecs as receiving codecs.
- uint8_t num_encoders = acm_a_->NumberOfCodecs();
- CodecInst my_codec_param;
- for (uint8_t n = 0; n < num_encoders; n++) {
- EXPECT_EQ(0, acm_b_->Codec(n, &my_codec_param));
- EXPECT_EQ(0, acm_b_->RegisterReceiveCodec(my_codec_param));
- }
-
- // Test that unregistering all receive codecs works.
- for (uint8_t n = 0; n < num_encoders; n++) {
- EXPECT_EQ(0, acm_b_->Codec(n, &my_codec_param));
- EXPECT_EQ(0, acm_b_->UnregisterReceiveCodec(my_codec_param.pltype));
- }
-
- // Register all available codecs as receiving codecs once more.
- for (uint8_t n = 0; n < num_encoders; n++) {
- EXPECT_EQ(0, acm_b_->Codec(n, &my_codec_param));
- EXPECT_EQ(0, acm_b_->RegisterReceiveCodec(my_codec_param));
- }
-
- // Create and connect the channel.
- channel_a2b_ = new TestPackStereo;
- EXPECT_EQ(0, acm_a_->RegisterTransportCallback(channel_a2b_));
- channel_a2b_->RegisterReceiverACM(acm_b_.get());
-
- // Start with setting VAD/DTX, before we know we will send stereo.
- // Continue with setting a stereo codec as send codec and verify that
- // VAD/DTX gets turned off.
- EXPECT_EQ(0, acm_a_->SetVAD(true, true, VADNormal));
- EXPECT_EQ(0, acm_a_->VAD(&dtx, &vad, &vad_mode));
- EXPECT_TRUE(dtx);
- EXPECT_TRUE(vad);
- char codec_pcma_temp[] = "PCMA";
- RegisterSendCodec('A', codec_pcma_temp, 8000, 64000, 80, 2, pcma_pltype_);
- EXPECT_EQ(0, acm_a_->VAD(&dtx, &vad, &vad_mode));
- EXPECT_FALSE(dtx);
- EXPECT_FALSE(vad);
- if (test_mode_ != 0) {
- printf("\n");
- }
-
- //
- // Test Stereo-To-Stereo for all codecs.
- //
- audio_channels = 2;
- codec_channels = 2;
-
- // All codecs are tested for all allowed sampling frequencies, rates and
- // packet sizes.
-#ifdef WEBRTC_CODEC_G722
- if (test_mode_ != 0) {
- printf("===========================================================\n");
- printf("Test number: %d\n", test_cntr_ + 1);
- printf("Test type: Stereo-to-stereo\n");
- }
- channel_a2b_->set_codec_mode(kStereo);
- test_cntr_++;
- OpenOutFile(test_cntr_);
- char codec_g722[] = "G722";
- RegisterSendCodec('A', codec_g722, 16000, 64000, 160, codec_channels,
- g722_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- RegisterSendCodec('A', codec_g722, 16000, 64000, 320, codec_channels,
- g722_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- RegisterSendCodec('A', codec_g722, 16000, 64000, 480, codec_channels,
- g722_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- RegisterSendCodec('A', codec_g722, 16000, 64000, 640, codec_channels,
- g722_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- RegisterSendCodec('A', codec_g722, 16000, 64000, 800, codec_channels,
- g722_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- RegisterSendCodec('A', codec_g722, 16000, 64000, 960, codec_channels,
- g722_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- out_file_.Close();
-#endif
- if (test_mode_ != 0) {
- printf("===========================================================\n");
- printf("Test number: %d\n", test_cntr_ + 1);
- printf("Test type: Stereo-to-stereo\n");
- }
- channel_a2b_->set_codec_mode(kStereo);
- test_cntr_++;
- OpenOutFile(test_cntr_);
- char codec_l16[] = "L16";
- RegisterSendCodec('A', codec_l16, 8000, 128000, 80, codec_channels,
- l16_8khz_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- RegisterSendCodec('A', codec_l16, 8000, 128000, 160, codec_channels,
- l16_8khz_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- RegisterSendCodec('A', codec_l16, 8000, 128000, 240, codec_channels,
- l16_8khz_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- RegisterSendCodec('A', codec_l16, 8000, 128000, 320, codec_channels,
- l16_8khz_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- out_file_.Close();
-
- if (test_mode_ != 0) {
- printf("===========================================================\n");
- printf("Test number: %d\n", test_cntr_ + 1);
- printf("Test type: Stereo-to-stereo\n");
- }
- test_cntr_++;
- OpenOutFile(test_cntr_);
- RegisterSendCodec('A', codec_l16, 16000, 256000, 160, codec_channels,
- l16_16khz_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- RegisterSendCodec('A', codec_l16, 16000, 256000, 320, codec_channels,
- l16_16khz_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- RegisterSendCodec('A', codec_l16, 16000, 256000, 480, codec_channels,
- l16_16khz_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- RegisterSendCodec('A', codec_l16, 16000, 256000, 640, codec_channels,
- l16_16khz_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- out_file_.Close();
-
- if (test_mode_ != 0) {
- printf("===========================================================\n");
- printf("Test number: %d\n", test_cntr_ + 1);
- printf("Test type: Stereo-to-stereo\n");
- }
- test_cntr_++;
- OpenOutFile(test_cntr_);
- RegisterSendCodec('A', codec_l16, 32000, 512000, 320, codec_channels,
- l16_32khz_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- RegisterSendCodec('A', codec_l16, 32000, 512000, 640, codec_channels,
- l16_32khz_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- out_file_.Close();
-#ifdef PCMA_AND_PCMU
- if (test_mode_ != 0) {
- printf("===========================================================\n");
- printf("Test number: %d\n", test_cntr_ + 1);
- printf("Test type: Stereo-to-stereo\n");
- }
- channel_a2b_->set_codec_mode(kStereo);
- audio_channels = 2;
- codec_channels = 2;
- test_cntr_++;
- OpenOutFile(test_cntr_);
- char codec_pcma[] = "PCMA";
- RegisterSendCodec('A', codec_pcma, 8000, 64000, 80, codec_channels,
- pcma_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- RegisterSendCodec('A', codec_pcma, 8000, 64000, 160, codec_channels,
- pcma_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- RegisterSendCodec('A', codec_pcma, 8000, 64000, 240, codec_channels,
- pcma_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- RegisterSendCodec('A', codec_pcma, 8000, 64000, 320, codec_channels,
- pcma_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- RegisterSendCodec('A', codec_pcma, 8000, 64000, 400, codec_channels,
- pcma_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- RegisterSendCodec('A', codec_pcma, 8000, 64000, 480, codec_channels,
- pcma_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
-
- // Test that VAD/DTX cannot be turned on while sending stereo.
- EXPECT_EQ(-1, acm_a_->SetVAD(true, true, VADNormal));
- EXPECT_EQ(0, acm_a_->VAD(&dtx, &vad, &vad_mode));
- EXPECT_FALSE(dtx);
- EXPECT_FALSE(vad);
- EXPECT_EQ(0, acm_a_->SetVAD(false, false, VADNormal));
- EXPECT_EQ(0, acm_a_->VAD(&dtx, &vad, &vad_mode));
- EXPECT_FALSE(dtx);
- EXPECT_FALSE(vad);
-
- out_file_.Close();
- if (test_mode_ != 0) {
- printf("===========================================================\n");
- printf("Test number: %d\n", test_cntr_ + 1);
- printf("Test type: Stereo-to-stereo\n");
- }
- test_cntr_++;
- OpenOutFile(test_cntr_);
- char codec_pcmu[] = "PCMU";
- RegisterSendCodec('A', codec_pcmu, 8000, 64000, 80, codec_channels,
- pcmu_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- RegisterSendCodec('A', codec_pcmu, 8000, 64000, 160, codec_channels,
- pcmu_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- RegisterSendCodec('A', codec_pcmu, 8000, 64000, 240, codec_channels,
- pcmu_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- RegisterSendCodec('A', codec_pcmu, 8000, 64000, 320, codec_channels,
- pcmu_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- RegisterSendCodec('A', codec_pcmu, 8000, 64000, 400, codec_channels,
- pcmu_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- RegisterSendCodec('A', codec_pcmu, 8000, 64000, 480, codec_channels,
- pcmu_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- out_file_.Close();
-#endif
-#ifdef WEBRTC_CODEC_OPUS
- if (test_mode_ != 0) {
- printf("===========================================================\n");
- printf("Test number: %d\n", test_cntr_ + 1);
- printf("Test type: Stereo-to-stereo\n");
- }
- channel_a2b_->set_codec_mode(kStereo);
- audio_channels = 2;
- codec_channels = 2;
- test_cntr_++;
- OpenOutFile(test_cntr_);
-
- char codec_opus[] = "opus";
- // Run Opus with 10 ms frame size.
- RegisterSendCodec('A', codec_opus, 48000, 64000, 480, codec_channels,
- opus_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- // Run Opus with 20 ms frame size.
- RegisterSendCodec('A', codec_opus, 48000, 64000, 480*2, codec_channels,
- opus_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- // Run Opus with 40 ms frame size.
- RegisterSendCodec('A', codec_opus, 48000, 64000, 480*4, codec_channels,
- opus_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- // Run Opus with 60 ms frame size.
- RegisterSendCodec('A', codec_opus, 48000, 64000, 480*6, codec_channels,
- opus_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- // Run Opus with 20 ms frame size and different bitrates.
- RegisterSendCodec('A', codec_opus, 48000, 40000, 960, codec_channels,
- opus_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- RegisterSendCodec('A', codec_opus, 48000, 510000, 960, codec_channels,
- opus_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- out_file_.Close();
-#endif
- //
- // Test Mono-To-Stereo for all codecs.
- //
- audio_channels = 1;
- codec_channels = 2;
-
-#ifdef WEBRTC_CODEC_G722
- if (test_mode_ != 0) {
- printf("===============================================================\n");
- printf("Test number: %d\n", test_cntr_ + 1);
- printf("Test type: Mono-to-stereo\n");
- }
- test_cntr_++;
- channel_a2b_->set_codec_mode(kStereo);
- OpenOutFile(test_cntr_);
- RegisterSendCodec('A', codec_g722, 16000, 64000, 160, codec_channels,
- g722_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- out_file_.Close();
-#endif
- if (test_mode_ != 0) {
- printf("===============================================================\n");
- printf("Test number: %d\n", test_cntr_ + 1);
- printf("Test type: Mono-to-stereo\n");
- }
- test_cntr_++;
- channel_a2b_->set_codec_mode(kStereo);
- OpenOutFile(test_cntr_);
- RegisterSendCodec('A', codec_l16, 8000, 128000, 80, codec_channels,
- l16_8khz_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- out_file_.Close();
- if (test_mode_ != 0) {
- printf("===============================================================\n");
- printf("Test number: %d\n", test_cntr_ + 1);
- printf("Test type: Mono-to-stereo\n");
- }
- test_cntr_++;
- OpenOutFile(test_cntr_);
- RegisterSendCodec('A', codec_l16, 16000, 256000, 160, codec_channels,
- l16_16khz_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- out_file_.Close();
- if (test_mode_ != 0) {
- printf("===============================================================\n");
- printf("Test number: %d\n", test_cntr_ + 1);
- printf("Test type: Mono-to-stereo\n");
- }
- test_cntr_++;
- OpenOutFile(test_cntr_);
- RegisterSendCodec('A', codec_l16, 32000, 512000, 320, codec_channels,
- l16_32khz_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- out_file_.Close();
-#ifdef PCMA_AND_PCMU
- if (test_mode_ != 0) {
- printf("===============================================================\n");
- printf("Test number: %d\n", test_cntr_ + 1);
- printf("Test type: Mono-to-stereo\n");
- }
- test_cntr_++;
- channel_a2b_->set_codec_mode(kStereo);
- OpenOutFile(test_cntr_);
- RegisterSendCodec('A', codec_pcmu, 8000, 64000, 80, codec_channels,
- pcmu_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- RegisterSendCodec('A', codec_pcma, 8000, 64000, 80, codec_channels,
- pcma_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- out_file_.Close();
-#endif
-#ifdef WEBRTC_CODEC_OPUS
- if (test_mode_ != 0) {
- printf("===============================================================\n");
- printf("Test number: %d\n", test_cntr_ + 1);
- printf("Test type: Mono-to-stereo\n");
- }
-
- // Keep encode and decode in stereo.
- test_cntr_++;
- channel_a2b_->set_codec_mode(kStereo);
- OpenOutFile(test_cntr_);
- RegisterSendCodec('A', codec_opus, 48000, 64000, 960, codec_channels,
- opus_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
-
- // Encode in mono, decode in stereo mode.
- RegisterSendCodec('A', codec_opus, 48000, 64000, 960, 1, opus_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- out_file_.Close();
-#endif
-
- //
- // Test Stereo-To-Mono for all codecs.
- //
- audio_channels = 2;
- codec_channels = 1;
- channel_a2b_->set_codec_mode(kMono);
-
-#ifdef WEBRTC_CODEC_G722
- // Run stereo audio and mono codec.
- if (test_mode_ != 0) {
- printf("===============================================================\n");
- printf("Test number: %d\n", test_cntr_ + 1);
- printf("Test type: Stereo-to-mono\n");
- }
- test_cntr_++;
- OpenOutFile(test_cntr_);
- RegisterSendCodec('A', codec_g722, 16000, 64000, 160, codec_channels,
- g722_pltype_);
-
- // Make sure it is possible to set VAD/CNG, now that we are sending mono
- // again.
- EXPECT_EQ(0, acm_a_->SetVAD(true, true, VADNormal));
- EXPECT_EQ(0, acm_a_->VAD(&dtx, &vad, &vad_mode));
- EXPECT_TRUE(dtx);
- EXPECT_TRUE(vad);
- EXPECT_EQ(0, acm_a_->SetVAD(false, false, VADNormal));
- Run(channel_a2b_, audio_channels, codec_channels);
- out_file_.Close();
-#endif
- if (test_mode_ != 0) {
- printf("===============================================================\n");
- printf("Test number: %d\n", test_cntr_ + 1);
- printf("Test type: Stereo-to-mono\n");
- }
- test_cntr_++;
- OpenOutFile(test_cntr_);
- RegisterSendCodec('A', codec_l16, 8000, 128000, 80, codec_channels,
- l16_8khz_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- out_file_.Close();
- if (test_mode_ != 0) {
- printf("===============================================================\n");
- printf("Test number: %d\n", test_cntr_ + 1);
- printf("Test type: Stereo-to-mono\n");
- }
- test_cntr_++;
- OpenOutFile(test_cntr_);
- RegisterSendCodec('A', codec_l16, 16000, 256000, 160, codec_channels,
- l16_16khz_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- out_file_.Close();
- if (test_mode_ != 0) {
- printf("==============================================================\n");
- printf("Test number: %d\n", test_cntr_ + 1);
- printf("Test type: Stereo-to-mono\n");
- }
- test_cntr_++;
- OpenOutFile(test_cntr_);
- RegisterSendCodec('A', codec_l16, 32000, 512000, 320, codec_channels,
- l16_32khz_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- out_file_.Close();
-#ifdef PCMA_AND_PCMU
- if (test_mode_ != 0) {
- printf("===============================================================\n");
- printf("Test number: %d\n", test_cntr_ + 1);
- printf("Test type: Stereo-to-mono\n");
- }
- test_cntr_++;
- OpenOutFile(test_cntr_);
- RegisterSendCodec('A', codec_pcmu, 8000, 64000, 80, codec_channels,
- pcmu_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- RegisterSendCodec('A', codec_pcma, 8000, 64000, 80, codec_channels,
- pcma_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
- out_file_.Close();
-#endif
-#ifdef WEBRTC_CODEC_OPUS
- if (test_mode_ != 0) {
- printf("===============================================================\n");
- printf("Test number: %d\n", test_cntr_ + 1);
- printf("Test type: Stereo-to-mono\n");
- }
- test_cntr_++;
- OpenOutFile(test_cntr_);
- // Encode and decode in mono.
- RegisterSendCodec('A', codec_opus, 48000, 32000, 960, codec_channels,
- opus_pltype_);
- CodecInst opus_codec_param;
- for (uint8_t n = 0; n < num_encoders; n++) {
- EXPECT_EQ(0, acm_b_->Codec(n, &opus_codec_param));
- if (!strcmp(opus_codec_param.plname, "opus")) {
- opus_codec_param.channels = 1;
- EXPECT_EQ(0, acm_b_->RegisterReceiveCodec(opus_codec_param));
- break;
- }
- }
- Run(channel_a2b_, audio_channels, codec_channels);
-
- // Encode in stereo, decode in mono.
- RegisterSendCodec('A', codec_opus, 48000, 32000, 960, 2, opus_pltype_);
- Run(channel_a2b_, audio_channels, codec_channels);
-
- out_file_.Close();
-
- // Test switching between decoding mono and stereo for Opus.
-
- // Decode in mono.
- test_cntr_++;
- OpenOutFile(test_cntr_);
- if (test_mode_ != 0) {
- // Print out codec and settings
- printf("Test number: %d\nCodec: Opus Freq: 48000 Rate :32000 PackSize: 960"
- " Decode: mono\n", test_cntr_);
- }
- Run(channel_a2b_, audio_channels, codec_channels);
- out_file_.Close();
- // Decode in stereo.
- test_cntr_++;
- OpenOutFile(test_cntr_);
- if (test_mode_ != 0) {
- // Print out codec and settings
- printf("Test number: %d\nCodec: Opus Freq: 48000 Rate :32000 PackSize: 960"
- " Decode: stereo\n", test_cntr_);
- }
- opus_codec_param.channels = 2;
- EXPECT_EQ(0, acm_b_->RegisterReceiveCodec(opus_codec_param));
- Run(channel_a2b_, audio_channels, 2);
- out_file_.Close();
- // Decode in mono.
- test_cntr_++;
- OpenOutFile(test_cntr_);
- if (test_mode_ != 0) {
- // Print out codec and settings
- printf("Test number: %d\nCodec: Opus Freq: 48000 Rate :32000 PackSize: 960"
- " Decode: mono\n", test_cntr_);
- }
- opus_codec_param.channels = 1;
- EXPECT_EQ(0, acm_b_->RegisterReceiveCodec(opus_codec_param));
- Run(channel_a2b_, audio_channels, codec_channels);
- out_file_.Close();
-
-#endif
-
- // Print out which codecs were tested, and which were not, in the run.
- if (test_mode_ != 0) {
- printf("\nThe following codecs was INCLUDED in the test:\n");
-#ifdef WEBRTC_CODEC_G722
- printf(" G.722\n");
-#endif
- printf(" PCM16\n");
- printf(" G.711\n");
-#ifdef WEBRTC_CODEC_OPUS
- printf(" Opus\n");
-#endif
- printf("\nTo complete the test, listen to the %d number of output "
- "files.\n",
- test_cntr_);
- }
-
- // Delete the file pointers.
- delete in_file_stereo_;
- delete in_file_mono_;
-}
-
-// Register the codec to use in the test.
-//
-// Input: side - which ACM to use, 'A' or 'B'
-// codec_name - name to use when registering the codec
-// sampling_freq_hz - sampling frequency in hertz
-// rate - bitrate in bits per second
-// pack_size - packet size in samples
-// channels - number of channels; 1 for mono, 2 for stereo
-// payload_type - payload type for the codec
-void TestStereo::RegisterSendCodec(char side, char* codec_name,
- int32_t sampling_freq_hz, int rate,
- int pack_size, int channels,
- int payload_type) {
- if (test_mode_ != 0) {
- // Print out codec and settings
- printf("Codec: %s Freq: %d Rate: %d PackSize: %d\n", codec_name,
- sampling_freq_hz, rate, pack_size);
- }
-
- // Store packet size in samples, used to validate the received packet
- pack_size_samp_ = pack_size;
-
- // Store the expected packet size in bytes, used to validate the received
- // packet. Add 0.875 to always round up to a whole byte.
- pack_size_bytes_ = (uint16_t)(static_cast<float>(pack_size * rate) /
- static_cast<float>(sampling_freq_hz * 8) +
- 0.875);
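- // Worked example: mono L16 at 8 kHz, 128000 bits/s, 80-sample packets give
- // 80 * 128000 / (8000 * 8) = 160 bytes, i.e. 80 samples * 2 bytes/sample;
- // the 0.875 term only rounds up when the division leaves a fraction.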
-
- // Set pointer to the ACM where to register the codec
- AudioCodingModule* my_acm = NULL;
- switch (side) {
- case 'A': {
- my_acm = acm_a_.get();
- break;
- }
- case 'B': {
- my_acm = acm_b_.get();
- break;
- }
- default:
- break;
- }
- ASSERT_TRUE(my_acm != NULL);
-
- CodecInst my_codec_param;
- // Get all codec parameters before registering
- EXPECT_GT(AudioCodingModule::Codec(codec_name, &my_codec_param,
- sampling_freq_hz, channels), -1);
- my_codec_param.rate = rate;
- my_codec_param.pacsize = pack_size;
- EXPECT_EQ(0, my_acm->RegisterSendCodec(my_codec_param));
-
- send_codec_name_ = codec_name;
-}
-
-void TestStereo::Run(TestPackStereo* channel, int in_channels, int out_channels,
- int percent_loss) {
- AudioFrame audio_frame;
-
- int32_t out_freq_hz_b = out_file_.SamplingFrequency();
- uint16_t rec_size;
- uint32_t time_stamp_diff;
- channel->reset_payload_size();
- int error_count = 0;
- int variable_bytes = 0;
- int variable_packets = 0;
-
- while (1) {
- // Simulate packet loss by setting |lost_packet_| to true in
- // |percent_loss| percent of the loops.
- if (percent_loss > 0) {
- if (counter_ == floor((100 / percent_loss) + 0.5)) {
- counter_ = 0;
- channel->set_lost_packet(true);
- } else {
- channel->set_lost_packet(false);
- }
- counter_++;
- }
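- // For example, with percent_loss = 10, floor(100 / 10 + 0.5) = 10, so
- // every 10th frame through the loop is flagged as lost, i.e. roughly 10%.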
-
- // Add 10 msec to ACM
- if (in_channels == 1) {
- if (in_file_mono_->EndOfFile()) {
- break;
- }
- in_file_mono_->Read10MsData(audio_frame);
- } else {
- if (in_file_stereo_->EndOfFile()) {
- break;
- }
- in_file_stereo_->Read10MsData(audio_frame);
- }
- EXPECT_GE(acm_a_->Add10MsData(audio_frame), 0);
-
- // Verify that the received packet size matches the settings.
- rec_size = channel->payload_size();
- if ((0 < rec_size) & (rec_size < 65535)) {
- if (strcmp(send_codec_name_, "opus") == 0) {
- // Opus is a variable rate codec, hence calculate the average packet
- // size, and later make sure the average is in the right range.
- variable_bytes += rec_size;
- variable_packets++;
- } else {
- // For fixed rate codecs, check that packet size is correct.
- if ((rec_size != pack_size_bytes_ * out_channels)
- && (pack_size_bytes_ < 65535)) {
- error_count++;
- }
- }
- // Verify that the timestamp is updated with expected length
- time_stamp_diff = channel->timestamp_diff();
- if ((counter_ > 10) && (time_stamp_diff != pack_size_samp_)) {
- error_count++;
- }
- }
-
- // Run received side of ACM
- EXPECT_EQ(0, acm_b_->PlayoutData10Ms(out_freq_hz_b, &audio_frame));
-
- // Write output speech to file
- out_file_.Write10MsData(
- audio_frame.data_,
- audio_frame.samples_per_channel_ * audio_frame.num_channels_);
- }
-
- EXPECT_EQ(0, error_count);
-
- // Check that packet size is in the right range for variable rate codecs,
- // such as Opus.
- if (variable_packets > 0) {
- variable_bytes /= variable_packets;
- EXPECT_NEAR(variable_bytes, pack_size_bytes_, 3);
- }
-
- if (in_file_mono_->EndOfFile()) {
- in_file_mono_->Rewind();
- }
- if (in_file_stereo_->EndOfFile()) {
- in_file_stereo_->Rewind();
- }
- // Reset in case we ended with a lost packet
- channel->set_lost_packet(false);
-}
-
-void TestStereo::OpenOutFile(int16_t test_number) {
- std::string file_name;
- std::stringstream file_stream;
- file_stream << webrtc::test::OutputPath() << "teststereo_out_" << test_number
- << ".pcm";
- file_name = file_stream.str();
- out_file_.Open(file_name, 32000, "wb");
-}
-
-void TestStereo::DisplaySendReceiveCodec() {
- CodecInst my_codec_param;
- acm_a_->SendCodec(&my_codec_param);
- if (test_mode_ != 0) {
- printf("%s -> ", my_codec_param.plname);
- }
- acm_b_->ReceiveCodec(&my_codec_param);
- if (test_mode_ != 0) {
- printf("%s\n", my_codec_param.plname);
- }
-}
-
-} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/main/test/TestStereo.h b/webrtc/modules/audio_coding/main/test/TestStereo.h
deleted file mode 100644
index b56e995272..0000000000
--- a/webrtc/modules/audio_coding/main/test/TestStereo.h
+++ /dev/null
@@ -1,117 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_TESTSTEREO_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_TESTSTEREO_H_
-
-#include <math.h>
-
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/audio_coding/main/test/ACMTest.h"
-#include "webrtc/modules/audio_coding/main/test/Channel.h"
-#include "webrtc/modules/audio_coding/main/test/PCMFile.h"
-
-#define PCMA_AND_PCMU
-
-namespace webrtc {
-
-enum StereoMonoMode {
- kNotSet,
- kMono,
- kStereo
-};
-
-class TestPackStereo : public AudioPacketizationCallback {
- public:
- TestPackStereo();
- ~TestPackStereo();
-
- void RegisterReceiverACM(AudioCodingModule* acm);
-
- int32_t SendData(const FrameType frame_type,
- const uint8_t payload_type,
- const uint32_t timestamp,
- const uint8_t* payload_data,
- const size_t payload_size,
- const RTPFragmentationHeader* fragmentation) override;
-
- uint16_t payload_size();
- uint32_t timestamp_diff();
- void reset_payload_size();
- void set_codec_mode(StereoMonoMode mode);
- void set_lost_packet(bool lost);
-
- private:
- AudioCodingModule* receiver_acm_;
- uint16_t seq_no_;
- uint32_t timestamp_diff_;
- uint32_t last_in_timestamp_;
- uint64_t total_bytes_;
- int payload_size_;
- StereoMonoMode codec_mode_;
- // Simulate packet losses
- bool lost_packet_;
-};
-
-class TestStereo : public ACMTest {
- public:
- explicit TestStereo(int test_mode);
- ~TestStereo();
-
- void Perform() override;
-
- private:
- // The default value of '-1' indicates that the registration is based only
- // on the codec name, and that sampling-frequency matching is not required.
- // This is useful for codecs that support several sampling frequencies.
- void RegisterSendCodec(char side, char* codec_name, int32_t samp_freq_hz,
- int rate, int pack_size, int channels,
- int payload_type);
-
- void Run(TestPackStereo* channel, int in_channels, int out_channels,
- int percent_loss = 0);
- void OpenOutFile(int16_t test_number);
- void DisplaySendReceiveCodec();
-
- int test_mode_;
-
- rtc::scoped_ptr<AudioCodingModule> acm_a_;
- rtc::scoped_ptr<AudioCodingModule> acm_b_;
-
- TestPackStereo* channel_a2b_;
-
- PCMFile* in_file_stereo_;
- PCMFile* in_file_mono_;
- PCMFile out_file_;
- int16_t test_cntr_;
- uint16_t pack_size_samp_;
- uint16_t pack_size_bytes_;
- int counter_;
- char* send_codec_name_;
-
- // Payload types for stereo codecs and CNG
-#ifdef WEBRTC_CODEC_G722
- int g722_pltype_;
-#endif
- int l16_8khz_pltype_;
- int l16_16khz_pltype_;
- int l16_32khz_pltype_;
-#ifdef PCMA_AND_PCMU
- int pcma_pltype_;
- int pcmu_pltype_;
-#endif
-#ifdef WEBRTC_CODEC_OPUS
- int opus_pltype_;
-#endif
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_TESTSTEREO_H_
diff --git a/webrtc/modules/audio_coding/main/test/TestVADDTX.cc b/webrtc/modules/audio_coding/main/test/TestVADDTX.cc
deleted file mode 100644
index bd0335a5f3..0000000000
--- a/webrtc/modules/audio_coding/main/test/TestVADDTX.cc
+++ /dev/null
@@ -1,271 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/test/TestVADDTX.h"
-
-#include <string>
-
-#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/audio_coding/main/test/PCMFile.h"
-#include "webrtc/modules/audio_coding/main/test/utility.h"
-#include "webrtc/test/testsupport/fileutils.h"
-
-namespace webrtc {
-
-#ifdef WEBRTC_CODEC_ISAC
-const CodecInst kIsacWb = {103, "ISAC", 16000, 480, 1, 32000};
-const CodecInst kIsacSwb = {104, "ISAC", 32000, 960, 1, 56000};
-#endif
-
-#ifdef WEBRTC_CODEC_ILBC
-const CodecInst kIlbc = {102, "ILBC", 8000, 240, 1, 13300};
-#endif
-
-#ifdef WEBRTC_CODEC_OPUS
-const CodecInst kOpus = {120, "opus", 48000, 960, 1, 64000};
-const CodecInst kOpusStereo = {120, "opus", 48000, 960, 2, 64000};
-#endif
-
-ActivityMonitor::ActivityMonitor() {
- ResetStatistics();
-}
-
-int32_t ActivityMonitor::InFrameType(FrameType frame_type) {
- counter_[frame_type]++;
- return 0;
-}
-
-void ActivityMonitor::PrintStatistics() {
- printf("\n");
- printf("kEmptyFrame %u\n", counter_[kEmptyFrame]);
- printf("kAudioFrameSpeech %u\n", counter_[kAudioFrameSpeech]);
- printf("kAudioFrameCN %u\n", counter_[kAudioFrameCN]);
- printf("kVideoFrameKey %u\n", counter_[kVideoFrameKey]);
- printf("kVideoFrameDelta %u\n", counter_[kVideoFrameDelta]);
- printf("\n\n");
-}
-
-void ActivityMonitor::ResetStatistics() {
- memset(counter_, 0, sizeof(counter_));
-}
-
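-// GetStatistics() copies sizeof(counter_) bytes, so callers must supply an
-// array of at least five uint32_t elements, as the uint32_t stats[5] buffer
-// in TestVadDtx::Run does.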
-void ActivityMonitor::GetStatistics(uint32_t* counter) {
- memcpy(counter, counter_, sizeof(counter_));
-}
-
-TestVadDtx::TestVadDtx()
- : acm_send_(AudioCodingModule::Create(0)),
- acm_receive_(AudioCodingModule::Create(1)),
- channel_(new Channel),
- monitor_(new ActivityMonitor) {
- EXPECT_EQ(0, acm_send_->RegisterTransportCallback(channel_.get()));
- channel_->RegisterReceiverACM(acm_receive_.get());
- EXPECT_EQ(0, acm_send_->RegisterVADCallback(monitor_.get()));
-}
-
-void TestVadDtx::RegisterCodec(CodecInst codec_param) {
- // Set the codec for sending and receiving.
- EXPECT_EQ(0, acm_send_->RegisterSendCodec(codec_param));
- EXPECT_EQ(0, acm_receive_->RegisterReceiveCodec(codec_param));
- channel_->SetIsStereo(codec_param.channels > 1);
-}
-
-// Encodes a file and checks whether the numbers of various packet types
-// follow the expectation.
-void TestVadDtx::Run(std::string in_filename, int frequency, int channels,
- std::string out_filename, bool append,
- const int* expects) {
- monitor_->ResetStatistics();
-
- PCMFile in_file;
- in_file.Open(in_filename, frequency, "rb");
- in_file.ReadStereo(channels > 1);
-
- PCMFile out_file;
- if (append) {
- out_file.Open(out_filename, kOutputFreqHz, "ab");
- } else {
- out_file.Open(out_filename, kOutputFreqHz, "wb");
- }
-
- uint16_t frame_size_samples = in_file.PayloadLength10Ms();
- uint32_t time_stamp = 0x12345678;
- AudioFrame audio_frame;
- while (!in_file.EndOfFile()) {
- in_file.Read10MsData(audio_frame);
- audio_frame.timestamp_ = time_stamp;
- time_stamp += frame_size_samples;
- EXPECT_GE(acm_send_->Add10MsData(audio_frame), 0);
- acm_receive_->PlayoutData10Ms(kOutputFreqHz, &audio_frame);
- out_file.Write10MsData(audio_frame);
- }
-
- in_file.Close();
- out_file.Close();
-
-#ifdef PRINT_STAT
- monitor_->PrintStatistics();
-#endif
-
- uint32_t stats[5];
- monitor_->GetStatistics(stats);
- monitor_->ResetStatistics();
-
- for (const auto& st : stats) {
- int i = &st - stats; // Calculate the current position in stats.
- switch (expects[i]) {
- case 0: {
- EXPECT_EQ(0u, st) << "stats[" << i << "] error.";
- break;
- }
- case 1: {
- EXPECT_GT(st, 0u) << "stats[" << i << "] error.";
- break;
- }
- }
- }
-}
-
-// Following is the implementation of TestWebRtcVadDtx.
-TestWebRtcVadDtx::TestWebRtcVadDtx()
- : vad_enabled_(false),
- dtx_enabled_(false),
- output_file_num_(0) {
-}
-
-void TestWebRtcVadDtx::Perform() {
- // Go through various test cases.
-#ifdef WEBRTC_CODEC_ISAC
- // Register iSAC WB as send codec
- RegisterCodec(kIsacWb);
- RunTestCases();
-
- // Register iSAC SWB as send codec
- RegisterCodec(kIsacSwb);
- RunTestCases();
-#endif
-
-#ifdef WEBRTC_CODEC_ILBC
- // Register iLBC as send codec
- RegisterCodec(kIlbc);
- RunTestCases();
-#endif
-
-#ifdef WEBRTC_CODEC_OPUS
- // Register Opus as send codec
- RegisterCodec(kOpus);
- RunTestCases();
-#endif
-}
-
-// Test various configurations on VAD/DTX.
-void TestWebRtcVadDtx::RunTestCases() {
- // #1 DTX = OFF, VAD = OFF, VADNormal
- SetVAD(false, false, VADNormal);
- Test(true);
-
- // #2 DTX = ON, VAD = ON, VADAggr
- SetVAD(true, true, VADAggr);
- Test(false);
-
- // #3 DTX = ON, VAD = ON, VADLowBitrate
- SetVAD(true, true, VADLowBitrate);
- Test(false);
-
- // #4 DTX = ON, VAD = ON, VADVeryAggr
- SetVAD(true, true, VADVeryAggr);
- Test(false);
-
- // #5 DTX = ON, VAD = ON, VADNormal
- SetVAD(true, true, VADNormal);
- Test(false);
-}
-
-// Set the expectation and run the test.
-void TestWebRtcVadDtx::Test(bool new_outfile) {
- int expects[] = {-1, 1, dtx_enabled_, 0, 0};
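- // The indices follow FrameType: empty frames are a don't-care (-1), speech
- // frames must occur (1), CN frames occur exactly when DTX is enabled, and
- // the two video frame types (unused by audio) must not occur (0).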
- if (new_outfile) {
- output_file_num_++;
- }
- std::stringstream out_filename;
- out_filename << webrtc::test::OutputPath()
- << "testWebRtcVadDtx_outFile_"
- << output_file_num_
- << ".pcm";
- Run(webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm"),
- 32000, 1, out_filename.str(), !new_outfile, expects);
-}
-
-void TestWebRtcVadDtx::SetVAD(bool enable_dtx, bool enable_vad,
- ACMVADMode vad_mode) {
- ACMVADMode mode;
- EXPECT_EQ(0, acm_send_->SetVAD(enable_dtx, enable_vad, vad_mode));
- EXPECT_EQ(0, acm_send_->VAD(&dtx_enabled_, &vad_enabled_, &mode));
-
- CodecInst codec_param;
- acm_send_->SendCodec(&codec_param);
- if (STR_CASE_CMP(codec_param.plname, "opus") == 0) {
- // If send codec is Opus, WebRTC VAD/DTX cannot be used.
- enable_dtx = enable_vad = false;
- }
-
- EXPECT_EQ(dtx_enabled_, enable_dtx); // DTX should be set as expected.
-
- if (dtx_enabled_) {
- EXPECT_TRUE(vad_enabled_); // WebRTC DTX cannot run without WebRTC VAD.
- } else {
- // Disabling DTX should not affect the VAD setting.
- EXPECT_EQ(enable_vad, vad_enabled_);
- }
-}
-
-// Following is the implementation of TestOpusDtx.
-void TestOpusDtx::Perform() {
-#ifdef WEBRTC_CODEC_ISAC
- // If a codec other than Opus is set, Opus DTX cannot be toggled.
- RegisterCodec(kIsacWb);
- EXPECT_EQ(-1, acm_send_->EnableOpusDtx());
- EXPECT_EQ(-1, acm_send_->DisableOpusDtx());
-#endif
-
-#ifdef WEBRTC_CODEC_OPUS
- int expects[] = {0, 1, 0, 0, 0};
-
- // Register Opus as send codec
- std::string out_filename = webrtc::test::OutputPath() +
- "testOpusDtx_outFile_mono.pcm";
- RegisterCodec(kOpus);
- EXPECT_EQ(0, acm_send_->DisableOpusDtx());
-
- Run(webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm"),
- 32000, 1, out_filename, false, expects);
-
- EXPECT_EQ(0, acm_send_->EnableOpusDtx());
- expects[kEmptyFrame] = 1;
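- // With Opus DTX enabled, silent stretches should now produce empty frames.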
- Run(webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm"),
- 32000, 1, out_filename, true, expects);
-
- // Register stereo Opus as send codec
- out_filename = webrtc::test::OutputPath() + "testOpusDtx_outFile_stereo.pcm";
- RegisterCodec(kOpusStereo);
- EXPECT_EQ(0, acm_send_->DisableOpusDtx());
- expects[kEmptyFrame] = 0;
- Run(webrtc::test::ResourcePath("audio_coding/teststereo32kHz", "pcm"),
- 32000, 2, out_filename, false, expects);
-
- EXPECT_EQ(0, acm_send_->EnableOpusDtx());
-
- expects[kEmptyFrame] = 1;
- Run(webrtc::test::ResourcePath("audio_coding/teststereo32kHz", "pcm"),
- 32000, 2, out_filename, true, expects);
-#endif
-}
-
-} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/main/test/TestVADDTX.h b/webrtc/modules/audio_coding/main/test/TestVADDTX.h
deleted file mode 100644
index 07596e2e86..0000000000
--- a/webrtc/modules/audio_coding/main/test/TestVADDTX.h
+++ /dev/null
@@ -1,102 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_TESTVADDTX_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_TESTVADDTX_H_
-
-
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/common_types.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module_typedefs.h"
-#include "webrtc/modules/audio_coding/main/test/ACMTest.h"
-#include "webrtc/modules/audio_coding/main/test/Channel.h"
-
-namespace webrtc {
-
-class ActivityMonitor : public ACMVADCallback {
- public:
- ActivityMonitor();
- int32_t InFrameType(FrameType frame_type);
- void PrintStatistics();
- void ResetStatistics();
- void GetStatistics(uint32_t* stats);
- private:
- // 0 - kEmptyFrame
- // 1 - kAudioFrameSpeech
- // 2 - kAudioFrameCN
- // 3 - kVideoFrameKey (not used by audio)
- // 4 - kVideoFrameDelta (not used by audio)
- uint32_t counter_[5];
-};
-
-
-// TestVadDtx verifies that VAD/DTX perform as they should. It runs through
-// an audio file and checks whether the occurrence of various packet types
-// follows expectation. TestVadDtx needs its derived classes to implement
-// Perform() to put the test together.
-class TestVadDtx : public ACMTest {
- public:
- static const int kOutputFreqHz = 16000;
-
- TestVadDtx();
-
- virtual void Perform() = 0;
-
- protected:
- void RegisterCodec(CodecInst codec_param);
-
- // Encodes a file and checks whether the numbers of various packet types
- // follow the expectation. Saves the result to a file.
- // expects[x] means
- // -1 : do not care,
- // 0 : there have been no packets of type |x|,
- // 1 : there have been packets of type |x|,
- // where |x| indicates one of the following packet types:
- // 0 - kEmptyFrame
- // 1 - kAudioFrameSpeech
- // 2 - kAudioFrameCN
- // 3 - kVideoFrameKey (not used by audio)
- // 4 - kVideoFrameDelta (not used by audio)
- void Run(std::string in_filename, int frequency, int channels,
- std::string out_filename, bool append, const int* expects);
-
- rtc::scoped_ptr<AudioCodingModule> acm_send_;
- rtc::scoped_ptr<AudioCodingModule> acm_receive_;
- rtc::scoped_ptr<Channel> channel_;
- rtc::scoped_ptr<ActivityMonitor> monitor_;
-};
-
-// TestWebRtcVadDtx verifies that the WebRTC VAD/DTX perform as they should.
-class TestWebRtcVadDtx final : public TestVadDtx {
- public:
- TestWebRtcVadDtx();
-
- void Perform() override;
-
- private:
- void RunTestCases();
- void Test(bool new_outfile);
- void SetVAD(bool enable_dtx, bool enable_vad, ACMVADMode vad_mode);
-
- bool vad_enabled_;
- bool dtx_enabled_;
- int output_file_num_;
-};
-
-// TestOpusDtx verifies that the Opus DTX performs as it should.
-class TestOpusDtx final : public TestVadDtx {
- public:
- void Perform() override;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_TESTVADDTX_H_
diff --git a/webrtc/modules/audio_coding/main/test/Tester.cc b/webrtc/modules/audio_coding/main/test/Tester.cc
deleted file mode 100644
index 7302e5dcbe..0000000000
--- a/webrtc/modules/audio_coding/main/test/Tester.cc
+++ /dev/null
@@ -1,171 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <stdio.h>
-#include <string>
-#include <vector>
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module.h"
-#include "webrtc/modules/audio_coding/main/test/APITest.h"
-#include "webrtc/modules/audio_coding/main/test/EncodeDecodeTest.h"
-#include "webrtc/modules/audio_coding/main/test/iSACTest.h"
-#include "webrtc/modules/audio_coding/main/test/opus_test.h"
-#include "webrtc/modules/audio_coding/main/test/PacketLossTest.h"
-#include "webrtc/modules/audio_coding/main/test/TestAllCodecs.h"
-#include "webrtc/modules/audio_coding/main/test/TestRedFec.h"
-#include "webrtc/modules/audio_coding/main/test/TestStereo.h"
-#include "webrtc/modules/audio_coding/main/test/TestVADDTX.h"
-#include "webrtc/modules/audio_coding/main/test/TwoWayCommunication.h"
-#include "webrtc/system_wrappers/include/trace.h"
-#include "webrtc/test/testsupport/fileutils.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
-
-using webrtc::Trace;
-
-// This parameter describes how to run the tests. It is normally set to 0,
-// in which case all tests run in quiet mode.
-#define ACM_TEST_MODE 0
-
-TEST(AudioCodingModuleTest, TestAllCodecs) {
- Trace::CreateTrace();
- Trace::SetTraceFile((webrtc::test::OutputPath() +
- "acm_allcodecs_trace.txt").c_str());
- webrtc::TestAllCodecs(ACM_TEST_MODE).Perform();
- Trace::ReturnTrace();
-}
-
-TEST(AudioCodingModuleTest, DISABLED_ON_ANDROID(TestEncodeDecode)) {
- Trace::CreateTrace();
- Trace::SetTraceFile((webrtc::test::OutputPath() +
- "acm_encodedecode_trace.txt").c_str());
- webrtc::EncodeDecodeTest(ACM_TEST_MODE).Perform();
- Trace::ReturnTrace();
-}
-
-#ifdef WEBRTC_CODEC_RED
-#define IF_RED(x) x
-#else
-#define IF_RED(x) DISABLED_##x
-#endif
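-// When WEBRTC_CODEC_RED is not defined, IF_RED(TestRedFec) expands to
-// DISABLED_TestRedFec; gtest skips DISABLED_-prefixed tests unless
-// --gtest_also_run_disabled_tests is passed. The IF_ISAC and IF_ALL_CODECS
-// macros below follow the same pattern.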
-
-TEST(AudioCodingModuleTest, DISABLED_ON_ANDROID(IF_RED(TestRedFec))) {
- Trace::CreateTrace();
- Trace::SetTraceFile((webrtc::test::OutputPath() +
- "acm_fec_trace.txt").c_str());
- webrtc::TestRedFec().Perform();
- Trace::ReturnTrace();
-}
-
-#if defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX)
-#define IF_ISAC(x) x
-#else
-#define IF_ISAC(x) DISABLED_##x
-#endif
-
-TEST(AudioCodingModuleTest, DISABLED_ON_ANDROID(IF_ISAC(TestIsac))) {
- Trace::CreateTrace();
- Trace::SetTraceFile((webrtc::test::OutputPath() +
- "acm_isac_trace.txt").c_str());
- webrtc::ISACTest(ACM_TEST_MODE).Perform();
- Trace::ReturnTrace();
-}
-
-#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX)) && \
- defined(WEBRTC_CODEC_ILBC) && defined(WEBRTC_CODEC_G722)
-#define IF_ALL_CODECS(x) x
-#else
-#define IF_ALL_CODECS(x) DISABLED_##x
-#endif
-
-TEST(AudioCodingModuleTest,
- DISABLED_ON_ANDROID(IF_ALL_CODECS(TwoWayCommunication))) {
- Trace::CreateTrace();
- Trace::SetTraceFile((webrtc::test::OutputPath() +
- "acm_twowaycom_trace.txt").c_str());
- webrtc::TwoWayCommunication(ACM_TEST_MODE).Perform();
- Trace::ReturnTrace();
-}
-
-TEST(AudioCodingModuleTest, DISABLED_ON_ANDROID(TestStereo)) {
- Trace::CreateTrace();
- Trace::SetTraceFile((webrtc::test::OutputPath() +
- "acm_stereo_trace.txt").c_str());
- webrtc::TestStereo(ACM_TEST_MODE).Perform();
- Trace::ReturnTrace();
-}
-
-TEST(AudioCodingModuleTest, DISABLED_ON_ANDROID(TestWebRtcVadDtx)) {
- Trace::CreateTrace();
- Trace::SetTraceFile((webrtc::test::OutputPath() +
- "acm_vaddtx_trace.txt").c_str());
- webrtc::TestWebRtcVadDtx().Perform();
- Trace::ReturnTrace();
-}
-
-TEST(AudioCodingModuleTest, TestOpusDtx) {
- Trace::CreateTrace();
- Trace::SetTraceFile((webrtc::test::OutputPath() +
- "acm_opusdtx_trace.txt").c_str());
- webrtc::TestOpusDtx().Perform();
- Trace::ReturnTrace();
-}
-
-TEST(AudioCodingModuleTest, TestOpus) {
- Trace::CreateTrace();
- Trace::SetTraceFile((webrtc::test::OutputPath() +
- "acm_opus_trace.txt").c_str());
- webrtc::OpusTest().Perform();
- Trace::ReturnTrace();
-}
-
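-// The PacketLossTest arguments below are taken to be, in order: channel
-// count, expected loss rate (%), actual loss rate (%), and burst length in
-// packets (inferred from how the stereo and burst variants below differ).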
-TEST(AudioCodingModuleTest, TestPacketLoss) {
- Trace::CreateTrace();
- Trace::SetTraceFile((webrtc::test::OutputPath() +
- "acm_packetloss_trace.txt").c_str());
- webrtc::PacketLossTest(1, 10, 10, 1).Perform();
- Trace::ReturnTrace();
-}
-
-TEST(AudioCodingModuleTest, TestPacketLossBurst) {
- Trace::CreateTrace();
- Trace::SetTraceFile((webrtc::test::OutputPath() +
- "acm_packetloss_burst_trace.txt").c_str());
- webrtc::PacketLossTest(1, 10, 10, 2).Perform();
- Trace::ReturnTrace();
-}
-
-TEST(AudioCodingModuleTest, TestPacketLossStereo) {
- Trace::CreateTrace();
- Trace::SetTraceFile((webrtc::test::OutputPath() +
- "acm_packetloss_trace.txt").c_str());
- webrtc::PacketLossTest(2, 10, 10, 1).Perform();
- Trace::ReturnTrace();
-}
-
-TEST(AudioCodingModuleTest, TestPacketLossStereoBurst) {
- Trace::CreateTrace();
- Trace::SetTraceFile((webrtc::test::OutputPath() +
- "acm_packetloss_burst_trace.txt").c_str());
- webrtc::PacketLossTest(2, 10, 10, 2).Perform();
- Trace::ReturnTrace();
-}
-
-// The full API test is too long to run automatically on bots, but can be used
-// for offline testing. User interaction is needed.
-#ifdef ACM_TEST_FULL_API
- TEST(AudioCodingModuleTest, TestAPI) {
- Trace::CreateTrace();
- Trace::SetTraceFile((webrtc::test::OutputPath() +
- "acm_apitest_trace.txt").c_str());
- webrtc::APITest().Perform();
- Trace::ReturnTrace();
- }
-#endif
diff --git a/webrtc/modules/audio_coding/main/test/TimedTrace.h b/webrtc/modules/audio_coding/main/test/TimedTrace.h
deleted file mode 100644
index ef9609a267..0000000000
--- a/webrtc/modules/audio_coding/main/test/TimedTrace.h
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef TIMED_TRACE_H
-#define TIMED_TRACE_H
-
-#include "webrtc/typedefs.h"
-
-#include <stdio.h>
-#include <stdlib.h>
-
-class TimedTrace {
- public:
- TimedTrace();
- ~TimedTrace();
-
- void SetTimeEllapsed(double myTime);
- double TimeEllapsed();
- void Tick10Msec();
- int16_t SetUp(char* fileName);
- void TimedLogg(char* message);
-
- private:
- static double _timeEllapsedSec;
- static FILE* _timedTraceFile;
-
-};
-
-#endif
diff --git a/webrtc/modules/audio_coding/main/test/TwoWayCommunication.cc b/webrtc/modules/audio_coding/main/test/TwoWayCommunication.cc
deleted file mode 100644
index 2ff2a85afe..0000000000
--- a/webrtc/modules/audio_coding/main/test/TwoWayCommunication.cc
+++ /dev/null
@@ -1,301 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "TwoWayCommunication.h"
-
-#include <ctype.h>
-#include <stdio.h>
-#include <string.h>
-
-#ifdef WIN32
-#include <Windows.h>
-#endif
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/engine_configurations.h"
-#include "webrtc/common_types.h"
-#include "webrtc/modules/audio_coding/main/test/PCMFile.h"
-#include "webrtc/modules/audio_coding/main/test/utility.h"
-#include "webrtc/system_wrappers/include/trace.h"
-#include "webrtc/test/testsupport/fileutils.h"
-
-namespace webrtc {
-
-#define MAX_FILE_NAME_LENGTH_BYTE 500
-
-TwoWayCommunication::TwoWayCommunication(int testMode)
- : _acmA(AudioCodingModule::Create(1)),
- _acmRefA(AudioCodingModule::Create(3)),
- _testMode(testMode) {
- AudioCodingModule::Config config;
- // The clicks will be more obvious in FAX mode. TODO(henrik.lundin) Really?
- config.neteq_config.playout_mode = kPlayoutFax;
- config.id = 2;
- _acmB.reset(AudioCodingModule::Create(config));
- config.id = 4;
- _acmRefB.reset(AudioCodingModule::Create(config));
-}
-
-TwoWayCommunication::~TwoWayCommunication() {
- delete _channel_A2B;
- delete _channel_B2A;
- delete _channelRef_A2B;
- delete _channelRef_B2A;
-#ifdef WEBRTC_DTMF_DETECTION
- if (_dtmfDetectorA != NULL) {
- delete _dtmfDetectorA;
- }
- if (_dtmfDetectorB != NULL) {
- delete _dtmfDetectorB;
- }
-#endif
- _inFileA.Close();
- _inFileB.Close();
- _outFileA.Close();
- _outFileB.Close();
- _outFileRefA.Close();
- _outFileRefB.Close();
-}
-
-void TwoWayCommunication::ChooseCodec(uint8_t* codecID_A,
- uint8_t* codecID_B) {
- rtc::scoped_ptr<AudioCodingModule> tmpACM(AudioCodingModule::Create(0));
- uint8_t noCodec = tmpACM->NumberOfCodecs();
- CodecInst codecInst;
- printf("List of Supported Codecs\n");
- printf("========================\n");
- for (uint8_t codecCntr = 0; codecCntr < noCodec; codecCntr++) {
- EXPECT_EQ(tmpACM->Codec(codecCntr, &codecInst), 0);
- printf("%d- %s\n", codecCntr, codecInst.plname);
- }
- printf("\nChoose a send codec for side A [0]: ");
- char myStr[15] = "";
- EXPECT_TRUE(fgets(myStr, 10, stdin) != NULL);
- *codecID_A = (uint8_t) atoi(myStr);
-
- printf("\nChoose a send codec for side B [0]: ");
- EXPECT_TRUE(fgets(myStr, 10, stdin) != NULL);
- *codecID_B = (uint8_t) atoi(myStr);
-
- printf("\n");
-}
-
-void TwoWayCommunication::SetUp() {
- uint8_t codecID_A;
- uint8_t codecID_B;
-
- ChooseCodec(&codecID_A, &codecID_B);
- CodecInst codecInst_A;
- CodecInst codecInst_B;
- CodecInst dummyCodec;
- EXPECT_EQ(0, _acmA->Codec(codecID_A, &codecInst_A));
- EXPECT_EQ(0, _acmB->Codec(codecID_B, &codecInst_B));
- EXPECT_EQ(0, _acmA->Codec(6, &dummyCodec));
-
- //--- Set A codecs
- EXPECT_EQ(0, _acmA->RegisterSendCodec(codecInst_A));
- EXPECT_EQ(0, _acmA->RegisterReceiveCodec(codecInst_B));
- //--- Set ref-A codecs
- EXPECT_EQ(0, _acmRefA->RegisterSendCodec(codecInst_A));
- EXPECT_EQ(0, _acmRefA->RegisterReceiveCodec(codecInst_B));
-
- //--- Set B codecs
- EXPECT_EQ(0, _acmB->RegisterSendCodec(codecInst_B));
- EXPECT_EQ(0, _acmB->RegisterReceiveCodec(codecInst_A));
-
- //--- Set ref-B codecs
- EXPECT_EQ(0, _acmRefB->RegisterSendCodec(codecInst_B));
- EXPECT_EQ(0, _acmRefB->RegisterReceiveCodec(codecInst_A));
-
- uint16_t frequencyHz;
-
- //--- Input A
- std::string in_file_name = webrtc::test::ResourcePath(
- "audio_coding/testfile32kHz", "pcm");
- frequencyHz = 32000;
- printf("Enter input file at side A [%s]: ", in_file_name.c_str());
- PCMFile::ChooseFile(&in_file_name, 499, &frequencyHz);
- _inFileA.Open(in_file_name, frequencyHz, "rb");
-
- //--- Output A
- std::string out_file_a = webrtc::test::OutputPath() + "outA.pcm";
- printf("Output file at side A: %s\n", out_file_a.c_str());
- printf("Sampling frequency (in Hz) of the above file: %u\n", frequencyHz);
- _outFileA.Open(out_file_a, frequencyHz, "wb");
- std::string ref_file_name = webrtc::test::OutputPath() + "ref_outA.pcm";
- _outFileRefA.Open(ref_file_name, frequencyHz, "wb");
-
- //--- Input B
- in_file_name = webrtc::test::ResourcePath("audio_coding/testfile32kHz",
- "pcm");
- frequencyHz = 32000;
- printf("\n\nEnter input file at side B [%s]: ", in_file_name.c_str());
- PCMFile::ChooseFile(&in_file_name, 499, &frequencyHz);
- _inFileB.Open(in_file_name, frequencyHz, "rb");
-
- //--- Output B
- std::string out_file_b = webrtc::test::OutputPath() + "outB.pcm";
- printf("Output file at side B: %s\n", out_file_b.c_str());
- printf("Sampling frequency (in Hz) of the above file: %u\n", frequencyHz);
- _outFileB.Open(out_file_b, frequencyHz, "wb");
- ref_file_name = webrtc::test::OutputPath() + "ref_outB.pcm";
- _outFileRefB.Open(ref_file_name, frequencyHz, "wb");
-
- //--- Set A-to-B channel
- _channel_A2B = new Channel;
- _acmA->RegisterTransportCallback(_channel_A2B);
- _channel_A2B->RegisterReceiverACM(_acmB.get());
- //--- Do the same for the reference
- _channelRef_A2B = new Channel;
- _acmRefA->RegisterTransportCallback(_channelRef_A2B);
- _channelRef_A2B->RegisterReceiverACM(_acmRefB.get());
-
- //--- Set B-to-A channel
- _channel_B2A = new Channel;
- _acmB->RegisterTransportCallback(_channel_B2A);
- _channel_B2A->RegisterReceiverACM(_acmA.get());
- //--- Do the same for reference
- _channelRef_B2A = new Channel;
- _acmRefB->RegisterTransportCallback(_channelRef_B2A);
- _channelRef_B2A->RegisterReceiverACM(_acmRefA.get());
-}
-
-void TwoWayCommunication::SetUpAutotest() {
- CodecInst codecInst_A;
- CodecInst codecInst_B;
- CodecInst dummyCodec;
-
- EXPECT_EQ(0, _acmA->Codec("ISAC", &codecInst_A, 16000, 1));
- EXPECT_EQ(0, _acmB->Codec("L16", &codecInst_B, 8000, 1));
- EXPECT_EQ(0, _acmA->Codec(6, &dummyCodec));
-
- //--- Set A codecs
- EXPECT_EQ(0, _acmA->RegisterSendCodec(codecInst_A));
- EXPECT_EQ(0, _acmA->RegisterReceiveCodec(codecInst_B));
-
- //--- Set ref-A codecs
- EXPECT_GT(_acmRefA->RegisterSendCodec(codecInst_A), -1);
- EXPECT_GT(_acmRefA->RegisterReceiveCodec(codecInst_B), -1);
-
- //--- Set B codecs
- EXPECT_GT(_acmB->RegisterSendCodec(codecInst_B), -1);
- EXPECT_GT(_acmB->RegisterReceiveCodec(codecInst_A), -1);
-
- //--- Set ref-B codecs
- EXPECT_EQ(0, _acmRefB->RegisterSendCodec(codecInst_B));
- EXPECT_EQ(0, _acmRefB->RegisterReceiveCodec(codecInst_A));
-
- uint16_t frequencyHz;
-
- //--- Input A and B
- std::string in_file_name = webrtc::test::ResourcePath(
- "audio_coding/testfile32kHz", "pcm");
- frequencyHz = 16000;
- _inFileA.Open(in_file_name, frequencyHz, "rb");
- _inFileB.Open(in_file_name, frequencyHz, "rb");
-
- //--- Output A
- std::string output_file_a = webrtc::test::OutputPath() + "outAutotestA.pcm";
- frequencyHz = 16000;
- _outFileA.Open(output_file_a, frequencyHz, "wb");
- std::string output_ref_file_a = webrtc::test::OutputPath()
- + "ref_outAutotestA.pcm";
- _outFileRefA.Open(output_ref_file_a, frequencyHz, "wb");
-
- //--- Output B
- std::string output_file_b = webrtc::test::OutputPath() + "outAutotestB.pcm";
- frequencyHz = 16000;
- _outFileB.Open(output_file_b, frequencyHz, "wb");
- std::string output_ref_file_b = webrtc::test::OutputPath()
- + "ref_outAutotestB.pcm";
- _outFileRefB.Open(output_ref_file_b, frequencyHz, "wb");
-
- //--- Set A-to-B channel
- _channel_A2B = new Channel;
- _acmA->RegisterTransportCallback(_channel_A2B);
- _channel_A2B->RegisterReceiverACM(_acmB.get());
- //--- Do the same for the reference
- _channelRef_A2B = new Channel;
- _acmRefA->RegisterTransportCallback(_channelRef_A2B);
- _channelRef_A2B->RegisterReceiverACM(_acmRefB.get());
-
- //--- Set B-to-A channel
- _channel_B2A = new Channel;
- _acmB->RegisterTransportCallback(_channel_B2A);
- _channel_B2A->RegisterReceiverACM(_acmA.get());
- //--- Do the same for reference
- _channelRef_B2A = new Channel;
- _acmRefB->RegisterTransportCallback(_channelRef_B2A);
- _channelRef_B2A->RegisterReceiverACM(_acmRefA.get());
-}
-
-void TwoWayCommunication::Perform() {
- if (_testMode == 0) {
- SetUpAutotest();
- } else {
- SetUp();
- }
- unsigned int msecPassed = 0;
- unsigned int secPassed = 0;
-
- int32_t outFreqHzA = _outFileA.SamplingFrequency();
- int32_t outFreqHzB = _outFileB.SamplingFrequency();
-
- AudioFrame audioFrame;
-
- CodecInst codecInst_B;
- CodecInst dummy;
-
- EXPECT_EQ(0, _acmB->SendCodec(&codecInst_B));
-
- // The following loop tests that the code can handle misuse of the APIs: in
- // the middle of a session, with data flowing between the two sides A and B,
- // APIs are called, and the code should continue to run and be able to
- // recover.
- while (!_inFileA.EndOfFile() && !_inFileB.EndOfFile()) {
- msecPassed += 10;
- EXPECT_GT(_inFileA.Read10MsData(audioFrame), 0);
- EXPECT_GE(_acmA->Add10MsData(audioFrame), 0);
- EXPECT_GE(_acmRefA->Add10MsData(audioFrame), 0);
-
- EXPECT_GT(_inFileB.Read10MsData(audioFrame), 0);
-
- EXPECT_GE(_acmB->Add10MsData(audioFrame), 0);
- EXPECT_GE(_acmRefB->Add10MsData(audioFrame), 0);
- EXPECT_EQ(0, _acmA->PlayoutData10Ms(outFreqHzA, &audioFrame));
- _outFileA.Write10MsData(audioFrame);
- EXPECT_EQ(0, _acmRefA->PlayoutData10Ms(outFreqHzA, &audioFrame));
- _outFileRefA.Write10MsData(audioFrame);
- EXPECT_EQ(0, _acmB->PlayoutData10Ms(outFreqHzB, &audioFrame));
- _outFileB.Write10MsData(audioFrame);
- EXPECT_EQ(0, _acmRefB->PlayoutData10Ms(outFreqHzB, &audioFrame));
- _outFileRefB.Write10MsData(audioFrame);
-
- // Update time counters each time a second of data has passed.
- if (msecPassed >= 1000) {
- msecPassed = 0;
- secPassed++;
- }
- // Re-register send codec on side B.
- if (((secPassed % 5) == 4) && (msecPassed >= 990)) {
- EXPECT_EQ(0, _acmB->RegisterSendCodec(codecInst_B));
- EXPECT_EQ(0, _acmB->SendCodec(&dummy));
- }
- // Initialize receiver on side A.
- if (((secPassed % 7) == 6) && (msecPassed == 0))
- EXPECT_EQ(0, _acmA->InitializeReceiver());
- // Re-register codec on side A.
- if (((secPassed % 7) == 6) && (msecPassed >= 990)) {
- EXPECT_EQ(0, _acmA->RegisterReceiveCodec(codecInst_B));
- }
- }
-}
-
-} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/main/test/TwoWayCommunication.h b/webrtc/modules/audio_coding/main/test/TwoWayCommunication.h
deleted file mode 100644
index bf969fe683..0000000000
--- a/webrtc/modules/audio_coding/main/test/TwoWayCommunication.h
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_TWOWAYCOMMUNICATION_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_TWOWAYCOMMUNICATION_H_
-
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module.h"
-#include "webrtc/modules/audio_coding/main/test/ACMTest.h"
-#include "webrtc/modules/audio_coding/main/test/Channel.h"
-#include "webrtc/modules/audio_coding/main/test/PCMFile.h"
-#include "webrtc/modules/audio_coding/main/test/utility.h"
-
-namespace webrtc {
-
-class TwoWayCommunication : public ACMTest {
- public:
- explicit TwoWayCommunication(int testMode);
- ~TwoWayCommunication();
-
- void Perform();
- private:
- void ChooseCodec(uint8_t* codecID_A, uint8_t* codecID_B);
- void SetUp();
- void SetUpAutotest();
-
- rtc::scoped_ptr<AudioCodingModule> _acmA;
- rtc::scoped_ptr<AudioCodingModule> _acmB;
-
- rtc::scoped_ptr<AudioCodingModule> _acmRefA;
- rtc::scoped_ptr<AudioCodingModule> _acmRefB;
-
- Channel* _channel_A2B;
- Channel* _channel_B2A;
-
- Channel* _channelRef_A2B;
- Channel* _channelRef_B2A;
-
- PCMFile _inFileA;
- PCMFile _inFileB;
-
- PCMFile _outFileA;
- PCMFile _outFileB;
-
- PCMFile _outFileRefA;
- PCMFile _outFileRefB;
-
- int _testMode;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_TWOWAYCOMMUNICATION_H_
diff --git a/webrtc/modules/audio_coding/main/test/delay_test.cc b/webrtc/modules/audio_coding/main/test/delay_test.cc
deleted file mode 100644
index 6186d67fc9..0000000000
--- a/webrtc/modules/audio_coding/main/test/delay_test.cc
+++ /dev/null
@@ -1,270 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <assert.h>
-#include <math.h>
-
-#include <iostream>
-
-#include "gflags/gflags.h"
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/common.h"
-#include "webrtc/common_types.h"
-#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module_typedefs.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/test/Channel.h"
-#include "webrtc/modules/audio_coding/main/test/PCMFile.h"
-#include "webrtc/modules/audio_coding/main/test/utility.h"
-#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/test/testsupport/fileutils.h"
-
-DEFINE_string(codec, "isac", "Codec Name");
-DEFINE_int32(sample_rate_hz, 16000, "Sampling rate in Hertz.");
-DEFINE_int32(num_channels, 1, "Number of Channels.");
-DEFINE_string(input_file, "", "Input file, PCM16 32 kHz, optional.");
-DEFINE_int32(delay, 0, "Delay in millisecond.");
-DEFINE_int32(init_delay, 0, "Initial delay in millisecond.");
-DEFINE_bool(dtx, false, "Enable DTX at the sender side.");
-DEFINE_bool(packet_loss, false, "Apply packet loss, c.f. Channel{.cc, .h}.");
-DEFINE_bool(fec, false, "Use Forward Error Correction (FEC).");
-
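-// Example invocation (binary name and flag values are illustrative):
-//   delay_test --codec=isac --sample_rate_hz=16000 --delay=100 --fec
-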
-namespace webrtc {
-
-namespace {
-
-struct CodecSettings {
- char name[50];
- int sample_rate_hz;
- int num_channels;
-};
-
-struct AcmSettings {
- bool dtx;
- bool fec;
-};
-
-struct TestSettings {
- CodecSettings codec;
- AcmSettings acm;
- bool packet_loss;
-};
-
-} // namespace
-
-class DelayTest {
- public:
- DelayTest()
- : acm_a_(AudioCodingModule::Create(0)),
- acm_b_(AudioCodingModule::Create(1)),
- channel_a2b_(new Channel),
- test_cntr_(0),
- encoding_sample_rate_hz_(8000) {}
-
- ~DelayTest() {
- if (channel_a2b_ != NULL) {
- delete channel_a2b_;
- channel_a2b_ = NULL;
- }
- in_file_a_.Close();
- }
-
- void Initialize() {
- test_cntr_ = 0;
- std::string file_name = webrtc::test::ResourcePath(
- "audio_coding/testfile32kHz", "pcm");
- if (FLAGS_input_file.size() > 0)
- file_name = FLAGS_input_file;
- in_file_a_.Open(file_name, 32000, "rb");
- ASSERT_EQ(0, acm_a_->InitializeReceiver()) <<
- "Couldn't initialize receiver.\n";
- ASSERT_EQ(0, acm_b_->InitializeReceiver()) <<
- "Couldn't initialize receiver.\n";
- if (FLAGS_init_delay > 0) {
- ASSERT_EQ(0, acm_b_->SetInitialPlayoutDelay(FLAGS_init_delay)) <<
- "Failed to set initial delay.\n";
- }
-
- if (FLAGS_delay > 0) {
- ASSERT_EQ(0, acm_b_->SetMinimumPlayoutDelay(FLAGS_delay)) <<
- "Failed to set minimum delay.\n";
- }
-
- int num_encoders = acm_a_->NumberOfCodecs();
- CodecInst my_codec_param;
- for (int n = 0; n < num_encoders; n++) {
- EXPECT_EQ(0, acm_b_->Codec(n, &my_codec_param)) <<
- "Failed to get codec.";
- if (STR_CASE_CMP(my_codec_param.plname, "opus") == 0)
- my_codec_param.channels = 1;
- else if (my_codec_param.channels > 1)
- continue;
- if (STR_CASE_CMP(my_codec_param.plname, "CN") == 0 &&
- my_codec_param.plfreq == 48000)
- continue;
- if (STR_CASE_CMP(my_codec_param.plname, "telephone-event") == 0)
- continue;
- ASSERT_EQ(0, acm_b_->RegisterReceiveCodec(my_codec_param)) <<
- "Couldn't register receive codec.\n";
- }
-
- // Create and connect the channel
- ASSERT_EQ(0, acm_a_->RegisterTransportCallback(channel_a2b_)) <<
- "Couldn't register Transport callback.\n";
- channel_a2b_->RegisterReceiverACM(acm_b_.get());
- }
-
- void Perform(const TestSettings* config, size_t num_tests, int duration_sec,
- const char* output_prefix) {
- for (size_t n = 0; n < num_tests; ++n) {
- ApplyConfig(config[n]);
- Run(duration_sec, output_prefix);
- }
- }
-
- private:
- void ApplyConfig(const TestSettings& config) {
- printf("====================================\n");
- printf("Test %d \n"
- "Codec: %s, %d kHz, %d channel(s)\n"
- "ACM: DTX %s, FEC %s\n"
- "Channel: %s\n",
- ++test_cntr_, config.codec.name, config.codec.sample_rate_hz,
- config.codec.num_channels, config.acm.dtx ? "on" : "off",
- config.acm.fec ? "on" : "off",
- config.packet_loss ? "with packet-loss" : "no packet-loss");
- SendCodec(config.codec);
- ConfigAcm(config.acm);
- ConfigChannel(config.packet_loss);
- }
-
- void SendCodec(const CodecSettings& config) {
- CodecInst my_codec_param;
- ASSERT_EQ(0, AudioCodingModule::Codec(
- config.name, &my_codec_param, config.sample_rate_hz,
- config.num_channels)) << "Specified codec is not supported.\n";
-
- encoding_sample_rate_hz_ = my_codec_param.plfreq;
- ASSERT_EQ(0, acm_a_->RegisterSendCodec(my_codec_param)) <<
- "Failed to register send-codec.\n";
- }
-
- void ConfigAcm(const AcmSettings& config) {
- ASSERT_EQ(0, acm_a_->SetVAD(config.dtx, config.dtx, VADAggr)) <<
- "Failed to set VAD.\n";
- ASSERT_EQ(0, acm_a_->SetREDStatus(config.fec)) <<
- "Failed to set RED.\n";
- }
-
- void ConfigChannel(bool packet_loss) {
- channel_a2b_->SetFECTestWithPacketLoss(packet_loss);
- }
-
- void OpenOutFile(const char* output_id) {
- std::stringstream file_stream;
- file_stream << "delay_test_" << FLAGS_codec << "_" << FLAGS_sample_rate_hz
- << "Hz" << "_" << FLAGS_init_delay << "ms_" << FLAGS_delay << "ms.pcm";
- std::cout << "Output file: " << file_stream.str() << std::endl << std::endl;
- std::string file_name = webrtc::test::OutputPath() + file_stream.str();
- out_file_b_.Open(file_name.c_str(), 32000, "wb");
- }
-
- void Run(int duration_sec, const char* output_prefix) {
- OpenOutFile(output_prefix);
- AudioFrame audio_frame;
- uint32_t out_freq_hz_b = out_file_b_.SamplingFrequency();
-
- int num_frames = 0;
- int in_file_frames = 0;
- uint32_t playout_ts;
- uint32_t received_ts;
- double average_delay = 0;
- double inst_delay_sec = 0;
- while (num_frames < (duration_sec * 100)) {
- if (in_file_a_.EndOfFile()) {
- in_file_a_.Rewind();
- }
-
- // Print delay information every 64 frames.
- if ((num_frames & 0x3F) == 0x3F) {
- NetworkStatistics statistics;
- acm_b_->GetNetworkStatistics(&statistics);
- fprintf(stdout, "delay: min=%3d max=%3d mean=%3d median=%3d"
- " ts-based average = %6.3f, "
- "curr buff-lev = %4u opt buff-lev = %4u \n",
- statistics.minWaitingTimeMs, statistics.maxWaitingTimeMs,
- statistics.meanWaitingTimeMs, statistics.medianWaitingTimeMs,
- average_delay, statistics.currentBufferSize,
- statistics.preferredBufferSize);
- fflush (stdout);
- }
-
- in_file_a_.Read10MsData(audio_frame);
- ASSERT_GE(acm_a_->Add10MsData(audio_frame), 0);
- ASSERT_EQ(0, acm_b_->PlayoutData10Ms(out_freq_hz_b, &audio_frame));
- out_file_b_.Write10MsData(
- audio_frame.data_,
- audio_frame.samples_per_channel_ * audio_frame.num_channels_);
- acm_b_->PlayoutTimestamp(&playout_ts);
- received_ts = channel_a2b_->LastInTimestamp();
- inst_delay_sec = static_cast<uint32_t>(received_ts - playout_ts)
- / static_cast<double>(encoding_sample_rate_hz_);
-
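- // Exponentially smooth the instantaneous delay (an EMA with weight 0.05
- // on the newest sample) once the first 10 frames have passed.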
- if (num_frames > 10)
- average_delay = 0.95 * average_delay + 0.05 * inst_delay_sec;
-
- ++num_frames;
- ++in_file_frames;
- }
- out_file_b_.Close();
- }
-
- rtc::scoped_ptr<AudioCodingModule> acm_a_;
- rtc::scoped_ptr<AudioCodingModule> acm_b_;
-
- Channel* channel_a2b_;
-
- PCMFile in_file_a_;
- PCMFile out_file_b_;
- int test_cntr_;
- int encoding_sample_rate_hz_;
-};
-
-} // namespace webrtc
-
-int main(int argc, char* argv[]) {
- google::ParseCommandLineFlags(&argc, &argv, true);
- webrtc::TestSettings test_setting;
- strcpy(test_setting.codec.name, FLAGS_codec.c_str());
-
- if (FLAGS_sample_rate_hz != 8000 &&
- FLAGS_sample_rate_hz != 16000 &&
- FLAGS_sample_rate_hz != 32000 &&
- FLAGS_sample_rate_hz != 48000) {
- std::cout << "Invalid sampling rate.\n";
- return 1;
- }
- test_setting.codec.sample_rate_hz = FLAGS_sample_rate_hz;
- if (FLAGS_num_channels < 1 || FLAGS_num_channels > 2) {
- std::cout << "Only mono and stereo are supported.\n";
- return 1;
- }
- test_setting.codec.num_channels = FLAGS_num_channels;
- test_setting.acm.dtx = FLAGS_dtx;
- test_setting.acm.fec = FLAGS_fec;
- test_setting.packet_loss = FLAGS_packet_loss;
-
- webrtc::DelayTest delay_test;
- delay_test.Initialize();
- delay_test.Perform(&test_setting, 1, 240, "delay_test");
- return 0;
-}
diff --git a/webrtc/modules/audio_coding/main/test/iSACTest.cc b/webrtc/modules/audio_coding/main/test/iSACTest.cc
deleted file mode 100644
index 35c34d5947..0000000000
--- a/webrtc/modules/audio_coding/main/test/iSACTest.cc
+++ /dev/null
@@ -1,340 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/test/iSACTest.h"
-
-#include <ctype.h>
-#include <stdio.h>
-#include <string.h>
-
-#if _WIN32
-#include <windows.h>
-#elif WEBRTC_LINUX
-#include <time.h>
-#else
-#include <sys/time.h>
-#include <time.h>
-#endif
-
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/test/utility.h"
-#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
-#include "webrtc/system_wrappers/include/trace.h"
-#include "webrtc/test/testsupport/fileutils.h"
-
-namespace webrtc {
-
-void SetISACConfigDefault(ACMTestISACConfig& isacConfig) {
- isacConfig.currentRateBitPerSec = 0;
- isacConfig.currentFrameSizeMsec = 0;
- isacConfig.encodingMode = -1;
- isacConfig.initRateBitPerSec = 0;
- isacConfig.initFrameSizeInMsec = 0;
- isacConfig.enforceFrameSize = false;
- return;
-}
-
-int16_t SetISAConfig(ACMTestISACConfig& isacConfig, AudioCodingModule* acm,
- int testMode) {
-
- if ((isacConfig.currentRateBitPerSec != 0)
- || (isacConfig.currentFrameSizeMsec != 0)) {
- CodecInst sendCodec;
- EXPECT_EQ(0, acm->SendCodec(&sendCodec));
- if (isacConfig.currentRateBitPerSec < 0) {
- // Register iSAC in adaptive (channel-dependent) mode.
- sendCodec.rate = -1;
- EXPECT_EQ(0, acm->RegisterSendCodec(sendCodec));
- } else {
- if (isacConfig.currentRateBitPerSec != 0) {
- sendCodec.rate = isacConfig.currentRateBitPerSec;
- }
- if (isacConfig.currentFrameSizeMsec != 0) {
- sendCodec.pacsize = isacConfig.currentFrameSizeMsec
- * (sendCodec.plfreq / 1000);
- }
- EXPECT_EQ(0, acm->RegisterSendCodec(sendCodec));
- }
- }
-
- return 0;
-}
-
-ISACTest::ISACTest(int testMode)
- : _acmA(AudioCodingModule::Create(1)),
- _acmB(AudioCodingModule::Create(2)),
- _testMode(testMode) {}
-
-ISACTest::~ISACTest() {}
-
-void ISACTest::Setup() {
- int codecCntr;
- CodecInst codecParam;
-
- for (codecCntr = 0; codecCntr < AudioCodingModule::NumberOfCodecs();
- codecCntr++) {
- EXPECT_EQ(0, AudioCodingModule::Codec(codecCntr, &codecParam));
- if (!STR_CASE_CMP(codecParam.plname, "ISAC")
- && codecParam.plfreq == 16000) {
- memcpy(&_paramISAC16kHz, &codecParam, sizeof(CodecInst));
- _idISAC16kHz = codecCntr;
- }
- if (!STR_CASE_CMP(codecParam.plname, "ISAC")
- && codecParam.plfreq == 32000) {
- memcpy(&_paramISAC32kHz, &codecParam, sizeof(CodecInst));
- _idISAC32kHz = codecCntr;
- }
- }
-
- // Register both iSAC-wb & iSAC-swb in both sides as receiver codecs.
- EXPECT_EQ(0, _acmA->RegisterReceiveCodec(_paramISAC16kHz));
- EXPECT_EQ(0, _acmA->RegisterReceiveCodec(_paramISAC32kHz));
- EXPECT_EQ(0, _acmB->RegisterReceiveCodec(_paramISAC16kHz));
- EXPECT_EQ(0, _acmB->RegisterReceiveCodec(_paramISAC32kHz));
-
- //--- Set A-to-B channel
- _channel_A2B.reset(new Channel);
- EXPECT_EQ(0, _acmA->RegisterTransportCallback(_channel_A2B.get()));
- _channel_A2B->RegisterReceiverACM(_acmB.get());
-
- //--- Set B-to-A channel
- _channel_B2A.reset(new Channel);
- EXPECT_EQ(0, _acmB->RegisterTransportCallback(_channel_B2A.get()));
- _channel_B2A->RegisterReceiverACM(_acmA.get());
-
- file_name_swb_ = webrtc::test::ResourcePath("audio_coding/testfile32kHz",
- "pcm");
-
- EXPECT_EQ(0, _acmB->RegisterSendCodec(_paramISAC16kHz));
- EXPECT_EQ(0, _acmA->RegisterSendCodec(_paramISAC32kHz));
-
- _inFileA.Open(file_name_swb_, 32000, "rb");
- std::string fileNameA = webrtc::test::OutputPath() + "testisac_a.pcm";
- std::string fileNameB = webrtc::test::OutputPath() + "testisac_b.pcm";
- _outFileA.Open(fileNameA, 32000, "wb");
- _outFileB.Open(fileNameB, 32000, "wb");
-
- while (!_inFileA.EndOfFile()) {
- Run10ms();
- }
- CodecInst receiveCodec;
- EXPECT_EQ(0, _acmA->ReceiveCodec(&receiveCodec));
- EXPECT_EQ(0, _acmB->ReceiveCodec(&receiveCodec));
-
- _inFileA.Close();
- _outFileA.Close();
- _outFileB.Close();
-}
-
-void ISACTest::Perform() {
- Setup();
-
- int16_t testNr = 0;
- ACMTestISACConfig wbISACConfig;
- ACMTestISACConfig swbISACConfig;
-
- SetISACConfigDefault(wbISACConfig);
- SetISACConfigDefault(swbISACConfig);
-
- wbISACConfig.currentRateBitPerSec = -1;
- swbISACConfig.currentRateBitPerSec = -1;
- testNr++;
- EncodeDecode(testNr, wbISACConfig, swbISACConfig);
-
- if (_testMode != 0) {
- SetISACConfigDefault(wbISACConfig);
- SetISACConfigDefault(swbISACConfig);
-
- wbISACConfig.currentRateBitPerSec = -1;
- swbISACConfig.currentRateBitPerSec = -1;
- wbISACConfig.initRateBitPerSec = 13000;
- wbISACConfig.initFrameSizeInMsec = 60;
- swbISACConfig.initRateBitPerSec = 20000;
- swbISACConfig.initFrameSizeInMsec = 30;
- testNr++;
- EncodeDecode(testNr, wbISACConfig, swbISACConfig);
-
- SetISACConfigDefault(wbISACConfig);
- SetISACConfigDefault(swbISACConfig);
-
- wbISACConfig.currentRateBitPerSec = 20000;
- swbISACConfig.currentRateBitPerSec = 48000;
- testNr++;
- EncodeDecode(testNr, wbISACConfig, swbISACConfig);
-
- wbISACConfig.currentRateBitPerSec = 16000;
- swbISACConfig.currentRateBitPerSec = 30000;
- wbISACConfig.currentFrameSizeMsec = 60;
- testNr++;
- EncodeDecode(testNr, wbISACConfig, swbISACConfig);
- }
-
- SetISACConfigDefault(wbISACConfig);
- SetISACConfigDefault(swbISACConfig);
- testNr++;
- EncodeDecode(testNr, wbISACConfig, swbISACConfig);
-
- testNr++;
- if (_testMode == 0) {
- SwitchingSamplingRate(testNr, 4);
- } else {
- SwitchingSamplingRate(testNr, 80);
- }
-}
-
-void ISACTest::Run10ms() {
- AudioFrame audioFrame;
- EXPECT_GT(_inFileA.Read10MsData(audioFrame), 0);
- EXPECT_GE(_acmA->Add10MsData(audioFrame), 0);
- EXPECT_GE(_acmB->Add10MsData(audioFrame), 0);
- EXPECT_EQ(0, _acmA->PlayoutData10Ms(32000, &audioFrame));
- _outFileA.Write10MsData(audioFrame);
- EXPECT_EQ(0, _acmB->PlayoutData10Ms(32000, &audioFrame));
- _outFileB.Write10MsData(audioFrame);
-}
-
-void ISACTest::EncodeDecode(int testNr, ACMTestISACConfig& wbISACConfig,
- ACMTestISACConfig& swbISACConfig) {
- // Files in Side A and B
- _inFileA.Open(file_name_swb_, 32000, "rb", true);
- _inFileB.Open(file_name_swb_, 32000, "rb", true);
-
- std::string file_name_out;
- std::stringstream file_stream_a;
- std::stringstream file_stream_b;
- file_stream_a << webrtc::test::OutputPath();
- file_stream_b << webrtc::test::OutputPath();
- file_stream_a << "out_iSACTest_A_" << testNr << ".pcm";
- file_stream_b << "out_iSACTest_B_" << testNr << ".pcm";
- file_name_out = file_stream_a.str();
- _outFileA.Open(file_name_out, 32000, "wb");
- file_name_out = file_stream_b.str();
- _outFileB.Open(file_name_out, 32000, "wb");
-
- EXPECT_EQ(0, _acmA->RegisterSendCodec(_paramISAC16kHz));
- EXPECT_EQ(0, _acmA->RegisterSendCodec(_paramISAC32kHz));
- EXPECT_EQ(0, _acmB->RegisterSendCodec(_paramISAC32kHz));
- EXPECT_EQ(0, _acmB->RegisterSendCodec(_paramISAC16kHz));
-
- // Side A is sending super-wideband, and side B is sending wideband.
- SetISAConfig(swbISACConfig, _acmA.get(), _testMode);
- SetISAConfig(wbISACConfig, _acmB.get(), _testMode);
-
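- // A current rate of -1 means iSAC was registered in channel-adaptive mode
- // (see SetISAConfig above), so the send codec is polled while running.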
- bool adaptiveMode = false;
- if ((swbISACConfig.currentRateBitPerSec == -1)
- || (wbISACConfig.currentRateBitPerSec == -1)) {
- adaptiveMode = true;
- }
- _myTimer.Reset();
- _channel_A2B->ResetStats();
- _channel_B2A->ResetStats();
-
- char currentTime[500];
- CodecInst sendCodec;
- EventTimerWrapper* myEvent = EventTimerWrapper::Create();
- EXPECT_TRUE(myEvent->StartTimer(true, 10));
- while (!(_inFileA.EndOfFile() || _inFileA.Rewinded())) {
- Run10ms();
- _myTimer.Tick10ms();
- _myTimer.CurrentTimeHMS(currentTime);
-
- if ((adaptiveMode) && (_testMode != 0)) {
- myEvent->Wait(5000);
- EXPECT_EQ(0, _acmA->SendCodec(&sendCodec));
- EXPECT_EQ(0, _acmB->SendCodec(&sendCodec));
- }
- }
-
- if (_testMode != 0) {
- printf("\n\nSide A statistics\n\n");
- _channel_A2B->PrintStats(_paramISAC32kHz);
-
- printf("\n\nSide B statistics\n\n");
- _channel_B2A->PrintStats(_paramISAC16kHz);
- }
-
- _channel_A2B->ResetStats();
- _channel_B2A->ResetStats();
-
- _outFileA.Close();
- _outFileB.Close();
- _inFileA.Close();
- _inFileB.Close();
-}
-
-void ISACTest::SwitchingSamplingRate(int testNr, int maxSampRateChange) {
- // Files in Side A
- _inFileA.Open(file_name_swb_, 32000, "rb");
- _inFileB.Open(file_name_swb_, 32000, "rb");
-
- std::string file_name_out;
- std::stringstream file_stream_a;
- std::stringstream file_stream_b;
- file_stream_a << webrtc::test::OutputPath();
- file_stream_b << webrtc::test::OutputPath();
- file_stream_a << "out_iSACTest_A_" << testNr << ".pcm";
- file_stream_b << "out_iSACTest_B_" << testNr << ".pcm";
- file_name_out = file_stream_a.str();
- _outFileA.Open(file_name_out, 32000, "wb");
- file_name_out = file_stream_b.str();
- _outFileB.Open(file_name_out, 32000, "wb");
-
- // Start with side A sending super-wideband and side B sending wideband.
- // Toggle sending wideband/super-wideband in this test.
- EXPECT_EQ(0, _acmA->RegisterSendCodec(_paramISAC32kHz));
- EXPECT_EQ(0, _acmB->RegisterSendCodec(_paramISAC16kHz));
-
- int numSendCodecChanged = 0;
- _myTimer.Reset();
- char currentTime[50];
- while (numSendCodecChanged < (maxSampRateChange << 1)) {
- Run10ms();
- _myTimer.Tick10ms();
- _myTimer.CurrentTimeHMS(currentTime);
- if (_testMode == 2)
- printf("\r%s", currentTime);
- if (_inFileA.EndOfFile()) {
- if (_inFileA.SamplingFrequency() == 16000) {
- // Switch side A to send super-wideband.
- _inFileA.Close();
- _inFileA.Open(file_name_swb_, 32000, "rb");
- EXPECT_EQ(0, _acmA->RegisterSendCodec(_paramISAC32kHz));
- } else {
- // Switch side A to send wideband.
- _inFileA.Close();
- _inFileA.Open(file_name_swb_, 32000, "rb");
- EXPECT_EQ(0, _acmA->RegisterSendCodec(_paramISAC16kHz));
- }
- numSendCodecChanged++;
- }
-
- if (_inFileB.EndOfFile()) {
- if (_inFileB.SamplingFrequency() == 16000) {
- // Switch side B to send super-wideband.
- _inFileB.Close();
- _inFileB.Open(file_name_swb_, 32000, "rb");
- EXPECT_EQ(0, _acmB->RegisterSendCodec(_paramISAC32kHz));
- } else {
- // Switch side B to send wideband.
- _inFileB.Close();
- _inFileB.Open(file_name_swb_, 32000, "rb");
- EXPECT_EQ(0, _acmB->RegisterSendCodec(_paramISAC16kHz));
- }
- numSendCodecChanged++;
- }
- }
- _outFileA.Close();
- _outFileB.Close();
- _inFileA.Close();
- _inFileB.Close();
-}
-
-} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/main/test/iSACTest.h b/webrtc/modules/audio_coding/main/test/iSACTest.h
deleted file mode 100644
index 0693d935e1..0000000000
--- a/webrtc/modules/audio_coding/main/test/iSACTest.h
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_ISACTEST_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_ISACTEST_H_
-
-#include <string.h>
-
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/common_types.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module.h"
-#include "webrtc/modules/audio_coding/main/test/ACMTest.h"
-#include "webrtc/modules/audio_coding/main/test/Channel.h"
-#include "webrtc/modules/audio_coding/main/test/PCMFile.h"
-#include "webrtc/modules/audio_coding/main/test/utility.h"
-
-#define MAX_FILE_NAME_LENGTH_BYTE 500
-#define NO_OF_CLIENTS 15
-
-namespace webrtc {
-
-struct ACMTestISACConfig {
- int32_t currentRateBitPerSec;
- int16_t currentFrameSizeMsec;
- int16_t encodingMode;
- uint32_t initRateBitPerSec;
- int16_t initFrameSizeInMsec;
- bool enforceFrameSize;
-};
-
-class ISACTest : public ACMTest {
- public:
- explicit ISACTest(int testMode);
- ~ISACTest();
-
- void Perform();
- private:
- void Setup();
-
- void Run10ms();
-
- void EncodeDecode(int testNr, ACMTestISACConfig& wbISACConfig,
- ACMTestISACConfig& swbISACConfig);
-
- void SwitchingSamplingRate(int testNr, int maxSampRateChange);
-
- rtc::scoped_ptr<AudioCodingModule> _acmA;
- rtc::scoped_ptr<AudioCodingModule> _acmB;
-
- rtc::scoped_ptr<Channel> _channel_A2B;
- rtc::scoped_ptr<Channel> _channel_B2A;
-
- PCMFile _inFileA;
- PCMFile _inFileB;
-
- PCMFile _outFileA;
- PCMFile _outFileB;
-
- uint8_t _idISAC16kHz;
- uint8_t _idISAC32kHz;
- CodecInst _paramISAC16kHz;
- CodecInst _paramISAC32kHz;
-
- std::string file_name_swb_;
-
- ACMTestTimer _myTimer;
- int _testMode;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_ISACTEST_H_
diff --git a/webrtc/modules/audio_coding/main/test/initial_delay_unittest.cc b/webrtc/modules/audio_coding/main/test/initial_delay_unittest.cc
deleted file mode 100644
index 8495e0e596..0000000000
--- a/webrtc/modules/audio_coding/main/test/initial_delay_unittest.cc
+++ /dev/null
@@ -1,175 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module.h"
-
-#include <assert.h>
-#include <math.h>
-
-#include <iostream>
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/common_types.h"
-#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module_typedefs.h"
-#include "webrtc/modules/audio_coding/main/test/Channel.h"
-#include "webrtc/modules/audio_coding/main/test/PCMFile.h"
-#include "webrtc/modules/audio_coding/main/test/utility.h"
-#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/test/testsupport/fileutils.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
-
-namespace webrtc {
-
-namespace {
-
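-// Root-mean-square of all samples in |frame|: sqrt(sum(x[n]^2) / N), where
-// N counts samples across all channels.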
-double FrameRms(AudioFrame& frame) {
- size_t samples = frame.num_channels_ * frame.samples_per_channel_;
- double rms = 0;
- for (size_t n = 0; n < samples; ++n)
- rms += frame.data_[n] * frame.data_[n];
- rms /= samples;
- rms = sqrt(rms);
- return rms;
-}
-
-}  // namespace
-
-class InitialPlayoutDelayTest : public ::testing::Test {
- protected:
- InitialPlayoutDelayTest()
- : acm_a_(AudioCodingModule::Create(0)),
- acm_b_(AudioCodingModule::Create(1)),
- channel_a2b_(NULL) {}
-
- ~InitialPlayoutDelayTest() {
- if (channel_a2b_ != NULL) {
- delete channel_a2b_;
- channel_a2b_ = NULL;
- }
- }
-
- void SetUp() {
- ASSERT_TRUE(acm_a_.get() != NULL);
- ASSERT_TRUE(acm_b_.get() != NULL);
-
- EXPECT_EQ(0, acm_b_->InitializeReceiver());
- EXPECT_EQ(0, acm_a_->InitializeReceiver());
-
- // Register all L16 codecs in receiver.
- CodecInst codec;
- const int kFsHz[3] = { 8000, 16000, 32000 };
- const int kChannels[2] = { 1, 2 };
- for (int n = 0; n < 3; ++n) {
- for (int k = 0; k < 2; ++k) {
- AudioCodingModule::Codec("L16", &codec, kFsHz[n], kChannels[k]);
- acm_b_->RegisterReceiveCodec(codec);
- }
- }
-
- // Create and connect the channel
- channel_a2b_ = new Channel;
- acm_a_->RegisterTransportCallback(channel_a2b_);
- channel_a2b_->RegisterReceiverACM(acm_b_.get());
- }
-
- void NbMono() {
- CodecInst codec;
- AudioCodingModule::Codec("L16", &codec, 8000, 1);
- codec.pacsize = codec.plfreq * 30 / 1000; // 30 ms packets.
- Run(codec, 1000);
- }
-
- void WbMono() {
- CodecInst codec;
- AudioCodingModule::Codec("L16", &codec, 16000, 1);
- codec.pacsize = codec.plfreq * 30 / 1000; // 30 ms packets.
- Run(codec, 1000);
- }
-
- void SwbMono() {
- CodecInst codec;
- AudioCodingModule::Codec("L16", &codec, 32000, 1);
- codec.pacsize = codec.plfreq * 10 / 1000; // 10 ms packets.
- Run(codec, 400); // Memory constraints limit the buffer at <500 ms.
- }
-
- void NbStereo() {
- CodecInst codec;
- AudioCodingModule::Codec("L16", &codec, 8000, 2);
- codec.pacsize = codec.plfreq * 30 / 1000; // 30 ms packets.
- Run(codec, 1000);
- }
-
- void WbStereo() {
- CodecInst codec;
- AudioCodingModule::Codec("L16", &codec, 16000, 2);
- codec.pacsize = codec.plfreq * 30 / 1000; // 30 ms packets.
- Run(codec, 1000);
- }
-
- void SwbStereo() {
- CodecInst codec;
- AudioCodingModule::Codec("L16", &codec, 32000, 2);
- codec.pacsize = codec.plfreq * 10 / 1000; // 10 ms packets.
- Run(codec, 400); // Memory constraints limit the buffer at <500 ms.
- }
-
- private:
- void Run(CodecInst codec, int initial_delay_ms) {
- AudioFrame in_audio_frame;
- AudioFrame out_audio_frame;
- int num_frames = 0;
- const int kAmp = 10000;
- in_audio_frame.sample_rate_hz_ = codec.plfreq;
- in_audio_frame.num_channels_ = codec.channels;
- in_audio_frame.samples_per_channel_ = codec.plfreq / 100; // 10 ms.
- size_t samples = in_audio_frame.num_channels_ *
- in_audio_frame.samples_per_channel_;
- for (size_t n = 0; n < samples; ++n) {
- in_audio_frame.data_[n] = kAmp;
- }
-
- uint32_t timestamp = 0;
- double rms = 0;
- ASSERT_EQ(0, acm_a_->RegisterSendCodec(codec));
- acm_b_->SetInitialPlayoutDelay(initial_delay_ms);
- while (rms < kAmp / 2) {
- in_audio_frame.timestamp_ = timestamp;
- timestamp += static_cast<uint32_t>(in_audio_frame.samples_per_channel_);
- ASSERT_GE(acm_a_->Add10MsData(in_audio_frame), 0);
- ASSERT_EQ(0, acm_b_->PlayoutData10Ms(codec.plfreq, &out_audio_frame));
- rms = FrameRms(out_audio_frame);
- ++num_frames;
- }
-
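- // Playout stays silent during the initial delay, so the number of 10 ms
- // frames pulled before the RMS ramps up approximates the configured
- // delay, with up to 100 ms of slack.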
- ASSERT_GE(num_frames * 10, initial_delay_ms);
- ASSERT_LE(num_frames * 10, initial_delay_ms + 100);
- }
-
- rtc::scoped_ptr<AudioCodingModule> acm_a_;
- rtc::scoped_ptr<AudioCodingModule> acm_b_;
- Channel* channel_a2b_;
-};
-
-TEST_F(InitialPlayoutDelayTest, NbMono) { NbMono(); }
-
-TEST_F(InitialPlayoutDelayTest, WbMono) { WbMono(); }
-
-TEST_F(InitialPlayoutDelayTest, SwbMono) { SwbMono(); }
-
-TEST_F(InitialPlayoutDelayTest, NbStereo) { NbStereo(); }
-
-TEST_F(InitialPlayoutDelayTest, WbStereo) { WbStereo(); }
-
-TEST_F(InitialPlayoutDelayTest, SwbStereo) { SwbStereo(); }
-
-} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/main/test/insert_packet_with_timing.cc b/webrtc/modules/audio_coding/main/test/insert_packet_with_timing.cc
deleted file mode 100644
index ea7266567e..0000000000
--- a/webrtc/modules/audio_coding/main/test/insert_packet_with_timing.cc
+++ /dev/null
@@ -1,311 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <stdio.h>
-
-#include "gflags/gflags.h"
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/common_types.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module.h"
-#include "webrtc/modules/audio_coding/main/test/Channel.h"
-#include "webrtc/modules/audio_coding/main/test/PCMFile.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/system_wrappers/include/clock.h"
-#include "webrtc/test/testsupport/fileutils.h"
-
-// Codec.
-DEFINE_string(codec, "opus", "Codec Name");
-DEFINE_int32(codec_sample_rate_hz, 48000, "Sampling rate in Hertz.");
-DEFINE_int32(codec_channels, 1, "Number of channels of the codec.");
-
-// PCM input/output.
-DEFINE_string(input, "", "Input PCM file at 16 kHz.");
-DEFINE_bool(input_stereo, false, "Input is stereo.");
-DEFINE_int32(input_fs_hz, 32000, "Input sample rate Hz.");
-DEFINE_string(output, "insert_rtp_with_timing_out.pcm", "OutputFile");
-DEFINE_int32(output_fs_hz, 32000, "Output sample rate Hz");
-
-// Timing files
-DEFINE_string(seq_num, "seq_num", "Sequence number file.");
-DEFINE_string(send_ts, "send_timestamp", "Send timestamp file.");
-DEFINE_string(receive_ts, "last_rec_timestamp", "Receive timestamp file");
-
-// Delay logging
-DEFINE_string(delay, "", "Log for delay.");
-
-// Other setups
-DEFINE_int32(init_delay, 0, "Initial delay.");
-DEFINE_bool(verbose, false, "Verbosity.");
-DEFINE_double(loss_rate, 0, "Rate of packet loss < 1");
-
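-// Example invocation (binary name and file names are illustrative):
-//   insert_packet_with_timing --codec=opus --seq_num=seq.txt \
-//       --send_ts=send.txt --receive_ts=recv.txt --delay=delay.log
-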
-const int32_t kAudioPlayedOut = 0x00000001;
-const int32_t kPacketPushedIn = 0x00000001 << 1;
-const int kPlayoutPeriodMs = 10;
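-// kAudioPlayedOut and kPacketPushedIn are bit flags OR-ed into the |action|
-// mask that TickOneMillisecond() reports back to the caller.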
-
-namespace webrtc {
-
-class InsertPacketWithTiming {
- public:
- InsertPacketWithTiming()
- : sender_clock_(new SimulatedClock(0)),
- receiver_clock_(new SimulatedClock(0)),
- send_acm_(AudioCodingModule::Create(0, sender_clock_)),
- receive_acm_(AudioCodingModule::Create(0, receiver_clock_)),
- channel_(new Channel),
- seq_num_fid_(fopen(FLAGS_seq_num.c_str(), "rt")),
- send_ts_fid_(fopen(FLAGS_send_ts.c_str(), "rt")),
- receive_ts_fid_(fopen(FLAGS_receive_ts.c_str(), "rt")),
- pcm_out_fid_(fopen(FLAGS_output.c_str(), "wb")),
- samples_in_1ms_(48),
- num_10ms_in_codec_frame_(2), // Typical 20 ms frames.
- time_to_insert_packet_ms_(3), // An arbitrary offset for the first packet push.
- next_receive_ts_(0),
- time_to_playout_audio_ms_(kPlayoutPeriodMs),
- loss_threshold_(0),
- playout_timing_fid_(fopen("playout_timing.txt", "wt")) {}
-
- void SetUp() {
- ASSERT_TRUE(sender_clock_ != NULL);
- ASSERT_TRUE(receiver_clock_ != NULL);
-
- ASSERT_TRUE(send_acm_.get() != NULL);
- ASSERT_TRUE(receive_acm_.get() != NULL);
- ASSERT_TRUE(channel_ != NULL);
-
- ASSERT_TRUE(seq_num_fid_ != NULL);
- ASSERT_TRUE(send_ts_fid_ != NULL);
- ASSERT_TRUE(receive_ts_fid_ != NULL);
-
- ASSERT_TRUE(playout_timing_fid_ != NULL);
-
- next_receive_ts_ = ReceiveTimestamp();
-
- CodecInst codec;
- ASSERT_EQ(0, AudioCodingModule::Codec(FLAGS_codec.c_str(), &codec,
- FLAGS_codec_sample_rate_hz,
- FLAGS_codec_channels));
- ASSERT_EQ(0, receive_acm_->InitializeReceiver());
- ASSERT_EQ(0, send_acm_->RegisterSendCodec(codec));
- ASSERT_EQ(0, receive_acm_->RegisterReceiveCodec(codec));
-
- // Set codec-dependent parameters.
- samples_in_1ms_ = codec.plfreq / 1000;
- num_10ms_in_codec_frame_ = codec.pacsize / (codec.plfreq / 100);
-
- channel_->RegisterReceiverACM(receive_acm_.get());
- send_acm_->RegisterTransportCallback(channel_);
-
- if (FLAGS_input.size() == 0) {
- std::string file_name = test::ResourcePath("audio_coding/testfile32kHz",
- "pcm");
- pcm_in_fid_.Open(file_name, 32000, "r", true); // auto-rewind
- std::cout << "Input file " << file_name << " 32 kHz mono." << std::endl;
- } else {
- pcm_in_fid_.Open(FLAGS_input, static_cast<uint16_t>(FLAGS_input_fs_hz),
- "r", true); // auto-rewind
- std::cout << "Input file " << FLAGS_input << "at " << FLAGS_input_fs_hz
- << " Hz in " << ((FLAGS_input_stereo) ? "stereo." : "mono.")
- << std::endl;
- pcm_in_fid_.ReadStereo(FLAGS_input_stereo);
- }
-
- ASSERT_TRUE(pcm_out_fid_ != NULL);
- std::cout << "Output file " << FLAGS_output << " at " << FLAGS_output_fs_hz
- << " Hz." << std::endl;
-
- // Other setups
- if (FLAGS_init_delay > 0)
- EXPECT_EQ(0, receive_acm_->SetInitialPlayoutDelay(FLAGS_init_delay));
-
- if (FLAGS_loss_rate > 0)
- loss_threshold_ = RAND_MAX * FLAGS_loss_rate;
- else
- loss_threshold_ = 0;
- }
-
- void TickOneMillisecond(uint32_t* action) {
- // One millisecond passed.
- time_to_insert_packet_ms_--;
- time_to_playout_audio_ms_--;
- sender_clock_->AdvanceTimeMilliseconds(1);
- receiver_clock_->AdvanceTimeMilliseconds(1);
-
- // Reset action.
- *action = 0;
-
- // Is it time to pull audio?
- if (time_to_playout_audio_ms_ == 0) {
- time_to_playout_audio_ms_ = kPlayoutPeriodMs;
- receive_acm_->PlayoutData10Ms(static_cast<int>(FLAGS_output_fs_hz),
- &frame_);
- fwrite(frame_.data_, sizeof(frame_.data_[0]),
- frame_.samples_per_channel_ * frame_.num_channels_, pcm_out_fid_);
- *action |= kAudioPlayedOut;
- }
-
- // Is it time to push in next packet?
- if (time_to_insert_packet_ms_ <= .5) {
- *action |= kPacketPushedIn;
-
- // Update time-to-insert packet.
- uint32_t t = next_receive_ts_;
- next_receive_ts_ = ReceiveTimestamp();
- time_to_insert_packet_ms_ += static_cast<float>(next_receive_ts_ - t) /
- samples_in_1ms_;
-
- // Push in just enough audio.
- for (int n = 0; n < num_10ms_in_codec_frame_; n++) {
- pcm_in_fid_.Read10MsData(frame_);
- EXPECT_GE(send_acm_->Add10MsData(frame_), 0);
- }
-
- // Set the parameters for the packet to be pushed in receiver ACM right
- // now.
- uint32_t ts = SendTimestamp();
- int seq_num = SequenceNumber();
- bool lost = false;
- channel_->set_send_timestamp(ts);
- channel_->set_sequence_number(seq_num);
- if (loss_threshold_ > 0 && rand() < loss_threshold_) {
- channel_->set_num_packets_to_drop(1);
- lost = true;
- }
-
- if (FLAGS_verbose) {
- if (!lost) {
- std::cout << "\nInserting packet number " << seq_num
- << " timestamp " << ts << std::endl;
- } else {
- std::cout << "\nLost packet number " << seq_num
- << " timestamp " << ts << std::endl;
- }
- }
- }
- }
-
- void TearDown() {
- delete channel_;
-
- fclose(seq_num_fid_);
- fclose(send_ts_fid_);
- fclose(receive_ts_fid_);
- fclose(pcm_out_fid_);
- pcm_in_fid_.Close();
- }
-
- ~InsertPacketWithTiming() {
- delete sender_clock_;
- delete receiver_clock_;
- }
-
- // Returns true while there is more data to simulate.
- bool HasPackets() {
- if (feof(seq_num_fid_) || feof(send_ts_fid_) || feof(receive_ts_fid_))
- return false;
- return true;
- }
-
- // Jitter buffer delay.
- void Delay(int* optimal_delay, int* current_delay) {
- NetworkStatistics statistics;
- receive_acm_->GetNetworkStatistics(&statistics);
- *optimal_delay = statistics.preferredBufferSize;
- *current_delay = statistics.currentBufferSize;
- }
-
- private:
- uint32_t SendTimestamp() {
- uint32_t t;
- EXPECT_EQ(1, fscanf(send_ts_fid_, "%u\n", &t));
- return t;
- }
-
- uint32_t ReceiveTimestamp() {
- uint32_t t;
- EXPECT_EQ(1, fscanf(receive_ts_fid_, "%u\n", &t));
- return t;
- }
-
- int SequenceNumber() {
- int n;
- EXPECT_EQ(1, fscanf(seq_num_fid_, "%d\n", &n));
- return n;
- }
-
- // This class creates these clock pointers and deletes them in its
- // destructor; the associated ACMs use but do not own them.
- SimulatedClock* sender_clock_;
- SimulatedClock* receiver_clock_;
-
- rtc::scoped_ptr<AudioCodingModule> send_acm_;
- rtc::scoped_ptr<AudioCodingModule> receive_acm_;
- Channel* channel_;
-
- FILE* seq_num_fid_; // Input (text), one sequence number per line.
- FILE* send_ts_fid_; // Input (text), one send timestamp per line.
- FILE* receive_ts_fid_; // Input (text), one receive timestamp per line.
- FILE* pcm_out_fid_; // Output PCM16.
-
- PCMFile pcm_in_fid_; // Input PCM16.
-
- int samples_in_1ms_;
-
- // TODO(turajs): this can be computed from the send timestamp, but there is
- // some complication to account for lost and reordered packets.
- int num_10ms_in_codec_frame_;
-
- float time_to_insert_packet_ms_;
- uint32_t next_receive_ts_;
- uint32_t time_to_playout_audio_ms_;
-
- AudioFrame frame_;
-
- double loss_threshold_;
-
- // Output (text), sequence number, playout timestamp, time (ms) of playout,
- // per line.
- FILE* playout_timing_fid_;
-};
-
-}  // namespace webrtc
-
-int main(int argc, char* argv[]) {
- google::ParseCommandLineFlags(&argc, &argv, true);
- webrtc::InsertPacketWithTiming test;
- test.SetUp();
-
- FILE* delay_log = NULL;
- if (FLAGS_delay.size() > 0) {
- delay_log = fopen(FLAGS_delay.c_str(), "wt");
- if (delay_log == NULL) {
- std::cout << "Cannot open the file to log delay values." << std::endl;
- exit(1);
- }
- }
-
- uint32_t action_taken;
- int optimal_delay_ms;
- int current_delay_ms;
- while (test.HasPackets()) {
- test.TickOneMillisecond(&action_taken);
-
- if (action_taken != 0) {
- test.Delay(&optimal_delay_ms, &current_delay_ms);
- if (delay_log != NULL) {
- fprintf(delay_log, "%3d %3d\n", optimal_delay_ms, current_delay_ms);
- }
- }
- }
- std::cout << std::endl;
- test.TearDown();
- if (delay_log != NULL)
- fclose(delay_log);
-}
diff --git a/webrtc/modules/audio_coding/main/test/opus_test.cc b/webrtc/modules/audio_coding/main/test/opus_test.cc
deleted file mode 100644
index 00c66cb3aa..0000000000
--- a/webrtc/modules/audio_coding/main/test/opus_test.cc
+++ /dev/null
@@ -1,381 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/test/opus_test.h"
-
-#include <assert.h>
-
-#include <string>
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/common_types.h"
-#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/audio_coding/codecs/opus/include/opus_interface.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module_typedefs.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_codec_database.h"
-#include "webrtc/modules/audio_coding/main/test/TestStereo.h"
-#include "webrtc/modules/audio_coding/main/test/utility.h"
-#include "webrtc/system_wrappers/include/trace.h"
-#include "webrtc/test/testsupport/fileutils.h"
-
-namespace webrtc {
-
-OpusTest::OpusTest()
- : acm_receiver_(AudioCodingModule::Create(0)),
- channel_a2b_(NULL),
- opus_mono_encoder_(NULL),
- opus_stereo_encoder_(NULL),
- opus_mono_decoder_(NULL),
- opus_stereo_decoder_(NULL),
- counter_(0),
- payload_type_(255),
- rtp_timestamp_(0) {}
-
-OpusTest::~OpusTest() {
- if (channel_a2b_ != NULL) {
- delete channel_a2b_;
- channel_a2b_ = NULL;
- }
- if (opus_mono_encoder_ != NULL) {
- WebRtcOpus_EncoderFree(opus_mono_encoder_);
- opus_mono_encoder_ = NULL;
- }
- if (opus_stereo_encoder_ != NULL) {
- WebRtcOpus_EncoderFree(opus_stereo_encoder_);
- opus_stereo_encoder_ = NULL;
- }
- if (opus_mono_decoder_ != NULL) {
- WebRtcOpus_DecoderFree(opus_mono_decoder_);
- opus_mono_decoder_ = NULL;
- }
- if (opus_stereo_decoder_ != NULL) {
- WebRtcOpus_DecoderFree(opus_stereo_decoder_);
- opus_stereo_decoder_ = NULL;
- }
-}
-
-void OpusTest::Perform() {
-#ifndef WEBRTC_CODEC_OPUS
-  // Opus is not enabled in this build; exit.
- return;
-#else
- uint16_t frequency_hz;
- int audio_channels;
- int16_t test_cntr = 0;
-
- // Open both mono and stereo test files in 32 kHz.
- const std::string file_name_stereo =
- webrtc::test::ResourcePath("audio_coding/teststereo32kHz", "pcm");
- const std::string file_name_mono =
- webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm");
- frequency_hz = 32000;
- in_file_stereo_.Open(file_name_stereo, frequency_hz, "rb");
- in_file_stereo_.ReadStereo(true);
- in_file_mono_.Open(file_name_mono, frequency_hz, "rb");
- in_file_mono_.ReadStereo(false);
-
- // Create Opus encoders for mono and stereo.
- ASSERT_GT(WebRtcOpus_EncoderCreate(&opus_mono_encoder_, 1, 0), -1);
- ASSERT_GT(WebRtcOpus_EncoderCreate(&opus_stereo_encoder_, 2, 1), -1);
-
- // Create Opus decoders for mono and stereo for stand-alone testing of Opus.
- ASSERT_GT(WebRtcOpus_DecoderCreate(&opus_mono_decoder_, 1), -1);
- ASSERT_GT(WebRtcOpus_DecoderCreate(&opus_stereo_decoder_, 2), -1);
- WebRtcOpus_DecoderInit(opus_mono_decoder_);
- WebRtcOpus_DecoderInit(opus_stereo_decoder_);
-
- ASSERT_TRUE(acm_receiver_.get() != NULL);
- EXPECT_EQ(0, acm_receiver_->InitializeReceiver());
-
- // Register Opus stereo as receiving codec.
- CodecInst opus_codec_param;
- int codec_id = acm_receiver_->Codec("opus", 48000, 2);
- EXPECT_EQ(0, acm_receiver_->Codec(codec_id, &opus_codec_param));
- payload_type_ = opus_codec_param.pltype;
- EXPECT_EQ(0, acm_receiver_->RegisterReceiveCodec(opus_codec_param));
-
- // Create and connect the channel.
- channel_a2b_ = new TestPackStereo;
- channel_a2b_->RegisterReceiverACM(acm_receiver_.get());
-
- //
- // Test Stereo.
- //
-
- channel_a2b_->set_codec_mode(kStereo);
- audio_channels = 2;
- test_cntr++;
- OpenOutFile(test_cntr);
-
- // Run Opus with 2.5 ms frame size.
- Run(channel_a2b_, audio_channels, 64000, 120);
-
- // Run Opus with 5 ms frame size.
- Run(channel_a2b_, audio_channels, 64000, 240);
-
- // Run Opus with 10 ms frame size.
- Run(channel_a2b_, audio_channels, 64000, 480);
-
- // Run Opus with 20 ms frame size.
- Run(channel_a2b_, audio_channels, 64000, 960);
-
- // Run Opus with 40 ms frame size.
- Run(channel_a2b_, audio_channels, 64000, 1920);
-
- // Run Opus with 60 ms frame size.
- Run(channel_a2b_, audio_channels, 64000, 2880);
-
- out_file_.Close();
- out_file_standalone_.Close();
-
- //
- // Test Opus stereo with packet-losses.
- //
-
- test_cntr++;
- OpenOutFile(test_cntr);
-
- // Run Opus with 20 ms frame size, 1% packet loss.
- Run(channel_a2b_, audio_channels, 64000, 960, 1);
-
- // Run Opus with 20 ms frame size, 5% packet loss.
- Run(channel_a2b_, audio_channels, 64000, 960, 5);
-
- // Run Opus with 20 ms frame size, 10% packet loss.
- Run(channel_a2b_, audio_channels, 64000, 960, 10);
-
- out_file_.Close();
- out_file_standalone_.Close();
-
- //
- // Test Mono.
- //
- channel_a2b_->set_codec_mode(kMono);
- audio_channels = 1;
- test_cntr++;
- OpenOutFile(test_cntr);
-
- // Register Opus mono as receiving codec.
- opus_codec_param.channels = 1;
- EXPECT_EQ(0, acm_receiver_->RegisterReceiveCodec(opus_codec_param));
-
- // Run Opus with 2.5 ms frame size.
- Run(channel_a2b_, audio_channels, 32000, 120);
-
- // Run Opus with 5 ms frame size.
- Run(channel_a2b_, audio_channels, 32000, 240);
-
- // Run Opus with 10 ms frame size.
- Run(channel_a2b_, audio_channels, 32000, 480);
-
- // Run Opus with 20 ms frame size.
- Run(channel_a2b_, audio_channels, 32000, 960);
-
- // Run Opus with 40 ms frame size.
- Run(channel_a2b_, audio_channels, 32000, 1920);
-
- // Run Opus with 60 ms frame size.
- Run(channel_a2b_, audio_channels, 32000, 2880);
-
- out_file_.Close();
- out_file_standalone_.Close();
-
- //
- // Test Opus mono with packet-losses.
- //
- test_cntr++;
- OpenOutFile(test_cntr);
-
- // Run Opus with 20 ms frame size, 1% packet loss.
- Run(channel_a2b_, audio_channels, 64000, 960, 1);
-
- // Run Opus with 20 ms frame size, 5% packet loss.
- Run(channel_a2b_, audio_channels, 64000, 960, 5);
-
- // Run Opus with 20 ms frame size, 10% packet loss.
- Run(channel_a2b_, audio_channels, 64000, 960, 10);
-
- // Close the files.
- in_file_stereo_.Close();
- in_file_mono_.Close();
- out_file_.Close();
- out_file_standalone_.Close();
-#endif
-}
-
-void OpusTest::Run(TestPackStereo* channel, int channels, int bitrate,
- int frame_length, int percent_loss) {
- AudioFrame audio_frame;
- int32_t out_freq_hz_b = out_file_.SamplingFrequency();
- const int kBufferSizeSamples = 480 * 12 * 2; // Can hold 120 ms stereo audio.
- int16_t audio[kBufferSizeSamples];
- int16_t out_audio[kBufferSizeSamples];
- int16_t audio_type;
- int written_samples = 0;
- int read_samples = 0;
- int decoded_samples = 0;
- bool first_packet = true;
- uint32_t start_time_stamp = 0;
-
- channel->reset_payload_size();
- counter_ = 0;
-
- // Set encoder rate.
- EXPECT_EQ(0, WebRtcOpus_SetBitRate(opus_mono_encoder_, bitrate));
- EXPECT_EQ(0, WebRtcOpus_SetBitRate(opus_stereo_encoder_, bitrate));
-
-#if defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS) || defined(WEBRTC_ARCH_ARM)
-  // On Android, iOS, and/or ARM targets, use a lower complexity setting by
-  // default.
- const int kOpusComplexity5 = 5;
- EXPECT_EQ(0, WebRtcOpus_SetComplexity(opus_mono_encoder_, kOpusComplexity5));
- EXPECT_EQ(0, WebRtcOpus_SetComplexity(opus_stereo_encoder_,
- kOpusComplexity5));
-#endif
-
-  // Keep the runtime under 60 seconds so that the test passes on Android.
- for (size_t audio_length = 0; audio_length < 10000; audio_length += 10) {
- bool lost_packet = false;
-
- // Get 10 msec of audio.
- if (channels == 1) {
- if (in_file_mono_.EndOfFile()) {
- break;
- }
- in_file_mono_.Read10MsData(audio_frame);
- } else {
- if (in_file_stereo_.EndOfFile()) {
- break;
- }
- in_file_stereo_.Read10MsData(audio_frame);
- }
-
- // If input audio is sampled at 32 kHz, resampling to 48 kHz is required.
- EXPECT_EQ(480,
- resampler_.Resample10Msec(audio_frame.data_,
- audio_frame.sample_rate_hz_,
- 48000,
- channels,
- kBufferSizeSamples - written_samples,
- &audio[written_samples]));
- written_samples += 480 * channels;
-
- // Sometimes we need to loop over the audio vector to produce the right
- // number of packets.
- int loop_encode = (written_samples - read_samples) /
- (channels * frame_length);
-
- if (loop_encode > 0) {
- const int kMaxBytes = 1000; // Maximum number of bytes for one packet.
- size_t bitstream_len_byte;
- uint8_t bitstream[kMaxBytes];
- for (int i = 0; i < loop_encode; i++) {
- int bitstream_len_byte_int = WebRtcOpus_Encode(
- (channels == 1) ? opus_mono_encoder_ : opus_stereo_encoder_,
- &audio[read_samples], frame_length, kMaxBytes, bitstream);
- ASSERT_GE(bitstream_len_byte_int, 0);
- bitstream_len_byte = static_cast<size_t>(bitstream_len_byte_int);
-
-      // Simulate packet loss by setting |lost_packet| to true in
-      // |percent_loss| percent of the loops.
- // TODO(tlegrand): Move handling of loss simulation to TestPackStereo.
- if (percent_loss > 0) {
- if (counter_ == floor((100 / percent_loss) + 0.5)) {
- counter_ = 0;
- lost_packet = true;
- channel->set_lost_packet(true);
- } else {
- lost_packet = false;
- channel->set_lost_packet(false);
- }
- counter_++;
- }
-
- // Run stand-alone Opus decoder, or decode PLC.
- if (channels == 1) {
- if (!lost_packet) {
- decoded_samples += WebRtcOpus_Decode(
- opus_mono_decoder_, bitstream, bitstream_len_byte,
- &out_audio[decoded_samples * channels], &audio_type);
- } else {
- decoded_samples += WebRtcOpus_DecodePlc(
- opus_mono_decoder_, &out_audio[decoded_samples * channels], 1);
- }
- } else {
- if (!lost_packet) {
- decoded_samples += WebRtcOpus_Decode(
- opus_stereo_decoder_, bitstream, bitstream_len_byte,
- &out_audio[decoded_samples * channels], &audio_type);
- } else {
- decoded_samples += WebRtcOpus_DecodePlc(
- opus_stereo_decoder_, &out_audio[decoded_samples * channels],
- 1);
- }
- }
-
- // Send data to the channel. "channel" will handle the loss simulation.
- channel->SendData(kAudioFrameSpeech, payload_type_, rtp_timestamp_,
- bitstream, bitstream_len_byte, NULL);
- if (first_packet) {
- first_packet = false;
- start_time_stamp = rtp_timestamp_;
- }
- rtp_timestamp_ += frame_length;
- read_samples += frame_length * channels;
- }
- if (read_samples == written_samples) {
- read_samples = 0;
- written_samples = 0;
- }
- }
-
- // Run received side of ACM.
- ASSERT_EQ(0, acm_receiver_->PlayoutData10Ms(out_freq_hz_b, &audio_frame));
-
- // Write output speech to file.
- out_file_.Write10MsData(
- audio_frame.data_,
- audio_frame.samples_per_channel_ * audio_frame.num_channels_);
-
- // Write stand-alone speech to file.
- out_file_standalone_.Write10MsData(
- out_audio, static_cast<size_t>(decoded_samples) * channels);
-
- if (audio_frame.timestamp_ > start_time_stamp) {
- // Number of channels should be the same for both stand-alone and
- // ACM-decoding.
- EXPECT_EQ(audio_frame.num_channels_, channels);
- }
-
- decoded_samples = 0;
- }
-
- if (in_file_mono_.EndOfFile()) {
- in_file_mono_.Rewind();
- }
- if (in_file_stereo_.EndOfFile()) {
- in_file_stereo_.Rewind();
- }
- // Reset in case we ended with a lost packet.
- channel->set_lost_packet(false);
-}
-
-void OpusTest::OpenOutFile(int test_number) {
- std::string file_name;
- std::stringstream file_stream;
- file_stream << webrtc::test::OutputPath() << "opustest_out_"
- << test_number << ".pcm";
- file_name = file_stream.str();
- out_file_.Open(file_name, 48000, "wb");
-  file_stream.str("");
- file_stream << webrtc::test::OutputPath() << "opusstandalone_out_"
- << test_number << ".pcm";
- file_name = file_stream.str();
- out_file_standalone_.Open(file_name, 48000, "wb");
-}
-
-} // namespace webrtc
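
The packet-loss simulation in Run() above is deterministic rather than random:
|counter_| is incremented once per encoded packet, so exactly one packet in
every floor((100 / percent_loss) + 0.5) packets is marked lost. A standalone
sketch of that cadence (variable names hypothetical):

  #include <cmath>
  #include <cstdio>

  int main() {
    const int percent_loss = 5;  // As in the 5% test pass above.
    // Same expression as in Run(); with integer division this is 100 / 5 = 20.
    const int period =
        static_cast<int>(std::floor((100 / percent_loss) + 0.5));
    int counter = 0;
    for (int packet = 0; packet < 100; ++packet) {
      bool lost = false;
      if (counter == period) {
        counter = 0;
        lost = true;
      }
      counter++;
      if (lost)
        printf("packet %d dropped\n", packet);  // Packets 20, 40, 60, 80.
    }
    return 0;
  }
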
diff --git a/webrtc/modules/audio_coding/main/test/opus_test.h b/webrtc/modules/audio_coding/main/test/opus_test.h
deleted file mode 100644
index 379bb86d5d..0000000000
--- a/webrtc/modules/audio_coding/main/test/opus_test.h
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_OPUS_TEST_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_OPUS_TEST_H_
-
-#include <math.h>
-
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/audio_coding/codecs/opus/include/opus_interface.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_resampler.h"
-#include "webrtc/modules/audio_coding/main/test/ACMTest.h"
-#include "webrtc/modules/audio_coding/main/test/Channel.h"
-#include "webrtc/modules/audio_coding/main/test/PCMFile.h"
-#include "webrtc/modules/audio_coding/main/test/TestStereo.h"
-
-namespace webrtc {
-
-class OpusTest : public ACMTest {
- public:
- OpusTest();
- ~OpusTest();
-
- void Perform();
-
- private:
- void Run(TestPackStereo* channel, int channels, int bitrate, int frame_length,
- int percent_loss = 0);
-
- void OpenOutFile(int test_number);
-
- rtc::scoped_ptr<AudioCodingModule> acm_receiver_;
- TestPackStereo* channel_a2b_;
- PCMFile in_file_stereo_;
- PCMFile in_file_mono_;
- PCMFile out_file_;
- PCMFile out_file_standalone_;
- int counter_;
- uint8_t payload_type_;
-  uint32_t rtp_timestamp_;
- acm2::ACMResampler resampler_;
- WebRtcOpusEncInst* opus_mono_encoder_;
- WebRtcOpusEncInst* opus_stereo_encoder_;
- WebRtcOpusDecInst* opus_mono_decoder_;
- WebRtcOpusDecInst* opus_stereo_decoder_;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_OPUS_TEST_H_
diff --git a/webrtc/modules/audio_coding/main/test/target_delay_unittest.cc b/webrtc/modules/audio_coding/main/test/target_delay_unittest.cc
deleted file mode 100644
index 20b10a376e..0000000000
--- a/webrtc/modules/audio_coding/main/test/target_delay_unittest.cc
+++ /dev/null
@@ -1,223 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/common_types.h"
-#include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module.h"
-#include "webrtc/modules/audio_coding/main/test/utility.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/system_wrappers/include/sleep.h"
-#include "webrtc/test/testsupport/fileutils.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
-
-namespace webrtc {
-
-class TargetDelayTest : public ::testing::Test {
- protected:
- TargetDelayTest() : acm_(AudioCodingModule::Create(0)) {}
-
- ~TargetDelayTest() {}
-
- void SetUp() {
- EXPECT_TRUE(acm_.get() != NULL);
-
- CodecInst codec;
- ASSERT_EQ(0, AudioCodingModule::Codec("L16", &codec, kSampleRateHz, 1));
- ASSERT_EQ(0, acm_->InitializeReceiver());
- ASSERT_EQ(0, acm_->RegisterReceiveCodec(codec));
-
- rtp_info_.header.payloadType = codec.pltype;
- rtp_info_.header.timestamp = 0;
- rtp_info_.header.ssrc = 0x12345678;
- rtp_info_.header.markerBit = false;
- rtp_info_.header.sequenceNumber = 0;
- rtp_info_.type.Audio.channel = 1;
- rtp_info_.type.Audio.isCNG = false;
- rtp_info_.frameType = kAudioFrameSpeech;
-
- int16_t audio[kFrameSizeSamples];
- const int kRange = 0x7FF; // 2047, easy for masking.
- for (size_t n = 0; n < kFrameSizeSamples; ++n)
- audio[n] = (rand() & kRange) - kRange / 2;
- WebRtcPcm16b_Encode(audio, kFrameSizeSamples, payload_);
- }
-
- void OutOfRangeInput() {
- EXPECT_EQ(-1, SetMinimumDelay(-1));
- EXPECT_EQ(-1, SetMinimumDelay(10001));
- }
-
- void NoTargetDelayBufferSizeChanges() {
- for (int n = 0; n < 30; ++n) // Run enough iterations.
- Run(true);
- int clean_optimal_delay = GetCurrentOptimalDelayMs();
- Run(false); // Run with jitter.
- int jittery_optimal_delay = GetCurrentOptimalDelayMs();
- EXPECT_GT(jittery_optimal_delay, clean_optimal_delay);
- int required_delay = RequiredDelay();
- EXPECT_GT(required_delay, 0);
- EXPECT_NEAR(required_delay, jittery_optimal_delay, 1);
- }
-
- void WithTargetDelayBufferNotChanging() {
- // A target delay that is one packet larger than jitter.
- const int kTargetDelayMs = (kInterarrivalJitterPacket + 1) *
- kNum10msPerFrame * 10;
- ASSERT_EQ(0, SetMinimumDelay(kTargetDelayMs));
- for (int n = 0; n < 30; ++n) // Run enough iterations to fill the buffer.
- Run(true);
- int clean_optimal_delay = GetCurrentOptimalDelayMs();
- EXPECT_EQ(kTargetDelayMs, clean_optimal_delay);
- Run(false); // Run with jitter.
- int jittery_optimal_delay = GetCurrentOptimalDelayMs();
- EXPECT_EQ(jittery_optimal_delay, clean_optimal_delay);
- }
-
- void RequiredDelayAtCorrectRange() {
- for (int n = 0; n < 30; ++n) // Run clean and store delay.
- Run(true);
- int clean_optimal_delay = GetCurrentOptimalDelayMs();
-
- // A relatively large delay.
- const int kTargetDelayMs = (kInterarrivalJitterPacket + 10) *
- kNum10msPerFrame * 10;
- ASSERT_EQ(0, SetMinimumDelay(kTargetDelayMs));
- for (int n = 0; n < 300; ++n) // Run enough iterations to fill the buffer.
- Run(true);
- Run(false); // Run with jitter.
-
- int jittery_optimal_delay = GetCurrentOptimalDelayMs();
- EXPECT_EQ(kTargetDelayMs, jittery_optimal_delay);
-
- int required_delay = RequiredDelay();
-
-    // Check that |required_delay| is in the correct range.
- EXPECT_GT(required_delay, 0);
- EXPECT_GT(jittery_optimal_delay, required_delay);
- EXPECT_GT(required_delay, clean_optimal_delay);
-
-    // A tighter check for the value of |required_delay|.
-    // The jitter forces a delay of
-    // |kInterarrivalJitterPacket * kNum10msPerFrame * 10| milliseconds. So we
-    // expect |required_delay| to be close to that.
- EXPECT_NEAR(kInterarrivalJitterPacket * kNum10msPerFrame * 10,
- required_delay, 1);
- }
-
- void TargetDelayBufferMinMax() {
- const int kTargetMinDelayMs = kNum10msPerFrame * 10;
- ASSERT_EQ(0, SetMinimumDelay(kTargetMinDelayMs));
- for (int m = 0; m < 30; ++m) // Run enough iterations to fill the buffer.
- Run(true);
- int clean_optimal_delay = GetCurrentOptimalDelayMs();
- EXPECT_EQ(kTargetMinDelayMs, clean_optimal_delay);
-
- const int kTargetMaxDelayMs = 2 * (kNum10msPerFrame * 10);
- ASSERT_EQ(0, SetMaximumDelay(kTargetMaxDelayMs));
- for (int n = 0; n < 30; ++n) // Run enough iterations to fill the buffer.
- Run(false);
-
- int capped_optimal_delay = GetCurrentOptimalDelayMs();
- EXPECT_EQ(kTargetMaxDelayMs, capped_optimal_delay);
- }
-
- private:
- static const int kSampleRateHz = 16000;
- static const int kNum10msPerFrame = 2;
- static const size_t kFrameSizeSamples = 320; // 20 ms @ 16 kHz.
- // payload-len = frame-samples * 2 bytes/sample.
- static const int kPayloadLenBytes = 320 * 2;
-  // Inter-arrival time, in number of packets, over a jittery channel. A value
-  // of one means no jitter.
-
- void Push() {
- rtp_info_.header.timestamp += kFrameSizeSamples;
- rtp_info_.header.sequenceNumber++;
-    ASSERT_EQ(0, acm_->IncomingPacket(payload_, kPayloadLenBytes, rtp_info_));
- }
-
- // Pull audio equivalent to the amount of audio in one RTP packet.
- void Pull() {
- AudioFrame frame;
- for (int k = 0; k < kNum10msPerFrame; ++k) { // Pull one frame.
- ASSERT_EQ(0, acm_->PlayoutData10Ms(-1, &frame));
-      // ASSERT_EQ generated a compile error here, so ASSERT_TRUE is used
-      // instead.
- ASSERT_TRUE(kSampleRateHz == frame.sample_rate_hz_);
- ASSERT_EQ(1, frame.num_channels_);
- ASSERT_TRUE(kSampleRateHz / 100 == frame.samples_per_channel_);
- }
- }
-
- void Run(bool clean) {
- for (int n = 0; n < 10; ++n) {
- for (int m = 0; m < 5; ++m) {
- Push();
- Pull();
- }
-
- if (!clean) {
- for (int m = 0; m < 10; ++m) { // Long enough to trigger delay change.
- Push();
- for (int n = 0; n < kInterarrivalJitterPacket; ++n)
- Pull();
- }
- }
- }
- }
-
- int SetMinimumDelay(int delay_ms) {
- return acm_->SetMinimumPlayoutDelay(delay_ms);
- }
-
- int SetMaximumDelay(int delay_ms) {
- return acm_->SetMaximumPlayoutDelay(delay_ms);
- }
-
- int GetCurrentOptimalDelayMs() {
- NetworkStatistics stats;
- acm_->GetNetworkStatistics(&stats);
- return stats.preferredBufferSize;
- }
-
- int RequiredDelay() {
- return acm_->LeastRequiredDelayMs();
- }
-
- rtc::scoped_ptr<AudioCodingModule> acm_;
- WebRtcRTPHeader rtp_info_;
- uint8_t payload_[kPayloadLenBytes];
-};
-
-TEST_F(TargetDelayTest, DISABLED_ON_ANDROID(OutOfRangeInput)) {
- OutOfRangeInput();
-}
-
-TEST_F(TargetDelayTest, DISABLED_ON_ANDROID(NoTargetDelayBufferSizeChanges)) {
- NoTargetDelayBufferSizeChanges();
-}
-
-TEST_F(TargetDelayTest, DISABLED_ON_ANDROID(WithTargetDelayBufferNotChanging)) {
- WithTargetDelayBufferNotChanging();
-}
-
-TEST_F(TargetDelayTest, DISABLED_ON_ANDROID(RequiredDelayAtCorrectRange)) {
- RequiredDelayAtCorrectRange();
-}
-
-TEST_F(TargetDelayTest, DISABLED_ON_ANDROID(TargetDelayBufferMinMax)) {
- TargetDelayBufferMinMax();
-}
-
-} // namespace webrtc
-
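
For reference, the arithmetic behind the constants in the deleted test above:
each packet carries kNum10msPerFrame * 10 = 20 ms of audio, and the jittery
phase of Run() pulls kInterarrivalJitterPacket = 2 packets' worth of audio per
insertion, so packets effectively arrive 2 * 20 = 40 ms apart. That is why
RequiredDelayAtCorrectRange() expects LeastRequiredDelayMs() to land within
1 ms of kInterarrivalJitterPacket * kNum10msPerFrame * 10 = 40 ms.
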
diff --git a/webrtc/modules/audio_coding/main/test/utility.cc b/webrtc/modules/audio_coding/main/test/utility.cc
deleted file mode 100644
index 34af5e703f..0000000000
--- a/webrtc/modules/audio_coding/main/test/utility.cc
+++ /dev/null
@@ -1,303 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "utility.h"
-
-#include <assert.h>
-#include <stdio.h>
-#include <stdlib.h>
-#include <string.h>
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/common.h"
-#include "webrtc/common_types.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-
-#define NUM_CODECS_WITH_FIXED_PAYLOAD_TYPE 13
-
-namespace webrtc {
-
-ACMTestTimer::ACMTestTimer()
- : _msec(0),
- _sec(0),
- _min(0),
- _hour(0) {
- return;
-}
-
-ACMTestTimer::~ACMTestTimer() {
- return;
-}
-
-void ACMTestTimer::Reset() {
- _msec = 0;
- _sec = 0;
- _min = 0;
- _hour = 0;
- return;
-}
-void ACMTestTimer::Tick10ms() {
- _msec += 10;
- Adjust();
- return;
-}
-
-void ACMTestTimer::Tick1ms() {
- _msec++;
- Adjust();
- return;
-}
-
-void ACMTestTimer::Tick100ms() {
- _msec += 100;
- Adjust();
- return;
-}
-
-void ACMTestTimer::Tick1sec() {
- _sec++;
- Adjust();
- return;
-}
-
-void ACMTestTimer::CurrentTimeHMS(char* currTime) {
- sprintf(currTime, "%4lu:%02u:%06.3f", _hour, _min,
- (double) _sec + (double) _msec / 1000.);
- return;
-}
-
-void ACMTestTimer::CurrentTime(unsigned long& h, unsigned char& m,
- unsigned char& s, unsigned short& ms) {
- h = _hour;
- m = _min;
- s = _sec;
- ms = _msec;
- return;
-}
-
-void ACMTestTimer::Adjust() {
- unsigned int n;
- if (_msec >= 1000) {
- n = _msec / 1000;
- _msec -= (1000 * n);
- _sec += n;
- }
- if (_sec >= 60) {
- n = _sec / 60;
- _sec -= (n * 60);
- _min += n;
- }
- if (_min >= 60) {
- n = _min / 60;
- _min -= (n * 60);
- _hour += n;
- }
-}
-
-int16_t ChooseCodec(CodecInst& codecInst) {
-  PrintCodecs();
- uint8_t noCodec = AudioCodingModule::NumberOfCodecs();
- int8_t codecID;
- bool outOfRange = false;
- char myStr[15] = "";
- do {
- printf("\nChoose a codec [0]: ");
- EXPECT_TRUE(fgets(myStr, 10, stdin) != NULL);
- codecID = atoi(myStr);
- if ((codecID < 0) || (codecID >= noCodec)) {
- printf("\nOut of range.\n");
- outOfRange = true;
- }
- } while (outOfRange);
-
- CHECK_ERROR(AudioCodingModule::Codec((uint8_t )codecID, &codecInst));
- return 0;
-}
-
-void PrintCodecs() {
- uint8_t noCodec = AudioCodingModule::NumberOfCodecs();
-
- CodecInst codecInst;
- printf("No Name [Hz] [bps]\n");
- for (uint8_t codecCntr = 0; codecCntr < noCodec; codecCntr++) {
- AudioCodingModule::Codec(codecCntr, &codecInst);
- printf("%2d- %-18s %5d %6d\n", codecCntr, codecInst.plname,
- codecInst.plfreq, codecInst.rate);
- }
-}
-
-CircularBuffer::CircularBuffer(uint32_t len)
- : _buff(NULL),
- _idx(0),
- _buffIsFull(false),
- _calcAvg(false),
- _calcVar(false),
- _sum(0),
- _sumSqr(0) {
- _buff = new double[len];
- if (_buff == NULL) {
- _buffLen = 0;
- } else {
- for (uint32_t n = 0; n < len; n++) {
- _buff[n] = 0;
- }
- _buffLen = len;
- }
-}
-
-CircularBuffer::~CircularBuffer() {
- if (_buff != NULL) {
- delete[] _buff;
- _buff = NULL;
- }
-}
-
-void CircularBuffer::Update(const double newVal) {
- assert(_buffLen > 0);
-
-  // Store the value that is going to be overwritten.
-  double oldVal = _buff[_idx];
-  // Record the new value.
-  _buff[_idx] = newVal;
-  // Increment the index, to point to where we would write next.
-  _idx++;
-  // Since this is a circular buffer, cycle back to the beginning once the
-  // end is reached.
-  if (_idx >= _buffLen) {
-    // Flag that the buffer is full.
- _buffIsFull = true;
- _idx = 0;
- }
-
-  // Update the running sums.
-
-  if (_calcAvg) {
-    // For the average we have to update the sum.
-    _sum += (newVal - oldVal);
-  }
-
-  if (_calcVar) {
-    // For the variance we have to update the sum of squares. Note that
-    // newVal^2 - oldVal^2 == (newVal - oldVal) * (newVal + oldVal).
-    _sumSqr += (double) (newVal - oldVal) * (double) (newVal + oldVal);
-  }
-}
-
-void CircularBuffer::SetArithMean(bool enable) {
- assert(_buffLen > 0);
-
- if (enable && !_calcAvg) {
- uint32_t lim;
- if (_buffIsFull) {
- lim = _buffLen;
- } else {
- lim = _idx;
- }
- _sum = 0;
- for (uint32_t n = 0; n < lim; n++) {
- _sum += _buff[n];
- }
- }
- _calcAvg = enable;
-}
-
-void CircularBuffer::SetVariance(bool enable) {
- assert(_buffLen > 0);
-
- if (enable && !_calcVar) {
- uint32_t lim;
- if (_buffIsFull) {
- lim = _buffLen;
- } else {
- lim = _idx;
- }
- _sumSqr = 0;
- for (uint32_t n = 0; n < lim; n++) {
- _sumSqr += _buff[n] * _buff[n];
- }
- }
-  _calcVar = enable;
-}
-
-int16_t CircularBuffer::ArithMean(double& mean) {
- assert(_buffLen > 0);
-
- if (_buffIsFull) {
- mean = _sum / (double) _buffLen;
- return 0;
- } else {
- if (_idx > 0) {
- mean = _sum / (double) _idx;
- return 0;
- } else {
- return -1;
- }
- }
-}
-
-int16_t CircularBuffer::Variance(double& var) {
-  assert(_buffLen > 0);
-
-  // Note that this returns the mean of the squared samples (E[x^2]); the
-  // sample mean is not subtracted.
-  if (_buffIsFull) {
-    var = _sumSqr / (double) _buffLen;
- return 0;
- } else {
- if (_idx > 0) {
- var = _sumSqr / (double) _idx;
- return 0;
- } else {
- return -1;
- }
- }
-}
-
-bool FixedPayloadTypeCodec(const char* payloadName) {
-  char fixPayloadTypeCodecs[NUM_CODECS_WITH_FIXED_PAYLOAD_TYPE][32] = { "PCMU",
-      "PCMA", "GSM", "G723", "DVI4", "LPC", "L16", "G722", "QCELP", "CN",
-      "MPA", "G728", "G729" };
-
- for (int n = 0; n < NUM_CODECS_WITH_FIXED_PAYLOAD_TYPE; n++) {
- if (!STR_CASE_CMP(payloadName, fixPayloadTypeCodecs[n])) {
- return true;
- }
- }
- return false;
-}
-
-void VADCallback::Reset() {
- memset(_numFrameTypes, 0, sizeof(_numFrameTypes));
-}
-
-VADCallback::VADCallback() {
- memset(_numFrameTypes, 0, sizeof(_numFrameTypes));
-}
-
-void VADCallback::PrintFrameTypes() {
- printf("kEmptyFrame......... %d\n", _numFrameTypes[kEmptyFrame]);
- printf("kAudioFrameSpeech... %d\n", _numFrameTypes[kAudioFrameSpeech]);
- printf("kAudioFrameCN....... %d\n", _numFrameTypes[kAudioFrameCN]);
- printf("kVideoFrameKey...... %d\n", _numFrameTypes[kVideoFrameKey]);
- printf("kVideoFrameDelta.... %d\n", _numFrameTypes[kVideoFrameDelta]);
-}
-
-int32_t VADCallback::InFrameType(FrameType frame_type) {
- _numFrameTypes[frame_type]++;
- return 0;
-}
-
-} // namespace webrtc
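
A minimal usage sketch of the CircularBuffer statistics helpers defined above,
assuming the header is reachable as "utility.h" (sample values hypothetical):

  #include <cstdio>

  #include "utility.h"

  int main() {
    webrtc::CircularBuffer buf(4);  // Window over the last 4 samples.
    buf.SetArithMean(true);
    buf.SetVariance(true);
    const double samples[] = {1.0, 2.0, 3.0, 4.0, 5.0};
    for (double s : samples)
      buf.Update(s);  // The fifth call overwrites the oldest value (1.0).
    double mean = 0.0, mean_sqr = 0.0;
    if (buf.ArithMean(mean) == 0 && buf.Variance(mean_sqr) == 0) {
      // Window is now {2, 3, 4, 5}: mean 3.50, mean of squares 13.50.
      printf("mean %.2f, E[x^2] %.2f\n", mean, mean_sqr);
    }
    return 0;
  }
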
diff --git a/webrtc/modules/audio_coding/main/test/utility.h b/webrtc/modules/audio_coding/main/test/utility.h
deleted file mode 100644
index e936ec1cdd..0000000000
--- a/webrtc/modules/audio_coding/main/test/utility.h
+++ /dev/null
@@ -1,139 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_UTILITY_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_UTILITY_H_
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module.h"
-
-namespace webrtc {
-
-//-----------------------------
-#define CHECK_ERROR(f) \
- do { \
- EXPECT_GE(f, 0) << "Error Calling API"; \
- } while(0)
-
-//-----------------------------
-#define CHECK_PROTECTED(f) \
- do { \
- if (f >= 0) { \
- ADD_FAILURE() << "Error Calling API"; \
- } else { \
- printf("An expected error is caught.\n"); \
- } \
- } while(0)
-
-//----------------------------
-#define CHECK_ERROR_MT(f) \
- do { \
- if (f < 0) { \
- fprintf(stderr, "Error Calling API in file %s at line %d \n", \
- __FILE__, __LINE__); \
- } \
- } while(0)
-
-//----------------------------
-#define CHECK_PROTECTED_MT(f) \
- do { \
- if (f >= 0) { \
- fprintf(stderr, "Error Calling API in file %s at line %d \n", \
- __FILE__, __LINE__); \
- } else { \
- printf("An expected error is caught.\n"); \
- } \
- } while(0)
-
-#define DELETE_POINTER(p) \
- do { \
- if (p != NULL) { \
- delete p; \
- p = NULL; \
- } \
- } while(0)
-
-class ACMTestTimer {
- public:
- ACMTestTimer();
- ~ACMTestTimer();
-
- void Reset();
- void Tick10ms();
- void Tick1ms();
- void Tick100ms();
- void Tick1sec();
- void CurrentTimeHMS(char* currTime);
- void CurrentTime(unsigned long& h, unsigned char& m, unsigned char& s,
- unsigned short& ms);
-
- private:
- void Adjust();
-
- unsigned short _msec;
- unsigned char _sec;
- unsigned char _min;
- unsigned long _hour;
-};
-
-class CircularBuffer {
- public:
- CircularBuffer(uint32_t len);
- ~CircularBuffer();
-
- void SetArithMean(bool enable);
- void SetVariance(bool enable);
-
- void Update(const double newVal);
-  bool IsBufferFull() const { return _buffIsFull; }
-
- int16_t Variance(double& var);
- int16_t ArithMean(double& mean);
-
- protected:
- double* _buff;
- uint32_t _idx;
- uint32_t _buffLen;
-
- bool _buffIsFull;
- bool _calcAvg;
- bool _calcVar;
- double _sum;
- double _sumSqr;
-};
-
-int16_t ChooseCodec(CodecInst& codecInst);
-
-void PrintCodecs();
-
-bool FixedPayloadTypeCodec(const char* payloadName);
-
-class VADCallback : public ACMVADCallback {
- public:
- VADCallback();
- ~VADCallback() {
- }
-
- int32_t InFrameType(FrameType frame_type);
-
- void PrintFrameTypes();
- void Reset();
-
- private:
- uint32_t _numFrameTypes[5];
-};
-
-void UseLegacyAcm(webrtc::Config* config);
-
-void UseNewAcm(webrtc::Config* config);
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_UTILITY_H_
diff --git a/webrtc/modules/audio_coding/neteq/audio_decoder_impl.cc b/webrtc/modules/audio_coding/neteq/audio_decoder_impl.cc
index a9ea44d6a6..d800cc7dbe 100644
--- a/webrtc/modules/audio_coding/neteq/audio_decoder_impl.cc
+++ b/webrtc/modules/audio_coding/neteq/audio_decoder_impl.cc
@@ -13,13 +13,13 @@
#include <assert.h>
#include "webrtc/base/checks.h"
-#include "webrtc/modules/audio_coding/codecs/cng/include/webrtc_cng.h"
-#include "webrtc/modules/audio_coding/codecs/g711/include/audio_decoder_pcm.h"
+#include "webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h"
+#include "webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.h"
#ifdef WEBRTC_CODEC_G722
-#include "webrtc/modules/audio_coding/codecs/g722/include/audio_decoder_g722.h"
+#include "webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.h"
#endif
#ifdef WEBRTC_CODEC_ILBC
-#include "webrtc/modules/audio_coding/codecs/ilbc/include/audio_decoder_ilbc.h"
+#include "webrtc/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h"
#endif
#ifdef WEBRTC_CODEC_ISACFX
#include "webrtc/modules/audio_coding/codecs/isac/fix/include/audio_decoder_isacfix.h"
@@ -30,9 +30,9 @@
#include "webrtc/modules/audio_coding/codecs/isac/main/include/audio_encoder_isac.h"
#endif
#ifdef WEBRTC_CODEC_OPUS
-#include "webrtc/modules/audio_coding/codecs/opus/include/audio_decoder_opus.h"
+#include "webrtc/modules/audio_coding/codecs/opus/audio_decoder_opus.h"
#endif
-#include "webrtc/modules/audio_coding/codecs/pcm16b/include/audio_decoder_pcm16b.h"
+#include "webrtc/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.h"
namespace webrtc {
diff --git a/webrtc/modules/audio_coding/neteq/audio_decoder_impl.h b/webrtc/modules/audio_coding/neteq/audio_decoder_impl.h
index 3229033d92..bc8bdd9626 100644
--- a/webrtc/modules/audio_coding/neteq/audio_decoder_impl.h
+++ b/webrtc/modules/audio_coding/neteq/audio_decoder_impl.h
@@ -16,11 +16,11 @@
#include "webrtc/engine_configurations.h"
#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
-#include "webrtc/modules/audio_coding/codecs/cng/include/webrtc_cng.h"
+#include "webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h"
#ifdef WEBRTC_CODEC_G722
-#include "webrtc/modules/audio_coding/codecs/g722/include/g722_interface.h"
+#include "webrtc/modules/audio_coding/codecs/g722/g722_interface.h"
#endif
-#include "webrtc/modules/audio_coding/main/acm2/rent_a_codec.h"
+#include "webrtc/modules/audio_coding/acm2/rent_a_codec.h"
#include "webrtc/typedefs.h"
namespace webrtc {
diff --git a/webrtc/modules/audio_coding/neteq/audio_decoder_unittest.cc b/webrtc/modules/audio_coding/neteq/audio_decoder_unittest.cc
index 8f82fb11a4..599929e78d 100644
--- a/webrtc/modules/audio_coding/neteq/audio_decoder_unittest.cc
+++ b/webrtc/modules/audio_coding/neteq/audio_decoder_unittest.cc
@@ -18,20 +18,20 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/audio_coding/codecs/g711/include/audio_decoder_pcm.h"
-#include "webrtc/modules/audio_coding/codecs/g711/include/audio_encoder_pcm.h"
-#include "webrtc/modules/audio_coding/codecs/g722/include/audio_decoder_g722.h"
-#include "webrtc/modules/audio_coding/codecs/g722/include/audio_encoder_g722.h"
-#include "webrtc/modules/audio_coding/codecs/ilbc/include/audio_decoder_ilbc.h"
-#include "webrtc/modules/audio_coding/codecs/ilbc/include/audio_encoder_ilbc.h"
+#include "webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.h"
+#include "webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.h"
+#include "webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.h"
+#include "webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.h"
+#include "webrtc/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h"
+#include "webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h"
#include "webrtc/modules/audio_coding/codecs/isac/fix/include/audio_decoder_isacfix.h"
#include "webrtc/modules/audio_coding/codecs/isac/fix/include/audio_encoder_isacfix.h"
#include "webrtc/modules/audio_coding/codecs/isac/main/include/audio_decoder_isac.h"
#include "webrtc/modules/audio_coding/codecs/isac/main/include/audio_encoder_isac.h"
-#include "webrtc/modules/audio_coding/codecs/opus/include/audio_decoder_opus.h"
-#include "webrtc/modules/audio_coding/codecs/opus/include/audio_encoder_opus.h"
-#include "webrtc/modules/audio_coding/codecs/pcm16b/include/audio_decoder_pcm16b.h"
-#include "webrtc/modules/audio_coding/codecs/pcm16b/include/audio_encoder_pcm16b.h"
+#include "webrtc/modules/audio_coding/codecs/opus/audio_decoder_opus.h"
+#include "webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h"
+#include "webrtc/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.h"
+#include "webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.h"
#include "webrtc/modules/audio_coding/neteq/tools/resample_input_audio_file.h"
#include "webrtc/system_wrappers/include/data_log.h"
#include "webrtc/test/testsupport/fileutils.h"
@@ -158,7 +158,10 @@ class AudioDecoderTest : public ::testing::Test {
interleaved_input.get());
encoded_info_ = audio_encoder_->Encode(
- 0, interleaved_input.get(), audio_encoder_->SampleRateHz() / 100,
+ 0, rtc::ArrayView<const int16_t>(interleaved_input.get(),
+ audio_encoder_->NumChannels() *
+ audio_encoder_->SampleRateHz() /
+ 100),
data_length_ * 2, output);
}
EXPECT_EQ(payload_type_, encoded_info_.payload_type);
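
The hunk above adapts the test to an AudioEncoder::Encode overload that takes
its input as rtc::ArrayView<const int16_t> instead of a raw pointer with an
implicit length. A minimal sketch of constructing such a view, assuming the
rtc::ArrayView(pointer, size) constructor from webrtc/base/array_view.h
(rate and channel count hypothetical):

  #include <vector>

  #include "webrtc/base/array_view.h"

  void BuildTenMsView() {
    const int sample_rate_hz = 48000;  // Assumed sample rate.
    const size_t num_channels = 2;     // Assumed channel count.
    std::vector<int16_t> interleaved(num_channels * sample_rate_hz / 100);
    rtc::ArrayView<const int16_t> input(interleaved.data(),
                                        interleaved.size());
    // input.size() equals NumChannels() * SampleRateHz() / 100, matching the
    // length expression in the updated Encode call above.
  }
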
@@ -563,18 +566,14 @@ TEST_F(AudioDecoderIsacSwbTest, SetTargetBitrate) {
TestSetAndGetTargetBitratesWithFixedCodec(audio_encoder_.get(), 32000);
}
-// Fails Android ARM64. https://code.google.com/p/webrtc/issues/detail?id=4198
-#if defined(WEBRTC_ANDROID) && defined(WEBRTC_ARCH_ARM64)
-#define MAYBE_EncodeDecode DISABLED_EncodeDecode
-#else
-#define MAYBE_EncodeDecode EncodeDecode
-#endif
-TEST_F(AudioDecoderIsacFixTest, MAYBE_EncodeDecode) {
+TEST_F(AudioDecoderIsacFixTest, EncodeDecode) {
int tolerance = 11034;
double mse = 3.46e6;
int delay = 54; // Delay from input to output.
-#ifdef WEBRTC_ANDROID
+#if defined(WEBRTC_ANDROID) && defined(WEBRTC_ARCH_ARM)
static const int kEncodedBytes = 685;
+#elif defined(WEBRTC_ANDROID) && defined(WEBRTC_ARCH_ARM64)
+ static const int kEncodedBytes = 673;
#else
static const int kEncodedBytes = 671;
#endif
diff --git a/webrtc/modules/audio_coding/neteq/comfort_noise.cc b/webrtc/modules/audio_coding/neteq/comfort_noise.cc
index 3fe6607778..a5b08469be 100644
--- a/webrtc/modules/audio_coding/neteq/comfort_noise.cc
+++ b/webrtc/modules/audio_coding/neteq/comfort_noise.cc
@@ -14,7 +14,7 @@
#include "webrtc/base/logging.h"
#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
-#include "webrtc/modules/audio_coding/codecs/cng/include/webrtc_cng.h"
+#include "webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h"
#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
#include "webrtc/modules/audio_coding/neteq/dsp_helper.h"
#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
diff --git a/webrtc/modules/audio_coding/neteq/decision_logic.cc b/webrtc/modules/audio_coding/neteq/decision_logic.cc
index 14e0426d7d..39bb4662c7 100644
--- a/webrtc/modules/audio_coding/neteq/decision_logic.cc
+++ b/webrtc/modules/audio_coding/neteq/decision_logic.cc
@@ -128,9 +128,6 @@ Operations DecisionLogic::GetDecision(const SyncBuffer& sync_buffer,
const size_t cur_size_samples =
samples_left + packet_buffer_.NumSamplesInBuffer(decoder_database_,
decoder_frame_length);
- LOG(LS_VERBOSE) << "Buffers: " << packet_buffer_.NumPacketsInBuffer() <<
- " packets * " << decoder_frame_length << " samples/packet + " <<
- samples_left << " samples in sync buffer = " << cur_size_samples;
prev_time_scale_ = prev_time_scale_ &&
(prev_mode == kModeAccelerateSuccess ||
diff --git a/webrtc/modules/audio_coding/neteq/decision_logic_normal.cc b/webrtc/modules/audio_coding/neteq/decision_logic_normal.cc
index d3f6fa6dd4..0252d1cdfa 100644
--- a/webrtc/modules/audio_coding/neteq/decision_logic_normal.cc
+++ b/webrtc/modules/audio_coding/neteq/decision_logic_normal.cc
@@ -20,7 +20,7 @@
#include "webrtc/modules/audio_coding/neteq/expand.h"
#include "webrtc/modules/audio_coding/neteq/packet_buffer.h"
#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
namespace webrtc {
diff --git a/webrtc/modules/audio_coding/neteq/decoder_database.cc b/webrtc/modules/audio_coding/neteq/decoder_database.cc
index 41803f754a..92d4bab1e4 100644
--- a/webrtc/modules/audio_coding/neteq/decoder_database.cc
+++ b/webrtc/modules/audio_coding/neteq/decoder_database.cc
@@ -13,6 +13,7 @@
#include <assert.h>
#include <utility> // pair
+#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
@@ -38,17 +39,17 @@ void DecoderDatabase::Reset() {
}
int DecoderDatabase::RegisterPayload(uint8_t rtp_payload_type,
- NetEqDecoder codec_type) {
+ NetEqDecoder codec_type,
+ const std::string& name) {
if (rtp_payload_type > 0x7F) {
return kInvalidRtpPayloadType;
}
if (!CodecSupported(codec_type)) {
return kCodecNotSupported;
}
- int fs_hz = CodecSampleRateHz(codec_type);
- std::pair<DecoderMap::iterator, bool> ret;
- DecoderInfo info(codec_type, fs_hz, NULL, false);
- ret = decoders_.insert(std::make_pair(rtp_payload_type, info));
+ const int fs_hz = CodecSampleRateHz(codec_type);
+ DecoderInfo info(codec_type, name, fs_hz, NULL, false);
+ auto ret = decoders_.insert(std::make_pair(rtp_payload_type, info));
if (ret.second == false) {
// Database already contains a decoder with type |rtp_payload_type|.
return kDecoderExists;
@@ -58,6 +59,7 @@ int DecoderDatabase::RegisterPayload(uint8_t rtp_payload_type,
int DecoderDatabase::InsertExternal(uint8_t rtp_payload_type,
NetEqDecoder codec_type,
+ const std::string& codec_name,
int fs_hz,
AudioDecoder* decoder) {
if (rtp_payload_type > 0x7F) {
@@ -73,7 +75,7 @@ int DecoderDatabase::InsertExternal(uint8_t rtp_payload_type,
return kInvalidPointer;
}
std::pair<DecoderMap::iterator, bool> ret;
- DecoderInfo info(codec_type, fs_hz, decoder, true);
+ DecoderInfo info(codec_type, codec_name, fs_hz, decoder, true);
ret = decoders_.insert(std::make_pair(rtp_payload_type, info));
if (ret.second == false) {
// Database already contains a decoder with type |rtp_payload_type|.
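
A minimal sketch of the widened registration call, mirroring how the unit
tests later in this change use it (payload type and name arbitrary):

  #include "webrtc/modules/audio_coding/neteq/decoder_database.h"

  void RegisterPcmu() {
    webrtc::DecoderDatabase db;
    // The name only populates DecoderInfo::name; any string, including "",
    // is accepted.
    const int result =
        db.RegisterPayload(0, webrtc::NetEqDecoder::kDecoderPCMu, "pcmu");
    // result == webrtc::DecoderDatabase::kOK on success.
  }
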
diff --git a/webrtc/modules/audio_coding/neteq/decoder_database.h b/webrtc/modules/audio_coding/neteq/decoder_database.h
index ea70997c14..f34904fda8 100644
--- a/webrtc/modules/audio_coding/neteq/decoder_database.h
+++ b/webrtc/modules/audio_coding/neteq/decoder_database.h
@@ -12,8 +12,10 @@
#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_DECODER_DATABASE_H_
#include <map>
+#include <string>
#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/scoped_ptr.h"
#include "webrtc/common_types.h" // NULL
#include "webrtc/modules/audio_coding/neteq/audio_decoder_impl.h"
#include "webrtc/modules/audio_coding/neteq/packet.h"
@@ -35,25 +37,28 @@ class DecoderDatabase {
// Struct used to store decoder info in the database.
struct DecoderInfo {
- // Constructors.
- DecoderInfo()
- : codec_type(NetEqDecoder::kDecoderArbitrary),
- fs_hz(8000),
- decoder(NULL),
- external(false) {}
+ DecoderInfo() = default;
DecoderInfo(NetEqDecoder ct, int fs, AudioDecoder* dec, bool ext)
+ : DecoderInfo(ct, "", fs, dec, ext) {}
+ DecoderInfo(NetEqDecoder ct,
+ const std::string& nm,
+ int fs,
+ AudioDecoder* dec,
+ bool ext)
: codec_type(ct),
+ name(nm),
fs_hz(fs),
+ rtp_sample_rate_hz(fs),
decoder(dec),
- external(ext) {
- }
- // Destructor. (Defined in decoder_database.cc.)
+ external(ext) {}
~DecoderInfo();
- NetEqDecoder codec_type;
- int fs_hz;
- AudioDecoder* decoder;
- bool external;
+ NetEqDecoder codec_type = NetEqDecoder::kDecoderArbitrary;
+ std::string name;
+ int fs_hz = 8000;
+ int rtp_sample_rate_hz = 8000;
+ AudioDecoder* decoder = nullptr;
+ bool external = false;
};
// Maximum value for 8 bits, and an invalid RTP payload type (since it is
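
The struct refactor above replaces the hand-written default constructor with
in-class member initializers plus a delegating constructor. A sketch of the
resulting behavior (values taken from the initializers above):

  // Default construction relies on the in-class initializers:
  webrtc::DecoderDatabase::DecoderInfo def;
  // def.codec_type == NetEqDecoder::kDecoderArbitrary, def.fs_hz == 8000,
  // def.decoder == nullptr, def.external == false, def.name == "".

  // The four-argument form delegates with an empty name:
  webrtc::DecoderDatabase::DecoderInfo info(
      webrtc::NetEqDecoder::kDecoderPCMu, 16000, nullptr, false);
  // info.name == "" and info.rtp_sample_rate_hz == 16000 (set with fs_hz).
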
@@ -75,16 +80,21 @@ class DecoderDatabase {
// using InsertExternal().
virtual void Reset();
- // Registers |rtp_payload_type| as a decoder of type |codec_type|. Returns
- // kOK on success; otherwise an error code.
+ // Registers |rtp_payload_type| as a decoder of type |codec_type|. The |name|
+ // is only used to populate the name field in the DecoderInfo struct in the
+ // database, and can be arbitrary (including empty). Returns kOK on success;
+ // otherwise an error code.
virtual int RegisterPayload(uint8_t rtp_payload_type,
- NetEqDecoder codec_type);
+ NetEqDecoder codec_type,
+ const std::string& name);
// Registers an externally created AudioDecoder object, and associates it
// as a decoder of type |codec_type| with |rtp_payload_type|.
virtual int InsertExternal(uint8_t rtp_payload_type,
NetEqDecoder codec_type,
- int fs_hz, AudioDecoder* decoder);
+ const std::string& codec_name,
+ int fs_hz,
+ AudioDecoder* decoder);
// Removes the entry for |rtp_payload_type| from the database.
// Returns kDecoderNotFound or kOK depending on the outcome of the operation.
diff --git a/webrtc/modules/audio_coding/neteq/decoder_database_unittest.cc b/webrtc/modules/audio_coding/neteq/decoder_database_unittest.cc
index e85d8d32fb..85aaef1143 100644
--- a/webrtc/modules/audio_coding/neteq/decoder_database_unittest.cc
+++ b/webrtc/modules/audio_coding/neteq/decoder_database_unittest.cc
@@ -19,7 +19,6 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/modules/audio_coding/neteq/mock/mock_audio_decoder.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
namespace webrtc {
@@ -32,8 +31,10 @@ TEST(DecoderDatabase, CreateAndDestroy) {
TEST(DecoderDatabase, InsertAndRemove) {
DecoderDatabase db;
const uint8_t kPayloadType = 0;
- EXPECT_EQ(DecoderDatabase::kOK,
- db.RegisterPayload(kPayloadType, NetEqDecoder::kDecoderPCMu));
+ const std::string kCodecName = "Robert\'); DROP TABLE Students;";
+ EXPECT_EQ(
+ DecoderDatabase::kOK,
+ db.RegisterPayload(kPayloadType, NetEqDecoder::kDecoderPCMu, kCodecName));
EXPECT_EQ(1, db.Size());
EXPECT_FALSE(db.Empty());
EXPECT_EQ(DecoderDatabase::kOK, db.Remove(kPayloadType));
@@ -44,14 +45,17 @@ TEST(DecoderDatabase, InsertAndRemove) {
TEST(DecoderDatabase, GetDecoderInfo) {
DecoderDatabase db;
const uint8_t kPayloadType = 0;
- EXPECT_EQ(DecoderDatabase::kOK,
- db.RegisterPayload(kPayloadType, NetEqDecoder::kDecoderPCMu));
+ const std::string kCodecName = "Robert\'); DROP TABLE Students;";
+ EXPECT_EQ(
+ DecoderDatabase::kOK,
+ db.RegisterPayload(kPayloadType, NetEqDecoder::kDecoderPCMu, kCodecName));
const DecoderDatabase::DecoderInfo* info;
info = db.GetDecoderInfo(kPayloadType);
ASSERT_TRUE(info != NULL);
EXPECT_EQ(NetEqDecoder::kDecoderPCMu, info->codec_type);
EXPECT_EQ(NULL, info->decoder);
EXPECT_EQ(8000, info->fs_hz);
+ EXPECT_EQ(kCodecName, info->name);
EXPECT_FALSE(info->external);
info = db.GetDecoderInfo(kPayloadType + 1); // Other payload type.
EXPECT_TRUE(info == NULL); // Should not be found.
@@ -60,8 +64,10 @@ TEST(DecoderDatabase, GetDecoderInfo) {
TEST(DecoderDatabase, GetRtpPayloadType) {
DecoderDatabase db;
const uint8_t kPayloadType = 0;
- EXPECT_EQ(DecoderDatabase::kOK,
- db.RegisterPayload(kPayloadType, NetEqDecoder::kDecoderPCMu));
+ const std::string kCodecName = "Robert\'); DROP TABLE Students;";
+ EXPECT_EQ(
+ DecoderDatabase::kOK,
+ db.RegisterPayload(kPayloadType, NetEqDecoder::kDecoderPCMu, kCodecName));
EXPECT_EQ(kPayloadType, db.GetRtpPayloadType(NetEqDecoder::kDecoderPCMu));
const uint8_t expected_value = DecoderDatabase::kRtpPayloadTypeError;
EXPECT_EQ(expected_value,
@@ -72,8 +78,10 @@ TEST(DecoderDatabase, GetRtpPayloadType) {
TEST(DecoderDatabase, GetDecoder) {
DecoderDatabase db;
const uint8_t kPayloadType = 0;
+ const std::string kCodecName = "Robert\'); DROP TABLE Students;";
EXPECT_EQ(DecoderDatabase::kOK,
- db.RegisterPayload(kPayloadType, NetEqDecoder::kDecoderPCM16B));
+ db.RegisterPayload(kPayloadType, NetEqDecoder::kDecoderPCM16B,
+ kCodecName));
AudioDecoder* dec = db.GetDecoder(kPayloadType);
ASSERT_TRUE(dec != NULL);
}
@@ -86,14 +94,18 @@ TEST(DecoderDatabase, TypeTests) {
const uint8_t kPayloadTypeRed = 101;
const uint8_t kPayloadNotUsed = 102;
// Load into database.
+ EXPECT_EQ(
+ DecoderDatabase::kOK,
+ db.RegisterPayload(kPayloadTypePcmU, NetEqDecoder::kDecoderPCMu, "pcmu"));
EXPECT_EQ(DecoderDatabase::kOK,
- db.RegisterPayload(kPayloadTypePcmU, NetEqDecoder::kDecoderPCMu));
- EXPECT_EQ(DecoderDatabase::kOK,
- db.RegisterPayload(kPayloadTypeCng, NetEqDecoder::kDecoderCNGnb));
- EXPECT_EQ(DecoderDatabase::kOK,
- db.RegisterPayload(kPayloadTypeDtmf, NetEqDecoder::kDecoderAVT));
- EXPECT_EQ(DecoderDatabase::kOK,
- db.RegisterPayload(kPayloadTypeRed, NetEqDecoder::kDecoderRED));
+ db.RegisterPayload(kPayloadTypeCng, NetEqDecoder::kDecoderCNGnb,
+ "cng-nb"));
+ EXPECT_EQ(
+ DecoderDatabase::kOK,
+ db.RegisterPayload(kPayloadTypeDtmf, NetEqDecoder::kDecoderAVT, "avt"));
+ EXPECT_EQ(
+ DecoderDatabase::kOK,
+ db.RegisterPayload(kPayloadTypeRed, NetEqDecoder::kDecoderRED, "red"));
EXPECT_EQ(4, db.Size());
// Test.
EXPECT_FALSE(db.IsComfortNoise(kPayloadNotUsed));
@@ -112,11 +124,12 @@ TEST(DecoderDatabase, TypeTests) {
TEST(DecoderDatabase, ExternalDecoder) {
DecoderDatabase db;
const uint8_t kPayloadType = 0;
+ const std::string kCodecName = "Robert\'); DROP TABLE Students;";
MockAudioDecoder decoder;
// Load into database.
EXPECT_EQ(DecoderDatabase::kOK,
- db.InsertExternal(kPayloadType, NetEqDecoder::kDecoderPCMu, 8000,
- &decoder));
+ db.InsertExternal(kPayloadType, NetEqDecoder::kDecoderPCMu,
+ kCodecName, 8000, &decoder));
EXPECT_EQ(1, db.Size());
// Get decoder and make sure we get the external one.
EXPECT_EQ(&decoder, db.GetDecoder(kPayloadType));
@@ -125,6 +138,7 @@ TEST(DecoderDatabase, ExternalDecoder) {
info = db.GetDecoderInfo(kPayloadType);
ASSERT_TRUE(info != NULL);
EXPECT_EQ(NetEqDecoder::kDecoderPCMu, info->codec_type);
+ EXPECT_EQ(kCodecName, info->name);
EXPECT_EQ(&decoder, info->decoder);
EXPECT_EQ(8000, info->fs_hz);
EXPECT_TRUE(info->external);
@@ -146,7 +160,7 @@ TEST(DecoderDatabase, CheckPayloadTypes) {
for (uint8_t payload_type = 0; payload_type < kNumPayloads; ++payload_type) {
EXPECT_EQ(
DecoderDatabase::kOK,
- db.RegisterPayload(payload_type, NetEqDecoder::kDecoderArbitrary));
+ db.RegisterPayload(payload_type, NetEqDecoder::kDecoderArbitrary, ""));
}
PacketList packet_list;
for (int i = 0; i < kNumPayloads + 1; ++i) {
@@ -185,11 +199,11 @@ TEST(DecoderDatabase, IF_ISAC(ActiveDecoders)) {
DecoderDatabase db;
// Load payload types.
ASSERT_EQ(DecoderDatabase::kOK,
- db.RegisterPayload(0, NetEqDecoder::kDecoderPCMu));
+ db.RegisterPayload(0, NetEqDecoder::kDecoderPCMu, "pcmu"));
ASSERT_EQ(DecoderDatabase::kOK,
- db.RegisterPayload(103, NetEqDecoder::kDecoderISAC));
+ db.RegisterPayload(103, NetEqDecoder::kDecoderISAC, "isac"));
ASSERT_EQ(DecoderDatabase::kOK,
- db.RegisterPayload(13, NetEqDecoder::kDecoderCNGnb));
+ db.RegisterPayload(13, NetEqDecoder::kDecoderCNGnb, "cng-nb"));
// Verify that no decoders are active from the start.
EXPECT_EQ(NULL, db.GetActiveDecoder());
EXPECT_EQ(NULL, db.GetActiveCngDecoder());
diff --git a/webrtc/modules/audio_coding/neteq/delay_manager.cc b/webrtc/modules/audio_coding/neteq/delay_manager.cc
index 5140c0620f..806d02b8de 100644
--- a/webrtc/modules/audio_coding/neteq/delay_manager.cc
+++ b/webrtc/modules/audio_coding/neteq/delay_manager.cc
@@ -17,7 +17,7 @@
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
#include "webrtc/modules/audio_coding/neteq/delay_peak_detector.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/system_wrappers/include/logging.h"
namespace webrtc {
diff --git a/webrtc/modules/audio_coding/neteq/expand.cc b/webrtc/modules/audio_coding/neteq/expand.cc
index 2aa9fb0a8d..ef7af46597 100644
--- a/webrtc/modules/audio_coding/neteq/expand.cc
+++ b/webrtc/modules/audio_coding/neteq/expand.cc
@@ -519,7 +519,7 @@ void Expand::AnalyzeSignal(int16_t* random_vector) {
energy2 = WebRtcSpl_DotProductWithScale(vector2, vector2, expansion_length,
correlation_scale);
// Confirm that amplitude ratio sqrt(energy1 / energy2) is within 0.5 - 2.0,
- // i.e., energy1 / energy1 is within 0.25 - 4.
+ // i.e., energy1 / energy2 is within 0.25 - 4.
int16_t amplitude_ratio;
if ((energy1 / 4 < energy2) && (energy1 > energy2 / 4)) {
// Energy constraint fulfilled. Use both vectors and scale them
diff --git a/webrtc/modules/audio_coding/neteq/include/neteq.h b/webrtc/modules/audio_coding/neteq/include/neteq.h
index 205a0dfe80..1322223970 100644
--- a/webrtc/modules/audio_coding/neteq/include/neteq.h
+++ b/webrtc/modules/audio_coding/neteq/include/neteq.h
@@ -81,6 +81,7 @@ class NetEq {
Config()
: sample_rate_hz(16000),
enable_audio_classifier(false),
+ enable_post_decode_vad(false),
max_packets_in_buffer(50),
// |max_delay_ms| has the same effect as calling SetMaximumDelay().
max_delay_ms(2000),
@@ -92,6 +93,7 @@ class NetEq {
int sample_rate_hz; // Initial value. Will change with input data.
bool enable_audio_classifier;
+ bool enable_post_decode_vad;
size_t max_packets_in_buffer;
int max_delay_ms;
BackgroundNoiseMode background_noise_mode;
@@ -145,8 +147,7 @@ class NetEq {
// the same tick rate as the RTP timestamp of the current payload.
// Returns 0 on success, -1 on failure.
virtual int InsertPacket(const WebRtcRTPHeader& rtp_header,
- const uint8_t* payload,
- size_t length_bytes,
+ rtc::ArrayView<const uint8_t> payload,
uint32_t receive_timestamp) = 0;
// Inserts a sync-packet into packet queue. Sync-packets are decoded to
@@ -170,20 +171,27 @@ class NetEq {
// The speech type is written to |type|, if |type| is not NULL.
// Returns kOK on success, or kFail in case of an error.
virtual int GetAudio(size_t max_length, int16_t* output_audio,
- size_t* samples_per_channel, int* num_channels,
+ size_t* samples_per_channel, size_t* num_channels,
NetEqOutputType* type) = 0;
- // Associates |rtp_payload_type| with |codec| and stores the information in
- // the codec database. Returns 0 on success, -1 on failure.
+ // Associates |rtp_payload_type| with |codec| and |codec_name|, and stores the
+ // information in the codec database. Returns 0 on success, -1 on failure.
+ // The name is only used to provide information back to the caller about the
+ // decoders. Hence, the name is arbitrary, and may be empty.
virtual int RegisterPayloadType(NetEqDecoder codec,
+ const std::string& codec_name,
uint8_t rtp_payload_type) = 0;
// Provides an externally created decoder object |decoder| to insert in the
// decoder database. The decoder implements a decoder of type |codec| and
- // associates it with |rtp_payload_type|. The decoder will produce samples
- // at the rate |sample_rate_hz|. Returns kOK on success, kFail on failure.
+ // associates it with |rtp_payload_type| and |codec_name|. The decoder will
+ // produce samples at the rate |sample_rate_hz|. Returns kOK on success, kFail
+ // on failure.
+ // The name is only used to provide information back to the caller about the
+ // decoders. Hence, the name is arbitrary, and may be empty.
virtual int RegisterExternalDecoder(AudioDecoder* decoder,
NetEqDecoder codec,
+ const std::string& codec_name,
uint8_t rtp_payload_type,
int sample_rate_hz) = 0;
@@ -250,6 +258,11 @@ class NetEq {
// Returns true if the RTP timestamp is valid, otherwise false.
virtual bool GetPlayoutTimestamp(uint32_t* timestamp) = 0;
+ // Returns the sample rate in Hz of the audio produced in the last GetAudio
+ // call. If GetAudio has not been called yet, the configured sample rate
+ // (Config::sample_rate_hz) is returned.
+ virtual int last_output_sample_rate_hz() const = 0;
+
// Not implemented.
virtual int SetTargetNumberOfChannels() = 0;
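
Taken together, the NetEq interface changes above thread an informational
codec name through registration and move the payload length into the payload
view itself. A hedged usage sketch (the |neteq| pointer, payload buffer, and
header variables are hypothetical):

  // Registration now carries a codec name, which may be empty.
  neteq->RegisterPayloadType(webrtc::NetEqDecoder::kDecoderPCMu, "pcmu", 0);

  // InsertPacket takes the payload as an ArrayView; the length travels with
  // the view instead of a separate length_bytes argument.
  rtc::ArrayView<const uint8_t> payload(payload_bytes, payload_size);
  neteq->InsertPacket(rtp_header, payload, receive_timestamp);

  // After the first GetAudio call, the rate of the produced audio can be
  // queried; before that, the configured rate is returned.
  int hz = neteq->last_output_sample_rate_hz();
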
diff --git a/webrtc/modules/audio_coding/neteq/mock/mock_audio_decoder.h b/webrtc/modules/audio_coding/neteq/mock/mock_audio_decoder.h
index 8debcbbb1e..c1cc09cb5e 100644
--- a/webrtc/modules/audio_coding/neteq/mock/mock_audio_decoder.h
+++ b/webrtc/modules/audio_coding/neteq/mock/mock_audio_decoder.h
@@ -22,9 +22,8 @@ class MockAudioDecoder : public AudioDecoder {
MockAudioDecoder() {}
virtual ~MockAudioDecoder() { Die(); }
MOCK_METHOD0(Die, void());
- MOCK_METHOD6(
- Decode,
- int(const uint8_t*, size_t, int, size_t, int16_t*, SpeechType*));
+ MOCK_METHOD5(DecodeInternal,
+ int(const uint8_t*, size_t, int, int16_t*, SpeechType*));
MOCK_CONST_METHOD0(HasDecodePlc, bool());
MOCK_METHOD2(DecodePlc, size_t(size_t, int16_t*));
MOCK_METHOD0(Reset, void());
diff --git a/webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h b/webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h
index d127c5d810..1b4a3c9da5 100644
--- a/webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h
+++ b/webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h
@@ -11,6 +11,8 @@
#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_DECODER_DATABASE_H_
#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_DECODER_DATABASE_H_
+#include <string>
+
#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
#include "testing/gmock/include/gmock/gmock.h"
@@ -27,10 +29,12 @@ class MockDecoderDatabase : public DecoderDatabase {
int());
MOCK_METHOD0(Reset,
void());
- MOCK_METHOD2(RegisterPayload,
- int(uint8_t rtp_payload_type, NetEqDecoder codec_type));
- MOCK_METHOD4(InsertExternal,
- int(uint8_t rtp_payload_type, NetEqDecoder codec_type, int fs_hz,
+ MOCK_METHOD3(RegisterPayload,
+ int(uint8_t rtp_payload_type, NetEqDecoder codec_type,
+ const std::string& name));
+ MOCK_METHOD5(InsertExternal,
+ int(uint8_t rtp_payload_type, NetEqDecoder codec_type,
+ const std::string& codec_name, int fs_hz,
AudioDecoder* decoder));
MOCK_METHOD1(Remove,
int(uint8_t rtp_payload_type));
diff --git a/webrtc/modules/audio_coding/neteq/mock/mock_external_decoder_pcm16b.h b/webrtc/modules/audio_coding/neteq/mock/mock_external_decoder_pcm16b.h
index 8cf89c083d..42c17ae054 100644
--- a/webrtc/modules/audio_coding/neteq/mock/mock_external_decoder_pcm16b.h
+++ b/webrtc/modules/audio_coding/neteq/mock/mock_external_decoder_pcm16b.h
@@ -15,7 +15,7 @@
#include "testing/gmock/include/gmock/gmock.h"
#include "webrtc/base/constructormagic.h"
-#include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h"
+#include "webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -30,7 +30,6 @@ class ExternalPcm16B : public AudioDecoder {
ExternalPcm16B() {}
void Reset() override {}
- protected:
int DecodeInternal(const uint8_t* encoded,
size_t encoded_len,
int sample_rate_hz,
@@ -52,8 +51,8 @@ class MockExternalPcm16B : public ExternalPcm16B {
public:
MockExternalPcm16B() {
// By default, all calls are delegated to the real object.
- ON_CALL(*this, Decode(_, _, _, _, _, _))
- .WillByDefault(Invoke(&real_, &ExternalPcm16B::Decode));
+ ON_CALL(*this, DecodeInternal(_, _, _, _, _))
+ .WillByDefault(Invoke(&real_, &ExternalPcm16B::DecodeInternal));
ON_CALL(*this, HasDecodePlc())
.WillByDefault(Invoke(&real_, &ExternalPcm16B::HasDecodePlc));
ON_CALL(*this, DecodePlc(_, _))
@@ -68,11 +67,10 @@ class MockExternalPcm16B : public ExternalPcm16B {
virtual ~MockExternalPcm16B() { Die(); }
MOCK_METHOD0(Die, void());
- MOCK_METHOD6(Decode,
+ MOCK_METHOD5(DecodeInternal,
int(const uint8_t* encoded,
size_t encoded_len,
int sample_rate_hz,
- size_t max_decoded_bytes,
int16_t* decoded,
SpeechType* speech_type));
MOCK_CONST_METHOD0(HasDecodePlc,
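
The ON_CALL(...).WillByDefault(Invoke(&real_, ...)) lines above implement the delegate-to-real-object idiom: any call without an explicit expectation falls through to a real instance, so tests only need to stub what they care about. A self-contained sketch of the same idiom with illustrative names:

    #include "testing/gmock/include/gmock/gmock.h"

    class Calculator {
     public:
      virtual ~Calculator() {}
      virtual int Square(int x) { return x * x; }
    };

    class MockCalculator : public Calculator {
     public:
      MockCalculator() {
        // Unstubbed calls fall through to the real implementation.
        ON_CALL(*this, Square(testing::_))
            .WillByDefault(testing::Invoke(&real_, &Calculator::Square));
      }
      MOCK_METHOD1(Square, int(int x));

     private:
      Calculator real_;
    };
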
diff --git a/webrtc/modules/audio_coding/neteq/nack.cc b/webrtc/modules/audio_coding/neteq/nack.cc
index fd3d762605..011914b3d9 100644
--- a/webrtc/modules/audio_coding/neteq/nack.cc
+++ b/webrtc/modules/audio_coding/neteq/nack.cc
@@ -15,7 +15,7 @@
#include <algorithm> // For std::max.
#include "webrtc/base/checks.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/system_wrappers/include/logging.h"
namespace webrtc {
diff --git a/webrtc/modules/audio_coding/neteq/nack.h b/webrtc/modules/audio_coding/neteq/nack.h
index 116b7e2192..17fef46464 100644
--- a/webrtc/modules/audio_coding/neteq/nack.h
+++ b/webrtc/modules/audio_coding/neteq/nack.h
@@ -15,7 +15,7 @@
#include <map>
#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module_typedefs.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module_typedefs.h"
#include "webrtc/test/testsupport/gtest_prod_util.h"
//
diff --git a/webrtc/modules/audio_coding/neteq/nack_unittest.cc b/webrtc/modules/audio_coding/neteq/nack_unittest.cc
index 853af94ede..53b19dc50f 100644
--- a/webrtc/modules/audio_coding/neteq/nack_unittest.cc
+++ b/webrtc/modules/audio_coding/neteq/nack_unittest.cc
@@ -17,7 +17,7 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/typedefs.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module_typedefs.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module_typedefs.h"
namespace webrtc {
namespace {
diff --git a/webrtc/modules/audio_coding/neteq/neteq.cc b/webrtc/modules/audio_coding/neteq/neteq.cc
index ca51c9602d..c31dbdc1a3 100644
--- a/webrtc/modules/audio_coding/neteq/neteq.cc
+++ b/webrtc/modules/audio_coding/neteq/neteq.cc
@@ -32,6 +32,8 @@ std::string NetEq::Config::ToString() const {
std::stringstream ss;
ss << "sample_rate_hz=" << sample_rate_hz << ", enable_audio_classifier="
<< (enable_audio_classifier ? "true" : "false")
+ << ", enable_post_decode_vad="
+ << (enable_post_decode_vad ? "true" : "false")
<< ", max_packets_in_buffer=" << max_packets_in_buffer
<< ", background_noise_mode=" << background_noise_mode
<< ", playout_mode=" << playout_mode
diff --git a/webrtc/modules/audio_coding/neteq/neteq_external_decoder_unittest.cc b/webrtc/modules/audio_coding/neteq/neteq_external_decoder_unittest.cc
index 09eb5614fe..c03fbb7347 100644
--- a/webrtc/modules/audio_coding/neteq/neteq_external_decoder_unittest.cc
+++ b/webrtc/modules/audio_coding/neteq/neteq_external_decoder_unittest.cc
@@ -98,14 +98,16 @@ class NetEqExternalDecoderUnitTest : public test::NetEqExternalDecoderTest {
next_arrival_time = GetArrivalTime(next_send_time);
} while (Lost()); // If lost, immediately read the next packet.
- EXPECT_CALL(*external_decoder_,
- Decode(_, payload_size_bytes_, 1000 * samples_per_ms_, _, _, _))
+ EXPECT_CALL(
+ *external_decoder_,
+ DecodeInternal(_, payload_size_bytes_, 1000 * samples_per_ms_, _, _))
.Times(NumExpectedDecodeCalls(num_loops));
uint32_t time_now = 0;
for (int k = 0; k < num_loops; ++k) {
while (time_now >= next_arrival_time) {
- InsertPacket(rtp_header_, encoded_, payload_size_bytes_,
+ InsertPacket(rtp_header_, rtc::ArrayView<const uint8_t>(
+ encoded_, payload_size_bytes_),
next_arrival_time);
// Get next input packet.
do {
@@ -124,17 +126,14 @@ class NetEqExternalDecoderUnitTest : public test::NetEqExternalDecoderTest {
}
}
- void InsertPacket(WebRtcRTPHeader rtp_header, const uint8_t* payload,
- size_t payload_size_bytes,
+ void InsertPacket(WebRtcRTPHeader rtp_header,
+ rtc::ArrayView<const uint8_t> payload,
uint32_t receive_timestamp) override {
- EXPECT_CALL(*external_decoder_,
- IncomingPacket(_,
- payload_size_bytes,
- rtp_header.header.sequenceNumber,
- rtp_header.header.timestamp,
- receive_timestamp));
+ EXPECT_CALL(
+ *external_decoder_,
+ IncomingPacket(_, payload.size(), rtp_header.header.sequenceNumber,
+ rtp_header.header.timestamp, receive_timestamp));
NetEqExternalDecoderTest::InsertPacket(rtp_header, payload,
- payload_size_bytes,
receive_timestamp);
}
@@ -181,15 +180,15 @@ class NetEqExternalVsInternalDecoderTest : public NetEqExternalDecoderUnitTest,
}
void SetUp() override {
- ASSERT_EQ(NetEq::kOK,
- neteq_internal_->RegisterPayloadType(
- NetEqDecoder::kDecoderPCM16Bswb32kHz, kPayloadType));
+ ASSERT_EQ(NetEq::kOK, neteq_internal_->RegisterPayloadType(
+ NetEqDecoder::kDecoderPCM16Bswb32kHz,
+ "pcm16-swb32", kPayloadType));
}
void GetAndVerifyOutput() override {
NetEqOutputType output_type;
size_t samples_per_channel;
- int num_channels;
+ size_t num_channels;
// Get audio from internal decoder instance.
EXPECT_EQ(NetEq::kOK,
neteq_internal_->GetAudio(kMaxBlockSize,
@@ -197,7 +196,7 @@ class NetEqExternalVsInternalDecoderTest : public NetEqExternalDecoderUnitTest,
&samples_per_channel,
&num_channels,
&output_type));
- EXPECT_EQ(1, num_channels);
+ EXPECT_EQ(1u, num_channels);
EXPECT_EQ(static_cast<size_t>(kOutputLengthMs * sample_rate_hz_ / 1000),
samples_per_channel);
@@ -210,18 +209,15 @@ class NetEqExternalVsInternalDecoderTest : public NetEqExternalDecoderUnitTest,
}
}
- void InsertPacket(WebRtcRTPHeader rtp_header, const uint8_t* payload,
- size_t payload_size_bytes,
+ void InsertPacket(WebRtcRTPHeader rtp_header,
+ rtc::ArrayView<const uint8_t> payload,
uint32_t receive_timestamp) override {
// Insert packet in internal decoder.
- ASSERT_EQ(
- NetEq::kOK,
- neteq_internal_->InsertPacket(
- rtp_header, payload, payload_size_bytes, receive_timestamp));
+ ASSERT_EQ(NetEq::kOK, neteq_internal_->InsertPacket(rtp_header, payload,
+ receive_timestamp));
// Insert packet in external decoder instance.
NetEqExternalDecoderUnitTest::InsertPacket(rtp_header, payload,
- payload_size_bytes,
receive_timestamp);
}
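
The recurring change in this test (and throughout the rest of the diff) folds the (const uint8_t*, size_t) parameter pair into a single rtc::ArrayView<const uint8_t>. A simplified sketch of what such a view provides, written from the usage visible here; the real class in the WebRTC tree has more conveniences:

    #include <cstddef>

    template <typename T>
    class ArrayViewSketch {
     public:
      ArrayViewSketch(T* data, size_t size) : data_(data), size_(size) {}
      // Implicit construction from a C array deduces the size at compile time.
      template <size_t N>
      ArrayViewSketch(T (&array)[N]) : data_(array), size_(N) {}
      T* data() const { return data_; }
      size_t size() const { return size_; }
      bool empty() const { return size_ == 0; }

     private:
      T* const data_;
      const size_t size_;
    };
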
diff --git a/webrtc/modules/audio_coding/neteq/neteq_impl.cc b/webrtc/modules/audio_coding/neteq/neteq_impl.cc
index 92ce41e2ea..6c07da46f0 100644
--- a/webrtc/modules/audio_coding/neteq/neteq_impl.cc
+++ b/webrtc/modules/audio_coding/neteq/neteq_impl.cc
@@ -18,6 +18,7 @@
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/safe_conversions.h"
+#include "webrtc/base/trace_event.h"
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
#include "webrtc/modules/audio_coding/neteq/accelerate.h"
@@ -42,7 +43,7 @@
#include "webrtc/modules/audio_coding/neteq/preemptive_expand.h"
#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
#include "webrtc/modules/audio_coding/neteq/timestamp_scaler.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
// Modify the code to obtain backwards bit-exactness. Once bit-exactness is no
@@ -106,28 +107,28 @@ NetEqImpl::NetEqImpl(const NetEq::Config& config,
}
fs_hz_ = fs;
fs_mult_ = fs / 8000;
+ last_output_sample_rate_hz_ = fs;
output_size_samples_ = static_cast<size_t>(kOutputSizeMs * 8 * fs_mult_);
decoder_frame_length_ = 3 * output_size_samples_;
WebRtcSpl_Init();
if (create_components) {
SetSampleRateAndChannels(fs, 1); // Default is 1 channel.
}
+ RTC_DCHECK(!vad_->enabled());
+ if (config.enable_post_decode_vad) {
+ vad_->Enable();
+ }
}
NetEqImpl::~NetEqImpl() = default;
int NetEqImpl::InsertPacket(const WebRtcRTPHeader& rtp_header,
- const uint8_t* payload,
- size_t length_bytes,
+ rtc::ArrayView<const uint8_t> payload,
uint32_t receive_timestamp) {
+ TRACE_EVENT0("webrtc", "NetEqImpl::InsertPacket");
CriticalSectionScoped lock(crit_sect_.get());
- LOG(LS_VERBOSE) << "InsertPacket: ts=" << rtp_header.header.timestamp <<
- ", sn=" << rtp_header.header.sequenceNumber <<
- ", pt=" << static_cast<int>(rtp_header.header.payloadType) <<
- ", ssrc=" << rtp_header.header.ssrc <<
- ", len=" << length_bytes;
- int error = InsertPacketInternal(rtp_header, payload, length_bytes,
- receive_timestamp, false);
+ int error =
+ InsertPacketInternal(rtp_header, payload, receive_timestamp, false);
if (error != 0) {
error_code_ = error;
return kFail;
@@ -138,15 +139,9 @@ int NetEqImpl::InsertPacket(const WebRtcRTPHeader& rtp_header,
int NetEqImpl::InsertSyncPacket(const WebRtcRTPHeader& rtp_header,
uint32_t receive_timestamp) {
CriticalSectionScoped lock(crit_sect_.get());
- LOG(LS_VERBOSE) << "InsertPacket-Sync: ts="
- << rtp_header.header.timestamp <<
- ", sn=" << rtp_header.header.sequenceNumber <<
- ", pt=" << static_cast<int>(rtp_header.header.payloadType) <<
- ", ssrc=" << rtp_header.header.ssrc;
-
const uint8_t kSyncPayload[] = { 's', 'y', 'n', 'c' };
- int error = InsertPacketInternal(
- rtp_header, kSyncPayload, sizeof(kSyncPayload), receive_timestamp, true);
+ int error =
+ InsertPacketInternal(rtp_header, kSyncPayload, receive_timestamp, true);
if (error != 0) {
error_code_ = error;
@@ -156,14 +151,12 @@ int NetEqImpl::InsertSyncPacket(const WebRtcRTPHeader& rtp_header,
}
int NetEqImpl::GetAudio(size_t max_length, int16_t* output_audio,
- size_t* samples_per_channel, int* num_channels,
+ size_t* samples_per_channel, size_t* num_channels,
NetEqOutputType* type) {
+ TRACE_EVENT0("webrtc", "NetEqImpl::GetAudio");
CriticalSectionScoped lock(crit_sect_.get());
- LOG(LS_VERBOSE) << "GetAudio";
int error = GetAudioInternal(max_length, output_audio, samples_per_channel,
num_channels);
- LOG(LS_VERBOSE) << "Produced " << *samples_per_channel <<
- " samples/channel for " << *num_channels << " channel(s)";
if (error != 0) {
error_code_ = error;
return kFail;
@@ -171,16 +164,24 @@ int NetEqImpl::GetAudio(size_t max_length, int16_t* output_audio,
if (type) {
*type = LastOutputType();
}
+ last_output_sample_rate_hz_ =
+ rtc::checked_cast<int>(*samples_per_channel * 100);
+ RTC_DCHECK(last_output_sample_rate_hz_ == 8000 ||
+ last_output_sample_rate_hz_ == 16000 ||
+ last_output_sample_rate_hz_ == 32000 ||
+ last_output_sample_rate_hz_ == 48000)
+ << "Unexpected sample rate " << last_output_sample_rate_hz_;
return kOK;
}
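
The derivation above works because GetAudio() always returns 10 ms of audio, so the rate in Hz is simply 100 times the per-channel sample count:

    160 samples/channel * 100 = 16000 Hz
    480 samples/channel * 100 = 48000 Hz

The RTC_DCHECK then rejects anything outside the four supported rates.
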
int NetEqImpl::RegisterPayloadType(NetEqDecoder codec,
+ const std::string& name,
uint8_t rtp_payload_type) {
CriticalSectionScoped lock(crit_sect_.get());
LOG(LS_VERBOSE) << "RegisterPayloadType "
<< static_cast<int>(rtp_payload_type) << " "
<< static_cast<int>(codec);
- int ret = decoder_database_->RegisterPayload(rtp_payload_type, codec);
+ int ret = decoder_database_->RegisterPayload(rtp_payload_type, codec, name);
if (ret != DecoderDatabase::kOK) {
switch (ret) {
case DecoderDatabase::kInvalidRtpPayloadType:
@@ -202,6 +203,7 @@ int NetEqImpl::RegisterPayloadType(NetEqDecoder codec,
int NetEqImpl::RegisterExternalDecoder(AudioDecoder* decoder,
NetEqDecoder codec,
+ const std::string& codec_name,
uint8_t rtp_payload_type,
int sample_rate_hz) {
CriticalSectionScoped lock(crit_sect_.get());
@@ -213,8 +215,8 @@ int NetEqImpl::RegisterExternalDecoder(AudioDecoder* decoder,
assert(false);
return kFail;
}
- int ret = decoder_database_->InsertExternal(rtp_payload_type, codec,
- sample_rate_hz, decoder);
+ int ret = decoder_database_->InsertExternal(
+ rtp_payload_type, codec, codec_name, sample_rate_hz, decoder);
if (ret != DecoderDatabase::kOK) {
switch (ret) {
case DecoderDatabase::kInvalidRtpPayloadType:
@@ -370,6 +372,11 @@ bool NetEqImpl::GetPlayoutTimestamp(uint32_t* timestamp) {
return true;
}
+int NetEqImpl::last_output_sample_rate_hz() const {
+ CriticalSectionScoped lock(crit_sect_.get());
+ return last_output_sample_rate_hz_;
+}
+
int NetEqImpl::SetTargetNumberOfChannels() {
return kNotImplemented;
}
@@ -441,12 +448,11 @@ const SyncBuffer* NetEqImpl::sync_buffer_for_test() const {
// Methods below this line are private.
int NetEqImpl::InsertPacketInternal(const WebRtcRTPHeader& rtp_header,
- const uint8_t* payload,
- size_t length_bytes,
+ rtc::ArrayView<const uint8_t> payload,
uint32_t receive_timestamp,
bool is_sync_packet) {
- if (!payload) {
- LOG_F(LS_ERROR) << "payload == NULL";
+ if (payload.empty()) {
+ LOG_F(LS_ERROR) << "payload is empty";
return kInvalidPointer;
}
// Sanity checks for sync-packets.
@@ -482,7 +488,7 @@ int NetEqImpl::InsertPacketInternal(const WebRtcRTPHeader& rtp_header,
packet->header.timestamp = rtp_header.header.timestamp;
packet->header.ssrc = rtp_header.header.ssrc;
packet->header.numCSRCs = 0;
- packet->payload_length = length_bytes;
+ packet->payload_length = payload.size();
packet->primary = true;
packet->waiting_time = 0;
packet->payload = new uint8_t[packet->payload_length];
@@ -490,8 +496,8 @@ int NetEqImpl::InsertPacketInternal(const WebRtcRTPHeader& rtp_header,
if (!packet->payload) {
LOG_F(LS_ERROR) << "Payload pointer is NULL.";
}
- assert(payload); // Already checked above.
- memcpy(packet->payload, payload, packet->payload_length);
+ assert(!payload.empty()); // Already checked above.
+ memcpy(packet->payload, payload.data(), packet->payload_length);
// Insert packet in a packet list.
packet_list.push_back(packet);
// Save the main payload's header for later.
@@ -738,7 +744,7 @@ int NetEqImpl::InsertPacketInternal(const WebRtcRTPHeader& rtp_header,
int NetEqImpl::GetAudioInternal(size_t max_length,
int16_t* output,
size_t* samples_per_channel,
- int* num_channels) {
+ size_t* num_channels) {
PacketList packet_list;
DtmfEvent dtmf_event;
Operations operation;
@@ -749,8 +755,6 @@ int NetEqImpl::GetAudioInternal(size_t max_length,
last_mode_ = kModeError;
return return_value;
}
- LOG(LS_VERBOSE) << "GetDecision returned operation=" << operation <<
- " and " << packet_list.size() << " packet(s)";
AudioDecoder::SpeechType speech_type;
int length = 0;
@@ -864,10 +868,7 @@ int NetEqImpl::GetAudioInternal(size_t max_length,
const size_t samples_from_sync =
sync_buffer_->GetNextAudioInterleaved(num_output_samples_per_channel,
output);
- *num_channels = static_cast<int>(sync_buffer_->Channels());
- LOG(LS_VERBOSE) << "Sync buffer (" << *num_channels << " channel(s)):" <<
- " insert " << algorithm_buffer_->Size() << " samples, extract " <<
- samples_from_sync << " samples";
+ *num_channels = sync_buffer_->Channels();
if (sync_buffer_->FutureLength() < expand_->overlap_length()) {
// The sync buffer should always contain |overlap_length| samples, but now
// too many samples have been extracted. Reinstall the |overlap_length|
@@ -1325,7 +1326,6 @@ int NetEqImpl::DecodeCng(AudioDecoder* decoder, int* decoded_length,
&decoded_buffer_[*decoded_length], speech_type);
if (length > 0) {
*decoded_length += length;
- LOG(LS_VERBOSE) << "Decoded " << length << " CNG samples";
} else {
// Error.
LOG(LS_WARNING) << "Failed to decode CNG";
@@ -1365,34 +1365,17 @@ int NetEqImpl::DecodeLoop(PacketList* packet_list, const Operations& operation,
int decode_length;
if (packet->sync_packet) {
// Decode to silence with the same frame size as the last decode.
- LOG(LS_VERBOSE) << "Decoding sync-packet: " <<
- " ts=" << packet->header.timestamp <<
- ", sn=" << packet->header.sequenceNumber <<
- ", pt=" << static_cast<int>(packet->header.payloadType) <<
- ", ssrc=" << packet->header.ssrc <<
- ", len=" << packet->payload_length;
memset(&decoded_buffer_[*decoded_length], 0,
decoder_frame_length_ * decoder->Channels() *
sizeof(decoded_buffer_[0]));
decode_length = rtc::checked_cast<int>(decoder_frame_length_);
} else if (!packet->primary) {
// This is a redundant payload; call the special decoder method.
- LOG(LS_VERBOSE) << "Decoding packet (redundant):" <<
- " ts=" << packet->header.timestamp <<
- ", sn=" << packet->header.sequenceNumber <<
- ", pt=" << static_cast<int>(packet->header.payloadType) <<
- ", ssrc=" << packet->header.ssrc <<
- ", len=" << packet->payload_length;
decode_length = decoder->DecodeRedundant(
packet->payload, packet->payload_length, fs_hz_,
(decoded_buffer_length_ - *decoded_length) * sizeof(int16_t),
&decoded_buffer_[*decoded_length], speech_type);
} else {
- LOG(LS_VERBOSE) << "Decoding packet: ts=" << packet->header.timestamp <<
- ", sn=" << packet->header.sequenceNumber <<
- ", pt=" << static_cast<int>(packet->header.payloadType) <<
- ", ssrc=" << packet->header.ssrc <<
- ", len=" << packet->payload_length;
decode_length =
decoder->Decode(
packet->payload, packet->payload_length, fs_hz_,
@@ -1408,9 +1391,6 @@ int NetEqImpl::DecodeLoop(PacketList* packet_list, const Operations& operation,
// Update |decoder_frame_length_| with number of samples per channel.
decoder_frame_length_ =
static_cast<size_t>(decode_length) / decoder->Channels();
- LOG(LS_VERBOSE) << "Decoded " << decode_length << " samples ("
- << decoder->Channels() << " channel(s) -> "
- << decoder_frame_length_ << " samples per channel)";
} else if (decode_length < 0) {
// Error.
LOG(LS_WARNING) << "Decode " << decode_length << " " << payload_length;
diff --git a/webrtc/modules/audio_coding/neteq/neteq_impl.h b/webrtc/modules/audio_coding/neteq/neteq_impl.h
index c001e53b81..940deadd2f 100644
--- a/webrtc/modules/audio_coding/neteq/neteq_impl.h
+++ b/webrtc/modules/audio_coding/neteq/neteq_impl.h
@@ -11,6 +11,8 @@
#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_NETEQ_IMPL_H_
#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_NETEQ_IMPL_H_
+#include <string>
+
#include "webrtc/base/constructormagic.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/thread_annotations.h"
@@ -79,8 +81,7 @@ class NetEqImpl : public webrtc::NetEq {
// the same tick rate as the RTP timestamp of the current payload.
// Returns 0 on success, -1 on failure.
int InsertPacket(const WebRtcRTPHeader& rtp_header,
- const uint8_t* payload,
- size_t length_bytes,
+ rtc::ArrayView<const uint8_t> payload,
uint32_t receive_timestamp) override;
// Inserts a sync-packet into packet queue. Sync-packets are decoded to
@@ -106,20 +107,16 @@ class NetEqImpl : public webrtc::NetEq {
int GetAudio(size_t max_length,
int16_t* output_audio,
size_t* samples_per_channel,
- int* num_channels,
+ size_t* num_channels,
NetEqOutputType* type) override;
- // Associates |rtp_payload_type| with |codec| and stores the information in
- // the codec database. Returns kOK on success, kFail on failure.
int RegisterPayloadType(NetEqDecoder codec,
+ const std::string& codec_name,
uint8_t rtp_payload_type) override;
- // Provides an externally created decoder object |decoder| to insert in the
- // decoder database. The decoder implements a decoder of type |codec| and
- // associates it with |rtp_payload_type|. The decoder will produce samples
- // at the rate |sample_rate_hz|. Returns kOK on success, kFail on failure.
int RegisterExternalDecoder(AudioDecoder* decoder,
NetEqDecoder codec,
+ const std::string& codec_name,
uint8_t rtp_payload_type,
int sample_rate_hz) override;
@@ -169,6 +166,8 @@ class NetEqImpl : public webrtc::NetEq {
bool GetPlayoutTimestamp(uint32_t* timestamp) override;
+ int last_output_sample_rate_hz() const override;
+
int SetTargetNumberOfChannels() override;
int SetTargetSampleRate() override;
@@ -207,8 +206,7 @@ class NetEqImpl : public webrtc::NetEq {
// above. Returns 0 on success, otherwise an error code.
// TODO(hlundin): Merge this with InsertPacket above?
int InsertPacketInternal(const WebRtcRTPHeader& rtp_header,
- const uint8_t* payload,
- size_t length_bytes,
+ rtc::ArrayView<const uint8_t> payload,
uint32_t receive_timestamp,
bool is_sync_packet)
EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
@@ -222,7 +220,8 @@ class NetEqImpl : public webrtc::NetEq {
int GetAudioInternal(size_t max_length,
int16_t* output,
size_t* samples_per_channel,
- int* num_channels) EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ size_t* num_channels)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
// Provides a decision to the GetAudioInternal method. The decision what to
// do is written to |operation|. Packets to decode are written to
@@ -377,6 +376,7 @@ class NetEqImpl : public webrtc::NetEq {
StatisticsCalculator stats_ GUARDED_BY(crit_sect_);
int fs_hz_ GUARDED_BY(crit_sect_);
int fs_mult_ GUARDED_BY(crit_sect_);
+ int last_output_sample_rate_hz_ GUARDED_BY(crit_sect_);
size_t output_size_samples_ GUARDED_BY(crit_sect_);
size_t decoder_frame_length_ GUARDED_BY(crit_sect_);
Modes last_mode_ GUARDED_BY(crit_sect_);
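
The new member joins the GUARDED_BY(crit_sect_) group, which lets Clang's thread-safety analysis verify that every access holds the lock; the accessor added in neteq_impl.cc above takes it explicitly. A condensed restatement of the pattern, assuming the same wrapper headers this file already includes:

    class RateHolder {
     public:
      int rate_hz() const {
        CriticalSectionScoped lock(crit_sect_.get());  // Satisfies GUARDED_BY.
        return rate_hz_;
      }

     private:
      rtc::scoped_ptr<CriticalSectionWrapper> crit_sect_;
      int rate_hz_ GUARDED_BY(crit_sect_);
    };
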
diff --git a/webrtc/modules/audio_coding/neteq/neteq_impl_unittest.cc b/webrtc/modules/audio_coding/neteq/neteq_impl_unittest.cc
index 90640ca1d2..f734883635 100644
--- a/webrtc/modules/audio_coding/neteq/neteq_impl_unittest.cc
+++ b/webrtc/modules/audio_coding/neteq/neteq_impl_unittest.cc
@@ -240,9 +240,10 @@ TEST_F(NetEqImplTest, RegisterPayloadType) {
CreateInstance();
uint8_t rtp_payload_type = 0;
NetEqDecoder codec_type = NetEqDecoder::kDecoderPCMu;
+ const std::string kCodecName = "Robert\'); DROP TABLE Students;";
EXPECT_CALL(*mock_decoder_database_,
- RegisterPayload(rtp_payload_type, codec_type));
- neteq_->RegisterPayloadType(codec_type, rtp_payload_type);
+ RegisterPayload(rtp_payload_type, codec_type, kCodecName));
+ neteq_->RegisterPayloadType(codec_type, kCodecName, rtp_payload_type);
}
TEST_F(NetEqImplTest, RemovePayloadType) {
@@ -359,13 +360,12 @@ TEST_F(NetEqImplTest, InsertPacket) {
.WillRepeatedly(Return(PayloadSplitter::kOK));
// Insert first packet.
- neteq_->InsertPacket(rtp_header, payload, kPayloadLength, kFirstReceiveTime);
+ neteq_->InsertPacket(rtp_header, payload, kFirstReceiveTime);
// Insert second packet.
rtp_header.header.timestamp += 160;
rtp_header.header.sequenceNumber += 1;
- neteq_->InsertPacket(rtp_header, payload, kPayloadLength,
- kFirstReceiveTime + 155);
+ neteq_->InsertPacket(rtp_header, payload, kFirstReceiveTime + 155);
}
TEST_F(NetEqImplTest, InsertPacketsUntilBufferIsFull) {
@@ -384,13 +384,12 @@ TEST_F(NetEqImplTest, InsertPacketsUntilBufferIsFull) {
rtp_header.header.ssrc = 0x87654321;
EXPECT_EQ(NetEq::kOK, neteq_->RegisterPayloadType(
- NetEqDecoder::kDecoderPCM16B, kPayloadType));
+ NetEqDecoder::kDecoderPCM16B, "", kPayloadType));
// Insert packets. The buffer should not flush.
for (size_t i = 1; i <= config_.max_packets_in_buffer; ++i) {
EXPECT_EQ(NetEq::kOK,
- neteq_->InsertPacket(
- rtp_header, payload, kPayloadLengthBytes, kReceiveTime));
+ neteq_->InsertPacket(rtp_header, payload, kReceiveTime));
rtp_header.header.timestamp += kPayloadLengthSamples;
rtp_header.header.sequenceNumber += 1;
EXPECT_EQ(i, packet_buffer_->NumPacketsInBuffer());
@@ -399,8 +398,7 @@ TEST_F(NetEqImplTest, InsertPacketsUntilBufferIsFull) {
// Insert one more packet and make sure the buffer got flushed. That is, it
// should only hold one single packet.
EXPECT_EQ(NetEq::kOK,
- neteq_->InsertPacket(
- rtp_header, payload, kPayloadLengthBytes, kReceiveTime));
+ neteq_->InsertPacket(rtp_header, payload, kReceiveTime));
EXPECT_EQ(1u, packet_buffer_->NumPacketsInBuffer());
const RTPHeader* test_header = packet_buffer_->NextRtpHeader();
EXPECT_EQ(rtp_header.header.timestamp, test_header->timestamp);
@@ -434,12 +432,11 @@ TEST_F(NetEqImplTest, VerifyTimestampPropagation) {
CountingSamplesDecoder() : next_value_(1) {}
// Produce as many samples as input bytes (|encoded_len|).
- int Decode(const uint8_t* encoded,
- size_t encoded_len,
- int /* sample_rate_hz */,
- size_t /* max_decoded_bytes */,
- int16_t* decoded,
- SpeechType* speech_type) override {
+ int DecodeInternal(const uint8_t* encoded,
+ size_t encoded_len,
+ int /* sample_rate_hz */,
+ int16_t* decoded,
+ SpeechType* speech_type) override {
for (size_t i = 0; i < encoded_len; ++i) {
decoded[i] = next_value_++;
}
@@ -459,25 +456,24 @@ TEST_F(NetEqImplTest, VerifyTimestampPropagation) {
EXPECT_EQ(NetEq::kOK, neteq_->RegisterExternalDecoder(
&decoder_, NetEqDecoder::kDecoderPCM16B,
- kPayloadType, kSampleRateHz));
+ "dummy name", kPayloadType, kSampleRateHz));
// Insert one packet.
EXPECT_EQ(NetEq::kOK,
- neteq_->InsertPacket(
- rtp_header, payload, kPayloadLengthBytes, kReceiveTime));
+ neteq_->InsertPacket(rtp_header, payload, kReceiveTime));
// Pull audio once.
const size_t kMaxOutputSize = static_cast<size_t>(10 * kSampleRateHz / 1000);
int16_t output[kMaxOutputSize];
size_t samples_per_channel;
- int num_channels;
+ size_t num_channels;
NetEqOutputType type;
EXPECT_EQ(
NetEq::kOK,
neteq_->GetAudio(
kMaxOutputSize, output, &samples_per_channel, &num_channels, &type));
ASSERT_EQ(kMaxOutputSize, samples_per_channel);
- EXPECT_EQ(1, num_channels);
+ EXPECT_EQ(1u, num_channels);
EXPECT_EQ(kOutputNormal, type);
// Start with a simple check that the fake decoder is behaving as expected.
@@ -531,33 +527,32 @@ TEST_F(NetEqImplTest, ReorderedPacket) {
int16_t dummy_output[kPayloadLengthSamples] = {0};
// The below expectation will make the mock decoder write
// |kPayloadLengthSamples| zeros to the output array, and mark it as speech.
- EXPECT_CALL(mock_decoder,
- Decode(Pointee(0), kPayloadLengthBytes, kSampleRateHz, _, _, _))
- .WillOnce(DoAll(SetArrayArgument<4>(dummy_output,
+ EXPECT_CALL(mock_decoder, DecodeInternal(Pointee(0), kPayloadLengthBytes,
+ kSampleRateHz, _, _))
+ .WillOnce(DoAll(SetArrayArgument<3>(dummy_output,
dummy_output + kPayloadLengthSamples),
- SetArgPointee<5>(AudioDecoder::kSpeech),
+ SetArgPointee<4>(AudioDecoder::kSpeech),
Return(kPayloadLengthSamples)));
EXPECT_EQ(NetEq::kOK, neteq_->RegisterExternalDecoder(
&mock_decoder, NetEqDecoder::kDecoderPCM16B,
- kPayloadType, kSampleRateHz));
+ "dummy name", kPayloadType, kSampleRateHz));
// Insert one packet.
EXPECT_EQ(NetEq::kOK,
- neteq_->InsertPacket(
- rtp_header, payload, kPayloadLengthBytes, kReceiveTime));
+ neteq_->InsertPacket(rtp_header, payload, kReceiveTime));
// Pull audio once.
const size_t kMaxOutputSize = static_cast<size_t>(10 * kSampleRateHz / 1000);
int16_t output[kMaxOutputSize];
size_t samples_per_channel;
- int num_channels;
+ size_t num_channels;
NetEqOutputType type;
EXPECT_EQ(
NetEq::kOK,
neteq_->GetAudio(
kMaxOutputSize, output, &samples_per_channel, &num_channels, &type));
ASSERT_EQ(kMaxOutputSize, samples_per_channel);
- EXPECT_EQ(1, num_channels);
+ EXPECT_EQ(1u, num_channels);
EXPECT_EQ(kOutputNormal, type);
// Insert two more packets. The first one is out of order, and is already too
@@ -566,22 +561,20 @@ TEST_F(NetEqImplTest, ReorderedPacket) {
rtp_header.header.timestamp -= kPayloadLengthSamples;
payload[0] = 1;
EXPECT_EQ(NetEq::kOK,
- neteq_->InsertPacket(
- rtp_header, payload, kPayloadLengthBytes, kReceiveTime));
+ neteq_->InsertPacket(rtp_header, payload, kReceiveTime));
rtp_header.header.sequenceNumber += 2;
rtp_header.header.timestamp += 2 * kPayloadLengthSamples;
payload[0] = 2;
EXPECT_EQ(NetEq::kOK,
- neteq_->InsertPacket(
- rtp_header, payload, kPayloadLengthBytes, kReceiveTime));
+ neteq_->InsertPacket(rtp_header, payload, kReceiveTime));
// Expect only the second packet to be decoded (the one with "2" as the first
// payload byte).
- EXPECT_CALL(mock_decoder,
- Decode(Pointee(2), kPayloadLengthBytes, kSampleRateHz, _, _, _))
- .WillOnce(DoAll(SetArrayArgument<4>(dummy_output,
+ EXPECT_CALL(mock_decoder, DecodeInternal(Pointee(2), kPayloadLengthBytes,
+ kSampleRateHz, _, _))
+ .WillOnce(DoAll(SetArrayArgument<3>(dummy_output,
dummy_output + kPayloadLengthSamples),
- SetArgPointee<5>(AudioDecoder::kSpeech),
+ SetArgPointee<4>(AudioDecoder::kSpeech),
Return(kPayloadLengthSamples)));
// Pull audio once.
@@ -590,7 +583,7 @@ TEST_F(NetEqImplTest, ReorderedPacket) {
neteq_->GetAudio(
kMaxOutputSize, output, &samples_per_channel, &num_channels, &type));
ASSERT_EQ(kMaxOutputSize, samples_per_channel);
- EXPECT_EQ(1, num_channels);
+ EXPECT_EQ(1u, num_channels);
EXPECT_EQ(kOutputNormal, type);
// Now check the packet buffer, and make sure it is empty, since the
@@ -622,35 +615,33 @@ TEST_F(NetEqImplTest, FirstPacketUnknown) {
// Insert one packet. Note that we have not registered any payload type, so
// this packet will be rejected.
EXPECT_EQ(NetEq::kFail,
- neteq_->InsertPacket(rtp_header, payload, kPayloadLengthBytes,
- kReceiveTime));
+ neteq_->InsertPacket(rtp_header, payload, kReceiveTime));
EXPECT_EQ(NetEq::kUnknownRtpPayloadType, neteq_->LastError());
// Pull audio once.
const size_t kMaxOutputSize = static_cast<size_t>(10 * kSampleRateHz / 1000);
int16_t output[kMaxOutputSize];
size_t samples_per_channel;
- int num_channels;
+ size_t num_channels;
NetEqOutputType type;
EXPECT_EQ(NetEq::kOK,
neteq_->GetAudio(kMaxOutputSize, output, &samples_per_channel,
&num_channels, &type));
ASSERT_LE(samples_per_channel, kMaxOutputSize);
EXPECT_EQ(kMaxOutputSize, samples_per_channel);
- EXPECT_EQ(1, num_channels);
+ EXPECT_EQ(1u, num_channels);
EXPECT_EQ(kOutputPLC, type);
// Register the payload type.
EXPECT_EQ(NetEq::kOK, neteq_->RegisterPayloadType(
- NetEqDecoder::kDecoderPCM16B, kPayloadType));
+ NetEqDecoder::kDecoderPCM16B, "", kPayloadType));
// Insert 10 packets.
for (size_t i = 0; i < 10; ++i) {
rtp_header.header.sequenceNumber++;
rtp_header.header.timestamp += kPayloadLengthSamples;
EXPECT_EQ(NetEq::kOK,
- neteq_->InsertPacket(rtp_header, payload, kPayloadLengthBytes,
- kReceiveTime));
+ neteq_->InsertPacket(rtp_header, payload, kReceiveTime));
EXPECT_EQ(i + 1, packet_buffer_->NumPacketsInBuffer());
}
@@ -661,7 +652,7 @@ TEST_F(NetEqImplTest, FirstPacketUnknown) {
&num_channels, &type));
ASSERT_LE(samples_per_channel, kMaxOutputSize);
EXPECT_EQ(kMaxOutputSize, samples_per_channel);
- EXPECT_EQ(1, num_channels);
+ EXPECT_EQ(1u, num_channels);
EXPECT_EQ(kOutputNormal, type)
<< "NetEq did not decode the packets as expected.";
}
@@ -697,54 +688,53 @@ TEST_F(NetEqImplTest, CodecInternalCng) {
// Pointee(x) verifies that the first byte of the payload equals x; this
// makes it possible to verify that the correct payload is fed to Decode().
- EXPECT_CALL(mock_decoder, Decode(Pointee(0), kPayloadLengthBytes,
- kSampleRateKhz * 1000, _, _, _))
- .WillOnce(DoAll(SetArrayArgument<4>(dummy_output,
+ EXPECT_CALL(mock_decoder, DecodeInternal(Pointee(0), kPayloadLengthBytes,
+ kSampleRateKhz * 1000, _, _))
+ .WillOnce(DoAll(SetArrayArgument<3>(dummy_output,
dummy_output + kPayloadLengthSamples),
- SetArgPointee<5>(AudioDecoder::kSpeech),
+ SetArgPointee<4>(AudioDecoder::kSpeech),
Return(kPayloadLengthSamples)));
- EXPECT_CALL(mock_decoder, Decode(Pointee(1), kPayloadLengthBytes,
- kSampleRateKhz * 1000, _, _, _))
- .WillOnce(DoAll(SetArrayArgument<4>(dummy_output,
+ EXPECT_CALL(mock_decoder, DecodeInternal(Pointee(1), kPayloadLengthBytes,
+ kSampleRateKhz * 1000, _, _))
+ .WillOnce(DoAll(SetArrayArgument<3>(dummy_output,
dummy_output + kPayloadLengthSamples),
- SetArgPointee<5>(AudioDecoder::kComfortNoise),
+ SetArgPointee<4>(AudioDecoder::kComfortNoise),
Return(kPayloadLengthSamples)));
- EXPECT_CALL(mock_decoder, Decode(IsNull(), 0, kSampleRateKhz * 1000, _, _, _))
- .WillOnce(DoAll(SetArrayArgument<4>(dummy_output,
+ EXPECT_CALL(mock_decoder,
+ DecodeInternal(IsNull(), 0, kSampleRateKhz * 1000, _, _))
+ .WillOnce(DoAll(SetArrayArgument<3>(dummy_output,
dummy_output + kPayloadLengthSamples),
- SetArgPointee<5>(AudioDecoder::kComfortNoise),
+ SetArgPointee<4>(AudioDecoder::kComfortNoise),
Return(kPayloadLengthSamples)));
- EXPECT_CALL(mock_decoder, Decode(Pointee(2), kPayloadLengthBytes,
- kSampleRateKhz * 1000, _, _, _))
- .WillOnce(DoAll(SetArrayArgument<4>(dummy_output,
+ EXPECT_CALL(mock_decoder, DecodeInternal(Pointee(2), kPayloadLengthBytes,
+ kSampleRateKhz * 1000, _, _))
+ .WillOnce(DoAll(SetArrayArgument<3>(dummy_output,
dummy_output + kPayloadLengthSamples),
- SetArgPointee<5>(AudioDecoder::kSpeech),
+ SetArgPointee<4>(AudioDecoder::kSpeech),
Return(kPayloadLengthSamples)));
EXPECT_EQ(NetEq::kOK, neteq_->RegisterExternalDecoder(
&mock_decoder, NetEqDecoder::kDecoderOpus,
- kPayloadType, kSampleRateKhz * 1000));
+ "dummy name", kPayloadType, kSampleRateKhz * 1000));
// Insert one packet (decoder will return speech).
EXPECT_EQ(NetEq::kOK,
- neteq_->InsertPacket(
- rtp_header, payload, kPayloadLengthBytes, kReceiveTime));
+ neteq_->InsertPacket(rtp_header, payload, kReceiveTime));
// Insert second packet (decoder will return CNG).
payload[0] = 1;
rtp_header.header.sequenceNumber++;
rtp_header.header.timestamp += kPayloadLengthSamples;
EXPECT_EQ(NetEq::kOK,
- neteq_->InsertPacket(
- rtp_header, payload, kPayloadLengthBytes, kReceiveTime));
+ neteq_->InsertPacket(rtp_header, payload, kReceiveTime));
const size_t kMaxOutputSize = static_cast<size_t>(10 * kSampleRateKhz);
int16_t output[kMaxOutputSize];
size_t samples_per_channel;
- int num_channels;
+ size_t num_channels;
uint32_t timestamp;
uint32_t last_timestamp;
NetEqOutputType type;
@@ -769,7 +759,7 @@ TEST_F(NetEqImplTest, CodecInternalCng) {
for (size_t i = 1; i < 6; ++i) {
ASSERT_EQ(kMaxOutputSize, samples_per_channel);
- EXPECT_EQ(1, num_channels);
+ EXPECT_EQ(1u, num_channels);
EXPECT_EQ(expected_type[i - 1], type);
EXPECT_TRUE(neteq_->GetPlayoutTimestamp(&timestamp));
EXPECT_EQ(NetEq::kOK,
@@ -785,12 +775,11 @@ TEST_F(NetEqImplTest, CodecInternalCng) {
rtp_header.header.sequenceNumber += 2;
rtp_header.header.timestamp += 2 * kPayloadLengthSamples;
EXPECT_EQ(NetEq::kOK,
- neteq_->InsertPacket(
- rtp_header, payload, kPayloadLengthBytes, kReceiveTime));
+ neteq_->InsertPacket(rtp_header, payload, kReceiveTime));
for (size_t i = 6; i < 8; ++i) {
ASSERT_EQ(kMaxOutputSize, samples_per_channel);
- EXPECT_EQ(1, num_channels);
+ EXPECT_EQ(1u, num_channels);
EXPECT_EQ(expected_type[i - 1], type);
EXPECT_EQ(NetEq::kOK,
neteq_->GetAudio(kMaxOutputSize, output, &samples_per_channel,
@@ -810,7 +799,7 @@ TEST_F(NetEqImplTest, UnsupportedDecoder) {
UseNoMocks();
CreateInstance();
static const size_t kNetEqMaxFrameSize = 2880; // 60 ms @ 48 kHz.
- static const int kChannels = 2;
+ static const size_t kChannels = 2;
const uint8_t kPayloadType = 17; // Just an arbitrary number.
const uint32_t kReceiveTime = 17; // Value doesn't matter for this test.
@@ -866,13 +855,12 @@ TEST_F(NetEqImplTest, UnsupportedDecoder) {
EXPECT_EQ(NetEq::kOK, neteq_->RegisterExternalDecoder(
&decoder_, NetEqDecoder::kDecoderPCM16B,
- kPayloadType, kSampleRateHz));
+ "dummy name", kPayloadType, kSampleRateHz));
// Insert one packet.
payload[0] = kFirstPayloadValue; // This will make Decode() fail.
EXPECT_EQ(NetEq::kOK,
- neteq_->InsertPacket(
- rtp_header, payload, kPayloadLengthBytes, kReceiveTime));
+ neteq_->InsertPacket(rtp_header, payload, kReceiveTime));
// Insert another packet.
payload[0] = kSecondPayloadValue; // This will make Decode() successful.
@@ -881,14 +869,12 @@ TEST_F(NetEqImplTest, UnsupportedDecoder) {
// the second packet get decoded.
rtp_header.header.timestamp += 3 * kPayloadLengthSamples;
EXPECT_EQ(NetEq::kOK,
- neteq_->InsertPacket(
- rtp_header, payload, kPayloadLengthBytes, kReceiveTime));
+ neteq_->InsertPacket(rtp_header, payload, kReceiveTime));
- const size_t kMaxOutputSize =
- static_cast<size_t>(10 * kSampleRateHz / 1000 * kChannels);
+ const size_t kMaxOutputSize = 10 * kSampleRateHz / 1000 * kChannels;
int16_t output[kMaxOutputSize];
size_t samples_per_channel;
- int num_channels;
+ size_t num_channels;
NetEqOutputType type;
EXPECT_EQ(NetEq::kFail, neteq_->GetAudio(kMaxOutputSize, output,
@@ -926,14 +912,13 @@ TEST_F(NetEqImplTest, FloodBufferAndGetNetworkStats) {
rtp_header.header.ssrc = 0x87654321;
EXPECT_EQ(NetEq::kOK, neteq_->RegisterPayloadType(
- NetEqDecoder::kDecoderPCM16B, kPayloadType));
+ NetEqDecoder::kDecoderPCM16B, "", kPayloadType));
// Insert packets until the buffer flushes.
for (size_t i = 0; i <= config_.max_packets_in_buffer; ++i) {
EXPECT_EQ(i, packet_buffer_->NumPacketsInBuffer());
EXPECT_EQ(NetEq::kOK,
- neteq_->InsertPacket(rtp_header, payload, kPayloadLengthBytes,
- kReceiveTime));
+ neteq_->InsertPacket(rtp_header, payload, kReceiveTime));
rtp_header.header.timestamp +=
rtc::checked_cast<uint32_t>(kPayloadLengthSamples);
++rtp_header.header.sequenceNumber;
@@ -975,20 +960,19 @@ TEST_F(NetEqImplTest, DecodedPayloadTooShort) {
// |kPayloadLengthSamples| - 5 zeros to the output array, and mark it as
// speech. That is, the decoded length is 5 samples shorter than expected.
EXPECT_CALL(mock_decoder,
- Decode(_, kPayloadLengthBytes, kSampleRateHz, _, _, _))
+ DecodeInternal(_, kPayloadLengthBytes, kSampleRateHz, _, _))
.WillOnce(
- DoAll(SetArrayArgument<4>(dummy_output,
+ DoAll(SetArrayArgument<3>(dummy_output,
dummy_output + kPayloadLengthSamples - 5),
- SetArgPointee<5>(AudioDecoder::kSpeech),
+ SetArgPointee<4>(AudioDecoder::kSpeech),
Return(kPayloadLengthSamples - 5)));
EXPECT_EQ(NetEq::kOK, neteq_->RegisterExternalDecoder(
&mock_decoder, NetEqDecoder::kDecoderPCM16B,
- kPayloadType, kSampleRateHz));
+ "dummy name", kPayloadType, kSampleRateHz));
// Insert one packet.
EXPECT_EQ(NetEq::kOK,
- neteq_->InsertPacket(rtp_header, payload, kPayloadLengthBytes,
- kReceiveTime));
+ neteq_->InsertPacket(rtp_header, payload, kReceiveTime));
EXPECT_EQ(5u, neteq_->sync_buffer_for_test()->FutureLength());
@@ -996,13 +980,13 @@ TEST_F(NetEqImplTest, DecodedPayloadTooShort) {
const size_t kMaxOutputSize = static_cast<size_t>(10 * kSampleRateHz / 1000);
int16_t output[kMaxOutputSize];
size_t samples_per_channel;
- int num_channels;
+ size_t num_channels;
NetEqOutputType type;
EXPECT_EQ(NetEq::kOK,
neteq_->GetAudio(kMaxOutputSize, output, &samples_per_channel,
&num_channels, &type));
ASSERT_EQ(kMaxOutputSize, samples_per_channel);
- EXPECT_EQ(1, num_channels);
+ EXPECT_EQ(1u, num_channels);
EXPECT_EQ(kOutputNormal, type);
EXPECT_CALL(mock_decoder, Die());
@@ -1050,57 +1034,56 @@ TEST_F(NetEqImplTest, DecodingError) {
InSequence sequence; // Dummy variable.
// Mock decoder works normally the first time.
EXPECT_CALL(mock_decoder,
- Decode(_, kPayloadLengthBytes, kSampleRateHz, _, _, _))
+ DecodeInternal(_, kPayloadLengthBytes, kSampleRateHz, _, _))
.Times(3)
.WillRepeatedly(
- DoAll(SetArrayArgument<4>(dummy_output,
+ DoAll(SetArrayArgument<3>(dummy_output,
dummy_output + kFrameLengthSamples),
- SetArgPointee<5>(AudioDecoder::kSpeech),
+ SetArgPointee<4>(AudioDecoder::kSpeech),
Return(kFrameLengthSamples)))
.RetiresOnSaturation();
// Then the mock decoder fails. A common reason for failure is that the
// buffer is too short.
EXPECT_CALL(mock_decoder,
- Decode(_, kPayloadLengthBytes, kSampleRateHz, _, _, _))
+ DecodeInternal(_, kPayloadLengthBytes, kSampleRateHz, _, _))
.WillOnce(Return(-1))
.RetiresOnSaturation();
// Mock decoder finally returns to normal.
EXPECT_CALL(mock_decoder,
- Decode(_, kPayloadLengthBytes, kSampleRateHz, _, _, _))
+ DecodeInternal(_, kPayloadLengthBytes, kSampleRateHz, _, _))
.Times(2)
.WillRepeatedly(
- DoAll(SetArrayArgument<4>(dummy_output,
- dummy_output + kFrameLengthSamples),
- SetArgPointee<5>(AudioDecoder::kSpeech),
+ DoAll(SetArrayArgument<3>(dummy_output,
+ dummy_output + kFrameLengthSamples),
+ SetArgPointee<4>(AudioDecoder::kSpeech),
Return(kFrameLengthSamples)));
}
EXPECT_EQ(NetEq::kOK, neteq_->RegisterExternalDecoder(
&mock_decoder, NetEqDecoder::kDecoderPCM16B,
- kPayloadType, kSampleRateHz));
+ "dummy name", kPayloadType, kSampleRateHz));
// Insert packets.
for (int i = 0; i < 6; ++i) {
rtp_header.header.sequenceNumber += 1;
rtp_header.header.timestamp += kFrameLengthSamples;
EXPECT_EQ(NetEq::kOK,
- neteq_->InsertPacket(rtp_header, payload, kPayloadLengthBytes,
- kReceiveTime));
+ neteq_->InsertPacket(rtp_header, payload, kReceiveTime));
}
// Pull audio.
const size_t kMaxOutputSize = static_cast<size_t>(10 * kSampleRateHz / 1000);
int16_t output[kMaxOutputSize];
size_t samples_per_channel;
- int num_channels;
+ size_t num_channels;
NetEqOutputType type;
EXPECT_EQ(NetEq::kOK,
neteq_->GetAudio(kMaxOutputSize, output, &samples_per_channel,
&num_channels, &type));
EXPECT_EQ(kMaxOutputSize, samples_per_channel);
- EXPECT_EQ(1, num_channels);
+ EXPECT_EQ(1u, num_channels);
EXPECT_EQ(kOutputNormal, type);
// Pull audio again. Decoder fails.
@@ -1110,7 +1093,7 @@ TEST_F(NetEqImplTest, DecodingError) {
EXPECT_EQ(NetEq::kDecoderErrorCode, neteq_->LastError());
EXPECT_EQ(kDecoderErrorCode, neteq_->LastDecoderError());
EXPECT_EQ(kMaxOutputSize, samples_per_channel);
- EXPECT_EQ(1, num_channels);
+ EXPECT_EQ(1u, num_channels);
// TODO(minyue): NetEq should perhaps give kOutputPLC here, since this is
// actually an expansion.
EXPECT_EQ(kOutputNormal, type);
@@ -1120,7 +1103,7 @@ TEST_F(NetEqImplTest, DecodingError) {
neteq_->GetAudio(kMaxOutputSize, output, &samples_per_channel,
&num_channels, &type));
EXPECT_EQ(kMaxOutputSize, samples_per_channel);
- EXPECT_EQ(1, num_channels);
+ EXPECT_EQ(1u, num_channels);
EXPECT_EQ(kOutputPLC, type);
// Pull audio again, should behave normal.
@@ -1128,7 +1111,7 @@ TEST_F(NetEqImplTest, DecodingError) {
neteq_->GetAudio(kMaxOutputSize, output, &samples_per_channel,
&num_channels, &type));
EXPECT_EQ(kMaxOutputSize, samples_per_channel);
- EXPECT_EQ(1, num_channels);
+ EXPECT_EQ(1u, num_channels);
EXPECT_EQ(kOutputNormal, type);
EXPECT_CALL(mock_decoder, Die());
@@ -1174,55 +1157,54 @@ TEST_F(NetEqImplTest, DecodingErrorDuringInternalCng) {
InSequence sequence; // Dummy variable.
// Mock decoder works normally the first 2 times.
EXPECT_CALL(mock_decoder,
- Decode(_, kPayloadLengthBytes, kSampleRateHz, _, _, _))
+ DecodeInternal(_, kPayloadLengthBytes, kSampleRateHz, _, _))
.Times(2)
.WillRepeatedly(
- DoAll(SetArrayArgument<4>(dummy_output,
+ DoAll(SetArrayArgument<3>(dummy_output,
dummy_output + kFrameLengthSamples),
- SetArgPointee<5>(AudioDecoder::kComfortNoise),
+ SetArgPointee<4>(AudioDecoder::kComfortNoise),
Return(kFrameLengthSamples)))
.RetiresOnSaturation();
// Then the mock decoder fails. A common reason for failure is that the
// buffer is too short.
- EXPECT_CALL(mock_decoder, Decode(nullptr, 0, kSampleRateHz, _, _, _))
+ EXPECT_CALL(mock_decoder, DecodeInternal(nullptr, 0, kSampleRateHz, _, _))
.WillOnce(Return(-1))
.RetiresOnSaturation();
// Mock decoder finally returns to normal.
- EXPECT_CALL(mock_decoder, Decode(nullptr, 0, kSampleRateHz, _, _, _))
+ EXPECT_CALL(mock_decoder, DecodeInternal(nullptr, 0, kSampleRateHz, _, _))
.Times(2)
.WillRepeatedly(
- DoAll(SetArrayArgument<4>(dummy_output,
- dummy_output + kFrameLengthSamples),
- SetArgPointee<5>(AudioDecoder::kComfortNoise),
+ DoAll(SetArrayArgument<3>(dummy_output,
+ dummy_output + kFrameLengthSamples),
+ SetArgPointee<4>(AudioDecoder::kComfortNoise),
Return(kFrameLengthSamples)));
}
EXPECT_EQ(NetEq::kOK, neteq_->RegisterExternalDecoder(
&mock_decoder, NetEqDecoder::kDecoderPCM16B,
- kPayloadType, kSampleRateHz));
+ "dummy name", kPayloadType, kSampleRateHz));
// Insert 2 packets. This will put NetEq into codec-internal CNG mode.
for (int i = 0; i < 2; ++i) {
rtp_header.header.sequenceNumber += 1;
rtp_header.header.timestamp += kFrameLengthSamples;
EXPECT_EQ(NetEq::kOK,
- neteq_->InsertPacket(rtp_header, payload, kPayloadLengthBytes,
- kReceiveTime));
+ neteq_->InsertPacket(rtp_header, payload, kReceiveTime));
}
// Pull audio.
const size_t kMaxOutputSize = static_cast<size_t>(10 * kSampleRateHz / 1000);
int16_t output[kMaxOutputSize];
size_t samples_per_channel;
- int num_channels;
+ size_t num_channels;
NetEqOutputType type;
EXPECT_EQ(NetEq::kOK,
neteq_->GetAudio(kMaxOutputSize, output, &samples_per_channel,
&num_channels, &type));
EXPECT_EQ(kMaxOutputSize, samples_per_channel);
- EXPECT_EQ(1, num_channels);
+ EXPECT_EQ(1u, num_channels);
EXPECT_EQ(kOutputCNG, type);
// Pull audio again. Decoder fails.
@@ -1232,7 +1214,7 @@ TEST_F(NetEqImplTest, DecodingErrorDuringInternalCng) {
EXPECT_EQ(NetEq::kDecoderErrorCode, neteq_->LastError());
EXPECT_EQ(kDecoderErrorCode, neteq_->LastDecoderError());
EXPECT_EQ(kMaxOutputSize, samples_per_channel);
- EXPECT_EQ(1, num_channels);
+ EXPECT_EQ(1u, num_channels);
// TODO(minyue): NetEq should perhaps give kOutputPLC here, since this is
// actually an expansion.
EXPECT_EQ(kOutputCNG, type);
@@ -1242,10 +1224,19 @@ TEST_F(NetEqImplTest, DecodingErrorDuringInternalCng) {
neteq_->GetAudio(kMaxOutputSize, output, &samples_per_channel,
&num_channels, &type));
EXPECT_EQ(kMaxOutputSize, samples_per_channel);
- EXPECT_EQ(1, num_channels);
+ EXPECT_EQ(1u, num_channels);
EXPECT_EQ(kOutputCNG, type);
EXPECT_CALL(mock_decoder, Die());
}
+// Tests that the return value from last_output_sample_rate_hz() is equal to the
+// configured initial sample rate.
+TEST_F(NetEqImplTest, InitialLastOutputSampleRate) {
+ UseNoMocks();
+ config_.sample_rate_hz = 48000;
+ CreateInstance();
+ EXPECT_EQ(48000, neteq_->last_output_sample_rate_hz());
+}
+
}  // namespace webrtc
diff --git a/webrtc/modules/audio_coding/neteq/neteq_network_stats_unittest.cc b/webrtc/modules/audio_coding/neteq/neteq_network_stats_unittest.cc
index 16fa04c234..34ca9ea856 100644
--- a/webrtc/modules/audio_coding/neteq/neteq_network_stats_unittest.cc
+++ b/webrtc/modules/audio_coding/neteq/neteq_network_stats_unittest.cc
@@ -191,8 +191,7 @@ struct NetEqNetworkStatsCheck {
frame_size_samples_,
&rtp_header_);
if (!Lost(next_send_time)) {
- InsertPacket(rtp_header_, payload_, kPayloadSizeByte,
- next_send_time);
+ InsertPacket(rtp_header_, payload_, next_send_time);
}
}
GetOutputAudio(kMaxOutputSize, output_, &output_type);
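
rtc::ArrayView also converts implicitly from a C array, deducing the length at compile time; that is why the explicit kPayloadSizeByte argument can be dropped here when payload_ is a fixed-size array. A one-line sketch with an illustrative array size, assuming the view's array constructor matches as the call sites suggest:

    uint8_t payload[160];
    rtc::ArrayView<const uint8_t> view(payload);  // view.size() == 160
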
diff --git a/webrtc/modules/audio_coding/neteq/neteq_stereo_unittest.cc b/webrtc/modules/audio_coding/neteq/neteq_stereo_unittest.cc
index 66874b8a50..d3f59ec668 100644
--- a/webrtc/modules/audio_coding/neteq/neteq_stereo_unittest.cc
+++ b/webrtc/modules/audio_coding/neteq/neteq_stereo_unittest.cc
@@ -16,19 +16,18 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h"
+#include "webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.h"
#include "webrtc/modules/audio_coding/neteq/include/neteq.h"
#include "webrtc/modules/audio_coding/neteq/tools/input_audio_file.h"
#include "webrtc/modules/audio_coding/neteq/tools/rtp_generator.h"
#include "webrtc/test/testsupport/fileutils.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
namespace webrtc {
struct TestParameters {
int frame_size;
int sample_rate;
- int num_channels;
+ size_t num_channels;
};
// This is a parameterized test. The test parameters are supplied through a
@@ -127,11 +126,10 @@ class NetEqStereoTest : public ::testing::TestWithParam<TestParameters> {
default:
FAIL() << "We shouldn't get here.";
}
+ ASSERT_EQ(NetEq::kOK, neteq_mono_->RegisterPayloadType(mono_decoder, "mono",
+ kPayloadTypeMono));
ASSERT_EQ(NetEq::kOK,
- neteq_mono_->RegisterPayloadType(mono_decoder,
- kPayloadTypeMono));
- ASSERT_EQ(NetEq::kOK,
- neteq_->RegisterPayloadType(multi_decoder,
+ neteq_->RegisterPayloadType(multi_decoder, "multi-channel",
kPayloadTypeMulti));
}
@@ -165,7 +163,7 @@ class NetEqStereoTest : public ::testing::TestWithParam<TestParameters> {
void VerifyOutput(size_t num_samples) {
for (size_t i = 0; i < num_samples; ++i) {
- for (int j = 0; j < num_channels_; ++j) {
+ for (size_t j = 0; j < num_channels_; ++j) {
ASSERT_EQ(output_[i], output_multi_channel_[i * num_channels_ + j]) <<
"Diff in sample " << i << ", channel " << j << ".";
}
@@ -196,14 +194,16 @@ class NetEqStereoTest : public ::testing::TestWithParam<TestParameters> {
while (time_now >= next_arrival_time) {
// Insert packet in mono instance.
ASSERT_EQ(NetEq::kOK,
- neteq_mono_->InsertPacket(rtp_header_mono_, encoded_,
- payload_size_bytes_,
+ neteq_mono_->InsertPacket(rtp_header_mono_,
+ rtc::ArrayView<const uint8_t>(
+ encoded_, payload_size_bytes_),
next_arrival_time));
// Insert packet in multi-channel instance.
- ASSERT_EQ(NetEq::kOK,
- neteq_->InsertPacket(rtp_header_, encoded_multi_channel_,
- multi_payload_size_bytes_,
- next_arrival_time));
+ ASSERT_EQ(NetEq::kOK, neteq_->InsertPacket(
+ rtp_header_, rtc::ArrayView<const uint8_t>(
+ encoded_multi_channel_,
+ multi_payload_size_bytes_),
+ next_arrival_time));
// Get next input packets (mono and multi-channel).
do {
next_send_time = GetNewPackets();
@@ -214,12 +214,12 @@ class NetEqStereoTest : public ::testing::TestWithParam<TestParameters> {
NetEqOutputType output_type;
// Get audio from mono instance.
size_t samples_per_channel;
- int num_channels;
+ size_t num_channels;
EXPECT_EQ(NetEq::kOK,
neteq_mono_->GetAudio(kMaxBlockSize, output_,
&samples_per_channel, &num_channels,
&output_type));
- EXPECT_EQ(1, num_channels);
+ EXPECT_EQ(1u, num_channels);
EXPECT_EQ(output_size_samples_, samples_per_channel);
// Get audio from multi-channel instance.
ASSERT_EQ(NetEq::kOK,
@@ -239,7 +239,7 @@ class NetEqStereoTest : public ::testing::TestWithParam<TestParameters> {
}
}
- const int num_channels_;
+ const size_t num_channels_;
const int sample_rate_hz_;
const int samples_per_ms_;
const int frame_size_ms_;
@@ -275,7 +275,12 @@ class NetEqStereoTestNoJitter : public NetEqStereoTest {
}
};
-TEST_P(NetEqStereoTestNoJitter, DISABLED_ON_ANDROID(RunTest)) {
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_RunTest DISABLED_RunTest
+#else
+#define MAYBE_RunTest RunTest
+#endif
+TEST_P(NetEqStereoTestNoJitter, MAYBE_RunTest) {
RunTest(8);
}
@@ -300,7 +305,7 @@ class NetEqStereoTestPositiveDrift : public NetEqStereoTest {
double drift_factor;
};
-TEST_P(NetEqStereoTestPositiveDrift, DISABLED_ON_ANDROID(RunTest)) {
+TEST_P(NetEqStereoTestPositiveDrift, MAYBE_RunTest) {
RunTest(100);
}
@@ -313,7 +318,7 @@ class NetEqStereoTestNegativeDrift : public NetEqStereoTestPositiveDrift {
}
};
-TEST_P(NetEqStereoTestNegativeDrift, DISABLED_ON_ANDROID(RunTest)) {
+TEST_P(NetEqStereoTestNegativeDrift, MAYBE_RunTest) {
RunTest(100);
}
@@ -341,7 +346,7 @@ class NetEqStereoTestDelays : public NetEqStereoTest {
int frame_index_;
};
-TEST_P(NetEqStereoTestDelays, DISABLED_ON_ANDROID(RunTest)) {
+TEST_P(NetEqStereoTestDelays, MAYBE_RunTest) {
RunTest(1000);
}
@@ -360,7 +365,10 @@ class NetEqStereoTestLosses : public NetEqStereoTest {
int frame_index_;
};
-TEST_P(NetEqStereoTestLosses, DISABLED_ON_ANDROID(RunTest)) {
+// TODO(pbos): Enable on non-Android. This test started failing while it was
+// accidentally disabled on all platforms, not just Android.
+// https://bugs.chromium.org/p/webrtc/issues/detail?id=5387
+TEST_P(NetEqStereoTestLosses, DISABLED_RunTest) {
RunTest(100);
}
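
The MAYBE_ macro introduced above is the standard gtest idiom for disabling a test on a single platform: it expands to a DISABLED_-prefixed name (which gtest skips by name) on Android and to the plain name elsewhere. The same idiom for a hypothetical SlowTest, to make the mechanics explicit:

    #if defined(WEBRTC_ANDROID)
    #define MAYBE_SlowTest DISABLED_SlowTest
    #else
    #define MAYBE_SlowTest SlowTest
    #endif
    TEST(ExampleSuite, MAYBE_SlowTest) {
      // Runs everywhere except Android, where gtest skips it by name.
    }
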
diff --git a/webrtc/modules/audio_coding/neteq/neteq_tests.gypi b/webrtc/modules/audio_coding/neteq/neteq_tests.gypi
index ee9583ab85..f02d3deee9 100644
--- a/webrtc/modules/audio_coding/neteq/neteq_tests.gypi
+++ b/webrtc/modules/audio_coding/neteq/neteq_tests.gypi
@@ -39,6 +39,21 @@
'defines': [
],
}, # neteq_rtpplay
+ {
+ 'target_name': 'neteq_unittest_proto',
+ 'type': 'static_library',
+ 'sources': [
+ 'neteq_unittest.proto',
+ ],
+ 'variables': {
+ 'proto_in_dir': '.',
+ # Workaround to protect against gyp's pathname relativization when
+ # this file is included by modules.gyp.
+ 'proto_out_protected': 'webrtc/audio_coding/neteq',
+ 'proto_out_dir': '<(proto_out_protected)',
+ },
+ 'includes': ['../../../build/protoc.gypi',],
+ },
],
}],
],
@@ -56,6 +71,7 @@
'isac',
'neteq_test_tools', # Test helpers
'pcm16b',
+ 'webrtc_opus',
],
'defines': [
'CODEC_ILBC',
@@ -72,6 +88,7 @@
'CODEC_CNGCODEC32',
'CODEC_ATEVENT_DECODE',
'CODEC_RED',
+ 'CODEC_OPUS',
],
'include_dirs': [
'include',
diff --git a/webrtc/modules/audio_coding/neteq/neteq_unittest.cc b/webrtc/modules/audio_coding/neteq/neteq_unittest.cc
index 4340f54975..8d52c615da 100644
--- a/webrtc/modules/audio_coding/neteq/neteq_unittest.cc
+++ b/webrtc/modules/audio_coding/neteq/neteq_unittest.cc
@@ -28,29 +28,91 @@
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/audio_coding/neteq/tools/audio_loop.h"
#include "webrtc/modules/audio_coding/neteq/tools/rtp_file_source.h"
-#include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h"
+#include "webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.h"
#include "webrtc/test/testsupport/fileutils.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
#include "webrtc/typedefs.h"
+#ifdef WEBRTC_NETEQ_UNITTEST_BITEXACT
+#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
+#include "external/webrtc/webrtc/modules/audio_coding/neteq/neteq_unittest.pb.h"
+#else
+#include "webrtc/audio_coding/neteq/neteq_unittest.pb.h"
+#endif
+#endif
+
DEFINE_bool(gen_ref, false, "Generate reference files.");
-namespace webrtc {
+namespace {
-static bool IsAllZero(const int16_t* buf, size_t buf_length) {
+bool IsAllZero(const int16_t* buf, size_t buf_length) {
bool all_zero = true;
for (size_t n = 0; n < buf_length && all_zero; ++n)
all_zero = buf[n] == 0;
return all_zero;
}
-static bool IsAllNonZero(const int16_t* buf, size_t buf_length) {
+bool IsAllNonZero(const int16_t* buf, size_t buf_length) {
bool all_non_zero = true;
for (size_t n = 0; n < buf_length && all_non_zero; ++n)
all_non_zero = buf[n] != 0;
return all_non_zero;
}
+#ifdef WEBRTC_NETEQ_UNITTEST_BITEXACT
+void Convert(const webrtc::NetEqNetworkStatistics& stats_raw,
+ webrtc::neteq_unittest::NetEqNetworkStatistics* stats) {
+ stats->set_current_buffer_size_ms(stats_raw.current_buffer_size_ms);
+ stats->set_preferred_buffer_size_ms(stats_raw.preferred_buffer_size_ms);
+ stats->set_jitter_peaks_found(stats_raw.jitter_peaks_found);
+ stats->set_packet_loss_rate(stats_raw.packet_loss_rate);
+ stats->set_packet_discard_rate(stats_raw.packet_discard_rate);
+ stats->set_expand_rate(stats_raw.expand_rate);
+ stats->set_speech_expand_rate(stats_raw.speech_expand_rate);
+ stats->set_preemptive_rate(stats_raw.preemptive_rate);
+ stats->set_accelerate_rate(stats_raw.accelerate_rate);
+ stats->set_secondary_decoded_rate(stats_raw.secondary_decoded_rate);
+ stats->set_clockdrift_ppm(stats_raw.clockdrift_ppm);
+ stats->set_added_zero_samples(stats_raw.added_zero_samples);
+ stats->set_mean_waiting_time_ms(stats_raw.mean_waiting_time_ms);
+ stats->set_median_waiting_time_ms(stats_raw.median_waiting_time_ms);
+ stats->set_min_waiting_time_ms(stats_raw.min_waiting_time_ms);
+ stats->set_max_waiting_time_ms(stats_raw.max_waiting_time_ms);
+}
+
+void Convert(const webrtc::RtcpStatistics& stats_raw,
+ webrtc::neteq_unittest::RtcpStatistics* stats) {
+ stats->set_fraction_lost(stats_raw.fraction_lost);
+ stats->set_cumulative_lost(stats_raw.cumulative_lost);
+ stats->set_extended_max_sequence_number(
+ stats_raw.extended_max_sequence_number);
+ stats->set_jitter(stats_raw.jitter);
+}
+
+void WriteMessage(FILE* file, const std::string& message) {
+ int32_t size = message.length();
+ ASSERT_EQ(1u, fwrite(&size, sizeof(size), 1, file));
+ if (size <= 0)
+ return;
+ ASSERT_EQ(static_cast<size_t>(size),
+ fwrite(message.data(), sizeof(char), size, file));
+}
+
+void ReadMessage(FILE* file, std::string* message) {
+ int32_t size;
+ ASSERT_EQ(1u, fread(&size, sizeof(size), 1, file));
+ if (size <= 0)
+ return;
+ rtc::scoped_ptr<char[]> buffer(new char[size]);
+ ASSERT_EQ(static_cast<size_t>(size),
+ fread(buffer.get(), sizeof(char), size, file));
+ message->assign(buffer.get(), size);
+}
+#endif // WEBRTC_NETEQ_UNITTEST_BITEXACT
+
+} // namespace
+
+namespace webrtc {
+
class RefFiles {
public:
RefFiles(const std::string& input_file, const std::string& output_file);
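
The new helpers frame each protobuf message with a 4-byte (int32_t) length prefix so several messages can share one file. A round-trip sketch using the helpers and Convert() function from the hunk above; the file name is illustrative, and error handling is left to the ASSERTs inside the helpers:

    void RoundTripStats(const webrtc::NetEqNetworkStatistics& stats_raw) {
      webrtc::neteq_unittest::NetEqNetworkStatistics stats;
      Convert(stats_raw, &stats);
      std::string serialized;
      ASSERT_TRUE(stats.SerializeToString(&serialized));

      FILE* file = fopen("stats.dat", "wb");
      WriteMessage(file, serialized);  // Writes size prefix, then payload.
      fclose(file);

      file = fopen("stats.dat", "rb");
      std::string read_back;
      ReadMessage(file, &read_back);   // Reads size prefix, then payload.
      fclose(file);
      EXPECT_EQ(serialized, read_back);
    }
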
@@ -128,92 +190,84 @@ void RefFiles::ReadFromFileAndCompare(const T (&test_results)[n],
}
}
-void RefFiles::WriteToFile(const NetEqNetworkStatistics& stats) {
- if (output_fp_) {
- ASSERT_EQ(1u, fwrite(&stats, sizeof(NetEqNetworkStatistics), 1,
- output_fp_));
- }
+void RefFiles::WriteToFile(const NetEqNetworkStatistics& stats_raw) {
+#ifdef WEBRTC_NETEQ_UNITTEST_BITEXACT
+ if (!output_fp_)
+ return;
+ neteq_unittest::NetEqNetworkStatistics stats;
+ Convert(stats_raw, &stats);
+
+ std::string stats_string;
+ ASSERT_TRUE(stats.SerializeToString(&stats_string));
+ WriteMessage(output_fp_, stats_string);
+#else
+ FAIL() << "Writing to reference file requires Proto Buffer.";
+#endif // WEBRTC_NETEQ_UNITTEST_BITEXACT
}
void RefFiles::ReadFromFileAndCompare(
const NetEqNetworkStatistics& stats) {
- // TODO(minyue): Update resource/audio_coding/neteq_network_stats.dat and
- // resource/audio_coding/neteq_network_stats_win32.dat.
- struct NetEqNetworkStatisticsOld {
- uint16_t current_buffer_size_ms; // Current jitter buffer size in ms.
- uint16_t preferred_buffer_size_ms; // Target buffer size in ms.
- uint16_t jitter_peaks_found; // 1 if adding extra delay due to peaky
- // jitter; 0 otherwise.
- uint16_t packet_loss_rate; // Loss rate (network + late) in Q14.
- uint16_t packet_discard_rate; // Late loss rate in Q14.
- uint16_t expand_rate; // Fraction (of original stream) of synthesized
- // audio inserted through expansion (in Q14).
- uint16_t preemptive_rate; // Fraction of data inserted through pre-emptive
- // expansion (in Q14).
- uint16_t accelerate_rate; // Fraction of data removed through acceleration
- // (in Q14).
- int32_t clockdrift_ppm; // Average clock-drift in parts-per-million
- // (positive or negative).
- int added_zero_samples; // Number of zero samples added in "off" mode.
- };
- if (input_fp_) {
- // Read from ref file.
- size_t stat_size = sizeof(NetEqNetworkStatisticsOld);
- NetEqNetworkStatisticsOld ref_stats;
- ASSERT_EQ(1u, fread(&ref_stats, stat_size, 1, input_fp_));
- // Compare
- ASSERT_EQ(stats.current_buffer_size_ms, ref_stats.current_buffer_size_ms);
- ASSERT_EQ(stats.preferred_buffer_size_ms,
- ref_stats.preferred_buffer_size_ms);
- ASSERT_EQ(stats.jitter_peaks_found, ref_stats.jitter_peaks_found);
- ASSERT_EQ(stats.packet_loss_rate, ref_stats.packet_loss_rate);
- ASSERT_EQ(stats.packet_discard_rate, ref_stats.packet_discard_rate);
- ASSERT_EQ(stats.expand_rate, ref_stats.expand_rate);
- ASSERT_EQ(stats.preemptive_rate, ref_stats.preemptive_rate);
- ASSERT_EQ(stats.accelerate_rate, ref_stats.accelerate_rate);
- ASSERT_EQ(stats.clockdrift_ppm, ref_stats.clockdrift_ppm);
- ASSERT_EQ(stats.added_zero_samples,
- static_cast<size_t>(ref_stats.added_zero_samples));
- ASSERT_EQ(stats.secondary_decoded_rate, 0);
- ASSERT_LE(stats.speech_expand_rate, ref_stats.expand_rate);
- }
+#ifdef WEBRTC_NETEQ_UNITTEST_BITEXACT
+ if (!input_fp_)
+ return;
+
+ std::string stats_string;
+ ReadMessage(input_fp_, &stats_string);
+ neteq_unittest::NetEqNetworkStatistics ref_stats;
+ ASSERT_TRUE(ref_stats.ParseFromString(stats_string));
+
+ // Compare
+ ASSERT_EQ(stats.current_buffer_size_ms, ref_stats.current_buffer_size_ms());
+ ASSERT_EQ(stats.preferred_buffer_size_ms,
+ ref_stats.preferred_buffer_size_ms());
+ ASSERT_EQ(stats.jitter_peaks_found, ref_stats.jitter_peaks_found());
+ ASSERT_EQ(stats.packet_loss_rate, ref_stats.packet_loss_rate());
+ ASSERT_EQ(stats.packet_discard_rate, ref_stats.packet_discard_rate());
+ ASSERT_EQ(stats.expand_rate, ref_stats.expand_rate());
+ ASSERT_EQ(stats.preemptive_rate, ref_stats.preemptive_rate());
+ ASSERT_EQ(stats.accelerate_rate, ref_stats.accelerate_rate());
+ ASSERT_EQ(stats.clockdrift_ppm, ref_stats.clockdrift_ppm());
+ ASSERT_EQ(stats.added_zero_samples, ref_stats.added_zero_samples());
+ ASSERT_EQ(stats.secondary_decoded_rate, ref_stats.secondary_decoded_rate());
+ ASSERT_LE(stats.speech_expand_rate, ref_stats.expand_rate());
+#else
+ FAIL() << "Reading from reference file requires Proto Buffer.";
+#endif // WEBRTC_NETEQ_UNITTEST_BITEXACT
}
-void RefFiles::WriteToFile(const RtcpStatistics& stats) {
- if (output_fp_) {
- ASSERT_EQ(1u, fwrite(&(stats.fraction_lost), sizeof(stats.fraction_lost), 1,
- output_fp_));
- ASSERT_EQ(1u, fwrite(&(stats.cumulative_lost),
- sizeof(stats.cumulative_lost), 1, output_fp_));
- ASSERT_EQ(1u, fwrite(&(stats.extended_max_sequence_number),
- sizeof(stats.extended_max_sequence_number), 1,
- output_fp_));
- ASSERT_EQ(1u, fwrite(&(stats.jitter), sizeof(stats.jitter), 1,
- output_fp_));
- }
+void RefFiles::WriteToFile(const RtcpStatistics& stats_raw) {
+#ifdef WEBRTC_NETEQ_UNITTEST_BITEXACT
+ if (!output_fp_)
+ return;
+ neteq_unittest::RtcpStatistics stats;
+ Convert(stats_raw, &stats);
+
+ std::string stats_string;
+ ASSERT_TRUE(stats.SerializeToString(&stats_string));
+ WriteMessage(output_fp_, stats_string);
+#else
+ FAIL() << "Writing to reference file requires Proto Buffer.";
+#endif // WEBRTC_NETEQ_UNITTEST_BITEXACT
}
-void RefFiles::ReadFromFileAndCompare(
- const RtcpStatistics& stats) {
- if (input_fp_) {
- // Read from ref file.
- RtcpStatistics ref_stats;
- ASSERT_EQ(1u, fread(&(ref_stats.fraction_lost),
- sizeof(ref_stats.fraction_lost), 1, input_fp_));
- ASSERT_EQ(1u, fread(&(ref_stats.cumulative_lost),
- sizeof(ref_stats.cumulative_lost), 1, input_fp_));
- ASSERT_EQ(1u, fread(&(ref_stats.extended_max_sequence_number),
- sizeof(ref_stats.extended_max_sequence_number), 1,
- input_fp_));
- ASSERT_EQ(1u, fread(&(ref_stats.jitter), sizeof(ref_stats.jitter), 1,
- input_fp_));
- // Compare
- ASSERT_EQ(ref_stats.fraction_lost, stats.fraction_lost);
- ASSERT_EQ(ref_stats.cumulative_lost, stats.cumulative_lost);
- ASSERT_EQ(ref_stats.extended_max_sequence_number,
- stats.extended_max_sequence_number);
- ASSERT_EQ(ref_stats.jitter, stats.jitter);
- }
+void RefFiles::ReadFromFileAndCompare(const RtcpStatistics& stats) {
+#ifdef WEBRTC_NETEQ_UNITTEST_BITEXACT
+ if (!input_fp_)
+ return;
+ std::string stats_string;
+ ReadMessage(input_fp_, &stats_string);
+ neteq_unittest::RtcpStatistics ref_stats;
+ ASSERT_TRUE(ref_stats.ParseFromString(stats_string));
+
+ // Compare
+ ASSERT_EQ(stats.fraction_lost, ref_stats.fraction_lost());
+ ASSERT_EQ(stats.cumulative_lost, ref_stats.cumulative_lost());
+ ASSERT_EQ(stats.extended_max_sequence_number,
+ ref_stats.extended_max_sequence_number());
+ ASSERT_EQ(stats.jitter, ref_stats.jitter());
+#else
+ FAIL() << "Reading from reference file requires Proto Buffer.";
+#endif // WEBRTC_NETEQ_UNITTEST_BITEXACT
}
class NetEqDecodingTest : public ::testing::Test {
@@ -224,7 +278,8 @@ class NetEqDecodingTest : public ::testing::Test {
static const size_t kBlockSize8kHz = kTimeStepMs * 8;
static const size_t kBlockSize16kHz = kTimeStepMs * 16;
static const size_t kBlockSize32kHz = kTimeStepMs * 32;
- static const size_t kMaxBlockSize = kBlockSize32kHz;
+ static const size_t kBlockSize48kHz = kTimeStepMs * 48;
+ static const size_t kMaxBlockSize = kBlockSize48kHz;
static const int kInitSampleRateHz = 8000;
NetEqDecodingTest();
@@ -234,10 +289,12 @@ class NetEqDecodingTest : public ::testing::Test {
void LoadDecoders();
void OpenInputFile(const std::string &rtp_file);
void Process(size_t* out_len);
+
void DecodeAndCompare(const std::string& rtp_file,
const std::string& ref_file,
const std::string& stat_ref_file,
const std::string& rtcp_ref_file);
+
static void PopulateRtpInfo(int frame_index,
int timestamp,
WebRtcRTPHeader* rtp_info);
@@ -304,32 +361,45 @@ void NetEqDecodingTest::TearDown() {
void NetEqDecodingTest::LoadDecoders() {
// Load PCMu.
- ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderPCMu, 0));
+ ASSERT_EQ(0,
+ neteq_->RegisterPayloadType(NetEqDecoder::kDecoderPCMu, "pcmu", 0));
// Load PCMa.
- ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderPCMa, 8));
+ ASSERT_EQ(0,
+ neteq_->RegisterPayloadType(NetEqDecoder::kDecoderPCMa, "pcma", 8));
#ifdef WEBRTC_CODEC_ILBC
// Load iLBC.
- ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderILBC, 102));
+ ASSERT_EQ(
+ 0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderILBC, "ilbc", 102));
#endif
#if defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX)
// Load iSAC.
- ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderISAC, 103));
+ ASSERT_EQ(
+ 0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderISAC, "isac", 103));
#endif
#ifdef WEBRTC_CODEC_ISAC
// Load iSAC SWB.
- ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderISACswb, 104));
+ ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderISACswb,
+ "isac-swb", 104));
+#endif
+#ifdef WEBRTC_CODEC_OPUS
+ ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderOpus,
+ "opus", 111));
#endif
// Load PCM16B nb.
- ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderPCM16B, 93));
+ ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderPCM16B,
+ "pcm16-nb", 93));
// Load PCM16B wb.
- ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderPCM16Bwb, 94));
+ ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderPCM16Bwb,
+ "pcm16-wb", 94));
// Load PCM16B swb32.
- ASSERT_EQ(
- 0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderPCM16Bswb32kHz, 95));
+ ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderPCM16Bswb32kHz,
+ "pcm16-swb32", 95));
// Load CNG 8 kHz.
- ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderCNGnb, 13));
+ ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderCNGnb,
+ "cng-nb", 13));
// Load CNG 16 kHz.
- ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderCNGwb, 98));
+ ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderCNGwb,
+ "cng-wb", 98));
}
void NetEqDecodingTest::OpenInputFile(const std::string &rtp_file) {
@@ -343,10 +413,11 @@ void NetEqDecodingTest::Process(size_t* out_len) {
WebRtcRTPHeader rtp_header;
packet_->ConvertHeader(&rtp_header);
ASSERT_EQ(0, neteq_->InsertPacket(
- rtp_header, packet_->payload(),
- packet_->payload_length_bytes(),
- static_cast<uint32_t>(
- packet_->time_ms() * (output_sample_rate_ / 1000))));
+ rtp_header,
+ rtc::ArrayView<const uint8_t>(
+ packet_->payload(), packet_->payload_length_bytes()),
+ static_cast<uint32_t>(packet_->time_ms() *
+ (output_sample_rate_ / 1000))));
}
// Get next packet.
packet_.reset(rtp_source_->NextPacket());
@@ -354,13 +425,15 @@ void NetEqDecodingTest::Process(size_t* out_len) {
// Get audio from NetEq.
NetEqOutputType type;
- int num_channels;
+ size_t num_channels;
ASSERT_EQ(0, neteq_->GetAudio(kMaxBlockSize, out_data_, out_len,
&num_channels, &type));
ASSERT_TRUE((*out_len == kBlockSize8kHz) ||
(*out_len == kBlockSize16kHz) ||
- (*out_len == kBlockSize32kHz));
+ (*out_len == kBlockSize32kHz) ||
+ (*out_len == kBlockSize48kHz));
output_sample_rate_ = static_cast<int>(*out_len / 10 * 1000);
+ EXPECT_EQ(output_sample_rate_, neteq_->last_output_sample_rate_hz());
// Increase time.
sim_clock_ += kTimeStepMs;
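This call site illustrates the API migration running through the whole patch: NetEq::InsertPacket now takes an rtc::ArrayView<const uint8_t> instead of a raw pointer plus byte count. A short sketch of the two construction forms seen in the updated call sites (buffer name illustrative):

uint8_t payload[100] = {0};

// Explicit view over (pointer, length), as in Process() above:
rtc::ArrayView<const uint8_t> view(payload, sizeof(payload));

// A fixed-size array converts implicitly, which is why later call
// sites can simply write: neteq_->InsertPacket(rtp_info, payload, 0);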
@@ -442,17 +515,17 @@ void NetEqDecodingTest::PopulateCng(int frame_index,
*payload_len = 1; // Only noise level, no spectral parameters.
}
-#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISAC)) && \
+#if !defined(WEBRTC_IOS) && !defined(WEBRTC_ANDROID) && \
+ defined(WEBRTC_NETEQ_UNITTEST_BITEXACT) && \
+ (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX)) && \
defined(WEBRTC_CODEC_ILBC) && defined(WEBRTC_CODEC_G722)
-#define IF_ALL_CODECS(x) x
+#define MAYBE_TestBitExactness TestBitExactness
#else
-#define IF_ALL_CODECS(x) DISABLED_##x
+#define MAYBE_TestBitExactness DISABLED_TestBitExactness
#endif
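The MAYBE_ macros defined above are the stock gtest idiom that replaces the nested DISABLED_ON_*()/IF_*() wrappers: the test body is always compiled, and the name it is registered under decides whether it runs. The pattern in isolation (names illustrative):

#if defined(FEATURE_AVAILABLE)
#define MAYBE_MyTest MyTest
#else
#define MAYBE_MyTest DISABLED_MyTest  // compiled, but skipped at run time
#endif
TEST_F(MyFixture, MAYBE_MyTest) { /* ... */ }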
-
-TEST_F(NetEqDecodingTest,
- DISABLED_ON_IOS(DISABLED_ON_ANDROID(IF_ALL_CODECS(TestBitExactness)))) {
- const std::string input_rtp_file = webrtc::test::ProjectRootPath() +
- "resources/audio_coding/neteq_universal_new.rtp";
+TEST_F(NetEqDecodingTest, MAYBE_TestBitExactness) {
+ const std::string input_rtp_file =
+ webrtc::test::ResourcePath("audio_coding/neteq_universal_new", "rtp");
// Note that neteq4_universal_ref.pcm and neteq4_universal_ref_win_32.pcm
// are identical. The latter could have been removed, but if clients still
// have a copy of the file, the test will fail.
@@ -480,6 +553,34 @@ TEST_F(NetEqDecodingTest,
}
}
+#if !defined(WEBRTC_IOS) && !defined(WEBRTC_ANDROID) && \
+ defined(WEBRTC_NETEQ_UNITTEST_BITEXACT) && \
+ defined(WEBRTC_CODEC_OPUS)
+#define MAYBE_TestOpusBitExactness TestOpusBitExactness
+#else
+#define MAYBE_TestOpusBitExactness DISABLED_TestOpusBitExactness
+#endif
+TEST_F(NetEqDecodingTest, MAYBE_TestOpusBitExactness) {
+ const std::string input_rtp_file =
+ webrtc::test::ResourcePath("audio_coding/neteq_opus", "rtp");
+ const std::string input_ref_file =
+ webrtc::test::ResourcePath("audio_coding/neteq4_opus_ref", "pcm");
+ const std::string network_stat_ref_file =
+ webrtc::test::ResourcePath("audio_coding/neteq4_opus_network_stats",
+ "dat");
+ const std::string rtcp_stat_ref_file =
+ webrtc::test::ResourcePath("audio_coding/neteq4_opus_rtcp_stats", "dat");
+
+ if (FLAGS_gen_ref) {
+ DecodeAndCompare(input_rtp_file, "", "", "");
+ } else {
+ DecodeAndCompare(input_rtp_file,
+ input_ref_file,
+ network_stat_ref_file,
+ rtcp_stat_ref_file);
+ }
+}
+
// Use fax mode to avoid time-scaling. This is to simplify the testing of
// packet waiting times in the packet buffer.
class NetEqDecodingTestFaxMode : public NetEqDecodingTest {
@@ -495,22 +596,19 @@ TEST_F(NetEqDecodingTestFaxMode, TestFrameWaitingTimeStatistics) {
const size_t kSamples = 10 * 16;
const size_t kPayloadBytes = kSamples * 2;
for (size_t i = 0; i < num_frames; ++i) {
- uint16_t payload[kSamples] = {0};
+ const uint8_t payload[kPayloadBytes] = {0};
WebRtcRTPHeader rtp_info;
rtp_info.header.sequenceNumber = i;
rtp_info.header.timestamp = i * kSamples;
rtp_info.header.ssrc = 0x1234; // Just an arbitrary SSRC.
rtp_info.header.payloadType = 94; // PCM16b WB codec.
rtp_info.header.markerBit = 0;
- ASSERT_EQ(0, neteq_->InsertPacket(
- rtp_info,
- reinterpret_cast<uint8_t*>(payload),
- kPayloadBytes, 0));
+ ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, 0));
}
// Pull out all data.
for (size_t i = 0; i < num_frames; ++i) {
size_t out_len;
- int num_channels;
+ size_t num_channels;
NetEqOutputType type;
ASSERT_EQ(0, neteq_->GetAudio(kMaxBlockSize, out_data_, &out_len,
&num_channels, &type));
@@ -549,13 +647,13 @@ TEST_F(NetEqDecodingTest, TestAverageInterArrivalTimeNegative) {
uint8_t payload[kPayloadBytes] = {0};
WebRtcRTPHeader rtp_info;
PopulateRtpInfo(frame_index, frame_index * kSamples, &rtp_info);
- ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes, 0));
+ ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, 0));
++frame_index;
}
// Pull out data once.
size_t out_len;
- int num_channels;
+ size_t num_channels;
NetEqOutputType type;
ASSERT_EQ(0, neteq_->GetAudio(kMaxBlockSize, out_data_, &out_len,
&num_channels, &type));
@@ -580,13 +678,13 @@ TEST_F(NetEqDecodingTest, TestAverageInterArrivalTimePositive) {
uint8_t payload[kPayloadBytes] = {0};
WebRtcRTPHeader rtp_info;
PopulateRtpInfo(frame_index, frame_index * kSamples, &rtp_info);
- ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes, 0));
+ ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, 0));
++frame_index;
}
// Pull out data once.
size_t out_len;
- int num_channels;
+ size_t num_channels;
NetEqOutputType type;
ASSERT_EQ(0, neteq_->GetAudio(kMaxBlockSize, out_data_, &out_len,
&num_channels, &type));
@@ -611,7 +709,7 @@ void NetEqDecodingTest::LongCngWithClockDrift(double drift_factor,
double next_input_time_ms = 0.0;
double t_ms;
size_t out_len;
- int num_channels;
+ size_t num_channels;
NetEqOutputType type;
// Insert speech for 5 seconds.
@@ -623,7 +721,7 @@ void NetEqDecodingTest::LongCngWithClockDrift(double drift_factor,
uint8_t payload[kPayloadBytes] = {0};
WebRtcRTPHeader rtp_info;
PopulateRtpInfo(seq_no, timestamp, &rtp_info);
- ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes, 0));
+ ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, 0));
++seq_no;
timestamp += kSamples;
next_input_time_ms += static_cast<double>(kFrameSizeMs) * drift_factor;
@@ -649,7 +747,9 @@ void NetEqDecodingTest::LongCngWithClockDrift(double drift_factor,
size_t payload_len;
WebRtcRTPHeader rtp_info;
PopulateCng(seq_no, timestamp, &rtp_info, payload, &payload_len);
- ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, payload_len, 0));
+ ASSERT_EQ(0, neteq_->InsertPacket(
+ rtp_info,
+ rtc::ArrayView<const uint8_t>(payload, payload_len), 0));
++seq_no;
timestamp += kCngPeriodSamples;
next_input_time_ms += static_cast<double>(kCngPeriodMs) * drift_factor;
@@ -696,7 +796,9 @@ void NetEqDecodingTest::LongCngWithClockDrift(double drift_factor,
size_t payload_len;
WebRtcRTPHeader rtp_info;
PopulateCng(seq_no, timestamp, &rtp_info, payload, &payload_len);
- ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, payload_len, 0));
+ ASSERT_EQ(0, neteq_->InsertPacket(
+ rtp_info,
+ rtc::ArrayView<const uint8_t>(payload, payload_len), 0));
++seq_no;
timestamp += kCngPeriodSamples;
next_input_time_ms += kCngPeriodMs * drift_factor;
@@ -712,7 +814,7 @@ void NetEqDecodingTest::LongCngWithClockDrift(double drift_factor,
uint8_t payload[kPayloadBytes] = {0};
WebRtcRTPHeader rtp_info;
PopulateRtpInfo(seq_no, timestamp, &rtp_info);
- ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes, 0));
+ ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, 0));
++seq_no;
timestamp += kSamples;
next_input_time_ms += kFrameSizeMs * drift_factor;
@@ -823,31 +925,30 @@ TEST_F(NetEqDecodingTest, UnknownPayloadType) {
WebRtcRTPHeader rtp_info;
PopulateRtpInfo(0, 0, &rtp_info);
rtp_info.header.payloadType = 1; // Not registered as a decoder.
- EXPECT_EQ(NetEq::kFail,
- neteq_->InsertPacket(rtp_info, payload, kPayloadBytes, 0));
+ EXPECT_EQ(NetEq::kFail, neteq_->InsertPacket(rtp_info, payload, 0));
EXPECT_EQ(NetEq::kUnknownRtpPayloadType, neteq_->LastError());
}
-#if defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX)
-#define IF_ISAC(x) x
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_DecoderError DISABLED_DecoderError
#else
-#define IF_ISAC(x) DISABLED_##x
+#define MAYBE_DecoderError DecoderError
#endif
-
-TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(IF_ISAC(DecoderError))) {
+#if defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX)
+TEST_F(NetEqDecodingTest, MAYBE_DecoderError) {
const size_t kPayloadBytes = 100;
uint8_t payload[kPayloadBytes] = {0};
WebRtcRTPHeader rtp_info;
PopulateRtpInfo(0, 0, &rtp_info);
rtp_info.header.payloadType = 103; // iSAC, but the payload is invalid.
- EXPECT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes, 0));
+ EXPECT_EQ(0, neteq_->InsertPacket(rtp_info, payload, 0));
NetEqOutputType type;
// Set all of |out_data_| to 1, and verify that it was set to 0 by the call
// to GetAudio.
for (size_t i = 0; i < kMaxBlockSize; ++i) {
out_data_[i] = 1;
}
- int num_channels;
+ size_t num_channels;
size_t samples_per_channel;
EXPECT_EQ(NetEq::kFail,
neteq_->GetAudio(kMaxBlockSize, out_data_,
@@ -872,6 +973,7 @@ TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(IF_ISAC(DecoderError))) {
EXPECT_EQ(1, out_data_[i]);
}
}
+#endif
TEST_F(NetEqDecodingTest, GetAudioBeforeInsertPacket) {
NetEqOutputType type;
@@ -880,7 +982,7 @@ TEST_F(NetEqDecodingTest, GetAudioBeforeInsertPacket) {
for (size_t i = 0; i < kMaxBlockSize; ++i) {
out_data_[i] = 1;
}
- int num_channels;
+ size_t num_channels;
size_t samples_per_channel;
EXPECT_EQ(0, neteq_->GetAudio(kMaxBlockSize, out_data_,
&samples_per_channel,
@@ -894,6 +996,8 @@ TEST_F(NetEqDecodingTest, GetAudioBeforeInsertPacket) {
SCOPED_TRACE(ss.str()); // Print out the parameter values on failure.
EXPECT_EQ(0, out_data_[i]);
}
+ // Verify that the sample rate did not change from the initial configuration.
+ EXPECT_EQ(config_.sample_rate_hz, neteq_->last_output_sample_rate_hz());
}
class NetEqBgnTest : public NetEqDecodingTest {
@@ -934,27 +1038,29 @@ class NetEqBgnTest : public NetEqDecodingTest {
PopulateRtpInfo(0, 0, &rtp_info);
rtp_info.header.payloadType = payload_type;
- int number_channels = 0;
+ size_t number_channels = 0;
size_t samples_per_channel = 0;
uint32_t receive_timestamp = 0;
for (int n = 0; n < 10; ++n) { // Insert few packets and get audio.
- size_t enc_len_bytes = WebRtcPcm16b_Encode(
- input.GetNextBlock(), expected_samples_per_channel, payload);
+ auto block = input.GetNextBlock();
+ ASSERT_EQ(expected_samples_per_channel, block.size());
+ size_t enc_len_bytes =
+ WebRtcPcm16b_Encode(block.data(), block.size(), payload);
ASSERT_EQ(enc_len_bytes, expected_samples_per_channel * 2);
number_channels = 0;
samples_per_channel = 0;
- ASSERT_EQ(0,
- neteq_->InsertPacket(rtp_info, payload, enc_len_bytes,
- receive_timestamp));
+ ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, rtc::ArrayView<const uint8_t>(
+ payload, enc_len_bytes),
+ receive_timestamp));
ASSERT_EQ(0,
neteq_->GetAudio(kBlockSize32kHz,
output,
&samples_per_channel,
&number_channels,
&type));
- ASSERT_EQ(1, number_channels);
+ ASSERT_EQ(1u, number_channels);
ASSERT_EQ(expected_samples_per_channel, samples_per_channel);
ASSERT_EQ(kOutputNormal, type);
@@ -976,7 +1082,7 @@ class NetEqBgnTest : public NetEqDecodingTest {
&samples_per_channel,
&number_channels,
&type));
- ASSERT_EQ(1, number_channels);
+ ASSERT_EQ(1u, number_channels);
ASSERT_EQ(expected_samples_per_channel, samples_per_channel);
// To be able to test the fading of background noise we need at least to
@@ -997,7 +1103,7 @@ class NetEqBgnTest : public NetEqDecodingTest {
&samples_per_channel,
&number_channels,
&type));
- ASSERT_EQ(1, number_channels);
+ ASSERT_EQ(1u, number_channels);
ASSERT_EQ(expected_samples_per_channel, samples_per_channel);
if (type == kOutputPLCtoCNG) {
plc_to_cng = true;
@@ -1065,7 +1171,8 @@ TEST_F(NetEqBgnTestFade, RunTest) {
CheckBgn(32000);
}
-TEST_F(NetEqDecodingTest, IF_ISAC(SyncPacketInsert)) {
+#if defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX)
+TEST_F(NetEqDecodingTest, SyncPacketInsert) {
WebRtcRTPHeader rtp_info;
uint32_t receive_timestamp = 0;
// For the readability use the following payloads instead of the defaults of
@@ -1081,20 +1188,20 @@ TEST_F(NetEqDecodingTest, IF_ISAC(SyncPacketInsert)) {
// Register decoders.
ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderPCM16Bwb,
- kPcm16WbPayloadType));
+ "pcm16-wb", kPcm16WbPayloadType));
ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderCNGnb,
- kCngNbPayloadType));
+ "cng-nb", kCngNbPayloadType));
ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderCNGwb,
- kCngWbPayloadType));
+ "cng-wb", kCngWbPayloadType));
ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderCNGswb32kHz,
- kCngSwb32PayloadType));
+ "cng-swb32", kCngSwb32PayloadType));
ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderCNGswb48kHz,
- kCngSwb48PayloadType));
- ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderAVT,
+ "cng-swb48", kCngSwb48PayloadType));
+ ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderAVT, "avt",
kAvtPayloadType));
- ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderRED,
+ ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderRED, "red",
kRedPayloadType));
- ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderISAC,
+ ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderISAC, "isac",
kIsacPayloadType));
PopulateRtpInfo(0, 0, &rtp_info);
@@ -1106,8 +1213,7 @@ TEST_F(NetEqDecodingTest, IF_ISAC(SyncPacketInsert)) {
// Payload length of 10 ms PCM16 16 kHz.
const size_t kPayloadBytes = kBlockSize16kHz * sizeof(int16_t);
uint8_t payload[kPayloadBytes] = {0};
- ASSERT_EQ(0, neteq_->InsertPacket(
- rtp_info, payload, kPayloadBytes, receive_timestamp));
+ ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, receive_timestamp));
// Next packet. Last packet contained 10 ms audio.
rtp_info.header.sequenceNumber++;
@@ -1145,6 +1251,7 @@ TEST_F(NetEqDecodingTest, IF_ISAC(SyncPacketInsert)) {
--rtp_info.header.ssrc;
EXPECT_EQ(0, neteq_->InsertSyncPacket(rtp_info, receive_timestamp));
}
+#endif
// First insert several noise-like packets, then sync-packets. Decoding all
// packets should not produce errors, and statistics should not show any packet loss
@@ -1165,17 +1272,16 @@ TEST_F(NetEqDecodingTest, SyncPacketDecode) {
// Insert some packets which decode to noise. We are not interested in
// actual decoded values.
NetEqOutputType output_type;
- int num_channels;
+ size_t num_channels;
size_t samples_per_channel;
uint32_t receive_timestamp = 0;
for (int n = 0; n < 100; ++n) {
- ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes,
- receive_timestamp));
+ ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, receive_timestamp));
ASSERT_EQ(0, neteq_->GetAudio(kBlockSize16kHz, decoded,
&samples_per_channel, &num_channels,
&output_type));
ASSERT_EQ(kBlockSize16kHz, samples_per_channel);
- ASSERT_EQ(1, num_channels);
+ ASSERT_EQ(1u, num_channels);
rtp_info.header.sequenceNumber++;
rtp_info.header.timestamp += kBlockSize16kHz;
@@ -1193,7 +1299,7 @@ TEST_F(NetEqDecodingTest, SyncPacketDecode) {
&samples_per_channel, &num_channels,
&output_type));
ASSERT_EQ(kBlockSize16kHz, samples_per_channel);
- ASSERT_EQ(1, num_channels);
+ ASSERT_EQ(1u, num_channels);
if (n > algorithmic_frame_delay) {
EXPECT_TRUE(IsAllZero(decoded, samples_per_channel * num_channels));
}
@@ -1205,8 +1311,7 @@ TEST_F(NetEqDecodingTest, SyncPacketDecode) {
// We insert regular packets; if sync packets are not correctly buffered then
// network statistics would show some packet loss.
for (int n = 0; n <= algorithmic_frame_delay + 10; ++n) {
- ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes,
- receive_timestamp));
+ ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, receive_timestamp));
ASSERT_EQ(0, neteq_->GetAudio(kBlockSize16kHz, decoded,
&samples_per_channel, &num_channels,
&output_type));
@@ -1243,18 +1348,17 @@ TEST_F(NetEqDecodingTest, SyncPacketBufferSizeAndOverridenByNetworkPackets) {
// Insert some packets which decode to noise. We are not interested in
// actual decoded values.
NetEqOutputType output_type;
- int num_channels;
+ size_t num_channels;
size_t samples_per_channel;
uint32_t receive_timestamp = 0;
int algorithmic_frame_delay = algorithmic_delay_ms_ / 10 + 1;
for (int n = 0; n < algorithmic_frame_delay; ++n) {
- ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes,
- receive_timestamp));
+ ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, receive_timestamp));
ASSERT_EQ(0, neteq_->GetAudio(kBlockSize16kHz, decoded,
&samples_per_channel, &num_channels,
&output_type));
ASSERT_EQ(kBlockSize16kHz, samples_per_channel);
- ASSERT_EQ(1, num_channels);
+ ASSERT_EQ(1u, num_channels);
rtp_info.header.sequenceNumber++;
rtp_info.header.timestamp += kBlockSize16kHz;
receive_timestamp += kBlockSize16kHz;
@@ -1281,8 +1385,7 @@ TEST_F(NetEqDecodingTest, SyncPacketBufferSizeAndOverridenByNetworkPackets) {
// Insert.
for (int n = 0; n < kNumSyncPackets; ++n) {
- ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes,
- receive_timestamp));
+ ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, receive_timestamp));
rtp_info.header.sequenceNumber++;
rtp_info.header.timestamp += kBlockSize16kHz;
receive_timestamp += kBlockSize16kHz;
@@ -1294,7 +1397,7 @@ TEST_F(NetEqDecodingTest, SyncPacketBufferSizeAndOverridenByNetworkPackets) {
&samples_per_channel, &num_channels,
&output_type));
ASSERT_EQ(kBlockSize16kHz, samples_per_channel);
- ASSERT_EQ(1, num_channels);
+ ASSERT_EQ(1u, num_channels);
EXPECT_TRUE(IsAllNonZero(decoded, samples_per_channel * num_channels));
}
}
@@ -1312,7 +1415,7 @@ void NetEqDecodingTest::WrapTest(uint16_t start_seq_no,
const size_t kPayloadBytes = kSamples * sizeof(int16_t);
double next_input_time_ms = 0.0;
int16_t decoded[kBlockSize16kHz];
- int num_channels;
+ size_t num_channels;
size_t samples_per_channel;
NetEqOutputType output_type;
uint32_t receive_timestamp = 0;
@@ -1334,8 +1437,7 @@ void NetEqDecodingTest::WrapTest(uint16_t start_seq_no,
if (drop_seq_numbers.find(seq_no) == drop_seq_numbers.end()) {
// This sequence number was not in the set to drop. Insert it.
ASSERT_EQ(0,
- neteq_->InsertPacket(rtp_info, payload, kPayloadBytes,
- receive_timestamp));
+ neteq_->InsertPacket(rtp_info, payload, receive_timestamp));
++packets_inserted;
}
NetEqNetworkStatistics network_stats;
@@ -1366,7 +1468,7 @@ void NetEqDecodingTest::WrapTest(uint16_t start_seq_no,
&samples_per_channel, &num_channels,
&output_type));
ASSERT_EQ(kBlockSize16kHz, samples_per_channel);
- ASSERT_EQ(1, num_channels);
+ ASSERT_EQ(1u, num_channels);
// Expect delay (in samples) to be less than 2 packets.
EXPECT_LE(timestamp - PlayoutTimestamp(),
@@ -1417,13 +1519,13 @@ void NetEqDecodingTest::DuplicateCng() {
// Insert three speech packets. Three are needed to get the frame length
// correct.
size_t out_len;
- int num_channels;
+ size_t num_channels;
NetEqOutputType type;
uint8_t payload[kPayloadBytes] = {0};
WebRtcRTPHeader rtp_info;
for (int i = 0; i < 3; ++i) {
PopulateRtpInfo(seq_no, timestamp, &rtp_info);
- ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes, 0));
+ ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, 0));
++seq_no;
timestamp += kSamples;
@@ -1442,7 +1544,9 @@ void NetEqDecodingTest::DuplicateCng() {
size_t payload_len;
PopulateCng(seq_no, timestamp, &rtp_info, payload, &payload_len);
// This is the first time this CNG packet is inserted.
- ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, payload_len, 0));
+ ASSERT_EQ(
+ 0, neteq_->InsertPacket(
+ rtp_info, rtc::ArrayView<const uint8_t>(payload, payload_len), 0));
// Pull audio once and make sure CNG is played.
ASSERT_EQ(0,
@@ -1454,7 +1558,9 @@ void NetEqDecodingTest::DuplicateCng() {
// Insert the same CNG packet again. Note that at this point it is old, since
// we have already decoded the first copy of it.
- ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, payload_len, 0));
+ ASSERT_EQ(
+ 0, neteq_->InsertPacket(
+ rtp_info, rtc::ArrayView<const uint8_t>(payload, payload_len), 0));
// Pull audio until we have played |kCngPeriodMs| of CNG. Start at 10 ms since
// we have already pulled out CNG once.
@@ -1472,7 +1578,7 @@ void NetEqDecodingTest::DuplicateCng() {
++seq_no;
timestamp += kCngPeriodSamples;
PopulateRtpInfo(seq_no, timestamp, &rtp_info);
- ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes, 0));
+ ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, 0));
// Pull audio once and verify that the output is speech again.
ASSERT_EQ(0,
@@ -1507,14 +1613,16 @@ TEST_F(NetEqDecodingTest, CngFirst) {
WebRtcRTPHeader rtp_info;
PopulateCng(seq_no, timestamp, &rtp_info, payload, &payload_len);
- ASSERT_EQ(NetEq::kOK,
- neteq_->InsertPacket(rtp_info, payload, payload_len, 0));
+ ASSERT_EQ(
+ NetEq::kOK,
+ neteq_->InsertPacket(
+ rtp_info, rtc::ArrayView<const uint8_t>(payload, payload_len), 0));
++seq_no;
timestamp += kCngPeriodSamples;
// Pull audio once and make sure CNG is played.
size_t out_len;
- int num_channels;
+ size_t num_channels;
NetEqOutputType type;
ASSERT_EQ(0, neteq_->GetAudio(kMaxBlockSize, out_data_, &out_len,
&num_channels, &type));
@@ -1524,7 +1632,7 @@ TEST_F(NetEqDecodingTest, CngFirst) {
// Insert some speech packets.
for (int i = 0; i < 3; ++i) {
PopulateRtpInfo(seq_no, timestamp, &rtp_info);
- ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes, 0));
+ ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, 0));
++seq_no;
timestamp += kSamples;
diff --git a/webrtc/modules/audio_coding/neteq/neteq_unittest.proto b/webrtc/modules/audio_coding/neteq/neteq_unittest.proto
new file mode 100644
index 0000000000..4b59848eb2
--- /dev/null
+++ b/webrtc/modules/audio_coding/neteq/neteq_unittest.proto
@@ -0,0 +1,29 @@
+syntax = "proto2";
+option optimize_for = LITE_RUNTIME;
+package webrtc.neteq_unittest;
+
+message NetEqNetworkStatistics {
+ optional uint32 current_buffer_size_ms = 1;
+ optional uint32 preferred_buffer_size_ms = 2;
+ optional uint32 jitter_peaks_found = 3;
+ optional uint32 packet_loss_rate = 4;
+ optional uint32 packet_discard_rate = 5;
+ optional uint32 expand_rate = 6;
+ optional uint32 speech_expand_rate = 7;
+ optional uint32 preemptive_rate = 8;
+ optional uint32 accelerate_rate = 9;
+ optional uint32 secondary_decoded_rate = 10;
+ optional int32 clockdrift_ppm = 11;
+ optional uint64 added_zero_samples = 12;
+ optional int32 mean_waiting_time_ms = 13;
+ optional int32 median_waiting_time_ms = 14;
+ optional int32 min_waiting_time_ms = 15;
+ optional int32 max_waiting_time_ms = 16;
+}
+
+message RtcpStatistics {
+ optional uint32 fraction_lost = 1;
+ optional uint32 cumulative_lost = 2;
+ optional uint32 extended_max_sequence_number = 3;
+ optional uint32 jitter = 4;
+}
\ No newline at end of file
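These messages back the Convert() helpers earlier in the patch; optimize_for = LITE_RUNTIME keeps MessageLite's SerializeToString()/ParseFromString(), which is all the test needs. A round-trip sketch over the generated code (field values illustrative):

webrtc::neteq_unittest::RtcpStatistics stats;
stats.set_fraction_lost(3);
stats.set_jitter(7);

std::string wire;
stats.SerializeToString(&wire);   // written to disk via WriteMessage()

webrtc::neteq_unittest::RtcpStatistics parsed;
parsed.ParseFromString(wire);
// parsed.fraction_lost() == 3 && parsed.jitter() == 7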
diff --git a/webrtc/modules/audio_coding/neteq/normal.cc b/webrtc/modules/audio_coding/neteq/normal.cc
index ebecbf94bd..1b888f70d1 100644
--- a/webrtc/modules/audio_coding/neteq/normal.cc
+++ b/webrtc/modules/audio_coding/neteq/normal.cc
@@ -16,7 +16,7 @@
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
-#include "webrtc/modules/audio_coding/codecs/cng/include/webrtc_cng.h"
+#include "webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h"
#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
#include "webrtc/modules/audio_coding/neteq/background_noise.h"
#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
diff --git a/webrtc/modules/audio_coding/neteq/packet.h b/webrtc/modules/audio_coding/neteq/packet.h
index 723ed8b0a3..64b325e027 100644
--- a/webrtc/modules/audio_coding/neteq/packet.h
+++ b/webrtc/modules/audio_coding/neteq/packet.h
@@ -13,7 +13,7 @@
#include <list>
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/typedefs.h"
namespace webrtc {
diff --git a/webrtc/modules/audio_coding/neteq/payload_splitter_unittest.cc b/webrtc/modules/audio_coding/neteq/payload_splitter_unittest.cc
index bf26a8f517..07c4bac0b6 100644
--- a/webrtc/modules/audio_coding/neteq/payload_splitter_unittest.cc
+++ b/webrtc/modules/audio_coding/neteq/payload_splitter_unittest.cc
@@ -310,10 +310,10 @@ TEST(RedPayloadSplitter, CheckRedPayloads) {
// easier to just register the payload types and let the actual implementation
// do its job.
DecoderDatabase decoder_database;
- decoder_database.RegisterPayload(0, NetEqDecoder::kDecoderCNGnb);
- decoder_database.RegisterPayload(1, NetEqDecoder::kDecoderPCMu);
- decoder_database.RegisterPayload(2, NetEqDecoder::kDecoderAVT);
- decoder_database.RegisterPayload(3, NetEqDecoder::kDecoderILBC);
+ decoder_database.RegisterPayload(0, NetEqDecoder::kDecoderCNGnb, "cng-nb");
+ decoder_database.RegisterPayload(1, NetEqDecoder::kDecoderPCMu, "pcmu");
+ decoder_database.RegisterPayload(2, NetEqDecoder::kDecoderAVT, "avt");
+ decoder_database.RegisterPayload(3, NetEqDecoder::kDecoderILBC, "ilbc");
PayloadSplitter splitter;
splitter.CheckRedPayloads(&packet_list, decoder_database);
@@ -745,8 +745,8 @@ TEST(FecPayloadSplitter, MixedPayload) {
PacketList packet_list;
DecoderDatabase decoder_database;
- decoder_database.RegisterPayload(0, NetEqDecoder::kDecoderOpus);
- decoder_database.RegisterPayload(1, NetEqDecoder::kDecoderPCMu);
+ decoder_database.RegisterPayload(0, NetEqDecoder::kDecoderOpus, "opus");
+ decoder_database.RegisterPayload(1, NetEqDecoder::kDecoderPCMu, "pcmu");
Packet* packet = CreatePacket(0, 10, 0xFF, true);
packet_list.push_back(packet);
@@ -802,7 +802,7 @@ TEST(FecPayloadSplitter, EmbedFecInRed) {
const int kTimestampOffset = 20 * 48; // 20 ms * 48 kHz.
uint8_t payload_types[] = {0, 0};
- decoder_database.RegisterPayload(0, NetEqDecoder::kDecoderOpus);
+ decoder_database.RegisterPayload(0, NetEqDecoder::kDecoderOpus, "opus");
Packet* packet = CreateRedPayload(2, payload_types, kTimestampOffset, true);
packet_list.push_back(packet);
diff --git a/webrtc/modules/audio_coding/neteq/rtcp.cc b/webrtc/modules/audio_coding/neteq/rtcp.cc
index cf8e0280bb..7ef40bc814 100644
--- a/webrtc/modules/audio_coding/neteq/rtcp.cc
+++ b/webrtc/modules/audio_coding/neteq/rtcp.cc
@@ -15,7 +15,7 @@
#include <algorithm>
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
namespace webrtc {
diff --git a/webrtc/modules/audio_coding/neteq/statistics_calculator.cc b/webrtc/modules/audio_coding/neteq/statistics_calculator.cc
index e6a6fbf705..8f873762c5 100644
--- a/webrtc/modules/audio_coding/neteq/statistics_calculator.cc
+++ b/webrtc/modules/audio_coding/neteq/statistics_calculator.cc
@@ -50,7 +50,7 @@ void StatisticsCalculator::PeriodicUmaLogger::AdvanceClock(int step_ms) {
}
void StatisticsCalculator::PeriodicUmaLogger::LogToUma(int value) const {
- RTC_HISTOGRAM_COUNTS(uma_name_, value, 1, max_value_, 50);
+ RTC_HISTOGRAM_COUNTS_SPARSE(uma_name_, value, 1, max_value_, 50);
}
StatisticsCalculator::PeriodicUmaCount::PeriodicUmaCount(
@@ -187,9 +187,9 @@ void StatisticsCalculator::SecondaryDecodedSamples(int num_samples) {
}
void StatisticsCalculator::LogDelayedPacketOutageEvent(int outage_duration_ms) {
- RTC_HISTOGRAM_COUNTS("WebRTC.Audio.DelayedPacketOutageEventMs",
- outage_duration_ms, 1 /* min */, 2000 /* max */,
- 100 /* bucket count */);
+ RTC_HISTOGRAM_COUNTS_SPARSE("WebRTC.Audio.DelayedPacketOutageEventMs",
+ outage_duration_ms, 1 /* min */, 2000 /* max */,
+ 100 /* bucket count */);
delayed_packet_outage_counter_.RegisterSample();
}
diff --git a/webrtc/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.h b/webrtc/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.h
index 3fbce8be5c..56ed72fcee 100644
--- a/webrtc/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.h
+++ b/webrtc/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.h
@@ -14,7 +14,7 @@
#include <map>
#include <stdio.h>
#include "webrtc/typedefs.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
enum stereoModes {
stereoModeMono,
diff --git a/webrtc/modules/audio_coding/neteq/test/PayloadTypes.h b/webrtc/modules/audio_coding/neteq/test/PayloadTypes.h
index c46a3daece..aba525b162 100644
--- a/webrtc/modules/audio_coding/neteq/test/PayloadTypes.h
+++ b/webrtc/modules/audio_coding/neteq/test/PayloadTypes.h
@@ -39,7 +39,7 @@
#define NETEQ_CODEC_G722_1_16_PT 108
#define NETEQ_CODEC_G722_1_24_PT 109
#define NETEQ_CODEC_G722_1_32_PT 110
-#define NETEQ_CODEC_SC3_PT 111
+#define NETEQ_CODEC_OPUS_PT 111
#define NETEQ_CODEC_AMR_PT 112
#define NETEQ_CODEC_GSMEFR_PT 113
//#define NETEQ_CODEC_ILBCRCU_PT 114
diff --git a/webrtc/modules/audio_coding/neteq/test/RTPencode.cc b/webrtc/modules/audio_coding/neteq/test/RTPencode.cc
index cbb7436152..45586ee111 100644
--- a/webrtc/modules/audio_coding/neteq/test/RTPencode.cc
+++ b/webrtc/modules/audio_coding/neteq/test/RTPencode.cc
@@ -25,7 +25,9 @@
#include <algorithm>
+#include "webrtc/base/checks.h"
#include "webrtc/typedefs.h"
+
// needed for NetEqDecoder
#include "webrtc/modules/audio_coding/neteq/audio_decoder_impl.h"
#include "webrtc/modules/audio_coding/neteq/include/neteq.h"
@@ -36,6 +38,10 @@
#include "PayloadTypes.h"
+namespace {
+const size_t kRtpDataSize = 8000;
+}
+
/*********************/
/* Misc. definitions */
/*********************/
@@ -126,10 +132,10 @@ void stereoInterleave(unsigned char* data, size_t dataLen, size_t stride);
#include "webrtc_vad.h"
#if ((defined CODEC_PCM16B) || (defined NETEQ_ARBITRARY_CODEC))
-#include "pcm16b.h"
+#include "webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.h"
#endif
#ifdef CODEC_G711
-#include "g711_interface.h"
+#include "webrtc/modules/audio_coding/codecs/g711/g711_interface.h"
#endif
#ifdef CODEC_G729
#include "G729Interface.h"
@@ -146,19 +152,19 @@ void stereoInterleave(unsigned char* data, size_t dataLen, size_t stride);
#include "AMRWBCreation.h"
#endif
#ifdef CODEC_ILBC
-#include "ilbc.h"
+#include "webrtc/modules/audio_coding/codecs/ilbc/ilbc.h"
#endif
#if (defined CODEC_ISAC || defined CODEC_ISAC_SWB)
-#include "isac.h"
+#include "webrtc/modules/audio_coding/codecs/isac/main/include/isac.h"
#endif
#ifdef NETEQ_ISACFIX_CODEC
-#include "isacfix.h"
+#include "webrtc/modules/audio_coding/codecs/isac/fix/include/isacfix.h"
#ifdef CODEC_ISAC
#error Cannot have both ISAC and ISACfix defined. Please de-select one.
#endif
#endif
#ifdef CODEC_G722
-#include "g722_interface.h"
+#include "webrtc/modules/audio_coding/codecs/g722/g722_interface.h"
#endif
#ifdef CODEC_G722_1_24
#include "G722_1Interface.h"
@@ -188,11 +194,14 @@ void stereoInterleave(unsigned char* data, size_t dataLen, size_t stride);
#endif
#if (defined(CODEC_CNGCODEC8) || defined(CODEC_CNGCODEC16) || \
defined(CODEC_CNGCODEC32) || defined(CODEC_CNGCODEC48))
-#include "webrtc_cng.h"
+#include "webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h"
#endif
#if ((defined CODEC_SPEEX_8) || (defined CODEC_SPEEX_16))
#include "SpeexInterface.h"
#endif
+#ifdef CODEC_OPUS
+#include "webrtc/modules/audio_coding/codecs/opus/opus_interface.h"
+#endif
/***********************************/
/* Global codec instance variables */
@@ -264,6 +273,9 @@ SPEEX_encinst_t* SPEEX8enc_inst[2];
#ifdef CODEC_SPEEX_16
SPEEX_encinst_t* SPEEX16enc_inst[2];
#endif
+#ifdef CODEC_OPUS
+OpusEncInst* opus_inst[2];
+#endif
int main(int argc, char* argv[]) {
size_t packet_size;
@@ -275,7 +287,7 @@ int main(int argc, char* argv[]) {
int useRed = 0;
size_t len, enc_len;
int16_t org_data[4000];
- unsigned char rtp_data[8000];
+ unsigned char rtp_data[kRtpDataSize];
int16_t seqNo = 0xFFF;
uint32_t ssrc = 1235412312;
uint32_t timestamp = 0xAC1245;
@@ -286,12 +298,12 @@ int main(int argc, char* argv[]) {
uint32_t red_TS[2] = {0};
uint16_t red_len[2] = {0};
size_t RTPheaderLen = 12;
- uint8_t red_data[8000];
+ uint8_t red_data[kRtpDataSize];
#ifdef INSERT_OLD_PACKETS
uint16_t old_length, old_plen;
size_t old_enc_len;
int first_old_packet = 1;
- unsigned char old_rtp_data[8000];
+ unsigned char old_rtp_data[kRtpDataSize];
size_t packet_age = 0;
#endif
#ifdef INSERT_DTMF_PACKETS
@@ -429,6 +441,10 @@ int main(int argc, char* argv[]) {
printf(" : red_isac Redundancy RTP packet with 2*iSAC "
"frames\n");
#endif
+#endif // CODEC_RED
+#ifdef CODEC_OPUS
+ printf(" : opus Opus codec with FEC (48kHz, 32kbps, FEC"
+ " on and tuned for 5%% packet losses)\n");
#endif
printf("\n");
@@ -880,6 +896,10 @@ void NetEQTest_GetCodec_and_PT(char* name,
*PT = NETEQ_CODEC_ISAC_PT; /* this will be the PT for the sub-headers */
*fs = 16000;
*useRed = 1;
+ } else if (!strcmp(name, "opus")) {
+ *codec = webrtc::NetEqDecoder::kDecoderOpus;
+ *PT = NETEQ_CODEC_OPUS_PT; /* this will be the PT for the sub-headers */
+ *fs = 48000;
} else {
printf("Error: Not a supported codec (%s)\n", name);
exit(0);
@@ -1411,12 +1431,23 @@ int NetEQTest_init_coders(webrtc::NetEqDecoder coder,
}
break;
#endif
+#ifdef CODEC_OPUS
+ case webrtc::NetEqDecoder::kDecoderOpus:
+ ok = WebRtcOpus_EncoderCreate(&opus_inst[k], 1, 0);
+ if (ok != 0) {
+ printf("Error: Couldn't allocate memory for Opus encoding "
+ "instance\n");
+ exit(0);
+ }
+ WebRtcOpus_EnableFec(opus_inst[k]);
+ WebRtcOpus_SetPacketLossRate(opus_inst[k], 5);
+ break;
+#endif
default:
printf("Error: unknown codec in call to NetEQTest_init_coders.\n");
exit(0);
break;
}
-
if (ok != 0) {
return (ok);
}
@@ -1543,6 +1574,11 @@ int NetEQTest_free_coders(webrtc::NetEqDecoder coder, size_t numChannels) {
WebRtcGSMFR_FreeEnc(GSMFRenc_inst[k]);
break;
#endif
+#ifdef CODEC_OPUS
+ case webrtc::NetEqDecoder::kDecoderOpus:
+ WebRtcOpus_EncoderFree(opus_inst[k]);
+ break;
+#endif
default:
printf("Error: unknown codec in call to NetEQTest_init_coders.\n");
exit(0);
@@ -1687,6 +1723,11 @@ size_t NetEQTest_encode(webrtc::NetEqDecoder coder,
cdlen = static_cast<size_t>(res);
}
#endif
+#ifdef CODEC_OPUS
+ cdlen = WebRtcOpus_Encode(opus_inst[k], indata, frameLen, kRtpDataSize - 12,
+ encoded);
+ RTC_CHECK_GT(cdlen, 0u);
+#endif
indata += frameLen;
encoded += cdlen;
totalLen += cdlen;
diff --git a/webrtc/modules/audio_coding/neteq/test/neteq_ilbc_quality_test.cc b/webrtc/modules/audio_coding/neteq/test/neteq_ilbc_quality_test.cc
index 2042e0d2b8..0c09e92b4d 100644
--- a/webrtc/modules/audio_coding/neteq/test/neteq_ilbc_quality_test.cc
+++ b/webrtc/modules/audio_coding/neteq/test/neteq_ilbc_quality_test.cc
@@ -11,7 +11,7 @@
#include "webrtc/base/checks.h"
#include "webrtc/base/safe_conversions.h"
#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/audio_coding/codecs/ilbc/include/audio_encoder_ilbc.h"
+#include "webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h"
#include "webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.h"
#include "webrtc/test/testsupport/fileutils.h"
@@ -50,7 +50,7 @@ class NetEqIlbcQualityTest : public NetEqQualityTest {
NetEqDecoder::kDecoderILBC) {}
void SetUp() override {
- ASSERT_EQ(1, channels_) << "iLBC supports only mono audio.";
+ ASSERT_EQ(1u, channels_) << "iLBC supports only mono audio.";
AudioEncoderIlbc::Config config;
config.frame_size_ms = FLAGS_frame_size_ms;
encoder_.reset(new AudioEncoderIlbc(config));
@@ -66,8 +66,10 @@ class NetEqIlbcQualityTest : public NetEqQualityTest {
uint32_t dummy_timestamp = 0;
AudioEncoder::EncodedInfo info;
do {
- info = encoder_->Encode(dummy_timestamp, &in_data[encoded_samples],
- kFrameSizeSamples, max_bytes, payload);
+ info = encoder_->Encode(dummy_timestamp,
+ rtc::ArrayView<const int16_t>(
+ in_data + encoded_samples, kFrameSizeSamples),
+ max_bytes, payload);
encoded_samples += kFrameSizeSamples;
} while (info.encoded_bytes == 0);
return rtc::checked_cast<int>(info.encoded_bytes);
diff --git a/webrtc/modules/audio_coding/neteq/test/neteq_isac_quality_test.cc b/webrtc/modules/audio_coding/neteq/test/neteq_isac_quality_test.cc
index 66b0903f66..4ccebb3e66 100644
--- a/webrtc/modules/audio_coding/neteq/test/neteq_isac_quality_test.cc
+++ b/webrtc/modules/audio_coding/neteq/test/neteq_isac_quality_test.cc
@@ -59,7 +59,7 @@ NetEqIsacQualityTest::NetEqIsacQualityTest()
bit_rate_kbps_(FLAGS_bit_rate_kbps) {}
void NetEqIsacQualityTest::SetUp() {
- ASSERT_EQ(1, channels_) << "iSAC supports only mono audio.";
+ ASSERT_EQ(1u, channels_) << "iSAC supports only mono audio.";
// Create encoder memory.
WebRtcIsacfix_Create(&isac_encoder_);
ASSERT_TRUE(isac_encoder_ != NULL);
diff --git a/webrtc/modules/audio_coding/neteq/test/neteq_opus_quality_test.cc b/webrtc/modules/audio_coding/neteq/test/neteq_opus_quality_test.cc
index 5e8b2297d4..5ab55ba9e8 100644
--- a/webrtc/modules/audio_coding/neteq/test/neteq_opus_quality_test.cc
+++ b/webrtc/modules/audio_coding/neteq/test/neteq_opus_quality_test.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/codecs/opus/include/opus_interface.h"
+#include "webrtc/modules/audio_coding/codecs/opus/opus_interface.h"
#include "webrtc/modules/audio_coding/codecs/opus/opus_inst.h"
#include "webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.h"
diff --git a/webrtc/modules/audio_coding/neteq/test/neteq_pcmu_quality_test.cc b/webrtc/modules/audio_coding/neteq/test/neteq_pcmu_quality_test.cc
index 422a9fa6eb..ac478ab5ac 100644
--- a/webrtc/modules/audio_coding/neteq/test/neteq_pcmu_quality_test.cc
+++ b/webrtc/modules/audio_coding/neteq/test/neteq_pcmu_quality_test.cc
@@ -11,7 +11,7 @@
#include "webrtc/base/checks.h"
#include "webrtc/base/safe_conversions.h"
#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/audio_coding/codecs/g711/include/audio_encoder_pcm.h"
+#include "webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.h"
#include "webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.h"
#include "webrtc/test/testsupport/fileutils.h"
@@ -50,7 +50,7 @@ class NetEqPcmuQualityTest : public NetEqQualityTest {
NetEqDecoder::kDecoderPCMu) {}
void SetUp() override {
- ASSERT_EQ(1, channels_) << "PCMu supports only mono audio.";
+ ASSERT_EQ(1u, channels_) << "PCMu supports only mono audio.";
AudioEncoderPcmU::Config config;
config.frame_size_ms = FLAGS_frame_size_ms;
encoder_.reset(new AudioEncoderPcmU(config));
@@ -66,8 +66,10 @@ class NetEqPcmuQualityTest : public NetEqQualityTest {
uint32_t dummy_timestamp = 0;
AudioEncoder::EncodedInfo info;
do {
- info = encoder_->Encode(dummy_timestamp, &in_data[encoded_samples],
- kFrameSizeSamples, max_bytes, payload);
+ info = encoder_->Encode(dummy_timestamp,
+ rtc::ArrayView<const int16_t>(
+ in_data + encoded_samples, kFrameSizeSamples),
+ max_bytes, payload);
encoded_samples += kFrameSizeSamples;
} while (info.encoded_bytes == 0);
return rtc::checked_cast<int>(info.encoded_bytes);
diff --git a/webrtc/modules/audio_coding/neteq/timestamp_scaler.cc b/webrtc/modules/audio_coding/neteq/timestamp_scaler.cc
index eb69ac7889..c1abdc30f5 100644
--- a/webrtc/modules/audio_coding/neteq/timestamp_scaler.cc
+++ b/webrtc/modules/audio_coding/neteq/timestamp_scaler.cc
@@ -52,19 +52,11 @@ uint32_t TimestampScaler::ToInternal(uint32_t external_timestamp,
denominator_ = 1;
break;
}
- case NetEqDecoder::kDecoderCNGswb48kHz: {
- // Use timestamp scaling with factor 2/3 (32 kHz sample rate, but RTP
- // timestamps run on 48 kHz).
- // TODO(tlegrand): Remove scaling for kDecoderCNGswb48kHz once ACM has
- // full 48 kHz support.
- numerator_ = 2;
- denominator_ = 3;
- break;
- }
case NetEqDecoder::kDecoderAVT:
case NetEqDecoder::kDecoderCNGnb:
case NetEqDecoder::kDecoderCNGwb:
- case NetEqDecoder::kDecoderCNGswb32kHz: {
+ case NetEqDecoder::kDecoderCNGswb32kHz:
+ case NetEqDecoder::kDecoderCNGswb48kHz: {
// Do not change the timestamp scaling settings for DTMF or CNG.
break;
}
@@ -87,8 +79,6 @@ uint32_t TimestampScaler::ToInternal(uint32_t external_timestamp,
assert(denominator_ > 0); // Should not be possible.
external_ref_ = external_timestamp;
internal_ref_ += (external_diff * numerator_) / denominator_;
- LOG(LS_VERBOSE) << "Converting timestamp: " << external_timestamp <<
- " -> " << internal_ref_;
return internal_ref_;
} else {
// No scaling.
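For context on the arithmetic in ToInternal() above: the removed branch mapped 48 kHz RTP timestamps onto a 32 kHz internal clock with numerator 2 and denominator 3. A worked instance of the update rule, values illustrative:

// internal_ref_ += (external_diff * numerator_) / denominator_;
// A 20 ms packet at 48 kHz advances the external clock by 960 ticks:
//   960 * 2 / 3 = 640, i.e. 20 ms at the 32 kHz internal rate.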
diff --git a/webrtc/modules/audio_coding/neteq/tools/audio_loop.cc b/webrtc/modules/audio_coding/neteq/tools/audio_loop.cc
index 2d2a7e3dd4..eed95753f0 100644
--- a/webrtc/modules/audio_coding/neteq/tools/audio_loop.cc
+++ b/webrtc/modules/audio_coding/neteq/tools/audio_loop.cc
@@ -43,13 +43,14 @@ bool AudioLoop::Init(const std::string file_name,
return true;
}
-const int16_t* AudioLoop::GetNextBlock() {
+rtc::ArrayView<const int16_t> AudioLoop::GetNextBlock() {
// Check that the AudioLoop is initialized.
- if (block_length_samples_ == 0) return NULL;
+ if (block_length_samples_ == 0)
+ return rtc::ArrayView<const int16_t>();
const int16_t* output_ptr = &audio_array_[next_index_];
next_index_ = (next_index_ + block_length_samples_) % loop_length_samples_;
- return output_ptr;
+ return rtc::ArrayView<const int16_t>(output_ptr, block_length_samples_);
}
diff --git a/webrtc/modules/audio_coding/neteq/tools/audio_loop.h b/webrtc/modules/audio_coding/neteq/tools/audio_loop.h
index a897ee5aef..14e20f68ac 100644
--- a/webrtc/modules/audio_coding/neteq/tools/audio_loop.h
+++ b/webrtc/modules/audio_coding/neteq/tools/audio_loop.h
@@ -13,6 +13,7 @@
#include <string>
+#include "webrtc/base/array_view.h"
#include "webrtc/base/constructormagic.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/typedefs.h"
@@ -40,10 +41,9 @@ class AudioLoop {
bool Init(const std::string file_name, size_t max_loop_length_samples,
size_t block_length_samples);
- // Returns a pointer to the next block of audio. The number given as
- // |block_length_samples| to the Init() function determines how many samples
- // that can be safely read from the pointer.
- const int16_t* GetNextBlock();
+ // Returns a (pointer,size) pair for the next block of audio. The size is
+ // equal to the |block_length_samples| Init() argument.
+ rtc::ArrayView<const int16_t> GetNextBlock();
private:
size_t next_index_;
diff --git a/webrtc/modules/audio_coding/neteq/tools/audio_sink.h b/webrtc/modules/audio_coding/neteq/tools/audio_sink.h
index 3bd2df5ca8..489a8b2ad8 100644
--- a/webrtc/modules/audio_coding/neteq/tools/audio_sink.h
+++ b/webrtc/modules/audio_coding/neteq/tools/audio_sink.h
@@ -12,7 +12,7 @@
#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_AUDIO_SINK_H_
#include "webrtc/base/constructormagic.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/typedefs.h"
namespace webrtc {
diff --git a/webrtc/modules/audio_coding/neteq/tools/constant_pcm_packet_source.cc b/webrtc/modules/audio_coding/neteq/tools/constant_pcm_packet_source.cc
index dc07030dd6..5a9f79f877 100644
--- a/webrtc/modules/audio_coding/neteq/tools/constant_pcm_packet_source.cc
+++ b/webrtc/modules/audio_coding/neteq/tools/constant_pcm_packet_source.cc
@@ -13,7 +13,7 @@
#include <algorithm>
#include "webrtc/base/checks.h"
-#include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h"
+#include "webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.h"
#include "webrtc/modules/audio_coding/neteq/tools/packet.h"
namespace webrtc {
diff --git a/webrtc/modules/audio_coding/neteq/tools/neteq_external_decoder_test.cc b/webrtc/modules/audio_coding/neteq/tools/neteq_external_decoder_test.cc
index 49750c26c8..694b9ed153 100644
--- a/webrtc/modules/audio_coding/neteq/tools/neteq_external_decoder_test.cc
+++ b/webrtc/modules/audio_coding/neteq/tools/neteq_external_decoder_test.cc
@@ -12,6 +12,7 @@
#include "webrtc/modules/audio_coding/neteq/tools/neteq_external_decoder_test.h"
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/format_macros.h"
namespace webrtc {
namespace test {
@@ -21,26 +22,25 @@ NetEqExternalDecoderTest::NetEqExternalDecoderTest(NetEqDecoder codec,
: codec_(codec),
decoder_(decoder),
sample_rate_hz_(CodecSampleRateHz(codec_)),
- channels_(static_cast<int>(decoder_->Channels())) {
+ channels_(decoder_->Channels()) {
NetEq::Config config;
config.sample_rate_hz = sample_rate_hz_;
neteq_.reset(NetEq::Create(config));
- printf("%d\n", channels_);
+ printf("%" PRIuS "\n", channels_);
}
void NetEqExternalDecoderTest::Init() {
- ASSERT_EQ(NetEq::kOK, neteq_->RegisterExternalDecoder(
- decoder_, codec_, kPayloadType, sample_rate_hz_));
+ ASSERT_EQ(NetEq::kOK,
+ neteq_->RegisterExternalDecoder(decoder_, codec_, name_,
+ kPayloadType, sample_rate_hz_));
}
-void NetEqExternalDecoderTest::InsertPacket(WebRtcRTPHeader rtp_header,
- const uint8_t* payload,
- size_t payload_size_bytes,
- uint32_t receive_timestamp) {
- ASSERT_EQ(
- NetEq::kOK,
- neteq_->InsertPacket(
- rtp_header, payload, payload_size_bytes, receive_timestamp));
+void NetEqExternalDecoderTest::InsertPacket(
+ WebRtcRTPHeader rtp_header,
+ rtc::ArrayView<const uint8_t> payload,
+ uint32_t receive_timestamp) {
+ ASSERT_EQ(NetEq::kOK,
+ neteq_->InsertPacket(rtp_header, payload, receive_timestamp));
}
size_t NetEqExternalDecoderTest::GetOutputAudio(size_t max_length,
@@ -48,7 +48,7 @@ size_t NetEqExternalDecoderTest::GetOutputAudio(size_t max_length,
NetEqOutputType* output_type) {
// Get audio from regular instance.
size_t samples_per_channel;
- int num_channels;
+ size_t num_channels;
EXPECT_EQ(NetEq::kOK,
neteq_->GetAudio(max_length,
output,
@@ -58,6 +58,7 @@ size_t NetEqExternalDecoderTest::GetOutputAudio(size_t max_length,
EXPECT_EQ(channels_, num_channels);
EXPECT_EQ(static_cast<size_t>(kOutputLengthMs * sample_rate_hz_ / 1000),
samples_per_channel);
+ EXPECT_EQ(sample_rate_hz_, neteq_->last_output_sample_rate_hz());
return samples_per_channel;
}
diff --git a/webrtc/modules/audio_coding/neteq/tools/neteq_external_decoder_test.h b/webrtc/modules/audio_coding/neteq/tools/neteq_external_decoder_test.h
index 0a41c6ec20..d7b01fe33a 100644
--- a/webrtc/modules/audio_coding/neteq/tools/neteq_external_decoder_test.h
+++ b/webrtc/modules/audio_coding/neteq/tools/neteq_external_decoder_test.h
@@ -11,10 +11,12 @@
#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_NETEQ_EXTERNAL_DECODER_TEST_H_
#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_NETEQ_EXTERNAL_DECODER_TEST_H_
+#include <string>
+
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
#include "webrtc/modules/audio_coding/neteq/include/neteq.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
namespace webrtc {
namespace test {
@@ -36,8 +38,8 @@ class NetEqExternalDecoderTest {
// |payload_size_bytes| bytes. The |receive_timestamp| is an indication
// of the time when the packet was received, and should be measured with
// the same tick rate as the RTP timestamp of the current payload.
- virtual void InsertPacket(WebRtcRTPHeader rtp_header, const uint8_t* payload,
- size_t payload_size_bytes,
+ virtual void InsertPacket(WebRtcRTPHeader rtp_header,
+ rtc::ArrayView<const uint8_t> payload,
uint32_t receive_timestamp);
// Get 10 ms of audio data. The data is written to |output|, which can hold
@@ -49,9 +51,10 @@ class NetEqExternalDecoderTest {
private:
NetEqDecoder codec_;
+ std::string name_ = "dummy name";
AudioDecoder* decoder_;
int sample_rate_hz_;
- int channels_;
+ size_t channels_;
rtc::scoped_ptr<NetEq> neteq_;
};
diff --git a/webrtc/modules/audio_coding/neteq/tools/neteq_performance_test.cc b/webrtc/modules/audio_coding/neteq/tools/neteq_performance_test.cc
index 9fe4dffa91..7d1f9f9798 100644
--- a/webrtc/modules/audio_coding/neteq/tools/neteq_performance_test.cc
+++ b/webrtc/modules/audio_coding/neteq/tools/neteq_performance_test.cc
@@ -10,7 +10,7 @@
#include "webrtc/modules/audio_coding/neteq/tools/neteq_performance_test.h"
-#include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h"
+#include "webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.h"
#include "webrtc/modules/audio_coding/neteq/include/neteq.h"
#include "webrtc/modules/audio_coding/neteq/tools/audio_loop.h"
#include "webrtc/modules/audio_coding/neteq/tools/rtp_generator.h"
@@ -34,6 +34,7 @@ int64_t NetEqPerformanceTest::Run(int runtime_ms,
const int kSampRateHz = 32000;
const webrtc::NetEqDecoder kDecoderType =
webrtc::NetEqDecoder::kDecoderPCM16Bswb32kHz;
+ const std::string kDecoderName = "pcm16-swb32";
const int kPayloadType = 95;
// Initialize NetEq instance.
@@ -41,7 +42,7 @@ int64_t NetEqPerformanceTest::Run(int runtime_ms,
config.sample_rate_hz = kSampRateHz;
NetEq* neteq = NetEq::Create(config);
// Register decoder in |neteq|.
- if (neteq->RegisterPayloadType(kDecoderType, kPayloadType) != 0)
+ if (neteq->RegisterPayloadType(kDecoderType, kDecoderName, kPayloadType) != 0)
return -1;
// Set up AudioLoop object.
@@ -62,12 +63,13 @@ int64_t NetEqPerformanceTest::Run(int runtime_ms,
bool drift_flipped = false;
int32_t packet_input_time_ms =
rtp_gen.GetRtpHeader(kPayloadType, kInputBlockSizeSamples, &rtp_header);
- const int16_t* input_samples = audio_loop.GetNextBlock();
- if (!input_samples) exit(1);
+ auto input_samples = audio_loop.GetNextBlock();
+ if (input_samples.empty())
+ exit(1);
uint8_t input_payload[kInputBlockSizeSamples * sizeof(int16_t)];
- size_t payload_len =
- WebRtcPcm16b_Encode(input_samples, kInputBlockSizeSamples, input_payload);
- assert(payload_len == kInputBlockSizeSamples * sizeof(int16_t));
+ size_t payload_len = WebRtcPcm16b_Encode(input_samples.data(),
+ input_samples.size(), input_payload);
+ RTC_CHECK_EQ(sizeof(input_payload), payload_len);
// Main loop.
webrtc::Clock* clock = webrtc::Clock::GetRealTimeClock();
@@ -81,9 +83,9 @@ int64_t NetEqPerformanceTest::Run(int runtime_ms,
}
if (!lost) {
// Insert packet.
- int error = neteq->InsertPacket(
- rtp_header, input_payload, payload_len,
- packet_input_time_ms * kSampRateHz / 1000);
+ int error =
+ neteq->InsertPacket(rtp_header, input_payload,
+ packet_input_time_ms * kSampRateHz / 1000);
if (error != NetEq::kOK)
return -1;
}
@@ -93,10 +95,10 @@ int64_t NetEqPerformanceTest::Run(int runtime_ms,
kInputBlockSizeSamples,
&rtp_header);
input_samples = audio_loop.GetNextBlock();
- if (!input_samples) return -1;
- payload_len = WebRtcPcm16b_Encode(const_cast<int16_t*>(input_samples),
- kInputBlockSizeSamples,
- input_payload);
+ if (input_samples.empty())
+ return -1;
+ payload_len = WebRtcPcm16b_Encode(input_samples.data(),
+ input_samples.size(), input_payload);
assert(payload_len == kInputBlockSizeSamples * sizeof(int16_t));
}
@@ -107,7 +109,7 @@ int64_t NetEqPerformanceTest::Run(int runtime_ms,
static const size_t kOutDataLen =
kOutputBlockSizeMs * kMaxSamplesPerMs * kMaxChannels;
int16_t out_data[kOutDataLen];
- int num_channels;
+ size_t num_channels;
size_t samples_per_channel;
int error = neteq->GetAudio(kOutDataLen, out_data, &samples_per_channel,
&num_channels, NULL);
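Editor's note (not part of the patch): as the hunks above show, RegisterPayloadType now takes a codec name alongside the decoder type and payload number. A minimal sketch of the new call shape, reusing the values from this file:

// Sketch only: decoder registration with the new name argument.
webrtc::NetEq::Config config;
config.sample_rate_hz = 32000;
webrtc::NetEq* neteq = webrtc::NetEq::Create(config);
if (neteq->RegisterPayloadType(webrtc::NetEqDecoder::kDecoderPCM16Bswb32kHz,
                               "pcm16-swb32", 95) != 0) {
  // Registration failed; bail out as the test above does.
}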
diff --git a/webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.cc b/webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.cc
index 6826d1be74..9c64e0fb48 100644
--- a/webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.cc
+++ b/webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.cc
@@ -210,7 +210,7 @@ NetEqQualityTest::NetEqQualityTest(int block_duration_ms,
int out_sampling_khz,
NetEqDecoder decoder_type)
: decoder_type_(decoder_type),
- channels_(FLAGS_channels),
+ channels_(static_cast<size_t>(FLAGS_channels)),
decoded_time_ms_(0),
decodable_time_ms_(0),
drift_factor_(FLAGS_drift_factor),
@@ -292,7 +292,8 @@ bool GilbertElliotLoss::Lost() {
}
void NetEqQualityTest::SetUp() {
- ASSERT_EQ(0, neteq_->RegisterPayloadType(decoder_type_, kPayloadType));
+ ASSERT_EQ(0,
+ neteq_->RegisterPayloadType(decoder_type_, "noname", kPayloadType));
rtp_generator_->set_drift_factor(drift_factor_);
int units = block_duration_ms_ / kPacketLossTimeUnitMs;
@@ -377,9 +378,10 @@ int NetEqQualityTest::Transmit() {
<< " ms ";
if (payload_size_bytes_ > 0) {
if (!PacketLost()) {
- int ret = neteq_->InsertPacket(rtp_header_, &payload_[0],
- payload_size_bytes_,
- packet_input_time_ms * in_sampling_khz_);
+ int ret = neteq_->InsertPacket(
+ rtp_header_,
+ rtc::ArrayView<const uint8_t>(payload_.get(), payload_size_bytes_),
+ packet_input_time_ms * in_sampling_khz_);
if (ret != NetEq::kOK)
return -1;
Log() << "was sent.";
@@ -392,7 +394,7 @@ int NetEqQualityTest::Transmit() {
}
int NetEqQualityTest::DecodeBlock() {
- int channels;
+ size_t channels;
size_t samples;
int ret = neteq_->GetAudio(out_size_samples_ * channels_, &out_data_[0],
&samples, &channels, NULL);
diff --git a/webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.h b/webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.h
index e20be5796b..c2b2effee2 100644
--- a/webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.h
+++ b/webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.h
@@ -99,7 +99,7 @@ class NetEqQualityTest : public ::testing::Test {
std::ofstream& Log();
NetEqDecoder decoder_type_;
- const int channels_;
+ const size_t channels_;
private:
int decoded_time_ms_;
diff --git a/webrtc/modules/audio_coding/neteq/tools/neteq_rtpplay.cc b/webrtc/modules/audio_coding/neteq/tools/neteq_rtpplay.cc
index 0aaf8c71fd..3d79e5b5a2 100644
--- a/webrtc/modules/audio_coding/neteq/tools/neteq_rtpplay.cc
+++ b/webrtc/modules/audio_coding/neteq/tools/neteq_rtpplay.cc
@@ -26,7 +26,7 @@
#include "webrtc/base/checks.h"
#include "webrtc/base/safe_conversions.h"
#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h"
+#include "webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.h"
#include "webrtc/modules/audio_coding/neteq/include/neteq.h"
#include "webrtc/modules/audio_coding/neteq/tools/input_audio_file.h"
#include "webrtc/modules/audio_coding/neteq/tools/output_audio_file.h"
@@ -34,7 +34,7 @@
#include "webrtc/modules/audio_coding/neteq/tools/packet.h"
#include "webrtc/modules/audio_coding/neteq/tools/rtc_event_log_source.h"
#include "webrtc/modules/audio_coding/neteq/tools/rtp_file_source.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/test/rtp_file_reader.h"
#include "webrtc/test/testsupport/fileutils.h"
@@ -189,8 +189,9 @@ std::string CodecName(webrtc::NetEqDecoder codec) {
void RegisterPayloadType(NetEq* neteq,
webrtc::NetEqDecoder codec,
+ const std::string& name,
google::int32 flag) {
- if (neteq->RegisterPayloadType(codec, static_cast<uint8_t>(flag))) {
+ if (neteq->RegisterPayloadType(codec, name, static_cast<uint8_t>(flag))) {
std::cerr << "Cannot register payload type " << flag << " as "
<< CodecName(codec) << std::endl;
exit(1);
@@ -200,30 +201,40 @@ void RegisterPayloadType(NetEq* neteq,
// Registers all decoders in |neteq|.
void RegisterPayloadTypes(NetEq* neteq) {
assert(neteq);
- RegisterPayloadType(neteq, webrtc::NetEqDecoder::kDecoderPCMu, FLAGS_pcmu);
- RegisterPayloadType(neteq, webrtc::NetEqDecoder::kDecoderPCMa, FLAGS_pcma);
- RegisterPayloadType(neteq, webrtc::NetEqDecoder::kDecoderILBC, FLAGS_ilbc);
- RegisterPayloadType(neteq, webrtc::NetEqDecoder::kDecoderISAC, FLAGS_isac);
- RegisterPayloadType(neteq, webrtc::NetEqDecoder::kDecoderISACswb,
+ RegisterPayloadType(neteq, webrtc::NetEqDecoder::kDecoderPCMu, "pcmu",
+ FLAGS_pcmu);
+ RegisterPayloadType(neteq, webrtc::NetEqDecoder::kDecoderPCMa, "pcma",
+ FLAGS_pcma);
+ RegisterPayloadType(neteq, webrtc::NetEqDecoder::kDecoderILBC, "ilbc",
+ FLAGS_ilbc);
+ RegisterPayloadType(neteq, webrtc::NetEqDecoder::kDecoderISAC, "isac",
+ FLAGS_isac);
+ RegisterPayloadType(neteq, webrtc::NetEqDecoder::kDecoderISACswb, "isac-swb",
FLAGS_isac_swb);
- RegisterPayloadType(neteq, webrtc::NetEqDecoder::kDecoderOpus, FLAGS_opus);
- RegisterPayloadType(neteq, webrtc::NetEqDecoder::kDecoderPCM16B,
+ RegisterPayloadType(neteq, webrtc::NetEqDecoder::kDecoderOpus, "opus",
+ FLAGS_opus);
+ RegisterPayloadType(neteq, webrtc::NetEqDecoder::kDecoderPCM16B, "pcm16-nb",
FLAGS_pcm16b);
- RegisterPayloadType(neteq, webrtc::NetEqDecoder::kDecoderPCM16Bwb,
+ RegisterPayloadType(neteq, webrtc::NetEqDecoder::kDecoderPCM16Bwb, "pcm16-wb",
FLAGS_pcm16b_wb);
RegisterPayloadType(neteq, webrtc::NetEqDecoder::kDecoderPCM16Bswb32kHz,
- FLAGS_pcm16b_swb32);
+ "pcm16-swb32", FLAGS_pcm16b_swb32);
RegisterPayloadType(neteq, webrtc::NetEqDecoder::kDecoderPCM16Bswb48kHz,
- FLAGS_pcm16b_swb48);
- RegisterPayloadType(neteq, webrtc::NetEqDecoder::kDecoderG722, FLAGS_g722);
- RegisterPayloadType(neteq, webrtc::NetEqDecoder::kDecoderAVT, FLAGS_avt);
- RegisterPayloadType(neteq, webrtc::NetEqDecoder::kDecoderRED, FLAGS_red);
- RegisterPayloadType(neteq, webrtc::NetEqDecoder::kDecoderCNGnb, FLAGS_cn_nb);
- RegisterPayloadType(neteq, webrtc::NetEqDecoder::kDecoderCNGwb, FLAGS_cn_wb);
+ "pcm16-swb48", FLAGS_pcm16b_swb48);
+ RegisterPayloadType(neteq, webrtc::NetEqDecoder::kDecoderG722, "g722",
+ FLAGS_g722);
+ RegisterPayloadType(neteq, webrtc::NetEqDecoder::kDecoderAVT, "avt",
+ FLAGS_avt);
+ RegisterPayloadType(neteq, webrtc::NetEqDecoder::kDecoderRED, "red",
+ FLAGS_red);
+ RegisterPayloadType(neteq, webrtc::NetEqDecoder::kDecoderCNGnb, "cng-nb",
+ FLAGS_cn_nb);
+ RegisterPayloadType(neteq, webrtc::NetEqDecoder::kDecoderCNGwb, "cng-wb",
+ FLAGS_cn_wb);
RegisterPayloadType(neteq, webrtc::NetEqDecoder::kDecoderCNGswb32kHz,
- FLAGS_cn_swb32);
+ "cng-swb32", FLAGS_cn_swb32);
RegisterPayloadType(neteq, webrtc::NetEqDecoder::kDecoderCNGswb48kHz,
- FLAGS_cn_swb48);
+ "cng-swb48", FLAGS_cn_swb48);
}
void PrintCodecMappingEntry(webrtc::NetEqDecoder codec, google::int32 flag) {
@@ -399,23 +410,12 @@ int main(int argc, char* argv[]) {
printf("Input file: %s\n", argv[1]);
- // TODO(ivoc): Modify the RtpFileSource::Create and RtcEventLogSource::Create
- // functions to return a nullptr on failure instead of crashing
- // the program.
-
- // This temporary solution uses a RtpFileReader directly to check if the file
- // is a valid RtpDump file.
bool is_rtp_dump = false;
- {
- rtc::scoped_ptr<webrtc::test::RtpFileReader> rtp_reader(
- webrtc::test::RtpFileReader::Create(
- webrtc::test::RtpFileReader::kRtpDump, argv[1]));
- if (rtp_reader)
- is_rtp_dump = true;
- }
rtc::scoped_ptr<webrtc::test::PacketSource> file_source;
webrtc::test::RtcEventLogSource* event_log_source = nullptr;
- if (is_rtp_dump) {
+ if (webrtc::test::RtpFileSource::ValidRtpDump(argv[1]) ||
+ webrtc::test::RtpFileSource::ValidPcap(argv[1])) {
+ is_rtp_dump = true;
file_source.reset(webrtc::test::RtpFileSource::Create(argv[1]));
} else {
event_log_source = webrtc::test::RtcEventLogSource::Create(argv[1]);
@@ -558,7 +558,7 @@ int main(int argc, char* argv[]) {
payload_ptr = payload.get();
}
int error = neteq->InsertPacket(
- rtp_header, payload_ptr, payload_len,
+ rtp_header, rtc::ArrayView<const uint8_t>(payload_ptr, payload_len),
static_cast<uint32_t>(packet->time_ms() * sample_rate_hz / 1000));
if (error != NetEq::kOK) {
if (neteq->LastError() == NetEq::kUnknownRtpPayloadType) {
@@ -609,7 +609,7 @@ int main(int argc, char* argv[]) {
static const size_t kOutDataLen =
kOutputBlockSizeMs * kMaxSamplesPerMs * kMaxChannels;
int16_t out_data[kOutDataLen];
- int num_channels;
+ size_t num_channels;
size_t samples_per_channel;
int error = neteq->GetAudio(kOutDataLen, out_data, &samples_per_channel,
&num_channels, NULL);
diff --git a/webrtc/modules/audio_coding/neteq/tools/packet.cc b/webrtc/modules/audio_coding/neteq/tools/packet.cc
index b8b27afdec..2b2fcc286e 100644
--- a/webrtc/modules/audio_coding/neteq/tools/packet.cc
+++ b/webrtc/modules/audio_coding/neteq/tools/packet.cc
@@ -12,8 +12,8 @@
#include <string.h>
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
namespace webrtc {
namespace test {
diff --git a/webrtc/modules/audio_coding/neteq/tools/rtc_event_log_source.cc b/webrtc/modules/audio_coding/neteq/tools/rtc_event_log_source.cc
index 9b17ba8f64..dad72eaecd 100644
--- a/webrtc/modules/audio_coding/neteq/tools/rtc_event_log_source.cc
+++ b/webrtc/modules/audio_coding/neteq/tools/rtc_event_log_source.cc
@@ -18,7 +18,7 @@
#include "webrtc/base/checks.h"
#include "webrtc/call/rtc_event_log.h"
#include "webrtc/modules/audio_coding/neteq/tools/packet.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
// Files generated at build-time by the protobuf compiler.
#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
diff --git a/webrtc/modules/audio_coding/neteq/tools/rtc_event_log_source.h b/webrtc/modules/audio_coding/neteq/tools/rtc_event_log_source.h
index 7150bcfe89..90d5931224 100644
--- a/webrtc/modules/audio_coding/neteq/tools/rtc_event_log_source.h
+++ b/webrtc/modules/audio_coding/neteq/tools/rtc_event_log_source.h
@@ -16,7 +16,7 @@
#include "webrtc/base/constructormagic.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/audio_coding/neteq/tools/packet_source.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
namespace webrtc {
diff --git a/webrtc/modules/audio_coding/neteq/tools/rtp_file_source.cc b/webrtc/modules/audio_coding/neteq/tools/rtp_file_source.cc
index 9681ad17ea..b7a3109c01 100644
--- a/webrtc/modules/audio_coding/neteq/tools/rtp_file_source.cc
+++ b/webrtc/modules/audio_coding/neteq/tools/rtp_file_source.cc
@@ -20,7 +20,7 @@
#include "webrtc/base/checks.h"
#include "webrtc/modules/audio_coding/neteq/tools/packet.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
#include "webrtc/test/rtp_file_reader.h"
namespace webrtc {
@@ -32,6 +32,18 @@ RtpFileSource* RtpFileSource::Create(const std::string& file_name) {
return source;
}
+bool RtpFileSource::ValidRtpDump(const std::string& file_name) {
+ rtc::scoped_ptr<RtpFileReader> temp_file(
+ RtpFileReader::Create(RtpFileReader::kRtpDump, file_name));
+ return !!temp_file;
+}
+
+bool RtpFileSource::ValidPcap(const std::string& file_name) {
+ rtc::scoped_ptr<RtpFileReader> temp_file(
+ RtpFileReader::Create(RtpFileReader::kPcap, file_name));
+ return !!temp_file;
+}
+
RtpFileSource::~RtpFileSource() {
}
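Editor's note (not part of the patch): the two helpers above replace the ad-hoc RtpFileReader probing that neteq_rtpplay.cc used to do inline. A minimal usage sketch, with an illustrative file path:

// Sketch only: picking a packet source with the new validity checks.
std::string file_name = "input.rtp";  // illustrative path
rtc::scoped_ptr<webrtc::test::PacketSource> source;
if (webrtc::test::RtpFileSource::ValidRtpDump(file_name) ||
    webrtc::test::RtpFileSource::ValidPcap(file_name)) {
  source.reset(webrtc::test::RtpFileSource::Create(file_name));
}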
diff --git a/webrtc/modules/audio_coding/neteq/tools/rtp_file_source.h b/webrtc/modules/audio_coding/neteq/tools/rtp_file_source.h
index d0856a819c..2febf68b91 100644
--- a/webrtc/modules/audio_coding/neteq/tools/rtp_file_source.h
+++ b/webrtc/modules/audio_coding/neteq/tools/rtp_file_source.h
@@ -18,7 +18,7 @@
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/common_types.h"
#include "webrtc/modules/audio_coding/neteq/tools/packet_source.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
namespace webrtc {
@@ -34,6 +34,10 @@ class RtpFileSource : public PacketSource {
// opened, or has the wrong format, NULL will be returned.
static RtpFileSource* Create(const std::string& file_name);
+  // Checks whether a file is a valid RTP dump or PCAP (Wireshark) file.
+ static bool ValidRtpDump(const std::string& file_name);
+ static bool ValidPcap(const std::string& file_name);
+
virtual ~RtpFileSource();
// Registers an RTP header extension and binds it to |id|.
diff --git a/webrtc/modules/audio_coding/neteq/tools/rtp_generator.h b/webrtc/modules/audio_coding/neteq/tools/rtp_generator.h
index 6c16192daa..53371be8f6 100644
--- a/webrtc/modules/audio_coding/neteq/tools/rtp_generator.h
+++ b/webrtc/modules/audio_coding/neteq/tools/rtp_generator.h
@@ -12,7 +12,7 @@
#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_RTP_GENERATOR_H_
#include "webrtc/base/constructormagic.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/typedefs.h"
namespace webrtc {
diff --git a/webrtc/modules/audio_coding/test/ACMTest.h b/webrtc/modules/audio_coding/test/ACMTest.h
new file mode 100644
index 0000000000..d7e87d34ba
--- /dev/null
+++ b/webrtc/modules/audio_coding/test/ACMTest.h
@@ -0,0 +1,21 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_TEST_ACMTEST_H_
+#define WEBRTC_MODULES_AUDIO_CODING_TEST_ACMTEST_H_
+
+class ACMTest {
+ public:
+ ACMTest() {}
+ virtual ~ACMTest() {}
+ virtual void Perform() = 0;
+};
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_TEST_ACMTEST_H_
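Editor's note (not part of the patch): ACMTest is the minimal interface every test in this directory implements. A sketch of a conforming subclass, illustrative only:

// Sketch only: the smallest possible ACMTest implementation.
class NoOpTest : public ACMTest {
 public:
  void Perform() override {}  // a real test would drive the ACM here
};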
diff --git a/webrtc/modules/audio_coding/test/APITest.cc b/webrtc/modules/audio_coding/test/APITest.cc
new file mode 100644
index 0000000000..bf04d7c825
--- /dev/null
+++ b/webrtc/modules/audio_coding/test/APITest.cc
@@ -0,0 +1,1104 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/test/APITest.h"
+
+#include <ctype.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include <iostream>
+#include <ostream>
+#include <string>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/platform_thread.h"
+#include "webrtc/common.h"
+#include "webrtc/common_types.h"
+#include "webrtc/engine_configurations.h"
+#include "webrtc/modules/audio_coding/acm2/acm_common_defs.h"
+#include "webrtc/modules/audio_coding/test/utility.h"
+#include "webrtc/system_wrappers/include/event_wrapper.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/system_wrappers/include/trace.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+namespace webrtc {
+
+#define TEST_DURATION_SEC 600
+#define NUMBER_OF_SENDER_TESTS 6
+#define MAX_FILE_NAME_LENGTH_BYTE 500
+
+void APITest::Wait(uint32_t waitLengthMs) {
+ if (_randomTest) {
+ return;
+ } else {
+ EventWrapper* myEvent = EventWrapper::Create();
+ myEvent->Wait(waitLengthMs);
+ delete myEvent;
+ return;
+ }
+}
+
+APITest::APITest(const Config& config)
+ : _acmA(AudioCodingModule::Create(1)),
+ _acmB(AudioCodingModule::Create(2)),
+ _channel_A2B(NULL),
+ _channel_B2A(NULL),
+ _writeToFile(true),
+ _pullEventA(NULL),
+ _pushEventA(NULL),
+ _processEventA(NULL),
+ _apiEventA(NULL),
+ _pullEventB(NULL),
+ _pushEventB(NULL),
+ _processEventB(NULL),
+ _apiEventB(NULL),
+ _codecCntrA(0),
+ _codecCntrB(0),
+ _thereIsEncoderA(false),
+ _thereIsEncoderB(false),
+ _thereIsDecoderA(false),
+ _thereIsDecoderB(false),
+ _sendVADA(false),
+ _sendDTXA(false),
+ _sendVADModeA(VADNormal),
+ _sendVADB(false),
+ _sendDTXB(false),
+ _sendVADModeB(VADNormal),
+ _minDelayA(0),
+ _minDelayB(0),
+ _dotPositionA(0),
+ _dotMoveDirectionA(1),
+ _dotPositionB(39),
+ _dotMoveDirectionB(-1),
+ _vadCallbackA(NULL),
+ _vadCallbackB(NULL),
+ _apiTestRWLock(*RWLockWrapper::CreateRWLock()),
+ _randomTest(false),
+ _testNumA(0),
+ _testNumB(1) {
+ int n;
+ for (n = 0; n < 32; n++) {
+ _payloadUsed[n] = false;
+ }
+
+ _movingDot[40] = '\0';
+
+ for (int n = 0; n < 40; n++) {
+ _movingDot[n] = ' ';
+ }
+}
+
+APITest::~APITest() {
+ DELETE_POINTER(_channel_A2B);
+ DELETE_POINTER(_channel_B2A);
+
+ DELETE_POINTER(_pushEventA);
+ DELETE_POINTER(_pullEventA);
+ DELETE_POINTER(_processEventA);
+ DELETE_POINTER(_apiEventA);
+
+ DELETE_POINTER(_pushEventB);
+ DELETE_POINTER(_pullEventB);
+ DELETE_POINTER(_processEventB);
+ DELETE_POINTER(_apiEventB);
+
+ _inFileA.Close();
+ _outFileA.Close();
+
+ _inFileB.Close();
+ _outFileB.Close();
+
+ DELETE_POINTER(_vadCallbackA);
+ DELETE_POINTER(_vadCallbackB);
+
+ delete &_apiTestRWLock;
+}
+
+int16_t APITest::SetUp() {
+ CodecInst dummyCodec;
+ int lastPayloadType = 0;
+
+ int16_t numCodecs = _acmA->NumberOfCodecs();
+ for (uint8_t n = 0; n < numCodecs; n++) {
+ AudioCodingModule::Codec(n, &dummyCodec);
+ if ((STR_CASE_CMP(dummyCodec.plname, "CN") == 0)
+ && (dummyCodec.plfreq == 32000)) {
+ continue;
+ }
+
+ printf("Register Receive Codec %s ", dummyCodec.plname);
+
+ if ((n != 0) && !FixedPayloadTypeCodec(dummyCodec.plname)) {
+ // Check registration with an already occupied payload type
+ int currentPayloadType = dummyCodec.pltype;
+ dummyCodec.pltype = 97; //lastPayloadType;
+ CHECK_ERROR(_acmB->RegisterReceiveCodec(dummyCodec));
+ dummyCodec.pltype = currentPayloadType;
+ }
+
+ if ((n < numCodecs - 1) && !FixedPayloadTypeCodec(dummyCodec.plname)) {
+ // test if re-registration works;
+ CodecInst nextCodec;
+ int currentPayloadType = dummyCodec.pltype;
+ AudioCodingModule::Codec(n + 1, &nextCodec);
+ dummyCodec.pltype = nextCodec.pltype;
+ if (!FixedPayloadTypeCodec(nextCodec.plname)) {
+ _acmB->RegisterReceiveCodec(dummyCodec);
+ }
+ dummyCodec.pltype = currentPayloadType;
+ }
+
+ if ((n < numCodecs - 1) && !FixedPayloadTypeCodec(dummyCodec.plname)) {
+ // test if un-registration works;
+ CodecInst nextCodec;
+ AudioCodingModule::Codec(n + 1, &nextCodec);
+ nextCodec.pltype = dummyCodec.pltype;
+ if (!FixedPayloadTypeCodec(nextCodec.plname)) {
+ CHECK_ERROR_MT(_acmA->RegisterReceiveCodec(nextCodec));
+ CHECK_ERROR_MT(_acmA->UnregisterReceiveCodec(nextCodec.pltype));
+ }
+ }
+
+ CHECK_ERROR_MT(_acmA->RegisterReceiveCodec(dummyCodec));
+ printf(" side A done!");
+ CHECK_ERROR_MT(_acmB->RegisterReceiveCodec(dummyCodec));
+ printf(" side B done!\n");
+
+ if (!strcmp(dummyCodec.plname, "CN")) {
+ CHECK_ERROR_MT(_acmA->RegisterSendCodec(dummyCodec));
+ CHECK_ERROR_MT(_acmB->RegisterSendCodec(dummyCodec));
+ }
+ lastPayloadType = dummyCodec.pltype;
+ if ((lastPayloadType >= 96) && (lastPayloadType <= 127)) {
+ _payloadUsed[lastPayloadType - 96] = true;
+ }
+ }
+ _thereIsDecoderA = true;
+ _thereIsDecoderB = true;
+
+ // Register Send Codec
+ AudioCodingModule::Codec((uint8_t) _codecCntrA, &dummyCodec);
+ CHECK_ERROR_MT(_acmA->RegisterSendCodec(dummyCodec));
+ _thereIsEncoderA = true;
+ //
+ AudioCodingModule::Codec((uint8_t) _codecCntrB, &dummyCodec);
+ CHECK_ERROR_MT(_acmB->RegisterSendCodec(dummyCodec));
+ _thereIsEncoderB = true;
+
+ uint16_t frequencyHz;
+
+ printf("\n\nAPI Test\n");
+ printf("========\n");
+ printf("Hit enter to accept the default values indicated in []\n\n");
+
+ //--- Input A
+ std::string file_name = webrtc::test::ResourcePath(
+ "audio_coding/testfile32kHz", "pcm");
+ frequencyHz = 32000;
+ printf("Enter input file at side A [%s]: ", file_name.c_str());
+ PCMFile::ChooseFile(&file_name, 499, &frequencyHz);
+ _inFileA.Open(file_name, frequencyHz, "rb", true);
+
+ //--- Output A
+ std::string out_file_a = webrtc::test::OutputPath() + "outA.pcm";
+ printf("Enter output file at side A [%s]: ", out_file_a.c_str());
+ PCMFile::ChooseFile(&out_file_a, 499, &frequencyHz);
+ _outFileA.Open(out_file_a, frequencyHz, "wb");
+
+ //--- Input B
+ file_name = webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm");
+ printf("\n\nEnter input file at side B [%s]: ", file_name.c_str());
+ PCMFile::ChooseFile(&file_name, 499, &frequencyHz);
+ _inFileB.Open(file_name, frequencyHz, "rb", true);
+
+ //--- Output B
+ std::string out_file_b = webrtc::test::OutputPath() + "outB.pcm";
+ printf("Enter output file at side B [%s]: ", out_file_b.c_str());
+ PCMFile::ChooseFile(&out_file_b, 499, &frequencyHz);
+ _outFileB.Open(out_file_b, frequencyHz, "wb");
+
+ //--- Set A-to-B channel
+ _channel_A2B = new Channel(2);
+ CHECK_ERROR_MT(_acmA->RegisterTransportCallback(_channel_A2B));
+ _channel_A2B->RegisterReceiverACM(_acmB.get());
+
+ //--- Set B-to-A channel
+ _channel_B2A = new Channel(1);
+ CHECK_ERROR_MT(_acmB->RegisterTransportCallback(_channel_B2A));
+ _channel_B2A->RegisterReceiverACM(_acmA.get());
+
+ //--- EVENT TIMERS
+ // A
+ _pullEventA = EventTimerWrapper::Create();
+ _pushEventA = EventTimerWrapper::Create();
+ _processEventA = EventTimerWrapper::Create();
+ _apiEventA = EventWrapper::Create();
+ // B
+ _pullEventB = EventTimerWrapper::Create();
+ _pushEventB = EventTimerWrapper::Create();
+ _processEventB = EventTimerWrapper::Create();
+ _apiEventB = EventWrapper::Create();
+
+ //--- I/O params
+ // A
+ _outFreqHzA = _outFileA.SamplingFrequency();
+ // B
+ _outFreqHzB = _outFileB.SamplingFrequency();
+
+ //Trace::SetEncryptedTraceFile("ACMAPITestEncrypted.txt");
+
+ char print[11];
+
+ // Create a trace file.
+ Trace::CreateTrace();
+ Trace::SetTraceFile(
+ (webrtc::test::OutputPath() + "acm_api_trace.txt").c_str());
+
+ printf("\nRandom Test (y/n)?");
+ EXPECT_TRUE(fgets(print, 10, stdin) != NULL);
+ print[10] = '\0';
+ if (strstr(print, "y") != NULL) {
+ _randomTest = true;
+ _verbose = false;
+ _writeToFile = false;
+ } else {
+ _randomTest = false;
+ printf("\nPrint Tests (y/n)? ");
+ EXPECT_TRUE(fgets(print, 10, stdin) != NULL);
+ print[10] = '\0';
+ if (strstr(print, "y") == NULL) {
+ EXPECT_TRUE(freopen("APITest_log.txt", "w", stdout) != 0);
+ _verbose = false;
+ }
+ }
+
+ _vadCallbackA = new VADCallback;
+ _vadCallbackB = new VADCallback;
+
+ return 0;
+}
+
+bool APITest::PushAudioThreadA(void* obj) {
+ return static_cast<APITest*>(obj)->PushAudioRunA();
+}
+
+bool APITest::PushAudioThreadB(void* obj) {
+ return static_cast<APITest*>(obj)->PushAudioRunB();
+}
+
+bool APITest::PullAudioThreadA(void* obj) {
+ return static_cast<APITest*>(obj)->PullAudioRunA();
+}
+
+bool APITest::PullAudioThreadB(void* obj) {
+ return static_cast<APITest*>(obj)->PullAudioRunB();
+}
+
+bool APITest::ProcessThreadA(void* obj) {
+ return static_cast<APITest*>(obj)->ProcessRunA();
+}
+
+bool APITest::ProcessThreadB(void* obj) {
+ return static_cast<APITest*>(obj)->ProcessRunB();
+}
+
+bool APITest::APIThreadA(void* obj) {
+ return static_cast<APITest*>(obj)->APIRunA();
+}
+
+bool APITest::APIThreadB(void* obj) {
+ return static_cast<APITest*>(obj)->APIRunB();
+}
+
+bool APITest::PullAudioRunA() {
+ _pullEventA->Wait(100);
+ AudioFrame audioFrame;
+ if (_acmA->PlayoutData10Ms(_outFreqHzA, &audioFrame) < 0) {
+ bool thereIsDecoder;
+ {
+ ReadLockScoped rl(_apiTestRWLock);
+ thereIsDecoder = _thereIsDecoderA;
+ }
+ if (thereIsDecoder) {
+ fprintf(stderr, "\n>>>>>> cannot pull audio A <<<<<<<< \n");
+ }
+ } else {
+ if (_writeToFile) {
+ _outFileA.Write10MsData(audioFrame);
+ }
+ }
+ return true;
+}
+
+bool APITest::PullAudioRunB() {
+ _pullEventB->Wait(100);
+ AudioFrame audioFrame;
+ if (_acmB->PlayoutData10Ms(_outFreqHzB, &audioFrame) < 0) {
+ bool thereIsDecoder;
+ {
+ ReadLockScoped rl(_apiTestRWLock);
+ thereIsDecoder = _thereIsDecoderB;
+ }
+ if (thereIsDecoder) {
+ fprintf(stderr, "\n>>>>>> cannot pull audio B <<<<<<<< \n");
+ fprintf(stderr, "%d %d\n", _testNumA, _testNumB);
+ }
+ } else {
+ if (_writeToFile) {
+ _outFileB.Write10MsData(audioFrame);
+ }
+ }
+ return true;
+}
+
+bool APITest::PushAudioRunA() {
+ _pushEventA->Wait(100);
+ AudioFrame audioFrame;
+ _inFileA.Read10MsData(audioFrame);
+ if (_acmA->Add10MsData(audioFrame) < 0) {
+ bool thereIsEncoder;
+ {
+ ReadLockScoped rl(_apiTestRWLock);
+ thereIsEncoder = _thereIsEncoderA;
+ }
+ if (thereIsEncoder) {
+ fprintf(stderr, "\n>>>> add10MsData at A failed <<<<\n");
+ }
+ }
+ return true;
+}
+
+bool APITest::PushAudioRunB() {
+ _pushEventB->Wait(100);
+ AudioFrame audioFrame;
+ _inFileB.Read10MsData(audioFrame);
+ if (_acmB->Add10MsData(audioFrame) < 0) {
+ bool thereIsEncoder;
+ {
+ ReadLockScoped rl(_apiTestRWLock);
+ thereIsEncoder = _thereIsEncoderB;
+ }
+
+ if (thereIsEncoder) {
+ fprintf(stderr, "\n>>>> cannot add audio to B <<<<");
+ }
+ }
+
+ return true;
+}
+
+bool APITest::ProcessRunA() {
+ _processEventA->Wait(100);
+ return true;
+}
+
+bool APITest::ProcessRunB() {
+ _processEventB->Wait(100);
+ return true;
+}
+
+/*
+ * On side A we test the APIs related to the sender side.
+ */
+
+void APITest::RunTest(char thread) {
+ int testNum;
+ {
+ WriteLockScoped cs(_apiTestRWLock);
+ if (thread == 'A') {
+ _testNumA = (_testNumB + 1 + (rand() % 3)) % 4;
+ testNum = _testNumA;
+
+ _movingDot[_dotPositionA] = ' ';
+ if (_dotPositionA == 0) {
+ _dotMoveDirectionA = 1;
+ }
+ if (_dotPositionA == 19) {
+ _dotMoveDirectionA = -1;
+ }
+ _dotPositionA += _dotMoveDirectionA;
+ _movingDot[_dotPositionA] = (_dotMoveDirectionA > 0) ? '>' : '<';
+ } else {
+ _testNumB = (_testNumA + 1 + (rand() % 3)) % 4;
+ testNum = _testNumB;
+
+ _movingDot[_dotPositionB] = ' ';
+ if (_dotPositionB == 20) {
+ _dotMoveDirectionB = 1;
+ }
+ if (_dotPositionB == 39) {
+ _dotMoveDirectionB = -1;
+ }
+ _dotPositionB += _dotMoveDirectionB;
+ _movingDot[_dotPositionB] = (_dotMoveDirectionB > 0) ? '>' : '<';
+ }
+ //fprintf(stderr, "%c: %d \n", thread, testNum);
+ //fflush(stderr);
+ }
+ switch (testNum) {
+ case 0:
+ CurrentCodec('A');
+ ChangeCodec('A');
+ break;
+ case 1:
+ if (!_randomTest) {
+ fprintf(stdout, "\nTesting Delay ...\n");
+ }
+ TestDelay('A');
+ break;
+ case 2:
+ TestSendVAD('A');
+ break;
+ case 3:
+ TestRegisteration('A');
+ break;
+ default:
+ fprintf(stderr, "Wrong Test Number\n");
+ getc(stdin);
+ exit(1);
+ }
+}
+
+bool APITest::APIRunA() {
+ _apiEventA->Wait(50);
+
+ bool randomTest;
+ {
+ ReadLockScoped rl(_apiTestRWLock);
+ randomTest = _randomTest;
+ }
+ if (randomTest) {
+ RunTest('A');
+ } else {
+ CurrentCodec('A');
+ ChangeCodec('A');
+ if (_codecCntrA == 0) {
+ fprintf(stdout, "\nTesting Delay ...\n");
+ TestDelay('A');
+ }
+ // VAD TEST
+ TestSendVAD('A');
+ TestRegisteration('A');
+ }
+ return true;
+}
+
+bool APITest::APIRunB() {
+ _apiEventB->Wait(50);
+ bool randomTest;
+ {
+ ReadLockScoped rl(_apiTestRWLock);
+ randomTest = _randomTest;
+ }
+ //_apiEventB->Wait(2000);
+ if (randomTest) {
+ RunTest('B');
+ }
+
+ return true;
+}
+
+void APITest::Perform() {
+ SetUp();
+
+ //--- THREADS
+ // A
+ // PUSH
+ rtc::PlatformThread myPushAudioThreadA(PushAudioThreadA, this,
+ "PushAudioThreadA");
+ myPushAudioThreadA.Start();
+ // PULL
+ rtc::PlatformThread myPullAudioThreadA(PullAudioThreadA, this,
+ "PullAudioThreadA");
+ myPullAudioThreadA.Start();
+ // Process
+ rtc::PlatformThread myProcessThreadA(ProcessThreadA, this, "ProcessThreadA");
+ myProcessThreadA.Start();
+ // API
+ rtc::PlatformThread myAPIThreadA(APIThreadA, this, "APIThreadA");
+ myAPIThreadA.Start();
+ // B
+ // PUSH
+ rtc::PlatformThread myPushAudioThreadB(PushAudioThreadB, this,
+ "PushAudioThreadB");
+ myPushAudioThreadB.Start();
+ // PULL
+ rtc::PlatformThread myPullAudioThreadB(PullAudioThreadB, this,
+ "PullAudioThreadB");
+ myPullAudioThreadB.Start();
+ // Process
+ rtc::PlatformThread myProcessThreadB(ProcessThreadB, this, "ProcessThreadB");
+ myProcessThreadB.Start();
+ // API
+ rtc::PlatformThread myAPIThreadB(APIThreadB, this, "APIThreadB");
+ myAPIThreadB.Start();
+
+ //_apiEventA->StartTimer(true, 5000);
+ //_apiEventB->StartTimer(true, 5000);
+
+ _processEventA->StartTimer(true, 10);
+ _processEventB->StartTimer(true, 10);
+
+ _pullEventA->StartTimer(true, 10);
+ _pullEventB->StartTimer(true, 10);
+
+ _pushEventA->StartTimer(true, 10);
+ _pushEventB->StartTimer(true, 10);
+
+ // Keep main thread waiting for sender/receiver
+ // threads to complete
+ EventWrapper* completeEvent = EventWrapper::Create();
+ uint64_t startTime = TickTime::MillisecondTimestamp();
+ uint64_t currentTime;
+  // Run the test for 2 minutes (120000 ms).
+ do {
+ {
+ //ReadLockScoped rl(_apiTestRWLock);
+ //fprintf(stderr, "\r%s", _movingDot);
+ }
+ //fflush(stderr);
+ completeEvent->Wait(50);
+ currentTime = TickTime::MillisecondTimestamp();
+ } while ((currentTime - startTime) < 120000);
+
+ //completeEvent->Wait(0xFFFFFFFF);
+ //(unsigned long)((unsigned long)TEST_DURATION_SEC * (unsigned long)1000));
+ delete completeEvent;
+
+ myPushAudioThreadA.Stop();
+ myPullAudioThreadA.Stop();
+ myProcessThreadA.Stop();
+ myAPIThreadA.Stop();
+
+ myPushAudioThreadB.Stop();
+ myPullAudioThreadB.Stop();
+ myProcessThreadB.Stop();
+ myAPIThreadB.Stop();
+}
+
+void APITest::CheckVADStatus(char side) {
+
+ bool dtxEnabled;
+ bool vadEnabled;
+ ACMVADMode vadMode;
+
+ if (side == 'A') {
+ _acmA->VAD(&dtxEnabled, &vadEnabled, &vadMode);
+ _acmA->RegisterVADCallback(NULL);
+ _vadCallbackA->Reset();
+ _acmA->RegisterVADCallback(_vadCallbackA);
+
+ if (!_randomTest) {
+ if (_verbose) {
+ fprintf(stdout, "DTX %3s, VAD %3s, Mode %d", dtxEnabled ? "ON" : "OFF",
+ vadEnabled ? "ON" : "OFF", (int) vadMode);
+ Wait(5000);
+ fprintf(stdout, " => bit-rate %3.0f kbps\n", _channel_A2B->BitRate());
+ } else {
+ Wait(5000);
+ fprintf(stdout, "DTX %3s, VAD %3s, Mode %d => bit-rate %3.0f kbps\n",
+ dtxEnabled ? "ON" : "OFF", vadEnabled ? "ON" : "OFF",
+ (int) vadMode, _channel_A2B->BitRate());
+ }
+ _vadCallbackA->PrintFrameTypes();
+ }
+
+ if (dtxEnabled != _sendDTXA) {
+ fprintf(stderr, ">>> Error Enabling DTX <<<\n");
+ }
+ if ((vadEnabled != _sendVADA) && (!dtxEnabled)) {
+ fprintf(stderr, ">>> Error Enabling VAD <<<\n");
+ }
+ if ((vadMode != _sendVADModeA) && vadEnabled) {
+ fprintf(stderr, ">>> Error setting VAD-mode <<<\n");
+ }
+ } else {
+ _acmB->VAD(&dtxEnabled, &vadEnabled, &vadMode);
+
+ _acmB->RegisterVADCallback(NULL);
+ _vadCallbackB->Reset();
+ _acmB->RegisterVADCallback(_vadCallbackB);
+
+ if (!_randomTest) {
+ if (_verbose) {
+ fprintf(stdout, "DTX %3s, VAD %3s, Mode %d", dtxEnabled ? "ON" : "OFF",
+ vadEnabled ? "ON" : "OFF", (int) vadMode);
+ Wait(5000);
+ fprintf(stdout, " => bit-rate %3.0f kbps\n", _channel_B2A->BitRate());
+ } else {
+ Wait(5000);
+ fprintf(stdout, "DTX %3s, VAD %3s, Mode %d => bit-rate %3.0f kbps\n",
+ dtxEnabled ? "ON" : "OFF", vadEnabled ? "ON" : "OFF",
+ (int) vadMode, _channel_B2A->BitRate());
+ }
+ _vadCallbackB->PrintFrameTypes();
+ }
+
+ if (dtxEnabled != _sendDTXB) {
+ fprintf(stderr, ">>> Error Enabling DTX <<<\n");
+ }
+ if ((vadEnabled != _sendVADB) && (!dtxEnabled)) {
+ fprintf(stderr, ">>> Error Enabling VAD <<<\n");
+ }
+ if ((vadMode != _sendVADModeB) && vadEnabled) {
+ fprintf(stderr, ">>> Error setting VAD-mode <<<\n");
+ }
+ }
+}
+
+// Set Min delay, get delay, playout timestamp
+void APITest::TestDelay(char side) {
+ AudioCodingModule* myACM;
+ Channel* myChannel;
+ int32_t* myMinDelay;
+ EventTimerWrapper* myEvent = EventTimerWrapper::Create();
+
+ uint32_t inTimestamp = 0;
+ uint32_t outTimestamp = 0;
+ double estimDelay = 0;
+
+ double averageEstimDelay = 0;
+ double averageDelay = 0;
+
+ CircularBuffer estimDelayCB(100);
+ estimDelayCB.SetArithMean(true);
+
+ if (side == 'A') {
+ myACM = _acmA.get();
+ myChannel = _channel_B2A;
+ myMinDelay = &_minDelayA;
+ } else {
+ myACM = _acmB.get();
+ myChannel = _channel_A2B;
+ myMinDelay = &_minDelayB;
+ }
+
+ CHECK_ERROR_MT(myACM->SetMinimumPlayoutDelay(*myMinDelay));
+
+ inTimestamp = myChannel->LastInTimestamp();
+ CHECK_ERROR_MT(myACM->PlayoutTimestamp(&outTimestamp));
+
+ if (!_randomTest) {
+ myEvent->StartTimer(true, 30);
+ int n = 0;
+ int settlePoint = 5000;
+ while (n < settlePoint + 400) {
+ myEvent->Wait(1000);
+
+ inTimestamp = myChannel->LastInTimestamp();
+ CHECK_ERROR_MT(myACM->PlayoutTimestamp(&outTimestamp));
+
+ //std::cout << outTimestamp << std::endl << std::flush;
+ estimDelay = (double) ((uint32_t)(inTimestamp - outTimestamp))
+ / ((double) myACM->ReceiveFrequency() / 1000.0);
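+      // Editor's note (illustrative): with a 32 kHz receive frequency, a
+      // timestamp gap of 3200 samples corresponds to 3200 / 32 = 100 ms.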
+
+ estimDelayCB.Update(estimDelay);
+
+ estimDelayCB.ArithMean(averageEstimDelay);
+ //printf("\n %6.1f \n", estimDelay);
+ //std::cout << " " << std::flush;
+
+ if (_verbose) {
+ fprintf(stdout,
+                "\rExpected: %4d, retrieved: %6.1f, measured: %6.1f",
+ *myMinDelay, averageDelay, averageEstimDelay);
+ std::cout << " " << std::flush;
+ }
+ if ((averageDelay > *myMinDelay) && (n < settlePoint)) {
+ settlePoint = n;
+ }
+ n++;
+ }
+ myEvent->StopTimer();
+ }
+
+ if ((!_verbose) && (!_randomTest)) {
+    fprintf(stdout, "\nExpected: %4d, retrieved: %6.1f, measured: %6.1f",
+ *myMinDelay, averageDelay, averageEstimDelay);
+ }
+
+ *myMinDelay = (rand() % 1000) + 1;
+
+ NetworkStatistics networkStat;
+ CHECK_ERROR_MT(myACM->GetNetworkStatistics(&networkStat));
+
+ if (!_randomTest) {
+ fprintf(stdout, "\n\nJitter Statistics at Side %c\n", side);
+ fprintf(stdout, "--------------------------------------\n");
+ fprintf(stdout, "buffer-size............. %d\n",
+ networkStat.currentBufferSize);
+ fprintf(stdout, "Preferred buffer-size... %d\n",
+ networkStat.preferredBufferSize);
+ fprintf(stdout, "Peaky jitter mode........%d\n",
+ networkStat.jitterPeaksFound);
+    fprintf(stdout, "packet-loss rate........ %d\n",
+ networkStat.currentPacketLossRate);
+ fprintf(stdout, "discard rate............ %d\n",
+ networkStat.currentDiscardRate);
+ fprintf(stdout, "expand rate............. %d\n",
+ networkStat.currentExpandRate);
+ fprintf(stdout, "speech expand rate...... %d\n",
+ networkStat.currentSpeechExpandRate);
+ fprintf(stdout, "Preemptive rate......... %d\n",
+ networkStat.currentPreemptiveRate);
+ fprintf(stdout, "Accelerate rate......... %d\n",
+ networkStat.currentAccelerateRate);
+ fprintf(stdout, "Secondary decoded rate.. %d\n",
+ networkStat.currentSecondaryDecodedRate);
+ fprintf(stdout, "Clock-drift............. %d\n", networkStat.clockDriftPPM);
+ fprintf(stdout, "Mean waiting time....... %d\n",
+ networkStat.meanWaitingTimeMs);
+ fprintf(stdout, "Median waiting time..... %d\n",
+ networkStat.medianWaitingTimeMs);
+ fprintf(stdout, "Min waiting time........ %d\n",
+ networkStat.minWaitingTimeMs);
+ fprintf(stdout, "Max waiting time........ %d\n",
+ networkStat.maxWaitingTimeMs);
+ }
+
+ CHECK_ERROR_MT(myACM->SetMinimumPlayoutDelay(*myMinDelay));
+
+ if (!_randomTest) {
+ myEvent->Wait(500);
+ fprintf(stdout, "\n");
+ fprintf(stdout, "\n");
+ }
+ delete myEvent;
+}
+
+// Unregister a codec & register again.
+void APITest::TestRegisteration(char sendSide) {
+ AudioCodingModule* sendACM;
+ AudioCodingModule* receiveACM;
+ bool* thereIsDecoder;
+ EventWrapper* myEvent = EventWrapper::Create();
+
+ if (!_randomTest) {
+ fprintf(stdout, "\n\n");
+ fprintf(stdout,
+ "---------------------------------------------------------\n");
+ fprintf(stdout, " Unregister/register Receive Codec\n");
+ fprintf(stdout,
+ "---------------------------------------------------------\n");
+ }
+
+ switch (sendSide) {
+ case 'A': {
+ sendACM = _acmA.get();
+ receiveACM = _acmB.get();
+ thereIsDecoder = &_thereIsDecoderB;
+ break;
+ }
+ case 'B': {
+ sendACM = _acmB.get();
+ receiveACM = _acmA.get();
+ thereIsDecoder = &_thereIsDecoderA;
+ break;
+ }
+ default:
+ fprintf(stderr, "Invalid sender-side in TestRegistration(%c)\n",
+ sendSide);
+ exit(-1);
+ }
+
+ auto myCodec = sendACM->SendCodec();
+ if (!myCodec) {
+ CodecInst ci;
+ AudioCodingModule::Codec(_codecCntrA, &ci);
+ myCodec = rtc::Optional<CodecInst>(ci);
+ }
+
+ if (!_randomTest) {
+    fprintf(stdout, "Unregistering receive codec, NO AUDIO.\n");
+ fflush (stdout);
+ }
+ {
+ WriteLockScoped wl(_apiTestRWLock);
+ *thereIsDecoder = false;
+ }
+ //myEvent->Wait(20);
+ CHECK_ERROR_MT(receiveACM->UnregisterReceiveCodec(myCodec->pltype));
+ Wait(1000);
+
+ int currentPayload = myCodec->pltype;
+
+ if (!FixedPayloadTypeCodec(myCodec->plname)) {
+ int32_t i;
+ for (i = 0; i < 32; i++) {
+ if (!_payloadUsed[i]) {
+ if (!_randomTest) {
+ fprintf(stdout,
+ "Register receive codec with new Payload, AUDIO BACK.\n");
+ }
+ //myCodec->pltype = i + 96;
+ //CHECK_ERROR_MT(receiveACM->RegisterReceiveCodec(*myCodec));
+ //CHECK_ERROR_MT(sendACM->RegisterSendCodec(*myCodec));
+ //myEvent->Wait(20);
+ //{
+ // WriteLockScoped wl(_apiTestRWLock);
+ // *thereIsDecoder = true;
+ //}
+ Wait(1000);
+
+ if (!_randomTest) {
+          fprintf(stdout, "Unregistering receive codec, NO AUDIO.\n");
+ }
+ //{
+ // WriteLockScoped wl(_apiTestRWLock);
+ // *thereIsDecoder = false;
+ //}
+ //myEvent->Wait(20);
+ //CHECK_ERROR_MT(receiveACM->UnregisterReceiveCodec(myCodec->pltype));
+ Wait(1000);
+
+ myCodec->pltype = currentPayload;
+ if (!_randomTest) {
+ fprintf(stdout,
+ "Register receive codec with default Payload, AUDIO BACK.\n");
+ fflush (stdout);
+ }
+ CHECK_ERROR_MT(receiveACM->RegisterReceiveCodec(*myCodec));
+ //CHECK_ERROR_MT(sendACM->RegisterSendCodec(*myCodec));
+ myEvent->Wait(20);
+ {
+ WriteLockScoped wl(_apiTestRWLock);
+ *thereIsDecoder = true;
+ }
+ Wait(1000);
+
+ break;
+ }
+ }
+ if (i == 32) {
+ CHECK_ERROR_MT(receiveACM->RegisterReceiveCodec(*myCodec));
+ {
+ WriteLockScoped wl(_apiTestRWLock);
+ *thereIsDecoder = true;
+ }
+ }
+ } else {
+ if (!_randomTest) {
+ fprintf(stdout,
+ "Register receive codec with fixed Payload, AUDIO BACK.\n");
+ fflush (stdout);
+ }
+ CHECK_ERROR_MT(receiveACM->RegisterReceiveCodec(*myCodec));
+ //CHECK_ERROR_MT(receiveACM->UnregisterReceiveCodec(myCodec->pltype));
+ //CHECK_ERROR_MT(receiveACM->RegisterReceiveCodec(*myCodec));
+ myEvent->Wait(20);
+ {
+ WriteLockScoped wl(_apiTestRWLock);
+ *thereIsDecoder = true;
+ }
+ }
+ delete myEvent;
+ if (!_randomTest) {
+ fprintf(stdout,
+ "---------------------------------------------------------\n");
+ }
+}
+
+void APITest::TestSendVAD(char side) {
+ if (_randomTest) {
+ return;
+ }
+
+ bool* vad;
+ bool* dtx;
+ ACMVADMode* mode;
+ Channel* myChannel;
+ AudioCodingModule* myACM;
+
+ CodecInst myCodec;
+ if (!_randomTest) {
+ fprintf(stdout, "\n\n");
+ fprintf(stdout, "-----------------------------------------------\n");
+ fprintf(stdout, " Test VAD API\n");
+ fprintf(stdout, "-----------------------------------------------\n");
+ }
+
+ if (side == 'A') {
+ AudioCodingModule::Codec(_codecCntrA, &myCodec);
+ vad = &_sendVADA;
+ dtx = &_sendDTXA;
+ mode = &_sendVADModeA;
+ myChannel = _channel_A2B;
+ myACM = _acmA.get();
+ } else {
+ AudioCodingModule::Codec(_codecCntrB, &myCodec);
+ vad = &_sendVADB;
+ dtx = &_sendDTXB;
+ mode = &_sendVADModeB;
+ myChannel = _channel_B2A;
+ myACM = _acmB.get();
+ }
+
+ CheckVADStatus(side);
+ if (!_randomTest) {
+ fprintf(stdout, "\n\n");
+ }
+
+ switch (*mode) {
+ case VADNormal:
+ *vad = true;
+ *dtx = true;
+ *mode = VADAggr;
+ break;
+ case VADLowBitrate:
+ *vad = true;
+ *dtx = true;
+ *mode = VADVeryAggr;
+ break;
+ case VADAggr:
+ *vad = true;
+ *dtx = true;
+ *mode = VADLowBitrate;
+ break;
+ case VADVeryAggr:
+ *vad = false;
+ *dtx = false;
+ *mode = VADNormal;
+ break;
+ default:
+ *mode = VADNormal;
+ }
+
+ *dtx = (myCodec.plfreq == 32000) ? false : *dtx;
+
+ CHECK_ERROR_MT(myACM->SetVAD(*dtx, *vad, *mode));
+ myChannel->ResetStats();
+
+ CheckVADStatus(side);
+ if (!_randomTest) {
+ fprintf(stdout, "\n");
+ fprintf(stdout, "-----------------------------------------------\n");
+ }
+
+ // Fault Test
+ CHECK_PROTECTED_MT(myACM->SetVAD(false, true, (ACMVADMode) - 1));
+ CHECK_PROTECTED_MT(myACM->SetVAD(false, true, (ACMVADMode) 4));
+
+}
+
+void APITest::CurrentCodec(char side) {
+ auto myCodec = (side == 'A' ? _acmA : _acmB)->SendCodec();
+
+ if (!_randomTest) {
+ fprintf(stdout, "\n\n");
+    fprintf(stdout, "Send codec in Side %c\n", side);
+ fprintf(stdout, "----------------------------\n");
+ fprintf(stdout, "Name................. %s\n", myCodec->plname);
+ fprintf(stdout, "Sampling Frequency... %d\n", myCodec->plfreq);
+ fprintf(stdout, "Rate................. %d\n", myCodec->rate);
+ fprintf(stdout, "Payload-type......... %d\n", myCodec->pltype);
+ fprintf(stdout, "Packet-size.......... %d\n", myCodec->pacsize);
+ }
+
+ Wait(100);
+}
+
+void APITest::ChangeCodec(char side) {
+ CodecInst myCodec;
+ AudioCodingModule* myACM;
+ uint8_t* codecCntr;
+ bool* thereIsEncoder;
+ bool* vad;
+ bool* dtx;
+ ACMVADMode* mode;
+ Channel* myChannel;
+ // Reset and Wait
+ if (!_randomTest) {
+    fprintf(stdout, "Reset Encoder Side %c\n", side);
+ }
+ if (side == 'A') {
+ myACM = _acmA.get();
+ codecCntr = &_codecCntrA;
+ {
+ WriteLockScoped wl(_apiTestRWLock);
+ thereIsEncoder = &_thereIsEncoderA;
+ }
+ vad = &_sendVADA;
+ dtx = &_sendDTXA;
+ mode = &_sendVADModeA;
+ myChannel = _channel_A2B;
+ } else {
+ myACM = _acmB.get();
+ codecCntr = &_codecCntrB;
+ {
+ WriteLockScoped wl(_apiTestRWLock);
+ thereIsEncoder = &_thereIsEncoderB;
+ }
+ vad = &_sendVADB;
+ dtx = &_sendDTXB;
+ mode = &_sendVADModeB;
+ myChannel = _channel_B2A;
+ }
+
+ Wait(100);
+
+ // Register the next codec
+ do {
+ *codecCntr =
+ (*codecCntr < AudioCodingModule::NumberOfCodecs() - 1) ?
+ (*codecCntr + 1) : 0;
+
+ if (*codecCntr == 0) {
+ //printf("Initialize Sender Side A \n");
+ {
+ WriteLockScoped wl(_apiTestRWLock);
+ *thereIsEncoder = false;
+ }
+ // After Initialization CN is lost, re-register them
+ if (AudioCodingModule::Codec("CN", &myCodec, 8000, 1) >= 0) {
+ CHECK_ERROR_MT(myACM->RegisterSendCodec(myCodec));
+ }
+ if (AudioCodingModule::Codec("CN", &myCodec, 16000, 1) >= 0) {
+ CHECK_ERROR_MT(myACM->RegisterSendCodec(myCodec));
+ }
+ // VAD & DTX are disabled after initialization
+ *vad = false;
+ *dtx = false;
+ _writeToFile = false;
+ }
+
+ AudioCodingModule::Codec(*codecCntr, &myCodec);
+ } while (!STR_CASE_CMP(myCodec.plname, "CN")
+ || !STR_CASE_CMP(myCodec.plname, "telephone-event")
+ || !STR_CASE_CMP(myCodec.plname, "RED"));
+
+ if (!_randomTest) {
+ fprintf(stdout,"\n=====================================================\n");
+ fprintf(stdout, " Registering New Codec %s, %d kHz, %d kbps\n",
+ myCodec.plname, myCodec.plfreq / 1000, myCodec.rate / 1000);
+ }
+ //std::cout<< std::flush;
+
+  // NO DTX for super-wideband codec at this point
+ if (myCodec.plfreq == 32000) {
+ *dtx = false;
+ CHECK_ERROR_MT(myACM->SetVAD(*dtx, *vad, *mode));
+
+ }
+
+ CHECK_ERROR_MT(myACM->RegisterSendCodec(myCodec));
+ myChannel->ResetStats();
+ {
+ WriteLockScoped wl(_apiTestRWLock);
+ *thereIsEncoder = true;
+ }
+ Wait(500);
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/test/APITest.h b/webrtc/modules/audio_coding/test/APITest.h
new file mode 100644
index 0000000000..a1937c2b00
--- /dev/null
+++ b/webrtc/modules/audio_coding/test/APITest.h
@@ -0,0 +1,163 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_TEST_APITEST_H_
+#define WEBRTC_MODULES_AUDIO_CODING_TEST_APITEST_H_
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module.h"
+#include "webrtc/modules/audio_coding/test/ACMTest.h"
+#include "webrtc/modules/audio_coding/test/Channel.h"
+#include "webrtc/modules/audio_coding/test/PCMFile.h"
+#include "webrtc/modules/audio_coding/test/utility.h"
+#include "webrtc/system_wrappers/include/event_wrapper.h"
+#include "webrtc/system_wrappers/include/rw_lock_wrapper.h"
+
+namespace webrtc {
+
+class Config;
+
+enum APITESTAction {
+ TEST_CHANGE_CODEC_ONLY = 0,
+ DTX_TEST = 1
+};
+
+class APITest : public ACMTest {
+ public:
+ explicit APITest(const Config& config);
+ ~APITest();
+
+ void Perform();
+ private:
+ int16_t SetUp();
+
+ static bool PushAudioThreadA(void* obj);
+ static bool PullAudioThreadA(void* obj);
+ static bool ProcessThreadA(void* obj);
+ static bool APIThreadA(void* obj);
+
+ static bool PushAudioThreadB(void* obj);
+ static bool PullAudioThreadB(void* obj);
+ static bool ProcessThreadB(void* obj);
+ static bool APIThreadB(void* obj);
+
+ void CheckVADStatus(char side);
+
+ // Set Min delay, get delay, playout timestamp
+ void TestDelay(char side);
+
+ // Unregister a codec & register again.
+ void TestRegisteration(char side);
+
+ // Playout Mode, background noise mode.
+ // Receiver Frequency, playout frequency.
+ void TestPlayout(char receiveSide);
+
+ //
+ void TestSendVAD(char side);
+
+ void CurrentCodec(char side);
+
+ void ChangeCodec(char side);
+
+ void Wait(uint32_t waitLengthMs);
+
+ void RunTest(char thread);
+
+ bool PushAudioRunA();
+ bool PullAudioRunA();
+ bool ProcessRunA();
+ bool APIRunA();
+
+ bool PullAudioRunB();
+ bool PushAudioRunB();
+ bool ProcessRunB();
+ bool APIRunB();
+
+ //--- ACMs
+ rtc::scoped_ptr<AudioCodingModule> _acmA;
+ rtc::scoped_ptr<AudioCodingModule> _acmB;
+
+ //--- Channels
+ Channel* _channel_A2B;
+ Channel* _channel_B2A;
+
+ //--- I/O files
+ // A
+ PCMFile _inFileA;
+ PCMFile _outFileA;
+ // B
+ PCMFile _outFileB;
+ PCMFile _inFileB;
+
+ //--- I/O params
+ // A
+ int32_t _outFreqHzA;
+ // B
+ int32_t _outFreqHzB;
+
+  // Whether to write to file; we might skip writing to file
+  // when running the test for a long time.
+ bool _writeToFile;
+ //--- Events
+ // A
+ EventTimerWrapper* _pullEventA; // pulling data from ACM
+ EventTimerWrapper* _pushEventA; // pushing data to ACM
+ EventTimerWrapper* _processEventA; // process
+ EventWrapper* _apiEventA; // API calls
+ // B
+ EventTimerWrapper* _pullEventB; // pulling data from ACM
+ EventTimerWrapper* _pushEventB; // pushing data to ACM
+ EventTimerWrapper* _processEventB; // process
+ EventWrapper* _apiEventB; // API calls
+
+  // Keep track of the current codec on each side.
+ uint8_t _codecCntrA;
+ uint8_t _codecCntrB;
+
+  // Set to true when there is an encoder/decoder on the corresponding side.
+ bool _thereIsEncoderA;
+ bool _thereIsEncoderB;
+ bool _thereIsDecoderA;
+ bool _thereIsDecoderB;
+
+ bool _sendVADA;
+ bool _sendDTXA;
+ ACMVADMode _sendVADModeA;
+
+ bool _sendVADB;
+ bool _sendDTXB;
+ ACMVADMode _sendVADModeB;
+
+ int32_t _minDelayA;
+ int32_t _minDelayB;
+ bool _payloadUsed[32];
+
+ bool _verbose;
+
+ int _dotPositionA;
+ int _dotMoveDirectionA;
+ int _dotPositionB;
+ int _dotMoveDirectionB;
+
+ char _movingDot[41];
+
+ VADCallback* _vadCallbackA;
+ VADCallback* _vadCallbackB;
+ RWLockWrapper& _apiTestRWLock;
+ bool _randomTest;
+ int _testNumA;
+ int _testNumB;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_TEST_APITEST_H_
diff --git a/webrtc/modules/audio_coding/test/Channel.cc b/webrtc/modules/audio_coding/test/Channel.cc
new file mode 100644
index 0000000000..31521fe1e3
--- /dev/null
+++ b/webrtc/modules/audio_coding/test/Channel.cc
@@ -0,0 +1,424 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/test/Channel.h"
+
+#include <assert.h>
+#include <iostream>
+
+#include "webrtc/base/format_macros.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+
+namespace webrtc {
+
+int32_t Channel::SendData(FrameType frameType,
+ uint8_t payloadType,
+ uint32_t timeStamp,
+ const uint8_t* payloadData,
+ size_t payloadSize,
+ const RTPFragmentationHeader* fragmentation) {
+ WebRtcRTPHeader rtpInfo;
+ int32_t status;
+ size_t payloadDataSize = payloadSize;
+
+ rtpInfo.header.markerBit = false;
+ rtpInfo.header.ssrc = 0;
+ rtpInfo.header.sequenceNumber = (external_sequence_number_ < 0) ?
+ _seqNo++ : static_cast<uint16_t>(external_sequence_number_);
+ rtpInfo.header.payloadType = payloadType;
+ rtpInfo.header.timestamp = (external_send_timestamp_ < 0) ? timeStamp :
+ static_cast<uint32_t>(external_send_timestamp_);
+
+ if (frameType == kAudioFrameCN) {
+ rtpInfo.type.Audio.isCNG = true;
+ } else {
+ rtpInfo.type.Audio.isCNG = false;
+ }
+ if (frameType == kEmptyFrame) {
+    // When the frame is empty, we should not transmit it. The frame size of
+    // the next non-empty frame will be based on the previous frame size.
+ _useLastFrameSize = _lastFrameSizeSample > 0;
+ return 0;
+ }
+
+ rtpInfo.type.Audio.channel = 1;
+ // Treat fragmentation separately
+ if (fragmentation != NULL) {
+ // If silence for too long, send only new data.
+ if ((fragmentation->fragmentationVectorSize == 2) &&
+ (fragmentation->fragmentationTimeDiff[1] <= 0x3fff)) {
+      // The F bit (0x80) is set because there are multiple blocks.
+ _payloadData[0] = 0x80 + fragmentation->fragmentationPlType[1];
+ size_t REDheader = (fragmentation->fragmentationTimeDiff[1] << 10) +
+ fragmentation->fragmentationLength[1];
+ _payloadData[1] = uint8_t((REDheader >> 16) & 0x000000FF);
+ _payloadData[2] = uint8_t((REDheader >> 8) & 0x000000FF);
+ _payloadData[3] = uint8_t(REDheader & 0x000000FF);
+
+ _payloadData[4] = fragmentation->fragmentationPlType[0];
+ // copy the RED data
+ memcpy(_payloadData + 5,
+ payloadData + fragmentation->fragmentationOffset[1],
+ fragmentation->fragmentationLength[1]);
+ // copy the normal data
+ memcpy(_payloadData + 5 + fragmentation->fragmentationLength[1],
+ payloadData + fragmentation->fragmentationOffset[0],
+ fragmentation->fragmentationLength[0]);
+ payloadDataSize += 5;
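+      // Editor's note (illustrative, not part of the original change): the
+      // five bytes written above follow the RFC 2198 RED layout: byte 0
+      // carries the F bit (0x80, more blocks follow) plus the 7-bit
+      // redundant payload type; bytes 1-3 hold the 14-bit timestamp offset
+      // shifted left by 10 bits OR'ed with the 10-bit block length, stored
+      // big-endian; byte 4 is the primary block header (F = 0, primary
+      // payload type).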
+ } else {
+ // single block (newest one)
+ memcpy(_payloadData, payloadData + fragmentation->fragmentationOffset[0],
+ fragmentation->fragmentationLength[0]);
+ payloadDataSize = fragmentation->fragmentationLength[0];
+ rtpInfo.header.payloadType = fragmentation->fragmentationPlType[0];
+ }
+ } else {
+ memcpy(_payloadData, payloadData, payloadDataSize);
+ if (_isStereo) {
+ if (_leftChannel) {
+ memcpy(&_rtpInfo, &rtpInfo, sizeof(WebRtcRTPHeader));
+ _leftChannel = false;
+ rtpInfo.type.Audio.channel = 1;
+ } else {
+ memcpy(&rtpInfo, &_rtpInfo, sizeof(WebRtcRTPHeader));
+ _leftChannel = true;
+ rtpInfo.type.Audio.channel = 2;
+ }
+ }
+ }
+
+ _channelCritSect->Enter();
+ if (_saveBitStream) {
+ //fwrite(payloadData, sizeof(uint8_t), payloadSize, _bitStreamFile);
+ }
+
+ if (!_isStereo) {
+ CalcStatistics(rtpInfo, payloadSize);
+ }
+ _useLastFrameSize = false;
+ _lastInTimestamp = timeStamp;
+ _totalBytes += payloadDataSize;
+ _channelCritSect->Leave();
+
+ if (_useFECTestWithPacketLoss) {
+ _packetLoss += 1;
+ if (_packetLoss == 3) {
+ _packetLoss = 0;
+ return 0;
+ }
+ }
+
+ if (num_packets_to_drop_ > 0) {
+ num_packets_to_drop_--;
+ return 0;
+ }
+
+ status = _receiverACM->IncomingPacket(_payloadData, payloadDataSize, rtpInfo);
+
+ return status;
+}
+
+// TODO(turajs): rewrite this method.
+void Channel::CalcStatistics(WebRtcRTPHeader& rtpInfo, size_t payloadSize) {
+ int n;
+ if ((rtpInfo.header.payloadType != _lastPayloadType)
+ && (_lastPayloadType != -1)) {
+ // payload-type is changed.
+ // we have to terminate the calculations on the previous payload type
+ // we ignore the last packet in that payload type just to make things
+ // easier.
+ for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
+ if (_lastPayloadType == _payloadStats[n].payloadType) {
+ _payloadStats[n].newPacket = true;
+ break;
+ }
+ }
+ }
+ _lastPayloadType = rtpInfo.header.payloadType;
+
+ bool newPayload = true;
+ ACMTestPayloadStats* currentPayloadStr = NULL;
+ for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
+ if (rtpInfo.header.payloadType == _payloadStats[n].payloadType) {
+ newPayload = false;
+ currentPayloadStr = &_payloadStats[n];
+ break;
+ }
+ }
+
+ if (!newPayload) {
+ if (!currentPayloadStr->newPacket) {
+ if (!_useLastFrameSize) {
+ _lastFrameSizeSample = (uint32_t) ((uint32_t) rtpInfo.header.timestamp -
+ (uint32_t) currentPayloadStr->lastTimestamp);
+ }
+ assert(_lastFrameSizeSample > 0);
+ int k = 0;
+ for (; k < MAX_NUM_FRAMESIZES; ++k) {
+ if ((currentPayloadStr->frameSizeStats[k].frameSizeSample ==
+ _lastFrameSizeSample) ||
+ (currentPayloadStr->frameSizeStats[k].frameSizeSample == 0)) {
+ break;
+ }
+ }
+ if (k == MAX_NUM_FRAMESIZES) {
+ // New frame size found but no space to count statistics on it. Skip it.
+ printf("No memory to store statistics for payload %d : frame size %d\n",
+ _lastPayloadType, _lastFrameSizeSample);
+ return;
+ }
+ ACMTestFrameSizeStats* currentFrameSizeStats = &(currentPayloadStr
+ ->frameSizeStats[k]);
+ currentFrameSizeStats->frameSizeSample = (int16_t) _lastFrameSizeSample;
+
+ // increment the number of encoded samples.
+ currentFrameSizeStats->totalEncodedSamples += _lastFrameSizeSample;
+      // increment the number of received packets
+ currentFrameSizeStats->numPackets++;
+      // increment the total number of bytes (this is based on
+      // the previous payload; we don't know the frame-size of
+      // the current payload).
+ currentFrameSizeStats->totalPayloadLenByte += currentPayloadStr
+ ->lastPayloadLenByte;
+      // store the maximum payload-size (this is based on
+      // the previous payload; we don't know the frame-size of
+      // the current payload).
+ if (currentFrameSizeStats->maxPayloadLen
+ < currentPayloadStr->lastPayloadLenByte) {
+ currentFrameSizeStats->maxPayloadLen = currentPayloadStr
+ ->lastPayloadLenByte;
+ }
+ // store the current values for the next time
+ currentPayloadStr->lastTimestamp = rtpInfo.header.timestamp;
+ currentPayloadStr->lastPayloadLenByte = payloadSize;
+ } else {
+ currentPayloadStr->newPacket = false;
+ currentPayloadStr->lastPayloadLenByte = payloadSize;
+ currentPayloadStr->lastTimestamp = rtpInfo.header.timestamp;
+ currentPayloadStr->payloadType = rtpInfo.header.payloadType;
+ memset(currentPayloadStr->frameSizeStats, 0, MAX_NUM_FRAMESIZES *
+ sizeof(ACMTestFrameSizeStats));
+ }
+ } else {
+ n = 0;
+ while (_payloadStats[n].payloadType != -1) {
+ n++;
+ }
+    // First packet of a previously unseen payload type.
+ _payloadStats[n].newPacket = false;
+ _payloadStats[n].lastPayloadLenByte = payloadSize;
+ _payloadStats[n].lastTimestamp = rtpInfo.header.timestamp;
+ _payloadStats[n].payloadType = rtpInfo.header.payloadType;
+ memset(_payloadStats[n].frameSizeStats, 0, MAX_NUM_FRAMESIZES *
+ sizeof(ACMTestFrameSizeStats));
+ }
+}
+
+Channel::Channel(int16_t chID)
+ : _receiverACM(NULL),
+ _seqNo(0),
+ _channelCritSect(CriticalSectionWrapper::CreateCriticalSection()),
+ _bitStreamFile(NULL),
+ _saveBitStream(false),
+ _lastPayloadType(-1),
+ _isStereo(false),
+ _leftChannel(true),
+ _lastInTimestamp(0),
+ _useLastFrameSize(false),
+ _lastFrameSizeSample(0),
+ _packetLoss(0),
+ _useFECTestWithPacketLoss(false),
+ _beginTime(TickTime::MillisecondTimestamp()),
+ _totalBytes(0),
+ external_send_timestamp_(-1),
+ external_sequence_number_(-1),
+ num_packets_to_drop_(0) {
+ int n;
+ int k;
+ for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
+ _payloadStats[n].payloadType = -1;
+ _payloadStats[n].newPacket = true;
+ for (k = 0; k < MAX_NUM_FRAMESIZES; k++) {
+ _payloadStats[n].frameSizeStats[k].frameSizeSample = 0;
+ _payloadStats[n].frameSizeStats[k].maxPayloadLen = 0;
+ _payloadStats[n].frameSizeStats[k].numPackets = 0;
+ _payloadStats[n].frameSizeStats[k].totalPayloadLenByte = 0;
+ _payloadStats[n].frameSizeStats[k].totalEncodedSamples = 0;
+ }
+ }
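+  // A non-negative channel ID opens a per-channel file for dumping the
+  // encoded bitstream.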
+ if (chID >= 0) {
+ _saveBitStream = true;
+ char bitStreamFileName[500];
+ sprintf(bitStreamFileName, "bitStream_%d.dat", chID);
+ _bitStreamFile = fopen(bitStreamFileName, "wb");
+ } else {
+ _saveBitStream = false;
+ }
+}
+
+Channel::~Channel() {
+ delete _channelCritSect;
+}
+
+void Channel::RegisterReceiverACM(AudioCodingModule* acm) {
+ _receiverACM = acm;
+ return;
+}
+
+void Channel::ResetStats() {
+ int n;
+ int k;
+ _channelCritSect->Enter();
+ _lastPayloadType = -1;
+ for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
+ _payloadStats[n].payloadType = -1;
+ _payloadStats[n].newPacket = true;
+ for (k = 0; k < MAX_NUM_FRAMESIZES; k++) {
+ _payloadStats[n].frameSizeStats[k].frameSizeSample = 0;
+ _payloadStats[n].frameSizeStats[k].maxPayloadLen = 0;
+ _payloadStats[n].frameSizeStats[k].numPackets = 0;
+ _payloadStats[n].frameSizeStats[k].totalPayloadLenByte = 0;
+ _payloadStats[n].frameSizeStats[k].totalEncodedSamples = 0;
+ }
+ }
+ _beginTime = TickTime::MillisecondTimestamp();
+ _totalBytes = 0;
+ _channelCritSect->Leave();
+}
+
+int16_t Channel::Stats(CodecInst& codecInst,
+ ACMTestPayloadStats& payloadStats) {
+ _channelCritSect->Enter();
+ int n;
+ payloadStats.payloadType = -1;
+ for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
+ if (_payloadStats[n].payloadType == codecInst.pltype) {
+ memcpy(&payloadStats, &_payloadStats[n], sizeof(ACMTestPayloadStats));
+ break;
+ }
+ }
+ if (payloadStats.payloadType == -1) {
+ _channelCritSect->Leave();
+ return -1;
+ }
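+  // Derive per-frame-size usage time and average bit rate from the
+  // accumulated sample and byte counts.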
+ for (n = 0; n < MAX_NUM_FRAMESIZES; n++) {
+ if (payloadStats.frameSizeStats[n].frameSizeSample == 0) {
+ _channelCritSect->Leave();
+ return 0;
+ }
+ payloadStats.frameSizeStats[n].usageLenSec = (double) payloadStats
+ .frameSizeStats[n].totalEncodedSamples / (double) codecInst.plfreq;
+
+ payloadStats.frameSizeStats[n].rateBitPerSec =
+ payloadStats.frameSizeStats[n].totalPayloadLenByte * 8
+ / payloadStats.frameSizeStats[n].usageLenSec;
+
+ }
+ _channelCritSect->Leave();
+ return 0;
+}
+
+void Channel::Stats(uint32_t* numPackets) {
+ _channelCritSect->Enter();
+ int k;
+ int n;
+ memset(numPackets, 0, MAX_NUM_PAYLOADS * sizeof(uint32_t));
+ for (k = 0; k < MAX_NUM_PAYLOADS; k++) {
+ if (_payloadStats[k].payloadType == -1) {
+ break;
+ }
+ numPackets[k] = 0;
+ for (n = 0; n < MAX_NUM_FRAMESIZES; n++) {
+ if (_payloadStats[k].frameSizeStats[n].frameSizeSample == 0) {
+ break;
+ }
+ numPackets[k] += _payloadStats[k].frameSizeStats[n].numPackets;
+ }
+ }
+ _channelCritSect->Leave();
+}
+
+void Channel::Stats(uint8_t* payloadType, uint32_t* payloadLenByte) {
+ _channelCritSect->Enter();
+
+ int k;
+ int n;
+ memset(payloadLenByte, 0, MAX_NUM_PAYLOADS * sizeof(uint32_t));
+ for (k = 0; k < MAX_NUM_PAYLOADS; k++) {
+ if (_payloadStats[k].payloadType == -1) {
+ break;
+ }
+ payloadType[k] = (uint8_t) _payloadStats[k].payloadType;
+ payloadLenByte[k] = 0;
+ for (n = 0; n < MAX_NUM_FRAMESIZES; n++) {
+ if (_payloadStats[k].frameSizeStats[n].frameSizeSample == 0) {
+ break;
+ }
+ payloadLenByte[k] += (uint16_t) _payloadStats[k].frameSizeStats[n]
+ .totalPayloadLenByte;
+ }
+ }
+
+ _channelCritSect->Leave();
+}
+
+void Channel::PrintStats(CodecInst& codecInst) {
+ ACMTestPayloadStats payloadStats;
+ Stats(codecInst, payloadStats);
+ printf("%s %d kHz\n", codecInst.plname, codecInst.plfreq / 1000);
+ printf("=====================================================\n");
+ if (payloadStats.payloadType == -1) {
+ printf("No Packets are sent with payload-type %d (%s)\n\n",
+ codecInst.pltype, codecInst.plname);
+ return;
+ }
+ for (int k = 0; k < MAX_NUM_FRAMESIZES; k++) {
+ if (payloadStats.frameSizeStats[k].frameSizeSample == 0) {
+ break;
+ }
+ printf("Frame-size.................... %d samples\n",
+ payloadStats.frameSizeStats[k].frameSizeSample);
+ printf("Average Rate.................. %.0f bits/sec\n",
+ payloadStats.frameSizeStats[k].rateBitPerSec);
+ printf("Maximum Payload-Size.......... %" PRIuS " Bytes\n",
+ payloadStats.frameSizeStats[k].maxPayloadLen);
+ printf(
+ "Maximum Instantaneous Rate.... %.0f bits/sec\n",
+ ((double) payloadStats.frameSizeStats[k].maxPayloadLen * 8.0
+ * (double) codecInst.plfreq)
+ / (double) payloadStats.frameSizeStats[k].frameSizeSample);
+ printf("Number of Packets............. %u\n",
+ (unsigned int) payloadStats.frameSizeStats[k].numPackets);
+ printf("Duration...................... %0.3f sec\n\n",
+ payloadStats.frameSizeStats[k].usageLenSec);
+  }
+}
+
+uint32_t Channel::LastInTimestamp() {
+ uint32_t timestamp;
+ _channelCritSect->Enter();
+ timestamp = _lastInTimestamp;
+ _channelCritSect->Leave();
+ return timestamp;
+}
+
+double Channel::BitRate() {
+ double rate;
+ uint64_t currTime = TickTime::MillisecondTimestamp();
+ _channelCritSect->Enter();
+ rate = ((double) _totalBytes * 8.0) / (double) (currTime - _beginTime);
+ _channelCritSect->Leave();
+ return rate;
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/test/Channel.h b/webrtc/modules/audio_coding/test/Channel.h
new file mode 100644
index 0000000000..b047aa9909
--- /dev/null
+++ b/webrtc/modules/audio_coding/test/Channel.h
@@ -0,0 +1,130 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_TEST_CHANNEL_H_
+#define WEBRTC_MODULES_AUDIO_CODING_TEST_CHANNEL_H_
+
+#include <stdio.h>
+
+#include "webrtc/modules/audio_coding/include/audio_coding_module.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+
+#define MAX_NUM_PAYLOADS 50
+#define MAX_NUM_FRAMESIZES 6
+
+// TODO(turajs): Write constructor for this structure.
+struct ACMTestFrameSizeStats {
+ uint16_t frameSizeSample;
+ size_t maxPayloadLen;
+ uint32_t numPackets;
+ uint64_t totalPayloadLenByte;
+ uint64_t totalEncodedSamples;
+ double rateBitPerSec;
+ double usageLenSec;
+};
+
+// TODO(turajs): Write constructor for this structure.
+struct ACMTestPayloadStats {
+ bool newPacket;
+ int16_t payloadType;
+ size_t lastPayloadLenByte;
+ uint32_t lastTimestamp;
+ ACMTestFrameSizeStats frameSizeStats[MAX_NUM_FRAMESIZES];
+};
+
+class Channel : public AudioPacketizationCallback {
+ public:
+
+ Channel(int16_t chID = -1);
+ ~Channel();
+
+ int32_t SendData(FrameType frameType,
+ uint8_t payloadType,
+ uint32_t timeStamp,
+ const uint8_t* payloadData,
+ size_t payloadSize,
+ const RTPFragmentationHeader* fragmentation) override;
+
+ void RegisterReceiverACM(AudioCodingModule *acm);
+
+ void ResetStats();
+
+ int16_t Stats(CodecInst& codecInst, ACMTestPayloadStats& payloadStats);
+
+ void Stats(uint32_t* numPackets);
+
+ void Stats(uint8_t* payloadType, uint32_t* payloadLenByte);
+
+ void PrintStats(CodecInst& codecInst);
+
+ void SetIsStereo(bool isStereo) {
+ _isStereo = isStereo;
+ }
+
+ uint32_t LastInTimestamp();
+
+ void SetFECTestWithPacketLoss(bool usePacketLoss) {
+ _useFECTestWithPacketLoss = usePacketLoss;
+ }
+
+ double BitRate();
+
+ void set_send_timestamp(uint32_t new_send_ts) {
+ external_send_timestamp_ = new_send_ts;
+ }
+
+ void set_sequence_number(uint16_t new_sequence_number) {
+ external_sequence_number_ = new_sequence_number;
+ }
+
+ void set_num_packets_to_drop(int new_num_packets_to_drop) {
+ num_packets_to_drop_ = new_num_packets_to_drop;
+ }
+
+ private:
+ void CalcStatistics(WebRtcRTPHeader& rtpInfo, size_t payloadSize);
+
+ AudioCodingModule* _receiverACM;
+ uint16_t _seqNo;
+  // 60 ms * 32 samples(max)/ms * 2 descriptions (maybe) * 2 bytes/sample.
+ uint8_t _payloadData[60 * 32 * 2 * 2];
+
+ CriticalSectionWrapper* _channelCritSect;
+ FILE* _bitStreamFile;
+ bool _saveBitStream;
+ int16_t _lastPayloadType;
+ ACMTestPayloadStats _payloadStats[MAX_NUM_PAYLOADS];
+ bool _isStereo;
+ WebRtcRTPHeader _rtpInfo;
+ bool _leftChannel;
+ uint32_t _lastInTimestamp;
+ bool _useLastFrameSize;
+ uint32_t _lastFrameSizeSample;
+ // FEC Test variables
+ int16_t _packetLoss;
+ bool _useFECTestWithPacketLoss;
+ uint64_t _beginTime;
+ uint64_t _totalBytes;
+
+ // External timing info, defaulted to -1. Only used if they are
+ // non-negative.
+ int64_t external_send_timestamp_;
+ int32_t external_sequence_number_;
+ int num_packets_to_drop_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_TEST_CHANNEL_H_
diff --git a/webrtc/modules/audio_coding/test/EncodeDecodeTest.cc b/webrtc/modules/audio_coding/test/EncodeDecodeTest.cc
new file mode 100644
index 0000000000..ba3c8d9ad2
--- /dev/null
+++ b/webrtc/modules/audio_coding/test/EncodeDecodeTest.cc
@@ -0,0 +1,355 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/test/EncodeDecodeTest.h"
+
+#include <sstream>
+#include <stdio.h>
+#include <stdlib.h>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/common_types.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module.h"
+#include "webrtc/modules/audio_coding/acm2/acm_common_defs.h"
+#include "webrtc/modules/audio_coding/test/utility.h"
+#include "webrtc/system_wrappers/include/trace.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+namespace webrtc {
+
+TestPacketization::TestPacketization(RTPStream *rtpStream, uint16_t frequency)
+ : _rtpStream(rtpStream),
+ _frequency(frequency),
+ _seqNo(0) {
+}
+
+TestPacketization::~TestPacketization() {
+}
+
+int32_t TestPacketization::SendData(
+ const FrameType /* frameType */, const uint8_t payloadType,
+ const uint32_t timeStamp, const uint8_t* payloadData,
+ const size_t payloadSize,
+ const RTPFragmentationHeader* /* fragmentation */) {
+ _rtpStream->Write(payloadType, timeStamp, _seqNo++, payloadData, payloadSize,
+ _frequency);
+ return 1;
+}
+
+Sender::Sender()
+ : _acm(NULL),
+ _pcmFile(),
+ _audioFrame(),
+ _packetization(NULL) {
+}
+
+void Sender::Setup(AudioCodingModule *acm, RTPStream *rtpStream,
+ std::string in_file_name, int sample_rate, size_t channels) {
+ struct CodecInst sendCodec;
+ int noOfCodecs = acm->NumberOfCodecs();
+ int codecNo;
+
+ // Open input file
+ const std::string file_name = webrtc::test::ResourcePath(in_file_name, "pcm");
+ _pcmFile.Open(file_name, sample_rate, "rb");
+ if (channels == 2) {
+ _pcmFile.ReadStereo(true);
+ }
+ // Set test length to 500 ms (50 blocks of 10 ms each).
+ _pcmFile.SetNum10MsBlocksToRead(50);
+ // Fast-forward 1 second (100 blocks) since the file starts with silence.
+ _pcmFile.FastForward(100);
+
+ // Set the codec for the current test.
+ if ((testMode == 0) || (testMode == 1)) {
+ // Set the codec id.
+ codecNo = codeId;
+ } else {
+ // Choose codec on command line.
+ printf("List of supported codec.\n");
+ for (int n = 0; n < noOfCodecs; n++) {
+ EXPECT_EQ(0, acm->Codec(n, &sendCodec));
+ printf("%d %s\n", n, sendCodec.plname);
+ }
+ printf("Choose your codec:");
+ ASSERT_GT(scanf("%d", &codecNo), 0);
+ }
+
+ EXPECT_EQ(0, acm->Codec(codecNo, &sendCodec));
+
+ sendCodec.channels = channels;
+
+ EXPECT_EQ(0, acm->RegisterSendCodec(sendCodec));
+ _packetization = new TestPacketization(rtpStream, sendCodec.plfreq);
+ EXPECT_EQ(0, acm->RegisterTransportCallback(_packetization));
+
+ _acm = acm;
+}
+
+void Sender::Teardown() {
+ _pcmFile.Close();
+ delete _packetization;
+}
+
+bool Sender::Add10MsData() {
+ if (!_pcmFile.EndOfFile()) {
+ EXPECT_GT(_pcmFile.Read10MsData(_audioFrame), 0);
+ int32_t ok = _acm->Add10MsData(_audioFrame);
+ EXPECT_GE(ok, 0);
+ return ok >= 0 ? true : false;
+ }
+ return false;
+}
+
+void Sender::Run() {
+ while (true) {
+ if (!Add10MsData()) {
+ break;
+ }
+ }
+}
+
+Receiver::Receiver()
+ : _playoutLengthSmpls(WEBRTC_10MS_PCM_AUDIO),
+ _payloadSizeBytes(MAX_INCOMING_PAYLOAD) {
+}
+
+void Receiver::Setup(AudioCodingModule *acm, RTPStream *rtpStream,
+ std::string out_file_name, size_t channels) {
+ struct CodecInst recvCodec = CodecInst();
+ int noOfCodecs;
+ EXPECT_EQ(0, acm->InitializeReceiver());
+
+ noOfCodecs = acm->NumberOfCodecs();
+ for (int i = 0; i < noOfCodecs; i++) {
+ EXPECT_EQ(0, acm->Codec(i, &recvCodec));
+ if (recvCodec.channels == channels)
+ EXPECT_EQ(0, acm->RegisterReceiveCodec(recvCodec));
+ // Forces mono/stereo for Opus.
+ if (!strcmp(recvCodec.plname, "opus")) {
+ recvCodec.channels = channels;
+ EXPECT_EQ(0, acm->RegisterReceiveCodec(recvCodec));
+ }
+ }
+
+ int playSampFreq;
+ std::string file_name;
+ std::stringstream file_stream;
+ file_stream << webrtc::test::OutputPath() << out_file_name
+ << static_cast<int>(codeId) << ".pcm";
+ file_name = file_stream.str();
+ _rtpStream = rtpStream;
+
+ if (testMode == 1) {
+ playSampFreq = recvCodec.plfreq;
+ _pcmFile.Open(file_name, recvCodec.plfreq, "wb+");
+ } else if (testMode == 0) {
+ playSampFreq = 32000;
+ _pcmFile.Open(file_name, 32000, "wb+");
+ } else {
+ printf("\nValid output frequencies:\n");
+ printf("8000\n16000\n32000\n-1,");
+ printf("which means output frequency equal to received signal frequency");
+ printf("\n\nChoose output sampling frequency: ");
+ ASSERT_GT(scanf("%d", &playSampFreq), 0);
+ file_name = webrtc::test::OutputPath() + out_file_name + ".pcm";
+ _pcmFile.Open(file_name, playSampFreq, "wb+");
+ }
+
+ _realPayloadSizeBytes = 0;
+ _playoutBuffer = new int16_t[WEBRTC_10MS_PCM_AUDIO];
+ _frequency = playSampFreq;
+ _acm = acm;
+ _firstTime = true;
+}
+
+void Receiver::Teardown() {
+ delete[] _playoutBuffer;
+ _pcmFile.Close();
+ if (testMode > 1) {
+ Trace::ReturnTrace();
+ }
+}
+
+bool Receiver::IncomingPacket() {
+ if (!_rtpStream->EndOfFile()) {
+ if (_firstTime) {
+ _firstTime = false;
+ _realPayloadSizeBytes = _rtpStream->Read(&_rtpInfo, _incomingPayload,
+ _payloadSizeBytes, &_nextTime);
+ if (_realPayloadSizeBytes == 0) {
+ if (_rtpStream->EndOfFile()) {
+ _firstTime = true;
+ return true;
+ } else {
+ return false;
+ }
+ }
+ }
+
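+    // Deliver the packet read on the previous call, then read ahead one
+    // packet so that Run() learns the timestamp of the next packet.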
+ EXPECT_EQ(0, _acm->IncomingPacket(_incomingPayload, _realPayloadSizeBytes,
+ _rtpInfo));
+ _realPayloadSizeBytes = _rtpStream->Read(&_rtpInfo, _incomingPayload,
+ _payloadSizeBytes, &_nextTime);
+ if (_realPayloadSizeBytes == 0 && _rtpStream->EndOfFile()) {
+ _firstTime = true;
+ }
+ }
+ return true;
+}
+
+bool Receiver::PlayoutData() {
+ AudioFrame audioFrame;
+
+  int32_t ok = _acm->PlayoutData10Ms(_frequency, &audioFrame);
+  EXPECT_EQ(0, ok);
+  if (ok < 0) {
+    return false;
+  }
+ if (_playoutLengthSmpls == 0) {
+ return false;
+ }
+ _pcmFile.Write10MsData(audioFrame.data_,
+ audioFrame.samples_per_channel_ * audioFrame.num_channels_);
+ return true;
+}
+
+void Receiver::Run() {
+ uint8_t counter500Ms = 50;
+ uint32_t clock = 0;
+
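+  // Drive a simple simulated clock: feed packets to the ACM when their
+  // scheduled time arrives, play out 10 ms of audio every tenth tick, and
+  // keep draining the decoder for a short while (the counter) after the RTP
+  // stream ends.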
+ while (counter500Ms > 0) {
+ if (clock == 0 || clock >= _nextTime) {
+ EXPECT_TRUE(IncomingPacket());
+ if (clock == 0) {
+ clock = _nextTime;
+ }
+ }
+ if ((clock % 10) == 0) {
+ if (!PlayoutData()) {
+ clock++;
+ continue;
+ }
+ }
+ if (_rtpStream->EndOfFile()) {
+ counter500Ms--;
+ }
+ clock++;
+ }
+}
+
+EncodeDecodeTest::EncodeDecodeTest() {
+ _testMode = 2;
+ Trace::CreateTrace();
+ Trace::SetTraceFile(
+ (webrtc::test::OutputPath() + "acm_encdec_trace.txt").c_str());
+}
+
+EncodeDecodeTest::EncodeDecodeTest(int testMode) {
+  // testMode == 0 for autotest.
+  // testMode == 1 for testing all codecs/parameters.
+  // testMode > 1 for a specific user-input test (as it was used before).
+ _testMode = testMode;
+ if (_testMode != 0) {
+ Trace::CreateTrace();
+ Trace::SetTraceFile(
+ (webrtc::test::OutputPath() + "acm_encdec_trace.txt").c_str());
+ }
+}
+
+void EncodeDecodeTest::Perform() {
+ int numCodecs = 1;
+ int codePars[3]; // Frequency, packet size, rate.
+  int numPars[52];  // Number of codec parameter sets (freq, pacsize, rate)
+ // to test, for a given codec.
+
+ codePars[0] = 0;
+ codePars[1] = 0;
+ codePars[2] = 0;
+
+ rtc::scoped_ptr<AudioCodingModule> acm(AudioCodingModule::Create(0));
+ struct CodecInst sendCodecTmp;
+ numCodecs = acm->NumberOfCodecs();
+
+ if (_testMode != 2) {
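+    // Signaling codecs (telephone-event, CN, RED) and stereo configurations
+    // get zero parameter sets and are skipped; every other codec gets one.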
+ for (int n = 0; n < numCodecs; n++) {
+ EXPECT_EQ(0, acm->Codec(n, &sendCodecTmp));
+ if (STR_CASE_CMP(sendCodecTmp.plname, "telephone-event") == 0) {
+ numPars[n] = 0;
+ } else if (STR_CASE_CMP(sendCodecTmp.plname, "cn") == 0) {
+ numPars[n] = 0;
+ } else if (STR_CASE_CMP(sendCodecTmp.plname, "red") == 0) {
+ numPars[n] = 0;
+ } else if (sendCodecTmp.channels == 2) {
+ numPars[n] = 0;
+ } else {
+ numPars[n] = 1;
+ }
+ }
+ } else {
+ numCodecs = 1;
+ numPars[0] = 1;
+ }
+
+ _receiver.testMode = _testMode;
+
+ // Loop over all mono codecs:
+ for (int codeId = 0; codeId < numCodecs; codeId++) {
+ // Only encode using real mono encoders, not telephone-event and cng.
+ for (int loopPars = 1; loopPars <= numPars[codeId]; loopPars++) {
+ // Encode all data to file.
+ std::string fileName = EncodeToFile(1, codeId, codePars, _testMode);
+
+ RTPFile rtpFile;
+ rtpFile.Open(fileName.c_str(), "rb");
+
+ _receiver.codeId = codeId;
+
+ rtpFile.ReadHeader();
+ _receiver.Setup(acm.get(), &rtpFile, "encodeDecode_out", 1);
+ _receiver.Run();
+ _receiver.Teardown();
+ rtpFile.Close();
+ }
+ }
+
+ // End tracing.
+ if (_testMode == 1) {
+ Trace::ReturnTrace();
+ }
+}
+
+std::string EncodeDecodeTest::EncodeToFile(int fileType,
+ int codeId,
+ int* codePars,
+ int testMode) {
+ rtc::scoped_ptr<AudioCodingModule> acm(AudioCodingModule::Create(1));
+ RTPFile rtpFile;
+ std::string fileName = webrtc::test::TempFilename(webrtc::test::OutputPath(),
+ "encode_decode_rtp");
+ rtpFile.Open(fileName.c_str(), "wb+");
+ rtpFile.WriteHeader();
+
+ // Store for auto_test and logging.
+ _sender.testMode = testMode;
+ _sender.codeId = codeId;
+
+ _sender.Setup(acm.get(), &rtpFile, "audio_coding/testfile32kHz", 32000, 1);
+ if (acm->SendCodec()) {
+ _sender.Run();
+ }
+ _sender.Teardown();
+ rtpFile.Close();
+
+ return fileName;
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/test/EncodeDecodeTest.h b/webrtc/modules/audio_coding/test/EncodeDecodeTest.h
new file mode 100644
index 0000000000..f9a9a5bb52
--- /dev/null
+++ b/webrtc/modules/audio_coding/test/EncodeDecodeTest.h
@@ -0,0 +1,123 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_TEST_ENCODEDECODETEST_H_
+#define WEBRTC_MODULES_AUDIO_CODING_TEST_ENCODEDECODETEST_H_
+
+#include <stdio.h>
+#include <string.h>
+
+#include "webrtc/modules/audio_coding/include/audio_coding_module.h"
+#include "webrtc/modules/audio_coding/test/ACMTest.h"
+#include "webrtc/modules/audio_coding/test/PCMFile.h"
+#include "webrtc/modules/audio_coding/test/RTPFile.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+#define MAX_INCOMING_PAYLOAD 8096
+
+// TestPacketization callback which writes the encoded payloads to a file.
+class TestPacketization : public AudioPacketizationCallback {
+ public:
+ TestPacketization(RTPStream *rtpStream, uint16_t frequency);
+ ~TestPacketization();
+ int32_t SendData(const FrameType frameType,
+ const uint8_t payloadType,
+ const uint32_t timeStamp,
+ const uint8_t* payloadData,
+ const size_t payloadSize,
+ const RTPFragmentationHeader* fragmentation) override;
+
+ private:
+ static void MakeRTPheader(uint8_t* rtpHeader, uint8_t payloadType,
+ int16_t seqNo, uint32_t timeStamp, uint32_t ssrc);
+ RTPStream* _rtpStream;
+ int32_t _frequency;
+ int16_t _seqNo;
+};
+
+class Sender {
+ public:
+ Sender();
+ void Setup(AudioCodingModule *acm, RTPStream *rtpStream,
+ std::string in_file_name, int sample_rate, size_t channels);
+ void Teardown();
+ void Run();
+ bool Add10MsData();
+
+  // For auto_test and logging.
+ uint8_t testMode;
+ uint8_t codeId;
+
+ protected:
+ AudioCodingModule* _acm;
+
+ private:
+ PCMFile _pcmFile;
+ AudioFrame _audioFrame;
+ TestPacketization* _packetization;
+};
+
+class Receiver {
+ public:
+ Receiver();
+  virtual ~Receiver() {}
+ void Setup(AudioCodingModule *acm, RTPStream *rtpStream,
+ std::string out_file_name, size_t channels);
+ void Teardown();
+ void Run();
+ virtual bool IncomingPacket();
+ bool PlayoutData();
+
+  // For auto_test and logging.
+ uint8_t codeId;
+ uint8_t testMode;
+
+ private:
+ PCMFile _pcmFile;
+ int16_t* _playoutBuffer;
+ uint16_t _playoutLengthSmpls;
+ int32_t _frequency;
+ bool _firstTime;
+
+ protected:
+ AudioCodingModule* _acm;
+ uint8_t _incomingPayload[MAX_INCOMING_PAYLOAD];
+ RTPStream* _rtpStream;
+ WebRtcRTPHeader _rtpInfo;
+ size_t _realPayloadSizeBytes;
+ size_t _payloadSizeBytes;
+ uint32_t _nextTime;
+};
+
+class EncodeDecodeTest : public ACMTest {
+ public:
+ EncodeDecodeTest();
+ explicit EncodeDecodeTest(int testMode);
+ void Perform() override;
+
+ uint16_t _playoutFreq;
+ uint8_t _testMode;
+
+ private:
+ std::string EncodeToFile(int fileType,
+ int codeId,
+ int* codePars,
+ int testMode);
+
+ protected:
+ Sender _sender;
+ Receiver _receiver;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_TEST_ENCODEDECODETEST_H_
diff --git a/webrtc/modules/audio_coding/test/PCMFile.cc b/webrtc/modules/audio_coding/test/PCMFile.cc
new file mode 100644
index 0000000000..9289d73baa
--- /dev/null
+++ b/webrtc/modules/audio_coding/test/PCMFile.cc
@@ -0,0 +1,221 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/test/PCMFile.h"
+
+#include <ctype.h>
+#include <stdio.h>
+#include <string.h>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/include/module_common_types.h"
+
+namespace webrtc {
+
+#define MAX_FILE_NAME_LENGTH_BYTE 500
+
+PCMFile::PCMFile()
+ : pcm_file_(NULL),
+ samples_10ms_(160),
+ frequency_(16000),
+ end_of_file_(false),
+ auto_rewind_(false),
+ rewinded_(false),
+ read_stereo_(false),
+ save_stereo_(false) {
+ timestamp_ = (((uint32_t) rand() & 0x0000FFFF) << 16) |
+ ((uint32_t) rand() & 0x0000FFFF);
+}
+
+PCMFile::PCMFile(uint32_t timestamp)
+ : pcm_file_(NULL),
+ samples_10ms_(160),
+ frequency_(16000),
+ end_of_file_(false),
+ auto_rewind_(false),
+ rewinded_(false),
+ read_stereo_(false),
+ save_stereo_(false) {
+ timestamp_ = timestamp;
+}
+
+int16_t PCMFile::ChooseFile(std::string* file_name, int16_t max_len,
+ uint16_t* frequency_hz) {
+ char tmp_name[MAX_FILE_NAME_LENGTH_BYTE];
+
+ EXPECT_TRUE(fgets(tmp_name, MAX_FILE_NAME_LENGTH_BYTE, stdin) != NULL);
+ tmp_name[MAX_FILE_NAME_LENGTH_BYTE - 1] = '\0';
+ int16_t n = 0;
+
+  // Remove leading whitespace and control characters.
+ while ((isspace(tmp_name[n]) || iscntrl(tmp_name[n])) && (tmp_name[n] != 0)
+ && (n < MAX_FILE_NAME_LENGTH_BYTE)) {
+ n++;
+ }
+ if (n > 0) {
+ memmove(tmp_name, &tmp_name[n], MAX_FILE_NAME_LENGTH_BYTE - n);
+ }
+
+  // Remove trailing whitespace and control characters.
+ n = (int16_t)(strlen(tmp_name) - 1);
+ if (n >= 0) {
+ while ((isspace(tmp_name[n]) || iscntrl(tmp_name[n])) && (n >= 0)) {
+ n--;
+ }
+ }
+ if (n >= 0) {
+ tmp_name[n + 1] = '\0';
+ }
+
+ int16_t len = (int16_t) strlen(tmp_name);
+ if (len > max_len) {
+ return -1;
+ }
+ if (len > 0) {
+ std::string tmp_string(tmp_name, len + 1);
+ *file_name = tmp_string;
+ }
+ printf("Enter the sampling frequency (in Hz) of the above file [%u]: ",
+ *frequency_hz);
+ EXPECT_TRUE(fgets(tmp_name, 10, stdin) != NULL);
+ uint16_t tmp_frequency = (uint16_t) atoi(tmp_name);
+ if (tmp_frequency > 0) {
+ *frequency_hz = tmp_frequency;
+ }
+ return 0;
+}
+
+void PCMFile::Open(const std::string& file_name, uint16_t frequency,
+ const char* mode, bool auto_rewind) {
+ if ((pcm_file_ = fopen(file_name.c_str(), mode)) == NULL) {
+ printf("Cannot open file %s.\n", file_name.c_str());
+ ADD_FAILURE() << "Unable to read file";
+ }
+ frequency_ = frequency;
+ samples_10ms_ = (uint16_t)(frequency_ / 100);
+ auto_rewind_ = auto_rewind;
+ end_of_file_ = false;
+ rewinded_ = false;
+}
+
+int32_t PCMFile::SamplingFrequency() const {
+ return frequency_;
+}
+
+uint16_t PCMFile::PayloadLength10Ms() const {
+ return samples_10ms_;
+}
+
+int32_t PCMFile::Read10MsData(AudioFrame& audio_frame) {
+ uint16_t channels = 1;
+ if (read_stereo_) {
+ channels = 2;
+ }
+
+ int32_t payload_size = (int32_t) fread(audio_frame.data_, sizeof(uint16_t),
+ samples_10ms_ * channels, pcm_file_);
+ if (payload_size < samples_10ms_ * channels) {
+ for (int k = payload_size; k < samples_10ms_ * channels; k++) {
+ audio_frame.data_[k] = 0;
+ }
+ if (auto_rewind_) {
+ rewind(pcm_file_);
+ rewinded_ = true;
+ } else {
+ end_of_file_ = true;
+ }
+ }
+ audio_frame.samples_per_channel_ = samples_10ms_;
+ audio_frame.sample_rate_hz_ = frequency_;
+ audio_frame.num_channels_ = channels;
+ audio_frame.timestamp_ = timestamp_;
+ timestamp_ += samples_10ms_;
+ ++blocks_read_;
+ if (num_10ms_blocks_to_read_ && blocks_read_ >= *num_10ms_blocks_to_read_)
+ end_of_file_ = true;
+ return samples_10ms_;
+}
+
+void PCMFile::Write10MsData(AudioFrame& audio_frame) {
+ if (audio_frame.num_channels_ == 1) {
+ if (!save_stereo_) {
+ if (fwrite(audio_frame.data_, sizeof(uint16_t),
+ audio_frame.samples_per_channel_, pcm_file_) !=
+ static_cast<size_t>(audio_frame.samples_per_channel_)) {
+ return;
+ }
+    } else {
+      // Duplicate the mono samples into an interleaved stereo buffer.
+      int16_t* stereo_audio = new int16_t[2 * audio_frame.samples_per_channel_];
+      for (size_t k = 0; k < audio_frame.samples_per_channel_; k++) {
+        stereo_audio[k << 1] = audio_frame.data_[k];
+        stereo_audio[(k << 1) + 1] = audio_frame.data_[k];
+      }
+      size_t written = fwrite(stereo_audio, sizeof(int16_t),
+                              2 * audio_frame.samples_per_channel_, pcm_file_);
+      // Free the buffer before checking the result so it is not leaked on a
+      // short write.
+      delete[] stereo_audio;
+      if (written !=
+          static_cast<size_t>(2 * audio_frame.samples_per_channel_)) {
+        return;
+      }
+    }
+ } else {
+ if (fwrite(audio_frame.data_, sizeof(int16_t),
+ audio_frame.num_channels_ * audio_frame.samples_per_channel_,
+ pcm_file_) !=
+ static_cast<size_t>(audio_frame.num_channels_ *
+ audio_frame.samples_per_channel_)) {
+ return;
+ }
+ }
+}
+
+void PCMFile::Write10MsData(int16_t* playout_buffer, size_t length_smpls) {
+ if (fwrite(playout_buffer, sizeof(uint16_t), length_smpls, pcm_file_) !=
+ length_smpls) {
+ return;
+ }
+}
+
+void PCMFile::Close() {
+ fclose(pcm_file_);
+ pcm_file_ = NULL;
+ blocks_read_ = 0;
+}
+
+void PCMFile::FastForward(int num_10ms_blocks) {
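+  // Seek past the requested number of 10 ms blocks without counting them
+  // towards the SetNum10MsBlocksToRead() limit.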
+ const int channels = read_stereo_ ? 2 : 1;
+ long num_bytes_to_move =
+ num_10ms_blocks * sizeof(int16_t) * samples_10ms_ * channels;
+ int error = fseek(pcm_file_, num_bytes_to_move, SEEK_CUR);
+ RTC_DCHECK_EQ(error, 0);
+}
+
+void PCMFile::Rewind() {
+ rewind(pcm_file_);
+ end_of_file_ = false;
+ blocks_read_ = 0;
+}
+
+bool PCMFile::Rewinded() {
+ return rewinded_;
+}
+
+void PCMFile::SaveStereo(bool is_stereo) {
+ save_stereo_ = is_stereo;
+}
+
+void PCMFile::ReadStereo(bool is_stereo) {
+ read_stereo_ = is_stereo;
+}
+
+void PCMFile::SetNum10MsBlocksToRead(int value) {
+ num_10ms_blocks_to_read_ = rtc::Optional<int>(value);
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/test/PCMFile.h b/webrtc/modules/audio_coding/test/PCMFile.h
new file mode 100644
index 0000000000..840933a1bd
--- /dev/null
+++ b/webrtc/modules/audio_coding/test/PCMFile.h
@@ -0,0 +1,80 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_TEST_PCMFILE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_TEST_PCMFILE_H_
+
+#include <stdio.h>
+#include <stdlib.h>
+
+#include <string>
+
+#include "webrtc/base/optional.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+class PCMFile {
+ public:
+ PCMFile();
+ PCMFile(uint32_t timestamp);
+ ~PCMFile() {
+ if (pcm_file_ != NULL) {
+ fclose(pcm_file_);
+ }
+ }
+
+ void Open(const std::string& filename, uint16_t frequency, const char* mode,
+ bool auto_rewind = false);
+
+ int32_t Read10MsData(AudioFrame& audio_frame);
+
+ void Write10MsData(int16_t *playout_buffer, size_t length_smpls);
+ void Write10MsData(AudioFrame& audio_frame);
+
+ uint16_t PayloadLength10Ms() const;
+ int32_t SamplingFrequency() const;
+ void Close();
+ bool EndOfFile() const {
+ return end_of_file_;
+ }
+ // Moves forward the specified number of 10 ms blocks. If a limit has been set
+ // with SetNum10MsBlocksToRead, fast-forwarding does not count towards this
+ // limit.
+ void FastForward(int num_10ms_blocks);
+ void Rewind();
+ static int16_t ChooseFile(std::string* file_name, int16_t max_len,
+ uint16_t* frequency_hz);
+ bool Rewinded();
+ void SaveStereo(bool is_stereo = true);
+ void ReadStereo(bool is_stereo = true);
+ // If set, the reading will stop after the specified number of blocks have
+ // been read. When that has happened, EndOfFile() will return true. Calling
+ // Rewind() will reset the counter and start over.
+ void SetNum10MsBlocksToRead(int value);
+
+ private:
+ FILE* pcm_file_;
+ uint16_t samples_10ms_;
+ int32_t frequency_;
+ bool end_of_file_;
+ bool auto_rewind_;
+ bool rewinded_;
+ uint32_t timestamp_;
+ bool read_stereo_;
+ bool save_stereo_;
+ rtc::Optional<int> num_10ms_blocks_to_read_;
+ int blocks_read_ = 0;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_TEST_PCMFILE_H_
diff --git a/webrtc/modules/audio_coding/test/PacketLossTest.cc b/webrtc/modules/audio_coding/test/PacketLossTest.cc
new file mode 100644
index 0000000000..ad3e83403e
--- /dev/null
+++ b/webrtc/modules/audio_coding/test/PacketLossTest.cc
@@ -0,0 +1,167 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/test/PacketLossTest.h"
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/common.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+namespace webrtc {
+
+ReceiverWithPacketLoss::ReceiverWithPacketLoss()
+ : loss_rate_(0),
+ burst_length_(1),
+ packet_counter_(0),
+ lost_packet_counter_(0),
+ burst_lost_counter_(burst_length_) {
+}
+
+void ReceiverWithPacketLoss::Setup(AudioCodingModule *acm,
+ RTPStream *rtpStream,
+ std::string out_file_name,
+ int channels,
+ int loss_rate,
+ int burst_length) {
+ loss_rate_ = loss_rate;
+ burst_length_ = burst_length;
+  burst_lost_counter_ = burst_length_;  // Prevent losing the first packet.
+ std::stringstream ss;
+ ss << out_file_name << "_" << loss_rate_ << "_" << burst_length_ << "_";
+ Receiver::Setup(acm, rtpStream, ss.str(), channels);
+}
+
+bool ReceiverWithPacketLoss::IncomingPacket() {
+ if (!_rtpStream->EndOfFile()) {
+ if (packet_counter_ == 0) {
+ _realPayloadSizeBytes = _rtpStream->Read(&_rtpInfo, _incomingPayload,
+ _payloadSizeBytes, &_nextTime);
+ if (_realPayloadSizeBytes == 0) {
+ if (_rtpStream->EndOfFile()) {
+ packet_counter_ = 0;
+ return true;
+ } else {
+ return false;
+ }
+ }
+ }
+
+ if (!PacketLost()) {
+ _acm->IncomingPacket(_incomingPayload, _realPayloadSizeBytes, _rtpInfo);
+ }
+ packet_counter_++;
+ _realPayloadSizeBytes = _rtpStream->Read(&_rtpInfo, _incomingPayload,
+ _payloadSizeBytes, &_nextTime);
+ if (_realPayloadSizeBytes == 0 && _rtpStream->EndOfFile()) {
+ packet_counter_ = 0;
+ lost_packet_counter_ = 0;
+ }
+ }
+ return true;
+}
+
+bool ReceiverWithPacketLoss::PacketLost() {
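+  // Loss model: finish an ongoing burst first (keep dropping packets until
+  // burst_length_ in a row are lost), then start a new burst whenever the
+  // observed loss rate falls below the configured loss_rate_ percentage.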
+ if (burst_lost_counter_ < burst_length_) {
+ lost_packet_counter_++;
+ burst_lost_counter_++;
+ return true;
+ }
+
+ if (lost_packet_counter_ * 100 < loss_rate_ * packet_counter_) {
+ lost_packet_counter_++;
+ burst_lost_counter_ = 1;
+ return true;
+ }
+ return false;
+}
+
+SenderWithFEC::SenderWithFEC()
+ : expected_loss_rate_(0) {
+}
+
+void SenderWithFEC::Setup(AudioCodingModule *acm, RTPStream *rtpStream,
+ std::string in_file_name, int sample_rate,
+ int channels, int expected_loss_rate) {
+ Sender::Setup(acm, rtpStream, in_file_name, sample_rate, channels);
+ EXPECT_TRUE(SetFEC(true));
+ EXPECT_TRUE(SetPacketLossRate(expected_loss_rate));
+}
+
+bool SenderWithFEC::SetFEC(bool enable_fec) {
+ if (_acm->SetCodecFEC(enable_fec) == 0) {
+ return true;
+ }
+ return false;
+}
+
+bool SenderWithFEC::SetPacketLossRate(int expected_loss_rate) {
+ if (_acm->SetPacketLossRate(expected_loss_rate) == 0) {
+ expected_loss_rate_ = expected_loss_rate;
+ return true;
+ }
+ return false;
+}
+
+PacketLossTest::PacketLossTest(int channels, int expected_loss_rate,
+ int actual_loss_rate, int burst_length)
+ : channels_(channels),
+ in_file_name_(channels_ == 1 ? "audio_coding/testfile32kHz" :
+ "audio_coding/teststereo32kHz"),
+ sample_rate_hz_(32000),
+ sender_(new SenderWithFEC),
+ receiver_(new ReceiverWithPacketLoss),
+ expected_loss_rate_(expected_loss_rate),
+ actual_loss_rate_(actual_loss_rate),
+ burst_length_(burst_length) {
+}
+
+void PacketLossTest::Perform() {
+#ifndef WEBRTC_CODEC_OPUS
+ return;
+#else
+ rtc::scoped_ptr<AudioCodingModule> acm(AudioCodingModule::Create(0));
+
+ int codec_id = acm->Codec("opus", 48000, channels_);
+
+ RTPFile rtpFile;
+ std::string fileName = webrtc::test::TempFilename(webrtc::test::OutputPath(),
+ "packet_loss_test");
+
+ // Encode to file
+ rtpFile.Open(fileName.c_str(), "wb+");
+ rtpFile.WriteHeader();
+
+ sender_->testMode = 0;
+ sender_->codeId = codec_id;
+
+ sender_->Setup(acm.get(), &rtpFile, in_file_name_, sample_rate_hz_, channels_,
+ expected_loss_rate_);
+ if (acm->SendCodec()) {
+ sender_->Run();
+ }
+ sender_->Teardown();
+ rtpFile.Close();
+
+ // Decode to file
+ rtpFile.Open(fileName.c_str(), "rb");
+ rtpFile.ReadHeader();
+
+ receiver_->testMode = 0;
+ receiver_->codeId = codec_id;
+
+ receiver_->Setup(acm.get(), &rtpFile, "packetLoss_out", channels_,
+ actual_loss_rate_, burst_length_);
+ receiver_->Run();
+ receiver_->Teardown();
+ rtpFile.Close();
+#endif
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/test/PacketLossTest.h b/webrtc/modules/audio_coding/test/PacketLossTest.h
new file mode 100644
index 0000000000..f3570ae1ca
--- /dev/null
+++ b/webrtc/modules/audio_coding/test/PacketLossTest.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_TEST_PACKETLOSSTEST_H_
+#define WEBRTC_MODULES_AUDIO_CODING_TEST_PACKETLOSSTEST_H_
+
+#include <string>
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/audio_coding/test/EncodeDecodeTest.h"
+
+namespace webrtc {
+
+class ReceiverWithPacketLoss : public Receiver {
+ public:
+ ReceiverWithPacketLoss();
+ void Setup(AudioCodingModule *acm, RTPStream *rtpStream,
+ std::string out_file_name, int channels, int loss_rate,
+ int burst_length);
+ bool IncomingPacket() override;
+
+ protected:
+ bool PacketLost();
+ int loss_rate_;
+ int burst_length_;
+ int packet_counter_;
+ int lost_packet_counter_;
+ int burst_lost_counter_;
+};
+
+class SenderWithFEC : public Sender {
+ public:
+ SenderWithFEC();
+ void Setup(AudioCodingModule *acm, RTPStream *rtpStream,
+ std::string in_file_name, int sample_rate, int channels,
+ int expected_loss_rate);
+ bool SetPacketLossRate(int expected_loss_rate);
+ bool SetFEC(bool enable_fec);
+ protected:
+ int expected_loss_rate_;
+};
+
+class PacketLossTest : public ACMTest {
+ public:
+ PacketLossTest(int channels, int expected_loss_rate_, int actual_loss_rate,
+ int burst_length);
+ void Perform();
+ protected:
+ int channels_;
+ std::string in_file_name_;
+ int sample_rate_hz_;
+ rtc::scoped_ptr<SenderWithFEC> sender_;
+ rtc::scoped_ptr<ReceiverWithPacketLoss> receiver_;
+ int expected_loss_rate_;
+ int actual_loss_rate_;
+ int burst_length_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_TEST_PACKETLOSSTEST_H_
diff --git a/webrtc/modules/audio_coding/main/test/RTPFile.cc b/webrtc/modules/audio_coding/test/RTPFile.cc
index 60777178c6..60777178c6 100644
--- a/webrtc/modules/audio_coding/main/test/RTPFile.cc
+++ b/webrtc/modules/audio_coding/test/RTPFile.cc
diff --git a/webrtc/modules/audio_coding/test/RTPFile.h b/webrtc/modules/audio_coding/test/RTPFile.h
new file mode 100644
index 0000000000..696d41ebd2
--- /dev/null
+++ b/webrtc/modules/audio_coding/test/RTPFile.h
@@ -0,0 +1,126 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_TEST_RTPFILE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_TEST_RTPFILE_H_
+
+#include <stdio.h>
+#include <queue>
+
+#include "webrtc/modules/audio_coding/include/audio_coding_module.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/system_wrappers/include/rw_lock_wrapper.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+class RTPStream {
+ public:
+ virtual ~RTPStream() {
+ }
+
+ virtual void Write(const uint8_t payloadType, const uint32_t timeStamp,
+ const int16_t seqNo, const uint8_t* payloadData,
+ const size_t payloadSize, uint32_t frequency) = 0;
+
+ // Returns the packet's payload size. Zero should be treated as an
+ // end-of-stream (in the case that EndOfFile() is true) or an error.
+ virtual size_t Read(WebRtcRTPHeader* rtpInfo, uint8_t* payloadData,
+ size_t payloadSize, uint32_t* offset) = 0;
+ virtual bool EndOfFile() const = 0;
+
+ protected:
+ void MakeRTPheader(uint8_t* rtpHeader, uint8_t payloadType, int16_t seqNo,
+ uint32_t timeStamp, uint32_t ssrc);
+
+ void ParseRTPHeader(WebRtcRTPHeader* rtpInfo, const uint8_t* rtpHeader);
+};
+
+class RTPPacket {
+ public:
+ RTPPacket(uint8_t payloadType, uint32_t timeStamp, int16_t seqNo,
+ const uint8_t* payloadData, size_t payloadSize,
+ uint32_t frequency);
+
+ ~RTPPacket();
+
+ uint8_t payloadType;
+ uint32_t timeStamp;
+ int16_t seqNo;
+ uint8_t* payloadData;
+ size_t payloadSize;
+ uint32_t frequency;
+};
+
+class RTPBuffer : public RTPStream {
+ public:
+ RTPBuffer();
+
+ ~RTPBuffer();
+
+ void Write(const uint8_t payloadType,
+ const uint32_t timeStamp,
+ const int16_t seqNo,
+ const uint8_t* payloadData,
+ const size_t payloadSize,
+ uint32_t frequency) override;
+
+ size_t Read(WebRtcRTPHeader* rtpInfo,
+ uint8_t* payloadData,
+ size_t payloadSize,
+ uint32_t* offset) override;
+
+ bool EndOfFile() const override;
+
+ private:
+ RWLockWrapper* _queueRWLock;
+ std::queue<RTPPacket *> _rtpQueue;
+};
+
+class RTPFile : public RTPStream {
+ public:
+ ~RTPFile() {
+ }
+
+ RTPFile()
+ : _rtpFile(NULL),
+ _rtpEOF(false) {
+ }
+
+ void Open(const char *outFilename, const char *mode);
+
+ void Close();
+
+ void WriteHeader();
+
+ void ReadHeader();
+
+ void Write(const uint8_t payloadType,
+ const uint32_t timeStamp,
+ const int16_t seqNo,
+ const uint8_t* payloadData,
+ const size_t payloadSize,
+ uint32_t frequency) override;
+
+ size_t Read(WebRtcRTPHeader* rtpInfo,
+ uint8_t* payloadData,
+ size_t payloadSize,
+ uint32_t* offset) override;
+
+ bool EndOfFile() const override { return _rtpEOF; }
+
+ private:
+ FILE* _rtpFile;
+ bool _rtpEOF;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_TEST_RTPFILE_H_
diff --git a/webrtc/modules/audio_coding/test/SpatialAudio.cc b/webrtc/modules/audio_coding/test/SpatialAudio.cc
new file mode 100644
index 0000000000..c9f8080826
--- /dev/null
+++ b/webrtc/modules/audio_coding/test/SpatialAudio.cc
@@ -0,0 +1,196 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <string.h>
+
+#include <math.h>
+
+#include "webrtc/common_types.h"
+#include "webrtc/modules/audio_coding/test/SpatialAudio.h"
+#include "webrtc/system_wrappers/include/trace.h"
+#include "webrtc/system_wrappers/include/trace.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+namespace webrtc {
+
+#define NUM_PANN_COEFFS 10
+
+SpatialAudio::SpatialAudio(int testMode)
+ : _acmLeft(AudioCodingModule::Create(1)),
+ _acmRight(AudioCodingModule::Create(2)),
+ _acmReceiver(AudioCodingModule::Create(3)),
+ _testMode(testMode) {
+}
+
+SpatialAudio::~SpatialAudio() {
+ delete _channel;
+ _inFile.Close();
+ _outFile.Close();
+}
+
+int16_t SpatialAudio::Setup() {
+ _channel = new Channel;
+
+ // Register callback for the sender side.
+ CHECK_ERROR(_acmLeft->RegisterTransportCallback(_channel));
+ CHECK_ERROR(_acmRight->RegisterTransportCallback(_channel));
+ // Register the receiver ACM in channel
+ _channel->RegisterReceiverACM(_acmReceiver.get());
+
+ uint16_t sampFreqHz = 32000;
+
+ const std::string file_name = webrtc::test::ResourcePath(
+ "audio_coding/testfile32kHz", "pcm");
+ _inFile.Open(file_name, sampFreqHz, "rb", false);
+
+ std::string output_file = webrtc::test::OutputPath()
+ + "out_spatial_autotest.pcm";
+ if (_testMode == 1) {
+ output_file = webrtc::test::OutputPath() + "testspatial_out.pcm";
+ printf("\n");
+ printf("Enter the output file [%s]: ", output_file.c_str());
+ PCMFile::ChooseFile(&output_file, MAX_FILE_NAME_LENGTH_BYTE, &sampFreqHz);
+ } else {
+ output_file = webrtc::test::OutputPath() + "testspatial_out.pcm";
+ }
+ _outFile.Open(output_file, sampFreqHz, "wb", false);
+ _outFile.SaveStereo(true);
+
+  // Register all available codecs as receiving codecs.
+ CodecInst codecInst;
+ int status;
+ uint8_t num_encoders = _acmReceiver->NumberOfCodecs();
+ for (uint8_t n = 0; n < num_encoders; n++) {
+ status = _acmReceiver->Codec(n, &codecInst);
+ if (status < 0) {
+ printf("Error in Codec(), no matching codec found");
+ }
+ status = _acmReceiver->RegisterReceiveCodec(codecInst);
+ if (status < 0) {
+ printf("Error in RegisterReceiveCodec() for payload type %d",
+ codecInst.pltype);
+ }
+ }
+
+ return 0;
+}
+
+void SpatialAudio::Perform() {
+ if (_testMode == 0) {
+ printf("Running SpatialAudio Test");
+ WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceAudioCoding, -1,
+ "---------- SpatialAudio ----------");
+ }
+
+ Setup();
+
+ CodecInst codecInst;
+ _acmLeft->Codec((uint8_t) 1, &codecInst);
+ CHECK_ERROR(_acmLeft->RegisterSendCodec(codecInst));
+ EncodeDecode();
+
+ int16_t pannCntr = 0;
+
+ double leftPanning[NUM_PANN_COEFFS] = { 1.00, 0.95, 0.90, 0.85, 0.80, 0.75,
+ 0.70, 0.60, 0.55, 0.50 };
+ double rightPanning[NUM_PANN_COEFFS] = { 0.50, 0.55, 0.60, 0.70, 0.75, 0.80,
+ 0.85, 0.90, 0.95, 1.00 };
+
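+  // Sweep the source from left to right, alternating between two codec
+  // configurations so each panning step also exercises a codec switch.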
+ while ((pannCntr + 1) < NUM_PANN_COEFFS) {
+ _acmLeft->Codec((uint8_t) 0, &codecInst);
+ codecInst.pacsize = 480;
+ CHECK_ERROR(_acmLeft->RegisterSendCodec(codecInst));
+ CHECK_ERROR(_acmRight->RegisterSendCodec(codecInst));
+
+ EncodeDecode(leftPanning[pannCntr], rightPanning[pannCntr]);
+ pannCntr++;
+
+ // Change codec
+ _acmLeft->Codec((uint8_t) 3, &codecInst);
+ codecInst.pacsize = 320;
+ CHECK_ERROR(_acmLeft->RegisterSendCodec(codecInst));
+ CHECK_ERROR(_acmRight->RegisterSendCodec(codecInst));
+
+ EncodeDecode(leftPanning[pannCntr], rightPanning[pannCntr]);
+ pannCntr++;
+ if (_testMode == 0) {
+ printf(".");
+ }
+ }
+
+ _acmLeft->Codec((uint8_t) 4, &codecInst);
+ CHECK_ERROR(_acmLeft->RegisterSendCodec(codecInst));
+ EncodeDecode();
+
+ _acmLeft->Codec((uint8_t) 0, &codecInst);
+ codecInst.pacsize = 480;
+ CHECK_ERROR(_acmLeft->RegisterSendCodec(codecInst));
+ CHECK_ERROR(_acmRight->RegisterSendCodec(codecInst));
+ pannCntr = NUM_PANN_COEFFS - 1;
+ while (pannCntr >= 0) {
+ EncodeDecode(leftPanning[pannCntr], rightPanning[pannCntr]);
+ pannCntr--;
+ if (_testMode == 0) {
+ printf(".");
+ }
+ }
+ if (_testMode == 0) {
+ printf("Done!\n");
+ }
+}
+
+void SpatialAudio::EncodeDecode(const double leftPanning,
+ const double rightPanning) {
+ AudioFrame audioFrame;
+ int32_t outFileSampFreq = _outFile.SamplingFrequency();
+
+ const double rightToLeftRatio = rightPanning / leftPanning;
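+  // The buffer is first scaled by leftPanning for the left channel;
+  // multiplying that result by rightPanning / leftPanning then yields the
+  // right channel, so the input file is only read once per 10 ms frame.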
+
+ _channel->SetIsStereo(true);
+
+ while (!_inFile.EndOfFile()) {
+ _inFile.Read10MsData(audioFrame);
+ for (size_t n = 0; n < audioFrame.samples_per_channel_; n++) {
+ audioFrame.data_[n] = (int16_t) floor(
+ audioFrame.data_[n] * leftPanning + 0.5);
+ }
+ CHECK_ERROR(_acmLeft->Add10MsData(audioFrame));
+
+ for (size_t n = 0; n < audioFrame.samples_per_channel_; n++) {
+ audioFrame.data_[n] = (int16_t) floor(
+ audioFrame.data_[n] * rightToLeftRatio + 0.5);
+ }
+ CHECK_ERROR(_acmRight->Add10MsData(audioFrame));
+
+ CHECK_ERROR(_acmReceiver->PlayoutData10Ms(outFileSampFreq, &audioFrame));
+ _outFile.Write10MsData(audioFrame);
+ }
+ _inFile.Rewind();
+}
+
+void SpatialAudio::EncodeDecode() {
+ AudioFrame audioFrame;
+ int32_t outFileSampFreq = _outFile.SamplingFrequency();
+
+ _channel->SetIsStereo(false);
+
+ while (!_inFile.EndOfFile()) {
+ _inFile.Read10MsData(audioFrame);
+ CHECK_ERROR(_acmLeft->Add10MsData(audioFrame));
+
+ CHECK_ERROR(_acmReceiver->PlayoutData10Ms(outFileSampFreq, &audioFrame));
+ _outFile.Write10MsData(audioFrame);
+ }
+ _inFile.Rewind();
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/test/SpatialAudio.h b/webrtc/modules/audio_coding/test/SpatialAudio.h
new file mode 100644
index 0000000000..3548cc98eb
--- /dev/null
+++ b/webrtc/modules/audio_coding/test/SpatialAudio.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_TEST_SPATIALAUDIO_H_
+#define WEBRTC_MODULES_AUDIO_CODING_TEST_SPATIALAUDIO_H_
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module.h"
+#include "webrtc/modules/audio_coding/test/ACMTest.h"
+#include "webrtc/modules/audio_coding/test/Channel.h"
+#include "webrtc/modules/audio_coding/test/PCMFile.h"
+#include "webrtc/modules/audio_coding/test/utility.h"
+
+#define MAX_FILE_NAME_LENGTH_BYTE 500
+
+namespace webrtc {
+
+class SpatialAudio : public ACMTest {
+ public:
+ SpatialAudio(int testMode);
+ ~SpatialAudio();
+
+ void Perform();
+ private:
+ int16_t Setup();
+ void EncodeDecode(double leftPanning, double rightPanning);
+ void EncodeDecode();
+
+ rtc::scoped_ptr<AudioCodingModule> _acmLeft;
+ rtc::scoped_ptr<AudioCodingModule> _acmRight;
+ rtc::scoped_ptr<AudioCodingModule> _acmReceiver;
+ Channel* _channel;
+ PCMFile _inFile;
+ PCMFile _outFile;
+ int _testMode;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_TEST_SPATIALAUDIO_H_
diff --git a/webrtc/modules/audio_coding/test/TestAllCodecs.cc b/webrtc/modules/audio_coding/test/TestAllCodecs.cc
new file mode 100644
index 0000000000..bacfd37188
--- /dev/null
+++ b/webrtc/modules/audio_coding/test/TestAllCodecs.cc
@@ -0,0 +1,489 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/test/TestAllCodecs.h"
+
+#include <cstdio>
+#include <limits>
+#include <string>
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+#include "webrtc/common_types.h"
+#include "webrtc/engine_configurations.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module_typedefs.h"
+#include "webrtc/modules/audio_coding/test/utility.h"
+#include "webrtc/system_wrappers/include/trace.h"
+#include "webrtc/test/testsupport/fileutils.h"
+#include "webrtc/typedefs.h"
+
+// Description of the test:
+// In this test we set up a one-way communication channel from a participant
+// called "a" to a participant called "b".
+// a -> channel_a_to_b -> b
+//
+// The test loops through all available mono codecs, encodes at "a", sends
+// over the channel, and decodes at "b".
+
+namespace {
+const size_t kVariableSize = std::numeric_limits<size_t>::max();
+}
+
+namespace webrtc {
+
+// Class for simulating packet handling.
+TestPack::TestPack()
+ : receiver_acm_(NULL),
+ sequence_number_(0),
+ timestamp_diff_(0),
+ last_in_timestamp_(0),
+ total_bytes_(0),
+ payload_size_(0) {
+}
+
+TestPack::~TestPack() {
+}
+
+void TestPack::RegisterReceiverACM(AudioCodingModule* acm) {
+ receiver_acm_ = acm;
+ return;
+}
+
+int32_t TestPack::SendData(FrameType frame_type, uint8_t payload_type,
+ uint32_t timestamp, const uint8_t* payload_data,
+ size_t payload_size,
+ const RTPFragmentationHeader* fragmentation) {
+ WebRtcRTPHeader rtp_info;
+ int32_t status;
+
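+  // Build a minimal RTP header for the payload; the "network" is simulated
+  // by handing the packet directly to the receiving ACM below.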
+ rtp_info.header.markerBit = false;
+ rtp_info.header.ssrc = 0;
+ rtp_info.header.sequenceNumber = sequence_number_++;
+ rtp_info.header.payloadType = payload_type;
+ rtp_info.header.timestamp = timestamp;
+ if (frame_type == kAudioFrameCN) {
+ rtp_info.type.Audio.isCNG = true;
+ } else {
+ rtp_info.type.Audio.isCNG = false;
+ }
+ if (frame_type == kEmptyFrame) {
+ // Skip this frame.
+ return 0;
+ }
+
+ // Only run mono for all test cases.
+ rtp_info.type.Audio.channel = 1;
+ memcpy(payload_data_, payload_data, payload_size);
+
+ status = receiver_acm_->IncomingPacket(payload_data_, payload_size, rtp_info);
+
+ payload_size_ = payload_size;
+ timestamp_diff_ = timestamp - last_in_timestamp_;
+ last_in_timestamp_ = timestamp;
+ total_bytes_ += payload_size;
+ return status;
+}
+
+size_t TestPack::payload_size() {
+ return payload_size_;
+}
+
+uint32_t TestPack::timestamp_diff() {
+ return timestamp_diff_;
+}
+
+void TestPack::reset_payload_size() {
+ payload_size_ = 0;
+}
+
+TestAllCodecs::TestAllCodecs(int test_mode)
+ : acm_a_(AudioCodingModule::Create(0)),
+ acm_b_(AudioCodingModule::Create(1)),
+ channel_a_to_b_(NULL),
+ test_count_(0),
+ packet_size_samples_(0),
+ packet_size_bytes_(0) {
+ // test_mode = 0 for silent test (auto test)
+ test_mode_ = test_mode;
+}
+
+TestAllCodecs::~TestAllCodecs() {
+ if (channel_a_to_b_ != NULL) {
+ delete channel_a_to_b_;
+ channel_a_to_b_ = NULL;
+ }
+}
+
+void TestAllCodecs::Perform() {
+ const std::string file_name = webrtc::test::ResourcePath(
+ "audio_coding/testfile32kHz", "pcm");
+ infile_a_.Open(file_name, 32000, "rb");
+
+ if (test_mode_ == 0) {
+ WEBRTC_TRACE(kTraceStateInfo, kTraceAudioCoding, -1,
+ "---------- TestAllCodecs ----------");
+ }
+
+ acm_a_->InitializeReceiver();
+ acm_b_->InitializeReceiver();
+
+ uint8_t num_encoders = acm_a_->NumberOfCodecs();
+ CodecInst my_codec_param;
+ for (uint8_t n = 0; n < num_encoders; n++) {
+ acm_b_->Codec(n, &my_codec_param);
+ if (!strcmp(my_codec_param.plname, "opus")) {
+ my_codec_param.channels = 1;
+ }
+ acm_b_->RegisterReceiveCodec(my_codec_param);
+ }
+
+ // Create and connect the channel
+ channel_a_to_b_ = new TestPack;
+ acm_a_->RegisterTransportCallback(channel_a_to_b_);
+ channel_a_to_b_->RegisterReceiverACM(acm_b_.get());
+
+ // All codecs are tested for all allowed sampling frequencies, rates and
+ // packet sizes.
+#ifdef WEBRTC_CODEC_G722
+ if (test_mode_ != 0) {
+ printf("===============================================================\n");
+ }
+ test_count_++;
+ OpenOutFile(test_count_);
+ char codec_g722[] = "G722";
+ RegisterSendCodec('A', codec_g722, 16000, 64000, 160, 0);
+ Run(channel_a_to_b_);
+ RegisterSendCodec('A', codec_g722, 16000, 64000, 320, 0);
+ Run(channel_a_to_b_);
+ RegisterSendCodec('A', codec_g722, 16000, 64000, 480, 0);
+ Run(channel_a_to_b_);
+ RegisterSendCodec('A', codec_g722, 16000, 64000, 640, 0);
+ Run(channel_a_to_b_);
+ RegisterSendCodec('A', codec_g722, 16000, 64000, 800, 0);
+ Run(channel_a_to_b_);
+ RegisterSendCodec('A', codec_g722, 16000, 64000, 960, 0);
+ Run(channel_a_to_b_);
+ outfile_b_.Close();
+#endif
+#ifdef WEBRTC_CODEC_ILBC
+ if (test_mode_ != 0) {
+ printf("===============================================================\n");
+ }
+ test_count_++;
+ OpenOutFile(test_count_);
+ char codec_ilbc[] = "ILBC";
+ RegisterSendCodec('A', codec_ilbc, 8000, 13300, 240, 0);
+ Run(channel_a_to_b_);
+ RegisterSendCodec('A', codec_ilbc, 8000, 13300, 480, 0);
+ Run(channel_a_to_b_);
+ RegisterSendCodec('A', codec_ilbc, 8000, 15200, 160, 0);
+ Run(channel_a_to_b_);
+ RegisterSendCodec('A', codec_ilbc, 8000, 15200, 320, 0);
+ Run(channel_a_to_b_);
+ outfile_b_.Close();
+#endif
+#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
+ if (test_mode_ != 0) {
+ printf("===============================================================\n");
+ }
+ test_count_++;
+ OpenOutFile(test_count_);
+ char codec_isac[] = "ISAC";
+ RegisterSendCodec('A', codec_isac, 16000, -1, 480, kVariableSize);
+ Run(channel_a_to_b_);
+ RegisterSendCodec('A', codec_isac, 16000, -1, 960, kVariableSize);
+ Run(channel_a_to_b_);
+ RegisterSendCodec('A', codec_isac, 16000, 15000, 480, kVariableSize);
+ Run(channel_a_to_b_);
+ RegisterSendCodec('A', codec_isac, 16000, 32000, 960, kVariableSize);
+ Run(channel_a_to_b_);
+ outfile_b_.Close();
+#endif
+#ifdef WEBRTC_CODEC_ISAC
+ if (test_mode_ != 0) {
+ printf("===============================================================\n");
+ }
+ test_count_++;
+ OpenOutFile(test_count_);
+ RegisterSendCodec('A', codec_isac, 32000, -1, 960, kVariableSize);
+ Run(channel_a_to_b_);
+ RegisterSendCodec('A', codec_isac, 32000, 56000, 960, kVariableSize);
+ Run(channel_a_to_b_);
+ RegisterSendCodec('A', codec_isac, 32000, 37000, 960, kVariableSize);
+ Run(channel_a_to_b_);
+ RegisterSendCodec('A', codec_isac, 32000, 32000, 960, kVariableSize);
+ Run(channel_a_to_b_);
+ outfile_b_.Close();
+#endif
+ if (test_mode_ != 0) {
+ printf("===============================================================\n");
+ }
+ test_count_++;
+ OpenOutFile(test_count_);
+ char codec_l16[] = "L16";
+ RegisterSendCodec('A', codec_l16, 8000, 128000, 80, 0);
+ Run(channel_a_to_b_);
+ RegisterSendCodec('A', codec_l16, 8000, 128000, 160, 0);
+ Run(channel_a_to_b_);
+ RegisterSendCodec('A', codec_l16, 8000, 128000, 240, 0);
+ Run(channel_a_to_b_);
+ RegisterSendCodec('A', codec_l16, 8000, 128000, 320, 0);
+ Run(channel_a_to_b_);
+ outfile_b_.Close();
+ if (test_mode_ != 0) {
+ printf("===============================================================\n");
+ }
+ test_count_++;
+ OpenOutFile(test_count_);
+ RegisterSendCodec('A', codec_l16, 16000, 256000, 160, 0);
+ Run(channel_a_to_b_);
+ RegisterSendCodec('A', codec_l16, 16000, 256000, 320, 0);
+ Run(channel_a_to_b_);
+ RegisterSendCodec('A', codec_l16, 16000, 256000, 480, 0);
+ Run(channel_a_to_b_);
+ RegisterSendCodec('A', codec_l16, 16000, 256000, 640, 0);
+ Run(channel_a_to_b_);
+ outfile_b_.Close();
+ if (test_mode_ != 0) {
+ printf("===============================================================\n");
+ }
+ test_count_++;
+ OpenOutFile(test_count_);
+ RegisterSendCodec('A', codec_l16, 32000, 512000, 320, 0);
+ Run(channel_a_to_b_);
+ RegisterSendCodec('A', codec_l16, 32000, 512000, 640, 0);
+ Run(channel_a_to_b_);
+ outfile_b_.Close();
+ if (test_mode_ != 0) {
+ printf("===============================================================\n");
+ }
+ test_count_++;
+ OpenOutFile(test_count_);
+ char codec_pcma[] = "PCMA";
+ RegisterSendCodec('A', codec_pcma, 8000, 64000, 80, 0);
+ Run(channel_a_to_b_);
+ RegisterSendCodec('A', codec_pcma, 8000, 64000, 160, 0);
+ Run(channel_a_to_b_);
+ RegisterSendCodec('A', codec_pcma, 8000, 64000, 240, 0);
+ Run(channel_a_to_b_);
+ RegisterSendCodec('A', codec_pcma, 8000, 64000, 320, 0);
+ Run(channel_a_to_b_);
+ RegisterSendCodec('A', codec_pcma, 8000, 64000, 400, 0);
+ Run(channel_a_to_b_);
+ RegisterSendCodec('A', codec_pcma, 8000, 64000, 480, 0);
+ Run(channel_a_to_b_);
+ if (test_mode_ != 0) {
+ printf("===============================================================\n");
+ }
+ char codec_pcmu[] = "PCMU";
+ RegisterSendCodec('A', codec_pcmu, 8000, 64000, 80, 0);
+ Run(channel_a_to_b_);
+ RegisterSendCodec('A', codec_pcmu, 8000, 64000, 160, 0);
+ Run(channel_a_to_b_);
+ RegisterSendCodec('A', codec_pcmu, 8000, 64000, 240, 0);
+ Run(channel_a_to_b_);
+ RegisterSendCodec('A', codec_pcmu, 8000, 64000, 320, 0);
+ Run(channel_a_to_b_);
+ RegisterSendCodec('A', codec_pcmu, 8000, 64000, 400, 0);
+ Run(channel_a_to_b_);
+ RegisterSendCodec('A', codec_pcmu, 8000, 64000, 480, 0);
+ Run(channel_a_to_b_);
+ outfile_b_.Close();
+#ifdef WEBRTC_CODEC_OPUS
+ if (test_mode_ != 0) {
+ printf("===============================================================\n");
+ }
+ test_count_++;
+ OpenOutFile(test_count_);
+ char codec_opus[] = "OPUS";
+ RegisterSendCodec('A', codec_opus, 48000, 6000, 480, kVariableSize);
+ Run(channel_a_to_b_);
+ RegisterSendCodec('A', codec_opus, 48000, 20000, 480*2, kVariableSize);
+ Run(channel_a_to_b_);
+ RegisterSendCodec('A', codec_opus, 48000, 32000, 480*4, kVariableSize);
+ Run(channel_a_to_b_);
+ RegisterSendCodec('A', codec_opus, 48000, 48000, 480, kVariableSize);
+ Run(channel_a_to_b_);
+ RegisterSendCodec('A', codec_opus, 48000, 64000, 480*4, kVariableSize);
+ Run(channel_a_to_b_);
+ RegisterSendCodec('A', codec_opus, 48000, 96000, 480*6, kVariableSize);
+ Run(channel_a_to_b_);
+ RegisterSendCodec('A', codec_opus, 48000, 500000, 480*2, kVariableSize);
+ Run(channel_a_to_b_);
+ outfile_b_.Close();
+#endif
+ if (test_mode_ != 0) {
+ printf("===============================================================\n");
+
+ // Print out all codecs that were not tested in the run.
+ printf("The following codecs were not included in the test:\n");
+#ifndef WEBRTC_CODEC_G722
+ printf(" G.722\n");
+#endif
+#ifndef WEBRTC_CODEC_ILBC
+ printf(" iLBC\n");
+#endif
+#ifndef WEBRTC_CODEC_ISAC
+ printf(" ISAC float\n");
+#endif
+#ifndef WEBRTC_CODEC_ISACFX
+ printf(" ISAC fix\n");
+#endif
+
+ printf("\nTo complete the test, listen to the %d number of output files.\n",
+ test_count_);
+ }
+}
+
+// Register Codec to use in the test
+//
+// Input: side - which ACM to use, 'A' or 'B'
+ // codec_name - name to use when registering the codec
+ // sampling_freq_hz - sampling frequency in Hertz
+ // rate - bitrate in bits per second
+ // packet_size - packet size in samples
+ // extra_byte - number of extra bytes added on top of what the
+ // bitrate implies, e.g. an internal header;
+ // set to kVariableSize if the codec is a
+ // variable-rate codec
+void TestAllCodecs::RegisterSendCodec(char side, char* codec_name,
+ int32_t sampling_freq_hz, int rate,
+ int packet_size, size_t extra_byte) {
+ if (test_mode_ != 0) {
+ // Print out codec and settings.
+ printf("codec: %s Freq: %d Rate: %d PackSize: %d\n", codec_name,
+ sampling_freq_hz, rate, packet_size);
+ }
+
+ // Store packet-size in samples, used to validate the received packet.
+ // If G.722, store half the size to compensate for the timestamp bug in the
+ // RFC for G.722.
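+ // (Example: a 20 ms G.722 packet carries 320 samples at 16 kHz, but the
+ // legacy 8 kHz RTP clock of RFC 3551 advances the timestamp by only 160.)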
+ // If iSAC runs in adaptive mode, packet size in samples can change on the
+ // fly, so we exclude this test by setting |packet_size_samples_| to -1.
+ if (!strcmp(codec_name, "G722")) {
+ packet_size_samples_ = packet_size / 2;
+ } else if (!strcmp(codec_name, "ISAC") && (rate == -1)) {
+ packet_size_samples_ = -1;
+ } else {
+ packet_size_samples_ = packet_size;
+ }
+
+ // Store the expected packet size in bytes, used to validate the received
+ // packet. For a variable-rate codec (extra_byte == kVariableSize), set it
+ // to kVariableSize.
+ if (extra_byte != kVariableSize) {
+ // Add 0.875 to always round up to a whole byte
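+ // Worked example (G.722 case above): 160 samples at 64000 bit/s and
+ // 16000 Hz gives 160 * 64000 / (16000 * 8) = 80 bytes, with no remainder
+ // to round.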
+ packet_size_bytes_ = static_cast<size_t>(
+ static_cast<float>(packet_size * rate) /
+ static_cast<float>(sampling_freq_hz * 8) + 0.875) + extra_byte;
+ } else {
+ // Packets will have a variable size.
+ packet_size_bytes_ = kVariableSize;
+ }
+
+ // Set pointer to the ACM where to register the codec.
+ AudioCodingModule* my_acm = NULL;
+ switch (side) {
+ case 'A': {
+ my_acm = acm_a_.get();
+ break;
+ }
+ case 'B': {
+ my_acm = acm_b_.get();
+ break;
+ }
+ default: {
+ break;
+ }
+ }
+ ASSERT_TRUE(my_acm != NULL);
+
+ // Get all codec parameters before registering
+ CodecInst my_codec_param;
+ CHECK_ERROR(AudioCodingModule::Codec(codec_name, &my_codec_param,
+ sampling_freq_hz, 1));
+ my_codec_param.rate = rate;
+ my_codec_param.pacsize = packet_size;
+ CHECK_ERROR(my_acm->RegisterSendCodec(my_codec_param));
+}
+
+void TestAllCodecs::Run(TestPack* channel) {
+ AudioFrame audio_frame;
+
+ int32_t out_freq_hz = outfile_b_.SamplingFrequency();
+ size_t receive_size;
+ uint32_t timestamp_diff;
+ channel->reset_payload_size();
+ int error_count = 0;
+ int counter = 0;
+ // Set test length to 500 ms (50 blocks of 10 ms each).
+ infile_a_.SetNum10MsBlocksToRead(50);
+ // Fast-forward 1 second (100 blocks) since the file starts with silence.
+ infile_a_.FastForward(100);
+
+ while (!infile_a_.EndOfFile()) {
+ // Add 10 msec to ACM.
+ infile_a_.Read10MsData(audio_frame);
+ CHECK_ERROR(acm_a_->Add10MsData(audio_frame));
+
+ // Verify that the received packet size matches the settings.
+ receive_size = channel->payload_size();
+ if (receive_size) {
+ if ((receive_size != packet_size_bytes_) &&
+ (packet_size_bytes_ != kVariableSize)) {
+ error_count++;
+ }
+
+ // Verify that the timestamp is updated with the expected length. The
+ // counter is used to avoid problems when switching codec or frame size in
+ // the test.
+ timestamp_diff = channel->timestamp_diff();
+ if ((counter > 10) &&
+ (static_cast<int>(timestamp_diff) != packet_size_samples_) &&
+ (packet_size_samples_ > -1))
+ error_count++;
+ }
+
+ // Run received side of ACM.
+ CHECK_ERROR(acm_b_->PlayoutData10Ms(out_freq_hz, &audio_frame));
+
+ // Write output speech to file.
+ outfile_b_.Write10MsData(audio_frame.data_,
+ audio_frame.samples_per_channel_);
+
+ // Update loop counter
+ counter++;
+ }
+
+ EXPECT_EQ(0, error_count);
+
+ if (infile_a_.EndOfFile()) {
+ infile_a_.Rewind();
+ }
+}
+
+void TestAllCodecs::OpenOutFile(int test_number) {
+ std::string filename = webrtc::test::OutputPath();
+ std::ostringstream test_number_str;
+ test_number_str << test_number;
+ filename += "testallcodecs_out_";
+ filename += test_number_str.str();
+ filename += ".pcm";
+ outfile_b_.Open(filename, 32000, "wb");
+}
+
+void TestAllCodecs::DisplaySendReceiveCodec() {
+ CodecInst my_codec_param;
+ printf("%s -> ", acm_a_->SendCodec()->plname);
+ acm_b_->ReceiveCodec(&my_codec_param);
+ printf("%s\n", my_codec_param.plname);
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/test/TestAllCodecs.h b/webrtc/modules/audio_coding/test/TestAllCodecs.h
new file mode 100644
index 0000000000..e79bd69faa
--- /dev/null
+++ b/webrtc/modules/audio_coding/test/TestAllCodecs.h
@@ -0,0 +1,84 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_TEST_TESTALLCODECS_H_
+#define WEBRTC_MODULES_AUDIO_CODING_TEST_TESTALLCODECS_H_
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/audio_coding/test/ACMTest.h"
+#include "webrtc/modules/audio_coding/test/Channel.h"
+#include "webrtc/modules/audio_coding/test/PCMFile.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+class Config;
+
+class TestPack : public AudioPacketizationCallback {
+ public:
+ TestPack();
+ ~TestPack();
+
+ void RegisterReceiverACM(AudioCodingModule* acm);
+
+ int32_t SendData(FrameType frame_type,
+ uint8_t payload_type,
+ uint32_t timestamp,
+ const uint8_t* payload_data,
+ size_t payload_size,
+ const RTPFragmentationHeader* fragmentation) override;
+
+ size_t payload_size();
+ uint32_t timestamp_diff();
+ void reset_payload_size();
+
+ private:
+ AudioCodingModule* receiver_acm_;
+ uint16_t sequence_number_;
+ uint8_t payload_data_[60 * 32 * 2 * 2];
+ uint32_t timestamp_diff_;
+ uint32_t last_in_timestamp_;
+ uint64_t total_bytes_;
+ size_t payload_size_;
+};
+
+class TestAllCodecs : public ACMTest {
+ public:
+ explicit TestAllCodecs(int test_mode);
+ ~TestAllCodecs();
+
+ void Perform() override;
+
+ private:
+ // The default value of '-1' indicates that the registration is based only on
+ // codec name, and a sampling frequency matching is not required.
+ // This is useful for codecs which support several sampling frequencies.
+ // Note! Only mono mode is tested in this test.
+ void RegisterSendCodec(char side, char* codec_name, int32_t sampling_freq_hz,
+ int rate, int packet_size, size_t extra_byte);
+
+ void Run(TestPack* channel);
+ void OpenOutFile(int test_number);
+ void DisplaySendReceiveCodec();
+
+ int test_mode_;
+ rtc::scoped_ptr<AudioCodingModule> acm_a_;
+ rtc::scoped_ptr<AudioCodingModule> acm_b_;
+ TestPack* channel_a_to_b_;
+ PCMFile infile_a_;
+ PCMFile outfile_b_;
+ int test_count_;
+ int packet_size_samples_;
+ size_t packet_size_bytes_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_TEST_TESTALLCODECS_H_
diff --git a/webrtc/modules/audio_coding/test/TestRedFec.cc b/webrtc/modules/audio_coding/test/TestRedFec.cc
new file mode 100644
index 0000000000..a1bdc04e53
--- /dev/null
+++ b/webrtc/modules/audio_coding/test/TestRedFec.cc
@@ -0,0 +1,480 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/test/TestRedFec.h"
+
+#include <assert.h>
+
+#include "webrtc/common.h"
+#include "webrtc/common_types.h"
+#include "webrtc/engine_configurations.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module_typedefs.h"
+#include "webrtc/modules/audio_coding/test/utility.h"
+#include "webrtc/system_wrappers/include/trace.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+#ifdef SUPPORT_RED_WB
+#undef SUPPORT_RED_WB
+#endif
+
+#ifdef SUPPORT_RED_SWB
+#undef SUPPORT_RED_SWB
+#endif
+
+#ifdef SUPPORT_RED_FB
+#undef SUPPORT_RED_FB
+#endif
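+// Note: the #undefs above force the "RED not supported" (#else) branches of
+// the SUPPORT_RED_* checks in this file, which is the configuration the
+// expectations below assume.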
+
+namespace webrtc {
+
+namespace {
+ const char kNameL16[] = "L16";
+ const char kNamePCMU[] = "PCMU";
+ const char kNameCN[] = "CN";
+ const char kNameRED[] = "RED";
+
+ // These three are only used by code #ifdeffed on WEBRTC_CODEC_G722.
+#ifdef WEBRTC_CODEC_G722
+ const char kNameISAC[] = "ISAC";
+ const char kNameG722[] = "G722";
+ const char kNameOPUS[] = "opus";
+#endif
+}
+
+TestRedFec::TestRedFec()
+ : _acmA(AudioCodingModule::Create(0)),
+ _acmB(AudioCodingModule::Create(1)),
+ _channelA2B(NULL),
+ _testCntr(0) {
+}
+
+TestRedFec::~TestRedFec() {
+ if (_channelA2B != NULL) {
+ delete _channelA2B;
+ _channelA2B = NULL;
+ }
+}
+
+void TestRedFec::Perform() {
+ const std::string file_name = webrtc::test::ResourcePath(
+ "audio_coding/testfile32kHz", "pcm");
+ _inFileA.Open(file_name, 32000, "rb");
+
+ ASSERT_EQ(0, _acmA->InitializeReceiver());
+ ASSERT_EQ(0, _acmB->InitializeReceiver());
+
+ uint8_t numEncoders = _acmA->NumberOfCodecs();
+ CodecInst myCodecParam;
+ for (uint8_t n = 0; n < numEncoders; n++) {
+ EXPECT_EQ(0, _acmB->Codec(n, &myCodecParam));
+ // Default number of channels is 2 for opus, so we change to 1 in this test.
+ if (!strcmp(myCodecParam.plname, "opus")) {
+ myCodecParam.channels = 1;
+ }
+ EXPECT_EQ(0, _acmB->RegisterReceiveCodec(myCodecParam));
+ }
+
+ // Create and connect the channel
+ _channelA2B = new Channel;
+ _acmA->RegisterTransportCallback(_channelA2B);
+ _channelA2B->RegisterReceiverACM(_acmB.get());
+
+ EXPECT_EQ(0, RegisterSendCodec('A', kNameL16, 8000));
+ EXPECT_EQ(0, RegisterSendCodec('A', kNameCN, 8000));
+ EXPECT_EQ(0, RegisterSendCodec('A', kNameRED));
+ EXPECT_EQ(0, SetVAD(true, true, VADAggr));
+ EXPECT_EQ(0, _acmA->SetREDStatus(true));
+ EXPECT_TRUE(_acmA->REDStatus());
+
+ OpenOutFile(_testCntr);
+ Run();
+ _outFileB.Close();
+
+ RegisterSendCodec('A', kNamePCMU, 8000);
+ // Switch to another 8 kHz codec, RED should remain switched on.
+ EXPECT_TRUE(_acmA->REDStatus());
+ OpenOutFile(_testCntr);
+ Run();
+ _outFileB.Close();
+
+#ifndef WEBRTC_CODEC_G722
+ EXPECT_TRUE(false);
+ printf("G722 needs to be activated to run this test\n");
+ return;
+#else
+ EXPECT_EQ(0, RegisterSendCodec('A', kNameG722, 16000));
+ EXPECT_EQ(0, RegisterSendCodec('A', kNameCN, 16000));
+
+#ifdef SUPPORT_RED_WB
+ // Switch codec, RED should remain.
+ EXPECT_TRUE(_acmA->REDStatus());
+#else
+ // Switch to a 16 kHz codec, RED should have been switched off.
+ EXPECT_FALSE(_acmA->REDStatus());
+#endif
+
+ OpenOutFile(_testCntr);
+ EXPECT_EQ(0, SetVAD(true, true, VADAggr));
+ EXPECT_EQ(0, _acmA->SetREDStatus(false));
+ EXPECT_FALSE(_acmA->REDStatus());
+ Run();
+#ifdef SUPPORT_RED_WB
+ EXPECT_EQ(0, _acmA->SetREDStatus(true));
+ EXPECT_TRUE(_acmA->REDStatus());
+#else
+ EXPECT_EQ(-1, _acmA->SetREDStatus(true));
+ EXPECT_FALSE(_acmA->REDStatus());
+#endif
+ Run();
+ _outFileB.Close();
+
+ RegisterSendCodec('A', kNameISAC, 16000);
+
+#ifdef SUPPORT_RED_WB
+ // Switch codec, RED should remain.
+ EXPECT_TRUE(_acmA->REDStatus());
+#else
+ EXPECT_FALSE(_acmA->REDStatus());
+#endif
+
+ OpenOutFile(_testCntr);
+ EXPECT_EQ(0, SetVAD(true, true, VADVeryAggr));
+ EXPECT_EQ(0, _acmA->SetREDStatus(false));
+ EXPECT_FALSE(_acmA->REDStatus());
+ Run();
+ _outFileB.Close();
+
+#ifdef SUPPORT_RED_WB
+ EXPECT_EQ(0, _acmA->SetREDStatus(true));
+ EXPECT_TRUE(_acmA->REDStatus());
+#else
+ EXPECT_EQ(-1, _acmA->SetREDStatus(true));
+ EXPECT_FALSE(_acmA->REDStatus());
+#endif
+ OpenOutFile(_testCntr);
+ Run();
+ _outFileB.Close();
+
+ RegisterSendCodec('A', kNameISAC, 32000);
+
+#if defined(SUPPORT_RED_SWB) && defined(SUPPORT_RED_WB)
+ // Switch codec, RED should remain.
+ EXPECT_TRUE(_acmA->REDStatus());
+#else
+ // Switch to a 32 kHz codec, RED should have been switched off.
+ EXPECT_FALSE(_acmA->REDStatus());
+#endif
+
+ OpenOutFile(_testCntr);
+ EXPECT_EQ(0, SetVAD(true, true, VADVeryAggr));
+ EXPECT_EQ(0, _acmA->SetREDStatus(false));
+ EXPECT_FALSE(_acmA->REDStatus());
+ Run();
+ _outFileB.Close();
+
+#ifdef SUPPORT_RED_SWB
+ EXPECT_EQ(0, _acmA->SetREDStatus(true));
+ EXPECT_TRUE(_acmA->REDStatus());
+#else
+ EXPECT_EQ(-1, _acmA->SetREDStatus(true));
+ EXPECT_FALSE(_acmA->REDStatus());
+#endif
+ OpenOutFile(_testCntr);
+ Run();
+ _outFileB.Close();
+
+ RegisterSendCodec('A', kNameISAC, 32000);
+ EXPECT_EQ(0, SetVAD(false, false, VADNormal));
+
+#if defined(SUPPORT_RED_SWB) && defined(SUPPORT_RED_WB)
+ OpenOutFile(_testCntr);
+ EXPECT_EQ(0, _acmA->SetREDStatus(true));
+ EXPECT_TRUE(_acmA->REDStatus());
+ Run();
+
+ RegisterSendCodec('A', kNameISAC, 16000);
+ EXPECT_TRUE(_acmA->REDStatus());
+ Run();
+
+ RegisterSendCodec('A', kNameISAC, 32000);
+ EXPECT_TRUE(_acmA->REDStatus());
+ Run();
+
+ RegisterSendCodec('A', kNameISAC, 16000);
+ EXPECT_TRUE(_acmA->REDStatus());
+ Run();
+ _outFileB.Close();
+#else
+ EXPECT_EQ(-1, _acmA->SetREDStatus(true));
+ EXPECT_FALSE(_acmA->REDStatus());
+#endif
+
+ _channelA2B->SetFECTestWithPacketLoss(true);
+ // Following tests are under packet losses.
+
+ EXPECT_EQ(0, RegisterSendCodec('A', kNameG722));
+ EXPECT_EQ(0, RegisterSendCodec('A', kNameCN, 16000));
+
+#if defined(SUPPORT_RED_WB) && defined(SUPPORT_RED_SWB)
+ // Switch codec, RED should remain.
+ EXPECT_TRUE(_acmA->REDStatus());
+#else
+ // Switch to a 16 kHz codec, RED should have been switched off.
+ EXPECT_FALSE(_acmA->REDStatus());
+#endif
+
+ OpenOutFile(_testCntr);
+ EXPECT_EQ(0, SetVAD(true, true, VADAggr));
+ EXPECT_EQ(0, _acmA->SetREDStatus(false));
+ EXPECT_FALSE(_acmA->REDStatus());
+ Run();
+ _outFileB.Close();
+
+#ifdef SUPPORT_RED_WB
+ EXPECT_EQ(0, _acmA->SetREDStatus(true));
+ EXPECT_TRUE(_acmA->REDStatus());
+#else
+ EXPECT_EQ(-1, _acmA->SetREDStatus(true));
+ EXPECT_FALSE(_acmA->REDStatus());
+#endif
+ OpenOutFile(_testCntr);
+ Run();
+ _outFileB.Close();
+
+ RegisterSendCodec('A', kNameISAC, 16000);
+
+#ifdef SUPPORT_RED_WB
+ // Switch codec, RED should remain.
+ EXPECT_TRUE(_acmA->REDStatus());
+#else
+ // Switch to a 16 kHz codec, RED should have been switched off.
+ EXPECT_FALSE(_acmA->REDStatus());
+#endif
+
+ OpenOutFile(_testCntr);
+ EXPECT_EQ(0, SetVAD(true, true, VADVeryAggr));
+ EXPECT_EQ(0, _acmA->SetREDStatus(false));
+ EXPECT_FALSE(_acmA->REDStatus());
+ Run();
+ _outFileB.Close();
+#ifdef SUPPORT_RED_WB
+ EXPECT_EQ(0, _acmA->SetREDStatus(true));
+ EXPECT_TRUE(_acmA->REDStatus());
+#else
+ EXPECT_EQ(-1, _acmA->SetREDStatus(true));
+ EXPECT_FALSE(_acmA->REDStatus());
+#endif
+ OpenOutFile(_testCntr);
+ Run();
+ _outFileB.Close();
+
+ RegisterSendCodec('A', kNameISAC, 32000);
+
+#if defined(SUPPORT_RED_SWB) && defined(SUPPORT_RED_WB)
+ // Switch codec, RED should remain.
+ EXPECT_TRUE(_acmA->REDStatus());
+#else
+ // Switch to a 32 kHz codec, RED should have been switched off.
+ EXPECT_FALSE(_acmA->REDStatus());
+#endif
+
+ OpenOutFile(_testCntr);
+ EXPECT_EQ(0, SetVAD(true, true, VADVeryAggr));
+ EXPECT_EQ(0, _acmA->SetREDStatus(false));
+ EXPECT_FALSE(_acmA->REDStatus());
+#ifdef SUPPORT_RED_SWB
+ EXPECT_EQ(0, _acmA->SetREDStatus(true));
+ EXPECT_TRUE(_acmA->REDStatus());
+#else
+ EXPECT_EQ(-1, _acmA->SetREDStatus(true));
+ EXPECT_FALSE(_acmA->REDStatus());
+#endif
+ OpenOutFile(_testCntr);
+ Run();
+ _outFileB.Close();
+
+ RegisterSendCodec('A', kNameISAC, 32000);
+ EXPECT_EQ(0, SetVAD(false, false, VADNormal));
+#if defined(SUPPORT_RED_SWB) && defined(SUPPORT_RED_WB)
+ OpenOutFile(_testCntr);
+ EXPECT_EQ(0, _acmA->SetREDStatus(true));
+ EXPECT_TRUE(_acmA->REDStatus());
+ Run();
+
+ RegisterSendCodec('A', kNameISAC, 16000);
+ EXPECT_TRUE(_acmA->REDStatus());
+ Run();
+
+ RegisterSendCodec('A', kNameISAC, 32000);
+ EXPECT_TRUE(_acmA->REDStatus());
+ Run();
+
+ RegisterSendCodec('A', kNameISAC, 16000);
+ EXPECT_TRUE(_acmA->REDStatus());
+ Run();
+ _outFileB.Close();
+#else
+ EXPECT_EQ(-1, _acmA->SetREDStatus(true));
+ EXPECT_FALSE(_acmA->REDStatus());
+#endif
+
+#ifndef WEBRTC_CODEC_OPUS
+ EXPECT_TRUE(false);
+ printf("Opus needs to be activated to run this test\n");
+ return;
+#endif
+
+ RegisterSendCodec('A', kNameOPUS, 48000);
+
+#if defined(SUPPORT_RED_FB) && defined(SUPPORT_RED_SWB) &&\
+ defined(SUPPORT_RED_WB)
+ // Switch to codec, RED should remain switched on.
+ EXPECT_TRUE(_acmA->REDStatus());
+#else
+ EXPECT_FALSE(_acmA->REDStatus());
+#endif
+
+ // _channelA2B imposes 25% packet loss rate.
+ EXPECT_EQ(0, _acmA->SetPacketLossRate(25));
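+ // (SetPacketLossRate informs the send-side codec of the expected loss rate
+ // so codecs with in-band FEC, such as Opus, can adapt their redundancy.)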
+
+#ifdef SUPPORT_RED_FB
+ EXPECT_EQ(0, _acmA->SetREDStatus(true));
+ EXPECT_TRUE(_acmA->REDStatus());
+ // Codec FEC and RED are mutually exclusive.
+ EXPECT_EQ(-1, _acmA->SetCodecFEC(true));
+
+ EXPECT_EQ(0, _acmA->SetREDStatus(false));
+ EXPECT_EQ(0, _acmA->SetCodecFEC(true));
+
+ // Codec FEC and RED are mutually exclusive.
+ EXPECT_EQ(-1, _acmA->SetREDStatus(true));
+#else
+ EXPECT_EQ(-1, _acmA->SetREDStatus(true));
+ EXPECT_FALSE(_acmA->REDStatus());
+ EXPECT_EQ(0, _acmA->SetCodecFEC(true));
+#endif
+
+ EXPECT_TRUE(_acmA->CodecFEC());
+ OpenOutFile(_testCntr);
+ Run();
+
+ // Switch to L16 with RED.
+ RegisterSendCodec('A', kNameL16, 8000);
+ EXPECT_EQ(0, SetVAD(false, false, VADNormal));
+
+ // L16 does not support FEC, so FEC should be turned off automatically.
+ EXPECT_FALSE(_acmA->CodecFEC());
+
+ EXPECT_EQ(0, _acmA->SetREDStatus(true));
+ EXPECT_TRUE(_acmA->REDStatus());
+ Run();
+
+ // Switch to Opus again.
+ RegisterSendCodec('A', kNameOPUS, 48000);
+#ifdef SUPPORT_RED_FB
+ // Switch to codec, RED should remain switched on.
+ EXPECT_TRUE(_acmA->REDStatus());
+#else
+ EXPECT_FALSE(_acmA->REDStatus());
+#endif
+ EXPECT_EQ(0, _acmA->SetREDStatus(false));
+ EXPECT_EQ(0, _acmA->SetCodecFEC(false));
+ Run();
+
+ EXPECT_EQ(0, _acmA->SetCodecFEC(true));
+ _outFileB.Close();
+
+ // These codecs do not support internal FEC, so enabling FEC must fail.
+ RegisterSendCodec('A', kNameG722, 16000);
+ EXPECT_FALSE(_acmA->REDStatus());
+ EXPECT_EQ(-1, _acmA->SetCodecFEC(true));
+ EXPECT_FALSE(_acmA->CodecFEC());
+
+ RegisterSendCodec('A', kNameISAC, 16000);
+ EXPECT_FALSE(_acmA->REDStatus());
+ EXPECT_EQ(-1, _acmA->SetCodecFEC(true));
+ EXPECT_FALSE(_acmA->CodecFEC());
+
+ // These codecs do not support internal FEC, but disabling FEC must not fail.
+ RegisterSendCodec('A', kNameG722, 16000);
+ EXPECT_FALSE(_acmA->REDStatus());
+ EXPECT_EQ(0, _acmA->SetCodecFEC(false));
+ EXPECT_FALSE(_acmA->CodecFEC());
+
+ RegisterSendCodec('A', kNameISAC, 16000);
+ EXPECT_FALSE(_acmA->REDStatus());
+ EXPECT_EQ(0, _acmA->SetCodecFEC(false));
+ EXPECT_FALSE(_acmA->CodecFEC());
+
+#endif // defined(WEBRTC_CODEC_G722)
+}
+
+int32_t TestRedFec::SetVAD(bool enableDTX, bool enableVAD, ACMVADMode vadMode) {
+ return _acmA->SetVAD(enableDTX, enableVAD, vadMode);
+}
+
+int16_t TestRedFec::RegisterSendCodec(char side, const char* codecName,
+ int32_t samplingFreqHz) {
+ std::cout << std::flush;
+ AudioCodingModule* myACM;
+ switch (side) {
+ case 'A': {
+ myACM = _acmA.get();
+ break;
+ }
+ case 'B': {
+ myACM = _acmB.get();
+ break;
+ }
+ default:
+ return -1;
+ }
+
+ if (myACM == NULL) {
+ assert(false);
+ return -1;
+ }
+ CodecInst myCodecParam;
+ EXPECT_GT(AudioCodingModule::Codec(codecName, &myCodecParam,
+ samplingFreqHz, 1), -1);
+ EXPECT_GT(myACM->RegisterSendCodec(myCodecParam), -1);
+
+ // Initialization was successful.
+ return 0;
+}
+
+void TestRedFec::Run() {
+ AudioFrame audioFrame;
+ int32_t outFreqHzB = _outFileB.SamplingFrequency();
+ // Set test length to 500 ms (50 blocks of 10 ms each).
+ _inFileA.SetNum10MsBlocksToRead(50);
+ // Fast-forward 1 second (100 blocks) since the file starts with silence.
+ _inFileA.FastForward(100);
+
+ while (!_inFileA.EndOfFile()) {
+ EXPECT_GT(_inFileA.Read10MsData(audioFrame), 0);
+ EXPECT_GE(_acmA->Add10MsData(audioFrame), 0);
+ EXPECT_EQ(0, _acmB->PlayoutData10Ms(outFreqHzB, &audioFrame));
+ _outFileB.Write10MsData(audioFrame.data_, audioFrame.samples_per_channel_);
+ }
+ _inFileA.Rewind();
+}
+
+void TestRedFec::OpenOutFile(int16_t test_number) {
+ std::string file_name;
+ std::stringstream file_stream;
+ file_stream << webrtc::test::OutputPath();
+ file_stream << "TestRedFec_outFile_";
+ file_stream << test_number << ".pcm";
+ file_name = file_stream.str();
+ _outFileB.Open(file_name, 16000, "wb");
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/test/TestRedFec.h b/webrtc/modules/audio_coding/test/TestRedFec.h
new file mode 100644
index 0000000000..6343d8e374
--- /dev/null
+++ b/webrtc/modules/audio_coding/test/TestRedFec.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_TEST_TESTREDFEC_H_
+#define WEBRTC_MODULES_AUDIO_CODING_TEST_TESTREDFEC_H_
+
+#include <string>
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/audio_coding/test/ACMTest.h"
+#include "webrtc/modules/audio_coding/test/Channel.h"
+#include "webrtc/modules/audio_coding/test/PCMFile.h"
+
+namespace webrtc {
+
+class Config;
+
+class TestRedFec : public ACMTest {
+ public:
+ explicit TestRedFec();
+ ~TestRedFec();
+
+ void Perform();
+ private:
+ // The default value of '-1' indicates that the registration is based only on
+ // codec name and a sampling frequency matching is not required. This is
+ // useful for codecs which support several sampling frequencies.
+ int16_t RegisterSendCodec(char side, const char* codecName,
+ int32_t sampFreqHz = -1);
+ void Run();
+ void OpenOutFile(int16_t testNumber);
+ int32_t SetVAD(bool enableDTX, bool enableVAD, ACMVADMode vadMode);
+ rtc::scoped_ptr<AudioCodingModule> _acmA;
+ rtc::scoped_ptr<AudioCodingModule> _acmB;
+
+ Channel* _channelA2B;
+
+ PCMFile _inFileA;
+ PCMFile _outFileB;
+ int16_t _testCntr;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_TEST_TESTREDFEC_H_
diff --git a/webrtc/modules/audio_coding/test/TestStereo.cc b/webrtc/modules/audio_coding/test/TestStereo.cc
new file mode 100644
index 0000000000..9bf560d323
--- /dev/null
+++ b/webrtc/modules/audio_coding/test/TestStereo.cc
@@ -0,0 +1,844 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/test/TestStereo.h"
+
+#include <assert.h>
+
+#include <string>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/common_types.h"
+#include "webrtc/engine_configurations.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module_typedefs.h"
+#include "webrtc/modules/audio_coding/test/utility.h"
+#include "webrtc/system_wrappers/include/trace.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+namespace webrtc {
+
+// Class for simulating packet handling
+TestPackStereo::TestPackStereo()
+ : receiver_acm_(NULL),
+ seq_no_(0),
+ timestamp_diff_(0),
+ last_in_timestamp_(0),
+ total_bytes_(0),
+ payload_size_(0),
+ codec_mode_(kNotSet),
+ lost_packet_(false) {
+}
+
+TestPackStereo::~TestPackStereo() {
+}
+
+void TestPackStereo::RegisterReceiverACM(AudioCodingModule* acm) {
+ receiver_acm_ = acm;
+ return;
+}
+
+int32_t TestPackStereo::SendData(const FrameType frame_type,
+ const uint8_t payload_type,
+ const uint32_t timestamp,
+ const uint8_t* payload_data,
+ const size_t payload_size,
+ const RTPFragmentationHeader* fragmentation) {
+ WebRtcRTPHeader rtp_info;
+ int32_t status = 0;
+
+ rtp_info.header.markerBit = false;
+ rtp_info.header.ssrc = 0;
+ rtp_info.header.sequenceNumber = seq_no_++;
+ rtp_info.header.payloadType = payload_type;
+ rtp_info.header.timestamp = timestamp;
+ if (frame_type == kEmptyFrame) {
+ // Skip this frame
+ return 0;
+ }
+
+ if (lost_packet_ == false) {
+ if (frame_type != kAudioFrameCN) {
+ rtp_info.type.Audio.isCNG = false;
+ rtp_info.type.Audio.channel = static_cast<int>(codec_mode_);
+ } else {
+ rtp_info.type.Audio.isCNG = true;
+ rtp_info.type.Audio.channel = static_cast<int>(kMono);
+ }
+ status = receiver_acm_->IncomingPacket(payload_data, payload_size,
+ rtp_info);
+
+ if (frame_type != kAudioFrameCN) {
+ payload_size_ = static_cast<int>(payload_size);
+ } else {
+ payload_size_ = -1;
+ }
+
+ timestamp_diff_ = timestamp - last_in_timestamp_;
+ last_in_timestamp_ = timestamp;
+ total_bytes_ += payload_size;
+ }
+ return status;
+}
+
+uint16_t TestPackStereo::payload_size() {
+ return static_cast<uint16_t>(payload_size_);
+}
+
+uint32_t TestPackStereo::timestamp_diff() {
+ return timestamp_diff_;
+}
+
+void TestPackStereo::reset_payload_size() {
+ payload_size_ = 0;
+}
+
+void TestPackStereo::set_codec_mode(enum StereoMonoMode mode) {
+ codec_mode_ = mode;
+}
+
+void TestPackStereo::set_lost_packet(bool lost) {
+ lost_packet_ = lost;
+}
+
+TestStereo::TestStereo(int test_mode)
+ : acm_a_(AudioCodingModule::Create(0)),
+ acm_b_(AudioCodingModule::Create(1)),
+ channel_a2b_(NULL),
+ test_cntr_(0),
+ pack_size_samp_(0),
+ pack_size_bytes_(0),
+ counter_(0)
+#ifdef WEBRTC_CODEC_G722
+ , g722_pltype_(0)
+#endif
+ , l16_8khz_pltype_(-1)
+ , l16_16khz_pltype_(-1)
+ , l16_32khz_pltype_(-1)
+#ifdef PCMA_AND_PCMU
+ , pcma_pltype_(-1)
+ , pcmu_pltype_(-1)
+#endif
+#ifdef WEBRTC_CODEC_OPUS
+ , opus_pltype_(-1)
+#endif
+ {
+ // test_mode = 0 for silent test (auto test)
+ test_mode_ = test_mode;
+}
+
+TestStereo::~TestStereo() {
+ if (channel_a2b_ != NULL) {
+ delete channel_a2b_;
+ channel_a2b_ = NULL;
+ }
+}
+
+void TestStereo::Perform() {
+ uint16_t frequency_hz;
+ int audio_channels;
+ int codec_channels;
+ bool dtx;
+ bool vad;
+ ACMVADMode vad_mode;
+
+ // Open both mono and stereo test files in 32 kHz.
+ const std::string file_name_stereo = webrtc::test::ResourcePath(
+ "audio_coding/teststereo32kHz", "pcm");
+ const std::string file_name_mono = webrtc::test::ResourcePath(
+ "audio_coding/testfile32kHz", "pcm");
+ frequency_hz = 32000;
+ in_file_stereo_ = new PCMFile();
+ in_file_mono_ = new PCMFile();
+ in_file_stereo_->Open(file_name_stereo, frequency_hz, "rb");
+ in_file_stereo_->ReadStereo(true);
+ in_file_mono_->Open(file_name_mono, frequency_hz, "rb");
+ in_file_mono_->ReadStereo(false);
+
+ // Create and initialize two ACMs, one for each side of a one-to-one call.
+ ASSERT_TRUE((acm_a_.get() != NULL) && (acm_b_.get() != NULL));
+ EXPECT_EQ(0, acm_a_->InitializeReceiver());
+ EXPECT_EQ(0, acm_b_->InitializeReceiver());
+
+ // Register all available codecs as receiving codecs.
+ uint8_t num_encoders = acm_a_->NumberOfCodecs();
+ CodecInst my_codec_param;
+ for (uint8_t n = 0; n < num_encoders; n++) {
+ EXPECT_EQ(0, acm_b_->Codec(n, &my_codec_param));
+ EXPECT_EQ(0, acm_b_->RegisterReceiveCodec(my_codec_param));
+ }
+
+ // Test that unregister all receive codecs works.
+ for (uint8_t n = 0; n < num_encoders; n++) {
+ EXPECT_EQ(0, acm_b_->Codec(n, &my_codec_param));
+ EXPECT_EQ(0, acm_b_->UnregisterReceiveCodec(my_codec_param.pltype));
+ }
+
+ // Register all available codecs as receiving codecs once more.
+ for (uint8_t n = 0; n < num_encoders; n++) {
+ EXPECT_EQ(0, acm_b_->Codec(n, &my_codec_param));
+ EXPECT_EQ(0, acm_b_->RegisterReceiveCodec(my_codec_param));
+ }
+
+ // Create and connect the channel.
+ channel_a2b_ = new TestPackStereo;
+ EXPECT_EQ(0, acm_a_->RegisterTransportCallback(channel_a2b_));
+ channel_a2b_->RegisterReceiverACM(acm_b_.get());
+
+ // Start with setting VAD/DTX, before we know we will send stereo.
+ // Continue with setting a stereo codec as send codec and verify that
+ // VAD/DTX gets turned off.
+ EXPECT_EQ(0, acm_a_->SetVAD(true, true, VADNormal));
+ EXPECT_EQ(0, acm_a_->VAD(&dtx, &vad, &vad_mode));
+ EXPECT_TRUE(dtx);
+ EXPECT_TRUE(vad);
+ char codec_pcma_temp[] = "PCMA";
+ RegisterSendCodec('A', codec_pcma_temp, 8000, 64000, 80, 2, pcma_pltype_);
+ EXPECT_EQ(0, acm_a_->VAD(&dtx, &vad, &vad_mode));
+ EXPECT_FALSE(dtx);
+ EXPECT_FALSE(vad);
+ if (test_mode_ != 0) {
+ printf("\n");
+ }
+
+ //
+ // Test Stereo-To-Stereo for all codecs.
+ //
+ audio_channels = 2;
+ codec_channels = 2;
+
+ // All codecs are tested for all allowed sampling frequencies, rates and
+ // packet sizes.
+#ifdef WEBRTC_CODEC_G722
+ if (test_mode_ != 0) {
+ printf("===========================================================\n");
+ printf("Test number: %d\n", test_cntr_ + 1);
+ printf("Test type: Stereo-to-stereo\n");
+ }
+ channel_a2b_->set_codec_mode(kStereo);
+ test_cntr_++;
+ OpenOutFile(test_cntr_);
+ char codec_g722[] = "G722";
+ RegisterSendCodec('A', codec_g722, 16000, 64000, 160, codec_channels,
+ g722_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ RegisterSendCodec('A', codec_g722, 16000, 64000, 320, codec_channels,
+ g722_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ RegisterSendCodec('A', codec_g722, 16000, 64000, 480, codec_channels,
+ g722_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ RegisterSendCodec('A', codec_g722, 16000, 64000, 640, codec_channels,
+ g722_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ RegisterSendCodec('A', codec_g722, 16000, 64000, 800, codec_channels,
+ g722_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ RegisterSendCodec('A', codec_g722, 16000, 64000, 960, codec_channels,
+ g722_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ out_file_.Close();
+#endif
+ if (test_mode_ != 0) {
+ printf("===========================================================\n");
+ printf("Test number: %d\n", test_cntr_ + 1);
+ printf("Test type: Stereo-to-stereo\n");
+ }
+ channel_a2b_->set_codec_mode(kStereo);
+ test_cntr_++;
+ OpenOutFile(test_cntr_);
+ char codec_l16[] = "L16";
+ RegisterSendCodec('A', codec_l16, 8000, 128000, 80, codec_channels,
+ l16_8khz_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ RegisterSendCodec('A', codec_l16, 8000, 128000, 160, codec_channels,
+ l16_8khz_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ RegisterSendCodec('A', codec_l16, 8000, 128000, 240, codec_channels,
+ l16_8khz_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ RegisterSendCodec('A', codec_l16, 8000, 128000, 320, codec_channels,
+ l16_8khz_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ out_file_.Close();
+
+ if (test_mode_ != 0) {
+ printf("===========================================================\n");
+ printf("Test number: %d\n", test_cntr_ + 1);
+ printf("Test type: Stereo-to-stereo\n");
+ }
+ test_cntr_++;
+ OpenOutFile(test_cntr_);
+ RegisterSendCodec('A', codec_l16, 16000, 256000, 160, codec_channels,
+ l16_16khz_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ RegisterSendCodec('A', codec_l16, 16000, 256000, 320, codec_channels,
+ l16_16khz_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ RegisterSendCodec('A', codec_l16, 16000, 256000, 480, codec_channels,
+ l16_16khz_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ RegisterSendCodec('A', codec_l16, 16000, 256000, 640, codec_channels,
+ l16_16khz_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ out_file_.Close();
+
+ if (test_mode_ != 0) {
+ printf("===========================================================\n");
+ printf("Test number: %d\n", test_cntr_ + 1);
+ printf("Test type: Stereo-to-stereo\n");
+ }
+ test_cntr_++;
+ OpenOutFile(test_cntr_);
+ RegisterSendCodec('A', codec_l16, 32000, 512000, 320, codec_channels,
+ l16_32khz_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ RegisterSendCodec('A', codec_l16, 32000, 512000, 640, codec_channels,
+ l16_32khz_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ out_file_.Close();
+#ifdef PCMA_AND_PCMU
+ if (test_mode_ != 0) {
+ printf("===========================================================\n");
+ printf("Test number: %d\n", test_cntr_ + 1);
+ printf("Test type: Stereo-to-stereo\n");
+ }
+ channel_a2b_->set_codec_mode(kStereo);
+ audio_channels = 2;
+ codec_channels = 2;
+ test_cntr_++;
+ OpenOutFile(test_cntr_);
+ char codec_pcma[] = "PCMA";
+ RegisterSendCodec('A', codec_pcma, 8000, 64000, 80, codec_channels,
+ pcma_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ RegisterSendCodec('A', codec_pcma, 8000, 64000, 160, codec_channels,
+ pcma_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ RegisterSendCodec('A', codec_pcma, 8000, 64000, 240, codec_channels,
+ pcma_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ RegisterSendCodec('A', codec_pcma, 8000, 64000, 320, codec_channels,
+ pcma_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ RegisterSendCodec('A', codec_pcma, 8000, 64000, 400, codec_channels,
+ pcma_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ RegisterSendCodec('A', codec_pcma, 8000, 64000, 480, codec_channels,
+ pcma_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+
+ // Test that VAD/DTX cannot be turned on while sending stereo.
+ EXPECT_EQ(-1, acm_a_->SetVAD(true, true, VADNormal));
+ EXPECT_EQ(0, acm_a_->VAD(&dtx, &vad, &vad_mode));
+ EXPECT_FALSE(dtx);
+ EXPECT_FALSE(vad);
+ EXPECT_EQ(0, acm_a_->SetVAD(false, false, VADNormal));
+ EXPECT_EQ(0, acm_a_->VAD(&dtx, &vad, &vad_mode));
+ EXPECT_FALSE(dtx);
+ EXPECT_FALSE(vad);
+
+ out_file_.Close();
+ if (test_mode_ != 0) {
+ printf("===========================================================\n");
+ printf("Test number: %d\n", test_cntr_ + 1);
+ printf("Test type: Stereo-to-stereo\n");
+ }
+ test_cntr_++;
+ OpenOutFile(test_cntr_);
+ char codec_pcmu[] = "PCMU";
+ RegisterSendCodec('A', codec_pcmu, 8000, 64000, 80, codec_channels,
+ pcmu_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ RegisterSendCodec('A', codec_pcmu, 8000, 64000, 160, codec_channels,
+ pcmu_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ RegisterSendCodec('A', codec_pcmu, 8000, 64000, 240, codec_channels,
+ pcmu_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ RegisterSendCodec('A', codec_pcmu, 8000, 64000, 320, codec_channels,
+ pcmu_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ RegisterSendCodec('A', codec_pcmu, 8000, 64000, 400, codec_channels,
+ pcmu_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ RegisterSendCodec('A', codec_pcmu, 8000, 64000, 480, codec_channels,
+ pcmu_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ out_file_.Close();
+#endif
+#ifdef WEBRTC_CODEC_OPUS
+ if (test_mode_ != 0) {
+ printf("===========================================================\n");
+ printf("Test number: %d\n", test_cntr_ + 1);
+ printf("Test type: Stereo-to-stereo\n");
+ }
+ channel_a2b_->set_codec_mode(kStereo);
+ audio_channels = 2;
+ codec_channels = 2;
+ test_cntr_++;
+ OpenOutFile(test_cntr_);
+
+ char codec_opus[] = "opus";
+ // Run Opus with 10 ms frame size.
+ RegisterSendCodec('A', codec_opus, 48000, 64000, 480, codec_channels,
+ opus_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ // Run Opus with 20 ms frame size.
+ RegisterSendCodec('A', codec_opus, 48000, 64000, 480*2, codec_channels,
+ opus_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ // Run Opus with 40 ms frame size.
+ RegisterSendCodec('A', codec_opus, 48000, 64000, 480*4, codec_channels,
+ opus_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ // Run Opus with 60 ms frame size.
+ RegisterSendCodec('A', codec_opus, 48000, 64000, 480*6, codec_channels,
+ opus_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ // Run Opus with 20 ms frame size and different bitrates.
+ RegisterSendCodec('A', codec_opus, 48000, 40000, 960, codec_channels,
+ opus_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ RegisterSendCodec('A', codec_opus, 48000, 510000, 960, codec_channels,
+ opus_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ out_file_.Close();
+#endif
+ //
+ // Test Mono-To-Stereo for all codecs.
+ //
+ audio_channels = 1;
+ codec_channels = 2;
+
+#ifdef WEBRTC_CODEC_G722
+ if (test_mode_ != 0) {
+ printf("===============================================================\n");
+ printf("Test number: %d\n", test_cntr_ + 1);
+ printf("Test type: Mono-to-stereo\n");
+ }
+ test_cntr_++;
+ channel_a2b_->set_codec_mode(kStereo);
+ OpenOutFile(test_cntr_);
+ RegisterSendCodec('A', codec_g722, 16000, 64000, 160, codec_channels,
+ g722_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ out_file_.Close();
+#endif
+ if (test_mode_ != 0) {
+ printf("===============================================================\n");
+ printf("Test number: %d\n", test_cntr_ + 1);
+ printf("Test type: Mono-to-stereo\n");
+ }
+ test_cntr_++;
+ channel_a2b_->set_codec_mode(kStereo);
+ OpenOutFile(test_cntr_);
+ RegisterSendCodec('A', codec_l16, 8000, 128000, 80, codec_channels,
+ l16_8khz_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ out_file_.Close();
+ if (test_mode_ != 0) {
+ printf("===============================================================\n");
+ printf("Test number: %d\n", test_cntr_ + 1);
+ printf("Test type: Mono-to-stereo\n");
+ }
+ test_cntr_++;
+ OpenOutFile(test_cntr_);
+ RegisterSendCodec('A', codec_l16, 16000, 256000, 160, codec_channels,
+ l16_16khz_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ out_file_.Close();
+ if (test_mode_ != 0) {
+ printf("===============================================================\n");
+ printf("Test number: %d\n", test_cntr_ + 1);
+ printf("Test type: Mono-to-stereo\n");
+ }
+ test_cntr_++;
+ OpenOutFile(test_cntr_);
+ RegisterSendCodec('A', codec_l16, 32000, 512000, 320, codec_channels,
+ l16_32khz_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ out_file_.Close();
+#ifdef PCMA_AND_PCMU
+ if (test_mode_ != 0) {
+ printf("===============================================================\n");
+ printf("Test number: %d\n", test_cntr_ + 1);
+ printf("Test type: Mono-to-stereo\n");
+ }
+ test_cntr_++;
+ channel_a2b_->set_codec_mode(kStereo);
+ OpenOutFile(test_cntr_);
+ RegisterSendCodec('A', codec_pcmu, 8000, 64000, 80, codec_channels,
+ pcmu_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ RegisterSendCodec('A', codec_pcma, 8000, 64000, 80, codec_channels,
+ pcma_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ out_file_.Close();
+#endif
+#ifdef WEBRTC_CODEC_OPUS
+ if (test_mode_ != 0) {
+ printf("===============================================================\n");
+ printf("Test number: %d\n", test_cntr_ + 1);
+ printf("Test type: Mono-to-stereo\n");
+ }
+
+ // Keep encode and decode in stereo.
+ test_cntr_++;
+ channel_a2b_->set_codec_mode(kStereo);
+ OpenOutFile(test_cntr_);
+ RegisterSendCodec('A', codec_opus, 48000, 64000, 960, codec_channels,
+ opus_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+
+ // Encode in mono, decode in stereo mode.
+ RegisterSendCodec('A', codec_opus, 48000, 64000, 960, 1, opus_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ out_file_.Close();
+#endif
+
+ //
+ // Test Stereo-To-Mono for all codecs.
+ //
+ audio_channels = 2;
+ codec_channels = 1;
+ channel_a2b_->set_codec_mode(kMono);
+
+#ifdef WEBRTC_CODEC_G722
+ // Run stereo audio and mono codec.
+ if (test_mode_ != 0) {
+ printf("===============================================================\n");
+ printf("Test number: %d\n", test_cntr_ + 1);
+ printf("Test type: Stereo-to-mono\n");
+ }
+ test_cntr_++;
+ OpenOutFile(test_cntr_);
+ RegisterSendCodec('A', codec_g722, 16000, 64000, 160, codec_channels,
+ g722_pltype_);
+
+ // Make sure it is possible to set VAD/CNG, now that we are sending mono
+ // again.
+ EXPECT_EQ(0, acm_a_->SetVAD(true, true, VADNormal));
+ EXPECT_EQ(0, acm_a_->VAD(&dtx, &vad, &vad_mode));
+ EXPECT_TRUE(dtx);
+ EXPECT_TRUE(vad);
+ EXPECT_EQ(0, acm_a_->SetVAD(false, false, VADNormal));
+ Run(channel_a2b_, audio_channels, codec_channels);
+ out_file_.Close();
+#endif
+ if (test_mode_ != 0) {
+ printf("===============================================================\n");
+ printf("Test number: %d\n", test_cntr_ + 1);
+ printf("Test type: Stereo-to-mono\n");
+ }
+ test_cntr_++;
+ OpenOutFile(test_cntr_);
+ RegisterSendCodec('A', codec_l16, 8000, 128000, 80, codec_channels,
+ l16_8khz_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ out_file_.Close();
+ if (test_mode_ != 0) {
+ printf("===============================================================\n");
+ printf("Test number: %d\n", test_cntr_ + 1);
+ printf("Test type: Stereo-to-mono\n");
+ }
+ test_cntr_++;
+ OpenOutFile(test_cntr_);
+ RegisterSendCodec('A', codec_l16, 16000, 256000, 160, codec_channels,
+ l16_16khz_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ out_file_.Close();
+ if (test_mode_ != 0) {
+ printf("==============================================================\n");
+ printf("Test number: %d\n", test_cntr_ + 1);
+ printf("Test type: Stereo-to-mono\n");
+ }
+ test_cntr_++;
+ OpenOutFile(test_cntr_);
+ RegisterSendCodec('A', codec_l16, 32000, 512000, 320, codec_channels,
+ l16_32khz_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ out_file_.Close();
+#ifdef PCMA_AND_PCMU
+ if (test_mode_ != 0) {
+ printf("===============================================================\n");
+ printf("Test number: %d\n", test_cntr_ + 1);
+ printf("Test type: Stereo-to-mono\n");
+ }
+ test_cntr_++;
+ OpenOutFile(test_cntr_);
+ RegisterSendCodec('A', codec_pcmu, 8000, 64000, 80, codec_channels,
+ pcmu_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ RegisterSendCodec('A', codec_pcma, 8000, 64000, 80, codec_channels,
+ pcma_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+ out_file_.Close();
+#endif
+#ifdef WEBRTC_CODEC_OPUS
+ if (test_mode_ != 0) {
+ printf("===============================================================\n");
+ printf("Test number: %d\n", test_cntr_ + 1);
+ printf("Test type: Stereo-to-mono\n");
+ }
+ test_cntr_++;
+ OpenOutFile(test_cntr_);
+ // Encode and decode in mono.
+ RegisterSendCodec('A', codec_opus, 48000, 32000, 960, codec_channels,
+ opus_pltype_);
+ CodecInst opus_codec_param;
+ for (uint8_t n = 0; n < num_encoders; n++) {
+ EXPECT_EQ(0, acm_b_->Codec(n, &opus_codec_param));
+ if (!strcmp(opus_codec_param.plname, "opus")) {
+ opus_codec_param.channels = 1;
+ EXPECT_EQ(0, acm_b_->RegisterReceiveCodec(opus_codec_param));
+ break;
+ }
+ }
+ Run(channel_a2b_, audio_channels, codec_channels);
+
+ // Encode in stereo, decode in mono.
+ RegisterSendCodec('A', codec_opus, 48000, 32000, 960, 2, opus_pltype_);
+ Run(channel_a2b_, audio_channels, codec_channels);
+
+ out_file_.Close();
+
+ // Test switching between decoding mono and stereo for Opus.
+
+ // Decode in mono.
+ test_cntr_++;
+ OpenOutFile(test_cntr_);
+ if (test_mode_ != 0) {
+ // Print out codec and settings
+ printf("Test number: %d\nCodec: Opus Freq: 48000 Rate :32000 PackSize: 960"
+ " Decode: mono\n", test_cntr_);
+ }
+ Run(channel_a2b_, audio_channels, codec_channels);
+ out_file_.Close();
+ // Decode in stereo.
+ test_cntr_++;
+ OpenOutFile(test_cntr_);
+ if (test_mode_ != 0) {
+ // Print out codec and settings
+ printf("Test number: %d\nCodec: Opus Freq: 48000 Rate :32000 PackSize: 960"
+ " Decode: stereo\n", test_cntr_);
+ }
+ opus_codec_param.channels = 2;
+ EXPECT_EQ(0, acm_b_->RegisterReceiveCodec(opus_codec_param));
+ Run(channel_a2b_, audio_channels, 2);
+ out_file_.Close();
+ // Decode in mono.
+ test_cntr_++;
+ OpenOutFile(test_cntr_);
+ if (test_mode_ != 0) {
+ // Print out codec and settings
+ printf("Test number: %d\nCodec: Opus Freq: 48000 Rate :32000 PackSize: 960"
+ " Decode: mono\n", test_cntr_);
+ }
+ opus_codec_param.channels = 1;
+ EXPECT_EQ(0, acm_b_->RegisterReceiveCodec(opus_codec_param));
+ Run(channel_a2b_, audio_channels, codec_channels);
+ out_file_.Close();
+
+#endif
+
+ // Print out which codecs were tested, and which were not, in the run.
+ if (test_mode_ != 0) {
+ printf("\nThe following codecs was INCLUDED in the test:\n");
+#ifdef WEBRTC_CODEC_G722
+ printf(" G.722\n");
+#endif
+ printf(" PCM16\n");
+ printf(" G.711\n");
+#ifdef WEBRTC_CODEC_OPUS
+ printf(" Opus\n");
+#endif
+ printf("\nTo complete the test, listen to the %d number of output "
+ "files.\n",
+ test_cntr_);
+ }
+
+ // Delete the file pointers.
+ delete in_file_stereo_;
+ delete in_file_mono_;
+}
+
+// Register Codec to use in the test
+//
+// Input: side - which ACM to use, 'A' or 'B'
+ // codec_name - name to use when registering the codec
+ // sampling_freq_hz - sampling frequency in Hertz
+ // rate - bitrate in bits per second
+// pack_size - packet size in samples
+// channels - number of channels; 1 for mono, 2 for stereo
+// payload_type - payload type for the codec
+void TestStereo::RegisterSendCodec(char side, char* codec_name,
+ int32_t sampling_freq_hz, int rate,
+ int pack_size, int channels,
+ int payload_type) {
+ if (test_mode_ != 0) {
+ // Print out codec and settings
+ printf("Codec: %s Freq: %d Rate: %d PackSize: %d\n", codec_name,
+ sampling_freq_hz, rate, pack_size);
+ }
+
+ // Store packet size in samples, used to validate the received packet
+ pack_size_samp_ = pack_size;
+
+ // Store the expected packet size in bytes, used to validate the received
+ // packet. Add 0.875 to always round up to a whole byte.
+ pack_size_bytes_ = (uint16_t)(static_cast<float>(pack_size * rate) /
+ static_cast<float>(sampling_freq_hz * 8) +
+ 0.875);
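+ // Worked example (L16 stereo case): 80 samples at 128000 bit/s and 8000 Hz
+ // gives 80 * 128000 / (8000 * 8) = 160 bytes per channel; the check in
+ // Run() multiplies this by the number of output channels.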
+
+ // Set pointer to the ACM where to register the codec
+ AudioCodingModule* my_acm = NULL;
+ switch (side) {
+ case 'A': {
+ my_acm = acm_a_.get();
+ break;
+ }
+ case 'B': {
+ my_acm = acm_b_.get();
+ break;
+ }
+ default:
+ break;
+ }
+ ASSERT_TRUE(my_acm != NULL);
+
+ CodecInst my_codec_param;
+ // Get all codec parameters before registering
+ EXPECT_GT(AudioCodingModule::Codec(codec_name, &my_codec_param,
+ sampling_freq_hz, channels), -1);
+ my_codec_param.rate = rate;
+ my_codec_param.pacsize = pack_size;
+ EXPECT_EQ(0, my_acm->RegisterSendCodec(my_codec_param));
+
+ send_codec_name_ = codec_name;
+}
+
+void TestStereo::Run(TestPackStereo* channel, int in_channels, int out_channels,
+ int percent_loss) {
+ AudioFrame audio_frame;
+
+ int32_t out_freq_hz_b = out_file_.SamplingFrequency();
+ uint16_t rec_size;
+ uint32_t time_stamp_diff;
+ channel->reset_payload_size();
+ int error_count = 0;
+ int variable_bytes = 0;
+ int variable_packets = 0;
+ // Set test length to 500 ms (50 blocks of 10 ms each).
+ in_file_mono_->SetNum10MsBlocksToRead(50);
+ in_file_stereo_->SetNum10MsBlocksToRead(50);
+ // Fast-forward 1 second (100 blocks) since the files start with silence.
+ in_file_stereo_->FastForward(100);
+ in_file_mono_->FastForward(100);
+
+ while (1) {
+ // Simulate packet loss by setting |lost_packet_| to "true" in
+ // |percent_loss| percent of the loops.
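+ // (E.g. with percent_loss = 10, roughly every 10th iteration marks its
+ // packet as lost.)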
+ if (percent_loss > 0) {
+ if (counter_ == floor((100 / percent_loss) + 0.5)) {
+ counter_ = 0;
+ channel->set_lost_packet(true);
+ } else {
+ channel->set_lost_packet(false);
+ }
+ counter_++;
+ }
+
+ // Add 10 msec to ACM
+ if (in_channels == 1) {
+ if (in_file_mono_->EndOfFile()) {
+ break;
+ }
+ in_file_mono_->Read10MsData(audio_frame);
+ } else {
+ if (in_file_stereo_->EndOfFile()) {
+ break;
+ }
+ in_file_stereo_->Read10MsData(audio_frame);
+ }
+ EXPECT_GE(acm_a_->Add10MsData(audio_frame), 0);
+
+ // Verify that the received packet size matches the settings.
+ rec_size = channel->payload_size();
+ if ((0 < rec_size) & (rec_size < 65535)) {
+ if (strcmp(send_codec_name_, "opus") == 0) {
+ // Opus is a variable rate codec, hence calculate the average packet
+ // size, and later make sure the average is in the right range.
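+ // (Nominal size at 64000 bit/s and 20 ms frames would be 160 bytes; the
+ // EXPECT_NEAR after the loop allows the average to deviate from
+ // pack_size_bytes_ by up to 18 bytes.)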
+ variable_bytes += rec_size;
+ variable_packets++;
+ } else {
+ // For fixed rate codecs, check that packet size is correct.
+ if ((rec_size != pack_size_bytes_ * out_channels)
+ && (pack_size_bytes_ < 65535)) {
+ error_count++;
+ }
+ }
+ // Verify that the timestamp is updated with the expected length.
+ time_stamp_diff = channel->timestamp_diff();
+ if ((counter_ > 10) && (time_stamp_diff != pack_size_samp_)) {
+ error_count++;
+ }
+ }
+
+ // Run received side of ACM
+ EXPECT_EQ(0, acm_b_->PlayoutData10Ms(out_freq_hz_b, &audio_frame));
+
+ // Write output speech to file
+ out_file_.Write10MsData(
+ audio_frame.data_,
+ audio_frame.samples_per_channel_ * audio_frame.num_channels_);
+ }
+
+ EXPECT_EQ(0, error_count);
+
+ // Check that packet size is in the right range for variable rate codecs,
+ // such as Opus.
+ if (variable_packets > 0) {
+ variable_bytes /= variable_packets;
+ EXPECT_NEAR(variable_bytes, pack_size_bytes_, 18);
+ }
+
+ if (in_file_mono_->EndOfFile()) {
+ in_file_mono_->Rewind();
+ }
+ if (in_file_stereo_->EndOfFile()) {
+ in_file_stereo_->Rewind();
+ }
+ // Reset in case we ended with a lost packet
+ channel->set_lost_packet(false);
+}
+
+void TestStereo::OpenOutFile(int16_t test_number) {
+ std::string file_name;
+ std::stringstream file_stream;
+ file_stream << webrtc::test::OutputPath() << "teststereo_out_" << test_number
+ << ".pcm";
+ file_name = file_stream.str();
+ out_file_.Open(file_name, 32000, "wb");
+}
+
+void TestStereo::DisplaySendReceiveCodec() {
+ auto send_codec = acm_a_->SendCodec();
+ if (test_mode_ != 0) {
+ ASSERT_TRUE(send_codec);
+ printf("%s -> ", send_codec->plname);
+ }
+ CodecInst receive_codec;
+ acm_b_->ReceiveCodec(&receive_codec);
+ if (test_mode_ != 0) {
+ printf("%s\n", receive_codec.plname);
+ }
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/test/TestStereo.h b/webrtc/modules/audio_coding/test/TestStereo.h
new file mode 100644
index 0000000000..4526be6960
--- /dev/null
+++ b/webrtc/modules/audio_coding/test/TestStereo.h
@@ -0,0 +1,117 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_TEST_TESTSTEREO_H_
+#define WEBRTC_MODULES_AUDIO_CODING_TEST_TESTSTEREO_H_
+
+#include <math.h>
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/audio_coding/test/ACMTest.h"
+#include "webrtc/modules/audio_coding/test/Channel.h"
+#include "webrtc/modules/audio_coding/test/PCMFile.h"
+
+#define PCMA_AND_PCMU
+
+namespace webrtc {
+
+enum StereoMonoMode {
+ kNotSet,
+ kMono,
+ kStereo
+};
+
+class TestPackStereo : public AudioPacketizationCallback {
+ public:
+ TestPackStereo();
+ ~TestPackStereo();
+
+ void RegisterReceiverACM(AudioCodingModule* acm);
+
+ int32_t SendData(const FrameType frame_type,
+ const uint8_t payload_type,
+ const uint32_t timestamp,
+ const uint8_t* payload_data,
+ const size_t payload_size,
+ const RTPFragmentationHeader* fragmentation) override;
+
+ uint16_t payload_size();
+ uint32_t timestamp_diff();
+ void reset_payload_size();
+ void set_codec_mode(StereoMonoMode mode);
+ void set_lost_packet(bool lost);
+
+ private:
+ AudioCodingModule* receiver_acm_;
+ uint16_t seq_no_;
+ uint32_t timestamp_diff_;
+ uint32_t last_in_timestamp_;
+ uint64_t total_bytes_;
+ int payload_size_;
+ StereoMonoMode codec_mode_;
+ // Simulate packet losses
+ bool lost_packet_;
+};
+
+class TestStereo : public ACMTest {
+ public:
+ explicit TestStereo(int test_mode);
+ ~TestStereo();
+
+ void Perform() override;
+
+ private:
+ // The default value of '-1' indicates that the registration is based only on
+ // the codec name, and no matching of the sampling frequency is required. This
+ // is useful for codecs that support several sampling frequencies.
+ void RegisterSendCodec(char side, char* codec_name, int32_t samp_freq_hz,
+ int rate, int pack_size, int channels,
+ int payload_type);
+
+ void Run(TestPackStereo* channel, int in_channels, int out_channels,
+ int percent_loss = 0);
+ void OpenOutFile(int16_t test_number);
+ void DisplaySendReceiveCodec();
+
+ int test_mode_;
+
+ rtc::scoped_ptr<AudioCodingModule> acm_a_;
+ rtc::scoped_ptr<AudioCodingModule> acm_b_;
+
+ TestPackStereo* channel_a2b_;
+
+ PCMFile* in_file_stereo_;
+ PCMFile* in_file_mono_;
+ PCMFile out_file_;
+ int16_t test_cntr_;
+ uint16_t pack_size_samp_;
+ uint16_t pack_size_bytes_;
+ int counter_;
+ char* send_codec_name_;
+
+ // Payload types for stereo codecs and CNG
+#ifdef WEBRTC_CODEC_G722
+ int g722_pltype_;
+#endif
+ int l16_8khz_pltype_;
+ int l16_16khz_pltype_;
+ int l16_32khz_pltype_;
+#ifdef PCMA_AND_PCMU
+ int pcma_pltype_;
+ int pcmu_pltype_;
+#endif
+#ifdef WEBRTC_CODEC_OPUS
+ int opus_pltype_;
+#endif
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_TEST_TESTSTEREO_H_
diff --git a/webrtc/modules/audio_coding/test/TestVADDTX.cc b/webrtc/modules/audio_coding/test/TestVADDTX.cc
new file mode 100644
index 0000000000..229dc2d474
--- /dev/null
+++ b/webrtc/modules/audio_coding/test/TestVADDTX.cc
@@ -0,0 +1,276 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/test/TestVADDTX.h"
+
+#include <string>
+
+#include "webrtc/engine_configurations.h"
+#include "webrtc/modules/audio_coding/test/PCMFile.h"
+#include "webrtc/modules/audio_coding/test/utility.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+namespace webrtc {
+
+#ifdef WEBRTC_CODEC_ISAC
+const CodecInst kIsacWb = {103, "ISAC", 16000, 480, 1, 32000};
+const CodecInst kIsacSwb = {104, "ISAC", 32000, 960, 1, 56000};
+#endif
+
+#ifdef WEBRTC_CODEC_ILBC
+const CodecInst kIlbc = {102, "ILBC", 8000, 240, 1, 13300};
+#endif
+
+#ifdef WEBRTC_CODEC_OPUS
+const CodecInst kOpus = {120, "opus", 48000, 960, 1, 64000};
+const CodecInst kOpusStereo = {120, "opus", 48000, 960, 2, 64000};
+#endif
+
+ActivityMonitor::ActivityMonitor() {
+ ResetStatistics();
+}
+
+int32_t ActivityMonitor::InFrameType(FrameType frame_type) {
+ counter_[frame_type]++;
+ return 0;
+}
+
+void ActivityMonitor::PrintStatistics() {
+ printf("\n");
+ printf("kEmptyFrame %u\n", counter_[kEmptyFrame]);
+ printf("kAudioFrameSpeech %u\n", counter_[kAudioFrameSpeech]);
+ printf("kAudioFrameCN %u\n", counter_[kAudioFrameCN]);
+ printf("kVideoFrameKey %u\n", counter_[kVideoFrameKey]);
+ printf("kVideoFrameDelta %u\n", counter_[kVideoFrameDelta]);
+ printf("\n\n");
+}
+
+void ActivityMonitor::ResetStatistics() {
+ memset(counter_, 0, sizeof(counter_));
+}
+
+void ActivityMonitor::GetStatistics(uint32_t* counter) {
+ memcpy(counter, counter_, sizeof(counter_));
+}
+
+TestVadDtx::TestVadDtx()
+ : acm_send_(AudioCodingModule::Create(0)),
+ acm_receive_(AudioCodingModule::Create(1)),
+ channel_(new Channel),
+ monitor_(new ActivityMonitor) {
+ EXPECT_EQ(0, acm_send_->RegisterTransportCallback(channel_.get()));
+ channel_->RegisterReceiverACM(acm_receive_.get());
+ EXPECT_EQ(0, acm_send_->RegisterVADCallback(monitor_.get()));
+}
+
+void TestVadDtx::RegisterCodec(CodecInst codec_param) {
+ // Set the codec for sending and receiving.
+ EXPECT_EQ(0, acm_send_->RegisterSendCodec(codec_param));
+ EXPECT_EQ(0, acm_receive_->RegisterReceiveCodec(codec_param));
+ channel_->SetIsStereo(codec_param.channels > 1);
+}
+
+// Encodes a file and checks whether the numbers of occurrences of the various
+// packet types follow the expectation.
+void TestVadDtx::Run(std::string in_filename, int frequency, int channels,
+ std::string out_filename, bool append,
+ const int* expects) {
+ monitor_->ResetStatistics();
+
+ PCMFile in_file;
+ in_file.Open(in_filename, frequency, "rb");
+ in_file.ReadStereo(channels > 1);
+ // Set test length to 1000 ms (100 blocks of 10 ms each).
+ in_file.SetNum10MsBlocksToRead(100);
+ // Fast-forward the file 500 ms (50 blocks). The first second of the file is
+ // silence, but we want to keep half of it to test silence periods.
+ in_file.FastForward(50);
+
+ PCMFile out_file;
+ if (append) {
+ out_file.Open(out_filename, kOutputFreqHz, "ab");
+ } else {
+ out_file.Open(out_filename, kOutputFreqHz, "wb");
+ }
+
+ uint16_t frame_size_samples = in_file.PayloadLength10Ms();
+ uint32_t time_stamp = 0x12345678;
+ AudioFrame audio_frame;
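+ // Drive the codecs 10 ms at a time; the send timestamp advances by the
+ // number of samples per block, mimicking an RTP media clock.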
+ while (!in_file.EndOfFile()) {
+ in_file.Read10MsData(audio_frame);
+ audio_frame.timestamp_ = time_stamp;
+ time_stamp += frame_size_samples;
+ EXPECT_GE(acm_send_->Add10MsData(audio_frame), 0);
+ acm_receive_->PlayoutData10Ms(kOutputFreqHz, &audio_frame);
+ out_file.Write10MsData(audio_frame);
+ }
+
+ in_file.Close();
+ out_file.Close();
+
+#ifdef PRINT_STAT
+ monitor_->PrintStatistics();
+#endif
+
+ uint32_t stats[5];
+ monitor_->GetStatistics(stats);
+ monitor_->ResetStatistics();
+
+ for (const auto& st : stats) {
+ int i = &st - stats; // Calculate the current position in stats.
+ switch (expects[i]) {
+ case 0: {
+ EXPECT_EQ(0u, st) << "stats[" << i << "] error.";
+ break;
+ }
+ case 1: {
+ EXPECT_GT(st, 0u) << "stats[" << i << "] error.";
+ break;
+ }
+ }
+ }
+}
+
+// Following is the implementation of TestWebRtcVadDtx.
+TestWebRtcVadDtx::TestWebRtcVadDtx()
+ : vad_enabled_(false),
+ dtx_enabled_(false),
+ output_file_num_(0) {
+}
+
+void TestWebRtcVadDtx::Perform() {
+ // Go through various test cases.
+#ifdef WEBRTC_CODEC_ISAC
+ // Register iSAC WB as send codec
+ RegisterCodec(kIsacWb);
+ RunTestCases();
+
+ // Register iSAC SWB as send codec
+ RegisterCodec(kIsacSwb);
+ RunTestCases();
+#endif
+
+#ifdef WEBRTC_CODEC_ILBC
+ // Register iLBC as send codec
+ RegisterCodec(kIlbc);
+ RunTestCases();
+#endif
+
+#ifdef WEBRTC_CODEC_OPUS
+ // Register Opus as send codec
+ RegisterCodec(kOpus);
+ RunTestCases();
+#endif
+}
+
+// Test various configurations on VAD/DTX.
+void TestWebRtcVadDtx::RunTestCases() {
+ // #1 DTX = OFF, VAD = OFF, VADNormal
+ SetVAD(false, false, VADNormal);
+ Test(true);
+
+ // #2 DTX = ON, VAD = ON, VADAggr
+ SetVAD(true, true, VADAggr);
+ Test(false);
+
+ // #3 DTX = ON, VAD = ON, VADLowBitrate
+ SetVAD(true, true, VADLowBitrate);
+ Test(false);
+
+ // #4 DTX = ON, VAD = ON, VADVeryAggr
+ SetVAD(true, true, VADVeryAggr);
+ Test(false);
+
+ // #5 DTX = ON, VAD = ON, VADNormal
+ SetVAD(true, true, VADNormal);
+ Test(false);
+}
+
+// Set the expectation and run the test.
+void TestWebRtcVadDtx::Test(bool new_outfile) {
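+ // expects[]: do not care about empty frames, speech packets must occur,
+ // CN packets must occur exactly when DTX is enabled, and the video frame
+ // types must never occur.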
+ int expects[] = {-1, 1, dtx_enabled_, 0, 0};
+ if (new_outfile) {
+ output_file_num_++;
+ }
+ std::stringstream out_filename;
+ out_filename << webrtc::test::OutputPath()
+ << "testWebRtcVadDtx_outFile_"
+ << output_file_num_
+ << ".pcm";
+ Run(webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm"),
+ 32000, 1, out_filename.str(), !new_outfile, expects);
+}
+
+void TestWebRtcVadDtx::SetVAD(bool enable_dtx, bool enable_vad,
+ ACMVADMode vad_mode) {
+ ACMVADMode mode;
+ EXPECT_EQ(0, acm_send_->SetVAD(enable_dtx, enable_vad, vad_mode));
+ EXPECT_EQ(0, acm_send_->VAD(&dtx_enabled_, &vad_enabled_, &mode));
+
+ auto codec_param = acm_send_->SendCodec();
+ ASSERT_TRUE(codec_param);
+ if (STR_CASE_CMP(codec_param->plname, "opus") == 0) {
+ // If send codec is Opus, WebRTC VAD/DTX cannot be used.
+ enable_dtx = enable_vad = false;
+ }
+
+ EXPECT_EQ(dtx_enabled_, enable_dtx); // DTX should be set as expected.
+
+ if (dtx_enabled_) {
+ EXPECT_TRUE(vad_enabled_); // WebRTC DTX cannot run without WebRTC VAD.
+ } else {
+ // Using no DTX should not affect setting of VAD.
+ EXPECT_EQ(enable_vad, vad_enabled_);
+ }
+}
+
+// Following is the implementation of TestOpusDtx.
+void TestOpusDtx::Perform() {
+#ifdef WEBRTC_CODEC_ISAC
+ // If a codec other than Opus is set, Opus DTX cannot be switched on.
+ RegisterCodec(kIsacWb);
+ EXPECT_EQ(-1, acm_send_->EnableOpusDtx());
+ EXPECT_EQ(0, acm_send_->DisableOpusDtx());
+#endif
+
+#ifdef WEBRTC_CODEC_OPUS
+ int expects[] = {0, 1, 0, 0, 0};
+
+ // Register Opus as send codec
+ std::string out_filename = webrtc::test::OutputPath() +
+ "testOpusDtx_outFile_mono.pcm";
+ RegisterCodec(kOpus);
+ EXPECT_EQ(0, acm_send_->DisableOpusDtx());
+
+ Run(webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm"),
+ 32000, 1, out_filename, false, expects);
+
+ EXPECT_EQ(0, acm_send_->EnableOpusDtx());
+ expects[kEmptyFrame] = 1;
+ Run(webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm"),
+ 32000, 1, out_filename, true, expects);
+
+ // Register stereo Opus as send codec
+ out_filename = webrtc::test::OutputPath() + "testOpusDtx_outFile_stereo.pcm";
+ RegisterCodec(kOpusStereo);
+ EXPECT_EQ(0, acm_send_->DisableOpusDtx());
+ expects[kEmptyFrame] = 0;
+ Run(webrtc::test::ResourcePath("audio_coding/teststereo32kHz", "pcm"),
+ 32000, 2, out_filename, false, expects);
+
+ EXPECT_EQ(0, acm_send_->EnableOpusDtx());
+
+ expects[kEmptyFrame] = 1;
+ Run(webrtc::test::ResourcePath("audio_coding/teststereo32kHz", "pcm"),
+ 32000, 2, out_filename, true, expects);
+#endif
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/test/TestVADDTX.h b/webrtc/modules/audio_coding/test/TestVADDTX.h
new file mode 100644
index 0000000000..1e7f0ef4d7
--- /dev/null
+++ b/webrtc/modules/audio_coding/test/TestVADDTX.h
@@ -0,0 +1,102 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_TEST_TESTVADDTX_H_
+#define WEBRTC_MODULES_AUDIO_CODING_TEST_TESTVADDTX_H_
+
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/common_types.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module_typedefs.h"
+#include "webrtc/modules/audio_coding/test/ACMTest.h"
+#include "webrtc/modules/audio_coding/test/Channel.h"
+
+namespace webrtc {
+
+class ActivityMonitor : public ACMVADCallback {
+ public:
+ ActivityMonitor();
+ int32_t InFrameType(FrameType frame_type);
+ void PrintStatistics();
+ void ResetStatistics();
+ void GetStatistics(uint32_t* stats);
+ private:
+ // 0 - kEmptyFrame
+ // 1 - kAudioFrameSpeech
+ // 2 - kAudioFrameCN
+ // 3 - kVideoFrameKey (not used by audio)
+ // 4 - kVideoFrameDelta (not used by audio)
+ uint32_t counter_[5];
+};
+
+
+// TestVadDtx verifies that VAD/DTX perform as they should. It runs through an
+// audio file and checks whether the occurrences of the various packet types
+// follow the expectation. TestVadDtx needs its derived classes to implement
+// Perform() to put the test together.
+class TestVadDtx : public ACMTest {
+ public:
+ static const int kOutputFreqHz = 16000;
+
+ TestVadDtx();
+
+ virtual void Perform() = 0;
+
+ protected:
+ void RegisterCodec(CodecInst codec_param);
+
+ // Encodes a file and checks whether the numbers of occurrences of the
+ // various packet types follow the expectation. Saves the result to a file.
+ // expects[x] means
+ // -1 : do not care,
+ // 0 : there have been no packets of type |x|,
+ // 1 : there have been packets of type |x|,
+ // where |x| indexes the following packet types
+ // 0 - kEmptyFrame
+ // 1 - kAudioFrameSpeech
+ // 2 - kAudioFrameCN
+ // 3 - kVideoFrameKey (not used by audio)
+ // 4 - kVideoFrameDelta (not used by audio)
+ void Run(std::string in_filename, int frequency, int channels,
+ std::string out_filename, bool append, const int* expects);
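+ //
+ // A hypothetical call, expecting speech and CN packets but treating empty
+ // frames as "do not care" (values are illustrative only):
+ //   const int expects[] = {-1, 1, 1, 0, 0};
+ //   Run(in_file, 32000, 1, out_file, false, expects);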
+
+ rtc::scoped_ptr<AudioCodingModule> acm_send_;
+ rtc::scoped_ptr<AudioCodingModule> acm_receive_;
+ rtc::scoped_ptr<Channel> channel_;
+ rtc::scoped_ptr<ActivityMonitor> monitor_;
+};
+
+// TestWebRtcVadDtx verifies that the WebRTC VAD/DTX perform as they should.
+class TestWebRtcVadDtx final : public TestVadDtx {
+ public:
+ TestWebRtcVadDtx();
+
+ void Perform() override;
+
+ private:
+ void RunTestCases();
+ void Test(bool new_outfile);
+ void SetVAD(bool enable_dtx, bool enable_vad, ACMVADMode vad_mode);
+
+ bool vad_enabled_;
+ bool dtx_enabled_;
+ int output_file_num_;
+};
+
+// TestOpusDtx verifies that Opus DTX performs as it should.
+class TestOpusDtx final : public TestVadDtx {
+ public:
+ void Perform() override;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_TEST_TESTVADDTX_H_
diff --git a/webrtc/modules/audio_coding/test/Tester.cc b/webrtc/modules/audio_coding/test/Tester.cc
new file mode 100644
index 0000000000..a27f0bc58b
--- /dev/null
+++ b/webrtc/modules/audio_coding/test/Tester.cc
@@ -0,0 +1,181 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <string>
+#include <vector>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module.h"
+#include "webrtc/modules/audio_coding/test/APITest.h"
+#include "webrtc/modules/audio_coding/test/EncodeDecodeTest.h"
+#include "webrtc/modules/audio_coding/test/iSACTest.h"
+#include "webrtc/modules/audio_coding/test/opus_test.h"
+#include "webrtc/modules/audio_coding/test/PacketLossTest.h"
+#include "webrtc/modules/audio_coding/test/TestAllCodecs.h"
+#include "webrtc/modules/audio_coding/test/TestRedFec.h"
+#include "webrtc/modules/audio_coding/test/TestStereo.h"
+#include "webrtc/modules/audio_coding/test/TestVADDTX.h"
+#include "webrtc/modules/audio_coding/test/TwoWayCommunication.h"
+#include "webrtc/system_wrappers/include/trace.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+using webrtc::Trace;
+
+// This parameter describes how to run the tests. It is normally set to 0,
+// in which case all tests are run in quiet mode.
+#define ACM_TEST_MODE 0
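+// A non-zero value makes the tests verbose, e.g. printing the send and
+// receive codec names (see DisplaySendReceiveCodec() in TestStereo).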
+
+TEST(AudioCodingModuleTest, TestAllCodecs) {
+ Trace::CreateTrace();
+ Trace::SetTraceFile((webrtc::test::OutputPath() +
+ "acm_allcodecs_trace.txt").c_str());
+ webrtc::TestAllCodecs(ACM_TEST_MODE).Perform();
+ Trace::ReturnTrace();
+}
+
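+// The longer tests are compiled on Android but disabled by default; gtest
+// skips any test whose name carries the DISABLED_ prefix.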
+#if defined(WEBRTC_ANDROID)
+TEST(AudioCodingModuleTest, DISABLED_TestEncodeDecode) {
+#else
+TEST(AudioCodingModuleTest, TestEncodeDecode) {
+#endif
+ Trace::CreateTrace();
+ Trace::SetTraceFile((webrtc::test::OutputPath() +
+ "acm_encodedecode_trace.txt").c_str());
+ webrtc::EncodeDecodeTest(ACM_TEST_MODE).Perform();
+ Trace::ReturnTrace();
+}
+
+#if defined(WEBRTC_CODEC_RED)
+#if defined(WEBRTC_ANDROID)
+TEST(AudioCodingModuleTest, DISABLED_TestRedFec) {
+#else
+TEST(AudioCodingModuleTest, TestRedFec) {
+#endif
+ Trace::CreateTrace();
+ Trace::SetTraceFile((webrtc::test::OutputPath() +
+ "acm_fec_trace.txt").c_str());
+ webrtc::TestRedFec().Perform();
+ Trace::ReturnTrace();
+}
+#endif
+
+#if defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX)
+#if defined(WEBRTC_ANDROID)
+TEST(AudioCodingModuleTest, DISABLED_TestIsac) {
+#else
+TEST(AudioCodingModuleTest, TestIsac) {
+#endif
+ Trace::CreateTrace();
+ Trace::SetTraceFile((webrtc::test::OutputPath() +
+ "acm_isac_trace.txt").c_str());
+ webrtc::ISACTest(ACM_TEST_MODE).Perform();
+ Trace::ReturnTrace();
+}
+#endif
+
+#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX)) && \
+ defined(WEBRTC_CODEC_ILBC) && defined(WEBRTC_CODEC_G722)
+#if defined(WEBRTC_ANDROID)
+TEST(AudioCodingModuleTest, DISABLED_TwoWayCommunication) {
+#else
+TEST(AudioCodingModuleTest, TwoWayCommunication) {
+#endif
+ Trace::CreateTrace();
+ Trace::SetTraceFile((webrtc::test::OutputPath() +
+ "acm_twowaycom_trace.txt").c_str());
+ webrtc::TwoWayCommunication(ACM_TEST_MODE).Perform();
+ Trace::ReturnTrace();
+}
+#endif
+
+#if defined(WEBRTC_ANDROID)
+TEST(AudioCodingModuleTest, DISABLED_TestStereo) {
+#else
+TEST(AudioCodingModuleTest, TestStereo) {
+#endif
+ Trace::CreateTrace();
+ Trace::SetTraceFile((webrtc::test::OutputPath() +
+ "acm_stereo_trace.txt").c_str());
+ webrtc::TestStereo(ACM_TEST_MODE).Perform();
+ Trace::ReturnTrace();
+}
+
+#if defined(WEBRTC_ANDROID)
+TEST(AudioCodingModuleTest, DISABLED_TestWebRtcVadDtx) {
+#else
+TEST(AudioCodingModuleTest, TestWebRtcVadDtx) {
+#endif
+ Trace::CreateTrace();
+ Trace::SetTraceFile((webrtc::test::OutputPath() +
+ "acm_vaddtx_trace.txt").c_str());
+ webrtc::TestWebRtcVadDtx().Perform();
+ Trace::ReturnTrace();
+}
+
+TEST(AudioCodingModuleTest, TestOpusDtx) {
+ Trace::CreateTrace();
+ Trace::SetTraceFile((webrtc::test::OutputPath() +
+ "acm_opusdtx_trace.txt").c_str());
+ webrtc::TestOpusDtx().Perform();
+ Trace::ReturnTrace();
+}
+
+TEST(AudioCodingModuleTest, TestOpus) {
+ Trace::CreateTrace();
+ Trace::SetTraceFile((webrtc::test::OutputPath() +
+ "acm_opus_trace.txt").c_str());
+ webrtc::OpusTest().Perform();
+ Trace::ReturnTrace();
+}
+
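+// The PacketLossTest arguments below are assumed to be (channels, expected
+// loss rate, actual loss rate, burst length), judging from the mono/stereo
+// and burst variants that follow.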
+TEST(AudioCodingModuleTest, TestPacketLoss) {
+ Trace::CreateTrace();
+ Trace::SetTraceFile((webrtc::test::OutputPath() +
+ "acm_packetloss_trace.txt").c_str());
+ webrtc::PacketLossTest(1, 10, 10, 1).Perform();
+ Trace::ReturnTrace();
+}
+
+TEST(AudioCodingModuleTest, TestPacketLossBurst) {
+ Trace::CreateTrace();
+ Trace::SetTraceFile((webrtc::test::OutputPath() +
+ "acm_packetloss_burst_trace.txt").c_str());
+ webrtc::PacketLossTest(1, 10, 10, 2).Perform();
+ Trace::ReturnTrace();
+}
+
+TEST(AudioCodingModuleTest, TestPacketLossStereo) {
+ Trace::CreateTrace();
+ Trace::SetTraceFile((webrtc::test::OutputPath() +
+ "acm_packetloss_trace.txt").c_str());
+ webrtc::PacketLossTest(2, 10, 10, 1).Perform();
+ Trace::ReturnTrace();
+}
+
+TEST(AudioCodingModuleTest, TestPacketLossStereoBurst) {
+ Trace::CreateTrace();
+ Trace::SetTraceFile((webrtc::test::OutputPath() +
+ "acm_packetloss_burst_trace.txt").c_str());
+ webrtc::PacketLossTest(2, 10, 10, 2).Perform();
+ Trace::ReturnTrace();
+}
+
+// The full API test is too long to run automatically on bots, but can be used
+// for offline testing. User interaction is needed.
+#ifdef ACM_TEST_FULL_API
+ TEST(AudioCodingModuleTest, TestAPI) {
+ Trace::CreateTrace();
+ Trace::SetTraceFile((webrtc::test::OutputPath() +
+ "acm_apitest_trace.txt").c_str());
+ webrtc::APITest().Perform();
+ Trace::ReturnTrace();
+ }
+#endif
diff --git a/webrtc/modules/audio_coding/main/test/TimedTrace.cc b/webrtc/modules/audio_coding/test/TimedTrace.cc
index ff9b5eeb76..ff9b5eeb76 100644
--- a/webrtc/modules/audio_coding/main/test/TimedTrace.cc
+++ b/webrtc/modules/audio_coding/test/TimedTrace.cc
diff --git a/webrtc/modules/audio_coding/test/TimedTrace.h b/webrtc/modules/audio_coding/test/TimedTrace.h
new file mode 100644
index 0000000000..0793eb0c0c
--- /dev/null
+++ b/webrtc/modules/audio_coding/test/TimedTrace.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_TEST_TIMEDTRACE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_TEST_TIMEDTRACE_H_
+
+#include "webrtc/typedefs.h"
+
+#include <stdio.h>
+#include <stdlib.h>
+
+class TimedTrace {
+ public:
+ TimedTrace();
+ ~TimedTrace();
+
+ void SetTimeEllapsed(double myTime);
+ double TimeEllapsed();
+ void Tick10Msec();
+ int16_t SetUp(char* fileName);
+ void TimedLogg(char* message);
+
+ private:
+ static double _timeEllapsedSec;
+ static FILE* _timedTraceFile;
+
+};
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_TEST_TIMEDTRACE_H_
diff --git a/webrtc/modules/audio_coding/test/TwoWayCommunication.cc b/webrtc/modules/audio_coding/test/TwoWayCommunication.cc
new file mode 100644
index 0000000000..56e136bd34
--- /dev/null
+++ b/webrtc/modules/audio_coding/test/TwoWayCommunication.cc
@@ -0,0 +1,299 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "TwoWayCommunication.h"
+
+#include <ctype.h>
+#include <stdio.h>
+#include <string.h>
+
+#ifdef WIN32
+#include <Windows.h>
+#endif
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/engine_configurations.h"
+#include "webrtc/common_types.h"
+#include "webrtc/modules/audio_coding/test/PCMFile.h"
+#include "webrtc/modules/audio_coding/test/utility.h"
+#include "webrtc/system_wrappers/include/trace.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+namespace webrtc {
+
+#define MAX_FILE_NAME_LENGTH_BYTE 500
+
+TwoWayCommunication::TwoWayCommunication(int testMode)
+ : _acmA(AudioCodingModule::Create(1)),
+ _acmRefA(AudioCodingModule::Create(3)),
+ _testMode(testMode) {
+ AudioCodingModule::Config config;
+ // The clicks will be more obvious in FAX mode. TODO(henrik.lundin) Really?
+ config.neteq_config.playout_mode = kPlayoutFax;
+ config.id = 2;
+ _acmB.reset(AudioCodingModule::Create(config));
+ config.id = 4;
+ _acmRefB.reset(AudioCodingModule::Create(config));
+}
+
+TwoWayCommunication::~TwoWayCommunication() {
+ delete _channel_A2B;
+ delete _channel_B2A;
+ delete _channelRef_A2B;
+ delete _channelRef_B2A;
+#ifdef WEBRTC_DTMF_DETECTION
+ if (_dtmfDetectorA != NULL) {
+ delete _dtmfDetectorA;
+ }
+ if (_dtmfDetectorB != NULL) {
+ delete _dtmfDetectorB;
+ }
+#endif
+ _inFileA.Close();
+ _inFileB.Close();
+ _outFileA.Close();
+ _outFileB.Close();
+ _outFileRefA.Close();
+ _outFileRefB.Close();
+}
+
+void TwoWayCommunication::ChooseCodec(uint8_t* codecID_A,
+ uint8_t* codecID_B) {
+ rtc::scoped_ptr<AudioCodingModule> tmpACM(AudioCodingModule::Create(0));
+ uint8_t noCodec = tmpACM->NumberOfCodecs();
+ CodecInst codecInst;
+ printf("List of Supported Codecs\n");
+ printf("========================\n");
+ for (uint8_t codecCntr = 0; codecCntr < noCodec; codecCntr++) {
+ EXPECT_EQ(tmpACM->Codec(codecCntr, &codecInst), 0);
+ printf("%d- %s\n", codecCntr, codecInst.plname);
+ }
+ printf("\nChoose a send codec for side A [0]: ");
+ char myStr[15] = "";
+ EXPECT_TRUE(fgets(myStr, 10, stdin) != NULL);
+ *codecID_A = (uint8_t) atoi(myStr);
+
+ printf("\nChoose a send codec for side B [0]: ");
+ EXPECT_TRUE(fgets(myStr, 10, stdin) != NULL);
+ *codecID_B = (uint8_t) atoi(myStr);
+
+ printf("\n");
+}
+
+void TwoWayCommunication::SetUp() {
+ uint8_t codecID_A;
+ uint8_t codecID_B;
+
+ ChooseCodec(&codecID_A, &codecID_B);
+ CodecInst codecInst_A;
+ CodecInst codecInst_B;
+ CodecInst dummyCodec;
+ EXPECT_EQ(0, _acmA->Codec(codecID_A, &codecInst_A));
+ EXPECT_EQ(0, _acmB->Codec(codecID_B, &codecInst_B));
+ EXPECT_EQ(0, _acmA->Codec(6, &dummyCodec));
+
+ //--- Set A codecs
+ EXPECT_EQ(0, _acmA->RegisterSendCodec(codecInst_A));
+ EXPECT_EQ(0, _acmA->RegisterReceiveCodec(codecInst_B));
+ //--- Set ref-A codecs
+ EXPECT_EQ(0, _acmRefA->RegisterSendCodec(codecInst_A));
+ EXPECT_EQ(0, _acmRefA->RegisterReceiveCodec(codecInst_B));
+
+ //--- Set B codecs
+ EXPECT_EQ(0, _acmB->RegisterSendCodec(codecInst_B));
+ EXPECT_EQ(0, _acmB->RegisterReceiveCodec(codecInst_A));
+
+ //--- Set ref-B codecs
+ EXPECT_EQ(0, _acmRefB->RegisterSendCodec(codecInst_B));
+ EXPECT_EQ(0, _acmRefB->RegisterReceiveCodec(codecInst_A));
+
+ uint16_t frequencyHz;
+
+ //--- Input A
+ std::string in_file_name = webrtc::test::ResourcePath(
+ "audio_coding/testfile32kHz", "pcm");
+ frequencyHz = 32000;
+ printf("Enter input file at side A [%s]: ", in_file_name.c_str());
+ PCMFile::ChooseFile(&in_file_name, 499, &frequencyHz);
+ _inFileA.Open(in_file_name, frequencyHz, "rb");
+
+ //--- Output A
+ std::string out_file_a = webrtc::test::OutputPath() + "outA.pcm";
+ printf("Output file at side A: %s\n", out_file_a.c_str());
+ printf("Sampling frequency (in Hz) of the above file: %u\n", frequencyHz);
+ _outFileA.Open(out_file_a, frequencyHz, "wb");
+ std::string ref_file_name = webrtc::test::OutputPath() + "ref_outA.pcm";
+ _outFileRefA.Open(ref_file_name, frequencyHz, "wb");
+
+ //--- Input B
+ in_file_name = webrtc::test::ResourcePath("audio_coding/testfile32kHz",
+ "pcm");
+ frequencyHz = 32000;
+ printf("\n\nEnter input file at side B [%s]: ", in_file_name.c_str());
+ PCMFile::ChooseFile(&in_file_name, 499, &frequencyHz);
+ _inFileB.Open(in_file_name, frequencyHz, "rb");
+
+ //--- Output B
+ std::string out_file_b = webrtc::test::OutputPath() + "outB.pcm";
+ printf("Output file at side B: %s\n", out_file_b.c_str());
+ printf("Sampling frequency (in Hz) of the above file: %u\n", frequencyHz);
+ _outFileB.Open(out_file_b, frequencyHz, "wb");
+ ref_file_name = webrtc::test::OutputPath() + "ref_outB.pcm";
+ _outFileRefB.Open(ref_file_name, frequencyHz, "wb");
+
+ //--- Set A-to-B channel
+ _channel_A2B = new Channel;
+ _acmA->RegisterTransportCallback(_channel_A2B);
+ _channel_A2B->RegisterReceiverACM(_acmB.get());
+ //--- Do the same for the reference
+ _channelRef_A2B = new Channel;
+ _acmRefA->RegisterTransportCallback(_channelRef_A2B);
+ _channelRef_A2B->RegisterReceiverACM(_acmRefB.get());
+
+ //--- Set B-to-A channel
+ _channel_B2A = new Channel;
+ _acmB->RegisterTransportCallback(_channel_B2A);
+ _channel_B2A->RegisterReceiverACM(_acmA.get());
+ //--- Do the same for reference
+ _channelRef_B2A = new Channel;
+ _acmRefB->RegisterTransportCallback(_channelRef_B2A);
+ _channelRef_B2A->RegisterReceiverACM(_acmRefA.get());
+}
+
+void TwoWayCommunication::SetUpAutotest() {
+ CodecInst codecInst_A;
+ CodecInst codecInst_B;
+ CodecInst dummyCodec;
+
+ EXPECT_EQ(0, _acmA->Codec("ISAC", &codecInst_A, 16000, 1));
+ EXPECT_EQ(0, _acmB->Codec("L16", &codecInst_B, 8000, 1));
+ EXPECT_EQ(0, _acmA->Codec(6, &dummyCodec));
+
+ //--- Set A codecs
+ EXPECT_EQ(0, _acmA->RegisterSendCodec(codecInst_A));
+ EXPECT_EQ(0, _acmA->RegisterReceiveCodec(codecInst_B));
+
+ //--- Set ref-A codecs
+ EXPECT_GT(_acmRefA->RegisterSendCodec(codecInst_A), -1);
+ EXPECT_GT(_acmRefA->RegisterReceiveCodec(codecInst_B), -1);
+
+ //--- Set B codecs
+ EXPECT_GT(_acmB->RegisterSendCodec(codecInst_B), -1);
+ EXPECT_GT(_acmB->RegisterReceiveCodec(codecInst_A), -1);
+
+ //--- Set ref-B codecs
+ EXPECT_EQ(0, _acmRefB->RegisterSendCodec(codecInst_B));
+ EXPECT_EQ(0, _acmRefB->RegisterReceiveCodec(codecInst_A));
+
+ uint16_t frequencyHz;
+
+ //--- Input A and B
+ std::string in_file_name = webrtc::test::ResourcePath(
+ "audio_coding/testfile32kHz", "pcm");
+ frequencyHz = 16000;
+ _inFileA.Open(in_file_name, frequencyHz, "rb");
+ _inFileB.Open(in_file_name, frequencyHz, "rb");
+
+ //--- Output A
+ std::string output_file_a = webrtc::test::OutputPath() + "outAutotestA.pcm";
+ frequencyHz = 16000;
+ _outFileA.Open(output_file_a, frequencyHz, "wb");
+ std::string output_ref_file_a = webrtc::test::OutputPath()
+ + "ref_outAutotestA.pcm";
+ _outFileRefA.Open(output_ref_file_a, frequencyHz, "wb");
+
+ //--- Output B
+ std::string output_file_b = webrtc::test::OutputPath() + "outAutotestB.pcm";
+ frequencyHz = 16000;
+ _outFileB.Open(output_file_b, frequencyHz, "wb");
+ std::string output_ref_file_b = webrtc::test::OutputPath()
+ + "ref_outAutotestB.pcm";
+ _outFileRefB.Open(output_ref_file_b, frequencyHz, "wb");
+
+ //--- Set A-to-B channel
+ _channel_A2B = new Channel;
+ _acmA->RegisterTransportCallback(_channel_A2B);
+ _channel_A2B->RegisterReceiverACM(_acmB.get());
+ //--- Do the same for the reference
+ _channelRef_A2B = new Channel;
+ _acmRefA->RegisterTransportCallback(_channelRef_A2B);
+ _channelRef_A2B->RegisterReceiverACM(_acmRefB.get());
+
+ //--- Set B-to-A channel
+ _channel_B2A = new Channel;
+ _acmB->RegisterTransportCallback(_channel_B2A);
+ _channel_B2A->RegisterReceiverACM(_acmA.get());
+ //--- Do the same for reference
+ _channelRef_B2A = new Channel;
+ _acmRefB->RegisterTransportCallback(_channelRef_B2A);
+ _channelRef_B2A->RegisterReceiverACM(_acmRefA.get());
+}
+
+void TwoWayCommunication::Perform() {
+ if (_testMode == 0) {
+ SetUpAutotest();
+ } else {
+ SetUp();
+ }
+ unsigned int msecPassed = 0;
+ unsigned int secPassed = 0;
+
+ int32_t outFreqHzA = _outFileA.SamplingFrequency();
+ int32_t outFreqHzB = _outFileB.SamplingFrequency();
+
+ AudioFrame audioFrame;
+
+ auto codecInst_B = _acmB->SendCodec();
+ ASSERT_TRUE(codecInst_B);
+
+ // In the following loop we test that the code can handle misuse of the
+ // APIs. In the middle of a session, with data flowing between the two
+ // sides A and B, APIs will be called, and the code should continue to run
+ // and be able to recover.
+ while (!_inFileA.EndOfFile() && !_inFileB.EndOfFile()) {
+ msecPassed += 10;
+ EXPECT_GT(_inFileA.Read10MsData(audioFrame), 0);
+ EXPECT_GE(_acmA->Add10MsData(audioFrame), 0);
+ EXPECT_GE(_acmRefA->Add10MsData(audioFrame), 0);
+
+ EXPECT_GT(_inFileB.Read10MsData(audioFrame), 0);
+
+ EXPECT_GE(_acmB->Add10MsData(audioFrame), 0);
+ EXPECT_GE(_acmRefB->Add10MsData(audioFrame), 0);
+ EXPECT_EQ(0, _acmA->PlayoutData10Ms(outFreqHzA, &audioFrame));
+ _outFileA.Write10MsData(audioFrame);
+ EXPECT_EQ(0, _acmRefA->PlayoutData10Ms(outFreqHzA, &audioFrame));
+ _outFileRefA.Write10MsData(audioFrame);
+ EXPECT_EQ(0, _acmB->PlayoutData10Ms(outFreqHzB, &audioFrame));
+ _outFileB.Write10MsData(audioFrame);
+ EXPECT_EQ(0, _acmRefB->PlayoutData10Ms(outFreqHzB, &audioFrame));
+ _outFileRefB.Write10MsData(audioFrame);
+
+ // Update time counters each time a second of data has passed.
+ if (msecPassed >= 1000) {
+ msecPassed = 0;
+ secPassed++;
+ }
+ // Re-register send codec on side B.
+ if (((secPassed % 5) == 4) && (msecPassed >= 990)) {
+ EXPECT_EQ(0, _acmB->RegisterSendCodec(*codecInst_B));
+ EXPECT_TRUE(_acmB->SendCodec());
+ }
+ // Initialize receiver on side A.
+ if (((secPassed % 7) == 6) && (msecPassed == 0))
+ EXPECT_EQ(0, _acmA->InitializeReceiver());
+ // Re-register codec on side A.
+ if (((secPassed % 7) == 6) && (msecPassed >= 990)) {
+ EXPECT_EQ(0, _acmA->RegisterReceiveCodec(*codecInst_B));
+ }
+ }
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/test/TwoWayCommunication.h b/webrtc/modules/audio_coding/test/TwoWayCommunication.h
new file mode 100644
index 0000000000..77639935da
--- /dev/null
+++ b/webrtc/modules/audio_coding/test/TwoWayCommunication.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_TEST_TWOWAYCOMMUNICATION_H_
+#define WEBRTC_MODULES_AUDIO_CODING_TEST_TWOWAYCOMMUNICATION_H_
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module.h"
+#include "webrtc/modules/audio_coding/test/ACMTest.h"
+#include "webrtc/modules/audio_coding/test/Channel.h"
+#include "webrtc/modules/audio_coding/test/PCMFile.h"
+#include "webrtc/modules/audio_coding/test/utility.h"
+
+namespace webrtc {
+
+class TwoWayCommunication : public ACMTest {
+ public:
+ explicit TwoWayCommunication(int testMode);
+ ~TwoWayCommunication();
+
+ void Perform();
+ private:
+ void ChooseCodec(uint8_t* codecID_A, uint8_t* codecID_B);
+ void SetUp();
+ void SetUpAutotest();
+
+ rtc::scoped_ptr<AudioCodingModule> _acmA;
+ rtc::scoped_ptr<AudioCodingModule> _acmB;
+
+ rtc::scoped_ptr<AudioCodingModule> _acmRefA;
+ rtc::scoped_ptr<AudioCodingModule> _acmRefB;
+
+ Channel* _channel_A2B;
+ Channel* _channel_B2A;
+
+ Channel* _channelRef_A2B;
+ Channel* _channelRef_B2A;
+
+ PCMFile _inFileA;
+ PCMFile _inFileB;
+
+ PCMFile _outFileA;
+ PCMFile _outFileB;
+
+ PCMFile _outFileRefA;
+ PCMFile _outFileRefB;
+
+ int _testMode;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_TEST_TWOWAYCOMMUNICATION_H_
diff --git a/webrtc/modules/audio_coding/test/delay_test.cc b/webrtc/modules/audio_coding/test/delay_test.cc
new file mode 100644
index 0000000000..a8c137f501
--- /dev/null
+++ b/webrtc/modules/audio_coding/test/delay_test.cc
@@ -0,0 +1,265 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <assert.h>
+#include <math.h>
+
+#include <iostream>
+
+#include "gflags/gflags.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/common.h"
+#include "webrtc/common_types.h"
+#include "webrtc/engine_configurations.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module_typedefs.h"
+#include "webrtc/modules/audio_coding/acm2/acm_common_defs.h"
+#include "webrtc/modules/audio_coding/test/Channel.h"
+#include "webrtc/modules/audio_coding/test/PCMFile.h"
+#include "webrtc/modules/audio_coding/test/utility.h"
+#include "webrtc/system_wrappers/include/event_wrapper.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+DEFINE_string(codec, "isac", "Codec Name");
+DEFINE_int32(sample_rate_hz, 16000, "Sampling rate in Hertz.");
+DEFINE_int32(num_channels, 1, "Number of Channels.");
+DEFINE_string(input_file, "", "Input file, PCM16 32 kHz, optional.");
+DEFINE_int32(delay, 0, "Delay in millisecond.");
+DEFINE_bool(dtx, false, "Enable DTX at the sender side.");
+DEFINE_bool(packet_loss, false, "Apply packet loss, c.f. Channel{.cc, .h}.");
+DEFINE_bool(fec, false, "Use Forward Error Correction (FEC).");
+
+namespace webrtc {
+
+namespace {
+
+struct CodecSettings {
+ char name[50];
+ int sample_rate_hz;
+ int num_channels;
+};
+
+struct AcmSettings {
+ bool dtx;
+ bool fec;
+};
+
+struct TestSettings {
+ CodecSettings codec;
+ AcmSettings acm;
+ bool packet_loss;
+};
+
+} // namespace
+
+class DelayTest {
+ public:
+ DelayTest()
+ : acm_a_(AudioCodingModule::Create(0)),
+ acm_b_(AudioCodingModule::Create(1)),
+ channel_a2b_(new Channel),
+ test_cntr_(0),
+ encoding_sample_rate_hz_(8000) {}
+
+ ~DelayTest() {
+ if (channel_a2b_ != NULL) {
+ delete channel_a2b_;
+ channel_a2b_ = NULL;
+ }
+ in_file_a_.Close();
+ }
+
+ void Initialize() {
+ test_cntr_ = 0;
+ std::string file_name = webrtc::test::ResourcePath(
+ "audio_coding/testfile32kHz", "pcm");
+ if (FLAGS_input_file.size() > 0)
+ file_name = FLAGS_input_file;
+ in_file_a_.Open(file_name, 32000, "rb");
+ ASSERT_EQ(0, acm_a_->InitializeReceiver()) <<
+ "Couldn't initialize receiver.\n";
+ ASSERT_EQ(0, acm_b_->InitializeReceiver()) <<
+ "Couldn't initialize receiver.\n";
+
+ if (FLAGS_delay > 0) {
+ ASSERT_EQ(0, acm_b_->SetMinimumPlayoutDelay(FLAGS_delay)) <<
+ "Failed to set minimum delay.\n";
+ }
+
+ int num_encoders = acm_a_->NumberOfCodecs();
+ CodecInst my_codec_param;
+ for (int n = 0; n < num_encoders; n++) {
+ EXPECT_EQ(0, acm_b_->Codec(n, &my_codec_param)) <<
+ "Failed to get codec.";
+ if (STR_CASE_CMP(my_codec_param.plname, "opus") == 0)
+ my_codec_param.channels = 1;
+ else if (my_codec_param.channels > 1)
+ continue;
+ if (STR_CASE_CMP(my_codec_param.plname, "CN") == 0 &&
+ my_codec_param.plfreq == 48000)
+ continue;
+ if (STR_CASE_CMP(my_codec_param.plname, "telephone-event") == 0)
+ continue;
+ ASSERT_EQ(0, acm_b_->RegisterReceiveCodec(my_codec_param)) <<
+ "Couldn't register receive codec.\n";
+ }
+
+ // Create and connect the channel
+ ASSERT_EQ(0, acm_a_->RegisterTransportCallback(channel_a2b_)) <<
+ "Couldn't register Transport callback.\n";
+ channel_a2b_->RegisterReceiverACM(acm_b_.get());
+ }
+
+ void Perform(const TestSettings* config, size_t num_tests, int duration_sec,
+ const char* output_prefix) {
+ for (size_t n = 0; n < num_tests; ++n) {
+ ApplyConfig(config[n]);
+ Run(duration_sec, output_prefix);
+ }
+ }
+
+ private:
+ void ApplyConfig(const TestSettings& config) {
+ printf("====================================\n");
+ printf("Test %d \n"
+ "Codec: %s, %d kHz, %d channel(s)\n"
+ "ACM: DTX %s, FEC %s\n"
+ "Channel: %s\n",
+ ++test_cntr_, config.codec.name, config.codec.sample_rate_hz,
+ config.codec.num_channels, config.acm.dtx ? "on" : "off",
+ config.acm.fec ? "on" : "off",
+ config.packet_loss ? "with packet-loss" : "no packet-loss");
+ SendCodec(config.codec);
+ ConfigAcm(config.acm);
+ ConfigChannel(config.packet_loss);
+ }
+
+ void SendCodec(const CodecSettings& config) {
+ CodecInst my_codec_param;
+ ASSERT_EQ(0, AudioCodingModule::Codec(
+ config.name, &my_codec_param, config.sample_rate_hz,
+ config.num_channels)) << "Specified codec is not supported.\n";
+
+ encoding_sample_rate_hz_ = my_codec_param.plfreq;
+ ASSERT_EQ(0, acm_a_->RegisterSendCodec(my_codec_param)) <<
+ "Failed to register send-codec.\n";
+ }
+
+ void ConfigAcm(const AcmSettings& config) {
+ ASSERT_EQ(0, acm_a_->SetVAD(config.dtx, config.dtx, VADAggr)) <<
+ "Failed to set VAD.\n";
+ ASSERT_EQ(0, acm_a_->SetREDStatus(config.fec)) <<
+ "Failed to set RED.\n";
+ }
+
+ void ConfigChannel(bool packet_loss) {
+ channel_a2b_->SetFECTestWithPacketLoss(packet_loss);
+ }
+
+ void OpenOutFile(const char* output_id) {
+ std::stringstream file_stream;
+ file_stream << "delay_test_" << FLAGS_codec << "_" << FLAGS_sample_rate_hz
+ << "Hz" << "_" << FLAGS_delay << "ms.pcm";
+ std::cout << "Output file: " << file_stream.str() << std::endl << std::endl;
+ std::string file_name = webrtc::test::OutputPath() + file_stream.str();
+ out_file_b_.Open(file_name.c_str(), 32000, "wb");
+ }
+
+ void Run(int duration_sec, const char* output_prefix) {
+ OpenOutFile(output_prefix);
+ AudioFrame audio_frame;
+ uint32_t out_freq_hz_b = out_file_b_.SamplingFrequency();
+
+ int num_frames = 0;
+ int in_file_frames = 0;
+ uint32_t playout_ts;
+ uint32_t received_ts;
+ double average_delay = 0;
+ double inst_delay_sec = 0;
+ while (num_frames < (duration_sec * 100)) {
+ if (in_file_a_.EndOfFile()) {
+ in_file_a_.Rewind();
+ }
+
+ // Print delay information every 64 frames.
+ if ((num_frames & 0x3F) == 0x3F) {
+ NetworkStatistics statistics;
+ acm_b_->GetNetworkStatistics(&statistics);
+ fprintf(stdout, "delay: min=%3d max=%3d mean=%3d median=%3d"
+ " ts-based average = %6.3f, "
+ "curr buff-lev = %4u opt buff-lev = %4u \n",
+ statistics.minWaitingTimeMs, statistics.maxWaitingTimeMs,
+ statistics.meanWaitingTimeMs, statistics.medianWaitingTimeMs,
+ average_delay, statistics.currentBufferSize,
+ statistics.preferredBufferSize);
+ fflush(stdout);
+ }
+
+ in_file_a_.Read10MsData(audio_frame);
+ ASSERT_GE(acm_a_->Add10MsData(audio_frame), 0);
+ ASSERT_EQ(0, acm_b_->PlayoutData10Ms(out_freq_hz_b, &audio_frame));
+ out_file_b_.Write10MsData(
+ audio_frame.data_,
+ audio_frame.samples_per_channel_ * audio_frame.num_channels_);
+ acm_b_->PlayoutTimestamp(&playout_ts);
+ received_ts = channel_a2b_->LastInTimestamp();
+ inst_delay_sec = static_cast<uint32_t>(received_ts - playout_ts)
+ / static_cast<double>(encoding_sample_rate_hz_);
+
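+ // Smooth the instantaneous delay with an exponential moving average
+ // (0.95 decay), skipping the first 10 frames to let the pipeline settle.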
+ if (num_frames > 10)
+ average_delay = 0.95 * average_delay + 0.05 * inst_delay_sec;
+
+ ++num_frames;
+ ++in_file_frames;
+ }
+ out_file_b_.Close();
+ }
+
+ rtc::scoped_ptr<AudioCodingModule> acm_a_;
+ rtc::scoped_ptr<AudioCodingModule> acm_b_;
+
+ Channel* channel_a2b_;
+
+ PCMFile in_file_a_;
+ PCMFile out_file_b_;
+ int test_cntr_;
+ int encoding_sample_rate_hz_;
+};
+
+} // namespace webrtc
+
+int main(int argc, char* argv[]) {
+ google::ParseCommandLineFlags(&argc, &argv, true);
+ webrtc::TestSettings test_setting;
+ strcpy(test_setting.codec.name, FLAGS_codec.c_str());
+
+ if (FLAGS_sample_rate_hz != 8000 &&
+ FLAGS_sample_rate_hz != 16000 &&
+ FLAGS_sample_rate_hz != 32000 &&
+ FLAGS_sample_rate_hz != 48000) {
+ std::cout << "Invalid sampling rate.\n";
+ return 1;
+ }
+ test_setting.codec.sample_rate_hz = FLAGS_sample_rate_hz;
+ if (FLAGS_num_channels < 1 || FLAGS_num_channels > 2) {
+ std::cout << "Only mono and stereo are supported.\n";
+ return 1;
+ }
+ test_setting.codec.num_channels = FLAGS_num_channels;
+ test_setting.acm.dtx = FLAGS_dtx;
+ test_setting.acm.fec = FLAGS_fec;
+ test_setting.packet_loss = FLAGS_packet_loss;
+
+ webrtc::DelayTest delay_test;
+ delay_test.Initialize();
+ delay_test.Perform(&test_setting, 1, 240, "delay_test");
+ return 0;
+}
diff --git a/webrtc/modules/audio_coding/test/iSACTest.cc b/webrtc/modules/audio_coding/test/iSACTest.cc
new file mode 100644
index 0000000000..9f223fb81f
--- /dev/null
+++ b/webrtc/modules/audio_coding/test/iSACTest.cc
@@ -0,0 +1,343 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/test/iSACTest.h"
+
+#include <ctype.h>
+#include <stdio.h>
+#include <string.h>
+
+#if _WIN32
+#include <windows.h>
+#elif WEBRTC_LINUX
+#include <time.h>
+#else
+#include <sys/time.h>
+#include <time.h>
+#endif
+
+#include "webrtc/modules/audio_coding/acm2/acm_common_defs.h"
+#include "webrtc/modules/audio_coding/test/utility.h"
+#include "webrtc/system_wrappers/include/event_wrapper.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/system_wrappers/include/trace.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+namespace webrtc {
+
+void SetISACConfigDefault(ACMTestISACConfig& isacConfig) {
+ isacConfig.currentRateBitPerSec = 0;
+ isacConfig.currentFrameSizeMsec = 0;
+ isacConfig.encodingMode = -1;
+ isacConfig.initRateBitPerSec = 0;
+ isacConfig.initFrameSizeInMsec = 0;
+ isacConfig.enforceFrameSize = false;
+ return;
+}
+
+int16_t SetISAConfig(ACMTestISACConfig& isacConfig, AudioCodingModule* acm,
+ int testMode) {
+
+ if ((isacConfig.currentRateBitPerSec != 0)
+ || (isacConfig.currentFrameSizeMsec != 0)) {
+ auto sendCodec = acm->SendCodec();
+ EXPECT_TRUE(sendCodec);
+ if (isacConfig.currentRateBitPerSec < 0) {
+ // Register iSAC in adaptive (channel-dependent) mode.
+ sendCodec->rate = -1;
+ EXPECT_EQ(0, acm->RegisterSendCodec(*sendCodec));
+ } else {
+ if (isacConfig.currentRateBitPerSec != 0) {
+ sendCodec->rate = isacConfig.currentRateBitPerSec;
+ }
+ if (isacConfig.currentFrameSizeMsec != 0) {
+ sendCodec->pacsize = isacConfig.currentFrameSizeMsec
+ * (sendCodec->plfreq / 1000);
+ }
+ EXPECT_EQ(0, acm->RegisterSendCodec(*sendCodec));
+ }
+ }
+
+ return 0;
+}
+
+ISACTest::ISACTest(int testMode)
+ : _acmA(AudioCodingModule::Create(1)),
+ _acmB(AudioCodingModule::Create(2)),
+ _testMode(testMode) {}
+
+ISACTest::~ISACTest() {}
+
+void ISACTest::Setup() {
+ int codecCntr;
+ CodecInst codecParam;
+
+ for (codecCntr = 0; codecCntr < AudioCodingModule::NumberOfCodecs();
+ codecCntr++) {
+ EXPECT_EQ(0, AudioCodingModule::Codec(codecCntr, &codecParam));
+ if (!STR_CASE_CMP(codecParam.plname, "ISAC")
+ && codecParam.plfreq == 16000) {
+ memcpy(&_paramISAC16kHz, &codecParam, sizeof(CodecInst));
+ _idISAC16kHz = codecCntr;
+ }
+ if (!STR_CASE_CMP(codecParam.plname, "ISAC")
+ && codecParam.plfreq == 32000) {
+ memcpy(&_paramISAC32kHz, &codecParam, sizeof(CodecInst));
+ _idISAC32kHz = codecCntr;
+ }
+ }
+
+ // Register both iSAC-wb & iSAC-swb in both sides as receiver codecs.
+ EXPECT_EQ(0, _acmA->RegisterReceiveCodec(_paramISAC16kHz));
+ EXPECT_EQ(0, _acmA->RegisterReceiveCodec(_paramISAC32kHz));
+ EXPECT_EQ(0, _acmB->RegisterReceiveCodec(_paramISAC16kHz));
+ EXPECT_EQ(0, _acmB->RegisterReceiveCodec(_paramISAC32kHz));
+
+ //--- Set A-to-B channel
+ _channel_A2B.reset(new Channel);
+ EXPECT_EQ(0, _acmA->RegisterTransportCallback(_channel_A2B.get()));
+ _channel_A2B->RegisterReceiverACM(_acmB.get());
+
+ //--- Set B-to-A channel
+ _channel_B2A.reset(new Channel);
+ EXPECT_EQ(0, _acmB->RegisterTransportCallback(_channel_B2A.get()));
+ _channel_B2A->RegisterReceiverACM(_acmA.get());
+
+ file_name_swb_ = webrtc::test::ResourcePath("audio_coding/testfile32kHz",
+ "pcm");
+
+ EXPECT_EQ(0, _acmB->RegisterSendCodec(_paramISAC16kHz));
+ EXPECT_EQ(0, _acmA->RegisterSendCodec(_paramISAC32kHz));
+
+ _inFileA.Open(file_name_swb_, 32000, "rb");
+ // Set test length to 500 ms (50 blocks of 10 ms each).
+ _inFileA.SetNum10MsBlocksToRead(50);
+ // Fast-forward 1 second (100 blocks) since the file starts with silence.
+ _inFileA.FastForward(100);
+ std::string fileNameA = webrtc::test::OutputPath() + "testisac_a.pcm";
+ std::string fileNameB = webrtc::test::OutputPath() + "testisac_b.pcm";
+ _outFileA.Open(fileNameA, 32000, "wb");
+ _outFileB.Open(fileNameB, 32000, "wb");
+
+ while (!_inFileA.EndOfFile()) {
+ Run10ms();
+ }
+ CodecInst receiveCodec;
+ EXPECT_EQ(0, _acmA->ReceiveCodec(&receiveCodec));
+ EXPECT_EQ(0, _acmB->ReceiveCodec(&receiveCodec));
+
+ _inFileA.Close();
+ _outFileA.Close();
+ _outFileB.Close();
+}
+
+void ISACTest::Perform() {
+ Setup();
+
+ int16_t testNr = 0;
+ ACMTestISACConfig wbISACConfig;
+ ACMTestISACConfig swbISACConfig;
+
+ SetISACConfigDefault(wbISACConfig);
+ SetISACConfigDefault(swbISACConfig);
+
+ wbISACConfig.currentRateBitPerSec = -1;
+ swbISACConfig.currentRateBitPerSec = -1;
+ testNr++;
+ EncodeDecode(testNr, wbISACConfig, swbISACConfig);
+
+ if (_testMode != 0) {
+ SetISACConfigDefault(wbISACConfig);
+ SetISACConfigDefault(swbISACConfig);
+
+ wbISACConfig.currentRateBitPerSec = -1;
+ swbISACConfig.currentRateBitPerSec = -1;
+ wbISACConfig.initRateBitPerSec = 13000;
+ wbISACConfig.initFrameSizeInMsec = 60;
+ swbISACConfig.initRateBitPerSec = 20000;
+ swbISACConfig.initFrameSizeInMsec = 30;
+ testNr++;
+ EncodeDecode(testNr, wbISACConfig, swbISACConfig);
+
+ SetISACConfigDefault(wbISACConfig);
+ SetISACConfigDefault(swbISACConfig);
+
+ wbISACConfig.currentRateBitPerSec = 20000;
+ swbISACConfig.currentRateBitPerSec = 48000;
+ testNr++;
+ EncodeDecode(testNr, wbISACConfig, swbISACConfig);
+
+ wbISACConfig.currentRateBitPerSec = 16000;
+ swbISACConfig.currentRateBitPerSec = 30000;
+ wbISACConfig.currentFrameSizeMsec = 60;
+ testNr++;
+ EncodeDecode(testNr, wbISACConfig, swbISACConfig);
+ }
+
+ SetISACConfigDefault(wbISACConfig);
+ SetISACConfigDefault(swbISACConfig);
+ testNr++;
+ EncodeDecode(testNr, wbISACConfig, swbISACConfig);
+
+ testNr++;
+ if (_testMode == 0) {
+ SwitchingSamplingRate(testNr, 4);
+ } else {
+ SwitchingSamplingRate(testNr, 80);
+ }
+}
+
+void ISACTest::Run10ms() {
+ AudioFrame audioFrame;
+ EXPECT_GT(_inFileA.Read10MsData(audioFrame), 0);
+ EXPECT_GE(_acmA->Add10MsData(audioFrame), 0);
+ EXPECT_GE(_acmB->Add10MsData(audioFrame), 0);
+ EXPECT_EQ(0, _acmA->PlayoutData10Ms(32000, &audioFrame));
+ _outFileA.Write10MsData(audioFrame);
+ EXPECT_EQ(0, _acmB->PlayoutData10Ms(32000, &audioFrame));
+ _outFileB.Write10MsData(audioFrame);
+}
+
+void ISACTest::EncodeDecode(int testNr, ACMTestISACConfig& wbISACConfig,
+ ACMTestISACConfig& swbISACConfig) {
+ // Files in Side A and B
+ _inFileA.Open(file_name_swb_, 32000, "rb", true);
+ _inFileB.Open(file_name_swb_, 32000, "rb", true);
+
+ std::string file_name_out;
+ std::stringstream file_stream_a;
+ std::stringstream file_stream_b;
+ file_stream_a << webrtc::test::OutputPath();
+ file_stream_b << webrtc::test::OutputPath();
+ file_stream_a << "out_iSACTest_A_" << testNr << ".pcm";
+ file_stream_b << "out_iSACTest_B_" << testNr << ".pcm";
+ file_name_out = file_stream_a.str();
+ _outFileA.Open(file_name_out, 32000, "wb");
+ file_name_out = file_stream_b.str();
+ _outFileB.Open(file_name_out, 32000, "wb");
+
+ EXPECT_EQ(0, _acmA->RegisterSendCodec(_paramISAC16kHz));
+ EXPECT_EQ(0, _acmA->RegisterSendCodec(_paramISAC32kHz));
+ EXPECT_EQ(0, _acmB->RegisterSendCodec(_paramISAC32kHz));
+ EXPECT_EQ(0, _acmB->RegisterSendCodec(_paramISAC16kHz));
+
+ // Side A is sending super-wideband, and side B is sending wideband.
+ SetISAConfig(swbISACConfig, _acmA.get(), _testMode);
+ SetISAConfig(wbISACConfig, _acmB.get(), _testMode);
+
+ bool adaptiveMode = false;
+ if ((swbISACConfig.currentRateBitPerSec == -1)
+ || (wbISACConfig.currentRateBitPerSec == -1)) {
+ adaptiveMode = true;
+ }
+ _myTimer.Reset();
+ _channel_A2B->ResetStats();
+ _channel_B2A->ResetStats();
+
+ char currentTime[500];
+ EventTimerWrapper* myEvent = EventTimerWrapper::Create();
+ EXPECT_TRUE(myEvent->StartTimer(true, 10));
+ while (!(_inFileA.EndOfFile() || _inFileA.Rewinded())) {
+ Run10ms();
+ _myTimer.Tick10ms();
+ _myTimer.CurrentTimeHMS(currentTime);
+
+ if ((adaptiveMode) && (_testMode != 0)) {
+ myEvent->Wait(5000);
+ EXPECT_TRUE(_acmA->SendCodec());
+ EXPECT_TRUE(_acmB->SendCodec());
+ }
+ }
+
+ if (_testMode != 0) {
+ printf("\n\nSide A statistics\n\n");
+ _channel_A2B->PrintStats(_paramISAC32kHz);
+
+ printf("\n\nSide B statistics\n\n");
+ _channel_B2A->PrintStats(_paramISAC16kHz);
+ }
+
+ _channel_A2B->ResetStats();
+ _channel_B2A->ResetStats();
+
+ _outFileA.Close();
+ _outFileB.Close();
+ _inFileA.Close();
+ _inFileB.Close();
+}
+
+void ISACTest::SwitchingSamplingRate(int testNr, int maxSampRateChange) {
+ // Files in Side A and B
+ _inFileA.Open(file_name_swb_, 32000, "rb");
+ _inFileB.Open(file_name_swb_, 32000, "rb");
+
+ std::string file_name_out;
+ std::stringstream file_stream_a;
+ std::stringstream file_stream_b;
+ file_stream_a << webrtc::test::OutputPath();
+ file_stream_b << webrtc::test::OutputPath();
+ file_stream_a << "out_iSACTest_A_" << testNr << ".pcm";
+ file_stream_b << "out_iSACTest_B_" << testNr << ".pcm";
+ file_name_out = file_stream_a.str();
+ _outFileA.Open(file_name_out, 32000, "wb");
+ file_name_out = file_stream_b.str();
+ _outFileB.Open(file_name_out, 32000, "wb");
+
+ // Start with side A sending super-wideband and side B sending wideband.
+ // Toggle sending wideband/super-wideband in this test.
+ EXPECT_EQ(0, _acmA->RegisterSendCodec(_paramISAC32kHz));
+ EXPECT_EQ(0, _acmB->RegisterSendCodec(_paramISAC16kHz));
+
+ int numSendCodecChanged = 0;
+ _myTimer.Reset();
+ char currentTime[50];
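+ // Both sides toggle between wideband and super-wideband once per file
+ // pass, so the loop runs until 2 * maxSampRateChange switches have been
+ // made in total.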
+ while (numSendCodecChanged < (maxSampRateChange << 1)) {
+ Run10ms();
+ _myTimer.Tick10ms();
+ _myTimer.CurrentTimeHMS(currentTime);
+ if (_testMode == 2)
+ printf("\r%s", currentTime);
+ if (_inFileA.EndOfFile()) {
+ if (_inFileA.SamplingFrequency() == 16000) {
+ // Switch side A to send super-wideband.
+ _inFileA.Close();
+ _inFileA.Open(file_name_swb_, 32000, "rb");
+ EXPECT_EQ(0, _acmA->RegisterSendCodec(_paramISAC32kHz));
+ } else {
+ // Switch side A to send wideband.
+ _inFileA.Close();
+ _inFileA.Open(file_name_swb_, 32000, "rb");
+ EXPECT_EQ(0, _acmA->RegisterSendCodec(_paramISAC16kHz));
+ }
+ numSendCodecChanged++;
+ }
+
+ if (_inFileB.EndOfFile()) {
+ if (_inFileB.SamplingFrequency() == 16000) {
+ // Switch side B to send super-wideband.
+ _inFileB.Close();
+ _inFileB.Open(file_name_swb_, 32000, "rb");
+ EXPECT_EQ(0, _acmB->RegisterSendCodec(_paramISAC32kHz));
+ } else {
+ // Switch side B to send wideband.
+ _inFileB.Close();
+ _inFileB.Open(file_name_swb_, 32000, "rb");
+ EXPECT_EQ(0, _acmB->RegisterSendCodec(_paramISAC16kHz));
+ }
+ numSendCodecChanged++;
+ }
+ }
+ _outFileA.Close();
+ _outFileB.Close();
+ _inFileA.Close();
+ _inFileB.Close();
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/test/iSACTest.h b/webrtc/modules/audio_coding/test/iSACTest.h
new file mode 100644
index 0000000000..c5bb515437
--- /dev/null
+++ b/webrtc/modules/audio_coding/test/iSACTest.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_TEST_ISACTEST_H_
+#define WEBRTC_MODULES_AUDIO_CODING_TEST_ISACTEST_H_
+
+#include <string.h>
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/common_types.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module.h"
+#include "webrtc/modules/audio_coding/test/ACMTest.h"
+#include "webrtc/modules/audio_coding/test/Channel.h"
+#include "webrtc/modules/audio_coding/test/PCMFile.h"
+#include "webrtc/modules/audio_coding/test/utility.h"
+
+#define MAX_FILE_NAME_LENGTH_BYTE 500
+#define NO_OF_CLIENTS 15
+
+namespace webrtc {
+
+struct ACMTestISACConfig {
+ int32_t currentRateBitPerSec;
+ int16_t currentFrameSizeMsec;
+ int16_t encodingMode;
+ uint32_t initRateBitPerSec;
+ int16_t initFrameSizeInMsec;
+ bool enforceFrameSize;
+};
+
+class ISACTest : public ACMTest {
+ public:
+ explicit ISACTest(int testMode);
+ ~ISACTest();
+
+ void Perform();
+ private:
+ void Setup();
+
+ void Run10ms();
+
+ void EncodeDecode(int testNr, ACMTestISACConfig& wbISACConfig,
+ ACMTestISACConfig& swbISACConfig);
+
+ void SwitchingSamplingRate(int testNr, int maxSampRateChange);
+
+ rtc::scoped_ptr<AudioCodingModule> _acmA;
+ rtc::scoped_ptr<AudioCodingModule> _acmB;
+
+ rtc::scoped_ptr<Channel> _channel_A2B;
+ rtc::scoped_ptr<Channel> _channel_B2A;
+
+ PCMFile _inFileA;
+ PCMFile _inFileB;
+
+ PCMFile _outFileA;
+ PCMFile _outFileB;
+
+ uint8_t _idISAC16kHz;
+ uint8_t _idISAC32kHz;
+ CodecInst _paramISAC16kHz;
+ CodecInst _paramISAC32kHz;
+
+ std::string file_name_swb_;
+
+ ACMTestTimer _myTimer;
+ int _testMode;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_TEST_ISACTEST_H_
diff --git a/webrtc/modules/audio_coding/test/insert_packet_with_timing.cc b/webrtc/modules/audio_coding/test/insert_packet_with_timing.cc
new file mode 100644
index 0000000000..481df55ffd
--- /dev/null
+++ b/webrtc/modules/audio_coding/test/insert_packet_with_timing.cc
@@ -0,0 +1,307 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+
+#include "gflags/gflags.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/common_types.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module.h"
+#include "webrtc/modules/audio_coding/test/Channel.h"
+#include "webrtc/modules/audio_coding/test/PCMFile.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/system_wrappers/include/clock.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+// Codec.
+DEFINE_string(codec, "opus", "Codec Name");
+DEFINE_int32(codec_sample_rate_hz, 48000, "Sampling rate in Hertz.");
+DEFINE_int32(codec_channels, 1, "Number of channels of the codec.");
+
+// PCM input/output.
+DEFINE_string(input, "", "Input PCM file at 16 kHz.");
+DEFINE_bool(input_stereo, false, "Input is stereo.");
+DEFINE_int32(input_fs_hz, 32000, "Input sample rate Hz.");
+DEFINE_string(output, "insert_rtp_with_timing_out.pcm", "Output file.");
+DEFINE_int32(output_fs_hz, 32000, "Output sample rate Hz.");
+
+// Timing files
+DEFINE_string(seq_num, "seq_num", "Sequence number file.");
+DEFINE_string(send_ts, "send_timestamp", "Send timestamp file.");
+DEFINE_string(receive_ts, "last_rec_timestamp", "Receive timestamp file.");
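+// Each timing file is plain text with one decimal value per line; the three
+// files are read in lockstep, one line per packet.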
+
+// Delay logging
+DEFINE_string(delay, "", "Log for delay.");
+
+// Other setups
+DEFINE_bool(verbose, false, "Verbosity.");
+DEFINE_double(loss_rate, 0, "Packet loss rate, in the range [0, 1).");
+
+const int32_t kAudioPlayedOut = 0x00000001;
+const int32_t kPacketPushedIn = 0x00000001 << 1;
+const int kPlayoutPeriodMs = 10;
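+
+// A typical invocation might look like this (the file names are only
+// illustrative):
+//   insert_packet_with_timing --codec=opus --seq_num=seq.txt \
+//       --send_ts=send.txt --receive_ts=recv.txt --output=out.pcm \
+//       --loss_rate=0.05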
+
+namespace webrtc {
+
+class InsertPacketWithTiming {
+ public:
+ InsertPacketWithTiming()
+ : sender_clock_(new SimulatedClock(0)),
+ receiver_clock_(new SimulatedClock(0)),
+ send_acm_(AudioCodingModule::Create(0, sender_clock_)),
+ receive_acm_(AudioCodingModule::Create(0, receiver_clock_)),
+ channel_(new Channel),
+ seq_num_fid_(fopen(FLAGS_seq_num.c_str(), "rt")),
+ send_ts_fid_(fopen(FLAGS_send_ts.c_str(), "rt")),
+ receive_ts_fid_(fopen(FLAGS_receive_ts.c_str(), "rt")),
+ pcm_out_fid_(fopen(FLAGS_output.c_str(), "wb")),
+ samples_in_1ms_(48),
+ num_10ms_in_codec_frame_(2), // Typical 20 ms frames.
+ time_to_insert_packet_ms_(3), // An arbitrary offset on pushing packet.
+ next_receive_ts_(0),
+ time_to_playout_audio_ms_(kPlayoutPeriodMs),
+ loss_threshold_(0),
+ playout_timing_fid_(fopen("playout_timing.txt", "wt")) {}
+
+ void SetUp() {
+ ASSERT_TRUE(sender_clock_ != NULL);
+ ASSERT_TRUE(receiver_clock_ != NULL);
+
+ ASSERT_TRUE(send_acm_.get() != NULL);
+ ASSERT_TRUE(receive_acm_.get() != NULL);
+ ASSERT_TRUE(channel_ != NULL);
+
+ ASSERT_TRUE(seq_num_fid_ != NULL);
+ ASSERT_TRUE(send_ts_fid_ != NULL);
+ ASSERT_TRUE(receive_ts_fid_ != NULL);
+
+ ASSERT_TRUE(playout_timing_fid_ != NULL);
+
+ next_receive_ts_ = ReceiveTimestamp();
+
+ CodecInst codec;
+ ASSERT_EQ(0, AudioCodingModule::Codec(FLAGS_codec.c_str(), &codec,
+ FLAGS_codec_sample_rate_hz,
+ FLAGS_codec_channels));
+ ASSERT_EQ(0, receive_acm_->InitializeReceiver());
+ ASSERT_EQ(0, send_acm_->RegisterSendCodec(codec));
+ ASSERT_EQ(0, receive_acm_->RegisterReceiveCodec(codec));
+
+ // Set codec-dependent parameters.
+ samples_in_1ms_ = codec.plfreq / 1000;
+ num_10ms_in_codec_frame_ = codec.pacsize / (codec.plfreq / 100);
+
+ channel_->RegisterReceiverACM(receive_acm_.get());
+ send_acm_->RegisterTransportCallback(channel_);
+
+ if (FLAGS_input.size() == 0) {
+ std::string file_name = test::ResourcePath("audio_coding/testfile32kHz",
+ "pcm");
+ pcm_in_fid_.Open(file_name, 32000, "r", true); // auto-rewind
+ std::cout << "Input file " << file_name << " 32 kHz mono." << std::endl;
+ } else {
+ pcm_in_fid_.Open(FLAGS_input, static_cast<uint16_t>(FLAGS_input_fs_hz),
+ "r", true); // auto-rewind
+ std::cout << "Input file " << FLAGS_input << "at " << FLAGS_input_fs_hz
+ << " Hz in " << ((FLAGS_input_stereo) ? "stereo." : "mono.")
+ << std::endl;
+ pcm_in_fid_.ReadStereo(FLAGS_input_stereo);
+ }
+
+ ASSERT_TRUE(pcm_out_fid_ != NULL);
+ std::cout << "Output file " << FLAGS_output << " at " << FLAGS_output_fs_hz
+ << " Hz." << std::endl;
+
+ // Other setups
+ if (FLAGS_loss_rate > 0)
+ loss_threshold_ = RAND_MAX * FLAGS_loss_rate;
+ else
+ loss_threshold_ = 0;
+ }
+
+ void TickOneMillisecond(uint32_t* action) {
+ // One millisecond passed.
+ time_to_insert_packet_ms_--;
+ time_to_playout_audio_ms_--;
+ sender_clock_->AdvanceTimeMilliseconds(1);
+ receiver_clock_->AdvanceTimeMilliseconds(1);
+
+ // Reset action.
+ *action = 0;
+
+ // Is it time to pull audio?
+ if (time_to_playout_audio_ms_ == 0) {
+ time_to_playout_audio_ms_ = kPlayoutPeriodMs;
+ receive_acm_->PlayoutData10Ms(static_cast<int>(FLAGS_output_fs_hz),
+ &frame_);
+ fwrite(frame_.data_, sizeof(frame_.data_[0]),
+ frame_.samples_per_channel_ * frame_.num_channels_, pcm_out_fid_);
+ *action |= kAudioPlayedOut;
+ }
+
+ // Is it time to push in next packet?
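+    // The insertion timeline is kept in float milliseconds; anything within
+    // half a millisecond of now counts as due.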
+ if (time_to_insert_packet_ms_ <= .5) {
+ *action |= kPacketPushedIn;
+
+ // Update time-to-insert packet.
+ uint32_t t = next_receive_ts_;
+ next_receive_ts_ = ReceiveTimestamp();
+ time_to_insert_packet_ms_ += static_cast<float>(next_receive_ts_ - t) /
+ samples_in_1ms_;
+
+ // Push in just enough audio.
+ for (int n = 0; n < num_10ms_in_codec_frame_; n++) {
+ pcm_in_fid_.Read10MsData(frame_);
+ EXPECT_GE(send_acm_->Add10MsData(frame_), 0);
+ }
+
+ // Set the parameters for the packet to be pushed in receiver ACM right
+ // now.
+ uint32_t ts = SendTimestamp();
+ int seq_num = SequenceNumber();
+ bool lost = false;
+ channel_->set_send_timestamp(ts);
+ channel_->set_sequence_number(seq_num);
+ if (loss_threshold_ > 0 && rand() < loss_threshold_) {
+ channel_->set_num_packets_to_drop(1);
+ lost = true;
+ }
+
+ if (FLAGS_verbose) {
+ if (!lost) {
+ std::cout << "\nInserting packet number " << seq_num
+ << " timestamp " << ts << std::endl;
+ } else {
+ std::cout << "\nLost packet number " << seq_num
+ << " timestamp " << ts << std::endl;
+ }
+ }
+ }
+ }
+
+ void TearDown() {
+ delete channel_;
+
+    fclose(seq_num_fid_);
+    fclose(send_ts_fid_);
+    fclose(receive_ts_fid_);
+    fclose(pcm_out_fid_);
+    fclose(playout_timing_fid_);
+ pcm_in_fid_.Close();
+ }
+
+ ~InsertPacketWithTiming() {
+ delete sender_clock_;
+ delete receiver_clock_;
+ }
+
+  // Returns true while there is more input data to simulate.
+ bool HasPackets() {
+ if (feof(seq_num_fid_) || feof(send_ts_fid_) || feof(receive_ts_fid_))
+ return false;
+ return true;
+ }
+
+ // Jitter buffer delay.
+ void Delay(int* optimal_delay, int* current_delay) {
+ NetworkStatistics statistics;
+ receive_acm_->GetNetworkStatistics(&statistics);
+ *optimal_delay = statistics.preferredBufferSize;
+ *current_delay = statistics.currentBufferSize;
+ }
+
+ private:
+ uint32_t SendTimestamp() {
+ uint32_t t;
+ EXPECT_EQ(1, fscanf(send_ts_fid_, "%u\n", &t));
+ return t;
+ }
+
+ uint32_t ReceiveTimestamp() {
+ uint32_t t;
+ EXPECT_EQ(1, fscanf(receive_ts_fid_, "%u\n", &t));
+ return t;
+ }
+
+ int SequenceNumber() {
+ int n;
+ EXPECT_EQ(1, fscanf(seq_num_fid_, "%d\n", &n));
+ return n;
+ }
+
+  // These clocks are created in the constructor and deleted in the
+  // destructor; the ACMs only hold pointers to them.
+ SimulatedClock* sender_clock_;
+ SimulatedClock* receiver_clock_;
+
+ rtc::scoped_ptr<AudioCodingModule> send_acm_;
+ rtc::scoped_ptr<AudioCodingModule> receive_acm_;
+ Channel* channel_;
+
+ FILE* seq_num_fid_; // Input (text), one sequence number per line.
+ FILE* send_ts_fid_; // Input (text), one send timestamp per line.
+ FILE* receive_ts_fid_; // Input (text), one receive timestamp per line.
+ FILE* pcm_out_fid_; // Output PCM16.
+
+ PCMFile pcm_in_fid_; // Input PCM16.
+
+ int samples_in_1ms_;
+
+ // TODO(turajs): this can be computed from the send timestamp, but there is
+ // some complication to account for lost and reordered packets.
+ int num_10ms_in_codec_frame_;
+
+ float time_to_insert_packet_ms_;
+ uint32_t next_receive_ts_;
+ uint32_t time_to_playout_audio_ms_;
+
+ AudioFrame frame_;
+
+ double loss_threshold_;
+
+ // Output (text), sequence number, playout timestamp, time (ms) of playout,
+ // per line.
+ FILE* playout_timing_fid_;
+};
+
+} // namespace webrtc
+
+int main(int argc, char* argv[]) {
+ google::ParseCommandLineFlags(&argc, &argv, true);
+ webrtc::InsertPacketWithTiming test;
+ test.SetUp();
+
+ FILE* delay_log = NULL;
+ if (FLAGS_delay.size() > 0) {
+ delay_log = fopen(FLAGS_delay.c_str(), "wt");
+ if (delay_log == NULL) {
+ std::cout << "Cannot open the file to log delay values." << std::endl;
+ exit(1);
+ }
+ }
+
+ uint32_t action_taken;
+ int optimal_delay_ms;
+ int current_delay_ms;
+ while (test.HasPackets()) {
+ test.TickOneMillisecond(&action_taken);
+
+ if (action_taken != 0) {
+ test.Delay(&optimal_delay_ms, &current_delay_ms);
+ if (delay_log != NULL) {
+ fprintf(delay_log, "%3d %3d\n", optimal_delay_ms, current_delay_ms);
+ }
+ }
+ }
+ std::cout << std::endl;
+ test.TearDown();
+ if (delay_log != NULL)
+ fclose(delay_log);
+}
diff --git a/webrtc/modules/audio_coding/test/opus_test.cc b/webrtc/modules/audio_coding/test/opus_test.cc
new file mode 100644
index 0000000000..104b5e587b
--- /dev/null
+++ b/webrtc/modules/audio_coding/test/opus_test.cc
@@ -0,0 +1,383 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/test/opus_test.h"
+
+#include <assert.h>
+
+#include <string>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/common_types.h"
+#include "webrtc/engine_configurations.h"
+#include "webrtc/modules/audio_coding/codecs/opus/opus_interface.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module_typedefs.h"
+#include "webrtc/modules/audio_coding/test/TestStereo.h"
+#include "webrtc/modules/audio_coding/test/utility.h"
+#include "webrtc/system_wrappers/include/trace.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+namespace webrtc {
+
+OpusTest::OpusTest()
+    : acm_receiver_(AudioCodingModule::Create(0)),
+      channel_a2b_(NULL),
+      counter_(0),
+      payload_type_(255),
+      rtp_timestamp_(0),
+      opus_mono_encoder_(NULL),
+      opus_stereo_encoder_(NULL),
+      opus_mono_decoder_(NULL),
+      opus_stereo_decoder_(NULL) {}
+
+OpusTest::~OpusTest() {
+ if (channel_a2b_ != NULL) {
+ delete channel_a2b_;
+ channel_a2b_ = NULL;
+ }
+ if (opus_mono_encoder_ != NULL) {
+ WebRtcOpus_EncoderFree(opus_mono_encoder_);
+ opus_mono_encoder_ = NULL;
+ }
+ if (opus_stereo_encoder_ != NULL) {
+ WebRtcOpus_EncoderFree(opus_stereo_encoder_);
+ opus_stereo_encoder_ = NULL;
+ }
+ if (opus_mono_decoder_ != NULL) {
+ WebRtcOpus_DecoderFree(opus_mono_decoder_);
+ opus_mono_decoder_ = NULL;
+ }
+ if (opus_stereo_decoder_ != NULL) {
+ WebRtcOpus_DecoderFree(opus_stereo_decoder_);
+ opus_stereo_decoder_ = NULL;
+ }
+}
+
+void OpusTest::Perform() {
+#ifndef WEBRTC_CODEC_OPUS
+ // Opus isn't defined, exit.
+ return;
+#else
+ uint16_t frequency_hz;
+ size_t audio_channels;
+ int16_t test_cntr = 0;
+
+ // Open both mono and stereo test files in 32 kHz.
+ const std::string file_name_stereo =
+ webrtc::test::ResourcePath("audio_coding/teststereo32kHz", "pcm");
+ const std::string file_name_mono =
+ webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm");
+ frequency_hz = 32000;
+ in_file_stereo_.Open(file_name_stereo, frequency_hz, "rb");
+ in_file_stereo_.ReadStereo(true);
+ in_file_mono_.Open(file_name_mono, frequency_hz, "rb");
+ in_file_mono_.ReadStereo(false);
+
+ // Create Opus encoders for mono and stereo.
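+  // (The last argument of WebRtcOpus_EncoderCreate selects the Opus
+  // application mode; 0 and 1 presumably map to VOIP and AUDIO,
+  // respectively.)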
+ ASSERT_GT(WebRtcOpus_EncoderCreate(&opus_mono_encoder_, 1, 0), -1);
+ ASSERT_GT(WebRtcOpus_EncoderCreate(&opus_stereo_encoder_, 2, 1), -1);
+
+ // Create Opus decoders for mono and stereo for stand-alone testing of Opus.
+ ASSERT_GT(WebRtcOpus_DecoderCreate(&opus_mono_decoder_, 1), -1);
+ ASSERT_GT(WebRtcOpus_DecoderCreate(&opus_stereo_decoder_, 2), -1);
+ WebRtcOpus_DecoderInit(opus_mono_decoder_);
+ WebRtcOpus_DecoderInit(opus_stereo_decoder_);
+
+ ASSERT_TRUE(acm_receiver_.get() != NULL);
+ EXPECT_EQ(0, acm_receiver_->InitializeReceiver());
+
+ // Register Opus stereo as receiving codec.
+ CodecInst opus_codec_param;
+ int codec_id = acm_receiver_->Codec("opus", 48000, 2);
+ EXPECT_EQ(0, acm_receiver_->Codec(codec_id, &opus_codec_param));
+ payload_type_ = opus_codec_param.pltype;
+ EXPECT_EQ(0, acm_receiver_->RegisterReceiveCodec(opus_codec_param));
+
+ // Create and connect the channel.
+ channel_a2b_ = new TestPackStereo;
+ channel_a2b_->RegisterReceiverACM(acm_receiver_.get());
+
+ //
+ // Test Stereo.
+ //
+
+ channel_a2b_->set_codec_mode(kStereo);
+ audio_channels = 2;
+ test_cntr++;
+ OpenOutFile(test_cntr);
+
+ // Run Opus with 2.5 ms frame size.
+ Run(channel_a2b_, audio_channels, 64000, 120);
+
+ // Run Opus with 5 ms frame size.
+ Run(channel_a2b_, audio_channels, 64000, 240);
+
+ // Run Opus with 10 ms frame size.
+ Run(channel_a2b_, audio_channels, 64000, 480);
+
+ // Run Opus with 20 ms frame size.
+ Run(channel_a2b_, audio_channels, 64000, 960);
+
+ // Run Opus with 40 ms frame size.
+ Run(channel_a2b_, audio_channels, 64000, 1920);
+
+ // Run Opus with 60 ms frame size.
+ Run(channel_a2b_, audio_channels, 64000, 2880);
+
+ out_file_.Close();
+ out_file_standalone_.Close();
+
+ //
+ // Test Opus stereo with packet-losses.
+ //
+
+ test_cntr++;
+ OpenOutFile(test_cntr);
+
+ // Run Opus with 20 ms frame size, 1% packet loss.
+ Run(channel_a2b_, audio_channels, 64000, 960, 1);
+
+ // Run Opus with 20 ms frame size, 5% packet loss.
+ Run(channel_a2b_, audio_channels, 64000, 960, 5);
+
+ // Run Opus with 20 ms frame size, 10% packet loss.
+ Run(channel_a2b_, audio_channels, 64000, 960, 10);
+
+ out_file_.Close();
+ out_file_standalone_.Close();
+
+ //
+ // Test Mono.
+ //
+ channel_a2b_->set_codec_mode(kMono);
+ audio_channels = 1;
+ test_cntr++;
+ OpenOutFile(test_cntr);
+
+ // Register Opus mono as receiving codec.
+ opus_codec_param.channels = 1;
+ EXPECT_EQ(0, acm_receiver_->RegisterReceiveCodec(opus_codec_param));
+
+ // Run Opus with 2.5 ms frame size.
+ Run(channel_a2b_, audio_channels, 32000, 120);
+
+ // Run Opus with 5 ms frame size.
+ Run(channel_a2b_, audio_channels, 32000, 240);
+
+ // Run Opus with 10 ms frame size.
+ Run(channel_a2b_, audio_channels, 32000, 480);
+
+ // Run Opus with 20 ms frame size.
+ Run(channel_a2b_, audio_channels, 32000, 960);
+
+ // Run Opus with 40 ms frame size.
+ Run(channel_a2b_, audio_channels, 32000, 1920);
+
+ // Run Opus with 60 ms frame size.
+ Run(channel_a2b_, audio_channels, 32000, 2880);
+
+ out_file_.Close();
+ out_file_standalone_.Close();
+
+ //
+ // Test Opus mono with packet-losses.
+ //
+ test_cntr++;
+ OpenOutFile(test_cntr);
+
+ // Run Opus with 20 ms frame size, 1% packet loss.
+ Run(channel_a2b_, audio_channels, 64000, 960, 1);
+
+ // Run Opus with 20 ms frame size, 5% packet loss.
+ Run(channel_a2b_, audio_channels, 64000, 960, 5);
+
+ // Run Opus with 20 ms frame size, 10% packet loss.
+ Run(channel_a2b_, audio_channels, 64000, 960, 10);
+
+ // Close the files.
+ in_file_stereo_.Close();
+ in_file_mono_.Close();
+ out_file_.Close();
+ out_file_standalone_.Close();
+#endif
+}
+
+void OpusTest::Run(TestPackStereo* channel, size_t channels, int bitrate,
+ size_t frame_length, int percent_loss) {
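+  // |frame_length| is the Opus frame size in samples per channel at 48 kHz;
+  // e.g. 960 samples corresponds to a 20 ms frame.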
+ AudioFrame audio_frame;
+ int32_t out_freq_hz_b = out_file_.SamplingFrequency();
+ const size_t kBufferSizeSamples = 480 * 12 * 2; // 120 ms stereo audio.
+ int16_t audio[kBufferSizeSamples];
+ int16_t out_audio[kBufferSizeSamples];
+ int16_t audio_type;
+ size_t written_samples = 0;
+ size_t read_samples = 0;
+ size_t decoded_samples = 0;
+ bool first_packet = true;
+ uint32_t start_time_stamp = 0;
+
+ channel->reset_payload_size();
+ counter_ = 0;
+
+ // Set encoder rate.
+ EXPECT_EQ(0, WebRtcOpus_SetBitRate(opus_mono_encoder_, bitrate));
+ EXPECT_EQ(0, WebRtcOpus_SetBitRate(opus_stereo_encoder_, bitrate));
+
+#if defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS) || defined(WEBRTC_ARCH_ARM)
+ // If we are on Android, iOS and/or ARM, use a lower complexity setting as
+ // default.
+ const int kOpusComplexity5 = 5;
+ EXPECT_EQ(0, WebRtcOpus_SetComplexity(opus_mono_encoder_, kOpusComplexity5));
+ EXPECT_EQ(0, WebRtcOpus_SetComplexity(opus_stereo_encoder_,
+ kOpusComplexity5));
+#endif
+
+ // Fast-forward 1 second (100 blocks) since the files start with silence.
+ in_file_stereo_.FastForward(100);
+ in_file_mono_.FastForward(100);
+
+ // Limit the runtime to 1000 blocks of 10 ms each.
+ for (size_t audio_length = 0; audio_length < 1000; audio_length += 10) {
+ bool lost_packet = false;
+
+ // Get 10 msec of audio.
+ if (channels == 1) {
+ if (in_file_mono_.EndOfFile()) {
+ break;
+ }
+ in_file_mono_.Read10MsData(audio_frame);
+ } else {
+ if (in_file_stereo_.EndOfFile()) {
+ break;
+ }
+ in_file_stereo_.Read10MsData(audio_frame);
+ }
+
+ // If input audio is sampled at 32 kHz, resampling to 48 kHz is required.
+ EXPECT_EQ(480,
+ resampler_.Resample10Msec(audio_frame.data_,
+ audio_frame.sample_rate_hz_,
+ 48000,
+ channels,
+ kBufferSizeSamples - written_samples,
+ &audio[written_samples]));
+ written_samples += 480 * channels;
+
+ // Sometimes we need to loop over the audio vector to produce the right
+ // number of packets.
+ size_t loop_encode = (written_samples - read_samples) /
+ (channels * frame_length);
+
+ if (loop_encode > 0) {
+ const size_t kMaxBytes = 1000; // Maximum number of bytes for one packet.
+ size_t bitstream_len_byte;
+ uint8_t bitstream[kMaxBytes];
+ for (size_t i = 0; i < loop_encode; i++) {
+ int bitstream_len_byte_int = WebRtcOpus_Encode(
+ (channels == 1) ? opus_mono_encoder_ : opus_stereo_encoder_,
+ &audio[read_samples], frame_length, kMaxBytes, bitstream);
+ ASSERT_GE(bitstream_len_byte_int, 0);
+ bitstream_len_byte = static_cast<size_t>(bitstream_len_byte_int);
+
+          // Simulate packet loss by flagging the channel with
+          // set_lost_packet(true) in |percent_loss| percent of the loops.
+ // TODO(tlegrand): Move handling of loss simulation to TestPackStereo.
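+          // Note that 100 / percent_loss uses integer division, so losses
+          // occur on a fixed period: roughly every (100 / percent_loss)-th
+          // packet is dropped, rather than a random |percent_loss| fraction.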
+ if (percent_loss > 0) {
+ if (counter_ == floor((100 / percent_loss) + 0.5)) {
+ counter_ = 0;
+ lost_packet = true;
+ channel->set_lost_packet(true);
+ } else {
+ lost_packet = false;
+ channel->set_lost_packet(false);
+ }
+ counter_++;
+ }
+
+ // Run stand-alone Opus decoder, or decode PLC.
+ if (channels == 1) {
+ if (!lost_packet) {
+ decoded_samples += WebRtcOpus_Decode(
+ opus_mono_decoder_, bitstream, bitstream_len_byte,
+ &out_audio[decoded_samples * channels], &audio_type);
+ } else {
+ decoded_samples += WebRtcOpus_DecodePlc(
+ opus_mono_decoder_, &out_audio[decoded_samples * channels], 1);
+ }
+ } else {
+ if (!lost_packet) {
+ decoded_samples += WebRtcOpus_Decode(
+ opus_stereo_decoder_, bitstream, bitstream_len_byte,
+ &out_audio[decoded_samples * channels], &audio_type);
+ } else {
+ decoded_samples += WebRtcOpus_DecodePlc(
+ opus_stereo_decoder_, &out_audio[decoded_samples * channels],
+ 1);
+ }
+ }
+
+ // Send data to the channel. "channel" will handle the loss simulation.
+ channel->SendData(kAudioFrameSpeech, payload_type_, rtp_timestamp_,
+ bitstream, bitstream_len_byte, NULL);
+ if (first_packet) {
+ first_packet = false;
+ start_time_stamp = rtp_timestamp_;
+ }
+ rtp_timestamp_ += static_cast<uint32_t>(frame_length);
+ read_samples += frame_length * channels;
+ }
+ if (read_samples == written_samples) {
+ read_samples = 0;
+ written_samples = 0;
+ }
+ }
+
+ // Run received side of ACM.
+ ASSERT_EQ(0, acm_receiver_->PlayoutData10Ms(out_freq_hz_b, &audio_frame));
+
+ // Write output speech to file.
+ out_file_.Write10MsData(
+ audio_frame.data_,
+ audio_frame.samples_per_channel_ * audio_frame.num_channels_);
+
+ // Write stand-alone speech to file.
+ out_file_standalone_.Write10MsData(out_audio, decoded_samples * channels);
+
+ if (audio_frame.timestamp_ > start_time_stamp) {
+ // Number of channels should be the same for both stand-alone and
+ // ACM-decoding.
+ EXPECT_EQ(audio_frame.num_channels_, channels);
+ }
+
+ decoded_samples = 0;
+ }
+
+ if (in_file_mono_.EndOfFile()) {
+ in_file_mono_.Rewind();
+ }
+ if (in_file_stereo_.EndOfFile()) {
+ in_file_stereo_.Rewind();
+ }
+ // Reset in case we ended with a lost packet.
+ channel->set_lost_packet(false);
+}
+
+void OpusTest::OpenOutFile(int test_number) {
+ std::string file_name;
+ std::stringstream file_stream;
+ file_stream << webrtc::test::OutputPath() << "opustest_out_"
+ << test_number << ".pcm";
+ file_name = file_stream.str();
+ out_file_.Open(file_name, 48000, "wb");
+ file_stream.str("");
+ file_stream << webrtc::test::OutputPath() << "opusstandalone_out_"
+ << test_number << ".pcm";
+ file_name = file_stream.str();
+ out_file_standalone_.Open(file_name, 48000, "wb");
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/test/opus_test.h b/webrtc/modules/audio_coding/test/opus_test.h
new file mode 100644
index 0000000000..93c9ffb263
--- /dev/null
+++ b/webrtc/modules/audio_coding/test/opus_test.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_TEST_OPUS_TEST_H_
+#define WEBRTC_MODULES_AUDIO_CODING_TEST_OPUS_TEST_H_
+
+#include <math.h>
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/audio_coding/codecs/opus/opus_interface.h"
+#include "webrtc/modules/audio_coding/acm2/acm_resampler.h"
+#include "webrtc/modules/audio_coding/test/ACMTest.h"
+#include "webrtc/modules/audio_coding/test/Channel.h"
+#include "webrtc/modules/audio_coding/test/PCMFile.h"
+#include "webrtc/modules/audio_coding/test/TestStereo.h"
+
+namespace webrtc {
+
+class OpusTest : public ACMTest {
+ public:
+ OpusTest();
+ ~OpusTest();
+
+ void Perform();
+
+ private:
+ void Run(TestPackStereo* channel,
+ size_t channels,
+ int bitrate,
+ size_t frame_length,
+ int percent_loss = 0);
+
+ void OpenOutFile(int test_number);
+
+ rtc::scoped_ptr<AudioCodingModule> acm_receiver_;
+ TestPackStereo* channel_a2b_;
+ PCMFile in_file_stereo_;
+ PCMFile in_file_mono_;
+ PCMFile out_file_;
+ PCMFile out_file_standalone_;
+ int counter_;
+ uint8_t payload_type_;
+ uint32_t rtp_timestamp_;
+ acm2::ACMResampler resampler_;
+ WebRtcOpusEncInst* opus_mono_encoder_;
+ WebRtcOpusEncInst* opus_stereo_encoder_;
+ WebRtcOpusDecInst* opus_mono_decoder_;
+ WebRtcOpusDecInst* opus_stereo_decoder_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_TEST_OPUS_TEST_H_
diff --git a/webrtc/modules/audio_coding/test/target_delay_unittest.cc b/webrtc/modules/audio_coding/test/target_delay_unittest.cc
new file mode 100644
index 0000000000..195e9d8145
--- /dev/null
+++ b/webrtc/modules/audio_coding/test/target_delay_unittest.cc
@@ -0,0 +1,249 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/common_types.h"
+#include "webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module.h"
+#include "webrtc/modules/audio_coding/test/utility.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/system_wrappers/include/sleep.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+namespace webrtc {
+
+class TargetDelayTest : public ::testing::Test {
+ protected:
+ TargetDelayTest() : acm_(AudioCodingModule::Create(0)) {}
+
+ ~TargetDelayTest() {}
+
+ void SetUp() {
+ EXPECT_TRUE(acm_.get() != NULL);
+
+ CodecInst codec;
+ ASSERT_EQ(0, AudioCodingModule::Codec("L16", &codec, kSampleRateHz, 1));
+ ASSERT_EQ(0, acm_->InitializeReceiver());
+ ASSERT_EQ(0, acm_->RegisterReceiveCodec(codec));
+
+ rtp_info_.header.payloadType = codec.pltype;
+ rtp_info_.header.timestamp = 0;
+ rtp_info_.header.ssrc = 0x12345678;
+ rtp_info_.header.markerBit = false;
+ rtp_info_.header.sequenceNumber = 0;
+ rtp_info_.type.Audio.channel = 1;
+ rtp_info_.type.Audio.isCNG = false;
+ rtp_info_.frameType = kAudioFrameSpeech;
+
+ int16_t audio[kFrameSizeSamples];
+ const int kRange = 0x7FF; // 2047, easy for masking.
+ for (size_t n = 0; n < kFrameSizeSamples; ++n)
+ audio[n] = (rand() & kRange) - kRange / 2;
+ WebRtcPcm16b_Encode(audio, kFrameSizeSamples, payload_);
+ }
+
+ void OutOfRangeInput() {
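+    // SetMinimumPlayoutDelay is expected to accept only delays in
+    // [0, 10000] ms.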
+ EXPECT_EQ(-1, SetMinimumDelay(-1));
+ EXPECT_EQ(-1, SetMinimumDelay(10001));
+ }
+
+ void NoTargetDelayBufferSizeChanges() {
+ for (int n = 0; n < 30; ++n) // Run enough iterations.
+ Run(true);
+ int clean_optimal_delay = GetCurrentOptimalDelayMs();
+ Run(false); // Run with jitter.
+ int jittery_optimal_delay = GetCurrentOptimalDelayMs();
+ EXPECT_GT(jittery_optimal_delay, clean_optimal_delay);
+ int required_delay = RequiredDelay();
+ EXPECT_GT(required_delay, 0);
+ EXPECT_NEAR(required_delay, jittery_optimal_delay, 1);
+ }
+
+ void WithTargetDelayBufferNotChanging() {
+ // A target delay that is one packet larger than jitter.
+ const int kTargetDelayMs = (kInterarrivalJitterPacket + 1) *
+ kNum10msPerFrame * 10;
+ ASSERT_EQ(0, SetMinimumDelay(kTargetDelayMs));
+ for (int n = 0; n < 30; ++n) // Run enough iterations to fill the buffer.
+ Run(true);
+ int clean_optimal_delay = GetCurrentOptimalDelayMs();
+ EXPECT_EQ(kTargetDelayMs, clean_optimal_delay);
+ Run(false); // Run with jitter.
+ int jittery_optimal_delay = GetCurrentOptimalDelayMs();
+ EXPECT_EQ(jittery_optimal_delay, clean_optimal_delay);
+ }
+
+ void RequiredDelayAtCorrectRange() {
+ for (int n = 0; n < 30; ++n) // Run clean and store delay.
+ Run(true);
+ int clean_optimal_delay = GetCurrentOptimalDelayMs();
+
+ // A relatively large delay.
+ const int kTargetDelayMs = (kInterarrivalJitterPacket + 10) *
+ kNum10msPerFrame * 10;
+ ASSERT_EQ(0, SetMinimumDelay(kTargetDelayMs));
+ for (int n = 0; n < 300; ++n) // Run enough iterations to fill the buffer.
+ Run(true);
+ Run(false); // Run with jitter.
+
+ int jittery_optimal_delay = GetCurrentOptimalDelayMs();
+ EXPECT_EQ(kTargetDelayMs, jittery_optimal_delay);
+
+ int required_delay = RequiredDelay();
+
+ // Checking |required_delay| is in correct range.
+ EXPECT_GT(required_delay, 0);
+ EXPECT_GT(jittery_optimal_delay, required_delay);
+ EXPECT_GT(required_delay, clean_optimal_delay);
+
+ // A tighter check for the value of |required_delay|.
+ // The jitter forces a delay of
+ // |kInterarrivalJitterPacket * kNum10msPerFrame * 10| milliseconds. So we
+ // expect |required_delay| be close to that.
+ EXPECT_NEAR(kInterarrivalJitterPacket * kNum10msPerFrame * 10,
+ required_delay, 1);
+ }
+
+ void TargetDelayBufferMinMax() {
+ const int kTargetMinDelayMs = kNum10msPerFrame * 10;
+ ASSERT_EQ(0, SetMinimumDelay(kTargetMinDelayMs));
+ for (int m = 0; m < 30; ++m) // Run enough iterations to fill the buffer.
+ Run(true);
+ int clean_optimal_delay = GetCurrentOptimalDelayMs();
+ EXPECT_EQ(kTargetMinDelayMs, clean_optimal_delay);
+
+ const int kTargetMaxDelayMs = 2 * (kNum10msPerFrame * 10);
+ ASSERT_EQ(0, SetMaximumDelay(kTargetMaxDelayMs));
+ for (int n = 0; n < 30; ++n) // Run enough iterations to fill the buffer.
+ Run(false);
+
+ int capped_optimal_delay = GetCurrentOptimalDelayMs();
+ EXPECT_EQ(kTargetMaxDelayMs, capped_optimal_delay);
+ }
+
+ private:
+ static const int kSampleRateHz = 16000;
+ static const int kNum10msPerFrame = 2;
+ static const size_t kFrameSizeSamples = 320; // 20 ms @ 16 kHz.
+ // payload-len = frame-samples * 2 bytes/sample.
+ static const int kPayloadLenBytes = 320 * 2;
+  // Inter-arrival time of packets in a jittery channel, expressed in number
+  // of packets. A value of one means no jitter.
+ static const int kInterarrivalJitterPacket = 2;
+
+ void Push() {
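+    // Advance the RTP header by one frame and hand the same pre-encoded
+    // payload to the ACM, as if a new packet had just arrived.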
+ rtp_info_.header.timestamp += kFrameSizeSamples;
+ rtp_info_.header.sequenceNumber++;
+ ASSERT_EQ(0, acm_->IncomingPacket(payload_, kFrameSizeSamples * 2,
+ rtp_info_));
+ }
+
+ // Pull audio equivalent to the amount of audio in one RTP packet.
+ void Pull() {
+ AudioFrame frame;
+ for (int k = 0; k < kNum10msPerFrame; ++k) { // Pull one frame.
+ ASSERT_EQ(0, acm_->PlayoutData10Ms(-1, &frame));
+      // ASSERT_EQ on the static const members fails to link here (it would
+      // odr-use them), so compare with ASSERT_TRUE instead.
+ ASSERT_TRUE(kSampleRateHz == frame.sample_rate_hz_);
+ ASSERT_EQ(1u, frame.num_channels_);
+ ASSERT_TRUE(kSampleRateHz / 100 == frame.samples_per_channel_);
+ }
+ }
+
+ void Run(bool clean) {
+ for (int n = 0; n < 10; ++n) {
+ for (int m = 0; m < 5; ++m) {
+ Push();
+ Pull();
+ }
+
+ if (!clean) {
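+        // Simulate jitter: each pushed packet is followed by
+        // kInterarrivalJitterPacket pulls, so a packet arrives only every
+        // kInterarrivalJitterPacket playout frames.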
+ for (int m = 0; m < 10; ++m) { // Long enough to trigger delay change.
+ Push();
+ for (int n = 0; n < kInterarrivalJitterPacket; ++n)
+ Pull();
+ }
+ }
+ }
+ }
+
+ int SetMinimumDelay(int delay_ms) {
+ return acm_->SetMinimumPlayoutDelay(delay_ms);
+ }
+
+ int SetMaximumDelay(int delay_ms) {
+ return acm_->SetMaximumPlayoutDelay(delay_ms);
+ }
+
+ int GetCurrentOptimalDelayMs() {
+ NetworkStatistics stats;
+ acm_->GetNetworkStatistics(&stats);
+ return stats.preferredBufferSize;
+ }
+
+ int RequiredDelay() {
+ return acm_->LeastRequiredDelayMs();
+ }
+
+ rtc::scoped_ptr<AudioCodingModule> acm_;
+ WebRtcRTPHeader rtp_info_;
+ uint8_t payload_[kPayloadLenBytes];
+};
+
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_OutOfRangeInput DISABLED_OutOfRangeInput
+#else
+#define MAYBE_OutOfRangeInput OutOfRangeInput
+#endif
+TEST_F(TargetDelayTest, MAYBE_OutOfRangeInput) {
+ OutOfRangeInput();
+}
+
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_NoTargetDelayBufferSizeChanges \
+ DISABLED_NoTargetDelayBufferSizeChanges
+#else
+#define MAYBE_NoTargetDelayBufferSizeChanges NoTargetDelayBufferSizeChanges
+#endif
+TEST_F(TargetDelayTest, MAYBE_NoTargetDelayBufferSizeChanges) {
+ NoTargetDelayBufferSizeChanges();
+}
+
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_WithTargetDelayBufferNotChanging \
+ DISABLED_WithTargetDelayBufferNotChanging
+#else
+#define MAYBE_WithTargetDelayBufferNotChanging WithTargetDelayBufferNotChanging
+#endif
+TEST_F(TargetDelayTest, MAYBE_WithTargetDelayBufferNotChanging) {
+ WithTargetDelayBufferNotChanging();
+}
+
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_RequiredDelayAtCorrectRange DISABLED_RequiredDelayAtCorrectRange
+#else
+#define MAYBE_RequiredDelayAtCorrectRange RequiredDelayAtCorrectRange
+#endif
+TEST_F(TargetDelayTest, MAYBE_RequiredDelayAtCorrectRange) {
+ RequiredDelayAtCorrectRange();
+}
+
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_TargetDelayBufferMinMax DISABLED_TargetDelayBufferMinMax
+#else
+#define MAYBE_TargetDelayBufferMinMax TargetDelayBufferMinMax
+#endif
+TEST_F(TargetDelayTest, MAYBE_TargetDelayBufferMinMax) {
+ TargetDelayBufferMinMax();
+}
+
+} // namespace webrtc
+
diff --git a/webrtc/modules/audio_coding/test/utility.cc b/webrtc/modules/audio_coding/test/utility.cc
new file mode 100644
index 0000000000..89368bce51
--- /dev/null
+++ b/webrtc/modules/audio_coding/test/utility.cc
@@ -0,0 +1,303 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "utility.h"
+
+#include <assert.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/common.h"
+#include "webrtc/common_types.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module.h"
+#include "webrtc/modules/audio_coding/acm2/acm_common_defs.h"
+
+#define NUM_CODECS_WITH_FIXED_PAYLOAD_TYPE 13
+
+namespace webrtc {
+
+ACMTestTimer::ACMTestTimer()
+    : _msec(0),
+      _sec(0),
+      _min(0),
+      _hour(0) {}
+
+ACMTestTimer::~ACMTestTimer() {}
+
+void ACMTestTimer::Reset() {
+  _msec = 0;
+  _sec = 0;
+  _min = 0;
+  _hour = 0;
+}
+
+void ACMTestTimer::Tick10ms() {
+  _msec += 10;
+  Adjust();
+}
+
+void ACMTestTimer::Tick1ms() {
+  _msec++;
+  Adjust();
+}
+
+void ACMTestTimer::Tick100ms() {
+  _msec += 100;
+  Adjust();
+}
+
+void ACMTestTimer::Tick1sec() {
+  _sec++;
+  Adjust();
+}
+
+void ACMTestTimer::CurrentTimeHMS(char* currTime) {
+  sprintf(currTime, "%4lu:%02u:%06.3f", _hour, _min,
+          (double) _sec + (double) _msec / 1000.);
+}
+
+void ACMTestTimer::CurrentTime(unsigned long& h, unsigned char& m,
+                               unsigned char& s, unsigned short& ms) {
+  h = _hour;
+  m = _min;
+  s = _sec;
+  ms = _msec;
+}
+
+void ACMTestTimer::Adjust() {
+ unsigned int n;
+ if (_msec >= 1000) {
+ n = _msec / 1000;
+ _msec -= (1000 * n);
+ _sec += n;
+ }
+ if (_sec >= 60) {
+ n = _sec / 60;
+ _sec -= (n * 60);
+ _min += n;
+ }
+ if (_min >= 60) {
+ n = _min / 60;
+ _min -= (n * 60);
+ _hour += n;
+ }
+}
+
+int16_t ChooseCodec(CodecInst& codecInst) {
+  PrintCodecs();
+  uint8_t noCodec = AudioCodingModule::NumberOfCodecs();
+  int8_t codecID;
+  bool outOfRange = false;
+  char myStr[15] = "";
+  do {
+    printf("\nChoose a codec [0]: ");
+    EXPECT_TRUE(fgets(myStr, 10, stdin) != NULL);
+    codecID = atoi(myStr);
+    // Reset the flag on every iteration; otherwise a single bad input would
+    // loop forever even after a valid choice.
+    outOfRange = (codecID < 0) || (codecID >= noCodec);
+    if (outOfRange) {
+      printf("\nOut of range.\n");
+    }
+  } while (outOfRange);
+
+  CHECK_ERROR(AudioCodingModule::Codec((uint8_t)codecID, &codecInst));
+  return 0;
+}
+
+void PrintCodecs() {
+ uint8_t noCodec = AudioCodingModule::NumberOfCodecs();
+
+ CodecInst codecInst;
+ printf("No Name [Hz] [bps]\n");
+ for (uint8_t codecCntr = 0; codecCntr < noCodec; codecCntr++) {
+ AudioCodingModule::Codec(codecCntr, &codecInst);
+ printf("%2d- %-18s %5d %6d\n", codecCntr, codecInst.plname,
+ codecInst.plfreq, codecInst.rate);
+ }
+}
+
+CircularBuffer::CircularBuffer(uint32_t len)
+ : _buff(NULL),
+ _idx(0),
+ _buffIsFull(false),
+ _calcAvg(false),
+ _calcVar(false),
+ _sum(0),
+ _sumSqr(0) {
+ _buff = new double[len];
+ if (_buff == NULL) {
+ _buffLen = 0;
+ } else {
+ for (uint32_t n = 0; n < len; n++) {
+ _buff[n] = 0;
+ }
+ _buffLen = len;
+ }
+}
+
+CircularBuffer::~CircularBuffer() {
+ if (_buff != NULL) {
+ delete[] _buff;
+ _buff = NULL;
+ }
+}
+
+void CircularBuffer::Update(const double newVal) {
+ assert(_buffLen > 0);
+
+ // store the value that is going to be overwritten
+ double oldVal = _buff[_idx];
+ // record the new value
+ _buff[_idx] = newVal;
+ // increment the index, to point to where we would
+ // write next
+ _idx++;
+ // it is a circular buffer, if we are at the end
+ // we have to cycle to the beginning
+ if (_idx >= _buffLen) {
+ // flag that the buffer is filled up.
+ _buffIsFull = true;
+ _idx = 0;
+ }
+
+  // Update the running statistics in O(1), using the value that was just
+  // overwritten.
+  if (_calcAvg) {
+    _sum += (newVal - oldVal);
+  }
+
+  if (_calcVar) {
+    // (newVal - oldVal) * (newVal + oldVal) == newVal^2 - oldVal^2, which
+    // keeps the running sum of squares up to date.
+    _sumSqr += (double) (newVal - oldVal) * (double) (newVal + oldVal);
+  }
+}
+
+void CircularBuffer::SetArithMean(bool enable) {
+ assert(_buffLen > 0);
+
+ if (enable && !_calcAvg) {
+ uint32_t lim;
+ if (_buffIsFull) {
+ lim = _buffLen;
+ } else {
+ lim = _idx;
+ }
+ _sum = 0;
+ for (uint32_t n = 0; n < lim; n++) {
+ _sum += _buff[n];
+ }
+ }
+ _calcAvg = enable;
+}
+
+void CircularBuffer::SetVariance(bool enable) {
+ assert(_buffLen > 0);
+
+ if (enable && !_calcVar) {
+ uint32_t lim;
+ if (_buffIsFull) {
+ lim = _buffLen;
+ } else {
+ lim = _idx;
+ }
+ _sumSqr = 0;
+ for (uint32_t n = 0; n < lim; n++) {
+ _sumSqr += _buff[n] * _buff[n];
+ }
+ }
+  _calcVar = enable;
+}
+
+int16_t CircularBuffer::ArithMean(double& mean) {
+  assert(_buffLen > 0);
+
+  if (_buffIsFull) {
+    mean = _sum / (double) _buffLen;
+    return 0;
+  }
+  if (_idx > 0) {
+    mean = _sum / (double) _idx;
+    return 0;
+  }
+  return -1;
+}
+
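+// Note: this returns the mean of the squared samples (the variance around
+// zero), not the variance around the arithmetic mean.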
+int16_t CircularBuffer::Variance(double& var) {
+  assert(_buffLen > 0);
+
+  if (_buffIsFull) {
+    var = _sumSqr / (double) _buffLen;
+    return 0;
+  }
+  if (_idx > 0) {
+    var = _sumSqr / (double) _idx;
+    return 0;
+  }
+  return -1;
+}
+
+bool FixedPayloadTypeCodec(const char* payloadName) {
+ char fixPayloadTypeCodecs[NUM_CODECS_WITH_FIXED_PAYLOAD_TYPE][32] = { "PCMU",
+ "PCMA", "GSM", "G723", "DVI4", "LPC", "PCMA", "G722", "QCELP", "CN",
+ "MPA", "G728", "G729" };
+
+ for (int n = 0; n < NUM_CODECS_WITH_FIXED_PAYLOAD_TYPE; n++) {
+ if (!STR_CASE_CMP(payloadName, fixPayloadTypeCodecs[n])) {
+ return true;
+ }
+ }
+ return false;
+}
+
+void VADCallback::Reset() {
+ memset(_numFrameTypes, 0, sizeof(_numFrameTypes));
+}
+
+VADCallback::VADCallback() {
+ memset(_numFrameTypes, 0, sizeof(_numFrameTypes));
+}
+
+void VADCallback::PrintFrameTypes() {
+ printf("kEmptyFrame......... %d\n", _numFrameTypes[kEmptyFrame]);
+ printf("kAudioFrameSpeech... %d\n", _numFrameTypes[kAudioFrameSpeech]);
+ printf("kAudioFrameCN....... %d\n", _numFrameTypes[kAudioFrameCN]);
+ printf("kVideoFrameKey...... %d\n", _numFrameTypes[kVideoFrameKey]);
+ printf("kVideoFrameDelta.... %d\n", _numFrameTypes[kVideoFrameDelta]);
+}
+
+int32_t VADCallback::InFrameType(FrameType frame_type) {
+ _numFrameTypes[frame_type]++;
+ return 0;
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/test/utility.h b/webrtc/modules/audio_coding/test/utility.h
new file mode 100644
index 0000000000..23869be7ed
--- /dev/null
+++ b/webrtc/modules/audio_coding/test/utility.h
@@ -0,0 +1,139 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_TEST_UTILITY_H_
+#define WEBRTC_MODULES_AUDIO_CODING_TEST_UTILITY_H_
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module.h"
+
+namespace webrtc {
+
+//-----------------------------
+#define CHECK_ERROR(f) \
+ do { \
+ EXPECT_GE(f, 0) << "Error Calling API"; \
+ } while(0)
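+// Example: CHECK_ERROR(acm->InitializeReceiver()) registers a gtest failure
+// if the call returns a negative error code.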
+
+//-----------------------------
+#define CHECK_PROTECTED(f) \
+ do { \
+ if (f >= 0) { \
+ ADD_FAILURE() << "Error Calling API"; \
+ } else { \
+ printf("An expected error is caught.\n"); \
+ } \
+ } while(0)
+
+//----------------------------
+#define CHECK_ERROR_MT(f) \
+ do { \
+ if (f < 0) { \
+ fprintf(stderr, "Error Calling API in file %s at line %d \n", \
+ __FILE__, __LINE__); \
+ } \
+ } while(0)
+
+//----------------------------
+#define CHECK_PROTECTED_MT(f) \
+ do { \
+ if (f >= 0) { \
+ fprintf(stderr, "Error Calling API in file %s at line %d \n", \
+ __FILE__, __LINE__); \
+ } else { \
+ printf("An expected error is caught.\n"); \
+ } \
+ } while(0)
+
+#define DELETE_POINTER(p) \
+ do { \
+ if (p != NULL) { \
+ delete p; \
+ p = NULL; \
+ } \
+ } while(0)
+
+class ACMTestTimer {
+ public:
+ ACMTestTimer();
+ ~ACMTestTimer();
+
+ void Reset();
+ void Tick10ms();
+ void Tick1ms();
+ void Tick100ms();
+ void Tick1sec();
+ void CurrentTimeHMS(char* currTime);
+ void CurrentTime(unsigned long& h, unsigned char& m, unsigned char& s,
+ unsigned short& ms);
+
+ private:
+ void Adjust();
+
+ unsigned short _msec;
+ unsigned char _sec;
+ unsigned char _min;
+ unsigned long _hour;
+};
+
+class CircularBuffer {
+ public:
+  explicit CircularBuffer(uint32_t len);
+ ~CircularBuffer();
+
+ void SetArithMean(bool enable);
+ void SetVariance(bool enable);
+
+  void Update(const double newVal);
+  bool IsBufferFull() const { return _buffIsFull; }
+
+ int16_t Variance(double& var);
+ int16_t ArithMean(double& mean);
+
+ protected:
+ double* _buff;
+ uint32_t _idx;
+ uint32_t _buffLen;
+
+ bool _buffIsFull;
+ bool _calcAvg;
+ bool _calcVar;
+ double _sum;
+ double _sumSqr;
+};
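+
+// A minimal usage sketch (hypothetical values): keep a window of the last
+// 100 measurements and query their running mean.
+//
+//   CircularBuffer window(100);
+//   window.SetArithMean(true);
+//   window.Update(delay_ms);
+//   double mean;
+//   if (window.ArithMean(mean) == 0) {
+//     // |mean| is now valid.
+//   }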
+
+int16_t ChooseCodec(CodecInst& codecInst);
+
+void PrintCodecs();
+
+bool FixedPayloadTypeCodec(const char* payloadName);
+
+class VADCallback : public ACMVADCallback {
+ public:
+ VADCallback();
+ ~VADCallback() {
+ }
+
+ int32_t InFrameType(FrameType frame_type);
+
+ void PrintFrameTypes();
+ void Reset();
+
+ private:
+ uint32_t _numFrameTypes[5];
+};
+
+void UseLegacyAcm(webrtc::Config* config);
+
+void UseNewAcm(webrtc::Config* config);
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_TEST_UTILITY_H_
diff --git a/webrtc/modules/audio_conference_mixer/BUILD.gn b/webrtc/modules/audio_conference_mixer/BUILD.gn
index 3b9e2769ac..36391c7abc 100644
--- a/webrtc/modules/audio_conference_mixer/BUILD.gn
+++ b/webrtc/modules/audio_conference_mixer/BUILD.gn
@@ -9,15 +9,15 @@
config("audio_conference_mixer_config") {
visibility = [ ":*" ] # Only targets in this file can depend on this.
include_dirs = [
- "interface",
- "../interface",
+ "include",
+ "../include",
]
}
source_set("audio_conference_mixer") {
sources = [
- "interface/audio_conference_mixer.h",
- "interface/audio_conference_mixer_defines.h",
+ "include/audio_conference_mixer.h",
+ "include/audio_conference_mixer_defines.h",
"source/audio_conference_mixer_impl.cc",
"source/audio_conference_mixer_impl.h",
"source/audio_frame_manipulator.cc",
diff --git a/webrtc/modules/audio_conference_mixer/OWNERS b/webrtc/modules/audio_conference_mixer/OWNERS
index 34bc7389ba..ea2062a9a0 100644
--- a/webrtc/modules/audio_conference_mixer/OWNERS
+++ b/webrtc/modules/audio_conference_mixer/OWNERS
@@ -1,3 +1,8 @@
-andrew@webrtc.org
+minyue@webrtc.org
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
per-file BUILD.gn=kjellander@webrtc.org
diff --git a/webrtc/modules/audio_conference_mixer/audio_conference_mixer.gypi b/webrtc/modules/audio_conference_mixer/audio_conference_mixer.gypi
index 5aa3cc449b..9d7179504c 100644
--- a/webrtc/modules/audio_conference_mixer/audio_conference_mixer.gypi
+++ b/webrtc/modules/audio_conference_mixer/audio_conference_mixer.gypi
@@ -17,8 +17,8 @@
'<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
],
'sources': [
- 'interface/audio_conference_mixer.h',
- 'interface/audio_conference_mixer_defines.h',
+ 'include/audio_conference_mixer.h',
+ 'include/audio_conference_mixer_defines.h',
'source/audio_frame_manipulator.cc',
'source/audio_frame_manipulator.h',
'source/memory_pool.h',
diff --git a/webrtc/modules/audio_conference_mixer/include/audio_conference_mixer.h b/webrtc/modules/audio_conference_mixer/include/audio_conference_mixer.h
new file mode 100644
index 0000000000..7370442704
--- /dev/null
+++ b/webrtc/modules/audio_conference_mixer/include/audio_conference_mixer.h
@@ -0,0 +1,77 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_INCLUDE_AUDIO_CONFERENCE_MIXER_H_
+#define WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_INCLUDE_AUDIO_CONFERENCE_MIXER_H_
+
+#include "webrtc/modules/audio_conference_mixer/include/audio_conference_mixer_defines.h"
+#include "webrtc/modules/include/module.h"
+#include "webrtc/modules/include/module_common_types.h"
+
+namespace webrtc {
+class AudioMixerOutputReceiver;
+class MixerParticipant;
+class Trace;
+
+class AudioConferenceMixer : public Module
+{
+public:
+ enum {kMaximumAmountOfMixedParticipants = 3};
+ enum Frequency
+ {
+ kNbInHz = 8000,
+ kWbInHz = 16000,
+ kSwbInHz = 32000,
+ kFbInHz = 48000,
+ kLowestPossible = -1,
+ kDefaultFrequency = kWbInHz
+ };
+
+ // Factory method. Constructor disabled.
+ static AudioConferenceMixer* Create(int id);
+ virtual ~AudioConferenceMixer() {}
+
+ // Module functions
+ int64_t TimeUntilNextProcess() override = 0;
+ int32_t Process() override = 0;
+
+ // Register/unregister a callback class for receiving the mixed audio.
+ virtual int32_t RegisterMixedStreamCallback(
+ AudioMixerOutputReceiver* receiver) = 0;
+ virtual int32_t UnRegisterMixedStreamCallback() = 0;
+
+ // Add/remove participants as candidates for mixing.
+ virtual int32_t SetMixabilityStatus(MixerParticipant* participant,
+ bool mixable) = 0;
+ // Returns true if a participant is a candidate for mixing.
+ virtual bool MixabilityStatus(
+ const MixerParticipant& participant) const = 0;
+
+ // Inform the mixer that the participant should always be mixed and not
+ // count toward the number of mixed participants. Note that a participant
+ // must have been added to the mixer (by calling SetMixabilityStatus())
+ // before this function can be successfully called.
+ virtual int32_t SetAnonymousMixabilityStatus(
+ MixerParticipant* participant, bool mixable) = 0;
+ // Returns true if the participant is mixed anonymously.
+ virtual bool AnonymousMixabilityStatus(
+ const MixerParticipant& participant) const = 0;
+
+ // Set the minimum sampling frequency at which to mix. The mixing algorithm
+    // may still choose to mix at a higher sampling frequency to avoid
+ // downsampling of audio contributing to the mixed audio.
+ virtual int32_t SetMinimumMixingFrequency(Frequency freq) = 0;
+
+protected:
+ AudioConferenceMixer() {}
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_INCLUDE_AUDIO_CONFERENCE_MIXER_H_
diff --git a/webrtc/modules/audio_conference_mixer/include/audio_conference_mixer_defines.h b/webrtc/modules/audio_conference_mixer/include/audio_conference_mixer_defines.h
new file mode 100644
index 0000000000..5d58f42435
--- /dev/null
+++ b/webrtc/modules/audio_conference_mixer/include/audio_conference_mixer_defines.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_INCLUDE_AUDIO_CONFERENCE_MIXER_DEFINES_H_
+#define WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_INCLUDE_AUDIO_CONFERENCE_MIXER_DEFINES_H_
+
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+class MixHistory;
+
+// A callback class that all mixer participants must inherit from/implement.
+class MixerParticipant
+{
+public:
+ // The implementation of this function should update audioFrame with new
+ // audio every time it's called.
+ //
+ // If it returns -1, the frame will not be added to the mix.
+ virtual int32_t GetAudioFrame(int32_t id,
+ AudioFrame* audioFrame) = 0;
+
+ // Returns true if the participant was mixed this mix iteration.
+ bool IsMixed() const;
+
+ // This function specifies the sampling frequency needed for the AudioFrame
+ // for future GetAudioFrame(..) calls.
+ virtual int32_t NeededFrequency(int32_t id) const = 0;
+
+ MixHistory* _mixHistory;
+protected:
+ MixerParticipant();
+ virtual ~MixerParticipant();
+};
+
+class AudioMixerOutputReceiver
+{
+public:
+ // This callback function provides the mixed audio for this mix iteration.
+    // Note that uniqueAudioFrames is an array of AudioFrame pointers whose
+    // length is given by the |size| parameter.
+ virtual void NewMixedAudio(const int32_t id,
+ const AudioFrame& generalAudioFrame,
+ const AudioFrame** uniqueAudioFrames,
+ const uint32_t size) = 0;
+protected:
+ AudioMixerOutputReceiver() {}
+ virtual ~AudioMixerOutputReceiver() {}
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_INCLUDE_AUDIO_CONFERENCE_MIXER_DEFINES_H_
diff --git a/webrtc/modules/audio_conference_mixer/interface/audio_conference_mixer.h b/webrtc/modules/audio_conference_mixer/interface/audio_conference_mixer.h
deleted file mode 100644
index 7ff39579ee..0000000000
--- a/webrtc/modules/audio_conference_mixer/interface/audio_conference_mixer.h
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_INTERFACE_AUDIO_CONFERENCE_MIXER_H_
-#define WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_INTERFACE_AUDIO_CONFERENCE_MIXER_H_
-
-#include "webrtc/modules/audio_conference_mixer/interface/audio_conference_mixer_defines.h"
-#include "webrtc/modules/interface/module.h"
-#include "webrtc/modules/interface/module_common_types.h"
-
-namespace webrtc {
-class AudioMixerOutputReceiver;
-class MixerParticipant;
-class Trace;
-
-class AudioConferenceMixer : public Module
-{
-public:
- enum {kMaximumAmountOfMixedParticipants = 3};
- enum Frequency
- {
- kNbInHz = 8000,
- kWbInHz = 16000,
- kSwbInHz = 32000,
- kFbInHz = 48000,
- kLowestPossible = -1,
- kDefaultFrequency = kWbInHz
- };
-
- // Factory method. Constructor disabled.
- static AudioConferenceMixer* Create(int id);
- virtual ~AudioConferenceMixer() {}
-
- // Module functions
- int64_t TimeUntilNextProcess() override = 0;
- int32_t Process() override = 0;
-
- // Register/unregister a callback class for receiving the mixed audio.
- virtual int32_t RegisterMixedStreamCallback(
- AudioMixerOutputReceiver* receiver) = 0;
- virtual int32_t UnRegisterMixedStreamCallback() = 0;
-
- // Add/remove participants as candidates for mixing.
- virtual int32_t SetMixabilityStatus(MixerParticipant* participant,
- bool mixable) = 0;
- // Returns true if a participant is a candidate for mixing.
- virtual bool MixabilityStatus(
- const MixerParticipant& participant) const = 0;
-
- // Inform the mixer that the participant should always be mixed and not
- // count toward the number of mixed participants. Note that a participant
- // must have been added to the mixer (by calling SetMixabilityStatus())
- // before this function can be successfully called.
- virtual int32_t SetAnonymousMixabilityStatus(
- MixerParticipant* participant, bool mixable) = 0;
- // Returns true if the participant is mixed anonymously.
- virtual bool AnonymousMixabilityStatus(
- const MixerParticipant& participant) const = 0;
-
- // Set the minimum sampling frequency at which to mix. The mixing algorithm
- // may still choose to mix at a higher samling frequency to avoid
- // downsampling of audio contributing to the mixed audio.
- virtual int32_t SetMinimumMixingFrequency(Frequency freq) = 0;
-
-protected:
- AudioConferenceMixer() {}
-};
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_INTERFACE_AUDIO_CONFERENCE_MIXER_H_
diff --git a/webrtc/modules/audio_conference_mixer/interface/audio_conference_mixer_defines.h b/webrtc/modules/audio_conference_mixer/interface/audio_conference_mixer_defines.h
deleted file mode 100644
index d15b7fca02..0000000000
--- a/webrtc/modules/audio_conference_mixer/interface/audio_conference_mixer_defines.h
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_INTERFACE_AUDIO_CONFERENCE_MIXER_DEFINES_H_
-#define WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_INTERFACE_AUDIO_CONFERENCE_MIXER_DEFINES_H_
-
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-class MixHistory;
-
-// A callback class that all mixer participants must inherit from/implement.
-class MixerParticipant
-{
-public:
- // The implementation of this function should update audioFrame with new
- // audio every time it's called.
- //
- // If it returns -1, the frame will not be added to the mix.
- virtual int32_t GetAudioFrame(int32_t id,
- AudioFrame* audioFrame) = 0;
-
- // Returns true if the participant was mixed this mix iteration.
- bool IsMixed() const;
-
- // This function specifies the sampling frequency needed for the AudioFrame
- // for future GetAudioFrame(..) calls.
- virtual int32_t NeededFrequency(int32_t id) const = 0;
-
- MixHistory* _mixHistory;
-protected:
- MixerParticipant();
- virtual ~MixerParticipant();
-};
-
-class AudioMixerOutputReceiver
-{
-public:
- // This callback function provides the mixed audio for this mix iteration.
- // Note that uniqueAudioFrames is an array of AudioFrame pointers with the
- // size according to the size parameter.
- virtual void NewMixedAudio(const int32_t id,
- const AudioFrame& generalAudioFrame,
- const AudioFrame** uniqueAudioFrames,
- const uint32_t size) = 0;
-protected:
- AudioMixerOutputReceiver() {}
- virtual ~AudioMixerOutputReceiver() {}
-};
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_INTERFACE_AUDIO_CONFERENCE_MIXER_DEFINES_H_
diff --git a/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc b/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc
index 2d2cf9dbb8..afb060f46d 100644
--- a/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc
+++ b/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc
@@ -8,11 +8,11 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_conference_mixer/interface/audio_conference_mixer_defines.h"
+#include "webrtc/modules/audio_conference_mixer/include/audio_conference_mixer_defines.h"
#include "webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h"
#include "webrtc/modules/audio_conference_mixer/source/audio_frame_manipulator.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
-#include "webrtc/modules/utility/interface/audio_frame_operations.h"
+#include "webrtc/modules/utility/include/audio_frame_operations.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h"
@@ -50,8 +50,8 @@ void MixFrames(AudioFrame* mixed_frame, AudioFrame* frame, bool use_limiter) {
}
// Return the max number of channels from a |list| composed of AudioFrames.
-int MaxNumChannels(const AudioFrameList* list) {
- int max_num_channels = 1;
+size_t MaxNumChannels(const AudioFrameList* list) {
+ size_t max_num_channels = 1;
for (AudioFrameList::const_iterator iter = list->begin();
iter != list->end();
++iter) {
@@ -278,7 +278,7 @@ int32_t AudioConferenceMixerImpl::Process() {
// with an API instead of dynamically.
// Find the max channels over all mixing lists.
- const int num_mixed_channels = std::max(MaxNumChannels(&mixList),
+ const size_t num_mixed_channels = std::max(MaxNumChannels(&mixList),
std::max(MaxNumChannels(&additionalFramesList),
MaxNumChannels(&rampOutList)));
diff --git a/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h b/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h
index bc9a27e9f0..2466112769 100644
--- a/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h
+++ b/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h
@@ -16,10 +16,10 @@
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/audio_conference_mixer/interface/audio_conference_mixer.h"
+#include "webrtc/modules/audio_conference_mixer/include/audio_conference_mixer.h"
#include "webrtc/modules/audio_conference_mixer/source/memory_pool.h"
#include "webrtc/modules/audio_conference_mixer/source/time_scheduler.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
namespace webrtc {
class AudioProcessing;
diff --git a/webrtc/modules/audio_conference_mixer/source/audio_frame_manipulator.cc b/webrtc/modules/audio_conference_mixer/source/audio_frame_manipulator.cc
index 636698e9c1..9c5d3b939d 100644
--- a/webrtc/modules/audio_conference_mixer/source/audio_frame_manipulator.cc
+++ b/webrtc/modules/audio_conference_mixer/source/audio_frame_manipulator.cc
@@ -9,7 +9,7 @@
*/
#include "webrtc/modules/audio_conference_mixer/source/audio_frame_manipulator.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/typedefs.h"
namespace {
diff --git a/webrtc/modules/audio_conference_mixer/test/audio_conference_mixer_unittest.cc b/webrtc/modules/audio_conference_mixer/test/audio_conference_mixer_unittest.cc
index d4fbd205f1..293bfa0db9 100644
--- a/webrtc/modules/audio_conference_mixer/test/audio_conference_mixer_unittest.cc
+++ b/webrtc/modules/audio_conference_mixer/test/audio_conference_mixer_unittest.cc
@@ -10,8 +10,8 @@
#include "testing/gmock/include/gmock/gmock.h"
#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/audio_conference_mixer/interface/audio_conference_mixer.h"
-#include "webrtc/modules/audio_conference_mixer/interface/audio_conference_mixer_defines.h"
+#include "webrtc/modules/audio_conference_mixer/include/audio_conference_mixer.h"
+#include "webrtc/modules/audio_conference_mixer/include/audio_conference_mixer_defines.h"
namespace webrtc {
diff --git a/webrtc/modules/audio_device/BUILD.gn b/webrtc/modules/audio_device/BUILD.gn
index 8875178c15..5897176845 100644
--- a/webrtc/modules/audio_device/BUILD.gn
+++ b/webrtc/modules/audio_device/BUILD.gn
@@ -10,7 +10,7 @@ import("../../build/webrtc.gni")
config("audio_device_config") {
include_dirs = [
- "../interface",
+ "../include",
"include",
"dummy", # Contains dummy audio device implementations.
]
@@ -50,54 +50,50 @@ source_set("audio_device") {
include_dirs += [ "android" ]
}
defines = []
+ cflags = []
if (rtc_include_internal_audio_device) {
defines += [ "WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE" ]
sources += [
- "android/audio_device_template.h",
- "android/audio_manager.cc",
- "android/audio_manager.h",
- "android/audio_record_jni.cc",
- "android/audio_record_jni.h",
- "android/audio_track_jni.cc",
- "android/audio_track_jni.h",
- "android/opensles_common.cc",
- "android/opensles_common.h",
- "android/opensles_player.cc",
- "android/opensles_player.h",
"audio_device_impl.cc",
"audio_device_impl.h",
- "ios/audio_device_ios.h",
- "ios/audio_device_ios.mm",
- "linux/alsasymboltable_linux.cc",
- "linux/alsasymboltable_linux.h",
- "linux/audio_device_alsa_linux.cc",
- "linux/audio_device_alsa_linux.h",
- "linux/audio_mixer_manager_alsa_linux.cc",
- "linux/audio_mixer_manager_alsa_linux.h",
- "linux/latebindingsymboltable_linux.cc",
- "linux/latebindingsymboltable_linux.h",
- "mac/audio_device_mac.cc",
- "mac/audio_device_mac.h",
- "mac/audio_mixer_manager_mac.cc",
- "mac/audio_mixer_manager_mac.h",
- "mac/portaudio/pa_memorybarrier.h",
- "mac/portaudio/pa_ringbuffer.c",
- "mac/portaudio/pa_ringbuffer.h",
- "win/audio_device_core_win.cc",
- "win/audio_device_core_win.h",
- "win/audio_device_wave_win.cc",
- "win/audio_device_wave_win.h",
- "win/audio_mixer_manager_win.cc",
- "win/audio_mixer_manager_win.h",
]
+ if (is_android) {
+ sources += [
+ "android/audio_device_template.h",
+ "android/audio_manager.cc",
+ "android/audio_manager.h",
+ "android/audio_record_jni.cc",
+ "android/audio_record_jni.h",
+ "android/audio_track_jni.cc",
+ "android/audio_track_jni.h",
+ "android/build_info.cc",
+ "android/build_info.h",
+ "android/opensles_common.cc",
+ "android/opensles_common.h",
+ "android/opensles_player.cc",
+ "android/opensles_player.h",
+ ]
+ libs = [
+ "log",
+ "OpenSLES",
+ ]
+ }
if (is_linux) {
+ sources += [
+ "linux/alsasymboltable_linux.cc",
+ "linux/alsasymboltable_linux.h",
+ "linux/audio_device_alsa_linux.cc",
+ "linux/audio_device_alsa_linux.h",
+ "linux/audio_mixer_manager_alsa_linux.cc",
+ "linux/audio_mixer_manager_alsa_linux.h",
+ "linux/latebindingsymboltable_linux.cc",
+ "linux/latebindingsymboltable_linux.h",
+ ]
defines += [ "LINUX_ALSA" ]
-
libs = [
"dl",
"X11",
]
-
if (rtc_include_pulse_audio) {
sources += [
"linux/audio_device_pulse_linux.cc",
@@ -107,26 +103,47 @@ source_set("audio_device") {
"linux/pulseaudiosymboltable_linux.cc",
"linux/pulseaudiosymboltable_linux.h",
]
-
defines += [ "LINUX_PULSE" ]
}
}
if (is_mac) {
+ sources += [
+ "mac/audio_device_mac.cc",
+ "mac/audio_device_mac.h",
+ "mac/audio_mixer_manager_mac.cc",
+ "mac/audio_mixer_manager_mac.h",
+ "mac/portaudio/pa_memorybarrier.h",
+ "mac/portaudio/pa_ringbuffer.c",
+ "mac/portaudio/pa_ringbuffer.h",
+ ]
libs = [
"AudioToolbox.framework",
"CoreAudio.framework",
]
}
if (is_ios) {
+ sources += [
+ "ios/audio_device_ios.h",
+ "ios/audio_device_ios.mm",
+ "ios/audio_device_not_implemented_ios.mm",
+ ]
cflags += [ "-fobjc-arc" ] # CLANG_ENABLE_OBJC_ARC = YES.
-
libs = [
"AudioToolbox.framework",
"AVFoundation.framework",
"Foundation.framework",
+ "UIKit.framework",
]
}
if (is_win) {
+ sources += [
+ "win/audio_device_core_win.cc",
+ "win/audio_device_core_win.h",
+ "win/audio_device_wave_win.cc",
+ "win/audio_device_wave_win.h",
+ "win/audio_mixer_manager_win.cc",
+ "win/audio_mixer_manager_win.h",
+ ]
libs = [
# Required for the built-in WASAPI AEC.
"dmoguids.lib",
diff --git a/webrtc/modules/audio_device/OWNERS b/webrtc/modules/audio_device/OWNERS
index bb11a4ec0e..12d67c035b 100644
--- a/webrtc/modules/audio_device/OWNERS
+++ b/webrtc/modules/audio_device/OWNERS
@@ -4,8 +4,6 @@ niklas.enbom@webrtc.org
tkchin@webrtc.org
xians@webrtc.org
-per-file *.isolate=kjellander@webrtc.org
-
# These are for the common case of adding or renaming files. If you're doing
# structural changes, please get a review from a reviewer in this file.
per-file *.gyp=*
diff --git a/webrtc/modules/audio_device/android/audio_device_unittest.cc b/webrtc/modules/audio_device/android/audio_device_unittest.cc
index 7b2d6354c4..768047df51 100644
--- a/webrtc/modules/audio_device/android/audio_device_unittest.cc
+++ b/webrtc/modules/audio_device/android/audio_device_unittest.cc
@@ -383,7 +383,7 @@ class MockAudioTransport : public AudioTransport {
int32_t(const void* audioSamples,
const size_t nSamples,
const size_t nBytesPerSample,
- const uint8_t nChannels,
+ const size_t nChannels,
const uint32_t samplesPerSec,
const uint32_t totalDelayMS,
const int32_t clockDrift,
@@ -393,7 +393,7 @@ class MockAudioTransport : public AudioTransport {
MOCK_METHOD8(NeedMorePlayData,
int32_t(const size_t nSamples,
const size_t nBytesPerSample,
- const uint8_t nChannels,
+ const size_t nChannels,
const uint32_t samplesPerSec,
void* audioSamples,
size_t& nSamplesOut,
@@ -423,7 +423,7 @@ class MockAudioTransport : public AudioTransport {
int32_t RealRecordedDataIsAvailable(const void* audioSamples,
const size_t nSamples,
const size_t nBytesPerSample,
- const uint8_t nChannels,
+ const size_t nChannels,
const uint32_t samplesPerSec,
const uint32_t totalDelayMS,
const int32_t clockDrift,
@@ -445,7 +445,7 @@ class MockAudioTransport : public AudioTransport {
int32_t RealNeedMorePlayData(const size_t nSamples,
const size_t nBytesPerSample,
- const uint8_t nChannels,
+ const size_t nChannels,
const uint32_t samplesPerSec,
void* audioSamples,
size_t& nSamplesOut,
@@ -521,10 +521,10 @@ class AudioDeviceTest : public ::testing::Test {
int record_sample_rate() const {
return record_parameters_.sample_rate();
}
- int playout_channels() const {
+ size_t playout_channels() const {
return playout_parameters_.channels();
}
- int record_channels() const {
+ size_t record_channels() const {
return record_parameters_.channels();
}
size_t playout_frames_per_10ms_buffer() const {
@@ -931,7 +931,7 @@ TEST_F(AudioDeviceTest, StartPlayoutAndRecordingVerifyCallbacks) {
// not contain any explicit verification that the audio quality is perfect.
TEST_F(AudioDeviceTest, RunPlayoutWithFileAsSource) {
// TODO(henrika): extend test when mono output is supported.
- EXPECT_EQ(1, playout_channels());
+ EXPECT_EQ(1u, playout_channels());
NiceMock<MockAudioTransport> mock(kPlayout);
const int num_callbacks = kFilePlayTimeInSec * kNumCallbacksPerSecond;
std::string file_name = GetFileName(playout_sample_rate());
diff --git a/webrtc/modules/audio_device/android/audio_manager.cc b/webrtc/modules/audio_device/android/audio_manager.cc
index 169a1929ce..1d08a6adc0 100644
--- a/webrtc/modules/audio_device/android/audio_manager.cc
+++ b/webrtc/modules/audio_device/android/audio_manager.cc
@@ -10,13 +10,15 @@
#include "webrtc/modules/audio_device/android/audio_manager.h"
+#include <utility>
+
#include <android/log.h>
#include "webrtc/base/arraysize.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/audio_device/android/audio_common.h"
-#include "webrtc/modules/utility/interface/helpers_android.h"
+#include "webrtc/modules/utility/include/helpers_android.h"
#define TAG "AudioManager"
#define ALOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, TAG, __VA_ARGS__)
@@ -29,15 +31,16 @@ namespace webrtc {
// AudioManager::JavaAudioManager implementation
AudioManager::JavaAudioManager::JavaAudioManager(
- NativeRegistration* native_reg, rtc::scoped_ptr<GlobalRef> audio_manager)
- : audio_manager_(audio_manager.Pass()),
+ NativeRegistration* native_reg,
+ rtc::scoped_ptr<GlobalRef> audio_manager)
+ : audio_manager_(std::move(audio_manager)),
init_(native_reg->GetMethodId("init", "()Z")),
dispose_(native_reg->GetMethodId("dispose", "()V")),
is_communication_mode_enabled_(
native_reg->GetMethodId("isCommunicationModeEnabled", "()Z")),
is_device_blacklisted_for_open_sles_usage_(
- native_reg->GetMethodId(
- "isDeviceBlacklistedForOpenSLESUsage", "()Z")) {
+ native_reg->GetMethodId("isDeviceBlacklistedForOpenSLESUsage",
+ "()Z")) {
ALOGD("JavaAudioManager::ctor%s", GetThreadInfo().c_str());
}
@@ -71,13 +74,12 @@ AudioManager::AudioManager()
hardware_agc_(false),
hardware_ns_(false),
low_latency_playout_(false),
- delay_estimate_in_milliseconds_(0),
- output_stream_type_(0) {
+ delay_estimate_in_milliseconds_(0) {
ALOGD("ctor%s", GetThreadInfo().c_str());
RTC_CHECK(j_environment_);
JNINativeMethod native_methods[] = {
{"nativeCacheAudioParameters",
- "(IIZZZZIIIJ)V",
+ "(IIZZZZIIJ)V",
reinterpret_cast<void*>(&webrtc::AudioManager::CacheAudioParameters)}};
j_native_registration_ = j_environment_->RegisterNatives(
"org/webrtc/voiceengine/WebRtcAudioManager",
@@ -180,14 +182,12 @@ void JNICALL AudioManager::CacheAudioParameters(JNIEnv* env,
jboolean low_latency_output,
jint output_buffer_size,
jint input_buffer_size,
- jint output_stream_type,
jlong native_audio_manager) {
webrtc::AudioManager* this_object =
reinterpret_cast<webrtc::AudioManager*>(native_audio_manager);
this_object->OnCacheAudioParameters(
env, sample_rate, channels, hardware_aec, hardware_agc, hardware_ns,
- low_latency_output, output_buffer_size, input_buffer_size,
- output_stream_type);
+ low_latency_output, output_buffer_size, input_buffer_size);
}
void AudioManager::OnCacheAudioParameters(JNIEnv* env,
@@ -198,8 +198,7 @@ void AudioManager::OnCacheAudioParameters(JNIEnv* env,
jboolean hardware_ns,
jboolean low_latency_output,
jint output_buffer_size,
- jint input_buffer_size,
- jint output_stream_type) {
+ jint input_buffer_size) {
ALOGD("OnCacheAudioParameters%s", GetThreadInfo().c_str());
ALOGD("hardware_aec: %d", hardware_aec);
ALOGD("hardware_agc: %d", hardware_agc);
@@ -209,17 +208,15 @@ void AudioManager::OnCacheAudioParameters(JNIEnv* env,
ALOGD("channels: %d", channels);
ALOGD("output_buffer_size: %d", output_buffer_size);
ALOGD("input_buffer_size: %d", input_buffer_size);
- ALOGD("output_stream_type: %d", output_stream_type);
RTC_DCHECK(thread_checker_.CalledOnValidThread());
hardware_aec_ = hardware_aec;
hardware_agc_ = hardware_agc;
hardware_ns_ = hardware_ns;
low_latency_playout_ = low_latency_output;
- output_stream_type_ = output_stream_type;
// TODO(henrika): add support for stereo output.
- playout_parameters_.reset(sample_rate, channels,
+ playout_parameters_.reset(sample_rate, static_cast<size_t>(channels),
static_cast<size_t>(output_buffer_size));
- record_parameters_.reset(sample_rate, channels,
+ record_parameters_.reset(sample_rate, static_cast<size_t>(channels),
static_cast<size_t>(input_buffer_size));
}
diff --git a/webrtc/modules/audio_device/android/audio_manager.h b/webrtc/modules/audio_device/android/audio_manager.h
index 5f23147b8a..26caf61afe 100644
--- a/webrtc/modules/audio_device/android/audio_manager.h
+++ b/webrtc/modules/audio_device/android/audio_manager.h
@@ -19,8 +19,8 @@
#include "webrtc/modules/audio_device/audio_device_config.h"
#include "webrtc/modules/audio_device/include/audio_device_defines.h"
#include "webrtc/modules/audio_device/audio_device_generic.h"
-#include "webrtc/modules/utility/interface/helpers_android.h"
-#include "webrtc/modules/utility/interface/jvm_android.h"
+#include "webrtc/modules/utility/include/helpers_android.h"
+#include "webrtc/modules/utility/include/jvm_android.h"
namespace webrtc {
@@ -93,8 +93,6 @@ class AudioManager {
// webrtc::kHighLatencyModeDelayEstimateInMilliseconds.
int GetDelayEstimateInMilliseconds() const;
- int OutputStreamType() const { return output_stream_type_; }
-
private:
// Called from Java side so we can cache the native audio parameters.
// This method will be called by the WebRtcAudioManager constructor, i.e.
@@ -109,7 +107,6 @@ class AudioManager {
jboolean low_latency_output,
jint output_buffer_size,
jint input_buffer_size,
- jint output_stream_type,
jlong native_audio_manager);
void OnCacheAudioParameters(JNIEnv* env,
jint sample_rate,
@@ -119,8 +116,7 @@ class AudioManager {
jboolean hardware_ns,
jboolean low_latency_output,
jint output_buffer_size,
- jint input_buffer_size,
- jint output_stream_type);
+ jint input_buffer_size);
// Stores thread ID in the constructor.
// We can then use ThreadChecker::CalledOnValidThread() to ensure that
@@ -159,13 +155,6 @@ class AudioManager {
// device supports low-latency output or not.
int delay_estimate_in_milliseconds_;
- // Contains the output stream type provided to this class at construction by
- // the AudioManager in Java land. Possible values are:
- // - AudioManager.STREAM_VOICE_CALL = 0
- // - AudioManager.STREAM_RING = 2
- // - AudioManager.STREAM_MUSIC = 3
- int output_stream_type_;
-
// Contains native parameters (e.g. sample rate, channel configuration).
// Set at construction in OnCacheAudioParameters() which is called from
// Java on the same thread as this object is created on.
diff --git a/webrtc/modules/audio_device/android/audio_manager_unittest.cc b/webrtc/modules/audio_device/android/audio_manager_unittest.cc
index a5bc840dff..ddae73067a 100644
--- a/webrtc/modules/audio_device/android/audio_manager_unittest.cc
+++ b/webrtc/modules/audio_device/android/audio_manager_unittest.cc
@@ -82,14 +82,14 @@ TEST_F(AudioManagerTest, ShowAudioParameterInfo) {
PRINT("%saudio layer: %s\n", kTag,
low_latency_out ? "Low latency OpenSL" : "Java/JNI based AudioTrack");
PRINT("%ssample rate: %d Hz\n", kTag, playout_parameters_.sample_rate());
- PRINT("%schannels: %d\n", kTag, playout_parameters_.channels());
+ PRINT("%schannels: %" PRIuS "\n", kTag, playout_parameters_.channels());
PRINT("%sframes per buffer: %" PRIuS " <=> %.2f ms\n", kTag,
playout_parameters_.frames_per_buffer(),
playout_parameters_.GetBufferSizeInMilliseconds());
PRINT("RECORD: \n");
PRINT("%saudio layer: %s\n", kTag, "Java/JNI based AudioRecord");
PRINT("%ssample rate: %d Hz\n", kTag, record_parameters_.sample_rate());
- PRINT("%schannels: %d\n", kTag, record_parameters_.channels());
+ PRINT("%schannels: %" PRIuS "\n", kTag, record_parameters_.channels());
PRINT("%sframes per buffer: %" PRIuS " <=> %.2f ms\n", kTag,
record_parameters_.frames_per_buffer(),
record_parameters_.GetBufferSizeInMilliseconds());
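
PRIuS is the format macro WebRTC uses for size_t arguments (from webrtc/base/format_macros.h); on POSIX toolchains it expands to "zu", with an MSVC-specific length modifier on Windows. A standalone equivalent of the calls above, using plain "%zu":

    #include <cstddef>
    #include <cstdio>

    // Same effect as PRINT("%schannels: %" PRIuS "\n", ...) on POSIX.
    void PrintChannels(const char* tag, size_t channels) {
      printf("%schannels: %zu\n", tag, channels);
    }
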
@@ -119,7 +119,7 @@ TEST_F(AudioManagerTest, AudioParametersWithDefaultConstruction) {
AudioParameters params;
EXPECT_FALSE(params.is_valid());
EXPECT_EQ(0, params.sample_rate());
- EXPECT_EQ(0, params.channels());
+ EXPECT_EQ(0U, params.channels());
EXPECT_EQ(0U, params.frames_per_buffer());
EXPECT_EQ(0U, params.frames_per_10ms_buffer());
EXPECT_EQ(0U, params.GetBytesPerFrame());
@@ -131,7 +131,7 @@ TEST_F(AudioManagerTest, AudioParametersWithDefaultConstruction) {
// Basic test of the AudioParameters class using non default construction.
TEST_F(AudioManagerTest, AudioParametersWithNonDefaultConstruction) {
const int kSampleRate = 48000;
- const int kChannels = 1;
+ const size_t kChannels = 1;
const size_t kFramesPerBuffer = 480;
const size_t kFramesPer10msBuffer = 480;
const size_t kBytesPerFrame = 2;
diff --git a/webrtc/modules/audio_device/android/audio_record_jni.cc b/webrtc/modules/audio_device/android/audio_record_jni.cc
index ba3212afc3..5dda7249ac 100644
--- a/webrtc/modules/audio_device/android/audio_record_jni.cc
+++ b/webrtc/modules/audio_device/android/audio_record_jni.cc
@@ -10,6 +10,8 @@
#include "webrtc/modules/audio_device/android/audio_record_jni.h"
+#include <utility>
+
#include <android/log.h>
#include "webrtc/base/arraysize.h"
@@ -28,23 +30,20 @@ namespace webrtc {
// AudioRecordJni::JavaAudioRecord implementation.
AudioRecordJni::JavaAudioRecord::JavaAudioRecord(
- NativeRegistration* native_reg, rtc::scoped_ptr<GlobalRef> audio_record)
- : audio_record_(audio_record.Pass()),
+ NativeRegistration* native_reg,
+ rtc::scoped_ptr<GlobalRef> audio_record)
+ : audio_record_(std::move(audio_record)),
init_recording_(native_reg->GetMethodId("initRecording", "(II)I")),
start_recording_(native_reg->GetMethodId("startRecording", "()Z")),
stop_recording_(native_reg->GetMethodId("stopRecording", "()Z")),
- enable_built_in_aec_(native_reg->GetMethodId(
- "enableBuiltInAEC", "(Z)Z")),
- enable_built_in_agc_(native_reg->GetMethodId(
- "enableBuiltInAGC", "(Z)Z")),
- enable_built_in_ns_(native_reg->GetMethodId(
- "enableBuiltInNS", "(Z)Z")) {
-}
+ enable_built_in_aec_(native_reg->GetMethodId("enableBuiltInAEC", "(Z)Z")),
+ enable_built_in_agc_(native_reg->GetMethodId("enableBuiltInAGC", "(Z)Z")),
+ enable_built_in_ns_(native_reg->GetMethodId("enableBuiltInNS", "(Z)Z")) {}
AudioRecordJni::JavaAudioRecord::~JavaAudioRecord() {}
int AudioRecordJni::JavaAudioRecord::InitRecording(
- int sample_rate, int channels) {
+ int sample_rate, size_t channels) {
return audio_record_->CallIntMethod(init_recording_,
static_cast<jint>(sample_rate),
static_cast<jint>(channels));
@@ -186,8 +185,8 @@ void AudioRecordJni::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
const int sample_rate_hz = audio_parameters_.sample_rate();
ALOGD("SetRecordingSampleRate(%d)", sample_rate_hz);
audio_device_buffer_->SetRecordingSampleRate(sample_rate_hz);
- const int channels = audio_parameters_.channels();
- ALOGD("SetRecordingChannels(%d)", channels);
+ const size_t channels = audio_parameters_.channels();
+ ALOGD("SetRecordingChannels(%" PRIuS ")", channels);
audio_device_buffer_->SetRecordingChannels(channels);
total_delay_in_milliseconds_ =
audio_manager_->GetDelayEstimateInMilliseconds();
diff --git a/webrtc/modules/audio_device/android/audio_record_jni.h b/webrtc/modules/audio_device/android/audio_record_jni.h
index efd516425a..766316a83a 100644
--- a/webrtc/modules/audio_device/android/audio_record_jni.h
+++ b/webrtc/modules/audio_device/android/audio_record_jni.h
@@ -17,8 +17,8 @@
#include "webrtc/modules/audio_device/android/audio_manager.h"
#include "webrtc/modules/audio_device/include/audio_device_defines.h"
#include "webrtc/modules/audio_device/audio_device_generic.h"
-#include "webrtc/modules/utility/interface/helpers_android.h"
-#include "webrtc/modules/utility/interface/jvm_android.h"
+#include "webrtc/modules/utility/include/helpers_android.h"
+#include "webrtc/modules/utility/include/jvm_android.h"
namespace webrtc {
@@ -49,7 +49,7 @@ class AudioRecordJni {
rtc::scoped_ptr<GlobalRef> audio_track);
~JavaAudioRecord();
- int InitRecording(int sample_rate, int channels);
+ int InitRecording(int sample_rate, size_t channels);
bool StartRecording();
bool StopRecording();
bool EnableBuiltInAEC(bool enable);
diff --git a/webrtc/modules/audio_device/android/audio_track_jni.cc b/webrtc/modules/audio_device/android/audio_track_jni.cc
index 29b21ae998..057e016405 100644
--- a/webrtc/modules/audio_device/android/audio_track_jni.cc
+++ b/webrtc/modules/audio_device/android/audio_track_jni.cc
@@ -11,6 +11,8 @@
#include "webrtc/modules/audio_device/android/audio_manager.h"
#include "webrtc/modules/audio_device/android/audio_track_jni.h"
+#include <utility>
+
#include <android/log.h>
#include "webrtc/base/arraysize.h"
@@ -28,16 +30,16 @@ namespace webrtc {
// AudioTrackJni::JavaAudioTrack implementation.
AudioTrackJni::JavaAudioTrack::JavaAudioTrack(
- NativeRegistration* native_reg, rtc::scoped_ptr<GlobalRef> audio_track)
- : audio_track_(audio_track.Pass()),
+ NativeRegistration* native_reg,
+ rtc::scoped_ptr<GlobalRef> audio_track)
+ : audio_track_(std::move(audio_track)),
init_playout_(native_reg->GetMethodId("initPlayout", "(II)V")),
start_playout_(native_reg->GetMethodId("startPlayout", "()Z")),
stop_playout_(native_reg->GetMethodId("stopPlayout", "()Z")),
set_stream_volume_(native_reg->GetMethodId("setStreamVolume", "(I)Z")),
- get_stream_max_volume_(native_reg->GetMethodId(
- "getStreamMaxVolume", "()I")),
- get_stream_volume_(native_reg->GetMethodId("getStreamVolume", "()I")) {
-}
+ get_stream_max_volume_(
+ native_reg->GetMethodId("getStreamMaxVolume", "()I")),
+ get_stream_volume_(native_reg->GetMethodId("getStreamVolume", "()I")) {}
AudioTrackJni::JavaAudioTrack::~JavaAudioTrack() {}
@@ -200,8 +202,8 @@ void AudioTrackJni::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
const int sample_rate_hz = audio_parameters_.sample_rate();
ALOGD("SetPlayoutSampleRate(%d)", sample_rate_hz);
audio_device_buffer_->SetPlayoutSampleRate(sample_rate_hz);
- const int channels = audio_parameters_.channels();
- ALOGD("SetPlayoutChannels(%d)", channels);
+ const size_t channels = audio_parameters_.channels();
+ ALOGD("SetPlayoutChannels(%" PRIuS ")", channels);
audio_device_buffer_->SetPlayoutChannels(channels);
}
diff --git a/webrtc/modules/audio_device/android/audio_track_jni.h b/webrtc/modules/audio_device/android/audio_track_jni.h
index 43bfcad657..067dc6c651 100644
--- a/webrtc/modules/audio_device/android/audio_track_jni.h
+++ b/webrtc/modules/audio_device/android/audio_track_jni.h
@@ -18,8 +18,8 @@
#include "webrtc/modules/audio_device/android/audio_manager.h"
#include "webrtc/modules/audio_device/include/audio_device_defines.h"
#include "webrtc/modules/audio_device/audio_device_generic.h"
-#include "webrtc/modules/utility/interface/helpers_android.h"
-#include "webrtc/modules/utility/interface/jvm_android.h"
+#include "webrtc/modules/utility/include/helpers_android.h"
+#include "webrtc/modules/utility/include/jvm_android.h"
namespace webrtc {
diff --git a/webrtc/modules/audio_device/android/build_info.cc b/webrtc/modules/audio_device/android/build_info.cc
index cb5dc293d7..6289697073 100644
--- a/webrtc/modules/audio_device/android/build_info.cc
+++ b/webrtc/modules/audio_device/android/build_info.cc
@@ -10,7 +10,7 @@
#include "webrtc/modules/audio_device/android/build_info.h"
-#include "webrtc/modules/utility/interface/helpers_android.h"
+#include "webrtc/modules/utility/include/helpers_android.h"
namespace webrtc {
diff --git a/webrtc/modules/audio_device/android/build_info.h b/webrtc/modules/audio_device/android/build_info.h
index d9b2871841..1490fa0772 100644
--- a/webrtc/modules/audio_device/android/build_info.h
+++ b/webrtc/modules/audio_device/android/build_info.h
@@ -14,7 +14,7 @@
#include <jni.h>
#include <string>
-#include "webrtc/modules/utility/interface/jvm_android.h"
+#include "webrtc/modules/utility/include/jvm_android.h"
namespace webrtc {
diff --git a/webrtc/modules/audio_device/android/ensure_initialized.cc b/webrtc/modules/audio_device/android/ensure_initialized.cc
index e8197b7ca0..b63aec1f27 100644
--- a/webrtc/modules/audio_device/android/ensure_initialized.cc
+++ b/webrtc/modules/audio_device/android/ensure_initialized.cc
@@ -14,11 +14,12 @@
// Note: this dependency is dangerous since it reaches into Chromium's base.
// There's a risk of e.g. macro clashes. This file may only be used in tests.
+#include "base/android/context_utils.h"
#include "base/android/jni_android.h"
#include "webrtc/base/checks.h"
#include "webrtc/modules/audio_device/android/audio_record_jni.h"
#include "webrtc/modules/audio_device/android/audio_track_jni.h"
-#include "webrtc/modules/utility/interface/jvm_android.h"
+#include "webrtc/modules/utility/include/jvm_android.h"
namespace webrtc {
namespace audiodevicemodule {
diff --git a/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioEffects.java b/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioEffects.java
index 9b90f4ab54..c3ab043868 100644
--- a/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioEffects.java
+++ b/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioEffects.java
@@ -10,6 +10,7 @@
package org.webrtc.voiceengine;
+import android.annotation.TargetApi;
import android.media.audiofx.AcousticEchoCanceler;
import android.media.audiofx.AudioEffect;
import android.media.audiofx.AudioEffect.Descriptor;
@@ -119,6 +120,7 @@ class WebRtcAudioEffects {
// Returns true if the platform AEC should be excluded based on its UUID.
// AudioEffect.queryEffects() can throw IllegalStateException.
+ @TargetApi(18)
private static boolean isAcousticEchoCancelerExcludedByUUID() {
for (Descriptor d : AudioEffect.queryEffects()) {
if (d.type.equals(AudioEffect.EFFECT_TYPE_AEC) &&
@@ -131,6 +133,7 @@ class WebRtcAudioEffects {
// Returns true if the platform AGC should be excluded based on its UUID.
// AudioEffect.queryEffects() can throw IllegalStateException.
+ @TargetApi(18)
private static boolean isAutomaticGainControlExcludedByUUID() {
for (Descriptor d : AudioEffect.queryEffects()) {
if (d.type.equals(AudioEffect.EFFECT_TYPE_AGC) &&
@@ -143,6 +146,7 @@ class WebRtcAudioEffects {
// Returns true if the platform NS should be excluded based on its UUID.
// AudioEffect.queryEffects() can throw IllegalStateException.
+ @TargetApi(18)
private static boolean isNoiseSuppressorExcludedByUUID() {
for (Descriptor d : AudioEffect.queryEffects()) {
if (d.type.equals(AudioEffect.EFFECT_TYPE_NS) &&
@@ -208,15 +212,6 @@ class WebRtcAudioEffects {
private WebRtcAudioEffects() {
Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
- for (Descriptor d : AudioEffect.queryEffects()) {
- if (effectTypeIsVoIP(d.type) || DEBUG) {
- // Only log information for VoIP effects (AEC, AEC and NS).
- Logging.d(TAG, "name: " + d.name + ", "
- + "mode: " + d.connectMode + ", "
- + "implementor: " + d.implementor + ", "
- + "UUID: " + d.uuid);
- }
- }
}
// Call this method to enable or disable the platform AEC. It modifies
@@ -282,6 +277,17 @@ class WebRtcAudioEffects {
assertTrue(agc == null);
assertTrue(ns == null);
+ // Log the available audio effects, but restrict the output to the "VoIP
+ // effects" (AEC, AGC and NS) unless DEBUG is set.
+ for (Descriptor d : AudioEffect.queryEffects()) {
+ if (effectTypeIsVoIP(d.type) || DEBUG) {
+ Logging.d(TAG, "name: " + d.name + ", "
+ + "mode: " + d.connectMode + ", "
+ + "implementor: " + d.implementor + ", "
+ + "UUID: " + d.uuid);
+ }
+ }
+
if (isAcousticEchoCancelerSupported()) {
// Create an AcousticEchoCanceler and attach it to the AudioRecord on
// the specified audio session.
@@ -366,7 +372,11 @@ class WebRtcAudioEffects {
// AudioEffect.Descriptor array that are actually not available on the device.
// As an example: Samsung Galaxy S6 includes an AGC in the descriptor but
// AutomaticGainControl.isAvailable() returns false.
+ @TargetApi(18)
private boolean effectTypeIsVoIP(UUID type) {
+ if (!WebRtcAudioUtils.runningOnJellyBeanMR2OrHigher())
+ return false;
+
return (AudioEffect.EFFECT_TYPE_AEC.equals(type)
&& isAcousticEchoCancelerSupported())
|| (AudioEffect.EFFECT_TYPE_AGC.equals(type)
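
The recurring pattern in this file pairs @TargetApi with an explicit SDK check: the annotation only silences the compile-time lint error, while the runtime guard keeps API 18 symbols from being reached on older devices. A minimal sketch of the same idiom, not part of the patch:

    // Assumes android.annotation.TargetApi, android.os.Build and
    // android.media.audiofx.AudioEffect are imported, as in this file.
    @TargetApi(18)
    private static boolean canQueryEffects() {
      if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR2)
        return false;  // AudioEffect.queryEffects() needs API 18.
      return AudioEffect.queryEffects() != null;
    }
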
diff --git a/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioManager.java b/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioManager.java
index cf2f03a2f1..1213f333d9 100644
--- a/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioManager.java
+++ b/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioManager.java
@@ -10,6 +10,7 @@
package org.webrtc.voiceengine;
+import android.annotation.TargetApi;
import android.content.Context;
import android.content.pm.PackageManager;
import android.media.AudioFormat;
@@ -33,11 +34,24 @@ import java.lang.Math;
// recommended to always use AudioManager.MODE_IN_COMMUNICATION.
// This class also adds support for output volume control of the
// STREAM_VOICE_CALL-type stream.
-class WebRtcAudioManager {
+public class WebRtcAudioManager {
private static final boolean DEBUG = false;
private static final String TAG = "WebRtcAudioManager";
+ private static boolean blacklistDeviceForOpenSLESUsage = false;
+ private static boolean blacklistDeviceForOpenSLESUsageIsOverridden = false;
+
+ // Call this method to override the default list of blacklisted devices
+ // specified in WebRtcAudioUtils.BLACKLISTED_OPEN_SL_ES_MODELS.
+ // Allows an app to take control over which devices to exclude from using
+ // the OpenSL ES audio output path.
+ public static synchronized void setBlacklistDeviceForOpenSLESUsage(
+ boolean enable) {
+ blacklistDeviceForOpenSLESUsageIsOverridden = true;
+ blacklistDeviceForOpenSLESUsage = enable;
+ }
+
// Default audio data format is PCM 16 bit per sample.
// Guaranteed to be supported by all devices.
private static final int BITS_PER_SAMPLE = 16;
@@ -71,7 +85,6 @@ class WebRtcAudioManager {
private int channels;
private int outputBufferSize;
private int inputBufferSize;
- private int outputStreamType;
WebRtcAudioManager(Context context, long nativeAudioManager) {
Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
@@ -85,7 +98,7 @@ class WebRtcAudioManager {
storeAudioParameters();
nativeCacheAudioParameters(
sampleRate, channels, hardwareAEC, hardwareAGC, hardwareNS,
- lowLatencyOutput, outputBufferSize, inputBufferSize, outputStreamType,
+ lowLatencyOutput, outputBufferSize, inputBufferSize,
nativeAudioManager);
}
@@ -110,8 +123,9 @@ class WebRtcAudioManager {
return (audioManager.getMode() == AudioManager.MODE_IN_COMMUNICATION);
}
- private boolean isDeviceBlacklistedForOpenSLESUsage() {
- boolean blacklisted =
+ private boolean isDeviceBlacklistedForOpenSLESUsage() {
+ boolean blacklisted = blacklistDeviceForOpenSLESUsageIsOverridden ?
+ blacklistDeviceForOpenSLESUsage :
WebRtcAudioUtils.deviceIsBlacklistedForOpenSLESUsage();
if (blacklisted) {
Logging.e(TAG, Build.MODEL + " is blacklisted for OpenSL ES usage!");
@@ -133,8 +147,6 @@ class WebRtcAudioManager {
getMinOutputFrameSize(sampleRate, channels);
// TODO(henrika): add support for low-latency input.
inputBufferSize = getMinInputFrameSize(sampleRate, channels);
- outputStreamType = WebRtcAudioUtils.getOutputStreamTypeFromAudioMode(
- audioManager.getMode());
}
// Gets the current earpiece state.
@@ -178,20 +190,26 @@ class WebRtcAudioManager {
// No overrides available. Deliver best possible estimate based on default
// Android AudioManager APIs.
final int sampleRateHz;
- if (!WebRtcAudioUtils.runningOnJellyBeanMR1OrHigher()) {
- sampleRateHz = WebRtcAudioUtils.getDefaultSampleRateHz();
+ if (WebRtcAudioUtils.runningOnJellyBeanMR1OrHigher()) {
+ sampleRateHz = getSampleRateOnJellyBeanMR10OrHigher();
} else {
- String sampleRateString = audioManager.getProperty(
- AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
- sampleRateHz = (sampleRateString == null)
- ? WebRtcAudioUtils.getDefaultSampleRateHz()
- : Integer.parseInt(sampleRateString);
+ sampleRateHz = WebRtcAudioUtils.getDefaultSampleRateHz();
}
Logging.d(TAG, "Sample rate is set to " + sampleRateHz + " Hz");
return sampleRateHz;
}
+ @TargetApi(17)
+ private int getSampleRateOnJellyBeanMR10OrHigher() {
+ String sampleRateString = audioManager.getProperty(
+ AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
+ return (sampleRateString == null)
+ ? WebRtcAudioUtils.getDefaultSampleRateHz()
+ : Integer.parseInt(sampleRateString);
+ }
+
// Returns the native output buffer size for low-latency output streams.
+ @TargetApi(17)
private int getLowLatencyOutputFramesPerBuffer() {
assertTrue(isLowLatencyOutputSupported());
if (!WebRtcAudioUtils.runningOnJellyBeanMR1OrHigher()) {
@@ -270,5 +288,5 @@ class WebRtcAudioManager {
private native void nativeCacheAudioParameters(
int sampleRate, int channels, boolean hardwareAEC, boolean hardwareAGC,
boolean hardwareNS, boolean lowLatencyOutput, int outputBufferSize,
- int inputBufferSize, int outputStreamType, long nativeAudioManager);
+ int inputBufferSize, long nativeAudioManager);
}
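
The new static override gives embedding apps the last word on the OpenSL ES blacklist. A hypothetical caller that wants the OpenSL ES output path even on a blacklisted model would flip it before the audio device module is created:

    // Hypothetical app code; must run before WebRtcAudioManager is
    // instantiated so the override is picked up.
    WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(false);
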
diff --git a/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioRecord.java b/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioRecord.java
index 7b31e08eed..ff77635843 100644
--- a/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioRecord.java
+++ b/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioRecord.java
@@ -192,10 +192,6 @@ class WebRtcAudioRecord {
Math.max(BUFFER_SIZE_FACTOR * minBufferSize, byteBuffer.capacity());
Logging.d(TAG, "bufferSizeInBytes: " + bufferSizeInBytes);
try {
- // TODO(henrika): the only supported audio source for input is currently
- // AudioSource.VOICE_COMMUNICATION. Is there any reason why we should
- // support other types, e.g. DEFAULT or MIC? Only reason I can think of
- // is if the device does not support VOICE_COMMUNICATION.
audioRecord = new AudioRecord(AudioSource.VOICE_COMMUNICATION,
sampleRate,
AudioFormat.CHANNEL_IN_MONO,
diff --git a/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java b/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
index ec0e109169..11eb51383d 100644
--- a/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
+++ b/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
@@ -13,6 +13,7 @@ package org.webrtc.voiceengine;
import java.lang.Thread;
import java.nio.ByteBuffer;
+import android.annotation.TargetApi;
import android.content.Context;
import android.media.AudioFormat;
import android.media.AudioManager;
@@ -39,7 +40,6 @@ class WebRtcAudioTrack {
private final Context context;
private final long nativeAudioTrack;
private final AudioManager audioManager;
- private final int streamType;
private ByteBuffer byteBuffer;
@@ -91,13 +91,9 @@ class WebRtcAudioTrack {
assertTrue(sizeInBytes <= byteBuffer.remaining());
int bytesWritten = 0;
if (WebRtcAudioUtils.runningOnLollipopOrHigher()) {
- bytesWritten = audioTrack.write(byteBuffer,
- sizeInBytes,
- AudioTrack.WRITE_BLOCKING);
+ bytesWritten = writeOnLollipop(audioTrack, byteBuffer, sizeInBytes);
} else {
- bytesWritten = audioTrack.write(byteBuffer.array(),
- byteBuffer.arrayOffset(),
- sizeInBytes);
+ bytesWritten = writePreLollipop(audioTrack, byteBuffer, sizeInBytes);
}
if (bytesWritten != sizeInBytes) {
Logging.e(TAG, "AudioTrack.write failed: " + bytesWritten);
@@ -124,6 +120,15 @@ class WebRtcAudioTrack {
audioTrack.flush();
}
+ @TargetApi(21)
+ private int writeOnLollipop(AudioTrack audioTrack, ByteBuffer byteBuffer, int sizeInBytes) {
+ return audioTrack.write(byteBuffer, sizeInBytes, AudioTrack.WRITE_BLOCKING);
+ }
+
+ private int writePreLollipop(AudioTrack audioTrack, ByteBuffer byteBuffer, int sizeInBytes) {
+ return audioTrack.write(byteBuffer.array(), byteBuffer.arrayOffset(), sizeInBytes);
+ }
+
public void joinThread() {
keepAlive = false;
while (isAlive()) {
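
Splitting the two AudioTrack.write() overloads into writeOnLollipop() and writePreLollipop() is the same guard idiom in another form: the API 21 ByteBuffer overload now lives behind its own @TargetApi(21) method, so only the runtime runningOnLollipopOrHigher() branch ever reaches it, presumably keeping older class verifiers away from the Lollipop-only symbol.
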
@@ -142,9 +147,6 @@ class WebRtcAudioTrack {
this.nativeAudioTrack = nativeAudioTrack;
audioManager = (AudioManager) context.getSystemService(
Context.AUDIO_SERVICE);
- this.streamType =
- WebRtcAudioUtils.getOutputStreamTypeFromAudioMode(
- audioManager.getMode());
if (DEBUG) {
WebRtcAudioUtils.logDeviceInfo(TAG);
}
@@ -181,7 +183,7 @@ class WebRtcAudioTrack {
// Create an AudioTrack object and initialize its associated audio buffer.
// The size of this buffer determines how long an AudioTrack can play
// before running out of data.
- audioTrack = new AudioTrack(streamType,
+ audioTrack = new AudioTrack(AudioManager.STREAM_VOICE_CALL,
sampleRate,
AudioFormat.CHANNEL_OUT_MONO,
AudioFormat.ENCODING_PCM_16BIT,
@@ -193,7 +195,7 @@ class WebRtcAudioTrack {
}
assertTrue(audioTrack.getState() == AudioTrack.STATE_INITIALIZED);
assertTrue(audioTrack.getPlayState() == AudioTrack.PLAYSTATE_STOPPED);
- assertTrue(audioTrack.getStreamType() == streamType);
+ assertTrue(audioTrack.getStreamType() == AudioManager.STREAM_VOICE_CALL);
}
private boolean startPlayout() {
@@ -217,32 +219,37 @@ class WebRtcAudioTrack {
return true;
}
- /** Get max possible volume index given type of audio stream. */
+ /** Get max possible volume index for a phone call audio stream. */
private int getStreamMaxVolume() {
Logging.d(TAG, "getStreamMaxVolume");
assertTrue(audioManager != null);
- return audioManager.getStreamMaxVolume(streamType);
+ return audioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL);
}
- /** Set current volume level given type of audio stream. */
+ /** Set current volume level for a phone call audio stream. */
private boolean setStreamVolume(int volume) {
Logging.d(TAG, "setStreamVolume(" + volume + ")");
assertTrue(audioManager != null);
- if (WebRtcAudioUtils.runningOnLollipopOrHigher()) {
- if (audioManager.isVolumeFixed()) {
- Logging.e(TAG, "The device implements a fixed volume policy.");
- return false;
- }
+ if (isVolumeFixed()) {
+ Logging.e(TAG, "The device implements a fixed volume policy.");
+ return false;
}
- audioManager.setStreamVolume(streamType, volume, 0);
+ audioManager.setStreamVolume(AudioManager.STREAM_VOICE_CALL, volume, 0);
return true;
}
- /** Get current volume level given type of audio stream. */
+ @TargetApi(21)
+ private boolean isVolumeFixed() {
+ if (!WebRtcAudioUtils.runningOnLollipopOrHigher())
+ return false;
+ return audioManager.isVolumeFixed();
+ }
+
+ /** Get current volume level for a phone call audio stream. */
private int getStreamVolume() {
Logging.d(TAG, "getStreamVolume");
assertTrue(audioManager != null);
- return audioManager.getStreamVolume(streamType);
+ return audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL);
}
/** Helper method which throws an exception when an assertion has failed. */
diff --git a/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioUtils.java b/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioUtils.java
index f08e11dad8..45f564a4dd 100644
--- a/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioUtils.java
+++ b/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioUtils.java
@@ -144,6 +144,11 @@ public final class WebRtcAudioUtils {
return Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1;
}
+ public static boolean runningOnJellyBeanMR2OrHigher() {
+ // July 24, 2013: Android 4.3. API Level 18.
+ return Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2;
+ }
+
public static boolean runningOnLollipopOrHigher() {
// API Level 21.
return Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP;
@@ -193,37 +198,5 @@ public final class WebRtcAudioUtils {
permission,
Process.myPid(),
Process.myUid()) == PackageManager.PERMISSION_GRANTED;
- }
-
- // Convert the provided audio |mode| into most suitable audio output stream
- // type. The stream type is used for creating audio streams and for volume
- // changes. It is essential that the mode and type are in-line to ensure
- // correct behavior. If for example a STREAM_MUSIC type of stream is created
- // in a MODE_IN_COMMUNICATION mode, audio will be played out and the volume
- // icon will look OK but the actual volume will not be changed when the user
- // changes the volume slider.
- // TODO(henrika): there is currently no mapping to STREAM_ALARM, STREAM_DTMF,
- // or STREAM_NOTIFICATION types since I am unable to see a reason for using
- // them. There are only four different modes.
- public static int getOutputStreamTypeFromAudioMode(int mode) {
- Logging.d(TAG, "getOutputStreamTypeFromAudioMode(mode=" + mode + ")");
- switch (mode) {
- case AudioManager.MODE_NORMAL:
- // The audio stream for music playback.
- Logging.d(TAG, "AudioManager.STREAM_MUSIC");
- return AudioManager.STREAM_MUSIC;
- case AudioManager.MODE_RINGTONE:
- // Audio stream for the phone ring.
- Logging.d(TAG, "AudioManager.STREAM_RING");
- return AudioManager.STREAM_RING;
- case AudioManager.MODE_IN_CALL:
- case AudioManager.MODE_IN_COMMUNICATION:
- // Audio stream for phone calls.
- Logging.d(TAG, "AudioManager.STREAM_VOICE_CALL");
- return AudioManager.STREAM_VOICE_CALL;
- default:
- Logging.d(TAG, "AudioManager.USE_DEFAULT_STREAM_TYPE");
- return AudioManager.USE_DEFAULT_STREAM_TYPE;
}
- }
}
diff --git a/webrtc/modules/audio_device/android/opensles_player.cc b/webrtc/modules/audio_device/android/opensles_player.cc
index 40967c5fb9..d2bff4905e 100644
--- a/webrtc/modules/audio_device/android/opensles_player.cc
+++ b/webrtc/modules/audio_device/android/opensles_player.cc
@@ -15,6 +15,7 @@
#include "webrtc/base/arraysize.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/format_macros.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/modules/audio_device/android/audio_manager.h"
#include "webrtc/modules/audio_device/fine_audio_buffer.h"
@@ -38,7 +39,6 @@ namespace webrtc {
OpenSLESPlayer::OpenSLESPlayer(AudioManager* audio_manager)
: audio_parameters_(audio_manager->GetPlayoutAudioParameters()),
- stream_type_(audio_manager->OutputStreamType()),
audio_device_buffer_(NULL),
initialized_(false),
playing_(false),
@@ -47,11 +47,9 @@ OpenSLESPlayer::OpenSLESPlayer(AudioManager* audio_manager)
engine_(nullptr),
player_(nullptr),
simple_buffer_queue_(nullptr),
- volume_(nullptr) {
+ volume_(nullptr),
+ last_play_time_(0) {
ALOGD("ctor%s", GetThreadInfo().c_str());
- RTC_DCHECK(stream_type_ == SL_ANDROID_STREAM_VOICE ||
- stream_type_ == SL_ANDROID_STREAM_RING ||
- stream_type_ == SL_ANDROID_STREAM_MEDIA) << stream_type_;
// Use native audio output parameters provided by the audio manager and
// define the PCM format structure.
pcm_format_ = CreatePCMConfiguration(audio_parameters_.channels(),
@@ -99,6 +97,7 @@ int OpenSLESPlayer::InitPlayout() {
CreateMix();
initialized_ = true;
buffer_index_ = 0;
+ last_play_time_ = rtc::Time();
return 0;
}
@@ -180,15 +179,15 @@ void OpenSLESPlayer::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
const int sample_rate_hz = audio_parameters_.sample_rate();
ALOGD("SetPlayoutSampleRate(%d)", sample_rate_hz);
audio_device_buffer_->SetPlayoutSampleRate(sample_rate_hz);
- const int channels = audio_parameters_.channels();
- ALOGD("SetPlayoutChannels(%d)", channels);
+ const size_t channels = audio_parameters_.channels();
+ ALOGD("SetPlayoutChannels(%" PRIuS ")", channels);
audio_device_buffer_->SetPlayoutChannels(channels);
RTC_CHECK(audio_device_buffer_);
AllocateDataBuffers();
}
SLDataFormat_PCM OpenSLESPlayer::CreatePCMConfiguration(
- int channels,
+ size_t channels,
int sample_rate,
size_t bits_per_sample) {
ALOGD("CreatePCMConfiguration");
@@ -237,7 +236,16 @@ void OpenSLESPlayer::AllocateDataBuffers() {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
RTC_DCHECK(!simple_buffer_queue_);
RTC_CHECK(audio_device_buffer_);
- bytes_per_buffer_ = audio_parameters_.GetBytesPerBuffer();
+ // Don't use the lowest possible size as the native buffer size. Instead,
+ // use 10ms to better match the frame size that WebRTC uses. This results
+ // in a reduced risk of audio glitches and also in a cleaner sequence of
+ // callbacks from the OpenSL ES thread into WebRTC when asking for audio
+ // to render.
+ ALOGD("lowest possible buffer size: %" PRIuS,
+ audio_parameters_.GetBytesPerBuffer());
+ bytes_per_buffer_ = audio_parameters_.GetBytesPerFrame() *
+ audio_parameters_.frames_per_10ms_buffer();
+ RTC_DCHECK_GE(bytes_per_buffer_, audio_parameters_.GetBytesPerBuffer());
ALOGD("native buffer size: %" PRIuS, bytes_per_buffer_);
// Create a modified audio buffer class which allows us to ask for any number
// of samples (and not only multiple of 10ms) to match the native OpenSL ES
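
The 10 ms sizing is easy to sanity-check. A back-of-envelope sketch with assumed values (48 kHz mono, 16-bit PCM), not taken from the patch:

    #include <cstddef>

    constexpr int kSampleRateHz = 48000;   // assumed native rate
    constexpr size_t kChannels = 1;        // mono playout
    constexpr size_t kBytesPerSample = 2;  // 16-bit PCM
    constexpr size_t kFramesPer10ms = kSampleRateHz / 100;  // 480 frames
    constexpr size_t kBytesPerBuffer =
        kFramesPer10ms * kChannels * kBytesPerSample;  // 960 bytes
    static_assert(kBytesPerBuffer == 960, "10 ms of 48 kHz mono 16-bit PCM");
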
@@ -351,7 +359,7 @@ bool OpenSLESPlayer::CreateAudioPlayer() {
false);
// Set audio player configuration to SL_ANDROID_STREAM_VOICE which
// corresponds to android.media.AudioManager.STREAM_VOICE_CALL.
- SLint32 stream_type = stream_type_;
+ SLint32 stream_type = SL_ANDROID_STREAM_VOICE;
RETURN_ON_ERROR(
(*player_config)
->SetConfiguration(player_config, SL_ANDROID_KEY_STREAM_TYPE,
@@ -422,6 +430,15 @@ void OpenSLESPlayer::FillBufferQueue() {
}
void OpenSLESPlayer::EnqueuePlayoutData() {
+ // Check delta time between two successive callbacks and provide a warning
+ // if it becomes very large.
+ // TODO(henrika): using 100ms as upper limit but this value is rather random.
+ const uint32_t current_time = rtc::Time();
+ const uint32_t diff = current_time - last_play_time_;
+ if (diff > 100) {
+ ALOGW("Bad OpenSL ES playout timing, dT=%u [ms]", diff);
+ }
+ last_play_time_ = current_time;
// Read audio data from the WebRTC source using the FineAudioBuffer object
// to adjust for differences in buffer size between WebRTC (10ms) and native
// OpenSL ES.
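
The delta-time check added here is a plain callback-interval watchdog. A minimal standalone sketch, assuming a millisecond wall clock like rtc::Time(); the 100 ms threshold mirrors the TODO above:

    #include <cstdint>
    #include <cstdio>

    // Warn when successive render callbacks drift far apart; with 10 ms
    // buffers, roughly 10 callbacks should land inside a 100 ms window.
    void CheckPlayoutInterval(uint32_t now_ms, uint32_t* last_ms) {
      const uint32_t diff_ms = now_ms - *last_ms;
      if (diff_ms > 100)
        fprintf(stderr, "Bad OpenSL ES playout timing, dT=%u [ms]\n", diff_ms);
      *last_ms = now_ms;
    }
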
diff --git a/webrtc/modules/audio_device/android/opensles_player.h b/webrtc/modules/audio_device/android/opensles_player.h
index 96b1d49ac5..fa9e931218 100644
--- a/webrtc/modules/audio_device/android/opensles_player.h
+++ b/webrtc/modules/audio_device/android/opensles_player.h
@@ -22,7 +22,7 @@
#include "webrtc/modules/audio_device/android/opensles_common.h"
#include "webrtc/modules/audio_device/include/audio_device_defines.h"
#include "webrtc/modules/audio_device/audio_device_generic.h"
-#include "webrtc/modules/utility/interface/helpers_android.h"
+#include "webrtc/modules/utility/include/helpers_android.h"
namespace webrtc {
@@ -52,7 +52,7 @@ class OpenSLESPlayer {
// buffer count of 2 or more, and a buffer size and sample rate that are
// compatible with the device's native output configuration provided via the
// audio manager at construction.
- static const int kNumOfOpenSLESBuffers = 2;
+ static const int kNumOfOpenSLESBuffers = 4;
// There is no need for this class to use JNI.
static int32_t SetAndroidAudioDeviceObjects(void* javaVM, void* context) {
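
Doubling kNumOfOpenSLESBuffers from 2 to 4, together with the 10 ms buffers now allocated in opensles_player.cc, deepens the OpenSL ES queue; the likely trade-off is a couple of extra buffers' worth of latency in exchange for more headroom against underruns when callback timing is jittery.
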
@@ -94,7 +94,7 @@ class OpenSLESPlayer {
void EnqueuePlayoutData();
// Configures the SL_DATAFORMAT_PCM structure.
- SLDataFormat_PCM CreatePCMConfiguration(int channels,
+ SLDataFormat_PCM CreatePCMConfiguration(size_t channels,
int sample_rate,
size_t bits_per_sample);
@@ -130,20 +130,6 @@ class OpenSLESPlayer {
// AudioManager.
const AudioParameters audio_parameters_;
- // Contains the stream type provided to this class at construction by the
- // AudioManager. Possible input values are:
- // - AudioManager.STREAM_VOICE_CALL = 0
- // - AudioManager.STREAM_RING = 2
- // - AudioManager.STREAM_MUSIC = 3
- // These value are mapped to the corresponding audio playback stream type
- // values in the "OpenSL ES domain":
- // - SL_ANDROID_STREAM_VOICE <=> STREAM_VOICE_CALL (0)
- // - SL_ANDROID_STREAM_RING <=> STREAM_RING (2)
- // - SL_ANDROID_STREAM_MEDIA <=> STREAM_MUSIC (3)
- // when creating the audio player. See SLES/OpenSLES_AndroidConfiguration.h
- // for details.
- const int stream_type_;
-
// Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the
// AudioDeviceModuleImpl class and called by AudioDeviceModuleImpl::Create().
AudioDeviceBuffer* audio_device_buffer_;
@@ -209,6 +195,9 @@ class OpenSLESPlayer {
// This interface exposes controls for manipulating the object’s audio volume
// properties. This interface is supported on the Audio Player object.
SLVolumeItf volume_;
+
+ // Last time the OpenSL ES layer asked for audio data to play out.
+ uint32_t last_play_time_;
};
} // namespace webrtc
diff --git a/webrtc/modules/audio_device/audio_device.gypi b/webrtc/modules/audio_device/audio_device.gypi
index 0678d33802..2db9f1e17b 100644
--- a/webrtc/modules/audio_device/audio_device.gypi
+++ b/webrtc/modules/audio_device/audio_device.gypi
@@ -20,13 +20,13 @@
],
'include_dirs': [
'.',
- '../interface',
+ '../include',
'include',
'dummy', # Contains dummy audio device implementations.
],
'direct_dependent_settings': {
'include_dirs': [
- '../interface',
+ '../include',
'include',
],
},
@@ -86,48 +86,26 @@
}],
['include_internal_audio_device==1', {
'sources': [
- 'android/audio_device_template.h',
- 'android/audio_manager.cc',
- 'android/audio_manager.h',
- 'android/audio_record_jni.cc',
- 'android/audio_record_jni.h',
- 'android/audio_track_jni.cc',
- 'android/audio_track_jni.h',
- 'android/build_info.cc',
- 'android/build_info.h',
- 'android/opensles_common.cc',
- 'android/opensles_common.h',
- 'android/opensles_player.cc',
- 'android/opensles_player.h',
'audio_device_impl.cc',
'audio_device_impl.h',
- 'ios/audio_device_ios.h',
- 'ios/audio_device_ios.mm',
- 'ios/audio_device_not_implemented_ios.mm',
- 'linux/alsasymboltable_linux.cc',
- 'linux/alsasymboltable_linux.h',
- 'linux/audio_device_alsa_linux.cc',
- 'linux/audio_device_alsa_linux.h',
- 'linux/audio_mixer_manager_alsa_linux.cc',
- 'linux/audio_mixer_manager_alsa_linux.h',
- 'linux/latebindingsymboltable_linux.cc',
- 'linux/latebindingsymboltable_linux.h',
- 'mac/audio_device_mac.cc',
- 'mac/audio_device_mac.h',
- 'mac/audio_mixer_manager_mac.cc',
- 'mac/audio_mixer_manager_mac.h',
- 'mac/portaudio/pa_memorybarrier.h',
- 'mac/portaudio/pa_ringbuffer.c',
- 'mac/portaudio/pa_ringbuffer.h',
- 'win/audio_device_core_win.cc',
- 'win/audio_device_core_win.h',
- 'win/audio_device_wave_win.cc',
- 'win/audio_device_wave_win.h',
- 'win/audio_mixer_manager_win.cc',
- 'win/audio_mixer_manager_win.h',
],
'conditions': [
['OS=="android"', {
+ 'sources': [
+ 'android/audio_device_template.h',
+ 'android/audio_manager.cc',
+ 'android/audio_manager.h',
+ 'android/audio_record_jni.cc',
+ 'android/audio_record_jni.h',
+ 'android/audio_track_jni.cc',
+ 'android/audio_track_jni.h',
+ 'android/build_info.cc',
+ 'android/build_info.h',
+ 'android/opensles_common.cc',
+ 'android/opensles_common.h',
+ 'android/opensles_player.cc',
+ 'android/opensles_player.h',
+ ],
'link_settings': {
'libraries': [
'-llog',
@@ -136,6 +114,16 @@
},
}],
['OS=="linux"', {
+ 'sources': [
+ 'linux/alsasymboltable_linux.cc',
+ 'linux/alsasymboltable_linux.h',
+ 'linux/audio_device_alsa_linux.cc',
+ 'linux/audio_device_alsa_linux.h',
+ 'linux/audio_mixer_manager_alsa_linux.cc',
+ 'linux/audio_mixer_manager_alsa_linux.h',
+ 'linux/latebindingsymboltable_linux.cc',
+ 'linux/latebindingsymboltable_linux.h',
+ ],
'defines': [
'LINUX_ALSA',
],
@@ -161,6 +149,15 @@
],
}],
['OS=="mac"', {
+ 'sources': [
+ 'mac/audio_device_mac.cc',
+ 'mac/audio_device_mac.h',
+ 'mac/audio_mixer_manager_mac.cc',
+ 'mac/audio_mixer_manager_mac.h',
+ 'mac/portaudio/pa_memorybarrier.h',
+ 'mac/portaudio/pa_ringbuffer.c',
+ 'mac/portaudio/pa_ringbuffer.h',
+ ],
'link_settings': {
'libraries': [
'$(SDKROOT)/System/Library/Frameworks/AudioToolbox.framework',
@@ -169,6 +166,11 @@
},
}],
['OS=="ios"', {
+ 'sources': [
+ 'ios/audio_device_ios.h',
+ 'ios/audio_device_ios.mm',
+ 'ios/audio_device_not_implemented_ios.mm',
+ ],
'xcode_settings': {
'CLANG_ENABLE_OBJC_ARC': 'YES',
},
@@ -184,6 +186,14 @@
},
}],
['OS=="win"', {
+ 'sources': [
+ 'win/audio_device_core_win.cc',
+ 'win/audio_device_core_win.h',
+ 'win/audio_device_wave_win.cc',
+ 'win/audio_device_wave_win.h',
+ 'win/audio_mixer_manager_win.cc',
+ 'win/audio_mixer_manager_win.h',
+ ],
'link_settings': {
'libraries': [
# Required for the built-in WASAPI AEC.
@@ -194,18 +204,40 @@
],
},
}],
+ ['OS=="win" and clang==1', {
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'AdditionalOptions': [
+ # Disable warnings failing when compiling with Clang on Windows.
+ # https://bugs.chromium.org/p/webrtc/issues/detail?id=5366
+ '-Wno-bool-conversion',
+ '-Wno-delete-non-virtual-dtor',
+ '-Wno-logical-op-parentheses',
+ '-Wno-microsoft-extra-qualification',
+ '-Wno-microsoft-goto',
+ '-Wno-missing-braces',
+ '-Wno-parentheses-equality',
+ '-Wno-reorder',
+ '-Wno-shift-overflow',
+ '-Wno-tautological-compare',
+ '-Wno-unused-private-field',
+ ],
+ },
+ },
+ }],
], # conditions
}], # include_internal_audio_device==1
], # conditions
},
],
'conditions': [
- ['include_tests==1', {
+ # Does not compile on iOS: webrtc:4755.
+ ['include_tests==1 and OS!="ios"', {
'targets': [
{
'target_name': 'audio_device_tests',
- 'type': 'executable',
- 'dependencies': [
+ 'type': 'executable',
+ 'dependencies': [
'audio_device',
'webrtc_utility',
'<(webrtc_root)/test/test.gyp:test_support_main',
@@ -236,7 +268,7 @@
],
},
], # targets
- }], # include_tests
+ }], # include_tests==1 and OS!=ios
],
}
diff --git a/webrtc/modules/audio_device/audio_device_buffer.cc b/webrtc/modules/audio_device/audio_device_buffer.cc
index e7b487d687..48ae88ee90 100644
--- a/webrtc/modules/audio_device/audio_device_buffer.cc
+++ b/webrtc/modules/audio_device/audio_device_buffer.cc
@@ -169,7 +169,7 @@ int32_t AudioDeviceBuffer::PlayoutSampleRate() const
// SetRecordingChannels
// ----------------------------------------------------------------------------
-int32_t AudioDeviceBuffer::SetRecordingChannels(uint8_t channels)
+int32_t AudioDeviceBuffer::SetRecordingChannels(size_t channels)
{
CriticalSectionScoped lock(&_critSect);
_recChannels = channels;
@@ -181,7 +181,7 @@ int32_t AudioDeviceBuffer::SetRecordingChannels(uint8_t channels)
// SetPlayoutChannels
// ----------------------------------------------------------------------------
-int32_t AudioDeviceBuffer::SetPlayoutChannels(uint8_t channels)
+int32_t AudioDeviceBuffer::SetPlayoutChannels(size_t channels)
{
CriticalSectionScoped lock(&_critSect);
_playChannels = channels;
@@ -239,7 +239,7 @@ int32_t AudioDeviceBuffer::RecordingChannel(AudioDeviceModule::ChannelType& chan
// RecordingChannels
// ----------------------------------------------------------------------------
-uint8_t AudioDeviceBuffer::RecordingChannels() const
+size_t AudioDeviceBuffer::RecordingChannels() const
{
return _recChannels;
}
@@ -248,7 +248,7 @@ uint8_t AudioDeviceBuffer::RecordingChannels() const
// PlayoutChannels
// ----------------------------------------------------------------------------
-uint8_t AudioDeviceBuffer::PlayoutChannels() const
+size_t AudioDeviceBuffer::PlayoutChannels() const
{
return _playChannels;
}
@@ -487,7 +487,7 @@ int32_t AudioDeviceBuffer::RequestPlayoutData(size_t nSamples)
{
uint32_t playSampleRate = 0;
size_t playBytesPerSample = 0;
- uint8_t playChannels = 0;
+ size_t playChannels = 0;
{
CriticalSectionScoped lock(&_critSect);
diff --git a/webrtc/modules/audio_device/audio_device_buffer.h b/webrtc/modules/audio_device/audio_device_buffer.h
index 2ab7ff5547..1095971040 100644
--- a/webrtc/modules/audio_device/audio_device_buffer.h
+++ b/webrtc/modules/audio_device/audio_device_buffer.h
@@ -40,10 +40,10 @@ public:
int32_t RecordingSampleRate() const;
int32_t PlayoutSampleRate() const;
- virtual int32_t SetRecordingChannels(uint8_t channels);
- virtual int32_t SetPlayoutChannels(uint8_t channels);
- uint8_t RecordingChannels() const;
- uint8_t PlayoutChannels() const;
+ virtual int32_t SetRecordingChannels(size_t channels);
+ virtual int32_t SetPlayoutChannels(size_t channels);
+ size_t RecordingChannels() const;
+ size_t PlayoutChannels() const;
int32_t SetRecordingChannel(
const AudioDeviceModule::ChannelType channel);
int32_t RecordingChannel(
@@ -80,8 +80,8 @@ private:
uint32_t _recSampleRate;
uint32_t _playSampleRate;
- uint8_t _recChannels;
- uint8_t _playChannels;
+ size_t _recChannels;
+ size_t _playChannels;
// selected recording channel (left/right/both)
AudioDeviceModule::ChannelType _recChannel;
diff --git a/webrtc/modules/audio_device/dummy/file_audio_device.cc b/webrtc/modules/audio_device/dummy/file_audio_device.cc
index 9c7bf069d8..aac0962a50 100644
--- a/webrtc/modules/audio_device/dummy/file_audio_device.cc
+++ b/webrtc/modules/audio_device/dummy/file_audio_device.cc
@@ -7,21 +7,20 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
-#include <iostream>
+#include "webrtc/base/platform_thread.h"
#include "webrtc/modules/audio_device/dummy/file_audio_device.h"
#include "webrtc/system_wrappers/include/sleep.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
namespace webrtc {
-int kRecordingFixedSampleRate = 48000;
-int kRecordingNumChannels = 2;
-int kPlayoutFixedSampleRate = 48000;
-int kPlayoutNumChannels = 2;
-int kPlayoutBufferSize = kPlayoutFixedSampleRate / 100
- * kPlayoutNumChannels * 2;
-int kRecordingBufferSize = kRecordingFixedSampleRate / 100
- * kRecordingNumChannels * 2;
+const int kRecordingFixedSampleRate = 48000;
+const size_t kRecordingNumChannels = 2;
+const int kPlayoutFixedSampleRate = 48000;
+const size_t kPlayoutNumChannels = 2;
+const size_t kPlayoutBufferSize =
+ kPlayoutFixedSampleRate / 100 * kPlayoutNumChannels * 2;
+const size_t kRecordingBufferSize =
+ kRecordingFixedSampleRate / 100 * kRecordingNumChannels * 2;
FileAudioDevice::FileAudioDevice(const int32_t id,
const char* inputFilename,
@@ -195,9 +194,7 @@ int32_t FileAudioDevice::StartPlayout() {
_playoutFramesLeft = 0;
if (!_playoutBuffer) {
- _playoutBuffer = new int8_t[2 *
- kPlayoutNumChannels *
- kPlayoutFixedSampleRate/100];
+ _playoutBuffer = new int8_t[kPlayoutBufferSize];
}
if (!_playoutBuffer) {
_playing = false;
@@ -214,17 +211,10 @@ int32_t FileAudioDevice::StartPlayout() {
return -1;
}
- const char* threadName = "webrtc_audio_module_play_thread";
- _ptrThreadPlay = ThreadWrapper::CreateThread(PlayThreadFunc, this,
- threadName);
- if (!_ptrThreadPlay->Start()) {
- _ptrThreadPlay.reset();
- _playing = false;
- delete [] _playoutBuffer;
- _playoutBuffer = NULL;
- return -1;
- }
- _ptrThreadPlay->SetPriority(kRealtimePriority);
+ _ptrThreadPlay.reset(new rtc::PlatformThread(
+ PlayThreadFunc, this, "webrtc_audio_module_play_thread"));
+ _ptrThreadPlay->Start();
+ _ptrThreadPlay->SetPriority(rtc::kRealtimePriority);
return 0;
}
@@ -277,17 +267,11 @@ int32_t FileAudioDevice::StartRecording() {
return -1;
}
- const char* threadName = "webrtc_audio_module_capture_thread";
- _ptrThreadRec = ThreadWrapper::CreateThread(RecThreadFunc, this, threadName);
+ _ptrThreadRec.reset(new rtc::PlatformThread(
+ RecThreadFunc, this, "webrtc_audio_module_capture_thread"));
- if (!_ptrThreadRec->Start()) {
- _ptrThreadRec.reset();
- _recording = false;
- delete [] _recordingBuffer;
- _recordingBuffer = NULL;
- return -1;
- }
- _ptrThreadRec->SetPriority(kRealtimePriority);
+ _ptrThreadRec->Start();
+ _ptrThreadRec->SetPriority(rtc::kRealtimePriority);
return 0;
}
@@ -514,7 +498,12 @@ bool FileAudioDevice::PlayThreadProcess()
}
_playoutFramesLeft = 0;
_critSect.Leave();
- SleepMs(10 - (_clock->CurrentNtpInMilliseconds() - currentTime));
+
+ uint64_t deltaTimeMillis = _clock->CurrentNtpInMilliseconds() - currentTime;
+  if (deltaTimeMillis < 10) {
+ SleepMs(10 - deltaTimeMillis);
+ }
+
return true;
}
@@ -544,7 +533,12 @@ bool FileAudioDevice::RecThreadProcess()
}
_critSect.Leave();
- SleepMs(10 - (_clock->CurrentNtpInMilliseconds() - currentTime));
+
+ uint64_t deltaTimeMillis = _clock->CurrentNtpInMilliseconds() - currentTime;
+  if (deltaTimeMillis < 10) {
+ SleepMs(10 - deltaTimeMillis);
+ }
+
return true;
}
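// Sketch of the unsigned underflow the guard above prevents (hypothetical
// values): the subtraction is done in uint64_t, so without the bounds check
// a callback that takes longer than 10 ms would wrap around:
//
//   uint64_t delta = 12;          // callback took 12 ms
//   uint64_t wait = 10 - delta;   // wraps to 18446744073709551614
//   SleepMs(wait);                // would sleep effectively forever
//
// Checking |delta < 10| first simply skips the sleep in that case.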
diff --git a/webrtc/modules/audio_device/dummy/file_audio_device.h b/webrtc/modules/audio_device/dummy/file_audio_device.h
index 0e1665ea72..77179409ea 100644
--- a/webrtc/modules/audio_device/dummy/file_audio_device.h
+++ b/webrtc/modules/audio_device/dummy/file_audio_device.h
@@ -20,9 +20,12 @@
#include "webrtc/system_wrappers/include/file_wrapper.h"
#include "webrtc/system_wrappers/include/clock.h"
+namespace rtc {
+class PlatformThread;
+} // namespace rtc
+
namespace webrtc {
class EventWrapper;
-class ThreadWrapper;
// This is a fake audio device which plays audio from a file as its microphone
// and plays out into a file.
@@ -178,8 +181,9 @@ class FileAudioDevice : public AudioDeviceGeneric {
size_t _recordingFramesIn10MS;
size_t _playoutFramesIn10MS;
- rtc::scoped_ptr<ThreadWrapper> _ptrThreadRec;
- rtc::scoped_ptr<ThreadWrapper> _ptrThreadPlay;
+ // TODO(pbos): Make plain members instead of pointers and stop resetting them.
+ rtc::scoped_ptr<rtc::PlatformThread> _ptrThreadRec;
+ rtc::scoped_ptr<rtc::PlatformThread> _ptrThreadPlay;
bool _playing;
bool _recording;
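// Migration pattern applied throughout this patch (a sketch; it assumes
// rtc::PlatformThread::Start() checks for failure internally, which is why
// the old ThreadWrapper error-handling branches could be dropped):
//
//   // Before:
//   _thread = ThreadWrapper::CreateThread(ThreadFunc, this, "name");
//   if (!_thread->Start()) { /* clean up and return -1 */ }
//   _thread->SetPriority(kRealtimePriority);
//
//   // After:
//   _thread.reset(new rtc::PlatformThread(ThreadFunc, this, "name"));
//   _thread->Start();
//   _thread->SetPriority(rtc::kRealtimePriority);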
diff --git a/webrtc/modules/audio_device/include/audio_device.h b/webrtc/modules/audio_device/include/audio_device.h
index c2c2b88103..15e08730c7 100644
--- a/webrtc/modules/audio_device/include/audio_device.h
+++ b/webrtc/modules/audio_device/include/audio_device.h
@@ -12,7 +12,7 @@
#define MODULES_AUDIO_DEVICE_INCLUDE_AUDIO_DEVICE_H_
#include "webrtc/modules/audio_device/include/audio_device_defines.h"
-#include "webrtc/modules/interface/module.h"
+#include "webrtc/modules/include/module.h"
namespace webrtc {
diff --git a/webrtc/modules/audio_device/include/audio_device_defines.h b/webrtc/modules/audio_device/include/audio_device_defines.h
index 3ebbd23cc5..b847729f05 100644
--- a/webrtc/modules/audio_device/include/audio_device_defines.h
+++ b/webrtc/modules/audio_device/include/audio_device_defines.h
@@ -49,7 +49,7 @@ class AudioTransport {
virtual int32_t RecordedDataIsAvailable(const void* audioSamples,
const size_t nSamples,
const size_t nBytesPerSample,
- const uint8_t nChannels,
+ const size_t nChannels,
const uint32_t samplesPerSec,
const uint32_t totalDelayMS,
const int32_t clockDrift,
@@ -59,7 +59,7 @@ class AudioTransport {
virtual int32_t NeedMorePlayData(const size_t nSamples,
const size_t nBytesPerSample,
- const uint8_t nChannels,
+ const size_t nChannels,
const uint32_t samplesPerSec,
void* audioSamples,
size_t& nSamplesOut,
@@ -82,10 +82,10 @@ class AudioTransport {
// TODO(xians): Remove this interface after Chrome and Libjingle switches
// to OnData().
virtual int OnDataAvailable(const int voe_channels[],
- int number_of_voe_channels,
+ size_t number_of_voe_channels,
const int16_t* audio_data,
int sample_rate,
- int number_of_channels,
+ size_t number_of_channels,
size_t number_of_frames,
int audio_delay_milliseconds,
int current_volume,
@@ -103,7 +103,7 @@ class AudioTransport {
const void* audio_data,
int bits_per_sample,
int sample_rate,
- int number_of_channels,
+ size_t number_of_channels,
size_t number_of_frames) {}
// Method to push the captured audio data to the specific VoE channel.
@@ -116,7 +116,7 @@ class AudioTransport {
const void* audio_data,
int bits_per_sample,
int sample_rate,
- int number_of_channels,
+ size_t number_of_channels,
size_t number_of_frames) {}
// Method to pull mixed render audio data from all active VoE channels.
@@ -125,7 +125,7 @@ class AudioTransport {
// channel.
virtual void PullRenderData(int bits_per_sample,
int sample_rate,
- int number_of_channels,
+ size_t number_of_channels,
size_t number_of_frames,
void* audio_data,
int64_t* elapsed_time_ms,
@@ -149,27 +149,27 @@ class AudioParameters {
channels_(0),
frames_per_buffer_(0),
frames_per_10ms_buffer_(0) {}
- AudioParameters(int sample_rate, int channels, size_t frames_per_buffer)
+ AudioParameters(int sample_rate, size_t channels, size_t frames_per_buffer)
: sample_rate_(sample_rate),
channels_(channels),
frames_per_buffer_(frames_per_buffer),
frames_per_10ms_buffer_(static_cast<size_t>(sample_rate / 100)) {}
- void reset(int sample_rate, int channels, size_t frames_per_buffer) {
+ void reset(int sample_rate, size_t channels, size_t frames_per_buffer) {
sample_rate_ = sample_rate;
channels_ = channels;
frames_per_buffer_ = frames_per_buffer;
frames_per_10ms_buffer_ = static_cast<size_t>(sample_rate / 100);
}
size_t bits_per_sample() const { return kBitsPerSample; }
- void reset(int sample_rate, int channels, double ms_per_buffer) {
+ void reset(int sample_rate, size_t channels, double ms_per_buffer) {
reset(sample_rate, channels,
static_cast<size_t>(sample_rate * ms_per_buffer + 0.5));
}
- void reset(int sample_rate, int channels) {
+ void reset(int sample_rate, size_t channels) {
reset(sample_rate, channels, static_cast<size_t>(0));
}
int sample_rate() const { return sample_rate_; }
- int channels() const { return channels_; }
+ size_t channels() const { return channels_; }
size_t frames_per_buffer() const { return frames_per_buffer_; }
size_t frames_per_10ms_buffer() const { return frames_per_10ms_buffer_; }
size_t GetBytesPerFrame() const { return channels_ * kBitsPerSample / 8; }
@@ -200,7 +200,7 @@ class AudioParameters {
private:
int sample_rate_;
- int channels_;
+ size_t channels_;
size_t frames_per_buffer_;
size_t frames_per_10ms_buffer_;
};
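// Usage sketch for the size_t-typed AudioParameters (hypothetical values,
// assuming kBitsPerSample is 16):
//
//   AudioParameters params(48000, 2, 480);            // 48 kHz, stereo
//   size_t frames = params.frames_per_10ms_buffer();  // 480
//   size_t bytes = params.GetBytesPerFrame();         // 2 * 16 / 8 == 4
//
// Keeping |channels_| as size_t lets byte-size products such as
// GetBytesPerFrame() stay in unsigned arithmetic end to end.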
diff --git a/webrtc/modules/audio_device/ios/audio_device_ios.h b/webrtc/modules/audio_device/ios/audio_device_ios.h
index 8f8ba0a9c5..c4eb0d6f64 100644
--- a/webrtc/modules/audio_device/ios/audio_device_ios.h
+++ b/webrtc/modules/audio_device/ios/audio_device_ios.h
@@ -182,7 +182,10 @@ class AudioDeviceIOS : public AudioDeviceGeneric {
bool InitPlayOrRecord();
// Closes and deletes the voice-processing I/O unit.
- bool ShutdownPlayOrRecord();
+ void ShutdownPlayOrRecord();
+
+ // Helper method for destroying the existing audio unit.
+ void DisposeAudioUnit();
// Callback function called on a real-time priority I/O thread from the audio
// unit. This method is used to signal that recorded audio is available.
diff --git a/webrtc/modules/audio_device/ios/audio_device_ios.mm b/webrtc/modules/audio_device/ios/audio_device_ios.mm
index f26e9f1cc7..f6dee5b3cf 100644
--- a/webrtc/modules/audio_device/ios/audio_device_ios.mm
+++ b/webrtc/modules/audio_device/ios/audio_device_ios.mm
@@ -19,12 +19,24 @@
#include "webrtc/base/atomicops.h"
#include "webrtc/base/checks.h"
+#include "webrtc/base/criticalsection.h"
#include "webrtc/base/logging.h"
+#include "webrtc/base/thread_annotations.h"
#include "webrtc/modules/audio_device/fine_audio_buffer.h"
-#include "webrtc/modules/utility/interface/helpers_ios.h"
+#include "webrtc/modules/utility/include/helpers_ios.h"
namespace webrtc {
+// Protects |g_audio_session_users|.
+static rtc::GlobalLockPod g_lock;
+
+// Counts the number of users (= instances of this object) that need an
+// active audio session. This variable ensures that we only activate an
+// audio session for the first user and deactivate it for the last.
+// The variable is static so that the count is shared by all instances
+// rather than kept per instance.
+static int g_audio_session_users GUARDED_BY(g_lock) = 0;
+
#define LOGI() LOG(LS_INFO) << "AudioDeviceIOS::"
#define LOG_AND_RETURN_IF_ERROR(error, message) \
@@ -74,25 +86,62 @@ const UInt32 kBytesPerSample = 2;
// Can most likely be removed.
const UInt16 kFixedPlayoutDelayEstimate = 30;
const UInt16 kFixedRecordDelayEstimate = 30;
+// Calls to AudioUnitInitialize() can fail if called back-to-back on different
+// ADM instances. A fall-back solution is to allow multiple sequential calls
+// with a short delay between each. This constant sets the maximum number of
+// allowed initialization attempts.
+const int kMaxNumberOfAudioUnitInitializeAttempts = 5;
+
using ios::CheckAndLogError;
+// Verifies that the current audio session supports input audio and that the
+// required category and mode are enabled.
+static bool VerifyAudioSession(AVAudioSession* session) {
+ LOG(LS_INFO) << "VerifyAudioSession";
+ // Ensure that the device currently supports audio input.
+ if (!session.isInputAvailable) {
+ LOG(LS_ERROR) << "No audio input path is available!";
+ return false;
+ }
+
+ // Ensure that the required category and mode are actually activated.
+ if (![session.category isEqualToString:AVAudioSessionCategoryPlayAndRecord]) {
+ LOG(LS_ERROR)
+ << "Failed to set category to AVAudioSessionCategoryPlayAndRecord";
+ return false;
+ }
+ if (![session.mode isEqualToString:AVAudioSessionModeVoiceChat]) {
+ LOG(LS_ERROR) << "Failed to set mode to AVAudioSessionModeVoiceChat";
+ return false;
+ }
+ return true;
+}
+
// Activates an audio session suitable for full duplex VoIP sessions when
// |activate| is true. Also sets the preferred sample rate and IO buffer
// duration. Deactivates an active audio session if |activate| is set to false.
-static void ActivateAudioSession(AVAudioSession* session, bool activate) {
+static bool ActivateAudioSession(AVAudioSession* session, bool activate)
+ EXCLUSIVE_LOCKS_REQUIRED(g_lock) {
LOG(LS_INFO) << "ActivateAudioSession(" << activate << ")";
@autoreleasepool {
NSError* error = nil;
BOOL success = NO;
- // Deactivate the audio session and return if |activate| is false.
if (!activate) {
- success = [session setActive:NO error:&error];
- RTC_DCHECK(CheckAndLogError(success, error));
- return;
+ // Deactivate the audio session using an extra option and then return.
+ // AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation is used to
+ // ensure that other audio sessions that were interrupted by our session
+ // can return to their active state. It is recommended for VoIP apps to
+ // use this option.
+ success = [session
+ setActive:NO
+ withOptions:AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation
+ error:&error];
+ return CheckAndLogError(success, error);
}
+    // Go ahead and activate our own audio session since |activate| is true.
// Use a category which supports simultaneous recording and playback.
// By default, using this category implies that our app’s audio is
// nonmixable, hence activating the session will interrupt any other
@@ -121,7 +170,6 @@ static void ActivateAudioSession(AVAudioSession* session, bool activate) {
RTC_DCHECK(CheckAndLogError(success, error));
// Set the preferred audio I/O buffer duration, in seconds.
- // TODO(henrika): add more comments here.
error = nil;
success = [session setPreferredIOBufferDuration:kPreferredIOBufferDuration
error:&error];
@@ -131,13 +179,15 @@ static void ActivateAudioSession(AVAudioSession* session, bool activate) {
// session (e.g. phone call) has higher priority than ours.
error = nil;
success = [session setActive:YES error:&error];
- RTC_DCHECK(CheckAndLogError(success, error));
- RTC_CHECK(session.isInputAvailable) << "No input path is available!";
+ if (!CheckAndLogError(success, error)) {
+ return false;
+ }
- // Ensure that category and mode are actually activated.
- RTC_DCHECK(
- [session.category isEqualToString:AVAudioSessionCategoryPlayAndRecord]);
- RTC_DCHECK([session.mode isEqualToString:AVAudioSessionModeVoiceChat]);
+ // Ensure that the active audio session has the correct category and mode.
+ if (!VerifyAudioSession(session)) {
+ LOG(LS_ERROR) << "Failed to verify audio session category and mode";
+ return false;
+ }
// Try to set the preferred number of hardware audio channels. These calls
// must be done after setting the audio session’s category and mode and
@@ -156,7 +206,52 @@ static void ActivateAudioSession(AVAudioSession* session, bool activate) {
[session setPreferredOutputNumberOfChannels:kPreferredNumberOfChannels
error:&error];
RTC_DCHECK(CheckAndLogError(success, error));
+ return true;
+ }
+}
+
+// An application can create more than one ADM and start audio streaming
+// for all of them. It is essential that we only activate the app's audio
+// session once (for the first one) and deactivate it once (for the last).
+static bool ActivateAudioSession() {
+ LOGI() << "ActivateAudioSession";
+ rtc::GlobalLockScope ls(&g_lock);
+ if (g_audio_session_users == 0) {
+ // The system provides an audio session object upon launch of an
+ // application. However, we must initialize the session in order to
+ // handle interruptions. Implicit initialization occurs when obtaining
+ // a reference to the AVAudioSession object.
+ AVAudioSession* session = [AVAudioSession sharedInstance];
+ // Try to activate the audio session and ask for a set of preferred audio
+ // parameters.
+ if (!ActivateAudioSession(session, true)) {
+ LOG(LS_ERROR) << "Failed to activate the audio session";
+ return false;
+ }
+ LOG(LS_INFO) << "The audio session is now activated";
+ }
+ ++g_audio_session_users;
+ LOG(LS_INFO) << "Number of audio session users: " << g_audio_session_users;
+ return true;
+}
+
+// If more than one object is using the audio session, ensure that only the
+// last object deactivates. Apple recommends: "activate your audio session
+// only as needed and deactivate it when you are not using audio".
+static bool DeactivateAudioSession() {
+ LOGI() << "DeactivateAudioSession";
+ rtc::GlobalLockScope ls(&g_lock);
+ if (g_audio_session_users == 1) {
+ AVAudioSession* session = [AVAudioSession sharedInstance];
+ if (!ActivateAudioSession(session, false)) {
+ LOG(LS_ERROR) << "Failed to deactivate the audio session";
+ return false;
+ }
+ LOG(LS_INFO) << "Our audio session is now deactivated";
}
+ --g_audio_session_users;
+ LOG(LS_INFO) << "Number of audio session users: " << g_audio_session_users;
+ return true;
}
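// Intended call pattern (sketch): every user balances one successful
// activation with one deactivation, so |g_audio_session_users| returns to
// zero and the session is only toggled at the first/last transition:
//
//   if (!ActivateAudioSession())
//     return false;            // first caller performs the real activation
//   // ... stream audio ...
//   DeactivateAudioSession();  // last caller performs the real deactivation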
#if !defined(NDEBUG)
@@ -198,12 +293,13 @@ AudioDeviceIOS::AudioDeviceIOS()
initialized_(false),
rec_is_initialized_(false),
play_is_initialized_(false),
- audio_interruption_observer_(nullptr) {
+ audio_interruption_observer_(nullptr),
+ route_change_observer_(nullptr) {
LOGI() << "ctor" << ios::GetCurrentThreadDescription();
}
AudioDeviceIOS::~AudioDeviceIOS() {
- LOGI() << "~dtor";
+ LOGI() << "~dtor" << ios::GetCurrentThreadDescription();
RTC_DCHECK(thread_checker_.CalledOnValidThread());
Terminate();
}
@@ -245,8 +341,16 @@ int32_t AudioDeviceIOS::Terminate() {
if (!initialized_) {
return 0;
}
- ShutdownPlayOrRecord();
+ StopPlayout();
+ StopRecording();
initialized_ = false;
+ {
+ rtc::GlobalLockScope ls(&g_lock);
+ if (g_audio_session_users != 0) {
+      LOG(LS_WARNING) << "Object is destroyed with an active audio session";
+ }
+ RTC_DCHECK_GE(g_audio_session_users, 0);
+ }
return 0;
}
@@ -258,7 +362,7 @@ int32_t AudioDeviceIOS::InitPlayout() {
RTC_DCHECK(!playing_);
if (!rec_is_initialized_) {
if (!InitPlayOrRecord()) {
- LOG_F(LS_ERROR) << "InitPlayOrRecord failed!";
+ LOG_F(LS_ERROR) << "InitPlayOrRecord failed for InitPlayout!";
return -1;
}
}
@@ -274,7 +378,7 @@ int32_t AudioDeviceIOS::InitRecording() {
RTC_DCHECK(!recording_);
if (!play_is_initialized_) {
if (!InitPlayOrRecord()) {
- LOG_F(LS_ERROR) << "InitPlayOrRecord failed!";
+ LOG_F(LS_ERROR) << "InitPlayOrRecord failed for InitRecording!";
return -1;
}
}
@@ -291,9 +395,11 @@ int32_t AudioDeviceIOS::StartPlayout() {
if (!recording_) {
OSStatus result = AudioOutputUnitStart(vpio_unit_);
if (result != noErr) {
- LOG_F(LS_ERROR) << "AudioOutputUnitStart failed: " << result;
+ LOG_F(LS_ERROR) << "AudioOutputUnitStart failed for StartPlayout: "
+ << result;
return -1;
}
+ LOG(LS_INFO) << "Voice-Processing I/O audio unit is now started";
}
rtc::AtomicOps::ReleaseStore(&playing_, 1);
return 0;
@@ -322,9 +428,11 @@ int32_t AudioDeviceIOS::StartRecording() {
if (!playing_) {
OSStatus result = AudioOutputUnitStart(vpio_unit_);
if (result != noErr) {
- LOG_F(LS_ERROR) << "AudioOutputUnitStart failed: " << result;
+ LOG_F(LS_ERROR) << "AudioOutputUnitStart failed for StartRecording: "
+ << result;
return -1;
}
+ LOG(LS_INFO) << "Voice-Processing I/O audio unit is now started";
}
rtc::AtomicOps::ReleaseStore(&recording_, 1);
return 0;
@@ -474,11 +582,12 @@ void AudioDeviceIOS::RegisterNotificationObservers() {
LOG(LS_INFO) << " OldDeviceUnavailable";
break;
case AVAudioSessionRouteChangeReasonCategoryChange:
+        // It turns out that we see this notification (at least in iOS 9.2)
+        // when switching from a BT device to e.g. the Speaker using the
+        // iOS Control Center. We must therefore check if the sample rate
+        // has changed and, if so, restart the audio unit.
LOG(LS_INFO) << " CategoryChange";
LOG(LS_INFO) << " New category: " << ios::GetAudioSessionCategory();
- // Don't see this as route change since it can be triggered in
- // combination with session interruptions as well.
- valid_route_change = false;
break;
case AVAudioSessionRouteChangeReasonOverride:
LOG(LS_INFO) << " Override";
@@ -490,9 +599,11 @@ void AudioDeviceIOS::RegisterNotificationObservers() {
LOG(LS_INFO) << " NoSuitableRouteForCategory";
break;
case AVAudioSessionRouteChangeReasonRouteConfigurationChange:
- // Ignore this type of route change since we are focusing
+ // The set of input and output ports has not changed, but their
+ // configuration has, e.g., a port’s selected data source has
+ // changed. Ignore this type of route change since we are focusing
// on detecting headset changes.
- LOG(LS_INFO) << " RouteConfigurationChange";
+ LOG(LS_INFO) << " RouteConfigurationChange (ignored)";
valid_route_change = false;
break;
}
@@ -630,7 +741,7 @@ void AudioDeviceIOS::SetupAudioBuffersForActiveAudioSession() {
bool AudioDeviceIOS::SetupAndInitializeVoiceProcessingAudioUnit() {
LOGI() << "SetupAndInitializeVoiceProcessingAudioUnit";
- RTC_DCHECK(!vpio_unit_);
+ RTC_DCHECK(!vpio_unit_) << "VoiceProcessingIO audio unit already exists";
// Create an audio component description to identify the Voice-Processing
// I/O audio unit.
AudioComponentDescription vpio_unit_description;
@@ -639,34 +750,48 @@ bool AudioDeviceIOS::SetupAndInitializeVoiceProcessingAudioUnit() {
vpio_unit_description.componentManufacturer = kAudioUnitManufacturer_Apple;
vpio_unit_description.componentFlags = 0;
vpio_unit_description.componentFlagsMask = 0;
+
// Obtain an audio unit instance given the description.
AudioComponent found_vpio_unit_ref =
AudioComponentFindNext(nullptr, &vpio_unit_description);
// Create a Voice-Processing IO audio unit.
- LOG_AND_RETURN_IF_ERROR(
- AudioComponentInstanceNew(found_vpio_unit_ref, &vpio_unit_),
- "Failed to create a VoiceProcessingIO audio unit");
+ OSStatus result = noErr;
+ result = AudioComponentInstanceNew(found_vpio_unit_ref, &vpio_unit_);
+ if (result != noErr) {
+ vpio_unit_ = nullptr;
+ LOG(LS_ERROR) << "AudioComponentInstanceNew failed: " << result;
+ return false;
+ }
// A VP I/O unit's bus 1 connects to input hardware (microphone). Enable
// input on the input scope of the input element.
AudioUnitElement input_bus = 1;
UInt32 enable_input = 1;
- LOG_AND_RETURN_IF_ERROR(
- AudioUnitSetProperty(vpio_unit_, kAudioOutputUnitProperty_EnableIO,
- kAudioUnitScope_Input, input_bus, &enable_input,
- sizeof(enable_input)),
- "Failed to enable input on input scope of input element");
+ result = AudioUnitSetProperty(vpio_unit_, kAudioOutputUnitProperty_EnableIO,
+ kAudioUnitScope_Input, input_bus, &enable_input,
+ sizeof(enable_input));
+ if (result != noErr) {
+ DisposeAudioUnit();
+ LOG(LS_ERROR) << "Failed to enable input on input scope of input element: "
+ << result;
+ return false;
+ }
// A VP I/O unit's bus 0 connects to output hardware (speaker). Enable
// output on the output scope of the output element.
AudioUnitElement output_bus = 0;
UInt32 enable_output = 1;
- LOG_AND_RETURN_IF_ERROR(
- AudioUnitSetProperty(vpio_unit_, kAudioOutputUnitProperty_EnableIO,
- kAudioUnitScope_Output, output_bus, &enable_output,
- sizeof(enable_output)),
- "Failed to enable output on output scope of output element");
+ result = AudioUnitSetProperty(vpio_unit_, kAudioOutputUnitProperty_EnableIO,
+ kAudioUnitScope_Output, output_bus,
+ &enable_output, sizeof(enable_output));
+ if (result != noErr) {
+ DisposeAudioUnit();
+ LOG(LS_ERROR)
+ << "Failed to enable output on output scope of output element: "
+ << result;
+ return false;
+ }
// Set the application formats for input and output:
// - use same format in both directions
@@ -694,38 +819,55 @@ bool AudioDeviceIOS::SetupAndInitializeVoiceProcessingAudioUnit() {
#endif
// Set the application format on the output scope of the input element/bus.
- LOG_AND_RETURN_IF_ERROR(
- AudioUnitSetProperty(vpio_unit_, kAudioUnitProperty_StreamFormat,
- kAudioUnitScope_Output, input_bus,
- &application_format, size),
- "Failed to set application format on output scope of input element");
+ result = AudioUnitSetProperty(vpio_unit_, kAudioUnitProperty_StreamFormat,
+ kAudioUnitScope_Output, input_bus,
+ &application_format, size);
+ if (result != noErr) {
+ DisposeAudioUnit();
+ LOG(LS_ERROR)
+ << "Failed to set application format on output scope of input bus: "
+ << result;
+ return false;
+ }
// Set the application format on the input scope of the output element/bus.
- LOG_AND_RETURN_IF_ERROR(
- AudioUnitSetProperty(vpio_unit_, kAudioUnitProperty_StreamFormat,
- kAudioUnitScope_Input, output_bus,
- &application_format, size),
- "Failed to set application format on input scope of output element");
+ result = AudioUnitSetProperty(vpio_unit_, kAudioUnitProperty_StreamFormat,
+ kAudioUnitScope_Input, output_bus,
+ &application_format, size);
+ if (result != noErr) {
+ DisposeAudioUnit();
+ LOG(LS_ERROR)
+ << "Failed to set application format on input scope of output bus: "
+ << result;
+ return false;
+ }
// Specify the callback function that provides audio samples to the audio
// unit.
AURenderCallbackStruct render_callback;
render_callback.inputProc = GetPlayoutData;
render_callback.inputProcRefCon = this;
- LOG_AND_RETURN_IF_ERROR(
- AudioUnitSetProperty(vpio_unit_, kAudioUnitProperty_SetRenderCallback,
- kAudioUnitScope_Input, output_bus, &render_callback,
- sizeof(render_callback)),
- "Failed to specify the render callback on the output element");
+ result = AudioUnitSetProperty(
+ vpio_unit_, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input,
+ output_bus, &render_callback, sizeof(render_callback));
+ if (result != noErr) {
+ DisposeAudioUnit();
+ LOG(LS_ERROR) << "Failed to specify the render callback on the output bus: "
+ << result;
+ return false;
+ }
// Disable AU buffer allocation for the recorder, we allocate our own.
// TODO(henrika): not sure that it actually saves resource to make this call.
UInt32 flag = 0;
- LOG_AND_RETURN_IF_ERROR(
- AudioUnitSetProperty(vpio_unit_, kAudioUnitProperty_ShouldAllocateBuffer,
- kAudioUnitScope_Output, input_bus, &flag,
- sizeof(flag)),
- "Failed to disable buffer allocation on the input element");
+ result = AudioUnitSetProperty(
+ vpio_unit_, kAudioUnitProperty_ShouldAllocateBuffer,
+ kAudioUnitScope_Output, input_bus, &flag, sizeof(flag));
+  if (result != noErr) {
+    DisposeAudioUnit();
+    LOG(LS_ERROR) << "Failed to disable buffer allocation on the input bus: "
+                  << result;
+    return false;
+  }
// Specify the callback to be called by the I/O thread to us when input audio
// is available. The recorded samples can then be obtained by calling the
@@ -733,16 +875,39 @@ bool AudioDeviceIOS::SetupAndInitializeVoiceProcessingAudioUnit() {
AURenderCallbackStruct input_callback;
input_callback.inputProc = RecordedDataIsAvailable;
input_callback.inputProcRefCon = this;
- LOG_AND_RETURN_IF_ERROR(
- AudioUnitSetProperty(vpio_unit_,
- kAudioOutputUnitProperty_SetInputCallback,
- kAudioUnitScope_Global, input_bus, &input_callback,
- sizeof(input_callback)),
- "Failed to specify the input callback on the input element");
+ result = AudioUnitSetProperty(vpio_unit_,
+ kAudioOutputUnitProperty_SetInputCallback,
+ kAudioUnitScope_Global, input_bus,
+ &input_callback, sizeof(input_callback));
+  if (result != noErr) {
+    DisposeAudioUnit();
+    LOG(LS_ERROR) << "Failed to specify the input callback on the input bus: "
+                  << result;
+    return false;
+  }
// Initialize the Voice-Processing I/O unit instance.
- LOG_AND_RETURN_IF_ERROR(AudioUnitInitialize(vpio_unit_),
- "Failed to initialize the Voice-Processing I/O unit");
+ // Calls to AudioUnitInitialize() can fail if called back-to-back on
+ // different ADM instances. The error message in this case is -66635 which is
+ // undocumented. Tests have shown that calling AudioUnitInitialize a second
+ // time, after a short sleep, avoids this issue.
+ // See webrtc:5166 for details.
+  int failed_initialize_attempts = 0;
+  result = AudioUnitInitialize(vpio_unit_);
+  while (result != noErr) {
+    LOG(LS_ERROR) << "Failed to initialize the Voice-Processing I/O unit: "
+                  << result;
+    ++failed_initialize_attempts;
+    if (failed_initialize_attempts ==
+        kMaxNumberOfAudioUnitInitializeAttempts) {
+ // Max number of initialization attempts exceeded, hence abort.
+ LOG(LS_WARNING) << "Too many initialization attempts";
+ DisposeAudioUnit();
+ return false;
+ }
+    LOG(LS_INFO) << "Pause 100 ms and try audio unit initialization again...";
+ [NSThread sleepForTimeInterval:0.1f];
+ result = AudioUnitInitialize(vpio_unit_);
+ }
+ LOG(LS_INFO) << "Voice-Processing I/O unit is now initialized";
return true;
}
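// Timing note for the retry loop above: with
// kMaxNumberOfAudioUnitInitializeAttempts == 5 and a 0.1 s pause between
// attempts, a persistently failing AudioUnitInitialize() sleeps 4 times,
// adding at most 4 * 100 ms = 400 ms before setup gives up and returns false.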
@@ -772,18 +937,29 @@ bool AudioDeviceIOS::RestartAudioUnitWithNewFormat(float sample_rate) {
// Prepare the audio unit to render audio again.
LOG_AND_RETURN_IF_ERROR(AudioUnitInitialize(vpio_unit_),
"Failed to initialize the Voice-Processing I/O unit");
+ LOG(LS_INFO) << "Voice-Processing I/O unit is now reinitialized";
// Start rendering audio using the new format.
LOG_AND_RETURN_IF_ERROR(AudioOutputUnitStart(vpio_unit_),
"Failed to start the Voice-Processing I/O unit");
+ LOG(LS_INFO) << "Voice-Processing I/O unit is now restarted";
return true;
}
bool AudioDeviceIOS::InitPlayOrRecord() {
LOGI() << "InitPlayOrRecord";
+ // Activate the audio session if not already activated.
+ if (!ActivateAudioSession()) {
+ return false;
+ }
+
+ // Ensure that the active audio session has the correct category and mode.
AVAudioSession* session = [AVAudioSession sharedInstance];
- // Activate the audio session and ask for a set of preferred audio parameters.
- ActivateAudioSession(session, true);
+ if (!VerifyAudioSession(session)) {
+ DeactivateAudioSession();
+ LOG(LS_ERROR) << "Failed to verify audio session category and mode";
+ return false;
+ }
// Start observing audio session interruptions and route changes.
RegisterNotificationObservers();
@@ -793,16 +969,16 @@ bool AudioDeviceIOS::InitPlayOrRecord() {
// Create, setup and initialize a new Voice-Processing I/O unit.
if (!SetupAndInitializeVoiceProcessingAudioUnit()) {
+ // Reduce usage count for the audio session and possibly deactivate it if
+ // this object is the only user.
+ DeactivateAudioSession();
return false;
}
return true;
}
-bool AudioDeviceIOS::ShutdownPlayOrRecord() {
+void AudioDeviceIOS::ShutdownPlayOrRecord() {
LOGI() << "ShutdownPlayOrRecord";
- // Remove audio session notification observers.
- UnregisterNotificationObservers();
-
// Close and delete the voice-processing I/O unit.
OSStatus result = -1;
if (nullptr != vpio_unit_) {
@@ -814,18 +990,25 @@ bool AudioDeviceIOS::ShutdownPlayOrRecord() {
if (result != noErr) {
LOG_F(LS_ERROR) << "AudioUnitUninitialize failed: " << result;
}
- result = AudioComponentInstanceDispose(vpio_unit_);
- if (result != noErr) {
- LOG_F(LS_ERROR) << "AudioComponentInstanceDispose failed: " << result;
- }
- vpio_unit_ = nullptr;
+ DisposeAudioUnit();
}
+ // Remove audio session notification observers.
+ UnregisterNotificationObservers();
+
// All I/O should be stopped or paused prior to deactivating the audio
// session, hence we deactivate as last action.
- AVAudioSession* session = [AVAudioSession sharedInstance];
- ActivateAudioSession(session, false);
- return true;
+ DeactivateAudioSession();
+}
+
+void AudioDeviceIOS::DisposeAudioUnit() {
+ if (nullptr == vpio_unit_)
+ return;
+ OSStatus result = AudioComponentInstanceDispose(vpio_unit_);
+ if (result != noErr) {
+    LOG(LS_ERROR) << "AudioComponentInstanceDispose failed: " << result;
+ }
+ vpio_unit_ = nullptr;
}
OSStatus AudioDeviceIOS::RecordedDataIsAvailable(
@@ -855,8 +1038,11 @@ OSStatus AudioDeviceIOS::OnRecordedDataIsAvailable(
if (in_number_frames != record_parameters_.frames_per_buffer()) {
// We have seen short bursts (1-2 frames) where |in_number_frames| changes.
// Add a log to keep track of longer sequences if that should ever happen.
+ // Also return since calling AudioUnitRender in this state will only result
+ // in kAudio_ParamError (-50) anyhow.
LOG(LS_WARNING) << "in_number_frames (" << in_number_frames
<< ") != " << record_parameters_.frames_per_buffer();
+ return noErr;
}
// Obtain the recorded audio samples by initiating a rendering cycle.
// Since it happens on the input bus, the |io_data| parameter is a reference
@@ -866,7 +1052,7 @@ OSStatus AudioDeviceIOS::OnRecordedDataIsAvailable(
result = AudioUnitRender(vpio_unit_, io_action_flags, in_time_stamp,
in_bus_number, in_number_frames, io_data);
if (result != noErr) {
- LOG_F(LS_ERROR) << "AudioOutputUnitStart failed: " << result;
+ LOG_F(LS_ERROR) << "AudioUnitRender failed: " << result;
return result;
}
// Get a pointer to the recorded audio and send it to the WebRTC ADB.
diff --git a/webrtc/modules/audio_device/ios/audio_device_unittest_ios.cc b/webrtc/modules/audio_device/ios/audio_device_unittest_ios.cc
index b892f28b1d..076a67430d 100644
--- a/webrtc/modules/audio_device/ios/audio_device_unittest_ios.cc
+++ b/webrtc/modules/audio_device/ios/audio_device_unittest_ios.cc
@@ -373,7 +373,7 @@ class MockAudioTransport : public AudioTransport {
int32_t(const void* audioSamples,
const size_t nSamples,
const size_t nBytesPerSample,
- const uint8_t nChannels,
+ const size_t nChannels,
const uint32_t samplesPerSec,
const uint32_t totalDelayMS,
const int32_t clockDrift,
@@ -383,7 +383,7 @@ class MockAudioTransport : public AudioTransport {
MOCK_METHOD8(NeedMorePlayData,
int32_t(const size_t nSamples,
const size_t nBytesPerSample,
- const uint8_t nChannels,
+ const size_t nChannels,
const uint32_t samplesPerSec,
void* audioSamples,
size_t& nSamplesOut,
@@ -413,7 +413,7 @@ class MockAudioTransport : public AudioTransport {
int32_t RealRecordedDataIsAvailable(const void* audioSamples,
const size_t nSamples,
const size_t nBytesPerSample,
- const uint8_t nChannels,
+ const size_t nChannels,
const uint32_t samplesPerSec,
const uint32_t totalDelayMS,
const int32_t clockDrift,
@@ -428,14 +428,16 @@ class MockAudioTransport : public AudioTransport {
audio_stream_->Write(audioSamples, nSamples);
}
if (ReceivedEnoughCallbacks()) {
- test_is_done_->Set();
+ if (test_is_done_) {
+ test_is_done_->Set();
+ }
}
return 0;
}
int32_t RealNeedMorePlayData(const size_t nSamples,
const size_t nBytesPerSample,
- const uint8_t nChannels,
+ const size_t nChannels,
const uint32_t samplesPerSec,
void* audioSamples,
size_t& nSamplesOut,
@@ -450,7 +452,9 @@ class MockAudioTransport : public AudioTransport {
audio_stream_->Read(audioSamples, nSamples);
}
if (ReceivedEnoughCallbacks()) {
- test_is_done_->Set();
+ if (test_is_done_) {
+ test_is_done_->Set();
+ }
}
return 0;
}
@@ -636,6 +640,62 @@ TEST_F(AudioDeviceTest, StopPlayoutRequiresInitToRestart) {
EXPECT_FALSE(audio_device()->PlayoutIsInitialized());
}
+// Verify that we can create two ADMs and start playing on the second ADM.
+// Only the first active instance shall activate an audio session and the
+// last active instance shall deactivate the audio session. The test does not
+// explicitly verify correct audio session calls but instead focuses on
+// ensuring that audio starts for both ADMs.
+TEST_F(AudioDeviceTest, StartPlayoutOnTwoInstances) {
+ // Create and initialize a second/extra ADM instance. The default ADM is
+ // created by the test harness.
+ rtc::scoped_refptr<AudioDeviceModule> second_audio_device =
+ CreateAudioDevice(AudioDeviceModule::kPlatformDefaultAudio);
+ EXPECT_NE(second_audio_device.get(), nullptr);
+ EXPECT_EQ(0, second_audio_device->Init());
+
+ // Start playout for the default ADM but don't wait here. Instead use the
+ // upcoming second stream for that. We set the same expectation on number
+ // of callbacks as for the second stream.
+ NiceMock<MockAudioTransport> mock(kPlayout);
+ mock.HandleCallbacks(nullptr, nullptr, 0);
+ EXPECT_CALL(
+ mock, NeedMorePlayData(playout_frames_per_10ms_buffer(), kBytesPerSample,
+ playout_channels(), playout_sample_rate(),
+ NotNull(), _, _, _))
+ .Times(AtLeast(kNumCallbacks));
+ EXPECT_EQ(0, audio_device()->RegisterAudioCallback(&mock));
+ StartPlayout();
+
+ // Initialize playout for the second ADM. If all is OK, the second ADM shall
+ // reuse the audio session activated when the first ADM started playing.
+ // This call will also ensure that we avoid a problem related to initializing
+ // two different audio unit instances back to back (see webrtc:5166 for
+ // details).
+ EXPECT_EQ(0, second_audio_device->InitPlayout());
+ EXPECT_TRUE(second_audio_device->PlayoutIsInitialized());
+
+ // Start playout for the second ADM and verify that it starts as intended.
+ // Passing this test ensures that initialization of the second audio unit
+ // has been done successfully and that there is no conflict with the already
+ // playing first ADM.
+ MockAudioTransport mock2(kPlayout);
+ mock2.HandleCallbacks(test_is_done_.get(), nullptr, kNumCallbacks);
+ EXPECT_CALL(
+ mock2, NeedMorePlayData(playout_frames_per_10ms_buffer(), kBytesPerSample,
+ playout_channels(), playout_sample_rate(),
+ NotNull(), _, _, _))
+ .Times(AtLeast(kNumCallbacks));
+ EXPECT_EQ(0, second_audio_device->RegisterAudioCallback(&mock2));
+ EXPECT_EQ(0, second_audio_device->StartPlayout());
+ EXPECT_TRUE(second_audio_device->Playing());
+ test_is_done_->Wait(kTestTimeOutInMilliseconds);
+ EXPECT_EQ(0, second_audio_device->StopPlayout());
+ EXPECT_FALSE(second_audio_device->Playing());
+ EXPECT_FALSE(second_audio_device->PlayoutIsInitialized());
+
+ EXPECT_EQ(0, second_audio_device->Terminate());
+}
+
// Start playout and verify that the native audio layer starts asking for real
// audio samples to play out using the NeedMorePlayData callback.
TEST_F(AudioDeviceTest, StartPlayoutVerifyCallbacks) {
diff --git a/webrtc/modules/audio_device/linux/audio_device_alsa_linux.cc b/webrtc/modules/audio_device/linux/audio_device_alsa_linux.cc
index 8fa4fdf6f0..bdbccde050 100644
--- a/webrtc/modules/audio_device/linux/audio_device_alsa_linux.cc
+++ b/webrtc/modules/audio_device/linux/audio_device_alsa_linux.cc
@@ -207,7 +207,7 @@ int32_t AudioDeviceLinuxALSA::Terminate()
// RECORDING
if (_ptrThreadRec)
{
- ThreadWrapper* tmpThread = _ptrThreadRec.release();
+ rtc::PlatformThread* tmpThread = _ptrThreadRec.release();
_critSect.Leave();
tmpThread->Stop();
@@ -219,7 +219,7 @@ int32_t AudioDeviceLinuxALSA::Terminate()
// PLAYOUT
if (_ptrThreadPlay)
{
- ThreadWrapper* tmpThread = _ptrThreadPlay.release();
+ rtc::PlatformThread* tmpThread = _ptrThreadPlay.release();
_critSect.Leave();
tmpThread->Stop();
@@ -1363,21 +1363,11 @@ int32_t AudioDeviceLinuxALSA::StartRecording()
return -1;
}
// RECORDING
- const char* threadName = "webrtc_audio_module_capture_thread";
- _ptrThreadRec = ThreadWrapper::CreateThread(
- RecThreadFunc, this, threadName);
+ _ptrThreadRec.reset(new rtc::PlatformThread(
+ RecThreadFunc, this, "webrtc_audio_module_capture_thread"));
- if (!_ptrThreadRec->Start())
- {
- WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
- " failed to start the rec audio thread");
- _recording = false;
- _ptrThreadRec.reset();
- delete [] _recordingBuffer;
- _recordingBuffer = NULL;
- return -1;
- }
- _ptrThreadRec->SetPriority(kRealtimePriority);
+ _ptrThreadRec->Start();
+ _ptrThreadRec->SetPriority(rtc::kRealtimePriority);
errVal = LATE(snd_pcm_prepare)(_handleRecord);
if (errVal < 0)
@@ -1517,20 +1507,10 @@ int32_t AudioDeviceLinuxALSA::StartPlayout()
}
// PLAYOUT
- const char* threadName = "webrtc_audio_module_play_thread";
- _ptrThreadPlay = ThreadWrapper::CreateThread(PlayThreadFunc, this,
- threadName);
- if (!_ptrThreadPlay->Start())
- {
- WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
- " failed to start the play audio thread");
- _playing = false;
- _ptrThreadPlay.reset();
- delete [] _playoutBuffer;
- _playoutBuffer = NULL;
- return -1;
- }
- _ptrThreadPlay->SetPriority(kRealtimePriority);
+ _ptrThreadPlay.reset(new rtc::PlatformThread(
+ PlayThreadFunc, this, "webrtc_audio_module_play_thread"));
+ _ptrThreadPlay->Start();
+ _ptrThreadPlay->SetPriority(rtc::kRealtimePriority);
int errVal = LATE(snd_pcm_prepare)(_handlePlayout);
if (errVal < 0)
diff --git a/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h b/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h
index e2391a0456..4a1a5191be 100644
--- a/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h
+++ b/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h
@@ -11,10 +11,10 @@
#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_ALSA_LINUX_H
#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_ALSA_LINUX_H
+#include "webrtc/base/platform_thread.h"
#include "webrtc/modules/audio_device/audio_device_generic.h"
#include "webrtc/modules/audio_device/linux/audio_mixer_manager_alsa_linux.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
#if defined(USE_X11)
#include <X11/Xlib.h>
@@ -185,8 +185,10 @@ private:
CriticalSectionWrapper& _critSect;
- rtc::scoped_ptr<ThreadWrapper> _ptrThreadRec;
- rtc::scoped_ptr<ThreadWrapper> _ptrThreadPlay;
+ // TODO(pbos): Make plain members and start/stop instead of resetting these
+ // pointers. A thread can be reused.
+ rtc::scoped_ptr<rtc::PlatformThread> _ptrThreadRec;
+ rtc::scoped_ptr<rtc::PlatformThread> _ptrThreadPlay;
int32_t _id;
diff --git a/webrtc/modules/audio_device/linux/audio_device_pulse_linux.cc b/webrtc/modules/audio_device/linux/audio_device_pulse_linux.cc
index 929a758e40..42c3ea8295 100644
--- a/webrtc/modules/audio_device/linux/audio_device_pulse_linux.cc
+++ b/webrtc/modules/audio_device/linux/audio_device_pulse_linux.cc
@@ -200,33 +200,17 @@ int32_t AudioDeviceLinuxPulse::Init()
}
// RECORDING
- const char* threadName = "webrtc_audio_module_rec_thread";
- _ptrThreadRec = ThreadWrapper::CreateThread(RecThreadFunc, this,
- threadName);
- if (!_ptrThreadRec->Start())
- {
- WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
- " failed to start the rec audio thread");
-
- _ptrThreadRec.reset();
- return -1;
- }
+ _ptrThreadRec.reset(new rtc::PlatformThread(
+ RecThreadFunc, this, "webrtc_audio_module_rec_thread"));
- _ptrThreadRec->SetPriority(kRealtimePriority);
+ _ptrThreadRec->Start();
+ _ptrThreadRec->SetPriority(rtc::kRealtimePriority);
// PLAYOUT
- threadName = "webrtc_audio_module_play_thread";
- _ptrThreadPlay = ThreadWrapper::CreateThread(PlayThreadFunc, this,
- threadName);
- if (!_ptrThreadPlay->Start())
- {
- WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
- " failed to start the play audio thread");
-
- _ptrThreadPlay.reset();
- return -1;
- }
- _ptrThreadPlay->SetPriority(kRealtimePriority);
+ _ptrThreadPlay.reset(new rtc::PlatformThread(
+ PlayThreadFunc, this, "webrtc_audio_module_play_thread"));
+ _ptrThreadPlay->Start();
+ _ptrThreadPlay->SetPriority(rtc::kRealtimePriority);
_initialized = true;
@@ -246,7 +230,7 @@ int32_t AudioDeviceLinuxPulse::Terminate()
// RECORDING
if (_ptrThreadRec)
{
- ThreadWrapper* tmpThread = _ptrThreadRec.release();
+ rtc::PlatformThread* tmpThread = _ptrThreadRec.release();
_timeEventRec.Set();
tmpThread->Stop();
@@ -256,7 +240,7 @@ int32_t AudioDeviceLinuxPulse::Terminate()
// PLAYOUT
if (_ptrThreadPlay)
{
- ThreadWrapper* tmpThread = _ptrThreadPlay.release();
+ rtc::PlatformThread* tmpThread = _ptrThreadPlay.release();
_timeEventPlay.Set();
tmpThread->Stop();
diff --git a/webrtc/modules/audio_device/linux/audio_device_pulse_linux.h b/webrtc/modules/audio_device/linux/audio_device_pulse_linux.h
index 718331188d..de8df0be1b 100644
--- a/webrtc/modules/audio_device/linux/audio_device_pulse_linux.h
+++ b/webrtc/modules/audio_device/linux/audio_device_pulse_linux.h
@@ -11,11 +11,11 @@
#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_PULSE_LINUX_H
#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_PULSE_LINUX_H
+#include "webrtc/base/platform_thread.h"
+#include "webrtc/base/thread_checker.h"
#include "webrtc/modules/audio_device/audio_device_generic.h"
#include "webrtc/modules/audio_device/linux/audio_mixer_manager_pulse_linux.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
-#include "webrtc/base/thread_checker.h"
#include <X11/Xlib.h>
#include <pulse/pulseaudio.h>
@@ -284,8 +284,9 @@ private:
EventWrapper& _recStartEvent;
EventWrapper& _playStartEvent;
- rtc::scoped_ptr<ThreadWrapper> _ptrThreadPlay;
- rtc::scoped_ptr<ThreadWrapper> _ptrThreadRec;
+ // TODO(pbos): Remove scoped_ptr and use directly without resetting.
+ rtc::scoped_ptr<rtc::PlatformThread> _ptrThreadPlay;
+ rtc::scoped_ptr<rtc::PlatformThread> _ptrThreadRec;
int32_t _id;
AudioMixerManagerLinuxPulse _mixerManager;
diff --git a/webrtc/modules/audio_device/mac/audio_device_mac.cc b/webrtc/modules/audio_device/mac/audio_device_mac.cc
index db98675bf6..0f33d1124d 100644
--- a/webrtc/modules/audio_device/mac/audio_device_mac.cc
+++ b/webrtc/modules/audio_device/mac/audio_device_mac.cc
@@ -10,3227 +10,2758 @@
#include "webrtc/base/arraysize.h"
#include "webrtc/base/checks.h"
+#include "webrtc/base/platform_thread.h"
#include "webrtc/modules/audio_device/audio_device_config.h"
#include "webrtc/modules/audio_device/mac/audio_device_mac.h"
#include "webrtc/modules/audio_device/mac/portaudio/pa_ringbuffer.h"
#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h"
#include <ApplicationServices/ApplicationServices.h>
-#include <libkern/OSAtomic.h> // OSAtomicCompareAndSwap()
-#include <mach/mach.h> // mach_task_self()
-#include <sys/sysctl.h> // sysctlbyname()
-
-
-
-namespace webrtc
-{
-
-#define WEBRTC_CA_RETURN_ON_ERR(expr) \
- do { \
- err = expr; \
- if (err != noErr) { \
- logCAMsg(kTraceError, kTraceAudioDevice, _id, \
- "Error in " #expr, (const char *)&err); \
- return -1; \
- } \
- } while(0)
-
-#define WEBRTC_CA_LOG_ERR(expr) \
- do { \
- err = expr; \
- if (err != noErr) { \
- logCAMsg(kTraceError, kTraceAudioDevice, _id, \
- "Error in " #expr, (const char *)&err); \
- } \
- } while(0)
-
-#define WEBRTC_CA_LOG_WARN(expr) \
- do { \
- err = expr; \
- if (err != noErr) { \
- logCAMsg(kTraceWarning, kTraceAudioDevice, _id, \
- "Error in " #expr, (const char *)&err); \
- } \
- } while(0)
-
-enum
-{
- MaxNumberDevices = 64
-};
-
-void AudioDeviceMac::AtomicSet32(int32_t* theValue, int32_t newValue)
-{
- while (1)
- {
- int32_t oldValue = *theValue;
- if (OSAtomicCompareAndSwap32Barrier(oldValue, newValue, theValue)
- == true)
- {
- return;
- }
+#include <libkern/OSAtomic.h> // OSAtomicCompareAndSwap()
+#include <mach/mach.h> // mach_task_self()
+#include <sys/sysctl.h> // sysctlbyname()
+
+namespace webrtc {
+
+#define WEBRTC_CA_RETURN_ON_ERR(expr) \
+ do { \
+ err = expr; \
+ if (err != noErr) { \
+ logCAMsg(kTraceError, kTraceAudioDevice, _id, "Error in " #expr, \
+ (const char*) & err); \
+ return -1; \
+ } \
+ } while (0)
+
+#define WEBRTC_CA_LOG_ERR(expr) \
+ do { \
+ err = expr; \
+ if (err != noErr) { \
+ logCAMsg(kTraceError, kTraceAudioDevice, _id, "Error in " #expr, \
+ (const char*) & err); \
+ } \
+ } while (0)
+
+#define WEBRTC_CA_LOG_WARN(expr) \
+ do { \
+ err = expr; \
+ if (err != noErr) { \
+ logCAMsg(kTraceWarning, kTraceAudioDevice, _id, "Error in " #expr, \
+ (const char*) & err); \
+ } \
+ } while (0)
+
+enum { MaxNumberDevices = 64 };
+
+void AudioDeviceMac::AtomicSet32(int32_t* theValue, int32_t newValue) {
+ while (1) {
+ int32_t oldValue = *theValue;
+ if (OSAtomicCompareAndSwap32Barrier(oldValue, newValue, theValue) == true) {
+ return;
}
+ }
}
-int32_t AudioDeviceMac::AtomicGet32(int32_t* theValue)
-{
- while (1)
- {
- int32_t value = *theValue;
- if (OSAtomicCompareAndSwap32Barrier(value, value, theValue) == true)
- {
- return value;
- }
+int32_t AudioDeviceMac::AtomicGet32(int32_t* theValue) {
+ while (1) {
+ int32_t value = *theValue;
+ if (OSAtomicCompareAndSwap32Barrier(value, value, theValue) == true) {
+ return value;
}
+ }
}
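// Equivalence sketch: the CAS loops above emulate an atomic store/load with
// a full memory barrier. With C++11 atomics the same effect could be
// written as:
//
//   std::atomic<int32_t> value;
//   value.store(newValue, std::memory_order_seq_cst);         // AtomicSet32
//   int32_t current = value.load(std::memory_order_seq_cst);  // AtomicGet32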
// CoreAudio errors are best interpreted as four character strings.
void AudioDeviceMac::logCAMsg(const TraceLevel level,
const TraceModule module,
- const int32_t id, const char *msg,
- const char *err)
-{
+ const int32_t id,
+ const char* msg,
+ const char* err) {
RTC_DCHECK(msg != NULL);
RTC_DCHECK(err != NULL);
#ifdef WEBRTC_ARCH_BIG_ENDIAN
- WEBRTC_TRACE(level, module, id, "%s: %.4s", msg, err);
+ WEBRTC_TRACE(level, module, id, "%s: %.4s", msg, err);
#else
- // We need to flip the characters in this case.
- WEBRTC_TRACE(level, module, id, "%s: %.1s%.1s%.1s%.1s", msg, err + 3, err
- + 2, err + 1, err);
+ // We need to flip the characters in this case.
+ WEBRTC_TRACE(level, module, id, "%s: %.1s%.1s%.1s%.1s", msg, err + 3, err + 2,
+ err + 1, err);
#endif
}
-AudioDeviceMac::AudioDeviceMac(const int32_t id) :
- _ptrAudioBuffer(NULL),
- _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
- _stopEventRec(*EventWrapper::Create()),
- _stopEvent(*EventWrapper::Create()),
- _id(id),
- _mixerManager(id),
- _inputDeviceIndex(0),
- _outputDeviceIndex(0),
- _inputDeviceID(kAudioObjectUnknown),
- _outputDeviceID(kAudioObjectUnknown),
- _inputDeviceIsSpecified(false),
- _outputDeviceIsSpecified(false),
- _recChannels(N_REC_CHANNELS),
- _playChannels(N_PLAY_CHANNELS),
- _captureBufData(NULL),
- _renderBufData(NULL),
- _playBufType(AudioDeviceModule::kFixedBufferSize),
- _initialized(false),
- _isShutDown(false),
- _recording(false),
- _playing(false),
- _recIsInitialized(false),
- _playIsInitialized(false),
- _AGC(false),
- _renderDeviceIsAlive(1),
- _captureDeviceIsAlive(1),
- _twoDevices(true),
- _doStop(false),
- _doStopRec(false),
- _macBookPro(false),
- _macBookProPanRight(false),
- _captureLatencyUs(0),
- _renderLatencyUs(0),
- _captureDelayUs(0),
- _renderDelayUs(0),
- _renderDelayOffsetSamples(0),
- _playBufDelayFixed(20),
- _playWarning(0),
- _playError(0),
- _recWarning(0),
- _recError(0),
- _paCaptureBuffer(NULL),
- _paRenderBuffer(NULL),
- _captureBufSizeSamples(0),
- _renderBufSizeSamples(0),
- prev_key_state_()
-{
- WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, id,
- "%s created", __FUNCTION__);
-
- RTC_DCHECK(&_stopEvent != NULL);
- RTC_DCHECK(&_stopEventRec != NULL);
-
- memset(_renderConvertData, 0, sizeof(_renderConvertData));
- memset(&_outStreamFormat, 0, sizeof(AudioStreamBasicDescription));
- memset(&_outDesiredFormat, 0, sizeof(AudioStreamBasicDescription));
- memset(&_inStreamFormat, 0, sizeof(AudioStreamBasicDescription));
- memset(&_inDesiredFormat, 0, sizeof(AudioStreamBasicDescription));
-}
-
-
-AudioDeviceMac::~AudioDeviceMac()
-{
- WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id,
- "%s destroyed", __FUNCTION__);
-
- if (!_isShutDown)
- {
- Terminate();
- }
+AudioDeviceMac::AudioDeviceMac(const int32_t id)
+ : _ptrAudioBuffer(NULL),
+ _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+ _stopEventRec(*EventWrapper::Create()),
+ _stopEvent(*EventWrapper::Create()),
+ _id(id),
+ _mixerManager(id),
+ _inputDeviceIndex(0),
+ _outputDeviceIndex(0),
+ _inputDeviceID(kAudioObjectUnknown),
+ _outputDeviceID(kAudioObjectUnknown),
+ _inputDeviceIsSpecified(false),
+ _outputDeviceIsSpecified(false),
+ _recChannels(N_REC_CHANNELS),
+ _playChannels(N_PLAY_CHANNELS),
+ _captureBufData(NULL),
+ _renderBufData(NULL),
+ _playBufType(AudioDeviceModule::kFixedBufferSize),
+ _initialized(false),
+ _isShutDown(false),
+ _recording(false),
+ _playing(false),
+ _recIsInitialized(false),
+ _playIsInitialized(false),
+ _AGC(false),
+ _renderDeviceIsAlive(1),
+ _captureDeviceIsAlive(1),
+ _twoDevices(true),
+ _doStop(false),
+ _doStopRec(false),
+ _macBookPro(false),
+ _macBookProPanRight(false),
+ _captureLatencyUs(0),
+ _renderLatencyUs(0),
+ _captureDelayUs(0),
+ _renderDelayUs(0),
+ _renderDelayOffsetSamples(0),
+ _playBufDelayFixed(20),
+ _playWarning(0),
+ _playError(0),
+ _recWarning(0),
+ _recError(0),
+ _paCaptureBuffer(NULL),
+ _paRenderBuffer(NULL),
+ _captureBufSizeSamples(0),
+ _renderBufSizeSamples(0),
+ prev_key_state_(),
+ get_mic_volume_counter_ms_(0) {
+ WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, id, "%s created", __FUNCTION__);
+
+ RTC_DCHECK(&_stopEvent != NULL);
+ RTC_DCHECK(&_stopEventRec != NULL);
+
+ memset(_renderConvertData, 0, sizeof(_renderConvertData));
+ memset(&_outStreamFormat, 0, sizeof(AudioStreamBasicDescription));
+ memset(&_outDesiredFormat, 0, sizeof(AudioStreamBasicDescription));
+ memset(&_inStreamFormat, 0, sizeof(AudioStreamBasicDescription));
+ memset(&_inDesiredFormat, 0, sizeof(AudioStreamBasicDescription));
+}
+
+AudioDeviceMac::~AudioDeviceMac() {
+ WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s destroyed",
+ __FUNCTION__);
+
+ if (!_isShutDown) {
+ Terminate();
+ }
- RTC_DCHECK(!capture_worker_thread_.get());
- RTC_DCHECK(!render_worker_thread_.get());
+ RTC_DCHECK(!capture_worker_thread_.get());
+ RTC_DCHECK(!render_worker_thread_.get());
- if (_paRenderBuffer)
- {
- delete _paRenderBuffer;
- _paRenderBuffer = NULL;
- }
+ if (_paRenderBuffer) {
+ delete _paRenderBuffer;
+ _paRenderBuffer = NULL;
+ }
- if (_paCaptureBuffer)
- {
- delete _paCaptureBuffer;
- _paCaptureBuffer = NULL;
- }
+ if (_paCaptureBuffer) {
+ delete _paCaptureBuffer;
+ _paCaptureBuffer = NULL;
+ }
- if (_renderBufData)
- {
- delete[] _renderBufData;
- _renderBufData = NULL;
- }
+ if (_renderBufData) {
+ delete[] _renderBufData;
+ _renderBufData = NULL;
+ }
- if (_captureBufData)
- {
- delete[] _captureBufData;
- _captureBufData = NULL;
- }
+ if (_captureBufData) {
+ delete[] _captureBufData;
+ _captureBufData = NULL;
+ }
- kern_return_t kernErr = KERN_SUCCESS;
- kernErr = semaphore_destroy(mach_task_self(), _renderSemaphore);
- if (kernErr != KERN_SUCCESS)
- {
- WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
- " semaphore_destroy() error: %d", kernErr);
- }
+ kern_return_t kernErr = KERN_SUCCESS;
+ kernErr = semaphore_destroy(mach_task_self(), _renderSemaphore);
+ if (kernErr != KERN_SUCCESS) {
+ WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+ " semaphore_destroy() error: %d", kernErr);
+ }
- kernErr = semaphore_destroy(mach_task_self(), _captureSemaphore);
- if (kernErr != KERN_SUCCESS)
- {
- WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
- " semaphore_destroy() error: %d", kernErr);
- }
+ kernErr = semaphore_destroy(mach_task_self(), _captureSemaphore);
+ if (kernErr != KERN_SUCCESS) {
+ WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+ " semaphore_destroy() error: %d", kernErr);
+ }
- delete &_stopEvent;
- delete &_stopEventRec;
- delete &_critSect;
+ delete &_stopEvent;
+ delete &_stopEventRec;
+ delete &_critSect;
}
// ============================================================================
// API
// ============================================================================
-void AudioDeviceMac::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer)
-{
+void AudioDeviceMac::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
+ CriticalSectionScoped lock(&_critSect);
- CriticalSectionScoped lock(&_critSect);
+ _ptrAudioBuffer = audioBuffer;
- _ptrAudioBuffer = audioBuffer;
-
- // inform the AudioBuffer about default settings for this implementation
- _ptrAudioBuffer->SetRecordingSampleRate(N_REC_SAMPLES_PER_SEC);
- _ptrAudioBuffer->SetPlayoutSampleRate(N_PLAY_SAMPLES_PER_SEC);
- _ptrAudioBuffer->SetRecordingChannels(N_REC_CHANNELS);
- _ptrAudioBuffer->SetPlayoutChannels(N_PLAY_CHANNELS);
+ // inform the AudioBuffer about default settings for this implementation
+ _ptrAudioBuffer->SetRecordingSampleRate(N_REC_SAMPLES_PER_SEC);
+ _ptrAudioBuffer->SetPlayoutSampleRate(N_PLAY_SAMPLES_PER_SEC);
+ _ptrAudioBuffer->SetRecordingChannels(N_REC_CHANNELS);
+ _ptrAudioBuffer->SetPlayoutChannels(N_PLAY_CHANNELS);
}
int32_t AudioDeviceMac::ActiveAudioLayer(
- AudioDeviceModule::AudioLayer& audioLayer) const
-{
- audioLayer = AudioDeviceModule::kPlatformDefaultAudio;
- return 0;
+ AudioDeviceModule::AudioLayer& audioLayer) const {
+ audioLayer = AudioDeviceModule::kPlatformDefaultAudio;
+ return 0;
}
-int32_t AudioDeviceMac::Init()
-{
+int32_t AudioDeviceMac::Init() {
+ CriticalSectionScoped lock(&_critSect);
- CriticalSectionScoped lock(&_critSect);
+ if (_initialized) {
+ return 0;
+ }
- if (_initialized)
- {
- return 0;
- }
+ OSStatus err = noErr;
- OSStatus err = noErr;
+ _isShutDown = false;
- _isShutDown = false;
-
- // PortAudio ring buffers require an elementCount which is a power of two.
- if (_renderBufData == NULL)
- {
- UInt32 powerOfTwo = 1;
- while (powerOfTwo < PLAY_BUF_SIZE_IN_SAMPLES)
- {
- powerOfTwo <<= 1;
- }
- _renderBufSizeSamples = powerOfTwo;
- _renderBufData = new SInt16[_renderBufSizeSamples];
+ // PortAudio ring buffers require an elementCount which is a power of two.
+ if (_renderBufData == NULL) {
+ UInt32 powerOfTwo = 1;
+ while (powerOfTwo < PLAY_BUF_SIZE_IN_SAMPLES) {
+ powerOfTwo <<= 1;
}
+ _renderBufSizeSamples = powerOfTwo;
+ _renderBufData = new SInt16[_renderBufSizeSamples];
+ }
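// The loop above rounds the sample count up to the next power of two, as
// the PortAudio ring buffer requires. For a hypothetical request of 3000
// samples:
//
//   UInt32 powerOfTwo = 1;
//   while (powerOfTwo < 3000)
//     powerOfTwo <<= 1;  // 1, 2, 4, ..., 2048, 4096
//   // powerOfTwo == 4096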
- if (_paRenderBuffer == NULL)
- {
- _paRenderBuffer = new PaUtilRingBuffer;
- PaRingBufferSize bufSize = -1;
- bufSize = PaUtil_InitializeRingBuffer(_paRenderBuffer, sizeof(SInt16),
- _renderBufSizeSamples,
- _renderBufData);
- if (bufSize == -1)
- {
- WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice,
- _id, " PaUtil_InitializeRingBuffer() error");
- return -1;
- }
+ if (_paRenderBuffer == NULL) {
+ _paRenderBuffer = new PaUtilRingBuffer;
+ PaRingBufferSize bufSize = -1;
+ bufSize = PaUtil_InitializeRingBuffer(
+ _paRenderBuffer, sizeof(SInt16), _renderBufSizeSamples, _renderBufData);
+ if (bufSize == -1) {
+ WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+ " PaUtil_InitializeRingBuffer() error");
+ return -1;
}
+ }
- if (_captureBufData == NULL)
- {
- UInt32 powerOfTwo = 1;
- while (powerOfTwo < REC_BUF_SIZE_IN_SAMPLES)
- {
- powerOfTwo <<= 1;
- }
- _captureBufSizeSamples = powerOfTwo;
- _captureBufData = new Float32[_captureBufSizeSamples];
+ if (_captureBufData == NULL) {
+ UInt32 powerOfTwo = 1;
+ while (powerOfTwo < REC_BUF_SIZE_IN_SAMPLES) {
+ powerOfTwo <<= 1;
}
+ _captureBufSizeSamples = powerOfTwo;
+ _captureBufData = new Float32[_captureBufSizeSamples];
+ }
- if (_paCaptureBuffer == NULL)
- {
- _paCaptureBuffer = new PaUtilRingBuffer;
- PaRingBufferSize bufSize = -1;
- bufSize = PaUtil_InitializeRingBuffer(_paCaptureBuffer,
- sizeof(Float32),
- _captureBufSizeSamples,
- _captureBufData);
- if (bufSize == -1)
- {
- WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice,
- _id, " PaUtil_InitializeRingBuffer() error");
- return -1;
- }
+ if (_paCaptureBuffer == NULL) {
+ _paCaptureBuffer = new PaUtilRingBuffer;
+ PaRingBufferSize bufSize = -1;
+ bufSize =
+ PaUtil_InitializeRingBuffer(_paCaptureBuffer, sizeof(Float32),
+ _captureBufSizeSamples, _captureBufData);
+ if (bufSize == -1) {
+ WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+ " PaUtil_InitializeRingBuffer() error");
+ return -1;
}
+ }
- kern_return_t kernErr = KERN_SUCCESS;
- kernErr = semaphore_create(mach_task_self(), &_renderSemaphore,
- SYNC_POLICY_FIFO, 0);
- if (kernErr != KERN_SUCCESS)
- {
- WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
- " semaphore_create() error: %d", kernErr);
- return -1;
- }
+ kern_return_t kernErr = KERN_SUCCESS;
+ kernErr = semaphore_create(mach_task_self(), &_renderSemaphore,
+ SYNC_POLICY_FIFO, 0);
+ if (kernErr != KERN_SUCCESS) {
+ WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+ " semaphore_create() error: %d", kernErr);
+ return -1;
+ }
- kernErr = semaphore_create(mach_task_self(), &_captureSemaphore,
- SYNC_POLICY_FIFO, 0);
- if (kernErr != KERN_SUCCESS)
- {
- WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
- " semaphore_create() error: %d", kernErr);
- return -1;
- }
+ kernErr = semaphore_create(mach_task_self(), &_captureSemaphore,
+ SYNC_POLICY_FIFO, 0);
+ if (kernErr != KERN_SUCCESS) {
+ WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+ " semaphore_create() error: %d", kernErr);
+ return -1;
+ }
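Editor's note: the two Mach semaphores created above back the usual
producer/consumer handshake between the HAL IOProcs and the worker threads.
The counterpart calls (used elsewhere in this file; shown here only for
orientation) are:

    // Producer (audio IOProc): announce that data is ready.
    semaphore_signal(_captureSemaphore);
    // Consumer (capture worker thread): block until data arrives.
    semaphore_wait(_captureSemaphore);
    // Teardown during shutdown.
    semaphore_destroy(mach_task_self(), _captureSemaphore);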
- // Setting RunLoop to NULL here instructs HAL to manage its own thread for
- // notifications. This was the default behaviour on OS X 10.5 and earlier,
- // but now must be explicitly specified. HAL would otherwise try to use the
- // main thread to issue notifications.
- AudioObjectPropertyAddress propertyAddress = {
- kAudioHardwarePropertyRunLoop,
- kAudioObjectPropertyScopeGlobal,
- kAudioObjectPropertyElementMaster };
- CFRunLoopRef runLoop = NULL;
- UInt32 size = sizeof(CFRunLoopRef);
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(kAudioObjectSystemObject,
- &propertyAddress, 0, NULL, size, &runLoop));
-
- // Listen for any device changes.
- propertyAddress.mSelector = kAudioHardwarePropertyDevices;
- WEBRTC_CA_LOG_ERR(AudioObjectAddPropertyListener(kAudioObjectSystemObject,
- &propertyAddress, &objectListenerProc, this));
-
- // Determine if this is a MacBook Pro
- _macBookPro = false;
- _macBookProPanRight = false;
- char buf[128];
- size_t length = sizeof(buf);
- memset(buf, 0, length);
-
- int intErr = sysctlbyname("hw.model", buf, &length, NULL, 0);
- if (intErr != 0)
- {
- WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
- " Error in sysctlbyname(): %d", err);
- } else
- {
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- " Hardware model: %s", buf);
- if (strncmp(buf, "MacBookPro", 10) == 0)
- {
- _macBookPro = true;
- }
+ // Setting RunLoop to NULL here instructs HAL to manage its own thread for
+ // notifications. This was the default behaviour on OS X 10.5 and earlier,
+ // but now must be explicitly specified. HAL would otherwise try to use the
+ // main thread to issue notifications.
+ AudioObjectPropertyAddress propertyAddress = {
+ kAudioHardwarePropertyRunLoop, kAudioObjectPropertyScopeGlobal,
+ kAudioObjectPropertyElementMaster};
+ CFRunLoopRef runLoop = NULL;
+ UInt32 size = sizeof(CFRunLoopRef);
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(
+ kAudioObjectSystemObject, &propertyAddress, 0, NULL, size, &runLoop));
+
+ // Listen for any device changes.
+ propertyAddress.mSelector = kAudioHardwarePropertyDevices;
+ WEBRTC_CA_LOG_ERR(AudioObjectAddPropertyListener(
+ kAudioObjectSystemObject, &propertyAddress, &objectListenerProc, this));
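Editor's note: objectListenerProc, registered above and defined elsewhere in
this file, must match CoreAudio's AudioObjectPropertyListenerProc type. A
do-nothing listener of the required shape, for reference:

    static OSStatus ExampleListenerProc(
        AudioObjectID objectId,
        UInt32 numberAddresses,
        const AudioObjectPropertyAddress addresses[],
        void* clientData) {
      // Inspect addresses[i].mSelector to learn which property changed.
      return noErr;
    }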
+
+ // Determine if this is a MacBook Pro
+ _macBookPro = false;
+ _macBookProPanRight = false;
+ char buf[128];
+ size_t length = sizeof(buf);
+ memset(buf, 0, length);
+
+ int intErr = sysctlbyname("hw.model", buf, &length, NULL, 0);
+ if (intErr != 0) {
+ WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+ " Error in sysctlbyname(): %d", err);
+ } else {
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, " Hardware model: %s",
+ buf);
+ if (strncmp(buf, "MacBookPro", 10) == 0) {
+ _macBookPro = true;
}
+ }
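Editor's note: "hw.model" yields identifiers such as "MacBookPro11,4", so
comparing the first 10 characters matches every MacBook Pro generation. A
self-contained version of the probe above (illustrative):

    #include <string.h>
    #include <sys/sysctl.h>

    static bool IsMacBookPro() {
      char model[128] = {0};
      size_t len = sizeof(model);
      if (sysctlbyname("hw.model", model, &len, NULL, 0) != 0) {
        return false;  // sysctl failed; assume not a MacBook Pro.
      }
      return strncmp(model, "MacBookPro", 10) == 0;
    }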
- _playWarning = 0;
- _playError = 0;
- _recWarning = 0;
- _recError = 0;
+ _playWarning = 0;
+ _playError = 0;
+ _recWarning = 0;
+ _recError = 0;
- _initialized = true;
+ get_mic_volume_counter_ms_ = 0;
- return 0;
+ _initialized = true;
+
+ return 0;
}
-int32_t AudioDeviceMac::Terminate()
-{
+int32_t AudioDeviceMac::Terminate() {
+ if (!_initialized) {
+ return 0;
+ }
- if (!_initialized)
- {
- return 0;
- }
+ if (_recording) {
+ WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+ " Recording must be stopped");
+ return -1;
+ }
- if (_recording)
- {
- WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
- " Recording must be stopped");
- return -1;
- }
+ if (_playing) {
+ WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+ " Playback must be stopped");
+ return -1;
+ }
- if (_playing)
- {
- WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
- " Playback must be stopped");
- return -1;
- }
+ _critSect.Enter();
- _critSect.Enter();
+ _mixerManager.Close();
- _mixerManager.Close();
+ OSStatus err = noErr;
+ int retVal = 0;
- OSStatus err = noErr;
- int retVal = 0;
+ AudioObjectPropertyAddress propertyAddress = {
+ kAudioHardwarePropertyDevices, kAudioObjectPropertyScopeGlobal,
+ kAudioObjectPropertyElementMaster};
+ WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(
+ kAudioObjectSystemObject, &propertyAddress, &objectListenerProc, this));
- AudioObjectPropertyAddress propertyAddress = {
- kAudioHardwarePropertyDevices, kAudioObjectPropertyScopeGlobal,
- kAudioObjectPropertyElementMaster };
- WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(kAudioObjectSystemObject,
- &propertyAddress, &objectListenerProc, this));
-
- err = AudioHardwareUnload();
- if (err != noErr)
- {
- logCAMsg(kTraceError, kTraceAudioDevice, _id,
- "Error in AudioHardwareUnload()", (const char*) &err);
- retVal = -1;
- }
+ err = AudioHardwareUnload();
+ if (err != noErr) {
+ logCAMsg(kTraceError, kTraceAudioDevice, _id,
+ "Error in AudioHardwareUnload()", (const char*)&err);
+ retVal = -1;
+ }
- _isShutDown = true;
- _initialized = false;
- _outputDeviceIsSpecified = false;
- _inputDeviceIsSpecified = false;
+ _isShutDown = true;
+ _initialized = false;
+ _outputDeviceIsSpecified = false;
+ _inputDeviceIsSpecified = false;
- _critSect.Leave();
+ _critSect.Leave();
- return retVal;
+ return retVal;
}
-bool AudioDeviceMac::Initialized() const
-{
- return (_initialized);
+bool AudioDeviceMac::Initialized() const {
+ return (_initialized);
}
-int32_t AudioDeviceMac::SpeakerIsAvailable(bool& available)
-{
-
- bool wasInitialized = _mixerManager.SpeakerIsInitialized();
+int32_t AudioDeviceMac::SpeakerIsAvailable(bool& available) {
+ bool wasInitialized = _mixerManager.SpeakerIsInitialized();
- // Make an attempt to open up the
- // output mixer corresponding to the currently selected output device.
- //
- if (!wasInitialized && InitSpeaker() == -1)
- {
- available = false;
- return 0;
- }
+ // Make an attempt to open up the
+ // output mixer corresponding to the currently selected output device.
+ //
+ if (!wasInitialized && InitSpeaker() == -1) {
+ available = false;
+ return 0;
+ }
- // Given that InitSpeaker was successful, we know that a valid speaker
- // exists.
- available = true;
+ // Given that InitSpeaker was successful, we know that a valid speaker
+ // exists.
+ available = true;
- // Close the initialized output mixer
- //
- if (!wasInitialized)
- {
- _mixerManager.CloseSpeaker();
- }
+ // Close the initialized output mixer
+ //
+ if (!wasInitialized) {
+ _mixerManager.CloseSpeaker();
+ }
- return 0;
+ return 0;
}
-int32_t AudioDeviceMac::InitSpeaker()
-{
+int32_t AudioDeviceMac::InitSpeaker() {
+ CriticalSectionScoped lock(&_critSect);
- CriticalSectionScoped lock(&_critSect);
-
- if (_playing)
- {
- return -1;
- }
+ if (_playing) {
+ return -1;
+ }
- if (InitDevice(_outputDeviceIndex, _outputDeviceID, false) == -1)
- {
- return -1;
- }
+ if (InitDevice(_outputDeviceIndex, _outputDeviceID, false) == -1) {
+ return -1;
+ }
- if (_inputDeviceID == _outputDeviceID)
- {
- _twoDevices = false;
- } else
- {
- _twoDevices = true;
- }
+ if (_inputDeviceID == _outputDeviceID) {
+ _twoDevices = false;
+ } else {
+ _twoDevices = true;
+ }
- if (_mixerManager.OpenSpeaker(_outputDeviceID) == -1)
- {
- return -1;
- }
+ if (_mixerManager.OpenSpeaker(_outputDeviceID) == -1) {
+ return -1;
+ }
- return 0;
+ return 0;
}
-int32_t AudioDeviceMac::MicrophoneIsAvailable(bool& available)
-{
+int32_t AudioDeviceMac::MicrophoneIsAvailable(bool& available) {
+ bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
- bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
-
- // Make an attempt to open up the
- // input mixer corresponding to the currently selected output device.
- //
- if (!wasInitialized && InitMicrophone() == -1)
- {
- available = false;
- return 0;
- }
+ // Make an attempt to open up the
+  // input mixer corresponding to the currently selected input device.
+ //
+ if (!wasInitialized && InitMicrophone() == -1) {
+ available = false;
+ return 0;
+ }
- // Given that InitMicrophone was successful, we know that a valid microphone
- // exists.
- available = true;
+ // Given that InitMicrophone was successful, we know that a valid microphone
+ // exists.
+ available = true;
- // Close the initialized input mixer
- //
- if (!wasInitialized)
- {
- _mixerManager.CloseMicrophone();
- }
+ // Close the initialized input mixer
+ //
+ if (!wasInitialized) {
+ _mixerManager.CloseMicrophone();
+ }
- return 0;
+ return 0;
}
-int32_t AudioDeviceMac::InitMicrophone()
-{
-
- CriticalSectionScoped lock(&_critSect);
+int32_t AudioDeviceMac::InitMicrophone() {
+ CriticalSectionScoped lock(&_critSect);
- if (_recording)
- {
- return -1;
- }
+ if (_recording) {
+ return -1;
+ }
- if (InitDevice(_inputDeviceIndex, _inputDeviceID, true) == -1)
- {
- return -1;
- }
+ if (InitDevice(_inputDeviceIndex, _inputDeviceID, true) == -1) {
+ return -1;
+ }
- if (_inputDeviceID == _outputDeviceID)
- {
- _twoDevices = false;
- } else
- {
- _twoDevices = true;
- }
+ if (_inputDeviceID == _outputDeviceID) {
+ _twoDevices = false;
+ } else {
+ _twoDevices = true;
+ }
- if (_mixerManager.OpenMicrophone(_inputDeviceID) == -1)
- {
- return -1;
- }
+ if (_mixerManager.OpenMicrophone(_inputDeviceID) == -1) {
+ return -1;
+ }
- return 0;
+ return 0;
}
-bool AudioDeviceMac::SpeakerIsInitialized() const
-{
- return (_mixerManager.SpeakerIsInitialized());
+bool AudioDeviceMac::SpeakerIsInitialized() const {
+ return (_mixerManager.SpeakerIsInitialized());
}
-bool AudioDeviceMac::MicrophoneIsInitialized() const
-{
- return (_mixerManager.MicrophoneIsInitialized());
+bool AudioDeviceMac::MicrophoneIsInitialized() const {
+ return (_mixerManager.MicrophoneIsInitialized());
}
-int32_t AudioDeviceMac::SpeakerVolumeIsAvailable(bool& available)
-{
+int32_t AudioDeviceMac::SpeakerVolumeIsAvailable(bool& available) {
+ bool wasInitialized = _mixerManager.SpeakerIsInitialized();
- bool wasInitialized = _mixerManager.SpeakerIsInitialized();
-
- // Make an attempt to open up the
- // output mixer corresponding to the currently selected output device.
- //
- if (!wasInitialized && InitSpeaker() == -1)
- {
- // If we end up here it means that the selected speaker has no volume
- // control.
- available = false;
- return 0;
- }
+ // Make an attempt to open up the
+ // output mixer corresponding to the currently selected output device.
+ //
+ if (!wasInitialized && InitSpeaker() == -1) {
+ // If we end up here it means that the selected speaker has no volume
+ // control.
+ available = false;
+ return 0;
+ }
- // Given that InitSpeaker was successful, we know that a volume control exists
- //
- available = true;
+ // Given that InitSpeaker was successful, we know that a volume control exists
+ //
+ available = true;
- // Close the initialized output mixer
- //
- if (!wasInitialized)
- {
- _mixerManager.CloseSpeaker();
- }
+ // Close the initialized output mixer
+ //
+ if (!wasInitialized) {
+ _mixerManager.CloseSpeaker();
+ }
- return 0;
+ return 0;
}
-int32_t AudioDeviceMac::SetSpeakerVolume(uint32_t volume)
-{
-
- return (_mixerManager.SetSpeakerVolume(volume));
+int32_t AudioDeviceMac::SetSpeakerVolume(uint32_t volume) {
+ return (_mixerManager.SetSpeakerVolume(volume));
}
-int32_t AudioDeviceMac::SpeakerVolume(uint32_t& volume) const
-{
+int32_t AudioDeviceMac::SpeakerVolume(uint32_t& volume) const {
+ uint32_t level(0);
- uint32_t level(0);
-
- if (_mixerManager.SpeakerVolume(level) == -1)
- {
- return -1;
- }
+ if (_mixerManager.SpeakerVolume(level) == -1) {
+ return -1;
+ }
- volume = level;
- return 0;
+ volume = level;
+ return 0;
}
int32_t AudioDeviceMac::SetWaveOutVolume(uint16_t volumeLeft,
- uint16_t volumeRight)
-{
-
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " API call not supported on this platform");
- return -1;
+ uint16_t volumeRight) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " API call not supported on this platform");
+ return -1;
}
-int32_t
-AudioDeviceMac::WaveOutVolume(uint16_t& /*volumeLeft*/,
- uint16_t& /*volumeRight*/) const
-{
-
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " API call not supported on this platform");
- return -1;
+int32_t AudioDeviceMac::WaveOutVolume(uint16_t& /*volumeLeft*/,
+ uint16_t& /*volumeRight*/) const {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " API call not supported on this platform");
+ return -1;
}
-int32_t AudioDeviceMac::MaxSpeakerVolume(uint32_t& maxVolume) const
-{
+int32_t AudioDeviceMac::MaxSpeakerVolume(uint32_t& maxVolume) const {
+ uint32_t maxVol(0);
- uint32_t maxVol(0);
-
- if (_mixerManager.MaxSpeakerVolume(maxVol) == -1)
- {
- return -1;
- }
+ if (_mixerManager.MaxSpeakerVolume(maxVol) == -1) {
+ return -1;
+ }
- maxVolume = maxVol;
- return 0;
+ maxVolume = maxVol;
+ return 0;
}
-int32_t AudioDeviceMac::MinSpeakerVolume(uint32_t& minVolume) const
-{
-
- uint32_t minVol(0);
+int32_t AudioDeviceMac::MinSpeakerVolume(uint32_t& minVolume) const {
+ uint32_t minVol(0);
- if (_mixerManager.MinSpeakerVolume(minVol) == -1)
- {
- return -1;
- }
+ if (_mixerManager.MinSpeakerVolume(minVol) == -1) {
+ return -1;
+ }
- minVolume = minVol;
- return 0;
+ minVolume = minVol;
+ return 0;
}
-int32_t
-AudioDeviceMac::SpeakerVolumeStepSize(uint16_t& stepSize) const
-{
-
- uint16_t delta(0);
+int32_t AudioDeviceMac::SpeakerVolumeStepSize(uint16_t& stepSize) const {
+ uint16_t delta(0);
- if (_mixerManager.SpeakerVolumeStepSize(delta) == -1)
- {
- return -1;
- }
+ if (_mixerManager.SpeakerVolumeStepSize(delta) == -1) {
+ return -1;
+ }
- stepSize = delta;
- return 0;
+ stepSize = delta;
+ return 0;
}
-int32_t AudioDeviceMac::SpeakerMuteIsAvailable(bool& available)
-{
-
- bool isAvailable(false);
- bool wasInitialized = _mixerManager.SpeakerIsInitialized();
+int32_t AudioDeviceMac::SpeakerMuteIsAvailable(bool& available) {
+ bool isAvailable(false);
+ bool wasInitialized = _mixerManager.SpeakerIsInitialized();
- // Make an attempt to open up the
- // output mixer corresponding to the currently selected output device.
- //
- if (!wasInitialized && InitSpeaker() == -1)
- {
- // If we end up here it means that the selected speaker has no volume
- // control, hence it is safe to state that there is no mute control
- // already at this stage.
- available = false;
- return 0;
- }
+ // Make an attempt to open up the
+ // output mixer corresponding to the currently selected output device.
+ //
+ if (!wasInitialized && InitSpeaker() == -1) {
+ // If we end up here it means that the selected speaker has no volume
+ // control, hence it is safe to state that there is no mute control
+    // at this stage.
+ available = false;
+ return 0;
+ }
- // Check if the selected speaker has a mute control
- //
- _mixerManager.SpeakerMuteIsAvailable(isAvailable);
+ // Check if the selected speaker has a mute control
+ //
+ _mixerManager.SpeakerMuteIsAvailable(isAvailable);
- available = isAvailable;
+ available = isAvailable;
- // Close the initialized output mixer
- //
- if (!wasInitialized)
- {
- _mixerManager.CloseSpeaker();
- }
+ // Close the initialized output mixer
+ //
+ if (!wasInitialized) {
+ _mixerManager.CloseSpeaker();
+ }
- return 0;
+ return 0;
}
-int32_t AudioDeviceMac::SetSpeakerMute(bool enable)
-{
- return (_mixerManager.SetSpeakerMute(enable));
+int32_t AudioDeviceMac::SetSpeakerMute(bool enable) {
+ return (_mixerManager.SetSpeakerMute(enable));
}
-int32_t AudioDeviceMac::SpeakerMute(bool& enabled) const
-{
+int32_t AudioDeviceMac::SpeakerMute(bool& enabled) const {
+ bool muted(0);
- bool muted(0);
-
- if (_mixerManager.SpeakerMute(muted) == -1)
- {
- return -1;
- }
+ if (_mixerManager.SpeakerMute(muted) == -1) {
+ return -1;
+ }
- enabled = muted;
- return 0;
+ enabled = muted;
+ return 0;
}
-int32_t AudioDeviceMac::MicrophoneMuteIsAvailable(bool& available)
-{
+int32_t AudioDeviceMac::MicrophoneMuteIsAvailable(bool& available) {
+ bool isAvailable(false);
+ bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
- bool isAvailable(false);
- bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
-
- // Make an attempt to open up the
- // input mixer corresponding to the currently selected input device.
- //
- if (!wasInitialized && InitMicrophone() == -1)
- {
- // If we end up here it means that the selected microphone has no volume
- // control, hence it is safe to state that there is no boost control
- // already at this stage.
- available = false;
- return 0;
- }
+ // Make an attempt to open up the
+ // input mixer corresponding to the currently selected input device.
+ //
+ if (!wasInitialized && InitMicrophone() == -1) {
+ // If we end up here it means that the selected microphone has no volume
+    // control, hence it is safe to state that there is no mute control
+    // at this stage.
+ available = false;
+ return 0;
+ }
- // Check if the selected microphone has a mute control
- //
- _mixerManager.MicrophoneMuteIsAvailable(isAvailable);
- available = isAvailable;
+ // Check if the selected microphone has a mute control
+ //
+ _mixerManager.MicrophoneMuteIsAvailable(isAvailable);
+ available = isAvailable;
- // Close the initialized input mixer
- //
- if (!wasInitialized)
- {
- _mixerManager.CloseMicrophone();
- }
+ // Close the initialized input mixer
+ //
+ if (!wasInitialized) {
+ _mixerManager.CloseMicrophone();
+ }
- return 0;
+ return 0;
}
-int32_t AudioDeviceMac::SetMicrophoneMute(bool enable)
-{
- return (_mixerManager.SetMicrophoneMute(enable));
+int32_t AudioDeviceMac::SetMicrophoneMute(bool enable) {
+ return (_mixerManager.SetMicrophoneMute(enable));
}
-int32_t AudioDeviceMac::MicrophoneMute(bool& enabled) const
-{
+int32_t AudioDeviceMac::MicrophoneMute(bool& enabled) const {
+ bool muted(0);
- bool muted(0);
-
- if (_mixerManager.MicrophoneMute(muted) == -1)
- {
- return -1;
- }
+ if (_mixerManager.MicrophoneMute(muted) == -1) {
+ return -1;
+ }
- enabled = muted;
- return 0;
+ enabled = muted;
+ return 0;
}
-int32_t AudioDeviceMac::MicrophoneBoostIsAvailable(bool& available)
-{
-
- bool isAvailable(false);
- bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
+int32_t AudioDeviceMac::MicrophoneBoostIsAvailable(bool& available) {
+ bool isAvailable(false);
+ bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
- // Enumerate all avaliable microphone and make an attempt to open up the
- // input mixer corresponding to the currently selected input device.
- //
- if (!wasInitialized && InitMicrophone() == -1)
- {
- // If we end up here it means that the selected microphone has no volume
- // control, hence it is safe to state that there is no boost control
- // already at this stage.
- available = false;
- return 0;
- }
+  // Enumerate all available microphones and make an attempt to open up the
+ // input mixer corresponding to the currently selected input device.
+ //
+ if (!wasInitialized && InitMicrophone() == -1) {
+ // If we end up here it means that the selected microphone has no volume
+ // control, hence it is safe to state that there is no boost control
+    // at this stage.
+ available = false;
+ return 0;
+ }
- // Check if the selected microphone has a boost control
- //
- _mixerManager.MicrophoneBoostIsAvailable(isAvailable);
- available = isAvailable;
+ // Check if the selected microphone has a boost control
+ //
+ _mixerManager.MicrophoneBoostIsAvailable(isAvailable);
+ available = isAvailable;
- // Close the initialized input mixer
- //
- if (!wasInitialized)
- {
- _mixerManager.CloseMicrophone();
- }
+ // Close the initialized input mixer
+ //
+ if (!wasInitialized) {
+ _mixerManager.CloseMicrophone();
+ }
- return 0;
+ return 0;
}
-int32_t AudioDeviceMac::SetMicrophoneBoost(bool enable)
-{
-
- return (_mixerManager.SetMicrophoneBoost(enable));
+int32_t AudioDeviceMac::SetMicrophoneBoost(bool enable) {
+ return (_mixerManager.SetMicrophoneBoost(enable));
}
-int32_t AudioDeviceMac::MicrophoneBoost(bool& enabled) const
-{
-
- bool onOff(0);
+int32_t AudioDeviceMac::MicrophoneBoost(bool& enabled) const {
+ bool onOff(0);
- if (_mixerManager.MicrophoneBoost(onOff) == -1)
- {
- return -1;
- }
+ if (_mixerManager.MicrophoneBoost(onOff) == -1) {
+ return -1;
+ }
- enabled = onOff;
- return 0;
+ enabled = onOff;
+ return 0;
}
-int32_t AudioDeviceMac::StereoRecordingIsAvailable(bool& available)
-{
-
- bool isAvailable(false);
- bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
-
- if (!wasInitialized && InitMicrophone() == -1)
- {
- // Cannot open the specified device
- available = false;
- return 0;
- }
-
- // Check if the selected microphone can record stereo
- //
- _mixerManager.StereoRecordingIsAvailable(isAvailable);
- available = isAvailable;
-
- // Close the initialized input mixer
- //
- if (!wasInitialized)
- {
- _mixerManager.CloseMicrophone();
- }
+int32_t AudioDeviceMac::StereoRecordingIsAvailable(bool& available) {
+ bool isAvailable(false);
+ bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
+ if (!wasInitialized && InitMicrophone() == -1) {
+ // Cannot open the specified device
+ available = false;
return 0;
-}
+ }
-int32_t AudioDeviceMac::SetStereoRecording(bool enable)
-{
+ // Check if the selected microphone can record stereo
+ //
+ _mixerManager.StereoRecordingIsAvailable(isAvailable);
+ available = isAvailable;
- if (enable)
- _recChannels = 2;
- else
- _recChannels = 1;
+ // Close the initialized input mixer
+ //
+ if (!wasInitialized) {
+ _mixerManager.CloseMicrophone();
+ }
- return 0;
+ return 0;
}
-int32_t AudioDeviceMac::StereoRecording(bool& enabled) const
-{
+int32_t AudioDeviceMac::SetStereoRecording(bool enable) {
+ if (enable)
+ _recChannels = 2;
+ else
+ _recChannels = 1;
- if (_recChannels == 2)
- enabled = true;
- else
- enabled = false;
-
- return 0;
+ return 0;
}
-int32_t AudioDeviceMac::StereoPlayoutIsAvailable(bool& available)
-{
-
- bool isAvailable(false);
- bool wasInitialized = _mixerManager.SpeakerIsInitialized();
+int32_t AudioDeviceMac::StereoRecording(bool& enabled) const {
+ if (_recChannels == 2)
+ enabled = true;
+ else
+ enabled = false;
- if (!wasInitialized && InitSpeaker() == -1)
- {
- // Cannot open the specified device
- available = false;
- return 0;
- }
-
- // Check if the selected microphone can record stereo
- //
- _mixerManager.StereoPlayoutIsAvailable(isAvailable);
- available = isAvailable;
+ return 0;
+}
- // Close the initialized input mixer
- //
- if (!wasInitialized)
- {
- _mixerManager.CloseSpeaker();
- }
+int32_t AudioDeviceMac::StereoPlayoutIsAvailable(bool& available) {
+ bool isAvailable(false);
+ bool wasInitialized = _mixerManager.SpeakerIsInitialized();
+ if (!wasInitialized && InitSpeaker() == -1) {
+ // Cannot open the specified device
+ available = false;
return 0;
-}
+ }
-int32_t AudioDeviceMac::SetStereoPlayout(bool enable)
-{
+  // Check if the selected speaker can play out stereo
+ //
+ _mixerManager.StereoPlayoutIsAvailable(isAvailable);
+ available = isAvailable;
- if (enable)
- _playChannels = 2;
- else
- _playChannels = 1;
+  // Close the initialized output mixer
+ //
+ if (!wasInitialized) {
+ _mixerManager.CloseSpeaker();
+ }
- return 0;
+ return 0;
}
-int32_t AudioDeviceMac::StereoPlayout(bool& enabled) const
-{
-
- if (_playChannels == 2)
- enabled = true;
- else
- enabled = false;
+int32_t AudioDeviceMac::SetStereoPlayout(bool enable) {
+ if (enable)
+ _playChannels = 2;
+ else
+ _playChannels = 1;
- return 0;
+ return 0;
}
-int32_t AudioDeviceMac::SetAGC(bool enable)
-{
-
- _AGC = enable;
+int32_t AudioDeviceMac::StereoPlayout(bool& enabled) const {
+ if (_playChannels == 2)
+ enabled = true;
+ else
+ enabled = false;
- return 0;
+ return 0;
}
-bool AudioDeviceMac::AGC() const
-{
+int32_t AudioDeviceMac::SetAGC(bool enable) {
+ _AGC = enable;
- return _AGC;
+ return 0;
}
-int32_t AudioDeviceMac::MicrophoneVolumeIsAvailable(bool& available)
-{
+bool AudioDeviceMac::AGC() const {
+ return _AGC;
+}
- bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
+int32_t AudioDeviceMac::MicrophoneVolumeIsAvailable(bool& available) {
+ bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
- // Make an attempt to open up the
- // input mixer corresponding to the currently selected output device.
- //
- if (!wasInitialized && InitMicrophone() == -1)
- {
- // If we end up here it means that the selected microphone has no volume
- // control.
- available = false;
- return 0;
- }
+ // Make an attempt to open up the
+  // input mixer corresponding to the currently selected input device.
+ //
+ if (!wasInitialized && InitMicrophone() == -1) {
+ // If we end up here it means that the selected microphone has no volume
+ // control.
+ available = false;
+ return 0;
+ }
- // Given that InitMicrophone was successful, we know that a volume control
- // exists
- //
- available = true;
+ // Given that InitMicrophone was successful, we know that a volume control
+ // exists
+ //
+ available = true;
- // Close the initialized input mixer
- //
- if (!wasInitialized)
- {
- _mixerManager.CloseMicrophone();
- }
+ // Close the initialized input mixer
+ //
+ if (!wasInitialized) {
+ _mixerManager.CloseMicrophone();
+ }
- return 0;
+ return 0;
}
-int32_t AudioDeviceMac::SetMicrophoneVolume(uint32_t volume)
-{
-
- return (_mixerManager.SetMicrophoneVolume(volume));
+int32_t AudioDeviceMac::SetMicrophoneVolume(uint32_t volume) {
+ return (_mixerManager.SetMicrophoneVolume(volume));
}
-int32_t AudioDeviceMac::MicrophoneVolume(uint32_t& volume) const
-{
+int32_t AudioDeviceMac::MicrophoneVolume(uint32_t& volume) const {
+ uint32_t level(0);
- uint32_t level(0);
-
- if (_mixerManager.MicrophoneVolume(level) == -1)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " failed to retrive current microphone level");
- return -1;
- }
+ if (_mixerManager.MicrophoneVolume(level) == -1) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " failed to retrive current microphone level");
+ return -1;
+ }
- volume = level;
- return 0;
+ volume = level;
+ return 0;
}
-int32_t
-AudioDeviceMac::MaxMicrophoneVolume(uint32_t& maxVolume) const
-{
-
- uint32_t maxVol(0);
+int32_t AudioDeviceMac::MaxMicrophoneVolume(uint32_t& maxVolume) const {
+ uint32_t maxVol(0);
- if (_mixerManager.MaxMicrophoneVolume(maxVol) == -1)
- {
- return -1;
- }
+ if (_mixerManager.MaxMicrophoneVolume(maxVol) == -1) {
+ return -1;
+ }
- maxVolume = maxVol;
- return 0;
+ maxVolume = maxVol;
+ return 0;
}
-int32_t
-AudioDeviceMac::MinMicrophoneVolume(uint32_t& minVolume) const
-{
-
- uint32_t minVol(0);
+int32_t AudioDeviceMac::MinMicrophoneVolume(uint32_t& minVolume) const {
+ uint32_t minVol(0);
- if (_mixerManager.MinMicrophoneVolume(minVol) == -1)
- {
- return -1;
- }
+ if (_mixerManager.MinMicrophoneVolume(minVol) == -1) {
+ return -1;
+ }
- minVolume = minVol;
- return 0;
+ minVolume = minVol;
+ return 0;
}
-int32_t
-AudioDeviceMac::MicrophoneVolumeStepSize(uint16_t& stepSize) const
-{
-
- uint16_t delta(0);
+int32_t AudioDeviceMac::MicrophoneVolumeStepSize(uint16_t& stepSize) const {
+ uint16_t delta(0);
- if (_mixerManager.MicrophoneVolumeStepSize(delta) == -1)
- {
- return -1;
- }
+ if (_mixerManager.MicrophoneVolumeStepSize(delta) == -1) {
+ return -1;
+ }
- stepSize = delta;
- return 0;
+ stepSize = delta;
+ return 0;
}
-int16_t AudioDeviceMac::PlayoutDevices()
-{
-
- AudioDeviceID playDevices[MaxNumberDevices];
- return GetNumberDevices(kAudioDevicePropertyScopeOutput, playDevices,
- MaxNumberDevices);
+int16_t AudioDeviceMac::PlayoutDevices() {
+ AudioDeviceID playDevices[MaxNumberDevices];
+ return GetNumberDevices(kAudioDevicePropertyScopeOutput, playDevices,
+ MaxNumberDevices);
}
-int32_t AudioDeviceMac::SetPlayoutDevice(uint16_t index)
-{
- CriticalSectionScoped lock(&_critSect);
+int32_t AudioDeviceMac::SetPlayoutDevice(uint16_t index) {
+ CriticalSectionScoped lock(&_critSect);
- if (_playIsInitialized)
- {
- return -1;
- }
+ if (_playIsInitialized) {
+ return -1;
+ }
- AudioDeviceID playDevices[MaxNumberDevices];
- uint32_t nDevices = GetNumberDevices(kAudioDevicePropertyScopeOutput,
- playDevices, MaxNumberDevices);
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- " number of availiable waveform-audio output devices is %u",
- nDevices);
-
- if (index > (nDevices - 1))
- {
- WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
- " device index is out of range [0,%u]", (nDevices - 1));
- return -1;
- }
+ AudioDeviceID playDevices[MaxNumberDevices];
+ uint32_t nDevices = GetNumberDevices(kAudioDevicePropertyScopeOutput,
+ playDevices, MaxNumberDevices);
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+ " number of availiable waveform-audio output devices is %u",
+ nDevices);
- _outputDeviceIndex = index;
- _outputDeviceIsSpecified = true;
+ if (index > (nDevices - 1)) {
+ WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+ " device index is out of range [0,%u]", (nDevices - 1));
+ return -1;
+ }
- return 0;
+ _outputDeviceIndex = index;
+ _outputDeviceIsSpecified = true;
+
+ return 0;
}
int32_t AudioDeviceMac::SetPlayoutDevice(
- AudioDeviceModule::WindowsDeviceType /*device*/)
-{
- WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
- "WindowsDeviceType not supported");
- return -1;
+ AudioDeviceModule::WindowsDeviceType /*device*/) {
+ WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+ "WindowsDeviceType not supported");
+ return -1;
}
-int32_t AudioDeviceMac::PlayoutDeviceName(
- uint16_t index,
- char name[kAdmMaxDeviceNameSize],
- char guid[kAdmMaxGuidSize])
-{
+int32_t AudioDeviceMac::PlayoutDeviceName(uint16_t index,
+ char name[kAdmMaxDeviceNameSize],
+ char guid[kAdmMaxGuidSize]) {
+ const uint16_t nDevices(PlayoutDevices());
- const uint16_t nDevices(PlayoutDevices());
-
- if ((index > (nDevices - 1)) || (name == NULL))
- {
- return -1;
- }
+ if ((index > (nDevices - 1)) || (name == NULL)) {
+ return -1;
+ }
- memset(name, 0, kAdmMaxDeviceNameSize);
+ memset(name, 0, kAdmMaxDeviceNameSize);
- if (guid != NULL)
- {
- memset(guid, 0, kAdmMaxGuidSize);
- }
+ if (guid != NULL) {
+ memset(guid, 0, kAdmMaxGuidSize);
+ }
- return GetDeviceName(kAudioDevicePropertyScopeOutput, index, name);
+ return GetDeviceName(kAudioDevicePropertyScopeOutput, index, name);
}
-int32_t AudioDeviceMac::RecordingDeviceName(
- uint16_t index,
- char name[kAdmMaxDeviceNameSize],
- char guid[kAdmMaxGuidSize])
-{
-
- const uint16_t nDevices(RecordingDevices());
+int32_t AudioDeviceMac::RecordingDeviceName(uint16_t index,
+ char name[kAdmMaxDeviceNameSize],
+ char guid[kAdmMaxGuidSize]) {
+ const uint16_t nDevices(RecordingDevices());
- if ((index > (nDevices - 1)) || (name == NULL))
- {
- return -1;
- }
+ if ((index > (nDevices - 1)) || (name == NULL)) {
+ return -1;
+ }
- memset(name, 0, kAdmMaxDeviceNameSize);
+ memset(name, 0, kAdmMaxDeviceNameSize);
- if (guid != NULL)
- {
- memset(guid, 0, kAdmMaxGuidSize);
- }
+ if (guid != NULL) {
+ memset(guid, 0, kAdmMaxGuidSize);
+ }
- return GetDeviceName(kAudioDevicePropertyScopeInput, index, name);
+ return GetDeviceName(kAudioDevicePropertyScopeInput, index, name);
}
-int16_t AudioDeviceMac::RecordingDevices()
-{
-
- AudioDeviceID recDevices[MaxNumberDevices];
- return GetNumberDevices(kAudioDevicePropertyScopeInput, recDevices,
- MaxNumberDevices);
+int16_t AudioDeviceMac::RecordingDevices() {
+ AudioDeviceID recDevices[MaxNumberDevices];
+ return GetNumberDevices(kAudioDevicePropertyScopeInput, recDevices,
+ MaxNumberDevices);
}
-int32_t AudioDeviceMac::SetRecordingDevice(uint16_t index)
-{
+int32_t AudioDeviceMac::SetRecordingDevice(uint16_t index) {
+ if (_recIsInitialized) {
+ return -1;
+ }
- if (_recIsInitialized)
- {
- return -1;
- }
+ AudioDeviceID recDevices[MaxNumberDevices];
+ uint32_t nDevices = GetNumberDevices(kAudioDevicePropertyScopeInput,
+ recDevices, MaxNumberDevices);
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+ " number of availiable waveform-audio input devices is %u",
+ nDevices);
- AudioDeviceID recDevices[MaxNumberDevices];
- uint32_t nDevices = GetNumberDevices(kAudioDevicePropertyScopeInput,
- recDevices, MaxNumberDevices);
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- " number of availiable waveform-audio input devices is %u",
- nDevices);
-
- if (index > (nDevices - 1))
- {
- WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
- " device index is out of range [0,%u]", (nDevices - 1));
- return -1;
- }
+ if (index > (nDevices - 1)) {
+ WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+ " device index is out of range [0,%u]", (nDevices - 1));
+ return -1;
+ }
- _inputDeviceIndex = index;
- _inputDeviceIsSpecified = true;
+ _inputDeviceIndex = index;
+ _inputDeviceIsSpecified = true;
- return 0;
+ return 0;
}
-
-int32_t
-AudioDeviceMac::SetRecordingDevice(AudioDeviceModule::WindowsDeviceType /*device*/)
-{
- WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
- "WindowsDeviceType not supported");
- return -1;
+int32_t AudioDeviceMac::SetRecordingDevice(
+ AudioDeviceModule::WindowsDeviceType /*device*/) {
+ WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+ "WindowsDeviceType not supported");
+ return -1;
}
-int32_t AudioDeviceMac::PlayoutIsAvailable(bool& available)
-{
-
- available = true;
+int32_t AudioDeviceMac::PlayoutIsAvailable(bool& available) {
+ available = true;
- // Try to initialize the playout side
- if (InitPlayout() == -1)
- {
- available = false;
- }
+ // Try to initialize the playout side
+ if (InitPlayout() == -1) {
+ available = false;
+ }
- // We destroy the IOProc created by InitPlayout() in implDeviceIOProc().
- // We must actually start playout here in order to have the IOProc
- // deleted by calling StopPlayout().
- if (StartPlayout() == -1)
- {
- available = false;
- }
+ // We destroy the IOProc created by InitPlayout() in implDeviceIOProc().
+ // We must actually start playout here in order to have the IOProc
+ // deleted by calling StopPlayout().
+ if (StartPlayout() == -1) {
+ available = false;
+ }
- // Cancel effect of initialization
- if (StopPlayout() == -1)
- {
- available = false;
- }
+ // Cancel effect of initialization
+ if (StopPlayout() == -1) {
+ available = false;
+ }
- return 0;
+ return 0;
}
-int32_t AudioDeviceMac::RecordingIsAvailable(bool& available)
-{
-
- available = true;
+int32_t AudioDeviceMac::RecordingIsAvailable(bool& available) {
+ available = true;
- // Try to initialize the recording side
- if (InitRecording() == -1)
- {
- available = false;
- }
+ // Try to initialize the recording side
+ if (InitRecording() == -1) {
+ available = false;
+ }
- // We destroy the IOProc created by InitRecording() in implInDeviceIOProc().
- // We must actually start recording here in order to have the IOProc
- // deleted by calling StopRecording().
- if (StartRecording() == -1)
- {
- available = false;
- }
+ // We destroy the IOProc created by InitRecording() in implInDeviceIOProc().
+ // We must actually start recording here in order to have the IOProc
+ // deleted by calling StopRecording().
+ if (StartRecording() == -1) {
+ available = false;
+ }
- // Cancel effect of initialization
- if (StopRecording() == -1)
- {
- available = false;
- }
+ // Cancel effect of initialization
+ if (StopRecording() == -1) {
+ available = false;
+ }
- return 0;
+ return 0;
}
-int32_t AudioDeviceMac::InitPlayout()
-{
- CriticalSectionScoped lock(&_critSect);
+int32_t AudioDeviceMac::InitPlayout() {
+ CriticalSectionScoped lock(&_critSect);
- if (_playing)
- {
- return -1;
- }
+ if (_playing) {
+ return -1;
+ }
- if (!_outputDeviceIsSpecified)
- {
- return -1;
- }
+ if (!_outputDeviceIsSpecified) {
+ return -1;
+ }
- if (_playIsInitialized)
- {
- return 0;
- }
+ if (_playIsInitialized) {
+ return 0;
+ }
- // Initialize the speaker (devices might have been added or removed)
- if (InitSpeaker() == -1)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " InitSpeaker() failed");
- }
+ // Initialize the speaker (devices might have been added or removed)
+ if (InitSpeaker() == -1) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " InitSpeaker() failed");
+ }
- if (!MicrophoneIsInitialized())
- {
- // Make this call to check if we are using
- // one or two devices (_twoDevices)
- bool available = false;
- if (MicrophoneIsAvailable(available) == -1)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " MicrophoneIsAvailable() failed");
- }
+ if (!MicrophoneIsInitialized()) {
+ // Make this call to check if we are using
+ // one or two devices (_twoDevices)
+ bool available = false;
+ if (MicrophoneIsAvailable(available) == -1) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " MicrophoneIsAvailable() failed");
}
+ }
- PaUtil_FlushRingBuffer(_paRenderBuffer);
-
- OSStatus err = noErr;
- UInt32 size = 0;
- _renderDelayOffsetSamples = 0;
- _renderDelayUs = 0;
- _renderLatencyUs = 0;
- _renderDeviceIsAlive = 1;
- _doStop = false;
+ PaUtil_FlushRingBuffer(_paRenderBuffer);
+
+ OSStatus err = noErr;
+ UInt32 size = 0;
+ _renderDelayOffsetSamples = 0;
+ _renderDelayUs = 0;
+ _renderLatencyUs = 0;
+ _renderDeviceIsAlive = 1;
+ _doStop = false;
+
+ // The internal microphone of a MacBook Pro is located under the left speaker
+ // grille. When the internal speakers are in use, we want to fully stereo
+ // pan to the right.
+ AudioObjectPropertyAddress propertyAddress = {
+ kAudioDevicePropertyDataSource, kAudioDevicePropertyScopeOutput, 0};
+ if (_macBookPro) {
+ _macBookProPanRight = false;
+ Boolean hasProperty =
+ AudioObjectHasProperty(_outputDeviceID, &propertyAddress);
+ if (hasProperty) {
+ UInt32 dataSource = 0;
+ size = sizeof(dataSource);
+ WEBRTC_CA_LOG_WARN(AudioObjectGetPropertyData(
+ _outputDeviceID, &propertyAddress, 0, NULL, &size, &dataSource));
+
+ if (dataSource == 'ispk') {
+ _macBookProPanRight = true;
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+ "MacBook Pro using internal speakers; stereo"
+ " panning right");
+ } else {
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+ "MacBook Pro not using internal speakers");
+ }
- // The internal microphone of a MacBook Pro is located under the left speaker
- // grille. When the internal speakers are in use, we want to fully stereo
- // pan to the right.
- AudioObjectPropertyAddress
- propertyAddress = { kAudioDevicePropertyDataSource,
- kAudioDevicePropertyScopeOutput, 0 };
- if (_macBookPro)
- {
- _macBookProPanRight = false;
- Boolean hasProperty = AudioObjectHasProperty(_outputDeviceID,
- &propertyAddress);
- if (hasProperty)
- {
- UInt32 dataSource = 0;
- size = sizeof(dataSource);
- WEBRTC_CA_LOG_WARN(AudioObjectGetPropertyData(_outputDeviceID,
- &propertyAddress, 0, NULL, &size, &dataSource));
-
- if (dataSource == 'ispk')
- {
- _macBookProPanRight = true;
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice,
- _id,
- "MacBook Pro using internal speakers; stereo"
- " panning right");
- } else
- {
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice,
- _id, "MacBook Pro not using internal speakers");
- }
-
- // Add a listener to determine if the status changes.
- WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener(_outputDeviceID,
- &propertyAddress, &objectListenerProc, this));
- }
+ // Add a listener to determine if the status changes.
+ WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener(
+ _outputDeviceID, &propertyAddress, &objectListenerProc, this));
}
+ }
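Editor's note: 'ispk' is a big-endian FourCharCode identifying the internal
speakers as the output data source. A hypothetical helper that renders such
codes readably (logCAMsg elsewhere in this file serves a similar purpose):

    static void FourCCToString(UInt32 code, char out[5]) {
      // Four-char codes pack four ASCII bytes, highest byte first.
      out[0] = (char)((code >> 24) & 0xFF);
      out[1] = (char)((code >> 16) & 0xFF);
      out[2] = (char)((code >> 8) & 0xFF);
      out[3] = (char)(code & 0xFF);
      out[4] = '\0';  // 'ispk' -> "ispk"
    }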
- // Get current stream description
- propertyAddress.mSelector = kAudioDevicePropertyStreamFormat;
- memset(&_outStreamFormat, 0, sizeof(_outStreamFormat));
- size = sizeof(_outStreamFormat);
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,
- &propertyAddress, 0, NULL, &size, &_outStreamFormat));
-
- if (_outStreamFormat.mFormatID != kAudioFormatLinearPCM)
- {
- logCAMsg(kTraceError, kTraceAudioDevice, _id,
- "Unacceptable output stream format -> mFormatID",
- (const char *) &_outStreamFormat.mFormatID);
- return -1;
- }
+ // Get current stream description
+ propertyAddress.mSelector = kAudioDevicePropertyStreamFormat;
+ memset(&_outStreamFormat, 0, sizeof(_outStreamFormat));
+ size = sizeof(_outStreamFormat);
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
+ _outputDeviceID, &propertyAddress, 0, NULL, &size, &_outStreamFormat));
+
+ if (_outStreamFormat.mFormatID != kAudioFormatLinearPCM) {
+ logCAMsg(kTraceError, kTraceAudioDevice, _id,
+ "Unacceptable output stream format -> mFormatID",
+ (const char*)&_outStreamFormat.mFormatID);
+ return -1;
+ }
- if (_outStreamFormat.mChannelsPerFrame > N_DEVICE_CHANNELS)
- {
- WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
- "Too many channels on output device (mChannelsPerFrame = %d)",
- _outStreamFormat.mChannelsPerFrame);
- return -1;
- }
+ if (_outStreamFormat.mChannelsPerFrame > N_DEVICE_CHANNELS) {
+ WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+ "Too many channels on output device (mChannelsPerFrame = %d)",
+ _outStreamFormat.mChannelsPerFrame);
+ return -1;
+ }
- if (_outStreamFormat.mFormatFlags & kAudioFormatFlagIsNonInterleaved)
- {
- WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
- "Non-interleaved audio data is not supported.",
- "AudioHardware streams should not have this format.");
- return -1;
- }
+ if (_outStreamFormat.mFormatFlags & kAudioFormatFlagIsNonInterleaved) {
+ WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+ "Non-interleaved audio data is not supported.",
+ "AudioHardware streams should not have this format.");
+ return -1;
+ }
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Ouput stream format:");
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+ "mSampleRate = %f, mChannelsPerFrame = %u",
+ _outStreamFormat.mSampleRate,
+ _outStreamFormat.mChannelsPerFrame);
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+ "mBytesPerPacket = %u, mFramesPerPacket = %u",
+ _outStreamFormat.mBytesPerPacket,
+ _outStreamFormat.mFramesPerPacket);
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+ "mBytesPerFrame = %u, mBitsPerChannel = %u",
+ _outStreamFormat.mBytesPerFrame,
+ _outStreamFormat.mBitsPerChannel);
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "mFormatFlags = %u",
+ _outStreamFormat.mFormatFlags);
+ logCAMsg(kTraceInfo, kTraceAudioDevice, _id, "mFormatID",
+ (const char*)&_outStreamFormat.mFormatID);
+
+ // Our preferred format to work with.
+ if (_outStreamFormat.mChannelsPerFrame < 2) {
+ // Disable stereo playout when we only have one channel on the device.
+ _playChannels = 1;
WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- "Ouput stream format:");
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- "mSampleRate = %f, mChannelsPerFrame = %u",
- _outStreamFormat.mSampleRate,
- _outStreamFormat.mChannelsPerFrame);
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- "mBytesPerPacket = %u, mFramesPerPacket = %u",
- _outStreamFormat.mBytesPerPacket,
- _outStreamFormat.mFramesPerPacket);
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- "mBytesPerFrame = %u, mBitsPerChannel = %u",
- _outStreamFormat.mBytesPerFrame,
- _outStreamFormat.mBitsPerChannel);
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- "mFormatFlags = %u",
- _outStreamFormat.mFormatFlags);
- logCAMsg(kTraceInfo, kTraceAudioDevice, _id, "mFormatID",
- (const char *) &_outStreamFormat.mFormatID);
-
- // Our preferred format to work with.
- if (_outStreamFormat.mChannelsPerFrame < 2)
- {
- // Disable stereo playout when we only have one channel on the device.
- _playChannels = 1;
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- "Stereo playout unavailable on this device");
- }
- WEBRTC_CA_RETURN_ON_ERR(SetDesiredPlayoutFormat());
+ "Stereo playout unavailable on this device");
+ }
+ WEBRTC_CA_RETURN_ON_ERR(SetDesiredPlayoutFormat());
- // Listen for format changes.
- propertyAddress.mSelector = kAudioDevicePropertyStreamFormat;
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectAddPropertyListener(_outputDeviceID,
- &propertyAddress,
- &objectListenerProc,
- this));
-
- // Listen for processor overloads.
- propertyAddress.mSelector = kAudioDeviceProcessorOverload;
- WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener(_outputDeviceID,
- &propertyAddress,
- &objectListenerProc,
- this));
-
- if (_twoDevices || !_recIsInitialized)
- {
- WEBRTC_CA_RETURN_ON_ERR(AudioDeviceCreateIOProcID(_outputDeviceID,
- deviceIOProc,
- this,
- &_deviceIOProcID));
- }
+ // Listen for format changes.
+ propertyAddress.mSelector = kAudioDevicePropertyStreamFormat;
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectAddPropertyListener(
+ _outputDeviceID, &propertyAddress, &objectListenerProc, this));
- _playIsInitialized = true;
+ // Listen for processor overloads.
+ propertyAddress.mSelector = kAudioDeviceProcessorOverload;
+ WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener(
+ _outputDeviceID, &propertyAddress, &objectListenerProc, this));
- return 0;
+ if (_twoDevices || !_recIsInitialized) {
+ WEBRTC_CA_RETURN_ON_ERR(AudioDeviceCreateIOProcID(
+ _outputDeviceID, deviceIOProc, this, &_deviceIOProcID));
+ }
+
+ _playIsInitialized = true;
+
+ return 0;
}
-int32_t AudioDeviceMac::InitRecording()
-{
+int32_t AudioDeviceMac::InitRecording() {
+ CriticalSectionScoped lock(&_critSect);
- CriticalSectionScoped lock(&_critSect);
+ if (_recording) {
+ return -1;
+ }
- if (_recording)
- {
- return -1;
- }
+ if (!_inputDeviceIsSpecified) {
+ return -1;
+ }
- if (!_inputDeviceIsSpecified)
- {
- return -1;
- }
+ if (_recIsInitialized) {
+ return 0;
+ }
- if (_recIsInitialized)
- {
- return 0;
- }
+ // Initialize the microphone (devices might have been added or removed)
+ if (InitMicrophone() == -1) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " InitMicrophone() failed");
+ }
- // Initialize the microphone (devices might have been added or removed)
- if (InitMicrophone() == -1)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " InitMicrophone() failed");
+ if (!SpeakerIsInitialized()) {
+ // Make this call to check if we are using
+ // one or two devices (_twoDevices)
+ bool available = false;
+ if (SpeakerIsAvailable(available) == -1) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " SpeakerIsAvailable() failed");
}
+ }
- if (!SpeakerIsInitialized())
- {
- // Make this call to check if we are using
- // one or two devices (_twoDevices)
- bool available = false;
- if (SpeakerIsAvailable(available) == -1)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " SpeakerIsAvailable() failed");
- }
- }
+ OSStatus err = noErr;
+ UInt32 size = 0;
- OSStatus err = noErr;
- UInt32 size = 0;
-
- PaUtil_FlushRingBuffer(_paCaptureBuffer);
-
- _captureDelayUs = 0;
- _captureLatencyUs = 0;
- _captureDeviceIsAlive = 1;
- _doStopRec = false;
-
- // Get current stream description
- AudioObjectPropertyAddress
- propertyAddress = { kAudioDevicePropertyStreamFormat,
- kAudioDevicePropertyScopeInput, 0 };
- memset(&_inStreamFormat, 0, sizeof(_inStreamFormat));
- size = sizeof(_inStreamFormat);
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,
- &propertyAddress, 0, NULL, &size, &_inStreamFormat));
-
- if (_inStreamFormat.mFormatID != kAudioFormatLinearPCM)
- {
- logCAMsg(kTraceError, kTraceAudioDevice, _id,
- "Unacceptable input stream format -> mFormatID",
- (const char *) &_inStreamFormat.mFormatID);
- return -1;
- }
+ PaUtil_FlushRingBuffer(_paCaptureBuffer);
- if (_inStreamFormat.mChannelsPerFrame > N_DEVICE_CHANNELS)
- {
- WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
- "Too many channels on input device (mChannelsPerFrame = %d)",
- _inStreamFormat.mChannelsPerFrame);
- return -1;
- }
+ _captureDelayUs = 0;
+ _captureLatencyUs = 0;
+ _captureDeviceIsAlive = 1;
+ _doStopRec = false;
- const int io_block_size_samples = _inStreamFormat.mChannelsPerFrame *
- _inStreamFormat.mSampleRate / 100 * N_BLOCKS_IO;
- if (io_block_size_samples > _captureBufSizeSamples)
- {
- WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
- "Input IO block size (%d) is larger than ring buffer (%u)",
- io_block_size_samples, _captureBufSizeSamples);
- return -1;
- }
+ // Get current stream description
+ AudioObjectPropertyAddress propertyAddress = {
+ kAudioDevicePropertyStreamFormat, kAudioDevicePropertyScopeInput, 0};
+ memset(&_inStreamFormat, 0, sizeof(_inStreamFormat));
+ size = sizeof(_inStreamFormat);
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
+ _inputDeviceID, &propertyAddress, 0, NULL, &size, &_inStreamFormat));
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- " Input stream format:");
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- " mSampleRate = %f, mChannelsPerFrame = %u",
- _inStreamFormat.mSampleRate, _inStreamFormat.mChannelsPerFrame);
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- " mBytesPerPacket = %u, mFramesPerPacket = %u",
- _inStreamFormat.mBytesPerPacket,
- _inStreamFormat.mFramesPerPacket);
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- " mBytesPerFrame = %u, mBitsPerChannel = %u",
- _inStreamFormat.mBytesPerFrame,
- _inStreamFormat.mBitsPerChannel);
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- " mFormatFlags = %u",
- _inStreamFormat.mFormatFlags);
- logCAMsg(kTraceInfo, kTraceAudioDevice, _id, "mFormatID",
- (const char *) &_inStreamFormat.mFormatID);
+ if (_inStreamFormat.mFormatID != kAudioFormatLinearPCM) {
+ logCAMsg(kTraceError, kTraceAudioDevice, _id,
+ "Unacceptable input stream format -> mFormatID",
+ (const char*)&_inStreamFormat.mFormatID);
+ return -1;
+ }
- // Our preferred format to work with
- if (_inStreamFormat.mChannelsPerFrame >= 2 && (_recChannels == 2))
- {
- _inDesiredFormat.mChannelsPerFrame = 2;
- } else
- {
- // Disable stereo recording when we only have one channel on the device.
- _inDesiredFormat.mChannelsPerFrame = 1;
- _recChannels = 1;
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- "Stereo recording unavailable on this device");
- }
+ if (_inStreamFormat.mChannelsPerFrame > N_DEVICE_CHANNELS) {
+ WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+ "Too many channels on input device (mChannelsPerFrame = %d)",
+ _inStreamFormat.mChannelsPerFrame);
+ return -1;
+ }
- if (_ptrAudioBuffer)
- {
- // Update audio buffer with the selected parameters
- _ptrAudioBuffer->SetRecordingSampleRate(N_REC_SAMPLES_PER_SEC);
- _ptrAudioBuffer->SetRecordingChannels((uint8_t) _recChannels);
- }
+ const int io_block_size_samples = _inStreamFormat.mChannelsPerFrame *
+ _inStreamFormat.mSampleRate / 100 *
+ N_BLOCKS_IO;
+ if (io_block_size_samples > _captureBufSizeSamples) {
+ WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+ "Input IO block size (%d) is larger than ring buffer (%u)",
+ io_block_size_samples, _captureBufSizeSamples);
+ return -1;
+ }
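Editor's note: with illustrative values (stereo capture at 44.1 kHz and
N_BLOCKS_IO assumed to be 2), the check above computes
2 * 44100 / 100 * 2 = 1764 samples per IO block, which must fit in the
power-of-two capture ring buffer allocated in Init().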
+
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, " Input stream format:");
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+ " mSampleRate = %f, mChannelsPerFrame = %u",
+ _inStreamFormat.mSampleRate, _inStreamFormat.mChannelsPerFrame);
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+ " mBytesPerPacket = %u, mFramesPerPacket = %u",
+ _inStreamFormat.mBytesPerPacket,
+ _inStreamFormat.mFramesPerPacket);
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+ " mBytesPerFrame = %u, mBitsPerChannel = %u",
+ _inStreamFormat.mBytesPerFrame, _inStreamFormat.mBitsPerChannel);
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, " mFormatFlags = %u",
+ _inStreamFormat.mFormatFlags);
+ logCAMsg(kTraceInfo, kTraceAudioDevice, _id, "mFormatID",
+ (const char*)&_inStreamFormat.mFormatID);
+
+ // Our preferred format to work with
+ if (_inStreamFormat.mChannelsPerFrame >= 2 && (_recChannels == 2)) {
+ _inDesiredFormat.mChannelsPerFrame = 2;
+ } else {
+ // Disable stereo recording when we only have one channel on the device.
+ _inDesiredFormat.mChannelsPerFrame = 1;
+ _recChannels = 1;
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+ "Stereo recording unavailable on this device");
+ }
+
+ if (_ptrAudioBuffer) {
+ // Update audio buffer with the selected parameters
+ _ptrAudioBuffer->SetRecordingSampleRate(N_REC_SAMPLES_PER_SEC);
+ _ptrAudioBuffer->SetRecordingChannels((uint8_t)_recChannels);
+ }
- _inDesiredFormat.mSampleRate = N_REC_SAMPLES_PER_SEC;
- _inDesiredFormat.mBytesPerPacket = _inDesiredFormat.mChannelsPerFrame
- * sizeof(SInt16);
- _inDesiredFormat.mFramesPerPacket = 1;
- _inDesiredFormat.mBytesPerFrame = _inDesiredFormat.mChannelsPerFrame
- * sizeof(SInt16);
- _inDesiredFormat.mBitsPerChannel = sizeof(SInt16) * 8;
+ _inDesiredFormat.mSampleRate = N_REC_SAMPLES_PER_SEC;
+ _inDesiredFormat.mBytesPerPacket =
+ _inDesiredFormat.mChannelsPerFrame * sizeof(SInt16);
+ _inDesiredFormat.mFramesPerPacket = 1;
+ _inDesiredFormat.mBytesPerFrame =
+ _inDesiredFormat.mChannelsPerFrame * sizeof(SInt16);
+ _inDesiredFormat.mBitsPerChannel = sizeof(SInt16) * 8;
- _inDesiredFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger
- | kLinearPCMFormatFlagIsPacked;
+ _inDesiredFormat.mFormatFlags =
+ kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
#ifdef WEBRTC_ARCH_BIG_ENDIAN
- _inDesiredFormat.mFormatFlags |= kLinearPCMFormatFlagIsBigEndian;
+ _inDesiredFormat.mFormatFlags |= kLinearPCMFormatFlagIsBigEndian;
#endif
- _inDesiredFormat.mFormatID = kAudioFormatLinearPCM;
-
- WEBRTC_CA_RETURN_ON_ERR(AudioConverterNew(&_inStreamFormat, &_inDesiredFormat,
- &_captureConverter));
-
- // First try to set buffer size to desired value (10 ms * N_BLOCKS_IO)
- // TODO(xians): investigate this block.
- UInt32 bufByteCount = (UInt32)((_inStreamFormat.mSampleRate / 1000.0)
- * 10.0 * N_BLOCKS_IO * _inStreamFormat.mChannelsPerFrame
- * sizeof(Float32));
- if (_inStreamFormat.mFramesPerPacket != 0)
- {
- if (bufByteCount % _inStreamFormat.mFramesPerPacket != 0)
- {
- bufByteCount = ((UInt32)(bufByteCount
- / _inStreamFormat.mFramesPerPacket) + 1)
- * _inStreamFormat.mFramesPerPacket;
- }
+ _inDesiredFormat.mFormatID = kAudioFormatLinearPCM;
+
+ WEBRTC_CA_RETURN_ON_ERR(AudioConverterNew(&_inStreamFormat, &_inDesiredFormat,
+ &_captureConverter));
+
+ // First try to set buffer size to desired value (10 ms * N_BLOCKS_IO)
+ // TODO(xians): investigate this block.
+ UInt32 bufByteCount =
+ (UInt32)((_inStreamFormat.mSampleRate / 1000.0) * 10.0 * N_BLOCKS_IO *
+ _inStreamFormat.mChannelsPerFrame * sizeof(Float32));
+ if (_inStreamFormat.mFramesPerPacket != 0) {
+ if (bufByteCount % _inStreamFormat.mFramesPerPacket != 0) {
+ bufByteCount =
+ ((UInt32)(bufByteCount / _inStreamFormat.mFramesPerPacket) + 1) *
+ _inStreamFormat.mFramesPerPacket;
}
+ }
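Editor's note: a worked instance of the sizing above, assuming 44.1 kHz
stereo input and N_BLOCKS_IO == 2 (an assumed value):
(44100 / 1000.0) * 10.0 * 2 * 2 * sizeof(Float32) = 441 * 2 * 2 * 4
= 7056 bytes, which is then rounded up to a whole number of packets and
clamped to the device's allowed buffer-size range below.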
- // Ensure the buffer size is within the acceptable range provided by the device.
- propertyAddress.mSelector = kAudioDevicePropertyBufferSizeRange;
- AudioValueRange range;
- size = sizeof(range);
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,
- &propertyAddress, 0, NULL, &size, &range));
- if (range.mMinimum > bufByteCount)
- {
- bufByteCount = range.mMinimum;
- } else if (range.mMaximum < bufByteCount)
- {
- bufByteCount = range.mMaximum;
- }
+ // Ensure the buffer size is within the acceptable range provided by the
+ // device.
+ propertyAddress.mSelector = kAudioDevicePropertyBufferSizeRange;
+ AudioValueRange range;
+ size = sizeof(range);
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
+ _inputDeviceID, &propertyAddress, 0, NULL, &size, &range));
+ if (range.mMinimum > bufByteCount) {
+ bufByteCount = range.mMinimum;
+ } else if (range.mMaximum < bufByteCount) {
+ bufByteCount = range.mMaximum;
+ }
- propertyAddress.mSelector = kAudioDevicePropertyBufferSize;
- size = sizeof(bufByteCount);
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(_inputDeviceID,
- &propertyAddress, 0, NULL, size, &bufByteCount));
-
- // Get capture device latency
- propertyAddress.mSelector = kAudioDevicePropertyLatency;
- UInt32 latency = 0;
- size = sizeof(UInt32);
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,
- &propertyAddress, 0, NULL, &size, &latency));
- _captureLatencyUs = (UInt32)((1.0e6 * latency)
- / _inStreamFormat.mSampleRate);
-
- // Get capture stream latency
- propertyAddress.mSelector = kAudioDevicePropertyStreams;
- AudioStreamID stream = 0;
- size = sizeof(AudioStreamID);
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,
- &propertyAddress, 0, NULL, &size, &stream));
- propertyAddress.mSelector = kAudioStreamPropertyLatency;
- size = sizeof(UInt32);
- latency = 0;
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,
- &propertyAddress, 0, NULL, &size, &latency));
- _captureLatencyUs += (UInt32)((1.0e6 * latency)
- / _inStreamFormat.mSampleRate);
-
- // Listen for format changes
- // TODO(xians): should we be using kAudioDevicePropertyDeviceHasChanged?
- propertyAddress.mSelector = kAudioDevicePropertyStreamFormat;
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectAddPropertyListener(_inputDeviceID,
- &propertyAddress, &objectListenerProc, this));
-
- // Listen for processor overloads
- propertyAddress.mSelector = kAudioDeviceProcessorOverload;
- WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener(_inputDeviceID,
- &propertyAddress, &objectListenerProc, this));
-
- if (_twoDevices)
- {
- WEBRTC_CA_RETURN_ON_ERR(AudioDeviceCreateIOProcID(_inputDeviceID,
- inDeviceIOProc, this, &_inDeviceIOProcID));
- } else if (!_playIsInitialized)
- {
- WEBRTC_CA_RETURN_ON_ERR(AudioDeviceCreateIOProcID(_inputDeviceID,
- deviceIOProc, this, &_deviceIOProcID));
- }
+ propertyAddress.mSelector = kAudioDevicePropertyBufferSize;
+ size = sizeof(bufByteCount);
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(
+ _inputDeviceID, &propertyAddress, 0, NULL, size, &bufByteCount));
+
+ // Get capture device latency
+ propertyAddress.mSelector = kAudioDevicePropertyLatency;
+ UInt32 latency = 0;
+ size = sizeof(UInt32);
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
+ _inputDeviceID, &propertyAddress, 0, NULL, &size, &latency));
+ _captureLatencyUs = (UInt32)((1.0e6 * latency) / _inStreamFormat.mSampleRate);
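+ // The device reports its latency in frames, so (1.0e6 * frames) divided
+ // by the sample rate gives microseconds; e.g. 128 frames at 48 kHz is
+ // roughly 2667 us. The stream latency below is converted the same way.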
+
+ // Get capture stream latency
+ propertyAddress.mSelector = kAudioDevicePropertyStreams;
+ AudioStreamID stream = 0;
+ size = sizeof(AudioStreamID);
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
+ _inputDeviceID, &propertyAddress, 0, NULL, &size, &stream));
+ propertyAddress.mSelector = kAudioStreamPropertyLatency;
+ size = sizeof(UInt32);
+ latency = 0;
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
+ _inputDeviceID, &propertyAddress, 0, NULL, &size, &latency));
+ _captureLatencyUs +=
+ (UInt32)((1.0e6 * latency) / _inStreamFormat.mSampleRate);
+
+ // Listen for format changes
+ // TODO(xians): should we be using kAudioDevicePropertyDeviceHasChanged?
+ propertyAddress.mSelector = kAudioDevicePropertyStreamFormat;
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectAddPropertyListener(
+ _inputDeviceID, &propertyAddress, &objectListenerProc, this));
+
+ // Listen for processor overloads
+ propertyAddress.mSelector = kAudioDeviceProcessorOverload;
+ WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener(
+ _inputDeviceID, &propertyAddress, &objectListenerProc, this));
+
+ if (_twoDevices) {
+ WEBRTC_CA_RETURN_ON_ERR(AudioDeviceCreateIOProcID(
+ _inputDeviceID, inDeviceIOProc, this, &_inDeviceIOProcID));
+ } else if (!_playIsInitialized) {
+ WEBRTC_CA_RETURN_ON_ERR(AudioDeviceCreateIOProcID(
+ _inputDeviceID, deviceIOProc, this, &_deviceIOProcID));
+ }
- // Mark recording side as initialized
- _recIsInitialized = true;
+ // Mark recording side as initialized
+ _recIsInitialized = true;
- return 0;
+ return 0;
}
-int32_t AudioDeviceMac::StartRecording()
-{
-
- CriticalSectionScoped lock(&_critSect);
+int32_t AudioDeviceMac::StartRecording() {
+ CriticalSectionScoped lock(&_critSect);
- if (!_recIsInitialized)
- {
- return -1;
- }
-
- if (_recording)
- {
- return 0;
- }
+ if (!_recIsInitialized) {
+ return -1;
+ }
- if (!_initialized)
- {
- WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
- " Recording worker thread has not been started");
- return -1;
- }
+ if (_recording) {
+ return 0;
+ }
- RTC_DCHECK(!capture_worker_thread_.get());
- capture_worker_thread_ =
- ThreadWrapper::CreateThread(RunCapture, this, "CaptureWorkerThread");
- RTC_DCHECK(capture_worker_thread_.get());
- capture_worker_thread_->Start();
- capture_worker_thread_->SetPriority(kRealtimePriority);
+ if (!_initialized) {
+ WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+ " Recording worker thread has not been started");
+ return -1;
+ }
- OSStatus err = noErr;
- if (_twoDevices)
- {
- WEBRTC_CA_RETURN_ON_ERR(AudioDeviceStart(_inputDeviceID, _inDeviceIOProcID));
- } else if (!_playing)
- {
- WEBRTC_CA_RETURN_ON_ERR(AudioDeviceStart(_inputDeviceID, _deviceIOProcID));
- }
+ RTC_DCHECK(!capture_worker_thread_.get());
+ capture_worker_thread_.reset(
+ new rtc::PlatformThread(RunCapture, this, "CaptureWorkerThread"));
+ RTC_DCHECK(capture_worker_thread_.get());
+ capture_worker_thread_->Start();
+ capture_worker_thread_->SetPriority(rtc::kRealtimePriority);
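+ // Realtime priority helps the capture thread keep pace with the 10 ms
+ // delivery cadence of the audio device.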
+
+ OSStatus err = noErr;
+ if (_twoDevices) {
+ WEBRTC_CA_RETURN_ON_ERR(
+ AudioDeviceStart(_inputDeviceID, _inDeviceIOProcID));
+ } else if (!_playing) {
+ WEBRTC_CA_RETURN_ON_ERR(AudioDeviceStart(_inputDeviceID, _deviceIOProcID));
+ }
- _recording = true;
+ _recording = true;
- return 0;
+ return 0;
}
-int32_t AudioDeviceMac::StopRecording()
-{
-
- CriticalSectionScoped lock(&_critSect);
+int32_t AudioDeviceMac::StopRecording() {
+ CriticalSectionScoped lock(&_critSect);
- if (!_recIsInitialized)
- {
- return 0;
- }
-
- OSStatus err = noErr;
+ if (!_recIsInitialized) {
+ return 0;
+ }
- // Stop device
- int32_t captureDeviceIsAlive = AtomicGet32(&_captureDeviceIsAlive);
- if (_twoDevices)
- {
- if (_recording && captureDeviceIsAlive == 1)
- {
- _recording = false;
- _doStopRec = true; // Signal to io proc to stop audio device
- _critSect.Leave(); // Cannot be under lock, risk of deadlock
- if (kEventTimeout == _stopEventRec.Wait(2000))
- {
- CriticalSectionScoped critScoped(&_critSect);
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " Timed out stopping the capture IOProc. "
- "We may have failed to detect a device removal.");
-
- WEBRTC_CA_LOG_WARN(AudioDeviceStop(_inputDeviceID,
- _inDeviceIOProcID));
- WEBRTC_CA_LOG_WARN(
- AudioDeviceDestroyIOProcID(_inputDeviceID,
- _inDeviceIOProcID));
- }
- _critSect.Enter();
- _doStopRec = false;
- WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
- " Recording stopped");
- }
- }
- else
- {
- // We signal a stop for a shared device even when rendering has
- // not yet ended. This is to ensure the IOProc will return early as
- // intended (by checking |_recording|) before accessing
- // resources we free below (e.g. the capture converter).
- //
- // In the case of a shared device, the IOProc will verify
- // rendering has ended before stopping itself.
- if (_recording && captureDeviceIsAlive == 1)
- {
- _recording = false;
- _doStop = true; // Signal to io proc to stop audio device
- _critSect.Leave(); // Cannot be under lock, risk of deadlock
- if (kEventTimeout == _stopEvent.Wait(2000))
- {
- CriticalSectionScoped critScoped(&_critSect);
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " Timed out stopping the shared IOProc. "
- "We may have failed to detect a device removal.");
-
- // We assume rendering on a shared device has stopped as well if
- // the IOProc times out.
- WEBRTC_CA_LOG_WARN(AudioDeviceStop(_outputDeviceID,
- _deviceIOProcID));
- WEBRTC_CA_LOG_WARN(AudioDeviceDestroyIOProcID(_outputDeviceID,
- _deviceIOProcID));
- }
- _critSect.Enter();
- _doStop = false;
- WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
- " Recording stopped (shared)");
- }
+ OSStatus err = noErr;
+
+ // Stop device
+ int32_t captureDeviceIsAlive = AtomicGet32(&_captureDeviceIsAlive);
+ if (_twoDevices) {
+ if (_recording && captureDeviceIsAlive == 1) {
+ _recording = false;
+ _doStopRec = true; // Signal to io proc to stop audio device
+ _critSect.Leave(); // Cannot be under lock, risk of deadlock
+ if (kEventTimeout == _stopEventRec.Wait(2000)) {
+ CriticalSectionScoped critScoped(&_critSect);
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " Timed out stopping the capture IOProc. "
+ "We may have failed to detect a device removal.");
+
+ WEBRTC_CA_LOG_WARN(AudioDeviceStop(_inputDeviceID, _inDeviceIOProcID));
+ WEBRTC_CA_LOG_WARN(
+ AudioDeviceDestroyIOProcID(_inputDeviceID, _inDeviceIOProcID));
+ }
+ _critSect.Enter();
+ _doStopRec = false;
+ WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id, " Recording stopped");
+ }
+ } else {
+ // We signal a stop for a shared device even when rendering has
+ // not yet ended. This is to ensure the IOProc will return early as
+ // intended (by checking |_recording|) before accessing
+ // resources we free below (e.g. the capture converter).
+ //
+ // In the case of a shared device, the IOProc will verify
+ // rendering has ended before stopping itself.
+ if (_recording && captureDeviceIsAlive == 1) {
+ _recording = false;
+ _doStop = true; // Signal to io proc to stop audio device
+ _critSect.Leave(); // Cannot be under lock, risk of deadlock
+ if (kEventTimeout == _stopEvent.Wait(2000)) {
+ CriticalSectionScoped critScoped(&_critSect);
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " Timed out stopping the shared IOProc. "
+ "We may have failed to detect a device removal.");
+
+ // We assume rendering on a shared device has stopped as well if
+ // the IOProc times out.
+ WEBRTC_CA_LOG_WARN(AudioDeviceStop(_outputDeviceID, _deviceIOProcID));
+ WEBRTC_CA_LOG_WARN(
+ AudioDeviceDestroyIOProcID(_outputDeviceID, _deviceIOProcID));
+ }
+ _critSect.Enter();
+ _doStop = false;
+ WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+ " Recording stopped (shared)");
}
+ }
- // Setting this signal will allow the worker thread to be stopped.
- AtomicSet32(&_captureDeviceIsAlive, 0);
+ // Setting this signal will allow the worker thread to be stopped.
+ AtomicSet32(&_captureDeviceIsAlive, 0);
- if (capture_worker_thread_.get()) {
- _critSect.Leave();
- capture_worker_thread_->Stop();
- capture_worker_thread_.reset();
- _critSect.Enter();
- }
+ if (capture_worker_thread_.get()) {
+ _critSect.Leave();
+ capture_worker_thread_->Stop();
+ capture_worker_thread_.reset();
+ _critSect.Enter();
+ }
- WEBRTC_CA_LOG_WARN(AudioConverterDispose(_captureConverter));
+ WEBRTC_CA_LOG_WARN(AudioConverterDispose(_captureConverter));
- // Remove listeners.
- AudioObjectPropertyAddress
- propertyAddress = { kAudioDevicePropertyStreamFormat,
- kAudioDevicePropertyScopeInput, 0 };
- WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(_inputDeviceID,
- &propertyAddress, &objectListenerProc, this));
+ // Remove listeners.
+ AudioObjectPropertyAddress propertyAddress = {
+ kAudioDevicePropertyStreamFormat, kAudioDevicePropertyScopeInput, 0};
+ WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(
+ _inputDeviceID, &propertyAddress, &objectListenerProc, this));
- propertyAddress.mSelector = kAudioDeviceProcessorOverload;
- WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(_inputDeviceID,
- &propertyAddress, &objectListenerProc, this));
+ propertyAddress.mSelector = kAudioDeviceProcessorOverload;
+ WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(
+ _inputDeviceID, &propertyAddress, &objectListenerProc, this));
- _recIsInitialized = false;
- _recording = false;
+ _recIsInitialized = false;
+ _recording = false;
- return 0;
+ return 0;
}
-bool AudioDeviceMac::RecordingIsInitialized() const
-{
- return (_recIsInitialized);
+bool AudioDeviceMac::RecordingIsInitialized() const {
+ return (_recIsInitialized);
}
-bool AudioDeviceMac::Recording() const
-{
- return (_recording);
+bool AudioDeviceMac::Recording() const {
+ return (_recording);
}
-bool AudioDeviceMac::PlayoutIsInitialized() const
-{
- return (_playIsInitialized);
+bool AudioDeviceMac::PlayoutIsInitialized() const {
+ return (_playIsInitialized);
}
-int32_t AudioDeviceMac::StartPlayout()
-{
+int32_t AudioDeviceMac::StartPlayout() {
+ CriticalSectionScoped lock(&_critSect);
- CriticalSectionScoped lock(&_critSect);
-
- if (!_playIsInitialized)
- {
- return -1;
- }
+ if (!_playIsInitialized) {
+ return -1;
+ }
- if (_playing)
- {
- return 0;
- }
+ if (_playing) {
+ return 0;
+ }
- RTC_DCHECK(!render_worker_thread_.get());
- render_worker_thread_ =
- ThreadWrapper::CreateThread(RunRender, this, "RenderWorkerThread");
- render_worker_thread_->Start();
- render_worker_thread_->SetPriority(kRealtimePriority);
+ RTC_DCHECK(!render_worker_thread_.get());
+ render_worker_thread_.reset(
+ new rtc::PlatformThread(RunRender, this, "RenderWorkerThread"));
+ render_worker_thread_->Start();
+ render_worker_thread_->SetPriority(rtc::kRealtimePriority);
- if (_twoDevices || !_recording)
- {
- OSStatus err = noErr;
- WEBRTC_CA_RETURN_ON_ERR(AudioDeviceStart(_outputDeviceID, _deviceIOProcID));
- }
- _playing = true;
+ if (_twoDevices || !_recording) {
+ OSStatus err = noErr;
+ WEBRTC_CA_RETURN_ON_ERR(AudioDeviceStart(_outputDeviceID, _deviceIOProcID));
+ }
+ _playing = true;
- return 0;
+ return 0;
}
-int32_t AudioDeviceMac::StopPlayout()
-{
+int32_t AudioDeviceMac::StopPlayout() {
+ CriticalSectionScoped lock(&_critSect);
- CriticalSectionScoped lock(&_critSect);
+ if (!_playIsInitialized) {
+ return 0;
+ }
- if (!_playIsInitialized)
- {
- return 0;
+ OSStatus err = noErr;
+
+ int32_t renderDeviceIsAlive = AtomicGet32(&_renderDeviceIsAlive);
+ if (_playing && renderDeviceIsAlive == 1) {
+ // We signal a stop for a shared device even when capturing has not
+ // yet ended. This is to ensure the IOProc will return early as
+ // intended (by checking |_playing|) before accessing resources we
+ // free below (e.g. the render converter).
+ //
+ // In the case of a shared device, the IOProc will verify capturing
+ // has ended before stopping itself.
+ _playing = false;
+ _doStop = true; // Signal to io proc to stop audio device
+ _critSect.Leave(); // Cannot be under lock, risk of deadlock
+ if (kEventTimeout == _stopEvent.Wait(2000)) {
+ CriticalSectionScoped critScoped(&_critSect);
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " Timed out stopping the render IOProc. "
+ "We may have failed to detect a device removal.");
+
+ // We assume capturing on a shared device has stopped as well if the
+ // IOProc times out.
+ WEBRTC_CA_LOG_WARN(AudioDeviceStop(_outputDeviceID, _deviceIOProcID));
+ WEBRTC_CA_LOG_WARN(
+ AudioDeviceDestroyIOProcID(_outputDeviceID, _deviceIOProcID));
}
+ _critSect.Enter();
+ _doStop = false;
+ WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id, "Playout stopped");
+ }
- OSStatus err = noErr;
+ // Setting this signal will allow the worker thread to be stopped.
+ AtomicSet32(&_renderDeviceIsAlive, 0);
+ if (render_worker_thread_.get()) {
+ _critSect.Leave();
+ render_worker_thread_->Stop();
+ render_worker_thread_.reset();
+ _critSect.Enter();
+ }
- int32_t renderDeviceIsAlive = AtomicGet32(&_renderDeviceIsAlive);
- if (_playing && renderDeviceIsAlive == 1)
- {
- // We signal a stop for a shared device even when capturing has not
- // yet ended. This is to ensure the IOProc will return early as
- // intended (by checking |_playing|) before accessing resources we
- // free below (e.g. the render converter).
- //
- // In the case of a shared device, the IOProc will verify capturing
- // has ended before stopping itself.
- _playing = false;
- _doStop = true; // Signal to io proc to stop audio device
- _critSect.Leave(); // Cannot be under lock, risk of deadlock
- if (kEventTimeout == _stopEvent.Wait(2000))
- {
- CriticalSectionScoped critScoped(&_critSect);
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " Timed out stopping the render IOProc. "
- "We may have failed to detect a device removal.");
-
- // We assume capturing on a shared device has stopped as well if the
- // IOProc times out.
- WEBRTC_CA_LOG_WARN(AudioDeviceStop(_outputDeviceID,
- _deviceIOProcID));
- WEBRTC_CA_LOG_WARN(AudioDeviceDestroyIOProcID(_outputDeviceID,
- _deviceIOProcID));
- }
- _critSect.Enter();
- _doStop = false;
- WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
- "Playout stopped");
- }
+ WEBRTC_CA_LOG_WARN(AudioConverterDispose(_renderConverter));
- // Setting this signal will allow the worker thread to be stopped.
- AtomicSet32(&_renderDeviceIsAlive, 0);
- if (render_worker_thread_.get()) {
- _critSect.Leave();
- render_worker_thread_->Stop();
- render_worker_thread_.reset();
- _critSect.Enter();
- }
+ // Remove listeners.
+ AudioObjectPropertyAddress propertyAddress = {
+ kAudioDevicePropertyStreamFormat, kAudioDevicePropertyScopeOutput, 0};
+ WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(
+ _outputDeviceID, &propertyAddress, &objectListenerProc, this));
- WEBRTC_CA_LOG_WARN(AudioConverterDispose(_renderConverter));
+ propertyAddress.mSelector = kAudioDeviceProcessorOverload;
+ WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(
+ _outputDeviceID, &propertyAddress, &objectListenerProc, this));
- // Remove listeners.
- AudioObjectPropertyAddress propertyAddress = {
- kAudioDevicePropertyStreamFormat, kAudioDevicePropertyScopeOutput,
- 0 };
- WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(_outputDeviceID,
- &propertyAddress, &objectListenerProc, this));
-
- propertyAddress.mSelector = kAudioDeviceProcessorOverload;
- WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(_outputDeviceID,
- &propertyAddress, &objectListenerProc, this));
-
- if (_macBookPro)
- {
- Boolean hasProperty = AudioObjectHasProperty(_outputDeviceID,
- &propertyAddress);
- if (hasProperty)
- {
- propertyAddress.mSelector = kAudioDevicePropertyDataSource;
- WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(_outputDeviceID,
- &propertyAddress, &objectListenerProc, this));
- }
+ if (_macBookPro) {
+ Boolean hasProperty =
+ AudioObjectHasProperty(_outputDeviceID, &propertyAddress);
+ if (hasProperty) {
+ propertyAddress.mSelector = kAudioDevicePropertyDataSource;
+ WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(
+ _outputDeviceID, &propertyAddress, &objectListenerProc, this));
}
+ }
- _playIsInitialized = false;
- _playing = false;
+ _playIsInitialized = false;
+ _playing = false;
- return 0;
+ return 0;
}
-int32_t AudioDeviceMac::PlayoutDelay(uint16_t& delayMS) const
-{
- int32_t renderDelayUs = AtomicGet32(&_renderDelayUs);
- delayMS = static_cast<uint16_t> (1e-3 * (renderDelayUs + _renderLatencyUs) +
- 0.5);
- return 0;
+int32_t AudioDeviceMac::PlayoutDelay(uint16_t& delayMS) const {
+ int32_t renderDelayUs = AtomicGet32(&_renderDelayUs);
+ delayMS =
+ static_cast<uint16_t>(1e-3 * (renderDelayUs + _renderLatencyUs) + 0.5);
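+ // Adding 0.5 before the truncating cast rounds to the nearest
+ // millisecond, e.g. a total of 20600 us yields 21 ms rather than 20 ms.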
+ return 0;
}
-int32_t AudioDeviceMac::RecordingDelay(uint16_t& delayMS) const
-{
- int32_t captureDelayUs = AtomicGet32(&_captureDelayUs);
- delayMS = static_cast<uint16_t> (1e-3 * (captureDelayUs +
- _captureLatencyUs) + 0.5);
- return 0;
+int32_t AudioDeviceMac::RecordingDelay(uint16_t& delayMS) const {
+ int32_t captureDelayUs = AtomicGet32(&_captureDelayUs);
+ delayMS =
+ static_cast<uint16_t>(1e-3 * (captureDelayUs + _captureLatencyUs) + 0.5);
+ return 0;
}
-bool AudioDeviceMac::Playing() const
-{
- return (_playing);
+bool AudioDeviceMac::Playing() const {
+ return (_playing);
}
int32_t AudioDeviceMac::SetPlayoutBuffer(
const AudioDeviceModule::BufferType type,
- uint16_t sizeMS)
-{
-
- if (type != AudioDeviceModule::kFixedBufferSize)
- {
- WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
- " Adaptive buffer size not supported on this platform");
- return -1;
- }
+ uint16_t sizeMS) {
+ if (type != AudioDeviceModule::kFixedBufferSize) {
+ WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+ " Adaptive buffer size not supported on this platform");
+ return -1;
+ }
- _playBufType = type;
- _playBufDelayFixed = sizeMS;
- return 0;
+ _playBufType = type;
+ _playBufDelayFixed = sizeMS;
+ return 0;
}
-int32_t AudioDeviceMac::PlayoutBuffer(
- AudioDeviceModule::BufferType& type,
- uint16_t& sizeMS) const
-{
-
- type = _playBufType;
- sizeMS = _playBufDelayFixed;
+int32_t AudioDeviceMac::PlayoutBuffer(AudioDeviceModule::BufferType& type,
+ uint16_t& sizeMS) const {
+ type = _playBufType;
+ sizeMS = _playBufDelayFixed;
- return 0;
+ return 0;
}
// Not implemented for Mac.
-int32_t AudioDeviceMac::CPULoad(uint16_t& /*load*/) const
-{
-
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " API call not supported on this platform");
+int32_t AudioDeviceMac::CPULoad(uint16_t& /*load*/) const {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " API call not supported on this platform");
- return -1;
+ return -1;
}
-bool AudioDeviceMac::PlayoutWarning() const
-{
- return (_playWarning > 0);
+bool AudioDeviceMac::PlayoutWarning() const {
+ return (_playWarning > 0);
}
-bool AudioDeviceMac::PlayoutError() const
-{
- return (_playError > 0);
+bool AudioDeviceMac::PlayoutError() const {
+ return (_playError > 0);
}
-bool AudioDeviceMac::RecordingWarning() const
-{
- return (_recWarning > 0);
+bool AudioDeviceMac::RecordingWarning() const {
+ return (_recWarning > 0);
}
-bool AudioDeviceMac::RecordingError() const
-{
- return (_recError > 0);
+bool AudioDeviceMac::RecordingError() const {
+ return (_recError > 0);
}
-void AudioDeviceMac::ClearPlayoutWarning()
-{
- _playWarning = 0;
+void AudioDeviceMac::ClearPlayoutWarning() {
+ _playWarning = 0;
}
-void AudioDeviceMac::ClearPlayoutError()
-{
- _playError = 0;
+void AudioDeviceMac::ClearPlayoutError() {
+ _playError = 0;
}
-void AudioDeviceMac::ClearRecordingWarning()
-{
- _recWarning = 0;
+void AudioDeviceMac::ClearRecordingWarning() {
+ _recWarning = 0;
}
-void AudioDeviceMac::ClearRecordingError()
-{
- _recError = 0;
+void AudioDeviceMac::ClearRecordingError() {
+ _recError = 0;
}
// ============================================================================
// Private Methods
// ============================================================================
-int32_t
-AudioDeviceMac::GetNumberDevices(const AudioObjectPropertyScope scope,
- AudioDeviceID scopedDeviceIds[],
- const uint32_t deviceListLength)
-{
- OSStatus err = noErr;
-
- AudioObjectPropertyAddress propertyAddress = {
- kAudioHardwarePropertyDevices, kAudioObjectPropertyScopeGlobal,
- kAudioObjectPropertyElementMaster };
- UInt32 size = 0;
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyDataSize(kAudioObjectSystemObject,
- &propertyAddress, 0, NULL, &size));
- if (size == 0)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- "No devices");
- return 0;
- }
-
- AudioDeviceID* deviceIds = (AudioDeviceID*) malloc(size);
- UInt32 numberDevices = size / sizeof(AudioDeviceID);
- AudioBufferList* bufferList = NULL;
- UInt32 numberScopedDevices = 0;
+int32_t AudioDeviceMac::GetNumberDevices(const AudioObjectPropertyScope scope,
+ AudioDeviceID scopedDeviceIds[],
+ const uint32_t deviceListLength) {
+ OSStatus err = noErr;
+
+ AudioObjectPropertyAddress propertyAddress = {
+ kAudioHardwarePropertyDevices, kAudioObjectPropertyScopeGlobal,
+ kAudioObjectPropertyElementMaster};
+ UInt32 size = 0;
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyDataSize(
+ kAudioObjectSystemObject, &propertyAddress, 0, NULL, &size));
+ if (size == 0) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "No devices");
+ return 0;
+ }
- // First check if there is a default device and list it
- UInt32 hardwareProperty = 0;
- if (scope == kAudioDevicePropertyScopeOutput)
- {
- hardwareProperty = kAudioHardwarePropertyDefaultOutputDevice;
- } else
- {
- hardwareProperty = kAudioHardwarePropertyDefaultInputDevice;
- }
+ AudioDeviceID* deviceIds = (AudioDeviceID*)malloc(size);
+ UInt32 numberDevices = size / sizeof(AudioDeviceID);
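+ // The HAL reports the property size in bytes, so dividing by
+ // sizeof(AudioDeviceID) gives the number of devices.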
+ AudioBufferList* bufferList = NULL;
+ UInt32 numberScopedDevices = 0;
+
+ // First check if there is a default device and list it
+ UInt32 hardwareProperty = 0;
+ if (scope == kAudioDevicePropertyScopeOutput) {
+ hardwareProperty = kAudioHardwarePropertyDefaultOutputDevice;
+ } else {
+ hardwareProperty = kAudioHardwarePropertyDefaultInputDevice;
+ }
- AudioObjectPropertyAddress
- propertyAddressDefault = { hardwareProperty,
- kAudioObjectPropertyScopeGlobal,
- kAudioObjectPropertyElementMaster };
-
- AudioDeviceID usedID;
- UInt32 uintSize = sizeof(UInt32);
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(kAudioObjectSystemObject,
- &propertyAddressDefault, 0, NULL, &uintSize, &usedID));
- if (usedID != kAudioDeviceUnknown)
- {
- scopedDeviceIds[numberScopedDevices] = usedID;
- numberScopedDevices++;
- } else
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- "GetNumberDevices(): Default device unknown");
- }
+ AudioObjectPropertyAddress propertyAddressDefault = {
+ hardwareProperty, kAudioObjectPropertyScopeGlobal,
+ kAudioObjectPropertyElementMaster};
+
+ AudioDeviceID usedID;
+ UInt32 uintSize = sizeof(UInt32);
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(kAudioObjectSystemObject,
+ &propertyAddressDefault, 0,
+ NULL, &uintSize, &usedID));
+ if (usedID != kAudioDeviceUnknown) {
+ scopedDeviceIds[numberScopedDevices] = usedID;
+ numberScopedDevices++;
+ } else {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ "GetNumberDevices(): Default device unknown");
+ }
- // Then list the rest of the devices
- bool listOK = true;
+ // Then list the rest of the devices
+ bool listOK = true;
- WEBRTC_CA_LOG_ERR(AudioObjectGetPropertyData(kAudioObjectSystemObject,
- &propertyAddress, 0, NULL, &size, deviceIds));
- if (err != noErr)
- {
+ WEBRTC_CA_LOG_ERR(AudioObjectGetPropertyData(
+ kAudioObjectSystemObject, &propertyAddress, 0, NULL, &size, deviceIds));
+ if (err != noErr) {
+ listOK = false;
+ } else {
+ propertyAddress.mSelector = kAudioDevicePropertyStreamConfiguration;
+ propertyAddress.mScope = scope;
+ propertyAddress.mElement = 0;
+ for (UInt32 i = 0; i < numberDevices; i++) {
+ // Check for input channels
+ WEBRTC_CA_LOG_ERR(AudioObjectGetPropertyDataSize(
+ deviceIds[i], &propertyAddress, 0, NULL, &size));
+ if (err == kAudioHardwareBadDeviceError) {
+ // This device doesn't actually exist; continue iterating.
+ continue;
+ } else if (err != noErr) {
listOK = false;
- } else
- {
- propertyAddress.mSelector = kAudioDevicePropertyStreamConfiguration;
- propertyAddress.mScope = scope;
- propertyAddress.mElement = 0;
- for (UInt32 i = 0; i < numberDevices; i++)
- {
- // Check for input channels
- WEBRTC_CA_LOG_ERR(AudioObjectGetPropertyDataSize(deviceIds[i],
- &propertyAddress, 0, NULL, &size));
- if (err == kAudioHardwareBadDeviceError)
- {
- // This device doesn't actually exist; continue iterating.
- continue;
- } else if (err != noErr)
- {
- listOK = false;
- break;
- }
-
- bufferList = (AudioBufferList*) malloc(size);
- WEBRTC_CA_LOG_ERR(AudioObjectGetPropertyData(deviceIds[i],
- &propertyAddress, 0, NULL, &size, bufferList));
- if (err != noErr)
- {
- listOK = false;
- break;
- }
-
- if (bufferList->mNumberBuffers > 0)
- {
- if (numberScopedDevices >= deviceListLength)
- {
- WEBRTC_TRACE(kTraceError,
- kTraceAudioDevice, _id,
- "Device list is not long enough");
- listOK = false;
- break;
- }
-
- scopedDeviceIds[numberScopedDevices] = deviceIds[i];
- numberScopedDevices++;
- }
-
- free(bufferList);
- bufferList = NULL;
- } // for
- }
+ break;
+ }
- if (!listOK)
- {
- if (deviceIds)
- {
- free(deviceIds);
- deviceIds = NULL;
+ bufferList = (AudioBufferList*)malloc(size);
+ WEBRTC_CA_LOG_ERR(AudioObjectGetPropertyData(
+ deviceIds[i], &propertyAddress, 0, NULL, &size, bufferList));
+ if (err != noErr) {
+ listOK = false;
+ break;
+ }
+
+ if (bufferList->mNumberBuffers > 0) {
+ if (numberScopedDevices >= deviceListLength) {
+ WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+ "Device list is not long enough");
+ listOK = false;
+ break;
}
- if (bufferList)
- {
- free(bufferList);
- bufferList = NULL;
- }
+ scopedDeviceIds[numberScopedDevices] = deviceIds[i];
+ numberScopedDevices++;
+ }
- return -1;
+ free(bufferList);
+ bufferList = NULL;
+ } // for
+ }
+
+ if (!listOK) {
+ if (deviceIds) {
+ free(deviceIds);
+ deviceIds = NULL;
}
- // Happy ending
- if (deviceIds)
- {
- free(deviceIds);
- deviceIds = NULL;
+ if (bufferList) {
+ free(bufferList);
+ bufferList = NULL;
}
- return numberScopedDevices;
+ return -1;
+ }
+
+ // Happy ending
+ if (deviceIds) {
+ free(deviceIds);
+ deviceIds = NULL;
+ }
+
+ return numberScopedDevices;
}
-int32_t
-AudioDeviceMac::GetDeviceName(const AudioObjectPropertyScope scope,
- const uint16_t index,
- char* name)
-{
- OSStatus err = noErr;
- UInt32 len = kAdmMaxDeviceNameSize;
- AudioDeviceID deviceIds[MaxNumberDevices];
-
- int numberDevices = GetNumberDevices(scope, deviceIds, MaxNumberDevices);
- if (numberDevices < 0)
- {
- return -1;
- } else if (numberDevices == 0)
- {
- WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
- "No devices");
- return -1;
- }
+int32_t AudioDeviceMac::GetDeviceName(const AudioObjectPropertyScope scope,
+ const uint16_t index,
+ char* name) {
+ OSStatus err = noErr;
+ UInt32 len = kAdmMaxDeviceNameSize;
+ AudioDeviceID deviceIds[MaxNumberDevices];
- // If the number is below the number of devices, assume it's "WEBRTC ID"
- // otherwise assume it's a CoreAudio ID
- AudioDeviceID usedID;
-
- // Check if there is a default device
- bool isDefaultDevice = false;
- if (index == 0)
- {
- UInt32 hardwareProperty = 0;
- if (scope == kAudioDevicePropertyScopeOutput)
- {
- hardwareProperty = kAudioHardwarePropertyDefaultOutputDevice;
- } else
- {
- hardwareProperty = kAudioHardwarePropertyDefaultInputDevice;
- }
- AudioObjectPropertyAddress propertyAddress = { hardwareProperty,
- kAudioObjectPropertyScopeGlobal,
- kAudioObjectPropertyElementMaster };
- UInt32 size = sizeof(UInt32);
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(kAudioObjectSystemObject,
- &propertyAddress, 0, NULL, &size, &usedID));
- if (usedID == kAudioDeviceUnknown)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- "GetDeviceName(): Default device unknown");
- } else
- {
- isDefaultDevice = true;
- }
- }
+ int numberDevices = GetNumberDevices(scope, deviceIds, MaxNumberDevices);
+ if (numberDevices < 0) {
+ return -1;
+ } else if (numberDevices == 0) {
+ WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "No devices");
+ return -1;
+ }
+
+ // If the index is below the number of devices, assume it's a "WEBRTC ID";
+ // otherwise assume it's a CoreAudio ID.
+ AudioDeviceID usedID;
+ // Check if there is a default device
+ bool isDefaultDevice = false;
+ if (index == 0) {
+ UInt32 hardwareProperty = 0;
+ if (scope == kAudioDevicePropertyScopeOutput) {
+ hardwareProperty = kAudioHardwarePropertyDefaultOutputDevice;
+ } else {
+ hardwareProperty = kAudioHardwarePropertyDefaultInputDevice;
+ }
AudioObjectPropertyAddress propertyAddress = {
- kAudioDevicePropertyDeviceName, scope, 0 };
-
- if (isDefaultDevice)
- {
- char devName[len];
-
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(usedID,
- &propertyAddress, 0, NULL, &len, devName));
-
- sprintf(name, "default (%s)", devName);
- } else
- {
- if (index < numberDevices)
- {
- usedID = deviceIds[index];
- } else
- {
- usedID = index;
- }
+ hardwareProperty, kAudioObjectPropertyScopeGlobal,
+ kAudioObjectPropertyElementMaster};
+ UInt32 size = sizeof(UInt32);
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
+ kAudioObjectSystemObject, &propertyAddress, 0, NULL, &size, &usedID));
+ if (usedID == kAudioDeviceUnknown) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ "GetDeviceName(): Default device unknown");
+ } else {
+ isDefaultDevice = true;
+ }
+ }
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(usedID,
- &propertyAddress, 0, NULL, &len, name));
+ AudioObjectPropertyAddress propertyAddress = {kAudioDevicePropertyDeviceName,
+ scope, 0};
+
+ if (isDefaultDevice) {
+ char devName[len];
+
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(usedID, &propertyAddress,
+ 0, NULL, &len, devName));
+
+ sprintf(name, "default (%s)", devName);
+ } else {
+ if (index < numberDevices) {
+ usedID = deviceIds[index];
+ } else {
+ usedID = index;
}
- return 0;
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(usedID, &propertyAddress,
+ 0, NULL, &len, name));
+ }
+
+ return 0;
}
int32_t AudioDeviceMac::InitDevice(const uint16_t userDeviceIndex,
AudioDeviceID& deviceId,
- const bool isInput)
-{
- OSStatus err = noErr;
- UInt32 size = 0;
- AudioObjectPropertyScope deviceScope;
- AudioObjectPropertySelector defaultDeviceSelector;
- AudioDeviceID deviceIds[MaxNumberDevices];
-
- if (isInput)
- {
- deviceScope = kAudioDevicePropertyScopeInput;
- defaultDeviceSelector = kAudioHardwarePropertyDefaultInputDevice;
- } else
- {
- deviceScope = kAudioDevicePropertyScopeOutput;
- defaultDeviceSelector = kAudioHardwarePropertyDefaultOutputDevice;
- }
+ const bool isInput) {
+ OSStatus err = noErr;
+ UInt32 size = 0;
+ AudioObjectPropertyScope deviceScope;
+ AudioObjectPropertySelector defaultDeviceSelector;
+ AudioDeviceID deviceIds[MaxNumberDevices];
+
+ if (isInput) {
+ deviceScope = kAudioDevicePropertyScopeInput;
+ defaultDeviceSelector = kAudioHardwarePropertyDefaultInputDevice;
+ } else {
+ deviceScope = kAudioDevicePropertyScopeOutput;
+ defaultDeviceSelector = kAudioHardwarePropertyDefaultOutputDevice;
+ }
- AudioObjectPropertyAddress
- propertyAddress = { defaultDeviceSelector,
- kAudioObjectPropertyScopeGlobal,
- kAudioObjectPropertyElementMaster };
-
- // Get the actual device IDs
- int numberDevices = GetNumberDevices(deviceScope, deviceIds,
- MaxNumberDevices);
- if (numberDevices < 0)
- {
- return -1;
- } else if (numberDevices == 0)
- {
- WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
- "InitDevice(): No devices");
- return -1;
- }
+ AudioObjectPropertyAddress propertyAddress = {
+ defaultDeviceSelector, kAudioObjectPropertyScopeGlobal,
+ kAudioObjectPropertyElementMaster};
- bool isDefaultDevice = false;
- deviceId = kAudioDeviceUnknown;
- if (userDeviceIndex == 0)
- {
- // Try to use default system device
- size = sizeof(AudioDeviceID);
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(kAudioObjectSystemObject,
- &propertyAddress, 0, NULL, &size, &deviceId));
- if (deviceId == kAudioDeviceUnknown)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " No default device exists");
- } else
- {
- isDefaultDevice = true;
- }
- }
+ // Get the actual device IDs
+ int numberDevices =
+ GetNumberDevices(deviceScope, deviceIds, MaxNumberDevices);
+ if (numberDevices < 0) {
+ return -1;
+ } else if (numberDevices == 0) {
+ WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+ "InitDevice(): No devices");
+ return -1;
+ }
- if (!isDefaultDevice)
- {
- deviceId = deviceIds[userDeviceIndex];
+ bool isDefaultDevice = false;
+ deviceId = kAudioDeviceUnknown;
+ if (userDeviceIndex == 0) {
+ // Try to use default system device
+ size = sizeof(AudioDeviceID);
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
+ kAudioObjectSystemObject, &propertyAddress, 0, NULL, &size, &deviceId));
+ if (deviceId == kAudioDeviceUnknown) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " No default device exists");
+ } else {
+ isDefaultDevice = true;
}
+ }
- // Obtain device name and manufacturer for logging.
- // Also use this as a test to ensure a user-set device ID is valid.
- char devName[128];
- char devManf[128];
- memset(devName, 0, sizeof(devName));
- memset(devManf, 0, sizeof(devManf));
-
- propertyAddress.mSelector = kAudioDevicePropertyDeviceName;
- propertyAddress.mScope = deviceScope;
- propertyAddress.mElement = 0;
- size = sizeof(devName);
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(deviceId,
- &propertyAddress, 0, NULL, &size, devName));
-
- propertyAddress.mSelector = kAudioDevicePropertyDeviceManufacturer;
- size = sizeof(devManf);
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(deviceId,
- &propertyAddress, 0, NULL, &size, devManf));
+ if (!isDefaultDevice) {
+ deviceId = deviceIds[userDeviceIndex];
+ }
- if (isInput)
- {
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- " Input device: %s %s", devManf, devName);
- } else
- {
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- " Output device: %s %s", devManf, devName);
- }
+ // Obtain device name and manufacturer for logging.
+ // Also use this as a test to ensure a user-set device ID is valid.
+ char devName[128];
+ char devManf[128];
+ memset(devName, 0, sizeof(devName));
+ memset(devManf, 0, sizeof(devManf));
+
+ propertyAddress.mSelector = kAudioDevicePropertyDeviceName;
+ propertyAddress.mScope = deviceScope;
+ propertyAddress.mElement = 0;
+ size = sizeof(devName);
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(deviceId, &propertyAddress,
+ 0, NULL, &size, devName));
+
+ propertyAddress.mSelector = kAudioDevicePropertyDeviceManufacturer;
+ size = sizeof(devManf);
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(deviceId, &propertyAddress,
+ 0, NULL, &size, devManf));
+
+ if (isInput) {
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, " Input device: %s %s",
+ devManf, devName);
+ } else {
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, " Output device: %s %s",
+ devManf, devName);
+ }
- return 0;
+ return 0;
}
-OSStatus AudioDeviceMac::SetDesiredPlayoutFormat()
-{
- // Our preferred format to work with.
- _outDesiredFormat.mSampleRate = N_PLAY_SAMPLES_PER_SEC;
- _outDesiredFormat.mChannelsPerFrame = _playChannels;
+OSStatus AudioDeviceMac::SetDesiredPlayoutFormat() {
+ // Our preferred format to work with.
+ _outDesiredFormat.mSampleRate = N_PLAY_SAMPLES_PER_SEC;
+ _outDesiredFormat.mChannelsPerFrame = _playChannels;
- if (_ptrAudioBuffer)
- {
- // Update audio buffer with the selected parameters.
- _ptrAudioBuffer->SetPlayoutSampleRate(N_PLAY_SAMPLES_PER_SEC);
- _ptrAudioBuffer->SetPlayoutChannels((uint8_t) _playChannels);
- }
+ if (_ptrAudioBuffer) {
+ // Update audio buffer with the selected parameters.
+ _ptrAudioBuffer->SetPlayoutSampleRate(N_PLAY_SAMPLES_PER_SEC);
+ _ptrAudioBuffer->SetPlayoutChannels((uint8_t)_playChannels);
+ }
- _renderDelayOffsetSamples = _renderBufSizeSamples - N_BUFFERS_OUT *
- ENGINE_PLAY_BUF_SIZE_IN_SAMPLES * _outDesiredFormat.mChannelsPerFrame;
+ _renderDelayOffsetSamples = _renderBufSizeSamples -
+ N_BUFFERS_OUT * ENGINE_PLAY_BUF_SIZE_IN_SAMPLES *
+ _outDesiredFormat.mChannelsPerFrame;
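+ // As the name suggests, this offset is later folded into the render delay
+ // estimate: it is the slack between the ring buffer size and the
+ // N_BUFFERS_OUT engine-sized playout blocks (our reading of the intent).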
- _outDesiredFormat.mBytesPerPacket = _outDesiredFormat.mChannelsPerFrame *
- sizeof(SInt16);
- // In uncompressed audio, a packet is one frame.
- _outDesiredFormat.mFramesPerPacket = 1;
- _outDesiredFormat.mBytesPerFrame = _outDesiredFormat.mChannelsPerFrame *
- sizeof(SInt16);
- _outDesiredFormat.mBitsPerChannel = sizeof(SInt16) * 8;
+ _outDesiredFormat.mBytesPerPacket =
+ _outDesiredFormat.mChannelsPerFrame * sizeof(SInt16);
+ // In uncompressed audio, a packet is one frame.
+ _outDesiredFormat.mFramesPerPacket = 1;
+ _outDesiredFormat.mBytesPerFrame =
+ _outDesiredFormat.mChannelsPerFrame * sizeof(SInt16);
+ _outDesiredFormat.mBitsPerChannel = sizeof(SInt16) * 8;
- _outDesiredFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger |
- kLinearPCMFormatFlagIsPacked;
+ _outDesiredFormat.mFormatFlags =
+ kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
#ifdef WEBRTC_ARCH_BIG_ENDIAN
- _outDesiredFormat.mFormatFlags |= kLinearPCMFormatFlagIsBigEndian;
+ _outDesiredFormat.mFormatFlags |= kLinearPCMFormatFlagIsBigEndian;
#endif
- _outDesiredFormat.mFormatID = kAudioFormatLinearPCM;
-
- OSStatus err = noErr;
- WEBRTC_CA_RETURN_ON_ERR(AudioConverterNew(&_outDesiredFormat,
- &_outStreamFormat,
- &_renderConverter));
-
- // Try to set buffer size to desired value (_playBufDelayFixed).
- UInt32 bufByteCount = static_cast<UInt32> ((_outStreamFormat.mSampleRate /
- 1000.0) *
- _playBufDelayFixed *
- _outStreamFormat.mChannelsPerFrame *
- sizeof(Float32));
- if (_outStreamFormat.mFramesPerPacket != 0)
- {
- if (bufByteCount % _outStreamFormat.mFramesPerPacket != 0)
- {
- bufByteCount = (static_cast<UInt32> (bufByteCount /
- _outStreamFormat.mFramesPerPacket) + 1) *
- _outStreamFormat.mFramesPerPacket;
- }
- }
-
- // Ensure the buffer size is within the range provided by the device.
- AudioObjectPropertyAddress propertyAddress =
- {kAudioDevicePropertyDataSource,
- kAudioDevicePropertyScopeOutput,
- 0};
- propertyAddress.mSelector = kAudioDevicePropertyBufferSizeRange;
- AudioValueRange range;
- UInt32 size = sizeof(range);
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,
- &propertyAddress,
- 0,
- NULL,
- &size,
- &range));
- if (range.mMinimum > bufByteCount)
- {
- bufByteCount = range.mMinimum;
- } else if (range.mMaximum < bufByteCount)
- {
- bufByteCount = range.mMaximum;
+ _outDesiredFormat.mFormatID = kAudioFormatLinearPCM;
+
+ OSStatus err = noErr;
+ WEBRTC_CA_RETURN_ON_ERR(AudioConverterNew(
+ &_outDesiredFormat, &_outStreamFormat, &_renderConverter));
+
+ // Try to set buffer size to desired value (_playBufDelayFixed).
+ UInt32 bufByteCount = static_cast<UInt32>(
+ (_outStreamFormat.mSampleRate / 1000.0) * _playBufDelayFixed *
+ _outStreamFormat.mChannelsPerFrame * sizeof(Float32));
+ if (_outStreamFormat.mFramesPerPacket != 0) {
+ if (bufByteCount % _outStreamFormat.mFramesPerPacket != 0) {
+ bufByteCount = (static_cast<UInt32>(bufByteCount /
+ _outStreamFormat.mFramesPerPacket) +
+ 1) *
+ _outStreamFormat.mFramesPerPacket;
}
+ }
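+ // Illustrative arithmetic: at 48 kHz stereo with a 10 ms fixed delay this
+ // requests 48 * 10 * 2 * 4 = 3840 bytes; the same bytes-versus-frames
+ // rounding caveat as on the capture side applies.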
- propertyAddress.mSelector = kAudioDevicePropertyBufferSize;
- size = sizeof(bufByteCount);
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(_outputDeviceID,
- &propertyAddress,
- 0,
- NULL,
- size,
- &bufByteCount));
-
- // Get render device latency.
- propertyAddress.mSelector = kAudioDevicePropertyLatency;
- UInt32 latency = 0;
- size = sizeof(UInt32);
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,
- &propertyAddress,
- 0,
- NULL,
- &size,
- &latency));
- _renderLatencyUs = static_cast<uint32_t> ((1.0e6 * latency) /
- _outStreamFormat.mSampleRate);
-
- // Get render stream latency.
- propertyAddress.mSelector = kAudioDevicePropertyStreams;
- AudioStreamID stream = 0;
- size = sizeof(AudioStreamID);
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,
- &propertyAddress,
- 0,
- NULL,
- &size,
- &stream));
- propertyAddress.mSelector = kAudioStreamPropertyLatency;
- size = sizeof(UInt32);
- latency = 0;
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,
- &propertyAddress,
- 0,
- NULL,
- &size,
- &latency));
- _renderLatencyUs += static_cast<uint32_t> ((1.0e6 * latency) /
- _outStreamFormat.mSampleRate);
+ // Ensure the buffer size is within the range provided by the device.
+ AudioObjectPropertyAddress propertyAddress = {
+ kAudioDevicePropertyDataSource, kAudioDevicePropertyScopeOutput, 0};
+ propertyAddress.mSelector = kAudioDevicePropertyBufferSizeRange;
+ AudioValueRange range;
+ UInt32 size = sizeof(range);
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
+ _outputDeviceID, &propertyAddress, 0, NULL, &size, &range));
+ if (range.mMinimum > bufByteCount) {
+ bufByteCount = range.mMinimum;
+ } else if (range.mMaximum < bufByteCount) {
+ bufByteCount = range.mMaximum;
+ }
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- " initial playout status: _renderDelayOffsetSamples=%d,"
- " _renderDelayUs=%d, _renderLatencyUs=%d",
- _renderDelayOffsetSamples, _renderDelayUs, _renderLatencyUs);
- return 0;
+ propertyAddress.mSelector = kAudioDevicePropertyBufferSize;
+ size = sizeof(bufByteCount);
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(
+ _outputDeviceID, &propertyAddress, 0, NULL, size, &bufByteCount));
+
+ // Get render device latency.
+ propertyAddress.mSelector = kAudioDevicePropertyLatency;
+ UInt32 latency = 0;
+ size = sizeof(UInt32);
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
+ _outputDeviceID, &propertyAddress, 0, NULL, &size, &latency));
+ _renderLatencyUs =
+ static_cast<uint32_t>((1.0e6 * latency) / _outStreamFormat.mSampleRate);
+
+ // Get render stream latency.
+ propertyAddress.mSelector = kAudioDevicePropertyStreams;
+ AudioStreamID stream = 0;
+ size = sizeof(AudioStreamID);
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
+ _outputDeviceID, &propertyAddress, 0, NULL, &size, &stream));
+ propertyAddress.mSelector = kAudioStreamPropertyLatency;
+ size = sizeof(UInt32);
+ latency = 0;
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
+ _outputDeviceID, &propertyAddress, 0, NULL, &size, &latency));
+ _renderLatencyUs +=
+ static_cast<uint32_t>((1.0e6 * latency) / _outStreamFormat.mSampleRate);
+
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+ " initial playout status: _renderDelayOffsetSamples=%d,"
+ " _renderDelayUs=%d, _renderLatencyUs=%d",
+ _renderDelayOffsetSamples, _renderDelayUs, _renderLatencyUs);
+ return 0;
}
OSStatus AudioDeviceMac::objectListenerProc(
AudioObjectID objectId,
UInt32 numberAddresses,
const AudioObjectPropertyAddress addresses[],
- void* clientData)
-{
- AudioDeviceMac *ptrThis = (AudioDeviceMac *) clientData;
- RTC_DCHECK(ptrThis != NULL);
+ void* clientData) {
+ AudioDeviceMac* ptrThis = (AudioDeviceMac*)clientData;
+ RTC_DCHECK(ptrThis != NULL);
- ptrThis->implObjectListenerProc(objectId, numberAddresses, addresses);
+ ptrThis->implObjectListenerProc(objectId, numberAddresses, addresses);
- // AudioObjectPropertyListenerProc functions are supposed to return 0
- return 0;
+ // AudioObjectPropertyListenerProc functions are supposed to return 0
+ return 0;
}
OSStatus AudioDeviceMac::implObjectListenerProc(
const AudioObjectID objectId,
const UInt32 numberAddresses,
- const AudioObjectPropertyAddress addresses[])
-{
- WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
- "AudioDeviceMac::implObjectListenerProc()");
-
- for (UInt32 i = 0; i < numberAddresses; i++)
- {
- if (addresses[i].mSelector == kAudioHardwarePropertyDevices)
- {
- HandleDeviceChange();
- } else if (addresses[i].mSelector == kAudioDevicePropertyStreamFormat)
- {
- HandleStreamFormatChange(objectId, addresses[i]);
- } else if (addresses[i].mSelector == kAudioDevicePropertyDataSource)
- {
- HandleDataSourceChange(objectId, addresses[i]);
- } else if (addresses[i].mSelector == kAudioDeviceProcessorOverload)
- {
- HandleProcessorOverload(addresses[i]);
- }
+ const AudioObjectPropertyAddress addresses[]) {
+ WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+ "AudioDeviceMac::implObjectListenerProc()");
+
+ for (UInt32 i = 0; i < numberAddresses; i++) {
+ if (addresses[i].mSelector == kAudioHardwarePropertyDevices) {
+ HandleDeviceChange();
+ } else if (addresses[i].mSelector == kAudioDevicePropertyStreamFormat) {
+ HandleStreamFormatChange(objectId, addresses[i]);
+ } else if (addresses[i].mSelector == kAudioDevicePropertyDataSource) {
+ HandleDataSourceChange(objectId, addresses[i]);
+ } else if (addresses[i].mSelector == kAudioDeviceProcessorOverload) {
+ HandleProcessorOverload(addresses[i]);
}
+ }
- return 0;
+ return 0;
}
-int32_t AudioDeviceMac::HandleDeviceChange()
-{
- OSStatus err = noErr;
+int32_t AudioDeviceMac::HandleDeviceChange() {
+ OSStatus err = noErr;
- WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
- "kAudioHardwarePropertyDevices");
-
- // A device has changed. Check if our registered devices have been removed.
- // Ensure the devices have been initialized, meaning the IDs are valid.
- if (MicrophoneIsInitialized())
- {
- AudioObjectPropertyAddress propertyAddress = {
- kAudioDevicePropertyDeviceIsAlive,
- kAudioDevicePropertyScopeInput, 0 };
- UInt32 deviceIsAlive = 1;
- UInt32 size = sizeof(UInt32);
- err = AudioObjectGetPropertyData(_inputDeviceID, &propertyAddress, 0,
- NULL, &size, &deviceIsAlive);
-
- if (err == kAudioHardwareBadDeviceError || deviceIsAlive == 0)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- "Capture device is not alive (probably removed)");
- AtomicSet32(&_captureDeviceIsAlive, 0);
- _mixerManager.CloseMicrophone();
- if (_recError == 1)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice,
- _id, " pending recording error exists");
- }
- _recError = 1; // triggers callback from module process thread
- } else if (err != noErr)
- {
- logCAMsg(kTraceError, kTraceAudioDevice, _id,
- "Error in AudioDeviceGetProperty()", (const char*) &err);
- return -1;
- }
+ WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+ "kAudioHardwarePropertyDevices");
+
+ // A device has changed. Check if our registered devices have been removed.
+ // Ensure the devices have been initialized, meaning the IDs are valid.
+ if (MicrophoneIsInitialized()) {
+ AudioObjectPropertyAddress propertyAddress = {
+ kAudioDevicePropertyDeviceIsAlive, kAudioDevicePropertyScopeInput, 0};
+ UInt32 deviceIsAlive = 1;
+ UInt32 size = sizeof(UInt32);
+ err = AudioObjectGetPropertyData(_inputDeviceID, &propertyAddress, 0, NULL,
+ &size, &deviceIsAlive);
+
+ if (err == kAudioHardwareBadDeviceError || deviceIsAlive == 0) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ "Capture device is not alive (probably removed)");
+ AtomicSet32(&_captureDeviceIsAlive, 0);
+ _mixerManager.CloseMicrophone();
+ if (_recError == 1) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " pending recording error exists");
+ }
+ _recError = 1; // triggers callback from module process thread
+ } else if (err != noErr) {
+ logCAMsg(kTraceError, kTraceAudioDevice, _id,
+ "Error in AudioDeviceGetProperty()", (const char*)&err);
+ return -1;
}
+ }
- if (SpeakerIsInitialized())
- {
- AudioObjectPropertyAddress propertyAddress = {
- kAudioDevicePropertyDeviceIsAlive,
- kAudioDevicePropertyScopeOutput, 0 };
- UInt32 deviceIsAlive = 1;
- UInt32 size = sizeof(UInt32);
- err = AudioObjectGetPropertyData(_outputDeviceID, &propertyAddress, 0,
- NULL, &size, &deviceIsAlive);
-
- if (err == kAudioHardwareBadDeviceError || deviceIsAlive == 0)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- "Render device is not alive (probably removed)");
- AtomicSet32(&_renderDeviceIsAlive, 0);
- _mixerManager.CloseSpeaker();
- if (_playError == 1)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice,
- _id, " pending playout error exists");
- }
- _playError = 1; // triggers callback from module process thread
- } else if (err != noErr)
- {
- logCAMsg(kTraceError, kTraceAudioDevice, _id,
- "Error in AudioDeviceGetProperty()", (const char*) &err);
- return -1;
- }
+ if (SpeakerIsInitialized()) {
+ AudioObjectPropertyAddress propertyAddress = {
+ kAudioDevicePropertyDeviceIsAlive, kAudioDevicePropertyScopeOutput, 0};
+ UInt32 deviceIsAlive = 1;
+ UInt32 size = sizeof(UInt32);
+ err = AudioObjectGetPropertyData(_outputDeviceID, &propertyAddress, 0, NULL,
+ &size, &deviceIsAlive);
+
+ if (err == kAudioHardwareBadDeviceError || deviceIsAlive == 0) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ "Render device is not alive (probably removed)");
+ AtomicSet32(&_renderDeviceIsAlive, 0);
+ _mixerManager.CloseSpeaker();
+ if (_playError == 1) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " pending playout error exists");
+ }
+ _playError = 1; // triggers callback from module process thread
+ } else if (err != noErr) {
+ logCAMsg(kTraceError, kTraceAudioDevice, _id,
+ "Error in AudioDeviceGetProperty()", (const char*)&err);
+ return -1;
}
+ }
- return 0;
+ return 0;
}
int32_t AudioDeviceMac::HandleStreamFormatChange(
const AudioObjectID objectId,
- const AudioObjectPropertyAddress propertyAddress)
-{
- OSStatus err = noErr;
+ const AudioObjectPropertyAddress propertyAddress) {
+ OSStatus err = noErr;
- WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
- "Stream format changed");
+ WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id, "Stream format changed");
- if (objectId != _inputDeviceID && objectId != _outputDeviceID)
- {
- return 0;
- }
-
- // Get the new device format
- AudioStreamBasicDescription streamFormat;
- UInt32 size = sizeof(streamFormat);
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(objectId,
- &propertyAddress, 0, NULL, &size, &streamFormat));
-
- if (streamFormat.mFormatID != kAudioFormatLinearPCM)
- {
- logCAMsg(kTraceError, kTraceAudioDevice, _id,
- "Unacceptable input stream format -> mFormatID",
- (const char *) &streamFormat.mFormatID);
- return -1;
- }
-
- if (streamFormat.mChannelsPerFrame > N_DEVICE_CHANNELS)
- {
- WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
- "Too many channels on device (mChannelsPerFrame = %d)",
- streamFormat.mChannelsPerFrame);
- return -1;
- }
+ if (objectId != _inputDeviceID && objectId != _outputDeviceID) {
+ return 0;
+ }
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- "Stream format:");
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- "mSampleRate = %f, mChannelsPerFrame = %u",
- streamFormat.mSampleRate, streamFormat.mChannelsPerFrame);
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- "mBytesPerPacket = %u, mFramesPerPacket = %u",
- streamFormat.mBytesPerPacket, streamFormat.mFramesPerPacket);
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- "mBytesPerFrame = %u, mBitsPerChannel = %u",
- streamFormat.mBytesPerFrame, streamFormat.mBitsPerChannel);
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- "mFormatFlags = %u",
- streamFormat.mFormatFlags);
- logCAMsg(kTraceInfo, kTraceAudioDevice, _id, "mFormatID",
- (const char *) &streamFormat.mFormatID);
-
- if (propertyAddress.mScope == kAudioDevicePropertyScopeInput)
- {
- const int io_block_size_samples = streamFormat.mChannelsPerFrame *
- streamFormat.mSampleRate / 100 * N_BLOCKS_IO;
- if (io_block_size_samples > _captureBufSizeSamples)
- {
- WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
- "Input IO block size (%d) is larger than ring buffer (%u)",
- io_block_size_samples, _captureBufSizeSamples);
- return -1;
+ // Get the new device format
+ AudioStreamBasicDescription streamFormat;
+ UInt32 size = sizeof(streamFormat);
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
+ objectId, &propertyAddress, 0, NULL, &size, &streamFormat));
- }
+ if (streamFormat.mFormatID != kAudioFormatLinearPCM) {
+ logCAMsg(kTraceError, kTraceAudioDevice, _id,
+ "Unacceptable input stream format -> mFormatID",
+ (const char*)&streamFormat.mFormatID);
+ return -1;
+ }
- memcpy(&_inStreamFormat, &streamFormat, sizeof(streamFormat));
-
- if (_inStreamFormat.mChannelsPerFrame >= 2 && (_recChannels == 2))
- {
- _inDesiredFormat.mChannelsPerFrame = 2;
- } else
- {
- // Disable stereo recording when we only have one channel on the device.
- _inDesiredFormat.mChannelsPerFrame = 1;
- _recChannels = 1;
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- "Stereo recording unavailable on this device");
- }
+ if (streamFormat.mChannelsPerFrame > N_DEVICE_CHANNELS) {
+ WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+ "Too many channels on device (mChannelsPerFrame = %d)",
+ streamFormat.mChannelsPerFrame);
+ return -1;
+ }
- if (_ptrAudioBuffer)
- {
- // Update audio buffer with the selected parameters
- _ptrAudioBuffer->SetRecordingSampleRate(N_REC_SAMPLES_PER_SEC);
- _ptrAudioBuffer->SetRecordingChannels((uint8_t) _recChannels);
- }
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Stream format:");
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+ "mSampleRate = %f, mChannelsPerFrame = %u",
+ streamFormat.mSampleRate, streamFormat.mChannelsPerFrame);
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+ "mBytesPerPacket = %u, mFramesPerPacket = %u",
+ streamFormat.mBytesPerPacket, streamFormat.mFramesPerPacket);
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+ "mBytesPerFrame = %u, mBitsPerChannel = %u",
+ streamFormat.mBytesPerFrame, streamFormat.mBitsPerChannel);
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "mFormatFlags = %u",
+ streamFormat.mFormatFlags);
+ logCAMsg(kTraceInfo, kTraceAudioDevice, _id, "mFormatID",
+ (const char*)&streamFormat.mFormatID);
+
+ if (propertyAddress.mScope == kAudioDevicePropertyScopeInput) {
+ const int io_block_size_samples = streamFormat.mChannelsPerFrame *
+ streamFormat.mSampleRate / 100 *
+ N_BLOCKS_IO;
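+ // mSampleRate / 100 is the frame count of a 10 ms block, so at 48 kHz
+ // stereo this is 2 * 480 = 960 samples per block, times N_BLOCKS_IO.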
+ if (io_block_size_samples > _captureBufSizeSamples) {
+ WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+ "Input IO block size (%d) is larger than ring buffer (%u)",
+ io_block_size_samples, _captureBufSizeSamples);
+ return -1;
+ }
+
+ memcpy(&_inStreamFormat, &streamFormat, sizeof(streamFormat));
+
+ if (_inStreamFormat.mChannelsPerFrame >= 2 && (_recChannels == 2)) {
+ _inDesiredFormat.mChannelsPerFrame = 2;
+ } else {
+ // Disable stereo recording when we only have one channel on the device.
+ _inDesiredFormat.mChannelsPerFrame = 1;
+ _recChannels = 1;
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+ "Stereo recording unavailable on this device");
+ }
+
+ if (_ptrAudioBuffer) {
+ // Update audio buffer with the selected parameters
+ _ptrAudioBuffer->SetRecordingSampleRate(N_REC_SAMPLES_PER_SEC);
+ _ptrAudioBuffer->SetRecordingChannels((uint8_t)_recChannels);
+ }
+
+ // Recreate the converter with the new format
+ // TODO(xians): make this thread safe
+ WEBRTC_CA_RETURN_ON_ERR(AudioConverterDispose(_captureConverter));
+
+ WEBRTC_CA_RETURN_ON_ERR(AudioConverterNew(&streamFormat, &_inDesiredFormat,
+ &_captureConverter));
+ } else {
+ memcpy(&_outStreamFormat, &streamFormat, sizeof(streamFormat));
- // Recreate the converter with the new format
- // TODO(xians): make this thread safe
- WEBRTC_CA_RETURN_ON_ERR(AudioConverterDispose(_captureConverter));
-
- WEBRTC_CA_RETURN_ON_ERR(AudioConverterNew(&streamFormat, &_inDesiredFormat,
- &_captureConverter));
- } else
- {
- memcpy(&_outStreamFormat, &streamFormat, sizeof(streamFormat));
-
- // Our preferred format to work with
- if (_outStreamFormat.mChannelsPerFrame < 2)
- {
- _playChannels = 1;
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- "Stereo playout unavailable on this device");
- }
- WEBRTC_CA_RETURN_ON_ERR(SetDesiredPlayoutFormat());
+ // Our preferred format to work with
+ if (_outStreamFormat.mChannelsPerFrame < 2) {
+ _playChannels = 1;
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+ "Stereo playout unavailable on this device");
}
- return 0;
+ WEBRTC_CA_RETURN_ON_ERR(SetDesiredPlayoutFormat());
+ }
+ return 0;
}
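A quick sanity check of the IO block-size guard above, as a standalone sketch. The 48 kHz stereo figures are illustrative assumptions; the constants mirror those in audio_device_mac.h:

#include <cstdio>

int main() {
  const int kNBlocksIo = 2;                 // N_BLOCKS_IO
  const double sampleRate = 48000.0;        // assumed device rate
  const int channels = 2;                   // assumed device channels
  const int ringBufSamples = 480 * 64 * 2;  // REC_BUF_SIZE_IN_SAMPLES shape
  // One 10 ms block is sampleRate / 100 frames; the IO block spans
  // kNBlocksIo of them, interleaved across all channels.
  const int ioBlockSamples =
      static_cast<int>(channels * sampleRate / 100 * kNBlocksIo);  // 1920
  std::printf("io block = %d samples, ring buffer = %d samples\n",
              ioBlockSamples, ringBufSamples);
  return ioBlockSamples <= ringBufSamples ? 0 : 1;
}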
int32_t AudioDeviceMac::HandleDataSourceChange(
const AudioObjectID objectId,
- const AudioObjectPropertyAddress propertyAddress)
-{
- OSStatus err = noErr;
+ const AudioObjectPropertyAddress propertyAddress) {
+ OSStatus err = noErr;
- if (_macBookPro && propertyAddress.mScope
- == kAudioDevicePropertyScopeOutput)
- {
- WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
- "Data source changed");
-
- _macBookProPanRight = false;
- UInt32 dataSource = 0;
- UInt32 size = sizeof(UInt32);
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(objectId,
- &propertyAddress, 0, NULL, &size, &dataSource));
- if (dataSource == 'ispk')
- {
- _macBookProPanRight = true;
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- "MacBook Pro using internal speakers; stereo panning right");
- } else
- {
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- "MacBook Pro not using internal speakers");
- }
+ if (_macBookPro &&
+ propertyAddress.mScope == kAudioDevicePropertyScopeOutput) {
+ WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id, "Data source changed");
+
+ _macBookProPanRight = false;
+ UInt32 dataSource = 0;
+ UInt32 size = sizeof(UInt32);
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
+ objectId, &propertyAddress, 0, NULL, &size, &dataSource));
+ if (dataSource == 'ispk') {
+ _macBookProPanRight = true;
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+ "MacBook Pro using internal speakers; stereo panning right");
+ } else {
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+ "MacBook Pro not using internal speakers");
}
+ }
- return 0;
+ return 0;
}
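'ispk' above is a FourCC; logCAMsg relies on reinterpreting the 32-bit code as four characters. A minimal sketch of that decoding, assuming the code is stored big-endian as CoreAudio constants are:

#include <cstdint>
#include <cstdio>

// Turn a FourCC such as 'ispk' into a printable string.
void PrintFourCC(uint32_t code) {
  char s[5] = {static_cast<char>(code >> 24), static_cast<char>(code >> 16),
               static_cast<char>(code >> 8), static_cast<char>(code), '\0'};
  std::printf("%s\n", s);
}

int main() {
  PrintFourCC(0x6973706B);  // 'ispk' = internal speakers
}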
int32_t AudioDeviceMac::HandleProcessorOverload(
- const AudioObjectPropertyAddress propertyAddress)
-{
- // TODO(xians): we probably want to notify the user in some way of the
- // overload. However, the Windows interpretations of these errors seem to
- // be more severe than what ProcessorOverload is thrown for.
- //
- // We don't log the notification, as it's sent from the HAL's IO thread. We
- // don't want to slow it down even further.
- if (propertyAddress.mScope == kAudioDevicePropertyScopeInput)
- {
- //WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "Capture processor
- // overload");
- //_callback->ProblemIsReported(
- // SndCardStreamObserver::ERecordingProblem);
- } else
- {
- //WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- // "Render processor overload");
- //_callback->ProblemIsReported(
- // SndCardStreamObserver::EPlaybackProblem);
- }
+ const AudioObjectPropertyAddress propertyAddress) {
+ // TODO(xians): we probably want to notify the user in some way of the
+ // overload. However, the Windows interpretations of these errors seem to
+ // be more severe than what ProcessorOverload is thrown for.
+ //
+ // We don't log the notification, as it's sent from the HAL's IO thread. We
+ // don't want to slow it down even further.
+ if (propertyAddress.mScope == kAudioDevicePropertyScopeInput) {
+ // WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "Capture processor
+ // overload");
+ //_callback->ProblemIsReported(
+ // SndCardStreamObserver::ERecordingProblem);
+ } else {
+ // WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ // "Render processor overload");
+ //_callback->ProblemIsReported(
+ // SndCardStreamObserver::EPlaybackProblem);
+ }
- return 0;
+ return 0;
}
// ============================================================================
// Thread Methods
// ============================================================================
-OSStatus AudioDeviceMac::deviceIOProc(AudioDeviceID, const AudioTimeStamp*,
+OSStatus AudioDeviceMac::deviceIOProc(AudioDeviceID,
+ const AudioTimeStamp*,
const AudioBufferList* inputData,
const AudioTimeStamp* inputTime,
AudioBufferList* outputData,
const AudioTimeStamp* outputTime,
- void *clientData)
-{
- AudioDeviceMac *ptrThis = (AudioDeviceMac *) clientData;
- RTC_DCHECK(ptrThis != NULL);
+ void* clientData) {
+ AudioDeviceMac* ptrThis = (AudioDeviceMac*)clientData;
+ RTC_DCHECK(ptrThis != NULL);
- ptrThis->implDeviceIOProc(inputData, inputTime, outputData, outputTime);
+ ptrThis->implDeviceIOProc(inputData, inputTime, outputData, outputTime);
- // AudioDeviceIOProc functions are supposed to return 0
- return 0;
+ // AudioDeviceIOProc functions are supposed to return 0
+ return 0;
}
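deviceIOProc is the usual C-callback trampoline: the HAL calls a free function, and the opaque clientData pointer recovers the C++ instance. A generic sketch of the pattern; all names here are illustrative:

struct Engine {
  // The C API only accepts a plain function pointer plus a void* cookie.
  static int Trampoline(int event, void* cookie) {
    return static_cast<Engine*>(cookie)->OnEvent(event);
  }
  int OnEvent(int event) { return event; }  // real work happens here
};

int main() {
  Engine engine;
  // Registration with the C API would pass &engine as the cookie.
  return Engine::Trampoline(0, &engine);
}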
OSStatus AudioDeviceMac::outConverterProc(AudioConverterRef,
- UInt32 *numberDataPackets,
- AudioBufferList *data,
- AudioStreamPacketDescription **,
- void *userData)
-{
- AudioDeviceMac *ptrThis = (AudioDeviceMac *) userData;
- RTC_DCHECK(ptrThis != NULL);
+ UInt32* numberDataPackets,
+ AudioBufferList* data,
+ AudioStreamPacketDescription**,
+ void* userData) {
+ AudioDeviceMac* ptrThis = (AudioDeviceMac*)userData;
+ RTC_DCHECK(ptrThis != NULL);
- return ptrThis->implOutConverterProc(numberDataPackets, data);
+ return ptrThis->implOutConverterProc(numberDataPackets, data);
}
-OSStatus AudioDeviceMac::inDeviceIOProc(AudioDeviceID, const AudioTimeStamp*,
+OSStatus AudioDeviceMac::inDeviceIOProc(AudioDeviceID,
+ const AudioTimeStamp*,
const AudioBufferList* inputData,
const AudioTimeStamp* inputTime,
AudioBufferList*,
- const AudioTimeStamp*, void* clientData)
-{
- AudioDeviceMac *ptrThis = (AudioDeviceMac *) clientData;
- RTC_DCHECK(ptrThis != NULL);
+ const AudioTimeStamp*,
+ void* clientData) {
+ AudioDeviceMac* ptrThis = (AudioDeviceMac*)clientData;
+ RTC_DCHECK(ptrThis != NULL);
- ptrThis->implInDeviceIOProc(inputData, inputTime);
+ ptrThis->implInDeviceIOProc(inputData, inputTime);
- // AudioDeviceIOProc functions are supposed to return 0
- return 0;
+ // AudioDeviceIOProc functions are supposed to return 0
+ return 0;
}
OSStatus AudioDeviceMac::inConverterProc(
AudioConverterRef,
- UInt32 *numberDataPackets,
- AudioBufferList *data,
- AudioStreamPacketDescription ** /*dataPacketDescription*/,
- void *userData)
-{
- AudioDeviceMac *ptrThis = static_cast<AudioDeviceMac*> (userData);
- RTC_DCHECK(ptrThis != NULL);
-
- return ptrThis->implInConverterProc(numberDataPackets, data);
-}
-
-OSStatus AudioDeviceMac::implDeviceIOProc(const AudioBufferList *inputData,
- const AudioTimeStamp *inputTime,
- AudioBufferList *outputData,
- const AudioTimeStamp *outputTime)
-{
- OSStatus err = noErr;
- UInt64 outputTimeNs = AudioConvertHostTimeToNanos(outputTime->mHostTime);
- UInt64 nowNs = AudioConvertHostTimeToNanos(AudioGetCurrentHostTime());
-
- if (!_twoDevices && _recording)
- {
- implInDeviceIOProc(inputData, inputTime);
- }
+ UInt32* numberDataPackets,
+ AudioBufferList* data,
+ AudioStreamPacketDescription** /*dataPacketDescription*/,
+ void* userData) {
+ AudioDeviceMac* ptrThis = static_cast<AudioDeviceMac*>(userData);
+ RTC_DCHECK(ptrThis != NULL);
+
+ return ptrThis->implInConverterProc(numberDataPackets, data);
+}
+
+OSStatus AudioDeviceMac::implDeviceIOProc(const AudioBufferList* inputData,
+ const AudioTimeStamp* inputTime,
+ AudioBufferList* outputData,
+ const AudioTimeStamp* outputTime) {
+ OSStatus err = noErr;
+ UInt64 outputTimeNs = AudioConvertHostTimeToNanos(outputTime->mHostTime);
+ UInt64 nowNs = AudioConvertHostTimeToNanos(AudioGetCurrentHostTime());
+
+ if (!_twoDevices && _recording) {
+ implInDeviceIOProc(inputData, inputTime);
+ }
- // Check if we should close down audio device
- // Double-checked locking optimization to remove locking overhead
- if (_doStop)
- {
- _critSect.Enter();
- if (_doStop)
- {
- if (_twoDevices || (!_recording && !_playing))
- {
- // In the case of a shared device, the single driving ioProc
- // is stopped here
- WEBRTC_CA_LOG_ERR(AudioDeviceStop(_outputDeviceID,
- _deviceIOProcID));
- WEBRTC_CA_LOG_WARN(AudioDeviceDestroyIOProcID(_outputDeviceID,
- _deviceIOProcID));
- if (err == noErr)
- {
- WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice,
- _id, " Playout or shared device stopped");
- }
- }
-
- _doStop = false;
- _stopEvent.Set();
- _critSect.Leave();
- return 0;
+ // Check if we should close down audio device
+ // Double-checked locking optimization to remove locking overhead
+ if (_doStop) {
+ _critSect.Enter();
+ if (_doStop) {
+ if (_twoDevices || (!_recording && !_playing)) {
+ // In the case of a shared device, the single driving ioProc
+ // is stopped here
+ WEBRTC_CA_LOG_ERR(AudioDeviceStop(_outputDeviceID, _deviceIOProcID));
+ WEBRTC_CA_LOG_WARN(
+ AudioDeviceDestroyIOProcID(_outputDeviceID, _deviceIOProcID));
+ if (err == noErr) {
+ WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+ " Playout or shared device stopped");
}
- _critSect.Leave();
- }
+ }
- if (!_playing)
- {
- // This can be the case when a shared device is capturing but not
- // rendering. We allow the checks above before returning to avoid a
- // timeout when capturing is stopped.
- return 0;
+ _doStop = false;
+ _stopEvent.Set();
+ _critSect.Leave();
+ return 0;
}
+ _critSect.Leave();
+ }
- RTC_DCHECK(_outStreamFormat.mBytesPerFrame != 0);
- UInt32 size = outputData->mBuffers->mDataByteSize
- / _outStreamFormat.mBytesPerFrame;
-
- // TODO(xians): signal an error somehow?
- err = AudioConverterFillComplexBuffer(_renderConverter, outConverterProc,
- this, &size, outputData, NULL);
- if (err != noErr)
- {
- if (err == 1)
- {
- // This is our own error.
- WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
- " Error in AudioConverterFillComplexBuffer()");
- return 1;
- } else
- {
- logCAMsg(kTraceError, kTraceAudioDevice, _id,
- "Error in AudioConverterFillComplexBuffer()",
- (const char *) &err);
- return 1;
- }
+ if (!_playing) {
+ // This can be the case when a shared device is capturing but not
+ // rendering. We allow the checks above before returning to avoid a
+ // timeout when capturing is stopped.
+ return 0;
+ }
+
+ RTC_DCHECK(_outStreamFormat.mBytesPerFrame != 0);
+ UInt32 size =
+ outputData->mBuffers->mDataByteSize / _outStreamFormat.mBytesPerFrame;
+
+ // TODO(xians): signal an error somehow?
+ err = AudioConverterFillComplexBuffer(_renderConverter, outConverterProc,
+ this, &size, outputData, NULL);
+ if (err != noErr) {
+ if (err == 1) {
+ // This is our own error.
+ WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+ " Error in AudioConverterFillComplexBuffer()");
+ return 1;
+ } else {
+ logCAMsg(kTraceError, kTraceAudioDevice, _id,
+ "Error in AudioConverterFillComplexBuffer()", (const char*)&err);
+ return 1;
}
+ }
- PaRingBufferSize bufSizeSamples =
- PaUtil_GetRingBufferReadAvailable(_paRenderBuffer);
+ PaRingBufferSize bufSizeSamples =
+ PaUtil_GetRingBufferReadAvailable(_paRenderBuffer);
- int32_t renderDelayUs = static_cast<int32_t> (1e-3 * (outputTimeNs - nowNs)
- + 0.5);
- renderDelayUs += static_cast<int32_t> ((1.0e6 * bufSizeSamples)
- / _outDesiredFormat.mChannelsPerFrame / _outDesiredFormat.mSampleRate
- + 0.5);
+ int32_t renderDelayUs =
+ static_cast<int32_t>(1e-3 * (outputTimeNs - nowNs) + 0.5);
+ renderDelayUs += static_cast<int32_t>(
+ (1.0e6 * bufSizeSamples) / _outDesiredFormat.mChannelsPerFrame /
+ _outDesiredFormat.mSampleRate +
+ 0.5);
- AtomicSet32(&_renderDelayUs, renderDelayUs);
+ AtomicSet32(&_renderDelayUs, renderDelayUs);
- return 0;
+ return 0;
}
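The render delay estimate combines the host-time headroom until the output is due with whatever is already queued in the ring buffer. A worked example under assumed numbers (48 kHz stereo, 960 interleaved samples buffered, output due 5 ms from now):

#include <cstdint>
#include <cstdio>

int main() {
  const uint64_t outputTimeNs = 25000000;  // assumed: output due at t = 25 ms
  const uint64_t nowNs = 20000000;         // assumed: now is t = 20 ms
  const double sampleRate = 48000.0;
  const int channels = 2;
  const long bufSizeSamples = 960;         // interleaved samples queued

  // Headroom until the device consumes this buffer: 5e6 ns -> 5000 us.
  int32_t delayUs = static_cast<int32_t>(1e-3 * (outputTimeNs - nowNs) + 0.5);
  // Queued audio: 960 / 2 = 480 frames at 48 kHz -> 10000 us.
  delayUs += static_cast<int32_t>(
      1.0e6 * bufSizeSamples / channels / sampleRate + 0.5);
  std::printf("render delay = %d us\n", delayUs);  // 15000
}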
-OSStatus AudioDeviceMac::implOutConverterProc(UInt32 *numberDataPackets,
- AudioBufferList *data)
-{
+OSStatus AudioDeviceMac::implOutConverterProc(UInt32* numberDataPackets,
+ AudioBufferList* data) {
RTC_DCHECK(data->mNumberBuffers == 1);
- PaRingBufferSize numSamples = *numberDataPackets
- * _outDesiredFormat.mChannelsPerFrame;
-
- data->mBuffers->mNumberChannels = _outDesiredFormat.mChannelsPerFrame;
- // Always give the converter as much as it wants, zero padding as required.
- data->mBuffers->mDataByteSize = *numberDataPackets
- * _outDesiredFormat.mBytesPerPacket;
- data->mBuffers->mData = _renderConvertData;
- memset(_renderConvertData, 0, sizeof(_renderConvertData));
-
- PaUtil_ReadRingBuffer(_paRenderBuffer, _renderConvertData, numSamples);
-
- kern_return_t kernErr = semaphore_signal_all(_renderSemaphore);
- if (kernErr != KERN_SUCCESS)
- {
- WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
- " semaphore_signal_all() error: %d", kernErr);
- return 1;
- }
+ PaRingBufferSize numSamples =
+ *numberDataPackets * _outDesiredFormat.mChannelsPerFrame;
- return 0;
+ data->mBuffers->mNumberChannels = _outDesiredFormat.mChannelsPerFrame;
+ // Always give the converter as much as it wants, zero padding as required.
+ data->mBuffers->mDataByteSize =
+ *numberDataPackets * _outDesiredFormat.mBytesPerPacket;
+ data->mBuffers->mData = _renderConvertData;
+ memset(_renderConvertData, 0, sizeof(_renderConvertData));
+
+ PaUtil_ReadRingBuffer(_paRenderBuffer, _renderConvertData, numSamples);
+
+ kern_return_t kernErr = semaphore_signal_all(_renderSemaphore);
+ if (kernErr != KERN_SUCCESS) {
+ WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+ " semaphore_signal_all() error: %d", kernErr);
+ return 1;
+ }
+
+ return 0;
}
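Zeroing _renderConvertData before the ring-buffer read means an underrun degrades to silence rather than stale audio, and semaphore_signal_all then wakes the render worker to refill. A sketch of the zero-pad-on-underrun idea with a plain stand-in for the PortAudio ring buffer (the stand-in is an assumption, not the real API):

#include <algorithm>
#include <cstdio>
#include <cstring>

// Stand-in for PaUtil_ReadRingBuffer: copies what is available, returns count.
size_t ReadAvailable(const short* ring, size_t avail, short* dst, size_t want) {
  size_t n = std::min(avail, want);
  std::memcpy(dst, ring, n * sizeof(short));
  return n;
}

int main() {
  short ring[4] = {1, 2, 3, 4};
  short out[8];
  std::memset(out, 0, sizeof(out));  // pre-zero: an underrun becomes silence
  size_t got = ReadAvailable(ring, 4, out, 8);
  std::printf("got %zu samples; tail stays zero: %d\n", got, out[7]);
}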
-OSStatus AudioDeviceMac::implInDeviceIOProc(const AudioBufferList *inputData,
- const AudioTimeStamp *inputTime)
-{
- OSStatus err = noErr;
- UInt64 inputTimeNs = AudioConvertHostTimeToNanos(inputTime->mHostTime);
- UInt64 nowNs = AudioConvertHostTimeToNanos(AudioGetCurrentHostTime());
-
- // Check if we should close down audio device
- // Double-checked locking optimization to remove locking overhead
- if (_doStopRec)
- {
- _critSect.Enter();
- if (_doStopRec)
- {
- // This will be signalled only when a shared device is not in use.
- WEBRTC_CA_LOG_ERR(AudioDeviceStop(_inputDeviceID, _inDeviceIOProcID));
- WEBRTC_CA_LOG_WARN(AudioDeviceDestroyIOProcID(_inputDeviceID,
- _inDeviceIOProcID));
- if (err == noErr)
- {
- WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice,
- _id, " Recording device stopped");
- }
-
- _doStopRec = false;
- _stopEventRec.Set();
- _critSect.Leave();
- return 0;
- }
- _critSect.Leave();
- }
+OSStatus AudioDeviceMac::implInDeviceIOProc(const AudioBufferList* inputData,
+ const AudioTimeStamp* inputTime) {
+ OSStatus err = noErr;
+ UInt64 inputTimeNs = AudioConvertHostTimeToNanos(inputTime->mHostTime);
+ UInt64 nowNs = AudioConvertHostTimeToNanos(AudioGetCurrentHostTime());
- if (!_recording)
- {
- // Allow above checks to avoid a timeout on stopping capture.
- return 0;
- }
+ // Check if we should close down audio device
+ // Double-checked locking optimization to remove locking overhead
+ if (_doStopRec) {
+ _critSect.Enter();
+ if (_doStopRec) {
+ // This will be signalled only when a shared device is not in use.
+ WEBRTC_CA_LOG_ERR(AudioDeviceStop(_inputDeviceID, _inDeviceIOProcID));
+ WEBRTC_CA_LOG_WARN(
+ AudioDeviceDestroyIOProcID(_inputDeviceID, _inDeviceIOProcID));
+ if (err == noErr) {
+ WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+ " Recording device stopped");
+ }
- PaRingBufferSize bufSizeSamples =
- PaUtil_GetRingBufferReadAvailable(_paCaptureBuffer);
-
- int32_t captureDelayUs = static_cast<int32_t> (1e-3 * (nowNs - inputTimeNs)
- + 0.5);
- captureDelayUs
- += static_cast<int32_t> ((1.0e6 * bufSizeSamples)
- / _inStreamFormat.mChannelsPerFrame / _inStreamFormat.mSampleRate
- + 0.5);
-
- AtomicSet32(&_captureDelayUs, captureDelayUs);
-
- RTC_DCHECK(inputData->mNumberBuffers == 1);
- PaRingBufferSize numSamples = inputData->mBuffers->mDataByteSize
- * _inStreamFormat.mChannelsPerFrame / _inStreamFormat.mBytesPerPacket;
- PaUtil_WriteRingBuffer(_paCaptureBuffer, inputData->mBuffers->mData,
- numSamples);
-
- kern_return_t kernErr = semaphore_signal_all(_captureSemaphore);
- if (kernErr != KERN_SUCCESS)
- {
- WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
- " semaphore_signal_all() error: %d", kernErr);
+ _doStopRec = false;
+ _stopEventRec.Set();
+ _critSect.Leave();
+ return 0;
}
+ _critSect.Leave();
+ }
- return err;
-}
+ if (!_recording) {
+ // Allow above checks to avoid a timeout on stopping capture.
+ return 0;
+ }
-OSStatus AudioDeviceMac::implInConverterProc(UInt32 *numberDataPackets,
- AudioBufferList *data)
-{
- RTC_DCHECK(data->mNumberBuffers == 1);
- PaRingBufferSize numSamples = *numberDataPackets
- * _inStreamFormat.mChannelsPerFrame;
-
- while (PaUtil_GetRingBufferReadAvailable(_paCaptureBuffer) < numSamples)
- {
- mach_timespec_t timeout;
- timeout.tv_sec = 0;
- timeout.tv_nsec = TIMER_PERIOD_MS;
-
- kern_return_t kernErr = semaphore_timedwait(_captureSemaphore, timeout);
- if (kernErr == KERN_OPERATION_TIMED_OUT)
- {
- int32_t signal = AtomicGet32(&_captureDeviceIsAlive);
- if (signal == 0)
- {
- // The capture device is no longer alive; stop the worker thread.
- *numberDataPackets = 0;
- return 1;
- }
- } else if (kernErr != KERN_SUCCESS)
- {
- WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
- " semaphore_wait() error: %d", kernErr);
- }
- }
+ PaRingBufferSize bufSizeSamples =
+ PaUtil_GetRingBufferReadAvailable(_paCaptureBuffer);
- // Pass the read pointer directly to the converter to avoid a memcpy.
- void* dummyPtr;
- PaRingBufferSize dummySize;
- PaUtil_GetRingBufferReadRegions(_paCaptureBuffer, numSamples,
- &data->mBuffers->mData, &numSamples,
- &dummyPtr, &dummySize);
- PaUtil_AdvanceRingBufferReadIndex(_paCaptureBuffer, numSamples);
+ int32_t captureDelayUs =
+ static_cast<int32_t>(1e-3 * (nowNs - inputTimeNs) + 0.5);
+ captureDelayUs += static_cast<int32_t>((1.0e6 * bufSizeSamples) /
+ _inStreamFormat.mChannelsPerFrame /
+ _inStreamFormat.mSampleRate +
+ 0.5);
- data->mBuffers->mNumberChannels = _inStreamFormat.mChannelsPerFrame;
- *numberDataPackets = numSamples / _inStreamFormat.mChannelsPerFrame;
- data->mBuffers->mDataByteSize = *numberDataPackets
- * _inStreamFormat.mBytesPerPacket;
+ AtomicSet32(&_captureDelayUs, captureDelayUs);
- return 0;
+ RTC_DCHECK(inputData->mNumberBuffers == 1);
+ PaRingBufferSize numSamples = inputData->mBuffers->mDataByteSize *
+ _inStreamFormat.mChannelsPerFrame /
+ _inStreamFormat.mBytesPerPacket;
+ PaUtil_WriteRingBuffer(_paCaptureBuffer, inputData->mBuffers->mData,
+ numSamples);
+
+ kern_return_t kernErr = semaphore_signal_all(_captureSemaphore);
+ if (kernErr != KERN_SUCCESS) {
+ WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+ " semaphore_signal_all() error: %d", kernErr);
+ }
+
+ return err;
}
-bool AudioDeviceMac::RunRender(void* ptrThis)
-{
- return static_cast<AudioDeviceMac*> (ptrThis)->RenderWorkerThread();
-}
-
-bool AudioDeviceMac::RenderWorkerThread()
-{
- PaRingBufferSize numSamples = ENGINE_PLAY_BUF_SIZE_IN_SAMPLES
- * _outDesiredFormat.mChannelsPerFrame;
- while (PaUtil_GetRingBufferWriteAvailable(_paRenderBuffer)
- - _renderDelayOffsetSamples < numSamples)
- {
- mach_timespec_t timeout;
- timeout.tv_sec = 0;
- timeout.tv_nsec = TIMER_PERIOD_MS;
-
- kern_return_t kernErr = semaphore_timedwait(_renderSemaphore, timeout);
- if (kernErr == KERN_OPERATION_TIMED_OUT)
- {
- int32_t signal = AtomicGet32(&_renderDeviceIsAlive);
- if (signal == 0)
- {
- // The render device is no longer alive; stop the worker thread.
- return false;
- }
- } else if (kernErr != KERN_SUCCESS)
- {
- WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
- " semaphore_timedwait() error: %d", kernErr);
- }
+OSStatus AudioDeviceMac::implInConverterProc(UInt32* numberDataPackets,
+ AudioBufferList* data) {
+ RTC_DCHECK(data->mNumberBuffers == 1);
+ PaRingBufferSize numSamples =
+ *numberDataPackets * _inStreamFormat.mChannelsPerFrame;
+
+ while (PaUtil_GetRingBufferReadAvailable(_paCaptureBuffer) < numSamples) {
+ mach_timespec_t timeout;
+ timeout.tv_sec = 0;
+ timeout.tv_nsec = TIMER_PERIOD_MS;
+
+ kern_return_t kernErr = semaphore_timedwait(_captureSemaphore, timeout);
+ if (kernErr == KERN_OPERATION_TIMED_OUT) {
+ int32_t signal = AtomicGet32(&_captureDeviceIsAlive);
+ if (signal == 0) {
+ // The capture device is no longer alive; stop the worker thread.
+ *numberDataPackets = 0;
+ return 1;
+ }
+ } else if (kernErr != KERN_SUCCESS) {
+ WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+ " semaphore_wait() error: %d", kernErr);
}
+ }
- int8_t playBuffer[4 * ENGINE_PLAY_BUF_SIZE_IN_SAMPLES];
-
- if (!_ptrAudioBuffer)
- {
- WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
- " capture AudioBuffer is invalid");
+ // Pass the read pointer directly to the converter to avoid a memcpy.
+ void* dummyPtr;
+ PaRingBufferSize dummySize;
+ PaUtil_GetRingBufferReadRegions(_paCaptureBuffer, numSamples,
+ &data->mBuffers->mData, &numSamples,
+ &dummyPtr, &dummySize);
+ PaUtil_AdvanceRingBufferReadIndex(_paCaptureBuffer, numSamples);
+
+ data->mBuffers->mNumberChannels = _inStreamFormat.mChannelsPerFrame;
+ *numberDataPackets = numSamples / _inStreamFormat.mChannelsPerFrame;
+ data->mBuffers->mDataByteSize =
+ *numberDataPackets * _inStreamFormat.mBytesPerPacket;
+
+ return 0;
+}
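PaUtil_GetRingBufferReadRegions above hands the converter a pointer straight into the ring storage, avoiding a memcpy; the trade-off is that only the first contiguous region is consumed, so numSamples may shrink at the wrap point. The same idea against a toy ring buffer (the toy types are assumptions, not the PortAudio API):

#include <cstdio>

// Given a ring of capacity cap with readIdx and avail samples pending,
// return a pointer to the first contiguous run and its length.
const short* FirstRegion(const short* buf, int cap, int readIdx, int avail,
                         int* outLen) {
  int untilWrap = cap - readIdx;
  *outLen = avail < untilWrap ? avail : untilWrap;
  return buf + readIdx;
}

int main() {
  short ring[8] = {0, 0, 0, 0, 0, 10, 11, 12};
  int len = 0;
  // 5 samples pending from index 5; the run is cut to 3 by the wrap at 8.
  const short* p = FirstRegion(ring, 8, 5, 5, &len);
  std::printf("contiguous run = %d samples, first = %d\n", len, p[0]);
}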
+
+bool AudioDeviceMac::RunRender(void* ptrThis) {
+ return static_cast<AudioDeviceMac*>(ptrThis)->RenderWorkerThread();
+}
+
+bool AudioDeviceMac::RenderWorkerThread() {
+ PaRingBufferSize numSamples =
+ ENGINE_PLAY_BUF_SIZE_IN_SAMPLES * _outDesiredFormat.mChannelsPerFrame;
+ while (PaUtil_GetRingBufferWriteAvailable(_paRenderBuffer) -
+ _renderDelayOffsetSamples <
+ numSamples) {
+ mach_timespec_t timeout;
+ timeout.tv_sec = 0;
+ timeout.tv_nsec = TIMER_PERIOD_MS;
+
+ kern_return_t kernErr = semaphore_timedwait(_renderSemaphore, timeout);
+ if (kernErr == KERN_OPERATION_TIMED_OUT) {
+ int32_t signal = AtomicGet32(&_renderDeviceIsAlive);
+ if (signal == 0) {
+ // The render device is no longer alive; stop the worker thread.
return false;
+ }
+ } else if (kernErr != KERN_SUCCESS) {
+ WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+ " semaphore_timedwait() error: %d", kernErr);
}
+ }
- // Ask for new PCM data to be played out using the AudioDeviceBuffer.
- uint32_t nSamples =
- _ptrAudioBuffer->RequestPlayoutData(ENGINE_PLAY_BUF_SIZE_IN_SAMPLES);
+ int8_t playBuffer[4 * ENGINE_PLAY_BUF_SIZE_IN_SAMPLES];
- nSamples = _ptrAudioBuffer->GetPlayoutData(playBuffer);
- if (nSamples != ENGINE_PLAY_BUF_SIZE_IN_SAMPLES)
- {
- WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
- " invalid number of output samples(%d)", nSamples);
- }
+ if (!_ptrAudioBuffer) {
+ WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+ " capture AudioBuffer is invalid");
+ return false;
+ }
- uint32_t nOutSamples = nSamples * _outDesiredFormat.mChannelsPerFrame;
-
- SInt16 *pPlayBuffer = (SInt16 *) &playBuffer;
- if (_macBookProPanRight && (_playChannels == 2))
- {
- // Mix entirely into the right channel and zero the left channel.
- SInt32 sampleInt32 = 0;
- for (uint32_t sampleIdx = 0; sampleIdx < nOutSamples; sampleIdx
- += 2)
- {
- sampleInt32 = pPlayBuffer[sampleIdx];
- sampleInt32 += pPlayBuffer[sampleIdx + 1];
- sampleInt32 /= 2;
-
- if (sampleInt32 > 32767)
- {
- sampleInt32 = 32767;
- } else if (sampleInt32 < -32768)
- {
- sampleInt32 = -32768;
- }
-
- pPlayBuffer[sampleIdx] = 0;
- pPlayBuffer[sampleIdx + 1] = static_cast<SInt16> (sampleInt32);
- }
+ // Ask for new PCM data to be played out using the AudioDeviceBuffer.
+ uint32_t nSamples =
+ _ptrAudioBuffer->RequestPlayoutData(ENGINE_PLAY_BUF_SIZE_IN_SAMPLES);
+
+ nSamples = _ptrAudioBuffer->GetPlayoutData(playBuffer);
+ if (nSamples != ENGINE_PLAY_BUF_SIZE_IN_SAMPLES) {
+ WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                 " invalid number of output samples (%d)", nSamples);
+ }
+
+ uint32_t nOutSamples = nSamples * _outDesiredFormat.mChannelsPerFrame;
+
+ SInt16* pPlayBuffer = (SInt16*)&playBuffer;
+ if (_macBookProPanRight && (_playChannels == 2)) {
+ // Mix entirely into the right channel and zero the left channel.
+ SInt32 sampleInt32 = 0;
+ for (uint32_t sampleIdx = 0; sampleIdx < nOutSamples; sampleIdx += 2) {
+ sampleInt32 = pPlayBuffer[sampleIdx];
+ sampleInt32 += pPlayBuffer[sampleIdx + 1];
+ sampleInt32 /= 2;
+
+ if (sampleInt32 > 32767) {
+ sampleInt32 = 32767;
+ } else if (sampleInt32 < -32768) {
+ sampleInt32 = -32768;
+ }
+
+ pPlayBuffer[sampleIdx] = 0;
+ pPlayBuffer[sampleIdx + 1] = static_cast<SInt16>(sampleInt32);
}
+ }
- PaUtil_WriteRingBuffer(_paRenderBuffer, pPlayBuffer, nOutSamples);
+ PaUtil_WriteRingBuffer(_paRenderBuffer, pPlayBuffer, nOutSamples);
- return true;
+ return true;
}
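The pan-right path above averages the left and right samples, saturates the result to the int16 range, and writes it to the right slot while zeroing the left. Worked numbers, as a tiny sketch:

#include <cstdint>
#include <cstdio>

int16_t MixPair(int16_t left, int16_t right) {
  int32_t s = (static_cast<int32_t>(left) + right) / 2;  // widen, then average
  if (s > 32767) s = 32767;
  if (s < -32768) s = -32768;  // clamping keeps the narrowing cast defined
  return static_cast<int16_t>(s);
}

int main() {
  // e.g. 30000 and 10000 average to 20000, which goes to the right channel.
  std::printf("%d\n", MixPair(30000, 10000));  // 20000
}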
-bool AudioDeviceMac::RunCapture(void* ptrThis)
-{
- return static_cast<AudioDeviceMac*> (ptrThis)->CaptureWorkerThread();
+bool AudioDeviceMac::RunCapture(void* ptrThis) {
+ return static_cast<AudioDeviceMac*>(ptrThis)->CaptureWorkerThread();
}
-bool AudioDeviceMac::CaptureWorkerThread()
-{
- OSStatus err = noErr;
- UInt32 noRecSamples = ENGINE_REC_BUF_SIZE_IN_SAMPLES
- * _inDesiredFormat.mChannelsPerFrame;
- SInt16 recordBuffer[noRecSamples];
- UInt32 size = ENGINE_REC_BUF_SIZE_IN_SAMPLES;
-
- AudioBufferList engineBuffer;
- engineBuffer.mNumberBuffers = 1; // Interleaved channels.
- engineBuffer.mBuffers->mNumberChannels = _inDesiredFormat.mChannelsPerFrame;
- engineBuffer.mBuffers->mDataByteSize = _inDesiredFormat.mBytesPerPacket
- * noRecSamples;
- engineBuffer.mBuffers->mData = recordBuffer;
-
- err = AudioConverterFillComplexBuffer(_captureConverter, inConverterProc,
- this, &size, &engineBuffer, NULL);
- if (err != noErr)
- {
- if (err == 1)
- {
- // This is our own error.
- return false;
- } else
- {
- logCAMsg(kTraceError, kTraceAudioDevice, _id,
- "Error in AudioConverterFillComplexBuffer()",
- (const char *) &err);
- return false;
- }
+bool AudioDeviceMac::CaptureWorkerThread() {
+ OSStatus err = noErr;
+ UInt32 noRecSamples =
+ ENGINE_REC_BUF_SIZE_IN_SAMPLES * _inDesiredFormat.mChannelsPerFrame;
+ SInt16 recordBuffer[noRecSamples];
+ UInt32 size = ENGINE_REC_BUF_SIZE_IN_SAMPLES;
+
+ AudioBufferList engineBuffer;
+ engineBuffer.mNumberBuffers = 1; // Interleaved channels.
+ engineBuffer.mBuffers->mNumberChannels = _inDesiredFormat.mChannelsPerFrame;
+ engineBuffer.mBuffers->mDataByteSize =
+ _inDesiredFormat.mBytesPerPacket * noRecSamples;
+ engineBuffer.mBuffers->mData = recordBuffer;
+
+ err = AudioConverterFillComplexBuffer(_captureConverter, inConverterProc,
+ this, &size, &engineBuffer, NULL);
+ if (err != noErr) {
+ if (err == 1) {
+ // This is our own error.
+ return false;
+ } else {
+ logCAMsg(kTraceError, kTraceAudioDevice, _id,
+ "Error in AudioConverterFillComplexBuffer()", (const char*)&err);
+ return false;
}
+ }
- // TODO(xians): what if the returned size is incorrect?
- if (size == ENGINE_REC_BUF_SIZE_IN_SAMPLES)
- {
- uint32_t currentMicLevel(0);
- uint32_t newMicLevel(0);
- int32_t msecOnPlaySide;
- int32_t msecOnRecordSide;
-
- int32_t captureDelayUs = AtomicGet32(&_captureDelayUs);
- int32_t renderDelayUs = AtomicGet32(&_renderDelayUs);
-
- msecOnPlaySide = static_cast<int32_t> (1e-3 * (renderDelayUs +
- _renderLatencyUs) + 0.5);
- msecOnRecordSide = static_cast<int32_t> (1e-3 * (captureDelayUs +
- _captureLatencyUs) +
- 0.5);
-
- if (!_ptrAudioBuffer)
- {
- WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
- " capture AudioBuffer is invalid");
- return false;
- }
+ // TODO(xians): what if the returned size is incorrect?
+ if (size == ENGINE_REC_BUF_SIZE_IN_SAMPLES) {
+ uint32_t currentMicLevel(0);
+ uint32_t newMicLevel(0);
+ int32_t msecOnPlaySide;
+ int32_t msecOnRecordSide;
- // store the recorded buffer (no action will be taken if the
- // #recorded samples is not a full buffer)
- _ptrAudioBuffer->SetRecordedBuffer((int8_t*) &recordBuffer,
- (uint32_t) size);
-
- if (AGC())
- {
- // store current mic level in the audio buffer if AGC is enabled
- if (MicrophoneVolume(currentMicLevel) == 0)
- {
- // this call does not affect the actual microphone volume
- _ptrAudioBuffer->SetCurrentMicLevel(currentMicLevel);
- }
- }
+ int32_t captureDelayUs = AtomicGet32(&_captureDelayUs);
+ int32_t renderDelayUs = AtomicGet32(&_renderDelayUs);
- _ptrAudioBuffer->SetVQEData(msecOnPlaySide, msecOnRecordSide, 0);
-
- _ptrAudioBuffer->SetTypingStatus(KeyPressed());
-
- // deliver recorded samples at specified sample rate, mic level etc.
- // to the observer using callback
- _ptrAudioBuffer->DeliverRecordedData();
-
- if (AGC())
- {
- newMicLevel = _ptrAudioBuffer->NewMicLevel();
- if (newMicLevel != 0)
- {
- // The VQE will only deliver non-zero microphone levels when
- // a change is needed.
- // Set this new mic level (received from the observer as return
- // value in the callback).
- WEBRTC_TRACE(kTraceStream, kTraceAudioDevice,
- _id, " AGC change of volume: old=%u => new=%u",
- currentMicLevel, newMicLevel);
- if (SetMicrophoneVolume(newMicLevel) == -1)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " the required modification of the microphone "
- "volume failed");
- }
- }
+ msecOnPlaySide =
+ static_cast<int32_t>(1e-3 * (renderDelayUs + _renderLatencyUs) + 0.5);
+ msecOnRecordSide =
+ static_cast<int32_t>(1e-3 * (captureDelayUs + _captureLatencyUs) + 0.5);
+
+ if (!_ptrAudioBuffer) {
+ WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+ " capture AudioBuffer is invalid");
+ return false;
+ }
+
+ // store the recorded buffer (no action will be taken if the
+ // #recorded samples is not a full buffer)
+ _ptrAudioBuffer->SetRecordedBuffer((int8_t*)&recordBuffer, (uint32_t)size);
+
+ if (AGC()) {
+ // Use mod to ensure we check the volume on the first pass.
+ if (get_mic_volume_counter_ms_ % kGetMicVolumeIntervalMs == 0) {
+ get_mic_volume_counter_ms_ = 0;
+ // store current mic level in the audio buffer if AGC is enabled
+ if (MicrophoneVolume(currentMicLevel) == 0) {
+ // this call does not affect the actual microphone volume
+ _ptrAudioBuffer->SetCurrentMicLevel(currentMicLevel);
}
+ }
+ get_mic_volume_counter_ms_ += kBufferSizeMs;
+ }
+
+ _ptrAudioBuffer->SetVQEData(msecOnPlaySide, msecOnRecordSide, 0);
+
+ _ptrAudioBuffer->SetTypingStatus(KeyPressed());
+
+ // deliver recorded samples at specified sample rate, mic level etc.
+ // to the observer using callback
+ _ptrAudioBuffer->DeliverRecordedData();
+
+ if (AGC()) {
+ newMicLevel = _ptrAudioBuffer->NewMicLevel();
+ if (newMicLevel != 0) {
+ // The VQE will only deliver non-zero microphone levels when
+ // a change is needed.
+ // Set this new mic level (received from the observer as return
+ // value in the callback).
+ WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id,
+ " AGC change of volume: old=%u => new=%u",
+ currentMicLevel, newMicLevel);
+ if (SetMicrophoneVolume(newMicLevel) == -1) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " the required modification of the microphone "
+ "volume failed");
+ }
+ }
}
+ }
- return true;
+ return true;
}
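The mic-volume poll above runs once per kGetMicVolumeIntervalMs, advanced in kBufferSizeMs steps; starting the counter at 0 makes the mod test fire on the very first buffer. A sketch of that cadence, assuming the header's 10 ms / 1000 ms values:

#include <cstdio>

int main() {
  const int kBufferSizeMs = 10;
  const int kGetMicVolumeIntervalMs = 1000;
  int counterMs = 0;
  int polls = 0;
  for (int buf = 0; buf < 200; ++buf) {  // 200 buffers = 2 seconds of audio
    if (counterMs % kGetMicVolumeIntervalMs == 0) {
      counterMs = 0;
      ++polls;  // MicrophoneVolume() would be queried here
    }
    counterMs += kBufferSizeMs;
  }
  std::printf("polled %d times in 2 s\n", polls);  // 2: at t = 0 and t = 1 s
}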
bool AudioDeviceMac::KeyPressed() {
bool key_down = false;
// Loop through all Mac virtual key constant values.
- for (unsigned int key_index = 0;
- key_index < arraysize(prev_key_state_);
- ++key_index) {
- bool keyState = CGEventSourceKeyState(
- kCGEventSourceStateHIDSystemState,
- key_index);
+ for (unsigned int key_index = 0; key_index < arraysize(prev_key_state_);
+ ++key_index) {
+ bool keyState =
+ CGEventSourceKeyState(kCGEventSourceStateHIDSystemState, key_index);
// A false -> true change in keymap means a key is pressed.
key_down |= (keyState && !prev_key_state_[key_index]);
// Save current state.
diff --git a/webrtc/modules/audio_device/mac/audio_device_mac.h b/webrtc/modules/audio_device/mac/audio_device_mac.h
index f2b66b4d06..ca3a51997d 100644
--- a/webrtc/modules/audio_device/mac/audio_device_mac.h
+++ b/webrtc/modules/audio_device/mac/audio_device_mac.h
@@ -23,350 +23,359 @@
struct PaUtilRingBuffer;
-namespace webrtc
-{
+namespace rtc {
+class PlatformThread;
+} // namespace rtc
+
+namespace webrtc {
class EventWrapper;
-class ThreadWrapper;
const uint32_t N_REC_SAMPLES_PER_SEC = 48000;
const uint32_t N_PLAY_SAMPLES_PER_SEC = 48000;
-const uint32_t N_REC_CHANNELS = 1; // default is mono recording
-const uint32_t N_PLAY_CHANNELS = 2; // default is stereo playout
+const uint32_t N_REC_CHANNELS = 1; // default is mono recording
+const uint32_t N_PLAY_CHANNELS = 2; // default is stereo playout
const uint32_t N_DEVICE_CHANNELS = 64;
-const uint32_t ENGINE_REC_BUF_SIZE_IN_SAMPLES = (N_REC_SAMPLES_PER_SEC / 100);
-const uint32_t ENGINE_PLAY_BUF_SIZE_IN_SAMPLES = (N_PLAY_SAMPLES_PER_SEC / 100);
+const int kBufferSizeMs = 10;
+
+const uint32_t ENGINE_REC_BUF_SIZE_IN_SAMPLES =
+ N_REC_SAMPLES_PER_SEC * kBufferSizeMs / 1000;
+const uint32_t ENGINE_PLAY_BUF_SIZE_IN_SAMPLES =
+ N_PLAY_SAMPLES_PER_SEC * kBufferSizeMs / 1000;
const int N_BLOCKS_IO = 2;
-const int N_BUFFERS_IN = 2; // Must be at least N_BLOCKS_IO.
+const int N_BUFFERS_IN = 2; // Must be at least N_BLOCKS_IO.
const int N_BUFFERS_OUT = 3; // Must be at least N_BLOCKS_IO.
-const uint32_t TIMER_PERIOD_MS = (2 * 10 * N_BLOCKS_IO * 1000000);
+const uint32_t TIMER_PERIOD_MS = 2 * 10 * N_BLOCKS_IO * 1000000;
const uint32_t REC_BUF_SIZE_IN_SAMPLES =
ENGINE_REC_BUF_SIZE_IN_SAMPLES * N_DEVICE_CHANNELS * N_BUFFERS_IN;
const uint32_t PLAY_BUF_SIZE_IN_SAMPLES =
ENGINE_PLAY_BUF_SIZE_IN_SAMPLES * N_PLAY_CHANNELS * N_BUFFERS_OUT;
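At 48 kHz, the 10 ms engine buffers above come out to 480 samples per channel, and the ring buffers scale that by worst-case channel count and buffering depth. The arithmetic restated as compile-time checks, values copied from this header (illustrative only):

static_assert(48000 * 10 / 1000 == 480,
              "10 ms at 48 kHz is 480 samples per channel");
static_assert(480 * 64 * 2 == 61440,
              "capture ring: engine block * 64 device channels * 2 buffers");
static_assert(480 * 2 * 3 == 2880,
              "render ring: engine block * stereo * 3 buffers");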
-class AudioDeviceMac: public AudioDeviceGeneric
-{
-public:
- AudioDeviceMac(const int32_t id);
- ~AudioDeviceMac();
-
- // Retrieve the currently utilized audio layer
- virtual int32_t
- ActiveAudioLayer(AudioDeviceModule::AudioLayer& audioLayer) const;
-
- // Main initializaton and termination
- virtual int32_t Init();
- virtual int32_t Terminate();
- virtual bool Initialized() const;
-
- // Device enumeration
- virtual int16_t PlayoutDevices();
- virtual int16_t RecordingDevices();
- virtual int32_t PlayoutDeviceName(
- uint16_t index,
- char name[kAdmMaxDeviceNameSize],
- char guid[kAdmMaxGuidSize]);
- virtual int32_t RecordingDeviceName(
- uint16_t index,
- char name[kAdmMaxDeviceNameSize],
- char guid[kAdmMaxGuidSize]);
-
- // Device selection
- virtual int32_t SetPlayoutDevice(uint16_t index);
- virtual int32_t SetPlayoutDevice(
- AudioDeviceModule::WindowsDeviceType device);
- virtual int32_t SetRecordingDevice(uint16_t index);
- virtual int32_t SetRecordingDevice(
- AudioDeviceModule::WindowsDeviceType device);
-
- // Audio transport initialization
- virtual int32_t PlayoutIsAvailable(bool& available);
- virtual int32_t InitPlayout();
- virtual bool PlayoutIsInitialized() const;
- virtual int32_t RecordingIsAvailable(bool& available);
- virtual int32_t InitRecording();
- virtual bool RecordingIsInitialized() const;
-
- // Audio transport control
- virtual int32_t StartPlayout();
- virtual int32_t StopPlayout();
- virtual bool Playing() const;
- virtual int32_t StartRecording();
- virtual int32_t StopRecording();
- virtual bool Recording() const;
-
- // Microphone Automatic Gain Control (AGC)
- virtual int32_t SetAGC(bool enable);
- virtual bool AGC() const;
-
- // Volume control based on the Windows Wave API (Windows only)
- virtual int32_t SetWaveOutVolume(uint16_t volumeLeft, uint16_t volumeRight);
- virtual int32_t WaveOutVolume(uint16_t& volumeLeft,
- uint16_t& volumeRight) const;
-
- // Audio mixer initialization
- virtual int32_t InitSpeaker();
- virtual bool SpeakerIsInitialized() const;
- virtual int32_t InitMicrophone();
- virtual bool MicrophoneIsInitialized() const;
-
- // Speaker volume controls
- virtual int32_t SpeakerVolumeIsAvailable(bool& available);
- virtual int32_t SetSpeakerVolume(uint32_t volume);
- virtual int32_t SpeakerVolume(uint32_t& volume) const;
- virtual int32_t MaxSpeakerVolume(uint32_t& maxVolume) const;
- virtual int32_t MinSpeakerVolume(uint32_t& minVolume) const;
- virtual int32_t SpeakerVolumeStepSize(uint16_t& stepSize) const;
-
- // Microphone volume controls
- virtual int32_t MicrophoneVolumeIsAvailable(bool& available);
- virtual int32_t SetMicrophoneVolume(uint32_t volume);
- virtual int32_t MicrophoneVolume(uint32_t& volume) const;
- virtual int32_t MaxMicrophoneVolume(uint32_t& maxVolume) const;
- virtual int32_t MinMicrophoneVolume(uint32_t& minVolume) const;
- virtual int32_t
- MicrophoneVolumeStepSize(uint16_t& stepSize) const;
-
- // Microphone mute control
- virtual int32_t MicrophoneMuteIsAvailable(bool& available);
- virtual int32_t SetMicrophoneMute(bool enable);
- virtual int32_t MicrophoneMute(bool& enabled) const;
-
- // Speaker mute control
- virtual int32_t SpeakerMuteIsAvailable(bool& available);
- virtual int32_t SetSpeakerMute(bool enable);
- virtual int32_t SpeakerMute(bool& enabled) const;
-
- // Microphone boost control
- virtual int32_t MicrophoneBoostIsAvailable(bool& available);
- virtual int32_t SetMicrophoneBoost(bool enable);
- virtual int32_t MicrophoneBoost(bool& enabled) const;
-
- // Stereo support
- virtual int32_t StereoPlayoutIsAvailable(bool& available);
- virtual int32_t SetStereoPlayout(bool enable);
- virtual int32_t StereoPlayout(bool& enabled) const;
- virtual int32_t StereoRecordingIsAvailable(bool& available);
- virtual int32_t SetStereoRecording(bool enable);
- virtual int32_t StereoRecording(bool& enabled) const;
-
- // Delay information and control
- virtual int32_t
- SetPlayoutBuffer(const AudioDeviceModule::BufferType type,
- uint16_t sizeMS);
- virtual int32_t PlayoutBuffer(AudioDeviceModule::BufferType& type,
- uint16_t& sizeMS) const;
- virtual int32_t PlayoutDelay(uint16_t& delayMS) const;
- virtual int32_t RecordingDelay(uint16_t& delayMS) const;
-
- // CPU load
- virtual int32_t CPULoad(uint16_t& load) const;
-
- virtual bool PlayoutWarning() const;
- virtual bool PlayoutError() const;
- virtual bool RecordingWarning() const;
- virtual bool RecordingError() const;
- virtual void ClearPlayoutWarning();
- virtual void ClearPlayoutError();
- virtual void ClearRecordingWarning();
- virtual void ClearRecordingError();
-
- virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);
-
-private:
- virtual int32_t MicrophoneIsAvailable(bool& available);
- virtual int32_t SpeakerIsAvailable(bool& available);
-
- static void AtomicSet32(int32_t* theValue, int32_t newValue);
- static int32_t AtomicGet32(int32_t* theValue);
-
- static void logCAMsg(const TraceLevel level,
- const TraceModule module,
- const int32_t id, const char *msg,
- const char *err);
-
- int32_t GetNumberDevices(const AudioObjectPropertyScope scope,
- AudioDeviceID scopedDeviceIds[],
- const uint32_t deviceListLength);
-
- int32_t GetDeviceName(const AudioObjectPropertyScope scope,
- const uint16_t index, char* name);
-
- int32_t InitDevice(uint16_t userDeviceIndex,
- AudioDeviceID& deviceId, bool isInput);
-
- // Always work with our preferred playout format inside VoE.
- // Then convert the output to the OS setting using an AudioConverter.
- OSStatus SetDesiredPlayoutFormat();
-
- static OSStatus
- objectListenerProc(AudioObjectID objectId, UInt32 numberAddresses,
- const AudioObjectPropertyAddress addresses[],
- void* clientData);
-
- OSStatus
- implObjectListenerProc(AudioObjectID objectId, UInt32 numberAddresses,
- const AudioObjectPropertyAddress addresses[]);
-
- int32_t HandleDeviceChange();
-
- int32_t
- HandleStreamFormatChange(AudioObjectID objectId,
+const int kGetMicVolumeIntervalMs = 1000;
+
+class AudioDeviceMac : public AudioDeviceGeneric {
+ public:
+ AudioDeviceMac(const int32_t id);
+ ~AudioDeviceMac();
+
+ // Retrieve the currently utilized audio layer
+ virtual int32_t ActiveAudioLayer(
+ AudioDeviceModule::AudioLayer& audioLayer) const;
+
+ // Main initialization and termination
+ virtual int32_t Init();
+ virtual int32_t Terminate();
+ virtual bool Initialized() const;
+
+ // Device enumeration
+ virtual int16_t PlayoutDevices();
+ virtual int16_t RecordingDevices();
+ virtual int32_t PlayoutDeviceName(uint16_t index,
+ char name[kAdmMaxDeviceNameSize],
+ char guid[kAdmMaxGuidSize]);
+ virtual int32_t RecordingDeviceName(uint16_t index,
+ char name[kAdmMaxDeviceNameSize],
+ char guid[kAdmMaxGuidSize]);
+
+ // Device selection
+ virtual int32_t SetPlayoutDevice(uint16_t index);
+ virtual int32_t SetPlayoutDevice(AudioDeviceModule::WindowsDeviceType device);
+ virtual int32_t SetRecordingDevice(uint16_t index);
+ virtual int32_t SetRecordingDevice(
+ AudioDeviceModule::WindowsDeviceType device);
+
+ // Audio transport initialization
+ virtual int32_t PlayoutIsAvailable(bool& available);
+ virtual int32_t InitPlayout();
+ virtual bool PlayoutIsInitialized() const;
+ virtual int32_t RecordingIsAvailable(bool& available);
+ virtual int32_t InitRecording();
+ virtual bool RecordingIsInitialized() const;
+
+ // Audio transport control
+ virtual int32_t StartPlayout();
+ virtual int32_t StopPlayout();
+ virtual bool Playing() const;
+ virtual int32_t StartRecording();
+ virtual int32_t StopRecording();
+ virtual bool Recording() const;
+
+ // Microphone Automatic Gain Control (AGC)
+ virtual int32_t SetAGC(bool enable);
+ virtual bool AGC() const;
+
+ // Volume control based on the Windows Wave API (Windows only)
+ virtual int32_t SetWaveOutVolume(uint16_t volumeLeft, uint16_t volumeRight);
+ virtual int32_t WaveOutVolume(uint16_t& volumeLeft,
+ uint16_t& volumeRight) const;
+
+ // Audio mixer initialization
+ virtual int32_t InitSpeaker();
+ virtual bool SpeakerIsInitialized() const;
+ virtual int32_t InitMicrophone();
+ virtual bool MicrophoneIsInitialized() const;
+
+ // Speaker volume controls
+ virtual int32_t SpeakerVolumeIsAvailable(bool& available);
+ virtual int32_t SetSpeakerVolume(uint32_t volume);
+ virtual int32_t SpeakerVolume(uint32_t& volume) const;
+ virtual int32_t MaxSpeakerVolume(uint32_t& maxVolume) const;
+ virtual int32_t MinSpeakerVolume(uint32_t& minVolume) const;
+ virtual int32_t SpeakerVolumeStepSize(uint16_t& stepSize) const;
+
+ // Microphone volume controls
+ virtual int32_t MicrophoneVolumeIsAvailable(bool& available);
+ virtual int32_t SetMicrophoneVolume(uint32_t volume);
+ virtual int32_t MicrophoneVolume(uint32_t& volume) const;
+ virtual int32_t MaxMicrophoneVolume(uint32_t& maxVolume) const;
+ virtual int32_t MinMicrophoneVolume(uint32_t& minVolume) const;
+ virtual int32_t MicrophoneVolumeStepSize(uint16_t& stepSize) const;
+
+ // Microphone mute control
+ virtual int32_t MicrophoneMuteIsAvailable(bool& available);
+ virtual int32_t SetMicrophoneMute(bool enable);
+ virtual int32_t MicrophoneMute(bool& enabled) const;
+
+ // Speaker mute control
+ virtual int32_t SpeakerMuteIsAvailable(bool& available);
+ virtual int32_t SetSpeakerMute(bool enable);
+ virtual int32_t SpeakerMute(bool& enabled) const;
+
+ // Microphone boost control
+ virtual int32_t MicrophoneBoostIsAvailable(bool& available);
+ virtual int32_t SetMicrophoneBoost(bool enable);
+ virtual int32_t MicrophoneBoost(bool& enabled) const;
+
+ // Stereo support
+ virtual int32_t StereoPlayoutIsAvailable(bool& available);
+ virtual int32_t SetStereoPlayout(bool enable);
+ virtual int32_t StereoPlayout(bool& enabled) const;
+ virtual int32_t StereoRecordingIsAvailable(bool& available);
+ virtual int32_t SetStereoRecording(bool enable);
+ virtual int32_t StereoRecording(bool& enabled) const;
+
+ // Delay information and control
+ virtual int32_t SetPlayoutBuffer(const AudioDeviceModule::BufferType type,
+ uint16_t sizeMS);
+ virtual int32_t PlayoutBuffer(AudioDeviceModule::BufferType& type,
+ uint16_t& sizeMS) const;
+ virtual int32_t PlayoutDelay(uint16_t& delayMS) const;
+ virtual int32_t RecordingDelay(uint16_t& delayMS) const;
+
+ // CPU load
+ virtual int32_t CPULoad(uint16_t& load) const;
+
+ virtual bool PlayoutWarning() const;
+ virtual bool PlayoutError() const;
+ virtual bool RecordingWarning() const;
+ virtual bool RecordingError() const;
+ virtual void ClearPlayoutWarning();
+ virtual void ClearPlayoutError();
+ virtual void ClearRecordingWarning();
+ virtual void ClearRecordingError();
+
+ virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);
+
+ private:
+ virtual int32_t MicrophoneIsAvailable(bool& available);
+ virtual int32_t SpeakerIsAvailable(bool& available);
+
+ static void AtomicSet32(int32_t* theValue, int32_t newValue);
+ static int32_t AtomicGet32(int32_t* theValue);
+
+ static void logCAMsg(const TraceLevel level,
+ const TraceModule module,
+ const int32_t id,
+ const char* msg,
+ const char* err);
+
+ int32_t GetNumberDevices(const AudioObjectPropertyScope scope,
+ AudioDeviceID scopedDeviceIds[],
+ const uint32_t deviceListLength);
+
+ int32_t GetDeviceName(const AudioObjectPropertyScope scope,
+ const uint16_t index,
+ char* name);
+
+ int32_t InitDevice(uint16_t userDeviceIndex,
+ AudioDeviceID& deviceId,
+ bool isInput);
+
+ // Always work with our preferred playout format inside VoE.
+ // Then convert the output to the OS setting using an AudioConverter.
+ OSStatus SetDesiredPlayoutFormat();
+
+ static OSStatus objectListenerProc(
+ AudioObjectID objectId,
+ UInt32 numberAddresses,
+ const AudioObjectPropertyAddress addresses[],
+ void* clientData);
+
+ OSStatus implObjectListenerProc(AudioObjectID objectId,
+ UInt32 numberAddresses,
+ const AudioObjectPropertyAddress addresses[]);
+
+ int32_t HandleDeviceChange();
+
+ int32_t HandleStreamFormatChange(AudioObjectID objectId,
+ AudioObjectPropertyAddress propertyAddress);
+
+ int32_t HandleDataSourceChange(AudioObjectID objectId,
AudioObjectPropertyAddress propertyAddress);
- int32_t
- HandleDataSourceChange(AudioObjectID objectId,
- AudioObjectPropertyAddress propertyAddress);
-
- int32_t
- HandleProcessorOverload(AudioObjectPropertyAddress propertyAddress);
-
- static OSStatus deviceIOProc(AudioDeviceID device,
- const AudioTimeStamp *now,
- const AudioBufferList *inputData,
- const AudioTimeStamp *inputTime,
- AudioBufferList *outputData,
+ int32_t HandleProcessorOverload(AudioObjectPropertyAddress propertyAddress);
+
+ static OSStatus deviceIOProc(AudioDeviceID device,
+ const AudioTimeStamp* now,
+ const AudioBufferList* inputData,
+ const AudioTimeStamp* inputTime,
+ AudioBufferList* outputData,
+ const AudioTimeStamp* outputTime,
+ void* clientData);
+
+ static OSStatus outConverterProc(
+ AudioConverterRef audioConverter,
+ UInt32* numberDataPackets,
+ AudioBufferList* data,
+ AudioStreamPacketDescription** dataPacketDescription,
+ void* userData);
+
+ static OSStatus inDeviceIOProc(AudioDeviceID device,
+ const AudioTimeStamp* now,
+ const AudioBufferList* inputData,
+ const AudioTimeStamp* inputTime,
+ AudioBufferList* outputData,
const AudioTimeStamp* outputTime,
- void *clientData);
+ void* clientData);
- static OSStatus
- outConverterProc(AudioConverterRef audioConverter,
- UInt32 *numberDataPackets, AudioBufferList *data,
- AudioStreamPacketDescription **dataPacketDescription,
- void *userData);
+ static OSStatus inConverterProc(
+ AudioConverterRef audioConverter,
+ UInt32* numberDataPackets,
+ AudioBufferList* data,
+ AudioStreamPacketDescription** dataPacketDescription,
+ void* inUserData);
- static OSStatus inDeviceIOProc(AudioDeviceID device,
- const AudioTimeStamp *now,
- const AudioBufferList *inputData,
- const AudioTimeStamp *inputTime,
- AudioBufferList *outputData,
- const AudioTimeStamp *outputTime,
- void *clientData);
+ OSStatus implDeviceIOProc(const AudioBufferList* inputData,
+ const AudioTimeStamp* inputTime,
+ AudioBufferList* outputData,
+ const AudioTimeStamp* outputTime);
- static OSStatus
- inConverterProc(AudioConverterRef audioConverter,
- UInt32 *numberDataPackets, AudioBufferList *data,
- AudioStreamPacketDescription **dataPacketDescription,
- void *inUserData);
+ OSStatus implOutConverterProc(UInt32* numberDataPackets,
+ AudioBufferList* data);
- OSStatus implDeviceIOProc(const AudioBufferList *inputData,
- const AudioTimeStamp *inputTime,
- AudioBufferList *outputData,
- const AudioTimeStamp *outputTime);
+ OSStatus implInDeviceIOProc(const AudioBufferList* inputData,
+ const AudioTimeStamp* inputTime);
- OSStatus implOutConverterProc(UInt32 *numberDataPackets,
- AudioBufferList *data);
+ OSStatus implInConverterProc(UInt32* numberDataPackets,
+ AudioBufferList* data);
- OSStatus implInDeviceIOProc(const AudioBufferList *inputData,
- const AudioTimeStamp *inputTime);
+ static bool RunCapture(void*);
+ static bool RunRender(void*);
+ bool CaptureWorkerThread();
+ bool RenderWorkerThread();
- OSStatus implInConverterProc(UInt32 *numberDataPackets,
- AudioBufferList *data);
+ bool KeyPressed();
- static bool RunCapture(void*);
- static bool RunRender(void*);
- bool CaptureWorkerThread();
- bool RenderWorkerThread();
+ AudioDeviceBuffer* _ptrAudioBuffer;
- bool KeyPressed();
+ CriticalSectionWrapper& _critSect;
- AudioDeviceBuffer* _ptrAudioBuffer;
+ EventWrapper& _stopEventRec;
+ EventWrapper& _stopEvent;
- CriticalSectionWrapper& _critSect;
+ // TODO(pbos): Replace with direct members, just start/stop, no need to
+ // recreate the thread.
+ // Only valid/running between calls to StartRecording and StopRecording.
+ rtc::scoped_ptr<rtc::PlatformThread> capture_worker_thread_;
- EventWrapper& _stopEventRec;
- EventWrapper& _stopEvent;
+ // Only valid/running between calls to StartPlayout and StopPlayout.
+ rtc::scoped_ptr<rtc::PlatformThread> render_worker_thread_;
- // Only valid/running between calls to StartRecording and StopRecording.
- rtc::scoped_ptr<ThreadWrapper> capture_worker_thread_;
+ int32_t _id;
- // Only valid/running between calls to StartPlayout and StopPlayout.
- rtc::scoped_ptr<ThreadWrapper> render_worker_thread_;
+ AudioMixerManagerMac _mixerManager;
- int32_t _id;
-
- AudioMixerManagerMac _mixerManager;
-
- uint16_t _inputDeviceIndex;
- uint16_t _outputDeviceIndex;
- AudioDeviceID _inputDeviceID;
- AudioDeviceID _outputDeviceID;
+ uint16_t _inputDeviceIndex;
+ uint16_t _outputDeviceIndex;
+ AudioDeviceID _inputDeviceID;
+ AudioDeviceID _outputDeviceID;
#if __MAC_OS_X_VERSION_MAX_ALLOWED >= 1050
- AudioDeviceIOProcID _inDeviceIOProcID;
- AudioDeviceIOProcID _deviceIOProcID;
+ AudioDeviceIOProcID _inDeviceIOProcID;
+ AudioDeviceIOProcID _deviceIOProcID;
#endif
- bool _inputDeviceIsSpecified;
- bool _outputDeviceIsSpecified;
+ bool _inputDeviceIsSpecified;
+ bool _outputDeviceIsSpecified;
+
+ uint8_t _recChannels;
+ uint8_t _playChannels;
- uint8_t _recChannels;
- uint8_t _playChannels;
+ Float32* _captureBufData;
+ SInt16* _renderBufData;
- Float32* _captureBufData;
- SInt16* _renderBufData;
+ SInt16 _renderConvertData[PLAY_BUF_SIZE_IN_SAMPLES];
- SInt16 _renderConvertData[PLAY_BUF_SIZE_IN_SAMPLES];
+ AudioDeviceModule::BufferType _playBufType;
- AudioDeviceModule::BufferType _playBufType;
+ bool _initialized;
+ bool _isShutDown;
+ bool _recording;
+ bool _playing;
+ bool _recIsInitialized;
+ bool _playIsInitialized;
+ bool _AGC;
- bool _initialized;
- bool _isShutDown;
- bool _recording;
- bool _playing;
- bool _recIsInitialized;
- bool _playIsInitialized;
- bool _AGC;
+ // Atomically set variables
+ int32_t _renderDeviceIsAlive;
+ int32_t _captureDeviceIsAlive;
- // Atomically set varaibles
- int32_t _renderDeviceIsAlive;
- int32_t _captureDeviceIsAlive;
+ bool _twoDevices;
+ bool _doStop; // For play if not shared device or play+rec if shared device
+ bool _doStopRec; // For rec if not shared device
+ bool _macBookPro;
+ bool _macBookProPanRight;
- bool _twoDevices;
- bool _doStop; // For play if not shared device or play+rec if shared device
- bool _doStopRec; // For rec if not shared device
- bool _macBookPro;
- bool _macBookProPanRight;
+ AudioConverterRef _captureConverter;
+ AudioConverterRef _renderConverter;
- AudioConverterRef _captureConverter;
- AudioConverterRef _renderConverter;
+ AudioStreamBasicDescription _outStreamFormat;
+ AudioStreamBasicDescription _outDesiredFormat;
+ AudioStreamBasicDescription _inStreamFormat;
+ AudioStreamBasicDescription _inDesiredFormat;
- AudioStreamBasicDescription _outStreamFormat;
- AudioStreamBasicDescription _outDesiredFormat;
- AudioStreamBasicDescription _inStreamFormat;
- AudioStreamBasicDescription _inDesiredFormat;
+ uint32_t _captureLatencyUs;
+ uint32_t _renderLatencyUs;
- uint32_t _captureLatencyUs;
- uint32_t _renderLatencyUs;
+ // Atomically set variables
+ mutable int32_t _captureDelayUs;
+ mutable int32_t _renderDelayUs;
- // Atomically set variables
- mutable int32_t _captureDelayUs;
- mutable int32_t _renderDelayUs;
+ int32_t _renderDelayOffsetSamples;
- int32_t _renderDelayOffsetSamples;
+ uint16_t _playBufDelayFixed; // fixed playback delay
- uint16_t _playBufDelayFixed; // fixed playback delay
+ uint16_t _playWarning;
+ uint16_t _playError;
+ uint16_t _recWarning;
+ uint16_t _recError;
- uint16_t _playWarning;
- uint16_t _playError;
- uint16_t _recWarning;
- uint16_t _recError;
+ PaUtilRingBuffer* _paCaptureBuffer;
+ PaUtilRingBuffer* _paRenderBuffer;
- PaUtilRingBuffer* _paCaptureBuffer;
- PaUtilRingBuffer* _paRenderBuffer;
+ semaphore_t _renderSemaphore;
+ semaphore_t _captureSemaphore;
- semaphore_t _renderSemaphore;
- semaphore_t _captureSemaphore;
+ int _captureBufSizeSamples;
+ int _renderBufSizeSamples;
- int _captureBufSizeSamples;
- int _renderBufSizeSamples;
+ // Typing detection
+ // 0x5c is key "9"; after that come the function keys.
+ bool prev_key_state_[0x5d];
- // Typing detection
- // 0x5c is key "9", after that comes function keys.
- bool prev_key_state_[0x5d];
+ int get_mic_volume_counter_ms_;
};
} // namespace webrtc
diff --git a/webrtc/modules/audio_device/mac/audio_mixer_manager_mac.cc b/webrtc/modules/audio_device/mac/audio_mixer_manager_mac.cc
index 9016ffe508..e7e0754695 100644
--- a/webrtc/modules/audio_device/mac/audio_mixer_manager_mac.cc
+++ b/webrtc/modules/audio_device/mac/audio_mixer_manager_mac.cc
@@ -11,1134 +11,989 @@
#include "webrtc/modules/audio_device/mac/audio_mixer_manager_mac.h"
#include "webrtc/system_wrappers/include/trace.h"
-#include <unistd.h> // getpid()
+#include <unistd.h> // getpid()
namespace webrtc {
-
-#define WEBRTC_CA_RETURN_ON_ERR(expr) \
- do { \
- err = expr; \
- if (err != noErr) { \
- logCAMsg(kTraceError, kTraceAudioDevice, _id, \
- "Error in " #expr, (const char *)&err); \
- return -1; \
- } \
- } while(0)
-
-#define WEBRTC_CA_LOG_ERR(expr) \
- do { \
- err = expr; \
- if (err != noErr) { \
- logCAMsg(kTraceError, kTraceAudioDevice, _id, \
- "Error in " #expr, (const char *)&err); \
- } \
- } while(0)
-
-#define WEBRTC_CA_LOG_WARN(expr) \
- do { \
- err = expr; \
- if (err != noErr) { \
- logCAMsg(kTraceWarning, kTraceAudioDevice, _id, \
- "Error in " #expr, (const char *)&err); \
- } \
- } while(0)
-
-AudioMixerManagerMac::AudioMixerManagerMac(const int32_t id) :
- _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
- _id(id),
- _inputDeviceID(kAudioObjectUnknown),
- _outputDeviceID(kAudioObjectUnknown),
- _noInputChannels(0),
- _noOutputChannels(0)
-{
- WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id,
- "%s constructed", __FUNCTION__);
-}
-
-AudioMixerManagerMac::~AudioMixerManagerMac()
-{
- WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id,
- "%s destructed", __FUNCTION__);
- Close();
-
- delete &_critSect;
+#define WEBRTC_CA_RETURN_ON_ERR(expr) \
+ do { \
+ err = expr; \
+ if (err != noErr) { \
+ logCAMsg(kTraceError, kTraceAudioDevice, _id, "Error in " #expr, \
+ (const char*) & err); \
+ return -1; \
+ } \
+ } while (0)
+
+#define WEBRTC_CA_LOG_ERR(expr) \
+ do { \
+ err = expr; \
+ if (err != noErr) { \
+ logCAMsg(kTraceError, kTraceAudioDevice, _id, "Error in " #expr, \
+ (const char*) & err); \
+ } \
+ } while (0)
+
+#define WEBRTC_CA_LOG_WARN(expr) \
+ do { \
+ err = expr; \
+ if (err != noErr) { \
+ logCAMsg(kTraceWarning, kTraceAudioDevice, _id, "Error in " #expr, \
+ (const char*) & err); \
+ } \
+ } while (0)
+
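The do { ... } while (0) wrapper in the macros above makes each expansion a single statement, so the macros compose safely with a braceless if/else. A minimal illustration of why the wrapper matters (the LOG_TWICE names are illustrative):

#include <cstdio>

// Without do/while(0), a two-statement macro breaks under a braceless if:
#define LOG_TWICE_BAD(msg) std::puts(msg); std::puts(msg)
#define LOG_TWICE_OK(msg) \
  do {                    \
    std::puts(msg);       \
    std::puts(msg);       \
  } while (0)

int main() {
  if (false)
    LOG_TWICE_OK("never printed");  // expands to one statement: fine
  // if (false) LOG_TWICE_BAD("oops");  // the second puts would always run
}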
+AudioMixerManagerMac::AudioMixerManagerMac(const int32_t id)
+ : _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+ _id(id),
+ _inputDeviceID(kAudioObjectUnknown),
+ _outputDeviceID(kAudioObjectUnknown),
+ _noInputChannels(0),
+ _noOutputChannels(0) {
+ WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s constructed",
+ __FUNCTION__);
+}
+
+AudioMixerManagerMac::~AudioMixerManagerMac() {
+ WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s destructed",
+ __FUNCTION__);
+
+ Close();
+
+ delete &_critSect;
}
// ============================================================================
// PUBLIC METHODS
// ============================================================================
-int32_t AudioMixerManagerMac::Close()
-{
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s",
- __FUNCTION__);
+int32_t AudioMixerManagerMac::Close() {
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
- CriticalSectionScoped lock(&_critSect);
+ CriticalSectionScoped lock(&_critSect);
- CloseSpeaker();
- CloseMicrophone();
-
- return 0;
+ CloseSpeaker();
+ CloseMicrophone();
+ return 0;
}
-int32_t AudioMixerManagerMac::CloseSpeaker()
-{
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s",
- __FUNCTION__);
+int32_t AudioMixerManagerMac::CloseSpeaker() {
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
- CriticalSectionScoped lock(&_critSect);
+ CriticalSectionScoped lock(&_critSect);
- _outputDeviceID = kAudioObjectUnknown;
- _noOutputChannels = 0;
+ _outputDeviceID = kAudioObjectUnknown;
+ _noOutputChannels = 0;
- return 0;
+ return 0;
}
-int32_t AudioMixerManagerMac::CloseMicrophone()
-{
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s",
- __FUNCTION__);
+int32_t AudioMixerManagerMac::CloseMicrophone() {
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
- CriticalSectionScoped lock(&_critSect);
+ CriticalSectionScoped lock(&_critSect);
- _inputDeviceID = kAudioObjectUnknown;
- _noInputChannels = 0;
+ _inputDeviceID = kAudioObjectUnknown;
+ _noInputChannels = 0;
- return 0;
+ return 0;
}
-int32_t AudioMixerManagerMac::OpenSpeaker(AudioDeviceID deviceID)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- "AudioMixerManagerMac::OpenSpeaker(id=%d)", deviceID);
-
- CriticalSectionScoped lock(&_critSect);
-
- OSStatus err = noErr;
- UInt32 size = 0;
- pid_t hogPid = -1;
+int32_t AudioMixerManagerMac::OpenSpeaker(AudioDeviceID deviceID) {
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+ "AudioMixerManagerMac::OpenSpeaker(id=%d)", deviceID);
- _outputDeviceID = deviceID;
+ CriticalSectionScoped lock(&_critSect);
- // Check which process, if any, has hogged the device.
- AudioObjectPropertyAddress propertyAddress = { kAudioDevicePropertyHogMode,
- kAudioDevicePropertyScopeOutput, 0 };
+ OSStatus err = noErr;
+ UInt32 size = 0;
+ pid_t hogPid = -1;
- size = sizeof(hogPid);
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,
- &propertyAddress, 0, NULL, &size, &hogPid));
-
- if (hogPid == -1)
- {
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- " No process has hogged the input device");
- }
- // getpid() is apparently "always successful"
- else if (hogPid == getpid())
- {
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- " Our process has hogged the input device");
- } else
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " Another process (pid = %d) has hogged the input device",
- static_cast<int> (hogPid));
-
- return -1;
- }
+ _outputDeviceID = deviceID;
- // get number of channels from stream format
- propertyAddress.mSelector = kAudioDevicePropertyStreamFormat;
+ // Check which process, if any, has hogged the device.
+ AudioObjectPropertyAddress propertyAddress = {
+ kAudioDevicePropertyHogMode, kAudioDevicePropertyScopeOutput, 0};
- // Get the stream format, to be able to read the number of channels.
- AudioStreamBasicDescription streamFormat;
- size = sizeof(AudioStreamBasicDescription);
- memset(&streamFormat, 0, size);
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,
- &propertyAddress, 0, NULL, &size, &streamFormat));
+ size = sizeof(hogPid);
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
+ _outputDeviceID, &propertyAddress, 0, NULL, &size, &hogPid));
- _noOutputChannels = streamFormat.mChannelsPerFrame;
-
- return 0;
-}
-
-int32_t AudioMixerManagerMac::OpenMicrophone(AudioDeviceID deviceID)
-{
+ if (hogPid == -1) {
WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- "AudioMixerManagerMac::OpenMicrophone(id=%d)", deviceID);
-
- CriticalSectionScoped lock(&_critSect);
-
- OSStatus err = noErr;
- UInt32 size = 0;
- pid_t hogPid = -1;
-
- _inputDeviceID = deviceID;
-
- // Check which process, if any, has hogged the device.
- AudioObjectPropertyAddress propertyAddress = { kAudioDevicePropertyHogMode,
- kAudioDevicePropertyScopeInput, 0 };
- size = sizeof(hogPid);
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,
- &propertyAddress, 0, NULL, &size, &hogPid));
- if (hogPid == -1)
- {
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- " No process has hogged the input device");
- }
- // getpid() is apparently "always successful"
- else if (hogPid == getpid())
- {
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- " Our process has hogged the input device");
- } else
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " Another process (pid = %d) has hogged the input device",
- static_cast<int> (hogPid));
-
- return -1;
- }
-
- // get number of channels from stream format
- propertyAddress.mSelector = kAudioDevicePropertyStreamFormat;
-
- // Get the stream format, to be able to read the number of channels.
- AudioStreamBasicDescription streamFormat;
- size = sizeof(AudioStreamBasicDescription);
- memset(&streamFormat, 0, size);
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,
- &propertyAddress, 0, NULL, &size, &streamFormat));
-
- _noInputChannels = streamFormat.mChannelsPerFrame;
+                 " No process has hogged the output device");
+ }
+ // getpid() is apparently "always successful"
+ else if (hogPid == getpid()) {
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 " Our process has hogged the output device");
+ } else {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 " Another process (pid = %d) has hogged the output device",
+ static_cast<int>(hogPid));
- return 0;
-}
+ return -1;
+ }
-bool AudioMixerManagerMac::SpeakerIsInitialized() const
-{
- WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s",
- __FUNCTION__);
+ // get number of channels from stream format
+ propertyAddress.mSelector = kAudioDevicePropertyStreamFormat;
- return (_outputDeviceID != kAudioObjectUnknown);
-}
+ // Get the stream format, to be able to read the number of channels.
+ AudioStreamBasicDescription streamFormat;
+ size = sizeof(AudioStreamBasicDescription);
+ memset(&streamFormat, 0, size);
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
+ _outputDeviceID, &propertyAddress, 0, NULL, &size, &streamFormat));
-bool AudioMixerManagerMac::MicrophoneIsInitialized() const
-{
- WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s",
- __FUNCTION__);
+ _noOutputChannels = streamFormat.mChannelsPerFrame;
- return (_inputDeviceID != kAudioObjectUnknown);
+ return 0;
}
-int32_t AudioMixerManagerMac::SetSpeakerVolume(uint32_t volume)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- "AudioMixerManagerMac::SetSpeakerVolume(volume=%u)", volume);
+int32_t AudioMixerManagerMac::OpenMicrophone(AudioDeviceID deviceID) {
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+ "AudioMixerManagerMac::OpenMicrophone(id=%d)", deviceID);
- CriticalSectionScoped lock(&_critSect);
+ CriticalSectionScoped lock(&_critSect);
- if (_outputDeviceID == kAudioObjectUnknown)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " device ID has not been set");
- return -1;
- }
+ OSStatus err = noErr;
+ UInt32 size = 0;
+ pid_t hogPid = -1;
- OSStatus err = noErr;
- UInt32 size = 0;
- bool success = false;
+ _inputDeviceID = deviceID;
- // volume range is 0.0 - 1.0, convert from 0 -255
- const Float32 vol = (Float32)(volume / 255.0);
+ // Check which process, if any, has hogged the device.
+ AudioObjectPropertyAddress propertyAddress = {
+ kAudioDevicePropertyHogMode, kAudioDevicePropertyScopeInput, 0};
+ size = sizeof(hogPid);
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
+ _inputDeviceID, &propertyAddress, 0, NULL, &size, &hogPid));
+ if (hogPid == -1) {
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+ " No process has hogged the input device");
+ }
+ // getpid() is apparently "always successful"
+ else if (hogPid == getpid()) {
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+ " Our process has hogged the input device");
+ } else {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " Another process (pid = %d) has hogged the input device",
+ static_cast<int>(hogPid));
- assert(vol <= 1.0 && vol >= 0.0);
+ return -1;
+ }
- // Does the capture device have a master volume control?
- // If so, use it exclusively.
- AudioObjectPropertyAddress propertyAddress = {
- kAudioDevicePropertyVolumeScalar, kAudioDevicePropertyScopeOutput,
- 0 };
- Boolean isSettable = false;
- err = AudioObjectIsPropertySettable(_outputDeviceID, &propertyAddress,
- &isSettable);
- if (err == noErr && isSettable)
- {
- size = sizeof(vol);
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(_outputDeviceID,
- &propertyAddress, 0, NULL, size, &vol));
+ // get number of channels from stream format
+ propertyAddress.mSelector = kAudioDevicePropertyStreamFormat;
- return 0;
- }
-
- // Otherwise try to set each channel.
- for (UInt32 i = 1; i <= _noOutputChannels; i++)
- {
- propertyAddress.mElement = i;
- isSettable = false;
- err = AudioObjectIsPropertySettable(_outputDeviceID, &propertyAddress,
- &isSettable);
- if (err == noErr && isSettable)
- {
- size = sizeof(vol);
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(_outputDeviceID,
- &propertyAddress, 0, NULL, size, &vol));
- }
- success = true;
- }
+ // Get the stream format, to be able to read the number of channels.
+ AudioStreamBasicDescription streamFormat;
+ size = sizeof(AudioStreamBasicDescription);
+ memset(&streamFormat, 0, size);
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
+ _inputDeviceID, &propertyAddress, 0, NULL, &size, &streamFormat));
- if (!success)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " Unable to set a volume on any output channel");
- return -1;
- }
+ _noInputChannels = streamFormat.mChannelsPerFrame;
- return 0;
+ return 0;
}
-int32_t AudioMixerManagerMac::SpeakerVolume(uint32_t& volume) const
-{
+bool AudioMixerManagerMac::SpeakerIsInitialized() const {
+ WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s", __FUNCTION__);
- if (_outputDeviceID == kAudioObjectUnknown)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " device ID has not been set");
- return -1;
- }
-
- OSStatus err = noErr;
- UInt32 size = 0;
- unsigned int channels = 0;
- Float32 channelVol = 0;
- Float32 vol = 0;
-
- // Does the device have a master volume control?
- // If so, use it exclusively.
- AudioObjectPropertyAddress propertyAddress = {
- kAudioDevicePropertyVolumeScalar, kAudioDevicePropertyScopeOutput,
- 0 };
- Boolean hasProperty = AudioObjectHasProperty(_outputDeviceID,
- &propertyAddress);
- if (hasProperty)
- {
- size = sizeof(vol);
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,
- &propertyAddress, 0, NULL, &size, &vol));
-
- // vol 0.0 to 1.0 -> convert to 0 - 255
- volume = static_cast<uint32_t> (vol * 255 + 0.5);
- } else
- {
- // Otherwise get the average volume across channels.
- vol = 0;
- for (UInt32 i = 1; i <= _noOutputChannels; i++)
- {
- channelVol = 0;
- propertyAddress.mElement = i;
- hasProperty = AudioObjectHasProperty(_outputDeviceID,
- &propertyAddress);
- if (hasProperty)
- {
- size = sizeof(channelVol);
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,
- &propertyAddress, 0, NULL, &size, &channelVol));
-
- vol += channelVol;
- channels++;
- }
- }
-
- if (channels == 0)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " Unable to get a volume on any channel");
- return -1;
- }
-
- assert(channels > 0);
- // vol 0.0 to 1.0 -> convert to 0 - 255
- volume = static_cast<uint32_t> (255 * vol / channels + 0.5);
- }
-
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- " AudioMixerManagerMac::SpeakerVolume() => vol=%i", vol);
-
- return 0;
+ return (_outputDeviceID != kAudioObjectUnknown);
}
-int32_t
-AudioMixerManagerMac::MaxSpeakerVolume(uint32_t& maxVolume) const
-{
+bool AudioMixerManagerMac::MicrophoneIsInitialized() const {
+ WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s", __FUNCTION__);
- if (_outputDeviceID == kAudioObjectUnknown)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " device ID has not been set");
- return -1;
- }
-
- // volume range is 0.0 to 1.0
- // we convert that to 0 - 255
- maxVolume = 255;
-
- return 0;
+ return (_inputDeviceID != kAudioObjectUnknown);
}
-int32_t
-AudioMixerManagerMac::MinSpeakerVolume(uint32_t& minVolume) const
-{
+int32_t AudioMixerManagerMac::SetSpeakerVolume(uint32_t volume) {
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+ "AudioMixerManagerMac::SetSpeakerVolume(volume=%u)", volume);
- if (_outputDeviceID == kAudioObjectUnknown)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " device ID has not been set");
- return -1;
- }
+ CriticalSectionScoped lock(&_critSect);
- // volume range is 0.0 to 1.0
- // we convert that to 0 - 255
- minVolume = 0;
+ if (_outputDeviceID == kAudioObjectUnknown) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " device ID has not been set");
+ return -1;
+ }
- return 0;
-}
+ OSStatus err = noErr;
+ UInt32 size = 0;
+ bool success = false;
-int32_t
-AudioMixerManagerMac::SpeakerVolumeStepSize(uint16_t& stepSize) const
-{
+  // volume range is 0.0 - 1.0, convert from 0 - 255
+ const Float32 vol = (Float32)(volume / 255.0);
- if (_outputDeviceID == kAudioObjectUnknown)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " device ID has not been set");
- return -1;
- }
+ assert(vol <= 1.0 && vol >= 0.0);
- // volume range is 0.0 to 1.0
- // we convert that to 0 - 255
- stepSize = 1;
+  // Does the render device have a master volume control?
+ // If so, use it exclusively.
+ AudioObjectPropertyAddress propertyAddress = {
+ kAudioDevicePropertyVolumeScalar, kAudioDevicePropertyScopeOutput, 0};
+ Boolean isSettable = false;
+ err = AudioObjectIsPropertySettable(_outputDeviceID, &propertyAddress,
+ &isSettable);
+ if (err == noErr && isSettable) {
+ size = sizeof(vol);
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(
+ _outputDeviceID, &propertyAddress, 0, NULL, size, &vol));
return 0;
-}
+ }
-int32_t AudioMixerManagerMac::SpeakerVolumeIsAvailable(bool& available)
-{
- if (_outputDeviceID == kAudioObjectUnknown)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " device ID has not been set");
- return -1;
- }
-
- OSStatus err = noErr;
-
- // Does the capture device have a master volume control?
- // If so, use it exclusively.
- AudioObjectPropertyAddress propertyAddress = {
- kAudioDevicePropertyVolumeScalar, kAudioDevicePropertyScopeOutput,
- 0 };
- Boolean isSettable = false;
+ // Otherwise try to set each channel.
+ for (UInt32 i = 1; i <= _noOutputChannels; i++) {
+ propertyAddress.mElement = i;
+ isSettable = false;
err = AudioObjectIsPropertySettable(_outputDeviceID, &propertyAddress,
&isSettable);
- if (err == noErr && isSettable)
- {
- available = true;
- return 0;
- }
-
- // Otherwise try to set each channel.
- for (UInt32 i = 1; i <= _noOutputChannels; i++)
- {
- propertyAddress.mElement = i;
- isSettable = false;
- err = AudioObjectIsPropertySettable(_outputDeviceID, &propertyAddress,
- &isSettable);
- if (err != noErr || !isSettable)
- {
- available = false;
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " Volume cannot be set for output channel %d, err=%d",
- i, err);
- return -1;
- }
- }
-
+ if (err == noErr && isSettable) {
+ size = sizeof(vol);
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(
+ _outputDeviceID, &propertyAddress, 0, NULL, size, &vol));
+ }
+ success = true;
+ }
+
+ if (!success) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " Unable to set a volume on any output channel");
+ return -1;
+ }
+
+ return 0;
+}
+
+int32_t AudioMixerManagerMac::SpeakerVolume(uint32_t& volume) const {
+ if (_outputDeviceID == kAudioObjectUnknown) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " device ID has not been set");
+ return -1;
+ }
+
+ OSStatus err = noErr;
+ UInt32 size = 0;
+ unsigned int channels = 0;
+ Float32 channelVol = 0;
+ Float32 vol = 0;
+
+ // Does the device have a master volume control?
+ // If so, use it exclusively.
+ AudioObjectPropertyAddress propertyAddress = {
+ kAudioDevicePropertyVolumeScalar, kAudioDevicePropertyScopeOutput, 0};
+ Boolean hasProperty =
+ AudioObjectHasProperty(_outputDeviceID, &propertyAddress);
+ if (hasProperty) {
+ size = sizeof(vol);
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
+ _outputDeviceID, &propertyAddress, 0, NULL, &size, &vol));
+
+ // vol 0.0 to 1.0 -> convert to 0 - 255
+ volume = static_cast<uint32_t>(vol * 255 + 0.5);
+ } else {
+ // Otherwise get the average volume across channels.
+ vol = 0;
+ for (UInt32 i = 1; i <= _noOutputChannels; i++) {
+ channelVol = 0;
+ propertyAddress.mElement = i;
+ hasProperty = AudioObjectHasProperty(_outputDeviceID, &propertyAddress);
+ if (hasProperty) {
+ size = sizeof(channelVol);
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
+ _outputDeviceID, &propertyAddress, 0, NULL, &size, &channelVol));
+
+ vol += channelVol;
+ channels++;
+ }
+ }
+
+ if (channels == 0) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " Unable to get a volume on any channel");
+ return -1;
+ }
+
+ assert(channels > 0);
+ // vol 0.0 to 1.0 -> convert to 0 - 255
+ volume = static_cast<uint32_t>(255 * vol / channels + 0.5);
+ }
+
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+               " AudioMixerManagerMac::SpeakerVolume() => vol=%u", volume);
+
+ return 0;
+}
+
+int32_t AudioMixerManagerMac::MaxSpeakerVolume(uint32_t& maxVolume) const {
+ if (_outputDeviceID == kAudioObjectUnknown) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " device ID has not been set");
+ return -1;
+ }
+
+ // volume range is 0.0 to 1.0
+ // we convert that to 0 - 255
+ maxVolume = 255;
+
+ return 0;
+}
+
+int32_t AudioMixerManagerMac::MinSpeakerVolume(uint32_t& minVolume) const {
+ if (_outputDeviceID == kAudioObjectUnknown) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " device ID has not been set");
+ return -1;
+ }
+
+ // volume range is 0.0 to 1.0
+ // we convert that to 0 - 255
+ minVolume = 0;
+
+ return 0;
+}
+
+int32_t AudioMixerManagerMac::SpeakerVolumeStepSize(uint16_t& stepSize) const {
+ if (_outputDeviceID == kAudioObjectUnknown) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " device ID has not been set");
+ return -1;
+ }
+
+ // volume range is 0.0 to 1.0
+ // we convert that to 0 - 255
+ stepSize = 1;
+
+ return 0;
+}
+
+int32_t AudioMixerManagerMac::SpeakerVolumeIsAvailable(bool& available) {
+ if (_outputDeviceID == kAudioObjectUnknown) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " device ID has not been set");
+ return -1;
+ }
+
+ OSStatus err = noErr;
+
+  // Does the render device have a master volume control?
+ // If so, use it exclusively.
+ AudioObjectPropertyAddress propertyAddress = {
+ kAudioDevicePropertyVolumeScalar, kAudioDevicePropertyScopeOutput, 0};
+ Boolean isSettable = false;
+ err = AudioObjectIsPropertySettable(_outputDeviceID, &propertyAddress,
+ &isSettable);
+ if (err == noErr && isSettable) {
available = true;
return 0;
-}
-
-int32_t AudioMixerManagerMac::SpeakerMuteIsAvailable(bool& available)
-{
- if (_outputDeviceID == kAudioObjectUnknown)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " device ID has not been set");
- return -1;
- }
+ }
- OSStatus err = noErr;
-
- // Does the capture device have a master mute control?
- // If so, use it exclusively.
- AudioObjectPropertyAddress propertyAddress = { kAudioDevicePropertyMute,
- kAudioDevicePropertyScopeOutput, 0 };
- Boolean isSettable = false;
+ // Otherwise try to set each channel.
+ for (UInt32 i = 1; i <= _noOutputChannels; i++) {
+ propertyAddress.mElement = i;
+ isSettable = false;
err = AudioObjectIsPropertySettable(_outputDeviceID, &propertyAddress,
&isSettable);
- if (err == noErr && isSettable)
- {
- available = true;
- return 0;
- }
-
- // Otherwise try to set each channel.
- for (UInt32 i = 1; i <= _noOutputChannels; i++)
- {
- propertyAddress.mElement = i;
- isSettable = false;
- err = AudioObjectIsPropertySettable(_outputDeviceID, &propertyAddress,
- &isSettable);
- if (err != noErr || !isSettable)
- {
- available = false;
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " Mute cannot be set for output channel %d, err=%d",
- i, err);
- return -1;
- }
- }
-
+ if (err != noErr || !isSettable) {
+ available = false;
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " Volume cannot be set for output channel %d, err=%d", i,
+ err);
+ return -1;
+ }
+ }
+
+ available = true;
+ return 0;
+}
+
+int32_t AudioMixerManagerMac::SpeakerMuteIsAvailable(bool& available) {
+ if (_outputDeviceID == kAudioObjectUnknown) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " device ID has not been set");
+ return -1;
+ }
+
+ OSStatus err = noErr;
+
+  // Does the render device have a master mute control?
+ // If so, use it exclusively.
+ AudioObjectPropertyAddress propertyAddress = {
+ kAudioDevicePropertyMute, kAudioDevicePropertyScopeOutput, 0};
+ Boolean isSettable = false;
+ err = AudioObjectIsPropertySettable(_outputDeviceID, &propertyAddress,
+ &isSettable);
+ if (err == noErr && isSettable) {
available = true;
return 0;
-}
-
-int32_t AudioMixerManagerMac::SetSpeakerMute(bool enable)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- "AudioMixerManagerMac::SetSpeakerMute(enable=%u)", enable);
-
- CriticalSectionScoped lock(&_critSect);
-
- if (_outputDeviceID == kAudioObjectUnknown)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " device ID has not been set");
- return -1;
- }
-
- OSStatus err = noErr;
- UInt32 size = 0;
- UInt32 mute = enable ? 1 : 0;
- bool success = false;
+ }
- // Does the render device have a master mute control?
- // If so, use it exclusively.
- AudioObjectPropertyAddress propertyAddress = { kAudioDevicePropertyMute,
- kAudioDevicePropertyScopeOutput, 0 };
- Boolean isSettable = false;
+ // Otherwise try to set each channel.
+ for (UInt32 i = 1; i <= _noOutputChannels; i++) {
+ propertyAddress.mElement = i;
+ isSettable = false;
err = AudioObjectIsPropertySettable(_outputDeviceID, &propertyAddress,
&isSettable);
- if (err == noErr && isSettable)
- {
- size = sizeof(mute);
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(_outputDeviceID,
- &propertyAddress, 0, NULL, size, &mute));
-
- return 0;
- }
-
- // Otherwise try to set each channel.
- for (UInt32 i = 1; i <= _noOutputChannels; i++)
- {
- propertyAddress.mElement = i;
- isSettable = false;
- err = AudioObjectIsPropertySettable(_outputDeviceID, &propertyAddress,
- &isSettable);
- if (err == noErr && isSettable)
- {
- size = sizeof(mute);
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(_outputDeviceID,
- &propertyAddress, 0, NULL, size, &mute));
- }
- success = true;
- }
-
- if (!success)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " Unable to set mute on any input channel");
- return -1;
- }
+ if (err != noErr || !isSettable) {
+ available = false;
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " Mute cannot be set for output channel %d, err=%d", i, err);
+ return -1;
+ }
+ }
+
+ available = true;
+ return 0;
+}
+
+int32_t AudioMixerManagerMac::SetSpeakerMute(bool enable) {
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+ "AudioMixerManagerMac::SetSpeakerMute(enable=%u)", enable);
+
+ CriticalSectionScoped lock(&_critSect);
+
+ if (_outputDeviceID == kAudioObjectUnknown) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " device ID has not been set");
+ return -1;
+ }
+
+ OSStatus err = noErr;
+ UInt32 size = 0;
+ UInt32 mute = enable ? 1 : 0;
+ bool success = false;
+
+ // Does the render device have a master mute control?
+ // If so, use it exclusively.
+ AudioObjectPropertyAddress propertyAddress = {
+ kAudioDevicePropertyMute, kAudioDevicePropertyScopeOutput, 0};
+ Boolean isSettable = false;
+ err = AudioObjectIsPropertySettable(_outputDeviceID, &propertyAddress,
+ &isSettable);
+ if (err == noErr && isSettable) {
+ size = sizeof(mute);
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(
+ _outputDeviceID, &propertyAddress, 0, NULL, size, &mute));
return 0;
-}
+ }
-int32_t AudioMixerManagerMac::SpeakerMute(bool& enabled) const
-{
-
- if (_outputDeviceID == kAudioObjectUnknown)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " device ID has not been set");
- return -1;
- }
-
- OSStatus err = noErr;
- UInt32 size = 0;
- unsigned int channels = 0;
- UInt32 channelMuted = 0;
- UInt32 muted = 0;
-
- // Does the device have a master volume control?
- // If so, use it exclusively.
- AudioObjectPropertyAddress propertyAddress = { kAudioDevicePropertyMute,
- kAudioDevicePropertyScopeOutput, 0 };
- Boolean hasProperty = AudioObjectHasProperty(_outputDeviceID,
- &propertyAddress);
- if (hasProperty)
- {
- size = sizeof(muted);
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,
- &propertyAddress, 0, NULL, &size, &muted));
-
- // 1 means muted
- enabled = static_cast<bool> (muted);
- } else
- {
- // Otherwise check if all channels are muted.
- for (UInt32 i = 1; i <= _noOutputChannels; i++)
- {
- muted = 0;
- propertyAddress.mElement = i;
- hasProperty = AudioObjectHasProperty(_outputDeviceID,
- &propertyAddress);
- if (hasProperty)
- {
- size = sizeof(channelMuted);
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,
- &propertyAddress, 0, NULL, &size, &channelMuted));
-
- muted = (muted && channelMuted);
- channels++;
- }
- }
-
- if (channels == 0)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " Unable to get mute for any channel");
- return -1;
- }
-
- assert(channels > 0);
- // 1 means muted
- enabled = static_cast<bool> (muted);
- }
-
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- " AudioMixerManagerMac::SpeakerMute() => enabled=%d, enabled");
-
- return 0;
-}
-
-int32_t AudioMixerManagerMac::StereoPlayoutIsAvailable(bool& available)
-{
- if (_outputDeviceID == kAudioObjectUnknown)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " device ID has not been set");
- return -1;
- }
-
- available = (_noOutputChannels == 2);
- return 0;
-}
-
-int32_t AudioMixerManagerMac::StereoRecordingIsAvailable(bool& available)
-{
- if (_inputDeviceID == kAudioObjectUnknown)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " device ID has not been set");
- return -1;
- }
-
- available = (_noInputChannels == 2);
- return 0;
-}
-
-int32_t AudioMixerManagerMac::MicrophoneMuteIsAvailable(bool& available)
-{
- if (_inputDeviceID == kAudioObjectUnknown)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " device ID has not been set");
- return -1;
- }
-
- OSStatus err = noErr;
-
- // Does the capture device have a master mute control?
- // If so, use it exclusively.
- AudioObjectPropertyAddress propertyAddress = { kAudioDevicePropertyMute,
- kAudioDevicePropertyScopeInput, 0 };
- Boolean isSettable = false;
- err = AudioObjectIsPropertySettable(_inputDeviceID, &propertyAddress,
+ // Otherwise try to set each channel.
+ for (UInt32 i = 1; i <= _noOutputChannels; i++) {
+ propertyAddress.mElement = i;
+ isSettable = false;
+ err = AudioObjectIsPropertySettable(_outputDeviceID, &propertyAddress,
&isSettable);
- if (err == noErr && isSettable)
- {
- available = true;
- return 0;
- }
-
- // Otherwise try to set each channel.
- for (UInt32 i = 1; i <= _noInputChannels; i++)
- {
- propertyAddress.mElement = i;
- isSettable = false;
- err = AudioObjectIsPropertySettable(_inputDeviceID, &propertyAddress,
- &isSettable);
- if (err != noErr || !isSettable)
- {
- available = false;
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " Mute cannot be set for output channel %d, err=%d",
- i, err);
- return -1;
- }
- }
-
+ if (err == noErr && isSettable) {
+ size = sizeof(mute);
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(
+ _outputDeviceID, &propertyAddress, 0, NULL, size, &mute));
+ }
+ success = true;
+ }
+
+ if (!success) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 " Unable to set mute on any output channel");
+ return -1;
+ }
+
+ return 0;
+}
+
+int32_t AudioMixerManagerMac::SpeakerMute(bool& enabled) const {
+ if (_outputDeviceID == kAudioObjectUnknown) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " device ID has not been set");
+ return -1;
+ }
+
+ OSStatus err = noErr;
+ UInt32 size = 0;
+ unsigned int channels = 0;
+ UInt32 channelMuted = 0;
+ UInt32 muted = 0;
+
+ // Does the device have a master volume control?
+ // If so, use it exclusively.
+ AudioObjectPropertyAddress propertyAddress = {
+ kAudioDevicePropertyMute, kAudioDevicePropertyScopeOutput, 0};
+ Boolean hasProperty =
+ AudioObjectHasProperty(_outputDeviceID, &propertyAddress);
+ if (hasProperty) {
+ size = sizeof(muted);
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
+ _outputDeviceID, &propertyAddress, 0, NULL, &size, &muted));
+
+ // 1 means muted
+ enabled = static_cast<bool>(muted);
+ } else {
+ // Otherwise check if all channels are muted.
+ for (UInt32 i = 1; i <= _noOutputChannels; i++) {
+ muted = 0;
+ propertyAddress.mElement = i;
+ hasProperty = AudioObjectHasProperty(_outputDeviceID, &propertyAddress);
+ if (hasProperty) {
+ size = sizeof(channelMuted);
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
+ _outputDeviceID, &propertyAddress, 0, NULL, &size, &channelMuted));
+
+ muted = (muted && channelMuted);
+ channels++;
+ }
+ }
+
+ if (channels == 0) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " Unable to get mute for any channel");
+ return -1;
+ }
+
+ assert(channels > 0);
+ // 1 means muted
+ enabled = static_cast<bool>(muted);
+ }
+
+ WEBRTC_TRACE(
+ kTraceInfo, kTraceAudioDevice, _id,
+      " AudioMixerManagerMac::SpeakerMute() => enabled=%d", enabled);
+
+ return 0;
+}
+
+int32_t AudioMixerManagerMac::StereoPlayoutIsAvailable(bool& available) {
+ if (_outputDeviceID == kAudioObjectUnknown) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " device ID has not been set");
+ return -1;
+ }
+
+ available = (_noOutputChannels == 2);
+ return 0;
+}
+
+int32_t AudioMixerManagerMac::StereoRecordingIsAvailable(bool& available) {
+ if (_inputDeviceID == kAudioObjectUnknown) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " device ID has not been set");
+ return -1;
+ }
+
+ available = (_noInputChannels == 2);
+ return 0;
+}
+
+int32_t AudioMixerManagerMac::MicrophoneMuteIsAvailable(bool& available) {
+ if (_inputDeviceID == kAudioObjectUnknown) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " device ID has not been set");
+ return -1;
+ }
+
+ OSStatus err = noErr;
+
+ // Does the capture device have a master mute control?
+ // If so, use it exclusively.
+ AudioObjectPropertyAddress propertyAddress = {
+ kAudioDevicePropertyMute, kAudioDevicePropertyScopeInput, 0};
+ Boolean isSettable = false;
+ err = AudioObjectIsPropertySettable(_inputDeviceID, &propertyAddress,
+ &isSettable);
+ if (err == noErr && isSettable) {
available = true;
return 0;
-}
+ }
-int32_t AudioMixerManagerMac::SetMicrophoneMute(bool enable)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- "AudioMixerManagerMac::SetMicrophoneMute(enable=%u)", enable);
-
- CriticalSectionScoped lock(&_critSect);
-
- if (_inputDeviceID == kAudioObjectUnknown)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " device ID has not been set");
- return -1;
- }
-
- OSStatus err = noErr;
- UInt32 size = 0;
- UInt32 mute = enable ? 1 : 0;
- bool success = false;
-
- // Does the capture device have a master mute control?
- // If so, use it exclusively.
- AudioObjectPropertyAddress propertyAddress = { kAudioDevicePropertyMute,
- kAudioDevicePropertyScopeInput, 0 };
- Boolean isSettable = false;
+ // Otherwise try to set each channel.
+ for (UInt32 i = 1; i <= _noInputChannels; i++) {
+ propertyAddress.mElement = i;
+ isSettable = false;
err = AudioObjectIsPropertySettable(_inputDeviceID, &propertyAddress,
&isSettable);
- if (err == noErr && isSettable)
- {
- size = sizeof(mute);
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(_inputDeviceID,
- &propertyAddress, 0, NULL, size, &mute));
-
- return 0;
- }
-
- // Otherwise try to set each channel.
- for (UInt32 i = 1; i <= _noInputChannels; i++)
- {
- propertyAddress.mElement = i;
- isSettable = false;
- err = AudioObjectIsPropertySettable(_inputDeviceID, &propertyAddress,
- &isSettable);
- if (err == noErr && isSettable)
- {
- size = sizeof(mute);
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(_inputDeviceID,
- &propertyAddress, 0, NULL, size, &mute));
- }
- success = true;
- }
-
- if (!success)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " Unable to set mute on any input channel");
- return -1;
- }
-
- return 0;
-}
-
-int32_t AudioMixerManagerMac::MicrophoneMute(bool& enabled) const
-{
-
- if (_inputDeviceID == kAudioObjectUnknown)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " device ID has not been set");
- return -1;
- }
-
- OSStatus err = noErr;
- UInt32 size = 0;
- unsigned int channels = 0;
- UInt32 channelMuted = 0;
- UInt32 muted = 0;
-
- // Does the device have a master volume control?
- // If so, use it exclusively.
- AudioObjectPropertyAddress propertyAddress = { kAudioDevicePropertyMute,
- kAudioDevicePropertyScopeInput, 0 };
- Boolean hasProperty = AudioObjectHasProperty(_inputDeviceID,
- &propertyAddress);
- if (hasProperty)
- {
- size = sizeof(muted);
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,
- &propertyAddress, 0, NULL, &size, &muted));
-
- // 1 means muted
- enabled = static_cast<bool> (muted);
- } else
- {
- // Otherwise check if all channels are muted.
- for (UInt32 i = 1; i <= _noInputChannels; i++)
- {
- muted = 0;
- propertyAddress.mElement = i;
- hasProperty = AudioObjectHasProperty(_inputDeviceID,
- &propertyAddress);
- if (hasProperty)
- {
- size = sizeof(channelMuted);
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,
- &propertyAddress, 0, NULL, &size, &channelMuted));
-
- muted = (muted && channelMuted);
- channels++;
- }
- }
-
- if (channels == 0)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " Unable to get mute for any channel");
- return -1;
- }
-
- assert(channels > 0);
- // 1 means muted
- enabled = static_cast<bool> (muted);
- }
-
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- " AudioMixerManagerMac::MicrophoneMute() => enabled=%d",
- enabled);
+ if (err != noErr || !isSettable) {
+ available = false;
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 " Mute cannot be set for input channel %d, err=%d", i, err);
+ return -1;
+ }
+ }
+
+ available = true;
+ return 0;
+}
+
+int32_t AudioMixerManagerMac::SetMicrophoneMute(bool enable) {
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+ "AudioMixerManagerMac::SetMicrophoneMute(enable=%u)", enable);
+
+ CriticalSectionScoped lock(&_critSect);
+
+ if (_inputDeviceID == kAudioObjectUnknown) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " device ID has not been set");
+ return -1;
+ }
+
+ OSStatus err = noErr;
+ UInt32 size = 0;
+ UInt32 mute = enable ? 1 : 0;
+ bool success = false;
+
+ // Does the capture device have a master mute control?
+ // If so, use it exclusively.
+ AudioObjectPropertyAddress propertyAddress = {
+ kAudioDevicePropertyMute, kAudioDevicePropertyScopeInput, 0};
+ Boolean isSettable = false;
+ err = AudioObjectIsPropertySettable(_inputDeviceID, &propertyAddress,
+ &isSettable);
+ if (err == noErr && isSettable) {
+ size = sizeof(mute);
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(
+ _inputDeviceID, &propertyAddress, 0, NULL, size, &mute));
return 0;
-}
-
-int32_t AudioMixerManagerMac::MicrophoneBoostIsAvailable(bool& available)
-{
- if (_inputDeviceID == kAudioObjectUnknown)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " device ID has not been set");
- return -1;
- }
-
- available = false; // No AudioObjectPropertySelector value for Mic Boost
+ }
- return 0;
-}
-
-int32_t AudioMixerManagerMac::SetMicrophoneBoost(bool enable)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- "AudioMixerManagerMac::SetMicrophoneBoost(enable=%u)", enable);
-
- CriticalSectionScoped lock(&_critSect);
-
- if (_inputDeviceID == kAudioObjectUnknown)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " device ID has not been set");
- return -1;
- }
-
- // Ensure that the selected microphone has a valid boost control.
- bool available(false);
- MicrophoneBoostIsAvailable(available);
- if (!available)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " it is not possible to enable microphone boost");
- return -1;
- }
-
- // It is assumed that the call above fails!
- return 0;
-}
-
-int32_t AudioMixerManagerMac::MicrophoneBoost(bool& enabled) const
-{
-
- if (_inputDeviceID == kAudioObjectUnknown)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " device ID has not been set");
- return -1;
- }
-
- // Microphone boost cannot be enabled on this platform!
- enabled = false;
-
- return 0;
-}
-
-int32_t AudioMixerManagerMac::MicrophoneVolumeIsAvailable(bool& available)
-{
- if (_inputDeviceID == kAudioObjectUnknown)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " device ID has not been set");
- return -1;
- }
-
- OSStatus err = noErr;
-
- // Does the capture device have a master volume control?
- // If so, use it exclusively.
- AudioObjectPropertyAddress
- propertyAddress = { kAudioDevicePropertyVolumeScalar,
- kAudioDevicePropertyScopeInput, 0 };
- Boolean isSettable = false;
+ // Otherwise try to set each channel.
+ for (UInt32 i = 1; i <= _noInputChannels; i++) {
+ propertyAddress.mElement = i;
+ isSettable = false;
err = AudioObjectIsPropertySettable(_inputDeviceID, &propertyAddress,
&isSettable);
- if (err == noErr && isSettable)
- {
- available = true;
- return 0;
- }
-
- // Otherwise try to set each channel.
- for (UInt32 i = 1; i <= _noInputChannels; i++)
- {
- propertyAddress.mElement = i;
- isSettable = false;
- err = AudioObjectIsPropertySettable(_inputDeviceID, &propertyAddress,
- &isSettable);
- if (err != noErr || !isSettable)
- {
- available = false;
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " Volume cannot be set for input channel %d, err=%d",
- i, err);
- return -1;
- }
- }
-
+ if (err == noErr && isSettable) {
+ size = sizeof(mute);
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(
+ _inputDeviceID, &propertyAddress, 0, NULL, size, &mute));
+ }
+ success = true;
+ }
+
+ if (!success) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " Unable to set mute on any input channel");
+ return -1;
+ }
+
+ return 0;
+}
+
+int32_t AudioMixerManagerMac::MicrophoneMute(bool& enabled) const {
+ if (_inputDeviceID == kAudioObjectUnknown) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " device ID has not been set");
+ return -1;
+ }
+
+ OSStatus err = noErr;
+ UInt32 size = 0;
+ unsigned int channels = 0;
+ UInt32 channelMuted = 0;
+ UInt32 muted = 0;
+
+ // Does the device have a master volume control?
+ // If so, use it exclusively.
+ AudioObjectPropertyAddress propertyAddress = {
+ kAudioDevicePropertyMute, kAudioDevicePropertyScopeInput, 0};
+ Boolean hasProperty =
+ AudioObjectHasProperty(_inputDeviceID, &propertyAddress);
+ if (hasProperty) {
+ size = sizeof(muted);
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
+ _inputDeviceID, &propertyAddress, 0, NULL, &size, &muted));
+
+ // 1 means muted
+ enabled = static_cast<bool>(muted);
+ } else {
+ // Otherwise check if all channels are muted.
+ for (UInt32 i = 1; i <= _noInputChannels; i++) {
+ muted = 0;
+ propertyAddress.mElement = i;
+ hasProperty = AudioObjectHasProperty(_inputDeviceID, &propertyAddress);
+ if (hasProperty) {
+ size = sizeof(channelMuted);
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
+ _inputDeviceID, &propertyAddress, 0, NULL, &size, &channelMuted));
+
+ muted = (muted && channelMuted);
+ channels++;
+ }
+ }
+
+ if (channels == 0) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " Unable to get mute for any channel");
+ return -1;
+ }
+
+ assert(channels > 0);
+ // 1 means muted
+ enabled = static_cast<bool>(muted);
+ }
+
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+ " AudioMixerManagerMac::MicrophoneMute() => enabled=%d",
+ enabled);
+
+ return 0;
+}
+
+int32_t AudioMixerManagerMac::MicrophoneBoostIsAvailable(bool& available) {
+ if (_inputDeviceID == kAudioObjectUnknown) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " device ID has not been set");
+ return -1;
+ }
+
+ available = false; // No AudioObjectPropertySelector value for Mic Boost
+
+ return 0;
+}
+
+int32_t AudioMixerManagerMac::SetMicrophoneBoost(bool enable) {
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+ "AudioMixerManagerMac::SetMicrophoneBoost(enable=%u)", enable);
+
+ CriticalSectionScoped lock(&_critSect);
+
+ if (_inputDeviceID == kAudioObjectUnknown) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " device ID has not been set");
+ return -1;
+ }
+
+ // Ensure that the selected microphone has a valid boost control.
+ bool available(false);
+ MicrophoneBoostIsAvailable(available);
+ if (!available) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " it is not possible to enable microphone boost");
+ return -1;
+ }
+
+  // Unreachable in practice: the availability check above always fails.
+ return 0;
+}
+
+int32_t AudioMixerManagerMac::MicrophoneBoost(bool& enabled) const {
+ if (_inputDeviceID == kAudioObjectUnknown) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " device ID has not been set");
+ return -1;
+ }
+
+ // Microphone boost cannot be enabled on this platform!
+ enabled = false;
+
+ return 0;
+}
+
+int32_t AudioMixerManagerMac::MicrophoneVolumeIsAvailable(bool& available) {
+ if (_inputDeviceID == kAudioObjectUnknown) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " device ID has not been set");
+ return -1;
+ }
+
+ OSStatus err = noErr;
+
+ // Does the capture device have a master volume control?
+ // If so, use it exclusively.
+ AudioObjectPropertyAddress propertyAddress = {
+ kAudioDevicePropertyVolumeScalar, kAudioDevicePropertyScopeInput, 0};
+ Boolean isSettable = false;
+ err = AudioObjectIsPropertySettable(_inputDeviceID, &propertyAddress,
+ &isSettable);
+ if (err == noErr && isSettable) {
available = true;
return 0;
-}
-
-int32_t AudioMixerManagerMac::SetMicrophoneVolume(uint32_t volume)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- "AudioMixerManagerMac::SetMicrophoneVolume(volume=%u)", volume);
-
- CriticalSectionScoped lock(&_critSect);
+ }
- if (_inputDeviceID == kAudioObjectUnknown)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " device ID has not been set");
- return -1;
- }
-
- OSStatus err = noErr;
- UInt32 size = 0;
- bool success = false;
-
- // volume range is 0.0 - 1.0, convert from 0 - 255
- const Float32 vol = (Float32)(volume / 255.0);
-
- assert(vol <= 1.0 && vol >= 0.0);
-
- // Does the capture device have a master volume control?
- // If so, use it exclusively.
- AudioObjectPropertyAddress
- propertyAddress = { kAudioDevicePropertyVolumeScalar,
- kAudioDevicePropertyScopeInput, 0 };
- Boolean isSettable = false;
+ // Otherwise try to set each channel.
+ for (UInt32 i = 1; i <= _noInputChannels; i++) {
+ propertyAddress.mElement = i;
+ isSettable = false;
err = AudioObjectIsPropertySettable(_inputDeviceID, &propertyAddress,
&isSettable);
- if (err == noErr && isSettable)
- {
- size = sizeof(vol);
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(_inputDeviceID,
- &propertyAddress, 0, NULL, size, &vol));
-
- return 0;
+ if (err != noErr || !isSettable) {
+ available = false;
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " Volume cannot be set for input channel %d, err=%d", i,
+ err);
+ return -1;
}
+ }
- // Otherwise try to set each channel.
- for (UInt32 i = 1; i <= _noInputChannels; i++)
- {
- propertyAddress.mElement = i;
- isSettable = false;
- err = AudioObjectIsPropertySettable(_inputDeviceID, &propertyAddress,
- &isSettable);
- if (err == noErr && isSettable)
- {
- size = sizeof(vol);
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(_inputDeviceID,
- &propertyAddress, 0, NULL, size, &vol));
- }
- success = true;
- }
-
- if (!success)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " Unable to set a level on any input channel");
- return -1;
- }
-
- return 0;
+ available = true;
+ return 0;
}
-int32_t
-AudioMixerManagerMac::MicrophoneVolume(uint32_t& volume) const
-{
-
- if (_inputDeviceID == kAudioObjectUnknown)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " device ID has not been set");
- return -1;
- }
+int32_t AudioMixerManagerMac::SetMicrophoneVolume(uint32_t volume) {
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+ "AudioMixerManagerMac::SetMicrophoneVolume(volume=%u)", volume);
- OSStatus err = noErr;
- UInt32 size = 0;
- unsigned int channels = 0;
- Float32 channelVol = 0;
- Float32 volFloat32 = 0;
-
- // Does the device have a master volume control?
- // If so, use it exclusively.
- AudioObjectPropertyAddress
- propertyAddress = { kAudioDevicePropertyVolumeScalar,
- kAudioDevicePropertyScopeInput, 0 };
- Boolean hasProperty = AudioObjectHasProperty(_inputDeviceID,
- &propertyAddress);
- if (hasProperty)
- {
- size = sizeof(volFloat32);
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,
- &propertyAddress, 0, NULL, &size, &volFloat32));
-
- // vol 0.0 to 1.0 -> convert to 0 - 255
- volume = static_cast<uint32_t> (volFloat32 * 255 + 0.5);
- } else
- {
- // Otherwise get the average volume across channels.
- volFloat32 = 0;
- for (UInt32 i = 1; i <= _noInputChannels; i++)
- {
- channelVol = 0;
- propertyAddress.mElement = i;
- hasProperty = AudioObjectHasProperty(_inputDeviceID,
- &propertyAddress);
- if (hasProperty)
- {
- size = sizeof(channelVol);
- WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,
- &propertyAddress, 0, NULL, &size, &channelVol));
-
- volFloat32 += channelVol;
- channels++;
- }
- }
-
- if (channels == 0)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " Unable to get a level on any channel");
- return -1;
- }
-
- assert(channels > 0);
- // vol 0.0 to 1.0 -> convert to 0 - 255
- volume = static_cast<uint32_t>
- (255 * volFloat32 / channels + 0.5);
- }
+ CriticalSectionScoped lock(&_critSect);
- WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
- " AudioMixerManagerMac::MicrophoneVolume() => vol=%u",
- volume);
+ if (_inputDeviceID == kAudioObjectUnknown) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " device ID has not been set");
+ return -1;
+ }
- return 0;
-}
+ OSStatus err = noErr;
+ UInt32 size = 0;
+ bool success = false;
-int32_t
-AudioMixerManagerMac::MaxMicrophoneVolume(uint32_t& maxVolume) const
-{
+ // volume range is 0.0 - 1.0, convert from 0 - 255
+ const Float32 vol = (Float32)(volume / 255.0);
- if (_inputDeviceID == kAudioObjectUnknown)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " device ID has not been set");
- return -1;
- }
+ assert(vol <= 1.0 && vol >= 0.0);
- // volume range is 0.0 to 1.0
- // we convert that to 0 - 255
- maxVolume = 255;
+ // Does the capture device have a master volume control?
+ // If so, use it exclusively.
+ AudioObjectPropertyAddress propertyAddress = {
+ kAudioDevicePropertyVolumeScalar, kAudioDevicePropertyScopeInput, 0};
+ Boolean isSettable = false;
+ err = AudioObjectIsPropertySettable(_inputDeviceID, &propertyAddress,
+ &isSettable);
+ if (err == noErr && isSettable) {
+ size = sizeof(vol);
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(
+ _inputDeviceID, &propertyAddress, 0, NULL, size, &vol));
return 0;
-}
-
-int32_t
-AudioMixerManagerMac::MinMicrophoneVolume(uint32_t& minVolume) const
-{
-
- if (_inputDeviceID == kAudioObjectUnknown)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " device ID has not been set");
- return -1;
- }
+ }
- // volume range is 0.0 to 1.0
- // we convert that to 0 - 10
- minVolume = 0;
+ // Otherwise try to set each channel.
+ for (UInt32 i = 1; i <= _noInputChannels; i++) {
+ propertyAddress.mElement = i;
+ isSettable = false;
+ err = AudioObjectIsPropertySettable(_inputDeviceID, &propertyAddress,
+ &isSettable);
+ if (err == noErr && isSettable) {
+ size = sizeof(vol);
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(
+ _inputDeviceID, &propertyAddress, 0, NULL, size, &vol));
+ }
+ success = true;
+ }
+
+ if (!success) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " Unable to set a level on any input channel");
+ return -1;
+ }
+
+ return 0;
+}
+
+int32_t AudioMixerManagerMac::MicrophoneVolume(uint32_t& volume) const {
+ if (_inputDeviceID == kAudioObjectUnknown) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " device ID has not been set");
+ return -1;
+ }
+
+ OSStatus err = noErr;
+ UInt32 size = 0;
+ unsigned int channels = 0;
+ Float32 channelVol = 0;
+ Float32 volFloat32 = 0;
+
+ // Does the device have a master volume control?
+ // If so, use it exclusively.
+ AudioObjectPropertyAddress propertyAddress = {
+ kAudioDevicePropertyVolumeScalar, kAudioDevicePropertyScopeInput, 0};
+ Boolean hasProperty =
+ AudioObjectHasProperty(_inputDeviceID, &propertyAddress);
+ if (hasProperty) {
+ size = sizeof(volFloat32);
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
+ _inputDeviceID, &propertyAddress, 0, NULL, &size, &volFloat32));
+
+ // vol 0.0 to 1.0 -> convert to 0 - 255
+ volume = static_cast<uint32_t>(volFloat32 * 255 + 0.5);
+ } else {
+ // Otherwise get the average volume across channels.
+ volFloat32 = 0;
+ for (UInt32 i = 1; i <= _noInputChannels; i++) {
+ channelVol = 0;
+ propertyAddress.mElement = i;
+ hasProperty = AudioObjectHasProperty(_inputDeviceID, &propertyAddress);
+ if (hasProperty) {
+ size = sizeof(channelVol);
+ WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(
+ _inputDeviceID, &propertyAddress, 0, NULL, &size, &channelVol));
+
+ volFloat32 += channelVol;
+ channels++;
+ }
+ }
+
+ if (channels == 0) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " Unable to get a level on any channel");
+ return -1;
+ }
+
+ assert(channels > 0);
+ // vol 0.0 to 1.0 -> convert to 0 - 255
+ volume = static_cast<uint32_t>(255 * volFloat32 / channels + 0.5);
+ }
+
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+ " AudioMixerManagerMac::MicrophoneVolume() => vol=%u",
+ volume);
+
+ return 0;
+}
+
+int32_t AudioMixerManagerMac::MaxMicrophoneVolume(uint32_t& maxVolume) const {
+ if (_inputDeviceID == kAudioObjectUnknown) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " device ID has not been set");
+ return -1;
+ }
+
+ // volume range is 0.0 to 1.0
+ // we convert that to 0 - 255
+ maxVolume = 255;
+
+ return 0;
+}
+
+int32_t AudioMixerManagerMac::MinMicrophoneVolume(uint32_t& minVolume) const {
+ if (_inputDeviceID == kAudioObjectUnknown) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " device ID has not been set");
+ return -1;
+ }
+
+ // volume range is 0.0 to 1.0
+  // we convert that to 0 - 255
+ minVolume = 0;
- return 0;
+ return 0;
}
-int32_t
-AudioMixerManagerMac::MicrophoneVolumeStepSize(uint16_t& stepSize) const
-{
-
- if (_inputDeviceID == kAudioObjectUnknown)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
- " device ID has not been set");
- return -1;
- }
-
- // volume range is 0.0 to 1.0
- // we convert that to 0 - 10
- stepSize = 1;
-
- return 0;
+int32_t AudioMixerManagerMac::MicrophoneVolumeStepSize(
+ uint16_t& stepSize) const {
+ if (_inputDeviceID == kAudioObjectUnknown) {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ " device ID has not been set");
+ return -1;
+ }
+
+ // volume range is 0.0 to 1.0
+  // we convert that to 0 - 255
+ stepSize = 1;
+
+ return 0;
}
// ============================================================================
@@ -1148,18 +1003,18 @@ AudioMixerManagerMac::MicrophoneVolumeStepSize(uint16_t& stepSize) const
// CoreAudio errors are best interpreted as four character strings.
void AudioMixerManagerMac::logCAMsg(const TraceLevel level,
const TraceModule module,
- const int32_t id, const char *msg,
- const char *err)
-{
- assert(msg != NULL);
- assert(err != NULL);
+ const int32_t id,
+ const char* msg,
+ const char* err) {
+ assert(msg != NULL);
+ assert(err != NULL);
#ifdef WEBRTC_ARCH_BIG_ENDIAN
- WEBRTC_TRACE(level, module, id, "%s: %.4s", msg, err);
+ WEBRTC_TRACE(level, module, id, "%s: %.4s", msg, err);
#else
- // We need to flip the characters in this case.
- WEBRTC_TRACE(level, module, id, "%s: %.1s%.1s%.1s%.1s", msg, err + 3, err
- + 2, err + 1, err);
+ // We need to flip the characters in this case.
+ WEBRTC_TRACE(level, module, id, "%s: %.1s%.1s%.1s%.1s", msg, err + 3, err + 2,
+ err + 1, err);
#endif
}
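
The reformatted mixer code above repeats one CoreAudio idiom throughout: query the master element (element 0) of a property first, and only when that element is absent or not settable fall back to walking the per-channel elements, which are numbered from 1. Below is a minimal standalone sketch of that idiom; it is not WebRTC code, the helper name and clamping are assumptions, and unlike the loops above it only reports success when at least one set call actually succeeded.

// Hedged sketch of the master-then-per-channel CoreAudio pattern used in
// audio_mixer_manager_mac.cc above. Helper name and clamping are assumptions.
#include <CoreAudio/CoreAudio.h>

static bool SetOutputVolumeScalar(AudioDeviceID device, Float32 vol,
                                  UInt32 channels) {
  if (vol < 0.0f) vol = 0.0f;  // property range is 0.0 - 1.0
  if (vol > 1.0f) vol = 1.0f;
  AudioObjectPropertyAddress addr = {kAudioDevicePropertyVolumeScalar,
                                     kAudioDevicePropertyScopeOutput,
                                     0};  // element 0 == master
  Boolean settable = false;
  if (AudioObjectIsPropertySettable(device, &addr, &settable) == noErr &&
      settable) {
    return AudioObjectSetPropertyData(device, &addr, 0, NULL, sizeof(vol),
                                      &vol) == noErr;
  }
  bool any = false;
  for (UInt32 ch = 1; ch <= channels; ch++) {  // per-channel fallback
    addr.mElement = ch;
    settable = false;
    if (AudioObjectIsPropertySettable(device, &addr, &settable) == noErr &&
        settable &&
        AudioObjectSetPropertyData(device, &addr, 0, NULL, sizeof(vol),
                                   &vol) == noErr) {
      any = true;
    }
  }
  return any;
}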
diff --git a/webrtc/modules/audio_device/mac/audio_mixer_manager_mac.h b/webrtc/modules/audio_device/mac/audio_mixer_manager_mac.h
index 94d6928921..9cbfe2deb4 100644
--- a/webrtc/modules/audio_device/mac/audio_mixer_manager_mac.h
+++ b/webrtc/modules/audio_device/mac/audio_mixer_manager_mac.h
@@ -18,63 +18,62 @@
#include <CoreAudio/CoreAudio.h>
namespace webrtc {
-
-class AudioMixerManagerMac
-{
-public:
- int32_t OpenSpeaker(AudioDeviceID deviceID);
- int32_t OpenMicrophone(AudioDeviceID deviceID);
- int32_t SetSpeakerVolume(uint32_t volume);
- int32_t SpeakerVolume(uint32_t& volume) const;
- int32_t MaxSpeakerVolume(uint32_t& maxVolume) const;
- int32_t MinSpeakerVolume(uint32_t& minVolume) const;
- int32_t SpeakerVolumeStepSize(uint16_t& stepSize) const;
- int32_t SpeakerVolumeIsAvailable(bool& available);
- int32_t SpeakerMuteIsAvailable(bool& available);
- int32_t SetSpeakerMute(bool enable);
- int32_t SpeakerMute(bool& enabled) const;
- int32_t StereoPlayoutIsAvailable(bool& available);
- int32_t StereoRecordingIsAvailable(bool& available);
- int32_t MicrophoneMuteIsAvailable(bool& available);
- int32_t SetMicrophoneMute(bool enable);
- int32_t MicrophoneMute(bool& enabled) const;
- int32_t MicrophoneBoostIsAvailable(bool& available);
- int32_t SetMicrophoneBoost(bool enable);
- int32_t MicrophoneBoost(bool& enabled) const;
- int32_t MicrophoneVolumeIsAvailable(bool& available);
- int32_t SetMicrophoneVolume(uint32_t volume);
- int32_t MicrophoneVolume(uint32_t& volume) const;
- int32_t MaxMicrophoneVolume(uint32_t& maxVolume) const;
- int32_t MinMicrophoneVolume(uint32_t& minVolume) const;
- int32_t MicrophoneVolumeStepSize(uint16_t& stepSize) const;
- int32_t Close();
- int32_t CloseSpeaker();
- int32_t CloseMicrophone();
- bool SpeakerIsInitialized() const;
- bool MicrophoneIsInitialized() const;
-public:
- AudioMixerManagerMac(const int32_t id);
- ~AudioMixerManagerMac();
+class AudioMixerManagerMac {
+ public:
+ int32_t OpenSpeaker(AudioDeviceID deviceID);
+ int32_t OpenMicrophone(AudioDeviceID deviceID);
+ int32_t SetSpeakerVolume(uint32_t volume);
+ int32_t SpeakerVolume(uint32_t& volume) const;
+ int32_t MaxSpeakerVolume(uint32_t& maxVolume) const;
+ int32_t MinSpeakerVolume(uint32_t& minVolume) const;
+ int32_t SpeakerVolumeStepSize(uint16_t& stepSize) const;
+ int32_t SpeakerVolumeIsAvailable(bool& available);
+ int32_t SpeakerMuteIsAvailable(bool& available);
+ int32_t SetSpeakerMute(bool enable);
+ int32_t SpeakerMute(bool& enabled) const;
+ int32_t StereoPlayoutIsAvailable(bool& available);
+ int32_t StereoRecordingIsAvailable(bool& available);
+ int32_t MicrophoneMuteIsAvailable(bool& available);
+ int32_t SetMicrophoneMute(bool enable);
+ int32_t MicrophoneMute(bool& enabled) const;
+ int32_t MicrophoneBoostIsAvailable(bool& available);
+ int32_t SetMicrophoneBoost(bool enable);
+ int32_t MicrophoneBoost(bool& enabled) const;
+ int32_t MicrophoneVolumeIsAvailable(bool& available);
+ int32_t SetMicrophoneVolume(uint32_t volume);
+ int32_t MicrophoneVolume(uint32_t& volume) const;
+ int32_t MaxMicrophoneVolume(uint32_t& maxVolume) const;
+ int32_t MinMicrophoneVolume(uint32_t& minVolume) const;
+ int32_t MicrophoneVolumeStepSize(uint16_t& stepSize) const;
+ int32_t Close();
+ int32_t CloseSpeaker();
+ int32_t CloseMicrophone();
+ bool SpeakerIsInitialized() const;
+ bool MicrophoneIsInitialized() const;
-private:
- static void logCAMsg(const TraceLevel level,
- const TraceModule module,
- const int32_t id, const char *msg,
- const char *err);
+ public:
+ AudioMixerManagerMac(const int32_t id);
+ ~AudioMixerManagerMac();
-private:
- CriticalSectionWrapper& _critSect;
- int32_t _id;
+ private:
+ static void logCAMsg(const TraceLevel level,
+ const TraceModule module,
+ const int32_t id,
+ const char* msg,
+ const char* err);
- AudioDeviceID _inputDeviceID;
- AudioDeviceID _outputDeviceID;
+ private:
+ CriticalSectionWrapper& _critSect;
+ int32_t _id;
- uint16_t _noInputChannels;
- uint16_t _noOutputChannels;
+ AudioDeviceID _inputDeviceID;
+ AudioDeviceID _outputDeviceID;
+ uint16_t _noInputChannels;
+ uint16_t _noOutputChannels;
};
-
+
} // namespace webrtc
#endif // AUDIO_MIXER_MAC_H
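
logCAMsg() above works because CoreAudio packs most OSStatus codes as four ASCII characters in a big-endian 32-bit word, so printing the bytes in memory order on a little-endian Mac shows them reversed. A self-contained demo of the same flip, using a hard-coded code instead of a real CoreAudio call:

// Hedged demo of the byte flip logCAMsg performs. WEBRTC_ARCH_BIG_ENDIAN is
// the macro the file above tests; 0x77686F3F is 'who?'
// (kAudioHardwareUnknownPropertyError).
#include <cstdint>
#include <cstdio>

static void PrintFourCC(const char* msg, int32_t err) {
  const char* p = reinterpret_cast<const char*>(&err);
#ifdef WEBRTC_ARCH_BIG_ENDIAN
  printf("%s: %.4s\n", msg, p);
#else
  printf("%s: %c%c%c%c\n", msg, p[3], p[2], p[1], p[0]);  // reverse bytes
#endif
}

int main() {
  PrintFourCC("Error in AudioObjectGetPropertyData", 0x77686F3F);  // "who?"
  return 0;
}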
diff --git a/webrtc/modules/audio_device/main/interface/audio_device.h b/webrtc/modules/audio_device/main/interface/audio_device.h
deleted file mode 100644
index 71f16b685a..0000000000
--- a/webrtc/modules/audio_device/main/interface/audio_device.h
+++ /dev/null
@@ -1,16 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef MODULES_AUDIO_DEVICE_MAIN_INTERFACE_AUDIO_DEVICE_H_
-#define MODULES_AUDIO_DEVICE_MAIN_INTERFACE_AUDIO_DEVICE_H_
-
-#include "webrtc/modules/audio_device/include/audio_device.h"
-
-#endif // MODULES_AUDIO_DEVICE_MAIN_INTERFACE_AUDIO_DEVICE_H_
diff --git a/webrtc/modules/audio_device/main/source/OWNERS b/webrtc/modules/audio_device/main/source/OWNERS
deleted file mode 100644
index 3ee6b4bf5f..0000000000
--- a/webrtc/modules/audio_device/main/source/OWNERS
+++ /dev/null
@@ -1,5 +0,0 @@
-
-# These are for the common case of adding or renaming files. If you're doing
-# structural changes, please get a review from a reviewer in this file.
-per-file *.gyp=*
-per-file *.gypi=*
diff --git a/webrtc/modules/audio_device/main/source/audio_device.gypi b/webrtc/modules/audio_device/main/source/audio_device.gypi
deleted file mode 100644
index 126eb98ac1..0000000000
--- a/webrtc/modules/audio_device/main/source/audio_device.gypi
+++ /dev/null
@@ -1,14 +0,0 @@
-# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS. All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-
-{
- 'includes': [
- '../../audio_device.gypi',
- ],
-}
-
diff --git a/webrtc/modules/audio_device/test/audio_device_test_api.cc b/webrtc/modules/audio_device/test/audio_device_test_api.cc
index f36a049ffc..26a2dcd8c4 100644
--- a/webrtc/modules/audio_device/test/audio_device_test_api.cc
+++ b/webrtc/modules/audio_device/test/audio_device_test_api.cc
@@ -85,7 +85,7 @@ class AudioTransportAPI: public AudioTransport {
int32_t RecordedDataIsAvailable(const void* audioSamples,
const size_t nSamples,
const size_t nBytesPerSample,
- const uint8_t nChannels,
+ const size_t nChannels,
const uint32_t sampleRate,
const uint32_t totalDelay,
const int32_t clockSkew,
@@ -110,7 +110,7 @@ class AudioTransportAPI: public AudioTransport {
int32_t NeedMorePlayData(const size_t nSamples,
const size_t nBytesPerSample,
- const uint8_t nChannels,
+ const size_t nChannels,
const uint32_t sampleRate,
void* audioSamples,
size_t& nSamplesOut,
@@ -128,29 +128,6 @@ class AudioTransportAPI: public AudioTransport {
return 0;
}
- int OnDataAvailable(const int voe_channels[],
- int number_of_voe_channels,
- const int16_t* audio_data,
- int sample_rate,
- int number_of_channels,
- size_t number_of_frames,
- int audio_delay_milliseconds,
- int current_volume,
- bool key_pressed,
- bool need_audio_processing) override {
- return 0;
- }
-
- void PushCaptureData(int voe_channel, const void* audio_data,
- int bits_per_sample, int sample_rate,
- int number_of_channels,
- size_t number_of_frames) override {}
-
- void PullRenderData(int bits_per_sample, int sample_rate,
- int number_of_channels, size_t number_of_frames,
- void* audio_data,
- int64_t* elapsed_time_ms,
- int64_t* ntp_time_ms) override {}
private:
uint32_t rec_count_;
uint32_t play_count_;
@@ -1040,9 +1017,15 @@ TEST_F(AudioDeviceAPITest, MicrophoneVolumeIsAvailable) {
// MicrophoneVolume
// MaxMicrophoneVolume
// MinMicrophoneVolume
-// NOTE: Disabled on mac due to issue 257.
-#ifndef WEBRTC_MAC
-TEST_F(AudioDeviceAPITest, MicrophoneVolumeTests) {
+
+// Disabled on Mac and Linux,
+// see https://bugs.chromium.org/p/webrtc/issues/detail?id=5414
+#if defined(WEBRTC_MAC) || defined(WEBRTC_LINUX)
+#define MAYBE_MicrophoneVolumeTests DISABLED_MicrophoneVolumeTests
+#else
+#define MAYBE_MicrophoneVolumeTests MicrophoneVolumeTests
+#endif
+TEST_F(AudioDeviceAPITest, MAYBE_MicrophoneVolumeTests) {
uint32_t vol(0);
uint32_t volume(0);
uint32_t maxVolume(0);
@@ -1135,7 +1118,6 @@ TEST_F(AudioDeviceAPITest, MicrophoneVolumeTests) {
EXPECT_EQ(0, audio_device_->SetMicrophoneVolume(maxVolume/10));
}
}
-#endif // !WEBRTC_MAC
TEST_F(AudioDeviceAPITest, SpeakerMuteIsAvailable) {
bool available;
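
The MAYBE_ indirection introduced above is the standard googletest idiom for platform-conditional tests: a test whose name begins with DISABLED_ is still compiled but skipped at run time, so the code keeps building on the affected platforms. A generic sketch of the pattern (suite and test names are illustrative):

    #include "testing/gtest/include/gtest/gtest.h"

    // Alias the test name to a DISABLED_ variant on broken platforms.
    #if defined(WEBRTC_MAC) || defined(WEBRTC_LINUX)
    #define MAYBE_FeatureTest DISABLED_FeatureTest
    #else
    #define MAYBE_FeatureTest FeatureTest
    #endif

    TEST(ExampleSuite, MAYBE_FeatureTest) {
      EXPECT_EQ(0, 0);  // runs everywhere except where aliased to DISABLED_
    }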
diff --git a/webrtc/modules/audio_device/test/audio_device_test_defines.h b/webrtc/modules/audio_device/test/audio_device_test_defines.h
index 5068646bdd..cc8e3e3aef 100644
--- a/webrtc/modules/audio_device/test/audio_device_test_defines.h
+++ b/webrtc/modules/audio_device/test/audio_device_test_defines.h
@@ -13,7 +13,7 @@
#include "webrtc/common_types.h"
#include "webrtc/modules/audio_device/include/audio_device.h"
-#include "webrtc/modules/utility/interface/process_thread.h"
+#include "webrtc/modules/utility/include/process_thread.h"
#include "webrtc/system_wrappers/include/trace.h"
#ifdef _WIN32
diff --git a/webrtc/modules/audio_device/test/func_test_manager.cc b/webrtc/modules/audio_device/test/func_test_manager.cc
index 0ebfc8395f..0a2963e9ae 100644
--- a/webrtc/modules/audio_device/test/func_test_manager.cc
+++ b/webrtc/modules/audio_device/test/func_test_manager.cc
@@ -194,7 +194,7 @@ int32_t AudioTransportImpl::RecordedDataIsAvailable(
const void* audioSamples,
const size_t nSamples,
const size_t nBytesPerSample,
- const uint8_t nChannels,
+ const size_t nChannels,
const uint32_t samplesPerSec,
const uint32_t totalDelayMS,
const int32_t clockDrift,
@@ -339,7 +339,7 @@ int32_t AudioTransportImpl::RecordedDataIsAvailable(
int32_t AudioTransportImpl::NeedMorePlayData(
const size_t nSamples,
const size_t nBytesPerSample,
- const uint8_t nChannels,
+ const size_t nChannels,
const uint32_t samplesPerSec,
void* audioSamples,
size_t& nSamplesOut,
@@ -365,7 +365,7 @@ int32_t AudioTransportImpl::NeedMorePlayData(
int16_t* ptr16Out = NULL;
const size_t nSamplesIn = packet->nSamples;
- const uint8_t nChannelsIn = packet->nChannels;
+ const size_t nChannelsIn = packet->nChannels;
const uint32_t samplesPerSecIn = packet->samplesPerSec;
const size_t nBytesPerSampleIn = packet->nBytesPerSample;
@@ -573,32 +573,6 @@ int32_t AudioTransportImpl::NeedMorePlayData(
return 0;
}
-int AudioTransportImpl::OnDataAvailable(const int voe_channels[],
- int number_of_voe_channels,
- const int16_t* audio_data,
- int sample_rate,
- int number_of_channels,
- size_t number_of_frames,
- int audio_delay_milliseconds,
- int current_volume,
- bool key_pressed,
- bool need_audio_processing) {
- return 0;
-}
-
-void AudioTransportImpl::PushCaptureData(int voe_channel,
- const void* audio_data,
- int bits_per_sample, int sample_rate,
- int number_of_channels,
- size_t number_of_frames) {}
-
-void AudioTransportImpl::PullRenderData(int bits_per_sample, int sample_rate,
- int number_of_channels,
- size_t number_of_frames,
- void* audio_data,
- int64_t* elapsed_time_ms,
- int64_t* ntp_time_ms) {}
-
FuncTestManager::FuncTestManager() :
_audioDevice(NULL),
_audioEventObserver(NULL),
@@ -686,7 +660,7 @@ int32_t FuncTestManager::Close()
_audioDevice = NULL;
}
- // return the ThreadWrapper (singleton)
+    // Return the Trace singleton.
Trace::ReturnTrace();
// PRINT_TEST_RESULTS;
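
The nChannels type change from uint8_t to size_t in the transport callbacks keeps all size arithmetic in size_t, matching nSamples and nBytesPerSample in the same signatures. A hypothetical helper (not part of the API) illustrating the benefit:

    #include <cstddef>

    // With every factor already size_t, the byte count is computed without
    // integer promotion or narrowing from an 8-bit channel count.
    static size_t BufferBytes(size_t n_samples, size_t n_channels,
                              size_t bytes_per_mono_sample) {
      return n_samples * n_channels * bytes_per_mono_sample;
    }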
diff --git a/webrtc/modules/audio_device/test/func_test_manager.h b/webrtc/modules/audio_device/test/func_test_manager.h
index 6ef13490d8..b7cc81cc1a 100644
--- a/webrtc/modules/audio_device/test/func_test_manager.h
+++ b/webrtc/modules/audio_device/test/func_test_manager.h
@@ -49,7 +49,7 @@ struct AudioPacket
uint8_t dataBuffer[4 * 960];
size_t nSamples;
size_t nBytesPerSample;
- uint8_t nChannels;
+ size_t nChannels;
uint32_t samplesPerSec;
};
@@ -88,7 +88,7 @@ public:
int32_t RecordedDataIsAvailable(const void* audioSamples,
const size_t nSamples,
const size_t nBytesPerSample,
- const uint8_t nChannels,
+ const size_t nChannels,
const uint32_t samplesPerSec,
const uint32_t totalDelayMS,
const int32_t clockDrift,
@@ -98,35 +98,13 @@ public:
int32_t NeedMorePlayData(const size_t nSamples,
const size_t nBytesPerSample,
- const uint8_t nChannels,
+ const size_t nChannels,
const uint32_t samplesPerSec,
void* audioSamples,
size_t& nSamplesOut,
int64_t* elapsed_time_ms,
int64_t* ntp_time_ms) override;
- int OnDataAvailable(const int voe_channels[],
- int number_of_voe_channels,
- const int16_t* audio_data,
- int sample_rate,
- int number_of_channels,
- size_t number_of_frames,
- int audio_delay_milliseconds,
- int current_volume,
- bool key_pressed,
- bool need_audio_processing) override;
-
- void PushCaptureData(int voe_channel, const void* audio_data,
- int bits_per_sample, int sample_rate,
- int number_of_channels,
- size_t number_of_frames) override;
-
- void PullRenderData(int bits_per_sample, int sample_rate,
- int number_of_channels, size_t number_of_frames,
- void* audio_data,
- int64_t* elapsed_time_ms,
- int64_t* ntp_time_ms) override;
-
AudioTransportImpl(AudioDeviceModule* audioDevice);
~AudioTransportImpl();
diff --git a/webrtc/modules/audio_device/win/audio_device_wave_win.cc b/webrtc/modules/audio_device/win/audio_device_wave_win.cc
index 96bee7425a..6f4d7df397 100644
--- a/webrtc/modules/audio_device/win/audio_device_wave_win.cc
+++ b/webrtc/modules/audio_device/win/audio_device_wave_win.cc
@@ -228,15 +228,9 @@ int32_t AudioDeviceWindowsWave::Init()
}
const char* threadName = "webrtc_audio_module_thread";
- _ptrThread = ThreadWrapper::CreateThread(ThreadFunc, this, threadName);
- if (!_ptrThread->Start())
- {
- WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
- "failed to start the audio thread");
- _ptrThread.reset();
- return -1;
- }
- _ptrThread->SetPriority(kRealtimePriority);
+ _ptrThread.reset(new rtc::PlatformThread(ThreadFunc, this, threadName));
+ _ptrThread->Start();
+ _ptrThread->SetPriority(rtc::kRealtimePriority);
const bool periodic(true);
if (!_timeEvent.StartTimer(periodic, TIMER_PERIOD_MS))
@@ -250,12 +244,8 @@ int32_t AudioDeviceWindowsWave::Init()
WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
"periodic timer (dT=%d) is now active", TIMER_PERIOD_MS);
- _hGetCaptureVolumeThread = CreateThread(NULL,
- 0,
- GetCaptureVolumeThread,
- this,
- 0,
- NULL);
+ _hGetCaptureVolumeThread =
+ CreateThread(NULL, 0, GetCaptureVolumeThread, this, 0, NULL);
if (_hGetCaptureVolumeThread == NULL)
{
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
@@ -265,12 +255,8 @@ int32_t AudioDeviceWindowsWave::Init()
SetThreadPriority(_hGetCaptureVolumeThread, THREAD_PRIORITY_NORMAL);
- _hSetCaptureVolumeThread = CreateThread(NULL,
- 0,
- SetCaptureVolumeThread,
- this,
- 0,
- NULL);
+ _hSetCaptureVolumeThread =
+ CreateThread(NULL, 0, SetCaptureVolumeThread, this, 0, NULL);
if (_hSetCaptureVolumeThread == NULL)
{
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
@@ -303,7 +289,7 @@ int32_t AudioDeviceWindowsWave::Terminate()
if (_ptrThread)
{
- ThreadWrapper* tmpThread = _ptrThread.release();
+ rtc::PlatformThread* tmpThread = _ptrThread.release();
_critSect.Leave();
_timeEvent.Set();
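
The Init() change above is the rtc::PlatformThread pattern that replaces ThreadWrapper: construct with a run function, Start(), then SetPriority(). A minimal sketch, assuming the platform_thread.h of this vintage takes a bool(*)(void*) run function that is called repeatedly until it returns false (callback body and helper name are illustrative):

    #include "webrtc/base/platform_thread.h"
    #include "webrtc/base/scoped_ptr.h"

    // Return true to be invoked again, false to stop the loop.
    static bool AudioThreadFunc(void* obj) {
      // ... one iteration of audio work on 'obj' ...
      return true;
    }

    void StartAudioThread(void* ctx,
                          rtc::scoped_ptr<rtc::PlatformThread>* thread) {
      thread->reset(new rtc::PlatformThread(AudioThreadFunc, ctx,
                                            "webrtc_audio_module_thread"));
      (*thread)->Start();  // unlike ThreadWrapper::Start(), no error code
      (*thread)->SetPriority(rtc::kRealtimePriority);
    }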
diff --git a/webrtc/modules/audio_device/win/audio_device_wave_win.h b/webrtc/modules/audio_device/win/audio_device_wave_win.h
index c99185c3ab..a1cfc6acbf 100644
--- a/webrtc/modules/audio_device/win/audio_device_wave_win.h
+++ b/webrtc/modules/audio_device/win/audio_device_wave_win.h
@@ -11,9 +11,9 @@
#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_WAVE_WIN_H
#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_WAVE_WIN_H
+#include "webrtc/base/platform_thread.h"
#include "webrtc/modules/audio_device/audio_device_generic.h"
#include "webrtc/modules/audio_device/win/audio_mixer_manager_win.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
#pragma comment( lib, "winmm.lib" )
@@ -222,7 +222,8 @@ private:
HANDLE _hShutdownSetVolumeEvent;
HANDLE _hSetCaptureVolumeEvent;
- rtc::scoped_ptr<ThreadWrapper> _ptrThread;
+ // TODO(pbos): Remove scoped_ptr usage and use PlatformThread directly
+ rtc::scoped_ptr<rtc::PlatformThread> _ptrThread;
CriticalSectionWrapper& _critSectCb;
diff --git a/webrtc/modules/audio_processing/BUILD.gn b/webrtc/modules/audio_processing/BUILD.gn
index 2d0c602ef0..9d91911bc2 100644
--- a/webrtc/modules/audio_processing/BUILD.gn
+++ b/webrtc/modules/audio_processing/BUILD.gn
@@ -30,12 +30,12 @@ source_set("audio_processing") {
"aec/aec_resampler.c",
"aec/aec_resampler.h",
"aec/echo_cancellation.c",
+ "aec/echo_cancellation.h",
"aec/echo_cancellation_internal.h",
- "aec/include/echo_cancellation.h",
"aecm/aecm_core.c",
"aecm/aecm_core.h",
"aecm/echo_control_mobile.c",
- "aecm/include/echo_control_mobile.h",
+ "aecm/echo_control_mobile.h",
"agc/agc.cc",
"agc/agc.h",
"agc/agc_manager_direct.cc",
@@ -161,8 +161,8 @@ source_set("audio_processing") {
if (rtc_prefer_fixed_point) {
defines += [ "WEBRTC_NS_FIXED" ]
sources += [
- "ns/include/noise_suppression_x.h",
"ns/noise_suppression_x.c",
+ "ns/noise_suppression_x.h",
"ns/nsx_core.c",
"ns/nsx_core.h",
"ns/nsx_defines.h",
@@ -176,8 +176,8 @@ source_set("audio_processing") {
defines += [ "WEBRTC_NS_FLOAT" ]
sources += [
"ns/defines.h",
- "ns/include/noise_suppression.h",
"ns/noise_suppression.c",
+ "ns/noise_suppression.h",
"ns/ns_core.c",
"ns/ns_core.h",
"ns/windows_private.h",
diff --git a/webrtc/modules/audio_processing/OWNERS b/webrtc/modules/audio_processing/OWNERS
index 7b760682b0..d14f7f8614 100644
--- a/webrtc/modules/audio_processing/OWNERS
+++ b/webrtc/modules/audio_processing/OWNERS
@@ -1,5 +1,4 @@
aluebs@webrtc.org
-andrew@webrtc.org
henrik.lundin@webrtc.org
peah@webrtc.org
diff --git a/webrtc/modules/audio_processing/aec/aec_core.c b/webrtc/modules/audio_processing/aec/aec_core.c
index f8eed32372..901e0fde0b 100644
--- a/webrtc/modules/audio_processing/aec/aec_core.c
+++ b/webrtc/modules/audio_processing/aec/aec_core.c
@@ -44,7 +44,6 @@ static const int countLen = 50;
static const int kDelayMetricsAggregationWindow = 1250; // 5 seconds at 16 kHz.
// Quantities to control H band scaling for SWB input
-static const int flagHbandCn = 1; // flag for adding comfort noise in H band
static const float cnScaleHband =
(float)0.4; // scale for comfort noise in H band
// Initial bin for averaging nlp gain in low band
@@ -135,6 +134,9 @@ WebRtcAecFilterAdaptation WebRtcAec_FilterAdaptation;
WebRtcAecOverdriveAndSuppress WebRtcAec_OverdriveAndSuppress;
WebRtcAecComfortNoise WebRtcAec_ComfortNoise;
WebRtcAecSubBandCoherence WebRtcAec_SubbandCoherence;
+WebRtcAecStoreAsComplex WebRtcAec_StoreAsComplex;
+WebRtcAecPartitionDelay WebRtcAec_PartitionDelay;
+WebRtcAecWindowData WebRtcAec_WindowData;
__inline static float MulRe(float aRe, float aIm, float bRe, float bIm) {
return aRe * bRe - aIm * bIm;
@@ -151,40 +153,49 @@ static int CmpFloat(const void* a, const void* b) {
return (*da > *db) - (*da < *db);
}
-static void FilterFar(AecCore* aec, float yf[2][PART_LEN1]) {
+static void FilterFar(
+ int num_partitions,
+ int x_fft_buf_block_pos,
+ float x_fft_buf[2][kExtendedNumPartitions * PART_LEN1],
+ float h_fft_buf[2][kExtendedNumPartitions * PART_LEN1],
+ float y_fft[2][PART_LEN1]) {
int i;
- for (i = 0; i < aec->num_partitions; i++) {
+ for (i = 0; i < num_partitions; i++) {
int j;
- int xPos = (i + aec->xfBufBlockPos) * PART_LEN1;
+ int xPos = (i + x_fft_buf_block_pos) * PART_LEN1;
int pos = i * PART_LEN1;
// Check for wrap
- if (i + aec->xfBufBlockPos >= aec->num_partitions) {
- xPos -= aec->num_partitions * (PART_LEN1);
+ if (i + x_fft_buf_block_pos >= num_partitions) {
+ xPos -= num_partitions * (PART_LEN1);
}
for (j = 0; j < PART_LEN1; j++) {
- yf[0][j] += MulRe(aec->xfBuf[0][xPos + j],
- aec->xfBuf[1][xPos + j],
- aec->wfBuf[0][pos + j],
- aec->wfBuf[1][pos + j]);
- yf[1][j] += MulIm(aec->xfBuf[0][xPos + j],
- aec->xfBuf[1][xPos + j],
- aec->wfBuf[0][pos + j],
- aec->wfBuf[1][pos + j]);
+ y_fft[0][j] += MulRe(x_fft_buf[0][xPos + j],
+ x_fft_buf[1][xPos + j],
+ h_fft_buf[0][pos + j],
+ h_fft_buf[1][pos + j]);
+ y_fft[1][j] += MulIm(x_fft_buf[0][xPos + j],
+ x_fft_buf[1][xPos + j],
+ h_fft_buf[0][pos + j],
+ h_fft_buf[1][pos + j]);
}
}
}
-static void ScaleErrorSignal(AecCore* aec, float ef[2][PART_LEN1]) {
- const float mu = aec->extended_filter_enabled ? kExtendedMu : aec->normal_mu;
- const float error_threshold = aec->extended_filter_enabled
+static void ScaleErrorSignal(int extended_filter_enabled,
+ float normal_mu,
+ float normal_error_threshold,
+ float x_pow[PART_LEN1],
+ float ef[2][PART_LEN1]) {
+ const float mu = extended_filter_enabled ? kExtendedMu : normal_mu;
+ const float error_threshold = extended_filter_enabled
? kExtendedErrorThreshold
- : aec->normal_error_threshold;
+ : normal_error_threshold;
int i;
float abs_ef;
for (i = 0; i < (PART_LEN1); i++) {
- ef[0][i] /= (aec->xPow[i] + 1e-10f);
- ef[1][i] /= (aec->xPow[i] + 1e-10f);
+ ef[0][i] /= (x_pow[i] + 1e-10f);
+ ef[1][i] /= (x_pow[i] + 1e-10f);
abs_ef = sqrtf(ef[0][i] * ef[0][i] + ef[1][i] * ef[1][i]);
if (abs_ef > error_threshold) {
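
The refactored FilterFar is a plain frequency-domain multiply-accumulate, Y += X * H per partition, computed bin by bin in split real/imaginary form with the MulRe/MulIm helpers defined earlier in this file. The per-bin arithmetic in isolation (65 = PART_LEN1 for the 64-sample partitions used here):

    // Split-format complex multiply, as in aec_core.c.
    static float MulRe(float aRe, float aIm, float bRe, float bIm) {
      return aRe * bRe - aIm * bIm;
    }
    static float MulIm(float aRe, float aIm, float bRe, float bIm) {
      return aRe * bIm + aIm * bRe;
    }

    // Accumulate y += x * h over the bins of one partition.
    static void ComplexMac(const float x[2][65], const float h[2][65],
                           float y[2][65]) {
      for (int j = 0; j < 65; ++j) {
        y[0][j] += MulRe(x[0][j], x[1][j], h[0][j], h[1][j]);
        y[1][j] += MulIm(x[0][j], x[1][j], h[0][j], h[1][j]);
      }
    }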
@@ -199,59 +210,40 @@ static void ScaleErrorSignal(AecCore* aec, float ef[2][PART_LEN1]) {
}
}
-// Time-unconstrined filter adaptation.
-// TODO(andrew): consider for a low-complexity mode.
-// static void FilterAdaptationUnconstrained(AecCore* aec, float *fft,
-// float ef[2][PART_LEN1]) {
-// int i, j;
-// for (i = 0; i < aec->num_partitions; i++) {
-// int xPos = (i + aec->xfBufBlockPos)*(PART_LEN1);
-// int pos;
-// // Check for wrap
-// if (i + aec->xfBufBlockPos >= aec->num_partitions) {
-// xPos -= aec->num_partitions * PART_LEN1;
-// }
-//
-// pos = i * PART_LEN1;
-//
-// for (j = 0; j < PART_LEN1; j++) {
-// aec->wfBuf[0][pos + j] += MulRe(aec->xfBuf[0][xPos + j],
-// -aec->xfBuf[1][xPos + j],
-// ef[0][j], ef[1][j]);
-// aec->wfBuf[1][pos + j] += MulIm(aec->xfBuf[0][xPos + j],
-// -aec->xfBuf[1][xPos + j],
-// ef[0][j], ef[1][j]);
-// }
-// }
-//}
-
-static void FilterAdaptation(AecCore* aec, float* fft, float ef[2][PART_LEN1]) {
+
+static void FilterAdaptation(
+ int num_partitions,
+ int x_fft_buf_block_pos,
+ float x_fft_buf[2][kExtendedNumPartitions * PART_LEN1],
+ float e_fft[2][PART_LEN1],
+ float h_fft_buf[2][kExtendedNumPartitions * PART_LEN1]) {
int i, j;
- for (i = 0; i < aec->num_partitions; i++) {
- int xPos = (i + aec->xfBufBlockPos) * (PART_LEN1);
+ float fft[PART_LEN2];
+ for (i = 0; i < num_partitions; i++) {
+ int xPos = (i + x_fft_buf_block_pos) * (PART_LEN1);
int pos;
// Check for wrap
- if (i + aec->xfBufBlockPos >= aec->num_partitions) {
- xPos -= aec->num_partitions * PART_LEN1;
+ if (i + x_fft_buf_block_pos >= num_partitions) {
+ xPos -= num_partitions * PART_LEN1;
}
pos = i * PART_LEN1;
for (j = 0; j < PART_LEN; j++) {
- fft[2 * j] = MulRe(aec->xfBuf[0][xPos + j],
- -aec->xfBuf[1][xPos + j],
- ef[0][j],
- ef[1][j]);
- fft[2 * j + 1] = MulIm(aec->xfBuf[0][xPos + j],
- -aec->xfBuf[1][xPos + j],
- ef[0][j],
- ef[1][j]);
+ fft[2 * j] = MulRe(x_fft_buf[0][xPos + j],
+ -x_fft_buf[1][xPos + j],
+ e_fft[0][j],
+ e_fft[1][j]);
+ fft[2 * j + 1] = MulIm(x_fft_buf[0][xPos + j],
+ -x_fft_buf[1][xPos + j],
+ e_fft[0][j],
+ e_fft[1][j]);
}
- fft[1] = MulRe(aec->xfBuf[0][xPos + PART_LEN],
- -aec->xfBuf[1][xPos + PART_LEN],
- ef[0][PART_LEN],
- ef[1][PART_LEN]);
+ fft[1] = MulRe(x_fft_buf[0][xPos + PART_LEN],
+ -x_fft_buf[1][xPos + PART_LEN],
+ e_fft[0][PART_LEN],
+ e_fft[1][PART_LEN]);
aec_rdft_inverse_128(fft);
memset(fft + PART_LEN, 0, sizeof(float) * PART_LEN);
@@ -265,12 +257,12 @@ static void FilterAdaptation(AecCore* aec, float* fft, float ef[2][PART_LEN1]) {
}
aec_rdft_forward_128(fft);
- aec->wfBuf[0][pos] += fft[0];
- aec->wfBuf[0][pos + PART_LEN] += fft[1];
+ h_fft_buf[0][pos] += fft[0];
+ h_fft_buf[0][pos + PART_LEN] += fft[1];
for (j = 1; j < PART_LEN; j++) {
- aec->wfBuf[0][pos + j] += fft[2 * j];
- aec->wfBuf[1][pos + j] += fft[2 * j + 1];
+ h_fft_buf[0][pos + j] += fft[2 * j];
+ h_fft_buf[1][pos + j] += fft[2 * j + 1];
}
}
}
@@ -334,12 +326,13 @@ const float WebRtcAec_kMinFarendPSD = 15;
// - sde : cross-PSD of near-end and residual echo
// - sxd : cross-PSD of near-end and far-end
//
-// In addition to updating the PSDs, also the filter diverge state is determined
-// upon actions are taken.
+// In addition to updating the PSDs, the filter divergence state is also
+// determined.
static void SmoothedPSD(AecCore* aec,
float efw[2][PART_LEN1],
float dfw[2][PART_LEN1],
- float xfw[2][PART_LEN1]) {
+ float xfw[2][PART_LEN1],
+ int* extreme_filter_divergence) {
// Power estimate smoothing coefficients.
const float* ptrGCoh = aec->extended_filter_enabled
? WebRtcAec_kExtendedSmoothingCoefficients[aec->mult - 1]
@@ -380,15 +373,12 @@ static void SmoothedPSD(AecCore* aec,
seSum += aec->se[i];
}
- // Divergent filter safeguard.
+ // Divergent filter safeguard update.
aec->divergeState = (aec->divergeState ? 1.05f : 1.0f) * seSum > sdSum;
- if (aec->divergeState)
- memcpy(efw, dfw, sizeof(efw[0][0]) * 2 * PART_LEN1);
-
- // Reset if error is significantly larger than nearend (13 dB).
- if (!aec->extended_filter_enabled && seSum > (19.95f * sdSum))
- memset(aec->wfBuf, 0, sizeof(aec->wfBuf));
+ // Signal extreme filter divergence if the error is significantly larger
+ // than the nearend (13 dB).
+ *extreme_filter_divergence = (seSum > (19.95f * sdSum));
}
// Window time domain data to be used by the fft.
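
The divergence safeguard above compares the summed error PSD against the summed near-end PSD with a small hysteresis factor, so the state does not toggle on every block, and flags extreme divergence when the error exceeds the near-end by roughly 13 dB (19.95 is about 10^1.3). Condensed into two helpers:

    // diverge_state carries over between blocks; the 1.05 factor adds
    // hysteresis once the filter has been declared divergent.
    static int UpdateDivergeState(int diverge_state, float se_sum,
                                  float sd_sum) {
      return (diverge_state ? 1.05f : 1.0f) * se_sum > sd_sum;
    }

    // Extreme divergence triggers a filter reset in EchoSubtraction.
    static int ExtremeDivergence(float se_sum, float sd_sum) {
      return se_sum > 19.95f * sd_sum;  // ~13 dB above the near-end power
    }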
@@ -417,32 +407,15 @@ __inline static void StoreAsComplex(const float* data,
static void SubbandCoherence(AecCore* aec,
float efw[2][PART_LEN1],
+ float dfw[2][PART_LEN1],
float xfw[2][PART_LEN1],
float* fft,
float* cohde,
- float* cohxd) {
- float dfw[2][PART_LEN1];
+ float* cohxd,
+ int* extreme_filter_divergence) {
int i;
- if (aec->delayEstCtr == 0)
- aec->delayIdx = PartitionDelay(aec);
-
- // Use delayed far.
- memcpy(xfw,
- aec->xfwBuf + aec->delayIdx * PART_LEN1,
- sizeof(xfw[0][0]) * 2 * PART_LEN1);
-
- // Windowed near fft
- WindowData(fft, aec->dBuf);
- aec_rdft_forward_128(fft);
- StoreAsComplex(fft, dfw);
-
- // Windowed error fft
- WindowData(fft, aec->eBuf);
- aec_rdft_forward_128(fft);
- StoreAsComplex(fft, efw);
-
- SmoothedPSD(aec, efw, dfw, xfw);
+ SmoothedPSD(aec, efw, dfw, xfw, extreme_filter_divergence);
// Subband coherence
for (i = 0; i < PART_LEN1; i++) {
@@ -458,23 +431,23 @@ static void SubbandCoherence(AecCore* aec,
static void GetHighbandGain(const float* lambda, float* nlpGainHband) {
int i;
- nlpGainHband[0] = (float)0.0;
+ *nlpGainHband = (float)0.0;
for (i = freqAvgIc; i < PART_LEN1 - 1; i++) {
- nlpGainHband[0] += lambda[i];
+ *nlpGainHband += lambda[i];
}
- nlpGainHband[0] /= (float)(PART_LEN1 - 1 - freqAvgIc);
+ *nlpGainHband /= (float)(PART_LEN1 - 1 - freqAvgIc);
}
static void ComfortNoise(AecCore* aec,
float efw[2][PART_LEN1],
- complex_t* comfortNoiseHband,
+ float comfortNoiseHband[2][PART_LEN1],
const float* noisePow,
const float* lambda) {
int i, num;
float rand[PART_LEN];
float noise, noiseAvg, tmp, tmpAvg;
int16_t randW16[PART_LEN];
- complex_t u[PART_LEN1];
+ float u[2][PART_LEN1];
const float pi2 = 6.28318530717959f;
@@ -486,22 +459,22 @@ static void ComfortNoise(AecCore* aec,
// Reject LF noise
u[0][0] = 0;
- u[0][1] = 0;
+ u[1][0] = 0;
for (i = 1; i < PART_LEN1; i++) {
tmp = pi2 * rand[i - 1];
noise = sqrtf(noisePow[i]);
- u[i][0] = noise * cosf(tmp);
- u[i][1] = -noise * sinf(tmp);
+ u[0][i] = noise * cosf(tmp);
+ u[1][i] = -noise * sinf(tmp);
}
- u[PART_LEN][1] = 0;
+ u[1][PART_LEN] = 0;
for (i = 0; i < PART_LEN1; i++) {
// This is the proper weighting to match the background noise power
tmp = sqrtf(WEBRTC_SPL_MAX(1 - lambda[i] * lambda[i], 0));
// tmp = 1 - lambda[i];
- efw[0][i] += tmp * u[i][0];
- efw[1][i] += tmp * u[i][1];
+ efw[0][i] += tmp * u[0][i];
+ efw[1][i] += tmp * u[1][i];
}
// For H band comfort noise
@@ -509,7 +482,7 @@ static void ComfortNoise(AecCore* aec,
noiseAvg = 0.0;
tmpAvg = 0.0;
num = 0;
- if (aec->num_bands > 1 && flagHbandCn == 1) {
+ if (aec->num_bands > 1) {
// average noise scale
// average over second half of freq spectrum (i.e., 4->8khz)
@@ -534,21 +507,24 @@ static void ComfortNoise(AecCore* aec,
// TODO: we should probably have a new random vector here.
// Reject LF noise
u[0][0] = 0;
- u[0][1] = 0;
+ u[1][0] = 0;
for (i = 1; i < PART_LEN1; i++) {
tmp = pi2 * rand[i - 1];
// Use average noise for H band
- u[i][0] = noiseAvg * (float)cos(tmp);
- u[i][1] = -noiseAvg * (float)sin(tmp);
+ u[0][i] = noiseAvg * (float)cos(tmp);
+ u[1][i] = -noiseAvg * (float)sin(tmp);
}
- u[PART_LEN][1] = 0;
+ u[1][PART_LEN] = 0;
for (i = 0; i < PART_LEN1; i++) {
// Use average NLP weight for H band
- comfortNoiseHband[i][0] = tmpAvg * u[i][0];
- comfortNoiseHband[i][1] = tmpAvg * u[i][1];
+ comfortNoiseHband[0][i] = tmpAvg * u[0][i];
+ comfortNoiseHband[1][i] = tmpAvg * u[1][i];
}
+ } else {
+ memset(comfortNoiseHband, 0,
+ 2 * PART_LEN1 * sizeof(comfortNoiseHband[0][0]));
}
}
@@ -837,21 +813,29 @@ static void UpdateDelayMetrics(AecCore* self) {
return;
}
-static void TimeToFrequency(float time_data[PART_LEN2],
- float freq_data[2][PART_LEN1],
- int window) {
- int i = 0;
-
- // TODO(bjornv): Should we have a different function/wrapper for windowed FFT?
- if (window) {
- for (i = 0; i < PART_LEN; i++) {
- time_data[i] *= WebRtcAec_sqrtHanning[i];
- time_data[PART_LEN + i] *= WebRtcAec_sqrtHanning[PART_LEN - i];
- }
+static void ScaledInverseFft(float freq_data[2][PART_LEN1],
+ float time_data[PART_LEN2],
+ float scale,
+ int conjugate) {
+ int i;
+ const float normalization = scale / ((float)PART_LEN2);
+ const float sign = (conjugate ? -1 : 1);
+ time_data[0] = freq_data[0][0] * normalization;
+ time_data[1] = freq_data[0][PART_LEN] * normalization;
+ for (i = 1; i < PART_LEN; i++) {
+ time_data[2 * i] = freq_data[0][i] * normalization;
+ time_data[2 * i + 1] = sign * freq_data[1][i] * normalization;
}
+ aec_rdft_inverse_128(time_data);
+}
+
+static void Fft(float time_data[PART_LEN2],
+ float freq_data[2][PART_LEN1]) {
+ int i;
aec_rdft_forward_128(time_data);
- // Reorder.
+
+ // Reorder fft output data.
freq_data[1][0] = 0;
freq_data[1][PART_LEN] = 0;
freq_data[0][0] = time_data[0];
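
Fft() and ScaledInverseFft() above wrap the Ooura 128-point real FFT, which packs the spectrum into 128 floats: time_data[0] carries the DC bin, time_data[1] the Nyquist bin, and interleaved re/im pairs follow; the split-plane reorder makes that layout explicit. With the 2/N normalization, a forward/inverse pass reproduces the input, as this hedged round-trip sketch (assuming access to the two static functions above) indicates:

    #include <string.h>

    void RoundTripCheck(const float input[128]) {
      float time_data[128];
      float freq_data[2][65];  // [0] = real plane, [1] = imaginary plane
      memcpy(time_data, input, sizeof(time_data));
      Fft(time_data, freq_data);                        // forward + reorder
      ScaledInverseFft(freq_data, time_data, 2.0f, 0);  // 2/N scale, no conj
      // time_data should now match input to round-off precision.
    }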
@@ -862,13 +846,6 @@ static void TimeToFrequency(float time_data[PART_LEN2],
}
}
-static int MoveFarReadPtrWithoutSystemDelayUpdate(AecCore* self, int elements) {
- WebRtc_MoveReadPtr(self->far_buf_windowed, elements);
-#ifdef WEBRTC_AEC_DEBUG_DUMP
- WebRtc_MoveReadPtr(self->far_time_buf, elements);
-#endif
- return WebRtc_MoveReadPtr(self->far_buf, elements);
-}
static int SignalBasedDelayCorrection(AecCore* self) {
int delay_correction = 0;
@@ -909,7 +886,7 @@ static int SignalBasedDelayCorrection(AecCore* self) {
const int upper_bound = self->num_partitions * 3 / 4;
const int do_correction = delay <= lower_bound || delay > upper_bound;
if (do_correction == 1) {
- int available_read = (int)WebRtc_available_read(self->far_buf);
+ int available_read = (int)WebRtc_available_read(self->far_time_buf);
// With |shift_offset| we gradually rely on the delay estimates. For
// positive delays we reduce the correction by |shift_offset| to lower the
// risk of pushing the AEC into a non causal state. For negative delays
@@ -942,13 +919,94 @@ static int SignalBasedDelayCorrection(AecCore* self) {
return delay_correction;
}
-static void NonLinearProcessing(AecCore* aec,
- float* output,
- float* const* outputH) {
- float efw[2][PART_LEN1], xfw[2][PART_LEN1];
- complex_t comfortNoiseHband[PART_LEN1];
+static void EchoSubtraction(
+ AecCore* aec,
+ int num_partitions,
+ int x_fft_buf_block_pos,
+ int metrics_mode,
+ int extended_filter_enabled,
+ float normal_mu,
+ float normal_error_threshold,
+ float x_fft_buf[2][kExtendedNumPartitions * PART_LEN1],
+ float* const y,
+ float x_pow[PART_LEN1],
+ float h_fft_buf[2][kExtendedNumPartitions * PART_LEN1],
+ PowerLevel* linout_level,
+ float echo_subtractor_output[PART_LEN]) {
+ float s_fft[2][PART_LEN1];
+ float e_extended[PART_LEN2];
+ float s_extended[PART_LEN2];
+ float *s;
+ float e[PART_LEN];
+ float e_fft[2][PART_LEN1];
+ int i;
+ memset(s_fft, 0, sizeof(s_fft));
+
+ // Conditionally reset the echo subtraction filter if the filter has diverged
+ // significantly.
+ if (!aec->extended_filter_enabled &&
+ aec->extreme_filter_divergence) {
+ memset(aec->wfBuf, 0, sizeof(aec->wfBuf));
+ aec->extreme_filter_divergence = 0;
+ }
+
+ // Produce echo estimate s_fft.
+ WebRtcAec_FilterFar(num_partitions,
+ x_fft_buf_block_pos,
+ x_fft_buf,
+ h_fft_buf,
+ s_fft);
+
+ // Compute the time-domain echo estimate s.
+ ScaledInverseFft(s_fft, s_extended, 2.0f, 0);
+ s = &s_extended[PART_LEN];
+
+ // Compute the time-domain echo prediction error.
+ for (i = 0; i < PART_LEN; ++i) {
+ e[i] = y[i] - s[i];
+ }
+
+ // Compute the frequency domain echo prediction error.
+ memset(e_extended, 0, sizeof(float) * PART_LEN);
+ memcpy(e_extended + PART_LEN, e, sizeof(float) * PART_LEN);
+ Fft(e_extended, e_fft);
+
+ RTC_AEC_DEBUG_RAW_WRITE(aec->e_fft_file,
+ &e_fft[0][0],
+ sizeof(e_fft[0][0]) * PART_LEN1 * 2);
+
+ if (metrics_mode == 1) {
+ // Note that the first PART_LEN samples in fft (before transformation) are
+ // zero. Hence, the scaling by two in UpdateLevel() should not be
+ // performed. That scaling is taken care of in UpdateMetrics() instead.
+ UpdateLevel(linout_level, e_fft);
+ }
+
+ // Scale error signal inversely with far power.
+ WebRtcAec_ScaleErrorSignal(extended_filter_enabled,
+ normal_mu,
+ normal_error_threshold,
+ x_pow,
+ e_fft);
+ WebRtcAec_FilterAdaptation(num_partitions,
+ x_fft_buf_block_pos,
+ x_fft_buf,
+ e_fft,
+ h_fft_buf);
+ memcpy(echo_subtractor_output, e, sizeof(float) * PART_LEN);
+}
+
+
+static void EchoSuppression(AecCore* aec,
+ float farend[PART_LEN2],
+ float* echo_subtractor_output,
+ float* output,
+ float* const* outputH) {
+ float efw[2][PART_LEN1];
+ float xfw[2][PART_LEN1];
+ float dfw[2][PART_LEN1];
+ float comfortNoiseHband[2][PART_LEN1];
float fft[PART_LEN2];
- float scale, dtmp;
float nlpGainHband;
int i;
size_t j;
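
Structurally, EchoSubtraction is one iteration of a partitioned-block frequency-domain NLMS: predict the echo with the current filter (WebRtcAec_FilterFar), form the time-domain prediction error against the near-end, transform the zero-padded error, normalize it by far-end power (WebRtcAec_ScaleErrorSignal), and adapt the filter (WebRtcAec_FilterAdaptation). The error framing step, isolated below, zero-pads the first half so the FFT-domain adaptation matches linear rather than circular convolution:

    #include <string.h>

    enum { kPartLen = 64, kPartLen2 = 128 };

    static void FrameError(const float y[kPartLen], const float s[kPartLen],
                           float e[kPartLen], float e_extended[kPartLen2]) {
      for (int i = 0; i < kPartLen; ++i) {
        e[i] = y[i] - s[i];  // time-domain prediction error
      }
      memset(e_extended, 0, sizeof(float) * kPartLen);  // zero first half
      memcpy(e_extended + kPartLen, e, sizeof(float) * kPartLen);
    }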
@@ -972,27 +1030,51 @@ static void NonLinearProcessing(AecCore* aec,
float* xfw_ptr = NULL;
- aec->delayEstCtr++;
- if (aec->delayEstCtr == delayEstInterval) {
- aec->delayEstCtr = 0;
- }
+ // Update eBuf with echo subtractor output.
+ memcpy(aec->eBuf + PART_LEN,
+ echo_subtractor_output,
+ sizeof(float) * PART_LEN);
- // initialize comfort noise for H band
- memset(comfortNoiseHband, 0, sizeof(comfortNoiseHband));
- nlpGainHband = (float)0.0;
- dtmp = (float)0.0;
+ // Analysis filter banks for the echo suppressor.
+ // Windowed near-end ffts.
+ WindowData(fft, aec->dBuf);
+ aec_rdft_forward_128(fft);
+ StoreAsComplex(fft, dfw);
+
+ // Windowed echo suppressor output ffts.
+ WindowData(fft, aec->eBuf);
+ aec_rdft_forward_128(fft);
+ StoreAsComplex(fft, efw);
- // We should always have at least one element stored in |far_buf|.
- assert(WebRtc_available_read(aec->far_buf_windowed) > 0);
// NLP
- WebRtc_ReadBuffer(aec->far_buf_windowed, (void**)&xfw_ptr, &xfw[0][0], 1);
- // TODO(bjornv): Investigate if we can reuse |far_buf_windowed| instead of
- // |xfwBuf|.
+ // Convert far-end partition to the frequency domain with windowing.
+ WindowData(fft, farend);
+ Fft(fft, xfw);
+ xfw_ptr = &xfw[0][0];
+
// Buffer far.
memcpy(aec->xfwBuf, xfw_ptr, sizeof(float) * 2 * PART_LEN1);
- WebRtcAec_SubbandCoherence(aec, efw, xfw, fft, cohde, cohxd);
+ aec->delayEstCtr++;
+ if (aec->delayEstCtr == delayEstInterval) {
+ aec->delayEstCtr = 0;
+ aec->delayIdx = WebRtcAec_PartitionDelay(aec);
+ }
+
+ // Use delayed far.
+ memcpy(xfw,
+ aec->xfwBuf + aec->delayIdx * PART_LEN1,
+ sizeof(xfw[0][0]) * 2 * PART_LEN1);
+
+ WebRtcAec_SubbandCoherence(aec, efw, dfw, xfw, fft, cohde, cohxd,
+ &aec->extreme_filter_divergence);
+
+ // Select the microphone signal as output if the filter is deemed to have
+ // diverged.
+ if (aec->divergeState) {
+ memcpy(efw, dfw, sizeof(efw[0][0]) * 2 * PART_LEN1);
+ }
hNlXdAvg = 0;
for (i = minPrefBand; i < prefBandSize + minPrefBand; i++) {
@@ -1098,67 +1180,51 @@ static void NonLinearProcessing(AecCore* aec,
// scaling only in UpdateMetrics().
UpdateLevel(&aec->nlpoutlevel, efw);
}
+
// Inverse error fft.
- fft[0] = efw[0][0];
- fft[1] = efw[0][PART_LEN];
- for (i = 1; i < PART_LEN; i++) {
- fft[2 * i] = efw[0][i];
- // Sign change required by Ooura fft.
- fft[2 * i + 1] = -efw[1][i];
- }
- aec_rdft_inverse_128(fft);
+ ScaledInverseFft(efw, fft, 2.0f, 1);
// Overlap and add to obtain output.
- scale = 2.0f / PART_LEN2;
for (i = 0; i < PART_LEN; i++) {
- fft[i] *= scale; // fft scaling
- fft[i] = fft[i] * WebRtcAec_sqrtHanning[i] + aec->outBuf[i];
-
- fft[PART_LEN + i] *= scale; // fft scaling
- aec->outBuf[i] = fft[PART_LEN + i] * WebRtcAec_sqrtHanning[PART_LEN - i];
+ output[i] = (fft[i] * WebRtcAec_sqrtHanning[i] +
+ aec->outBuf[i] * WebRtcAec_sqrtHanning[PART_LEN - i]);
// Saturate output to keep it in the allowed range.
output[i] = WEBRTC_SPL_SAT(
- WEBRTC_SPL_WORD16_MAX, fft[i], WEBRTC_SPL_WORD16_MIN);
+ WEBRTC_SPL_WORD16_MAX, output[i], WEBRTC_SPL_WORD16_MIN);
}
+ memcpy(aec->outBuf, &fft[PART_LEN], PART_LEN * sizeof(aec->outBuf[0]));
// For H band
if (aec->num_bands > 1) {
-
// H band gain
// average nlp over low band: average over second half of freq spectrum
// (4->8khz)
GetHighbandGain(hNl, &nlpGainHband);
// Inverse comfort_noise
- if (flagHbandCn == 1) {
- fft[0] = comfortNoiseHband[0][0];
- fft[1] = comfortNoiseHband[PART_LEN][0];
- for (i = 1; i < PART_LEN; i++) {
- fft[2 * i] = comfortNoiseHband[i][0];
- fft[2 * i + 1] = comfortNoiseHband[i][1];
- }
- aec_rdft_inverse_128(fft);
- scale = 2.0f / PART_LEN2;
- }
+ ScaledInverseFft(comfortNoiseHband, fft, 2.0f, 0);
// compute gain factor
for (j = 0; j < aec->num_bands - 1; ++j) {
for (i = 0; i < PART_LEN; i++) {
- dtmp = aec->dBufH[j][i];
- dtmp = dtmp * nlpGainHband; // for variable gain
+ outputH[j][i] = aec->dBufH[j][i] * nlpGainHband;
+ }
+ }
- // add some comfort noise where Hband is attenuated
- if (flagHbandCn == 1 && j == 0) {
- fft[i] *= scale; // fft scaling
- dtmp += cnScaleHband * fft[i];
- }
+ // Add some comfort noise where Hband is attenuated.
+ for (i = 0; i < PART_LEN; i++) {
+ outputH[0][i] += cnScaleHband * fft[i];
+ }
- // Saturate output to keep it in the allowed range.
+ // Saturate output to keep it in the allowed range.
+ for (j = 0; j < aec->num_bands - 1; ++j) {
+ for (i = 0; i < PART_LEN; i++) {
outputH[j][i] = WEBRTC_SPL_SAT(
- WEBRTC_SPL_WORD16_MAX, dtmp, WEBRTC_SPL_WORD16_MIN);
+ WEBRTC_SPL_WORD16_MAX, outputH[j][i], WEBRTC_SPL_WORD16_MIN);
}
}
+
}
// Copy the current block to the old position.
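
The synthesis above is windowed overlap-add: the current inverse FFT block is weighted by the rising half of a square-root Hanning window and summed with the stored tail of the previous block weighted by the falling half. Since the analysis side applies the same square-root window, the squared windows sum to one across the 50% overlap. A condensed sketch (win corresponds to WebRtcAec_sqrtHanning, length kPartLen + 1):

    #include <string.h>

    enum { kPartLen = 64 };

    static void OverlapAdd(const float ifft[2 * kPartLen],
                           const float win[kPartLen + 1],
                           float out_buf[kPartLen], float output[kPartLen]) {
      for (int i = 0; i < kPartLen; ++i) {
        output[i] = ifft[i] * win[i] + out_buf[i] * win[kPartLen - i];
      }
      // Save the un-windowed second half for the next block.
      memcpy(out_buf, ifft + kPartLen, kPartLen * sizeof(float));
    }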
@@ -1177,11 +1243,9 @@ static void NonLinearProcessing(AecCore* aec,
static void ProcessBlock(AecCore* aec) {
size_t i;
- float y[PART_LEN], e[PART_LEN];
- float scale;
float fft[PART_LEN2];
- float xf[2][PART_LEN1], yf[2][PART_LEN1], ef[2][PART_LEN1];
+ float xf[2][PART_LEN1];
float df[2][PART_LEN1];
float far_spectrum = 0.0f;
float near_spectrum = 0.0f;
@@ -1198,15 +1262,18 @@ static void ProcessBlock(AecCore* aec) {
float nearend[PART_LEN];
float* nearend_ptr = NULL;
+ float farend[PART_LEN2];
+ float* farend_ptr = NULL;
+ float echo_subtractor_output[PART_LEN];
float output[PART_LEN];
float outputH[NUM_HIGH_BANDS_MAX][PART_LEN];
float* outputH_ptr[NUM_HIGH_BANDS_MAX];
+ float* xf_ptr = NULL;
+
for (i = 0; i < NUM_HIGH_BANDS_MAX; ++i) {
outputH_ptr[i] = outputH[i];
}
- float* xf_ptr = NULL;
-
// Concatenate old and new nearend blocks.
for (i = 0; i < aec->num_bands - 1; ++i) {
WebRtc_ReadBuffer(aec->nearFrBufH[i],
@@ -1218,25 +1285,28 @@ static void ProcessBlock(AecCore* aec) {
WebRtc_ReadBuffer(aec->nearFrBuf, (void**)&nearend_ptr, nearend, PART_LEN);
memcpy(aec->dBuf + PART_LEN, nearend_ptr, sizeof(nearend));
- // ---------- Ooura fft ----------
+ // We should always have at least one element stored in |far_buf|.
+ assert(WebRtc_available_read(aec->far_time_buf) > 0);
+ WebRtc_ReadBuffer(aec->far_time_buf, (void**)&farend_ptr, farend, 1);
#ifdef WEBRTC_AEC_DEBUG_DUMP
{
- float farend[PART_LEN];
- float* farend_ptr = NULL;
- WebRtc_ReadBuffer(aec->far_time_buf, (void**)&farend_ptr, farend, 1);
- RTC_AEC_DEBUG_WAV_WRITE(aec->farFile, farend_ptr, PART_LEN);
+ // TODO(minyue): |farend_ptr| starts from buffered samples. This will be
+ // modified when |aec->far_time_buf| is revised.
+ RTC_AEC_DEBUG_WAV_WRITE(aec->farFile, &farend_ptr[PART_LEN], PART_LEN);
+
RTC_AEC_DEBUG_WAV_WRITE(aec->nearFile, nearend_ptr, PART_LEN);
}
#endif
- // We should always have at least one element stored in |far_buf|.
- assert(WebRtc_available_read(aec->far_buf) > 0);
- WebRtc_ReadBuffer(aec->far_buf, (void**)&xf_ptr, &xf[0][0], 1);
+ // Convert far-end signal to the frequency domain.
+ memcpy(fft, farend_ptr, sizeof(float) * PART_LEN2);
+ Fft(fft, xf);
+ xf_ptr = &xf[0][0];
// Near fft
memcpy(fft, aec->dBuf, sizeof(float) * PART_LEN2);
- TimeToFrequency(fft, df, 0);
+ Fft(fft, df);
// Power smoothing
for (i = 0; i < PART_LEN1; i++) {
@@ -1314,60 +1384,25 @@ static void ProcessBlock(AecCore* aec) {
&xf_ptr[PART_LEN1],
sizeof(float) * PART_LEN1);
- memset(yf, 0, sizeof(yf));
-
- // Filter far
- WebRtcAec_FilterFar(aec, yf);
-
- // Inverse fft to obtain echo estimate and error.
- fft[0] = yf[0][0];
- fft[1] = yf[0][PART_LEN];
- for (i = 1; i < PART_LEN; i++) {
- fft[2 * i] = yf[0][i];
- fft[2 * i + 1] = yf[1][i];
- }
- aec_rdft_inverse_128(fft);
-
- scale = 2.0f / PART_LEN2;
- for (i = 0; i < PART_LEN; i++) {
- y[i] = fft[PART_LEN + i] * scale; // fft scaling
- }
-
- for (i = 0; i < PART_LEN; i++) {
- e[i] = nearend_ptr[i] - y[i];
- }
-
- // Error fft
- memcpy(aec->eBuf + PART_LEN, e, sizeof(float) * PART_LEN);
- memset(fft, 0, sizeof(float) * PART_LEN);
- memcpy(fft + PART_LEN, e, sizeof(float) * PART_LEN);
- // TODO(bjornv): Change to use TimeToFrequency().
- aec_rdft_forward_128(fft);
-
- ef[1][0] = 0;
- ef[1][PART_LEN] = 0;
- ef[0][0] = fft[0];
- ef[0][PART_LEN] = fft[1];
- for (i = 1; i < PART_LEN; i++) {
- ef[0][i] = fft[2 * i];
- ef[1][i] = fft[2 * i + 1];
- }
-
- RTC_AEC_DEBUG_RAW_WRITE(aec->e_fft_file,
- &ef[0][0],
- sizeof(ef[0][0]) * PART_LEN1 * 2);
-
- if (aec->metricsMode == 1) {
- // Note that the first PART_LEN samples in fft (before transformation) are
- // zero. Hence, the scaling by two in UpdateLevel() should not be
- // performed. That scaling is taken care of in UpdateMetrics() instead.
- UpdateLevel(&aec->linoutlevel, ef);
- }
-
- // Scale error signal inversely with far power.
- WebRtcAec_ScaleErrorSignal(aec, ef);
- WebRtcAec_FilterAdaptation(aec, fft, ef);
- NonLinearProcessing(aec, output, outputH_ptr);
+ // Perform echo subtraction.
+ EchoSubtraction(aec,
+ aec->num_partitions,
+ aec->xfBufBlockPos,
+ aec->metricsMode,
+ aec->extended_filter_enabled,
+ aec->normal_mu,
+ aec->normal_error_threshold,
+ aec->xfBuf,
+ nearend_ptr,
+ aec->xPow,
+ aec->wfBuf,
+ &aec->linoutlevel,
+ echo_subtractor_output);
+
+ RTC_AEC_DEBUG_WAV_WRITE(aec->outLinearFile, echo_subtractor_output, PART_LEN);
+
+ // Perform echo suppression.
+ EchoSuppression(aec, farend_ptr, echo_subtractor_output, output, outputH_ptr);
if (aec->metricsMode == 1) {
// Update power levels and echo metrics
@@ -1383,7 +1418,6 @@ static void ProcessBlock(AecCore* aec) {
WebRtc_WriteBuffer(aec->outFrBufH[i], outputH[i], PART_LEN);
}
- RTC_AEC_DEBUG_WAV_WRITE(aec->outLinearFile, e, PART_LEN);
RTC_AEC_DEBUG_WAV_WRITE(aec->outFile, output, PART_LEN);
}
@@ -1422,26 +1456,20 @@ AecCore* WebRtcAec_CreateAec() {
}
// Create far-end buffers.
- aec->far_buf =
- WebRtc_CreateBuffer(kBufSizePartitions, sizeof(float) * 2 * PART_LEN1);
- if (!aec->far_buf) {
- WebRtcAec_FreeAec(aec);
- return NULL;
- }
- aec->far_buf_windowed =
- WebRtc_CreateBuffer(kBufSizePartitions, sizeof(float) * 2 * PART_LEN1);
- if (!aec->far_buf_windowed) {
- WebRtcAec_FreeAec(aec);
- return NULL;
- }
-#ifdef WEBRTC_AEC_DEBUG_DUMP
- aec->instance_index = webrtc_aec_instance_count;
+ // For bit exactness with legacy code, each element in |far_time_buf| is
+ // supposed to contain |PART_LEN2| samples with an overlap of |PART_LEN|
+ // samples from the last frame.
+ // TODO(minyue): reduce |far_time_buf| to non-overlapped |PART_LEN| samples.
aec->far_time_buf =
- WebRtc_CreateBuffer(kBufSizePartitions, sizeof(float) * PART_LEN);
+ WebRtc_CreateBuffer(kBufSizePartitions, sizeof(float) * PART_LEN2);
if (!aec->far_time_buf) {
WebRtcAec_FreeAec(aec);
return NULL;
}
+
+#ifdef WEBRTC_AEC_DEBUG_DUMP
+ aec->instance_index = webrtc_aec_instance_count;
+
aec->farFile = aec->nearFile = aec->outFile = aec->outLinearFile = NULL;
aec->debug_dump_count = 0;
#endif
@@ -1477,6 +1505,10 @@ AecCore* WebRtcAec_CreateAec() {
WebRtcAec_OverdriveAndSuppress = OverdriveAndSuppress;
WebRtcAec_ComfortNoise = ComfortNoise;
WebRtcAec_SubbandCoherence = SubbandCoherence;
+ WebRtcAec_StoreAsComplex = StoreAsComplex;
+ WebRtcAec_PartitionDelay = PartitionDelay;
+ WebRtcAec_WindowData = WindowData;
+
#if defined(WEBRTC_ARCH_X86_FAMILY)
if (WebRtc_GetCPUInfo(kSSE2)) {
@@ -1515,11 +1547,8 @@ void WebRtcAec_FreeAec(AecCore* aec) {
WebRtc_FreeBuffer(aec->outFrBufH[i]);
}
- WebRtc_FreeBuffer(aec->far_buf);
- WebRtc_FreeBuffer(aec->far_buf_windowed);
-#ifdef WEBRTC_AEC_DEBUG_DUMP
WebRtc_FreeBuffer(aec->far_time_buf);
-#endif
+
RTC_AEC_DEBUG_WAV_CLOSE(aec->farFile);
RTC_AEC_DEBUG_WAV_CLOSE(aec->nearFile);
RTC_AEC_DEBUG_WAV_CLOSE(aec->outFile);
@@ -1555,10 +1584,9 @@ int WebRtcAec_InitAec(AecCore* aec, int sampFreq) {
}
// Initialize far-end buffers.
- WebRtc_InitBuffer(aec->far_buf);
- WebRtc_InitBuffer(aec->far_buf_windowed);
-#ifdef WEBRTC_AEC_DEBUG_DUMP
WebRtc_InitBuffer(aec->far_time_buf);
+
+#ifdef WEBRTC_AEC_DEBUG_DUMP
{
int process_rate = sampFreq > 16000 ? 16000 : sampFreq;
RTC_AEC_DEBUG_WAV_REOPEN("aec_far", aec->instance_index,
@@ -1693,6 +1721,8 @@ int WebRtcAec_InitAec(AecCore* aec, int sampFreq) {
aec->seed = 777;
aec->delayEstCtr = 0;
+ aec->extreme_filter_divergence = 0;
+
// Metrics disabled by default
aec->metricsMode = 0;
InitMetrics(aec);
@@ -1700,27 +1730,22 @@ int WebRtcAec_InitAec(AecCore* aec, int sampFreq) {
return 0;
}
-void WebRtcAec_BufferFarendPartition(AecCore* aec, const float* farend) {
- float fft[PART_LEN2];
- float xf[2][PART_LEN1];
+// For bit exactness with legacy code, |farend| is supposed to contain
+// |PART_LEN2| samples with an overlap of |PART_LEN| samples from the last
+// frame.
+// TODO(minyue): reduce |farend| to non-overlapped |PART_LEN| samples.
+void WebRtcAec_BufferFarendPartition(AecCore* aec, const float* farend) {
// Check if the buffer is full, and in that case flush the oldest data.
- if (WebRtc_available_write(aec->far_buf) < 1) {
+ if (WebRtc_available_write(aec->far_time_buf) < 1) {
WebRtcAec_MoveFarReadPtr(aec, 1);
}
- // Convert far-end partition to the frequency domain without windowing.
- memcpy(fft, farend, sizeof(float) * PART_LEN2);
- TimeToFrequency(fft, xf, 0);
- WebRtc_WriteBuffer(aec->far_buf, &xf[0][0], 1);
- // Convert far-end partition to the frequency domain with windowing.
- memcpy(fft, farend, sizeof(float) * PART_LEN2);
- TimeToFrequency(fft, xf, 1);
- WebRtc_WriteBuffer(aec->far_buf_windowed, &xf[0][0], 1);
+ WebRtc_WriteBuffer(aec->far_time_buf, farend, 1);
}
int WebRtcAec_MoveFarReadPtr(AecCore* aec, int elements) {
- int elements_moved = MoveFarReadPtrWithoutSystemDelayUpdate(aec, elements);
+ int elements_moved = WebRtc_MoveReadPtr(aec->far_time_buf, elements);
aec->system_delay -= elements_moved * PART_LEN;
return elements_moved;
}
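
BufferFarendPartition above uses a fixed-capacity ring buffer with a flush-oldest policy: when no slot is free, the read pointer is advanced by one element before the new far-end block is written, and WebRtcAec_MoveFarReadPtr keeps system_delay consistent by subtracting PART_LEN samples per flushed element. The pattern in isolation, using the ring-buffer API as it appears in this file:

    // 'buf' is a RingBuffer created with WebRtc_CreateBuffer(capacity,
    // element_size); this sketch drops the oldest element when full.
    void WriteFlushingOldest(RingBuffer* buf, const float* element) {
      if (WebRtc_available_write(buf) < 1) {
        WebRtc_MoveReadPtr(buf, 1);  // discard the oldest element
      }
      WebRtc_WriteBuffer(buf, element, 1);
    }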
@@ -1794,14 +1819,14 @@ void WebRtcAec_ProcessFrames(AecCore* aec,
// rounding, like -16.
int move_elements = (aec->knownDelay - knownDelay - 32) / PART_LEN;
int moved_elements =
- MoveFarReadPtrWithoutSystemDelayUpdate(aec, move_elements);
+ WebRtc_MoveReadPtr(aec->far_time_buf, move_elements);
aec->knownDelay -= moved_elements * PART_LEN;
} else {
// 2 b) Apply signal based delay correction.
int move_elements = SignalBasedDelayCorrection(aec);
int moved_elements =
- MoveFarReadPtrWithoutSystemDelayUpdate(aec, move_elements);
- int far_near_buffer_diff = WebRtc_available_read(aec->far_buf) -
+ WebRtc_MoveReadPtr(aec->far_time_buf, move_elements);
+ int far_near_buffer_diff = WebRtc_available_read(aec->far_time_buf) -
WebRtc_available_read(aec->nearFrBuf) / PART_LEN;
WebRtc_SoftResetDelayEstimator(aec->delay_estimator, moved_elements);
WebRtc_SoftResetDelayEstimatorFarend(aec->delay_estimator_farend,
@@ -1880,10 +1905,6 @@ void WebRtcAec_GetEchoStats(AecCore* self,
*a_nlp = self->aNlp;
}
-#ifdef WEBRTC_AEC_DEBUG_DUMP
-void* WebRtcAec_far_time_buf(AecCore* self) { return self->far_time_buf; }
-#endif
-
void WebRtcAec_SetConfigCore(AecCore* self,
int nlp_mode,
int metrics_mode,
diff --git a/webrtc/modules/audio_processing/aec/aec_core_internal.h b/webrtc/modules/audio_processing/aec/aec_core_internal.h
index 2de028379b..3809c82567 100644
--- a/webrtc/modules/audio_processing/aec/aec_core_internal.h
+++ b/webrtc/modules/audio_processing/aec/aec_core_internal.h
@@ -95,8 +95,8 @@ struct AecCore {
int xfBufBlockPos;
- RingBuffer* far_buf;
- RingBuffer* far_buf_windowed;
+ RingBuffer* far_time_buf;
+
int system_delay; // Current system delay buffered in AEC.
int mult; // sampling frequency multiple
@@ -152,6 +152,10 @@ struct AecCore {
// Runtime selection of number of filter partitions.
int num_partitions;
+ // Flag that extreme filter divergence has been detected by the Echo
+ // Suppressor.
+ int extreme_filter_divergence;
+
#ifdef WEBRTC_AEC_DEBUG_DUMP
// Sequence number of this AEC instance, so that different instances can
// choose different dump file names.
@@ -161,7 +165,6 @@ struct AecCore {
// each time.
int debug_dump_count;
- RingBuffer* far_time_buf;
rtc_WavWriter* farFile;
rtc_WavWriter* nearFile;
rtc_WavWriter* outFile;
@@ -170,13 +173,25 @@ struct AecCore {
#endif
};
-typedef void (*WebRtcAecFilterFar)(AecCore* aec, float yf[2][PART_LEN1]);
+typedef void (*WebRtcAecFilterFar)(
+ int num_partitions,
+ int x_fft_buf_block_pos,
+ float x_fft_buf[2][kExtendedNumPartitions * PART_LEN1],
+ float h_fft_buf[2][kExtendedNumPartitions * PART_LEN1],
+ float y_fft[2][PART_LEN1]);
extern WebRtcAecFilterFar WebRtcAec_FilterFar;
-typedef void (*WebRtcAecScaleErrorSignal)(AecCore* aec, float ef[2][PART_LEN1]);
-extern WebRtcAecScaleErrorSignal WebRtcAec_ScaleErrorSignal;
-typedef void (*WebRtcAecFilterAdaptation)(AecCore* aec,
- float* fft,
+typedef void (*WebRtcAecScaleErrorSignal)(int extended_filter_enabled,
+ float normal_mu,
+ float normal_error_threshold,
+ float x_pow[PART_LEN1],
float ef[2][PART_LEN1]);
+extern WebRtcAecScaleErrorSignal WebRtcAec_ScaleErrorSignal;
+typedef void (*WebRtcAecFilterAdaptation)(
+ int num_partitions,
+ int x_fft_buf_block_pos,
+ float x_fft_buf[2][kExtendedNumPartitions * PART_LEN1],
+ float e_fft[2][PART_LEN1],
+ float h_fft_buf[2][kExtendedNumPartitions * PART_LEN1]);
extern WebRtcAecFilterAdaptation WebRtcAec_FilterAdaptation;
typedef void (*WebRtcAecOverdriveAndSuppress)(AecCore* aec,
float hNl[PART_LEN1],
@@ -186,17 +201,29 @@ extern WebRtcAecOverdriveAndSuppress WebRtcAec_OverdriveAndSuppress;
typedef void (*WebRtcAecComfortNoise)(AecCore* aec,
float efw[2][PART_LEN1],
- complex_t* comfortNoiseHband,
+ float comfortNoiseHband[2][PART_LEN1],
const float* noisePow,
const float* lambda);
extern WebRtcAecComfortNoise WebRtcAec_ComfortNoise;
typedef void (*WebRtcAecSubBandCoherence)(AecCore* aec,
float efw[2][PART_LEN1],
+ float dfw[2][PART_LEN1],
float xfw[2][PART_LEN1],
float* fft,
float* cohde,
- float* cohxd);
+ float* cohxd,
+ int* extreme_filter_divergence);
extern WebRtcAecSubBandCoherence WebRtcAec_SubbandCoherence;
+typedef int (*WebRtcAecPartitionDelay)(const AecCore* aec);
+extern WebRtcAecPartitionDelay WebRtcAec_PartitionDelay;
+
+typedef void (*WebRtcAecStoreAsComplex)(const float* data,
+ float data_complex[2][PART_LEN1]);
+extern WebRtcAecStoreAsComplex WebRtcAec_StoreAsComplex;
+
+typedef void (*WebRtcAecWindowData)(float* x_windowed, const float* x);
+extern WebRtcAecWindowData WebRtcAec_WindowData;
+
#endif // WEBRTC_MODULES_AUDIO_PROCESSING_AEC_AEC_CORE_INTERNAL_H_
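
The typedef/extern pairs above implement WebRTC's runtime SIMD dispatch: each kernel is reached through a global function pointer that is bound to the portable C implementation at init time and then overridden with an SSE2, NEON, or MIPS variant when the CPU supports it (see WebRtcAec_CreateAec and WebRtcAec_InitAec_mips elsewhere in this diff). A self-contained sketch of the idiom, with illustrative names:

    // Generic kernel plus optional optimized override, chosen once at init.
    typedef void (*ScaleKernel)(float* x, int n, float gain);

    static void ScaleGeneric(float* x, int n, float gain) {
      for (int i = 0; i < n; ++i) x[i] *= gain;
    }

    ScaleKernel Scale = 0;  // dispatch pointer, like WebRtcAec_FilterFar

    void InitKernels(int cpu_has_simd) {
      Scale = ScaleGeneric;  // always bind the portable version first
      if (cpu_has_simd) {
        // Scale = ScaleOptimized;  // platform-specific override goes here
      }
    }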
diff --git a/webrtc/modules/audio_processing/aec/aec_core_mips.c b/webrtc/modules/audio_processing/aec/aec_core_mips.c
index bb33087aee..035a4b76af 100644
--- a/webrtc/modules/audio_processing/aec/aec_core_mips.c
+++ b/webrtc/modules/audio_processing/aec/aec_core_mips.c
@@ -20,13 +20,12 @@
#include "webrtc/modules/audio_processing/aec/aec_core_internal.h"
#include "webrtc/modules/audio_processing/aec/aec_rdft.h"
-static const int flagHbandCn = 1; // flag for adding comfort noise in H band
extern const float WebRtcAec_weightCurve[65];
extern const float WebRtcAec_overDriveCurve[65];
void WebRtcAec_ComfortNoise_mips(AecCore* aec,
float efw[2][PART_LEN1],
- complex_t* comfortNoiseHband,
+ float comfortNoiseHband[2][PART_LEN1],
const float* noisePow,
const float* lambda) {
int i, num;
@@ -274,7 +273,7 @@ void WebRtcAec_ComfortNoise_mips(AecCore* aec,
noiseAvg = 0.0;
tmpAvg = 0.0;
num = 0;
- if ((aec->sampFreq == 32000 || aec->sampFreq == 48000) && flagHbandCn == 1) {
+ if (aec->num_bands > 1) {
for (i = 0; i < PART_LEN; i++) {
rand[i] = ((float)randW16[i]) / 32768;
}
@@ -314,27 +313,35 @@ void WebRtcAec_ComfortNoise_mips(AecCore* aec,
for (i = 0; i < PART_LEN1; i++) {
// Use average NLP weight for H band
- comfortNoiseHband[i][0] = tmpAvg * u[i][0];
- comfortNoiseHband[i][1] = tmpAvg * u[i][1];
+ comfortNoiseHband[0][i] = tmpAvg * u[i][0];
+ comfortNoiseHband[1][i] = tmpAvg * u[i][1];
}
+ } else {
+ memset(comfortNoiseHband, 0,
+ 2 * PART_LEN1 * sizeof(comfortNoiseHband[0][0]));
}
}
-void WebRtcAec_FilterFar_mips(AecCore* aec, float yf[2][PART_LEN1]) {
+void WebRtcAec_FilterFar_mips(
+ int num_partitions,
+ int x_fft_buf_block_pos,
+ float x_fft_buf[2][kExtendedNumPartitions * PART_LEN1],
+ float h_fft_buf[2][kExtendedNumPartitions * PART_LEN1],
+ float y_fft[2][PART_LEN1]) {
int i;
- for (i = 0; i < aec->num_partitions; i++) {
- int xPos = (i + aec->xfBufBlockPos) * PART_LEN1;
+ for (i = 0; i < num_partitions; i++) {
+ int xPos = (i + x_fft_buf_block_pos) * PART_LEN1;
int pos = i * PART_LEN1;
// Check for wrap
- if (i + aec->xfBufBlockPos >= aec->num_partitions) {
- xPos -= aec->num_partitions * (PART_LEN1);
+ if (i + x_fft_buf_block_pos >= num_partitions) {
+ xPos -= num_partitions * (PART_LEN1);
}
- float* yf0 = yf[0];
- float* yf1 = yf[1];
- float* aRe = aec->xfBuf[0] + xPos;
- float* aIm = aec->xfBuf[1] + xPos;
- float* bRe = aec->wfBuf[0] + pos;
- float* bIm = aec->wfBuf[1] + pos;
+ float* yf0 = y_fft[0];
+ float* yf1 = y_fft[1];
+ float* aRe = x_fft_buf[0] + xPos;
+ float* aIm = x_fft_buf[1] + xPos;
+ float* bRe = h_fft_buf[0] + pos;
+ float* bIm = h_fft_buf[1] + pos;
float f0, f1, f2, f3, f4, f5, f6, f7, f8, f9, f10, f11, f12, f13;
int len = PART_LEN1 >> 1;
@@ -432,23 +439,27 @@ void WebRtcAec_FilterFar_mips(AecCore* aec, float yf[2][PART_LEN1]) {
}
}
-void WebRtcAec_FilterAdaptation_mips(AecCore* aec,
- float* fft,
- float ef[2][PART_LEN1]) {
+void WebRtcAec_FilterAdaptation_mips(
+ int num_partitions,
+ int x_fft_buf_block_pos,
+ float x_fft_buf[2][kExtendedNumPartitions * PART_LEN1],
+ float e_fft[2][PART_LEN1],
+ float h_fft_buf[2][kExtendedNumPartitions * PART_LEN1]) {
+ float fft[PART_LEN2];
int i;
- for (i = 0; i < aec->num_partitions; i++) {
- int xPos = (i + aec->xfBufBlockPos)*(PART_LEN1);
+ for (i = 0; i < num_partitions; i++) {
+ int xPos = (i + x_fft_buf_block_pos)*(PART_LEN1);
int pos;
// Check for wrap
- if (i + aec->xfBufBlockPos >= aec->num_partitions) {
- xPos -= aec->num_partitions * PART_LEN1;
+ if (i + x_fft_buf_block_pos >= num_partitions) {
+ xPos -= num_partitions * PART_LEN1;
}
pos = i * PART_LEN1;
- float* aRe = aec->xfBuf[0] + xPos;
- float* aIm = aec->xfBuf[1] + xPos;
- float* bRe = ef[0];
- float* bIm = ef[1];
+ float* aRe = x_fft_buf[0] + xPos;
+ float* aIm = x_fft_buf[1] + xPos;
+ float* bRe = e_fft[0];
+ float* bIm = e_fft[1];
float* fft_tmp;
float f0, f1, f2, f3, f4, f5, f6 ,f7, f8, f9, f10, f11, f12;
@@ -573,8 +584,8 @@ void WebRtcAec_FilterAdaptation_mips(AecCore* aec,
);
}
aec_rdft_forward_128(fft);
- aRe = aec->wfBuf[0] + pos;
- aIm = aec->wfBuf[1] + pos;
+ aRe = h_fft_buf[0] + pos;
+ aIm = h_fft_buf[1] + pos;
__asm __volatile (
".set push \n\t"
".set noreorder \n\t"
@@ -699,15 +710,18 @@ void WebRtcAec_OverdriveAndSuppress_mips(AecCore* aec,
}
}
-void WebRtcAec_ScaleErrorSignal_mips(AecCore* aec, float ef[2][PART_LEN1]) {
- const float mu = aec->extended_filter_enabled ? kExtendedMu : aec->normal_mu;
- const float error_threshold = aec->extended_filter_enabled
+void WebRtcAec_ScaleErrorSignal_mips(int extended_filter_enabled,
+ float normal_mu,
+ float normal_error_threshold,
+ float x_pow[PART_LEN1],
+ float ef[2][PART_LEN1]) {
+ const float mu = extended_filter_enabled ? kExtendedMu : normal_mu;
+ const float error_threshold = extended_filter_enabled
? kExtendedErrorThreshold
- : aec->normal_error_threshold;
+ : normal_error_threshold;
int len = (PART_LEN1);
float* ef0 = ef[0];
float* ef1 = ef[1];
- float* xPow = aec->xPow;
float fac1 = 1e-10f;
float err_th2 = error_threshold * error_threshold;
float f0, f1, f2;
@@ -719,7 +733,7 @@ void WebRtcAec_ScaleErrorSignal_mips(AecCore* aec, float ef[2][PART_LEN1]) {
".set push \n\t"
".set noreorder \n\t"
"1: \n\t"
- "lwc1 %[f0], 0(%[xPow]) \n\t"
+ "lwc1 %[f0], 0(%[x_pow]) \n\t"
"lwc1 %[f1], 0(%[ef0]) \n\t"
"lwc1 %[f2], 0(%[ef1]) \n\t"
"add.s %[f0], %[f0], %[fac1] \n\t"
@@ -747,7 +761,7 @@ void WebRtcAec_ScaleErrorSignal_mips(AecCore* aec, float ef[2][PART_LEN1]) {
"swc1 %[f1], 0(%[ef0]) \n\t"
"swc1 %[f2], 0(%[ef1]) \n\t"
"addiu %[len], %[len], -1 \n\t"
- "addiu %[xPow], %[xPow], 4 \n\t"
+ "addiu %[x_pow], %[x_pow], 4 \n\t"
"addiu %[ef0], %[ef0], 4 \n\t"
"bgtz %[len], 1b \n\t"
" addiu %[ef1], %[ef1], 4 \n\t"
@@ -756,7 +770,7 @@ void WebRtcAec_ScaleErrorSignal_mips(AecCore* aec, float ef[2][PART_LEN1]) {
#if !defined(MIPS32_R2_LE)
[f3] "=&f" (f3),
#endif
- [xPow] "+r" (xPow), [ef0] "+r" (ef0), [ef1] "+r" (ef1),
+ [x_pow] "+r" (x_pow), [ef0] "+r" (ef0), [ef1] "+r" (ef1),
[len] "+r" (len)
: [fac1] "f" (fac1), [err_th2] "f" (err_th2), [mu] "f" (mu),
[err_th] "f" (error_threshold)
@@ -771,4 +785,3 @@ void WebRtcAec_InitAec_mips(void) {
WebRtcAec_ComfortNoise = WebRtcAec_ComfortNoise_mips;
WebRtcAec_OverdriveAndSuppress = WebRtcAec_OverdriveAndSuppress_mips;
}
-
diff --git a/webrtc/modules/audio_processing/aec/aec_core_neon.c b/webrtc/modules/audio_processing/aec/aec_core_neon.c
index 9a677aaa67..7898ab2543 100644
--- a/webrtc/modules/audio_processing/aec/aec_core_neon.c
+++ b/webrtc/modules/audio_processing/aec/aec_core_neon.c
@@ -34,45 +34,49 @@ __inline static float MulIm(float aRe, float aIm, float bRe, float bIm) {
return aRe * bIm + aIm * bRe;
}
-static void FilterFarNEON(AecCore* aec, float yf[2][PART_LEN1]) {
+static void FilterFarNEON(
+ int num_partitions,
+ int x_fft_buf_block_pos,
+ float x_fft_buf[2][kExtendedNumPartitions * PART_LEN1],
+ float h_fft_buf[2][kExtendedNumPartitions * PART_LEN1],
+ float y_fft[2][PART_LEN1]) {
int i;
- const int num_partitions = aec->num_partitions;
for (i = 0; i < num_partitions; i++) {
int j;
- int xPos = (i + aec->xfBufBlockPos) * PART_LEN1;
+ int xPos = (i + x_fft_buf_block_pos) * PART_LEN1;
int pos = i * PART_LEN1;
// Check for wrap
- if (i + aec->xfBufBlockPos >= num_partitions) {
+ if (i + x_fft_buf_block_pos >= num_partitions) {
xPos -= num_partitions * PART_LEN1;
}
// vectorized code (four at once)
for (j = 0; j + 3 < PART_LEN1; j += 4) {
- const float32x4_t xfBuf_re = vld1q_f32(&aec->xfBuf[0][xPos + j]);
- const float32x4_t xfBuf_im = vld1q_f32(&aec->xfBuf[1][xPos + j]);
- const float32x4_t wfBuf_re = vld1q_f32(&aec->wfBuf[0][pos + j]);
- const float32x4_t wfBuf_im = vld1q_f32(&aec->wfBuf[1][pos + j]);
- const float32x4_t yf_re = vld1q_f32(&yf[0][j]);
- const float32x4_t yf_im = vld1q_f32(&yf[1][j]);
- const float32x4_t a = vmulq_f32(xfBuf_re, wfBuf_re);
- const float32x4_t e = vmlsq_f32(a, xfBuf_im, wfBuf_im);
- const float32x4_t c = vmulq_f32(xfBuf_re, wfBuf_im);
- const float32x4_t f = vmlaq_f32(c, xfBuf_im, wfBuf_re);
- const float32x4_t g = vaddq_f32(yf_re, e);
- const float32x4_t h = vaddq_f32(yf_im, f);
- vst1q_f32(&yf[0][j], g);
- vst1q_f32(&yf[1][j], h);
+ const float32x4_t x_fft_buf_re = vld1q_f32(&x_fft_buf[0][xPos + j]);
+ const float32x4_t x_fft_buf_im = vld1q_f32(&x_fft_buf[1][xPos + j]);
+ const float32x4_t h_fft_buf_re = vld1q_f32(&h_fft_buf[0][pos + j]);
+ const float32x4_t h_fft_buf_im = vld1q_f32(&h_fft_buf[1][pos + j]);
+ const float32x4_t y_fft_re = vld1q_f32(&y_fft[0][j]);
+ const float32x4_t y_fft_im = vld1q_f32(&y_fft[1][j]);
+ const float32x4_t a = vmulq_f32(x_fft_buf_re, h_fft_buf_re);
+ const float32x4_t e = vmlsq_f32(a, x_fft_buf_im, h_fft_buf_im);
+ const float32x4_t c = vmulq_f32(x_fft_buf_re, h_fft_buf_im);
+ const float32x4_t f = vmlaq_f32(c, x_fft_buf_im, h_fft_buf_re);
+ const float32x4_t g = vaddq_f32(y_fft_re, e);
+ const float32x4_t h = vaddq_f32(y_fft_im, f);
+ vst1q_f32(&y_fft[0][j], g);
+ vst1q_f32(&y_fft[1][j], h);
}
// scalar code for the remaining items.
for (; j < PART_LEN1; j++) {
- yf[0][j] += MulRe(aec->xfBuf[0][xPos + j],
- aec->xfBuf[1][xPos + j],
- aec->wfBuf[0][pos + j],
- aec->wfBuf[1][pos + j]);
- yf[1][j] += MulIm(aec->xfBuf[0][xPos + j],
- aec->xfBuf[1][xPos + j],
- aec->wfBuf[0][pos + j],
- aec->wfBuf[1][pos + j]);
+ y_fft[0][j] += MulRe(x_fft_buf[0][xPos + j],
+ x_fft_buf[1][xPos + j],
+ h_fft_buf[0][pos + j],
+ h_fft_buf[1][pos + j]);
+ y_fft[1][j] += MulIm(x_fft_buf[0][xPos + j],
+ x_fft_buf[1][xPos + j],
+ h_fft_buf[0][pos + j],
+ h_fft_buf[1][pos + j]);
}
}
}
@@ -122,20 +126,24 @@ static float32x4_t vsqrtq_f32(float32x4_t s) {
}
#endif // WEBRTC_ARCH_ARM64
-static void ScaleErrorSignalNEON(AecCore* aec, float ef[2][PART_LEN1]) {
- const float mu = aec->extended_filter_enabled ? kExtendedMu : aec->normal_mu;
- const float error_threshold = aec->extended_filter_enabled ?
- kExtendedErrorThreshold : aec->normal_error_threshold;
+static void ScaleErrorSignalNEON(int extended_filter_enabled,
+ float normal_mu,
+ float normal_error_threshold,
+ float x_pow[PART_LEN1],
+ float ef[2][PART_LEN1]) {
+ const float mu = extended_filter_enabled ? kExtendedMu : normal_mu;
+ const float error_threshold = extended_filter_enabled ?
+ kExtendedErrorThreshold : normal_error_threshold;
const float32x4_t k1e_10f = vdupq_n_f32(1e-10f);
const float32x4_t kMu = vmovq_n_f32(mu);
const float32x4_t kThresh = vmovq_n_f32(error_threshold);
int i;
// vectorized code (four at once)
for (i = 0; i + 3 < PART_LEN1; i += 4) {
- const float32x4_t xPow = vld1q_f32(&aec->xPow[i]);
+ const float32x4_t x_pow_local = vld1q_f32(&x_pow[i]);
const float32x4_t ef_re_base = vld1q_f32(&ef[0][i]);
const float32x4_t ef_im_base = vld1q_f32(&ef[1][i]);
- const float32x4_t xPowPlus = vaddq_f32(xPow, k1e_10f);
+ const float32x4_t xPowPlus = vaddq_f32(x_pow_local, k1e_10f);
float32x4_t ef_re = vdivq_f32(ef_re_base, xPowPlus);
float32x4_t ef_im = vdivq_f32(ef_im_base, xPowPlus);
const float32x4_t ef_re2 = vmulq_f32(ef_re, ef_re);
@@ -162,8 +170,8 @@ static void ScaleErrorSignalNEON(AecCore* aec, float ef[2][PART_LEN1]) {
// scalar code for the remaining items.
for (; i < PART_LEN1; i++) {
float abs_ef;
- ef[0][i] /= (aec->xPow[i] + 1e-10f);
- ef[1][i] /= (aec->xPow[i] + 1e-10f);
+ ef[0][i] /= (x_pow[i] + 1e-10f);
+ ef[1][i] /= (x_pow[i] + 1e-10f);
abs_ef = sqrtf(ef[0][i] * ef[0][i] + ef[1][i] * ef[1][i]);
if (abs_ef > error_threshold) {
@@ -178,34 +186,37 @@ static void ScaleErrorSignalNEON(AecCore* aec, float ef[2][PART_LEN1]) {
}
}
-static void FilterAdaptationNEON(AecCore* aec,
- float* fft,
- float ef[2][PART_LEN1]) {
+static void FilterAdaptationNEON(
+ int num_partitions,
+ int x_fft_buf_block_pos,
+ float x_fft_buf[2][kExtendedNumPartitions * PART_LEN1],
+ float e_fft[2][PART_LEN1],
+ float h_fft_buf[2][kExtendedNumPartitions * PART_LEN1]) {
+ float fft[PART_LEN2];
int i;
- const int num_partitions = aec->num_partitions;
for (i = 0; i < num_partitions; i++) {
- int xPos = (i + aec->xfBufBlockPos) * PART_LEN1;
+ int xPos = (i + x_fft_buf_block_pos) * PART_LEN1;
int pos = i * PART_LEN1;
int j;
// Check for wrap
- if (i + aec->xfBufBlockPos >= num_partitions) {
+ if (i + x_fft_buf_block_pos >= num_partitions) {
xPos -= num_partitions * PART_LEN1;
}
// Process the whole array...
for (j = 0; j < PART_LEN; j += 4) {
- // Load xfBuf and ef.
- const float32x4_t xfBuf_re = vld1q_f32(&aec->xfBuf[0][xPos + j]);
- const float32x4_t xfBuf_im = vld1q_f32(&aec->xfBuf[1][xPos + j]);
- const float32x4_t ef_re = vld1q_f32(&ef[0][j]);
- const float32x4_t ef_im = vld1q_f32(&ef[1][j]);
- // Calculate the product of conjugate(xfBuf) by ef.
+ // Load x_fft_buf and e_fft.
+ const float32x4_t x_fft_buf_re = vld1q_f32(&x_fft_buf[0][xPos + j]);
+ const float32x4_t x_fft_buf_im = vld1q_f32(&x_fft_buf[1][xPos + j]);
+ const float32x4_t e_fft_re = vld1q_f32(&e_fft[0][j]);
+ const float32x4_t e_fft_im = vld1q_f32(&e_fft[1][j]);
+ // Calculate the product of conjugate(x_fft_buf) by e_fft.
// re(conjugate(a) * b) = aRe * bRe + aIm * bIm
// im(conjugate(a) * b)= aRe * bIm - aIm * bRe
- const float32x4_t a = vmulq_f32(xfBuf_re, ef_re);
- const float32x4_t e = vmlaq_f32(a, xfBuf_im, ef_im);
- const float32x4_t c = vmulq_f32(xfBuf_re, ef_im);
- const float32x4_t f = vmlsq_f32(c, xfBuf_im, ef_re);
+ const float32x4_t a = vmulq_f32(x_fft_buf_re, e_fft_re);
+ const float32x4_t e = vmlaq_f32(a, x_fft_buf_im, e_fft_im);
+ const float32x4_t c = vmulq_f32(x_fft_buf_re, e_fft_im);
+ const float32x4_t f = vmlsq_f32(c, x_fft_buf_im, e_fft_re);
// Interleave real and imaginary parts.
const float32x4x2_t g_n_h = vzipq_f32(e, f);
// Store
@@ -213,10 +224,10 @@ static void FilterAdaptationNEON(AecCore* aec,
vst1q_f32(&fft[2 * j + 4], g_n_h.val[1]);
}
// ... and fixup the first imaginary entry.
- fft[1] = MulRe(aec->xfBuf[0][xPos + PART_LEN],
- -aec->xfBuf[1][xPos + PART_LEN],
- ef[0][PART_LEN],
- ef[1][PART_LEN]);
+ fft[1] = MulRe(x_fft_buf[0][xPos + PART_LEN],
+ -x_fft_buf[1][xPos + PART_LEN],
+ e_fft[0][PART_LEN],
+ e_fft[1][PART_LEN]);
aec_rdft_inverse_128(fft);
memset(fft + PART_LEN, 0, sizeof(float) * PART_LEN);
@@ -234,21 +245,21 @@ static void FilterAdaptationNEON(AecCore* aec,
aec_rdft_forward_128(fft);
{
- const float wt1 = aec->wfBuf[1][pos];
- aec->wfBuf[0][pos + PART_LEN] += fft[1];
+ const float wt1 = h_fft_buf[1][pos];
+ h_fft_buf[0][pos + PART_LEN] += fft[1];
for (j = 0; j < PART_LEN; j += 4) {
- float32x4_t wtBuf_re = vld1q_f32(&aec->wfBuf[0][pos + j]);
- float32x4_t wtBuf_im = vld1q_f32(&aec->wfBuf[1][pos + j]);
+ float32x4_t wtBuf_re = vld1q_f32(&h_fft_buf[0][pos + j]);
+ float32x4_t wtBuf_im = vld1q_f32(&h_fft_buf[1][pos + j]);
const float32x4_t fft0 = vld1q_f32(&fft[2 * j + 0]);
const float32x4_t fft4 = vld1q_f32(&fft[2 * j + 4]);
const float32x4x2_t fft_re_im = vuzpq_f32(fft0, fft4);
wtBuf_re = vaddq_f32(wtBuf_re, fft_re_im.val[0]);
wtBuf_im = vaddq_f32(wtBuf_im, fft_re_im.val[1]);
- vst1q_f32(&aec->wfBuf[0][pos + j], wtBuf_re);
- vst1q_f32(&aec->wfBuf[1][pos + j], wtBuf_im);
+ vst1q_f32(&h_fft_buf[0][pos + j], wtBuf_re);
+ vst1q_f32(&h_fft_buf[1][pos + j], wtBuf_im);
}
- aec->wfBuf[1][pos] = wt1;
+ h_fft_buf[1][pos] = wt1;
}
}
}
@@ -442,7 +453,7 @@ static void OverdriveAndSuppressNEON(AecCore* aec,
}
}
-static int PartitionDelay(const AecCore* aec) {
+static int PartitionDelayNEON(const AecCore* aec) {
// Measures the energy in each filter partition and returns the partition with
// highest energy.
// TODO(bjornv): Spread computational cost by computing one partition per
@@ -499,7 +510,8 @@ static int PartitionDelay(const AecCore* aec) {
static void SmoothedPSD(AecCore* aec,
float efw[2][PART_LEN1],
float dfw[2][PART_LEN1],
- float xfw[2][PART_LEN1]) {
+ float xfw[2][PART_LEN1],
+ int* extreme_filter_divergence) {
// Power estimate smoothing coefficients.
const float* ptrGCoh = aec->extended_filter_enabled
? WebRtcAec_kExtendedSmoothingCoefficients[aec->mult - 1]
@@ -615,19 +627,16 @@ static void SmoothedPSD(AecCore* aec,
seSum += aec->se[i];
}
- // Divergent filter safeguard.
+ // Divergent filter safeguard update.
aec->divergeState = (aec->divergeState ? 1.05f : 1.0f) * seSum > sdSum;
- if (aec->divergeState)
- memcpy(efw, dfw, sizeof(efw[0][0]) * 2 * PART_LEN1);
-
- // Reset if error is significantly larger than nearend (13 dB).
- if (!aec->extended_filter_enabled && seSum > (19.95f * sdSum))
- memset(aec->wfBuf, 0, sizeof(aec->wfBuf));
+ // Signal extreme filter divergence if the error is significantly larger
+ // than the nearend (13 dB).
+ *extreme_filter_divergence = (seSum > (19.95f * sdSum));
}
// Window time domain data to be used by the fft.
-__inline static void WindowData(float* x_windowed, const float* x) {
+static void WindowDataNEON(float* x_windowed, const float* x) {
int i;
for (i = 0; i < PART_LEN; i += 4) {
const float32x4_t vec_Buf1 = vld1q_f32(&x[i]);
@@ -648,8 +657,8 @@ __inline static void WindowData(float* x_windowed, const float* x) {
}
// Puts fft output data into a complex valued array.
-__inline static void StoreAsComplex(const float* data,
- float data_complex[2][PART_LEN1]) {
+static void StoreAsComplexNEON(const float* data,
+ float data_complex[2][PART_LEN1]) {
int i;
for (i = 0; i < PART_LEN; i += 4) {
const float32x4x2_t vec_data = vld2q_f32(&data[2 * i]);
@@ -665,32 +674,15 @@ __inline static void StoreAsComplex(const float* data,
static void SubbandCoherenceNEON(AecCore* aec,
float efw[2][PART_LEN1],
+ float dfw[2][PART_LEN1],
float xfw[2][PART_LEN1],
float* fft,
float* cohde,
- float* cohxd) {
- float dfw[2][PART_LEN1];
+ float* cohxd,
+ int* extreme_filter_divergence) {
int i;
- if (aec->delayEstCtr == 0)
- aec->delayIdx = PartitionDelay(aec);
-
- // Use delayed far.
- memcpy(xfw,
- aec->xfwBuf + aec->delayIdx * PART_LEN1,
- sizeof(xfw[0][0]) * 2 * PART_LEN1);
-
- // Windowed near fft
- WindowData(fft, aec->dBuf);
- aec_rdft_forward_128(fft);
- StoreAsComplex(fft, dfw);
-
- // Windowed error fft
- WindowData(fft, aec->eBuf);
- aec_rdft_forward_128(fft);
- StoreAsComplex(fft, efw);
-
- SmoothedPSD(aec, efw, dfw, xfw);
+ SmoothedPSD(aec, efw, dfw, xfw, extreme_filter_divergence);
{
const float32x4_t vec_1eminus10 = vdupq_n_f32(1e-10f);
@@ -732,5 +724,7 @@ void WebRtcAec_InitAec_neon(void) {
WebRtcAec_FilterAdaptation = FilterAdaptationNEON;
WebRtcAec_OverdriveAndSuppress = OverdriveAndSuppressNEON;
WebRtcAec_SubbandCoherence = SubbandCoherenceNEON;
+ WebRtcAec_StoreAsComplex = StoreAsComplexNEON;
+ WebRtcAec_PartitionDelay = PartitionDelayNEON;
+ WebRtcAec_WindowData = WindowDataNEON;
}
-
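The NEON hunk above is part of a broader refactor: the per-architecture kernels (FilterFarNEON, ScaleErrorSignalNEON, FilterAdaptationNEON) no longer reach into AecCore* but take the buffers and scalars they need as explicit parameters, making them pure functions over their arguments. A hedged sketch of how the caller in aec_core.c presumably invokes the new FilterFar signature; the call site itself is not in this patch, so the function-pointer name and the exact expression are assumptions, while the aec-> field names come from the removed lines:

/* Assumed caller-side shape after the refactor. */
WebRtcAec_FilterFar(aec->num_partitions,
                    aec->xfBufBlockPos,
                    aec->xfBuf,  /* x_fft_buf: far-end FFT history  */
                    aec->wfBuf,  /* h_fft_buf: adaptive filter taps */
                    yf);         /* y_fft: accumulated output       */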
diff --git a/webrtc/modules/audio_processing/aec/aec_core_sse2.c b/webrtc/modules/audio_processing/aec/aec_core_sse2.c
index b1bffcbb9f..f897a4c0c7 100644
--- a/webrtc/modules/audio_processing/aec/aec_core_sse2.c
+++ b/webrtc/modules/audio_processing/aec/aec_core_sse2.c
@@ -29,67 +29,76 @@ __inline static float MulIm(float aRe, float aIm, float bRe, float bIm) {
return aRe * bIm + aIm * bRe;
}
-static void FilterFarSSE2(AecCore* aec, float yf[2][PART_LEN1]) {
+static void FilterFarSSE2(
+ int num_partitions,
+ int x_fft_buf_block_pos,
+ float x_fft_buf[2][kExtendedNumPartitions * PART_LEN1],
+ float h_fft_buf[2][kExtendedNumPartitions * PART_LEN1],
+ float y_fft[2][PART_LEN1]) {
+
int i;
- const int num_partitions = aec->num_partitions;
for (i = 0; i < num_partitions; i++) {
int j;
- int xPos = (i + aec->xfBufBlockPos) * PART_LEN1;
+ int xPos = (i + x_fft_buf_block_pos) * PART_LEN1;
int pos = i * PART_LEN1;
// Check for wrap
- if (i + aec->xfBufBlockPos >= num_partitions) {
+ if (i + x_fft_buf_block_pos >= num_partitions) {
xPos -= num_partitions * (PART_LEN1);
}
// vectorized code (four at once)
for (j = 0; j + 3 < PART_LEN1; j += 4) {
- const __m128 xfBuf_re = _mm_loadu_ps(&aec->xfBuf[0][xPos + j]);
- const __m128 xfBuf_im = _mm_loadu_ps(&aec->xfBuf[1][xPos + j]);
- const __m128 wfBuf_re = _mm_loadu_ps(&aec->wfBuf[0][pos + j]);
- const __m128 wfBuf_im = _mm_loadu_ps(&aec->wfBuf[1][pos + j]);
- const __m128 yf_re = _mm_loadu_ps(&yf[0][j]);
- const __m128 yf_im = _mm_loadu_ps(&yf[1][j]);
- const __m128 a = _mm_mul_ps(xfBuf_re, wfBuf_re);
- const __m128 b = _mm_mul_ps(xfBuf_im, wfBuf_im);
- const __m128 c = _mm_mul_ps(xfBuf_re, wfBuf_im);
- const __m128 d = _mm_mul_ps(xfBuf_im, wfBuf_re);
+ const __m128 x_fft_buf_re = _mm_loadu_ps(&x_fft_buf[0][xPos + j]);
+ const __m128 x_fft_buf_im = _mm_loadu_ps(&x_fft_buf[1][xPos + j]);
+ const __m128 h_fft_buf_re = _mm_loadu_ps(&h_fft_buf[0][pos + j]);
+ const __m128 h_fft_buf_im = _mm_loadu_ps(&h_fft_buf[1][pos + j]);
+ const __m128 y_fft_re = _mm_loadu_ps(&y_fft[0][j]);
+ const __m128 y_fft_im = _mm_loadu_ps(&y_fft[1][j]);
+ const __m128 a = _mm_mul_ps(x_fft_buf_re, h_fft_buf_re);
+ const __m128 b = _mm_mul_ps(x_fft_buf_im, h_fft_buf_im);
+ const __m128 c = _mm_mul_ps(x_fft_buf_re, h_fft_buf_im);
+ const __m128 d = _mm_mul_ps(x_fft_buf_im, h_fft_buf_re);
const __m128 e = _mm_sub_ps(a, b);
const __m128 f = _mm_add_ps(c, d);
- const __m128 g = _mm_add_ps(yf_re, e);
- const __m128 h = _mm_add_ps(yf_im, f);
- _mm_storeu_ps(&yf[0][j], g);
- _mm_storeu_ps(&yf[1][j], h);
+ const __m128 g = _mm_add_ps(y_fft_re, e);
+ const __m128 h = _mm_add_ps(y_fft_im, f);
+ _mm_storeu_ps(&y_fft[0][j], g);
+ _mm_storeu_ps(&y_fft[1][j], h);
}
// scalar code for the remaining items.
for (; j < PART_LEN1; j++) {
- yf[0][j] += MulRe(aec->xfBuf[0][xPos + j],
- aec->xfBuf[1][xPos + j],
- aec->wfBuf[0][pos + j],
- aec->wfBuf[1][pos + j]);
- yf[1][j] += MulIm(aec->xfBuf[0][xPos + j],
- aec->xfBuf[1][xPos + j],
- aec->wfBuf[0][pos + j],
- aec->wfBuf[1][pos + j]);
+ y_fft[0][j] += MulRe(x_fft_buf[0][xPos + j],
+ x_fft_buf[1][xPos + j],
+ h_fft_buf[0][pos + j],
+ h_fft_buf[1][pos + j]);
+ y_fft[1][j] += MulIm(x_fft_buf[0][xPos + j],
+ x_fft_buf[1][xPos + j],
+ h_fft_buf[0][pos + j],
+ h_fft_buf[1][pos + j]);
}
}
}
-static void ScaleErrorSignalSSE2(AecCore* aec, float ef[2][PART_LEN1]) {
+static void ScaleErrorSignalSSE2(int extended_filter_enabled,
+ float normal_mu,
+ float normal_error_threshold,
+ float x_pow[PART_LEN1],
+ float ef[2][PART_LEN1]) {
const __m128 k1e_10f = _mm_set1_ps(1e-10f);
- const __m128 kMu = aec->extended_filter_enabled ? _mm_set1_ps(kExtendedMu)
- : _mm_set1_ps(aec->normal_mu);
- const __m128 kThresh = aec->extended_filter_enabled
+ const __m128 kMu = extended_filter_enabled ? _mm_set1_ps(kExtendedMu)
+ : _mm_set1_ps(normal_mu);
+ const __m128 kThresh = extended_filter_enabled
? _mm_set1_ps(kExtendedErrorThreshold)
- : _mm_set1_ps(aec->normal_error_threshold);
+ : _mm_set1_ps(normal_error_threshold);
int i;
// vectorized code (four at once)
for (i = 0; i + 3 < PART_LEN1; i += 4) {
- const __m128 xPow = _mm_loadu_ps(&aec->xPow[i]);
+ const __m128 x_pow_local = _mm_loadu_ps(&x_pow[i]);
const __m128 ef_re_base = _mm_loadu_ps(&ef[0][i]);
const __m128 ef_im_base = _mm_loadu_ps(&ef[1][i]);
- const __m128 xPowPlus = _mm_add_ps(xPow, k1e_10f);
+ const __m128 xPowPlus = _mm_add_ps(x_pow_local, k1e_10f);
__m128 ef_re = _mm_div_ps(ef_re_base, xPowPlus);
__m128 ef_im = _mm_div_ps(ef_im_base, xPowPlus);
const __m128 ef_re2 = _mm_mul_ps(ef_re, ef_re);
@@ -116,14 +125,14 @@ static void ScaleErrorSignalSSE2(AecCore* aec, float ef[2][PART_LEN1]) {
// scalar code for the remaining items.
{
const float mu =
- aec->extended_filter_enabled ? kExtendedMu : aec->normal_mu;
- const float error_threshold = aec->extended_filter_enabled
+ extended_filter_enabled ? kExtendedMu : normal_mu;
+ const float error_threshold = extended_filter_enabled
? kExtendedErrorThreshold
- : aec->normal_error_threshold;
+ : normal_error_threshold;
for (; i < (PART_LEN1); i++) {
float abs_ef;
- ef[0][i] /= (aec->xPow[i] + 1e-10f);
- ef[1][i] /= (aec->xPow[i] + 1e-10f);
+ ef[0][i] /= (x_pow[i] + 1e-10f);
+ ef[1][i] /= (x_pow[i] + 1e-10f);
abs_ef = sqrtf(ef[0][i] * ef[0][i] + ef[1][i] * ef[1][i]);
if (abs_ef > error_threshold) {
@@ -139,33 +148,36 @@ static void ScaleErrorSignalSSE2(AecCore* aec, float ef[2][PART_LEN1]) {
}
}
-static void FilterAdaptationSSE2(AecCore* aec,
- float* fft,
- float ef[2][PART_LEN1]) {
+static void FilterAdaptationSSE2(
+ int num_partitions,
+ int x_fft_buf_block_pos,
+ float x_fft_buf[2][kExtendedNumPartitions * PART_LEN1],
+ float e_fft[2][PART_LEN1],
+ float h_fft_buf[2][kExtendedNumPartitions * PART_LEN1]) {
+ float fft[PART_LEN2];
int i, j;
- const int num_partitions = aec->num_partitions;
for (i = 0; i < num_partitions; i++) {
- int xPos = (i + aec->xfBufBlockPos) * (PART_LEN1);
+ int xPos = (i + x_fft_buf_block_pos) * (PART_LEN1);
int pos = i * PART_LEN1;
// Check for wrap
- if (i + aec->xfBufBlockPos >= num_partitions) {
+ if (i + x_fft_buf_block_pos >= num_partitions) {
xPos -= num_partitions * PART_LEN1;
}
// Process the whole array...
for (j = 0; j < PART_LEN; j += 4) {
- // Load xfBuf and ef.
- const __m128 xfBuf_re = _mm_loadu_ps(&aec->xfBuf[0][xPos + j]);
- const __m128 xfBuf_im = _mm_loadu_ps(&aec->xfBuf[1][xPos + j]);
- const __m128 ef_re = _mm_loadu_ps(&ef[0][j]);
- const __m128 ef_im = _mm_loadu_ps(&ef[1][j]);
- // Calculate the product of conjugate(xfBuf) by ef.
+ // Load x_fft_buf and e_fft.
+ const __m128 x_fft_buf_re = _mm_loadu_ps(&x_fft_buf[0][xPos + j]);
+ const __m128 x_fft_buf_im = _mm_loadu_ps(&x_fft_buf[1][xPos + j]);
+ const __m128 e_fft_re = _mm_loadu_ps(&e_fft[0][j]);
+ const __m128 e_fft_im = _mm_loadu_ps(&e_fft[1][j]);
+ // Calculate the product of conjugate(x_fft_buf) by e_fft.
// re(conjugate(a) * b) = aRe * bRe + aIm * bIm
// im(conjugate(a) * b)= aRe * bIm - aIm * bRe
- const __m128 a = _mm_mul_ps(xfBuf_re, ef_re);
- const __m128 b = _mm_mul_ps(xfBuf_im, ef_im);
- const __m128 c = _mm_mul_ps(xfBuf_re, ef_im);
- const __m128 d = _mm_mul_ps(xfBuf_im, ef_re);
+ const __m128 a = _mm_mul_ps(x_fft_buf_re, e_fft_re);
+ const __m128 b = _mm_mul_ps(x_fft_buf_im, e_fft_im);
+ const __m128 c = _mm_mul_ps(x_fft_buf_re, e_fft_im);
+ const __m128 d = _mm_mul_ps(x_fft_buf_im, e_fft_re);
const __m128 e = _mm_add_ps(a, b);
const __m128 f = _mm_sub_ps(c, d);
// Interleave real and imaginary parts.
@@ -176,10 +188,10 @@ static void FilterAdaptationSSE2(AecCore* aec,
_mm_storeu_ps(&fft[2 * j + 4], h);
}
// ... and fixup the first imaginary entry.
- fft[1] = MulRe(aec->xfBuf[0][xPos + PART_LEN],
- -aec->xfBuf[1][xPos + PART_LEN],
- ef[0][PART_LEN],
- ef[1][PART_LEN]);
+ fft[1] = MulRe(x_fft_buf[0][xPos + PART_LEN],
+ -x_fft_buf[1][xPos + PART_LEN],
+ e_fft[0][PART_LEN],
+ e_fft[1][PART_LEN]);
aec_rdft_inverse_128(fft);
memset(fft + PART_LEN, 0, sizeof(float) * PART_LEN);
@@ -197,11 +209,11 @@ static void FilterAdaptationSSE2(AecCore* aec,
aec_rdft_forward_128(fft);
{
- float wt1 = aec->wfBuf[1][pos];
- aec->wfBuf[0][pos + PART_LEN] += fft[1];
+ float wt1 = h_fft_buf[1][pos];
+ h_fft_buf[0][pos + PART_LEN] += fft[1];
for (j = 0; j < PART_LEN; j += 4) {
- __m128 wtBuf_re = _mm_loadu_ps(&aec->wfBuf[0][pos + j]);
- __m128 wtBuf_im = _mm_loadu_ps(&aec->wfBuf[1][pos + j]);
+ __m128 wtBuf_re = _mm_loadu_ps(&h_fft_buf[0][pos + j]);
+ __m128 wtBuf_im = _mm_loadu_ps(&h_fft_buf[1][pos + j]);
const __m128 fft0 = _mm_loadu_ps(&fft[2 * j + 0]);
const __m128 fft4 = _mm_loadu_ps(&fft[2 * j + 4]);
const __m128 fft_re =
@@ -210,10 +222,10 @@ static void FilterAdaptationSSE2(AecCore* aec,
_mm_shuffle_ps(fft0, fft4, _MM_SHUFFLE(3, 1, 3, 1));
wtBuf_re = _mm_add_ps(wtBuf_re, fft_re);
wtBuf_im = _mm_add_ps(wtBuf_im, fft_im);
- _mm_storeu_ps(&aec->wfBuf[0][pos + j], wtBuf_re);
- _mm_storeu_ps(&aec->wfBuf[1][pos + j], wtBuf_im);
+ _mm_storeu_ps(&h_fft_buf[0][pos + j], wtBuf_re);
+ _mm_storeu_ps(&h_fft_buf[1][pos + j], wtBuf_im);
}
- aec->wfBuf[1][pos] = wt1;
+ h_fft_buf[1][pos] = wt1;
}
}
}
@@ -427,7 +439,8 @@ __inline static void _mm_add_ps_4x1(__m128 sum, float *dst) {
sum = _mm_add_ps(sum, _mm_shuffle_ps(sum, sum, _MM_SHUFFLE(1, 1, 1, 1)));
_mm_store_ss(dst, sum);
}
-static int PartitionDelay(const AecCore* aec) {
+
+static int PartitionDelaySSE2(const AecCore* aec) {
// Measures the energy in each filter partition and returns the partition with
// highest energy.
// TODO(bjornv): Spread computational cost by computing one partition per
@@ -476,7 +489,8 @@ static int PartitionDelay(const AecCore* aec) {
static void SmoothedPSD(AecCore* aec,
float efw[2][PART_LEN1],
float dfw[2][PART_LEN1],
- float xfw[2][PART_LEN1]) {
+ float xfw[2][PART_LEN1],
+ int* extreme_filter_divergence) {
// Power estimate smoothing coefficients.
const float* ptrGCoh = aec->extended_filter_enabled
? WebRtcAec_kExtendedSmoothingCoefficients[aec->mult - 1]
@@ -595,19 +609,16 @@ static void SmoothedPSD(AecCore* aec,
seSum += aec->se[i];
}
- // Divergent filter safeguard.
+ // Divergent filter safeguard update.
aec->divergeState = (aec->divergeState ? 1.05f : 1.0f) * seSum > sdSum;
- if (aec->divergeState)
- memcpy(efw, dfw, sizeof(efw[0][0]) * 2 * PART_LEN1);
-
- // Reset if error is significantly larger than nearend (13 dB).
- if (!aec->extended_filter_enabled && seSum > (19.95f * sdSum))
- memset(aec->wfBuf, 0, sizeof(aec->wfBuf));
+ // Signal extreme filter divergence if the error is significantly larger
+ // than the nearend (13 dB).
+ *extreme_filter_divergence = (seSum > (19.95f * sdSum));
}
// Window time domain data to be used by the fft.
-__inline static void WindowData(float* x_windowed, const float* x) {
+static void WindowDataSSE2(float* x_windowed, const float* x) {
int i;
for (i = 0; i < PART_LEN; i += 4) {
const __m128 vec_Buf1 = _mm_loadu_ps(&x[i]);
@@ -627,8 +638,8 @@ __inline static void WindowData(float* x_windowed, const float* x) {
}
// Puts fft output data into a complex valued array.
-__inline static void StoreAsComplex(const float* data,
- float data_complex[2][PART_LEN1]) {
+static void StoreAsComplexSSE2(const float* data,
+ float data_complex[2][PART_LEN1]) {
int i;
for (i = 0; i < PART_LEN; i += 4) {
const __m128 vec_fft0 = _mm_loadu_ps(&data[2 * i]);
@@ -649,32 +660,15 @@ __inline static void StoreAsComplex(const float* data,
static void SubbandCoherenceSSE2(AecCore* aec,
float efw[2][PART_LEN1],
+ float dfw[2][PART_LEN1],
float xfw[2][PART_LEN1],
float* fft,
float* cohde,
- float* cohxd) {
- float dfw[2][PART_LEN1];
+ float* cohxd,
+ int* extreme_filter_divergence) {
int i;
- if (aec->delayEstCtr == 0)
- aec->delayIdx = PartitionDelay(aec);
-
- // Use delayed far.
- memcpy(xfw,
- aec->xfwBuf + aec->delayIdx * PART_LEN1,
- sizeof(xfw[0][0]) * 2 * PART_LEN1);
-
- // Windowed near fft
- WindowData(fft, aec->dBuf);
- aec_rdft_forward_128(fft);
- StoreAsComplex(fft, dfw);
-
- // Windowed error fft
- WindowData(fft, aec->eBuf);
- aec_rdft_forward_128(fft);
- StoreAsComplex(fft, efw);
-
- SmoothedPSD(aec, efw, dfw, xfw);
+ SmoothedPSD(aec, efw, dfw, xfw, extreme_filter_divergence);
{
const __m128 vec_1eminus10 = _mm_set1_ps(1e-10f);
@@ -728,4 +722,7 @@ void WebRtcAec_InitAec_SSE2(void) {
WebRtcAec_FilterAdaptation = FilterAdaptationSSE2;
WebRtcAec_OverdriveAndSuppress = OverdriveAndSuppressSSE2;
WebRtcAec_SubbandCoherence = SubbandCoherenceSSE2;
+ WebRtcAec_StoreAsComplex = StoreAsComplexSSE2;
+ WebRtcAec_PartitionDelay = PartitionDelaySSE2;
+ WebRtcAec_WindowData = WindowDataSSE2;
}
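In both the NEON and SSE2 files, SmoothedPSD() now only reports extreme divergence through the new out-parameter; the memset of wfBuf that the removed lines performed must now happen in the caller of WebRtcAec_SubbandCoherence. A sketch of the presumed consumption of the flag, grounded in the removed code but with an assumed call site:

int extreme_filter_divergence = 0;
WebRtcAec_SubbandCoherence(aec, efw, dfw, xfw, fft, cohde, cohxd,
                           &extreme_filter_divergence);
if (extreme_filter_divergence) {
  /* The reset that SmoothedPSD() used to perform internally when the
   * error exceeded the nearend by ~13 dB. */
  memset(aec->wfBuf, 0, sizeof(aec->wfBuf));
}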
diff --git a/webrtc/modules/audio_processing/aec/echo_cancellation.c b/webrtc/modules/audio_processing/aec/echo_cancellation.c
index 0f5cd31ddb..aab1718b24 100644
--- a/webrtc/modules/audio_processing/aec/echo_cancellation.c
+++ b/webrtc/modules/audio_processing/aec/echo_cancellation.c
@@ -11,7 +11,7 @@
/*
* Contains the API functions for the AEC.
*/
-#include "webrtc/modules/audio_processing/aec/include/echo_cancellation.h"
+#include "webrtc/modules/audio_processing/aec/echo_cancellation.h"
#include <math.h>
#ifdef WEBRTC_AEC_DEBUG_DUMP
@@ -146,7 +146,6 @@ void* WebRtcAec_Create() {
}
aecpc->initFlag = 0;
- aecpc->lastError = 0;
#ifdef WEBRTC_AEC_DEBUG_DUMP
{
@@ -192,26 +191,22 @@ int32_t WebRtcAec_Init(void* aecInst, int32_t sampFreq, int32_t scSampFreq) {
sampFreq != 16000 &&
sampFreq != 32000 &&
sampFreq != 48000) {
- aecpc->lastError = AEC_BAD_PARAMETER_ERROR;
- return -1;
+ return AEC_BAD_PARAMETER_ERROR;
}
aecpc->sampFreq = sampFreq;
if (scSampFreq < 1 || scSampFreq > 96000) {
- aecpc->lastError = AEC_BAD_PARAMETER_ERROR;
- return -1;
+ return AEC_BAD_PARAMETER_ERROR;
}
aecpc->scSampFreq = scSampFreq;
// Initialize echo canceller core
if (WebRtcAec_InitAec(aecpc->aec, aecpc->sampFreq) == -1) {
- aecpc->lastError = AEC_UNSPECIFIED_ERROR;
- return -1;
+ return AEC_UNSPECIFIED_ERROR;
}
if (WebRtcAec_InitResampler(aecpc->resampler, aecpc->scSampFreq) == -1) {
- aecpc->lastError = AEC_UNSPECIFIED_ERROR;
- return -1;
+ return AEC_UNSPECIFIED_ERROR;
}
WebRtc_InitBuffer(aecpc->far_pre_buf);
@@ -261,13 +256,32 @@ int32_t WebRtcAec_Init(void* aecInst, int32_t sampFreq, int32_t scSampFreq) {
aecConfig.delay_logging = kAecFalse;
if (WebRtcAec_set_config(aecpc, aecConfig) == -1) {
- aecpc->lastError = AEC_UNSPECIFIED_ERROR;
- return -1;
+ return AEC_UNSPECIFIED_ERROR;
}
return 0;
}
+// Returns any error that would be caused by buffering the
+// far-end signal.
+int32_t WebRtcAec_GetBufferFarendError(void* aecInst,
+ const float* farend,
+ size_t nrOfSamples) {
+ Aec* aecpc = aecInst;
+
+ if (!farend)
+ return AEC_NULL_POINTER_ERROR;
+
+ if (aecpc->initFlag != initCheck)
+ return AEC_UNINITIALIZED_ERROR;
+
+ // number of samples == 160 for SWB input
+ if (nrOfSamples != 80 && nrOfSamples != 160)
+ return AEC_BAD_PARAMETER_ERROR;
+
+ return 0;
+}
+
// only buffer L band for farend
int32_t WebRtcAec_BufferFarend(void* aecInst,
const float* farend,
@@ -277,21 +291,13 @@ int32_t WebRtcAec_BufferFarend(void* aecInst,
float new_farend[MAX_RESAMP_LEN];
const float* farend_ptr = farend;
- if (farend == NULL) {
- aecpc->lastError = AEC_NULL_POINTER_ERROR;
- return -1;
- }
+ // Get any error caused by buffering the farend signal.
+ int32_t error_code = WebRtcAec_GetBufferFarendError(aecInst, farend,
+ nrOfSamples);
- if (aecpc->initFlag != initCheck) {
- aecpc->lastError = AEC_UNINITIALIZED_ERROR;
- return -1;
- }
+ if (error_code != 0)
+ return error_code;
- // number of samples == 160 for SWB input
- if (nrOfSamples != 80 && nrOfSamples != 160) {
- aecpc->lastError = AEC_BAD_PARAMETER_ERROR;
- return -1;
- }
if (aecpc->skewMode == kAecTrue && aecpc->resample == kAecTrue) {
// Resample and get a new number of samples
@@ -311,7 +317,8 @@ int32_t WebRtcAec_BufferFarend(void* aecInst,
// Write the time-domain data to |far_pre_buf|.
WebRtc_WriteBuffer(aecpc->far_pre_buf, farend_ptr, newNrOfSamples);
- // Transform to frequency domain if we have enough data.
+ // TODO(minyue): reduce to |PART_LEN| samples for each buffering, when
+ // WebRtcAec_BufferFarendPartition() is changed to take |PART_LEN| samples.
while (WebRtc_available_read(aecpc->far_pre_buf) >= PART_LEN2) {
// We have enough data to pass to the FFT, hence read PART_LEN2 samples.
{
@@ -319,10 +326,6 @@ int32_t WebRtcAec_BufferFarend(void* aecInst,
float tmp[PART_LEN2];
WebRtc_ReadBuffer(aecpc->far_pre_buf, (void**)&ptmp, tmp, PART_LEN2);
WebRtcAec_BufferFarendPartition(aecpc->aec, ptmp);
-#ifdef WEBRTC_AEC_DEBUG_DUMP
- WebRtc_WriteBuffer(
- WebRtcAec_far_time_buf(aecpc->aec), &ptmp[PART_LEN], 1);
-#endif
}
// Rewind |far_pre_buf| PART_LEN samples for overlap before continuing.
@@ -343,29 +346,24 @@ int32_t WebRtcAec_Process(void* aecInst,
int32_t retVal = 0;
if (out == NULL) {
- aecpc->lastError = AEC_NULL_POINTER_ERROR;
- return -1;
+ return AEC_NULL_POINTER_ERROR;
}
if (aecpc->initFlag != initCheck) {
- aecpc->lastError = AEC_UNINITIALIZED_ERROR;
- return -1;
+ return AEC_UNINITIALIZED_ERROR;
}
// number of samples == 160 for SWB input
if (nrOfSamples != 80 && nrOfSamples != 160) {
- aecpc->lastError = AEC_BAD_PARAMETER_ERROR;
- return -1;
+ return AEC_BAD_PARAMETER_ERROR;
}
if (msInSndCardBuf < 0) {
msInSndCardBuf = 0;
- aecpc->lastError = AEC_BAD_PARAMETER_WARNING;
- retVal = -1;
+ retVal = AEC_BAD_PARAMETER_WARNING;
} else if (msInSndCardBuf > kMaxTrustedDelayMs) {
// The clamping is now done in ProcessExtended/Normal().
- aecpc->lastError = AEC_BAD_PARAMETER_WARNING;
- retVal = -1;
+ retVal = AEC_BAD_PARAMETER_WARNING;
}
// This returns the value of aec->extended_filter_enabled.
@@ -378,15 +376,13 @@ int32_t WebRtcAec_Process(void* aecInst,
msInSndCardBuf,
skew);
} else {
- if (ProcessNormal(aecpc,
- nearend,
- num_bands,
- out,
- nrOfSamples,
- msInSndCardBuf,
- skew) != 0) {
- retVal = -1;
- }
+ retVal = ProcessNormal(aecpc,
+ nearend,
+ num_bands,
+ out,
+ nrOfSamples,
+ msInSndCardBuf,
+ skew);
}
#ifdef WEBRTC_AEC_DEBUG_DUMP
@@ -405,31 +401,26 @@ int32_t WebRtcAec_Process(void* aecInst,
int WebRtcAec_set_config(void* handle, AecConfig config) {
Aec* self = (Aec*)handle;
if (self->initFlag != initCheck) {
- self->lastError = AEC_UNINITIALIZED_ERROR;
- return -1;
+ return AEC_UNINITIALIZED_ERROR;
}
if (config.skewMode != kAecFalse && config.skewMode != kAecTrue) {
- self->lastError = AEC_BAD_PARAMETER_ERROR;
- return -1;
+ return AEC_BAD_PARAMETER_ERROR;
}
self->skewMode = config.skewMode;
if (config.nlpMode != kAecNlpConservative &&
config.nlpMode != kAecNlpModerate &&
config.nlpMode != kAecNlpAggressive) {
- self->lastError = AEC_BAD_PARAMETER_ERROR;
- return -1;
+ return AEC_BAD_PARAMETER_ERROR;
}
if (config.metricsMode != kAecFalse && config.metricsMode != kAecTrue) {
- self->lastError = AEC_BAD_PARAMETER_ERROR;
- return -1;
+ return AEC_BAD_PARAMETER_ERROR;
}
if (config.delay_logging != kAecFalse && config.delay_logging != kAecTrue) {
- self->lastError = AEC_BAD_PARAMETER_ERROR;
- return -1;
+ return AEC_BAD_PARAMETER_ERROR;
}
WebRtcAec_SetConfigCore(
@@ -440,12 +431,10 @@ int WebRtcAec_set_config(void* handle, AecConfig config) {
int WebRtcAec_get_echo_status(void* handle, int* status) {
Aec* self = (Aec*)handle;
if (status == NULL) {
- self->lastError = AEC_NULL_POINTER_ERROR;
- return -1;
+ return AEC_NULL_POINTER_ERROR;
}
if (self->initFlag != initCheck) {
- self->lastError = AEC_UNINITIALIZED_ERROR;
- return -1;
+ return AEC_UNINITIALIZED_ERROR;
}
*status = WebRtcAec_echo_state(self->aec);
@@ -466,12 +455,10 @@ int WebRtcAec_GetMetrics(void* handle, AecMetrics* metrics) {
return -1;
}
if (metrics == NULL) {
- self->lastError = AEC_NULL_POINTER_ERROR;
- return -1;
+ return AEC_NULL_POINTER_ERROR;
}
if (self->initFlag != initCheck) {
- self->lastError = AEC_UNINITIALIZED_ERROR;
- return -1;
+ return AEC_UNINITIALIZED_ERROR;
}
WebRtcAec_GetEchoStats(self->aec, &erl, &erle, &a_nlp);
@@ -556,32 +543,24 @@ int WebRtcAec_GetDelayMetrics(void* handle,
float* fraction_poor_delays) {
Aec* self = handle;
if (median == NULL) {
- self->lastError = AEC_NULL_POINTER_ERROR;
- return -1;
+ return AEC_NULL_POINTER_ERROR;
}
if (std == NULL) {
- self->lastError = AEC_NULL_POINTER_ERROR;
- return -1;
+ return AEC_NULL_POINTER_ERROR;
}
if (self->initFlag != initCheck) {
- self->lastError = AEC_UNINITIALIZED_ERROR;
- return -1;
+ return AEC_UNINITIALIZED_ERROR;
}
if (WebRtcAec_GetDelayMetricsCore(self->aec, median, std,
fraction_poor_delays) ==
-1) {
// Logging disabled.
- self->lastError = AEC_UNSUPPORTED_FUNCTION_ERROR;
- return -1;
+ return AEC_UNSUPPORTED_FUNCTION_ERROR;
}
return 0;
}
-int32_t WebRtcAec_get_error_code(void* aecInst) {
- Aec* aecpc = aecInst;
- return aecpc->lastError;
-}
AecCore* WebRtcAec_aec_core(void* handle) {
if (!handle) {
@@ -617,7 +596,7 @@ static int ProcessNormal(Aec* aecpc,
retVal = WebRtcAec_GetSkew(aecpc->resampler, skew, &aecpc->skew);
if (retVal == -1) {
aecpc->skew = 0;
- aecpc->lastError = AEC_BAD_PARAMETER_WARNING;
+ retVal = AEC_BAD_PARAMETER_WARNING;
}
aecpc->skew /= aecpc->sampFactor * nrOfSamples;
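echo_cancellation.c drops the lastError field entirely: every public entry point now returns the AEC error code directly instead of returning -1 and stashing the code, and WebRtcAec_get_error_code() is deleted further below. A hedged before/after sketch of the caller-side impact (HandleError is a hypothetical placeholder, not a WebRTC function):

/* Before this patch:
 *   if (WebRtcAec_Init(aec, 16000, 16000) == -1)
 *     HandleError(WebRtcAec_get_error_code(aec));
 * After: the return value itself carries the error code. */
int32_t rc = WebRtcAec_Init(aec, 16000, 16000);
if (rc != 0)
  HandleError(rc);  /* e.g. AEC_BAD_PARAMETER_ERROR (12004) */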
diff --git a/webrtc/modules/audio_processing/aec/echo_cancellation.h b/webrtc/modules/audio_processing/aec/echo_cancellation.h
new file mode 100644
index 0000000000..de84b2e6d1
--- /dev/null
+++ b/webrtc/modules/audio_processing/aec/echo_cancellation.h
@@ -0,0 +1,251 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AEC_ECHO_CANCELLATION_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_AEC_ECHO_CANCELLATION_H_
+
+#include <stddef.h>
+
+#include "webrtc/typedefs.h"
+
+// Errors
+#define AEC_UNSPECIFIED_ERROR 12000
+#define AEC_UNSUPPORTED_FUNCTION_ERROR 12001
+#define AEC_UNINITIALIZED_ERROR 12002
+#define AEC_NULL_POINTER_ERROR 12003
+#define AEC_BAD_PARAMETER_ERROR 12004
+
+// Warnings
+#define AEC_BAD_PARAMETER_WARNING 12050
+
+enum {
+ kAecNlpConservative = 0,
+ kAecNlpModerate,
+ kAecNlpAggressive
+};
+
+enum {
+ kAecFalse = 0,
+ kAecTrue
+};
+
+typedef struct {
+ int16_t nlpMode; // default kAecNlpModerate
+ int16_t skewMode; // default kAecFalse
+ int16_t metricsMode; // default kAecFalse
+ int delay_logging; // default kAecFalse
+ // float realSkew;
+} AecConfig;
+
+typedef struct {
+ int instant;
+ int average;
+ int max;
+ int min;
+} AecLevel;
+
+typedef struct {
+ AecLevel rerl;
+ AecLevel erl;
+ AecLevel erle;
+ AecLevel aNlp;
+} AecMetrics;
+
+struct AecCore;
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/*
+ * Allocates the memory needed by the AEC. The memory needs to be initialized
+ * separately using the WebRtcAec_Init() function. Returns a pointer to the
+ * object or NULL on error.
+ */
+void* WebRtcAec_Create();
+
+/*
+ * This function releases the memory allocated by WebRtcAec_Create().
+ *
+ * Inputs Description
+ * -------------------------------------------------------------------
+ * void* aecInst Pointer to the AEC instance
+ */
+void WebRtcAec_Free(void* aecInst);
+
+/*
+ * Initializes an AEC instance.
+ *
+ * Inputs Description
+ * -------------------------------------------------------------------
+ * void* aecInst Pointer to the AEC instance
+ * int32_t sampFreq Sampling frequency of data
+ * int32_t scSampFreq Soundcard sampling frequency
+ *
+ * Outputs Description
+ * -------------------------------------------------------------------
+ * int32_t return 0: OK
+ * -1: error
+ */
+int32_t WebRtcAec_Init(void* aecInst, int32_t sampFreq, int32_t scSampFreq);
+
+/*
+ * Inserts an 80 or 160 sample block of data into the farend buffer.
+ *
+ * Inputs Description
+ * -------------------------------------------------------------------
+ * void* aecInst Pointer to the AEC instance
+ * const float* farend In buffer containing one frame of
+ * farend signal for L band
+ * size_t nrOfSamples Number of samples in farend buffer
+ *
+ * Outputs Description
+ * -------------------------------------------------------------------
+ * int32_t return 0: OK
+ * 12000-12050: error code
+ */
+int32_t WebRtcAec_BufferFarend(void* aecInst,
+ const float* farend,
+ size_t nrOfSamples);
+
+/*
+ * Reports any errors that would arise when buffering a farend frame.
+ *
+ * Inputs Description
+ * -------------------------------------------------------------------
+ * void* aecInst Pointer to the AEC instance
+ * const float* farend In buffer containing one frame of
+ * farend signal for L band
+ * size_t nrOfSamples Number of samples in farend buffer
+ *
+ * Outputs Description
+ * -------------------------------------------------------------------
+ * int32_t return 0: OK
+ * 12000-12050: error code
+ */
+int32_t WebRtcAec_GetBufferFarendError(void* aecInst,
+ const float* farend,
+ size_t nrOfSamples);
+
+/*
+ * Runs the echo canceller on an 80 or 160 sample block of data.
+ *
+ * Inputs Description
+ * -------------------------------------------------------------------
+ * void* aecInst Pointer to the AEC instance
+ * float* const* nearend In buffer containing one frame of
+ * nearend+echo signal for each band
+ * int num_bands Number of bands in nearend buffer
+ * size_t nrOfSamples Number of samples in nearend buffer
+ * int16_t msInSndCardBuf Delay estimate for sound card and
+ * system buffers
+ * int32_t skew Difference between number of samples played
+ * and recorded at the soundcard (for clock skew
+ * compensation)
+ *
+ * Outputs Description
+ * -------------------------------------------------------------------
+ * float* const* out Out buffer, one frame of processed nearend
+ * for each band
+ * int32_t return 0: OK
+ * 12000-12050: error code
+ */
+int32_t WebRtcAec_Process(void* aecInst,
+ const float* const* nearend,
+ size_t num_bands,
+ float* const* out,
+ size_t nrOfSamples,
+ int16_t msInSndCardBuf,
+ int32_t skew);
+
+/*
+ * This function enables the user to set certain parameters on-the-fly.
+ *
+ * Inputs Description
+ * -------------------------------------------------------------------
+ * void* handle Pointer to the AEC instance
+ * AecConfig config Config instance that contains all
+ * properties to be set
+ *
+ * Outputs Description
+ * -------------------------------------------------------------------
+ * int return 0: OK
+ * 12000-12050: error code
+ */
+int WebRtcAec_set_config(void* handle, AecConfig config);
+
+/*
+ * Gets the current echo status of the nearend signal.
+ *
+ * Inputs Description
+ * -------------------------------------------------------------------
+ * void* handle Pointer to the AEC instance
+ *
+ * Outputs Description
+ * -------------------------------------------------------------------
+ * int* status 0: Almost certainly nearend single-talk
+ * 1: Might not be nearend single-talk
+ * int return 0: OK
+ * 12000-12050: error code
+ */
+int WebRtcAec_get_echo_status(void* handle, int* status);
+
+/*
+ * Gets the current echo metrics for the session.
+ *
+ * Inputs Description
+ * -------------------------------------------------------------------
+ * void* handle Pointer to the AEC instance
+ *
+ * Outputs Description
+ * -------------------------------------------------------------------
+ * AecMetrics* metrics Struct which will be filled out with the
+ * current echo metrics.
+ * int return 0: OK
+ * 12000-12050: error code
+ */
+int WebRtcAec_GetMetrics(void* handle, AecMetrics* metrics);
+
+/*
+ * Gets the current delay metrics for the session.
+ *
+ * Inputs Description
+ * -------------------------------------------------------------------
+ * void* handle Pointer to the AEC instance
+ *
+ * Outputs Description
+ * -------------------------------------------------------------------
+ * int* median Delay median value.
+ * int* std Delay standard deviation.
+ * float* fraction_poor_delays Fraction of the delay estimates that may
+ * cause the AEC to perform poorly.
+ *
+ * int return 0: OK
+ * 12000-12050: error code
+ */
+int WebRtcAec_GetDelayMetrics(void* handle,
+ int* median,
+ int* std,
+ float* fraction_poor_delays);
+
+// Returns a pointer to the low level AEC handle.
+//
+// Input:
+// - handle : Pointer to the AEC instance.
+//
+// Return value:
+// - AecCore pointer : NULL for error.
+//
+struct AecCore* WebRtcAec_aec_core(void* handle);
+
+#ifdef __cplusplus
+}
+#endif
+#endif // WEBRTC_MODULES_AUDIO_PROCESSING_AEC_ECHO_CANCELLATION_H_
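A minimal usage sketch of the header above, showing the validate-then-buffer split that the new WebRtcAec_GetBufferFarendError() enables (the frame size of 160 samples and the helper function are illustrative):

#include "webrtc/modules/audio_processing/aec/echo_cancellation.h"

static void BufferOneFarendFrame(void* aec, const float* farend) {
  /* 160 samples is one accepted frame size; 80 is the other. */
  if (WebRtcAec_GetBufferFarendError(aec, farend, 160) != 0)
    return;  /* NULL pointer, uninitialized instance, or bad size. */
  WebRtcAec_BufferFarend(aec, farend, 160);
}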
diff --git a/webrtc/modules/audio_processing/aec/echo_cancellation_internal.h b/webrtc/modules/audio_processing/aec/echo_cancellation_internal.h
index 95a6cf3324..e87219f33d 100644
--- a/webrtc/modules/audio_processing/aec/echo_cancellation_internal.h
+++ b/webrtc/modules/audio_processing/aec/echo_cancellation_internal.h
@@ -57,8 +57,6 @@ typedef struct {
RingBuffer* far_pre_buf; // Time domain far-end pre-buffer.
- int lastError;
-
int farend_started;
AecCore* aec;
diff --git a/webrtc/modules/audio_processing/aec/echo_cancellation_unittest.cc b/webrtc/modules/audio_processing/aec/echo_cancellation_unittest.cc
index 315ac3e9f9..42db082ff9 100644
--- a/webrtc/modules/audio_processing/aec/echo_cancellation_unittest.cc
+++ b/webrtc/modules/audio_processing/aec/echo_cancellation_unittest.cc
@@ -10,7 +10,7 @@
// TODO(bjornv): Make this a comprehensive test.
-#include "webrtc/modules/audio_processing/aec/include/echo_cancellation.h"
+#include "webrtc/modules/audio_processing/aec/echo_cancellation.h"
#include <stdlib.h>
#include <time.h>
diff --git a/webrtc/modules/audio_processing/aec/include/echo_cancellation.h b/webrtc/modules/audio_processing/aec/include/echo_cancellation.h
deleted file mode 100644
index a340cf84d0..0000000000
--- a/webrtc/modules/audio_processing/aec/include/echo_cancellation.h
+++ /dev/null
@@ -1,245 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AEC_INCLUDE_ECHO_CANCELLATION_H_
-#define WEBRTC_MODULES_AUDIO_PROCESSING_AEC_INCLUDE_ECHO_CANCELLATION_H_
-
-#include <stddef.h>
-
-#include "webrtc/typedefs.h"
-
-// Errors
-#define AEC_UNSPECIFIED_ERROR 12000
-#define AEC_UNSUPPORTED_FUNCTION_ERROR 12001
-#define AEC_UNINITIALIZED_ERROR 12002
-#define AEC_NULL_POINTER_ERROR 12003
-#define AEC_BAD_PARAMETER_ERROR 12004
-
-// Warnings
-#define AEC_BAD_PARAMETER_WARNING 12050
-
-enum {
- kAecNlpConservative = 0,
- kAecNlpModerate,
- kAecNlpAggressive
-};
-
-enum {
- kAecFalse = 0,
- kAecTrue
-};
-
-typedef struct {
- int16_t nlpMode; // default kAecNlpModerate
- int16_t skewMode; // default kAecFalse
- int16_t metricsMode; // default kAecFalse
- int delay_logging; // default kAecFalse
- // float realSkew;
-} AecConfig;
-
-typedef struct {
- int instant;
- int average;
- int max;
- int min;
-} AecLevel;
-
-typedef struct {
- AecLevel rerl;
- AecLevel erl;
- AecLevel erle;
- AecLevel aNlp;
-} AecMetrics;
-
-struct AecCore;
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-/*
- * Allocates the memory needed by the AEC. The memory needs to be initialized
- * separately using the WebRtcAec_Init() function. Returns a pointer to the
- * object or NULL on error.
- */
-void* WebRtcAec_Create();
-
-/*
- * This function releases the memory allocated by WebRtcAec_Create().
- *
- * Inputs Description
- * -------------------------------------------------------------------
- * void* aecInst Pointer to the AEC instance
- */
-void WebRtcAec_Free(void* aecInst);
-
-/*
- * Initializes an AEC instance.
- *
- * Inputs Description
- * -------------------------------------------------------------------
- * void* aecInst Pointer to the AEC instance
- * int32_t sampFreq Sampling frequency of data
- * int32_t scSampFreq Soundcard sampling frequency
- *
- * Outputs Description
- * -------------------------------------------------------------------
- * int32_t return 0: OK
- * -1: error
- */
-int32_t WebRtcAec_Init(void* aecInst, int32_t sampFreq, int32_t scSampFreq);
-
-/*
- * Inserts an 80 or 160 sample block of data into the farend buffer.
- *
- * Inputs Description
- * -------------------------------------------------------------------
- * void* aecInst Pointer to the AEC instance
- * const float* farend In buffer containing one frame of
- * farend signal for L band
- * int16_t nrOfSamples Number of samples in farend buffer
- *
- * Outputs Description
- * -------------------------------------------------------------------
- * int32_t return 0: OK
- * -1: error
- */
-int32_t WebRtcAec_BufferFarend(void* aecInst,
- const float* farend,
- size_t nrOfSamples);
-
-/*
- * Runs the echo canceller on an 80 or 160 sample blocks of data.
- *
- * Inputs Description
- * -------------------------------------------------------------------
- * void* aecInst Pointer to the AEC instance
- * float* const* nearend In buffer containing one frame of
- * nearend+echo signal for each band
- * int num_bands Number of bands in nearend buffer
- * int16_t nrOfSamples Number of samples in nearend buffer
- * int16_t msInSndCardBuf Delay estimate for sound card and
- * system buffers
- * int16_t skew Difference between number of samples played
- * and recorded at the soundcard (for clock skew
- * compensation)
- *
- * Outputs Description
- * -------------------------------------------------------------------
- * float* const* out Out buffer, one frame of processed nearend
- * for each band
- * int32_t return 0: OK
- * -1: error
- */
-int32_t WebRtcAec_Process(void* aecInst,
- const float* const* nearend,
- size_t num_bands,
- float* const* out,
- size_t nrOfSamples,
- int16_t msInSndCardBuf,
- int32_t skew);
-
-/*
- * This function enables the user to set certain parameters on-the-fly.
- *
- * Inputs Description
- * -------------------------------------------------------------------
- * void* handle Pointer to the AEC instance
- * AecConfig config Config instance that contains all
- * properties to be set
- *
- * Outputs Description
- * -------------------------------------------------------------------
- * int return 0: OK
- * -1: error
- */
-int WebRtcAec_set_config(void* handle, AecConfig config);
-
-/*
- * Gets the current echo status of the nearend signal.
- *
- * Inputs Description
- * -------------------------------------------------------------------
- * void* handle Pointer to the AEC instance
- *
- * Outputs Description
- * -------------------------------------------------------------------
- * int* status 0: Almost certainly nearend single-talk
- * 1: Might not be neared single-talk
- * int return 0: OK
- * -1: error
- */
-int WebRtcAec_get_echo_status(void* handle, int* status);
-
-/*
- * Gets the current echo metrics for the session.
- *
- * Inputs Description
- * -------------------------------------------------------------------
- * void* handle Pointer to the AEC instance
- *
- * Outputs Description
- * -------------------------------------------------------------------
- * AecMetrics* metrics Struct which will be filled out with the
- * current echo metrics.
- * int return 0: OK
- * -1: error
- */
-int WebRtcAec_GetMetrics(void* handle, AecMetrics* metrics);
-
-/*
- * Gets the current delay metrics for the session.
- *
- * Inputs Description
- * -------------------------------------------------------------------
- * void* handle Pointer to the AEC instance
- *
- * Outputs Description
- * -------------------------------------------------------------------
- * int* median Delay median value.
- * int* std Delay standard deviation.
- * float* fraction_poor_delays Fraction of the delay estimates that may
- * cause the AEC to perform poorly.
- *
- * int return 0: OK
- * -1: error
- */
-int WebRtcAec_GetDelayMetrics(void* handle,
- int* median,
- int* std,
- float* fraction_poor_delays);
-
-/*
- * Gets the last error code.
- *
- * Inputs Description
- * -------------------------------------------------------------------
- * void* aecInst Pointer to the AEC instance
- *
- * Outputs Description
- * -------------------------------------------------------------------
- * int32_t return 11000-11100: error code
- */
-int32_t WebRtcAec_get_error_code(void* aecInst);
-
-// Returns a pointer to the low level AEC handle.
-//
-// Input:
-// - handle : Pointer to the AEC instance.
-//
-// Return value:
-// - AecCore pointer : NULL for error.
-//
-struct AecCore* WebRtcAec_aec_core(void* handle);
-
-#ifdef __cplusplus
-}
-#endif
-#endif // WEBRTC_MODULES_AUDIO_PROCESSING_AEC_INCLUDE_ECHO_CANCELLATION_H_
diff --git a/webrtc/modules/audio_processing/aec/system_delay_unittest.cc b/webrtc/modules/audio_processing/aec/system_delay_unittest.cc
index 07e3cf8add..567118d828 100644
--- a/webrtc/modules/audio_processing/aec/system_delay_unittest.cc
+++ b/webrtc/modules/audio_processing/aec/system_delay_unittest.cc
@@ -13,8 +13,7 @@ extern "C" {
#include "webrtc/modules/audio_processing/aec/aec_core.h"
}
#include "webrtc/modules/audio_processing/aec/echo_cancellation_internal.h"
-#include "webrtc/modules/audio_processing/aec/include/echo_cancellation.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
+#include "webrtc/modules/audio_processing/aec/echo_cancellation.h"
#include "webrtc/typedefs.h"
namespace {
diff --git a/webrtc/modules/audio_processing/aecm/aecm_core.c b/webrtc/modules/audio_processing/aecm/aecm_core.c
index f0d85d5328..6bf1cf7f3e 100644
--- a/webrtc/modules/audio_processing/aecm/aecm_core.c
+++ b/webrtc/modules/audio_processing/aecm/aecm_core.c
@@ -16,7 +16,7 @@
#include "webrtc/common_audio/ring_buffer.h"
#include "webrtc/common_audio/signal_processing/include/real_fft.h"
-#include "webrtc/modules/audio_processing/aecm/include/echo_control_mobile.h"
+#include "webrtc/modules/audio_processing/aecm/echo_control_mobile.h"
#include "webrtc/modules/audio_processing/utility/delay_estimator_wrapper.h"
#include "webrtc/system_wrappers/include/compile_assert_c.h"
#include "webrtc/system_wrappers/include/cpu_features_wrapper.h"
diff --git a/webrtc/modules/audio_processing/aecm/aecm_core_c.c b/webrtc/modules/audio_processing/aecm/aecm_core_c.c
index df95e8bedf..3a8fafa4ec 100644
--- a/webrtc/modules/audio_processing/aecm/aecm_core_c.c
+++ b/webrtc/modules/audio_processing/aecm/aecm_core_c.c
@@ -16,7 +16,7 @@
#include "webrtc/common_audio/ring_buffer.h"
#include "webrtc/common_audio/signal_processing/include/real_fft.h"
-#include "webrtc/modules/audio_processing/aecm/include/echo_control_mobile.h"
+#include "webrtc/modules/audio_processing/aecm/echo_control_mobile.h"
#include "webrtc/modules/audio_processing/utility/delay_estimator_wrapper.h"
#include "webrtc/system_wrappers/include/compile_assert_c.h"
#include "webrtc/system_wrappers/include/cpu_features_wrapper.h"
diff --git a/webrtc/modules/audio_processing/aecm/aecm_core_mips.c b/webrtc/modules/audio_processing/aecm/aecm_core_mips.c
index 3c2343a892..3ca9982ebf 100644
--- a/webrtc/modules/audio_processing/aecm/aecm_core_mips.c
+++ b/webrtc/modules/audio_processing/aecm/aecm_core_mips.c
@@ -12,7 +12,7 @@
#include <assert.h>
-#include "webrtc/modules/audio_processing/aecm/include/echo_control_mobile.h"
+#include "webrtc/modules/audio_processing/aecm/echo_control_mobile.h"
#include "webrtc/modules/audio_processing/utility/delay_estimator_wrapper.h"
static const ALIGN8_BEG int16_t WebRtcAecm_kSqrtHanning[] ALIGN8_END = {
diff --git a/webrtc/modules/audio_processing/aecm/echo_control_mobile.c b/webrtc/modules/audio_processing/aecm/echo_control_mobile.c
index 83781e97fe..91e6f0e80c 100644
--- a/webrtc/modules/audio_processing/aecm/echo_control_mobile.c
+++ b/webrtc/modules/audio_processing/aecm/echo_control_mobile.c
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_processing/aecm/include/echo_control_mobile.h"
+#include "webrtc/modules/audio_processing/aecm/echo_control_mobile.h"
#ifdef AEC_DEBUG
#include <stdio.h>
@@ -68,8 +68,6 @@ typedef struct
// Structures
RingBuffer *farendBuf;
- int lastError;
-
AecmCore* aecmCore;
} AecMobile;
@@ -100,7 +98,6 @@ void* WebRtcAecm_Create() {
}
aecm->initFlag = 0;
- aecm->lastError = 0;
#ifdef AEC_DEBUG
aecm->aecmCore->farFile = fopen("aecFar.pcm","wb");
@@ -151,16 +148,14 @@ int32_t WebRtcAecm_Init(void *aecmInst, int32_t sampFreq)
if (sampFreq != 8000 && sampFreq != 16000)
{
- aecm->lastError = AECM_BAD_PARAMETER_ERROR;
- return -1;
+ return AECM_BAD_PARAMETER_ERROR;
}
aecm->sampFreq = sampFreq;
// Initialize AECM core
if (WebRtcAecm_InitCore(aecm->aecmCore, aecm->sampFreq) == -1)
{
- aecm->lastError = AECM_UNSPECIFIED_ERROR;
- return -1;
+ return AECM_UNSPECIFIED_ERROR;
}
// Initialize farend buffer
@@ -191,51 +186,53 @@ int32_t WebRtcAecm_Init(void *aecmInst, int32_t sampFreq)
if (WebRtcAecm_set_config(aecm, aecConfig) == -1)
{
- aecm->lastError = AECM_UNSPECIFIED_ERROR;
- return -1;
+ return AECM_UNSPECIFIED_ERROR;
}
return 0;
}
-int32_t WebRtcAecm_BufferFarend(void *aecmInst, const int16_t *farend,
- size_t nrOfSamples)
-{
+// Returns any error that would be caused by buffering the
+// farend signal.
+int32_t WebRtcAecm_GetBufferFarendError(void *aecmInst, const int16_t *farend,
+ size_t nrOfSamples) {
AecMobile* aecm = aecmInst;
- int32_t retVal = 0;
- if (aecm == NULL)
- {
- return -1;
- }
+ if (aecm == NULL)
+ return -1;
- if (farend == NULL)
- {
- aecm->lastError = AECM_NULL_POINTER_ERROR;
- return -1;
- }
+ if (farend == NULL)
+ return AECM_NULL_POINTER_ERROR;
- if (aecm->initFlag != kInitCheck)
- {
- aecm->lastError = AECM_UNINITIALIZED_ERROR;
- return -1;
- }
+ if (aecm->initFlag != kInitCheck)
+ return AECM_UNINITIALIZED_ERROR;
- if (nrOfSamples != 80 && nrOfSamples != 160)
- {
- aecm->lastError = AECM_BAD_PARAMETER_ERROR;
- return -1;
- }
+ if (nrOfSamples != 80 && nrOfSamples != 160)
+ return AECM_BAD_PARAMETER_ERROR;
- // TODO: Is this really a good idea?
- if (!aecm->ECstartup)
- {
- WebRtcAecm_DelayComp(aecm);
- }
+ return 0;
+}
- WebRtc_WriteBuffer(aecm->farendBuf, farend, nrOfSamples);
- return retVal;
+int32_t WebRtcAecm_BufferFarend(void *aecmInst, const int16_t *farend,
+ size_t nrOfSamples) {
+ AecMobile* aecm = aecmInst;
+
+ const int32_t err =
+ WebRtcAecm_GetBufferFarendError(aecmInst, farend, nrOfSamples);
+
+ if (err != 0)
+ return err;
+
+ // TODO(unknown): Is this really a good idea?
+ if (!aecm->ECstartup)
+ {
+ WebRtcAecm_DelayComp(aecm);
+ }
+
+ WebRtc_WriteBuffer(aecm->farendBuf, farend, nrOfSamples);
+
+ return 0;
}
int32_t WebRtcAecm_Process(void *aecmInst, const int16_t *nearendNoisy,
@@ -259,38 +256,32 @@ int32_t WebRtcAecm_Process(void *aecmInst, const int16_t *nearendNoisy,
if (nearendNoisy == NULL)
{
- aecm->lastError = AECM_NULL_POINTER_ERROR;
- return -1;
+ return AECM_NULL_POINTER_ERROR;
}
if (out == NULL)
{
- aecm->lastError = AECM_NULL_POINTER_ERROR;
- return -1;
+ return AECM_NULL_POINTER_ERROR;
}
if (aecm->initFlag != kInitCheck)
{
- aecm->lastError = AECM_UNINITIALIZED_ERROR;
- return -1;
+ return AECM_UNINITIALIZED_ERROR;
}
if (nrOfSamples != 80 && nrOfSamples != 160)
{
- aecm->lastError = AECM_BAD_PARAMETER_ERROR;
- return -1;
+ return AECM_BAD_PARAMETER_ERROR;
}
if (msInSndCardBuf < 0)
{
msInSndCardBuf = 0;
- aecm->lastError = AECM_BAD_PARAMETER_WARNING;
- retVal = -1;
+ retVal = AECM_BAD_PARAMETER_WARNING;
} else if (msInSndCardBuf > 500)
{
msInSndCardBuf = 500;
- aecm->lastError = AECM_BAD_PARAMETER_WARNING;
- retVal = -1;
+ retVal = AECM_BAD_PARAMETER_WARNING;
}
msInSndCardBuf += 10;
aecm->msInSndCardBuf = msInSndCardBuf;
@@ -453,21 +444,18 @@ int32_t WebRtcAecm_set_config(void *aecmInst, AecmConfig config)
if (aecm->initFlag != kInitCheck)
{
- aecm->lastError = AECM_UNINITIALIZED_ERROR;
- return -1;
+ return AECM_UNINITIALIZED_ERROR;
}
if (config.cngMode != AecmFalse && config.cngMode != AecmTrue)
{
- aecm->lastError = AECM_BAD_PARAMETER_ERROR;
- return -1;
+ return AECM_BAD_PARAMETER_ERROR;
}
aecm->aecmCore->cngMode = config.cngMode;
if (config.echoMode < 0 || config.echoMode > 4)
{
- aecm->lastError = AECM_BAD_PARAMETER_ERROR;
- return -1;
+ return AECM_BAD_PARAMETER_ERROR;
}
aecm->echoMode = config.echoMode;
@@ -524,33 +512,6 @@ int32_t WebRtcAecm_set_config(void *aecmInst, AecmConfig config)
return 0;
}
-int32_t WebRtcAecm_get_config(void *aecmInst, AecmConfig *config)
-{
- AecMobile* aecm = aecmInst;
-
- if (aecm == NULL)
- {
- return -1;
- }
-
- if (config == NULL)
- {
- aecm->lastError = AECM_NULL_POINTER_ERROR;
- return -1;
- }
-
- if (aecm->initFlag != kInitCheck)
- {
- aecm->lastError = AECM_UNINITIALIZED_ERROR;
- return -1;
- }
-
- config->cngMode = aecm->aecmCore->cngMode;
- config->echoMode = aecm->echoMode;
-
- return 0;
-}
-
int32_t WebRtcAecm_InitEchoPath(void* aecmInst,
const void* echo_path,
size_t size_bytes)
@@ -562,19 +523,16 @@ int32_t WebRtcAecm_InitEchoPath(void* aecmInst,
return -1;
}
if (echo_path == NULL) {
- aecm->lastError = AECM_NULL_POINTER_ERROR;
- return -1;
+ return AECM_NULL_POINTER_ERROR;
}
if (size_bytes != WebRtcAecm_echo_path_size_bytes())
{
// Input channel size does not match the size of AECM
- aecm->lastError = AECM_BAD_PARAMETER_ERROR;
- return -1;
+ return AECM_BAD_PARAMETER_ERROR;
}
if (aecm->initFlag != kInitCheck)
{
- aecm->lastError = AECM_UNINITIALIZED_ERROR;
- return -1;
+ return AECM_UNINITIALIZED_ERROR;
}
WebRtcAecm_InitEchoPathCore(aecm->aecmCore, echo_path_ptr);
@@ -593,19 +551,16 @@ int32_t WebRtcAecm_GetEchoPath(void* aecmInst,
return -1;
}
if (echo_path == NULL) {
- aecm->lastError = AECM_NULL_POINTER_ERROR;
- return -1;
+ return AECM_NULL_POINTER_ERROR;
}
if (size_bytes != WebRtcAecm_echo_path_size_bytes())
{
// Input channel size does not match the size of AECM
- aecm->lastError = AECM_BAD_PARAMETER_ERROR;
- return -1;
+ return AECM_BAD_PARAMETER_ERROR;
}
if (aecm->initFlag != kInitCheck)
{
- aecm->lastError = AECM_UNINITIALIZED_ERROR;
- return -1;
+ return AECM_UNINITIALIZED_ERROR;
}
memcpy(echo_path_ptr, aecm->aecmCore->channelStored, size_bytes);
@@ -617,17 +572,6 @@ size_t WebRtcAecm_echo_path_size_bytes()
return (PART_LEN1 * sizeof(int16_t));
}
-int32_t WebRtcAecm_get_error_code(void *aecmInst)
-{
- AecMobile* aecm = aecmInst;
-
- if (aecm == NULL)
- {
- return -1;
- }
-
- return aecm->lastError;
-}
static int WebRtcAecm_EstBufDelay(AecMobile* aecm, short msInSndCardBuf) {
short delayNew, nSampSndCard;
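The AECM changes mirror the AEC ones: lastError and WebRtcAecm_get_error_code() are removed, and WebRtcAecm_get_config() is deleted outright, so a caller that needs the current settings has to retain the last AecmConfig it applied. A hedged sketch of one way to do that (the wrapper struct and helper are illustrative, not part of WebRTC):

typedef struct {
  void* aecm;         /* from WebRtcAecm_Create() */
  AecmConfig config;  /* last configuration successfully applied */
} AecmHandle;

static int32_t AecmSetConfig(AecmHandle* h, AecmConfig config) {
  const int32_t err = WebRtcAecm_set_config(h->aecm, config);
  if (err == 0)
    h->config = config;  /* stands in for the deleted get_config() */
  return err;
}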
diff --git a/webrtc/modules/audio_processing/aecm/echo_control_mobile.h b/webrtc/modules/audio_processing/aecm/echo_control_mobile.h
new file mode 100644
index 0000000000..b45ff59907
--- /dev/null
+++ b/webrtc/modules/audio_processing/aecm/echo_control_mobile.h
@@ -0,0 +1,209 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AECM_ECHO_CONTROL_MOBILE_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_AECM_ECHO_CONTROL_MOBILE_H_
+
+#include <stdlib.h>
+
+#include "webrtc/typedefs.h"
+
+enum {
+ AecmFalse = 0,
+ AecmTrue
+};
+
+// Errors
+#define AECM_UNSPECIFIED_ERROR 12000
+#define AECM_UNSUPPORTED_FUNCTION_ERROR 12001
+#define AECM_UNINITIALIZED_ERROR 12002
+#define AECM_NULL_POINTER_ERROR 12003
+#define AECM_BAD_PARAMETER_ERROR 12004
+
+// Warnings
+#define AECM_BAD_PARAMETER_WARNING 12100
+
+typedef struct {
+ int16_t cngMode; // AecmFalse, AecmTrue (default)
+ int16_t echoMode; // 0, 1, 2, 3 (default), 4
+} AecmConfig;
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/*
+ * Allocates the memory needed by the AECM. The memory needs to be
+ * initialized separately using the WebRtcAecm_Init() function.
+ * Returns a pointer to the instance, or a null pointer on failure.
+ */
+void* WebRtcAecm_Create();
+
+/*
+ * This function releases the memory allocated by WebRtcAecm_Create()
+ *
+ * Inputs Description
+ * -------------------------------------------------------------------
+ * void* aecmInst Pointer to the AECM instance
+ */
+void WebRtcAecm_Free(void* aecmInst);
+
+/*
+ * Initializes an AECM instance.
+ *
+ * Inputs Description
+ * -------------------------------------------------------------------
+ * void* aecmInst Pointer to the AECM instance
+ * int32_t sampFreq Sampling frequency of data
+ *
+ * Outputs Description
+ * -------------------------------------------------------------------
+ * int32_t return 0: OK
+ * 12000-12004,12100: error/warning
+ */
+int32_t WebRtcAecm_Init(void* aecmInst, int32_t sampFreq);
+
+/*
+ * Inserts an 80 or 160 sample block of data into the farend buffer.
+ *
+ * Inputs Description
+ * -------------------------------------------------------------------
+ * void* aecmInst Pointer to the AECM instance
+ * int16_t* farend In buffer containing one frame of
+ * farend signal
+ * size_t nrOfSamples Number of samples in farend buffer
+ *
+ * Outputs Description
+ * -------------------------------------------------------------------
+ * int32_t return 0: OK
+ * 12000-12004,12100: error/warning
+ */
+int32_t WebRtcAecm_BufferFarend(void* aecmInst,
+ const int16_t* farend,
+ size_t nrOfSamples);
+
+/*
+ * Reports any errors that would arise when buffering the given farend signal.
+ *
+ * Inputs Description
+ * -------------------------------------------------------------------
+ * void* aecmInst Pointer to the AECM instance
+ * int16_t* farend In buffer containing one frame of
+ * farend signal
+ * size_t nrOfSamples Number of samples in farend buffer
+ *
+ * Outputs Description
+ * -------------------------------------------------------------------
+ * int32_t return 0: OK
+ * 12000-12004,12100: error/warning
+ */
+int32_t WebRtcAecm_GetBufferFarendError(void* aecmInst,
+ const int16_t* farend,
+ size_t nrOfSamples);
+
+/*
+ * Runs the AECM on an 80 or 160 sample block of data.
+ *
+ * Inputs Description
+ * -------------------------------------------------------------------
+ * void* aecmInst Pointer to the AECM instance
+ * int16_t* nearendNoisy In buffer containing one frame of
+ * reference nearend+echo signal. If
+ * noise reduction is active, provide
+ * the noisy signal here.
+ * int16_t* nearendClean In buffer containing one frame of
+ * nearend+echo signal. If noise
+ * reduction is active, provide the
+ * clean signal here. Otherwise pass a
+ * NULL pointer.
+ * size_t nrOfSamples Number of samples in nearend buffer
+ * int16_t msInSndCardBuf Delay estimate for sound card and
+ * system buffers
+ *
+ * Outputs Description
+ * -------------------------------------------------------------------
+ * int16_t* out Out buffer, one frame of processed nearend
+ * int32_t return 0: OK
+ * 12000-12004,12100: error/warning
+ */
+int32_t WebRtcAecm_Process(void* aecmInst,
+ const int16_t* nearendNoisy,
+ const int16_t* nearendClean,
+ int16_t* out,
+ size_t nrOfSamples,
+ int16_t msInSndCardBuf);
+
+/*
+ * This function enables the user to set certain parameters on-the-fly.
+ *
+ * Inputs Description
+ * -------------------------------------------------------------------
+ * void* aecmInst Pointer to the AECM instance
+ * AecmConfig config Config instance that contains all
+ * properties to be set
+ *
+ * Outputs Description
+ * -------------------------------------------------------------------
+ * int32_t return 0: OK
+ * 12000-12004,12100: error/warning
+ */
+int32_t WebRtcAecm_set_config(void* aecmInst, AecmConfig config);
+
+/*
+ * This function enables the user to set the echo path on-the-fly.
+ *
+ * Inputs Description
+ * -------------------------------------------------------------------
+ * void* aecmInst Pointer to the AECM instance
+ * void* echo_path Pointer to the echo path to be set
+ * size_t size_bytes Size in bytes of the echo path
+ *
+ * Outputs Description
+ * -------------------------------------------------------------------
+ * int32_t return 0: OK
+ * 12000-12004,12100: error/warning
+ */
+int32_t WebRtcAecm_InitEchoPath(void* aecmInst,
+ const void* echo_path,
+ size_t size_bytes);
+
+/*
+ * This function enables the user to get the currently used echo path
+ * on-the-fly.
+ *
+ * Inputs Description
+ * -------------------------------------------------------------------
+ * void* aecmInst Pointer to the AECM instance
+ * void* echo_path Pointer to echo path
+ * size_t size_bytes Size in bytes of the echo path
+ *
+ * Outputs Description
+ * -------------------------------------------------------------------
+ * int32_t return 0: OK
+ * 12000-12004,12100: error/warning
+ */
+int32_t WebRtcAecm_GetEchoPath(void* aecmInst,
+ void* echo_path,
+ size_t size_bytes);
+
+/*
+ * This function enables the user to get the echo path size in bytes
+ *
+ * Outputs Description
+ * -------------------------------------------------------------------
+ * size_t return Size in bytes
+ */
+size_t WebRtcAecm_echo_path_size_bytes();
+
+#ifdef __cplusplus
+}
+#endif
+#endif // WEBRTC_MODULES_AUDIO_PROCESSING_AECM_ECHO_CONTROL_MOBILE_H_
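To make the declarations above concrete, here is a hedged end-to-end sketch of the lifecycle: 16 kHz mono with 160-sample (10 ms) frames, silent placeholder buffers standing in for real audio, and a zero sound-card delay estimate assumed for simplicity:

#include "webrtc/modules/audio_processing/aecm/echo_control_mobile.h"

int run_aecm_once(void) {
  int16_t farend[160] = {0};   /* far-end (render) frame */
  int16_t nearend[160] = {0};  /* near-end (capture) frame, noisy */
  int16_t out[160];
  int32_t err;

  void* aecm = WebRtcAecm_Create();
  if (aecm == NULL)
    return -1;

  err = WebRtcAecm_Init(aecm, 16000);
  if (err != 0)
    goto done;

  /* Feed the far-end signal, then process the near-end frame against it.
   * Passing NULL for nearendClean means no separate noise-reduced input. */
  err = WebRtcAecm_BufferFarend(aecm, farend, 160);
  if (err != 0)
    goto done;
  err = WebRtcAecm_Process(aecm, nearend, NULL, out, 160,
                           0 /* msInSndCardBuf */);

done:
  WebRtcAecm_Free(aecm);
  return err == 0 ? 0 : -1;
}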
diff --git a/webrtc/modules/audio_processing/aecm/include/echo_control_mobile.h b/webrtc/modules/audio_processing/aecm/include/echo_control_mobile.h
deleted file mode 100644
index 7ae15c2a3d..0000000000
--- a/webrtc/modules/audio_processing/aecm/include/echo_control_mobile.h
+++ /dev/null
@@ -1,218 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AECM_INCLUDE_ECHO_CONTROL_MOBILE_H_
-#define WEBRTC_MODULES_AUDIO_PROCESSING_AECM_INCLUDE_ECHO_CONTROL_MOBILE_H_
-
-#include <stdlib.h>
-
-#include "webrtc/typedefs.h"
-
-enum {
- AecmFalse = 0,
- AecmTrue
-};
-
-// Errors
-#define AECM_UNSPECIFIED_ERROR 12000
-#define AECM_UNSUPPORTED_FUNCTION_ERROR 12001
-#define AECM_UNINITIALIZED_ERROR 12002
-#define AECM_NULL_POINTER_ERROR 12003
-#define AECM_BAD_PARAMETER_ERROR 12004
-
-// Warnings
-#define AECM_BAD_PARAMETER_WARNING 12100
-
-typedef struct {
- int16_t cngMode; // AECM_FALSE, AECM_TRUE (default)
- int16_t echoMode; // 0, 1, 2, 3 (default), 4
-} AecmConfig;
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-/*
- * Allocates the memory needed by the AECM. The memory needs to be
- * initialized separately using the WebRtcAecm_Init() function.
- * Returns a pointer to the instance and a nullptr at failure.
- */
-void* WebRtcAecm_Create();
-
-/*
- * This function releases the memory allocated by WebRtcAecm_Create()
- *
- * Inputs Description
- * -------------------------------------------------------------------
- * void* aecmInst Pointer to the AECM instance
- */
-void WebRtcAecm_Free(void* aecmInst);
-
-/*
- * Initializes an AECM instance.
- *
- * Inputs Description
- * -------------------------------------------------------------------
- * void* aecmInst Pointer to the AECM instance
- * int32_t sampFreq Sampling frequency of data
- *
- * Outputs Description
- * -------------------------------------------------------------------
- * int32_t return 0: OK
- * -1: error
- */
-int32_t WebRtcAecm_Init(void* aecmInst, int32_t sampFreq);
-
-/*
- * Inserts an 80 or 160 sample block of data into the farend buffer.
- *
- * Inputs Description
- * -------------------------------------------------------------------
- * void* aecmInst Pointer to the AECM instance
- * int16_t* farend In buffer containing one frame of
- * farend signal
- * int16_t nrOfSamples Number of samples in farend buffer
- *
- * Outputs Description
- * -------------------------------------------------------------------
- * int32_t return 0: OK
- * -1: error
- */
-int32_t WebRtcAecm_BufferFarend(void* aecmInst,
- const int16_t* farend,
- size_t nrOfSamples);
-
-/*
- * Runs the AECM on an 80 or 160 sample blocks of data.
- *
- * Inputs Description
- * -------------------------------------------------------------------
- * void* aecmInst Pointer to the AECM instance
- * int16_t* nearendNoisy In buffer containing one frame of
- * reference nearend+echo signal. If
- * noise reduction is active, provide
- * the noisy signal here.
- * int16_t* nearendClean In buffer containing one frame of
- * nearend+echo signal. If noise
- * reduction is active, provide the
- * clean signal here. Otherwise pass a
- * NULL pointer.
- * int16_t nrOfSamples Number of samples in nearend buffer
- * int16_t msInSndCardBuf Delay estimate for sound card and
- * system buffers
- *
- * Outputs Description
- * -------------------------------------------------------------------
- * int16_t* out Out buffer, one frame of processed nearend
- * int32_t return 0: OK
- * -1: error
- */
-int32_t WebRtcAecm_Process(void* aecmInst,
- const int16_t* nearendNoisy,
- const int16_t* nearendClean,
- int16_t* out,
- size_t nrOfSamples,
- int16_t msInSndCardBuf);
-
-/*
- * This function enables the user to set certain parameters on-the-fly
- *
- * Inputs Description
- * -------------------------------------------------------------------
- * void* aecmInst Pointer to the AECM instance
- * AecmConfig config Config instance that contains all
- * properties to be set
- *
- * Outputs Description
- * -------------------------------------------------------------------
- * int32_t return 0: OK
- * -1: error
- */
-int32_t WebRtcAecm_set_config(void* aecmInst, AecmConfig config);
-
-/*
- * This function enables the user to set certain parameters on-the-fly
- *
- * Inputs Description
- * -------------------------------------------------------------------
- * void* aecmInst Pointer to the AECM instance
- *
- * Outputs Description
- * -------------------------------------------------------------------
- * AecmConfig* config Pointer to the config instance that
- * all properties will be written to
- * int32_t return 0: OK
- * -1: error
- */
-int32_t WebRtcAecm_get_config(void *aecmInst, AecmConfig *config);
-
-/*
- * This function enables the user to set the echo path on-the-fly.
- *
- * Inputs Description
- * -------------------------------------------------------------------
- * void* aecmInst Pointer to the AECM instance
- * void* echo_path Pointer to the echo path to be set
- * size_t size_bytes Size in bytes of the echo path
- *
- * Outputs Description
- * -------------------------------------------------------------------
- * int32_t return 0: OK
- * -1: error
- */
-int32_t WebRtcAecm_InitEchoPath(void* aecmInst,
- const void* echo_path,
- size_t size_bytes);
-
-/*
- * This function enables the user to get the currently used echo path
- * on-the-fly
- *
- * Inputs Description
- * -------------------------------------------------------------------
- * void* aecmInst Pointer to the AECM instance
- * void* echo_path Pointer to echo path
- * size_t size_bytes Size in bytes of the echo path
- *
- * Outputs Description
- * -------------------------------------------------------------------
- * int32_t return 0: OK
- * -1: error
- */
-int32_t WebRtcAecm_GetEchoPath(void* aecmInst,
- void* echo_path,
- size_t size_bytes);
-
-/*
- * This function enables the user to get the echo path size in bytes
- *
- * Outputs Description
- * -------------------------------------------------------------------
- * size_t return Size in bytes
- */
-size_t WebRtcAecm_echo_path_size_bytes();
-
-/*
- * Gets the last error code.
- *
- * Inputs Description
- * -------------------------------------------------------------------
- * void* aecmInst Pointer to the AECM instance
- *
- * Outputs Description
- * -------------------------------------------------------------------
- * int32_t return 11000-11100: error code
- */
-int32_t WebRtcAecm_get_error_code(void *aecmInst);
-
-#ifdef __cplusplus
-}
-#endif
-#endif // WEBRTC_MODULES_AUDIO_PROCESSING_AECM_INCLUDE_ECHO_CONTROL_MOBILE_H_
diff --git a/webrtc/modules/audio_processing/agc/agc.cc b/webrtc/modules/audio_processing/agc/agc.cc
index 706b963aa1..fc78f07ebb 100644
--- a/webrtc/modules/audio_processing/agc/agc.cc
+++ b/webrtc/modules/audio_processing/agc/agc.cc
@@ -19,7 +19,7 @@
#include "webrtc/base/checks.h"
#include "webrtc/modules/audio_processing/agc/histogram.h"
#include "webrtc/modules/audio_processing/agc/utility.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
namespace webrtc {
namespace {
diff --git a/webrtc/modules/audio_processing/agc/agc_manager_direct.cc b/webrtc/modules/audio_processing/agc/agc_manager_direct.cc
index 867022dcbf..e56984a1b1 100644
--- a/webrtc/modules/audio_processing/agc/agc_manager_direct.cc
+++ b/webrtc/modules/audio_processing/agc/agc_manager_direct.cc
@@ -19,7 +19,7 @@
#include "webrtc/modules/audio_processing/agc/gain_map_internal.h"
#include "webrtc/modules/audio_processing/gain_control_impl.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/system_wrappers/include/logging.h"
namespace webrtc {
@@ -168,19 +168,19 @@ int AgcManagerDirect::Initialize() {
// example, what happens when we change devices.
if (gctrl_->set_mode(GainControl::kFixedDigital) != 0) {
- LOG_FERR1(LS_ERROR, set_mode, GainControl::kFixedDigital);
+ LOG(LS_ERROR) << "set_mode(GainControl::kFixedDigital) failed.";
return -1;
}
if (gctrl_->set_target_level_dbfs(2) != 0) {
- LOG_FERR1(LS_ERROR, set_target_level_dbfs, 2);
+ LOG(LS_ERROR) << "set_target_level_dbfs(2) failed.";
return -1;
}
if (gctrl_->set_compression_gain_db(kDefaultCompressionGain) != 0) {
- LOG_FERR1(LS_ERROR, set_compression_gain_db, kDefaultCompressionGain);
+ LOG(LS_ERROR) << "set_compression_gain_db(kDefaultCompressionGain) failed.";
return -1;
}
if (gctrl_->enable_limiter(true) != 0) {
- LOG_FERR1(LS_ERROR, enable_limiter, true);
+ LOG(LS_ERROR) << "enable_limiter(true) failed.";
return -1;
}
return 0;
@@ -244,7 +244,7 @@ void AgcManagerDirect::Process(const int16_t* audio,
}
if (agc_->Process(audio, length, sample_rate_hz) != 0) {
- LOG_FERR0(LS_ERROR, Agc::Process);
+ LOG(LS_ERROR) << "Agc::Process failed";
assert(false);
}
@@ -434,7 +434,8 @@ void AgcManagerDirect::UpdateCompressor() {
compression_ = new_compression;
compression_accumulator_ = new_compression;
if (gctrl_->set_compression_gain_db(compression_) != 0) {
- LOG_FERR1(LS_ERROR, set_compression_gain_db, compression_);
+ LOG(LS_ERROR) << "set_compression_gain_db(" << compression_
+ << ") failed.";
}
}
}
diff --git a/webrtc/modules/audio_processing/agc/agc_unittest.cc b/webrtc/modules/audio_processing/agc/agc_unittest.cc
index 66a8a2b1b3..25b99d8773 100644
--- a/webrtc/modules/audio_processing/agc/agc_unittest.cc
+++ b/webrtc/modules/audio_processing/agc/agc_unittest.cc
@@ -13,7 +13,7 @@
#include "gmock/gmock.h"
#include "gtest/gtest.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/tools/agc/test_utils.h"
diff --git a/webrtc/modules/audio_processing/agc/histogram.cc b/webrtc/modules/audio_processing/agc/histogram.cc
index 1d3035fe12..5c66727a9f 100644
--- a/webrtc/modules/audio_processing/agc/histogram.cc
+++ b/webrtc/modules/audio_processing/agc/histogram.cc
@@ -13,7 +13,7 @@
#include <cmath>
#include <cstring>
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
namespace webrtc {
diff --git a/webrtc/modules/audio_processing/agc/legacy/analog_agc.c b/webrtc/modules/audio_processing/agc/legacy/analog_agc.c
index be644d9701..3a1dc9d5ce 100644
--- a/webrtc/modules/audio_processing/agc/legacy/analog_agc.c
+++ b/webrtc/modules/audio_processing/agc/legacy/analog_agc.c
@@ -250,34 +250,35 @@ int WebRtcAgc_AddMic(void *state, int16_t* const* in_mic, size_t num_bands,
return 0;
}
-int WebRtcAgc_AddFarend(void *state, const int16_t *in_far, size_t samples)
-{
+int WebRtcAgc_AddFarend(void *state, const int16_t *in_far, size_t samples) {
+ LegacyAgc* stt = (LegacyAgc*)state;
+
+ int err = WebRtcAgc_GetAddFarendError(state, samples);
+
+ if (err != 0)
+ return err;
+
+ return WebRtcAgc_AddFarendToDigital(&stt->digitalAgc, in_far, samples);
+}
+
+int WebRtcAgc_GetAddFarendError(void *state, size_t samples) {
LegacyAgc* stt;
stt = (LegacyAgc*)state;
- if (stt == NULL)
- {
- return -1;
- }
+ if (stt == NULL)
+ return -1;
- if (stt->fs == 8000)
- {
- if (samples != 80)
- {
- return -1;
- }
- } else if (stt->fs == 16000 || stt->fs == 32000 || stt->fs == 48000)
- {
- if (samples != 160)
- {
- return -1;
- }
- } else
- {
- return -1;
- }
+ if (stt->fs == 8000) {
+ if (samples != 80)
+ return -1;
+ } else if (stt->fs == 16000 || stt->fs == 32000 || stt->fs == 48000) {
+ if (samples != 160)
+ return -1;
+ } else {
+ return -1;
+ }
- return WebRtcAgc_AddFarendToDigital(&stt->digitalAgc, in_far, samples);
+ return 0;
}
int WebRtcAgc_VirtualMic(void *agcInst, int16_t* const* in_near,
diff --git a/webrtc/modules/audio_processing/agc/legacy/gain_control.h b/webrtc/modules/audio_processing/agc/legacy/gain_control.h
index 08c1988f01..db942fe5ec 100644
--- a/webrtc/modules/audio_processing/agc/legacy/gain_control.h
+++ b/webrtc/modules/audio_processing/agc/legacy/gain_control.h
@@ -50,6 +50,20 @@ extern "C"
#endif
/*
+ * This function analyses the number of samples that are to be
+ * passed to the farend and returns any error code that would arise.
+ *
+ * Input:
+ * - state : AGC instance.
+ * - samples : Number of samples in input vector.
+ *
+ * Return value:
+ * : 0 - Normal operation.
+ * : -1 - Error.
+ */
+int WebRtcAgc_GetAddFarendError(void* state, size_t samples);
+
+/*
* This function processes a 10 ms frame of far-end speech to determine
* if there is active speech. The length of the input speech vector must be
* given in samples (80 when FS=8000, and 160 when FS=16000, FS=32000 or
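The declaration above factors the far-end sample-count validation out of WebRtcAgc_AddFarend() (see the analog_agc.c hunk earlier), so a caller can vet a frame length without buffering anything. A small hedged wrapper, assuming an instance from the usual WebRtcAgc create/init sequence (not shown):

#include <stddef.h>

#include "webrtc/modules/audio_processing/agc/legacy/gain_control.h"

/* Returns 1 if |samples| is a legal far-end frame length for |agc|
 * (80 at 8 kHz; 160 at 16, 32, or 48 kHz), 0 otherwise -- the same
 * checks WebRtcAgc_AddFarend() now delegates to. */
static int farend_frame_ok(void* agc, size_t samples) {
  return WebRtcAgc_GetAddFarendError(agc, samples) == 0;
}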
diff --git a/webrtc/modules/audio_processing/agc/mock_agc.h b/webrtc/modules/audio_processing/agc/mock_agc.h
index 13dbd2edd5..e362200d86 100644
--- a/webrtc/modules/audio_processing/agc/mock_agc.h
+++ b/webrtc/modules/audio_processing/agc/mock_agc.h
@@ -14,7 +14,7 @@
#include "webrtc/modules/audio_processing/agc/agc.h"
#include "gmock/gmock.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
namespace webrtc {
diff --git a/webrtc/modules/audio_processing/audio_buffer.cc b/webrtc/modules/audio_processing/audio_buffer.cc
index 81790a159b..ff64267e8c 100644
--- a/webrtc/modules/audio_processing/audio_buffer.cc
+++ b/webrtc/modules/audio_processing/audio_buffer.cc
@@ -26,7 +26,7 @@ const size_t kSamplesPer48kHzChannel = 480;
int KeyboardChannelIndex(const StreamConfig& stream_config) {
if (!stream_config.has_keyboard()) {
assert(false);
- return -1;
+ return 0;
}
return stream_config.num_channels();
@@ -44,9 +44,9 @@ size_t NumBandsFromSamplesPerChannel(size_t num_frames) {
} // namespace
AudioBuffer::AudioBuffer(size_t input_num_frames,
- int num_input_channels,
+ size_t num_input_channels,
size_t process_num_frames,
- int num_process_channels,
+ size_t num_process_channels,
size_t output_num_frames)
: input_num_frames_(input_num_frames),
num_input_channels_(num_input_channels),
@@ -74,7 +74,7 @@ AudioBuffer::AudioBuffer(size_t input_num_frames,
num_proc_channels_));
if (input_num_frames_ != proc_num_frames_) {
- for (int i = 0; i < num_proc_channels_; ++i) {
+ for (size_t i = 0; i < num_proc_channels_; ++i) {
input_resamplers_.push_back(
new PushSincResampler(input_num_frames_,
proc_num_frames_));
@@ -82,7 +82,7 @@ AudioBuffer::AudioBuffer(size_t input_num_frames,
}
if (output_num_frames_ != proc_num_frames_) {
- for (int i = 0; i < num_proc_channels_; ++i) {
+ for (size_t i = 0; i < num_proc_channels_; ++i) {
output_resamplers_.push_back(
new PushSincResampler(proc_num_frames_,
output_num_frames_));
@@ -130,7 +130,7 @@ void AudioBuffer::CopyFrom(const float* const* data,
// Resample.
if (input_num_frames_ != proc_num_frames_) {
- for (int i = 0; i < num_proc_channels_; ++i) {
+ for (size_t i = 0; i < num_proc_channels_; ++i) {
input_resamplers_[i]->Resample(data_ptr[i],
input_num_frames_,
process_buffer_->channels()[i],
@@ -140,7 +140,7 @@ void AudioBuffer::CopyFrom(const float* const* data,
}
// Convert to the S16 range.
- for (int i = 0; i < num_proc_channels_; ++i) {
+ for (size_t i = 0; i < num_proc_channels_; ++i) {
FloatToFloatS16(data_ptr[i],
proc_num_frames_,
data_->fbuf()->channels()[i]);
@@ -150,7 +150,7 @@ void AudioBuffer::CopyFrom(const float* const* data,
void AudioBuffer::CopyTo(const StreamConfig& stream_config,
float* const* data) {
assert(stream_config.num_frames() == output_num_frames_);
- assert(stream_config.num_channels() == num_channels_);
+ assert(stream_config.num_channels() == num_channels_ || num_channels_ == 1);
// Convert to the float range.
float* const* data_ptr = data;
@@ -158,7 +158,7 @@ void AudioBuffer::CopyTo(const StreamConfig& stream_config,
// Convert to an intermediate buffer for subsequent resampling.
data_ptr = process_buffer_->channels();
}
- for (int i = 0; i < num_channels_; ++i) {
+ for (size_t i = 0; i < num_channels_; ++i) {
FloatS16ToFloat(data_->fbuf()->channels()[i],
proc_num_frames_,
data_ptr[i]);
@@ -166,13 +166,18 @@ void AudioBuffer::CopyTo(const StreamConfig& stream_config,
// Resample.
if (output_num_frames_ != proc_num_frames_) {
- for (int i = 0; i < num_channels_; ++i) {
+ for (size_t i = 0; i < num_channels_; ++i) {
output_resamplers_[i]->Resample(data_ptr[i],
proc_num_frames_,
data[i],
output_num_frames_);
}
}
+
+ // Upmix.
+ for (size_t i = num_channels_; i < stream_config.num_channels(); ++i) {
+ memcpy(data[i], data[0], output_num_frames_ * sizeof(**data));
+ }
}
void AudioBuffer::InitForNewData() {
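The loop added at the end of AudioBuffer::CopyTo() upmixes a mono processed stream by duplicating channel 0 into any extra requested output channels, which is also why the assert above now accepts num_channels_ == 1. The same operation as a plain C sketch (the names are illustrative, not class members):

#include <stddef.h>
#include <string.h>

/* Copy channel 0 into channels [num_proc, num_out): the memcpy upmix
 * added to AudioBuffer::CopyTo() above. */
static void upmix_from_mono(float* const* data, size_t num_proc,
                            size_t num_out, size_t num_frames) {
  for (size_t i = num_proc; i < num_out; ++i)
    memcpy(data[i], data[0], num_frames * sizeof(**data));
}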
@@ -192,13 +197,13 @@ int16_t* const* AudioBuffer::channels() {
return data_->ibuf()->channels();
}
-const int16_t* const* AudioBuffer::split_bands_const(int channel) const {
+const int16_t* const* AudioBuffer::split_bands_const(size_t channel) const {
return split_data_.get() ?
split_data_->ibuf_const()->bands(channel) :
data_->ibuf_const()->bands(channel);
}
-int16_t* const* AudioBuffer::split_bands(int channel) {
+int16_t* const* AudioBuffer::split_bands(size_t channel) {
mixed_low_pass_valid_ = false;
return split_data_.get() ?
split_data_->ibuf()->bands(channel) :
@@ -249,13 +254,13 @@ float* const* AudioBuffer::channels_f() {
return data_->fbuf()->channels();
}
-const float* const* AudioBuffer::split_bands_const_f(int channel) const {
+const float* const* AudioBuffer::split_bands_const_f(size_t channel) const {
return split_data_.get() ?
split_data_->fbuf_const()->bands(channel) :
data_->fbuf_const()->bands(channel);
}
-float* const* AudioBuffer::split_bands_f(int channel) {
+float* const* AudioBuffer::split_bands_f(size_t channel) {
mixed_low_pass_valid_ = false;
return split_data_.get() ?
split_data_->fbuf()->bands(channel) :
@@ -336,11 +341,11 @@ AudioFrame::VADActivity AudioBuffer::activity() const {
return activity_;
}
-int AudioBuffer::num_channels() const {
+size_t AudioBuffer::num_channels() const {
return num_channels_;
}
-void AudioBuffer::set_num_channels(int num_channels) {
+void AudioBuffer::set_num_channels(size_t num_channels) {
num_channels_ = num_channels;
}
@@ -393,7 +398,7 @@ void AudioBuffer::DeinterleaveFrom(AudioFrame* frame) {
// Resample.
if (input_num_frames_ != proc_num_frames_) {
- for (int i = 0; i < num_proc_channels_; ++i) {
+ for (size_t i = 0; i < num_proc_channels_; ++i) {
input_resamplers_[i]->Resample(input_buffer_->fbuf_const()->channels()[i],
input_num_frames_,
data_->fbuf()->channels()[i],
@@ -418,7 +423,7 @@ void AudioBuffer::InterleaveTo(AudioFrame* frame, bool data_changed) {
output_buffer_.reset(
new IFChannelBuffer(output_num_frames_, num_channels_));
}
- for (int i = 0; i < num_channels_; ++i) {
+ for (size_t i = 0; i < num_channels_; ++i) {
output_resamplers_[i]->Resample(
data_->fbuf()->channels()[i], proc_num_frames_,
output_buffer_->fbuf()->channels()[i], output_num_frames_);
@@ -443,7 +448,7 @@ void AudioBuffer::CopyLowPassToReference() {
new ChannelBuffer<int16_t>(num_split_frames_,
num_proc_channels_));
}
- for (int i = 0; i < num_proc_channels_; i++) {
+ for (size_t i = 0; i < num_proc_channels_; i++) {
memcpy(low_pass_reference_channels_->channels()[i],
split_bands_const(i)[kBand0To8kHz],
low_pass_reference_channels_->num_frames_per_band() *
diff --git a/webrtc/modules/audio_processing/audio_buffer.h b/webrtc/modules/audio_processing/audio_buffer.h
index 864633f267..ff12ca2d95 100644
--- a/webrtc/modules/audio_processing/audio_buffer.h
+++ b/webrtc/modules/audio_processing/audio_buffer.h
@@ -15,7 +15,7 @@
#include "webrtc/common_audio/channel_buffer.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/modules/audio_processing/splitting_filter.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/system_wrappers/include/scoped_vector.h"
#include "webrtc/typedefs.h"
@@ -34,14 +34,14 @@ class AudioBuffer {
public:
// TODO(ajm): Switch to take ChannelLayouts.
AudioBuffer(size_t input_num_frames,
- int num_input_channels,
+ size_t num_input_channels,
size_t process_num_frames,
- int num_process_channels,
+ size_t num_process_channels,
size_t output_num_frames);
virtual ~AudioBuffer();
- int num_channels() const;
- void set_num_channels(int num_channels);
+ size_t num_channels() const;
+ void set_num_channels(size_t num_channels);
size_t num_frames() const;
size_t num_frames_per_band() const;
size_t num_keyboard_frames() const;
@@ -65,10 +65,10 @@ class AudioBuffer {
// 0 <= channel < |num_proc_channels_|
// 0 <= band < |num_bands_|
// 0 <= sample < |num_split_frames_|
- int16_t* const* split_bands(int channel);
- const int16_t* const* split_bands_const(int channel) const;
- float* const* split_bands_f(int channel);
- const float* const* split_bands_const_f(int channel) const;
+ int16_t* const* split_bands(size_t channel);
+ const int16_t* const* split_bands_const(size_t channel) const;
+ float* const* split_bands_f(size_t channel);
+ const float* const* split_bands_const_f(size_t channel) const;
// Returns a pointer array to the channels for a specific band.
// Usage:
@@ -128,16 +128,16 @@ class AudioBuffer {
// The audio is passed into DeinterleaveFrom() or CopyFrom() with input
// format (samples per channel and number of channels).
const size_t input_num_frames_;
- const int num_input_channels_;
+ const size_t num_input_channels_;
// The audio is stored by DeinterleaveFrom() or CopyFrom() with processing
// format.
const size_t proc_num_frames_;
- const int num_proc_channels_;
+ const size_t num_proc_channels_;
// The audio is returned by InterleaveTo() and CopyTo() with output samples
// per channels and the current number of channels. This last one can be
// changed at any time using set_num_channels().
const size_t output_num_frames_;
- int num_channels_;
+ size_t num_channels_;
size_t num_bands_;
size_t num_split_frames_;
diff --git a/webrtc/modules/audio_processing/audio_processing.gypi b/webrtc/modules/audio_processing/audio_processing.gypi
index 8f1fbdf0be..7ddd4f5a15 100644
--- a/webrtc/modules/audio_processing/audio_processing.gypi
+++ b/webrtc/modules/audio_processing/audio_processing.gypi
@@ -41,11 +41,11 @@
'aec/aec_resampler.h',
'aec/echo_cancellation.c',
'aec/echo_cancellation_internal.h',
- 'aec/include/echo_cancellation.h',
+ 'aec/echo_cancellation.h',
'aecm/aecm_core.c',
'aecm/aecm_core.h',
'aecm/echo_control_mobile.c',
- 'aecm/include/echo_control_mobile.h',
+ 'aecm/echo_control_mobile.h',
'agc/agc.cc',
'agc/agc.h',
'agc/agc_manager_direct.cc',
@@ -162,7 +162,7 @@
['prefer_fixed_point==1', {
'defines': ['WEBRTC_NS_FIXED'],
'sources': [
- 'ns/include/noise_suppression_x.h',
+ 'ns/noise_suppression_x.h',
'ns/noise_suppression_x.c',
'ns/nsx_core.c',
'ns/nsx_core.h',
@@ -183,7 +183,7 @@
'defines': ['WEBRTC_NS_FLOAT'],
'sources': [
'ns/defines.h',
- 'ns/include/noise_suppression.h',
+ 'ns/noise_suppression.h',
'ns/noise_suppression.c',
'ns/ns_core.c',
'ns/ns_core.h',
diff --git a/webrtc/modules/audio_processing/audio_processing_impl.cc b/webrtc/modules/audio_processing/audio_processing_impl.cc
index c6574151d0..744309c774 100644
--- a/webrtc/modules/audio_processing/audio_processing_impl.cc
+++ b/webrtc/modules/audio_processing/audio_processing_impl.cc
@@ -15,6 +15,7 @@
#include "webrtc/base/checks.h"
#include "webrtc/base/platform_file.h"
+#include "webrtc/base/trace_event.h"
#include "webrtc/common_audio/audio_converter.h"
#include "webrtc/common_audio/channel_buffer.h"
#include "webrtc/common_audio/include/audio_util.h"
@@ -36,8 +37,7 @@ extern "C" {
#include "webrtc/modules/audio_processing/processing_component.h"
#include "webrtc/modules/audio_processing/transient/transient_suppressor.h"
#include "webrtc/modules/audio_processing/voice_detection_impl.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/system_wrappers/include/file_wrapper.h"
#include "webrtc/system_wrappers/include/logging.h"
#include "webrtc/system_wrappers/include/metrics.h"
@@ -75,7 +75,6 @@ static bool LayoutHasKeyboard(AudioProcessing::ChannelLayout layout) {
assert(false);
return false;
}
-
} // namespace
// Throughout webrtc, it's assumed that success is represented by zero.
@@ -147,6 +146,35 @@ class GainControlForNewAgc : public GainControl, public VolumeCallbacks {
int volume_;
};
+struct AudioProcessingImpl::ApmPublicSubmodules {
+ ApmPublicSubmodules()
+ : echo_cancellation(nullptr),
+ echo_control_mobile(nullptr),
+ gain_control(nullptr) {}
+ // Accessed externally of APM without any lock acquired.
+ EchoCancellationImpl* echo_cancellation;
+ EchoControlMobileImpl* echo_control_mobile;
+ GainControlImpl* gain_control;
+ rtc::scoped_ptr<HighPassFilterImpl> high_pass_filter;
+ rtc::scoped_ptr<LevelEstimatorImpl> level_estimator;
+ rtc::scoped_ptr<NoiseSuppressionImpl> noise_suppression;
+ rtc::scoped_ptr<VoiceDetectionImpl> voice_detection;
+ rtc::scoped_ptr<GainControlForNewAgc> gain_control_for_new_agc;
+
+ // Accessed internally from both render and capture.
+ rtc::scoped_ptr<TransientSuppressor> transient_suppressor;
+ rtc::scoped_ptr<IntelligibilityEnhancer> intelligibility_enhancer;
+};
+
+struct AudioProcessingImpl::ApmPrivateSubmodules {
+ explicit ApmPrivateSubmodules(Beamformer<float>* beamformer)
+ : beamformer(beamformer) {}
+ // Accessed internally from capture or during initialization
+ std::list<ProcessingComponent*> component_list;
+ rtc::scoped_ptr<Beamformer<float>> beamformer;
+ rtc::scoped_ptr<AgcManagerDirect> agc_manager;
+};
+
const int AudioProcessing::kNativeSampleRatesHz[] = {
AudioProcessing::kSampleRate8kHz,
AudioProcessing::kSampleRate16kHz,
@@ -172,7 +200,7 @@ AudioProcessing* AudioProcessing::Create(const Config& config,
AudioProcessingImpl* apm = new AudioProcessingImpl(config, beamformer);
if (apm->Initialize() != kNoError) {
delete apm;
- apm = NULL;
+ apm = nullptr;
}
return apm;
@@ -183,102 +211,82 @@ AudioProcessingImpl::AudioProcessingImpl(const Config& config)
AudioProcessingImpl::AudioProcessingImpl(const Config& config,
Beamformer<float>* beamformer)
- : echo_cancellation_(NULL),
- echo_control_mobile_(NULL),
- gain_control_(NULL),
- high_pass_filter_(NULL),
- level_estimator_(NULL),
- noise_suppression_(NULL),
- voice_detection_(NULL),
- crit_(CriticalSectionWrapper::CreateCriticalSection()),
-#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
- debug_file_(FileWrapper::Create()),
- event_msg_(new audioproc::Event()),
-#endif
- api_format_({{{kSampleRate16kHz, 1, false},
- {kSampleRate16kHz, 1, false},
- {kSampleRate16kHz, 1, false},
- {kSampleRate16kHz, 1, false}}}),
- fwd_proc_format_(kSampleRate16kHz),
- rev_proc_format_(kSampleRate16kHz, 1),
- split_rate_(kSampleRate16kHz),
- stream_delay_ms_(0),
- delay_offset_ms_(0),
- was_stream_delay_set_(false),
- last_stream_delay_ms_(0),
- last_aec_system_delay_ms_(0),
- stream_delay_jumps_(-1),
- aec_system_delay_jumps_(-1),
- output_will_be_muted_(false),
- key_pressed_(false),
+ : public_submodules_(new ApmPublicSubmodules()),
+ private_submodules_(new ApmPrivateSubmodules(beamformer)),
+ constants_(config.Get<ExperimentalAgc>().startup_min_volume,
#if defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS)
- use_new_agc_(false),
+ false,
#else
- use_new_agc_(config.Get<ExperimentalAgc>().enabled),
+ config.Get<ExperimentalAgc>().enabled,
#endif
- agc_startup_min_volume_(config.Get<ExperimentalAgc>().startup_min_volume),
+ config.Get<Intelligibility>().enabled),
+
#if defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS)
- transient_suppressor_enabled_(false),
+ capture_(false,
#else
- transient_suppressor_enabled_(config.Get<ExperimentalNs>().enabled),
+ capture_(config.Get<ExperimentalNs>().enabled,
#endif
- beamformer_enabled_(config.Get<Beamforming>().enabled),
- beamformer_(beamformer),
- array_geometry_(config.Get<Beamforming>().array_geometry),
- target_direction_(config.Get<Beamforming>().target_direction),
- intelligibility_enabled_(config.Get<Intelligibility>().enabled) {
- echo_cancellation_ = new EchoCancellationImpl(this, crit_);
- component_list_.push_back(echo_cancellation_);
-
- echo_control_mobile_ = new EchoControlMobileImpl(this, crit_);
- component_list_.push_back(echo_control_mobile_);
-
- gain_control_ = new GainControlImpl(this, crit_);
- component_list_.push_back(gain_control_);
-
- high_pass_filter_ = new HighPassFilterImpl(this, crit_);
- component_list_.push_back(high_pass_filter_);
-
- level_estimator_ = new LevelEstimatorImpl(this, crit_);
- component_list_.push_back(level_estimator_);
-
- noise_suppression_ = new NoiseSuppressionImpl(this, crit_);
- component_list_.push_back(noise_suppression_);
-
- voice_detection_ = new VoiceDetectionImpl(this, crit_);
- component_list_.push_back(voice_detection_);
-
- gain_control_for_new_agc_.reset(new GainControlForNewAgc(gain_control_));
+ config.Get<Beamforming>().array_geometry,
+ config.Get<Beamforming>().target_direction),
+ capture_nonlocked_(config.Get<Beamforming>().enabled)
+{
+ {
+ rtc::CritScope cs_render(&crit_render_);
+ rtc::CritScope cs_capture(&crit_capture_);
+
+ public_submodules_->echo_cancellation =
+ new EchoCancellationImpl(this, &crit_render_, &crit_capture_);
+ public_submodules_->echo_control_mobile =
+ new EchoControlMobileImpl(this, &crit_render_, &crit_capture_);
+ public_submodules_->gain_control =
+ new GainControlImpl(this, &crit_capture_, &crit_capture_);
+ public_submodules_->high_pass_filter.reset(
+ new HighPassFilterImpl(&crit_capture_));
+ public_submodules_->level_estimator.reset(
+ new LevelEstimatorImpl(&crit_capture_));
+ public_submodules_->noise_suppression.reset(
+ new NoiseSuppressionImpl(&crit_capture_));
+ public_submodules_->voice_detection.reset(
+ new VoiceDetectionImpl(&crit_capture_));
+ public_submodules_->gain_control_for_new_agc.reset(
+ new GainControlForNewAgc(public_submodules_->gain_control));
+
+ private_submodules_->component_list.push_back(
+ public_submodules_->echo_cancellation);
+ private_submodules_->component_list.push_back(
+ public_submodules_->echo_control_mobile);
+ private_submodules_->component_list.push_back(
+ public_submodules_->gain_control);
+ }
SetExtraOptions(config);
}
AudioProcessingImpl::~AudioProcessingImpl() {
- {
- CriticalSectionScoped crit_scoped(crit_);
- // Depends on gain_control_ and gain_control_for_new_agc_.
- agc_manager_.reset();
- // Depends on gain_control_.
- gain_control_for_new_agc_.reset();
- while (!component_list_.empty()) {
- ProcessingComponent* component = component_list_.front();
- component->Destroy();
- delete component;
- component_list_.pop_front();
- }
+ // Depends on gain_control_ and
+ // public_submodules_->gain_control_for_new_agc.
+ private_submodules_->agc_manager.reset();
+ // Depends on gain_control_.
+ public_submodules_->gain_control_for_new_agc.reset();
+ while (!private_submodules_->component_list.empty()) {
+ ProcessingComponent* component =
+ private_submodules_->component_list.front();
+ component->Destroy();
+ delete component;
+ private_submodules_->component_list.pop_front();
+ }
#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
- if (debug_file_->Open()) {
- debug_file_->CloseFile();
- }
-#endif
+ if (debug_dump_.debug_file->Open()) {
+ debug_dump_.debug_file->CloseFile();
}
- delete crit_;
- crit_ = NULL;
+#endif
}
int AudioProcessingImpl::Initialize() {
- CriticalSectionScoped crit_scoped(crit_);
+ // Run in a single-threaded manner during initialization.
+ rtc::CritScope cs_render(&crit_render_);
+ rtc::CritScope cs_capture(&crit_capture_);
return InitializeLocked();
}
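From this point the file replaces the single crit_ CriticalSectionWrapper with two locks, crit_render_ and crit_capture_; paths that must run single-threaded, such as Initialize() above, take both, render first. A schematic of that discipline in plain C with pthreads (the real code uses rtc::CritScope, a scoped lock, so the unlocks are implicit):

#include <pthread.h>

static pthread_mutex_t crit_render = PTHREAD_MUTEX_INITIALIZER;
static pthread_mutex_t crit_capture = PTHREAD_MUTEX_INITIALIZER;

/* Initialization path: exclude both the render and the capture thread.
 * Always take render before capture so the two call sites cannot
 * deadlock through lock-order inversion. */
static void initialize_single_threaded(void (*initialize_locked)(void)) {
  pthread_mutex_lock(&crit_render);
  pthread_mutex_lock(&crit_capture);
  initialize_locked();
  pthread_mutex_unlock(&crit_capture);
  pthread_mutex_unlock(&crit_render);
}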
@@ -306,44 +314,73 @@ int AudioProcessingImpl::Initialize(int input_sample_rate_hz,
}
int AudioProcessingImpl::Initialize(const ProcessingConfig& processing_config) {
- CriticalSectionScoped crit_scoped(crit_);
+ // Run in a single-threaded manner during initialization.
+ rtc::CritScope cs_render(&crit_render_);
+ rtc::CritScope cs_capture(&crit_capture_);
+ return InitializeLocked(processing_config);
+}
+
+int AudioProcessingImpl::MaybeInitializeRender(
+ const ProcessingConfig& processing_config) {
+ return MaybeInitialize(processing_config);
+}
+
+int AudioProcessingImpl::MaybeInitializeCapture(
+ const ProcessingConfig& processing_config) {
+ return MaybeInitialize(processing_config);
+}
+
+// Calls InitializeLocked() if any of the audio parameters have changed from
+// their current values (needs to be called while holding the crit_render_ lock).
+int AudioProcessingImpl::MaybeInitialize(
+ const ProcessingConfig& processing_config) {
+ // Called from both threads. Thread check is therefore not possible.
+ if (processing_config == formats_.api_format) {
+ return kNoError;
+ }
+
+ rtc::CritScope cs_capture(&crit_capture_);
return InitializeLocked(processing_config);
}
int AudioProcessingImpl::InitializeLocked() {
const int fwd_audio_buffer_channels =
- beamformer_enabled_ ? api_format_.input_stream().num_channels()
- : api_format_.output_stream().num_channels();
+ capture_nonlocked_.beamformer_enabled
+ ? formats_.api_format.input_stream().num_channels()
+ : formats_.api_format.output_stream().num_channels();
const int rev_audio_buffer_out_num_frames =
- api_format_.reverse_output_stream().num_frames() == 0
- ? rev_proc_format_.num_frames()
- : api_format_.reverse_output_stream().num_frames();
- if (api_format_.reverse_input_stream().num_channels() > 0) {
- render_audio_.reset(new AudioBuffer(
- api_format_.reverse_input_stream().num_frames(),
- api_format_.reverse_input_stream().num_channels(),
- rev_proc_format_.num_frames(), rev_proc_format_.num_channels(),
+ formats_.api_format.reverse_output_stream().num_frames() == 0
+ ? formats_.rev_proc_format.num_frames()
+ : formats_.api_format.reverse_output_stream().num_frames();
+ if (formats_.api_format.reverse_input_stream().num_channels() > 0) {
+ render_.render_audio.reset(new AudioBuffer(
+ formats_.api_format.reverse_input_stream().num_frames(),
+ formats_.api_format.reverse_input_stream().num_channels(),
+ formats_.rev_proc_format.num_frames(),
+ formats_.rev_proc_format.num_channels(),
rev_audio_buffer_out_num_frames));
if (rev_conversion_needed()) {
- render_converter_ = AudioConverter::Create(
- api_format_.reverse_input_stream().num_channels(),
- api_format_.reverse_input_stream().num_frames(),
- api_format_.reverse_output_stream().num_channels(),
- api_format_.reverse_output_stream().num_frames());
+ render_.render_converter = AudioConverter::Create(
+ formats_.api_format.reverse_input_stream().num_channels(),
+ formats_.api_format.reverse_input_stream().num_frames(),
+ formats_.api_format.reverse_output_stream().num_channels(),
+ formats_.api_format.reverse_output_stream().num_frames());
} else {
- render_converter_.reset(nullptr);
+ render_.render_converter.reset(nullptr);
}
} else {
- render_audio_.reset(nullptr);
- render_converter_.reset(nullptr);
+ render_.render_audio.reset(nullptr);
+ render_.render_converter.reset(nullptr);
}
- capture_audio_.reset(new AudioBuffer(
- api_format_.input_stream().num_frames(),
- api_format_.input_stream().num_channels(), fwd_proc_format_.num_frames(),
- fwd_audio_buffer_channels, api_format_.output_stream().num_frames()));
+ capture_.capture_audio.reset(
+ new AudioBuffer(formats_.api_format.input_stream().num_frames(),
+ formats_.api_format.input_stream().num_channels(),
+ capture_nonlocked_.fwd_proc_format.num_frames(),
+ fwd_audio_buffer_channels,
+ formats_.api_format.output_stream().num_frames()));
// Initialize all components.
- for (auto item : component_list_) {
+ for (auto item : private_submodules_->component_list) {
int err = item->Initialize();
if (err != kNoError) {
return err;
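MaybeInitialize() above makes reinitialization conditional: the requested ProcessingConfig is compared with formats_.api_format, and InitializeLocked() runs only on a change, keeping the per-frame path cheap. The shape of that check as a hedged C sketch with stand-in types:

#include <stddef.h>

typedef struct {
  int sample_rate_hz;
  size_t num_channels;
} StreamCfg;  /* stand-in for ProcessingConfig */

static int cfg_equal(const StreamCfg* a, const StreamCfg* b) {
  return a->sample_rate_hz == b->sample_rate_hz &&
         a->num_channels == b->num_channels;
}

/* Reinitialize only when the requested format differs from the current
 * one; returns 0 (kNoError) when nothing changed. */
static int maybe_initialize(StreamCfg* current, const StreamCfg* requested,
                            int (*initialize_locked)(const StreamCfg*)) {
  if (cfg_equal(current, requested))
    return 0;
  *current = *requested;
  return initialize_locked(requested);
}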
@@ -351,15 +388,16 @@ int AudioProcessingImpl::InitializeLocked() {
}
InitializeExperimentalAgc();
-
InitializeTransient();
-
InitializeBeamformer();
-
InitializeIntelligibility();
+ InitializeHighPassFilter();
+ InitializeNoiseSuppression();
+ InitializeLevelEstimator();
+ InitializeVoiceDetection();
#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
- if (debug_file_->Open()) {
+ if (debug_dump_.debug_file->Open()) {
int err = WriteInitMessage();
if (err != kNoError) {
return err;
@@ -372,16 +410,13 @@ int AudioProcessingImpl::InitializeLocked() {
int AudioProcessingImpl::InitializeLocked(const ProcessingConfig& config) {
for (const auto& stream : config.streams) {
- if (stream.num_channels() < 0) {
- return kBadNumberChannelsError;
- }
if (stream.num_channels() > 0 && stream.sample_rate_hz() <= 0) {
return kBadSampleRateError;
}
}
- const int num_in_channels = config.input_stream().num_channels();
- const int num_out_channels = config.output_stream().num_channels();
+ const size_t num_in_channels = config.input_stream().num_channels();
+ const size_t num_out_channels = config.output_stream().num_channels();
// Need at least one input channel.
// Need either one output channel or as many outputs as there are inputs.
@@ -390,18 +425,17 @@ int AudioProcessingImpl::InitializeLocked(const ProcessingConfig& config) {
return kBadNumberChannelsError;
}
- if (beamformer_enabled_ &&
- (static_cast<size_t>(num_in_channels) != array_geometry_.size() ||
- num_out_channels > 1)) {
+ if (capture_nonlocked_.beamformer_enabled &&
+ num_in_channels != capture_.array_geometry.size()) {
return kBadNumberChannelsError;
}
- api_format_ = config;
+ formats_.api_format = config;
// We process at the closest native rate >= min(input rate, output rate)...
const int min_proc_rate =
- std::min(api_format_.input_stream().sample_rate_hz(),
- api_format_.output_stream().sample_rate_hz());
+ std::min(formats_.api_format.input_stream().sample_rate_hz(),
+ formats_.api_format.output_stream().sample_rate_hz());
int fwd_proc_rate;
for (size_t i = 0; i < kNumNativeSampleRates; ++i) {
fwd_proc_rate = kNativeSampleRatesHz[i];
@@ -410,20 +444,20 @@ int AudioProcessingImpl::InitializeLocked(const ProcessingConfig& config) {
}
}
// ...with one exception.
- if (echo_control_mobile_->is_enabled() &&
+ if (public_submodules_->echo_control_mobile->is_enabled() &&
min_proc_rate > kMaxAECMSampleRateHz) {
fwd_proc_rate = kMaxAECMSampleRateHz;
}
- fwd_proc_format_ = StreamConfig(fwd_proc_rate);
+ capture_nonlocked_.fwd_proc_format = StreamConfig(fwd_proc_rate);
// We normally process the reverse stream at 16 kHz. Unless...
int rev_proc_rate = kSampleRate16kHz;
- if (fwd_proc_format_.sample_rate_hz() == kSampleRate8kHz) {
+ if (capture_nonlocked_.fwd_proc_format.sample_rate_hz() == kSampleRate8kHz) {
// ...the forward stream is at 8 kHz.
rev_proc_rate = kSampleRate8kHz;
} else {
- if (api_format_.reverse_input_stream().sample_rate_hz() ==
+ if (formats_.api_format.reverse_input_stream().sample_rate_hz() ==
kSampleRate32kHz) {
// ...or the input is at 32 kHz, in which case we use the splitting
// filter rather than the resampler.
@@ -433,66 +467,89 @@ int AudioProcessingImpl::InitializeLocked(const ProcessingConfig& config) {
// Always downmix the reverse stream to mono for analysis. This has been
// demonstrated to work well for AEC in most practical scenarios.
- rev_proc_format_ = StreamConfig(rev_proc_rate, 1);
+ formats_.rev_proc_format = StreamConfig(rev_proc_rate, 1);
- if (fwd_proc_format_.sample_rate_hz() == kSampleRate32kHz ||
- fwd_proc_format_.sample_rate_hz() == kSampleRate48kHz) {
- split_rate_ = kSampleRate16kHz;
+ if (capture_nonlocked_.fwd_proc_format.sample_rate_hz() == kSampleRate32kHz ||
+ capture_nonlocked_.fwd_proc_format.sample_rate_hz() == kSampleRate48kHz) {
+ capture_nonlocked_.split_rate = kSampleRate16kHz;
} else {
- split_rate_ = fwd_proc_format_.sample_rate_hz();
+ capture_nonlocked_.split_rate =
+ capture_nonlocked_.fwd_proc_format.sample_rate_hz();
}
return InitializeLocked();
}
-// Calls InitializeLocked() if any of the audio parameters have changed from
-// their current values.
-int AudioProcessingImpl::MaybeInitializeLocked(
- const ProcessingConfig& processing_config) {
- if (processing_config == api_format_) {
- return kNoError;
- }
- return InitializeLocked(processing_config);
-}
-
void AudioProcessingImpl::SetExtraOptions(const Config& config) {
- CriticalSectionScoped crit_scoped(crit_);
- for (auto item : component_list_) {
+ // Run in a single-threaded manner when setting the extra options.
+ rtc::CritScope cs_render(&crit_render_);
+ rtc::CritScope cs_capture(&crit_capture_);
+ for (auto item : private_submodules_->component_list) {
item->SetExtraOptions(config);
}
- if (transient_suppressor_enabled_ != config.Get<ExperimentalNs>().enabled) {
- transient_suppressor_enabled_ = config.Get<ExperimentalNs>().enabled;
+ if (capture_.transient_suppressor_enabled !=
+ config.Get<ExperimentalNs>().enabled) {
+ capture_.transient_suppressor_enabled =
+ config.Get<ExperimentalNs>().enabled;
InitializeTransient();
}
+
+#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
+ if (capture_nonlocked_.beamformer_enabled !=
+ config.Get<Beamforming>().enabled) {
+ capture_nonlocked_.beamformer_enabled = config.Get<Beamforming>().enabled;
+ if (config.Get<Beamforming>().array_geometry.size() > 1) {
+ capture_.array_geometry = config.Get<Beamforming>().array_geometry;
+ }
+ capture_.target_direction = config.Get<Beamforming>().target_direction;
+ InitializeBeamformer();
+ }
+#endif // WEBRTC_ANDROID_PLATFORM_BUILD
}
+int AudioProcessingImpl::input_sample_rate_hz() const {
+ // Accessed from outside APM, hence a lock is needed.
+ rtc::CritScope cs(&crit_capture_);
+ return formats_.api_format.input_stream().sample_rate_hz();
+}
int AudioProcessingImpl::proc_sample_rate_hz() const {
- return fwd_proc_format_.sample_rate_hz();
+ // Used as callback from submodules, hence locking is not allowed.
+ return capture_nonlocked_.fwd_proc_format.sample_rate_hz();
}
int AudioProcessingImpl::proc_split_sample_rate_hz() const {
- return split_rate_;
+ // Used as callback from submodules, hence locking is not allowed.
+ return capture_nonlocked_.split_rate;
+}
+
+size_t AudioProcessingImpl::num_reverse_channels() const {
+ // Used as callback from submodules, hence locking is not allowed.
+ return formats_.rev_proc_format.num_channels();
}
-int AudioProcessingImpl::num_reverse_channels() const {
- return rev_proc_format_.num_channels();
+size_t AudioProcessingImpl::num_input_channels() const {
+ // Used as callback from submodules, hence locking is not allowed.
+ return formats_.api_format.input_stream().num_channels();
}
-int AudioProcessingImpl::num_input_channels() const {
- return api_format_.input_stream().num_channels();
+size_t AudioProcessingImpl::num_proc_channels() const {
+ // Used as callback from submodules, hence locking is not allowed.
+ return capture_nonlocked_.beamformer_enabled ? 1 : num_output_channels();
}
-int AudioProcessingImpl::num_output_channels() const {
- return api_format_.output_stream().num_channels();
+size_t AudioProcessingImpl::num_output_channels() const {
+ // Used as callback from submodules, hence locking is not allowed.
+ return formats_.api_format.output_stream().num_channels();
}
void AudioProcessingImpl::set_output_will_be_muted(bool muted) {
- CriticalSectionScoped lock(crit_);
- output_will_be_muted_ = muted;
- if (agc_manager_.get()) {
- agc_manager_->SetCaptureMuted(output_will_be_muted_);
+ rtc::CritScope cs(&crit_capture_);
+ capture_.output_will_be_muted = muted;
+ if (private_submodules_->agc_manager.get()) {
+ private_submodules_->agc_manager->SetCaptureMuted(
+ capture_.output_will_be_muted);
}
}
@@ -504,13 +561,21 @@ int AudioProcessingImpl::ProcessStream(const float* const* src,
int output_sample_rate_hz,
ChannelLayout output_layout,
float* const* dest) {
- CriticalSectionScoped crit_scoped(crit_);
- StreamConfig input_stream = api_format_.input_stream();
+ TRACE_EVENT0("webrtc", "AudioProcessing::ProcessStream_ChannelLayout");
+ StreamConfig input_stream;
+ StreamConfig output_stream;
+ {
+ // Access the formats_.api_format.input_stream beneath the capture lock.
+ // The lock must be released, as it is acquired again later in the call
+ // to ProcessStream().
+ rtc::CritScope cs(&crit_capture_);
+ input_stream = formats_.api_format.input_stream();
+ output_stream = formats_.api_format.output_stream();
+ }
+
input_stream.set_sample_rate_hz(input_sample_rate_hz);
input_stream.set_num_channels(ChannelsFromLayout(input_layout));
input_stream.set_has_keyboard(LayoutHasKeyboard(input_layout));
-
- StreamConfig output_stream = api_format_.output_stream();
output_stream.set_sample_rate_hz(output_sample_rate_hz);
output_stream.set_num_channels(ChannelsFromLayout(output_layout));
output_stream.set_has_keyboard(LayoutHasKeyboard(output_layout));
@@ -525,44 +590,64 @@ int AudioProcessingImpl::ProcessStream(const float* const* src,
const StreamConfig& input_config,
const StreamConfig& output_config,
float* const* dest) {
- CriticalSectionScoped crit_scoped(crit_);
- if (!src || !dest) {
- return kNullPointerError;
+ TRACE_EVENT0("webrtc", "AudioProcessing::ProcessStream_StreamConfig");
+ ProcessingConfig processing_config;
+ {
+ // Acquire the capture lock in order to safely call the function
+ // that retrieves the render side data. This function accesses apm
+ // getters that need the capture lock held when being called.
+ rtc::CritScope cs_capture(&crit_capture_);
+ public_submodules_->echo_cancellation->ReadQueuedRenderData();
+ public_submodules_->echo_control_mobile->ReadQueuedRenderData();
+ public_submodules_->gain_control->ReadQueuedRenderData();
+
+ if (!src || !dest) {
+ return kNullPointerError;
+ }
+
+ processing_config = formats_.api_format;
}
- ProcessingConfig processing_config = api_format_;
processing_config.input_stream() = input_config;
processing_config.output_stream() = output_config;
- RETURN_ON_ERR(MaybeInitializeLocked(processing_config));
+ {
+ // Do conditional reinitialization.
+ rtc::CritScope cs_render(&crit_render_);
+ RETURN_ON_ERR(MaybeInitializeCapture(processing_config));
+ }
+ rtc::CritScope cs_capture(&crit_capture_);
assert(processing_config.input_stream().num_frames() ==
- api_format_.input_stream().num_frames());
+ formats_.api_format.input_stream().num_frames());
#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
- if (debug_file_->Open()) {
+ if (debug_dump_.debug_file->Open()) {
RETURN_ON_ERR(WriteConfigMessage(false));
- event_msg_->set_type(audioproc::Event::STREAM);
- audioproc::Stream* msg = event_msg_->mutable_stream();
+ debug_dump_.capture.event_msg->set_type(audioproc::Event::STREAM);
+ audioproc::Stream* msg = debug_dump_.capture.event_msg->mutable_stream();
const size_t channel_size =
- sizeof(float) * api_format_.input_stream().num_frames();
- for (int i = 0; i < api_format_.input_stream().num_channels(); ++i)
+ sizeof(float) * formats_.api_format.input_stream().num_frames();
+ for (size_t i = 0; i < formats_.api_format.input_stream().num_channels();
+ ++i)
msg->add_input_channel(src[i], channel_size);
}
#endif
- capture_audio_->CopyFrom(src, api_format_.input_stream());
+ capture_.capture_audio->CopyFrom(src, formats_.api_format.input_stream());
RETURN_ON_ERR(ProcessStreamLocked());
- capture_audio_->CopyTo(api_format_.output_stream(), dest);
+ capture_.capture_audio->CopyTo(formats_.api_format.output_stream(), dest);
#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
- if (debug_file_->Open()) {
- audioproc::Stream* msg = event_msg_->mutable_stream();
+ if (debug_dump_.debug_file->Open()) {
+ audioproc::Stream* msg = debug_dump_.capture.event_msg->mutable_stream();
const size_t channel_size =
- sizeof(float) * api_format_.output_stream().num_frames();
- for (int i = 0; i < api_format_.output_stream().num_channels(); ++i)
+ sizeof(float) * formats_.api_format.output_stream().num_frames();
+ for (size_t i = 0; i < formats_.api_format.output_stream().num_channels();
+ ++i)
msg->add_output_channel(dest[i], channel_size);
- RETURN_ON_ERR(WriteMessageToDebugFile());
+ RETURN_ON_ERR(WriteMessageToDebugFile(debug_dump_.debug_file.get(),
+ &crit_debug_, &debug_dump_.capture));
}
#endif
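In the capture-side hunks above, render data is no longer consumed in place: ProcessStream() first drains whatever the render thread queued, via the submodules' ReadQueuedRenderData(), while holding the capture lock. A deliberately simplified C sketch of that hand-off, with a mutex-guarded ring buffer standing in for the real render queues:

#include <pthread.h>
#include <stdint.h>
#include <string.h>

#define QUEUE_CAP 8
#define FRAME_LEN 160

static pthread_mutex_t crit_capture = PTHREAD_MUTEX_INITIALIZER;
static int16_t queue[QUEUE_CAP][FRAME_LEN];
static int q_head, q_len;

/* Render thread: stash one far-end frame for later capture-side use.
 * A full queue simply drops the frame here, for brevity. */
void enqueue_render_frame(const int16_t* frame) {
  pthread_mutex_lock(&crit_capture);
  if (q_len < QUEUE_CAP) {
    memcpy(queue[(q_head + q_len) % QUEUE_CAP], frame, sizeof(queue[0]));
    ++q_len;
  }
  pthread_mutex_unlock(&crit_capture);
}

/* Capture thread: drain everything queued since the previous frame --
 * the role ReadQueuedRenderData() plays in the hunks above. */
void read_queued_render_data(void (*consume)(const int16_t*)) {
  pthread_mutex_lock(&crit_capture);
  while (q_len > 0) {
    consume(queue[q_head]);
    q_head = (q_head + 1) % QUEUE_CAP;
    --q_len;
  }
  pthread_mutex_unlock(&crit_capture);
}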
@@ -570,7 +655,20 @@ int AudioProcessingImpl::ProcessStream(const float* const* src,
}
int AudioProcessingImpl::ProcessStream(AudioFrame* frame) {
- CriticalSectionScoped crit_scoped(crit_);
+ TRACE_EVENT0("webrtc", "AudioProcessing::ProcessStream_AudioFrame");
+ {
+ // Acquire the capture lock in order to safely call the function
+ // that retrieves the render side data. This function accesses apm
+ // getters that need the capture lock held when being called.
+ // The lock needs to be released as
+ // public_submodules_->echo_control_mobile->is_enabled() acquires this lock
+ // as well.
+ rtc::CritScope cs_capture(&crit_capture_);
+ public_submodules_->echo_cancellation->ReadQueuedRenderData();
+ public_submodules_->echo_control_mobile->ReadQueuedRenderData();
+ public_submodules_->gain_control->ReadQueuedRenderData();
+ }
+
if (!frame) {
return kNullPointerError;
}
@@ -581,46 +679,62 @@ int AudioProcessingImpl::ProcessStream(AudioFrame* frame) {
frame->sample_rate_hz_ != kSampleRate48kHz) {
return kBadSampleRateError;
}
- if (echo_control_mobile_->is_enabled() &&
+
+ if (public_submodules_->echo_control_mobile->is_enabled() &&
frame->sample_rate_hz_ > kMaxAECMSampleRateHz) {
LOG(LS_ERROR) << "AECM only supports 16 or 8 kHz sample rates";
return kUnsupportedComponentError;
}
- // TODO(ajm): The input and output rates and channels are currently
- // constrained to be identical in the int16 interface.
- ProcessingConfig processing_config = api_format_;
+ ProcessingConfig processing_config;
+ {
+ // Acquire the lock to access api_format.
+ // The lock is released immediately due to the conditional
+ // reinitialization.
+ rtc::CritScope cs_capture(&crit_capture_);
+ // TODO(ajm): The input and output rates and channels are currently
+ // constrained to be identical in the int16 interface.
+ processing_config = formats_.api_format;
+ }
processing_config.input_stream().set_sample_rate_hz(frame->sample_rate_hz_);
processing_config.input_stream().set_num_channels(frame->num_channels_);
processing_config.output_stream().set_sample_rate_hz(frame->sample_rate_hz_);
processing_config.output_stream().set_num_channels(frame->num_channels_);
- RETURN_ON_ERR(MaybeInitializeLocked(processing_config));
- if (frame->samples_per_channel_ != api_format_.input_stream().num_frames()) {
+ {
+ // Do conditional reinitialization.
+ rtc::CritScope cs_render(&crit_render_);
+ RETURN_ON_ERR(MaybeInitializeCapture(processing_config));
+ }
+ rtc::CritScope cs_capture(&crit_capture_);
+ if (frame->samples_per_channel_ !=
+ formats_.api_format.input_stream().num_frames()) {
return kBadDataLengthError;
}
#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
- if (debug_file_->Open()) {
- event_msg_->set_type(audioproc::Event::STREAM);
- audioproc::Stream* msg = event_msg_->mutable_stream();
+ if (debug_dump_.debug_file->Open()) {
+ debug_dump_.capture.event_msg->set_type(audioproc::Event::STREAM);
+ audioproc::Stream* msg = debug_dump_.capture.event_msg->mutable_stream();
const size_t data_size =
sizeof(int16_t) * frame->samples_per_channel_ * frame->num_channels_;
msg->set_input_data(frame->data_, data_size);
}
#endif
- capture_audio_->DeinterleaveFrom(frame);
+ capture_.capture_audio->DeinterleaveFrom(frame);
RETURN_ON_ERR(ProcessStreamLocked());
- capture_audio_->InterleaveTo(frame, output_copy_needed(is_data_processed()));
+ capture_.capture_audio->InterleaveTo(frame,
+ output_copy_needed(is_data_processed()));
#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
- if (debug_file_->Open()) {
- audioproc::Stream* msg = event_msg_->mutable_stream();
+ if (debug_dump_.debug_file->Open()) {
+ audioproc::Stream* msg = debug_dump_.capture.event_msg->mutable_stream();
const size_t data_size =
sizeof(int16_t) * frame->samples_per_channel_ * frame->num_channels_;
msg->set_output_data(frame->data_, data_size);
- RETURN_ON_ERR(WriteMessageToDebugFile());
+ RETURN_ON_ERR(WriteMessageToDebugFile(debug_dump_.debug_file.get(),
+ &crit_debug_, &debug_dump_.capture));
}
#endif
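The ProcessStream(AudioFrame*) body above copies formats_.api_format into a local processing_config inside a short capture-lock scope, mutates the copy with no lock held, and only then takes the render lock for the conditional reinitialization. A sketch of this snapshot-then-commit pattern under stated assumptions (Config, Apm, and MaybeReinitialize are illustrative names):

#include <mutex>

struct Config {
  int sample_rate_hz = 16000;
  int num_channels = 1;
};

class Apm {
 public:
  int ProcessStream(int rate, int channels) {
    Config snapshot;
    {
      std::lock_guard<std::mutex> lock(capture_mutex_);
      snapshot = config_;  // Copy shared state under a short-lived lock...
    }
    snapshot.sample_rate_hz = rate;  // ...mutate the copy lock-free...
    snapshot.num_channels = channels;
    {
      std::lock_guard<std::mutex> lock(render_mutex_);
      MaybeReinitialize(snapshot);  // ...then commit under the other lock.
    }
    return 0;
  }

 private:
  // Called with render_mutex_ held; also takes capture_mutex_ so the
  // shared config is only ever modified single-threadedly.
  void MaybeReinitialize(const Config& c) {
    std::lock_guard<std::mutex> lock(capture_mutex_);
    if (c.sample_rate_hz != config_.sample_rate_hz ||
        c.num_channels != config_.num_channels) {
      config_ = c;
    }
  }

  std::mutex render_mutex_;  // By convention acquired before capture_mutex_.
  std::mutex capture_mutex_;
  Config config_;
};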
@@ -629,22 +743,25 @@ int AudioProcessingImpl::ProcessStream(AudioFrame* frame) {
int AudioProcessingImpl::ProcessStreamLocked() {
#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
- if (debug_file_->Open()) {
- audioproc::Stream* msg = event_msg_->mutable_stream();
- msg->set_delay(stream_delay_ms_);
- msg->set_drift(echo_cancellation_->stream_drift_samples());
+ if (debug_dump_.debug_file->Open()) {
+ audioproc::Stream* msg = debug_dump_.capture.event_msg->mutable_stream();
+ msg->set_delay(capture_nonlocked_.stream_delay_ms);
+ msg->set_drift(
+ public_submodules_->echo_cancellation->stream_drift_samples());
msg->set_level(gain_control()->stream_analog_level());
- msg->set_keypress(key_pressed_);
+ msg->set_keypress(capture_.key_pressed);
}
#endif
MaybeUpdateHistograms();
- AudioBuffer* ca = capture_audio_.get(); // For brevity.
+ AudioBuffer* ca = capture_.capture_audio.get(); // For brevity.
- if (use_new_agc_ && gain_control_->is_enabled()) {
- agc_manager_->AnalyzePreProcess(ca->channels()[0], ca->num_channels(),
- fwd_proc_format_.num_frames());
+ if (constants_.use_new_agc &&
+ public_submodules_->gain_control->is_enabled()) {
+ private_submodules_->agc_manager->AnalyzePreProcess(
+ ca->channels()[0], ca->num_channels(),
+ capture_nonlocked_.fwd_proc_format.num_frames());
}
bool data_processed = is_data_processed();
@@ -652,34 +769,41 @@ int AudioProcessingImpl::ProcessStreamLocked() {
ca->SplitIntoFrequencyBands();
}
- if (intelligibility_enabled_) {
- intelligibility_enhancer_->AnalyzeCaptureAudio(
- ca->split_channels_f(kBand0To8kHz), split_rate_, ca->num_channels());
+ if (constants_.intelligibility_enabled) {
+ public_submodules_->intelligibility_enhancer->AnalyzeCaptureAudio(
+ ca->split_channels_f(kBand0To8kHz), capture_nonlocked_.split_rate,
+ ca->num_channels());
}
- if (beamformer_enabled_) {
- beamformer_->ProcessChunk(*ca->split_data_f(), ca->split_data_f());
+ if (capture_nonlocked_.beamformer_enabled) {
+ private_submodules_->beamformer->ProcessChunk(*ca->split_data_f(),
+ ca->split_data_f());
ca->set_num_channels(1);
}
- RETURN_ON_ERR(high_pass_filter_->ProcessCaptureAudio(ca));
- RETURN_ON_ERR(gain_control_->AnalyzeCaptureAudio(ca));
- RETURN_ON_ERR(noise_suppression_->AnalyzeCaptureAudio(ca));
- RETURN_ON_ERR(echo_cancellation_->ProcessCaptureAudio(ca));
+ public_submodules_->high_pass_filter->ProcessCaptureAudio(ca);
+ RETURN_ON_ERR(public_submodules_->gain_control->AnalyzeCaptureAudio(ca));
+ public_submodules_->noise_suppression->AnalyzeCaptureAudio(ca);
+ RETURN_ON_ERR(public_submodules_->echo_cancellation->ProcessCaptureAudio(ca));
- if (echo_control_mobile_->is_enabled() && noise_suppression_->is_enabled()) {
+ if (public_submodules_->echo_control_mobile->is_enabled() &&
+ public_submodules_->noise_suppression->is_enabled()) {
ca->CopyLowPassToReference();
}
- RETURN_ON_ERR(noise_suppression_->ProcessCaptureAudio(ca));
- RETURN_ON_ERR(echo_control_mobile_->ProcessCaptureAudio(ca));
- RETURN_ON_ERR(voice_detection_->ProcessCaptureAudio(ca));
+ public_submodules_->noise_suppression->ProcessCaptureAudio(ca);
+ RETURN_ON_ERR(
+ public_submodules_->echo_control_mobile->ProcessCaptureAudio(ca));
+ public_submodules_->voice_detection->ProcessCaptureAudio(ca);
- if (use_new_agc_ && gain_control_->is_enabled() &&
- (!beamformer_enabled_ || beamformer_->is_target_present())) {
- agc_manager_->Process(ca->split_bands_const(0)[kBand0To8kHz],
- ca->num_frames_per_band(), split_rate_);
+ if (constants_.use_new_agc &&
+ public_submodules_->gain_control->is_enabled() &&
+ (!capture_nonlocked_.beamformer_enabled ||
+ private_submodules_->beamformer->is_target_present())) {
+ private_submodules_->agc_manager->Process(
+ ca->split_bands_const(0)[kBand0To8kHz], ca->num_frames_per_band(),
+ capture_nonlocked_.split_rate);
}
- RETURN_ON_ERR(gain_control_->ProcessCaptureAudio(ca));
+ RETURN_ON_ERR(public_submodules_->gain_control->ProcessCaptureAudio(ca));
if (synthesis_needed(data_processed)) {
ca->MergeFrequencyBands();
@@ -687,21 +811,23 @@ int AudioProcessingImpl::ProcessStreamLocked() {
// TODO(aluebs): Investigate if the transient suppression placement should be
// before or after the AGC.
- if (transient_suppressor_enabled_) {
+ if (capture_.transient_suppressor_enabled) {
float voice_probability =
- agc_manager_.get() ? agc_manager_->voice_probability() : 1.f;
+ private_submodules_->agc_manager.get()
+ ? private_submodules_->agc_manager->voice_probability()
+ : 1.f;
- transient_suppressor_->Suppress(
+ public_submodules_->transient_suppressor->Suppress(
ca->channels_f()[0], ca->num_frames(), ca->num_channels(),
ca->split_bands_const_f(0)[kBand0To8kHz], ca->num_frames_per_band(),
ca->keyboard_data(), ca->num_keyboard_frames(), voice_probability,
- key_pressed_);
+ capture_.key_pressed);
}
// The level estimator operates on the recombined data.
- RETURN_ON_ERR(level_estimator_->ProcessStream(ca));
+ public_submodules_->level_estimator->ProcessStream(ca);
- was_stream_delay_set_ = false;
+ capture_.was_stream_delay_set = false;
return kNoError;
}
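Every stage in ProcessStreamLocked() is chained through RETURN_ON_ERR, so the first submodule that fails aborts the whole capture pass. The macro's definition is not part of this hunk; a plausible reconstruction of the idiom (the exact upstream definition may differ):

// Hedged reconstruction of the early-return helper used above.
#define RETURN_ON_ERR(expr)      \
  do {                           \
    int return_on_err_ = (expr); \
    if (return_on_err_ != 0) {   \
      return return_on_err_;     \
    }                            \
  } while (0)

int StageA() { return 0; }
int StageB() { return 0; }

int Pipeline() {
  RETURN_ON_ERR(StageA());  // A failure here...
  RETURN_ON_ERR(StageB());  // ...prevents later stages from running.
  return 0;
}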
@@ -709,13 +835,15 @@ int AudioProcessingImpl::AnalyzeReverseStream(const float* const* data,
size_t samples_per_channel,
int rev_sample_rate_hz,
ChannelLayout layout) {
+ TRACE_EVENT0("webrtc", "AudioProcessing::AnalyzeReverseStream_ChannelLayout");
+ rtc::CritScope cs(&crit_render_);
const StreamConfig reverse_config = {
rev_sample_rate_hz, ChannelsFromLayout(layout), LayoutHasKeyboard(layout),
};
if (samples_per_channel != reverse_config.num_frames()) {
return kBadDataLengthError;
}
- return AnalyzeReverseStream(data, reverse_config, reverse_config);
+ return AnalyzeReverseStreamLocked(data, reverse_config, reverse_config);
}
int AudioProcessingImpl::ProcessReverseStream(
@@ -723,13 +851,17 @@ int AudioProcessingImpl::ProcessReverseStream(
const StreamConfig& reverse_input_config,
const StreamConfig& reverse_output_config,
float* const* dest) {
- RETURN_ON_ERR(
- AnalyzeReverseStream(src, reverse_input_config, reverse_output_config));
+ TRACE_EVENT0("webrtc", "AudioProcessing::ProcessReverseStream_StreamConfig");
+ rtc::CritScope cs(&crit_render_);
+ RETURN_ON_ERR(AnalyzeReverseStreamLocked(src, reverse_input_config,
+ reverse_output_config));
if (is_rev_processed()) {
- render_audio_->CopyTo(api_format_.reverse_output_stream(), dest);
- } else if (rev_conversion_needed()) {
- render_converter_->Convert(src, reverse_input_config.num_samples(), dest,
- reverse_output_config.num_samples());
+ render_.render_audio->CopyTo(formats_.api_format.reverse_output_stream(),
+ dest);
+ } else if (render_check_rev_conversion_needed()) {
+ render_.render_converter->Convert(src, reverse_input_config.num_samples(),
+ dest,
+ reverse_output_config.num_samples());
} else {
CopyAudioIfNeeded(src, reverse_input_config.num_frames(),
reverse_input_config.num_channels(), dest);
@@ -738,55 +870,61 @@ int AudioProcessingImpl::ProcessReverseStream(
return kNoError;
}
-int AudioProcessingImpl::AnalyzeReverseStream(
+int AudioProcessingImpl::AnalyzeReverseStreamLocked(
const float* const* src,
const StreamConfig& reverse_input_config,
const StreamConfig& reverse_output_config) {
- CriticalSectionScoped crit_scoped(crit_);
- if (src == NULL) {
+ if (src == nullptr) {
return kNullPointerError;
}
- if (reverse_input_config.num_channels() <= 0) {
+ if (reverse_input_config.num_channels() == 0) {
return kBadNumberChannelsError;
}
- ProcessingConfig processing_config = api_format_;
+ ProcessingConfig processing_config = formats_.api_format;
processing_config.reverse_input_stream() = reverse_input_config;
processing_config.reverse_output_stream() = reverse_output_config;
- RETURN_ON_ERR(MaybeInitializeLocked(processing_config));
+ RETURN_ON_ERR(MaybeInitializeRender(processing_config));
assert(reverse_input_config.num_frames() ==
- api_format_.reverse_input_stream().num_frames());
+ formats_.api_format.reverse_input_stream().num_frames());
#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
- if (debug_file_->Open()) {
- event_msg_->set_type(audioproc::Event::REVERSE_STREAM);
- audioproc::ReverseStream* msg = event_msg_->mutable_reverse_stream();
+ if (debug_dump_.debug_file->Open()) {
+ debug_dump_.render.event_msg->set_type(audioproc::Event::REVERSE_STREAM);
+ audioproc::ReverseStream* msg =
+ debug_dump_.render.event_msg->mutable_reverse_stream();
const size_t channel_size =
- sizeof(float) * api_format_.reverse_input_stream().num_frames();
- for (int i = 0; i < api_format_.reverse_input_stream().num_channels(); ++i)
+ sizeof(float) * formats_.api_format.reverse_input_stream().num_frames();
+ for (size_t i = 0;
+ i < formats_.api_format.reverse_input_stream().num_channels(); ++i)
msg->add_channel(src[i], channel_size);
- RETURN_ON_ERR(WriteMessageToDebugFile());
+ RETURN_ON_ERR(WriteMessageToDebugFile(debug_dump_.debug_file.get(),
+ &crit_debug_, &debug_dump_.render));
}
#endif
- render_audio_->CopyFrom(src, api_format_.reverse_input_stream());
+ render_.render_audio->CopyFrom(src,
+ formats_.api_format.reverse_input_stream());
return ProcessReverseStreamLocked();
}
int AudioProcessingImpl::ProcessReverseStream(AudioFrame* frame) {
+ TRACE_EVENT0("webrtc", "AudioProcessing::ProcessReverseStream_AudioFrame");
RETURN_ON_ERR(AnalyzeReverseStream(frame));
+ rtc::CritScope cs(&crit_render_);
if (is_rev_processed()) {
- render_audio_->InterleaveTo(frame, true);
+ render_.render_audio->InterleaveTo(frame, true);
}
return kNoError;
}
int AudioProcessingImpl::AnalyzeReverseStream(AudioFrame* frame) {
- CriticalSectionScoped crit_scoped(crit_);
- if (frame == NULL) {
+ TRACE_EVENT0("webrtc", "AudioProcessing::AnalyzeReverseStream_AudioFrame");
+ rtc::CritScope cs(&crit_render_);
+ if (frame == nullptr) {
return kNullPointerError;
}
// Must be a native rate.
@@ -797,7 +935,8 @@ int AudioProcessingImpl::AnalyzeReverseStream(AudioFrame* frame) {
return kBadSampleRateError;
}
// This interface does not tolerate different forward and reverse rates.
- if (frame->sample_rate_hz_ != api_format_.input_stream().sample_rate_hz()) {
+ if (frame->sample_rate_hz_ !=
+ formats_.api_format.input_stream().sample_rate_hz()) {
return kBadSampleRateError;
}
@@ -805,7 +944,7 @@ int AudioProcessingImpl::AnalyzeReverseStream(AudioFrame* frame) {
return kBadNumberChannelsError;
}
- ProcessingConfig processing_config = api_format_;
+ ProcessingConfig processing_config = formats_.api_format;
processing_config.reverse_input_stream().set_sample_rate_hz(
frame->sample_rate_hz_);
processing_config.reverse_input_stream().set_num_channels(
@@ -815,44 +954,52 @@ int AudioProcessingImpl::AnalyzeReverseStream(AudioFrame* frame) {
processing_config.reverse_output_stream().set_num_channels(
frame->num_channels_);
- RETURN_ON_ERR(MaybeInitializeLocked(processing_config));
+ RETURN_ON_ERR(MaybeInitializeRender(processing_config));
if (frame->samples_per_channel_ !=
- api_format_.reverse_input_stream().num_frames()) {
+ formats_.api_format.reverse_input_stream().num_frames()) {
return kBadDataLengthError;
}
#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
- if (debug_file_->Open()) {
- event_msg_->set_type(audioproc::Event::REVERSE_STREAM);
- audioproc::ReverseStream* msg = event_msg_->mutable_reverse_stream();
+ if (debug_dump_.debug_file->Open()) {
+ debug_dump_.render.event_msg->set_type(audioproc::Event::REVERSE_STREAM);
+ audioproc::ReverseStream* msg =
+ debug_dump_.render.event_msg->mutable_reverse_stream();
const size_t data_size =
sizeof(int16_t) * frame->samples_per_channel_ * frame->num_channels_;
msg->set_data(frame->data_, data_size);
- RETURN_ON_ERR(WriteMessageToDebugFile());
+ RETURN_ON_ERR(WriteMessageToDebugFile(debug_dump_.debug_file.get(),
+ &crit_debug_, &debug_dump_.render));
}
#endif
- render_audio_->DeinterleaveFrom(frame);
+ render_.render_audio->DeinterleaveFrom(frame);
return ProcessReverseStreamLocked();
}
int AudioProcessingImpl::ProcessReverseStreamLocked() {
- AudioBuffer* ra = render_audio_.get(); // For brevity.
- if (rev_proc_format_.sample_rate_hz() == kSampleRate32kHz) {
+ AudioBuffer* ra = render_.render_audio.get(); // For brevity.
+ if (formats_.rev_proc_format.sample_rate_hz() == kSampleRate32kHz) {
ra->SplitIntoFrequencyBands();
}
- if (intelligibility_enabled_) {
- intelligibility_enhancer_->ProcessRenderAudio(
- ra->split_channels_f(kBand0To8kHz), split_rate_, ra->num_channels());
+ if (constants_.intelligibility_enabled) {
+ // APM currently runs in single-threaded mode when the intelligibility
+ // enhancer is activated.
+ // TODO(peah): Fix to be properly multi-threaded.
+ rtc::CritScope cs(&crit_capture_);
+ public_submodules_->intelligibility_enhancer->ProcessRenderAudio(
+ ra->split_channels_f(kBand0To8kHz), capture_nonlocked_.split_rate,
+ ra->num_channels());
}
- RETURN_ON_ERR(echo_cancellation_->ProcessRenderAudio(ra));
- RETURN_ON_ERR(echo_control_mobile_->ProcessRenderAudio(ra));
- if (!use_new_agc_) {
- RETURN_ON_ERR(gain_control_->ProcessRenderAudio(ra));
+ RETURN_ON_ERR(public_submodules_->echo_cancellation->ProcessRenderAudio(ra));
+ RETURN_ON_ERR(
+ public_submodules_->echo_control_mobile->ProcessRenderAudio(ra));
+ if (!constants_.use_new_agc) {
+ RETURN_ON_ERR(public_submodules_->gain_control->ProcessRenderAudio(ra));
}
- if (rev_proc_format_.sample_rate_hz() == kSampleRate32kHz &&
+ if (formats_.rev_proc_format.sample_rate_hz() == kSampleRate32kHz &&
is_rev_processed()) {
ra->MergeFrequencyBands();
}
@@ -861,9 +1008,10 @@ int AudioProcessingImpl::ProcessReverseStreamLocked() {
}
int AudioProcessingImpl::set_stream_delay_ms(int delay) {
+ rtc::CritScope cs(&crit_capture_);
Error retval = kNoError;
- was_stream_delay_set_ = true;
- delay += delay_offset_ms_;
+ capture_.was_stream_delay_set = true;
+ delay += capture_.delay_offset_ms;
if (delay < 0) {
delay = 0;
@@ -876,50 +1024,56 @@ int AudioProcessingImpl::set_stream_delay_ms(int delay) {
retval = kBadStreamParameterWarning;
}
- stream_delay_ms_ = delay;
+ capture_nonlocked_.stream_delay_ms = delay;
return retval;
}
int AudioProcessingImpl::stream_delay_ms() const {
- return stream_delay_ms_;
+ // Used as a callback from submodules; locking is not allowed here.
+ return capture_nonlocked_.stream_delay_ms;
}
bool AudioProcessingImpl::was_stream_delay_set() const {
- return was_stream_delay_set_;
+ // Used as a callback from submodules; locking is not allowed here.
+ return capture_.was_stream_delay_set;
}
void AudioProcessingImpl::set_stream_key_pressed(bool key_pressed) {
- key_pressed_ = key_pressed;
+ rtc::CritScope cs(&crit_capture_);
+ capture_.key_pressed = key_pressed;
}
void AudioProcessingImpl::set_delay_offset_ms(int offset) {
- CriticalSectionScoped crit_scoped(crit_);
- delay_offset_ms_ = offset;
+ rtc::CritScope cs(&crit_capture_);
+ capture_.delay_offset_ms = offset;
}
int AudioProcessingImpl::delay_offset_ms() const {
- return delay_offset_ms_;
+ rtc::CritScope cs(&crit_capture_);
+ return capture_.delay_offset_ms;
}
int AudioProcessingImpl::StartDebugRecording(
const char filename[AudioProcessing::kMaxFilenameSize]) {
- CriticalSectionScoped crit_scoped(crit_);
+ // Run in a single-threaded manner.
+ rtc::CritScope cs_render(&crit_render_);
+ rtc::CritScope cs_capture(&crit_capture_);
static_assert(kMaxFilenameSize == FileWrapper::kMaxFileNameSize, "");
- if (filename == NULL) {
+ if (filename == nullptr) {
return kNullPointerError;
}
#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
// Stop any ongoing recording.
- if (debug_file_->Open()) {
- if (debug_file_->CloseFile() == -1) {
+ if (debug_dump_.debug_file->Open()) {
+ if (debug_dump_.debug_file->CloseFile() == -1) {
return kFileError;
}
}
- if (debug_file_->OpenFile(filename, false) == -1) {
- debug_file_->CloseFile();
+ if (debug_dump_.debug_file->OpenFile(filename, false) == -1) {
+ debug_dump_.debug_file->CloseFile();
return kFileError;
}
@@ -932,21 +1086,23 @@ int AudioProcessingImpl::StartDebugRecording(
}
int AudioProcessingImpl::StartDebugRecording(FILE* handle) {
- CriticalSectionScoped crit_scoped(crit_);
+ // Run in a single-threaded manner.
+ rtc::CritScope cs_render(&crit_render_);
+ rtc::CritScope cs_capture(&crit_capture_);
- if (handle == NULL) {
+ if (handle == nullptr) {
return kNullPointerError;
}
#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
// Stop any ongoing recording.
- if (debug_file_->Open()) {
- if (debug_file_->CloseFile() == -1) {
+ if (debug_dump_.debug_file->Open()) {
+ if (debug_dump_.debug_file->CloseFile() == -1) {
return kFileError;
}
}
- if (debug_file_->OpenFromFileHandle(handle, true, false) == -1) {
+ if (debug_dump_.debug_file->OpenFromFileHandle(handle, true, false) == -1) {
return kFileError;
}
@@ -960,17 +1116,22 @@ int AudioProcessingImpl::StartDebugRecording(FILE* handle) {
int AudioProcessingImpl::StartDebugRecordingForPlatformFile(
rtc::PlatformFile handle) {
+ // Run in a single-threaded manner.
+ rtc::CritScope cs_render(&crit_render_);
+ rtc::CritScope cs_capture(&crit_capture_);
FILE* stream = rtc::FdopenPlatformFileForWriting(handle);
return StartDebugRecording(stream);
}
int AudioProcessingImpl::StopDebugRecording() {
- CriticalSectionScoped crit_scoped(crit_);
+ // Run in a single-threaded manner.
+ rtc::CritScope cs_render(&crit_render_);
+ rtc::CritScope cs_capture(&crit_capture_);
#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
// We just return if recording hasn't started.
- if (debug_file_->Open()) {
- if (debug_file_->CloseFile() == -1) {
+ if (debug_dump_.debug_file->Open()) {
+ if (debug_dump_.debug_file->CloseFile() == -1) {
return kFileError;
}
}
@@ -981,58 +1142,87 @@ int AudioProcessingImpl::StopDebugRecording() {
}
EchoCancellation* AudioProcessingImpl::echo_cancellation() const {
- return echo_cancellation_;
+ // Adding a lock here has no effect, since the returned pointer allows
+ // unsynchronized access to the submodule anyway.
+ return public_submodules_->echo_cancellation;
}
EchoControlMobile* AudioProcessingImpl::echo_control_mobile() const {
- return echo_control_mobile_;
+ // Adding a lock here has no effect, since the returned pointer allows
+ // unsynchronized access to the submodule anyway.
+ return public_submodules_->echo_control_mobile;
}
GainControl* AudioProcessingImpl::gain_control() const {
- if (use_new_agc_) {
- return gain_control_for_new_agc_.get();
+ // Adding a lock here has no effect, since the returned pointer allows
+ // unsynchronized access to the submodule anyway.
+ if (constants_.use_new_agc) {
+ return public_submodules_->gain_control_for_new_agc.get();
}
- return gain_control_;
+ return public_submodules_->gain_control;
}
HighPassFilter* AudioProcessingImpl::high_pass_filter() const {
- return high_pass_filter_;
+ // Adding a lock here has no effect, since the returned pointer allows
+ // unsynchronized access to the submodule anyway.
+ return public_submodules_->high_pass_filter.get();
}
LevelEstimator* AudioProcessingImpl::level_estimator() const {
- return level_estimator_;
+ // Adding a lock here has no effect, since the returned pointer allows
+ // unsynchronized access to the submodule anyway.
+ return public_submodules_->level_estimator.get();
}
NoiseSuppression* AudioProcessingImpl::noise_suppression() const {
- return noise_suppression_;
+ // Adding a lock here has no effect, since the returned pointer allows
+ // unsynchronized access to the submodule anyway.
+ return public_submodules_->noise_suppression.get();
}
VoiceDetection* AudioProcessingImpl::voice_detection() const {
- return voice_detection_;
+ // Adding a lock here has no effect, since the returned pointer allows
+ // unsynchronized access to the submodule anyway.
+ return public_submodules_->voice_detection.get();
}
bool AudioProcessingImpl::is_data_processed() const {
- if (beamformer_enabled_) {
+ if (capture_nonlocked_.beamformer_enabled) {
return true;
}
int enabled_count = 0;
- for (auto item : component_list_) {
+ for (auto item : private_submodules_->component_list) {
if (item->is_component_enabled()) {
enabled_count++;
}
}
+ if (public_submodules_->high_pass_filter->is_enabled()) {
+ enabled_count++;
+ }
+ if (public_submodules_->noise_suppression->is_enabled()) {
+ enabled_count++;
+ }
+ if (public_submodules_->level_estimator->is_enabled()) {
+ enabled_count++;
+ }
+ if (public_submodules_->voice_detection->is_enabled()) {
+ enabled_count++;
+ }
- // Data is unchanged if no components are enabled, or if only level_estimator_
- // or voice_detection_ is enabled.
+ // Data is unchanged if no components are enabled, or if only
+ // public_submodules_->level_estimator or
+ // public_submodules_->voice_detection is enabled.
if (enabled_count == 0) {
return false;
} else if (enabled_count == 1) {
- if (level_estimator_->is_enabled() || voice_detection_->is_enabled()) {
+ if (public_submodules_->level_estimator->is_enabled() ||
+ public_submodules_->voice_detection->is_enabled()) {
return false;
}
} else if (enabled_count == 2) {
- if (level_estimator_->is_enabled() && voice_detection_->is_enabled()) {
+ if (public_submodules_->level_estimator->is_enabled() &&
+ public_submodules_->voice_detection->is_enabled()) {
return false;
}
}
@@ -1041,149 +1231,194 @@ bool AudioProcessingImpl::is_data_processed() const {
bool AudioProcessingImpl::output_copy_needed(bool is_data_processed) const {
// Check if we've upmixed or downmixed the audio.
- return ((api_format_.output_stream().num_channels() !=
- api_format_.input_stream().num_channels()) ||
- is_data_processed || transient_suppressor_enabled_);
+ return ((formats_.api_format.output_stream().num_channels() !=
+ formats_.api_format.input_stream().num_channels()) ||
+ is_data_processed || capture_.transient_suppressor_enabled);
}
bool AudioProcessingImpl::synthesis_needed(bool is_data_processed) const {
return (is_data_processed &&
- (fwd_proc_format_.sample_rate_hz() == kSampleRate32kHz ||
- fwd_proc_format_.sample_rate_hz() == kSampleRate48kHz));
+ (capture_nonlocked_.fwd_proc_format.sample_rate_hz() ==
+ kSampleRate32kHz ||
+ capture_nonlocked_.fwd_proc_format.sample_rate_hz() ==
+ kSampleRate48kHz));
}
bool AudioProcessingImpl::analysis_needed(bool is_data_processed) const {
- if (!is_data_processed && !voice_detection_->is_enabled() &&
- !transient_suppressor_enabled_) {
- // Only level_estimator_ is enabled.
+ if (!is_data_processed &&
+ !public_submodules_->voice_detection->is_enabled() &&
+ !capture_.transient_suppressor_enabled) {
+ // Only public_submodules_->level_estimator is enabled.
return false;
- } else if (fwd_proc_format_.sample_rate_hz() == kSampleRate32kHz ||
- fwd_proc_format_.sample_rate_hz() == kSampleRate48kHz) {
- // Something besides level_estimator_ is enabled, and we have super-wb.
+ } else if (capture_nonlocked_.fwd_proc_format.sample_rate_hz() ==
+ kSampleRate32kHz ||
+ capture_nonlocked_.fwd_proc_format.sample_rate_hz() ==
+ kSampleRate48kHz) {
+ // Something besides public_submodules_->level_estimator is enabled, and we
+ // have super-wb.
return true;
}
return false;
}
bool AudioProcessingImpl::is_rev_processed() const {
- return intelligibility_enabled_ && intelligibility_enhancer_->active();
+ return constants_.intelligibility_enabled &&
+ public_submodules_->intelligibility_enhancer->active();
+}
+
+bool AudioProcessingImpl::render_check_rev_conversion_needed() const {
+ return rev_conversion_needed();
}
bool AudioProcessingImpl::rev_conversion_needed() const {
- return (api_format_.reverse_input_stream() !=
- api_format_.reverse_output_stream());
+ return (formats_.api_format.reverse_input_stream() !=
+ formats_.api_format.reverse_output_stream());
}
void AudioProcessingImpl::InitializeExperimentalAgc() {
- if (use_new_agc_) {
- if (!agc_manager_.get()) {
- agc_manager_.reset(new AgcManagerDirect(gain_control_,
- gain_control_for_new_agc_.get(),
- agc_startup_min_volume_));
+ if (constants_.use_new_agc) {
+ if (!private_submodules_->agc_manager.get()) {
+ private_submodules_->agc_manager.reset(new AgcManagerDirect(
+ public_submodules_->gain_control,
+ public_submodules_->gain_control_for_new_agc.get(),
+ constants_.agc_startup_min_volume));
}
- agc_manager_->Initialize();
- agc_manager_->SetCaptureMuted(output_will_be_muted_);
+ private_submodules_->agc_manager->Initialize();
+ private_submodules_->agc_manager->SetCaptureMuted(
+ capture_.output_will_be_muted);
}
}
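InitializeExperimentalAgc() and the Initialize* helpers that follow share one idiom: construct the submodule lazily on first use, then (re)initialize it for the current format on every call. A generic sketch, with std::unique_ptr standing in for rtc::scoped_ptr (Submodule and Host are illustrative):

#include <memory>

struct Submodule {
  void Initialize(int sample_rate_hz) { rate_hz = sample_rate_hz; }
  int rate_hz = 0;
};

class Host {
 public:
  void InitializeSubmodule(bool enabled, int sample_rate_hz) {
    if (!enabled) {
      return;  // Disabled submodules are never constructed.
    }
    if (!submodule_) {
      submodule_.reset(new Submodule());  // Construct only on first use.
    }
    submodule_->Initialize(sample_rate_hz);  // Re-init on format changes.
  }

 private:
  std::unique_ptr<Submodule> submodule_;
};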
void AudioProcessingImpl::InitializeTransient() {
- if (transient_suppressor_enabled_) {
- if (!transient_suppressor_.get()) {
- transient_suppressor_.reset(new TransientSuppressor());
+ if (capture_.transient_suppressor_enabled) {
+ if (!public_submodules_->transient_suppressor.get()) {
+ public_submodules_->transient_suppressor.reset(new TransientSuppressor());
}
- transient_suppressor_->Initialize(
- fwd_proc_format_.sample_rate_hz(), split_rate_,
- api_format_.output_stream().num_channels());
+ public_submodules_->transient_suppressor->Initialize(
+ capture_nonlocked_.fwd_proc_format.sample_rate_hz(),
+ capture_nonlocked_.split_rate,
+ num_proc_channels());
}
}
void AudioProcessingImpl::InitializeBeamformer() {
- if (beamformer_enabled_) {
- if (!beamformer_) {
- beamformer_.reset(
- new NonlinearBeamformer(array_geometry_, target_direction_));
+ if (capture_nonlocked_.beamformer_enabled) {
+ if (!private_submodules_->beamformer) {
+ private_submodules_->beamformer.reset(new NonlinearBeamformer(
+ capture_.array_geometry, capture_.target_direction));
}
- beamformer_->Initialize(kChunkSizeMs, split_rate_);
+ private_submodules_->beamformer->Initialize(kChunkSizeMs,
+ capture_nonlocked_.split_rate);
}
}
void AudioProcessingImpl::InitializeIntelligibility() {
- if (intelligibility_enabled_) {
+ if (constants_.intelligibility_enabled) {
IntelligibilityEnhancer::Config config;
- config.sample_rate_hz = split_rate_;
- config.num_capture_channels = capture_audio_->num_channels();
- config.num_render_channels = render_audio_->num_channels();
- intelligibility_enhancer_.reset(new IntelligibilityEnhancer(config));
+ config.sample_rate_hz = capture_nonlocked_.split_rate;
+ config.num_capture_channels = capture_.capture_audio->num_channels();
+ config.num_render_channels = render_.render_audio->num_channels();
+ public_submodules_->intelligibility_enhancer.reset(
+ new IntelligibilityEnhancer(config));
}
}
+void AudioProcessingImpl::InitializeHighPassFilter() {
+ public_submodules_->high_pass_filter->Initialize(num_proc_channels(),
+ proc_sample_rate_hz());
+}
+
+void AudioProcessingImpl::InitializeNoiseSuppression() {
+ public_submodules_->noise_suppression->Initialize(num_proc_channels(),
+ proc_sample_rate_hz());
+}
+
+void AudioProcessingImpl::InitializeLevelEstimator() {
+ public_submodules_->level_estimator->Initialize();
+}
+
+void AudioProcessingImpl::InitializeVoiceDetection() {
+ public_submodules_->voice_detection->Initialize(proc_split_sample_rate_hz());
+}
+
void AudioProcessingImpl::MaybeUpdateHistograms() {
static const int kMinDiffDelayMs = 60;
if (echo_cancellation()->is_enabled()) {
// Activate delay_jumps_ counters if we know echo_cancellation is running.
// If a stream has echo, we know that echo_cancellation is processing it.
- if (stream_delay_jumps_ == -1 && echo_cancellation()->stream_has_echo()) {
- stream_delay_jumps_ = 0;
+ if (capture_.stream_delay_jumps == -1 &&
+ echo_cancellation()->stream_has_echo()) {
+ capture_.stream_delay_jumps = 0;
}
- if (aec_system_delay_jumps_ == -1 &&
+ if (capture_.aec_system_delay_jumps == -1 &&
echo_cancellation()->stream_has_echo()) {
- aec_system_delay_jumps_ = 0;
+ capture_.aec_system_delay_jumps = 0;
}
// Detect a jump in platform reported system delay and log the difference.
- const int diff_stream_delay_ms = stream_delay_ms_ - last_stream_delay_ms_;
- if (diff_stream_delay_ms > kMinDiffDelayMs && last_stream_delay_ms_ != 0) {
- RTC_HISTOGRAM_COUNTS("WebRTC.Audio.PlatformReportedStreamDelayJump",
- diff_stream_delay_ms, kMinDiffDelayMs, 1000, 100);
- if (stream_delay_jumps_ == -1) {
- stream_delay_jumps_ = 0; // Activate counter if needed.
+ const int diff_stream_delay_ms =
+ capture_nonlocked_.stream_delay_ms - capture_.last_stream_delay_ms;
+ if (diff_stream_delay_ms > kMinDiffDelayMs &&
+ capture_.last_stream_delay_ms != 0) {
+ RTC_HISTOGRAM_COUNTS_SPARSE(
+ "WebRTC.Audio.PlatformReportedStreamDelayJump", diff_stream_delay_ms,
+ kMinDiffDelayMs, 1000, 100);
+ if (capture_.stream_delay_jumps == -1) {
+ capture_.stream_delay_jumps = 0; // Activate counter if needed.
}
- stream_delay_jumps_++;
+ capture_.stream_delay_jumps++;
}
- last_stream_delay_ms_ = stream_delay_ms_;
+ capture_.last_stream_delay_ms = capture_nonlocked_.stream_delay_ms;
// Detect a jump in AEC system delay and log the difference.
- const int frames_per_ms = rtc::CheckedDivExact(split_rate_, 1000);
+ const int frames_per_ms =
+ rtc::CheckedDivExact(capture_nonlocked_.split_rate, 1000);
const int aec_system_delay_ms =
WebRtcAec_system_delay(echo_cancellation()->aec_core()) / frames_per_ms;
const int diff_aec_system_delay_ms =
- aec_system_delay_ms - last_aec_system_delay_ms_;
+ aec_system_delay_ms - capture_.last_aec_system_delay_ms;
if (diff_aec_system_delay_ms > kMinDiffDelayMs &&
- last_aec_system_delay_ms_ != 0) {
- RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AecSystemDelayJump",
- diff_aec_system_delay_ms, kMinDiffDelayMs, 1000,
- 100);
- if (aec_system_delay_jumps_ == -1) {
- aec_system_delay_jumps_ = 0; // Activate counter if needed.
+ capture_.last_aec_system_delay_ms != 0) {
+ RTC_HISTOGRAM_COUNTS_SPARSE("WebRTC.Audio.AecSystemDelayJump",
+ diff_aec_system_delay_ms, kMinDiffDelayMs,
+ 1000, 100);
+ if (capture_.aec_system_delay_jumps == -1) {
+ capture_.aec_system_delay_jumps = 0; // Activate counter if needed.
}
- aec_system_delay_jumps_++;
+ capture_.aec_system_delay_jumps++;
}
- last_aec_system_delay_ms_ = aec_system_delay_ms;
+ capture_.last_aec_system_delay_ms = aec_system_delay_ms;
}
}
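The histogram logic above reduces to a small detector: a jump is an increase of more than kMinDiffDelayMs (60 ms) between successive delay reports, counters start at -1 and are only activated once echo is detected, and a last delay of 0 suppresses the first comparison. The same core, isolated into a standalone class (the threshold is the one used above; everything else is illustrative):

// Counts upward jumps larger than kMinDiffDelayMs between successive
// delay reports; jumps_ == -1 means the counter is not yet activated.
class DelayJumpDetector {
 public:
  void Update(int delay_ms) {
    const int diff_ms = delay_ms - last_delay_ms_;
    if (diff_ms > kMinDiffDelayMs && last_delay_ms_ != 0) {
      if (jumps_ == -1) {
        jumps_ = 0;  // Activate the counter if needed.
      }
      ++jumps_;
    }
    last_delay_ms_ = delay_ms;
  }
  int jumps() const { return jumps_; }

 private:
  static const int kMinDiffDelayMs = 60;
  int last_delay_ms_ = 0;
  int jumps_ = -1;
};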
void AudioProcessingImpl::UpdateHistogramsOnCallEnd() {
- CriticalSectionScoped crit_scoped(crit_);
- if (stream_delay_jumps_ > -1) {
- RTC_HISTOGRAM_ENUMERATION(
+ // Run in a single-threaded manner.
+ rtc::CritScope cs_render(&crit_render_);
+ rtc::CritScope cs_capture(&crit_capture_);
+
+ if (capture_.stream_delay_jumps > -1) {
+ RTC_HISTOGRAM_ENUMERATION_SPARSE(
"WebRTC.Audio.NumOfPlatformReportedStreamDelayJumps",
- stream_delay_jumps_, 51);
+ capture_.stream_delay_jumps, 51);
}
- stream_delay_jumps_ = -1;
- last_stream_delay_ms_ = 0;
+ capture_.stream_delay_jumps = -1;
+ capture_.last_stream_delay_ms = 0;
- if (aec_system_delay_jumps_ > -1) {
- RTC_HISTOGRAM_ENUMERATION("WebRTC.Audio.NumOfAecSystemDelayJumps",
- aec_system_delay_jumps_, 51);
+ if (capture_.aec_system_delay_jumps > -1) {
+ RTC_HISTOGRAM_ENUMERATION_SPARSE("WebRTC.Audio.NumOfAecSystemDelayJumps",
+ capture_.aec_system_delay_jumps, 51);
}
- aec_system_delay_jumps_ = -1;
- last_aec_system_delay_ms_ = 0;
+ capture_.aec_system_delay_jumps = -1;
+ capture_.last_aec_system_delay_ms = 0;
}
#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
-int AudioProcessingImpl::WriteMessageToDebugFile() {
- int32_t size = event_msg_->ByteSize();
+int AudioProcessingImpl::WriteMessageToDebugFile(
+ FileWrapper* debug_file,
+ rtc::CriticalSection* crit_debug,
+ ApmDebugDumpThreadState* debug_state) {
+ int32_t size = debug_state->event_msg->ByteSize();
if (size <= 0) {
return kUnspecifiedError;
}
@@ -1192,82 +1427,100 @@ int AudioProcessingImpl::WriteMessageToDebugFile() {
// pretty safe in assuming little-endian.
#endif
- if (!event_msg_->SerializeToString(&event_str_)) {
+ if (!debug_state->event_msg->SerializeToString(&debug_state->event_str)) {
return kUnspecifiedError;
}
- // Write message preceded by its size.
- if (!debug_file_->Write(&size, sizeof(int32_t))) {
- return kFileError;
- }
- if (!debug_file_->Write(event_str_.data(), event_str_.length())) {
- return kFileError;
+ {
+ // Ensure atomic writes of the message.
+ rtc::CritScope cs_capture(crit_debug);
+ // Write message preceded by its size.
+ if (!debug_file->Write(&size, sizeof(int32_t))) {
+ return kFileError;
+ }
+ if (!debug_file->Write(debug_state->event_str.data(),
+ debug_state->event_str.length())) {
+ return kFileError;
+ }
}
- event_msg_->Clear();
+ debug_state->event_msg->Clear();
return kNoError;
}
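WriteMessageToDebugFile() frames each event as a 4-byte size followed by the serialized protobuf, and takes crit_debug_ around the two writes so records from the render and capture threads cannot interleave. A reader of such a dump therefore just walks (size, payload) pairs; a minimal sketch, assuming a little-endian host (matching the writer's own assumption) and plain iostreams instead of FileWrapper:

#include <cstdint>
#include <fstream>
#include <string>
#include <vector>

// Reads every length-prefixed record from an APM debug dump. Parsing the
// audioproc::Event protobuf inside each payload is omitted here.
std::vector<std::string> ReadDump(const std::string& path) {
  std::vector<std::string> records;
  std::ifstream in(path, std::ios::binary);
  int32_t size = 0;
  while (in.read(reinterpret_cast<char*>(&size), sizeof(size))) {
    if (size <= 0) {
      break;  // Mirrors the writer's size <= 0 guard.
    }
    std::string payload(static_cast<size_t>(size), '\0');
    if (!in.read(&payload[0], size)) {
      break;  // Truncated record at end of file.
    }
    records.push_back(std::move(payload));
  }
  return records;
}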
int AudioProcessingImpl::WriteInitMessage() {
- event_msg_->set_type(audioproc::Event::INIT);
- audioproc::Init* msg = event_msg_->mutable_init();
- msg->set_sample_rate(api_format_.input_stream().sample_rate_hz());
- msg->set_num_input_channels(api_format_.input_stream().num_channels());
- msg->set_num_output_channels(api_format_.output_stream().num_channels());
- msg->set_num_reverse_channels(
- api_format_.reverse_input_stream().num_channels());
+ debug_dump_.capture.event_msg->set_type(audioproc::Event::INIT);
+ audioproc::Init* msg = debug_dump_.capture.event_msg->mutable_init();
+ msg->set_sample_rate(formats_.api_format.input_stream().sample_rate_hz());
+
+ msg->set_num_input_channels(static_cast<google::protobuf::int32>(
+ formats_.api_format.input_stream().num_channels()));
+ msg->set_num_output_channels(static_cast<google::protobuf::int32>(
+ formats_.api_format.output_stream().num_channels()));
+ msg->set_num_reverse_channels(static_cast<google::protobuf::int32>(
+ formats_.api_format.reverse_input_stream().num_channels()));
msg->set_reverse_sample_rate(
- api_format_.reverse_input_stream().sample_rate_hz());
- msg->set_output_sample_rate(api_format_.output_stream().sample_rate_hz());
- // TODO(ekmeyerson): Add reverse output fields to event_msg_.
-
- RETURN_ON_ERR(WriteMessageToDebugFile());
+ formats_.api_format.reverse_input_stream().sample_rate_hz());
+ msg->set_output_sample_rate(
+ formats_.api_format.output_stream().sample_rate_hz());
+ // TODO(ekmeyerson): Add reverse output fields to
+ // debug_dump_.capture.event_msg.
+
+ RETURN_ON_ERR(WriteMessageToDebugFile(debug_dump_.debug_file.get(),
+ &crit_debug_, &debug_dump_.capture));
return kNoError;
}
int AudioProcessingImpl::WriteConfigMessage(bool forced) {
audioproc::Config config;
- config.set_aec_enabled(echo_cancellation_->is_enabled());
+ config.set_aec_enabled(public_submodules_->echo_cancellation->is_enabled());
config.set_aec_delay_agnostic_enabled(
- echo_cancellation_->is_delay_agnostic_enabled());
+ public_submodules_->echo_cancellation->is_delay_agnostic_enabled());
config.set_aec_drift_compensation_enabled(
- echo_cancellation_->is_drift_compensation_enabled());
+ public_submodules_->echo_cancellation->is_drift_compensation_enabled());
config.set_aec_extended_filter_enabled(
- echo_cancellation_->is_extended_filter_enabled());
- config.set_aec_suppression_level(
- static_cast<int>(echo_cancellation_->suppression_level()));
+ public_submodules_->echo_cancellation->is_extended_filter_enabled());
+ config.set_aec_suppression_level(static_cast<int>(
+ public_submodules_->echo_cancellation->suppression_level()));
- config.set_aecm_enabled(echo_control_mobile_->is_enabled());
+ config.set_aecm_enabled(
+ public_submodules_->echo_control_mobile->is_enabled());
config.set_aecm_comfort_noise_enabled(
- echo_control_mobile_->is_comfort_noise_enabled());
- config.set_aecm_routing_mode(
- static_cast<int>(echo_control_mobile_->routing_mode()));
+ public_submodules_->echo_control_mobile->is_comfort_noise_enabled());
+ config.set_aecm_routing_mode(static_cast<int>(
+ public_submodules_->echo_control_mobile->routing_mode()));
- config.set_agc_enabled(gain_control_->is_enabled());
- config.set_agc_mode(static_cast<int>(gain_control_->mode()));
- config.set_agc_limiter_enabled(gain_control_->is_limiter_enabled());
- config.set_noise_robust_agc_enabled(use_new_agc_);
+ config.set_agc_enabled(public_submodules_->gain_control->is_enabled());
+ config.set_agc_mode(
+ static_cast<int>(public_submodules_->gain_control->mode()));
+ config.set_agc_limiter_enabled(
+ public_submodules_->gain_control->is_limiter_enabled());
+ config.set_noise_robust_agc_enabled(constants_.use_new_agc);
- config.set_hpf_enabled(high_pass_filter_->is_enabled());
+ config.set_hpf_enabled(public_submodules_->high_pass_filter->is_enabled());
- config.set_ns_enabled(noise_suppression_->is_enabled());
- config.set_ns_level(static_cast<int>(noise_suppression_->level()));
+ config.set_ns_enabled(public_submodules_->noise_suppression->is_enabled());
+ config.set_ns_level(
+ static_cast<int>(public_submodules_->noise_suppression->level()));
- config.set_transient_suppression_enabled(transient_suppressor_enabled_);
+ config.set_transient_suppression_enabled(
+ capture_.transient_suppressor_enabled);
std::string serialized_config = config.SerializeAsString();
- if (!forced && last_serialized_config_ == serialized_config) {
+ if (!forced &&
+ debug_dump_.capture.last_serialized_config == serialized_config) {
return kNoError;
}
- last_serialized_config_ = serialized_config;
+ debug_dump_.capture.last_serialized_config = serialized_config;
- event_msg_->set_type(audioproc::Event::CONFIG);
- event_msg_->mutable_config()->CopyFrom(config);
+ debug_dump_.capture.event_msg->set_type(audioproc::Event::CONFIG);
+ debug_dump_.capture.event_msg->mutable_config()->CopyFrom(config);
- RETURN_ON_ERR(WriteMessageToDebugFile());
+ RETURN_ON_ERR(WriteMessageToDebugFile(debug_dump_.debug_file.get(),
+ &crit_debug_, &debug_dump_.capture));
return kNoError;
}
#endif // WEBRTC_AUDIOPROC_DEBUG_DUMP
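A recurring shape in the .cc changes above: the real-time paths (ProcessStream, ProcessReverseStream) each take exactly one of the two locks, while configuration paths take both, always render before capture, matching the ACQUIRED_BEFORE(crit_capture_) annotation in the header below. Keeping one global acquisition order is what makes the two-lock scheme deadlock-free; a condensed sketch (names are illustrative):

#include <mutex>

class TwoLockApm {
 public:
  // Real-time paths take exactly one lock each and may run concurrently.
  void CaptureWork() { std::lock_guard<std::mutex> lock(capture_); }
  void RenderWork() { std::lock_guard<std::mutex> lock(render_); }

  // Configuration paths take both locks, always render before capture,
  // so no lock-order cycle can form between the two threads.
  void Reconfigure() {
    std::lock_guard<std::mutex> render_lock(render_);
    std::lock_guard<std::mutex> capture_lock(capture_);
    // ... reinitialize state shared by both sides ...
  }

 private:
  std::mutex render_;  // By convention acquired before capture_.
  std::mutex capture_;
};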
diff --git a/webrtc/modules/audio_processing/audio_processing_impl.h b/webrtc/modules/audio_processing/audio_processing_impl.h
index 542886ee10..b310896903 100644
--- a/webrtc/modules/audio_processing/audio_processing_impl.h
+++ b/webrtc/modules/audio_processing/audio_processing_impl.h
@@ -15,50 +15,38 @@
#include <string>
#include <vector>
+#include "webrtc/base/criticalsection.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/thread_annotations.h"
+#include "webrtc/modules/audio_processing/audio_buffer.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
+#include "webrtc/system_wrappers/include/file_wrapper.h"
+
+#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
+// Files generated at build-time by the protobuf compiler.
+#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
+#include "external/webrtc/webrtc/modules/audio_processing/debug.pb.h"
+#else
+#include "webrtc/audio_processing/debug.pb.h"
+#endif
+#endif // WEBRTC_AUDIOPROC_DEBUG_DUMP
namespace webrtc {
class AgcManagerDirect;
-class AudioBuffer;
class AudioConverter;
template<typename T>
class Beamformer;
-class CriticalSectionWrapper;
-class EchoCancellationImpl;
-class EchoControlMobileImpl;
-class FileWrapper;
-class GainControlImpl;
-class GainControlForNewAgc;
-class HighPassFilterImpl;
-class LevelEstimatorImpl;
-class NoiseSuppressionImpl;
-class ProcessingComponent;
-class TransientSuppressor;
-class VoiceDetectionImpl;
-class IntelligibilityEnhancer;
-
-#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
-namespace audioproc {
-
-class Event;
-
-} // namespace audioproc
-#endif
-
class AudioProcessingImpl : public AudioProcessing {
public:
+ // Methods forcing APM to run in a single-threaded manner.
+ // These acquire both the render and capture locks.
explicit AudioProcessingImpl(const Config& config);
-
// AudioProcessingImpl takes ownership of beamformer.
AudioProcessingImpl(const Config& config, Beamformer<float>* beamformer);
virtual ~AudioProcessingImpl();
-
- // AudioProcessing methods.
int Initialize() override;
int Initialize(int input_sample_rate_hz,
int output_sample_rate_hz,
@@ -68,12 +56,14 @@ class AudioProcessingImpl : public AudioProcessing {
ChannelLayout reverse_layout) override;
int Initialize(const ProcessingConfig& processing_config) override;
void SetExtraOptions(const Config& config) override;
- int proc_sample_rate_hz() const override;
- int proc_split_sample_rate_hz() const override;
- int num_input_channels() const override;
- int num_output_channels() const override;
- int num_reverse_channels() const override;
- void set_output_will_be_muted(bool muted) override;
+ void UpdateHistogramsOnCallEnd() override;
+ int StartDebugRecording(const char filename[kMaxFilenameSize]) override;
+ int StartDebugRecording(FILE* handle) override;
+ int StartDebugRecordingForPlatformFile(rtc::PlatformFile handle) override;
+ int StopDebugRecording() override;
+
+ // Capture-side exclusive methods possibly running APM in a
+ // multi-threaded manner. These acquire the capture lock.
int ProcessStream(AudioFrame* frame) override;
int ProcessStream(const float* const* src,
size_t samples_per_channel,
@@ -86,6 +76,15 @@ class AudioProcessingImpl : public AudioProcessing {
const StreamConfig& input_config,
const StreamConfig& output_config,
float* const* dest) override;
+ void set_output_will_be_muted(bool muted) override;
+ int set_stream_delay_ms(int delay) override;
+ void set_delay_offset_ms(int offset) override;
+ int delay_offset_ms() const override;
+ void set_stream_key_pressed(bool key_pressed) override;
+ int input_sample_rate_hz() const override;
+
+ // Render-side exclusive methods possibly running APM in a
+ // multi-threaded manner. These acquire the render lock.
int AnalyzeReverseStream(AudioFrame* frame) override;
int ProcessReverseStream(AudioFrame* frame) override;
int AnalyzeReverseStream(const float* const* data,
@@ -96,17 +95,25 @@ class AudioProcessingImpl : public AudioProcessing {
const StreamConfig& reverse_input_config,
const StreamConfig& reverse_output_config,
float* const* dest) override;
- int set_stream_delay_ms(int delay) override;
+
+ // Methods only accessed from APM submodules or
+ // from AudioProcessing tests in a single-threaded manner.
+ // Hence there is no need for locks in these.
+ int proc_sample_rate_hz() const override;
+ int proc_split_sample_rate_hz() const override;
+ size_t num_input_channels() const override;
+ size_t num_proc_channels() const override;
+ size_t num_output_channels() const override;
+ size_t num_reverse_channels() const override;
int stream_delay_ms() const override;
- bool was_stream_delay_set() const override;
- void set_delay_offset_ms(int offset) override;
- int delay_offset_ms() const override;
- void set_stream_key_pressed(bool key_pressed) override;
- int StartDebugRecording(const char filename[kMaxFilenameSize]) override;
- int StartDebugRecording(FILE* handle) override;
- int StartDebugRecordingForPlatformFile(rtc::PlatformFile handle) override;
- int StopDebugRecording() override;
- void UpdateHistogramsOnCallEnd() override;
+ bool was_stream_delay_set() const override
+ EXCLUSIVE_LOCKS_REQUIRED(crit_capture_);
+
+ // Methods returning pointers to APM submodules.
+ // No locks are acquired in these, as such locks
+ // would offer no protection (the submodules are
+ // created only once, in a single-threaded manner,
+ // during APM creation).
EchoCancellation* echo_cancellation() const override;
EchoControlMobile* echo_control_mobile() const override;
GainControl* gain_control() const override;
@@ -117,101 +124,216 @@ class AudioProcessingImpl : public AudioProcessing {
protected:
// Overridden in a mock.
- virtual int InitializeLocked() EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ virtual int InitializeLocked()
+ EXCLUSIVE_LOCKS_REQUIRED(crit_render_, crit_capture_);
private:
+ struct ApmPublicSubmodules;
+ struct ApmPrivateSubmodules;
+
+#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
+ // State for the debug dump.
+ struct ApmDebugDumpThreadState {
+ ApmDebugDumpThreadState() : event_msg(new audioproc::Event()) {}
+ rtc::scoped_ptr<audioproc::Event> event_msg; // Protobuf message.
+ std::string event_str; // Memory for protobuf serialization.
+
+ // Serialized string of last saved APM configuration.
+ std::string last_serialized_config;
+ };
+
+ struct ApmDebugDumpState {
+ ApmDebugDumpState() : debug_file(FileWrapper::Create()) {}
+ rtc::scoped_ptr<FileWrapper> debug_file;
+ ApmDebugDumpThreadState render;
+ ApmDebugDumpThreadState capture;
+ };
+#endif
+
+ // Methods for modifying the formats struct that are called from both
+ // the render and capture threads. The check for whether modifications
+ // are needed is done while holding the render lock only, so the capture
+ // thread does not block the render thread.
+ // The struct is modified in a single-threaded manner, by holding both the
+ // render and capture locks.
+ int MaybeInitialize(const ProcessingConfig& config)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_render_);
+
+ int MaybeInitializeRender(const ProcessingConfig& processing_config)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_render_);
+
+ int MaybeInitializeCapture(const ProcessingConfig& processing_config)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_render_);
+
+ // Methods for checking whether reverse-stream conversion is needed.
+ // These access the formats struct in a read-only manner, but the
+ // render-lock requirement was added because they are currently always
+ // called with that lock held anyway.
+ bool rev_conversion_needed() const EXCLUSIVE_LOCKS_REQUIRED(crit_render_);
+ bool render_check_rev_conversion_needed() const
+ EXCLUSIVE_LOCKS_REQUIRED(crit_render_);
+
+ // Methods requiring APM running in a single-threaded manner.
+ // Are called with both the render and capture locks already
+ // acquired.
+ void InitializeExperimentalAgc()
+ EXCLUSIVE_LOCKS_REQUIRED(crit_render_, crit_capture_);
+ void InitializeTransient()
+ EXCLUSIVE_LOCKS_REQUIRED(crit_render_, crit_capture_);
+ void InitializeBeamformer()
+ EXCLUSIVE_LOCKS_REQUIRED(crit_render_, crit_capture_);
+ void InitializeIntelligibility()
+ EXCLUSIVE_LOCKS_REQUIRED(crit_render_, crit_capture_);
+ void InitializeHighPassFilter()
+ EXCLUSIVE_LOCKS_REQUIRED(crit_capture_);
+ void InitializeNoiseSuppression()
+ EXCLUSIVE_LOCKS_REQUIRED(crit_capture_);
+ void InitializeLevelEstimator()
+ EXCLUSIVE_LOCKS_REQUIRED(crit_capture_);
+ void InitializeVoiceDetection()
+ EXCLUSIVE_LOCKS_REQUIRED(crit_capture_);
int InitializeLocked(const ProcessingConfig& config)
- EXCLUSIVE_LOCKS_REQUIRED(crit_);
- int MaybeInitializeLocked(const ProcessingConfig& config)
- EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ EXCLUSIVE_LOCKS_REQUIRED(crit_render_, crit_capture_);
+
+ // Capture-side exclusive methods possibly running APM in a multi-threaded
+ // manner that are called with the capture lock already acquired.
+ int ProcessStreamLocked() EXCLUSIVE_LOCKS_REQUIRED(crit_capture_);
+ bool output_copy_needed(bool is_data_processed) const
+ EXCLUSIVE_LOCKS_REQUIRED(crit_capture_);
+ bool is_data_processed() const EXCLUSIVE_LOCKS_REQUIRED(crit_capture_);
+ bool synthesis_needed(bool is_data_processed) const
+ EXCLUSIVE_LOCKS_REQUIRED(crit_capture_);
+ bool analysis_needed(bool is_data_processed) const
+ EXCLUSIVE_LOCKS_REQUIRED(crit_capture_);
+ void MaybeUpdateHistograms() EXCLUSIVE_LOCKS_REQUIRED(crit_capture_);
+
+ // Render-side exclusive methods possibly running APM in a multi-threaded
+ // manner that are called with the render lock already acquired.
// TODO(ekm): Remove once all clients updated to new interface.
- int AnalyzeReverseStream(const float* const* src,
- const StreamConfig& input_config,
- const StreamConfig& output_config);
- int ProcessStreamLocked() EXCLUSIVE_LOCKS_REQUIRED(crit_);
- int ProcessReverseStreamLocked() EXCLUSIVE_LOCKS_REQUIRED(crit_);
-
- bool is_data_processed() const;
- bool output_copy_needed(bool is_data_processed) const;
- bool synthesis_needed(bool is_data_processed) const;
- bool analysis_needed(bool is_data_processed) const;
- bool is_rev_processed() const;
- bool rev_conversion_needed() const;
- void InitializeExperimentalAgc() EXCLUSIVE_LOCKS_REQUIRED(crit_);
- void InitializeTransient() EXCLUSIVE_LOCKS_REQUIRED(crit_);
- void InitializeBeamformer() EXCLUSIVE_LOCKS_REQUIRED(crit_);
- void InitializeIntelligibility() EXCLUSIVE_LOCKS_REQUIRED(crit_);
- void MaybeUpdateHistograms() EXCLUSIVE_LOCKS_REQUIRED(crit_);
-
- EchoCancellationImpl* echo_cancellation_;
- EchoControlMobileImpl* echo_control_mobile_;
- GainControlImpl* gain_control_;
- HighPassFilterImpl* high_pass_filter_;
- LevelEstimatorImpl* level_estimator_;
- NoiseSuppressionImpl* noise_suppression_;
- VoiceDetectionImpl* voice_detection_;
- rtc::scoped_ptr<GainControlForNewAgc> gain_control_for_new_agc_;
-
- std::list<ProcessingComponent*> component_list_;
- CriticalSectionWrapper* crit_;
- rtc::scoped_ptr<AudioBuffer> render_audio_;
- rtc::scoped_ptr<AudioBuffer> capture_audio_;
- rtc::scoped_ptr<AudioConverter> render_converter_;
+ int AnalyzeReverseStreamLocked(const float* const* src,
+ const StreamConfig& input_config,
+ const StreamConfig& output_config)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_render_);
+ bool is_rev_processed() const EXCLUSIVE_LOCKS_REQUIRED(crit_render_);
+ int ProcessReverseStreamLocked() EXCLUSIVE_LOCKS_REQUIRED(crit_render_);
+
+// Debug dump methods that are internal and called without locks.
+// TODO(peah): Make thread safe.
#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
// TODO(andrew): make this more graceful. Ideally we would split this stuff
// out into a separate class with an "enabled" and "disabled" implementation.
- int WriteMessageToDebugFile();
- int WriteInitMessage();
+ static int WriteMessageToDebugFile(FileWrapper* debug_file,
+ rtc::CriticalSection* crit_debug,
+ ApmDebugDumpThreadState* debug_state);
+ int WriteInitMessage() EXCLUSIVE_LOCKS_REQUIRED(crit_render_, crit_capture_);
// Writes Config message. If not |forced|, only writes the current config if
// it is different from the last saved one; if |forced|, writes the config
// regardless of the last saved.
- int WriteConfigMessage(bool forced);
+ int WriteConfigMessage(bool forced) EXCLUSIVE_LOCKS_REQUIRED(crit_capture_);
- rtc::scoped_ptr<FileWrapper> debug_file_;
- rtc::scoped_ptr<audioproc::Event> event_msg_; // Protobuf message.
- std::string event_str_; // Memory for protobuf serialization.
+ // Critical section.
+ mutable rtc::CriticalSection crit_debug_;
- // Serialized string of last saved APM configuration.
- std::string last_serialized_config_;
+ // Debug dump state.
+ ApmDebugDumpState debug_dump_;
#endif
- // Format of processing streams at input/output call sites.
- ProcessingConfig api_format_;
-
- // Only the rate and samples fields of fwd_proc_format_ are used because the
- // forward processing number of channels is mutable and is tracked by the
- // capture_audio_.
- StreamConfig fwd_proc_format_;
- StreamConfig rev_proc_format_;
- int split_rate_;
-
- int stream_delay_ms_;
- int delay_offset_ms_;
- bool was_stream_delay_set_;
- int last_stream_delay_ms_;
- int last_aec_system_delay_ms_;
- int stream_delay_jumps_;
- int aec_system_delay_jumps_;
-
- bool output_will_be_muted_ GUARDED_BY(crit_);
-
- bool key_pressed_;
-
- // Only set through the constructor's Config parameter.
- const bool use_new_agc_;
- rtc::scoped_ptr<AgcManagerDirect> agc_manager_ GUARDED_BY(crit_);
- int agc_startup_min_volume_;
-
- bool transient_suppressor_enabled_;
- rtc::scoped_ptr<TransientSuppressor> transient_suppressor_;
- const bool beamformer_enabled_;
- rtc::scoped_ptr<Beamformer<float>> beamformer_;
- const std::vector<Point> array_geometry_;
- const SphericalPointf target_direction_;
-
- bool intelligibility_enabled_;
- rtc::scoped_ptr<IntelligibilityEnhancer> intelligibility_enhancer_;
+ // Critical sections.
+ mutable rtc::CriticalSection crit_render_ ACQUIRED_BEFORE(crit_capture_);
+ mutable rtc::CriticalSection crit_capture_;
+
+ // Structs containing the pointers to the submodules.
+ rtc::scoped_ptr<ApmPublicSubmodules> public_submodules_;
+ rtc::scoped_ptr<ApmPrivateSubmodules> private_submodules_
+ GUARDED_BY(crit_capture_);
+
+ // State that is written to while holding both the render and capture locks
+ // but can be read without any lock being held.
+ // As this is only accessed internally to APM, and all internal APM methods
+ // hold either the render or the capture lock when running, this construct
+ // is safe, as it is not possible to read the variables while they are
+ // being written.
+ struct ApmFormatState {
+ ApmFormatState()
+ : // Format of processing streams at input/output call sites.
+ api_format({{{kSampleRate16kHz, 1, false},
+ {kSampleRate16kHz, 1, false},
+ {kSampleRate16kHz, 1, false},
+ {kSampleRate16kHz, 1, false}}}),
+ rev_proc_format(kSampleRate16kHz, 1) {}
+ ProcessingConfig api_format;
+ StreamConfig rev_proc_format;
+ } formats_;
+
+ // APM constants.
+ const struct ApmConstants {
+ ApmConstants(int agc_startup_min_volume,
+ bool use_new_agc,
+ bool intelligibility_enabled)
+ : agc_startup_min_volume(agc_startup_min_volume),
+ use_new_agc(use_new_agc),
+ intelligibility_enabled(intelligibility_enabled) {}
+ int agc_startup_min_volume;
+ bool use_new_agc;
+ bool intelligibility_enabled;
+ } constants_;
+
+ struct ApmCaptureState {
+ ApmCaptureState(bool transient_suppressor_enabled,
+ const std::vector<Point>& array_geometry,
+ SphericalPointf target_direction)
+ : aec_system_delay_jumps(-1),
+ delay_offset_ms(0),
+ was_stream_delay_set(false),
+ last_stream_delay_ms(0),
+ last_aec_system_delay_ms(0),
+ stream_delay_jumps(-1),
+ output_will_be_muted(false),
+ key_pressed(false),
+ transient_suppressor_enabled(transient_suppressor_enabled),
+ array_geometry(array_geometry),
+ target_direction(target_direction),
+ fwd_proc_format(kSampleRate16kHz),
+ split_rate(kSampleRate16kHz) {}
+ int aec_system_delay_jumps;
+ int delay_offset_ms;
+ bool was_stream_delay_set;
+ int last_stream_delay_ms;
+ int last_aec_system_delay_ms;
+ int stream_delay_jumps;
+ bool output_will_be_muted;
+ bool key_pressed;
+ bool transient_suppressor_enabled;
+ std::vector<Point> array_geometry;
+ SphericalPointf target_direction;
+ rtc::scoped_ptr<AudioBuffer> capture_audio;
+ // Only the rate and samples fields of fwd_proc_format are used, because
+ // the forward processing number of channels is mutable and is tracked by
+ // capture_audio.
+ StreamConfig fwd_proc_format;
+ int split_rate;
+ } capture_ GUARDED_BY(crit_capture_);
+
+ struct ApmCaptureNonLockedState {
+ ApmCaptureNonLockedState(bool beamformer_enabled)
+ : fwd_proc_format(kSampleRate16kHz),
+ split_rate(kSampleRate16kHz),
+ stream_delay_ms(0),
+ beamformer_enabled(beamformer_enabled) {}
+ // Only the rate and samples fields of fwd_proc_format are used, because
+ // the forward processing number of channels is mutable and is tracked by
+ // capture_audio.
+ StreamConfig fwd_proc_format;
+ int split_rate;
+ int stream_delay_ms;
+ bool beamformer_enabled;
+ } capture_nonlocked_;
+
+ struct ApmRenderState {
+ rtc::scoped_ptr<AudioConverter> render_converter;
+ rtc::scoped_ptr<AudioBuffer> render_audio;
+ } render_ GUARDED_BY(crit_render_);
};
} // namespace webrtc
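The GUARDED_BY, EXCLUSIVE_LOCKS_REQUIRED, and ACQUIRED_BEFORE macros in the header above expand to Clang's -Wthread-safety attributes, turning the locking contract into a compile-time check. A toy example, assuming macro definitions equivalent to those in webrtc/base/thread_annotations.h (the Mutex wrapper and Apm class are illustrative):

// Build with: clang++ -c -Wthread-safety toy.cc
#include <mutex>

#define CAPABILITY(x) __attribute__((capability(x)))
#define GUARDED_BY(x) __attribute__((guarded_by(x)))
#define EXCLUSIVE_LOCKS_REQUIRED(...) \
  __attribute__((exclusive_locks_required(__VA_ARGS__)))
#define ACQUIRED_BEFORE(...) __attribute__((acquired_before(__VA_ARGS__)))
#define ACQUIRE(...) __attribute__((acquire_capability(__VA_ARGS__)))
#define RELEASE(...) __attribute__((release_capability(__VA_ARGS__)))

// A minimal annotated mutex so the analysis knows what locks and unlocks.
class CAPABILITY("mutex") Mutex {
 public:
  void Lock() ACQUIRE() { m_.lock(); }
  void Unlock() RELEASE() { m_.unlock(); }

 private:
  std::mutex m_;
};

class Apm {
 public:
  void SetDelay(int ms) {
    render_.Lock();  // Honors the declared order: render before capture.
    capture_.Lock();
    SetDelayLocked(ms);
    capture_.Unlock();
    render_.Unlock();
  }

  // Callers must already hold capture_; the compiler enforces this.
  void SetDelayLocked(int ms) EXCLUSIVE_LOCKS_REQUIRED(capture_) {
    delay_ms_ = ms;
  }

  int DelayUnsafe() const { return delay_ms_; }  // Warns: capture_ not held.

 private:
  Mutex render_ ACQUIRED_BEFORE(capture_);
  Mutex capture_;
  int delay_ms_ GUARDED_BY(capture_) = 0;
};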
diff --git a/webrtc/modules/audio_processing/audio_processing_impl_locking_unittest.cc b/webrtc/modules/audio_processing/audio_processing_impl_locking_unittest.cc
new file mode 100644
index 0000000000..e1e6a310a5
--- /dev/null
+++ b/webrtc/modules/audio_processing/audio_processing_impl_locking_unittest.cc
@@ -0,0 +1,1133 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_processing/audio_processing_impl.h"
+
+#include <algorithm>
+#include <vector>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/array_view.h"
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/event.h"
+#include "webrtc/base/platform_thread.h"
+#include "webrtc/base/random.h"
+#include "webrtc/config.h"
+#include "webrtc/modules/audio_processing/test/test_utils.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/system_wrappers/include/sleep.h"
+
+namespace webrtc {
+
+namespace {
+
+class AudioProcessingImplLockTest;
+
+// Type of the render thread APM API call to use in the test.
+enum class RenderApiImpl {
+ ProcessReverseStreamImpl1,
+ ProcessReverseStreamImpl2,
+ AnalyzeReverseStreamImpl1,
+ AnalyzeReverseStreamImpl2
+};
+
+// Type of the capture thread APM API call to use in the test.
+enum class CaptureApiImpl {
+ ProcessStreamImpl1,
+ ProcessStreamImpl2,
+ ProcessStreamImpl3
+};
+
+// The runtime parameter setting scheme to use in the test.
+enum class RuntimeParameterSettingScheme {
+ SparseStreamMetadataChangeScheme,
+ ExtremeStreamMetadataChangeScheme,
+ FixedMonoStreamMetadataScheme,
+ FixedStereoStreamMetadataScheme
+};
+
+// Variant of echo canceller settings to use in the test.
+enum class AecType {
+ BasicWebRtcAecSettings,
+ AecTurnedOff,
+ BasicWebRtcAecSettingsWithExtendedFilter,
+ BasicWebRtcAecSettingsWithDelayAgnosticAec,
+ BasicWebRtcAecSettingsWithAecMobile
+};
+
+// Thread-safe random number generator wrapper.
+class RandomGenerator {
+ public:
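+ // The fixed seed makes the generated random sequence deterministic across
+ // test runs.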
+ RandomGenerator() : rand_gen_(42U) {}
+
+ int RandInt(int min, int max) {
+ rtc::CritScope cs(&crit_);
+ return rand_gen_.Rand(min, max);
+ }
+
+ int RandInt(int max) {
+ rtc::CritScope cs(&crit_);
+ return rand_gen_.Rand(max);
+ }
+
+ float RandFloat() {
+ rtc::CritScope cs(&crit_);
+ return rand_gen_.Rand<float>();
+ }
+
+ private:
+ rtc::CriticalSection crit_;
+ Random rand_gen_ GUARDED_BY(crit_);
+};
+
+// Variables related to the audio data and formats.
+struct AudioFrameData {
+ explicit AudioFrameData(int max_frame_size) {
+ // Set up the two-dimensional arrays needed for the APM API calls.
+ input_frame_channels.resize(2 * max_frame_size);
+ input_frame.resize(2);
+ input_frame[0] = &input_frame_channels[0];
+ input_frame[1] = &input_frame_channels[max_frame_size];
+
+ output_frame_channels.resize(2 * max_frame_size);
+ output_frame.resize(2);
+ output_frame[0] = &output_frame_channels[0];
+ output_frame[1] = &output_frame_channels[max_frame_size];
+ }
+
+ AudioFrame frame;
+ std::vector<float*> output_frame;
+ std::vector<float> output_frame_channels;
+ AudioProcessing::ChannelLayout output_channel_layout =
+ AudioProcessing::ChannelLayout::kMono;
+ int input_sample_rate_hz = 16000;
+ int input_number_of_channels = -1;
+ std::vector<float*> input_frame;
+ std::vector<float> input_frame_channels;
+ AudioProcessing::ChannelLayout input_channel_layout =
+ AudioProcessing::ChannelLayout::kMono;
+ int output_sample_rate_hz = 16000;
+ int output_number_of_channels = -1;
+ StreamConfig input_stream_config;
+ StreamConfig output_stream_config;
+ int input_samples_per_channel = -1;
+ int output_samples_per_channel = -1;
+};
+
+// The configuration for the test.
+struct TestConfig {
+ // Test case generator for the test configurations to use in the brief tests.
+ static std::vector<TestConfig> GenerateBriefTestConfigs() {
+ std::vector<TestConfig> test_configs;
+ AecType aec_types[] = {AecType::BasicWebRtcAecSettingsWithDelayAgnosticAec,
+ AecType::BasicWebRtcAecSettingsWithAecMobile};
+ for (auto aec_type : aec_types) {
+ TestConfig test_config;
+ test_config.aec_type = aec_type;
+
+ test_config.min_number_of_calls = 300;
+
+ // Perform tests only with the extreme runtime parameter setting scheme.
+ test_config.runtime_parameter_setting_scheme =
+ RuntimeParameterSettingScheme::ExtremeStreamMetadataChangeScheme;
+
+ // Only test 16 kHz for this test suite.
+ test_config.initial_sample_rate_hz = 16000;
+
+ // Add a test config for the first processing API function set.
+ test_config.render_api_function =
+ RenderApiImpl::ProcessReverseStreamImpl2;
+ test_config.capture_api_function = CaptureApiImpl::ProcessStreamImpl2;
+ test_configs.push_back(test_config);
+
+ // Add a test config for the second processing API function set.
+ test_config.render_api_function =
+ RenderApiImpl::AnalyzeReverseStreamImpl2;
+ test_config.capture_api_function = CaptureApiImpl::ProcessStreamImpl3;
+ test_configs.push_back(test_config);
+ }
+
+ // Return the created test configurations.
+ return test_configs;
+ }
+
+ // Test case generator for the test configurations to use in the extensive
+ // tests.
+ static std::vector<TestConfig> GenerateExtensiveTestConfigs() {
+ // Lambda functions for the test config generation.
+ auto add_processing_apis = [](TestConfig test_config) {
+ struct AllowedApiCallCombinations {
+ RenderApiImpl render_api;
+ CaptureApiImpl capture_api;
+ };
+
+ const AllowedApiCallCombinations api_calls[] = {
+ {RenderApiImpl::ProcessReverseStreamImpl1,
+ CaptureApiImpl::ProcessStreamImpl1},
+ {RenderApiImpl::AnalyzeReverseStreamImpl1,
+ CaptureApiImpl::ProcessStreamImpl1},
+ {RenderApiImpl::ProcessReverseStreamImpl2,
+ CaptureApiImpl::ProcessStreamImpl2},
+ {RenderApiImpl::ProcessReverseStreamImpl2,
+ CaptureApiImpl::ProcessStreamImpl3},
+ {RenderApiImpl::AnalyzeReverseStreamImpl2,
+ CaptureApiImpl::ProcessStreamImpl2},
+ {RenderApiImpl::AnalyzeReverseStreamImpl2,
+ CaptureApiImpl::ProcessStreamImpl3}};
+ std::vector<TestConfig> out;
+ for (auto api_call : api_calls) {
+ test_config.render_api_function = api_call.render_api;
+ test_config.capture_api_function = api_call.capture_api;
+ out.push_back(test_config);
+ }
+ return out;
+ };
+
+ auto add_aec_settings = [](const std::vector<TestConfig>& in) {
+ std::vector<TestConfig> out;
+ AecType aec_types[] = {
+ AecType::BasicWebRtcAecSettings, AecType::AecTurnedOff,
+ AecType::BasicWebRtcAecSettingsWithExtendedFilter,
+ AecType::BasicWebRtcAecSettingsWithDelayAgnosticAec,
+ AecType::BasicWebRtcAecSettingsWithAecMobile};
+ for (auto test_config : in) {
+ for (auto aec_type : aec_types) {
+ test_config.aec_type = aec_type;
+ out.push_back(test_config);
+ }
+ }
+ return out;
+ };
+
+ auto add_settings_scheme = [](const std::vector<TestConfig>& in) {
+ std::vector<TestConfig> out;
+ RuntimeParameterSettingScheme schemes[] = {
+ RuntimeParameterSettingScheme::SparseStreamMetadataChangeScheme,
+ RuntimeParameterSettingScheme::ExtremeStreamMetadataChangeScheme,
+ RuntimeParameterSettingScheme::FixedMonoStreamMetadataScheme,
+ RuntimeParameterSettingScheme::FixedStereoStreamMetadataScheme};
+
+ for (auto test_config : in) {
+ for (auto scheme : schemes) {
+ test_config.runtime_parameter_setting_scheme = scheme;
+ out.push_back(test_config);
+ }
+ }
+ return out;
+ };
+
+ auto add_sample_rates = [](const std::vector<TestConfig>& in) {
+ const int sample_rates[] = {8000, 16000, 32000, 48000};
+
+ std::vector<TestConfig> out;
+ for (auto test_config : in) {
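+ // The mobile AEC only supports the two lowest sample rates (8 and
+ // 16 kHz), so the available rates are restricted for that variant.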
+ auto available_rates =
+ (test_config.aec_type ==
+ AecType::BasicWebRtcAecSettingsWithAecMobile
+ ? rtc::ArrayView<const int>(sample_rates, 2)
+ : rtc::ArrayView<const int>(sample_rates));
+
+ for (auto rate : available_rates) {
+ test_config.initial_sample_rate_hz = rate;
+ out.push_back(test_config);
+ }
+ }
+ return out;
+ };
+
+ // Generate test configurations for the relevant combinations of the
+ // parameters to test.
+ TestConfig test_config;
+ test_config.min_number_of_calls = 10000;
+ return add_sample_rates(add_settings_scheme(
+ add_aec_settings(add_processing_apis(test_config))));
+ }
+
+ RenderApiImpl render_api_function = RenderApiImpl::ProcessReverseStreamImpl2;
+ CaptureApiImpl capture_api_function = CaptureApiImpl::ProcessStreamImpl2;
+ RuntimeParameterSettingScheme runtime_parameter_setting_scheme =
+ RuntimeParameterSettingScheme::ExtremeStreamMetadataChangeScheme;
+ int initial_sample_rate_hz = 16000;
+ AecType aec_type = AecType::BasicWebRtcAecSettingsWithDelayAgnosticAec;
+ int min_number_of_calls = 300;
+};
+
+// Handler for the frame counters.
+class FrameCounters {
+ public:
+ void IncreaseRenderCounter() {
+ rtc::CritScope cs(&crit_);
+ render_count++;
+ }
+
+ void IncreaseCaptureCounter() {
+ rtc::CritScope cs(&crit_);
+ capture_count++;
+ }
+
+ int GetCaptureCounter() const {
+ rtc::CritScope cs(&crit_);
+ return capture_count;
+ }
+
+ int GetRenderCounter() const {
+ rtc::CritScope cs(&crit_);
+ return render_count;
+ }
+
+ int CaptureMinusRenderCounters() const {
+ rtc::CritScope cs(&crit_);
+ return capture_count - render_count;
+ }
+
+ int RenderMinusCaptureCounters() const {
+ return -CaptureMinusRenderCounters();
+ }
+
+ bool BothCountersExceedThreshold(int threshold) {
+ rtc::CritScope cs(&crit_);
+ return (render_count > threshold && capture_count > threshold);
+ }
+
+ private:
+ mutable rtc::CriticalSection crit_;
+ int render_count GUARDED_BY(crit_) = 0;
+ int capture_count GUARDED_BY(crit_) = 0;
+};
+
+// Class for handling the capture side processing.
+class CaptureProcessor {
+ public:
+ CaptureProcessor(int max_frame_size,
+ RandomGenerator* rand_gen,
+ rtc::Event* render_call_event,
+ rtc::Event* capture_call_event,
+ FrameCounters* shared_counters_state,
+ AudioProcessingImplLockTest* test_framework,
+ TestConfig* test_config,
+ AudioProcessing* apm);
+ bool Process();
+
+ private:
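+ // The maximum number of capture calls in excess of render calls before the
+ // capture thread blocks waiting for a render call.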
+ static const int kMaxCallDifference = 10;
+ static const float kCaptureInputFloatLevel;
+ static const int kCaptureInputFixLevel = 1024;
+
+ void PrepareFrame();
+ void CallApmCaptureSide();
+ void ApplyRuntimeSettingScheme();
+
+ RandomGenerator* const rand_gen_ = nullptr;
+ rtc::Event* const render_call_event_ = nullptr;
+ rtc::Event* const capture_call_event_ = nullptr;
+ FrameCounters* const frame_counters_ = nullptr;
+ AudioProcessingImplLockTest* const test_ = nullptr;
+ const TestConfig* const test_config_ = nullptr;
+ AudioProcessing* const apm_ = nullptr;
+ AudioFrameData frame_data_;
+};
+
+// Class for handling the stats processing.
+class StatsProcessor {
+ public:
+ StatsProcessor(RandomGenerator* rand_gen,
+ TestConfig* test_config,
+ AudioProcessing* apm);
+ bool Process();
+
+ private:
+ RandomGenerator* rand_gen_ = nullptr;
+ TestConfig* test_config_ = nullptr;
+ AudioProcessing* apm_ = nullptr;
+};
+
+// Class for handling the render side processing.
+class RenderProcessor {
+ public:
+ RenderProcessor(int max_frame_size,
+ RandomGenerator* rand_gen,
+ rtc::Event* render_call_event,
+ rtc::Event* capture_call_event,
+ FrameCounters* shared_counters_state,
+ AudioProcessingImplLockTest* test_framework,
+ TestConfig* test_config,
+ AudioProcessing* apm);
+ bool Process();
+
+ private:
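+ // The maximum number of render calls in excess of capture calls before the
+ // render thread blocks waiting for a capture call.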
+ static const int kMaxCallDifference = 10;
+ static const int kRenderInputFixLevel = 16384;
+ static const float kRenderInputFloatLevel;
+
+ void PrepareFrame();
+ void CallApmRenderSide();
+ void ApplyRuntimeSettingScheme();
+
+ RandomGenerator* const rand_gen_ = nullptr;
+ rtc::Event* const render_call_event_ = nullptr;
+ rtc::Event* const capture_call_event_ = nullptr;
+ FrameCounters* const frame_counters_ = nullptr;
+ AudioProcessingImplLockTest* const test_ = nullptr;
+ const TestConfig* const test_config_ = nullptr;
+ AudioProcessing* const apm_ = nullptr;
+ AudioFrameData frame_data_;
+ bool first_render_call_ = true;
+};
+
+class AudioProcessingImplLockTest
+ : public ::testing::TestWithParam<TestConfig> {
+ public:
+ AudioProcessingImplLockTest();
+ bool RunTest();
+ bool MaybeEndTest();
+
+ private:
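+ // Timeout for the whole test, in milliseconds (10 minutes).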
+ static const int kTestTimeOutLimit = 10 * 60 * 1000;
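+ // The maximum frame size used in the test: 10 ms (one APM chunk) at 48 kHz.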
+ static const int kMaxFrameSize = 480;
+
+ // ::testing::TestWithParam<> implementation
+ void SetUp() override;
+ void TearDown() override;
+
+ // Thread callback for the render thread
+ static bool RenderProcessorThreadFunc(void* context) {
+ return reinterpret_cast<AudioProcessingImplLockTest*>(context)
+ ->render_thread_state_.Process();
+ }
+
+ // Thread callback for the capture thread
+ static bool CaptureProcessorThreadFunc(void* context) {
+ return reinterpret_cast<AudioProcessingImplLockTest*>(context)
+ ->capture_thread_state_.Process();
+ }
+
+ // Thread callback for the stats thread
+ static bool StatsProcessorThreadFunc(void* context) {
+ return reinterpret_cast<AudioProcessingImplLockTest*>(context)
+ ->stats_thread_state_.Process();
+ }
+
+ // Tests whether all the required render and capture side calls have been
+ // done.
+ bool TestDone() {
+ return frame_counters_.BothCountersExceedThreshold(
+ test_config_.min_number_of_calls);
+ }
+
+ // Start the threads used in the test.
+ void StartThreads() {
+ render_thread_.Start();
+ render_thread_.SetPriority(rtc::kRealtimePriority);
+ capture_thread_.Start();
+ capture_thread_.SetPriority(rtc::kRealtimePriority);
+ stats_thread_.Start();
+ stats_thread_.SetPriority(rtc::kNormalPriority);
+ }
+
+ // Event handlers for the test.
+ rtc::Event test_complete_;
+ rtc::Event render_call_event_;
+ rtc::Event capture_call_event_;
+
+ // Thread related variables.
+ rtc::PlatformThread render_thread_;
+ rtc::PlatformThread capture_thread_;
+ rtc::PlatformThread stats_thread_;
+ mutable RandomGenerator rand_gen_;
+
+ rtc::scoped_ptr<AudioProcessing> apm_;
+ TestConfig test_config_;
+ FrameCounters frame_counters_;
+ RenderProcessor render_thread_state_;
+ CaptureProcessor capture_thread_state_;
+ StatsProcessor stats_thread_state_;
+};
+
+// Sleeps a random time between 0 and max_sleep milliseconds.
+void SleepRandomMs(int max_sleep, RandomGenerator* rand_gen) {
+ int sleeptime = rand_gen->RandInt(0, max_sleep);
+ SleepMs(sleeptime);
+}
+
+// Populates a float audio frame with random data.
+void PopulateAudioFrame(float** frame,
+ float amplitude,
+ size_t num_channels,
+ size_t samples_per_channel,
+ RandomGenerator* rand_gen) {
+ for (size_t ch = 0; ch < num_channels; ch++) {
+ for (size_t k = 0; k < samples_per_channel; k++) {
+ // Store a random float with a value in the range +-amplitude.
+ frame[ch][k] = amplitude * (2 * rand_gen->RandFloat() - 1);
+ }
+ }
+}
+
+// Populates an AudioFrame with random interleaved data.
+void PopulateAudioFrame(AudioFrame* frame,
+ int16_t amplitude,
+ RandomGenerator* rand_gen) {
+ ASSERT_GT(amplitude, 0);
+ ASSERT_LE(amplitude, 32767);
+ for (size_t ch = 0; ch < frame->num_channels_; ch++) {
+ for (size_t k = 0; k < frame->samples_per_channel_; k++) {
+ // Store a random 16 bit number between -(amplitude+1) and
+ // amplitude at the interleaved sample position.
+ frame->data_[k * frame->num_channels_ + ch] =
+ rand_gen->RandInt(2 * amplitude + 1) - amplitude - 1;
+ }
+ }
+}
+
+AudioProcessingImplLockTest::AudioProcessingImplLockTest()
+ : test_complete_(false, false),
+ render_call_event_(false, false),
+ capture_call_event_(false, false),
+ render_thread_(RenderProcessorThreadFunc, this, "render"),
+ capture_thread_(CaptureProcessorThreadFunc, this, "capture"),
+ stats_thread_(StatsProcessorThreadFunc, this, "stats"),
+ apm_(AudioProcessingImpl::Create()),
+ render_thread_state_(kMaxFrameSize,
+ &rand_gen_,
+ &render_call_event_,
+ &capture_call_event_,
+ &frame_counters_,
+ this,
+ &test_config_,
+ apm_.get()),
+ capture_thread_state_(kMaxFrameSize,
+ &rand_gen_,
+ &render_call_event_,
+ &capture_call_event_,
+ &frame_counters_,
+ this,
+ &test_config_,
+ apm_.get()),
+ stats_thread_state_(&rand_gen_, &test_config_, apm_.get()) {}
+
+// Run the test with a timeout.
+bool AudioProcessingImplLockTest::RunTest() {
+ StartThreads();
+ return test_complete_.Wait(kTestTimeOutLimit);
+}
+
+bool AudioProcessingImplLockTest::MaybeEndTest() {
+ if (HasFatalFailure() || TestDone()) {
+ test_complete_.Set();
+ return true;
+ }
+ return false;
+}
+
+// Setup of test and APM.
+void AudioProcessingImplLockTest::SetUp() {
+ test_config_ = static_cast<TestConfig>(GetParam());
+
+ ASSERT_EQ(apm_->kNoError, apm_->level_estimator()->Enable(true));
+ ASSERT_EQ(apm_->kNoError, apm_->gain_control()->Enable(true));
+
+ ASSERT_EQ(apm_->kNoError,
+ apm_->gain_control()->set_mode(GainControl::kAdaptiveDigital));
+ ASSERT_EQ(apm_->kNoError, apm_->gain_control()->Enable(true));
+
+ ASSERT_EQ(apm_->kNoError, apm_->noise_suppression()->Enable(true));
+ ASSERT_EQ(apm_->kNoError, apm_->voice_detection()->Enable(true));
+
+ Config config;
+ if (test_config_.aec_type == AecType::AecTurnedOff) {
+ ASSERT_EQ(apm_->kNoError, apm_->echo_control_mobile()->Enable(false));
+ ASSERT_EQ(apm_->kNoError, apm_->echo_cancellation()->Enable(false));
+ } else if (test_config_.aec_type ==
+ AecType::BasicWebRtcAecSettingsWithAecMobile) {
+ ASSERT_EQ(apm_->kNoError, apm_->echo_control_mobile()->Enable(true));
+ ASSERT_EQ(apm_->kNoError, apm_->echo_cancellation()->Enable(false));
+ } else {
+ ASSERT_EQ(apm_->kNoError, apm_->echo_control_mobile()->Enable(false));
+ ASSERT_EQ(apm_->kNoError, apm_->echo_cancellation()->Enable(true));
+ ASSERT_EQ(apm_->kNoError, apm_->echo_cancellation()->enable_metrics(true));
+ ASSERT_EQ(apm_->kNoError,
+ apm_->echo_cancellation()->enable_delay_logging(true));
+
+ config.Set<ExtendedFilter>(
+ new ExtendedFilter(test_config_.aec_type ==
+ AecType::BasicWebRtcAecSettingsWithExtendedFilter));
+
+ config.Set<DelayAgnostic>(
+ new DelayAgnostic(test_config_.aec_type ==
+ AecType::BasicWebRtcAecSettingsWithDelayAgnosticAec));
+
+ apm_->SetExtraOptions(config);
+ }
+}
+
+void AudioProcessingImplLockTest::TearDown() {
+ render_call_event_.Set();
+ capture_call_event_.Set();
+ render_thread_.Stop();
+ capture_thread_.Stop();
+ stats_thread_.Stop();
+}
+
+StatsProcessor::StatsProcessor(RandomGenerator* rand_gen,
+ TestConfig* test_config,
+ AudioProcessing* apm)
+ : rand_gen_(rand_gen), test_config_(test_config), apm_(apm) {}
+
+// Implements the callback functionality for the statistics
+// collection thread.
+bool StatsProcessor::Process() {
+ SleepRandomMs(100, rand_gen_);
+
+ EXPECT_EQ(apm_->echo_cancellation()->is_enabled(),
+ ((test_config_->aec_type != AecType::AecTurnedOff) &&
+ (test_config_->aec_type !=
+ AecType::BasicWebRtcAecSettingsWithAecMobile)));
+ apm_->echo_cancellation()->stream_drift_samples();
+ EXPECT_EQ(apm_->echo_control_mobile()->is_enabled(),
+ (test_config_->aec_type != AecType::AecTurnedOff) &&
+ (test_config_->aec_type ==
+ AecType::BasicWebRtcAecSettingsWithAecMobile));
+ EXPECT_TRUE(apm_->gain_control()->is_enabled());
+ apm_->gain_control()->stream_analog_level();
+ EXPECT_TRUE(apm_->noise_suppression()->is_enabled());
+
+ // The return values below are not testable.
+ apm_->noise_suppression()->speech_probability();
+ apm_->voice_detection()->is_enabled();
+
+ return true;
+}
+
+const float CaptureProcessor::kCaptureInputFloatLevel = 0.03125f;
+
+CaptureProcessor::CaptureProcessor(int max_frame_size,
+ RandomGenerator* rand_gen,
+ rtc::Event* render_call_event,
+ rtc::Event* capture_call_event,
+ FrameCounters* shared_counters_state,
+ AudioProcessingImplLockTest* test_framework,
+ TestConfig* test_config,
+ AudioProcessing* apm)
+ : rand_gen_(rand_gen),
+ render_call_event_(render_call_event),
+ capture_call_event_(capture_call_event),
+ frame_counters_(shared_counters_state),
+ test_(test_framework),
+ test_config_(test_config),
+ apm_(apm),
+ frame_data_(max_frame_size) {}
+
+// Implements the callback functionality for the capture thread.
+bool CaptureProcessor::Process() {
+ // Sleep a random time to simulate thread jitter.
+ SleepRandomMs(3, rand_gen_);
+
+ // Check whether the test is done.
+ if (test_->MaybeEndTest()) {
+ return false;
+ }
+
+ // Ensure that the number of render and capture calls do not
+ // differ too much.
+ if (frame_counters_->CaptureMinusRenderCounters() > kMaxCallDifference) {
+ render_call_event_->Wait(rtc::Event::kForever);
+ }
+
+ // Apply any specified capture side APM non-processing runtime calls.
+ ApplyRuntimeSettingScheme();
+
+ // Apply the capture side processing call.
+ CallApmCaptureSide();
+
+ // Increase the number of capture-side calls.
+ frame_counters_->IncreaseCaptureCounter();
+
+ // Signal to the render thread that another capture API call has occurred
+ // by triggering this thread's call event.
+ capture_call_event_->Set();
+
+ return true;
+}
+
+// Prepares a frame with relevant audio data and metadata.
+void CaptureProcessor::PrepareFrame() {
+ // Restrict to a common fixed sample rate if the AudioFrame
+ // interface is used.
+ if (test_config_->capture_api_function ==
+ CaptureApiImpl::ProcessStreamImpl1) {
+ frame_data_.input_sample_rate_hz = test_config_->initial_sample_rate_hz;
+ frame_data_.output_sample_rate_hz = test_config_->initial_sample_rate_hz;
+ }
+
+ // Prepare the AudioFrame data and metadata.
+ frame_data_.input_samples_per_channel =
+ frame_data_.input_sample_rate_hz * AudioProcessing::kChunkSizeMs / 1000;
+ frame_data_.frame.sample_rate_hz_ = frame_data_.input_sample_rate_hz;
+ frame_data_.frame.num_channels_ = frame_data_.input_number_of_channels;
+ frame_data_.frame.samples_per_channel_ =
+ frame_data_.input_samples_per_channel;
+ PopulateAudioFrame(&frame_data_.frame, kCaptureInputFixLevel, rand_gen_);
+
+ // Prepare the float audio input data and metadata.
+ frame_data_.input_stream_config.set_sample_rate_hz(
+ frame_data_.input_sample_rate_hz);
+ frame_data_.input_stream_config.set_num_channels(
+ frame_data_.input_number_of_channels);
+ frame_data_.input_stream_config.set_has_keyboard(false);
+ PopulateAudioFrame(&frame_data_.input_frame[0], kCaptureInputFloatLevel,
+ frame_data_.input_number_of_channels,
+ frame_data_.input_samples_per_channel, rand_gen_);
+ frame_data_.input_channel_layout =
+ (frame_data_.input_number_of_channels == 1
+ ? AudioProcessing::ChannelLayout::kMono
+ : AudioProcessing::ChannelLayout::kStereo);
+
+ // Prepare the float audio output data and metadata.
+ frame_data_.output_samples_per_channel =
+ frame_data_.output_sample_rate_hz * AudioProcessing::kChunkSizeMs / 1000;
+ frame_data_.output_stream_config.set_sample_rate_hz(
+ frame_data_.output_sample_rate_hz);
+ frame_data_.output_stream_config.set_num_channels(
+ frame_data_.output_number_of_channels);
+ frame_data_.output_stream_config.set_has_keyboard(false);
+ frame_data_.output_channel_layout =
+ (frame_data_.output_number_of_channels == 1
+ ? AudioProcessing::ChannelLayout::kMono
+ : AudioProcessing::ChannelLayout::kStereo);
+}
+
+// Applies the capture side processing API call.
+void CaptureProcessor::CallApmCaptureSide() {
+ // Prepare a proper capture side processing API call input.
+ PrepareFrame();
+
+ // Set the stream delay
+ apm_->set_stream_delay_ms(30);
+
+ // Call the specified capture side API processing method.
+ int result = AudioProcessing::kNoError;
+ switch (test_config_->capture_api_function) {
+ case CaptureApiImpl::ProcessStreamImpl1:
+ result = apm_->ProcessStream(&frame_data_.frame);
+ break;
+ case CaptureApiImpl::ProcessStreamImpl2:
+ result = apm_->ProcessStream(
+ &frame_data_.input_frame[0], frame_data_.input_samples_per_channel,
+ frame_data_.input_sample_rate_hz, frame_data_.input_channel_layout,
+ frame_data_.output_sample_rate_hz, frame_data_.output_channel_layout,
+ &frame_data_.output_frame[0]);
+ break;
+ case CaptureApiImpl::ProcessStreamImpl3:
+ result = apm_->ProcessStream(
+ &frame_data_.input_frame[0], frame_data_.input_stream_config,
+ frame_data_.output_stream_config, &frame_data_.output_frame[0]);
+ break;
+ default:
+ FAIL();
+ }
+
+ // Check the return code for error.
+ ASSERT_EQ(AudioProcessing::kNoError, result);
+}
+
+// Applies any capture side runtime APM API calls and audio stream
+// characteristics specified by the scheme for the test.
+void CaptureProcessor::ApplyRuntimeSettingScheme() {
+ const int capture_count_local = frame_counters_->GetCaptureCounter();
+
+ // Update the number of channels and sample rates for the input and output.
+ // Note that the call counts at which the parameters change are chosen to
+ // be prime numbers, which ensures that the permutations of parameter
+ // settings vary over time.
+ switch (test_config_->runtime_parameter_setting_scheme) {
+ case RuntimeParameterSettingScheme::SparseStreamMetadataChangeScheme:
+ if (capture_count_local == 0)
+ frame_data_.input_sample_rate_hz = 16000;
+ else if (capture_count_local % 11 == 0)
+ frame_data_.input_sample_rate_hz = 32000;
+ else if (capture_count_local % 73 == 0)
+ frame_data_.input_sample_rate_hz = 48000;
+ else if (capture_count_local % 89 == 0)
+ frame_data_.input_sample_rate_hz = 16000;
+ else if (capture_count_local % 97 == 0)
+ frame_data_.input_sample_rate_hz = 8000;
+
+ if (capture_count_local == 0)
+ frame_data_.input_number_of_channels = 1;
+ else if (capture_count_local % 4 == 0)
+ frame_data_.input_number_of_channels =
+ (frame_data_.input_number_of_channels == 1 ? 2 : 1);
+
+ if (capture_count_local == 0)
+ frame_data_.output_sample_rate_hz = 16000;
+ else if (capture_count_local % 5 == 0)
+ frame_data_.output_sample_rate_hz = 32000;
+ else if (capture_count_local % 47 == 0)
+ frame_data_.output_sample_rate_hz = 48000;
+ else if (capture_count_local % 53 == 0)
+ frame_data_.output_sample_rate_hz = 16000;
+ else if (capture_count_local % 71 == 0)
+ frame_data_.output_sample_rate_hz = 8000;
+
+ if (capture_count_local == 0)
+ frame_data_.output_number_of_channels = 1;
+ else if (capture_count_local % 8 == 0)
+ frame_data_.output_number_of_channels =
+ (frame_data_.output_number_of_channels == 1 ? 2 : 1);
+ break;
+ case RuntimeParameterSettingScheme::ExtremeStreamMetadataChangeScheme:
+ if (capture_count_local % 2 == 0) {
+ frame_data_.input_number_of_channels = 1;
+ frame_data_.input_sample_rate_hz = 16000;
+ frame_data_.output_number_of_channels = 1;
+ frame_data_.output_sample_rate_hz = 16000;
+ } else {
+ frame_data_.input_number_of_channels =
+ (frame_data_.input_number_of_channels == 1 ? 2 : 1);
+ if (frame_data_.input_sample_rate_hz == 8000)
+ frame_data_.input_sample_rate_hz = 16000;
+ else if (frame_data_.input_sample_rate_hz == 16000)
+ frame_data_.input_sample_rate_hz = 32000;
+ else if (frame_data_.input_sample_rate_hz == 32000)
+ frame_data_.input_sample_rate_hz = 48000;
+ else if (frame_data_.input_sample_rate_hz == 48000)
+ frame_data_.input_sample_rate_hz = 8000;
+
+ frame_data_.output_number_of_channels =
+ (frame_data_.output_number_of_channels == 1 ? 2 : 1);
+ if (frame_data_.output_sample_rate_hz == 8000)
+ frame_data_.output_sample_rate_hz = 16000;
+ else if (frame_data_.output_sample_rate_hz == 16000)
+ frame_data_.output_sample_rate_hz = 32000;
+ else if (frame_data_.output_sample_rate_hz == 32000)
+ frame_data_.output_sample_rate_hz = 48000;
+ else if (frame_data_.output_sample_rate_hz == 48000)
+ frame_data_.output_sample_rate_hz = 8000;
+ }
+ break;
+ case RuntimeParameterSettingScheme::FixedMonoStreamMetadataScheme:
+ if (capture_count_local == 0) {
+ frame_data_.input_sample_rate_hz = 16000;
+ frame_data_.input_number_of_channels = 1;
+ frame_data_.output_sample_rate_hz = 16000;
+ frame_data_.output_number_of_channels = 1;
+ }
+ break;
+ case RuntimeParameterSettingScheme::FixedStereoStreamMetadataScheme:
+ if (capture_count_local == 0) {
+ frame_data_.input_sample_rate_hz = 16000;
+ frame_data_.input_number_of_channels = 2;
+ frame_data_.output_sample_rate_hz = 16000;
+ frame_data_.output_number_of_channels = 2;
+ }
+ break;
+ default:
+ FAIL();
+ }
+
+ // Perform any specified runtime APM setter and getter calls.
+ switch (test_config_->runtime_parameter_setting_scheme) {
+ case RuntimeParameterSettingScheme::SparseStreamMetadataChangeScheme:
+ case RuntimeParameterSettingScheme::FixedMonoStreamMetadataScheme:
+ break;
+ case RuntimeParameterSettingScheme::ExtremeStreamMetadataChangeScheme:
+ case RuntimeParameterSettingScheme::FixedStereoStreamMetadataScheme:
+ if (capture_count_local % 2 == 0) {
+ ASSERT_EQ(AudioProcessing::Error::kNoError,
+ apm_->set_stream_delay_ms(30));
+ apm_->set_stream_key_pressed(true);
+ apm_->set_delay_offset_ms(15);
+ EXPECT_EQ(apm_->delay_offset_ms(), 15);
+ } else {
+ ASSERT_EQ(AudioProcessing::Error::kNoError,
+ apm_->set_stream_delay_ms(50));
+ apm_->set_stream_key_pressed(false);
+ apm_->set_delay_offset_ms(20);
+ EXPECT_EQ(apm_->delay_offset_ms(), 20);
+ apm_->delay_offset_ms();
+ }
+ break;
+ default:
+ FAIL();
+ }
+
+ // Restrict the number of output channels so that it does not exceed
+ // the number of input channels.
+ frame_data_.output_number_of_channels =
+ std::min(frame_data_.output_number_of_channels,
+ frame_data_.input_number_of_channels);
+}
+
+const float RenderProcessor::kRenderInputFloatLevel = 0.5f;
+
+RenderProcessor::RenderProcessor(int max_frame_size,
+ RandomGenerator* rand_gen,
+ rtc::Event* render_call_event,
+ rtc::Event* capture_call_event,
+ FrameCounters* shared_counters_state,
+ AudioProcessingImplLockTest* test_framework,
+ TestConfig* test_config,
+ AudioProcessing* apm)
+ : rand_gen_(rand_gen),
+ render_call_event_(render_call_event),
+ capture_call_event_(capture_call_event),
+ frame_counters_(shared_counters_state),
+ test_(test_framework),
+ test_config_(test_config),
+ apm_(apm),
+ frame_data_(max_frame_size) {}
+
+// Implements the callback functionality for the render thread.
+bool RenderProcessor::Process() {
+ // Conditional wait to ensure that a capture call has been done
+ // before the first render call is performed (implicitly
+ // required by the APM API).
+ if (first_render_call_) {
+ capture_call_event_->Wait(rtc::Event::kForever);
+ first_render_call_ = false;
+ }
+
+ // Sleep a random time to simulate thread jitter.
+ SleepRandomMs(3, rand_gen_);
+
+ // Check whether the test is done.
+ if (test_->MaybeEndTest()) {
+ return false;
+ }
+
+ // Ensure that the number of render and capture calls do not
+ // differ too much.
+ if (frame_counters_->RenderMinusCaptureCounters() > kMaxCallDifference) {
+ capture_call_event_->Wait(rtc::Event::kForever);
+ }
+
+ // Apply any specified render side APM non-processing runtime calls.
+ ApplyRuntimeSettingScheme();
+
+ // Apply the render side processing call.
+ CallApmRenderSide();
+
+ // Increase the number of render-side calls.
+ frame_counters_->IncreaseRenderCounter();
+
+ // Signal to the capture thread that another render API call has occurred
+ // by triggering this thread's call event.
+ render_call_event_->Set();
+ return true;
+}
+
+// Prepares the render side frame and the accompanying metadata
+// with the appropriate information.
+void RenderProcessor::PrepareFrame() {
+ // Restrict to a common fixed sample rate if the AudioFrame interface is
+ // used.
+ if ((test_config_->render_api_function ==
+ RenderApiImpl::AnalyzeReverseStreamImpl1) ||
+ (test_config_->render_api_function ==
+ RenderApiImpl::ProcessReverseStreamImpl1) ||
+ (test_config_->aec_type !=
+ AecType::BasicWebRtcAecSettingsWithAecMobile)) {
+ frame_data_.input_sample_rate_hz = test_config_->initial_sample_rate_hz;
+ frame_data_.output_sample_rate_hz = test_config_->initial_sample_rate_hz;
+ }
+
+ // Prepare the AudioFrame data and metadata.
+ frame_data_.input_samples_per_channel =
+ frame_data_.input_sample_rate_hz * AudioProcessing::kChunkSizeMs / 1000;
+ frame_data_.frame.sample_rate_hz_ = frame_data_.input_sample_rate_hz;
+ frame_data_.frame.num_channels_ = frame_data_.input_number_of_channels;
+ frame_data_.frame.samples_per_channel_ =
+ frame_data_.input_samples_per_channel;
+ PopulateAudioFrame(&frame_data_.frame, kRenderInputFixLevel, rand_gen_);
+
+ // Prepare the float audio input data and metadata.
+ frame_data_.input_stream_config.set_sample_rate_hz(
+ frame_data_.input_sample_rate_hz);
+ frame_data_.input_stream_config.set_num_channels(
+ frame_data_.input_number_of_channels);
+ frame_data_.input_stream_config.set_has_keyboard(false);
+ PopulateAudioFrame(&frame_data_.input_frame[0], kRenderInputFloatLevel,
+ frame_data_.input_number_of_channels,
+ frame_data_.input_samples_per_channel, rand_gen_);
+ frame_data_.input_channel_layout =
+ (frame_data_.input_number_of_channels == 1
+ ? AudioProcessing::ChannelLayout::kMono
+ : AudioProcessing::ChannelLayout::kStereo);
+
+ // Prepare the float audio output data and metadata.
+ frame_data_.output_samples_per_channel =
+ frame_data_.output_sample_rate_hz * AudioProcessing::kChunkSizeMs / 1000;
+ frame_data_.output_stream_config.set_sample_rate_hz(
+ frame_data_.output_sample_rate_hz);
+ frame_data_.output_stream_config.set_num_channels(
+ frame_data_.output_number_of_channels);
+ frame_data_.output_stream_config.set_has_keyboard(false);
+ frame_data_.output_channel_layout =
+ (frame_data_.output_number_of_channels == 1
+ ? AudioProcessing::ChannelLayout::kMono
+ : AudioProcessing::ChannelLayout::kStereo);
+}
+
+// Makes the render side processing API call.
+void RenderProcessor::CallApmRenderSide() {
+ // Prepare a proper render side processing API call input.
+ PrepareFrame();
+
+ // Call the specified render side API processing method.
+ int result = AudioProcessing::kNoError;
+ switch (test_config_->render_api_function) {
+ case RenderApiImpl::ProcessReverseStreamImpl1:
+ result = apm_->ProcessReverseStream(&frame_data_.frame);
+ break;
+ case RenderApiImpl::ProcessReverseStreamImpl2:
+ result = apm_->ProcessReverseStream(
+ &frame_data_.input_frame[0], frame_data_.input_stream_config,
+ frame_data_.output_stream_config, &frame_data_.output_frame[0]);
+ break;
+ case RenderApiImpl::AnalyzeReverseStreamImpl1:
+ result = apm_->AnalyzeReverseStream(&frame_data_.frame);
+ break;
+ case RenderApiImpl::AnalyzeReverseStreamImpl2:
+ result = apm_->AnalyzeReverseStream(
+ &frame_data_.input_frame[0], frame_data_.input_samples_per_channel,
+ frame_data_.input_sample_rate_hz, frame_data_.input_channel_layout);
+ break;
+ default:
+ FAIL();
+ }
+
+ // Check the return code for error.
+ ASSERT_EQ(AudioProcessing::kNoError, result);
+}
+
+// Applies any render side runtime APM API calls and audio stream
+// characteristics specified by the scheme for the test.
+void RenderProcessor::ApplyRuntimeSettingScheme() {
+ const int render_count_local = frame_counters_->GetRenderCounter();
+
+ // Update the number of channels and sample rates for the input and output.
+ // Note that the call counts at which the parameters change are chosen to
+ // be prime numbers, which ensures that the permutations of parameter
+ // settings vary over time.
+ switch (test_config_->runtime_parameter_setting_scheme) {
+ case RuntimeParameterSettingScheme::SparseStreamMetadataChangeScheme:
+ if (render_count_local == 0)
+ frame_data_.input_sample_rate_hz = 16000;
+ else if (render_count_local % 47 == 0)
+ frame_data_.input_sample_rate_hz = 32000;
+ else if (render_count_local % 71 == 0)
+ frame_data_.input_sample_rate_hz = 48000;
+ else if (render_count_local % 79 == 0)
+ frame_data_.input_sample_rate_hz = 16000;
+ else if (render_count_local % 83 == 0)
+ frame_data_.input_sample_rate_hz = 8000;
+
+ if (render_count_local == 0)
+ frame_data_.input_number_of_channels = 1;
+ else if (render_count_local % 4 == 0)
+ frame_data_.input_number_of_channels =
+ (frame_data_.input_number_of_channels == 1 ? 2 : 1);
+
+ if (render_count_local == 0)
+ frame_data_.output_sample_rate_hz = 16000;
+ else if (render_count_local % 17 == 0)
+ frame_data_.output_sample_rate_hz = 32000;
+ else if (render_count_local % 19 == 0)
+ frame_data_.output_sample_rate_hz = 48000;
+ else if (render_count_local % 29 == 0)
+ frame_data_.output_sample_rate_hz = 16000;
+ else if (render_count_local % 61 == 0)
+ frame_data_.output_sample_rate_hz = 8000;
+
+ if (render_count_local == 0)
+ frame_data_.output_number_of_channels = 1;
+ else if (render_count_local % 8 == 0)
+ frame_data_.output_number_of_channels =
+ (frame_data_.output_number_of_channels == 1 ? 2 : 1);
+ break;
+ case RuntimeParameterSettingScheme::ExtremeStreamMetadataChangeScheme:
+ if (render_count_local == 0) {
+ frame_data_.input_number_of_channels = 1;
+ frame_data_.input_sample_rate_hz = 16000;
+ frame_data_.output_number_of_channels = 1;
+ frame_data_.output_sample_rate_hz = 16000;
+ } else {
+ frame_data_.input_number_of_channels =
+ (frame_data_.input_number_of_channels == 1 ? 2 : 1);
+ if (frame_data_.input_sample_rate_hz == 8000)
+ frame_data_.input_sample_rate_hz = 16000;
+ else if (frame_data_.input_sample_rate_hz == 16000)
+ frame_data_.input_sample_rate_hz = 32000;
+ else if (frame_data_.input_sample_rate_hz == 32000)
+ frame_data_.input_sample_rate_hz = 48000;
+ else if (frame_data_.input_sample_rate_hz == 48000)
+ frame_data_.input_sample_rate_hz = 8000;
+
+ frame_data_.output_number_of_channels =
+ (frame_data_.output_number_of_channels == 1 ? 2 : 1);
+ if (frame_data_.output_sample_rate_hz == 8000)
+ frame_data_.output_sample_rate_hz = 16000;
+ else if (frame_data_.output_sample_rate_hz == 16000)
+ frame_data_.output_sample_rate_hz = 32000;
+ else if (frame_data_.output_sample_rate_hz == 32000)
+ frame_data_.output_sample_rate_hz = 48000;
+ else if (frame_data_.output_sample_rate_hz == 48000)
+ frame_data_.output_sample_rate_hz = 8000;
+ }
+ break;
+ case RuntimeParameterSettingScheme::FixedMonoStreamMetadataScheme:
+ if (render_count_local == 0) {
+ frame_data_.input_sample_rate_hz = 16000;
+ frame_data_.input_number_of_channels = 1;
+ frame_data_.output_sample_rate_hz = 16000;
+ frame_data_.output_number_of_channels = 1;
+ }
+ break;
+ case RuntimeParameterSettingScheme::FixedStereoStreamMetadataScheme:
+ if (render_count_local == 0) {
+ frame_data_.input_sample_rate_hz = 16000;
+ frame_data_.input_number_of_channels = 2;
+ frame_data_.output_sample_rate_hz = 16000;
+ frame_data_.output_number_of_channels = 2;
+ }
+ break;
+ default:
+ FAIL();
+ }
+
+ // Restrict the number of output channels so that it does not exceed
+ // the number of input channels.
+ frame_data_.output_number_of_channels =
+ std::min(frame_data_.output_number_of_channels,
+ frame_data_.input_number_of_channels);
+}
+
+} // anonymous namespace
+
+TEST_P(AudioProcessingImplLockTest, LockTest) {
+ // Run test and verify that it did not time out.
+ ASSERT_TRUE(RunTest());
+}
+
+// Instantiate tests from the extensive test configuration set. The DISABLED_
+// prefix keeps these tests from running by default; they must be enabled
+// explicitly (e.g., with --gtest_also_run_disabled_tests).
+INSTANTIATE_TEST_CASE_P(
+ DISABLED_AudioProcessingImplLockExtensive,
+ AudioProcessingImplLockTest,
+ ::testing::ValuesIn(TestConfig::GenerateExtensiveTestConfigs()));
+
+INSTANTIATE_TEST_CASE_P(
+ AudioProcessingImplLockBrief,
+ AudioProcessingImplLockTest,
+ ::testing::ValuesIn(TestConfig::GenerateBriefTestConfigs()));
+
+} // namespace webrtc
diff --git a/webrtc/modules/audio_processing/audio_processing_impl_unittest.cc b/webrtc/modules/audio_processing/audio_processing_impl_unittest.cc
index f4c36d0009..ed20daaa61 100644
--- a/webrtc/modules/audio_processing/audio_processing_impl_unittest.cc
+++ b/webrtc/modules/audio_processing/audio_processing_impl_unittest.cc
@@ -14,7 +14,7 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/config.h"
#include "webrtc/modules/audio_processing/test/test_utils.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
using ::testing::Invoke;
using ::testing::Return;
diff --git a/webrtc/modules/audio_processing/audio_processing_performance_unittest.cc b/webrtc/modules/audio_processing/audio_processing_performance_unittest.cc
new file mode 100644
index 0000000000..0c8c060ea3
--- /dev/null
+++ b/webrtc/modules/audio_processing/audio_processing_performance_unittest.cc
@@ -0,0 +1,724 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "webrtc/modules/audio_processing/audio_processing_impl.h"
+
+#include <math.h>
+
+#include <algorithm>
+#include <vector>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/array_view.h"
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/platform_thread.h"
+#include "webrtc/base/random.h"
+#include "webrtc/base/safe_conversions.h"
+#include "webrtc/config.h"
+#include "webrtc/modules/audio_processing/test/test_utils.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/system_wrappers/include/clock.h"
+#include "webrtc/system_wrappers/include/event_wrapper.h"
+#include "webrtc/system_wrappers/include/sleep.h"
+#include "webrtc/test/testsupport/perf_test.h"
+
+namespace webrtc {
+
+namespace {
+
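+// When true, the duration of every individual API call is printed in
+// addition to the aggregated mean and standard deviation.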
+static const bool kPrintAllDurations = false;
+
+class CallSimulator;
+
+// Type of processing performed by a simulated thread (render or capture).
+enum class ProcessorType { kRender, kCapture };
+
+// Variant of APM processing settings to use in the test.
+enum class SettingsType {
+ kDefaultApmDesktop,
+ kDefaultApmMobile,
+ kDefaultApmDesktopAndBeamformer,
+ kDefaultApmDesktopAndIntelligibilityEnhancer,
+ kAllSubmodulesTurnedOff,
+ kDefaultDesktopApmWithoutDelayAgnostic,
+ kDefaultDesktopApmWithoutExtendedFilter
+};
+
+// Variables related to the audio data and formats.
+struct AudioFrameData {
+ explicit AudioFrameData(size_t max_frame_size) {
+ // Set up the two-dimensional arrays needed for the APM API calls.
+ input_frame_channels.resize(2 * max_frame_size);
+ input_frame.resize(2);
+ input_frame[0] = &input_frame_channels[0];
+ input_frame[1] = &input_frame_channels[max_frame_size];
+
+ output_frame_channels.resize(2 * max_frame_size);
+ output_frame.resize(2);
+ output_frame[0] = &output_frame_channels[0];
+ output_frame[1] = &output_frame_channels[max_frame_size];
+ }
+
+ std::vector<float> output_frame_channels;
+ std::vector<float*> output_frame;
+ std::vector<float> input_frame_channels;
+ std::vector<float*> input_frame;
+ StreamConfig input_stream_config;
+ StreamConfig output_stream_config;
+};
+
+// The configuration for the test.
+struct SimulationConfig {
+ SimulationConfig(int sample_rate_hz, SettingsType simulation_settings)
+ : sample_rate_hz(sample_rate_hz),
+ simulation_settings(simulation_settings) {}
+
+ static std::vector<SimulationConfig> GenerateSimulationConfigs() {
+ std::vector<SimulationConfig> simulation_configs;
+#ifndef WEBRTC_ANDROID
+ const SettingsType desktop_settings[] = {
+ SettingsType::kDefaultApmDesktop, SettingsType::kAllSubmodulesTurnedOff,
+ SettingsType::kDefaultDesktopApmWithoutDelayAgnostic,
+ SettingsType::kDefaultDesktopApmWithoutExtendedFilter};
+
+ const int desktop_sample_rates[] = {8000, 16000, 32000, 48000};
+
+ for (auto sample_rate : desktop_sample_rates) {
+ for (auto settings : desktop_settings) {
+ simulation_configs.push_back(SimulationConfig(sample_rate, settings));
+ }
+ }
+
+ const SettingsType intelligibility_enhancer_settings[] = {
+ SettingsType::kDefaultApmDesktopAndIntelligibilityEnhancer};
+
+ const int intelligibility_enhancer_sample_rates[] = {8000, 16000, 32000,
+ 48000};
+
+ for (auto sample_rate : intelligibility_enhancer_sample_rates) {
+ for (auto settings : intelligibility_enhancer_settings) {
+ simulation_configs.push_back(SimulationConfig(sample_rate, settings));
+ }
+ }
+
+ const SettingsType beamformer_settings[] = {
+ SettingsType::kDefaultApmDesktopAndBeamformer};
+
+ const int beamformer_sample_rates[] = {8000, 16000, 32000, 48000};
+
+ for (auto sample_rate : beamformer_sample_rates) {
+ for (auto settings : beamformer_settings) {
+ simulation_configs.push_back(SimulationConfig(sample_rate, settings));
+ }
+ }
+#endif
+
+ const SettingsType mobile_settings[] = {SettingsType::kDefaultApmMobile};
+
+ const int mobile_sample_rates[] = {8000, 16000};
+
+ for (auto sample_rate : mobile_sample_rates) {
+ for (auto settings : mobile_settings) {
+ simulation_configs.push_back(SimulationConfig(sample_rate, settings));
+ }
+ }
+
+ return simulation_configs;
+ }
+
+ std::string SettingsDescription() const {
+ std::string description;
+ switch (simulation_settings) {
+ case SettingsType::kDefaultApmMobile:
+ description = "DefaultApmMobile";
+ break;
+ case SettingsType::kDefaultApmDesktop:
+ description = "DefaultApmDesktop";
+ break;
+ case SettingsType::kDefaultApmDesktopAndBeamformer:
+ description = "DefaultApmDesktopAndBeamformer";
+ break;
+ case SettingsType::kDefaultApmDesktopAndIntelligibilityEnhancer:
+ description = "DefaultApmDesktopAndIntelligibilityEnhancer";
+ break;
+ case SettingsType::kAllSubmodulesTurnedOff:
+ description = "AllSubmodulesOff";
+ break;
+ case SettingsType::kDefaultDesktopApmWithoutDelayAgnostic:
+ description = "DefaultDesktopApmWithoutDelayAgnostic";
+ break;
+ case SettingsType::kDefaultDesktopApmWithoutExtendedFilter:
+ description = "DefaultDesktopApmWithoutExtendedFilter";
+ break;
+ }
+ return description;
+ }
+
+ int sample_rate_hz = 16000;
+ SettingsType simulation_settings = SettingsType::kDefaultApmDesktop;
+};
+
+// Handler for the frame counters.
+class FrameCounters {
+ public:
+ void IncreaseRenderCounter() {
+ rtc::CritScope cs(&crit_);
+ render_count_++;
+ }
+
+ void IncreaseCaptureCounter() {
+ rtc::CritScope cs(&crit_);
+ capture_count_++;
+ }
+
+ int GetCaptureCounter() const {
+ rtc::CritScope cs(&crit_);
+ return capture_count_;
+ }
+
+ int GetRenderCounter() const {
+ rtc::CritScope cs(&crit_);
+ return render_count_;
+ }
+
+ int CaptureMinusRenderCounters() const {
+ rtc::CritScope cs(&crit_);
+ return capture_count_ - render_count_;
+ }
+
+ int RenderMinusCaptureCounters() const {
+ return -CaptureMinusRenderCounters();
+ }
+
+ bool BothCountersExceedThreshold(int threshold) const {
+ rtc::CritScope cs(&crit_);
+ return (render_count_ > threshold && capture_count_ > threshold);
+ }
+
+ private:
+ mutable rtc::CriticalSection crit_;
+ int render_count_ GUARDED_BY(crit_) = 0;
+ int capture_count_ GUARDED_BY(crit_) = 0;
+};
+
+// Class that protects a flag using a lock.
+class LockedFlag {
+ public:
+ bool get_flag() const {
+ rtc::CritScope cs(&crit_);
+ return flag_;
+ }
+
+ void set_flag() {
+ rtc::CritScope cs(&crit_);
+ flag_ = true;
+ }
+
+ private:
+ mutable rtc::CriticalSection crit_;
+ bool flag_ GUARDED_BY(crit_) = false;
+};
+
+// Class for handling the timed processing on the render and capture threads.
+class TimedThreadApiProcessor {
+ public:
+ TimedThreadApiProcessor(ProcessorType processor_type,
+ Random* rand_gen,
+ FrameCounters* shared_counters_state,
+ LockedFlag* capture_call_checker,
+ CallSimulator* test_framework,
+ const SimulationConfig* simulation_config,
+ AudioProcessing* apm,
+ int num_durations_to_store,
+ float input_level,
+ int num_channels)
+ : rand_gen_(rand_gen),
+ frame_counters_(shared_counters_state),
+ capture_call_checker_(capture_call_checker),
+ test_(test_framework),
+ simulation_config_(simulation_config),
+ apm_(apm),
+ frame_data_(kMaxFrameSize),
+ clock_(webrtc::Clock::GetRealTimeClock()),
+ num_durations_to_store_(num_durations_to_store),
+ input_level_(input_level),
+ processor_type_(processor_type),
+ num_channels_(num_channels) {
+ api_call_durations_.reserve(num_durations_to_store_);
+ }
+
+ // Implements the callback functionality for the threads.
+ bool Process();
+
+ // Method for printing out the simulation statistics.
+ void print_processor_statistics(std::string processor_name) const {
+ const std::string modifier = "_api_call_duration";
+
+ // Lambda function for creating a test printout string.
+ auto create_mean_and_std_string = [](int64_t average,
+ int64_t standard_dev) {
+ std::string s = std::to_string(average);
+ s += ", ";
+ s += std::to_string(standard_dev);
+ return s;
+ };
+
+ const std::string sample_rate_name =
+ "_" + std::to_string(simulation_config_->sample_rate_hz) + "Hz";
+
+ webrtc::test::PrintResultMeanAndError(
+ "apm_timing", sample_rate_name, processor_name,
+ create_mean_and_std_string(GetDurationAverage(),
+ GetDurationStandardDeviation()),
+ "us", false);
+
+ if (kPrintAllDurations) {
+ std::string value_string = "";
+ for (int64_t duration : api_call_durations_) {
+ value_string += std::to_string(duration) + ",";
+ }
+ webrtc::test::PrintResultList("apm_call_durations", sample_rate_name,
+ processor_name, value_string, "us", false);
+ }
+ }
+
+ void AddDuration(int64_t duration) {
+ if (api_call_durations_.size() < num_durations_to_store_) {
+ api_call_durations_.push_back(duration);
+ }
+ }
+
+ private:
+ static const int kMaxCallDifference = 10;
+ static const int kMaxFrameSize = 480;
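+ // The first frames are excluded from the duration statistics to avoid
+ // start-up transients in the measurements.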
+ static const int kNumInitializationFrames = 5;
+
+ int64_t GetDurationStandardDeviation() const {
+ double variance = 0;
+ const int64_t average_duration = GetDurationAverage();
+ for (size_t k = kNumInitializationFrames; k < api_call_durations_.size();
+ k++) {
+ int64_t tmp = api_call_durations_[k] - average_duration;
+ variance += static_cast<double>(tmp * tmp);
+ }
+ const int denominator = rtc::checked_cast<int>(api_call_durations_.size()) -
+ kNumInitializationFrames;
+ return (denominator > 0
+ ? rtc::checked_cast<int64_t>(sqrt(variance / denominator))
+ : -1);
+ }
+
+ int64_t GetDurationAverage() const {
+ int64_t average_duration = 0;
+ for (size_t k = kNumInitializationFrames; k < api_call_durations_.size();
+ k++) {
+ average_duration += api_call_durations_[k];
+ }
+ const int denominator = rtc::checked_cast<int>(api_call_durations_.size()) -
+ kNumInitializationFrames;
+ return (denominator > 0 ? average_duration / denominator : -1);
+ }
+
+ int ProcessCapture() {
+ // Set the stream delay.
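+ // A fixed value is used here; the APM requires the delay to be set before
+ // each capture side processing call when the echo canceller is enabled.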
+ apm_->set_stream_delay_ms(30);
+
+ // Call and time the specified capture side API processing method.
+ const int64_t start_time = clock_->TimeInMicroseconds();
+ const int result = apm_->ProcessStream(
+ &frame_data_.input_frame[0], frame_data_.input_stream_config,
+ frame_data_.output_stream_config, &frame_data_.output_frame[0]);
+ const int64_t end_time = clock_->TimeInMicroseconds();
+
+ frame_counters_->IncreaseCaptureCounter();
+
+ AddDuration(end_time - start_time);
+
+ if (first_process_call_) {
+ // Flag that the capture side has been called at least once. This is
+ // needed to ensure that a capture call precedes the first render call
+ // (as implicitly required by the APM API).
+ capture_call_checker_->set_flag();
+ first_process_call_ = false;
+ }
+ return result;
+ }
+
+ bool ReadyToProcessCapture() {
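+ // The capture side may run ahead of the render side by at most
+ // kMaxCallDifference calls.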
+ return (frame_counters_->CaptureMinusRenderCounters() <=
+ kMaxCallDifference);
+ }
+
+ int ProcessRender() {
+ // Call and time the specified render side API processing method.
+ const int64_t start_time = clock_->TimeInMicroseconds();
+ const int result = apm_->ProcessReverseStream(
+ &frame_data_.input_frame[0], frame_data_.input_stream_config,
+ frame_data_.output_stream_config, &frame_data_.output_frame[0]);
+ const int64_t end_time = clock_->TimeInMicroseconds();
+ frame_counters_->IncreaseRenderCounter();
+
+ AddDuration(end_time - start_time);
+
+ return result;
+ }
+
+ bool ReadyToProcessRender() {
+ // Do not process until at least one capture call has been done.
+ // (implicitly required by the APM API).
+ if (first_process_call_ && !capture_call_checker_->get_flag()) {
+ return false;
+ }
+
+ // Ensure that the number of render and capture calls do not differ too
+ // much.
+ if (frame_counters_->RenderMinusCaptureCounters() > kMaxCallDifference) {
+ return false;
+ }
+
+ first_process_call_ = false;
+ return true;
+ }
+
+ void PrepareFrame() {
+ // Lambda function for populating a float multichannel audio frame
+ // with random data.
+ auto populate_audio_frame = [](float amplitude, size_t num_channels,
+ size_t samples_per_channel, Random* rand_gen,
+ float** frame) {
+ for (size_t ch = 0; ch < num_channels; ch++) {
+ for (size_t k = 0; k < samples_per_channel; k++) {
+ // Store random float number with a value between +-amplitude.
+ frame[ch][k] = amplitude * (2 * rand_gen->Rand<float>() - 1);
+ }
+ }
+ };
+
+ // Prepare the audio input data and metadata.
+ frame_data_.input_stream_config.set_sample_rate_hz(
+ simulation_config_->sample_rate_hz);
+ frame_data_.input_stream_config.set_num_channels(num_channels_);
+ frame_data_.input_stream_config.set_has_keyboard(false);
+ populate_audio_frame(input_level_, num_channels_,
+ (simulation_config_->sample_rate_hz *
+ AudioProcessing::kChunkSizeMs / 1000),
+ rand_gen_, &frame_data_.input_frame[0]);
+
+ // Prepare the float audio output data and metadata.
+ frame_data_.output_stream_config.set_sample_rate_hz(
+ simulation_config_->sample_rate_hz);
+ frame_data_.output_stream_config.set_num_channels(1);
+ frame_data_.output_stream_config.set_has_keyboard(false);
+ }
+
+ bool ReadyToProcess() {
+ switch (processor_type_) {
+ case ProcessorType::kRender:
+ return ReadyToProcessRender();
+ case ProcessorType::kCapture:
+ return ReadyToProcessCapture();
+ }
+
+ // Should not be reached, but the return statement is needed for the code to
+ // build successfully on Android.
+ RTC_NOTREACHED();
+ return false;
+ }
+
+ Random* rand_gen_ = nullptr;
+ FrameCounters* frame_counters_ = nullptr;
+ LockedFlag* capture_call_checker_ = nullptr;
+ CallSimulator* test_ = nullptr;
+ const SimulationConfig* const simulation_config_ = nullptr;
+ AudioProcessing* apm_ = nullptr;
+ AudioFrameData frame_data_;
+ webrtc::Clock* clock_;
+ const size_t num_durations_to_store_;
+ std::vector<int64_t> api_call_durations_;
+ const float input_level_;
+ bool first_process_call_ = true;
+ const ProcessorType processor_type_;
+ const int num_channels_ = 1;
+};
+
+// Class for managing the test simulation.
+class CallSimulator : public ::testing::TestWithParam<SimulationConfig> {
+ public:
+ CallSimulator()
+ : test_complete_(EventWrapper::Create()),
+ render_thread_(
+ new rtc::PlatformThread(RenderProcessorThreadFunc, this, "render")),
+ capture_thread_(new rtc::PlatformThread(CaptureProcessorThreadFunc,
+ this,
+ "capture")),
+ rand_gen_(42U),
+ simulation_config_(static_cast<SimulationConfig>(GetParam())) {}
+
+ // Run the call simulation with a timeout.
+ EventTypeWrapper Run() {
+ StartThreads();
+
+ EventTypeWrapper result = test_complete_->Wait(kTestTimeout);
+
+ StopThreads();
+
+ render_thread_state_->print_processor_statistics(
+ simulation_config_.SettingsDescription() + "_render");
+ capture_thread_state_->print_processor_statistics(
+ simulation_config_.SettingsDescription() + "_capture");
+
+ return result;
+ }
+
+ // Tests whether all the required render and capture side calls have been
+ // done.
+ bool MaybeEndTest() {
+ if (frame_counters_.BothCountersExceedThreshold(kMinNumFramesToProcess)) {
+ test_complete_->Set();
+ return true;
+ }
+ return false;
+ }
+
+ private:
+ static const float kCaptureInputFloatLevel;
+ static const float kRenderInputFloatLevel;
+ static const int kMinNumFramesToProcess = 150;
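+ // Timeout in milliseconds, chosen as three times the nominal test length
+ // of 10 ms per frame.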
+ static const int32_t kTestTimeout = 3 * 10 * kMinNumFramesToProcess;
+
+ // ::testing::TestWithParam<> implementation.
+ void TearDown() override { StopThreads(); }
+
+ // Stop all running threads.
+ void StopThreads() {
+ render_thread_->Stop();
+ capture_thread_->Stop();
+ }
+
+ // Simulator and APM setup.
+ void SetUp() override {
+ // Lambda function for setting the default APM runtime settings for desktop.
+ auto set_default_desktop_apm_runtime_settings = [](AudioProcessing* apm) {
+ ASSERT_EQ(apm->kNoError, apm->level_estimator()->Enable(true));
+ ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
+ ASSERT_EQ(apm->kNoError,
+ apm->gain_control()->set_mode(GainControl::kAdaptiveDigital));
+ ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
+ ASSERT_EQ(apm->kNoError, apm->noise_suppression()->Enable(true));
+ ASSERT_EQ(apm->kNoError, apm->voice_detection()->Enable(true));
+ ASSERT_EQ(apm->kNoError, apm->echo_control_mobile()->Enable(false));
+ ASSERT_EQ(apm->kNoError, apm->echo_cancellation()->Enable(true));
+ ASSERT_EQ(apm->kNoError, apm->echo_cancellation()->enable_metrics(true));
+ ASSERT_EQ(apm->kNoError,
+ apm->echo_cancellation()->enable_delay_logging(true));
+ };
+
+ // Lambda function for setting the default APM runtime settings for mobile.
+ auto set_default_mobile_apm_runtime_settings = [](AudioProcessing* apm) {
+ ASSERT_EQ(apm->kNoError, apm->level_estimator()->Enable(true));
+ ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
+ ASSERT_EQ(apm->kNoError,
+ apm->gain_control()->set_mode(GainControl::kAdaptiveDigital));
+ ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(true));
+ ASSERT_EQ(apm->kNoError, apm->noise_suppression()->Enable(true));
+ ASSERT_EQ(apm->kNoError, apm->voice_detection()->Enable(true));
+ ASSERT_EQ(apm->kNoError, apm->echo_control_mobile()->Enable(true));
+ ASSERT_EQ(apm->kNoError, apm->echo_cancellation()->Enable(false));
+ };
+
+ // Lambda function for turning off all of the APM runtime settings
+ // submodules.
+ auto turn_off_default_apm_runtime_settings = [](AudioProcessing* apm) {
+ ASSERT_EQ(apm->kNoError, apm->level_estimator()->Enable(false));
+ ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(false));
+ ASSERT_EQ(apm->kNoError,
+ apm->gain_control()->set_mode(GainControl::kAdaptiveDigital));
+ ASSERT_EQ(apm->kNoError, apm->gain_control()->Enable(false));
+ ASSERT_EQ(apm->kNoError, apm->noise_suppression()->Enable(false));
+ ASSERT_EQ(apm->kNoError, apm->voice_detection()->Enable(false));
+ ASSERT_EQ(apm->kNoError, apm->echo_control_mobile()->Enable(false));
+ ASSERT_EQ(apm->kNoError, apm->echo_cancellation()->Enable(false));
+ ASSERT_EQ(apm->kNoError, apm->echo_cancellation()->enable_metrics(false));
+ ASSERT_EQ(apm->kNoError,
+ apm->echo_cancellation()->enable_delay_logging(false));
+ };
+
+ // Lambda function for adding default desktop APM settings to a config.
+ auto add_default_desktop_config = [](Config* config) {
+ config->Set<ExtendedFilter>(new ExtendedFilter(true));
+ config->Set<DelayAgnostic>(new DelayAgnostic(true));
+ };
+
+ // Lambda function for adding beamformer settings to a config.
+ auto add_beamformer_config = [](Config* config) {
+ const size_t num_mics = 2;
+ const std::vector<Point> array_geometry =
+ ParseArrayGeometry("0 0 0 0.05 0 0", num_mics);
+ RTC_CHECK_EQ(array_geometry.size(), num_mics);
+
+ config->Set<Beamforming>(
+ new Beamforming(true, array_geometry,
+ SphericalPointf(DegreesToRadians(90), 0.f, 1.f)));
+ };
+
+ int num_capture_channels = 1;
+ switch (simulation_config_.simulation_settings) {
+ case SettingsType::kDefaultApmMobile: {
+ apm_.reset(AudioProcessingImpl::Create());
+ ASSERT_TRUE(!!apm_);
+ set_default_mobile_apm_runtime_settings(apm_.get());
+ break;
+ }
+ case SettingsType::kDefaultApmDesktop: {
+ Config config;
+ add_default_desktop_config(&config);
+ apm_.reset(AudioProcessingImpl::Create(config));
+ ASSERT_TRUE(!!apm_);
+ set_default_desktop_apm_runtime_settings(apm_.get());
+ apm_->SetExtraOptions(config);
+ break;
+ }
+ case SettingsType::kDefaultApmDesktopAndBeamformer: {
+ Config config;
+ add_beamformer_config(&config);
+ add_default_desktop_config(&config);
+ apm_.reset(AudioProcessingImpl::Create(config));
+ ASSERT_TRUE(!!apm_);
+ set_default_desktop_apm_runtime_settings(apm_.get());
+ apm_->SetExtraOptions(config);
+ num_capture_channels = 2;
+ break;
+ }
+ case SettingsType::kDefaultApmDesktopAndIntelligibilityEnhancer: {
+ Config config;
+ config.Set<Intelligibility>(new Intelligibility(true));
+ add_default_desktop_config(&config);
+ apm_.reset(AudioProcessingImpl::Create(config));
+ ASSERT_TRUE(!!apm_);
+ set_default_desktop_apm_runtime_settings(apm_.get());
+ apm_->SetExtraOptions(config);
+ break;
+ }
+ case SettingsType::kAllSubmodulesTurnedOff: {
+ apm_.reset(AudioProcessingImpl::Create());
+ ASSERT_TRUE(!!apm_);
+ turn_off_default_apm_runtime_settings(apm_.get());
+ break;
+ }
+ case SettingsType::kDefaultDesktopApmWithoutDelayAgnostic: {
+ Config config;
+ config.Set<ExtendedFilter>(new ExtendedFilter(true));
+ config.Set<DelayAgnostic>(new DelayAgnostic(false));
+ apm_.reset(AudioProcessingImpl::Create(config));
+ ASSERT_TRUE(!!apm_);
+ set_default_desktop_apm_runtime_settings(apm_.get());
+ apm_->SetExtraOptions(config);
+ break;
+ }
+ case SettingsType::kDefaultDesktopApmWithoutExtendedFilter: {
+ Config config;
+ config.Set<ExtendedFilter>(new ExtendedFilter(false));
+ config.Set<DelayAgnostic>(new DelayAgnostic(true));
+ apm_.reset(AudioProcessingImpl::Create(config));
+ ASSERT_TRUE(!!apm_);
+ set_default_desktop_apm_runtime_settings(apm_.get());
+ apm_->SetExtraOptions(config);
+ break;
+ }
+ }
+
+ render_thread_state_.reset(new TimedThreadApiProcessor(
+ ProcessorType::kRender, &rand_gen_, &frame_counters_,
+ &capture_call_checker_, this, &simulation_config_, apm_.get(),
+ kMinNumFramesToProcess, kRenderInputFloatLevel, 1));
+ capture_thread_state_.reset(new TimedThreadApiProcessor(
+ ProcessorType::kCapture, &rand_gen_, &frame_counters_,
+ &capture_call_checker_, this, &simulation_config_, apm_.get(),
+ kMinNumFramesToProcess, kCaptureInputFloatLevel, num_capture_channels));
+ }
+
+ // Thread callback for the render thread.
+ static bool RenderProcessorThreadFunc(void* context) {
+ return reinterpret_cast<CallSimulator*>(context)
+ ->render_thread_state_->Process();
+ }
+
+ // Thread callback for the capture thread.
+ static bool CaptureProcessorThreadFunc(void* context) {
+ return reinterpret_cast<CallSimulator*>(context)
+ ->capture_thread_state_->Process();
+ }
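The two static callbacks above exist because the thread wrapper takes a plain function pointer plus a void* context, so a member function cannot be handed over directly; the trampoline recovers the object from the opaque pointer and forwards to it. A minimal sketch of the pattern, with a hypothetical Worker class (the bool return follows the run-again convention used here):

    // Hypothetical Worker illustrating the static-trampoline pattern for a
    // thread API with the callback signature bool (*)(void*).
    class Worker {
     public:
      // The per-iteration logic lives in an ordinary member function.
      bool Process() { return ++iterations_ < 100; }

      // The trampoline matches the C-style callback signature and recovers
      // the instance from the opaque context pointer.
      static bool ThreadFunc(void* context) {
        return reinterpret_cast<Worker*>(context)->Process();
      }

     private:
      int iterations_ = 0;
    };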
+
+ // Start the threads used in the test.
+ void StartThreads() {
+ ASSERT_NO_FATAL_FAILURE(render_thread_->Start());
+ render_thread_->SetPriority(rtc::kRealtimePriority);
+ ASSERT_NO_FATAL_FAILURE(capture_thread_->Start());
+ capture_thread_->SetPriority(rtc::kRealtimePriority);
+ }
+
+ // Event handler for the test.
+ const rtc::scoped_ptr<EventWrapper> test_complete_;
+
+ // Thread related variables.
+ rtc::scoped_ptr<rtc::PlatformThread> render_thread_;
+ rtc::scoped_ptr<rtc::PlatformThread> capture_thread_;
+ Random rand_gen_;
+
+ rtc::scoped_ptr<AudioProcessing> apm_;
+ const SimulationConfig simulation_config_;
+ FrameCounters frame_counters_;
+ LockedFlag capture_call_checker_;
+ rtc::scoped_ptr<TimedThreadApiProcessor> render_thread_state_;
+ rtc::scoped_ptr<TimedThreadApiProcessor> capture_thread_state_;
+};
+
+// Implements the callback functionality for the threads.
+bool TimedThreadApiProcessor::Process() {
+ PrepareFrame();
+
+  // Busy-wait until it is OK to start processing. Note that SleepMs is
+  // not applicable since its millisecond granularity is too coarse for
+  // this purpose.
+ while (!ReadyToProcess()) {
+ }
+
+ int result = AudioProcessing::kNoError;
+ switch (processor_type_) {
+ case ProcessorType::kRender:
+ result = ProcessRender();
+ break;
+ case ProcessorType::kCapture:
+ result = ProcessCapture();
+ break;
+ }
+
+ EXPECT_EQ(result, AudioProcessing::kNoError);
+
+ return !test_->MaybeEndTest();
+}
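The spin loop above trades a busy core for sub-millisecond reaction time. A sketch of the trade-off, assuming C++11 <atomic>, <chrono> and <thread> (illustrative, not the test code):

    #include <atomic>
    #include <chrono>
    #include <thread>

    std::atomic<bool> ready{false};

    // Coarse: each wake-up can be late by a full millisecond or more,
    // depending on the scheduler, which distorts sub-millisecond pacing.
    void WaitCoarse() {
      while (!ready.load(std::memory_order_acquire))
        std::this_thread::sleep_for(std::chrono::milliseconds(1));
    }

    // Spin: reacts within roughly the cost of one atomic load, at the
    // price of pinning a core; tolerable in a short-lived performance test.
    void WaitSpin() {
      while (!ready.load(std::memory_order_acquire)) {
      }
    }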
+
+const float CallSimulator::kRenderInputFloatLevel = 0.5f;
+const float CallSimulator::kCaptureInputFloatLevel = 0.03125f;
+} // anonymous namespace
+
+TEST_P(CallSimulator, ApiCallDurationTest) {
+ // Run test and verify that it did not time out.
+ EXPECT_EQ(kEventSignaled, Run());
+}
+
+INSTANTIATE_TEST_CASE_P(
+ AudioProcessingPerformanceTest,
+ CallSimulator,
+ ::testing::ValuesIn(SimulationConfig::GenerateSimulationConfigs()));
+
+} // namespace webrtc
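For reference on the test machinery just above: TEST_P defines one parameterized test body, and INSTANTIATE_TEST_CASE_P expands it once per value from the generator, here once per SimulationConfig. A self-contained sketch of the mechanism (hypothetical test names; INSTANTIATE_TEST_CASE_P is the gtest spelling of this era, and the include path follows the WebRTC checkout):

    #include "testing/gtest/include/gtest/gtest.h"

    class RateTest : public ::testing::TestWithParam<int> {};

    TEST_P(RateTest, IsPositive) {
      // GetParam() yields one of the values listed in the instantiation.
      EXPECT_GT(GetParam(), 0);
    }

    INSTANTIATE_TEST_CASE_P(SampleRates,
                            RateTest,
                            ::testing::Values(8000, 16000, 32000, 48000));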
diff --git a/webrtc/modules/audio_processing/audio_processing_tests.gypi b/webrtc/modules/audio_processing/audio_processing_tests.gypi
index 0314c69b04..523602baba 100644
--- a/webrtc/modules/audio_processing/audio_processing_tests.gypi
+++ b/webrtc/modules/audio_processing/audio_processing_tests.gypi
@@ -128,7 +128,11 @@
'<(webrtc_root)/test/test.gyp:test_support',
'<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
],
- 'sources': [ 'test/audioproc_float.cc', ],
+ 'sources': [
+ 'test/audio_file_processor.cc',
+ 'test/audio_file_processor.h',
+ 'test/audioproc_float.cc',
+ ],
},
{
'target_name': 'unpack_aecdump',
diff --git a/webrtc/modules/audio_processing/beamformer/array_util.cc b/webrtc/modules/audio_processing/beamformer/array_util.cc
index 8aaeee9f59..6b1c474269 100644
--- a/webrtc/modules/audio_processing/beamformer/array_util.cc
+++ b/webrtc/modules/audio_processing/beamformer/array_util.cc
@@ -56,7 +56,7 @@ bool ArePerpendicular(const Point& a, const Point& b) {
return std::abs(DotProduct(a, b)) < kMaxDotProduct;
}
-rtc::Maybe<Point> GetDirectionIfLinear(
+rtc::Optional<Point> GetDirectionIfLinear(
const std::vector<Point>& array_geometry) {
RTC_DCHECK_GT(array_geometry.size(), 1u);
const Point first_pair_direction =
@@ -65,13 +65,14 @@ rtc::Maybe<Point> GetDirectionIfLinear(
const Point pair_direction =
PairDirection(array_geometry[i - 1], array_geometry[i]);
if (!AreParallel(first_pair_direction, pair_direction)) {
- return rtc::Maybe<Point>();
+ return rtc::Optional<Point>();
}
}
- return first_pair_direction;
+ return rtc::Optional<Point>(first_pair_direction);
}
-rtc::Maybe<Point> GetNormalIfPlanar(const std::vector<Point>& array_geometry) {
+rtc::Optional<Point> GetNormalIfPlanar(
+ const std::vector<Point>& array_geometry) {
RTC_DCHECK_GT(array_geometry.size(), 1u);
const Point first_pair_direction =
PairDirection(array_geometry[0], array_geometry[1]);
@@ -85,30 +86,30 @@ rtc::Maybe<Point> GetNormalIfPlanar(const std::vector<Point>& array_geometry) {
}
}
if (is_linear) {
- return rtc::Maybe<Point>();
+ return rtc::Optional<Point>();
}
const Point normal_direction =
CrossProduct(first_pair_direction, pair_direction);
for (; i < array_geometry.size(); ++i) {
pair_direction = PairDirection(array_geometry[i - 1], array_geometry[i]);
if (!ArePerpendicular(normal_direction, pair_direction)) {
- return rtc::Maybe<Point>();
+ return rtc::Optional<Point>();
}
}
- return normal_direction;
+ return rtc::Optional<Point>(normal_direction);
}
-rtc::Maybe<Point> GetArrayNormalIfExists(
+rtc::Optional<Point> GetArrayNormalIfExists(
const std::vector<Point>& array_geometry) {
- const rtc::Maybe<Point> direction = GetDirectionIfLinear(array_geometry);
+ const rtc::Optional<Point> direction = GetDirectionIfLinear(array_geometry);
if (direction) {
- return Point(direction->y(), -direction->x(), 0.f);
+ return rtc::Optional<Point>(Point(direction->y(), -direction->x(), 0.f));
}
- const rtc::Maybe<Point> normal = GetNormalIfPlanar(array_geometry);
+ const rtc::Optional<Point> normal = GetNormalIfPlanar(array_geometry);
if (normal && normal->z() < kMaxDotProduct) {
return normal;
}
- return rtc::Maybe<Point>();
+ return rtc::Optional<Point>();
}
Point AzimuthToPoint(float azimuth) {
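Beyond the rename from rtc::Maybe, note that every bare "return value;" above had to be wrapped, evidently because rtc::Optional's value constructor is explicit. A minimal sketch of the resulting pattern (FirstNonOrigin is hypothetical; it assumes rtc::Optional behaves like a stripped-down std::optional with an explicit value constructor, as the changes above suggest):

    rtc::Optional<Point> FirstNonOrigin(const std::vector<Point>& points) {
      for (const Point& p : points) {
        if (p.x() != 0.f || p.y() != 0.f || p.z() != 0.f) {
          // "return p;" would no longer compile: wrap explicitly instead.
          return rtc::Optional<Point>(p);
        }
      }
      return rtc::Optional<Point>();  // Default-constructed means "no value".
    }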
diff --git a/webrtc/modules/audio_processing/beamformer/array_util.h b/webrtc/modules/audio_processing/beamformer/array_util.h
index 7fff9735a1..f86ad5dee6 100644
--- a/webrtc/modules/audio_processing/beamformer/array_util.h
+++ b/webrtc/modules/audio_processing/beamformer/array_util.h
@@ -14,7 +14,7 @@
#include <cmath>
#include <vector>
-#include "webrtc/base/maybe.h"
+#include "webrtc/base/optional.h"
namespace webrtc {
@@ -59,15 +59,16 @@ float GetMinimumSpacing(const std::vector<Point>& array_geometry);
// If the given array geometry is linear it returns the direction without
// normalizing.
-rtc::Maybe<Point> GetDirectionIfLinear(
+rtc::Optional<Point> GetDirectionIfLinear(
const std::vector<Point>& array_geometry);
// If the given array geometry is planar it returns the normal without
// normalizing.
-rtc::Maybe<Point> GetNormalIfPlanar(const std::vector<Point>& array_geometry);
+rtc::Optional<Point> GetNormalIfPlanar(
+ const std::vector<Point>& array_geometry);
// Returns the normal of an array if it has one and it is in the xy-plane.
-rtc::Maybe<Point> GetArrayNormalIfExists(
+rtc::Optional<Point> GetArrayNormalIfExists(
const std::vector<Point>& array_geometry);
// The resulting Point will be in the xy-plane.
diff --git a/webrtc/modules/audio_processing/beamformer/complex_matrix.h b/webrtc/modules/audio_processing/beamformer/complex_matrix.h
index bfa3563b89..707c51564b 100644
--- a/webrtc/modules/audio_processing/beamformer/complex_matrix.h
+++ b/webrtc/modules/audio_processing/beamformer/complex_matrix.h
@@ -27,10 +27,10 @@ class ComplexMatrix : public Matrix<complex<T> > {
public:
ComplexMatrix() : Matrix<complex<T> >() {}
- ComplexMatrix(int num_rows, int num_columns)
+ ComplexMatrix(size_t num_rows, size_t num_columns)
: Matrix<complex<T> >(num_rows, num_columns) {}
- ComplexMatrix(const complex<T>* data, int num_rows, int num_columns)
+ ComplexMatrix(const complex<T>* data, size_t num_rows, size_t num_columns)
: Matrix<complex<T> >(data, num_rows, num_columns) {}
// Complex Matrix operations.
@@ -51,7 +51,7 @@ class ComplexMatrix : public Matrix<complex<T> > {
ComplexMatrix& ConjugateTranspose() {
this->CopyDataToScratch();
- int num_rows = this->num_rows();
+ size_t num_rows = this->num_rows();
this->SetNumRows(this->num_columns());
this->SetNumColumns(num_rows);
this->Resize();
@@ -82,8 +82,8 @@ class ComplexMatrix : public Matrix<complex<T> > {
private:
ComplexMatrix& ConjugateTranspose(const complex<T>* const* src) {
complex<T>* const* elements = this->elements();
- for (int i = 0; i < this->num_rows(); ++i) {
- for (int j = 0; j < this->num_columns(); ++j) {
+ for (size_t i = 0; i < this->num_rows(); ++i) {
+ for (size_t j = 0; j < this->num_columns(); ++j) {
elements[i][j] = conj(src[j][i]);
}
}
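Moving dimensions and loop indices from int to size_t, as above, removes signed/unsigned comparison warnings against .size(), but it changes how a reverse loop must be written: an unsigned index can never be negative, so the usual "i >= 0" condition never terminates. A short illustrative sketch (not from this patch):

    // BUG with unsigned i: the condition is always true, and --i wraps
    // around to SIZE_MAX instead of stopping:
    //   for (size_t i = num_rows - 1; i >= 0; --i) { ... }

    // Safe idiom: test, then decrement; visits num_rows - 1 down to 0.
    for (size_t i = num_rows; i-- > 0;) {
      // Use row i here.
    }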
diff --git a/webrtc/modules/audio_processing/beamformer/covariance_matrix_generator.cc b/webrtc/modules/audio_processing/beamformer/covariance_matrix_generator.cc
index d0728325fc..78f4df5ca9 100644
--- a/webrtc/modules/audio_processing/beamformer/covariance_matrix_generator.cc
+++ b/webrtc/modules/audio_processing/beamformer/covariance_matrix_generator.cc
@@ -27,7 +27,7 @@ float BesselJ0(float x) {
// Calculates the Euclidean norm for a row vector.
float Norm(const ComplexMatrix<float>& x) {
- RTC_CHECK_EQ(1, x.num_rows());
+ RTC_CHECK_EQ(1u, x.num_rows());
const size_t length = x.num_columns();
const complex<float>* elems = x.elements()[0];
float result = 0.f;
@@ -43,8 +43,8 @@ void CovarianceMatrixGenerator::UniformCovarianceMatrix(
float wave_number,
const std::vector<Point>& geometry,
ComplexMatrix<float>* mat) {
- RTC_CHECK_EQ(static_cast<int>(geometry.size()), mat->num_rows());
- RTC_CHECK_EQ(static_cast<int>(geometry.size()), mat->num_columns());
+ RTC_CHECK_EQ(geometry.size(), mat->num_rows());
+ RTC_CHECK_EQ(geometry.size(), mat->num_columns());
complex<float>* const* mat_els = mat->elements();
for (size_t i = 0; i < geometry.size(); ++i) {
@@ -68,8 +68,8 @@ void CovarianceMatrixGenerator::AngledCovarianceMatrix(
int sample_rate,
const std::vector<Point>& geometry,
ComplexMatrix<float>* mat) {
- RTC_CHECK_EQ(static_cast<int>(geometry.size()), mat->num_rows());
- RTC_CHECK_EQ(static_cast<int>(geometry.size()), mat->num_columns());
+ RTC_CHECK_EQ(geometry.size(), mat->num_rows());
+ RTC_CHECK_EQ(geometry.size(), mat->num_columns());
ComplexMatrix<float> interf_cov_vector(1, geometry.size());
ComplexMatrix<float> interf_cov_vector_transposed(geometry.size(), 1);
@@ -94,8 +94,8 @@ void CovarianceMatrixGenerator::PhaseAlignmentMasks(
const std::vector<Point>& geometry,
float angle,
ComplexMatrix<float>* mat) {
- RTC_CHECK_EQ(1, mat->num_rows());
- RTC_CHECK_EQ(static_cast<int>(geometry.size()), mat->num_columns());
+ RTC_CHECK_EQ(1u, mat->num_rows());
+ RTC_CHECK_EQ(geometry.size(), mat->num_columns());
float freq_in_hertz =
(static_cast<float>(frequency_bin) / fft_size) * sample_rate;
diff --git a/webrtc/modules/audio_processing/beamformer/matrix.h b/webrtc/modules/audio_processing/beamformer/matrix.h
index 162aef1dac..51c1cece97 100644
--- a/webrtc/modules/audio_processing/beamformer/matrix.h
+++ b/webrtc/modules/audio_processing/beamformer/matrix.h
@@ -67,7 +67,7 @@ class Matrix {
Matrix() : num_rows_(0), num_columns_(0) {}
// Allocates space for the elements and initializes all values to zero.
- Matrix(int num_rows, int num_columns)
+ Matrix(size_t num_rows, size_t num_columns)
: num_rows_(num_rows), num_columns_(num_columns) {
Resize();
scratch_data_.resize(num_rows_ * num_columns_);
@@ -75,7 +75,7 @@ class Matrix {
}
// Copies |data| into the new Matrix.
- Matrix(const T* data, int num_rows, int num_columns)
+ Matrix(const T* data, size_t num_rows, size_t num_columns)
: num_rows_(0), num_columns_(0) {
CopyFrom(data, num_rows, num_columns);
scratch_data_.resize(num_rows_ * num_columns_);
@@ -90,23 +90,23 @@ class Matrix {
}
// Copy |data| into the Matrix. The current data is lost.
- void CopyFrom(const T* const data, int num_rows, int num_columns) {
+ void CopyFrom(const T* const data, size_t num_rows, size_t num_columns) {
Resize(num_rows, num_columns);
memcpy(&data_[0], data, num_rows_ * num_columns_ * sizeof(data_[0]));
}
Matrix& CopyFromColumn(const T* const* src,
size_t column_index,
- int num_rows) {
+ size_t num_rows) {
Resize(1, num_rows);
- for (int i = 0; i < num_columns_; ++i) {
+ for (size_t i = 0; i < num_columns_; ++i) {
data_[i] = src[i][column_index];
}
return *this;
}
- void Resize(int num_rows, int num_columns) {
+ void Resize(size_t num_rows, size_t num_columns) {
if (num_rows != num_rows_ || num_columns != num_columns_) {
num_rows_ = num_rows;
num_columns_ = num_columns;
@@ -115,8 +115,8 @@ class Matrix {
}
// Accessors and mutators.
- int num_rows() const { return num_rows_; }
- int num_columns() const { return num_columns_; }
+ size_t num_rows() const { return num_rows_; }
+ size_t num_columns() const { return num_columns_; }
T* const* elements() { return &elements_[0]; }
const T* const* elements() const { return &elements_[0]; }
@@ -124,7 +124,7 @@ class Matrix {
RTC_CHECK_EQ(num_rows_, num_columns_);
T trace = 0;
- for (int i = 0; i < num_rows_; ++i) {
+ for (size_t i = 0; i < num_rows_; ++i) {
trace += elements_[i][i];
}
return trace;
@@ -282,8 +282,8 @@ class Matrix {
std::ostringstream ss;
ss << std::endl << "Matrix" << std::endl;
- for (int i = 0; i < num_rows_; ++i) {
- for (int j = 0; j < num_columns_; ++j) {
+ for (size_t i = 0; i < num_rows_; ++i) {
+ for (size_t j = 0; j < num_columns_; ++j) {
ss << elements_[i][j] << " ";
}
ss << std::endl;
@@ -294,8 +294,8 @@ class Matrix {
}
protected:
- void SetNumRows(const int num_rows) { num_rows_ = num_rows; }
- void SetNumColumns(const int num_columns) { num_columns_ = num_columns; }
+ void SetNumRows(const size_t num_rows) { num_rows_ = num_rows; }
+ void SetNumColumns(const size_t num_columns) { num_columns_ = num_columns; }
T* data() { return &data_[0]; }
const T* data() const { return &data_[0]; }
const T* const* scratch_elements() const { return &scratch_elements_[0]; }
@@ -307,7 +307,7 @@ class Matrix {
data_.resize(size);
elements_.resize(num_rows_);
- for (int i = 0; i < num_rows_; ++i) {
+ for (size_t i = 0; i < num_rows_; ++i) {
elements_[i] = &data_[i * num_columns_];
}
}
@@ -317,14 +317,14 @@ class Matrix {
scratch_data_ = data_;
scratch_elements_.resize(num_rows_);
- for (int i = 0; i < num_rows_; ++i) {
+ for (size_t i = 0; i < num_rows_; ++i) {
scratch_elements_[i] = &scratch_data_[i * num_columns_];
}
}
private:
- int num_rows_;
- int num_columns_;
+ size_t num_rows_;
+ size_t num_columns_;
std::vector<T> data_;
std::vector<T*> elements_;
@@ -336,8 +336,8 @@ class Matrix {
// Helpers for Transpose and Multiply operations that unify in-place and
// out-of-place solutions.
Matrix& Transpose(const T* const* src) {
- for (int i = 0; i < num_rows_; ++i) {
- for (int j = 0; j < num_columns_; ++j) {
+ for (size_t i = 0; i < num_rows_; ++i) {
+ for (size_t j = 0; j < num_columns_; ++j) {
elements_[i][j] = src[j][i];
}
}
@@ -345,11 +345,13 @@ class Matrix {
return *this;
}
- Matrix& Multiply(const T* const* lhs, int num_rows_rhs, const T* const* rhs) {
- for (int row = 0; row < num_rows_; ++row) {
- for (int col = 0; col < num_columns_; ++col) {
+ Matrix& Multiply(const T* const* lhs,
+ size_t num_rows_rhs,
+ const T* const* rhs) {
+ for (size_t row = 0; row < num_rows_; ++row) {
+ for (size_t col = 0; col < num_columns_; ++col) {
T cur_element = 0;
- for (int i = 0; i < num_rows_rhs; ++i) {
+ for (size_t i = 0; i < num_rows_rhs; ++i) {
cur_element += lhs[row][i] * rhs[i][col];
}
diff --git a/webrtc/modules/audio_processing/beamformer/matrix_test_helpers.h b/webrtc/modules/audio_processing/beamformer/matrix_test_helpers.h
index 7c58670068..9891a8220c 100644
--- a/webrtc/modules/audio_processing/beamformer/matrix_test_helpers.h
+++ b/webrtc/modules/audio_processing/beamformer/matrix_test_helpers.h
@@ -34,8 +34,8 @@ class MatrixTestHelpers {
const T* const* expected_elements = expected.elements();
const T* const* actual_elements = actual.elements();
- for (int i = 0; i < expected.num_rows(); ++i) {
- for (int j = 0; j < expected.num_columns(); ++j) {
+ for (size_t i = 0; i < expected.num_rows(); ++i) {
+ for (size_t j = 0; j < expected.num_columns(); ++j) {
EXPECT_EQ(expected_elements[i][j], actual_elements[i][j]);
}
}
@@ -48,8 +48,8 @@ class MatrixTestHelpers {
const float* const* expected_elements = expected.elements();
const float* const* actual_elements = actual.elements();
- for (int i = 0; i < expected.num_rows(); ++i) {
- for (int j = 0; j < expected.num_columns(); ++j) {
+ for (size_t i = 0; i < expected.num_rows(); ++i) {
+ for (size_t j = 0; j < expected.num_columns(); ++j) {
EXPECT_NEAR(expected_elements[i][j], actual_elements[i][j], kTolerance);
}
}
@@ -63,8 +63,8 @@ class MatrixTestHelpers {
const complex<float>* const* expected_elements = expected.elements();
const complex<float>* const* actual_elements = actual.elements();
- for (int i = 0; i < expected.num_rows(); ++i) {
- for (int j = 0; j < expected.num_columns(); ++j) {
+ for (size_t i = 0; i < expected.num_rows(); ++i) {
+ for (size_t j = 0; j < expected.num_columns(); ++j) {
EXPECT_NEAR(expected_elements[i][j].real(),
actual_elements[i][j].real(),
kTolerance);
@@ -84,8 +84,8 @@ class MatrixTestHelpers {
const complex<float>* const* expected_elements = expected.elements();
const complex<float>* const* actual_elements = actual.elements();
- for (int i = 0; i < expected.num_rows(); ++i) {
- for (int j = 0; j < expected.num_columns(); ++j) {
+ for (size_t i = 0; i < expected.num_rows(); ++i) {
+ for (size_t j = 0; j < expected.num_columns(); ++j) {
EXPECT_NEAR(expected_elements[i][j].real(),
actual_elements[i][j].real(),
tolerance);
diff --git a/webrtc/modules/audio_processing/beamformer/nonlinear_beamformer.cc b/webrtc/modules/audio_processing/beamformer/nonlinear_beamformer.cc
index 029fa089fc..6ea7234f6f 100644
--- a/webrtc/modules/audio_processing/beamformer/nonlinear_beamformer.cc
+++ b/webrtc/modules/audio_processing/beamformer/nonlinear_beamformer.cc
@@ -79,7 +79,7 @@ const float kCompensationGain = 2.f;
// The returned norm is clamped to be non-negative.
float Norm(const ComplexMatrix<float>& mat,
const ComplexMatrix<float>& norm_mat) {
- RTC_CHECK_EQ(norm_mat.num_rows(), 1);
+ RTC_CHECK_EQ(1u, norm_mat.num_rows());
RTC_CHECK_EQ(norm_mat.num_columns(), mat.num_rows());
RTC_CHECK_EQ(norm_mat.num_columns(), mat.num_columns());
@@ -89,8 +89,8 @@ float Norm(const ComplexMatrix<float>& mat,
const complex<float>* const* mat_els = mat.elements();
const complex<float>* const* norm_mat_els = norm_mat.elements();
- for (int i = 0; i < norm_mat.num_columns(); ++i) {
- for (int j = 0; j < norm_mat.num_columns(); ++j) {
+ for (size_t i = 0; i < norm_mat.num_columns(); ++i) {
+ for (size_t j = 0; j < norm_mat.num_columns(); ++j) {
first_product += conj(norm_mat_els[0][j]) * mat_els[j][i];
}
second_product += first_product * norm_mat_els[0][i];
@@ -102,15 +102,15 @@ float Norm(const ComplexMatrix<float>& mat,
// Does conjugate(|lhs|) * |rhs| for row vectors |lhs| and |rhs|.
complex<float> ConjugateDotProduct(const ComplexMatrix<float>& lhs,
const ComplexMatrix<float>& rhs) {
- RTC_CHECK_EQ(lhs.num_rows(), 1);
- RTC_CHECK_EQ(rhs.num_rows(), 1);
+ RTC_CHECK_EQ(1u, lhs.num_rows());
+ RTC_CHECK_EQ(1u, rhs.num_rows());
RTC_CHECK_EQ(lhs.num_columns(), rhs.num_columns());
const complex<float>* const* lhs_elements = lhs.elements();
const complex<float>* const* rhs_elements = rhs.elements();
complex<float> result = complex<float>(0.f, 0.f);
- for (int i = 0; i < lhs.num_columns(); ++i) {
+ for (size_t i = 0; i < lhs.num_columns(); ++i) {
result += conj(lhs_elements[0][i]) * rhs_elements[0][i];
}
@@ -126,8 +126,8 @@ size_t Round(float x) {
float SumAbs(const ComplexMatrix<float>& mat) {
float sum_abs = 0.f;
const complex<float>* const* mat_els = mat.elements();
- for (int i = 0; i < mat.num_rows(); ++i) {
- for (int j = 0; j < mat.num_columns(); ++j) {
+ for (size_t i = 0; i < mat.num_rows(); ++i) {
+ for (size_t j = 0; j < mat.num_columns(); ++j) {
sum_abs += std::abs(mat_els[i][j]);
}
}
@@ -138,8 +138,8 @@ float SumAbs(const ComplexMatrix<float>& mat) {
float SumSquares(const ComplexMatrix<float>& mat) {
float sum_squares = 0.f;
const complex<float>* const* mat_els = mat.elements();
- for (int i = 0; i < mat.num_rows(); ++i) {
- for (int j = 0; j < mat.num_columns(); ++j) {
+ for (size_t i = 0; i < mat.num_rows(); ++i) {
+ for (size_t j = 0; j < mat.num_columns(); ++j) {
float abs_value = std::abs(mat_els[i][j]);
sum_squares += abs_value * abs_value;
}
@@ -150,20 +150,20 @@ float SumSquares(const ComplexMatrix<float>& mat) {
// Does |out| = |in|.' * conj(|in|) for row vector |in|.
void TransposedConjugatedProduct(const ComplexMatrix<float>& in,
ComplexMatrix<float>* out) {
- RTC_CHECK_EQ(in.num_rows(), 1);
+ RTC_CHECK_EQ(1u, in.num_rows());
RTC_CHECK_EQ(out->num_rows(), in.num_columns());
RTC_CHECK_EQ(out->num_columns(), in.num_columns());
const complex<float>* in_elements = in.elements()[0];
complex<float>* const* out_elements = out->elements();
- for (int i = 0; i < out->num_rows(); ++i) {
- for (int j = 0; j < out->num_columns(); ++j) {
+ for (size_t i = 0; i < out->num_rows(); ++i) {
+ for (size_t j = 0; j < out->num_columns(); ++j) {
out_elements[i][j] = in_elements[i] * conj(in_elements[j]);
}
}
}
std::vector<Point> GetCenteredArray(std::vector<Point> array_geometry) {
- for (int dim = 0; dim < 3; ++dim) {
+ for (size_t dim = 0; dim < 3; ++dim) {
float center = 0.f;
for (size_t i = 0; i < array_geometry.size(); ++i) {
center += array_geometry[i].c[dim];
@@ -379,7 +379,7 @@ void NonlinearBeamformer::ProcessChunk(const ChannelBuffer<float>& input,
(high_pass_postfilter_mask_ - old_high_pass_mask) /
input.num_frames_per_band();
// Apply the smoothed high-pass mask to the first channel of each band.
- // This can be done because the effct of the linear beamformer is negligible
+ // This can be done because the effect of the linear beamformer is negligible
// compared to the post-filter.
for (size_t i = 1; i < input.num_bands(); ++i) {
float smoothed_mask = old_high_pass_mask;
@@ -408,13 +408,13 @@ bool NonlinearBeamformer::IsInBeam(const SphericalPointf& spherical_point) {
}
void NonlinearBeamformer::ProcessAudioBlock(const complex_f* const* input,
- int num_input_channels,
+ size_t num_input_channels,
size_t num_freq_bins,
- int num_output_channels,
+ size_t num_output_channels,
complex_f* const* output) {
- RTC_CHECK_EQ(num_freq_bins, kNumFreqBins);
- RTC_CHECK_EQ(num_input_channels, num_input_channels_);
- RTC_CHECK_EQ(num_output_channels, 1);
+ RTC_CHECK_EQ(kNumFreqBins, num_freq_bins);
+ RTC_CHECK_EQ(num_input_channels_, num_input_channels);
+ RTC_CHECK_EQ(1u, num_output_channels);
// Calculating the post-filter masks. Note that we need two for each
// frequency bin to account for the positive and negative interferer
@@ -483,7 +483,7 @@ void NonlinearBeamformer::ApplyMasks(const complex_f* const* input,
const complex_f* delay_sum_mask_els =
normalized_delay_sum_masks_[f_ix].elements()[0];
- for (int c_ix = 0; c_ix < num_input_channels_; ++c_ix) {
+ for (size_t c_ix = 0; c_ix < num_input_channels_; ++c_ix) {
output_channel[f_ix] += input[c_ix][f_ix] * delay_sum_mask_els[c_ix];
}
diff --git a/webrtc/modules/audio_processing/beamformer/nonlinear_beamformer.h b/webrtc/modules/audio_processing/beamformer/nonlinear_beamformer.h
index 565c1f349f..29c416ca91 100644
--- a/webrtc/modules/audio_processing/beamformer/nonlinear_beamformer.h
+++ b/webrtc/modules/audio_processing/beamformer/nonlinear_beamformer.h
@@ -67,9 +67,9 @@ class NonlinearBeamformer
// Process one frequency-domain block of audio. This is where the fun
// happens. Implements LappedTransform::Callback.
void ProcessAudioBlock(const complex<float>* const* input,
- int num_input_channels,
+ size_t num_input_channels,
size_t num_freq_bins,
- int num_output_channels,
+ size_t num_output_channels,
complex<float>* const* output) override;
private:
@@ -129,12 +129,12 @@ class NonlinearBeamformer
float window_[kFftSize];
// Parameters exposed to the user.
- const int num_input_channels_;
+ const size_t num_input_channels_;
int sample_rate_hz_;
const std::vector<Point> array_geometry_;
// The normal direction of the array if it has one and it is in the xy-plane.
- const rtc::Maybe<Point> array_normal_;
+ const rtc::Optional<Point> array_normal_;
// Minimum spacing between microphone pairs.
const float min_mic_spacing_;
diff --git a/webrtc/modules/audio_processing/beamformer/nonlinear_beamformer_test.cc b/webrtc/modules/audio_processing/beamformer/nonlinear_beamformer_test.cc
index cc752485e9..d187552692 100644
--- a/webrtc/modules/audio_processing/beamformer/nonlinear_beamformer_test.cc
+++ b/webrtc/modules/audio_processing/beamformer/nonlinear_beamformer_test.cc
@@ -12,6 +12,7 @@
#include "gflags/gflags.h"
#include "webrtc/base/checks.h"
+#include "webrtc/base/format_macros.h"
#include "webrtc/common_audio/channel_buffer.h"
#include "webrtc/common_audio/wav_file.h"
#include "webrtc/modules/audio_processing/beamformer/nonlinear_beamformer.h"
@@ -52,9 +53,9 @@ int main(int argc, char* argv[]) {
NonlinearBeamformer bf(array_geometry);
bf.Initialize(kChunkSizeMs, in_file.sample_rate());
- printf("Input file: %s\nChannels: %d, Sample rate: %d Hz\n\n",
+ printf("Input file: %s\nChannels: %" PRIuS ", Sample rate: %d Hz\n\n",
FLAGS_i.c_str(), in_file.num_channels(), in_file.sample_rate());
- printf("Output file: %s\nChannels: %d, Sample rate: %d Hz\n\n",
+ printf("Output file: %s\nChannels: %" PRIuS ", Sample rate: %d Hz\n\n",
FLAGS_o.c_str(), out_file.num_channels(), out_file.sample_rate());
ChannelBuffer<float> in_buf(
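Because num_channels() now returns size_t, the old %d conversions above were mismatched; PRIuS, pulled in through the new webrtc/base/format_macros.h include, expands to the platform's size_t length modifier. A small sketch (the function name is hypothetical; on C99-conforming runtimes plain "%zu" is equivalent):

    #include <cstddef>
    #include <cstdio>

    #include "webrtc/base/format_macros.h"

    void PrintChannelCount(size_t num_channels) {
      std::printf("Channels: %" PRIuS "\n", num_channels);  // WebRTC macro.
      // std::printf("Channels: %zu\n", num_channels);  // C99 equivalent,
      //                                                // missing on old MSVC.
    }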
diff --git a/webrtc/modules/audio_processing/common.h b/webrtc/modules/audio_processing/common.h
index ed8a0544c3..d4ddb92b50 100644
--- a/webrtc/modules/audio_processing/common.h
+++ b/webrtc/modules/audio_processing/common.h
@@ -17,7 +17,7 @@
namespace webrtc {
-static inline int ChannelsFromLayout(AudioProcessing::ChannelLayout layout) {
+static inline size_t ChannelsFromLayout(AudioProcessing::ChannelLayout layout) {
switch (layout) {
case AudioProcessing::kMono:
case AudioProcessing::kMonoAndKeyboard:
@@ -27,7 +27,7 @@ static inline int ChannelsFromLayout(AudioProcessing::ChannelLayout layout) {
return 2;
}
assert(false);
- return -1;
+ return 0;
}
} // namespace webrtc
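The -1 to 0 change above is forced by the new unsigned return type: static_cast<size_t>(-1) is SIZE_MAX, which a caller could mistake for a huge but valid channel count, whereas zero channels is unambiguously invalid. A sketch of the hazard (function names hypothetical):

    #include <cstddef>
    #include <cstdint>

    size_t BadSentinel() {
      return static_cast<size_t>(-1);  // Silently becomes SIZE_MAX.
    }

    size_t ChannelsOrZero(int layout_tag) {
      switch (layout_tag) {
        case 0:
          return 1;  // e.g. mono
        case 1:
          return 2;  // e.g. stereo
      }
      return 0;  // Unreachable in practice; 0 is an impossible channel count.
    }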
diff --git a/webrtc/modules/audio_processing/echo_cancellation_impl.cc b/webrtc/modules/audio_processing/echo_cancellation_impl.cc
index 56ee9e0fff..debc597c54 100644
--- a/webrtc/modules/audio_processing/echo_cancellation_impl.cc
+++ b/webrtc/modules/audio_processing/echo_cancellation_impl.cc
@@ -16,9 +16,8 @@
extern "C" {
#include "webrtc/modules/audio_processing/aec/aec_core.h"
}
-#include "webrtc/modules/audio_processing/aec/include/echo_cancellation.h"
+#include "webrtc/modules/audio_processing/aec/echo_cancellation.h"
#include "webrtc/modules/audio_processing/audio_buffer.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
namespace webrtc {
@@ -53,13 +52,22 @@ AudioProcessing::Error MapError(int err) {
return AudioProcessing::kUnspecifiedError;
}
}
+
+// Maximum length that a frame of samples can have.
+static const size_t kMaxAllowedValuesOfSamplesPerFrame = 160;
+// Maximum number of frames to buffer in the render queue.
+// TODO(peah): Decrease this once we properly handle hugely unbalanced
+// reverse and forward call numbers.
+static const size_t kMaxNumFramesToBuffer = 100;
} // namespace
EchoCancellationImpl::EchoCancellationImpl(const AudioProcessing* apm,
- CriticalSectionWrapper* crit)
+ rtc::CriticalSection* crit_render,
+ rtc::CriticalSection* crit_capture)
: ProcessingComponent(),
apm_(apm),
- crit_(crit),
+ crit_render_(crit_render),
+ crit_capture_(crit_capture),
drift_compensation_enabled_(false),
metrics_enabled_(false),
suppression_level_(kModerateSuppression),
@@ -68,87 +76,131 @@ EchoCancellationImpl::EchoCancellationImpl(const AudioProcessing* apm,
stream_has_echo_(false),
delay_logging_enabled_(false),
extended_filter_enabled_(false),
- delay_agnostic_enabled_(false) {
+ delay_agnostic_enabled_(false),
+ render_queue_element_max_size_(0) {
+ RTC_DCHECK(apm);
+ RTC_DCHECK(crit_render);
+ RTC_DCHECK(crit_capture);
}
EchoCancellationImpl::~EchoCancellationImpl() {}
int EchoCancellationImpl::ProcessRenderAudio(const AudioBuffer* audio) {
+ rtc::CritScope cs_render(crit_render_);
if (!is_component_enabled()) {
- return apm_->kNoError;
+ return AudioProcessing::kNoError;
}
assert(audio->num_frames_per_band() <= 160);
assert(audio->num_channels() == apm_->num_reverse_channels());
- int err = apm_->kNoError;
+ int err = AudioProcessing::kNoError;
// The ordering convention must be followed to pass to the correct AEC.
size_t handle_index = 0;
- for (int i = 0; i < apm_->num_output_channels(); i++) {
- for (int j = 0; j < audio->num_channels(); j++) {
+ render_queue_buffer_.clear();
+ for (size_t i = 0; i < apm_->num_output_channels(); i++) {
+ for (size_t j = 0; j < audio->num_channels(); j++) {
Handle* my_handle = static_cast<Handle*>(handle(handle_index));
- err = WebRtcAec_BufferFarend(
- my_handle,
- audio->split_bands_const_f(j)[kBand0To8kHz],
+      // Retrieve any error code produced by the buffering of the farend
+      // signal.
+ err = WebRtcAec_GetBufferFarendError(
+ my_handle, audio->split_bands_const_f(j)[kBand0To8kHz],
audio->num_frames_per_band());
- if (err != apm_->kNoError) {
- return GetHandleError(my_handle); // TODO(ajm): warning possible?
+ if (err != AudioProcessing::kNoError) {
+ return MapError(err); // TODO(ajm): warning possible?
}
- handle_index++;
+ // Buffer the samples in the render queue.
+ render_queue_buffer_.insert(render_queue_buffer_.end(),
+ audio->split_bands_const_f(j)[kBand0To8kHz],
+ (audio->split_bands_const_f(j)[kBand0To8kHz] +
+ audio->num_frames_per_band()));
}
}
- return apm_->kNoError;
+ // Insert the samples into the queue.
+ if (!render_signal_queue_->Insert(&render_queue_buffer_)) {
+ // The data queue is full and needs to be emptied.
+ ReadQueuedRenderData();
+
+ // Retry the insert (should always work).
+ RTC_DCHECK_EQ(render_signal_queue_->Insert(&render_queue_buffer_), true);
+ }
+
+ return AudioProcessing::kNoError;
+}
+
+// Reads chunks of data that were queued on the render side and buffers them
+// into the farend signal of the AEC.
+void EchoCancellationImpl::ReadQueuedRenderData() {
+ rtc::CritScope cs_capture(crit_capture_);
+ if (!is_component_enabled()) {
+ return;
+ }
+
+ while (render_signal_queue_->Remove(&capture_queue_buffer_)) {
+ size_t handle_index = 0;
+ size_t buffer_index = 0;
+ const size_t num_frames_per_band =
+ capture_queue_buffer_.size() /
+ (apm_->num_output_channels() * apm_->num_reverse_channels());
+ for (size_t i = 0; i < apm_->num_output_channels(); i++) {
+ for (size_t j = 0; j < apm_->num_reverse_channels(); j++) {
+ Handle* my_handle = static_cast<Handle*>(handle(handle_index));
+ WebRtcAec_BufferFarend(my_handle, &capture_queue_buffer_[buffer_index],
+ num_frames_per_band);
+
+ buffer_index += num_frames_per_band;
+ handle_index++;
+ }
+ }
+ }
}
int EchoCancellationImpl::ProcessCaptureAudio(AudioBuffer* audio) {
+ rtc::CritScope cs_capture(crit_capture_);
if (!is_component_enabled()) {
- return apm_->kNoError;
+ return AudioProcessing::kNoError;
}
if (!apm_->was_stream_delay_set()) {
- return apm_->kStreamParameterNotSetError;
+ return AudioProcessing::kStreamParameterNotSetError;
}
if (drift_compensation_enabled_ && !was_stream_drift_set_) {
- return apm_->kStreamParameterNotSetError;
+ return AudioProcessing::kStreamParameterNotSetError;
}
assert(audio->num_frames_per_band() <= 160);
- assert(audio->num_channels() == apm_->num_output_channels());
+ assert(audio->num_channels() == apm_->num_proc_channels());
- int err = apm_->kNoError;
+ int err = AudioProcessing::kNoError;
// The ordering convention must be followed to pass to the correct AEC.
size_t handle_index = 0;
stream_has_echo_ = false;
- for (int i = 0; i < audio->num_channels(); i++) {
- for (int j = 0; j < apm_->num_reverse_channels(); j++) {
+ for (size_t i = 0; i < audio->num_channels(); i++) {
+ for (size_t j = 0; j < apm_->num_reverse_channels(); j++) {
Handle* my_handle = handle(handle_index);
- err = WebRtcAec_Process(
- my_handle,
- audio->split_bands_const_f(i),
- audio->num_bands(),
- audio->split_bands_f(i),
- audio->num_frames_per_band(),
- apm_->stream_delay_ms(),
- stream_drift_samples_);
-
- if (err != apm_->kNoError) {
- err = GetHandleError(my_handle);
+ err = WebRtcAec_Process(my_handle, audio->split_bands_const_f(i),
+ audio->num_bands(), audio->split_bands_f(i),
+ audio->num_frames_per_band(),
+ apm_->stream_delay_ms(), stream_drift_samples_);
+
+ if (err != AudioProcessing::kNoError) {
+ err = MapError(err);
// TODO(ajm): Figure out how to return warnings properly.
- if (err != apm_->kBadStreamParameterWarning) {
+ if (err != AudioProcessing::kBadStreamParameterWarning) {
return err;
}
}
int status = 0;
err = WebRtcAec_get_echo_status(my_handle, &status);
- if (err != apm_->kNoError) {
- return GetHandleError(my_handle);
+ if (err != AudioProcessing::kNoError) {
+ return MapError(err);
}
if (status == 1) {
@@ -160,77 +212,92 @@ int EchoCancellationImpl::ProcessCaptureAudio(AudioBuffer* audio) {
}
was_stream_drift_set_ = false;
- return apm_->kNoError;
+ return AudioProcessing::kNoError;
}
int EchoCancellationImpl::Enable(bool enable) {
- CriticalSectionScoped crit_scoped(crit_);
+ // Run in a single-threaded manner.
+ rtc::CritScope cs_render(crit_render_);
+ rtc::CritScope cs_capture(crit_capture_);
// Ensure AEC and AECM are not both enabled.
+ // The is_enabled call is safe from a deadlock perspective
+ // as both locks are already held in the correct order.
if (enable && apm_->echo_control_mobile()->is_enabled()) {
- return apm_->kBadParameterError;
+ return AudioProcessing::kBadParameterError;
}
return EnableComponent(enable);
}
bool EchoCancellationImpl::is_enabled() const {
+ rtc::CritScope cs(crit_capture_);
return is_component_enabled();
}
int EchoCancellationImpl::set_suppression_level(SuppressionLevel level) {
- CriticalSectionScoped crit_scoped(crit_);
- if (MapSetting(level) == -1) {
- return apm_->kBadParameterError;
+ {
+ if (MapSetting(level) == -1) {
+ return AudioProcessing::kBadParameterError;
+ }
+ rtc::CritScope cs(crit_capture_);
+ suppression_level_ = level;
}
-
- suppression_level_ = level;
return Configure();
}
EchoCancellation::SuppressionLevel EchoCancellationImpl::suppression_level()
const {
+ rtc::CritScope cs(crit_capture_);
return suppression_level_;
}
int EchoCancellationImpl::enable_drift_compensation(bool enable) {
- CriticalSectionScoped crit_scoped(crit_);
- drift_compensation_enabled_ = enable;
+ {
+ rtc::CritScope cs(crit_capture_);
+ drift_compensation_enabled_ = enable;
+ }
return Configure();
}
bool EchoCancellationImpl::is_drift_compensation_enabled() const {
+ rtc::CritScope cs(crit_capture_);
return drift_compensation_enabled_;
}
void EchoCancellationImpl::set_stream_drift_samples(int drift) {
+ rtc::CritScope cs(crit_capture_);
was_stream_drift_set_ = true;
stream_drift_samples_ = drift;
}
int EchoCancellationImpl::stream_drift_samples() const {
+ rtc::CritScope cs(crit_capture_);
return stream_drift_samples_;
}
int EchoCancellationImpl::enable_metrics(bool enable) {
- CriticalSectionScoped crit_scoped(crit_);
- metrics_enabled_ = enable;
+ {
+ rtc::CritScope cs(crit_capture_);
+ metrics_enabled_ = enable;
+ }
return Configure();
}
bool EchoCancellationImpl::are_metrics_enabled() const {
+ rtc::CritScope cs(crit_capture_);
return metrics_enabled_;
}
// TODO(ajm): we currently just use the metrics from the first AEC. Think more
// about the best way to extend this to multi-channel.
int EchoCancellationImpl::GetMetrics(Metrics* metrics) {
- CriticalSectionScoped crit_scoped(crit_);
+ rtc::CritScope cs(crit_capture_);
if (metrics == NULL) {
- return apm_->kNullPointerError;
+ return AudioProcessing::kNullPointerError;
}
if (!is_component_enabled() || !metrics_enabled_) {
- return apm_->kNotEnabledError;
+ return AudioProcessing::kNotEnabledError;
}
AecMetrics my_metrics;
@@ -239,8 +306,8 @@ int EchoCancellationImpl::GetMetrics(Metrics* metrics) {
Handle* my_handle = static_cast<Handle*>(handle(0));
int err = WebRtcAec_GetMetrics(my_handle, &my_metrics);
- if (err != apm_->kNoError) {
- return GetHandleError(my_handle);
+ if (err != AudioProcessing::kNoError) {
+ return MapError(err);
}
metrics->residual_echo_return_loss.instant = my_metrics.rerl.instant;
@@ -263,62 +330,70 @@ int EchoCancellationImpl::GetMetrics(Metrics* metrics) {
metrics->a_nlp.maximum = my_metrics.aNlp.max;
metrics->a_nlp.minimum = my_metrics.aNlp.min;
- return apm_->kNoError;
+ return AudioProcessing::kNoError;
}
bool EchoCancellationImpl::stream_has_echo() const {
+ rtc::CritScope cs(crit_capture_);
return stream_has_echo_;
}
int EchoCancellationImpl::enable_delay_logging(bool enable) {
- CriticalSectionScoped crit_scoped(crit_);
- delay_logging_enabled_ = enable;
+ {
+ rtc::CritScope cs(crit_capture_);
+ delay_logging_enabled_ = enable;
+ }
return Configure();
}
bool EchoCancellationImpl::is_delay_logging_enabled() const {
+ rtc::CritScope cs(crit_capture_);
return delay_logging_enabled_;
}
bool EchoCancellationImpl::is_delay_agnostic_enabled() const {
+ rtc::CritScope cs(crit_capture_);
return delay_agnostic_enabled_;
}
bool EchoCancellationImpl::is_extended_filter_enabled() const {
+ rtc::CritScope cs(crit_capture_);
return extended_filter_enabled_;
}
// TODO(bjornv): How should we handle the multi-channel case?
int EchoCancellationImpl::GetDelayMetrics(int* median, int* std) {
+ rtc::CritScope cs(crit_capture_);
float fraction_poor_delays = 0;
return GetDelayMetrics(median, std, &fraction_poor_delays);
}
int EchoCancellationImpl::GetDelayMetrics(int* median, int* std,
float* fraction_poor_delays) {
- CriticalSectionScoped crit_scoped(crit_);
+ rtc::CritScope cs(crit_capture_);
if (median == NULL) {
- return apm_->kNullPointerError;
+ return AudioProcessing::kNullPointerError;
}
if (std == NULL) {
- return apm_->kNullPointerError;
+ return AudioProcessing::kNullPointerError;
}
if (!is_component_enabled() || !delay_logging_enabled_) {
- return apm_->kNotEnabledError;
+ return AudioProcessing::kNotEnabledError;
}
Handle* my_handle = static_cast<Handle*>(handle(0));
- if (WebRtcAec_GetDelayMetrics(my_handle, median, std, fraction_poor_delays) !=
- apm_->kNoError) {
- return GetHandleError(my_handle);
+ const int err =
+ WebRtcAec_GetDelayMetrics(my_handle, median, std, fraction_poor_delays);
+ if (err != AudioProcessing::kNoError) {
+ return MapError(err);
}
- return apm_->kNoError;
+ return AudioProcessing::kNoError;
}
struct AecCore* EchoCancellationImpl::aec_core() const {
- CriticalSectionScoped crit_scoped(crit_);
+ rtc::CritScope cs(crit_capture_);
if (!is_component_enabled()) {
return NULL;
}
@@ -328,16 +403,51 @@ struct AecCore* EchoCancellationImpl::aec_core() const {
int EchoCancellationImpl::Initialize() {
int err = ProcessingComponent::Initialize();
- if (err != apm_->kNoError || !is_component_enabled()) {
- return err;
+ {
+ rtc::CritScope cs(crit_capture_);
+ if (err != AudioProcessing::kNoError || !is_component_enabled()) {
+ return err;
+ }
}
- return apm_->kNoError;
+ AllocateRenderQueue();
+
+ return AudioProcessing::kNoError;
+}
+
+void EchoCancellationImpl::AllocateRenderQueue() {
+ const size_t new_render_queue_element_max_size = std::max<size_t>(
+ static_cast<size_t>(1),
+ kMaxAllowedValuesOfSamplesPerFrame * num_handles_required());
+
+ rtc::CritScope cs_render(crit_render_);
+ rtc::CritScope cs_capture(crit_capture_);
+
+ // Reallocate the queue if the queue item size is too small to fit the
+ // data to put in the queue.
+ if (render_queue_element_max_size_ < new_render_queue_element_max_size) {
+ render_queue_element_max_size_ = new_render_queue_element_max_size;
+
+ std::vector<float> template_queue_element(render_queue_element_max_size_);
+
+ render_signal_queue_.reset(
+ new SwapQueue<std::vector<float>, RenderQueueItemVerifier<float>>(
+ kMaxNumFramesToBuffer, template_queue_element,
+ RenderQueueItemVerifier<float>(render_queue_element_max_size_)));
+
+ render_queue_buffer_.resize(render_queue_element_max_size_);
+ capture_queue_buffer_.resize(render_queue_element_max_size_);
+ } else {
+ render_signal_queue_->Clear();
+ }
}
void EchoCancellationImpl::SetExtraOptions(const Config& config) {
- extended_filter_enabled_ = config.Get<ExtendedFilter>().enabled;
- delay_agnostic_enabled_ = config.Get<DelayAgnostic>().enabled;
+ {
+ rtc::CritScope cs(crit_capture_);
+ extended_filter_enabled_ = config.Get<ExtendedFilter>().enabled;
+ delay_agnostic_enabled_ = config.Get<DelayAgnostic>().enabled;
+ }
Configure();
}
@@ -351,23 +461,25 @@ void EchoCancellationImpl::DestroyHandle(void* handle) const {
}
int EchoCancellationImpl::InitializeHandle(void* handle) const {
+  // Not locked as it only relies on the APM public API, which is thread safe.
+
assert(handle != NULL);
// TODO(ajm): Drift compensation is disabled in practice. If restored, it
// should be managed internally and not depend on the hardware sample rate.
// For now, just hardcode a 48 kHz value.
return WebRtcAec_Init(static_cast<Handle*>(handle),
- apm_->proc_sample_rate_hz(),
- 48000);
+ apm_->proc_sample_rate_hz(), 48000);
}
int EchoCancellationImpl::ConfigureHandle(void* handle) const {
+ rtc::CritScope cs_render(crit_render_);
+ rtc::CritScope cs_capture(crit_capture_);
assert(handle != NULL);
AecConfig config;
config.metricsMode = metrics_enabled_;
config.nlpMode = MapSetting(suppression_level_);
config.skewMode = drift_compensation_enabled_;
config.delay_logging = delay_logging_enabled_;
-
WebRtcAec_enable_extended_filter(
WebRtcAec_aec_core(static_cast<Handle*>(handle)),
extended_filter_enabled_ ? 1 : 0);
@@ -377,13 +489,14 @@ int EchoCancellationImpl::ConfigureHandle(void* handle) const {
return WebRtcAec_set_config(static_cast<Handle*>(handle), config);
}
-int EchoCancellationImpl::num_handles_required() const {
- return apm_->num_output_channels() *
- apm_->num_reverse_channels();
+size_t EchoCancellationImpl::num_handles_required() const {
+  // Not locked as it only relies on the APM public API, which is thread safe.
+ return apm_->num_output_channels() * apm_->num_reverse_channels();
}
int EchoCancellationImpl::GetHandleError(void* handle) const {
+ // Not locked as it does not rely on anything in the state.
assert(handle != NULL);
- return MapError(WebRtcAec_get_error_code(static_cast<Handle*>(handle)));
+ return AudioProcessing::kUnspecifiedError;
}
} // namespace webrtc
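One recurring shape in the setters above deserves a note: the flag is written inside a narrow scoped-lock block, and Configure() is called only after the block closes. Configure() eventually reaches ConfigureHandle(), which takes crit_render_ and then crit_capture_, so calling it while still holding crit_capture_ would invert the documented render-before-capture lock order. A sketch of the shape (member names hypothetical):

    int SetFlag(bool enable) {
      {
        rtc::CritScope cs(crit_capture_);  // Narrow scope: just the write.
        flag_ = enable;
      }  // Lock released here, so ConfigureHandle() can take the locks
      return Configure();  // in the render-before-capture order again.
    }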
diff --git a/webrtc/modules/audio_processing/echo_cancellation_impl.h b/webrtc/modules/audio_processing/echo_cancellation_impl.h
index 070dcabc5d..a40a267e32 100644
--- a/webrtc/modules/audio_processing/echo_cancellation_impl.h
+++ b/webrtc/modules/audio_processing/echo_cancellation_impl.h
@@ -11,19 +11,22 @@
#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_ECHO_CANCELLATION_IMPL_H_
#define WEBRTC_MODULES_AUDIO_PROCESSING_ECHO_CANCELLATION_IMPL_H_
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/common_audio/swap_queue.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/modules/audio_processing/processing_component.h"
namespace webrtc {
class AudioBuffer;
-class CriticalSectionWrapper;
class EchoCancellationImpl : public EchoCancellation,
public ProcessingComponent {
public:
EchoCancellationImpl(const AudioProcessing* apm,
- CriticalSectionWrapper* crit);
+ rtc::CriticalSection* crit_render,
+ rtc::CriticalSection* crit_capture);
virtual ~EchoCancellationImpl();
int ProcessRenderAudio(const AudioBuffer* audio);
@@ -38,10 +41,13 @@ class EchoCancellationImpl : public EchoCancellation,
// ProcessingComponent implementation.
int Initialize() override;
void SetExtraOptions(const Config& config) override;
-
bool is_delay_agnostic_enabled() const;
bool is_extended_filter_enabled() const;
+ // Reads render side data that has been queued on the render call.
+ // Called holding the capture lock.
+ void ReadQueuedRenderData();
+
private:
// EchoCancellation implementation.
int Enable(bool enable) override;
@@ -58,6 +64,7 @@ class EchoCancellationImpl : public EchoCancellation,
int GetDelayMetrics(int* median,
int* std,
float* fraction_poor_delays) override;
+
struct AecCore* aec_core() const override;
// ProcessingComponent implementation.
@@ -65,20 +72,35 @@ class EchoCancellationImpl : public EchoCancellation,
int InitializeHandle(void* handle) const override;
int ConfigureHandle(void* handle) const override;
void DestroyHandle(void* handle) const override;
- int num_handles_required() const override;
+ size_t num_handles_required() const override;
int GetHandleError(void* handle) const override;
+ void AllocateRenderQueue();
+
+ // Not guarded as its public API is thread safe.
const AudioProcessing* apm_;
- CriticalSectionWrapper* crit_;
- bool drift_compensation_enabled_;
- bool metrics_enabled_;
- SuppressionLevel suppression_level_;
- int stream_drift_samples_;
- bool was_stream_drift_set_;
- bool stream_has_echo_;
- bool delay_logging_enabled_;
- bool extended_filter_enabled_;
- bool delay_agnostic_enabled_;
+
+ rtc::CriticalSection* const crit_render_ ACQUIRED_BEFORE(crit_capture_);
+ rtc::CriticalSection* const crit_capture_;
+
+ bool drift_compensation_enabled_ GUARDED_BY(crit_capture_);
+ bool metrics_enabled_ GUARDED_BY(crit_capture_);
+ SuppressionLevel suppression_level_ GUARDED_BY(crit_capture_);
+ int stream_drift_samples_ GUARDED_BY(crit_capture_);
+ bool was_stream_drift_set_ GUARDED_BY(crit_capture_);
+ bool stream_has_echo_ GUARDED_BY(crit_capture_);
+ bool delay_logging_enabled_ GUARDED_BY(crit_capture_);
+ bool extended_filter_enabled_ GUARDED_BY(crit_capture_);
+ bool delay_agnostic_enabled_ GUARDED_BY(crit_capture_);
+
+ size_t render_queue_element_max_size_ GUARDED_BY(crit_render_)
+ GUARDED_BY(crit_capture_);
+ std::vector<float> render_queue_buffer_ GUARDED_BY(crit_render_);
+ std::vector<float> capture_queue_buffer_ GUARDED_BY(crit_capture_);
+
+ // Lock protection not needed.
+ rtc::scoped_ptr<SwapQueue<std::vector<float>, RenderQueueItemVerifier<float>>>
+ render_signal_queue_;
};
} // namespace webrtc
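The GUARDED_BY and ACQUIRED_BEFORE annotations above feed Clang's -Wthread-safety analysis: touching an annotated member without holding its lock becomes a compile-time warning, and the ACQUIRED_BEFORE edge encodes the render-before-capture lock order used throughout these files. A minimal sketch, assuming simplified stand-ins for the real annotation macros (WebRTC defines them in a thread-annotations header):

    #define GUARDED_BY(x) __attribute__((guarded_by(x)))
    #define ACQUIRED_BEFORE(...) __attribute__((acquired_before(__VA_ARGS__)))

    class Annotated {
     public:
      void SetValue(int v) {
        rtc::CritScope cs(&capture_lock_);  // OK: lock held across the write.
        value_ = v;
      }

      void Broken() {
        value_ = 0;  // -Wthread-safety: writing value_ needs capture_lock_.
      }

     private:
      rtc::CriticalSection render_lock_ ACQUIRED_BEFORE(capture_lock_);
      rtc::CriticalSection capture_lock_;
      int value_ GUARDED_BY(capture_lock_) = 0;
    };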
diff --git a/webrtc/modules/audio_processing/echo_cancellation_impl_unittest.cc b/webrtc/modules/audio_processing/echo_cancellation_impl_unittest.cc
index b2e11981fa..7f152bf942 100644
--- a/webrtc/modules/audio_processing/echo_cancellation_impl_unittest.cc
+++ b/webrtc/modules/audio_processing/echo_cancellation_impl_unittest.cc
@@ -14,7 +14,6 @@ extern "C" {
#include "webrtc/modules/audio_processing/aec/aec_core.h"
}
#include "webrtc/modules/audio_processing/include/audio_processing.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
namespace webrtc {
diff --git a/webrtc/modules/audio_processing/echo_control_mobile_impl.cc b/webrtc/modules/audio_processing/echo_control_mobile_impl.cc
index 954aac7d4a..f2df5f7984 100644
--- a/webrtc/modules/audio_processing/echo_control_mobile_impl.cc
+++ b/webrtc/modules/audio_processing/echo_control_mobile_impl.cc
@@ -13,9 +13,8 @@
#include <assert.h>
#include <string.h>
-#include "webrtc/modules/audio_processing/aecm/include/echo_control_mobile.h"
+#include "webrtc/modules/audio_processing/aecm/echo_control_mobile.h"
#include "webrtc/modules/audio_processing/audio_buffer.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/logging.h"
namespace webrtc {
@@ -56,6 +55,12 @@ AudioProcessing::Error MapError(int err) {
return AudioProcessing::kUnspecifiedError;
}
}
+// Maximum length that a frame of samples can have.
+static const size_t kMaxAllowedValuesOfSamplesPerFrame = 160;
+// Maximum number of frames to buffer in the render queue.
+// TODO(peah): Decrease this once we properly handle hugely unbalanced
+// reverse and forward call numbers.
+static const size_t kMaxNumFramesToBuffer = 100;
} // namespace
size_t EchoControlMobile::echo_path_size_bytes() {
@@ -63,13 +68,20 @@ size_t EchoControlMobile::echo_path_size_bytes() {
}
EchoControlMobileImpl::EchoControlMobileImpl(const AudioProcessing* apm,
- CriticalSectionWrapper* crit)
- : ProcessingComponent(),
- apm_(apm),
- crit_(crit),
- routing_mode_(kSpeakerphone),
- comfort_noise_enabled_(true),
- external_echo_path_(NULL) {}
+ rtc::CriticalSection* crit_render,
+ rtc::CriticalSection* crit_capture)
+ : ProcessingComponent(),
+ apm_(apm),
+ crit_render_(crit_render),
+ crit_capture_(crit_capture),
+ routing_mode_(kSpeakerphone),
+ comfort_noise_enabled_(true),
+ external_echo_path_(NULL),
+ render_queue_element_max_size_(0) {
+ RTC_DCHECK(apm);
+ RTC_DCHECK(crit_render);
+ RTC_DCHECK(crit_capture);
+}
EchoControlMobileImpl::~EchoControlMobileImpl() {
if (external_echo_path_ != NULL) {
@@ -79,53 +91,98 @@ EchoControlMobileImpl::~EchoControlMobileImpl() {
}
int EchoControlMobileImpl::ProcessRenderAudio(const AudioBuffer* audio) {
+ rtc::CritScope cs_render(crit_render_);
+
if (!is_component_enabled()) {
- return apm_->kNoError;
+ return AudioProcessing::kNoError;
}
assert(audio->num_frames_per_band() <= 160);
assert(audio->num_channels() == apm_->num_reverse_channels());
- int err = apm_->kNoError;
-
+ int err = AudioProcessing::kNoError;
// The ordering convention must be followed to pass to the correct AECM.
size_t handle_index = 0;
- for (int i = 0; i < apm_->num_output_channels(); i++) {
- for (int j = 0; j < audio->num_channels(); j++) {
+ render_queue_buffer_.clear();
+ for (size_t i = 0; i < apm_->num_output_channels(); i++) {
+ for (size_t j = 0; j < audio->num_channels(); j++) {
Handle* my_handle = static_cast<Handle*>(handle(handle_index));
- err = WebRtcAecm_BufferFarend(
- my_handle,
- audio->split_bands_const(j)[kBand0To8kHz],
+ err = WebRtcAecm_GetBufferFarendError(
+ my_handle, audio->split_bands_const(j)[kBand0To8kHz],
audio->num_frames_per_band());
- if (err != apm_->kNoError) {
- return GetHandleError(my_handle); // TODO(ajm): warning possible?
- }
+ if (err != AudioProcessing::kNoError)
+        return MapError(err);  // TODO(ajm): warning possible?
+
+ // Buffer the samples in the render queue.
+ render_queue_buffer_.insert(render_queue_buffer_.end(),
+ audio->split_bands_const(j)[kBand0To8kHz],
+ (audio->split_bands_const(j)[kBand0To8kHz] +
+ audio->num_frames_per_band()));
handle_index++;
}
}
- return apm_->kNoError;
+ // Insert the samples into the queue.
+ if (!render_signal_queue_->Insert(&render_queue_buffer_)) {
+ // The data queue is full and needs to be emptied.
+ ReadQueuedRenderData();
+
+ // Retry the insert (should always work).
+ RTC_DCHECK_EQ(render_signal_queue_->Insert(&render_queue_buffer_), true);
+ }
+
+ return AudioProcessing::kNoError;
+}
+
+// Reads chunks of data that were queued on the render side and buffers them
+// into the farend signal of the AECM.
+void EchoControlMobileImpl::ReadQueuedRenderData() {
+ rtc::CritScope cs_capture(crit_capture_);
+
+ if (!is_component_enabled()) {
+ return;
+ }
+
+ while (render_signal_queue_->Remove(&capture_queue_buffer_)) {
+ size_t handle_index = 0;
+ size_t buffer_index = 0;
+ const size_t num_frames_per_band =
+ capture_queue_buffer_.size() /
+ (apm_->num_output_channels() * apm_->num_reverse_channels());
+ for (size_t i = 0; i < apm_->num_output_channels(); i++) {
+ for (size_t j = 0; j < apm_->num_reverse_channels(); j++) {
+ Handle* my_handle = static_cast<Handle*>(handle(handle_index));
+ WebRtcAecm_BufferFarend(my_handle, &capture_queue_buffer_[buffer_index],
+ num_frames_per_band);
+
+ buffer_index += num_frames_per_band;
+ handle_index++;
+ }
+ }
+ }
}
int EchoControlMobileImpl::ProcessCaptureAudio(AudioBuffer* audio) {
+ rtc::CritScope cs_capture(crit_capture_);
+
if (!is_component_enabled()) {
- return apm_->kNoError;
+ return AudioProcessing::kNoError;
}
if (!apm_->was_stream_delay_set()) {
- return apm_->kStreamParameterNotSetError;
+ return AudioProcessing::kStreamParameterNotSetError;
}
assert(audio->num_frames_per_band() <= 160);
assert(audio->num_channels() == apm_->num_output_channels());
- int err = apm_->kNoError;
+ int err = AudioProcessing::kNoError;
// The ordering convention must be followed to pass to the correct AECM.
size_t handle_index = 0;
- for (int i = 0; i < audio->num_channels(); i++) {
+ for (size_t i = 0; i < audio->num_channels(); i++) {
// TODO(ajm): improve how this works, possibly inside AECM.
// This is kind of hacked up.
const int16_t* noisy = audio->low_pass_reference(i);
@@ -134,7 +191,7 @@ int EchoControlMobileImpl::ProcessCaptureAudio(AudioBuffer* audio) {
noisy = clean;
clean = NULL;
}
- for (int j = 0; j < apm_->num_reverse_channels(); j++) {
+ for (size_t j = 0; j < apm_->num_reverse_channels(); j++) {
Handle* my_handle = static_cast<Handle*>(handle(handle_index));
err = WebRtcAecm_Process(
my_handle,
@@ -144,109 +201,158 @@ int EchoControlMobileImpl::ProcessCaptureAudio(AudioBuffer* audio) {
audio->num_frames_per_band(),
apm_->stream_delay_ms());
- if (err != apm_->kNoError) {
- return GetHandleError(my_handle); // TODO(ajm): warning possible?
- }
+ if (err != AudioProcessing::kNoError)
+ return MapError(err);
handle_index++;
}
}
- return apm_->kNoError;
+ return AudioProcessing::kNoError;
}
int EchoControlMobileImpl::Enable(bool enable) {
- CriticalSectionScoped crit_scoped(crit_);
// Ensure AEC and AECM are not both enabled.
+ rtc::CritScope cs_render(crit_render_);
+ rtc::CritScope cs_capture(crit_capture_);
+ // The is_enabled call is safe from a deadlock perspective
+  // as both locks are already held in the correct order.
if (enable && apm_->echo_cancellation()->is_enabled()) {
- return apm_->kBadParameterError;
+ return AudioProcessing::kBadParameterError;
}
return EnableComponent(enable);
}
bool EchoControlMobileImpl::is_enabled() const {
+ rtc::CritScope cs(crit_capture_);
return is_component_enabled();
}
int EchoControlMobileImpl::set_routing_mode(RoutingMode mode) {
- CriticalSectionScoped crit_scoped(crit_);
if (MapSetting(mode) == -1) {
- return apm_->kBadParameterError;
+ return AudioProcessing::kBadParameterError;
}
- routing_mode_ = mode;
+ {
+ rtc::CritScope cs(crit_capture_);
+ routing_mode_ = mode;
+ }
return Configure();
}
EchoControlMobile::RoutingMode EchoControlMobileImpl::routing_mode()
const {
+ rtc::CritScope cs(crit_capture_);
return routing_mode_;
}
int EchoControlMobileImpl::enable_comfort_noise(bool enable) {
- CriticalSectionScoped crit_scoped(crit_);
- comfort_noise_enabled_ = enable;
+ {
+ rtc::CritScope cs(crit_capture_);
+ comfort_noise_enabled_ = enable;
+ }
return Configure();
}
bool EchoControlMobileImpl::is_comfort_noise_enabled() const {
+ rtc::CritScope cs(crit_capture_);
return comfort_noise_enabled_;
}
int EchoControlMobileImpl::SetEchoPath(const void* echo_path,
size_t size_bytes) {
- CriticalSectionScoped crit_scoped(crit_);
- if (echo_path == NULL) {
- return apm_->kNullPointerError;
- }
- if (size_bytes != echo_path_size_bytes()) {
- // Size mismatch
- return apm_->kBadParameterError;
- }
+ {
+ rtc::CritScope cs_render(crit_render_);
+ rtc::CritScope cs_capture(crit_capture_);
+ if (echo_path == NULL) {
+ return AudioProcessing::kNullPointerError;
+ }
+ if (size_bytes != echo_path_size_bytes()) {
+ // Size mismatch
+ return AudioProcessing::kBadParameterError;
+ }
- if (external_echo_path_ == NULL) {
- external_echo_path_ = new unsigned char[size_bytes];
+ if (external_echo_path_ == NULL) {
+ external_echo_path_ = new unsigned char[size_bytes];
+ }
+ memcpy(external_echo_path_, echo_path, size_bytes);
}
- memcpy(external_echo_path_, echo_path, size_bytes);
return Initialize();
}
int EchoControlMobileImpl::GetEchoPath(void* echo_path,
size_t size_bytes) const {
- CriticalSectionScoped crit_scoped(crit_);
+ rtc::CritScope cs(crit_capture_);
if (echo_path == NULL) {
- return apm_->kNullPointerError;
+ return AudioProcessing::kNullPointerError;
}
if (size_bytes != echo_path_size_bytes()) {
// Size mismatch
- return apm_->kBadParameterError;
+ return AudioProcessing::kBadParameterError;
}
if (!is_component_enabled()) {
- return apm_->kNotEnabledError;
+ return AudioProcessing::kNotEnabledError;
}
// Get the echo path from the first channel
Handle* my_handle = static_cast<Handle*>(handle(0));
- if (WebRtcAecm_GetEchoPath(my_handle, echo_path, size_bytes) != 0) {
- return GetHandleError(my_handle);
- }
+ int32_t err = WebRtcAecm_GetEchoPath(my_handle, echo_path, size_bytes);
+ if (err != 0)
+ return MapError(err);
- return apm_->kNoError;
+ return AudioProcessing::kNoError;
}
int EchoControlMobileImpl::Initialize() {
- if (!is_component_enabled()) {
- return apm_->kNoError;
+ {
+ rtc::CritScope cs_capture(crit_capture_);
+ if (!is_component_enabled()) {
+ return AudioProcessing::kNoError;
+ }
}
- if (apm_->proc_sample_rate_hz() > apm_->kSampleRate16kHz) {
+ if (apm_->proc_sample_rate_hz() > AudioProcessing::kSampleRate16kHz) {
LOG(LS_ERROR) << "AECM only supports 16 kHz or lower sample rates";
- return apm_->kBadSampleRateError;
+ return AudioProcessing::kBadSampleRateError;
}
- return ProcessingComponent::Initialize();
+ int err = ProcessingComponent::Initialize();
+ if (err != AudioProcessing::kNoError) {
+ return err;
+ }
+
+ AllocateRenderQueue();
+
+ return AudioProcessing::kNoError;
+}
+
+void EchoControlMobileImpl::AllocateRenderQueue() {
+ const size_t new_render_queue_element_max_size = std::max<size_t>(
+ static_cast<size_t>(1),
+ kMaxAllowedValuesOfSamplesPerFrame * num_handles_required());
+
+ rtc::CritScope cs_render(crit_render_);
+ rtc::CritScope cs_capture(crit_capture_);
+
+ // Reallocate the queue if the current queue item size is too small to
+ // hold the data that is to be queued.
+ if (render_queue_element_max_size_ < new_render_queue_element_max_size) {
+ render_queue_element_max_size_ = new_render_queue_element_max_size;
+
+ std::vector<int16_t> template_queue_element(render_queue_element_max_size_);
+
+ render_signal_queue_.reset(
+ new SwapQueue<std::vector<int16_t>, RenderQueueItemVerifier<int16_t>>(
+ kMaxNumFramesToBuffer, template_queue_element,
+ RenderQueueItemVerifier<int16_t>(render_queue_element_max_size_)));
+
+ render_queue_buffer_.resize(render_queue_element_max_size_);
+ capture_queue_buffer_.resize(render_queue_element_max_size_);
+ } else {
+ render_signal_queue_->Clear();
+ }
}
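The sizing policy above only ever grows the queue elements; a short sketch of the same grow-or-clear decision under that assumption (illustrative names):

#include <algorithm>
#include <cstddef>

// Grow-only sizing: reallocate only when the required element size
// exceeds the current one; otherwise just clear stale elements. In
// steady state this keeps the render/capture paths allocation-free.
size_t NextElementMaxSize(size_t current_max, size_t frames_per_chunk,
                          size_t num_handles) {
  const size_t needed = std::max<size_t>(1, frames_per_chunk * num_handles);
  return std::max(current_max, needed);
}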
void* EchoControlMobileImpl::CreateHandle() const {
@@ -254,10 +360,14 @@ void* EchoControlMobileImpl::CreateHandle() const {
}
void EchoControlMobileImpl::DestroyHandle(void* handle) const {
+ // This method is only called in a non-concurrent manner during APM
+ // destruction.
WebRtcAecm_Free(static_cast<Handle*>(handle));
}
int EchoControlMobileImpl::InitializeHandle(void* handle) const {
+ rtc::CritScope cs_render(crit_render_);
+ rtc::CritScope cs_capture(crit_capture_);
assert(handle != NULL);
Handle* my_handle = static_cast<Handle*>(handle);
if (WebRtcAecm_Init(my_handle, apm_->proc_sample_rate_hz()) != 0) {
@@ -271,10 +381,12 @@ int EchoControlMobileImpl::InitializeHandle(void* handle) const {
}
}
- return apm_->kNoError;
+ return AudioProcessing::kNoError;
}
int EchoControlMobileImpl::ConfigureHandle(void* handle) const {
+ rtc::CritScope cs_render(crit_render_);
+ rtc::CritScope cs_capture(crit_capture_);
AecmConfig config;
config.cngMode = comfort_noise_enabled_;
config.echoMode = MapSetting(routing_mode_);
@@ -282,13 +394,14 @@ int EchoControlMobileImpl::ConfigureHandle(void* handle) const {
return WebRtcAecm_set_config(static_cast<Handle*>(handle), config);
}
-int EchoControlMobileImpl::num_handles_required() const {
- return apm_->num_output_channels() *
- apm_->num_reverse_channels();
+size_t EchoControlMobileImpl::num_handles_required() const {
+ // Not locked as it only relies on the APM public API, which is thread safe.
+ return apm_->num_output_channels() * apm_->num_reverse_channels();
}
int EchoControlMobileImpl::GetHandleError(void* handle) const {
+ // Not locked as it does not rely on anything in the state.
assert(handle != NULL);
- return MapError(WebRtcAecm_get_error_code(static_cast<Handle*>(handle)));
+ return AudioProcessing::kUnspecifiedError;
}
} // namespace webrtc
diff --git a/webrtc/modules/audio_processing/echo_control_mobile_impl.h b/webrtc/modules/audio_processing/echo_control_mobile_impl.h
index da7022545f..4d6529d3ac 100644
--- a/webrtc/modules/audio_processing/echo_control_mobile_impl.h
+++ b/webrtc/modules/audio_processing/echo_control_mobile_impl.h
@@ -11,19 +11,23 @@
#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_ECHO_CONTROL_MOBILE_IMPL_H_
#define WEBRTC_MODULES_AUDIO_PROCESSING_ECHO_CONTROL_MOBILE_IMPL_H_
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/common_audio/swap_queue.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/modules/audio_processing/processing_component.h"
namespace webrtc {
class AudioBuffer;
-class CriticalSectionWrapper;
class EchoControlMobileImpl : public EchoControlMobile,
public ProcessingComponent {
public:
EchoControlMobileImpl(const AudioProcessing* apm,
- CriticalSectionWrapper* crit);
+ rtc::CriticalSection* crit_render,
+ rtc::CriticalSection* crit_capture);
+
virtual ~EchoControlMobileImpl();
int ProcessRenderAudio(const AudioBuffer* audio);
@@ -37,6 +41,9 @@ class EchoControlMobileImpl : public EchoControlMobile,
// ProcessingComponent implementation.
int Initialize() override;
+ // Reads render-side data that has been queued during the render call.
+ void ReadQueuedRenderData();
+
private:
// EchoControlMobile implementation.
int Enable(bool enable) override;
@@ -46,18 +53,37 @@ class EchoControlMobileImpl : public EchoControlMobile,
int GetEchoPath(void* echo_path, size_t size_bytes) const override;
// ProcessingComponent implementation.
+ // Called holding both the render and capture locks.
void* CreateHandle() const override;
int InitializeHandle(void* handle) const override;
int ConfigureHandle(void* handle) const override;
void DestroyHandle(void* handle) const override;
- int num_handles_required() const override;
+ size_t num_handles_required() const override;
int GetHandleError(void* handle) const override;
+ void AllocateRenderQueue();
+
+ // Not guarded as its public API is thread safe.
const AudioProcessing* apm_;
- CriticalSectionWrapper* crit_;
- RoutingMode routing_mode_;
- bool comfort_noise_enabled_;
- unsigned char* external_echo_path_;
+
+ rtc::CriticalSection* const crit_render_ ACQUIRED_BEFORE(crit_capture_);
+ rtc::CriticalSection* const crit_capture_;
+
+ RoutingMode routing_mode_ GUARDED_BY(crit_capture_);
+ bool comfort_noise_enabled_ GUARDED_BY(crit_capture_);
+ unsigned char* external_echo_path_ GUARDED_BY(crit_render_)
+ GUARDED_BY(crit_capture_);
+
+ size_t render_queue_element_max_size_ GUARDED_BY(crit_render_)
+ GUARDED_BY(crit_capture_);
+
+ std::vector<int16_t> render_queue_buffer_ GUARDED_BY(crit_render_);
+ std::vector<int16_t> capture_queue_buffer_ GUARDED_BY(crit_capture_);
+
+ // Lock protection not needed.
+ rtc::scoped_ptr<
+ SwapQueue<std::vector<int16_t>, RenderQueueItemVerifier<int16_t>>>
+ render_signal_queue_;
};
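The ACQUIRED_BEFORE annotation above documents the lock order (render before capture) for Clang's thread-safety analysis; a minimal sketch of a compliant two-lock path (illustrative, not shipped code):

#include "webrtc/base/criticalsection.h"

// Any path that needs both locks must take the render lock first;
// taking them in the other order anywhere would risk deadlock and,
// with -Wthread-safety, is flagged against ACQUIRED_BEFORE.
void TwoLockOperation(rtc::CriticalSection* crit_render,
                      rtc::CriticalSection* crit_capture) {
  rtc::CritScope cs_render(crit_render);    // first: render lock
  rtc::CritScope cs_capture(crit_capture);  // second: capture lock
  // ... access state guarded by either lock ...
}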
} // namespace webrtc
diff --git a/webrtc/modules/audio_processing/gain_control_impl.cc b/webrtc/modules/audio_processing/gain_control_impl.cc
index 3b1537e796..04a6c7ba29 100644
--- a/webrtc/modules/audio_processing/gain_control_impl.cc
+++ b/webrtc/modules/audio_processing/gain_control_impl.cc
@@ -14,7 +14,6 @@
#include "webrtc/modules/audio_processing/audio_buffer.h"
#include "webrtc/modules/audio_processing/agc/legacy/gain_control.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
namespace webrtc {
@@ -33,60 +32,113 @@ int16_t MapSetting(GainControl::Mode mode) {
assert(false);
return -1;
}
+
+// Maximum length that a frame of samples can have.
+static const size_t kMaxAllowedValuesOfSamplesPerFrame = 160;
+// Maximum number of frames to buffer in the render queue.
+// TODO(peah): Decrease this once we properly handle hugely unbalanced
+// reverse and forward call numbers.
+static const size_t kMaxNumFramesToBuffer = 100;
+
} // namespace
GainControlImpl::GainControlImpl(const AudioProcessing* apm,
- CriticalSectionWrapper* crit)
- : ProcessingComponent(),
- apm_(apm),
- crit_(crit),
- mode_(kAdaptiveAnalog),
- minimum_capture_level_(0),
- maximum_capture_level_(255),
- limiter_enabled_(true),
- target_level_dbfs_(3),
- compression_gain_db_(9),
- analog_capture_level_(0),
- was_analog_level_set_(false),
- stream_is_saturated_(false) {}
+ rtc::CriticalSection* crit_render,
+ rtc::CriticalSection* crit_capture)
+ : ProcessingComponent(),
+ apm_(apm),
+ crit_render_(crit_render),
+ crit_capture_(crit_capture),
+ mode_(kAdaptiveAnalog),
+ minimum_capture_level_(0),
+ maximum_capture_level_(255),
+ limiter_enabled_(true),
+ target_level_dbfs_(3),
+ compression_gain_db_(9),
+ analog_capture_level_(0),
+ was_analog_level_set_(false),
+ stream_is_saturated_(false),
+ render_queue_element_max_size_(0) {
+ RTC_DCHECK(apm);
+ RTC_DCHECK(crit_render);
+ RTC_DCHECK(crit_capture);
+}
GainControlImpl::~GainControlImpl() {}
int GainControlImpl::ProcessRenderAudio(AudioBuffer* audio) {
+ rtc::CritScope cs(crit_render_);
if (!is_component_enabled()) {
- return apm_->kNoError;
+ return AudioProcessing::kNoError;
}
assert(audio->num_frames_per_band() <= 160);
- for (int i = 0; i < num_handles(); i++) {
+ render_queue_buffer_.resize(0);
+ for (size_t i = 0; i < num_handles(); i++) {
Handle* my_handle = static_cast<Handle*>(handle(i));
- int err = WebRtcAgc_AddFarend(
- my_handle,
- audio->mixed_low_pass_data(),
- audio->num_frames_per_band());
+ int err =
+ WebRtcAgc_GetAddFarendError(my_handle, audio->num_frames_per_band());
- if (err != apm_->kNoError) {
+ if (err != AudioProcessing::kNoError)
return GetHandleError(my_handle);
- }
+
+ // Buffer the samples in the render queue.
+ render_queue_buffer_.insert(
+ render_queue_buffer_.end(), audio->mixed_low_pass_data(),
+ (audio->mixed_low_pass_data() + audio->num_frames_per_band()));
+ }
+
+ // Insert the samples into the queue.
+ if (!render_signal_queue_->Insert(&render_queue_buffer_)) {
+ // The data queue is full and needs to be emptied.
+ ReadQueuedRenderData();
+
+ // Retry the insert (should always work).
+ RTC_DCHECK_EQ(render_signal_queue_->Insert(&render_queue_buffer_), true);
+ }
+
+ return AudioProcessing::kNoError;
+}
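The insert-with-retry idiom above relies on there being a single render-side producer; a sketch of that invariant, with QueueOrDrain and drain() as illustrative stand-ins:

#include <cstdint>
#include <vector>

template <typename Queue, typename DrainFn>
void QueueOrDrain(Queue* queue, std::vector<int16_t>* buffer, DrainFn drain) {
  if (!queue->Insert(buffer)) {
    // Queue full: synchronously empty it, then retry. The retry cannot
    // fail because no other thread inserts into this queue.
    drain();
    const bool ok = queue->Insert(buffer);
    (void)ok;  // Mirrors the RTC_DCHECK_EQ(..., true) above.
  }
}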
+
+// Read chunks of data that were received and queued on the render side from
+// a queue. All the data chunks are buffered into the farend signal of the AGC.
+void GainControlImpl::ReadQueuedRenderData() {
+ rtc::CritScope cs(crit_capture_);
+
+ if (!is_component_enabled()) {
+ return;
}
- return apm_->kNoError;
+ while (render_signal_queue_->Remove(&capture_queue_buffer_)) {
+ size_t buffer_index = 0;
+ const size_t num_frames_per_band =
+ capture_queue_buffer_.size() / num_handles();
+ for (size_t i = 0; i < num_handles(); i++) {
+ Handle* my_handle = static_cast<Handle*>(handle(i));
+ WebRtcAgc_AddFarend(my_handle, &capture_queue_buffer_[buffer_index],
+ num_frames_per_band);
+
+ buffer_index += num_frames_per_band;
+ }
+ }
}
int GainControlImpl::AnalyzeCaptureAudio(AudioBuffer* audio) {
+ rtc::CritScope cs(crit_capture_);
+
if (!is_component_enabled()) {
- return apm_->kNoError;
+ return AudioProcessing::kNoError;
}
assert(audio->num_frames_per_band() <= 160);
assert(audio->num_channels() == num_handles());
- int err = apm_->kNoError;
+ int err = AudioProcessing::kNoError;
if (mode_ == kAdaptiveAnalog) {
capture_levels_.assign(num_handles(), analog_capture_level_);
- for (int i = 0; i < num_handles(); i++) {
+ for (size_t i = 0; i < num_handles(); i++) {
Handle* my_handle = static_cast<Handle*>(handle(i));
err = WebRtcAgc_AddMic(
my_handle,
@@ -94,13 +146,13 @@ int GainControlImpl::AnalyzeCaptureAudio(AudioBuffer* audio) {
audio->num_bands(),
audio->num_frames_per_band());
- if (err != apm_->kNoError) {
+ if (err != AudioProcessing::kNoError) {
return GetHandleError(my_handle);
}
}
} else if (mode_ == kAdaptiveDigital) {
- for (int i = 0; i < num_handles(); i++) {
+ for (size_t i = 0; i < num_handles(); i++) {
Handle* my_handle = static_cast<Handle*>(handle(i));
int32_t capture_level_out = 0;
@@ -114,34 +166,38 @@ int GainControlImpl::AnalyzeCaptureAudio(AudioBuffer* audio) {
capture_levels_[i] = capture_level_out;
- if (err != apm_->kNoError) {
+ if (err != AudioProcessing::kNoError) {
return GetHandleError(my_handle);
}
}
}
- return apm_->kNoError;
+ return AudioProcessing::kNoError;
}
int GainControlImpl::ProcessCaptureAudio(AudioBuffer* audio) {
+ rtc::CritScope cs(crit_capture_);
+
if (!is_component_enabled()) {
- return apm_->kNoError;
+ return AudioProcessing::kNoError;
}
if (mode_ == kAdaptiveAnalog && !was_analog_level_set_) {
- return apm_->kStreamParameterNotSetError;
+ return AudioProcessing::kStreamParameterNotSetError;
}
assert(audio->num_frames_per_band() <= 160);
assert(audio->num_channels() == num_handles());
stream_is_saturated_ = false;
- for (int i = 0; i < num_handles(); i++) {
+ for (size_t i = 0; i < num_handles(); i++) {
Handle* my_handle = static_cast<Handle*>(handle(i));
int32_t capture_level_out = 0;
uint8_t saturation_warning = 0;
+ // The call to stream_has_echo() is safe from a deadlock perspective
+ // as the capture lock is already held.
int err = WebRtcAgc_Process(
my_handle,
audio->split_bands_const(i),
@@ -153,7 +209,7 @@ int GainControlImpl::ProcessCaptureAudio(AudioBuffer* audio) {
apm_->echo_cancellation()->stream_has_echo(),
&saturation_warning);
- if (err != apm_->kNoError) {
+ if (err != AudioProcessing::kNoError) {
return GetHandleError(my_handle);
}
@@ -166,7 +222,7 @@ int GainControlImpl::ProcessCaptureAudio(AudioBuffer* audio) {
if (mode_ == kAdaptiveAnalog) {
// Take the analog level to be the average across the handles.
analog_capture_level_ = 0;
- for (int i = 0; i < num_handles(); i++) {
+ for (size_t i = 0; i < num_handles(); i++) {
analog_capture_level_ += capture_levels_[i];
}
@@ -174,22 +230,24 @@ int GainControlImpl::ProcessCaptureAudio(AudioBuffer* audio) {
}
was_analog_level_set_ = false;
- return apm_->kNoError;
+ return AudioProcessing::kNoError;
}
// TODO(ajm): ensure this is called under kAdaptiveAnalog.
int GainControlImpl::set_stream_analog_level(int level) {
- CriticalSectionScoped crit_scoped(crit_);
+ rtc::CritScope cs(crit_capture_);
+
was_analog_level_set_ = true;
if (level < minimum_capture_level_ || level > maximum_capture_level_) {
- return apm_->kBadParameterError;
+ return AudioProcessing::kBadParameterError;
}
analog_capture_level_ = level;
- return apm_->kNoError;
+ return AudioProcessing::kNoError;
}
int GainControlImpl::stream_analog_level() {
+ rtc::CritScope cs(crit_capture_);
// TODO(ajm): enable this assertion?
//assert(mode_ == kAdaptiveAnalog);
@@ -197,18 +255,21 @@ int GainControlImpl::stream_analog_level() {
}
int GainControlImpl::Enable(bool enable) {
- CriticalSectionScoped crit_scoped(crit_);
+ rtc::CritScope cs_render(crit_render_);
+ rtc::CritScope cs_capture(crit_capture_);
return EnableComponent(enable);
}
bool GainControlImpl::is_enabled() const {
+ rtc::CritScope cs(crit_capture_);
return is_component_enabled();
}
int GainControlImpl::set_mode(Mode mode) {
- CriticalSectionScoped crit_scoped(crit_);
+ rtc::CritScope cs_render(crit_render_);
+ rtc::CritScope cs_capture(crit_capture_);
if (MapSetting(mode) == -1) {
- return apm_->kBadParameterError;
+ return AudioProcessing::kBadParameterError;
}
mode_ = mode;
@@ -216,22 +277,23 @@ int GainControlImpl::set_mode(Mode mode) {
}
GainControl::Mode GainControlImpl::mode() const {
+ rtc::CritScope cs(crit_capture_);
return mode_;
}
int GainControlImpl::set_analog_level_limits(int minimum,
int maximum) {
- CriticalSectionScoped crit_scoped(crit_);
+ rtc::CritScope cs(crit_capture_);
if (minimum < 0) {
- return apm_->kBadParameterError;
+ return AudioProcessing::kBadParameterError;
}
if (maximum > 65535) {
- return apm_->kBadParameterError;
+ return AudioProcessing::kBadParameterError;
}
if (maximum < minimum) {
- return apm_->kBadParameterError;
+ return AudioProcessing::kBadParameterError;
}
minimum_capture_level_ = minimum;
@@ -241,21 +303,24 @@ int GainControlImpl::set_analog_level_limits(int minimum,
}
int GainControlImpl::analog_level_minimum() const {
+ rtc::CritScope cs(crit_capture_);
return minimum_capture_level_;
}
int GainControlImpl::analog_level_maximum() const {
+ rtc::CritScope cs(crit_capture_);
return maximum_capture_level_;
}
bool GainControlImpl::stream_is_saturated() const {
+ rtc::CritScope cs(crit_capture_);
return stream_is_saturated_;
}
int GainControlImpl::set_target_level_dbfs(int level) {
- CriticalSectionScoped crit_scoped(crit_);
+ rtc::CritScope cs(crit_capture_);
if (level > 31 || level < 0) {
- return apm_->kBadParameterError;
+ return AudioProcessing::kBadParameterError;
}
target_level_dbfs_ = level;
@@ -263,13 +328,14 @@ int GainControlImpl::set_target_level_dbfs(int level) {
}
int GainControlImpl::target_level_dbfs() const {
+ rtc::CritScope cs(crit_capture_);
return target_level_dbfs_;
}
int GainControlImpl::set_compression_gain_db(int gain) {
- CriticalSectionScoped crit_scoped(crit_);
+ rtc::CritScope cs(crit_capture_);
if (gain < 0 || gain > 90) {
- return apm_->kBadParameterError;
+ return AudioProcessing::kBadParameterError;
}
compression_gain_db_ = gain;
@@ -277,27 +343,59 @@ int GainControlImpl::set_compression_gain_db(int gain) {
}
int GainControlImpl::compression_gain_db() const {
+ rtc::CritScope cs(crit_capture_);
return compression_gain_db_;
}
int GainControlImpl::enable_limiter(bool enable) {
- CriticalSectionScoped crit_scoped(crit_);
+ rtc::CritScope cs(crit_capture_);
limiter_enabled_ = enable;
return Configure();
}
bool GainControlImpl::is_limiter_enabled() const {
+ rtc::CritScope cs(crit_capture_);
return limiter_enabled_;
}
int GainControlImpl::Initialize() {
int err = ProcessingComponent::Initialize();
- if (err != apm_->kNoError || !is_component_enabled()) {
+ if (err != AudioProcessing::kNoError || !is_component_enabled()) {
return err;
}
- capture_levels_.assign(num_handles(), analog_capture_level_);
- return apm_->kNoError;
+ AllocateRenderQueue();
+
+ rtc::CritScope cs_capture(crit_capture_);
+ const int n = num_handles();
+ RTC_CHECK_GE(n, 0) << "Bad number of handles: " << n;
+
+ capture_levels_.assign(n, analog_capture_level_);
+ return AudioProcessing::kNoError;
+}
+
+void GainControlImpl::AllocateRenderQueue() {
+ const size_t new_render_queue_element_max_size =
+ std::max<size_t>(static_cast<size_t>(1),
+ kMaxAllowedValuesOfSamplesPerFrame * num_handles());
+
+ rtc::CritScope cs_render(crit_render_);
+ rtc::CritScope cs_capture(crit_capture_);
+
+ if (render_queue_element_max_size_ < new_render_queue_element_max_size) {
+ render_queue_element_max_size_ = new_render_queue_element_max_size;
+ std::vector<int16_t> template_queue_element(render_queue_element_max_size_);
+
+ render_signal_queue_.reset(
+ new SwapQueue<std::vector<int16_t>, RenderQueueItemVerifier<int16_t>>(
+ kMaxNumFramesToBuffer, template_queue_element,
+ RenderQueueItemVerifier<int16_t>(render_queue_element_max_size_)));
+
+ render_queue_buffer_.resize(render_queue_element_max_size_);
+ capture_queue_buffer_.resize(render_queue_element_max_size_);
+ } else {
+ render_signal_queue_->Clear();
+ }
}
void* GainControlImpl::CreateHandle() const {
@@ -309,6 +407,9 @@ void GainControlImpl::DestroyHandle(void* handle) const {
}
int GainControlImpl::InitializeHandle(void* handle) const {
+ rtc::CritScope cs_render(crit_render_);
+ rtc::CritScope cs_capture(crit_capture_);
+
return WebRtcAgc_Init(static_cast<Handle*>(handle),
minimum_capture_level_,
maximum_capture_level_,
@@ -317,6 +418,8 @@ int GainControlImpl::InitializeHandle(void* handle) const {
}
int GainControlImpl::ConfigureHandle(void* handle) const {
+ rtc::CritScope cs_render(crit_render_);
+ rtc::CritScope cs_capture(crit_capture_);
WebRtcAgcConfig config;
// TODO(ajm): Flip the sign here (since AGC expects a positive value) if we
// change the interface.
@@ -330,14 +433,15 @@ int GainControlImpl::ConfigureHandle(void* handle) const {
return WebRtcAgc_set_config(static_cast<Handle*>(handle), config);
}
-int GainControlImpl::num_handles_required() const {
- return apm_->num_output_channels();
+size_t GainControlImpl::num_handles_required() const {
+ // Not locked as it only relies on the APM public API, which is thread safe.
+ return apm_->num_proc_channels();
}
int GainControlImpl::GetHandleError(void* handle) const {
// The AGC has no get_error() function.
// (Despite listing errors in its interface...)
assert(handle != NULL);
- return apm_->kUnspecifiedError;
+ return AudioProcessing::kUnspecifiedError;
}
} // namespace webrtc
diff --git a/webrtc/modules/audio_processing/gain_control_impl.h b/webrtc/modules/audio_processing/gain_control_impl.h
index f24d200cf2..72789ba5e1 100644
--- a/webrtc/modules/audio_processing/gain_control_impl.h
+++ b/webrtc/modules/audio_processing/gain_control_impl.h
@@ -13,19 +13,23 @@
#include <vector>
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/thread_annotations.h"
+#include "webrtc/common_audio/swap_queue.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/modules/audio_processing/processing_component.h"
namespace webrtc {
class AudioBuffer;
-class CriticalSectionWrapper;
class GainControlImpl : public GainControl,
public ProcessingComponent {
public:
GainControlImpl(const AudioProcessing* apm,
- CriticalSectionWrapper* crit);
+ rtc::CriticalSection* crit_render,
+ rtc::CriticalSection* crit_capture);
virtual ~GainControlImpl();
int ProcessRenderAudio(AudioBuffer* audio);
@@ -41,6 +45,9 @@ class GainControlImpl : public GainControl,
bool is_limiter_enabled() const override;
Mode mode() const override;
+ // Reads render-side data that has been queued during the render call.
+ void ReadQueuedRenderData();
+
private:
// GainControl implementation.
int Enable(bool enable) override;
@@ -61,21 +68,37 @@ class GainControlImpl : public GainControl,
int InitializeHandle(void* handle) const override;
int ConfigureHandle(void* handle) const override;
void DestroyHandle(void* handle) const override;
- int num_handles_required() const override;
+ size_t num_handles_required() const override;
int GetHandleError(void* handle) const override;
+ void AllocateRenderQueue();
+
+ // Not guarded as its public API is thread safe.
const AudioProcessing* apm_;
- CriticalSectionWrapper* crit_;
- Mode mode_;
- int minimum_capture_level_;
- int maximum_capture_level_;
- bool limiter_enabled_;
- int target_level_dbfs_;
- int compression_gain_db_;
- std::vector<int> capture_levels_;
- int analog_capture_level_;
- bool was_analog_level_set_;
- bool stream_is_saturated_;
+
+ rtc::CriticalSection* const crit_render_ ACQUIRED_BEFORE(crit_capture_);
+ rtc::CriticalSection* const crit_capture_;
+
+ Mode mode_ GUARDED_BY(crit_capture_);
+ int minimum_capture_level_ GUARDED_BY(crit_capture_);
+ int maximum_capture_level_ GUARDED_BY(crit_capture_);
+ bool limiter_enabled_ GUARDED_BY(crit_capture_);
+ int target_level_dbfs_ GUARDED_BY(crit_capture_);
+ int compression_gain_db_ GUARDED_BY(crit_capture_);
+ std::vector<int> capture_levels_ GUARDED_BY(crit_capture_);
+ int analog_capture_level_ GUARDED_BY(crit_capture_);
+ bool was_analog_level_set_ GUARDED_BY(crit_capture_);
+ bool stream_is_saturated_ GUARDED_BY(crit_capture_);
+
+ size_t render_queue_element_max_size_ GUARDED_BY(crit_render_)
+ GUARDED_BY(crit_capture_);
+ std::vector<int16_t> render_queue_buffer_ GUARDED_BY(crit_render_);
+ std::vector<int16_t> capture_queue_buffer_ GUARDED_BY(crit_capture_);
+
+ // Lock protection not needed.
+ rtc::scoped_ptr<
+ SwapQueue<std::vector<int16_t>, RenderQueueItemVerifier<int16_t>>>
+ render_signal_queue_;
};
} // namespace webrtc
diff --git a/webrtc/modules/audio_processing/high_pass_filter_impl.cc b/webrtc/modules/audio_processing/high_pass_filter_impl.cc
index 29e482078e..375d58febb 100644
--- a/webrtc/modules/audio_processing/high_pass_filter_impl.cc
+++ b/webrtc/modules/audio_processing/high_pass_filter_impl.cc
@@ -10,159 +10,125 @@
#include "webrtc/modules/audio_processing/high_pass_filter_impl.h"
-#include <assert.h>
-
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
#include "webrtc/modules/audio_processing/audio_buffer.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/typedefs.h"
-
namespace webrtc {
namespace {
-const int16_t kFilterCoefficients8kHz[5] =
- {3798, -7596, 3798, 7807, -3733};
-
-const int16_t kFilterCoefficients[5] =
- {4012, -8024, 4012, 8002, -3913};
-
-struct FilterState {
- int16_t y[4];
- int16_t x[2];
- const int16_t* ba;
-};
-
-int InitializeFilter(FilterState* hpf, int sample_rate_hz) {
- assert(hpf != NULL);
+const int16_t kFilterCoefficients8kHz[5] = {3798, -7596, 3798, 7807, -3733};
+const int16_t kFilterCoefficients[5] = {4012, -8024, 4012, 8002, -3913};
+} // namespace
- if (sample_rate_hz == AudioProcessing::kSampleRate8kHz) {
- hpf->ba = kFilterCoefficients8kHz;
- } else {
- hpf->ba = kFilterCoefficients;
+class HighPassFilterImpl::BiquadFilter {
+ public:
+ explicit BiquadFilter(int sample_rate_hz) :
+ ba_(sample_rate_hz == AudioProcessing::kSampleRate8kHz ?
+ kFilterCoefficients8kHz : kFilterCoefficients)
+ {
+ Reset();
}
- WebRtcSpl_MemSetW16(hpf->x, 0, 2);
- WebRtcSpl_MemSetW16(hpf->y, 0, 4);
-
- return AudioProcessing::kNoError;
-}
-
-int Filter(FilterState* hpf, int16_t* data, size_t length) {
- assert(hpf != NULL);
-
- int32_t tmp_int32 = 0;
- int16_t* y = hpf->y;
- int16_t* x = hpf->x;
- const int16_t* ba = hpf->ba;
-
- for (size_t i = 0; i < length; i++) {
- // y[i] = b[0] * x[i] + b[1] * x[i-1] + b[2] * x[i-2]
- // + -a[1] * y[i-1] + -a[2] * y[i-2];
-
- tmp_int32 = y[1] * ba[3]; // -a[1] * y[i-1] (low part)
- tmp_int32 += y[3] * ba[4]; // -a[2] * y[i-2] (low part)
- tmp_int32 = (tmp_int32 >> 15);
- tmp_int32 += y[0] * ba[3]; // -a[1] * y[i-1] (high part)
- tmp_int32 += y[2] * ba[4]; // -a[2] * y[i-2] (high part)
- tmp_int32 = (tmp_int32 << 1);
-
- tmp_int32 += data[i] * ba[0]; // b[0]*x[0]
- tmp_int32 += x[0] * ba[1]; // b[1]*x[i-1]
- tmp_int32 += x[1] * ba[2]; // b[2]*x[i-2]
-
- // Update state (input part)
- x[1] = x[0];
- x[0] = data[i];
-
- // Update state (filtered part)
- y[2] = y[0];
- y[3] = y[1];
- y[0] = static_cast<int16_t>(tmp_int32 >> 13);
- y[1] = static_cast<int16_t>(
- (tmp_int32 - (static_cast<int32_t>(y[0]) << 13)) << 2);
-
- // Rounding in Q12, i.e. add 2^11
- tmp_int32 += 2048;
-
- // Saturate (to 2^27) so that the HP filtered signal does not overflow
- tmp_int32 = WEBRTC_SPL_SAT(static_cast<int32_t>(134217727),
- tmp_int32,
- static_cast<int32_t>(-134217728));
-
- // Convert back to Q0 and use rounding.
- data[i] = (int16_t)(tmp_int32 >> 12);
+ void Reset() {
+ std::memset(x_, 0, sizeof(x_));
+ std::memset(y_, 0, sizeof(y_));
}
- return AudioProcessing::kNoError;
-}
-} // namespace
+ void Process(int16_t* data, size_t length) {
+ const int16_t* const ba = ba_;
+ int16_t* x = x_;
+ int16_t* y = y_;
+ int32_t tmp_int32 = 0;
+
+ for (size_t i = 0; i < length; i++) {
+ // y[i] = b[0] * x[i] + b[1] * x[i-1] + b[2] * x[i-2]
+ // + -a[1] * y[i-1] + -a[2] * y[i-2];
+
+ tmp_int32 = y[1] * ba[3]; // -a[1] * y[i-1] (low part)
+ tmp_int32 += y[3] * ba[4]; // -a[2] * y[i-2] (low part)
+ tmp_int32 = (tmp_int32 >> 15);
+ tmp_int32 += y[0] * ba[3]; // -a[1] * y[i-1] (high part)
+ tmp_int32 += y[2] * ba[4]; // -a[2] * y[i-2] (high part)
+ tmp_int32 = (tmp_int32 << 1);
+
+ tmp_int32 += data[i] * ba[0]; // b[0] * x[0]
+ tmp_int32 += x[0] * ba[1]; // b[1] * x[i-1]
+ tmp_int32 += x[1] * ba[2]; // b[2] * x[i-2]
+
+ // Update state (input part).
+ x[1] = x[0];
+ x[0] = data[i];
+
+ // Update state (filtered part).
+ y[2] = y[0];
+ y[3] = y[1];
+ y[0] = static_cast<int16_t>(tmp_int32 >> 13);
+ y[1] = static_cast<int16_t>(
+ (tmp_int32 - (static_cast<int32_t>(y[0]) << 13)) << 2);
+
+ // Rounding in Q12, i.e. add 2^11.
+ tmp_int32 += 2048;
+
+ // Saturate (to 2^27) so that the HP filtered signal does not overflow.
+ tmp_int32 = WEBRTC_SPL_SAT(static_cast<int32_t>(134217727),
+ tmp_int32,
+ static_cast<int32_t>(-134217728));
+
+ // Convert back to Q0 and use rounding.
+ data[i] = static_cast<int16_t>(tmp_int32 >> 12);
+ }
+ }
-typedef FilterState Handle;
+ private:
+ const int16_t* const ba_ = nullptr;
+ int16_t x_[2];
+ int16_t y_[4];
+};
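The loop above is a direct-form-I biquad computed in split Q-format fixed point; as a cross-check, a floating-point reference of the same difference equation (a sketch, with the Q12 coefficient scaling stated in the comments):

#include <cstddef>

// y[n] = b0*x[n] + b1*x[n-1] + b2*x[n-2] - a1*y[n-1] - a2*y[n-2].
// The integer tables {b0, b1, b2, -a1, -a2} above are Q12: divide
// each entry by 4096.0 to obtain the floating-point coefficients.
struct BiquadRef {
  double b0, b1, b2, a1, a2;
  double x1 = 0, x2 = 0, y1 = 0, y2 = 0;
  void Process(float* data, size_t length) {
    for (size_t i = 0; i < length; ++i) {
      const double y = b0 * data[i] + b1 * x1 + b2 * x2 - a1 * y1 - a2 * y2;
      x2 = x1; x1 = data[i];
      y2 = y1; y1 = y;
      data[i] = static_cast<float>(y);
    }
  }
};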
-HighPassFilterImpl::HighPassFilterImpl(const AudioProcessing* apm,
- CriticalSectionWrapper* crit)
- : ProcessingComponent(),
- apm_(apm),
- crit_(crit) {}
+HighPassFilterImpl::HighPassFilterImpl(rtc::CriticalSection* crit)
+ : crit_(crit) {
+ RTC_DCHECK(crit_);
+}
HighPassFilterImpl::~HighPassFilterImpl() {}
-int HighPassFilterImpl::ProcessCaptureAudio(AudioBuffer* audio) {
- int err = apm_->kNoError;
-
- if (!is_component_enabled()) {
- return apm_->kNoError;
+void HighPassFilterImpl::Initialize(size_t channels, int sample_rate_hz) {
+ std::vector<rtc::scoped_ptr<BiquadFilter>> new_filters(channels);
+ for (size_t i = 0; i < channels; i++) {
+ new_filters[i].reset(new BiquadFilter(sample_rate_hz));
}
+ rtc::CritScope cs(crit_);
+ filters_.swap(new_filters);
+}
- assert(audio->num_frames_per_band() <= 160);
-
- for (int i = 0; i < num_handles(); i++) {
- Handle* my_handle = static_cast<Handle*>(handle(i));
- err = Filter(my_handle,
- audio->split_bands(i)[kBand0To8kHz],
- audio->num_frames_per_band());
-
- if (err != apm_->kNoError) {
- return GetHandleError(my_handle);
- }
+void HighPassFilterImpl::ProcessCaptureAudio(AudioBuffer* audio) {
+ RTC_DCHECK(audio);
+ rtc::CritScope cs(crit_);
+ if (!enabled_) {
+ return;
}
- return apm_->kNoError;
+ RTC_DCHECK_GE(160u, audio->num_frames_per_band());
+ RTC_DCHECK_EQ(filters_.size(), audio->num_channels());
+ for (size_t i = 0; i < filters_.size(); i++) {
+ filters_[i]->Process(audio->split_bands(i)[kBand0To8kHz],
+ audio->num_frames_per_band());
+ }
}
int HighPassFilterImpl::Enable(bool enable) {
- CriticalSectionScoped crit_scoped(crit_);
- return EnableComponent(enable);
+ rtc::CritScope cs(crit_);
+ if (!enabled_ && enable) {
+ for (auto& filter : filters_) {
+ filter->Reset();
+ }
+ }
+ enabled_ = enable;
+ return AudioProcessing::kNoError;
}
bool HighPassFilterImpl::is_enabled() const {
- return is_component_enabled();
-}
-
-void* HighPassFilterImpl::CreateHandle() const {
- return new FilterState;
-}
-
-void HighPassFilterImpl::DestroyHandle(void* handle) const {
- delete static_cast<Handle*>(handle);
-}
-
-int HighPassFilterImpl::InitializeHandle(void* handle) const {
- return InitializeFilter(static_cast<Handle*>(handle),
- apm_->proc_sample_rate_hz());
-}
-
-int HighPassFilterImpl::ConfigureHandle(void* /*handle*/) const {
- return apm_->kNoError; // Not configurable.
-}
-
-int HighPassFilterImpl::num_handles_required() const {
- return apm_->num_output_channels();
-}
-
-int HighPassFilterImpl::GetHandleError(void* handle) const {
- // The component has no detailed errors.
- assert(handle != NULL);
- return apm_->kUnspecifiedError;
+ rtc::CritScope cs(crit_);
+ return enabled_;
}
} // namespace webrtc
diff --git a/webrtc/modules/audio_processing/high_pass_filter_impl.h b/webrtc/modules/audio_processing/high_pass_filter_impl.h
index 90b393e903..0e985bac7a 100644
--- a/webrtc/modules/audio_processing/high_pass_filter_impl.h
+++ b/webrtc/modules/audio_processing/high_pass_filter_impl.h
@@ -11,39 +11,34 @@
#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_HIGH_PASS_FILTER_IMPL_H_
#define WEBRTC_MODULES_AUDIO_PROCESSING_HIGH_PASS_FILTER_IMPL_H_
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
-#include "webrtc/modules/audio_processing/processing_component.h"
namespace webrtc {
class AudioBuffer;
-class CriticalSectionWrapper;
-class HighPassFilterImpl : public HighPassFilter,
- public ProcessingComponent {
+class HighPassFilterImpl : public HighPassFilter {
public:
- HighPassFilterImpl(const AudioProcessing* apm, CriticalSectionWrapper* crit);
- virtual ~HighPassFilterImpl();
+ explicit HighPassFilterImpl(rtc::CriticalSection* crit);
+ ~HighPassFilterImpl() override;
- int ProcessCaptureAudio(AudioBuffer* audio);
+ // TODO(peah): Fold into ctor, once public API is removed.
+ void Initialize(size_t channels, int sample_rate_hz);
+ void ProcessCaptureAudio(AudioBuffer* audio);
// HighPassFilter implementation.
+ int Enable(bool enable) override;
bool is_enabled() const override;
private:
- // HighPassFilter implementation.
- int Enable(bool enable) override;
-
- // ProcessingComponent implementation.
- void* CreateHandle() const override;
- int InitializeHandle(void* handle) const override;
- int ConfigureHandle(void* handle) const override;
- void DestroyHandle(void* handle) const override;
- int num_handles_required() const override;
- int GetHandleError(void* handle) const override;
-
- const AudioProcessing* apm_;
- CriticalSectionWrapper* crit_;
+ class BiquadFilter;
+ rtc::CriticalSection* const crit_ = nullptr;
+ bool enabled_ GUARDED_BY(crit_) = false;
+ std::vector<rtc::scoped_ptr<BiquadFilter>> filters_ GUARDED_BY(crit_);
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(HighPassFilterImpl);
};
} // namespace webrtc
diff --git a/webrtc/modules/audio_processing/include/audio_processing.h b/webrtc/modules/audio_processing/include/audio_processing.h
index c8ddc6a483..9a3a4b32d5 100644
--- a/webrtc/modules/audio_processing/include/audio_processing.h
+++ b/webrtc/modules/audio_processing/include/audio_processing.h
@@ -65,6 +65,7 @@ class VoiceDetection;
struct ExtendedFilter {
ExtendedFilter() : enabled(false) {}
explicit ExtendedFilter(bool enabled) : enabled(enabled) {}
+ static const ConfigOptionID identifier = ConfigOptionID::kExtendedFilter;
bool enabled;
};
@@ -76,6 +77,7 @@ struct ExtendedFilter {
struct DelayAgnostic {
DelayAgnostic() : enabled(false) {}
explicit DelayAgnostic(bool enabled) : enabled(enabled) {}
+ static const ConfigOptionID identifier = ConfigOptionID::kDelayAgnostic;
bool enabled;
};
@@ -96,6 +98,7 @@ struct ExperimentalAgc {
: enabled(enabled), startup_min_volume(kAgcStartupMinVolume) {}
ExperimentalAgc(bool enabled, int startup_min_volume)
: enabled(enabled), startup_min_volume(startup_min_volume) {}
+ static const ConfigOptionID identifier = ConfigOptionID::kExperimentalAgc;
bool enabled;
int startup_min_volume;
};
@@ -105,6 +108,7 @@ struct ExperimentalAgc {
struct ExperimentalNs {
ExperimentalNs() : enabled(false) {}
explicit ExperimentalNs(bool enabled) : enabled(enabled) {}
+ static const ConfigOptionID identifier = ConfigOptionID::kExperimentalNs;
bool enabled;
};
@@ -127,6 +131,7 @@ struct Beamforming {
: enabled(enabled),
array_geometry(array_geometry),
target_direction(target_direction) {}
+ static const ConfigOptionID identifier = ConfigOptionID::kBeamforming;
const bool enabled;
const std::vector<Point> array_geometry;
const SphericalPointf target_direction;
@@ -141,6 +146,7 @@ struct Beamforming {
struct Intelligibility {
Intelligibility() : enabled(false) {}
explicit Intelligibility(bool enabled) : enabled(enabled) {}
+ static const ConfigOptionID identifier = ConfigOptionID::kIntelligibility;
bool enabled;
};
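Each option struct now carries a static identifier; a hypothetical sketch of how a heterogeneous config store could key on it (an assumption about intent, not the actual webrtc::Config implementation):

#include <map>
#include <memory>
#include <utility>

enum class ConfigOptionID { kExtendedFilter, kDelayAgnostic /* ... */ };

struct BaseOption { virtual ~BaseOption() = default; };
template <typename T> struct Option : BaseOption { T value; };

class TinyConfig {
 public:
  template <typename T>
  void Set(T value) {
    auto opt = std::unique_ptr<Option<T>>(new Option<T>());
    opt->value = std::move(value);
    items_[T::identifier] = std::move(opt);  // keyed by the static member
  }
  template <typename T>
  const T* Get() const {
    auto it = items_.find(T::identifier);
    if (it == items_.end()) return nullptr;
    return &static_cast<Option<T>*>(it->second.get())->value;
  }
 private:
  std::map<ConfigOptionID, std::unique_ptr<BaseOption>> items_;
};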
@@ -279,13 +285,18 @@ class AudioProcessing {
// ensures the options are applied immediately.
virtual void SetExtraOptions(const Config& config) = 0;
+ // TODO(peah): Remove after voice engine no longer requires it to resample
+ // the reverse stream to the forward rate.
+ virtual int input_sample_rate_hz() const = 0;
+
// TODO(ajm): Only intended for internal use. Make private and friend the
// necessary classes?
virtual int proc_sample_rate_hz() const = 0;
virtual int proc_split_sample_rate_hz() const = 0;
- virtual int num_input_channels() const = 0;
- virtual int num_output_channels() const = 0;
- virtual int num_reverse_channels() const = 0;
+ virtual size_t num_input_channels() const = 0;
+ virtual size_t num_proc_channels() const = 0;
+ virtual size_t num_output_channels() const = 0;
+ virtual size_t num_reverse_channels() const = 0;
// Set to true when the output of AudioProcessing will be muted or in some
// other way not used. Ideally, the captured audio would still be processed,
@@ -497,7 +508,7 @@ class StreamConfig {
// is true, the last channel in any corresponding list of
// channels is the keyboard channel.
StreamConfig(int sample_rate_hz = 0,
- int num_channels = 0,
+ size_t num_channels = 0,
bool has_keyboard = false)
: sample_rate_hz_(sample_rate_hz),
num_channels_(num_channels),
@@ -508,14 +519,14 @@ class StreamConfig {
sample_rate_hz_ = value;
num_frames_ = calculate_frames(value);
}
- void set_num_channels(int value) { num_channels_ = value; }
+ void set_num_channels(size_t value) { num_channels_ = value; }
void set_has_keyboard(bool value) { has_keyboard_ = value; }
int sample_rate_hz() const { return sample_rate_hz_; }
// The number of channels in the stream, not including the keyboard channel if
// present.
- int num_channels() const { return num_channels_; }
+ size_t num_channels() const { return num_channels_; }
bool has_keyboard() const { return has_keyboard_; }
size_t num_frames() const { return num_frames_; }
@@ -536,7 +547,7 @@ class StreamConfig {
}
int sample_rate_hz_;
- int num_channels_;
+ size_t num_channels_;
bool has_keyboard_;
size_t num_frames_;
};
diff --git a/webrtc/modules/audio_processing/include/mock_audio_processing.h b/webrtc/modules/audio_processing/include/mock_audio_processing.h
index 4ff52baf1c..9e1f2d5861 100644
--- a/webrtc/modules/audio_processing/include/mock_audio_processing.h
+++ b/webrtc/modules/audio_processing/include/mock_audio_processing.h
@@ -201,11 +201,11 @@ class MockAudioProcessing : public AudioProcessing {
MOCK_CONST_METHOD0(proc_split_sample_rate_hz,
int());
MOCK_CONST_METHOD0(num_input_channels,
- int());
+ size_t());
MOCK_CONST_METHOD0(num_output_channels,
- int());
+ size_t());
MOCK_CONST_METHOD0(num_reverse_channels,
- int());
+ size_t());
MOCK_METHOD1(set_output_will_be_muted,
void(bool muted));
MOCK_CONST_METHOD0(output_will_be_muted,
diff --git a/webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer.cc b/webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer.cc
index d014ce060c..fe964aba8c 100644
--- a/webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer.cc
+++ b/webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer.cc
@@ -54,12 +54,12 @@ IntelligibilityEnhancer::TransformCallback::TransformCallback(
void IntelligibilityEnhancer::TransformCallback::ProcessAudioBlock(
const complex<float>* const* in_block,
- int in_channels,
+ size_t in_channels,
size_t frames,
- int /* out_channels */,
+ size_t /* out_channels */,
complex<float>* const* out_block) {
RTC_DCHECK_EQ(parent_->freqs_, frames);
- for (int i = 0; i < in_channels; ++i) {
+ for (size_t i = 0; i < in_channels; ++i) {
parent_->DispatchAudio(source_, in_block[i], out_block[i]);
}
}
@@ -129,7 +129,7 @@ IntelligibilityEnhancer::IntelligibilityEnhancer(const Config& config)
void IntelligibilityEnhancer::ProcessRenderAudio(float* const* audio,
int sample_rate_hz,
- int num_channels) {
+ size_t num_channels) {
RTC_CHECK_EQ(sample_rate_hz_, sample_rate_hz);
RTC_CHECK_EQ(num_render_channels_, num_channels);
@@ -138,7 +138,7 @@ void IntelligibilityEnhancer::ProcessRenderAudio(float* const* audio,
}
if (active_) {
- for (int i = 0; i < num_render_channels_; ++i) {
+ for (size_t i = 0; i < num_render_channels_; ++i) {
memcpy(audio[i], temp_render_out_buffer_.channels()[i],
chunk_length_ * sizeof(**audio));
}
@@ -147,7 +147,7 @@ void IntelligibilityEnhancer::ProcessRenderAudio(float* const* audio,
void IntelligibilityEnhancer::AnalyzeCaptureAudio(float* const* audio,
int sample_rate_hz,
- int num_channels) {
+ size_t num_channels) {
RTC_CHECK_EQ(sample_rate_hz_, sample_rate_hz);
RTC_CHECK_EQ(num_capture_channels_, num_channels);
diff --git a/webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer.h b/webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer.h
index 1e9e35ac2a..1eb22342ad 100644
--- a/webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer.h
+++ b/webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer.h
@@ -47,8 +47,8 @@ class IntelligibilityEnhancer {
gain_change_limit(0.1f),
rho(0.02f) {}
int sample_rate_hz;
- int num_capture_channels;
- int num_render_channels;
+ size_t num_capture_channels;
+ size_t num_render_channels;
intelligibility::VarianceArray::StepType var_type;
float var_decay_rate;
size_t var_window_size;
@@ -63,12 +63,12 @@ class IntelligibilityEnhancer {
// Reads and processes chunk of noise stream in time domain.
void AnalyzeCaptureAudio(float* const* audio,
int sample_rate_hz,
- int num_channels);
+ size_t num_channels);
// Reads chunk of speech in time domain and updates with modified signal.
void ProcessRenderAudio(float* const* audio,
int sample_rate_hz,
- int num_channels);
+ size_t num_channels);
bool active() const;
private:
@@ -85,9 +85,9 @@ class IntelligibilityEnhancer {
// All in frequency domain, receives input |in_block|, applies
// intelligibility enhancement, and writes result to |out_block|.
void ProcessAudioBlock(const std::complex<float>* const* in_block,
- int in_channels,
+ size_t in_channels,
size_t frames,
- int out_channels,
+ size_t out_channels,
std::complex<float>* const* out_block) override;
private:
@@ -144,8 +144,8 @@ class IntelligibilityEnhancer {
const size_t bank_size_; // Num ERB filters.
const int sample_rate_hz_;
const int erb_resolution_;
- const int num_capture_channels_;
- const int num_render_channels_;
+ const size_t num_capture_channels_;
+ const size_t num_render_channels_;
const int analysis_rate_; // Num blocks before gains recalculated.
const bool active_; // Whether render gains are being updated.
diff --git a/webrtc/modules/audio_processing/intelligibility/test/intelligibility_proc.cc b/webrtc/modules/audio_processing/intelligibility/test/intelligibility_proc.cc
index 27d0ab48bb..4d2f5f4c5d 100644
--- a/webrtc/modules/audio_processing/intelligibility/test/intelligibility_proc.cc
+++ b/webrtc/modules/audio_processing/intelligibility/test/intelligibility_proc.cc
@@ -68,7 +68,7 @@ DEFINE_string(out_file,
"Enhanced output. Use '-' to "
"play through aplay immediately.");
-const int kNumChannels = 1;
+const size_t kNumChannels = 1;
// void function for gtest
void void_main(int argc, char* argv[]) {
diff --git a/webrtc/modules/audio_processing/level_estimator_impl.cc b/webrtc/modules/audio_processing/level_estimator_impl.cc
index 35fe697c2d..187873e33e 100644
--- a/webrtc/modules/audio_processing/level_estimator_impl.cc
+++ b/webrtc/modules/audio_processing/level_estimator_impl.cc
@@ -11,76 +11,55 @@
#include "webrtc/modules/audio_processing/level_estimator_impl.h"
#include "webrtc/modules/audio_processing/audio_buffer.h"
-#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/modules/audio_processing/rms_level.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
namespace webrtc {
-LevelEstimatorImpl::LevelEstimatorImpl(const AudioProcessing* apm,
- CriticalSectionWrapper* crit)
- : ProcessingComponent(),
- crit_(crit) {}
+LevelEstimatorImpl::LevelEstimatorImpl(rtc::CriticalSection* crit)
+ : crit_(crit), rms_(new RMSLevel()) {
+ RTC_DCHECK(crit);
+}
LevelEstimatorImpl::~LevelEstimatorImpl() {}
-int LevelEstimatorImpl::ProcessStream(AudioBuffer* audio) {
- if (!is_component_enabled()) {
- return AudioProcessing::kNoError;
- }
+void LevelEstimatorImpl::Initialize() {
+ rtc::CritScope cs(crit_);
+ rms_->Reset();
+}
- RMSLevel* rms_level = static_cast<RMSLevel*>(handle(0));
- for (int i = 0; i < audio->num_channels(); ++i) {
- rms_level->Process(audio->channels_const()[i],
- audio->num_frames());
+void LevelEstimatorImpl::ProcessStream(AudioBuffer* audio) {
+ RTC_DCHECK(audio);
+ rtc::CritScope cs(crit_);
+ if (!enabled_) {
+ return;
}
- return AudioProcessing::kNoError;
+ for (size_t i = 0; i < audio->num_channels(); i++) {
+ rms_->Process(audio->channels_const()[i], audio->num_frames());
+ }
}
int LevelEstimatorImpl::Enable(bool enable) {
- CriticalSectionScoped crit_scoped(crit_);
- return EnableComponent(enable);
+ rtc::CritScope cs(crit_);
+ if (enable && !enabled_) {
+ rms_->Reset();
+ }
+ enabled_ = enable;
+ return AudioProcessing::kNoError;
}
bool LevelEstimatorImpl::is_enabled() const {
- return is_component_enabled();
+ rtc::CritScope cs(crit_);
+ return enabled_;
}
int LevelEstimatorImpl::RMS() {
- if (!is_component_enabled()) {
+ rtc::CritScope cs(crit_);
+ if (!enabled_) {
return AudioProcessing::kNotEnabledError;
}
- RMSLevel* rms_level = static_cast<RMSLevel*>(handle(0));
- return rms_level->RMS();
-}
-
-// The ProcessingComponent implementation is pretty weird in this class since
-// we have only a single instance of the trivial underlying component.
-void* LevelEstimatorImpl::CreateHandle() const {
- return new RMSLevel;
-}
-
-void LevelEstimatorImpl::DestroyHandle(void* handle) const {
- delete static_cast<RMSLevel*>(handle);
+ return rms_->RMS();
}
-
-int LevelEstimatorImpl::InitializeHandle(void* handle) const {
- static_cast<RMSLevel*>(handle)->Reset();
- return AudioProcessing::kNoError;
-}
-
-int LevelEstimatorImpl::ConfigureHandle(void* /*handle*/) const {
- return AudioProcessing::kNoError;
-}
-
-int LevelEstimatorImpl::num_handles_required() const {
- return 1;
-}
-
-int LevelEstimatorImpl::GetHandleError(void* /*handle*/) const {
- return AudioProcessing::kUnspecifiedError;
-}
-
} // namespace webrtc
diff --git a/webrtc/modules/audio_processing/level_estimator_impl.h b/webrtc/modules/audio_processing/level_estimator_impl.h
index 0d0050c7e7..4401da37e4 100644
--- a/webrtc/modules/audio_processing/level_estimator_impl.h
+++ b/webrtc/modules/audio_processing/level_estimator_impl.h
@@ -11,43 +11,36 @@
#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_LEVEL_ESTIMATOR_IMPL_H_
#define WEBRTC_MODULES_AUDIO_PROCESSING_LEVEL_ESTIMATOR_IMPL_H_
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
-#include "webrtc/modules/audio_processing/processing_component.h"
-#include "webrtc/modules/audio_processing/rms_level.h"
namespace webrtc {
class AudioBuffer;
-class CriticalSectionWrapper;
+class RMSLevel;
-class LevelEstimatorImpl : public LevelEstimator,
- public ProcessingComponent {
+class LevelEstimatorImpl : public LevelEstimator {
public:
- LevelEstimatorImpl(const AudioProcessing* apm,
- CriticalSectionWrapper* crit);
- virtual ~LevelEstimatorImpl();
+ explicit LevelEstimatorImpl(rtc::CriticalSection* crit);
+ ~LevelEstimatorImpl() override;
- int ProcessStream(AudioBuffer* audio);
+ // TODO(peah): Fold into ctor, once public API is removed.
+ void Initialize();
+ void ProcessStream(AudioBuffer* audio);
// LevelEstimator implementation.
- bool is_enabled() const override;
-
- private:
- // LevelEstimator implementation.
int Enable(bool enable) override;
+ bool is_enabled() const override;
int RMS() override;
- // ProcessingComponent implementation.
- void* CreateHandle() const override;
- int InitializeHandle(void* handle) const override;
- int ConfigureHandle(void* handle) const override;
- void DestroyHandle(void* handle) const override;
- int num_handles_required() const override;
- int GetHandleError(void* handle) const override;
-
- CriticalSectionWrapper* crit_;
+ private:
+ rtc::CriticalSection* const crit_ = nullptr;
+ bool enabled_ GUARDED_BY(crit_) = false;
+ rtc::scoped_ptr<RMSLevel> rms_ GUARDED_BY(crit_);
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(LevelEstimatorImpl);
};
-
} // namespace webrtc
#endif // WEBRTC_MODULES_AUDIO_PROCESSING_LEVEL_ESTIMATOR_IMPL_H_
diff --git a/webrtc/modules/audio_processing/logging/aec_logging.h b/webrtc/modules/audio_processing/logging/aec_logging.h
index 3cf9ff89ed..b062913be2 100644
--- a/webrtc/modules/audio_processing/logging/aec_logging.h
+++ b/webrtc/modules/audio_processing/logging/aec_logging.h
@@ -43,6 +43,20 @@
(void) fwrite(data, data_size, 1, file); \
} while (0)
+// Dumps a raw scalar int32 to file.
+#define RTC_AEC_DEBUG_RAW_WRITE_SCALAR_INT32(file, data) \
+ do { \
+ int32_t value_to_store = data; \
+ (void) fwrite(&value_to_store, sizeof(value_to_store), 1, file); \
+ } while (0)
+
+// Dumps a raw scalar double to file.
+#define RTC_AEC_DEBUG_RAW_WRITE_SCALAR_DOUBLE(file, data) \
+ do { \
+ double value_to_store = data; \
+ (void) fwrite(&value_to_store, sizeof(value_to_store), 1, file); \
+ } while (0)
+
// Opens a raw data file for writing using the specified sample rate.
#define RTC_AEC_DEBUG_RAW_OPEN(name, instance_counter, file) \
do { \
@@ -73,6 +87,14 @@
do { \
} while (0)
+#define RTC_AEC_DEBUG_RAW_WRITE_SCALAR_INT32(file, data) \
+ do { \
+ } while (0)
+
+#define RTC_AEC_DEBUG_RAW_WRITE_SCALAR_DOUBLE(file, data) \
+ do { \
+ } while (0)
+
#define RTC_AEC_DEBUG_RAW_OPEN(file, name, instance_counter) \
do { \
} while (0)
diff --git a/webrtc/modules/audio_processing/noise_suppression_impl.cc b/webrtc/modules/audio_processing/noise_suppression_impl.cc
index 65ec3c445e..de7e856676 100644
--- a/webrtc/modules/audio_processing/noise_suppression_impl.cc
+++ b/webrtc/modules/audio_processing/noise_suppression_impl.cc
@@ -10,172 +10,166 @@
#include "webrtc/modules/audio_processing/noise_suppression_impl.h"
-#include <assert.h>
-
#include "webrtc/modules/audio_processing/audio_buffer.h"
#if defined(WEBRTC_NS_FLOAT)
-#include "webrtc/modules/audio_processing/ns/include/noise_suppression.h"
+#include "webrtc/modules/audio_processing/ns/noise_suppression.h"
+#define NS_CREATE WebRtcNs_Create
+#define NS_FREE WebRtcNs_Free
+#define NS_INIT WebRtcNs_Init
+#define NS_SET_POLICY WebRtcNs_set_policy
+typedef NsHandle NsState;
#elif defined(WEBRTC_NS_FIXED)
-#include "webrtc/modules/audio_processing/ns/include/noise_suppression_x.h"
+#include "webrtc/modules/audio_processing/ns/noise_suppression_x.h"
+#define NS_CREATE WebRtcNsx_Create
+#define NS_FREE WebRtcNsx_Free
+#define NS_INIT WebRtcNsx_Init
+#define NS_SET_POLICY WebRtcNsx_set_policy
+typedef NsxHandle NsState;
#endif
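The macro block above resolves the float/fixed split once, so the rest of the file compiles against a single NsState name; the same pattern in miniature, with hypothetical backends standing in for WebRtcNs_*/WebRtcNsx_*:

// Hypothetical backends; the real code maps these to WebRtcNs_Create
// and WebRtcNsx_Create via the build-selected macro set.
struct FloatState {};
struct FixedState {};
inline FloatState* FloatCreate() { return new FloatState(); }
inline FixedState* FixedCreate() { return new FixedState(); }

#if defined(USE_FLOAT_BACKEND)  // illustrative flag, not a WebRTC define
typedef FloatState BackendState;
#define BACKEND_CREATE FloatCreate
#else
typedef FixedState BackendState;
#define BACKEND_CREATE FixedCreate
#endif

// Every call site below this point names only BackendState/BACKEND_CREATE.
inline BackendState* MakeBackend() { return BACKEND_CREATE(); }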
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-
namespace webrtc {
-
-#if defined(WEBRTC_NS_FLOAT)
-typedef NsHandle Handle;
-#elif defined(WEBRTC_NS_FIXED)
-typedef NsxHandle Handle;
-#endif
-
-namespace {
-int MapSetting(NoiseSuppression::Level level) {
- switch (level) {
- case NoiseSuppression::kLow:
- return 0;
- case NoiseSuppression::kModerate:
- return 1;
- case NoiseSuppression::kHigh:
- return 2;
- case NoiseSuppression::kVeryHigh:
- return 3;
+class NoiseSuppressionImpl::Suppressor {
+ public:
+ explicit Suppressor(int sample_rate_hz) {
+ state_ = NS_CREATE();
+ RTC_CHECK(state_);
+ int error = NS_INIT(state_, sample_rate_hz);
+ RTC_DCHECK_EQ(0, error);
+ }
+ ~Suppressor() {
+ NS_FREE(state_);
}
- assert(false);
- return -1;
+ NsState* state() { return state_; }
+ private:
+ NsState* state_ = nullptr;
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(Suppressor);
+};
+
+NoiseSuppressionImpl::NoiseSuppressionImpl(rtc::CriticalSection* crit)
+ : crit_(crit) {
+ RTC_DCHECK(crit);
}
-} // namespace
-
-NoiseSuppressionImpl::NoiseSuppressionImpl(const AudioProcessing* apm,
- CriticalSectionWrapper* crit)
- : ProcessingComponent(),
- apm_(apm),
- crit_(crit),
- level_(kModerate) {}
NoiseSuppressionImpl::~NoiseSuppressionImpl() {}
-int NoiseSuppressionImpl::AnalyzeCaptureAudio(AudioBuffer* audio) {
-#if defined(WEBRTC_NS_FLOAT)
- if (!is_component_enabled()) {
- return apm_->kNoError;
+void NoiseSuppressionImpl::Initialize(size_t channels, int sample_rate_hz) {
+ rtc::CritScope cs(crit_);
+ channels_ = channels;
+ sample_rate_hz_ = sample_rate_hz;
+ std::vector<rtc::scoped_ptr<Suppressor>> new_suppressors;
+ if (enabled_) {
+ new_suppressors.resize(channels);
+ for (size_t i = 0; i < channels; i++) {
+ new_suppressors[i].reset(new Suppressor(sample_rate_hz));
+ }
}
- assert(audio->num_frames_per_band() <= 160);
- assert(audio->num_channels() == num_handles());
+ suppressors_.swap(new_suppressors);
+ set_level(level_);
+}
- for (int i = 0; i < num_handles(); ++i) {
- Handle* my_handle = static_cast<Handle*>(handle(i));
+void NoiseSuppressionImpl::AnalyzeCaptureAudio(AudioBuffer* audio) {
+ RTC_DCHECK(audio);
+#if defined(WEBRTC_NS_FLOAT)
+ rtc::CritScope cs(crit_);
+ if (!enabled_) {
+ return;
+ }
- WebRtcNs_Analyze(my_handle, audio->split_bands_const_f(i)[kBand0To8kHz]);
+ RTC_DCHECK_GE(160u, audio->num_frames_per_band());
+ RTC_DCHECK_EQ(suppressors_.size(), audio->num_channels());
+ for (size_t i = 0; i < suppressors_.size(); i++) {
+ WebRtcNs_Analyze(suppressors_[i]->state(),
+ audio->split_bands_const_f(i)[kBand0To8kHz]);
}
#endif
- return apm_->kNoError;
}
-int NoiseSuppressionImpl::ProcessCaptureAudio(AudioBuffer* audio) {
- if (!is_component_enabled()) {
- return apm_->kNoError;
+void NoiseSuppressionImpl::ProcessCaptureAudio(AudioBuffer* audio) {
+ RTC_DCHECK(audio);
+ rtc::CritScope cs(crit_);
+ if (!enabled_) {
+ return;
}
- assert(audio->num_frames_per_band() <= 160);
- assert(audio->num_channels() == num_handles());
- for (int i = 0; i < num_handles(); ++i) {
- Handle* my_handle = static_cast<Handle*>(handle(i));
+ RTC_DCHECK_GE(160u, audio->num_frames_per_band());
+ RTC_DCHECK_EQ(suppressors_.size(), audio->num_channels());
+ for (size_t i = 0; i < suppressors_.size(); i++) {
#if defined(WEBRTC_NS_FLOAT)
- WebRtcNs_Process(my_handle,
+ WebRtcNs_Process(suppressors_[i]->state(),
audio->split_bands_const_f(i),
audio->num_bands(),
audio->split_bands_f(i));
#elif defined(WEBRTC_NS_FIXED)
- WebRtcNsx_Process(my_handle,
+ WebRtcNsx_Process(suppressors_[i]->state(),
audio->split_bands_const(i),
audio->num_bands(),
audio->split_bands(i));
#endif
}
- return apm_->kNoError;
}
int NoiseSuppressionImpl::Enable(bool enable) {
- CriticalSectionScoped crit_scoped(crit_);
- return EnableComponent(enable);
+ rtc::CritScope cs(crit_);
+ if (enabled_ != enable) {
+ enabled_ = enable;
+ Initialize(channels_, sample_rate_hz_);
+ }
+ return AudioProcessing::kNoError;
}
bool NoiseSuppressionImpl::is_enabled() const {
- return is_component_enabled();
+ rtc::CritScope cs(crit_);
+ return enabled_;
}
int NoiseSuppressionImpl::set_level(Level level) {
- CriticalSectionScoped crit_scoped(crit_);
- if (MapSetting(level) == -1) {
- return apm_->kBadParameterError;
+ int policy = 1;
+ switch (level) {
+ case NoiseSuppression::kLow:
+ policy = 0;
+ break;
+ case NoiseSuppression::kModerate:
+ policy = 1;
+ break;
+ case NoiseSuppression::kHigh:
+ policy = 2;
+ break;
+ case NoiseSuppression::kVeryHigh:
+ policy = 3;
+ break;
+ default:
+ RTC_NOTREACHED();
}
-
+ rtc::CritScope cs(crit_);
level_ = level;
- return Configure();
+ for (auto& suppressor : suppressors_) {
+ int error = NS_SET_POLICY(suppressor->state(), policy);
+ RTC_DCHECK_EQ(0, error);
+ }
+ return AudioProcessing::kNoError;
}
NoiseSuppression::Level NoiseSuppressionImpl::level() const {
+ rtc::CritScope cs(crit_);
return level_;
}
float NoiseSuppressionImpl::speech_probability() const {
+ rtc::CritScope cs(crit_);
#if defined(WEBRTC_NS_FLOAT)
float probability_average = 0.0f;
- for (int i = 0; i < num_handles(); i++) {
- Handle* my_handle = static_cast<Handle*>(handle(i));
- probability_average += WebRtcNs_prior_speech_probability(my_handle);
+ for (auto& suppressor : suppressors_) {
+ probability_average +=
+ WebRtcNs_prior_speech_probability(suppressor->state());
}
- return probability_average / num_handles();
+ if (!suppressors_.empty()) {
+ probability_average /= suppressors_.size();
+ }
+ return probability_average;
#elif defined(WEBRTC_NS_FIXED)
+ // TODO(peah): Returning error code as a float! Remove this.
// Currently not available for the fixed point implementation.
- return apm_->kUnsupportedFunctionError;
+ return AudioProcessing::kUnsupportedFunctionError;
#endif
}
-
-void* NoiseSuppressionImpl::CreateHandle() const {
-#if defined(WEBRTC_NS_FLOAT)
- return WebRtcNs_Create();
-#elif defined(WEBRTC_NS_FIXED)
- return WebRtcNsx_Create();
-#endif
-}
-
-void NoiseSuppressionImpl::DestroyHandle(void* handle) const {
-#if defined(WEBRTC_NS_FLOAT)
- WebRtcNs_Free(static_cast<Handle*>(handle));
-#elif defined(WEBRTC_NS_FIXED)
- WebRtcNsx_Free(static_cast<Handle*>(handle));
-#endif
-}
-
-int NoiseSuppressionImpl::InitializeHandle(void* handle) const {
-#if defined(WEBRTC_NS_FLOAT)
- return WebRtcNs_Init(static_cast<Handle*>(handle),
- apm_->proc_sample_rate_hz());
-#elif defined(WEBRTC_NS_FIXED)
- return WebRtcNsx_Init(static_cast<Handle*>(handle),
- apm_->proc_sample_rate_hz());
-#endif
-}
-
-int NoiseSuppressionImpl::ConfigureHandle(void* handle) const {
-#if defined(WEBRTC_NS_FLOAT)
- return WebRtcNs_set_policy(static_cast<Handle*>(handle),
- MapSetting(level_));
-#elif defined(WEBRTC_NS_FIXED)
- return WebRtcNsx_set_policy(static_cast<Handle*>(handle),
- MapSetting(level_));
-#endif
-}
-
-int NoiseSuppressionImpl::num_handles_required() const {
- return apm_->num_output_channels();
-}
-
-int NoiseSuppressionImpl::GetHandleError(void* handle) const {
- // The NS has no get_error() function.
- assert(handle != NULL);
- return apm_->kUnspecifiedError;
-}
} // namespace webrtc
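
The hunks above replace the ProcessingComponent handle plumbing with per-channel Suppressor objects guarded by an rtc::CriticalSection. A minimal usage sketch of the resulting flow (illustrative only, not part of the patch; the lock and channel layout would normally come from the owning AudioProcessing instance):

    rtc::CriticalSection crit;
    NoiseSuppressionImpl ns(&crit);
    ns.Initialize(2 /* channels */, 32000 /* sample_rate_hz */);
    ns.Enable(true);                        // Toggling re-runs Initialize().
    ns.set_level(NoiseSuppression::kHigh);  // kHigh maps to policy 2.
    // Then, per 10 ms capture chunk with an AudioBuffer* audio:
    //   ns.AnalyzeCaptureAudio(audio);  // Float build only: update estimates.
    //   ns.ProcessCaptureAudio(audio);  // Suppress noise on each channel.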
diff --git a/webrtc/modules/audio_processing/noise_suppression_impl.h b/webrtc/modules/audio_processing/noise_suppression_impl.h
index 76a39b8e09..debbc61bc9 100644
--- a/webrtc/modules/audio_processing/noise_suppression_impl.h
+++ b/webrtc/modules/audio_processing/noise_suppression_impl.h
@@ -11,47 +11,42 @@
#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_NOISE_SUPPRESSION_IMPL_H_
#define WEBRTC_MODULES_AUDIO_PROCESSING_NOISE_SUPPRESSION_IMPL_H_
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
-#include "webrtc/modules/audio_processing/processing_component.h"
namespace webrtc {
class AudioBuffer;
-class CriticalSectionWrapper;
-class NoiseSuppressionImpl : public NoiseSuppression,
- public ProcessingComponent {
+class NoiseSuppressionImpl : public NoiseSuppression {
public:
- NoiseSuppressionImpl(const AudioProcessing* apm,
- CriticalSectionWrapper* crit);
- virtual ~NoiseSuppressionImpl();
+ explicit NoiseSuppressionImpl(rtc::CriticalSection* crit);
+ ~NoiseSuppressionImpl() override;
- int AnalyzeCaptureAudio(AudioBuffer* audio);
- int ProcessCaptureAudio(AudioBuffer* audio);
+ // TODO(peah): Fold into ctor, once public API is removed.
+ void Initialize(size_t channels, int sample_rate_hz);
+ void AnalyzeCaptureAudio(AudioBuffer* audio);
+ void ProcessCaptureAudio(AudioBuffer* audio);
// NoiseSuppression implementation.
+ int Enable(bool enable) override;
bool is_enabled() const override;
- float speech_probability() const override;
+ int set_level(Level level) override;
Level level() const override;
+ float speech_probability() const override;
private:
- // NoiseSuppression implementation.
- int Enable(bool enable) override;
- int set_level(Level level) override;
-
- // ProcessingComponent implementation.
- void* CreateHandle() const override;
- int InitializeHandle(void* handle) const override;
- int ConfigureHandle(void* handle) const override;
- void DestroyHandle(void* handle) const override;
- int num_handles_required() const override;
- int GetHandleError(void* handle) const override;
-
- const AudioProcessing* apm_;
- CriticalSectionWrapper* crit_;
- Level level_;
+ class Suppressor;
+ rtc::CriticalSection* const crit_;
+ bool enabled_ GUARDED_BY(crit_) = false;
+ Level level_ GUARDED_BY(crit_) = kModerate;
+ size_t channels_ GUARDED_BY(crit_) = 0;
+ int sample_rate_hz_ GUARDED_BY(crit_) = 0;
+ std::vector<rtc::scoped_ptr<Suppressor>> suppressors_ GUARDED_BY(crit_);
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(NoiseSuppressionImpl);
};
-
} // namespace webrtc
#endif // WEBRTC_MODULES_AUDIO_PROCESSING_NOISE_SUPPRESSION_IMPL_H_
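
The GUARDED_BY annotations above are Clang thread-safety annotations: the compiler can warn when an annotated member is accessed without holding the named lock. A minimal sketch of the same pattern (class and members illustrative only):

    class Counter {
     public:
      explicit Counter(rtc::CriticalSection* crit) : crit_(crit) {}
      void Increment() {
        rtc::CritScope cs(crit_);  // Must hold crit_ before touching value_.
        ++value_;
      }
     private:
      rtc::CriticalSection* const crit_;
      int value_ GUARDED_BY(crit_) = 0;
    };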
diff --git a/webrtc/modules/audio_processing/ns/include/noise_suppression.h b/webrtc/modules/audio_processing/ns/include/noise_suppression.h
deleted file mode 100644
index 9dac56bdee..0000000000
--- a/webrtc/modules/audio_processing/ns/include/noise_suppression.h
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_NS_INCLUDE_NOISE_SUPPRESSION_H_
-#define WEBRTC_MODULES_AUDIO_PROCESSING_NS_INCLUDE_NOISE_SUPPRESSION_H_
-
-#include <stddef.h>
-
-#include "webrtc/typedefs.h"
-
-typedef struct NsHandleT NsHandle;
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-/*
- * This function creates an instance of the floating point Noise Suppression.
- */
-NsHandle* WebRtcNs_Create();
-
-/*
- * This function frees the dynamic memory of a specified noise suppression
- * instance.
- *
- * Input:
- * - NS_inst : Pointer to NS instance that should be freed
- */
-void WebRtcNs_Free(NsHandle* NS_inst);
-
-/*
- * This function initializes a NS instance and has to be called before any other
- * processing is made.
- *
- * Input:
- * - NS_inst : Instance that should be initialized
- * - fs : sampling frequency
- *
- * Output:
- * - NS_inst : Initialized instance
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int WebRtcNs_Init(NsHandle* NS_inst, uint32_t fs);
-
-/*
- * This changes the aggressiveness of the noise suppression method.
- *
- * Input:
- * - NS_inst : Noise suppression instance.
- * - mode : 0: Mild, 1: Medium , 2: Aggressive
- *
- * Output:
- * - NS_inst : Updated instance.
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int WebRtcNs_set_policy(NsHandle* NS_inst, int mode);
-
-/*
- * This functions estimates the background noise for the inserted speech frame.
- * The input and output signals should always be 10ms (80 or 160 samples).
- *
- * Input
- * - NS_inst : Noise suppression instance.
- * - spframe : Pointer to speech frame buffer for L band
- *
- * Output:
- * - NS_inst : Updated NS instance
- */
-void WebRtcNs_Analyze(NsHandle* NS_inst, const float* spframe);
-
-/*
- * This functions does Noise Suppression for the inserted speech frame. The
- * input and output signals should always be 10ms (80 or 160 samples).
- *
- * Input
- * - NS_inst : Noise suppression instance.
- * - spframe : Pointer to speech frame buffer for each band
- * - num_bands : Number of bands
- *
- * Output:
- * - NS_inst : Updated NS instance
- * - outframe : Pointer to output frame for each band
- */
-void WebRtcNs_Process(NsHandle* NS_inst,
- const float* const* spframe,
- size_t num_bands,
- float* const* outframe);
-
-/* Returns the internally used prior speech probability of the current frame.
- * There is a frequency bin based one as well, with which this should not be
- * confused.
- *
- * Input
- * - handle : Noise suppression instance.
- *
- * Return value : Prior speech probability in interval [0.0, 1.0].
- * -1 - NULL pointer or uninitialized instance.
- */
-float WebRtcNs_prior_speech_probability(NsHandle* handle);
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif // WEBRTC_MODULES_AUDIO_PROCESSING_NS_INCLUDE_NOISE_SUPPRESSION_H_
diff --git a/webrtc/modules/audio_processing/ns/include/noise_suppression_x.h b/webrtc/modules/audio_processing/ns/include/noise_suppression_x.h
deleted file mode 100644
index 88fe4cd635..0000000000
--- a/webrtc/modules/audio_processing/ns/include/noise_suppression_x.h
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_NS_INCLUDE_NOISE_SUPPRESSION_X_H_
-#define WEBRTC_MODULES_AUDIO_PROCESSING_NS_INCLUDE_NOISE_SUPPRESSION_X_H_
-
-#include "webrtc/typedefs.h"
-
-typedef struct NsxHandleT NsxHandle;
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-/*
- * This function creates an instance of the fixed point Noise Suppression.
- */
-NsxHandle* WebRtcNsx_Create();
-
-/*
- * This function frees the dynamic memory of a specified Noise Suppression
- * instance.
- *
- * Input:
- * - nsxInst : Pointer to NS instance that should be freed
- */
-void WebRtcNsx_Free(NsxHandle* nsxInst);
-
-/*
- * This function initializes a NS instance
- *
- * Input:
- * - nsxInst : Instance that should be initialized
- * - fs : sampling frequency
- *
- * Output:
- * - nsxInst : Initialized instance
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int WebRtcNsx_Init(NsxHandle* nsxInst, uint32_t fs);
-
-/*
- * This changes the aggressiveness of the noise suppression method.
- *
- * Input:
- * - nsxInst : Instance that should be initialized
- * - mode : 0: Mild, 1: Medium , 2: Aggressive
- *
- * Output:
- * - nsxInst : Initialized instance
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int WebRtcNsx_set_policy(NsxHandle* nsxInst, int mode);
-
-/*
- * This functions does noise suppression for the inserted speech frame. The
- * input and output signals should always be 10ms (80 or 160 samples).
- *
- * Input
- * - nsxInst : NSx instance. Needs to be initiated before call.
- * - speechFrame : Pointer to speech frame buffer for each band
- * - num_bands : Number of bands
- *
- * Output:
- * - nsxInst : Updated NSx instance
- * - outFrame : Pointer to output frame for each band
- */
-void WebRtcNsx_Process(NsxHandle* nsxInst,
- const short* const* speechFrame,
- int num_bands,
- short* const* outFrame);
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif // WEBRTC_MODULES_AUDIO_PROCESSING_NS_INCLUDE_NOISE_SUPPRESSION_X_H_
diff --git a/webrtc/modules/audio_processing/ns/noise_suppression.c b/webrtc/modules/audio_processing/ns/noise_suppression.c
index 13f1b2d6dc..dd05e0ab3d 100644
--- a/webrtc/modules/audio_processing/ns/noise_suppression.c
+++ b/webrtc/modules/audio_processing/ns/noise_suppression.c
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_processing/ns/include/noise_suppression.h"
+#include "webrtc/modules/audio_processing/ns/noise_suppression.h"
#include <stdlib.h>
#include <string.h>
diff --git a/webrtc/modules/audio_processing/ns/noise_suppression.h b/webrtc/modules/audio_processing/ns/noise_suppression.h
new file mode 100644
index 0000000000..8018118b60
--- /dev/null
+++ b/webrtc/modules/audio_processing/ns/noise_suppression.h
@@ -0,0 +1,116 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_NS_NOISE_SUPPRESSION_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_NS_NOISE_SUPPRESSION_H_
+
+#include <stddef.h>
+
+#include "webrtc/typedefs.h"
+
+typedef struct NsHandleT NsHandle;
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/*
+ * This function creates an instance of the floating point Noise Suppression.
+ */
+NsHandle* WebRtcNs_Create();
+
+/*
+ * This function frees the dynamic memory of a specified noise suppression
+ * instance.
+ *
+ * Input:
+ * - NS_inst : Pointer to NS instance that should be freed
+ */
+void WebRtcNs_Free(NsHandle* NS_inst);
+
+/*
+ * This function initializes a NS instance and must be called before any other
+ * processing is done.
+ *
+ * Input:
+ * - NS_inst : Instance that should be initialized
+ * - fs : sampling frequency
+ *
+ * Output:
+ * - NS_inst : Initialized instance
+ *
+ * Return value : 0 - Ok
+ * -1 - Error
+ */
+int WebRtcNs_Init(NsHandle* NS_inst, uint32_t fs);
+
+/*
+ * This changes the aggressiveness of the noise suppression method.
+ *
+ * Input:
+ * - NS_inst : Noise suppression instance.
+ * - mode : 0: Mild, 1: Medium, 2: Aggressive
+ *
+ * Output:
+ * - NS_inst : Updated instance.
+ *
+ * Return value : 0 - Ok
+ * -1 - Error
+ */
+int WebRtcNs_set_policy(NsHandle* NS_inst, int mode);
+
+/*
+ * This function estimates the background noise for the inserted speech frame.
+ * The input and output signals should always be 10ms (80 or 160 samples).
+ *
+ * Input
+ * - NS_inst : Noise suppression instance.
+ * - spframe : Pointer to speech frame buffer for L band
+ *
+ * Output:
+ * - NS_inst : Updated NS instance
+ */
+void WebRtcNs_Analyze(NsHandle* NS_inst, const float* spframe);
+
+/*
+ * This function does Noise Suppression for the inserted speech frame. The
+ * input and output signals should always be 10ms (80 or 160 samples).
+ *
+ * Input
+ * - NS_inst : Noise suppression instance.
+ * - spframe : Pointer to speech frame buffer for each band
+ * - num_bands : Number of bands
+ *
+ * Output:
+ * - NS_inst : Updated NS instance
+ * - outframe : Pointer to output frame for each band
+ */
+void WebRtcNs_Process(NsHandle* NS_inst,
+ const float* const* spframe,
+ size_t num_bands,
+ float* const* outframe);
+
+/* Returns the internally used prior speech probability of the current frame.
+ * There is a frequency bin based one as well, with which this should not be
+ * confused.
+ *
+ * Input
+ * - handle : Noise suppression instance.
+ *
+ * Return value : Prior speech probability in interval [0.0, 1.0].
+ * -1 - NULL pointer or uninitialized instance.
+ */
+float WebRtcNs_prior_speech_probability(NsHandle* handle);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif // WEBRTC_MODULES_AUDIO_PROCESSING_NS_NOISE_SUPPRESSION_H_
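
The comments above fully specify the float NS call sequence. A usage sketch under those constraints, assuming 16 kHz mono input so that one 10 ms frame is 160 samples in a single band (buffers and values below are illustrative, not from this patch):

    NsHandle* ns = WebRtcNs_Create();
    if (WebRtcNs_Init(ns, 16000) != 0) { /* handle error */ }
    WebRtcNs_set_policy(ns, 2);    // 2: Aggressive.
    float in[160] = {0.f};         // One 10 ms frame of input.
    float out[160];
    const float* in_bands[] = {in};
    float* out_bands[] = {out};
    WebRtcNs_Analyze(ns, in);                      // Update the noise estimate.
    WebRtcNs_Process(ns, in_bands, 1, out_bands);  // num_bands == 1 here.
    float p = WebRtcNs_prior_speech_probability(ns);  // In [0.0, 1.0].
    WebRtcNs_Free(ns);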
diff --git a/webrtc/modules/audio_processing/ns/noise_suppression_x.c b/webrtc/modules/audio_processing/ns/noise_suppression_x.c
index 150fe608dd..0a5ba13300 100644
--- a/webrtc/modules/audio_processing/ns/noise_suppression_x.c
+++ b/webrtc/modules/audio_processing/ns/noise_suppression_x.c
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_processing/ns/include/noise_suppression_x.h"
+#include "webrtc/modules/audio_processing/ns/noise_suppression_x.h"
#include <stdlib.h>
diff --git a/webrtc/modules/audio_processing/ns/noise_suppression_x.h b/webrtc/modules/audio_processing/ns/noise_suppression_x.h
new file mode 100644
index 0000000000..02b44cc091
--- /dev/null
+++ b/webrtc/modules/audio_processing/ns/noise_suppression_x.h
@@ -0,0 +1,88 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_NS_NOISE_SUPPRESSION_X_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_NS_NOISE_SUPPRESSION_X_H_
+
+#include "webrtc/typedefs.h"
+
+typedef struct NsxHandleT NsxHandle;
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/*
+ * This function creates an instance of the fixed point Noise Suppression.
+ */
+NsxHandle* WebRtcNsx_Create();
+
+/*
+ * This function frees the dynamic memory of a specified Noise Suppression
+ * instance.
+ *
+ * Input:
+ * - nsxInst : Pointer to NS instance that should be freed
+ */
+void WebRtcNsx_Free(NsxHandle* nsxInst);
+
+/*
+ * This function initializes a NS instance
+ *
+ * Input:
+ * - nsxInst : Instance that should be initialized
+ * - fs : sampling frequency
+ *
+ * Output:
+ * - nsxInst : Initialized instance
+ *
+ * Return value : 0 - Ok
+ * -1 - Error
+ */
+int WebRtcNsx_Init(NsxHandle* nsxInst, uint32_t fs);
+
+/*
+ * This changes the aggressiveness of the noise suppression method.
+ *
+ * Input:
+ * - nsxInst : Noise suppression instance.
+ * - mode : 0: Mild, 1: Medium, 2: Aggressive
+ *
+ * Output:
+ * - nsxInst : Updated instance.
+ *
+ * Return value : 0 - Ok
+ * -1 - Error
+ */
+int WebRtcNsx_set_policy(NsxHandle* nsxInst, int mode);
+
+/*
+ * This function does noise suppression for the inserted speech frame. The
+ * input and output signals should always be 10ms (80 or 160 samples).
+ *
+ * Input
+ * - nsxInst : NSx instance. Needs to be initialized before the call.
+ * - speechFrame : Pointer to speech frame buffer for each band
+ * - num_bands : Number of bands
+ *
+ * Output:
+ * - nsxInst : Updated NSx instance
+ * - outFrame : Pointer to output frame for each band
+ */
+void WebRtcNsx_Process(NsxHandle* nsxInst,
+ const short* const* speechFrame,
+ int num_bands,
+ short* const* outFrame);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif // WEBRTC_MODULES_AUDIO_PROCESSING_NS_NOISE_SUPPRESSION_X_H_
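
The fixed-point API mirrors the float one, minus WebRtcNs_Analyze and the speech-probability getter, and operates on short (16-bit) samples. A corresponding sketch under the same 16 kHz mono assumption (illustrative only):

    NsxHandle* nsx = WebRtcNsx_Create();
    if (WebRtcNsx_Init(nsx, 16000) != 0) { /* handle error */ }
    WebRtcNsx_set_policy(nsx, 1);  // 1: Medium.
    short in[160] = {0};
    short out[160];
    const short* in_bands[] = {in};
    short* out_bands[] = {out};
    WebRtcNsx_Process(nsx, in_bands, 1, out_bands);
    WebRtcNsx_Free(nsx);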
diff --git a/webrtc/modules/audio_processing/ns/ns_core.c b/webrtc/modules/audio_processing/ns/ns_core.c
index 1d6091400e..5ce64cee29 100644
--- a/webrtc/modules/audio_processing/ns/ns_core.c
+++ b/webrtc/modules/audio_processing/ns/ns_core.c
@@ -15,7 +15,7 @@
#include "webrtc/common_audio/fft4g.h"
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
-#include "webrtc/modules/audio_processing/ns/include/noise_suppression.h"
+#include "webrtc/modules/audio_processing/ns/noise_suppression.h"
#include "webrtc/modules/audio_processing/ns/ns_core.h"
#include "webrtc/modules/audio_processing/ns/windows_private.h"
diff --git a/webrtc/modules/audio_processing/ns/nsx_core.c b/webrtc/modules/audio_processing/ns/nsx_core.c
index 71445792f5..25f16d26ab 100644
--- a/webrtc/modules/audio_processing/ns/nsx_core.c
+++ b/webrtc/modules/audio_processing/ns/nsx_core.c
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_processing/ns/include/noise_suppression_x.h"
+#include "webrtc/modules/audio_processing/ns/noise_suppression_x.h"
#include <assert.h>
#include <math.h>
diff --git a/webrtc/modules/audio_processing/ns/nsx_core_c.c b/webrtc/modules/audio_processing/ns/nsx_core_c.c
index 14322d38cc..da7aa3d5db 100644
--- a/webrtc/modules/audio_processing/ns/nsx_core_c.c
+++ b/webrtc/modules/audio_processing/ns/nsx_core_c.c
@@ -10,7 +10,7 @@
#include <assert.h>
-#include "webrtc/modules/audio_processing/ns/include/noise_suppression_x.h"
+#include "webrtc/modules/audio_processing/ns/noise_suppression_x.h"
#include "webrtc/modules/audio_processing/ns/nsx_core.h"
#include "webrtc/modules/audio_processing/ns/nsx_defines.h"
diff --git a/webrtc/modules/audio_processing/ns/nsx_core_mips.c b/webrtc/modules/audio_processing/ns/nsx_core_mips.c
index d99be8720b..7688d82d78 100644
--- a/webrtc/modules/audio_processing/ns/nsx_core_mips.c
+++ b/webrtc/modules/audio_processing/ns/nsx_core_mips.c
@@ -11,7 +11,7 @@
#include <assert.h>
#include <string.h>
-#include "webrtc/modules/audio_processing/ns/include/noise_suppression_x.h"
+#include "webrtc/modules/audio_processing/ns/noise_suppression_x.h"
#include "webrtc/modules/audio_processing/ns/nsx_core.h"
static const int16_t kIndicatorTable[17] = {
diff --git a/webrtc/modules/audio_processing/processing_component.cc b/webrtc/modules/audio_processing/processing_component.cc
index 9e16d7c4ee..7abd8e2100 100644
--- a/webrtc/modules/audio_processing/processing_component.cc
+++ b/webrtc/modules/audio_processing/processing_component.cc
@@ -55,12 +55,12 @@ bool ProcessingComponent::is_component_enabled() const {
return enabled_;
}
-void* ProcessingComponent::handle(int index) const {
+void* ProcessingComponent::handle(size_t index) const {
assert(index < num_handles_);
return handles_[index];
}
-int ProcessingComponent::num_handles() const {
+size_t ProcessingComponent::num_handles() const {
return num_handles_;
}
@@ -70,12 +70,12 @@ int ProcessingComponent::Initialize() {
}
num_handles_ = num_handles_required();
- if (num_handles_ > static_cast<int>(handles_.size())) {
+ if (num_handles_ > handles_.size()) {
handles_.resize(num_handles_, NULL);
}
- assert(static_cast<int>(handles_.size()) >= num_handles_);
- for (int i = 0; i < num_handles_; i++) {
+ assert(handles_.size() >= num_handles_);
+ for (size_t i = 0; i < num_handles_; i++) {
if (handles_[i] == NULL) {
handles_[i] = CreateHandle();
if (handles_[i] == NULL) {
@@ -98,8 +98,8 @@ int ProcessingComponent::Configure() {
return AudioProcessing::kNoError;
}
- assert(static_cast<int>(handles_.size()) >= num_handles_);
- for (int i = 0; i < num_handles_; i++) {
+ assert(handles_.size() >= num_handles_);
+ for (size_t i = 0; i < num_handles_; i++) {
int err = ConfigureHandle(handles_[i]);
if (err != AudioProcessing::kNoError) {
return GetHandleError(handles_[i]);
diff --git a/webrtc/modules/audio_processing/processing_component.h b/webrtc/modules/audio_processing/processing_component.h
index 8ee3ac6c7d..577f1570ad 100644
--- a/webrtc/modules/audio_processing/processing_component.h
+++ b/webrtc/modules/audio_processing/processing_component.h
@@ -17,6 +17,22 @@
namespace webrtc {
+// Functor to use when supplying a verifier function for the queue item
+// verification.
+template <typename T>
+class RenderQueueItemVerifier {
+ public:
+ explicit RenderQueueItemVerifier(size_t minimum_capacity)
+ : minimum_capacity_(minimum_capacity) {}
+
+ bool operator()(const std::vector<T>& v) const {
+ return v.capacity() >= minimum_capacity_;
+ }
+
+ private:
+ size_t minimum_capacity_;
+};
+
class ProcessingComponent {
public:
ProcessingComponent();
@@ -31,21 +47,21 @@ class ProcessingComponent {
protected:
virtual int Configure();
int EnableComponent(bool enable);
- void* handle(int index) const;
- int num_handles() const;
+ void* handle(size_t index) const;
+ size_t num_handles() const;
private:
virtual void* CreateHandle() const = 0;
virtual int InitializeHandle(void* handle) const = 0;
virtual int ConfigureHandle(void* handle) const = 0;
virtual void DestroyHandle(void* handle) const = 0;
- virtual int num_handles_required() const = 0;
+ virtual size_t num_handles_required() const = 0;
virtual int GetHandleError(void* handle) const = 0;
std::vector<void*> handles_;
bool initialized_;
bool enabled_;
- int num_handles_;
+ size_t num_handles_;
};
} // namespace webrtc
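
A sketch of how the RenderQueueItemVerifier functor added above might be invoked; the vector stands in for a queued render item and is illustrative only:

    RenderQueueItemVerifier<float> verifier(160);
    std::vector<float> item;
    item.reserve(320);
    bool ok = verifier(item);  // true: capacity 320 >= minimum 160.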
diff --git a/webrtc/modules/audio_processing/splitting_filter.cc b/webrtc/modules/audio_processing/splitting_filter.cc
index 60427e2db6..46cc9352c2 100644
--- a/webrtc/modules/audio_processing/splitting_filter.cc
+++ b/webrtc/modules/audio_processing/splitting_filter.cc
@@ -16,7 +16,7 @@
namespace webrtc {
-SplittingFilter::SplittingFilter(int num_channels,
+SplittingFilter::SplittingFilter(size_t num_channels,
size_t num_bands,
size_t num_frames)
: num_bands_(num_bands) {
@@ -24,7 +24,7 @@ SplittingFilter::SplittingFilter(int num_channels,
if (num_bands_ == 2) {
two_bands_states_.resize(num_channels);
} else if (num_bands_ == 3) {
- for (int i = 0; i < num_channels; ++i) {
+ for (size_t i = 0; i < num_channels; ++i) {
three_band_filter_banks_.push_back(new ThreeBandFilterBank(num_frames));
}
}
@@ -58,8 +58,7 @@ void SplittingFilter::Synthesis(const IFChannelBuffer* bands,
void SplittingFilter::TwoBandsAnalysis(const IFChannelBuffer* data,
IFChannelBuffer* bands) {
- RTC_DCHECK_EQ(static_cast<int>(two_bands_states_.size()),
- data->num_channels());
+ RTC_DCHECK_EQ(two_bands_states_.size(), data->num_channels());
for (size_t i = 0; i < two_bands_states_.size(); ++i) {
WebRtcSpl_AnalysisQMF(data->ibuf_const()->channels()[i],
data->num_frames(),
@@ -72,8 +71,7 @@ void SplittingFilter::TwoBandsAnalysis(const IFChannelBuffer* data,
void SplittingFilter::TwoBandsSynthesis(const IFChannelBuffer* bands,
IFChannelBuffer* data) {
- RTC_DCHECK_EQ(static_cast<int>(two_bands_states_.size()),
- data->num_channels());
+ RTC_DCHECK_EQ(two_bands_states_.size(), data->num_channels());
for (size_t i = 0; i < two_bands_states_.size(); ++i) {
WebRtcSpl_SynthesisQMF(bands->ibuf_const()->channels(0)[i],
bands->ibuf_const()->channels(1)[i],
@@ -86,8 +84,7 @@ void SplittingFilter::TwoBandsSynthesis(const IFChannelBuffer* bands,
void SplittingFilter::ThreeBandsAnalysis(const IFChannelBuffer* data,
IFChannelBuffer* bands) {
- RTC_DCHECK_EQ(static_cast<int>(three_band_filter_banks_.size()),
- data->num_channels());
+ RTC_DCHECK_EQ(three_band_filter_banks_.size(), data->num_channels());
for (size_t i = 0; i < three_band_filter_banks_.size(); ++i) {
three_band_filter_banks_[i]->Analysis(data->fbuf_const()->channels()[i],
data->num_frames(),
@@ -97,8 +94,7 @@ void SplittingFilter::ThreeBandsAnalysis(const IFChannelBuffer* data,
void SplittingFilter::ThreeBandsSynthesis(const IFChannelBuffer* bands,
IFChannelBuffer* data) {
- RTC_DCHECK_EQ(static_cast<int>(three_band_filter_banks_.size()),
- data->num_channels());
+ RTC_DCHECK_EQ(three_band_filter_banks_.size(), data->num_channels());
for (size_t i = 0; i < three_band_filter_banks_.size(); ++i) {
three_band_filter_banks_[i]->Synthesis(bands->fbuf_const()->bands(i),
bands->num_frames_per_band(),
diff --git a/webrtc/modules/audio_processing/splitting_filter.h b/webrtc/modules/audio_processing/splitting_filter.h
index 4698d3fe2b..6b81c2fb05 100644
--- a/webrtc/modules/audio_processing/splitting_filter.h
+++ b/webrtc/modules/audio_processing/splitting_filter.h
@@ -45,7 +45,7 @@ struct TwoBandsStates {
// used.
class SplittingFilter {
public:
- SplittingFilter(int num_channels, size_t num_bands, size_t num_frames);
+ SplittingFilter(size_t num_channels, size_t num_bands, size_t num_frames);
void Analysis(const IFChannelBuffer* data, IFChannelBuffer* bands);
void Synthesis(const IFChannelBuffer* bands, IFChannelBuffer* data);
diff --git a/webrtc/modules/audio_processing/test/audio_file_processor.cc b/webrtc/modules/audio_processing/test/audio_file_processor.cc
new file mode 100644
index 0000000000..56e9b4b96f
--- /dev/null
+++ b/webrtc/modules/audio_processing/test/audio_file_processor.cc
@@ -0,0 +1,180 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_processing/test/audio_file_processor.h"
+
+#include <algorithm>
+#include <utility>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/modules/audio_processing/test/protobuf_utils.h"
+
+using rtc::scoped_ptr;
+using rtc::CheckedDivExact;
+using std::vector;
+using webrtc::audioproc::Event;
+using webrtc::audioproc::Init;
+using webrtc::audioproc::ReverseStream;
+using webrtc::audioproc::Stream;
+
+namespace webrtc {
+namespace {
+
+// Returns a StreamConfig corresponding to file.
+StreamConfig GetStreamConfig(const WavFile& file) {
+ return StreamConfig(file.sample_rate(), file.num_channels());
+}
+
+// Returns a ChannelBuffer corresponding to file.
+ChannelBuffer<float> GetChannelBuffer(const WavFile& file) {
+ return ChannelBuffer<float>(
+ CheckedDivExact(file.sample_rate(), AudioFileProcessor::kChunksPerSecond),
+ file.num_channels());
+}
+
+} // namespace
+
+WavFileProcessor::WavFileProcessor(scoped_ptr<AudioProcessing> ap,
+ scoped_ptr<WavReader> in_file,
+ scoped_ptr<WavWriter> out_file)
+ : ap_(std::move(ap)),
+ in_buf_(GetChannelBuffer(*in_file)),
+ out_buf_(GetChannelBuffer(*out_file)),
+ input_config_(GetStreamConfig(*in_file)),
+ output_config_(GetStreamConfig(*out_file)),
+ buffer_reader_(std::move(in_file)),
+ buffer_writer_(std::move(out_file)) {}
+
+bool WavFileProcessor::ProcessChunk() {
+ if (!buffer_reader_.Read(&in_buf_)) {
+ return false;
+ }
+ {
+ const auto st = ScopedTimer(mutable_proc_time());
+ RTC_CHECK_EQ(kNoErr,
+ ap_->ProcessStream(in_buf_.channels(), input_config_,
+ output_config_, out_buf_.channels()));
+ }
+ buffer_writer_.Write(out_buf_);
+ return true;
+}
+
+AecDumpFileProcessor::AecDumpFileProcessor(scoped_ptr<AudioProcessing> ap,
+ FILE* dump_file,
+ scoped_ptr<WavWriter> out_file)
+ : ap_(std::move(ap)),
+ dump_file_(dump_file),
+ out_buf_(GetChannelBuffer(*out_file)),
+ output_config_(GetStreamConfig(*out_file)),
+ buffer_writer_(std::move(out_file)) {
+ RTC_CHECK(dump_file_) << "Could not open dump file for reading.";
+}
+
+AecDumpFileProcessor::~AecDumpFileProcessor() {
+ fclose(dump_file_);
+}
+
+bool AecDumpFileProcessor::ProcessChunk() {
+ Event event_msg;
+
+ // Continue until we process our first Stream message.
+ do {
+ if (!ReadMessageFromFile(dump_file_, &event_msg)) {
+ return false;
+ }
+
+ if (event_msg.type() == Event::INIT) {
+ RTC_CHECK(event_msg.has_init());
+ HandleMessage(event_msg.init());
+
+ } else if (event_msg.type() == Event::STREAM) {
+ RTC_CHECK(event_msg.has_stream());
+ HandleMessage(event_msg.stream());
+
+ } else if (event_msg.type() == Event::REVERSE_STREAM) {
+ RTC_CHECK(event_msg.has_reverse_stream());
+ HandleMessage(event_msg.reverse_stream());
+ }
+ } while (event_msg.type() != Event::STREAM);
+
+ return true;
+}
+
+void AecDumpFileProcessor::HandleMessage(const Init& msg) {
+ RTC_CHECK(msg.has_sample_rate());
+ RTC_CHECK(msg.has_num_input_channels());
+ RTC_CHECK(msg.has_num_reverse_channels());
+
+ in_buf_.reset(new ChannelBuffer<float>(
+ CheckedDivExact(msg.sample_rate(), kChunksPerSecond),
+ msg.num_input_channels()));
+ const int reverse_sample_rate = msg.has_reverse_sample_rate()
+ ? msg.reverse_sample_rate()
+ : msg.sample_rate();
+ reverse_buf_.reset(new ChannelBuffer<float>(
+ CheckedDivExact(reverse_sample_rate, kChunksPerSecond),
+ msg.num_reverse_channels()));
+ input_config_ = StreamConfig(msg.sample_rate(), msg.num_input_channels());
+ reverse_config_ =
+ StreamConfig(reverse_sample_rate, msg.num_reverse_channels());
+
+ const ProcessingConfig config = {
+ {input_config_, output_config_, reverse_config_, reverse_config_}};
+ RTC_CHECK_EQ(kNoErr, ap_->Initialize(config));
+}
+
+void AecDumpFileProcessor::HandleMessage(const Stream& msg) {
+ RTC_CHECK(!msg.has_input_data());
+ RTC_CHECK_EQ(in_buf_->num_channels(),
+ static_cast<size_t>(msg.input_channel_size()));
+
+ for (int i = 0; i < msg.input_channel_size(); ++i) {
+ RTC_CHECK_EQ(in_buf_->num_frames() * sizeof(*in_buf_->channels()[i]),
+ msg.input_channel(i).size());
+ std::memcpy(in_buf_->channels()[i], msg.input_channel(i).data(),
+ msg.input_channel(i).size());
+ }
+ {
+ const auto st = ScopedTimer(mutable_proc_time());
+ RTC_CHECK_EQ(kNoErr, ap_->set_stream_delay_ms(msg.delay()));
+ ap_->echo_cancellation()->set_stream_drift_samples(msg.drift());
+ if (msg.has_keypress()) {
+ ap_->set_stream_key_pressed(msg.keypress());
+ }
+ RTC_CHECK_EQ(kNoErr,
+ ap_->ProcessStream(in_buf_->channels(), input_config_,
+ output_config_, out_buf_.channels()));
+ }
+
+ buffer_writer_.Write(out_buf_);
+}
+
+void AecDumpFileProcessor::HandleMessage(const ReverseStream& msg) {
+ RTC_CHECK(!msg.has_data());
+ RTC_CHECK_EQ(reverse_buf_->num_channels(),
+ static_cast<size_t>(msg.channel_size()));
+
+ for (int i = 0; i < msg.channel_size(); ++i) {
+ RTC_CHECK_EQ(reverse_buf_->num_frames() * sizeof(*in_buf_->channels()[i]),
+ msg.channel(i).size());
+ std::memcpy(reverse_buf_->channels()[i], msg.channel(i).data(),
+ msg.channel(i).size());
+ }
+ {
+ const auto st = ScopedTimer(mutable_proc_time());
+ // TODO(ajm): This currently discards the processed output, which is needed
+ // for e.g. intelligibility enhancement.
+ RTC_CHECK_EQ(kNoErr, ap_->ProcessReverseStream(
+ reverse_buf_->channels(), reverse_config_,
+ reverse_config_, reverse_buf_->channels()));
+ }
+}
+
+} // namespace webrtc
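
A sketch of driving the new AecDumpFileProcessor until the dump is exhausted; the file names and the default AudioProcessing configuration are assumptions, not part of this patch:

    FILE* dump = fopen("input.aecdump", "rb");  // Ctor CHECKs it is non-null.
    rtc::scoped_ptr<AudioProcessing> ap(AudioProcessing::Create());
    rtc::scoped_ptr<WavWriter> out(new WavWriter("out.wav", 16000, 1));
    AecDumpFileProcessor processor(std::move(ap), dump, std::move(out));
    while (processor.ProcessChunk()) {
      // Each iteration consumes events up to and including one Stream message.
    }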
diff --git a/webrtc/modules/audio_processing/test/audio_file_processor.h b/webrtc/modules/audio_processing/test/audio_file_processor.h
new file mode 100644
index 0000000000..a3153b2244
--- /dev/null
+++ b/webrtc/modules/audio_processing/test/audio_file_processor.h
@@ -0,0 +1,139 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_TEST_AUDIO_FILE_PROCESSOR_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_TEST_AUDIO_FILE_PROCESSOR_H_
+
+#include <algorithm>
+#include <limits>
+#include <vector>
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/common_audio/channel_buffer.h"
+#include "webrtc/common_audio/wav_file.h"
+#include "webrtc/modules/audio_processing/include/audio_processing.h"
+#include "webrtc/modules/audio_processing/test/test_utils.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+
+#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
+#include "external/webrtc/webrtc/modules/audio_processing/debug.pb.h"
+#else
+#include "webrtc/audio_processing/debug.pb.h"
+#endif
+
+namespace webrtc {
+
+// Holds a few statistics about a series of TickIntervals.
+struct TickIntervalStats {
+ TickIntervalStats() : min(std::numeric_limits<int64_t>::max()) {}
+ TickInterval sum;
+ TickInterval max;
+ TickInterval min;
+};
+
+// Interface for processing an input file with an AudioProcessing instance and
+// dumping the results to an output file.
+class AudioFileProcessor {
+ public:
+ static const int kChunksPerSecond = 1000 / AudioProcessing::kChunkSizeMs;
+
+ virtual ~AudioFileProcessor() {}
+
+ // Processes one chunk (AudioProcessing::kChunkSizeMs of data) from the input
+ // file and writes the result to the output file.
+ virtual bool ProcessChunk() = 0;
+
+ // Returns the execution time of all AudioProcessing calls.
+ const TickIntervalStats& proc_time() const { return proc_time_; }
+
+ protected:
+ // RAII class for execution time measurement. Updates the provided
+ // TickIntervalStats based on the time between ScopedTimer creation and
+ // leaving the enclosing scope.
+ class ScopedTimer {
+ public:
+ explicit ScopedTimer(TickIntervalStats* proc_time)
+ : proc_time_(proc_time), start_time_(TickTime::Now()) {}
+
+ ~ScopedTimer() {
+ TickInterval interval = TickTime::Now() - start_time_;
+ proc_time_->sum += interval;
+ proc_time_->max = std::max(proc_time_->max, interval);
+ proc_time_->min = std::min(proc_time_->min, interval);
+ }
+
+ private:
+ TickIntervalStats* const proc_time_;
+ TickTime start_time_;
+ };
+
+ TickIntervalStats* mutable_proc_time() { return &proc_time_; }
+
+ private:
+ TickIntervalStats proc_time_;
+};
+
+// Used to read from and write to WavFile objects.
+class WavFileProcessor final : public AudioFileProcessor {
+ public:
+ // Takes ownership of all parameters.
+ WavFileProcessor(rtc::scoped_ptr<AudioProcessing> ap,
+ rtc::scoped_ptr<WavReader> in_file,
+ rtc::scoped_ptr<WavWriter> out_file);
+ virtual ~WavFileProcessor() {}
+
+ // Processes one chunk from the WAV input and writes to the WAV output.
+ bool ProcessChunk() override;
+
+ private:
+ rtc::scoped_ptr<AudioProcessing> ap_;
+
+ ChannelBuffer<float> in_buf_;
+ ChannelBuffer<float> out_buf_;
+ const StreamConfig input_config_;
+ const StreamConfig output_config_;
+ ChannelBufferWavReader buffer_reader_;
+ ChannelBufferWavWriter buffer_writer_;
+};
+
+// Used to read from an aecdump file and write to a WavWriter.
+class AecDumpFileProcessor final : public AudioFileProcessor {
+ public:
+ // Takes ownership of all parameters.
+ AecDumpFileProcessor(rtc::scoped_ptr<AudioProcessing> ap,
+ FILE* dump_file,
+ rtc::scoped_ptr<WavWriter> out_file);
+
+ virtual ~AecDumpFileProcessor();
+
+ // Processes events from the aecdump file until one Stream message has been
+ // handled, applying any Init and ReverseStream messages encountered along
+ // the way.
+ bool ProcessChunk() override;
+
+ private:
+ void HandleMessage(const webrtc::audioproc::Init& msg);
+ void HandleMessage(const webrtc::audioproc::Stream& msg);
+ void HandleMessage(const webrtc::audioproc::ReverseStream& msg);
+
+ rtc::scoped_ptr<AudioProcessing> ap_;
+ FILE* dump_file_;
+
+ rtc::scoped_ptr<ChannelBuffer<float>> in_buf_;
+ rtc::scoped_ptr<ChannelBuffer<float>> reverse_buf_;
+ ChannelBuffer<float> out_buf_;
+ StreamConfig input_config_;
+ StreamConfig reverse_config_;
+ const StreamConfig output_config_;
+ ChannelBufferWavWriter buffer_writer_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_PROCESSING_TEST_AUDIO_FILE_PROCESSOR_H_
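
For the WAV-to-WAV path, a corresponding sketch including the timing read-back via proc_time(); file names and rates are illustrative:

    rtc::scoped_ptr<AudioProcessing> ap(AudioProcessing::Create());
    rtc::scoped_ptr<WavReader> in(new WavReader("in.wav"));
    rtc::scoped_ptr<WavWriter> out(
        new WavWriter("out.wav", in->sample_rate(), in->num_channels()));
    WavFileProcessor processor(std::move(ap), std::move(in), std::move(out));
    while (processor.ProcessChunk()) {
    }
    const TickIntervalStats& stats = processor.proc_time();
    // stats.sum, stats.max and stats.min cover all AudioProcessing calls.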
diff --git a/webrtc/modules/audio_processing/test/audio_processing_unittest.cc b/webrtc/modules/audio_processing/test/audio_processing_unittest.cc
index c013a369fe..324cb7bec6 100644
--- a/webrtc/modules/audio_processing/test/audio_processing_unittest.cc
+++ b/webrtc/modules/audio_processing/test/audio_processing_unittest.cc
@@ -14,6 +14,7 @@
#include <limits>
#include <queue>
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/common_audio/include/audio_util.h"
#include "webrtc/common_audio/resampler/include/push_resampler.h"
@@ -23,11 +24,10 @@
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/modules/audio_processing/test/protobuf_utils.h"
#include "webrtc/modules/audio_processing/test/test_utils.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/system_wrappers/include/event_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/test/testsupport/fileutils.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
#include "gtest/gtest.h"
#include "external/webrtc/webrtc/modules/audio_processing/test/unittest.pb.h"
@@ -53,11 +53,8 @@ namespace {
// file. This is the typical case. When the file should be updated, it can
// be set to true with the command-line switch --write_ref_data.
bool write_ref_data = false;
-const int kChannels[] = {1, 2};
-const size_t kChannelsSize = sizeof(kChannels) / sizeof(*kChannels);
-
+const google::protobuf::int32 kChannels[] = {1, 2};
const int kSampleRates[] = {8000, 16000, 32000, 48000};
-const size_t kSampleRatesSize = sizeof(kSampleRates) / sizeof(*kSampleRates);
#if defined(WEBRTC_AUDIOPROC_FIXED_PROFILE)
// AECM doesn't support super-wb.
@@ -65,8 +62,6 @@ const int kProcessSampleRates[] = {8000, 16000};
#elif defined(WEBRTC_AUDIOPROC_FLOAT_PROFILE)
const int kProcessSampleRates[] = {8000, 16000, 32000, 48000};
#endif
-const size_t kProcessSampleRatesSize = sizeof(kProcessSampleRates) /
- sizeof(*kProcessSampleRates);
enum StreamDirection { kForward = 0, kReverse };
@@ -77,7 +72,7 @@ void ConvertToFloat(const int16_t* int_data, ChannelBuffer<float>* cb) {
cb->num_frames(),
cb->num_channels(),
cb_int.channels());
- for (int i = 0; i < cb->num_channels(); ++i) {
+ for (size_t i = 0; i < cb->num_channels(); ++i) {
S16ToFloat(cb_int.channels()[i],
cb->num_frames(),
cb->channels()[i]);
@@ -89,7 +84,7 @@ void ConvertToFloat(const AudioFrame& frame, ChannelBuffer<float>* cb) {
}
// Number of channels including the keyboard channel.
-int TotalChannelsFromLayout(AudioProcessing::ChannelLayout layout) {
+size_t TotalChannelsFromLayout(AudioProcessing::ChannelLayout layout) {
switch (layout) {
case AudioProcessing::kMono:
return 1;
@@ -100,7 +95,7 @@ int TotalChannelsFromLayout(AudioProcessing::ChannelLayout layout) {
return 3;
}
assert(false);
- return -1;
+ return 0;
}
int TruncateToMultipleOf10(int value) {
@@ -108,25 +103,25 @@ int TruncateToMultipleOf10(int value) {
}
void MixStereoToMono(const float* stereo, float* mono,
- int samples_per_channel) {
- for (int i = 0; i < samples_per_channel; ++i)
+ size_t samples_per_channel) {
+ for (size_t i = 0; i < samples_per_channel; ++i)
mono[i] = (stereo[i * 2] + stereo[i * 2 + 1]) / 2;
}
void MixStereoToMono(const int16_t* stereo, int16_t* mono,
- int samples_per_channel) {
- for (int i = 0; i < samples_per_channel; ++i)
+ size_t samples_per_channel) {
+ for (size_t i = 0; i < samples_per_channel; ++i)
mono[i] = (stereo[i * 2] + stereo[i * 2 + 1]) >> 1;
}
-void CopyLeftToRightChannel(int16_t* stereo, int samples_per_channel) {
- for (int i = 0; i < samples_per_channel; i++) {
+void CopyLeftToRightChannel(int16_t* stereo, size_t samples_per_channel) {
+ for (size_t i = 0; i < samples_per_channel; i++) {
stereo[i * 2 + 1] = stereo[i * 2];
}
}
-void VerifyChannelsAreEqual(int16_t* stereo, int samples_per_channel) {
- for (int i = 0; i < samples_per_channel; i++) {
+void VerifyChannelsAreEqual(int16_t* stereo, size_t samples_per_channel) {
+ for (size_t i = 0; i < samples_per_channel; i++) {
EXPECT_EQ(stereo[i * 2 + 1], stereo[i * 2]);
}
}
@@ -139,7 +134,7 @@ void SetFrameTo(AudioFrame* frame, int16_t value) {
}
void SetFrameTo(AudioFrame* frame, int16_t left, int16_t right) {
- ASSERT_EQ(2, frame->num_channels_);
+ ASSERT_EQ(2u, frame->num_channels_);
for (size_t i = 0; i < frame->samples_per_channel_ * 2; i += 2) {
frame->data_[i] = left;
frame->data_[i + 1] = right;
@@ -199,9 +194,9 @@ T AbsValue(T a) {
}
int16_t MaxAudioFrame(const AudioFrame& frame) {
- const int length = frame.samples_per_channel_ * frame.num_channels_;
+ const size_t length = frame.samples_per_channel_ * frame.num_channels_;
int16_t max_data = AbsValue(frame.data_[0]);
- for (int i = 1; i < length; i++) {
+ for (size_t i = 1; i < length; i++) {
max_data = std::max(max_data, AbsValue(frame.data_[i]));
}
@@ -264,10 +259,10 @@ std::string OutputFilePath(std::string name,
int output_rate,
int reverse_input_rate,
int reverse_output_rate,
- int num_input_channels,
- int num_output_channels,
- int num_reverse_input_channels,
- int num_reverse_output_channels,
+ size_t num_input_channels,
+ size_t num_output_channels,
+ size_t num_reverse_input_channels,
+ size_t num_reverse_output_channels,
StreamDirection file_direction) {
std::ostringstream ss;
ss << name << "_i" << num_input_channels << "_" << input_rate / 1000 << "_ir"
@@ -362,9 +357,9 @@ class ApmTest : public ::testing::Test {
void Init(int sample_rate_hz,
int output_sample_rate_hz,
int reverse_sample_rate_hz,
- int num_input_channels,
- int num_output_channels,
- int num_reverse_channels,
+ size_t num_input_channels,
+ size_t num_output_channels,
+ size_t num_reverse_channels,
bool open_output_file);
void Init(AudioProcessing* ap);
void EnableAllComponents();
@@ -377,12 +372,12 @@ class ApmTest : public ::testing::Test {
void ProcessDelayVerificationTest(int delay_ms, int system_delay_ms,
int delay_min, int delay_max);
void TestChangingChannelsInt16Interface(
- int num_channels,
+ size_t num_channels,
AudioProcessing::Error expected_return);
- void TestChangingForwardChannels(int num_in_channels,
- int num_out_channels,
+ void TestChangingForwardChannels(size_t num_in_channels,
+ size_t num_out_channels,
AudioProcessing::Error expected_return);
- void TestChangingReverseChannels(int num_rev_channels,
+ void TestChangingReverseChannels(size_t num_rev_channels,
AudioProcessing::Error expected_return);
void RunQuantizedVolumeDoesNotGetStuckTest(int sample_rate);
void RunManualVolumeChangeIsPossibleTest(int sample_rate);
@@ -403,7 +398,7 @@ class ApmTest : public ::testing::Test {
rtc::scoped_ptr<ChannelBuffer<float> > float_cb_;
rtc::scoped_ptr<ChannelBuffer<float> > revfloat_cb_;
int output_sample_rate_hz_;
- int num_output_channels_;
+ size_t num_output_channels_;
FILE* far_file_;
FILE* near_file_;
FILE* out_file_;
@@ -487,9 +482,9 @@ void ApmTest::Init(AudioProcessing* ap) {
void ApmTest::Init(int sample_rate_hz,
int output_sample_rate_hz,
int reverse_sample_rate_hz,
- int num_input_channels,
- int num_output_channels,
- int num_reverse_channels,
+ size_t num_input_channels,
+ size_t num_output_channels,
+ size_t num_reverse_channels,
bool open_output_file) {
SetContainerFormat(sample_rate_hz, num_input_channels, frame_, &float_cb_);
output_sample_rate_hz_ = output_sample_rate_hz;
@@ -821,7 +816,7 @@ TEST_F(ApmTest, DelayOffsetWithLimitsIsSetProperly) {
}
void ApmTest::TestChangingChannelsInt16Interface(
- int num_channels,
+ size_t num_channels,
AudioProcessing::Error expected_return) {
frame_->num_channels_ = num_channels;
EXPECT_EQ(expected_return, apm_->ProcessStream(frame_));
@@ -829,8 +824,8 @@ void ApmTest::TestChangingChannelsInt16Interface(
}
void ApmTest::TestChangingForwardChannels(
- int num_in_channels,
- int num_out_channels,
+ size_t num_in_channels,
+ size_t num_out_channels,
AudioProcessing::Error expected_return) {
const StreamConfig input_stream = {frame_->sample_rate_hz_, num_in_channels};
const StreamConfig output_stream = {output_sample_rate_hz_, num_out_channels};
@@ -841,7 +836,7 @@ void ApmTest::TestChangingForwardChannels(
}
void ApmTest::TestChangingReverseChannels(
- int num_rev_channels,
+ size_t num_rev_channels,
AudioProcessing::Error expected_return) {
const ProcessingConfig processing_config = {
{{frame_->sample_rate_hz_, apm_->num_input_channels()},
@@ -862,11 +857,11 @@ TEST_F(ApmTest, ChannelsInt16Interface) {
TestChangingChannelsInt16Interface(0, apm_->kBadNumberChannelsError);
- for (int i = 1; i < 4; i++) {
+ for (size_t i = 1; i < 4; i++) {
TestChangingChannelsInt16Interface(i, kNoErr);
EXPECT_EQ(i, apm_->num_input_channels());
// We always force the number of reverse channels used for processing to 1.
- EXPECT_EQ(1, apm_->num_reverse_channels());
+ EXPECT_EQ(1u, apm_->num_reverse_channels());
}
}
@@ -877,8 +872,8 @@ TEST_F(ApmTest, Channels) {
TestChangingForwardChannels(0, 1, apm_->kBadNumberChannelsError);
TestChangingReverseChannels(0, apm_->kBadNumberChannelsError);
- for (int i = 1; i < 4; ++i) {
- for (int j = 0; j < 1; ++j) {
+ for (size_t i = 1; i < 4; ++i) {
+ for (size_t j = 0; j < 1; ++j) {
// Output channels must be one or match input channels.
if (j == 1 || i == j) {
TestChangingForwardChannels(i, j, kNoErr);
@@ -887,7 +882,7 @@ TEST_F(ApmTest, Channels) {
EXPECT_EQ(i, apm_->num_input_channels());
EXPECT_EQ(j, apm_->num_output_channels());
// The number of reverse channels used for processing is always 1.
- EXPECT_EQ(1, apm_->num_reverse_channels());
+ EXPECT_EQ(1u, apm_->num_reverse_channels());
} else {
TestChangingForwardChannels(i, j,
AudioProcessing::kBadNumberChannelsError);
@@ -902,7 +897,7 @@ TEST_F(ApmTest, SampleRatesInt) {
EXPECT_EQ(apm_->kBadSampleRateError, ProcessStreamChooser(kIntFormat));
// Testing valid sample rates
int fs[] = {8000, 16000, 32000, 48000};
- for (size_t i = 0; i < sizeof(fs) / sizeof(*fs); i++) {
+ for (size_t i = 0; i < arraysize(fs); i++) {
SetContainerFormat(fs[i], 2, frame_, &float_cb_);
EXPECT_NOERR(ProcessStreamChooser(kIntFormat));
}
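
arraysize() (from webrtc/base/arraysize.h, included above) replaces the sizeof(a)/sizeof(*a) idiom throughout this file; unlike the raw division, it refuses to compile when handed a pointer instead of an array. A small illustration (array contents are arbitrary):

    const int fs[] = {8000, 16000, 32000, 48000};
    static_assert(arraysize(fs) == 4u, "computed at compile time");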
@@ -921,7 +916,7 @@ TEST_F(ApmTest, EchoCancellation) {
EchoCancellation::kModerateSuppression,
EchoCancellation::kHighSuppression,
};
- for (size_t i = 0; i < sizeof(level)/sizeof(*level); i++) {
+ for (size_t i = 0; i < arraysize(level); i++) {
EXPECT_EQ(apm_->kNoError,
apm_->echo_cancellation()->set_suppression_level(level[i]));
EXPECT_EQ(level[i],
@@ -998,7 +993,7 @@ TEST_F(ApmTest, DISABLED_EchoCancellationReportsCorrectDelays) {
// Test a couple of corner cases and verify that the estimated delay is
// within a valid region (set to +-1.5 blocks). Note that these cases are
// sampling frequency dependent.
- for (size_t i = 0; i < kProcessSampleRatesSize; i++) {
+ for (size_t i = 0; i < arraysize(kProcessSampleRates); i++) {
Init(kProcessSampleRates[i],
kProcessSampleRates[i],
kProcessSampleRates[i],
@@ -1070,7 +1065,7 @@ TEST_F(ApmTest, EchoControlMobile) {
EchoControlMobile::kSpeakerphone,
EchoControlMobile::kLoudSpeakerphone,
};
- for (size_t i = 0; i < sizeof(mode)/sizeof(*mode); i++) {
+ for (size_t i = 0; i < arraysize(mode); i++) {
EXPECT_EQ(apm_->kNoError,
apm_->echo_control_mobile()->set_routing_mode(mode[i]));
EXPECT_EQ(mode[i],
@@ -1135,7 +1130,7 @@ TEST_F(ApmTest, GainControl) {
GainControl::kAdaptiveDigital,
GainControl::kFixedDigital
};
- for (size_t i = 0; i < sizeof(mode)/sizeof(*mode); i++) {
+ for (size_t i = 0; i < arraysize(mode); i++) {
EXPECT_EQ(apm_->kNoError,
apm_->gain_control()->set_mode(mode[i]));
EXPECT_EQ(mode[i], apm_->gain_control()->mode());
@@ -1151,7 +1146,7 @@ TEST_F(ApmTest, GainControl) {
apm_->gain_control()->target_level_dbfs()));
int level_dbfs[] = {0, 6, 31};
- for (size_t i = 0; i < sizeof(level_dbfs)/sizeof(*level_dbfs); i++) {
+ for (size_t i = 0; i < arraysize(level_dbfs); i++) {
EXPECT_EQ(apm_->kNoError,
apm_->gain_control()->set_target_level_dbfs(level_dbfs[i]));
EXPECT_EQ(level_dbfs[i], apm_->gain_control()->target_level_dbfs());
@@ -1169,7 +1164,7 @@ TEST_F(ApmTest, GainControl) {
apm_->gain_control()->compression_gain_db()));
int gain_db[] = {0, 10, 90};
- for (size_t i = 0; i < sizeof(gain_db)/sizeof(*gain_db); i++) {
+ for (size_t i = 0; i < arraysize(gain_db); i++) {
EXPECT_EQ(apm_->kNoError,
apm_->gain_control()->set_compression_gain_db(gain_db[i]));
EXPECT_EQ(gain_db[i], apm_->gain_control()->compression_gain_db());
@@ -1200,14 +1195,14 @@ TEST_F(ApmTest, GainControl) {
apm_->gain_control()->analog_level_maximum()));
int min_level[] = {0, 255, 1024};
- for (size_t i = 0; i < sizeof(min_level)/sizeof(*min_level); i++) {
+ for (size_t i = 0; i < arraysize(min_level); i++) {
EXPECT_EQ(apm_->kNoError,
apm_->gain_control()->set_analog_level_limits(min_level[i], 1024));
EXPECT_EQ(min_level[i], apm_->gain_control()->analog_level_minimum());
}
int max_level[] = {0, 1024, 65535};
- for (size_t i = 0; i < sizeof(min_level)/sizeof(*min_level); i++) {
+ for (size_t i = 0; i < arraysize(min_level); i++) {
EXPECT_EQ(apm_->kNoError,
apm_->gain_control()->set_analog_level_limits(0, max_level[i]));
EXPECT_EQ(max_level[i], apm_->gain_control()->analog_level_maximum());
@@ -1246,7 +1241,7 @@ void ApmTest::RunQuantizedVolumeDoesNotGetStuckTest(int sample_rate) {
// Verifies that despite volume slider quantization, the AGC can continue to
// increase its volume.
TEST_F(ApmTest, QuantizedVolumeDoesNotGetStuck) {
- for (size_t i = 0; i < kSampleRatesSize; ++i) {
+ for (size_t i = 0; i < arraysize(kSampleRates); ++i) {
RunQuantizedVolumeDoesNotGetStuckTest(kSampleRates[i]);
}
}
@@ -1291,7 +1286,7 @@ void ApmTest::RunManualVolumeChangeIsPossibleTest(int sample_rate) {
}
TEST_F(ApmTest, ManualVolumeChangeIsPossible) {
- for (size_t i = 0; i < kSampleRatesSize; ++i) {
+ for (size_t i = 0; i < arraysize(kSampleRates); ++i) {
RunManualVolumeChangeIsPossibleTest(kSampleRates[i]);
}
}
@@ -1299,11 +1294,11 @@ TEST_F(ApmTest, ManualVolumeChangeIsPossible) {
#if !defined(WEBRTC_ANDROID) && !defined(WEBRTC_IOS)
TEST_F(ApmTest, AgcOnlyAdaptsWhenTargetSignalIsPresent) {
const int kSampleRateHz = 16000;
- const int kSamplesPerChannel =
- AudioProcessing::kChunkSizeMs * kSampleRateHz / 1000;
- const int kNumInputChannels = 2;
- const int kNumOutputChannels = 1;
- const int kNumChunks = 700;
+ const size_t kSamplesPerChannel =
+ static_cast<size_t>(AudioProcessing::kChunkSizeMs * kSampleRateHz / 1000);
+ const size_t kNumInputChannels = 2;
+ const size_t kNumOutputChannels = 1;
+ const size_t kNumChunks = 700;
const float kScaleFactor = 0.25f;
Config config;
std::vector<webrtc::Point> geometry;
@@ -1317,8 +1312,8 @@ TEST_F(ApmTest, AgcOnlyAdaptsWhenTargetSignalIsPresent) {
EXPECT_EQ(kNoErr, apm->gain_control()->Enable(true));
ChannelBuffer<float> src_buf(kSamplesPerChannel, kNumInputChannels);
ChannelBuffer<float> dest_buf(kSamplesPerChannel, kNumOutputChannels);
- const int max_length = kSamplesPerChannel * std::max(kNumInputChannels,
- kNumOutputChannels);
+ const size_t max_length = kSamplesPerChannel * std::max(kNumInputChannels,
+ kNumOutputChannels);
rtc::scoped_ptr<int16_t[]> int_data(new int16_t[max_length]);
rtc::scoped_ptr<float[]> float_data(new float[max_length]);
std::string filename = ResourceFilePath("far", kSampleRateHz);
@@ -1330,13 +1325,13 @@ TEST_F(ApmTest, AgcOnlyAdaptsWhenTargetSignalIsPresent) {
bool is_target = false;
EXPECT_CALL(*beamformer, is_target_present())
.WillRepeatedly(testing::ReturnPointee(&is_target));
- for (int i = 0; i < kNumChunks; ++i) {
+ for (size_t i = 0; i < kNumChunks; ++i) {
ASSERT_TRUE(ReadChunk(far_file,
int_data.get(),
float_data.get(),
&src_buf));
- for (int j = 0; j < kNumInputChannels; ++j) {
- for (int k = 0; k < kSamplesPerChannel; ++k) {
+ for (size_t j = 0; j < kNumInputChannels; ++j) {
+ for (size_t k = 0; k < kSamplesPerChannel; ++k) {
src_buf.channels()[j][k] *= kScaleFactor;
}
}
@@ -1355,13 +1350,13 @@ TEST_F(ApmTest, AgcOnlyAdaptsWhenTargetSignalIsPresent) {
apm->gain_control()->compression_gain_db());
rewind(far_file);
is_target = true;
- for (int i = 0; i < kNumChunks; ++i) {
+ for (size_t i = 0; i < kNumChunks; ++i) {
ASSERT_TRUE(ReadChunk(far_file,
int_data.get(),
float_data.get(),
&src_buf));
- for (int j = 0; j < kNumInputChannels; ++j) {
- for (int k = 0; k < kSamplesPerChannel; ++k) {
+ for (size_t j = 0; j < kNumInputChannels; ++j) {
+ for (size_t k = 0; k < kSamplesPerChannel; ++k) {
src_buf.channels()[j][k] *= kScaleFactor;
}
}
@@ -1390,7 +1385,7 @@ TEST_F(ApmTest, NoiseSuppression) {
NoiseSuppression::kHigh,
NoiseSuppression::kVeryHigh
};
- for (size_t i = 0; i < sizeof(level)/sizeof(*level); i++) {
+ for (size_t i = 0; i < arraysize(level); i++) {
EXPECT_EQ(apm_->kNoError,
apm_->noise_suppression()->set_level(level[i]));
EXPECT_EQ(level[i], apm_->noise_suppression()->level());
@@ -1492,7 +1487,7 @@ TEST_F(ApmTest, VoiceDetection) {
VoiceDetection::kModerateLikelihood,
VoiceDetection::kHighLikelihood
};
- for (size_t i = 0; i < sizeof(likelihood)/sizeof(*likelihood); i++) {
+ for (size_t i = 0; i < arraysize(likelihood); i++) {
EXPECT_EQ(apm_->kNoError,
apm_->voice_detection()->set_likelihood(likelihood[i]));
EXPECT_EQ(likelihood[i], apm_->voice_detection()->likelihood());
@@ -1524,7 +1519,7 @@ TEST_F(ApmTest, VoiceDetection) {
AudioFrame::kVadPassive,
AudioFrame::kVadUnknown
};
- for (size_t i = 0; i < sizeof(activity)/sizeof(*activity); i++) {
+ for (size_t i = 0; i < arraysize(activity); i++) {
frame_->vad_activity_ = activity[i];
EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
EXPECT_EQ(activity[i], frame_->vad_activity_);
@@ -1550,7 +1545,7 @@ TEST_F(ApmTest, AllProcessingDisabledByDefault) {
}
TEST_F(ApmTest, NoProcessingWhenAllComponentsDisabled) {
- for (size_t i = 0; i < kSampleRatesSize; i++) {
+ for (size_t i = 0; i < arraysize(kSampleRates); i++) {
Init(kSampleRates[i], kSampleRates[i], kSampleRates[i], 2, 2, 2, false);
SetFrameTo(frame_, 1000, 2000);
AudioFrame frame_copy;
@@ -1602,7 +1597,7 @@ TEST_F(ApmTest, NoProcessingWhenAllComponentsDisabledFloat) {
TEST_F(ApmTest, IdenticalInputChannelsResultInIdenticalOutputChannels) {
EnableAllComponents();
- for (size_t i = 0; i < kProcessSampleRatesSize; i++) {
+ for (size_t i = 0; i < arraysize(kProcessSampleRates); i++) {
Init(kProcessSampleRates[i],
kProcessSampleRates[i],
kProcessSampleRates[i],
@@ -1751,7 +1746,8 @@ void ApmTest::ProcessDebugDump(const std::string& in_filename,
const audioproc::ReverseStream msg = event_msg.reverse_stream();
if (msg.channel_size() > 0) {
- ASSERT_EQ(revframe_->num_channels_, msg.channel_size());
+ ASSERT_EQ(revframe_->num_channels_,
+ static_cast<size_t>(msg.channel_size()));
for (int i = 0; i < msg.channel_size(); ++i) {
memcpy(revfloat_cb_->channels()[i],
msg.channel(i).data(),
@@ -1781,7 +1777,8 @@ void ApmTest::ProcessDebugDump(const std::string& in_filename,
}
if (msg.input_channel_size() > 0) {
- ASSERT_EQ(frame_->num_channels_, msg.input_channel_size());
+ ASSERT_EQ(frame_->num_channels_,
+ static_cast<size_t>(msg.input_channel_size()));
for (int i = 0; i < msg.input_channel_size(); ++i) {
memcpy(float_cb_->channels()[i],
msg.input_channel(i).data(),
@@ -1939,11 +1936,14 @@ TEST_F(ApmTest, FloatAndIntInterfacesGiveSimilarResults) {
if (test->num_input_channels() != test->num_output_channels())
continue;
- const int num_render_channels = test->num_reverse_channels();
- const int num_input_channels = test->num_input_channels();
- const int num_output_channels = test->num_output_channels();
- const int samples_per_channel = test->sample_rate() *
- AudioProcessing::kChunkSizeMs / 1000;
+ const size_t num_render_channels =
+ static_cast<size_t>(test->num_reverse_channels());
+ const size_t num_input_channels =
+ static_cast<size_t>(test->num_input_channels());
+ const size_t num_output_channels =
+ static_cast<size_t>(test->num_output_channels());
+ const size_t samples_per_channel = static_cast<size_t>(
+ test->sample_rate() * AudioProcessing::kChunkSizeMs / 1000);
Init(test->sample_rate(), test->sample_rate(), test->sample_rate(),
num_input_channels, num_output_channels, num_render_channels, true);
@@ -1984,7 +1984,7 @@ TEST_F(ApmTest, FloatAndIntInterfacesGiveSimilarResults) {
test->sample_rate(),
LayoutFromChannels(num_output_channels),
float_cb_->channels()));
- for (int j = 0; j < num_output_channels; ++j) {
+ for (size_t j = 0; j < num_output_channels; ++j) {
FloatToS16(float_cb_->channels()[j],
samples_per_channel,
output_cb.channels()[j]);
@@ -2017,7 +2017,7 @@ TEST_F(ApmTest, FloatAndIntInterfacesGiveSimilarResults) {
0.01);
// Reset in case of downmixing.
- frame_->num_channels_ = test->num_input_channels();
+ frame_->num_channels_ = static_cast<size_t>(test->num_input_channels());
}
rewind(far_file_);
rewind(near_file_);
@@ -2035,9 +2035,9 @@ TEST_F(ApmTest, Process) {
OpenFileAndReadMessage(ref_filename_, &ref_data);
} else {
// Write the desired tests to the protobuf reference file.
- for (size_t i = 0; i < kChannelsSize; i++) {
- for (size_t j = 0; j < kChannelsSize; j++) {
- for (size_t l = 0; l < kProcessSampleRatesSize; l++) {
+ for (size_t i = 0; i < arraysize(kChannels); i++) {
+ for (size_t j = 0; j < arraysize(kChannels); j++) {
+ for (size_t l = 0; l < arraysize(kProcessSampleRates); l++) {
audioproc::Test* test = ref_data.add_test();
test->set_num_reverse_channels(kChannels[i]);
test->set_num_input_channels(kChannels[j]);
@@ -2078,9 +2078,9 @@ TEST_F(ApmTest, Process) {
Init(test->sample_rate(),
test->sample_rate(),
test->sample_rate(),
- test->num_input_channels(),
- test->num_output_channels(),
- test->num_reverse_channels(),
+ static_cast<size_t>(test->num_input_channels()),
+ static_cast<size_t>(test->num_output_channels()),
+ static_cast<size_t>(test->num_reverse_channels()),
true);
int frame_count = 0;
@@ -2105,7 +2105,8 @@ TEST_F(ApmTest, Process) {
EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
// Ensure the frame was downmixed properly.
- EXPECT_EQ(test->num_output_channels(), frame_->num_channels_);
+ EXPECT_EQ(static_cast<size_t>(test->num_output_channels()),
+ frame_->num_channels_);
max_output_average += MaxAudioFrame(*frame_);
@@ -2135,7 +2136,7 @@ TEST_F(ApmTest, Process) {
ASSERT_EQ(frame_size, write_count);
// Reset in case of downmixing.
- frame_->num_channels_ = test->num_input_channels();
+ frame_->num_channels_ = static_cast<size_t>(test->num_input_channels());
frame_count++;
}
max_output_average /= frame_count;
@@ -2264,12 +2265,11 @@ TEST_F(ApmTest, NoErrorsWithKeyboardChannel) {
{AudioProcessing::kStereoAndKeyboard, AudioProcessing::kMono},
{AudioProcessing::kStereoAndKeyboard, AudioProcessing::kStereo},
};
- size_t channel_format_size = sizeof(cf) / sizeof(*cf);
rtc::scoped_ptr<AudioProcessing> ap(AudioProcessing::Create());
// Enable one component just to ensure some processing takes place.
ap->noise_suppression()->Enable(true);
- for (size_t i = 0; i < channel_format_size; ++i) {
+ for (size_t i = 0; i < arraysize(cf); ++i) {
const int in_rate = 44100;
const int out_rate = 48000;
ChannelBuffer<float> in_cb(SamplesFromRate(in_rate),
@@ -2296,7 +2296,7 @@ TEST_F(ApmTest, NoErrorsWithKeyboardChannel) {
// error results to the supplied accumulators.
void UpdateBestSNR(const float* ref,
const float* test,
- int length,
+ size_t length,
int expected_delay,
double* variance_acc,
double* sq_error_acc) {
@@ -2308,7 +2308,7 @@ void UpdateBestSNR(const float* ref,
++delay) {
double sq_error = 0;
double variance = 0;
- for (int i = 0; i < length - delay; ++i) {
+ for (size_t i = 0; i < length - delay; ++i) {
double error = test[i + delay] - ref[i];
sq_error += error * error;
variance += ref[i] * ref[i];
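
One caution once length becomes size_t: the bound length - delay would wrap to a huge value if delay ever exceeded length. The test's small search window around expected_delay presumably keeps delay below length, but a sketch of the defensive form of this loop (a hypothetical helper, not part of the patch):

#include <cstddef>

double SumSquaredError(const float* ref, const float* test,
                       size_t length, size_t delay) {
  // With unsigned arithmetic, length - delay wraps when delay > length,
  // so clamp the bound explicitly.
  const size_t count = delay < length ? length - delay : 0;
  double sq_error = 0.0;
  for (size_t i = 0; i < count; ++i) {
    const double error = test[i + delay] - ref[i];
    sq_error += error * error;
  }
  return sq_error;
}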
@@ -2360,14 +2360,10 @@ class AudioProcessingTest
static void SetUpTestCase() {
// Create all needed output reference files.
const int kNativeRates[] = {8000, 16000, 32000, 48000};
- const size_t kNativeRatesSize =
- sizeof(kNativeRates) / sizeof(*kNativeRates);
- const int kNumChannels[] = {1, 2};
- const size_t kNumChannelsSize =
- sizeof(kNumChannels) / sizeof(*kNumChannels);
- for (size_t i = 0; i < kNativeRatesSize; ++i) {
- for (size_t j = 0; j < kNumChannelsSize; ++j) {
- for (size_t k = 0; k < kNumChannelsSize; ++k) {
+ const size_t kNumChannels[] = {1, 2};
+ for (size_t i = 0; i < arraysize(kNativeRates); ++i) {
+ for (size_t j = 0; j < arraysize(kNumChannels); ++j) {
+ for (size_t k = 0; k < arraysize(kNumChannels); ++k) {
// The reference files always have matching input and output channels.
ProcessFormat(kNativeRates[i], kNativeRates[i], kNativeRates[i],
kNativeRates[i], kNumChannels[j], kNumChannels[j],
@@ -2388,10 +2384,10 @@ class AudioProcessingTest
int output_rate,
int reverse_input_rate,
int reverse_output_rate,
- int num_input_channels,
- int num_output_channels,
- int num_reverse_input_channels,
- int num_reverse_output_channels,
+ size_t num_input_channels,
+ size_t num_output_channels,
+ size_t num_reverse_input_channels,
+ size_t num_reverse_output_channels,
std::string output_file_prefix) {
Config config;
config.Set<ExperimentalAgc>(new ExperimentalAgc(false));
@@ -2466,18 +2462,19 @@ class AudioProcessingTest
// Dump forward output to file.
Interleave(out_cb.channels(), out_cb.num_frames(), out_cb.num_channels(),
float_data.get());
- int out_length = out_cb.num_channels() * out_cb.num_frames();
+ size_t out_length = out_cb.num_channels() * out_cb.num_frames();
- ASSERT_EQ(static_cast<size_t>(out_length),
+ ASSERT_EQ(out_length,
fwrite(float_data.get(), sizeof(float_data[0]),
out_length, out_file));
// Dump reverse output to file.
Interleave(rev_out_cb.channels(), rev_out_cb.num_frames(),
rev_out_cb.num_channels(), float_data.get());
- int rev_out_length = rev_out_cb.num_channels() * rev_out_cb.num_frames();
+ size_t rev_out_length =
+ rev_out_cb.num_channels() * rev_out_cb.num_frames();
- ASSERT_EQ(static_cast<size_t>(rev_out_length),
+ ASSERT_EQ(rev_out_length,
fwrite(float_data.get(), sizeof(float_data[0]), rev_out_length,
rev_out_file));
@@ -2513,9 +2510,8 @@ TEST_P(AudioProcessingTest, Formats) {
{2, 2, 1, 1},
{2, 2, 2, 2},
};
- size_t channel_format_size = sizeof(cf) / sizeof(*cf);
- for (size_t i = 0; i < channel_format_size; ++i) {
+ for (size_t i = 0; i < arraysize(cf); ++i) {
ProcessFormat(input_rate_, output_rate_, reverse_input_rate_,
reverse_output_rate_, cf[i].num_input, cf[i].num_output,
cf[i].num_reverse_input, cf[i].num_reverse_output, "out");
@@ -2565,8 +2561,8 @@ TEST_P(AudioProcessingTest, Formats) {
ASSERT_TRUE(out_file != NULL);
ASSERT_TRUE(ref_file != NULL);
- const int ref_length = SamplesFromRate(ref_rate) * out_num;
- const int out_length = SamplesFromRate(out_rate) * out_num;
+ const size_t ref_length = SamplesFromRate(ref_rate) * out_num;
+ const size_t out_length = SamplesFromRate(out_rate) * out_num;
// Data from the reference file.
rtc::scoped_ptr<float[]> ref_data(new float[ref_length]);
// Data from the output file.
@@ -2606,8 +2602,9 @@ TEST_P(AudioProcessingTest, Formats) {
if (out_rate != ref_rate) {
// Resample the output back to its internal processing rate if
// necessary.
- ASSERT_EQ(ref_length, resampler.Resample(out_ptr, out_length,
- cmp_data.get(), ref_length));
+ ASSERT_EQ(ref_length,
+ static_cast<size_t>(resampler.Resample(
+ out_ptr, out_length, cmp_data.get(), ref_length)));
out_ptr = cmp_data.get();
}
@@ -2752,9 +2749,5 @@ INSTANTIATE_TEST_CASE_P(
std::tr1::make_tuple(16000, 16000, 16000, 16000, 0, 0)));
#endif
-// TODO(henrike): re-implement functionality lost when removing the old main
-// function. See
-// https://code.google.com/p/webrtc/issues/detail?id=1981
-
} // namespace
} // namespace webrtc
diff --git a/webrtc/modules/audio_processing/test/audioproc_float.cc b/webrtc/modules/audio_processing/test/audioproc_float.cc
index 811e9070fa..a489d255c8 100644
--- a/webrtc/modules/audio_processing/test/audioproc_float.cc
+++ b/webrtc/modules/audio_processing/test/audioproc_float.cc
@@ -9,35 +9,50 @@
*/
#include <stdio.h>
+#include <iostream>
#include <sstream>
#include <string>
+#include <utility>
#include "gflags/gflags.h"
#include "webrtc/base/checks.h"
+#include "webrtc/base/format_macros.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/common_audio/channel_buffer.h"
#include "webrtc/common_audio/wav_file.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
+#include "webrtc/modules/audio_processing/test/audio_file_processor.h"
#include "webrtc/modules/audio_processing/test/protobuf_utils.h"
#include "webrtc/modules/audio_processing/test/test_utils.h"
#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/test/testsupport/trace_to_stderr.h"
-DEFINE_string(dump, "", "The name of the debug dump file to read from.");
-DEFINE_string(i, "", "The name of the input file to read from.");
-DEFINE_string(i_rev, "", "The name of the reverse input file to read from.");
-DEFINE_string(o, "out.wav", "Name of the output file to write to.");
-DEFINE_string(o_rev,
- "out_rev.wav",
- "Name of the reverse output file to write to.");
-DEFINE_int32(out_channels, 0, "Number of output channels. Defaults to input.");
-DEFINE_int32(out_sample_rate, 0,
- "Output sample rate in Hz. Defaults to input.");
+namespace {
+
+bool ValidateOutChannels(const char* flagname, int32_t value) {
+ return value >= 0;
+}
+
+} // namespace
+
+DEFINE_string(dump, "", "Name of the aecdump debug file to read from.");
+DEFINE_string(i, "", "Name of the capture input stream file to read from.");
+DEFINE_string(
+ o,
+ "out.wav",
+ "Name of the output file to write the processed capture stream to.");
+DEFINE_int32(out_channels, 1, "Number of output channels.");
+const bool out_channels_dummy =
+ google::RegisterFlagValidator(&FLAGS_out_channels, &ValidateOutChannels);
+DEFINE_int32(out_sample_rate, 48000, "Output sample rate in Hz.");
DEFINE_string(mic_positions, "",
"Space delimited cartesian coordinates of microphones in meters. "
"The coordinates of each point are contiguous. "
"For a two element array: \"x1 y1 z1 x2 y2 z2\"");
-DEFINE_double(target_angle_degrees, 90, "The azimuth of the target in radians");
+DEFINE_double(
+ target_angle_degrees,
+ 90,
+ "The azimuth of the target in degrees. Only applies to beamforming.");
DEFINE_bool(aec, false, "Enable echo cancellation.");
DEFINE_bool(agc, false, "Enable automatic gain control.");
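
The RegisterFlagValidator call added above uses the standard gflags pattern: the validator runs at flag-parse time, and the returned bool is captured in a dummy global only to force registration during static initialization, before main() runs. The pattern in isolation (flag name is a placeholder):

#include <stdint.h>

#include "gflags/gflags.h"

DEFINE_int32(example_channels, 1, "Number of channels (placeholder flag).");

static bool ValidatePositive(const char* flagname, int32_t value) {
  return value >= 0;  // Reject negatives at parse time, before main() runs.
}

// Stored only to force registration at static-init time.
static const bool example_dummy =
    google::RegisterFlagValidator(&FLAGS_example_channels, &ValidatePositive);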
@@ -64,15 +79,6 @@ const char kUsage[] =
"All components are disabled by default. If any bi-directional components\n"
"are enabled, only debug dump files are permitted.";
-// Returns a StreamConfig corresponding to wav_file if it's non-nullptr.
-// Otherwise returns a default initialized StreamConfig.
-StreamConfig MakeStreamConfig(const WavFile* wav_file) {
- if (wav_file) {
- return {wav_file->sample_rate(), wav_file->num_channels()};
- }
- return {};
-}
-
} // namespace
int main(int argc, char* argv[]) {
@@ -84,158 +90,75 @@ int main(int argc, char* argv[]) {
"An input file must be specified with either -i or -dump.\n");
return 1;
}
- if (!FLAGS_dump.empty()) {
- fprintf(stderr, "FIXME: the -dump option is not yet implemented.\n");
+ if (FLAGS_dump.empty() && (FLAGS_aec || FLAGS_ie)) {
+ fprintf(stderr, "-aec and -ie require a -dump file.\n");
+ return 1;
+ }
+ if (FLAGS_ie) {
+ fprintf(stderr,
+ "FIXME(ajm): The intelligibility enhancer output is not dumped.\n");
return 1;
}
test::TraceToStderr trace_to_stderr(true);
- WavReader in_file(FLAGS_i);
- // If the output format is uninitialized, use the input format.
- const int out_channels =
- FLAGS_out_channels ? FLAGS_out_channels : in_file.num_channels();
- const int out_sample_rate =
- FLAGS_out_sample_rate ? FLAGS_out_sample_rate : in_file.sample_rate();
- WavWriter out_file(FLAGS_o, out_sample_rate, out_channels);
-
Config config;
- config.Set<ExperimentalNs>(new ExperimentalNs(FLAGS_ts || FLAGS_all));
- config.Set<Intelligibility>(new Intelligibility(FLAGS_ie || FLAGS_all));
-
if (FLAGS_bf || FLAGS_all) {
- const size_t num_mics = in_file.num_channels();
- const std::vector<Point> array_geometry =
- ParseArrayGeometry(FLAGS_mic_positions, num_mics);
- RTC_CHECK_EQ(array_geometry.size(), num_mics);
-
+ if (FLAGS_mic_positions.empty()) {
+ fprintf(stderr, "-mic_positions must be specified when -bf is used.\n");
+ return 1;
+ }
config.Set<Beamforming>(new Beamforming(
- true, array_geometry,
+ true, ParseArrayGeometry(FLAGS_mic_positions),
SphericalPointf(DegreesToRadians(FLAGS_target_angle_degrees), 0.f,
1.f)));
}
+ config.Set<ExperimentalNs>(new ExperimentalNs(FLAGS_ts || FLAGS_all));
+ config.Set<Intelligibility>(new Intelligibility(FLAGS_ie || FLAGS_all));
rtc::scoped_ptr<AudioProcessing> ap(AudioProcessing::Create(config));
- if (!FLAGS_dump.empty()) {
- RTC_CHECK_EQ(kNoErr,
- ap->echo_cancellation()->Enable(FLAGS_aec || FLAGS_all));
- } else if (FLAGS_aec) {
- fprintf(stderr, "-aec requires a -dump file.\n");
- return -1;
- }
- bool process_reverse = !FLAGS_i_rev.empty();
+ RTC_CHECK_EQ(kNoErr, ap->echo_cancellation()->Enable(FLAGS_aec || FLAGS_all));
RTC_CHECK_EQ(kNoErr, ap->gain_control()->Enable(FLAGS_agc || FLAGS_all));
- RTC_CHECK_EQ(kNoErr,
- ap->gain_control()->set_mode(GainControl::kFixedDigital));
RTC_CHECK_EQ(kNoErr, ap->high_pass_filter()->Enable(FLAGS_hpf || FLAGS_all));
RTC_CHECK_EQ(kNoErr, ap->noise_suppression()->Enable(FLAGS_ns || FLAGS_all));
- if (FLAGS_ns_level != -1)
+ if (FLAGS_ns_level != -1) {
RTC_CHECK_EQ(kNoErr,
ap->noise_suppression()->set_level(
static_cast<NoiseSuppression::Level>(FLAGS_ns_level)));
-
- printf("Input file: %s\nChannels: %d, Sample rate: %d Hz\n\n",
- FLAGS_i.c_str(), in_file.num_channels(), in_file.sample_rate());
- printf("Output file: %s\nChannels: %d, Sample rate: %d Hz\n\n",
- FLAGS_o.c_str(), out_file.num_channels(), out_file.sample_rate());
-
- ChannelBuffer<float> in_buf(
- rtc::CheckedDivExact(in_file.sample_rate(), kChunksPerSecond),
- in_file.num_channels());
- ChannelBuffer<float> out_buf(
- rtc::CheckedDivExact(out_file.sample_rate(), kChunksPerSecond),
- out_file.num_channels());
-
- std::vector<float> in_interleaved(in_buf.size());
- std::vector<float> out_interleaved(out_buf.size());
-
- rtc::scoped_ptr<WavReader> in_rev_file;
- rtc::scoped_ptr<WavWriter> out_rev_file;
- rtc::scoped_ptr<ChannelBuffer<float>> in_rev_buf;
- rtc::scoped_ptr<ChannelBuffer<float>> out_rev_buf;
- std::vector<float> in_rev_interleaved;
- std::vector<float> out_rev_interleaved;
- if (process_reverse) {
- in_rev_file.reset(new WavReader(FLAGS_i_rev));
- out_rev_file.reset(new WavWriter(FLAGS_o_rev, in_rev_file->sample_rate(),
- in_rev_file->num_channels()));
- printf("In rev file: %s\nChannels: %d, Sample rate: %d Hz\n\n",
- FLAGS_i_rev.c_str(), in_rev_file->num_channels(),
- in_rev_file->sample_rate());
- printf("Out rev file: %s\nChannels: %d, Sample rate: %d Hz\n\n",
- FLAGS_o_rev.c_str(), out_rev_file->num_channels(),
- out_rev_file->sample_rate());
- in_rev_buf.reset(new ChannelBuffer<float>(
- rtc::CheckedDivExact(in_rev_file->sample_rate(), kChunksPerSecond),
- in_rev_file->num_channels()));
- in_rev_interleaved.resize(in_rev_buf->size());
- out_rev_buf.reset(new ChannelBuffer<float>(
- rtc::CheckedDivExact(out_rev_file->sample_rate(), kChunksPerSecond),
- out_rev_file->num_channels()));
- out_rev_interleaved.resize(out_rev_buf->size());
+ }
+ ap->set_stream_key_pressed(FLAGS_ts);
+
+ rtc::scoped_ptr<AudioFileProcessor> processor;
+ auto out_file = rtc_make_scoped_ptr(new WavWriter(
+ FLAGS_o, FLAGS_out_sample_rate, static_cast<size_t>(FLAGS_out_channels)));
+ std::cout << FLAGS_o << ": " << out_file->FormatAsString() << std::endl;
+ if (FLAGS_dump.empty()) {
+ auto in_file = rtc_make_scoped_ptr(new WavReader(FLAGS_i));
+ std::cout << FLAGS_i << ": " << in_file->FormatAsString() << std::endl;
+ processor.reset(new WavFileProcessor(std::move(ap), std::move(in_file),
+ std::move(out_file)));
+
+ } else {
+ processor.reset(new AecDumpFileProcessor(
+ std::move(ap), fopen(FLAGS_dump.c_str(), "rb"), std::move(out_file)));
}
- TickTime processing_start_time;
- TickInterval accumulated_time;
int num_chunks = 0;
-
- const auto input_config = MakeStreamConfig(&in_file);
- const auto output_config = MakeStreamConfig(&out_file);
- const auto reverse_input_config = MakeStreamConfig(in_rev_file.get());
- const auto reverse_output_config = MakeStreamConfig(out_rev_file.get());
-
- while (in_file.ReadSamples(in_interleaved.size(),
- &in_interleaved[0]) == in_interleaved.size()) {
- // Have logs display the file time rather than wallclock time.
+ while (processor->ProcessChunk()) {
trace_to_stderr.SetTimeSeconds(num_chunks * 1.f / kChunksPerSecond);
- FloatS16ToFloat(&in_interleaved[0], in_interleaved.size(),
- &in_interleaved[0]);
- Deinterleave(&in_interleaved[0], in_buf.num_frames(),
- in_buf.num_channels(), in_buf.channels());
- if (process_reverse) {
- in_rev_file->ReadSamples(in_rev_interleaved.size(),
- in_rev_interleaved.data());
- FloatS16ToFloat(in_rev_interleaved.data(), in_rev_interleaved.size(),
- in_rev_interleaved.data());
- Deinterleave(in_rev_interleaved.data(), in_rev_buf->num_frames(),
- in_rev_buf->num_channels(), in_rev_buf->channels());
- }
-
- if (FLAGS_perf) {
- processing_start_time = TickTime::Now();
- }
- RTC_CHECK_EQ(kNoErr, ap->ProcessStream(in_buf.channels(), input_config,
- output_config, out_buf.channels()));
- if (process_reverse) {
- RTC_CHECK_EQ(kNoErr, ap->ProcessReverseStream(
- in_rev_buf->channels(), reverse_input_config,
- reverse_output_config, out_rev_buf->channels()));
- }
- if (FLAGS_perf) {
- accumulated_time += TickTime::Now() - processing_start_time;
- }
-
- Interleave(out_buf.channels(), out_buf.num_frames(),
- out_buf.num_channels(), &out_interleaved[0]);
- FloatToFloatS16(&out_interleaved[0], out_interleaved.size(),
- &out_interleaved[0]);
- out_file.WriteSamples(&out_interleaved[0], out_interleaved.size());
- if (process_reverse) {
- Interleave(out_rev_buf->channels(), out_rev_buf->num_frames(),
- out_rev_buf->num_channels(), out_rev_interleaved.data());
- FloatToFloatS16(out_rev_interleaved.data(), out_rev_interleaved.size(),
- out_rev_interleaved.data());
- out_rev_file->WriteSamples(out_rev_interleaved.data(),
- out_rev_interleaved.size());
- }
- num_chunks++;
+ ++num_chunks;
}
+
if (FLAGS_perf) {
- int64_t execution_time_ms = accumulated_time.Milliseconds();
- printf("\nExecution time: %.3f s\nFile time: %.2f s\n"
- "Time per chunk: %.3f ms\n",
- execution_time_ms * 0.001f, num_chunks * 1.f / kChunksPerSecond,
- execution_time_ms * 1.f / num_chunks);
+ const auto& proc_time = processor->proc_time();
+ int64_t exec_time_us = proc_time.sum.Microseconds();
+ printf(
+ "\nExecution time: %.3f s, File time: %.2f s\n"
+ "Time per chunk (mean, max, min):\n%.0f us, %.0f us, %.0f us\n",
+ exec_time_us * 1e-6, num_chunks * 1.f / kChunksPerSecond,
+ exec_time_us * 1.f / num_chunks, 1.f * proc_time.max.Microseconds(),
+ 1.f * proc_time.min.Microseconds());
}
+
return 0;
}
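
The rewritten main() above funnels both input paths, WAV capture and aecdump replay, through a single per-chunk interface, so the driver loop and the perf accounting no longer care where the audio comes from. A sketch of that shape (the real interface lives in webrtc/modules/audio_processing/test/audio_file_processor.h; the names here are assumptions for illustration, not copied from that header):

#include <memory>

// Hypothetical stand-in for the real AudioFileProcessor interface.
class AudioFileProcessorSketch {
 public:
  virtual ~AudioFileProcessorSketch() {}
  // Processes one 10 ms chunk; returns false once the input is exhausted.
  virtual bool ProcessChunk() = 0;
};

// Both the WAV path and the aecdump path reduce to the same driver loop.
int Drive(std::unique_ptr<AudioFileProcessorSketch> processor) {
  int num_chunks = 0;
  while (processor->ProcessChunk())
    ++num_chunks;
  return num_chunks;
}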
diff --git a/webrtc/modules/audio_processing/test/debug_dump_test.cc b/webrtc/modules/audio_processing/test/debug_dump_test.cc
new file mode 100644
index 0000000000..005faa0f44
--- /dev/null
+++ b/webrtc/modules/audio_processing/test/debug_dump_test.cc
@@ -0,0 +1,612 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stddef.h> // size_t
+#include <string>
+#include <vector>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/audio_processing/debug.pb.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/common_audio/channel_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/tools/resample_input_audio_file.h"
+#include "webrtc/modules/audio_processing/include/audio_processing.h"
+#include "webrtc/modules/audio_processing/test/protobuf_utils.h"
+#include "webrtc/modules/audio_processing/test/test_utils.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+namespace webrtc {
+namespace test {
+
+namespace {
+
+void MaybeResetBuffer(rtc::scoped_ptr<ChannelBuffer<float>>* buffer,
+ const StreamConfig& config) {
+ auto& buffer_ref = *buffer;
+ if (!buffer_ref.get() || buffer_ref->num_frames() != config.num_frames() ||
+ buffer_ref->num_channels() != config.num_channels()) {
+ buffer_ref.reset(new ChannelBuffer<float>(config.num_frames(),
+ config.num_channels()));
+ }
+}
+
+class DebugDumpGenerator {
+ public:
+ DebugDumpGenerator(const std::string& input_file_name,
+ int input_file_rate_hz,
+ int input_channels,
+ const std::string& reverse_file_name,
+ int reverse_file_rate_hz,
+ int reverse_channels,
+ const Config& config,
+ const std::string& dump_file_name);
+
+ // Constructor that uses default input files.
+ explicit DebugDumpGenerator(const Config& config);
+
+ ~DebugDumpGenerator();
+
+ // Changes the sample rate of the input audio to the APM.
+ void SetInputRate(int rate_hz);
+
+ // Sets whether to convert stereo input to mono by discarding other channels.
+ void ForceInputMono(bool mono);
+
+ // Changes the sample rate of the reverse audio to the APM.
+ void SetReverseRate(int rate_hz);
+
+ // Sets whether to convert the stereo reverse signal to mono by discarding
+ // the other channels.
+ void ForceReverseMono(bool mono);
+
+ // Sets the required sample rate of the APM output.
+ void SetOutputRate(int rate_hz);
+
+ // Sets the required channels of the APM output.
+ void SetOutputChannels(int channels);
+
+ std::string dump_file_name() const { return dump_file_name_; }
+
+ void StartRecording();
+ void Process(size_t num_blocks);
+ void StopRecording();
+ AudioProcessing* apm() const { return apm_.get(); }
+
+ private:
+ static void ReadAndDeinterleave(ResampleInputAudioFile* audio, int channels,
+ const StreamConfig& config,
+ float* const* buffer);
+
+ // APM input/output settings.
+ StreamConfig input_config_;
+ StreamConfig reverse_config_;
+ StreamConfig output_config_;
+
+ // Input file format.
+ const std::string input_file_name_;
+ ResampleInputAudioFile input_audio_;
+ const int input_file_channels_;
+
+ // Reverse file format.
+ const std::string reverse_file_name_;
+ ResampleInputAudioFile reverse_audio_;
+ const int reverse_file_channels_;
+
+ // Buffer for APM input/output.
+ rtc::scoped_ptr<ChannelBuffer<float>> input_;
+ rtc::scoped_ptr<ChannelBuffer<float>> reverse_;
+ rtc::scoped_ptr<ChannelBuffer<float>> output_;
+
+ rtc::scoped_ptr<AudioProcessing> apm_;
+
+ const std::string dump_file_name_;
+};
+
+DebugDumpGenerator::DebugDumpGenerator(const std::string& input_file_name,
+ int input_rate_hz,
+ int input_channels,
+ const std::string& reverse_file_name,
+ int reverse_rate_hz,
+ int reverse_channels,
+ const Config& config,
+ const std::string& dump_file_name)
+ : input_config_(input_rate_hz, input_channels),
+ reverse_config_(reverse_rate_hz, reverse_channels),
+ output_config_(input_rate_hz, input_channels),
+ input_audio_(input_file_name, input_rate_hz, input_rate_hz),
+ input_file_channels_(input_channels),
+ reverse_audio_(reverse_file_name, reverse_rate_hz, reverse_rate_hz),
+ reverse_file_channels_(reverse_channels),
+ input_(new ChannelBuffer<float>(input_config_.num_frames(),
+ input_config_.num_channels())),
+ reverse_(new ChannelBuffer<float>(reverse_config_.num_frames(),
+ reverse_config_.num_channels())),
+ output_(new ChannelBuffer<float>(output_config_.num_frames(),
+ output_config_.num_channels())),
+ apm_(AudioProcessing::Create(config)),
+ dump_file_name_(dump_file_name) {
+}
+
+DebugDumpGenerator::DebugDumpGenerator(const Config& config)
+ : DebugDumpGenerator(ResourcePath("near32_stereo", "pcm"), 32000, 2,
+ ResourcePath("far32_stereo", "pcm"), 32000, 2,
+ config,
+ TempFilename(OutputPath(), "debug_aec")) {
+}
+
+DebugDumpGenerator::~DebugDumpGenerator() {
+ remove(dump_file_name_.c_str());
+}
+
+void DebugDumpGenerator::SetInputRate(int rate_hz) {
+ input_audio_.set_output_rate_hz(rate_hz);
+ input_config_.set_sample_rate_hz(rate_hz);
+ MaybeResetBuffer(&input_, input_config_);
+}
+
+void DebugDumpGenerator::ForceInputMono(bool mono) {
+ const int channels = mono ? 1 : input_file_channels_;
+ input_config_.set_num_channels(channels);
+ MaybeResetBuffer(&input_, input_config_);
+}
+
+void DebugDumpGenerator::SetReverseRate(int rate_hz) {
+ reverse_audio_.set_output_rate_hz(rate_hz);
+ reverse_config_.set_sample_rate_hz(rate_hz);
+ MaybeResetBuffer(&reverse_, reverse_config_);
+}
+
+void DebugDumpGenerator::ForceReverseMono(bool mono) {
+ const int channels = mono ? 1 : reverse_file_channels_;
+ reverse_config_.set_num_channels(channels);
+ MaybeResetBuffer(&reverse_, reverse_config_);
+}
+
+void DebugDumpGenerator::SetOutputRate(int rate_hz) {
+ output_config_.set_sample_rate_hz(rate_hz);
+ MaybeResetBuffer(&output_, output_config_);
+}
+
+void DebugDumpGenerator::SetOutputChannels(int channels) {
+ output_config_.set_num_channels(channels);
+ MaybeResetBuffer(&output_, output_config_);
+}
+
+void DebugDumpGenerator::StartRecording() {
+ apm_->StartDebugRecording(dump_file_name_.c_str());
+}
+
+void DebugDumpGenerator::Process(size_t num_blocks) {
+ for (size_t i = 0; i < num_blocks; ++i) {
+ ReadAndDeinterleave(&reverse_audio_, reverse_file_channels_,
+ reverse_config_, reverse_->channels());
+ ReadAndDeinterleave(&input_audio_, input_file_channels_, input_config_,
+ input_->channels());
+ RTC_CHECK_EQ(AudioProcessing::kNoError, apm_->set_stream_delay_ms(100));
+ apm_->set_stream_key_pressed(i % 10 == 9);
+ RTC_CHECK_EQ(AudioProcessing::kNoError,
+ apm_->ProcessStream(input_->channels(), input_config_,
+ output_config_, output_->channels()));
+
+ RTC_CHECK_EQ(AudioProcessing::kNoError,
+ apm_->ProcessReverseStream(reverse_->channels(),
+ reverse_config_,
+ reverse_config_,
+ reverse_->channels()));
+ }
+}
+
+void DebugDumpGenerator::StopRecording() {
+ apm_->StopDebugRecording();
+}
+
+void DebugDumpGenerator::ReadAndDeinterleave(ResampleInputAudioFile* audio,
+ int channels,
+ const StreamConfig& config,
+ float* const* buffer) {
+ const size_t num_frames = config.num_frames();
+ const int out_channels = config.num_channels();
+
+ std::vector<int16_t> signal(channels * num_frames);
+
+ audio->Read(num_frames * channels, &signal[0]);
+
+ // We only allow reducing the number of channels by discarding some of them.
+ RTC_CHECK_LE(out_channels, channels);
+ for (int channel = 0; channel < out_channels; ++channel) {
+ for (size_t i = 0; i < num_frames; ++i) {
+ buffer[channel][i] = S16ToFloat(signal[i * channels + channel]);
+ }
+ }
+}
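
A worked illustration of the indexing in ReadAndDeinterleave above: interleaved stereo samples are laid out as [L0 R0 L1 R1 ...], so frame i of channel c lives at signal[i * channels + c]. Standalone demo:

#include <cstdint>
#include <cstdio>
#include <vector>

int main() {
  const int channels = 2;
  const size_t num_frames = 3;
  // Interleaved stereo: L0 R0 L1 R1 L2 R2.
  const std::vector<int16_t> signal = {10, 20, 11, 21, 12, 22};
  std::vector<std::vector<int16_t>> planar(
      channels, std::vector<int16_t>(num_frames));
  for (int channel = 0; channel < channels; ++channel) {
    for (size_t i = 0; i < num_frames; ++i) {
      planar[channel][i] = signal[i * channels + channel];
    }
  }
  std::printf("left: %d %d %d\n", planar[0][0], planar[0][1], planar[0][2]);
  return 0;
}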
+
+} // namespace
+
+class DebugDumpTest : public ::testing::Test {
+ public:
+ DebugDumpTest();
+
+ // VerifyDebugDump replays a debug dump using APM and verifies that the
+ // result is bit-exact with the output channels in the dump. This is only
+ // guaranteed if the debug dump is started on the first frame.
+ void VerifyDebugDump(const std::string& dump_file_name);
+
+ private:
+ // The following functions are facilities for replaying debug dumps.
+ void OnInitEvent(const audioproc::Init& msg);
+ void OnStreamEvent(const audioproc::Stream& msg);
+ void OnReverseStreamEvent(const audioproc::ReverseStream& msg);
+ void OnConfigEvent(const audioproc::Config& msg);
+
+ void MaybeRecreateApm(const audioproc::Config& msg);
+ void ConfigureApm(const audioproc::Config& msg);
+
+ // Buffer for APM input/output.
+ rtc::scoped_ptr<ChannelBuffer<float>> input_;
+ rtc::scoped_ptr<ChannelBuffer<float>> reverse_;
+ rtc::scoped_ptr<ChannelBuffer<float>> output_;
+
+ rtc::scoped_ptr<AudioProcessing> apm_;
+
+ StreamConfig input_config_;
+ StreamConfig reverse_config_;
+ StreamConfig output_config_;
+};
+
+DebugDumpTest::DebugDumpTest()
+ : input_(nullptr), // will be created upon usage.
+ reverse_(nullptr),
+ output_(nullptr),
+ apm_(nullptr) {
+}
+
+void DebugDumpTest::VerifyDebugDump(const std::string& in_filename) {
+ FILE* in_file = fopen(in_filename.c_str(), "rb");
+ ASSERT_TRUE(in_file);
+ audioproc::Event event_msg;
+
+ while (ReadMessageFromFile(in_file, &event_msg)) {
+ switch (event_msg.type()) {
+ case audioproc::Event::INIT:
+ OnInitEvent(event_msg.init());
+ break;
+ case audioproc::Event::STREAM:
+ OnStreamEvent(event_msg.stream());
+ break;
+ case audioproc::Event::REVERSE_STREAM:
+ OnReverseStreamEvent(event_msg.reverse_stream());
+ break;
+ case audioproc::Event::CONFIG:
+ OnConfigEvent(event_msg.config());
+ break;
+ case audioproc::Event::UNKNOWN_EVENT:
+ // We do not currently expect to receive an UNKNOWN event.
+ FAIL();
+ }
+ }
+ fclose(in_file);
+}
+
+// OnInitEvent resets the input/output/reverse channel format.
+void DebugDumpTest::OnInitEvent(const audioproc::Init& msg) {
+ ASSERT_TRUE(msg.has_num_input_channels());
+ ASSERT_TRUE(msg.has_output_sample_rate());
+ ASSERT_TRUE(msg.has_num_output_channels());
+ ASSERT_TRUE(msg.has_reverse_sample_rate());
+ ASSERT_TRUE(msg.has_num_reverse_channels());
+
+ input_config_ = StreamConfig(msg.sample_rate(), msg.num_input_channels());
+ output_config_ =
+ StreamConfig(msg.output_sample_rate(), msg.num_output_channels());
+ reverse_config_ =
+ StreamConfig(msg.reverse_sample_rate(), msg.num_reverse_channels());
+
+ MaybeResetBuffer(&input_, input_config_);
+ MaybeResetBuffer(&output_, output_config_);
+ MaybeResetBuffer(&reverse_, reverse_config_);
+}
+
+// OnStreamEvent replays an input signal and verifies the output.
+void DebugDumpTest::OnStreamEvent(const audioproc::Stream& msg) {
+ // APM should have been created.
+ ASSERT_TRUE(apm_.get());
+
+ EXPECT_NOERR(apm_->gain_control()->set_stream_analog_level(msg.level()));
+ EXPECT_NOERR(apm_->set_stream_delay_ms(msg.delay()));
+ apm_->echo_cancellation()->set_stream_drift_samples(msg.drift());
+ if (msg.has_keypress())
+ apm_->set_stream_key_pressed(msg.keypress());
+ else
+ apm_->set_stream_key_pressed(true);
+
+ ASSERT_EQ(input_config_.num_channels(),
+ static_cast<size_t>(msg.input_channel_size()));
+ ASSERT_EQ(input_config_.num_frames() * sizeof(float),
+ msg.input_channel(0).size());
+
+ for (int i = 0; i < msg.input_channel_size(); ++i) {
+ memcpy(input_->channels()[i], msg.input_channel(i).data(),
+ msg.input_channel(i).size());
+ }
+
+ ASSERT_EQ(AudioProcessing::kNoError,
+ apm_->ProcessStream(input_->channels(), input_config_,
+ output_config_, output_->channels()));
+
+ // Check that output of APM is bit-exact to the output in the dump.
+ ASSERT_EQ(output_config_.num_channels(),
+ static_cast<size_t>(msg.output_channel_size()));
+ ASSERT_EQ(output_config_.num_frames() * sizeof(float),
+ msg.output_channel(0).size());
+ for (int i = 0; i < msg.output_channel_size(); ++i) {
+ ASSERT_EQ(0, memcmp(output_->channels()[i], msg.output_channel(i).data(),
+ msg.output_channel(i).size()));
+ }
+}
+
+void DebugDumpTest::OnReverseStreamEvent(const audioproc::ReverseStream& msg) {
+ // APM should have been created.
+ ASSERT_TRUE(apm_.get());
+
+ ASSERT_GT(msg.channel_size(), 0);
+ ASSERT_EQ(reverse_config_.num_channels(),
+ static_cast<size_t>(msg.channel_size()));
+ ASSERT_EQ(reverse_config_.num_frames() * sizeof(float),
+ msg.channel(0).size());
+
+ for (int i = 0; i < msg.channel_size(); ++i) {
+ memcpy(reverse_->channels()[i], msg.channel(i).data(),
+ msg.channel(i).size());
+ }
+
+ ASSERT_EQ(AudioProcessing::kNoError,
+ apm_->ProcessReverseStream(reverse_->channels(),
+ reverse_config_,
+ reverse_config_,
+ reverse_->channels()));
+}
+
+void DebugDumpTest::OnConfigEvent(const audioproc::Config& msg) {
+ MaybeRecreateApm(msg);
+ ConfigureApm(msg);
+}
+
+void DebugDumpTest::MaybeRecreateApm(const audioproc::Config& msg) {
+ // These configurations cannot be changed on the fly.
+ Config config;
+ ASSERT_TRUE(msg.has_aec_delay_agnostic_enabled());
+ config.Set<DelayAgnostic>(
+ new DelayAgnostic(msg.aec_delay_agnostic_enabled()));
+
+ ASSERT_TRUE(msg.has_noise_robust_agc_enabled());
+ config.Set<ExperimentalAgc>(
+ new ExperimentalAgc(msg.noise_robust_agc_enabled()));
+
+ ASSERT_TRUE(msg.has_transient_suppression_enabled());
+ config.Set<ExperimentalNs>(
+ new ExperimentalNs(msg.transient_suppression_enabled()));
+
+ ASSERT_TRUE(msg.has_aec_extended_filter_enabled());
+ config.Set<ExtendedFilter>(new ExtendedFilter(
+ msg.aec_extended_filter_enabled()));
+
+ // We only create the APM once, since these fields are not expected to
+ // change in the current implementation.
+ if (!apm_.get()) {
+ apm_.reset(AudioProcessing::Create(config));
+ }
+}
+
+void DebugDumpTest::ConfigureApm(const audioproc::Config& msg) {
+ // AEC configs.
+ ASSERT_TRUE(msg.has_aec_enabled());
+ EXPECT_EQ(AudioProcessing::kNoError,
+ apm_->echo_cancellation()->Enable(msg.aec_enabled()));
+
+ ASSERT_TRUE(msg.has_aec_drift_compensation_enabled());
+ EXPECT_EQ(AudioProcessing::kNoError,
+ apm_->echo_cancellation()->enable_drift_compensation(
+ msg.aec_drift_compensation_enabled()));
+
+ ASSERT_TRUE(msg.has_aec_suppression_level());
+ EXPECT_EQ(AudioProcessing::kNoError,
+ apm_->echo_cancellation()->set_suppression_level(
+ static_cast<EchoCancellation::SuppressionLevel>(
+ msg.aec_suppression_level())));
+
+ // AECM configs.
+ ASSERT_TRUE(msg.has_aecm_enabled());
+ EXPECT_EQ(AudioProcessing::kNoError,
+ apm_->echo_control_mobile()->Enable(msg.aecm_enabled()));
+
+ ASSERT_TRUE(msg.has_aecm_comfort_noise_enabled());
+ EXPECT_EQ(AudioProcessing::kNoError,
+ apm_->echo_control_mobile()->enable_comfort_noise(
+ msg.aecm_comfort_noise_enabled()));
+
+ ASSERT_TRUE(msg.has_aecm_routing_mode());
+ EXPECT_EQ(AudioProcessing::kNoError,
+ apm_->echo_control_mobile()->set_routing_mode(
+ static_cast<EchoControlMobile::RoutingMode>(
+ msg.aecm_routing_mode())));
+
+ // AGC configs.
+ ASSERT_TRUE(msg.has_agc_enabled());
+ EXPECT_EQ(AudioProcessing::kNoError,
+ apm_->gain_control()->Enable(msg.agc_enabled()));
+
+ ASSERT_TRUE(msg.has_agc_mode());
+ EXPECT_EQ(AudioProcessing::kNoError,
+ apm_->gain_control()->set_mode(
+ static_cast<GainControl::Mode>(msg.agc_mode())));
+
+ ASSERT_TRUE(msg.has_agc_limiter_enabled());
+ EXPECT_EQ(AudioProcessing::kNoError,
+ apm_->gain_control()->enable_limiter(msg.agc_limiter_enabled()));
+
+ // HPF configs.
+ ASSERT_TRUE(msg.has_hpf_enabled());
+ EXPECT_EQ(AudioProcessing::kNoError,
+ apm_->high_pass_filter()->Enable(msg.hpf_enabled()));
+
+ // NS configs.
+ ASSERT_TRUE(msg.has_ns_enabled());
+ EXPECT_EQ(AudioProcessing::kNoError,
+ apm_->noise_suppression()->Enable(msg.ns_enabled()));
+
+ ASSERT_TRUE(msg.has_ns_level());
+ EXPECT_EQ(AudioProcessing::kNoError,
+ apm_->noise_suppression()->set_level(
+ static_cast<NoiseSuppression::Level>(msg.ns_level())));
+}
+
+TEST_F(DebugDumpTest, SimpleCase) {
+ Config config;
+ DebugDumpGenerator generator(config);
+ generator.StartRecording();
+ generator.Process(100);
+ generator.StopRecording();
+ VerifyDebugDump(generator.dump_file_name());
+}
+
+TEST_F(DebugDumpTest, ChangeInputFormat) {
+ Config config;
+ DebugDumpGenerator generator(config);
+ generator.StartRecording();
+ generator.Process(100);
+ generator.SetInputRate(48000);
+
+ generator.ForceInputMono(true);
+ // The number of output channels should not be larger than the number of
+ // input channels; APM will fail otherwise.
+ generator.SetOutputChannels(1);
+
+ generator.Process(100);
+ generator.StopRecording();
+ VerifyDebugDump(generator.dump_file_name());
+}
+
+TEST_F(DebugDumpTest, ChangeReverseFormat) {
+ Config config;
+ DebugDumpGenerator generator(config);
+ generator.StartRecording();
+ generator.Process(100);
+ generator.SetReverseRate(48000);
+ generator.ForceReverseMono(true);
+ generator.Process(100);
+ generator.StopRecording();
+ VerifyDebugDump(generator.dump_file_name());
+}
+
+TEST_F(DebugDumpTest, ChangeOutputFormat) {
+ Config config;
+ DebugDumpGenerator generator(config);
+ generator.StartRecording();
+ generator.Process(100);
+ generator.SetOutputRate(48000);
+ generator.SetOutputChannels(1);
+ generator.Process(100);
+ generator.StopRecording();
+ VerifyDebugDump(generator.dump_file_name());
+}
+
+TEST_F(DebugDumpTest, ToggleAec) {
+ Config config;
+ DebugDumpGenerator generator(config);
+ generator.StartRecording();
+ generator.Process(100);
+
+ EchoCancellation* aec = generator.apm()->echo_cancellation();
+ EXPECT_EQ(AudioProcessing::kNoError, aec->Enable(!aec->is_enabled()));
+
+ generator.Process(100);
+ generator.StopRecording();
+ VerifyDebugDump(generator.dump_file_name());
+}
+
+TEST_F(DebugDumpTest, ToggleDelayAgnosticAec) {
+ Config config;
+ config.Set<DelayAgnostic>(new DelayAgnostic(true));
+ DebugDumpGenerator generator(config);
+ generator.StartRecording();
+ generator.Process(100);
+
+ EchoCancellation* aec = generator.apm()->echo_cancellation();
+ EXPECT_EQ(AudioProcessing::kNoError, aec->Enable(!aec->is_enabled()));
+
+ generator.Process(100);
+ generator.StopRecording();
+ VerifyDebugDump(generator.dump_file_name());
+}
+
+TEST_F(DebugDumpTest, ToggleAecLevel) {
+ Config config;
+ DebugDumpGenerator generator(config);
+ EchoCancellation* aec = generator.apm()->echo_cancellation();
+ EXPECT_EQ(AudioProcessing::kNoError, aec->Enable(true));
+ EXPECT_EQ(AudioProcessing::kNoError,
+ aec->set_suppression_level(EchoCancellation::kLowSuppression));
+ generator.StartRecording();
+ generator.Process(100);
+
+ EXPECT_EQ(AudioProcessing::kNoError,
+ aec->set_suppression_level(EchoCancellation::kHighSuppression));
+ generator.Process(100);
+ generator.StopRecording();
+ VerifyDebugDump(generator.dump_file_name());
+}
+
+#if defined(WEBRTC_ANDROID)
+// AGC may not be supported on Android.
+#define MAYBE_ToggleAgc DISABLED_ToggleAgc
+#else
+#define MAYBE_ToggleAgc ToggleAgc
+#endif
+TEST_F(DebugDumpTest, MAYBE_ToggleAgc) {
+ Config config;
+ DebugDumpGenerator generator(config);
+ generator.StartRecording();
+ generator.Process(100);
+
+ GainControl* agc = generator.apm()->gain_control();
+ EXPECT_EQ(AudioProcessing::kNoError, agc->Enable(!agc->is_enabled()));
+
+ generator.Process(100);
+ generator.StopRecording();
+ VerifyDebugDump(generator.dump_file_name());
+}
+
+TEST_F(DebugDumpTest, ToggleNs) {
+ Config config;
+ DebugDumpGenerator generator(config);
+ generator.StartRecording();
+ generator.Process(100);
+
+ NoiseSuppression* ns = generator.apm()->noise_suppression();
+ EXPECT_EQ(AudioProcessing::kNoError, ns->Enable(!ns->is_enabled()));
+
+ generator.Process(100);
+ generator.StopRecording();
+ VerifyDebugDump(generator.dump_file_name());
+}
+
+TEST_F(DebugDumpTest, TransientSuppressionOn) {
+ Config config;
+ config.Set<ExperimentalNs>(new ExperimentalNs(true));
+ DebugDumpGenerator generator(config);
+ generator.StartRecording();
+ generator.Process(100);
+ generator.StopRecording();
+ VerifyDebugDump(generator.dump_file_name());
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/webrtc/modules/audio_processing/test/process_test.cc b/webrtc/modules/audio_processing/test/process_test.cc
index 43165404c8..6e20a787e7 100644
--- a/webrtc/modules/audio_processing/test/process_test.cc
+++ b/webrtc/modules/audio_processing/test/process_test.cc
@@ -17,12 +17,13 @@
#include <algorithm>
+#include "webrtc/base/format_macros.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/common.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/modules/audio_processing/test/protobuf_utils.h"
#include "webrtc/modules/audio_processing/test/test_utils.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/system_wrappers/include/cpu_features_wrapper.h"
#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/test/testsupport/fileutils.h"
@@ -159,9 +160,9 @@ void void_main(int argc, char* argv[]) {
int32_t sample_rate_hz = 16000;
- int num_capture_input_channels = 1;
- int num_capture_output_channels = 1;
- int num_render_channels = 1;
+ size_t num_capture_input_channels = 1;
+ size_t num_capture_output_channels = 1;
+ size_t num_render_channels = 1;
int samples_per_channel = sample_rate_hz / 100;
@@ -207,14 +208,14 @@ void void_main(int argc, char* argv[]) {
} else if (strcmp(argv[i], "-ch") == 0) {
i++;
ASSERT_LT(i + 1, argc) << "Specify number of channels after -ch";
- ASSERT_EQ(1, sscanf(argv[i], "%d", &num_capture_input_channels));
+ ASSERT_EQ(1, sscanf(argv[i], "%" PRIuS, &num_capture_input_channels));
i++;
- ASSERT_EQ(1, sscanf(argv[i], "%d", &num_capture_output_channels));
+ ASSERT_EQ(1, sscanf(argv[i], "%" PRIuS, &num_capture_output_channels));
} else if (strcmp(argv[i], "-rch") == 0) {
i++;
ASSERT_LT(i, argc) << "Specify number of channels after -rch";
- ASSERT_EQ(1, sscanf(argv[i], "%d", &num_render_channels));
+ ASSERT_EQ(1, sscanf(argv[i], "%" PRIuS, &num_render_channels));
} else if (strcmp(argv[i], "-aec") == 0) {
ASSERT_EQ(apm->kNoError, apm->echo_cancellation()->Enable(true));
@@ -447,10 +448,10 @@ void void_main(int argc, char* argv[]) {
if (verbose) {
printf("Sample rate: %d Hz\n", sample_rate_hz);
- printf("Primary channels: %d (in), %d (out)\n",
+ printf("Primary channels: %" PRIuS " (in), %" PRIuS " (out)\n",
num_capture_input_channels,
num_capture_output_channels);
- printf("Reverse channels: %d \n", num_render_channels);
+ printf("Reverse channels: %" PRIuS "\n", num_render_channels);
}
const std::string out_path = webrtc::test::OutputPath();
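
The %" PRIuS " changes above come from webrtc/base/format_macros.h, which supplies a portable printf length modifier for size_t. A sketch of the usage; the fallback definition below is an assumption for this sketch, not copied from that header (on POSIX it is effectively "zu", while older MSVC runtimes needed "Iu"):

#include <cstddef>
#include <cstdio>

#ifndef PRIuS
#define PRIuS "zu"  // Assumed POSIX-style definition, for this sketch only.
#endif

int main() {
  size_t num_render_channels = 2;
  std::printf("Reverse channels: %" PRIuS "\n", num_render_channels);
  return 0;
}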
@@ -601,14 +602,18 @@ void void_main(int argc, char* argv[]) {
if (msg.has_output_sample_rate()) {
output_sample_rate = msg.output_sample_rate();
}
- output_layout = LayoutFromChannels(msg.num_output_channels());
- ASSERT_EQ(kNoErr, apm->Initialize(
- msg.sample_rate(),
- output_sample_rate,
- reverse_sample_rate,
- LayoutFromChannels(msg.num_input_channels()),
- output_layout,
- LayoutFromChannels(msg.num_reverse_channels())));
+ output_layout =
+ LayoutFromChannels(static_cast<size_t>(msg.num_output_channels()));
+ ASSERT_EQ(kNoErr,
+ apm->Initialize(
+ msg.sample_rate(),
+ output_sample_rate,
+ reverse_sample_rate,
+ LayoutFromChannels(
+ static_cast<size_t>(msg.num_input_channels())),
+ output_layout,
+ LayoutFromChannels(
+ static_cast<size_t>(msg.num_reverse_channels()))));
samples_per_channel = msg.sample_rate() / 100;
far_frame.sample_rate_hz_ = reverse_sample_rate;
@@ -636,11 +641,11 @@ void void_main(int argc, char* argv[]) {
}
if (!raw_output) {
- // The WAV file needs to be reset every time, because it cant change
- // it's sample rate or number of channels.
- output_wav_file.reset(new WavWriter(out_filename + ".wav",
- output_sample_rate,
- msg.num_output_channels()));
+ // The WAV file needs to be reset every time, because it can't change
+ // its sample rate or number of channels.
+ output_wav_file.reset(new WavWriter(
+ out_filename + ".wav", output_sample_rate,
+ static_cast<size_t>(msg.num_output_channels())));
}
} else if (event_msg.type() == Event::REVERSE_STREAM) {
@@ -1049,7 +1054,9 @@ void void_main(int argc, char* argv[]) {
}
}
}
- printf("100%% complete\r");
+ if (progress) {
+ printf("100%% complete\r");
+ }
if (aecm_echo_path_out_file != NULL) {
const size_t path_size =
diff --git a/webrtc/modules/audio_processing/test/test_utils.cc b/webrtc/modules/audio_processing/test/test_utils.cc
index 1b9ac3ce4c..0bd70126ae 100644
--- a/webrtc/modules/audio_processing/test/test_utils.cc
+++ b/webrtc/modules/audio_processing/test/test_utils.cc
@@ -8,6 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <utility>
+
#include "webrtc/base/checks.h"
#include "webrtc/modules/audio_processing/test/test_utils.h"
@@ -31,6 +33,35 @@ void RawFile::WriteSamples(const float* samples, size_t num_samples) {
fwrite(samples, sizeof(*samples), num_samples, file_handle_);
}
+ChannelBufferWavReader::ChannelBufferWavReader(rtc::scoped_ptr<WavReader> file)
+ : file_(std::move(file)) {}
+
+bool ChannelBufferWavReader::Read(ChannelBuffer<float>* buffer) {
+ RTC_CHECK_EQ(file_->num_channels(), buffer->num_channels());
+ interleaved_.resize(buffer->size());
+ if (file_->ReadSamples(interleaved_.size(), &interleaved_[0]) !=
+ interleaved_.size()) {
+ return false;
+ }
+
+ FloatS16ToFloat(&interleaved_[0], interleaved_.size(), &interleaved_[0]);
+ Deinterleave(&interleaved_[0], buffer->num_frames(), buffer->num_channels(),
+ buffer->channels());
+ return true;
+}
+
+ChannelBufferWavWriter::ChannelBufferWavWriter(rtc::scoped_ptr<WavWriter> file)
+ : file_(std::move(file)) {}
+
+void ChannelBufferWavWriter::Write(const ChannelBuffer<float>& buffer) {
+ RTC_CHECK_EQ(file_->num_channels(), buffer.num_channels());
+ interleaved_.resize(buffer.size());
+ Interleave(buffer.channels(), buffer.num_frames(), buffer.num_channels(),
+ &interleaved_[0]);
+ FloatToFloatS16(&interleaved_[0], interleaved_.size(), &interleaved_[0]);
+ file_->WriteSamples(&interleaved_[0], interleaved_.size());
+}
+
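
The two wrappers above let callers work entirely in deinterleaved float buffers: Read() converts from the S16-range floats stored in the WAV to [-1, 1] and deinterleaves, and Write() performs the inverse. A hedged usage sketch copying a WAV file through 10 ms chunks (file names are placeholders):

#include <utility>

#include "webrtc/base/scoped_ptr.h"
#include "webrtc/common_audio/channel_buffer.h"
#include "webrtc/common_audio/wav_file.h"
#include "webrtc/modules/audio_processing/test/test_utils.h"

void CopyWavInChunks() {
  rtc::scoped_ptr<webrtc::WavReader> in(new webrtc::WavReader("in.wav"));
  rtc::scoped_ptr<webrtc::WavWriter> out(new webrtc::WavWriter(
      "out.wav", in->sample_rate(), in->num_channels()));
  // One 10 ms chunk of deinterleaved floats in [-1, 1].
  webrtc::ChannelBuffer<float> buffer(in->sample_rate() / 100,
                                      in->num_channels());
  webrtc::ChannelBufferWavReader reader(std::move(in));
  webrtc::ChannelBufferWavWriter writer(std::move(out));
  while (reader.Read(&buffer))  // false once a full chunk can't be read.
    writer.Write(buffer);
}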
void WriteIntData(const int16_t* data,
size_t length,
WavWriter* wav_file,
@@ -44,8 +75,8 @@ void WriteIntData(const int16_t* data,
}
void WriteFloatData(const float* const* data,
- int samples_per_channel,
- int num_channels,
+ size_t samples_per_channel,
+ size_t num_channels,
WavWriter* wav_file,
RawFile* raw_file) {
size_t length = num_channels * samples_per_channel;
@@ -74,8 +105,8 @@ FILE* OpenFile(const std::string& filename, const char* mode) {
return file;
}
-int SamplesFromRate(int rate) {
- return AudioProcessing::kChunkSizeMs * rate / 1000;
+size_t SamplesFromRate(int rate) {
+ return static_cast<size_t>(AudioProcessing::kChunkSizeMs * rate / 1000);
}
void SetFrameSampleRate(AudioFrame* frame,
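
The arithmetic in SamplesFromRate is worth spelling out once: AudioProcessing processes 10 ms chunks, so a chunk at 48 kHz is 10 * 48000 / 1000 = 480 samples per channel. A standalone sketch (the constant mirrors AudioProcessing::kChunkSizeMs):

#include <cstddef>
#include <cstdio>

size_t SamplesFromRateSketch(int rate) {
  const int kChunkSizeMs = 10;  // Mirrors AudioProcessing::kChunkSizeMs.
  return static_cast<size_t>(kChunkSizeMs * rate / 1000);
}

int main() {
  // 10 ms at 48 kHz: 10 * 48000 / 1000 = 480 samples per channel.
  std::printf("%zu\n", SamplesFromRateSketch(48000));
  return 0;
}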
@@ -85,35 +116,39 @@ void SetFrameSampleRate(AudioFrame* frame,
sample_rate_hz / 1000;
}
-AudioProcessing::ChannelLayout LayoutFromChannels(int num_channels) {
+AudioProcessing::ChannelLayout LayoutFromChannels(size_t num_channels) {
switch (num_channels) {
case 1:
return AudioProcessing::kMono;
case 2:
return AudioProcessing::kStereo;
default:
- assert(false);
+ RTC_CHECK(false);
return AudioProcessing::kMono;
}
}
-std::vector<Point> ParseArrayGeometry(const std::string& mic_positions,
- size_t num_mics) {
+std::vector<Point> ParseArrayGeometry(const std::string& mic_positions) {
const std::vector<float> values = ParseList<float>(mic_positions);
- RTC_CHECK_EQ(values.size(), 3 * num_mics)
- << "Could not parse mic_positions or incorrect number of points.";
+ const size_t num_mics =
+ rtc::CheckedDivExact(values.size(), static_cast<size_t>(3));
+ RTC_CHECK_GT(num_mics, 0u) << "mic_positions is not large enough.";
std::vector<Point> result;
result.reserve(num_mics);
for (size_t i = 0; i < values.size(); i += 3) {
- double x = values[i + 0];
- double y = values[i + 1];
- double z = values[i + 2];
- result.push_back(Point(x, y, z));
+ result.push_back(Point(values[i + 0], values[i + 1], values[i + 2]));
}
return result;
}
+std::vector<Point> ParseArrayGeometry(const std::string& mic_positions,
+ size_t num_mics) {
+ std::vector<Point> result = ParseArrayGeometry(mic_positions);
+ RTC_CHECK_EQ(result.size(), num_mics)
+ << "Could not parse mic_positions or incorrect number of points.";
+ return result;
+}
} // namespace webrtc
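
The refactor above splits parsing from validation: the new overload derives the mic count from the flag itself (every three floats make one Point), and the original signature survives as a wrapper that checks the result against an expected count. A usage sketch (the coordinate string is a made-up example):

#include <string>
#include <vector>

#include "webrtc/modules/audio_processing/test/test_utils.h"

void GeometryDemo() {
  // Two mics on the x axis, 5 cm apart (made-up coordinates).
  const std::string mic_positions = "0 0 0 0.05 0 0";
  // New overload: infers the mic count (six floats -> two points) and CHECKs
  // that the value count is a nonzero multiple of three.
  std::vector<webrtc::Point> geometry =
      webrtc::ParseArrayGeometry(mic_positions);
  // Original overload: same parse plus a CHECK against an expected count.
  geometry = webrtc::ParseArrayGeometry(mic_positions, 2);
}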
diff --git a/webrtc/modules/audio_processing/test/test_utils.h b/webrtc/modules/audio_processing/test/test_utils.h
index 8dd380b15d..e23beb66f4 100644
--- a/webrtc/modules/audio_processing/test/test_utils.h
+++ b/webrtc/modules/audio_processing/test/test_utils.h
@@ -22,7 +22,7 @@
#include "webrtc/common_audio/channel_buffer.h"
#include "webrtc/common_audio/wav_file.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
namespace webrtc {
@@ -43,28 +43,57 @@ class RawFile final {
RTC_DISALLOW_COPY_AND_ASSIGN(RawFile);
};
+// Reads ChannelBuffers from a provided WavReader.
+class ChannelBufferWavReader final {
+ public:
+ explicit ChannelBufferWavReader(rtc::scoped_ptr<WavReader> file);
+
+ // Reads data from the file according to the |buffer| format. Returns false if
+ // a full buffer can't be read from the file.
+ bool Read(ChannelBuffer<float>* buffer);
+
+ private:
+ rtc::scoped_ptr<WavReader> file_;
+ std::vector<float> interleaved_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(ChannelBufferWavReader);
+};
+
+// Writes ChannelBuffers to a provided WavWriter.
+class ChannelBufferWavWriter final {
+ public:
+ explicit ChannelBufferWavWriter(rtc::scoped_ptr<WavWriter> file);
+ void Write(const ChannelBuffer<float>& buffer);
+
+ private:
+ rtc::scoped_ptr<WavWriter> file_;
+ std::vector<float> interleaved_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(ChannelBufferWavWriter);
+};
+
void WriteIntData(const int16_t* data,
size_t length,
WavWriter* wav_file,
RawFile* raw_file);
void WriteFloatData(const float* const* data,
- int samples_per_channel,
- int num_channels,
+ size_t samples_per_channel,
+ size_t num_channels,
WavWriter* wav_file,
RawFile* raw_file);
// Exits on failure; do not use in unit tests.
FILE* OpenFile(const std::string& filename, const char* mode);
-int SamplesFromRate(int rate);
+size_t SamplesFromRate(int rate);
void SetFrameSampleRate(AudioFrame* frame,
int sample_rate_hz);
template <typename T>
void SetContainerFormat(int sample_rate_hz,
- int num_channels,
+ size_t num_channels,
AudioFrame* frame,
rtc::scoped_ptr<ChannelBuffer<T> >* cb) {
SetFrameSampleRate(frame, sample_rate_hz);
@@ -72,14 +101,14 @@ void SetContainerFormat(int sample_rate_hz,
cb->reset(new ChannelBuffer<T>(frame->samples_per_channel_, num_channels));
}
-AudioProcessing::ChannelLayout LayoutFromChannels(int num_channels);
+AudioProcessing::ChannelLayout LayoutFromChannels(size_t num_channels);
template <typename T>
-float ComputeSNR(const T* ref, const T* test, int length, float* variance) {
+float ComputeSNR(const T* ref, const T* test, size_t length, float* variance) {
float mse = 0;
float mean = 0;
*variance = 0;
- for (int i = 0; i < length; ++i) {
+ for (size_t i = 0; i < length; ++i) {
T error = ref[i] - test[i];
mse += error * error;
*variance += ref[i] * ref[i];
@@ -118,6 +147,9 @@ std::vector<T> ParseList(const std::string& to_parse) {
std::vector<Point> ParseArrayGeometry(const std::string& mic_positions,
size_t num_mics);
+// Same as above, but without the num_mics check, for callers that don't know it.
+std::vector<Point> ParseArrayGeometry(const std::string& mic_positions);
+
} // namespace webrtc
#endif // WEBRTC_MODULES_AUDIO_PROCESSING_TEST_TEST_UTILS_H_
diff --git a/webrtc/modules/audio_processing/test/unpack.cc b/webrtc/modules/audio_processing/test/unpack.cc
index 24578e240c..8b2b082f97 100644
--- a/webrtc/modules/audio_processing/test/unpack.cc
+++ b/webrtc/modules/audio_processing/test/unpack.cc
@@ -17,6 +17,7 @@
#include "gflags/gflags.h"
#include "webrtc/audio_processing/debug.pb.h"
+#include "webrtc/base/format_macros.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/audio_processing/test/protobuf_utils.h"
#include "webrtc/modules/audio_processing/test/test_utils.h"
@@ -76,12 +77,12 @@ int do_main(int argc, char* argv[]) {
Event event_msg;
int frame_count = 0;
- int reverse_samples_per_channel = 0;
- int input_samples_per_channel = 0;
- int output_samples_per_channel = 0;
- int num_reverse_channels = 0;
- int num_input_channels = 0;
- int num_output_channels = 0;
+ size_t reverse_samples_per_channel = 0;
+ size_t input_samples_per_channel = 0;
+ size_t output_samples_per_channel = 0;
+ size_t num_reverse_channels = 0;
+ size_t num_input_channels = 0;
+ size_t num_output_channels = 0;
rtc::scoped_ptr<WavWriter> reverse_wav_file;
rtc::scoped_ptr<WavWriter> input_wav_file;
rtc::scoped_ptr<WavWriter> output_wav_file;
@@ -117,7 +118,7 @@ int do_main(int argc, char* argv[]) {
}
rtc::scoped_ptr<const float* []> data(
new const float* [num_reverse_channels]);
- for (int i = 0; i < num_reverse_channels; ++i) {
+ for (size_t i = 0; i < num_reverse_channels; ++i) {
data[i] = reinterpret_cast<const float*>(msg.channel(i).data());
}
WriteFloatData(data.get(),
@@ -148,7 +149,7 @@ int do_main(int argc, char* argv[]) {
}
rtc::scoped_ptr<const float* []> data(
new const float* [num_input_channels]);
- for (int i = 0; i < num_input_channels; ++i) {
+ for (size_t i = 0; i < num_input_channels; ++i) {
data[i] = reinterpret_cast<const float*>(msg.input_channel(i).data());
}
WriteFloatData(data.get(),
@@ -172,7 +173,7 @@ int do_main(int argc, char* argv[]) {
}
rtc::scoped_ptr<const float* []> data(
new const float* [num_output_channels]);
- for (int i = 0; i < num_output_channels; ++i) {
+ for (size_t i = 0; i < num_output_channels; ++i) {
data[i] =
reinterpret_cast<const float*>(msg.output_channel(i).data());
}
@@ -268,11 +269,14 @@ int do_main(int argc, char* argv[]) {
" Reverse sample rate: %d\n",
reverse_sample_rate);
num_input_channels = msg.num_input_channels();
- fprintf(settings_file, " Input channels: %d\n", num_input_channels);
+ fprintf(settings_file, " Input channels: %" PRIuS "\n",
+ num_input_channels);
num_output_channels = msg.num_output_channels();
- fprintf(settings_file, " Output channels: %d\n", num_output_channels);
+ fprintf(settings_file, " Output channels: %" PRIuS "\n",
+ num_output_channels);
num_reverse_channels = msg.num_reverse_channels();
- fprintf(settings_file, " Reverse channels: %d\n", num_reverse_channels);
+ fprintf(settings_file, " Reverse channels: %" PRIuS "\n",
+ num_reverse_channels);
fprintf(settings_file, "\n");
@@ -283,9 +287,12 @@ int do_main(int argc, char* argv[]) {
output_sample_rate = input_sample_rate;
}
- reverse_samples_per_channel = reverse_sample_rate / 100;
- input_samples_per_channel = input_sample_rate / 100;
- output_samples_per_channel = output_sample_rate / 100;
+ reverse_samples_per_channel =
+ static_cast<size_t>(reverse_sample_rate / 100);
+ input_samples_per_channel =
+ static_cast<size_t>(input_sample_rate / 100);
+ output_samples_per_channel =
+ static_cast<size_t>(output_sample_rate / 100);
if (!FLAGS_raw) {
// The WAV files need to be reset every time, because they can't change
diff --git a/webrtc/modules/audio_processing/transient/file_utils_unittest.cc b/webrtc/modules/audio_processing/transient/file_utils_unittest.cc
index 7a035d2b41..7fb7d2d6a9 100644
--- a/webrtc/modules/audio_processing/transient/file_utils_unittest.cc
+++ b/webrtc/modules/audio_processing/transient/file_utils_unittest.cc
@@ -17,7 +17,6 @@
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/system_wrappers/include/file_wrapper.h"
#include "webrtc/test/testsupport/fileutils.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -59,7 +58,12 @@ class TransientFileUtilsTest: public ::testing::Test {
const std::string kTestFileNamef;
};
-TEST_F(TransientFileUtilsTest, DISABLED_ON_IOS(ConvertByteArrayToFloat)) {
+#if defined(WEBRTC_IOS)
+#define MAYBE_ConvertByteArrayToFloat DISABLED_ConvertByteArrayToFloat
+#else
+#define MAYBE_ConvertByteArrayToFloat ConvertByteArrayToFloat
+#endif
+TEST_F(TransientFileUtilsTest, MAYBE_ConvertByteArrayToFloat) {
float value = 0.0;
EXPECT_EQ(0, ConvertByteArrayToFloat(kPiBytesf, &value));
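
These hunks swap the removed DISABLED_ON_IOS() wrapper for gtest's plain naming convention: a test whose name begins with DISABLED_ is still compiled but skipped at run time, so a per-platform #define toggles each test without a custom macro. The pattern in isolation (the test itself is hypothetical, for shape only):

#include "testing/gtest/include/gtest/gtest.h"

#if defined(WEBRTC_IOS)
#define MAYBE_SomeFeature DISABLED_SomeFeature
#else
#define MAYBE_SomeFeature SomeFeature
#endif

TEST(ExampleSuite, MAYBE_SomeFeature) {  // Hypothetical test, for illustration.
  EXPECT_TRUE(true);
}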
@@ -72,7 +76,12 @@ TEST_F(TransientFileUtilsTest, DISABLED_ON_IOS(ConvertByteArrayToFloat)) {
EXPECT_FLOAT_EQ(kAvogadro, value);
}
-TEST_F(TransientFileUtilsTest, DISABLED_ON_IOS(ConvertByteArrayToDouble)) {
+#if defined(WEBRTC_IOS)
+#define MAYBE_ConvertByteArrayToDouble DISABLED_ConvertByteArrayToDouble
+#else
+#define MAYBE_ConvertByteArrayToDouble ConvertByteArrayToDouble
+#endif
+TEST_F(TransientFileUtilsTest, MAYBE_ConvertByteArrayToDouble) {
double value = 0.0;
EXPECT_EQ(0, ConvertByteArrayToDouble(kPiBytes, &value));
@@ -85,7 +94,12 @@ TEST_F(TransientFileUtilsTest, DISABLED_ON_IOS(ConvertByteArrayToDouble)) {
EXPECT_DOUBLE_EQ(kAvogadro, value);
}
-TEST_F(TransientFileUtilsTest, DISABLED_ON_IOS(ConvertFloatToByteArray)) {
+#if defined(WEBRTC_IOS)
+#define MAYBE_ConvertFloatToByteArray DISABLED_ConvertFloatToByteArray
+#else
+#define MAYBE_ConvertFloatToByteArray ConvertFloatToByteArray
+#endif
+TEST_F(TransientFileUtilsTest, MAYBE_ConvertFloatToByteArray) {
rtc::scoped_ptr<uint8_t[]> bytes(new uint8_t[4]);
EXPECT_EQ(0, ConvertFloatToByteArray(kPi, bytes.get()));
@@ -98,7 +112,12 @@ TEST_F(TransientFileUtilsTest, DISABLED_ON_IOS(ConvertFloatToByteArray)) {
EXPECT_EQ(0, memcmp(bytes.get(), kAvogadroBytesf, 4));
}
-TEST_F(TransientFileUtilsTest, DISABLED_ON_IOS(ConvertDoubleToByteArray)) {
+#if defined(WEBRTC_IOS)
+#define MAYBE_ConvertDoubleToByteArray DISABLED_ConvertDoubleToByteArray
+#else
+#define MAYBE_ConvertDoubleToByteArray ConvertDoubleToByteArray
+#endif
+TEST_F(TransientFileUtilsTest, MAYBE_ConvertDoubleToByteArray) {
rtc::scoped_ptr<uint8_t[]> bytes(new uint8_t[8]);
EXPECT_EQ(0, ConvertDoubleToByteArray(kPi, bytes.get()));
@@ -111,7 +130,12 @@ TEST_F(TransientFileUtilsTest, DISABLED_ON_IOS(ConvertDoubleToByteArray)) {
EXPECT_EQ(0, memcmp(bytes.get(), kAvogadroBytes, 8));
}
-TEST_F(TransientFileUtilsTest, DISABLED_ON_IOS(ReadInt16BufferFromFile)) {
+#if defined(WEBRTC_IOS)
+#define MAYBE_ReadInt16BufferFromFile DISABLED_ReadInt16BufferFromFile
+#else
+#define MAYBE_ReadInt16BufferFromFile ReadInt16BufferFromFile
+#endif
+TEST_F(TransientFileUtilsTest, MAYBE_ReadInt16BufferFromFile) {
std::string test_filename = kTestFileName;
rtc::scoped_ptr<FileWrapper> file(FileWrapper::Create());
@@ -149,8 +173,13 @@ TEST_F(TransientFileUtilsTest, DISABLED_ON_IOS(ReadInt16BufferFromFile)) {
EXPECT_EQ(17631, buffer[kBufferLength - 1]);
}
-TEST_F(TransientFileUtilsTest,
- DISABLED_ON_IOS(ReadInt16FromFileToFloatBuffer)) {
+#if defined(WEBRTC_IOS)
+#define MAYBE_ReadInt16FromFileToFloatBuffer \
+ DISABLED_ReadInt16FromFileToFloatBuffer
+#else
+#define MAYBE_ReadInt16FromFileToFloatBuffer ReadInt16FromFileToFloatBuffer
+#endif
+TEST_F(TransientFileUtilsTest, MAYBE_ReadInt16FromFileToFloatBuffer) {
std::string test_filename = kTestFileName;
rtc::scoped_ptr<FileWrapper> file(FileWrapper::Create());
@@ -191,8 +220,13 @@ TEST_F(TransientFileUtilsTest,
EXPECT_DOUBLE_EQ(17631, buffer[kBufferLength - 1]);
}
-TEST_F(TransientFileUtilsTest,
- DISABLED_ON_IOS(ReadInt16FromFileToDoubleBuffer)) {
+#if defined(WEBRTC_IOS)
+#define MAYBE_ReadInt16FromFileToDoubleBuffer \
+ DISABLED_ReadInt16FromFileToDoubleBuffer
+#else
+#define MAYBE_ReadInt16FromFileToDoubleBuffer ReadInt16FromFileToDoubleBuffer
+#endif
+TEST_F(TransientFileUtilsTest, MAYBE_ReadInt16FromFileToDoubleBuffer) {
std::string test_filename = kTestFileName;
rtc::scoped_ptr<FileWrapper> file(FileWrapper::Create());
@@ -232,7 +266,12 @@ TEST_F(TransientFileUtilsTest,
EXPECT_DOUBLE_EQ(17631, buffer[kBufferLength - 1]);
}
-TEST_F(TransientFileUtilsTest, DISABLED_ON_IOS(ReadFloatBufferFromFile)) {
+#if defined(WEBRTC_IOS)
+#define MAYBE_ReadFloatBufferFromFile DISABLED_ReadFloatBufferFromFile
+#else
+#define MAYBE_ReadFloatBufferFromFile ReadFloatBufferFromFile
+#endif
+TEST_F(TransientFileUtilsTest, MAYBE_ReadFloatBufferFromFile) {
std::string test_filename = kTestFileNamef;
rtc::scoped_ptr<FileWrapper> file(FileWrapper::Create());
@@ -269,7 +308,12 @@ TEST_F(TransientFileUtilsTest, DISABLED_ON_IOS(ReadFloatBufferFromFile)) {
EXPECT_FLOAT_EQ(kAvogadro, buffer[2]);
}
-TEST_F(TransientFileUtilsTest, DISABLED_ON_IOS(ReadDoubleBufferFromFile)) {
+#if defined(WEBRTC_IOS)
+#define MAYBE_ReadDoubleBufferFromFile DISABLED_ReadDoubleBufferFromFile
+#else
+#define MAYBE_ReadDoubleBufferFromFile ReadDoubleBufferFromFile
+#endif
+TEST_F(TransientFileUtilsTest, MAYBE_ReadDoubleBufferFromFile) {
std::string test_filename = kTestFileName;
rtc::scoped_ptr<FileWrapper> file(FileWrapper::Create());
@@ -306,7 +350,12 @@ TEST_F(TransientFileUtilsTest, DISABLED_ON_IOS(ReadDoubleBufferFromFile)) {
EXPECT_DOUBLE_EQ(kAvogadro, buffer[2]);
}
-TEST_F(TransientFileUtilsTest, DISABLED_ON_IOS(WriteInt16BufferToFile)) {
+#if defined(WEBRTC_IOS)
+#define MAYBE_WriteInt16BufferToFile DISABLED_WriteInt16BufferToFile
+#else
+#define MAYBE_WriteInt16BufferToFile WriteInt16BufferToFile
+#endif
+TEST_F(TransientFileUtilsTest, MAYBE_WriteInt16BufferToFile) {
rtc::scoped_ptr<FileWrapper> file(FileWrapper::Create());
std::string kOutFileName = test::TempFilename(test::OutputPath(),
@@ -348,7 +397,12 @@ TEST_F(TransientFileUtilsTest, DISABLED_ON_IOS(WriteInt16BufferToFile)) {
kBufferLength * sizeof(written_buffer[0])));
}
-TEST_F(TransientFileUtilsTest, DISABLED_ON_IOS(WriteFloatBufferToFile)) {
+#if defined(WEBRTC_IOS)
+#define MAYBE_WriteFloatBufferToFile DISABLED_WriteFloatBufferToFile
+#else
+#define MAYBE_WriteFloatBufferToFile WriteFloatBufferToFile
+#endif
+TEST_F(TransientFileUtilsTest, MAYBE_WriteFloatBufferToFile) {
rtc::scoped_ptr<FileWrapper> file(FileWrapper::Create());
std::string kOutFileName = test::TempFilename(test::OutputPath(),
@@ -390,7 +444,12 @@ TEST_F(TransientFileUtilsTest, DISABLED_ON_IOS(WriteFloatBufferToFile)) {
kBufferLength * sizeof(written_buffer[0])));
}
-TEST_F(TransientFileUtilsTest, DISABLED_ON_IOS(WriteDoubleBufferToFile)) {
+#if defined(WEBRTC_IOS)
+#define MAYBE_WriteDoubleBufferToFile DISABLED_WriteDoubleBufferToFile
+#else
+#define MAYBE_WriteDoubleBufferToFile WriteDoubleBufferToFile
+#endif
+TEST_F(TransientFileUtilsTest, MAYBE_WriteDoubleBufferToFile) {
rtc::scoped_ptr<FileWrapper> file(FileWrapper::Create());
std::string kOutFileName = test::TempFilename(test::OutputPath(),
@@ -432,7 +491,12 @@ TEST_F(TransientFileUtilsTest, DISABLED_ON_IOS(WriteDoubleBufferToFile)) {
kBufferLength * sizeof(written_buffer[0])));
}
-TEST_F(TransientFileUtilsTest, DISABLED_ON_IOS(ExpectedErrorReturnValues)) {
+#if defined(WEBRTC_IOS)
+#define MAYBE_ExpectedErrorReturnValues DISABLED_ExpectedErrorReturnValues
+#else
+#define MAYBE_ExpectedErrorReturnValues ExpectedErrorReturnValues
+#endif
+TEST_F(TransientFileUtilsTest, MAYBE_ExpectedErrorReturnValues) {
std::string test_filename = kTestFileName;
double value;
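
The repeated edits in this file all follow one gtest convention now that gtest_disable.h and its DISABLED_ON_IOS() wrapper are gone: define a MAYBE_ alias per test that expands to a DISABLED_-prefixed name on the excluded platform, which gtest registers but skips by default. A generic sketch with a hypothetical test name:

#include "testing/gtest/include/gtest/gtest.h"

#if defined(WEBRTC_IOS)
#define MAYBE_DoesSomething DISABLED_DoesSomething
#else
#define MAYBE_DoesSomething DoesSomething
#endif

// Runs everywhere except iOS; there the DISABLED_ prefix makes gtest
// skip it unless --gtest_also_run_disabled_tests is passed.
TEST(ExampleTest, MAYBE_DoesSomething) {
  EXPECT_EQ(4, 2 + 2);
}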
diff --git a/webrtc/modules/audio_processing/transient/transient_detector_unittest.cc b/webrtc/modules/audio_processing/transient/transient_detector_unittest.cc
index 6a70a3f92c..b60077510b 100644
--- a/webrtc/modules/audio_processing/transient/transient_detector_unittest.cc
+++ b/webrtc/modules/audio_processing/transient/transient_detector_unittest.cc
@@ -19,8 +19,7 @@
#include "webrtc/modules/audio_processing/transient/file_utils.h"
#include "webrtc/system_wrappers/include/file_wrapper.h"
#include "webrtc/test/testsupport/fileutils.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
- #include "webrtc/typedefs.h"
+#include "webrtc/typedefs.h"
namespace webrtc {
@@ -37,7 +36,11 @@ static const size_t kNumberOfSampleRates =
// The files contain all the results in double precision (Little endian).
// The audio files used with different sample rates are stored in the same
// directory.
-TEST(TransientDetectorTest, DISABLED_ON_IOS(CorrectnessBasedOnFiles)) {
+#if defined(WEBRTC_IOS)
+TEST(TransientDetectorTest, DISABLED_CorrectnessBasedOnFiles) {
+#else
+TEST(TransientDetectorTest, CorrectnessBasedOnFiles) {
+#endif
for (size_t i = 0; i < kNumberOfSampleRates; ++i) {
int sample_rate_hz = kSampleRatesHz[i];
diff --git a/webrtc/modules/audio_processing/transient/transient_suppression_test.cc b/webrtc/modules/audio_processing/transient/transient_suppression_test.cc
index 506abaf203..b7b7595abf 100644
--- a/webrtc/modules/audio_processing/transient/transient_suppression_test.cc
+++ b/webrtc/modules/audio_processing/transient/transient_suppression_test.cc
@@ -19,7 +19,7 @@
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/common_audio/include/audio_util.h"
#include "webrtc/modules/audio_processing/agc/agc.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/typedefs.h"
diff --git a/webrtc/modules/audio_processing/transient/wpd_tree_unittest.cc b/webrtc/modules/audio_processing/transient/wpd_tree_unittest.cc
index 7c99f4f161..e4e9048f88 100644
--- a/webrtc/modules/audio_processing/transient/wpd_tree_unittest.cc
+++ b/webrtc/modules/audio_processing/transient/wpd_tree_unittest.cc
@@ -19,7 +19,6 @@
#include "webrtc/modules/audio_processing/transient/file_utils.h"
#include "webrtc/system_wrappers/include/file_wrapper.h"
#include "webrtc/test/testsupport/fileutils.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
namespace webrtc {
@@ -69,7 +68,11 @@ TEST(WPDTreeTest, Construction) {
// It also writes the results in its own set of files in the out directory.
// Matlab and output files contain all the results in double precision (Little
// endian) appended.
-TEST(WPDTreeTest, DISABLED_ON_IOS(CorrectnessBasedOnMatlabFiles)) {
+#if defined(WEBRTC_IOS)
+TEST(WPDTreeTest, DISABLED_CorrectnessBasedOnMatlabFiles) {
+#else
+TEST(WPDTreeTest, CorrectnessBasedOnMatlabFiles) {
+#endif
// 10 ms at 16000 Hz.
const size_t kTestBufferSize = 160;
const int kLevels = 3;
diff --git a/webrtc/modules/audio_processing/typing_detection.h b/webrtc/modules/audio_processing/typing_detection.h
index 5fa6456e9e..40608f885d 100644
--- a/webrtc/modules/audio_processing/typing_detection.h
+++ b/webrtc/modules/audio_processing/typing_detection.h
@@ -11,7 +11,7 @@
#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_TYPING_DETECTION_H_
#define WEBRTC_MODULES_AUDIO_PROCESSING_TYPING_DETECTION_H_
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/typedefs.h"
namespace webrtc {
diff --git a/webrtc/modules/audio_processing/vad/pitch_based_vad.cc b/webrtc/modules/audio_processing/vad/pitch_based_vad.cc
index 39ec37e6ec..fce144de6b 100644
--- a/webrtc/modules/audio_processing/vad/pitch_based_vad.cc
+++ b/webrtc/modules/audio_processing/vad/pitch_based_vad.cc
@@ -18,7 +18,7 @@
#include "webrtc/modules/audio_processing/vad/common.h"
#include "webrtc/modules/audio_processing/vad/noise_gmm_tables.h"
#include "webrtc/modules/audio_processing/vad/voice_gmm_tables.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
namespace webrtc {
diff --git a/webrtc/modules/audio_processing/vad/standalone_vad.cc b/webrtc/modules/audio_processing/vad/standalone_vad.cc
index 468b8ff3f0..1209526a92 100644
--- a/webrtc/modules/audio_processing/vad/standalone_vad.cc
+++ b/webrtc/modules/audio_processing/vad/standalone_vad.cc
@@ -12,8 +12,8 @@
#include <assert.h>
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/utility/interface/audio_frame_operations.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/utility/include/audio_frame_operations.h"
#include "webrtc/typedefs.h"
namespace webrtc {
diff --git a/webrtc/modules/audio_processing/vad/standalone_vad_unittest.cc b/webrtc/modules/audio_processing/vad/standalone_vad_unittest.cc
index 942008e733..1d1dcc7066 100644
--- a/webrtc/modules/audio_processing/vad/standalone_vad_unittest.cc
+++ b/webrtc/modules/audio_processing/vad/standalone_vad_unittest.cc
@@ -14,9 +14,8 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/test/testsupport/fileutils.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
namespace webrtc {
@@ -55,7 +54,11 @@ TEST(StandaloneVadTest, Api) {
EXPECT_EQ(kMode, vad->mode());
}
-TEST(StandaloneVadTest, DISABLED_ON_IOS(ActivityDetection)) {
+#if defined(WEBRTC_IOS)
+TEST(StandaloneVadTest, DISABLED_ActivityDetection) {
+#else
+TEST(StandaloneVadTest, ActivityDetection) {
+#endif
rtc::scoped_ptr<StandaloneVad> vad(StandaloneVad::Create());
const size_t kDataLength = kLength10Ms;
int16_t data[kDataLength] = {0};
diff --git a/webrtc/modules/audio_processing/vad/vad_audio_proc.cc b/webrtc/modules/audio_processing/vad/vad_audio_proc.cc
index 8535d1ff57..1a595597b6 100644
--- a/webrtc/modules/audio_processing/vad/vad_audio_proc.cc
+++ b/webrtc/modules/audio_processing/vad/vad_audio_proc.cc
@@ -23,7 +23,7 @@ extern "C" {
#include "webrtc/modules/audio_coding/codecs/isac/main/source/pitch_estimator.h"
#include "webrtc/modules/audio_coding/codecs/isac/main/source/structs.h"
}
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
namespace webrtc {
diff --git a/webrtc/modules/audio_processing/vad/vad_audio_proc_unittest.cc b/webrtc/modules/audio_processing/vad/vad_audio_proc_unittest.cc
index f509af476f..a8a4ead2e3 100644
--- a/webrtc/modules/audio_processing/vad/vad_audio_proc_unittest.cc
+++ b/webrtc/modules/audio_processing/vad/vad_audio_proc_unittest.cc
@@ -21,7 +21,7 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/modules/audio_processing/vad/common.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/test/testsupport/fileutils.h"
namespace webrtc {
diff --git a/webrtc/modules/audio_processing/vad/voice_activity_detector.cc b/webrtc/modules/audio_processing/vad/voice_activity_detector.cc
index ef56a3574c..fc9d103918 100644
--- a/webrtc/modules/audio_processing/vad/voice_activity_detector.cc
+++ b/webrtc/modules/audio_processing/vad/voice_activity_detector.cc
@@ -18,7 +18,7 @@ namespace webrtc {
namespace {
const size_t kMaxLength = 320;
-const int kNumChannels = 1;
+const size_t kNumChannels = 1;
const double kDefaultVoiceValue = 1.0;
const double kNeutralProbability = 0.5;
diff --git a/webrtc/modules/audio_processing/voice_detection_impl.cc b/webrtc/modules/audio_processing/voice_detection_impl.cc
index 374189e709..22d218c371 100644
--- a/webrtc/modules/audio_processing/voice_detection_impl.cc
+++ b/webrtc/modules/audio_processing/voice_detection_impl.cc
@@ -10,61 +10,61 @@
#include "webrtc/modules/audio_processing/voice_detection_impl.h"
-#include <assert.h>
-
#include "webrtc/common_audio/vad/include/webrtc_vad.h"
#include "webrtc/modules/audio_processing/audio_buffer.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
namespace webrtc {
-
-typedef VadInst Handle;
-
-namespace {
-int MapSetting(VoiceDetection::Likelihood likelihood) {
- switch (likelihood) {
- case VoiceDetection::kVeryLowLikelihood:
- return 3;
- case VoiceDetection::kLowLikelihood:
- return 2;
- case VoiceDetection::kModerateLikelihood:
- return 1;
- case VoiceDetection::kHighLikelihood:
- return 0;
+class VoiceDetectionImpl::Vad {
+ public:
+ Vad() {
+ state_ = WebRtcVad_Create();
+ RTC_CHECK(state_);
+ int error = WebRtcVad_Init(state_);
+ RTC_DCHECK_EQ(0, error);
+ }
+ ~Vad() {
+ WebRtcVad_Free(state_);
}
- assert(false);
- return -1;
+ VadInst* state() { return state_; }
+ private:
+ VadInst* state_ = nullptr;
+ RTC_DISALLOW_COPY_AND_ASSIGN(Vad);
+};
+
+VoiceDetectionImpl::VoiceDetectionImpl(rtc::CriticalSection* crit)
+ : crit_(crit) {
+ RTC_DCHECK(crit);
}
-} // namespace
-
-VoiceDetectionImpl::VoiceDetectionImpl(const AudioProcessing* apm,
- CriticalSectionWrapper* crit)
- : ProcessingComponent(),
- apm_(apm),
- crit_(crit),
- stream_has_voice_(false),
- using_external_vad_(false),
- likelihood_(kLowLikelihood),
- frame_size_ms_(10),
- frame_size_samples_(0) {}
VoiceDetectionImpl::~VoiceDetectionImpl() {}
-int VoiceDetectionImpl::ProcessCaptureAudio(AudioBuffer* audio) {
- if (!is_component_enabled()) {
- return apm_->kNoError;
+void VoiceDetectionImpl::Initialize(int sample_rate_hz) {
+ rtc::CritScope cs(crit_);
+ sample_rate_hz_ = sample_rate_hz;
+ rtc::scoped_ptr<Vad> new_vad;
+ if (enabled_) {
+ new_vad.reset(new Vad());
}
+ vad_.swap(new_vad);
+ using_external_vad_ = false;
+ frame_size_samples_ =
+ static_cast<size_t>(frame_size_ms_ * sample_rate_hz_) / 1000;
+ set_likelihood(likelihood_);
+}
+void VoiceDetectionImpl::ProcessCaptureAudio(AudioBuffer* audio) {
+ rtc::CritScope cs(crit_);
+ if (!enabled_) {
+ return;
+ }
if (using_external_vad_) {
using_external_vad_ = false;
- return apm_->kNoError;
+ return;
}
- assert(audio->num_frames_per_band() <= 160);
+ RTC_DCHECK_GE(160u, audio->num_frames_per_band());
// TODO(ajm): concatenate data in frame buffer here.
-
- int vad_ret = WebRtcVad_Process(static_cast<Handle*>(handle(0)),
- apm_->proc_split_sample_rate_hz(),
+ int vad_ret = WebRtcVad_Process(vad_->state(), sample_rate_hz_,
audio->mixed_low_pass_data(),
frame_size_samples_);
if (vad_ret == 0) {
@@ -74,103 +74,81 @@ int VoiceDetectionImpl::ProcessCaptureAudio(AudioBuffer* audio) {
stream_has_voice_ = true;
audio->set_activity(AudioFrame::kVadActive);
} else {
- return apm_->kUnspecifiedError;
+ RTC_NOTREACHED();
}
-
- return apm_->kNoError;
}
int VoiceDetectionImpl::Enable(bool enable) {
- CriticalSectionScoped crit_scoped(crit_);
- return EnableComponent(enable);
+ rtc::CritScope cs(crit_);
+ if (enabled_ != enable) {
+ enabled_ = enable;
+ Initialize(sample_rate_hz_);
+ }
+ return AudioProcessing::kNoError;
}
bool VoiceDetectionImpl::is_enabled() const {
- return is_component_enabled();
+ rtc::CritScope cs(crit_);
+ return enabled_;
}
int VoiceDetectionImpl::set_stream_has_voice(bool has_voice) {
+ rtc::CritScope cs(crit_);
using_external_vad_ = true;
stream_has_voice_ = has_voice;
- return apm_->kNoError;
+ return AudioProcessing::kNoError;
}
bool VoiceDetectionImpl::stream_has_voice() const {
+ rtc::CritScope cs(crit_);
// TODO(ajm): enable this assertion?
//assert(using_external_vad_ || is_component_enabled());
return stream_has_voice_;
}
int VoiceDetectionImpl::set_likelihood(VoiceDetection::Likelihood likelihood) {
- CriticalSectionScoped crit_scoped(crit_);
- if (MapSetting(likelihood) == -1) {
- return apm_->kBadParameterError;
- }
-
+ rtc::CritScope cs(crit_);
likelihood_ = likelihood;
- return Configure();
+ if (enabled_) {
+ int mode = 2;
+ switch (likelihood) {
+ case VoiceDetection::kVeryLowLikelihood:
+ mode = 3;
+ break;
+ case VoiceDetection::kLowLikelihood:
+ mode = 2;
+ break;
+ case VoiceDetection::kModerateLikelihood:
+ mode = 1;
+ break;
+ case VoiceDetection::kHighLikelihood:
+ mode = 0;
+ break;
+ default:
+ RTC_NOTREACHED();
+ break;
+ }
+ int error = WebRtcVad_set_mode(vad_->state(), mode);
+ RTC_DCHECK_EQ(0, error);
+ }
+ return AudioProcessing::kNoError;
}
VoiceDetection::Likelihood VoiceDetectionImpl::likelihood() const {
+ rtc::CritScope cs(crit_);
return likelihood_;
}
int VoiceDetectionImpl::set_frame_size_ms(int size) {
- CriticalSectionScoped crit_scoped(crit_);
- assert(size == 10); // TODO(ajm): remove when supported.
- if (size != 10 &&
- size != 20 &&
- size != 30) {
- return apm_->kBadParameterError;
- }
-
+ rtc::CritScope cs(crit_);
+ RTC_DCHECK_EQ(10, size); // TODO(ajm): remove when supported.
frame_size_ms_ = size;
-
- return Initialize();
+ Initialize(sample_rate_hz_);
+ return AudioProcessing::kNoError;
}
int VoiceDetectionImpl::frame_size_ms() const {
+ rtc::CritScope cs(crit_);
return frame_size_ms_;
}
-
-int VoiceDetectionImpl::Initialize() {
- int err = ProcessingComponent::Initialize();
- if (err != apm_->kNoError || !is_component_enabled()) {
- return err;
- }
-
- using_external_vad_ = false;
- frame_size_samples_ = static_cast<size_t>(
- frame_size_ms_ * apm_->proc_split_sample_rate_hz() / 1000);
- // TODO(ajm): intialize frame buffer here.
-
- return apm_->kNoError;
-}
-
-void* VoiceDetectionImpl::CreateHandle() const {
- return WebRtcVad_Create();
-}
-
-void VoiceDetectionImpl::DestroyHandle(void* handle) const {
- WebRtcVad_Free(static_cast<Handle*>(handle));
-}
-
-int VoiceDetectionImpl::InitializeHandle(void* handle) const {
- return WebRtcVad_Init(static_cast<Handle*>(handle));
-}
-
-int VoiceDetectionImpl::ConfigureHandle(void* handle) const {
- return WebRtcVad_set_mode(static_cast<Handle*>(handle),
- MapSetting(likelihood_));
-}
-
-int VoiceDetectionImpl::num_handles_required() const {
- return 1;
-}
-
-int VoiceDetectionImpl::GetHandleError(void* handle) const {
- // The VAD has no get_error() function.
- assert(handle != NULL);
- return apm_->kUnspecifiedError;
-}
} // namespace webrtc
diff --git a/webrtc/modules/audio_processing/voice_detection_impl.h b/webrtc/modules/audio_processing/voice_detection_impl.h
index b18808316e..0d6d8cf14a 100644
--- a/webrtc/modules/audio_processing/voice_detection_impl.h
+++ b/webrtc/modules/audio_processing/voice_detection_impl.h
@@ -11,31 +11,27 @@
#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_VOICE_DETECTION_IMPL_H_
#define WEBRTC_MODULES_AUDIO_PROCESSING_VOICE_DETECTION_IMPL_H_
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
-#include "webrtc/modules/audio_processing/processing_component.h"
namespace webrtc {
class AudioBuffer;
-class CriticalSectionWrapper;
-class VoiceDetectionImpl : public VoiceDetection,
- public ProcessingComponent {
+class VoiceDetectionImpl : public VoiceDetection {
public:
- VoiceDetectionImpl(const AudioProcessing* apm, CriticalSectionWrapper* crit);
- virtual ~VoiceDetectionImpl();
+ explicit VoiceDetectionImpl(rtc::CriticalSection* crit);
+ ~VoiceDetectionImpl() override;
- int ProcessCaptureAudio(AudioBuffer* audio);
+ // TODO(peah): Fold into ctor, once public API is removed.
+ void Initialize(int sample_rate_hz);
+ void ProcessCaptureAudio(AudioBuffer* audio);
// VoiceDetection implementation.
- bool is_enabled() const override;
-
- // ProcessingComponent implementation.
- int Initialize() override;
-
- private:
- // VoiceDetection implementation.
int Enable(bool enable) override;
+ bool is_enabled() const override;
int set_stream_has_voice(bool has_voice) override;
bool stream_has_voice() const override;
int set_likelihood(Likelihood likelihood) override;
@@ -43,21 +39,18 @@ class VoiceDetectionImpl : public VoiceDetection,
int set_frame_size_ms(int size) override;
int frame_size_ms() const override;
- // ProcessingComponent implementation.
- void* CreateHandle() const override;
- int InitializeHandle(void* handle) const override;
- int ConfigureHandle(void* handle) const override;
- void DestroyHandle(void* handle) const override;
- int num_handles_required() const override;
- int GetHandleError(void* handle) const override;
-
- const AudioProcessing* apm_;
- CriticalSectionWrapper* crit_;
- bool stream_has_voice_;
- bool using_external_vad_;
- Likelihood likelihood_;
- int frame_size_ms_;
- size_t frame_size_samples_;
+ private:
+ class Vad;
+ rtc::CriticalSection* const crit_;
+ bool enabled_ GUARDED_BY(crit_) = false;
+ bool stream_has_voice_ GUARDED_BY(crit_) = false;
+ bool using_external_vad_ GUARDED_BY(crit_) = false;
+ Likelihood likelihood_ GUARDED_BY(crit_) = kLowLikelihood;
+ int frame_size_ms_ GUARDED_BY(crit_) = 10;
+ size_t frame_size_samples_ GUARDED_BY(crit_) = 0;
+ int sample_rate_hz_ GUARDED_BY(crit_) = 0;
+ rtc::scoped_ptr<Vad> vad_ GUARDED_BY(crit_);
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(VoiceDetectionImpl);
};
} // namespace webrtc
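
The rewrite above drops the ProcessingComponent base and CriticalSectionWrapper in favor of a caller-owned rtc::CriticalSection, an rtc::CritScope guard in every accessor, and thread-safety annotations with in-class default initializers. A minimal sketch of that shape, assuming only the rtc primitives visible in the diff (the class itself is hypothetical):

#include "webrtc/base/criticalsection.h"
#include "webrtc/base/thread_annotations.h"

// The owner supplies the lock, every public method takes a scope guard,
// and GUARDED_BY lets the compiler flag unlocked accesses.
class Counter {
 public:
  explicit Counter(rtc::CriticalSection* crit) : crit_(crit) {}
  void Increment() {
    rtc::CritScope cs(crit_);
    ++value_;
  }
  int value() const {
    rtc::CritScope cs(crit_);
    return value_;
  }

 private:
  rtc::CriticalSection* const crit_;
  int value_ GUARDED_BY(crit_) = 0;
};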
diff --git a/webrtc/modules/bitrate_controller/BUILD.gn b/webrtc/modules/bitrate_controller/BUILD.gn
index 4ef536b572..5e3741ba93 100644
--- a/webrtc/modules/bitrate_controller/BUILD.gn
+++ b/webrtc/modules/bitrate_controller/BUILD.gn
@@ -10,7 +10,6 @@ import("../../build/webrtc.gni")
source_set("bitrate_controller") {
sources = [
- "bitrate_allocator.cc",
"bitrate_controller_impl.cc",
"bitrate_controller_impl.h",
"include/bitrate_allocator.h",
diff --git a/webrtc/modules/bitrate_controller/bitrate_allocator.cc b/webrtc/modules/bitrate_controller/bitrate_allocator.cc
deleted file mode 100644
index 0aec528cde..0000000000
--- a/webrtc/modules/bitrate_controller/bitrate_allocator.cc
+++ /dev/null
@@ -1,190 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- *
- */
-
-#include "webrtc/modules/bitrate_controller/include/bitrate_allocator.h"
-
-#include <algorithm>
-#include <utility>
-
-#include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"
-
-namespace webrtc {
-
-// Allow packets to be transmitted in up to 2 times max video bitrate if the
-// bandwidth estimate allows it.
-const int kTransmissionMaxBitrateMultiplier = 2;
-const int kDefaultBitrateBps = 300000;
-
-BitrateAllocator::BitrateAllocator()
- : crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- bitrate_observers_(),
- enforce_min_bitrate_(true),
- last_bitrate_bps_(kDefaultBitrateBps),
- last_fraction_loss_(0),
- last_rtt_(0) {
-}
-
-
-void BitrateAllocator::OnNetworkChanged(uint32_t bitrate,
- uint8_t fraction_loss,
- int64_t rtt) {
- CriticalSectionScoped lock(crit_sect_.get());
- last_bitrate_bps_ = bitrate;
- last_fraction_loss_ = fraction_loss;
- last_rtt_ = rtt;
- ObserverBitrateMap allocation = AllocateBitrates();
- for (const auto& kv : allocation)
- kv.first->OnNetworkChanged(kv.second, last_fraction_loss_, last_rtt_);
-}
-
-BitrateAllocator::ObserverBitrateMap BitrateAllocator::AllocateBitrates() {
- if (bitrate_observers_.empty())
- return ObserverBitrateMap();
-
- uint32_t sum_min_bitrates = 0;
- for (const auto& observer : bitrate_observers_)
- sum_min_bitrates += observer.second.min_bitrate;
- if (last_bitrate_bps_ <= sum_min_bitrates)
- return LowRateAllocation(last_bitrate_bps_);
- else
- return NormalRateAllocation(last_bitrate_bps_, sum_min_bitrates);
-}
-
-int BitrateAllocator::AddBitrateObserver(BitrateObserver* observer,
- uint32_t min_bitrate_bps,
- uint32_t max_bitrate_bps) {
- CriticalSectionScoped lock(crit_sect_.get());
-
- BitrateObserverConfList::iterator it =
- FindObserverConfigurationPair(observer);
-
- // Allow the max bitrate to be exceeded for FEC and retransmissions.
- // TODO(holmer): We have to get rid of this hack as it makes it difficult to
- // properly allocate bitrate. The allocator should instead distribute any
- // extra bitrate after all streams have maxed out.
- max_bitrate_bps *= kTransmissionMaxBitrateMultiplier;
- if (it != bitrate_observers_.end()) {
- // Update current configuration.
- it->second.min_bitrate = min_bitrate_bps;
- it->second.max_bitrate = max_bitrate_bps;
- } else {
- // Add new settings.
- bitrate_observers_.push_back(BitrateObserverConfiguration(
- observer, BitrateConfiguration(min_bitrate_bps, max_bitrate_bps)));
- bitrate_observers_modified_ = true;
- }
-
- ObserverBitrateMap allocation = AllocateBitrates();
- int new_observer_bitrate_bps = 0;
- for (auto& kv : allocation) {
- kv.first->OnNetworkChanged(kv.second, last_fraction_loss_, last_rtt_);
- if (kv.first == observer)
- new_observer_bitrate_bps = kv.second;
- }
- return new_observer_bitrate_bps;
-}
-
-void BitrateAllocator::RemoveBitrateObserver(BitrateObserver* observer) {
- CriticalSectionScoped lock(crit_sect_.get());
- BitrateObserverConfList::iterator it =
- FindObserverConfigurationPair(observer);
- if (it != bitrate_observers_.end()) {
- bitrate_observers_.erase(it);
- bitrate_observers_modified_ = true;
- }
-}
-
-void BitrateAllocator::GetMinMaxBitrateSumBps(int* min_bitrate_sum_bps,
- int* max_bitrate_sum_bps) const {
- *min_bitrate_sum_bps = 0;
- *max_bitrate_sum_bps = 0;
-
- CriticalSectionScoped lock(crit_sect_.get());
- for (const auto& observer : bitrate_observers_) {
- *min_bitrate_sum_bps += observer.second.min_bitrate;
- *max_bitrate_sum_bps += observer.second.max_bitrate;
- }
-}
-
-BitrateAllocator::BitrateObserverConfList::iterator
-BitrateAllocator::FindObserverConfigurationPair(
- const BitrateObserver* observer) {
- for (auto it = bitrate_observers_.begin(); it != bitrate_observers_.end();
- ++it) {
- if (it->first == observer)
- return it;
- }
- return bitrate_observers_.end();
-}
-
-void BitrateAllocator::EnforceMinBitrate(bool enforce_min_bitrate) {
- CriticalSectionScoped lock(crit_sect_.get());
- enforce_min_bitrate_ = enforce_min_bitrate;
-}
-
-BitrateAllocator::ObserverBitrateMap BitrateAllocator::NormalRateAllocation(
- uint32_t bitrate,
- uint32_t sum_min_bitrates) {
- uint32_t number_of_observers = bitrate_observers_.size();
- uint32_t bitrate_per_observer =
- (bitrate - sum_min_bitrates) / number_of_observers;
- // Use map to sort list based on max bitrate.
- ObserverSortingMap list_max_bitrates;
- for (const auto& observer : bitrate_observers_) {
- list_max_bitrates.insert(std::pair<uint32_t, ObserverConfiguration>(
- observer.second.max_bitrate,
- ObserverConfiguration(observer.first, observer.second.min_bitrate)));
- }
- ObserverBitrateMap allocation;
- ObserverSortingMap::iterator max_it = list_max_bitrates.begin();
- while (max_it != list_max_bitrates.end()) {
- number_of_observers--;
- uint32_t observer_allowance =
- max_it->second.min_bitrate + bitrate_per_observer;
- if (max_it->first < observer_allowance) {
- // We have more than enough for this observer.
- // Carry the remainder forward.
- uint32_t remainder = observer_allowance - max_it->first;
- if (number_of_observers != 0) {
- bitrate_per_observer += remainder / number_of_observers;
- }
- allocation[max_it->second.observer] = max_it->first;
- } else {
- allocation[max_it->second.observer] = observer_allowance;
- }
- list_max_bitrates.erase(max_it);
- // Prepare next iteration.
- max_it = list_max_bitrates.begin();
- }
- return allocation;
-}
-
-BitrateAllocator::ObserverBitrateMap BitrateAllocator::LowRateAllocation(
- uint32_t bitrate) {
- ObserverBitrateMap allocation;
- if (enforce_min_bitrate_) {
- // Min bitrate to all observers.
- for (const auto& observer : bitrate_observers_)
- allocation[observer.first] = observer.second.min_bitrate;
- } else {
- // Allocate up to |min_bitrate| to one observer at a time, until
- // |bitrate| is depleted.
- uint32_t remainder = bitrate;
- for (const auto& observer : bitrate_observers_) {
- uint32_t allocated_bitrate =
- std::min(remainder, observer.second.min_bitrate);
- allocation[observer.first] = allocated_bitrate;
- remainder -= allocated_bitrate;
- }
- }
- return allocation;
-}
-} // namespace webrtc
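
For reference, the NormalRateAllocation() waterfall deleted above can be followed with concrete numbers (all values hypothetical): each observer receives its min bitrate plus an equal share of the surplus, observers are visited in ascending max-bitrate order, and any share an observer cannot absorb is carried forward to the rest. A standalone sketch of just that loop:

#include <algorithm>
#include <cstdint>
#include <cstdio>
#include <map>

int main() {
  // max_bitrate -> min_bitrate, ascending by max, as the multimap sorted it.
  std::multimap<uint32_t, uint32_t> by_max = {{150000, 100000},
                                              {600000, 200000}};
  uint32_t bitrate = 500000;  // Estimated available bitrate.
  uint32_t sum_min = 300000;  // Sum of the observers' min bitrates.
  uint32_t remaining = static_cast<uint32_t>(by_max.size());
  uint32_t per_observer = (bitrate - sum_min) / remaining;  // 100000 each.
  for (auto it = by_max.begin(); it != by_max.end(); it = by_max.erase(it)) {
    --remaining;
    uint32_t allowance = it->second + per_observer;
    uint32_t granted = std::min(allowance, it->first);
    if (allowance > it->first && remaining != 0)
      per_observer += (allowance - it->first) / remaining;  // Carry forward.
    std::printf("granted %u bps\n", granted);  // 150000, then 350000.
  }
  return 0;
}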
diff --git a/webrtc/modules/bitrate_controller/bitrate_allocator_unittest.cc b/webrtc/modules/bitrate_controller/bitrate_allocator_unittest.cc
deleted file mode 100644
index 4fc7e83b5b..0000000000
--- a/webrtc/modules/bitrate_controller/bitrate_allocator_unittest.cc
+++ /dev/null
@@ -1,212 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <algorithm>
-#include <vector>
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/bitrate_controller/include/bitrate_allocator.h"
-#include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"
-
-namespace webrtc {
-
-class TestBitrateObserver : public BitrateObserver {
- public:
- TestBitrateObserver()
- : last_bitrate_(0), last_fraction_loss_(0), last_rtt_(0) {}
-
- virtual void OnNetworkChanged(uint32_t bitrate,
- uint8_t fraction_loss,
- int64_t rtt) {
- last_bitrate_ = bitrate;
- last_fraction_loss_ = fraction_loss;
- last_rtt_ = rtt;
- }
- uint32_t last_bitrate_;
- uint8_t last_fraction_loss_;
- int64_t last_rtt_;
-};
-
-class BitrateAllocatorTest : public ::testing::Test {
- protected:
- BitrateAllocatorTest() : allocator_(new BitrateAllocator()) {
- allocator_->OnNetworkChanged(300000u, 0, 0);
- }
- ~BitrateAllocatorTest() {}
-
- rtc::scoped_ptr<BitrateAllocator> allocator_;
-};
-
-TEST_F(BitrateAllocatorTest, UpdatingBitrateObserver) {
- TestBitrateObserver bitrate_observer;
- int start_bitrate =
- allocator_->AddBitrateObserver(&bitrate_observer, 100000, 1500000);
- EXPECT_EQ(300000, start_bitrate);
- allocator_->OnNetworkChanged(200000, 0, 0);
- EXPECT_EQ(200000u, bitrate_observer.last_bitrate_);
-
- // TODO(pbos): Expect capping to 1.5M instead of 3M when not boosting the max
- // bitrate for FEC/retransmissions (see TODO in BitrateAllocator).
- allocator_->OnNetworkChanged(4000000, 0, 0);
- EXPECT_EQ(3000000u, bitrate_observer.last_bitrate_);
- start_bitrate =
- allocator_->AddBitrateObserver(&bitrate_observer, 100000, 4000000);
- EXPECT_EQ(4000000, start_bitrate);
-
- start_bitrate =
- allocator_->AddBitrateObserver(&bitrate_observer, 100000, 1500000);
- EXPECT_EQ(3000000, start_bitrate);
- EXPECT_EQ(3000000u, bitrate_observer.last_bitrate_);
- allocator_->OnNetworkChanged(1500000, 0, 0);
- EXPECT_EQ(1500000u, bitrate_observer.last_bitrate_);
-}
-
-TEST_F(BitrateAllocatorTest, TwoBitrateObserversOneRtcpObserver) {
- TestBitrateObserver bitrate_observer_1;
- TestBitrateObserver bitrate_observer_2;
- int start_bitrate =
- allocator_->AddBitrateObserver(&bitrate_observer_1, 100000, 300000);
- EXPECT_EQ(300000, start_bitrate);
- start_bitrate =
- allocator_->AddBitrateObserver(&bitrate_observer_2, 200000, 300000);
- EXPECT_EQ(200000, start_bitrate);
-
- // Test too low start bitrate, hence lower than sum of min. Min bitrates will
- // be allocated to all observers.
- allocator_->OnNetworkChanged(200000, 0, 50);
- EXPECT_EQ(100000u, bitrate_observer_1.last_bitrate_);
- EXPECT_EQ(0, bitrate_observer_1.last_fraction_loss_);
- EXPECT_EQ(50, bitrate_observer_1.last_rtt_);
- EXPECT_EQ(200000u, bitrate_observer_2.last_bitrate_);
- EXPECT_EQ(0, bitrate_observer_2.last_fraction_loss_);
- EXPECT_EQ(50, bitrate_observer_2.last_rtt_);
-
- // Test a bitrate which should be distributed equally.
- allocator_->OnNetworkChanged(500000, 0, 50);
- const uint32_t kBitrateToShare = 500000 - 200000 - 100000;
- EXPECT_EQ(100000u + kBitrateToShare / 2, bitrate_observer_1.last_bitrate_);
- EXPECT_EQ(200000u + kBitrateToShare / 2, bitrate_observer_2.last_bitrate_);
-
- // Limited by 2x max bitrates since we leave room for FEC and retransmissions.
- allocator_->OnNetworkChanged(1500000, 0, 50);
- EXPECT_EQ(600000u, bitrate_observer_1.last_bitrate_);
- EXPECT_EQ(600000u, bitrate_observer_2.last_bitrate_);
-}
-
-class BitrateAllocatorTestNoEnforceMin : public ::testing::Test {
- protected:
- BitrateAllocatorTestNoEnforceMin() : allocator_(new BitrateAllocator()) {
- allocator_->EnforceMinBitrate(false);
- allocator_->OnNetworkChanged(300000u, 0, 0);
- }
- ~BitrateAllocatorTestNoEnforceMin() {}
-
- rtc::scoped_ptr<BitrateAllocator> allocator_;
-};
-
-// The following three tests verify that the EnforceMinBitrate() method works
-// as intended.
-TEST_F(BitrateAllocatorTestNoEnforceMin, OneBitrateObserver) {
- TestBitrateObserver bitrate_observer_1;
- int start_bitrate =
- allocator_->AddBitrateObserver(&bitrate_observer_1, 100000, 400000);
- EXPECT_EQ(300000, start_bitrate);
-
- // High REMB.
- allocator_->OnNetworkChanged(150000, 0, 0);
- EXPECT_EQ(150000u, bitrate_observer_1.last_bitrate_);
-
- // Low REMB.
- allocator_->OnNetworkChanged(10000, 0, 0);
- EXPECT_EQ(10000u, bitrate_observer_1.last_bitrate_);
-
- allocator_->RemoveBitrateObserver(&bitrate_observer_1);
-}
-
-TEST_F(BitrateAllocatorTestNoEnforceMin, ThreeBitrateObservers) {
- TestBitrateObserver bitrate_observer_1;
- TestBitrateObserver bitrate_observer_2;
- TestBitrateObserver bitrate_observer_3;
- // Set up the observers with min bitrates at 100000, 200000, and 300000.
- int start_bitrate =
- allocator_->AddBitrateObserver(&bitrate_observer_1, 100000, 400000);
- EXPECT_EQ(300000, start_bitrate);
-
- start_bitrate =
- allocator_->AddBitrateObserver(&bitrate_observer_2, 200000, 400000);
- EXPECT_EQ(200000, start_bitrate);
- EXPECT_EQ(100000u, bitrate_observer_1.last_bitrate_);
-
- start_bitrate =
- allocator_->AddBitrateObserver(&bitrate_observer_3, 300000, 400000);
- EXPECT_EQ(0, start_bitrate);
- EXPECT_EQ(100000u, bitrate_observer_1.last_bitrate_);
- EXPECT_EQ(200000u, bitrate_observer_2.last_bitrate_);
-
- // High REMB. Make sure the controllers get a fair share of the surplus
- // (i.e., what is left after each controller gets its min rate).
- allocator_->OnNetworkChanged(690000, 0, 0);
- // Verify that each observer gets its min rate (sum of min rates is 600000),
- // and that the remaining 90000 is divided equally among the three.
- uint32_t bitrate_to_share = 690000u - 100000u - 200000u - 300000u;
- EXPECT_EQ(100000u + bitrate_to_share / 3, bitrate_observer_1.last_bitrate_);
- EXPECT_EQ(200000u + bitrate_to_share / 3, bitrate_observer_2.last_bitrate_);
- EXPECT_EQ(300000u + bitrate_to_share / 3, bitrate_observer_3.last_bitrate_);
-
- // High REMB, but below the sum of min bitrates.
- allocator_->OnNetworkChanged(500000, 0, 0);
- // Verify that the first and second observers get their min bitrates, and the
- // third gets the remainder.
- EXPECT_EQ(100000u, bitrate_observer_1.last_bitrate_); // Min bitrate.
- EXPECT_EQ(200000u, bitrate_observer_2.last_bitrate_); // Min bitrate.
- EXPECT_EQ(200000u, bitrate_observer_3.last_bitrate_); // Remainder.
-
- // Low REMB.
- allocator_->OnNetworkChanged(10000, 0, 0);
- // Verify that the first observer gets all the rate, and the rest get zero.
- EXPECT_EQ(10000u, bitrate_observer_1.last_bitrate_);
- EXPECT_EQ(0u, bitrate_observer_2.last_bitrate_);
- EXPECT_EQ(0u, bitrate_observer_3.last_bitrate_);
-
- allocator_->RemoveBitrateObserver(&bitrate_observer_1);
- allocator_->RemoveBitrateObserver(&bitrate_observer_2);
- allocator_->RemoveBitrateObserver(&bitrate_observer_3);
-}
-
-TEST_F(BitrateAllocatorTest, ThreeBitrateObserversLowRembEnforceMin) {
- TestBitrateObserver bitrate_observer_1;
- TestBitrateObserver bitrate_observer_2;
- TestBitrateObserver bitrate_observer_3;
- int start_bitrate =
- allocator_->AddBitrateObserver(&bitrate_observer_1, 100000, 400000);
- EXPECT_EQ(300000, start_bitrate);
-
- start_bitrate =
- allocator_->AddBitrateObserver(&bitrate_observer_2, 200000, 400000);
- EXPECT_EQ(200000, start_bitrate);
- EXPECT_EQ(100000u, bitrate_observer_1.last_bitrate_);
-
- start_bitrate =
- allocator_->AddBitrateObserver(&bitrate_observer_3, 300000, 400000);
- EXPECT_EQ(300000, start_bitrate);
- EXPECT_EQ(100000, static_cast<int>(bitrate_observer_1.last_bitrate_));
- EXPECT_EQ(200000, static_cast<int>(bitrate_observer_2.last_bitrate_));
-
- // Low REMB. Verify that all observers still get their respective min bitrate.
- allocator_->OnNetworkChanged(1000, 0, 0);
- EXPECT_EQ(100000u, bitrate_observer_1.last_bitrate_); // Min cap.
- EXPECT_EQ(200000u, bitrate_observer_2.last_bitrate_); // Min cap.
- EXPECT_EQ(300000u, bitrate_observer_3.last_bitrate_); // Min cap.
-
- allocator_->RemoveBitrateObserver(&bitrate_observer_1);
- allocator_->RemoveBitrateObserver(&bitrate_observer_2);
- allocator_->RemoveBitrateObserver(&bitrate_observer_3);
-}
-} // namespace webrtc
diff --git a/webrtc/modules/bitrate_controller/bitrate_controller.gypi b/webrtc/modules/bitrate_controller/bitrate_controller.gypi
index 44c1b89ef2..3d86f2e32a 100644
--- a/webrtc/modules/bitrate_controller/bitrate_controller.gypi
+++ b/webrtc/modules/bitrate_controller/bitrate_controller.gypi
@@ -15,11 +15,9 @@
'<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
],
'sources': [
- 'bitrate_allocator.cc',
'bitrate_controller_impl.cc',
'bitrate_controller_impl.h',
'include/bitrate_controller.h',
- 'include/bitrate_allocator.h',
'send_side_bandwidth_estimation.cc',
'send_side_bandwidth_estimation.h',
],
diff --git a/webrtc/modules/bitrate_controller/bitrate_controller_impl.cc b/webrtc/modules/bitrate_controller/bitrate_controller_impl.cc
index 8857ee4b4a..f8fd2bb987 100644
--- a/webrtc/modules/bitrate_controller/bitrate_controller_impl.cc
+++ b/webrtc/modules/bitrate_controller/bitrate_controller_impl.cc
@@ -14,7 +14,7 @@
#include <algorithm>
#include <utility>
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
namespace webrtc {
@@ -129,6 +129,11 @@ void BitrateControllerImpl::SetReservedBitrate(uint32_t reserved_bitrate_bps) {
MaybeTriggerOnNetworkChanged();
}
+void BitrateControllerImpl::SetEventLog(RtcEventLog* event_log) {
+ rtc::CritScope cs(&critsect_);
+ bandwidth_estimation_.SetEventLog(event_log);
+}
+
void BitrateControllerImpl::OnReceivedEstimatedBitrate(uint32_t bitrate) {
{
rtc::CritScope cs(&critsect_);
diff --git a/webrtc/modules/bitrate_controller/bitrate_controller_impl.h b/webrtc/modules/bitrate_controller/bitrate_controller_impl.h
index a33a0e6f04..b601899631 100644
--- a/webrtc/modules/bitrate_controller/bitrate_controller_impl.h
+++ b/webrtc/modules/bitrate_controller/bitrate_controller_impl.h
@@ -41,6 +41,8 @@ class BitrateControllerImpl : public BitrateController {
void SetReservedBitrate(uint32_t reserved_bitrate_bps) override;
+ void SetEventLog(RtcEventLog* event_log) override;
+
int64_t TimeUntilNextProcess() override;
int32_t Process() override;
diff --git a/webrtc/modules/bitrate_controller/bitrate_controller_unittest.cc b/webrtc/modules/bitrate_controller/bitrate_controller_unittest.cc
index 72831c78d6..2b9e589fbd 100644
--- a/webrtc/modules/bitrate_controller/bitrate_controller_unittest.cc
+++ b/webrtc/modules/bitrate_controller/bitrate_controller_unittest.cc
@@ -14,7 +14,7 @@
#include <vector>
#include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
using webrtc::RtcpBandwidthObserver;
using webrtc::BitrateObserver;
diff --git a/webrtc/modules/bitrate_controller/include/bitrate_allocator.h b/webrtc/modules/bitrate_controller/include/bitrate_allocator.h
deleted file mode 100644
index 34b9ed5328..0000000000
--- a/webrtc/modules/bitrate_controller/include/bitrate_allocator.h
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- *
- * Usage: this class will register multiple RtcpBitrateObserver's one at each
- * RTCP module. It will aggregate the results and run one bandwidth estimation
- * and push the result to the encoders via BitrateObserver(s).
- */
-
-#ifndef WEBRTC_MODULES_BITRATE_CONTROLLER_INCLUDE_BITRATE_ALLOCATOR_H_
-#define WEBRTC_MODULES_BITRATE_CONTROLLER_INCLUDE_BITRATE_ALLOCATOR_H_
-
-#include <list>
-#include <map>
-#include <utility>
-
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/base/thread_annotations.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-
-namespace webrtc {
-
-class BitrateObserver;
-
-class BitrateAllocator {
- public:
- BitrateAllocator();
-
- void OnNetworkChanged(uint32_t target_bitrate,
- uint8_t fraction_loss,
- int64_t rtt);
-
- // Set the start and max send bitrate used by the bandwidth management.
- //
- // |observer| updates bitrates if already in use.
- // |min_bitrate_bps| = 0 equals no min bitrate.
- // |max_bitrate_bps| = 0 equals no max bitrate.
- // Returns bitrate allocated for the bitrate observer.
- int AddBitrateObserver(BitrateObserver* observer,
- uint32_t min_bitrate_bps,
- uint32_t max_bitrate_bps);
-
- void RemoveBitrateObserver(BitrateObserver* observer);
-
- void GetMinMaxBitrateSumBps(int* min_bitrate_sum_bps,
- int* max_bitrate_sum_bps) const;
-
- // This method controls the behavior when the available bitrate is lower than
- // the minimum bitrate, or the sum of minimum bitrates.
- // When true, the bitrate will never be set lower than the minimum bitrate(s).
- // When false, the bitrate observers will be allocated rates up to their
- // respective minimum bitrate, satisfying one observer after the other.
- void EnforceMinBitrate(bool enforce_min_bitrate);
-
- private:
- struct BitrateConfiguration {
- BitrateConfiguration(uint32_t min_bitrate, uint32_t max_bitrate)
- : min_bitrate(min_bitrate), max_bitrate(max_bitrate) {}
- uint32_t min_bitrate;
- uint32_t max_bitrate;
- };
- struct ObserverConfiguration {
- ObserverConfiguration(BitrateObserver* observer, uint32_t bitrate)
- : observer(observer), min_bitrate(bitrate) {}
- BitrateObserver* const observer;
- uint32_t min_bitrate;
- };
- typedef std::pair<BitrateObserver*, BitrateConfiguration>
- BitrateObserverConfiguration;
- typedef std::list<BitrateObserverConfiguration> BitrateObserverConfList;
- typedef std::multimap<uint32_t, ObserverConfiguration> ObserverSortingMap;
- typedef std::map<BitrateObserver*, int> ObserverBitrateMap;
-
- BitrateObserverConfList::iterator FindObserverConfigurationPair(
- const BitrateObserver* observer) EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
- ObserverBitrateMap AllocateBitrates() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
- ObserverBitrateMap NormalRateAllocation(uint32_t bitrate,
- uint32_t sum_min_bitrates)
- EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
-
- ObserverBitrateMap LowRateAllocation(uint32_t bitrate)
- EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
-
- rtc::scoped_ptr<CriticalSectionWrapper> crit_sect_;
- // Stored in a list to keep track of the insertion order.
- BitrateObserverConfList bitrate_observers_ GUARDED_BY(crit_sect_);
- bool bitrate_observers_modified_ GUARDED_BY(crit_sect_);
- bool enforce_min_bitrate_ GUARDED_BY(crit_sect_);
- uint32_t last_bitrate_bps_ GUARDED_BY(crit_sect_);
- uint8_t last_fraction_loss_ GUARDED_BY(crit_sect_);
- int64_t last_rtt_ GUARDED_BY(crit_sect_);
-};
-} // namespace webrtc
-#endif // WEBRTC_MODULES_BITRATE_CONTROLLER_INCLUDE_BITRATE_ALLOCATOR_H_
diff --git a/webrtc/modules/bitrate_controller/include/bitrate_controller.h b/webrtc/modules/bitrate_controller/include/bitrate_controller.h
index bb532886c7..d1eca8e0fe 100644
--- a/webrtc/modules/bitrate_controller/include/bitrate_controller.h
+++ b/webrtc/modules/bitrate_controller/include/bitrate_controller.h
@@ -17,12 +17,13 @@
#include <map>
-#include "webrtc/modules/interface/module.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/include/module.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
namespace webrtc {
class CriticalSectionWrapper;
+class RtcEventLog;
struct PacketInfo;
class BitrateObserver {
@@ -56,6 +57,8 @@ class BitrateController : public Module {
virtual void SetStartBitrate(int start_bitrate_bps) = 0;
virtual void SetMinMaxBitrate(int min_bitrate_bps, int max_bitrate_bps) = 0;
+ virtual void SetEventLog(RtcEventLog* event_log) = 0;
+
// Gets the available payload bandwidth in bits per second. Note that
// this bandwidth excludes packet headers.
virtual bool AvailableBandwidth(uint32_t* bandwidth) const = 0;
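
One detail worth noting in the hunk above: RtcEventLog is only forward-declared, because the interface touches it solely through a pointer; only the .cc that dereferences it needs the full definition, so the include graph stays shallow. Sketch of the same shape with a hypothetical interface:

namespace webrtc {
class RtcEventLog;  // Forward declaration; definition lives in call/.

class ExampleController {
 public:
  virtual ~ExampleController() {}
  virtual void SetEventLog(RtcEventLog* event_log) = 0;
};
}  // namespace webrtc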
diff --git a/webrtc/modules/bitrate_controller/include/mock/mock_bitrate_controller.h b/webrtc/modules/bitrate_controller/include/mock/mock_bitrate_controller.h
new file mode 100644
index 0000000000..7a7d2e406b
--- /dev/null
+++ b/webrtc/modules/bitrate_controller/include/mock/mock_bitrate_controller.h
@@ -0,0 +1,30 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_BITRATE_CONTROLLER_INCLUDE_MOCK_MOCK_BITRATE_CONTROLLER_H_
+#define WEBRTC_MODULES_BITRATE_CONTROLLER_INCLUDE_MOCK_MOCK_BITRATE_CONTROLLER_H_
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"
+
+namespace webrtc {
+namespace test {
+
+class MockBitrateObserver : public BitrateObserver {
+ public:
+ MOCK_METHOD3(OnNetworkChanged,
+ void(uint32_t bitrate_bps,
+ uint8_t fraction_loss,
+ int64_t rtt_ms));
+};
+} // namespace test
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_BITRATE_CONTROLLER_INCLUDE_MOCK_MOCK_BITRATE_CONTROLLER_H_
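
A hypothetical usage of the new MockBitrateObserver in a test body, assuming only gmock and gtest themselves:

#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/modules/bitrate_controller/include/mock/mock_bitrate_controller.h"

TEST(MockBitrateObserverTest, ReceivesNetworkChange) {
  webrtc::test::MockBitrateObserver observer;
  // Expect one callback with 300 kbps, no loss, 50 ms RTT.
  EXPECT_CALL(observer, OnNetworkChanged(300000u, 0, 50)).Times(1);
  observer.OnNetworkChanged(300000u, 0, 50);  // Stand-in for the allocator.
}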
diff --git a/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.cc b/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.cc
index 9fa7b0dfc0..258c4d94de 100644
--- a/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.cc
+++ b/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.cc
@@ -13,9 +13,10 @@
#include <cmath>
#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
#include "webrtc/system_wrappers/include/field_trial.h"
-#include "webrtc/system_wrappers/include/logging.h"
#include "webrtc/system_wrappers/include/metrics.h"
+#include "webrtc/call/rtc_event_log.h"
namespace webrtc {
namespace {
@@ -59,7 +60,8 @@ SendSideBandwidthEstimation::SendSideBandwidthEstimation()
initially_lost_packets_(0),
bitrate_at_2_seconds_kbps_(0),
uma_update_state_(kNoUpdate),
- rampup_uma_stats_updated_(kNumUmaRampupMetrics, false) {}
+ rampup_uma_stats_updated_(kNumUmaRampupMetrics, false),
+ event_log_(nullptr) {}
SendSideBandwidthEstimation::~SendSideBandwidthEstimation() {}
@@ -144,8 +146,8 @@ void SendSideBandwidthEstimation::UpdateUmaStats(int64_t now_ms,
for (size_t i = 0; i < kNumUmaRampupMetrics; ++i) {
if (!rampup_uma_stats_updated_[i] &&
bitrate_kbps >= kUmaRampupMetrics[i].bitrate_kbps) {
- RTC_HISTOGRAM_COUNTS_100000(kUmaRampupMetrics[i].metric_name,
- now_ms - first_report_time_ms_);
+ RTC_HISTOGRAM_COUNTS_SPARSE_100000(kUmaRampupMetrics[i].metric_name,
+ now_ms - first_report_time_ms_);
rampup_uma_stats_updated_[i] = true;
}
}
@@ -154,22 +156,19 @@ void SendSideBandwidthEstimation::UpdateUmaStats(int64_t now_ms,
} else if (uma_update_state_ == kNoUpdate) {
uma_update_state_ = kFirstDone;
bitrate_at_2_seconds_kbps_ = bitrate_kbps;
- RTC_HISTOGRAM_COUNTS(
- "WebRTC.BWE.InitiallyLostPackets", initially_lost_packets_, 0, 100, 50);
- RTC_HISTOGRAM_COUNTS(
- "WebRTC.BWE.InitialRtt", static_cast<int>(rtt), 0, 2000, 50);
- RTC_HISTOGRAM_COUNTS("WebRTC.BWE.InitialBandwidthEstimate",
- bitrate_at_2_seconds_kbps_,
- 0,
- 2000,
- 50);
+ RTC_HISTOGRAM_COUNTS_SPARSE("WebRTC.BWE.InitiallyLostPackets",
+ initially_lost_packets_, 0, 100, 50);
+ RTC_HISTOGRAM_COUNTS_SPARSE("WebRTC.BWE.InitialRtt", static_cast<int>(rtt),
+ 0, 2000, 50);
+ RTC_HISTOGRAM_COUNTS_SPARSE("WebRTC.BWE.InitialBandwidthEstimate",
+ bitrate_at_2_seconds_kbps_, 0, 2000, 50);
} else if (uma_update_state_ == kFirstDone &&
now_ms - first_report_time_ms_ >= kBweConverganceTimeMs) {
uma_update_state_ = kDone;
int bitrate_diff_kbps =
std::max(bitrate_at_2_seconds_kbps_ - bitrate_kbps, 0);
- RTC_HISTOGRAM_COUNTS(
- "WebRTC.BWE.InitialVsConvergedDiff", bitrate_diff_kbps, 0, 2000, 50);
+ RTC_HISTOGRAM_COUNTS_SPARSE("WebRTC.BWE.InitialVsConvergedDiff",
+ bitrate_diff_kbps, 0, 2000, 50);
}
}
@@ -206,6 +205,11 @@ void SendSideBandwidthEstimation::UpdateEstimate(int64_t now_ms) {
// rates).
bitrate_ += 1000;
+ if (event_log_) {
+ event_log_->LogBwePacketLossEvent(
+ bitrate_, last_fraction_loss_,
+ expected_packets_since_last_loss_update_);
+ }
} else if (last_fraction_loss_ <= 26) {
// Loss between 2% - 10%: Do nothing.
} else {
@@ -224,6 +228,11 @@ void SendSideBandwidthEstimation::UpdateEstimate(int64_t now_ms) {
512.0);
has_decreased_since_last_fraction_loss_ = true;
}
+ if (event_log_) {
+ event_log_->LogBwePacketLossEvent(
+ bitrate_, last_fraction_loss_,
+ expected_packets_since_last_loss_update_);
+ }
}
}
bitrate_ = CapBitrateToThresholds(now_ms, bitrate_);
@@ -274,4 +283,9 @@ uint32_t SendSideBandwidthEstimation::CapBitrateToThresholds(
}
return bitrate;
}
+
+void SendSideBandwidthEstimation::SetEventLog(RtcEventLog* event_log) {
+ event_log_ = event_log;
+}
+
} // namespace webrtc
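
The event-log hooks above sit on the loss-driven branches of UpdateEstimate(). For orientation: fraction loss arrives in Q8 (255 means 100%), losses up to ~10% (<= 26) leave the estimate alone or nudge it up, and, per the surrounding code, higher losses scale the estimate by (512 - loss) / 512. A hedged arithmetic sketch of that decrease with hypothetical values:

#include <cstdint>
#include <cstdio>

int main() {
  uint32_t bitrate_bps = 1000000;  // Hypothetical current estimate.
  uint8_t fraction_loss = 51;      // ~20% loss in Q8 (255 == 100%).
  if (fraction_loss > 26) {        // More than ~10% loss: back off.
    bitrate_bps = static_cast<uint32_t>(
        bitrate_bps * static_cast<double>(512 - fraction_loss) / 512.0);
  }
  std::printf("new estimate: %u bps\n", bitrate_bps);  // 900390 bps.
  return 0;
}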
diff --git a/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.h b/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.h
index 40061d3ee7..7ffb42cb54 100644
--- a/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.h
+++ b/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.h
@@ -15,10 +15,13 @@
#include <deque>
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
namespace webrtc {
+
+class RtcEventLog;
+
class SendSideBandwidthEstimation {
public:
SendSideBandwidthEstimation();
@@ -42,6 +45,8 @@ class SendSideBandwidthEstimation {
void SetMinMaxBitrate(int min_bitrate, int max_bitrate);
int GetMinBitrate() const;
+ void SetEventLog(RtcEventLog* event_log);
+
private:
enum UmaState { kNoUpdate, kFirstDone, kDone };
@@ -81,6 +86,7 @@ class SendSideBandwidthEstimation {
int bitrate_at_2_seconds_kbps_;
UmaState uma_update_state_;
std::vector<bool> rampup_uma_stats_updated_;
+ RtcEventLog* event_log_;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_BITRATE_CONTROLLER_SEND_SIDE_BANDWIDTH_ESTIMATION_H_
diff --git a/webrtc/modules/desktop_capture/differ_block.cc b/webrtc/modules/desktop_capture/differ_block.cc
index 0dcb377411..d4cbda3601 100644
--- a/webrtc/modules/desktop_capture/differ_block.cc
+++ b/webrtc/modules/desktop_capture/differ_block.cc
@@ -12,7 +12,7 @@
#include <string.h>
-#include "build/build_config.h"
+#include "webrtc/typedefs.h"
#include "webrtc/modules/desktop_capture/differ_block_sse2.h"
#include "webrtc/system_wrappers/include/cpu_features_wrapper.h"
@@ -38,7 +38,7 @@ bool BlockDifference(const uint8_t* image1,
static bool (*diff_proc)(const uint8_t*, const uint8_t*, int) = NULL;
if (!diff_proc) {
-#if defined(ARCH_CPU_ARM_FAMILY) || defined(ARCH_CPU_MIPS_FAMILY)
+#if defined(WEBRTC_ARCH_ARM_FAMILY) || defined(WEBRTC_ARCH_MIPS_FAMILY)
// For ARM and MIPS processors, always use C version.
// TODO(hclam): Implement a NEON version.
diff_proc = &BlockDifference_C;
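
The hunk above swaps Chromium's build_config.h macros for WebRTC's own WEBRTC_ARCH_* family without changing the underlying idiom: resolve a function pointer once, on first call, then reuse it. A self-contained sketch of that lazy dispatch (helper names hypothetical, with the fallback standing in for the SIMD path):

#include <cstdint>
#include <cstring>

namespace {

bool BlockEqual_C(const uint8_t* a, const uint8_t* b, int bytes) {
  return std::memcmp(a, b, bytes) == 0;  // Portable fallback path.
}

bool BlockEqual_Simd(const uint8_t* a, const uint8_t* b, int bytes) {
  return BlockEqual_C(a, b, bytes);  // A real SIMD variant would go here.
}

}  // namespace

bool BlockEqual(const uint8_t* a, const uint8_t* b, int bytes) {
  static bool (*proc)(const uint8_t*, const uint8_t*, int) = nullptr;
  if (!proc) {
#if defined(WEBRTC_ARCH_ARM_FAMILY) || defined(WEBRTC_ARCH_MIPS_FAMILY)
    proc = &BlockEqual_C;     // No SIMD version for these targets yet.
#else
    proc = &BlockEqual_Simd;  // x86/x64 path in the real code.
#endif
  }
  return proc(a, b, bytes);
}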
diff --git a/webrtc/modules/desktop_capture/screen_capturer_win.cc b/webrtc/modules/desktop_capture/screen_capturer_win.cc
index 1f33155656..18be4eb30b 100644
--- a/webrtc/modules/desktop_capture/screen_capturer_win.cc
+++ b/webrtc/modules/desktop_capture/screen_capturer_win.cc
@@ -10,6 +10,8 @@
#include "webrtc/modules/desktop_capture/screen_capturer.h"
+#include <utility>
+
#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
#include "webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.h"
#include "webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.h"
@@ -22,7 +24,7 @@ ScreenCapturer* ScreenCapturer::Create(const DesktopCaptureOptions& options) {
new ScreenCapturerWinGdi(options));
if (options.allow_use_magnification_api())
- return new ScreenCapturerWinMagnifier(gdi_capturer.Pass());
+ return new ScreenCapturerWinMagnifier(std::move(gdi_capturer));
return gdi_capturer.release();
}
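
This hunk and the one below retire rtc::scoped_ptr::Pass(), a pre-C++11 move emulation, in favor of std::move() from <utility>. A minimal sketch of the migrated call shape, assuming rtc::scoped_ptr as used in the diff:

#include <utility>

#include "webrtc/base/scoped_ptr.h"

// Ownership transfer after the migration: std::move() where Pass() was.
rtc::scoped_ptr<int> MakeAnswer() {
  rtc::scoped_ptr<int> value(new int(42));
  return std::move(value);  // Was: return value.Pass();
}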
diff --git a/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.cc b/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.cc
index db40478023..066943d294 100644
--- a/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.cc
+++ b/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.cc
@@ -12,6 +12,8 @@
#include <assert.h>
+#include <utility>
+
#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
#include "webrtc/modules/desktop_capture/desktop_frame_win.h"
@@ -37,7 +39,7 @@ Atomic32 ScreenCapturerWinMagnifier::tls_index_(TLS_OUT_OF_INDEXES);
ScreenCapturerWinMagnifier::ScreenCapturerWinMagnifier(
rtc::scoped_ptr<ScreenCapturer> fallback_capturer)
- : fallback_capturer_(fallback_capturer.Pass()),
+ : fallback_capturer_(std::move(fallback_capturer)),
fallback_capturer_started_(false),
callback_(NULL),
current_screen_id_(kFullDesktopScreenId),
@@ -53,8 +55,7 @@ ScreenCapturerWinMagnifier::ScreenCapturerWinMagnifier(
host_window_(NULL),
magnifier_window_(NULL),
magnifier_initialized_(false),
- magnifier_capture_succeeded_(true) {
-}
+ magnifier_capture_succeeded_(true) {}
ScreenCapturerWinMagnifier::~ScreenCapturerWinMagnifier() {
// DestroyWindow must be called before MagUninitialize. magnifier_window_ is
@@ -236,7 +237,7 @@ BOOL ScreenCapturerWinMagnifier::OnMagImageScalingCallback(
RECT unclipped,
RECT clipped,
HRGN dirty) {
- assert(tls_index_.Value() != TLS_OUT_OF_INDEXES);
+ assert(tls_index_.Value() != static_cast<int32_t>(TLS_OUT_OF_INDEXES));
ScreenCapturerWinMagnifier* owner =
reinterpret_cast<ScreenCapturerWinMagnifier*>(
@@ -369,7 +370,7 @@ bool ScreenCapturerWinMagnifier::InitializeMagnifier() {
}
}
- if (tls_index_.Value() == TLS_OUT_OF_INDEXES) {
+ if (tls_index_.Value() == static_cast<int32_t>(TLS_OUT_OF_INDEXES)) {
// More than one threads may get here at the same time, but only one will
// write to tls_index_ using CompareExchange.
DWORD new_tls_index = TlsAlloc();
@@ -377,7 +378,7 @@ bool ScreenCapturerWinMagnifier::InitializeMagnifier() {
TlsFree(new_tls_index);
}
- assert(tls_index_.Value() != TLS_OUT_OF_INDEXES);
+ assert(tls_index_.Value() != static_cast<int32_t>(TLS_OUT_OF_INDEXES));
TlsSetValue(tls_index_.Value(), this);
magnifier_initialized_ = true;
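The casts added throughout this file address a signed/unsigned mismatch: TLS_OUT_OF_INDEXES is a DWORD (unsigned 32-bit, value 0xFFFFFFFF), while Atomic32::Value() returns int32_t. After the cast both operands are int32_t, with the all-ones bit pattern compared as -1. A sketch of the equivalence relied on (assuming two's complement, which holds on the supported Windows targets):

    const int32_t invalid_index = static_cast<int32_t>(TLS_OUT_OF_INDEXES);
    assert(invalid_index == -1);  // 0xFFFFFFFF reinterpreted as int32_t.
    assert(tls_index_.Value() != invalid_index);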
diff --git a/webrtc/modules/desktop_capture/window_capturer_win.cc b/webrtc/modules/desktop_capture/window_capturer_win.cc
index c0d71167a5..54b2768aa8 100644
--- a/webrtc/modules/desktop_capture/window_capturer_win.cc
+++ b/webrtc/modules/desktop_capture/window_capturer_win.cc
@@ -156,15 +156,16 @@ void WindowCapturerWin::Capture(const DesktopRegion& region) {
return;
}
- // Stop capturing if the window has been closed or hidden.
- if (!IsWindow(window_) || !IsWindowVisible(window_)) {
+ // Stop capturing if the window has been closed.
+ if (!IsWindow(window_)) {
callback_->OnCaptureCompleted(NULL);
return;
}
- // Return a 1x1 black frame if the window is minimized, to match the behavior
- // on Mac.
- if (IsIconic(window_)) {
+ // Return a 1x1 black frame if the window is minimized or invisible, to match
+ // the behavior on Mac. The window can be temporarily invisible during a
+ // transition into or out of full-screen mode.
+ if (IsIconic(window_) || !IsWindowVisible(window_)) {
BasicDesktopFrame* frame = new BasicDesktopFrame(DesktopSize(1, 1));
memset(frame->data(), 0, frame->stride() * frame->size().height());
diff --git a/webrtc/modules/include/module.h b/webrtc/modules/include/module.h
new file mode 100644
index 0000000000..d02aa95dc8
--- /dev/null
+++ b/webrtc/modules/include/module.h
@@ -0,0 +1,81 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_INCLUDE_MODULE_H_
+#define WEBRTC_MODULES_INCLUDE_MODULE_H_
+
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+class ProcessThread;
+
+class Module {
+ public:
+ // Returns the number of milliseconds until the module wants a worker
+ // thread to call Process.
+ // This method is called on the same worker thread that Process will
+ // be called on.
+ // TODO(tommi): Almost all implementations of this function need to know
+ // the current tick count. Consider passing it as an argument. It could
+ // also improve the accuracy of when the next callback occurs, since the
+ // thread that calls Process() will also have its own tick count reference,
+ // which might not match what the implementations use.
+ virtual int64_t TimeUntilNextProcess() = 0;
+
+ // Process any pending tasks such as timeouts.
+ // Called on a worker thread.
+ virtual int32_t Process() = 0;
+
+ // This method is called when the module is attached to a *running* process
+ // thread or detached from one. In the case of detaching, |process_thread|
+ // will be nullptr.
+ //
+ // This method will be called in the following cases:
+ //
+ // * Non-null process_thread:
+ // * ProcessThread::RegisterModule() is called while the thread is running.
+ // * ProcessThread::Start() is called and RegisterModule has previously
+ // been called. The thread will be started immediately after notifying
+ // all modules.
+ //
+ // * Null process_thread:
+ // * ProcessThread::DeRegisterModule() is called while the thread is
+ // running.
+ // * ProcessThread::Stop() was called and the thread has been stopped.
+ //
+ // NOTE: This method is not called from the worker thread itself, but from
+ // the thread that registers/deregisters the module or calls Start/Stop.
+ virtual void ProcessThreadAttached(ProcessThread* process_thread) {}
+
+ protected:
+ virtual ~Module() {}
+};
+
+// Reference counted version of the Module interface.
+class RefCountedModule : public Module {
+ public:
+ // Increase the reference count by one.
+ // Returns the incremented reference count.
+ virtual int32_t AddRef() const = 0;
+
+ // Decrease the reference count by one.
+ // Returns the decreased reference count.
+ // Returns 0 if the last reference was just released.
+ // When the reference count reaches 0 the object will self-destruct.
+ virtual int32_t Release() const = 0;
+
+ protected:
+ ~RefCountedModule() override = default;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_INCLUDE_MODULE_H_
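For orientation, a minimal sketch of a Module implementation that wants to be processed every 10 ms. This is illustrative only: PeriodicModule is not a class in this tree, and NowMs() stands in for whatever monotonic millisecond clock an implementation uses:

    class PeriodicModule : public webrtc::Module {
     public:
      // Tell the process thread how long it may wait before calling Process().
      int64_t TimeUntilNextProcess() override {
        return next_process_ms_ - NowMs();
      }

      // Do the periodic work and schedule the next run.
      int32_t Process() override {
        next_process_ms_ = NowMs() + kPeriodMs;
        // ... handle timeouts, update statistics ...
        return 0;
      }

      // |process_thread| is non-null on attach and null on detach.
      void ProcessThreadAttached(webrtc::ProcessThread* process_thread) override {
        attached_ = (process_thread != nullptr);
      }

     private:
      static const int64_t kPeriodMs = 10;
      static int64_t NowMs();  // Assumed: any monotonic millisecond clock.
      int64_t next_process_ms_ = 0;
      bool attached_ = false;
    };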
diff --git a/webrtc/modules/include/module_common_types.h b/webrtc/modules/include/module_common_types.h
new file mode 100644
index 0000000000..89c5f1b49b
--- /dev/null
+++ b/webrtc/modules/include/module_common_types.h
@@ -0,0 +1,809 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_INCLUDE_MODULE_COMMON_TYPES_H_
+#define WEBRTC_MODULES_INCLUDE_MODULE_COMMON_TYPES_H_
+
+#include <assert.h>
+#include <string.h> // memcpy
+
+#include <algorithm>
+#include <limits>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/common_types.h"
+#include "webrtc/common_video/rotation.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+struct RTPAudioHeader {
+ uint8_t numEnergy; // number of valid entries in arrOfEnergy
+ uint8_t arrOfEnergy[kRtpCsrcSize]; // one energy byte (0-9) per channel
+ bool isCNG; // is this CNG
+ size_t channel; // number of channels; 2 = stereo
+};
+
+const int16_t kNoPictureId = -1;
+const int16_t kMaxOneBytePictureId = 0x7F; // 7 bits
+const int16_t kMaxTwoBytePictureId = 0x7FFF; // 15 bits
+const int16_t kNoTl0PicIdx = -1;
+const uint8_t kNoTemporalIdx = 0xFF;
+const uint8_t kNoSpatialIdx = 0xFF;
+const uint8_t kNoGofIdx = 0xFF;
+const uint8_t kNumVp9Buffers = 8;
+const size_t kMaxVp9RefPics = 3;
+const size_t kMaxVp9FramesInGof = 0xFF; // 8 bits
+const size_t kMaxVp9NumberOfSpatialLayers = 8;
+const int kNoKeyIdx = -1;
+
+struct RTPVideoHeaderVP8 {
+ void InitRTPVideoHeaderVP8() {
+ nonReference = false;
+ pictureId = kNoPictureId;
+ tl0PicIdx = kNoTl0PicIdx;
+ temporalIdx = kNoTemporalIdx;
+ layerSync = false;
+ keyIdx = kNoKeyIdx;
+ partitionId = 0;
+ beginningOfPartition = false;
+ }
+
+ bool nonReference; // Frame is discardable.
+ int16_t pictureId; // Picture ID index, 15 bits;
+ // kNoPictureId if PictureID does not exist.
+ int16_t tl0PicIdx; // TL0PIC_IDX, 8 bits;
+ // kNoTl0PicIdx means no value provided.
+ uint8_t temporalIdx; // Temporal layer index, or kNoTemporalIdx.
+ bool layerSync; // This frame is a layer sync frame.
+ // Disabled if temporalIdx == kNoTemporalIdx.
+ int keyIdx; // 5 bits; kNoKeyIdx means not used.
+ int partitionId; // VP8 partition ID
+ bool beginningOfPartition; // True if this packet is the first
+ // in a VP8 partition. Otherwise false
+};
+
+enum TemporalStructureMode {
+ kTemporalStructureMode1, // 1 temporal layer structure - i.e., IPPP...
+ kTemporalStructureMode2, // 2 temporal layers 0-1-0-1...
+ kTemporalStructureMode3 // 3 temporal layers 0-2-1-2-0-2-1-2...
+};
+
+struct GofInfoVP9 {
+ void SetGofInfoVP9(TemporalStructureMode tm) {
+ switch (tm) {
+ case kTemporalStructureMode1:
+ num_frames_in_gof = 1;
+ temporal_idx[0] = 0;
+ temporal_up_switch[0] = false;
+ num_ref_pics[0] = 1;
+ pid_diff[0][0] = 1;
+ break;
+ case kTemporalStructureMode2:
+ num_frames_in_gof = 2;
+ temporal_idx[0] = 0;
+ temporal_up_switch[0] = false;
+ num_ref_pics[0] = 1;
+ pid_diff[0][0] = 2;
+
+ temporal_idx[1] = 1;
+ temporal_up_switch[1] = true;
+ num_ref_pics[1] = 1;
+ pid_diff[1][0] = 1;
+ break;
+ case kTemporalStructureMode3:
+ num_frames_in_gof = 4;
+ temporal_idx[0] = 0;
+ temporal_up_switch[0] = false;
+ num_ref_pics[0] = 1;
+ pid_diff[0][0] = 4;
+
+ temporal_idx[1] = 2;
+ temporal_up_switch[1] = true;
+ num_ref_pics[1] = 1;
+ pid_diff[1][0] = 1;
+
+ temporal_idx[2] = 1;
+ temporal_up_switch[2] = true;
+ num_ref_pics[2] = 1;
+ pid_diff[2][0] = 2;
+
+ temporal_idx[3] = 2;
+ temporal_up_switch[3] = false;
+ num_ref_pics[3] = 2;
+ pid_diff[3][0] = 1;
+ pid_diff[3][1] = 2;
+ break;
+ default:
+ assert(false);
+ }
+ }
+
+ void CopyGofInfoVP9(const GofInfoVP9& src) {
+ num_frames_in_gof = src.num_frames_in_gof;
+ for (size_t i = 0; i < num_frames_in_gof; ++i) {
+ temporal_idx[i] = src.temporal_idx[i];
+ temporal_up_switch[i] = src.temporal_up_switch[i];
+ num_ref_pics[i] = src.num_ref_pics[i];
+ for (uint8_t r = 0; r < num_ref_pics[i]; ++r) {
+ pid_diff[i][r] = src.pid_diff[i][r];
+ }
+ }
+ }
+
+ size_t num_frames_in_gof;
+ uint8_t temporal_idx[kMaxVp9FramesInGof];
+ bool temporal_up_switch[kMaxVp9FramesInGof];
+ uint8_t num_ref_pics[kMaxVp9FramesInGof];
+ uint8_t pid_diff[kMaxVp9FramesInGof][kMaxVp9RefPics];
+};
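+
+// For example, kTemporalStructureMode2 above describes pictures alternating
+// between temporal layers 0 and 1: each TL0 picture references the previous
+// TL0 picture (pid_diff 2), and each TL1 picture references the picture
+// immediately before it (pid_diff 1).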
+
+struct RTPVideoHeaderVP9 {
+ void InitRTPVideoHeaderVP9() {
+ inter_pic_predicted = false;
+ flexible_mode = false;
+ beginning_of_frame = false;
+ end_of_frame = false;
+ ss_data_available = false;
+ picture_id = kNoPictureId;
+ max_picture_id = kMaxTwoBytePictureId;
+ tl0_pic_idx = kNoTl0PicIdx;
+ temporal_idx = kNoTemporalIdx;
+ spatial_idx = kNoSpatialIdx;
+ temporal_up_switch = false;
+ inter_layer_predicted = false;
+ gof_idx = kNoGofIdx;
+ num_ref_pics = 0;
+ num_spatial_layers = 1;
+ }
+
+ bool inter_pic_predicted; // This layer frame is dependent on previously
+ // coded frame(s).
+ bool flexible_mode; // This frame is in flexible mode.
+ bool beginning_of_frame; // True if this packet is the first in a VP9 layer
+ // frame.
+ bool end_of_frame; // True if this packet is the last in a VP9 layer frame.
+ bool ss_data_available; // True if SS data is available in this payload
+ // descriptor.
+ int16_t picture_id; // PictureID index, 15 bits;
+ // kNoPictureId if PictureID does not exist.
+ int16_t max_picture_id; // Maximum picture ID index; either 0x7F or 0x7FFF;
+ int16_t tl0_pic_idx; // TL0PIC_IDX, 8 bits;
+ // kNoTl0PicIdx means no value provided.
+ uint8_t temporal_idx; // Temporal layer index, or kNoTemporalIdx.
+ uint8_t spatial_idx; // Spatial layer index, or kNoSpatialIdx.
+ bool temporal_up_switch; // True if upswitch to higher frame rate is possible
+ // starting from this frame.
+ bool inter_layer_predicted; // Frame is dependent on directly lower spatial
+ // layer frame.
+
+ uint8_t gof_idx; // Index to predefined temporal frame info in SS data.
+
+ uint8_t num_ref_pics; // Number of reference pictures used by this layer
+ // frame.
+ uint8_t pid_diff[kMaxVp9RefPics]; // P_DIFF signaled to derive the PictureID
+ // of the reference pictures.
+ int16_t ref_picture_id[kMaxVp9RefPics]; // PictureID of reference pictures.
+
+ // SS data.
+ size_t num_spatial_layers; // Always populated.
+ bool spatial_layer_resolution_present;
+ uint16_t width[kMaxVp9NumberOfSpatialLayers];
+ uint16_t height[kMaxVp9NumberOfSpatialLayers];
+ GofInfoVP9 gof;
+};
+
+// The packetization types that we support: single, aggregated, and fragmented.
+enum H264PacketizationTypes {
+ kH264SingleNalu, // This packet contains a single NAL unit.
+ kH264StapA, // This packet contains STAP-A (single time
+ // aggregation) packets. If this packet has an
+ // associated NAL unit type, it'll be for the
+ // first such aggregated packet.
+ kH264FuA, // This packet contains a FU-A (fragmentation
+ // unit) packet, meaning it is a part of a frame
+ // that was too large to fit into a single packet.
+};
+
+struct RTPVideoHeaderH264 {
+ uint8_t nalu_type; // The NAL unit type. If this is a header for a
+ // fragmented packet, it's the NAL unit type of
+ // the original data. If this is the header for an
+ // aggregated packet, it's the NAL unit type of
+ // the first NAL unit in the packet.
+ H264PacketizationTypes packetization_type;
+};
+
+union RTPVideoTypeHeader {
+ RTPVideoHeaderVP8 VP8;
+ RTPVideoHeaderVP9 VP9;
+ RTPVideoHeaderH264 H264;
+};
+
+enum RtpVideoCodecTypes {
+ kRtpVideoNone,
+ kRtpVideoGeneric,
+ kRtpVideoVp8,
+ kRtpVideoVp9,
+ kRtpVideoH264
+};
+// Since RTPVideoHeader is used as a member of a union, it can't have a
+// non-trivial default constructor.
+struct RTPVideoHeader {
+ uint16_t width; // size
+ uint16_t height;
+ VideoRotation rotation;
+
+ bool isFirstPacket; // first packet in frame
+ uint8_t simulcastIdx; // Index of the simulcast encoder creating
+ // this frame, 0 if not using simulcast.
+ RtpVideoCodecTypes codec;
+ RTPVideoTypeHeader codecHeader;
+};
+union RTPTypeHeader {
+ RTPAudioHeader Audio;
+ RTPVideoHeader Video;
+};
+
+struct WebRtcRTPHeader {
+ RTPHeader header;
+ FrameType frameType;
+ RTPTypeHeader type;
+ // NTP time of the capture time in local timebase in milliseconds.
+ int64_t ntp_time_ms;
+};
+
+class RTPFragmentationHeader {
+ public:
+ RTPFragmentationHeader()
+ : fragmentationVectorSize(0),
+ fragmentationOffset(NULL),
+ fragmentationLength(NULL),
+ fragmentationTimeDiff(NULL),
+ fragmentationPlType(NULL) {}
+
+ ~RTPFragmentationHeader() {
+ delete[] fragmentationOffset;
+ delete[] fragmentationLength;
+ delete[] fragmentationTimeDiff;
+ delete[] fragmentationPlType;
+ }
+
+ void CopyFrom(const RTPFragmentationHeader& src) {
+ if (this == &src) {
+ return;
+ }
+
+ if (src.fragmentationVectorSize != fragmentationVectorSize) {
+ // new size of vectors
+
+ // delete old
+ delete[] fragmentationOffset;
+ fragmentationOffset = NULL;
+ delete[] fragmentationLength;
+ fragmentationLength = NULL;
+ delete[] fragmentationTimeDiff;
+ fragmentationTimeDiff = NULL;
+ delete[] fragmentationPlType;
+ fragmentationPlType = NULL;
+
+ if (src.fragmentationVectorSize > 0) {
+ // allocate new
+ if (src.fragmentationOffset) {
+ fragmentationOffset = new size_t[src.fragmentationVectorSize];
+ }
+ if (src.fragmentationLength) {
+ fragmentationLength = new size_t[src.fragmentationVectorSize];
+ }
+ if (src.fragmentationTimeDiff) {
+ fragmentationTimeDiff = new uint16_t[src.fragmentationVectorSize];
+ }
+ if (src.fragmentationPlType) {
+ fragmentationPlType = new uint8_t[src.fragmentationVectorSize];
+ }
+ }
+ // set new size
+ fragmentationVectorSize = src.fragmentationVectorSize;
+ }
+
+ if (src.fragmentationVectorSize > 0) {
+ // copy values
+ if (src.fragmentationOffset) {
+ memcpy(fragmentationOffset, src.fragmentationOffset,
+ src.fragmentationVectorSize * sizeof(size_t));
+ }
+ if (src.fragmentationLength) {
+ memcpy(fragmentationLength, src.fragmentationLength,
+ src.fragmentationVectorSize * sizeof(size_t));
+ }
+ if (src.fragmentationTimeDiff) {
+ memcpy(fragmentationTimeDiff, src.fragmentationTimeDiff,
+ src.fragmentationVectorSize * sizeof(uint16_t));
+ }
+ if (src.fragmentationPlType) {
+ memcpy(fragmentationPlType, src.fragmentationPlType,
+ src.fragmentationVectorSize * sizeof(uint8_t));
+ }
+ }
+ }
+
+ void VerifyAndAllocateFragmentationHeader(const size_t size) {
+ assert(size <= std::numeric_limits<uint16_t>::max());
+ const uint16_t size16 = static_cast<uint16_t>(size);
+ if (fragmentationVectorSize < size16) {
+ uint16_t oldVectorSize = fragmentationVectorSize;
+ {
+ // offset
+ size_t* oldOffsets = fragmentationOffset;
+ fragmentationOffset = new size_t[size16];
+ memset(fragmentationOffset + oldVectorSize, 0,
+ sizeof(size_t) * (size16 - oldVectorSize));
+ // copy old values
+ memcpy(fragmentationOffset, oldOffsets,
+ sizeof(size_t) * oldVectorSize);
+ delete[] oldOffsets;
+ }
+ // length
+ {
+ size_t* oldLengths = fragmentationLength;
+ fragmentationLength = new size_t[size16];
+ memset(fragmentationLength + oldVectorSize, 0,
+ sizeof(size_t) * (size16 - oldVectorSize));
+ memcpy(fragmentationLength, oldLengths,
+ sizeof(size_t) * oldVectorSize);
+ delete[] oldLengths;
+ }
+ // time diff
+ {
+ uint16_t* oldTimeDiffs = fragmentationTimeDiff;
+ fragmentationTimeDiff = new uint16_t[size16];
+ memset(fragmentationTimeDiff + oldVectorSize, 0,
+ sizeof(uint16_t) * (size16 - oldVectorSize));
+ memcpy(fragmentationTimeDiff, oldTimeDiffs,
+ sizeof(uint16_t) * oldVectorSize);
+ delete[] oldTimeDiffs;
+ }
+ // payload type
+ {
+ uint8_t* oldTimePlTypes = fragmentationPlType;
+ fragmentationPlType = new uint8_t[size16];
+ memset(fragmentationPlType + oldVectorSize, 0,
+ sizeof(uint8_t) * (size16 - oldVectorSize));
+ memcpy(fragmentationPlType, oldTimePlTypes,
+ sizeof(uint8_t) * oldVectorSize);
+ delete[] oldTimePlTypes;
+ }
+ fragmentationVectorSize = size16;
+ }
+ }
+
+ uint16_t fragmentationVectorSize; // Number of fragmentations
+ size_t* fragmentationOffset; // Offset of pointer to data for each
+ // fragmentation
+ size_t* fragmentationLength; // Data size for each fragmentation
+ uint16_t* fragmentationTimeDiff; // Timestamp difference relative to "now"
+ // for each fragmentation
+ uint8_t* fragmentationPlType; // Payload type of each fragmentation
+
+ private:
+ RTC_DISALLOW_COPY_AND_ASSIGN(RTPFragmentationHeader);
+};
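+
+// Note on VerifyAndAllocateFragmentationHeader(): it only ever grows the four
+// arrays. Existing entries are copied over and the new tail is zero-filled;
+// requesting a size smaller than fragmentationVectorSize is a no-op.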
+
+struct RTCPVoIPMetric {
+ // RFC 3611 4.7
+ uint8_t lossRate;
+ uint8_t discardRate;
+ uint8_t burstDensity;
+ uint8_t gapDensity;
+ uint16_t burstDuration;
+ uint16_t gapDuration;
+ uint16_t roundTripDelay;
+ uint16_t endSystemDelay;
+ uint8_t signalLevel;
+ uint8_t noiseLevel;
+ uint8_t RERL;
+ uint8_t Gmin;
+ uint8_t Rfactor;
+ uint8_t extRfactor;
+ uint8_t MOSLQ;
+ uint8_t MOSCQ;
+ uint8_t RXconfig;
+ uint16_t JBnominal;
+ uint16_t JBmax;
+ uint16_t JBabsMax;
+};
+
+// Types for the FEC packet masks. The type |kFecMaskRandom| is based on a
+// random loss model. The type |kFecMaskBursty| is based on a bursty/consecutive
+// loss model. The packet masks are defined in
+// modules/rtp_rtcp/fec_private_tables_random(bursty).h
+enum FecMaskType {
+ kFecMaskRandom,
+ kFecMaskBursty,
+};
+
+// Struct containing forward error correction settings.
+struct FecProtectionParams {
+ int fec_rate;
+ bool use_uep_protection;
+ int max_fec_frames;
+ FecMaskType fec_mask_type;
+};
+
+// Interface used by the CallStats class to distribute call statistics.
+// Callbacks will be triggered as soon as the class has been registered to a
+// CallStats object using RegisterStatsObserver.
+class CallStatsObserver {
+ public:
+ virtual void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) = 0;
+
+ virtual ~CallStatsObserver() {}
+};
+
+struct VideoContentMetrics {
+ VideoContentMetrics()
+ : motion_magnitude(0.0f),
+ spatial_pred_err(0.0f),
+ spatial_pred_err_h(0.0f),
+ spatial_pred_err_v(0.0f) {}
+
+ void Reset() {
+ motion_magnitude = 0.0f;
+ spatial_pred_err = 0.0f;
+ spatial_pred_err_h = 0.0f;
+ spatial_pred_err_v = 0.0f;
+ }
+ float motion_magnitude;
+ float spatial_pred_err;
+ float spatial_pred_err_h;
+ float spatial_pred_err_v;
+};
+
+/* This class holds up to 60 ms of super-wideband (32 kHz) stereo audio. It
+ * allows for adding and subtracting frames while keeping track of the resulting
+ * states.
+ *
+ * Notes
+ * - The total number of samples in |data_| is
+ * samples_per_channel_ * num_channels_
+ *
+ * - Stereo data is interleaved starting with the left channel.
+ *
+ * - The + operator assumes that you never add exactly opposite frames when
+ * deciding the resulting state. To subtract frames, use the - operator.
+ */
+class AudioFrame {
+ public:
+ // Stereo, 32 kHz, 60 ms (2 * 32 * 60)
+ static const size_t kMaxDataSizeSamples = 3840;
+
+ enum VADActivity {
+ kVadActive = 0,
+ kVadPassive = 1,
+ kVadUnknown = 2
+ };
+ enum SpeechType {
+ kNormalSpeech = 0,
+ kPLC = 1,
+ kCNG = 2,
+ kPLCCNG = 3,
+ kUndefined = 4
+ };
+
+ AudioFrame();
+ virtual ~AudioFrame() {}
+
+ // Resets all members to their default state, except that the contents of
+ // |data_| are left unmodified.
+ void Reset();
+
+ // |interleaved_| is not changed by this method.
+ void UpdateFrame(int id, uint32_t timestamp, const int16_t* data,
+ size_t samples_per_channel, int sample_rate_hz,
+ SpeechType speech_type, VADActivity vad_activity,
+ size_t num_channels = 1, uint32_t energy = -1);
+
+ AudioFrame& Append(const AudioFrame& rhs);
+
+ void CopyFrom(const AudioFrame& src);
+
+ void Mute();
+
+ AudioFrame& operator>>=(const int rhs);
+ AudioFrame& operator+=(const AudioFrame& rhs);
+ AudioFrame& operator-=(const AudioFrame& rhs);
+
+ int id_;
+ // RTP timestamp of the first sample in the AudioFrame.
+ uint32_t timestamp_;
+ // Time since the first frame in milliseconds.
+ // -1 represents an uninitialized value.
+ int64_t elapsed_time_ms_;
+ // NTP time of the estimated capture time in local timebase in milliseconds.
+ // -1 represents an uninitialized value.
+ int64_t ntp_time_ms_;
+ int16_t data_[kMaxDataSizeSamples];
+ size_t samples_per_channel_;
+ int sample_rate_hz_;
+ size_t num_channels_;
+ SpeechType speech_type_;
+ VADActivity vad_activity_;
+ // Note that there is no guarantee that |energy_| is correct. Any user of this
+ // member must verify that the value is correct.
+ // TODO(henrike) Remove |energy_|.
+ // See https://code.google.com/p/webrtc/issues/detail?id=3315.
+ uint32_t energy_;
+ bool interleaved_;
+
+ private:
+ RTC_DISALLOW_COPY_AND_ASSIGN(AudioFrame);
+};
+
+inline AudioFrame::AudioFrame()
+ : data_() {
+ Reset();
+}
+
+inline void AudioFrame::Reset() {
+ id_ = -1;
+ // TODO(wu): Zero is a valid value for |timestamp_|. We should initialize
+ // to an invalid value, or add a new member to indicate invalidity.
+ timestamp_ = 0;
+ elapsed_time_ms_ = -1;
+ ntp_time_ms_ = -1;
+ samples_per_channel_ = 0;
+ sample_rate_hz_ = 0;
+ num_channels_ = 0;
+ speech_type_ = kUndefined;
+ vad_activity_ = kVadUnknown;
+ energy_ = 0xffffffff;
+ interleaved_ = true;
+}
+
+inline void AudioFrame::UpdateFrame(int id,
+ uint32_t timestamp,
+ const int16_t* data,
+ size_t samples_per_channel,
+ int sample_rate_hz,
+ SpeechType speech_type,
+ VADActivity vad_activity,
+ size_t num_channels,
+ uint32_t energy) {
+ id_ = id;
+ timestamp_ = timestamp;
+ samples_per_channel_ = samples_per_channel;
+ sample_rate_hz_ = sample_rate_hz;
+ speech_type_ = speech_type;
+ vad_activity_ = vad_activity;
+ num_channels_ = num_channels;
+ energy_ = energy;
+
+ const size_t length = samples_per_channel * num_channels;
+ assert(length <= kMaxDataSizeSamples);
+ if (data != NULL) {
+ memcpy(data_, data, sizeof(int16_t) * length);
+ } else {
+ memset(data_, 0, sizeof(int16_t) * length);
+ }
+}
+
+inline void AudioFrame::CopyFrom(const AudioFrame& src) {
+ if (this == &src) return;
+
+ id_ = src.id_;
+ timestamp_ = src.timestamp_;
+ elapsed_time_ms_ = src.elapsed_time_ms_;
+ ntp_time_ms_ = src.ntp_time_ms_;
+ samples_per_channel_ = src.samples_per_channel_;
+ sample_rate_hz_ = src.sample_rate_hz_;
+ speech_type_ = src.speech_type_;
+ vad_activity_ = src.vad_activity_;
+ num_channels_ = src.num_channels_;
+ energy_ = src.energy_;
+ interleaved_ = src.interleaved_;
+
+ const size_t length = samples_per_channel_ * num_channels_;
+ assert(length <= kMaxDataSizeSamples);
+ memcpy(data_, src.data_, sizeof(int16_t) * length);
+}
+
+inline void AudioFrame::Mute() {
+ memset(data_, 0, samples_per_channel_ * num_channels_ * sizeof(int16_t));
+}
+
+inline AudioFrame& AudioFrame::operator>>=(const int rhs) {
+ assert((num_channels_ > 0) && (num_channels_ < 3));
+ if ((num_channels_ > 2) || (num_channels_ < 1)) return *this;
+
+ for (size_t i = 0; i < samples_per_channel_ * num_channels_; i++) {
+ data_[i] = static_cast<int16_t>(data_[i] >> rhs);
+ }
+ return *this;
+}
+
+inline AudioFrame& AudioFrame::Append(const AudioFrame& rhs) {
+ // Sanity check
+ assert((num_channels_ > 0) && (num_channels_ < 3));
+ assert(interleaved_ == rhs.interleaved_);
+ if ((num_channels_ > 2) || (num_channels_ < 1)) return *this;
+ if (num_channels_ != rhs.num_channels_) return *this;
+
+ if ((vad_activity_ == kVadActive) || rhs.vad_activity_ == kVadActive) {
+ vad_activity_ = kVadActive;
+ } else if (vad_activity_ == kVadUnknown || rhs.vad_activity_ == kVadUnknown) {
+ vad_activity_ = kVadUnknown;
+ }
+ if (speech_type_ != rhs.speech_type_) {
+ speech_type_ = kUndefined;
+ }
+
+ size_t offset = samples_per_channel_ * num_channels_;
+ for (size_t i = 0; i < rhs.samples_per_channel_ * rhs.num_channels_; i++) {
+ data_[offset + i] = rhs.data_[i];
+ }
+ samples_per_channel_ += rhs.samples_per_channel_;
+ return *this;
+}
+
+namespace {
+inline int16_t ClampToInt16(int32_t input) {
+ if (input < -0x00008000) {
+ return -0x8000;
+ } else if (input > 0x00007FFF) {
+ return 0x7FFF;
+ } else {
+ return static_cast<int16_t>(input);
+ }
+}
+}
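+
+// ClampToInt16() saturates instead of wrapping: 30000 + 10000 = 40000 clamps
+// to 32767 (0x7FFF), and -30000 - 10000 = -40000 clamps to -32768 (-0x8000).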
+
+inline AudioFrame& AudioFrame::operator+=(const AudioFrame& rhs) {
+ // Sanity check
+ assert((num_channels_ > 0) && (num_channels_ < 3));
+ assert(interleaved_ == rhs.interleaved_);
+ if ((num_channels_ > 2) || (num_channels_ < 1)) return *this;
+ if (num_channels_ != rhs.num_channels_) return *this;
+
+ bool noPrevData = false;
+ if (samples_per_channel_ != rhs.samples_per_channel_) {
+ if (samples_per_channel_ == 0) {
+ // Special case: we have no data to start with.
+ samples_per_channel_ = rhs.samples_per_channel_;
+ noPrevData = true;
+ } else {
+ return *this;
+ }
+ }
+
+ if ((vad_activity_ == kVadActive) || rhs.vad_activity_ == kVadActive) {
+ vad_activity_ = kVadActive;
+ } else if (vad_activity_ == kVadUnknown || rhs.vad_activity_ == kVadUnknown) {
+ vad_activity_ = kVadUnknown;
+ }
+
+ if (speech_type_ != rhs.speech_type_) speech_type_ = kUndefined;
+
+ if (noPrevData) {
+ memcpy(data_, rhs.data_,
+ sizeof(int16_t) * rhs.samples_per_channel_ * num_channels_);
+ } else {
+ // IMPROVEMENT this can be done very fast in assembly
+ for (size_t i = 0; i < samples_per_channel_ * num_channels_; i++) {
+ int32_t wrap_guard =
+ static_cast<int32_t>(data_[i]) + static_cast<int32_t>(rhs.data_[i]);
+ data_[i] = ClampToInt16(wrap_guard);
+ }
+ }
+ energy_ = 0xffffffff;
+ return *this;
+}
+
+inline AudioFrame& AudioFrame::operator-=(const AudioFrame& rhs) {
+ // Sanity check
+ assert((num_channels_ > 0) && (num_channels_ < 3));
+ assert(interleaved_ == rhs.interleaved_);
+ if ((num_channels_ > 2) || (num_channels_ < 1)) return *this;
+
+ if ((samples_per_channel_ != rhs.samples_per_channel_) ||
+ (num_channels_ != rhs.num_channels_)) {
+ return *this;
+ }
+ if ((vad_activity_ != kVadPassive) || rhs.vad_activity_ != kVadPassive) {
+ vad_activity_ = kVadUnknown;
+ }
+ speech_type_ = kUndefined;
+
+ for (size_t i = 0; i < samples_per_channel_ * num_channels_; i++) {
+ int32_t wrap_guard =
+ static_cast<int32_t>(data_[i]) - static_cast<int32_t>(rhs.data_[i]);
+ data_[i] = ClampToInt16(wrap_guard);
+ }
+ energy_ = 0xffffffff;
+ return *this;
+}
+
+inline bool IsNewerSequenceNumber(uint16_t sequence_number,
+ uint16_t prev_sequence_number) {
+ // Distinguish between elements that are exactly 0x8000 apart.
+ // If s1>s2 and |s1-s2| = 0x8000: IsNewer(s1,s2)=true, IsNewer(s2,s1)=false
+ // rather than having IsNewer(s1,s2) = IsNewer(s2,s1) = false.
+ if (static_cast<uint16_t>(sequence_number - prev_sequence_number) == 0x8000) {
+ return sequence_number > prev_sequence_number;
+ }
+ return sequence_number != prev_sequence_number &&
+ static_cast<uint16_t>(sequence_number - prev_sequence_number) < 0x8000;
+}
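+
+// For example, with the uint16_t wraparound above:
+// IsNewerSequenceNumber(2, 0xFFFE) is true (2 - 0xFFFE wraps to 4, which is
+// < 0x8000), while IsNewerSequenceNumber(0xFFFE, 2) is false.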
+
+inline bool IsNewerTimestamp(uint32_t timestamp, uint32_t prev_timestamp) {
+ // Distinguish between elements that are exactly 0x80000000 apart.
+ // If t1>t2 and |t1-t2| = 0x80000000: IsNewer(t1,t2)=true,
+ // IsNewer(t2,t1)=false
+ // rather than having IsNewer(t1,t2) = IsNewer(t2,t1) = false.
+ if (static_cast<uint32_t>(timestamp - prev_timestamp) == 0x80000000) {
+ return timestamp > prev_timestamp;
+ }
+ return timestamp != prev_timestamp &&
+ static_cast<uint32_t>(timestamp - prev_timestamp) < 0x80000000;
+}
+
+inline uint16_t LatestSequenceNumber(uint16_t sequence_number1,
+ uint16_t sequence_number2) {
+ return IsNewerSequenceNumber(sequence_number1, sequence_number2)
+ ? sequence_number1
+ : sequence_number2;
+}
+
+inline uint32_t LatestTimestamp(uint32_t timestamp1, uint32_t timestamp2) {
+ return IsNewerTimestamp(timestamp1, timestamp2) ? timestamp1 : timestamp2;
+}
+
+// Utility class to unwrap a sequence number to a larger type, for easier
+// handling of large ranges. Note that sequence numbers will never be
+// unwrapped to a negative value.
+class SequenceNumberUnwrapper {
+ public:
+ SequenceNumberUnwrapper() : last_seq_(-1) {}
+
+ // Get the unwrapped sequence, but don't update the internal state.
+ int64_t UnwrapWithoutUpdate(uint16_t sequence_number) {
+ if (last_seq_ == -1)
+ return sequence_number;
+
+ uint16_t cropped_last = static_cast<uint16_t>(last_seq_);
+ int64_t delta = sequence_number - cropped_last;
+ if (IsNewerSequenceNumber(sequence_number, cropped_last)) {
+ if (delta < 0)
+ delta += (1 << 16); // Wrap forwards.
+ } else if (delta > 0 && (last_seq_ + delta - (1 << 16)) >= 0) {
+ // If sequence_number is older but delta is positive, this is a backwards
+ // wrap-around. However, don't wrap backwards past 0 (unwrapped).
+ delta -= (1 << 16);
+ }
+
+ return last_seq_ + delta;
+ }
+
+ // Only update the internal state to the specified last (unwrapped) sequence.
+ void UpdateLast(int64_t last_sequence) { last_seq_ = last_sequence; }
+
+ // Unwrap the sequence number and update the internal state.
+ int64_t Unwrap(uint16_t sequence_number) {
+ int64_t unwrapped = UnwrapWithoutUpdate(sequence_number);
+ UpdateLast(unwrapped);
+ return unwrapped;
+ }
+
+ private:
+ int64_t last_seq_;
+};
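+
+// Usage sketch: feeding 0xFFFE, 0xFFFF, 0x0000, 0x0001 to Unwrap() in turn
+// yields 65534, 65535, 65536, 65537; the forward wrap keeps counting upwards
+// in the int64_t range instead of restarting at zero.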
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_INCLUDE_MODULE_COMMON_TYPES_H_
diff --git a/webrtc/modules/interface/module.h b/webrtc/modules/interface/module.h
deleted file mode 100644
index ffd3065a5c..0000000000
--- a/webrtc/modules/interface/module.h
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef MODULES_INTERFACE_MODULE_H_
-#define MODULES_INTERFACE_MODULE_H_
-
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-class ProcessThread;
-
-class Module {
- public:
- // Returns the number of milliseconds until the module wants a worker
- // thread to call Process.
- // This method is called on the same worker thread as Process will
- // be called on.
- // TODO(tommi): Almost all implementations of this function, need to know
- // the current tick count. Consider passing it as an argument. It could
- // also improve the accuracy of when the next callback occurs since the
- // thread that calls Process() will also have it's tick count reference
- // which might not match with what the implementations use.
- virtual int64_t TimeUntilNextProcess() = 0;
-
- // Process any pending tasks such as timeouts.
- // Called on a worker thread.
- virtual int32_t Process() = 0;
-
- // This method is called when the module is attached to a *running* process
- // thread or detached from one. In the case of detaching, |process_thread|
- // will be nullptr.
- //
- // This method will be called in the following cases:
- //
- // * Non-null process_thread:
- // * ProcessThread::RegisterModule() is called while the thread is running.
- // * ProcessThread::Start() is called and RegisterModule has previously
- // been called. The thread will be started immediately after notifying
- // all modules.
- //
- // * Null process_thread:
- // * ProcessThread::DeRegisterModule() is called while the thread is
- // running.
- // * ProcessThread::Stop() was called and the thread has been stopped.
- //
- // NOTE: This method is not called from the worker thread itself, but from
- // the thread that registers/deregisters the module or calls Start/Stop.
- virtual void ProcessThreadAttached(ProcessThread* process_thread) {}
-
- protected:
- virtual ~Module() {}
-};
-
-// Reference counted version of the Module interface.
-class RefCountedModule : public Module {
- public:
- // Increase the reference count by one.
- // Returns the incremented reference count.
- virtual int32_t AddRef() const = 0;
-
- // Decrease the reference count by one.
- // Returns the decreased reference count.
- // Returns 0 if the last reference was just released.
- // When the reference count reaches 0 the object will self-destruct.
- virtual int32_t Release() const = 0;
-
- protected:
- ~RefCountedModule() override = default;
-};
-
-} // namespace webrtc
-
-#endif // MODULES_INTERFACE_MODULE_H_
diff --git a/webrtc/modules/interface/module_common_types.h b/webrtc/modules/interface/module_common_types.h
deleted file mode 100644
index 45e93d8fad..0000000000
--- a/webrtc/modules/interface/module_common_types.h
+++ /dev/null
@@ -1,810 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef MODULE_COMMON_TYPES_H
-#define MODULE_COMMON_TYPES_H
-
-#include <assert.h>
-#include <string.h> // memcpy
-
-#include <algorithm>
-#include <limits>
-
-#include "webrtc/base/constructormagic.h"
-#include "webrtc/common_types.h"
-#include "webrtc/common_video/rotation.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-struct RTPAudioHeader {
- uint8_t numEnergy; // number of valid entries in arrOfEnergy
- uint8_t arrOfEnergy[kRtpCsrcSize]; // one energy byte (0-9) per channel
- bool isCNG; // is this CNG
- uint8_t channel; // number of channels 2 = stereo
-};
-
-const int16_t kNoPictureId = -1;
-const int16_t kMaxOneBytePictureId = 0x7F; // 7 bits
-const int16_t kMaxTwoBytePictureId = 0x7FFF; // 15 bits
-const int16_t kNoTl0PicIdx = -1;
-const uint8_t kNoTemporalIdx = 0xFF;
-const uint8_t kNoSpatialIdx = 0xFF;
-const uint8_t kNoGofIdx = 0xFF;
-const size_t kMaxVp9RefPics = 3;
-const size_t kMaxVp9FramesInGof = 0xFF; // 8 bits
-const size_t kMaxVp9NumberOfSpatialLayers = 8;
-const int kNoKeyIdx = -1;
-
-struct RTPVideoHeaderVP8 {
- void InitRTPVideoHeaderVP8() {
- nonReference = false;
- pictureId = kNoPictureId;
- tl0PicIdx = kNoTl0PicIdx;
- temporalIdx = kNoTemporalIdx;
- layerSync = false;
- keyIdx = kNoKeyIdx;
- partitionId = 0;
- beginningOfPartition = false;
- }
-
- bool nonReference; // Frame is discardable.
- int16_t pictureId; // Picture ID index, 15 bits;
- // kNoPictureId if PictureID does not exist.
- int16_t tl0PicIdx; // TL0PIC_IDX, 8 bits;
- // kNoTl0PicIdx means no value provided.
- uint8_t temporalIdx; // Temporal layer index, or kNoTemporalIdx.
- bool layerSync; // This frame is a layer sync frame.
- // Disabled if temporalIdx == kNoTemporalIdx.
- int keyIdx; // 5 bits; kNoKeyIdx means not used.
- int partitionId; // VP8 partition ID
- bool beginningOfPartition; // True if this packet is the first
- // in a VP8 partition. Otherwise false
-};
-
-enum TemporalStructureMode {
- kTemporalStructureMode1, // 1 temporal layer structure - i.e., IPPP...
- kTemporalStructureMode2, // 2 temporal layers 0-1-0-1...
- kTemporalStructureMode3 // 3 temporal layers 0-2-1-2-0-2-1-2...
-};
-
-struct GofInfoVP9 {
- void SetGofInfoVP9(TemporalStructureMode tm) {
- switch (tm) {
- case kTemporalStructureMode1:
- num_frames_in_gof = 1;
- temporal_idx[0] = 0;
- temporal_up_switch[0] = false;
- num_ref_pics[0] = 1;
- pid_diff[0][0] = 1;
- break;
- case kTemporalStructureMode2:
- num_frames_in_gof = 2;
- temporal_idx[0] = 0;
- temporal_up_switch[0] = false;
- num_ref_pics[0] = 1;
- pid_diff[0][0] = 2;
-
- temporal_idx[1] = 1;
- temporal_up_switch[1] = true;
- num_ref_pics[1] = 1;
- pid_diff[1][0] = 1;
- break;
- case kTemporalStructureMode3:
- num_frames_in_gof = 4;
- temporal_idx[0] = 0;
- temporal_up_switch[0] = false;
- num_ref_pics[0] = 1;
- pid_diff[0][0] = 4;
-
- temporal_idx[1] = 2;
- temporal_up_switch[1] = true;
- num_ref_pics[1] = 1;
- pid_diff[1][0] = 1;
-
- temporal_idx[2] = 1;
- temporal_up_switch[2] = true;
- num_ref_pics[2] = 1;
- pid_diff[2][0] = 2;
-
- temporal_idx[3] = 2;
- temporal_up_switch[3] = false;
- num_ref_pics[3] = 2;
- pid_diff[3][0] = 1;
- pid_diff[3][1] = 2;
- break;
- default:
- assert(false);
- }
- }
-
- void CopyGofInfoVP9(const GofInfoVP9& src) {
- num_frames_in_gof = src.num_frames_in_gof;
- for (size_t i = 0; i < num_frames_in_gof; ++i) {
- temporal_idx[i] = src.temporal_idx[i];
- temporal_up_switch[i] = src.temporal_up_switch[i];
- num_ref_pics[i] = src.num_ref_pics[i];
- for (size_t r = 0; r < num_ref_pics[i]; ++r) {
- pid_diff[i][r] = src.pid_diff[i][r];
- }
- }
- }
-
- size_t num_frames_in_gof;
- uint8_t temporal_idx[kMaxVp9FramesInGof];
- bool temporal_up_switch[kMaxVp9FramesInGof];
- size_t num_ref_pics[kMaxVp9FramesInGof];
- int16_t pid_diff[kMaxVp9FramesInGof][kMaxVp9RefPics];
-};
-
-struct RTPVideoHeaderVP9 {
- void InitRTPVideoHeaderVP9() {
- inter_pic_predicted = false;
- flexible_mode = false;
- beginning_of_frame = false;
- end_of_frame = false;
- ss_data_available = false;
- picture_id = kNoPictureId;
- max_picture_id = kMaxTwoBytePictureId;
- tl0_pic_idx = kNoTl0PicIdx;
- temporal_idx = kNoTemporalIdx;
- spatial_idx = kNoSpatialIdx;
- temporal_up_switch = false;
- inter_layer_predicted = false;
- gof_idx = kNoGofIdx;
- num_ref_pics = 0;
- num_spatial_layers = 1;
- }
-
- bool inter_pic_predicted; // This layer frame is dependent on previously
- // coded frame(s).
- bool flexible_mode; // This frame is in flexible mode.
- bool beginning_of_frame; // True if this packet is the first in a VP9 layer
- // frame.
- bool end_of_frame; // True if this packet is the last in a VP9 layer frame.
- bool ss_data_available; // True if SS data is available in this payload
- // descriptor.
- int16_t picture_id; // PictureID index, 15 bits;
- // kNoPictureId if PictureID does not exist.
- int16_t max_picture_id; // Maximum picture ID index; either 0x7F or 0x7FFF;
- int16_t tl0_pic_idx; // TL0PIC_IDX, 8 bits;
- // kNoTl0PicIdx means no value provided.
- uint8_t temporal_idx; // Temporal layer index, or kNoTemporalIdx.
- uint8_t spatial_idx; // Spatial layer index, or kNoSpatialIdx.
- bool temporal_up_switch; // True if upswitch to higher frame rate is possible
- // starting from this frame.
- bool inter_layer_predicted; // Frame is dependent on directly lower spatial
- // layer frame.
-
- uint8_t gof_idx; // Index to predefined temporal frame info in SS data.
-
- size_t num_ref_pics; // Number of reference pictures used by this layer
- // frame.
- int16_t pid_diff[kMaxVp9RefPics]; // P_DIFF signaled to derive the PictureID
- // of the reference pictures.
- int16_t ref_picture_id[kMaxVp9RefPics]; // PictureID of reference pictures.
-
- // SS data.
- size_t num_spatial_layers; // Always populated.
- bool spatial_layer_resolution_present;
- uint16_t width[kMaxVp9NumberOfSpatialLayers];
- uint16_t height[kMaxVp9NumberOfSpatialLayers];
- GofInfoVP9 gof;
-};
-
-// The packetization types that we support: single, aggregated, and fragmented.
-enum H264PacketizationTypes {
- kH264SingleNalu, // This packet contains a single NAL unit.
- kH264StapA, // This packet contains STAP-A (single time
- // aggregation) packets. If this packet has an
- // associated NAL unit type, it'll be for the
- // first such aggregated packet.
- kH264FuA, // This packet contains a FU-A (fragmentation
- // unit) packet, meaning it is a part of a frame
- // that was too large to fit into a single packet.
-};
-
-struct RTPVideoHeaderH264 {
- uint8_t nalu_type; // The NAL unit type. If this is a header for a
- // fragmented packet, it's the NAL unit type of
- // the original data. If this is the header for an
- // aggregated packet, it's the NAL unit type of
- // the first NAL unit in the packet.
- H264PacketizationTypes packetization_type;
-};
-
-union RTPVideoTypeHeader {
- RTPVideoHeaderVP8 VP8;
- RTPVideoHeaderVP9 VP9;
- RTPVideoHeaderH264 H264;
-};
-
-enum RtpVideoCodecTypes {
- kRtpVideoNone,
- kRtpVideoGeneric,
- kRtpVideoVp8,
- kRtpVideoVp9,
- kRtpVideoH264
-};
-// Since RTPVideoHeader is used as a member of a union, it can't have a
-// non-trivial default constructor.
-struct RTPVideoHeader {
- uint16_t width; // size
- uint16_t height;
- VideoRotation rotation;
-
- bool isFirstPacket; // first packet in frame
- uint8_t simulcastIdx; // Index if the simulcast encoder creating
- // this frame, 0 if not using simulcast.
- RtpVideoCodecTypes codec;
- RTPVideoTypeHeader codecHeader;
-};
-union RTPTypeHeader {
- RTPAudioHeader Audio;
- RTPVideoHeader Video;
-};
-
-struct WebRtcRTPHeader {
- RTPHeader header;
- FrameType frameType;
- RTPTypeHeader type;
- // NTP time of the capture time in local timebase in milliseconds.
- int64_t ntp_time_ms;
-};
-
-class RTPFragmentationHeader {
- public:
- RTPFragmentationHeader()
- : fragmentationVectorSize(0),
- fragmentationOffset(NULL),
- fragmentationLength(NULL),
- fragmentationTimeDiff(NULL),
- fragmentationPlType(NULL) {};
-
- ~RTPFragmentationHeader() {
- delete[] fragmentationOffset;
- delete[] fragmentationLength;
- delete[] fragmentationTimeDiff;
- delete[] fragmentationPlType;
- }
-
- void CopyFrom(const RTPFragmentationHeader& src) {
- if (this == &src) {
- return;
- }
-
- if (src.fragmentationVectorSize != fragmentationVectorSize) {
- // new size of vectors
-
- // delete old
- delete[] fragmentationOffset;
- fragmentationOffset = NULL;
- delete[] fragmentationLength;
- fragmentationLength = NULL;
- delete[] fragmentationTimeDiff;
- fragmentationTimeDiff = NULL;
- delete[] fragmentationPlType;
- fragmentationPlType = NULL;
-
- if (src.fragmentationVectorSize > 0) {
- // allocate new
- if (src.fragmentationOffset) {
- fragmentationOffset = new size_t[src.fragmentationVectorSize];
- }
- if (src.fragmentationLength) {
- fragmentationLength = new size_t[src.fragmentationVectorSize];
- }
- if (src.fragmentationTimeDiff) {
- fragmentationTimeDiff = new uint16_t[src.fragmentationVectorSize];
- }
- if (src.fragmentationPlType) {
- fragmentationPlType = new uint8_t[src.fragmentationVectorSize];
- }
- }
- // set new size
- fragmentationVectorSize = src.fragmentationVectorSize;
- }
-
- if (src.fragmentationVectorSize > 0) {
- // copy values
- if (src.fragmentationOffset) {
- memcpy(fragmentationOffset, src.fragmentationOffset,
- src.fragmentationVectorSize * sizeof(size_t));
- }
- if (src.fragmentationLength) {
- memcpy(fragmentationLength, src.fragmentationLength,
- src.fragmentationVectorSize * sizeof(size_t));
- }
- if (src.fragmentationTimeDiff) {
- memcpy(fragmentationTimeDiff, src.fragmentationTimeDiff,
- src.fragmentationVectorSize * sizeof(uint16_t));
- }
- if (src.fragmentationPlType) {
- memcpy(fragmentationPlType, src.fragmentationPlType,
- src.fragmentationVectorSize * sizeof(uint8_t));
- }
- }
- }
-
- void VerifyAndAllocateFragmentationHeader(const size_t size) {
- assert(size <= std::numeric_limits<uint16_t>::max());
- const uint16_t size16 = static_cast<uint16_t>(size);
- if (fragmentationVectorSize < size16) {
- uint16_t oldVectorSize = fragmentationVectorSize;
- {
- // offset
- size_t* oldOffsets = fragmentationOffset;
- fragmentationOffset = new size_t[size16];
- memset(fragmentationOffset + oldVectorSize, 0,
- sizeof(size_t) * (size16 - oldVectorSize));
- // copy old values
- memcpy(fragmentationOffset, oldOffsets,
- sizeof(size_t) * oldVectorSize);
- delete[] oldOffsets;
- }
- // length
- {
- size_t* oldLengths = fragmentationLength;
- fragmentationLength = new size_t[size16];
- memset(fragmentationLength + oldVectorSize, 0,
- sizeof(size_t) * (size16 - oldVectorSize));
- memcpy(fragmentationLength, oldLengths,
- sizeof(size_t) * oldVectorSize);
- delete[] oldLengths;
- }
- // time diff
- {
- uint16_t* oldTimeDiffs = fragmentationTimeDiff;
- fragmentationTimeDiff = new uint16_t[size16];
- memset(fragmentationTimeDiff + oldVectorSize, 0,
- sizeof(uint16_t) * (size16 - oldVectorSize));
- memcpy(fragmentationTimeDiff, oldTimeDiffs,
- sizeof(uint16_t) * oldVectorSize);
- delete[] oldTimeDiffs;
- }
- // payload type
- {
- uint8_t* oldTimePlTypes = fragmentationPlType;
- fragmentationPlType = new uint8_t[size16];
- memset(fragmentationPlType + oldVectorSize, 0,
- sizeof(uint8_t) * (size16 - oldVectorSize));
- memcpy(fragmentationPlType, oldTimePlTypes,
- sizeof(uint8_t) * oldVectorSize);
- delete[] oldTimePlTypes;
- }
- fragmentationVectorSize = size16;
- }
- }
-
- uint16_t fragmentationVectorSize; // Number of fragmentations
- size_t* fragmentationOffset; // Offset of pointer to data for each
- // fragmentation
- size_t* fragmentationLength; // Data size for each fragmentation
- uint16_t* fragmentationTimeDiff; // Timestamp difference relative "now" for
- // each fragmentation
- uint8_t* fragmentationPlType; // Payload type of each fragmentation
-
- private:
- RTC_DISALLOW_COPY_AND_ASSIGN(RTPFragmentationHeader);
-};
-
-struct RTCPVoIPMetric {
- // RFC 3611 4.7
- uint8_t lossRate;
- uint8_t discardRate;
- uint8_t burstDensity;
- uint8_t gapDensity;
- uint16_t burstDuration;
- uint16_t gapDuration;
- uint16_t roundTripDelay;
- uint16_t endSystemDelay;
- uint8_t signalLevel;
- uint8_t noiseLevel;
- uint8_t RERL;
- uint8_t Gmin;
- uint8_t Rfactor;
- uint8_t extRfactor;
- uint8_t MOSLQ;
- uint8_t MOSCQ;
- uint8_t RXconfig;
- uint16_t JBnominal;
- uint16_t JBmax;
- uint16_t JBabsMax;
-};
-
-// Types for the FEC packet masks. The type |kFecMaskRandom| is based on a
-// random loss model. The type |kFecMaskBursty| is based on a bursty/consecutive
-// loss model. The packet masks are defined in
-// modules/rtp_rtcp/fec_private_tables_random(bursty).h
-enum FecMaskType {
- kFecMaskRandom,
- kFecMaskBursty,
-};
-
-// Struct containing forward error correction settings.
-struct FecProtectionParams {
- int fec_rate;
- bool use_uep_protection;
- int max_fec_frames;
- FecMaskType fec_mask_type;
-};
-
-// Interface used by the CallStats class to distribute call statistics.
-// Callbacks will be triggered as soon as the class has been registered to a
-// CallStats object using RegisterStatsObserver.
-class CallStatsObserver {
- public:
- virtual void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) = 0;
-
- virtual ~CallStatsObserver() {}
-};
-
-struct VideoContentMetrics {
- VideoContentMetrics()
- : motion_magnitude(0.0f),
- spatial_pred_err(0.0f),
- spatial_pred_err_h(0.0f),
- spatial_pred_err_v(0.0f) {}
-
- void Reset() {
- motion_magnitude = 0.0f;
- spatial_pred_err = 0.0f;
- spatial_pred_err_h = 0.0f;
- spatial_pred_err_v = 0.0f;
- }
- float motion_magnitude;
- float spatial_pred_err;
- float spatial_pred_err_h;
- float spatial_pred_err_v;
-};
-
-/* This class holds up to 60 ms of super-wideband (32 kHz) stereo audio. It
- * allows for adding and subtracting frames while keeping track of the resulting
- * states.
- *
- * Notes
- * - The total number of samples in |data_| is
- * samples_per_channel_ * num_channels_
- *
- * - Stereo data is interleaved starting with the left channel.
- *
- * - The +operator assume that you would never add exactly opposite frames when
- * deciding the resulting state. To do this use the -operator.
- */
-class AudioFrame {
- public:
- // Stereo, 32 kHz, 60 ms (2 * 32 * 60)
- static const size_t kMaxDataSizeSamples = 3840;
-
- enum VADActivity {
- kVadActive = 0,
- kVadPassive = 1,
- kVadUnknown = 2
- };
- enum SpeechType {
- kNormalSpeech = 0,
- kPLC = 1,
- kCNG = 2,
- kPLCCNG = 3,
- kUndefined = 4
- };
-
- AudioFrame();
- virtual ~AudioFrame() {}
-
- // Resets all members to their default state (except does not modify the
- // contents of |data_|).
- void Reset();
-
- // |interleaved_| is not changed by this method.
- void UpdateFrame(int id, uint32_t timestamp, const int16_t* data,
- size_t samples_per_channel, int sample_rate_hz,
- SpeechType speech_type, VADActivity vad_activity,
- int num_channels = 1, uint32_t energy = -1);
-
- AudioFrame& Append(const AudioFrame& rhs);
-
- void CopyFrom(const AudioFrame& src);
-
- void Mute();
-
- AudioFrame& operator>>=(const int rhs);
- AudioFrame& operator+=(const AudioFrame& rhs);
- AudioFrame& operator-=(const AudioFrame& rhs);
-
- int id_;
- // RTP timestamp of the first sample in the AudioFrame.
- uint32_t timestamp_;
- // Time since the first frame in milliseconds.
- // -1 represents an uninitialized value.
- int64_t elapsed_time_ms_;
- // NTP time of the estimated capture time in local timebase in milliseconds.
- // -1 represents an uninitialized value.
- int64_t ntp_time_ms_;
- int16_t data_[kMaxDataSizeSamples];
- size_t samples_per_channel_;
- int sample_rate_hz_;
- int num_channels_;
- SpeechType speech_type_;
- VADActivity vad_activity_;
- // Note that there is no guarantee that |energy_| is correct. Any user of this
- // member must verify that the value is correct.
- // TODO(henrike) Remove |energy_|.
- // See https://code.google.com/p/webrtc/issues/detail?id=3315.
- uint32_t energy_;
- bool interleaved_;
-
- private:
- RTC_DISALLOW_COPY_AND_ASSIGN(AudioFrame);
-};
-
-inline AudioFrame::AudioFrame()
- : data_() {
- Reset();
-}
-
-inline void AudioFrame::Reset() {
- id_ = -1;
- // TODO(wu): Zero is a valid value for |timestamp_|. We should initialize
- // to an invalid value, or add a new member to indicate invalidity.
- timestamp_ = 0;
- elapsed_time_ms_ = -1;
- ntp_time_ms_ = -1;
- samples_per_channel_ = 0;
- sample_rate_hz_ = 0;
- num_channels_ = 0;
- speech_type_ = kUndefined;
- vad_activity_ = kVadUnknown;
- energy_ = 0xffffffff;
- interleaved_ = true;
-}
-
-inline void AudioFrame::UpdateFrame(int id,
- uint32_t timestamp,
- const int16_t* data,
- size_t samples_per_channel,
- int sample_rate_hz,
- SpeechType speech_type,
- VADActivity vad_activity,
- int num_channels,
- uint32_t energy) {
- id_ = id;
- timestamp_ = timestamp;
- samples_per_channel_ = samples_per_channel;
- sample_rate_hz_ = sample_rate_hz;
- speech_type_ = speech_type;
- vad_activity_ = vad_activity;
- num_channels_ = num_channels;
- energy_ = energy;
-
- assert(num_channels >= 0);
- const size_t length = samples_per_channel * num_channels;
- assert(length <= kMaxDataSizeSamples);
- if (data != NULL) {
- memcpy(data_, data, sizeof(int16_t) * length);
- } else {
- memset(data_, 0, sizeof(int16_t) * length);
- }
-}
-
-inline void AudioFrame::CopyFrom(const AudioFrame& src) {
- if (this == &src) return;
-
- id_ = src.id_;
- timestamp_ = src.timestamp_;
- elapsed_time_ms_ = src.elapsed_time_ms_;
- ntp_time_ms_ = src.ntp_time_ms_;
- samples_per_channel_ = src.samples_per_channel_;
- sample_rate_hz_ = src.sample_rate_hz_;
- speech_type_ = src.speech_type_;
- vad_activity_ = src.vad_activity_;
- num_channels_ = src.num_channels_;
- energy_ = src.energy_;
- interleaved_ = src.interleaved_;
-
- assert(num_channels_ >= 0);
- const size_t length = samples_per_channel_ * num_channels_;
- assert(length <= kMaxDataSizeSamples);
- memcpy(data_, src.data_, sizeof(int16_t) * length);
-}
-
-inline void AudioFrame::Mute() {
- memset(data_, 0, samples_per_channel_ * num_channels_ * sizeof(int16_t));
-}
-
-inline AudioFrame& AudioFrame::operator>>=(const int rhs) {
- assert((num_channels_ > 0) && (num_channels_ < 3));
- if ((num_channels_ > 2) || (num_channels_ < 1)) return *this;
-
- for (size_t i = 0; i < samples_per_channel_ * num_channels_; i++) {
- data_[i] = static_cast<int16_t>(data_[i] >> rhs);
- }
- return *this;
-}
-
-inline AudioFrame& AudioFrame::Append(const AudioFrame& rhs) {
- // Sanity check
- assert((num_channels_ > 0) && (num_channels_ < 3));
- assert(interleaved_ == rhs.interleaved_);
- if ((num_channels_ > 2) || (num_channels_ < 1)) return *this;
- if (num_channels_ != rhs.num_channels_) return *this;
-
- if ((vad_activity_ == kVadActive) || rhs.vad_activity_ == kVadActive) {
- vad_activity_ = kVadActive;
- } else if (vad_activity_ == kVadUnknown || rhs.vad_activity_ == kVadUnknown) {
- vad_activity_ = kVadUnknown;
- }
- if (speech_type_ != rhs.speech_type_) {
- speech_type_ = kUndefined;
- }
-
- size_t offset = samples_per_channel_ * num_channels_;
- for (size_t i = 0; i < rhs.samples_per_channel_ * rhs.num_channels_; i++) {
- data_[offset + i] = rhs.data_[i];
- }
- samples_per_channel_ += rhs.samples_per_channel_;
- return *this;
-}
-
-namespace {
-inline int16_t ClampToInt16(int32_t input) {
- if (input < -0x00008000) {
- return -0x8000;
- } else if (input > 0x00007FFF) {
- return 0x7FFF;
- } else {
- return static_cast<int16_t>(input);
- }
-}
-}
-
-inline AudioFrame& AudioFrame::operator+=(const AudioFrame& rhs) {
- // Sanity check
- assert((num_channels_ > 0) && (num_channels_ < 3));
- assert(interleaved_ == rhs.interleaved_);
- if ((num_channels_ > 2) || (num_channels_ < 1)) return *this;
- if (num_channels_ != rhs.num_channels_) return *this;
-
- bool noPrevData = false;
- if (samples_per_channel_ != rhs.samples_per_channel_) {
- if (samples_per_channel_ == 0) {
- // special case we have no data to start with
- samples_per_channel_ = rhs.samples_per_channel_;
- noPrevData = true;
- } else {
- return *this;
- }
- }
-
- if ((vad_activity_ == kVadActive) || rhs.vad_activity_ == kVadActive) {
- vad_activity_ = kVadActive;
- } else if (vad_activity_ == kVadUnknown || rhs.vad_activity_ == kVadUnknown) {
- vad_activity_ = kVadUnknown;
- }
-
- if (speech_type_ != rhs.speech_type_) speech_type_ = kUndefined;
-
- if (noPrevData) {
- memcpy(data_, rhs.data_,
- sizeof(int16_t) * rhs.samples_per_channel_ * num_channels_);
- } else {
- // IMPROVEMENT this can be done very fast in assembly
- for (size_t i = 0; i < samples_per_channel_ * num_channels_; i++) {
- int32_t wrap_guard =
- static_cast<int32_t>(data_[i]) + static_cast<int32_t>(rhs.data_[i]);
- data_[i] = ClampToInt16(wrap_guard);
- }
- }
- energy_ = 0xffffffff;
- return *this;
-}
-
-inline AudioFrame& AudioFrame::operator-=(const AudioFrame& rhs) {
- // Sanity check
- assert((num_channels_ > 0) && (num_channels_ < 3));
- assert(interleaved_ == rhs.interleaved_);
- if ((num_channels_ > 2) || (num_channels_ < 1)) return *this;
-
- if ((samples_per_channel_ != rhs.samples_per_channel_) ||
- (num_channels_ != rhs.num_channels_)) {
- return *this;
- }
- if ((vad_activity_ != kVadPassive) || rhs.vad_activity_ != kVadPassive) {
- vad_activity_ = kVadUnknown;
- }
- speech_type_ = kUndefined;
-
- for (size_t i = 0; i < samples_per_channel_ * num_channels_; i++) {
- int32_t wrap_guard =
- static_cast<int32_t>(data_[i]) - static_cast<int32_t>(rhs.data_[i]);
- data_[i] = ClampToInt16(wrap_guard);
- }
- energy_ = 0xffffffff;
- return *this;
-}
-
-inline bool IsNewerSequenceNumber(uint16_t sequence_number,
- uint16_t prev_sequence_number) {
- // Distinguish between elements that are exactly 0x8000 apart.
- // If s1>s2 and |s1-s2| = 0x8000: IsNewer(s1,s2)=true, IsNewer(s2,s1)=false
- // rather than having IsNewer(s1,s2) = IsNewer(s2,s1) = false.
- if (static_cast<uint16_t>(sequence_number - prev_sequence_number) == 0x8000) {
- return sequence_number > prev_sequence_number;
- }
- return sequence_number != prev_sequence_number &&
- static_cast<uint16_t>(sequence_number - prev_sequence_number) < 0x8000;
-}
-
-inline bool IsNewerTimestamp(uint32_t timestamp, uint32_t prev_timestamp) {
- // Distinguish between elements that are exactly 0x80000000 apart.
- // If t1>t2 and |t1-t2| = 0x80000000: IsNewer(t1,t2)=true,
- // IsNewer(t2,t1)=false
- // rather than having IsNewer(t1,t2) = IsNewer(t2,t1) = false.
- if (static_cast<uint32_t>(timestamp - prev_timestamp) == 0x80000000) {
- return timestamp > prev_timestamp;
- }
- return timestamp != prev_timestamp &&
- static_cast<uint32_t>(timestamp - prev_timestamp) < 0x80000000;
-}
-
-inline uint16_t LatestSequenceNumber(uint16_t sequence_number1,
- uint16_t sequence_number2) {
- return IsNewerSequenceNumber(sequence_number1, sequence_number2)
- ? sequence_number1
- : sequence_number2;
-}
-
-inline uint32_t LatestTimestamp(uint32_t timestamp1, uint32_t timestamp2) {
- return IsNewerTimestamp(timestamp1, timestamp2) ? timestamp1 : timestamp2;
-}
-
-// Utility class to unwrap a sequence number to a larger type, for easier
-// handling large ranges. Note that sequence numbers will never be unwrapped
-// to a negative value.
-class SequenceNumberUnwrapper {
- public:
- SequenceNumberUnwrapper() : last_seq_(-1) {}
-
- // Get the unwrapped sequence, but don't update the internal state.
- int64_t UnwrapWithoutUpdate(uint16_t sequence_number) {
- if (last_seq_ == -1)
- return sequence_number;
-
- uint16_t cropped_last = static_cast<uint16_t>(last_seq_);
- int64_t delta = sequence_number - cropped_last;
- if (IsNewerSequenceNumber(sequence_number, cropped_last)) {
- if (delta < 0)
- delta += (1 << 16); // Wrap forwards.
- } else if (delta > 0 && (last_seq_ + delta - (1 << 16)) >= 0) {
- // If sequence_number is older but delta is positive, this is a backwards
- // wrap-around. However, don't wrap backwards past 0 (unwrapped).
- delta -= (1 << 16);
- }
-
- return last_seq_ + delta;
- }
-
- // Only update the internal state to the specified last (unwrapped) sequence.
- void UpdateLast(int64_t last_sequence) { last_seq_ = last_sequence; }
-
- // Unwrap the sequence number and update the internal state.
- int64_t Unwrap(uint16_t sequence_number) {
- int64_t unwrapped = UnwrapWithoutUpdate(sequence_number);
- UpdateLast(unwrapped);
- return unwrapped;
- }
-
- private:
- int64_t last_seq_;
-};
-
-} // namespace webrtc
-
-#endif // MODULE_COMMON_TYPES_H
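The sequence-number helpers removed above compare 16-bit values modulo 2^16: a difference below 0x8000 counts as "newer", and the exact 0x8000 tie is broken by plain integer order so the relation stays antisymmetric. A minimal sketch, not part of this diff, of how SequenceNumberUnwrapper behaves across a wrap; the include path is illustrative only (this commit moves the header):

#include <cstdint>
#include <iostream>

#include "webrtc/modules/interface/module_common_types.h"  // Illustrative path.

int main() {
  webrtc::SequenceNumberUnwrapper unwrapper;
  // The unwrapped value keeps growing across the 0xFFFF -> 0x0000 wrap.
  std::cout << unwrapper.Unwrap(0xFFFE) << "\n";  // 65534
  std::cout << unwrapper.Unwrap(0xFFFF) << "\n";  // 65535
  std::cout << unwrapper.Unwrap(0x0000) << "\n";  // 65536 (forward wrap)
  std::cout << unwrapper.Unwrap(0x0001) << "\n";  // 65537
  // IsNewerSequenceNumber(0x0001, 0xFFFF) is true because
  // static_cast<uint16_t>(0x0001 - 0xFFFF) == 2, which is below 0x8000.
  return 0;
}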
diff --git a/webrtc/modules/media_file/BUILD.gn b/webrtc/modules/media_file/BUILD.gn
index 05cfb4e555..2a4be728f3 100644
--- a/webrtc/modules/media_file/BUILD.gn
+++ b/webrtc/modules/media_file/BUILD.gn
@@ -14,12 +14,12 @@ config("media_file_config") {
source_set("media_file") {
sources = [
- "interface/media_file.h",
- "interface/media_file_defines.h",
- "source/media_file_impl.cc",
- "source/media_file_impl.h",
- "source/media_file_utility.cc",
- "source/media_file_utility.h",
+ "media_file.h",
+ "media_file_defines.h",
+ "media_file_impl.cc",
+ "media_file_impl.h",
+ "media_file_utility.cc",
+ "media_file_utility.h",
]
if (is_win) {
diff --git a/webrtc/modules/media_file/OWNERS b/webrtc/modules/media_file/OWNERS
index beb9729e04..f6467a4161 100644
--- a/webrtc/modules/media_file/OWNERS
+++ b/webrtc/modules/media_file/OWNERS
@@ -1,5 +1,10 @@
-mflodman@webrtc.org
-perkj@webrtc.org
-niklas.enbom@webrtc.org
-
-per-file BUILD.gn=kjellander@webrtc.org
+mflodman@webrtc.org
+perkj@webrtc.org
+niklas.enbom@webrtc.org
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
+
+per-file BUILD.gn=kjellander@webrtc.org
diff --git a/webrtc/modules/media_file/interface/media_file.h b/webrtc/modules/media_file/interface/media_file.h
deleted file mode 100644
index 5b09ad4383..0000000000
--- a/webrtc/modules/media_file/interface/media_file.h
+++ /dev/null
@@ -1,180 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_MEDIA_FILE_INTERFACE_MEDIA_FILE_H_
-#define WEBRTC_MODULES_MEDIA_FILE_INTERFACE_MEDIA_FILE_H_
-
-#include "webrtc/common_types.h"
-#include "webrtc/modules/interface/module.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/media_file/interface/media_file_defines.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-class MediaFile : public Module
-{
-public:
- // Factory method. Constructor disabled. id is the identifier for the
- // MediaFile instance.
- static MediaFile* CreateMediaFile(const int32_t id);
- static void DestroyMediaFile(MediaFile* module);
-
- // Put 10-60ms of audio data from file into the audioBuffer depending on
- // codec frame size. dataLengthInBytes is both an input and output
- // parameter. As input parameter it indicates the size of audioBuffer.
- // As output parameter it indicates the number of bytes written to
- // audioBuffer.
- // Note: This API only plays mono audio but can be used on a file
- // containing audio with more channels (in which case the audio will be
- // converted to mono).
- virtual int32_t PlayoutAudioData(
- int8_t* audioBuffer,
- size_t& dataLengthInBytes) = 0;
-
- // Put 10-60ms, depending on codec frame size, of audio data from file into
- // audioBufferLeft and audioBufferRight. The buffers contain the left and
- // right channel of played out stereo audio.
- // dataLengthInBytes is both an input and output parameter. As input
- // parameter it indicates the size of both audioBufferLeft and
- // audioBufferRight. As output parameter it indicates the number of bytes
- // written to both audio buffers.
- // Note: This API can only be successfully called for WAV files with stereo
- // audio.
- virtual int32_t PlayoutStereoData(
- int8_t* audioBufferLeft,
- int8_t* audioBufferRight,
- size_t& dataLengthInBytes) = 0;
-
- // Open the file specified by fileName (relative path is allowed) for
- // reading. FileCallback::PlayNotification(..) will be called after
- // notificationTimeMs of the file has been played if notificationTimeMs is
- // greater than zero. If loop is true the file will be played until
- // StopPlaying() is called. When end of file is reached the file is read
- // from the start. format specifies the type of file fileName refers to.
- // codecInst specifies the encoding of the audio data. Note that
- // file formats that contain this information (like WAV files) don't need to
- // provide a non-NULL codecInst. startPointMs and stopPointMs, unless zero,
- // specify what part of the file should be read. From startPointMs ms to
- // stopPointMs ms.
- // Note: codecInst.channels should be set to 2 for stereo (and 1 for
- // mono). Stereo audio is only supported for WAV files.
- virtual int32_t StartPlayingAudioFile(
- const char* fileName,
- const uint32_t notificationTimeMs = 0,
- const bool loop = false,
- const FileFormats format = kFileFormatPcm16kHzFile,
- const CodecInst* codecInst = NULL,
- const uint32_t startPointMs = 0,
- const uint32_t stopPointMs = 0) = 0;
-
- // Prepare for playing audio from stream.
- // FileCallback::PlayNotification(..) will be called after
- // notificationTimeMs of the file has been played if notificationTimeMs is
- // greater than zero. format specifies the type of file fileName refers to.
- // codecInst specifies the encoding of the audio data. Note that
- // file formats that contain this information (like WAV files) don't need to
- // provide a non-NULL codecInst. startPointMs and stopPointMs, unless zero,
- // specify what part of the file should be read. From startPointMs ms to
- // stopPointMs ms.
- // Note: codecInst.channels should be set to 2 for stereo (and 1 for
- // mono). Stereo audio is only supported for WAV files.
- virtual int32_t StartPlayingAudioStream(
- InStream& stream,
- const uint32_t notificationTimeMs = 0,
- const FileFormats format = kFileFormatPcm16kHzFile,
- const CodecInst* codecInst = NULL,
- const uint32_t startPointMs = 0,
- const uint32_t stopPointMs = 0) = 0;
-
- // Stop playing from file or stream.
- virtual int32_t StopPlaying() = 0;
-
- // Return true if playing.
- virtual bool IsPlaying() = 0;
-
-
- // Set durationMs to the number of ms that has been played from file.
- virtual int32_t PlayoutPositionMs(
- uint32_t& durationMs) const = 0;
-
- // Write one audio frame, i.e. the first bufferLength bytes of audioBuffer,
- // to file. The audio frame size is determined by the codecInst.pacsize
- // parameter of the last successful StartRecordingAudioFile(..) call.
- // Note: bufferLength must be exactly one frame.
- virtual int32_t IncomingAudioData(
- const int8_t* audioBuffer,
- const size_t bufferLength) = 0;
-
- // Opens/creates the file specified by fileName for writing (relative path
- // is allowed). FileCallback::RecordNotification(..) will be called after
- // notificationTimeMs of audio data has been recorded if
- // notificationTimeMs is greater than zero.
- // format specifies the type of file that should be created/opened.
- // codecInst specifies the encoding of the audio data. maxSizeBytes
- // specifies the number of bytes allowed to be written to file if it is
- // greater than zero.
- // Note: codecInst.channels should be set to 2 for stereo (and 1 for
- // mono). Stereo is only supported for WAV files.
- virtual int32_t StartRecordingAudioFile(
- const char* fileName,
- const FileFormats format,
- const CodecInst& codecInst,
- const uint32_t notificationTimeMs = 0,
- const uint32_t maxSizeBytes = 0) = 0;
-
- // Prepare for recording audio to stream.
- // FileCallback::RecordNotification(..) will be called after
- // notificationTimeMs of audio data has been recorded if
- // notificationTimeMs is greater than zero.
- // format specifies the type of file that stream should correspond to.
- // codecInst specifies the encoding of the audio data.
- // Note: codecInst.channels should be set to 2 for stereo (and 1 for
- // mono). Stereo is only supported for WAV files.
- virtual int32_t StartRecordingAudioStream(
- OutStream& stream,
- const FileFormats format,
- const CodecInst& codecInst,
- const uint32_t notificationTimeMs = 0) = 0;
-
- // Stop recording to file or stream.
- virtual int32_t StopRecording() = 0;
-
- // Return true if recording.
- virtual bool IsRecording() = 0;
-
- // Set durationMs to the number of ms that has been recorded to file.
- virtual int32_t RecordDurationMs(uint32_t& durationMs) = 0;
-
- // Return true if recording or playing is stereo.
- virtual bool IsStereo() = 0;
-
- // Register callback to receive media file related notifications. Disables
- // callbacks if callback is NULL.
- virtual int32_t SetModuleFileCallback(FileCallback* callback) = 0;
-
- // Set durationMs to the duration (in ms) of the file specified by fileName.
- // format specifies the type of file fileName refers to. freqInHz specifies
- // the sampling frequency of the file.
- virtual int32_t FileDurationMs(
- const char* fileName,
- uint32_t& durationMs,
- const FileFormats format,
- const uint32_t freqInHz = 16000) = 0;
-
- // Update codecInst according to the current audio codec being used for
- // reading or writing.
- virtual int32_t codec_info(CodecInst& codecInst) const = 0;
-
-protected:
- MediaFile() {}
- virtual ~MediaFile() {}
-};
-} // namespace webrtc
-#endif // WEBRTC_MODULES_MEDIA_FILE_INTERFACE_MEDIA_FILE_H_
diff --git a/webrtc/modules/media_file/interface/media_file_defines.h b/webrtc/modules/media_file/interface/media_file_defines.h
deleted file mode 100644
index ded71a8ca7..0000000000
--- a/webrtc/modules/media_file/interface/media_file_defines.h
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_MEDIA_FILE_INTERFACE_MEDIA_FILE_DEFINES_H_
-#define WEBRTC_MODULES_MEDIA_FILE_INTERFACE_MEDIA_FILE_DEFINES_H_
-
-#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-// Callback class for the MediaFile class.
-class FileCallback
-{
-public:
- virtual ~FileCallback(){}
-
- // This function is called by MediaFile when a file has been playing for
- // durationMs ms. id is the identifier for the MediaFile instance calling
- // the callback.
- virtual void PlayNotification(const int32_t id,
- const uint32_t durationMs) = 0;
-
- // This function is called by MediaFile when a file has been recording for
- // durationMs ms. id is the identifier for the MediaFile instance calling
- // the callback.
- virtual void RecordNotification(const int32_t id,
- const uint32_t durationMs) = 0;
-
- // This function is called by MediaFile when playback of a file has
- // stopped. id is the identifier for the MediaFile instance calling the
- // callback.
- virtual void PlayFileEnded(const int32_t id) = 0;
-
- // This function is called by MediaFile when recording to a file has
- // stopped. id is the identifier for the MediaFile instance calling the
- // callback.
- virtual void RecordFileEnded(const int32_t id) = 0;
-
-protected:
- FileCallback() {}
-};
-} // namespace webrtc
-#endif // WEBRTC_MODULES_MEDIA_FILE_INTERFACE_MEDIA_FILE_DEFINES_H_
diff --git a/webrtc/modules/media_file/media_file.gypi b/webrtc/modules/media_file/media_file.gypi
index 4ec80c3c52..94a99a22f1 100644
--- a/webrtc/modules/media_file/media_file.gypi
+++ b/webrtc/modules/media_file/media_file.gypi
@@ -17,12 +17,12 @@
'<(webrtc_root)/common_audio/common_audio.gyp:common_audio',
],
'sources': [
- 'interface/media_file.h',
- 'interface/media_file_defines.h',
- 'source/media_file_impl.cc',
- 'source/media_file_impl.h',
- 'source/media_file_utility.cc',
- 'source/media_file_utility.h',
+ 'media_file.h',
+ 'media_file_defines.h',
+ 'media_file_impl.cc',
+ 'media_file_impl.h',
+ 'media_file_utility.cc',
+ 'media_file_utility.h',
], # source
# TODO(jschuh): Bug 1348: fix size_t to int truncations.
'msvs_disabled_warnings': [ 4267, ],
diff --git a/webrtc/modules/media_file/media_file.h b/webrtc/modules/media_file/media_file.h
new file mode 100644
index 0000000000..f6924d6bb0
--- /dev/null
+++ b/webrtc/modules/media_file/media_file.h
@@ -0,0 +1,180 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_MEDIA_FILE_MEDIA_FILE_H_
+#define WEBRTC_MODULES_MEDIA_FILE_MEDIA_FILE_H_
+
+#include "webrtc/common_types.h"
+#include "webrtc/modules/include/module.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/media_file/media_file_defines.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+class MediaFile : public Module
+{
+public:
+ // Factory method. Constructor disabled. id is the identifier for the
+ // MediaFile instance.
+ static MediaFile* CreateMediaFile(const int32_t id);
+ static void DestroyMediaFile(MediaFile* module);
+
+ // Put 10-60ms of audio data from file into the audioBuffer depending on
+ // codec frame size. dataLengthInBytes is both an input and output
+ // parameter. As input parameter it indicates the size of audioBuffer.
+ // As output parameter it indicates the number of bytes written to
+ // audioBuffer.
+ // Note: This API only plays mono audio but can be used on a file
+ // containing audio with more channels (in which case the audio will be
+ // converted to mono).
+ virtual int32_t PlayoutAudioData(
+ int8_t* audioBuffer,
+ size_t& dataLengthInBytes) = 0;
+
+ // Put 10-60ms, depending on codec frame size, of audio data from file into
+ // audioBufferLeft and audioBufferRight. The buffers contain the left and
+ // right channel of played out stereo audio.
+ // dataLengthInBytes is both an input and output parameter. As input
+ // parameter it indicates the size of both audioBufferLeft and
+ // audioBufferRight. As output parameter it indicates the number of bytes
+ // written to both audio buffers.
+ // Note: This API can only be successfully called for WAV files with stereo
+ // audio.
+ virtual int32_t PlayoutStereoData(
+ int8_t* audioBufferLeft,
+ int8_t* audioBufferRight,
+ size_t& dataLengthInBytes) = 0;
+
+ // Open the file specified by fileName (relative path is allowed) for
+ // reading. FileCallback::PlayNotification(..) will be called after
+ // notificationTimeMs of the file has been played if notificationTimeMs is
+ // greater than zero. If loop is true the file will be played until
+ // StopPlaying() is called. When end of file is reached the file is read
+ // from the start. format specifies the type of file fileName refers to.
+ // codecInst specifies the encoding of the audio data. Note that
+ // file formats that contain this information (like WAV files) don't need to
+ // provide a non-NULL codecInst. startPointMs and stopPointMs, unless zero,
+ // specify what part of the file should be read. From startPointMs ms to
+ // stopPointMs ms.
+ // Note: codecInst.channels should be set to 2 for stereo (and 1 for
+ // mono). Stereo audio is only supported for WAV files.
+ virtual int32_t StartPlayingAudioFile(
+ const char* fileName,
+ const uint32_t notificationTimeMs = 0,
+ const bool loop = false,
+ const FileFormats format = kFileFormatPcm16kHzFile,
+ const CodecInst* codecInst = NULL,
+ const uint32_t startPointMs = 0,
+ const uint32_t stopPointMs = 0) = 0;
+
+ // Prepare for playing audio from stream.
+ // FileCallback::PlayNotification(..) will be called after
+ // notificationTimeMs of the file has been played if notificationTimeMs is
+ // greater than zero. format specifies the type of file fileName refers to.
+ // codecInst specifies the encoding of the audio data. Note that
+ // file formats that contain this information (like WAV files) don't need to
+ // provide a non-NULL codecInst. startPointMs and stopPointMs, unless zero,
+ // specify what part of the file should be read. From startPointMs ms to
+ // stopPointMs ms.
+ // Note: codecInst.channels should be set to 2 for stereo (and 1 for
+ // mono). Stereo audio is only supported for WAV files.
+ virtual int32_t StartPlayingAudioStream(
+ InStream& stream,
+ const uint32_t notificationTimeMs = 0,
+ const FileFormats format = kFileFormatPcm16kHzFile,
+ const CodecInst* codecInst = NULL,
+ const uint32_t startPointMs = 0,
+ const uint32_t stopPointMs = 0) = 0;
+
+ // Stop playing from file or stream.
+ virtual int32_t StopPlaying() = 0;
+
+ // Return true if playing.
+ virtual bool IsPlaying() = 0;
+
+
+ // Set durationMs to the number of ms that has been played from file.
+ virtual int32_t PlayoutPositionMs(
+ uint32_t& durationMs) const = 0;
+
+ // Write one audio frame, i.e. the first bufferLength bytes of audioBuffer,
+ // to file. The audio frame size is determined by the codecInst.pacsize
+ // parameter of the last successful StartRecordingAudioFile(..) call.
+ // Note: bufferLength must be exactly one frame.
+ virtual int32_t IncomingAudioData(
+ const int8_t* audioBuffer,
+ const size_t bufferLength) = 0;
+
+ // Opens/creates the file specified by fileName for writing (relative path
+ // is allowed). FileCallback::RecordNotification(..) will be called after
+ // notificationTimeMs of audio data has been recorded if
+ // notificationTimeMs is greater than zero.
+ // format specifies the type of file that should be created/opened.
+ // codecInst specifies the encoding of the audio data. maxSizeBytes
+ // specifies the number of bytes allowed to be written to file if it is
+ // greater than zero.
+ // Note: codecInst.channels should be set to 2 for stereo (and 1 for
+ // mono). Stereo is only supported for WAV files.
+ virtual int32_t StartRecordingAudioFile(
+ const char* fileName,
+ const FileFormats format,
+ const CodecInst& codecInst,
+ const uint32_t notificationTimeMs = 0,
+ const uint32_t maxSizeBytes = 0) = 0;
+
+ // Prepare for recording audio to stream.
+ // FileCallback::RecordNotification(..) will be called after
+ // notificationTimeMs of audio data has been recorded if
+ // notificationTimeMs is greater than zero.
+ // format specifies the type of file that stream should correspond to.
+ // codecInst specifies the encoding of the audio data.
+ // Note: codecInst.channels should be set to 2 for stereo (and 1 for
+ // mono). Stereo is only supported for WAV files.
+ virtual int32_t StartRecordingAudioStream(
+ OutStream& stream,
+ const FileFormats format,
+ const CodecInst& codecInst,
+ const uint32_t notificationTimeMs = 0) = 0;
+
+ // Stop recording to file or stream.
+ virtual int32_t StopRecording() = 0;
+
+ // Return true if recording.
+ virtual bool IsRecording() = 0;
+
+ // Set durationMs to the number of ms that has been recorded to file.
+ virtual int32_t RecordDurationMs(uint32_t& durationMs) = 0;
+
+ // Return true if recording or playing is stereo.
+ virtual bool IsStereo() = 0;
+
+ // Register callback to receive media file related notifications. Disables
+ // callbacks if callback is NULL.
+ virtual int32_t SetModuleFileCallback(FileCallback* callback) = 0;
+
+ // Set durationMs to the duration (in ms) of the file specified by fileName.
+ // format specifies the type of file fileName refers to. freqInHz specifies
+ // the sampling frequency of the file.
+ virtual int32_t FileDurationMs(
+ const char* fileName,
+ uint32_t& durationMs,
+ const FileFormats format,
+ const uint32_t freqInHz = 16000) = 0;
+
+ // Update codecInst according to the current audio codec being used for
+ // reading or writing.
+ virtual int32_t codec_info(CodecInst& codecInst) const = 0;
+
+protected:
+ MediaFile() {}
+ virtual ~MediaFile() {}
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_MEDIA_FILE_MEDIA_FILE_H_
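A minimal usage sketch of the interface above, not part of this diff: create the module, start WAV playout, and pull audio until end of file. The file name and buffer size are illustrative assumptions; dataLengthInBytes is the buffer capacity on input and the number of bytes delivered on output.

#include <stddef.h>

#include "webrtc/modules/media_file/media_file.h"

void PlayWavFileOnce() {
  webrtc::MediaFile* media_file = webrtc::MediaFile::CreateMediaFile(0);
  // "test.wav" is a hypothetical path.
  if (media_file->StartPlayingAudioFile("test.wav", 0, false,
                                        webrtc::kFileFormatWavFile) != 0) {
    webrtc::MediaFile::DestroyMediaFile(media_file);
    return;
  }
  int8_t buffer[1920];  // Enough for 60 ms of 16 kHz mono PCM.
  while (media_file->IsPlaying()) {
    size_t length = sizeof(buffer);  // In: capacity. Out: bytes delivered.
    if (media_file->PlayoutAudioData(buffer, length) != 0)
      break;
    // ... hand |length| bytes of |buffer| to the audio device here ...
  }
  media_file->StopPlaying();  // Harmless if playout already stopped at EOF.
  webrtc::MediaFile::DestroyMediaFile(media_file);
}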
diff --git a/webrtc/modules/media_file/media_file_defines.h b/webrtc/modules/media_file/media_file_defines.h
new file mode 100644
index 0000000000..a021a148a5
--- /dev/null
+++ b/webrtc/modules/media_file/media_file_defines.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_MEDIA_FILE_MEDIA_FILE_DEFINES_H_
+#define WEBRTC_MODULES_MEDIA_FILE_MEDIA_FILE_DEFINES_H_
+
+#include "webrtc/engine_configurations.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+// Callback class for the MediaFile class.
+class FileCallback
+{
+public:
+ virtual ~FileCallback(){}
+
+ // This function is called by MediaFile when a file has been playing for
+ // durationMs ms. id is the identifier for the MediaFile instance calling
+ // the callback.
+ virtual void PlayNotification(const int32_t id,
+ const uint32_t durationMs) = 0;
+
+ // This function is called by MediaFile when a file has been recording for
+ // durationMs ms. id is the identifier for the MediaFile instance calling
+ // the callback.
+ virtual void RecordNotification(const int32_t id,
+ const uint32_t durationMs) = 0;
+
+ // This function is called by MediaFile when playback of a file has
+ // stopped. id is the identifier for the MediaFile instance calling the
+ // callback.
+ virtual void PlayFileEnded(const int32_t id) = 0;
+
+ // This function is called by MediaFile when recording to a file has
+ // stopped. id is the identifier for the MediaFile instance calling the
+ // callback.
+ virtual void RecordFileEnded(const int32_t id) = 0;
+
+protected:
+ FileCallback() {}
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_MEDIA_FILE_MEDIA_FILE_DEFINES_H_
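A sketch of a FileCallback implementation, not part of this diff, that merely logs each notification; it would be registered through MediaFile::SetModuleFileCallback():

#include <inttypes.h>
#include <stdio.h>

#include "webrtc/modules/media_file/media_file_defines.h"

class LoggingFileCallback : public webrtc::FileCallback {
 public:
  void PlayNotification(const int32_t id, const uint32_t durationMs) override {
    printf("MediaFile %" PRId32 ": played %" PRIu32 " ms\n", id, durationMs);
  }
  void RecordNotification(const int32_t id,
                          const uint32_t durationMs) override {
    printf("MediaFile %" PRId32 ": recorded %" PRIu32 " ms\n", id, durationMs);
  }
  void PlayFileEnded(const int32_t id) override {
    printf("MediaFile %" PRId32 ": playout ended\n", id);
  }
  void RecordFileEnded(const int32_t id) override {
    printf("MediaFile %" PRId32 ": recording ended\n", id);
  }
};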
diff --git a/webrtc/modules/media_file/media_file_impl.cc b/webrtc/modules/media_file/media_file_impl.cc
new file mode 100644
index 0000000000..abc7b9d9e0
--- /dev/null
+++ b/webrtc/modules/media_file/media_file_impl.cc
@@ -0,0 +1,1137 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <assert.h>
+
+#include "webrtc/base/format_macros.h"
+#include "webrtc/modules/media_file/media_file_impl.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/file_wrapper.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+namespace webrtc {
+MediaFile* MediaFile::CreateMediaFile(const int32_t id)
+{
+ return new MediaFileImpl(id);
+}
+
+void MediaFile::DestroyMediaFile(MediaFile* module)
+{
+ delete static_cast<MediaFileImpl*>(module);
+}
+
+MediaFileImpl::MediaFileImpl(const int32_t id)
+ : _id(id),
+ _crit(CriticalSectionWrapper::CreateCriticalSection()),
+ _callbackCrit(CriticalSectionWrapper::CreateCriticalSection()),
+ _ptrFileUtilityObj(NULL),
+ codec_info_(),
+ _ptrInStream(NULL),
+ _ptrOutStream(NULL),
+ _fileFormat((FileFormats)-1),
+ _recordDurationMs(0),
+ _playoutPositionMs(0),
+ _notificationMs(0),
+ _playingActive(false),
+ _recordingActive(false),
+ _isStereo(false),
+ _openFile(false),
+ _fileName(),
+ _ptrCallback(NULL)
+{
+ WEBRTC_TRACE(kTraceMemory, kTraceFile, id, "Created");
+
+ codec_info_.plname[0] = '\0';
+ _fileName[0] = '\0';
+}
+
+
+MediaFileImpl::~MediaFileImpl()
+{
+ WEBRTC_TRACE(kTraceMemory, kTraceFile, _id, "~MediaFileImpl()");
+ {
+ CriticalSectionScoped lock(_crit);
+
+ if(_playingActive)
+ {
+ StopPlaying();
+ }
+
+ if(_recordingActive)
+ {
+ StopRecording();
+ }
+
+ delete _ptrFileUtilityObj;
+
+ if(_openFile)
+ {
+ delete _ptrInStream;
+ _ptrInStream = NULL;
+ delete _ptrOutStream;
+ _ptrOutStream = NULL;
+ }
+ }
+
+ delete _crit;
+ delete _callbackCrit;
+}
+
+int64_t MediaFileImpl::TimeUntilNextProcess()
+{
+ WEBRTC_TRACE(
+ kTraceWarning,
+ kTraceFile,
+ _id,
+ "TimeUntilNextProcess: This method is not used by MediaFile class.");
+ return -1;
+}
+
+int32_t MediaFileImpl::Process()
+{
+ WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
+ "Process: This method is not used by MediaFile class.");
+ return -1;
+}
+
+int32_t MediaFileImpl::PlayoutAudioData(int8_t* buffer,
+ size_t& dataLengthInBytes)
+{
+ WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
+ "MediaFileImpl::PlayoutData(buffer= 0x%x, bufLen= %" PRIuS ")",
+ buffer, dataLengthInBytes);
+
+ const size_t bufferLengthInBytes = dataLengthInBytes;
+ dataLengthInBytes = 0;
+
+ if(buffer == NULL || bufferLengthInBytes == 0)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "Buffer pointer or length is NULL!");
+ return -1;
+ }
+
+ int32_t bytesRead = 0;
+ {
+ CriticalSectionScoped lock(_crit);
+
+ if(!_playingActive)
+ {
+ WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
+ "Not currently playing!");
+ return -1;
+ }
+
+ if(!_ptrFileUtilityObj)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "Playing, but no FileUtility object!");
+ StopPlaying();
+ return -1;
+ }
+
+ switch(_fileFormat)
+ {
+ case kFileFormatPcm32kHzFile:
+ case kFileFormatPcm16kHzFile:
+ case kFileFormatPcm8kHzFile:
+ bytesRead = _ptrFileUtilityObj->ReadPCMData(
+ *_ptrInStream,
+ buffer,
+ bufferLengthInBytes);
+ break;
+ case kFileFormatCompressedFile:
+ bytesRead = _ptrFileUtilityObj->ReadCompressedData(
+ *_ptrInStream,
+ buffer,
+ bufferLengthInBytes);
+ break;
+ case kFileFormatWavFile:
+ bytesRead = _ptrFileUtilityObj->ReadWavDataAsMono(
+ *_ptrInStream,
+ buffer,
+ bufferLengthInBytes);
+ break;
+ case kFileFormatPreencodedFile:
+ bytesRead = _ptrFileUtilityObj->ReadPreEncodedData(
+ *_ptrInStream,
+ buffer,
+ bufferLengthInBytes);
+ if(bytesRead > 0)
+ {
+ dataLengthInBytes = static_cast<size_t>(bytesRead);
+ return 0;
+ }
+ break;
+ default:
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "Invalid file format: %d", _fileFormat);
+ assert(false);
+ break;
+ }
+ }
+
+ if( bytesRead > 0)
+ {
+ dataLengthInBytes = static_cast<size_t>(bytesRead);
+ }
+ }
+ HandlePlayCallbacks(bytesRead);
+ return 0;
+}
+
+void MediaFileImpl::HandlePlayCallbacks(int32_t bytesRead)
+{
+ bool playEnded = false;
+ uint32_t callbackNotifyMs = 0;
+
+ if(bytesRead > 0)
+ {
+ // Check if it's time for PlayNotification(..).
+ _playoutPositionMs = _ptrFileUtilityObj->PlayoutPositionMs();
+ if(_notificationMs)
+ {
+ if(_playoutPositionMs >= _notificationMs)
+ {
+ _notificationMs = 0;
+ callbackNotifyMs = _playoutPositionMs;
+ }
+ }
+ }
+ else
+ {
+ // If no bytes were read assume end of file.
+ StopPlaying();
+ playEnded = true;
+ }
+
+ // Only _callbackCrit may and should be taken when making callbacks.
+ CriticalSectionScoped lock(_callbackCrit);
+ if(_ptrCallback)
+ {
+ if(callbackNotifyMs)
+ {
+ _ptrCallback->PlayNotification(_id, callbackNotifyMs);
+ }
+ if(playEnded)
+ {
+ _ptrCallback->PlayFileEnded(_id);
+ }
+ }
+}
+
+int32_t MediaFileImpl::PlayoutStereoData(
+ int8_t* bufferLeft,
+ int8_t* bufferRight,
+ size_t& dataLengthInBytes)
+{
+ WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
+ "MediaFileImpl::PlayoutStereoData(Left = 0x%x, Right = 0x%x,"
+ " Len= %" PRIuS ")",
+ bufferLeft,
+ bufferRight,
+ dataLengthInBytes);
+
+ const size_t bufferLengthInBytes = dataLengthInBytes;
+ dataLengthInBytes = 0;
+
+ if(bufferLeft == NULL || bufferRight == NULL || bufferLengthInBytes == 0)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "A buffer pointer or the length is NULL!");
+ return -1;
+ }
+
+ bool playEnded = false;
+ uint32_t callbackNotifyMs = 0;
+ {
+ CriticalSectionScoped lock(_crit);
+
+ if(!_playingActive || !_isStereo)
+ {
+ WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
+ "Not currently playing stereo!");
+ return -1;
+ }
+
+ if(!_ptrFileUtilityObj)
+ {
+ WEBRTC_TRACE(
+ kTraceError,
+ kTraceFile,
+ _id,
+ "Playing stereo, but the FileUtility objects is NULL!");
+ StopPlaying();
+ return -1;
+ }
+
+ // Stereo playout only supported for WAV files.
+ int32_t bytesRead = 0;
+ switch(_fileFormat)
+ {
+ case kFileFormatWavFile:
+ bytesRead = _ptrFileUtilityObj->ReadWavDataAsStereo(
+ *_ptrInStream,
+ bufferLeft,
+ bufferRight,
+ bufferLengthInBytes);
+ break;
+ default:
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "Trying to read non-WAV as stereo audio\
+ (not supported)");
+ break;
+ }
+
+ if(bytesRead > 0)
+ {
+ dataLengthInBytes = static_cast<size_t>(bytesRead);
+
+ // Check if it's time for PlayNotification(..).
+ _playoutPositionMs = _ptrFileUtilityObj->PlayoutPositionMs();
+ if(_notificationMs)
+ {
+ if(_playoutPositionMs >= _notificationMs)
+ {
+ _notificationMs = 0;
+ callbackNotifyMs = _playoutPositionMs;
+ }
+ }
+ }
+ else
+ {
+ // If no bytes were read assume end of file.
+ StopPlaying();
+ playEnded = true;
+ }
+ }
+
+ CriticalSectionScoped lock(_callbackCrit);
+ if(_ptrCallback)
+ {
+ if(callbackNotifyMs)
+ {
+ _ptrCallback->PlayNotification(_id, callbackNotifyMs);
+ }
+ if(playEnded)
+ {
+ _ptrCallback->PlayFileEnded(_id);
+ }
+ }
+ return 0;
+}
+
+int32_t MediaFileImpl::StartPlayingAudioFile(
+ const char* fileName,
+ const uint32_t notificationTimeMs,
+ const bool loop,
+ const FileFormats format,
+ const CodecInst* codecInst,
+ const uint32_t startPointMs,
+ const uint32_t stopPointMs)
+{
+ if(!ValidFileName(fileName))
+ {
+ return -1;
+ }
+ if(!ValidFileFormat(format,codecInst))
+ {
+ return -1;
+ }
+ if(!ValidFilePositions(startPointMs,stopPointMs))
+ {
+ return -1;
+ }
+
+ // Check that the file will play longer than notificationTimeMs ms.
+ if((startPointMs && stopPointMs && !loop) &&
+ (notificationTimeMs > (stopPointMs - startPointMs)))
+ {
+ WEBRTC_TRACE(
+ kTraceError,
+ kTraceFile,
+ _id,
+ "specified notification time is longer than amount of ms that will\
+ be played");
+ return -1;
+ }
+
+ FileWrapper* inputStream = FileWrapper::Create();
+ if(inputStream == NULL)
+ {
+ WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
+ "Failed to allocate input stream for file %s", fileName);
+ return -1;
+ }
+
+ if(inputStream->OpenFile(fileName, true, loop) != 0)
+ {
+ delete inputStream;
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "Could not open input file %s", fileName);
+ return -1;
+ }
+
+ if(StartPlayingStream(*inputStream, loop, notificationTimeMs,
+ format, codecInst, startPointMs, stopPointMs) == -1)
+ {
+ inputStream->CloseFile();
+ delete inputStream;
+ return -1;
+ }
+
+ CriticalSectionScoped lock(_crit);
+ _openFile = true;
+ strncpy(_fileName, fileName, sizeof(_fileName));
+ _fileName[sizeof(_fileName) - 1] = '\0';
+ return 0;
+}
+
+int32_t MediaFileImpl::StartPlayingAudioStream(
+ InStream& stream,
+ const uint32_t notificationTimeMs,
+ const FileFormats format,
+ const CodecInst* codecInst,
+ const uint32_t startPointMs,
+ const uint32_t stopPointMs)
+{
+ return StartPlayingStream(stream, false, notificationTimeMs, format,
+ codecInst, startPointMs, stopPointMs);
+}
+
+int32_t MediaFileImpl::StartPlayingStream(
+ InStream& stream,
+ bool loop,
+ const uint32_t notificationTimeMs,
+ const FileFormats format,
+ const CodecInst* codecInst,
+ const uint32_t startPointMs,
+ const uint32_t stopPointMs)
+{
+ if(!ValidFileFormat(format,codecInst))
+ {
+ return -1;
+ }
+
+ if(!ValidFilePositions(startPointMs,stopPointMs))
+ {
+ return -1;
+ }
+
+ CriticalSectionScoped lock(_crit);
+ if(_playingActive || _recordingActive)
+ {
+ WEBRTC_TRACE(
+ kTraceError,
+ kTraceFile,
+ _id,
+ "StartPlaying called, but already playing or recording file %s",
+ (_fileName[0] == '\0') ? "(name not set)" : _fileName);
+ return -1;
+ }
+
+ if(_ptrFileUtilityObj != NULL)
+ {
+ WEBRTC_TRACE(kTraceError,
+ kTraceFile,
+ _id,
+ "StartPlaying called, but FileUtilityObj already exists!");
+ StopPlaying();
+ return -1;
+ }
+
+ _ptrFileUtilityObj = new ModuleFileUtility(_id);
+ if(_ptrFileUtilityObj == NULL)
+ {
+ WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
+ "Failed to create FileUtilityObj!");
+ return -1;
+ }
+
+ switch(format)
+ {
+ case kFileFormatWavFile:
+ {
+ if(_ptrFileUtilityObj->InitWavReading(stream, startPointMs,
+ stopPointMs) == -1)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "Not a valid WAV file!");
+ StopPlaying();
+ return -1;
+ }
+ _fileFormat = kFileFormatWavFile;
+ break;
+ }
+ case kFileFormatCompressedFile:
+ {
+ if(_ptrFileUtilityObj->InitCompressedReading(stream, startPointMs,
+ stopPointMs) == -1)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "Not a valid Compressed file!");
+ StopPlaying();
+ return -1;
+ }
+ _fileFormat = kFileFormatCompressedFile;
+ break;
+ }
+ case kFileFormatPcm8kHzFile:
+ case kFileFormatPcm16kHzFile:
+ case kFileFormatPcm32kHzFile:
+ {
+ // ValidFileFormat(), called at the beginning of this function,
+ // prevents codecInst from being NULL here.
+ assert(codecInst != NULL);
+ if(!ValidFrequency(codecInst->plfreq) ||
+ _ptrFileUtilityObj->InitPCMReading(stream, startPointMs,
+ stopPointMs,
+ codecInst->plfreq) == -1)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "Not a valid raw 8 or 16 KHz PCM file!");
+ StopPlaying();
+ return -1;
+ }
+
+ _fileFormat = format;
+ break;
+ }
+ case kFileFormatPreencodedFile:
+ {
+ // ValidFileFormat(), called at the beginning of this function,
+ // prevents codecInst from being NULL here.
+ assert(codecInst != NULL);
+ if(_ptrFileUtilityObj->InitPreEncodedReading(stream, *codecInst) ==
+ -1)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "Not a valid PreEncoded file!");
+ StopPlaying();
+ return -1;
+ }
+
+ _fileFormat = kFileFormatPreencodedFile;
+ break;
+ }
+ default:
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "Invalid file format: %d", format);
+ assert(false);
+ break;
+ }
+ }
+ if(_ptrFileUtilityObj->codec_info(codec_info_) == -1)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "Failed to retrieve codec info!");
+ StopPlaying();
+ return -1;
+ }
+
+ _isStereo = (codec_info_.channels == 2);
+ if(_isStereo && (_fileFormat != kFileFormatWavFile))
+ {
+ WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
+ "Stereo is only allowed for WAV files");
+ StopPlaying();
+ return -1;
+ }
+ _playingActive = true;
+ _playoutPositionMs = _ptrFileUtilityObj->PlayoutPositionMs();
+ _ptrInStream = &stream;
+ _notificationMs = notificationTimeMs;
+
+ return 0;
+}
+
+int32_t MediaFileImpl::StopPlaying()
+{
+
+ CriticalSectionScoped lock(_crit);
+ _isStereo = false;
+ if(_ptrFileUtilityObj)
+ {
+ delete _ptrFileUtilityObj;
+ _ptrFileUtilityObj = NULL;
+ }
+ if(_ptrInStream)
+ {
+ // If MediaFileImpl opened the InStream it must be reclaimed here.
+ if(_openFile)
+ {
+ delete _ptrInStream;
+ _openFile = false;
+ }
+ _ptrInStream = NULL;
+ }
+
+ codec_info_.pltype = 0;
+ codec_info_.plname[0] = '\0';
+
+ if(!_playingActive)
+ {
+ WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
+ "playing is not active!");
+ return -1;
+ }
+
+ _playingActive = false;
+ return 0;
+}
+
+bool MediaFileImpl::IsPlaying()
+{
+ WEBRTC_TRACE(kTraceStream, kTraceFile, _id, "MediaFileImpl::IsPlaying()");
+ CriticalSectionScoped lock(_crit);
+ return _playingActive;
+}
+
+int32_t MediaFileImpl::IncomingAudioData(
+ const int8_t* buffer,
+ const size_t bufferLengthInBytes)
+{
+ WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
+ "MediaFile::IncomingData(buffer= 0x%x, bufLen= %" PRIuS,
+ buffer, bufferLengthInBytes);
+
+ if(buffer == NULL || bufferLengthInBytes == 0)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "Buffer pointer or length is NULL!");
+ return -1;
+ }
+
+ bool recordingEnded = false;
+ uint32_t callbackNotifyMs = 0;
+ {
+ CriticalSectionScoped lock(_crit);
+
+ if(!_recordingActive)
+ {
+ WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
+ "Not currently recording!");
+ return -1;
+ }
+ if(_ptrOutStream == NULL)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "Recording is active, but output stream is NULL!");
+ assert(false);
+ return -1;
+ }
+
+ int32_t bytesWritten = 0;
+ uint32_t samplesWritten = codec_info_.pacsize;
+ if(_ptrFileUtilityObj)
+ {
+ switch(_fileFormat)
+ {
+ case kFileFormatPcm8kHzFile:
+ case kFileFormatPcm16kHzFile:
+ case kFileFormatPcm32kHzFile:
+ bytesWritten = _ptrFileUtilityObj->WritePCMData(
+ *_ptrOutStream,
+ buffer,
+ bufferLengthInBytes);
+
+ // Sample size is 2 bytes.
+ if(bytesWritten > 0)
+ {
+ samplesWritten = bytesWritten/sizeof(int16_t);
+ }
+ break;
+ case kFileFormatCompressedFile:
+ bytesWritten = _ptrFileUtilityObj->WriteCompressedData(
+ *_ptrOutStream, buffer, bufferLengthInBytes);
+ break;
+ case kFileFormatWavFile:
+ bytesWritten = _ptrFileUtilityObj->WriteWavData(
+ *_ptrOutStream,
+ buffer,
+ bufferLengthInBytes);
+ if(bytesWritten > 0 && STR_NCASE_CMP(codec_info_.plname,
+ "L16", 4) == 0)
+ {
+ // Sample size is 2 bytes.
+ samplesWritten = bytesWritten/sizeof(int16_t);
+ }
+ break;
+ case kFileFormatPreencodedFile:
+ bytesWritten = _ptrFileUtilityObj->WritePreEncodedData(
+ *_ptrOutStream, buffer, bufferLengthInBytes);
+ break;
+ default:
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "Invalid file format: %d", _fileFormat);
+ assert(false);
+ break;
+ }
+ } else {
+ // TODO (hellner): a quick look at the code suggests that this
+ // code is never executed. Remove?
+ if(_ptrOutStream)
+ {
+ if(_ptrOutStream->Write(buffer, bufferLengthInBytes))
+ {
+ bytesWritten = static_cast<int32_t>(bufferLengthInBytes);
+ }
+ }
+ }
+
+ _recordDurationMs += samplesWritten / (codec_info_.plfreq / 1000);
+
+ // Check if it's time for RecordNotification(..).
+ if(_notificationMs)
+ {
+ if(_recordDurationMs >= _notificationMs)
+ {
+ _notificationMs = 0;
+ callbackNotifyMs = _recordDurationMs;
+ }
+ }
+ if(bytesWritten < (int32_t)bufferLengthInBytes)
+ {
+ WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
+ "Failed to write all requested bytes!");
+ StopRecording();
+ recordingEnded = true;
+ }
+ }
+
+ // Only _callbackCrit may and should be taken when making callbacks.
+ CriticalSectionScoped lock(_callbackCrit);
+ if(_ptrCallback)
+ {
+ if(callbackNotifyMs)
+ {
+ _ptrCallback->RecordNotification(_id, callbackNotifyMs);
+ }
+ if(recordingEnded)
+ {
+ _ptrCallback->RecordFileEnded(_id);
+ return -1;
+ }
+ }
+ return 0;
+}
+
+int32_t MediaFileImpl::StartRecordingAudioFile(
+ const char* fileName,
+ const FileFormats format,
+ const CodecInst& codecInst,
+ const uint32_t notificationTimeMs,
+ const uint32_t maxSizeBytes)
+{
+ if(!ValidFileName(fileName))
+ {
+ return -1;
+ }
+ if(!ValidFileFormat(format,&codecInst))
+ {
+ return -1;
+ }
+
+ FileWrapper* outputStream = FileWrapper::Create();
+ if(outputStream == NULL)
+ {
+ WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
+ "Failed to allocate memory for output stream");
+ return -1;
+ }
+
+ if(outputStream->OpenFile(fileName, false) != 0)
+ {
+ delete outputStream;
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "Could not open output file '%s' for writing!",
+ fileName);
+ return -1;
+ }
+
+ if(maxSizeBytes)
+ {
+ outputStream->SetMaxFileSize(maxSizeBytes);
+ }
+
+ if(StartRecordingAudioStream(*outputStream, format, codecInst,
+ notificationTimeMs) == -1)
+ {
+ outputStream->CloseFile();
+ delete outputStream;
+ return -1;
+ }
+
+ CriticalSectionScoped lock(_crit);
+ _openFile = true;
+ strncpy(_fileName, fileName, sizeof(_fileName));
+ _fileName[sizeof(_fileName) - 1] = '\0';
+ return 0;
+}
+
+int32_t MediaFileImpl::StartRecordingAudioStream(
+ OutStream& stream,
+ const FileFormats format,
+ const CodecInst& codecInst,
+ const uint32_t notificationTimeMs)
+{
+ // Check codec info
+ if(!ValidFileFormat(format,&codecInst))
+ {
+ return -1;
+ }
+
+ CriticalSectionScoped lock(_crit);
+ if(_recordingActive || _playingActive)
+ {
+ WEBRTC_TRACE(
+ kTraceError,
+ kTraceFile,
+ _id,
+ "StartRecording called, but already recording or playing file %s!",
+ _fileName);
+ return -1;
+ }
+
+ if(_ptrFileUtilityObj != NULL)
+ {
+ WEBRTC_TRACE(
+ kTraceError,
+ kTraceFile,
+ _id,
+ "StartRecording called, but fileUtilityObj already exists!");
+ StopRecording();
+ return -1;
+ }
+
+ _ptrFileUtilityObj = new ModuleFileUtility(_id);
+ if(_ptrFileUtilityObj == NULL)
+ {
+ WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
+ "Cannot allocate fileUtilityObj!");
+ return -1;
+ }
+
+ CodecInst tmpAudioCodec;
+ memcpy(&tmpAudioCodec, &codecInst, sizeof(CodecInst));
+ switch(format)
+ {
+ case kFileFormatWavFile:
+ {
+ if(_ptrFileUtilityObj->InitWavWriting(stream, codecInst) == -1)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "Failed to initialize WAV file!");
+ delete _ptrFileUtilityObj;
+ _ptrFileUtilityObj = NULL;
+ return -1;
+ }
+ _fileFormat = kFileFormatWavFile;
+ break;
+ }
+ case kFileFormatCompressedFile:
+ {
+ // Write compression codec name at beginning of file
+ if(_ptrFileUtilityObj->InitCompressedWriting(stream, codecInst) ==
+ -1)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "Failed to initialize Compressed file!");
+ delete _ptrFileUtilityObj;
+ _ptrFileUtilityObj = NULL;
+ return -1;
+ }
+ _fileFormat = kFileFormatCompressedFile;
+ break;
+ }
+ case kFileFormatPcm8kHzFile:
+ case kFileFormatPcm16kHzFile:
+ {
+ if(!ValidFrequency(codecInst.plfreq) ||
+ _ptrFileUtilityObj->InitPCMWriting(stream, codecInst.plfreq) ==
+ -1)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "Failed to initialize 8 or 16KHz PCM file!");
+ delete _ptrFileUtilityObj;
+ _ptrFileUtilityObj = NULL;
+ return -1;
+ }
+ _fileFormat = format;
+ break;
+ }
+ case kFileFormatPreencodedFile:
+ {
+ if(_ptrFileUtilityObj->InitPreEncodedWriting(stream, codecInst) ==
+ -1)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "Failed to initialize Pre-Encoded file!");
+ delete _ptrFileUtilityObj;
+ _ptrFileUtilityObj = NULL;
+ return -1;
+ }
+
+ _fileFormat = kFileFormatPreencodedFile;
+ break;
+ }
+ default:
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "Invalid file format %d specified!", format);
+ delete _ptrFileUtilityObj;
+ _ptrFileUtilityObj = NULL;
+ return -1;
+ }
+ }
+ _isStereo = (tmpAudioCodec.channels == 2);
+ if(_isStereo)
+ {
+ if(_fileFormat != kFileFormatWavFile)
+ {
+ WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
+ "Stereo is only allowed for WAV files");
+ StopRecording();
+ return -1;
+ }
+ if((STR_NCASE_CMP(tmpAudioCodec.plname, "L16", 4) != 0) &&
+ (STR_NCASE_CMP(tmpAudioCodec.plname, "PCMU", 5) != 0) &&
+ (STR_NCASE_CMP(tmpAudioCodec.plname, "PCMA", 5) != 0))
+ {
+ WEBRTC_TRACE(
+ kTraceWarning,
+ kTraceFile,
+ _id,
+ "Stereo is only allowed for codec PCMU, PCMA and L16 ");
+ StopRecording();
+ return -1;
+ }
+ }
+ memcpy(&codec_info_, &tmpAudioCodec, sizeof(CodecInst));
+ _recordingActive = true;
+ _ptrOutStream = &stream;
+ _notificationMs = notificationTimeMs;
+ _recordDurationMs = 0;
+ return 0;
+}
+
+int32_t MediaFileImpl::StopRecording()
+{
+
+ CriticalSectionScoped lock(_crit);
+ if(!_recordingActive)
+ {
+ WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
+ "recording is not active!");
+ return -1;
+ }
+
+ _isStereo = false;
+
+ if(_ptrFileUtilityObj != NULL)
+ {
+ // Both the AVI and WAV headers have to be updated before closing the
+ // stream because they contain size information.
+ if((_fileFormat == kFileFormatWavFile) &&
+ (_ptrOutStream != NULL))
+ {
+ _ptrFileUtilityObj->UpdateWavHeader(*_ptrOutStream);
+ }
+ delete _ptrFileUtilityObj;
+ _ptrFileUtilityObj = NULL;
+ }
+
+ if(_ptrOutStream != NULL)
+ {
+ // If MediaFileImpl opened the OutStream it must be reclaimed here.
+ if(_openFile)
+ {
+ delete _ptrOutStream;
+ _openFile = false;
+ }
+ _ptrOutStream = NULL;
+ }
+
+ _recordingActive = false;
+ codec_info_.pltype = 0;
+ codec_info_.plname[0] = '\0';
+
+ return 0;
+}
+
+bool MediaFileImpl::IsRecording()
+{
+ WEBRTC_TRACE(kTraceStream, kTraceFile, _id, "MediaFileImpl::IsRecording()");
+ CriticalSectionScoped lock(_crit);
+ return _recordingActive;
+}
+
+int32_t MediaFileImpl::RecordDurationMs(uint32_t& durationMs)
+{
+
+ CriticalSectionScoped lock(_crit);
+ if(!_recordingActive)
+ {
+ durationMs = 0;
+ return -1;
+ }
+ durationMs = _recordDurationMs;
+ return 0;
+}
+
+bool MediaFileImpl::IsStereo()
+{
+ WEBRTC_TRACE(kTraceStream, kTraceFile, _id, "MediaFileImpl::IsStereo()");
+ CriticalSectionScoped lock(_crit);
+ return _isStereo;
+}
+
+int32_t MediaFileImpl::SetModuleFileCallback(FileCallback* callback)
+{
+
+ CriticalSectionScoped lock(_callbackCrit);
+
+ _ptrCallback = callback;
+ return 0;
+}
+
+int32_t MediaFileImpl::FileDurationMs(const char* fileName,
+ uint32_t& durationMs,
+ const FileFormats format,
+ const uint32_t freqInHz)
+{
+
+ if(!ValidFileName(fileName))
+ {
+ return -1;
+ }
+ if(!ValidFrequency(freqInHz))
+ {
+ return -1;
+ }
+
+ ModuleFileUtility* utilityObj = new ModuleFileUtility(_id);
+ if(utilityObj == NULL)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "failed to allocate utility object!");
+ return -1;
+ }
+
+ const int32_t duration = utilityObj->FileDurationMs(fileName, format,
+ freqInHz);
+ delete utilityObj;
+ if(duration == -1)
+ {
+ durationMs = 0;
+ return -1;
+ }
+
+ durationMs = duration;
+ return 0;
+}
+
+int32_t MediaFileImpl::PlayoutPositionMs(uint32_t& positionMs) const
+{
+ CriticalSectionScoped lock(_crit);
+ if(!_playingActive)
+ {
+ positionMs = 0;
+ return -1;
+ }
+ positionMs = _playoutPositionMs;
+ return 0;
+}
+
+int32_t MediaFileImpl::codec_info(CodecInst& codecInst) const
+{
+ CriticalSectionScoped lock(_crit);
+ if(!_playingActive && !_recordingActive)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "Neither playout nor recording has been initialized!");
+ return -1;
+ }
+ if (codec_info_.pltype == 0 && codec_info_.plname[0] == '\0')
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "The CodecInst for %s is unknown!",
+ _playingActive ? "Playback" : "Recording");
+ return -1;
+ }
+ memcpy(&codecInst,&codec_info_,sizeof(CodecInst));
+ return 0;
+}
+
+bool MediaFileImpl::ValidFileFormat(const FileFormats format,
+ const CodecInst* codecInst)
+{
+ if(codecInst == NULL)
+ {
+ if(format == kFileFormatPreencodedFile ||
+ format == kFileFormatPcm8kHzFile ||
+ format == kFileFormatPcm16kHzFile ||
+ format == kFileFormatPcm32kHzFile)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, -1,
+ "Codec info required for file format specified!");
+ return false;
+ }
+ }
+ return true;
+}
+
+bool MediaFileImpl::ValidFileName(const char* fileName)
+{
+ if((fileName == NULL) ||(fileName[0] == '\0'))
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, -1, "FileName not specified!");
+ return false;
+ }
+ return true;
+}
+
+
+bool MediaFileImpl::ValidFilePositions(const uint32_t startPointMs,
+ const uint32_t stopPointMs)
+{
+ if(startPointMs == 0 && stopPointMs == 0) // Default values
+ {
+ return true;
+ }
+ if(stopPointMs &&(startPointMs >= stopPointMs))
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, -1,
+ "startPointMs must be less than stopPointMs!");
+ return false;
+ }
+ if(stopPointMs &&((stopPointMs - startPointMs) < 20))
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, -1,
+ "minimum play duration for files is 20 ms!");
+ return false;
+ }
+ return true;
+}
+
+bool MediaFileImpl::ValidFrequency(const uint32_t frequency)
+{
+ if((frequency == 8000) || (frequency == 16000) || (frequency == 32000))
+ {
+ return true;
+ }
+ WEBRTC_TRACE(kTraceError, kTraceFile, -1,
+ "Frequency should be 8000, 16000 or 32000 (Hz)");
+ return false;
+}
+} // namespace webrtc
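On the recording path above, IncomingAudioData() advances _recordDurationMs by samplesWritten / (plfreq / 1000), i.e. plfreq/1000 samples per millisecond. A sketch of a matching write loop, not part of this diff; the codec settings mirror the L16/16000 case handled above (and the unit test below), and the output path is a placeholder:

#include <string.h>

#include "webrtc/modules/media_file/media_file.h"

void RecordOneSecondOfSilence() {
  // pltype, plname, plfreq, pacsize, channels.
  webrtc::CodecInst codec = {0, "L16", 16000, 320, 1};
  webrtc::MediaFile* media_file = webrtc::MediaFile::CreateMediaFile(0);
  if (media_file->StartRecordingAudioFile("out.wav",
                                          webrtc::kFileFormatWavFile,
                                          codec) == 0) {
    int8_t frame[320];  // One 10 ms L16 frame: 160 samples * 2 bytes.
    memset(frame, 0, sizeof(frame));
    // Each frame adds 160 / (16000 / 1000) = 10 ms, so 100 frames = 1000 ms.
    for (int i = 0; i < 100; ++i)
      media_file->IncomingAudioData(frame, sizeof(frame));
    uint32_t duration_ms = 0;
    media_file->RecordDurationMs(duration_ms);  // Expected: 1000.
    media_file->StopRecording();
  }
  webrtc::MediaFile::DestroyMediaFile(media_file);
}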
diff --git a/webrtc/modules/media_file/media_file_impl.h b/webrtc/modules/media_file/media_file_impl.h
new file mode 100644
index 0000000000..c23f514c75
--- /dev/null
+++ b/webrtc/modules/media_file/media_file_impl.h
@@ -0,0 +1,148 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_MEDIA_FILE_MEDIA_FILE_IMPL_H_
+#define WEBRTC_MODULES_MEDIA_FILE_MEDIA_FILE_IMPL_H_
+
+#include "webrtc/common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/media_file/media_file.h"
+#include "webrtc/modules/media_file/media_file_defines.h"
+#include "webrtc/modules/media_file/media_file_utility.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+
+namespace webrtc {
+class MediaFileImpl : public MediaFile
+{
+
+public:
+ MediaFileImpl(const int32_t id);
+ ~MediaFileImpl();
+
+ int32_t Process() override;
+ int64_t TimeUntilNextProcess() override;
+
+ // MediaFile functions
+ int32_t PlayoutAudioData(int8_t* audioBuffer,
+ size_t& dataLengthInBytes) override;
+
+ int32_t PlayoutStereoData(int8_t* audioBufferLeft,
+ int8_t* audioBufferRight,
+ size_t& dataLengthInBytes) override;
+
+ int32_t StartPlayingAudioFile(
+ const char* fileName,
+ const uint32_t notificationTimeMs = 0,
+ const bool loop = false,
+ const FileFormats format = kFileFormatPcm16kHzFile,
+ const CodecInst* codecInst = NULL,
+ const uint32_t startPointMs = 0,
+ const uint32_t stopPointMs = 0) override;
+
+ int32_t StartPlayingAudioStream(
+ InStream& stream,
+ const uint32_t notificationTimeMs = 0,
+ const FileFormats format = kFileFormatPcm16kHzFile,
+ const CodecInst* codecInst = NULL,
+ const uint32_t startPointMs = 0,
+ const uint32_t stopPointMs = 0) override;
+
+ int32_t StopPlaying() override;
+
+ bool IsPlaying() override;
+
+ int32_t PlayoutPositionMs(uint32_t& positionMs) const override;
+
+ int32_t IncomingAudioData(const int8_t* audioBuffer,
+ const size_t bufferLength) override;
+
+ int32_t StartRecordingAudioFile(const char* fileName,
+ const FileFormats format,
+ const CodecInst& codecInst,
+ const uint32_t notificationTimeMs = 0,
+ const uint32_t maxSizeBytes = 0) override;
+
+ int32_t StartRecordingAudioStream(
+ OutStream& stream,
+ const FileFormats format,
+ const CodecInst& codecInst,
+ const uint32_t notificationTimeMs = 0) override;
+
+ int32_t StopRecording() override;
+
+ bool IsRecording() override;
+
+ int32_t RecordDurationMs(uint32_t& durationMs) override;
+
+ bool IsStereo() override;
+
+ int32_t SetModuleFileCallback(FileCallback* callback) override;
+
+ int32_t FileDurationMs(const char* fileName,
+ uint32_t& durationMs,
+ const FileFormats format,
+ const uint32_t freqInHz = 16000) override;
+
+ int32_t codec_info(CodecInst& codecInst) const override;
+
+private:
+ // Returns true if the combination of format and codecInst is valid.
+ static bool ValidFileFormat(const FileFormats format,
+ const CodecInst* codecInst);
+
+
+ // Returns true if the filename is valid
+ static bool ValidFileName(const char* fileName);
+
+ // Returns true if the combination of startPointMs and stopPointMs is valid.
+ static bool ValidFilePositions(const uint32_t startPointMs,
+ const uint32_t stopPointMs);
+
+ // Returns true if frequencyInHz is a supported frequency.
+ static bool ValidFrequency(const uint32_t frequencyInHz);
+
+ void HandlePlayCallbacks(int32_t bytesRead);
+
+ int32_t StartPlayingStream(
+ InStream& stream,
+ bool loop,
+ const uint32_t notificationTimeMs,
+ const FileFormats format,
+ const CodecInst* codecInst,
+ const uint32_t startPointMs,
+ const uint32_t stopPointMs);
+
+ int32_t _id;
+ CriticalSectionWrapper* _crit;
+ CriticalSectionWrapper* _callbackCrit;
+
+ ModuleFileUtility* _ptrFileUtilityObj;
+ CodecInst codec_info_;
+
+ InStream* _ptrInStream;
+ OutStream* _ptrOutStream;
+
+ FileFormats _fileFormat;
+ uint32_t _recordDurationMs;
+ uint32_t _playoutPositionMs;
+ uint32_t _notificationMs;
+
+ bool _playingActive;
+ bool _recordingActive;
+ bool _isStereo;
+ bool _openFile;
+
+ char _fileName[512];
+
+ FileCallback* _ptrCallback;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_MEDIA_FILE_MEDIA_FILE_IMPL_H_
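The pair of locks declared above reflects the rule stated in the .cc file: only _callbackCrit may be held while callbacks are made. A plausible reading of the design is that releasing the state lock first lets a callback re-enter the module (for example, call StopPlaying() from within PlayFileEnded()) without contending on _crit. A minimal sketch of the pattern, not part of this diff:

#include "webrtc/modules/media_file/media_file_defines.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"

namespace webrtc {
// Read state under one lock, invoke the callback under the other.
void NotifySketch(CriticalSectionWrapper* state_crit,
                  CriticalSectionWrapper* callback_crit,
                  FileCallback* callback,
                  int32_t id,
                  uint32_t position_ms) {
  uint32_t notify_ms = 0;
  {
    CriticalSectionScoped lock(state_crit);
    notify_ms = position_ms;  // Stand-in for reading _playoutPositionMs.
  }  // state_crit is released before any callback fires.
  CriticalSectionScoped lock(callback_crit);
  if (callback != NULL && notify_ms != 0)
    callback->PlayNotification(id, notify_ms);
}
}  // namespace webrtc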
diff --git a/webrtc/modules/media_file/media_file_unittest.cc b/webrtc/modules/media_file/media_file_unittest.cc
new file mode 100644
index 0000000000..6541a8fb7c
--- /dev/null
+++ b/webrtc/modules/media_file/media_file_unittest.cc
@@ -0,0 +1,106 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/media_file/media_file.h"
+#include "webrtc/system_wrappers/include/sleep.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+class MediaFileTest : public testing::Test {
+ protected:
+ void SetUp() {
+ // Use number 0 as the identifier and pass it to CreateMediaFile.
+ media_file_ = webrtc::MediaFile::CreateMediaFile(0);
+ ASSERT_TRUE(media_file_ != NULL);
+ }
+ void TearDown() {
+ webrtc::MediaFile::DestroyMediaFile(media_file_);
+ media_file_ = NULL;
+ }
+ webrtc::MediaFile* media_file_;
+};
+
+#if defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS)
+#define MAYBE_StartPlayingAudioFileWithoutError \
+ DISABLED_StartPlayingAudioFileWithoutError
+#else
+#define MAYBE_StartPlayingAudioFileWithoutError \
+ StartPlayingAudioFileWithoutError
+#endif
+TEST_F(MediaFileTest, MAYBE_StartPlayingAudioFileWithoutError) {
+ // TODO(leozwang): A hard-coded filename is used here; we want to
+ // loop through all audio files in the future.
+ const std::string audio_file = webrtc::test::ProjectRootPath() +
+ "data/voice_engine/audio_tiny48.wav";
+ ASSERT_EQ(0, media_file_->StartPlayingAudioFile(
+ audio_file.c_str(),
+ 0,
+ false,
+ webrtc::kFileFormatWavFile));
+
+ ASSERT_TRUE(media_file_->IsPlaying());
+
+ webrtc::SleepMs(1);
+
+ ASSERT_EQ(0, media_file_->StopPlaying());
+}
+
+#if defined(WEBRTC_IOS)
+#define MAYBE_WriteWavFile DISABLED_WriteWavFile
+#else
+#define MAYBE_WriteWavFile WriteWavFile
+#endif
+TEST_F(MediaFileTest, MAYBE_WriteWavFile) {
+ // Write file.
+ static const size_t kHeaderSize = 44;
+ static const size_t kPayloadSize = 320;
+ webrtc::CodecInst codec = {
+ 0, "L16", 16000, static_cast<int>(kPayloadSize), 1
+ };
+ std::string outfile = webrtc::test::OutputPath() + "wavtest.wav";
+ ASSERT_EQ(0,
+ media_file_->StartRecordingAudioFile(
+ outfile.c_str(), webrtc::kFileFormatWavFile, codec));
+ static const int8_t kFakeData[kPayloadSize] = {0};
+ ASSERT_EQ(0, media_file_->IncomingAudioData(kFakeData, kPayloadSize));
+ ASSERT_EQ(0, media_file_->StopRecording());
+
+ // Check the file we just wrote.
+ static const uint8_t kExpectedHeader[] = {
+ 'R', 'I', 'F', 'F',
+ 0x64, 0x1, 0, 0, // size of whole file - 8: 320 + 44 - 8
+ 'W', 'A', 'V', 'E',
+ 'f', 'm', 't', ' ',
+ 0x10, 0, 0, 0, // size of fmt block - 8: 24 - 8
+ 0x1, 0, // format: PCM (1)
+ 0x1, 0, // channels: 1
+ 0x80, 0x3e, 0, 0, // sample rate: 16000
+ 0, 0x7d, 0, 0, // byte rate: 2 * 16000
+ 0x2, 0, // block align: NumChannels * BytesPerSample
+ 0x10, 0, // bits per sample: 2 * 8
+ 'd', 'a', 't', 'a',
+ 0x40, 0x1, 0, 0, // size of payload: 320
+ };
+ static_assert(sizeof(kExpectedHeader) == kHeaderSize, "header size");
+
+ EXPECT_EQ(kHeaderSize + kPayloadSize, webrtc::test::GetFileSize(outfile));
+ FILE* f = fopen(outfile.c_str(), "rb");
+ ASSERT_TRUE(f);
+
+ uint8_t header[kHeaderSize];
+ ASSERT_EQ(1u, fread(header, kHeaderSize, 1, f));
+ EXPECT_EQ(0, memcmp(kExpectedHeader, header, kHeaderSize));
+
+ uint8_t payload[kPayloadSize];
+ ASSERT_EQ(1u, fread(payload, kPayloadSize, 1, f));
+ EXPECT_EQ(0, memcmp(kFakeData, payload, kPayloadSize));
+
+ EXPECT_EQ(0, fclose(f));
+}
diff --git a/webrtc/modules/media_file/media_file_utility.cc b/webrtc/modules/media_file/media_file_utility.cc
new file mode 100644
index 0000000000..8a815cc25d
--- /dev/null
+++ b/webrtc/modules/media_file/media_file_utility.cc
@@ -0,0 +1,1559 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/media_file/media_file_utility.h"
+
+#include <assert.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <limits>
+
+#include "webrtc/base/format_macros.h"
+#include "webrtc/common_audio/wav_header.h"
+#include "webrtc/common_types.h"
+#include "webrtc/engine_configurations.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/system_wrappers/include/file_wrapper.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+namespace {
+
+// First 16 bytes of the WAVE header. ckID should be "RIFF", wave_ckID should
+// be "WAVE" and ckSize is the chunk size (4 + n).
+struct WAVE_RIFF_header
+{
+ int8_t ckID[4];
+ int32_t ckSize;
+ int8_t wave_ckID[4];
+};
+
+// First 8 bytes of the format chunk. fmt_ckID should be "fmt ". fmt_ckSize is
+// the chunk size (16, 18 or 40 bytes).
+struct WAVE_CHUNK_header
+{
+ int8_t fmt_ckID[4];
+ uint32_t fmt_ckSize;
+};
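+
+// For reference, a canonical PCM WAV file is laid out as (all multi-byte
+// fields little endian):
+//   "RIFF" <fileSize - 8> "WAVE" "fmt " <fmtSize> <format fields>
+//   "data" <dataSize> <interleaved samples>
+// Other chunks (e.g. "LIST") may appear before "data"; ReadWavHeader skips
+// them.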
+} // unnamed namespace
+
+namespace webrtc {
+ModuleFileUtility::ModuleFileUtility(const int32_t id)
+ : _wavFormatObj(),
+ _dataSize(0),
+ _readSizeBytes(0),
+ _id(id),
+ _stopPointInMs(0),
+ _startPointInMs(0),
+ _playoutPositionMs(0),
+ _bytesWritten(0),
+ codec_info_(),
+ _codecId(kCodecNoCodec),
+ _bytesPerSample(0),
+ _readPos(0),
+ _reading(false),
+ _writing(false),
+ _tempData() {
+ WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
+ "ModuleFileUtility::ModuleFileUtility()");
+ memset(&codec_info_,0,sizeof(CodecInst));
+ codec_info_.pltype = -1;
+}
+
+ModuleFileUtility::~ModuleFileUtility()
+{
+ WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
+ "ModuleFileUtility::~ModuleFileUtility()");
+}
+
+int32_t ModuleFileUtility::ReadWavHeader(InStream& wav)
+{
+ WAVE_RIFF_header RIFFheaderObj;
+ WAVE_CHUNK_header CHUNKheaderObj;
+    // TODO (hellner): tmpStr and tmpStr2 seem unnecessary here.
+ char tmpStr[6] = "FOUR";
+ unsigned char tmpStr2[4];
+ size_t i;
+ bool dataFound = false;
+ bool fmtFound = false;
+ int8_t dummyRead;
+
+
+ _dataSize = 0;
+ int len = wav.Read(&RIFFheaderObj, sizeof(WAVE_RIFF_header));
+ if (len != static_cast<int>(sizeof(WAVE_RIFF_header)))
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "Not a wave file (too short)");
+ return -1;
+ }
+
+ for (i = 0; i < 4; i++)
+ {
+ tmpStr[i] = RIFFheaderObj.ckID[i];
+ }
+ if(strcmp(tmpStr, "RIFF") != 0)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "Not a wave file (does not have RIFF)");
+ return -1;
+ }
+ for (i = 0; i < 4; i++)
+ {
+ tmpStr[i] = RIFFheaderObj.wave_ckID[i];
+ }
+ if(strcmp(tmpStr, "WAVE") != 0)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "Not a wave file (does not have WAVE)");
+ return -1;
+ }
+
+ len = wav.Read(&CHUNKheaderObj, sizeof(WAVE_CHUNK_header));
+
+ // WAVE files are stored in little endian byte order. Make sure that the
+ // data can be read on big endian as well.
+    // TODO (hellner): little endian to system byte order should be done
+    // in a subroutine.
+ memcpy(tmpStr2, &CHUNKheaderObj.fmt_ckSize, 4);
+ CHUNKheaderObj.fmt_ckSize =
+ (uint32_t)tmpStr2[0] + (((uint32_t)tmpStr2[1]) << 8) +
+ (((uint32_t)tmpStr2[2]) << 16) + (((uint32_t)tmpStr2[3]) << 24);
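+    // E.g. the on-disk bytes {0x64, 0x01, 0x00, 0x00} become 0x00000164 = 356
+    // regardless of the host byte order.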
+
+ memcpy(tmpStr, CHUNKheaderObj.fmt_ckID, 4);
+
+ while ((len == static_cast<int>(sizeof(WAVE_CHUNK_header))) &&
+ (!fmtFound || !dataFound))
+ {
+ if(strcmp(tmpStr, "fmt ") == 0)
+ {
+ len = wav.Read(&_wavFormatObj, sizeof(WAVE_FMTINFO_header));
+
+ memcpy(tmpStr2, &_wavFormatObj.formatTag, 2);
+ _wavFormatObj.formatTag =
+ (uint32_t)tmpStr2[0] + (((uint32_t)tmpStr2[1])<<8);
+ memcpy(tmpStr2, &_wavFormatObj.nChannels, 2);
+ _wavFormatObj.nChannels =
+ (int16_t) ((uint32_t)tmpStr2[0] +
+ (((uint32_t)tmpStr2[1])<<8));
+ memcpy(tmpStr2, &_wavFormatObj.nSamplesPerSec, 4);
+ _wavFormatObj.nSamplesPerSec =
+ (int32_t) ((uint32_t)tmpStr2[0] +
+ (((uint32_t)tmpStr2[1])<<8) +
+ (((uint32_t)tmpStr2[2])<<16) +
+ (((uint32_t)tmpStr2[3])<<24));
+ memcpy(tmpStr2, &_wavFormatObj.nAvgBytesPerSec, 4);
+ _wavFormatObj.nAvgBytesPerSec =
+ (int32_t) ((uint32_t)tmpStr2[0] +
+ (((uint32_t)tmpStr2[1])<<8) +
+ (((uint32_t)tmpStr2[2])<<16) +
+ (((uint32_t)tmpStr2[3])<<24));
+ memcpy(tmpStr2, &_wavFormatObj.nBlockAlign, 2);
+ _wavFormatObj.nBlockAlign =
+ (int16_t) ((uint32_t)tmpStr2[0] +
+ (((uint32_t)tmpStr2[1])<<8));
+ memcpy(tmpStr2, &_wavFormatObj.nBitsPerSample, 2);
+ _wavFormatObj.nBitsPerSample =
+ (int16_t) ((uint32_t)tmpStr2[0] +
+ (((uint32_t)tmpStr2[1])<<8));
+
+ if (CHUNKheaderObj.fmt_ckSize < sizeof(WAVE_FMTINFO_header))
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "Chunk size is too small");
+ return -1;
+ }
+ for (i = 0;
+ i < CHUNKheaderObj.fmt_ckSize - sizeof(WAVE_FMTINFO_header);
+ i++)
+ {
+ len = wav.Read(&dummyRead, 1);
+ if(len != 1)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "File corrupted, reached EOF (reading fmt)");
+ return -1;
+ }
+ }
+ fmtFound = true;
+ }
+ else if(strcmp(tmpStr, "data") == 0)
+ {
+ _dataSize = CHUNKheaderObj.fmt_ckSize;
+ dataFound = true;
+ break;
+ }
+ else
+ {
+ for (i = 0; i < CHUNKheaderObj.fmt_ckSize; i++)
+ {
+ len = wav.Read(&dummyRead, 1);
+ if(len != 1)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "File corrupted, reached EOF (reading other)");
+ return -1;
+ }
+ }
+ }
+
+ len = wav.Read(&CHUNKheaderObj, sizeof(WAVE_CHUNK_header));
+
+ memcpy(tmpStr2, &CHUNKheaderObj.fmt_ckSize, 4);
+ CHUNKheaderObj.fmt_ckSize =
+ (uint32_t)tmpStr2[0] + (((uint32_t)tmpStr2[1]) << 8) +
+ (((uint32_t)tmpStr2[2]) << 16) + (((uint32_t)tmpStr2[3]) << 24);
+
+ memcpy(tmpStr, CHUNKheaderObj.fmt_ckID, 4);
+ }
+
+    // Either a proper format chunk has been read or a data chunk was
+    // encountered.
+ if( (_wavFormatObj.formatTag != kWavFormatPcm) &&
+ (_wavFormatObj.formatTag != kWavFormatALaw) &&
+ (_wavFormatObj.formatTag != kWavFormatMuLaw))
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "Coding formatTag value=%d not supported!",
+ _wavFormatObj.formatTag);
+ return -1;
+ }
+ if((_wavFormatObj.nChannels < 1) ||
+ (_wavFormatObj.nChannels > 2))
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "nChannels value=%d not supported!",
+ _wavFormatObj.nChannels);
+ return -1;
+ }
+
+ if((_wavFormatObj.nBitsPerSample != 8) &&
+ (_wavFormatObj.nBitsPerSample != 16))
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "nBitsPerSample value=%d not supported!",
+ _wavFormatObj.nBitsPerSample);
+ return -1;
+ }
+
+    // Calculate the number of bytes that 10 ms of audio data corresponds to.
+ size_t samples_per_10ms =
+ ((_wavFormatObj.formatTag == kWavFormatPcm) &&
+ (_wavFormatObj.nSamplesPerSec == 44100)) ?
+ 440 : static_cast<size_t>(_wavFormatObj.nSamplesPerSec / 100);
+ _readSizeBytes = samples_per_10ms * _wavFormatObj.nChannels *
+ (_wavFormatObj.nBitsPerSample / 8);
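+    // E.g. 16 kHz mono 16-bit PCM: 160 * 1 * 2 = 320 bytes per 10 ms frame;
+    // 48 kHz stereo 16-bit PCM: 480 * 2 * 2 = 1920 bytes
+    // (= WAV_MAX_BUFFER_SIZE).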
+ return 0;
+}
+
+int32_t ModuleFileUtility::InitWavCodec(uint32_t samplesPerSec,
+ size_t channels,
+ uint32_t bitsPerSample,
+ uint32_t formatTag)
+{
+ codec_info_.pltype = -1;
+ codec_info_.plfreq = samplesPerSec;
+ codec_info_.channels = channels;
+ codec_info_.rate = bitsPerSample * samplesPerSec;
+
+ // Calculate the packet size for 10ms frames
+ switch(formatTag)
+ {
+ case kWavFormatALaw:
+ strcpy(codec_info_.plname, "PCMA");
+ _codecId = kCodecPcma;
+ codec_info_.pltype = 8;
+ codec_info_.pacsize = codec_info_.plfreq / 100;
+ break;
+ case kWavFormatMuLaw:
+ strcpy(codec_info_.plname, "PCMU");
+ _codecId = kCodecPcmu;
+ codec_info_.pltype = 0;
+ codec_info_.pacsize = codec_info_.plfreq / 100;
+ break;
+ case kWavFormatPcm:
+ codec_info_.pacsize = (bitsPerSample * (codec_info_.plfreq / 100)) / 8;
+ if(samplesPerSec == 8000)
+ {
+ strcpy(codec_info_.plname, "L16");
+ _codecId = kCodecL16_8Khz;
+ }
+ else if(samplesPerSec == 16000)
+ {
+ strcpy(codec_info_.plname, "L16");
+ _codecId = kCodecL16_16kHz;
+ }
+ else if(samplesPerSec == 32000)
+ {
+ strcpy(codec_info_.plname, "L16");
+ _codecId = kCodecL16_32Khz;
+ }
+ // Set the packet size for "odd" sampling frequencies so that it
+ // properly corresponds to _readSizeBytes.
+ else if(samplesPerSec == 11025)
+ {
+ strcpy(codec_info_.plname, "L16");
+ _codecId = kCodecL16_16kHz;
+ codec_info_.pacsize = 110;
+ codec_info_.plfreq = 11000;
+ }
+ else if(samplesPerSec == 22050)
+ {
+ strcpy(codec_info_.plname, "L16");
+ _codecId = kCodecL16_16kHz;
+ codec_info_.pacsize = 220;
+ codec_info_.plfreq = 22000;
+ }
+ else if(samplesPerSec == 44100)
+ {
+ strcpy(codec_info_.plname, "L16");
+ _codecId = kCodecL16_16kHz;
+ codec_info_.pacsize = 440;
+ codec_info_.plfreq = 44000;
+ }
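+        // Note: 440 samples at 44100 Hz is ~9.98 ms; pacsize 440 matches the
+        // 440-sample 10 ms read size that ReadWavHeader uses for 44.1 kHz.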
+ else if(samplesPerSec == 48000)
+ {
+ strcpy(codec_info_.plname, "L16");
+ _codecId = kCodecL16_16kHz;
+ codec_info_.pacsize = 480;
+ codec_info_.plfreq = 48000;
+ }
+ else
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "Unsupported PCM frequency!");
+ return -1;
+ }
+ break;
+ default:
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "unknown WAV format TAG!");
+ return -1;
+ break;
+ }
+ return 0;
+}
+
+int32_t ModuleFileUtility::InitWavReading(InStream& wav,
+ const uint32_t start,
+ const uint32_t stop)
+{
+
+ _reading = false;
+
+ if(ReadWavHeader(wav) == -1)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "failed to read WAV header!");
+ return -1;
+ }
+
+ _playoutPositionMs = 0;
+ _readPos = 0;
+
+ if(start > 0)
+ {
+ uint8_t dummy[WAV_MAX_BUFFER_SIZE];
+ int readLength;
+ if(_readSizeBytes <= WAV_MAX_BUFFER_SIZE)
+ {
+ while (_playoutPositionMs < start)
+ {
+ readLength = wav.Read(dummy, _readSizeBytes);
+ if(readLength == static_cast<int>(_readSizeBytes))
+ {
+ _readPos += _readSizeBytes;
+ _playoutPositionMs += 10;
+ }
+ else // Must have reached EOF before start position!
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "InitWavReading(), EOF before start position");
+ return -1;
+ }
+ }
+ }
+ else
+ {
+ return -1;
+ }
+ }
+ if( InitWavCodec(_wavFormatObj.nSamplesPerSec, _wavFormatObj.nChannels,
+ _wavFormatObj.nBitsPerSample,
+ _wavFormatObj.formatTag) != 0)
+ {
+ return -1;
+ }
+ _bytesPerSample = static_cast<size_t>(_wavFormatObj.nBitsPerSample / 8);
+
+
+ _startPointInMs = start;
+ _stopPointInMs = stop;
+ _reading = true;
+ return 0;
+}
+
+int32_t ModuleFileUtility::ReadWavDataAsMono(
+ InStream& wav,
+ int8_t* outData,
+ const size_t bufferSize)
+{
+ WEBRTC_TRACE(
+ kTraceStream,
+ kTraceFile,
+ _id,
+ "ModuleFileUtility::ReadWavDataAsMono(wav= 0x%x, outData= 0x%d, "
+ "bufSize= %" PRIuS ")",
+ &wav,
+ outData,
+ bufferSize);
+
+ // The number of bytes that should be read from file.
+ const size_t totalBytesNeeded = _readSizeBytes;
+ // The number of bytes that will be written to outData.
+ const size_t bytesRequested = (codec_info_.channels == 2) ?
+ totalBytesNeeded >> 1 : totalBytesNeeded;
+ if(bufferSize < bytesRequested)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "ReadWavDataAsMono: output buffer is too short!");
+ return -1;
+ }
+ if(outData == NULL)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "ReadWavDataAsMono: output buffer NULL!");
+ return -1;
+ }
+
+ if(!_reading)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "ReadWavDataAsMono: no longer reading file.");
+ return -1;
+ }
+
+ int32_t bytesRead = ReadWavData(
+ wav,
+ (codec_info_.channels == 2) ? _tempData : (uint8_t*)outData,
+ totalBytesNeeded);
+ if(bytesRead == 0)
+ {
+ return 0;
+ }
+ if(bytesRead < 0)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "ReadWavDataAsMono: failed to read data from WAV file.");
+ return -1;
+ }
+    // Output data should be mono.
+ if(codec_info_.channels == 2)
+ {
+ for (size_t i = 0; i < bytesRequested / _bytesPerSample; i++)
+ {
+            // The sample value is the average of the left and right samples,
+            // rounded to the closest integer. Note that samples can be either
+            // 1 or 2 bytes.
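+            // E.g. with 1 byte samples, l = 3, r = 4 gives (3 + 4 + 1) >> 1
+            // = 4; the + 1 makes the truncating shift round to nearest.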
+ if(_bytesPerSample == 1)
+ {
+ _tempData[i] = ((_tempData[2 * i] + _tempData[(2 * i) + 1] +
+ 1) >> 1);
+ }
+ else
+ {
+ int16_t* sampleData = (int16_t*) _tempData;
+ sampleData[i] = ((sampleData[2 * i] + sampleData[(2 * i) + 1] +
+ 1) >> 1);
+ }
+ }
+ memcpy(outData, _tempData, bytesRequested);
+ }
+ return static_cast<int32_t>(bytesRequested);
+}
+
+int32_t ModuleFileUtility::ReadWavDataAsStereo(
+ InStream& wav,
+ int8_t* outDataLeft,
+ int8_t* outDataRight,
+ const size_t bufferSize)
+{
+ WEBRTC_TRACE(
+ kTraceStream,
+ kTraceFile,
+ _id,
+ "ModuleFileUtility::ReadWavDataAsStereo(wav= 0x%x, outLeft= 0x%x, "
+ "outRight= 0x%x, bufSize= %" PRIuS ")",
+ &wav,
+ outDataLeft,
+ outDataRight,
+ bufferSize);
+
+ if((outDataLeft == NULL) ||
+ (outDataRight == NULL))
+ {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "ReadWavDataAsStereo: an output buffer is NULL!");
+ return -1;
+ }
+ if(codec_info_.channels != 2)
+ {
+ WEBRTC_TRACE(
+ kTraceError,
+ kTraceFile,
+ _id,
+ "ReadWavDataAsStereo: WAV file does not contain stereo data!");
+ return -1;
+ }
+ if(! _reading)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "ReadWavDataAsStereo: no longer reading file.");
+ return -1;
+ }
+
+ // The number of bytes that should be read from file.
+ const size_t totalBytesNeeded = _readSizeBytes;
+ // The number of bytes that will be written to the left and the right
+ // buffers.
+ const size_t bytesRequested = totalBytesNeeded >> 1;
+ if(bufferSize < bytesRequested)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "ReadWavData: Output buffers are too short!");
+ assert(false);
+ return -1;
+ }
+
+ int32_t bytesRead = ReadWavData(wav, _tempData, totalBytesNeeded);
+ if(bytesRead <= 0)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "ReadWavDataAsStereo: failed to read data from WAV file.");
+ return -1;
+ }
+
+    // De-interleave the audio into the left and right buffers. Note that
+    // samples can be either 1 or 2 bytes.
+ if(_bytesPerSample == 1)
+ {
+ for (size_t i = 0; i < bytesRequested; i++)
+ {
+ outDataLeft[i] = _tempData[2 * i];
+ outDataRight[i] = _tempData[(2 * i) + 1];
+ }
+ }
+ else if(_bytesPerSample == 2)
+ {
+ int16_t* sampleData = reinterpret_cast<int16_t*>(_tempData);
+ int16_t* outLeft = reinterpret_cast<int16_t*>(outDataLeft);
+ int16_t* outRight = reinterpret_cast<int16_t*>(
+ outDataRight);
+
+ // Bytes requested to samples requested.
+ size_t sampleCount = bytesRequested >> 1;
+ for (size_t i = 0; i < sampleCount; i++)
+ {
+ outLeft[i] = sampleData[2 * i];
+ outRight[i] = sampleData[(2 * i) + 1];
+ }
+ } else {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "ReadWavStereoData: unsupported sample size %" PRIuS "!",
+ _bytesPerSample);
+ assert(false);
+ return -1;
+ }
+ return static_cast<int32_t>(bytesRequested);
+}
+
+int32_t ModuleFileUtility::ReadWavData(InStream& wav,
+ uint8_t* buffer,
+ size_t dataLengthInBytes)
+{
+ WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
+ "ModuleFileUtility::ReadWavData(wav= 0x%x, buffer= 0x%x, "
+ "dataLen= %" PRIuS ")", &wav, buffer, dataLengthInBytes);
+
+
+ if(buffer == NULL)
+ {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                   "ReadWavData: output buffer NULL!");
+ return -1;
+ }
+
+ // Make sure that a read won't return too few samples.
+ // TODO (hellner): why not read the remaining bytes needed from the start
+ // of the file?
+ if(_dataSize < (_readPos + dataLengthInBytes))
+ {
+        // Rewind() returning -1 may mean that the file is not supposed to
+        // be looped.
+ if(wav.Rewind() == -1)
+ {
+ _reading = false;
+ return 0;
+ }
+ if(InitWavReading(wav, _startPointInMs, _stopPointInMs) == -1)
+ {
+ _reading = false;
+ return -1;
+ }
+ }
+
+ int32_t bytesRead = wav.Read(buffer, dataLengthInBytes);
+ if(bytesRead < 0)
+ {
+ _reading = false;
+ return -1;
+ }
+
+ // This should never happen due to earlier sanity checks.
+ // TODO (hellner): change to an assert and fail here since this should
+ // never happen...
+ if(bytesRead < (int32_t)dataLengthInBytes)
+ {
+ if((wav.Rewind() == -1) ||
+ (InitWavReading(wav, _startPointInMs, _stopPointInMs) == -1))
+ {
+ _reading = false;
+ return -1;
+ }
+ else
+ {
+ bytesRead = wav.Read(buffer, dataLengthInBytes);
+ if(bytesRead < (int32_t)dataLengthInBytes)
+ {
+ _reading = false;
+ return -1;
+ }
+ }
+ }
+
+ _readPos += bytesRead;
+
+    // TODO (hellner): Why is dataLengthInBytes allowed to dictate the number
+    // of bytes to read when exactly 10 ms should be read?
+ _playoutPositionMs += 10;
+ if((_stopPointInMs > 0) &&
+ (_playoutPositionMs >= _stopPointInMs))
+ {
+ if((wav.Rewind() == -1) ||
+ (InitWavReading(wav, _startPointInMs, _stopPointInMs) == -1))
+ {
+ _reading = false;
+ }
+ }
+ return bytesRead;
+}
+
+int32_t ModuleFileUtility::InitWavWriting(OutStream& wav,
+ const CodecInst& codecInst)
+{
+
+ if(set_codec_info(codecInst) != 0)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "codecInst identifies unsupported codec!");
+ return -1;
+ }
+ _writing = false;
+ size_t channels = (codecInst.channels == 0) ? 1 : codecInst.channels;
+
+ if(STR_CASE_CMP(codecInst.plname, "PCMU") == 0)
+ {
+ _bytesPerSample = 1;
+ if(WriteWavHeader(wav, 8000, _bytesPerSample, channels,
+ kWavFormatMuLaw, 0) == -1)
+ {
+ return -1;
+ }
+ }
+ else if(STR_CASE_CMP(codecInst.plname, "PCMA") == 0)
+ {
+ _bytesPerSample = 1;
+ if(WriteWavHeader(wav, 8000, _bytesPerSample, channels, kWavFormatALaw,
+ 0) == -1)
+ {
+ return -1;
+ }
+ }
+ else if(STR_CASE_CMP(codecInst.plname, "L16") == 0)
+ {
+ _bytesPerSample = 2;
+ if(WriteWavHeader(wav, codecInst.plfreq, _bytesPerSample, channels,
+ kWavFormatPcm, 0) == -1)
+ {
+ return -1;
+ }
+ }
+ else
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "codecInst identifies unsupported codec for WAV file!");
+ return -1;
+ }
+ _writing = true;
+ _bytesWritten = 0;
+ return 0;
+}
+
+int32_t ModuleFileUtility::WriteWavData(OutStream& out,
+ const int8_t* buffer,
+ const size_t dataLength)
+{
+ WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
+ "ModuleFileUtility::WriteWavData(out= 0x%x, buf= 0x%x, "
+ "dataLen= %" PRIuS ")", &out, buffer, dataLength);
+
+ if(buffer == NULL)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "WriteWavData: input buffer NULL!");
+ return -1;
+ }
+
+ if(!out.Write(buffer, dataLength))
+ {
+ return -1;
+ }
+ _bytesWritten += dataLength;
+ return static_cast<int32_t>(dataLength);
+}
+
+
+int32_t ModuleFileUtility::WriteWavHeader(
+ OutStream& wav,
+ uint32_t freq,
+ size_t bytesPerSample,
+ size_t channels,
+ uint32_t format,
+ size_t lengthInBytes)
+{
+ // Frame size in bytes for 10 ms of audio.
+ // TODO (hellner): 44.1 kHz has 440 samples frame size. Doesn't seem to
+ // be taken into consideration here!
+ const size_t frameSize = (freq / 100) * channels;
+
+    // Calculate the number of full frames that the wave file contains.
+ const size_t dataLengthInBytes = frameSize * (lengthInBytes / frameSize);
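+    // Note that frameSize is samples-per-10ms times channels, not bytes, so
+    // for 16 bit PCM this rounds lengthInBytes down to a multiple of half a
+    // 10 ms frame.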
+
+ uint8_t buf[kWavHeaderSize];
+ webrtc::WriteWavHeader(buf, channels, freq, static_cast<WavFormat>(format),
+ bytesPerSample, dataLengthInBytes / bytesPerSample);
+ wav.Write(buf, kWavHeaderSize);
+ return 0;
+}
+
+int32_t ModuleFileUtility::UpdateWavHeader(OutStream& wav)
+{
+ int32_t res = -1;
+ if(wav.Rewind() == -1)
+ {
+ return -1;
+ }
+ size_t channels = (codec_info_.channels == 0) ? 1 : codec_info_.channels;
+
+ if(STR_CASE_CMP(codec_info_.plname, "L16") == 0)
+ {
+ res = WriteWavHeader(wav, codec_info_.plfreq, 2, channels,
+ kWavFormatPcm, _bytesWritten);
+ } else if(STR_CASE_CMP(codec_info_.plname, "PCMU") == 0) {
+ res = WriteWavHeader(wav, 8000, 1, channels, kWavFormatMuLaw,
+ _bytesWritten);
+ } else if(STR_CASE_CMP(codec_info_.plname, "PCMA") == 0) {
+ res = WriteWavHeader(wav, 8000, 1, channels, kWavFormatALaw,
+ _bytesWritten);
+ } else {
+ // Allow calling this API even if not writing to a WAVE file.
+ // TODO (hellner): why?!
+ return 0;
+ }
+ return res;
+}
+
+
+int32_t ModuleFileUtility::InitPreEncodedReading(InStream& in,
+ const CodecInst& cinst)
+{
+
+ uint8_t preEncodedID;
+ in.Read(&preEncodedID, 1);
+
+ MediaFileUtility_CodecType codecType =
+ (MediaFileUtility_CodecType)preEncodedID;
+
+ if(set_codec_info(cinst) != 0)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "Pre-encoded file send codec mismatch!");
+ return -1;
+ }
+ if(codecType != _codecId)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "Pre-encoded file format codec mismatch!");
+ return -1;
+ }
+ memcpy(&codec_info_,&cinst,sizeof(CodecInst));
+ _reading = true;
+ return 0;
+}
+
+int32_t ModuleFileUtility::ReadPreEncodedData(
+ InStream& in,
+ int8_t* outData,
+ const size_t bufferSize)
+{
+ WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
+ "ModuleFileUtility::ReadPreEncodedData(in= 0x%x, "
+ "outData= 0x%x, bufferSize= %" PRIuS ")", &in, outData,
+ bufferSize);
+
+    if(outData == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id, "output buffer NULL");
+        return -1;
+    }
+
+ size_t frameLen;
+ uint8_t buf[64];
+    // Each frame has a two-byte header containing the frame length.
+ int32_t res = in.Read(buf, 2);
+ if(res != 2)
+ {
+ if(!in.Rewind())
+ {
+ // The first byte is the codec identifier.
+ in.Read(buf, 1);
+ res = in.Read(buf, 2);
+ }
+ else
+ {
+ return -1;
+ }
+ }
+ frameLen = buf[0] + buf[1] * 256;
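+    // The length header is little endian, e.g. {0x28, 0x00} means a
+    // 40 byte frame.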
+ if(bufferSize < frameLen)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "buffer not large enough to read %" PRIuS " bytes of "
+ "pre-encoded data!", frameLen);
+ return -1;
+ }
+ return in.Read(outData, frameLen);
+}
+
+int32_t ModuleFileUtility::InitPreEncodedWriting(
+ OutStream& out,
+ const CodecInst& codecInst)
+{
+
+ if(set_codec_info(codecInst) != 0)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id, "CodecInst not recognized!");
+ return -1;
+ }
+ _writing = true;
+ _bytesWritten = 1;
+ out.Write(&_codecId, 1);
+ return 0;
+}
+
+int32_t ModuleFileUtility::WritePreEncodedData(
+ OutStream& out,
+ const int8_t* buffer,
+ const size_t dataLength)
+{
+ WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
+ "ModuleFileUtility::WritePreEncodedData(out= 0x%x, "
+ "inData= 0x%x, dataLen= %" PRIuS ")", &out, buffer,
+ dataLength);
+
+    if(buffer == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id, "buffer NULL");
+        return -1;
+    }
+
+ size_t bytesWritten = 0;
+    // The first two bytes are the size of the frame.
+ int16_t lengthBuf;
+ lengthBuf = (int16_t)dataLength;
+ if(dataLength > static_cast<size_t>(std::numeric_limits<int16_t>::max()) ||
+ !out.Write(&lengthBuf, 2))
+ {
+ return -1;
+ }
+ bytesWritten = 2;
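+    // Note: lengthBuf is written in host byte order while
+    // ReadPreEncodedData() assumes little endian, so pre-encoded files are
+    // only portable between little-endian hosts.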
+
+ if(!out.Write(buffer, dataLength))
+ {
+ return -1;
+ }
+ bytesWritten += dataLength;
+ return static_cast<int32_t>(bytesWritten);
+}
+
+int32_t ModuleFileUtility::InitCompressedReading(
+ InStream& in,
+ const uint32_t start,
+ const uint32_t stop)
+{
+ WEBRTC_TRACE(kTraceDebug, kTraceFile, _id,
+ "ModuleFileUtility::InitCompressedReading(in= 0x%x, "
+ "start= %d, stop= %d)", &in, start, stop);
+
+#if defined(WEBRTC_CODEC_ILBC)
+ int16_t read_len = 0;
+#endif
+ _codecId = kCodecNoCodec;
+ _playoutPositionMs = 0;
+ _reading = false;
+
+ _startPointInMs = start;
+ _stopPointInMs = stop;
+
+ // Read the codec name
+ int32_t cnt = 0;
+ char buf[64];
+ do
+ {
+ in.Read(&buf[cnt++], 1);
+ } while ((buf[cnt-1] != '\n') && (64 > cnt));
+
+ if(cnt==64)
+ {
+ return -1;
+ }
+ buf[cnt]=0;
+
+#ifdef WEBRTC_CODEC_ILBC
+ if(!strcmp("#!iLBC20\n", buf))
+ {
+ codec_info_.pltype = 102;
+ strcpy(codec_info_.plname, "ilbc");
+ codec_info_.plfreq = 8000;
+ codec_info_.pacsize = 160;
+ codec_info_.channels = 1;
+ codec_info_.rate = 13300;
+ _codecId = kCodecIlbc20Ms;
+
+ if(_startPointInMs > 0)
+ {
+ while (_playoutPositionMs <= _startPointInMs)
+ {
+ read_len = in.Read(buf, 38);
+ if(read_len != 38)
+ {
+ return -1;
+ }
+ _playoutPositionMs += 20;
+ }
+ }
+ }
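+    // iLBC frames have a fixed size: 38 bytes (304 bits) per 20 ms frame and
+    // 50 bytes (400 bits) per 30 ms frame, hence the 38 and 50 byte reads.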
+
+ if(!strcmp("#!iLBC30\n", buf))
+ {
+ codec_info_.pltype = 102;
+ strcpy(codec_info_.plname, "ilbc");
+ codec_info_.plfreq = 8000;
+ codec_info_.pacsize = 240;
+ codec_info_.channels = 1;
+ codec_info_.rate = 13300;
+ _codecId = kCodecIlbc30Ms;
+
+ if(_startPointInMs > 0)
+ {
+ while (_playoutPositionMs <= _startPointInMs)
+ {
+ read_len = in.Read(buf, 50);
+ if(read_len != 50)
+ {
+ return -1;
+ }
+ _playoutPositionMs += 20;
+ }
+ }
+ }
+#endif
+ if(_codecId == kCodecNoCodec)
+ {
+ return -1;
+ }
+ _reading = true;
+ return 0;
+}
+
+int32_t ModuleFileUtility::ReadCompressedData(InStream& in,
+ int8_t* outData,
+ size_t bufferSize)
+{
+ WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
+ "ModuleFileUtility::ReadCompressedData(in=0x%x, outData=0x%x, "
+ "bytes=%" PRIuS ")", &in, outData, bufferSize);
+
+ int bytesRead = 0;
+
+ if(! _reading)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id, "not currently reading!");
+ return -1;
+ }
+
+#ifdef WEBRTC_CODEC_ILBC
+ if((_codecId == kCodecIlbc20Ms) ||
+ (_codecId == kCodecIlbc30Ms))
+ {
+ size_t byteSize = 0;
+ if(_codecId == kCodecIlbc30Ms)
+ {
+ byteSize = 50;
+ }
+ if(_codecId == kCodecIlbc20Ms)
+ {
+ byteSize = 38;
+ }
+ if(bufferSize < byteSize)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "output buffer is too short to read ILBC compressed "
+ "data.");
+ assert(false);
+ return -1;
+ }
+
+ bytesRead = in.Read(outData, byteSize);
+ if(bytesRead != static_cast<int>(byteSize))
+ {
+ if(!in.Rewind())
+ {
+ InitCompressedReading(in, _startPointInMs, _stopPointInMs);
+ bytesRead = in.Read(outData, byteSize);
+ if(bytesRead != static_cast<int>(byteSize))
+ {
+ _reading = false;
+ return -1;
+ }
+ }
+ else
+ {
+ _reading = false;
+ return -1;
+ }
+ }
+ }
+#endif
+ if(bytesRead == 0)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "ReadCompressedData() no bytes read, codec not supported");
+ return -1;
+ }
+
+ _playoutPositionMs += 20;
+ if((_stopPointInMs > 0) &&
+ (_playoutPositionMs >= _stopPointInMs))
+ {
+ if(!in.Rewind())
+ {
+ InitCompressedReading(in, _startPointInMs, _stopPointInMs);
+ }
+ else
+ {
+ _reading = false;
+ }
+ }
+ return bytesRead;
+}
+
+int32_t ModuleFileUtility::InitCompressedWriting(
+ OutStream& out,
+ const CodecInst& codecInst)
+{
+ WEBRTC_TRACE(kTraceDebug, kTraceFile, _id,
+ "ModuleFileUtility::InitCompressedWriting(out= 0x%x, "
+ "codecName= %s)", &out, codecInst.plname);
+
+ _writing = false;
+
+#ifdef WEBRTC_CODEC_ILBC
+ if(STR_CASE_CMP(codecInst.plname, "ilbc") == 0)
+ {
+ if(codecInst.pacsize == 160)
+ {
+ _codecId = kCodecIlbc20Ms;
+ out.Write("#!iLBC20\n",9);
+ }
+ else if(codecInst.pacsize == 240)
+ {
+ _codecId = kCodecIlbc30Ms;
+ out.Write("#!iLBC30\n",9);
+ }
+ else
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "codecInst defines unsupported compression codec!");
+ return -1;
+ }
+ memcpy(&codec_info_,&codecInst,sizeof(CodecInst));
+ _writing = true;
+ return 0;
+ }
+#endif
+
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "codecInst defines unsupported compression codec!");
+ return -1;
+}
+
+int32_t ModuleFileUtility::WriteCompressedData(
+ OutStream& out,
+ const int8_t* buffer,
+ const size_t dataLength)
+{
+ WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
+ "ModuleFileUtility::WriteCompressedData(out= 0x%x, buf= 0x%x, "
+ "dataLen= %" PRIuS ")", &out, buffer, dataLength);
+
+    if(buffer == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id, "buffer NULL");
+        return -1;
+    }
+
+ if(!out.Write(buffer, dataLength))
+ {
+ return -1;
+ }
+ return static_cast<int32_t>(dataLength);
+}
+
+int32_t ModuleFileUtility::InitPCMReading(InStream& pcm,
+ const uint32_t start,
+ const uint32_t stop,
+ uint32_t freq)
+{
+ WEBRTC_TRACE(kTraceInfo, kTraceFile, _id,
+ "ModuleFileUtility::InitPCMReading(pcm= 0x%x, start=%d, "
+ "stop=%d, freq=%d)", &pcm, start, stop, freq);
+
+ int8_t dummy[320];
+ int read_len;
+
+ _playoutPositionMs = 0;
+ _startPointInMs = start;
+ _stopPointInMs = stop;
+ _reading = false;
+
+ if(freq == 8000)
+ {
+ strcpy(codec_info_.plname, "L16");
+ codec_info_.pltype = -1;
+ codec_info_.plfreq = 8000;
+ codec_info_.pacsize = 160;
+ codec_info_.channels = 1;
+ codec_info_.rate = 128000;
+ _codecId = kCodecL16_8Khz;
+ }
+ else if(freq == 16000)
+ {
+ strcpy(codec_info_.plname, "L16");
+ codec_info_.pltype = -1;
+ codec_info_.plfreq = 16000;
+ codec_info_.pacsize = 320;
+ codec_info_.channels = 1;
+ codec_info_.rate = 256000;
+ _codecId = kCodecL16_16kHz;
+ }
+ else if(freq == 32000)
+ {
+ strcpy(codec_info_.plname, "L16");
+ codec_info_.pltype = -1;
+ codec_info_.plfreq = 32000;
+ codec_info_.pacsize = 320;
+ codec_info_.channels = 1;
+ codec_info_.rate = 512000;
+ _codecId = kCodecL16_32Khz;
+ }
+
+ // Readsize for 10ms of audio data (2 bytes per sample).
+    _readSizeBytes = 2 * codec_info_.plfreq / 100;
+ if(_startPointInMs > 0)
+ {
+ while (_playoutPositionMs < _startPointInMs)
+ {
+ read_len = pcm.Read(dummy, _readSizeBytes);
+ if(read_len != static_cast<int>(_readSizeBytes))
+ {
+ return -1; // Must have reached EOF before start position!
+ }
+ _playoutPositionMs += 10;
+ }
+ }
+ _reading = true;
+ return 0;
+}
+
+int32_t ModuleFileUtility::ReadPCMData(InStream& pcm,
+ int8_t* outData,
+ size_t bufferSize)
+{
+ WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
+ "ModuleFileUtility::ReadPCMData(pcm= 0x%x, outData= 0x%x, "
+ "bufSize= %" PRIuS ")", &pcm, outData, bufferSize);
+
+    if(outData == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id, "buffer NULL");
+        return -1;
+    }
+
+ // Readsize for 10ms of audio data (2 bytes per sample).
+ size_t bytesRequested = static_cast<size_t>(2 * codec_info_.plfreq / 100);
+ if(bufferSize < bytesRequested)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "ReadPCMData: buffer not long enough for a 10ms frame.");
+ assert(false);
+ return -1;
+ }
+
+ int bytesRead = pcm.Read(outData, bytesRequested);
+ if(bytesRead < static_cast<int>(bytesRequested))
+ {
+ if(pcm.Rewind() == -1)
+ {
+ _reading = false;
+ }
+ else
+ {
+ if(InitPCMReading(pcm, _startPointInMs, _stopPointInMs,
+ codec_info_.plfreq) == -1)
+ {
+ _reading = false;
+ }
+ else
+ {
+ size_t rest = bytesRequested - bytesRead;
+ int len = pcm.Read(&(outData[bytesRead]), rest);
+ if(len == static_cast<int>(rest))
+ {
+ bytesRead += len;
+ }
+ else
+ {
+ _reading = false;
+ }
+ }
+ if(bytesRead <= 0)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "ReadPCMData: Failed to rewind audio file.");
+ return -1;
+ }
+ }
+ }
+
+ if(bytesRead <= 0)
+ {
+ WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
+ "ReadPCMData: end of file");
+ return -1;
+ }
+ _playoutPositionMs += 10;
+ if(_stopPointInMs && _playoutPositionMs >= _stopPointInMs)
+ {
+ if(!pcm.Rewind())
+ {
+ if(InitPCMReading(pcm, _startPointInMs, _stopPointInMs,
+ codec_info_.plfreq) == -1)
+ {
+ _reading = false;
+ }
+ }
+ }
+ return bytesRead;
+}
+
+int32_t ModuleFileUtility::InitPCMWriting(OutStream& out, uint32_t freq)
+{
+
+ if(freq == 8000)
+ {
+ strcpy(codec_info_.plname, "L16");
+ codec_info_.pltype = -1;
+ codec_info_.plfreq = 8000;
+ codec_info_.pacsize = 160;
+ codec_info_.channels = 1;
+ codec_info_.rate = 128000;
+
+ _codecId = kCodecL16_8Khz;
+ }
+ else if(freq == 16000)
+ {
+ strcpy(codec_info_.plname, "L16");
+ codec_info_.pltype = -1;
+ codec_info_.plfreq = 16000;
+ codec_info_.pacsize = 320;
+ codec_info_.channels = 1;
+ codec_info_.rate = 256000;
+
+ _codecId = kCodecL16_16kHz;
+ }
+ else if(freq == 32000)
+ {
+ strcpy(codec_info_.plname, "L16");
+ codec_info_.pltype = -1;
+ codec_info_.plfreq = 32000;
+ codec_info_.pacsize = 320;
+ codec_info_.channels = 1;
+ codec_info_.rate = 512000;
+
+ _codecId = kCodecL16_32Khz;
+ }
+ if((_codecId != kCodecL16_8Khz) &&
+ (_codecId != kCodecL16_16kHz) &&
+ (_codecId != kCodecL16_32Khz))
+ {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "CodecInst is not 8, 16 or 32 kHz PCM!");
+ return -1;
+ }
+ _writing = true;
+ _bytesWritten = 0;
+ return 0;
+}
+
+int32_t ModuleFileUtility::WritePCMData(OutStream& out,
+ const int8_t* buffer,
+ const size_t dataLength)
+{
+ WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
+ "ModuleFileUtility::WritePCMData(out= 0x%x, buf= 0x%x, "
+ "dataLen= %" PRIuS ")", &out, buffer, dataLength);
+
+    if(buffer == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id, "buffer NULL");
+        return -1;
+    }
+
+ if(!out.Write(buffer, dataLength))
+ {
+ return -1;
+ }
+
+ _bytesWritten += dataLength;
+ return static_cast<int32_t>(dataLength);
+}
+
+int32_t ModuleFileUtility::codec_info(CodecInst& codecInst)
+{
+ WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
+ "ModuleFileUtility::codec_info(codecInst= 0x%x)", &codecInst);
+
+ if(!_reading && !_writing)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "CodecInst: not currently reading audio file!");
+ return -1;
+ }
+ memcpy(&codecInst,&codec_info_,sizeof(CodecInst));
+ return 0;
+}
+
+int32_t ModuleFileUtility::set_codec_info(const CodecInst& codecInst)
+{
+
+ _codecId = kCodecNoCodec;
+ if(STR_CASE_CMP(codecInst.plname, "PCMU") == 0)
+ {
+ _codecId = kCodecPcmu;
+ }
+ else if(STR_CASE_CMP(codecInst.plname, "PCMA") == 0)
+ {
+ _codecId = kCodecPcma;
+ }
+ else if(STR_CASE_CMP(codecInst.plname, "L16") == 0)
+ {
+ if(codecInst.plfreq == 8000)
+ {
+ _codecId = kCodecL16_8Khz;
+ }
+ else if(codecInst.plfreq == 16000)
+ {
+ _codecId = kCodecL16_16kHz;
+ }
+ else if(codecInst.plfreq == 32000)
+ {
+ _codecId = kCodecL16_32Khz;
+ }
+ }
+#ifdef WEBRTC_CODEC_ILBC
+ else if(STR_CASE_CMP(codecInst.plname, "ilbc") == 0)
+ {
+ if(codecInst.pacsize == 160)
+ {
+ _codecId = kCodecIlbc20Ms;
+ }
+ else if(codecInst.pacsize == 240)
+ {
+ _codecId = kCodecIlbc30Ms;
+ }
+ }
+#endif
+#if(defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
+ else if(STR_CASE_CMP(codecInst.plname, "isac") == 0)
+ {
+ if(codecInst.plfreq == 16000)
+ {
+ _codecId = kCodecIsac;
+ }
+ else if(codecInst.plfreq == 32000)
+ {
+ _codecId = kCodecIsacSwb;
+ }
+ }
+#endif
+#ifdef WEBRTC_CODEC_G722
+ else if(STR_CASE_CMP(codecInst.plname, "G722") == 0)
+ {
+ _codecId = kCodecG722;
+ }
+#endif
+ if(_codecId == kCodecNoCodec)
+ {
+ return -1;
+ }
+ memcpy(&codec_info_, &codecInst, sizeof(CodecInst));
+ return 0;
+}
+
+int32_t ModuleFileUtility::FileDurationMs(const char* fileName,
+ const FileFormats fileFormat,
+ const uint32_t freqInHz)
+{
+
+ if(fileName == NULL)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id, "filename NULL");
+ return -1;
+ }
+
+ int32_t time_in_ms = -1;
+ struct stat file_size;
+ if(stat(fileName,&file_size) == -1)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "failed to retrieve file size with stat!");
+ return -1;
+ }
+ FileWrapper* inStreamObj = FileWrapper::Create();
+ if(inStreamObj == NULL)
+ {
+ WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
+ "failed to create InStream object!");
+ return -1;
+ }
+ if(inStreamObj->OpenFile(fileName, true) == -1)
+ {
+ delete inStreamObj;
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "failed to open file %s!", fileName);
+ return -1;
+ }
+
+ switch (fileFormat)
+ {
+ case kFileFormatWavFile:
+ {
+ if(ReadWavHeader(*inStreamObj) == -1)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "failed to read WAV file header!");
+ return -1;
+ }
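+            // E.g. 16 kHz mono 16-bit PCM has nAvgBytesPerSec = 32000, i.e.
+            // 32 bytes per ms of audio after the 44 byte header.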
+ time_in_ms = ((file_size.st_size - 44) /
+ (_wavFormatObj.nAvgBytesPerSec/1000));
+ break;
+ }
+ case kFileFormatPcm16kHzFile:
+ {
+ // 16 samples per ms. 2 bytes per sample.
+ int32_t denominator = 16*2;
+ time_in_ms = (file_size.st_size)/denominator;
+ break;
+ }
+ case kFileFormatPcm8kHzFile:
+ {
+ // 8 samples per ms. 2 bytes per sample.
+ int32_t denominator = 8*2;
+ time_in_ms = (file_size.st_size)/denominator;
+ break;
+ }
+ case kFileFormatCompressedFile:
+ {
+ int32_t cnt = 0;
+ int read_len = 0;
+ char buf[64];
+ do
+ {
+ read_len = inStreamObj->Read(&buf[cnt++], 1);
+ if(read_len != 1)
+ {
+ return -1;
+ }
+ } while ((buf[cnt-1] != '\n') && (64 > cnt));
+
+ if(cnt == 64)
+ {
+ return -1;
+ }
+ else
+ {
+ buf[cnt] = 0;
+ }
+#ifdef WEBRTC_CODEC_ILBC
+ if(!strcmp("#!iLBC20\n", buf))
+ {
+                // A 20 ms frame is 304 bits (38 bytes), so
+                // time_in_ms = file size * 8 / 304 * 20
+                //            = file size * 160 / 304.
+ time_in_ms = ((file_size.st_size)*160)/304;
+ break;
+ }
+ if(!strcmp("#!iLBC30\n", buf))
+ {
+ // 30 ms takes 400 bits.
+ // file size in bytes * 8 / 400 is the number of
+ // 30 ms frames in the file ->
+ // time_in_ms = file size * 8 / 400 * 30
+ time_in_ms = ((file_size.st_size)*240)/400;
+ break;
+ }
+#endif
+ break;
+ }
+ case kFileFormatPreencodedFile:
+ {
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "cannot determine duration of Pre-Encoded file!");
+ break;
+ }
+ default:
+ WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+ "unsupported file format %d!", fileFormat);
+ break;
+ }
+ inStreamObj->CloseFile();
+ delete inStreamObj;
+ return time_in_ms;
+}
+
+uint32_t ModuleFileUtility::PlayoutPositionMs()
+{
+ WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
+ "ModuleFileUtility::PlayoutPosition()");
+
+ return _reading ? _playoutPositionMs : 0;
+}
+} // namespace webrtc
diff --git a/webrtc/modules/media_file/media_file_utility.h b/webrtc/modules/media_file/media_file_utility.h
new file mode 100644
index 0000000000..bc2fa5a2f0
--- /dev/null
+++ b/webrtc/modules/media_file/media_file_utility.h
@@ -0,0 +1,284 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Note: the class cannot be used for reading and writing at the same time.
+#ifndef WEBRTC_MODULES_MEDIA_FILE_MEDIA_FILE_UTILITY_H_
+#define WEBRTC_MODULES_MEDIA_FILE_MEDIA_FILE_UTILITY_H_
+
+#include <stdio.h>
+
+#include "webrtc/common_types.h"
+#include "webrtc/modules/media_file/media_file_defines.h"
+
+namespace webrtc {
+class InStream;
+class OutStream;
+
+class ModuleFileUtility
+{
+public:
+
+ ModuleFileUtility(const int32_t id);
+ ~ModuleFileUtility();
+
+ // Prepare for playing audio from stream.
+ // startPointMs and stopPointMs, unless zero, specify what part of the file
+ // should be read. From startPointMs ms to stopPointMs ms.
+ int32_t InitWavReading(InStream& stream,
+ const uint32_t startPointMs = 0,
+ const uint32_t stopPointMs = 0);
+
+ // Put 10-60ms of audio data from stream into the audioBuffer depending on
+ // codec frame size. dataLengthInBytes indicates the size of audioBuffer.
+ // The return value is the number of bytes written to audioBuffer.
+    // Note: This API only plays mono audio but can be used on files
+    // containing audio with more channels (in which case the audio will be
+    // converted to mono).
+ int32_t ReadWavDataAsMono(InStream& stream, int8_t* audioBuffer,
+ const size_t dataLengthInBytes);
+
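+    // A typical playout loop (sketch only; error handling and the real
+    // stream setup are omitted):
+    //   ModuleFileUtility util(0);
+    //   util.InitWavReading(stream);
+    //   int8_t buf[1920];  // Biggest supported 10 ms frame.
+    //   while (util.ReadWavDataAsMono(stream, buf, sizeof(buf)) > 0) {
+    //     // Consume one 10 ms mono frame.
+    //   }
+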
+ // Put 10-60ms, depending on codec frame size, of audio data from file into
+ // audioBufferLeft and audioBufferRight. The buffers contain the left and
+ // right channel of played out stereo audio.
+ // dataLengthInBytes indicates the size of both audioBufferLeft and
+ // audioBufferRight.
+ // The return value is the number of bytes read for each buffer.
+ // Note: This API can only be successfully called for WAV files with stereo
+ // audio.
+ int32_t ReadWavDataAsStereo(InStream& wav,
+ int8_t* audioBufferLeft,
+ int8_t* audioBufferRight,
+ const size_t bufferLength);
+
+ // Prepare for recording audio to stream.
+ // codecInst specifies the encoding of the audio data.
+ // Note: codecInst.channels should be set to 2 for stereo (and 1 for
+ // mono). Stereo is only supported for WAV files.
+ int32_t InitWavWriting(OutStream& stream, const CodecInst& codecInst);
+
+ // Write one audio frame, i.e. the bufferLength first bytes of audioBuffer,
+ // to file. The audio frame size is determined by the codecInst.pacsize
+    // parameter of the last successful StartRecordingAudioFile(..) call.
+    // The return value is the number of bytes written to the stream.
+ int32_t WriteWavData(OutStream& stream,
+ const int8_t* audioBuffer,
+ const size_t bufferLength);
+
+ // Finalizes the WAV header so that it is correct if nothing more will be
+ // written to stream.
+ // Note: this API must be called before closing stream to ensure that the
+ // WAVE header is updated with the file size. Don't call this API
+ // if more samples are to be written to stream.
+ int32_t UpdateWavHeader(OutStream& stream);
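+
+    // A typical recording sequence (sketch only; error handling omitted):
+    //   util.InitWavWriting(stream, codecInst);
+    //   util.WriteWavData(stream, frame, frameLength);  // Repeated per frame.
+    //   util.UpdateWavHeader(stream);  // Rewinds and patches the sizes.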
+
+ // Prepare for playing audio from stream.
+ // startPointMs and stopPointMs, unless zero, specify what part of the file
+ // should be read. From startPointMs ms to stopPointMs ms.
+ // freqInHz is the PCM sampling frequency.
+    // NOTE: allowed frequencies are 8000, 16000 and 32000 Hz.
+ int32_t InitPCMReading(InStream& stream,
+ const uint32_t startPointMs = 0,
+ const uint32_t stopPointMs = 0,
+ const uint32_t freqInHz = 16000);
+
+ // Put 10-60ms of audio data from stream into the audioBuffer depending on
+ // codec frame size. dataLengthInBytes indicates the size of audioBuffer.
+ // The return value is the number of bytes written to audioBuffer.
+ int32_t ReadPCMData(InStream& stream, int8_t* audioBuffer,
+ const size_t dataLengthInBytes);
+
+ // Prepare for recording audio to stream.
+ // freqInHz is the PCM sampling frequency.
+    // NOTE: allowed frequencies are 8000, 16000 and 32000 Hz.
+ int32_t InitPCMWriting(OutStream& stream, const uint32_t freqInHz = 16000);
+
+ // Write one 10ms audio frame, i.e. the bufferLength first bytes of
+ // audioBuffer, to file. The audio frame size is determined by the freqInHz
+    // parameter of the last successful InitPCMWriting(..) call.
+    // The return value is the number of bytes written to the stream.
+ int32_t WritePCMData(OutStream& stream,
+ const int8_t* audioBuffer,
+ size_t bufferLength);
+
+ // Prepare for playing audio from stream.
+ // startPointMs and stopPointMs, unless zero, specify what part of the file
+ // should be read. From startPointMs ms to stopPointMs ms.
+ int32_t InitCompressedReading(InStream& stream,
+ const uint32_t startPointMs = 0,
+ const uint32_t stopPointMs = 0);
+
+ // Put 10-60ms of audio data from stream into the audioBuffer depending on
+ // codec frame size. dataLengthInBytes indicates the size of audioBuffer.
+ // The return value is the number of bytes written to audioBuffer.
+ int32_t ReadCompressedData(InStream& stream,
+ int8_t* audioBuffer,
+ const size_t dataLengthInBytes);
+
+ // Prepare for recording audio to stream.
+ // codecInst specifies the encoding of the audio data.
+ int32_t InitCompressedWriting(OutStream& stream,
+ const CodecInst& codecInst);
+
+ // Write one audio frame, i.e. the bufferLength first bytes of audioBuffer,
+ // to file. The audio frame size is determined by the codecInst.pacsize
+    // parameter of the last successful InitCompressedWriting(..) call.
+ // The return value is the number of bytes written to stream.
+ // Note: bufferLength must be exactly one frame.
+ int32_t WriteCompressedData(OutStream& stream,
+ const int8_t* audioBuffer,
+ const size_t bufferLength);
+
+ // Prepare for playing audio from stream.
+ // codecInst specifies the encoding of the audio data.
+ int32_t InitPreEncodedReading(InStream& stream,
+ const CodecInst& codecInst);
+
+ // Put 10-60ms of audio data from stream into the audioBuffer depending on
+ // codec frame size. dataLengthInBytes indicates the size of audioBuffer.
+ // The return value is the number of bytes written to audioBuffer.
+ int32_t ReadPreEncodedData(InStream& stream,
+ int8_t* audioBuffer,
+ const size_t dataLengthInBytes);
+
+ // Prepare for recording audio to stream.
+ // codecInst specifies the encoding of the audio data.
+ int32_t InitPreEncodedWriting(OutStream& stream,
+ const CodecInst& codecInst);
+
+ // Write one audio frame, i.e. the bufferLength first bytes of audioBuffer,
+ // to stream. The audio frame size is determined by the codecInst.pacsize
+    // parameter of the last successful InitPreEncodedWriting(..) call.
+ // The return value is the number of bytes written to stream.
+ // Note: bufferLength must be exactly one frame.
+ int32_t WritePreEncodedData(OutStream& stream,
+ const int8_t* inData,
+ const size_t dataLengthInBytes);
+
+    // Return the duration (in ms) of the file specified by fileName.
+ // freqInHz specifies the sampling frequency of the file.
+ int32_t FileDurationMs(const char* fileName,
+ const FileFormats fileFormat,
+ const uint32_t freqInHz = 16000);
+
+ // Return the number of ms that have been played so far.
+ uint32_t PlayoutPositionMs();
+
+ // Update codecInst according to the current audio codec being used for
+ // reading or writing.
+ int32_t codec_info(CodecInst& codecInst);
+
+private:
+    // The biggest supported WAV frame is 10 ms of 2-channel 16-bit audio at
+    // 48 kHz.
+ static const size_t WAV_MAX_BUFFER_SIZE = 480 * 2 * 2;
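+    // I.e. 480 samples * 2 channels * 2 bytes per sample = 1920 bytes.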
+
+
+ int32_t InitWavCodec(uint32_t samplesPerSec,
+ size_t channels,
+ uint32_t bitsPerSample,
+ uint32_t formatTag);
+
+ // Parse the WAV header in stream.
+ int32_t ReadWavHeader(InStream& stream);
+
+ // Update the WAV header. freqInHz, bytesPerSample, channels, format,
+    // lengthInBytes specify characteristics of the audio data.
+ // freqInHz is the sampling frequency. bytesPerSample is the sample size in
+ // bytes. channels is the number of channels, e.g. 1 is mono and 2 is
+    // stereo. format is the encoding format (e.g. PCMU, PCMA, PCM etc.).
+ // lengthInBytes is the number of bytes the audio samples are using up.
+ int32_t WriteWavHeader(OutStream& stream,
+ uint32_t freqInHz,
+ size_t bytesPerSample,
+ size_t channels,
+ uint32_t format,
+ size_t lengthInBytes);
+
+ // Put dataLengthInBytes of audio data from stream into the audioBuffer.
+ // The return value is the number of bytes written to audioBuffer.
+ int32_t ReadWavData(InStream& stream, uint8_t* audioBuffer,
+ size_t dataLengthInBytes);
+
+ // Update the current audio codec being used for reading or writing
+ // according to codecInst.
+ int32_t set_codec_info(const CodecInst& codecInst);
+
+ struct WAVE_FMTINFO_header
+ {
+ int16_t formatTag;
+ int16_t nChannels;
+ int32_t nSamplesPerSec;
+ int32_t nAvgBytesPerSec;
+ int16_t nBlockAlign;
+ int16_t nBitsPerSample;
+ };
+ // Identifiers for preencoded files.
+ enum MediaFileUtility_CodecType
+ {
+ kCodecNoCodec = 0,
+ kCodecIsac,
+ kCodecIsacSwb,
+ kCodecIsacLc,
+ kCodecL16_8Khz,
+ kCodecL16_16kHz,
+ kCodecL16_32Khz,
+ kCodecPcmu,
+ kCodecPcma,
+ kCodecIlbc20Ms,
+ kCodecIlbc30Ms,
+ kCodecG722,
+ kCodecG722_1_32Kbps,
+ kCodecG722_1_24Kbps,
+ kCodecG722_1_16Kbps,
+ kCodecG722_1c_48,
+ kCodecG722_1c_32,
+ kCodecG722_1c_24,
+ kCodecAmr,
+ kCodecAmrWb,
+ kCodecG729,
+ kCodecG729_1,
+ kCodecG726_40,
+ kCodecG726_32,
+ kCodecG726_24,
+ kCodecG726_16,
+ kCodecSpeex8Khz,
+ kCodecSpeex16Khz
+ };
+
+    // TODO (hellner): why store multiple formats? Just store either codec_info_
+ // or _wavFormatObj and supply conversion functions.
+ WAVE_FMTINFO_header _wavFormatObj;
+ size_t _dataSize; // Chunk size if reading a WAV file
+ // Number of bytes to read. I.e. frame size in bytes. May be multiple
+ // chunks if reading WAV.
+ size_t _readSizeBytes;
+
+ int32_t _id;
+
+ uint32_t _stopPointInMs;
+ uint32_t _startPointInMs;
+ uint32_t _playoutPositionMs;
+ size_t _bytesWritten;
+
+ CodecInst codec_info_;
+ MediaFileUtility_CodecType _codecId;
+
+    // The average number of bytes used for one audio sample.
+ size_t _bytesPerSample;
+ size_t _readPos;
+
+ // Only reading or writing can be enabled, not both.
+ bool _reading;
+ bool _writing;
+
+ // Scratch buffer used for turning stereo audio to mono.
+ uint8_t _tempData[WAV_MAX_BUFFER_SIZE];
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_MEDIA_FILE_MEDIA_FILE_UTILITY_H_
diff --git a/webrtc/modules/media_file/source/OWNERS b/webrtc/modules/media_file/source/OWNERS
deleted file mode 100644
index 3ee6b4bf5f..0000000000
--- a/webrtc/modules/media_file/source/OWNERS
+++ /dev/null
@@ -1,5 +0,0 @@
-
-# These are for the common case of adding or renaming files. If you're doing
-# structural changes, please get a review from a reviewer in this file.
-per-file *.gyp=*
-per-file *.gypi=*
diff --git a/webrtc/modules/media_file/source/media_file_impl.cc b/webrtc/modules/media_file/source/media_file_impl.cc
deleted file mode 100644
index 50175b86d5..0000000000
--- a/webrtc/modules/media_file/source/media_file_impl.cc
+++ /dev/null
@@ -1,1137 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <assert.h>
-
-#include "webrtc/base/format_macros.h"
-#include "webrtc/modules/media_file/source/media_file_impl.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/file_wrapper.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-namespace webrtc {
-MediaFile* MediaFile::CreateMediaFile(const int32_t id)
-{
- return new MediaFileImpl(id);
-}
-
-void MediaFile::DestroyMediaFile(MediaFile* module)
-{
- delete static_cast<MediaFileImpl*>(module);
-}
-
-MediaFileImpl::MediaFileImpl(const int32_t id)
- : _id(id),
- _crit(CriticalSectionWrapper::CreateCriticalSection()),
- _callbackCrit(CriticalSectionWrapper::CreateCriticalSection()),
- _ptrFileUtilityObj(NULL),
- codec_info_(),
- _ptrInStream(NULL),
- _ptrOutStream(NULL),
- _fileFormat((FileFormats)-1),
- _recordDurationMs(0),
- _playoutPositionMs(0),
- _notificationMs(0),
- _playingActive(false),
- _recordingActive(false),
- _isStereo(false),
- _openFile(false),
- _fileName(),
- _ptrCallback(NULL)
-{
- WEBRTC_TRACE(kTraceMemory, kTraceFile, id, "Created");
-
- codec_info_.plname[0] = '\0';
- _fileName[0] = '\0';
-}
-
-
-MediaFileImpl::~MediaFileImpl()
-{
- WEBRTC_TRACE(kTraceMemory, kTraceFile, _id, "~MediaFileImpl()");
- {
- CriticalSectionScoped lock(_crit);
-
- if(_playingActive)
- {
- StopPlaying();
- }
-
- if(_recordingActive)
- {
- StopRecording();
- }
-
- delete _ptrFileUtilityObj;
-
- if(_openFile)
- {
- delete _ptrInStream;
- _ptrInStream = NULL;
- delete _ptrOutStream;
- _ptrOutStream = NULL;
- }
- }
-
- delete _crit;
- delete _callbackCrit;
-}
-
-int64_t MediaFileImpl::TimeUntilNextProcess()
-{
- WEBRTC_TRACE(
- kTraceWarning,
- kTraceFile,
- _id,
- "TimeUntilNextProcess: This method is not used by MediaFile class.");
- return -1;
-}
-
-int32_t MediaFileImpl::Process()
-{
- WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
- "Process: This method is not used by MediaFile class.");
- return -1;
-}
-
-int32_t MediaFileImpl::PlayoutAudioData(int8_t* buffer,
- size_t& dataLengthInBytes)
-{
- WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
- "MediaFileImpl::PlayoutData(buffer= 0x%x, bufLen= %" PRIuS ")",
- buffer, dataLengthInBytes);
-
- const size_t bufferLengthInBytes = dataLengthInBytes;
- dataLengthInBytes = 0;
-
- if(buffer == NULL || bufferLengthInBytes == 0)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "Buffer pointer or length is NULL!");
- return -1;
- }
-
- int32_t bytesRead = 0;
- {
- CriticalSectionScoped lock(_crit);
-
- if(!_playingActive)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
- "Not currently playing!");
- return -1;
- }
-
- if(!_ptrFileUtilityObj)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "Playing, but no FileUtility object!");
- StopPlaying();
- return -1;
- }
-
- switch(_fileFormat)
- {
- case kFileFormatPcm32kHzFile:
- case kFileFormatPcm16kHzFile:
- case kFileFormatPcm8kHzFile:
- bytesRead = _ptrFileUtilityObj->ReadPCMData(
- *_ptrInStream,
- buffer,
- bufferLengthInBytes);
- break;
- case kFileFormatCompressedFile:
- bytesRead = _ptrFileUtilityObj->ReadCompressedData(
- *_ptrInStream,
- buffer,
- bufferLengthInBytes);
- break;
- case kFileFormatWavFile:
- bytesRead = _ptrFileUtilityObj->ReadWavDataAsMono(
- *_ptrInStream,
- buffer,
- bufferLengthInBytes);
- break;
- case kFileFormatPreencodedFile:
- bytesRead = _ptrFileUtilityObj->ReadPreEncodedData(
- *_ptrInStream,
- buffer,
- bufferLengthInBytes);
- if(bytesRead > 0)
- {
- dataLengthInBytes = static_cast<size_t>(bytesRead);
- return 0;
- }
- break;
- default:
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "Invalid file format: %d", _fileFormat);
- assert(false);
- break;
- }
- }
-
- if( bytesRead > 0)
- {
- dataLengthInBytes = static_cast<size_t>(bytesRead);
- }
- }
- HandlePlayCallbacks(bytesRead);
- return 0;
-}
-
-void MediaFileImpl::HandlePlayCallbacks(int32_t bytesRead)
-{
- bool playEnded = false;
- uint32_t callbackNotifyMs = 0;
-
- if(bytesRead > 0)
- {
- // Check if it's time for PlayNotification(..).
- _playoutPositionMs = _ptrFileUtilityObj->PlayoutPositionMs();
- if(_notificationMs)
- {
- if(_playoutPositionMs >= _notificationMs)
- {
- _notificationMs = 0;
- callbackNotifyMs = _playoutPositionMs;
- }
- }
- }
- else
- {
- // If no bytes were read assume end of file.
- StopPlaying();
- playEnded = true;
- }
-
- // Only _callbackCrit may and should be taken when making callbacks.
- CriticalSectionScoped lock(_callbackCrit);
- if(_ptrCallback)
- {
- if(callbackNotifyMs)
- {
- _ptrCallback->PlayNotification(_id, callbackNotifyMs);
- }
- if(playEnded)
- {
- _ptrCallback->PlayFileEnded(_id);
- }
- }
-}
-
-int32_t MediaFileImpl::PlayoutStereoData(
- int8_t* bufferLeft,
- int8_t* bufferRight,
- size_t& dataLengthInBytes)
-{
- WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
- "MediaFileImpl::PlayoutStereoData(Left = 0x%x, Right = 0x%x,"
- " Len= %" PRIuS ")",
- bufferLeft,
- bufferRight,
- dataLengthInBytes);
-
- const size_t bufferLengthInBytes = dataLengthInBytes;
- dataLengthInBytes = 0;
-
- if(bufferLeft == NULL || bufferRight == NULL || bufferLengthInBytes == 0)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "A buffer pointer or the length is NULL!");
- return -1;
- }
-
- bool playEnded = false;
- uint32_t callbackNotifyMs = 0;
- {
- CriticalSectionScoped lock(_crit);
-
- if(!_playingActive || !_isStereo)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
- "Not currently playing stereo!");
- return -1;
- }
-
- if(!_ptrFileUtilityObj)
- {
- WEBRTC_TRACE(
- kTraceError,
- kTraceFile,
- _id,
- "Playing stereo, but the FileUtility objects is NULL!");
- StopPlaying();
- return -1;
- }
-
- // Stereo playout only supported for WAV files.
- int32_t bytesRead = 0;
- switch(_fileFormat)
- {
- case kFileFormatWavFile:
- bytesRead = _ptrFileUtilityObj->ReadWavDataAsStereo(
- *_ptrInStream,
- bufferLeft,
- bufferRight,
- bufferLengthInBytes);
- break;
- default:
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "Trying to read non-WAV as stereo audio\
- (not supported)");
- break;
- }
-
- if(bytesRead > 0)
- {
- dataLengthInBytes = static_cast<size_t>(bytesRead);
-
- // Check if it's time for PlayNotification(..).
- _playoutPositionMs = _ptrFileUtilityObj->PlayoutPositionMs();
- if(_notificationMs)
- {
- if(_playoutPositionMs >= _notificationMs)
- {
- _notificationMs = 0;
- callbackNotifyMs = _playoutPositionMs;
- }
- }
- }
- else
- {
- // If no bytes were read assume end of file.
- StopPlaying();
- playEnded = true;
- }
- }
-
- CriticalSectionScoped lock(_callbackCrit);
- if(_ptrCallback)
- {
- if(callbackNotifyMs)
- {
- _ptrCallback->PlayNotification(_id, callbackNotifyMs);
- }
- if(playEnded)
- {
- _ptrCallback->PlayFileEnded(_id);
- }
- }
- return 0;
-}
-
-int32_t MediaFileImpl::StartPlayingAudioFile(
- const char* fileName,
- const uint32_t notificationTimeMs,
- const bool loop,
- const FileFormats format,
- const CodecInst* codecInst,
- const uint32_t startPointMs,
- const uint32_t stopPointMs)
-{
- if(!ValidFileName(fileName))
- {
- return -1;
- }
- if(!ValidFileFormat(format,codecInst))
- {
- return -1;
- }
- if(!ValidFilePositions(startPointMs,stopPointMs))
- {
- return -1;
- }
-
- // Check that the file will play longer than notificationTimeMs ms.
- if((startPointMs && stopPointMs && !loop) &&
- (notificationTimeMs > (stopPointMs - startPointMs)))
- {
- WEBRTC_TRACE(
- kTraceError,
- kTraceFile,
- _id,
- "specified notification time is longer than amount of ms that will\
- be played");
- return -1;
- }
-
- FileWrapper* inputStream = FileWrapper::Create();
- if(inputStream == NULL)
- {
- WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
- "Failed to allocate input stream for file %s", fileName);
- return -1;
- }
-
- if(inputStream->OpenFile(fileName, true, loop) != 0)
- {
- delete inputStream;
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "Could not open input file %s", fileName);
- return -1;
- }
-
- if(StartPlayingStream(*inputStream, loop, notificationTimeMs,
- format, codecInst, startPointMs, stopPointMs) == -1)
- {
- inputStream->CloseFile();
- delete inputStream;
- return -1;
- }
-
- CriticalSectionScoped lock(_crit);
- _openFile = true;
- strncpy(_fileName, fileName, sizeof(_fileName));
- _fileName[sizeof(_fileName) - 1] = '\0';
- return 0;
-}
-
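// A minimal caller sketch for the playback API above (illustrative only:
// "audio.wav" is an assumed path; the defaulted arguments mirror the
// declaration in media_file_impl.h later in this diff).
#include "webrtc/modules/media_file/interface/media_file.h"

void PlayWavOnce() {
    webrtc::MediaFile* media_file = webrtc::MediaFile::CreateMediaFile(0);
    if (media_file->StartPlayingAudioFile("audio.wav", 0, false,
                                          webrtc::kFileFormatWavFile) == 0) {
        // ... drive playout with PlayoutAudioData() until the file ends ...
        media_file->StopPlaying();
    }
    webrtc::MediaFile::DestroyMediaFile(media_file);
}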
-int32_t MediaFileImpl::StartPlayingAudioStream(
- InStream& stream,
- const uint32_t notificationTimeMs,
- const FileFormats format,
- const CodecInst* codecInst,
- const uint32_t startPointMs,
- const uint32_t stopPointMs)
-{
- return StartPlayingStream(stream, false, notificationTimeMs, format,
- codecInst, startPointMs, stopPointMs);
-}
-
-int32_t MediaFileImpl::StartPlayingStream(
- InStream& stream,
- bool loop,
- const uint32_t notificationTimeMs,
- const FileFormats format,
- const CodecInst* codecInst,
- const uint32_t startPointMs,
- const uint32_t stopPointMs)
-{
- if(!ValidFileFormat(format,codecInst))
- {
- return -1;
- }
-
- if(!ValidFilePositions(startPointMs,stopPointMs))
- {
- return -1;
- }
-
- CriticalSectionScoped lock(_crit);
- if(_playingActive || _recordingActive)
- {
- WEBRTC_TRACE(
- kTraceError,
- kTraceFile,
- _id,
- "StartPlaying called, but already playing or recording file %s",
- (_fileName[0] == '\0') ? "(name not set)" : _fileName);
- return -1;
- }
-
- if(_ptrFileUtilityObj != NULL)
- {
- WEBRTC_TRACE(kTraceError,
- kTraceFile,
- _id,
- "StartPlaying called, but FileUtilityObj already exists!");
- StopPlaying();
- return -1;
- }
-
- _ptrFileUtilityObj = new ModuleFileUtility(_id);
- if(_ptrFileUtilityObj == NULL)
- {
- WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
- "Failed to create FileUtilityObj!");
- return -1;
- }
-
- switch(format)
- {
- case kFileFormatWavFile:
- {
- if(_ptrFileUtilityObj->InitWavReading(stream, startPointMs,
- stopPointMs) == -1)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "Not a valid WAV file!");
- StopPlaying();
- return -1;
- }
- _fileFormat = kFileFormatWavFile;
- break;
- }
- case kFileFormatCompressedFile:
- {
- if(_ptrFileUtilityObj->InitCompressedReading(stream, startPointMs,
- stopPointMs) == -1)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "Not a valid Compressed file!");
- StopPlaying();
- return -1;
- }
- _fileFormat = kFileFormatCompressedFile;
- break;
- }
- case kFileFormatPcm8kHzFile:
- case kFileFormatPcm16kHzFile:
- case kFileFormatPcm32kHzFile:
- {
- // ValidFileFormat(), called at the beginning of this function,
- // prevents codecInst from being NULL here.
- assert(codecInst != NULL);
- if(!ValidFrequency(codecInst->plfreq) ||
- _ptrFileUtilityObj->InitPCMReading(stream, startPointMs,
- stopPointMs,
- codecInst->plfreq) == -1)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "Not a valid raw 8 or 16 KHz PCM file!");
- StopPlaying();
- return -1;
- }
-
- _fileFormat = format;
- break;
- }
- case kFileFormatPreencodedFile:
- {
- // ValidFileFormat(), called at the beginning of this function,
- // prevents codecInst from being NULL here.
- assert(codecInst != NULL);
- if(_ptrFileUtilityObj->InitPreEncodedReading(stream, *codecInst) ==
- -1)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "Not a valid PreEncoded file!");
- StopPlaying();
- return -1;
- }
-
- _fileFormat = kFileFormatPreencodedFile;
- break;
- }
- default:
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "Invalid file format: %d", format);
- assert(false);
- break;
- }
- }
- if(_ptrFileUtilityObj->codec_info(codec_info_) == -1)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "Failed to retrieve codec info!");
- StopPlaying();
- return -1;
- }
-
- _isStereo = (codec_info_.channels == 2);
- if(_isStereo && (_fileFormat != kFileFormatWavFile))
- {
- WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
- "Stereo is only allowed for WAV files");
- StopPlaying();
- return -1;
- }
- _playingActive = true;
- _playoutPositionMs = _ptrFileUtilityObj->PlayoutPositionMs();
- _ptrInStream = &stream;
- _notificationMs = notificationTimeMs;
-
- return 0;
-}
-
-int32_t MediaFileImpl::StopPlaying()
-{
-
- CriticalSectionScoped lock(_crit);
- _isStereo = false;
- if(_ptrFileUtilityObj)
- {
- delete _ptrFileUtilityObj;
- _ptrFileUtilityObj = NULL;
- }
- if(_ptrInStream)
- {
- // If MediaFileImpl opened the InStream it must be reclaimed here.
- if(_openFile)
- {
- delete _ptrInStream;
- _openFile = false;
- }
- _ptrInStream = NULL;
- }
-
- codec_info_.pltype = 0;
- codec_info_.plname[0] = '\0';
-
- if(!_playingActive)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
- "playing is not active!");
- return -1;
- }
-
- _playingActive = false;
- return 0;
-}
-
-bool MediaFileImpl::IsPlaying()
-{
- WEBRTC_TRACE(kTraceStream, kTraceFile, _id, "MediaFileImpl::IsPlaying()");
- CriticalSectionScoped lock(_crit);
- return _playingActive;
-}
-
-int32_t MediaFileImpl::IncomingAudioData(
- const int8_t* buffer,
- const size_t bufferLengthInBytes)
-{
- WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
- "MediaFile::IncomingAudioData(buffer= 0x%x, bufLen= %" PRIuS ")",
- buffer, bufferLengthInBytes);
-
- if(buffer == NULL || bufferLengthInBytes == 0)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "Buffer pointer or length is NULL!");
- return -1;
- }
-
- bool recordingEnded = false;
- uint32_t callbackNotifyMs = 0;
- {
- CriticalSectionScoped lock(_crit);
-
- if(!_recordingActive)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
- "Not currently recording!");
- return -1;
- }
- if(_ptrOutStream == NULL)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "Recording is active, but output stream is NULL!");
- assert(false);
- return -1;
- }
-
- int32_t bytesWritten = 0;
- uint32_t samplesWritten = codec_info_.pacsize;
- if(_ptrFileUtilityObj)
- {
- switch(_fileFormat)
- {
- case kFileFormatPcm8kHzFile:
- case kFileFormatPcm16kHzFile:
- case kFileFormatPcm32kHzFile:
- bytesWritten = _ptrFileUtilityObj->WritePCMData(
- *_ptrOutStream,
- buffer,
- bufferLengthInBytes);
-
- // Sample size is 2 bytes.
- if(bytesWritten > 0)
- {
- samplesWritten = bytesWritten/sizeof(int16_t);
- }
- break;
- case kFileFormatCompressedFile:
- bytesWritten = _ptrFileUtilityObj->WriteCompressedData(
- *_ptrOutStream, buffer, bufferLengthInBytes);
- break;
- case kFileFormatWavFile:
- bytesWritten = _ptrFileUtilityObj->WriteWavData(
- *_ptrOutStream,
- buffer,
- bufferLengthInBytes);
- if(bytesWritten > 0 && STR_NCASE_CMP(codec_info_.plname,
- "L16", 4) == 0)
- {
- // Sample size is 2 bytes.
- samplesWritten = bytesWritten/sizeof(int16_t);
- }
- break;
- case kFileFormatPreencodedFile:
- bytesWritten = _ptrFileUtilityObj->WritePreEncodedData(
- *_ptrOutStream, buffer, bufferLengthInBytes);
- break;
- default:
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "Invalid file format: %d", _fileFormat);
- assert(false);
- break;
- }
- } else {
- // TODO (hellner): a quick look at the code suggests that this
- // branch is never executed. Remove?
- if(_ptrOutStream)
- {
- if(_ptrOutStream->Write(buffer, bufferLengthInBytes))
- {
- bytesWritten = static_cast<int32_t>(bufferLengthInBytes);
- }
- }
- }
-
- _recordDurationMs += samplesWritten / (codec_info_.plfreq / 1000);
-
- // Check if it's time for RecordNotification(..).
- if(_notificationMs)
- {
- if(_recordDurationMs >= _notificationMs)
- {
- _notificationMs = 0;
- callbackNotifyMs = _recordDurationMs;
- }
- }
- if(bytesWritten < (int32_t)bufferLengthInBytes)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
- "Failed to write all requested bytes!");
- StopRecording();
- recordingEnded = true;
- }
- }
-
- // _callbackCrit is the only lock that may be held when making callbacks, and it must be held.
- CriticalSectionScoped lock(_callbackCrit);
- if(_ptrCallback)
- {
- if(callbackNotifyMs)
- {
- _ptrCallback->RecordNotification(_id, callbackNotifyMs);
- }
- if(recordingEnded)
- {
- _ptrCallback->RecordFileEnded(_id);
- return -1;
- }
- }
- return 0;
-}
-
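// A sketch of the lock-split pattern used above (names are illustrative):
// notification state is latched under the data lock, and the callback fires
// under a separate callback lock, so user code never runs with the data
// lock held.
#include <stdint.h>
#include <mutex>

struct NotifierSketch {
    std::mutex data_lock;
    std::mutex callback_lock;
    void (*callback)(uint32_t ms) = nullptr;

    void Process(uint32_t position_ms, uint32_t notify_at_ms) {
        uint32_t latched_ms = 0;
        {
            std::lock_guard<std::mutex> lock(data_lock);
            if (notify_at_ms && position_ms >= notify_at_ms)
                latched_ms = position_ms;  // latch; do not call out yet
        }
        std::lock_guard<std::mutex> lock(callback_lock);
        if (callback && latched_ms)
            callback(latched_ms);  // data_lock is not held here
    }
};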
-int32_t MediaFileImpl::StartRecordingAudioFile(
- const char* fileName,
- const FileFormats format,
- const CodecInst& codecInst,
- const uint32_t notificationTimeMs,
- const uint32_t maxSizeBytes)
-{
- if(!ValidFileName(fileName))
- {
- return -1;
- }
- if(!ValidFileFormat(format,&codecInst))
- {
- return -1;
- }
-
- FileWrapper* outputStream = FileWrapper::Create();
- if(outputStream == NULL)
- {
- WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
- "Failed to allocate memory for output stream");
- return -1;
- }
-
- if(outputStream->OpenFile(fileName, false) != 0)
- {
- delete outputStream;
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "Could not open output file '%s' for writing!",
- fileName);
- return -1;
- }
-
- if(maxSizeBytes)
- {
- outputStream->SetMaxFileSize(maxSizeBytes);
- }
-
- if(StartRecordingAudioStream(*outputStream, format, codecInst,
- notificationTimeMs) == -1)
- {
- outputStream->CloseFile();
- delete outputStream;
- return -1;
- }
-
- CriticalSectionScoped lock(_crit);
- _openFile = true;
- strncpy(_fileName, fileName, sizeof(_fileName));
- _fileName[sizeof(_fileName) - 1] = '\0';
- return 0;
-}
-
-int32_t MediaFileImpl::StartRecordingAudioStream(
- OutStream& stream,
- const FileFormats format,
- const CodecInst& codecInst,
- const uint32_t notificationTimeMs)
-{
- // Check codec info
- if(!ValidFileFormat(format,&codecInst))
- {
- return -1;
- }
-
- CriticalSectionScoped lock(_crit);
- if(_recordingActive || _playingActive)
- {
- WEBRTC_TRACE(
- kTraceError,
- kTraceFile,
- _id,
- "StartRecording called, but already recording or playing file %s!",
- _fileName);
- return -1;
- }
-
- if(_ptrFileUtilityObj != NULL)
- {
- WEBRTC_TRACE(
- kTraceError,
- kTraceFile,
- _id,
- "StartRecording called, but fileUtilityObj already exists!");
- StopRecording();
- return -1;
- }
-
- _ptrFileUtilityObj = new ModuleFileUtility(_id);
- if(_ptrFileUtilityObj == NULL)
- {
- WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
- "Cannot allocate fileUtilityObj!");
- return -1;
- }
-
- CodecInst tmpAudioCodec;
- memcpy(&tmpAudioCodec, &codecInst, sizeof(CodecInst));
- switch(format)
- {
- case kFileFormatWavFile:
- {
- if(_ptrFileUtilityObj->InitWavWriting(stream, codecInst) == -1)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "Failed to initialize WAV file!");
- delete _ptrFileUtilityObj;
- _ptrFileUtilityObj = NULL;
- return -1;
- }
- _fileFormat = kFileFormatWavFile;
- break;
- }
- case kFileFormatCompressedFile:
- {
- // Write compression codec name at beginning of file
- if(_ptrFileUtilityObj->InitCompressedWriting(stream, codecInst) ==
- -1)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "Failed to initialize Compressed file!");
- delete _ptrFileUtilityObj;
- _ptrFileUtilityObj = NULL;
- return -1;
- }
- _fileFormat = kFileFormatCompressedFile;
- break;
- }
- case kFileFormatPcm8kHzFile:
- case kFileFormatPcm16kHzFile:
- {
- if(!ValidFrequency(codecInst.plfreq) ||
- _ptrFileUtilityObj->InitPCMWriting(stream, codecInst.plfreq) ==
- -1)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "Failed to initialize 8 or 16KHz PCM file!");
- delete _ptrFileUtilityObj;
- _ptrFileUtilityObj = NULL;
- return -1;
- }
- _fileFormat = format;
- break;
- }
- case kFileFormatPreencodedFile:
- {
- if(_ptrFileUtilityObj->InitPreEncodedWriting(stream, codecInst) ==
- -1)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "Failed to initialize Pre-Encoded file!");
- delete _ptrFileUtilityObj;
- _ptrFileUtilityObj = NULL;
- return -1;
- }
-
- _fileFormat = kFileFormatPreencodedFile;
- break;
- }
- default:
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "Invalid file format %d specified!", format);
- delete _ptrFileUtilityObj;
- _ptrFileUtilityObj = NULL;
- return -1;
- }
- }
- _isStereo = (tmpAudioCodec.channels == 2);
- if(_isStereo)
- {
- if(_fileFormat != kFileFormatWavFile)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
- "Stereo is only allowed for WAV files");
- StopRecording();
- return -1;
- }
- if((STR_NCASE_CMP(tmpAudioCodec.plname, "L16", 4) != 0) &&
- (STR_NCASE_CMP(tmpAudioCodec.plname, "PCMU", 5) != 0) &&
- (STR_NCASE_CMP(tmpAudioCodec.plname, "PCMA", 5) != 0))
- {
- WEBRTC_TRACE(
- kTraceWarning,
- kTraceFile,
- _id,
- "Stereo is only allowed for codec PCMU, PCMA and L16 ");
- StopRecording();
- return -1;
- }
- }
- memcpy(&codec_info_, &tmpAudioCodec, sizeof(CodecInst));
- _recordingActive = true;
- _ptrOutStream = &stream;
- _notificationMs = notificationTimeMs;
- _recordDurationMs = 0;
- return 0;
-}
-
-int32_t MediaFileImpl::StopRecording()
-{
-
- CriticalSectionScoped lock(_crit);
- if(!_recordingActive)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
- "recording is not active!");
- return -1;
- }
-
- _isStereo = false;
-
- if(_ptrFileUtilityObj != NULL)
- {
- // Both the AVI and WAV headers have to be updated before closing the
- // stream because they contain size information.
- if((_fileFormat == kFileFormatWavFile) &&
- (_ptrOutStream != NULL))
- {
- _ptrFileUtilityObj->UpdateWavHeader(*_ptrOutStream);
- }
- delete _ptrFileUtilityObj;
- _ptrFileUtilityObj = NULL;
- }
-
- if(_ptrOutStream != NULL)
- {
- // If MediaFileImpl opened the OutStream it must be reclaimed here.
- if(_openFile)
- {
- delete _ptrOutStream;
- _openFile = false;
- }
- _ptrOutStream = NULL;
- }
-
- _recordingActive = false;
- codec_info_.pltype = 0;
- codec_info_.plname[0] = '\0';
-
- return 0;
-}
-
-bool MediaFileImpl::IsRecording()
-{
- WEBRTC_TRACE(kTraceStream, kTraceFile, _id, "MediaFileImpl::IsRecording()");
- CriticalSectionScoped lock(_crit);
- return _recordingActive;
-}
-
-int32_t MediaFileImpl::RecordDurationMs(uint32_t& durationMs)
-{
-
- CriticalSectionScoped lock(_crit);
- if(!_recordingActive)
- {
- durationMs = 0;
- return -1;
- }
- durationMs = _recordDurationMs;
- return 0;
-}
-
-bool MediaFileImpl::IsStereo()
-{
- WEBRTC_TRACE(kTraceStream, kTraceFile, _id, "MediaFileImpl::IsStereo()");
- CriticalSectionScoped lock(_crit);
- return _isStereo;
-}
-
-int32_t MediaFileImpl::SetModuleFileCallback(FileCallback* callback)
-{
-
- CriticalSectionScoped lock(_callbackCrit);
-
- _ptrCallback = callback;
- return 0;
-}
-
-int32_t MediaFileImpl::FileDurationMs(const char* fileName,
- uint32_t& durationMs,
- const FileFormats format,
- const uint32_t freqInHz)
-{
-
- if(!ValidFileName(fileName))
- {
- return -1;
- }
- if(!ValidFrequency(freqInHz))
- {
- return -1;
- }
-
- ModuleFileUtility* utilityObj = new ModuleFileUtility(_id);
- if(utilityObj == NULL)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "failed to allocate utility object!");
- return -1;
- }
-
- const int32_t duration = utilityObj->FileDurationMs(fileName, format,
- freqInHz);
- delete utilityObj;
- if(duration == -1)
- {
- durationMs = 0;
- return -1;
- }
-
- durationMs = duration;
- return 0;
-}
-
-int32_t MediaFileImpl::PlayoutPositionMs(uint32_t& positionMs) const
-{
- CriticalSectionScoped lock(_crit);
- if(!_playingActive)
- {
- positionMs = 0;
- return -1;
- }
- positionMs = _playoutPositionMs;
- return 0;
-}
-
-int32_t MediaFileImpl::codec_info(CodecInst& codecInst) const
-{
- CriticalSectionScoped lock(_crit);
- if(!_playingActive && !_recordingActive)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "Neither playout nor recording has been initialized!");
- return -1;
- }
- if (codec_info_.pltype == 0 && codec_info_.plname[0] == '\0')
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "The CodecInst for %s is unknown!",
- _playingActive ? "Playback" : "Recording");
- return -1;
- }
- memcpy(&codecInst,&codec_info_,sizeof(CodecInst));
- return 0;
-}
-
-bool MediaFileImpl::ValidFileFormat(const FileFormats format,
- const CodecInst* codecInst)
-{
- if(codecInst == NULL)
- {
- if(format == kFileFormatPreencodedFile ||
- format == kFileFormatPcm8kHzFile ||
- format == kFileFormatPcm16kHzFile ||
- format == kFileFormatPcm32kHzFile)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, -1,
- "Codec info required for file format specified!");
- return false;
- }
- }
- return true;
-}
-
-bool MediaFileImpl::ValidFileName(const char* fileName)
-{
- if((fileName == NULL) || (fileName[0] == '\0'))
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, -1, "FileName not specified!");
- return false;
- }
- return true;
-}
-
-
-bool MediaFileImpl::ValidFilePositions(const uint32_t startPointMs,
- const uint32_t stopPointMs)
-{
- if(startPointMs == 0 && stopPointMs == 0) // Default values
- {
- return true;
- }
- if(stopPointMs && (startPointMs >= stopPointMs))
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, -1,
- "startPointMs must be less than stopPointMs!");
- return false;
- }
- if(stopPointMs && ((stopPointMs - startPointMs) < 20))
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, -1,
- "minimum play duration for files is 20 ms!");
- return false;
- }
- return true;
-}
-
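// A standalone restatement of the position rules above (a sketch; the real
// helper is private to MediaFileImpl):
#include <stdint.h>

bool ValidPositionsSketch(uint32_t startMs, uint32_t stopMs) {
    if (startMs == 0 && stopMs == 0) return true;        // defaults: play all
    if (stopMs && startMs >= stopMs) return false;       // start before stop
    if (stopMs && (stopMs - startMs) < 20) return false; // >= 20 ms window
    return true;
}
// e.g. ValidPositionsSketch(100, 110) is false (10 ms window) while
// ValidPositionsSketch(100, 120) is true.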
-bool MediaFileImpl::ValidFrequency(const uint32_t frequency)
-{
- if((frequency == 8000) || (frequency == 16000) || (frequency == 32000))
- {
- return true;
- }
- WEBRTC_TRACE(kTraceError, kTraceFile, -1,
- "Frequency should be 8000, 16000 or 32000 (Hz)");
- return false;
-}
-} // namespace webrtc
diff --git a/webrtc/modules/media_file/source/media_file_impl.h b/webrtc/modules/media_file/source/media_file_impl.h
deleted file mode 100644
index cdb54d880d..0000000000
--- a/webrtc/modules/media_file/source/media_file_impl.h
+++ /dev/null
@@ -1,148 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_MEDIA_FILE_SOURCE_MEDIA_FILE_IMPL_H_
-#define WEBRTC_MODULES_MEDIA_FILE_SOURCE_MEDIA_FILE_IMPL_H_
-
-#include "webrtc/common_types.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/media_file/interface/media_file.h"
-#include "webrtc/modules/media_file/interface/media_file_defines.h"
-#include "webrtc/modules/media_file/source/media_file_utility.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-
-namespace webrtc {
-class MediaFileImpl : public MediaFile
-{
-
-public:
- MediaFileImpl(const int32_t id);
- ~MediaFileImpl();
-
- int32_t Process() override;
- int64_t TimeUntilNextProcess() override;
-
- // MediaFile functions
- int32_t PlayoutAudioData(int8_t* audioBuffer,
- size_t& dataLengthInBytes) override;
-
- int32_t PlayoutStereoData(int8_t* audioBufferLeft,
- int8_t* audioBufferRight,
- size_t& dataLengthInBytes) override;
-
- int32_t StartPlayingAudioFile(
- const char* fileName,
- const uint32_t notificationTimeMs = 0,
- const bool loop = false,
- const FileFormats format = kFileFormatPcm16kHzFile,
- const CodecInst* codecInst = NULL,
- const uint32_t startPointMs = 0,
- const uint32_t stopPointMs = 0) override;
-
- int32_t StartPlayingAudioStream(
- InStream& stream,
- const uint32_t notificationTimeMs = 0,
- const FileFormats format = kFileFormatPcm16kHzFile,
- const CodecInst* codecInst = NULL,
- const uint32_t startPointMs = 0,
- const uint32_t stopPointMs = 0) override;
-
- int32_t StopPlaying() override;
-
- bool IsPlaying() override;
-
- int32_t PlayoutPositionMs(uint32_t& positionMs) const override;
-
- int32_t IncomingAudioData(const int8_t* audioBuffer,
- const size_t bufferLength) override;
-
- int32_t StartRecordingAudioFile(const char* fileName,
- const FileFormats format,
- const CodecInst& codecInst,
- const uint32_t notificationTimeMs = 0,
- const uint32_t maxSizeBytes = 0) override;
-
- int32_t StartRecordingAudioStream(
- OutStream& stream,
- const FileFormats format,
- const CodecInst& codecInst,
- const uint32_t notificationTimeMs = 0) override;
-
- int32_t StopRecording() override;
-
- bool IsRecording() override;
-
- int32_t RecordDurationMs(uint32_t& durationMs) override;
-
- bool IsStereo() override;
-
- int32_t SetModuleFileCallback(FileCallback* callback) override;
-
- int32_t FileDurationMs(const char* fileName,
- uint32_t& durationMs,
- const FileFormats format,
- const uint32_t freqInHz = 16000) override;
-
- int32_t codec_info(CodecInst& codecInst) const override;
-
-private:
- // Returns true if the combination of format and codecInst is valid.
- static bool ValidFileFormat(const FileFormats format,
- const CodecInst* codecInst);
-
-
- // Returns true if the filename is valid
- static bool ValidFileName(const char* fileName);
-
- // Returns true if the combination of startPointMs and stopPointMs is valid.
- static bool ValidFilePositions(const uint32_t startPointMs,
- const uint32_t stopPointMs);
-
- // Returns true if frequencyInHz is a supported frequency.
- static bool ValidFrequency(const uint32_t frequencyInHz);
-
- void HandlePlayCallbacks(int32_t bytesRead);
-
- int32_t StartPlayingStream(
- InStream& stream,
- bool loop,
- const uint32_t notificationTimeMs,
- const FileFormats format,
- const CodecInst* codecInst,
- const uint32_t startPointMs,
- const uint32_t stopPointMs);
-
- int32_t _id;
- CriticalSectionWrapper* _crit;
- CriticalSectionWrapper* _callbackCrit;
-
- ModuleFileUtility* _ptrFileUtilityObj;
- CodecInst codec_info_;
-
- InStream* _ptrInStream;
- OutStream* _ptrOutStream;
-
- FileFormats _fileFormat;
- uint32_t _recordDurationMs;
- uint32_t _playoutPositionMs;
- uint32_t _notificationMs;
-
- bool _playingActive;
- bool _recordingActive;
- bool _isStereo;
- bool _openFile;
-
- char _fileName[512];
-
- FileCallback* _ptrCallback;
-};
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_MEDIA_FILE_SOURCE_MEDIA_FILE_IMPL_H_
diff --git a/webrtc/modules/media_file/source/media_file_unittest.cc b/webrtc/modules/media_file/source/media_file_unittest.cc
deleted file mode 100644
index 370d13228a..0000000000
--- a/webrtc/modules/media_file/source/media_file_unittest.cc
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/media_file/interface/media_file.h"
-#include "webrtc/system_wrappers/include/sleep.h"
-#include "webrtc/test/testsupport/fileutils.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
-
-class MediaFileTest : public testing::Test {
- protected:
- void SetUp() {
- // Use 0 as the identifier and pass it to CreateMediaFile.
- media_file_ = webrtc::MediaFile::CreateMediaFile(0);
- ASSERT_TRUE(media_file_ != NULL);
- }
- void TearDown() {
- webrtc::MediaFile::DestroyMediaFile(media_file_);
- media_file_ = NULL;
- }
- webrtc::MediaFile* media_file_;
-};
-
-TEST_F(MediaFileTest, DISABLED_ON_IOS(
- DISABLED_ON_ANDROID(StartPlayingAudioFileWithoutError))) {
- // TODO(leozwang): A hard-coded filename is used here; we want to
- // loop through all audio files in the future.
- const std::string audio_file = webrtc::test::ProjectRootPath() +
- "data/voice_engine/audio_tiny48.wav";
- ASSERT_EQ(0, media_file_->StartPlayingAudioFile(
- audio_file.c_str(),
- 0,
- false,
- webrtc::kFileFormatWavFile));
-
- ASSERT_EQ(true, media_file_->IsPlaying());
-
- webrtc::SleepMs(1);
-
- ASSERT_EQ(0, media_file_->StopPlaying());
-}
-
-TEST_F(MediaFileTest, DISABLED_ON_IOS(WriteWavFile)) {
- // Write file.
- static const size_t kHeaderSize = 44;
- static const size_t kPayloadSize = 320;
- webrtc::CodecInst codec = {
- 0, "L16", 16000, static_cast<int>(kPayloadSize), 1
- };
- std::string outfile = webrtc::test::OutputPath() + "wavtest.wav";
- ASSERT_EQ(0,
- media_file_->StartRecordingAudioFile(
- outfile.c_str(), webrtc::kFileFormatWavFile, codec));
- static const int8_t kFakeData[kPayloadSize] = {0};
- ASSERT_EQ(0, media_file_->IncomingAudioData(kFakeData, kPayloadSize));
- ASSERT_EQ(0, media_file_->StopRecording());
-
- // Check the file we just wrote.
- static const uint8_t kExpectedHeader[] = {
- 'R', 'I', 'F', 'F',
- 0x64, 0x1, 0, 0, // size of whole file - 8: 320 + 44 - 8
- 'W', 'A', 'V', 'E',
- 'f', 'm', 't', ' ',
- 0x10, 0, 0, 0, // size of fmt block - 8: 24 - 8
- 0x1, 0, // format: PCM (1)
- 0x1, 0, // channels: 1
- 0x80, 0x3e, 0, 0, // sample rate: 16000
- 0, 0x7d, 0, 0, // byte rate: 2 * 16000
- 0x2, 0, // block align: NumChannels * BytesPerSample
- 0x10, 0, // bits per sample: 2 * 8
- 'd', 'a', 't', 'a',
- 0x40, 0x1, 0, 0, // size of payload: 320
- };
- static_assert(sizeof(kExpectedHeader) == kHeaderSize, "header size");
-
- EXPECT_EQ(kHeaderSize + kPayloadSize, webrtc::test::GetFileSize(outfile));
- FILE* f = fopen(outfile.c_str(), "rb");
- ASSERT_TRUE(f);
-
- uint8_t header[kHeaderSize];
- ASSERT_EQ(1u, fread(header, kHeaderSize, 1, f));
- EXPECT_EQ(0, memcmp(kExpectedHeader, header, kHeaderSize));
-
- uint8_t payload[kPayloadSize];
- ASSERT_EQ(1u, fread(payload, kPayloadSize, 1, f));
- EXPECT_EQ(0, memcmp(kFakeData, payload, kPayloadSize));
-
- EXPECT_EQ(0, fclose(f));
-}
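// A worked check of the little-endian size fields in kExpectedHeader above,
// restating the arithmetic from the test's comments (a sketch, nothing more):
static_assert(320 + 44 - 8 == 0x164, "RIFF chunk size, header bytes 4..7");
static_assert(16000 == 0x3e80, "sample rate, header bytes 24..27");
static_assert(2 * 16000 == 0x7d00, "byte rate, header bytes 28..31");
static_assert(320 == 0x140, "data chunk size, header bytes 40..43");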
diff --git a/webrtc/modules/media_file/source/media_file_utility.cc b/webrtc/modules/media_file/source/media_file_utility.cc
deleted file mode 100644
index 61ae442d0e..0000000000
--- a/webrtc/modules/media_file/source/media_file_utility.cc
+++ /dev/null
@@ -1,1656 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/media_file/source/media_file_utility.h"
-
-#include <assert.h>
-#include <sys/stat.h>
-#include <sys/types.h>
-#include <limits>
-
-#include "webrtc/base/format_macros.h"
-#include "webrtc/common_audio/wav_header.h"
-#include "webrtc/common_types.h"
-#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/system_wrappers/include/file_wrapper.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-namespace {
-
- // First 16 bytes of the WAVE header. ckID should be "RIFF", wave_ckID should
- // be "WAVE" and ckSize is the chunk size (4 + n).
-struct WAVE_RIFF_header
-{
- int8_t ckID[4];
- int32_t ckSize;
- int8_t wave_ckID[4];
-};
-
- // First 8 bytes of the format chunk. fmt_ckID should be "fmt ". fmt_ckSize is
- // the chunk size (16, 18 or 40 bytes).
-struct WAVE_CHUNK_header
-{
- int8_t fmt_ckID[4];
- int32_t fmt_ckSize;
-};
-} // unnamed namespace
-
-namespace webrtc {
-ModuleFileUtility::ModuleFileUtility(const int32_t id)
- : _wavFormatObj(),
- _dataSize(0),
- _readSizeBytes(0),
- _id(id),
- _stopPointInMs(0),
- _startPointInMs(0),
- _playoutPositionMs(0),
- _bytesWritten(0),
- codec_info_(),
- _codecId(kCodecNoCodec),
- _bytesPerSample(0),
- _readPos(0),
- _reading(false),
- _writing(false),
- _tempData() {
- WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
- "ModuleFileUtility::ModuleFileUtility()");
- memset(&codec_info_,0,sizeof(CodecInst));
- codec_info_.pltype = -1;
-}
-
-ModuleFileUtility::~ModuleFileUtility()
-{
- WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
- "ModuleFileUtility::~ModuleFileUtility()");
-}
-
-int32_t ModuleFileUtility::ReadWavHeader(InStream& wav)
-{
- WAVE_RIFF_header RIFFheaderObj;
- WAVE_CHUNK_header CHUNKheaderObj;
- // TODO (hellner): tmpStr and tmpStr2 seem unnecessary here.
- char tmpStr[6] = "FOUR";
- unsigned char tmpStr2[4];
- int32_t i, len;
- bool dataFound = false;
- bool fmtFound = false;
- int8_t dummyRead;
-
-
- _dataSize = 0;
- len = wav.Read(&RIFFheaderObj, sizeof(WAVE_RIFF_header));
- if(len != sizeof(WAVE_RIFF_header))
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "Not a wave file (too short)");
- return -1;
- }
-
- for (i = 0; i < 4; i++)
- {
- tmpStr[i] = RIFFheaderObj.ckID[i];
- }
- if(strcmp(tmpStr, "RIFF") != 0)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "Not a wave file (does not have RIFF)");
- return -1;
- }
- for (i = 0; i < 4; i++)
- {
- tmpStr[i] = RIFFheaderObj.wave_ckID[i];
- }
- if(strcmp(tmpStr, "WAVE") != 0)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "Not a wave file (does not have WAVE)");
- return -1;
- }
-
- len = wav.Read(&CHUNKheaderObj, sizeof(WAVE_CHUNK_header));
-
- // WAVE files are stored in little endian byte order. Make sure that the
- // data can be read on big endian as well.
- // TODO (hellner): little endian to system byte order conversion should
- // be done in a subroutine.
- memcpy(tmpStr2, &CHUNKheaderObj.fmt_ckSize, 4);
- CHUNKheaderObj.fmt_ckSize =
- (int32_t) ((uint32_t) tmpStr2[0] +
- (((uint32_t)tmpStr2[1])<<8) +
- (((uint32_t)tmpStr2[2])<<16) +
- (((uint32_t)tmpStr2[3])<<24));
-
- memcpy(tmpStr, CHUNKheaderObj.fmt_ckID, 4);
-
- while ((len == sizeof(WAVE_CHUNK_header)) && (!fmtFound || !dataFound))
- {
- if(strcmp(tmpStr, "fmt ") == 0)
- {
- len = wav.Read(&_wavFormatObj, sizeof(WAVE_FMTINFO_header));
-
- memcpy(tmpStr2, &_wavFormatObj.formatTag, 2);
- _wavFormatObj.formatTag =
- (uint32_t)tmpStr2[0] + (((uint32_t)tmpStr2[1])<<8);
- memcpy(tmpStr2, &_wavFormatObj.nChannels, 2);
- _wavFormatObj.nChannels =
- (int16_t) ((uint32_t)tmpStr2[0] +
- (((uint32_t)tmpStr2[1])<<8));
- memcpy(tmpStr2, &_wavFormatObj.nSamplesPerSec, 4);
- _wavFormatObj.nSamplesPerSec =
- (int32_t) ((uint32_t)tmpStr2[0] +
- (((uint32_t)tmpStr2[1])<<8) +
- (((uint32_t)tmpStr2[2])<<16) +
- (((uint32_t)tmpStr2[3])<<24));
- memcpy(tmpStr2, &_wavFormatObj.nAvgBytesPerSec, 4);
- _wavFormatObj.nAvgBytesPerSec =
- (int32_t) ((uint32_t)tmpStr2[0] +
- (((uint32_t)tmpStr2[1])<<8) +
- (((uint32_t)tmpStr2[2])<<16) +
- (((uint32_t)tmpStr2[3])<<24));
- memcpy(tmpStr2, &_wavFormatObj.nBlockAlign, 2);
- _wavFormatObj.nBlockAlign =
- (int16_t) ((uint32_t)tmpStr2[0] +
- (((uint32_t)tmpStr2[1])<<8));
- memcpy(tmpStr2, &_wavFormatObj.nBitsPerSample, 2);
- _wavFormatObj.nBitsPerSample =
- (int16_t) ((uint32_t)tmpStr2[0] +
- (((uint32_t)tmpStr2[1])<<8));
-
- for (i = 0;
- i < (CHUNKheaderObj.fmt_ckSize -
- (int32_t)sizeof(WAVE_FMTINFO_header));
- i++)
- {
- len = wav.Read(&dummyRead, 1);
- if(len != 1)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "File corrupted, reached EOF (reading fmt)");
- return -1;
- }
- }
- fmtFound = true;
- }
- else if(strcmp(tmpStr, "data") == 0)
- {
- _dataSize = CHUNKheaderObj.fmt_ckSize;
- dataFound = true;
- break;
- }
- else
- {
- for (i = 0; i < (CHUNKheaderObj.fmt_ckSize); i++)
- {
- len = wav.Read(&dummyRead, 1);
- if(len != 1)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "File corrupted, reached EOF (reading other)");
- return -1;
- }
- }
- }
-
- len = wav.Read(&CHUNKheaderObj, sizeof(WAVE_CHUNK_header));
-
- memcpy(tmpStr2, &CHUNKheaderObj.fmt_ckSize, 4);
- CHUNKheaderObj.fmt_ckSize =
- (int32_t) ((uint32_t)tmpStr2[0] +
- (((uint32_t)tmpStr2[1])<<8) +
- (((uint32_t)tmpStr2[2])<<16) +
- (((uint32_t)tmpStr2[3])<<24));
-
- memcpy(tmpStr, CHUNKheaderObj.fmt_ckID, 4);
- }
-
- // Either a proper format chunk has been read or a data chunk has been
- // encountered.
- if( (_wavFormatObj.formatTag != kWavFormatPcm) &&
- (_wavFormatObj.formatTag != kWavFormatALaw) &&
- (_wavFormatObj.formatTag != kWavFormatMuLaw))
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "Coding formatTag value=%d not supported!",
- _wavFormatObj.formatTag);
- return -1;
- }
- if((_wavFormatObj.nChannels < 1) ||
- (_wavFormatObj.nChannels > 2))
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "nChannels value=%d not supported!",
- _wavFormatObj.nChannels);
- return -1;
- }
-
- if((_wavFormatObj.nBitsPerSample != 8) &&
- (_wavFormatObj.nBitsPerSample != 16))
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "nBitsPerSample value=%d not supported!",
- _wavFormatObj.nBitsPerSample);
- return -1;
- }
-
- // Calculate the number of bytes that 10 ms of audio data correspond to.
- if(_wavFormatObj.formatTag == kWavFormatPcm)
- {
- // TODO (hellner): integer division for 22050 and 11025 would yield
- // the same result as the else statement. Remove those
- // special cases?
- if(_wavFormatObj.nSamplesPerSec == 44100)
- {
- _readSizeBytes = 440 * _wavFormatObj.nChannels *
- (_wavFormatObj.nBitsPerSample / 8);
- } else if(_wavFormatObj.nSamplesPerSec == 22050) {
- _readSizeBytes = 220 * _wavFormatObj.nChannels *
- (_wavFormatObj.nBitsPerSample / 8);
- } else if(_wavFormatObj.nSamplesPerSec == 11025) {
- _readSizeBytes = 110 * _wavFormatObj.nChannels *
- (_wavFormatObj.nBitsPerSample / 8);
- } else {
- _readSizeBytes = (_wavFormatObj.nSamplesPerSec/100) *
- _wavFormatObj.nChannels * (_wavFormatObj.nBitsPerSample / 8);
- }
-
- } else {
- _readSizeBytes = (_wavFormatObj.nSamplesPerSec/100) *
- _wavFormatObj.nChannels * (_wavFormatObj.nBitsPerSample / 8);
- }
- return 0;
-}
-
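// The repeated memcpy-and-shift blocks above convert little-endian file
// fields to host byte order. A generic helper pair, sketching the subroutine
// the TODO asks for (not part of the original file):
#include <stdint.h>

static uint16_t ReadLE16(const unsigned char* p) {
    return static_cast<uint16_t>(p[0] | (p[1] << 8));
}

static uint32_t ReadLE32(const unsigned char* p) {
    return static_cast<uint32_t>(p[0]) | (static_cast<uint32_t>(p[1]) << 8) |
           (static_cast<uint32_t>(p[2]) << 16) |
           (static_cast<uint32_t>(p[3]) << 24);
}
// e.g. CHUNKheaderObj.fmt_ckSize = static_cast<int32_t>(ReadLE32(tmpStr2));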
-int32_t ModuleFileUtility::InitWavCodec(uint32_t samplesPerSec,
- uint32_t channels,
- uint32_t bitsPerSample,
- uint32_t formatTag)
-{
- codec_info_.pltype = -1;
- codec_info_.plfreq = samplesPerSec;
- codec_info_.channels = channels;
- codec_info_.rate = bitsPerSample * samplesPerSec;
-
- // Calculate the packet size for 10ms frames
- switch(formatTag)
- {
- case kWavFormatALaw:
- strcpy(codec_info_.plname, "PCMA");
- _codecId = kCodecPcma;
- codec_info_.pltype = 8;
- codec_info_.pacsize = codec_info_.plfreq / 100;
- break;
- case kWavFormatMuLaw:
- strcpy(codec_info_.plname, "PCMU");
- _codecId = kCodecPcmu;
- codec_info_.pltype = 0;
- codec_info_.pacsize = codec_info_.plfreq / 100;
- break;
- case kWavFormatPcm:
- codec_info_.pacsize = (bitsPerSample * (codec_info_.plfreq / 100)) / 8;
- if(samplesPerSec == 8000)
- {
- strcpy(codec_info_.plname, "L16");
- _codecId = kCodecL16_8Khz;
- }
- else if(samplesPerSec == 16000)
- {
- strcpy(codec_info_.plname, "L16");
- _codecId = kCodecL16_16kHz;
- }
- else if(samplesPerSec == 32000)
- {
- strcpy(codec_info_.plname, "L16");
- _codecId = kCodecL16_32Khz;
- }
- // Set the packet size for "odd" sampling frequencies so that it
- // properly corresponds to _readSizeBytes.
- else if(samplesPerSec == 11025)
- {
- strcpy(codec_info_.plname, "L16");
- _codecId = kCodecL16_16kHz;
- codec_info_.pacsize = 110;
- codec_info_.plfreq = 11000;
- }
- else if(samplesPerSec == 22050)
- {
- strcpy(codec_info_.plname, "L16");
- _codecId = kCodecL16_16kHz;
- codec_info_.pacsize = 220;
- codec_info_.plfreq = 22000;
- }
- else if(samplesPerSec == 44100)
- {
- strcpy(codec_info_.plname, "L16");
- _codecId = kCodecL16_16kHz;
- codec_info_.pacsize = 440;
- codec_info_.plfreq = 44000;
- }
- else if(samplesPerSec == 48000)
- {
- strcpy(codec_info_.plname, "L16");
- _codecId = kCodecL16_16kHz;
- codec_info_.pacsize = 480;
- codec_info_.plfreq = 48000;
- }
- else
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "Unsupported PCM frequency!");
- return -1;
- }
- break;
- default:
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "unknown WAV format TAG!");
- return -1;
- break;
- }
- return 0;
-}
-
-int32_t ModuleFileUtility::InitWavReading(InStream& wav,
- const uint32_t start,
- const uint32_t stop)
-{
-
- _reading = false;
-
- if(ReadWavHeader(wav) == -1)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "failed to read WAV header!");
- return -1;
- }
-
- _playoutPositionMs = 0;
- _readPos = 0;
-
- if(start > 0)
- {
- uint8_t dummy[WAV_MAX_BUFFER_SIZE];
- int32_t readLength;
- if(_readSizeBytes <= WAV_MAX_BUFFER_SIZE)
- {
- while (_playoutPositionMs < start)
- {
- readLength = wav.Read(dummy, _readSizeBytes);
- if(readLength == _readSizeBytes)
- {
- _readPos += readLength;
- _playoutPositionMs += 10;
- }
- else // Must have reached EOF before start position!
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "InitWavReading(), EOF before start position");
- return -1;
- }
- }
- }
- else
- {
- return -1;
- }
- }
- if( InitWavCodec(_wavFormatObj.nSamplesPerSec, _wavFormatObj.nChannels,
- _wavFormatObj.nBitsPerSample,
- _wavFormatObj.formatTag) != 0)
- {
- return -1;
- }
- _bytesPerSample = _wavFormatObj.nBitsPerSample / 8;
-
-
- _startPointInMs = start;
- _stopPointInMs = stop;
- _reading = true;
- return 0;
-}
-
-int32_t ModuleFileUtility::ReadWavDataAsMono(
- InStream& wav,
- int8_t* outData,
- const size_t bufferSize)
-{
- WEBRTC_TRACE(
- kTraceStream,
- kTraceFile,
- _id,
- "ModuleFileUtility::ReadWavDataAsMono(wav= 0x%x, outData= 0x%d, "
- "bufSize= %" PRIuS ")",
- &wav,
- outData,
- bufferSize);
-
- // The number of bytes that should be read from file.
- const uint32_t totalBytesNeeded = _readSizeBytes;
- // The number of bytes that will be written to outData.
- const uint32_t bytesRequested = (codec_info_.channels == 2) ?
- totalBytesNeeded >> 1 : totalBytesNeeded;
- if(bufferSize < bytesRequested)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "ReadWavDataAsMono: output buffer is too short!");
- return -1;
- }
- if(outData == NULL)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "ReadWavDataAsMono: output buffer NULL!");
- return -1;
- }
-
- if(!_reading)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "ReadWavDataAsMono: no longer reading file.");
- return -1;
- }
-
- int32_t bytesRead = ReadWavData(
- wav,
- (codec_info_.channels == 2) ? _tempData : (uint8_t*)outData,
- totalBytesNeeded);
- if(bytesRead == 0)
- {
- return 0;
- }
- if(bytesRead < 0)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "ReadWavDataAsMono: failed to read data from WAV file.");
- return -1;
- }
- // Output data should be mono.
- if(codec_info_.channels == 2)
- {
- for (uint32_t i = 0; i < bytesRequested / _bytesPerSample; i++)
- {
- // The sample value is the average of the left and right samples,
- // rounded to the closest integer. Note that samples can be 1 or 2 bytes.
- if(_bytesPerSample == 1)
- {
- _tempData[i] = ((_tempData[2 * i] + _tempData[(2 * i) + 1] +
- 1) >> 1);
- }
- else
- {
- int16_t* sampleData = (int16_t*) _tempData;
- sampleData[i] = ((sampleData[2 * i] + sampleData[(2 * i) + 1] +
- 1) >> 1);
- }
- }
- memcpy(outData, _tempData, bytesRequested);
- }
- return bytesRequested;
-}
-
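// A standalone version of the 16-bit downmix above: the left/right average
// rounded to nearest via (l + r + 1) >> 1. This sketch widens to int32_t to
// avoid the intermediate overflow the in-place int16_t version risks.
#include <stddef.h>
#include <stdint.h>

void DownmixToMono16(const int16_t* interleaved, size_t frames,
                     int16_t* mono) {
    for (size_t i = 0; i < frames; ++i) {
        int32_t l = interleaved[2 * i];
        int32_t r = interleaved[2 * i + 1];
        mono[i] = static_cast<int16_t>((l + r + 1) >> 1);
    }
}
// e.g. l = 3, r = 4 gives (3 + 4 + 1) >> 1 = 4.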
-int32_t ModuleFileUtility::ReadWavDataAsStereo(
- InStream& wav,
- int8_t* outDataLeft,
- int8_t* outDataRight,
- const size_t bufferSize)
-{
- WEBRTC_TRACE(
- kTraceStream,
- kTraceFile,
- _id,
- "ModuleFileUtility::ReadWavDataAsStereo(wav= 0x%x, outLeft= 0x%x, "
- "outRight= 0x%x, bufSize= %" PRIuS ")",
- &wav,
- outDataLeft,
- outDataRight,
- bufferSize);
-
- if((outDataLeft == NULL) ||
- (outDataRight == NULL))
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "ReadWavDataAsMono: an input buffer is NULL!");
- return -1;
- }
- if(codec_info_.channels != 2)
- {
- WEBRTC_TRACE(
- kTraceError,
- kTraceFile,
- _id,
- "ReadWavDataAsStereo: WAV file does not contain stereo data!");
- return -1;
- }
- if(! _reading)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "ReadWavDataAsStereo: no longer reading file.");
- return -1;
- }
-
- // The number of bytes that should be read from file.
- const uint32_t totalBytesNeeded = _readSizeBytes;
- // The number of bytes that will be written to the left and the right
- // buffers.
- const uint32_t bytesRequested = totalBytesNeeded >> 1;
- if(bufferSize < bytesRequested)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "ReadWavData: Output buffers are too short!");
- assert(false);
- return -1;
- }
-
- int32_t bytesRead = ReadWavData(wav, _tempData, totalBytesNeeded);
- if(bytesRead <= 0)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "ReadWavDataAsStereo: failed to read data from WAV file.");
- return -1;
- }
-
- // De-interleave the audio into the left and right buffers. Note that
- // samples can be either 1 or 2 bytes.
- if(_bytesPerSample == 1)
- {
- for (uint32_t i = 0; i < bytesRequested; i++)
- {
- outDataLeft[i] = _tempData[2 * i];
- outDataRight[i] = _tempData[(2 * i) + 1];
- }
- }
- else if(_bytesPerSample == 2)
- {
- int16_t* sampleData = reinterpret_cast<int16_t*>(_tempData);
- int16_t* outLeft = reinterpret_cast<int16_t*>(outDataLeft);
- int16_t* outRight = reinterpret_cast<int16_t*>(
- outDataRight);
-
- // Bytes requested to samples requested.
- uint32_t sampleCount = bytesRequested >> 1;
- for (uint32_t i = 0; i < sampleCount; i++)
- {
- outLeft[i] = sampleData[2 * i];
- outRight[i] = sampleData[(2 * i) + 1];
- }
- } else {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "ReadWavStereoData: unsupported sample size %d!",
- _bytesPerSample);
- assert(false);
- return -1;
- }
- return bytesRequested;
-}
-
-int32_t ModuleFileUtility::ReadWavData(
- InStream& wav,
- uint8_t* buffer,
- const uint32_t dataLengthInBytes)
-{
- WEBRTC_TRACE(
- kTraceStream,
- kTraceFile,
- _id,
- "ModuleFileUtility::ReadWavData(wav= 0x%x, buffer= 0x%x, dataLen= %ld)",
- &wav,
- buffer,
- dataLengthInBytes);
-
-
- if(buffer == NULL)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "ReadWavDataAsMono: output buffer NULL!");
- return -1;
- }
-
- // Make sure that a read won't return too few samples.
- // TODO (hellner): why not read the remaining bytes needed from the start
- // of the file?
- if((_dataSize - _readPos) < (int32_t)dataLengthInBytes)
- {
- // Rewind() returning -1 may mean that the file is not supposed to be looped.
- if(wav.Rewind() == -1)
- {
- _reading = false;
- return 0;
- }
- if(InitWavReading(wav, _startPointInMs, _stopPointInMs) == -1)
- {
- _reading = false;
- return -1;
- }
- }
-
- int32_t bytesRead = wav.Read(buffer, dataLengthInBytes);
- if(bytesRead < 0)
- {
- _reading = false;
- return -1;
- }
-
- // This should never happen due to earlier sanity checks.
- // TODO (hellner): change to an assert and fail here since this should
- // never happen...
- if(bytesRead < (int32_t)dataLengthInBytes)
- {
- if((wav.Rewind() == -1) ||
- (InitWavReading(wav, _startPointInMs, _stopPointInMs) == -1))
- {
- _reading = false;
- return -1;
- }
- else
- {
- bytesRead = wav.Read(buffer, dataLengthInBytes);
- if(bytesRead < (int32_t)dataLengthInBytes)
- {
- _reading = false;
- return -1;
- }
- }
- }
-
- _readPos += bytesRead;
-
- // TODO (hellner): why is dataLengthInBytes allowed to dictate the number
- // of bytes to read when exactly 10 ms should be read?!
- _playoutPositionMs += 10;
- if((_stopPointInMs > 0) &&
- (_playoutPositionMs >= _stopPointInMs))
- {
- if((wav.Rewind() == -1) ||
- (InitWavReading(wav, _startPointInMs, _stopPointInMs) == -1))
- {
- _reading = false;
- }
- }
- return bytesRead;
-}
-
-int32_t ModuleFileUtility::InitWavWriting(OutStream& wav,
- const CodecInst& codecInst)
-{
-
- if(set_codec_info(codecInst) != 0)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "codecInst identifies unsupported codec!");
- return -1;
- }
- _writing = false;
- uint32_t channels = (codecInst.channels == 0) ?
- 1 : codecInst.channels;
-
- if(STR_CASE_CMP(codecInst.plname, "PCMU") == 0)
- {
- _bytesPerSample = 1;
- if(WriteWavHeader(wav, 8000, _bytesPerSample, channels,
- kWavFormatMuLaw, 0) == -1)
- {
- return -1;
- }
- } else if(STR_CASE_CMP(codecInst.plname, "PCMA") == 0)
- {
- _bytesPerSample = 1;
- if(WriteWavHeader(wav, 8000, _bytesPerSample, channels, kWavFormatALaw,
- 0) == -1)
- {
- return -1;
- }
- }
- else if(STR_CASE_CMP(codecInst.plname, "L16") == 0)
- {
- _bytesPerSample = 2;
- if(WriteWavHeader(wav, codecInst.plfreq, _bytesPerSample, channels,
- kWavFormatPcm, 0) == -1)
- {
- return -1;
- }
- }
- else
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "codecInst identifies unsupported codec for WAV file!");
- return -1;
- }
- _writing = true;
- _bytesWritten = 0;
- return 0;
-}
-
-int32_t ModuleFileUtility::WriteWavData(OutStream& out,
- const int8_t* buffer,
- const size_t dataLength)
-{
- WEBRTC_TRACE(
- kTraceStream,
- kTraceFile,
- _id,
- "ModuleFileUtility::WriteWavData(out= 0x%x, buf= 0x%x, dataLen= %" PRIuS
- ")",
- &out,
- buffer,
- dataLength);
-
- if(buffer == NULL)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "WriteWavData: input buffer NULL!");
- return -1;
- }
-
- if(!out.Write(buffer, dataLength))
- {
- return -1;
- }
- _bytesWritten += dataLength;
- return static_cast<int32_t>(dataLength);
-}
-
-
-int32_t ModuleFileUtility::WriteWavHeader(
- OutStream& wav,
- const uint32_t freq,
- const uint32_t bytesPerSample,
- const uint32_t channels,
- const uint32_t format,
- const uint32_t lengthInBytes)
-{
- // Frame size in bytes for 10 ms of audio.
- // TODO (hellner): 44.1 kHz has a 440-sample frame size. That doesn't
- // seem to be taken into consideration here!
- const int32_t frameSize = (freq / 100) * channels;
-
- // Calculate the number of full frames that the wave file contains.
- const int32_t dataLengthInBytes = frameSize * (lengthInBytes / frameSize);
-
- uint8_t buf[kWavHeaderSize];
- webrtc::WriteWavHeader(buf, channels, freq, static_cast<WavFormat>(format),
- bytesPerSample, dataLengthInBytes / bytesPerSample);
- wav.Write(buf, kWavHeaderSize);
- return 0;
-}
-
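// A worked example of the whole-frame rounding above (values illustrative):
// freq = 8000, channels = 1 gives frameSize = 80, and lengthInBytes = 1000
// is truncated to 80 * (1000 / 80) = 960, dropping the partial frame.
static_assert((8000 / 100) * 1 == 80, "frame size for 8 kHz mono");
static_assert(80 * (1000 / 80) == 960, "trailing partial frame dropped");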
-int32_t ModuleFileUtility::UpdateWavHeader(OutStream& wav)
-{
- int32_t res = -1;
- if(wav.Rewind() == -1)
- {
- return -1;
- }
- uint32_t channels = (codec_info_.channels == 0) ?
- 1 : codec_info_.channels;
-
- if(STR_CASE_CMP(codec_info_.plname, "L16") == 0)
- {
- res = WriteWavHeader(wav, codec_info_.plfreq, 2, channels,
- kWavFormatPcm, _bytesWritten);
- } else if(STR_CASE_CMP(codec_info_.plname, "PCMU") == 0) {
- res = WriteWavHeader(wav, 8000, 1, channels, kWavFormatMuLaw,
- _bytesWritten);
- } else if(STR_CASE_CMP(codec_info_.plname, "PCMA") == 0) {
- res = WriteWavHeader(wav, 8000, 1, channels, kWavFormatALaw,
- _bytesWritten);
- } else {
- // Allow calling this API even if not writing to a WAVE file.
- // TODO (hellner): why?!
- return 0;
- }
- return res;
-}
-
-
-int32_t ModuleFileUtility::InitPreEncodedReading(InStream& in,
- const CodecInst& cinst)
-{
-
- uint8_t preEncodedID;
- in.Read(&preEncodedID, 1);
-
- MediaFileUtility_CodecType codecType =
- (MediaFileUtility_CodecType)preEncodedID;
-
- if(set_codec_info(cinst) != 0)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "Pre-encoded file send codec mismatch!");
- return -1;
- }
- if(codecType != _codecId)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "Pre-encoded file format codec mismatch!");
- return -1;
- }
- memcpy(&codec_info_,&cinst,sizeof(CodecInst));
- _reading = true;
- return 0;
-}
-
-int32_t ModuleFileUtility::ReadPreEncodedData(
- InStream& in,
- int8_t* outData,
- const size_t bufferSize)
-{
- WEBRTC_TRACE(
- kTraceStream,
- kTraceFile,
- _id,
- "ModuleFileUtility::ReadPreEncodedData(in= 0x%x, outData= 0x%x, "
- "bufferSize= %" PRIuS ")",
- &in,
- outData,
- bufferSize);
-
- if(outData == NULL)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id, "output buffer NULL");
- return -1;
- }
-
- uint32_t frameLen;
- uint8_t buf[64];
- // Each frame has a two-byte header containing the frame length.
- int32_t res = in.Read(buf, 2);
- if(res != 2)
- {
- if(!in.Rewind())
- {
- // The first byte is the codec identifier.
- in.Read(buf, 1);
- res = in.Read(buf, 2);
- }
- else
- {
- return -1;
- }
- }
- frameLen = buf[0] + buf[1] * 256;
- if(bufferSize < frameLen)
- {
- WEBRTC_TRACE(
- kTraceError,
- kTraceFile,
- _id,
- "buffer not large enough to read %d bytes of pre-encoded data!",
- frameLen);
- return -1;
- }
- return in.Read(outData, frameLen);
-}
-
-int32_t ModuleFileUtility::InitPreEncodedWriting(
- OutStream& out,
- const CodecInst& codecInst)
-{
-
- if(set_codec_info(codecInst) != 0)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id, "CodecInst not recognized!");
- return -1;
- }
- _writing = true;
- _bytesWritten = 1;
- out.Write(&_codecId, 1);
- return 0;
-}
-
-int32_t ModuleFileUtility::WritePreEncodedData(
- OutStream& out,
- const int8_t* buffer,
- const size_t dataLength)
-{
- WEBRTC_TRACE(
- kTraceStream,
- kTraceFile,
- _id,
- "ModuleFileUtility::WritePreEncodedData(out= 0x%x, inData= 0x%x, "
- "dataLen= %" PRIuS ")",
- &out,
- buffer,
- dataLength);
-
- if(buffer == NULL)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id, "buffer NULL");
- return -1;
- }
-
- size_t bytesWritten = 0;
- // The first two bytes are the size of the frame.
- int16_t lengthBuf;
- lengthBuf = (int16_t)dataLength;
- if(dataLength > static_cast<size_t>(std::numeric_limits<int16_t>::max()) ||
- !out.Write(&lengthBuf, 2))
- {
- return -1;
- }
- bytesWritten = 2;
-
- if(!out.Write(buffer, dataLength))
- {
- return -1;
- }
- bytesWritten += dataLength;
- return static_cast<int32_t>(bytesWritten);
-}
-
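// The pre-encoded layout implied above: one codec-id byte at the start of
// the file, then repeated [2-byte length][payload] records, with the length
// read little-endian as buf[0] + buf[1] * 256. A decoding sketch:
#include <stddef.h>
#include <stdint.h>

size_t DecodePreEncodedFrameLength(const uint8_t* header2) {
    return header2[0] + header2[1] * 256u;
}
// e.g. header bytes {0x26, 0x00} decode to 38, one 20 ms iLBC frame.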
-int32_t ModuleFileUtility::InitCompressedReading(
- InStream& in,
- const uint32_t start,
- const uint32_t stop)
-{
- WEBRTC_TRACE(
- kTraceDebug,
- kTraceFile,
- _id,
- "ModuleFileUtility::InitCompressedReading(in= 0x%x, start= %d,\
- stop= %d)",
- &in,
- start,
- stop);
-
-#if defined(WEBRTC_CODEC_ILBC)
- int16_t read_len = 0;
-#endif
- _codecId = kCodecNoCodec;
- _playoutPositionMs = 0;
- _reading = false;
-
- _startPointInMs = start;
- _stopPointInMs = stop;
-
- // Read the codec name
- int32_t cnt = 0;
- char buf[64];
- do
- {
- in.Read(&buf[cnt++], 1);
- } while ((buf[cnt-1] != '\n') && (64 > cnt));
-
- if(cnt == 64)
- {
- return -1;
- } else {
- buf[cnt] = 0;
- }
-
-#ifdef WEBRTC_CODEC_ILBC
- if(!strcmp("#!iLBC20\n", buf))
- {
- codec_info_.pltype = 102;
- strcpy(codec_info_.plname, "ilbc");
- codec_info_.plfreq = 8000;
- codec_info_.pacsize = 160;
- codec_info_.channels = 1;
- codec_info_.rate = 13300;
- _codecId = kCodecIlbc20Ms;
-
- if(_startPointInMs > 0)
- {
- while (_playoutPositionMs <= _startPointInMs)
- {
- read_len = in.Read(buf, 38);
- if(read_len == 38)
- {
- _playoutPositionMs += 20;
- }
- else
- {
- return -1;
- }
- }
- }
- }
-
- if(!strcmp("#!iLBC30\n", buf))
- {
- codec_info_.pltype = 102;
- strcpy(codec_info_.plname, "ilbc");
- codec_info_.plfreq = 8000;
- codec_info_.pacsize = 240;
- codec_info_.channels = 1;
- codec_info_.rate = 13300;
- _codecId = kCodecIlbc30Ms;
-
- if(_startPointInMs > 0)
- {
- while (_playoutPositionMs <= _startPointInMs)
- {
- read_len = in.Read(buf, 50);
- if(read_len == 50)
- {
- _playoutPositionMs += 20;
- }
- else
- {
- return -1;
- }
- }
- }
- }
-#endif
- if(_codecId == kCodecNoCodec)
- {
- return -1;
- }
- _reading = true;
- return 0;
-}
-
-int32_t ModuleFileUtility::ReadCompressedData(InStream& in,
- int8_t* outData,
- size_t bufferSize)
-{
- WEBRTC_TRACE(
- kTraceStream,
- kTraceFile,
- _id,
- "ModuleFileUtility::ReadCompressedData(in=0x%x, outData=0x%x, bytes=%"
- PRIuS ")",
- &in,
- outData,
- bufferSize);
-
- uint32_t bytesRead = 0;
-
- if(! _reading)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id, "not currently reading!");
- return -1;
- }
-
-#ifdef WEBRTC_CODEC_ILBC
- if((_codecId == kCodecIlbc20Ms) ||
- (_codecId == kCodecIlbc30Ms))
- {
- uint32_t byteSize = 0;
- if(_codecId == kCodecIlbc30Ms)
- {
- byteSize = 50;
- }
- if(_codecId == kCodecIlbc20Ms)
- {
- byteSize = 38;
- }
- if(bufferSize < byteSize)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "output buffer is too short to read ILBC compressed\
- data.");
- assert(false);
- return -1;
- }
-
- bytesRead = in.Read(outData, byteSize);
- if(bytesRead != byteSize)
- {
- if(!in.Rewind())
- {
- InitCompressedReading(in, _startPointInMs, _stopPointInMs);
- bytesRead = in.Read(outData, byteSize);
- if(bytesRead != byteSize)
- {
- _reading = false;
- return -1;
- }
- }
- else
- {
- _reading = false;
- return -1;
- }
- }
- }
-#endif
- if(bytesRead == 0)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "ReadCompressedData() no bytes read, codec not supported");
- return -1;
- }
-
- _playoutPositionMs += 20;
- if((_stopPointInMs > 0) &&
- (_playoutPositionMs >= _stopPointInMs))
- {
- if(!in.Rewind())
- {
- InitCompressedReading(in, _startPointInMs, _stopPointInMs);
- }
- else
- {
- _reading = false;
- }
- }
- return bytesRead;
-}
-
-int32_t ModuleFileUtility::InitCompressedWriting(
- OutStream& out,
- const CodecInst& codecInst)
-{
- WEBRTC_TRACE(kTraceDebug, kTraceFile, _id,
- "ModuleFileUtility::InitCompressedWriting(out= 0x%x,\
- codecName= %s)",
- &out, codecInst.plname);
-
- _writing = false;
-
-#ifdef WEBRTC_CODEC_ILBC
- if(STR_CASE_CMP(codecInst.plname, "ilbc") == 0)
- {
- if(codecInst.pacsize == 160)
- {
- _codecId = kCodecIlbc20Ms;
- out.Write("#!iLBC20\n",9);
- }
- else if(codecInst.pacsize == 240)
- {
- _codecId = kCodecIlbc30Ms;
- out.Write("#!iLBC30\n",9);
- }
- else
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "codecInst defines unsupported compression codec!");
- return -1;
- }
- memcpy(&codec_info_,&codecInst,sizeof(CodecInst));
- _writing = true;
- return 0;
- }
-#endif
-
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "codecInst defines unsupported compression codec!");
- return -1;
-}
-
-int32_t ModuleFileUtility::WriteCompressedData(
- OutStream& out,
- const int8_t* buffer,
- const size_t dataLength)
-{
- WEBRTC_TRACE(
- kTraceStream,
- kTraceFile,
- _id,
- "ModuleFileUtility::WriteCompressedData(out= 0x%x, buf= 0x%x, "
- "dataLen= %" PRIuS ")",
- &out,
- buffer,
- dataLength);
-
- if(buffer == NULL)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id, "buffer NULL");
- return -1;
- }
-
- if(!out.Write(buffer, dataLength))
- {
- return -1;
- }
- return static_cast<int32_t>(dataLength);
-}
-
-int32_t ModuleFileUtility::InitPCMReading(InStream& pcm,
- const uint32_t start,
- const uint32_t stop,
- uint32_t freq)
-{
- WEBRTC_TRACE(
- kTraceInfo,
- kTraceFile,
- _id,
- "ModuleFileUtility::InitPCMReading(pcm= 0x%x, start=%d, stop=%d,\
- freq=%d)",
- &pcm,
- start,
- stop,
- freq);
-
- int8_t dummy[320];
- int32_t read_len;
-
- _playoutPositionMs = 0;
- _startPointInMs = start;
- _stopPointInMs = stop;
- _reading = false;
-
- if(freq == 8000)
- {
- strcpy(codec_info_.plname, "L16");
- codec_info_.pltype = -1;
- codec_info_.plfreq = 8000;
- codec_info_.pacsize = 160;
- codec_info_.channels = 1;
- codec_info_.rate = 128000;
- _codecId = kCodecL16_8Khz;
- }
- else if(freq == 16000)
- {
- strcpy(codec_info_.plname, "L16");
- codec_info_.pltype = -1;
- codec_info_.plfreq = 16000;
- codec_info_.pacsize = 320;
- codec_info_.channels = 1;
- codec_info_.rate = 256000;
- _codecId = kCodecL16_16kHz;
- }
- else if(freq == 32000)
- {
- strcpy(codec_info_.plname, "L16");
- codec_info_.pltype = -1;
- codec_info_.plfreq = 32000;
- codec_info_.pacsize = 320;
- codec_info_.channels = 1;
- codec_info_.rate = 512000;
- _codecId = kCodecL16_32Khz;
- }
-
- // Read size for 10 ms of audio data (2 bytes per sample).
- _readSizeBytes = 2 * codec_info_.plfreq / 100;
- if(_startPointInMs > 0)
- {
- while (_playoutPositionMs < _startPointInMs)
- {
- read_len = pcm.Read(dummy, _readSizeBytes);
- if(read_len == _readSizeBytes)
- {
- _playoutPositionMs += 10;
- }
- else // Must have reached EOF before start position!
- {
- return -1;
- }
- }
- }
- _reading = true;
- return 0;
-}
-
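// Worked examples of the 10 ms read size above: 2 bytes per sample times
// samples per 10 ms.
static_assert(2 * 8000 / 100 == 160, "8 kHz: 160 bytes per 10 ms");
static_assert(2 * 16000 / 100 == 320, "16 kHz: 320 bytes per 10 ms");
static_assert(2 * 32000 / 100 == 640, "32 kHz: 640 bytes per 10 ms");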
-int32_t ModuleFileUtility::ReadPCMData(InStream& pcm,
- int8_t* outData,
- size_t bufferSize)
-{
- WEBRTC_TRACE(
- kTraceStream,
- kTraceFile,
- _id,
- "ModuleFileUtility::ReadPCMData(pcm= 0x%x, outData= 0x%x, bufSize= %"
- PRIuS ")",
- &pcm,
- outData,
- bufferSize);
-
- if(outData == NULL)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id, "buffer NULL");
- return -1;
- }
-
- // Read size for 10 ms of audio data (2 bytes per sample).
- uint32_t bytesRequested = 2 * codec_info_.plfreq / 100;
- if(bufferSize < bytesRequested)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "ReadPCMData: buffer not long enough for a 10ms frame.");
- assert(false);
- return -1;
- }
-
- uint32_t bytesRead = pcm.Read(outData, bytesRequested);
- if(bytesRead < bytesRequested)
- {
- if(pcm.Rewind() == -1)
- {
- _reading = false;
- }
- else
- {
- if(InitPCMReading(pcm, _startPointInMs, _stopPointInMs,
- codec_info_.plfreq) == -1)
- {
- _reading = false;
- }
- else
- {
- int32_t rest = bytesRequested - bytesRead;
- int32_t len = pcm.Read(&(outData[bytesRead]), rest);
- if(len == rest)
- {
- bytesRead += len;
- }
- else
- {
- _reading = false;
- }
- }
- if(bytesRead <= 0)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "ReadPCMData: Failed to rewind audio file.");
- return -1;
- }
- }
- }
-
- if(bytesRead <= 0)
- {
- WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
- "ReadPCMData: end of file");
- return -1;
- }
- _playoutPositionMs += 10;
- if(_stopPointInMs && _playoutPositionMs >= _stopPointInMs)
- {
- if(!pcm.Rewind())
- {
- if(InitPCMReading(pcm, _startPointInMs, _stopPointInMs,
- codec_info_.plfreq) == -1)
- {
- _reading = false;
- }
- }
- }
- return bytesRead;
-}
-
-int32_t ModuleFileUtility::InitPCMWriting(OutStream& out, uint32_t freq)
-{
-
- if(freq == 8000)
- {
- strcpy(codec_info_.plname, "L16");
- codec_info_.pltype = -1;
- codec_info_.plfreq = 8000;
- codec_info_.pacsize = 160;
- codec_info_.channels = 1;
- codec_info_.rate = 128000;
-
- _codecId = kCodecL16_8Khz;
- }
- else if(freq == 16000)
- {
- strcpy(codec_info_.plname, "L16");
- codec_info_.pltype = -1;
- codec_info_.plfreq = 16000;
- codec_info_.pacsize = 320;
- codec_info_.channels = 1;
- codec_info_.rate = 256000;
-
- _codecId = kCodecL16_16kHz;
- }
- else if(freq == 32000)
- {
- strcpy(codec_info_.plname, "L16");
- codec_info_.pltype = -1;
- codec_info_.plfreq = 32000;
- codec_info_.pacsize = 320;
- codec_info_.channels = 1;
- codec_info_.rate = 512000;
-
- _codecId = kCodecL16_32Khz;
- }
- if((_codecId != kCodecL16_8Khz) &&
- (_codecId != kCodecL16_16kHz) &&
- (_codecId != kCodecL16_32Khz))
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "CodecInst is not 8 kHz, 16 kHz or 32 kHz PCM!");
- return -1;
- }
- _writing = true;
- _bytesWritten = 0;
- return 0;
-}
-
-int32_t ModuleFileUtility::WritePCMData(OutStream& out,
- const int8_t* buffer,
- const size_t dataLength)
-{
- WEBRTC_TRACE(
- kTraceStream,
- kTraceFile,
- _id,
- "ModuleFileUtility::WritePCMData(out= 0x%x, buf= 0x%x, dataLen= %" PRIuS
- ")",
- &out,
- buffer,
- dataLength);
-
- if(buffer == NULL)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id, "buffer NULL");
- return -1;
- }
-
- if(!out.Write(buffer, dataLength))
- {
- return -1;
- }
-
- _bytesWritten += dataLength;
- return static_cast<int32_t>(dataLength);
-}
-
-int32_t ModuleFileUtility::codec_info(CodecInst& codecInst)
-{
- WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
- "ModuleFileUtility::codec_info(codecInst= 0x%x)", &codecInst);
-
- if(!_reading && !_writing)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "CodecInst: not currently reading audio file!");
- return -1;
- }
- memcpy(&codecInst,&codec_info_,sizeof(CodecInst));
- return 0;
-}
-
-int32_t ModuleFileUtility::set_codec_info(const CodecInst& codecInst)
-{
-
- _codecId = kCodecNoCodec;
- if(STR_CASE_CMP(codecInst.plname, "PCMU") == 0)
- {
- _codecId = kCodecPcmu;
- }
- else if(STR_CASE_CMP(codecInst.plname, "PCMA") == 0)
- {
- _codecId = kCodecPcma;
- }
- else if(STR_CASE_CMP(codecInst.plname, "L16") == 0)
- {
- if(codecInst.plfreq == 8000)
- {
- _codecId = kCodecL16_8Khz;
- }
- else if(codecInst.plfreq == 16000)
- {
- _codecId = kCodecL16_16kHz;
- }
- else if(codecInst.plfreq == 32000)
- {
- _codecId = kCodecL16_32Khz;
- }
- }
-#ifdef WEBRTC_CODEC_ILBC
- else if(STR_CASE_CMP(codecInst.plname, "ilbc") == 0)
- {
- if(codecInst.pacsize == 160)
- {
- _codecId = kCodecIlbc20Ms;
- }
- else if(codecInst.pacsize == 240)
- {
- _codecId = kCodecIlbc30Ms;
- }
- }
-#endif
-#if(defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
- else if(STR_CASE_CMP(codecInst.plname, "isac") == 0)
- {
- if(codecInst.plfreq == 16000)
- {
- _codecId = kCodecIsac;
- }
- else if(codecInst.plfreq == 32000)
- {
- _codecId = kCodecIsacSwb;
- }
- }
-#endif
-#ifdef WEBRTC_CODEC_G722
- else if(STR_CASE_CMP(codecInst.plname, "G722") == 0)
- {
- _codecId = kCodecG722;
- }
-#endif
- if(_codecId == kCodecNoCodec)
- {
- return -1;
- }
- memcpy(&codec_info_, &codecInst, sizeof(CodecInst));
- return 0;
-}
-
-int32_t ModuleFileUtility::FileDurationMs(const char* fileName,
- const FileFormats fileFormat,
- const uint32_t freqInHz)
-{
-
- if(fileName == NULL)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id, "filename NULL");
- return -1;
- }
-
- int32_t time_in_ms = -1;
- struct stat file_size;
- if(stat(fileName,&file_size) == -1)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "failed to retrieve file size with stat!");
- return -1;
- }
- FileWrapper* inStreamObj = FileWrapper::Create();
- if(inStreamObj == NULL)
- {
- WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
- "failed to create InStream object!");
- return -1;
- }
- if(inStreamObj->OpenFile(fileName, true) == -1)
- {
- delete inStreamObj;
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "failed to open file %s!", fileName);
- return -1;
- }
-
- switch (fileFormat)
- {
- case kFileFormatWavFile:
- {
- if(ReadWavHeader(*inStreamObj) == -1)
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "failed to read WAV file header!");
- return -1;
- }
- time_in_ms = ((file_size.st_size - 44) /
- (_wavFormatObj.nAvgBytesPerSec/1000));
- break;
- }
- case kFileFormatPcm16kHzFile:
- {
- // 16 samples per ms. 2 bytes per sample.
- int32_t denominator = 16*2;
- time_in_ms = (file_size.st_size)/denominator;
- break;
- }
- case kFileFormatPcm8kHzFile:
- {
- // 8 samples per ms. 2 bytes per sample.
- int32_t denominator = 8*2;
- time_in_ms = (file_size.st_size)/denominator;
- break;
- }
- case kFileFormatCompressedFile:
- {
- int32_t cnt = 0;
- int32_t read_len = 0;
- char buf[64];
- do
- {
- read_len = inStreamObj->Read(&buf[cnt++], 1);
- if(read_len != 1)
- {
- return -1;
- }
- } while ((buf[cnt-1] != '\n') && (64 > cnt));
-
- if(cnt == 64)
- {
- return -1;
- }
- else
- {
- buf[cnt] = 0;
- }
-#ifdef WEBRTC_CODEC_ILBC
- if(!strcmp("#!iLBC20\n", buf))
- {
- // 20 ms takes 304 bits.
- // time_in_ms = file size * 8 / 304 * 20
- time_in_ms = ((file_size.st_size)*160)/304;
- break;
- }
- if(!strcmp("#!iLBC30\n", buf))
- {
- // 30 ms takes 400 bits.
- // file size in bytes * 8 / 400 is the number of
- // 30 ms frames in the file ->
- // time_in_ms = file size * 8 / 400 * 30
- time_in_ms = ((file_size.st_size)*240)/400;
- break;
- }
-#endif
- break;
- }
- case kFileFormatPreencodedFile:
- {
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "cannot determine duration of Pre-Encoded file!");
- break;
- }
- default:
- WEBRTC_TRACE(kTraceError, kTraceFile, _id,
- "unsupported file format %d!", fileFormat);
- break;
- }
- inStreamObj->CloseFile();
- delete inStreamObj;
- return time_in_ms;
-}
-
-uint32_t ModuleFileUtility::PlayoutPositionMs()
-{
- WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
- "ModuleFileUtility::PlayoutPosition()");
-
- if(_reading)
- {
- return _playoutPositionMs;
- }
- else
- {
- return 0;
- }
-}
-} // namespace webrtc
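
For reference, the frame-size and duration arithmetic that the deleted functions above rely on can be checked with a standalone sketch. This is illustrative only (the helper names are not part of WebRTC); it assumes mono 16-bit PCM, as the code above does.

#include <cstdint>
#include <cstdio>
#include <initializer_list>

// Mirrors InitPCMReading/ReadPCMData above: 10 ms of mono 16-bit PCM is
// (freq / 100) samples at 2 bytes per sample.
static uint32_t BytesPer10MsFrame(uint32_t freq_hz) {
  return 2 * freq_hz / 100;
}

// Mirrors the iLBC branch of FileDurationMs: one 20 ms frame is 304 bits,
// so ms = bytes * 8 / 304 * 20 = bytes * 160 / 304.
static int32_t Ilbc20DurationMs(int64_t file_size_bytes) {
  return static_cast<int32_t>(file_size_bytes * 160 / 304);
}

int main() {
  for (uint32_t f : {8000u, 16000u, 32000u}) {
    // Prints 160, 320 and 640 bytes for the three supported rates.
    std::printf("%u Hz -> %u bytes per 10 ms\n", f, BytesPer10MsFrame(f));
  }
  // A 38000-byte iLBC-20ms file holds 1000 frames, i.e. prints 20000 ms.
  std::printf("%d ms\n", Ilbc20DurationMs(38000));
  return 0;
}
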
diff --git a/webrtc/modules/media_file/source/media_file_utility.h b/webrtc/modules/media_file/source/media_file_utility.h
deleted file mode 100644
index 2823ceca8a..0000000000
--- a/webrtc/modules/media_file/source/media_file_utility.h
+++ /dev/null
@@ -1,284 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// Note: the class cannot be used for reading and writing at the same time.
-#ifndef WEBRTC_MODULES_MEDIA_FILE_SOURCE_MEDIA_FILE_UTILITY_H_
-#define WEBRTC_MODULES_MEDIA_FILE_SOURCE_MEDIA_FILE_UTILITY_H_
-
-#include <stdio.h>
-
-#include "webrtc/common_types.h"
-#include "webrtc/modules/media_file/interface/media_file_defines.h"
-
-namespace webrtc {
-class InStream;
-class OutStream;
-
-class ModuleFileUtility
-{
-public:
-
- ModuleFileUtility(const int32_t id);
- ~ModuleFileUtility();
-
- // Prepare for playing audio from stream.
- // startPointMs and stopPointMs, unless zero, specify what part of the file
- // should be read. From startPointMs ms to stopPointMs ms.
- int32_t InitWavReading(InStream& stream,
- const uint32_t startPointMs = 0,
- const uint32_t stopPointMs = 0);
-
- // Put 10-60ms of audio data from stream into the audioBuffer depending on
- // codec frame size. dataLengthInBytes indicates the size of audioBuffer.
- // The return value is the number of bytes written to audioBuffer.
- // Note: This API only plays mono audio but can be used on files
- // containing audio with more channels (in which case the audio will be
- // converted to mono).
- int32_t ReadWavDataAsMono(InStream& stream, int8_t* audioBuffer,
- const size_t dataLengthInBytes);
-
- // Put 10-60ms, depending on codec frame size, of audio data from file into
- // audioBufferLeft and audioBufferRight. The buffers contain the left and
- // right channel of played out stereo audio.
- // dataLengthInBytes indicates the size of both audioBufferLeft and
- // audioBufferRight.
- // The return value is the number of bytes read for each buffer.
- // Note: This API can only be successfully called for WAV files with stereo
- // audio.
- int32_t ReadWavDataAsStereo(InStream& wav,
- int8_t* audioBufferLeft,
- int8_t* audioBufferRight,
- const size_t bufferLength);
-
- // Prepare for recording audio to stream.
- // codecInst specifies the encoding of the audio data.
- // Note: codecInst.channels should be set to 2 for stereo (and 1 for
- // mono). Stereo is only supported for WAV files.
- int32_t InitWavWriting(OutStream& stream, const CodecInst& codecInst);
-
- // Write one audio frame, i.e. the bufferLength first bytes of audioBuffer,
- // to file. The audio frame size is determined by the codecInst.pacsize
- // parameter of the last successful StartRecordingAudioFile(..) call.
- // The return value is the number of bytes written to stream.
- int32_t WriteWavData(OutStream& stream,
- const int8_t* audioBuffer,
- const size_t bufferLength);
-
- // Finalizes the WAV header so that it is correct if nothing more will be
- // written to stream.
- // Note: this API must be called before closing stream to ensure that the
- // WAVE header is updated with the file size. Don't call this API
- // if more samples are to be written to stream.
- int32_t UpdateWavHeader(OutStream& stream);
-
- // Prepare for playing audio from stream.
- // startPointMs and stopPointMs, unless zero, specify what part of the file
- // should be read. From startPointMs ms to stopPointMs ms.
- // freqInHz is the PCM sampling frequency.
- // NOTE, allowed frequencies are 8000, 16000 and 32000 (Hz)
- int32_t InitPCMReading(InStream& stream,
- const uint32_t startPointMs = 0,
- const uint32_t stopPointMs = 0,
- const uint32_t freqInHz = 16000);
-
- // Put 10-60ms of audio data from stream into the audioBuffer depending on
- // codec frame size. dataLengthInBytes indicates the size of audioBuffer.
- // The return value is the number of bytes written to audioBuffer.
- int32_t ReadPCMData(InStream& stream, int8_t* audioBuffer,
- const size_t dataLengthInBytes);
-
- // Prepare for recording audio to stream.
- // freqInHz is the PCM sampling frequency.
- // NOTE, allowed frequencies are 8000, 16000 and 32000 (Hz)
- int32_t InitPCMWriting(OutStream& stream, const uint32_t freqInHz = 16000);
-
- // Write one 10ms audio frame, i.e. the bufferLength first bytes of
- // audioBuffer, to file. The audio frame size is determined by the freqInHz
- // parameter of the last successful InitPCMWriting(..) call.
- // The return value is the number of bytes written to stream.
- int32_t WritePCMData(OutStream& stream,
- const int8_t* audioBuffer,
- size_t bufferLength);
-
- // Prepare for playing audio from stream.
- // startPointMs and stopPointMs, unless zero, specify what part of the file
- // should be read. From startPointMs ms to stopPointMs ms.
- int32_t InitCompressedReading(InStream& stream,
- const uint32_t startPointMs = 0,
- const uint32_t stopPointMs = 0);
-
- // Put 10-60ms of audio data from stream into the audioBuffer depending on
- // codec frame size. dataLengthInBytes indicates the size of audioBuffer.
- // The return value is the number of bytes written to audioBuffer.
- int32_t ReadCompressedData(InStream& stream,
- int8_t* audioBuffer,
- const size_t dataLengthInBytes);
-
- // Prepare for recording audio to stream.
- // codecInst specifies the encoding of the audio data.
- int32_t InitCompressedWriting(OutStream& stream,
- const CodecInst& codecInst);
-
- // Write one audio frame, i.e. the bufferLength first bytes of audioBuffer,
- // to file. The audio frame size is determined by the codecInst.pacsize
- // parameter of the last successful InitCompressedWriting(..) call.
- // The return value is the number of bytes written to stream.
- // Note: bufferLength must be exactly one frame.
- int32_t WriteCompressedData(OutStream& stream,
- const int8_t* audioBuffer,
- const size_t bufferLength);
-
- // Prepare for playing audio from stream.
- // codecInst specifies the encoding of the audio data.
- int32_t InitPreEncodedReading(InStream& stream,
- const CodecInst& codecInst);
-
- // Put 10-60ms of audio data from stream into the audioBuffer depending on
- // codec frame size. dataLengthInBytes indicates the size of audioBuffer.
- // The return value is the number of bytes written to audioBuffer.
- int32_t ReadPreEncodedData(InStream& stream,
- int8_t* audioBuffer,
- const size_t dataLengthInBytes);
-
- // Prepare for recording audio to stream.
- // codecInst specifies the encoding of the audio data.
- int32_t InitPreEncodedWriting(OutStream& stream,
- const CodecInst& codecInst);
-
- // Write one audio frame, i.e. the bufferLength first bytes of audioBuffer,
- // to stream. The audio frame size is determined by the codecInst.pacsize
- // parameter of the last successful InitPreEncodedWriting(..) call.
- // The return value is the number of bytes written to stream.
- // Note: bufferLength must be exactly one frame.
- int32_t WritePreEncodedData(OutStream& stream,
- const int8_t* inData,
- const size_t dataLengthInBytes);
-
- // Returns the duration (in ms) of the file specified by fileName.
- // freqInHz specifies the sampling frequency of the file.
- int32_t FileDurationMs(const char* fileName,
- const FileFormats fileFormat,
- const uint32_t freqInHz = 16000);
-
- // Return the number of ms that have been played so far.
- uint32_t PlayoutPositionMs();
-
- // Update codecInst according to the current audio codec being used for
- // reading or writing.
- int32_t codec_info(CodecInst& codecInst);
-
-private:
- // Biggest WAV frame supported is 10 ms of 48 kHz, 2-channel, 16-bit audio.
- enum{WAV_MAX_BUFFER_SIZE = 480*2*2};
-
-
- int32_t InitWavCodec(uint32_t samplesPerSec,
- uint32_t channels,
- uint32_t bitsPerSample,
- uint32_t formatTag);
-
- // Parse the WAV header in stream.
- int32_t ReadWavHeader(InStream& stream);
-
- // Update the WAV header. freqInHz, bytesPerSample, channels, format,
- // lengthInBytes specify characteristics of the audio data.
- // freqInHz is the sampling frequency. bytesPerSample is the sample size in
- // bytes. channels is the number of channels, e.g. 1 is mono and 2 is
- // stereo. format is the encoding format (e.g. PCMU, PCMA, PCM etc).
- // lengthInBytes is the number of bytes the audio samples are using up.
- int32_t WriteWavHeader(OutStream& stream,
- const uint32_t freqInHz,
- const uint32_t bytesPerSample,
- const uint32_t channels,
- const uint32_t format,
- const uint32_t lengthInBytes);
-
- // Put dataLengthInBytes of audio data from stream into the audioBuffer.
- // The return value is the number of bytes written to audioBuffer.
- int32_t ReadWavData(InStream& stream, uint8_t* audioBuffer,
- const uint32_t dataLengthInBytes);
-
- // Update the current audio codec being used for reading or writing
- // according to codecInst.
- int32_t set_codec_info(const CodecInst& codecInst);
-
- struct WAVE_FMTINFO_header
- {
- int16_t formatTag;
- int16_t nChannels;
- int32_t nSamplesPerSec;
- int32_t nAvgBytesPerSec;
- int16_t nBlockAlign;
- int16_t nBitsPerSample;
- };
- // Identifiers for preencoded files.
- enum MediaFileUtility_CodecType
- {
- kCodecNoCodec = 0,
- kCodecIsac,
- kCodecIsacSwb,
- kCodecIsacLc,
- kCodecL16_8Khz,
- kCodecL16_16kHz,
- kCodecL16_32Khz,
- kCodecPcmu,
- kCodecPcma,
- kCodecIlbc20Ms,
- kCodecIlbc30Ms,
- kCodecG722,
- kCodecG722_1_32Kbps,
- kCodecG722_1_24Kbps,
- kCodecG722_1_16Kbps,
- kCodecG722_1c_48,
- kCodecG722_1c_32,
- kCodecG722_1c_24,
- kCodecAmr,
- kCodecAmrWb,
- kCodecG729,
- kCodecG729_1,
- kCodecG726_40,
- kCodecG726_32,
- kCodecG726_24,
- kCodecG726_16,
- kCodecSpeex8Khz,
- kCodecSpeex16Khz
- };
-
- // TODO (hellner): why store multiple formats. Just store either codec_info_
- // or _wavFormatObj and supply conversion functions.
- WAVE_FMTINFO_header _wavFormatObj;
- int32_t _dataSize; // Chunk size if reading a WAV file
- // Number of bytes to read. I.e. frame size in bytes. May be multiple
- // chunks if reading WAV.
- int32_t _readSizeBytes;
-
- int32_t _id;
-
- uint32_t _stopPointInMs;
- uint32_t _startPointInMs;
- uint32_t _playoutPositionMs;
- size_t _bytesWritten;
-
- CodecInst codec_info_;
- MediaFileUtility_CodecType _codecId;
-
- // The average number of bytes used for one audio sample.
- int32_t _bytesPerSample;
- int32_t _readPos;
-
- // Only reading or writing can be enabled, not both.
- bool _reading;
- bool _writing;
-
- // Scratch buffer used for turning stereo audio to mono.
- uint8_t _tempData[WAV_MAX_BUFFER_SIZE];
-};
-} // namespace webrtc
-#endif // WEBRTC_MODULES_MEDIA_FILE_SOURCE_MEDIA_FILE_UTILITY_H_
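
As a usage note for the interface deleted above: a minimal read loop might look as follows. This is a hedged sketch, not code from the tree; the trace id 0 and the stop condition are assumptions, and per ReadPCMData's rewind behaviour the file restarts from the beginning on EOF when the stream supports Rewind().

// Illustrative only: drain 10 ms frames of 16 kHz mono PCM from a stream
// using the ModuleFileUtility interface declared above.
void PlayPcmFile(webrtc::InStream& stream) {
  webrtc::ModuleFileUtility utility(0);  // 0: arbitrary trace id.
  if (utility.InitPCMReading(stream, 0, 0, 16000) != 0)
    return;
  int8_t frame[320];  // 10 ms at 16 kHz, 2 bytes per sample.
  // ReadPCMData returns the number of bytes read, or -1 on failure/EOF
  // (when the stream cannot be rewound).
  while (utility.ReadPCMData(stream, frame, sizeof(frame)) > 0) {
    // Hand the 10 ms frame to the playout path here.
  }
}
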
diff --git a/webrtc/modules/module_common_types_unittest.cc b/webrtc/modules/module_common_types_unittest.cc
index bc0b7a1a5b..acd58476a1 100644
--- a/webrtc/modules/module_common_types_unittest.cc
+++ b/webrtc/modules/module_common_types_unittest.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "testing/gtest/include/gtest/gtest.h"
diff --git a/webrtc/modules/modules.gyp b/webrtc/modules/modules.gyp
index f3ac454c19..a7dc0ffe53 100644
--- a/webrtc/modules/modules.gyp
+++ b/webrtc/modules/modules.gyp
@@ -49,352 +49,6 @@
},
'targets': [
{
- 'target_name': 'modules_unittests',
- 'type': '<(gtest_target_type)',
- 'defines': [
- '<@(audio_coding_defines)',
- ],
- 'dependencies': [
- 'acm_receive_test',
- 'acm_send_test',
- 'audio_coding_module',
- 'audio_conference_mixer',
'audio_device',
- 'audio_processing',
- 'audioproc_test_utils',
- 'bitrate_controller',
- 'bwe_simulator',
- 'cng',
- 'desktop_capture',
- 'isac_fix',
- 'media_file',
- 'neteq',
- 'neteq_test_support',
- 'neteq_unittest_tools',
- 'paced_sender',
- 'pcm16b', # Needed by NetEq tests.
- 'red',
- 'remote_bitrate_estimator',
- 'rtp_rtcp',
- 'video_codecs_test_framework',
- 'video_processing',
- 'webrtc_utility',
- 'webrtc_video_coding',
- '<@(neteq_dependencies)',
- '<(DEPTH)/testing/gmock.gyp:gmock',
- '<(DEPTH)/testing/gtest.gyp:gtest',
- '<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
- '<(webrtc_root)/common.gyp:webrtc_common',
- '<(webrtc_root)/common_audio/common_audio.gyp:common_audio',
- '<(webrtc_root)/modules/modules.gyp:video_capture',
- '<(webrtc_root)/modules/video_coding/codecs/vp8/vp8.gyp:webrtc_vp8',
- '<(webrtc_root)/modules/video_coding/codecs/vp9/vp9.gyp:webrtc_vp9',
- '<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
- '<(webrtc_root)/test/test.gyp:frame_generator',
- '<(webrtc_root)/test/test.gyp:rtp_test_utils',
- '<(webrtc_root)/test/test.gyp:test_support_main',
- '<(webrtc_root)/test/webrtc_test_common.gyp:webrtc_test_common',
- '<(webrtc_root)/tools/tools.gyp:agc_test_utils',
- ],
- 'sources': [
- 'audio_coding/codecs/cng/audio_encoder_cng_unittest.cc',
- 'audio_coding/main/acm2/acm_receiver_unittest_oldapi.cc',
- 'audio_coding/main/acm2/audio_coding_module_unittest_oldapi.cc',
- 'audio_coding/main/acm2/call_statistics_unittest.cc',
- 'audio_coding/main/acm2/codec_owner_unittest.cc',
- 'audio_coding/main/acm2/initial_delay_manager_unittest.cc',
- 'audio_coding/codecs/cng/cng_unittest.cc',
- 'audio_coding/codecs/isac/fix/source/filters_unittest.cc',
- 'audio_coding/codecs/isac/fix/source/filterbanks_unittest.cc',
- 'audio_coding/codecs/isac/fix/source/lpc_masking_model_unittest.cc',
- 'audio_coding/codecs/isac/fix/source/transform_unittest.cc',
- 'audio_coding/codecs/isac/main/source/audio_encoder_isac_unittest.cc',
- 'audio_coding/codecs/isac/main/source/isac_unittest.cc',
- 'audio_coding/codecs/isac/unittest.cc',
- 'audio_coding/codecs/opus/audio_encoder_opus_unittest.cc',
- 'audio_coding/codecs/opus/opus_unittest.cc',
- 'audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc',
- 'audio_coding/neteq/audio_classifier_unittest.cc',
- 'audio_coding/neteq/audio_multi_vector_unittest.cc',
- 'audio_coding/neteq/audio_vector_unittest.cc',
- 'audio_coding/neteq/background_noise_unittest.cc',
- 'audio_coding/neteq/buffer_level_filter_unittest.cc',
- 'audio_coding/neteq/comfort_noise_unittest.cc',
- 'audio_coding/neteq/decision_logic_unittest.cc',
- 'audio_coding/neteq/decoder_database_unittest.cc',
- 'audio_coding/neteq/delay_manager_unittest.cc',
- 'audio_coding/neteq/delay_peak_detector_unittest.cc',
- 'audio_coding/neteq/dsp_helper_unittest.cc',
- 'audio_coding/neteq/dtmf_buffer_unittest.cc',
- 'audio_coding/neteq/dtmf_tone_generator_unittest.cc',
- 'audio_coding/neteq/expand_unittest.cc',
- 'audio_coding/neteq/merge_unittest.cc',
- 'audio_coding/neteq/nack_unittest.cc',
- 'audio_coding/neteq/neteq_external_decoder_unittest.cc',
- 'audio_coding/neteq/neteq_impl_unittest.cc',
- 'audio_coding/neteq/neteq_network_stats_unittest.cc',
- 'audio_coding/neteq/neteq_stereo_unittest.cc',
- 'audio_coding/neteq/neteq_unittest.cc',
- 'audio_coding/neteq/normal_unittest.cc',
- 'audio_coding/neteq/packet_buffer_unittest.cc',
- 'audio_coding/neteq/payload_splitter_unittest.cc',
- 'audio_coding/neteq/post_decode_vad_unittest.cc',
- 'audio_coding/neteq/random_vector_unittest.cc',
- 'audio_coding/neteq/sync_buffer_unittest.cc',
- 'audio_coding/neteq/timestamp_scaler_unittest.cc',
- 'audio_coding/neteq/time_stretch_unittest.cc',
- 'audio_coding/neteq/mock/mock_audio_decoder.h',
- 'audio_coding/neteq/mock/mock_audio_vector.h',
- 'audio_coding/neteq/mock/mock_buffer_level_filter.h',
- 'audio_coding/neteq/mock/mock_decoder_database.h',
- 'audio_coding/neteq/mock/mock_delay_manager.h',
- 'audio_coding/neteq/mock/mock_delay_peak_detector.h',
- 'audio_coding/neteq/mock/mock_dtmf_buffer.h',
- 'audio_coding/neteq/mock/mock_dtmf_tone_generator.h',
- 'audio_coding/neteq/mock/mock_expand.h',
- 'audio_coding/neteq/mock/mock_external_decoder_pcm16b.h',
- 'audio_coding/neteq/mock/mock_packet_buffer.h',
- 'audio_coding/neteq/mock/mock_payload_splitter.h',
- 'audio_coding/neteq/tools/input_audio_file_unittest.cc',
- 'audio_coding/neteq/tools/packet_unittest.cc',
- 'audio_conference_mixer/test/audio_conference_mixer_unittest.cc',
- 'audio_device/fine_audio_buffer_unittest.cc',
- 'audio_processing/aec/echo_cancellation_unittest.cc',
- 'audio_processing/aec/system_delay_unittest.cc',
- 'audio_processing/agc/agc_manager_direct_unittest.cc',
- # TODO(ajm): Fix to match new interface.
- # 'audio_processing/agc/agc_unittest.cc',
- 'audio_processing/agc/histogram_unittest.cc',
- 'audio_processing/agc/mock_agc.h',
- 'audio_processing/beamformer/array_util_unittest.cc',
- 'audio_processing/beamformer/complex_matrix_unittest.cc',
- 'audio_processing/beamformer/covariance_matrix_generator_unittest.cc',
- 'audio_processing/beamformer/matrix_unittest.cc',
- 'audio_processing/beamformer/mock_nonlinear_beamformer.h',
- 'audio_processing/beamformer/nonlinear_beamformer_unittest.cc',
- 'audio_processing/echo_cancellation_impl_unittest.cc',
- 'audio_processing/intelligibility/intelligibility_enhancer_unittest.cc',
- 'audio_processing/intelligibility/intelligibility_utils_unittest.cc',
- 'audio_processing/splitting_filter_unittest.cc',
- 'audio_processing/transient/dyadic_decimator_unittest.cc',
- 'audio_processing/transient/file_utils.cc',
- 'audio_processing/transient/file_utils.h',
- 'audio_processing/transient/file_utils_unittest.cc',
- 'audio_processing/transient/moving_moments_unittest.cc',
- 'audio_processing/transient/transient_detector_unittest.cc',
- 'audio_processing/transient/transient_suppressor_unittest.cc',
- 'audio_processing/transient/wpd_node_unittest.cc',
- 'audio_processing/transient/wpd_tree_unittest.cc',
- 'audio_processing/utility/delay_estimator_unittest.cc',
- 'audio_processing/vad/gmm_unittest.cc',
- 'audio_processing/vad/pitch_based_vad_unittest.cc',
- 'audio_processing/vad/pitch_internal_unittest.cc',
- 'audio_processing/vad/pole_zero_filter_unittest.cc',
- 'audio_processing/vad/standalone_vad_unittest.cc',
- 'audio_processing/vad/vad_audio_proc_unittest.cc',
- 'audio_processing/vad/vad_circular_buffer_unittest.cc',
- 'audio_processing/vad/voice_activity_detector_unittest.cc',
- 'bitrate_controller/bitrate_allocator_unittest.cc',
- 'bitrate_controller/bitrate_controller_unittest.cc',
- 'bitrate_controller/send_side_bandwidth_estimation_unittest.cc',
- 'desktop_capture/desktop_and_cursor_composer_unittest.cc',
- 'desktop_capture/desktop_region_unittest.cc',
- 'desktop_capture/differ_block_unittest.cc',
- 'desktop_capture/differ_unittest.cc',
- 'desktop_capture/mouse_cursor_monitor_unittest.cc',
- 'desktop_capture/screen_capturer_helper_unittest.cc',
- 'desktop_capture/screen_capturer_mac_unittest.cc',
- 'desktop_capture/screen_capturer_mock_objects.h',
- 'desktop_capture/screen_capturer_unittest.cc',
- 'desktop_capture/window_capturer_unittest.cc',
- 'desktop_capture/win/cursor_unittest.cc',
- 'desktop_capture/win/cursor_unittest_resources.h',
- 'desktop_capture/win/cursor_unittest_resources.rc',
- 'media_file/source/media_file_unittest.cc',
- 'module_common_types_unittest.cc',
- 'pacing/bitrate_prober_unittest.cc',
- 'pacing/paced_sender_unittest.cc',
- 'pacing/packet_router_unittest.cc',
- 'remote_bitrate_estimator/bwe_simulations.cc',
- 'remote_bitrate_estimator/include/mock/mock_remote_bitrate_observer.h',
- 'remote_bitrate_estimator/include/mock/mock_remote_bitrate_estimator.h',
- 'remote_bitrate_estimator/inter_arrival_unittest.cc',
- 'remote_bitrate_estimator/overuse_detector_unittest.cc',
- 'remote_bitrate_estimator/rate_statistics_unittest.cc',
- 'remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time_unittest.cc',
- 'remote_bitrate_estimator/remote_bitrate_estimator_single_stream_unittest.cc',
- 'remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.cc',
- 'remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h',
- 'remote_bitrate_estimator/remote_estimator_proxy_unittest.cc',
- 'remote_bitrate_estimator/send_time_history_unittest.cc',
- 'remote_bitrate_estimator/test/bwe_test_framework_unittest.cc',
- 'remote_bitrate_estimator/test/bwe_unittest.cc',
- 'remote_bitrate_estimator/test/metric_recorder_unittest.cc',
- 'remote_bitrate_estimator/test/estimators/nada_unittest.cc',
- 'remote_bitrate_estimator/transport_feedback_adapter_unittest.cc',
- 'rtp_rtcp/source/mock/mock_rtp_payload_strategy.h',
- 'rtp_rtcp/source/byte_io_unittest.cc',
- 'rtp_rtcp/source/fec_receiver_unittest.cc',
- 'rtp_rtcp/source/fec_test_helper.cc',
- 'rtp_rtcp/source/fec_test_helper.h',
- 'rtp_rtcp/source/h264_sps_parser_unittest.cc',
- 'rtp_rtcp/source/h264_bitstream_parser_unittest.cc',
- 'rtp_rtcp/source/nack_rtx_unittest.cc',
- 'rtp_rtcp/source/packet_loss_stats_unittest.cc',
- 'rtp_rtcp/source/producer_fec_unittest.cc',
- 'rtp_rtcp/source/receive_statistics_unittest.cc',
- 'rtp_rtcp/source/remote_ntp_time_estimator_unittest.cc',
- 'rtp_rtcp/source/rtcp_format_remb_unittest.cc',
- 'rtp_rtcp/source/rtcp_packet_unittest.cc',
- 'rtp_rtcp/source/rtcp_packet/transport_feedback_unittest.cc',
- 'rtp_rtcp/source/rtcp_receiver_unittest.cc',
- 'rtp_rtcp/source/rtcp_sender_unittest.cc',
- 'rtp_rtcp/source/rtcp_utility_unittest.cc',
- 'rtp_rtcp/source/rtp_fec_unittest.cc',
- 'rtp_rtcp/source/rtp_format_h264_unittest.cc',
- 'rtp_rtcp/source/rtp_format_vp8_test_helper.cc',
- 'rtp_rtcp/source/rtp_format_vp8_test_helper.h',
- 'rtp_rtcp/source/rtp_format_vp8_unittest.cc',
- 'rtp_rtcp/source/rtp_format_vp9_unittest.cc',
- 'rtp_rtcp/source/rtp_packet_history_unittest.cc',
- 'rtp_rtcp/source/rtp_payload_registry_unittest.cc',
- 'rtp_rtcp/source/rtp_rtcp_impl_unittest.cc',
- 'rtp_rtcp/source/rtp_header_extension_unittest.cc',
- 'rtp_rtcp/source/rtp_sender_unittest.cc',
- 'rtp_rtcp/source/vp8_partition_aggregator_unittest.cc',
- 'rtp_rtcp/test/testAPI/test_api.cc',
- 'rtp_rtcp/test/testAPI/test_api.h',
- 'rtp_rtcp/test/testAPI/test_api_audio.cc',
- 'rtp_rtcp/test/testAPI/test_api_rtcp.cc',
- 'rtp_rtcp/test/testAPI/test_api_video.cc',
- 'utility/source/audio_frame_operations_unittest.cc',
- 'utility/source/file_player_unittests.cc',
- 'utility/source/process_thread_impl_unittest.cc',
- 'video_coding/codecs/test/packet_manipulator_unittest.cc',
- 'video_coding/codecs/test/stats_unittest.cc',
- 'video_coding/codecs/test/videoprocessor_unittest.cc',
- 'video_coding/codecs/vp8/default_temporal_layers_unittest.cc',
- 'video_coding/codecs/vp8/reference_picture_selection_unittest.cc',
- 'video_coding/codecs/vp8/screenshare_layers_unittest.cc',
- 'video_coding/codecs/vp8/simulcast_encoder_adapter_unittest.cc',
- 'video_coding/codecs/vp8/simulcast_unittest.cc',
- 'video_coding/codecs/vp8/simulcast_unittest.h',
- 'video_coding/main/interface/mock/mock_vcm_callbacks.h',
- 'video_coding/main/source/decoding_state_unittest.cc',
- 'video_coding/main/source/jitter_buffer_unittest.cc',
- 'video_coding/main/source/jitter_estimator_tests.cc',
- 'video_coding/main/source/media_optimization_unittest.cc',
- 'video_coding/main/source/receiver_unittest.cc',
- 'video_coding/main/source/session_info_unittest.cc',
- 'video_coding/main/source/timing_unittest.cc',
- 'video_coding/main/source/video_coding_robustness_unittest.cc',
- 'video_coding/main/source/video_receiver_unittest.cc',
- 'video_coding/main/source/video_sender_unittest.cc',
- 'video_coding/main/source/qm_select_unittest.cc',
- 'video_coding/main/source/test/stream_generator.cc',
- 'video_coding/main/source/test/stream_generator.h',
- 'video_coding/utility/quality_scaler_unittest.cc',
- 'video_processing/main/test/unit_test/brightness_detection_test.cc',
- 'video_processing/main/test/unit_test/content_metrics_test.cc',
- 'video_processing/main/test/unit_test/deflickering_test.cc',
- 'video_processing/main/test/unit_test/video_processing_unittest.cc',
- 'video_processing/main/test/unit_test/video_processing_unittest.h',
- ],
- 'conditions': [
- ['enable_bwe_test_logging==1', {
- 'defines': [ 'BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=1' ],
- }, {
- 'defines': [ 'BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0' ],
- 'sources!': [
- 'remote_bitrate_estimator/test/bwe_test_logging.cc'
- ],
- }],
- # Run screen/window capturer tests only on platforms where they are
- # supported.
- ['desktop_capture_supported==0', {
- 'sources!': [
- 'desktop_capture/desktop_and_cursor_composer_unittest.cc',
- 'desktop_capture/mouse_cursor_monitor_unittest.cc',
- 'desktop_capture/screen_capturer_helper_unittest.cc',
- 'desktop_capture/screen_capturer_mac_unittest.cc',
- 'desktop_capture/screen_capturer_mock_objects.h',
- 'desktop_capture/screen_capturer_unittest.cc',
- 'desktop_capture/window_capturer_unittest.cc',
- ],
- }],
- ['prefer_fixed_point==1', {
- 'defines': [ 'WEBRTC_AUDIOPROC_FIXED_PROFILE' ],
- }, {
- 'defines': [ 'WEBRTC_AUDIOPROC_FLOAT_PROFILE' ],
- }],
- ['enable_protobuf==1', {
- 'defines': [
- 'WEBRTC_AUDIOPROC_DEBUG_DUMP',
- ],
- 'dependencies': [
- 'audioproc_protobuf_utils',
- 'audioproc_unittest_proto',
- ],
- 'sources': [
- 'audio_processing/audio_processing_impl_unittest.cc',
- 'audio_processing/test/audio_processing_unittest.cc',
- 'audio_processing/test/test_utils.h',
- ],
- }],
- ['build_libvpx==1', {
- 'dependencies': [
- '<(libvpx_dir)/libvpx.gyp:libvpx_new',
- ],
- }],
- ['OS=="android"', {
- 'dependencies': [
- '<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
- ],
- # Need to disable error due to the line in
- # base/android/jni_android.h triggering it:
- # const BASE_EXPORT jobject GetApplicationContext()
- # error: type qualifiers ignored on function return type
- 'cflags': [
- '-Wno-ignored-qualifiers',
- ],
- 'sources': [
- 'audio_device/android/audio_device_unittest.cc',
- 'audio_device/android/audio_manager_unittest.cc',
- 'audio_device/android/ensure_initialized.cc',
- 'audio_device/android/ensure_initialized.h',
- ],
- }],
- ['OS=="ios"', {
- 'sources': [
- 'video_coding/codecs/h264/h264_video_toolbox_nalu_unittest.cc',
- 'audio_device/ios/audio_device_unittest_ios.cc',
- ],
- 'mac_bundle_resources': [
- '<(DEPTH)/resources/audio_coding/speech_mono_16kHz.pcm',
- '<(DEPTH)/resources/audio_coding/testfile32kHz.pcm',
- '<(DEPTH)/resources/audio_coding/teststereo32kHz.pcm',
- '<(DEPTH)/resources/audio_device/audio_short16.pcm',
- '<(DEPTH)/resources/audio_device/audio_short44.pcm',
- '<(DEPTH)/resources/audio_device/audio_short48.pcm',
- '<(DEPTH)/resources/audio_processing/agc/agc_no_circular_buffer.dat',
- '<(DEPTH)/resources/audio_processing/agc/agc_pitch_gain.dat',
- '<(DEPTH)/resources/audio_processing/agc/agc_pitch_lag.dat',
- '<(DEPTH)/resources/audio_processing/agc/agc_spectral_peak.dat',
- '<(DEPTH)/resources/audio_processing/agc/agc_voicing_prob.dat',
- '<(DEPTH)/resources/audio_processing/agc/agc_with_circular_buffer.dat',
- '<(DEPTH)/resources/short_mixed_mono_48.dat',
- '<(DEPTH)/resources/short_mixed_mono_48.pcm',
- '<(DEPTH)/resources/short_mixed_stereo_48.dat',
- '<(DEPTH)/resources/short_mixed_stereo_48.pcm',
- ],
- }],
- ],
- # Disable warnings to enable Win64 build, issue 1323.
- 'msvs_disabled_warnings': [
- 4267, # size_t to int truncation.
- ],
- },
- {
'target_name': 'modules_tests',
'type': '<(gtest_target_type)',
'dependencies': [
@@ -417,25 +71,24 @@
'<@(audio_coding_defines)',
],
'sources': [
- 'audio_coding/main/test/APITest.cc',
- 'audio_coding/main/test/Channel.cc',
- 'audio_coding/main/test/EncodeDecodeTest.cc',
- 'audio_coding/main/test/PCMFile.cc',
- 'audio_coding/main/test/PacketLossTest.cc',
- 'audio_coding/main/test/RTPFile.cc',
- 'audio_coding/main/test/SpatialAudio.cc',
- 'audio_coding/main/test/TestAllCodecs.cc',
- 'audio_coding/main/test/TestRedFec.cc',
- 'audio_coding/main/test/TestStereo.cc',
- 'audio_coding/main/test/TestVADDTX.cc',
- 'audio_coding/main/test/Tester.cc',
- 'audio_coding/main/test/TimedTrace.cc',
- 'audio_coding/main/test/TwoWayCommunication.cc',
- 'audio_coding/main/test/iSACTest.cc',
- 'audio_coding/main/test/initial_delay_unittest.cc',
- 'audio_coding/main/test/opus_test.cc',
- 'audio_coding/main/test/target_delay_unittest.cc',
- 'audio_coding/main/test/utility.cc',
+ 'audio_coding/test/APITest.cc',
+ 'audio_coding/test/Channel.cc',
+ 'audio_coding/test/EncodeDecodeTest.cc',
+ 'audio_coding/test/PCMFile.cc',
+ 'audio_coding/test/PacketLossTest.cc',
+ 'audio_coding/test/RTPFile.cc',
+ 'audio_coding/test/SpatialAudio.cc',
+ 'audio_coding/test/TestAllCodecs.cc',
+ 'audio_coding/test/TestRedFec.cc',
+ 'audio_coding/test/TestStereo.cc',
+ 'audio_coding/test/TestVADDTX.cc',
+ 'audio_coding/test/Tester.cc',
+ 'audio_coding/test/TimedTrace.cc',
+ 'audio_coding/test/TwoWayCommunication.cc',
+ 'audio_coding/test/iSACTest.cc',
+ 'audio_coding/test/opus_test.cc',
+ 'audio_coding/test/target_delay_unittest.cc',
+ 'audio_coding/test/utility.cc',
'rtp_rtcp/test/testFec/test_fec.cc',
'video_coding/codecs/test/videoprocessor_integrationtest.cc',
'video_coding/codecs/vp8/test/vp8_impl_unittest.cc',
@@ -450,6 +103,379 @@
},
],
'conditions': [
+ # Does not compile on iOS for ia32 or x64: webrtc:4755.
+ ['OS!="ios" or target_arch=="arm" or target_arch=="arm64"', {
+ 'targets': [
+ {
+ 'target_name': 'modules_unittests',
+ 'type': '<(gtest_target_type)',
+ 'defines': [
+ '<@(audio_coding_defines)',
+ ],
+ 'dependencies': [
+ 'acm_receive_test',
+ 'acm_send_test',
+ 'audio_coding_module',
+ 'audio_conference_mixer',
+ 'audio_device',
+ 'audio_processing',
+ 'audioproc_test_utils',
+ 'bitrate_controller',
+ 'bwe_simulator',
+ 'cng',
+ 'desktop_capture',
+ 'isac_fix',
+ 'media_file',
+ 'neteq',
+ 'neteq_test_support',
+ 'neteq_unittest_tools',
+ 'paced_sender',
+ 'pcm16b', # Needed by NetEq tests.
+ 'red',
+ 'remote_bitrate_estimator',
+ 'rtp_rtcp',
+ 'video_codecs_test_framework',
+ 'video_processing',
+ 'webrtc_utility',
+ 'webrtc_video_coding',
+ '<@(neteq_dependencies)',
+ '<(DEPTH)/testing/gmock.gyp:gmock',
+ '<(DEPTH)/testing/gtest.gyp:gtest',
+ '<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
+ '<(webrtc_root)/common.gyp:webrtc_common',
+ '<(webrtc_root)/common_audio/common_audio.gyp:common_audio',
+ '<(webrtc_root)/common_video/common_video.gyp:common_video',
+ '<(webrtc_root)/modules/modules.gyp:video_capture',
+ '<(webrtc_root)/modules/video_coding/codecs/vp8/vp8.gyp:webrtc_vp8',
+ '<(webrtc_root)/modules/video_coding/codecs/vp9/vp9.gyp:webrtc_vp9',
+ '<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
+ '<(webrtc_root)/test/test.gyp:fake_video_frames',
+ '<(webrtc_root)/test/test.gyp:rtp_test_utils',
+ '<(webrtc_root)/test/test.gyp:test_support_main',
+ '<(webrtc_root)/test/webrtc_test_common.gyp:webrtc_test_common',
+ '<(webrtc_root)/tools/tools.gyp:agc_test_utils',
+ ],
+ 'sources': [
+ 'audio_coding/codecs/cng/audio_encoder_cng_unittest.cc',
+ 'audio_coding/acm2/acm_receiver_unittest_oldapi.cc',
+ 'audio_coding/acm2/audio_coding_module_unittest_oldapi.cc',
+ 'audio_coding/acm2/call_statistics_unittest.cc',
+ 'audio_coding/acm2/codec_manager_unittest.cc',
+ 'audio_coding/acm2/initial_delay_manager_unittest.cc',
+ 'audio_coding/acm2/rent_a_codec_unittest.cc',
+ 'audio_coding/codecs/cng/cng_unittest.cc',
+ 'audio_coding/codecs/isac/fix/source/filters_unittest.cc',
+ 'audio_coding/codecs/isac/fix/source/filterbanks_unittest.cc',
+ 'audio_coding/codecs/isac/fix/source/lpc_masking_model_unittest.cc',
+ 'audio_coding/codecs/isac/fix/source/transform_unittest.cc',
+ 'audio_coding/codecs/isac/main/source/audio_encoder_isac_unittest.cc',
+ 'audio_coding/codecs/isac/main/source/isac_unittest.cc',
+ 'audio_coding/codecs/isac/unittest.cc',
+ 'audio_coding/codecs/opus/audio_encoder_opus_unittest.cc',
+ 'audio_coding/codecs/opus/opus_unittest.cc',
+ 'audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc',
+ 'audio_coding/neteq/audio_classifier_unittest.cc',
+ 'audio_coding/neteq/audio_multi_vector_unittest.cc',
+ 'audio_coding/neteq/audio_vector_unittest.cc',
+ 'audio_coding/neteq/background_noise_unittest.cc',
+ 'audio_coding/neteq/buffer_level_filter_unittest.cc',
+ 'audio_coding/neteq/comfort_noise_unittest.cc',
+ 'audio_coding/neteq/decision_logic_unittest.cc',
+ 'audio_coding/neteq/decoder_database_unittest.cc',
+ 'audio_coding/neteq/delay_manager_unittest.cc',
+ 'audio_coding/neteq/delay_peak_detector_unittest.cc',
+ 'audio_coding/neteq/dsp_helper_unittest.cc',
+ 'audio_coding/neteq/dtmf_buffer_unittest.cc',
+ 'audio_coding/neteq/dtmf_tone_generator_unittest.cc',
+ 'audio_coding/neteq/expand_unittest.cc',
+ 'audio_coding/neteq/merge_unittest.cc',
+ 'audio_coding/neteq/nack_unittest.cc',
+ 'audio_coding/neteq/neteq_external_decoder_unittest.cc',
+ 'audio_coding/neteq/neteq_impl_unittest.cc',
+ 'audio_coding/neteq/neteq_network_stats_unittest.cc',
+ 'audio_coding/neteq/neteq_stereo_unittest.cc',
+ 'audio_coding/neteq/neteq_unittest.cc',
+ 'audio_coding/neteq/normal_unittest.cc',
+ 'audio_coding/neteq/packet_buffer_unittest.cc',
+ 'audio_coding/neteq/payload_splitter_unittest.cc',
+ 'audio_coding/neteq/post_decode_vad_unittest.cc',
+ 'audio_coding/neteq/random_vector_unittest.cc',
+ 'audio_coding/neteq/sync_buffer_unittest.cc',
+ 'audio_coding/neteq/timestamp_scaler_unittest.cc',
+ 'audio_coding/neteq/time_stretch_unittest.cc',
+ 'audio_coding/neteq/mock/mock_audio_decoder.h',
+ 'audio_coding/neteq/mock/mock_audio_vector.h',
+ 'audio_coding/neteq/mock/mock_buffer_level_filter.h',
+ 'audio_coding/neteq/mock/mock_decoder_database.h',
+ 'audio_coding/neteq/mock/mock_delay_manager.h',
+ 'audio_coding/neteq/mock/mock_delay_peak_detector.h',
+ 'audio_coding/neteq/mock/mock_dtmf_buffer.h',
+ 'audio_coding/neteq/mock/mock_dtmf_tone_generator.h',
+ 'audio_coding/neteq/mock/mock_expand.h',
+ 'audio_coding/neteq/mock/mock_external_decoder_pcm16b.h',
+ 'audio_coding/neteq/mock/mock_packet_buffer.h',
+ 'audio_coding/neteq/mock/mock_payload_splitter.h',
+ 'audio_coding/neteq/tools/input_audio_file_unittest.cc',
+ 'audio_coding/neteq/tools/packet_unittest.cc',
+ 'audio_conference_mixer/test/audio_conference_mixer_unittest.cc',
+ 'audio_device/fine_audio_buffer_unittest.cc',
+ 'audio_processing/aec/echo_cancellation_unittest.cc',
+ 'audio_processing/aec/system_delay_unittest.cc',
+ 'audio_processing/agc/agc_manager_direct_unittest.cc',
+ # TODO(ajm): Fix to match new interface.
+ # 'audio_processing/agc/agc_unittest.cc',
+ 'audio_processing/agc/histogram_unittest.cc',
+ 'audio_processing/agc/mock_agc.h',
+ 'audio_processing/beamformer/array_util_unittest.cc',
+ 'audio_processing/beamformer/complex_matrix_unittest.cc',
+ 'audio_processing/beamformer/covariance_matrix_generator_unittest.cc',
+ 'audio_processing/beamformer/matrix_unittest.cc',
+ 'audio_processing/beamformer/mock_nonlinear_beamformer.h',
+ 'audio_processing/beamformer/nonlinear_beamformer_unittest.cc',
+ 'audio_processing/echo_cancellation_impl_unittest.cc',
+ 'audio_processing/intelligibility/intelligibility_enhancer_unittest.cc',
+ 'audio_processing/intelligibility/intelligibility_utils_unittest.cc',
+ 'audio_processing/splitting_filter_unittest.cc',
+ 'audio_processing/transient/dyadic_decimator_unittest.cc',
+ 'audio_processing/transient/file_utils.cc',
+ 'audio_processing/transient/file_utils.h',
+ 'audio_processing/transient/file_utils_unittest.cc',
+ 'audio_processing/transient/moving_moments_unittest.cc',
+ 'audio_processing/transient/transient_detector_unittest.cc',
+ 'audio_processing/transient/transient_suppressor_unittest.cc',
+ 'audio_processing/transient/wpd_node_unittest.cc',
+ 'audio_processing/transient/wpd_tree_unittest.cc',
+ 'audio_processing/utility/delay_estimator_unittest.cc',
+ 'audio_processing/vad/gmm_unittest.cc',
+ 'audio_processing/vad/pitch_based_vad_unittest.cc',
+ 'audio_processing/vad/pitch_internal_unittest.cc',
+ 'audio_processing/vad/pole_zero_filter_unittest.cc',
+ 'audio_processing/vad/standalone_vad_unittest.cc',
+ 'audio_processing/vad/vad_audio_proc_unittest.cc',
+ 'audio_processing/vad/vad_circular_buffer_unittest.cc',
+ 'audio_processing/vad/voice_activity_detector_unittest.cc',
+ 'bitrate_controller/bitrate_controller_unittest.cc',
+ 'bitrate_controller/send_side_bandwidth_estimation_unittest.cc',
+ 'desktop_capture/desktop_and_cursor_composer_unittest.cc',
+ 'desktop_capture/desktop_region_unittest.cc',
+ 'desktop_capture/differ_block_unittest.cc',
+ 'desktop_capture/differ_unittest.cc',
+ 'desktop_capture/mouse_cursor_monitor_unittest.cc',
+ 'desktop_capture/screen_capturer_helper_unittest.cc',
+ 'desktop_capture/screen_capturer_mac_unittest.cc',
+ 'desktop_capture/screen_capturer_mock_objects.h',
+ 'desktop_capture/screen_capturer_unittest.cc',
+ 'desktop_capture/window_capturer_unittest.cc',
+ 'desktop_capture/win/cursor_unittest.cc',
+ 'desktop_capture/win/cursor_unittest_resources.h',
+ 'desktop_capture/win/cursor_unittest_resources.rc',
+ 'media_file/media_file_unittest.cc',
+ 'module_common_types_unittest.cc',
+ 'pacing/bitrate_prober_unittest.cc',
+ 'pacing/paced_sender_unittest.cc',
+ 'pacing/packet_router_unittest.cc',
+ 'remote_bitrate_estimator/bwe_simulations.cc',
+ 'remote_bitrate_estimator/include/mock/mock_remote_bitrate_observer.h',
+ 'remote_bitrate_estimator/include/mock/mock_remote_bitrate_estimator.h',
+ 'remote_bitrate_estimator/inter_arrival_unittest.cc',
+ 'remote_bitrate_estimator/overuse_detector_unittest.cc',
+ 'remote_bitrate_estimator/rate_statistics_unittest.cc',
+ 'remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time_unittest.cc',
+ 'remote_bitrate_estimator/remote_bitrate_estimator_single_stream_unittest.cc',
+ 'remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.cc',
+ 'remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h',
+ 'remote_bitrate_estimator/remote_estimator_proxy_unittest.cc',
+ 'remote_bitrate_estimator/send_time_history_unittest.cc',
+ 'remote_bitrate_estimator/test/bwe_test_framework_unittest.cc',
+ 'remote_bitrate_estimator/test/bwe_unittest.cc',
+ 'remote_bitrate_estimator/test/metric_recorder_unittest.cc',
+ 'remote_bitrate_estimator/test/estimators/nada_unittest.cc',
+ 'remote_bitrate_estimator/transport_feedback_adapter_unittest.cc',
+ 'rtp_rtcp/source/mock/mock_rtp_payload_strategy.h',
+ 'rtp_rtcp/source/byte_io_unittest.cc',
+ 'rtp_rtcp/source/fec_receiver_unittest.cc',
+ 'rtp_rtcp/source/fec_test_helper.cc',
+ 'rtp_rtcp/source/fec_test_helper.h',
+ 'rtp_rtcp/source/h264_sps_parser_unittest.cc',
+ 'rtp_rtcp/source/h264_bitstream_parser_unittest.cc',
+ 'rtp_rtcp/source/nack_rtx_unittest.cc',
+ 'rtp_rtcp/source/packet_loss_stats_unittest.cc',
+ 'rtp_rtcp/source/producer_fec_unittest.cc',
+ 'rtp_rtcp/source/receive_statistics_unittest.cc',
+ 'rtp_rtcp/source/remote_ntp_time_estimator_unittest.cc',
+ 'rtp_rtcp/source/rtcp_format_remb_unittest.cc',
+ 'rtp_rtcp/source/rtcp_packet_unittest.cc',
+ 'rtp_rtcp/source/rtcp_packet/app_unittest.cc',
+ 'rtp_rtcp/source/rtcp_packet/bye_unittest.cc',
+ 'rtp_rtcp/source/rtcp_packet/compound_packet_unittest.cc',
+ 'rtp_rtcp/source/rtcp_packet/dlrr_unittest.cc',
+ 'rtp_rtcp/source/rtcp_packet/extended_jitter_report_unittest.cc',
+ 'rtp_rtcp/source/rtcp_packet/nack_unittest.cc',
+ 'rtp_rtcp/source/rtcp_packet/pli_unittest.cc',
+ 'rtp_rtcp/source/rtcp_packet/receiver_report_unittest.cc',
+ 'rtp_rtcp/source/rtcp_packet/report_block_unittest.cc',
+ 'rtp_rtcp/source/rtcp_packet/rrtr_unittest.cc',
+ 'rtp_rtcp/source/rtcp_packet/sli_unittest.cc',
+ 'rtp_rtcp/source/rtcp_packet/tmmbn_unittest.cc',
+ 'rtp_rtcp/source/rtcp_packet/tmmbr_unittest.cc',
+ 'rtp_rtcp/source/rtcp_packet/transport_feedback_unittest.cc',
+ 'rtp_rtcp/source/rtcp_packet/voip_metric_unittest.cc',
+ 'rtp_rtcp/source/rtcp_receiver_unittest.cc',
+ 'rtp_rtcp/source/rtcp_sender_unittest.cc',
+ 'rtp_rtcp/source/rtcp_utility_unittest.cc',
+ 'rtp_rtcp/source/rtp_fec_unittest.cc',
+ 'rtp_rtcp/source/rtp_format_h264_unittest.cc',
+ 'rtp_rtcp/source/rtp_format_vp8_test_helper.cc',
+ 'rtp_rtcp/source/rtp_format_vp8_test_helper.h',
+ 'rtp_rtcp/source/rtp_format_vp8_unittest.cc',
+ 'rtp_rtcp/source/rtp_format_vp9_unittest.cc',
+ 'rtp_rtcp/source/rtp_packet_history_unittest.cc',
+ 'rtp_rtcp/source/rtp_payload_registry_unittest.cc',
+ 'rtp_rtcp/source/rtp_rtcp_impl_unittest.cc',
+ 'rtp_rtcp/source/rtp_header_extension_unittest.cc',
+ 'rtp_rtcp/source/rtp_sender_unittest.cc',
+ 'rtp_rtcp/source/time_util_unittest.cc',
+ 'rtp_rtcp/source/vp8_partition_aggregator_unittest.cc',
+ 'rtp_rtcp/test/testAPI/test_api.cc',
+ 'rtp_rtcp/test/testAPI/test_api.h',
+ 'rtp_rtcp/test/testAPI/test_api_audio.cc',
+ 'rtp_rtcp/test/testAPI/test_api_rtcp.cc',
+ 'rtp_rtcp/test/testAPI/test_api_video.cc',
+ 'utility/source/audio_frame_operations_unittest.cc',
+ 'utility/source/file_player_unittests.cc',
+ 'utility/source/process_thread_impl_unittest.cc',
+ 'video_coding/codecs/test/packet_manipulator_unittest.cc',
+ 'video_coding/codecs/test/stats_unittest.cc',
+ 'video_coding/codecs/test/videoprocessor_unittest.cc',
+ 'video_coding/codecs/vp8/default_temporal_layers_unittest.cc',
+ 'video_coding/codecs/vp8/reference_picture_selection_unittest.cc',
+ 'video_coding/codecs/vp8/screenshare_layers_unittest.cc',
+ 'video_coding/codecs/vp8/simulcast_encoder_adapter_unittest.cc',
+ 'video_coding/codecs/vp8/simulcast_unittest.cc',
+ 'video_coding/codecs/vp8/simulcast_unittest.h',
+ 'video_coding/codecs/vp9/screenshare_layers_unittest.cc',
+ 'video_coding/include/mock/mock_vcm_callbacks.h',
+ 'video_coding/decoding_state_unittest.cc',
+ 'video_coding/jitter_buffer_unittest.cc',
+ 'video_coding/jitter_estimator_tests.cc',
+ 'video_coding/media_optimization_unittest.cc',
+ 'video_coding/receiver_unittest.cc',
+ 'video_coding/session_info_unittest.cc',
+ 'video_coding/timing_unittest.cc',
+ 'video_coding/video_coding_robustness_unittest.cc',
+ 'video_coding/video_receiver_unittest.cc',
+ 'video_coding/video_sender_unittest.cc',
+ 'video_coding/qm_select_unittest.cc',
+ 'video_coding/test/stream_generator.cc',
+ 'video_coding/test/stream_generator.h',
+ 'video_coding/utility/quality_scaler_unittest.cc',
+ 'video_processing/test/brightness_detection_test.cc',
+ 'video_processing/test/content_metrics_test.cc',
+ 'video_processing/test/deflickering_test.cc',
+ 'video_processing/test/denoiser_test.cc',
+ 'video_processing/test/video_processing_unittest.cc',
+ 'video_processing/test/video_processing_unittest.h',
+ ],
+ 'conditions': [
+ ['enable_bwe_test_logging==1', {
+ 'defines': [ 'BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=1' ],
+ }, {
+ 'defines': [ 'BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0' ],
+ 'sources!': [
+ 'remote_bitrate_estimator/test/bwe_test_logging.cc'
+ ],
+ }],
+ # Run screen/window capturer tests only on platforms where they are
+ # supported.
+ ['desktop_capture_supported==0', {
+ 'sources!': [
+ 'desktop_capture/desktop_and_cursor_composer_unittest.cc',
+ 'desktop_capture/mouse_cursor_monitor_unittest.cc',
+ 'desktop_capture/screen_capturer_helper_unittest.cc',
+ 'desktop_capture/screen_capturer_mac_unittest.cc',
+ 'desktop_capture/screen_capturer_mock_objects.h',
+ 'desktop_capture/screen_capturer_unittest.cc',
+ 'desktop_capture/window_capturer_unittest.cc',
+ ],
+ }],
+ ['prefer_fixed_point==1', {
+ 'defines': [ 'WEBRTC_AUDIOPROC_FIXED_PROFILE' ],
+ }, {
+ 'defines': [ 'WEBRTC_AUDIOPROC_FLOAT_PROFILE' ],
+ }],
+ ['enable_protobuf==1', {
+ 'defines': [
+ 'WEBRTC_AUDIOPROC_DEBUG_DUMP',
+ 'WEBRTC_NETEQ_UNITTEST_BITEXACT',
+ ],
+ 'dependencies': [
+ 'audioproc_protobuf_utils',
+ 'audioproc_unittest_proto',
+ 'neteq_unittest_proto',
+ ],
+ 'sources': [
+ 'audio_processing/audio_processing_impl_locking_unittest.cc',
+ 'audio_processing/audio_processing_impl_unittest.cc',
+ 'audio_processing/test/audio_processing_unittest.cc',
+ 'audio_processing/test/debug_dump_test.cc',
+ 'audio_processing/test/test_utils.h',
+ ],
+ }],
+ ['build_libvpx==1', {
+ 'dependencies': [
+ '<(libvpx_dir)/libvpx.gyp:libvpx_new',
+ ],
+ }],
+ ['OS=="android"', {
+ 'dependencies': [
+ '<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
+ ],
+ # Need to disable error due to the line in
+ # base/android/jni_android.h triggering it:
+ # const BASE_EXPORT jobject GetApplicationContext()
+ # error: type qualifiers ignored on function return type
+ 'cflags': [
+ '-Wno-ignored-qualifiers',
+ ],
+ 'sources': [
+ 'audio_device/android/audio_device_unittest.cc',
+ 'audio_device/android/audio_manager_unittest.cc',
+ 'audio_device/android/ensure_initialized.cc',
+ 'audio_device/android/ensure_initialized.h',
+ ],
+ }],
+ ['OS=="ios"', {
+ 'sources': [
+ 'video_coding/codecs/h264/h264_video_toolbox_nalu_unittest.cc',
+ 'audio_device/ios/audio_device_unittest_ios.cc',
+ ],
+ 'mac_bundle_resources': [
+ '<(DEPTH)/resources/audio_coding/speech_mono_16kHz.pcm',
+ '<(DEPTH)/resources/audio_coding/testfile32kHz.pcm',
+ '<(DEPTH)/resources/audio_coding/teststereo32kHz.pcm',
+ '<(DEPTH)/resources/audio_device/audio_short16.pcm',
+ '<(DEPTH)/resources/audio_device/audio_short44.pcm',
+ '<(DEPTH)/resources/audio_device/audio_short48.pcm',
+ '<(DEPTH)/resources/audio_processing/agc/agc_no_circular_buffer.dat',
+ '<(DEPTH)/resources/audio_processing/agc/agc_pitch_gain.dat',
+ '<(DEPTH)/resources/audio_processing/agc/agc_pitch_lag.dat',
+ '<(DEPTH)/resources/audio_processing/agc/agc_spectral_peak.dat',
+ '<(DEPTH)/resources/audio_processing/agc/agc_voicing_prob.dat',
+ '<(DEPTH)/resources/audio_processing/agc/agc_with_circular_buffer.dat',
+ '<(DEPTH)/resources/short_mixed_mono_48.dat',
+ '<(DEPTH)/resources/short_mixed_mono_48.pcm',
+ '<(DEPTH)/resources/short_mixed_stereo_48.dat',
+ '<(DEPTH)/resources/short_mixed_stereo_48.pcm',
+ ],
+ }],
+ ],
+ # Disable warnings to enable Win64 build, issue 1323.
+ 'msvs_disabled_warnings': [
+ 4267, # size_t to int truncation.
+ ],
+ },
+ ],
+ }],
['OS=="android"', {
'targets': [
{
diff --git a/webrtc/modules/modules_unittests.isolate b/webrtc/modules/modules_unittests.isolate
index ba444df8c1..d988821af0 100644
--- a/webrtc/modules/modules_unittests.isolate
+++ b/webrtc/modules/modules_unittests.isolate
@@ -27,10 +27,7 @@
'<(DEPTH)/resources/audio_coding/neteq4_universal_ref.pcm',
'<(DEPTH)/resources/audio_coding/neteq4_universal_ref_win_32.pcm',
'<(DEPTH)/resources/audio_coding/neteq4_universal_ref_win_64.pcm',
- '<(DEPTH)/resources/audio_coding/neteq_network_stats.dat',
- '<(DEPTH)/resources/audio_coding/neteq_rtcp_stats.dat',
'<(DEPTH)/resources/audio_coding/neteq_universal_new.rtp',
- '<(DEPTH)/resources/audio_coding/neteq_universal_ref.pcm',
'<(DEPTH)/resources/audio_coding/speech_mono_16kHz.pcm',
'<(DEPTH)/resources/audio_coding/speech_mono_32_48kHz.pcm',
'<(DEPTH)/resources/audio_coding/testfile32kHz.pcm',
diff --git a/webrtc/modules/pacing/BUILD.gn b/webrtc/modules/pacing/BUILD.gn
index 3e478c1e76..0354c64fcb 100644
--- a/webrtc/modules/pacing/BUILD.gn
+++ b/webrtc/modules/pacing/BUILD.gn
@@ -10,10 +10,10 @@ source_set("pacing") {
sources = [
"bitrate_prober.cc",
"bitrate_prober.h",
- "include/paced_sender.h",
- "include/packet_router.h",
"paced_sender.cc",
+ "paced_sender.h",
"packet_router.cc",
+ "packet_router.h",
]
configs += [ "../..:common_config" ]
diff --git a/webrtc/modules/pacing/bitrate_prober.cc b/webrtc/modules/pacing/bitrate_prober.cc
index bbbe54f54e..41ad5fa11a 100644
--- a/webrtc/modules/pacing/bitrate_prober.cc
+++ b/webrtc/modules/pacing/bitrate_prober.cc
@@ -15,8 +15,8 @@
#include <limits>
#include <sstream>
-#include "webrtc/modules/pacing/include/paced_sender.h"
-#include "webrtc/system_wrappers/include/logging.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/modules/pacing/paced_sender.h"
namespace webrtc {
diff --git a/webrtc/modules/pacing/include/mock/mock_paced_sender.h b/webrtc/modules/pacing/include/mock/mock_paced_sender.h
deleted file mode 100644
index b2cefdff8b..0000000000
--- a/webrtc/modules/pacing/include/mock/mock_paced_sender.h
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_PACING_INCLUDE_MOCK_MOCK_PACED_SENDER_H_
-#define WEBRTC_MODULES_PACING_INCLUDE_MOCK_MOCK_PACED_SENDER_H_
-
-#include "testing/gmock/include/gmock/gmock.h"
-
-#include <vector>
-
-#include "webrtc/modules/pacing/include/paced_sender.h"
-#include "webrtc/system_wrappers/include/clock.h"
-
-namespace webrtc {
-
-class MockPacedSender : public PacedSender {
- public:
- MockPacedSender() : PacedSender(Clock::GetRealTimeClock(), NULL, 0, 0, 0) {}
- MOCK_METHOD6(SendPacket, bool(Priority priority,
- uint32_t ssrc,
- uint16_t sequence_number,
- int64_t capture_time_ms,
- size_t bytes,
- bool retransmission));
- MOCK_CONST_METHOD0(QueueInMs, int64_t());
- MOCK_CONST_METHOD0(QueueInPackets, int());
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_PACING_INCLUDE_MOCK_MOCK_PACED_SENDER_H_
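
A hedged sketch of how such a mock is consumed in a gmock test; the test name and expected value are illustrative, not taken from the tree.

#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"

using ::testing::Return;

// Illustrative only: stubs the mocked QueueInMs() accessor declared above.
TEST(MockPacedSenderExample, ReportsQueueDelay) {
  webrtc::MockPacedSender pacer;
  EXPECT_CALL(pacer, QueueInMs()).WillOnce(Return(25));
  EXPECT_EQ(25, pacer.QueueInMs());
}
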
diff --git a/webrtc/modules/pacing/include/paced_sender.h b/webrtc/modules/pacing/include/paced_sender.h
deleted file mode 100644
index f142f55173..0000000000
--- a/webrtc/modules/pacing/include/paced_sender.h
+++ /dev/null
@@ -1,153 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_PACING_INCLUDE_PACED_SENDER_H_
-#define WEBRTC_MODULES_PACING_INCLUDE_PACED_SENDER_H_
-
-#include <list>
-#include <set>
-
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/base/thread_annotations.h"
-#include "webrtc/modules/interface/module.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-class BitrateProber;
-class Clock;
-class CriticalSectionWrapper;
-
-namespace paced_sender {
-class IntervalBudget;
-struct Packet;
-class PacketQueue;
-} // namespace paced_sender
-
-class PacedSender : public Module, public RtpPacketSender {
- public:
- class Callback {
- public:
- // Note: packets sent as a result of a callback should not pass by this
- // module again.
- // Called when it's time to send a queued packet.
- // Returns false if packet cannot be sent.
- virtual bool TimeToSendPacket(uint32_t ssrc,
- uint16_t sequence_number,
- int64_t capture_time_ms,
- bool retransmission) = 0;
- // Called when it's a good time to send padding data.
- // Returns the number of bytes sent.
- virtual size_t TimeToSendPadding(size_t bytes) = 0;
-
- protected:
- virtual ~Callback() {}
- };
-
- static const int64_t kDefaultMaxQueueLengthMs = 2000;
- // Pace in kbits/s until we receive first estimate.
- static const int kDefaultInitialPaceKbps = 2000;
- // Pacing-rate relative to our target send rate.
- // Multiplicative factor that is applied to the target bitrate to calculate
- // the number of bytes that can be transmitted per interval.
- // Increasing this factor will result in lower delays in cases of bitrate
- // overshoots from the encoder.
- static const float kDefaultPaceMultiplier;
-
- static const size_t kMinProbePacketSize = 200;
-
- PacedSender(Clock* clock,
- Callback* callback,
- int bitrate_kbps,
- int max_bitrate_kbps,
- int min_bitrate_kbps);
-
- virtual ~PacedSender();
-
- // Temporarily pause all sending.
- void Pause();
-
- // Resume sending packets.
- void Resume();
-
- // Enable bitrate probing. Enabled by default, mostly here to simplify
- // testing. Must be called before any packets are being sent to have an
- // effect.
- void SetProbingEnabled(bool enabled);
-
- // Set target bitrates for the pacer.
- // We will pace out bursts of packets at a bitrate of |max_bitrate_kbps|.
- // |bitrate_kbps| is our estimate of what we are allowed to send on average.
- // Padding packets will be utilized to reach |min_bitrate| unless enough media
- // packets are available.
- void UpdateBitrate(int bitrate_kbps,
- int max_bitrate_kbps,
- int min_bitrate_kbps);
-
- // Returns true if we send the packet now, else it will add the packet
- // information to the queue and call TimeToSendPacket when it's time to send.
- void InsertPacket(RtpPacketSender::Priority priority,
- uint32_t ssrc,
- uint16_t sequence_number,
- int64_t capture_time_ms,
- size_t bytes,
- bool retransmission) override;
-
- // Returns the time since the oldest queued packet was enqueued.
- virtual int64_t QueueInMs() const;
-
- virtual size_t QueueSizePackets() const;
-
- // Returns the number of milliseconds it will take to send the current
- // packets in the queue, given the current size and bitrate, ignoring prio.
- virtual int64_t ExpectedQueueTimeMs() const;
-
- // Returns the number of milliseconds until the module want a worker thread
- // to call Process.
- int64_t TimeUntilNextProcess() override;
-
- // Process any pending packets in the queue(s).
- int32_t Process() override;
-
- private:
- // Updates the number of bytes that can be sent for the next time interval.
- void UpdateBytesPerInterval(int64_t delta_time_in_ms)
- EXCLUSIVE_LOCKS_REQUIRED(critsect_);
-
- bool SendPacket(const paced_sender::Packet& packet)
- EXCLUSIVE_LOCKS_REQUIRED(critsect_);
- void SendPadding(size_t padding_needed) EXCLUSIVE_LOCKS_REQUIRED(critsect_);
-
- Clock* const clock_;
- Callback* const callback_;
-
- rtc::scoped_ptr<CriticalSectionWrapper> critsect_;
- bool paused_ GUARDED_BY(critsect_);
- bool probing_enabled_;
- // This is the media budget, keeping track of how many bits of media
- // we can pace out during the current interval.
- rtc::scoped_ptr<paced_sender::IntervalBudget> media_budget_
- GUARDED_BY(critsect_);
- // This is the padding budget, keeping track of how many bits of padding we're
- // allowed to send out during the current interval. This budget will be
- // utilized when there's no media to send.
- rtc::scoped_ptr<paced_sender::IntervalBudget> padding_budget_
- GUARDED_BY(critsect_);
-
- rtc::scoped_ptr<BitrateProber> prober_ GUARDED_BY(critsect_);
- int bitrate_bps_ GUARDED_BY(critsect_);
-
- int64_t time_last_update_us_ GUARDED_BY(critsect_);
-
- rtc::scoped_ptr<paced_sender::PacketQueue> packets_ GUARDED_BY(critsect_);
- uint64_t packet_counter_;
-};
-} // namespace webrtc
-#endif // WEBRTC_MODULES_PACING_INCLUDE_PACED_SENDER_H_
diff --git a/webrtc/modules/pacing/include/packet_router.h b/webrtc/modules/pacing/include/packet_router.h
deleted file mode 100644
index 9d461d13a9..0000000000
--- a/webrtc/modules/pacing/include/packet_router.h
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_PACING_INCLUDE_PACKET_ROUTER_H_
-#define WEBRTC_MODULES_PACING_INCLUDE_PACKET_ROUTER_H_
-
-#include <list>
-
-#include "webrtc/base/constructormagic.h"
-#include "webrtc/base/criticalsection.h"
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/base/thread_annotations.h"
-#include "webrtc/common_types.h"
-#include "webrtc/modules/pacing/include/paced_sender.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
-
-namespace webrtc {
-
-class RtpRtcp;
-namespace rtcp {
-class TransportFeedback;
-} // namespace rtcp
-
-// PacketRouter routes outgoing data to the correct sending RTP module, based
-// on the simulcast layer in RTPVideoHeader.
-class PacketRouter : public PacedSender::Callback,
- public TransportSequenceNumberAllocator {
- public:
- PacketRouter();
- virtual ~PacketRouter();
-
- void AddRtpModule(RtpRtcp* rtp_module);
- void RemoveRtpModule(RtpRtcp* rtp_module);
-
- // Implements PacedSender::Callback.
- bool TimeToSendPacket(uint32_t ssrc,
- uint16_t sequence_number,
- int64_t capture_timestamp,
- bool retransmission) override;
-
- size_t TimeToSendPadding(size_t bytes) override;
-
- void SetTransportWideSequenceNumber(uint16_t sequence_number);
- uint16_t AllocateSequenceNumber() override;
-
- // Send transport feedback packet to send-side.
- virtual bool SendFeedback(rtcp::TransportFeedback* packet);
-
- private:
- rtc::CriticalSection modules_lock_;
- // Map from ssrc to sending rtp module.
- std::list<RtpRtcp*> rtp_modules_ GUARDED_BY(modules_lock_);
-
- volatile int transport_seq_;
-
- RTC_DISALLOW_COPY_AND_ASSIGN(PacketRouter);
-};
-} // namespace webrtc
-#endif // WEBRTC_MODULES_PACING_INCLUDE_PACKET_ROUTER_H_
diff --git a/webrtc/modules/pacing/mock/mock_paced_sender.h b/webrtc/modules/pacing/mock/mock_paced_sender.h
new file mode 100644
index 0000000000..01d5f6a6e9
--- /dev/null
+++ b/webrtc/modules/pacing/mock/mock_paced_sender.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_PACING_MOCK_MOCK_PACED_SENDER_H_
+#define WEBRTC_MODULES_PACING_MOCK_MOCK_PACED_SENDER_H_
+
+#include "testing/gmock/include/gmock/gmock.h"
+
+#include <vector>
+
+#include "webrtc/modules/pacing/paced_sender.h"
+#include "webrtc/system_wrappers/include/clock.h"
+
+namespace webrtc {
+
+class MockPacedSender : public PacedSender {
+ public:
+ MockPacedSender() : PacedSender(Clock::GetRealTimeClock(), NULL, 0, 0, 0) {}
+ MOCK_METHOD6(SendPacket, bool(Priority priority,
+ uint32_t ssrc,
+ uint16_t sequence_number,
+ int64_t capture_time_ms,
+ size_t bytes,
+ bool retransmission));
+ MOCK_CONST_METHOD0(QueueInMs, int64_t());
+ MOCK_CONST_METHOD0(QueueInPackets, int());
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_PACING_MOCK_MOCK_PACED_SENDER_H_
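For context, a minimal sketch of how the relocated mock is typically driven in a gmock-based test. The test name and the 500 ms value are illustrative, not part of this change; only MockPacedSender and its mocked methods come from the header above.

#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/modules/pacing/mock/mock_paced_sender.h"

using ::testing::Return;

TEST(MockPacedSenderSketch, ReportsInjectedQueueTime) {
  webrtc::MockPacedSender pacer;
  // Pretend the pacer already holds 500 ms worth of queued media.
  EXPECT_CALL(pacer, QueueInMs()).WillRepeatedly(Return(500));
  EXPECT_EQ(500, pacer.QueueInMs());
}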
diff --git a/webrtc/modules/pacing/paced_sender.cc b/webrtc/modules/pacing/paced_sender.cc
index 5d7ae17b23..121f860c7d 100644
--- a/webrtc/modules/pacing/paced_sender.cc
+++ b/webrtc/modules/pacing/paced_sender.cc
@@ -8,20 +8,19 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/pacing/include/paced_sender.h"
-
-#include <assert.h>
+#include "webrtc/modules/pacing/paced_sender.h"
#include <map>
#include <queue>
#include <set>
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/pacing/bitrate_prober.h"
#include "webrtc/system_wrappers/include/clock.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/field_trial.h"
-#include "webrtc/system_wrappers/include/logging.h"
namespace {
// Time limit in milliseconds between packet bursts.
@@ -33,6 +32,9 @@ const int64_t kMaxIntervalTimeMs = 30;
} // namespace
+// TODO(sprang): Move at least PacketQueue and MediaBudget out to separate
+// files, so that we can more easily test them.
+
namespace webrtc {
namespace paced_sender {
struct Packet {
@@ -86,13 +88,19 @@ struct Comparator {
// Class encapsulating a priority queue with some extensions.
class PacketQueue {
public:
- PacketQueue() : bytes_(0) {}
+ explicit PacketQueue(Clock* clock)
+ : bytes_(0),
+ clock_(clock),
+ queue_time_sum_(0),
+ time_last_updated_(clock_->TimeInMilliseconds()) {}
virtual ~PacketQueue() {}
void Push(const Packet& packet) {
- if (!AddToDupeSet(packet)) {
+ if (!AddToDupeSet(packet))
return;
- }
+
+ UpdateQueueTime(packet.enqueue_time_ms);
+
// Store packet in list, use pointers in priority queue for cheaper moves.
// Packets keep a handle to their own iterator in the list, for easy removal
// when popping from queue.
@@ -114,7 +122,11 @@ class PacketQueue {
void FinalizePop(const Packet& packet) {
RemoveFromDupeSet(packet);
bytes_ -= packet.bytes;
+ queue_time_sum_ -= (time_last_updated_ - packet.enqueue_time_ms);
packet_list_.erase(packet.this_it);
+ RTC_DCHECK_EQ(packet_list_.size(), prio_queue_.size());
+ if (packet_list_.empty())
+ RTC_DCHECK_EQ(0u, queue_time_sum_);
}
bool Empty() const { return prio_queue_.empty(); }
@@ -123,13 +135,29 @@ class PacketQueue {
uint64_t SizeInBytes() const { return bytes_; }
- int64_t OldestEnqueueTime() const {
- std::list<Packet>::const_reverse_iterator it = packet_list_.rbegin();
+ int64_t OldestEnqueueTimeMs() const {
+ auto it = packet_list_.rbegin();
if (it == packet_list_.rend())
return 0;
return it->enqueue_time_ms;
}
+ void UpdateQueueTime(int64_t timestamp_ms) {
+ RTC_DCHECK_GE(timestamp_ms, time_last_updated_);
+ int64_t delta = timestamp_ms - time_last_updated_;
+ // Use packet_list_.size() and not prio_queue_.size() here, as there
+ // might be an outstanding element popped from prio_queue_ currently in the
+ // SendPacket() call, while packet_list_ will always be correct.
+ queue_time_sum_ += delta * packet_list_.size();
+ time_last_updated_ = timestamp_ms;
+ }
+
+ int64_t AverageQueueTimeMs() const {
+ if (prio_queue_.empty())
+ return 0;
+ return queue_time_sum_ / packet_list_.size();
+ }
+
private:
// Try to add a packet to the set of ssrc/seqno identifiers currently in the
// queue. Return true if inserted, false if this is a duplicate.
@@ -147,7 +175,7 @@ class PacketQueue {
void RemoveFromDupeSet(const Packet& packet) {
SsrcSeqNoMap::iterator it = dupe_map_.find(packet.ssrc);
- assert(it != dupe_map_.end());
+ RTC_DCHECK(it != dupe_map_.end());
it->second.erase(packet.sequence_number);
if (it->second.empty()) {
dupe_map_.erase(it);
@@ -165,6 +193,9 @@ class PacketQueue {
// Map<ssrc, set<seq_no> >, for checking duplicates.
typedef std::map<uint32_t, std::set<uint16_t> > SsrcSeqNoMap;
SsrcSeqNoMap dupe_map_;
+ Clock* const clock_;
+ int64_t queue_time_sum_;
+ int64_t time_last_updated_;
};
class IntervalBudget {
@@ -209,6 +240,7 @@ class IntervalBudget {
};
} // namespace paced_sender
+const int64_t PacedSender::kMaxQueueLengthMs = 2000;
const float PacedSender::kDefaultPaceMultiplier = 2.5f;
PacedSender::PacedSender(Clock* clock,
@@ -225,8 +257,9 @@ PacedSender::PacedSender(Clock* clock,
padding_budget_(new paced_sender::IntervalBudget(min_bitrate_kbps)),
prober_(new BitrateProber()),
bitrate_bps_(1000 * bitrate_kbps),
+ max_bitrate_kbps_(max_bitrate_kbps),
time_last_update_us_(clock->TimeInMicroseconds()),
- packets_(new paced_sender::PacketQueue()),
+ packets_(new paced_sender::PacketQueue(clock)),
packet_counter_(0) {
UpdateBytesPerInterval(kMinPacketLimitMs);
}
@@ -244,7 +277,7 @@ void PacedSender::Resume() {
}
void PacedSender::SetProbingEnabled(bool enabled) {
- assert(packet_counter_ == 0);
+ RTC_CHECK_EQ(0u, packet_counter_);
probing_enabled_ = enabled;
}
@@ -252,9 +285,12 @@ void PacedSender::UpdateBitrate(int bitrate_kbps,
int max_bitrate_kbps,
int min_bitrate_kbps) {
CriticalSectionScoped cs(critsect_.get());
- media_budget_->set_target_rate_kbps(max_bitrate_kbps);
+ // Don't set the media bitrate here, as it may be boosted in order to meet
+ // the max queue time constraint. Just update max_bitrate_kbps_ and let
+ // media_budget_ be updated in Process().
padding_budget_->set_target_rate_kbps(min_bitrate_kbps);
bitrate_bps_ = 1000 * bitrate_kbps;
+ max_bitrate_kbps_ = max_bitrate_kbps;
}
void PacedSender::InsertPacket(RtpPacketSender::Priority priority,
@@ -265,25 +301,23 @@ void PacedSender::InsertPacket(RtpPacketSender::Priority priority,
bool retransmission) {
CriticalSectionScoped cs(critsect_.get());
- if (probing_enabled_ && !prober_->IsProbing()) {
+ if (probing_enabled_ && !prober_->IsProbing())
prober_->SetEnabled(true);
- }
prober_->MaybeInitializeProbe(bitrate_bps_);
- if (capture_time_ms < 0) {
- capture_time_ms = clock_->TimeInMilliseconds();
- }
+ int64_t now_ms = clock_->TimeInMilliseconds();
+ if (capture_time_ms < 0)
+ capture_time_ms = now_ms;
- packets_->Push(paced_sender::Packet(
- priority, ssrc, sequence_number, capture_time_ms,
- clock_->TimeInMilliseconds(), bytes, retransmission, packet_counter_++));
+ packets_->Push(paced_sender::Packet(priority, ssrc, sequence_number,
+ capture_time_ms, now_ms, bytes,
+ retransmission, packet_counter_++));
}
int64_t PacedSender::ExpectedQueueTimeMs() const {
CriticalSectionScoped cs(critsect_.get());
- int target_rate = media_budget_->target_rate_kbps();
- assert(target_rate > 0);
- return static_cast<int64_t>(packets_->SizeInBytes() * 8 / target_rate);
+ RTC_DCHECK_GT(max_bitrate_kbps_, 0);
+ return static_cast<int64_t>(packets_->SizeInBytes() * 8 / max_bitrate_kbps_);
}
size_t PacedSender::QueueSizePackets() const {
@@ -294,20 +328,25 @@ size_t PacedSender::QueueSizePackets() const {
int64_t PacedSender::QueueInMs() const {
CriticalSectionScoped cs(critsect_.get());
- int64_t oldest_packet = packets_->OldestEnqueueTime();
+ int64_t oldest_packet = packets_->OldestEnqueueTimeMs();
if (oldest_packet == 0)
return 0;
return clock_->TimeInMilliseconds() - oldest_packet;
}
+int64_t PacedSender::AverageQueueTimeMs() {
+ CriticalSectionScoped cs(critsect_.get());
+ packets_->UpdateQueueTime(clock_->TimeInMilliseconds());
+ return packets_->AverageQueueTimeMs();
+}
+
int64_t PacedSender::TimeUntilNextProcess() {
CriticalSectionScoped cs(critsect_.get());
if (prober_->IsProbing()) {
int64_t ret = prober_->TimeUntilNextProbe(clock_->TimeInMilliseconds());
- if (ret >= 0) {
+ if (ret >= 0)
return ret;
- }
}
int64_t elapsed_time_us = clock_->TimeInMicroseconds() - time_last_update_us_;
int64_t elapsed_time_ms = (elapsed_time_us + 500) / 1000;
@@ -319,27 +358,42 @@ int32_t PacedSender::Process() {
CriticalSectionScoped cs(critsect_.get());
int64_t elapsed_time_ms = (now_us - time_last_update_us_ + 500) / 1000;
time_last_update_us_ = now_us;
- if (paused_)
- return 0;
- if (elapsed_time_ms > 0) {
+ int target_bitrate_kbps = max_bitrate_kbps_;
+ // TODO(holmer): Remove the !paused_ check when issue 5307 has been fixed.
+ if (!paused_ && elapsed_time_ms > 0) {
+ size_t queue_size_bytes = packets_->SizeInBytes();
+ if (queue_size_bytes > 0) {
+ // Assuming equal-sized packets and a steady input/output rate, the
+ // average packet has avg_time_left_ms left to get queue_size_bytes out of
+ // the queue if the time constraint is to be met. Determine the bitrate
+ // needed for that.
+ packets_->UpdateQueueTime(clock_->TimeInMilliseconds());
+ int64_t avg_time_left_ms = std::max<int64_t>(
+ 1, kMaxQueueLengthMs - packets_->AverageQueueTimeMs());
+ int min_bitrate_needed_kbps =
+ static_cast<int>(queue_size_bytes * 8 / avg_time_left_ms);
+ if (min_bitrate_needed_kbps > target_bitrate_kbps)
+ target_bitrate_kbps = min_bitrate_needed_kbps;
+ }
+
+ media_budget_->set_target_rate_kbps(target_bitrate_kbps);
+
int64_t delta_time_ms = std::min(kMaxIntervalTimeMs, elapsed_time_ms);
UpdateBytesPerInterval(delta_time_ms);
}
while (!packets_->Empty()) {
- if (media_budget_->bytes_remaining() == 0 && !prober_->IsProbing()) {
+ if (media_budget_->bytes_remaining() == 0 && !prober_->IsProbing())
return 0;
- }
// Since we need to release the lock in order to send, we first pop the
// element from the priority queue but keep it in storage, so that we can
// reinsert it if send fails.
const paced_sender::Packet& packet = packets_->BeginPop();
+
if (SendPacket(packet)) {
// Send succeeded, remove it from the queue.
packets_->FinalizePop(packet);
- if (prober_->IsProbing()) {
+ if (prober_->IsProbing())
return 0;
- }
} else {
// Send failed, put it back into the queue.
packets_->CancelPop(packet);
@@ -347,14 +401,16 @@ int32_t PacedSender::Process() {
}
}
- if (!packets_->Empty())
+ // TODO(holmer): Remove the paused_ check when issue 5307 has been fixed.
+ if (paused_ || !packets_->Empty())
return 0;
size_t padding_needed;
- if (prober_->IsProbing())
+ if (prober_->IsProbing()) {
padding_needed = prober_->RecommendedPacketSize();
- else
+ } else {
padding_needed = padding_budget_->bytes_remaining();
+ }
if (padding_needed > 0)
SendPadding(static_cast<size_t>(padding_needed));
@@ -362,6 +418,11 @@ int32_t PacedSender::Process() {
}
bool PacedSender::SendPacket(const paced_sender::Packet& packet) {
+ // TODO(holmer): Because of bug 5307, we have to send audio
+ // packets even when the pacer is paused. Here we assume audio packets are
+ // always high priority and that they are the only high priority packets.
+ if (paused_ && packet.priority != kHighPriority)
+ return false;
critsect_->Leave();
const bool success = callback_->TimeToSendPacket(packet.ssrc,
packet.sequence_number,
@@ -369,7 +430,9 @@ bool PacedSender::SendPacket(const paced_sender::Packet& packet) {
packet.retransmission);
critsect_->Enter();
- if (success) {
+ // TODO(holmer): High priority packets should only be accounted for if we are
+ // allocating bandwidth for audio.
+ if (success && packet.priority != kHighPriority) {
// Update media bytes sent.
prober_->PacketSent(clock_->TimeInMilliseconds(), packet.bytes);
media_budget_->UseBudget(packet.bytes);
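The queue-time bookkeeping added above is O(1) per operation: UpdateQueueTime() ages every queued packet at once by adding delta * queue size to queue_time_sum_, and FinalizePop() subtracts exactly the popped packet's accumulated wait. A simplified, FIFO-only sketch of the same running-sum idea follows; QueueTimeTracker is a made-up name, and unlike the real PacketQueue it neither pops by priority nor takes a lock.

#include <cstdint>
#include <list>

class QueueTimeTracker {
 public:
  explicit QueueTimeTracker(int64_t now_ms) : last_update_ms_(now_ms) {}

  void OnPush(int64_t now_ms) {
    Update(now_ms);
    enqueue_times_ms_.push_back(now_ms);
  }

  void OnPop(int64_t now_ms) {
    Update(now_ms);
    // The popped (oldest) packet has waited now_ms minus its enqueue time;
    // remove exactly its share from the running sum.
    sum_ms_ -= now_ms - enqueue_times_ms_.front();
    enqueue_times_ms_.pop_front();
  }

  int64_t AverageMs(int64_t now_ms) {
    Update(now_ms);
    if (enqueue_times_ms_.empty())
      return 0;
    return sum_ms_ / static_cast<int64_t>(enqueue_times_ms_.size());
  }

 private:
  void Update(int64_t now_ms) {
    // Every queued packet ages by the same delta, so one multiplication
    // replaces a per-packet loop.
    sum_ms_ += (now_ms - last_update_ms_) *
               static_cast<int64_t>(enqueue_times_ms_.size());
    last_update_ms_ = now_ms;
  }

  std::list<int64_t> enqueue_times_ms_;
  int64_t sum_ms_ = 0;
  int64_t last_update_ms_;
};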
diff --git a/webrtc/modules/pacing/paced_sender.h b/webrtc/modules/pacing/paced_sender.h
new file mode 100644
index 0000000000..62e794fdbc
--- /dev/null
+++ b/webrtc/modules/pacing/paced_sender.h
@@ -0,0 +1,164 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_PACING_PACED_SENDER_H_
+#define WEBRTC_MODULES_PACING_PACED_SENDER_H_
+
+#include <list>
+#include <set>
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/thread_annotations.h"
+#include "webrtc/modules/include/module.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+class BitrateProber;
+class Clock;
+class CriticalSectionWrapper;
+
+namespace paced_sender {
+class IntervalBudget;
+struct Packet;
+class PacketQueue;
+} // namespace paced_sender
+
+class PacedSender : public Module, public RtpPacketSender {
+ public:
+ class Callback {
+ public:
+ // Note: packets sent as a result of a callback should not pass through
+ // this module again.
+ // Called when it's time to send a queued packet.
+ // Returns false if packet cannot be sent.
+ virtual bool TimeToSendPacket(uint32_t ssrc,
+ uint16_t sequence_number,
+ int64_t capture_time_ms,
+ bool retransmission) = 0;
+ // Called when it's a good time to send padding data.
+ // Returns the number of bytes sent.
+ virtual size_t TimeToSendPadding(size_t bytes) = 0;
+
+ protected:
+ virtual ~Callback() {}
+ };
+
+ // Expected max pacer delay in ms. If ExpectedQueueTimeMs() is higher than
+ // this value, the packet producers should wait (e.g. drop frames rather
+ // than encoding them). The bitrate sent may temporarily exceed the target
+ // set by UpdateBitrate() so that this limit will be upheld.
+ static const int64_t kMaxQueueLengthMs;
+ // Pace in kbits/s until we receive the first estimate.
+ static const int kDefaultInitialPaceKbps = 2000;
+ // Pacing-rate relative to our target send rate.
+ // Multiplicative factor that is applied to the target bitrate to calculate
+ // the number of bytes that can be transmitted per interval.
+ // Increasing this factor will result in lower delays in cases of bitrate
+ // overshoots from the encoder.
+ static const float kDefaultPaceMultiplier;
+
+ static const size_t kMinProbePacketSize = 200;
+
+ PacedSender(Clock* clock,
+ Callback* callback,
+ int bitrate_kbps,
+ int max_bitrate_kbps,
+ int min_bitrate_kbps);
+
+ virtual ~PacedSender();
+
+ // Temporarily pause all sending.
+ void Pause();
+
+ // Resume sending packets.
+ void Resume();
+
+ // Enable bitrate probing. Enabled by default, mostly here to simplify
+ // testing. Must be called before any packets are sent to have an
+ // effect.
+ void SetProbingEnabled(bool enabled);
+
+ // Set target bitrates for the pacer.
+ // We will pace out bursts of packets at a bitrate of |max_bitrate_kbps|.
+ // |bitrate_kbps| is our estimate of what we are allowed to send on average.
+ // Padding packets will be utilized to reach |min_bitrate_kbps| unless
+ // enough media packets are available.
+ void UpdateBitrate(int bitrate_kbps,
+ int max_bitrate_kbps,
+ int min_bitrate_kbps);
+
+ // Adds the packet information to the queue and calls TimeToSendPacket
+ // from Process() when it's time to send.
+ void InsertPacket(RtpPacketSender::Priority priority,
+ uint32_t ssrc,
+ uint16_t sequence_number,
+ int64_t capture_time_ms,
+ size_t bytes,
+ bool retransmission) override;
+
+ // Returns the time since the oldest queued packet was enqueued.
+ virtual int64_t QueueInMs() const;
+
+ virtual size_t QueueSizePackets() const;
+
+ // Returns the number of milliseconds it will take to send the current
+ // packets in the queue, given the current size and bitrate, ignoring
+ // priority.
+ virtual int64_t ExpectedQueueTimeMs() const;
+
+ // Returns the average time since being enqueued, in milliseconds, for all
+ // packets currently in the pacer queue, or 0 if the queue is empty.
+ virtual int64_t AverageQueueTimeMs();
+
+ // Returns the number of milliseconds until the module wants a worker thread
+ // to call Process.
+ int64_t TimeUntilNextProcess() override;
+
+ // Process any pending packets in the queue(s).
+ int32_t Process() override;
+
+ private:
+ // Updates the number of bytes that can be sent for the next time interval.
+ void UpdateBytesPerInterval(int64_t delta_time_in_ms)
+ EXCLUSIVE_LOCKS_REQUIRED(critsect_);
+
+ bool SendPacket(const paced_sender::Packet& packet)
+ EXCLUSIVE_LOCKS_REQUIRED(critsect_);
+ void SendPadding(size_t padding_needed) EXCLUSIVE_LOCKS_REQUIRED(critsect_);
+
+ Clock* const clock_;
+ Callback* const callback_;
+
+ rtc::scoped_ptr<CriticalSectionWrapper> critsect_;
+ bool paused_ GUARDED_BY(critsect_);
+ bool probing_enabled_;
+ // This is the media budget, keeping track of how many bits of media
+ // we can pace out during the current interval.
+ rtc::scoped_ptr<paced_sender::IntervalBudget> media_budget_
+ GUARDED_BY(critsect_);
+ // This is the padding budget, keeping track of how many bits of padding we're
+ // allowed to send out during the current interval. This budget will be
+ // utilized when there's no media to send.
+ rtc::scoped_ptr<paced_sender::IntervalBudget> padding_budget_
+ GUARDED_BY(critsect_);
+
+ rtc::scoped_ptr<BitrateProber> prober_ GUARDED_BY(critsect_);
+ // Actual configured bitrates (media_budget_ may temporarily be higher in
+ // order to meet pace time constraint).
+ int bitrate_bps_ GUARDED_BY(critsect_);
+ int max_bitrate_kbps_ GUARDED_BY(critsect_);
+
+ int64_t time_last_update_us_ GUARDED_BY(critsect_);
+
+ rtc::scoped_ptr<paced_sender::PacketQueue> packets_ GUARDED_BY(critsect_);
+ uint64_t packet_counter_;
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_PACING_PACED_SENDER_H_
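A note on units in ExpectedQueueTimeMs() and the boost logic in Process(): dividing a bit count by a kbit/s rate yields milliseconds, and dividing a bit count by milliseconds yields kbit/s, so both formulas work without conversion constants. A small self-contained check follows; the queue size and bitrates are illustrative numbers, not values from this change.

#include <cassert>
#include <cstdint>

int main() {
  const int64_t queue_size_bytes = 30000;  // Illustrative, not from the diff.
  const int max_bitrate_kbps = 300;
  // ExpectedQueueTimeMs(): bits / (kbits/s) == ms, so 240000 bits at
  // 300 kbit/s drain in 800 ms.
  assert(queue_size_bytes * 8 / max_bitrate_kbps == 800);
  // Process(): bits / ms == kbit/s, so draining 240000 bits within the
  // 1200 ms left before kMaxQueueLengthMs would be hit needs >= 200 kbit/s;
  // no boost above the configured 300 kbit/s is required here.
  const int64_t avg_time_left_ms = 1200;
  assert(queue_size_bytes * 8 / avg_time_left_ms == 200);
  return 0;
}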
diff --git a/webrtc/modules/pacing/paced_sender_unittest.cc b/webrtc/modules/pacing/paced_sender_unittest.cc
index c27444c5ac..588bf3b669 100644
--- a/webrtc/modules/pacing/paced_sender_unittest.cc
+++ b/webrtc/modules/pacing/paced_sender_unittest.cc
@@ -12,7 +12,7 @@
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/pacing/include/paced_sender.h"
+#include "webrtc/modules/pacing/paced_sender.h"
#include "webrtc/system_wrappers/include/clock.h"
using testing::_;
@@ -471,13 +471,15 @@ TEST_F(PacedSenderTest, Priority) {
sequence_number++, capture_time_ms, 250, false);
send_bucket_->InsertPacket(PacedSender::kNormalPriority, ssrc,
sequence_number++, capture_time_ms, 250, false);
+ send_bucket_->InsertPacket(PacedSender::kNormalPriority, ssrc,
+ sequence_number++, capture_time_ms, 250, false);
send_bucket_->InsertPacket(PacedSender::kHighPriority, ssrc,
sequence_number++, capture_time_ms, 250, false);
// Expect all high and normal priority to be sent out first.
EXPECT_CALL(callback_, TimeToSendPadding(_)).Times(0);
EXPECT_CALL(callback_, TimeToSendPacket(ssrc, _, capture_time_ms, false))
- .Times(3)
+ .Times(4)
.WillRepeatedly(Return(true));
EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess());
@@ -497,6 +499,37 @@ TEST_F(PacedSenderTest, Priority) {
EXPECT_EQ(0, send_bucket_->Process());
}
+TEST_F(PacedSenderTest, HighPrioDoesntAffectBudget) {
+ uint32_t ssrc = 12346;
+ uint16_t sequence_number = 1234;
+ int64_t capture_time_ms = 56789;
+
+ // As high prio packets don't affect the budget, we should be able to send
+ // a large number of them at once.
+ for (int i = 0; i < 25; ++i) {
+ SendAndExpectPacket(PacedSender::kHighPriority, ssrc, sequence_number++,
+ capture_time_ms, 250, false);
+ }
+ send_bucket_->Process();
+ // Low prio packets do affect the budget, so we should only be able to
+ // send 3 at once; the 4th should be queued.
+ for (int i = 0; i < 3; ++i) {
+ SendAndExpectPacket(PacedSender::kLowPriority, ssrc, sequence_number++,
+ capture_time_ms, 250, false);
+ }
+ send_bucket_->InsertPacket(PacedSender::kLowPriority, ssrc, sequence_number,
+ capture_time_ms, 250, false);
+ EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess());
+ clock_.AdvanceTimeMilliseconds(5);
+ send_bucket_->Process();
+ EXPECT_CALL(callback_,
+ TimeToSendPacket(ssrc, sequence_number++, capture_time_ms, false))
+ .Times(1);
+ EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess());
+ clock_.AdvanceTimeMilliseconds(5);
+ send_bucket_->Process();
+}
+
TEST_F(PacedSenderTest, Pause) {
uint32_t ssrc_low_priority = 12345;
uint32_t ssrc = 12346;
@@ -560,20 +593,16 @@ TEST_F(PacedSenderTest, Pause) {
EXPECT_CALL(callback_, TimeToSendPacket(_, _, capture_time_ms, false))
.Times(3)
.WillRepeatedly(Return(true));
- send_bucket_->Resume();
-
- EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess());
- clock_.AdvanceTimeMilliseconds(5);
- EXPECT_EQ(0, send_bucket_->TimeUntilNextProcess());
- EXPECT_EQ(0, send_bucket_->Process());
-
EXPECT_CALL(callback_, TimeToSendPacket(_, _, second_capture_time_ms, false))
.Times(1)
.WillRepeatedly(Return(true));
+ send_bucket_->Resume();
+
EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess());
clock_.AdvanceTimeMilliseconds(5);
EXPECT_EQ(0, send_bucket_->TimeUntilNextProcess());
EXPECT_EQ(0, send_bucket_->Process());
+
EXPECT_EQ(0, send_bucket_->QueueInMs());
}
@@ -664,10 +693,9 @@ TEST_F(PacedSenderTest, ExpectedQueueTimeMs) {
EXPECT_EQ(0, send_bucket_->ExpectedQueueTimeMs());
- // Allow for aliasing, duration should be in [expected(n - 1), expected(n)].
- EXPECT_LE(duration, queue_in_ms);
- EXPECT_GE(duration,
- queue_in_ms - static_cast<int64_t>(kPacketSize * 8 / kMaxBitrate));
+ // Allow for aliasing; the duration should be within one packet of the max
+ // time limit.
+ EXPECT_NEAR(duration, PacedSender::kMaxQueueLengthMs,
+ static_cast<int64_t>(kPacketSize * 8 / kMaxBitrate));
}
TEST_F(PacedSenderTest, QueueTimeGrowsOverTime) {
@@ -830,5 +858,51 @@ TEST_F(PacedSenderTest, PaddingOveruse) {
send_bucket_->Process();
}
+TEST_F(PacedSenderTest, AverageQueueTime) {
+ uint32_t ssrc = 12346;
+ uint16_t sequence_number = 1234;
+ const size_t kPacketSize = 1200;
+ const int kBitrateBps = 10 * kPacketSize * 8; // 10 packets per second.
+ const int kBitrateKbps = (kBitrateBps + 500) / 1000;
+
+ send_bucket_->UpdateBitrate(kBitrateKbps, kBitrateKbps, kBitrateKbps);
+
+ EXPECT_EQ(0, send_bucket_->AverageQueueTimeMs());
+
+ int64_t first_capture_time = clock_.TimeInMilliseconds();
+ send_bucket_->InsertPacket(PacedSender::kNormalPriority, ssrc,
+ sequence_number, first_capture_time, kPacketSize,
+ false);
+ clock_.AdvanceTimeMilliseconds(10);
+ send_bucket_->InsertPacket(PacedSender::kNormalPriority, ssrc,
+ sequence_number + 1, clock_.TimeInMilliseconds(),
+ kPacketSize, false);
+ clock_.AdvanceTimeMilliseconds(10);
+
+ EXPECT_EQ((20 + 10) / 2, send_bucket_->AverageQueueTimeMs());
+
+ // Only the first packet (queued for 20 ms) should be removed, leaving the
+ // second packet (queued for 10 ms) alone in the queue.
+ EXPECT_CALL(callback_, TimeToSendPacket(ssrc, sequence_number,
+ first_capture_time, false))
+ .Times(1)
+ .WillRepeatedly(Return(true));
+ send_bucket_->Process();
+
+ EXPECT_EQ(10, send_bucket_->AverageQueueTimeMs());
+
+ clock_.AdvanceTimeMilliseconds(10);
+ EXPECT_CALL(callback_, TimeToSendPacket(ssrc, sequence_number + 1,
+ first_capture_time + 10, false))
+ .Times(1)
+ .WillRepeatedly(Return(true));
+ for (int i = 0; i < 3; ++i) {
+ clock_.AdvanceTimeMilliseconds(30); // Max delta.
+ send_bucket_->Process();
+ }
+
+ EXPECT_EQ(0, send_bucket_->AverageQueueTimeMs());
+}
+
} // namespace test
} // namespace webrtc
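For readers checking the (20 + 10) / 2 expectation in the AverageQueueTime test above, the timeline restated as comments (t0 is the time of the first InsertPacket; nothing here is new behavior):

// Timeline behind the AverageQueueTime expectations:
//   t0      : packet 1 enqueued.
//   t0 + 10 : packet 2 enqueued.
//   t0 + 20 : packet 1 has waited 20 ms, packet 2 has waited 10 ms.
static_assert((20 + 10) / 2 == 15, "average queue time at t0 + 20");
//   After packet 1 is sent, only packet 2 remains, queued for 10 ms.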
diff --git a/webrtc/modules/pacing/pacing.gypi b/webrtc/modules/pacing/pacing.gypi
index faa97841c1..90f663c1b0 100644
--- a/webrtc/modules/pacing/pacing.gypi
+++ b/webrtc/modules/pacing/pacing.gypi
@@ -17,12 +17,12 @@
'<(webrtc_root)/modules/modules.gyp:rtp_rtcp',
],
'sources': [
- 'include/paced_sender.h',
- 'include/packet_router.h',
'bitrate_prober.cc',
'bitrate_prober.h',
'paced_sender.cc',
+ 'paced_sender.h',
'packet_router.cc',
+ 'packet_router.h',
],
},
], # targets
diff --git a/webrtc/modules/pacing/packet_router.cc b/webrtc/modules/pacing/packet_router.cc
index 563773b41f..5fd350834a 100644
--- a/webrtc/modules/pacing/packet_router.cc
+++ b/webrtc/modules/pacing/packet_router.cc
@@ -8,12 +8,12 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/pacing/include/packet_router.h"
+#include "webrtc/modules/pacing/packet_router.h"
#include "webrtc/base/atomicops.h"
#include "webrtc/base/checks.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h"
namespace webrtc {
diff --git a/webrtc/modules/pacing/packet_router.h b/webrtc/modules/pacing/packet_router.h
new file mode 100644
index 0000000000..edef1aa9b3
--- /dev/null
+++ b/webrtc/modules/pacing/packet_router.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_PACING_PACKET_ROUTER_H_
+#define WEBRTC_MODULES_PACING_PACKET_ROUTER_H_
+
+#include <list>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/thread_annotations.h"
+#include "webrtc/common_types.h"
+#include "webrtc/modules/pacing/paced_sender.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+
+namespace webrtc {
+
+class RtpRtcp;
+namespace rtcp {
+class TransportFeedback;
+} // namespace rtcp
+
+// PacketRouter routes outgoing data to the correct sending RTP module, based
+// on the simulcast layer in RTPVideoHeader.
+class PacketRouter : public PacedSender::Callback,
+ public TransportSequenceNumberAllocator {
+ public:
+ PacketRouter();
+ virtual ~PacketRouter();
+
+ void AddRtpModule(RtpRtcp* rtp_module);
+ void RemoveRtpModule(RtpRtcp* rtp_module);
+
+ // Implements PacedSender::Callback.
+ bool TimeToSendPacket(uint32_t ssrc,
+ uint16_t sequence_number,
+ int64_t capture_timestamp,
+ bool retransmission) override;
+
+ size_t TimeToSendPadding(size_t bytes) override;
+
+ void SetTransportWideSequenceNumber(uint16_t sequence_number);
+ uint16_t AllocateSequenceNumber() override;
+
+ // Send a transport feedback packet to the send side.
+ virtual bool SendFeedback(rtcp::TransportFeedback* packet);
+
+ private:
+ rtc::CriticalSection modules_lock_;
+ // List of sending rtp modules; packets are routed by matching ssrc.
+ std::list<RtpRtcp*> rtp_modules_ GUARDED_BY(modules_lock_);
+
+ volatile int transport_seq_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(PacketRouter);
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_PACING_PACKET_ROUTER_H_
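The header above only declares the callback; a rough sketch of the dispatch it implies is given below. RtpModule and RouteToSendingModule are hypothetical names invented for this sketch, and the shipped logic in packet_router.cc holds modules_lock_ and may differ in detail.

#include <cstdint>
#include <list>

// Hypothetical stand-in for the RtpRtcp interface, only for illustration.
class RtpModule {
 public:
  virtual ~RtpModule() {}
  virtual bool SendingMedia() const = 0;
  virtual uint32_t SSRC() const = 0;
  virtual bool TimeToSendPacket(uint32_t ssrc,
                                uint16_t sequence_number,
                                int64_t capture_time_ms,
                                bool retransmission) = 0;
};

// Find the module that is sending this ssrc and hand the packet over.
bool RouteToSendingModule(const std::list<RtpModule*>& modules,
                          uint32_t ssrc,
                          uint16_t sequence_number,
                          int64_t capture_time_ms,
                          bool retransmission) {
  for (RtpModule* module : modules) {
    if (module->SendingMedia() && module->SSRC() == ssrc) {
      return module->TimeToSendPacket(ssrc, sequence_number, capture_time_ms,
                                      retransmission);
    }
  }
  return true;  // Unknown ssrc: report success so the pacer drops the packet.
}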
diff --git a/webrtc/modules/pacing/packet_router_unittest.cc b/webrtc/modules/pacing/packet_router_unittest.cc
index eecb13757c..31acf44b9b 100644
--- a/webrtc/modules/pacing/packet_router_unittest.cc
+++ b/webrtc/modules/pacing/packet_router_unittest.cc
@@ -13,8 +13,8 @@
#include "webrtc/base/checks.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/pacing/include/packet_router.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
+#include "webrtc/modules/pacing/packet_router.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h"
#include "webrtc/base/scoped_ptr.h"
diff --git a/webrtc/modules/remote_bitrate_estimator/aimd_rate_control.cc b/webrtc/modules/remote_bitrate_estimator/aimd_rate_control.cc
index 2d5573228d..4820e6295f 100644
--- a/webrtc/modules/remote_bitrate_estimator/aimd_rate_control.cc
+++ b/webrtc/modules/remote_bitrate_estimator/aimd_rate_control.cc
@@ -88,8 +88,7 @@ uint32_t AimdRateControl::LatestEstimate() const {
uint32_t AimdRateControl::UpdateBandwidthEstimate(int64_t now_ms) {
current_bitrate_bps_ = ChangeBitrate(current_bitrate_bps_,
- current_input_._incomingBitRate,
- now_ms);
+ current_input_.incoming_bitrate, now_ms);
if (now_ms - time_of_last_log_ > kLogIntervalMs) {
time_of_last_log_ = now_ms;
}
@@ -109,21 +108,21 @@ void AimdRateControl::Update(const RateControlInput* input, int64_t now_ms) {
const int64_t kInitializationTimeMs = 5000;
RTC_DCHECK_LE(kBitrateWindowMs, kInitializationTimeMs);
if (time_first_incoming_estimate_ < 0) {
- if (input->_incomingBitRate > 0) {
+ if (input->incoming_bitrate > 0) {
time_first_incoming_estimate_ = now_ms;
}
} else if (now_ms - time_first_incoming_estimate_ > kInitializationTimeMs &&
- input->_incomingBitRate > 0) {
- current_bitrate_bps_ = input->_incomingBitRate;
+ input->incoming_bitrate > 0) {
+ current_bitrate_bps_ = input->incoming_bitrate;
bitrate_is_initialized_ = true;
}
}
- if (updated_ && current_input_._bwState == kBwOverusing) {
+ if (updated_ && current_input_.bw_state == kBwOverusing) {
// Only update the delay factor and incoming bitrate. We always want to
// react to an over-use.
- current_input_._noiseVar = input->_noiseVar;
- current_input_._incomingBitRate = input->_incomingBitRate;
+ current_input_.noise_var = input->noise_var;
+ current_input_.incoming_bitrate = input->incoming_bitrate;
} else {
updated_ = true;
current_input_ = *input;
@@ -145,7 +144,7 @@ uint32_t AimdRateControl::ChangeBitrate(uint32_t current_bitrate_bps,
// An over-use should always trigger us to reduce the bitrate, even though
// we have not yet established our first estimate. By acting on the over-use,
// we will end up with a valid estimate.
- if (!bitrate_is_initialized_ && current_input_._bwState != kBwOverusing)
+ if (!bitrate_is_initialized_ && current_input_.bw_state != kBwOverusing)
return current_bitrate_bps_;
updated_ = false;
ChangeState(current_input_, now_ms);
@@ -284,7 +283,7 @@ void AimdRateControl::UpdateMaxBitRateEstimate(float incoming_bitrate_kbps) {
void AimdRateControl::ChangeState(const RateControlInput& input,
int64_t now_ms) {
- switch (current_input_._bwState) {
+ switch (current_input_.bw_state) {
case kBwNormal:
if (rate_control_state_ == kRcHold) {
time_last_bitrate_change_ = now_ms;
diff --git a/webrtc/modules/remote_bitrate_estimator/aimd_rate_control.h b/webrtc/modules/remote_bitrate_estimator/aimd_rate_control.h
index bc5ca41dff..93ae2190d6 100644
--- a/webrtc/modules/remote_bitrate_estimator/aimd_rate_control.h
+++ b/webrtc/modules/remote_bitrate_estimator/aimd_rate_control.h
@@ -84,4 +84,4 @@ class AimdRateControl {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_AIMD_RATE_CONTROL_H_
+#endif // WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_AIMD_RATE_CONTROL_H_
diff --git a/webrtc/modules/remote_bitrate_estimator/bwe_simulations.cc b/webrtc/modules/remote_bitrate_estimator/bwe_simulations.cc
index cb8d0db5c7..11fd64f84e 100644
--- a/webrtc/modules/remote_bitrate_estimator/bwe_simulations.cc
+++ b/webrtc/modules/remote_bitrate_estimator/bwe_simulations.cc
@@ -36,7 +36,7 @@ class BweSimulation : public BweTest,
VerboseLogging(true);
}
- test::Random random_;
+ Random random_;
private:
RTC_DISALLOW_COPY_AND_ASSIGN(BweSimulation);
@@ -74,7 +74,6 @@ TEST_P(BweSimulation, Verizon4gDownlinkTest) {
}
TEST_P(BweSimulation, Choke1000kbps500kbps1000kbpsBiDirectional) {
-
const int kFlowIds[] = {0, 1};
const size_t kNumFlows = sizeof(kFlowIds) / sizeof(kFlowIds[0]);
@@ -106,7 +105,6 @@ TEST_P(BweSimulation, Choke1000kbps500kbps1000kbpsBiDirectional) {
}
TEST_P(BweSimulation, Choke1000kbps500kbps1000kbps) {
-
AdaptiveVideoSource source(0, 30, 300, 0, 0);
VideoSender sender(&uplink_, &source, GetParam());
ChokeFilter choke(&uplink_, 0);
@@ -243,7 +241,7 @@ TEST_P(BweSimulation, PacerGoogleWifiTrace3Mbps) {
}
TEST_P(BweSimulation, SelfFairnessTest) {
- srand(Clock::GetRealTimeClock()->TimeInMicroseconds());
+ Random prng(Clock::GetRealTimeClock()->TimeInMicroseconds());
const int kAllFlowIds[] = {0, 1, 2, 3};
const size_t kNumFlows = sizeof(kAllFlowIds) / sizeof(kAllFlowIds[0]);
rtc::scoped_ptr<VideoSource> sources[kNumFlows];
@@ -252,7 +250,7 @@ TEST_P(BweSimulation, SelfFairnessTest) {
// Streams started 20 seconds apart to give them different advantage when
// competing for the bandwidth.
sources[i].reset(new AdaptiveVideoSource(kAllFlowIds[i], 30, 300, 0,
- i * (rand() % 40000)));
+ i * prng.Rand(39999)));
senders[i].reset(new VideoSender(&uplink_, sources[i].get(), GetParam()));
}
@@ -283,9 +281,9 @@ TEST_P(BweSimulation, PacedSelfFairness50msTest) {
const int64_t kAverageOffsetMs = 20 * 1000;
const int kNumRmcatFlows = 4;
int64_t offsets_ms[kNumRmcatFlows];
- offsets_ms[0] = random_.Rand(0, 2 * kAverageOffsetMs);
+ offsets_ms[0] = random_.Rand(2 * kAverageOffsetMs);
for (int i = 1; i < kNumRmcatFlows; ++i) {
- offsets_ms[i] = offsets_ms[i - 1] + random_.Rand(0, 2 * kAverageOffsetMs);
+ offsets_ms[i] = offsets_ms[i - 1] + random_.Rand(2 * kAverageOffsetMs);
}
RunFairnessTest(GetParam(), kNumRmcatFlows, 0, 1000, 3000, 50, 50, 0,
offsets_ms);
@@ -295,9 +293,9 @@ TEST_P(BweSimulation, PacedSelfFairness500msTest) {
const int64_t kAverageOffsetMs = 20 * 1000;
const int kNumRmcatFlows = 4;
int64_t offsets_ms[kNumRmcatFlows];
- offsets_ms[0] = random_.Rand(0, 2 * kAverageOffsetMs);
+ offsets_ms[0] = random_.Rand(2 * kAverageOffsetMs);
for (int i = 1; i < kNumRmcatFlows; ++i) {
- offsets_ms[i] = offsets_ms[i - 1] + random_.Rand(0, 2 * kAverageOffsetMs);
+ offsets_ms[i] = offsets_ms[i - 1] + random_.Rand(2 * kAverageOffsetMs);
}
RunFairnessTest(GetParam(), kNumRmcatFlows, 0, 1000, 3000, 500, 50, 0,
offsets_ms);
@@ -307,28 +305,28 @@ TEST_P(BweSimulation, PacedSelfFairness1000msTest) {
const int64_t kAverageOffsetMs = 20 * 1000;
const int kNumRmcatFlows = 4;
int64_t offsets_ms[kNumRmcatFlows];
- offsets_ms[0] = random_.Rand(0, 2 * kAverageOffsetMs);
+ offsets_ms[0] = random_.Rand(2 * kAverageOffsetMs);
for (int i = 1; i < kNumRmcatFlows; ++i) {
- offsets_ms[i] = offsets_ms[i - 1] + random_.Rand(0, 2 * kAverageOffsetMs);
+ offsets_ms[i] = offsets_ms[i - 1] + random_.Rand(2 * kAverageOffsetMs);
}
RunFairnessTest(GetParam(), 4, 0, 1000, 3000, 1000, 50, 0, offsets_ms);
}
TEST_P(BweSimulation, TcpFairness50msTest) {
const int64_t kAverageOffsetMs = 20 * 1000;
- int64_t offset_ms[] = {random_.Rand(0, 2 * kAverageOffsetMs), 0};
+ int64_t offset_ms[] = {random_.Rand(2 * kAverageOffsetMs), 0};
RunFairnessTest(GetParam(), 1, 1, 1000, 2000, 50, 50, 0, offset_ms);
}
TEST_P(BweSimulation, TcpFairness500msTest) {
const int64_t kAverageOffsetMs = 20 * 1000;
- int64_t offset_ms[] = {random_.Rand(0, 2 * kAverageOffsetMs), 0};
+ int64_t offset_ms[] = {random_.Rand(2 * kAverageOffsetMs), 0};
RunFairnessTest(GetParam(), 1, 1, 1000, 2000, 500, 50, 0, offset_ms);
}
TEST_P(BweSimulation, TcpFairness1000msTest) {
const int kAverageOffsetMs = 20 * 1000;
- int64_t offset_ms[] = {random_.Rand(0, 2 * kAverageOffsetMs), 0};
+ int64_t offset_ms[] = {random_.Rand(2 * kAverageOffsetMs), 0};
RunFairnessTest(GetParam(), 1, 1, 1000, 2000, 1000, 50, 0, offset_ms);
}
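These simulation changes replace the process-global srand()/rand() pair with webrtc's seeded Random, making runs reproducible. A sketch of the offset pattern used above follows; MakeOffsets and the seed value are illustrative, while Random's constructor and single-argument Rand() are taken from the usage in this diff.

#include <cstdint>

#include "webrtc/base/random.h"

// Illustrative helper showing the reproducible-offset pattern used above.
void MakeOffsets(int64_t* offsets_ms, int num_flows, uint32_t max_offset_ms) {
  webrtc::Random prng(123456789);  // Fixed seed: every run gets same offsets.
  offsets_ms[0] = prng.Rand(max_offset_ms);  // Uniform draw, as used above.
  for (int i = 1; i < num_flows; ++i)
    offsets_ms[i] = offsets_ms[i - 1] + prng.Rand(max_offset_ms);
}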
diff --git a/webrtc/modules/remote_bitrate_estimator/include/bwe_defines.h b/webrtc/modules/remote_bitrate_estimator/include/bwe_defines.h
index 844fde5b71..3fb7e29e5b 100644
--- a/webrtc/modules/remote_bitrate_estimator/include/bwe_defines.h
+++ b/webrtc/modules/remote_bitrate_estimator/include/bwe_defines.h
@@ -8,53 +8,40 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_BWE_DEFINES_H_
-#define WEBRTC_MODULES_RTP_RTCP_SOURCE_BWE_DEFINES_H_
+#ifndef WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_INCLUDE_BWE_DEFINES_H_
+#define WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_INCLUDE_BWE_DEFINES_H_
#include "webrtc/typedefs.h"
-#define BWE_MAX(a,b) ((a)>(b)?(a):(b))
-#define BWE_MIN(a,b) ((a)<(b)?(a):(b))
+#define BWE_MAX(a, b) ((a) > (b) ? (a) : (b))
+#define BWE_MIN(a, b) ((a) < (b) ? (a) : (b))
namespace webrtc {
static const int64_t kBitrateWindowMs = 1000;
-enum BandwidthUsage
-{
- kBwNormal = 0,
- kBwUnderusing = 1,
- kBwOverusing = 2,
+enum BandwidthUsage {
+ kBwNormal = 0,
+ kBwUnderusing = 1,
+ kBwOverusing = 2,
};
-enum RateControlState
-{
- kRcHold,
- kRcIncrease,
- kRcDecrease
-};
+enum RateControlState { kRcHold, kRcIncrease, kRcDecrease };
-enum RateControlRegion
-{
- kRcNearMax,
- kRcAboveMax,
- kRcMaxUnknown
-};
+enum RateControlRegion { kRcNearMax, kRcAboveMax, kRcMaxUnknown };
+
+struct RateControlInput {
+ RateControlInput(BandwidthUsage bw_state,
+ uint32_t incoming_bitrate,
+ double noise_var)
+ : bw_state(bw_state),
+ incoming_bitrate(incoming_bitrate),
+ noise_var(noise_var) {}
-class RateControlInput
-{
-public:
- RateControlInput(BandwidthUsage bwState,
- uint32_t incomingBitRate,
- double noiseVar)
- : _bwState(bwState),
- _incomingBitRate(incomingBitRate),
- _noiseVar(noiseVar) {}
-
- BandwidthUsage _bwState;
- uint32_t _incomingBitRate;
- double _noiseVar;
+ BandwidthUsage bw_state;
+ uint32_t incoming_bitrate;
+ double noise_var;
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_BWE_DEFINES_H_
+#endif // WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_INCLUDE_BWE_DEFINES_H_
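With RateControlInput now a plain struct, call sites construct and read it directly using the renamed Google-style members. A minimal usage sketch follows; the function name and values are made up, while the constructor and field names come from the struct above.

#include "webrtc/modules/remote_bitrate_estimator/include/bwe_defines.h"

void ExampleRateControlInputUsage() {
  webrtc::RateControlInput input(webrtc::kBwOverusing,
                                 300000,  // incoming_bitrate, in bps.
                                 0.5);    // noise_var.
  if (input.bw_state == webrtc::kBwOverusing && input.incoming_bitrate > 0) {
    // React to the over-use, e.g. by lowering the target bitrate.
  }
}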
diff --git a/webrtc/modules/remote_bitrate_estimator/include/mock/mock_remote_bitrate_observer.h b/webrtc/modules/remote_bitrate_estimator/include/mock/mock_remote_bitrate_observer.h
index edfac977a2..ae05912b5f 100644
--- a/webrtc/modules/remote_bitrate_estimator/include/mock/mock_remote_bitrate_observer.h
+++ b/webrtc/modules/remote_bitrate_estimator/include/mock/mock_remote_bitrate_observer.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_INCLUDE_MOCK_MOCK_REMOTE_BITRATE_ESTIMATOR_H_
-#define WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_INCLUDE_MOCK_MOCK_REMOTE_BITRATE_ESTIMATOR_H_
+#ifndef WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_INCLUDE_MOCK_MOCK_REMOTE_BITRATE_OBSERVER_H_
+#define WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_INCLUDE_MOCK_MOCK_REMOTE_BITRATE_OBSERVER_H_
#include <vector>
@@ -26,4 +26,4 @@ class MockRemoteBitrateObserver : public RemoteBitrateObserver {
} // namespace webrtc
-#endif // WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_INCLUDE_MOCK_MOCK_REMOTE_BITRATE_ESTIMATOR_H_
+#endif // WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_INCLUDE_MOCK_MOCK_REMOTE_BITRATE_OBSERVER_H_
diff --git a/webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h b/webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h
index 4bd9d8c7bc..0734cbf255 100644
--- a/webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h
+++ b/webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h
@@ -17,9 +17,9 @@
#include <vector>
#include "webrtc/common_types.h"
-#include "webrtc/modules/interface/module.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/include/module.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/typedefs.h"
namespace webrtc {
diff --git a/webrtc/modules/remote_bitrate_estimator/include/send_time_history.h b/webrtc/modules/remote_bitrate_estimator/include/send_time_history.h
index 92d2e28132..a643c1f103 100644
--- a/webrtc/modules/remote_bitrate_estimator/include/send_time_history.h
+++ b/webrtc/modules/remote_bitrate_estimator/include/send_time_history.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_BITRATE_CONTROLLER_SEND_TIME_HISTORY_H_
-#define WEBRTC_MODULES_BITRATE_CONTROLLER_SEND_TIME_HISTORY_H_
+#ifndef WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_INCLUDE_SEND_TIME_HISTORY_H_
+#define WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_INCLUDE_SEND_TIME_HISTORY_H_
#include <map>
@@ -45,4 +45,4 @@ class SendTimeHistory {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_BITRATE_CONTROLLER_SEND_TIME_HISTORY_H_
+#endif // WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_INCLUDE_SEND_TIME_HISTORY_H_
diff --git a/webrtc/modules/remote_bitrate_estimator/inter_arrival.cc b/webrtc/modules/remote_bitrate_estimator/inter_arrival.cc
index 3dee305bad..f75bc2b03e 100644
--- a/webrtc/modules/remote_bitrate_estimator/inter_arrival.cc
+++ b/webrtc/modules/remote_bitrate_estimator/inter_arrival.cc
@@ -14,7 +14,7 @@
#include <cassert>
#include "webrtc/base/logging.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
namespace webrtc {
@@ -71,8 +71,7 @@ bool InterArrival::ComputeDeltas(uint32_t timestamp,
current_timestamp_group_.first_timestamp = timestamp;
current_timestamp_group_.timestamp = timestamp;
current_timestamp_group_.size = 0;
- }
- else {
+ } else {
current_timestamp_group_.timestamp = LatestTimestamp(
current_timestamp_group_.timestamp, timestamp);
}
diff --git a/webrtc/modules/remote_bitrate_estimator/overuse_detector.cc b/webrtc/modules/remote_bitrate_estimator/overuse_detector.cc
index c9340892f2..0acd7c29c5 100644
--- a/webrtc/modules/remote_bitrate_estimator/overuse_detector.cc
+++ b/webrtc/modules/remote_bitrate_estimator/overuse_detector.cc
@@ -10,11 +10,13 @@
#include "webrtc/modules/remote_bitrate_estimator/overuse_detector.h"
-#include <algorithm>
-#include <sstream>
#include <math.h>
#include <stdlib.h>
+#include <algorithm>
+#include <sstream>
+#include <string>
+
#include "webrtc/base/checks.h"
#include "webrtc/base/common.h"
#include "webrtc/modules/remote_bitrate_estimator/include/bwe_defines.h"
diff --git a/webrtc/modules/remote_bitrate_estimator/overuse_detector.h b/webrtc/modules/remote_bitrate_estimator/overuse_detector.h
index bb69a8a0a1..56e9c14206 100644
--- a/webrtc/modules/remote_bitrate_estimator/overuse_detector.h
+++ b/webrtc/modules/remote_bitrate_estimator/overuse_detector.h
@@ -13,7 +13,7 @@
#include <list>
#include "webrtc/base/constructormagic.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/remote_bitrate_estimator/include/bwe_defines.h"
#include "webrtc/typedefs.h"
diff --git a/webrtc/modules/remote_bitrate_estimator/overuse_detector_unittest.cc b/webrtc/modules/remote_bitrate_estimator/overuse_detector_unittest.cc
index dcad04b5f6..50909ebd01 100644
--- a/webrtc/modules/remote_bitrate_estimator/overuse_detector_unittest.cc
+++ b/webrtc/modules/remote_bitrate_estimator/overuse_detector_unittest.cc
@@ -9,11 +9,14 @@
*/
#include <math.h>
+
+#include <algorithm>
#include <cmath>
#include <cstdlib>
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/random.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/common_types.h"
#include "webrtc/modules/remote_bitrate_estimator/inter_arrival.h"
@@ -21,8 +24,6 @@
#include "webrtc/modules/remote_bitrate_estimator/overuse_estimator.h"
#include "webrtc/modules/remote_bitrate_estimator/rate_statistics.h"
#include "webrtc/test/field_trial.h"
-#include "webrtc/test/random.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
namespace webrtc {
namespace testing {
@@ -38,7 +39,7 @@ class OveruseDetectorTest : public ::testing::Test {
overuse_detector_(),
overuse_estimator_(new OveruseEstimator(options_)),
inter_arrival_(new InterArrival(5 * 90, kRtpTimestampToMs, true)),
- random_(1234) {}
+ random_(123456789) {}
protected:
void SetUp() override {
@@ -55,9 +56,10 @@ class OveruseDetectorTest : public ::testing::Test {
}
rtp_timestamp_ += mean_ms * 90;
now_ms_ += mean_ms;
- receive_time_ms_ =
- std::max(receive_time_ms_,
- now_ms_ + random_.Gaussian(0, standard_deviation_ms));
+ receive_time_ms_ = std::max<int64_t>(
+ receive_time_ms_,
+ now_ms_ + static_cast<int64_t>(
+ random_.Gaussian(0, standard_deviation_ms) + 0.5));
if (kBwOverusing == overuse_detector_->State()) {
if (last_overuse + 1 != i) {
unique_overuse++;
@@ -77,9 +79,10 @@ class OveruseDetectorTest : public ::testing::Test {
}
rtp_timestamp_ += mean_ms * 90;
now_ms_ += mean_ms + drift_per_frame_ms;
- receive_time_ms_ =
- std::max(receive_time_ms_,
- now_ms_ + random_.Gaussian(0, standard_deviation_ms));
+ receive_time_ms_ = std::max<int64_t>(
+ receive_time_ms_,
+ now_ms_ + static_cast<int64_t>(
+ random_.Gaussian(0, standard_deviation_ms) + 0.5));
if (kBwOverusing == overuse_detector_->State()) {
return i + 1;
}
@@ -114,7 +117,7 @@ class OveruseDetectorTest : public ::testing::Test {
rtc::scoped_ptr<OveruseDetector> overuse_detector_;
rtc::scoped_ptr<OveruseEstimator> overuse_estimator_;
rtc::scoped_ptr<InterArrival> inter_arrival_;
- test::Random random_;
+ Random random_;
};
TEST_F(OveruseDetectorTest, GaussianRandom) {
@@ -222,7 +225,7 @@ TEST_F(OveruseDetectorTest, DISABLED_OveruseWithHighVariance100Kbit10fps) {
UpdateDetector(rtp_timestamp, now_ms_, packet_size);
rtp_timestamp += frame_duration_ms * 90;
if (i % 2) {
- offset = rand() % 50;
+ offset = random_.Rand(0, 49);
now_ms_ += frame_duration_ms - offset;
} else {
now_ms_ += frame_duration_ms + offset;
@@ -254,7 +257,7 @@ TEST_F(OveruseDetectorTest, DISABLED_OveruseWithLowVariance100Kbit10fps) {
UpdateDetector(rtp_timestamp, now_ms_, packet_size);
rtp_timestamp += frame_duration_ms * 90;
if (i % 2) {
- offset = rand() % 2;
+ offset = random_.Rand(0, 1);
now_ms_ += frame_duration_ms - offset;
} else {
now_ms_ += frame_duration_ms + offset;
@@ -290,7 +293,7 @@ TEST_F(OveruseDetectorTest, OveruseWithLowVariance2000Kbit30fps) {
UpdateDetector(rtp_timestamp, now_ms_, packet_size);
rtp_timestamp += frame_duration_ms * 90;
if (i % 2) {
- offset = rand() % 2;
+ offset = random_.Rand(0, 1);
now_ms_ += frame_duration_ms - offset;
} else {
now_ms_ += frame_duration_ms + offset;
@@ -314,8 +317,13 @@ TEST_F(OveruseDetectorTest, OveruseWithLowVariance2000Kbit30fps) {
EXPECT_EQ(kBwOverusing, overuse_detector_->State());
}
-TEST_F(OveruseDetectorTest,
- DISABLED_ON_ANDROID(LowGaussianVariance30Kbit3fps)) {
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_LowGaussianVariance30Kbit3fps \
+ DISABLED_LowGaussianVariance30Kbit3fps
+#else
+#define MAYBE_LowGaussianVariance30Kbit3fps LowGaussianVariance30Kbit3fps
+#endif
+TEST_F(OveruseDetectorTest, MAYBE_LowGaussianVariance30Kbit3fps) {
size_t packet_size = 1200;
int packets_per_frame = 1;
int frame_duration_ms = 333;
@@ -323,10 +331,10 @@ TEST_F(OveruseDetectorTest,
int sigma_ms = 3;
int unique_overuse = Run100000Samples(packets_per_frame, packet_size,
frame_duration_ms, sigma_ms);
- EXPECT_EQ(13, unique_overuse);
+ EXPECT_EQ(1, unique_overuse);
int frames_until_overuse = RunUntilOveruse(packets_per_frame, packet_size,
frame_duration_ms, sigma_ms, drift_per_frame_ms);
- EXPECT_EQ(14, frames_until_overuse);
+ EXPECT_EQ(13, frames_until_overuse);
}
TEST_F(OveruseDetectorTest, LowGaussianVarianceFastDrift30Kbit3fps) {
@@ -337,7 +345,7 @@ TEST_F(OveruseDetectorTest, LowGaussianVarianceFastDrift30Kbit3fps) {
int sigma_ms = 3;
int unique_overuse = Run100000Samples(packets_per_frame, packet_size,
frame_duration_ms, sigma_ms);
- EXPECT_EQ(13, unique_overuse);
+ EXPECT_EQ(1, unique_overuse);
int frames_until_overuse = RunUntilOveruse(packets_per_frame, packet_size,
frame_duration_ms, sigma_ms, drift_per_frame_ms);
EXPECT_EQ(4, frames_until_overuse);
@@ -351,10 +359,10 @@ TEST_F(OveruseDetectorTest, HighGaussianVariance30Kbit3fps) {
int sigma_ms = 10;
int unique_overuse = Run100000Samples(packets_per_frame, packet_size,
frame_duration_ms, sigma_ms);
- EXPECT_EQ(46, unique_overuse);
+ EXPECT_EQ(1, unique_overuse);
int frames_until_overuse = RunUntilOveruse(packets_per_frame, packet_size,
frame_duration_ms, sigma_ms, drift_per_frame_ms);
- EXPECT_EQ(42, frames_until_overuse);
+ EXPECT_EQ(32, frames_until_overuse);
}
TEST_F(OveruseDetectorTest, HighGaussianVarianceFastDrift30Kbit3fps) {
@@ -365,14 +373,19 @@ TEST_F(OveruseDetectorTest, HighGaussianVarianceFastDrift30Kbit3fps) {
int sigma_ms = 10;
int unique_overuse = Run100000Samples(packets_per_frame, packet_size,
frame_duration_ms, sigma_ms);
- EXPECT_EQ(46, unique_overuse);
+ EXPECT_EQ(1, unique_overuse);
int frames_until_overuse = RunUntilOveruse(packets_per_frame, packet_size,
frame_duration_ms, sigma_ms, drift_per_frame_ms);
EXPECT_EQ(4, frames_until_overuse);
}
-TEST_F(OveruseDetectorTest,
- DISABLED_ON_ANDROID(LowGaussianVariance100Kbit5fps)) {
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_LowGaussianVariance100Kbit5fps \
+ DISABLED_LowGaussianVariance100Kbit5fps
+#else
+#define MAYBE_LowGaussianVariance100Kbit5fps LowGaussianVariance100Kbit5fps
+#endif
+TEST_F(OveruseDetectorTest, MAYBE_LowGaussianVariance100Kbit5fps) {
size_t packet_size = 1200;
int packets_per_frame = 2;
int frame_duration_ms = 200;
@@ -380,14 +393,19 @@ TEST_F(OveruseDetectorTest,
int sigma_ms = 3;
int unique_overuse = Run100000Samples(packets_per_frame, packet_size,
frame_duration_ms, sigma_ms);
- EXPECT_EQ(12, unique_overuse);
+ EXPECT_EQ(0, unique_overuse);
int frames_until_overuse = RunUntilOveruse(packets_per_frame, packet_size,
frame_duration_ms, sigma_ms, drift_per_frame_ms);
- EXPECT_EQ(12, frames_until_overuse);
+ EXPECT_EQ(13, frames_until_overuse);
}
-TEST_F(OveruseDetectorTest,
- DISABLED_ON_ANDROID(HighGaussianVariance100Kbit5fps)) {
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_HighGaussianVariance100Kbit5fps \
+ DISABLED_HighGaussianVariance100Kbit5fps
+#else
+#define MAYBE_HighGaussianVariance100Kbit5fps HighGaussianVariance100Kbit5fps
+#endif
+TEST_F(OveruseDetectorTest, MAYBE_HighGaussianVariance100Kbit5fps) {
size_t packet_size = 1200;
int packets_per_frame = 2;
int frame_duration_ms = 200;
@@ -395,14 +413,19 @@ TEST_F(OveruseDetectorTest,
int sigma_ms = 10;
int unique_overuse = Run100000Samples(packets_per_frame, packet_size,
frame_duration_ms, sigma_ms);
- EXPECT_EQ(16, unique_overuse);
+ EXPECT_EQ(1, unique_overuse);
int frames_until_overuse = RunUntilOveruse(packets_per_frame, packet_size,
frame_duration_ms, sigma_ms, drift_per_frame_ms);
- EXPECT_EQ(37, frames_until_overuse);
+ EXPECT_EQ(32, frames_until_overuse);
}
-TEST_F(OveruseDetectorTest,
- DISABLED_ON_ANDROID(LowGaussianVariance100Kbit10fps)) {
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_LowGaussianVariance100Kbit10fps \
+ DISABLED_LowGaussianVariance100Kbit10fps
+#else
+#define MAYBE_LowGaussianVariance100Kbit10fps LowGaussianVariance100Kbit10fps
+#endif
+TEST_F(OveruseDetectorTest, MAYBE_LowGaussianVariance100Kbit10fps) {
size_t packet_size = 1200;
int packets_per_frame = 1;
int frame_duration_ms = 100;
@@ -410,14 +433,19 @@ TEST_F(OveruseDetectorTest,
int sigma_ms = 3;
int unique_overuse = Run100000Samples(packets_per_frame, packet_size,
frame_duration_ms, sigma_ms);
- EXPECT_EQ(12, unique_overuse);
+ EXPECT_EQ(1, unique_overuse);
int frames_until_overuse = RunUntilOveruse(packets_per_frame, packet_size,
frame_duration_ms, sigma_ms, drift_per_frame_ms);
- EXPECT_EQ(12, frames_until_overuse);
+ EXPECT_EQ(13, frames_until_overuse);
}
-TEST_F(OveruseDetectorTest,
- DISABLED_ON_ANDROID(HighGaussianVariance100Kbit10fps)) {
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_HighGaussianVariance100Kbit10fps \
+ DISABLED_HighGaussianVariance100Kbit10fps
+#else
+#define MAYBE_HighGaussianVariance100Kbit10fps HighGaussianVariance100Kbit10fps
+#endif
+TEST_F(OveruseDetectorTest, MAYBE_HighGaussianVariance100Kbit10fps) {
size_t packet_size = 1200;
int packets_per_frame = 1;
int frame_duration_ms = 100;
@@ -425,14 +453,19 @@ TEST_F(OveruseDetectorTest,
int sigma_ms = 10;
int unique_overuse = Run100000Samples(packets_per_frame, packet_size,
frame_duration_ms, sigma_ms);
- EXPECT_EQ(12, unique_overuse);
+ EXPECT_EQ(0, unique_overuse);
int frames_until_overuse = RunUntilOveruse(packets_per_frame, packet_size,
frame_duration_ms, sigma_ms, drift_per_frame_ms);
- EXPECT_EQ(37, frames_until_overuse);
+ EXPECT_EQ(32, frames_until_overuse);
}
-TEST_F(OveruseDetectorTest,
- DISABLED_ON_ANDROID(LowGaussianVariance300Kbit30fps)) {
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_LowGaussianVariance300Kbit30fps \
+ DISABLED_LowGaussianVariance300Kbit30fps
+#else
+#define MAYBE_LowGaussianVariance300Kbit30fps LowGaussianVariance300Kbit30fps
+#endif
+TEST_F(OveruseDetectorTest, MAYBE_LowGaussianVariance300Kbit30fps) {
size_t packet_size = 1200;
int packets_per_frame = 1;
int frame_duration_ms = 33;
@@ -443,7 +476,7 @@ TEST_F(OveruseDetectorTest,
EXPECT_EQ(0, unique_overuse);
int frames_until_overuse = RunUntilOveruse(packets_per_frame, packet_size,
frame_duration_ms, sigma_ms, drift_per_frame_ms);
- EXPECT_EQ(14, frames_until_overuse);
+ EXPECT_EQ(15, frames_until_overuse);
}
TEST_F(OveruseDetectorTest, LowGaussianVarianceFastDrift300Kbit30fps) {
@@ -471,7 +504,7 @@ TEST_F(OveruseDetectorTest, HighGaussianVariance300Kbit30fps) {
EXPECT_EQ(0, unique_overuse);
int frames_until_overuse = RunUntilOveruse(packets_per_frame, packet_size,
frame_duration_ms, sigma_ms, drift_per_frame_ms);
- EXPECT_EQ(49, frames_until_overuse);
+ EXPECT_EQ(41, frames_until_overuse);
}
TEST_F(OveruseDetectorTest, HighGaussianVarianceFastDrift300Kbit30fps) {
@@ -485,11 +518,16 @@ TEST_F(OveruseDetectorTest, HighGaussianVarianceFastDrift300Kbit30fps) {
EXPECT_EQ(0, unique_overuse);
int frames_until_overuse = RunUntilOveruse(packets_per_frame, packet_size,
frame_duration_ms, sigma_ms, drift_per_frame_ms);
- EXPECT_EQ(8, frames_until_overuse);
+ EXPECT_EQ(10, frames_until_overuse);
}
-TEST_F(OveruseDetectorTest,
- DISABLED_ON_ANDROID(LowGaussianVariance1000Kbit30fps)) {
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_LowGaussianVariance1000Kbit30fps \
+ DISABLED_LowGaussianVariance1000Kbit30fps
+#else
+#define MAYBE_LowGaussianVariance1000Kbit30fps LowGaussianVariance1000Kbit30fps
+#endif
+TEST_F(OveruseDetectorTest, MAYBE_LowGaussianVariance1000Kbit30fps) {
size_t packet_size = 1200;
int packets_per_frame = 3;
int frame_duration_ms = 33;
@@ -500,7 +538,7 @@ TEST_F(OveruseDetectorTest,
EXPECT_EQ(0, unique_overuse);
int frames_until_overuse = RunUntilOveruse(packets_per_frame, packet_size,
frame_duration_ms, sigma_ms, drift_per_frame_ms);
- EXPECT_EQ(14, frames_until_overuse);
+ EXPECT_EQ(15, frames_until_overuse);
}
TEST_F(OveruseDetectorTest, LowGaussianVarianceFastDrift1000Kbit30fps) {
@@ -528,7 +566,7 @@ TEST_F(OveruseDetectorTest, HighGaussianVariance1000Kbit30fps) {
EXPECT_EQ(0, unique_overuse);
int frames_until_overuse = RunUntilOveruse(packets_per_frame, packet_size,
frame_duration_ms, sigma_ms, drift_per_frame_ms);
- EXPECT_EQ(49, frames_until_overuse);
+ EXPECT_EQ(41, frames_until_overuse);
}
TEST_F(OveruseDetectorTest, HighGaussianVarianceFastDrift1000Kbit30fps) {
@@ -542,11 +580,16 @@ TEST_F(OveruseDetectorTest, HighGaussianVarianceFastDrift1000Kbit30fps) {
EXPECT_EQ(0, unique_overuse);
int frames_until_overuse = RunUntilOveruse(packets_per_frame, packet_size,
frame_duration_ms, sigma_ms, drift_per_frame_ms);
- EXPECT_EQ(8, frames_until_overuse);
+ EXPECT_EQ(10, frames_until_overuse);
}
-TEST_F(OveruseDetectorTest,
- DISABLED_ON_ANDROID(LowGaussianVariance2000Kbit30fps)) {
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_LowGaussianVariance2000Kbit30fps \
+ DISABLED_LowGaussianVariance2000Kbit30fps
+#else
+#define MAYBE_LowGaussianVariance2000Kbit30fps LowGaussianVariance2000Kbit30fps
+#endif
+TEST_F(OveruseDetectorTest, MAYBE_LowGaussianVariance2000Kbit30fps) {
size_t packet_size = 1200;
int packets_per_frame = 6;
int frame_duration_ms = 33;
@@ -557,7 +600,7 @@ TEST_F(OveruseDetectorTest,
EXPECT_EQ(0, unique_overuse);
int frames_until_overuse = RunUntilOveruse(packets_per_frame, packet_size,
frame_duration_ms, sigma_ms, drift_per_frame_ms);
- EXPECT_EQ(14, frames_until_overuse);
+ EXPECT_EQ(15, frames_until_overuse);
}
TEST_F(OveruseDetectorTest, LowGaussianVarianceFastDrift2000Kbit30fps) {
@@ -585,7 +628,7 @@ TEST_F(OveruseDetectorTest, HighGaussianVariance2000Kbit30fps) {
EXPECT_EQ(0, unique_overuse);
int frames_until_overuse = RunUntilOveruse(packets_per_frame, packet_size,
frame_duration_ms, sigma_ms, drift_per_frame_ms);
- EXPECT_EQ(49, frames_until_overuse);
+ EXPECT_EQ(41, frames_until_overuse);
}
TEST_F(OveruseDetectorTest, HighGaussianVarianceFastDrift2000Kbit30fps) {
@@ -599,7 +642,7 @@ TEST_F(OveruseDetectorTest, HighGaussianVarianceFastDrift2000Kbit30fps) {
EXPECT_EQ(0, unique_overuse);
int frames_until_overuse = RunUntilOveruse(packets_per_frame, packet_size,
frame_duration_ms, sigma_ms, drift_per_frame_ms);
- EXPECT_EQ(8, frames_until_overuse);
+ EXPECT_EQ(10, frames_until_overuse);
}
class OveruseDetectorExperimentTest : public OveruseDetectorTest {
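Note on the MAYBE_ blocks above: they replace webrtc's old DISABLED_ON_ANDROID() wrapper with the stock gtest idiom for per-platform disabling — rename the test to DISABLED_* on the affected platform, so gtest still compiles it but reports it as disabled instead of running it. A minimal sketch of the pattern, with hypothetical names (MyFixture, MyTest):

#if defined(WEBRTC_ANDROID)
#define MAYBE_MyTest DISABLED_MyTest  // gtest skips tests named DISABLED_*.
#else
#define MAYBE_MyTest MyTest
#endif
TEST_F(MyFixture, MAYBE_MyTest) {
  // Runs everywhere except Android, where it is compiled but not executed.
}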
diff --git a/webrtc/modules/remote_bitrate_estimator/overuse_estimator.cc b/webrtc/modules/remote_bitrate_estimator/overuse_estimator.cc
index 4be7b7493b..83917912e8 100644
--- a/webrtc/modules/remote_bitrate_estimator/overuse_estimator.cc
+++ b/webrtc/modules/remote_bitrate_estimator/overuse_estimator.cc
@@ -10,14 +10,15 @@
#include "webrtc/modules/remote_bitrate_estimator/overuse_estimator.h"
-#include <algorithm>
#include <assert.h>
#include <math.h>
#include <stdlib.h>
#include <string.h>
+#include <algorithm>
+
+#include "webrtc/base/logging.h"
#include "webrtc/modules/remote_bitrate_estimator/include/bwe_defines.h"
-#include "webrtc/system_wrappers/include/logging.h"
namespace webrtc {
diff --git a/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc b/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc
index 2dc32a7ee7..97e5cd32e5 100644
--- a/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc
+++ b/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc
@@ -12,14 +12,16 @@
#include <math.h>
+#include <algorithm>
+
#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/logging.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/thread_annotations.h"
+#include "webrtc/modules/pacing/paced_sender.h"
#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
-#include "webrtc/modules/pacing/include/paced_sender.h"
#include "webrtc/system_wrappers/include/clock.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/logging.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -241,6 +243,7 @@ void RemoteBitrateEstimatorAbsSendTime::IncomingPacket(int64_t arrival_time_ms,
if (!header.extension.hasAbsoluteSendTime) {
LOG(LS_WARNING) << "RemoteBitrateEstimatorAbsSendTimeImpl: Incoming packet "
"is missing absolute send time extension!";
+ return;
}
IncomingPacketInfo(arrival_time_ms, header.extension.absoluteSendTime,
payload_size, header.ssrc, was_paced);
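The added return above upgrades the warning into a real guard: previously execution fell through and IncomingPacketInfo() consumed header.extension.absoluteSendTime from a packet that never carried the extension, i.e. a meaningless send time. A guard-clause sketch of the same shape, with hypothetical names (OnPacket, UseAbsSendTime):

void OnPacket(const RTPHeader& header) {
  if (!header.extension.hasAbsoluteSendTime) {
    LOG(LS_WARNING) << "Incoming packet is missing abs send time extension!";
    return;  // Do not feed a bogus send time into the estimator.
  }
  UseAbsSendTime(header.extension.absoluteSendTime);
}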
diff --git a/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time_unittest.cc b/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time_unittest.cc
index 195c95aacb..908daf6c31 100644
--- a/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time_unittest.cc
+++ b/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time_unittest.cc
@@ -17,7 +17,6 @@ namespace webrtc {
class RemoteBitrateEstimatorAbsSendTimeTest :
public RemoteBitrateEstimatorTest {
public:
-
RemoteBitrateEstimatorAbsSendTimeTest() {}
virtual void SetUp() {
bitrate_estimator_.reset(new RemoteBitrateEstimatorAbsSendTime(
diff --git a/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc b/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc
index f1a1cb6602..4b7732c80f 100644
--- a/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc
+++ b/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc
@@ -10,16 +10,18 @@
#include "webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h"
+#include <utility>
+
#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/logging.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/thread_annotations.h"
+#include "webrtc/modules/remote_bitrate_estimator/aimd_rate_control.h"
#include "webrtc/modules/remote_bitrate_estimator/inter_arrival.h"
#include "webrtc/modules/remote_bitrate_estimator/overuse_detector.h"
#include "webrtc/modules/remote_bitrate_estimator/overuse_estimator.h"
-#include "webrtc/modules/remote_bitrate_estimator/aimd_rate_control.h"
#include "webrtc/system_wrappers/include/clock.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/logging.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -28,19 +30,20 @@ enum { kTimestampGroupLengthMs = 5 };
static const double kTimestampToMs = 1.0 / 90.0;
struct RemoteBitrateEstimatorSingleStream::Detector {
- explicit Detector(int64_t last_packet_time_ms,
- const OverUseDetectorOptions& options,
- bool enable_burst_grouping)
- : last_packet_time_ms(last_packet_time_ms),
- inter_arrival(90 * kTimestampGroupLengthMs, kTimestampToMs,
- enable_burst_grouping),
- estimator(options),
- detector(options) {}
- int64_t last_packet_time_ms;
- InterArrival inter_arrival;
- OveruseEstimator estimator;
- OveruseDetector detector;
- };
+ explicit Detector(int64_t last_packet_time_ms,
+ const OverUseDetectorOptions& options,
+ bool enable_burst_grouping)
+ : last_packet_time_ms(last_packet_time_ms),
+ inter_arrival(90 * kTimestampGroupLengthMs,
+ kTimestampToMs,
+ enable_burst_grouping),
+ estimator(options),
+ detector(options) {}
+ int64_t last_packet_time_ms;
+ InterArrival inter_arrival;
+ OveruseEstimator estimator;
+ OveruseDetector detector;
+};
RemoteBitrateEstimatorSingleStream::RemoteBitrateEstimatorSingleStream(
RemoteBitrateObserver* observer,
diff --git a/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream_unittest.cc b/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream_unittest.cc
index a6c182a7bc..7a26a7e63b 100644
--- a/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream_unittest.cc
+++ b/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream_unittest.cc
@@ -17,7 +17,6 @@ namespace webrtc {
class RemoteBitrateEstimatorSingleTest :
public RemoteBitrateEstimatorTest {
public:
-
RemoteBitrateEstimatorSingleTest() {}
virtual void SetUp() {
bitrate_estimator_.reset(new RemoteBitrateEstimatorSingleStream(
diff --git a/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.cc b/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.cc
index 315f5422d9..8b9c0b9a1d 100644
--- a/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.cc
+++ b/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.cc
@@ -10,6 +10,7 @@
#include "webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h"
#include <algorithm>
+#include <limits>
#include <utility>
namespace webrtc {
@@ -383,11 +384,11 @@ void RemoteBitrateEstimatorTest::RateIncreaseReorderingTestHelper(
2 * kFrameIntervalAbsSendTime);
IncomingPacket(kDefaultSsrc, 1000, clock_.TimeInMilliseconds(), timestamp,
absolute_send_time, true);
- IncomingPacket(
- kDefaultSsrc, 1000, clock_.TimeInMilliseconds(),
- timestamp - 90 * kFrameIntervalMs,
- AddAbsSendTime(absolute_send_time, -int(kFrameIntervalAbsSendTime)),
- true);
+ IncomingPacket(kDefaultSsrc, 1000, clock_.TimeInMilliseconds(),
+ timestamp - 90 * kFrameIntervalMs,
+ AddAbsSendTime(absolute_send_time,
+ -static_cast<int>(kFrameIntervalAbsSendTime)),
+ true);
}
bitrate_estimator_->Process();
EXPECT_TRUE(bitrate_observer_->updated());
@@ -520,8 +521,8 @@ void RemoteBitrateEstimatorTest::TestTimestampGroupingTestHelper() {
uint32_t timestamp = 0;
// Initialize absolute_send_time (24 bits) so that it will definitely wrap
// during the test.
- uint32_t absolute_send_time =
- AddAbsSendTime((1 << 24), -int(50 * kFrameIntervalAbsSendTime));
+ uint32_t absolute_send_time = AddAbsSendTime(
+ (1 << 24), -static_cast<int>(50 * kFrameIntervalAbsSendTime));
// Initial set of frames to increase the bitrate. 6 seconds to have enough
// time for the first estimate to be generated and for Process() to be called.
for (int i = 0; i <= 6 * kFramerate; ++i) {
@@ -556,8 +557,10 @@ void RemoteBitrateEstimatorTest::TestTimestampGroupingTestHelper() {
// Increase time until next batch to simulate over-use.
clock_.AdvanceTimeMilliseconds(10);
timestamp += 90 * kFrameIntervalMs - kTimestampGroupLength;
- absolute_send_time = AddAbsSendTime(absolute_send_time, AddAbsSendTime(
- kFrameIntervalAbsSendTime, -int(kTimestampGroupLengthAbsSendTime)));
+ absolute_send_time = AddAbsSendTime(
+ absolute_send_time,
+ AddAbsSendTime(kFrameIntervalAbsSendTime,
+ -static_cast<int>(kTimestampGroupLengthAbsSendTime)));
bitrate_estimator_->Process();
}
EXPECT_TRUE(bitrate_observer_->updated());
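The -int(...) to -static_cast<int>(...) rewrites above are style-only; the interesting part is that absolute send time is a 24-bit field, so this arithmetic must wrap modulo 2^24 — which is why the test seeds absolute_send_time just below 1 << 24, to force a wrap mid-test. A sketch of what an AddAbsSendTime-style helper presumably does; the mask-based implementation is an assumption, not quoted from this change:

// Assumed behavior: add a (possibly negative) delta to a 24-bit counter.
uint32_t AddAbsSendTime24(uint32_t t, int delta) {
  // Unsigned addition wraps mod 2^32; the mask reduces it to mod 2^24.
  return (t + static_cast<uint32_t>(delta)) & 0x00FFFFFF;
}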
diff --git a/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h b/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h
index 606bb6c4e6..8343d7d57b 100644
--- a/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h
+++ b/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h
@@ -14,6 +14,7 @@
#include <list>
#include <map>
#include <utility>
+#include <vector>
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/constructormagic.h"
diff --git a/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimators_test.cc b/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimators_test.cc
index d6f049f6ac..2ce144129b 100644
--- a/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimators_test.cc
+++ b/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimators_test.cc
@@ -13,8 +13,10 @@
#include <unistd.h>
#endif
+#include <algorithm>
#include <sstream>
+#include "webrtc/base/random.h"
#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "webrtc/modules/remote_bitrate_estimator/test/bwe_test.h"
#include "webrtc/modules/remote_bitrate_estimator/test/packet_receiver.h"
@@ -242,18 +244,20 @@ class BweFeedbackTest
: public BweTest,
public ::testing::TestWithParam<BandwidthEstimatorType> {
public:
- BweFeedbackTest() : BweTest() {}
+#ifdef WEBRTC_WIN
+ BweFeedbackTest()
+ : BweTest(), random_(Clock::GetRealTimeClock()->TimeInMicroseconds()) {}
+#else
+ BweFeedbackTest()
+ : BweTest(),
+ // Multiply the time by a random-ish odd number derived from the PID.
+ random_((getpid() | 1) *
+ Clock::GetRealTimeClock()->TimeInMicroseconds()) {}
+#endif
virtual ~BweFeedbackTest() {}
protected:
- void SetUp() override {
- unsigned int seed = Clock::GetRealTimeClock()->TimeInMicroseconds();
-#ifndef WEBRTC_WIN
- seed *= getpid();
-#endif
- srand(seed);
- BweTest::SetUp();
- }
+ Random random_;
private:
RTC_DISALLOW_COPY_AND_ASSIGN(BweFeedbackTest);
@@ -356,7 +360,7 @@ TEST_P(BweFeedbackTest, PacedSelfFairness50msTest) {
const int kNumRmcatFlows = 4;
int64_t offset_ms[kNumRmcatFlows];
for (int i = 0; i < kNumRmcatFlows; ++i) {
- offset_ms[i] = std::max(0, 5000 * i + rand() % 2001 - 1000);
+ offset_ms[i] = std::max(0, 5000 * i + random_.Rand(-1000, 1000));
}
RunFairnessTest(GetParam(), kNumRmcatFlows, 0, 300, 3000, 50, kRttMs,
@@ -370,7 +374,7 @@ TEST_P(BweFeedbackTest, PacedSelfFairness500msTest) {
const int kNumRmcatFlows = 4;
int64_t offset_ms[kNumRmcatFlows];
for (int i = 0; i < kNumRmcatFlows; ++i) {
- offset_ms[i] = std::max(0, 5000 * i + rand() % 2001 - 1000);
+ offset_ms[i] = std::max(0, 5000 * i + random_.Rand(-1000, 1000));
}
RunFairnessTest(GetParam(), kNumRmcatFlows, 0, 300, 3000, 500, kRttMs,
@@ -384,7 +388,7 @@ TEST_P(BweFeedbackTest, PacedSelfFairness1000msTest) {
const int kNumRmcatFlows = 4;
int64_t offset_ms[kNumRmcatFlows];
for (int i = 0; i < kNumRmcatFlows; ++i) {
- offset_ms[i] = std::max(0, 5000 * i + rand() % 2001 - 1000);
+ offset_ms[i] = std::max(0, 5000 * i + random_.Rand(-1000, 1000));
}
RunFairnessTest(GetParam(), kNumRmcatFlows, 0, 300, 3000, 1000, kRttMs,
@@ -397,7 +401,7 @@ TEST_P(BweFeedbackTest, TcpFairness50msTest) {
int64_t offset_ms[2]; // One TCP, one RMCAT flow.
for (int i = 0; i < 2; ++i) {
- offset_ms[i] = std::max(0, 5000 * i + rand() % 2001 - 1000);
+ offset_ms[i] = std::max(0, 5000 * i + random_.Rand(-1000, 1000));
}
RunFairnessTest(GetParam(), 1, 1, 300, 2000, 50, kRttMs, kMaxJitterMs,
@@ -410,7 +414,7 @@ TEST_P(BweFeedbackTest, TcpFairness500msTest) {
int64_t offset_ms[2]; // One TCP, one RMCAT flow.
for (int i = 0; i < 2; ++i) {
- offset_ms[i] = std::max(0, 5000 * i + rand() % 2001 - 1000);
+ offset_ms[i] = std::max(0, 5000 * i + random_.Rand(-1000, 1000));
}
RunFairnessTest(GetParam(), 1, 1, 300, 2000, 500, kRttMs, kMaxJitterMs,
@@ -423,7 +427,7 @@ TEST_P(BweFeedbackTest, TcpFairness1000msTest) {
int64_t offset_ms[2]; // One TCP, one RMCAT flow.
for (int i = 0; i < 2; ++i) {
- offset_ms[i] = std::max(0, 5000 * i + rand() % 2001 - 1000);
+ offset_ms[i] = std::max(0, 5000 * i + random_.Rand(-1000, 1000));
}
RunFairnessTest(GetParam(), 1, 1, 300, 2000, 1000, kRttMs, kMaxJitterMs,
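Two changes land together in this file. First, the per-test srand() in SetUp() becomes a Random member seeded once in the constructor, with (getpid() | 1) forcing an odd, nonzero multiplier so the product with the microsecond clock cannot collapse to zero and keeps its low bits varying across processes. Second, rand() % 2001 - 1000 becomes random_.Rand(-1000, 1000); the modulo form is slightly biased whenever RAND_MAX + 1 is not a multiple of 2001. A hedged sketch of the new draw, assuming webrtc::Random::Rand(low, high) is uniform and inclusive on both ends:

// Assumed semantics: Rand(low, high) -> uniform int in [low, high].
Random random((getpid() | 1) *  // '| 1' makes the multiplier odd and nonzero.
              Clock::GetRealTimeClock()->TimeInMicroseconds());
for (int i = 0; i < kNumRmcatFlows; ++i) {
  // Same range as rand() % 2001 - 1000, without the modulo bias.
  offset_ms[i] = std::max(0, 5000 * i + random.Rand(-1000, 1000));
}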
diff --git a/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.cc b/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.cc
index b7f9f65dbc..15ca42dda9 100644
--- a/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.cc
+++ b/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.cc
@@ -13,14 +13,14 @@
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/system_wrappers/include/clock.h"
-#include "webrtc/modules/pacing/include/packet_router.h"
+#include "webrtc/modules/pacing/packet_router.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
namespace webrtc {
// TODO(sprang): Tune these!
-const int RemoteEstimatorProxy::kDefaultProcessIntervalMs = 200;
+const int RemoteEstimatorProxy::kDefaultProcessIntervalMs = 50;
const int RemoteEstimatorProxy::kBackWindowMs = 500;
RemoteEstimatorProxy::RemoteEstimatorProxy(Clock* clock,
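Cutting kDefaultProcessIntervalMs from 200 to 50 ms raises the ceiling on transport-feedback frequency from 1000 / 200 = 5 to 1000 / 50 = 20 messages per second, at the cost of proportionally more RTCP overhead; the TODO above notes these constants are still untuned.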
diff --git a/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.h b/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.h
index e867ff77a4..98a68b3dcf 100644
--- a/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.h
+++ b/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.h
@@ -15,7 +15,7 @@
#include <vector>
#include "webrtc/base/criticalsection.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
namespace webrtc {
diff --git a/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc b/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc
index 826a724e33..7ddd31467b 100644
--- a/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc
+++ b/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc
@@ -11,7 +11,7 @@
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/pacing/include/packet_router.h"
+#include "webrtc/modules/pacing/packet_router.h"
#include "webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h"
#include "webrtc/system_wrappers/include/clock.h"
diff --git a/webrtc/modules/remote_bitrate_estimator/test/bwe.h b/webrtc/modules/remote_bitrate_estimator/test/bwe.h
index ef9b3149d7..8d29de2619 100644
--- a/webrtc/modules/remote_bitrate_estimator/test/bwe.h
+++ b/webrtc/modules/remote_bitrate_estimator/test/bwe.h
@@ -11,7 +11,10 @@
#ifndef WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_TEST_BWE_H_
#define WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_TEST_BWE_H_
+#include <list>
+#include <map>
#include <sstream>
+#include <string>
#include "webrtc/test/testsupport/gtest_prod_util.h"
#include "webrtc/modules/remote_bitrate_estimator/test/packet.h"
diff --git a/webrtc/modules/remote_bitrate_estimator/test/bwe_test.cc b/webrtc/modules/remote_bitrate_estimator/test/bwe_test.cc
index f837638474..9da21c1aaa 100644
--- a/webrtc/modules/remote_bitrate_estimator/test/bwe_test.cc
+++ b/webrtc/modules/remote_bitrate_estimator/test/bwe_test.cc
@@ -12,9 +12,10 @@
#include <sstream>
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/common.h"
#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.h"
#include "webrtc/modules/remote_bitrate_estimator/test/metric_recorder.h"
#include "webrtc/modules/remote_bitrate_estimator/test/packet_receiver.h"
@@ -662,7 +663,7 @@ void BweTest::RunSelfFairness(BandwidthEstimatorType bwe_type) {
void BweTest::RunRoundTripTimeFairness(BandwidthEstimatorType bwe_type) {
const int kAllFlowIds[] = {0, 1, 2, 3, 4}; // Five RMCAT flows.
const int64_t kAllOneWayDelayMs[] = {10, 25, 50, 100, 150};
- const size_t kNumFlows = ARRAY_SIZE(kAllFlowIds);
+ const size_t kNumFlows = arraysize(kAllFlowIds);
rtc::scoped_ptr<AdaptiveVideoSource> sources[kNumFlows];
rtc::scoped_ptr<VideoSender> senders[kNumFlows];
rtc::scoped_ptr<MetricRecorder> metric_recorders[kNumFlows];
@@ -774,10 +775,10 @@ void BweTest::RunMultipleShortTcpFairness(
const int kAllTcpFlowIds[] = {2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
assert(tcp_starting_times_ms.size() == tcp_file_sizes_bytes.size() &&
- tcp_starting_times_ms.size() == ARRAY_SIZE(kAllTcpFlowIds));
+ tcp_starting_times_ms.size() == arraysize(kAllTcpFlowIds));
- const size_t kNumRmcatFlows = ARRAY_SIZE(kAllRmcatFlowIds);
- const size_t kNumTotalFlows = kNumRmcatFlows + ARRAY_SIZE(kAllTcpFlowIds);
+ const size_t kNumRmcatFlows = arraysize(kAllRmcatFlowIds);
+ const size_t kNumTotalFlows = kNumRmcatFlows + arraysize(kAllTcpFlowIds);
rtc::scoped_ptr<AdaptiveVideoSource> sources[kNumRmcatFlows];
rtc::scoped_ptr<PacketSender> senders[kNumTotalFlows];
@@ -869,7 +870,7 @@ void BweTest::RunMultipleShortTcpFairness(
// During the test, one of the flows is paused and later resumed.
void BweTest::RunPauseResumeFlows(BandwidthEstimatorType bwe_type) {
const int kAllFlowIds[] = {0, 1, 2}; // Three RMCAT flows.
- const size_t kNumFlows = ARRAY_SIZE(kAllFlowIds);
+ const size_t kNumFlows = arraysize(kAllFlowIds);
rtc::scoped_ptr<AdaptiveVideoSource> sources[kNumFlows];
rtc::scoped_ptr<VideoSender> senders[kNumFlows];
@@ -947,7 +948,7 @@ std::vector<int> BweTest::GetFileSizesBytes(int num_files) {
const int kMinKbytes = 100;
const int kMaxKbytes = 1000;
- test::Random random(0x12345678);
+ Random random(0x12345678);
std::vector<int> tcp_file_sizes_bytes;
while (num_files-- > 0) {
@@ -960,7 +961,7 @@ std::vector<int> BweTest::GetFileSizesBytes(int num_files) {
std::vector<int64_t> BweTest::GetStartingTimesMs(int num_files) {
// OFF state behaves as an exp. distribution with mean = 10 seconds.
const float kMeanMs = 10000.0f;
- test::Random random(0x12345678);
+ Random random(0x12345678);
std::vector<int64_t> tcp_starting_times_ms;
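ARRAY_SIZE is the classic sizeof(a) / sizeof(a[0]) macro; arraysize() from webrtc/base/arraysize.h is the type-safe replacement (presumably mirroring Chromium's): it refuses to compile when handed a pointer, where the old macro would silently divide two unrelated sizes. The usual implementation looks like this:

// Declared but never defined: a function taking a reference to an array of
// N elements and returning a reference to a char array of length N. Only
// its sizeof is ever evaluated, at compile time.
template <typename T, size_t N>
char (&ArraySizeHelper(T (&array)[N]))[N];
#define arraysize(array) (sizeof(ArraySizeHelper(array)))
// arraysize(ptr) is a compile error; sizeof(ptr) / sizeof(ptr[0]) is not.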
diff --git a/webrtc/modules/remote_bitrate_estimator/test/bwe_test_baselinefile.h b/webrtc/modules/remote_bitrate_estimator/test/bwe_test_baselinefile.h
index 64dfa85535..b3df7124e3 100644
--- a/webrtc/modules/remote_bitrate_estimator/test/bwe_test_baselinefile.h
+++ b/webrtc/modules/remote_bitrate_estimator/test/bwe_test_baselinefile.h
@@ -12,7 +12,7 @@
#define WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_TEST_BWE_TEST_BASELINEFILE_H_
#include <string>
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
namespace webrtc {
namespace testing {
diff --git a/webrtc/modules/remote_bitrate_estimator/test/bwe_test_fileutils.h b/webrtc/modules/remote_bitrate_estimator/test/bwe_test_fileutils.h
index 2881eba424..d470324ac3 100644
--- a/webrtc/modules/remote_bitrate_estimator/test/bwe_test_fileutils.h
+++ b/webrtc/modules/remote_bitrate_estimator/test/bwe_test_fileutils.h
@@ -16,7 +16,7 @@
#include <string>
#include "webrtc/base/constructormagic.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
namespace webrtc {
namespace testing {
diff --git a/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.cc b/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.cc
index 4574d3d8a1..41bf836c9e 100644
--- a/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.cc
+++ b/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.cc
@@ -323,7 +323,7 @@ void LossFilter::SetLoss(float loss_percent) {
void LossFilter::RunFor(int64_t /*time_ms*/, Packets* in_out) {
assert(in_out);
for (PacketsIt it = in_out->begin(); it != in_out->end(); ) {
- if (random_.Rand() < loss_fraction_) {
+ if (random_.Rand<float>() < loss_fraction_) {
delete *it;
it = in_out->erase(it);
} else {
@@ -391,7 +391,7 @@ void JitterFilter::SetMaxJitter(int64_t max_jitter_ms) {
}
namespace {
-inline int64_t TruncatedNSigmaGaussian(test::Random* const random,
+inline int64_t TruncatedNSigmaGaussian(Random* const random,
int64_t mean,
int64_t std_dev) {
int64_t gaussian_random = random->Gaussian(mean, std_dev);
@@ -459,7 +459,7 @@ void ReorderFilter::RunFor(int64_t /*time_ms*/, Packets* in_out) {
PacketsIt last_it = in_out->begin();
PacketsIt it = last_it;
while (++it != in_out->end()) {
- if (random_.Rand() < reorder_fraction_) {
+ if (random_.Rand<float>() < reorder_fraction_) {
int64_t t1 = (*last_it)->send_time_us();
int64_t t2 = (*it)->send_time_us();
std::swap(*last_it, *it);
@@ -586,7 +586,7 @@ bool TraceBasedDeliveryFilter::Init(const std::string& filename) {
return false;
}
int64_t first_timestamp = -1;
- while(!feof(trace_file)) {
+ while (!feof(trace_file)) {
const size_t kMaxLineLength = 100;
char line[kMaxLineLength];
if (fgets(line, kMaxLineLength, trace_file)) {
@@ -680,6 +680,7 @@ VideoSource::VideoSource(int flow_id,
frame_period_ms_(1000.0 / fps),
bits_per_second_(1000 * kbps),
frame_size_bytes_(bits_per_second_ / 8 / fps),
+ random_(0x12345678),
flow_id_(flow_id),
next_frame_ms_(first_frame_offset_ms),
next_frame_rand_ms_(0),
@@ -713,9 +714,7 @@ void VideoSource::RunFor(int64_t time_ms, Packets* in_out) {
const int64_t kRandAmplitude = 2;
// A variance picked uniformly from {-1, 0, 1} ms is added to the frame
// timestamp.
- next_frame_rand_ms_ =
- kRandAmplitude * static_cast<float>(rand()) / RAND_MAX -
- kRandAmplitude / 2;
+ next_frame_rand_ms_ = kRandAmplitude * (random_.Rand<float>() - 0.5);
// Ensure frame will not have a negative timestamp.
int64_t next_frame_ms =
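The jitter draw above swaps rand() / RAND_MAX for the seeded member random_, making frame timing reproducible per VideoSource. Assuming Rand<float>() is uniform in [0, 1), kRandAmplitude * (x - 0.5) with kRandAmplitude = 2 spans the same [-1, +1) ms range as the old formula:

//   x = 0.0  ->  2 * (0.0 - 0.5) = -1.0 ms
//   x = 0.5  ->  2 * (0.5 - 0.5) =  0.0 ms
//   x -> 1.0 ->  2 * (1.0 - 0.5) -> +1.0 ms (exclusive)
float jitter_ms = 2.0f * (random_.Rand<float>() - 0.5f);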
diff --git a/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.h b/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.h
index 6b24cf30a6..3bb9b95f4b 100644
--- a/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.h
+++ b/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.h
@@ -17,21 +17,23 @@
#include <algorithm>
#include <list>
#include <numeric>
+#include <set>
#include <sstream>
#include <string>
+#include <utility>
#include <vector>
#include "webrtc/base/common.h"
+#include "webrtc/base/random.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/pacing/include/paced_sender.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/pacing/paced_sender.h"
#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.h"
#include "webrtc/modules/remote_bitrate_estimator/test/packet.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/system_wrappers/include/clock.h"
-#include "webrtc/test/random.h"
namespace webrtc {
@@ -44,7 +46,7 @@ class DelayCapHelper;
class RateCounter {
public:
- RateCounter(int64_t window_size_ms)
+ explicit RateCounter(int64_t window_size_ms)
: window_size_us_(1000 * window_size_ms),
recently_received_packets_(0),
recently_received_bytes_(0),
@@ -265,7 +267,7 @@ class LossFilter : public PacketProcessor {
virtual void RunFor(int64_t time_ms, Packets* in_out);
private:
- test::Random random_;
+ Random random_;
float loss_fraction_;
RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(LossFilter);
@@ -299,7 +301,7 @@ class JitterFilter : public PacketProcessor {
int64_t MeanUs();
private:
- test::Random random_;
+ Random random_;
int64_t stddev_jitter_us_;
int64_t last_send_time_us_;
bool reordering_; // False by default.
@@ -318,7 +320,7 @@ class ReorderFilter : public PacketProcessor {
virtual void RunFor(int64_t time_ms, Packets* in_out);
private:
- test::Random random_;
+ Random random_;
float reorder_fraction_;
RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(ReorderFilter);
@@ -415,6 +417,7 @@ class VideoSource {
uint32_t frame_size_bytes_;
private:
+ Random random_;
const int flow_id_;
int64_t next_frame_ms_;
int64_t next_frame_rand_ms_;
diff --git a/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework_unittest.cc b/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework_unittest.cc
index 627260678b..6bdfa847df 100644
--- a/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework_unittest.cc
+++ b/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework_unittest.cc
@@ -22,39 +22,6 @@ namespace webrtc {
namespace testing {
namespace bwe {
-TEST(BweTestFramework_RandomTest, Gaussian) {
- enum {
- kN = 100000,
- kBuckets = 100,
- kMean = 49,
- kStddev = 10
- };
-
- test::Random random(0x12345678);
-
- int buckets[kBuckets] = {0};
- for (int i = 0; i < kN; ++i) {
- int index = random.Gaussian(kMean, kStddev);
- if (index >= 0 && index < kBuckets) {
- buckets[index]++;
- }
- }
-
- const double kPi = 3.14159265358979323846;
- const double kScale = kN / (kStddev * sqrt(2.0 * kPi));
- const double kDiv = -2.0 * kStddev * kStddev;
- double self_corr = 0.0;
- double bucket_corr = 0.0;
- for (int n = 0; n < kBuckets; ++n) {
- double normal_dist = kScale * exp((n - kMean) * (n - kMean) / kDiv);
- self_corr += normal_dist * normal_dist;
- bucket_corr += normal_dist * buckets[n];
- }
- printf("Correlation: %f (random sample), %f (self), %f (quotient)\n",
- bucket_corr, self_corr, bucket_corr / self_corr);
- EXPECT_NEAR(1.0, bucket_corr / self_corr, 0.0004);
-}
-
static bool IsSequenceNumberSorted(const Packets& packets) {
PacketsConstIt last_it = packets.begin();
for (PacketsConstIt it = last_it; it != packets.end(); ++it) {
@@ -533,7 +500,7 @@ TEST(BweTestFramework_JitterFilterTest, Jitter1031) {
TestJitterFilter(1031);
}
-static void TestReorderFilter(uint16_t reorder_percent, uint16_t near_value) {
+static void TestReorderFilter(uint16_t reorder_percent) {
const uint16_t kPacketCount = 10000;
// Generate packets with 10 ms interval.
@@ -559,16 +526,23 @@ static void TestReorderFilter(uint16_t reorder_percent, uint16_t near_value) {
for (auto* packet : packets) {
const MediaPacket* media_packet = static_cast<const MediaPacket*>(packet);
uint16_t sequence_number = media_packet->header().sequenceNumber;
+ // The expected position for sequence number s is in position s-1.
if (sequence_number < last_sequence_number) {
distance += last_sequence_number - sequence_number;
}
last_sequence_number = sequence_number;
}
- // Because reordering is random, we allow a threshold when comparing. The
- // maximum distance a packet can be moved is PacketCount - 1.
- EXPECT_NEAR(
- ((kPacketCount - 1) * reorder_percent) / 100, distance, near_value);
+ // The probability that two elements are swapped is p = reorder_percent / 100.
+ double p = static_cast<double>(reorder_percent) / 100;
+ // The expected number of swaps we perform is p * (PacketCount - 1),
+ // and each swap increases the distance by one.
+ double mean = p * (kPacketCount - 1);
+ // If pair i is chosen to be swapped with probability p, the variance for that
+ // pair is p * (1 - p). Since there are (kPacketCount - 1) independent pairs,
+ // the variance for the number of swaps is (kPacketCount - 1) * p * (1 - p).
+ double std_deviation = sqrt((kPacketCount - 1) * p * (1 - p));
+ EXPECT_NEAR(mean, distance, 3 * std_deviation);
for (auto* packet : packets)
delete packet;
@@ -576,23 +550,23 @@ static void TestReorderFilter(uint16_t reorder_percent, uint16_t near_value) {
TEST(BweTestFramework_ReorderFilterTest, Reorder0) {
// For 0% reordering, no packets should have been moved, so result is exact.
- TestReorderFilter(0, 0);
+ TestReorderFilter(0);
}
TEST(BweTestFramework_ReorderFilterTest, Reorder10) {
- TestReorderFilter(10, 30);
+ TestReorderFilter(10);
}
TEST(BweTestFramework_ReorderFilterTest, Reorder20) {
- TestReorderFilter(20, 20);
+ TestReorderFilter(20);
}
TEST(BweTestFramework_ReorderFilterTest, Reorder50) {
- TestReorderFilter(50, 20);
+ TestReorderFilter(50);
}
TEST(BweTestFramework_ReorderFilterTest, Reorder70) {
- TestReorderFilter(70, 20);
+ TestReorderFilter(70);
}
TEST(BweTestFramework_ReorderFilterTest, Reorder100) {
@@ -600,7 +574,7 @@ TEST(BweTestFramework_ReorderFilterTest, Reorder100) {
// adjacent packets, when the likelihood of a swap is 1.0, a swap will always
// occur, so the stream will be in order except for the first packet, which
// has been moved to the end. Therefore we expect the result to be exact here.
- TestReorderFilter(100.0, 0);
+ TestReorderFilter(100.0);
}
class BweTestFramework_ChokeFilterTest : public ::testing::Test {
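The statistical bound above deserves a worked instance. For reorder_percent = 10 and kPacketCount = 10000: p = 0.1, mean = 0.1 * 9999 ≈ 1000, and std_deviation = sqrt(9999 * 0.1 * 0.9) ≈ 30, so the test tolerates about ±90 steps of displacement — derived from the swap statistics rather than the hand-tuned near_value of 30 the old signature required per call:

#include <cmath>
const int kPacketCount = 10000;
const double p = 0.10;                                         // 10% reorder.
double mean = p * (kPacketCount - 1);                          // ~999.9.
double std_dev = std::sqrt((kPacketCount - 1) * p * (1 - p));  // ~30.0.
// EXPECT_NEAR(mean, distance, 3 * std_dev): tolerance ~= 90.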
diff --git a/webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.cc b/webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.cc
index dcc08d8dde..3a84e81a0b 100644
--- a/webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.cc
+++ b/webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.cc
@@ -18,8 +18,8 @@
#include <algorithm>
#include <sstream>
+#include "webrtc/base/platform_thread.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
namespace webrtc {
namespace testing {
@@ -57,27 +57,27 @@ Logging* Logging::GetInstance() {
void Logging::SetGlobalContext(uint32_t name) {
CriticalSectionScoped cs(crit_sect_.get());
- thread_map_[ThreadWrapper::GetThreadId()].global_state.tag = ToString(name);
+ thread_map_[rtc::CurrentThreadId()].global_state.tag = ToString(name);
}
void Logging::SetGlobalContext(const std::string& name) {
CriticalSectionScoped cs(crit_sect_.get());
- thread_map_[ThreadWrapper::GetThreadId()].global_state.tag = name;
+ thread_map_[rtc::CurrentThreadId()].global_state.tag = name;
}
void Logging::SetGlobalContext(const char* name) {
CriticalSectionScoped cs(crit_sect_.get());
- thread_map_[ThreadWrapper::GetThreadId()].global_state.tag = name;
+ thread_map_[rtc::CurrentThreadId()].global_state.tag = name;
}
void Logging::SetGlobalEnable(bool enabled) {
CriticalSectionScoped cs(crit_sect_.get());
- thread_map_[ThreadWrapper::GetThreadId()].global_state.enabled = enabled;
+ thread_map_[rtc::CurrentThreadId()].global_state.enabled = enabled;
}
void Logging::Log(const char format[], ...) {
CriticalSectionScoped cs(crit_sect_.get());
- ThreadMap::iterator it = thread_map_.find(ThreadWrapper::GetThreadId());
+ ThreadMap::iterator it = thread_map_.find(rtc::CurrentThreadId());
assert(it != thread_map_.end());
const State& state = it->second.stack.top();
if (state.enabled) {
@@ -96,7 +96,7 @@ void Logging::Plot(int figure, double value) {
void Logging::Plot(int figure, double value, const std::string& alg_name) {
CriticalSectionScoped cs(crit_sect_.get());
- ThreadMap::iterator it = thread_map_.find(ThreadWrapper::GetThreadId());
+ ThreadMap::iterator it = thread_map_.find(rtc::CurrentThreadId());
assert(it != thread_map_.end());
const State& state = it->second.stack.top();
std::string label = state.tag + '@' + alg_name;
@@ -119,7 +119,7 @@ void Logging::PlotBar(int figure,
double value,
int flow_id) {
CriticalSectionScoped cs(crit_sect_.get());
- ThreadMap::iterator it = thread_map_.find(ThreadWrapper::GetThreadId());
+ ThreadMap::iterator it = thread_map_.find(rtc::CurrentThreadId());
assert(it != thread_map_.end());
const State& state = it->second.stack.top();
if (state.enabled) {
@@ -132,7 +132,7 @@ void Logging::PlotBaselineBar(int figure,
double value,
int flow_id) {
CriticalSectionScoped cs(crit_sect_.get());
- ThreadMap::iterator it = thread_map_.find(ThreadWrapper::GetThreadId());
+ ThreadMap::iterator it = thread_map_.find(rtc::CurrentThreadId());
assert(it != thread_map_.end());
const State& state = it->second.stack.top();
if (state.enabled) {
@@ -148,7 +148,7 @@ void Logging::PlotErrorBar(int figure,
const std::string& error_title,
int flow_id) {
CriticalSectionScoped cs(crit_sect_.get());
- ThreadMap::iterator it = thread_map_.find(ThreadWrapper::GetThreadId());
+ ThreadMap::iterator it = thread_map_.find(rtc::CurrentThreadId());
assert(it != thread_map_.end());
const State& state = it->second.stack.top();
if (state.enabled) {
@@ -167,7 +167,7 @@ void Logging::PlotLimitErrorBar(int figure,
const std::string& limit_title,
int flow_id) {
CriticalSectionScoped cs(crit_sect_.get());
- ThreadMap::iterator it = thread_map_.find(ThreadWrapper::GetThreadId());
+ ThreadMap::iterator it = thread_map_.find(rtc::CurrentThreadId());
assert(it != thread_map_.end());
const State& state = it->second.stack.top();
if (state.enabled) {
@@ -182,7 +182,7 @@ void Logging::PlotLabel(int figure,
const std::string& y_label,
int num_flows) {
CriticalSectionScoped cs(crit_sect_.get());
- ThreadMap::iterator it = thread_map_.find(ThreadWrapper::GetThreadId());
+ ThreadMap::iterator it = thread_map_.find(rtc::CurrentThreadId());
assert(it != thread_map_.end());
const State& state = it->second.stack.top();
if (state.enabled) {
@@ -219,7 +219,7 @@ void Logging::PushState(const std::string& append_to_tag, int64_t timestamp_ms,
bool enabled) {
CriticalSectionScoped cs(crit_sect_.get());
State new_state(append_to_tag, timestamp_ms, enabled);
- ThreadState* thread_state = &thread_map_[ThreadWrapper::GetThreadId()];
+ ThreadState* thread_state = &thread_map_[rtc::CurrentThreadId()];
std::stack<State>* stack = &thread_state->stack;
if (stack->empty()) {
new_state.MergePrevious(thread_state->global_state);
@@ -231,7 +231,7 @@ void Logging::PushState(const std::string& append_to_tag, int64_t timestamp_ms,
void Logging::PopState() {
CriticalSectionScoped cs(crit_sect_.get());
- ThreadMap::iterator it = thread_map_.find(ThreadWrapper::GetThreadId());
+ ThreadMap::iterator it = thread_map_.find(rtc::CurrentThreadId());
assert(it != thread_map_.end());
std::stack<State>* stack = &it->second.stack;
int64_t newest_timestamp_ms = stack->top().timestamp_ms;
diff --git a/webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.h b/webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.h
index 4115d30c2a..cc7807ba8a 100644
--- a/webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.h
+++ b/webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.h
@@ -130,12 +130,12 @@
#define BWE_TEST_LOGGING_GLOBAL_CONTEXT(name) \
do { \
webrtc::testing::bwe::Logging::GetInstance()->SetGlobalContext(name); \
- } while (0);
+ } while (0)
#define BWE_TEST_LOGGING_GLOBAL_ENABLE(enabled) \
do { \
webrtc::testing::bwe::Logging::GetInstance()->SetGlobalEnable(enabled); \
- } while (0);
+ } while (0)
#define __BWE_TEST_LOGGING_CONTEXT_NAME(ctx, line) ctx ## line
#define __BWE_TEST_LOGGING_CONTEXT_DECLARE(ctx, line, name, time, enabled) \
@@ -155,36 +155,36 @@
do { \
BWE_TEST_LOGGING_CONTEXT(name); \
webrtc::testing::bwe::Logging::GetInstance()->Log(format, _1); \
- } while (0);
+ } while (0)
#define BWE_TEST_LOGGING_LOG2(name, format, _1, _2) \
do { \
BWE_TEST_LOGGING_CONTEXT(name); \
webrtc::testing::bwe::Logging::GetInstance()->Log(format, _1, _2); \
- } while (0);
+ } while (0)
#define BWE_TEST_LOGGING_LOG3(name, format, _1, _2, _3) \
do { \
BWE_TEST_LOGGING_CONTEXT(name); \
webrtc::testing::bwe::Logging::GetInstance()->Log(format, _1, _2, _3); \
- } while (0);
+ } while (0)
#define BWE_TEST_LOGGING_LOG4(name, format, _1, _2, _3, _4) \
do { \
BWE_TEST_LOGGING_CONTEXT(name); \
webrtc::testing::bwe::Logging::GetInstance()->Log(format, _1, _2, _3, \
_4); \
- } while (0);
+ } while (0)
#define BWE_TEST_LOGGING_LOG5(name, format, _1, _2, _3, _4, _5) \
do {\
BWE_TEST_LOGGING_CONTEXT(name); \
webrtc::testing::bwe::Logging::GetInstance()->Log(format, _1, _2, _3, \
_4, _5); \
- } while (0);
+ } while (0)
#define BWE_TEST_LOGGING_PLOT(figure, name, time, value) \
do { \
__BWE_TEST_LOGGING_CONTEXT_DECLARE(__bwe_log_, __PLOT__, name, \
static_cast<int64_t>(time), true); \
webrtc::testing::bwe::Logging::GetInstance()->Plot(figure, value); \
- } while (0);
+ } while (0)
#define BWE_TEST_LOGGING_PLOT_WITH_NAME(figure, name, time, value, alg_name) \
do { \
@@ -192,21 +192,21 @@
static_cast<int64_t>(time), true); \
webrtc::testing::bwe::Logging::GetInstance()->Plot(figure, value, \
alg_name); \
- } while (0);
+ } while (0)
#define BWE_TEST_LOGGING_BAR(figure, name, value, flow_id) \
do { \
BWE_TEST_LOGGING_CONTEXT(name); \
webrtc::testing::bwe::Logging::GetInstance()->PlotBar(figure, name, value, \
flow_id); \
- } while (0);
+ } while (0)
#define BWE_TEST_LOGGING_BASELINEBAR(figure, name, value, flow_id) \
do { \
BWE_TEST_LOGGING_CONTEXT(name); \
webrtc::testing::bwe::Logging::GetInstance()->PlotBaselineBar( \
figure, name, value, flow_id); \
- } while (0);
+ } while (0)
#define BWE_TEST_LOGGING_ERRORBAR(figure, name, value, ylow, yhigh, title, \
flow_id) \
@@ -214,7 +214,7 @@
BWE_TEST_LOGGING_CONTEXT(name); \
webrtc::testing::bwe::Logging::GetInstance()->PlotErrorBar( \
figure, name, value, ylow, yhigh, title, flow_id); \
- } while (0);
+ } while (0)
#define BWE_TEST_LOGGING_LIMITERRORBAR( \
figure, name, value, ylow, yhigh, error_title, ymax, limit_title, flow_id) \
@@ -223,14 +223,14 @@
webrtc::testing::bwe::Logging::GetInstance()->PlotLimitErrorBar( \
figure, name, value, ylow, yhigh, error_title, ymax, limit_title, \
flow_id); \
- } while (0);
+ } while (0)
#define BWE_TEST_LOGGING_LABEL(figure, title, y_label, num_flows) \
do { \
BWE_TEST_LOGGING_CONTEXT(title); \
webrtc::testing::bwe::Logging::GetInstance()->PlotLabel( \
figure, title, y_label, num_flows); \
- } while (0);
+ } while (0)
namespace webrtc {
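Dropping the trailing semicolon from "} while (0);" in every macro above is the standard do/while(0) hygiene fix: with the semicolon baked into the expansion, the caller's own ';' becomes a second, empty statement, which detaches a following else. A minimal reproduction with a hypothetical macro and helpers (LOG_SOMETHING, DoLog, DoFallback):

#define LOG_SOMETHING() \
  do {                  \
    DoLog();            \
  } while (0)  // No ';' here; the call site supplies it.

if (enabled)
  LOG_SOMETHING();  // Expands to 'do { ... } while (0);' -- one statement.
else                // With the old ';' inside the macro, this else would not
  DoFallback();     // bind to the if: "error: 'else' without a previous 'if'".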
diff --git a/webrtc/modules/remote_bitrate_estimator/test/bwe_unittest.cc b/webrtc/modules/remote_bitrate_estimator/test/bwe_unittest.cc
index 6b3ce4847c..6245ccd25d 100644
--- a/webrtc/modules/remote_bitrate_estimator/test/bwe_unittest.cc
+++ b/webrtc/modules/remote_bitrate_estimator/test/bwe_unittest.cc
@@ -13,6 +13,7 @@
#include <vector>
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/arraysize.h"
namespace webrtc {
namespace testing {
@@ -241,7 +242,7 @@ TEST_F(BweReceiverTest, PacketLossDuplicatedPackets) {
// Missing the element 5.
const uint16_t kSequenceNumbers[] = {1, 2, 3, 4, 6, 7, 8};
- const int kNumPackets = ARRAY_SIZE(kSequenceNumbers);
+ const int kNumPackets = arraysize(kSequenceNumbers);
// Insert each sequence number twice.
for (int i = 0; i < 2; ++i) {
diff --git a/webrtc/modules/remote_bitrate_estimator/test/estimators/nada.cc b/webrtc/modules/remote_bitrate_estimator/test/estimators/nada.cc
index d77447f1ea..6166ff8c2d 100644
--- a/webrtc/modules/remote_bitrate_estimator/test/estimators/nada.cc
+++ b/webrtc/modules/remote_bitrate_estimator/test/estimators/nada.cc
@@ -18,10 +18,11 @@
#include <algorithm>
#include <vector>
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/common.h"
#include "webrtc/modules/remote_bitrate_estimator/test/estimators/nada.h"
#include "webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.h"
-#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
+#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
namespace webrtc {
namespace testing {
@@ -63,7 +64,7 @@ void NadaBweReceiver::ReceivePacket(int64_t arrival_time_ms,
}
delay_signal_ms_ = delay_ms - baseline_delay_ms_; // Refered as d_n.
- const int kMedian = ARRAY_SIZE(last_delays_ms_);
+ const int kMedian = arraysize(last_delays_ms_);
last_delays_ms_[(last_delays_index_++) % kMedian] = delay_signal_ms_;
int size = std::min(last_delays_index_, kMedian);
diff --git a/webrtc/modules/remote_bitrate_estimator/test/estimators/nada.h b/webrtc/modules/remote_bitrate_estimator/test/estimators/nada.h
index eee90cf463..bf23d09884 100644
--- a/webrtc/modules/remote_bitrate_estimator/test/estimators/nada.h
+++ b/webrtc/modules/remote_bitrate_estimator/test/estimators/nada.h
@@ -20,7 +20,7 @@
#include <list>
#include <map>
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/remote_bitrate_estimator/test/bwe.h"
#include "webrtc/voice_engine/channel.h"
diff --git a/webrtc/modules/remote_bitrate_estimator/test/estimators/nada_unittest.cc b/webrtc/modules/remote_bitrate_estimator/test/estimators/nada_unittest.cc
index a0f56b73b7..51afae1df4 100644
--- a/webrtc/modules/remote_bitrate_estimator/test/estimators/nada_unittest.cc
+++ b/webrtc/modules/remote_bitrate_estimator/test/estimators/nada_unittest.cc
@@ -13,6 +13,7 @@
#include <algorithm>
#include <numeric>
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/common.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.h"
@@ -357,7 +358,7 @@ TEST_F(NadaReceiverSideTest, FeedbackIncreasingDelay) {
// Baseline delay will be 50 ms.
// Delay signals should be: [0 10 20 30 40 50 60 70] ms.
const int64_t kMedianFilteredDelaysMs[] = {0, 5, 10, 15, 20, 30, 40, 50};
- const int kNumPackets = ARRAY_SIZE(kMedianFilteredDelaysMs);
+ const int kNumPackets = arraysize(kMedianFilteredDelaysMs);
const float kAlpha = 0.1f; // Used for exponential smoothing.
int64_t exp_smoothed_delays_ms[kNumPackets];
@@ -426,7 +427,7 @@ TEST_F(NadaReceiverSideTest, FeedbackWarpedDelay) {
// Delay signals should be: [0 200 400 600 800 1000 1200 1400] ms.
const int64_t kMedianFilteredDelaysMs[] = {
0, 100, 200, 300, 400, 600, 800, 1000};
- const int kNumPackets = ARRAY_SIZE(kMedianFilteredDelaysMs);
+ const int kNumPackets = arraysize(kMedianFilteredDelaysMs);
const float kAlpha = 0.1f; // Used for exponential smoothing.
int64_t exp_smoothed_delays_ms[kNumPackets];
@@ -480,7 +481,7 @@ TEST_F(FilterTest, ExponentialSmoothingConstantArray) {
TEST_F(FilterTest, ExponentialSmoothingInitialPertubation) {
const int64_t kSignal[] = {90000, 0, 0, 0, 0, 0};
- const int kNumElements = ARRAY_SIZE(kSignal);
+ const int kNumElements = arraysize(kSignal);
int64_t exp_smoothed[kNumElements];
ExponentialSmoothingFilter(kSignal, kNumElements, exp_smoothed);
for (int i = 1; i < kNumElements; ++i) {
diff --git a/webrtc/modules/remote_bitrate_estimator/test/estimators/remb.cc b/webrtc/modules/remote_bitrate_estimator/test/estimators/remb.cc
index b18b9f06b9..9599b01933 100644
--- a/webrtc/modules/remote_bitrate_estimator/test/estimators/remb.cc
+++ b/webrtc/modules/remote_bitrate_estimator/test/estimators/remb.cc
@@ -17,7 +17,7 @@
#include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"
#include "webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h"
#include "webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.h"
-#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
+#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
namespace webrtc {
namespace testing {
diff --git a/webrtc/modules/remote_bitrate_estimator/test/estimators/tcp.cc b/webrtc/modules/remote_bitrate_estimator/test/estimators/tcp.cc
index a02abc6ab8..b7e4f971fa 100644
--- a/webrtc/modules/remote_bitrate_estimator/test/estimators/tcp.cc
+++ b/webrtc/modules/remote_bitrate_estimator/test/estimators/tcp.cc
@@ -16,7 +16,7 @@
#include "webrtc/base/common.h"
#include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"
#include "webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.h"
-#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
+#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
namespace webrtc {
namespace testing {
diff --git a/webrtc/modules/remote_bitrate_estimator/test/metric_recorder.cc b/webrtc/modules/remote_bitrate_estimator/test/metric_recorder.cc
index 6202b4a6a3..559757c0eb 100644
--- a/webrtc/modules/remote_bitrate_estimator/test/metric_recorder.cc
+++ b/webrtc/modules/remote_bitrate_estimator/test/metric_recorder.cc
@@ -10,10 +10,10 @@
#include "webrtc/modules/remote_bitrate_estimator/test/metric_recorder.h"
-#include "webrtc/modules/remote_bitrate_estimator/test/packet_sender.h"
-
#include <algorithm>
+#include "webrtc/modules/remote_bitrate_estimator/test/packet_sender.h"
+
namespace webrtc {
namespace testing {
namespace bwe {
diff --git a/webrtc/modules/remote_bitrate_estimator/test/packet.h b/webrtc/modules/remote_bitrate_estimator/test/packet.h
index 11885a4544..4a361c4dc2 100644
--- a/webrtc/modules/remote_bitrate_estimator/test/packet.h
+++ b/webrtc/modules/remote_bitrate_estimator/test/packet.h
@@ -16,7 +16,7 @@
#include <vector>
#include "webrtc/common_types.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
namespace webrtc {
diff --git a/webrtc/modules/remote_bitrate_estimator/test/packet_receiver.cc b/webrtc/modules/remote_bitrate_estimator/test/packet_receiver.cc
index f70c212af7..793e06421f 100644
--- a/webrtc/modules/remote_bitrate_estimator/test/packet_receiver.cc
+++ b/webrtc/modules/remote_bitrate_estimator/test/packet_receiver.cc
@@ -14,10 +14,10 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/common.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/remote_bitrate_estimator/test/bwe.h"
#include "webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.h"
-#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
+#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
#include "webrtc/system_wrappers/include/clock.h"
namespace webrtc {
diff --git a/webrtc/modules/remote_bitrate_estimator/test/packet_sender.cc b/webrtc/modules/remote_bitrate_estimator/test/packet_sender.cc
index f1faa49d7e..3bcbc0a071 100644
--- a/webrtc/modules/remote_bitrate_estimator/test/packet_sender.cc
+++ b/webrtc/modules/remote_bitrate_estimator/test/packet_sender.cc
@@ -15,7 +15,7 @@
#include <sstream>
#include "webrtc/base/checks.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/remote_bitrate_estimator/test/bwe.h"
#include "webrtc/modules/remote_bitrate_estimator/test/metric_recorder.h"
diff --git a/webrtc/modules/remote_bitrate_estimator/test/packet_sender.h b/webrtc/modules/remote_bitrate_estimator/test/packet_sender.h
index c42647e2d3..f48ed62f57 100644
--- a/webrtc/modules/remote_bitrate_estimator/test/packet_sender.h
+++ b/webrtc/modules/remote_bitrate_estimator/test/packet_sender.h
@@ -13,11 +13,12 @@
#include <list>
#include <limits>
+#include <set>
#include <string>
#include "webrtc/base/constructormagic.h"
#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/interface/module.h"
+#include "webrtc/modules/include/module.h"
#include "webrtc/modules/remote_bitrate_estimator/test/bwe.h"
#include "webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.h"
@@ -149,7 +150,7 @@ class TcpSender : public PacketSender {
private:
struct InFlight {
public:
- InFlight(const MediaPacket& packet)
+ explicit InFlight(const MediaPacket& packet)
: sequence_number(packet.header().sequenceNumber),
time_ms(packet.send_time_ms()) {}
diff --git a/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.cc b/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.cc
index 9493805a1c..f138035de5 100644
--- a/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.cc
+++ b/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.cc
@@ -10,15 +10,17 @@
#include "webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.h"
-#include <sstream>
#include <stdio.h>
+
+#include <set>
+#include <sstream>
#include <string>
#include "gflags/gflags.h"
#include "webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h"
#include "webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h"
#include "webrtc/test/rtp_file_reader.h"
namespace flags {
diff --git a/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp_play.cc b/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp_play.cc
index 19e4a07b4d..4574faf8b7 100644
--- a/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp_play.cc
+++ b/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp_play.cc
@@ -14,8 +14,8 @@
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h"
#include "webrtc/test/rtp_file_reader.h"
class Observer : public webrtc::RemoteBitrateObserver {
diff --git a/webrtc/modules/remote_bitrate_estimator/tools/rtp_to_text.cc b/webrtc/modules/remote_bitrate_estimator/tools/rtp_to_text.cc
index e277481886..bf698728e8 100644
--- a/webrtc/modules/remote_bitrate_estimator/tools/rtp_to_text.cc
+++ b/webrtc/modules/remote_bitrate_estimator/tools/rtp_to_text.cc
@@ -14,8 +14,8 @@
#include "webrtc/base/format_macros.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h"
#include "webrtc/test/rtp_file_reader.h"
int main(int argc, char** argv) {
diff --git a/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter.cc b/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter.cc
index f2e073aa53..5904594ac8 100644
--- a/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter.cc
+++ b/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter.cc
@@ -8,14 +8,15 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include "webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter.h"
+
#include <limits>
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h"
-#include "webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h"
-#include "webrtc/modules/utility/interface/process_thread.h"
+#include "webrtc/modules/utility/include/process_thread.h"
namespace webrtc {
diff --git a/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter.h b/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter.h
index 58829b072b..93f30e6cee 100644
--- a/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter.h
+++ b/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter.h
@@ -15,8 +15,8 @@
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/thread_annotations.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "webrtc/modules/remote_bitrate_estimator/include/send_time_history.h"
diff --git a/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter_unittest.cc b/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter_unittest.cc
index b2bc646e2d..64d0e55397 100644
--- a/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter_unittest.cc
+++ b/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter_unittest.cc
@@ -18,9 +18,9 @@
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/remote_bitrate_estimator/include/mock/mock_remote_bitrate_estimator.h"
#include "webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h"
-#include "webrtc/modules/utility/interface/mock/mock_process_thread.h"
+#include "webrtc/modules/utility/include/mock/mock_process_thread.h"
#include "webrtc/system_wrappers/include/clock.h"
using ::testing::_;
diff --git a/webrtc/modules/rtp_rtcp/BUILD.gn b/webrtc/modules/rtp_rtcp/BUILD.gn
index c651424611..a3d3403172 100644
--- a/webrtc/modules/rtp_rtcp/BUILD.gn
+++ b/webrtc/modules/rtp_rtcp/BUILD.gn
@@ -10,14 +10,14 @@ import("../../build/webrtc.gni")
source_set("rtp_rtcp") {
sources = [
- "interface/fec_receiver.h",
- "interface/receive_statistics.h",
- "interface/remote_ntp_time_estimator.h",
- "interface/rtp_header_parser.h",
- "interface/rtp_payload_registry.h",
- "interface/rtp_receiver.h",
- "interface/rtp_rtcp.h",
- "interface/rtp_rtcp_defines.h",
+ "include/fec_receiver.h",
+ "include/receive_statistics.h",
+ "include/remote_ntp_time_estimator.h",
+ "include/rtp_header_parser.h",
+ "include/rtp_payload_registry.h",
+ "include/rtp_receiver.h",
+ "include/rtp_rtcp.h",
+ "include/rtp_rtcp_defines.h",
"mocks/mock_rtp_rtcp.h",
"source/bitrate.cc",
"source/bitrate.h",
@@ -46,8 +46,40 @@ source_set("rtp_rtcp") {
"source/remote_ntp_time_estimator.cc",
"source/rtcp_packet.cc",
"source/rtcp_packet.h",
+ "source/rtcp_packet/app.cc",
+ "source/rtcp_packet/app.h",
+ "source/rtcp_packet/bye.cc",
+ "source/rtcp_packet/bye.h",
+ "source/rtcp_packet/compound_packet.cc",
+ "source/rtcp_packet/compound_packet.h",
+ "source/rtcp_packet/dlrr.cc",
+ "source/rtcp_packet/dlrr.h",
+ "source/rtcp_packet/extended_jitter_report.cc",
+ "source/rtcp_packet/extended_jitter_report.h",
+ "source/rtcp_packet/nack.cc",
+ "source/rtcp_packet/nack.h",
+ "source/rtcp_packet/pli.cc",
+ "source/rtcp_packet/pli.h",
+ "source/rtcp_packet/psfb.cc",
+ "source/rtcp_packet/psfb.h",
+ "source/rtcp_packet/receiver_report.cc",
+ "source/rtcp_packet/receiver_report.h",
+ "source/rtcp_packet/report_block.cc",
+ "source/rtcp_packet/report_block.h",
+ "source/rtcp_packet/rrtr.cc",
+ "source/rtcp_packet/rrtr.h",
+ "source/rtcp_packet/rtpfb.cc",
+ "source/rtcp_packet/rtpfb.h",
+ "source/rtcp_packet/sli.cc",
+ "source/rtcp_packet/sli.h",
+ "source/rtcp_packet/tmmbn.cc",
+ "source/rtcp_packet/tmmbn.h",
+ "source/rtcp_packet/tmmbr.cc",
+ "source/rtcp_packet/tmmbr.h",
"source/rtcp_packet/transport_feedback.cc",
"source/rtcp_packet/transport_feedback.h",
+ "source/rtcp_packet/voip_metric.cc",
+ "source/rtcp_packet/voip_metric.h",
"source/rtcp_receiver.cc",
"source/rtcp_receiver.h",
"source/rtcp_receiver_help.cc",
diff --git a/webrtc/modules/rtp_rtcp/OWNERS b/webrtc/modules/rtp_rtcp/OWNERS
index 2b08b6b2dc..fd12dcea0c 100644
--- a/webrtc/modules/rtp_rtcp/OWNERS
+++ b/webrtc/modules/rtp_rtcp/OWNERS
@@ -1,6 +1,11 @@
-stefan@webrtc.org
-henrik.lundin@webrtc.org
-mflodman@webrtc.org
-asapersson@webrtc.org
-
-per-file BUILD.gn=kjellander@webrtc.org
+stefan@webrtc.org
+henrik.lundin@webrtc.org
+mflodman@webrtc.org
+asapersson@webrtc.org
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
+
+per-file BUILD.gn=kjellander@webrtc.org
diff --git a/webrtc/modules/rtp_rtcp/include/fec_receiver.h b/webrtc/modules/rtp_rtcp/include/fec_receiver.h
new file mode 100644
index 0000000000..65e85ad7a5
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/include/fec_receiver.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_INCLUDE_FEC_RECEIVER_H_
+#define WEBRTC_MODULES_RTP_RTCP_INCLUDE_FEC_RECEIVER_H_
+
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+struct FecPacketCounter {
+ FecPacketCounter()
+ : num_packets(0),
+ num_fec_packets(0),
+ num_recovered_packets(0) {}
+
+ size_t num_packets; // Number of received packets.
+ size_t num_fec_packets; // Number of received FEC packets.
+ size_t num_recovered_packets; // Number of recovered media packets using FEC.
+};
+
+class FecReceiver {
+ public:
+ static FecReceiver* Create(RtpData* callback);
+
+ virtual ~FecReceiver() {}
+
+ virtual int32_t AddReceivedRedPacket(const RTPHeader& rtp_header,
+ const uint8_t* incoming_rtp_packet,
+ size_t packet_length,
+ uint8_t ulpfec_payload_type) = 0;
+
+ virtual int32_t ProcessReceivedFec() = 0;
+
+ virtual FecPacketCounter GetPacketCounter() const = 0;
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_INCLUDE_FEC_RECEIVER_H_
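The FecReceiver interface above is typically driven once per received RED packet. A minimal usage sketch follows (not part of the patch); it assumes the NullRtpData callback defined later in rtp_rtcp_defines.h, and the ULPFEC payload type 97 is a hypothetical negotiated value.

// Sketch only: drives FecReceiver with one RED packet; values illustrative.
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/rtp_rtcp/include/fec_receiver.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"

void SketchFecUsage(const webrtc::RTPHeader& header,
                    const uint8_t* packet, size_t length) {
  webrtc::NullRtpData callback;  // Discards recovered payloads.
  rtc::scoped_ptr<webrtc::FecReceiver> receiver(
      webrtc::FecReceiver::Create(&callback));
  const uint8_t kUlpfecPayloadType = 97;  // Hypothetical payload type.
  if (receiver->AddReceivedRedPacket(header, packet, length,
                                     kUlpfecPayloadType) == 0) {
    receiver->ProcessReceivedFec();  // Attempts recovery via the callback.
  }
  webrtc::FecPacketCounter counter = receiver->GetPacketCounter();
  (void)counter.num_recovered_packets;  // E.g., exported to stats.
}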
diff --git a/webrtc/modules/rtp_rtcp/include/receive_statistics.h b/webrtc/modules/rtp_rtcp/include/receive_statistics.h
new file mode 100644
index 0000000000..b4a7cd0de2
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/include/receive_statistics.h
@@ -0,0 +1,102 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_INCLUDE_RECEIVE_STATISTICS_H_
+#define WEBRTC_MODULES_RTP_RTCP_INCLUDE_RECEIVE_STATISTICS_H_
+
+#include <map>
+
+#include "webrtc/modules/include/module.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+class Clock;
+
+class StreamStatistician {
+ public:
+ virtual ~StreamStatistician();
+
+ virtual bool GetStatistics(RtcpStatistics* statistics, bool reset) = 0;
+ virtual void GetDataCounters(size_t* bytes_received,
+ uint32_t* packets_received) const = 0;
+
+ // Gets received stream data counters (includes reset counter values).
+ virtual void GetReceiveStreamDataCounters(
+ StreamDataCounters* data_counters) const = 0;
+
+ virtual uint32_t BitrateReceived() const = 0;
+
+ // Returns true if the packet with RTP header |header| is likely to be a
+ // retransmitted packet, false otherwise.
+ virtual bool IsRetransmitOfOldPacket(const RTPHeader& header,
+ int64_t min_rtt) const = 0;
+
+ // Returns true if |sequence_number| is received in order, false otherwise.
+ virtual bool IsPacketInOrder(uint16_t sequence_number) const = 0;
+};
+
+typedef std::map<uint32_t, StreamStatistician*> StatisticianMap;
+
+class ReceiveStatistics : public Module {
+ public:
+ virtual ~ReceiveStatistics() {}
+
+ static ReceiveStatistics* Create(Clock* clock);
+
+ // Updates the receive statistics with this packet.
+ virtual void IncomingPacket(const RTPHeader& rtp_header,
+ size_t packet_length,
+ bool retransmitted) = 0;
+
+ // Increment counter for number of FEC packets received.
+ virtual void FecPacketReceived(const RTPHeader& header,
+ size_t packet_length) = 0;
+
+ // Returns a map of all statisticians which have seen an incoming packet
+ // during the last two seconds.
+ virtual StatisticianMap GetActiveStatisticians() const = 0;
+
+ // Returns a pointer to the statistician of an ssrc.
+ virtual StreamStatistician* GetStatistician(uint32_t ssrc) const = 0;
+
+ // Sets the max reordering threshold in number of packets.
+ virtual void SetMaxReorderingThreshold(int max_reordering_threshold) = 0;
+
+ // Called on new RTCP stats creation.
+ virtual void RegisterRtcpStatisticsCallback(
+ RtcpStatisticsCallback* callback) = 0;
+
+ // Called on new RTP stats creation.
+ virtual void RegisterRtpStatisticsCallback(
+ StreamDataCountersCallback* callback) = 0;
+};
+
+class NullReceiveStatistics : public ReceiveStatistics {
+ public:
+ void IncomingPacket(const RTPHeader& rtp_header,
+ size_t packet_length,
+ bool retransmitted) override;
+ void FecPacketReceived(const RTPHeader& header,
+ size_t packet_length) override;
+ StatisticianMap GetActiveStatisticians() const override;
+ StreamStatistician* GetStatistician(uint32_t ssrc) const override;
+ int64_t TimeUntilNextProcess() override;
+ int32_t Process() override;
+ void SetMaxReorderingThreshold(int max_reordering_threshold) override;
+ void RegisterRtcpStatisticsCallback(
+ RtcpStatisticsCallback* callback) override;
+ void RegisterRtpStatisticsCallback(
+ StreamDataCountersCallback* callback) override;
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_INCLUDE_RECEIVE_STATISTICS_H_
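A usage sketch for the ReceiveStatistics module above (not part of the patch; assumes the real-time clock and per-packet updates from the RTP receive path):

// Sketch only: one packet in, per-stream statistics out.
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
#include "webrtc/system_wrappers/include/clock.h"

void SketchReceiveStats(const webrtc::RTPHeader& header, size_t length) {
  rtc::scoped_ptr<webrtc::ReceiveStatistics> stats(
      webrtc::ReceiveStatistics::Create(webrtc::Clock::GetRealTimeClock()));
  stats->IncomingPacket(header, length, false /* retransmitted */);
  webrtc::StreamStatistician* statistician =
      stats->GetStatistician(header.ssrc);
  if (statistician) {
    webrtc::RtcpStatistics rtcp_stats;
    statistician->GetStatistics(&rtcp_stats, true /* reset */);
  }
}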
diff --git a/webrtc/modules/rtp_rtcp/include/remote_ntp_time_estimator.h b/webrtc/modules/rtp_rtcp/include/remote_ntp_time_estimator.h
new file mode 100644
index 0000000000..56c6e48691
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/include/remote_ntp_time_estimator.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_INCLUDE_REMOTE_NTP_TIME_ESTIMATOR_H_
+#define WEBRTC_MODULES_RTP_RTCP_INCLUDE_REMOTE_NTP_TIME_ESTIMATOR_H_
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/system_wrappers/include/rtp_to_ntp.h"
+
+namespace webrtc {
+
+class Clock;
+class TimestampExtrapolator;
+
+// RemoteNtpTimeEstimator can be used to estimate a given RTP timestamp's NTP
+// time in the local timebase.
+// Note that it needs to be trained with at least 2 RTCP SRs (by calling
+// |UpdateRtcpTimestamp|) before it can be used.
+class RemoteNtpTimeEstimator {
+ public:
+ explicit RemoteNtpTimeEstimator(Clock* clock);
+
+ ~RemoteNtpTimeEstimator();
+
+ // Updates the estimator with round trip time |rtt|, NTP seconds |ntp_secs|,
+ // NTP fraction |ntp_frac| and RTP timestamp |rtcp_timestamp|.
+ bool UpdateRtcpTimestamp(int64_t rtt, uint32_t ntp_secs, uint32_t ntp_frac,
+ uint32_t rtp_timestamp);
+
+ // Estimates the NTP timestamp in the local timebase from |rtp_timestamp|.
+ // Returns the NTP timestamp in ms on success, or -1 on failure.
+ int64_t Estimate(uint32_t rtp_timestamp);
+
+ private:
+ Clock* clock_;
+ rtc::scoped_ptr<TimestampExtrapolator> ts_extrapolator_;
+ RtcpList rtcp_list_;
+ int64_t last_timing_log_ms_;
+ RTC_DISALLOW_COPY_AND_ASSIGN(RemoteNtpTimeEstimator);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_INCLUDE_REMOTE_NTP_TIME_ESTIMATOR_H_
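Because the estimator must be trained with at least two RTCP SRs before Estimate() can succeed, a typical call pattern looks like this sketch (not part of the patch; the rtt/NTP/RTP values are placeholders):

// Sketch only: two SR updates, then an estimate.
#include "webrtc/modules/rtp_rtcp/include/remote_ntp_time_estimator.h"
#include "webrtc/system_wrappers/include/clock.h"

int64_t SketchNtpEstimate(uint32_t rtp_timestamp) {
  webrtc::RemoteNtpTimeEstimator estimator(webrtc::Clock::GetRealTimeClock());
  // Normally driven from RTCP SR handling; placeholder values below.
  estimator.UpdateRtcpTimestamp(50 /* rtt */, 3600000000u, 0, 90000);
  estimator.UpdateRtcpTimestamp(50 /* rtt */, 3600000001u, 0, 180000);
  return estimator.Estimate(rtp_timestamp);  // -1 until trained.
}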
diff --git a/webrtc/modules/rtp_rtcp/include/rtp_cvo.h b/webrtc/modules/rtp_rtcp/include/rtp_cvo.h
new file mode 100644
index 0000000000..2e30d898ec
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/include/rtp_cvo.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef WEBRTC_MODULES_RTP_RTCP_INCLUDE_RTP_CVO_H_
+#define WEBRTC_MODULES_RTP_RTCP_INCLUDE_RTP_CVO_H_
+
+#include "webrtc/common_video/rotation.h"
+
+namespace webrtc {
+
+// Please refer to http://www.etsi.org/deliver/etsi_ts/126100_126199/126114/
+// 12.07.00_60/ts_126114v120700p.pdf Section 7.4.5. The rotation of a frame is
+// the clockwise angle by which the frame must be rotated in order to display
+// it correctly when the display is in its natural orientation.
+inline uint8_t ConvertVideoRotationToCVOByte(VideoRotation rotation) {
+ switch (rotation) {
+ case kVideoRotation_0:
+ return 0;
+ case kVideoRotation_90:
+ return 1;
+ case kVideoRotation_180:
+ return 2;
+ case kVideoRotation_270:
+ return 3;
+ }
+ assert(false);
+ return 0;
+}
+
+inline VideoRotation ConvertCVOByteToVideoRotation(uint8_t rotation) {
+ switch (rotation) {
+ case 0:
+ return kVideoRotation_0;
+ case 1:
+ return kVideoRotation_90;
+ case 2:
+ return kVideoRotation_180;
+ case 3:
+ return kVideoRotation_270;
+ default:
+ assert(false);
+ return kVideoRotation_0;
+ }
+}
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_INCLUDE_RTP_CVO_H_
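The two helpers are inverses over the four defined rotations, which the following sketch (not part of the patch) checks:

// Sketch only: verifies the CVO byte round-trip.
#include <stdint.h>
#include "webrtc/modules/rtp_rtcp/include/rtp_cvo.h"

bool CvoRoundTrips() {
  const webrtc::VideoRotation kRotations[] = {
      webrtc::kVideoRotation_0, webrtc::kVideoRotation_90,
      webrtc::kVideoRotation_180, webrtc::kVideoRotation_270};
  for (webrtc::VideoRotation rotation : kRotations) {
    const uint8_t cvo_byte = webrtc::ConvertVideoRotationToCVOByte(rotation);
    if (webrtc::ConvertCVOByteToVideoRotation(cvo_byte) != rotation)
      return false;
  }
  return true;
}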
diff --git a/webrtc/modules/rtp_rtcp/include/rtp_header_parser.h b/webrtc/modules/rtp_rtcp/include/rtp_header_parser.h
new file mode 100644
index 0000000000..329de32611
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/include/rtp_header_parser.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef WEBRTC_MODULES_RTP_RTCP_INCLUDE_RTP_HEADER_PARSER_H_
+#define WEBRTC_MODULES_RTP_RTCP_INCLUDE_RTP_HEADER_PARSER_H_
+
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+struct RTPHeader;
+
+class RtpHeaderParser {
+ public:
+ static RtpHeaderParser* Create();
+ virtual ~RtpHeaderParser() {}
+
+ // Returns true if the packet is an RTCP packet, false otherwise.
+ static bool IsRtcp(const uint8_t* packet, size_t length);
+
+ // Parses the packet and stores the parsed header fields in |header|.
+ // Returns true on success, false otherwise.
+ // This method is thread-safe in the sense that multiple packets may be
+ // parsed concurrently.
+ virtual bool Parse(const uint8_t* packet,
+ size_t length,
+ RTPHeader* header) const = 0;
+
+ // Registers an RTP header extension and binds it to |id|.
+ virtual bool RegisterRtpHeaderExtension(RTPExtensionType type,
+ uint8_t id) = 0;
+
+ // De-registers an RTP header extension.
+ virtual bool DeregisterRtpHeaderExtension(RTPExtensionType type) = 0;
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_INCLUDE_RTP_HEADER_PARSER_H_
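A usage sketch for the parser (not part of the patch; the extension id 5 stands in for whatever was negotiated in SDP):

// Sketch only: RTCP demux check, then header parsing.
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"

bool SketchParse(const uint8_t* packet, size_t length,
                 webrtc::RTPHeader* header) {
  if (webrtc::RtpHeaderParser::IsRtcp(packet, length))
    return false;  // RTCP packets are handled elsewhere.
  rtc::scoped_ptr<webrtc::RtpHeaderParser> parser(
      webrtc::RtpHeaderParser::Create());
  parser->RegisterRtpHeaderExtension(webrtc::kRtpExtensionAbsoluteSendTime,
                                     5 /* hypothetical negotiated id */);
  return parser->Parse(packet, length, header);
}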
diff --git a/webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h b/webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h
new file mode 100644
index 0000000000..fae864107f
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h
@@ -0,0 +1,203 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_INCLUDE_RTP_PAYLOAD_REGISTRY_H_
+#define WEBRTC_MODULES_RTP_RTCP_INCLUDE_RTP_PAYLOAD_REGISTRY_H_
+
+#include <map>
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
+#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
+
+namespace webrtc {
+
+// This strategy deals with the audio/video-specific aspects
+// of payload handling.
+class RTPPayloadStrategy {
+ public:
+ virtual ~RTPPayloadStrategy() {}
+
+ virtual bool CodecsMustBeUnique() const = 0;
+
+ virtual bool PayloadIsCompatible(const RtpUtility::Payload& payload,
+ const uint32_t frequency,
+ const size_t channels,
+ const uint32_t rate) const = 0;
+
+ virtual void UpdatePayloadRate(RtpUtility::Payload* payload,
+ const uint32_t rate) const = 0;
+
+ virtual RtpUtility::Payload* CreatePayloadType(
+ const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+ const int8_t payloadType,
+ const uint32_t frequency,
+ const size_t channels,
+ const uint32_t rate) const = 0;
+
+ virtual int GetPayloadTypeFrequency(
+ const RtpUtility::Payload& payload) const = 0;
+
+ static RTPPayloadStrategy* CreateStrategy(const bool handling_audio);
+
+ protected:
+ RTPPayloadStrategy() {}
+};
+
+class RTPPayloadRegistry {
+ public:
+ // The registry takes ownership of the strategy.
+ explicit RTPPayloadRegistry(RTPPayloadStrategy* rtp_payload_strategy);
+ ~RTPPayloadRegistry();
+
+ int32_t RegisterReceivePayload(
+ const char payload_name[RTP_PAYLOAD_NAME_SIZE],
+ const int8_t payload_type,
+ const uint32_t frequency,
+ const size_t channels,
+ const uint32_t rate,
+ bool* created_new_payload_type);
+
+ int32_t DeRegisterReceivePayload(
+ const int8_t payload_type);
+
+ int32_t ReceivePayloadType(
+ const char payload_name[RTP_PAYLOAD_NAME_SIZE],
+ const uint32_t frequency,
+ const size_t channels,
+ const uint32_t rate,
+ int8_t* payload_type) const;
+
+ bool RtxEnabled() const;
+
+ void SetRtxSsrc(uint32_t ssrc);
+
+ bool GetRtxSsrc(uint32_t* ssrc) const;
+
+ void SetRtxPayloadType(int payload_type, int associated_payload_type);
+
+ bool IsRtx(const RTPHeader& header) const;
+
+ // DEPRECATED. Use RestoreOriginalPacket below that takes a uint8_t*
+ // restored_packet, instead of a uint8_t**.
+ // TODO(noahric): Remove this when all callers have been updated.
+ bool RestoreOriginalPacket(uint8_t** restored_packet,
+ const uint8_t* packet,
+ size_t* packet_length,
+ uint32_t original_ssrc,
+ const RTPHeader& header) const;
+
+ bool RestoreOriginalPacket(uint8_t* restored_packet,
+ const uint8_t* packet,
+ size_t* packet_length,
+ uint32_t original_ssrc,
+ const RTPHeader& header) const;
+
+ bool IsRed(const RTPHeader& header) const;
+
+ // Returns true if the media of this RTP packet is encapsulated within an
+ // extra header, such as RTX or RED.
+ bool IsEncapsulated(const RTPHeader& header) const;
+
+ bool GetPayloadSpecifics(uint8_t payload_type, PayloadUnion* payload) const;
+
+ int GetPayloadTypeFrequency(uint8_t payload_type) const;
+
+ // DEPRECATED. Use PayloadTypeToPayload below that returns const Payload*
+ // instead of taking output parameter.
+ // TODO(danilchap): Remove this when all callers have been updated.
+ bool PayloadTypeToPayload(const uint8_t payload_type,
+ RtpUtility::Payload*& payload) const { // NOLINT
+ payload =
+ const_cast<RtpUtility::Payload*>(PayloadTypeToPayload(payload_type));
+ return payload != nullptr;
+ }
+ const RtpUtility::Payload* PayloadTypeToPayload(uint8_t payload_type) const;
+
+ void ResetLastReceivedPayloadTypes() {
+ CriticalSectionScoped cs(crit_sect_.get());
+ last_received_payload_type_ = -1;
+ last_received_media_payload_type_ = -1;
+ }
+
+ // This sets the payload type of the packets being received from the network
+ // on the media SSRC. For instance if packets are encapsulated with RED, this
+ // payload type will be the RED payload type.
+ void SetIncomingPayloadType(const RTPHeader& header);
+
+ // Returns true if the new media payload type has not changed.
+ bool ReportMediaPayloadType(uint8_t media_payload_type);
+
+ int8_t red_payload_type() const {
+ CriticalSectionScoped cs(crit_sect_.get());
+ return red_payload_type_;
+ }
+ int8_t ulpfec_payload_type() const {
+ CriticalSectionScoped cs(crit_sect_.get());
+ return ulpfec_payload_type_;
+ }
+ int8_t last_received_payload_type() const {
+ CriticalSectionScoped cs(crit_sect_.get());
+ return last_received_payload_type_;
+ }
+ void set_last_received_payload_type(int8_t last_received_payload_type) {
+ CriticalSectionScoped cs(crit_sect_.get());
+ last_received_payload_type_ = last_received_payload_type;
+ }
+
+ int8_t last_received_media_payload_type() const {
+ CriticalSectionScoped cs(crit_sect_.get());
+ return last_received_media_payload_type_;
+ }
+
+ bool use_rtx_payload_mapping_on_restore() const {
+ CriticalSectionScoped cs(crit_sect_.get());
+ return use_rtx_payload_mapping_on_restore_;
+ }
+
+ void set_use_rtx_payload_mapping_on_restore(bool val) {
+ CriticalSectionScoped cs(crit_sect_.get());
+ use_rtx_payload_mapping_on_restore_ = val;
+ }
+
+ private:
+ // Prunes the payload type map of the specific payload type, if it exists.
+ void DeregisterAudioCodecOrRedTypeRegardlessOfPayloadType(
+ const char payload_name[RTP_PAYLOAD_NAME_SIZE],
+ const size_t payload_name_length,
+ const uint32_t frequency,
+ const size_t channels,
+ const uint32_t rate);
+
+ bool IsRtxInternal(const RTPHeader& header) const;
+
+ rtc::scoped_ptr<CriticalSectionWrapper> crit_sect_;
+ RtpUtility::PayloadTypeMap payload_type_map_;
+ rtc::scoped_ptr<RTPPayloadStrategy> rtp_payload_strategy_;
+ int8_t red_payload_type_;
+ int8_t ulpfec_payload_type_;
+ int8_t incoming_payload_type_;
+ int8_t last_received_payload_type_;
+ int8_t last_received_media_payload_type_;
+ bool rtx_;
+ // TODO(changbin): Remove rtx_payload_type_ once interop with old clients that
+ // only understand one RTX PT is no longer needed.
+ int rtx_payload_type_;
+ // Mapping rtx_payload_type_map_[rtx] = associated.
+ std::map<int, int> rtx_payload_type_map_;
+ // When true, use rtx_payload_type_map_ when restoring RTX packets to get the
+ // correct payload type.
+ bool use_rtx_payload_mapping_on_restore_;
+ uint32_t ssrc_rtx_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_INCLUDE_RTP_PAYLOAD_REGISTRY_H_
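A registration sketch for the registry (not part of the patch; "VP8" and payload type 100 are illustrative values):

// Sketch only: video registry with one receive payload.
#include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h"

void SketchRegistry() {
  // The registry takes ownership of the strategy.
  webrtc::RTPPayloadRegistry registry(
      webrtc::RTPPayloadStrategy::CreateStrategy(false /* handling_audio */));
  bool created_new_payload = false;
  registry.RegisterReceivePayload("VP8", 100 /* payload_type */,
                                  90000 /* frequency */, 1 /* channels */,
                                  0 /* rate */, &created_new_payload);
  const int frequency = registry.GetPayloadTypeFrequency(100);
  (void)frequency;  // Expected: 90000 for video.
}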
diff --git a/webrtc/modules/rtp_rtcp/include/rtp_receiver.h b/webrtc/modules/rtp_rtcp/include/rtp_receiver.h
new file mode 100644
index 0000000000..0640d5cc19
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/include/rtp_receiver.h
@@ -0,0 +1,103 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_INCLUDE_RTP_RECEIVER_H_
+#define WEBRTC_MODULES_RTP_RTCP_INCLUDE_RTP_RECEIVER_H_
+
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+class RTPPayloadRegistry;
+
+class TelephoneEventHandler {
+ public:
+ virtual ~TelephoneEventHandler() {}
+
+ // The following three methods implement the TelephoneEventHandler interface.
+ // Forward DTMFs to decoder for playout.
+ virtual void SetTelephoneEventForwardToDecoder(bool forward_to_decoder) = 0;
+
+ // Is forwarding of out-of-band telephone events turned on/off?
+ virtual bool TelephoneEventForwardToDecoder() const = 0;
+
+ // Is TelephoneEvent configured with payload type |payload_type|?
+ virtual bool TelephoneEventPayloadType(const int8_t payload_type) const = 0;
+};
+
+class RtpReceiver {
+ public:
+ // Creates a video-enabled RTP receiver.
+ static RtpReceiver* CreateVideoReceiver(
+ Clock* clock,
+ RtpData* incoming_payload_callback,
+ RtpFeedback* incoming_messages_callback,
+ RTPPayloadRegistry* rtp_payload_registry);
+
+ // Creates an audio-enabled RTP receiver.
+ static RtpReceiver* CreateAudioReceiver(
+ Clock* clock,
+ RtpAudioFeedback* incoming_audio_feedback,
+ RtpData* incoming_payload_callback,
+ RtpFeedback* incoming_messages_callback,
+ RTPPayloadRegistry* rtp_payload_registry);
+
+ virtual ~RtpReceiver() {}
+
+ // Returns a TelephoneEventHandler if available.
+ virtual TelephoneEventHandler* GetTelephoneEventHandler() = 0;
+
+ // Registers a receive payload in the payload registry and notifies the media
+ // receiver strategy.
+ virtual int32_t RegisterReceivePayload(
+ const char payload_name[RTP_PAYLOAD_NAME_SIZE],
+ const int8_t payload_type,
+ const uint32_t frequency,
+ const size_t channels,
+ const uint32_t rate) = 0;
+
+ // De-registers |payload_type| from the payload registry.
+ virtual int32_t DeRegisterReceivePayload(const int8_t payload_type) = 0;
+
+ // Parses the media-specific parts of an RTP packet and updates the receiver
+ // state. This for instance means that any changes in SSRC or payload type
+ // are detected and acted upon.
+ virtual bool IncomingRtpPacket(const RTPHeader& rtp_header,
+ const uint8_t* payload,
+ size_t payload_length,
+ PayloadUnion payload_specific,
+ bool in_order) = 0;
+
+ // Returns the currently configured NACK method.
+ virtual NACKMethod NACK() const = 0;
+
+ // Turn negative acknowledgement (NACK) requests on/off.
+ virtual void SetNACKStatus(const NACKMethod method) = 0;
+
+ // Gets the last received timestamp. Returns true if a packet has been
+ // received, false otherwise.
+ virtual bool Timestamp(uint32_t* timestamp) const = 0;
+ // Gets the time in milliseconds when the last timestamp was received.
+ // Returns true if a packet has been received, false otherwise.
+ virtual bool LastReceivedTimeMs(int64_t* receive_time_ms) const = 0;
+
+ // Returns the remote SSRC of the currently received RTP stream.
+ virtual uint32_t SSRC() const = 0;
+
+ // Returns the current remote CSRCs.
+ virtual int32_t CSRCs(uint32_t array_of_csrc[kRtpCsrcSize]) const = 0;
+
+ // Returns the current energy of the RTP stream received.
+ virtual int32_t Energy(uint8_t array_of_energy[kRtpCsrcSize]) const = 0;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_INCLUDE_RTP_RECEIVER_H_
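A wiring sketch for a video receiver (not part of the patch; the callbacks are the null implementations from rtp_rtcp_defines.h, and "VP8"/100 are illustrative):

// Sketch only: construct a video RtpReceiver and register one payload.
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/system_wrappers/include/clock.h"

void SketchVideoReceiver() {
  webrtc::NullRtpData payload_callback;
  webrtc::NullRtpFeedback feedback_callback;
  webrtc::RTPPayloadRegistry registry(
      webrtc::RTPPayloadStrategy::CreateStrategy(false /* handling_audio */));
  rtc::scoped_ptr<webrtc::RtpReceiver> receiver(
      webrtc::RtpReceiver::CreateVideoReceiver(
          webrtc::Clock::GetRealTimeClock(), &payload_callback,
          &feedback_callback, &registry));
  receiver->RegisterReceivePayload("VP8", 100, 90000, 1 /* channels */,
                                   0 /* rate */);
}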
diff --git a/webrtc/modules/rtp_rtcp/include/rtp_rtcp.h b/webrtc/modules/rtp_rtcp/include/rtp_rtcp.h
new file mode 100644
index 0000000000..6a7022a94c
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/include/rtp_rtcp.h
@@ -0,0 +1,653 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_INCLUDE_RTP_RTCP_H_
+#define WEBRTC_MODULES_RTP_RTCP_INCLUDE_RTP_RTCP_H_
+
+#include <set>
+#include <utility>
+#include <vector>
+
+#include "webrtc/modules/include/module.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+
+namespace webrtc {
+// Forward declarations.
+class ReceiveStatistics;
+class RemoteBitrateEstimator;
+class RtpReceiver;
+class Transport;
+namespace rtcp {
+class TransportFeedback;
+}
+
+class RtpRtcp : public Module {
+ public:
+ struct Configuration {
+ Configuration();
+
+ /* id - Unique identifier of this RTP/RTCP module object
+ * audio - True for an audio version of the RTP/RTCP
+ * module object; false will create a video version
+ * clock - The clock to use to read time. If NULL, the
+ * object will use the system clock.
+ * incoming_data - Callback object that will receive the incoming
+ * data. May not be NULL; default callback will do
+ * nothing.
+ * incoming_messages - Callback object that will receive the incoming
+ * RTP messages. May not be NULL; default callback
+ * will do nothing.
+ * outgoing_transport - Transport object that will be called when packets
+ * are ready to be sent out on the network
+ * intra_frame_callback - Called when the receiver requests an intra frame.
+ * bandwidth_callback - Called when we receive a changed estimate from
+ * the receiver of our stream.
+ * audio_messages - Telephone events. May not be NULL; default
+ * callback will do nothing.
+ * remote_bitrate_estimator - Estimates the bandwidth available for a set of
+ * streams from the same client.
+ * paced_sender - Spreads any bursts of packets into smaller
+ * bursts to minimize packet loss.
+ */
+ bool audio;
+ bool receiver_only;
+ Clock* clock;
+ ReceiveStatistics* receive_statistics;
+ Transport* outgoing_transport;
+ RtcpIntraFrameObserver* intra_frame_callback;
+ RtcpBandwidthObserver* bandwidth_callback;
+ TransportFeedbackObserver* transport_feedback_callback;
+ RtcpRttStats* rtt_stats;
+ RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer;
+ RtpAudioFeedback* audio_messages;
+ RemoteBitrateEstimator* remote_bitrate_estimator;
+ RtpPacketSender* paced_sender;
+ TransportSequenceNumberAllocator* transport_sequence_number_allocator;
+ BitrateStatisticsObserver* send_bitrate_observer;
+ FrameCountObserver* send_frame_count_observer;
+ SendSideDelayObserver* send_side_delay_observer;
+ };
+
+ /*
+ * Create an RTP/RTCP module object using the system clock.
+ *
+ * configuration - Configuration of the RTP/RTCP module.
+ */
+ static RtpRtcp* CreateRtpRtcp(const RtpRtcp::Configuration& configuration);
+
+ /**************************************************************************
+ *
+ * Receiver functions
+ *
+ ***************************************************************************/
+
+ virtual int32_t IncomingRtcpPacket(const uint8_t* incoming_packet,
+ size_t incoming_packet_length) = 0;
+
+ virtual void SetRemoteSSRC(uint32_t ssrc) = 0;
+
+ /**************************************************************************
+ *
+ * Sender
+ *
+ ***************************************************************************/
+
+ /*
+ * set MTU
+ *
+ * size - Max transfer unit in bytes, default is 1500
+ *
+ * return -1 on failure else 0
+ */
+ virtual int32_t SetMaxTransferUnit(uint16_t size) = 0;
+
+ /*
+ * set transport overhead
+ * default is IPv4 and UDP with no encryption
+ *
+ * TCP - true for TCP, false for UDP
+ * IPv6 - true for IP version 6, false for version 4
+ * authenticationOverhead - number of bytes to leave for an
+ * authentication header
+ *
+ * return -1 on failure else 0
+ */
+ virtual int32_t SetTransportOverhead(
+ bool TCP,
+ bool IPV6,
+ uint8_t authenticationOverhead = 0) = 0;
+
+ /*
+ * Get max payload length
+ *
+ * A combination of the configuration MaxTransferUnit and
+ * TransportOverhead.
+ * Does not account for FEC/ULP/RED overhead if FEC is enabled.
+ * Does not account for RTP headers.
+ */
+ virtual uint16_t MaxPayloadLength() const = 0;
+
+ /*
+ * Get max data payload length
+ *
+ * A combination of the configuration MaxTransferUnit, headers and
+ * TransportOverhead.
+ * Takes into account FEC/ULP/RED overhead if FEC is enabled.
+ * Takes into account RTP headers
+ */
+ virtual uint16_t MaxDataPayloadLength() const = 0;
+
+ /*
+ * set codec name and payload type
+ *
+ * return -1 on failure else 0
+ */
+ virtual int32_t RegisterSendPayload(
+ const CodecInst& voiceCodec) = 0;
+
+ /*
+ * set codec name and payload type
+ *
+ * return -1 on failure else 0
+ */
+ virtual int32_t RegisterSendPayload(
+ const VideoCodec& videoCodec) = 0;
+
+ /*
+ * Unregister a send payload
+ *
+ * payloadType - payload type of codec
+ *
+ * return -1 on failure else 0
+ */
+ virtual int32_t DeRegisterSendPayload(int8_t payloadType) = 0;
+
+ /*
+ * (De)register RTP header extension type and id.
+ *
+ * return -1 on failure else 0
+ */
+ virtual int32_t RegisterSendRtpHeaderExtension(RTPExtensionType type,
+ uint8_t id) = 0;
+
+ virtual int32_t DeregisterSendRtpHeaderExtension(RTPExtensionType type) = 0;
+
+ /*
+ * get start timestamp
+ */
+ virtual uint32_t StartTimestamp() const = 0;
+
+ /*
+ * configure start timestamp, default is a random number
+ *
+ * timestamp - start timestamp
+ */
+ virtual void SetStartTimestamp(uint32_t timestamp) = 0;
+
+ /*
+ * Get SequenceNumber
+ */
+ virtual uint16_t SequenceNumber() const = 0;
+
+ /*
+ * Set SequenceNumber, default is a random number
+ */
+ virtual void SetSequenceNumber(uint16_t seq) = 0;
+
+ // Returns true if the ssrc matched this module, false otherwise.
+ virtual bool SetRtpStateForSsrc(uint32_t ssrc,
+ const RtpState& rtp_state) = 0;
+ virtual bool GetRtpStateForSsrc(uint32_t ssrc, RtpState* rtp_state) = 0;
+
+ /*
+ * Get SSRC
+ */
+ virtual uint32_t SSRC() const = 0;
+
+ /*
+ * configure SSRC, default is a random number
+ */
+ virtual void SetSSRC(uint32_t ssrc) = 0;
+
+ /*
+ * Set CSRC
+ *
+ * csrcs - vector of CSRCs
+ */
+ virtual void SetCsrcs(const std::vector<uint32_t>& csrcs) = 0;
+
+ /*
+ * Turn on/off sending RTX (RFC 4588). The modes can be set as a combination
+ * of values of the enumerator RtxMode.
+ */
+ virtual void SetRtxSendStatus(int modes) = 0;
+
+ /*
+ * Get status of sending RTX (RFC 4588). The returned value can be
+ * a combination of values of the enumerator RtxMode.
+ */
+ virtual int RtxSendStatus() const = 0;
+
+ // Sets the SSRC to use when sending RTX packets. This doesn't enable RTX,
+ // only the SSRC is set.
+ virtual void SetRtxSsrc(uint32_t ssrc) = 0;
+
+ // Sets the payload type to use when sending RTX packets. Note that this
+ // doesn't enable RTX, only the payload type is set.
+ virtual void SetRtxSendPayloadType(int payload_type,
+ int associated_payload_type) = 0;
+
+ // Gets the payload type pair of (RTX, associated) to use when sending RTX
+ // packets.
+ virtual std::pair<int, int> RtxSendPayloadType() const = 0;
+
+ /*
+ * sends kRtcpByeCode when going from true to false
+ *
+ * sending - on/off
+ *
+ * return -1 on failure else 0
+ */
+ virtual int32_t SetSendingStatus(bool sending) = 0;
+
+ /*
+ * get send status
+ */
+ virtual bool Sending() const = 0;
+
+ /*
+ * Starts/stops sending media packets; on by default
+ *
+ * sending - on/off
+ */
+ virtual void SetSendingMediaStatus(bool sending) = 0;
+
+ /*
+ * get send status
+ */
+ virtual bool SendingMedia() const = 0;
+
+ /*
+ * get sent bitrate in Kbit/s
+ */
+ virtual void BitrateSent(uint32_t* totalRate,
+ uint32_t* videoRate,
+ uint32_t* fecRate,
+ uint32_t* nackRate) const = 0;
+
+ /*
+ * Used by the codec module to deliver a video or audio frame for
+ * packetization.
+ *
+ * frameType - type of frame to send
+ * payloadType - payload type of frame to send
+ * timestamp - timestamp of frame to send
+ * payloadData - payload buffer of frame to send
+ * payloadSize - size of payload buffer to send
+ * fragmentation - fragmentation offset data for fragmented frames such
+ * as layers or RED
+ *
+ * return -1 on failure else 0
+ */
+ virtual int32_t SendOutgoingData(
+ FrameType frameType,
+ int8_t payloadType,
+ uint32_t timeStamp,
+ int64_t capture_time_ms,
+ const uint8_t* payloadData,
+ size_t payloadSize,
+ const RTPFragmentationHeader* fragmentation = NULL,
+ const RTPVideoHeader* rtpVideoHdr = NULL) = 0;
+
+ virtual bool TimeToSendPacket(uint32_t ssrc,
+ uint16_t sequence_number,
+ int64_t capture_time_ms,
+ bool retransmission) = 0;
+
+ virtual size_t TimeToSendPadding(size_t bytes) = 0;
+
+ // Called on generation of new statistics after an RTP send.
+ virtual void RegisterSendChannelRtpStatisticsCallback(
+ StreamDataCountersCallback* callback) = 0;
+ virtual StreamDataCountersCallback*
+ GetSendChannelRtpStatisticsCallback() const = 0;
+
+ /**************************************************************************
+ *
+ * RTCP
+ *
+ ***************************************************************************/
+
+ /*
+ * Get RTCP status
+ */
+ virtual RtcpMode RTCP() const = 0;
+
+ /*
+ * configure RTCP status, i.e. on (compound or non-compound) / off
+ *
+ * method - RTCP method to use
+ */
+ virtual void SetRTCPStatus(RtcpMode method) = 0;
+
+ /*
+ * Set RTCP CName (i.e. unique identifier)
+ *
+ * return -1 on failure else 0
+ */
+ virtual int32_t SetCNAME(const char* c_name) = 0;
+
+ /*
+ * Get remote CName
+ *
+ * return -1 on failure else 0
+ */
+ virtual int32_t RemoteCNAME(uint32_t remoteSSRC,
+ char cName[RTCP_CNAME_SIZE]) const = 0;
+
+ /*
+ * Get remote NTP
+ *
+ * return -1 on failure else 0
+ */
+ virtual int32_t RemoteNTP(
+ uint32_t *ReceivedNTPsecs,
+ uint32_t *ReceivedNTPfrac,
+ uint32_t *RTCPArrivalTimeSecs,
+ uint32_t *RTCPArrivalTimeFrac,
+ uint32_t *rtcp_timestamp) const = 0;
+
+ /*
+ * AddMixedCNAME
+ *
+ * return -1 on failure else 0
+ */
+ virtual int32_t AddMixedCNAME(uint32_t SSRC, const char* c_name) = 0;
+
+ /*
+ * RemoveMixedCNAME
+ *
+ * return -1 on failure else 0
+ */
+ virtual int32_t RemoveMixedCNAME(uint32_t SSRC) = 0;
+
+ /*
+ * Get RoundTripTime
+ *
+ * return -1 on failure else 0
+ */
+ virtual int32_t RTT(uint32_t remoteSSRC,
+ int64_t* RTT,
+ int64_t* avgRTT,
+ int64_t* minRTT,
+ int64_t* maxRTT) const = 0;
+
+ /*
+ * Force a send of a RTCP packet
+ * periodic SR and RR are triggered via the process function
+ *
+ * return -1 on failure else 0
+ */
+ virtual int32_t SendRTCP(RTCPPacketType rtcpPacketType) = 0;
+
+ /*
+ * Force a send of a RTCP packet with more than one packet type.
+ * periodic SR and RR are triggered via the process function
+ *
+ * return -1 on failure else 0
+ */
+ virtual int32_t SendCompoundRTCP(
+ const std::set<RTCPPacketType>& rtcpPacketTypes) = 0;
+
+ /*
+ * Inform the sender about the good state of the RTP receiver
+ */
+ virtual int32_t SendRTCPReferencePictureSelection(
+ const uint64_t pictureID) = 0;
+
+ /*
+ * Send a RTCP Slice Loss Indication (SLI)
+ * 6 least significant bits of pictureID
+ */
+ virtual int32_t SendRTCPSliceLossIndication(uint8_t pictureID) = 0;
+
+ /*
+ * Statistics of the amount of data sent
+ *
+ * return -1 on failure else 0
+ */
+ virtual int32_t DataCountersRTP(
+ size_t* bytesSent,
+ uint32_t* packetsSent) const = 0;
+
+ /*
+ * Get send statistics for the RTP and RTX stream.
+ */
+ virtual void GetSendStreamDataCounters(
+ StreamDataCounters* rtp_counters,
+ StreamDataCounters* rtx_counters) const = 0;
+
+ /*
+ * Get packet loss statistics for the RTP stream.
+ */
+ virtual void GetRtpPacketLossStats(
+ bool outgoing,
+ uint32_t ssrc,
+ struct RtpPacketLossStats* loss_stats) const = 0;
+
+ /*
+ * Get received RTCP sender info
+ *
+ * return -1 on failure else 0
+ */
+ virtual int32_t RemoteRTCPStat(RTCPSenderInfo* senderInfo) = 0;
+
+ /*
+ * Get received RTCP report block
+ *
+ * return -1 on failure else 0
+ */
+ virtual int32_t RemoteRTCPStat(
+ std::vector<RTCPReportBlock>* receiveBlocks) const = 0;
+
+ /*
+ * (APP) Application specific data
+ *
+ * return -1 on failure else 0
+ */
+ virtual int32_t SetRTCPApplicationSpecificData(uint8_t subType,
+ uint32_t name,
+ const uint8_t* data,
+ uint16_t length) = 0;
+ /*
+ * (XR) VOIP metric
+ *
+ * return -1 on failure else 0
+ */
+ virtual int32_t SetRTCPVoIPMetrics(
+ const RTCPVoIPMetric* VoIPMetric) = 0;
+
+ /*
+ * (XR) Receiver Reference Time Report
+ */
+ virtual void SetRtcpXrRrtrStatus(bool enable) = 0;
+
+ virtual bool RtcpXrRrtrStatus() const = 0;
+
+ /*
+ * (REMB) Receiver Estimated Max Bitrate
+ */
+ virtual bool REMB() const = 0;
+
+ virtual void SetREMBStatus(bool enable) = 0;
+
+ virtual void SetREMBData(uint32_t bitrate,
+ const std::vector<uint32_t>& ssrcs) = 0;
+
+ /*
+ * (TMMBR) Temporary Max Media Bit Rate
+ */
+ virtual bool TMMBR() const = 0;
+
+ virtual void SetTMMBRStatus(bool enable) = 0;
+
+ /*
+ * (NACK)
+ */
+
+ /*
+ * TODO(holmer): Propagate this API to VideoEngine.
+ * Returns the currently configured selective retransmission settings.
+ */
+ virtual int SelectiveRetransmissions() const = 0;
+
+ /*
+ * TODO(holmer): Propagate this API to VideoEngine.
+ * Sets the selective retransmission settings, which will decide which
+ * packets will be retransmitted if NACKed. Settings are constructed by
+ * combining the constants in enum RetransmissionMode with bitwise OR.
+ * All packets are retransmitted if kRetransmitAllPackets is set, while no
+ * packets are retransmitted if kRetransmitOff is set.
+ * By default all packets except FEC packets are retransmitted. For VP8
+ * with temporal scalability only base layer packets are retransmitted.
+ *
+ * Returns -1 on failure, otherwise 0.
+ */
+ virtual int SetSelectiveRetransmissions(uint8_t settings) = 0;
+
+ /*
+ * Send a Negative acknowledgement packet
+ *
+ * return -1 on failure else 0
+ */
+ virtual int32_t SendNACK(const uint16_t* nackList, uint16_t size) = 0;
+
+ /*
+ * Store the sent packets, needed to answer Negative acknowledgement
+ * (NACK) requests
+ */
+ virtual void SetStorePacketsStatus(bool enable, uint16_t numberToStore) = 0;
+
+ // Returns true if the module is configured to store packets.
+ virtual bool StorePackets() const = 0;
+
+ // Called on receipt of RTCP report block from remote side.
+ virtual void RegisterRtcpStatisticsCallback(
+ RtcpStatisticsCallback* callback) = 0;
+ virtual RtcpStatisticsCallback*
+ GetRtcpStatisticsCallback() = 0;
+ // BWE feedback packets.
+ virtual bool SendFeedbackPacket(const rtcp::TransportFeedback& packet) = 0;
+
+ /**************************************************************************
+ *
+ * Audio
+ *
+ ***************************************************************************/
+
+ /*
+ * set audio packet size, used to determine when it's time to send a DTMF
+ * packet in silence (CNG)
+ *
+ * return -1 on failure else 0
+ */
+ virtual int32_t SetAudioPacketSize(uint16_t packetSizeSamples) = 0;
+
+ /*
+ * Send a TelephoneEvent tone using RFC 2833 (4733)
+ *
+ * return -1 on failure else 0
+ */
+ virtual int32_t SendTelephoneEventOutband(uint8_t key,
+ uint16_t time_ms,
+ uint8_t level) = 0;
+
+ /*
+ * Set payload type for Redundant Audio Data RFC 2198
+ *
+ * return -1 on failure else 0
+ */
+ virtual int32_t SetSendREDPayloadType(int8_t payloadType) = 0;
+
+ /*
+ * Get payload type for Redundant Audio Data RFC 2198
+ *
+ * return -1 on failure else 0
+ */
+ // DEPRECATED. Use SendREDPayloadType below that takes output parameter
+ // by pointer instead of by reference.
+ // TODO(danilchap): Remove this when all callers have been updated.
+ int32_t SendREDPayloadType(int8_t& payloadType) const { // NOLINT
+ return SendREDPayloadType(&payloadType);
+ }
+ virtual int32_t SendREDPayloadType(int8_t* payload_type) const = 0;
+ /*
+ * Store the audio level in dBov for the audio-level-indication
+ * header extension.
+ * This API shall be called before transmission of an RTP packet to ensure
+ * that the |level| part of the extended RTP header is updated.
+ *
+ * return -1 on failure else 0.
+ */
+ virtual int32_t SetAudioLevel(uint8_t level_dBov) = 0;
+
+ /**************************************************************************
+ *
+ * Video
+ *
+ ***************************************************************************/
+
+ /*
+ * Set the target send bitrate
+ */
+ virtual void SetTargetSendBitrate(uint32_t bitrate_bps) = 0;
+
+ /*
+ * Turn on/off generic FEC
+ */
+ virtual void SetGenericFECStatus(bool enable,
+ uint8_t payload_type_red,
+ uint8_t payload_type_fec) = 0;
+
+ /*
+ * Get generic FEC setting
+ */
+ // DEPRECATED. Use GenericFECStatus below that takes output parameters
+ // by pointers instead of by references.
+ // TODO(danilchap): Remove this when all callers have been updated.
+ void GenericFECStatus(bool& enable, // NOLINT
+ uint8_t& payloadTypeRED, // NOLINT
+ uint8_t& payloadTypeFEC) { // NOLINT
+ GenericFECStatus(&enable, &payloadTypeRED, &payloadTypeFEC);
+ }
+ virtual void GenericFECStatus(bool* enable,
+ uint8_t* payload_type_red,
+ uint8_t* payload_type_fec) = 0;
+
+ virtual int32_t SetFecParameters(
+ const FecProtectionParams* delta_params,
+ const FecProtectionParams* key_params) = 0;
+
+ /*
+ * Set method for requesting a new key frame
+ *
+ * return -1 on failure else 0
+ */
+ virtual int32_t SetKeyFrameRequestMethod(KeyFrameRequestMethod method) = 0;
+
+ /*
+ * send a request for a keyframe
+ *
+ * return -1 on failure else 0
+ */
+ virtual int32_t RequestKeyFrame() = 0;
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_INCLUDE_RTP_RTCP_H_
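A construction sketch for a receiver-only module (not part of the patch; the Transport and ReceiveStatistics are assumed to be owned by the caller, and RtcpMode::kCompound is the enum value from common_types.h):

// Sketch only: minimal receiver-only RtpRtcp configuration.
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
#include "webrtc/system_wrappers/include/clock.h"

void SketchModule(webrtc::Transport* transport,
                  webrtc::ReceiveStatistics* receive_statistics) {
  webrtc::RtpRtcp::Configuration config;
  config.audio = false;
  config.receiver_only = true;
  config.clock = webrtc::Clock::GetRealTimeClock();
  config.receive_statistics = receive_statistics;
  config.outgoing_transport = transport;
  rtc::scoped_ptr<webrtc::RtpRtcp> rtp_rtcp(
      webrtc::RtpRtcp::CreateRtpRtcp(config));
  rtp_rtcp->SetRTCPStatus(webrtc::RtcpMode::kCompound);
  rtp_rtcp->SetRemoteSSRC(0x12345678 /* hypothetical remote SSRC */);
}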
diff --git a/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h b/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h
new file mode 100644
index 0000000000..fad97f19cc
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h
@@ -0,0 +1,416 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_INCLUDE_RTP_RTCP_DEFINES_H_
+#define WEBRTC_MODULES_RTP_RTCP_INCLUDE_RTP_RTCP_DEFINES_H_
+
+#include <stddef.h>
+#include <list>
+
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/system_wrappers/include/clock.h"
+#include "webrtc/typedefs.h"
+
+#define RTCP_CNAME_SIZE 256 // RFC 3550 page 44, including null termination
+#define IP_PACKET_SIZE 1500 // we assume ethernet
+#define MAX_NUMBER_OF_PARALLEL_TELEPHONE_EVENTS 10
+#define TIMEOUT_SEI_MESSAGES_MS 30000 // in milliseconds
+
+namespace webrtc {
+namespace rtcp {
+class TransportFeedback;
+}
+
+const int kVideoPayloadTypeFrequency = 90000;
+
+// Minimum RTP header size in bytes.
+const uint8_t kRtpHeaderSize = 12;
+
+struct AudioPayload {
+ uint32_t frequency;
+ size_t channels;
+ uint32_t rate;
+};
+
+struct VideoPayload {
+ RtpVideoCodecTypes videoCodecType;
+ uint32_t maxRate;
+};
+
+union PayloadUnion {
+ AudioPayload Audio;
+ VideoPayload Video;
+};
+
+enum RTPAliveType { kRtpDead = 0, kRtpNoRtp = 1, kRtpAlive = 2 };
+
+enum ProtectionType {
+ kUnprotectedPacket,
+ kProtectedPacket
+};
+
+enum StorageType {
+ kDontRetransmit,
+ kAllowRetransmission
+};
+
+enum RTPExtensionType {
+ kRtpExtensionNone,
+ kRtpExtensionTransmissionTimeOffset,
+ kRtpExtensionAudioLevel,
+ kRtpExtensionAbsoluteSendTime,
+ kRtpExtensionVideoRotation,
+ kRtpExtensionTransportSequenceNumber,
+};
+
+enum RTCPAppSubTypes { kAppSubtypeBwe = 0x00 };
+
+// TODO(sprang): Make this an enum class once rtcp_receiver has been cleaned up.
+enum RTCPPacketType : uint32_t {
+ kRtcpReport = 0x0001,
+ kRtcpSr = 0x0002,
+ kRtcpRr = 0x0004,
+ kRtcpSdes = 0x0008,
+ kRtcpBye = 0x0010,
+ kRtcpPli = 0x0020,
+ kRtcpNack = 0x0040,
+ kRtcpFir = 0x0080,
+ kRtcpTmmbr = 0x0100,
+ kRtcpTmmbn = 0x0200,
+ kRtcpSrReq = 0x0400,
+ kRtcpXrVoipMetric = 0x0800,
+ kRtcpApp = 0x1000,
+ kRtcpSli = 0x4000,
+ kRtcpRpsi = 0x8000,
+ kRtcpRemb = 0x10000,
+ kRtcpTransmissionTimeOffset = 0x20000,
+ kRtcpXrReceiverReferenceTime = 0x40000,
+ kRtcpXrDlrrReportBlock = 0x80000,
+ kRtcpTransportFeedback = 0x100000,
+};
+
+enum KeyFrameRequestMethod { kKeyFrameReqPliRtcp, kKeyFrameReqFirRtcp };
+
+enum RtpRtcpPacketType { kPacketRtp = 0, kPacketKeepAlive = 1 };
+
+enum NACKMethod { kNackOff = 0, kNackRtcp = 2 };
+
+enum RetransmissionMode : uint8_t {
+ kRetransmitOff = 0x0,
+ kRetransmitFECPackets = 0x1,
+ kRetransmitBaseLayer = 0x2,
+ kRetransmitHigherLayers = 0x4,
+ kRetransmitAllPackets = 0xFF
+};
+
+enum RtxMode {
+ kRtxOff = 0x0,
+ kRtxRetransmitted = 0x1, // Only send retransmissions over RTX.
+ kRtxRedundantPayloads = 0x2 // Preventively send redundant payloads
+ // instead of padding.
+};
+
+const size_t kRtxHeaderSize = 2;
+
+struct RTCPSenderInfo {
+ uint32_t NTPseconds;
+ uint32_t NTPfraction;
+ uint32_t RTPtimeStamp;
+ uint32_t sendPacketCount;
+ uint32_t sendOctetCount;
+};
+
+struct RTCPReportBlock {
+ RTCPReportBlock()
+ : remoteSSRC(0), sourceSSRC(0), fractionLost(0), cumulativeLost(0),
+ extendedHighSeqNum(0), jitter(0), lastSR(0),
+ delaySinceLastSR(0) {}
+
+ RTCPReportBlock(uint32_t remote_ssrc,
+ uint32_t source_ssrc,
+ uint8_t fraction_lost,
+ uint32_t cumulative_lost,
+ uint32_t extended_high_sequence_number,
+ uint32_t jitter,
+ uint32_t last_sender_report,
+ uint32_t delay_since_last_sender_report)
+ : remoteSSRC(remote_ssrc),
+ sourceSSRC(source_ssrc),
+ fractionLost(fraction_lost),
+ cumulativeLost(cumulative_lost),
+ extendedHighSeqNum(extended_high_sequence_number),
+ jitter(jitter),
+ lastSR(last_sender_report),
+ delaySinceLastSR(delay_since_last_sender_report) {}
+
+ // Fields as described by RFC 3550 6.4.2.
+ uint32_t remoteSSRC; // SSRC of sender of this report.
+ uint32_t sourceSSRC; // SSRC of the RTP packet sender.
+ uint8_t fractionLost;
+ uint32_t cumulativeLost; // 24 bits valid.
+ uint32_t extendedHighSeqNum;
+ uint32_t jitter;
+ uint32_t lastSR;
+ uint32_t delaySinceLastSR;
+};
+
+struct RtcpReceiveTimeInfo {
+ // Fields as described by RFC 3611 4.5.
+ uint32_t sourceSSRC;
+ uint32_t lastRR;
+ uint32_t delaySinceLastRR;
+};
+
+typedef std::list<RTCPReportBlock> ReportBlockList;
+
+struct RtpState {
+ RtpState()
+ : sequence_number(0),
+ start_timestamp(0),
+ timestamp(0),
+ capture_time_ms(-1),
+ last_timestamp_time_ms(-1),
+ media_has_been_sent(false) {}
+ uint16_t sequence_number;
+ uint32_t start_timestamp;
+ uint32_t timestamp;
+ int64_t capture_time_ms;
+ int64_t last_timestamp_time_ms;
+ bool media_has_been_sent;
+};
+
+class RtpData {
+ public:
+ virtual ~RtpData() {}
+
+ virtual int32_t OnReceivedPayloadData(const uint8_t* payloadData,
+ const size_t payloadSize,
+ const WebRtcRTPHeader* rtpHeader) = 0;
+
+ virtual bool OnRecoveredPacket(const uint8_t* packet,
+ size_t packet_length) = 0;
+};
+
+class RtpFeedback {
+ public:
+ virtual ~RtpFeedback() {}
+
+ // Called on a receiving payload change or SSRC change. Returns 0 on success.
+ /*
+ * channels - number of channels in codec (1 = mono, 2 = stereo)
+ */
+ virtual int32_t OnInitializeDecoder(
+ const int8_t payloadType,
+ const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+ const int frequency,
+ const size_t channels,
+ const uint32_t rate) = 0;
+
+ virtual void OnIncomingSSRCChanged(const uint32_t ssrc) = 0;
+
+ virtual void OnIncomingCSRCChanged(const uint32_t CSRC, const bool added) = 0;
+};
+
+class RtpAudioFeedback {
+ public:
+ virtual void OnPlayTelephoneEvent(const uint8_t event,
+ const uint16_t lengthMs,
+ const uint8_t volume) = 0;
+
+ protected:
+ virtual ~RtpAudioFeedback() {}
+};
+
+class RtcpIntraFrameObserver {
+ public:
+ virtual void OnReceivedIntraFrameRequest(uint32_t ssrc) = 0;
+
+ virtual void OnReceivedSLI(uint32_t ssrc,
+ uint8_t picture_id) = 0;
+
+ virtual void OnReceivedRPSI(uint32_t ssrc,
+ uint64_t picture_id) = 0;
+
+ virtual void OnLocalSsrcChanged(uint32_t old_ssrc, uint32_t new_ssrc) = 0;
+
+ virtual ~RtcpIntraFrameObserver() {}
+};
+
+class RtcpBandwidthObserver {
+ public:
+ // REMB or TMMBR
+ virtual void OnReceivedEstimatedBitrate(uint32_t bitrate) = 0;
+
+ virtual void OnReceivedRtcpReceiverReport(
+ const ReportBlockList& report_blocks,
+ int64_t rtt,
+ int64_t now_ms) = 0;
+
+ virtual ~RtcpBandwidthObserver() {}
+};
+
+struct PacketInfo {
+ PacketInfo(int64_t arrival_time_ms, uint16_t sequence_number)
+ : PacketInfo(-1, arrival_time_ms, -1, sequence_number, 0, false) {}
+
+ PacketInfo(int64_t arrival_time_ms,
+ int64_t send_time_ms,
+ uint16_t sequence_number,
+ size_t payload_size,
+ bool was_paced)
+ : PacketInfo(-1,
+ arrival_time_ms,
+ send_time_ms,
+ sequence_number,
+ payload_size,
+ was_paced) {}
+
+ PacketInfo(int64_t creation_time_ms,
+ int64_t arrival_time_ms,
+ int64_t send_time_ms,
+ uint16_t sequence_number,
+ size_t payload_size,
+ bool was_paced)
+ : creation_time_ms(creation_time_ms),
+ arrival_time_ms(arrival_time_ms),
+ send_time_ms(send_time_ms),
+ sequence_number(sequence_number),
+ payload_size(payload_size),
+ was_paced(was_paced) {}
+
+ // Time corresponding to when this object was created.
+ int64_t creation_time_ms;
+ // Time corresponding to when the packet was received. Timestamped with the
+ // receiver's clock.
+ int64_t arrival_time_ms;
+ // Time corresponding to when the packet was sent, timestamped with the
+ // sender's clock.
+ int64_t send_time_ms;
+ // Packet identifier, incremented with 1 for every packet generated by the
+ // sender.
+ uint16_t sequence_number;
+ // Size of the packet excluding RTP headers.
+ size_t payload_size;
+ // True if the packet was paced out by the pacer.
+ bool was_paced;
+};
+
+class TransportFeedbackObserver {
+ public:
+ TransportFeedbackObserver() {}
+ virtual ~TransportFeedbackObserver() {}
+
+ // Note: |sequence_number| is the transport-wide sequence number. Arrival
+ // time must be set to 0.
+ virtual void AddPacket(uint16_t sequence_number,
+ size_t length,
+ bool was_paced) = 0;
+
+ virtual void OnTransportFeedback(const rtcp::TransportFeedback& feedback) = 0;
+};
+
+class RtcpRttStats {
+ public:
+ virtual void OnRttUpdate(int64_t rtt) = 0;
+
+ virtual int64_t LastProcessedRtt() const = 0;
+
+ virtual ~RtcpRttStats() {}
+};
+
+// Null object version of RtpFeedback.
+class NullRtpFeedback : public RtpFeedback {
+ public:
+ virtual ~NullRtpFeedback() {}
+
+ int32_t OnInitializeDecoder(const int8_t payloadType,
+ const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+ const int frequency,
+ const size_t channels,
+ const uint32_t rate) override {
+ return 0;
+ }
+
+ void OnIncomingSSRCChanged(const uint32_t ssrc) override {}
+ void OnIncomingCSRCChanged(const uint32_t CSRC, const bool added) override {}
+};
+
+// Null object version of RtpData.
+class NullRtpData : public RtpData {
+ public:
+ virtual ~NullRtpData() {}
+
+ int32_t OnReceivedPayloadData(const uint8_t* payloadData,
+ const size_t payloadSize,
+ const WebRtcRTPHeader* rtpHeader) override {
+ return 0;
+ }
+
+ bool OnRecoveredPacket(const uint8_t* packet, size_t packet_length) override {
+ return true;
+ }
+};
+
+// Null object version of RtpAudioFeedback.
+class NullRtpAudioFeedback : public RtpAudioFeedback {
+ public:
+ virtual ~NullRtpAudioFeedback() {}
+
+ void OnPlayTelephoneEvent(const uint8_t event,
+ const uint16_t lengthMs,
+ const uint8_t volume) override {}
+};
+
+// Statistics about packet loss for a single directional connection. All values
+// are totals since the connection initiated.
+struct RtpPacketLossStats {
+ // The number of packets lost in events where no adjacent packets were also
+ // lost.
+ uint64_t single_packet_loss_count;
+ // The number of events in which more than one adjacent packet was lost.
+ uint64_t multiple_packet_loss_event_count;
+ // The number of packets lost in events where more than one adjacent packet
+ // was lost.
+ uint64_t multiple_packet_loss_packet_count;
+};
+
+class RtpPacketSender {
+ public:
+ RtpPacketSender() {}
+ virtual ~RtpPacketSender() {}
+
+ enum Priority {
+ kHighPriority = 0, // Pass through; will be sent immediately.
+ kNormalPriority = 2, // Put in back of the line.
+ kLowPriority = 3, // Put in back of the low priority line.
+ };
+ // Low priority packets are mixed with the normal priority packets
+ // while we are paused.
+
+ // Adds the packet information to the queue to be paced out;
+ // TimeToSendPacket is called back when it is time to send.
+ virtual void InsertPacket(Priority priority,
+ uint32_t ssrc,
+ uint16_t sequence_number,
+ int64_t capture_time_ms,
+ size_t bytes,
+ bool retransmission) = 0;
+};
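A sketch of feeding a media packet to a pacer through this interface; the helper name and the priority choice are illustrative, not prescribed by the header:

#include <cstddef>
#include <cstdint>

#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"

void EnqueueMediaPacket(webrtc::RtpPacketSender* pacer,
                        uint32_t ssrc,
                        uint16_t sequence_number,
                        int64_t capture_time_ms,
                        size_t payload_bytes) {
  // Ordinary media is queued at kNormalPriority; a NACK-triggered resend
  // would pass retransmission=true instead.
  pacer->InsertPacket(webrtc::RtpPacketSender::kNormalPriority, ssrc,
                      sequence_number, capture_time_ms, payload_bytes,
                      /*retransmission=*/false);
}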
+
+class TransportSequenceNumberAllocator {
+ public:
+ TransportSequenceNumberAllocator() {}
+ virtual ~TransportSequenceNumberAllocator() {}
+
+ virtual uint16_t AllocateSequenceNumber() = 0;
+};
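A trivial allocator sketch: unsigned 16-bit arithmetic gives exactly the wrap-around the transport-wide sequence number space expects.

#include <cstdint>

#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"

class MonotonicSequenceNumberAllocator
    : public webrtc::TransportSequenceNumberAllocator {
 public:
  uint16_t AllocateSequenceNumber() override {
    return next_sequence_number_++;  // Wraps modulo 2^16 by definition.
  }

 private:
  uint16_t next_sequence_number_ = 0;
};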
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_INCLUDE_RTP_RTCP_DEFINES_H_
diff --git a/webrtc/modules/rtp_rtcp/interface/fec_receiver.h b/webrtc/modules/rtp_rtcp/interface/fec_receiver.h
deleted file mode 100644
index 3608165dab..0000000000
--- a/webrtc/modules/rtp_rtcp/interface/fec_receiver.h
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_RTP_RTCP_INTERFACE_FEC_RECEIVER_H_
-#define WEBRTC_MODULES_RTP_RTCP_INTERFACE_FEC_RECEIVER_H_
-
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-struct FecPacketCounter {
- FecPacketCounter()
- : num_packets(0),
- num_fec_packets(0),
- num_recovered_packets(0) {}
-
- size_t num_packets; // Number of received packets.
- size_t num_fec_packets; // Number of received FEC packets.
- size_t num_recovered_packets; // Number of recovered media packets using FEC.
-};
-
-class FecReceiver {
- public:
- static FecReceiver* Create(RtpData* callback);
-
- virtual ~FecReceiver() {}
-
- virtual int32_t AddReceivedRedPacket(const RTPHeader& rtp_header,
- const uint8_t* incoming_rtp_packet,
- size_t packet_length,
- uint8_t ulpfec_payload_type) = 0;
-
- virtual int32_t ProcessReceivedFec() = 0;
-
- virtual FecPacketCounter GetPacketCounter() const = 0;
-};
-} // namespace webrtc
-#endif // WEBRTC_MODULES_RTP_RTCP_INTERFACE_FEC_RECEIVER_H_
diff --git a/webrtc/modules/rtp_rtcp/interface/receive_statistics.h b/webrtc/modules/rtp_rtcp/interface/receive_statistics.h
deleted file mode 100644
index 6bd5cd846e..0000000000
--- a/webrtc/modules/rtp_rtcp/interface/receive_statistics.h
+++ /dev/null
@@ -1,102 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_RTP_RTCP_INTERFACE_RECEIVE_STATISTICS_H_
-#define WEBRTC_MODULES_RTP_RTCP_INTERFACE_RECEIVE_STATISTICS_H_
-
-#include <map>
-
-#include "webrtc/modules/interface/module.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-class Clock;
-
-class StreamStatistician {
- public:
- virtual ~StreamStatistician();
-
- virtual bool GetStatistics(RtcpStatistics* statistics, bool reset) = 0;
- virtual void GetDataCounters(size_t* bytes_received,
- uint32_t* packets_received) const = 0;
-
- // Gets received stream data counters (includes reset counter values).
- virtual void GetReceiveStreamDataCounters(
- StreamDataCounters* data_counters) const = 0;
-
- virtual uint32_t BitrateReceived() const = 0;
-
- // Returns true if the packet with RTP header |header| is likely to be a
- // retransmitted packet, false otherwise.
- virtual bool IsRetransmitOfOldPacket(const RTPHeader& header,
- int64_t min_rtt) const = 0;
-
- // Returns true if |sequence_number| is received in order, false otherwise.
- virtual bool IsPacketInOrder(uint16_t sequence_number) const = 0;
-};
-
-typedef std::map<uint32_t, StreamStatistician*> StatisticianMap;
-
-class ReceiveStatistics : public Module {
- public:
- virtual ~ReceiveStatistics() {}
-
- static ReceiveStatistics* Create(Clock* clock);
-
- // Updates the receive statistics with this packet.
- virtual void IncomingPacket(const RTPHeader& rtp_header,
- size_t packet_length,
- bool retransmitted) = 0;
-
- // Increment counter for number of FEC packets received.
- virtual void FecPacketReceived(const RTPHeader& header,
- size_t packet_length) = 0;
-
- // Returns a map of all statisticians which have seen an incoming packet
- // during the last two seconds.
- virtual StatisticianMap GetActiveStatisticians() const = 0;
-
- // Returns a pointer to the statistician of an ssrc.
- virtual StreamStatistician* GetStatistician(uint32_t ssrc) const = 0;
-
- // Sets the max reordering threshold in number of packets.
- virtual void SetMaxReorderingThreshold(int max_reordering_threshold) = 0;
-
- // Called on new RTCP stats creation.
- virtual void RegisterRtcpStatisticsCallback(
- RtcpStatisticsCallback* callback) = 0;
-
- // Called on new RTP stats creation.
- virtual void RegisterRtpStatisticsCallback(
- StreamDataCountersCallback* callback) = 0;
-};
-
-class NullReceiveStatistics : public ReceiveStatistics {
- public:
- void IncomingPacket(const RTPHeader& rtp_header,
- size_t packet_length,
- bool retransmitted) override;
- void FecPacketReceived(const RTPHeader& header,
- size_t packet_length) override;
- StatisticianMap GetActiveStatisticians() const override;
- StreamStatistician* GetStatistician(uint32_t ssrc) const override;
- int64_t TimeUntilNextProcess() override;
- int32_t Process() override;
- void SetMaxReorderingThreshold(int max_reordering_threshold) override;
- void RegisterRtcpStatisticsCallback(
- RtcpStatisticsCallback* callback) override;
- void RegisterRtpStatisticsCallback(
- StreamDataCountersCallback* callback) override;
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_RTP_RTCP_INTERFACE_RECEIVE_STATISTICS_H_
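The same interface reappears under include/receive_statistics.h in this change; a usage sketch against it, assuming caller ownership of the object returned by Create():

#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
#include "webrtc/system_wrappers/include/clock.h"

void ReceiveStatsSketch(const webrtc::RTPHeader& header, size_t packet_length) {
  webrtc::ReceiveStatistics* stats =
      webrtc::ReceiveStatistics::Create(webrtc::Clock::GetRealTimeClock());
  // For every incoming RTP packet:
  stats->IncomingPacket(header, packet_length, /*retransmitted=*/false);
  // Per-SSRC counters for streams seen during the last two seconds:
  webrtc::StatisticianMap active = stats->GetActiveStatisticians();
  for (const auto& ssrc_and_statistician : active) {
    uint32_t bitrate = ssrc_and_statistician.second->BitrateReceived();
    (void)bitrate;
  }
  delete stats;
}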
diff --git a/webrtc/modules/rtp_rtcp/interface/remote_ntp_time_estimator.h b/webrtc/modules/rtp_rtcp/interface/remote_ntp_time_estimator.h
deleted file mode 100644
index 0ffba212a6..0000000000
--- a/webrtc/modules/rtp_rtcp/interface/remote_ntp_time_estimator.h
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_RTP_RTCP_INTERFACE_REMOTE_NTP_TIME_ESTIMATOR_H_
-#define WEBRTC_MODULES_RTP_RTCP_INTERFACE_REMOTE_NTP_TIME_ESTIMATOR_H_
-
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/system_wrappers/include/rtp_to_ntp.h"
-
-namespace webrtc {
-
-class Clock;
-class TimestampExtrapolator;
-
-// RemoteNtpTimeEstimator can be used to estimate a given RTP timestamp's NTP
-// time in local timebase.
-// Note that it needs to be trained with at least 2 RTCP SR (by calling
-// |UpdateRtcpTimestamp|) before it can be used.
-class RemoteNtpTimeEstimator {
- public:
- explicit RemoteNtpTimeEstimator(Clock* clock);
-
- ~RemoteNtpTimeEstimator();
-
- // Updates the estimator with round trip time |rtt|, NTP seconds |ntp_secs|,
- // NTP fraction |ntp_frac| and RTP timestamp |rtcp_timestamp|.
- bool UpdateRtcpTimestamp(int64_t rtt, uint32_t ntp_secs, uint32_t ntp_frac,
- uint32_t rtp_timestamp);
-
- // Estimates the NTP timestamp in local timebase from |rtp_timestamp|.
- // Returns the NTP timestamp in ms when success. -1 if failed.
- int64_t Estimate(uint32_t rtp_timestamp);
-
- private:
- Clock* clock_;
- rtc::scoped_ptr<TimestampExtrapolator> ts_extrapolator_;
- RtcpList rtcp_list_;
- int64_t last_timing_log_ms_;
- RTC_DISALLOW_COPY_AND_ASSIGN(RemoteNtpTimeEstimator);
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_RTP_RTCP_INTERFACE_REMOTE_NTP_TIME_ESTIMATOR_H_
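A sketch of the training/estimation flow described in the comments above; the inputs would come from received RTCP sender reports, and at least two SRs are needed before Estimate() succeeds:

#include "webrtc/modules/rtp_rtcp/include/remote_ntp_time_estimator.h"
#include "webrtc/system_wrappers/include/clock.h"

int64_t EstimateSketch(int64_t rtt_ms,
                       uint32_t ntp_secs,
                       uint32_t ntp_frac,
                       uint32_t sr_rtp_timestamp,
                       uint32_t rtp_timestamp) {
  webrtc::RemoteNtpTimeEstimator estimator(webrtc::Clock::GetRealTimeClock());
  // Call once per received RTCP SR; only one call is shown here.
  estimator.UpdateRtcpTimestamp(rtt_ms, ntp_secs, ntp_frac, sr_rtp_timestamp);
  // Returns the NTP time in ms on success, -1 while still untrained.
  return estimator.Estimate(rtp_timestamp);
}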
diff --git a/webrtc/modules/rtp_rtcp/interface/rtp_cvo.h b/webrtc/modules/rtp_rtcp/interface/rtp_cvo.h
deleted file mode 100644
index c7a0268ef0..0000000000
--- a/webrtc/modules/rtp_rtcp/interface/rtp_cvo.h
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-#ifndef WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_CVO__H_
-#define WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_CVO__H_
-
-#include "webrtc/common_video/rotation.h"
-
-namespace webrtc {
-
-// Please refer to http://www.etsi.org/deliver/etsi_ts/126100_126199/126114/
-// 12.07.00_60/ts_126114v120700p.pdf Section 7.4.5. The rotation of a frame is
-// the clockwise angle the frames must be rotated in order to display the frames
-// correctly if the display is rotated in its natural orientation.
-inline uint8_t ConvertVideoRotationToCVOByte(VideoRotation rotation) {
- switch (rotation) {
- case kVideoRotation_0:
- return 0;
- case kVideoRotation_90:
- return 1;
- case kVideoRotation_180:
- return 2;
- case kVideoRotation_270:
- return 3;
- }
- assert(false);
- return 0;
-}
-
-inline VideoRotation ConvertCVOByteToVideoRotation(uint8_t rotation) {
- switch (rotation) {
- case 0:
- return kVideoRotation_0;
- case 1:
- return kVideoRotation_90;
- case 2:
- return kVideoRotation_180;
- break;
- case 3:
- return kVideoRotation_270;
- default:
- assert(false);
- return kVideoRotation_0;
- }
-}
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_CVO__H_
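A round-trip sketch for the two helpers above: each rotation maps onto a 2-bit CVO value and back unchanged. The include path assumes the header follows the interface→include rename in this change:

#include <cassert>
#include <cstdint>

#include "webrtc/modules/rtp_rtcp/include/rtp_cvo.h"

void CvoRoundTripSketch() {
  const webrtc::VideoRotation rotations[] = {
      webrtc::kVideoRotation_0, webrtc::kVideoRotation_90,
      webrtc::kVideoRotation_180, webrtc::kVideoRotation_270};
  for (webrtc::VideoRotation rotation : rotations) {
    uint8_t cvo = webrtc::ConvertVideoRotationToCVOByte(rotation);
    assert(webrtc::ConvertCVOByteToVideoRotation(cvo) == rotation);
  }
}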
diff --git a/webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h b/webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h
deleted file mode 100644
index 2809996b25..0000000000
--- a/webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-#ifndef WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_HEADER_PARSER_H_
-#define WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_HEADER_PARSER_H_
-
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-struct RTPHeader;
-
-class RtpHeaderParser {
- public:
- static RtpHeaderParser* Create();
- virtual ~RtpHeaderParser() {}
-
- // Returns true if the packet is an RTCP packet, false otherwise.
- static bool IsRtcp(const uint8_t* packet, size_t length);
-
- // Parses the packet and stores the parsed packet in |header|. Returns true on
- // success, false otherwise.
- // This method is thread-safe in the sense that it can parse multiple packets
- // at once.
- virtual bool Parse(const uint8_t* packet,
- size_t length,
- RTPHeader* header) const = 0;
-
- // Registers an RTP header extension and binds it to |id|.
- virtual bool RegisterRtpHeaderExtension(RTPExtensionType type,
- uint8_t id) = 0;
-
- // De-registers an RTP header extension.
- virtual bool DeregisterRtpHeaderExtension(RTPExtensionType type) = 0;
-};
-} // namespace webrtc
-#endif // WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_HEADER_PARSER_H_
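A sketch of the intended call pattern: screen out RTCP first, then parse. The wrapper function is illustrative, and the include path assumes the post-rename include/ location:

#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"

bool ParseIfRtp(const uint8_t* packet, size_t length,
                webrtc::RTPHeader* header) {
  if (webrtc::RtpHeaderParser::IsRtcp(packet, length))
    return false;  // RTCP; route to the RTCP receiver instead.
  rtc::scoped_ptr<webrtc::RtpHeaderParser> parser(
      webrtc::RtpHeaderParser::Create());
  return parser->Parse(packet, length, header);
}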
diff --git a/webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h b/webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h
deleted file mode 100644
index 313bef1112..0000000000
--- a/webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h
+++ /dev/null
@@ -1,193 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_PAYLOAD_REGISTRY_H_
-#define WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_PAYLOAD_REGISTRY_H_
-
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
-#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
-
-namespace webrtc {
-
-// This strategy deals with the audio/video-specific aspects
-// of payload handling.
-class RTPPayloadStrategy {
- public:
- virtual ~RTPPayloadStrategy() {}
-
- virtual bool CodecsMustBeUnique() const = 0;
-
- virtual bool PayloadIsCompatible(const RtpUtility::Payload& payload,
- const uint32_t frequency,
- const uint8_t channels,
- const uint32_t rate) const = 0;
-
- virtual void UpdatePayloadRate(RtpUtility::Payload* payload,
- const uint32_t rate) const = 0;
-
- virtual RtpUtility::Payload* CreatePayloadType(
- const char payloadName[RTP_PAYLOAD_NAME_SIZE],
- const int8_t payloadType,
- const uint32_t frequency,
- const uint8_t channels,
- const uint32_t rate) const = 0;
-
- virtual int GetPayloadTypeFrequency(
- const RtpUtility::Payload& payload) const = 0;
-
- static RTPPayloadStrategy* CreateStrategy(const bool handling_audio);
-
- protected:
- RTPPayloadStrategy() {}
-};
-
-class RTPPayloadRegistry {
- public:
- // The registry takes ownership of the strategy.
- RTPPayloadRegistry(RTPPayloadStrategy* rtp_payload_strategy);
- ~RTPPayloadRegistry();
-
- int32_t RegisterReceivePayload(
- const char payload_name[RTP_PAYLOAD_NAME_SIZE],
- const int8_t payload_type,
- const uint32_t frequency,
- const uint8_t channels,
- const uint32_t rate,
- bool* created_new_payload_type);
-
- int32_t DeRegisterReceivePayload(
- const int8_t payload_type);
-
- int32_t ReceivePayloadType(
- const char payload_name[RTP_PAYLOAD_NAME_SIZE],
- const uint32_t frequency,
- const uint8_t channels,
- const uint32_t rate,
- int8_t* payload_type) const;
-
- bool RtxEnabled() const;
-
- void SetRtxSsrc(uint32_t ssrc);
-
- bool GetRtxSsrc(uint32_t* ssrc) const;
-
- void SetRtxPayloadType(int payload_type, int associated_payload_type);
-
- bool IsRtx(const RTPHeader& header) const;
-
- // DEPRECATED. Use RestoreOriginalPacket below that takes a uint8_t*
- // restored_packet, instead of a uint8_t**.
- // TODO(noahric): Remove this when all callers have been updated.
- bool RestoreOriginalPacket(uint8_t** restored_packet,
- const uint8_t* packet,
- size_t* packet_length,
- uint32_t original_ssrc,
- const RTPHeader& header) const;
-
- bool RestoreOriginalPacket(uint8_t* restored_packet,
- const uint8_t* packet,
- size_t* packet_length,
- uint32_t original_ssrc,
- const RTPHeader& header) const;
-
- bool IsRed(const RTPHeader& header) const;
-
- // Returns true if the media of this RTP packet is encapsulated within an
- // extra header, such as RTX or RED.
- bool IsEncapsulated(const RTPHeader& header) const;
-
- bool GetPayloadSpecifics(uint8_t payload_type, PayloadUnion* payload) const;
-
- int GetPayloadTypeFrequency(uint8_t payload_type) const;
-
- bool PayloadTypeToPayload(const uint8_t payload_type,
- RtpUtility::Payload*& payload) const;
-
- void ResetLastReceivedPayloadTypes() {
- CriticalSectionScoped cs(crit_sect_.get());
- last_received_payload_type_ = -1;
- last_received_media_payload_type_ = -1;
- }
-
- // This sets the payload type of the packets being received from the network
- // on the media SSRC. For instance if packets are encapsulated with RED, this
- // payload type will be the RED payload type.
- void SetIncomingPayloadType(const RTPHeader& header);
-
- // Returns true if the new media payload type has not changed.
- bool ReportMediaPayloadType(uint8_t media_payload_type);
-
- int8_t red_payload_type() const {
- CriticalSectionScoped cs(crit_sect_.get());
- return red_payload_type_;
- }
- int8_t ulpfec_payload_type() const {
- CriticalSectionScoped cs(crit_sect_.get());
- return ulpfec_payload_type_;
- }
- int8_t last_received_payload_type() const {
- CriticalSectionScoped cs(crit_sect_.get());
- return last_received_payload_type_;
- }
- void set_last_received_payload_type(int8_t last_received_payload_type) {
- CriticalSectionScoped cs(crit_sect_.get());
- last_received_payload_type_ = last_received_payload_type;
- }
-
- int8_t last_received_media_payload_type() const {
- CriticalSectionScoped cs(crit_sect_.get());
- return last_received_media_payload_type_;
- };
-
- bool use_rtx_payload_mapping_on_restore() const {
- CriticalSectionScoped cs(crit_sect_.get());
- return use_rtx_payload_mapping_on_restore_;
- }
-
- void set_use_rtx_payload_mapping_on_restore(bool val) {
- CriticalSectionScoped cs(crit_sect_.get());
- use_rtx_payload_mapping_on_restore_ = val;
- }
-
- private:
- // Prunes the payload type map of the specific payload type, if it exists.
- void DeregisterAudioCodecOrRedTypeRegardlessOfPayloadType(
- const char payload_name[RTP_PAYLOAD_NAME_SIZE],
- const size_t payload_name_length,
- const uint32_t frequency,
- const uint8_t channels,
- const uint32_t rate);
-
- bool IsRtxInternal(const RTPHeader& header) const;
-
- rtc::scoped_ptr<CriticalSectionWrapper> crit_sect_;
- RtpUtility::PayloadTypeMap payload_type_map_;
- rtc::scoped_ptr<RTPPayloadStrategy> rtp_payload_strategy_;
- int8_t red_payload_type_;
- int8_t ulpfec_payload_type_;
- int8_t incoming_payload_type_;
- int8_t last_received_payload_type_;
- int8_t last_received_media_payload_type_;
- bool rtx_;
- // TODO(changbin): Remove rtx_payload_type_ once interop with old clients that
- // only understand one RTX PT is no longer needed.
- int rtx_payload_type_;
- // Mapping rtx_payload_type_map_[rtx] = associated.
- std::map<int, int> rtx_payload_type_map_;
- // When true, use rtx_payload_type_map_ when restoring RTX packets to get the
- // correct payload type.
- bool use_rtx_payload_mapping_on_restore_;
- uint32_t ssrc_rtx_;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_PAYLOAD_REGISTRY_H_
diff --git a/webrtc/modules/rtp_rtcp/interface/rtp_receiver.h b/webrtc/modules/rtp_rtcp/interface/rtp_receiver.h
deleted file mode 100644
index 2fb8ac5d61..0000000000
--- a/webrtc/modules/rtp_rtcp/interface/rtp_receiver.h
+++ /dev/null
@@ -1,103 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_RECEIVER_H_
-#define WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_RECEIVER_H_
-
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-class RTPPayloadRegistry;
-
-class TelephoneEventHandler {
- public:
- virtual ~TelephoneEventHandler() {}
-
- // The following three methods implement the TelephoneEventHandler interface.
- // Forward DTMFs to decoder for playout.
- virtual void SetTelephoneEventForwardToDecoder(bool forward_to_decoder) = 0;
-
- // Is forwarding of outband telephone events turned on/off?
- virtual bool TelephoneEventForwardToDecoder() const = 0;
-
- // Is TelephoneEvent configured with payload type payload_type
- virtual bool TelephoneEventPayloadType(const int8_t payload_type) const = 0;
-};
-
-class RtpReceiver {
- public:
- // Creates a video-enabled RTP receiver.
- static RtpReceiver* CreateVideoReceiver(
- Clock* clock,
- RtpData* incoming_payload_callback,
- RtpFeedback* incoming_messages_callback,
- RTPPayloadRegistry* rtp_payload_registry);
-
- // Creates an audio-enabled RTP receiver.
- static RtpReceiver* CreateAudioReceiver(
- Clock* clock,
- RtpAudioFeedback* incoming_audio_feedback,
- RtpData* incoming_payload_callback,
- RtpFeedback* incoming_messages_callback,
- RTPPayloadRegistry* rtp_payload_registry);
-
- virtual ~RtpReceiver() {}
-
- // Returns a TelephoneEventHandler if available.
- virtual TelephoneEventHandler* GetTelephoneEventHandler() = 0;
-
- // Registers a receive payload in the payload registry and notifies the media
- // receiver strategy.
- virtual int32_t RegisterReceivePayload(
- const char payload_name[RTP_PAYLOAD_NAME_SIZE],
- const int8_t payload_type,
- const uint32_t frequency,
- const uint8_t channels,
- const uint32_t rate) = 0;
-
- // De-registers |payload_type| from the payload registry.
- virtual int32_t DeRegisterReceivePayload(const int8_t payload_type) = 0;
-
- // Parses the media specific parts of an RTP packet and updates the receiver
- // state. This for instance means that any changes in SSRC and payload type are
- // detected and acted upon.
- virtual bool IncomingRtpPacket(const RTPHeader& rtp_header,
- const uint8_t* payload,
- size_t payload_length,
- PayloadUnion payload_specific,
- bool in_order) = 0;
-
- // Returns the currently configured NACK method.
- virtual NACKMethod NACK() const = 0;
-
- // Turn negative acknowledgement (NACK) requests on/off.
- virtual void SetNACKStatus(const NACKMethod method) = 0;
-
- // Gets the last received timestamp. Returns true if a packet has been
- // received, false otherwise.
- virtual bool Timestamp(uint32_t* timestamp) const = 0;
- // Gets the time in milliseconds when the last timestamp was received.
- // Returns true if a packet has been received, false otherwise.
- virtual bool LastReceivedTimeMs(int64_t* receive_time_ms) const = 0;
-
- // Returns the remote SSRC of the currently received RTP stream.
- virtual uint32_t SSRC() const = 0;
-
- // Returns the current remote CSRCs.
- virtual int32_t CSRCs(uint32_t array_of_csrc[kRtpCsrcSize]) const = 0;
-
- // Returns the current energy of the RTP stream received.
- virtual int32_t Energy(uint8_t array_of_energy[kRtpCsrcSize]) const = 0;
-};
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_RECEIVER_H_
diff --git a/webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h b/webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h
deleted file mode 100644
index f907408573..0000000000
--- a/webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h
+++ /dev/null
@@ -1,641 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_RTCP_H_
-#define WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_RTCP_H_
-
-#include <set>
-#include <vector>
-
-#include "webrtc/modules/interface/module.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
-
-namespace webrtc {
-// Forward declarations.
-class ReceiveStatistics;
-class RemoteBitrateEstimator;
-class RtpReceiver;
-class Transport;
-namespace rtcp {
-class TransportFeedback;
-}
-
-class RtpRtcp : public Module {
- public:
- struct Configuration {
- Configuration();
-
- /* id - Unique identifier of this RTP/RTCP module object
- * audio - True for an audio version of the RTP/RTCP module
- * object; false will create a video version.
- * clock - The clock to use to read time. If NULL object
- * will be using the system clock.
- * incoming_data - Callback object that will receive the incoming
- * data. May not be NULL; default callback will do
- * nothing.
- * incoming_messages - Callback object that will receive the incoming
- * RTP messages. May not be NULL; default callback
- * will do nothing.
- * outgoing_transport - Transport object that will be called when packets
- * are ready to be sent out on the network
- * intra_frame_callback - Called when the receiver requests an intra frame.
- * bandwidth_callback - Called when we receive a changed estimate from
- * the receiver of our stream.
- * audio_messages - Telephone events. May not be NULL; default
- * callback will do nothing.
- * remote_bitrate_estimator - Estimates the bandwidth available for a set of
- * streams from the same client.
- * paced_sender - Spread any bursts of packets into smaller
- * bursts to minimize packet loss.
- */
- bool audio;
- bool receiver_only;
- Clock* clock;
- ReceiveStatistics* receive_statistics;
- Transport* outgoing_transport;
- RtcpIntraFrameObserver* intra_frame_callback;
- RtcpBandwidthObserver* bandwidth_callback;
- TransportFeedbackObserver* transport_feedback_callback;
- RtcpRttStats* rtt_stats;
- RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer;
- RtpAudioFeedback* audio_messages;
- RemoteBitrateEstimator* remote_bitrate_estimator;
- RtpPacketSender* paced_sender;
- TransportSequenceNumberAllocator* transport_sequence_number_allocator;
- BitrateStatisticsObserver* send_bitrate_observer;
- FrameCountObserver* send_frame_count_observer;
- SendSideDelayObserver* send_side_delay_observer;
- };
-
- /*
- * Create a RTP/RTCP module object using the system clock.
- *
- * configuration - Configuration of the RTP/RTCP module.
- */
- static RtpRtcp* CreateRtpRtcp(const RtpRtcp::Configuration& configuration);
-
- /**************************************************************************
- *
- * Receiver functions
- *
- ***************************************************************************/
-
- virtual int32_t IncomingRtcpPacket(const uint8_t* incoming_packet,
- size_t incoming_packet_length) = 0;
-
- virtual void SetRemoteSSRC(uint32_t ssrc) = 0;
-
- /**************************************************************************
- *
- * Sender
- *
- ***************************************************************************/
-
- /*
- * set MTU
- *
- * size - Max transfer unit in bytes, default is 1500
- *
- * return -1 on failure else 0
- */
- virtual int32_t SetMaxTransferUnit(uint16_t size) = 0;
-
- /*
- * set transport overhead
- * default is IPv4 and UDP with no encryption
- *
- * TCP - true for TCP, false for UDP
- * IPv6 - true for IP version 6 false for version 4
- * authenticationOverhead - number of bytes to leave for an
- * authentication header
- *
- * return -1 on failure else 0
- */
- virtual int32_t SetTransportOverhead(
- bool TCP,
- bool IPV6,
- uint8_t authenticationOverhead = 0) = 0;
-
- /*
- * Get max payload length
- *
- * A combination of the configuration MaxTransferUnit and
- * TransportOverhead.
- * Does not account for FEC/ULP/RED overhead if FEC is enabled.
- * Does not account for RTP headers
- */
- virtual uint16_t MaxPayloadLength() const = 0;
-
- /*
- * Get max data payload length
- *
- * A combination of the configuration MaxTransferUnit, headers and
- * TransportOverhead.
- * Takes into account FEC/ULP/RED overhead if FEC is enabled.
- * Takes into account RTP headers
- */
- virtual uint16_t MaxDataPayloadLength() const = 0;
-
- /*
- * set codec name and payload type
- *
- * return -1 on failure else 0
- */
- virtual int32_t RegisterSendPayload(
- const CodecInst& voiceCodec) = 0;
-
- /*
- * set codec name and payload type
- *
- * return -1 on failure else 0
- */
- virtual int32_t RegisterSendPayload(
- const VideoCodec& videoCodec) = 0;
-
- /*
- * Unregister a send payload
- *
- * payloadType - payload type of codec
- *
- * return -1 on failure else 0
- */
- virtual int32_t DeRegisterSendPayload(int8_t payloadType) = 0;
-
- /*
- * (De)register RTP header extension type and id.
- *
- * return -1 on failure else 0
- */
- virtual int32_t RegisterSendRtpHeaderExtension(RTPExtensionType type,
- uint8_t id) = 0;
-
- virtual int32_t DeregisterSendRtpHeaderExtension(RTPExtensionType type) = 0;
-
- /*
- * get start timestamp
- */
- virtual uint32_t StartTimestamp() const = 0;
-
- /*
- * configure start timestamp, default is a random number
- *
- * timestamp - start timestamp
- */
- virtual void SetStartTimestamp(uint32_t timestamp) = 0;
-
- /*
- * Get SequenceNumber
- */
- virtual uint16_t SequenceNumber() const = 0;
-
- /*
- * Set SequenceNumber, default is a random number
- */
- virtual void SetSequenceNumber(uint16_t seq) = 0;
-
- // Returns true if the ssrc matched this module, false otherwise.
- virtual bool SetRtpStateForSsrc(uint32_t ssrc,
- const RtpState& rtp_state) = 0;
- virtual bool GetRtpStateForSsrc(uint32_t ssrc, RtpState* rtp_state) = 0;
-
- /*
- * Get SSRC
- */
- virtual uint32_t SSRC() const = 0;
-
- /*
- * configure SSRC, default is a random number
- */
- virtual void SetSSRC(uint32_t ssrc) = 0;
-
- /*
- * Set CSRC
- *
- * csrcs - vector of CSRCs
- */
- virtual void SetCsrcs(const std::vector<uint32_t>& csrcs) = 0;
-
- /*
- * Turn on/off sending RTX (RFC 4588). The modes can be set as a combination
- * of values of the enumerator RtxMode.
- */
- virtual void SetRtxSendStatus(int modes) = 0;
-
- /*
- * Get status of sending RTX (RFC 4588). The returned value can be
- * a combination of values of the enumerator RtxMode.
- */
- virtual int RtxSendStatus() const = 0;
-
- // Sets the SSRC to use when sending RTX packets. This doesn't enable RTX,
- // only the SSRC is set.
- virtual void SetRtxSsrc(uint32_t ssrc) = 0;
-
- // Sets the payload type to use when sending RTX packets. Note that this
- // doesn't enable RTX, only the payload type is set.
- virtual void SetRtxSendPayloadType(int payload_type,
- int associated_payload_type) = 0;
-
- // Gets the payload type pair of (RTX, associated) to use when sending RTX
- // packets.
- virtual std::pair<int, int> RtxSendPayloadType() const = 0;
-
- /*
- * sends kRtcpByeCode when going from true to false
- *
- * sending - on/off
- *
- * return -1 on failure else 0
- */
- virtual int32_t SetSendingStatus(bool sending) = 0;
-
- /*
- * get send status
- */
- virtual bool Sending() const = 0;
-
- /*
- * Starts/stops sending media packets; on by default
- *
- * sending - on/off
- */
- virtual void SetSendingMediaStatus(bool sending) = 0;
-
- /*
- * get send status
- */
- virtual bool SendingMedia() const = 0;
-
- /*
- * get sent bitrate in Kbit/s
- */
- virtual void BitrateSent(uint32_t* totalRate,
- uint32_t* videoRate,
- uint32_t* fecRate,
- uint32_t* nackRate) const = 0;
-
- /*
- * Used by the codec module to deliver a video or audio frame for
- * packetization.
- *
- * frameType - type of frame to send
- * payloadType - payload type of frame to send
- * timestamp - timestamp of frame to send
- * payloadData - payload buffer of frame to send
- * payloadSize - size of payload buffer to send
- * fragmentation - fragmentation offset data for fragmented frames such
- * as layers or RED
- *
- * return -1 on failure else 0
- */
- virtual int32_t SendOutgoingData(
- FrameType frameType,
- int8_t payloadType,
- uint32_t timeStamp,
- int64_t capture_time_ms,
- const uint8_t* payloadData,
- size_t payloadSize,
- const RTPFragmentationHeader* fragmentation = NULL,
- const RTPVideoHeader* rtpVideoHdr = NULL) = 0;
-
- virtual bool TimeToSendPacket(uint32_t ssrc,
- uint16_t sequence_number,
- int64_t capture_time_ms,
- bool retransmission) = 0;
-
- virtual size_t TimeToSendPadding(size_t bytes) = 0;
-
- // Called on generation of new statistics after an RTP send.
- virtual void RegisterSendChannelRtpStatisticsCallback(
- StreamDataCountersCallback* callback) = 0;
- virtual StreamDataCountersCallback*
- GetSendChannelRtpStatisticsCallback() const = 0;
-
- /**************************************************************************
- *
- * RTCP
- *
- ***************************************************************************/
-
- /*
- * Get RTCP status
- */
- virtual RtcpMode RTCP() const = 0;
-
- /*
- * configure RTCP status, i.e. on (compound or non-compound)/off
- *
- * method - RTCP method to use
- */
- virtual void SetRTCPStatus(RtcpMode method) = 0;
-
- /*
- * Set RTCP CName (i.e. unique identifier)
- *
- * return -1 on failure else 0
- */
- virtual int32_t SetCNAME(const char* c_name) = 0;
-
- /*
- * Get remote CName
- *
- * return -1 on failure else 0
- */
- virtual int32_t RemoteCNAME(uint32_t remoteSSRC,
- char cName[RTCP_CNAME_SIZE]) const = 0;
-
- /*
- * Get remote NTP
- *
- * return -1 on failure else 0
- */
- virtual int32_t RemoteNTP(
- uint32_t *ReceivedNTPsecs,
- uint32_t *ReceivedNTPfrac,
- uint32_t *RTCPArrivalTimeSecs,
- uint32_t *RTCPArrivalTimeFrac,
- uint32_t *rtcp_timestamp) const = 0;
-
- /*
- * AddMixedCNAME
- *
- * return -1 on failure else 0
- */
- virtual int32_t AddMixedCNAME(uint32_t SSRC, const char* c_name) = 0;
-
- /*
- * RemoveMixedCNAME
- *
- * return -1 on failure else 0
- */
- virtual int32_t RemoveMixedCNAME(uint32_t SSRC) = 0;
-
- /*
- * Get RoundTripTime
- *
- * return -1 on failure else 0
- */
- virtual int32_t RTT(uint32_t remoteSSRC,
- int64_t* RTT,
- int64_t* avgRTT,
- int64_t* minRTT,
- int64_t* maxRTT) const = 0;
-
- /*
- * Force a send of a RTCP packet
- * periodic SR and RR are triggered via the process function
- *
- * return -1 on failure else 0
- */
- virtual int32_t SendRTCP(RTCPPacketType rtcpPacketType) = 0;
-
- /*
- * Force a send of a RTCP packet with more than one packet type.
- * periodic SR and RR are triggered via the process function
- *
- * return -1 on failure else 0
- */
- virtual int32_t SendCompoundRTCP(
- const std::set<RTCPPacketType>& rtcpPacketTypes) = 0;
-
- /*
- * Inform the sender about the good state of the RTP receiver
- */
- virtual int32_t SendRTCPReferencePictureSelection(
- const uint64_t pictureID) = 0;
-
- /*
- * Send a RTCP Slice Loss Indication (SLI)
- * 6 least significant bits of pictureID
- */
- virtual int32_t SendRTCPSliceLossIndication(uint8_t pictureID) = 0;
-
- /*
- * Statistics of the amount of data sent
- *
- * return -1 on failure else 0
- */
- virtual int32_t DataCountersRTP(
- size_t* bytesSent,
- uint32_t* packetsSent) const = 0;
-
- /*
- * Get send statistics for the RTP and RTX stream.
- */
- virtual void GetSendStreamDataCounters(
- StreamDataCounters* rtp_counters,
- StreamDataCounters* rtx_counters) const = 0;
-
- /*
- * Get packet loss statistics for the RTP stream.
- */
- virtual void GetRtpPacketLossStats(
- bool outgoing,
- uint32_t ssrc,
- struct RtpPacketLossStats* loss_stats) const = 0;
-
- /*
- * Get received RTCP sender info
- *
- * return -1 on failure else 0
- */
- virtual int32_t RemoteRTCPStat(RTCPSenderInfo* senderInfo) = 0;
-
- /*
- * Get received RTCP report block
- *
- * return -1 on failure else 0
- */
- virtual int32_t RemoteRTCPStat(
- std::vector<RTCPReportBlock>* receiveBlocks) const = 0;
-
- /*
- * (APP) Application specific data
- *
- * return -1 on failure else 0
- */
- virtual int32_t SetRTCPApplicationSpecificData(uint8_t subType,
- uint32_t name,
- const uint8_t* data,
- uint16_t length) = 0;
- /*
- * (XR) VOIP metric
- *
- * return -1 on failure else 0
- */
- virtual int32_t SetRTCPVoIPMetrics(
- const RTCPVoIPMetric* VoIPMetric) = 0;
-
- /*
- * (XR) Receiver Reference Time Report
- */
- virtual void SetRtcpXrRrtrStatus(bool enable) = 0;
-
- virtual bool RtcpXrRrtrStatus() const = 0;
-
- /*
- * (REMB) Receiver Estimated Max Bitrate
- */
- virtual bool REMB() const = 0;
-
- virtual void SetREMBStatus(bool enable) = 0;
-
- virtual void SetREMBData(uint32_t bitrate,
- const std::vector<uint32_t>& ssrcs) = 0;
-
- /*
- * (TMMBR) Temporary Max Media Bit Rate
- */
- virtual bool TMMBR() const = 0;
-
- virtual void SetTMMBRStatus(bool enable) = 0;
-
- /*
- * (NACK)
- */
-
- /*
- * TODO(holmer): Propagate this API to VideoEngine.
- * Returns the currently configured selective retransmission settings.
- */
- virtual int SelectiveRetransmissions() const = 0;
-
- /*
- * TODO(holmer): Propagate this API to VideoEngine.
- * Sets the selective retransmission settings, which will decide which
- * packets will be retransmitted if NACKed. Settings are constructed by
- * combining the constants in enum RetransmissionMode with bitwise OR.
- * All packets are retransmitted if kRetransmitAllPackets is set, while no
- * packets are retransmitted if kRetransmitOff is set.
- * By default all packets except FEC packets are retransmitted. For VP8
- * with temporal scalability only base layer packets are retransmitted.
- *
- * Returns -1 on failure, otherwise 0.
- */
- virtual int SetSelectiveRetransmissions(uint8_t settings) = 0;
-
- /*
- * Send a Negative acknowledgement packet
- *
- * return -1 on failure else 0
- */
- virtual int32_t SendNACK(const uint16_t* nackList, uint16_t size) = 0;
-
- /*
- * Store the sent packets, needed to answer Negative acknowledgement
- * (NACK) requests
- */
- virtual void SetStorePacketsStatus(bool enable, uint16_t numberToStore) = 0;
-
- // Returns true if the module is configured to store packets.
- virtual bool StorePackets() const = 0;
-
- // Called on receipt of RTCP report block from remote side.
- virtual void RegisterRtcpStatisticsCallback(
- RtcpStatisticsCallback* callback) = 0;
- virtual RtcpStatisticsCallback*
- GetRtcpStatisticsCallback() = 0;
- // BWE feedback packets.
- virtual bool SendFeedbackPacket(const rtcp::TransportFeedback& packet) = 0;
-
- /**************************************************************************
- *
- * Audio
- *
- ***************************************************************************/
-
- /*
- * set audio packet size, used to determine when it's time to send a DTMF
- * packet in silence (CNG)
- *
- * return -1 on failure else 0
- */
- virtual int32_t SetAudioPacketSize(uint16_t packetSizeSamples) = 0;
-
- /*
- * Send a TelephoneEvent tone using RFC 2833 (4733)
- *
- * return -1 on failure else 0
- */
- virtual int32_t SendTelephoneEventOutband(uint8_t key,
- uint16_t time_ms,
- uint8_t level) = 0;
-
- /*
- * Set payload type for Redundant Audio Data RFC 2198
- *
- * return -1 on failure else 0
- */
- virtual int32_t SetSendREDPayloadType(int8_t payloadType) = 0;
-
- /*
- * Get payload type for Redundant Audio Data RFC 2198
- *
- * return -1 on failure else 0
- */
- virtual int32_t SendREDPayloadType(
- int8_t& payloadType) const = 0;
-
- /*
- * Store the audio level in dBov for header-extension-for-audio-level-
- * indication.
- * This API shall be called before transmission of an RTP packet to ensure
- * that the |level| part of the extended RTP header is updated.
- *
- * return -1 on failure else 0.
- */
- virtual int32_t SetAudioLevel(uint8_t level_dBov) = 0;
-
- /**************************************************************************
- *
- * Video
- *
- ***************************************************************************/
-
- /*
- * Set the target send bitrate
- */
- virtual void SetTargetSendBitrate(uint32_t bitrate_bps) = 0;
-
- /*
- * Turn on/off generic FEC
- */
- virtual void SetGenericFECStatus(bool enable,
- uint8_t payload_type_red,
- uint8_t payload_type_fec) = 0;
-
- /*
- * Get generic FEC setting
- */
- virtual void GenericFECStatus(bool& enable,
- uint8_t& payloadTypeRED,
- uint8_t& payloadTypeFEC) = 0;
-
-
- virtual int32_t SetFecParameters(
- const FecProtectionParams* delta_params,
- const FecProtectionParams* key_params) = 0;
-
- /*
- * Set method for requesting a new key frame
- *
- * return -1 on failure else 0
- */
- virtual int32_t SetKeyFrameRequestMethod(KeyFrameRequestMethod method) = 0;
-
- /*
- * send a request for a keyframe
- *
- * return -1 on failure else 0
- */
- virtual int32_t RequestKeyFrame() = 0;
-};
-} // namespace webrtc
-#endif // WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_RTCP_H_
diff --git a/webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h b/webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h
deleted file mode 100644
index 6936352aca..0000000000
--- a/webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h
+++ /dev/null
@@ -1,440 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_RTCP_DEFINES_H_
-#define WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_RTCP_DEFINES_H_
-
-#include <stddef.h>
-#include <list>
-
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/system_wrappers/include/clock.h"
-#include "webrtc/typedefs.h"
-
-#define RTCP_CNAME_SIZE 256 // RFC 3550 page 44, including null termination
-#define IP_PACKET_SIZE 1500 // we assume ethernet
-#define MAX_NUMBER_OF_PARALLEL_TELEPHONE_EVENTS 10
-#define TIMEOUT_SEI_MESSAGES_MS 30000 // in milliseconds
-
-namespace webrtc {
-namespace rtcp {
-class TransportFeedback;
-}
-
-const int kVideoPayloadTypeFrequency = 90000;
-
-// Minimum RTP header size in bytes.
-const uint8_t kRtpHeaderSize = 12;
-
-struct AudioPayload
-{
- uint32_t frequency;
- uint8_t channels;
- uint32_t rate;
-};
-
-struct VideoPayload
-{
- RtpVideoCodecTypes videoCodecType;
- uint32_t maxRate;
-};
-
-union PayloadUnion
-{
- AudioPayload Audio;
- VideoPayload Video;
-};
-
-enum RTPAliveType
-{
- kRtpDead = 0,
- kRtpNoRtp = 1,
- kRtpAlive = 2
-};
-
-enum ProtectionType {
- kUnprotectedPacket,
- kProtectedPacket
-};
-
-enum StorageType {
- kDontRetransmit,
- kAllowRetransmission
-};
-
-enum RTPExtensionType {
- kRtpExtensionNone,
- kRtpExtensionTransmissionTimeOffset,
- kRtpExtensionAudioLevel,
- kRtpExtensionAbsoluteSendTime,
- kRtpExtensionVideoRotation,
- kRtpExtensionTransportSequenceNumber,
-};
-
-enum RTCPAppSubTypes
-{
- kAppSubtypeBwe = 0x00
-};
-
-// TODO(sprang): Make this an enum class once rtcp_receiver has been cleaned up.
-enum RTCPPacketType : uint32_t {
- kRtcpReport = 0x0001,
- kRtcpSr = 0x0002,
- kRtcpRr = 0x0004,
- kRtcpSdes = 0x0008,
- kRtcpBye = 0x0010,
- kRtcpPli = 0x0020,
- kRtcpNack = 0x0040,
- kRtcpFir = 0x0080,
- kRtcpTmmbr = 0x0100,
- kRtcpTmmbn = 0x0200,
- kRtcpSrReq = 0x0400,
- kRtcpXrVoipMetric = 0x0800,
- kRtcpApp = 0x1000,
- kRtcpSli = 0x4000,
- kRtcpRpsi = 0x8000,
- kRtcpRemb = 0x10000,
- kRtcpTransmissionTimeOffset = 0x20000,
- kRtcpXrReceiverReferenceTime = 0x40000,
- kRtcpXrDlrrReportBlock = 0x80000,
- kRtcpTransportFeedback = 0x100000,
-};
-
-enum KeyFrameRequestMethod { kKeyFrameReqPliRtcp, kKeyFrameReqFirRtcp };
-
-enum RtpRtcpPacketType
-{
- kPacketRtp = 0,
- kPacketKeepAlive = 1
-};
-
-enum NACKMethod
-{
- kNackOff = 0,
- kNackRtcp = 2
-};
-
-enum RetransmissionMode : uint8_t {
- kRetransmitOff = 0x0,
- kRetransmitFECPackets = 0x1,
- kRetransmitBaseLayer = 0x2,
- kRetransmitHigherLayers = 0x4,
- kRetransmitAllPackets = 0xFF
-};
-
-enum RtxMode {
- kRtxOff = 0x0,
- kRtxRetransmitted = 0x1, // Only send retransmissions over RTX.
- kRtxRedundantPayloads = 0x2 // Preventively send redundant payloads
- // instead of padding.
-};
-
-const size_t kRtxHeaderSize = 2;
-
-struct RTCPSenderInfo
-{
- uint32_t NTPseconds;
- uint32_t NTPfraction;
- uint32_t RTPtimeStamp;
- uint32_t sendPacketCount;
- uint32_t sendOctetCount;
-};
-
-struct RTCPReportBlock {
- RTCPReportBlock()
- : remoteSSRC(0), sourceSSRC(0), fractionLost(0), cumulativeLost(0),
- extendedHighSeqNum(0), jitter(0), lastSR(0),
- delaySinceLastSR(0) {}
-
- RTCPReportBlock(uint32_t remote_ssrc,
- uint32_t source_ssrc,
- uint8_t fraction_lost,
- uint32_t cumulative_lost,
- uint32_t extended_high_sequence_number,
- uint32_t jitter,
- uint32_t last_sender_report,
- uint32_t delay_since_last_sender_report)
- : remoteSSRC(remote_ssrc),
- sourceSSRC(source_ssrc),
- fractionLost(fraction_lost),
- cumulativeLost(cumulative_lost),
- extendedHighSeqNum(extended_high_sequence_number),
- jitter(jitter),
- lastSR(last_sender_report),
- delaySinceLastSR(delay_since_last_sender_report) {}
-
- // Fields as described by RFC 3550 6.4.2.
- uint32_t remoteSSRC; // SSRC of sender of this report.
- uint32_t sourceSSRC; // SSRC of the RTP packet sender.
- uint8_t fractionLost;
- uint32_t cumulativeLost; // 24 bits valid.
- uint32_t extendedHighSeqNum;
- uint32_t jitter;
- uint32_t lastSR;
- uint32_t delaySinceLastSR;
-};
-
-struct RtcpReceiveTimeInfo {
- // Fields as described by RFC 3611 4.5.
- uint32_t sourceSSRC;
- uint32_t lastRR;
- uint32_t delaySinceLastRR;
-};
-
-typedef std::list<RTCPReportBlock> ReportBlockList;
-
-struct RtpState {
- RtpState()
- : sequence_number(0),
- start_timestamp(0),
- timestamp(0),
- capture_time_ms(-1),
- last_timestamp_time_ms(-1),
- media_has_been_sent(false) {}
- uint16_t sequence_number;
- uint32_t start_timestamp;
- uint32_t timestamp;
- int64_t capture_time_ms;
- int64_t last_timestamp_time_ms;
- bool media_has_been_sent;
-};
-
-class RtpData
-{
-public:
- virtual ~RtpData() {}
-
- virtual int32_t OnReceivedPayloadData(
- const uint8_t* payloadData,
- const size_t payloadSize,
- const WebRtcRTPHeader* rtpHeader) = 0;
-
- virtual bool OnRecoveredPacket(const uint8_t* packet,
- size_t packet_length) = 0;
-};
-
-class RtpFeedback
-{
-public:
- virtual ~RtpFeedback() {}
-
- // Receiving payload change or SSRC change. (return success!)
- /*
- * channels - number of channels in codec (1 = mono, 2 = stereo)
- */
- virtual int32_t OnInitializeDecoder(
- const int8_t payloadType,
- const char payloadName[RTP_PAYLOAD_NAME_SIZE],
- const int frequency,
- const uint8_t channels,
- const uint32_t rate) = 0;
-
- virtual void OnIncomingSSRCChanged(const uint32_t ssrc) = 0;
-
- virtual void OnIncomingCSRCChanged(const uint32_t CSRC,
- const bool added) = 0;
-};
-
-class RtpAudioFeedback {
- public:
- virtual void OnPlayTelephoneEvent(const uint8_t event,
- const uint16_t lengthMs,
- const uint8_t volume) = 0;
-
- protected:
- virtual ~RtpAudioFeedback() {}
-};
-
-class RtcpIntraFrameObserver {
- public:
- virtual void OnReceivedIntraFrameRequest(uint32_t ssrc) = 0;
-
- virtual void OnReceivedSLI(uint32_t ssrc,
- uint8_t picture_id) = 0;
-
- virtual void OnReceivedRPSI(uint32_t ssrc,
- uint64_t picture_id) = 0;
-
- virtual void OnLocalSsrcChanged(uint32_t old_ssrc, uint32_t new_ssrc) = 0;
-
- virtual ~RtcpIntraFrameObserver() {}
-};
-
-class RtcpBandwidthObserver {
- public:
- // REMB or TMMBR
- virtual void OnReceivedEstimatedBitrate(uint32_t bitrate) = 0;
-
- virtual void OnReceivedRtcpReceiverReport(
- const ReportBlockList& report_blocks,
- int64_t rtt,
- int64_t now_ms) = 0;
-
- virtual ~RtcpBandwidthObserver() {}
-};
-
-struct PacketInfo {
- PacketInfo(int64_t arrival_time_ms, uint16_t sequence_number)
- : PacketInfo(-1, arrival_time_ms, -1, sequence_number, 0, false) {}
-
- PacketInfo(int64_t arrival_time_ms,
- int64_t send_time_ms,
- uint16_t sequence_number,
- size_t payload_size,
- bool was_paced)
- : PacketInfo(-1,
- arrival_time_ms,
- send_time_ms,
- sequence_number,
- payload_size,
- was_paced) {}
-
- PacketInfo(int64_t creation_time_ms,
- int64_t arrival_time_ms,
- int64_t send_time_ms,
- uint16_t sequence_number,
- size_t payload_size,
- bool was_paced)
- : creation_time_ms(creation_time_ms),
- arrival_time_ms(arrival_time_ms),
- send_time_ms(send_time_ms),
- sequence_number(sequence_number),
- payload_size(payload_size),
- was_paced(was_paced) {}
-
- // Time corresponding to when this object was created.
- int64_t creation_time_ms;
- // Time corresponding to when the packet was received. Timestamped with the
- // receiver's clock.
- int64_t arrival_time_ms;
- // Time corresponding to when the packet was sent, timestamped with the
- // sender's clock.
- int64_t send_time_ms;
- // Packet identifier, incremented by 1 for every packet generated by the
- // sender.
- uint16_t sequence_number;
- // Size of the packet excluding RTP headers.
- size_t payload_size;
- // True if the packet was paced out by the pacer.
- bool was_paced;
-};
-
-class TransportFeedbackObserver {
- public:
- TransportFeedbackObserver() {}
- virtual ~TransportFeedbackObserver() {}
-
- // Note: |sequence_number| is the transport-wide sequence number. Arrival
- // time must be set to 0.
- virtual void AddPacket(uint16_t sequence_number,
- size_t length,
- bool was_paced) = 0;
-
- virtual void OnTransportFeedback(const rtcp::TransportFeedback& feedback) = 0;
-};
-
-class RtcpRttStats {
- public:
- virtual void OnRttUpdate(int64_t rtt) = 0;
-
- virtual int64_t LastProcessedRtt() const = 0;
-
- virtual ~RtcpRttStats() {};
-};
-
-// Null object version of RtpFeedback.
-class NullRtpFeedback : public RtpFeedback {
- public:
- virtual ~NullRtpFeedback() {}
-
- int32_t OnInitializeDecoder(const int8_t payloadType,
- const char payloadName[RTP_PAYLOAD_NAME_SIZE],
- const int frequency,
- const uint8_t channels,
- const uint32_t rate) override {
- return 0;
- }
-
- void OnIncomingSSRCChanged(const uint32_t ssrc) override {}
- void OnIncomingCSRCChanged(const uint32_t CSRC, const bool added) override {}
-};
-
-// Null object version of RtpData.
-class NullRtpData : public RtpData {
- public:
- virtual ~NullRtpData() {}
-
- int32_t OnReceivedPayloadData(const uint8_t* payloadData,
- const size_t payloadSize,
- const WebRtcRTPHeader* rtpHeader) override {
- return 0;
- }
-
- bool OnRecoveredPacket(const uint8_t* packet, size_t packet_length) override {
- return true;
- }
-};
-
-// Null object version of RtpAudioFeedback.
-class NullRtpAudioFeedback : public RtpAudioFeedback {
- public:
- virtual ~NullRtpAudioFeedback() {}
-
- void OnPlayTelephoneEvent(const uint8_t event,
- const uint16_t lengthMs,
- const uint8_t volume) override {}
-};
-
-// Statistics about packet loss for a single directional connection. All values
-// are totals since the connection initiated.
-struct RtpPacketLossStats {
- // The number of packets lost in events where no adjacent packets were also
- // lost.
- uint64_t single_packet_loss_count;
- // The number of events in which more than one adjacent packet was lost.
- uint64_t multiple_packet_loss_event_count;
- // The number of packets lost in events where more than one adjacent packet
- // was lost.
- uint64_t multiple_packet_loss_packet_count;
-};
-
-class RtpPacketSender {
- public:
- RtpPacketSender() {}
- virtual ~RtpPacketSender() {}
-
- enum Priority {
- kHighPriority = 0, // Pass through; will be sent immediately.
- kNormalPriority = 2, // Put in back of the line.
- kLowPriority = 3, // Put in back of the low priority line.
- };
- // Low priority packets are mixed with the normal priority packets
- // while we are paused.
-
- // Adds the packet information to the queue; TimeToSendPacket is called
- // when it's time to send.
- virtual void InsertPacket(Priority priority,
- uint32_t ssrc,
- uint16_t sequence_number,
- int64_t capture_time_ms,
- size_t bytes,
- bool retransmission) = 0;
-};
-
-class TransportSequenceNumberAllocator {
- public:
- TransportSequenceNumberAllocator() {}
- virtual ~TransportSequenceNumberAllocator() {}
-
- virtual uint16_t AllocateSequenceNumber() = 0;
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_RTCP_DEFINES_H_
diff --git a/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h b/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
index bc4aec8967..796be1304c 100644
--- a/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
+++ b/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
@@ -11,11 +11,15 @@
#ifndef WEBRTC_MODULES_RTP_RTCP_MOCKS_MOCK_RTP_RTCP_H_
#define WEBRTC_MODULES_RTP_RTCP_MOCKS_MOCK_RTP_RTCP_H_
+#include <set>
+#include <utility>
+#include <vector>
+
#include "testing/gmock/include/gmock/gmock.h"
-#include "webrtc/modules/interface/module.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/include/module.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h"
namespace webrtc {
@@ -108,7 +112,10 @@ class MockRtpRtcp : public RtpRtcp {
MOCK_CONST_METHOD0(SendingMedia,
bool());
MOCK_CONST_METHOD4(BitrateSent,
- void(uint32_t* totalRate, uint32_t* videoRate, uint32_t* fecRate, uint32_t* nackRate));
+ void(uint32_t* totalRate,
+ uint32_t* videoRate,
+ uint32_t* fecRate,
+ uint32_t* nackRate));
MOCK_METHOD1(RegisterVideoBitrateObserver, void(BitrateStatisticsObserver*));
MOCK_CONST_METHOD0(GetVideoBitrateObserver, BitrateStatisticsObserver*(void));
MOCK_CONST_METHOD1(EstimatedReceiveBandwidth,
@@ -172,7 +179,10 @@ class MockRtpRtcp : public RtpRtcp {
MOCK_CONST_METHOD1(RemoteRTCPStat,
int32_t(std::vector<RTCPReportBlock>* receiveBlocks));
MOCK_METHOD4(SetRTCPApplicationSpecificData,
- int32_t(const uint8_t subType, const uint32_t name, const uint8_t* data, const uint16_t length));
+ int32_t(const uint8_t subType,
+ const uint32_t name,
+ const uint8_t* data,
+ const uint16_t length));
MOCK_METHOD1(SetRTCPVoIPMetrics,
int32_t(const RTCPVoIPMetric* VoIPMetric));
MOCK_METHOD1(SetRtcpXrRrtrStatus,
@@ -214,12 +224,9 @@ class MockRtpRtcp : public RtpRtcp {
int32_t(const uint8_t key, const uint16_t time_ms, const uint8_t level));
MOCK_METHOD1(SetSendREDPayloadType,
int32_t(const int8_t payloadType));
- MOCK_CONST_METHOD1(SendREDPayloadType,
- int32_t(int8_t& payloadType));
+ MOCK_CONST_METHOD1(SendREDPayloadType, int32_t(int8_t* payloadType));
MOCK_METHOD2(SetRTPAudioLevelIndicationStatus,
int32_t(const bool enable, const uint8_t ID));
- MOCK_CONST_METHOD2(GetRTPAudioLevelIndicationStatus,
- int32_t(bool& enable, uint8_t& ID));
MOCK_METHOD1(SetAudioLevel,
int32_t(const uint8_t level_dBov));
MOCK_METHOD1(SetTargetSendBitrate,
@@ -229,9 +236,9 @@ class MockRtpRtcp : public RtpRtcp {
const uint8_t payload_type_red,
const uint8_t payload_type_fec));
MOCK_METHOD3(GenericFECStatus,
- void(bool& enable,
- uint8_t& payloadTypeRED,
- uint8_t& payloadTypeFEC));
+ void(bool* enable,
+ uint8_t* payloadTypeRED,
+ uint8_t* payloadTypeFEC));
MOCK_METHOD2(SetFecParameters,
int32_t(const FecProtectionParams* delta_params,
const FecProtectionParams* key_params));
@@ -239,8 +246,6 @@ class MockRtpRtcp : public RtpRtcp {
int32_t(const KeyFrameRequestMethod method));
MOCK_METHOD0(RequestKeyFrame,
int32_t());
- MOCK_CONST_METHOD3(Version,
- int32_t(char* version, uint32_t& remaining_buffer_in_bytes, uint32_t& position));
MOCK_METHOD0(TimeUntilNextProcess,
int64_t());
MOCK_METHOD0(Process,
diff --git a/webrtc/modules/rtp_rtcp/rtp_rtcp.gypi b/webrtc/modules/rtp_rtcp/rtp_rtcp.gypi
index e35a75cd0f..d340f746be 100644
--- a/webrtc/modules/rtp_rtcp/rtp_rtcp.gypi
+++ b/webrtc/modules/rtp_rtcp/rtp_rtcp.gypi
@@ -17,14 +17,14 @@
],
'sources': [
# Common
- 'interface/fec_receiver.h',
- 'interface/receive_statistics.h',
- 'interface/remote_ntp_time_estimator.h',
- 'interface/rtp_header_parser.h',
- 'interface/rtp_payload_registry.h',
- 'interface/rtp_receiver.h',
- 'interface/rtp_rtcp.h',
- 'interface/rtp_rtcp_defines.h',
+ 'include/fec_receiver.h',
+ 'include/receive_statistics.h',
+ 'include/remote_ntp_time_estimator.h',
+ 'include/rtp_header_parser.h',
+ 'include/rtp_payload_registry.h',
+ 'include/rtp_receiver.h',
+ 'include/rtp_rtcp.h',
+ 'include/rtp_rtcp_defines.h',
'source/bitrate.cc',
'source/bitrate.h',
'source/byte_io.h',
@@ -41,8 +41,40 @@
'source/rtp_rtcp_impl.h',
'source/rtcp_packet.cc',
'source/rtcp_packet.h',
+ 'source/rtcp_packet/app.cc',
+ 'source/rtcp_packet/app.h',
+ 'source/rtcp_packet/bye.cc',
+ 'source/rtcp_packet/bye.h',
+ 'source/rtcp_packet/compound_packet.cc',
+ 'source/rtcp_packet/compound_packet.h',
+ 'source/rtcp_packet/dlrr.cc',
+ 'source/rtcp_packet/dlrr.h',
+ 'source/rtcp_packet/extended_jitter_report.cc',
+ 'source/rtcp_packet/extended_jitter_report.h',
+ 'source/rtcp_packet/nack.cc',
+ 'source/rtcp_packet/nack.h',
+ 'source/rtcp_packet/pli.cc',
+ 'source/rtcp_packet/pli.h',
+ 'source/rtcp_packet/psfb.cc',
+ 'source/rtcp_packet/psfb.h',
+ 'source/rtcp_packet/receiver_report.cc',
+ 'source/rtcp_packet/receiver_report.h',
+ 'source/rtcp_packet/report_block.cc',
+ 'source/rtcp_packet/report_block.h',
+ 'source/rtcp_packet/rrtr.cc',
+ 'source/rtcp_packet/rrtr.h',
+ 'source/rtcp_packet/rtpfb.cc',
+ 'source/rtcp_packet/rtpfb.h',
+ 'source/rtcp_packet/sli.cc',
+ 'source/rtcp_packet/sli.h',
+ 'source/rtcp_packet/tmmbn.cc',
+ 'source/rtcp_packet/tmmbn.h',
+ 'source/rtcp_packet/tmmbr.cc',
+ 'source/rtcp_packet/tmmbr.h',
'source/rtcp_packet/transport_feedback.cc',
'source/rtcp_packet/transport_feedback.h',
+ 'source/rtcp_packet/voip_metric.cc',
+ 'source/rtcp_packet/voip_metric.h',
'source/rtcp_receiver.cc',
'source/rtcp_receiver.h',
'source/rtcp_receiver_help.cc',
diff --git a/webrtc/modules/rtp_rtcp/source/CPPLINT.cfg b/webrtc/modules/rtp_rtcp/source/CPPLINT.cfg
new file mode 100644
index 0000000000..c318452482
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/CPPLINT.cfg
@@ -0,0 +1,6 @@
+#tmmbr_help was refactored in CL#1474693002
+exclude_files=tmmbr_help.*
+#rtcp_utility is planned to be removed when webrtc:5260 is finished.
+exclude_files=rtcp_utility.*
+#rtcp_receiver/rtcp_receiver_help will be refactored more deeply as part of webrtc:5260
+exclude_files=rtcp_receiver.*
diff --git a/webrtc/modules/rtp_rtcp/source/byte_io.h b/webrtc/modules/rtp_rtcp/source/byte_io.h
index d8903b8483..c69c178078 100644
--- a/webrtc/modules/rtp_rtcp/source/byte_io.h
+++ b/webrtc/modules/rtp_rtcp/source/byte_io.h
@@ -51,17 +51,14 @@ namespace webrtc {
// platform that doesn't use two's complement, implement conversion to/from
// wire format.
-namespace {
-inline void AssertTwosComplement() {
- // Assume the if any one signed integer type is two's complement, then all
- // other will be too.
- static_assert(
- (-1 & 0x03) == 0x03,
- "Only two's complement representation of signed integers supported.");
-}
+// Assume that if any one signed integer type is two's complement, then all
+// the others will be too.
+static_assert(
+ (-1 & 0x03) == 0x03,
+ "Only two's complement representation of signed integers supported.");
+
// Plain const char* won't work for static_assert, use #define instead.
#define kSizeErrorMsg "Byte size must be less than or equal to data type size."
-}
// Utility class for getting the unsigned equivalent of a signed type.
template <typename T>
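
The byte_io.h hunk moves the compile-time check out of a wrapper function in an unnamed namespace to namespace scope, where it reads as what it is — an unconditional static_assert — and the unnamed-namespace-in-header lint issue disappears. The test itself relies on the bit pattern of -1; a self-contained copy:

```cpp
// In two's complement, -1 has all bits set, so (-1 & 0x03) == 0x03 holds.
// In ones' complement (-1 & 0x03) == 0x02, and in sign-magnitude it is 0x01,
// so either representation fails this assertion at compile time.
static_assert((-1 & 0x03) == 0x03,
              "Only two's complement representation of signed integers "
              "supported.");

int main() {
  return 0;
}
```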
diff --git a/webrtc/modules/rtp_rtcp/source/dtmf_queue.cc b/webrtc/modules/rtp_rtcp/source/dtmf_queue.cc
index becea912ab..ab21b8704a 100644
--- a/webrtc/modules/rtp_rtcp/source/dtmf_queue.cc
+++ b/webrtc/modules/rtp_rtcp/source/dtmf_queue.cc
@@ -10,7 +10,7 @@
#include "webrtc/modules/rtp_rtcp/source/dtmf_queue.h"
-#include <string.h> //memset
+#include <string.h>
namespace webrtc {
DTMFqueue::DTMFqueue()
@@ -21,7 +21,9 @@ DTMFqueue::DTMFqueue()
memset(dtmf_level_, 0, sizeof(dtmf_level_));
}
-DTMFqueue::~DTMFqueue() { delete dtmf_critsect_; }
+DTMFqueue::~DTMFqueue() {
+ delete dtmf_critsect_;
+}
int32_t DTMFqueue::AddDTMF(uint8_t key, uint16_t len, uint8_t level) {
CriticalSectionScoped lock(dtmf_critsect_);
diff --git a/webrtc/modules/rtp_rtcp/source/fec_private_tables_bursty.h b/webrtc/modules/rtp_rtcp/source/fec_private_tables_bursty.h
index 6105ae1d24..0b39908bb1 100644
--- a/webrtc/modules/rtp_rtcp/source/fec_private_tables_bursty.h
+++ b/webrtc/modules/rtp_rtcp/source/fec_private_tables_bursty.h
@@ -27,7 +27,8 @@
#include "webrtc/typedefs.h"
-namespace {
+namespace webrtc {
+namespace fec_private_tables {
const uint8_t kMaskBursty1_1[2] = {
0x80, 0x00
@@ -756,5 +757,6 @@ const uint8_t** kPacketMaskBurstyTbl[12] = {
kPacketMaskBursty12
};
-} // namespace
+} // namespace fec_private_tables
+} // namespace webrtc
#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_FEC_PRIVATE_TABLES_BURSTY_H_
diff --git a/webrtc/modules/rtp_rtcp/source/fec_private_tables_random.h b/webrtc/modules/rtp_rtcp/source/fec_private_tables_random.h
index ff6de43b76..295d749873 100644
--- a/webrtc/modules/rtp_rtcp/source/fec_private_tables_random.h
+++ b/webrtc/modules/rtp_rtcp/source/fec_private_tables_random.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_FEC_PRIVATE_TABLES_H_
-#define WEBRTC_MODULES_RTP_RTCP_SOURCE_FEC_PRIVATE_TABLES_H_
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_FEC_PRIVATE_TABLES_RANDOM_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_FEC_PRIVATE_TABLES_RANDOM_H_
// This file contains a set of packets masks for the FEC code. The masks in
// this table are specifically designed to favor recovery to random loss.
@@ -17,7 +17,8 @@
#include "webrtc/typedefs.h"
-namespace {
+namespace webrtc {
+namespace fec_private_tables {
const uint8_t kMaskRandom10_1[2] = {
0xff, 0xc0
@@ -24518,5 +24519,6 @@ const uint8_t** kPacketMaskRandomTbl[48] = {
kPacketMaskRandom48
};
-} // namespace
-#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_FEC_PRIVATE_TABLES_H_
+} // namespace fec_private_tables
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_FEC_PRIVATE_TABLES_RANDOM_H_
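
Both FEC table headers get the same treatment here: the tables move from an unnamed namespace, which gives every including translation unit its own internal-linkage copy and is disallowed in headers by cpplint, into webrtc::fec_private_tables, so consumers can name them explicitly (as the forward_error_correction_internal.cc hunk further below does with using-declarations). A toy sketch of the resulting shape, with a hypothetical stand-in table:

```cpp
#include <cstdint>

namespace webrtc {
namespace fec_private_tables {
// Hypothetical stand-in for tables like kMaskBursty1_1.
const uint8_t kMaskSketch[2] = {0x80, 0x00};
}  // namespace fec_private_tables
}  // namespace webrtc

// Consumers can now import the names explicitly and greppably.
using webrtc::fec_private_tables::kMaskSketch;

int main() {
  return kMaskSketch[0] == 0x80 ? 0 : 1;
}
```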
diff --git a/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.h b/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.h
index 24db39b902..6a63813f40 100644
--- a/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.h
+++ b/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.h
@@ -14,8 +14,8 @@
// This header is included to get the nested declaration of Packet structure.
#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/rtp_rtcp/interface/fec_receiver.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/include/fec_receiver.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/forward_error_correction.h"
#include "webrtc/typedefs.h"
@@ -25,7 +25,7 @@ class CriticalSectionWrapper;
class FecReceiverImpl : public FecReceiver {
public:
- FecReceiverImpl(RtpData* callback);
+ explicit FecReceiverImpl(RtpData* callback);
virtual ~FecReceiverImpl();
int32_t AddReceivedRedPacket(const RTPHeader& rtp_header,
diff --git a/webrtc/modules/rtp_rtcp/source/fec_receiver_unittest.cc b/webrtc/modules/rtp_rtcp/source/fec_receiver_unittest.cc
index f64b537a52..bb22e1d580 100644
--- a/webrtc/modules/rtp_rtcp/source/fec_receiver_unittest.cc
+++ b/webrtc/modules/rtp_rtcp/source/fec_receiver_unittest.cc
@@ -15,8 +15,8 @@
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/rtp_rtcp/interface/fec_receiver.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
+#include "webrtc/modules/rtp_rtcp/include/fec_receiver.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
#include "webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
#include "webrtc/modules/rtp_rtcp/source/fec_test_helper.h"
diff --git a/webrtc/modules/rtp_rtcp/source/fec_test_helper.h b/webrtc/modules/rtp_rtcp/source/fec_test_helper.h
index e1791adba3..aacc2d1ecc 100644
--- a/webrtc/modules/rtp_rtcp/source/fec_test_helper.h
+++ b/webrtc/modules/rtp_rtcp/source/fec_test_helper.h
@@ -11,7 +11,7 @@
#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_FEC_TEST_HELPER_H_
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_FEC_TEST_HELPER_H_
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/rtp_rtcp/source/forward_error_correction.h"
namespace webrtc {
@@ -54,6 +54,6 @@ class FrameGenerator {
uint16_t seq_num_;
uint32_t timestamp_;
};
-}
+} // namespace webrtc
#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_FEC_TEST_HELPER_H_
diff --git a/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc b/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc
index aad418f015..b85d813790 100644
--- a/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc
+++ b/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc
@@ -10,15 +10,15 @@
#include "webrtc/modules/rtp_rtcp/source/forward_error_correction.h"
-#include <assert.h>
#include <stdlib.h>
#include <string.h>
#include <algorithm>
#include <iterator>
+#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
#include "webrtc/modules/rtp_rtcp/source/forward_error_correction_internal.h"
@@ -36,11 +36,11 @@ const uint8_t kUlpHeaderSizeLBitClear = (2 + kMaskSizeLBitClear);
// Transport header size in bytes. Assume UDP/IPv4 as a reasonable minimum.
const uint8_t kTransportOverhead = 28;
-enum {
- kMaxFecPackets = ForwardErrorCorrection::kMaxMediaPackets
-};
+enum { kMaxFecPackets = ForwardErrorCorrection::kMaxMediaPackets };
-int32_t ForwardErrorCorrection::Packet::AddRef() { return ++ref_count_; }
+int32_t ForwardErrorCorrection::Packet::AddRef() {
+ return ++ref_count_;
+}
int32_t ForwardErrorCorrection::Packet::Release() {
int32_t ref_count;
@@ -72,7 +72,8 @@ class FecPacket : public ForwardErrorCorrection::SortablePacket {
};
bool ForwardErrorCorrection::SortablePacket::LessThan(
- const SortablePacket* first, const SortablePacket* second) {
+ const SortablePacket* first,
+ const SortablePacket* second) {
return IsNewerSequenceNumber(second->seq_num, first->seq_num);
}
@@ -83,8 +84,7 @@ ForwardErrorCorrection::RecoveredPacket::RecoveredPacket() {}
ForwardErrorCorrection::RecoveredPacket::~RecoveredPacket() {}
ForwardErrorCorrection::ForwardErrorCorrection()
- : generated_fec_packets_(kMaxMediaPackets),
- fec_packet_received_(false) {}
+ : generated_fec_packets_(kMaxMediaPackets), fec_packet_received_(false) {}
ForwardErrorCorrection::~ForwardErrorCorrection() {}
@@ -112,7 +112,6 @@ int32_t ForwardErrorCorrection::GenerateFEC(const PacketList& media_packet_list,
FecMaskType fec_mask_type,
PacketList* fec_packet_list) {
const uint16_t num_media_packets = media_packet_list.size();
-
// Sanity check arguments.
assert(num_media_packets > 0);
assert(num_important_packets >= 0 &&
@@ -126,12 +125,10 @@ int32_t ForwardErrorCorrection::GenerateFEC(const PacketList& media_packet_list,
}
bool l_bit = (num_media_packets > 8 * kMaskSizeLBitClear);
- int num_maskBytes = l_bit ? kMaskSizeLBitSet : kMaskSizeLBitClear;
+ int num_mask_bytes = l_bit ? kMaskSizeLBitSet : kMaskSizeLBitClear;
// Do some error checking on the media packets.
- PacketList::const_iterator media_list_it = media_packet_list.begin();
- while (media_list_it != media_packet_list.end()) {
- Packet* media_packet = *media_list_it;
+ for (Packet* media_packet : media_packet_list) {
assert(media_packet);
if (media_packet->length < kRtpHeaderSize) {
@@ -146,7 +143,6 @@ int32_t ForwardErrorCorrection::GenerateFEC(const PacketList& media_packet_list,
LOG(LS_WARNING) << "Media packet " << media_packet->length << " bytes "
<< "with overhead is larger than " << IP_PACKET_SIZE;
}
- media_list_it++;
}
int num_fec_packets =
@@ -167,29 +163,29 @@ int32_t ForwardErrorCorrection::GenerateFEC(const PacketList& media_packet_list,
// -- Generate packet masks --
// Always allocate space for a large mask.
- uint8_t* packet_mask = new uint8_t[num_fec_packets * kMaskSizeLBitSet];
- memset(packet_mask, 0, num_fec_packets * num_maskBytes);
+ rtc::scoped_ptr<uint8_t[]> packet_mask(
+ new uint8_t[num_fec_packets * kMaskSizeLBitSet]);
+ memset(packet_mask.get(), 0, num_fec_packets * num_mask_bytes);
internal::GeneratePacketMasks(num_media_packets, num_fec_packets,
num_important_packets, use_unequal_protection,
- mask_table, packet_mask);
-
- int num_maskBits = InsertZerosInBitMasks(media_packet_list, packet_mask,
- num_maskBytes, num_fec_packets);
+ mask_table, packet_mask.get());
- l_bit = (num_maskBits > 8 * kMaskSizeLBitClear);
+ int num_mask_bits = InsertZerosInBitMasks(
+ media_packet_list, packet_mask.get(), num_mask_bytes, num_fec_packets);
- if (num_maskBits < 0) {
- delete[] packet_mask;
+ if (num_mask_bits < 0) {
return -1;
}
+ l_bit = (num_mask_bits > 8 * kMaskSizeLBitClear);
if (l_bit) {
- num_maskBytes = kMaskSizeLBitSet;
+ num_mask_bytes = kMaskSizeLBitSet;
}
- GenerateFecBitStrings(media_packet_list, packet_mask, num_fec_packets, l_bit);
- GenerateFecUlpHeaders(media_packet_list, packet_mask, l_bit, num_fec_packets);
+ GenerateFecBitStrings(media_packet_list, packet_mask.get(), num_fec_packets,
+ l_bit);
+ GenerateFecUlpHeaders(media_packet_list, packet_mask.get(), l_bit,
+ num_fec_packets);
- delete[] packet_mask;
return 0;
}
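
Besides the hackerStyle-to-snake_case renames, the substantive change in this hunk is ownership: the packet mask moves from a raw new[] with two hand-written delete[] calls (one on the early error return, one at the end) into rtc::scoped_ptr<uint8_t[]>, which releases the array on every path. A minimal sketch of the same pattern, using std::unique_ptr in place of rtc::scoped_ptr:

```cpp
#include <cstdint>
#include <cstring>
#include <memory>

// Hypothetical stand-in for the mask computation; returns < 0 on failure.
int ComputeMaskBits(uint8_t* /*mask*/) { return -1; }

int GenerateSketch(int num_fec_packets, int mask_bytes) {
  // Owning pointer: the array is freed on *every* return path below.
  std::unique_ptr<uint8_t[]> packet_mask(
      new uint8_t[num_fec_packets * mask_bytes]);
  std::memset(packet_mask.get(), 0, num_fec_packets * mask_bytes);

  if (ComputeMaskBits(packet_mask.get()) < 0)
    return -1;  // the old code needed an explicit delete[] here

  return 0;
}

int main() { return GenerateSketch(2, 6) == -1 ? 0 : 1; }
```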
@@ -206,26 +202,30 @@ int ForwardErrorCorrection::GetNumberOfFecPackets(int num_media_packets,
}
void ForwardErrorCorrection::GenerateFecBitStrings(
- const PacketList& media_packet_list, uint8_t* packet_mask,
- int num_fec_packets, bool l_bit) {
+ const PacketList& media_packet_list,
+ uint8_t* packet_mask,
+ int num_fec_packets,
+ bool l_bit) {
if (media_packet_list.empty()) {
return;
}
uint8_t media_payload_length[2];
- const int num_maskBytes = l_bit ? kMaskSizeLBitSet : kMaskSizeLBitClear;
+ const int num_mask_bytes = l_bit ? kMaskSizeLBitSet : kMaskSizeLBitClear;
const uint16_t ulp_header_size =
l_bit ? kUlpHeaderSizeLBitSet : kUlpHeaderSizeLBitClear;
const uint16_t fec_rtp_offset =
kFecHeaderSize + ulp_header_size - kRtpHeaderSize;
for (int i = 0; i < num_fec_packets; ++i) {
+ Packet* const fec_packet = &generated_fec_packets_[i];
PacketList::const_iterator media_list_it = media_packet_list.begin();
- uint32_t pkt_mask_idx = i * num_maskBytes;
+ uint32_t pkt_mask_idx = i * num_mask_bytes;
uint32_t media_pkt_idx = 0;
uint16_t fec_packet_length = 0;
uint16_t prev_seq_num = ParseSequenceNumber((*media_list_it)->data);
while (media_list_it != media_packet_list.end()) {
- // Each FEC packet has a multiple byte mask.
+ // Each FEC packet has a multiple byte mask. Determine if this media
+ // packet should be included in FEC packet i.
if (packet_mask[pkt_mask_idx] & (1 << (7 - media_pkt_idx))) {
Packet* media_packet = *media_list_it;
@@ -235,42 +235,40 @@ void ForwardErrorCorrection::GenerateFecBitStrings(
fec_packet_length = media_packet->length + fec_rtp_offset;
// On the first protected packet, we don't need to XOR.
- if (generated_fec_packets_[i].length == 0) {
+ if (fec_packet->length == 0) {
// Copy the first 2 bytes of the RTP header.
- memcpy(generated_fec_packets_[i].data, media_packet->data, 2);
+ memcpy(fec_packet->data, media_packet->data, 2);
// Copy the 5th to 8th bytes of the RTP header.
- memcpy(&generated_fec_packets_[i].data[4], &media_packet->data[4], 4);
+ memcpy(&fec_packet->data[4], &media_packet->data[4], 4);
// Copy network-ordered payload size.
- memcpy(&generated_fec_packets_[i].data[8], media_payload_length, 2);
+ memcpy(&fec_packet->data[8], media_payload_length, 2);
// Copy RTP payload, leaving room for the ULP header.
- memcpy(
- &generated_fec_packets_[i].data[kFecHeaderSize + ulp_header_size],
- &media_packet->data[kRtpHeaderSize],
- media_packet->length - kRtpHeaderSize);
+ memcpy(&fec_packet->data[kFecHeaderSize + ulp_header_size],
+ &media_packet->data[kRtpHeaderSize],
+ media_packet->length - kRtpHeaderSize);
} else {
// XOR with the first 2 bytes of the RTP header.
- generated_fec_packets_[i].data[0] ^= media_packet->data[0];
- generated_fec_packets_[i].data[1] ^= media_packet->data[1];
+ fec_packet->data[0] ^= media_packet->data[0];
+ fec_packet->data[1] ^= media_packet->data[1];
// XOR with the 5th to 8th bytes of the RTP header.
for (uint32_t j = 4; j < 8; ++j) {
- generated_fec_packets_[i].data[j] ^= media_packet->data[j];
+ fec_packet->data[j] ^= media_packet->data[j];
}
// XOR with the network-ordered payload size.
- generated_fec_packets_[i].data[8] ^= media_payload_length[0];
- generated_fec_packets_[i].data[9] ^= media_payload_length[1];
+ fec_packet->data[8] ^= media_payload_length[0];
+ fec_packet->data[9] ^= media_payload_length[1];
// XOR with RTP payload, leaving room for the ULP header.
for (int32_t j = kFecHeaderSize + ulp_header_size;
j < fec_packet_length; j++) {
- generated_fec_packets_[i].data[j] ^=
- media_packet->data[j - fec_rtp_offset];
+ fec_packet->data[j] ^= media_packet->data[j - fec_rtp_offset];
}
}
- if (fec_packet_length > generated_fec_packets_[i].length) {
- generated_fec_packets_[i].length = fec_packet_length;
+ if (fec_packet_length > fec_packet->length) {
+ fec_packet->length = fec_packet_length;
}
}
media_list_it++;
@@ -279,19 +277,18 @@ void ForwardErrorCorrection::GenerateFecBitStrings(
media_pkt_idx += static_cast<uint16_t>(seq_num - prev_seq_num);
prev_seq_num = seq_num;
}
- if (media_pkt_idx == 8) {
- // Switch to the next mask byte.
- media_pkt_idx = 0;
- pkt_mask_idx++;
- }
+ pkt_mask_idx += media_pkt_idx / 8;
+ media_pkt_idx %= 8;
}
- assert(generated_fec_packets_[i].length);
- //Note: This shouldn't happen: means packet mask is wrong or poorly designed
+ RTC_DCHECK_GT(fec_packet->length, 0u)
+ << "Packet mask is wrong or poorly designed.";
}
}
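
The replacement of the `if (media_pkt_idx == 8)` byte switch with division and modulo is the key fix in this function, and it is what the new NoEmptyFecWithSeqNumGaps test further below exercises: a sequence-number gap advances media_pkt_idx by more than one, so it can jump straight over 8, the old equality test never fired, and pkt_mask_idx stayed stuck on the same mask byte. A worked sketch of the two behaviors:

```cpp
#include <cassert>

int main() {
  // Say we are at bit 6 of mask byte 0 and the next media packet's sequence
  // number is 5 ahead (a gap), so media_pkt_idx advances to 11.
  unsigned pkt_mask_idx = 0;
  unsigned media_pkt_idx = 6 + 5;  // two bits into the *next* mask byte

  // Old logic: `if (media_pkt_idx == 8) { media_pkt_idx = 0; pkt_mask_idx++; }`
  // never fires for 11, so byte 0 keeps being read with an out-of-range bit
  // position. The new logic carries whole bytes over explicitly:
  pkt_mask_idx += media_pkt_idx / 8;  // advance one mask byte
  media_pkt_idx %= 8;                 // now bit 3 of the new byte

  assert(pkt_mask_idx == 1 && media_pkt_idx == 3);
  return 0;
}
```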
int ForwardErrorCorrection::InsertZerosInBitMasks(
- const PacketList& media_packets, uint8_t* packet_mask, int num_mask_bytes,
+ const PacketList& media_packets,
+ uint8_t* packet_mask,
+ int num_mask_bytes,
int num_fec_packets) {
uint8_t* new_mask = NULL;
if (media_packets.size() <= 1) {
@@ -307,6 +304,9 @@ int ForwardErrorCorrection::InsertZerosInBitMasks(
// required.
return media_packets.size();
}
+ // We can only protect 8 * kMaskSizeLBitSet packets.
+ if (total_missing_seq_nums + media_packets.size() > 8 * kMaskSizeLBitSet)
+ return -1;
// Allocate the new mask.
int new_mask_bytes = kMaskSizeLBitClear;
if (media_packets.size() + total_missing_seq_nums > 8 * kMaskSizeLBitClear) {
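
The new guard makes an implicit capacity limit explicit. Assuming the usual WebRTC mask sizes (kMaskSizeLBitClear = 2 bytes, kMaskSizeLBitSet = 6 bytes; the constants are defined earlier in this file, not in this hunk), a mask row can address at most 8 * 6 = 48 columns, and every missing sequence number consumes a zero column, so real packets plus gaps must fit in 48. A sketch of the arithmetic:

```cpp
#include <cstddef>

// Assumed to match the constant defined earlier in the file.
const int kMaskSizeLBitSet = 6;  // 48-bit mask when the L bit is set

bool FitsInMask(size_t num_media_packets, size_t missing_seq_nums) {
  // Each missing sequence number still occupies a (zero) mask column.
  return num_media_packets + missing_seq_nums <=
         static_cast<size_t>(8 * kMaskSizeLBitSet);  // at most 48 columns
}

int main() {
  // 10 real packets with 40 holes between them need 50 columns: reject.
  return (!FitsInMask(10, 40) && FitsInMask(10, 38)) ? 0 : 1;
}
```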
@@ -357,7 +357,8 @@ int ForwardErrorCorrection::InsertZerosInBitMasks(
return new_bit_index;
}
-void ForwardErrorCorrection::InsertZeroColumns(int num_zeros, uint8_t* new_mask,
+void ForwardErrorCorrection::InsertZeroColumns(int num_zeros,
+ uint8_t* new_mask,
int new_mask_bytes,
int num_fec_packets,
int new_bit_index) {
@@ -368,9 +369,12 @@ void ForwardErrorCorrection::InsertZeroColumns(int num_zeros, uint8_t* new_mask,
}
}
-void ForwardErrorCorrection::CopyColumn(uint8_t* new_mask, int new_mask_bytes,
- uint8_t* old_mask, int old_mask_bytes,
- int num_fec_packets, int new_bit_index,
+void ForwardErrorCorrection::CopyColumn(uint8_t* new_mask,
+ int new_mask_bytes,
+ uint8_t* old_mask,
+ int old_mask_bytes,
+ int num_fec_packets,
+ int new_bit_index,
int old_bit_index) {
// Copy column from the old mask to the beginning of the new mask and shift it
// out from the old mask.
@@ -386,7 +390,9 @@ void ForwardErrorCorrection::CopyColumn(uint8_t* new_mask, int new_mask_bytes,
}
void ForwardErrorCorrection::GenerateFecUlpHeaders(
- const PacketList& media_packet_list, uint8_t* packet_mask, bool l_bit,
+ const PacketList& media_packet_list,
+ uint8_t* packet_mask,
+ bool l_bit,
int num_fec_packets) {
// -- Generate FEC and ULP headers --
//
@@ -412,33 +418,34 @@ void ForwardErrorCorrection::GenerateFecUlpHeaders(
PacketList::const_iterator media_list_it = media_packet_list.begin();
Packet* media_packet = *media_list_it;
assert(media_packet != NULL);
- int num_maskBytes = l_bit ? kMaskSizeLBitSet : kMaskSizeLBitClear;
+ int num_mask_bytes = l_bit ? kMaskSizeLBitSet : kMaskSizeLBitClear;
const uint16_t ulp_header_size =
l_bit ? kUlpHeaderSizeLBitSet : kUlpHeaderSizeLBitClear;
for (int i = 0; i < num_fec_packets; ++i) {
+ Packet* const fec_packet = &generated_fec_packets_[i];
// -- FEC header --
- generated_fec_packets_[i].data[0] &= 0x7f; // Set E to zero.
+ fec_packet->data[0] &= 0x7f; // Set E to zero.
if (l_bit == 0) {
- generated_fec_packets_[i].data[0] &= 0xbf; // Clear the L bit.
+ fec_packet->data[0] &= 0xbf; // Clear the L bit.
} else {
- generated_fec_packets_[i].data[0] |= 0x40; // Set the L bit.
+ fec_packet->data[0] |= 0x40; // Set the L bit.
}
// Two byte sequence number from first RTP packet to SN base.
// We use the same sequence number base for every FEC packet,
// but that's not required in general.
- memcpy(&generated_fec_packets_[i].data[2], &media_packet->data[2], 2);
+ memcpy(&fec_packet->data[2], &media_packet->data[2], 2);
// -- ULP header --
// Copy the payload size to the protection length field.
// (We protect the entire packet.)
ByteWriter<uint16_t>::WriteBigEndian(
- &generated_fec_packets_[i].data[10],
- generated_fec_packets_[i].length - kFecHeaderSize - ulp_header_size);
+ &fec_packet->data[10],
+ fec_packet->length - kFecHeaderSize - ulp_header_size);
// Copy the packet mask.
- memcpy(&generated_fec_packets_[i].data[12], &packet_mask[i * num_maskBytes],
- num_maskBytes);
+ memcpy(&fec_packet->data[12], &packet_mask[i * num_mask_bytes],
+ num_mask_bytes);
}
}
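
For reference while reading this hunk: GenerateFecUlpHeaders writes the RFC 5109 layout directly into byte offsets — bit 7 of byte 0 is the E bit (cleared), bit 6 the L bit, bytes 2-3 the SN base copied from the first media packet, bytes 10-11 the ULP protection length, and the packet mask from byte 12 on. A sketch of those writes, with hypothetical field values:

```cpp
#include <cstdint>
#include <cstring>

// Minimal big-endian 16-bit write, standing in for ByteWriter<uint16_t>.
void WriteBigEndian16(uint8_t* data, uint16_t val) {
  data[0] = static_cast<uint8_t>(val >> 8);
  data[1] = static_cast<uint8_t>(val & 0xff);
}

int main() {
  uint8_t fec[32] = {0};
  const bool l_bit = true;             // use the long (6-byte) mask
  const uint16_t sn_base = 41;         // seq num of first protected packet
  const uint16_t protection_len = 20;  // hypothetical protection length

  fec[0] &= 0x7f;                                      // E bit = 0
  fec[0] = l_bit ? (fec[0] | 0x40) : (fec[0] & 0xbf);  // L bit
  WriteBigEndian16(&fec[2], sn_base);                  // SN base
  WriteBigEndian16(&fec[10], protection_len);          // ULP protection length
  const uint8_t mask[6] = {0xff, 0xc0, 0, 0, 0, 0};    // example mask row
  std::memcpy(&fec[12], mask, sizeof(mask));           // packet mask

  return (fec[0] == 0x40 && fec[3] == 41) ? 0 : 1;
}
```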
@@ -460,7 +467,7 @@ void ForwardErrorCorrection::ResetState(
ProtectedPacketList::iterator protected_packet_list_it;
protected_packet_list_it = fec_packet->protected_pkt_list.begin();
while (protected_packet_list_it != fec_packet->protected_pkt_list.end()) {
- delete* protected_packet_list_it;
+ delete *protected_packet_list_it;
protected_packet_list_it =
fec_packet->protected_pkt_list.erase(protected_packet_list_it);
}
@@ -472,7 +479,8 @@ void ForwardErrorCorrection::ResetState(
}
void ForwardErrorCorrection::InsertMediaPacket(
- ReceivedPacket* rx_packet, RecoveredPacketList* recovered_packet_list) {
+ ReceivedPacket* rx_packet,
+ RecoveredPacketList* recovered_packet_list) {
RecoveredPacketList::iterator recovered_packet_list_it =
recovered_packet_list->begin();
@@ -538,9 +546,9 @@ void ForwardErrorCorrection::InsertFECPacket(
const uint16_t seq_num_base =
ByteReader<uint16_t>::ReadBigEndian(&fec_packet->pkt->data[2]);
- const uint16_t maskSizeBytes =
- (fec_packet->pkt->data[0] & 0x40) ? kMaskSizeLBitSet
- : kMaskSizeLBitClear; // L bit set?
+ const uint16_t maskSizeBytes = (fec_packet->pkt->data[0] & 0x40)
+ ? kMaskSizeLBitSet
+ : kMaskSizeLBitClear; // L bit set?
for (uint16_t byte_idx = 0; byte_idx < maskSizeBytes; ++byte_idx) {
uint8_t packet_mask = fec_packet->pkt->data[12 + byte_idx];
@@ -574,7 +582,8 @@ void ForwardErrorCorrection::InsertFECPacket(
}
void ForwardErrorCorrection::AssignRecoveredPackets(
- FecPacket* fec_packet, const RecoveredPacketList* recovered_packets) {
+ FecPacket* fec_packet,
+ const RecoveredPacketList* recovered_packets) {
// Search for missing packets which have arrived or have been recovered by
// another FEC packet.
ProtectedPacketList* not_recovered = &fec_packet->protected_pkt_list;
@@ -599,7 +608,6 @@ void ForwardErrorCorrection::AssignRecoveredPackets(
void ForwardErrorCorrection::InsertPackets(
ReceivedPacketList* received_packet_list,
RecoveredPacketList* recovered_packet_list) {
-
while (!received_packet_list->empty()) {
ReceivedPacket* rx_packet = received_packet_list->front();
@@ -611,9 +619,9 @@ void ForwardErrorCorrection::InsertPackets(
// old FEC packets based on timestamp information or better sequence number
// thresholding (e.g., to distinguish between wrap-around and reordering).
if (!fec_packet_list_.empty()) {
- uint16_t seq_num_diff = abs(
- static_cast<int>(rx_packet->seq_num) -
- static_cast<int>(fec_packet_list_.front()->seq_num));
+ uint16_t seq_num_diff =
+ abs(static_cast<int>(rx_packet->seq_num) -
+ static_cast<int>(fec_packet_list_.front()->seq_num));
if (seq_num_diff > 0x3fff) {
DiscardFECPacket(fec_packet_list_.front());
fec_packet_list_.pop_front();
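
The reformatted seq_num_diff computation is behavior-preserving; what it implements is the crude staleness test the TODO above refers to: FEC packets whose 16-bit sequence number is more than 0x3fff (a quarter of the 65536-wide sequence space) from a newly received packet are dropped. A sketch of the test, including the wrap-around weakness the TODO calls out:

```cpp
#include <cstdint>
#include <cstdlib>

bool IsStaleFec(uint16_t rx_seq, uint16_t fec_seq) {
  // Plain integer distance, as in the hunk above. Note it does not account
  // for wrap-around -- e.g. 0xffff vs 0x0000 looks maximally far apart --
  // which is exactly what the TODO about better thresholding refers to.
  uint16_t diff =
      std::abs(static_cast<int>(rx_seq) - static_cast<int>(fec_seq));
  return diff > 0x3fff;
}

int main() {
  return (IsStaleFec(0x5000, 0x1000) && !IsStaleFec(0x1100, 0x1000)) ? 0 : 1;
}
```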
@@ -637,9 +645,9 @@ void ForwardErrorCorrection::InsertPackets(
bool ForwardErrorCorrection::InitRecovery(const FecPacket* fec_packet,
RecoveredPacket* recovered) {
// This is the first packet which we try to recover with.
- const uint16_t ulp_header_size =
- fec_packet->pkt->data[0] & 0x40 ? kUlpHeaderSizeLBitSet
- : kUlpHeaderSizeLBitClear; // L bit set?
+ const uint16_t ulp_header_size = fec_packet->pkt->data[0] & 0x40
+ ? kUlpHeaderSizeLBitSet
+ : kUlpHeaderSizeLBitClear; // L bit set?
if (fec_packet->pkt->length <
static_cast<size_t>(kFecHeaderSize + ulp_header_size)) {
LOG(LS_WARNING)
diff --git a/webrtc/modules/rtp_rtcp/source/forward_error_correction.h b/webrtc/modules/rtp_rtcp/source/forward_error_correction.h
index f92f014db3..9ba6ce0438 100644
--- a/webrtc/modules/rtp_rtcp/source/forward_error_correction.h
+++ b/webrtc/modules/rtp_rtcp/source/forward_error_correction.h
@@ -15,7 +15,7 @@
#include <vector>
#include "webrtc/base/scoped_ref_ptr.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/system_wrappers/include/ref_count.h"
#include "webrtc/typedefs.h"
diff --git a/webrtc/modules/rtp_rtcp/source/forward_error_correction_internal.cc b/webrtc/modules/rtp_rtcp/source/forward_error_correction_internal.cc
index 6d9be90de1..fae59078b1 100644
--- a/webrtc/modules/rtp_rtcp/source/forward_error_correction_internal.cc
+++ b/webrtc/modules/rtp_rtcp/source/forward_error_correction_internal.cc
@@ -17,6 +17,8 @@
#include "webrtc/modules/rtp_rtcp/source/fec_private_tables_random.h"
namespace {
+using webrtc::fec_private_tables::kPacketMaskBurstyTbl;
+using webrtc::fec_private_tables::kPacketMaskRandomTbl;
// Allow for different modes of protection for packets in UEP case.
enum ProtectionMode {
@@ -37,8 +39,11 @@ enum ProtectionMode {
// [0, num_rows * num_sub_mask_bytes]
// \param[out] packet_mask A pointer to hold the output mask, of size
// [0, x * num_mask_bytes], where x >= num_rows.
-void FitSubMask(int num_mask_bytes, int num_sub_mask_bytes, int num_rows,
- const uint8_t* sub_mask, uint8_t* packet_mask) {
+void FitSubMask(int num_mask_bytes,
+ int num_sub_mask_bytes,
+ int num_rows,
+ const uint8_t* sub_mask,
+ uint8_t* packet_mask) {
if (num_mask_bytes == num_sub_mask_bytes) {
memcpy(packet_mask, sub_mask, num_rows * num_sub_mask_bytes);
} else {
@@ -70,13 +75,15 @@ void FitSubMask(int num_mask_bytes, int num_sub_mask_bytes, int num_rows,
// \param[out] packet_mask A pointer to hold the output mask, of size
// [0, x * num_mask_bytes],
// where x >= end_row_fec.
-// TODO (marpan): This function is doing three things at the same time:
+// TODO(marpan): This function is doing three things at the same time:
// shift within a byte, byte shift and resizing.
// Split up into subroutines.
-void ShiftFitSubMask(int num_mask_bytes, int res_mask_bytes,
- int num_column_shift, int end_row, const uint8_t* sub_mask,
+void ShiftFitSubMask(int num_mask_bytes,
+ int res_mask_bytes,
+ int num_column_shift,
+ int end_row,
+ const uint8_t* sub_mask,
uint8_t* packet_mask) {
-
// Number of bit shifts within a byte
const int num_bit_shifts = (num_column_shift % 8);
const int num_byte_shifts = num_column_shift >> 3;
@@ -128,7 +135,6 @@ void ShiftFitSubMask(int num_mask_bytes, int res_mask_bytes,
// For the first byte in the row (j=0 case).
shift_right_curr_byte = sub_mask[pkt_mask_idx2] >> num_bit_shifts;
packet_mask[pkt_mask_idx] = shift_right_curr_byte;
-
}
}
} // namespace
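
ShiftFitSubMask, reformatted above, places a sub-mask num_column_shift columns to the right inside a wider mask; the decomposition it starts from — num_bit_shifts = shift % 8, num_byte_shifts = shift >> 3 — is the standard split of a bit offset into whole bytes plus a residual within-byte shift. A minimal sketch of shifting a single 8-byte mask row right by k columns (illustrative only; the real function also resizes and fits):

```cpp
#include <cassert>
#include <cstdint>
#include <cstring>

// Shift one 8-byte mask row right by |k| bit columns.
void ShiftRowRight(const uint8_t* in, uint8_t* out, int k) {
  const int byte_shift = k >> 3;  // whole bytes
  const int bit_shift = k % 8;    // residual bits within a byte
  std::memset(out, 0, 8);
  for (int j = byte_shift; j < 8; ++j) {
    out[j] = in[j - byte_shift] >> bit_shift;  // bits from the aligned byte
    if (bit_shift > 0 && j > byte_shift)       // plus carry from its left
      out[j] |= static_cast<uint8_t>(in[j - byte_shift - 1] << (8 - bit_shift));
  }
}

int main() {
  const uint8_t in[8] = {0xf0, 0, 0, 0, 0, 0, 0, 0};  // columns 0-3 set
  uint8_t out[8];
  ShiftRowRight(in, out, 10);  // one byte plus two bits
  assert(out[1] == 0x3c);      // columns 10-13 set
  return 0;
}
```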
@@ -151,7 +157,9 @@ FecMaskType PacketMaskTable::InitMaskType(FecMaskType fec_mask_type,
assert(num_media_packets <= static_cast<int>(sizeof(kPacketMaskRandomTbl) /
sizeof(*kPacketMaskRandomTbl)));
switch (fec_mask_type) {
- case kFecMaskRandom: { return kFecMaskRandom; }
+ case kFecMaskRandom: {
+ return kFecMaskRandom;
+ }
case kFecMaskBursty: {
int max_media_packets = static_cast<int>(sizeof(kPacketMaskBurstyTbl) /
sizeof(*kPacketMaskBurstyTbl));
@@ -170,17 +178,24 @@ FecMaskType PacketMaskTable::InitMaskType(FecMaskType fec_mask_type,
// |fec_mask_type|.
const uint8_t*** PacketMaskTable::InitMaskTable(FecMaskType fec_mask_type) {
switch (fec_mask_type) {
- case kFecMaskRandom: { return kPacketMaskRandomTbl; }
- case kFecMaskBursty: { return kPacketMaskBurstyTbl; }
+ case kFecMaskRandom: {
+ return kPacketMaskRandomTbl;
+ }
+ case kFecMaskBursty: {
+ return kPacketMaskBurstyTbl;
+ }
}
assert(false);
return kPacketMaskRandomTbl;
}
// Remaining protection after important (first partition) packet protection
-void RemainingPacketProtection(int num_media_packets, int num_fec_remaining,
- int num_fec_for_imp_packets, int num_mask_bytes,
- ProtectionMode mode, uint8_t* packet_mask,
+void RemainingPacketProtection(int num_media_packets,
+ int num_fec_remaining,
+ int num_fec_for_imp_packets,
+ int num_mask_bytes,
+ ProtectionMode mode,
+ uint8_t* packet_mask,
const PacketMaskTable& mask_table) {
if (mode == kModeNoOverlap) {
// sub_mask21
@@ -191,8 +206,10 @@ void RemainingPacketProtection(int num_media_packets, int num_fec_remaining,
const int res_mask_bytes =
(l_bit == 1) ? kMaskSizeLBitSet : kMaskSizeLBitClear;
- const uint8_t* packet_mask_sub_21 = mask_table.fec_packet_mask_table()[
- num_media_packets - num_fec_for_imp_packets - 1][num_fec_remaining - 1];
+ const uint8_t* packet_mask_sub_21 =
+ mask_table.fec_packet_mask_table()[num_media_packets -
+ num_fec_for_imp_packets -
+ 1][num_fec_remaining - 1];
ShiftFitSubMask(num_mask_bytes, res_mask_bytes, num_fec_for_imp_packets,
(num_fec_for_imp_packets + num_fec_remaining),
@@ -201,8 +218,9 @@ void RemainingPacketProtection(int num_media_packets, int num_fec_remaining,
} else if (mode == kModeOverlap || mode == kModeBiasFirstPacket) {
// sub_mask22
- const uint8_t* packet_mask_sub_22 = mask_table
- .fec_packet_mask_table()[num_media_packets - 1][num_fec_remaining - 1];
+ const uint8_t* packet_mask_sub_22 =
+ mask_table.fec_packet_mask_table()[num_media_packets -
+ 1][num_fec_remaining - 1];
FitSubMask(num_mask_bytes, num_mask_bytes, num_fec_remaining,
packet_mask_sub_22,
@@ -217,41 +235,42 @@ void RemainingPacketProtection(int num_media_packets, int num_fec_remaining,
} else {
assert(false);
}
-
}
// Protection for important (first partition) packets
-void ImportantPacketProtection(int num_fec_for_imp_packets, int num_imp_packets,
- int num_mask_bytes, uint8_t* packet_mask,
+void ImportantPacketProtection(int num_fec_for_imp_packets,
+ int num_imp_packets,
+ int num_mask_bytes,
+ uint8_t* packet_mask,
const PacketMaskTable& mask_table) {
const int l_bit = num_imp_packets > 16 ? 1 : 0;
const int num_imp_mask_bytes =
(l_bit == 1) ? kMaskSizeLBitSet : kMaskSizeLBitClear;
// Get sub_mask1 from table
- const uint8_t* packet_mask_sub_1 = mask_table.fec_packet_mask_table()[
- num_imp_packets - 1][num_fec_for_imp_packets - 1];
+ const uint8_t* packet_mask_sub_1 =
+ mask_table.fec_packet_mask_table()[num_imp_packets -
+ 1][num_fec_for_imp_packets - 1];
FitSubMask(num_mask_bytes, num_imp_mask_bytes, num_fec_for_imp_packets,
packet_mask_sub_1, packet_mask);
-
}
// This function sets the protection allocation: i.e., how many FEC packets
// to use for num_imp (1st partition) packets, given the number of media
// packets, the number of FEC packets, and the number of 1st partition packets.
-int SetProtectionAllocation(int num_media_packets, int num_fec_packets,
+int SetProtectionAllocation(int num_media_packets,
+ int num_fec_packets,
int num_imp_packets) {
-
- // TODO (marpan): test different cases for protection allocation:
+ // TODO(marpan): test different cases for protection allocation:
// Use at most (alloc_par * num_fec_packets) for important packets.
float alloc_par = 0.5;
int max_num_fec_for_imp = alloc_par * num_fec_packets;
- int num_fec_for_imp_packets =
- (num_imp_packets < max_num_fec_for_imp) ? num_imp_packets
- : max_num_fec_for_imp;
+ int num_fec_for_imp_packets = (num_imp_packets < max_num_fec_for_imp)
+ ? num_imp_packets
+ : max_num_fec_for_imp;
// Fall back to equal protection in this case
if (num_fec_packets == 1 && (num_media_packets > 2 * num_imp_packets)) {
@@ -268,7 +287,7 @@ int SetProtectionAllocation(int num_media_packets, int num_fec_packets,
// Current version has 3 modes (options) to build UEP mask from existing ones.
// Various other combinations may be added in future versions.
// Longer-term, we may add another set of tables specifically for UEP cases.
-// TODO (marpan): also consider modification of masks for bursty loss cases.
+// TODO(marpan): also consider modification of masks for bursty loss cases.
// Mask is characterized as (#packets_to_protect, #fec_for_protection).
// Protection factor defined as: (#fec_for_protection / #packets_to_protect).
@@ -306,13 +325,14 @@ int SetProtectionAllocation(int num_media_packets, int num_fec_packets,
// Protection Mode 2 may be extended for a sort of sliding protection
// (i.e., vary the number/density of "1s" across columns) across packets.
-void UnequalProtectionMask(int num_media_packets, int num_fec_packets,
- int num_imp_packets, int num_mask_bytes,
+void UnequalProtectionMask(int num_media_packets,
+ int num_fec_packets,
+ int num_imp_packets,
+ int num_mask_bytes,
uint8_t* packet_mask,
const PacketMaskTable& mask_table) {
-
// Set Protection type and allocation
- // TODO (marpan): test/update for best mode and some combinations thereof.
+ // TODO(marpan): test/update for best mode and some combinations thereof.
ProtectionMode mode = kModeOverlap;
int num_fec_for_imp_packets = 0;
@@ -341,11 +361,12 @@ void UnequalProtectionMask(int num_media_packets, int num_fec_packets,
num_fec_for_imp_packets, num_mask_bytes, mode,
packet_mask, mask_table);
}
-
}
-void GeneratePacketMasks(int num_media_packets, int num_fec_packets,
- int num_imp_packets, bool use_unequal_protection,
+void GeneratePacketMasks(int num_media_packets,
+ int num_fec_packets,
+ int num_imp_packets,
+ bool use_unequal_protection,
const PacketMaskTable& mask_table,
uint8_t* packet_mask) {
assert(num_media_packets > 0);
@@ -361,16 +382,15 @@ void GeneratePacketMasks(int num_media_packets, int num_fec_packets,
// Retrieve the corresponding mask table directly for the equal-protection case.
// Mask = (k,n-k), with protection factor = (n-k)/k,
// where k = num_media_packets, n=total#packets, (n-k)=num_fec_packets.
- memcpy(packet_mask, mask_table.fec_packet_mask_table()[
- num_media_packets - 1][num_fec_packets - 1],
+ memcpy(packet_mask,
+ mask_table.fec_packet_mask_table()[num_media_packets -
+ 1][num_fec_packets - 1],
num_fec_packets * num_mask_bytes);
- } else //UEP case
- {
+ } else { // UEP case
UnequalProtectionMask(num_media_packets, num_fec_packets, num_imp_packets,
num_mask_bytes, packet_mask, mask_table);
-
} // End of UEP modification
-} //End of GetPacketMasks
+} // End of GetPacketMasks
} // namespace internal
} // namespace webrtc
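
The equal-protection branch above is just a table copy: the precomputed masks are indexed by the number of media packets and the number of FEC packets, both conceptually 1-based, hence the two -1 offsets. A toy sketch of the lookup shape (the table values here are illustrative stand-ins, not the real masks):

```cpp
#include <cstdint>
#include <cstring>

// tbl[k - 1][n_fec - 1] is a flat array of n_fec mask rows for k packets.
const uint8_t kMask1_1[2] = {0x80, 0x00};
const uint8_t* const kPacketMask1[1] = {kMask1_1};
const uint8_t kMask2_1[2] = {0xc0, 0x00};
const uint8_t kMask2_2[4] = {0x80, 0x00, 0x40, 0x00};
const uint8_t* const kPacketMask2[2] = {kMask2_1, kMask2_2};
const uint8_t* const* const kTable[2] = {kPacketMask1, kPacketMask2};

int main() {
  const int num_media_packets = 2, num_fec_packets = 2, num_mask_bytes = 2;
  uint8_t packet_mask[4];
  // Same indexing as the hunk above: [k - 1][n_fec - 1].
  std::memcpy(packet_mask,
              kTable[num_media_packets - 1][num_fec_packets - 1],
              num_fec_packets * num_mask_bytes);
  return packet_mask[2] == 0x40 ? 0 : 1;
}
```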
diff --git a/webrtc/modules/rtp_rtcp/source/h264_bitstream_parser.h b/webrtc/modules/rtp_rtcp/source/h264_bitstream_parser.h
index 53ef2a61f4..28276afb72 100644
--- a/webrtc/modules/rtp_rtcp/source/h264_bitstream_parser.h
+++ b/webrtc/modules/rtp_rtcp/source/h264_bitstream_parser.h
@@ -11,8 +11,8 @@
#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_H264_BITSTREAM_PARSER_H_
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_H264_BITSTREAM_PARSER_H_
-#include <stdint.h>
#include <stddef.h>
+#include <stdint.h>
namespace rtc {
class BitBuffer;
diff --git a/webrtc/modules/rtp_rtcp/source/h264_sps_parser_unittest.cc b/webrtc/modules/rtp_rtcp/source/h264_sps_parser_unittest.cc
index ceadf4cb6b..7a7e3ed293 100644
--- a/webrtc/modules/rtp_rtcp/source/h264_sps_parser_unittest.cc
+++ b/webrtc/modules/rtp_rtcp/source/h264_sps_parser_unittest.cc
@@ -12,6 +12,7 @@
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/bitbuffer.h"
namespace webrtc {
@@ -121,7 +122,7 @@ TEST(H264SpsParserTest, TestSampleSPSHdLandscape) {
const uint8_t buffer[] = {0x7A, 0x00, 0x1F, 0xBC, 0xD9, 0x40, 0x50, 0x05,
0xBA, 0x10, 0x00, 0x00, 0x03, 0x00, 0xC0, 0x00,
0x00, 0x2A, 0xE0, 0xF1, 0x83, 0x19, 0x60};
- H264SpsParser parser = H264SpsParser(buffer, ARRAY_SIZE(buffer));
+ H264SpsParser parser = H264SpsParser(buffer, arraysize(buffer));
EXPECT_TRUE(parser.Parse());
EXPECT_EQ(1280u, parser.width());
EXPECT_EQ(720u, parser.height());
@@ -133,7 +134,7 @@ TEST(H264SpsParserTest, TestSampleSPSVgaLandscape) {
const uint8_t buffer[] = {0x7A, 0x00, 0x1E, 0xBC, 0xD9, 0x40, 0xA0, 0x2F,
0xF8, 0x98, 0x40, 0x00, 0x00, 0x03, 0x01, 0x80,
0x00, 0x00, 0x56, 0x83, 0xC5, 0x8B, 0x65, 0x80};
- H264SpsParser parser = H264SpsParser(buffer, ARRAY_SIZE(buffer));
+ H264SpsParser parser = H264SpsParser(buffer, arraysize(buffer));
EXPECT_TRUE(parser.Parse());
EXPECT_EQ(640u, parser.width());
EXPECT_EQ(360u, parser.height());
@@ -145,7 +146,7 @@ TEST(H264SpsParserTest, TestSampleSPSWeirdResolution) {
const uint8_t buffer[] = {0x7A, 0x00, 0x0D, 0xBC, 0xD9, 0x43, 0x43, 0x3E,
0x5E, 0x10, 0x00, 0x00, 0x03, 0x00, 0x60, 0x00,
0x00, 0x15, 0xA0, 0xF1, 0x42, 0x99, 0x60};
- H264SpsParser parser = H264SpsParser(buffer, ARRAY_SIZE(buffer));
+ H264SpsParser parser = H264SpsParser(buffer, arraysize(buffer));
EXPECT_TRUE(parser.Parse());
EXPECT_EQ(200u, parser.width());
EXPECT_EQ(400u, parser.height());
@@ -154,7 +155,7 @@ TEST(H264SpsParserTest, TestSampleSPSWeirdResolution) {
TEST(H264SpsParserTest, TestSyntheticSPSQvgaLandscape) {
uint8_t buffer[kSpsBufferMaxSize] = {0};
GenerateFakeSps(320u, 180u, buffer);
- H264SpsParser parser = H264SpsParser(buffer, ARRAY_SIZE(buffer));
+ H264SpsParser parser = H264SpsParser(buffer, arraysize(buffer));
EXPECT_TRUE(parser.Parse());
EXPECT_EQ(320u, parser.width());
EXPECT_EQ(180u, parser.height());
@@ -163,7 +164,7 @@ TEST(H264SpsParserTest, TestSyntheticSPSQvgaLandscape) {
TEST(H264SpsParserTest, TestSyntheticSPSWeirdResolution) {
uint8_t buffer[kSpsBufferMaxSize] = {0};
GenerateFakeSps(156u, 122u, buffer);
- H264SpsParser parser = H264SpsParser(buffer, ARRAY_SIZE(buffer));
+ H264SpsParser parser = H264SpsParser(buffer, arraysize(buffer));
EXPECT_TRUE(parser.Parse());
EXPECT_EQ(156u, parser.width());
EXPECT_EQ(122u, parser.height());
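
The ARRAY_SIZE to arraysize swap in these tests is more than a rename: arraysize (from webrtc/base/arraysize.h) deduces the bound through a reference-to-array function parameter, so passing a pointer is a compile error rather than a silent sizeof division. The usual implementation looks like this (reconstructed from memory, so treat the exact form as an assumption):

```cpp
#include <cstddef>

// Returns (by declaration only) a reference to a char array with the same
// extent N as the argument; sizeof() on the result recovers N.
template <typename T, size_t N>
char (&ArraySizeHelper(T (&array)[N]))[N];
#define arraysize(array) (sizeof(ArraySizeHelper(array)))

int main() {
  const unsigned char buffer[23] = {0};
  static_assert(arraysize(buffer) == 23, "size deduced from the array type");
  // const unsigned char* p = buffer;
  // arraysize(p);  // would not compile, unlike sizeof(p) / sizeof(*p)
  return 0;
}
```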
diff --git a/webrtc/modules/rtp_rtcp/source/mock/mock_rtp_payload_strategy.h b/webrtc/modules/rtp_rtcp/source/mock/mock_rtp_payload_strategy.h
index f577cbaad1..011829cc6c 100644
--- a/webrtc/modules/rtp_rtcp/source/mock/mock_rtp_payload_strategy.h
+++ b/webrtc/modules/rtp_rtcp/source/mock/mock_rtp_payload_strategy.h
@@ -8,11 +8,11 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_MOCK_MOCK_RTP_PAYLOAD_REGISTRY_H_
-#define WEBRTC_MODULES_RTP_RTCP_SOURCE_MOCK_MOCK_RTP_PAYLOAD_REGISTRY_H_
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_MOCK_MOCK_RTP_PAYLOAD_STRATEGY_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_MOCK_MOCK_RTP_PAYLOAD_STRATEGY_H_
#include "testing/gmock/include/gmock/gmock.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h"
namespace webrtc {
@@ -23,7 +23,7 @@ class MockRTPPayloadStrategy : public RTPPayloadStrategy {
MOCK_CONST_METHOD4(PayloadIsCompatible,
bool(const RtpUtility::Payload& payload,
const uint32_t frequency,
- const uint8_t channels,
+ const size_t channels,
const uint32_t rate));
MOCK_CONST_METHOD2(UpdatePayloadRate,
void(RtpUtility::Payload* payload, const uint32_t rate));
@@ -34,10 +34,10 @@ class MockRTPPayloadStrategy : public RTPPayloadStrategy {
RtpUtility::Payload*(const char payloadName[RTP_PAYLOAD_NAME_SIZE],
const int8_t payloadType,
const uint32_t frequency,
- const uint8_t channels,
+ const size_t channels,
const uint32_t rate));
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_MOCK_MOCK_RTP_PAYLOAD_REGISTRY_H_
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_MOCK_MOCK_RTP_PAYLOAD_STRATEGY_H_
diff --git a/webrtc/modules/rtp_rtcp/source/nack_rtx_unittest.cc b/webrtc/modules/rtp_rtcp/source/nack_rtx_unittest.cc
index 07a3693507..e19c31bfec 100644
--- a/webrtc/modules/rtp_rtcp/source/nack_rtx_unittest.cc
+++ b/webrtc/modules/rtp_rtcp/source/nack_rtx_unittest.cc
@@ -16,15 +16,15 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/common_types.h"
-#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/transport.h"
-using namespace webrtc;
+namespace webrtc {
const int kVideoNackListSize = 30;
const uint32_t kTestSsrc = 3456;
@@ -35,8 +35,7 @@ const int kNumFrames = 30;
const int kPayloadType = 123;
const int kRtxPayloadType = 98;
-class VerifyingRtxReceiver : public NullRtpData
-{
+class VerifyingRtxReceiver : public NullRtpData {
public:
VerifyingRtxReceiver() {}
@@ -54,7 +53,7 @@ class VerifyingRtxReceiver : public NullRtpData
class TestRtpFeedback : public NullRtpFeedback {
public:
- TestRtpFeedback(RtpRtcp* rtp_rtcp) : rtp_rtcp_(rtp_rtcp) {}
+ explicit TestRtpFeedback(RtpRtcp* rtp_rtcp) : rtp_rtcp_(rtp_rtcp) {}
virtual ~TestRtpFeedback() {}
void OnIncomingSSRCChanged(const uint32_t ssrc) override {
@@ -86,9 +85,7 @@ class RtxLoopBackTransport : public webrtc::Transport {
rtp_receiver_ = receiver;
}
- void DropEveryNthPacket(int n) {
- packet_loss_ = n;
- }
+ void DropEveryNthPacket(int n) { packet_loss_ = n; }
void DropConsecutivePackets(int start, int total) {
consecutive_drop_start_ = start;
@@ -100,15 +97,13 @@ class RtxLoopBackTransport : public webrtc::Transport {
size_t len,
const PacketOptions& options) override {
count_++;
- const unsigned char* ptr = static_cast<const unsigned char*>(data);
+ const unsigned char* ptr = static_cast<const unsigned char*>(data);
uint32_t ssrc = (ptr[8] << 24) + (ptr[9] << 16) + (ptr[10] << 8) + ptr[11];
- if (ssrc == rtx_ssrc_) count_rtx_ssrc_++;
+ if (ssrc == rtx_ssrc_)
+ count_rtx_ssrc_++;
uint16_t sequence_number = (ptr[2] << 8) + ptr[3];
size_t packet_length = len;
- // TODO(pbos): Figure out why this needs to be initialized. Likely this
- // is hiding a bug either in test setup or other code.
- // https://code.google.com/p/webrtc/issues/detail?id=3183
- uint8_t restored_packet[1500] = {0};
+ uint8_t restored_packet[1500];
RTPHeader header;
rtc::scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
if (!parser->Parse(ptr, len, &header)) {
@@ -136,21 +131,19 @@ class RtxLoopBackTransport : public webrtc::Transport {
if (!parser->Parse(restored_packet, packet_length, &header)) {
return false;
}
+ ptr = restored_packet;
} else {
rtp_payload_registry_->SetIncomingPayloadType(header);
}
- const uint8_t* restored_packet_payload =
- restored_packet + header.headerLength;
- packet_length -= header.headerLength;
PayloadUnion payload_specific;
if (!rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
&payload_specific)) {
return false;
}
- if (!rtp_receiver_->IncomingRtpPacket(header, restored_packet_payload,
- packet_length, payload_specific,
- true)) {
+ if (!rtp_receiver_->IncomingRtpPacket(header, ptr + header.headerLength,
+ packet_length - header.headerLength,
+ payload_specific, true)) {
return false;
}
return true;
@@ -194,8 +187,7 @@ class RtpRtcpRtxNackTest : public ::testing::Test {
rtp_feedback_.reset(new TestRtpFeedback(rtp_rtcp_module_));
rtp_receiver_.reset(RtpReceiver::CreateVideoReceiver(
- &fake_clock, &receiver_, rtp_feedback_.get(),
- &rtp_payload_registry_));
+ &fake_clock, &receiver_, rtp_feedback_.get(), &rtp_payload_registry_));
rtp_rtcp_module_->SetSSRC(kTestSsrc);
rtp_rtcp_module_->SetRTCPStatus(RtcpMode::kCompound);
@@ -215,11 +207,9 @@ class RtpRtcpRtxNackTest : public ::testing::Test {
EXPECT_EQ(0, rtp_rtcp_module_->RegisterSendPayload(video_codec));
rtp_rtcp_module_->SetRtxSendPayloadType(kRtxPayloadType, kPayloadType);
- EXPECT_EQ(0, rtp_receiver_->RegisterReceivePayload(video_codec.plName,
- video_codec.plType,
- 90000,
- 0,
- video_codec.maxBitrate));
+ EXPECT_EQ(0, rtp_receiver_->RegisterReceivePayload(
+ video_codec.plName, video_codec.plType, 90000, 0,
+ video_codec.maxBitrate));
rtp_payload_registry_.SetRtxPayloadType(kRtxPayloadType, kPayloadType);
for (size_t n = 0; n < payload_data_length; n++) {
@@ -230,8 +220,7 @@ class RtpRtcpRtxNackTest : public ::testing::Test {
int BuildNackList(uint16_t* nack_list) {
receiver_.sequence_numbers_.sort();
std::list<uint16_t> missing_sequence_numbers;
- std::list<uint16_t>::iterator it =
- receiver_.sequence_numbers_.begin();
+ std::list<uint16_t>::iterator it = receiver_.sequence_numbers_.begin();
while (it != receiver_.sequence_numbers_.end()) {
uint16_t sequence_number_1 = *it;
@@ -239,15 +228,14 @@ class RtpRtcpRtxNackTest : public ::testing::Test {
if (it != receiver_.sequence_numbers_.end()) {
uint16_t sequence_number_2 = *it;
// Add all missing sequence numbers to list
- for (uint16_t i = sequence_number_1 + 1; i < sequence_number_2;
- ++i) {
+ for (uint16_t i = sequence_number_1 + 1; i < sequence_number_2; ++i) {
missing_sequence_numbers.push_back(i);
}
}
}
int n = 0;
for (it = missing_sequence_numbers.begin();
- it != missing_sequence_numbers.end(); ++it) {
+ it != missing_sequence_numbers.end(); ++it) {
nack_list[n++] = (*it);
}
return n;
@@ -298,7 +286,7 @@ class RtpRtcpRtxNackTest : public ::testing::Test {
rtc::scoped_ptr<TestRtpFeedback> rtp_feedback_;
RtxLoopBackTransport transport_;
VerifyingRtxReceiver receiver_;
- uint8_t payload_data[65000];
+ uint8_t payload_data[65000];
size_t payload_data_length;
SimulatedClock fake_clock;
};
@@ -345,8 +333,10 @@ TEST_F(RtpRtcpRtxNackTest, RtxNack) {
RunRtxTest(kRtxRetransmitted, 10);
EXPECT_EQ(kTestSequenceNumber, *(receiver_.sequence_numbers_.begin()));
EXPECT_EQ(kTestSequenceNumber + kTestNumberOfPackets - 1,
- *(receiver_.sequence_numbers_.rbegin()));
+ *(receiver_.sequence_numbers_.rbegin()));
EXPECT_EQ(kTestNumberOfPackets, receiver_.sequence_numbers_.size());
EXPECT_EQ(kTestNumberOfRtxPackets, transport_.count_rtx_ssrc_);
EXPECT_TRUE(ExpectedPacketsReceived());
}
+
+} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/producer_fec_unittest.cc b/webrtc/modules/rtp_rtcp/source/producer_fec_unittest.cc
index 683b951f1e..be4b453454 100644
--- a/webrtc/modules/rtp_rtcp/source/producer_fec_unittest.cc
+++ b/webrtc/modules/rtp_rtcp/source/producer_fec_unittest.cc
@@ -9,8 +9,10 @@
*/
#include <list>
+#include <vector>
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
#include "webrtc/modules/rtp_rtcp/source/fec_test_helper.h"
#include "webrtc/modules/rtp_rtcp/source/forward_error_correction.h"
#include "webrtc/modules/rtp_rtcp/source/producer_fec.h"
@@ -54,6 +56,53 @@ class ProducerFecTest : public ::testing::Test {
FrameGenerator* generator_;
};
+// Verifies bug found via fuzzing, where a gap in the packet sequence caused us
+// to move past the end of the current FEC packet mask byte without moving to
+// the next byte. That likely caused us to repeatedly read from the same byte,
+// and if that byte didn't protect packets we would generate empty FEC.
+TEST_F(ProducerFecTest, NoEmptyFecWithSeqNumGaps) {
+ struct Packet {
+ size_t header_size;
+ size_t payload_size;
+ uint16_t seq_num;
+ bool marker_bit;
+ };
+ std::vector<Packet> protected_packets;
+ protected_packets.push_back({15, 3, 41, 0});
+ protected_packets.push_back({14, 1, 43, 0});
+ protected_packets.push_back({19, 0, 48, 0});
+ protected_packets.push_back({19, 0, 50, 0});
+ protected_packets.push_back({14, 3, 51, 0});
+ protected_packets.push_back({13, 8, 52, 0});
+ protected_packets.push_back({19, 2, 53, 0});
+ protected_packets.push_back({12, 3, 54, 0});
+ protected_packets.push_back({21, 0, 55, 0});
+ protected_packets.push_back({13, 3, 57, 1});
+ FecProtectionParams params = {117, 0, 3, kFecMaskBursty};
+ producer_->SetFecParameters(&params, 0);
+ uint8_t packet[28] = {0};
+ for (Packet p : protected_packets) {
+ if (p.marker_bit) {
+ packet[1] |= 0x80;
+ } else {
+ packet[1] &= ~0x80;
+ }
+ ByteWriter<uint16_t>::WriteBigEndian(&packet[2], p.seq_num);
+ producer_->AddRtpPacketAndGenerateFec(packet, p.payload_size,
+ p.header_size);
+ uint16_t num_fec_packets = producer_->NumAvailableFecPackets();
+ std::vector<RedPacket*> fec_packets;
+ if (num_fec_packets > 0) {
+ fec_packets =
+ producer_->GetFecPackets(kRedPayloadType, 99, 100, p.header_size);
+ EXPECT_EQ(num_fec_packets, fec_packets.size());
+ }
+ for (RedPacket* fec_packet : fec_packets) {
+ delete fec_packet;
+ }
+ }
+}
+
TEST_F(ProducerFecTest, OneFrameFec) {
// The number of media packets (|kNumPackets|), number of frames (one for
// this test), and the protection factor (|params->fec_rate|) are set to make
diff --git a/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc b/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc
index 8ac7e0a383..24f1e2c96e 100644
--- a/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc
+++ b/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc
@@ -14,7 +14,7 @@
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/rtp_rtcp/source/bitrate.h"
-#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
+#include "webrtc/modules/rtp_rtcp/source/time_util.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
namespace webrtc {
@@ -37,8 +37,6 @@ StreamStatisticianImpl::StreamStatisticianImpl(
cumulative_loss_(0),
jitter_q4_transmission_time_offset_(0),
last_receive_time_ms_(0),
- last_receive_time_secs_(0),
- last_receive_time_frac_(0),
last_received_timestamp_(0),
last_received_transmission_time_offset_(0),
received_seq_first_(0),
@@ -79,9 +77,7 @@ void StreamStatisticianImpl::UpdateCounters(const RTPHeader& header,
// are received, 4 will be ignored.
if (in_order) {
// Current time in samples.
- uint32_t receive_time_secs;
- uint32_t receive_time_frac;
- clock_->CurrentNtp(receive_time_secs, receive_time_frac);
+ NtpTime receive_time(*clock_);
// Wrong if we use RetransmitOfOldPacket.
if (receive_counters_.transmitted.packets > 1 &&
@@ -97,11 +93,10 @@ void StreamStatisticianImpl::UpdateCounters(const RTPHeader& header,
if (header.timestamp != last_received_timestamp_ &&
(receive_counters_.transmitted.packets -
receive_counters_.retransmitted.packets) > 1) {
- UpdateJitter(header, receive_time_secs, receive_time_frac);
+ UpdateJitter(header, receive_time);
}
last_received_timestamp_ = header.timestamp;
- last_receive_time_secs_ = receive_time_secs;
- last_receive_time_frac_ = receive_time_frac;
+ last_receive_time_ntp_ = receive_time;
last_receive_time_ms_ = clock_->TimeInMilliseconds();
}
@@ -113,14 +108,11 @@ void StreamStatisticianImpl::UpdateCounters(const RTPHeader& header,
}
void StreamStatisticianImpl::UpdateJitter(const RTPHeader& header,
- uint32_t receive_time_secs,
- uint32_t receive_time_frac) {
- uint32_t receive_time_rtp = RtpUtility::ConvertNTPTimeToRTP(
- receive_time_secs, receive_time_frac, header.payload_type_frequency);
+ NtpTime receive_time) {
+ uint32_t receive_time_rtp =
+ NtpToRtp(receive_time, header.payload_type_frequency);
uint32_t last_receive_time_rtp =
- RtpUtility::ConvertNTPTimeToRTP(last_receive_time_secs_,
- last_receive_time_frac_,
- header.payload_type_frequency);
+ NtpToRtp(last_receive_time_ntp_, header.payload_type_frequency);
int32_t time_diff_samples = (receive_time_rtp - last_receive_time_rtp) -
(header.timestamp - last_received_timestamp_);
@@ -267,6 +259,7 @@ RtcpStatistics StreamStatisticianImpl::CalculateRtcpStatistics() {
stats.fraction_lost = local_fraction_lost;
// We need a counter for cumulative loss too.
+ // TODO(danilchap): Ensure cumulative loss is below maximum value of 2^24.
cumulative_loss_ += missing;
stats.cumulative_lost = cumulative_loss_;
stats.extended_max_sequence_number =
@@ -319,8 +312,8 @@ void StreamStatisticianImpl::ProcessBitrate() {
void StreamStatisticianImpl::LastReceiveTimeNtp(uint32_t* secs,
uint32_t* frac) const {
CriticalSectionScoped cs(stream_lock_.get());
- *secs = last_receive_time_secs_;
- *frac = last_receive_time_frac_;
+ *secs = last_receive_time_ntp_.seconds();
+ *frac = last_receive_time_ntp_.fractions();
}
bool StreamStatisticianImpl::IsRetransmitOfOldPacket(
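
The statistics rewrite above folds the loose (seconds, fractions) pair into an NtpTime value and swaps RtpUtility::ConvertNTPTimeToRTP for NtpToRtp from the newly included time_util.h. A sketch of the conversion these helpers perform, assuming NtpTime holds 32-bit seconds plus a 32-bit binary fraction (Q32):

```cpp
#include <cstdint>

// Sketch of an NTP timestamp: 32-bit seconds + 32-bit binary fraction.
struct NtpTimeSketch {
  uint32_t seconds;
  uint32_t fractions;  // units of 2^-32 seconds
};

// Map an NTP instant onto a |freq| Hz RTP clock (wraps mod 2^32 by design).
uint32_t NtpToRtpSketch(NtpTimeSketch ntp, uint32_t freq) {
  // fractions * freq / 2^32 converts the fractional part into RTP ticks.
  uint32_t frac_ticks = static_cast<uint32_t>(
      (static_cast<uint64_t>(ntp.fractions) * freq) >> 32);
  return ntp.seconds * freq + frac_ticks;
}

int main() {
  // Half a second (fractions = 2^31) on a 90 kHz clock is 45000 ticks.
  NtpTimeSketch t = {0, 0x80000000u};
  return NtpToRtpSketch(t, 90000) == 45000 ? 0 : 1;
}
```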
diff --git a/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h b/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h
index fe42990fe9..025dcd42c7 100644
--- a/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h
+++ b/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h
@@ -11,13 +11,15 @@
#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RECEIVE_STATISTICS_IMPL_H_
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RECEIVE_STATISTICS_IMPL_H_
-#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
+#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
#include <algorithm>
+#include <map>
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/rtp_rtcp/source/bitrate.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/ntp_time.h"
namespace webrtc {
@@ -51,9 +53,7 @@ class StreamStatisticianImpl : public StreamStatistician {
private:
bool InOrderPacketInternal(uint16_t sequence_number) const;
RtcpStatistics CalculateRtcpStatistics();
- void UpdateJitter(const RTPHeader& header,
- uint32_t receive_time_secs,
- uint32_t receive_time_frac);
+ void UpdateJitter(const RTPHeader& header, NtpTime receive_time);
void UpdateCounters(const RTPHeader& rtp_header,
size_t packet_length,
bool retransmitted);
@@ -72,8 +72,7 @@ class StreamStatisticianImpl : public StreamStatistician {
uint32_t jitter_q4_transmission_time_offset_;
int64_t last_receive_time_ms_;
- uint32_t last_receive_time_secs_;
- uint32_t last_receive_time_frac_;
+ NtpTime last_receive_time_ntp_;
uint32_t last_received_timestamp_;
int32_t last_received_transmission_time_offset_;
uint16_t received_seq_first_;
diff --git a/webrtc/modules/rtp_rtcp/source/receive_statistics_unittest.cc b/webrtc/modules/rtp_rtcp/source/receive_statistics_unittest.cc
index 5b522281df..c265c17c04 100644
--- a/webrtc/modules/rtp_rtcp/source/receive_statistics_unittest.cc
+++ b/webrtc/modules/rtp_rtcp/source/receive_statistics_unittest.cc
@@ -11,7 +11,7 @@
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
+#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
#include "webrtc/system_wrappers/include/clock.h"
namespace webrtc {
diff --git a/webrtc/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc b/webrtc/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc
index 74fc9cdc56..ccc15ec417 100644
--- a/webrtc/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc
+++ b/webrtc/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/rtp_rtcp/interface/remote_ntp_time_estimator.h"
+#include "webrtc/modules/rtp_rtcp/include/remote_ntp_time_estimator.h"
#include "webrtc/base/logging.h"
#include "webrtc/system_wrappers/include/clock.h"
diff --git a/webrtc/modules/rtp_rtcp/source/remote_ntp_time_estimator_unittest.cc b/webrtc/modules/rtp_rtcp/source/remote_ntp_time_estimator_unittest.cc
index bc9cf2ee39..797c7883a9 100644
--- a/webrtc/modules/rtp_rtcp/source/remote_ntp_time_estimator_unittest.cc
+++ b/webrtc/modules/rtp_rtcp/source/remote_ntp_time_estimator_unittest.cc
@@ -11,7 +11,7 @@
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/common_types.h"
-#include "webrtc/modules/rtp_rtcp/interface/remote_ntp_time_estimator.h"
+#include "webrtc/modules/rtp_rtcp/include/remote_ntp_time_estimator.h"
#include "webrtc/system_wrappers/include/clock.h"
using ::testing::_;
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_format_remb_unittest.cc b/webrtc/modules/rtp_rtcp/source/rtcp_format_remb_unittest.cc
index 7a7645fd1b..87c0259b3e 100644
--- a/webrtc/modules/rtp_rtcp/source/rtcp_format_remb_unittest.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_format_remb_unittest.cc
@@ -21,16 +21,13 @@
#include "webrtc/test/null_transport.h"
#include "webrtc/typedefs.h"
+namespace webrtc {
namespace {
-using namespace webrtc;
-
-
class TestTransport : public Transport {
public:
- TestTransport(RTCPReceiver* rtcp_receiver) :
- rtcp_receiver_(rtcp_receiver) {
- }
+ explicit TestTransport(RTCPReceiver* rtcp_receiver)
+ : rtcp_receiver_(rtcp_receiver) {}
bool SendRtp(const uint8_t* /*data*/,
size_t /*len*/,
@@ -38,9 +35,8 @@ class TestTransport : public Transport {
return false;
}
bool SendRtcp(const uint8_t* packet, size_t packetLength) override {
- RTCPUtility::RTCPParserV2 rtcpParser((uint8_t*)packet,
- packetLength,
- true); // Allow non-compound RTCP
+ RTCPUtility::RTCPParserV2 rtcpParser(packet, packetLength,
+ true); // Allow non-compound RTCP
EXPECT_TRUE(rtcpParser.IsValid());
RTCPHelp::RTCPPacketInformation rtcpPacketInformation;
@@ -53,11 +49,11 @@ class TestTransport : public Transport {
rtcpPacketInformation.receiverEstimatedMaxBitrate);
return true;
}
+
private:
RTCPReceiver* rtcp_receiver_;
};
-
class RtcpFormatRembTest : public ::testing::Test {
protected:
RtcpFormatRembTest()
@@ -134,3 +130,4 @@ TEST_F(RtcpFormatRembTest, TestCompund) {
EXPECT_EQ(0, rtcp_sender_->SendRTCP(feedback_state, kRtcpRemb));
}
} // namespace
+} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet.cc b/webrtc/modules/rtp_rtcp/source/rtcp_packet.cc
index 792caa7b8b..eef2978371 100644
--- a/webrtc/modules/rtp_rtcp/source/rtcp_packet.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet.cc
@@ -10,6 +10,8 @@
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
+#include <algorithm>
+
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
@@ -19,38 +21,25 @@ using webrtc::RTCPUtility::kBtReceiverReferenceTime;
using webrtc::RTCPUtility::kBtVoipMetric;
using webrtc::RTCPUtility::PT_APP;
-using webrtc::RTCPUtility::PT_BYE;
using webrtc::RTCPUtility::PT_IJ;
using webrtc::RTCPUtility::PT_PSFB;
-using webrtc::RTCPUtility::PT_RR;
using webrtc::RTCPUtility::PT_RTPFB;
using webrtc::RTCPUtility::PT_SDES;
using webrtc::RTCPUtility::PT_SR;
using webrtc::RTCPUtility::PT_XR;
using webrtc::RTCPUtility::RTCPPacketAPP;
-using webrtc::RTCPUtility::RTCPPacketBYE;
using webrtc::RTCPUtility::RTCPPacketPSFBAPP;
using webrtc::RTCPUtility::RTCPPacketPSFBFIR;
using webrtc::RTCPUtility::RTCPPacketPSFBFIRItem;
-using webrtc::RTCPUtility::RTCPPacketPSFBPLI;
using webrtc::RTCPUtility::RTCPPacketPSFBREMBItem;
using webrtc::RTCPUtility::RTCPPacketPSFBRPSI;
-using webrtc::RTCPUtility::RTCPPacketPSFBSLI;
-using webrtc::RTCPUtility::RTCPPacketPSFBSLIItem;
using webrtc::RTCPUtility::RTCPPacketReportBlockItem;
-using webrtc::RTCPUtility::RTCPPacketRR;
using webrtc::RTCPUtility::RTCPPacketRTPFBNACK;
using webrtc::RTCPUtility::RTCPPacketRTPFBNACKItem;
-using webrtc::RTCPUtility::RTCPPacketRTPFBTMMBN;
-using webrtc::RTCPUtility::RTCPPacketRTPFBTMMBNItem;
-using webrtc::RTCPUtility::RTCPPacketRTPFBTMMBR;
-using webrtc::RTCPUtility::RTCPPacketRTPFBTMMBRItem;
using webrtc::RTCPUtility::RTCPPacketSR;
using webrtc::RTCPUtility::RTCPPacketXRDLRRReportBlockItem;
-using webrtc::RTCPUtility::RTCPPacketXRReceiverReferenceTimeItem;
using webrtc::RTCPUtility::RTCPPacketXR;
-using webrtc::RTCPUtility::RTCPPacketXRVOIPMetricItem;
namespace webrtc {
namespace rtcp {
@@ -122,21 +111,6 @@ void CreateSenderReport(const RTCPPacketSR& sr,
AssignUWord32(buffer, pos, sr.SenderOctetCount);
}
-// Receiver report (RR), header (RFC 3550).
-//
-// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// |V=2|P| RC | PT=RR=201 | length |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | SSRC of packet sender |
-// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
-
-void CreateReceiverReport(const RTCPPacketRR& rr,
- uint8_t* buffer,
- size_t* pos) {
- AssignUWord32(buffer, pos, rr.SenderSSRC);
-}
-
// Report block (RFC 3550).
//
// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
@@ -154,43 +128,15 @@ void CreateReceiverReport(const RTCPPacketRR& rr,
// | delay since last SR (DLSR) |
// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
-void CreateReportBlocks(const std::vector<RTCPPacketReportBlockItem>& blocks,
+void CreateReportBlocks(const std::vector<ReportBlock>& blocks,
uint8_t* buffer,
size_t* pos) {
- for (std::vector<RTCPPacketReportBlockItem>::const_iterator
- it = blocks.begin(); it != blocks.end(); ++it) {
- AssignUWord32(buffer, pos, (*it).SSRC);
- AssignUWord8(buffer, pos, (*it).FractionLost);
- AssignUWord24(buffer, pos, (*it).CumulativeNumOfPacketsLost);
- AssignUWord32(buffer, pos, (*it).ExtendedHighestSequenceNumber);
- AssignUWord32(buffer, pos, (*it).Jitter);
- AssignUWord32(buffer, pos, (*it).LastSR);
- AssignUWord32(buffer, pos, (*it).DelayLastSR);
+ for (const ReportBlock& block : blocks) {
+ block.Create(buffer + *pos);
+ *pos += ReportBlock::kLength;
}
}
-// Transmission Time Offsets in RTP Streams (RFC 5450).
-//
-// 0 1 2 3
-// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// hdr |V=2|P| RC | PT=IJ=195 | length |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | inter-arrival jitter |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// . .
-// . .
-// . .
-// | inter-arrival jitter |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-
-void CreateIj(const std::vector<uint32_t>& ij_items,
- uint8_t* buffer,
- size_t* pos) {
- for (uint32_t item : ij_items)
- AssignUWord32(buffer, pos, item);
-}
-
// Source Description (SDES) (RFC 3550).
//
// 0 1 2 3
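In the rewritten `CreateReportBlocks`, each `ReportBlock` now serializes itself and the cursor advances by a fixed `ReportBlock::kLength` (the 24 bytes of `kReportBlockLength`). Going by the field-by-field writes in the removed loop, a sketch of such a `Create` could look like the following; the member names are assumptions, since the real implementation lives in rtcp_packet/report_block.cc, which is not part of this excerpt:

    void ReportBlock::Create(uint8_t* buffer) const {
      // 24 bytes total, matching ReportBlock::kLength.
      ByteWriter<uint32_t>::WriteBigEndian(&buffer[0], source_ssrc_);
      buffer[4] = fraction_lost_;
      ByteWriter<uint32_t, 3>::WriteBigEndian(&buffer[5], cumulative_lost_);
      ByteWriter<uint32_t>::WriteBigEndian(&buffer[8], extended_high_seq_num_);
      ByteWriter<uint32_t>::WriteBigEndian(&buffer[12], jitter_);
      ByteWriter<uint32_t>::WriteBigEndian(&buffer[16], last_sr_);
      ByteWriter<uint32_t>::WriteBigEndian(&buffer[20], delay_since_last_sr_);
    }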
@@ -233,129 +179,6 @@ void CreateSdes(const std::vector<Sdes::Chunk>& chunks,
}
}
-// Bye packet (BYE) (RFC 3550).
-//
-// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// |V=2|P| SC | PT=BYE=203 | length |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | SSRC/CSRC |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// : ... :
-// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
-// (opt) | length | reason for leaving ...
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-
-void CreateBye(const RTCPPacketBYE& bye,
- const std::vector<uint32_t>& csrcs,
- uint8_t* buffer,
- size_t* pos) {
- AssignUWord32(buffer, pos, bye.SenderSSRC);
- for (uint32_t csrc : csrcs)
- AssignUWord32(buffer, pos, csrc);
-}
-
-// Application-Defined packet (APP) (RFC 3550).
-//
-// 0 1 2 3
-// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// |V=2|P| subtype | PT=APP=204 | length |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | SSRC/CSRC |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | name (ASCII) |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | application-dependent data ...
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-
-void CreateApp(const RTCPPacketAPP& app,
- uint32_t ssrc,
- uint8_t* buffer,
- size_t* pos) {
- AssignUWord32(buffer, pos, ssrc);
- AssignUWord32(buffer, pos, app.Name);
- memcpy(buffer + *pos, app.Data, app.Size);
- *pos += app.Size;
-}
-
-// RFC 4585: Feedback format.
-//
-// Common packet format:
-//
-// 0 1 2 3
-// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// |V=2|P| FMT | PT | length |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | SSRC of packet sender |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | SSRC of media source |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// : Feedback Control Information (FCI) :
-// :
-//
-
-// Picture loss indication (PLI) (RFC 4585).
-//
-// FCI: no feedback control information.
-
-void CreatePli(const RTCPPacketPSFBPLI& pli,
- uint8_t* buffer,
- size_t* pos) {
- AssignUWord32(buffer, pos, pli.SenderSSRC);
- AssignUWord32(buffer, pos, pli.MediaSSRC);
-}
-
-// Slice loss indication (SLI) (RFC 4585).
-//
-// FCI:
-//
-// 0 1 2 3
-// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | First | Number | PictureID |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-
-void CreateSli(const RTCPPacketPSFBSLI& sli,
- const RTCPPacketPSFBSLIItem& sli_item,
- uint8_t* buffer,
- size_t* pos) {
- AssignUWord32(buffer, pos, sli.SenderSSRC);
- AssignUWord32(buffer, pos, sli.MediaSSRC);
-
- AssignUWord8(buffer, pos, sli_item.FirstMB >> 5);
- AssignUWord8(buffer, pos, (sli_item.FirstMB << 3) +
- ((sli_item.NumberOfMB >> 10) & 0x07));
- AssignUWord8(buffer, pos, sli_item.NumberOfMB >> 2);
- AssignUWord8(buffer, pos, (sli_item.NumberOfMB << 6) + sli_item.PictureId);
-}
-
-// Generic NACK (RFC 4585).
-//
-// FCI:
-//
-// 0 1 2 3
-// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | PID | BLP |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-
-void CreateNack(const RTCPPacketRTPFBNACK& nack,
- const std::vector<RTCPPacketRTPFBNACKItem>& nack_fields,
- size_t start_index,
- size_t end_index,
- uint8_t* buffer,
- size_t* pos) {
- AssignUWord32(buffer, pos, nack.SenderSSRC);
- AssignUWord32(buffer, pos, nack.MediaSSRC);
- for (size_t i = start_index; i < end_index; ++i) {
- const RTCPPacketRTPFBNACKItem& nack_item = nack_fields[i];
- AssignUWord16(buffer, pos, nack_item.PacketID);
- AssignUWord16(buffer, pos, nack_item.BitMask);
- }
-}
-
// Reference picture selection indication (RPSI) (RFC 4585).
//
// FCI:
@@ -407,66 +230,6 @@ void CreateFir(const RTCPPacketPSFBFIR& fir,
AssignUWord24(buffer, pos, 0);
}
-void CreateTmmbrItem(const RTCPPacketRTPFBTMMBRItem& tmmbr_item,
- uint8_t* buffer,
- size_t* pos) {
- uint32_t bitrate_bps = tmmbr_item.MaxTotalMediaBitRate * 1000;
- uint32_t mantissa = 0;
- uint8_t exp = 0;
- ComputeMantissaAnd6bitBase2Exponent(bitrate_bps, 17, &mantissa, &exp);
-
- AssignUWord32(buffer, pos, tmmbr_item.SSRC);
- AssignUWord8(buffer, pos, (exp << 2) + ((mantissa >> 15) & 0x03));
- AssignUWord8(buffer, pos, mantissa >> 7);
- AssignUWord8(buffer, pos, (mantissa << 1) +
- ((tmmbr_item.MeasuredOverhead >> 8) & 0x01));
- AssignUWord8(buffer, pos, tmmbr_item.MeasuredOverhead);
-}
-
-// Temporary Maximum Media Stream Bit Rate Request (TMMBR) (RFC 5104).
-//
-// FCI:
-//
-// 0 1 2 3
-// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | SSRC |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | MxTBR Exp | MxTBR Mantissa |Measured Overhead|
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-
-void CreateTmmbr(const RTCPPacketRTPFBTMMBR& tmmbr,
- const RTCPPacketRTPFBTMMBRItem& tmmbr_item,
- uint8_t* buffer,
- size_t* pos) {
- AssignUWord32(buffer, pos, tmmbr.SenderSSRC);
- AssignUWord32(buffer, pos, kUnusedMediaSourceSsrc0);
- CreateTmmbrItem(tmmbr_item, buffer, pos);
-}
-
-// Temporary Maximum Media Stream Bit Rate Notification (TMMBN) (RFC 5104).
-//
-// FCI:
-//
-// 0 1 2 3
-// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | SSRC |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | MxTBR Exp | MxTBR Mantissa |Measured Overhead|
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-
-void CreateTmmbn(const RTCPPacketRTPFBTMMBN& tmmbn,
- const std::vector<RTCPPacketRTPFBTMMBRItem>& tmmbn_items,
- uint8_t* buffer,
- size_t* pos) {
- AssignUWord32(buffer, pos, tmmbn.SenderSSRC);
- AssignUWord32(buffer, pos, kUnusedMediaSourceSsrc0);
- for (uint8_t i = 0; i < tmmbn_items.size(); ++i) {
- CreateTmmbrItem(tmmbn_items[i], buffer, pos);
- }
-}
-
// Receiver Estimated Max Bitrate (REMB) (draft-alvestrand-rmcat-remb).
//
// 0 1 2 3
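The removed `CreateTmmbrItem` splits the bitrate into a 6-bit base-2 exponent and a 17-bit mantissa before packing. A self-contained sketch of that split, assuming `ComputeMantissaAnd6bitBase2Exponent` picks the smallest exponent whose mantissa fits in the requested width:

    #include <cstdint>

    // Find exp and mantissa such that value ~= mantissa << exp, with
    // mantissa < 2^mantissa_bits (17 for TMMBR) and exp held in 6 bits.
    void ComputeMantissaAnd6bitBase2Exponent(uint32_t value,
                                             uint8_t mantissa_bits,
                                             uint32_t* mantissa,
                                             uint8_t* exp) {
      *exp = 0;
      *mantissa = value;
      while (*mantissa >= (1u << mantissa_bits)) {
        *mantissa >>= 1;  // Drop one bit of precision, raise the exponent.
        ++(*exp);
      }
    }

    // Worked example: 1000000 kbps -> 1000000000 bps. 2^17 = 131072, so the
    // loop stops at exp = 13 with mantissa = 1000000000 >> 13 = 122070.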
@@ -529,130 +292,6 @@ void CreateXrHeader(const RTCPPacketXR& header,
AssignUWord32(buffer, pos, header.OriginatorSSRC);
}
-void CreateXrBlockHeader(uint8_t block_type,
- uint16_t block_length,
- uint8_t* buffer,
- size_t* pos) {
- AssignUWord8(buffer, pos, block_type);
- AssignUWord8(buffer, pos, 0);
- AssignUWord16(buffer, pos, block_length);
-}
-
-// Receiver Reference Time Report Block (RFC 3611).
-//
-// 0 1 2 3
-// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | BT=4 | reserved | block length = 2 |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | NTP timestamp, most significant word |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | NTP timestamp, least significant word |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-
-void CreateRrtr(const std::vector<RTCPPacketXRReceiverReferenceTimeItem>& rrtrs,
- uint8_t* buffer,
- size_t* pos) {
- const uint16_t kBlockLength = 2;
- for (std::vector<RTCPPacketXRReceiverReferenceTimeItem>::const_iterator it =
- rrtrs.begin(); it != rrtrs.end(); ++it) {
- CreateXrBlockHeader(kBtReceiverReferenceTime, kBlockLength, buffer, pos);
- AssignUWord32(buffer, pos, (*it).NTPMostSignificant);
- AssignUWord32(buffer, pos, (*it).NTPLeastSignificant);
- }
-}
-
-// DLRR Report Block (RFC 3611).
-//
-// 0 1 2 3
-// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | BT=5 | reserved | block length |
-// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
-// | SSRC_1 (SSRC of first receiver) | sub-
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ block
-// | last RR (LRR) | 1
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | delay since last RR (DLRR) |
-// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
-// | SSRC_2 (SSRC of second receiver) | sub-
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ block
-// : ... : 2
-
-void CreateDlrr(const std::vector<Xr::DlrrBlock>& dlrrs,
- uint8_t* buffer,
- size_t* pos) {
- for (std::vector<Xr::DlrrBlock>::const_iterator it = dlrrs.begin();
- it != dlrrs.end(); ++it) {
- if ((*it).empty()) {
- continue;
- }
- uint16_t block_length = 3 * (*it).size();
- CreateXrBlockHeader(kBtDlrr, block_length, buffer, pos);
- for (Xr::DlrrBlock::const_iterator it_block = (*it).begin();
- it_block != (*it).end(); ++it_block) {
- AssignUWord32(buffer, pos, (*it_block).SSRC);
- AssignUWord32(buffer, pos, (*it_block).LastRR);
- AssignUWord32(buffer, pos, (*it_block).DelayLastRR);
- }
- }
-}
-
-// VoIP Metrics Report Block (RFC 3611).
-//
-// 0 1 2 3
-// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | BT=7 | reserved | block length = 8 |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | SSRC of source |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | loss rate | discard rate | burst density | gap density |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | burst duration | gap duration |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | round trip delay | end system delay |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | signal level | noise level | RERL | Gmin |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | R factor | ext. R factor | MOS-LQ | MOS-CQ |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | RX config | reserved | JB nominal |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | JB maximum | JB abs max |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-
-void CreateVoipMetric(const std::vector<RTCPPacketXRVOIPMetricItem>& metrics,
- uint8_t* buffer,
- size_t* pos) {
- const uint16_t kBlockLength = 8;
- for (std::vector<RTCPPacketXRVOIPMetricItem>::const_iterator it =
- metrics.begin(); it != metrics.end(); ++it) {
- CreateXrBlockHeader(kBtVoipMetric, kBlockLength, buffer, pos);
- AssignUWord32(buffer, pos, (*it).SSRC);
- AssignUWord8(buffer, pos, (*it).lossRate);
- AssignUWord8(buffer, pos, (*it).discardRate);
- AssignUWord8(buffer, pos, (*it).burstDensity);
- AssignUWord8(buffer, pos, (*it).gapDensity);
- AssignUWord16(buffer, pos, (*it).burstDuration);
- AssignUWord16(buffer, pos, (*it).gapDuration);
- AssignUWord16(buffer, pos, (*it).roundTripDelay);
- AssignUWord16(buffer, pos, (*it).endSystemDelay);
- AssignUWord8(buffer, pos, (*it).signalLevel);
- AssignUWord8(buffer, pos, (*it).noiseLevel);
- AssignUWord8(buffer, pos, (*it).RERL);
- AssignUWord8(buffer, pos, (*it).Gmin);
- AssignUWord8(buffer, pos, (*it).Rfactor);
- AssignUWord8(buffer, pos, (*it).extRfactor);
- AssignUWord8(buffer, pos, (*it).MOSLQ);
- AssignUWord8(buffer, pos, (*it).MOSCQ);
- AssignUWord8(buffer, pos, (*it).RXconfig);
- AssignUWord8(buffer, pos, 0);
- AssignUWord16(buffer, pos, (*it).JBnominal);
- AssignUWord16(buffer, pos, (*it).JBmax);
- AssignUWord16(buffer, pos, (*it).JBabsMax);
- }
-}
} // namespace
void RtcpPacket::Append(RtcpPacket* packet) {
@@ -751,17 +390,6 @@ void RtcpPacket::CreateHeader(
AssignUWord16(buffer, pos, length);
}
-bool Empty::Create(uint8_t* packet,
- size_t* index,
- size_t max_length,
- RtcpPacket::PacketReadyCallback* callback) const {
- return true;
-}
-
-size_t Empty::BlockLength() const {
- return 0;
-}
-
bool SenderReport::Create(uint8_t* packet,
size_t* index,
size_t max_length,
@@ -781,58 +409,11 @@ bool SenderReport::WithReportBlock(const ReportBlock& block) {
LOG(LS_WARNING) << "Max report blocks reached.";
return false;
}
- report_blocks_.push_back(block.report_block_);
+ report_blocks_.push_back(block);
sr_.NumberOfReportBlocks = report_blocks_.size();
return true;
}
-bool ReceiverReport::Create(uint8_t* packet,
- size_t* index,
- size_t max_length,
- RtcpPacket::PacketReadyCallback* callback) const {
- while (*index + BlockLength() > max_length) {
- if (!OnBufferFull(packet, index, callback))
- return false;
- }
- CreateHeader(rr_.NumberOfReportBlocks, PT_RR, HeaderLength(), packet, index);
- CreateReceiverReport(rr_, packet, index);
- CreateReportBlocks(report_blocks_, packet, index);
- return true;
-}
-
-bool ReceiverReport::WithReportBlock(const ReportBlock& block) {
- if (report_blocks_.size() >= kMaxNumberOfReportBlocks) {
- LOG(LS_WARNING) << "Max report blocks reached.";
- return false;
- }
- report_blocks_.push_back(block.report_block_);
- rr_.NumberOfReportBlocks = report_blocks_.size();
- return true;
-}
-
-bool Ij::Create(uint8_t* packet,
- size_t* index,
- size_t max_length,
- RtcpPacket::PacketReadyCallback* callback) const {
- while (*index + BlockLength() > max_length) {
- if (!OnBufferFull(packet, index, callback))
- return false;
- }
- size_t length = ij_items_.size();
- CreateHeader(length, PT_IJ, length, packet, index);
- CreateIj(ij_items_, packet, index);
- return true;
-}
-
-bool Ij::WithJitterItem(uint32_t jitter) {
- if (ij_items_.size() >= kMaxNumberOfIjItems) {
- LOG(LS_WARNING) << "Max inter-arrival jitter items reached.";
- return false;
- }
- ij_items_.push_back(jitter);
- return true;
-}
-
bool Sdes::Create(uint8_t* packet,
size_t* index,
size_t max_length,
@@ -876,129 +457,6 @@ size_t Sdes::BlockLength() const {
return length;
}
-bool Bye::Create(uint8_t* packet,
- size_t* index,
- size_t max_length,
- RtcpPacket::PacketReadyCallback* callback) const {
- while (*index + BlockLength() > max_length) {
- if (!OnBufferFull(packet, index, callback))
- return false;
- }
- size_t length = HeaderLength();
- CreateHeader(length, PT_BYE, length, packet, index);
- CreateBye(bye_, csrcs_, packet, index);
- return true;
-}
-
-bool Bye::WithCsrc(uint32_t csrc) {
- if (csrcs_.size() >= kMaxNumberOfCsrcs) {
- LOG(LS_WARNING) << "Max CSRC size reached.";
- return false;
- }
- csrcs_.push_back(csrc);
- return true;
-}
-
-bool App::Create(uint8_t* packet,
- size_t* index,
- size_t max_length,
- RtcpPacket::PacketReadyCallback* callback) const {
- while (*index + BlockLength() > max_length) {
- if (!OnBufferFull(packet, index, callback))
- return false;
- }
- CreateHeader(app_.SubType, PT_APP, HeaderLength(), packet, index);
- CreateApp(app_, ssrc_, packet, index);
- return true;
-}
-
-bool Pli::Create(uint8_t* packet,
- size_t* index,
- size_t max_length,
- RtcpPacket::PacketReadyCallback* callback) const {
- while (*index + BlockLength() > max_length) {
- if (!OnBufferFull(packet, index, callback))
- return false;
- }
- const uint8_t kFmt = 1;
- CreateHeader(kFmt, PT_PSFB, HeaderLength(), packet, index);
- CreatePli(pli_, packet, index);
- return true;
-}
-
-bool Sli::Create(uint8_t* packet,
- size_t* index,
- size_t max_length,
- RtcpPacket::PacketReadyCallback* callback) const {
- while (*index + BlockLength() > max_length) {
- if (!OnBufferFull(packet, index, callback))
- return false;
- }
- const uint8_t kFmt = 2;
- CreateHeader(kFmt, PT_PSFB, HeaderLength(), packet, index);
- CreateSli(sli_, sli_item_, packet, index);
- return true;
-}
-
-bool Nack::Create(uint8_t* packet,
- size_t* index,
- size_t max_length,
- RtcpPacket::PacketReadyCallback* callback) const {
- assert(!nack_fields_.empty());
- // If nack list can't fit in packet, try to fragment.
- size_t nack_index = 0;
- do {
- size_t bytes_left_in_buffer = max_length - *index;
- if (bytes_left_in_buffer < kCommonFbFmtLength + 4) {
- if (!OnBufferFull(packet, index, callback))
- return false;
- continue;
- }
- int64_t num_nack_fields =
- std::min((bytes_left_in_buffer - kCommonFbFmtLength) / 4,
- nack_fields_.size() - nack_index);
-
- const uint8_t kFmt = 1;
- size_t size_bytes = (num_nack_fields * 4) + kCommonFbFmtLength;
- size_t header_length = ((size_bytes + 3) / 4) - 1; // As 32bit words - 1
- CreateHeader(kFmt, PT_RTPFB, header_length, packet, index);
- CreateNack(nack_, nack_fields_, nack_index, nack_index + num_nack_fields,
- packet, index);
-
- nack_index += num_nack_fields;
- } while (nack_index < nack_fields_.size());
-
- return true;
-}
-
-size_t Nack::BlockLength() const {
- return (nack_fields_.size() * 4) + kCommonFbFmtLength;
-}
-
-void Nack::WithList(const uint16_t* nack_list, int length) {
- assert(nack_list);
- assert(nack_fields_.empty());
- int i = 0;
- while (i < length) {
- uint16_t pid = nack_list[i++];
- // Bitmask specifies losses in any of the 16 packets following the pid.
- uint16_t bitmask = 0;
- while (i < length) {
- int shift = static_cast<uint16_t>(nack_list[i] - pid) - 1;
- if (shift >= 0 && shift <= 15) {
- bitmask |= (1 << shift);
- ++i;
- } else {
- break;
- }
- }
- RTCPUtility::RTCPPacketRTPFBNACKItem item;
- item.PacketID = pid;
- item.BitMask = bitmask;
- nack_fields_.push_back(item);
- }
-}
-
bool Rpsi::Create(uint8_t* packet,
size_t* index,
size_t max_length,
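The removed `Nack::WithList` compresses a sorted list of lost sequence numbers into (PID, BLP) pairs: each field names one packet and sets a bit in the 16-bit mask for every further loss within the next 16 sequence numbers. A self-contained transcription of that packing:

    #include <cstdint>
    #include <vector>

    struct NackField {
      uint16_t packet_id;  // PID.
      uint16_t bitmask;    // BLP: losses among packet_id+1 .. packet_id+16.
    };

    std::vector<NackField> PackNackList(const uint16_t* nack_list, int length) {
      std::vector<NackField> fields;
      int i = 0;
      while (i < length) {
        uint16_t pid = nack_list[i++];
        uint16_t bitmask = 0;
        while (i < length) {
          // Wrap-safe sequence-number distance, as in the removed code.
          int shift = static_cast<uint16_t>(nack_list[i] - pid) - 1;
          if (shift < 0 || shift > 15)
            break;
          bitmask |= (1 << shift);
          ++i;
        }
        fields.push_back({pid, bitmask});
      }
      return fields;
    }

    // Example: {5, 7, 8, 30} packs to {PID = 5, BLP = 0x0006} plus
    // {PID = 30, BLP = 0x0000}.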
@@ -1077,48 +535,6 @@ void Remb::AppliesTo(uint32_t ssrc) {
remb_item_.SSRCs[remb_item_.NumberOfSSRCs++] = ssrc;
}
-bool Tmmbr::Create(uint8_t* packet,
- size_t* index,
- size_t max_length,
- RtcpPacket::PacketReadyCallback* callback) const {
- while (*index + BlockLength() > max_length) {
- if (!OnBufferFull(packet, index, callback))
- return false;
- }
- const uint8_t kFmt = 3;
- CreateHeader(kFmt, PT_RTPFB, HeaderLength(), packet, index);
- CreateTmmbr(tmmbr_, tmmbr_item_, packet, index);
- return true;
-}
-
-bool Tmmbn::WithTmmbr(uint32_t ssrc, uint32_t bitrate_kbps, uint16_t overhead) {
- assert(overhead <= 0x1ff);
- if (tmmbn_items_.size() >= kMaxNumberOfTmmbrs) {
- LOG(LS_WARNING) << "Max TMMBN size reached.";
- return false;
- }
- RTCPPacketRTPFBTMMBRItem tmmbn_item;
- tmmbn_item.SSRC = ssrc;
- tmmbn_item.MaxTotalMediaBitRate = bitrate_kbps;
- tmmbn_item.MeasuredOverhead = overhead;
- tmmbn_items_.push_back(tmmbn_item);
- return true;
-}
-
-bool Tmmbn::Create(uint8_t* packet,
- size_t* index,
- size_t max_length,
- RtcpPacket::PacketReadyCallback* callback) const {
- while (*index + BlockLength() > max_length) {
- if (!OnBufferFull(packet, index, callback))
- return false;
- }
- const uint8_t kFmt = 4;
- CreateHeader(kFmt, PT_RTPFB, HeaderLength(), packet, index);
- CreateTmmbn(tmmbn_, tmmbn_items_, packet, index);
- return true;
-}
-
bool Xr::Create(uint8_t* packet,
size_t* index,
size_t max_length,
@@ -1129,29 +545,38 @@ bool Xr::Create(uint8_t* packet,
}
CreateHeader(0U, PT_XR, HeaderLength(), packet, index);
CreateXrHeader(xr_header_, packet, index);
- CreateRrtr(rrtr_blocks_, packet, index);
- CreateDlrr(dlrr_blocks_, packet, index);
- CreateVoipMetric(voip_metric_blocks_, packet, index);
+ for (const Rrtr& block : rrtr_blocks_) {
+ block.Create(packet + *index);
+ *index += Rrtr::kLength;
+ }
+ for (const Dlrr& block : dlrr_blocks_) {
+ block.Create(packet + *index);
+ *index += block.BlockLength();
+ }
+ for (const VoipMetric& block : voip_metric_blocks_) {
+ block.Create(packet + *index);
+ *index += VoipMetric::kLength;
+ }
return true;
}
bool Xr::WithRrtr(Rrtr* rrtr) {
- assert(rrtr);
+ RTC_DCHECK(rrtr);
if (rrtr_blocks_.size() >= kMaxNumberOfRrtrBlocks) {
LOG(LS_WARNING) << "Max RRTR blocks reached.";
return false;
}
- rrtr_blocks_.push_back(rrtr->rrtr_block_);
+ rrtr_blocks_.push_back(*rrtr);
return true;
}
bool Xr::WithDlrr(Dlrr* dlrr) {
- assert(dlrr);
+ RTC_DCHECK(dlrr);
if (dlrr_blocks_.size() >= kMaxNumberOfDlrrBlocks) {
LOG(LS_WARNING) << "Max DLRR blocks reached.";
return false;
}
- dlrr_blocks_.push_back(dlrr->dlrr_block_);
+ dlrr_blocks_.push_back(*dlrr);
return true;
}
@@ -1161,38 +586,18 @@ bool Xr::WithVoipMetric(VoipMetric* voip_metric) {
LOG(LS_WARNING) << "Max Voip Metric blocks reached.";
return false;
}
- voip_metric_blocks_.push_back(voip_metric->metric_);
+ voip_metric_blocks_.push_back(*voip_metric);
return true;
}
size_t Xr::DlrrLength() const {
- const size_t kBlockHeaderLen = 4;
- const size_t kSubBlockLen = 12;
size_t length = 0;
- for (std::vector<DlrrBlock>::const_iterator it = dlrr_blocks_.begin();
- it != dlrr_blocks_.end(); ++it) {
- if (!(*it).empty()) {
- length += kBlockHeaderLen + kSubBlockLen * (*it).size();
- }
+ for (const Dlrr& block : dlrr_blocks_) {
+ length += block.BlockLength();
}
return length;
}
-bool Dlrr::WithDlrrItem(uint32_t ssrc,
- uint32_t last_rr,
- uint32_t delay_last_rr) {
- if (dlrr_block_.size() >= kMaxNumberOfDlrrItems) {
- LOG(LS_WARNING) << "Max DLRR items reached.";
- return false;
- }
- RTCPPacketXRDLRRReportBlockItem dlrr;
- dlrr.SSRC = ssrc;
- dlrr.LastRR = last_rr;
- dlrr.DelayLastRR = delay_last_rr;
- dlrr_block_.push_back(dlrr);
- return true;
-}
-
RawPacket::RawPacket(size_t buffer_length)
: buffer_length_(buffer_length), length_(0) {
buffer_.reset(new uint8_t[buffer_length]);
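With this change `Xr::Create` delegates serialization to the blocks themselves, and the lengths fall out of the RFC 3611 layouts shown above: an RRTR block is a 4-byte XR block header plus two NTP words (12 bytes, `Rrtr::kLength`), VoIP metrics are 4 + 32 = 36 bytes (`VoipMetric::kLength`), and a DLRR block is 4 bytes of header plus 12 bytes per sub-block, exactly what the removed `Xr::DlrrLength` summed by hand. A sketch of the `Dlrr::BlockLength` the new code calls, assuming the class stores its sub-blocks in a vector:

    size_t Dlrr::BlockLength() const {
      if (sub_blocks_.empty())
        return 0;  // The old code skipped empty DLRR blocks entirely.
      const size_t kBlockHeaderLen = 4;  // BT, reserved, block length.
      const size_t kSubBlockLen = 12;    // SSRC, last RR, delay: 4 bytes each.
      return kBlockHeaderLen + kSubBlockLen * sub_blocks_.size();
    }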
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet.h b/webrtc/modules/rtp_rtcp/source/rtcp_packet.h
index 3c34957c36..01c97c38ba 100644
--- a/webrtc/modules/rtp_rtcp/source/rtcp_packet.h
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet.h
@@ -9,16 +9,20 @@
*
*/
-#ifndef WEBRTC_MODULES_RTP_RTCP_RTCP_PACKET_H_
-#define WEBRTC_MODULES_RTP_RTCP_RTCP_PACKET_H_
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_H_
#include <map>
#include <string>
#include <vector>
#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/dlrr.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/report_block.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/rrtr.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/voip_metric.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -27,10 +31,7 @@ namespace rtcp {
static const int kCommonFbFmtLength = 12;
static const int kReportBlockLength = 24;
-class Dlrr;
class RawPacket;
-class Rrtr;
-class VoipMetric;
// Class for building RTCP packets.
//
@@ -115,93 +116,17 @@ class RtcpPacket {
size_t HeaderLength() const;
static const size_t kHeaderLength = 4;
+ std::vector<RtcpPacket*> appended_packets_;
private:
bool CreateAndAddAppended(uint8_t* packet,
size_t* index,
size_t max_length,
PacketReadyCallback* callback) const;
-
- std::vector<RtcpPacket*> appended_packets_;
};
// TODO(sprang): Move RtcpPacket subclasses out to separate files.
-class Empty : public RtcpPacket {
- public:
- Empty() : RtcpPacket() {}
-
- virtual ~Empty() {}
-
- protected:
- bool Create(uint8_t* packet,
- size_t* index,
- size_t max_length,
- RtcpPacket::PacketReadyCallback* callback) const override;
-
- size_t BlockLength() const override;
-
- private:
- RTC_DISALLOW_COPY_AND_ASSIGN(Empty);
-};
-
-// From RFC 3550, RTP: A Transport Protocol for Real-Time Applications.
-//
-// RTCP report block (RFC 3550).
-//
-// 0 1 2 3
-// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
-// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
-// | SSRC_1 (SSRC of first source) |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | fraction lost | cumulative number of packets lost |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | extended highest sequence number received |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | interarrival jitter |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | last SR (LSR) |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | delay since last SR (DLSR) |
-// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
-
-class ReportBlock {
- public:
- ReportBlock() {
- // TODO(asapersson): Consider adding a constructor to struct.
- memset(&report_block_, 0, sizeof(report_block_));
- }
-
- ~ReportBlock() {}
-
- void To(uint32_t ssrc) {
- report_block_.SSRC = ssrc;
- }
- void WithFractionLost(uint8_t fraction_lost) {
- report_block_.FractionLost = fraction_lost;
- }
- void WithCumulativeLost(uint32_t cumulative_lost) {
- report_block_.CumulativeNumOfPacketsLost = cumulative_lost;
- }
- void WithExtHighestSeqNum(uint32_t ext_highest_seq_num) {
- report_block_.ExtendedHighestSequenceNumber = ext_highest_seq_num;
- }
- void WithJitter(uint32_t jitter) {
- report_block_.Jitter = jitter;
- }
- void WithLastSr(uint32_t last_sr) {
- report_block_.LastSR = last_sr;
- }
- void WithDelayLastSr(uint32_t delay_last_sr) {
- report_block_.DelayLastSR = delay_last_sr;
- }
-
- private:
- friend class SenderReport;
- friend class ReceiverReport;
- RTCPUtility::RTCPPacketReportBlockItem report_block_;
-};
-
// RTCP sender report (RFC 3550).
//
// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
@@ -268,102 +193,11 @@ class SenderReport : public RtcpPacket {
}
RTCPUtility::RTCPPacketSR sr_;
- std::vector<RTCPUtility::RTCPPacketReportBlockItem> report_blocks_;
+ std::vector<ReportBlock> report_blocks_;
RTC_DISALLOW_COPY_AND_ASSIGN(SenderReport);
};
-//
-// RTCP receiver report (RFC 3550).
-//
-// 0 1 2 3
-// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// |V=2|P| RC | PT=RR=201 | length |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | SSRC of packet sender |
-// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
-// | report block(s) |
-// | .... |
-
-class ReceiverReport : public RtcpPacket {
- public:
- ReceiverReport() : RtcpPacket() {
- memset(&rr_, 0, sizeof(rr_));
- }
-
- virtual ~ReceiverReport() {}
-
- void From(uint32_t ssrc) {
- rr_.SenderSSRC = ssrc;
- }
- bool WithReportBlock(const ReportBlock& block);
-
- protected:
- bool Create(uint8_t* packet,
- size_t* index,
- size_t max_length,
- RtcpPacket::PacketReadyCallback* callback) const override;
-
- private:
- static const int kMaxNumberOfReportBlocks = 0x1F;
-
- size_t BlockLength() const {
- const size_t kRrHeaderLength = 8;
- return kRrHeaderLength + report_blocks_.size() * kReportBlockLength;
- }
-
- RTCPUtility::RTCPPacketRR rr_;
- std::vector<RTCPUtility::RTCPPacketReportBlockItem> report_blocks_;
-
- RTC_DISALLOW_COPY_AND_ASSIGN(ReceiverReport);
-};
-
-// Transmission Time Offsets in RTP Streams (RFC 5450).
-//
-// 0 1 2 3
-// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// hdr |V=2|P| RC | PT=IJ=195 | length |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | inter-arrival jitter |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// . .
-// . .
-// . .
-// | inter-arrival jitter |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-//
-// If present, this RTCP packet must be placed after a receiver report
-// (inside a compound RTCP packet), and MUST have the same value for RC
-// (reception report count) as the receiver report.
-
-class Ij : public RtcpPacket {
- public:
- Ij() : RtcpPacket() {}
-
- virtual ~Ij() {}
-
- bool WithJitterItem(uint32_t jitter);
-
- protected:
- bool Create(uint8_t* packet,
- size_t* index,
- size_t max_length,
- RtcpPacket::PacketReadyCallback* callback) const override;
-
- private:
- static const int kMaxNumberOfIjItems = 0x1f;
-
- size_t BlockLength() const {
- return kHeaderLength + 4 * ij_items_.size();
- }
-
- std::vector<uint32_t> ij_items_;
-
- RTC_DISALLOW_COPY_AND_ASSIGN(Ij);
-};
-
// Source Description (SDES) (RFC 3550).
//
// 0 1 2 3
@@ -420,262 +254,6 @@ class Sdes : public RtcpPacket {
RTC_DISALLOW_COPY_AND_ASSIGN(Sdes);
};
-//
-// Bye packet (BYE) (RFC 3550).
-//
-// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// |V=2|P| SC | PT=BYE=203 | length |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | SSRC/CSRC |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// : ... :
-// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
-// (opt) | length | reason for leaving ...
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-
-class Bye : public RtcpPacket {
- public:
- Bye() : RtcpPacket() {
- memset(&bye_, 0, sizeof(bye_));
- }
-
- virtual ~Bye() {}
-
- void From(uint32_t ssrc) {
- bye_.SenderSSRC = ssrc;
- }
-
- bool WithCsrc(uint32_t csrc);
-
- // TODO(sprang): Add support for reason field?
-
- protected:
- bool Create(uint8_t* packet,
- size_t* index,
- size_t max_length,
- RtcpPacket::PacketReadyCallback* callback) const override;
-
- private:
- static const int kMaxNumberOfCsrcs = 0x1f - 1; // First item is sender SSRC.
-
- size_t BlockLength() const {
- size_t source_count = 1 + csrcs_.size();
- return kHeaderLength + 4 * source_count;
- }
-
- RTCPUtility::RTCPPacketBYE bye_;
- std::vector<uint32_t> csrcs_;
-
- RTC_DISALLOW_COPY_AND_ASSIGN(Bye);
-};
-
-// Application-Defined packet (APP) (RFC 3550).
-//
-// 0 1 2 3
-// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// |V=2|P| subtype | PT=APP=204 | length |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | SSRC/CSRC |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | name (ASCII) |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | application-dependent data ...
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-
-class App : public RtcpPacket {
- public:
- App()
- : RtcpPacket(),
- ssrc_(0) {
- memset(&app_, 0, sizeof(app_));
- }
-
- virtual ~App() {}
-
- void From(uint32_t ssrc) {
- ssrc_ = ssrc;
- }
- void WithSubType(uint8_t subtype) {
- assert(subtype <= 0x1f);
- app_.SubType = subtype;
- }
- void WithName(uint32_t name) {
- app_.Name = name;
- }
- void WithData(const uint8_t* data, uint16_t data_length) {
- assert(data);
- assert(data_length <= kRtcpAppCode_DATA_SIZE);
- assert(data_length % 4 == 0);
- memcpy(app_.Data, data, data_length);
- app_.Size = data_length;
- }
-
- protected:
- bool Create(uint8_t* packet,
- size_t* index,
- size_t max_length,
- RtcpPacket::PacketReadyCallback* callback) const override;
-
- private:
- size_t BlockLength() const {
- return 12 + app_.Size;
- }
-
- uint32_t ssrc_;
- RTCPUtility::RTCPPacketAPP app_;
-
- RTC_DISALLOW_COPY_AND_ASSIGN(App);
-};
-
-// RFC 4585: Feedback format.
-//
-// Common packet format:
-//
-// 0 1 2 3
-// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// |V=2|P| FMT | PT | length |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | SSRC of packet sender |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | SSRC of media source |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// : Feedback Control Information (FCI) :
-// :
-
-// Picture loss indication (PLI) (RFC 4585).
-//
-// FCI: no feedback control information.
-
-class Pli : public RtcpPacket {
- public:
- Pli() : RtcpPacket() {
- memset(&pli_, 0, sizeof(pli_));
- }
-
- virtual ~Pli() {}
-
- void From(uint32_t ssrc) {
- pli_.SenderSSRC = ssrc;
- }
- void To(uint32_t ssrc) {
- pli_.MediaSSRC = ssrc;
- }
-
- protected:
- bool Create(uint8_t* packet,
- size_t* index,
- size_t max_length,
- RtcpPacket::PacketReadyCallback* callback) const override;
-
- private:
- size_t BlockLength() const {
- return kCommonFbFmtLength;
- }
-
- RTCPUtility::RTCPPacketPSFBPLI pli_;
-
- RTC_DISALLOW_COPY_AND_ASSIGN(Pli);
-};
-
-// Slice loss indication (SLI) (RFC 4585).
-//
-// FCI:
-// 0 1 2 3
-// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | First | Number | PictureID |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-
-class Sli : public RtcpPacket {
- public:
- Sli() : RtcpPacket() {
- memset(&sli_, 0, sizeof(sli_));
- memset(&sli_item_, 0, sizeof(sli_item_));
- }
-
- virtual ~Sli() {}
-
- void From(uint32_t ssrc) {
- sli_.SenderSSRC = ssrc;
- }
- void To(uint32_t ssrc) {
- sli_.MediaSSRC = ssrc;
- }
- void WithFirstMb(uint16_t first_mb) {
- assert(first_mb <= 0x1fff);
- sli_item_.FirstMB = first_mb;
- }
- void WithNumberOfMb(uint16_t number_mb) {
- assert(number_mb <= 0x1fff);
- sli_item_.NumberOfMB = number_mb;
- }
- void WithPictureId(uint8_t picture_id) {
- assert(picture_id <= 0x3f);
- sli_item_.PictureId = picture_id;
- }
-
- protected:
- bool Create(uint8_t* packet,
- size_t* index,
- size_t max_length,
- RtcpPacket::PacketReadyCallback* callback) const override;
-
- private:
- size_t BlockLength() const {
- const size_t kFciLength = 4;
- return kCommonFbFmtLength + kFciLength;
- }
-
- RTCPUtility::RTCPPacketPSFBSLI sli_;
- RTCPUtility::RTCPPacketPSFBSLIItem sli_item_;
-
- RTC_DISALLOW_COPY_AND_ASSIGN(Sli);
-};
-
-// Generic NACK (RFC 4585).
-//
-// FCI:
-// 0 1 2 3
-// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | PID | BLP |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-
-class Nack : public RtcpPacket {
- public:
- Nack() : RtcpPacket() {
- memset(&nack_, 0, sizeof(nack_));
- }
-
- virtual ~Nack() {}
-
- void From(uint32_t ssrc) {
- nack_.SenderSSRC = ssrc;
- }
- void To(uint32_t ssrc) {
- nack_.MediaSSRC = ssrc;
- }
- void WithList(const uint16_t* nack_list, int length);
-
- protected:
- bool Create(uint8_t* packet,
- size_t* index,
- size_t max_length,
- RtcpPacket::PacketReadyCallback* callback) const override;
-
- size_t BlockLength() const override;
-
- private:
-
- RTCPUtility::RTCPPacketRTPFBNACK nack_;
- std::vector<RTCPUtility::RTCPPacketRTPFBNACKItem> nack_fields_;
-
- RTC_DISALLOW_COPY_AND_ASSIGN(Nack);
-};
-
// Reference picture selection indication (RPSI) (RFC 4585).
//
// FCI:
@@ -775,105 +353,6 @@ class Fir : public RtcpPacket {
RTCPUtility::RTCPPacketPSFBFIRItem fir_item_;
};
-// Temporary Maximum Media Stream Bit Rate Request (TMMBR) (RFC 5104).
-//
-// FCI:
-//
-// 0 1 2 3
-// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | SSRC |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | MxTBR Exp | MxTBR Mantissa |Measured Overhead|
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-
-class Tmmbr : public RtcpPacket {
- public:
- Tmmbr() : RtcpPacket() {
- memset(&tmmbr_, 0, sizeof(tmmbr_));
- memset(&tmmbr_item_, 0, sizeof(tmmbr_item_));
- }
-
- virtual ~Tmmbr() {}
-
- void From(uint32_t ssrc) {
- tmmbr_.SenderSSRC = ssrc;
- }
- void To(uint32_t ssrc) {
- tmmbr_item_.SSRC = ssrc;
- }
- void WithBitrateKbps(uint32_t bitrate_kbps) {
- tmmbr_item_.MaxTotalMediaBitRate = bitrate_kbps;
- }
- void WithOverhead(uint16_t overhead) {
- assert(overhead <= 0x1ff);
- tmmbr_item_.MeasuredOverhead = overhead;
- }
-
- protected:
- bool Create(uint8_t* packet,
- size_t* index,
- size_t max_length,
- RtcpPacket::PacketReadyCallback* callback) const override;
-
- private:
- size_t BlockLength() const {
- const size_t kFciLen = 8;
- return kCommonFbFmtLength + kFciLen;
- }
-
- RTCPUtility::RTCPPacketRTPFBTMMBR tmmbr_;
- RTCPUtility::RTCPPacketRTPFBTMMBRItem tmmbr_item_;
-
- RTC_DISALLOW_COPY_AND_ASSIGN(Tmmbr);
-};
-
-// Temporary Maximum Media Stream Bit Rate Notification (TMMBN) (RFC 5104).
-//
-// FCI:
-//
-// 0 1 2 3
-// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | SSRC |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | MxTBR Exp | MxTBR Mantissa |Measured Overhead|
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-
-class Tmmbn : public RtcpPacket {
- public:
- Tmmbn() : RtcpPacket() {
- memset(&tmmbn_, 0, sizeof(tmmbn_));
- }
-
- virtual ~Tmmbn() {}
-
- void From(uint32_t ssrc) {
- tmmbn_.SenderSSRC = ssrc;
- }
- // Max 50 TMMBR can be added per TMMBN.
- bool WithTmmbr(uint32_t ssrc, uint32_t bitrate_kbps, uint16_t overhead);
-
- protected:
- bool Create(uint8_t* packet,
- size_t* index,
- size_t max_length,
- RtcpPacket::PacketReadyCallback* callback) const override;
-
- private:
- static const int kMaxNumberOfTmmbrs = 50;
-
- size_t BlockLength() const {
- const size_t kFciLen = 8;
- return kCommonFbFmtLength + kFciLen * tmmbn_items_.size();
- }
-
- RTCPUtility::RTCPPacketRTPFBTMMBN tmmbn_;
- std::vector<RTCPUtility::RTCPPacketRTPFBTMMBRItem> tmmbn_items_;
-
- RTC_DISALLOW_COPY_AND_ASSIGN(Tmmbn);
-};
-
// Receiver Estimated Max Bitrate (REMB) (draft-alvestrand-rmcat-remb).
//
// 0 1 2 3
@@ -978,163 +457,22 @@ class Xr : public RtcpPacket {
return kXrHeaderLength + RrtrLength() + DlrrLength() + VoipMetricLength();
}
- size_t RrtrLength() const {
- const size_t kRrtrBlockLength = 12;
- return kRrtrBlockLength * rrtr_blocks_.size();
- }
+ size_t RrtrLength() const { return Rrtr::kLength * rrtr_blocks_.size(); }
size_t DlrrLength() const;
size_t VoipMetricLength() const {
- const size_t kVoipMetricBlockLength = 36;
- return kVoipMetricBlockLength * voip_metric_blocks_.size();
+ return VoipMetric::kLength * voip_metric_blocks_.size();
}
RTCPUtility::RTCPPacketXR xr_header_;
- std::vector<RTCPUtility::RTCPPacketXRReceiverReferenceTimeItem> rrtr_blocks_;
- std::vector<DlrrBlock> dlrr_blocks_;
- std::vector<RTCPUtility::RTCPPacketXRVOIPMetricItem> voip_metric_blocks_;
+ std::vector<Rrtr> rrtr_blocks_;
+ std::vector<Dlrr> dlrr_blocks_;
+ std::vector<VoipMetric> voip_metric_blocks_;
RTC_DISALLOW_COPY_AND_ASSIGN(Xr);
};
-// Receiver Reference Time Report Block (RFC 3611).
-//
-// 0 1 2 3
-// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | BT=4 | reserved | block length = 2 |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | NTP timestamp, most significant word |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | NTP timestamp, least significant word |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-
-class Rrtr {
- public:
- Rrtr() {
- memset(&rrtr_block_, 0, sizeof(rrtr_block_));
- }
- ~Rrtr() {}
-
- void WithNtpSec(uint32_t sec) {
- rrtr_block_.NTPMostSignificant = sec;
- }
- void WithNtpFrac(uint32_t frac) {
- rrtr_block_.NTPLeastSignificant = frac;
- }
-
- private:
- friend class Xr;
- RTCPUtility::RTCPPacketXRReceiverReferenceTimeItem rrtr_block_;
-
- RTC_DISALLOW_COPY_AND_ASSIGN(Rrtr);
-};
-
-// DLRR Report Block (RFC 3611).
-//
-// 0 1 2 3
-// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | BT=5 | reserved | block length |
-// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
-// | SSRC_1 (SSRC of first receiver) | sub-
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ block
-// | last RR (LRR) | 1
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | delay since last RR (DLRR) |
-// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
-// | SSRC_2 (SSRC of second receiver) | sub-
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ block
-// : ... : 2
-
-class Dlrr {
- public:
- Dlrr() {}
- ~Dlrr() {}
-
- // Max 100 DLRR Items can be added per DLRR report block.
- bool WithDlrrItem(uint32_t ssrc, uint32_t last_rr, uint32_t delay_last_rr);
-
- private:
- friend class Xr;
- static const int kMaxNumberOfDlrrItems = 100;
-
- std::vector<RTCPUtility::RTCPPacketXRDLRRReportBlockItem> dlrr_block_;
-
- RTC_DISALLOW_COPY_AND_ASSIGN(Dlrr);
-};
-
-// VoIP Metrics Report Block (RFC 3611).
-//
-// 0 1 2 3
-// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | BT=7 | reserved | block length = 8 |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | SSRC of source |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | loss rate | discard rate | burst density | gap density |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | burst duration | gap duration |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | round trip delay | end system delay |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | signal level | noise level | RERL | Gmin |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | R factor | ext. R factor | MOS-LQ | MOS-CQ |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | RX config | reserved | JB nominal |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | JB maximum | JB abs max |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-
-class VoipMetric {
- public:
- VoipMetric() {
- memset(&metric_, 0, sizeof(metric_));
- }
- ~VoipMetric() {}
-
- void To(uint32_t ssrc) { metric_.SSRC = ssrc; }
- void LossRate(uint8_t loss_rate) { metric_.lossRate = loss_rate; }
- void DiscardRate(uint8_t discard_rate) { metric_.discardRate = discard_rate; }
- void BurstDensity(uint8_t burst_density) {
- metric_.burstDensity = burst_density;
- }
- void GapDensity(uint8_t gap_density) { metric_.gapDensity = gap_density; }
- void BurstDuration(uint16_t burst_duration) {
- metric_.burstDuration = burst_duration;
- }
- void GapDuration(uint16_t gap_duration) {
- metric_.gapDuration = gap_duration;
- }
- void RoundTripDelay(uint16_t round_trip_delay) {
- metric_.roundTripDelay = round_trip_delay;
- }
- void EndSystemDelay(uint16_t end_system_delay) {
- metric_.endSystemDelay = end_system_delay;
- }
- void SignalLevel(uint8_t signal_level) { metric_.signalLevel = signal_level; }
- void NoiseLevel(uint8_t noise_level) { metric_.noiseLevel = noise_level; }
- void Rerl(uint8_t rerl) { metric_.RERL = rerl; }
- void Gmin(uint8_t gmin) { metric_.Gmin = gmin; }
- void Rfactor(uint8_t rfactor) { metric_.Rfactor = rfactor; }
- void ExtRfactor(uint8_t extrfactor) { metric_.extRfactor = extrfactor; }
- void MosLq(uint8_t moslq) { metric_.MOSLQ = moslq; }
- void MosCq(uint8_t moscq) { metric_.MOSCQ = moscq; }
- void RxConfig(uint8_t rxconfig) { metric_.RXconfig = rxconfig; }
- void JbNominal(uint16_t jbnominal) { metric_.JBnominal = jbnominal; }
- void JbMax(uint16_t jbmax) { metric_.JBmax = jbmax; }
- void JbAbsMax(uint16_t jbabsmax) { metric_.JBabsMax = jbabsmax; }
-
- private:
- friend class Xr;
- RTCPUtility::RTCPPacketXRVOIPMetricItem metric_;
-
- RTC_DISALLOW_COPY_AND_ASSIGN(VoipMetric);
-};
-
// Class holding a RTCP packet.
//
// Takes a built rtcp packet.
@@ -1163,4 +501,4 @@ class RawPacket {
} // namespace rtcp
} // namespace webrtc
-#endif // WEBRTC_MODULES_RTP_RTCP_RTCP_PACKET_H_
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_H_
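Every `Create` implementation kept in rtcp_packet.cc follows the fragmentation contract declared in this header: loop while the block would overflow `max_length`, letting `OnBufferFull` flush the bytes written so far through the `PacketReadyCallback` and rewind `*index`, then serialize. A sketch of the pattern with a hypothetical subclass:

    bool SomePacket::Create(uint8_t* packet,
                            size_t* index,
                            size_t max_length,
                            RtcpPacket::PacketReadyCallback* callback) const {
      while (*index + BlockLength() > max_length) {
        // Presumed behavior: hand packet[0..*index) to the callback and reset
        // *index to 0; returns false when no callback is available.
        if (!OnBufferFull(packet, index, callback))
          return false;
      }
      CreateHeader(0, kPacketType, HeaderLength(), packet, index);
      // ... write the payload at packet + *index, advancing *index ...
      return true;
    }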
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/app.cc b/webrtc/modules/rtp_rtcp/source/rtcp_packet/app.cc
new file mode 100644
index 0000000000..a1ad8d6427
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/app.cc
@@ -0,0 +1,79 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/app.h"
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
+
+using webrtc::RTCPUtility::RtcpCommonHeader;
+
+namespace webrtc {
+namespace rtcp {
+
+// Application-Defined packet (APP) (RFC 3550).
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |V=2|P| subtype | PT=APP=204 | length |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// 0 | SSRC/CSRC |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// 4 | name (ASCII) |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// 8 | application-dependent data ...
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+bool App::Parse(const RtcpCommonHeader& header, const uint8_t* payload) {
+ RTC_DCHECK(header.packet_type == kPacketType);
+
+ sub_type_ = header.count_or_format;
+ ssrc_ = ByteReader<uint32_t>::ReadBigEndian(&payload[0]);
+ name_ = ByteReader<uint32_t>::ReadBigEndian(&payload[4]);
+ data_.SetData(&payload[8], header.payload_size_bytes - 8);
+ return true;
+}
+
+void App::WithSubType(uint8_t subtype) {
+ RTC_DCHECK_LE(subtype, 0x1f);
+ sub_type_ = subtype;
+}
+
+void App::WithData(const uint8_t* data, size_t data_length) {
+ RTC_DCHECK(data);
+ RTC_DCHECK_EQ(0u, data_length % 4) << "Data must be 32 bits aligned.";
+  RTC_DCHECK_LE(data_length, kMaxDataSize) << "App data size " << data_length
+                                           << " exceeds maximum of "
+                                           << kMaxDataSize << " bytes.";
+ data_.SetData(data, data_length);
+}
+
+bool App::Create(uint8_t* packet,
+ size_t* index,
+ size_t max_length,
+ RtcpPacket::PacketReadyCallback* callback) const {
+ while (*index + BlockLength() > max_length) {
+ if (!OnBufferFull(packet, index, callback))
+ return false;
+ }
+ const size_t index_end = *index + BlockLength();
+ CreateHeader(sub_type_, kPacketType, HeaderLength(), packet, index);
+
+ ByteWriter<uint32_t>::WriteBigEndian(&packet[*index + 0], ssrc_);
+ ByteWriter<uint32_t>::WriteBigEndian(&packet[*index + 4], name_);
+ memcpy(&packet[*index + 8], data_.data(), data_.size());
+ *index += (8 + data_.size());
+ RTC_DCHECK_EQ(index_end, *index);
+ return true;
+}
+
+} // namespace rtcp
+} // namespace webrtc
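The arithmetic `App::Create` leans on: `BlockLength()` is 12 bytes (4-byte common header, SSRC, name) plus the data, the data is pre-validated to a multiple of 4, and the RTCP length field counts 32-bit words minus one, matching the `((size_bytes + 3) / 4) - 1` conversion used elsewhere in this patch. A worked round trip for 8 bytes of app data:

    // Build side:  BlockLength() = 12 + 8 = 20 bytes
    //              length field  = 20 / 4 - 1 = 4
    // Parse side:  payload_size_bytes = 4 * length = 16 (common header not
    //              counted), so App::Parse recovers 16 - 8 = 8 bytes of data
    //              from &payload[8], after SSRC at [0] and name at [4].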
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/app.h b/webrtc/modules/rtp_rtcp/source/rtcp_packet/app.h
new file mode 100644
index 0000000000..16bd3fc2a2
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/app.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_APP_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_APP_H_
+
+#include "webrtc/base/buffer.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
+
+namespace webrtc {
+namespace rtcp {
+
+class App : public RtcpPacket {
+ public:
+ static const uint8_t kPacketType = 204;
+  // 28 bytes for IP (20 bytes) and UDP (8 bytes) headers
+  // 12 bytes for RTCP APP header
+ static const size_t kMaxDataSize = IP_PACKET_SIZE - 12 - 28;
+ App() : sub_type_(0), ssrc_(0), name_(0) {}
+
+ virtual ~App() {}
+
+ // Parse assumes header is already parsed and validated.
+ bool Parse(const RTCPUtility::RtcpCommonHeader& header,
+ const uint8_t* payload); // Size of the payload is in the header.
+
+ void From(uint32_t ssrc) { ssrc_ = ssrc; }
+ void WithSubType(uint8_t subtype);
+ void WithName(uint32_t name) { name_ = name; }
+ void WithData(const uint8_t* data, size_t data_length);
+
+ uint8_t sub_type() const { return sub_type_; }
+ uint32_t ssrc() const { return ssrc_; }
+ uint32_t name() const { return name_; }
+ size_t data_size() const { return data_.size(); }
+ const uint8_t* data() const { return data_.data(); }
+
+ protected:
+ bool Create(uint8_t* packet,
+ size_t* index,
+ size_t max_length,
+ RtcpPacket::PacketReadyCallback* callback) const override;
+
+ private:
+ size_t BlockLength() const override { return 12 + data_.size(); }
+
+ uint8_t sub_type_;
+ uint32_t ssrc_;
+ uint32_t name_;
+ rtc::Buffer data_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(App);
+};
+
+} // namespace rtcp
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_APP_H_
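The new class keeps the builder-style setters, so a typical build looks like the following (the SSRC, subtype, and name values are illustrative; `Build()` comes from the RtcpPacket base class and is exercised by the unit test that follows):

    #include "webrtc/modules/rtp_rtcp/source/rtcp_packet/app.h"

    using webrtc::rtcp::App;
    using webrtc::rtcp::RawPacket;

    void BuildAppPacket() {
      App app;
      app.From(0x12345678);      // Sender SSRC.
      app.WithSubType(30);       // 5-bit subtype; RTC_DCHECKed to <= 0x1f.
      app.WithName(0x6e616d65);  // "name" packed big-endian.
      const uint8_t data[] = {'t', 'e', 's', 't'};
      app.WithData(data, sizeof(data));  // Length must be 32-bit aligned.
      rtc::scoped_ptr<RawPacket> packet = app.Build();
      // packet->Buffer() and packet->Length() hold the serialized packet.
    }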
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/app_unittest.cc b/webrtc/modules/rtp_rtcp/source/rtcp_packet/app_unittest.cc
new file mode 100644
index 0000000000..4451fe8fb5
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/app_unittest.cc
@@ -0,0 +1,81 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/app.h"
+
+#include <limits>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
+
+using webrtc::rtcp::App;
+using webrtc::rtcp::RawPacket;
+using webrtc::RTCPUtility::RtcpCommonHeader;
+using webrtc::RTCPUtility::RtcpParseCommonHeader;
+
+namespace webrtc {
+namespace {
+
+const uint32_t kName = ((uint32_t)'n' << 24) | ((uint32_t)'a' << 16) |
+ ((uint32_t)'m' << 8) | (uint32_t)'e';
+const uint32_t kSenderSsrc = 0x12345678;
+
+class RtcpPacketAppTest : public ::testing::Test {
+ protected:
+ void BuildPacket() { packet = app.Build(); }
+ void ParsePacket() {
+ RtcpCommonHeader header;
+ EXPECT_TRUE(
+ RtcpParseCommonHeader(packet->Buffer(), packet->Length(), &header));
+ // Check there is exactly one RTCP packet in the buffer.
+ EXPECT_EQ(header.BlockSize(), packet->Length());
+ EXPECT_TRUE(parsed_.Parse(
+ header, packet->Buffer() + RtcpCommonHeader::kHeaderSizeBytes));
+ }
+
+ App app;
+ rtc::scoped_ptr<RawPacket> packet;
+ const App& parsed() { return parsed_; }
+
+ private:
+ App parsed_;
+};
+
+TEST_F(RtcpPacketAppTest, WithNoData) {
+ app.WithSubType(30);
+ app.WithName(kName);
+
+ BuildPacket();
+ ParsePacket();
+
+ EXPECT_EQ(30U, parsed().sub_type());
+ EXPECT_EQ(kName, parsed().name());
+ EXPECT_EQ(0u, parsed().data_size());
+}
+
+TEST_F(RtcpPacketAppTest, WithData) {
+ app.From(kSenderSsrc);
+ app.WithSubType(30);
+ app.WithName(kName);
+ const uint8_t kData[] = {'t', 'e', 's', 't', 'd', 'a', 't', 'a'};
+ const size_t kDataLength = sizeof(kData) / sizeof(kData[0]);
+ app.WithData(kData, kDataLength);
+
+ BuildPacket();
+ ParsePacket();
+
+ EXPECT_EQ(30U, parsed().sub_type());
+ EXPECT_EQ(kName, parsed().name());
+ EXPECT_EQ(kDataLength, parsed().data_size());
+ EXPECT_EQ(0, memcmp(kData, parsed().data(), kDataLength));
+}
+
+} // namespace
+} // namespace webrtc
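The test assembles the 32-bit APP name by shifting four ASCII characters together; a small constexpr helper (hypothetical, not part of the patch) expressing the same packing:

    constexpr uint32_t FourCC(char a, char b, char c, char d) {
      return (static_cast<uint32_t>(a) << 24) |
             (static_cast<uint32_t>(b) << 16) |
             (static_cast<uint32_t>(c) << 8) |
             static_cast<uint32_t>(d);
    }
    static_assert(FourCC('n', 'a', 'm', 'e') == 0x6e616d65,
                  "same value as kName above");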
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/bye.cc b/webrtc/modules/rtp_rtcp/source/rtcp_packet/bye.cc
new file mode 100644
index 0000000000..4cfc921ce5
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/bye.cc
@@ -0,0 +1,133 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/bye.h"
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
+
+using webrtc::RTCPUtility::RtcpCommonHeader;
+
+namespace webrtc {
+namespace rtcp {
+
+// Bye packet (BYE) (RFC 3550).
+//
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |V=2|P| SC | PT=BYE=203 | length |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SSRC/CSRC |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// : ... :
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+// (opt) | length | reason for leaving ...
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
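+//
+// For example (a sketch mirroring the WithCsrcsAndReason unit test): a BYE
+// from SSRC 0x12345678 with two CSRCs and the 11-byte reason "Some Reason"
+// occupies 4 (header, SC=3) + 12 (three SRCs) + 1 (reason length) + 11
+// (reason) = 28 bytes, so the header length field is 28 / 4 - 1 = 6.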
+Bye::Bye() : sender_ssrc_(0) {}
+
+bool Bye::Parse(const RtcpCommonHeader& header, const uint8_t* payload) {
+ RTC_DCHECK(header.packet_type == kPacketType);
+
+ const uint8_t src_count = header.count_or_format;
+ // Validate packet.
+ if (header.payload_size_bytes < 4u * src_count) {
+ LOG(LS_WARNING)
+ << "Packet is too small to contain CSRCs it promise to have.";
+ return false;
+ }
+ bool has_reason = (header.payload_size_bytes > 4u * src_count);
+ uint8_t reason_length = 0;
+ if (has_reason) {
+ reason_length = payload[4u * src_count];
+ if (header.payload_size_bytes - 4u * src_count < 1u + reason_length) {
+ LOG(LS_WARNING) << "Invalid reason length: " << reason_length;
+ return false;
+ }
+ }
+ // Once sure packet is valid, copy values.
+ if (src_count == 0) { // A count value of zero is valid, but useless.
+ sender_ssrc_ = 0;
+ csrcs_.clear();
+ } else {
+ sender_ssrc_ = ByteReader<uint32_t>::ReadBigEndian(payload);
+ csrcs_.resize(src_count - 1);
+ for (size_t i = 1; i < src_count; ++i)
+ csrcs_[i - 1] = ByteReader<uint32_t>::ReadBigEndian(&payload[4 * i]);
+ }
+
+ if (has_reason) {
+ reason_.assign(reinterpret_cast<const char*>(&payload[4u * src_count + 1]),
+ reason_length);
+ } else {
+ reason_.clear();
+ }
+
+ return true;
+}
+
+bool Bye::Create(uint8_t* packet,
+ size_t* index,
+ size_t max_length,
+ RtcpPacket::PacketReadyCallback* callback) const {
+ while (*index + BlockLength() > max_length) {
+ if (!OnBufferFull(packet, index, callback))
+ return false;
+ }
+ const size_t index_end = *index + BlockLength();
+
+ CreateHeader(1 + csrcs_.size(), kPacketType, HeaderLength(), packet, index);
+  // Store the SSRC and CSRCs of the leaving clients.
+ ByteWriter<uint32_t>::WriteBigEndian(&packet[*index], sender_ssrc_);
+ *index += sizeof(uint32_t);
+ for (uint32_t csrc : csrcs_) {
+ ByteWriter<uint32_t>::WriteBigEndian(&packet[*index], csrc);
+ *index += sizeof(uint32_t);
+ }
+ // Store the reason to leave.
+ if (!reason_.empty()) {
+ uint8_t reason_length = reason_.size();
+ packet[(*index)++] = reason_length;
+ memcpy(&packet[*index], reason_.data(), reason_length);
+ *index += reason_length;
+ // Add padding bytes if needed.
+ size_t bytes_to_pad = index_end - *index;
+ RTC_DCHECK_LE(bytes_to_pad, 3u);
+ if (bytes_to_pad > 0) {
+ memset(&packet[*index], 0, bytes_to_pad);
+ *index += bytes_to_pad;
+ }
+ }
+ RTC_DCHECK_EQ(index_end, *index);
+ return true;
+}
+
+bool Bye::WithCsrc(uint32_t csrc) {
+ if (csrcs_.size() >= kMaxNumberOfCsrcs) {
+ LOG(LS_WARNING) << "Max CSRC size reached.";
+ return false;
+ }
+ csrcs_.push_back(csrc);
+ return true;
+}
+
+void Bye::WithReason(const std::string& reason) {
+ RTC_DCHECK_LE(reason.size(), 0xffu);
+ reason_ = reason;
+}
+
+size_t Bye::BlockLength() const {
+ size_t src_count = (1 + csrcs_.size());
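+  // The reason block is one length byte plus the reason itself, rounded up to
+  // whole 32-bit words: e.g. an 11-byte reason needs (1 + 11) / 4 = 3 words,
+  // which reason_.size() / 4 + 1 = 11 / 4 + 1 = 3 reproduces.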
+ size_t reason_size_in_32bits = reason_.empty() ? 0 : (reason_.size() / 4 + 1);
+ return kHeaderLength + 4 * (src_count + reason_size_in_32bits);
+}
+
+} // namespace rtcp
+} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/bye.h b/webrtc/modules/rtp_rtcp/source/rtcp_packet/bye.h
new file mode 100644
index 0000000000..6b4a181330
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/bye.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_BYE_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_BYE_H_
+
+#include <string>
+#include <vector>
+
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
+
+namespace webrtc {
+namespace rtcp {
+
+class Bye : public RtcpPacket {
+ public:
+ static const uint8_t kPacketType = 203;
+
+ Bye();
+ virtual ~Bye() {}
+
+ // Parse assumes header is already parsed and validated.
+ bool Parse(const RTCPUtility::RtcpCommonHeader& header,
+ const uint8_t* payload); // Size of the payload is in the header.
+
+ void From(uint32_t ssrc) { sender_ssrc_ = ssrc; }
+ bool WithCsrc(uint32_t csrc);
+ void WithReason(const std::string& reason);
+
+ uint32_t sender_ssrc() const { return sender_ssrc_; }
+ const std::vector<uint32_t>& csrcs() const { return csrcs_; }
+ const std::string& reason() const { return reason_; }
+
+ protected:
+ bool Create(uint8_t* packet,
+ size_t* index,
+ size_t max_length,
+ RtcpPacket::PacketReadyCallback* callback) const override;
+
+ private:
+ static const int kMaxNumberOfCsrcs = 0x1f - 1; // First item is sender SSRC.
+
+ size_t BlockLength() const override;
+
+ uint32_t sender_ssrc_;
+ std::vector<uint32_t> csrcs_;
+ std::string reason_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(Bye);
+};
+
+} // namespace rtcp
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_BYE_H_
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/bye_unittest.cc b/webrtc/modules/rtp_rtcp/source/rtcp_packet/bye_unittest.cc
new file mode 100644
index 0000000000..d2ae8ed782
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/bye_unittest.cc
@@ -0,0 +1,173 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/bye.h"
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
+
+using ::testing::ElementsAre;
+
+using webrtc::rtcp::Bye;
+using webrtc::rtcp::RawPacket;
+using webrtc::RTCPUtility::RtcpCommonHeader;
+using webrtc::RTCPUtility::RtcpParseCommonHeader;
+
+namespace webrtc {
+namespace {
+
+const uint32_t kSenderSsrc = 0x12345678;
+const uint32_t kCsrc1 = 0x22232425;
+const uint32_t kCsrc2 = 0x33343536;
+
+class RtcpPacketByeTest : public ::testing::Test {
+ protected:
+ void BuildPacket() { packet = bye.Build(); }
+ void ParsePacket() {
+ RtcpCommonHeader header;
+ EXPECT_TRUE(
+ RtcpParseCommonHeader(packet->Buffer(), packet->Length(), &header));
+ // Check that there is exactly one RTCP packet in the buffer.
+ EXPECT_EQ(header.BlockSize(), packet->Length());
+ EXPECT_TRUE(parsed_bye.Parse(
+ header, packet->Buffer() + RtcpCommonHeader::kHeaderSizeBytes));
+ }
+
+ Bye bye;
+ rtc::scoped_ptr<RawPacket> packet;
+ Bye parsed_bye;
+};
+
+TEST_F(RtcpPacketByeTest, Bye) {
+ bye.From(kSenderSsrc);
+
+ BuildPacket();
+ ParsePacket();
+
+ EXPECT_EQ(kSenderSsrc, parsed_bye.sender_ssrc());
+ EXPECT_TRUE(parsed_bye.csrcs().empty());
+ EXPECT_TRUE(parsed_bye.reason().empty());
+}
+
+TEST_F(RtcpPacketByeTest, WithCsrcs) {
+ bye.From(kSenderSsrc);
+ EXPECT_TRUE(bye.WithCsrc(kCsrc1));
+ EXPECT_TRUE(bye.WithCsrc(kCsrc2));
+ EXPECT_TRUE(bye.reason().empty());
+
+ BuildPacket();
+ EXPECT_EQ(16u, packet->Length()); // Header: 4, 3xSRCs: 12, Reason: 0.
+
+ ParsePacket();
+
+ EXPECT_EQ(kSenderSsrc, parsed_bye.sender_ssrc());
+ EXPECT_THAT(parsed_bye.csrcs(), ElementsAre(kCsrc1, kCsrc2));
+ EXPECT_TRUE(parsed_bye.reason().empty());
+}
+
+TEST_F(RtcpPacketByeTest, WithCsrcsAndReason) {
+ const std::string kReason = "Some Reason";
+
+ bye.From(kSenderSsrc);
+ EXPECT_TRUE(bye.WithCsrc(kCsrc1));
+ EXPECT_TRUE(bye.WithCsrc(kCsrc2));
+ bye.WithReason(kReason);
+
+ BuildPacket();
+ EXPECT_EQ(28u, packet->Length()); // Header: 4, 3xSRCs: 12, Reason: 12.
+
+ ParsePacket();
+
+ EXPECT_EQ(kSenderSsrc, parsed_bye.sender_ssrc());
+ EXPECT_THAT(parsed_bye.csrcs(), ElementsAre(kCsrc1, kCsrc2));
+ EXPECT_EQ(kReason, parsed_bye.reason());
+}
+
+TEST_F(RtcpPacketByeTest, WithTooManyCsrcs) {
+ bye.From(kSenderSsrc);
+  const int kMaxCsrcs = (1 << 5) - 2; // 5-bit count; first item is the sender SSRC.
+ for (int i = 0; i < kMaxCsrcs; ++i) {
+ EXPECT_TRUE(bye.WithCsrc(i));
+ }
+ EXPECT_FALSE(bye.WithCsrc(kMaxCsrcs));
+}
+
+TEST_F(RtcpPacketByeTest, WithAReason) {
+ const std::string kReason = "Some Random Reason";
+
+ bye.From(kSenderSsrc);
+ bye.WithReason(kReason);
+
+ BuildPacket();
+ ParsePacket();
+
+ EXPECT_EQ(kSenderSsrc, parsed_bye.sender_ssrc());
+ EXPECT_TRUE(parsed_bye.csrcs().empty());
+ EXPECT_EQ(kReason, parsed_bye.reason());
+}
+
+TEST_F(RtcpPacketByeTest, WithReasons) {
+  // Test that packet creation/parsing behaves with reasons of different
+  // lengths, both when padding is required and when it is not.
+  for (size_t remainder = 0; remainder < 4; ++remainder) {
+    const std::string kReason(4 + remainder, 'a' + remainder);
+ bye.From(kSenderSsrc);
+ bye.WithReason(kReason);
+
+ BuildPacket();
+ ParsePacket();
+
+ EXPECT_EQ(kReason, parsed_bye.reason());
+ }
+}
+
+TEST_F(RtcpPacketByeTest, ParseEmptyPacket) {
+ RtcpCommonHeader header;
+ header.packet_type = Bye::kPacketType;
+ header.count_or_format = 0;
+ header.payload_size_bytes = 0;
+ uint8_t empty_payload[1];
+
+ EXPECT_TRUE(parsed_bye.Parse(header, empty_payload + 1));
+ EXPECT_EQ(0u, parsed_bye.sender_ssrc());
+ EXPECT_TRUE(parsed_bye.csrcs().empty());
+ EXPECT_TRUE(parsed_bye.reason().empty());
+}
+
+TEST_F(RtcpPacketByeTest, ParseFailOnInvalidSrcCount) {
+ bye.From(kSenderSsrc);
+
+ BuildPacket();
+
+ RtcpCommonHeader header;
+ RtcpParseCommonHeader(packet->Buffer(), packet->Length(), &header);
+  header.count_or_format = 2; // Pretend there are two SSRCs, not one.
+
+ EXPECT_FALSE(parsed_bye.Parse(
+ header, packet->Buffer() + RtcpCommonHeader::kHeaderSizeBytes));
+}
+
+TEST_F(RtcpPacketByeTest, ParseFailOnInvalidReasonLength) {
+ bye.From(kSenderSsrc);
+ bye.WithReason("18 characters long");
+
+ BuildPacket();
+
+ RtcpCommonHeader header;
+ RtcpParseCommonHeader(packet->Buffer(), packet->Length(), &header);
+  // Shrink the 32-bit-aligned payload so the declared reason no longer fits.
+  header.payload_size_bytes -= 4;
+
+ EXPECT_FALSE(parsed_bye.Parse(
+ header, packet->Buffer() + RtcpCommonHeader::kHeaderSizeBytes));
+}
+
+} // namespace
+} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet.cc b/webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet.cc
new file mode 100644
index 0000000000..8f5afd5dd1
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet.cc
@@ -0,0 +1,28 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet.h"
+
+namespace webrtc {
+namespace rtcp {
+
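+// A CompoundPacket carries no payload of its own: Create() writes nothing and
+// BlockLength() is zero, so a built packet consists entirely of the packets
+// appended to it with Append() (see the unit tests for usage).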
+bool CompoundPacket::Create(uint8_t* packet,
+ size_t* index,
+ size_t max_length,
+ RtcpPacket::PacketReadyCallback* callback) const {
+ return true;
+}
+
+size_t CompoundPacket::BlockLength() const {
+ return 0;
+}
+
+} // namespace rtcp
+} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet.h b/webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet.h
new file mode 100644
index 0000000000..f2f49a8ffb
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_COMPOUND_PACKET_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_COMPOUND_PACKET_H_
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
+
+namespace webrtc {
+namespace rtcp {
+
+class CompoundPacket : public RtcpPacket {
+ public:
+ CompoundPacket() : RtcpPacket() {}
+
+ virtual ~CompoundPacket() {}
+
+ protected:
+ bool Create(uint8_t* packet,
+ size_t* index,
+ size_t max_length,
+ RtcpPacket::PacketReadyCallback* callback) const override;
+
+ size_t BlockLength() const override;
+
+ private:
+ RTC_DISALLOW_COPY_AND_ASSIGN(CompoundPacket);
+};
+
+} // namespace rtcp
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_COMPOUND_PACKET_H_
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet_unittest.cc b/webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet_unittest.cc
new file mode 100644
index 0000000000..83dc5f6ed3
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet_unittest.cc
@@ -0,0 +1,157 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet.h"
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/bye.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/receiver_report.h"
+#include "webrtc/test/rtcp_packet_parser.h"
+
+using webrtc::rtcp::Bye;
+using webrtc::rtcp::CompoundPacket;
+using webrtc::rtcp::Fir;
+using webrtc::rtcp::RawPacket;
+using webrtc::rtcp::ReceiverReport;
+using webrtc::rtcp::ReportBlock;
+using webrtc::rtcp::SenderReport;
+using webrtc::test::RtcpPacketParser;
+
+namespace webrtc {
+
+const uint32_t kSenderSsrc = 0x12345678;
+
+TEST(RtcpCompoundPacketTest, AppendPacket) {
+ Fir fir;
+ ReportBlock rb;
+ ReceiverReport rr;
+ rr.From(kSenderSsrc);
+ EXPECT_TRUE(rr.WithReportBlock(rb));
+ rr.Append(&fir);
+
+ rtc::scoped_ptr<RawPacket> packet(rr.Build());
+ RtcpPacketParser parser;
+ parser.Parse(packet->Buffer(), packet->Length());
+ EXPECT_EQ(1, parser.receiver_report()->num_packets());
+ EXPECT_EQ(kSenderSsrc, parser.receiver_report()->Ssrc());
+ EXPECT_EQ(1, parser.report_block()->num_packets());
+ EXPECT_EQ(1, parser.fir()->num_packets());
+}
+
+TEST(RtcpCompoundPacketTest, AppendPacketOnEmpty) {
+ CompoundPacket empty;
+ ReceiverReport rr;
+ rr.From(kSenderSsrc);
+ empty.Append(&rr);
+
+ rtc::scoped_ptr<RawPacket> packet(empty.Build());
+ RtcpPacketParser parser;
+ parser.Parse(packet->Buffer(), packet->Length());
+ EXPECT_EQ(1, parser.receiver_report()->num_packets());
+ EXPECT_EQ(0, parser.report_block()->num_packets());
+}
+
+TEST(RtcpCompoundPacketTest, AppendPacketWithOwnAppendedPacket) {
+ Fir fir;
+ Bye bye;
+ ReportBlock rb;
+
+ ReceiverReport rr;
+ EXPECT_TRUE(rr.WithReportBlock(rb));
+ rr.Append(&fir);
+
+ SenderReport sr;
+ sr.Append(&bye);
+ sr.Append(&rr);
+
+ rtc::scoped_ptr<RawPacket> packet(sr.Build());
+ RtcpPacketParser parser;
+ parser.Parse(packet->Buffer(), packet->Length());
+ EXPECT_EQ(1, parser.sender_report()->num_packets());
+ EXPECT_EQ(1, parser.receiver_report()->num_packets());
+ EXPECT_EQ(1, parser.report_block()->num_packets());
+ EXPECT_EQ(1, parser.bye()->num_packets());
+ EXPECT_EQ(1, parser.fir()->num_packets());
+}
+
+TEST(RtcpCompoundPacketTest, BuildWithInputBuffer) {
+ Fir fir;
+ ReportBlock rb;
+ ReceiverReport rr;
+ rr.From(kSenderSsrc);
+ EXPECT_TRUE(rr.WithReportBlock(rb));
+ rr.Append(&fir);
+
+ const size_t kRrLength = 8;
+ const size_t kReportBlockLength = 24;
+ const size_t kFirLength = 20;
+
+ class Verifier : public rtcp::RtcpPacket::PacketReadyCallback {
+ public:
+ void OnPacketReady(uint8_t* data, size_t length) override {
+ RtcpPacketParser parser;
+ parser.Parse(data, length);
+ EXPECT_EQ(1, parser.receiver_report()->num_packets());
+ EXPECT_EQ(1, parser.report_block()->num_packets());
+ EXPECT_EQ(1, parser.fir()->num_packets());
+ ++packets_created_;
+ }
+
+ int packets_created_ = 0;
+ } verifier;
+ const size_t kBufferSize = kRrLength + kReportBlockLength + kFirLength;
+ uint8_t buffer[kBufferSize];
+ EXPECT_TRUE(rr.BuildExternalBuffer(buffer, kBufferSize, &verifier));
+ EXPECT_EQ(1, verifier.packets_created_);
+}
+
+TEST(RtcpCompoundPacketTest, BuildWithTooSmallBuffer_FragmentedSend) {
+ Fir fir;
+ ReportBlock rb;
+ ReceiverReport rr;
+ rr.From(kSenderSsrc);
+ EXPECT_TRUE(rr.WithReportBlock(rb));
+ rr.Append(&fir);
+
+ const size_t kRrLength = 8;
+ const size_t kReportBlockLength = 24;
+
+ class Verifier : public rtcp::RtcpPacket::PacketReadyCallback {
+ public:
+ void OnPacketReady(uint8_t* data, size_t length) override {
+ RtcpPacketParser parser;
+ parser.Parse(data, length);
+ switch (packets_created_++) {
+ case 0:
+ EXPECT_EQ(1, parser.receiver_report()->num_packets());
+ EXPECT_EQ(1, parser.report_block()->num_packets());
+ EXPECT_EQ(0, parser.fir()->num_packets());
+ break;
+ case 1:
+ EXPECT_EQ(0, parser.receiver_report()->num_packets());
+ EXPECT_EQ(0, parser.report_block()->num_packets());
+ EXPECT_EQ(1, parser.fir()->num_packets());
+ break;
+ default:
+ ADD_FAILURE() << "OnPacketReady not expected to be called "
+ << packets_created_ << " times.";
+ }
+ }
+
+ int packets_created_ = 0;
+ } verifier;
+ const size_t kBufferSize = kRrLength + kReportBlockLength;
+ uint8_t buffer[kBufferSize];
+ EXPECT_TRUE(rr.BuildExternalBuffer(buffer, kBufferSize, &verifier));
+ EXPECT_EQ(2, verifier.packets_created_);
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/dlrr.cc b/webrtc/modules/rtp_rtcp/source/rtcp_packet/dlrr.cc
new file mode 100644
index 0000000000..6d6c48fada
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/dlrr.cc
@@ -0,0 +1,100 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/dlrr.h"
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
+
+namespace webrtc {
+namespace rtcp {
+// DLRR Report Block (RFC 3611).
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | BT=5 | reserved | block length |
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+// | SSRC_1 (SSRC of first receiver) | sub-
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ block
+// | last RR (LRR) | 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | delay since last RR (DLRR) |
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+// | SSRC_2 (SSRC of second receiver) | sub-
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ block
+// : ... : 2
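+//
+// For example (matching kBlock in the unit test): a single sub-block
+// serializes to 05 00 00 03 | 12 34 56 78 | 23 34 45 56 | 33 34 35 36, i.e.
+// BT=5, a reserved byte, block length = 3 (in 32-bit words), then the SSRC,
+// last RR and delay since last RR of that sub-block.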
+bool Dlrr::Parse(const uint8_t* buffer, uint16_t block_length_32bits) {
+ RTC_DCHECK(buffer[0] == kBlockType);
+ // kReserved = buffer[1];
+ RTC_DCHECK_EQ(block_length_32bits,
+ ByteReader<uint16_t>::ReadBigEndian(&buffer[2]));
+ if (block_length_32bits % 3 != 0) {
+ LOG(LS_WARNING) << "Invalid size for dlrr block.";
+ return false;
+ }
+
+ size_t blocks_count = block_length_32bits / 3;
+ const uint8_t* read_at = buffer + kBlockHeaderLength;
+ sub_blocks_.resize(blocks_count);
+ for (SubBlock& sub_block : sub_blocks_) {
+ sub_block.ssrc = ByteReader<uint32_t>::ReadBigEndian(&read_at[0]);
+ sub_block.last_rr = ByteReader<uint32_t>::ReadBigEndian(&read_at[4]);
+ sub_block.delay_since_last_rr =
+ ByteReader<uint32_t>::ReadBigEndian(&read_at[8]);
+ read_at += kSubBlockLength;
+ }
+ return true;
+}
+
+size_t Dlrr::BlockLength() const {
+ if (sub_blocks_.empty())
+ return 0;
+ return kBlockHeaderLength + kSubBlockLength * sub_blocks_.size();
+}
+
+void Dlrr::Create(uint8_t* buffer) const {
+  if (sub_blocks_.empty()) // No sub-blocks, so no header to write either.
+ return;
+ // Create block header.
+ const uint8_t kReserved = 0;
+ buffer[0] = kBlockType;
+ buffer[1] = kReserved;
+ ByteWriter<uint16_t>::WriteBigEndian(&buffer[2], 3 * sub_blocks_.size());
+ // Create sub blocks.
+ uint8_t* write_at = buffer + kBlockHeaderLength;
+ for (const SubBlock& sub_block : sub_blocks_) {
+ ByteWriter<uint32_t>::WriteBigEndian(&write_at[0], sub_block.ssrc);
+ ByteWriter<uint32_t>::WriteBigEndian(&write_at[4], sub_block.last_rr);
+ ByteWriter<uint32_t>::WriteBigEndian(&write_at[8],
+ sub_block.delay_since_last_rr);
+ write_at += kSubBlockLength;
+ }
+ RTC_DCHECK_EQ(buffer + BlockLength(), write_at);
+}
+
+bool Dlrr::WithDlrrItem(uint32_t ssrc,
+ uint32_t last_rr,
+ uint32_t delay_last_rr) {
+ if (sub_blocks_.size() >= kMaxNumberOfDlrrItems) {
+ LOG(LS_WARNING) << "Max DLRR items reached.";
+ return false;
+ }
+ SubBlock block;
+ block.ssrc = ssrc;
+ block.last_rr = last_rr;
+ block.delay_since_last_rr = delay_last_rr;
+ sub_blocks_.push_back(block);
+ return true;
+}
+
+} // namespace rtcp
+} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/dlrr.h b/webrtc/modules/rtp_rtcp/source/rtcp_packet/dlrr.h
new file mode 100644
index 0000000000..9af2dedf3f
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/dlrr.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_DLRR_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_DLRR_H_
+
+#include <vector>
+
+#include "webrtc/base/basictypes.h"
+
+namespace webrtc {
+namespace rtcp {
+
+// DLRR Report Block: Delay since the Last Receiver Report (RFC 3611).
+class Dlrr {
+ public:
+ struct SubBlock {
+ // RFC 3611 4.5
+ uint32_t ssrc;
+ uint32_t last_rr;
+ uint32_t delay_since_last_rr;
+ };
+
+ static const uint8_t kBlockType = 5;
+ static const size_t kMaxNumberOfDlrrItems = 100;
+
+ Dlrr() {}
+ Dlrr(const Dlrr& other) = default;
+ ~Dlrr() {}
+
+ Dlrr& operator=(const Dlrr& other) = default;
+
+  // The second parameter is the value read from the block header, i.e. the
+  // size of the block in 32-bit words, excluding the block header itself.
+ bool Parse(const uint8_t* buffer, uint16_t block_length_32bits);
+
+ size_t BlockLength() const;
+ // Fills buffer with the Dlrr.
+ // Consumes BlockLength() bytes.
+ void Create(uint8_t* buffer) const;
+
+ // Max 100 DLRR Items can be added per DLRR report block.
+ bool WithDlrrItem(uint32_t ssrc, uint32_t last_rr, uint32_t delay_last_rr);
+
+ const std::vector<SubBlock>& sub_blocks() const { return sub_blocks_; }
+
+ private:
+ static const size_t kBlockHeaderLength = 4;
+ static const size_t kSubBlockLength = 12;
+
+ std::vector<SubBlock> sub_blocks_;
+};
+} // namespace rtcp
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_DLRR_H_
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/dlrr_unittest.cc b/webrtc/modules/rtp_rtcp/source/rtcp_packet/dlrr_unittest.cc
new file mode 100644
index 0000000000..c7c139c560
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/dlrr_unittest.cc
@@ -0,0 +1,102 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/dlrr.h"
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
+
+using webrtc::rtcp::Dlrr;
+
+namespace webrtc {
+namespace {
+
+const uint32_t kSsrc = 0x12345678;
+const uint32_t kLastRR = 0x23344556;
+const uint32_t kDelay = 0x33343536;
+const uint8_t kBlock[] = {0x05, 0x00, 0x00, 0x03, 0x12, 0x34, 0x56, 0x78,
+ 0x23, 0x34, 0x45, 0x56, 0x33, 0x34, 0x35, 0x36};
+const size_t kBlockSizeBytes = sizeof(kBlock);
+
+TEST(RtcpPacketDlrrTest, Empty) {
+ Dlrr dlrr;
+
+ EXPECT_EQ(0u, dlrr.BlockLength());
+}
+
+TEST(RtcpPacketDlrrTest, Create) {
+ Dlrr dlrr;
+ EXPECT_TRUE(dlrr.WithDlrrItem(kSsrc, kLastRR, kDelay));
+
+ ASSERT_EQ(kBlockSizeBytes, dlrr.BlockLength());
+ uint8_t buffer[kBlockSizeBytes];
+
+ dlrr.Create(buffer);
+ EXPECT_EQ(0, memcmp(buffer, kBlock, kBlockSizeBytes));
+}
+
+TEST(RtcpPacketDlrrTest, Parse) {
+ Dlrr dlrr;
+ uint16_t block_length = ByteReader<uint16_t>::ReadBigEndian(&kBlock[2]);
+ EXPECT_TRUE(dlrr.Parse(kBlock, block_length));
+
+ EXPECT_EQ(1u, dlrr.sub_blocks().size());
+ const Dlrr::SubBlock& block = dlrr.sub_blocks().front();
+ EXPECT_EQ(kSsrc, block.ssrc);
+ EXPECT_EQ(kLastRR, block.last_rr);
+ EXPECT_EQ(kDelay, block.delay_since_last_rr);
+}
+
+TEST(RtcpPacketDlrrTest, ParseFailsOnBadSize) {
+ const size_t kBigBufferSize = 0x100; // More than enough.
+ uint8_t buffer[kBigBufferSize];
+ buffer[0] = Dlrr::kBlockType;
+ buffer[1] = 0; // Reserved.
+ buffer[2] = 0; // Most significant size byte.
+ for (uint8_t size = 3; size < 6; ++size) {
+ buffer[3] = size;
+ Dlrr dlrr;
+    // Parse should succeed only when the size is a multiple of 3.
+ EXPECT_EQ(size % 3 == 0, dlrr.Parse(buffer, static_cast<uint16_t>(size)));
+ }
+}
+
+TEST(RtcpPacketDlrrTest, FailsOnTooManySubBlocks) {
+ Dlrr dlrr;
+ for (size_t i = 1; i <= Dlrr::kMaxNumberOfDlrrItems; ++i) {
+ EXPECT_TRUE(dlrr.WithDlrrItem(kSsrc + i, kLastRR + i, kDelay + i));
+ }
+ EXPECT_FALSE(dlrr.WithDlrrItem(kSsrc, kLastRR, kDelay));
+}
+
+TEST(RtcpPacketDlrrTest, CreateAndParseMaxSubBlocks) {
+ const size_t kBufferSize = 0x1000; // More than enough.
+ uint8_t buffer[kBufferSize];
+
+ // Create.
+ Dlrr dlrr;
+ for (size_t i = 1; i <= Dlrr::kMaxNumberOfDlrrItems; ++i) {
+ EXPECT_TRUE(dlrr.WithDlrrItem(kSsrc + i, kLastRR + i, kDelay + i));
+ }
+ size_t used_buffer_size = dlrr.BlockLength();
+ ASSERT_LE(used_buffer_size, kBufferSize);
+ dlrr.Create(buffer);
+
+ // Parse.
+ Dlrr parsed;
+ uint16_t block_length = ByteReader<uint16_t>::ReadBigEndian(&buffer[2]);
+ EXPECT_EQ(used_buffer_size, (block_length + 1) * 4u);
+ EXPECT_TRUE(parsed.Parse(buffer, block_length));
+ EXPECT_TRUE(parsed.sub_blocks().size() == Dlrr::kMaxNumberOfDlrrItems);
+}
+
+} // namespace
+} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report.cc b/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report.cc
new file mode 100644
index 0000000000..030f9f81fa
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report.cc
@@ -0,0 +1,95 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report.h"
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
+#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
+
+using webrtc::RTCPUtility::RtcpCommonHeader;
+
+namespace webrtc {
+namespace rtcp {
+
+// Transmission Time Offsets in RTP Streams (RFC 5450).
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// hdr |V=2|P| RC | PT=IJ=195 | length |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | inter-arrival jitter |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// . .
+// . .
+// . .
+// | inter-arrival jitter |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+//
+// If present, this RTCP packet must be placed after a receiver report
+// (inside a compound RTCP packet), and MUST have the same value for RC
+// (reception report count) as the receiver report.
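+//
+// For example (consistent with the unit tests below): a report carrying a
+// single jitter value is 8 bytes in total, with RC = 1 and the length field
+// also 1 (one 32-bit word following the common header).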
+
+bool ExtendedJitterReport::Parse(const RtcpCommonHeader& header,
+ const uint8_t* payload) {
+ RTC_DCHECK(header.packet_type == kPacketType);
+
+ const uint8_t jitters_count = header.count_or_format;
+ const size_t kJitterSizeBytes = 4u;
+
+ if (header.payload_size_bytes < jitters_count * kJitterSizeBytes) {
+ LOG(LS_WARNING) << "Packet is too small to contain all the jitter.";
+ return false;
+ }
+
+ inter_arrival_jitters_.resize(jitters_count);
+ for (size_t index = 0; index < jitters_count; ++index) {
+ inter_arrival_jitters_[index] =
+ ByteReader<uint32_t>::ReadBigEndian(&payload[index * kJitterSizeBytes]);
+ }
+
+ return true;
+}
+
+bool ExtendedJitterReport::WithJitter(uint32_t jitter) {
+ if (inter_arrival_jitters_.size() >= kMaxNumberOfJitters) {
+ LOG(LS_WARNING) << "Max inter-arrival jitter items reached.";
+ return false;
+ }
+ inter_arrival_jitters_.push_back(jitter);
+ return true;
+}
+
+bool ExtendedJitterReport::Create(
+ uint8_t* packet,
+ size_t* index,
+ size_t max_length,
+ RtcpPacket::PacketReadyCallback* callback) const {
+ while (*index + BlockLength() > max_length) {
+ if (!OnBufferFull(packet, index, callback))
+ return false;
+ }
+ const size_t index_end = *index + BlockLength();
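+  // Both the RC field and the length field equal the number of jitter values:
+  // the packet is (1 + count) 32-bit words, and the length field is one less.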
+ size_t length = inter_arrival_jitters_.size();
+ CreateHeader(length, kPacketType, length, packet, index);
+
+ for (uint32_t jitter : inter_arrival_jitters_) {
+ ByteWriter<uint32_t>::WriteBigEndian(packet + *index, jitter);
+ *index += sizeof(uint32_t);
+ }
+ // Sanity check.
+ RTC_DCHECK_EQ(index_end, *index);
+ return true;
+}
+
+} // namespace rtcp
+} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report.h b/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report.h
new file mode 100644
index 0000000000..49de7be1a8
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_EXTENDED_JITTER_REPORT_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_EXTENDED_JITTER_REPORT_H_
+
+#include <vector>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
+
+namespace webrtc {
+namespace rtcp {
+
+class ExtendedJitterReport : public RtcpPacket {
+ public:
+ static const uint8_t kPacketType = 195;
+
+ ExtendedJitterReport() : RtcpPacket() {}
+
+ virtual ~ExtendedJitterReport() {}
+
+ // Parse assumes header is already parsed and validated.
+ bool Parse(const RTCPUtility::RtcpCommonHeader& header,
+ const uint8_t* payload); // Size of the payload is in the header.
+
+ bool WithJitter(uint32_t jitter);
+
+ size_t jitters_count() const { return inter_arrival_jitters_.size(); }
+ uint32_t jitter(size_t index) const {
+ RTC_DCHECK_LT(index, jitters_count());
+ return inter_arrival_jitters_[index];
+ }
+
+ protected:
+ bool Create(uint8_t* packet,
+ size_t* index,
+ size_t max_length,
+ RtcpPacket::PacketReadyCallback* callback) const override;
+
+ private:
+ static const int kMaxNumberOfJitters = 0x1f;
+
+ size_t BlockLength() const override {
+ return kHeaderLength + 4 * inter_arrival_jitters_.size();
+ }
+
+ std::vector<uint32_t> inter_arrival_jitters_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(ExtendedJitterReport);
+};
+
+} // namespace rtcp
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_EXTENDED_JITTER_REPORT_H_
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report_unittest.cc b/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report_unittest.cc
new file mode 100644
index 0000000000..09d7b6305f
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report_unittest.cc
@@ -0,0 +1,98 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report.h"
+
+#include <limits>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
+
+using webrtc::rtcp::RawPacket;
+using webrtc::rtcp::ExtendedJitterReport;
+using webrtc::RTCPUtility::RtcpCommonHeader;
+using webrtc::RTCPUtility::RtcpParseCommonHeader;
+
+namespace webrtc {
+namespace {
+
+class RtcpPacketExtendedJitterReportTest : public ::testing::Test {
+ protected:
+ void BuildPacket() { packet = ij.Build(); }
+ void ParsePacket() {
+ RtcpCommonHeader header;
+ EXPECT_TRUE(
+ RtcpParseCommonHeader(packet->Buffer(), packet->Length(), &header));
+ EXPECT_EQ(header.BlockSize(), packet->Length());
+ EXPECT_TRUE(parsed_.Parse(
+ header, packet->Buffer() + RtcpCommonHeader::kHeaderSizeBytes));
+ }
+
+ ExtendedJitterReport ij;
+ rtc::scoped_ptr<RawPacket> packet;
+ const ExtendedJitterReport& parsed() { return parsed_; }
+
+ private:
+ ExtendedJitterReport parsed_;
+};
+
+TEST_F(RtcpPacketExtendedJitterReportTest, NoItem) {
+ // No initialization because packet is empty.
+ BuildPacket();
+ ParsePacket();
+
+ EXPECT_EQ(0u, parsed().jitters_count());
+}
+
+TEST_F(RtcpPacketExtendedJitterReportTest, OneItem) {
+ EXPECT_TRUE(ij.WithJitter(0x11121314));
+
+ BuildPacket();
+ ParsePacket();
+
+ EXPECT_EQ(1u, parsed().jitters_count());
+ EXPECT_EQ(0x11121314U, parsed().jitter(0));
+}
+
+TEST_F(RtcpPacketExtendedJitterReportTest, TwoItems) {
+ EXPECT_TRUE(ij.WithJitter(0x11121418));
+ EXPECT_TRUE(ij.WithJitter(0x22242628));
+
+ BuildPacket();
+ ParsePacket();
+
+ EXPECT_EQ(2u, parsed().jitters_count());
+ EXPECT_EQ(0x11121418U, parsed().jitter(0));
+ EXPECT_EQ(0x22242628U, parsed().jitter(1));
+}
+
+TEST_F(RtcpPacketExtendedJitterReportTest, TooManyItems) {
+ const int kMaxIjItems = (1 << 5) - 1;
+ for (int i = 0; i < kMaxIjItems; ++i) {
+ EXPECT_TRUE(ij.WithJitter(i));
+ }
+ EXPECT_FALSE(ij.WithJitter(kMaxIjItems));
+}
+
+TEST_F(RtcpPacketExtendedJitterReportTest, ParseFailWithTooManyItems) {
+ ij.WithJitter(0x11121418);
+ BuildPacket();
+ RtcpCommonHeader header;
+ RtcpParseCommonHeader(packet->Buffer(), packet->Length(), &header);
+  header.count_or_format++; // Damage the packet.
+
+ ExtendedJitterReport parsed;
+
+ EXPECT_FALSE(parsed.Parse(
+ header, packet->Buffer() + RtcpCommonHeader::kHeaderSizeBytes));
+}
+
+} // namespace
+} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/nack.cc b/webrtc/modules/rtp_rtcp/source/rtcp_packet/nack.cc
new file mode 100644
index 0000000000..8b9b354a06
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/nack.cc
@@ -0,0 +1,163 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/nack.h"
+
+#include <algorithm>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
+
+using webrtc::RTCPUtility::RtcpCommonHeader;
+
+namespace webrtc {
+namespace rtcp {
+
+// RFC 4585: Feedback format.
+//
+// Common packet format:
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |V=2|P| FMT | PT | length |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// 0 | SSRC of packet sender |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// 4 | SSRC of media source |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// : Feedback Control Information (FCI) :
+// : :
+//
+// Generic NACK (RFC 4585).
+//
+// FCI:
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | PID | BLP |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
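+//
+// For example (matching kList/kPacket in the unit test): the sequence numbers
+// {0, 1, 3, 8, 16} pack into a single item with PID = 0 and
+// BLP = (1 << 0) | (1 << 2) | (1 << 7) | (1 << 15) = 0x8085, since BLP bit i
+// flags the loss of packet PID + i + 1.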
+bool Nack::Parse(const RtcpCommonHeader& header, const uint8_t* payload) {
+ RTC_DCHECK(header.packet_type == kPacketType);
+ RTC_DCHECK(header.count_or_format == kFeedbackMessageType);
+
+ if (header.payload_size_bytes < kCommonFeedbackLength + kNackItemLength) {
+ LOG(LS_WARNING) << "Payload length " << header.payload_size_bytes
+ << " is too small for a Nack.";
+ return false;
+ }
+ size_t nack_items =
+ (header.payload_size_bytes - kCommonFeedbackLength) / kNackItemLength;
+
+ ParseCommonFeedback(payload);
+ const uint8_t* next_nack = payload + kCommonFeedbackLength;
+
+ packet_ids_.clear();
+ packed_.resize(nack_items);
+ for (size_t index = 0; index < nack_items; ++index) {
+ packed_[index].first_pid = ByteReader<uint16_t>::ReadBigEndian(next_nack);
+ packed_[index].bitmask = ByteReader<uint16_t>::ReadBigEndian(next_nack + 2);
+ next_nack += kNackItemLength;
+ }
+ Unpack();
+
+ return true;
+}
+
+bool Nack::Create(uint8_t* packet,
+ size_t* index,
+ size_t max_length,
+ RtcpPacket::PacketReadyCallback* callback) const {
+ RTC_DCHECK(!packed_.empty());
+ // If nack list can't fit in packet, try to fragment.
+ size_t nack_index = 0;
+ const size_t kCommonFbFmtLength = kHeaderLength + kCommonFeedbackLength;
+ do {
+ size_t bytes_left_in_buffer = max_length - *index;
+ if (bytes_left_in_buffer < kCommonFbFmtLength + kNackItemLength) {
+ if (!OnBufferFull(packet, index, callback))
+ return false;
+ continue;
+ }
+ size_t num_nack_fields =
+ std::min((bytes_left_in_buffer - kCommonFbFmtLength) / kNackItemLength,
+ packed_.size() - nack_index);
+
+ size_t size_bytes =
+ (num_nack_fields * kNackItemLength) + kCommonFbFmtLength;
+    // Header length field: size in 32-bit words, minus one.
+    size_t header_length = ((size_bytes + 3) / 4) - 1;
+ CreateHeader(kFeedbackMessageType, kPacketType, header_length, packet,
+ index);
+ CreateCommonFeedback(packet + *index);
+ *index += kCommonFeedbackLength;
+ size_t end_index = nack_index + num_nack_fields;
+ for (; nack_index < end_index; ++nack_index) {
+ const auto& item = packed_[nack_index];
+ ByteWriter<uint16_t>::WriteBigEndian(packet + *index + 0, item.first_pid);
+ ByteWriter<uint16_t>::WriteBigEndian(packet + *index + 2, item.bitmask);
+ *index += kNackItemLength;
+ }
+ RTC_DCHECK_LE(*index, max_length);
+ } while (nack_index < packed_.size());
+
+ return true;
+}
+
+size_t Nack::BlockLength() const {
+ return (packed_.size() * kNackItemLength) + kCommonFeedbackLength +
+ kHeaderLength;
+}
+
+void Nack::WithList(const uint16_t* nack_list, size_t length) {
+ RTC_DCHECK(nack_list);
+ RTC_DCHECK(packet_ids_.empty());
+ RTC_DCHECK(packed_.empty());
+ packet_ids_.assign(nack_list, nack_list + length);
+ Pack();
+}
+
+void Nack::Pack() {
+ RTC_DCHECK(!packet_ids_.empty());
+ RTC_DCHECK(packed_.empty());
+ auto it = packet_ids_.begin();
+ const auto end = packet_ids_.end();
+ while (it != end) {
+ PackedNack item;
+ item.first_pid = *it++;
+ // Bitmask specifies losses in any of the 16 packets following the pid.
+ item.bitmask = 0;
+ while (it != end) {
+ uint16_t shift = static_cast<uint16_t>(*it - item.first_pid - 1);
+ if (shift <= 15) {
+ item.bitmask |= (1 << shift);
+ ++it;
+ } else {
+ break;
+ }
+ }
+ packed_.push_back(item);
+ }
+}
+
+void Nack::Unpack() {
+ RTC_DCHECK(packet_ids_.empty());
+ RTC_DCHECK(!packed_.empty());
+ for (const PackedNack& item : packed_) {
+ packet_ids_.push_back(item.first_pid);
+ uint16_t pid = item.first_pid + 1;
+ for (uint16_t bitmask = item.bitmask; bitmask != 0; bitmask >>= 1, ++pid)
+ if (bitmask & 1)
+ packet_ids_.push_back(pid);
+ }
+}
+
+} // namespace rtcp
+} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/nack.h b/webrtc/modules/rtp_rtcp/source/rtcp_packet/nack.h
new file mode 100644
index 0000000000..fb2be113a2
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/nack.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_NACK_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_NACK_H_
+
+#include <vector>
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/rtpfb.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
+
+namespace webrtc {
+namespace rtcp {
+
+class Nack : public Rtpfb {
+ public:
+  static const uint8_t kFeedbackMessageType = 1;
+
+  Nack() {}
+
+ virtual ~Nack() {}
+
+ // Parse assumes header is already parsed and validated.
+ bool Parse(const RTCPUtility::RtcpCommonHeader& header,
+ const uint8_t* payload); // Size of the payload is in the header.
+
+ void WithList(const uint16_t* nack_list, size_t length);
+ const std::vector<uint16_t>& packet_ids() const { return packet_ids_; }
+
+ protected:
+ bool Create(uint8_t* packet,
+ size_t* index,
+ size_t max_length,
+ RtcpPacket::PacketReadyCallback* callback) const override;
+
+ size_t BlockLength() const override;
+
+ private:
+  static const size_t kNackItemLength = 4;
+ struct PackedNack {
+ uint16_t first_pid;
+ uint16_t bitmask;
+ };
+
+  void Pack();    // Fills packed_ from packet_ids_ (used in WithList).
+  void Unpack();  // Fills packet_ids_ from packed_ (used in Parse).
+
+ std::vector<PackedNack> packed_;
+ std::vector<uint16_t> packet_ids_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(Nack);
+};
+
+} // namespace rtcp
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_NACK_H_
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/nack_unittest.cc b/webrtc/modules/rtp_rtcp/source/rtcp_packet/nack_unittest.cc
new file mode 100644
index 0000000000..01e30f5644
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/nack_unittest.cc
@@ -0,0 +1,190 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/nack.h"
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using ::testing::_;
+using ::testing::ElementsAreArray;
+using ::testing::Invoke;
+using ::testing::UnorderedElementsAreArray;
+
+using webrtc::rtcp::Nack;
+using webrtc::rtcp::RawPacket;
+using webrtc::RTCPUtility::RtcpCommonHeader;
+using webrtc::RTCPUtility::RtcpParseCommonHeader;
+
+namespace webrtc {
+namespace {
+
+const uint32_t kSenderSsrc = 0x12345678;
+const uint32_t kRemoteSsrc = 0x23456789;
+
+const uint16_t kList[] = {0, 1, 3, 8, 16};
+const size_t kListLength = sizeof(kList) / sizeof(kList[0]);
+const uint8_t kPacket[] = {0x81, 205, 0x00, 0x03, 0x12, 0x34, 0x56, 0x78,
+ 0x23, 0x45, 0x67, 0x89, 0x00, 0x00, 0x80, 0x85};
+const size_t kPacketLength = sizeof(kPacket);
+
+const uint16_t kWrapList[] = {0xffdc, 0xffec, 0xfffe, 0xffff, 0x0000,
+ 0x0001, 0x0003, 0x0014, 0x0064};
+const size_t kWrapListLength = sizeof(kWrapList) / sizeof(kWrapList[0]);
+const uint8_t kWrapPacket[] = {0x81, 205, 0x00, 0x06, 0x12, 0x34, 0x56, 0x78,
+ 0x23, 0x45, 0x67, 0x89, 0xff, 0xdc, 0x80, 0x00,
+ 0xff, 0xfe, 0x00, 0x17, 0x00, 0x14, 0x00, 0x00,
+ 0x00, 0x64, 0x00, 0x00};
+const size_t kWrapPacketLength = sizeof(kWrapPacket);
+
+TEST(RtcpPacketNackTest, Create) {
+ Nack nack;
+ nack.From(kSenderSsrc);
+ nack.To(kRemoteSsrc);
+ nack.WithList(kList, kListLength);
+
+ rtc::scoped_ptr<RawPacket> packet = nack.Build();
+
+ EXPECT_EQ(kPacketLength, packet->Length());
+ EXPECT_EQ(0, memcmp(kPacket, packet->Buffer(), kPacketLength));
+}
+
+TEST(RtcpPacketNackTest, Parse) {
+ RtcpCommonHeader header;
+ EXPECT_TRUE(RtcpParseCommonHeader(kPacket, kPacketLength, &header));
+ EXPECT_EQ(kPacketLength, header.BlockSize());
+ Nack parsed;
+
+ EXPECT_TRUE(
+ parsed.Parse(header, kPacket + RtcpCommonHeader::kHeaderSizeBytes));
+ const Nack& const_parsed = parsed;
+
+ EXPECT_EQ(kSenderSsrc, const_parsed.sender_ssrc());
+ EXPECT_EQ(kRemoteSsrc, const_parsed.media_ssrc());
+ EXPECT_THAT(const_parsed.packet_ids(), ElementsAreArray(kList));
+}
+
+TEST(RtcpPacketNackTest, CreateWrap) {
+ Nack nack;
+ nack.From(kSenderSsrc);
+ nack.To(kRemoteSsrc);
+ nack.WithList(kWrapList, kWrapListLength);
+
+ rtc::scoped_ptr<RawPacket> packet = nack.Build();
+
+ EXPECT_EQ(kWrapPacketLength, packet->Length());
+ EXPECT_EQ(0, memcmp(kWrapPacket, packet->Buffer(), kWrapPacketLength));
+}
+
+TEST(RtcpPacketNackTest, ParseWrap) {
+ RtcpCommonHeader header;
+ EXPECT_TRUE(RtcpParseCommonHeader(kWrapPacket, kWrapPacketLength, &header));
+ EXPECT_EQ(kWrapPacketLength, header.BlockSize());
+
+ Nack parsed;
+ EXPECT_TRUE(
+ parsed.Parse(header, kWrapPacket + RtcpCommonHeader::kHeaderSizeBytes));
+
+ EXPECT_EQ(kSenderSsrc, parsed.sender_ssrc());
+ EXPECT_EQ(kRemoteSsrc, parsed.media_ssrc());
+ EXPECT_THAT(parsed.packet_ids(), ElementsAreArray(kWrapList));
+}
+
+TEST(RtcpPacketNackTest, BadOrder) {
+ // Does not guarantee optimal packing, but should guarantee correctness.
+ const uint16_t kUnorderedList[] = {1, 25, 13, 12, 9, 27, 29};
+ const size_t kUnorderedListLength =
+ sizeof(kUnorderedList) / sizeof(kUnorderedList[0]);
+ Nack nack;
+ nack.From(kSenderSsrc);
+ nack.To(kRemoteSsrc);
+ nack.WithList(kUnorderedList, kUnorderedListLength);
+
+ rtc::scoped_ptr<RawPacket> packet = nack.Build();
+
+ Nack parsed;
+ RtcpCommonHeader header;
+ EXPECT_TRUE(
+ RtcpParseCommonHeader(packet->Buffer(), packet->Length(), &header));
+ EXPECT_TRUE(parsed.Parse(
+ header, packet->Buffer() + RtcpCommonHeader::kHeaderSizeBytes));
+
+ EXPECT_EQ(kSenderSsrc, parsed.sender_ssrc());
+ EXPECT_EQ(kRemoteSsrc, parsed.media_ssrc());
+ EXPECT_THAT(parsed.packet_ids(), UnorderedElementsAreArray(kUnorderedList));
+}
+
+TEST(RtcpPacketNackTest, CreateFragmented) {
+ Nack nack;
+ const uint16_t kList[] = {1, 100, 200, 300, 400};
+ const uint16_t kListLength = sizeof(kList) / sizeof(kList[0]);
+ nack.From(kSenderSsrc);
+ nack.To(kRemoteSsrc);
+ nack.WithList(kList, kListLength);
+
+ class MockPacketReadyCallback : public rtcp::RtcpPacket::PacketReadyCallback {
+ public:
+ MOCK_METHOD2(OnPacketReady, void(uint8_t*, size_t));
+ } verifier;
+
+ class NackVerifier {
+ public:
+ explicit NackVerifier(std::vector<uint16_t> ids) : ids_(ids) {}
+ void operator()(uint8_t* data, size_t length) {
+ RtcpCommonHeader header;
+ EXPECT_TRUE(RtcpParseCommonHeader(data, length, &header));
+ EXPECT_EQ(length, header.BlockSize());
+ Nack nack;
+ EXPECT_TRUE(
+ nack.Parse(header, data + RtcpCommonHeader::kHeaderSizeBytes));
+ EXPECT_EQ(kSenderSsrc, nack.sender_ssrc());
+ EXPECT_EQ(kRemoteSsrc, nack.media_ssrc());
+ EXPECT_THAT(nack.packet_ids(), ElementsAreArray(ids_));
+ }
+ std::vector<uint16_t> ids_;
+ } packet1({1, 100, 200}), packet2({300, 400});
+
+ EXPECT_CALL(verifier, OnPacketReady(_, _))
+ .WillOnce(Invoke(packet1))
+ .WillOnce(Invoke(packet2));
+  const size_t kBufferSize = 12 + (3 * 4); // Fits common header + 3 nack items.
+ uint8_t buffer[kBufferSize];
+ EXPECT_TRUE(nack.BuildExternalBuffer(buffer, kBufferSize, &verifier));
+}
+
+TEST(RtcpPacketNackTest, CreateFailsWithTooSmallBuffer) {
+ const uint16_t kList[] = {1};
+ const size_t kMinNackBlockSize = 16;
+ Nack nack;
+ nack.From(kSenderSsrc);
+ nack.To(kRemoteSsrc);
+ nack.WithList(kList, 1);
+ class Verifier : public rtcp::RtcpPacket::PacketReadyCallback {
+ public:
+ void OnPacketReady(uint8_t* data, size_t length) override {
+ ADD_FAILURE() << "Buffer should be too small.";
+ }
+ } verifier;
+ uint8_t buffer[kMinNackBlockSize - 1];
+ EXPECT_FALSE(
+ nack.BuildExternalBuffer(buffer, kMinNackBlockSize - 1, &verifier));
+}
+
+TEST(RtcpPacketNackTest, ParseFailsWithTooSmallBuffer) {
+ RtcpCommonHeader header;
+ EXPECT_TRUE(RtcpParseCommonHeader(kPacket, kPacketLength, &header));
+  header.payload_size_bytes--; // Damage the packet.
+ Nack parsed;
+ EXPECT_FALSE(
+ parsed.Parse(header, kPacket + RtcpCommonHeader::kHeaderSizeBytes));
+}
+
+} // namespace
+} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/pli.cc b/webrtc/modules/rtp_rtcp/source/rtcp_packet/pli.cc
new file mode 100644
index 0000000000..3673491058
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/pli.cc
@@ -0,0 +1,70 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/pli.h"
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+
+using webrtc::RTCPUtility::RtcpCommonHeader;
+
+namespace webrtc {
+namespace rtcp {
+
+// RFC 4585: Feedback format.
+//
+// Common packet format:
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |V=2|P| FMT | PT | length |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SSRC of packet sender |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SSRC of media source |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// : Feedback Control Information (FCI) :
+// : :
+
+//
+// Picture loss indication (PLI) (RFC 4585).
+// FCI: no feedback control information.
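+//
+// With an empty FCI, a PLI is always 12 bytes: the 4-byte header (FMT=1,
+// PT=206, length=2) plus the sender and media SSRCs; kPacket in the unit
+// test gives a byte-for-byte example.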
+bool Pli::Parse(const RtcpCommonHeader& header, const uint8_t* payload) {
+ RTC_DCHECK(header.packet_type == kPacketType);
+ RTC_DCHECK(header.count_or_format == kFeedbackMessageType);
+
+ if (header.payload_size_bytes < kCommonFeedbackLength) {
+ LOG(LS_WARNING) << "Packet is too small to be a valid PLI packet";
+ return false;
+ }
+
+ ParseCommonFeedback(payload);
+ return true;
+}
+
+bool Pli::Create(uint8_t* packet,
+ size_t* index,
+ size_t max_length,
+ RtcpPacket::PacketReadyCallback* callback) const {
+ while (*index + BlockLength() > max_length) {
+ if (!OnBufferFull(packet, index, callback))
+ return false;
+ }
+
+ CreateHeader(kFeedbackMessageType, kPacketType, HeaderLength(), packet,
+ index);
+ CreateCommonFeedback(packet + *index);
+ *index += kCommonFeedbackLength;
+ return true;
+}
+
+} // namespace rtcp
+} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/pli.h b/webrtc/modules/rtp_rtcp/source/rtcp_packet/pli.h
new file mode 100644
index 0000000000..5567825830
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/pli.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_PLI_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_PLI_H_
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/psfb.h"
+
+namespace webrtc {
+namespace rtcp {
+
+// Picture loss indication (PLI) (RFC 4585).
+class Pli : public Psfb {
+ public:
+ static const uint8_t kFeedbackMessageType = 1;
+
+ Pli() {}
+ virtual ~Pli() {}
+
+ // Parse assumes header is already parsed and validated.
+ bool Parse(const RTCPUtility::RtcpCommonHeader& header,
+ const uint8_t* payload); // Size of the payload is in the header.
+
+ protected:
+ bool Create(uint8_t* packet,
+ size_t* index,
+ size_t max_length,
+ RtcpPacket::PacketReadyCallback* callback) const override;
+
+ private:
+ size_t BlockLength() const override {
+ return kHeaderLength + kCommonFeedbackLength;
+ }
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(Pli);
+};
+
+} // namespace rtcp
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_PLI_H_
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/pli_unittest.cc b/webrtc/modules/rtp_rtcp/source/rtcp_packet/pli_unittest.cc
new file mode 100644
index 0000000000..1c47c3ffb1
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/pli_unittest.cc
@@ -0,0 +1,66 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/pli.h"
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
+
+using webrtc::rtcp::Pli;
+using webrtc::rtcp::RawPacket;
+using webrtc::RTCPUtility::RtcpCommonHeader;
+using webrtc::RTCPUtility::RtcpParseCommonHeader;
+
+namespace webrtc {
+namespace {
+
+const uint32_t kSenderSsrc = 0x12345678;
+const uint32_t kRemoteSsrc = 0x23456789;
+// Manually created Pli packet matching constants above.
+const uint8_t kPacket[] = {0x81, 206, 0x00, 0x02,
+ 0x12, 0x34, 0x56, 0x78,
+ 0x23, 0x45, 0x67, 0x89};
+const size_t kPacketLength = sizeof(kPacket);
+
+TEST(RtcpPacketPliTest, Parse) {
+ RtcpCommonHeader header;
+ EXPECT_TRUE(RtcpParseCommonHeader(kPacket, kPacketLength, &header));
+ Pli mutable_parsed;
+ EXPECT_TRUE(mutable_parsed.Parse(
+ header, kPacket + RtcpCommonHeader::kHeaderSizeBytes));
+ const Pli& parsed = mutable_parsed; // Read values from constant object.
+
+ EXPECT_EQ(kSenderSsrc, parsed.sender_ssrc());
+ EXPECT_EQ(kRemoteSsrc, parsed.media_ssrc());
+}
+
+TEST(RtcpPacketPliTest, Create) {
+ Pli pli;
+ pli.From(kSenderSsrc);
+ pli.To(kRemoteSsrc);
+
+ rtc::scoped_ptr<RawPacket> packet(pli.Build());
+
+ ASSERT_EQ(kPacketLength, packet->Length());
+ EXPECT_EQ(0, memcmp(kPacket, packet->Buffer(), kPacketLength));
+}
+
+TEST(RtcpPacketPliTest, ParseFailsOnTooSmallPacket) {
+ RtcpCommonHeader header;
+ EXPECT_TRUE(RtcpParseCommonHeader(kPacket, kPacketLength, &header));
+ header.payload_size_bytes--;
+
+ Pli parsed;
+ EXPECT_FALSE(
+ parsed.Parse(header, kPacket + RtcpCommonHeader::kHeaderSizeBytes));
+}
+
+} // namespace
+} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/psfb.cc b/webrtc/modules/rtp_rtcp/source/rtcp_packet/psfb.cc
new file mode 100644
index 0000000000..d1ee401dab
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/psfb.cc
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/psfb.h"
+
+#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
+
+namespace webrtc {
+namespace rtcp {
+
+// RFC 4585: Feedback format.
+//
+// Common packet format:
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |V=2|P| FMT | PT | length |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// 0 | SSRC of packet sender |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// 4 | SSRC of media source |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// : Feedback Control Information (FCI) :
+// : :
+
+void Psfb::ParseCommonFeedback(const uint8_t* payload) {
+ sender_ssrc_ = ByteReader<uint32_t>::ReadBigEndian(&payload[0]);
+ media_ssrc_ = ByteReader<uint32_t>::ReadBigEndian(&payload[4]);
+}
+
+void Psfb::CreateCommonFeedback(uint8_t* payload) const {
+ ByteWriter<uint32_t>::WriteBigEndian(&payload[0], sender_ssrc_);
+ ByteWriter<uint32_t>::WriteBigEndian(&payload[4], media_ssrc_);
+}
+
+} // namespace rtcp
+} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/psfb.h b/webrtc/modules/rtp_rtcp/source/rtcp_packet/psfb.h
new file mode 100644
index 0000000000..dddcdecba6
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/psfb.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_PSFB_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_PSFB_H_
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
+
+namespace webrtc {
+namespace rtcp {
+
+// PSFB: Payload-specific feedback message.
+// RFC 4585, Section 6.3.
+class Psfb : public RtcpPacket {
+ public:
+ static const uint8_t kPacketType = 206;
+
+ Psfb() : sender_ssrc_(0), media_ssrc_(0) {}
+ virtual ~Psfb() {}
+
+ void From(uint32_t ssrc) { sender_ssrc_ = ssrc; }
+ void To(uint32_t ssrc) { media_ssrc_ = ssrc; }
+
+ uint32_t sender_ssrc() const { return sender_ssrc_; }
+ uint32_t media_ssrc() const { return media_ssrc_; }
+
+ protected:
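+ // 8 bytes: the 32-bit packet-sender SSRC followed by the media-source SSRC.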
+ static const size_t kCommonFeedbackLength = 8;
+ void ParseCommonFeedback(const uint8_t* payload);
+ void CreateCommonFeedback(uint8_t* payload) const;
+
+ private:
+ uint32_t sender_ssrc_;
+ uint32_t media_ssrc_;
+};
+
+} // namespace rtcp
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_PSFB_H_
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/receiver_report.cc b/webrtc/modules/rtp_rtcp/source/rtcp_packet/receiver_report.cc
new file mode 100644
index 0000000000..ef64b4f51b
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/receiver_report.cc
@@ -0,0 +1,89 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/receiver_report.h"
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
+
+using webrtc::RTCPUtility::RtcpCommonHeader;
+
+namespace webrtc {
+namespace rtcp {
+
+//
+// RTCP receiver report (RFC 3550).
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |V=2|P| RC | PT=RR=201 | length |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SSRC of packet sender |
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+// | report block(s) |
+// | .... |
+bool ReceiverReport::Parse(const RTCPUtility::RtcpCommonHeader& header,
+ const uint8_t* payload) {
+ RTC_DCHECK(header.packet_type == kPacketType);
+
+ const uint8_t report_blocks_count = header.count_or_format;
+
+ if (header.payload_size_bytes <
+ kRrBaseLength + report_blocks_count * ReportBlock::kLength) {
+ LOG(LS_WARNING) << "Packet is too small to contain all the data.";
+ return false;
+ }
+
+ sender_ssrc_ = ByteReader<uint32_t>::ReadBigEndian(payload);
+
+ const uint8_t* next_report_block = payload + kRrBaseLength;
+
+ report_blocks_.resize(report_blocks_count);
+ for (ReportBlock& block : report_blocks_) {
+ block.Parse(next_report_block, ReportBlock::kLength);
+ next_report_block += ReportBlock::kLength;
+ }
+
+ RTC_DCHECK_LE(next_report_block, payload + header.payload_size_bytes);
+ return true;
+}
+
+bool ReceiverReport::Create(uint8_t* packet,
+ size_t* index,
+ size_t max_length,
+ RtcpPacket::PacketReadyCallback* callback) const {
+ while (*index + BlockLength() > max_length) {
+ if (!OnBufferFull(packet, index, callback))
+ return false;
+ }
+ CreateHeader(report_blocks_.size(), kPacketType, HeaderLength(), packet,
+ index);
+ ByteWriter<uint32_t>::WriteBigEndian(packet + *index, sender_ssrc_);
+ *index += kRrBaseLength;
+ for (const ReportBlock& block : report_blocks_) {
+ block.Create(packet + *index);
+ *index += ReportBlock::kLength;
+ }
+ return true;
+}
+
+bool ReceiverReport::WithReportBlock(const ReportBlock& block) {
+ if (report_blocks_.size() >= kMaxNumberOfReportBlocks) {
+ LOG(LS_WARNING) << "Max report blocks reached.";
+ return false;
+ }
+ report_blocks_.push_back(block);
+ return true;
+}
+
+} // namespace rtcp
+} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/receiver_report.h b/webrtc/modules/rtp_rtcp/source/rtcp_packet/receiver_report.h
new file mode 100644
index 0000000000..172a84ea2f
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/receiver_report.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_RECEIVER_REPORT_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_RECEIVER_REPORT_H_
+
+#include <vector>
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/report_block.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
+
+namespace webrtc {
+namespace rtcp {
+
+class ReceiverReport : public RtcpPacket {
+ public:
+ static const uint8_t kPacketType = 201;
+ ReceiverReport() : sender_ssrc_(0) {}
+
+ virtual ~ReceiverReport() {}
+
+ // Parse assumes header is already parsed and validated.
+ bool Parse(const RTCPUtility::RtcpCommonHeader& header,
+ const uint8_t* payload); // Size of the payload is in the header.
+
+ void From(uint32_t ssrc) { sender_ssrc_ = ssrc; }
+ bool WithReportBlock(const ReportBlock& block);
+
+ uint32_t sender_ssrc() const { return sender_ssrc_; }
+ const std::vector<ReportBlock>& report_blocks() const {
+ return report_blocks_;
+ }
+
+ protected:
+ bool Create(uint8_t* packet,
+ size_t* index,
+ size_t max_length,
+ RtcpPacket::PacketReadyCallback* callback) const override;
+
+ private:
+ static const size_t kRrBaseLength = 4;
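+ // The 5-bit RC field in the header caps the block count at 31 (0x1F).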
+ static const size_t kMaxNumberOfReportBlocks = 0x1F;
+
+ size_t BlockLength() const {
+ return kHeaderLength + kRrBaseLength +
+ report_blocks_.size() * ReportBlock::kLength;
+ }
+
+ uint32_t sender_ssrc_;
+ std::vector<ReportBlock> report_blocks_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(ReceiverReport);
+};
+
+} // namespace rtcp
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_RECEIVER_REPORT_H_
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/receiver_report_unittest.cc b/webrtc/modules/rtp_rtcp/source/rtcp_packet/receiver_report_unittest.cc
new file mode 100644
index 0000000000..ff3da600a5
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/receiver_report_unittest.cc
@@ -0,0 +1,145 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/receiver_report.h"
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+using webrtc::rtcp::RawPacket;
+using webrtc::rtcp::ReceiverReport;
+using webrtc::rtcp::ReportBlock;
+using webrtc::RTCPUtility::RtcpCommonHeader;
+using webrtc::RTCPUtility::RtcpParseCommonHeader;
+
+namespace webrtc {
+namespace {
+const uint32_t kSenderSsrc = 0x12345678;
+const uint32_t kRemoteSsrc = 0x23456789;
+const uint8_t kFractionLost = 55;
+const uint32_t kCumulativeLost = 0x111213;
+const uint32_t kExtHighestSeqNum = 0x22232425;
+const uint32_t kJitter = 0x33343536;
+const uint32_t kLastSr = 0x44454647;
+const uint32_t kDelayLastSr = 0x55565758;
+// Manually created ReceiverReport with one ReportBlock matching constants
+// above.
+// Having this block allows testing Create and Parse separately.
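+// Header byte breakdown (assumed reading of the constants below): 0x81 =
+// V:2, P:0, RC:1; PT 201 (RR); length 0x0007 words, 32 bytes total.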
+const uint8_t kPacket[] = {0x81, 201, 0x00, 0x07, 0x12, 0x34, 0x56, 0x78,
+ 0x23, 0x45, 0x67, 0x89, 55, 0x11, 0x12, 0x13,
+ 0x22, 0x23, 0x24, 0x25, 0x33, 0x34, 0x35, 0x36,
+ 0x44, 0x45, 0x46, 0x47, 0x55, 0x56, 0x57, 0x58};
+const size_t kPacketLength = sizeof(kPacket);
+
+class RtcpPacketReceiverReportTest : public ::testing::Test {
+ protected:
+ void BuildPacket() { packet = rr.Build(); }
+ void ParsePacket() {
+ RtcpCommonHeader header;
+ EXPECT_TRUE(
+ RtcpParseCommonHeader(packet->Buffer(), packet->Length(), &header));
+ EXPECT_EQ(header.BlockSize(), packet->Length());
+ EXPECT_TRUE(parsed_.Parse(
+ header, packet->Buffer() + RtcpCommonHeader::kHeaderSizeBytes));
+ }
+
+ ReceiverReport rr;
+ rtc::scoped_ptr<RawPacket> packet;
+ const ReceiverReport& parsed() { return parsed_; }
+
+ private:
+ ReceiverReport parsed_;
+};
+
+TEST_F(RtcpPacketReceiverReportTest, Parse) {
+ RtcpCommonHeader header;
+ RtcpParseCommonHeader(kPacket, kPacketLength, &header);
+ EXPECT_TRUE(rr.Parse(header, kPacket + RtcpCommonHeader::kHeaderSizeBytes));
+ const ReceiverReport& parsed = rr;
+
+ EXPECT_EQ(kSenderSsrc, parsed.sender_ssrc());
+ EXPECT_EQ(1u, parsed.report_blocks().size());
+ const ReportBlock& rb = parsed.report_blocks().front();
+ EXPECT_EQ(kRemoteSsrc, rb.source_ssrc());
+ EXPECT_EQ(kFractionLost, rb.fraction_lost());
+ EXPECT_EQ(kCumulativeLost, rb.cumulative_lost());
+ EXPECT_EQ(kExtHighestSeqNum, rb.extended_high_seq_num());
+ EXPECT_EQ(kJitter, rb.jitter());
+ EXPECT_EQ(kLastSr, rb.last_sr());
+ EXPECT_EQ(kDelayLastSr, rb.delay_since_last_sr());
+}
+
+TEST_F(RtcpPacketReceiverReportTest, ParseFailsOnIncorrectSize) {
+ RtcpCommonHeader header;
+ RtcpParseCommonHeader(kPacket, kPacketLength, &header);
+ header.count_or_format++; // Damage the packet.
+ EXPECT_FALSE(rr.Parse(header, kPacket + RtcpCommonHeader::kHeaderSizeBytes));
+}
+
+TEST_F(RtcpPacketReceiverReportTest, Create) {
+ rr.From(kSenderSsrc);
+ ReportBlock rb;
+ rb.To(kRemoteSsrc);
+ rb.WithFractionLost(kFractionLost);
+ rb.WithCumulativeLost(kCumulativeLost);
+ rb.WithExtHighestSeqNum(kExtHighestSeqNum);
+ rb.WithJitter(kJitter);
+ rb.WithLastSr(kLastSr);
+ rb.WithDelayLastSr(kDelayLastSr);
+ rr.WithReportBlock(rb);
+
+ BuildPacket();
+
+ ASSERT_EQ(kPacketLength, packet->Length());
+ EXPECT_EQ(0, memcmp(kPacket, packet->Buffer(), kPacketLength));
+}
+
+TEST_F(RtcpPacketReceiverReportTest, WithoutReportBlocks) {
+ rr.From(kSenderSsrc);
+
+ BuildPacket();
+ ParsePacket();
+
+ EXPECT_EQ(kSenderSsrc, parsed().sender_ssrc());
+ EXPECT_EQ(0u, parsed().report_blocks().size());
+}
+
+TEST_F(RtcpPacketReceiverReportTest, WithTwoReportBlocks) {
+ ReportBlock rb1;
+ rb1.To(kRemoteSsrc);
+ ReportBlock rb2;
+ rb2.To(kRemoteSsrc + 1);
+
+ rr.From(kSenderSsrc);
+ EXPECT_TRUE(rr.WithReportBlock(rb1));
+ EXPECT_TRUE(rr.WithReportBlock(rb2));
+
+ BuildPacket();
+ ParsePacket();
+
+ EXPECT_EQ(kSenderSsrc, parsed().sender_ssrc());
+ EXPECT_EQ(2u, parsed().report_blocks().size());
+ EXPECT_EQ(kRemoteSsrc, parsed().report_blocks()[0].source_ssrc());
+ EXPECT_EQ(kRemoteSsrc + 1, parsed().report_blocks()[1].source_ssrc());
+}
+
+TEST_F(RtcpPacketReceiverReportTest, WithTooManyReportBlocks) {
+ rr.From(kSenderSsrc);
+ const size_t kMaxReportBlocks = (1 << 5) - 1;
+ ReportBlock rb;
+ for (size_t i = 0; i < kMaxReportBlocks; ++i) {
+ rb.To(kRemoteSsrc + i);
+ EXPECT_TRUE(rr.WithReportBlock(rb));
+ }
+ rb.To(kRemoteSsrc + kMaxReportBlocks);
+ EXPECT_FALSE(rr.WithReportBlock(rb));
+}
+
+} // namespace
+} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/report_block.cc b/webrtc/modules/rtp_rtcp/source/rtcp_packet/report_block.cc
new file mode 100644
index 0000000000..4911dbf5b7
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/report_block.cc
@@ -0,0 +1,89 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/report_block.h"
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
+
+namespace webrtc {
+namespace rtcp {
+
+// From RFC 3550, RTP: A Transport Protocol for Real-Time Applications.
+//
+// RTCP report block (RFC 3550).
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+// 0 | SSRC_1 (SSRC of first source) |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// 4 | fraction lost | cumulative number of packets lost |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// 8 | extended highest sequence number received |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// 12 | interarrival jitter |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// 16 | last SR (LSR) |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// 20 | delay since last SR (DLSR) |
+// 24 +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+ReportBlock::ReportBlock()
+ : source_ssrc_(0),
+ fraction_lost_(0),
+ cumulative_lost_(0),
+ extended_high_seq_num_(0),
+ jitter_(0),
+ last_sr_(0),
+ delay_since_last_sr_(0) {}
+
+bool ReportBlock::Parse(const uint8_t* buffer, size_t length) {
+ RTC_DCHECK(buffer != nullptr);
+ if (length < ReportBlock::kLength) {
+ LOG(LS_ERROR) << "Report Block should be 24 bytes long";
+ return false;
+ }
+
+ source_ssrc_ = ByteReader<uint32_t>::ReadBigEndian(&buffer[0]);
+ fraction_lost_ = buffer[4];
+ cumulative_lost_ = ByteReader<uint32_t, 3>::ReadBigEndian(&buffer[5]);
+ extended_high_seq_num_ = ByteReader<uint32_t>::ReadBigEndian(&buffer[8]);
+ jitter_ = ByteReader<uint32_t>::ReadBigEndian(&buffer[12]);
+ last_sr_ = ByteReader<uint32_t>::ReadBigEndian(&buffer[16]);
+ delay_since_last_sr_ = ByteReader<uint32_t>::ReadBigEndian(&buffer[20]);
+
+ return true;
+}
+
+void ReportBlock::Create(uint8_t* buffer) const {
+ // Runtime check should be done while setting cumulative_lost.
+ RTC_DCHECK_LT(cumulative_lost(), (1u << 24)); // Only 3 bytes are available.
+
+ ByteWriter<uint32_t>::WriteBigEndian(&buffer[0], source_ssrc());
+ ByteWriter<uint8_t>::WriteBigEndian(&buffer[4], fraction_lost());
+ ByteWriter<uint32_t, 3>::WriteBigEndian(&buffer[5], cumulative_lost());
+ ByteWriter<uint32_t>::WriteBigEndian(&buffer[8], extended_high_seq_num());
+ ByteWriter<uint32_t>::WriteBigEndian(&buffer[12], jitter());
+ ByteWriter<uint32_t>::WriteBigEndian(&buffer[16], last_sr());
+ ByteWriter<uint32_t>::WriteBigEndian(&buffer[20], delay_since_last_sr());
+}
+
+bool ReportBlock::WithCumulativeLost(uint32_t cumulative_lost) {
+ if (cumulative_lost >= (1u << 24)) { // Only 3 bytes are available to store it.
+ LOG(LS_WARNING) << "Cumulative lost is too big to fit into Report Block";
+ return false;
+ }
+ cumulative_lost_ = cumulative_lost;
+ return true;
+}
+
+} // namespace rtcp
+} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/report_block.h b/webrtc/modules/rtp_rtcp/source/rtcp_packet/report_block.h
new file mode 100644
index 0000000000..ef99e17297
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/report_block.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_REPORT_BLOCK_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_REPORT_BLOCK_H_
+
+#include "webrtc/base/basictypes.h"
+
+namespace webrtc {
+namespace rtcp {
+
+class ReportBlock {
+ public:
+ static const size_t kLength = 24;
+
+ ReportBlock();
+ ~ReportBlock() {}
+
+ bool Parse(const uint8_t* buffer, size_t length);
+
+ // Fills buffer with the ReportBlock.
+ // Consumes ReportBlock::kLength bytes.
+ void Create(uint8_t* buffer) const;
+
+ void To(uint32_t ssrc) { source_ssrc_ = ssrc; }
+ void WithFractionLost(uint8_t fraction_lost) {
+ fraction_lost_ = fraction_lost;
+ }
+ bool WithCumulativeLost(uint32_t cumulative_lost);
+ void WithExtHighestSeqNum(uint32_t ext_highest_seq_num) {
+ extended_high_seq_num_ = ext_highest_seq_num;
+ }
+ void WithJitter(uint32_t jitter) { jitter_ = jitter; }
+ void WithLastSr(uint32_t last_sr) { last_sr_ = last_sr; }
+ void WithDelayLastSr(uint32_t delay_last_sr) {
+ delay_since_last_sr_ = delay_last_sr;
+ }
+
+ uint32_t source_ssrc() const { return source_ssrc_; }
+ uint8_t fraction_lost() const { return fraction_lost_; }
+ uint32_t cumulative_lost() const { return cumulative_lost_; }
+ uint32_t extended_high_seq_num() const { return extended_high_seq_num_; }
+ uint32_t jitter() const { return jitter_; }
+ uint32_t last_sr() const { return last_sr_; }
+ uint32_t delay_since_last_sr() const { return delay_since_last_sr_; }
+
+ private:
+ uint32_t source_ssrc_;
+ uint8_t fraction_lost_;
+ uint32_t cumulative_lost_;
+ uint32_t extended_high_seq_num_;
+ uint32_t jitter_;
+ uint32_t last_sr_;
+ uint32_t delay_since_last_sr_;
+};
+
+} // namespace rtcp
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_REPORT_BLOCK_H_
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/report_block_unittest.cc b/webrtc/modules/rtp_rtcp/source/rtcp_packet/report_block_unittest.cc
new file mode 100644
index 0000000000..85bbb404a4
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/report_block_unittest.cc
@@ -0,0 +1,86 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/report_block.h"
+
+#include <limits>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/random.h"
+
+using webrtc::rtcp::ReportBlock;
+
+namespace webrtc {
+namespace {
+
+const uint32_t kRemoteSsrc = 0x23456789;
+const uint8_t kFractionLost = 55;
+// Use values that are serialized differently in little-endian and big-endian
+// byte order, so endianness mix-ups are caught.
+const uint32_t kCumulativeLost = 0x111213;
+const uint32_t kExtHighestSeqNum = 0x22232425;
+const uint32_t kJitter = 0x33343536;
+const uint32_t kLastSr = 0x44454647;
+const uint32_t kDelayLastSr = 0x55565758;
+const size_t kBufferLength = ReportBlock::kLength;
+
+TEST(RtcpPacketReportBlockTest, ParseChecksLength) {
+ uint8_t buffer[kBufferLength];
+ memset(buffer, 0, sizeof(buffer));
+
+ ReportBlock rb;
+ EXPECT_FALSE(rb.Parse(buffer, kBufferLength - 1));
+ EXPECT_TRUE(rb.Parse(buffer, kBufferLength));
+}
+
+TEST(RtcpPacketReportBlockTest, ParseAnyData) {
+ uint8_t buffer[kBufferLength];
+ // Fill buffer with semi-random data.
+ Random generator(0x256F8A285EC829ull);
+ for (size_t i = 0; i < kBufferLength; ++i)
+ buffer[i] = static_cast<uint8_t>(generator.Rand(0, 0xff));
+
+ ReportBlock rb;
+ EXPECT_TRUE(rb.Parse(buffer, kBufferLength));
+}
+
+TEST(RtcpPacketReportBlockTest, ParseMatchCreate) {
+ ReportBlock rb;
+ rb.To(kRemoteSsrc);
+ rb.WithFractionLost(kFractionLost);
+ rb.WithCumulativeLost(kCumulativeLost);
+ rb.WithExtHighestSeqNum(kExtHighestSeqNum);
+ rb.WithJitter(kJitter);
+ rb.WithLastSr(kLastSr);
+ rb.WithDelayLastSr(kDelayLastSr);
+
+ uint8_t buffer[kBufferLength];
+ rb.Create(buffer);
+
+ ReportBlock parsed;
+ EXPECT_TRUE(parsed.Parse(buffer, kBufferLength));
+
+ EXPECT_EQ(kRemoteSsrc, parsed.source_ssrc());
+ EXPECT_EQ(kFractionLost, parsed.fraction_lost());
+ EXPECT_EQ(kCumulativeLost, parsed.cumulative_lost());
+ EXPECT_EQ(kExtHighestSeqNum, parsed.extended_high_seq_num());
+ EXPECT_EQ(kJitter, parsed.jitter());
+ EXPECT_EQ(kLastSr, parsed.last_sr());
+ EXPECT_EQ(kDelayLastSr, parsed.delay_since_last_sr());
+}
+
+TEST(RtcpPacketReportBlockTest, ValidateCumulativeLost) {
+ const uint32_t kMaxCumulativeLost = 0xffffff;
+ ReportBlock rb;
+ EXPECT_FALSE(rb.WithCumulativeLost(kMaxCumulativeLost + 1));
+ EXPECT_TRUE(rb.WithCumulativeLost(kMaxCumulativeLost));
+}
+
+} // namespace
+} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/rrtr.cc b/webrtc/modules/rtp_rtcp/source/rtcp_packet/rrtr.cc
new file mode 100644
index 0000000000..db4ae67326
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/rrtr.cc
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/rrtr.h"
+
+#include "webrtc/base/checks.h"
+#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
+
+namespace webrtc {
+namespace rtcp {
+// Receiver Reference Time Report Block (RFC 3611).
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | BT=4 | reserved | block length = 2 |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | NTP timestamp, most significant word |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | NTP timestamp, least significant word |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+void Rrtr::Parse(const uint8_t* buffer) {
+ RTC_DCHECK(buffer[0] == kBlockType);
+ // reserved = buffer[1];
+ RTC_DCHECK(ByteReader<uint16_t>::ReadBigEndian(&buffer[2]) == kBlockLength);
+ uint32_t seconds = ByteReader<uint32_t>::ReadBigEndian(&buffer[4]);
+ uint32_t fraction = ByteReader<uint32_t>::ReadBigEndian(&buffer[8]);
+ ntp_.Set(seconds, fraction);
+}
+
+void Rrtr::Create(uint8_t* buffer) const {
+ const uint8_t kReserved = 0;
+ buffer[0] = kBlockType;
+ buffer[1] = kReserved;
+ ByteWriter<uint16_t>::WriteBigEndian(&buffer[2], kBlockLength);
+ ByteWriter<uint32_t>::WriteBigEndian(&buffer[4], ntp_.seconds());
+ ByteWriter<uint32_t>::WriteBigEndian(&buffer[8], ntp_.fractions());
+}
+
+} // namespace rtcp
+} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/rrtr.h b/webrtc/modules/rtp_rtcp/source/rtcp_packet/rrtr.h
new file mode 100644
index 0000000000..3354f61df6
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/rrtr.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_RRTR_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_RRTR_H_
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/system_wrappers/include/ntp_time.h"
+
+namespace webrtc {
+namespace rtcp {
+
+class Rrtr {
+ public:
+ static const uint8_t kBlockType = 4;
+ static const uint16_t kBlockLength = 2;
+ static const size_t kLength = 4 * (kBlockLength + 1); // 12
+
+ Rrtr() {}
+ Rrtr(const Rrtr&) = default;
+ ~Rrtr() {}
+
+ Rrtr& operator=(const Rrtr&) = default;
+
+ void Parse(const uint8_t* buffer);
+
+ // Fills buffer with the Rrtr.
+ // Consumes Rrtr::kLength bytes.
+ void Create(uint8_t* buffer) const;
+
+ void WithNtp(const NtpTime& ntp) { ntp_ = ntp; }
+
+ NtpTime ntp() const { return ntp_; }
+
+ private:
+ NtpTime ntp_;
+};
+
+} // namespace rtcp
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_RRTR_H_
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/rrtr_unittest.cc b/webrtc/modules/rtp_rtcp/source/rtcp_packet/rrtr_unittest.cc
new file mode 100644
index 0000000000..6536e06186
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/rrtr_unittest.cc
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/rrtr.h"
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+using webrtc::rtcp::Rrtr;
+
+namespace webrtc {
+namespace {
+
+const uint32_t kNtpSec = 0x12345678;
+const uint32_t kNtpFrac = 0x23456789;
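+// Manually created Rrtr block matching the constants above: BT=4, reserved=0,
+// block length=2 words, then the 8-byte NTP timestamp (seconds, fraction).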
+const uint8_t kBlock[] = {0x04, 0x00, 0x00, 0x02,
+ 0x12, 0x34, 0x56, 0x78,
+ 0x23, 0x45, 0x67, 0x89};
+const size_t kBlockSizeBytes = sizeof(kBlock);
+static_assert(
+ kBlockSizeBytes == Rrtr::kLength,
+ "Size of manually created Rrtr block should match class constant");
+
+TEST(RtcpPacketRrtrTest, Create) {
+ uint8_t buffer[Rrtr::kLength];
+ Rrtr rrtr;
+ rrtr.WithNtp(NtpTime(kNtpSec, kNtpFrac));
+
+ rrtr.Create(buffer);
+ EXPECT_EQ(0, memcmp(buffer, kBlock, kBlockSizeBytes));
+}
+
+TEST(RtcpPacketRrtrTest, Parse) {
+ Rrtr read_rrtr;
+ read_rrtr.Parse(kBlock);
+
+ // Run checks on const object to ensure all accessors have const modifier.
+ const Rrtr& parsed = read_rrtr;
+
+ EXPECT_EQ(kNtpSec, parsed.ntp().seconds());
+ EXPECT_EQ(kNtpFrac, parsed.ntp().fractions());
+}
+
+} // namespace
+} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/rtpfb.cc b/webrtc/modules/rtp_rtcp/source/rtcp_packet/rtpfb.cc
new file mode 100644
index 0000000000..b5571d45a3
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/rtpfb.cc
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/rtpfb.h"
+
+#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
+
+namespace webrtc {
+namespace rtcp {
+
+// RFC 4585, Section 6.1: Feedback format.
+//
+// Common packet format:
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |V=2|P| FMT | PT | length |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// 0 | SSRC of packet sender |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// 4 | SSRC of media source |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// : Feedback Control Information (FCI) :
+// : :
+
+void Rtpfb::ParseCommonFeedback(const uint8_t* payload) {
+ sender_ssrc_ = ByteReader<uint32_t>::ReadBigEndian(&payload[0]);
+ media_ssrc_ = ByteReader<uint32_t>::ReadBigEndian(&payload[4]);
+}
+
+void Rtpfb::CreateCommonFeedback(uint8_t* payload) const {
+ ByteWriter<uint32_t>::WriteBigEndian(&payload[0], sender_ssrc_);
+ ByteWriter<uint32_t>::WriteBigEndian(&payload[4], media_ssrc_);
+}
+
+} // namespace rtcp
+} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/rtpfb.h b/webrtc/modules/rtp_rtcp/source/rtcp_packet/rtpfb.h
new file mode 100644
index 0000000000..801aa085c4
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/rtpfb.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_RTPFB_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_RTPFB_H_
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
+
+namespace webrtc {
+namespace rtcp {
+
+// RTPFB: Transport layer feedback message.
+// RFC 4585, Section 6.2.
+class Rtpfb : public RtcpPacket {
+ public:
+ static const uint8_t kPacketType = 205;
+
+ Rtpfb() : sender_ssrc_(0), media_ssrc_(0) {}
+ virtual ~Rtpfb() {}
+
+ void From(uint32_t ssrc) { sender_ssrc_ = ssrc; }
+ void To(uint32_t ssrc) { media_ssrc_ = ssrc; }
+
+ uint32_t sender_ssrc() const { return sender_ssrc_; }
+ uint32_t media_ssrc() const { return media_ssrc_; }
+
+ protected:
+ static const size_t kCommonFeedbackLength = 8;
+ void ParseCommonFeedback(const uint8_t* payload);
+ void CreateCommonFeedback(uint8_t* payload) const;
+
+ private:
+ uint32_t sender_ssrc_;
+ uint32_t media_ssrc_;
+};
+
+} // namespace rtcp
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_RTPFB_H_
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/sli.cc b/webrtc/modules/rtp_rtcp/source/rtcp_packet/sli.cc
new file mode 100644
index 0000000000..829f3a9db9
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/sli.cc
@@ -0,0 +1,108 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/sli.h"
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
+
+using webrtc::RTCPUtility::RtcpCommonHeader;
+
+namespace webrtc {
+namespace rtcp {
+// RFC 4585: Feedback format.
+//
+// Common packet format:
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |V=2|P| FMT | PT | length |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SSRC of packet sender |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SSRC of media source |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// : Feedback Control Information (FCI) :
+// : :
+//
+// Slice loss indication (SLI) (RFC 4585).
+// FCI:
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | First | Number | PictureID |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+Sli::Macroblocks::Macroblocks(uint8_t picture_id,
+ uint16_t first,
+ uint16_t number) {
+ RTC_DCHECK_LE(first, 0x1fff);
+ RTC_DCHECK_LE(number, 0x1fff);
+ RTC_DCHECK_LE(picture_id, 0x3f);
+ item_ = (first << 19) | (number << 6) | picture_id;
+}
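+
+// For illustration (hypothetical values): picture_id = 5, first = 2,
+// number = 3 packs to item_ = (2 << 19) | (3 << 6) | 5 = 0x1000C5.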
+
+void Sli::Macroblocks::Parse(const uint8_t* buffer) {
+ item_ = ByteReader<uint32_t>::ReadBigEndian(buffer);
+}
+
+void Sli::Macroblocks::Create(uint8_t* buffer) const {
+ ByteWriter<uint32_t>::WriteBigEndian(buffer, item_);
+}
+
+bool Sli::Parse(const RtcpCommonHeader& header, const uint8_t* payload) {
+ RTC_DCHECK(header.packet_type == kPacketType);
+ RTC_DCHECK(header.count_or_format == kFeedbackMessageType);
+
+ if (header.payload_size_bytes <
+ kCommonFeedbackLength + Macroblocks::kLength) {
+ LOG(LS_WARNING) << "Packet is too small to be a valid SLI packet";
+ return false;
+ }
+
+ size_t number_of_items =
+ (header.payload_size_bytes - kCommonFeedbackLength) /
+ Macroblocks::kLength;
+
+ ParseCommonFeedback(payload);
+ items_.resize(number_of_items);
+
+ const uint8_t* next_item = payload + kCommonFeedbackLength;
+ for (Macroblocks& item : items_) {
+ item.Parse(next_item);
+ next_item += Macroblocks::kLength;
+ }
+
+ return true;
+}
+
+bool Sli::Create(uint8_t* packet,
+ size_t* index,
+ size_t max_length,
+ RtcpPacket::PacketReadyCallback* callback) const {
+ RTC_DCHECK(!items_.empty());
+ while (*index + BlockLength() > max_length) {
+ if (!OnBufferFull(packet, index, callback))
+ return false;
+ }
+ CreateHeader(kFeedbackMessageType, kPacketType, HeaderLength(), packet,
+ index);
+ CreateCommonFeedback(packet + *index);
+ *index += kCommonFeedbackLength;
+ for (const Macroblocks& item : items_) {
+ item.Create(packet + *index);
+ *index += Macroblocks::kLength;
+ }
+ return true;
+}
+
+} // namespace rtcp
+} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/sli.h b/webrtc/modules/rtp_rtcp/source/rtcp_packet/sli.h
new file mode 100644
index 0000000000..5d9e6c93e9
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/sli.h
@@ -0,0 +1,81 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_SLI_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_SLI_H_
+
+#include <vector>
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/psfb.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
+
+namespace webrtc {
+namespace rtcp {
+
+// Slice loss indication (SLI) (RFC 4585).
+class Sli : public Psfb {
+ public:
+ static const uint8_t kFeedbackMessageType = 2;
+ class Macroblocks {
+ public:
+ static const size_t kLength = 4;
+ Macroblocks() : item_(0) {}
+ Macroblocks(uint8_t picture_id, uint16_t first, uint16_t number);
+ ~Macroblocks() {}
+
+ void Parse(const uint8_t* buffer);
+ void Create(uint8_t* buffer) const;
+
+ uint16_t first() const { return item_ >> 19; }
+ uint16_t number() const { return (item_ >> 6) & 0x1fff; }
+ uint8_t picture_id() const { return (item_ & 0x3f); }
+
+ private:
+ uint32_t item_;
+ };
+
+ Sli() {}
+ virtual ~Sli() {}
+
+ // Parse assumes header is already parsed and validated.
+ bool Parse(const RTCPUtility::RtcpCommonHeader& header,
+ const uint8_t* payload); // Size of the payload is in the header.
+
+ void WithPictureId(uint8_t picture_id,
+ uint16_t first_macroblock = 0,
+ uint16_t number_macroblocks = 0x1fff) {
+ items_.push_back(
+ Macroblocks(picture_id, first_macroblock, number_macroblocks));
+ }
+
+ const std::vector<Macroblocks>& macroblocks() const { return items_; }
+
+ protected:
+ bool Create(uint8_t* packet,
+ size_t* index,
+ size_t max_length,
+ RtcpPacket::PacketReadyCallback* callback) const override;
+
+ private:
+ size_t BlockLength() const override {
+ return RtcpPacket::kHeaderLength + Psfb::kCommonFeedbackLength +
+ items_.size() * Macroblocks::kLength;
+ }
+
+ std::vector<Macroblocks> items_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(Sli);
+};
+
+} // namespace rtcp
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_SLI_H_
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/sli_unittest.cc b/webrtc/modules/rtp_rtcp/source/rtcp_packet/sli_unittest.cc
new file mode 100644
index 0000000000..c2be16846b
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/sli_unittest.cc
@@ -0,0 +1,91 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/sli.h"
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using testing::ElementsAreArray;
+using testing::make_tuple;
+using webrtc::rtcp::RawPacket;
+using webrtc::rtcp::Sli;
+using webrtc::RTCPUtility::RtcpCommonHeader;
+using webrtc::RTCPUtility::RtcpParseCommonHeader;
+
+namespace webrtc {
+namespace {
+
+const uint32_t kSenderSsrc = 0x12345678;
+const uint32_t kRemoteSsrc = 0x23456789;
+
+const uint8_t kPictureId = 0x3f;
+const uint16_t kFirstMb = 0x1e61;
+const uint16_t kNumberOfMb = 0x1a0a;
+const uint32_t kSliItem = (static_cast<uint32_t>(kFirstMb) << 19) |
+ (static_cast<uint32_t>(kNumberOfMb) << 6) |
+ static_cast<uint32_t>(kPictureId);
+
+// Manually created Sli packet matching constants above.
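+// Header byte breakdown (assumed reading of the constants below): 0x82 =
+// V:2, P:0, FMT:2 (SLI); PT 206 (PSFB); length 0x0003 words, 16 bytes total.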
+const uint8_t kPacket[] = {0x82, 206, 0x00, 0x03,
+ 0x12, 0x34, 0x56, 0x78,
+ 0x23, 0x45, 0x67, 0x89,
+ (kSliItem >> 24) & 0xff,
+ (kSliItem >> 16) & 0xff,
+ (kSliItem >> 8) & 0xff,
+ kSliItem & 0xff};
+const size_t kPacketLength = sizeof(kPacket);
+
+bool ParseSli(const uint8_t* buffer, size_t length, Sli* sli) {
+ RtcpCommonHeader header;
+ EXPECT_TRUE(RtcpParseCommonHeader(buffer, length, &header));
+ EXPECT_EQ(length, header.BlockSize());
+ return sli->Parse(header, buffer + RtcpCommonHeader::kHeaderSizeBytes);
+}
+
+TEST(RtcpPacketSliTest, Create) {
+ Sli sli;
+ sli.From(kSenderSsrc);
+ sli.To(kRemoteSsrc);
+ sli.WithPictureId(kPictureId, kFirstMb, kNumberOfMb);
+
+ rtc::scoped_ptr<RawPacket> packet(sli.Build());
+
+ EXPECT_THAT(make_tuple(packet->Buffer(), packet->Length()),
+ ElementsAreArray(kPacket));
+}
+
+TEST(RtcpPacketSliTest, Parse) {
+ Sli mutable_parsed;
+ EXPECT_TRUE(ParseSli(kPacket, kPacketLength, &mutable_parsed));
+ const Sli& parsed = mutable_parsed; // Read values from constant object.
+
+ EXPECT_EQ(kSenderSsrc, parsed.sender_ssrc());
+ EXPECT_EQ(kRemoteSsrc, parsed.media_ssrc());
+ EXPECT_EQ(1u, parsed.macroblocks().size());
+ EXPECT_EQ(kFirstMb, parsed.macroblocks()[0].first());
+ EXPECT_EQ(kNumberOfMb, parsed.macroblocks()[0].number());
+ EXPECT_EQ(kPictureId, parsed.macroblocks()[0].picture_id());
+}
+
+TEST(RtcpPacketSliTest, ParseFailsOnTooSmallPacket) {
+ Sli sli;
+ sli.From(kSenderSsrc);
+ sli.To(kRemoteSsrc);
+ sli.WithPictureId(kPictureId, kFirstMb, kNumberOfMb);
+
+ rtc::scoped_ptr<RawPacket> packet(sli.Build());
+ packet->MutableBuffer()[3]--; // Decrease size by 1 word (4 bytes).
+
+ EXPECT_FALSE(ParseSli(packet->Buffer(), packet->Length() - 4, &sli));
+}
+
+} // namespace
+} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbn.cc b/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbn.cc
new file mode 100644
index 0000000000..fd0219cf82
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbn.cc
@@ -0,0 +1,119 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbn.h"
+
+#include "webrtc/base/logging.h"
+#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
+
+using webrtc::RTCPUtility::PT_RTPFB;
+using webrtc::RTCPUtility::RTCPPacketRTPFBTMMBN;
+using webrtc::RTCPUtility::RTCPPacketRTPFBTMMBRItem;
+
+namespace webrtc {
+namespace rtcp {
+namespace {
+const uint32_t kUnusedMediaSourceSsrc0 = 0;
+void AssignUWord8(uint8_t* buffer, size_t* offset, uint8_t value) {
+ buffer[(*offset)++] = value;
+}
+void AssignUWord32(uint8_t* buffer, size_t* offset, uint32_t value) {
+ ByteWriter<uint32_t>::WriteBigEndian(buffer + *offset, value);
+ *offset += 4;
+}
+
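+// Worked example (values chosen for illustration): input_base10 = 312000
+// with bits_mantissa = 17 yields exponent = 2 and mantissa = 78000, since
+// 78000 * 2^2 == 312000 exactly.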
+void ComputeMantissaAnd6bitBase2Exponent(uint32_t input_base10,
+ uint8_t bits_mantissa,
+ uint32_t* mantissa,
+ uint8_t* exp) {
+ // input_base10 = mantissa * 2^exp
+ assert(bits_mantissa <= 32);
+ uint32_t mantissa_max = (1 << bits_mantissa) - 1;
+ uint8_t exponent = 0;
+ for (uint32_t i = 0; i < 64; ++i) {
+ if (input_base10 <= (mantissa_max << i)) {
+ exponent = i;
+ break;
+ }
+ }
+ *exp = exponent;
+ *mantissa = (input_base10 >> exponent);
+}
+
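+// Packs the 32-bit SSRC, 6-bit exponent, 17-bit mantissa and 9-bit measured
+// overhead into the 8-byte FCI entry laid out in the diagram below.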
+void CreateTmmbrItem(const RTCPPacketRTPFBTMMBRItem& tmmbr_item,
+ uint8_t* buffer,
+ size_t* pos) {
+ uint32_t bitrate_bps = tmmbr_item.MaxTotalMediaBitRate * 1000;
+ uint32_t mantissa = 0;
+ uint8_t exp = 0;
+ ComputeMantissaAnd6bitBase2Exponent(bitrate_bps, 17, &mantissa, &exp);
+
+ AssignUWord32(buffer, pos, tmmbr_item.SSRC);
+ AssignUWord8(buffer, pos, (exp << 2) + ((mantissa >> 15) & 0x03));
+ AssignUWord8(buffer, pos, mantissa >> 7);
+ AssignUWord8(buffer, pos, (mantissa << 1) +
+ ((tmmbr_item.MeasuredOverhead >> 8) & 0x01));
+ AssignUWord8(buffer, pos, tmmbr_item.MeasuredOverhead);
+}
+
+// Temporary Maximum Media Stream Bit Rate Notification (TMMBN) (RFC 5104).
+//
+// FCI:
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SSRC |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | MxTBR Exp | MxTBR Mantissa |Measured Overhead|
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+void CreateTmmbn(const RTCPPacketRTPFBTMMBN& tmmbn,
+ const std::vector<RTCPPacketRTPFBTMMBRItem>& tmmbn_items,
+ uint8_t* buffer,
+ size_t* pos) {
+ AssignUWord32(buffer, pos, tmmbn.SenderSSRC);
+ AssignUWord32(buffer, pos, kUnusedMediaSourceSsrc0);
+ for (uint8_t i = 0; i < tmmbn_items.size(); ++i) {
+ CreateTmmbrItem(tmmbn_items[i], buffer, pos);
+ }
+}
+} // namespace
+
+bool Tmmbn::WithTmmbr(uint32_t ssrc, uint32_t bitrate_kbps, uint16_t overhead) {
+ assert(overhead <= 0x1ff);
+ if (tmmbn_items_.size() >= kMaxNumberOfTmmbrs) {
+ LOG(LS_WARNING) << "Max TMMBN size reached.";
+ return false;
+ }
+ RTCPPacketRTPFBTMMBRItem tmmbn_item;
+ tmmbn_item.SSRC = ssrc;
+ tmmbn_item.MaxTotalMediaBitRate = bitrate_kbps;
+ tmmbn_item.MeasuredOverhead = overhead;
+ tmmbn_items_.push_back(tmmbn_item);
+ return true;
+}
+
+bool Tmmbn::Create(uint8_t* packet,
+ size_t* index,
+ size_t max_length,
+ RtcpPacket::PacketReadyCallback* callback) const {
+ while (*index + BlockLength() > max_length) {
+ if (!OnBufferFull(packet, index, callback))
+ return false;
+ }
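+ // RFC 5104: TMMBN is identified by FMT 4 within RTPFB (PT 205).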
+ const uint8_t kFmt = 4;
+ CreateHeader(kFmt, PT_RTPFB, HeaderLength(), packet, index);
+ CreateTmmbn(tmmbn_, tmmbn_items_, packet, index);
+ return true;
+}
+
+} // namespace rtcp
+} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbn.h b/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbn.h
new file mode 100644
index 0000000000..82bf9dd9e9
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbn.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_TMMBN_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_TMMBN_H_
+
+#include <vector>
+#include "webrtc/base/basictypes.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
+
+namespace webrtc {
+namespace rtcp {
+
+// Temporary Maximum Media Stream Bit Rate Notification (TMMBN) (RFC 5104).
+class Tmmbn : public RtcpPacket {
+ public:
+ Tmmbn() : RtcpPacket() {
+ memset(&tmmbn_, 0, sizeof(tmmbn_));
+ }
+
+ virtual ~Tmmbn() {}
+
+ void From(uint32_t ssrc) {
+ tmmbn_.SenderSSRC = ssrc;
+ }
+ // At most 50 TMMBR items can be added per TMMBN.
+ bool WithTmmbr(uint32_t ssrc, uint32_t bitrate_kbps, uint16_t overhead);
+
+ protected:
+ bool Create(uint8_t* packet,
+ size_t* index,
+ size_t max_length,
+ RtcpPacket::PacketReadyCallback* callback) const override;
+
+ private:
+ static const int kMaxNumberOfTmmbrs = 50;
+
+ size_t BlockLength() const {
+ const size_t kFciLen = 8;
+ return kCommonFbFmtLength + kFciLen * tmmbn_items_.size();
+ }
+
+ RTCPUtility::RTCPPacketRTPFBTMMBN tmmbn_;
+ std::vector<RTCPUtility::RTCPPacketRTPFBTMMBRItem> tmmbn_items_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(Tmmbn);
+};
+
+} // namespace rtcp
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_TMMBN_H_
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbn_unittest.cc b/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbn_unittest.cc
new file mode 100644
index 0000000000..32d64a97b4
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbn_unittest.cc
@@ -0,0 +1,84 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbn.h"
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
+#include "webrtc/test/rtcp_packet_parser.h"
+
+using webrtc::rtcp::RawPacket;
+using webrtc::rtcp::Tmmbn;
+using webrtc::test::RtcpPacketParser;
+
+namespace webrtc {
+const uint32_t kSenderSsrc = 0x12345678;
+const uint32_t kRemoteSsrc = 0x23456789;
+
+TEST(RtcpPacketTest, TmmbnWithNoItem) {
+ Tmmbn tmmbn;
+ tmmbn.From(kSenderSsrc);
+
+ rtc::scoped_ptr<RawPacket> packet(tmmbn.Build());
+ RtcpPacketParser parser;
+ parser.Parse(packet->Buffer(), packet->Length());
+ EXPECT_EQ(1, parser.tmmbn()->num_packets());
+ EXPECT_EQ(kSenderSsrc, parser.tmmbn()->Ssrc());
+ EXPECT_EQ(0, parser.tmmbn_items()->num_packets());
+}
+
+TEST(RtcpPacketTest, TmmbnWithOneItem) {
+ Tmmbn tmmbn;
+ tmmbn.From(kSenderSsrc);
+ EXPECT_TRUE(tmmbn.WithTmmbr(kRemoteSsrc, 312, 60));
+
+ rtc::scoped_ptr<RawPacket> packet(tmmbn.Build());
+ RtcpPacketParser parser;
+ parser.Parse(packet->Buffer(), packet->Length());
+ EXPECT_EQ(1, parser.tmmbn()->num_packets());
+ EXPECT_EQ(kSenderSsrc, parser.tmmbn()->Ssrc());
+ EXPECT_EQ(1, parser.tmmbn_items()->num_packets());
+ EXPECT_EQ(kRemoteSsrc, parser.tmmbn_items()->Ssrc(0));
+ EXPECT_EQ(312U, parser.tmmbn_items()->BitrateKbps(0));
+ EXPECT_EQ(60U, parser.tmmbn_items()->Overhead(0));
+}
+
+TEST(RtcpPacketTest, TmmbnWithTwoItems) {
+ Tmmbn tmmbn;
+ tmmbn.From(kSenderSsrc);
+ EXPECT_TRUE(tmmbn.WithTmmbr(kRemoteSsrc, 312, 60));
+ EXPECT_TRUE(tmmbn.WithTmmbr(kRemoteSsrc + 1, 1288, 40));
+
+ rtc::scoped_ptr<RawPacket> packet(tmmbn.Build());
+ RtcpPacketParser parser;
+ parser.Parse(packet->Buffer(), packet->Length());
+ EXPECT_EQ(1, parser.tmmbn()->num_packets());
+ EXPECT_EQ(kSenderSsrc, parser.tmmbn()->Ssrc());
+ EXPECT_EQ(2, parser.tmmbn_items()->num_packets());
+ EXPECT_EQ(kRemoteSsrc, parser.tmmbn_items()->Ssrc(0));
+ EXPECT_EQ(312U, parser.tmmbn_items()->BitrateKbps(0));
+ EXPECT_EQ(60U, parser.tmmbn_items()->Overhead(0));
+ EXPECT_EQ(kRemoteSsrc + 1, parser.tmmbn_items()->Ssrc(1));
+ EXPECT_EQ(1288U, parser.tmmbn_items()->BitrateKbps(1));
+ EXPECT_EQ(40U, parser.tmmbn_items()->Overhead(1));
+}
+
+TEST(RtcpPacketTest, TmmbnWithTooManyItems) {
+ Tmmbn tmmbn;
+ tmmbn.From(kSenderSsrc);
+ const int kMaxTmmbrItems = 50;
+ for (int i = 0; i < kMaxTmmbrItems; ++i)
+ EXPECT_TRUE(tmmbn.WithTmmbr(kRemoteSsrc + i, 312, 60));
+
+ EXPECT_FALSE(tmmbn.WithTmmbr(kRemoteSsrc + kMaxTmmbrItems, 312, 60));
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbr.cc b/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbr.cc
new file mode 100644
index 0000000000..4df167de79
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbr.cc
@@ -0,0 +1,105 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbr.h"
+
+#include "webrtc/base/logging.h"
+#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
+
+using webrtc::RTCPUtility::PT_RTPFB;
+using webrtc::RTCPUtility::RTCPPacketRTPFBTMMBR;
+using webrtc::RTCPUtility::RTCPPacketRTPFBTMMBRItem;
+
+namespace webrtc {
+namespace rtcp {
+namespace {
+const uint32_t kUnusedMediaSourceSsrc0 = 0;
+
+void AssignUWord8(uint8_t* buffer, size_t* offset, uint8_t value) {
+ buffer[(*offset)++] = value;
+}
+
+void AssignUWord32(uint8_t* buffer, size_t* offset, uint32_t value) {
+ ByteWriter<uint32_t>::WriteBigEndian(buffer + *offset, value);
+ *offset += 4;
+}
+
+void ComputeMantissaAnd6bitBase2Exponent(uint32_t input_base10,
+ uint8_t bits_mantissa,
+ uint32_t* mantissa,
+ uint8_t* exp) {
+ // input_base10 = mantissa * 2^exp
+ assert(bits_mantissa <= 32);
+ uint32_t mantissa_max = (1 << bits_mantissa) - 1;
+ uint8_t exponent = 0;
+ for (uint32_t i = 0; i < 64; ++i) {
+ if (input_base10 <= (mantissa_max << i)) {
+ exponent = i;
+ break;
+ }
+ }
+ *exp = exponent;
+ *mantissa = (input_base10 >> exponent);
+}
+
+void CreateTmmbrItem(const RTCPPacketRTPFBTMMBRItem& tmmbr_item,
+ uint8_t* buffer,
+ size_t* pos) {
+ uint32_t bitrate_bps = tmmbr_item.MaxTotalMediaBitRate * 1000;
+ uint32_t mantissa = 0;
+ uint8_t exp = 0;
+ ComputeMantissaAnd6bitBase2Exponent(bitrate_bps, 17, &mantissa, &exp);
+
+ AssignUWord32(buffer, pos, tmmbr_item.SSRC);
+ AssignUWord8(buffer, pos, (exp << 2) + ((mantissa >> 15) & 0x03));
+ AssignUWord8(buffer, pos, mantissa >> 7);
+ AssignUWord8(buffer, pos, (mantissa << 1) +
+ ((tmmbr_item.MeasuredOverhead >> 8) & 0x01));
+ AssignUWord8(buffer, pos, tmmbr_item.MeasuredOverhead);
+}
+
+// Temporary Maximum Media Stream Bit Rate Request (TMMBR) (RFC 5104).
+//
+// FCI:
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SSRC |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | MxTBR Exp | MxTBR Mantissa |Measured Overhead|
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+void CreateTmmbr(const RTCPPacketRTPFBTMMBR& tmmbr,
+ const RTCPPacketRTPFBTMMBRItem& tmmbr_item,
+ uint8_t* buffer,
+ size_t* pos) {
+ AssignUWord32(buffer, pos, tmmbr.SenderSSRC);
+ AssignUWord32(buffer, pos, kUnusedMediaSourceSsrc0);
+ CreateTmmbrItem(tmmbr_item, buffer, pos);
+}
+} // namespace
+
+bool Tmmbr::Create(uint8_t* packet,
+ size_t* index,
+ size_t max_length,
+ RtcpPacket::PacketReadyCallback* callback) const {
+ while (*index + BlockLength() > max_length) {
+ if (!OnBufferFull(packet, index, callback))
+ return false;
+ }
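+ // RFC 5104: TMMBR is identified by FMT 3 within RTPFB (PT 205).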
+ const uint8_t kFmt = 3;
+ CreateHeader(kFmt, PT_RTPFB, HeaderLength(), packet, index);
+ CreateTmmbr(tmmbr_, tmmbr_item_, packet, index);
+ return true;
+}
+
+} // namespace rtcp
+} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbr.h b/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbr.h
new file mode 100644
index 0000000000..84a4180ad3
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbr.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_TMMBR_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_TMMBR_H_
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
+
+namespace webrtc {
+namespace rtcp {
+// Temporary Maximum Media Stream Bit Rate Request (TMMBR) (RFC 5104).
+class Tmmbr : public RtcpPacket {
+ public:
+ Tmmbr() : RtcpPacket() {
+ memset(&tmmbr_, 0, sizeof(tmmbr_));
+ memset(&tmmbr_item_, 0, sizeof(tmmbr_item_));
+ }
+
+ virtual ~Tmmbr() {}
+
+ void From(uint32_t ssrc) {
+ tmmbr_.SenderSSRC = ssrc;
+ }
+ void To(uint32_t ssrc) {
+ tmmbr_item_.SSRC = ssrc;
+ }
+ void WithBitrateKbps(uint32_t bitrate_kbps) {
+ tmmbr_item_.MaxTotalMediaBitRate = bitrate_kbps;
+ }
+ void WithOverhead(uint16_t overhead) {
+ assert(overhead <= 0x1ff);
+ tmmbr_item_.MeasuredOverhead = overhead;
+ }
+
+ protected:
+ bool Create(uint8_t* packet,
+ size_t* index,
+ size_t max_length,
+ RtcpPacket::PacketReadyCallback* callback) const override;
+
+ private:
+ size_t BlockLength() const {
+ const size_t kFciLen = 8;
+ return kCommonFbFmtLength + kFciLen;
+ }
+
+ RTCPUtility::RTCPPacketRTPFBTMMBR tmmbr_;
+ RTCPUtility::RTCPPacketRTPFBTMMBRItem tmmbr_item_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(Tmmbr);
+};
+} // namespace rtcp
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_TMMBR_H_
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbr_unittest.cc b/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbr_unittest.cc
new file mode 100644
index 0000000000..6d71caa251
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbr_unittest.cc
@@ -0,0 +1,43 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbr.h"
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
+#include "webrtc/test/rtcp_packet_parser.h"
+
+using webrtc::rtcp::RawPacket;
+using webrtc::rtcp::Tmmbr;
+using webrtc::test::RtcpPacketParser;
+
+namespace webrtc {
+const uint32_t kSenderSsrc = 0x12345678;
+const uint32_t kRemoteSsrc = 0x23456789;
+
+TEST(RtcpPacketTest, Tmmbr) {
+ Tmmbr tmmbr;
+ tmmbr.From(kSenderSsrc);
+ tmmbr.To(kRemoteSsrc);
+ tmmbr.WithBitrateKbps(312);
+ tmmbr.WithOverhead(60);
+
+ rtc::scoped_ptr<RawPacket> packet(tmmbr.Build());
+ RtcpPacketParser parser;
+ parser.Parse(packet->Buffer(), packet->Length());
+ EXPECT_EQ(1, parser.tmmbr()->num_packets());
+ EXPECT_EQ(kSenderSsrc, parser.tmmbr()->Ssrc());
+ EXPECT_EQ(1, parser.tmmbr_item()->num_packets());
+ EXPECT_EQ(312U, parser.tmmbr_item()->BitrateKbps());
+ EXPECT_EQ(60U, parser.tmmbr_item()->Overhead());
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h b/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h
index 4cc1f38479..ad6fd166f2 100644
--- a/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h
@@ -15,7 +15,7 @@
#include <vector>
#include "webrtc/base/constructormagic.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
namespace webrtc {
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/voip_metric.cc b/webrtc/modules/rtp_rtcp/source/rtcp_packet/voip_metric.cc
new file mode 100644
index 0000000000..a79d48e1ca
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/voip_metric.cc
@@ -0,0 +1,107 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/voip_metric.h"
+
+#include "webrtc/base/checks.h"
+#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
+
+namespace webrtc {
+namespace rtcp {
+// VoIP Metrics Report Block (RFC 3611).
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// 0 | BT=7 | reserved | block length = 8 |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// 4 | SSRC of source |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// 8 | loss rate | discard rate | burst density | gap density |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// 12 | burst duration | gap duration |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// 16 | round trip delay | end system delay |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// 20 | signal level | noise level | RERL | Gmin |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// 24 | R factor | ext. R factor | MOS-LQ | MOS-CQ |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// 28 | RX config | reserved | JB nominal |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// 32 | JB maximum | JB abs max |
+// 36 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+VoipMetric::VoipMetric() : ssrc_(0) {
+ memset(&voip_metric_, 0, sizeof(voip_metric_));
+}
+
+void VoipMetric::Parse(const uint8_t* buffer) {
+ RTC_DCHECK(buffer[0] == kBlockType);
+ // reserved = buffer[1];
+ RTC_DCHECK(ByteReader<uint16_t>::ReadBigEndian(&buffer[2]) == kBlockLength);
+ ssrc_ = ByteReader<uint32_t>::ReadBigEndian(&buffer[4]);
+ voip_metric_.lossRate = buffer[8];
+ voip_metric_.discardRate = buffer[9];
+ voip_metric_.burstDensity = buffer[10];
+ voip_metric_.gapDensity = buffer[11];
+ voip_metric_.burstDuration = ByteReader<uint16_t>::ReadBigEndian(&buffer[12]);
+ voip_metric_.gapDuration = ByteReader<uint16_t>::ReadBigEndian(&buffer[14]);
+ voip_metric_.roundTripDelay =
+ ByteReader<uint16_t>::ReadBigEndian(&buffer[16]);
+ voip_metric_.endSystemDelay =
+ ByteReader<uint16_t>::ReadBigEndian(&buffer[18]);
+ voip_metric_.signalLevel = buffer[20];
+ voip_metric_.noiseLevel = buffer[21];
+ voip_metric_.RERL = buffer[22];
+ voip_metric_.Gmin = buffer[23];
+ voip_metric_.Rfactor = buffer[24];
+ voip_metric_.extRfactor = buffer[25];
+ voip_metric_.MOSLQ = buffer[26];
+ voip_metric_.MOSCQ = buffer[27];
+ voip_metric_.RXconfig = buffer[28];
+ // reserved = buffer[29];
+ voip_metric_.JBnominal = ByteReader<uint16_t>::ReadBigEndian(&buffer[30]);
+ voip_metric_.JBmax = ByteReader<uint16_t>::ReadBigEndian(&buffer[32]);
+ voip_metric_.JBabsMax = ByteReader<uint16_t>::ReadBigEndian(&buffer[34]);
+}
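+
+// Both Parse() and Create() assume the caller provides at least kLength (36)
+// bytes; the DCHECKs in Parse() only verify the block type and declared
+// length.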
+
+void VoipMetric::Create(uint8_t* buffer) const {
+ const uint8_t kReserved = 0;
+ buffer[0] = kBlockType;
+ buffer[1] = kReserved;
+ ByteWriter<uint16_t>::WriteBigEndian(&buffer[2], kBlockLength);
+ ByteWriter<uint32_t>::WriteBigEndian(&buffer[4], ssrc_);
+ buffer[8] = voip_metric_.lossRate;
+ buffer[9] = voip_metric_.discardRate;
+ buffer[10] = voip_metric_.burstDensity;
+ buffer[11] = voip_metric_.gapDensity;
+ ByteWriter<uint16_t>::WriteBigEndian(&buffer[12], voip_metric_.burstDuration);
+ ByteWriter<uint16_t>::WriteBigEndian(&buffer[14], voip_metric_.gapDuration);
+ ByteWriter<uint16_t>::WriteBigEndian(&buffer[16],
+ voip_metric_.roundTripDelay);
+ ByteWriter<uint16_t>::WriteBigEndian(&buffer[18],
+ voip_metric_.endSystemDelay);
+ buffer[20] = voip_metric_.signalLevel;
+ buffer[21] = voip_metric_.noiseLevel;
+ buffer[22] = voip_metric_.RERL;
+ buffer[23] = voip_metric_.Gmin;
+ buffer[24] = voip_metric_.Rfactor;
+ buffer[25] = voip_metric_.extRfactor;
+ buffer[26] = voip_metric_.MOSLQ;
+ buffer[27] = voip_metric_.MOSCQ;
+ buffer[28] = voip_metric_.RXconfig;
+ buffer[29] = kReserved;
+ ByteWriter<uint16_t>::WriteBigEndian(&buffer[30], voip_metric_.JBnominal);
+ ByteWriter<uint16_t>::WriteBigEndian(&buffer[32], voip_metric_.JBmax);
+ ByteWriter<uint16_t>::WriteBigEndian(&buffer[34], voip_metric_.JBabsMax);
+}
+
+} // namespace rtcp
+} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/voip_metric.h b/webrtc/modules/rtp_rtcp/source/rtcp_packet/voip_metric.h
new file mode 100644
index 0000000000..9e3e41995a
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/voip_metric.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_VOIP_METRIC_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_VOIP_METRIC_H_
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/modules/include/module_common_types.h"
+
+namespace webrtc {
+namespace rtcp {
+
+class VoipMetric {
+ public:
+ static const uint8_t kBlockType = 7;
+ static const uint16_t kBlockLength = 8;
+ static const size_t kLength = 4 * (kBlockLength + 1); // 36
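+ // (Per RFC 3611 the block length field counts the 32-bit words that follow
+ // the first header word, hence 4 * (8 + 1) = 36 bytes in total.)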
+ VoipMetric();
+ VoipMetric(const VoipMetric&) = default;
+ ~VoipMetric() {}
+
+ VoipMetric& operator=(const VoipMetric&) = default;
+
+ void Parse(const uint8_t* buffer);
+
+ // Fills buffer with the VoipMetric.
+ // Consumes VoipMetric::kLength bytes.
+ void Create(uint8_t* buffer) const;
+
+ void To(uint32_t ssrc) { ssrc_ = ssrc; }
+ void WithVoipMetric(const RTCPVoIPMetric& voip_metric) {
+ voip_metric_ = voip_metric;
+ }
+
+ uint32_t ssrc() const { return ssrc_; }
+ const RTCPVoIPMetric& voip_metric() const { return voip_metric_; }
+
+ private:
+ uint32_t ssrc_;
+ RTCPVoIPMetric voip_metric_;
+};
+
+} // namespace rtcp
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_VOIP_METRIC_H_
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet/voip_metric_unittest.cc b/webrtc/modules/rtp_rtcp/source/rtcp_packet/voip_metric_unittest.cc
new file mode 100644
index 0000000000..44c82d67a9
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet/voip_metric_unittest.cc
@@ -0,0 +1,93 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/voip_metric.h"
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace webrtc {
+namespace rtcp {
+namespace {
+
+const uint32_t kRemoteSsrc = 0x23456789;
+const uint8_t kBlock[] = {0x07, 0x00, 0x00, 0x08, 0x23, 0x45, 0x67, 0x89,
+ 0x01, 0x02, 0x03, 0x04, 0x11, 0x12, 0x22, 0x23,
+ 0x33, 0x34, 0x44, 0x45, 0x05, 0x06, 0x07, 0x08,
+ 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x00, 0x55, 0x56,
+ 0x66, 0x67, 0x77, 0x78};
+const size_t kBlockSizeBytes = sizeof(kBlock);
+static_assert(
+ kBlockSizeBytes == VoipMetric::kLength,
+ "Size of manually created Voip Metric block should match class constant");
+
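+// In kBlock, bytes 0-3 carry BT = 7, a reserved byte and block length = 8
+// words; bytes 4-7 carry the SSRC 0x23456789; the remaining bytes follow the
+// field order written by VoipMetric::Create().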
+TEST(RtcpPacketVoipMetricTest, Create) {
+ uint8_t buffer[VoipMetric::kLength];
+ RTCPVoIPMetric metric;
+ metric.lossRate = 1;
+ metric.discardRate = 2;
+ metric.burstDensity = 3;
+ metric.gapDensity = 4;
+ metric.burstDuration = 0x1112;
+ metric.gapDuration = 0x2223;
+ metric.roundTripDelay = 0x3334;
+ metric.endSystemDelay = 0x4445;
+ metric.signalLevel = 5;
+ metric.noiseLevel = 6;
+ metric.RERL = 7;
+ metric.Gmin = 8;
+ metric.Rfactor = 9;
+ metric.extRfactor = 10;
+ metric.MOSLQ = 11;
+ metric.MOSCQ = 12;
+ metric.RXconfig = 13;
+ metric.JBnominal = 0x5556;
+ metric.JBmax = 0x6667;
+ metric.JBabsMax = 0x7778;
+ VoipMetric metric_block;
+ metric_block.To(kRemoteSsrc);
+ metric_block.WithVoipMetric(metric);
+
+ metric_block.Create(buffer);
+ EXPECT_EQ(0, memcmp(buffer, kBlock, kBlockSizeBytes));
+}
+
+TEST(RtcpPacketVoipMetricTest, Parse) {
+ VoipMetric read_metric;
+ read_metric.Parse(kBlock);
+
+ // Run checks on const object to ensure all accessors have const modifier.
+ const VoipMetric& parsed = read_metric;
+
+ EXPECT_EQ(kRemoteSsrc, parsed.ssrc());
+ EXPECT_EQ(1, parsed.voip_metric().lossRate);
+ EXPECT_EQ(2, parsed.voip_metric().discardRate);
+ EXPECT_EQ(3, parsed.voip_metric().burstDensity);
+ EXPECT_EQ(4, parsed.voip_metric().gapDensity);
+ EXPECT_EQ(0x1112, parsed.voip_metric().burstDuration);
+ EXPECT_EQ(0x2223, parsed.voip_metric().gapDuration);
+ EXPECT_EQ(0x3334, parsed.voip_metric().roundTripDelay);
+ EXPECT_EQ(0x4445, parsed.voip_metric().endSystemDelay);
+ EXPECT_EQ(5, parsed.voip_metric().signalLevel);
+ EXPECT_EQ(6, parsed.voip_metric().noiseLevel);
+ EXPECT_EQ(7, parsed.voip_metric().RERL);
+ EXPECT_EQ(8, parsed.voip_metric().Gmin);
+ EXPECT_EQ(9, parsed.voip_metric().Rfactor);
+ EXPECT_EQ(10, parsed.voip_metric().extRfactor);
+ EXPECT_EQ(11, parsed.voip_metric().MOSLQ);
+ EXPECT_EQ(12, parsed.voip_metric().MOSCQ);
+ EXPECT_EQ(13, parsed.voip_metric().RXconfig);
+ EXPECT_EQ(0x5556, parsed.voip_metric().JBnominal);
+ EXPECT_EQ(0x6667, parsed.voip_metric().JBmax);
+ EXPECT_EQ(0x7778, parsed.voip_metric().JBabsMax);
+}
+
+} // namespace
+} // namespace rtcp
+} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_packet_unittest.cc b/webrtc/modules/rtp_rtcp/source/rtcp_packet_unittest.cc
index 77520b633b..22f61f5cab 100644
--- a/webrtc/modules/rtp_rtcp/source/rtcp_packet_unittest.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_packet_unittest.cc
@@ -14,6 +14,9 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/app.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/bye.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/receiver_report.h"
#include "webrtc/test/rtcp_packet_parser.h"
using ::testing::ElementsAre;
@@ -21,23 +24,15 @@ using ::testing::ElementsAre;
using webrtc::rtcp::App;
using webrtc::rtcp::Bye;
using webrtc::rtcp::Dlrr;
-using webrtc::rtcp::Empty;
using webrtc::rtcp::Fir;
-using webrtc::rtcp::Ij;
-using webrtc::rtcp::Nack;
-using webrtc::rtcp::Pli;
-using webrtc::rtcp::Sdes;
-using webrtc::rtcp::SenderReport;
-using webrtc::rtcp::Sli;
using webrtc::rtcp::RawPacket;
using webrtc::rtcp::ReceiverReport;
using webrtc::rtcp::Remb;
using webrtc::rtcp::ReportBlock;
using webrtc::rtcp::Rpsi;
using webrtc::rtcp::Rrtr;
+using webrtc::rtcp::Sdes;
using webrtc::rtcp::SenderReport;
-using webrtc::rtcp::Tmmbn;
-using webrtc::rtcp::Tmmbr;
using webrtc::rtcp::VoipMetric;
using webrtc::rtcp::Xr;
using webrtc::test::RtcpPacketParser;
@@ -47,81 +42,6 @@ namespace webrtc {
const uint32_t kSenderSsrc = 0x12345678;
const uint32_t kRemoteSsrc = 0x23456789;
-TEST(RtcpPacketTest, Rr) {
- ReceiverReport rr;
- rr.From(kSenderSsrc);
-
- rtc::scoped_ptr<RawPacket> packet(rr.Build());
- RtcpPacketParser parser;
- parser.Parse(packet->Buffer(), packet->Length());
- EXPECT_EQ(1, parser.receiver_report()->num_packets());
- EXPECT_EQ(kSenderSsrc, parser.receiver_report()->Ssrc());
- EXPECT_EQ(0, parser.report_block()->num_packets());
-}
-
-TEST(RtcpPacketTest, RrWithOneReportBlock) {
- ReportBlock rb;
- rb.To(kRemoteSsrc);
- rb.WithFractionLost(55);
- rb.WithCumulativeLost(0x111111);
- rb.WithExtHighestSeqNum(0x22222222);
- rb.WithJitter(0x33333333);
- rb.WithLastSr(0x44444444);
- rb.WithDelayLastSr(0x55555555);
-
- ReceiverReport rr;
- rr.From(kSenderSsrc);
- EXPECT_TRUE(rr.WithReportBlock(rb));
-
- rtc::scoped_ptr<RawPacket> packet(rr.Build());
- RtcpPacketParser parser;
- parser.Parse(packet->Buffer(), packet->Length());
- EXPECT_EQ(1, parser.receiver_report()->num_packets());
- EXPECT_EQ(kSenderSsrc, parser.receiver_report()->Ssrc());
- EXPECT_EQ(1, parser.report_block()->num_packets());
- EXPECT_EQ(kRemoteSsrc, parser.report_block()->Ssrc());
- EXPECT_EQ(55U, parser.report_block()->FractionLost());
- EXPECT_EQ(0x111111U, parser.report_block()->CumPacketLost());
- EXPECT_EQ(0x22222222U, parser.report_block()->ExtHighestSeqNum());
- EXPECT_EQ(0x33333333U, parser.report_block()->Jitter());
- EXPECT_EQ(0x44444444U, parser.report_block()->LastSr());
- EXPECT_EQ(0x55555555U, parser.report_block()->DelayLastSr());
-}
-
-TEST(RtcpPacketTest, RrWithTwoReportBlocks) {
- ReportBlock rb1;
- rb1.To(kRemoteSsrc);
- ReportBlock rb2;
- rb2.To(kRemoteSsrc + 1);
-
- ReceiverReport rr;
- rr.From(kSenderSsrc);
- EXPECT_TRUE(rr.WithReportBlock(rb1));
- EXPECT_TRUE(rr.WithReportBlock(rb2));
-
- rtc::scoped_ptr<RawPacket> packet(rr.Build());
- RtcpPacketParser parser;
- parser.Parse(packet->Buffer(), packet->Length());
- EXPECT_EQ(1, parser.receiver_report()->num_packets());
- EXPECT_EQ(kSenderSsrc, parser.receiver_report()->Ssrc());
- EXPECT_EQ(2, parser.report_block()->num_packets());
- EXPECT_EQ(1, parser.report_blocks_per_ssrc(kRemoteSsrc));
- EXPECT_EQ(1, parser.report_blocks_per_ssrc(kRemoteSsrc + 1));
-}
-
-TEST(RtcpPacketTest, RrWithTooManyReportBlocks) {
- ReceiverReport rr;
- rr.From(kSenderSsrc);
- const int kMaxReportBlocks = (1 << 5) - 1;
- ReportBlock rb;
- for (int i = 0; i < kMaxReportBlocks; ++i) {
- rb.To(kRemoteSsrc + i);
- EXPECT_TRUE(rr.WithReportBlock(rb));
- }
- rb.To(kRemoteSsrc + kMaxReportBlocks);
- EXPECT_FALSE(rr.WithReportBlock(rb));
-}
-
TEST(RtcpPacketTest, Sr) {
SenderReport sr;
sr.From(kSenderSsrc);
@@ -196,50 +116,6 @@ TEST(RtcpPacketTest, SrWithTooManyReportBlocks) {
EXPECT_FALSE(sr.WithReportBlock(rb));
}
-TEST(RtcpPacketTest, IjNoItem) {
- Ij ij;
-
- rtc::scoped_ptr<RawPacket> packet(ij.Build());
- RtcpPacketParser parser;
- parser.Parse(packet->Buffer(), packet->Length());
- EXPECT_EQ(1, parser.ij()->num_packets());
- EXPECT_EQ(0, parser.ij_item()->num_packets());
-}
-
-TEST(RtcpPacketTest, IjOneItem) {
- Ij ij;
- EXPECT_TRUE(ij.WithJitterItem(0x11111111));
-
- rtc::scoped_ptr<RawPacket> packet(ij.Build());
- RtcpPacketParser parser;
- parser.Parse(packet->Buffer(), packet->Length());
- EXPECT_EQ(1, parser.ij()->num_packets());
- EXPECT_EQ(1, parser.ij_item()->num_packets());
- EXPECT_EQ(0x11111111U, parser.ij_item()->Jitter());
-}
-
-TEST(RtcpPacketTest, IjTwoItems) {
- Ij ij;
- EXPECT_TRUE(ij.WithJitterItem(0x11111111));
- EXPECT_TRUE(ij.WithJitterItem(0x22222222));
-
- rtc::scoped_ptr<RawPacket> packet(ij.Build());
- RtcpPacketParser parser;
- parser.Parse(packet->Buffer(), packet->Length());
- EXPECT_EQ(1, parser.ij()->num_packets());
- EXPECT_EQ(2, parser.ij_item()->num_packets());
- EXPECT_EQ(0x22222222U, parser.ij_item()->Jitter());
-}
-
-TEST(RtcpPacketTest, IjTooManyItems) {
- Ij ij;
- const int kMaxIjItems = (1 << 5) - 1;
- for (int i = 0; i < kMaxIjItems; ++i) {
- EXPECT_TRUE(ij.WithJitterItem(i));
- }
- EXPECT_FALSE(ij.WithJitterItem(kMaxIjItems));
-}
-
TEST(RtcpPacketTest, AppWithNoData) {
App app;
app.WithSubType(30);
@@ -339,140 +215,6 @@ TEST(RtcpPacketTest, CnameItemWithEmptyString) {
EXPECT_EQ("", parser.sdes_chunk()->Cname());
}
-TEST(RtcpPacketTest, Pli) {
- Pli pli;
- pli.From(kSenderSsrc);
- pli.To(kRemoteSsrc);
-
- rtc::scoped_ptr<RawPacket> packet(pli.Build());
- RtcpPacketParser parser;
- parser.Parse(packet->Buffer(), packet->Length());
- EXPECT_EQ(1, parser.pli()->num_packets());
- EXPECT_EQ(kSenderSsrc, parser.pli()->Ssrc());
- EXPECT_EQ(kRemoteSsrc, parser.pli()->MediaSsrc());
-}
-
-TEST(RtcpPacketTest, Sli) {
- const uint16_t kFirstMb = 7777;
- const uint16_t kNumberOfMb = 6666;
- const uint8_t kPictureId = 60;
- Sli sli;
- sli.From(kSenderSsrc);
- sli.To(kRemoteSsrc);
- sli.WithFirstMb(kFirstMb);
- sli.WithNumberOfMb(kNumberOfMb);
- sli.WithPictureId(kPictureId);
-
- rtc::scoped_ptr<RawPacket> packet(sli.Build());
- RtcpPacketParser parser;
- parser.Parse(packet->Buffer(), packet->Length());
- EXPECT_EQ(1, parser.sli()->num_packets());
- EXPECT_EQ(kSenderSsrc, parser.sli()->Ssrc());
- EXPECT_EQ(kRemoteSsrc, parser.sli()->MediaSsrc());
- EXPECT_EQ(1, parser.sli_item()->num_packets());
- EXPECT_EQ(kFirstMb, parser.sli_item()->FirstMb());
- EXPECT_EQ(kNumberOfMb, parser.sli_item()->NumberOfMb());
- EXPECT_EQ(kPictureId, parser.sli_item()->PictureId());
-}
-
-TEST(RtcpPacketTest, Nack) {
- Nack nack;
- const uint16_t kList[] = {0, 1, 3, 8, 16};
- const uint16_t kListLength = sizeof(kList) / sizeof(kList[0]);
- nack.From(kSenderSsrc);
- nack.To(kRemoteSsrc);
- nack.WithList(kList, kListLength);
- rtc::scoped_ptr<RawPacket> packet(nack.Build());
- RtcpPacketParser parser;
- parser.Parse(packet->Buffer(), packet->Length());
- EXPECT_EQ(1, parser.nack()->num_packets());
- EXPECT_EQ(kSenderSsrc, parser.nack()->Ssrc());
- EXPECT_EQ(kRemoteSsrc, parser.nack()->MediaSsrc());
- EXPECT_EQ(1, parser.nack_item()->num_packets());
- std::vector<uint16_t> seqs = parser.nack_item()->last_nack_list();
- EXPECT_EQ(kListLength, seqs.size());
- for (size_t i = 0; i < kListLength; ++i) {
- EXPECT_EQ(kList[i], seqs[i]);
- }
-}
-
-TEST(RtcpPacketTest, NackWithWrap) {
- Nack nack;
- const uint16_t kList[] = {65500, 65516, 65534, 65535, 0, 1, 3, 20, 100};
- const uint16_t kListLength = sizeof(kList) / sizeof(kList[0]);
- nack.From(kSenderSsrc);
- nack.To(kRemoteSsrc);
- nack.WithList(kList, kListLength);
- rtc::scoped_ptr<RawPacket> packet(nack.Build());
- RtcpPacketParser parser;
- parser.Parse(packet->Buffer(), packet->Length());
- EXPECT_EQ(1, parser.nack()->num_packets());
- EXPECT_EQ(kSenderSsrc, parser.nack()->Ssrc());
- EXPECT_EQ(kRemoteSsrc, parser.nack()->MediaSsrc());
- EXPECT_EQ(4, parser.nack_item()->num_packets());
- std::vector<uint16_t> seqs = parser.nack_item()->last_nack_list();
- EXPECT_EQ(kListLength, seqs.size());
- for (size_t i = 0; i < kListLength; ++i) {
- EXPECT_EQ(kList[i], seqs[i]);
- }
-}
-
-TEST(RtcpPacketTest, NackFragmented) {
- Nack nack;
- const uint16_t kList[] = {1, 100, 200, 300, 400};
- const uint16_t kListLength = sizeof(kList) / sizeof(kList[0]);
- nack.From(kSenderSsrc);
- nack.To(kRemoteSsrc);
- nack.WithList(kList, kListLength);
-
- class Verifier : public rtcp::RtcpPacket::PacketReadyCallback {
- public:
- void OnPacketReady(uint8_t* data, size_t length) override {
- ++packets_created_;
- RtcpPacketParser parser;
- parser.Parse(data, length);
- EXPECT_EQ(1, parser.nack()->num_packets());
- EXPECT_EQ(kSenderSsrc, parser.nack()->Ssrc());
- EXPECT_EQ(kRemoteSsrc, parser.nack()->MediaSsrc());
- switch (packets_created_) {
- case 1:
- EXPECT_THAT(parser.nack_item()->last_nack_list(),
- ElementsAre(1, 100, 200));
- break;
- case 2:
- EXPECT_THAT(parser.nack_item()->last_nack_list(),
- ElementsAre(300, 400));
- break;
- default:
- ADD_FAILURE() << "Unexpected packet count: " << packets_created_;
- }
- }
- int packets_created_ = 0;
- } verifier;
- const size_t kBufferSize = 12 + (3 * 4); // Fits common header + 3 nack items
- uint8_t buffer[kBufferSize];
- EXPECT_TRUE(nack.BuildExternalBuffer(buffer, kBufferSize, &verifier));
- EXPECT_EQ(2, verifier.packets_created_);
-}
-
-TEST(RtcpPacketTest, NackWithTooSmallBuffer) {
- const uint16_t kList[] = {1};
- const size_t kMinNackBlockSize = 16;
- Nack nack;
- nack.From(kSenderSsrc);
- nack.To(kRemoteSsrc);
- nack.WithList(kList, 1);
- class Verifier : public rtcp::RtcpPacket::PacketReadyCallback {
- public:
- void OnPacketReady(uint8_t* data, size_t length) override {
- ADD_FAILURE() << "Buffer should be too small.";
- }
- } verifier;
- uint8_t buffer[kMinNackBlockSize - 1];
- EXPECT_FALSE(
- nack.BuildExternalBuffer(buffer, kMinNackBlockSize - 1, &verifier));
-}
-
TEST(RtcpPacketTest, Rpsi) {
Rpsi rpsi;
// 1000001 (7 bits = 1 byte in native string).
@@ -562,127 +304,6 @@ TEST(RtcpPacketTest, Fir) {
EXPECT_EQ(123U, parser.fir_item()->SeqNum());
}
-TEST(RtcpPacketTest, AppendPacket) {
- Fir fir;
- ReportBlock rb;
- ReceiverReport rr;
- rr.From(kSenderSsrc);
- EXPECT_TRUE(rr.WithReportBlock(rb));
- rr.Append(&fir);
-
- rtc::scoped_ptr<RawPacket> packet(rr.Build());
- RtcpPacketParser parser;
- parser.Parse(packet->Buffer(), packet->Length());
- EXPECT_EQ(1, parser.receiver_report()->num_packets());
- EXPECT_EQ(kSenderSsrc, parser.receiver_report()->Ssrc());
- EXPECT_EQ(1, parser.report_block()->num_packets());
- EXPECT_EQ(1, parser.fir()->num_packets());
-}
-
-TEST(RtcpPacketTest, AppendPacketOnEmpty) {
- Empty empty;
- ReceiverReport rr;
- rr.From(kSenderSsrc);
- empty.Append(&rr);
-
- rtc::scoped_ptr<RawPacket> packet(empty.Build());
- RtcpPacketParser parser;
- parser.Parse(packet->Buffer(), packet->Length());
- EXPECT_EQ(1, parser.receiver_report()->num_packets());
- EXPECT_EQ(0, parser.report_block()->num_packets());
-}
-
-TEST(RtcpPacketTest, AppendPacketWithOwnAppendedPacket) {
- Fir fir;
- Bye bye;
- ReportBlock rb;
-
- ReceiverReport rr;
- EXPECT_TRUE(rr.WithReportBlock(rb));
- rr.Append(&fir);
-
- SenderReport sr;
- sr.Append(&bye);
- sr.Append(&rr);
-
- rtc::scoped_ptr<RawPacket> packet(sr.Build());
- RtcpPacketParser parser;
- parser.Parse(packet->Buffer(), packet->Length());
- EXPECT_EQ(1, parser.sender_report()->num_packets());
- EXPECT_EQ(1, parser.receiver_report()->num_packets());
- EXPECT_EQ(1, parser.report_block()->num_packets());
- EXPECT_EQ(1, parser.bye()->num_packets());
- EXPECT_EQ(1, parser.fir()->num_packets());
-}
-
-TEST(RtcpPacketTest, Bye) {
- Bye bye;
- bye.From(kSenderSsrc);
-
- rtc::scoped_ptr<RawPacket> packet(bye.Build());
- RtcpPacketParser parser;
- parser.Parse(packet->Buffer(), packet->Length());
- EXPECT_EQ(1, parser.bye()->num_packets());
- EXPECT_EQ(kSenderSsrc, parser.bye()->Ssrc());
-}
-
-TEST(RtcpPacketTest, ByeWithCsrcs) {
- Fir fir;
- Bye bye;
- bye.From(kSenderSsrc);
- EXPECT_TRUE(bye.WithCsrc(0x22222222));
- EXPECT_TRUE(bye.WithCsrc(0x33333333));
- bye.Append(&fir);
-
- rtc::scoped_ptr<RawPacket> packet(bye.Build());
- RtcpPacketParser parser;
- parser.Parse(packet->Buffer(), packet->Length());
- EXPECT_EQ(1, parser.bye()->num_packets());
- EXPECT_EQ(kSenderSsrc, parser.bye()->Ssrc());
- EXPECT_EQ(1, parser.fir()->num_packets());
-}
-
-TEST(RtcpPacketTest, ByeWithTooManyCsrcs) {
- Bye bye;
- bye.From(kSenderSsrc);
- const int kMaxCsrcs = (1 << 5) - 2; // 5 bit len, first item is sender SSRC.
- for (int i = 0; i < kMaxCsrcs; ++i) {
- EXPECT_TRUE(bye.WithCsrc(i));
- }
- EXPECT_FALSE(bye.WithCsrc(kMaxCsrcs));
-}
-
-TEST(RtcpPacketTest, BuildWithInputBuffer) {
- Fir fir;
- ReportBlock rb;
- ReceiverReport rr;
- rr.From(kSenderSsrc);
- EXPECT_TRUE(rr.WithReportBlock(rb));
- rr.Append(&fir);
-
- const size_t kRrLength = 8;
- const size_t kReportBlockLength = 24;
- const size_t kFirLength = 20;
-
- class Verifier : public rtcp::RtcpPacket::PacketReadyCallback {
- public:
- void OnPacketReady(uint8_t* data, size_t length) override {
- RtcpPacketParser parser;
- parser.Parse(data, length);
- EXPECT_EQ(1, parser.receiver_report()->num_packets());
- EXPECT_EQ(1, parser.report_block()->num_packets());
- EXPECT_EQ(1, parser.fir()->num_packets());
- ++packets_created_;
- }
-
- int packets_created_ = 0;
- } verifier;
- const size_t kBufferSize = kRrLength + kReportBlockLength + kFirLength;
- uint8_t buffer[kBufferSize];
- EXPECT_TRUE(rr.BuildExternalBuffer(buffer, kBufferSize, &verifier));
- EXPECT_EQ(1, verifier.packets_created_);
-}
-
TEST(RtcpPacketTest, BuildWithTooSmallBuffer) {
ReportBlock rb;
ReceiverReport rr;
@@ -703,47 +324,6 @@ TEST(RtcpPacketTest, BuildWithTooSmallBuffer) {
EXPECT_FALSE(rr.BuildExternalBuffer(buffer, kBufferSize, &verifier));
}
-TEST(RtcpPacketTest, BuildWithTooSmallBuffer_FragmentedSend) {
- Fir fir;
- ReportBlock rb;
- ReceiverReport rr;
- rr.From(kSenderSsrc);
- EXPECT_TRUE(rr.WithReportBlock(rb));
- rr.Append(&fir);
-
- const size_t kRrLength = 8;
- const size_t kReportBlockLength = 24;
-
- class Verifier : public rtcp::RtcpPacket::PacketReadyCallback {
- public:
- void OnPacketReady(uint8_t* data, size_t length) override {
- RtcpPacketParser parser;
- parser.Parse(data, length);
- switch (packets_created_++) {
- case 0:
- EXPECT_EQ(1, parser.receiver_report()->num_packets());
- EXPECT_EQ(1, parser.report_block()->num_packets());
- EXPECT_EQ(0, parser.fir()->num_packets());
- break;
- case 1:
- EXPECT_EQ(0, parser.receiver_report()->num_packets());
- EXPECT_EQ(0, parser.report_block()->num_packets());
- EXPECT_EQ(1, parser.fir()->num_packets());
- break;
- default:
- ADD_FAILURE() << "OnPacketReady not expected to be called "
- << packets_created_ << " times.";
- }
- }
-
- int packets_created_ = 0;
- } verifier;
- const size_t kBufferSize = kRrLength + kReportBlockLength;
- uint8_t buffer[kBufferSize];
- EXPECT_TRUE(rr.BuildExternalBuffer(buffer, kBufferSize, &verifier));
- EXPECT_EQ(2, verifier.packets_created_);
-}
-
TEST(RtcpPacketTest, Remb) {
Remb remb;
remb.From(kSenderSsrc);
@@ -765,81 +345,6 @@ TEST(RtcpPacketTest, Remb) {
EXPECT_EQ(kRemoteSsrc + 2, ssrcs[2]);
}
-TEST(RtcpPacketTest, Tmmbr) {
- Tmmbr tmmbr;
- tmmbr.From(kSenderSsrc);
- tmmbr.To(kRemoteSsrc);
- tmmbr.WithBitrateKbps(312);
- tmmbr.WithOverhead(60);
-
- rtc::scoped_ptr<RawPacket> packet(tmmbr.Build());
- RtcpPacketParser parser;
- parser.Parse(packet->Buffer(), packet->Length());
- EXPECT_EQ(1, parser.tmmbr()->num_packets());
- EXPECT_EQ(kSenderSsrc, parser.tmmbr()->Ssrc());
- EXPECT_EQ(1, parser.tmmbr_item()->num_packets());
- EXPECT_EQ(312U, parser.tmmbr_item()->BitrateKbps());
- EXPECT_EQ(60U, parser.tmmbr_item()->Overhead());
-}
-
-TEST(RtcpPacketTest, TmmbnWithNoItem) {
- Tmmbn tmmbn;
- tmmbn.From(kSenderSsrc);
-
- rtc::scoped_ptr<RawPacket> packet(tmmbn.Build());
- RtcpPacketParser parser;
- parser.Parse(packet->Buffer(), packet->Length());
- EXPECT_EQ(1, parser.tmmbn()->num_packets());
- EXPECT_EQ(kSenderSsrc, parser.tmmbn()->Ssrc());
- EXPECT_EQ(0, parser.tmmbn_items()->num_packets());
-}
-
-TEST(RtcpPacketTest, TmmbnWithOneItem) {
- Tmmbn tmmbn;
- tmmbn.From(kSenderSsrc);
- EXPECT_TRUE(tmmbn.WithTmmbr(kRemoteSsrc, 312, 60));
-
- rtc::scoped_ptr<RawPacket> packet(tmmbn.Build());
- RtcpPacketParser parser;
- parser.Parse(packet->Buffer(), packet->Length());
- EXPECT_EQ(1, parser.tmmbn()->num_packets());
- EXPECT_EQ(kSenderSsrc, parser.tmmbn()->Ssrc());
- EXPECT_EQ(1, parser.tmmbn_items()->num_packets());
- EXPECT_EQ(kRemoteSsrc, parser.tmmbn_items()->Ssrc(0));
- EXPECT_EQ(312U, parser.tmmbn_items()->BitrateKbps(0));
- EXPECT_EQ(60U, parser.tmmbn_items()->Overhead(0));
-}
-
-TEST(RtcpPacketTest, TmmbnWithTwoItems) {
- Tmmbn tmmbn;
- tmmbn.From(kSenderSsrc);
- EXPECT_TRUE(tmmbn.WithTmmbr(kRemoteSsrc, 312, 60));
- EXPECT_TRUE(tmmbn.WithTmmbr(kRemoteSsrc + 1, 1288, 40));
-
- rtc::scoped_ptr<RawPacket> packet(tmmbn.Build());
- RtcpPacketParser parser;
- parser.Parse(packet->Buffer(), packet->Length());
- EXPECT_EQ(1, parser.tmmbn()->num_packets());
- EXPECT_EQ(kSenderSsrc, parser.tmmbn()->Ssrc());
- EXPECT_EQ(2, parser.tmmbn_items()->num_packets());
- EXPECT_EQ(kRemoteSsrc, parser.tmmbn_items()->Ssrc(0));
- EXPECT_EQ(312U, parser.tmmbn_items()->BitrateKbps(0));
- EXPECT_EQ(60U, parser.tmmbn_items()->Overhead(0));
- EXPECT_EQ(kRemoteSsrc + 1, parser.tmmbn_items()->Ssrc(1));
- EXPECT_EQ(1288U, parser.tmmbn_items()->BitrateKbps(1));
- EXPECT_EQ(40U, parser.tmmbn_items()->Overhead(1));
-}
-
-TEST(RtcpPacketTest, TmmbnWithTooManyItems) {
- Tmmbn tmmbn;
- tmmbn.From(kSenderSsrc);
- const int kMaxTmmbrItems = 50;
- for (int i = 0; i < kMaxTmmbrItems; ++i)
- EXPECT_TRUE(tmmbn.WithTmmbr(kRemoteSsrc + i, 312, 60));
-
- EXPECT_FALSE(tmmbn.WithTmmbr(kRemoteSsrc + kMaxTmmbrItems, 312, 60));
-}
-
TEST(RtcpPacketTest, XrWithNoReportBlocks) {
Xr xr;
xr.From(kSenderSsrc);
@@ -853,8 +358,7 @@ TEST(RtcpPacketTest, XrWithNoReportBlocks) {
TEST(RtcpPacketTest, XrWithRrtr) {
Rrtr rrtr;
- rrtr.WithNtpSec(0x11111111);
- rrtr.WithNtpFrac(0x22222222);
+ rrtr.WithNtp(NtpTime(0x11111111, 0x22222222));
Xr xr;
xr.From(kSenderSsrc);
EXPECT_TRUE(xr.WithRrtr(&rrtr));
@@ -871,11 +375,9 @@ TEST(RtcpPacketTest, XrWithRrtr) {
TEST(RtcpPacketTest, XrWithTwoRrtrBlocks) {
Rrtr rrtr1;
- rrtr1.WithNtpSec(0x11111111);
- rrtr1.WithNtpFrac(0x22222222);
+ rrtr1.WithNtp(NtpTime(0x11111111, 0x22222222));
Rrtr rrtr2;
- rrtr2.WithNtpSec(0x33333333);
- rrtr2.WithNtpFrac(0x44444444);
+ rrtr2.WithNtp(NtpTime(0x33333333, 0x44444444));
Xr xr;
xr.From(kSenderSsrc);
EXPECT_TRUE(xr.WithRrtr(&rrtr1));
@@ -967,32 +469,33 @@ TEST(RtcpPacketTest, XrWithTwoDlrrBlocks) {
}
TEST(RtcpPacketTest, XrWithVoipMetric) {
- VoipMetric metric;
- metric.To(kRemoteSsrc);
- metric.LossRate(1);
- metric.DiscardRate(2);
- metric.BurstDensity(3);
- metric.GapDensity(4);
- metric.BurstDuration(0x1111);
- metric.GapDuration(0x2222);
- metric.RoundTripDelay(0x3333);
- metric.EndSystemDelay(0x4444);
- metric.SignalLevel(5);
- metric.NoiseLevel(6);
- metric.Rerl(7);
- metric.Gmin(8);
- metric.Rfactor(9);
- metric.ExtRfactor(10);
- metric.MosLq(11);
- metric.MosCq(12);
- metric.RxConfig(13);
- metric.JbNominal(0x5555);
- metric.JbMax(0x6666);
- metric.JbAbsMax(0x7777);
-
+ RTCPVoIPMetric metric;
+ metric.lossRate = 1;
+ metric.discardRate = 2;
+ metric.burstDensity = 3;
+ metric.gapDensity = 4;
+ metric.burstDuration = 0x1111;
+ metric.gapDuration = 0x2222;
+ metric.roundTripDelay = 0x3333;
+ metric.endSystemDelay = 0x4444;
+ metric.signalLevel = 5;
+ metric.noiseLevel = 6;
+ metric.RERL = 7;
+ metric.Gmin = 8;
+ metric.Rfactor = 9;
+ metric.extRfactor = 10;
+ metric.MOSLQ = 11;
+ metric.MOSCQ = 12;
+ metric.RXconfig = 13;
+ metric.JBnominal = 0x5555;
+ metric.JBmax = 0x6666;
+ metric.JBabsMax = 0x7777;
+ VoipMetric metric_block;
+ metric_block.To(kRemoteSsrc);
+ metric_block.WithVoipMetric(metric);
Xr xr;
xr.From(kSenderSsrc);
- EXPECT_TRUE(xr.WithVoipMetric(&metric));
+ EXPECT_TRUE(xr.WithVoipMetric(&metric_block));
rtc::scoped_ptr<RawPacket> packet(xr.Build());
RtcpPacketParser parser;
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc b/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc
index b914838109..d65b04c8ab 100644
--- a/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc
@@ -23,8 +23,13 @@
#include "webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h"
namespace webrtc {
-using namespace RTCPUtility;
-using namespace RTCPHelp;
+using RTCPHelp::RTCPPacketInformation;
+using RTCPHelp::RTCPReceiveInformation;
+using RTCPHelp::RTCPReportBlockInformation;
+using RTCPUtility::kBtVoipMetric;
+using RTCPUtility::RTCPCnameInformation;
+using RTCPUtility::RTCPPacketReportBlockItem;
+using RTCPUtility::RTCPPacketTypes;
// The number of RTCP time intervals needed to trigger a timeout.
const int kRrTimeoutIntervals = 3;
@@ -741,7 +746,7 @@ bool RTCPReceiver::UpdateRTCPReceiveInformationTimers() {
return updateBoundingSet;
}
-int32_t RTCPReceiver::BoundingSet(bool &tmmbrOwner, TMMBRSet* boundingSetRec) {
+int32_t RTCPReceiver::BoundingSet(bool* tmmbrOwner, TMMBRSet* boundingSetRec) {
CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
std::map<uint32_t, RTCPReceiveInformation*>::iterator receiveInfoIt =
@@ -761,7 +766,7 @@ int32_t RTCPReceiver::BoundingSet(bool &tmmbrOwner, TMMBRSet* boundingSetRec) {
i++) {
if(receiveInfo->TmmbnBoundingSet.Ssrc(i) == main_ssrc_) {
// owner of bounding set
- tmmbrOwner = true;
+ *tmmbrOwner = true;
}
boundingSetRec->SetEntry(i,
receiveInfo->TmmbnBoundingSet.Tmmbr(i),
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h b/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h
index 272397675b..24861bd49e 100644
--- a/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h
@@ -12,11 +12,11 @@
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_RECEIVER_H_
#include <map>
-#include <vector>
#include <set>
+#include <vector>
#include "webrtc/base/thread_annotations.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
@@ -103,7 +103,7 @@ public:
bool UpdateRTCPReceiveInformationTimers();
- int32_t BoundingSet(bool &tmmbrOwner, TMMBRSet* boundingSetRec);
+ int32_t BoundingSet(bool* tmmbrOwner, TMMBRSet* boundingSetRec);
int32_t UpdateTMMBR();
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.cc b/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.cc
index 718990d10b..a5c0e28282 100644
--- a/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.cc
@@ -17,7 +17,7 @@
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
namespace webrtc {
-using namespace RTCPHelp;
+namespace RTCPHelp {
RTCPPacketInformation::RTCPPacketInformation()
: rtcpPacketTypeFlags(0),
@@ -190,4 +190,5 @@ void RTCPReceiveInformation::VerifyAndAllocateBoundingSet(
const uint32_t minimumSize) {
TmmbnBoundingSet.VerifyAndAllocateSet(minimumSize);
}
+} // namespace RTCPHelp
} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.h b/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.h
index 37b7b88370..a792841962 100644
--- a/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.h
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.h
@@ -11,10 +11,12 @@
#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_RECEIVER_HELP_H_
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_RECEIVER_HELP_H_
+#include <list>
+#include <vector>
#include "webrtc/base/constructormagic.h"
#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h" // RTCPReportBlock
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h" // RTCPReportBlock
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
#include "webrtc/modules/rtp_rtcp/source/tmmbr_help.h"
#include "webrtc/typedefs.h"
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc b/webrtc/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc
index 1581845476..5d2fda347e 100644
--- a/webrtc/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc
@@ -15,17 +15,23 @@
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
-// Note: This file has no directory. Lint warning must be ignored.
#include "webrtc/common_types.h"
#include "webrtc/modules/remote_bitrate_estimator/include/mock/mock_remote_bitrate_observer.h"
#include "webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/app.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/bye.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/pli.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/receiver_report.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/sli.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbr.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_receiver.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_sender.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
-#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h"
namespace webrtc {
@@ -384,20 +390,20 @@ TEST_F(RtcpReceiverTest, GetRtt) {
}
TEST_F(RtcpReceiverTest, InjectIjWithNoItem) {
- rtcp::Ij ij;
+ rtcp::ExtendedJitterReport ij;
rtc::scoped_ptr<rtcp::RawPacket> packet(ij.Build());
EXPECT_EQ(0, InjectRtcpPacket(packet->Buffer(), packet->Length()));
EXPECT_EQ(0U, rtcp_packet_info_.rtcpPacketTypeFlags);
}
TEST_F(RtcpReceiverTest, InjectIjWithOneItem) {
- rtcp::Ij ij;
- ij.WithJitterItem(0x11111111);
+ rtcp::ExtendedJitterReport ij;
+ ij.WithJitter(0x11213141);
rtc::scoped_ptr<rtcp::RawPacket> packet(ij.Build());
EXPECT_EQ(0, InjectRtcpPacket(packet->Buffer(), packet->Length()));
EXPECT_EQ(kRtcpTransmissionTimeOffset, rtcp_packet_info_.rtcpPacketTypeFlags);
- EXPECT_EQ(0x11111111U, rtcp_packet_info_.interArrivalJitter);
+ EXPECT_EQ(0x11213141U, rtcp_packet_info_.interArrivalJitter);
}
TEST_F(RtcpReceiverTest, InjectAppWithNoData) {
@@ -586,7 +592,9 @@ TEST_F(RtcpReceiverTest, InjectXrVoipPacket) {
const uint8_t kLossRate = 123;
rtcp::VoipMetric voip_metric;
voip_metric.To(kSourceSsrc);
- voip_metric.LossRate(kLossRate);
+ RTCPVoIPMetric metric;
+ metric.lossRate = kLossRate;
+ voip_metric.WithVoipMetric(metric);
rtcp::Xr xr;
xr.From(0x2345);
xr.WithVoipMetric(&voip_metric);
@@ -615,8 +623,7 @@ TEST_F(RtcpReceiverTest, XrVoipPacketNotToUsIgnored) {
TEST_F(RtcpReceiverTest, InjectXrReceiverReferenceTimePacket) {
rtcp::Rrtr rrtr;
- rrtr.WithNtpSec(0x10203);
- rrtr.WithNtpFrac(0x40506);
+ rrtr.WithNtp(NtpTime(0x10203, 0x40506));
rtcp::Xr xr;
xr.From(0x2345);
xr.WithRrtr(&rrtr);
@@ -751,13 +758,12 @@ TEST_F(RtcpReceiverTest, LastReceivedXrReferenceTimeInfoInitiallyFalse) {
TEST_F(RtcpReceiverTest, GetLastReceivedXrReferenceTimeInfo) {
const uint32_t kSenderSsrc = 0x123456;
- const uint32_t kNtpSec = 0x10203;
- const uint32_t kNtpFrac = 0x40506;
- const uint32_t kNtpMid = RTCPUtility::MidNtp(kNtpSec, kNtpFrac);
+ const NtpTime kNtp(0x10203, 0x40506);
+ const uint32_t kNtpMid =
+ RTCPUtility::MidNtp(kNtp.seconds(), kNtp.fractions());
rtcp::Rrtr rrtr;
- rrtr.WithNtpSec(kNtpSec);
- rrtr.WithNtpFrac(kNtpFrac);
+ rrtr.WithNtp(kNtp);
rtcp::Xr xr;
xr.From(kSenderSsrc);
xr.WithRrtr(&rrtr);
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc b/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc
index 22b9477e05..848d73b2c4 100644
--- a/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc
@@ -11,17 +11,26 @@
#include "webrtc/modules/rtp_rtcp/source/rtcp_sender.h"
#include <assert.h> // assert
-#include <stdlib.h> // rand
#include <string.h> // memcpy
#include <algorithm> // min
#include <limits> // max
+#include <utility>
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/trace_event.h"
#include "webrtc/common_types.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/app.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/bye.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/nack.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/pli.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/receiver_report.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/sli.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbn.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbr.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
@@ -31,13 +40,11 @@ namespace webrtc {
using RTCPUtility::RTCPCnameInformation;
NACKStringBuilder::NACKStringBuilder()
- : stream_(""), count_(0), prevNack_(0), consecutive_(false) {
-}
+ : stream_(""), count_(0), prevNack_(0), consecutive_(false) {}
NACKStringBuilder::~NACKStringBuilder() {}
-void NACKStringBuilder::PushNACK(uint16_t nack)
-{
+void NACKStringBuilder::PushNACK(uint16_t nack) {
if (count_ == 0) {
stream_ << nack;
} else if (nack == prevNack_ + 1) {
@@ -71,64 +78,63 @@ RTCPSender::FeedbackState::FeedbackState()
last_rr_ntp_frac(0),
remote_sr(0),
has_last_xr_rr(false),
- module(nullptr) {
-}
+ module(nullptr) {}
-struct RTCPSender::RtcpContext {
- RtcpContext(const FeedbackState& feedback_state,
- int32_t nack_size,
- const uint16_t* nack_list,
- bool repeat,
- uint64_t picture_id,
- uint8_t* buffer,
- uint32_t buffer_size)
- : feedback_state(feedback_state),
- nack_size(nack_size),
- nack_list(nack_list),
- repeat(repeat),
- picture_id(picture_id),
- buffer(buffer),
- buffer_size(buffer_size),
- ntp_sec(0),
- ntp_frac(0),
- position(0) {}
-
- uint8_t* AllocateData(uint32_t bytes) {
- RTC_DCHECK_LE(position + bytes, buffer_size);
- uint8_t* ptr = &buffer[position];
- position += bytes;
- return ptr;
+class PacketContainer : public rtcp::CompoundPacket,
+ public rtcp::RtcpPacket::PacketReadyCallback {
+ public:
+ explicit PacketContainer(Transport* transport)
+ : transport_(transport), bytes_sent_(0) {}
+ virtual ~PacketContainer() {
+ for (RtcpPacket* packet : appended_packets_)
+ delete packet;
}
- const FeedbackState& feedback_state;
- int32_t nack_size;
- const uint16_t* nack_list;
- bool repeat;
- uint64_t picture_id;
- uint8_t* buffer;
- uint32_t buffer_size;
- uint32_t ntp_sec;
- uint32_t ntp_frac;
- uint32_t position;
-};
-
-// TODO(sprang): Once all builders use RtcpPacket, call SendToNetwork() here.
-class RTCPSender::PacketBuiltCallback
- : public rtcp::RtcpPacket::PacketReadyCallback {
- public:
- PacketBuiltCallback(RtcpContext* context) : context_(context) {}
- virtual ~PacketBuiltCallback() {}
void OnPacketReady(uint8_t* data, size_t length) override {
- context_->position += length;
+ if (transport_->SendRtcp(data, length))
+ bytes_sent_ += length;
}
- bool BuildPacket(const rtcp::RtcpPacket& packet) {
- return packet.BuildExternalBuffer(
- &context_->buffer[context_->position],
- context_->buffer_size - context_->position, this);
+
+ size_t SendPackets() {
+ rtcp::CompoundPacket::Build(this);
+ return bytes_sent_;
}
private:
- RtcpContext* const context_;
+ Transport* transport_;
+ size_t bytes_sent_;
+};
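+
+// Expected flow (call sites are outside this hunk): RTCP packets are
+// appended to the container, then SendPackets() builds the compound packet;
+// each time a serialized buffer is ready, OnPacketReady() forwards it to the
+// Transport and bytes_sent_ accumulates the total.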
+
+class RTCPSender::RtcpContext {
+ public:
+ RtcpContext(const FeedbackState& feedback_state,
+ int32_t nack_size,
+ const uint16_t* nack_list,
+ bool repeat,
+ uint64_t picture_id,
+ uint32_t ntp_sec,
+ uint32_t ntp_frac,
+ PacketContainer* container)
+ : feedback_state_(feedback_state),
+ nack_size_(nack_size),
+ nack_list_(nack_list),
+ repeat_(repeat),
+ picture_id_(picture_id),
+ ntp_sec_(ntp_sec),
+ ntp_frac_(ntp_frac),
+ container_(container) {}
+
+ virtual ~RtcpContext() {}
+
+ const FeedbackState& feedback_state_;
+ const int32_t nack_size_;
+ const uint16_t* nack_list_;
+ const bool repeat_;
+ const uint64_t picture_id_;
+ const uint32_t ntp_sec_;
+ const uint32_t ntp_frac_;
+
+ PacketContainer* const container_;
};
RTCPSender::RTCPSender(
@@ -139,6 +145,7 @@ RTCPSender::RTCPSender(
Transport* outgoing_transport)
: audio_(audio),
clock_(clock),
+ random_(clock_->TimeInMicroseconds()),
method_(RtcpMode::kOff),
transport_(outgoing_transport),
@@ -193,8 +200,7 @@ RTCPSender::RTCPSender(
builders_[kRtcpXrDlrrReportBlock] = &RTCPSender::BuildDlrr;
}
-RTCPSender::~RTCPSender() {
-}
+RTCPSender::~RTCPSender() {}
RtcpMode RTCPSender::Status() const {
CriticalSectionScoped lock(critical_section_rtcp_sender_.get());
@@ -340,63 +346,63 @@ int32_t RTCPSender::RemoveMixedCNAME(uint32_t SSRC) {
}
bool RTCPSender::TimeToSendRTCPReport(bool sendKeyframeBeforeRTP) const {
-/*
- For audio we use a fix 5 sec interval
+ /*
+ For audio we use a fixed 5 sec interval
- For video we use 1 sec interval fo a BW smaller than 360 kbit/s,
- technicaly we break the max 5% RTCP BW for video below 10 kbit/s but
- that should be extremely rare
+ For video we use 1 sec interval for a BW smaller than 360 kbit/s,
+ technically we break the max 5% RTCP BW for video below 10 kbit/s but
+ that should be extremely rare
-From RFC 3550
+ From RFC 3550
- MAX RTCP BW is 5% if the session BW
- A send report is approximately 65 bytes inc CNAME
- A receiver report is approximately 28 bytes
+ MAX RTCP BW is 5% of the session BW
+ A send report is approximately 65 bytes incl. CNAME
+ A receiver report is approximately 28 bytes
- The RECOMMENDED value for the reduced minimum in seconds is 360
- divided by the session bandwidth in kilobits/second. This minimum
- is smaller than 5 seconds for bandwidths greater than 72 kb/s.
+ The RECOMMENDED value for the reduced minimum in seconds is 360
+ divided by the session bandwidth in kilobits/second. This minimum
+ is smaller than 5 seconds for bandwidths greater than 72 kb/s.
- If the participant has not yet sent an RTCP packet (the variable
- initial is true), the constant Tmin is set to 2.5 seconds, else it
- is set to 5 seconds.
+ If the participant has not yet sent an RTCP packet (the variable
+ initial is true), the constant Tmin is set to 2.5 seconds, else it
+ is set to 5 seconds.
- The interval between RTCP packets is varied randomly over the
- range [0.5,1.5] times the calculated interval to avoid unintended
- synchronization of all participants
+ The interval between RTCP packets is varied randomly over the
+ range [0.5,1.5] times the calculated interval to avoid unintended
+ synchronization of all participants
- if we send
- If the participant is a sender (we_sent true), the constant C is
- set to the average RTCP packet size (avg_rtcp_size) divided by 25%
- of the RTCP bandwidth (rtcp_bw), and the constant n is set to the
- number of senders.
+ if we send
+ If the participant is a sender (we_sent true), the constant C is
+ set to the average RTCP packet size (avg_rtcp_size) divided by 25%
+ of the RTCP bandwidth (rtcp_bw), and the constant n is set to the
+ number of senders.
- if we receive only
- If we_sent is not true, the constant C is set
- to the average RTCP packet size divided by 75% of the RTCP
- bandwidth. The constant n is set to the number of receivers
- (members - senders). If the number of senders is greater than
- 25%, senders and receivers are treated together.
+ if we receive only
+ If we_sent is not true, the constant C is set
+ to the average RTCP packet size divided by 75% of the RTCP
+ bandwidth. The constant n is set to the number of receivers
+ (members - senders). If the number of senders is greater than
+ 25%, senders and receivers are treated together.
- reconsideration NOT required for peer-to-peer
- "timer reconsideration" is
- employed. This algorithm implements a simple back-off mechanism
- which causes users to hold back RTCP packet transmission if the
- group sizes are increasing.
+ reconsideration NOT required for peer-to-peer
+ "timer reconsideration" is
+ employed. This algorithm implements a simple back-off mechanism
+ which causes users to hold back RTCP packet transmission if the
+ group sizes are increasing.
- n = number of members
- C = avg_size/(rtcpBW/4)
+ n = number of members
+ C = avg_size/(rtcpBW/4)
- 3. The deterministic calculated interval Td is set to max(Tmin, n*C).
+ 3. The deterministic calculated interval Td is set to max(Tmin, n*C).
- 4. The calculated interval T is set to a number uniformly distributed
- between 0.5 and 1.5 times the deterministic calculated interval.
+ 4. The calculated interval T is set to a number uniformly distributed
+ between 0.5 and 1.5 times the deterministic calculated interval.
- 5. The resulting value of T is divided by e-3/2=1.21828 to compensate
- for the fact that the timer reconsideration algorithm converges to
- a value of the RTCP bandwidth below the intended average
-*/
+ 5. The resulting value of T is divided by e-3/2=1.21828 to compensate
+ for the fact that the timer reconsideration algorithm converges to
+ a value of the RTCP bandwidth below the intended average
+ */
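+
+ // Example: with the reduced minimum of 360 / (session BW in kbit/s)
+ // seconds, a 360 kbit/s session yields 360 / 360 = 1 second, consistent
+ // with the fixed 1 sec video interval described above.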
int64_t now = clock_->TimeInMilliseconds();
@@ -451,32 +457,15 @@ bool RTCPSender::SendTimeOfXrRrReport(uint32_t mid_ntp,
return true;
}
-int32_t RTCPSender::AddReportBlock(const RTCPReportBlock& report_block) {
- if (report_blocks_.size() >= RTCP_MAX_REPORT_BLOCKS) {
- LOG(LS_WARNING) << "Too many report blocks.";
- return -1;
- }
- rtcp::ReportBlock* block = &report_blocks_[report_block.remoteSSRC];
- block->To(report_block.remoteSSRC);
- block->WithFractionLost(report_block.fractionLost);
- block->WithCumulativeLost(report_block.cumulativeLost);
- block->WithExtHighestSeqNum(report_block.extendedHighSeqNum);
- block->WithJitter(report_block.jitter);
- block->WithLastSr(report_block.lastSR);
- block->WithDelayLastSr(report_block.delaySinceLastSR);
-
- return 0;
-}
-
-RTCPSender::BuildResult RTCPSender::BuildSR(RtcpContext* ctx) {
+rtc::scoped_ptr<rtcp::RtcpPacket> RTCPSender::BuildSR(const RtcpContext& ctx) {
for (int i = (RTCP_NUMBER_OF_SR - 2); i >= 0; i--) {
// shift old
last_send_report_[i + 1] = last_send_report_[i];
last_rtcp_time_[i + 1] = last_rtcp_time_[i];
}
- last_rtcp_time_[0] = Clock::NtpToMs(ctx->ntp_sec, ctx->ntp_frac);
- last_send_report_[0] = (ctx->ntp_sec << 16) + (ctx->ntp_frac >> 16);
+ last_rtcp_time_[0] = Clock::NtpToMs(ctx.ntp_sec_, ctx.ntp_frac_);
+ last_send_report_[0] = (ctx.ntp_sec_ << 16) + (ctx.ntp_frac_ >> 16);
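+ // (The middle 32 bits of the 64-bit NTP timestamp, i.e. the compact form
+ // that receivers echo back in the LSR field of their report blocks.)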
// The timestamp of this RTCP packet should be estimated as the timestamp of
// the frame being captured at this moment. We are calculating that
@@ -485,67 +474,52 @@ RTCPSender::BuildResult RTCPSender::BuildSR(RtcpContext* ctx) {
uint32_t rtp_timestamp =
start_timestamp_ + last_rtp_timestamp_ +
(clock_->TimeInMilliseconds() - last_frame_capture_time_ms_) *
- (ctx->feedback_state.frequency_hz / 1000);
+ (ctx.feedback_state_.frequency_hz / 1000);
- rtcp::SenderReport report;
- report.From(ssrc_);
- report.WithNtpSec(ctx->ntp_sec);
- report.WithNtpFrac(ctx->ntp_frac);
- report.WithRtpTimestamp(rtp_timestamp);
- report.WithPacketCount(ctx->feedback_state.packets_sent);
- report.WithOctetCount(ctx->feedback_state.media_bytes_sent);
+ rtcp::SenderReport* report = new rtcp::SenderReport();
+ report->From(ssrc_);
+ report->WithNtpSec(ctx.ntp_sec_);
+ report->WithNtpFrac(ctx.ntp_frac_);
+ report->WithRtpTimestamp(rtp_timestamp);
+ report->WithPacketCount(ctx.feedback_state_.packets_sent);
+ report->WithOctetCount(ctx.feedback_state_.media_bytes_sent);
for (auto it : report_blocks_)
- report.WithReportBlock(it.second);
-
- PacketBuiltCallback callback(ctx);
- if (!callback.BuildPacket(report))
- return BuildResult::kTruncated;
+ report->WithReportBlock(it.second);
report_blocks_.clear();
- return BuildResult::kSuccess;
+
+ return rtc::scoped_ptr<rtcp::SenderReport>(report);
}
-RTCPSender::BuildResult RTCPSender::BuildSDES(RtcpContext* ctx) {
+rtc::scoped_ptr<rtcp::RtcpPacket> RTCPSender::BuildSDES(
+ const RtcpContext& ctx) {
size_t length_cname = cname_.length();
RTC_CHECK_LT(length_cname, static_cast<size_t>(RTCP_CNAME_SIZE));
- rtcp::Sdes sdes;
- sdes.WithCName(ssrc_, cname_);
+ rtcp::Sdes* sdes = new rtcp::Sdes();
+ sdes->WithCName(ssrc_, cname_);
for (const auto it : csrc_cnames_)
- sdes.WithCName(it.first, it.second);
+ sdes->WithCName(it.first, it.second);
- PacketBuiltCallback callback(ctx);
- if (!callback.BuildPacket(sdes))
- return BuildResult::kTruncated;
-
- return BuildResult::kSuccess;
+ return rtc::scoped_ptr<rtcp::Sdes>(sdes);
}
-RTCPSender::BuildResult RTCPSender::BuildRR(RtcpContext* ctx) {
- rtcp::ReceiverReport report;
- report.From(ssrc_);
+rtc::scoped_ptr<rtcp::RtcpPacket> RTCPSender::BuildRR(const RtcpContext& ctx) {
+ rtcp::ReceiverReport* report = new rtcp::ReceiverReport();
+ report->From(ssrc_);
for (auto it : report_blocks_)
- report.WithReportBlock(it.second);
-
- PacketBuiltCallback callback(ctx);
- if (!callback.BuildPacket(report))
- return BuildResult::kTruncated;
+ report->WithReportBlock(it.second);
report_blocks_.clear();
-
- return BuildResult::kSuccess;
+ return rtc::scoped_ptr<rtcp::ReceiverReport>(report);
}
-RTCPSender::BuildResult RTCPSender::BuildPLI(RtcpContext* ctx) {
- rtcp::Pli pli;
- pli.From(ssrc_);
- pli.To(remote_ssrc_);
-
- PacketBuiltCallback callback(ctx);
- if (!callback.BuildPacket(pli))
- return BuildResult::kTruncated;
+rtc::scoped_ptr<rtcp::RtcpPacket> RTCPSender::BuildPLI(const RtcpContext& ctx) {
+ rtcp::Pli* pli = new rtcp::Pli();
+ pli->From(ssrc_);
+ pli->To(remote_ssrc_);
TRACE_EVENT_INSTANT0(TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"),
"RTCPSender::PLI");
@@ -553,21 +527,17 @@ RTCPSender::BuildResult RTCPSender::BuildPLI(RtcpContext* ctx) {
TRACE_COUNTER_ID1(TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"), "RTCP_PLICount",
ssrc_, packet_type_counter_.pli_packets);
- return BuildResult::kSuccess;
+ return rtc::scoped_ptr<rtcp::Pli>(pli);
}
-RTCPSender::BuildResult RTCPSender::BuildFIR(RtcpContext* ctx) {
- if (!ctx->repeat)
+rtc::scoped_ptr<rtcp::RtcpPacket> RTCPSender::BuildFIR(const RtcpContext& ctx) {
+ if (!ctx.repeat_)
++sequence_number_fir_; // Do not increase if repetition.
- rtcp::Fir fir;
- fir.From(ssrc_);
- fir.To(remote_ssrc_);
- fir.WithCommandSeqNum(sequence_number_fir_);
-
- PacketBuiltCallback callback(ctx);
- if (!callback.BuildPacket(fir))
- return BuildResult::kTruncated;
+ rtcp::Fir* fir = new rtcp::Fir();
+ fir->From(ssrc_);
+ fir->To(remote_ssrc_);
+ fir->WithCommandSeqNum(sequence_number_fir_);
TRACE_EVENT_INSTANT0(TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"),
"RTCPSender::FIR");
@@ -575,7 +545,7 @@ RTCPSender::BuildResult RTCPSender::BuildFIR(RtcpContext* ctx) {
TRACE_COUNTER_ID1(TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"), "RTCP_FIRCount",
ssrc_, packet_type_counter_.fir_packets);
- return BuildResult::kSuccess;
+ return rtc::scoped_ptr<rtcp::Fir>(fir);
}
/*
@@ -585,20 +555,14 @@ RTCPSender::BuildResult RTCPSender::BuildFIR(RtcpContext* ctx) {
| First | Number | PictureID |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
*/
-RTCPSender::BuildResult RTCPSender::BuildSLI(RtcpContext* ctx) {
- rtcp::Sli sli;
- sli.From(ssrc_);
- sli.To(remote_ssrc_);
+rtc::scoped_ptr<rtcp::RtcpPacket> RTCPSender::BuildSLI(const RtcpContext& ctx) {
+ rtcp::Sli* sli = new rtcp::Sli();
+ sli->From(ssrc_);
+ sli->To(remote_ssrc_);
// Crop picture id to 6 least significant bits.
- sli.WithPictureId(ctx->picture_id & 0x3F);
- sli.WithFirstMb(0);
- sli.WithNumberOfMb(0x1FFF); // 13 bits, only ones for now.
-
- PacketBuiltCallback callback(ctx);
- if (!callback.BuildPacket(sli))
- return BuildResult::kTruncated;
+ sli->WithPictureId(ctx.picture_id_ & 0x3F);
- return BuildResult::kSuccess;
+ return rtc::scoped_ptr<rtcp::Sli>(sli);
}
/*
@@ -613,38 +577,32 @@ RTCPSender::BuildResult RTCPSender::BuildSLI(RtcpContext* ctx) {
/*
 * Note: not generic; made for VP8.
*/
-RTCPSender::BuildResult RTCPSender::BuildRPSI(RtcpContext* ctx) {
- if (ctx->feedback_state.send_payload_type == 0xFF)
- return BuildResult::kError;
-
- rtcp::Rpsi rpsi;
- rpsi.From(ssrc_);
- rpsi.To(remote_ssrc_);
- rpsi.WithPayloadType(ctx->feedback_state.send_payload_type);
- rpsi.WithPictureId(ctx->picture_id);
-
- PacketBuiltCallback callback(ctx);
- if (!callback.BuildPacket(rpsi))
- return BuildResult::kTruncated;
-
- return BuildResult::kSuccess;
+rtc::scoped_ptr<rtcp::RtcpPacket> RTCPSender::BuildRPSI(
+ const RtcpContext& ctx) {
+ if (ctx.feedback_state_.send_payload_type == 0xFF)
+ return nullptr;
+
+ rtcp::Rpsi* rpsi = new rtcp::Rpsi();
+ rpsi->From(ssrc_);
+ rpsi->To(remote_ssrc_);
+ rpsi->WithPayloadType(ctx.feedback_state_.send_payload_type);
+ rpsi->WithPictureId(ctx.picture_id_);
+
+ return rtc::scoped_ptr<rtcp::Rpsi>(rpsi);
}
-RTCPSender::BuildResult RTCPSender::BuildREMB(RtcpContext* ctx) {
- rtcp::Remb remb;
- remb.From(ssrc_);
+rtc::scoped_ptr<rtcp::RtcpPacket> RTCPSender::BuildREMB(
+ const RtcpContext& ctx) {
+ rtcp::Remb* remb = new rtcp::Remb();
+ remb->From(ssrc_);
for (uint32_t ssrc : remb_ssrcs_)
- remb.AppliesTo(ssrc);
- remb.WithBitrateBps(remb_bitrate_);
-
- PacketBuiltCallback callback(ctx);
- if (!callback.BuildPacket(remb))
- return BuildResult::kTruncated;
+ remb->AppliesTo(ssrc);
+ remb->WithBitrateBps(remb_bitrate_);
TRACE_EVENT_INSTANT0(TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"),
"RTCPSender::REMB");
- return BuildResult::kSuccess;
+ return rtc::scoped_ptr<rtcp::Remb>(remb);
}
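
For context on what rtcp::Remb serializes: per the REMB draft, the bitrate is not carried as a plain 32-bit value but as a 6-bit exponent plus an 18-bit mantissa (bps is roughly mantissa << exponent). A hedged sketch of that encoding:

#include <cstdint>

// REMB bitrate encoding: bps ~= mantissa << exponent, mantissa <= 18 bits.
void EncodeRembBitrate(uint32_t bitrate_bps, uint8_t* exponent,
                       uint32_t* mantissa) {
  *exponent = 0;
  *mantissa = bitrate_bps;
  while (*mantissa > 0x3FFFF) {  // Mantissa must fit in 18 bits.
    *mantissa >>= 1;
    ++(*exponent);
  }
}
// E.g. 1000000 bps -> exponent 2, mantissa 250000 (250000 << 2 == 1000000).
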
void RTCPSender::SetTargetBitrate(unsigned int target_bitrate) {
@@ -652,9 +610,10 @@ void RTCPSender::SetTargetBitrate(unsigned int target_bitrate) {
tmmbr_send_ = target_bitrate / 1000;
}
-RTCPSender::BuildResult RTCPSender::BuildTMMBR(RtcpContext* ctx) {
- if (ctx->feedback_state.module == NULL)
- return BuildResult::kError;
+rtc::scoped_ptr<rtcp::RtcpPacket> RTCPSender::BuildTMMBR(
+ const RtcpContext& ctx) {
+ if (ctx.feedback_state_.module == nullptr)
+ return nullptr;
  // Before sending the TMMBR, check the received TMMBN; only an owner is
// allowed to raise the bitrate:
// * If the sender is an owner of the TMMBN -> send TMMBR
@@ -669,14 +628,14 @@ RTCPSender::BuildResult RTCPSender::BuildTMMBR(RtcpContext* ctx) {
  // will acquire criticalSectionRTCPReceiver_, a potential deadlock, but
  // since RTCPReceiver is not doing the reverse we should be fine.
int32_t lengthOfBoundingSet =
- ctx->feedback_state.module->BoundingSet(tmmbrOwner, candidateSet);
+ ctx.feedback_state_.module->BoundingSet(&tmmbrOwner, candidateSet);
if (lengthOfBoundingSet > 0) {
for (int32_t i = 0; i < lengthOfBoundingSet; i++) {
if (candidateSet->Tmmbr(i) == tmmbr_send_ &&
candidateSet->PacketOH(i) == packet_oh_send_) {
- // do not send the same tuple
- return BuildResult::kAborted;
+ // Do not send the same tuple.
+ return nullptr;
}
}
if (!tmmbrOwner) {
@@ -687,124 +646,69 @@ RTCPSender::BuildResult RTCPSender::BuildTMMBR(RtcpContext* ctx) {
int numCandidates = lengthOfBoundingSet + 1;
// find bounding set
- TMMBRSet* boundingSet = NULL;
+ TMMBRSet* boundingSet = nullptr;
int numBoundingSet = tmmbr_help_.FindTMMBRBoundingSet(boundingSet);
if (numBoundingSet > 0 || numBoundingSet <= numCandidates)
tmmbrOwner = tmmbr_help_.IsOwner(ssrc_, numBoundingSet);
if (!tmmbrOwner) {
- // did not enter bounding set, no meaning to send this request
- return BuildResult::kAborted;
+        // Did not enter bounding set; no point in sending this request.
+ return nullptr;
}
}
}
- if (tmmbr_send_) {
- rtcp::Tmmbr tmmbr;
- tmmbr.From(ssrc_);
- tmmbr.To(remote_ssrc_);
- tmmbr.WithBitrateKbps(tmmbr_send_);
- tmmbr.WithOverhead(packet_oh_send_);
+ if (!tmmbr_send_)
+ return nullptr;
- PacketBuiltCallback callback(ctx);
- if (!callback.BuildPacket(tmmbr))
- return BuildResult::kTruncated;
- }
- return BuildResult::kSuccess;
+ rtcp::Tmmbr* tmmbr = new rtcp::Tmmbr();
+ tmmbr->From(ssrc_);
+ tmmbr->To(remote_ssrc_);
+ tmmbr->WithBitrateKbps(tmmbr_send_);
+ tmmbr->WithOverhead(packet_oh_send_);
+
+ return rtc::scoped_ptr<rtcp::Tmmbr>(tmmbr);
}
-RTCPSender::BuildResult RTCPSender::BuildTMMBN(RtcpContext* ctx) {
+rtc::scoped_ptr<rtcp::RtcpPacket> RTCPSender::BuildTMMBN(
+ const RtcpContext& ctx) {
TMMBRSet* boundingSet = tmmbr_help_.BoundingSetToSend();
- if (boundingSet == NULL)
- return BuildResult::kError;
+ if (boundingSet == nullptr)
+ return nullptr;
- rtcp::Tmmbn tmmbn;
- tmmbn.From(ssrc_);
+ rtcp::Tmmbn* tmmbn = new rtcp::Tmmbn();
+ tmmbn->From(ssrc_);
for (uint32_t i = 0; i < boundingSet->lengthOfSet(); i++) {
if (boundingSet->Tmmbr(i) > 0) {
- tmmbn.WithTmmbr(boundingSet->Ssrc(i), boundingSet->Tmmbr(i),
- boundingSet->PacketOH(i));
+ tmmbn->WithTmmbr(boundingSet->Ssrc(i), boundingSet->Tmmbr(i),
+ boundingSet->PacketOH(i));
}
}
- PacketBuiltCallback callback(ctx);
- if (!callback.BuildPacket(tmmbn))
- return BuildResult::kTruncated;
-
- return BuildResult::kSuccess;
+ return rtc::scoped_ptr<rtcp::Tmmbn>(tmmbn);
}
-RTCPSender::BuildResult RTCPSender::BuildAPP(RtcpContext* ctx) {
- rtcp::App app;
- app.From(ssrc_);
- app.WithSubType(app_sub_type_);
- app.WithName(app_name_);
- app.WithData(app_data_.get(), app_length_);
-
- PacketBuiltCallback callback(ctx);
- if (!callback.BuildPacket(app))
- return BuildResult::kTruncated;
+rtc::scoped_ptr<rtcp::RtcpPacket> RTCPSender::BuildAPP(const RtcpContext& ctx) {
+ rtcp::App* app = new rtcp::App();
+ app->From(ssrc_);
+ app->WithSubType(app_sub_type_);
+ app->WithName(app_name_);
+ app->WithData(app_data_.get(), app_length_);
- return BuildResult::kSuccess;
+ return rtc::scoped_ptr<rtcp::App>(app);
}
-RTCPSender::BuildResult RTCPSender::BuildNACK(RtcpContext* ctx) {
- // sanity
- if (ctx->position + 16 >= IP_PACKET_SIZE) {
- LOG(LS_WARNING) << "Failed to build NACK.";
- return BuildResult::kTruncated;
- }
-
- // int size, uint16_t* nack_list
- // add nack list
- uint8_t FMT = 1;
- *ctx->AllocateData(1) = 0x80 + FMT;
- *ctx->AllocateData(1) = 205;
-
- *ctx->AllocateData(1) = 0;
- int nack_size_pos_ = ctx->position;
- *ctx->AllocateData(1) = 3; // setting it to one kNACK signal as default
-
- // Add our own SSRC
- ByteWriter<uint32_t>::WriteBigEndian(ctx->AllocateData(4), ssrc_);
-
- // Add the remote SSRC
- ByteWriter<uint32_t>::WriteBigEndian(ctx->AllocateData(4), remote_ssrc_);
-
- // Build NACK bitmasks and write them to the RTCP message.
- // The nack list should be sorted and not contain duplicates if one
- // wants to build the smallest rtcp nack packet.
- int numOfNackFields = 0;
- int maxNackFields =
- std::min<int>(kRtcpMaxNackFields, (IP_PACKET_SIZE - ctx->position) / 4);
- int i = 0;
- while (i < ctx->nack_size && numOfNackFields < maxNackFields) {
- uint16_t nack = ctx->nack_list[i++];
- uint16_t bitmask = 0;
- while (i < ctx->nack_size) {
- int shift = static_cast<uint16_t>(ctx->nack_list[i] - nack) - 1;
- if (shift >= 0 && shift <= 15) {
- bitmask |= (1 << shift);
- ++i;
- } else {
- break;
- }
- }
- // Write the sequence number and the bitmask to the packet.
- assert(ctx->position + 4 < IP_PACKET_SIZE);
- ByteWriter<uint16_t>::WriteBigEndian(ctx->AllocateData(2), nack);
- ByteWriter<uint16_t>::WriteBigEndian(ctx->AllocateData(2), bitmask);
- numOfNackFields++;
- }
- ctx->buffer[nack_size_pos_] = static_cast<uint8_t>(2 + numOfNackFields);
-
- if (i != ctx->nack_size)
- LOG(LS_WARNING) << "Nack list too large for one packet.";
+rtc::scoped_ptr<rtcp::RtcpPacket> RTCPSender::BuildNACK(
+ const RtcpContext& ctx) {
+ rtcp::Nack* nack = new rtcp::Nack();
+ nack->From(ssrc_);
+ nack->To(remote_ssrc_);
+ nack->WithList(ctx.nack_list_, ctx.nack_size_);
// Report stats.
NACKStringBuilder stringBuilder;
- for (int idx = 0; idx < i; ++idx) {
- stringBuilder.PushNACK(ctx->nack_list[idx]);
- nack_stats_.ReportRequest(ctx->nack_list[idx]);
+ for (int idx = 0; idx < ctx.nack_size_; ++idx) {
+ stringBuilder.PushNACK(ctx.nack_list_[idx]);
+ nack_stats_.ReportRequest(ctx.nack_list_[idx]);
}
packet_type_counter_.nack_requests = nack_stats_.requests();
packet_type_counter_.unique_nack_requests = nack_stats_.unique_requests();
@@ -816,101 +720,66 @@ RTCPSender::BuildResult RTCPSender::BuildNACK(RtcpContext* ctx) {
TRACE_COUNTER_ID1(TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"), "RTCP_NACKCount",
ssrc_, packet_type_counter_.nack_packets);
- return BuildResult::kSuccess;
+ return rtc::scoped_ptr<rtcp::Nack>(nack);
}
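
The hand-rolled FCI writing deleted above (FMT=1, payload type 205, explicit ByteWriter calls) now lives behind rtcp::Nack::WithList(). For reference, a sketch of how a sorted NACK list packs into RFC 4585 (PID, BLP) items, mirroring the removed loop:

#include <cstdint>
#include <utility>
#include <vector>

// Packs sorted, de-duplicated sequence numbers into RFC 4585 NACK items:
// a 16-bit PID plus a 16-bit bitmask (BLP) covering the following 16
// sequence numbers.
std::vector<std::pair<uint16_t, uint16_t>> PackNack(
    const std::vector<uint16_t>& nack_list) {
  std::vector<std::pair<uint16_t, uint16_t>> items;
  size_t i = 0;
  while (i < nack_list.size()) {
    uint16_t pid = nack_list[i++];
    uint16_t blp = 0;
    while (i < nack_list.size()) {
      // Distance past the PID, minus one, selects a bitmask bit.
      int shift = static_cast<uint16_t>(nack_list[i] - pid) - 1;
      if (shift < 0 || shift > 15)
        break;  // Outside this item's window; start a new (PID, BLP) pair.
      blp |= (1 << shift);
      ++i;
    }
    items.push_back(std::make_pair(pid, blp));
  }
  return items;
}
// E.g. {5, 7, 8, 25} -> {(5, 0b110), (25, 0)}.
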
-RTCPSender::BuildResult RTCPSender::BuildBYE(RtcpContext* ctx) {
- rtcp::Bye bye;
- bye.From(ssrc_);
+rtc::scoped_ptr<rtcp::RtcpPacket> RTCPSender::BuildBYE(const RtcpContext& ctx) {
+ rtcp::Bye* bye = new rtcp::Bye();
+ bye->From(ssrc_);
for (uint32_t csrc : csrcs_)
- bye.WithCsrc(csrc);
+ bye->WithCsrc(csrc);
- PacketBuiltCallback callback(ctx);
- if (!callback.BuildPacket(bye))
- return BuildResult::kTruncated;
-
- return BuildResult::kSuccess;
+ return rtc::scoped_ptr<rtcp::Bye>(bye);
}
-RTCPSender::BuildResult RTCPSender::BuildReceiverReferenceTime(
- RtcpContext* ctx) {
-
+rtc::scoped_ptr<rtcp::RtcpPacket> RTCPSender::BuildReceiverReferenceTime(
+ const RtcpContext& ctx) {
if (last_xr_rr_.size() >= RTCP_NUMBER_OF_SR)
last_xr_rr_.erase(last_xr_rr_.begin());
last_xr_rr_.insert(std::pair<uint32_t, int64_t>(
- RTCPUtility::MidNtp(ctx->ntp_sec, ctx->ntp_frac),
- Clock::NtpToMs(ctx->ntp_sec, ctx->ntp_frac)));
+ RTCPUtility::MidNtp(ctx.ntp_sec_, ctx.ntp_frac_),
+ Clock::NtpToMs(ctx.ntp_sec_, ctx.ntp_frac_)));
- rtcp::Xr xr;
- xr.From(ssrc_);
+ rtcp::Xr* xr = new rtcp::Xr();
+ xr->From(ssrc_);
rtcp::Rrtr rrtr;
- rrtr.WithNtpSec(ctx->ntp_sec);
- rrtr.WithNtpFrac(ctx->ntp_frac);
+ rrtr.WithNtp(NtpTime(ctx.ntp_sec_, ctx.ntp_frac_));
- xr.WithRrtr(&rrtr);
+ xr->WithRrtr(&rrtr);
// TODO(sprang): Merge XR report sending to contain all of RRTR, DLRR, VOIP?
- PacketBuiltCallback callback(ctx);
- if (!callback.BuildPacket(xr))
- return BuildResult::kTruncated;
-
- return BuildResult::kSuccess;
+ return rtc::scoped_ptr<rtcp::Xr>(xr);
}
-RTCPSender::BuildResult RTCPSender::BuildDlrr(RtcpContext* ctx) {
- rtcp::Xr xr;
- xr.From(ssrc_);
+rtc::scoped_ptr<rtcp::RtcpPacket> RTCPSender::BuildDlrr(
+ const RtcpContext& ctx) {
+ rtcp::Xr* xr = new rtcp::Xr();
+ xr->From(ssrc_);
rtcp::Dlrr dlrr;
- const RtcpReceiveTimeInfo& info = ctx->feedback_state.last_xr_rr;
+ const RtcpReceiveTimeInfo& info = ctx.feedback_state_.last_xr_rr;
dlrr.WithDlrrItem(info.sourceSSRC, info.lastRR, info.delaySinceLastRR);
- xr.WithDlrr(&dlrr);
+ xr->WithDlrr(&dlrr);
- PacketBuiltCallback callback(ctx);
- if (!callback.BuildPacket(xr))
- return BuildResult::kTruncated;
-
- return BuildResult::kSuccess;
+ return rtc::scoped_ptr<rtcp::Xr>(xr);
}
// TODO(sprang): Add a unit test for this, or remove if the code isn't used.
-RTCPSender::BuildResult RTCPSender::BuildVoIPMetric(RtcpContext* ctx) {
- rtcp::Xr xr;
- xr.From(ssrc_);
+rtc::scoped_ptr<rtcp::RtcpPacket> RTCPSender::BuildVoIPMetric(
+ const RtcpContext& context) {
+ rtcp::Xr* xr = new rtcp::Xr();
+ xr->From(ssrc_);
rtcp::VoipMetric voip;
voip.To(remote_ssrc_);
- voip.LossRate(xr_voip_metric_.lossRate);
- voip.DiscardRate(xr_voip_metric_.discardRate);
- voip.BurstDensity(xr_voip_metric_.burstDensity);
- voip.GapDensity(xr_voip_metric_.gapDensity);
- voip.BurstDuration(xr_voip_metric_.burstDuration);
- voip.GapDuration(xr_voip_metric_.gapDuration);
- voip.RoundTripDelay(xr_voip_metric_.roundTripDelay);
- voip.EndSystemDelay(xr_voip_metric_.endSystemDelay);
- voip.SignalLevel(xr_voip_metric_.signalLevel);
- voip.NoiseLevel(xr_voip_metric_.noiseLevel);
- voip.Rerl(xr_voip_metric_.RERL);
- voip.Gmin(xr_voip_metric_.Gmin);
- voip.Rfactor(xr_voip_metric_.Rfactor);
- voip.ExtRfactor(xr_voip_metric_.extRfactor);
- voip.MosLq(xr_voip_metric_.MOSLQ);
- voip.MosCq(xr_voip_metric_.MOSCQ);
- voip.RxConfig(xr_voip_metric_.RXconfig);
- voip.JbNominal(xr_voip_metric_.JBnominal);
- voip.JbMax(xr_voip_metric_.JBmax);
- voip.JbAbsMax(xr_voip_metric_.JBabsMax);
-
- xr.WithVoipMetric(&voip);
-
- PacketBuiltCallback callback(ctx);
- if (!callback.BuildPacket(xr))
- return BuildResult::kTruncated;
-
- return BuildResult::kSuccess;
+ voip.WithVoipMetric(xr_voip_metric_);
+
+ xr->WithVoipMetric(&voip);
+
+ return rtc::scoped_ptr<rtcp::Xr>(xr);
}
int32_t RTCPSender::SendRTCP(const FeedbackState& feedback_state,
@@ -926,43 +795,59 @@ int32_t RTCPSender::SendRTCP(const FeedbackState& feedback_state,
int32_t RTCPSender::SendCompoundRTCP(
const FeedbackState& feedback_state,
- const std::set<RTCPPacketType>& packetTypes,
+ const std::set<RTCPPacketType>& packet_types,
int32_t nack_size,
const uint16_t* nack_list,
bool repeat,
uint64_t pictureID) {
+ PacketContainer container(transport_);
{
CriticalSectionScoped lock(critical_section_rtcp_sender_.get());
if (method_ == RtcpMode::kOff) {
LOG(LS_WARNING) << "Can't send rtcp if it is disabled.";
return -1;
}
- }
- uint8_t rtcp_buffer[IP_PACKET_SIZE];
- int rtcp_length =
- PrepareRTCP(feedback_state, packetTypes, nack_size, nack_list, repeat,
- pictureID, rtcp_buffer, IP_PACKET_SIZE);
- // Sanity don't send empty packets.
- if (rtcp_length <= 0)
- return -1;
+ // We need to send our NTP even if we haven't received any reports.
+ uint32_t ntp_sec;
+ uint32_t ntp_frac;
+ clock_->CurrentNtp(ntp_sec, ntp_frac);
+ RtcpContext context(feedback_state, nack_size, nack_list, repeat, pictureID,
+ ntp_sec, ntp_frac, &container);
+
+ PrepareReport(packet_types, feedback_state);
+
+ auto it = report_flags_.begin();
+ while (it != report_flags_.end()) {
+ auto builder_it = builders_.find(it->type);
+ RTC_DCHECK(builder_it != builders_.end());
+ if (it->is_volatile) {
+ report_flags_.erase(it++);
+ } else {
+ ++it;
+ }
- return SendToNetwork(rtcp_buffer, static_cast<size_t>(rtcp_length));
-}
+ BuilderFunc func = builder_it->second;
+ rtc::scoped_ptr<rtcp::RtcpPacket> packet = (this->*func)(context);
+ if (packet.get() == nullptr)
+ return -1;
+ container.Append(packet.release());
+ }
-int RTCPSender::PrepareRTCP(const FeedbackState& feedback_state,
- const std::set<RTCPPacketType>& packetTypes,
- int32_t nack_size,
- const uint16_t* nack_list,
- bool repeat,
- uint64_t pictureID,
- uint8_t* rtcp_buffer,
- int buffer_size) {
- CriticalSectionScoped lock(critical_section_rtcp_sender_.get());
+ if (packet_type_counter_observer_ != nullptr) {
+ packet_type_counter_observer_->RtcpPacketTypesCounterUpdated(
+ remote_ssrc_, packet_type_counter_);
+ }
- RtcpContext context(feedback_state, nack_size, nack_list, repeat, pictureID,
- rtcp_buffer, buffer_size);
+ RTC_DCHECK(AllVolatileFlagsConsumed());
+ }
+
+ size_t bytes_sent = container.SendPackets();
+ return bytes_sent == 0 ? -1 : 0;
+}
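
SendCompoundRTCP now drives everything through the builders_ map: each report flag resolves to a pointer-to-member builder, the returned packets accumulate in a PacketContainer, and serialization happens once in SendPackets(). A simplified, self-contained sketch of that member-pointer dispatch, with int standing in for rtcp::RtcpPacket:

#include <map>
#include <memory>

class SenderSketch {
 public:
  enum PacketType { kSr, kRr };
  SenderSketch() {
    builders_[kSr] = &SenderSketch::BuildSr;
    builders_[kRr] = &SenderSketch::BuildRr;
  }
  std::unique_ptr<int> Build(PacketType type) {
    BuilderFunc func = builders_[type];
    return (this->*func)();  // Member-pointer call, as in SendCompoundRTCP.
  }

 private:
  std::unique_ptr<int> BuildSr() { return std::unique_ptr<int>(new int(200)); }
  std::unique_ptr<int> BuildRr() { return std::unique_ptr<int>(new int(201)); }
  typedef std::unique_ptr<int> (SenderSketch::*BuilderFunc)();
  std::map<PacketType, BuilderFunc> builders_;
};
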
+void RTCPSender::PrepareReport(const std::set<RTCPPacketType>& packetTypes,
+ const FeedbackState& feedback_state) {
// Add all flags as volatile. Non volatile entries will not be overwritten
// and all new volatile flags added will be consumed by the end of this call.
SetFlags(packetTypes, true);
@@ -986,9 +871,6 @@ int RTCPSender::PrepareRTCP(const FeedbackState& feedback_state,
if (IsFlagPresent(kRtcpSr) || (IsFlagPresent(kRtcpRr) && !cname_.empty()))
SetFlag(kRtcpSdes, true);
- // We need to send our NTP even if we haven't received any reports.
- clock_->CurrentNtp(context.ntp_sec, context.ntp_frac);
-
if (generate_report) {
if (!sending_ && xr_send_receiver_reference_time_enabled_)
SetFlag(kRtcpXrReceiverReferenceTime, true);
@@ -996,15 +878,9 @@ int RTCPSender::PrepareRTCP(const FeedbackState& feedback_state,
SetFlag(kRtcpXrDlrrReportBlock, true);
// generate next time to send an RTCP report
- // seeded from RTP constructor
- int32_t random = rand() % 1000;
- int32_t timeToNext = RTCP_INTERVAL_AUDIO_MS;
-
- if (audio_) {
- timeToNext = (RTCP_INTERVAL_AUDIO_MS / 2) +
- (RTCP_INTERVAL_AUDIO_MS * random / 1000);
- } else {
- uint32_t minIntervalMs = RTCP_INTERVAL_AUDIO_MS;
+ uint32_t minIntervalMs = RTCP_INTERVAL_AUDIO_MS;
+
+ if (!audio_) {
if (sending_) {
// Calculate bandwidth for video; 360 / send bandwidth in kbit/s.
uint32_t send_bitrate_kbit = feedback_state.send_bitrate / 1000;
@@ -1013,74 +889,46 @@ int RTCPSender::PrepareRTCP(const FeedbackState& feedback_state,
}
if (minIntervalMs > RTCP_INTERVAL_VIDEO_MS)
minIntervalMs = RTCP_INTERVAL_VIDEO_MS;
- timeToNext = (minIntervalMs / 2) + (minIntervalMs * random / 1000);
}
+ // The interval between RTCP packets is varied randomly over the
+ // range [1/2,3/2] times the calculated interval.
+ uint32_t timeToNext =
+ random_.Rand(minIntervalMs * 1 / 2, minIntervalMs * 3 / 2);
next_time_to_send_rtcp_ = clock_->TimeInMilliseconds() + timeToNext;
StatisticianMap statisticians =
receive_statistics_->GetActiveStatisticians();
- if (!statisticians.empty()) {
- for (auto it = statisticians.begin(); it != statisticians.end(); ++it) {
- RTCPReportBlock report_block;
- if (PrepareReport(feedback_state, it->first, it->second,
- &report_block)) {
- AddReportBlock(report_block);
- }
- }
- }
- }
-
- auto it = report_flags_.begin();
- while (it != report_flags_.end()) {
- auto builder = builders_.find(it->type);
- RTC_DCHECK(builder != builders_.end());
- if (it->is_volatile) {
- report_flags_.erase(it++);
- } else {
- ++it;
- }
-
- uint32_t start_position = context.position;
- BuildResult result = (this->*(builder->second))(&context);
- switch (result) {
- case BuildResult::kError:
- return -1;
- case BuildResult::kTruncated:
- return context.position;
- case BuildResult::kAborted:
- context.position = start_position;
- FALLTHROUGH();
- case BuildResult::kSuccess:
- continue;
- default:
- abort();
+ RTC_DCHECK(report_blocks_.empty());
+ for (auto& it : statisticians) {
+ AddReportBlock(feedback_state, it.first, it.second);
}
}
-
- if (packet_type_counter_observer_ != NULL) {
- packet_type_counter_observer_->RtcpPacketTypesCounterUpdated(
- remote_ssrc_, packet_type_counter_);
- }
-
- RTC_DCHECK(AllVolatileFlagsConsumed());
-
- return context.position;
}
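
A worked example of the interval logic above, assuming (from rtp_rtcp_config.h) that RTCP_INTERVAL_AUDIO_MS is 5000 and RTCP_INTERVAL_VIDEO_MS is 1000, and that the elided video branch computes 360000 / send_bitrate_kbit ms ("360 / send bandwidth in kbit/s" seconds):

#include <algorithm>
#include <cstdint>
#include <cstdio>
#include <random>

int main() {
  const uint32_t kRtcpIntervalAudioMs = 5000;  // Assumed default.
  const uint32_t kRtcpIntervalVideoMs = 1000;  // Assumed video cap.
  const uint32_t send_bitrate_kbit = 800;      // Hypothetical send rate.

  uint32_t min_interval_ms = kRtcpIntervalAudioMs;
  if (send_bitrate_kbit > 0) {
    // "360 / send bandwidth in kbit/s" seconds, expressed in ms.
    min_interval_ms = 360000 / send_bitrate_kbit;  // 450 ms at 800 kbit/s.
  }
  min_interval_ms = std::min(min_interval_ms, kRtcpIntervalVideoMs);

  // Draw uniformly from [1/2, 3/2] of the interval, per RFC 3550.
  std::mt19937 rng(42);
  std::uniform_int_distribution<uint32_t> dist(min_interval_ms / 2,
                                               min_interval_ms * 3 / 2);
  std::printf("next RTCP report in %u ms\n", dist(rng));
  return 0;
}
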
-bool RTCPSender::PrepareReport(const FeedbackState& feedback_state,
- uint32_t ssrc,
- StreamStatistician* statistician,
- RTCPReportBlock* report_block) {
+bool RTCPSender::AddReportBlock(const FeedbackState& feedback_state,
+ uint32_t ssrc,
+ StreamStatistician* statistician) {
// Do we have receive statistics to send?
RtcpStatistics stats;
if (!statistician->GetStatistics(&stats, true))
return false;
- report_block->fractionLost = stats.fraction_lost;
- report_block->cumulativeLost = stats.cumulative_lost;
- report_block->extendedHighSeqNum =
- stats.extended_max_sequence_number;
- report_block->jitter = stats.jitter;
- report_block->remoteSSRC = ssrc;
+
+ if (report_blocks_.size() >= RTCP_MAX_REPORT_BLOCKS) {
+ LOG(LS_WARNING) << "Too many report blocks.";
+ return false;
+ }
+ RTC_DCHECK(report_blocks_.find(ssrc) == report_blocks_.end());
+ rtcp::ReportBlock* block = &report_blocks_[ssrc];
+ block->To(ssrc);
+ block->WithFractionLost(stats.fraction_lost);
+ if (!block->WithCumulativeLost(stats.cumulative_lost)) {
+ report_blocks_.erase(ssrc);
+ LOG(LS_WARNING) << "Cumulative lost is oversized.";
+ return false;
+ }
+ block->WithExtHighestSeqNum(stats.extended_max_sequence_number);
+ block->WithJitter(stats.jitter);
+ block->WithLastSr(feedback_state.remote_sr);
// TODO(sprang): Do we really need separate time stamps for each report?
// Get our NTP as late as possible to avoid a race.
@@ -1089,7 +937,6 @@ bool RTCPSender::PrepareReport(const FeedbackState& feedback_state,
clock_->CurrentNtp(ntp_secs, ntp_frac);
// Delay since last received report.
- uint32_t delaySinceLastReceivedSR = 0;
if ((feedback_state.last_rr_ntp_secs != 0) ||
(feedback_state.last_rr_ntp_frac != 0)) {
// Get the 16 lowest bits of seconds and the 16 highest bits of fractions.
@@ -1101,19 +948,11 @@ bool RTCPSender::PrepareReport(const FeedbackState& feedback_state,
receiveTime <<= 16;
receiveTime += (feedback_state.last_rr_ntp_frac & 0xffff0000) >> 16;
- delaySinceLastReceivedSR = now-receiveTime;
+ block->WithDelayLastSr(now - receiveTime);
}
- report_block->delaySinceLastSR = delaySinceLastReceivedSR;
- report_block->lastSR = feedback_state.remote_sr;
return true;
}
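
The LSR/DLSR arithmetic above works in "compact" 16.16 NTP: the low 16 bits of the seconds joined with the high 16 bits of the fraction, so one unit is 1/65536 s. A sketch mirroring those shifts:

#include <cstdint>

// Compact 16.16 NTP, as in RTCPUtility::MidNtp.
uint32_t CompactNtp(uint32_t ntp_sec, uint32_t ntp_frac) {
  return (ntp_sec << 16) | (ntp_frac >> 16);
}

// Delay since the last received SR, mirroring the masks above.
uint32_t DelayLastSr(uint32_t now_compact, uint32_t last_rr_ntp_sec,
                     uint32_t last_rr_ntp_frac) {
  uint32_t receive_time = (last_rr_ntp_sec & 0x0000FFFF) << 16;
  receive_time += (last_rr_ntp_frac & 0xFFFF0000) >> 16;
  return now_compact - receive_time;  // Unsigned wrap-around is intended.
}
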
-int32_t RTCPSender::SendToNetwork(const uint8_t* dataBuffer, size_t length) {
- if (transport_->SendRtcp(dataBuffer, length))
- return 0;
- return -1;
-}
-
void RTCPSender::SetCsrcs(const std::vector<uint32_t>& csrcs) {
assert(csrcs.size() <= kRtpCsrcSize);
CriticalSectionScoped lock(critical_section_rtcp_sender_.get());
@@ -1203,7 +1042,7 @@ bool RTCPSender::AllVolatileFlagsConsumed() const {
bool RTCPSender::SendFeedbackPacket(const rtcp::TransportFeedback& packet) {
class Sender : public rtcp::RtcpPacket::PacketReadyCallback {
public:
- Sender(Transport* transport)
+ explicit Sender(Transport* transport)
: transport_(transport), send_failure_(false) {}
void OnPacketReady(uint8_t* data, size_t length) override {
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_sender.h b/webrtc/modules/rtp_rtcp/source/rtcp_sender.h
index 9ec928363b..dd3aec4c9f 100644
--- a/webrtc/modules/rtp_rtcp/source/rtcp_sender.h
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_sender.h
@@ -15,13 +15,15 @@
#include <set>
#include <sstream>
#include <string>
+#include <vector>
+#include "webrtc/base/random.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/modules/remote_bitrate_estimator/include/bwe_defines.h"
#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
-#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
@@ -50,277 +52,251 @@ class NACKStringBuilder {
};
class RTCPSender {
-public:
- struct FeedbackState {
- FeedbackState();
+ public:
+ struct FeedbackState {
+ FeedbackState();
+
+ uint8_t send_payload_type;
+ uint32_t frequency_hz;
+ uint32_t packets_sent;
+ size_t media_bytes_sent;
+ uint32_t send_bitrate;
+
+ uint32_t last_rr_ntp_secs;
+ uint32_t last_rr_ntp_frac;
+ uint32_t remote_sr;
+
+ bool has_last_xr_rr;
+ RtcpReceiveTimeInfo last_xr_rr;
+
+ // Used when generating TMMBR.
+ ModuleRtpRtcpImpl* module;
+ };
+
+ RTCPSender(bool audio,
+ Clock* clock,
+ ReceiveStatistics* receive_statistics,
+ RtcpPacketTypeCounterObserver* packet_type_counter_observer,
+ Transport* outgoing_transport);
+ virtual ~RTCPSender();
+
+ RtcpMode Status() const;
+ void SetRTCPStatus(RtcpMode method);
+
+ bool Sending() const;
+ int32_t SetSendingStatus(const FeedbackState& feedback_state,
+ bool enabled); // combine the functions
+
+ int32_t SetNackStatus(bool enable);
- uint8_t send_payload_type;
- uint32_t frequency_hz;
- uint32_t packets_sent;
- size_t media_bytes_sent;
- uint32_t send_bitrate;
+ void SetStartTimestamp(uint32_t start_timestamp);
- uint32_t last_rr_ntp_secs;
- uint32_t last_rr_ntp_frac;
- uint32_t remote_sr;
+ void SetLastRtpTime(uint32_t rtp_timestamp, int64_t capture_time_ms);
- bool has_last_xr_rr;
- RtcpReceiveTimeInfo last_xr_rr;
+ void SetSSRC(uint32_t ssrc);
- // Used when generating TMMBR.
- ModuleRtpRtcpImpl* module;
- };
+ void SetRemoteSSRC(uint32_t ssrc);
- RTCPSender(bool audio,
- Clock* clock,
- ReceiveStatistics* receive_statistics,
- RtcpPacketTypeCounterObserver* packet_type_counter_observer,
- Transport* outgoing_transport);
- virtual ~RTCPSender();
+ int32_t SetCNAME(const char* cName);
- RtcpMode Status() const;
- void SetRTCPStatus(RtcpMode method);
+ int32_t AddMixedCNAME(uint32_t SSRC, const char* c_name);
- bool Sending() const;
- int32_t SetSendingStatus(const FeedbackState& feedback_state,
- bool enabled); // combine the functions
+ int32_t RemoveMixedCNAME(uint32_t SSRC);
- int32_t SetNackStatus(bool enable);
+ int64_t SendTimeOfSendReport(uint32_t sendReport);
- void SetStartTimestamp(uint32_t start_timestamp);
+ bool SendTimeOfXrRrReport(uint32_t mid_ntp, int64_t* time_ms) const;
- void SetLastRtpTime(uint32_t rtp_timestamp, int64_t capture_time_ms);
+ bool TimeToSendRTCPReport(bool sendKeyframeBeforeRTP = false) const;
- void SetSSRC(uint32_t ssrc);
+ int32_t SendRTCP(const FeedbackState& feedback_state,
+ RTCPPacketType packetType,
+ int32_t nackSize = 0,
+ const uint16_t* nackList = 0,
+ bool repeat = false,
+ uint64_t pictureID = 0);
- void SetRemoteSSRC(uint32_t ssrc);
+ int32_t SendCompoundRTCP(const FeedbackState& feedback_state,
+ const std::set<RTCPPacketType>& packetTypes,
+ int32_t nackSize = 0,
+ const uint16_t* nackList = 0,
+ bool repeat = false,
+ uint64_t pictureID = 0);
- int32_t SetCNAME(const char* cName);
+ bool REMB() const;
- int32_t AddMixedCNAME(uint32_t SSRC, const char* c_name);
+ void SetREMBStatus(bool enable);
- int32_t RemoveMixedCNAME(uint32_t SSRC);
+ void SetREMBData(uint32_t bitrate, const std::vector<uint32_t>& ssrcs);
- int64_t SendTimeOfSendReport(uint32_t sendReport);
+ bool TMMBR() const;
- bool SendTimeOfXrRrReport(uint32_t mid_ntp, int64_t* time_ms) const;
+ void SetTMMBRStatus(bool enable);
- bool TimeToSendRTCPReport(bool sendKeyframeBeforeRTP = false) const;
+ int32_t SetTMMBN(const TMMBRSet* boundingSet, uint32_t maxBitrateKbit);
- int32_t SendRTCP(const FeedbackState& feedback_state,
- RTCPPacketType packetType,
- int32_t nackSize = 0,
- const uint16_t* nackList = 0,
- bool repeat = false,
- uint64_t pictureID = 0);
-
- int32_t SendCompoundRTCP(const FeedbackState& feedback_state,
- const std::set<RTCPPacketType>& packetTypes,
- int32_t nackSize = 0,
- const uint16_t* nackList = 0,
- bool repeat = false,
- uint64_t pictureID = 0);
-
- bool REMB() const;
-
- void SetREMBStatus(bool enable);
-
- void SetREMBData(uint32_t bitrate, const std::vector<uint32_t>& ssrcs);
-
- bool TMMBR() const;
-
- void SetTMMBRStatus(bool enable);
-
- int32_t SetTMMBN(const TMMBRSet* boundingSet, uint32_t maxBitrateKbit);
-
- int32_t SetApplicationSpecificData(uint8_t subType,
- uint32_t name,
- const uint8_t* data,
- uint16_t length);
- int32_t SetRTCPVoIPMetrics(const RTCPVoIPMetric* VoIPMetric);
-
- void SendRtcpXrReceiverReferenceTime(bool enable);
-
- bool RtcpXrReceiverReferenceTime() const;
-
- void SetCsrcs(const std::vector<uint32_t>& csrcs);
-
- void SetTargetBitrate(unsigned int target_bitrate);
- bool SendFeedbackPacket(const rtcp::TransportFeedback& packet);
-
-private:
- struct RtcpContext;
-
- // The BuildResult indicates the outcome of a call to a builder method,
- // constructing a part of an RTCP packet:
- //
- // kError
- // Building RTCP packet failed, propagate error out to caller.
- // kAbort
- // The (partial) block being build should not be included. Reset current
- // buffer position to the state before the method call and proceed to the
- // next packet type.
- // kTruncated
- // There is not enough room in the buffer to fit the data being constructed.
- // (IP packet is full). Proceed to the next packet type, and call this
- // method again when a new buffer has been allocated.
- // TODO(sprang): Actually allocate multiple packets if needed.
- // kSuccess
- // Data has been successfully placed in the buffer.
-
- enum class BuildResult { kError, kAborted, kTruncated, kSuccess };
-
- int32_t SendToNetwork(const uint8_t* dataBuffer, size_t length);
-
- int32_t AddReportBlock(const RTCPReportBlock& report_block)
- EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
-
- bool PrepareReport(const FeedbackState& feedback_state,
- uint32_t ssrc,
- StreamStatistician* statistician,
- RTCPReportBlock* report_block);
-
- int PrepareRTCP(const FeedbackState& feedback_state,
- const std::set<RTCPPacketType>& packetTypes,
- int32_t nackSize,
- const uint16_t* nackList,
- bool repeat,
- uint64_t pictureID,
- uint8_t* rtcp_buffer,
- int buffer_size);
-
- BuildResult BuildSR(RtcpContext* context)
- EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
- BuildResult BuildRR(RtcpContext* context)
- EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
- BuildResult BuildSDES(RtcpContext* context)
- EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
- BuildResult BuildPLI(RtcpContext* context)
- EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
- BuildResult BuildREMB(RtcpContext* context)
- EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
- BuildResult BuildTMMBR(RtcpContext* context)
- EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
- BuildResult BuildTMMBN(RtcpContext* context)
- EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
- BuildResult BuildAPP(RtcpContext* context)
- EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
- BuildResult BuildVoIPMetric(RtcpContext* context)
- EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
- BuildResult BuildBYE(RtcpContext* context)
- EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
- BuildResult BuildFIR(RtcpContext* context)
- EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
- BuildResult BuildSLI(RtcpContext* context)
- EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
- BuildResult BuildRPSI(RtcpContext* context)
- EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
- BuildResult BuildNACK(RtcpContext* context)
- EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
- BuildResult BuildReceiverReferenceTime(RtcpContext* context)
- EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
- BuildResult BuildDlrr(RtcpContext* context)
- EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
-
-private:
- const bool audio_;
- Clock* const clock_;
- RtcpMode method_ GUARDED_BY(critical_section_rtcp_sender_);
-
- Transport* const transport_;
-
- rtc::scoped_ptr<CriticalSectionWrapper> critical_section_rtcp_sender_;
- bool using_nack_ GUARDED_BY(critical_section_rtcp_sender_);
- bool sending_ GUARDED_BY(critical_section_rtcp_sender_);
- bool remb_enabled_ GUARDED_BY(critical_section_rtcp_sender_);
-
- int64_t next_time_to_send_rtcp_ GUARDED_BY(critical_section_rtcp_sender_);
-
- uint32_t start_timestamp_ GUARDED_BY(critical_section_rtcp_sender_);
- uint32_t last_rtp_timestamp_ GUARDED_BY(critical_section_rtcp_sender_);
- int64_t last_frame_capture_time_ms_ GUARDED_BY(critical_section_rtcp_sender_);
- uint32_t ssrc_ GUARDED_BY(critical_section_rtcp_sender_);
- // SSRC that we receive on our RTP channel
- uint32_t remote_ssrc_ GUARDED_BY(critical_section_rtcp_sender_);
- std::string cname_ GUARDED_BY(critical_section_rtcp_sender_);
-
- ReceiveStatistics* receive_statistics_
- GUARDED_BY(critical_section_rtcp_sender_);
- std::map<uint32_t, rtcp::ReportBlock> report_blocks_
- GUARDED_BY(critical_section_rtcp_sender_);
- std::map<uint32_t, std::string> csrc_cnames_
- GUARDED_BY(critical_section_rtcp_sender_);
-
- // Sent
- uint32_t last_send_report_[RTCP_NUMBER_OF_SR] GUARDED_BY(
- critical_section_rtcp_sender_); // allow packet loss and RTT above 1 sec
- int64_t last_rtcp_time_[RTCP_NUMBER_OF_SR] GUARDED_BY(
- critical_section_rtcp_sender_);
-
- // Sent XR receiver reference time report.
- // <mid ntp (mid 32 bits of the 64 bits NTP timestamp), send time in ms>.
- std::map<uint32_t, int64_t> last_xr_rr_
- GUARDED_BY(critical_section_rtcp_sender_);
-
- // send CSRCs
- std::vector<uint32_t> csrcs_ GUARDED_BY(critical_section_rtcp_sender_);
-
- // Full intra request
- uint8_t sequence_number_fir_ GUARDED_BY(critical_section_rtcp_sender_);
-
- // REMB
- uint32_t remb_bitrate_ GUARDED_BY(critical_section_rtcp_sender_);
- std::vector<uint32_t> remb_ssrcs_ GUARDED_BY(critical_section_rtcp_sender_);
-
- TMMBRHelp tmmbr_help_ GUARDED_BY(critical_section_rtcp_sender_);
- uint32_t tmmbr_send_ GUARDED_BY(critical_section_rtcp_sender_);
- uint32_t packet_oh_send_ GUARDED_BY(critical_section_rtcp_sender_);
-
- // APP
- uint8_t app_sub_type_ GUARDED_BY(critical_section_rtcp_sender_);
- uint32_t app_name_ GUARDED_BY(critical_section_rtcp_sender_);
- rtc::scoped_ptr<uint8_t[]> app_data_ GUARDED_BY(critical_section_rtcp_sender_);
- uint16_t app_length_ GUARDED_BY(critical_section_rtcp_sender_);
-
- // True if sending of XR Receiver reference time report is enabled.
- bool xr_send_receiver_reference_time_enabled_
- GUARDED_BY(critical_section_rtcp_sender_);
-
- // XR VoIP metric
- RTCPVoIPMetric xr_voip_metric_ GUARDED_BY(critical_section_rtcp_sender_);
-
- RtcpPacketTypeCounterObserver* const packet_type_counter_observer_;
- RtcpPacketTypeCounter packet_type_counter_
- GUARDED_BY(critical_section_rtcp_sender_);
-
- RTCPUtility::NackStats nack_stats_ GUARDED_BY(critical_section_rtcp_sender_);
-
- void SetFlag(RTCPPacketType type, bool is_volatile)
- EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
- void SetFlags(const std::set<RTCPPacketType>& types, bool is_volatile)
- EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
- bool IsFlagPresent(RTCPPacketType type) const
- EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
- bool ConsumeFlag(RTCPPacketType type, bool forced = false)
- EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
- bool AllVolatileFlagsConsumed() const
- EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
- struct ReportFlag {
- ReportFlag(RTCPPacketType type, bool is_volatile)
- : type(type), is_volatile(is_volatile) {}
- bool operator<(const ReportFlag& flag) const { return type < flag.type; }
- bool operator==(const ReportFlag& flag) const { return type == flag.type; }
- const RTCPPacketType type;
- const bool is_volatile;
- };
-
- std::set<ReportFlag> report_flags_ GUARDED_BY(critical_section_rtcp_sender_);
-
- typedef BuildResult (RTCPSender::*Builder)(RtcpContext*);
- std::map<RTCPPacketType, Builder> builders_;
-
- class PacketBuiltCallback;
+ int32_t SetApplicationSpecificData(uint8_t subType,
+ uint32_t name,
+ const uint8_t* data,
+ uint16_t length);
+ int32_t SetRTCPVoIPMetrics(const RTCPVoIPMetric* VoIPMetric);
+
+ void SendRtcpXrReceiverReferenceTime(bool enable);
+
+ bool RtcpXrReceiverReferenceTime() const;
+
+ void SetCsrcs(const std::vector<uint32_t>& csrcs);
+
+ void SetTargetBitrate(unsigned int target_bitrate);
+ bool SendFeedbackPacket(const rtcp::TransportFeedback& packet);
+
+ private:
+ class RtcpContext;
+
+ // Determine which RTCP messages should be sent and setup flags.
+ void PrepareReport(const std::set<RTCPPacketType>& packetTypes,
+ const FeedbackState& feedback_state)
+ EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+
+ bool AddReportBlock(const FeedbackState& feedback_state,
+ uint32_t ssrc,
+ StreamStatistician* statistician)
+ EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+
+ rtc::scoped_ptr<rtcp::RtcpPacket> BuildSR(const RtcpContext& context)
+ EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ rtc::scoped_ptr<rtcp::RtcpPacket> BuildRR(const RtcpContext& context)
+ EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ rtc::scoped_ptr<rtcp::RtcpPacket> BuildSDES(const RtcpContext& context)
+ EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ rtc::scoped_ptr<rtcp::RtcpPacket> BuildPLI(const RtcpContext& context)
+ EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ rtc::scoped_ptr<rtcp::RtcpPacket> BuildREMB(const RtcpContext& context)
+ EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ rtc::scoped_ptr<rtcp::RtcpPacket> BuildTMMBR(const RtcpContext& context)
+ EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ rtc::scoped_ptr<rtcp::RtcpPacket> BuildTMMBN(const RtcpContext& context)
+ EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ rtc::scoped_ptr<rtcp::RtcpPacket> BuildAPP(const RtcpContext& context)
+ EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ rtc::scoped_ptr<rtcp::RtcpPacket> BuildVoIPMetric(const RtcpContext& context)
+ EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ rtc::scoped_ptr<rtcp::RtcpPacket> BuildBYE(const RtcpContext& context)
+ EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ rtc::scoped_ptr<rtcp::RtcpPacket> BuildFIR(const RtcpContext& context)
+ EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ rtc::scoped_ptr<rtcp::RtcpPacket> BuildSLI(const RtcpContext& context)
+ EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ rtc::scoped_ptr<rtcp::RtcpPacket> BuildRPSI(const RtcpContext& context)
+ EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ rtc::scoped_ptr<rtcp::RtcpPacket> BuildNACK(const RtcpContext& context)
+ EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ rtc::scoped_ptr<rtcp::RtcpPacket> BuildReceiverReferenceTime(
+ const RtcpContext& context)
+ EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ rtc::scoped_ptr<rtcp::RtcpPacket> BuildDlrr(const RtcpContext& context)
+ EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+
+ private:
+ const bool audio_;
+ Clock* const clock_;
+ Random random_ GUARDED_BY(critical_section_rtcp_sender_);
+ RtcpMode method_ GUARDED_BY(critical_section_rtcp_sender_);
+
+ Transport* const transport_;
+
+ rtc::scoped_ptr<CriticalSectionWrapper> critical_section_rtcp_sender_;
+ bool using_nack_ GUARDED_BY(critical_section_rtcp_sender_);
+ bool sending_ GUARDED_BY(critical_section_rtcp_sender_);
+ bool remb_enabled_ GUARDED_BY(critical_section_rtcp_sender_);
+
+ int64_t next_time_to_send_rtcp_ GUARDED_BY(critical_section_rtcp_sender_);
+
+ uint32_t start_timestamp_ GUARDED_BY(critical_section_rtcp_sender_);
+ uint32_t last_rtp_timestamp_ GUARDED_BY(critical_section_rtcp_sender_);
+ int64_t last_frame_capture_time_ms_ GUARDED_BY(critical_section_rtcp_sender_);
+ uint32_t ssrc_ GUARDED_BY(critical_section_rtcp_sender_);
+ // SSRC that we receive on our RTP channel
+ uint32_t remote_ssrc_ GUARDED_BY(critical_section_rtcp_sender_);
+ std::string cname_ GUARDED_BY(critical_section_rtcp_sender_);
+
+ ReceiveStatistics* receive_statistics_
+ GUARDED_BY(critical_section_rtcp_sender_);
+ std::map<uint32_t, rtcp::ReportBlock> report_blocks_
+ GUARDED_BY(critical_section_rtcp_sender_);
+ std::map<uint32_t, std::string> csrc_cnames_
+ GUARDED_BY(critical_section_rtcp_sender_);
+
+ // Sent
+ uint32_t last_send_report_[RTCP_NUMBER_OF_SR] GUARDED_BY(
+ critical_section_rtcp_sender_); // allow packet loss and RTT above 1 sec
+ int64_t last_rtcp_time_[RTCP_NUMBER_OF_SR] GUARDED_BY(
+ critical_section_rtcp_sender_);
+
+ // Sent XR receiver reference time report.
+ // <mid ntp (mid 32 bits of the 64 bits NTP timestamp), send time in ms>.
+ std::map<uint32_t, int64_t> last_xr_rr_
+ GUARDED_BY(critical_section_rtcp_sender_);
+
+ // send CSRCs
+ std::vector<uint32_t> csrcs_ GUARDED_BY(critical_section_rtcp_sender_);
+
+ // Full intra request
+ uint8_t sequence_number_fir_ GUARDED_BY(critical_section_rtcp_sender_);
+
+ // REMB
+ uint32_t remb_bitrate_ GUARDED_BY(critical_section_rtcp_sender_);
+ std::vector<uint32_t> remb_ssrcs_ GUARDED_BY(critical_section_rtcp_sender_);
+
+ TMMBRHelp tmmbr_help_ GUARDED_BY(critical_section_rtcp_sender_);
+ uint32_t tmmbr_send_ GUARDED_BY(critical_section_rtcp_sender_);
+ uint32_t packet_oh_send_ GUARDED_BY(critical_section_rtcp_sender_);
+
+ // APP
+ uint8_t app_sub_type_ GUARDED_BY(critical_section_rtcp_sender_);
+ uint32_t app_name_ GUARDED_BY(critical_section_rtcp_sender_);
+ rtc::scoped_ptr<uint8_t[]> app_data_
+ GUARDED_BY(critical_section_rtcp_sender_);
+ uint16_t app_length_ GUARDED_BY(critical_section_rtcp_sender_);
+
+ // True if sending of XR Receiver reference time report is enabled.
+ bool xr_send_receiver_reference_time_enabled_
+ GUARDED_BY(critical_section_rtcp_sender_);
+
+ // XR VoIP metric
+ RTCPVoIPMetric xr_voip_metric_ GUARDED_BY(critical_section_rtcp_sender_);
+
+ RtcpPacketTypeCounterObserver* const packet_type_counter_observer_;
+ RtcpPacketTypeCounter packet_type_counter_
+ GUARDED_BY(critical_section_rtcp_sender_);
+
+ RTCPUtility::NackStats nack_stats_ GUARDED_BY(critical_section_rtcp_sender_);
+
+ void SetFlag(RTCPPacketType type, bool is_volatile)
+ EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ void SetFlags(const std::set<RTCPPacketType>& types, bool is_volatile)
+ EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ bool IsFlagPresent(RTCPPacketType type) const
+ EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ bool ConsumeFlag(RTCPPacketType type, bool forced = false)
+ EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ bool AllVolatileFlagsConsumed() const
+ EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+ struct ReportFlag {
+ ReportFlag(RTCPPacketType type, bool is_volatile)
+ : type(type), is_volatile(is_volatile) {}
+ bool operator<(const ReportFlag& flag) const { return type < flag.type; }
+ bool operator==(const ReportFlag& flag) const { return type == flag.type; }
+ const RTCPPacketType type;
+ const bool is_volatile;
+ };
+
+ std::set<ReportFlag> report_flags_ GUARDED_BY(critical_section_rtcp_sender_);
+
+ typedef rtc::scoped_ptr<rtcp::RtcpPacket> (RTCPSender::*BuilderFunc)(
+ const RtcpContext&);
+ std::map<RTCPPacketType, BuilderFunc> builders_;
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_SENDER_H_
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_SENDER_H_
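
Nearly every member in this header is annotated: GUARDED_BY ties a field to critical_section_rtcp_sender_, and EXCLUSIVE_LOCKS_REQUIRED ties the private builders to it, letting clang's -Wthread-safety analysis flag unlocked access at compile time. A minimal sketch of the idea; the macros are spelled out in comments so it builds anywhere:

#include <mutex>

class CounterSketch {
 public:
  void Increment() {
    std::lock_guard<std::mutex> lock(mutex_);
    IncrementLocked();  // OK: the lock the annotation demands is held.
  }

 private:
  // In the header above: EXCLUSIVE_LOCKS_REQUIRED(mutex_).
  void IncrementLocked() { ++value_; }

  std::mutex mutex_;
  int value_ = 0;  // In the header above: GUARDED_BY(mutex_).
};
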
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_utility.cc b/webrtc/modules/rtp_rtcp/source/rtcp_utility.cc
index d2b80438cc..e19499612d 100644
--- a/webrtc/modules/rtp_rtcp/source/rtcp_utility.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_utility.cc
@@ -8,9 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/base/checks.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
-#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h"
#include <assert.h>
#include <math.h> // ceil
@@ -19,6 +17,7 @@
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h"
namespace webrtc {
@@ -42,8 +41,8 @@ void NackStats::ReportRequest(uint16_t sequence_number) {
uint32_t MidNtp(uint32_t ntp_sec, uint32_t ntp_frac) {
return (ntp_sec << 16) + (ntp_frac >> 16);
-} // end RTCPUtility
}
+} // namespace RTCPUtility
// RTCPParserV2 : currently read only
RTCPUtility::RTCPParserV2::RTCPParserV2(const uint8_t* rtcpData,
diff --git a/webrtc/modules/rtp_rtcp/source/rtcp_utility.h b/webrtc/modules/rtp_rtcp/source/rtcp_utility.h
index f05d512919..0b03ceb56e 100644
--- a/webrtc/modules/rtp_rtcp/source/rtcp_utility.h
+++ b/webrtc/modules/rtp_rtcp/source/rtcp_utility.h
@@ -14,7 +14,7 @@
#include <stddef.h> // size_t, ptrdiff_t
#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_rtcp_config.h"
#include "webrtc/typedefs.h"
@@ -487,6 +487,6 @@ class RTCPPacketIterator {
RtcpCommonHeader _header;
};
-} // RTCPUtility
+} // namespace RTCPUtility
} // namespace webrtc
#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_UTILITY_H_
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_fec_unittest.cc b/webrtc/modules/rtp_rtcp/source/rtp_fec_unittest.cc
index 541f522f8d..80f961bd1e 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_fec_unittest.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_fec_unittest.cc
@@ -11,6 +11,7 @@
#include <list>
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/random.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
#include "webrtc/modules/rtp_rtcp/source/forward_error_correction.h"
@@ -41,8 +42,12 @@ template <typename T> void ClearList(std::list<T*>* my_list) {
class RtpFecTest : public ::testing::Test {
protected:
RtpFecTest()
- : fec_(new ForwardErrorCorrection()), ssrc_(rand()), fec_seq_num_(0) {}
+ : random_(0xfec133700742),
+ fec_(new ForwardErrorCorrection()),
+ ssrc_(random_.Rand<uint32_t>()),
+ fec_seq_num_(0) {}
+ webrtc::Random random_;
ForwardErrorCorrection* fec_;
int ssrc_;
uint16_t fec_seq_num_;
@@ -891,22 +896,20 @@ int RtpFecTest::ConstructMediaPacketsSeqNum(int num_media_packets,
assert(num_media_packets > 0);
ForwardErrorCorrection::Packet* media_packet = NULL;
int sequence_number = start_seq_num;
- int time_stamp = rand();
+ int time_stamp = random_.Rand<int>();
for (int i = 0; i < num_media_packets; ++i) {
media_packet = new ForwardErrorCorrection::Packet;
media_packet_list_.push_back(media_packet);
- media_packet->length = static_cast<size_t>(
- (static_cast<float>(rand()) / RAND_MAX) *
- (IP_PACKET_SIZE - kRtpHeaderSize - kTransportOverhead -
- ForwardErrorCorrection::PacketOverhead()));
+ const uint32_t kMinPacketSize = kRtpHeaderSize;
+ const uint32_t kMaxPacketSize = IP_PACKET_SIZE - kRtpHeaderSize -
+ kTransportOverhead -
+ ForwardErrorCorrection::PacketOverhead();
+ media_packet->length = random_.Rand(kMinPacketSize, kMaxPacketSize);
- if (media_packet->length < kRtpHeaderSize) {
- media_packet->length = kRtpHeaderSize;
- }
// Generate random values for the first 2 bytes
- media_packet->data[0] = static_cast<uint8_t>(rand() % 256);
- media_packet->data[1] = static_cast<uint8_t>(rand() % 256);
+ media_packet->data[0] = random_.Rand<uint8_t>();
+ media_packet->data[1] = random_.Rand<uint8_t>();
// The first two bits are assumed to be 10 by the FEC encoder.
// In fact the FEC decoder will set the two first bits to 10 regardless of
@@ -929,7 +932,7 @@ int RtpFecTest::ConstructMediaPacketsSeqNum(int num_media_packets,
// Generate random values for payload.
for (size_t j = 12; j < media_packet->length; ++j) {
- media_packet->data[j] = static_cast<uint8_t>(rand() % 256);
+ media_packet->data[j] = random_.Rand<uint8_t>();
}
sequence_number++;
}
@@ -940,5 +943,5 @@ int RtpFecTest::ConstructMediaPacketsSeqNum(int num_media_packets,
}
int RtpFecTest::ConstructMediaPackets(int num_media_packets) {
- return ConstructMediaPacketsSeqNum(num_media_packets, rand());
+ return ConstructMediaPacketsSeqNum(num_media_packets, random_.Rand<int>());
}
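
The move from rand() to a fixed-seed webrtc::Random makes the fuzzed packet sizes and payloads reproducible across test runs. A standard-library sketch of the same pattern, assuming an inclusive Rand(low, high) like webrtc::Random's:

#include <cstdint>
#include <random>

// Fixed-seed PRNG wrapper: failures reproduce because the sequence is a
// pure function of the seed.
class TestRandom {
 public:
  explicit TestRandom(uint64_t seed) : rng_(seed) {}
  // Inclusive-range draw, like webrtc::Random::Rand(low, high).
  uint32_t Rand(uint32_t low, uint32_t high) {
    std::uniform_int_distribution<uint32_t> dist(low, high);
    return dist(rng_);
  }

 private:
  std::mt19937_64 rng_;
};
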
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_format.h b/webrtc/modules/rtp_rtcp/source/rtp_format.h
index 18225f9bb4..3519499248 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_format.h
+++ b/webrtc/modules/rtp_rtcp/source/rtp_format.h
@@ -14,8 +14,8 @@
#include <string>
#include "webrtc/base/constructormagic.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
namespace webrtc {
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_format_h264.cc b/webrtc/modules/rtp_rtcp/source/rtp_format_h264.cc
index aeef44364a..c422577c81 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_format_h264.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_format_h264.cc
@@ -11,7 +11,7 @@
#include <string.h>
#include "webrtc/base/logging.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
#include "webrtc/modules/rtp_rtcp/source/h264_sps_parser.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format_h264.h"
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_format_h264_unittest.cc b/webrtc/modules/rtp_rtcp/source/rtp_format_h264_unittest.cc
index 1a14b5554a..d29e3d4f21 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_format_h264_unittest.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_format_h264_unittest.cc
@@ -13,7 +13,7 @@
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format.h"
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.cc b/webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.cc
index 39b64c6ffa..b47e9b9359 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.cc
@@ -11,7 +11,7 @@
#include <string>
#include "webrtc/base/logging.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h"
namespace webrtc {
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_format_vp8.h b/webrtc/modules/rtp_rtcp/source/rtp_format_vp8.h
index 63db349c74..d62ecba85f 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_format_vp8.h
+++ b/webrtc/modules/rtp_rtcp/source/rtp_format_vp8.h
@@ -30,7 +30,7 @@
#include <vector>
#include "webrtc/base/constructormagic.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format.h"
#include "webrtc/typedefs.h"
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h b/webrtc/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h
index 2fe963251f..668476833d 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h
+++ b/webrtc/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h
@@ -19,7 +19,7 @@
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_VP8_TEST_HELPER_H_
#include "webrtc/base/constructormagic.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format_vp8.h"
#include "webrtc/typedefs.h"
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_format_vp9.cc b/webrtc/modules/rtp_rtcp/source/rtp_format_vp9.cc
index 0e76a8eae8..d2f22d5044 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_format_vp9.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_format_vp9.cc
@@ -47,10 +47,6 @@ int16_t Tl0PicIdxField(const RTPVideoHeaderVP9& hdr, uint8_t def) {
return (hdr.tl0_pic_idx == kNoTl0PicIdx) ? def : hdr.tl0_pic_idx;
}
-uint8_t GofIdxField(const RTPVideoHeaderVP9& hdr, uint8_t def) {
- return (hdr.gof_idx == kNoGofIdx) ? def : hdr.gof_idx;
-}
-
// Picture ID:
//
// +-+-+-+-+-+-+-+-+
@@ -74,19 +70,17 @@ bool PictureIdPresent(const RTPVideoHeaderVP9& hdr) {
// Flexible mode (F=1): Non-flexible mode (F=0):
//
// +-+-+-+-+-+-+-+-+ +-+-+-+-+-+-+-+-+
-// L: | T |U| S |D| |GOF_IDX| S |D|
+// L: | T |U| S |D| | T |U| S |D|
// +-+-+-+-+-+-+-+-+ +-+-+-+-+-+-+-+-+
// | TL0PICIDX |
// +-+-+-+-+-+-+-+-+
//
size_t LayerInfoLength(const RTPVideoHeaderVP9& hdr) {
- if (hdr.flexible_mode) {
- return (hdr.temporal_idx == kNoTemporalIdx &&
- hdr.spatial_idx == kNoSpatialIdx) ? 0 : 1;
- } else {
- return (hdr.gof_idx == kNoGofIdx &&
- hdr.spatial_idx == kNoSpatialIdx) ? 0 : 2;
+ if (hdr.temporal_idx == kNoTemporalIdx &&
+ hdr.spatial_idx == kNoSpatialIdx) {
+ return 0;
}
+ return hdr.flexible_mode ? 1 : 2;
}
bool LayerInfoPresent(const RTPVideoHeaderVP9& hdr) {
@@ -198,8 +192,8 @@ bool WritePictureId(const RTPVideoHeaderVP9& vp9,
// L: | T |U| S |D|
// +-+-+-+-+-+-+-+-+
//
-bool WriteLayerInfoFlexibleMode(const RTPVideoHeaderVP9& vp9,
- rtc::BitBufferWriter* writer) {
+bool WriteLayerInfoCommon(const RTPVideoHeaderVP9& vp9,
+ rtc::BitBufferWriter* writer) {
RETURN_FALSE_ON_ERROR(writer->WriteBits(TemporalIdxField(vp9, 0), 3));
RETURN_FALSE_ON_ERROR(writer->WriteBits(vp9.temporal_up_switch ? 1 : 0, 1));
RETURN_FALSE_ON_ERROR(writer->WriteBits(SpatialIdxField(vp9, 0), 3));
@@ -210,27 +204,26 @@ bool WriteLayerInfoFlexibleMode(const RTPVideoHeaderVP9& vp9,
// Non-flexible mode (F=0):
//
// +-+-+-+-+-+-+-+-+
-// L: |GOF_IDX| S |D|
+// L: | T |U| S |D|
// +-+-+-+-+-+-+-+-+
// | TL0PICIDX |
// +-+-+-+-+-+-+-+-+
//
bool WriteLayerInfoNonFlexibleMode(const RTPVideoHeaderVP9& vp9,
rtc::BitBufferWriter* writer) {
- RETURN_FALSE_ON_ERROR(writer->WriteBits(GofIdxField(vp9, 0), 4));
- RETURN_FALSE_ON_ERROR(writer->WriteBits(SpatialIdxField(vp9, 0), 3));
- RETURN_FALSE_ON_ERROR(writer->WriteBits(vp9.inter_layer_predicted ? 1: 0, 1));
RETURN_FALSE_ON_ERROR(writer->WriteUInt8(Tl0PicIdxField(vp9, 0)));
return true;
}
bool WriteLayerInfo(const RTPVideoHeaderVP9& vp9,
rtc::BitBufferWriter* writer) {
- if (vp9.flexible_mode) {
- return WriteLayerInfoFlexibleMode(vp9, writer);
- } else {
- return WriteLayerInfoNonFlexibleMode(vp9, writer);
- }
+ if (!WriteLayerInfoCommon(vp9, writer))
+ return false;
+
+ if (vp9.flexible_mode)
+ return true;
+
+ return WriteLayerInfoNonFlexibleMode(vp9, writer);
}
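
After this change both modes share the | T(3) | U(1) | S(3) | D(1) | layer byte; non-flexible mode simply appends TL0PICIDX. A sketch of that packing, matching the unit-test bytes further down in this diff:

#include <cstdint>

// Shared VP9 layer-info byte: | T |U| S |D|.
uint8_t PackLayerInfoByte(uint8_t temporal_idx, bool temporal_up_switch,
                          uint8_t spatial_idx, bool inter_layer_predicted) {
  return static_cast<uint8_t>((temporal_idx << 5) |
                              ((temporal_up_switch ? 1 : 0) << 4) |
                              (spatial_idx << 1) |
                              (inter_layer_predicted ? 1 : 0));
}
// E.g. T=2, U=1, S=1, D=1 -> 0x53, as in ParseLayerInfoWithNonFlexibleMode.
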
// Reference indices:
@@ -246,7 +239,7 @@ bool WriteRefIndices(const RTPVideoHeaderVP9& vp9,
vp9.num_ref_pics == 0 || vp9.num_ref_pics > kMaxVp9RefPics) {
return false;
}
- for (size_t i = 0; i < vp9.num_ref_pics; ++i) {
+ for (uint8_t i = 0; i < vp9.num_ref_pics; ++i) {
bool n_bit = !(i == vp9.num_ref_pics - 1);
RETURN_FALSE_ON_ERROR(writer->WriteBits(vp9.pid_diff[i], 7));
RETURN_FALSE_ON_ERROR(writer->WriteBits(n_bit ? 1 : 0, 1));
@@ -301,7 +294,7 @@ bool WriteSsData(const RTPVideoHeaderVP9& vp9, rtc::BitBufferWriter* writer) {
writer->WriteBits(vp9.gof.temporal_up_switch[i] ? 1 : 0, 1));
RETURN_FALSE_ON_ERROR(writer->WriteBits(vp9.gof.num_ref_pics[i], 2));
RETURN_FALSE_ON_ERROR(writer->WriteBits(kReservedBitValue0, 2));
- for (size_t r = 0; r < vp9.gof.num_ref_pics[i]; ++r) {
+ for (uint8_t r = 0; r < vp9.gof.num_ref_pics[i]; ++r) {
RETURN_FALSE_ON_ERROR(writer->WriteUInt8(vp9.gof.pid_diff[i][r]));
}
}
@@ -337,8 +330,7 @@ bool ParsePictureId(rtc::BitBuffer* parser, RTPVideoHeaderVP9* vp9) {
// L: | T |U| S |D|
// +-+-+-+-+-+-+-+-+
//
-bool ParseLayerInfoFlexibleMode(rtc::BitBuffer* parser,
- RTPVideoHeaderVP9* vp9) {
+bool ParseLayerInfoCommon(rtc::BitBuffer* parser, RTPVideoHeaderVP9* vp9) {
uint32_t t, u_bit, s, d_bit;
RETURN_FALSE_ON_ERROR(parser->ReadBits(&t, 3));
RETURN_FALSE_ON_ERROR(parser->ReadBits(&u_bit, 1));
@@ -354,32 +346,27 @@ bool ParseLayerInfoFlexibleMode(rtc::BitBuffer* parser,
// Layer indices (non-flexible mode):
//
// +-+-+-+-+-+-+-+-+
-// L: |GOF_IDX| S |D|
+// L: | T |U| S |D|
// +-+-+-+-+-+-+-+-+
// | TL0PICIDX |
// +-+-+-+-+-+-+-+-+
//
bool ParseLayerInfoNonFlexibleMode(rtc::BitBuffer* parser,
RTPVideoHeaderVP9* vp9) {
- uint32_t gof_idx, s, d_bit;
uint8_t tl0picidx;
- RETURN_FALSE_ON_ERROR(parser->ReadBits(&gof_idx, 4));
- RETURN_FALSE_ON_ERROR(parser->ReadBits(&s, 3));
- RETURN_FALSE_ON_ERROR(parser->ReadBits(&d_bit, 1));
RETURN_FALSE_ON_ERROR(parser->ReadUInt8(&tl0picidx));
- vp9->gof_idx = gof_idx;
- vp9->spatial_idx = s;
- vp9->inter_layer_predicted = d_bit ? true : false;
vp9->tl0_pic_idx = tl0picidx;
return true;
}
bool ParseLayerInfo(rtc::BitBuffer* parser, RTPVideoHeaderVP9* vp9) {
- if (vp9->flexible_mode) {
- return ParseLayerInfoFlexibleMode(parser, vp9);
- } else {
- return ParseLayerInfoNonFlexibleMode(parser, vp9);
- }
+ if (!ParseLayerInfoCommon(parser, vp9))
+ return false;
+
+ if (vp9->flexible_mode)
+ return true;
+
+ return ParseLayerInfoNonFlexibleMode(parser, vp9);
}
// Reference indices:
@@ -466,7 +453,7 @@ bool ParseSsData(rtc::BitBuffer* parser, RTPVideoHeaderVP9* vp9) {
vp9->gof.temporal_up_switch[i] = u_bit ? true : false;
vp9->gof.num_ref_pics[i] = r;
- for (size_t p = 0; p < vp9->gof.num_ref_pics[i]; ++p) {
+ for (uint8_t p = 0; p < vp9->gof.num_ref_pics[i]; ++p) {
uint8_t p_diff;
RETURN_FALSE_ON_ERROR(parser->ReadUInt8(&p_diff));
vp9->gof.pid_diff[i][p] = p_diff;
@@ -604,7 +591,7 @@ bool RtpPacketizerVp9::NextPacket(uint8_t* buffer,
// +-+-+-+-+-+-+-+-+
// M: | EXTENDED PID | (RECOMMENDED)
// +-+-+-+-+-+-+-+-+
-// L: |GOF_IDX| S |D| (CONDITIONALLY RECOMMENDED)
+// L: | T |U| S |D| (CONDITIONALLY RECOMMENDED)
// +-+-+-+-+-+-+-+-+
// | TL0PICIDX | (CONDITIONALLY REQUIRED)
// +-+-+-+-+-+-+-+-+
@@ -738,7 +725,8 @@ bool RtpDepacketizerVp9::Parse(ParsedPayload* parsed_payload,
parsed_payload->type.Video.height = vp9->height[0];
}
}
- parsed_payload->type.Video.isFirstPacket = b_bit && (vp9->spatial_idx == 0);
+ parsed_payload->type.Video.isFirstPacket =
+ b_bit && (!l_bit || !vp9->inter_layer_predicted);
uint64_t rem_bits = parser.RemainingBitCount();
assert(rem_bits % 8 == 0);
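The depacketizer change above stops keying first-packet detection off spatial_idx: a packet now counts as the first packet of a frame when the B bit is set and the packet either carries no layer info or is not inter-layer predicted. Restated as a hypothetical standalone predicate, not part of the patch:

    // First packet of a frame: beginning-of-frame (B) bit set, and not an
    // upper spatial layer predicted from the layer below.
    bool IsFirstPacketOfFrame(bool b_bit, bool l_bit, bool inter_layer_predicted) {
      return b_bit && (!l_bit || !inter_layer_predicted);
    }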
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_format_vp9.h b/webrtc/modules/rtp_rtcp/source/rtp_format_vp9.h
index abce7e7791..3feca4392a 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_format_vp9.h
+++ b/webrtc/modules/rtp_rtcp/source/rtp_format_vp9.h
@@ -25,7 +25,7 @@
#include <string>
#include "webrtc/base/constructormagic.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format.h"
#include "webrtc/typedefs.h"
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_format_vp9_unittest.cc b/webrtc/modules/rtp_rtcp/source/rtp_format_vp9_unittest.cc
index 66ab5cdb71..5bbafe459d 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_format_vp9_unittest.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_format_vp9_unittest.cc
@@ -55,7 +55,7 @@ void VerifyHeader(const RTPVideoHeaderVP9& expected,
actual.gof.temporal_up_switch[i]);
EXPECT_EQ(expected.gof.temporal_idx[i], actual.gof.temporal_idx[i]);
EXPECT_EQ(expected.gof.num_ref_pics[i], actual.gof.num_ref_pics[i]);
- for (size_t j = 0; j < expected.gof.num_ref_pics[i]; j++) {
+ for (uint8_t j = 0; j < expected.gof.num_ref_pics[i]; j++) {
EXPECT_EQ(expected.gof.pid_diff[i][j], actual.gof.pid_diff[i][j]);
}
}
@@ -112,7 +112,7 @@ void ParseAndCheckPacket(const uint8_t* packet,
// +-+-+-+-+-+-+-+-+
// M: | EXTENDED PID | (RECOMMENDED)
// +-+-+-+-+-+-+-+-+
-// L: |GOF_IDX| S |D| (CONDITIONALLY RECOMMENDED)
+// L: | T |U| S |D| (CONDITIONALLY RECOMMENDED)
// +-+-+-+-+-+-+-+-+
// | TL0PICIDX | (CONDITIONALLY REQUIRED)
// +-+-+-+-+-+-+-+-+
@@ -255,7 +255,8 @@ TEST_F(RtpPacketizerVp9Test, TestLayerInfoWithNonFlexibleMode) {
const size_t kFrameSize = 30;
const size_t kPacketSize = 25;
- expected_.gof_idx = 3;
+ expected_.temporal_idx = 3;
+ expected_.temporal_up_switch = true; // U
expected_.num_spatial_layers = 3;
expected_.spatial_idx = 2;
expected_.inter_layer_predicted = true; // D
@@ -264,9 +265,9 @@ TEST_F(RtpPacketizerVp9Test, TestLayerInfoWithNonFlexibleMode) {
// Two packets:
// | I:0, P:0, L:1, F:0, B:1, E:0, V:0 | (3hdr + 15 payload)
- // L: | GOF_IDX:3, S:2, D:1 | TL0PICIDX:117 |
+ // L: | T:3, U:1, S:2, D:1 | TL0PICIDX:117 |
// | I:0, P:0, L:1, F:0, B:0, E:1, V:0 | (3hdr + 15 payload)
- // L: | GOF_IDX:3, S:2, D:1 | TL0PICIDX:117 |
+ // L: | T:3, U:1, S:2, D:1 | TL0PICIDX:117 |
const size_t kExpectedHdrSizes[] = {3, 3};
const size_t kExpectedSizes[] = {18, 18};
const size_t kExpectedNum = GTEST_ARRAY_SIZE_(kExpectedSizes);
@@ -505,16 +506,20 @@ TEST_F(RtpDepacketizerVp9Test, ParseTwoBytePictureId) {
TEST_F(RtpDepacketizerVp9Test, ParseLayerInfoWithNonFlexibleMode) {
const uint8_t kHeaderLength = 3;
- const uint8_t kGofIdx = 7;
+ const uint8_t kTemporalIdx = 2;
+ const uint8_t kUbit = 1;
const uint8_t kSpatialIdx = 1;
const uint8_t kDbit = 1;
const uint8_t kTl0PicIdx = 17;
uint8_t packet[13] = {0};
packet[0] = 0x20; // I:0 P:0 L:1 F:0 B:0 E:0 V:0 R:0
- packet[1] = (kGofIdx << 4) | (kSpatialIdx << 1) | kDbit; // GOF_IDX:7 S:1 D:1
- packet[2] = kTl0PicIdx; // TL0PICIDX:17
+ packet[1] = (kTemporalIdx << 5) | (kUbit << 4) | (kSpatialIdx << 1) | kDbit;
+ packet[2] = kTl0PicIdx;
- expected_.gof_idx = kGofIdx;
+ // T:2 U:1 S:1 D:1
+ // TL0PICIDX:17
+ expected_.temporal_idx = kTemporalIdx;
+ expected_.temporal_up_switch = kUbit ? true : false;
expected_.spatial_idx = kSpatialIdx;
expected_.inter_layer_predicted = kDbit ? true : false;
expected_.tl0_pic_idx = kTl0PicIdx;
@@ -545,9 +550,9 @@ TEST_F(RtpDepacketizerVp9Test, ParseLayerInfoWithFlexibleMode) {
TEST_F(RtpDepacketizerVp9Test, ParseRefIdx) {
const uint8_t kHeaderLength = 6;
const int16_t kPictureId = 17;
- const int16_t kPdiff1 = 17;
- const int16_t kPdiff2 = 18;
- const int16_t kPdiff3 = 127;
+ const uint8_t kPdiff1 = 17;
+ const uint8_t kPdiff2 = 18;
+ const uint8_t kPdiff3 = 127;
uint8_t packet[13] = {0};
packet[0] = 0xD8; // I:1 P:1 L:0 F:1 B:1 E:0 V:0 R:0
packet[1] = 0x80 | ((kPictureId >> 8) & 0x7F); // Two byte pictureID.
@@ -577,7 +582,7 @@ TEST_F(RtpDepacketizerVp9Test, ParseRefIdx) {
}
TEST_F(RtpDepacketizerVp9Test, ParseRefIdxFailsWithNoPictureId) {
- const int16_t kPdiff = 3;
+ const uint8_t kPdiff = 3;
uint8_t packet[13] = {0};
packet[0] = 0x58; // I:0 P:1 L:0 F:1 B:1 E:0 V:0 R:0
packet[1] = (kPdiff << 1); // P,F: P_DIFF:3 N:0
@@ -587,7 +592,7 @@ TEST_F(RtpDepacketizerVp9Test, ParseRefIdxFailsWithNoPictureId) {
}
TEST_F(RtpDepacketizerVp9Test, ParseRefIdxFailsWithTooManyRefPics) {
- const int16_t kPdiff = 3;
+ const uint8_t kPdiff = 3;
uint8_t packet[13] = {0};
packet[0] = 0xD8; // I:1 P:1 L:0 F:1 B:1 E:0 V:0 R:0
packet[1] = kMaxOneBytePictureId; // I: PICTURE ID:127
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_header_extension.h b/webrtc/modules/rtp_rtcp/source/rtp_header_extension.h
index 7be3c2e5c4..342e38a1f2 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_header_extension.h
+++ b/webrtc/modules/rtp_rtcp/source/rtp_header_extension.h
@@ -8,12 +8,12 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_RTP_RTCP_RTP_HEADER_EXTENSION_H_
-#define WEBRTC_MODULES_RTP_RTCP_RTP_HEADER_EXTENSION_H_
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_HEADER_EXTENSION_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_HEADER_EXTENSION_H_
#include <map>
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -28,7 +28,7 @@ const size_t kVideoRotationLength = 2;
const size_t kTransportSequenceNumberLength = 3;
struct HeaderExtension {
- HeaderExtension(RTPExtensionType extension_type)
+ explicit HeaderExtension(RTPExtensionType extension_type)
: type(extension_type), length(0), active(true) {
Init();
}
@@ -112,6 +112,7 @@ class RtpHeaderExtensionMap {
int32_t Register(const RTPExtensionType type, const uint8_t id, bool active);
std::map<uint8_t, HeaderExtension*> extensionMap_;
};
-}
-#endif // WEBRTC_MODULES_RTP_RTCP_RTP_HEADER_EXTENSION_H_
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_HEADER_EXTENSION_H_
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_header_extension_unittest.cc b/webrtc/modules/rtp_rtcp/source/rtp_header_extension_unittest.cc
index 520cf7a962..ca37750621 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_header_extension_unittest.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_header_extension_unittest.cc
@@ -15,7 +15,7 @@
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_header_extension.h"
#include "webrtc/typedefs.h"
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_header_parser.cc b/webrtc/modules/rtp_rtcp/source/rtp_header_parser.cc
index 266bad8858..d4cbe544cc 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_header_parser.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_header_parser.cc
@@ -7,7 +7,7 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_header_extension.h"
@@ -58,7 +58,7 @@ bool RtpHeaderParserImpl::Parse(const uint8_t* packet,
rtp_header_extension_map_.GetCopy(&map);
}
- const bool valid_rtpheader = rtp_parser.Parse(*header, &map);
+ const bool valid_rtpheader = rtp_parser.Parse(header, &map);
if (!valid_rtpheader) {
return false;
}
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc b/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc
index aa941d63ff..49f9d8530a 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc
@@ -13,6 +13,8 @@
#include <assert.h>
#include <stdlib.h>
#include <string.h> // memset
+
+#include <algorithm>
#include <limits>
#include <set>
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_packet_history.h b/webrtc/modules/rtp_rtcp/source/rtp_packet_history.h
index e97d11eeaa..8e1a732b19 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_packet_history.h
+++ b/webrtc/modules/rtp_rtcp/source/rtp_packet_history.h
@@ -10,14 +10,14 @@
* Class for storing RTP packets.
*/
-#ifndef WEBRTC_MODULES_RTP_RTCP_RTP_PACKET_HISTORY_H_
-#define WEBRTC_MODULES_RTP_RTCP_RTP_PACKET_HISTORY_H_
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_PACKET_HISTORY_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_PACKET_HISTORY_H_
#include <vector>
#include "webrtc/base/thread_annotations.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -29,7 +29,7 @@ static const size_t kMaxHistoryCapacity = 9600;
class RTPPacketHistory {
public:
- RTPPacketHistory(Clock* clock);
+ explicit RTPPacketHistory(Clock* clock);
~RTPPacketHistory();
void SetStorePacketsStatus(bool enable, uint16_t number_to_store);
@@ -101,4 +101,4 @@ class RTPPacketHistory {
std::vector<StoredPacket> stored_packets_ GUARDED_BY(critsect_);
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_RTP_RTCP_RTP_PACKET_HISTORY_H_
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_PACKET_HISTORY_H_
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc b/webrtc/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc
index 00a6ac7ed2..a406d8bc9b 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc
@@ -12,10 +12,9 @@
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_packet_history.h"
#include "webrtc/system_wrappers/include/clock.h"
-#include "webrtc/video_engine/vie_defines.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -29,7 +28,7 @@ class RtpPacketHistoryTest : public ::testing::Test {
~RtpPacketHistoryTest() {
delete hist_;
}
-
+
SimulatedClock fake_clock_;
RTPPacketHistory* hist_;
enum {kPayload = 127};
@@ -54,7 +53,7 @@ class RtpPacketHistoryTest : public ::testing::Test {
array[(*cur_pos)++] = ssrc >> 16;
array[(*cur_pos)++] = ssrc >> 8;
array[(*cur_pos)++] = ssrc;
- }
+ }
};
TEST_F(RtpPacketHistoryTest, SetStoreStatus) {
@@ -268,6 +267,7 @@ TEST_F(RtpPacketHistoryTest, DynamicExpansion) {
}
TEST_F(RtpPacketHistoryTest, FullExpansion) {
+ static const int kSendSidePacketHistorySize = 600;
hist_->SetStorePacketsStatus(true, kSendSidePacketHistorySize);
size_t len;
int64_t capture_time_ms = fake_clock_.TimeInMilliseconds();
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_payload_registry.cc b/webrtc/modules/rtp_rtcp/source/rtp_payload_registry.cc
index 38d1450c23..ce0bcd7fed 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_payload_registry.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_payload_registry.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h"
#include "webrtc/base/logging.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
@@ -40,7 +40,7 @@ int32_t RTPPayloadRegistry::RegisterReceivePayload(
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
const int8_t payload_type,
const uint32_t frequency,
- const uint8_t channels,
+ const size_t channels,
const uint32_t rate,
bool* created_new_payload) {
assert(payload_type >= 0);
@@ -139,7 +139,7 @@ void RTPPayloadRegistry::DeregisterAudioCodecOrRedTypeRegardlessOfPayloadType(
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
const size_t payload_name_length,
const uint32_t frequency,
- const uint8_t channels,
+ const size_t channels,
const uint32_t rate) {
RtpUtility::PayloadTypeMap::iterator iterator = payload_type_map_.begin();
for (; iterator != payload_type_map_.end(); ++iterator) {
@@ -171,7 +171,7 @@ void RTPPayloadRegistry::DeregisterAudioCodecOrRedTypeRegardlessOfPayloadType(
int32_t RTPPayloadRegistry::ReceivePayloadType(
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
const uint32_t frequency,
- const uint8_t channels,
+ const size_t channels,
const uint32_t rate,
int8_t* payload_type) const {
assert(payload_type);
@@ -343,17 +343,16 @@ bool RTPPayloadRegistry::GetPayloadSpecifics(uint8_t payload_type,
int RTPPayloadRegistry::GetPayloadTypeFrequency(
uint8_t payload_type) const {
- RtpUtility::Payload* payload;
- if (!PayloadTypeToPayload(payload_type, payload)) {
+ const RtpUtility::Payload* payload = PayloadTypeToPayload(payload_type);
+ if (!payload) {
return -1;
}
CriticalSectionScoped cs(crit_sect_.get());
return rtp_payload_strategy_->GetPayloadTypeFrequency(*payload);
}
-bool RTPPayloadRegistry::PayloadTypeToPayload(
- const uint8_t payload_type,
- RtpUtility::Payload*& payload) const {
+const RtpUtility::Payload* RTPPayloadRegistry::PayloadTypeToPayload(
+ uint8_t payload_type) const {
CriticalSectionScoped cs(crit_sect_.get());
RtpUtility::PayloadTypeMap::const_iterator it =
@@ -361,11 +360,10 @@ bool RTPPayloadRegistry::PayloadTypeToPayload(
// Check that this is a registered payload type.
if (it == payload_type_map_.end()) {
- return false;
+ return nullptr;
}
- payload = it->second;
- return true;
+ return it->second;
}
void RTPPayloadRegistry::SetIncomingPayloadType(const RTPHeader& header) {
@@ -390,7 +388,7 @@ class RTPPayloadAudioStrategy : public RTPPayloadStrategy {
bool PayloadIsCompatible(const RtpUtility::Payload& payload,
const uint32_t frequency,
- const uint8_t channels,
+ const size_t channels,
const uint32_t rate) const override {
return
payload.audio &&
@@ -409,7 +407,7 @@ class RTPPayloadAudioStrategy : public RTPPayloadStrategy {
const char payloadName[RTP_PAYLOAD_NAME_SIZE],
const int8_t payloadType,
const uint32_t frequency,
- const uint8_t channels,
+ const size_t channels,
const uint32_t rate) const override {
RtpUtility::Payload* payload = new RtpUtility::Payload;
payload->name[RTP_PAYLOAD_NAME_SIZE - 1] = 0;
@@ -433,7 +431,7 @@ class RTPPayloadVideoStrategy : public RTPPayloadStrategy {
bool PayloadIsCompatible(const RtpUtility::Payload& payload,
const uint32_t frequency,
- const uint8_t channels,
+ const size_t channels,
const uint32_t rate) const override {
return !payload.audio;
}
@@ -447,7 +445,7 @@ class RTPPayloadVideoStrategy : public RTPPayloadStrategy {
const char payloadName[RTP_PAYLOAD_NAME_SIZE],
const int8_t payloadType,
const uint32_t frequency,
- const uint8_t channels,
+ const size_t channels,
const uint32_t rate) const override {
RtpVideoCodecTypes videoType = kRtpVideoGeneric;
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_payload_registry_unittest.cc b/webrtc/modules/rtp_rtcp/source/rtp_payload_registry_unittest.cc
index 0b9bf2751e..b73666d1af 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_payload_registry_unittest.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_payload_registry_unittest.cc
@@ -8,12 +8,12 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
#include "webrtc/modules/rtp_rtcp/source/mock/mock_rtp_payload_strategy.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
@@ -25,7 +25,7 @@ using ::testing::Return;
using ::testing::_;
static const char* kTypicalPayloadName = "name";
-static const uint8_t kTypicalChannels = 1;
+static const size_t kTypicalChannels = 1;
static const int kTypicalFrequency = 44000;
static const int kTypicalRate = 32 * 1024;
@@ -52,10 +52,9 @@ class RtpPayloadRegistryTest : public ::testing::Test {
RtpUtility::Payload* returned_payload_on_heap =
new RtpUtility::Payload(returned_payload);
EXPECT_CALL(*mock_payload_strategy_,
- CreatePayloadType(kTypicalPayloadName, payload_type,
- kTypicalFrequency,
- kTypicalChannels,
- rate)).WillOnce(Return(returned_payload_on_heap));
+ CreatePayloadType(kTypicalPayloadName, payload_type,
+ kTypicalFrequency, kTypicalChannels, rate))
+ .WillOnce(Return(returned_payload_on_heap));
return returned_payload_on_heap;
}
@@ -70,14 +69,14 @@ TEST_F(RtpPayloadRegistryTest, RegistersAndRemembersPayloadsUntilDeregistered) {
bool new_payload_created = false;
EXPECT_EQ(0, rtp_payload_registry_->RegisterReceivePayload(
- kTypicalPayloadName, payload_type, kTypicalFrequency, kTypicalChannels,
- kTypicalRate, &new_payload_created));
+ kTypicalPayloadName, payload_type, kTypicalFrequency,
+ kTypicalChannels, kTypicalRate, &new_payload_created));
EXPECT_TRUE(new_payload_created) << "A new payload WAS created.";
- RtpUtility::Payload* retrieved_payload = NULL;
- EXPECT_TRUE(rtp_payload_registry_->PayloadTypeToPayload(payload_type,
- retrieved_payload));
+ const RtpUtility::Payload* retrieved_payload =
+ rtp_payload_registry_->PayloadTypeToPayload(payload_type);
+ EXPECT_TRUE(retrieved_payload);
// We should get back the exact pointer to the payload returned by the
// payload strategy.
@@ -85,32 +84,30 @@ TEST_F(RtpPayloadRegistryTest, RegistersAndRemembersPayloadsUntilDeregistered) {
// Now forget about it and verify it's gone.
EXPECT_EQ(0, rtp_payload_registry_->DeRegisterReceivePayload(payload_type));
- EXPECT_FALSE(rtp_payload_registry_->PayloadTypeToPayload(
- payload_type, retrieved_payload));
+ EXPECT_FALSE(rtp_payload_registry_->PayloadTypeToPayload(payload_type));
}
TEST_F(RtpPayloadRegistryTest, AudioRedWorkProperly) {
const uint8_t kRedPayloadType = 127;
const int kRedSampleRate = 8000;
- const int kRedChannels = 1;
+ const size_t kRedChannels = 1;
const int kRedBitRate = 0;
// This creates an audio RTP payload strategy.
- rtp_payload_registry_.reset(new RTPPayloadRegistry(
- RTPPayloadStrategy::CreateStrategy(true)));
+ rtp_payload_registry_.reset(
+ new RTPPayloadRegistry(RTPPayloadStrategy::CreateStrategy(true)));
bool new_payload_created = false;
EXPECT_EQ(0, rtp_payload_registry_->RegisterReceivePayload(
- "red", kRedPayloadType, kRedSampleRate, kRedChannels, kRedBitRate,
- &new_payload_created));
+ "red", kRedPayloadType, kRedSampleRate, kRedChannels,
+ kRedBitRate, &new_payload_created));
EXPECT_TRUE(new_payload_created);
EXPECT_EQ(kRedPayloadType, rtp_payload_registry_->red_payload_type());
- RtpUtility::Payload* retrieved_payload = NULL;
- EXPECT_TRUE(rtp_payload_registry_->PayloadTypeToPayload(kRedPayloadType,
- retrieved_payload));
- ASSERT_TRUE(retrieved_payload);
+ const RtpUtility::Payload* retrieved_payload =
+ rtp_payload_registry_->PayloadTypeToPayload(kRedPayloadType);
+ EXPECT_TRUE(retrieved_payload);
EXPECT_TRUE(retrieved_payload->audio);
EXPECT_STRCASEEQ("red", retrieved_payload->name);
@@ -127,27 +124,29 @@ TEST_F(RtpPayloadRegistryTest,
RtpUtility::Payload* first_payload_on_heap =
ExpectReturnOfTypicalAudioPayload(payload_type, kTypicalRate);
EXPECT_EQ(0, rtp_payload_registry_->RegisterReceivePayload(
- kTypicalPayloadName, payload_type, kTypicalFrequency, kTypicalChannels,
- kTypicalRate, &ignored));
+ kTypicalPayloadName, payload_type, kTypicalFrequency,
+ kTypicalChannels, kTypicalRate, &ignored));
EXPECT_EQ(-1, rtp_payload_registry_->RegisterReceivePayload(
- kTypicalPayloadName, payload_type, kTypicalFrequency, kTypicalChannels,
- kTypicalRate, &ignored)) << "Adding same codec twice = bad.";
+ kTypicalPayloadName, payload_type, kTypicalFrequency,
+ kTypicalChannels, kTypicalRate, &ignored))
+ << "Adding same codec twice = bad.";
RtpUtility::Payload* second_payload_on_heap =
ExpectReturnOfTypicalAudioPayload(payload_type - 1, kTypicalRate);
EXPECT_EQ(0, rtp_payload_registry_->RegisterReceivePayload(
- kTypicalPayloadName, payload_type - 1, kTypicalFrequency,
- kTypicalChannels, kTypicalRate, &ignored)) <<
- "With a different payload type is fine though.";
+ kTypicalPayloadName, payload_type - 1, kTypicalFrequency,
+ kTypicalChannels, kTypicalRate, &ignored))
+ << "With a different payload type is fine though.";
// Ensure both payloads are preserved.
- RtpUtility::Payload* retrieved_payload = NULL;
- EXPECT_TRUE(rtp_payload_registry_->PayloadTypeToPayload(payload_type,
- retrieved_payload));
+ const RtpUtility::Payload* retrieved_payload =
+ rtp_payload_registry_->PayloadTypeToPayload(payload_type);
+ EXPECT_TRUE(retrieved_payload);
EXPECT_EQ(first_payload_on_heap, retrieved_payload);
- EXPECT_TRUE(rtp_payload_registry_->PayloadTypeToPayload(payload_type - 1,
- retrieved_payload));
+ retrieved_payload =
+ rtp_payload_registry_->PayloadTypeToPayload(payload_type - 1);
+ EXPECT_TRUE(retrieved_payload);
EXPECT_EQ(second_payload_on_heap, retrieved_payload);
// Ok, update the rate for one of the codecs. If either the incoming rate or
@@ -158,8 +157,8 @@ TEST_F(RtpPayloadRegistryTest,
EXPECT_CALL(*mock_payload_strategy_,
UpdatePayloadRate(first_payload_on_heap, kTypicalRate));
EXPECT_EQ(0, rtp_payload_registry_->RegisterReceivePayload(
- kTypicalPayloadName, payload_type, kTypicalFrequency, kTypicalChannels,
- kTypicalRate, &ignored));
+ kTypicalPayloadName, payload_type, kTypicalFrequency,
+ kTypicalChannels, kTypicalRate, &ignored));
}
TEST_F(RtpPayloadRegistryTest,
@@ -174,35 +173,31 @@ TEST_F(RtpPayloadRegistryTest,
bool ignored = false;
ExpectReturnOfTypicalAudioPayload(payload_type, kTypicalRate);
EXPECT_EQ(0, rtp_payload_registry_->RegisterReceivePayload(
- kTypicalPayloadName, payload_type, kTypicalFrequency, kTypicalChannels,
- kTypicalRate, &ignored));
+ kTypicalPayloadName, payload_type, kTypicalFrequency,
+ kTypicalChannels, kTypicalRate, &ignored));
ExpectReturnOfTypicalAudioPayload(payload_type - 1, kTypicalRate);
EXPECT_EQ(0, rtp_payload_registry_->RegisterReceivePayload(
- kTypicalPayloadName, payload_type - 1, kTypicalFrequency,
- kTypicalChannels, kTypicalRate, &ignored));
+ kTypicalPayloadName, payload_type - 1, kTypicalFrequency,
+ kTypicalChannels, kTypicalRate, &ignored));
- RtpUtility::Payload* retrieved_payload = NULL;
- EXPECT_FALSE(rtp_payload_registry_->PayloadTypeToPayload(
- payload_type, retrieved_payload)) << "The first payload should be "
- "deregistered because the only thing that differs is payload type.";
- EXPECT_TRUE(rtp_payload_registry_->PayloadTypeToPayload(
- payload_type - 1, retrieved_payload)) <<
- "The second payload should still be registered though.";
+ EXPECT_FALSE(rtp_payload_registry_->PayloadTypeToPayload(payload_type))
+ << "The first payload should be "
+ "deregistered because the only thing that differs is payload type.";
+ EXPECT_TRUE(rtp_payload_registry_->PayloadTypeToPayload(payload_type - 1))
+ << "The second payload should still be registered though.";
// Now ensure non-compatible codecs aren't removed.
ON_CALL(*mock_payload_strategy_, PayloadIsCompatible(_, _, _, _))
.WillByDefault(Return(false));
ExpectReturnOfTypicalAudioPayload(payload_type + 1, kTypicalRate);
EXPECT_EQ(0, rtp_payload_registry_->RegisterReceivePayload(
- kTypicalPayloadName, payload_type + 1, kTypicalFrequency,
- kTypicalChannels, kTypicalRate, &ignored));
-
- EXPECT_TRUE(rtp_payload_registry_->PayloadTypeToPayload(
- payload_type - 1, retrieved_payload)) <<
- "Not compatible; both payloads should be kept.";
- EXPECT_TRUE(rtp_payload_registry_->PayloadTypeToPayload(
- payload_type + 1, retrieved_payload)) <<
- "Not compatible; both payloads should be kept.";
+ kTypicalPayloadName, payload_type + 1, kTypicalFrequency,
+ kTypicalChannels, kTypicalRate, &ignored));
+
+ EXPECT_TRUE(rtp_payload_registry_->PayloadTypeToPayload(payload_type - 1))
+ << "Not compatible; both payloads should be kept.";
+ EXPECT_TRUE(rtp_payload_registry_->PayloadTypeToPayload(payload_type + 1))
+ << "Not compatible; both payloads should be kept.";
}
TEST_F(RtpPayloadRegistryTest,
@@ -218,18 +213,17 @@ TEST_F(RtpPayloadRegistryTest,
bool ignored;
ExpectReturnOfTypicalAudioPayload(34, kTypicalRate);
EXPECT_EQ(0, rtp_payload_registry_->RegisterReceivePayload(
- kTypicalPayloadName, 34, kTypicalFrequency, kTypicalChannels,
- kTypicalRate, &ignored));
+ kTypicalPayloadName, 34, kTypicalFrequency, kTypicalChannels,
+ kTypicalRate, &ignored));
EXPECT_EQ(-1, rtp_payload_registry_->last_received_payload_type());
media_type_unchanged = rtp_payload_registry_->ReportMediaPayloadType(18);
EXPECT_FALSE(media_type_unchanged);
}
-class ParameterizedRtpPayloadRegistryTest :
- public RtpPayloadRegistryTest,
- public ::testing::WithParamInterface<int> {
-};
+class ParameterizedRtpPayloadRegistryTest
+ : public RtpPayloadRegistryTest,
+ public ::testing::WithParamInterface<int> {};
TEST_P(ParameterizedRtpPayloadRegistryTest,
FailsToRegisterKnownPayloadsWeAreNotInterestedIn) {
@@ -237,26 +231,26 @@ TEST_P(ParameterizedRtpPayloadRegistryTest,
bool ignored;
EXPECT_EQ(-1, rtp_payload_registry_->RegisterReceivePayload(
- "whatever", static_cast<uint8_t>(payload_type), 19, 1, 17, &ignored));
+ "whatever", static_cast<uint8_t>(payload_type), 19, 1, 17,
+ &ignored));
}
INSTANTIATE_TEST_CASE_P(TestKnownBadPayloadTypes,
ParameterizedRtpPayloadRegistryTest,
testing::Values(64, 72, 73, 74, 75, 76, 77, 78, 79));
-class RtpPayloadRegistryGenericTest :
- public RtpPayloadRegistryTest,
- public ::testing::WithParamInterface<int> {
-};
+class RtpPayloadRegistryGenericTest
+ : public RtpPayloadRegistryTest,
+ public ::testing::WithParamInterface<int> {};
TEST_P(RtpPayloadRegistryGenericTest, RegisterGenericReceivePayloadType) {
int payload_type = GetParam();
bool ignored;
- EXPECT_EQ(0, rtp_payload_registry_->RegisterReceivePayload("generic-codec",
- static_cast<int8_t>(payload_type),
- 19, 1, 17, &ignored)); // dummy values, except for payload_type
+ EXPECT_EQ(0, rtp_payload_registry_->RegisterReceivePayload(
+ "generic-codec", static_cast<int8_t>(payload_type), 19, 1,
+ 17, &ignored)); // dummy values, except for payload_type
}
// Generates an RTX packet for the given length and original sequence number.
@@ -395,7 +389,8 @@ TEST_F(RtpPayloadRegistryTest, InvalidRtxConfiguration) {
TestRtxPacket(rtp_payload_registry_.get(), 106, 0, false);
}
-INSTANTIATE_TEST_CASE_P(TestDynamicRange, RtpPayloadRegistryGenericTest,
- testing::Range(96, 127+1));
+INSTANTIATE_TEST_CASE_P(TestDynamicRange,
+ RtpPayloadRegistryGenericTest,
+ testing::Range(96, 127 + 1));
} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.h b/webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.h
index 176852e01e..1dd07d1cc9 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.h
+++ b/webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.h
@@ -14,8 +14,8 @@
#include <set>
#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
#include "webrtc/typedefs.h"
@@ -42,9 +42,7 @@ class RTPReceiverAudio : public RTPReceiverStrategy,
// Is TelephoneEvent configured with payload type payload_type
bool TelephoneEventPayloadType(const int8_t payload_type) const;
- TelephoneEventHandler* GetTelephoneEventHandler() {
- return this;
- }
+ TelephoneEventHandler* GetTelephoneEventHandler() { return this; }
// Returns true if CNG is configured with payload type payload_type. If so,
// the frequency and cng_payload_type_has_changed are filled in.
@@ -96,13 +94,11 @@ class RTPReceiverAudio : public RTPReceiverStrategy,
int Energy(uint8_t array_of_energy[kRtpCsrcSize]) const override;
private:
-
- int32_t ParseAudioCodecSpecific(
- WebRtcRTPHeader* rtp_header,
- const uint8_t* payload_data,
- size_t payload_length,
- const AudioPayload& audio_specific,
- bool is_red);
+ int32_t ParseAudioCodecSpecific(WebRtcRTPHeader* rtp_header,
+ const uint8_t* payload_data,
+ size_t payload_length,
+ const AudioPayload& audio_specific,
+ bool is_red);
uint32_t last_received_frequency_;
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.cc b/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.cc
index e1ebd0c8bb..2e21f230d3 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.cc
@@ -16,13 +16,12 @@
#include <string.h>
#include "webrtc/base/logging.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
namespace webrtc {
-using RtpUtility::GetCurrentRTP;
using RtpUtility::Payload;
using RtpUtility::StringCompare;
@@ -97,7 +96,7 @@ int32_t RtpReceiverImpl::RegisterReceivePayload(
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
const int8_t payload_type,
const uint32_t frequency,
- const uint8_t channels,
+ const size_t channels,
const uint32_t rate) {
CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
@@ -170,7 +169,7 @@ bool RtpReceiverImpl::IncomingRtpPacket(
int8_t first_payload_byte = payload_length > 0 ? payload[0] : 0;
bool is_red = false;
- if (CheckPayloadChanged(rtp_header, first_payload_byte, is_red,
+ if (CheckPayloadChanged(rtp_header, first_payload_byte, &is_red,
&payload_specific) == -1) {
if (payload_length == 0) {
// OK, keep-alive packet.
@@ -253,7 +252,7 @@ void RtpReceiverImpl::CheckSSRCChanged(const RTPHeader& rtp_header) {
bool new_ssrc = false;
bool re_initialize_decoder = false;
char payload_name[RTP_PAYLOAD_NAME_SIZE];
- uint8_t channels = 1;
+ size_t channels = 1;
uint32_t rate = 0;
{
@@ -276,12 +275,11 @@ void RtpReceiverImpl::CheckSSRCChanged(const RTPHeader& rtp_header) {
if (rtp_header.payloadType == last_received_payload_type) {
re_initialize_decoder = true;
- Payload* payload;
- if (!rtp_payload_registry_->PayloadTypeToPayload(
- rtp_header.payloadType, payload)) {
+ const Payload* payload = rtp_payload_registry_->PayloadTypeToPayload(
+ rtp_header.payloadType);
+ if (!payload) {
return;
}
- assert(payload);
payload_name[RTP_PAYLOAD_NAME_SIZE - 1] = 0;
strncpy(payload_name, payload->name, RTP_PAYLOAD_NAME_SIZE - 1);
if (payload->audio) {
@@ -321,7 +319,7 @@ void RtpReceiverImpl::CheckSSRCChanged(const RTPHeader& rtp_header) {
// last known payload).
int32_t RtpReceiverImpl::CheckPayloadChanged(const RTPHeader& rtp_header,
const int8_t first_payload_byte,
- bool& is_red,
+ bool* is_red,
PayloadUnion* specific_payload) {
bool re_initialize_decoder = false;
@@ -339,7 +337,7 @@ int32_t RtpReceiverImpl::CheckPayloadChanged(const RTPHeader& rtp_header,
if (rtp_payload_registry_->red_payload_type() == payload_type) {
// Get the real codec payload type.
payload_type = first_payload_byte & 0x7f;
- is_red = true;
+ *is_red = true;
if (rtp_payload_registry_->red_payload_type() == payload_type) {
// Invalid payload type, traced by caller. If we proceeded here,
@@ -361,16 +359,16 @@ int32_t RtpReceiverImpl::CheckPayloadChanged(const RTPHeader& rtp_header,
&should_discard_changes);
if (should_discard_changes) {
- is_red = false;
+ *is_red = false;
return 0;
}
- Payload* payload;
- if (!rtp_payload_registry_->PayloadTypeToPayload(payload_type, payload)) {
+ const Payload* payload =
+ rtp_payload_registry_->PayloadTypeToPayload(payload_type);
+ if (!payload) {
// Not a registered payload type.
return -1;
}
- assert(payload);
payload_name[RTP_PAYLOAD_NAME_SIZE - 1] = 0;
strncpy(payload_name, payload->name, RTP_PAYLOAD_NAME_SIZE - 1);
@@ -391,7 +389,7 @@ int32_t RtpReceiverImpl::CheckPayloadChanged(const RTPHeader& rtp_header,
}
} else {
rtp_media_receiver_->GetLastMediaSpecificPayload(specific_payload);
- is_red = false;
+ *is_red = false;
}
} // End critsect.
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.h b/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.h
index 46741d59b4..5cf94c2859 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.h
+++ b/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.h
@@ -12,8 +12,8 @@
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_IMPL_H_
#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/typedefs.h"
@@ -36,7 +36,7 @@ class RtpReceiverImpl : public RtpReceiver {
int32_t RegisterReceivePayload(const char payload_name[RTP_PAYLOAD_NAME_SIZE],
const int8_t payload_type,
const uint32_t frequency,
- const uint8_t channels,
+ const size_t channels,
const uint32_t rate) override;
int32_t DeRegisterReceivePayload(const int8_t payload_type) override;
@@ -71,7 +71,7 @@ class RtpReceiverImpl : public RtpReceiver {
void CheckCSRC(const WebRtcRTPHeader& rtp_header);
int32_t CheckPayloadChanged(const RTPHeader& rtp_header,
const int8_t first_payload_byte,
- bool& is_red,
+ bool* is_red,
PayloadUnion* payload);
Clock* clock_;
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h b/webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h
index 37c3e6e49a..0f7ad30e87 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h
+++ b/webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h
@@ -12,8 +12,8 @@
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_STRATEGY_H_
#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/typedefs.h"
@@ -95,7 +95,7 @@ class RTPReceiverStrategy {
// Note: Implementations may call the callback for other reasons than calls
// to ParseRtpPacket, for instance if the implementation somehow recovers a
// packet.
- RTPReceiverStrategy(RtpData* data_callback);
+ explicit RTPReceiverStrategy(RtpData* data_callback);
rtc::scoped_ptr<CriticalSectionWrapper> crit_sect_;
PayloadUnion last_payload_;
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc b/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc
index 1af2d4857e..53051dd321 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc
@@ -16,8 +16,8 @@
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/trace_event.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_cvo.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_cvo.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.h b/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.h
index 23128df6e1..56f761a2e1 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.h
+++ b/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.h
@@ -12,7 +12,7 @@
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_VIDEO_H_
#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/bitrate.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_rtcp_config.h b/webrtc/modules/rtp_rtcp/source/rtp_rtcp_config.h
index 7cfebd91a8..a2cd52736f 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_rtcp_config.h
+++ b/webrtc/modules/rtp_rtcp/source/rtp_rtcp_config.h
@@ -13,36 +13,37 @@
// Configuration file for RTP utilities (RTPSender, RTPReceiver ...)
namespace webrtc {
-enum { NACK_BYTECOUNT_SIZE = 60}; // size of our NACK history
+enum { NACK_BYTECOUNT_SIZE = 60 }; // size of our NACK history
// A sanity for the NACK list parsing at the send-side.
enum { kSendSideNackListSizeSanity = 20000 };
enum { kDefaultMaxReorderingThreshold = 50 }; // In sequence numbers.
enum { kRtcpMaxNackFields = 253 };
-enum { RTCP_INTERVAL_VIDEO_MS = 1000 };
-enum { RTCP_INTERVAL_AUDIO_MS = 5000 };
-enum { RTCP_SEND_BEFORE_KEY_FRAME_MS= 100 };
-enum { RTCP_MAX_REPORT_BLOCKS = 31}; // RFC 3550 page 37
-enum { RTCP_MIN_FRAME_LENGTH_MS = 17};
-enum { kRtcpAppCode_DATA_SIZE = 32*4}; // multiple of 4, this is not a limitation of the size
-enum { RTCP_RPSI_DATA_SIZE = 30};
-enum { RTCP_NUMBER_OF_SR = 60 };
-
-enum { MAX_NUMBER_OF_TEMPORAL_ID = 8 }; // RFC
-enum { MAX_NUMBER_OF_DEPENDENCY_QUALITY_ID = 128 };// RFC
+enum { RTCP_INTERVAL_VIDEO_MS = 1000 };
+enum { RTCP_INTERVAL_AUDIO_MS = 5000 };
+enum { RTCP_SEND_BEFORE_KEY_FRAME_MS = 100 };
+enum { RTCP_MAX_REPORT_BLOCKS = 31 }; // RFC 3550 page 37
+enum { RTCP_MIN_FRAME_LENGTH_MS = 17 };
+enum {
+ kRtcpAppCode_DATA_SIZE = 32 * 4
+}; // multiple of 4, this is not a limitation of the size
+enum { RTCP_RPSI_DATA_SIZE = 30 };
+enum { RTCP_NUMBER_OF_SR = 60 };
+
+enum { MAX_NUMBER_OF_TEMPORAL_ID = 8 }; // RFC
+enum { MAX_NUMBER_OF_DEPENDENCY_QUALITY_ID = 128 }; // RFC
enum { MAX_NUMBER_OF_REMB_FEEDBACK_SSRCS = 255 };
-enum { BW_HISTORY_SIZE = 35};
+enum { BW_HISTORY_SIZE = 35 };
-#define MIN_AUDIO_BW_MANAGEMENT_BITRATE 6
-#define MIN_VIDEO_BW_MANAGEMENT_BITRATE 30
+#define MIN_AUDIO_BW_MANAGEMENT_BITRATE 6
+#define MIN_VIDEO_BW_MANAGEMENT_BITRATE 30
-enum { DTMF_OUTBAND_MAX = 20};
+enum { DTMF_OUTBAND_MAX = 20 };
enum { RTP_MAX_BURST_SLEEP_TIME = 500 };
enum { RTP_AUDIO_LEVEL_UNIQUE_ID = 0xbede };
-enum { RTP_MAX_PACKETS_PER_FRAME= 512 }; // must be multiple of 32
+enum { RTP_MAX_PACKETS_PER_FRAME = 512 }; // must be multiple of 32
} // namespace webrtc
-
-#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RTCP_CONFIG_H_
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RTCP_CONFIG_H_
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc b/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
index 451f8bfa93..450eed698e 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
@@ -183,8 +183,13 @@ int32_t ModuleRtpRtcpImpl::Process() {
set_rtt_ms(rtt_stats_->LastProcessedRtt());
}
- if (rtcp_sender_.TimeToSendRTCPReport())
- rtcp_sender_.SendRTCP(GetFeedbackState(), kRtcpReport);
+ // For sending streams, make sure not to send an SR before media has been sent.
+ if (rtcp_sender_.TimeToSendRTCPReport()) {
+ RTCPSender::FeedbackState state = GetFeedbackState();
+ // Prevent sending streams from sending an SR before any media has been sent.
+ if (!rtcp_sender_.Sending() || state.packets_sent > 0)
+ rtcp_sender_.SendRTCP(state, kRtcpReport);
+ }
if (UpdateRTCPReceiveInformationTimers()) {
// A receiver has timed out
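The Process() guard above means a module in sending mode holds back its sender report until the feedback state shows at least one RTP packet sent, while receive-only modules report as before. The condition, isolated as a hypothetical helper for clarity:

    #include <cstdint>

    // True when an RTCP report may go out now; mirrors the guard above.
    bool MaySendRtcpReport(bool sending, uint32_t packets_sent) {
      return !sending || packets_sent > 0;
    }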
@@ -402,6 +407,7 @@ int32_t ModuleRtpRtcpImpl::SendOutgoingData(
const RTPFragmentationHeader* fragmentation,
const RTPVideoHeader* rtp_video_hdr) {
rtcp_sender_.SetLastRtpTime(time_stamp, capture_time_ms);
+ // Make sure an RTCP report isn't queued behind a key frame.
if (rtcp_sender_.TimeToSendRTCPReport(kVideoFrameKey == frame_type)) {
rtcp_sender_.SendRTCP(GetFeedbackState(), kRtcpReport);
}
@@ -794,9 +800,8 @@ int32_t ModuleRtpRtcpImpl::SetSendREDPayloadType(
}
// Get payload type for Redundant Audio Data RFC 2198.
-int32_t ModuleRtpRtcpImpl::SendREDPayloadType(
- int8_t& payload_type) const {
- return rtp_sender_.RED(&payload_type);
+int32_t ModuleRtpRtcpImpl::SendREDPayloadType(int8_t* payload_type) const {
+ return rtp_sender_.RED(payload_type);
}
void ModuleRtpRtcpImpl::SetTargetSendBitrate(uint32_t bitrate_bps) {
@@ -832,11 +837,10 @@ void ModuleRtpRtcpImpl::SetGenericFECStatus(
rtp_sender_.SetGenericFECStatus(enable, payload_type_red, payload_type_fec);
}
-void ModuleRtpRtcpImpl::GenericFECStatus(bool& enable,
- uint8_t& payload_type_red,
- uint8_t& payload_type_fec) {
- rtp_sender_.GenericFECStatus(&enable, &payload_type_red,
- &payload_type_fec);
+void ModuleRtpRtcpImpl::GenericFECStatus(bool* enable,
+ uint8_t* payload_type_red,
+ uint8_t* payload_type_fec) {
+ rtp_sender_.GenericFECStatus(enable, payload_type_red, payload_type_fec);
}
int32_t ModuleRtpRtcpImpl::SetFecParameters(
@@ -952,8 +956,8 @@ bool ModuleRtpRtcpImpl::UpdateRTCPReceiveInformationTimers() {
}
// Called from RTCPsender.
-int32_t ModuleRtpRtcpImpl::BoundingSet(bool& tmmbr_owner,
- TMMBRSet*& bounding_set) {
+int32_t ModuleRtpRtcpImpl::BoundingSet(bool* tmmbr_owner,
+ TMMBRSet* bounding_set) {
return rtcp_receiver_.BoundingSet(tmmbr_owner, bounding_set);
}
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h b/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h
index c9b6686c0a..04e09c1217 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h
+++ b/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h
@@ -12,10 +12,12 @@
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RTCP_IMPL_H_
#include <list>
+#include <set>
+#include <utility>
#include <vector>
#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/source/packet_loss_stats.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_receiver.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_sender.h"
@@ -258,7 +260,7 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
int32_t SetSendREDPayloadType(int8_t payload_type) override;
// Get payload type for Redundant Audio Data RFC 2198.
- int32_t SendREDPayloadType(int8_t& payload_type) const override;
+ int32_t SendREDPayloadType(int8_t* payload_type) const override;
// Store the audio level in d_bov for header-extension-for-audio-level-
// indication.
@@ -280,9 +282,9 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
uint8_t payload_type_red,
uint8_t payload_type_fec) override;
- void GenericFECStatus(bool& enable,
- uint8_t& payload_type_red,
- uint8_t& payload_type_fec) override;
+ void GenericFECStatus(bool* enable,
+ uint8_t* payload_type_red,
+ uint8_t* payload_type_fec) override;
int32_t SetFecParameters(const FecProtectionParams* delta_params,
const FecProtectionParams* key_params) override;
@@ -293,7 +295,7 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
bool LastReceivedXrReferenceTimeInfo(RtcpReceiveTimeInfo* info) const;
- virtual int32_t BoundingSet(bool& tmmbr_owner, TMMBRSet*& bounding_set_rec);
+ int32_t BoundingSet(bool* tmmbr_owner, TMMBRSet* bounding_set_rec);
void BitrateSent(uint32_t* total_rate,
uint32_t* video_rate,
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc b/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc
index 4c94764ee6..8329f603f9 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc
@@ -8,13 +8,17 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <map>
+#include <set>
+
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/common_types.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/nack.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h"
#include "webrtc/system_wrappers/include/scoped_vector.h"
#include "webrtc/test/rtcp_packet_parser.h"
@@ -94,7 +98,7 @@ class SendTransport : public Transport,
class RtpRtcpModule : public RtcpPacketTypeCounterObserver {
public:
- RtpRtcpModule(SimulatedClock* clock)
+ explicit RtpRtcpModule(SimulatedClock* clock)
: receive_statistics_(ReceiveStatistics::Create(clock)) {
RtpRtcp::Configuration config;
config.audio = false;
@@ -346,6 +350,27 @@ TEST_F(RtpRtcpImplTest, RttForReceiverOnly) {
EXPECT_EQ(2 * kOneWayNetworkDelayMs, receiver_.impl_->rtt_ms());
}
+TEST_F(RtpRtcpImplTest, NoSrBeforeMedia) {
+ // Ignore fake transport delays in this test.
+ sender_.transport_.SimulateNetworkDelay(0, &clock_);
+ receiver_.transport_.SimulateNetworkDelay(0, &clock_);
+
+ sender_.impl_->Process();
+ EXPECT_EQ(-1, sender_.RtcpSent().first_packet_time_ms);
+
+ // Verify no SR is sent before media has been sent; an RR should still be
+ // sent from the receiving module, though.
+ clock_.AdvanceTimeMilliseconds(2000);
+ int64_t current_time = clock_.TimeInMilliseconds();
+ sender_.impl_->Process();
+ receiver_.impl_->Process();
+ EXPECT_EQ(-1, sender_.RtcpSent().first_packet_time_ms);
+ EXPECT_EQ(receiver_.RtcpSent().first_packet_time_ms, current_time);
+
+ SendFrame(&sender_, kBaseLayerTid);
+ EXPECT_EQ(sender_.RtcpSent().first_packet_time_ms, current_time);
+}
+
TEST_F(RtpRtcpImplTest, RtcpPacketTypeCounter_Nack) {
EXPECT_EQ(-1, receiver_.RtcpSent().first_packet_time_ms);
EXPECT_EQ(-1, sender_.RtcpReceived().first_packet_time_ms);
@@ -522,5 +547,4 @@ TEST_F(RtpRtcpImplTest, UniqueNackRequests) {
EXPECT_EQ(6U, sender_.RtcpReceived().unique_nack_requests);
EXPECT_EQ(75, sender_.RtcpReceived().UniqueNackRequestsInPercent());
}
-
} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_sender.cc b/webrtc/modules/rtp_rtcp/source/rtp_sender.cc
index 50f476829d..f4933afdd9 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_sender.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_sender.cc
@@ -11,15 +11,17 @@
#include "webrtc/modules/rtp_rtcp/source/rtp_sender.h"
#include <stdlib.h> // srand
+#include <algorithm>
#include <utility>
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/trace_event.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_cvo.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_cvo.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_sender_video.h"
+#include "webrtc/modules/rtp_rtcp/source/time_util.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/tick_util.h"
@@ -33,6 +35,7 @@ static const uint32_t kAbsSendTimeFraction = 18;
namespace {
const size_t kRtpHeaderLength = 12;
+const uint16_t kMaxInitRtpSeqNumber = 32767; // 2^15 - 1.
const char* FrameTypeToString(FrameType frame_type) {
switch (frame_type) {
@@ -125,6 +128,7 @@ RTPSender::RTPSender(
// TickTime.
clock_delta_ms_(clock_->TimeInMilliseconds() -
TickTime::MillisecondTimestamp()),
+ random_(clock_->TimeInMicroseconds()),
bitrates_(new BitrateAggregator(bitrate_callback)),
total_bitrate_sent_(clock, bitrates_->total_bitrate_observer()),
audio_configured_(audio),
@@ -182,8 +186,8 @@ RTPSender::RTPSender(
ssrc_rtx_ = ssrc_db_.CreateSSRC(); // Can't be 0.
bitrates_->set_ssrc(ssrc_);
// Random start, 16 bits. Can't be 0.
- sequence_number_rtx_ = static_cast<uint16_t>(rand() + 1) & 0x7FFF;
- sequence_number_ = static_cast<uint16_t>(rand() + 1) & 0x7FFF;
+ sequence_number_rtx_ = random_.Rand(1, kMaxInitRtpSeqNumber);
+ sequence_number_ = random_.Rand(1, kMaxInitRtpSeqNumber);
}
RTPSender::~RTPSender() {
@@ -292,7 +296,7 @@ int32_t RTPSender::RegisterPayload(
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
int8_t payload_number,
uint32_t frequency,
- uint8_t channels,
+ size_t channels,
uint32_t rate) {
assert(payload_name);
CriticalSectionScoped cs(send_critsect_.get());
@@ -323,11 +327,11 @@ int32_t RTPSender::RegisterPayload(
return -1;
}
int32_t ret_val = 0;
- RtpUtility::Payload* payload = NULL;
+ RtpUtility::Payload* payload = nullptr;
if (audio_configured_) {
// TODO(mflodman): Change to CreateAudioPayload and make static.
ret_val = audio_->RegisterAudioPayload(payload_name, payload_number,
- frequency, channels, rate, payload);
+ frequency, channels, rate, &payload);
} else {
payload = video_->CreateVideoPayload(payload_name, payload_number, rate);
}
@@ -452,7 +456,7 @@ int32_t RTPSender::CheckPayloadType(int8_t payload_type,
}
if (audio_configured_) {
int8_t red_pl_type = -1;
- if (audio_->RED(red_pl_type) == 0) {
+ if (audio_->RED(&red_pl_type) == 0) {
// We have configured RED.
if (red_pl_type == payload_type) {
// And it's a match...
@@ -469,7 +473,8 @@ int32_t RTPSender::CheckPayloadType(int8_t payload_type,
std::map<int8_t, RtpUtility::Payload*>::iterator it =
payload_type_map_.find(payload_type);
if (it == payload_type_map_.end()) {
- LOG(LS_WARNING) << "Payload type " << payload_type << " not registered.";
+ LOG(LS_WARNING) << "Payload type " << static_cast<int>(payload_type)
+ << " not registered.";
return -1;
}
SetSendPayloadType(payload_type);
@@ -512,7 +517,8 @@ int32_t RTPSender::SendOutgoingData(FrameType frame_type,
}
RtpVideoCodecTypes video_type = kRtpVideoGeneric;
if (CheckPayloadType(payload_type, &video_type) != 0) {
- LOG(LS_ERROR) << "Don't send data with unknown payload type.";
+ LOG(LS_ERROR) << "Don't send data with unknown payload type: "
+ << static_cast<int>(payload_type) << ".";
return -1;
}
@@ -573,7 +579,7 @@ size_t RTPSender::TrySendRedundantPayloads(size_t bytes_to_send) {
break;
RtpUtility::RtpHeaderParser rtp_parser(buffer, length);
RTPHeader rtp_header;
- rtp_parser.Parse(rtp_header);
+ rtp_parser.Parse(&rtp_header);
bytes_left -= static_cast<int>(length - rtp_header.headerLength);
}
return bytes_to_send - bytes_left;
@@ -583,8 +589,7 @@ void RTPSender::BuildPaddingPacket(uint8_t* packet,
size_t header_length,
size_t padding_length) {
packet[0] |= 0x20; // Set padding bit.
- int32_t *data =
- reinterpret_cast<int32_t *>(&(packet[header_length]));
+ int32_t* data = reinterpret_cast<int32_t*>(&(packet[header_length]));
// Fill data buffer with random data.
for (size_t j = 0; j < (padding_length >> 2); ++j) {
@@ -665,7 +670,7 @@ size_t RTPSender::SendPadData(size_t bytes,
RtpUtility::RtpHeaderParser rtp_parser(padding_packet, length);
RTPHeader rtp_header;
- rtp_parser.Parse(rtp_header);
+ rtp_parser.Parse(&rtp_header);
if (capture_time_ms > 0) {
UpdateTransmissionTimeOffset(
@@ -717,7 +722,7 @@ int32_t RTPSender::ReSendPacket(uint16_t packet_id, int64_t min_resend_time) {
if (paced_sender_) {
RtpUtility::RtpHeaderParser rtp_parser(data_buffer, length);
RTPHeader header;
- if (!rtp_parser.Parse(header)) {
+ if (!rtp_parser.Parse(&header)) {
assert(false);
return -1;
}
@@ -725,7 +730,7 @@ int32_t RTPSender::ReSendPacket(uint16_t packet_id, int64_t min_resend_time) {
// TickTime.
int64_t corrected_capture_tims_ms = capture_time_ms + clock_delta_ms_;
paced_sender_->InsertPacket(
- RtpPacketSender::kHighPriority, header.ssrc, header.sequenceNumber,
+ RtpPacketSender::kNormalPriority, header.ssrc, header.sequenceNumber,
corrected_capture_tims_ms, length - header.headerLength, true);
return length;
@@ -903,11 +908,11 @@ bool RTPSender::PrepareAndSendPacket(uint8_t* buffer,
int64_t capture_time_ms,
bool send_over_rtx,
bool is_retransmit) {
- uint8_t *buffer_to_send_ptr = buffer;
+ uint8_t* buffer_to_send_ptr = buffer;
RtpUtility::RtpHeaderParser rtp_parser(buffer, length);
RTPHeader rtp_header;
- rtp_parser.Parse(rtp_header);
+ rtp_parser.Parse(&rtp_header);
if (!is_retransmit && rtp_header.markerBit) {
TRACE_EVENT_ASYNC_END0(TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"), "PacedSend",
capture_time_ms);
@@ -996,14 +1001,14 @@ bool RTPSender::IsFecPacket(const uint8_t* buffer,
bool fec_enabled;
uint8_t pt_red;
uint8_t pt_fec;
- video_->GenericFECStatus(fec_enabled, pt_red, pt_fec);
+ video_->GenericFECStatus(&fec_enabled, &pt_red, &pt_fec);
return fec_enabled &&
header.payloadType == pt_red &&
buffer[header.headerLength] == pt_fec;
}
size_t RTPSender::TimeToSendPadding(size_t bytes) {
- if (bytes == 0)
+ if (audio_configured_ || bytes == 0)
return 0;
{
CriticalSectionScoped cs(send_critsect_.get());
@@ -1026,7 +1031,7 @@ int32_t RTPSender::SendToNetwork(uint8_t* buffer,
RtpUtility::RtpHeaderParser rtp_parser(buffer,
payload_length + rtp_header_length);
RTPHeader rtp_header;
- rtp_parser.Parse(rtp_header);
+ rtp_parser.Parse(&rtp_header);
int64_t now_ms = clock_->TimeInMilliseconds();
@@ -1169,7 +1174,7 @@ size_t RTPSender::CreateRtpHeader(uint8_t* header,
int32_t rtp_header_length = kRtpHeaderLength;
if (csrcs.size() > 0) {
- uint8_t *ptr = &header[rtp_header_length];
+ uint8_t* ptr = &header[rtp_header_length];
for (size_t i = 0; i < csrcs.size(); ++i) {
ByteWriter<uint32_t>::WriteBigEndian(ptr, csrcs[i]);
ptr += 4;
@@ -1638,7 +1643,7 @@ uint16_t RTPSender::UpdateTransportSequenceNumber(
void RTPSender::SetSendingStatus(bool enabled) {
if (enabled) {
uint32_t frequency_hz = SendPayloadFrequency();
- uint32_t RTPtime = RtpUtility::GetCurrentRTP(clock_, frequency_hz);
+ uint32_t RTPtime = CurrentRtp(*clock_, frequency_hz);
// Will be ignored if it's already configured via API.
SetStartTimestamp(RTPtime, false);
@@ -1653,8 +1658,7 @@ void RTPSender::SetSendingStatus(bool enabled) {
// Don't initialize seq number if SSRC passed externally.
if (!sequence_number_forced_ && !ssrc_forced_) {
// Generate a new sequence number.
- sequence_number_ =
- rand() / (RAND_MAX / MAX_INIT_RTP_SEQ_NUMBER); // NOLINT
+ sequence_number_ = random_.Rand(1, kMaxInitRtpSeqNumber);
}
}
}
@@ -1716,8 +1720,7 @@ void RTPSender::SetSSRC(uint32_t ssrc) {
ssrc_ = ssrc;
bitrates_->set_ssrc(ssrc_);
if (!sequence_number_forced_) {
- sequence_number_ =
- rand() / (RAND_MAX / MAX_INIT_RTP_SEQ_NUMBER); // NOLINT
+ sequence_number_ = random_.Rand(1, kMaxInitRtpSeqNumber);
}
}
@@ -1775,7 +1778,7 @@ int32_t RTPSender::RED(int8_t *payload_type) const {
if (!audio_configured_) {
return -1;
}
- return audio_->RED(*payload_type);
+ return audio_->RED(payload_type);
}
RtpVideoCodecTypes RTPSender::VideoCodecType() const {
@@ -1801,7 +1804,7 @@ void RTPSender::GenericFECStatus(bool* enable,
uint8_t* payload_type_red,
uint8_t* payload_type_fec) const {
RTC_DCHECK(!audio_configured_);
- video_->GenericFECStatus(*enable, *payload_type_red, *payload_type_fec);
+ video_->GenericFECStatus(enable, payload_type_red, payload_type_fec);
}
int32_t RTPSender::SetFecParameters(
@@ -1823,7 +1826,7 @@ void RTPSender::BuildRtxPacket(uint8_t* buffer, size_t* length,
reinterpret_cast<const uint8_t*>(buffer), *length);
RTPHeader rtp_header;
- rtp_parser.Parse(rtp_header);
+ rtp_parser.Parse(&rtp_header);
// Add original RTP header.
memcpy(data_buffer_rtx, buffer, rtp_header.headerLength);
@@ -1836,7 +1839,7 @@ void RTPSender::BuildRtxPacket(uint8_t* buffer, size_t* length,
}
// Replace sequence number.
- uint8_t *ptr = data_buffer_rtx + 2;
+ uint8_t* ptr = data_buffer_rtx + 2;
ByteWriter<uint16_t>::WriteBigEndian(ptr, sequence_number_rtx_++);
// Replace SSRC.
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_sender.h b/webrtc/modules/rtp_rtcp/source/rtp_sender.h
index a134370c76..3c62336507 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_sender.h
+++ b/webrtc/modules/rtp_rtcp/source/rtp_sender.h
@@ -10,14 +10,16 @@
#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_SENDER_H_
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_SENDER_H_
-#include <assert.h>
-#include <math.h>
+#include <list>
#include <map>
+#include <utility>
+#include <vector>
+#include "webrtc/base/random.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/common_types.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/bitrate.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_header_extension.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_packet_history.h"
@@ -26,8 +28,6 @@
#include "webrtc/modules/rtp_rtcp/source/ssrc_database.h"
#include "webrtc/transport.h"
-#define MAX_INIT_RTP_SEQ_NUMBER 32767 // 2^15 -1.
-
namespace webrtc {
class BitrateAggregator;
@@ -116,7 +116,7 @@ class RTPSender : public RTPSenderInterface {
int32_t RegisterPayload(
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
const int8_t payload_type, const uint32_t frequency,
- const uint8_t channels, const uint32_t rate);
+ const size_t channels, const uint32_t rate);
int32_t DeRegisterSendPayload(const int8_t payload_type);
@@ -163,7 +163,7 @@ class RTPSender : public RTPSenderInterface {
int32_t SetTransportSequenceNumber(uint16_t sequence_number);
int32_t RegisterRtpHeaderExtension(RTPExtensionType type, uint8_t id);
- virtual bool IsRtpHeaderExtensionRegistered(RTPExtensionType type) override;
+ bool IsRtpHeaderExtensionRegistered(RTPExtensionType type) override;
int32_t DeregisterRtpHeaderExtension(RTPExtensionType type);
size_t RtpHeaderExtensionTotalLength() const;
@@ -202,10 +202,10 @@ class RTPSender : public RTPSenderInterface {
bool is_voiced,
uint8_t dBov) const;
- virtual bool UpdateVideoRotation(uint8_t* rtp_packet,
- size_t rtp_packet_length,
- const RTPHeader& rtp_header,
- VideoRotation rotation) const override;
+ bool UpdateVideoRotation(uint8_t* rtp_packet,
+ size_t rtp_packet_length,
+ const RTPHeader& rtp_header,
+ VideoRotation rotation) const override;
bool TimeToSendPacket(uint16_t sequence_number, int64_t capture_time_ms,
bool retransmission);
@@ -386,6 +386,7 @@ class RTPSender : public RTPSenderInterface {
Clock* clock_;
int64_t clock_delta_ms_;
+ Random random_ GUARDED_BY(send_critsect_);
rtc::scoped_ptr<BitrateAggregator> bitrates_;
Bitrate total_bitrate_sent_;
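The new random_ member carries a GUARDED_BY annotation from thread_annotations.h, so Clang's -Wthread-safety analysis can flag any access made without send_critsect_ held. A minimal illustration of the pattern (class and member names hypothetical; the locking style mirrors this module):

    class Annotated {
     public:
      Annotated() : lock_(CriticalSectionWrapper::CreateCriticalSection()) {}
      void Increment() {
        CriticalSectionScoped cs(lock_.get());
        ++counter_;  // OK: lock_ is held here.
      }

     private:
      rtc::scoped_ptr<CriticalSectionWrapper> lock_;
      int counter_ GUARDED_BY(lock_);  // Unguarded access warns at compile time.
    };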
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc b/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc
index 1fc9a89ce1..2aa4961cdc 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc
@@ -10,12 +10,12 @@
#include "webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h"
-#include <assert.h> //assert
-#include <string.h> //memcpy
+#include <string.h>
#include "webrtc/base/trace_event.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
namespace webrtc {
@@ -47,8 +47,7 @@ RTPSenderAudio::RTPSenderAudio(Clock* clock,
_lastPayloadType(-1),
_audioLevel_dBov(0) {}
-RTPSenderAudio::~RTPSenderAudio() {
-}
+RTPSenderAudio::~RTPSenderAudio() {}
int RTPSenderAudio::AudioFrequency() const {
return kDtmfFrequencyHz;
@@ -56,22 +55,20 @@ int RTPSenderAudio::AudioFrequency() const {
// set audio packet size, used to determine when it's time to send a DTMF packet
// in silence (CNG)
-int32_t
-RTPSenderAudio::SetAudioPacketSize(const uint16_t packetSizeSamples)
-{
- CriticalSectionScoped cs(_sendAudioCritsect.get());
+int32_t RTPSenderAudio::SetAudioPacketSize(uint16_t packetSizeSamples) {
+ CriticalSectionScoped cs(_sendAudioCritsect.get());
- _packetSizeSamples = packetSizeSamples;
- return 0;
+ _packetSizeSamples = packetSizeSamples;
+ return 0;
}
int32_t RTPSenderAudio::RegisterAudioPayload(
const char payloadName[RTP_PAYLOAD_NAME_SIZE],
const int8_t payloadType,
const uint32_t frequency,
- const uint8_t channels,
+ const size_t channels,
const uint32_t rate,
- RtpUtility::Payload*& payload) {
+ RtpUtility::Payload** payload) {
if (RtpUtility::StringCompare(payloadName, "cn", 2)) {
CriticalSectionScoped cs(_sendAudioCritsect.get());
// we can have multiple CNG payload types
@@ -99,72 +96,65 @@ int32_t RTPSenderAudio::RegisterAudioPayload(
return 0;
// The default timestamp rate is 8000 Hz, but other rates may be defined.
}
- payload = new RtpUtility::Payload;
- payload->typeSpecific.Audio.frequency = frequency;
- payload->typeSpecific.Audio.channels = channels;
- payload->typeSpecific.Audio.rate = rate;
- payload->audio = true;
- payload->name[RTP_PAYLOAD_NAME_SIZE - 1] = '\0';
- strncpy(payload->name, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);
+ *payload = new RtpUtility::Payload;
+ (*payload)->typeSpecific.Audio.frequency = frequency;
+ (*payload)->typeSpecific.Audio.channels = channels;
+ (*payload)->typeSpecific.Audio.rate = rate;
+ (*payload)->audio = true;
+ (*payload)->name[RTP_PAYLOAD_NAME_SIZE - 1] = '\0';
+ strncpy((*payload)->name, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);
return 0;
}
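Note the out-parameter change from RtpUtility::Payload*& to RtpUtility::Payload**: per Google C++ style, a pointer out-parameter makes the mutation visible at the call site. Call shape (assumed, for illustration):

    RtpUtility::Payload* payload = nullptr;
    // Before: RegisterAudioPayload(name, type, freq, channels, rate, payload);
    // Now the &payload makes it obvious the callee may assign it:
    RegisterAudioPayload(name, type, freq, channels, rate, &payload);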
-bool
-RTPSenderAudio::MarkerBit(const FrameType frameType,
- const int8_t payload_type)
-{
- CriticalSectionScoped cs(_sendAudioCritsect.get());
- // for audio true for first packet in a speech burst
- bool markerBit = false;
- if (_lastPayloadType != payload_type) {
- if (payload_type != -1 && (_cngNBPayloadType == payload_type ||
- _cngWBPayloadType == payload_type ||
- _cngSWBPayloadType == payload_type ||
- _cngFBPayloadType == payload_type)) {
- // Only set a marker bit when we change payload type to a non CNG
- return false;
- }
+bool RTPSenderAudio::MarkerBit(FrameType frameType, int8_t payload_type) {
+ CriticalSectionScoped cs(_sendAudioCritsect.get());
+ // For audio, the marker bit marks the first packet in a speech burst
+ bool markerBit = false;
+ if (_lastPayloadType != payload_type) {
+ if (payload_type != -1 && (_cngNBPayloadType == payload_type ||
+ _cngWBPayloadType == payload_type ||
+ _cngSWBPayloadType == payload_type ||
+ _cngFBPayloadType == payload_type)) {
+ // Only set a marker bit when we change payload type to a non-CNG type
+ return false;
+ }
- // payload_type differ
- if (_lastPayloadType == -1) {
- if (frameType != kAudioFrameCN) {
- // first packet and NOT CNG
- return true;
- } else {
- // first packet and CNG
- _inbandVADactive = true;
- return false;
- }
+ // payload_type differ
+ if (_lastPayloadType == -1) {
+ if (frameType != kAudioFrameCN) {
+ // first packet and NOT CNG
+ return true;
+ } else {
+ // first packet and CNG
+ _inbandVADactive = true;
+ return false;
}
-
- // not first packet AND
- // not CNG AND
- // payload_type changed
-
- // set a marker bit when we change payload type
- markerBit = true;
}
- // For G.723 G.729, AMR etc we can have inband VAD
- if(frameType == kAudioFrameCN)
- {
- _inbandVADactive = true;
+ // not first packet AND
+ // not CNG AND
+ // payload_type changed
- } else if(_inbandVADactive)
- {
- _inbandVADactive = false;
- markerBit = true;
- }
- return markerBit;
+ // set a marker bit when we change payload type
+ markerBit = true;
+ }
+
+ // For G.723, G.729, AMR etc. we can have inband VAD
+ if (frameType == kAudioFrameCN) {
+ _inbandVADactive = true;
+ } else if (_inbandVADactive) {
+ _inbandVADactive = false;
+ markerBit = true;
+ }
+ return markerBit;
}
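A condensed restatement of the burst-marking rule above (single CNG type, no locking; a simplified sketch of the idea, not behaviour-identical code):

    // Per RFC 3551 the audio marker bit flags the first packet of a talk
    // burst: speech (re)starting after comfort noise or a codec change.
    bool MarkerBitSketch(bool is_cng, int8_t pt, int8_t* last_pt, bool* vad_on) {
      bool marker = false;
      if (*last_pt != pt && !is_cng)
        marker = true;             // Codec change into speech.
      if (is_cng) {
        *vad_on = true;            // Comfort noise: the burst has ended.
      } else if (*vad_on) {
        *vad_on = false;
        marker = true;             // Speech resumes: new burst.
      }
      *last_pt = pt;
      return marker;
    }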
-int32_t RTPSenderAudio::SendAudio(
- const FrameType frameType,
- const int8_t payloadType,
- const uint32_t captureTimeStamp,
- const uint8_t* payloadData,
- const size_t dataSize,
- const RTPFragmentationHeader* fragmentation) {
+int32_t RTPSenderAudio::SendAudio(FrameType frameType,
+ int8_t payloadType,
+ uint32_t captureTimeStamp,
+ const uint8_t* payloadData,
+ size_t dataSize,
+ const RTPFragmentationHeader* fragmentation) {
// TODO(pwestin) Breakup function in smaller functions.
size_t payloadSize = dataSize;
size_t maxPayloadLength = _rtpSender->MaxPayloadLength();
@@ -185,8 +175,8 @@ int32_t RTPSenderAudio::SendAudio(
// Check if we have pending DTMFs to send
if (!_dtmfEventIsOn && PendingDTMF()) {
- int64_t delaySinceLastDTMF = _clock->TimeInMilliseconds() -
- _dtmfTimeLastSent;
+ int64_t delaySinceLastDTMF =
+ _clock->TimeInMilliseconds() - _dtmfTimeLastSent;
if (delaySinceLastDTMF > 100) {
// New tone to play
@@ -294,128 +284,120 @@ int32_t RTPSenderAudio::SendAudio(
// Too large payload buffer.
return -1;
}
- if (red_payload_type >= 0 && // Have we configured RED?
- fragmentation && fragmentation->fragmentationVectorSize > 1 &&
- !markerBit) {
- if (timestampOffset <= 0x3fff) {
- if (fragmentation->fragmentationVectorSize != 2) {
- // we only support 2 codecs when using RED
- return -1;
- }
- // only 0x80 if we have multiple blocks
- dataBuffer[rtpHeaderLength++] =
- 0x80 + fragmentation->fragmentationPlType[1];
- size_t blockLength = fragmentation->fragmentationLength[1];
-
- // sanity blockLength
- if (blockLength > 0x3ff) { // block length 10 bits 1023 bytes
- return -1;
- }
- uint32_t REDheader = (timestampOffset << 10) + blockLength;
- ByteWriter<uint32_t>::WriteBigEndian(dataBuffer + rtpHeaderLength,
- REDheader);
- rtpHeaderLength += 3;
-
- dataBuffer[rtpHeaderLength++] = fragmentation->fragmentationPlType[0];
- // copy the RED data
- memcpy(dataBuffer + rtpHeaderLength,
- payloadData + fragmentation->fragmentationOffset[1],
- fragmentation->fragmentationLength[1]);
-
- // copy the normal data
- memcpy(dataBuffer + rtpHeaderLength +
- fragmentation->fragmentationLength[1],
- payloadData + fragmentation->fragmentationOffset[0],
- fragmentation->fragmentationLength[0]);
-
- payloadSize = fragmentation->fragmentationLength[0] +
- fragmentation->fragmentationLength[1];
- } else {
- // silence for too long send only new data
- dataBuffer[rtpHeaderLength++] = fragmentation->fragmentationPlType[0];
- memcpy(dataBuffer + rtpHeaderLength,
- payloadData + fragmentation->fragmentationOffset[0],
- fragmentation->fragmentationLength[0]);
+ if (red_payload_type >= 0 && // Have we configured RED?
+ fragmentation && fragmentation->fragmentationVectorSize > 1 &&
+ !markerBit) {
+ if (timestampOffset <= 0x3fff) {
+ if (fragmentation->fragmentationVectorSize != 2) {
+ // we only support 2 codecs when using RED
+ return -1;
+ }
+ // only 0x80 if we have multiple blocks
+ dataBuffer[rtpHeaderLength++] =
+ 0x80 + fragmentation->fragmentationPlType[1];
+ size_t blockLength = fragmentation->fragmentationLength[1];
- payloadSize = fragmentation->fragmentationLength[0];
+ // Sanity-check blockLength.
+ if (blockLength > 0x3ff) { // block length 10 bits 1023 bytes
+ return -1;
}
+ uint32_t REDheader = (timestampOffset << 10) + blockLength;
+ ByteWriter<uint32_t>::WriteBigEndian(dataBuffer + rtpHeaderLength,
+ REDheader);
+ rtpHeaderLength += 3;
+
+ dataBuffer[rtpHeaderLength++] = fragmentation->fragmentationPlType[0];
+ // copy the RED data
+ memcpy(dataBuffer + rtpHeaderLength,
+ payloadData + fragmentation->fragmentationOffset[1],
+ fragmentation->fragmentationLength[1]);
+
+ // copy the normal data
+ memcpy(
+ dataBuffer + rtpHeaderLength + fragmentation->fragmentationLength[1],
+ payloadData + fragmentation->fragmentationOffset[0],
+ fragmentation->fragmentationLength[0]);
+
+ payloadSize = fragmentation->fragmentationLength[0] +
+ fragmentation->fragmentationLength[1];
} else {
- if (fragmentation && fragmentation->fragmentationVectorSize > 0) {
- // use the fragment info if we have one
- dataBuffer[rtpHeaderLength++] = fragmentation->fragmentationPlType[0];
- memcpy(dataBuffer + rtpHeaderLength,
- payloadData + fragmentation->fragmentationOffset[0],
- fragmentation->fragmentationLength[0]);
-
- payloadSize = fragmentation->fragmentationLength[0];
- } else {
- memcpy(dataBuffer + rtpHeaderLength, payloadData, payloadSize);
- }
+ // Silence lasted too long; send only the new data.
+ dataBuffer[rtpHeaderLength++] = fragmentation->fragmentationPlType[0];
+ memcpy(dataBuffer + rtpHeaderLength,
+ payloadData + fragmentation->fragmentationOffset[0],
+ fragmentation->fragmentationLength[0]);
+
+ payloadSize = fragmentation->fragmentationLength[0];
}
- {
- CriticalSectionScoped cs(_sendAudioCritsect.get());
- _lastPayloadType = payloadType;
+ } else {
+ if (fragmentation && fragmentation->fragmentationVectorSize > 0) {
+ // use the fragment info if we have one
+ dataBuffer[rtpHeaderLength++] = fragmentation->fragmentationPlType[0];
+ memcpy(dataBuffer + rtpHeaderLength,
+ payloadData + fragmentation->fragmentationOffset[0],
+ fragmentation->fragmentationLength[0]);
+
+ payloadSize = fragmentation->fragmentationLength[0];
+ } else {
+ memcpy(dataBuffer + rtpHeaderLength, payloadData, payloadSize);
}
- // Update audio level extension, if included.
- size_t packetSize = payloadSize + rtpHeaderLength;
- RtpUtility::RtpHeaderParser rtp_parser(dataBuffer, packetSize);
- RTPHeader rtp_header;
- rtp_parser.Parse(rtp_header);
- _rtpSender->UpdateAudioLevel(dataBuffer, packetSize, rtp_header,
- (frameType == kAudioFrameSpeech),
- audio_level_dbov);
- TRACE_EVENT_ASYNC_END2("webrtc", "Audio", captureTimeStamp, "timestamp",
- _rtpSender->Timestamp(), "seqnum",
- _rtpSender->SequenceNumber());
- return _rtpSender->SendToNetwork(dataBuffer, payloadSize, rtpHeaderLength,
- -1, kAllowRetransmission,
- RtpPacketSender::kHighPriority);
}
-
- // Audio level magnitude and voice activity flag are set for each RTP packet
-int32_t
-RTPSenderAudio::SetAudioLevel(const uint8_t level_dBov)
-{
- if (level_dBov > 127)
- {
- return -1;
- }
+ {
CriticalSectionScoped cs(_sendAudioCritsect.get());
- _audioLevel_dBov = level_dBov;
- return 0;
+ _lastPayloadType = payloadType;
+ }
+ // Update audio level extension, if included.
+ size_t packetSize = payloadSize + rtpHeaderLength;
+ RtpUtility::RtpHeaderParser rtp_parser(dataBuffer, packetSize);
+ RTPHeader rtp_header;
+ rtp_parser.Parse(&rtp_header);
+ _rtpSender->UpdateAudioLevel(dataBuffer, packetSize, rtp_header,
+ (frameType == kAudioFrameSpeech),
+ audio_level_dbov);
+ TRACE_EVENT_ASYNC_END2("webrtc", "Audio", captureTimeStamp, "timestamp",
+ _rtpSender->Timestamp(), "seqnum",
+ _rtpSender->SequenceNumber());
+ return _rtpSender->SendToNetwork(dataBuffer, payloadSize, rtpHeaderLength,
+ TickTime::MillisecondTimestamp(),
+ kAllowRetransmission,
+ RtpPacketSender::kHighPriority);
}
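The RED branch above is RFC 2198: a 4-byte header (F=1) for the redundant block, then a 1-byte header (F=0) for the primary. Writing the 32-bit (timestampOffset << 10) + blockLength value and advancing only 3 bytes works because the offset is capped at 0x3fff, so the value fits in 24 bits and the primary-block byte overwrites the fourth. The same packing written out explicitly (variable names assumed):

    uint8_t red[5];
    const uint8_t redundant_pt = 97, primary_pt = 111;  // example values
    const uint16_t ts_offset = 960;   // RTP ticks back to the redundant frame
    const uint16_t block_len = 40;    // redundant block size in bytes
    red[0] = 0x80 | (redundant_pt & 0x7f);  // F=1 | block payload type
    const uint32_t mid24 =
        (static_cast<uint32_t>(ts_offset & 0x3fff) << 10) | (block_len & 0x3ff);
    red[1] = static_cast<uint8_t>(mid24 >> 16);  // offset(14) | length(10)
    red[2] = static_cast<uint8_t>(mid24 >> 8);
    red[3] = static_cast<uint8_t>(mid24);
    red[4] = primary_pt & 0x7f;  // F=0 | primary payload type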
- // Set payload type for Redundant Audio Data RFC 2198
-int32_t
-RTPSenderAudio::SetRED(const int8_t payloadType)
-{
- if(payloadType < -1 )
- {
- return -1;
- }
- CriticalSectionScoped cs(_sendAudioCritsect.get());
- _REDPayloadType = payloadType;
- return 0;
+// Audio level magnitude and voice activity flag are set for each RTP packet
+int32_t RTPSenderAudio::SetAudioLevel(uint8_t level_dBov) {
+ if (level_dBov > 127) {
+ return -1;
+ }
+ CriticalSectionScoped cs(_sendAudioCritsect.get());
+ _audioLevel_dBov = level_dBov;
+ return 0;
}
- // Get payload type for Redundant Audio Data RFC 2198
-int32_t
-RTPSenderAudio::RED(int8_t& payloadType) const
-{
- CriticalSectionScoped cs(_sendAudioCritsect.get());
- if(_REDPayloadType == -1)
- {
- // not configured
- return -1;
- }
- payloadType = _REDPayloadType;
- return 0;
+// Set payload type for Redundant Audio Data RFC 2198
+int32_t RTPSenderAudio::SetRED(int8_t payloadType) {
+ if (payloadType < -1) {
+ return -1;
+ }
+ CriticalSectionScoped cs(_sendAudioCritsect.get());
+ _REDPayloadType = payloadType;
+ return 0;
+}
+
+// Get payload type for Redundant Audio Data RFC 2198
+int32_t RTPSenderAudio::RED(int8_t* payloadType) const {
+ CriticalSectionScoped cs(_sendAudioCritsect.get());
+ if (_REDPayloadType == -1) {
+ // not configured
+ return -1;
+ }
+ *payloadType = _REDPayloadType;
+ return 0;
}
// Send a TelephoneEvent tone using RFC 2833 (4733)
-int32_t RTPSenderAudio::SendTelephoneEvent(const uint8_t key,
- const uint16_t time_ms,
- const uint8_t level) {
+int32_t RTPSenderAudio::SendTelephoneEvent(uint8_t key,
+ uint16_t time_ms,
+ uint8_t level) {
{
CriticalSectionScoped lock(_sendAudioCritsect.get());
if (_dtmfPayloadType < 0) {
@@ -426,63 +408,57 @@ int32_t RTPSenderAudio::SendTelephoneEvent(const uint8_t key,
return AddDTMF(key, time_ms, level);
}
-int32_t
-RTPSenderAudio::SendTelephoneEventPacket(bool ended,
- int8_t dtmf_payload_type,
- uint32_t dtmfTimeStamp,
- uint16_t duration,
- bool markerBit)
-{
- uint8_t dtmfbuffer[IP_PACKET_SIZE];
- uint8_t sendCount = 1;
- int32_t retVal = 0;
-
- if(ended)
- {
- // resend last packet in an event 3 times
- sendCount = 3;
- }
- do
- {
- //Send DTMF data
- _rtpSender->BuildRTPheader(dtmfbuffer, dtmf_payload_type, markerBit,
- dtmfTimeStamp, _clock->TimeInMilliseconds());
-
- // reset CSRC and X bit
- dtmfbuffer[0] &= 0xe0;
-
- //Create DTMF data
- /* From RFC 2833:
-
- 0 1 2 3
- 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
- +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- | event |E|R| volume | duration |
- +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- */
- // R bit always cleared
- uint8_t R = 0x00;
- uint8_t volume = _dtmfLevel;
-
- // First packet un-ended
- uint8_t E = ended ? 0x80 : 0x00;
-
- // First byte is Event number, equals key number
- dtmfbuffer[12] = _dtmfKey;
- dtmfbuffer[13] = E|R|volume;
- ByteWriter<uint16_t>::WriteBigEndian(dtmfbuffer + 14, duration);
-
- TRACE_EVENT_INSTANT2(TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"),
- "Audio::SendTelephoneEvent", "timestamp",
- dtmfTimeStamp, "seqnum",
- _rtpSender->SequenceNumber());
- retVal = _rtpSender->SendToNetwork(dtmfbuffer, 4, 12, -1,
- kAllowRetransmission,
- RtpPacketSender::kHighPriority);
- sendCount--;
-
- }while (sendCount > 0 && retVal == 0);
-
- return retVal;
+int32_t RTPSenderAudio::SendTelephoneEventPacket(bool ended,
+ int8_t dtmf_payload_type,
+ uint32_t dtmfTimeStamp,
+ uint16_t duration,
+ bool markerBit) {
+ uint8_t dtmfbuffer[IP_PACKET_SIZE];
+ uint8_t sendCount = 1;
+ int32_t retVal = 0;
+
+ if (ended) {
+ // resend last packet in an event 3 times
+ sendCount = 3;
+ }
+ do {
+ // Send DTMF data
+ _rtpSender->BuildRTPheader(dtmfbuffer, dtmf_payload_type, markerBit,
+ dtmfTimeStamp, _clock->TimeInMilliseconds());
+
+ // reset CSRC and X bit
+ dtmfbuffer[0] &= 0xe0;
+
+ // Create DTMF data
+ /* From RFC 2833:
+
+ 0 1 2 3
+ 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | event |E|R| volume | duration |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ */
+ // R bit always cleared
+ uint8_t R = 0x00;
+ uint8_t volume = _dtmfLevel;
+
+ // First packet un-ended
+ uint8_t E = ended ? 0x80 : 0x00;
+
+ // First byte is Event number, equals key number
+ dtmfbuffer[12] = _dtmfKey;
+ dtmfbuffer[13] = E | R | volume;
+ ByteWriter<uint16_t>::WriteBigEndian(dtmfbuffer + 14, duration);
+
+ TRACE_EVENT_INSTANT2(TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"),
+ "Audio::SendTelephoneEvent", "timestamp",
+ dtmfTimeStamp, "seqnum", _rtpSender->SequenceNumber());
+ retVal = _rtpSender->SendToNetwork(
+ dtmfbuffer, 4, 12, TickTime::MillisecondTimestamp(),
+ kAllowRetransmission, RtpPacketSender::kHighPriority);
+ sendCount--;
+ } while (sendCount > 0 && retVal == 0);
+
+ return retVal;
}
} // namespace webrtc
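A worked example of the 4-byte telephone-event payload assembled at dtmfbuffer[12..15] above, for digit 9 at volume 10 dBov, duration 500 ms at the 8 kHz event clock, on the final (E=1) packet (values illustrative, matching the unit test further down):

    uint8_t ev[4];
    ev[0] = 9;                            // event code: DTMF digit '9'
    ev[1] = 0x80 | (10 & 0x3f);           // E=1, R=0, volume=10 -> 0x8a
    ByteWriter<uint16_t>::WriteBigEndian(ev + 2, 500 * 8);  // 4000 ts units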
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h b/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h
index dd16fe51b4..1e96d17a67 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h
+++ b/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h
@@ -19,92 +19,91 @@
#include "webrtc/typedefs.h"
namespace webrtc {
-class RTPSenderAudio: public DTMFqueue
-{
-public:
- RTPSenderAudio(Clock* clock,
- RTPSender* rtpSender,
- RtpAudioFeedback* audio_feedback);
- virtual ~RTPSenderAudio();
-
- int32_t RegisterAudioPayload(const char payloadName[RTP_PAYLOAD_NAME_SIZE],
- const int8_t payloadType,
- const uint32_t frequency,
- const uint8_t channels,
- const uint32_t rate,
- RtpUtility::Payload*& payload);
-
- int32_t SendAudio(const FrameType frameType,
- const int8_t payloadType,
- const uint32_t captureTimeStamp,
- const uint8_t* payloadData,
- const size_t payloadSize,
- const RTPFragmentationHeader* fragmentation);
-
- // set audio packet size, used to determine when it's time to send a DTMF packet in silence (CNG)
- int32_t SetAudioPacketSize(const uint16_t packetSizeSamples);
-
- // Store the audio level in dBov for header-extension-for-audio-level-indication.
- // Valid range is [0,100]. Actual value is negative.
- int32_t SetAudioLevel(const uint8_t level_dBov);
-
- // Send a DTMF tone using RFC 2833 (4733)
- int32_t SendTelephoneEvent(const uint8_t key,
- const uint16_t time_ms,
- const uint8_t level);
-
- int AudioFrequency() const;
-
- // Set payload type for Redundant Audio Data RFC 2198
- int32_t SetRED(const int8_t payloadType);
-
- // Get payload type for Redundant Audio Data RFC 2198
- int32_t RED(int8_t& payloadType) const;
-
-protected:
- int32_t SendTelephoneEventPacket(bool ended,
- int8_t dtmf_payload_type,
- uint32_t dtmfTimeStamp,
- uint16_t duration,
- bool markerBit); // set on first packet in talk burst
-
- bool MarkerBit(const FrameType frameType,
- const int8_t payloadType);
-
-private:
- Clock* const _clock;
- RTPSender* const _rtpSender;
- RtpAudioFeedback* const _audioFeedback;
-
- rtc::scoped_ptr<CriticalSectionWrapper> _sendAudioCritsect;
-
- uint16_t _packetSizeSamples GUARDED_BY(_sendAudioCritsect);
-
- // DTMF
- bool _dtmfEventIsOn;
- bool _dtmfEventFirstPacketSent;
- int8_t _dtmfPayloadType GUARDED_BY(_sendAudioCritsect);
- uint32_t _dtmfTimestamp;
- uint8_t _dtmfKey;
- uint32_t _dtmfLengthSamples;
- uint8_t _dtmfLevel;
- int64_t _dtmfTimeLastSent;
- uint32_t _dtmfTimestampLastSent;
-
- int8_t _REDPayloadType GUARDED_BY(_sendAudioCritsect);
-
- // VAD detection, used for markerbit
- bool _inbandVADactive GUARDED_BY(_sendAudioCritsect);
- int8_t _cngNBPayloadType GUARDED_BY(_sendAudioCritsect);
- int8_t _cngWBPayloadType GUARDED_BY(_sendAudioCritsect);
- int8_t _cngSWBPayloadType GUARDED_BY(_sendAudioCritsect);
- int8_t _cngFBPayloadType GUARDED_BY(_sendAudioCritsect);
- int8_t _lastPayloadType GUARDED_BY(_sendAudioCritsect);
-
- // Audio level indication
- // (https://datatracker.ietf.org/doc/draft-lennox-avt-rtp-audio-level-exthdr/)
- uint8_t _audioLevel_dBov GUARDED_BY(_sendAudioCritsect);
+class RTPSenderAudio : public DTMFqueue {
+ public:
+ RTPSenderAudio(Clock* clock,
+ RTPSender* rtpSender,
+ RtpAudioFeedback* audio_feedback);
+ virtual ~RTPSenderAudio();
+
+ int32_t RegisterAudioPayload(const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+ int8_t payloadType,
+ uint32_t frequency,
+ size_t channels,
+ uint32_t rate,
+ RtpUtility::Payload** payload);
+
+ int32_t SendAudio(FrameType frameType,
+ int8_t payloadType,
+ uint32_t captureTimeStamp,
+ const uint8_t* payloadData,
+ size_t payloadSize,
+ const RTPFragmentationHeader* fragmentation);
+
+ // set audio packet size, used to determine when it's time to send a DTMF
+ // packet in silence (CNG)
+ int32_t SetAudioPacketSize(uint16_t packetSizeSamples);
+
+ // Store the audio level in dBov for
+ // header-extension-for-audio-level-indication.
+ // Valid range is [0,100]. Actual value is negative.
+ int32_t SetAudioLevel(uint8_t level_dBov);
+
+ // Send a DTMF tone using RFC 2833 (4733)
+ int32_t SendTelephoneEvent(uint8_t key, uint16_t time_ms, uint8_t level);
+
+ int AudioFrequency() const;
+
+ // Set payload type for Redundant Audio Data RFC 2198
+ int32_t SetRED(int8_t payloadType);
+
+ // Get payload type for Redundant Audio Data RFC 2198
+ int32_t RED(int8_t* payloadType) const;
+
+ protected:
+ int32_t SendTelephoneEventPacket(
+ bool ended,
+ int8_t dtmf_payload_type,
+ uint32_t dtmfTimeStamp,
+ uint16_t duration,
+ bool markerBit); // set on first packet in talk burst
+
+ bool MarkerBit(const FrameType frameType, const int8_t payloadType);
+
+ private:
+ Clock* const _clock;
+ RTPSender* const _rtpSender;
+ RtpAudioFeedback* const _audioFeedback;
+
+ rtc::scoped_ptr<CriticalSectionWrapper> _sendAudioCritsect;
+
+ uint16_t _packetSizeSamples GUARDED_BY(_sendAudioCritsect);
+
+ // DTMF
+ bool _dtmfEventIsOn;
+ bool _dtmfEventFirstPacketSent;
+ int8_t _dtmfPayloadType GUARDED_BY(_sendAudioCritsect);
+ uint32_t _dtmfTimestamp;
+ uint8_t _dtmfKey;
+ uint32_t _dtmfLengthSamples;
+ uint8_t _dtmfLevel;
+ int64_t _dtmfTimeLastSent;
+ uint32_t _dtmfTimestampLastSent;
+
+ int8_t _REDPayloadType GUARDED_BY(_sendAudioCritsect);
+
+ // VAD detection, used for markerbit
+ bool _inbandVADactive GUARDED_BY(_sendAudioCritsect);
+ int8_t _cngNBPayloadType GUARDED_BY(_sendAudioCritsect);
+ int8_t _cngWBPayloadType GUARDED_BY(_sendAudioCritsect);
+ int8_t _cngSWBPayloadType GUARDED_BY(_sendAudioCritsect);
+ int8_t _cngFBPayloadType GUARDED_BY(_sendAudioCritsect);
+ int8_t _lastPayloadType GUARDED_BY(_sendAudioCritsect);
+
+ // Audio level indication
+ // (https://datatracker.ietf.org/doc/draft-lennox-avt-rtp-audio-level-exthdr/)
+ uint8_t _audioLevel_dBov GUARDED_BY(_sendAudioCritsect);
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_SENDER_AUDIO_H_
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_SENDER_AUDIO_H_
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc b/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc
index fde6d47ceb..6bc122201a 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc
@@ -12,20 +12,22 @@
* This file includes unit tests for the RTPSender.
*/
+#include <list>
+#include <vector>
+
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
-
#include "webrtc/base/buffer.h"
#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_cvo.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_cvo.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_header_extension.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_sender.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_sender_video.h"
-#include "webrtc/system_wrappers/include/stl_util.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
+#include "webrtc/system_wrappers/include/stl_util.h"
#include "webrtc/test/mock_transport.h"
#include "webrtc/typedefs.h"
@@ -90,9 +92,7 @@ class LoopbackTransportTest : public webrtc::Transport {
sent_packets_.push_back(buffer);
return true;
}
- bool SendRtcp(const uint8_t* data, size_t len) override {
- return false;
- }
+ bool SendRtcp(const uint8_t* data, size_t len) override { return false; }
int packets_sent_;
size_t last_sent_packet_len_;
size_t total_bytes_sent_;
@@ -163,11 +163,8 @@ class RtpSenderTest : public ::testing::Test {
void SendPacket(int64_t capture_time_ms, int payload_length) {
uint32_t timestamp = capture_time_ms * 90;
- int32_t rtp_length = rtp_sender_->BuildRTPheader(packet_,
- kPayload,
- kMarkerBit,
- timestamp,
- capture_time_ms);
+ int32_t rtp_length = rtp_sender_->BuildRTPheader(
+ packet_, kPayload, kMarkerBit, timestamp, capture_time_ms);
ASSERT_GE(rtp_length, 0);
// Packet should be stored in a send bucket.
@@ -186,7 +183,7 @@ class RtpSenderTestWithoutPacer : public RtpSenderTest {
class RtpSenderVideoTest : public RtpSenderTest {
protected:
- virtual void SetUp() override {
+ void SetUp() override {
// TODO(pbos): Set up to use pacer.
SetUpRtpSender(false);
rtp_sender_video_.reset(
@@ -211,7 +208,7 @@ class RtpSenderVideoTest : public RtpSenderTest {
} else {
ASSERT_EQ(kRtpHeaderSize, length);
}
- ASSERT_TRUE(rtp_parser.Parse(rtp_header, map));
+ ASSERT_TRUE(rtp_parser.Parse(&rtp_header, map));
ASSERT_FALSE(rtp_parser.RTCP());
EXPECT_EQ(payload_, rtp_header.payloadType);
EXPECT_EQ(seq_num, rtp_header.sequenceNumber);
@@ -228,53 +225,57 @@ TEST_F(RtpSenderTestWithoutPacer,
RegisterRtpTransmissionTimeOffsetHeaderExtension) {
EXPECT_EQ(0u, rtp_sender_->RtpHeaderExtensionTotalLength());
EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
- kRtpExtensionTransmissionTimeOffset, kTransmissionTimeOffsetExtensionId));
+ kRtpExtensionTransmissionTimeOffset,
+ kTransmissionTimeOffsetExtensionId));
EXPECT_EQ(kRtpOneByteHeaderLength + kTransmissionTimeOffsetLength,
rtp_sender_->RtpHeaderExtensionTotalLength());
EXPECT_EQ(0, rtp_sender_->DeregisterRtpHeaderExtension(
- kRtpExtensionTransmissionTimeOffset));
+ kRtpExtensionTransmissionTimeOffset));
EXPECT_EQ(0u, rtp_sender_->RtpHeaderExtensionTotalLength());
}
TEST_F(RtpSenderTestWithoutPacer, RegisterRtpAbsoluteSendTimeHeaderExtension) {
EXPECT_EQ(0u, rtp_sender_->RtpHeaderExtensionTotalLength());
- EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
- kRtpExtensionAbsoluteSendTime, kAbsoluteSendTimeExtensionId));
+ EXPECT_EQ(
+ 0, rtp_sender_->RegisterRtpHeaderExtension(kRtpExtensionAbsoluteSendTime,
+ kAbsoluteSendTimeExtensionId));
EXPECT_EQ(RtpUtility::Word32Align(kRtpOneByteHeaderLength +
kAbsoluteSendTimeLength),
rtp_sender_->RtpHeaderExtensionTotalLength());
EXPECT_EQ(0, rtp_sender_->DeregisterRtpHeaderExtension(
- kRtpExtensionAbsoluteSendTime));
+ kRtpExtensionAbsoluteSendTime));
EXPECT_EQ(0u, rtp_sender_->RtpHeaderExtensionTotalLength());
}
TEST_F(RtpSenderTestWithoutPacer, RegisterRtpAudioLevelHeaderExtension) {
EXPECT_EQ(0u, rtp_sender_->RtpHeaderExtensionTotalLength());
- EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
- kRtpExtensionAudioLevel, kAudioLevelExtensionId));
+ EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(kRtpExtensionAudioLevel,
+ kAudioLevelExtensionId));
EXPECT_EQ(
RtpUtility::Word32Align(kRtpOneByteHeaderLength + kAudioLevelLength),
rtp_sender_->RtpHeaderExtensionTotalLength());
- EXPECT_EQ(0, rtp_sender_->DeregisterRtpHeaderExtension(
- kRtpExtensionAudioLevel));
+ EXPECT_EQ(0,
+ rtp_sender_->DeregisterRtpHeaderExtension(kRtpExtensionAudioLevel));
EXPECT_EQ(0u, rtp_sender_->RtpHeaderExtensionTotalLength());
}
TEST_F(RtpSenderTestWithoutPacer, RegisterRtpHeaderExtensions) {
EXPECT_EQ(0u, rtp_sender_->RtpHeaderExtensionTotalLength());
EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
- kRtpExtensionTransmissionTimeOffset, kTransmissionTimeOffsetExtensionId));
+ kRtpExtensionTransmissionTimeOffset,
+ kTransmissionTimeOffsetExtensionId));
EXPECT_EQ(RtpUtility::Word32Align(kRtpOneByteHeaderLength +
kTransmissionTimeOffsetLength),
rtp_sender_->RtpHeaderExtensionTotalLength());
- EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
- kRtpExtensionAbsoluteSendTime, kAbsoluteSendTimeExtensionId));
+ EXPECT_EQ(
+ 0, rtp_sender_->RegisterRtpHeaderExtension(kRtpExtensionAbsoluteSendTime,
+ kAbsoluteSendTimeExtensionId));
EXPECT_EQ(RtpUtility::Word32Align(kRtpOneByteHeaderLength +
kTransmissionTimeOffsetLength +
kAbsoluteSendTimeLength),
rtp_sender_->RtpHeaderExtensionTotalLength());
- EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
- kRtpExtensionAudioLevel, kAudioLevelExtensionId));
+ EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(kRtpExtensionAudioLevel,
+ kAudioLevelExtensionId));
EXPECT_EQ(RtpUtility::Word32Align(
kRtpOneByteHeaderLength + kTransmissionTimeOffsetLength +
kAbsoluteSendTimeLength + kAudioLevelLength),
@@ -290,18 +291,18 @@ TEST_F(RtpSenderTestWithoutPacer, RegisterRtpHeaderExtensions) {
// Deregister starts.
EXPECT_EQ(0, rtp_sender_->DeregisterRtpHeaderExtension(
- kRtpExtensionTransmissionTimeOffset));
+ kRtpExtensionTransmissionTimeOffset));
EXPECT_EQ(RtpUtility::Word32Align(kRtpOneByteHeaderLength +
kAbsoluteSendTimeLength +
kAudioLevelLength + kVideoRotationLength),
rtp_sender_->RtpHeaderExtensionTotalLength());
EXPECT_EQ(0, rtp_sender_->DeregisterRtpHeaderExtension(
- kRtpExtensionAbsoluteSendTime));
+ kRtpExtensionAbsoluteSendTime));
EXPECT_EQ(RtpUtility::Word32Align(kRtpOneByteHeaderLength +
kAudioLevelLength + kVideoRotationLength),
rtp_sender_->RtpHeaderExtensionTotalLength());
- EXPECT_EQ(0, rtp_sender_->DeregisterRtpHeaderExtension(
- kRtpExtensionAudioLevel));
+ EXPECT_EQ(0,
+ rtp_sender_->DeregisterRtpHeaderExtension(kRtpExtensionAudioLevel));
EXPECT_EQ(
RtpUtility::Word32Align(kRtpOneByteHeaderLength + kVideoRotationLength),
rtp_sender_->RtpHeaderExtensionTotalLength());
@@ -334,7 +335,7 @@ TEST_F(RtpSenderTestWithoutPacer, BuildRTPPacket) {
webrtc::RtpUtility::RtpHeaderParser rtp_parser(packet_, length);
webrtc::RTPHeader rtp_header;
- const bool valid_rtp_header = rtp_parser.Parse(rtp_header, nullptr);
+ const bool valid_rtp_header = rtp_parser.Parse(&rtp_header, nullptr);
ASSERT_TRUE(valid_rtp_header);
ASSERT_FALSE(rtp_parser.RTCP());
@@ -354,7 +355,8 @@ TEST_F(RtpSenderTestWithoutPacer,
BuildRTPPacketWithTransmissionOffsetExtension) {
EXPECT_EQ(0, rtp_sender_->SetTransmissionTimeOffset(kTimeOffset));
EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
- kRtpExtensionTransmissionTimeOffset, kTransmissionTimeOffsetExtensionId));
+ kRtpExtensionTransmissionTimeOffset,
+ kTransmissionTimeOffsetExtensionId));
size_t length = static_cast<size_t>(rtp_sender_->BuildRTPheader(
packet_, kPayload, kMarkerBit, kTimestamp, 0));
@@ -368,7 +370,7 @@ TEST_F(RtpSenderTestWithoutPacer,
RtpHeaderExtensionMap map;
map.Register(kRtpExtensionTransmissionTimeOffset,
kTransmissionTimeOffsetExtensionId);
- const bool valid_rtp_header = rtp_parser.Parse(rtp_header, &map);
+ const bool valid_rtp_header = rtp_parser.Parse(&rtp_header, &map);
ASSERT_TRUE(valid_rtp_header);
ASSERT_FALSE(rtp_parser.RTCP());
@@ -379,7 +381,7 @@ TEST_F(RtpSenderTestWithoutPacer,
// Parse without map extension
webrtc::RTPHeader rtp_header2;
- const bool valid_rtp_header2 = rtp_parser.Parse(rtp_header2, nullptr);
+ const bool valid_rtp_header2 = rtp_parser.Parse(&rtp_header2, nullptr);
ASSERT_TRUE(valid_rtp_header2);
VerifyRTPHeaderCommon(rtp_header2);
@@ -393,7 +395,8 @@ TEST_F(RtpSenderTestWithoutPacer,
const int kNegTimeOffset = -500;
EXPECT_EQ(0, rtp_sender_->SetTransmissionTimeOffset(kNegTimeOffset));
EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
- kRtpExtensionTransmissionTimeOffset, kTransmissionTimeOffsetExtensionId));
+ kRtpExtensionTransmissionTimeOffset,
+ kTransmissionTimeOffsetExtensionId));
size_t length = static_cast<size_t>(rtp_sender_->BuildRTPheader(
packet_, kPayload, kMarkerBit, kTimestamp, 0));
@@ -407,7 +410,7 @@ TEST_F(RtpSenderTestWithoutPacer,
RtpHeaderExtensionMap map;
map.Register(kRtpExtensionTransmissionTimeOffset,
kTransmissionTimeOffsetExtensionId);
- const bool valid_rtp_header = rtp_parser.Parse(rtp_header, &map);
+ const bool valid_rtp_header = rtp_parser.Parse(&rtp_header, &map);
ASSERT_TRUE(valid_rtp_header);
ASSERT_FALSE(rtp_parser.RTCP());
@@ -419,8 +422,9 @@ TEST_F(RtpSenderTestWithoutPacer,
TEST_F(RtpSenderTestWithoutPacer, BuildRTPPacketWithAbsoluteSendTimeExtension) {
EXPECT_EQ(0, rtp_sender_->SetAbsoluteSendTime(kAbsoluteSendTime));
- EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
- kRtpExtensionAbsoluteSendTime, kAbsoluteSendTimeExtensionId));
+ EXPECT_EQ(
+ 0, rtp_sender_->RegisterRtpHeaderExtension(kRtpExtensionAbsoluteSendTime,
+ kAbsoluteSendTimeExtensionId));
size_t length = static_cast<size_t>(rtp_sender_->BuildRTPheader(
packet_, kPayload, kMarkerBit, kTimestamp, 0));
@@ -433,7 +437,7 @@ TEST_F(RtpSenderTestWithoutPacer, BuildRTPPacketWithAbsoluteSendTimeExtension) {
RtpHeaderExtensionMap map;
map.Register(kRtpExtensionAbsoluteSendTime, kAbsoluteSendTimeExtensionId);
- const bool valid_rtp_header = rtp_parser.Parse(rtp_header, &map);
+ const bool valid_rtp_header = rtp_parser.Parse(&rtp_header, &map);
ASSERT_TRUE(valid_rtp_header);
ASSERT_FALSE(rtp_parser.RTCP());
@@ -444,7 +448,7 @@ TEST_F(RtpSenderTestWithoutPacer, BuildRTPPacketWithAbsoluteSendTimeExtension) {
// Parse without map extension
webrtc::RTPHeader rtp_header2;
- const bool valid_rtp_header2 = rtp_parser.Parse(rtp_header2, nullptr);
+ const bool valid_rtp_header2 = rtp_parser.Parse(&rtp_header2, nullptr);
ASSERT_TRUE(valid_rtp_header2);
VerifyRTPHeaderCommon(rtp_header2);
@@ -472,7 +476,7 @@ TEST_F(RtpSenderTestWithoutPacer, BuildRTPPacketWithVideoRotation_MarkerBit) {
webrtc::RtpUtility::RtpHeaderParser rtp_parser(packet_, length);
webrtc::RTPHeader rtp_header;
- ASSERT_TRUE(rtp_parser.Parse(rtp_header, &map));
+ ASSERT_TRUE(rtp_parser.Parse(&rtp_header, &map));
ASSERT_FALSE(rtp_parser.RTCP());
VerifyRTPHeaderCommon(rtp_header);
EXPECT_EQ(length, rtp_header.headerLength);
@@ -500,7 +504,7 @@ TEST_F(RtpSenderTestWithoutPacer,
webrtc::RtpUtility::RtpHeaderParser rtp_parser(packet_, length);
webrtc::RTPHeader rtp_header;
- ASSERT_TRUE(rtp_parser.Parse(rtp_header, &map));
+ ASSERT_TRUE(rtp_parser.Parse(&rtp_header, &map));
ASSERT_FALSE(rtp_parser.RTCP());
VerifyRTPHeaderCommon(rtp_header, false);
EXPECT_EQ(length, rtp_header.headerLength);
@@ -508,8 +512,8 @@ TEST_F(RtpSenderTestWithoutPacer,
}
TEST_F(RtpSenderTestWithoutPacer, BuildRTPPacketWithAudioLevelExtension) {
- EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
- kRtpExtensionAudioLevel, kAudioLevelExtensionId));
+ EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(kRtpExtensionAudioLevel,
+ kAudioLevelExtensionId));
size_t length = static_cast<size_t>(rtp_sender_->BuildRTPheader(
packet_, kPayload, kMarkerBit, kTimestamp, 0));
@@ -521,12 +525,12 @@ TEST_F(RtpSenderTestWithoutPacer, BuildRTPPacketWithAudioLevelExtension) {
webrtc::RTPHeader rtp_header;
// Updating audio level is done in RTPSenderAudio, so simulate it here.
- rtp_parser.Parse(rtp_header);
+ rtp_parser.Parse(&rtp_header);
rtp_sender_->UpdateAudioLevel(packet_, length, rtp_header, true, kAudioLevel);
RtpHeaderExtensionMap map;
map.Register(kRtpExtensionAudioLevel, kAudioLevelExtensionId);
- const bool valid_rtp_header = rtp_parser.Parse(rtp_header, &map);
+ const bool valid_rtp_header = rtp_parser.Parse(&rtp_header, &map);
ASSERT_TRUE(valid_rtp_header);
ASSERT_FALSE(rtp_parser.RTCP());
@@ -538,7 +542,7 @@ TEST_F(RtpSenderTestWithoutPacer, BuildRTPPacketWithAudioLevelExtension) {
// Parse without map extension
webrtc::RTPHeader rtp_header2;
- const bool valid_rtp_header2 = rtp_parser.Parse(rtp_header2, nullptr);
+ const bool valid_rtp_header2 = rtp_parser.Parse(&rtp_header2, nullptr);
ASSERT_TRUE(valid_rtp_header2);
VerifyRTPHeaderCommon(rtp_header2);
@@ -554,11 +558,13 @@ TEST_F(RtpSenderTestWithoutPacer, BuildRTPPacketWithHeaderExtensions) {
EXPECT_EQ(0,
rtp_sender_->SetTransportSequenceNumber(kTransportSequenceNumber));
EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
- kRtpExtensionTransmissionTimeOffset, kTransmissionTimeOffsetExtensionId));
- EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
- kRtpExtensionAbsoluteSendTime, kAbsoluteSendTimeExtensionId));
- EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
- kRtpExtensionAudioLevel, kAudioLevelExtensionId));
+ kRtpExtensionTransmissionTimeOffset,
+ kTransmissionTimeOffsetExtensionId));
+ EXPECT_EQ(
+ 0, rtp_sender_->RegisterRtpHeaderExtension(kRtpExtensionAbsoluteSendTime,
+ kAbsoluteSendTimeExtensionId));
+ EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(kRtpExtensionAudioLevel,
+ kAudioLevelExtensionId));
EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
kRtpExtensionTransportSequenceNumber,
kTransportSequenceNumberExtensionId));
@@ -573,7 +579,7 @@ TEST_F(RtpSenderTestWithoutPacer, BuildRTPPacketWithHeaderExtensions) {
webrtc::RTPHeader rtp_header;
// Updating audio level is done in RTPSenderAudio, so simulate it here.
- rtp_parser.Parse(rtp_header);
+ rtp_parser.Parse(&rtp_header);
rtp_sender_->UpdateAudioLevel(packet_, length, rtp_header, true, kAudioLevel);
RtpHeaderExtensionMap map;
@@ -583,7 +589,7 @@ TEST_F(RtpSenderTestWithoutPacer, BuildRTPPacketWithHeaderExtensions) {
map.Register(kRtpExtensionAudioLevel, kAudioLevelExtensionId);
map.Register(kRtpExtensionTransportSequenceNumber,
kTransportSequenceNumberExtensionId);
- const bool valid_rtp_header = rtp_parser.Parse(rtp_header, &map);
+ const bool valid_rtp_header = rtp_parser.Parse(&rtp_header, &map);
ASSERT_TRUE(valid_rtp_header);
ASSERT_FALSE(rtp_parser.RTCP());
@@ -602,7 +608,7 @@ TEST_F(RtpSenderTestWithoutPacer, BuildRTPPacketWithHeaderExtensions) {
// Parse without map extension
webrtc::RTPHeader rtp_header2;
- const bool valid_rtp_header2 = rtp_parser.Parse(rtp_header2, nullptr);
+ const bool valid_rtp_header2 = rtp_parser.Parse(&rtp_header2, nullptr);
ASSERT_TRUE(valid_rtp_header2);
VerifyRTPHeaderCommon(rtp_header2);
@@ -626,9 +632,11 @@ TEST_F(RtpSenderTest, TrafficSmoothingWithExtensions) {
rtp_sender_->SetStorePacketsStatus(true, 10);
EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
- kRtpExtensionTransmissionTimeOffset, kTransmissionTimeOffsetExtensionId));
- EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
- kRtpExtensionAbsoluteSendTime, kAbsoluteSendTimeExtensionId));
+ kRtpExtensionTransmissionTimeOffset,
+ kTransmissionTimeOffsetExtensionId));
+ EXPECT_EQ(
+ 0, rtp_sender_->RegisterRtpHeaderExtension(kRtpExtensionAbsoluteSendTime,
+ kAbsoluteSendTimeExtensionId));
rtp_sender_->SetTargetBitrate(300000);
int64_t capture_time_ms = fake_clock_.TimeInMilliseconds();
int rtp_length_int = rtp_sender_->BuildRTPheader(
@@ -659,7 +667,7 @@ TEST_F(RtpSenderTest, TrafficSmoothingWithExtensions) {
map.Register(kRtpExtensionTransmissionTimeOffset,
kTransmissionTimeOffsetExtensionId);
map.Register(kRtpExtensionAbsoluteSendTime, kAbsoluteSendTimeExtensionId);
- const bool valid_rtp_header = rtp_parser.Parse(rtp_header, &map);
+ const bool valid_rtp_header = rtp_parser.Parse(&rtp_header, &map);
ASSERT_TRUE(valid_rtp_header);
// Verify transmission time offset.
@@ -676,9 +684,11 @@ TEST_F(RtpSenderTest, TrafficSmoothingRetransmits) {
rtp_sender_->SetStorePacketsStatus(true, 10);
EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
- kRtpExtensionTransmissionTimeOffset, kTransmissionTimeOffsetExtensionId));
- EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
- kRtpExtensionAbsoluteSendTime, kAbsoluteSendTimeExtensionId));
+ kRtpExtensionTransmissionTimeOffset,
+ kTransmissionTimeOffsetExtensionId));
+ EXPECT_EQ(
+ 0, rtp_sender_->RegisterRtpHeaderExtension(kRtpExtensionAbsoluteSendTime,
+ kAbsoluteSendTimeExtensionId));
rtp_sender_->SetTargetBitrate(300000);
int64_t capture_time_ms = fake_clock_.TimeInMilliseconds();
int rtp_length_int = rtp_sender_->BuildRTPheader(
@@ -717,7 +727,7 @@ TEST_F(RtpSenderTest, TrafficSmoothingRetransmits) {
map.Register(kRtpExtensionTransmissionTimeOffset,
kTransmissionTimeOffsetExtensionId);
map.Register(kRtpExtensionAbsoluteSendTime, kAbsoluteSendTimeExtensionId);
- const bool valid_rtp_header = rtp_parser.Parse(rtp_header, &map);
+ const bool valid_rtp_header = rtp_parser.Parse(&rtp_header, &map);
ASSERT_TRUE(valid_rtp_header);
// Verify transmission time offset.
@@ -740,10 +750,12 @@ TEST_F(RtpSenderTest, SendPadding) {
rtp_sender_->SetStorePacketsStatus(true, 10);
size_t rtp_header_len = kRtpHeaderSize;
EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
- kRtpExtensionTransmissionTimeOffset, kTransmissionTimeOffsetExtensionId));
+ kRtpExtensionTransmissionTimeOffset,
+ kTransmissionTimeOffsetExtensionId));
rtp_header_len += 4; // 4 bytes extension.
- EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
- kRtpExtensionAbsoluteSendTime, kAbsoluteSendTimeExtensionId));
+ EXPECT_EQ(
+ 0, rtp_sender_->RegisterRtpHeaderExtension(kRtpExtensionAbsoluteSendTime,
+ kAbsoluteSendTimeExtensionId));
rtp_header_len += 4; // 4 bytes extension.
rtp_header_len += 4; // 4 extra bytes common to all extension headers.
@@ -815,8 +827,8 @@ TEST_F(RtpSenderTest, SendPadding) {
// Send a regular video packet again.
capture_time_ms = fake_clock_.TimeInMilliseconds();
- rtp_length_int = rtp_sender_->BuildRTPheader(
- packet_, kPayload, kMarkerBit, timestamp, capture_time_ms);
+ rtp_length_int = rtp_sender_->BuildRTPheader(packet_, kPayload, kMarkerBit,
+ timestamp, capture_time_ms);
ASSERT_NE(-1, rtp_length_int);
rtp_length = static_cast<size_t>(rtp_length_int);
@@ -830,8 +842,8 @@ TEST_F(RtpSenderTest, SendPadding) {
EXPECT_EQ(++total_packets_sent, transport_.packets_sent_);
EXPECT_EQ(rtp_length, transport_.last_sent_packet_len_);
// Parse sent packet.
- ASSERT_TRUE(rtp_parser->Parse(transport_.last_sent_packet_, rtp_length,
- &rtp_header));
+ ASSERT_TRUE(
+ rtp_parser->Parse(transport_.last_sent_packet_, rtp_length, &rtp_header));
// Verify sequence number and timestamp.
EXPECT_EQ(seq_num, rtp_header.sequenceNumber);
@@ -858,8 +870,9 @@ TEST_F(RtpSenderTest, SendRedundantPayloads) {
uint16_t seq_num = kSeqNum;
rtp_sender_->SetStorePacketsStatus(true, 10);
int32_t rtp_header_len = kRtpHeaderSize;
- EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
- kRtpExtensionAbsoluteSendTime, kAbsoluteSendTimeExtensionId));
+ EXPECT_EQ(
+ 0, rtp_sender_->RegisterRtpHeaderExtension(kRtpExtensionAbsoluteSendTime,
+ kAbsoluteSendTimeExtensionId));
rtp_header_len += 4; // 4 bytes extension.
rtp_header_len += 4; // 4 extra bytes common to all extension headers.
@@ -876,8 +889,8 @@ TEST_F(RtpSenderTest, SendRedundantPayloads) {
kAbsoluteSendTimeExtensionId);
rtp_sender_->SetTargetBitrate(300000);
const size_t kNumPayloadSizes = 10;
- const size_t kPayloadSizes[kNumPayloadSizes] = {500, 550, 600, 650, 700, 750,
- 800, 850, 900, 950};
+ const size_t kPayloadSizes[kNumPayloadSizes] = {500, 550, 600, 650, 700,
+ 750, 800, 850, 900, 950};
// Send 10 packets of increasing size.
for (size_t i = 0; i < kNumPayloadSizes; ++i) {
int64_t capture_time_ms = fake_clock_.TimeInMilliseconds();
@@ -921,10 +934,10 @@ TEST_F(RtpSenderTestWithoutPacer, SendGenericVideo) {
RtpUtility::RtpHeaderParser rtp_parser(transport_.last_sent_packet_,
transport_.last_sent_packet_len_);
webrtc::RTPHeader rtp_header;
- ASSERT_TRUE(rtp_parser.Parse(rtp_header));
+ ASSERT_TRUE(rtp_parser.Parse(&rtp_header));
- const uint8_t* payload_data = GetPayloadData(rtp_header,
- transport_.last_sent_packet_);
+ const uint8_t* payload_data =
+ GetPayloadData(rtp_header, transport_.last_sent_packet_);
uint8_t generic_header = *payload_data++;
ASSERT_EQ(sizeof(payload) + sizeof(generic_header),
@@ -946,7 +959,7 @@ TEST_F(RtpSenderTestWithoutPacer, SendGenericVideo) {
RtpUtility::RtpHeaderParser rtp_parser2(transport_.last_sent_packet_,
transport_.last_sent_packet_len_);
- ASSERT_TRUE(rtp_parser.Parse(rtp_header));
+ ASSERT_TRUE(rtp_parser.Parse(&rtp_header));
payload_data = GetPayloadData(rtp_header, transport_.last_sent_packet_);
generic_header = *payload_data++;
@@ -1043,9 +1056,8 @@ TEST_F(RtpSenderTest, BitrateCallbacks) {
char payload_name[RTP_PAYLOAD_NAME_SIZE] = "GENERIC";
const uint8_t payload_type = 127;
- ASSERT_EQ(
- 0,
- rtp_sender_->RegisterPayload(payload_name, payload_type, 90000, 0, 1500));
+ ASSERT_EQ(0, rtp_sender_->RegisterPayload(payload_name, payload_type, 90000,
+ 0, 1500));
uint8_t payload[] = {47, 11, 32, 93, 89};
rtp_sender_->SetStorePacketsStatus(true, 1);
uint32_t ssrc = rtp_sender_->SSRC();
@@ -1057,13 +1069,8 @@ TEST_F(RtpSenderTest, BitrateCallbacks) {
// Send a few frames.
for (uint32_t i = 0; i < kNumPackets; ++i) {
ASSERT_EQ(0,
- rtp_sender_->SendOutgoingData(kVideoFrameKey,
- payload_type,
- 1234,
- 4321,
- payload,
- sizeof(payload),
- 0));
+ rtp_sender_->SendOutgoingData(kVideoFrameKey, payload_type, 1234,
+ 4321, payload, sizeof(payload), 0));
fake_clock_.AdvanceTimeMilliseconds(kPacketInterval);
}
@@ -1100,8 +1107,7 @@ class RtpSenderAudioTest : public RtpSenderTest {
TEST_F(RtpSenderTestWithoutPacer, StreamDataCountersCallbacks) {
class TestCallback : public StreamDataCountersCallback {
public:
- TestCallback()
- : StreamDataCountersCallback(), ssrc_(0), counters_() {}
+ TestCallback() : StreamDataCountersCallback(), ssrc_(0), counters_() {}
virtual ~TestCallback() {}
void DataCountersUpdated(const StreamDataCounters& counters,
@@ -1127,7 +1133,6 @@ TEST_F(RtpSenderTestWithoutPacer, StreamDataCountersCallbacks) {
MatchPacketCounter(counters.retransmitted, counters_.retransmitted);
EXPECT_EQ(counters.fec.packets, counters_.fec.packets);
}
-
} callback;
const uint8_t kRedPayloadType = 96;
@@ -1212,10 +1217,10 @@ TEST_F(RtpSenderAudioTest, SendAudio) {
RtpUtility::RtpHeaderParser rtp_parser(transport_.last_sent_packet_,
transport_.last_sent_packet_len_);
webrtc::RTPHeader rtp_header;
- ASSERT_TRUE(rtp_parser.Parse(rtp_header));
+ ASSERT_TRUE(rtp_parser.Parse(&rtp_header));
- const uint8_t* payload_data = GetPayloadData(rtp_header,
- transport_.last_sent_packet_);
+ const uint8_t* payload_data =
+ GetPayloadData(rtp_header, transport_.last_sent_packet_);
ASSERT_EQ(sizeof(payload),
GetPayloadDataLength(rtp_header, transport_.last_sent_packet_len_));
@@ -1225,8 +1230,8 @@ TEST_F(RtpSenderAudioTest, SendAudio) {
TEST_F(RtpSenderAudioTest, SendAudioWithAudioLevelExtension) {
EXPECT_EQ(0, rtp_sender_->SetAudioLevel(kAudioLevel));
- EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
- kRtpExtensionAudioLevel, kAudioLevelExtensionId));
+ EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(kRtpExtensionAudioLevel,
+ kAudioLevelExtensionId));
char payload_name[RTP_PAYLOAD_NAME_SIZE] = "PAYLOAD_NAME";
const uint8_t payload_type = 127;
@@ -1241,21 +1246,22 @@ TEST_F(RtpSenderAudioTest, SendAudioWithAudioLevelExtension) {
RtpUtility::RtpHeaderParser rtp_parser(transport_.last_sent_packet_,
transport_.last_sent_packet_len_);
webrtc::RTPHeader rtp_header;
- ASSERT_TRUE(rtp_parser.Parse(rtp_header));
+ ASSERT_TRUE(rtp_parser.Parse(&rtp_header));
- const uint8_t* payload_data = GetPayloadData(rtp_header,
- transport_.last_sent_packet_);
+ const uint8_t* payload_data =
+ GetPayloadData(rtp_header, transport_.last_sent_packet_);
ASSERT_EQ(sizeof(payload),
GetPayloadDataLength(rtp_header, transport_.last_sent_packet_len_));
EXPECT_EQ(0, memcmp(payload, payload_data, sizeof(payload)));
- uint8_t extension[] = { 0xbe, 0xde, 0x00, 0x01,
- (kAudioLevelExtensionId << 4) + 0, // ID + length.
- kAudioLevel, // Data.
- 0x00, 0x00 // Padding.
- };
+ uint8_t extension[] = {
+ 0xbe, 0xde, 0x00, 0x01,
+ (kAudioLevelExtensionId << 4) + 0, // ID + length.
+ kAudioLevel, // Data.
+ 0x00, 0x00 // Padding.
+ };
EXPECT_EQ(0, memcmp(extension, payload_data - sizeof(extension),
sizeof(extension)));
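The expected bytes are the RFC 5285 one-byte-header form: a 0xBEDE cookie, a length in 32-bit words, one element whose nibbles are (ID, data length - 1), then padding to a word boundary. The same block built field by field (sketch, reusing the test's constants):

    uint8_t ext[8];
    ByteWriter<uint16_t>::WriteBigEndian(ext + 0, 0xBEDE);  // one-byte-header cookie
    ByteWriter<uint16_t>::WriteBigEndian(ext + 2, 1);       // payload: one word
    ext[4] = (kAudioLevelExtensionId << 4) | 0;             // ID, len - 1 = 0
    ext[5] = kAudioLevel;                                   // V flag + level
    ext[6] = 0;                                             // padding
    ext[7] = 0;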
@@ -1270,14 +1276,14 @@ TEST_F(RtpSenderAudioTest, SendAudioWithAudioLevelExtension) {
TEST_F(RtpSenderAudioTest, CheckMarkerBitForTelephoneEvents) {
char payload_name[RTP_PAYLOAD_NAME_SIZE] = "telephone-event";
uint8_t payload_type = 126;
- ASSERT_EQ(0, rtp_sender_->RegisterPayload(payload_name, payload_type, 0,
- 0, 0));
+ ASSERT_EQ(0,
+ rtp_sender_->RegisterPayload(payload_name, payload_type, 0, 0, 0));
// For telephone events the payload is not added to the registered payload
// list; only the payload used for the audio stream is registered.
// Register the payload again for the audio stream, under a different name.
- strcpy(payload_name, "payload_name");
- ASSERT_EQ(0, rtp_sender_->RegisterPayload(payload_name, payload_type, 8000,
- 1, 0));
+ const char kPayloadName[] = "payload_name";
+ ASSERT_EQ(
+ 0, rtp_sender_->RegisterPayload(kPayloadName, payload_type, 8000, 1, 0));
int64_t capture_time_ms = fake_clock_.TimeInMilliseconds();
// DTMF event: key=9, duration=500 ms, attenuation=10 dB.
rtp_sender_->SendTelephoneEvent(9, 500, 10);
@@ -1298,8 +1304,7 @@ TEST_F(RtpSenderAudioTest, CheckMarkerBitForTelephoneEvents) {
ASSERT_TRUE(rtp_parser.get() != nullptr);
webrtc::RTPHeader rtp_header;
ASSERT_TRUE(rtp_parser->Parse(transport_.last_sent_packet_,
- transport_.last_sent_packet_len_,
- &rtp_header));
+ transport_.last_sent_packet_len_, &rtp_header));
// Marker Bit should be set to 1 for first packet.
EXPECT_TRUE(rtp_header.markerBit);
@@ -1307,8 +1312,7 @@ TEST_F(RtpSenderAudioTest, CheckMarkerBitForTelephoneEvents) {
capture_time_ms + 4000, 0, nullptr,
0, nullptr));
ASSERT_TRUE(rtp_parser->Parse(transport_.last_sent_packet_,
- transport_.last_sent_packet_len_,
- &rtp_header));
+ transport_.last_sent_packet_len_, &rtp_header));
// Marker Bit should be set to 0 for rest of the packets.
EXPECT_FALSE(rtp_header.markerBit);
}
@@ -1321,19 +1325,13 @@ TEST_F(RtpSenderTestWithoutPacer, BytesReportedCorrectly) {
rtp_sender_->SetRtxPayloadType(kPayloadType - 1, kPayloadType);
rtp_sender_->SetRtxStatus(kRtxRetransmitted | kRtxRedundantPayloads);
- ASSERT_EQ(
- 0,
- rtp_sender_->RegisterPayload(kPayloadName, kPayloadType, 90000, 0, 1500));
+ ASSERT_EQ(0, rtp_sender_->RegisterPayload(kPayloadName, kPayloadType, 90000,
+ 0, 1500));
uint8_t payload[] = {47, 11, 32, 93, 89};
- ASSERT_EQ(0,
- rtp_sender_->SendOutgoingData(kVideoFrameKey,
- kPayloadType,
- 1234,
- 4321,
- payload,
- sizeof(payload),
- 0));
+ ASSERT_EQ(
+ 0, rtp_sender_->SendOutgoingData(kVideoFrameKey, kPayloadType, 1234, 4321,
+ payload, sizeof(payload), 0));
// Will send 2 full-size padding packets.
rtp_sender_->TimeToSendPadding(1);
@@ -1353,17 +1351,17 @@ TEST_F(RtpSenderTestWithoutPacer, BytesReportedCorrectly) {
EXPECT_EQ(rtx_stats.transmitted.padding_bytes, 2 * kMaxPaddingSize);
EXPECT_EQ(rtp_stats.transmitted.TotalBytes(),
- rtp_stats.transmitted.payload_bytes +
- rtp_stats.transmitted.header_bytes +
- rtp_stats.transmitted.padding_bytes);
+ rtp_stats.transmitted.payload_bytes +
+ rtp_stats.transmitted.header_bytes +
+ rtp_stats.transmitted.padding_bytes);
EXPECT_EQ(rtx_stats.transmitted.TotalBytes(),
- rtx_stats.transmitted.payload_bytes +
- rtx_stats.transmitted.header_bytes +
- rtx_stats.transmitted.padding_bytes);
+ rtx_stats.transmitted.payload_bytes +
+ rtx_stats.transmitted.header_bytes +
+ rtx_stats.transmitted.padding_bytes);
- EXPECT_EQ(transport_.total_bytes_sent_,
- rtp_stats.transmitted.TotalBytes() +
- rtx_stats.transmitted.TotalBytes());
+ EXPECT_EQ(
+ transport_.total_bytes_sent_,
+ rtp_stats.transmitted.TotalBytes() + rtx_stats.transmitted.TotalBytes());
}
TEST_F(RtpSenderTestWithoutPacer, RespectsNackBitrateLimit) {
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc b/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc
index 66062771de..5a565dfa99 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc
@@ -18,7 +18,7 @@
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/trace_event.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
#include "webrtc/modules/rtp_rtcp/source/producer_fec.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h"
@@ -42,13 +42,13 @@ RTPSenderVideo::RTPSenderVideo(Clock* clock, RTPSenderInterface* rtpSender)
_retransmissionSettings(kRetransmitBaseLayer),
// Generic FEC
- _fec(),
- _fecEnabled(false),
- _payloadTypeRED(-1),
- _payloadTypeFEC(-1),
+ fec_(),
+ fec_enabled_(false),
+ red_payload_type_(-1),
+ fec_payload_type_(-1),
delta_fec_params_(),
key_fec_params_(),
- producer_fec_(&_fec),
+ producer_fec_(&fec_),
_fecOverheadRate(clock, NULL),
_videoBitrate(clock, NULL) {
memset(&delta_fec_params_, 0, sizeof(delta_fec_params_));
@@ -104,7 +104,7 @@ void RTPSenderVideo::SendVideoPacket(uint8_t* data_buffer,
StorageType storage) {
if (_rtpSender.SendToNetwork(data_buffer, payload_length, rtp_header_length,
capture_time_ms, storage,
- RtpPacketSender::kNormalPriority) == 0) {
+ RtpPacketSender::kLowPriority) == 0) {
_videoBitrate.Update(payload_length + rtp_header_length);
TRACE_EVENT_INSTANT2(TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"),
"Video::PacketNormal", "timestamp", capture_timestamp,
@@ -130,7 +130,7 @@ void RTPSenderVideo::SendVideoPacketAsRed(uint8_t* data_buffer,
// Only protect while creating RED and FEC packets, not when sending.
CriticalSectionScoped cs(crit_.get());
red_packet.reset(producer_fec_.BuildRedPacket(
- data_buffer, payload_length, rtp_header_length, _payloadTypeRED));
+ data_buffer, payload_length, rtp_header_length, red_payload_type_));
if (protect) {
producer_fec_.AddRtpPacketAndGenerateFec(data_buffer, payload_length,
rtp_header_length);
@@ -140,7 +140,7 @@ void RTPSenderVideo::SendVideoPacketAsRed(uint8_t* data_buffer,
next_fec_sequence_number =
_rtpSender.AllocateSequenceNumber(num_fec_packets);
fec_packets = producer_fec_.GetFecPackets(
- _payloadTypeRED, _payloadTypeFEC, next_fec_sequence_number,
+ red_payload_type_, fec_payload_type_, next_fec_sequence_number,
rtp_header_length);
RTC_DCHECK_EQ(num_fec_packets, fec_packets.size());
if (_retransmissionSettings & kRetransmitFECPackets)
@@ -150,7 +150,7 @@ void RTPSenderVideo::SendVideoPacketAsRed(uint8_t* data_buffer,
if (_rtpSender.SendToNetwork(
red_packet->data(), red_packet->length() - rtp_header_length,
rtp_header_length, capture_time_ms, media_packet_storage,
- RtpPacketSender::kNormalPriority) == 0) {
+ RtpPacketSender::kLowPriority) == 0) {
_videoBitrate.Update(red_packet->length());
TRACE_EVENT_INSTANT2(TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"),
"Video::PacketRed", "timestamp", capture_timestamp,
@@ -162,7 +162,7 @@ void RTPSenderVideo::SendVideoPacketAsRed(uint8_t* data_buffer,
if (_rtpSender.SendToNetwork(
fec_packet->data(), fec_packet->length() - rtp_header_length,
rtp_header_length, capture_time_ms, fec_storage,
- RtpPacketSender::kNormalPriority) == 0) {
+ RtpPacketSender::kLowPriority) == 0) {
_fecOverheadRate.Update(fec_packet->length());
TRACE_EVENT_INSTANT2(TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"),
"Video::PacketFec", "timestamp", capture_timestamp,
@@ -180,9 +180,9 @@ void RTPSenderVideo::SetGenericFECStatus(const bool enable,
const uint8_t payloadTypeRED,
const uint8_t payloadTypeFEC) {
CriticalSectionScoped cs(crit_.get());
- _fecEnabled = enable;
- _payloadTypeRED = payloadTypeRED;
- _payloadTypeFEC = payloadTypeFEC;
+ fec_enabled_ = enable;
+ red_payload_type_ = payloadTypeRED;
+ fec_payload_type_ = payloadTypeFEC;
memset(&delta_fec_params_, 0, sizeof(delta_fec_params_));
memset(&key_fec_params_, 0, sizeof(key_fec_params_));
delta_fec_params_.max_fec_frames = key_fec_params_.max_fec_frames = 1;
@@ -190,18 +190,18 @@ void RTPSenderVideo::SetGenericFECStatus(const bool enable,
kFecMaskRandom;
}
-void RTPSenderVideo::GenericFECStatus(bool& enable,
- uint8_t& payloadTypeRED,
- uint8_t& payloadTypeFEC) const {
+void RTPSenderVideo::GenericFECStatus(bool* enable,
+ uint8_t* payloadTypeRED,
+ uint8_t* payloadTypeFEC) const {
CriticalSectionScoped cs(crit_.get());
- enable = _fecEnabled;
- payloadTypeRED = _payloadTypeRED;
- payloadTypeFEC = _payloadTypeFEC;
+ *enable = fec_enabled_;
+ *payloadTypeRED = red_payload_type_;
+ *payloadTypeFEC = fec_payload_type_;
}
size_t RTPSenderVideo::FECPacketOverhead() const {
CriticalSectionScoped cs(crit_.get());
- if (_fecEnabled) {
+ if (fec_enabled_) {
// Overhead is FEC headers plus RED for FEC header plus anything in RTP
// header beyond the 12-byte base header (CSRC list, extensions...).
// The reason for the header extensions to be included here is that
@@ -247,7 +247,7 @@ int32_t RTPSenderVideo::SendVideo(const RtpVideoCodecTypes videoType,
frameType == kVideoFrameKey ? &key_fec_params_ : &delta_fec_params_;
producer_fec_.SetFecParameters(fec_params, 0);
storage = packetizer->GetStorageType(_retransmissionSettings);
- fec_enabled = _fecEnabled;
+ fec_enabled = fec_enabled_;
}
// Register CVO rtp header extension at the first time when we receive a frame
@@ -304,7 +304,7 @@ int32_t RTPSenderVideo::SendVideo(const RtpVideoCodecTypes videoType,
size_t packetSize = payloadSize + rtp_header_length;
RtpUtility::RtpHeaderParser rtp_parser(dataBuffer, packetSize);
RTPHeader rtp_header;
- rtp_parser.Parse(rtp_header);
+ rtp_parser.Parse(&rtp_header);
_rtpSender.UpdateVideoRotation(dataBuffer, packetSize, rtp_header,
rtpHdr->rotation);
}
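Note on the kNormalPriority → kLowPriority changes in this file: media video packets, and the RED/FEC packets wrapping them, are now enqueued at low priority so a priority-aware pacer can drain more urgent traffic first. A minimal sketch of that queueing idea, assuming a pacer that pops strictly by priority class and FIFO within a class (the types and names below are illustrative, not the actual WebRTC pacer):

#include <cstdint>
#include <queue>
#include <vector>

enum class PacketPriority { kHigh = 0, kNormal = 1, kLow = 2 };

struct QueuedPacket {
  PacketPriority priority;
  int64_t enqueue_order;  // FIFO tie-breaker within one priority class.
};

struct ByPriority {
  bool operator()(const QueuedPacket& a, const QueuedPacket& b) const {
    if (a.priority != b.priority)
      return a.priority > b.priority;          // Smaller enum value pops first.
    return a.enqueue_order > b.enqueue_order;  // Older packet pops first.
  }
};

using PacerQueue =
    std::priority_queue<QueuedPacket, std::vector<QueuedPacket>, ByPriority>;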
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h b/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h
index f412542d86..e59321ab93 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h
+++ b/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h
@@ -16,7 +16,7 @@
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/common_types.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/bitrate.h"
#include "webrtc/modules/rtp_rtcp/source/forward_error_correction.h"
#include "webrtc/modules/rtp_rtcp/source/producer_fec.h"
@@ -67,9 +67,9 @@ class RTPSenderVideo {
const uint8_t payloadTypeRED,
const uint8_t payloadTypeFEC);
- void GenericFECStatus(bool& enable,
- uint8_t& payloadTypeRED,
- uint8_t& payloadTypeFEC) const;
+ void GenericFECStatus(bool* enable,
+ uint8_t* payloadTypeRED,
+ uint8_t* payloadTypeFEC) const;
void SetFecParameters(const FecProtectionParams* delta_params,
const FecProtectionParams* key_params);
@@ -82,7 +82,7 @@ class RTPSenderVideo {
int SelectiveRetransmissions() const;
void SetSelectiveRetransmissions(uint8_t settings);
-private:
+ private:
void SendVideoPacket(uint8_t* dataBuffer,
const size_t payloadLength,
const size_t rtpHeaderLength,
@@ -110,10 +110,10 @@ private:
int32_t _retransmissionSettings GUARDED_BY(crit_);
// FEC
- ForwardErrorCorrection _fec;
- bool _fecEnabled GUARDED_BY(crit_);
- int8_t _payloadTypeRED GUARDED_BY(crit_);
- int8_t _payloadTypeFEC GUARDED_BY(crit_);
+ ForwardErrorCorrection fec_;
+ bool fec_enabled_ GUARDED_BY(crit_);
+ int8_t red_payload_type_ GUARDED_BY(crit_);
+ int8_t fec_payload_type_ GUARDED_BY(crit_);
FecProtectionParams delta_fec_params_ GUARDED_BY(crit_);
FecProtectionParams key_fec_params_ GUARDED_BY(crit_);
ProducerFec producer_fec_ GUARDED_BY(crit_);
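The GenericFECStatus() signature change above (and in the .cc file) is part of a wider cleanup replacing non-const reference out-parameters with pointers, per the Google C++ style guide, so mutation is visible at the call site. A sketch of the caller-side migration (the object name is hypothetical):

bool fec_enabled = false;
uint8_t red_payload_type = 0;
uint8_t fec_payload_type = 0;

// Before: rtp_sender_video.GenericFECStatus(fec_enabled, red_payload_type,
//                                           fec_payload_type);
// After — the &s make it obvious these arguments are written, not read:
rtp_sender_video.GenericFECStatus(&fec_enabled, &red_payload_type,
                                  &fec_payload_type);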
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_utility.cc b/webrtc/modules/rtp_rtcp/source/rtp_utility.cc
index bf0b30a064..0f0ad835b1 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_utility.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_utility.cc
@@ -10,38 +10,10 @@
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
-#include <assert.h>
-#include <math.h> // ceil
-#include <string.h> // memcpy
-
-#if defined(_WIN32)
-// Order for these headers are important
-#include <winsock2.h> // timeval
-#include <windows.h> // FILETIME
-#include <MMSystem.h> // timeGetTime
-#elif ((defined WEBRTC_LINUX) || (defined WEBRTC_MAC))
-#include <sys/time.h> // gettimeofday
-#include <time.h>
-#endif
-#if (defined(_DEBUG) && defined(_WIN32) && (_MSC_VER >= 1400))
-#include <stdio.h>
-#endif
+#include <string.h>
#include "webrtc/base/logging.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
-
-#if (defined(_DEBUG) && defined(_WIN32) && (_MSC_VER >= 1400))
-#define DEBUG_PRINT(...) \
- { \
- char msg[256]; \
- sprintf(msg, __VA_ARGS__); \
- OutputDebugString(msg); \
- }
-#else
-// special fix for visual 2003
-#define DEBUG_PRINT(exp) ((void)0)
-#endif // defined(_DEBUG) && defined(_WIN32)
namespace webrtc {
@@ -77,50 +49,18 @@ enum {
};
/*
- * Time routines.
- */
-
-uint32_t GetCurrentRTP(Clock* clock, uint32_t freq) {
- const bool use_global_clock = (clock == NULL);
- Clock* local_clock = clock;
- if (use_global_clock) {
- local_clock = Clock::GetRealTimeClock();
- }
- uint32_t secs = 0, frac = 0;
- local_clock->CurrentNtp(secs, frac);
- if (use_global_clock) {
- delete local_clock;
- }
- return ConvertNTPTimeToRTP(secs, frac, freq);
-}
-
-uint32_t ConvertNTPTimeToRTP(uint32_t NTPsec, uint32_t NTPfrac, uint32_t freq) {
- float ftemp = (float)NTPfrac / (float)NTP_FRAC;
- uint32_t tmp = (uint32_t)(ftemp * freq);
- return NTPsec * freq + tmp;
-}
-
-uint32_t ConvertNTPTimeToMS(uint32_t NTPsec, uint32_t NTPfrac) {
- int freq = 1000;
- float ftemp = (float)NTPfrac / (float)NTP_FRAC;
- uint32_t tmp = (uint32_t)(ftemp * freq);
- uint32_t MStime = NTPsec * freq + tmp;
- return MStime;
-}
-
-/*
* Misc utility routines
*/
#if defined(_WIN32)
bool StringCompare(const char* str1, const char* str2,
const uint32_t length) {
- return (_strnicmp(str1, str2, length) == 0) ? true : false;
+ return _strnicmp(str1, str2, length) == 0;
}
#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
bool StringCompare(const char* str1, const char* str2,
const uint32_t length) {
- return (strncasecmp(str1, str2, length) == 0) ? true : false;
+ return strncasecmp(str1, str2, length) == 0;
}
#endif
@@ -131,10 +71,6 @@ size_t Word32Align(size_t size) {
return size;
}
-uint32_t pow2(uint8_t exp) {
- return 1 << exp;
-}
-
RtpHeaderParser::RtpHeaderParser(const uint8_t* rtpData,
const size_t rtpDataLength)
: _ptrRTPDataBegin(rtpData),
@@ -244,7 +180,7 @@ bool RtpHeaderParser::ParseRtcp(RTPHeader* header) const {
return true;
}
-bool RtpHeaderParser::Parse(RTPHeader& header,
+bool RtpHeaderParser::Parse(RTPHeader* header,
RtpHeaderExtensionMap* ptrExtensionMap) const {
const ptrdiff_t length = _ptrRTPDataEnd - _ptrRTPDataBegin;
if (length < kRtpMinParseLength) {
@@ -283,39 +219,39 @@ bool RtpHeaderParser::Parse(RTPHeader& header,
return false;
}
- header.markerBit = M;
- header.payloadType = PT;
- header.sequenceNumber = sequenceNumber;
- header.timestamp = RTPTimestamp;
- header.ssrc = SSRC;
- header.numCSRCs = CC;
- header.paddingLength = P ? *(_ptrRTPDataEnd - 1) : 0;
+ header->markerBit = M;
+ header->payloadType = PT;
+ header->sequenceNumber = sequenceNumber;
+ header->timestamp = RTPTimestamp;
+ header->ssrc = SSRC;
+ header->numCSRCs = CC;
+ header->paddingLength = P ? *(_ptrRTPDataEnd - 1) : 0;
for (uint8_t i = 0; i < CC; ++i) {
uint32_t CSRC = ByteReader<uint32_t>::ReadBigEndian(ptr);
ptr += 4;
- header.arrOfCSRCs[i] = CSRC;
+ header->arrOfCSRCs[i] = CSRC;
}
- header.headerLength = 12 + CSRCocts;
+ header->headerLength = 12 + CSRCocts;
// If in effect, MAY be omitted for those packets for which the offset
// is zero.
- header.extension.hasTransmissionTimeOffset = false;
- header.extension.transmissionTimeOffset = 0;
+ header->extension.hasTransmissionTimeOffset = false;
+ header->extension.transmissionTimeOffset = 0;
// May not be present in packet.
- header.extension.hasAbsoluteSendTime = false;
- header.extension.absoluteSendTime = 0;
+ header->extension.hasAbsoluteSendTime = false;
+ header->extension.absoluteSendTime = 0;
// May not be present in packet.
- header.extension.hasAudioLevel = false;
- header.extension.voiceActivity = false;
- header.extension.audioLevel = 0;
+ header->extension.hasAudioLevel = false;
+ header->extension.voiceActivity = false;
+ header->extension.audioLevel = 0;
// May not be present in packet.
- header.extension.hasVideoRotation = false;
- header.extension.videoRotation = 0;
+ header->extension.hasVideoRotation = false;
+ header->extension.videoRotation = 0;
if (X) {
/* RTP header extension, RFC 3550.
@@ -332,7 +268,7 @@ bool RtpHeaderParser::Parse(RTPHeader& header,
return false;
}
- header.headerLength += 4;
+ header->headerLength += 4;
uint16_t definedByProfile = ByteReader<uint16_t>::ReadBigEndian(ptr);
ptr += 2;
@@ -352,15 +288,16 @@ bool RtpHeaderParser::Parse(RTPHeader& header,
ptrRTPDataExtensionEnd,
ptr);
}
- header.headerLength += XLen;
+ header->headerLength += XLen;
}
- if (header.headerLength + header.paddingLength > static_cast<size_t>(length))
+ if (header->headerLength + header->paddingLength >
+ static_cast<size_t>(length))
return false;
return true;
}
void RtpHeaderParser::ParseOneByteExtensionHeader(
- RTPHeader& header,
+ RTPHeader* header,
const RtpHeaderExtensionMap* ptrExtensionMap,
const uint8_t* ptrRTPDataExtensionEnd,
const uint8_t* ptr) const {
@@ -377,8 +314,8 @@ void RtpHeaderParser::ParseOneByteExtensionHeader(
// Note that 'len' is the header extension element length, which is the
// number of bytes - 1.
- const uint8_t id = (*ptr & 0xf0) >> 4;
- const uint8_t len = (*ptr & 0x0f);
+ const int id = (*ptr & 0xf0) >> 4;
+ const int len = (*ptr & 0x0f);
ptr++;
if (id == 15) {
@@ -390,8 +327,7 @@ void RtpHeaderParser::ParseOneByteExtensionHeader(
RTPExtensionType type;
if (ptrExtensionMap->GetType(id, &type) != 0) {
// If we encounter an unknown extension, just skip over it.
- LOG(LS_WARNING) << "Failed to find extension id: "
- << static_cast<int>(id);
+ LOG(LS_WARNING) << "Failed to find extension id: " << id;
} else {
switch (type) {
case kRtpExtensionTransmissionTimeOffset: {
@@ -406,9 +342,9 @@ void RtpHeaderParser::ParseOneByteExtensionHeader(
// | ID | len=2 | transmission offset |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- header.extension.transmissionTimeOffset =
+ header->extension.transmissionTimeOffset =
ByteReader<int32_t, 3>::ReadBigEndian(ptr);
- header.extension.hasTransmissionTimeOffset = true;
+ header->extension.hasTransmissionTimeOffset = true;
break;
}
case kRtpExtensionAudioLevel: {
@@ -422,9 +358,9 @@ void RtpHeaderParser::ParseOneByteExtensionHeader(
// | ID | len=0 |V| level |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
//
- header.extension.audioLevel = ptr[0] & 0x7f;
- header.extension.voiceActivity = (ptr[0] & 0x80) != 0;
- header.extension.hasAudioLevel = true;
+ header->extension.audioLevel = ptr[0] & 0x7f;
+ header->extension.voiceActivity = (ptr[0] & 0x80) != 0;
+ header->extension.hasAudioLevel = true;
break;
}
case kRtpExtensionAbsoluteSendTime: {
@@ -438,9 +374,9 @@ void RtpHeaderParser::ParseOneByteExtensionHeader(
// | ID | len=2 | absolute send time |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- header.extension.absoluteSendTime =
+ header->extension.absoluteSendTime =
ByteReader<uint32_t, 3>::ReadBigEndian(ptr);
- header.extension.hasAbsoluteSendTime = true;
+ header->extension.hasAbsoluteSendTime = true;
break;
}
case kRtpExtensionVideoRotation: {
@@ -454,14 +390,14 @@ void RtpHeaderParser::ParseOneByteExtensionHeader(
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// | ID | len=0 |0 0 0 0 C F R R|
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- header.extension.hasVideoRotation = true;
- header.extension.videoRotation = ptr[0];
+ header->extension.hasVideoRotation = true;
+ header->extension.videoRotation = ptr[0];
break;
}
case kRtpExtensionTransportSequenceNumber: {
if (len != 1) {
- LOG(LS_WARNING)
- << "Incorrect peer connection sequence number len: " << len;
+ LOG(LS_WARNING) << "Incorrect transport sequence number len: "
+ << len;
return;
}
// 0 1 2
@@ -472,8 +408,8 @@ void RtpHeaderParser::ParseOneByteExtensionHeader(
uint16_t sequence_number = ptr[0] << 8;
sequence_number += ptr[1];
- header.extension.transportSequenceNumber = sequence_number;
- header.extension.hasTransportSequenceNumber = true;
+ header->extension.transportSequenceNumber = sequence_number;
+ header->extension.hasTransportSequenceNumber = true;
break;
}
default: {
@@ -502,5 +438,4 @@ uint8_t RtpHeaderParser::ParsePaddingBytes(
return num_zero_bytes;
}
} // namespace RtpUtility
-
} // namespace webrtc
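For reference, the id/len nibble split used by ParseOneByteExtensionHeader() above follows the RFC 5285 one-byte header format: each extension element begins with a byte whose top four bits carry the element ID (15 means stop parsing) and whose bottom four bits encode the data length minus one. A self-contained sketch of that decode step:

#include <cstdint>

struct OneByteExtHeader {
  int id;        // 1..14 are usable IDs; 15 terminates parsing.
  int data_len;  // Actual number of data bytes that follow.
};

inline OneByteExtHeader DecodeOneByteExtHeader(uint8_t first_byte) {
  OneByteExtHeader h;
  h.id = (first_byte & 0xf0) >> 4;
  h.data_len = (first_byte & 0x0f) + 1;  // The wire 'len' field is length - 1.
  return h;
}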
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_utility.h b/webrtc/modules/rtp_rtcp/source/rtp_utility.h
index af20f97e82..23c175356a 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_utility.h
+++ b/webrtc/modules/rtp_rtcp/source/rtp_utility.h
@@ -11,10 +11,11 @@
#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_UTILITY_H_
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_UTILITY_H_
-#include <stddef.h> // size_t, ptrdiff_t
+#include <map>
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
-#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
+#include "webrtc/base/deprecation.h"
+#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_header_extension.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_rtcp_config.h"
#include "webrtc/typedefs.h"
@@ -29,71 +30,48 @@ RtpAudioFeedback* NullObjectRtpAudioFeedback();
ReceiveStatistics* NullObjectReceiveStatistics();
namespace RtpUtility {
- // January 1970, in NTP seconds.
- const uint32_t NTP_JAN_1970 = 2208988800UL;
-
- // Magic NTP fractional unit.
- const double NTP_FRAC = 4.294967296E+9;
-
- struct Payload
- {
- char name[RTP_PAYLOAD_NAME_SIZE];
- bool audio;
- PayloadUnion typeSpecific;
- };
-
- typedef std::map<int8_t, Payload*> PayloadTypeMap;
-
- // Return the current RTP timestamp from the NTP timestamp
- // returned by the specified clock.
- uint32_t GetCurrentRTP(Clock* clock, uint32_t freq);
-
- // Return the current RTP absolute timestamp.
- uint32_t ConvertNTPTimeToRTP(uint32_t NTPsec,
- uint32_t NTPfrac,
- uint32_t freq);
-
- uint32_t pow2(uint8_t exp);
-
- // Returns true if |newTimestamp| is older than |existingTimestamp|.
- // |wrapped| will be set to true if there has been a wraparound between the
- // two timestamps.
- bool OldTimestamp(uint32_t newTimestamp,
- uint32_t existingTimestamp,
- bool* wrapped);
-
- bool StringCompare(const char* str1,
- const char* str2,
- const uint32_t length);
-
- // Round up to the nearest size that is a multiple of 4.
- size_t Word32Align(size_t size);
-
- class RtpHeaderParser {
- public:
- RtpHeaderParser(const uint8_t* rtpData, size_t rtpDataLength);
- ~RtpHeaderParser();
-
- bool RTCP() const;
- bool ParseRtcp(RTPHeader* header) const;
- bool Parse(RTPHeader& parsedPacket,
- RtpHeaderExtensionMap* ptrExtensionMap = NULL) const;
-
- private:
- void ParseOneByteExtensionHeader(
- RTPHeader& parsedPacket,
- const RtpHeaderExtensionMap* ptrExtensionMap,
- const uint8_t* ptrRTPDataExtensionEnd,
- const uint8_t* ptr) const;
-
- uint8_t ParsePaddingBytes(
- const uint8_t* ptrRTPDataExtensionEnd,
- const uint8_t* ptr) const;
-
- const uint8_t* const _ptrRTPDataBegin;
- const uint8_t* const _ptrRTPDataEnd;
- };
+
+struct Payload {
+ char name[RTP_PAYLOAD_NAME_SIZE];
+ bool audio;
+ PayloadUnion typeSpecific;
+};
+
+typedef std::map<int8_t, Payload*> PayloadTypeMap;
+
+bool StringCompare(const char* str1, const char* str2, const uint32_t length);
+
+// Round up to the nearest size that is a multiple of 4.
+size_t Word32Align(size_t size);
+
+class RtpHeaderParser {
+ public:
+ RtpHeaderParser(const uint8_t* rtpData, size_t rtpDataLength);
+ ~RtpHeaderParser();
+
+ bool RTCP() const;
+ bool ParseRtcp(RTPHeader* header) const;
+ bool Parse(RTPHeader* parsedPacket,
+ RtpHeaderExtensionMap* ptrExtensionMap = nullptr) const;
+ RTC_DEPRECATED bool Parse(
+ RTPHeader& parsedPacket, // NOLINT(runtime/references)
+ RtpHeaderExtensionMap* ptrExtensionMap = nullptr) const {
+ return Parse(&parsedPacket, ptrExtensionMap);
+ }
+
+ private:
+ void ParseOneByteExtensionHeader(RTPHeader* parsedPacket,
+ const RtpHeaderExtensionMap* ptrExtensionMap,
+ const uint8_t* ptrRTPDataExtensionEnd,
+ const uint8_t* ptr) const;
+
+ uint8_t ParsePaddingBytes(const uint8_t* ptrRTPDataExtensionEnd,
+ const uint8_t* ptr) const;
+
+ const uint8_t* const _ptrRTPDataBegin;
+ const uint8_t* const _ptrRTPDataEnd;
+};
} // namespace RtpUtility
} // namespace webrtc
-#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_UTILITY_H_
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_UTILITY_H_
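Note that the old reference-taking Parse() survives as an RTC_DEPRECATED inline wrapper forwarding to the pointer overload, so existing callers keep compiling (with a deprecation warning) while they migrate. Typical use of the new signature (buffer names are placeholders):

RtpUtility::RtpHeaderParser parser(packet_buffer, packet_length);
RTPHeader header;
if (parser.Parse(&header)) {
  // header.sequenceNumber, header.timestamp, header.ssrc, ... are now valid.
}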
diff --git a/webrtc/modules/rtp_rtcp/source/ssrc_database.cc b/webrtc/modules/rtp_rtcp/source/ssrc_database.cc
index 6fb7c4701a..fb02b7ef12 100644
--- a/webrtc/modules/rtp_rtcp/source/ssrc_database.cc
+++ b/webrtc/modules/rtp_rtcp/source/ssrc_database.cc
@@ -10,110 +10,51 @@
#include "webrtc/modules/rtp_rtcp/source/ssrc_database.h"
-#include <assert.h>
-#include <stdlib.h>
-
+#include "webrtc/base/checks.h"
+#include "webrtc/system_wrappers/include/clock.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#ifdef _WIN32
- #include <windows.h>
- #include <MMSystem.h> //timeGetTime
-
-// TODO(hellner): investigate if it is necessary to disable these warnings.
- #pragma warning(disable:4311)
- #pragma warning(disable:4312)
-#else
- #include <stdio.h>
- #include <string.h>
- #include <time.h>
- #include <sys/time.h>
-#endif
-
namespace webrtc {
-SSRCDatabase*
-SSRCDatabase::StaticInstance(CountOperation count_operation)
-{
- SSRCDatabase* impl =
- GetStaticInstance<SSRCDatabase>(count_operation);
- return impl;
+namespace {
+uint64_t Seed() {
+ return Clock::GetRealTimeClock()->TimeInMicroseconds();
}
+} // namespace
-SSRCDatabase*
-SSRCDatabase::GetSSRCDatabase()
-{
- return StaticInstance(kAddRef);
+SSRCDatabase* SSRCDatabase::GetSSRCDatabase() {
+ return GetStaticInstance<SSRCDatabase>(kAddRef);
}
-void
-SSRCDatabase::ReturnSSRCDatabase()
-{
- StaticInstance(kRelease);
+void SSRCDatabase::ReturnSSRCDatabase() {
+ GetStaticInstance<SSRCDatabase>(kRelease);
}
-uint32_t
-SSRCDatabase::CreateSSRC()
-{
- CriticalSectionScoped lock(_critSect);
+uint32_t SSRCDatabase::CreateSSRC() {
+ CriticalSectionScoped lock(crit_.get());
- uint32_t ssrc = GenerateRandom();
-
- while(_ssrcMap.find(ssrc) != _ssrcMap.end())
- {
- ssrc = GenerateRandom();
+ while (true) { // Try until we get a new SSRC.
+ // 0 and 0xffffffff are invalid values for SSRC.
+ uint32_t ssrc = random_.Rand(1u, 0xfffffffe);
+ if (ssrcs_.insert(ssrc).second) {
+ return ssrc;
}
- _ssrcMap[ssrc] = 0;
-
- return ssrc;
+ }
}
-int32_t
-SSRCDatabase::RegisterSSRC(const uint32_t ssrc)
-{
- CriticalSectionScoped lock(_critSect);
- _ssrcMap[ssrc] = 0;
- return 0;
+void SSRCDatabase::RegisterSSRC(uint32_t ssrc) {
+ CriticalSectionScoped lock(crit_.get());
+ ssrcs_.insert(ssrc);
}
-int32_t
-SSRCDatabase::ReturnSSRC(const uint32_t ssrc)
-{
- CriticalSectionScoped lock(_critSect);
- _ssrcMap.erase(ssrc);
- return 0;
+void SSRCDatabase::ReturnSSRC(uint32_t ssrc) {
+ CriticalSectionScoped lock(crit_.get());
+ ssrcs_.erase(ssrc);
}
SSRCDatabase::SSRCDatabase()
-{
- // we need to seed the random generator, otherwise we get 26500 each time, hardly a random value :)
-#ifdef _WIN32
- srand(timeGetTime());
-#else
- struct timeval tv;
- struct timezone tz;
- gettimeofday(&tv, &tz);
- srand(tv.tv_usec);
-#endif
+ : crit_(CriticalSectionWrapper::CreateCriticalSection()), random_(Seed()) {}
- _critSect = CriticalSectionWrapper::CreateCriticalSection();
+SSRCDatabase::~SSRCDatabase() {
}
-SSRCDatabase::~SSRCDatabase()
-{
- _ssrcMap.clear();
- delete _critSect;
-}
-
-uint32_t SSRCDatabase::GenerateRandom()
-{
- uint32_t ssrc = 0;
- do
- {
- ssrc = rand();
- ssrc = ssrc <<16;
- ssrc += rand();
-
- } while (ssrc == 0 || ssrc == 0xffffffff);
-
- return ssrc;
-}
} // namespace webrtc
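The rewritten CreateSSRC() draws uniformly from [1, 0xfffffffe] (0 and 0xffffffff are invalid SSRCs) and retries until std::set::insert() reports the value as new, replacing the old srand()/rand() scheme. A minimal sketch of the same loop, using <random> instead of webrtc's Random class purely so the example is self-contained:

#include <cstdint>
#include <random>
#include <set>

uint32_t CreateUniqueSsrc(std::set<uint32_t>* in_use, std::mt19937* rng) {
  // 0 and 0xffffffff are reserved/invalid SSRC values.
  std::uniform_int_distribution<uint32_t> dist(1u, 0xfffffffeu);
  while (true) {
    uint32_t ssrc = dist(*rng);
    if (in_use->insert(ssrc).second)  // insert() fails on a duplicate.
      return ssrc;
  }
}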
diff --git a/webrtc/modules/rtp_rtcp/source/ssrc_database.h b/webrtc/modules/rtp_rtcp/source/ssrc_database.h
index 7129d0de76..7a3133638d 100644
--- a/webrtc/modules/rtp_rtcp/source/ssrc_database.h
+++ b/webrtc/modules/rtp_rtcp/source/ssrc_database.h
@@ -11,43 +11,41 @@
#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_SSRC_DATABASE_H_
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_SSRC_DATABASE_H_
-#include <map>
+#include <set>
+#include "webrtc/base/random.h"
+#include "webrtc/base/scoped_ptr.h"
#include "webrtc/system_wrappers/include/static_instance.h"
#include "webrtc/typedefs.h"
namespace webrtc {
class CriticalSectionWrapper;
-class SSRCDatabase
-{
-public:
- static SSRCDatabase* GetSSRCDatabase();
- static void ReturnSSRCDatabase();
+class SSRCDatabase {
+ public:
+ static SSRCDatabase* GetSSRCDatabase();
+ static void ReturnSSRCDatabase();
- uint32_t CreateSSRC();
- int32_t RegisterSSRC(const uint32_t ssrc);
- int32_t ReturnSSRC(const uint32_t ssrc);
+ uint32_t CreateSSRC();
+ void RegisterSSRC(uint32_t ssrc);
+ void ReturnSSRC(uint32_t ssrc);
-protected:
- SSRCDatabase();
- virtual ~SSRCDatabase();
+ protected:
+ SSRCDatabase();
+ virtual ~SSRCDatabase();
- static SSRCDatabase* CreateInstance() { return new SSRCDatabase(); }
+ static SSRCDatabase* CreateInstance() { return new SSRCDatabase(); }
-private:
- // Friend function to allow the SSRC destructor to be accessed from the
- // template class.
- friend SSRCDatabase* GetStaticInstance<SSRCDatabase>(
- CountOperation count_operation);
- static SSRCDatabase* StaticInstance(CountOperation count_operation);
+ private:
+ // Friend function to allow the SSRC destructor to be accessed from the
+ // template class.
+ friend SSRCDatabase* GetStaticInstance<SSRCDatabase>(
+ CountOperation count_operation);
- uint32_t GenerateRandom();
-
- std::map<uint32_t, uint32_t> _ssrcMap;
-
- CriticalSectionWrapper* _critSect;
+ rtc::scoped_ptr<CriticalSectionWrapper> crit_;
+ Random random_ GUARDED_BY(crit_);
+ std::set<uint32_t> ssrcs_ GUARDED_BY(crit_);
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_SSRC_DATABASE_H_
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_SSRC_DATABASE_H_
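The GUARDED_BY(crit_) annotations above feed Clang's thread-safety analysis: members so tagged may only be touched while crit_ is held, and violations are diagnosed at compile time. A minimal sketch of the pattern, with illustrative names:

#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"

class GuardedCounter {
 public:
  GuardedCounter()
      : crit_(webrtc::CriticalSectionWrapper::CreateCriticalSection()),
        value_(0) {}
  void Increment() {
    webrtc::CriticalSectionScoped lock(crit_.get());
    ++value_;  // OK: crit_ is held here; unlocked access would be flagged.
  }

 private:
  rtc::scoped_ptr<webrtc::CriticalSectionWrapper> crit_;
  int value_ GUARDED_BY(crit_);
};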
diff --git a/webrtc/modules/rtp_rtcp/source/time_util.h b/webrtc/modules/rtp_rtcp/source/time_util.h
new file mode 100644
index 0000000000..5b544ddf9a
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/time_util.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_TIME_UTIL_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_TIME_UTIL_H_
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/system_wrappers/include/ntp_time.h"
+
+namespace webrtc {
+
+// Converts NTP timestamp to RTP timestamp.
+inline uint32_t NtpToRtp(NtpTime ntp, uint32_t freq) {
+ uint32_t tmp = (static_cast<uint64_t>(ntp.fractions()) * freq) >> 32;
+ return ntp.seconds() * freq + tmp;
+}
+// Return the current RTP timestamp from the NTP timestamp
+// returned by the specified clock.
+inline uint32_t CurrentRtp(const Clock& clock, uint32_t freq) {
+ return NtpToRtp(NtpTime(clock), freq);
+}
+
+// Helper function for compact ntp representation:
+// RFC 3550, Section 4. Time Format.
+// Wallclock time is represented using the timestamp format of
+// the Network Time Protocol (NTP).
+// ...
+// In some fields where a more compact representation is
+// appropriate, only the middle 32 bits are used; that is, the low 16
+// bits of the integer part and the high 16 bits of the fractional part.
+inline uint32_t CompactNtp(NtpTime ntp) {
+ return (ntp.seconds() << 16) | (ntp.fractions() >> 16);
+}
+// Converts interval between compact ntp timestamps to milliseconds.
+// This interval can be up to ~18.2 hours (2^16 seconds).
+inline uint32_t CompactNtpIntervalToMs(uint32_t compact_ntp_interval) {
+ return static_cast<uint64_t>(compact_ntp_interval) * 1000 / (1 << 16);
+}
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_TIME_UTIL_H_
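To make the compact NTP arithmetic concrete: CompactNtp() keeps the low 16 bits of the seconds and the high 16 bits of the fraction, so one unit of the result is 2^-16 s (about 15.3 microseconds) and the representation wraps every 2^16 s (about 18.2 hours). A worked example, matching the unit test in the next file:

// seconds = 0x12345678, fractions = 0x23456789
NtpTime t(0x12345678, 0x23456789);
uint32_t compact = CompactNtp(t);  // 0x56782345: low 16 of '5678', high 16 of '2345'.
// One second is 0x00010000 compact units:
uint32_t ms = CompactNtpIntervalToMs(0x00010000);  // 0x10000 * 1000 / 65536 == 1000 ms.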
diff --git a/webrtc/modules/rtp_rtcp/source/time_util_unittest.cc b/webrtc/modules/rtp_rtcp/source/time_util_unittest.cc
new file mode 100644
index 0000000000..7efb83ccad
--- /dev/null
+++ b/webrtc/modules/rtp_rtcp/source/time_util_unittest.cc
@@ -0,0 +1,62 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "webrtc/modules/rtp_rtcp/source/time_util.h"
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace webrtc {
+
+TEST(TimeUtilTest, CompactNtp) {
+ const uint32_t kNtpSec = 0x12345678;
+ const uint32_t kNtpFrac = 0x23456789;
+ const NtpTime kNtp(kNtpSec, kNtpFrac);
+ const uint32_t kNtpMid = 0x56782345;
+ EXPECT_EQ(kNtpMid, CompactNtp(kNtp));
+}
+
+TEST(TimeUtilTest, CompactNtpToMs) {
+ const NtpTime ntp1(0x12345, 0x23456);
+ const NtpTime ntp2(0x12654, 0x64335);
+ uint32_t ms_diff = ntp2.ToMs() - ntp1.ToMs();
+ uint32_t ntp_diff = CompactNtp(ntp2) - CompactNtp(ntp1);
+
+ uint32_t ntp_to_ms_diff = CompactNtpIntervalToMs(ntp_diff);
+
+ EXPECT_NEAR(ms_diff, ntp_to_ms_diff, 1);
+}
+
+TEST(TimeUtilTest, CompactNtpToMsWithWrap) {
+ const NtpTime ntp1(0x1ffff, 0x23456);
+ const NtpTime ntp2(0x20000, 0x64335);
+ uint32_t ms_diff = ntp2.ToMs() - ntp1.ToMs();
+
+ // While ntp2 > ntp1, their compact ntp representations happen to be in
+ // the opposite order. That shouldn't be a problem as long as unsigned
+ // arithmetic is used.
+ ASSERT_GT(ntp2.ToMs(), ntp1.ToMs());
+ ASSERT_LT(CompactNtp(ntp2), CompactNtp(ntp1));
+
+ uint32_t ntp_diff = CompactNtp(ntp2) - CompactNtp(ntp1);
+ uint32_t ntp_to_ms_diff = CompactNtpIntervalToMs(ntp_diff);
+
+ EXPECT_NEAR(ms_diff, ntp_to_ms_diff, 1);
+}
+
+TEST(TimeUtilTest, CompactNtpToMsLarge) {
+ const NtpTime ntp1(0x10000, 0x23456);
+ const NtpTime ntp2(0x1ffff, 0x64335);
+ uint32_t ms_diff = ntp2.ToMs() - ntp1.ToMs();
+ // Ntp difference close to maximum of ~18 hours should convert correctly too.
+ ASSERT_GT(ms_diff, 18u * 3600 * 1000);
+ uint32_t ntp_diff = CompactNtp(ntp2) - CompactNtp(ntp1);
+ uint32_t ntp_to_ms_diff = CompactNtpIntervalToMs(ntp_diff);
+
+ EXPECT_NEAR(ms_diff, ntp_to_ms_diff, 1);
+}
+} // namespace webrtc
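The wrap test above relies on unsigned 32-bit subtraction being modulo 2^32: even when the later timestamp's compact form is numerically smaller, later - earlier still yields the correct forward interval. A tiny illustration:

#include <cstdint>

uint32_t earlier = 0xFFFF0000u;
uint32_t later = 0x00010000u;         // Compact form has wrapped past zero.
uint32_t interval = later - earlier;  // 0x00020000: correct modulo 2^32.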
diff --git a/webrtc/modules/rtp_rtcp/source/tmmbr_help.cc b/webrtc/modules/rtp_rtcp/source/tmmbr_help.cc
index fb1ed625ed..f994ff7049 100644
--- a/webrtc/modules/rtp_rtcp/source/tmmbr_help.cc
+++ b/webrtc/modules/rtp_rtcp/source/tmmbr_help.cc
@@ -11,8 +11,10 @@
#include "webrtc/modules/rtp_rtcp/source/tmmbr_help.h"
#include <assert.h>
-#include <limits>
#include <string.h>
+
+#include <limits>
+
#include "webrtc/modules/rtp_rtcp/source/rtp_rtcp_config.h"
namespace webrtc {
diff --git a/webrtc/modules/rtp_rtcp/source/video_codec_information.h b/webrtc/modules/rtp_rtcp/source/video_codec_information.h
index 456b3bb934..7b819d060f 100644
--- a/webrtc/modules/rtp_rtcp/source/video_codec_information.h
+++ b/webrtc/modules/rtp_rtcp/source/video_codec_information.h
@@ -15,14 +15,13 @@
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
namespace webrtc {
-class VideoCodecInformation
-{
-public:
- virtual void Reset() = 0;
+class VideoCodecInformation {
+ public:
+ virtual void Reset() = 0;
- virtual RtpVideoCodecTypes Type() = 0;
- virtual ~VideoCodecInformation(){};
+ virtual RtpVideoCodecTypes Type() = 0;
+ virtual ~VideoCodecInformation() {}
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_VIDEO_CODEC_INFORMATION_H_
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_VIDEO_CODEC_INFORMATION_H_
diff --git a/webrtc/modules/rtp_rtcp/source/vp8_partition_aggregator.cc b/webrtc/modules/rtp_rtcp/source/vp8_partition_aggregator.cc
index feed784839..9721a7e9ac 100644
--- a/webrtc/modules/rtp_rtcp/source/vp8_partition_aggregator.cc
+++ b/webrtc/modules/rtp_rtcp/source/vp8_partition_aggregator.cc
@@ -37,9 +37,8 @@ PartitionTreeNode::PartitionTreeNode(PartitionTreeNode* parent,
PartitionTreeNode* PartitionTreeNode::CreateRootNode(const size_t* size_vector,
size_t num_partitions) {
- PartitionTreeNode* root_node =
- new PartitionTreeNode(NULL, &size_vector[1], num_partitions - 1,
- size_vector[0]);
+ PartitionTreeNode* root_node = new PartitionTreeNode(
+ NULL, &size_vector[1], num_partitions - 1, size_vector[0]);
root_node->set_packet_start(true);
return root_node;
}
@@ -54,7 +53,7 @@ int PartitionTreeNode::Cost(size_t penalty) {
if (num_partitions_ == 0) {
// This is a solution node.
cost = std::max(max_parent_size_, this_size_int()) -
- std::min(min_parent_size_, this_size_int());
+ std::min(min_parent_size_, this_size_int());
} else {
cost = std::max(max_parent_size_, this_size_int()) - min_parent_size_;
}
@@ -68,9 +67,7 @@ bool PartitionTreeNode::CreateChildren(size_t max_size) {
if (this_size_ + size_vector_[0] <= max_size) {
assert(!children_[kLeftChild]);
children_[kLeftChild] =
- new PartitionTreeNode(this,
- &size_vector_[1],
- num_partitions_ - 1,
+ new PartitionTreeNode(this, &size_vector_[1], num_partitions_ - 1,
this_size_ + size_vector_[0]);
children_[kLeftChild]->set_max_parent_size(max_parent_size_);
children_[kLeftChild]->set_min_parent_size(min_parent_size_);
@@ -80,10 +77,8 @@ bool PartitionTreeNode::CreateChildren(size_t max_size) {
}
if (this_size_ > 0) {
assert(!children_[kRightChild]);
- children_[kRightChild] = new PartitionTreeNode(this,
- &size_vector_[1],
- num_partitions_ - 1,
- size_vector_[0]);
+ children_[kRightChild] = new PartitionTreeNode(
+ this, &size_vector_[1], num_partitions_ - 1, size_vector_[0]);
children_[kRightChild]->set_max_parent_size(
std::max(max_parent_size_, this_size_int()));
children_[kRightChild]->set_min_parent_size(
@@ -148,7 +143,8 @@ PartitionTreeNode* PartitionTreeNode::GetOptimalNode(size_t max_size,
Vp8PartitionAggregator::Vp8PartitionAggregator(
const RTPFragmentationHeader& fragmentation,
- size_t first_partition_idx, size_t last_partition_idx)
+ size_t first_partition_idx,
+ size_t last_partition_idx)
: root_(NULL),
num_partitions_(last_partition_idx - first_partition_idx + 1),
size_vector_(new size_t[num_partitions_]),
@@ -158,14 +154,14 @@ Vp8PartitionAggregator::Vp8PartitionAggregator(
for (size_t i = 0; i < num_partitions_; ++i) {
size_vector_[i] =
fragmentation.fragmentationLength[i + first_partition_idx];
- largest_partition_size_ = std::max(largest_partition_size_,
- size_vector_[i]);
+ largest_partition_size_ =
+ std::max(largest_partition_size_, size_vector_[i]);
}
root_ = PartitionTreeNode::CreateRootNode(size_vector_, num_partitions_);
}
Vp8PartitionAggregator::~Vp8PartitionAggregator() {
- delete [] size_vector_;
+ delete[] size_vector_;
delete root_;
}
@@ -190,14 +186,16 @@ Vp8PartitionAggregator::FindOptimalConfiguration(size_t max_size,
assert(packet_index > 0);
assert(temp_node != NULL);
config_vector[i - 1] = packet_index - 1;
- if (temp_node->packet_start()) --packet_index;
+ if (temp_node->packet_start())
+ --packet_index;
temp_node = temp_node->parent();
}
return config_vector;
}
void Vp8PartitionAggregator::CalcMinMax(const ConfigVec& config,
- int* min_size, int* max_size) const {
+ int* min_size,
+ int* max_size) const {
if (*min_size < 0) {
*min_size = std::numeric_limits<int>::max();
}
@@ -263,8 +261,8 @@ size_t Vp8PartitionAggregator::CalcNumberOfFragments(
}
assert(num_fragments > 0);
// TODO(mflodman) Assert disabled since it's falsely triggered, see issue 293.
- //assert(large_partition_size / num_fragments + 1 <= max_payload_size);
+ // assert(large_partition_size / num_fragments + 1 <= max_payload_size);
return num_fragments;
}
-} // namespace
+} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/vp8_partition_aggregator.h b/webrtc/modules/rtp_rtcp/source/vp8_partition_aggregator.h
index 53b678f3b9..ccd22e5be2 100644
--- a/webrtc/modules/rtp_rtcp/source/vp8_partition_aggregator.h
+++ b/webrtc/modules/rtp_rtcp/source/vp8_partition_aggregator.h
@@ -14,7 +14,7 @@
#include <vector>
#include "webrtc/base/constructormagic.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -132,6 +132,6 @@ class Vp8PartitionAggregator {
RTC_DISALLOW_COPY_AND_ASSIGN(Vp8PartitionAggregator);
};
-} // namespace
+} // namespace webrtc
#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_VP8_PARTITION_AGGREGATOR_H_
diff --git a/webrtc/modules/rtp_rtcp/source/vp8_partition_aggregator_unittest.cc b/webrtc/modules/rtp_rtcp/source/vp8_partition_aggregator_unittest.cc
index 4650c94047..726d83ec50 100644
--- a/webrtc/modules/rtp_rtcp/source/vp8_partition_aggregator_unittest.cc
+++ b/webrtc/modules/rtp_rtcp/source/vp8_partition_aggregator_unittest.cc
@@ -209,4 +209,4 @@ TEST(Vp8PartitionAggregator, TestCalcNumberOfFragments) {
1600, kMTU, 1, 900, 1000));
}
-} // namespace
+} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/test/BWEStandAlone/BWEStandAlone.cc b/webrtc/modules/rtp_rtcp/test/BWEStandAlone/BWEStandAlone.cc
deleted file mode 100644
index 711be4a623..0000000000
--- a/webrtc/modules/rtp_rtcp/test/BWEStandAlone/BWEStandAlone.cc
+++ /dev/null
@@ -1,199 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// BWEStandAlone.cpp : Defines the entry point for the console application.
-//
-
-#include <stdio.h>
-#include <string>
-
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
-#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/system_wrappers/include/trace.h"
-#include "webrtc/test/channel_transport/udp_transport.h"
-
-#include "webrtc/modules/rtp_rtcp/test/BWEStandAlone/TestLoadGenerator.h"
-#include "webrtc/modules/rtp_rtcp/test/BWEStandAlone/TestSenderReceiver.h"
-
-#include "webrtc/modules/rtp_rtcp/test/BWEStandAlone/MatlabPlot.h"
-
-//#include "vld.h"
-
-class myTransportCB: public UdpTransportData
-{
-public:
- myTransportCB (RtpRtcp *rtpMod) : _rtpMod(rtpMod) {};
-protected:
- // Inherited from UdpTransportData
- void IncomingRTPPacket(const int8_t* incomingRtpPacket,
- const size_t rtpPacketLength,
- const int8_t* fromIP,
- const uint16_t fromPort) override;
-
- void IncomingRTCPPacket(const int8_t* incomingRtcpPacket,
- const size_t rtcpPacketLength,
- const int8_t* fromIP,
- const uint16_t fromPort) override;
-
-private:
- RtpRtcp *_rtpMod;
-};
-
-void myTransportCB::IncomingRTPPacket(const int8_t* incomingRtpPacket,
- const size_t rtpPacketLength,
- const int8_t* fromIP,
- const uint16_t fromPort)
-{
- printf("Receiving RTP from IP %s, port %u\n", fromIP, fromPort);
- _rtpMod->IncomingPacket((uint8_t *) incomingRtpPacket, rtpPacketLength);
-}
-
-void myTransportCB::IncomingRTCPPacket(const int8_t* incomingRtcpPacket,
- const size_t rtcpPacketLength,
- const int8_t* fromIP,
- const uint16_t fromPort)
-{
- printf("Receiving RTCP from IP %s, port %u\n", fromIP, fromPort);
- _rtpMod->IncomingPacket((uint8_t *) incomingRtcpPacket, rtcpPacketLength);
-}
-
-
-int main(int argc, char* argv[])
-{
- bool isSender = false;
- bool isReceiver = false;
- uint16_t port;
- std::string ip;
- TestSenderReceiver *sendrec = new TestSenderReceiver();
- TestLoadGenerator *gen;
-
- if (argc == 2)
- {
- // receiver only
- isReceiver = true;
-
- // read port
- port = atoi(argv[1]);
- }
- else if (argc == 3)
- {
- // sender and receiver
- isSender = true;
- isReceiver = true;
-
- // read IP
- ip = argv[1];
-
- // read port
- port = atoi(argv[2]);
- }
-
- Trace::CreateTrace();
- Trace::SetTraceFile("BWEStandAloneTrace.txt");
- Trace::set_level_filter(webrtc::kTraceAll);
-
- sendrec->InitReceiver(port);
-
- sendrec->Start();
-
- if (isSender)
- {
- const uint32_t startRateKbps = 1000;
- //gen = new CBRGenerator(sendrec, 1000, 500);
- gen = new CBRFixFRGenerator(sendrec, startRateKbps, 90000, 30, 0.2);
- //gen = new PeriodicKeyFixFRGenerator(sendrec, startRateKbps, 90000, 30, 0.2, 7, 300);
- //const uint16_t numFrameRates = 5;
- //const uint8_t frameRates[numFrameRates] = {30, 15, 20, 23, 25};
- //gen = new CBRVarFRGenerator(sendrec, 1000, frameRates, numFrameRates, 90000, 4.0, 0.1, 0.2);
- //gen = new CBRFrameDropGenerator(sendrec, startRateKbps, 90000, 0.2);
- sendrec->SetLoadGenerator(gen);
- sendrec->InitSender(startRateKbps, ip.c_str(), port);
- gen->Start();
- }
-
- while (1)
- {
- }
-
- if (isSender)
- {
- gen->Stop();
- delete gen;
- }
-
- delete sendrec;
-
- //uint8_t numberOfSocketThreads = 1;
- //UdpTransport* transport = UdpTransport::Create(0, numberOfSocketThreads);
-
- //RtpRtcp* rtp = RtpRtcp::CreateRtpRtcp(1, false);
- //if (rtp->InitSender() != 0)
- //{
- // exit(1);
- //}
- //if (rtp->RegisterSendTransport(transport) != 0)
- //{
- // exit(1);
- //}
-
-// transport->InitializeSendSockets("192.168.200.39", 8000);
- //transport->InitializeSendSockets("127.0.0.1", 10000);
- //transport->InitializeSourcePorts(8000);
-
-
- return(0);
- // myTransportCB *tp = new myTransportCB(rtp);
- // transport->InitializeReceiveSockets(tp, 10000, "0.0.0.0");
- // transport->StartReceiving(500);
-
- // int8_t data[100];
- // for (int i = 0; i < 100; data[i] = i++);
-
- // for (int i = 0; i < 100; i++)
- // {
- // transport->SendRaw(data, 100, false);
- // }
-
-
-
- // int32_t totTime = 0;
- // while (totTime < 10000)
- // {
- // transport->Process();
- // int32_t wTime = transport->TimeUntilNextProcess();
- // totTime += wTime;
- // Sleep(wTime);
- // }
-
-
- //if (transport)
- //{
- // // Destroy the Socket Transport module
- // transport->StopReceiving();
- // transport->InitializeReceiveSockets(NULL,0);// deregister callback
- // UdpTransport::Destroy(transport);
- // transport = NULL;
- // }
-
- // if (tp)
- // {
- // delete tp;
- // tp = NULL;
- // }
-
- // if (rtp)
- // {
- // RtpRtcp::DestroyRtpRtcp(rtp);
- // rtp = NULL;
- // }
-
-
- //return 0;
-}
diff --git a/webrtc/modules/rtp_rtcp/test/BWEStandAlone/MatlabPlot.cc b/webrtc/modules/rtp_rtcp/test/BWEStandAlone/MatlabPlot.cc
deleted file mode 100644
index fe54d67ee7..0000000000
--- a/webrtc/modules/rtp_rtcp/test/BWEStandAlone/MatlabPlot.cc
+++ /dev/null
@@ -1,1055 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/rtp_rtcp/test/BWEStandAlone/MatlabPlot.h"
-
-#include <math.h>
-#include <stdio.h>
-
-#include <algorithm>
-#include <sstream>
-
-#ifdef MATLAB
-#include "engine.h"
-#endif
-
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
-
-using namespace webrtc;
-
-#ifdef MATLAB
-MatlabEngine eng;
-
-MatlabLine::MatlabLine(int maxLen /*= -1*/, const char *plotAttrib /*= NULL*/, const char *name /*= NULL*/)
-:
-_xArray(NULL),
-_yArray(NULL),
-_maxLen(maxLen),
-_plotAttribute(),
-_name()
-{
- if (_maxLen > 0)
- {
- _xArray = mxCreateDoubleMatrix(1, _maxLen, mxREAL);
- _yArray = mxCreateDoubleMatrix(1, _maxLen, mxREAL);
- }
-
- if (plotAttrib)
- {
- _plotAttribute = plotAttrib;
- }
-
- if (name)
- {
- _name = name;
- }
-}
-
-MatlabLine::~MatlabLine()
-{
- if (_xArray != NULL)
- {
- mxDestroyArray(_xArray);
- }
- if (_yArray != NULL)
- {
- mxDestroyArray(_yArray);
- }
-}
-
-void MatlabLine::Append(double x, double y)
-{
- if (_maxLen > 0 && _xData.size() > static_cast<uint32_t>(_maxLen))
- {
- _xData.resize(_maxLen);
- _yData.resize(_maxLen);
- }
-
- _xData.push_front(x);
- _yData.push_front(y);
-}
-
-
-// append y-data with running integer index as x-data
-void MatlabLine::Append(double y)
-{
- if (_xData.empty())
- {
- // first element is index 0
- Append(0, y);
- }
- else
- {
- // take last x-value and increment
- double temp = _xData.back(); // last x-value
- Append(temp + 1, y);
- }
-}
-
-
-void MatlabLine::SetMaxLen(int maxLen)
-{
- if (maxLen <= 0)
- {
- // means no maxLen
- _maxLen = -1;
- }
- else
- {
- _maxLen = maxLen;
-
- if (_xArray != NULL)
- {
- mxDestroyArray(_xArray);
- mxDestroyArray(_yArray);
- }
- _xArray = mxCreateDoubleMatrix(1, _maxLen, mxREAL);
- _yArray = mxCreateDoubleMatrix(1, _maxLen, mxREAL);
-
- maxLen = ((unsigned int)maxLen <= _xData.size()) ? maxLen : (int)_xData.size();
- _xData.resize(maxLen);
- _yData.resize(maxLen);
-
- //// reserve the right amount of memory
- //_xData.reserve(_maxLen);
- //_yData.reserve(_maxLen);
- }
-}
-
-void MatlabLine::SetAttribute(char *plotAttrib)
-{
- _plotAttribute = plotAttrib;
-}
-
-void MatlabLine::SetName(char *name)
-{
- _name = name;
-}
-
-void MatlabLine::GetPlotData(mxArray** xData, mxArray** yData)
-{
- // Make sure we have enough Matlab allocated memory.
- // Assuming both arrays (x and y) are of the same size.
- if (_xData.empty())
- {
- return; // No data
- }
- unsigned int size = 0;
- if (_xArray != NULL)
- {
- size = (unsigned int)mxGetNumberOfElements(_xArray);
- }
- if (size < _xData.size())
- {
- if (_xArray != NULL)
- {
- mxDestroyArray(_xArray);
- mxDestroyArray(_yArray);
- }
- _xArray = mxCreateDoubleMatrix(1, _xData.size(), mxREAL);
- _yArray = mxCreateDoubleMatrix(1, _yData.size(), mxREAL);
- }
-
- if (!_xData.empty())
- {
- double* x = mxGetPr(_xArray);
-
- std::list<double>::iterator it = _xData.begin();
-
- for (int i = 0; it != _xData.end(); it++, i++)
- {
- x[i] = *it;
- }
- }
-
- if (!_yData.empty())
- {
- double* y = mxGetPr(_yArray);
-
- std::list<double>::iterator it = _yData.begin();
-
- for (int i = 0; it != _yData.end(); it++, i++)
- {
- y[i] = *it;
- }
- }
- *xData = _xArray;
- *yData = _yArray;
-}
-
-std::string MatlabLine::GetXName()
-{
- std::ostringstream xString;
- xString << "x_" << _name;
- return xString.str();
-}
-
-std::string MatlabLine::GetYName()
-{
- std::ostringstream yString;
- yString << "y_" << _name;
- return yString.str();
-}
-
-std::string MatlabLine::GetPlotString()
-{
-
- std::ostringstream s;
-
- if (_xData.size() == 0)
- {
- s << "[0 1], [0 1]"; // To get an empty plot
- }
- else
- {
- s << GetXName() << "(1:" << _xData.size() << "),";
- s << GetYName() << "(1:" << _yData.size() << ")";
- }
-
- s << ", '";
- s << _plotAttribute;
- s << "'";
-
- return s.str();
-}
-
-std::string MatlabLine::GetRefreshString()
-{
- std::ostringstream s;
-
- if (_xData.size() > 0)
- {
- s << "set(h,'xdata',"<< GetXName() <<"(1:" << _xData.size() << "),'ydata',"<< GetYName() << "(1:" << _yData.size() << "));";
- }
- else
- {
- s << "set(h,'xdata',[NaN],'ydata',[NaN]);";
- }
- return s.str();
-}
-
-std::string MatlabLine::GetLegendString()
-{
- return ("'" + _name + "'");
-}
-
-bool MatlabLine::hasLegend()
-{
- return (!_name.empty());
-}
-
-
-// remove data points, but keep attributes
-void MatlabLine::Reset()
-{
- _xData.clear();
- _yData.clear();
-}
-
-
-void MatlabLine::UpdateTrendLine(MatlabLine * sourceData, double slope, double offset)
-{
- Reset(); // reset data, not attributes and name
-
- double thexMin = sourceData->xMin();
- double thexMax = sourceData->xMax();
- Append(thexMin, thexMin * slope + offset);
- Append(thexMax, thexMax * slope + offset);
-}
-
-double MatlabLine::xMin()
-{
- if (!_xData.empty())
- {
- std::list<double>::iterator theStart = _xData.begin();
- std::list<double>::iterator theEnd = _xData.end();
- return(*min_element(theStart, theEnd));
- }
- return (0.0);
-}
-
-double MatlabLine::xMax()
-{
- if (!_xData.empty())
- {
- std::list<double>::iterator theStart = _xData.begin();
- std::list<double>::iterator theEnd = _xData.end();
- return(*max_element(theStart, theEnd));
- }
- return (0.0);
-}
-
-double MatlabLine::yMin()
-{
- if (!_yData.empty())
- {
- std::list<double>::iterator theStart = _yData.begin();
- std::list<double>::iterator theEnd = _yData.end();
- return(*min_element(theStart, theEnd));
- }
- return (0.0);
-}
-
-double MatlabLine::yMax()
-{
- if (!_yData.empty())
- {
- std::list<double>::iterator theStart = _yData.begin();
- std::list<double>::iterator theEnd = _yData.end();
- return(*max_element(theStart, theEnd));
- }
- return (0.0);
-}
-
-
-
-MatlabTimeLine::MatlabTimeLine(int horizonSeconds /*= -1*/, const char *plotAttrib /*= NULL*/,
- const char *name /*= NULL*/,
- int64_t refTimeMs /* = -1*/)
- :
-_timeHorizon(horizonSeconds),
-MatlabLine(-1, plotAttrib, name) // infinite number of elements
-{
- if (refTimeMs < 0)
- _refTimeMs = TickTime::MillisecondTimestamp();
- else
- _refTimeMs = refTimeMs;
-}
-
-void MatlabTimeLine::Append(double y)
-{
- MatlabLine::Append(static_cast<double>(TickTime::MillisecondTimestamp() - _refTimeMs) / 1000.0, y);
-
- PurgeOldData();
-}
-
-
-void MatlabTimeLine::PurgeOldData()
-{
- if (_timeHorizon > 0)
- {
- // remove old data
- double historyLimit = static_cast<double>(TickTime::MillisecondTimestamp() - _refTimeMs) / 1000.0
- - _timeHorizon; // remove data points older than this
-
- std::list<double>::reverse_iterator ritx = _xData.rbegin();
- uint32_t removeCount = 0;
- while (ritx != _xData.rend())
- {
- if (*ritx >= historyLimit)
- {
- break;
- }
- ritx++;
- removeCount++;
- }
- if (removeCount == 0)
- {
- return;
- }
-
- // remove the range [begin, it).
- //if (removeCount > 10)
- //{
- // printf("Removing %lu elements\n", removeCount);
- //}
- _xData.resize(_xData.size() - removeCount);
- _yData.resize(_yData.size() - removeCount);
- }
-}
-
-
-int64_t MatlabTimeLine::GetRefTime()
-{
- return(_refTimeMs);
-}
-
-
-
-
-MatlabPlot::MatlabPlot()
-:
-_figHandle(-1),
-_smartAxis(false),
-_critSect(CriticalSectionWrapper::CreateCriticalSection()),
-_timeToPlot(false),
-_plotting(false),
-_enabled(true),
-_firstPlot(true),
-_legendEnabled(true),
-_donePlottingEvent(EventWrapper::Create())
-{
- CriticalSectionScoped cs(_critSect);
-
- _xlim[0] = 0;
- _xlim[1] = 0;
- _ylim[0] = 0;
- _ylim[1] = 0;
-
-#ifdef PLOT_TESTING
- _plotStartTime = -1;
- _plotDelay = 0;
-#endif
-
-}
-
-
-MatlabPlot::~MatlabPlot()
-{
- _critSect->Enter();
-
- // delete all line objects
- while (!_line.empty())
- {
- delete *(_line.end() - 1);
- _line.pop_back();
- }
-
- delete _critSect;
- delete _donePlottingEvent;
-}
-
-
-int MatlabPlot::AddLine(int maxLen /*= -1*/, const char *plotAttrib /*= NULL*/, const char *name /*= NULL*/)
-{
- CriticalSectionScoped cs(_critSect);
- if (!_enabled)
- {
- return -1;
- }
-
- MatlabLine *newLine = new MatlabLine(maxLen, plotAttrib, name);
- _line.push_back(newLine);
-
- return (static_cast<int>(_line.size() - 1)); // index of newly inserted line
-}
-
-
-int MatlabPlot::AddTimeLine(int maxLen /*= -1*/, const char *plotAttrib /*= NULL*/, const char *name /*= NULL*/,
- int64_t refTimeMs /*= -1*/)
-{
- CriticalSectionScoped cs(_critSect);
-
- if (!_enabled)
- {
- return -1;
- }
-
- MatlabTimeLine *newLine = new MatlabTimeLine(maxLen, plotAttrib, name, refTimeMs);
- _line.push_back(newLine);
-
- return (static_cast<int>(_line.size() - 1)); // index of newly inserted line
-}
-
-
-int MatlabPlot::GetLineIx(const char *name)
-{
- CriticalSectionScoped cs(_critSect);
-
- if (!_enabled)
- {
- return -1;
- }
-
- // search the list for a matching line name
- std::vector<MatlabLine*>::iterator it = _line.begin();
- bool matchFound = false;
- int lineIx = 0;
-
- for (; it != _line.end(); it++, lineIx++)
- {
- if ((*it)->_name == name)
- {
- matchFound = true;
- break;
- }
- }
-
- if (matchFound)
- {
- return (lineIx);
- }
- else
- {
- return (-1);
- }
-}
-
-
-void MatlabPlot::Append(int lineIndex, double x, double y)
-{
- CriticalSectionScoped cs(_critSect);
-
- if (!_enabled)
- {
- return;
- }
-
- // sanity for index
- if (lineIndex < 0 || lineIndex >= static_cast<int>(_line.size()))
- {
- throw "Line index out of range";
- exit(1);
- }
-
- return (_line[lineIndex]->Append(x, y));
-}
-
-
-void MatlabPlot::Append(int lineIndex, double y)
-{
- CriticalSectionScoped cs(_critSect);
-
- if (!_enabled)
- {
- return;
- }
-
- // sanity for index
- if (lineIndex < 0 || lineIndex >= static_cast<int>(_line.size()))
- {
- throw "Line index out of range";
- exit(1);
- }
-
- return (_line[lineIndex]->Append(y));
-}
-
-
-int MatlabPlot::Append(const char *name, double x, double y)
-{
- CriticalSectionScoped cs(_critSect);
-
- if (!_enabled)
- {
- return -1;
- }
-
- // search the list for a matching line name
- int lineIx = GetLineIx(name);
-
- if (lineIx < 0) //(!matchFound)
- {
- // no match; append new line
- lineIx = AddLine(-1, NULL, name);
- }
-
- // append data to line
- Append(lineIx, x, y);
- return (lineIx);
-}
-
-int MatlabPlot::Append(const char *name, double y)
-{
- CriticalSectionScoped cs(_critSect);
-
- if (!_enabled)
- {
- return -1;
- }
-
- // search the list for a matching line name
- int lineIx = GetLineIx(name);
-
- if (lineIx < 0) //(!matchFound)
- {
- // no match; append new line
- lineIx = AddLine(-1, NULL, name);
- }
-
- // append data to line
- Append(lineIx, y);
- return (lineIx);
-}
-
-int MatlabPlot::Length(char *name)
-{
- CriticalSectionScoped cs(_critSect);
-
- if (!_enabled)
- {
- return -1;
- }
-
- int ix = GetLineIx(name);
- if (ix >= 0)
- {
- return (static_cast<int>(_line[ix]->_xData.size()));
- }
- else
- {
- return (-1);
- }
-}
-
-
-void MatlabPlot::SetPlotAttribute(char *name, char *plotAttrib)
-{
- CriticalSectionScoped cs(_critSect);
-
- if (!_enabled)
- {
- return;
- }
-
- int lineIx = GetLineIx(name);
-
- if (lineIx >= 0)
- {
- _line[lineIx]->SetAttribute(plotAttrib);
- }
-}
-
-// Must be called under critical section _critSect
-void MatlabPlot::UpdateData(Engine* ep)
-{
- if (!_enabled)
- {
- return;
- }
-
- for (std::vector<MatlabLine*>::iterator it = _line.begin(); it != _line.end(); it++)
- {
- mxArray* xData = NULL;
- mxArray* yData = NULL;
- (*it)->GetPlotData(&xData, &yData);
- if (xData != NULL)
- {
- std::string xName = (*it)->GetXName();
- std::string yName = (*it)->GetYName();
- _critSect->Leave();
-#ifdef MATLAB6
- mxSetName(xData, xName.c_str());
- mxSetName(yData, yName.c_str());
- engPutArray(ep, xData);
- engPutArray(ep, yData);
-#else
- int ret = engPutVariable(ep, xName.c_str(), xData);
- assert(ret == 0);
- ret = engPutVariable(ep, yName.c_str(), yData);
- assert(ret == 0);
-#endif
- _critSect->Enter();
- }
- }
-}
-
-bool MatlabPlot::GetPlotCmd(std::ostringstream & cmd, Engine* ep)
-{
- _critSect->Enter();
-
- if (!DataAvailable())
- {
- return false;
- }
-
- if (_firstPlot)
- {
- GetPlotCmd(cmd);
- _firstPlot = false;
- }
- else
- {
- GetRefreshCmd(cmd);
- }
-
- UpdateData(ep);
-
- _critSect->Leave();
-
- return true;
-}
-
-// Call inside critsect
-void MatlabPlot::GetPlotCmd(std::ostringstream & cmd)
-{
- // we have something to plot
- // empty the stream
- cmd.str(""); // (this seems to be the only way)
-
- cmd << "figure; h" << _figHandle << "= plot(";
-
- // first line
- std::vector<MatlabLine*>::iterator it = _line.begin();
- cmd << (*it)->GetPlotString();
-
- it++;
-
- // remaining lines
- for (; it != _line.end(); it++)
- {
- cmd << ", ";
- cmd << (*it)->GetPlotString();
- }
-
- cmd << "); ";
-
- if (_legendEnabled)
- {
- GetLegendCmd(cmd);
- }
-
- if (_smartAxis)
- {
- double xMin = _xlim[0];
- double xMax = _xlim[1];
- double yMax = _ylim[1];
- for (std::vector<MatlabLine*>::iterator it = _line.begin(); it != _line.end(); it++)
- {
- xMax = std::max(xMax, (*it)->xMax());
- xMin = std::min(xMin, (*it)->xMin());
-
- yMax = std::max(yMax, (*it)->yMax());
- yMax = std::max(yMax, fabs((*it)->yMin()));
- }
- _xlim[0] = xMin;
- _xlim[1] = xMax;
- _ylim[0] = -yMax;
- _ylim[1] = yMax;
-
- cmd << "axis([" << _xlim[0] << ", " << _xlim[1] << ", " << _ylim[0] << ", " << _ylim[1] << "]);";
- }
-
- int i=1;
- for (it = _line.begin(); it != _line.end(); i++, it++)
- {
- cmd << "set(h" << _figHandle << "(" << i << "), 'Tag', " << (*it)->GetLegendString() << ");";
- }
-}
-
-// Call inside critsect
-void MatlabPlot::GetRefreshCmd(std::ostringstream & cmd)
-{
- cmd.str(""); // (this seems to be the only way)
- std::vector<MatlabLine*>::iterator it = _line.begin();
- for (it = _line.begin(); it != _line.end(); it++)
- {
- cmd << "h = findobj(0, 'Tag', " << (*it)->GetLegendString() << ");";
- cmd << (*it)->GetRefreshString();
- }
- //if (_legendEnabled)
- //{
- // GetLegendCmd(cmd);
- //}
-}
-
-void MatlabPlot::GetLegendCmd(std::ostringstream & cmd)
-{
- std::vector<MatlabLine*>::iterator it = _line.begin();
- bool anyLegend = false;
- for (; it != _line.end(); it++)
- {
- anyLegend = anyLegend || (*it)->hasLegend();
- }
- if (anyLegend)
- {
- // create the legend
-
- cmd << "legend(h" << _figHandle << ",{";
-
-
- // iterate lines
- int i = 0;
- for (std::vector<MatlabLine*>::iterator it = _line.begin(); it != _line.end(); it++)
- {
- if (i > 0)
- {
- cmd << ", ";
- }
- cmd << (*it)->GetLegendString();
- i++;
- }
-
- cmd << "}, 2); "; // place legend in upper-left corner
- }
-}
-
-// Call inside critsect
-bool MatlabPlot::DataAvailable()
-{
- if (!_enabled)
- {
- return false;
- }
-
- for (std::vector<MatlabLine*>::iterator it = _line.begin(); it != _line.end(); it++)
- {
- (*it)->PurgeOldData();
- }
-
- return true;
-}
-
-void MatlabPlot::Plot()
-{
- CriticalSectionScoped cs(_critSect);
-
- _timeToPlot = true;
-
-#ifdef PLOT_TESTING
- _plotStartTime = TickTime::MillisecondTimestamp();
-#endif
-}
-
-
-void MatlabPlot::Reset()
-{
- CriticalSectionScoped cs(_critSect);
-
- _enabled = true;
-
- for (std::vector<MatlabLine*>::iterator it = _line.begin(); it != _line.end(); it++)
- {
- (*it)->Reset();
- }
-
-}
-
-void MatlabPlot::SetFigHandle(int handle)
-{
- CriticalSectionScoped cs(_critSect);
-
- if (handle > 0)
- _figHandle = handle;
-}
-
-bool
-MatlabPlot::TimeToPlot()
-{
- CriticalSectionScoped cs(_critSect);
- return _enabled && _timeToPlot;
-}
-
-void
-MatlabPlot::Plotting()
-{
- CriticalSectionScoped cs(_critSect);
- _plotting = true;
-}
-
-void
-MatlabPlot::DonePlotting()
-{
- CriticalSectionScoped cs(_critSect);
- _timeToPlot = false;
- _plotting = false;
- _donePlottingEvent->Set();
-}
-
-void
-MatlabPlot::DisablePlot()
-{
- _critSect->Enter();
- while (_plotting)
- {
- _critSect->Leave();
- _donePlottingEvent->Wait(WEBRTC_EVENT_INFINITE);
- _critSect->Enter();
- }
- _enabled = false;
-}
-
-int MatlabPlot::MakeTrend(const char *sourceName, const char *trendName, double slope, double offset, const char *plotAttrib)
-{
- CriticalSectionScoped cs(_critSect);
-
- int sourceIx;
- int trendIx;
-
- sourceIx = GetLineIx(sourceName);
- if (sourceIx < 0)
- {
- // could not find source
- return (-1);
- }
-
- trendIx = GetLineIx(trendName);
- if (trendIx < 0)
- {
- // no trend found; add new line
- trendIx = AddLine(2 /*maxLen*/, plotAttrib, trendName);
- }
-
- _line[trendIx]->UpdateTrendLine(_line[sourceIx], slope, offset);
-
- return (trendIx);
-
-}
-
-
-MatlabEngine::MatlabEngine()
-:
-_critSect(CriticalSectionWrapper::CreateCriticalSection()),
-_eventPtr(NULL),
-_running(false),
-_numPlots(0)
-{
- _eventPtr = EventWrapper::Create();
-
- _plotThread = ThreadWrapper::CreateThread(MatlabEngine::PlotThread, this,
- kLowPriority, "MatlabPlot");
- _running = true;
- _plotThread->Start();
-}
-
-MatlabEngine::~MatlabEngine()
-{
- _critSect->Enter();
-
- if (_plotThread)
- {
- _running = false;
- _eventPtr->Set();
-
- _plotThread->Stop();
- }
-
- _plots.clear();
-
- delete _eventPtr;
- _eventPtr = NULL;
-
- _critSect->Leave();
- delete _critSect;
-
-}
-
-MatlabPlot * MatlabEngine::NewPlot(MatlabPlot *newPlot)
-{
- CriticalSectionScoped cs(_critSect);
-
- //MatlabPlot *newPlot = new MatlabPlot();
-
- if (newPlot)
- {
- newPlot->SetFigHandle(++_numPlots); // first plot is number 1
- _plots.push_back(newPlot);
- }
-
- return (newPlot);
-
-}
-
-
-void MatlabEngine::DeletePlot(MatlabPlot *plot)
-{
- CriticalSectionScoped cs(_critSect);
-
- if (plot == NULL)
- {
- return;
- }
-
-    std::vector<MatlabPlot *>::iterator it;
-    for (it = _plots.begin(); it != _plots.end(); it++)
-    {
-        if (plot == *it)
-        {
-            break;
-        }
-    }
-
-    assert(it != _plots.end() && plot == *it); // the plot must be registered
-
- (*it)->DisablePlot();
-
- _plots.erase(it);
- --_numPlots;
-
- delete plot;
-}
-
-
-bool MatlabEngine::PlotThread(void *obj)
-{
- if (!obj)
- {
- return (false);
- }
-
- MatlabEngine *eng = (MatlabEngine *) obj;
-
- Engine *ep = engOpen(NULL);
- if (!ep)
- {
- throw "Cannot open Matlab engine";
- return (false);
- }
-
- engSetVisible(ep, true);
- engEvalString(ep, "close all;");
-
- while (eng->_running)
- {
- eng->_critSect->Enter();
-
- // iterate through all plots
- for (unsigned int ix = 0; ix < eng->_plots.size(); ix++)
- {
- MatlabPlot *plot = eng->_plots[ix];
- if (plot->TimeToPlot())
- {
- plot->Plotting();
- eng->_critSect->Leave();
-                std::ostringstream cmd;
-
-                // Evaluate an empty command; a non-zero return means the
-                // Matlab engine has died.
-                if (engEvalString(ep, cmd.str().c_str()))
-                {
-                    return (false);
-                }
-
-                // Reset the stream contents; ostringstream has no dedicated
-                // clear method, so overwrite the underlying string.
-                cmd.str("");
- if (plot->GetPlotCmd(cmd, ep))
- {
- // things to plot, we have already accessed what we need in the plot
- plot->DonePlotting();
-
- int64_t start = TickTime::MillisecondTimestamp();
- // plot it
- int ret = engEvalString(ep, cmd.str().c_str());
-                    printf("time=%lld\n", static_cast<long long>(TickTime::MillisecondTimestamp() - start)); // %I64i is MSVC-only
- if (ret)
- {
- // engine dead
- return (false);
- }
-
-#ifdef PLOT_TESTING
- if(plot->_plotStartTime >= 0)
- {
- plot->_plotDelay = TickTime::MillisecondTimestamp() - plot->_plotStartTime;
- plot->_plotStartTime = -1;
- }
-#endif
- }
- eng->_critSect->Enter();
- }
- }
-
- eng->_critSect->Leave();
-        // wait a while between plot updates
-        eng->_eventPtr->Wait(66); // 66 ms
- }
-
- if (ep)
- {
- engClose(ep);
- ep = NULL;
- }
-
- return (true);
-
-}
-
-#endif // MATLAB
diff --git a/webrtc/modules/rtp_rtcp/test/BWEStandAlone/MatlabPlot.h b/webrtc/modules/rtp_rtcp/test/BWEStandAlone/MatlabPlot.h
deleted file mode 100644
index 3ed89f8f91..0000000000
--- a/webrtc/modules/rtp_rtcp/test/BWEStandAlone/MatlabPlot.h
+++ /dev/null
@@ -1,170 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_MATLABPLOT_H_
-#define WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_MATLABPLOT_H_
-
-#include <list>
-#include <string>
-#include <vector>
-
-#include "webrtc/typedefs.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
-
-namespace webrtc {
-class CriticalSectionWrapper;
-class EventWrapper;
-}
-
-//#define PLOT_TESTING
-
-#ifdef MATLAB
-
-typedef struct engine Engine;
-typedef struct mxArray_tag mxArray;
-
-class MatlabLine
-{
- friend class MatlabPlot;
-
-public:
- MatlabLine(int maxLen = -1, const char *plotAttrib = NULL, const char *name = NULL);
- ~MatlabLine();
- virtual void Append(double x, double y);
- virtual void Append(double y);
- void SetMaxLen(int maxLen);
- void SetAttribute(char *plotAttrib);
- void SetName(char *name);
- void Reset();
- virtual void PurgeOldData() {};
-
- void UpdateTrendLine(MatlabLine * sourceData, double slope, double offset);
-
- double xMin();
- double xMax();
- double yMin();
- double yMax();
-
-protected:
- void GetPlotData(mxArray** xData, mxArray** yData);
- std::string GetXName();
- std::string GetYName();
- std::string GetPlotString();
- std::string GetRefreshString();
- std::string GetLegendString();
- bool hasLegend();
- std::list<double> _xData;
- std::list<double> _yData;
- mxArray* _xArray;
- mxArray* _yArray;
- int _maxLen;
- std::string _plotAttribute;
- std::string _name;
-};
-
-
-class MatlabTimeLine : public MatlabLine
-{
-public:
- MatlabTimeLine(int horizonSeconds = -1, const char *plotAttrib = NULL, const char *name = NULL,
- int64_t refTimeMs = -1);
- ~MatlabTimeLine() {};
- void Append(double y);
- void PurgeOldData();
- int64_t GetRefTime();
-
-private:
- int64_t _refTimeMs;
- int _timeHorizon;
-};
-
-
-class MatlabPlot
-{
- friend class MatlabEngine;
-
-public:
- MatlabPlot();
- ~MatlabPlot();
-
- int AddLine(int maxLen = -1, const char *plotAttrib = NULL, const char *name = NULL);
- int AddTimeLine(int maxLen = -1, const char *plotAttrib = NULL, const char *name = NULL,
- int64_t refTimeMs = -1);
- int GetLineIx(const char *name);
- void Append(int lineIndex, double x, double y);
- void Append(int lineIndex, double y);
- int Append(const char *name, double x, double y);
- int Append(const char *name, double y);
- int Length(char *name);
- void SetPlotAttribute(char *name, char *plotAttrib);
- void Plot();
- void Reset();
- void SmartAxis(bool status = true) { _smartAxis = status; };
- void SetFigHandle(int handle);
- void EnableLegend(bool enable) { _legendEnabled = enable; };
-
- bool TimeToPlot();
- void Plotting();
- void DonePlotting();
- void DisablePlot();
-
- int MakeTrend(const char *sourceName, const char *trendName, double slope, double offset, const char *plotAttrib = NULL);
-
-#ifdef PLOT_TESTING
- int64_t _plotStartTime;
- int64_t _plotDelay;
-#endif
-
-private:
- void UpdateData(Engine* ep);
- bool GetPlotCmd(std::ostringstream & cmd, Engine* ep);
- void GetPlotCmd(std::ostringstream & cmd); // call inside crit sect
- void GetRefreshCmd(std::ostringstream & cmd); // call inside crit sect
- void GetLegendCmd(std::ostringstream & cmd);
- bool DataAvailable();
-
- std::vector<MatlabLine *> _line;
- int _figHandle;
- bool _smartAxis;
- double _xlim[2];
- double _ylim[2];
- webrtc::CriticalSectionWrapper *_critSect;
- bool _timeToPlot;
- bool _plotting;
- bool _enabled;
- bool _firstPlot;
- bool _legendEnabled;
- webrtc::EventWrapper* _donePlottingEvent;
-};
-
-
-class MatlabEngine
-{
-public:
- MatlabEngine();
- ~MatlabEngine();
-
- MatlabPlot * NewPlot(MatlabPlot *newPlot);
- void DeletePlot(MatlabPlot *plot);
-
-private:
- static bool PlotThread(void *obj);
-
- std::vector<MatlabPlot *> _plots;
- webrtc::CriticalSectionWrapper *_critSect;
- webrtc::EventWrapper *_eventPtr;
- rtc::scoped_ptr<webrtc::ThreadWrapper> _plotThread;
- bool _running;
- int _numPlots;
-};
-
-#endif //MATLAB
-
-#endif // WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_MATLABPLOT_H_
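Taken together, the declarations above imply a client flow along these lines. This is a hedged sketch based only on the signatures; the engine owns the plot thread, plots own their lines, and currentBitrateKbps is a placeholder:

#ifdef MATLAB
MatlabEngine eng;                                  // starts the plot thread
MatlabPlot* plot = eng.NewPlot(new MatlabPlot());  // assigns figure handle 1
int ix = plot->AddTimeLine(300 /*maxLen*/, "b-", "bitrate");
// Per sample:
plot->Append(ix, currentBitrateKbps);  // y only; x is wall-clock time
plot->Plot();                          // ask the plot thread to redraw
// On shutdown:
eng.DeletePlot(plot);  // waits for any in-flight redraw, then deletes
#endif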
diff --git a/webrtc/modules/rtp_rtcp/test/BWEStandAlone/TestLoadGenerator.cc b/webrtc/modules/rtp_rtcp/test/BWEStandAlone/TestLoadGenerator.cc
deleted file mode 100644
index 0e3e87976c..0000000000
--- a/webrtc/modules/rtp_rtcp/test/BWEStandAlone/TestLoadGenerator.cc
+++ /dev/null
@@ -1,432 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/rtp_rtcp/test/BWEStandAlone/TestLoadGenerator.h"
-
-#include <stdio.h>
-
-#include <algorithm>
-
-#include "webrtc/modules/rtp_rtcp/test/BWEStandAlone/TestSenderReceiver.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
-
-
-bool SenderThreadFunction(void *obj)
-{
- if (obj == NULL)
- {
- return false;
- }
- TestLoadGenerator *_genObj = static_cast<TestLoadGenerator *>(obj);
-
- return _genObj->GeneratorLoop();
-}
-
-
-TestLoadGenerator::TestLoadGenerator(TestSenderReceiver *sender, int32_t rtpSampleRate)
-:
-_critSect(CriticalSectionWrapper::CreateCriticalSection()),
-_eventPtr(NULL),
-_bitrateKbps(0),
-_sender(sender),
-_running(false),
-_rtpSampleRate(rtpSampleRate)
-{
-}
-
-TestLoadGenerator::~TestLoadGenerator ()
-{
- if (_running)
- {
- Stop();
- }
-
- delete _critSect;
-}
-
-int32_t TestLoadGenerator::SetBitrate (int32_t newBitrateKbps)
-{
- CriticalSectionScoped cs(_critSect);
-
- if (newBitrateKbps < 0)
- {
- return -1;
- }
-
- _bitrateKbps = newBitrateKbps;
-
- printf("New bitrate = %i kbps\n", _bitrateKbps);
-
- return _bitrateKbps;
-}
-
-
-int32_t TestLoadGenerator::Start (const char *threadName)
-{
- CriticalSectionScoped cs(_critSect);
-
- _eventPtr = EventWrapper::Create();
-
- _genThread = ThreadWrapper::CreateThread(SenderThreadFunction, this,
- threadName);
- _running = true;
-
- _genThread->Start();
- _genThread->SetPriority(kRealtimePriority);
-
- return 0;
-}
-
-
-int32_t TestLoadGenerator::Stop ()
-{
-    _critSect->Enter();
-
-    if (_genThread)
-    {
-        _running = false;
-        _eventPtr->Set();
-        _critSect->Leave(); // release before joining; generatePayload takes this lock
-
-        _genThread->Stop();
-        _genThread.reset();
-
-        delete _eventPtr;
-        _eventPtr = NULL;
-    }
-    else
-    {
-        _critSect->Leave();
-    }
-
-    return (0);
-}
-
-
-int TestLoadGenerator::generatePayload ()
-{
- return(generatePayload( static_cast<uint32_t>( TickTime::MillisecondTimestamp() * _rtpSampleRate / 1000 )));
-}
-
-
-int TestLoadGenerator::sendPayload (const uint32_t timeStamp,
- const uint8_t* payloadData,
- const size_t payloadSize,
- const webrtc::FrameType frameType /*= webrtc::kVideoFrameDelta*/)
-{
-
- return (_sender->SendOutgoingData(timeStamp, payloadData, payloadSize, frameType));
-}
-
-
-CBRGenerator::CBRGenerator (TestSenderReceiver *sender,
- size_t payloadSizeBytes,
- int32_t bitrateKbps,
- int32_t rtpSampleRate)
-:
-TestLoadGenerator(sender, rtpSampleRate),
-//_eventPtr(NULL),
-_payloadSizeBytes(payloadSizeBytes),
-_payload(new uint8_t[payloadSizeBytes])
-{
- SetBitrate (bitrateKbps);
-}
-
-CBRGenerator::~CBRGenerator ()
-{
- if (_running)
- {
- Stop();
- }
-
- if (_payload)
- {
- delete [] _payload;
- }
-
-}
-
-bool CBRGenerator::GeneratorLoop ()
-{
- double periodMs;
- int64_t nextSendTime = TickTime::MillisecondTimestamp();
-
-
- // no critSect
- while (_running)
- {
- // send data (critSect inside)
- generatePayload( static_cast<uint32_t>(nextSendTime * _rtpSampleRate / 1000) );
-
-        // calculate wait time: bytes * 8 = bits; bits / (kbit/s) = ms
-        periodMs = 8.0 * _payloadSizeBytes / _bitrateKbps;
-
- nextSendTime = static_cast<int64_t>(nextSendTime + periodMs);
-
- int32_t waitTime = static_cast<int32_t>(nextSendTime - TickTime::MillisecondTimestamp());
- if (waitTime < 0)
- {
- waitTime = 0;
- }
- // wait
- _eventPtr->Wait(static_cast<int32_t>(waitTime));
- }
-
- return true;
-}
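For a constant-bitrate stream the pacing above is pure unit arithmetic: a payload of B bytes at R kbit/s occupies 8*B/R milliseconds on the wire, and that is the inter-packet period. A minimal standalone check (function name and values are illustrative):

#include <cstddef>
#include <cstdio>

// Inter-packet period in ms for CBR pacing: bytes -> bits, then / (kbit/s).
double CbrPeriodMs(std::size_t payloadBytes, int bitrateKbps) {
    return 8.0 * payloadBytes / bitrateKbps;  // caller guarantees kbps > 0
}

int main() {
    // 1200-byte packets at 480 kbit/s: 9600 bits / 480 kbit/s = 20 ms.
    std::printf("%.1f ms\n", CbrPeriodMs(1200, 480));
    return 0;
}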
-
-int CBRGenerator::generatePayload ( uint32_t timestamp )
-{
- CriticalSectionScoped cs(_critSect);
-
- //uint8_t *payload = new uint8_t[_payloadSizeBytes];
-
- int ret = sendPayload(timestamp, _payload, _payloadSizeBytes);
-
- //delete [] payload;
- return ret;
-}
-
-
-
-
-/////////////////////
-
-CBRFixFRGenerator::CBRFixFRGenerator (TestSenderReceiver *sender, int32_t bitrateKbps,
- int32_t rtpSampleRate, int32_t frameRateFps /*= 30*/,
- double spread /*= 0.0*/)
-:
-TestLoadGenerator(sender, rtpSampleRate),
-//_eventPtr(NULL),
-_payloadSizeBytes(0),
-_payload(NULL),
-_payloadAllocLen(0),
-_frameRateFps(frameRateFps),
-_spreadFactor(spread)
-{
- SetBitrate (bitrateKbps);
-}
-
-CBRFixFRGenerator::~CBRFixFRGenerator ()
-{
- if (_running)
- {
- Stop();
- }
-
- if (_payload)
- {
- delete [] _payload;
- _payloadAllocLen = 0;
- }
-
-}
-
-bool CBRFixFRGenerator::GeneratorLoop ()
-{
- double periodMs;
- int64_t nextSendTime = TickTime::MillisecondTimestamp();
-
-    _critSect->Enter();
-
-    if (_frameRateFps <= 0)
-    {
-        _critSect->Leave(); // do not hold the lock across the early return
-        return false;
-    }
-
-    _critSect->Leave();
-
- // no critSect
- while (_running)
- {
-        _critSect->Enter();
-
- // calculate payload size
- _payloadSizeBytes = nextPayloadSize();
-
- if (_payloadSizeBytes > 0)
- {
-
- if (_payloadAllocLen < _payloadSizeBytes * (1 + _spreadFactor))
- {
- // re-allocate _payload
- if (_payload)
- {
- delete [] _payload;
- _payload = NULL;
- }
-
-                _payloadAllocLen = static_cast<size_t>((_payloadSizeBytes * (1 + _spreadFactor) * 3) / 2 + .5); // 50% extra to avoid frequent re-alloc
- _payload = new uint8_t[_payloadAllocLen];
- }
-
-
- // send data (critSect inside)
- generatePayload( static_cast<uint32_t>(nextSendTime * _rtpSampleRate / 1000) );
- }
-
-        _critSect->Leave();
-
- // calculate wait time
- periodMs = 1000.0 / _frameRateFps;
- nextSendTime = static_cast<int64_t>(nextSendTime + periodMs + 0.5);
-
- int32_t waitTime = static_cast<int32_t>(nextSendTime - TickTime::MillisecondTimestamp());
- if (waitTime < 0)
- {
- waitTime = 0;
- }
- // wait
- _eventPtr->Wait(waitTime);
- }
-
- return true;
-}
-
-size_t CBRFixFRGenerator::nextPayloadSize()
-{
- const double periodMs = 1000.0 / _frameRateFps;
- return static_cast<size_t>(_bitrateKbps * periodMs / 8 + 0.5);
-}
-
-int CBRFixFRGenerator::generatePayload ( uint32_t timestamp )
-{
- CriticalSectionScoped cs(_critSect);
-
- double factor = ((double) rand() - RAND_MAX/2) / RAND_MAX; // [-0.5; 0.5]
- factor = 1 + 2 * _spreadFactor * factor; // [1 - _spreadFactor ; 1 + _spreadFactor]
-
- size_t thisPayloadBytes = static_cast<size_t>(_payloadSizeBytes * factor);
- // sanity
- if (thisPayloadBytes > _payloadAllocLen)
- {
- thisPayloadBytes = _payloadAllocLen;
- }
-
- int ret = sendPayload(timestamp, _payload, thisPayloadBytes);
- return ret;
-}
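The factor computed above maps rand() to a uniform value in [-0.5, 0.5] and then into [1 - spread, 1 + spread], so per-frame sizes jitter while the long-run average stays on the CBR target. The same mapping in isolation, written with <random> (function name is mine):

#include <random>

// Multiplier uniform in [1 - spread, 1 + spread]; its mean is exactly 1.
double SpreadFactor(double spread, std::mt19937* rng) {
    std::uniform_real_distribution<double> u(-0.5, 0.5);
    return 1.0 + 2.0 * spread * u(*rng);
}
// With spread = 0.1 every frame size is scaled by up to +/-10%.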
-
-
-/////////////////////
-
-PeriodicKeyFixFRGenerator::PeriodicKeyFixFRGenerator (TestSenderReceiver *sender, int32_t bitrateKbps,
- int32_t rtpSampleRate, int32_t frameRateFps /*= 30*/,
- double spread /*= 0.0*/, double keyFactor /*= 4.0*/, uint32_t keyPeriod /*= 300*/)
-:
-CBRFixFRGenerator(sender, bitrateKbps, rtpSampleRate, frameRateFps, spread),
-_keyFactor(keyFactor),
-_keyPeriod(keyPeriod),
-_frameCount(0)
-{
-}
-
-size_t PeriodicKeyFixFRGenerator::nextPayloadSize()
-{
- // calculate payload size for a delta frame
- size_t payloadSizeBytes = static_cast<size_t>(1000 * _bitrateKbps /
- (8.0 * _frameRateFps * (1.0 + (_keyFactor - 1.0) / _keyPeriod)) + 0.5);
-
- if (_frameCount % _keyPeriod == 0)
- {
- // this is a key frame, scale the payload size
- payloadSizeBytes =
- static_cast<size_t>(_keyFactor * _payloadSizeBytes + 0.5);
- }
- _frameCount++;
-
- return payloadSizeBytes;
-}
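The delta-frame size above is chosen so that the long-run average still hits the bitrate target: one frame in every keyPeriod is keyFactor times larger, hence the (1 + (keyFactor - 1) / keyPeriod) correction in the denominator. A numeric check under assumed parameters (500 kbit/s, 30 fps, keyFactor 4, keyPeriod 300):

#include <cstdio>

int main() {
    const double kbps = 500.0, fps = 30.0, keyFactor = 4.0, keyPeriod = 300.0;
    // Delta-frame bytes, with the key-frame overhead folded into the average.
    const double delta =
        1000.0 * kbps / (8.0 * fps * (1.0 + (keyFactor - 1.0) / keyPeriod));
    const double key = keyFactor * delta;
    // Average rate over one period: (keyPeriod - 1) delta frames + 1 key frame.
    const double avgBps = ((keyPeriod - 1.0) * delta + key) * fps / keyPeriod * 8.0;
    std::printf("delta=%.0f B, key=%.0f B, avg=%.0f bit/s\n", delta, key, avgBps);
    return 0;  // prints roughly delta=2063 B, key=8251 B, avg=500000 bit/s
}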
-
-////////////////////
-
-CBRVarFRGenerator::CBRVarFRGenerator(TestSenderReceiver *sender, int32_t bitrateKbps, const uint8_t* frameRates,
- uint16_t numFrameRates, int32_t rtpSampleRate, double avgFrPeriodMs,
- double frSpreadFactor, double spreadFactor)
-:
-CBRFixFRGenerator(sender, bitrateKbps, rtpSampleRate, frameRates[0], spreadFactor),
-_avgFrPeriodMs(avgFrPeriodMs),
-_frSpreadFactor(frSpreadFactor),
-_frameRates(NULL),
-_numFrameRates(numFrameRates),
-_frChangeTimeMs(TickTime::MillisecondTimestamp() +
-                static_cast<int64_t>(1000.0 * avgFrPeriodMs + 0.5)) // match ChangeFrameRate(), which scales by 1000
-{
- _frameRates = new uint8_t[_numFrameRates];
- memcpy(_frameRates, frameRates, _numFrameRates);
-}
-
-CBRVarFRGenerator::~CBRVarFRGenerator()
-{
- delete [] _frameRates;
-}
-
-void CBRVarFRGenerator::ChangeFrameRate()
-{
- const int64_t nowMs = TickTime::MillisecondTimestamp();
- if (nowMs < _frChangeTimeMs)
- {
- return;
- }
- // Time to change frame rate
- uint16_t frIndex = static_cast<uint16_t>(static_cast<double>(rand()) / RAND_MAX
- * (_numFrameRates - 1) + 0.5) ;
- assert(frIndex < _numFrameRates);
- _frameRateFps = _frameRates[frIndex];
- // Update the next frame rate change time
- double factor = ((double) rand() - RAND_MAX/2) / RAND_MAX; // [-0.5; 0.5]
- factor = 1 + 2 * _frSpreadFactor * factor; // [1 - _frSpreadFactor ; 1 + _frSpreadFactor]
- _frChangeTimeMs = nowMs + static_cast<int64_t>(1000.0 * factor *
- _avgFrPeriodMs + 0.5);
-
- printf("New frame rate: %d\n", _frameRateFps);
-}
-
-size_t CBRVarFRGenerator::nextPayloadSize()
-{
- ChangeFrameRate();
- return CBRFixFRGenerator::nextPayloadSize();
-}
-
-////////////////////
-
-CBRFrameDropGenerator::CBRFrameDropGenerator(TestSenderReceiver *sender, int32_t bitrateKbps,
- int32_t rtpSampleRate, double spreadFactor)
-:
-CBRFixFRGenerator(sender, bitrateKbps, rtpSampleRate, 30, spreadFactor),
-_accBits(0)
-{
-}
-
-CBRFrameDropGenerator::~CBRFrameDropGenerator()
-{
-}
-
-size_t CBRFrameDropGenerator::nextPayloadSize()
-{
-    _accBits -= 1000.0 * _bitrateKbps / _frameRateFps; // one frame interval's budget, in bits
- if (_accBits < 0)
- {
- _accBits = 0;
- }
- if (_accBits > 0.3 * _bitrateKbps * 1000)
- {
- //printf("drop\n");
- return 0;
- }
- else
- {
- //printf("keep\n");
- const double periodMs = 1000.0 / _frameRateFps;
- size_t frameSize =
- static_cast<size_t>(_bitrateKbps * periodMs / 8 + 0.5);
- frameSize =
- std::max(frameSize, static_cast<size_t>(300 * periodMs / 8 + 0.5));
- _accBits += frameSize * 8;
- return frameSize;
- }
-}
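nextPayloadSize above is a small leaky bucket: every frame interval drains one frame's worth of budget, every sent frame deposits its actual size, and a frame is dropped once the backlog exceeds 0.3 s of bitrate. The 300 kbit/s minimum frame size is what builds that backlog at low target rates. A condensed standalone sketch of the same policy (class name is mine; constants mirror the code):

#include <algorithm>
#include <cstddef>

class FrameDropper {
public:
    FrameDropper(int bitrateKbps, int fps) : kbps_(bitrateKbps), fps_(fps) {}

    // Returns 0 to drop the frame, otherwise the frame size in bytes.
    std::size_t NextFrameBytes() {
        accBits_ = std::max(0.0, accBits_ - 1000.0 * kbps_ / fps_);  // drain
        if (accBits_ > 0.3 * 1000.0 * kbps_)  // over ~0.3 s of backlog: drop
            return 0;
        const double periodMs = 1000.0 / fps_;
        std::size_t bytes = static_cast<std::size_t>(kbps_ * periodMs / 8 + 0.5);
        // The 300 kbit/s floor is what makes backlog accumulate at low rates.
        bytes = std::max(bytes, static_cast<std::size_t>(300 * periodMs / 8 + 0.5));
        accBits_ += bytes * 8.0;  // deposit what was actually sent
        return bytes;
    }

private:
    const int kbps_;
    const int fps_;
    double accBits_ = 0.0;
};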
diff --git a/webrtc/modules/rtp_rtcp/test/BWEStandAlone/TestLoadGenerator.h b/webrtc/modules/rtp_rtcp/test/BWEStandAlone/TestLoadGenerator.h
deleted file mode 100644
index bd83962fa3..0000000000
--- a/webrtc/modules/rtp_rtcp/test/BWEStandAlone/TestLoadGenerator.h
+++ /dev/null
@@ -1,149 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_TESTLOADGENERATOR_H_
-#define WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_TESTLOADGENERATOR_H_
-
-#include <stdlib.h>
-
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
-#include "webrtc/typedefs.h"
-
-class TestSenderReceiver;
-namespace webrtc {
-class CriticalSectionWrapper;
-class EventWrapper;
-}
-
-class TestLoadGenerator
-{
-public:
- TestLoadGenerator (TestSenderReceiver *sender, int32_t rtpSampleRate = 90000);
- virtual ~TestLoadGenerator ();
-
- int32_t SetBitrate (int32_t newBitrateKbps);
- virtual int32_t Start (const char *threadName = NULL);
- virtual int32_t Stop ();
- virtual bool GeneratorLoop () = 0;
-
-protected:
- virtual int generatePayload ( uint32_t timestamp ) = 0;
- int generatePayload ();
- int sendPayload (const uint32_t timeStamp,
- const uint8_t* payloadData,
- const size_t payloadSize,
- const webrtc::FrameType frameType = webrtc::kVideoFrameDelta);
-
- webrtc::CriticalSectionWrapper* _critSect;
- webrtc::EventWrapper *_eventPtr;
- rtc::scoped_ptr<webrtc::ThreadWrapper> _genThread;
- int32_t _bitrateKbps;
- TestSenderReceiver *_sender;
- bool _running;
- int32_t _rtpSampleRate;
-};
-
-
-class CBRGenerator : public TestLoadGenerator
-{
-public:
- CBRGenerator (TestSenderReceiver *sender,
- size_t payloadSizeBytes,
- int32_t bitrateKbps,
- int32_t rtpSampleRate = 90000);
- virtual ~CBRGenerator ();
-
- virtual int32_t Start () {return (TestLoadGenerator::Start("CBRGenerator"));};
-
- virtual bool GeneratorLoop ();
-
-protected:
- virtual int generatePayload ( uint32_t timestamp );
-
- size_t _payloadSizeBytes;
- uint8_t *_payload;
-};
-
-
-class CBRFixFRGenerator : public TestLoadGenerator // constant bitrate and fixed frame rate
-{
-public:
- CBRFixFRGenerator (TestSenderReceiver *sender, int32_t bitrateKbps, int32_t rtpSampleRate = 90000,
- int32_t frameRateFps = 30, double spread = 0.0);
- virtual ~CBRFixFRGenerator ();
-
- virtual int32_t Start () {return (TestLoadGenerator::Start("CBRFixFRGenerator"));};
-
- virtual bool GeneratorLoop ();
-
-protected:
- virtual size_t nextPayloadSize ();
- virtual int generatePayload ( uint32_t timestamp );
-
- size_t _payloadSizeBytes;
- uint8_t *_payload;
- size_t _payloadAllocLen;
- int32_t _frameRateFps;
- double _spreadFactor;
-};
-
-class PeriodicKeyFixFRGenerator : public CBRFixFRGenerator // constant bitrate and fixed frame rate with periodically large frames
-{
-public:
- PeriodicKeyFixFRGenerator (TestSenderReceiver *sender, int32_t bitrateKbps, int32_t rtpSampleRate = 90000,
- int32_t frameRateFps = 30, double spread = 0.0, double keyFactor = 4.0, uint32_t keyPeriod = 300);
- virtual ~PeriodicKeyFixFRGenerator () {}
-
-protected:
- virtual size_t nextPayloadSize ();
-
- double _keyFactor;
- uint32_t _keyPeriod;
- uint32_t _frameCount;
-};
-
-// Probably better to inherit CBRFixFRGenerator from CBRVarFRGenerator, but since
-// the fix FR version already existed this was easier.
-class CBRVarFRGenerator : public CBRFixFRGenerator // constant bitrate and variable frame rate
-{
-public:
- CBRVarFRGenerator(TestSenderReceiver *sender, int32_t bitrateKbps, const uint8_t* frameRates,
- uint16_t numFrameRates, int32_t rtpSampleRate = 90000, double avgFrPeriodMs = 5.0,
- double frSpreadFactor = 0.05, double spreadFactor = 0.0);
-
- ~CBRVarFRGenerator();
-
-protected:
- virtual void ChangeFrameRate();
- virtual size_t nextPayloadSize ();
-
- double _avgFrPeriodMs;
- double _frSpreadFactor;
- uint8_t* _frameRates;
- uint16_t _numFrameRates;
- int64_t _frChangeTimeMs;
-};
-
-class CBRFrameDropGenerator : public CBRFixFRGenerator // constant bitrate and variable frame rate
-{
-public:
- CBRFrameDropGenerator(TestSenderReceiver *sender, int32_t bitrateKbps,
- int32_t rtpSampleRate = 90000, double spreadFactor = 0.0);
-
- ~CBRFrameDropGenerator();
-
-protected:
- virtual size_t nextPayloadSize();
-
- double _accBits;
-};
-
-#endif // WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_TESTLOADGENERATOR_H_
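As declared above, a generator is created, started, retuned and stopped by its owner. A hypothetical minimal use of CBRGenerator, where 'sender' stands in for an initialized TestSenderReceiver*:

CBRGenerator gen(sender, 1200 /*payload bytes*/, 500 /*kbit/s*/);
gen.Start();           // spawns the pacing thread named "CBRGenerator"
gen.SetBitrate(1000);  // the rate can be changed while running
// ... run the experiment ...
gen.Stop();            // joins the thread and frees the event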
diff --git a/webrtc/modules/rtp_rtcp/test/BWEStandAlone/TestSenderReceiver.cc b/webrtc/modules/rtp_rtcp/test/BWEStandAlone/TestSenderReceiver.cc
deleted file mode 100644
index 47f2880638..0000000000
--- a/webrtc/modules/rtp_rtcp/test/BWEStandAlone/TestSenderReceiver.cc
+++ /dev/null
@@ -1,411 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/rtp_rtcp/test/BWEStandAlone/TestSenderReceiver.h"
-
-#include <stdio.h>
-#include <stdlib.h>
-
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
-#include "webrtc/modules/rtp_rtcp/test/BWEStandAlone/TestLoadGenerator.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
-#include "webrtc/test/channel_transport/udp_transport.h"
-
-#define NR_OF_SOCKET_BUFFERS 500
-
-
-bool ProcThreadFunction(void *obj)
-{
- if (obj == NULL)
- {
- return false;
- }
- TestSenderReceiver *theObj = static_cast<TestSenderReceiver *>(obj);
-
- return theObj->ProcLoop();
-}
-
-
-TestSenderReceiver::TestSenderReceiver (void)
-:
-_critSect(CriticalSectionWrapper::CreateCriticalSection()),
-_eventPtr(NULL),
-_running(false),
-_payloadType(0),
-_loadGenerator(NULL),
-_isSender(false),
-_isReceiver(false),
-_sendRecCB(NULL),
-_lastBytesReceived(0),
-_lastTime(-1)
-{
- // RTP/RTCP module
- _rtp = RtpRtcp::CreateRtpRtcp(0, false);
- if (!_rtp)
- {
- throw "Could not create RTP/RTCP module";
- exit(1);
- }
-
- if (_rtp->InitReceiver() != 0)
- {
- throw "_rtp->InitReceiver()";
- exit(1);
- }
-
- if (_rtp->InitSender() != 0)
- {
- throw "_rtp->InitSender()";
- exit(1);
- }
-
- // SocketTransport module
- uint8_t numberOfThreads = 1;
- _transport = UdpTransport::Create(0, numberOfThreads);
- if (!_transport)
- {
- throw "Could not create transport module";
- exit(1);
- }
-}
-
-TestSenderReceiver::~TestSenderReceiver (void)
-{
-
- Stop(); // N.B. without critSect
-
- _critSect->Enter();
-
- if (_rtp)
- {
- RtpRtcp::DestroyRtpRtcp(_rtp);
- _rtp = NULL;
- }
-
- if (_transport)
- {
- UdpTransport::Destroy(_transport);
- _transport = NULL;
- }
-
- delete _critSect;
-
-}
-
-
-int32_t TestSenderReceiver::InitReceiver (const uint16_t rtpPort,
- const uint16_t rtcpPort,
- const int8_t payloadType /*= 127*/)
-{
- CriticalSectionScoped cs(_critSect);
-
- // init transport
- if (_transport->InitializeReceiveSockets(this, rtpPort/*, 0, NULL, 0, true*/) != 0)
- {
- throw "_transport->InitializeReceiveSockets";
- exit(1);
- }
-
- if (_rtp->RegisterIncomingRTPCallback(this) != 0)
- {
- throw "_rtp->RegisterIncomingRTPCallback";
- exit(1);
- }
-
- if (_rtp->RegisterIncomingDataCallback(this) != 0)
- {
-        throw "_rtp->RegisterIncomingDataCallback";
- exit(1);
- }
-
- if (_rtp->SetRTCPStatus(RtcpMode::kReducedSize) != 0) {
- throw "_rtp->SetRTCPStatus";
- exit(1);
- }
-
- if (_rtp->SetTMMBRStatus(true) != 0)
- {
- throw "_rtp->SetTMMBRStatus";
- exit(1);
- }
-
- if (_rtp->RegisterReceivePayload("I420", payloadType, 90000) != 0)
- {
- throw "_rtp->RegisterReceivePayload";
- exit(1);
- }
-
- _isReceiver = true;
-
- return (0);
-}
-
-
-int32_t TestSenderReceiver::Start()
-{
- CriticalSectionScoped cs(_critSect);
-
- _eventPtr = EventWrapper::Create();
-
- if (_rtp->SetSendingStatus(true) != 0)
- {
- throw "_rtp->SetSendingStatus";
- exit(1);
- }
-
- _procThread = ThreadWrapper::CreateThread(ProcThreadFunction, this,
- "TestSenderReceiver");
-
- _running = true;
-
- if (_isReceiver)
- {
- if (_transport->StartReceiving(NR_OF_SOCKET_BUFFERS) != 0)
- {
- throw "_transport->StartReceiving";
- exit(1);
- }
- }
-
- _procThread->Start();
- _procThread->SetPriority(kRealtimePriority);
-
- return 0;
-
-}
-
-
-int32_t TestSenderReceiver::Stop ()
-{
- CriticalSectionScoped cs(_critSect);
-
- _transport->StopReceiving();
-
- if (_procThread)
- {
- _running = false;
- _eventPtr->Set();
-
- _procThread->Stop();
- _procThread.reset();
-
- delete _eventPtr;
- }
-
- return (0);
-}
-
-
-bool TestSenderReceiver::ProcLoop(void)
-{
-
- // process RTP/RTCP module
- _rtp->Process();
-
- // process SocketTransport module
- _transport->Process();
-
- // no critSect
- while (_running)
- {
- // ask RTP/RTCP module for wait time
- int32_t rtpWait = _rtp->TimeUntilNextProcess();
-
- // ask SocketTransport module for wait time
- int32_t tpWait = _transport->TimeUntilNextProcess();
-
- int32_t minWait = (rtpWait < tpWait) ? rtpWait: tpWait;
- minWait = (minWait > 0) ? minWait : 0;
- // wait
- _eventPtr->Wait(minWait);
-
- // process RTP/RTCP module
- _rtp->Process();
-
- // process SocketTransport module
- _transport->Process();
-
- }
-
- return true;
-}
-
-
-int32_t TestSenderReceiver::ReceiveBitrateKbps ()
-{
- size_t bytesSent;
- uint32_t packetsSent;
- size_t bytesReceived;
- uint32_t packetsReceived;
-
- if (_rtp->DataCountersRTP(&bytesSent, &packetsSent, &bytesReceived, &packetsReceived) == 0)
- {
- int64_t now = TickTime::MillisecondTimestamp();
- int32_t kbps = 0;
- if (now > _lastTime)
- {
- if (_lastTime > 0)
- {
- // 8 * bytes / ms = kbps
- kbps = static_cast<int32_t>(
- (8 * (bytesReceived - _lastBytesReceived)) / (now - _lastTime));
- }
- _lastTime = now;
- _lastBytesReceived = bytesReceived;
- }
- return (kbps);
- }
-
- return (-1);
-}
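The arithmetic above works because 8 * bytes / ms has units of bits per millisecond, which equals kbit/s; for example, 25 000 bytes received over a 400 ms window is 8 * 25000 / 400 = 500 kbit/s. The same step in isolation (function name is mine):

// 8 * bytes / ms == bits/ms == kbit/s.
long long BitrateKbps(long long deltaBytes, long long deltaMs) {
    return deltaMs > 0 ? 8 * deltaBytes / deltaMs : 0;
}
// BitrateKbps(25000, 400) == 500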
-
-
-int32_t TestSenderReceiver::SetPacketTimeout(const uint32_t timeoutMS)
-{
- return (_rtp->SetPacketTimeout(timeoutMS, 0 /* RTCP timeout */));
-}
-
-
-int32_t TestSenderReceiver::OnReceivedPayloadData(const uint8_t* payloadData,
- const size_t payloadSize,
- const webrtc::WebRtcRTPHeader* rtpHeader)
-{
- //printf("OnReceivedPayloadData\n");
- return (0);
-}
-
-
-void TestSenderReceiver::IncomingRTPPacket(const int8_t* incomingRtpPacket,
- const size_t rtpPacketLength,
- const int8_t* fromIP,
- const uint16_t fromPort)
-{
- _rtp->IncomingPacket((uint8_t *) incomingRtpPacket, rtpPacketLength);
-}
-
-
-
-void TestSenderReceiver::IncomingRTCPPacket(const int8_t* incomingRtcpPacket,
- const size_t rtcpPacketLength,
- const int8_t* fromIP,
- const uint16_t fromPort)
-{
- _rtp->IncomingPacket((uint8_t *) incomingRtcpPacket, rtcpPacketLength);
-}
-
-
-
-
-
-///////////////////
-
-
-int32_t TestSenderReceiver::InitSender (const uint32_t startBitrateKbps,
- const int8_t* ipAddr,
- const uint16_t rtpPort,
- const uint16_t rtcpPort /*= 0*/,
- const int8_t payloadType /*= 127*/)
-{
- CriticalSectionScoped cs(_critSect);
-
- _payloadType = payloadType;
-
- // check load generator valid
- if (_loadGenerator)
- {
- _loadGenerator->SetBitrate(startBitrateKbps);
- }
-
- if (_rtp->RegisterSendTransport(_transport) != 0)
- {
- throw "_rtp->RegisterSendTransport";
- exit(1);
- }
- if (_rtp->RegisterSendPayload("I420", _payloadType, 90000) != 0)
- {
- throw "_rtp->RegisterSendPayload";
- exit(1);
- }
-
- if (_rtp->RegisterIncomingVideoCallback(this) != 0)
- {
- throw "_rtp->RegisterIncomingVideoCallback";
- exit(1);
- }
-
- if (_rtp->SetRTCPStatus(RtcpMode::kReducedSize) != 0) {
- throw "_rtp->SetRTCPStatus";
- exit(1);
- }
-
- if (_rtp->SetSendBitrate(startBitrateKbps*1000, 0, MAX_BITRATE_KBPS) != 0)
- {
- throw "_rtp->SetSendBitrate";
- exit(1);
- }
-
-
- // SocketTransport
- if (_transport->InitializeSendSockets(ipAddr, rtpPort, rtcpPort))
- {
- throw "_transport->InitializeSendSockets";
- exit(1);
- }
-
- _isSender = true;
-
- return (0);
-}
-
-
-
-int32_t
-TestSenderReceiver::SendOutgoingData(const uint32_t timeStamp,
- const uint8_t* payloadData,
- const size_t payloadSize,
- const webrtc::FrameType frameType /*= webrtc::kVideoFrameDelta*/)
-{
- return (_rtp->SendOutgoingData(frameType, _payloadType, timeStamp, payloadData, payloadSize));
-}
-
-
-int32_t TestSenderReceiver::SetLoadGenerator(TestLoadGenerator *generator)
-{
- CriticalSectionScoped cs(_critSect);
-
- _loadGenerator = generator;
- return(0);
-
-}
-
-void TestSenderReceiver::OnNetworkChanged(const int32_t id,
- const uint32_t minBitrateBps,
- const uint32_t maxBitrateBps,
- const uint8_t fractionLost,
- const uint16_t roundTripTimeMs,
- const uint16_t bwEstimateKbitMin,
- const uint16_t bwEstimateKbitMax)
-{
- if (_loadGenerator)
- {
- _loadGenerator->SetBitrate(maxBitrateBps/1000);
- }
-
- if (_sendRecCB)
- {
- _sendRecCB->OnOnNetworkChanged(maxBitrateBps,
- fractionLost,
- roundTripTimeMs,
- bwEstimateKbitMin,
- bwEstimateKbitMax);
- }
-}
diff --git a/webrtc/modules/rtp_rtcp/test/BWEStandAlone/TestSenderReceiver.h b/webrtc/modules/rtp_rtcp/test/BWEStandAlone/TestSenderReceiver.h
deleted file mode 100644
index f394412a73..0000000000
--- a/webrtc/modules/rtp_rtcp/test/BWEStandAlone/TestSenderReceiver.h
+++ /dev/null
@@ -1,153 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_TESTSENDERRECEIVER_H_
-#define WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_TESTSENDERRECEIVER_H_
-
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
-#include "webrtc/test/channel_transport/udp_transport.h"
-#include "webrtc/typedefs.h"
-
-class TestLoadGenerator;
-namespace webrtc {
-class CriticalSectionWrapper;
-class EventWrapper;
-}
-
-using namespace webrtc;
-
-#define MAX_BITRATE_KBPS 50000
-
-
-class SendRecCB
-{
-public:
- virtual void OnOnNetworkChanged(const uint32_t bitrateTarget,
- const uint8_t fractionLost,
- const uint16_t roundTripTimeMs,
- const uint16_t bwEstimateKbitMin,
- const uint16_t bwEstimateKbitMax) = 0;
-
- virtual ~SendRecCB() {};
-};
-
-
-class TestSenderReceiver : public RtpFeedback, public RtpData, public UdpTransportData, public RtpVideoFeedback
-{
-
-public:
- TestSenderReceiver (void);
-
- ~TestSenderReceiver (void);
-
- void SetCallback (SendRecCB *cb) { _sendRecCB = cb; };
-
- int32_t Start();
-
- int32_t Stop();
-
- bool ProcLoop();
-
- /////////////////////////////////////////////
- // Receiver methods
-
- int32_t InitReceiver (const uint16_t rtpPort,
- const uint16_t rtcpPort = 0,
- const int8_t payloadType = 127);
-
- int32_t ReceiveBitrateKbps ();
-
- int32_t SetPacketTimeout(const uint32_t timeoutMS);
-
- // Inherited from RtpFeedback
- int32_t OnInitializeDecoder(const int32_t id,
- const int8_t payloadType,
- const int8_t payloadName[RTP_PAYLOAD_NAME_SIZE],
- const uint32_t frequency,
- const uint8_t channels,
- const uint32_t rate) override {
- return 0;
- }
-
- void OnIncomingSSRCChanged(const int32_t id, const uint32_t SSRC) override {
- }
-
- void OnIncomingCSRCChanged(const int32_t id,
- const uint32_t CSRC,
- const bool added) override {}
-
- // Inherited from RtpData
- int32_t OnReceivedPayloadData(
- const uint8_t* payloadData,
- const size_t payloadSize,
- const webrtc::WebRtcRTPHeader* rtpHeader) override;
-
- // Inherited from UdpTransportData
- void IncomingRTPPacket(const int8_t* incomingRtpPacket,
- const size_t rtpPacketLength,
- const int8_t* fromIP,
- const uint16_t fromPort) override;
-
- void IncomingRTCPPacket(const int8_t* incomingRtcpPacket,
- const size_t rtcpPacketLength,
- const int8_t* fromIP,
- const uint16_t fromPort) override;
-
- /////////////////////////////////
- // Sender methods
-
- int32_t InitSender (const uint32_t startBitrateKbps,
- const int8_t* ipAddr,
- const uint16_t rtpPort,
- const uint16_t rtcpPort = 0,
- const int8_t payloadType = 127);
-
- int32_t SendOutgoingData(const uint32_t timeStamp,
- const uint8_t* payloadData,
- const size_t payloadSize,
- const webrtc::FrameType frameType = webrtc::kVideoFrameDelta);
-
- int32_t SetLoadGenerator(TestLoadGenerator *generator);
-
- uint32_t BitrateSent() { return (_rtp->BitrateSent()); };
-
-
- // Inherited from RtpVideoFeedback
- virtual void OnReceivedIntraFrameRequest(const int32_t id,
- const uint8_t message = 0) {};
-
- virtual void OnNetworkChanged(const int32_t id,
- const uint32_t minBitrateBps,
- const uint32_t maxBitrateBps,
- const uint8_t fractionLost,
- const uint16_t roundTripTimeMs,
- const uint16_t bwEstimateKbitMin,
- const uint16_t bwEstimateKbitMax);
-
-private:
- RtpRtcp* _rtp;
- UdpTransport* _transport;
- webrtc::CriticalSectionWrapper* _critSect;
- webrtc::EventWrapper *_eventPtr;
- rtc::scoped_ptr<webrtc::ThreadWrapper> _procThread;
- bool _running;
- int8_t _payloadType;
- TestLoadGenerator* _loadGenerator;
- bool _isSender;
- bool _isReceiver;
- SendRecCB * _sendRecCB;
- size_t _lastBytesReceived;
- int64_t _lastTime;
-
-};
-
-#endif // WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_TESTSENDERRECEIVER_H_
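A hypothetical end-to-end wiring of this class, inferred from the declarations above; the address, ports and bitrate are placeholders:

// Receiver side.
TestSenderReceiver receiver;
receiver.InitReceiver(10000 /*rtpPort*/);
receiver.Start();

// Sender side; RTCP feedback arrives in OnNetworkChanged(), which forwards
// the new max bitrate to the attached TestLoadGenerator.
TestSenderReceiver sender;
sender.SetLoadGenerator(&generator);  // 'generator' is a TestLoadGenerator
sender.InitSender(500 /*startBitrateKbps*/,
                  reinterpret_cast<const int8_t*>("127.0.0.1"), 10000);
sender.Start();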
diff --git a/webrtc/modules/rtp_rtcp/test/bwe_standalone.gypi b/webrtc/modules/rtp_rtcp/test/bwe_standalone.gypi
deleted file mode 100644
index e45daec77d..0000000000
--- a/webrtc/modules/rtp_rtcp/test/bwe_standalone.gypi
+++ /dev/null
@@ -1,85 +0,0 @@
-# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS. All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-
-{
- 'targets': [
- {
- 'target_name': 'bwe_standalone',
- 'type': 'executable',
- 'dependencies': [
- 'matlab_plotting',
- 'rtp_rtcp',
- 'udp_transport',
- '<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
- ],
- 'sources': [
- 'BWEStandAlone/BWEStandAlone.cc',
- 'BWEStandAlone/TestLoadGenerator.cc',
- 'BWEStandAlone/TestLoadGenerator.h',
- 'BWEStandAlone/TestSenderReceiver.cc',
- 'BWEStandAlone/TestSenderReceiver.h',
- ], # source
- 'conditions': [
- ['OS=="linux"', {
- 'cflags': [
- '-fexceptions', # enable exceptions
- ],
- },
- ],
- ],
- },
-
- {
- 'target_name': 'matlab_plotting',
- 'type': 'static_library',
- 'dependencies': [
- 'matlab_plotting_include',
- '<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
- ],
- 'include_dirs': [
- '/opt/matlab2010a/extern/include',
- ],
- 'export_dependent_settings': [
- 'matlab_plotting_include',
- ],
- 'sources': [
- 'BWEStandAlone/MatlabPlot.cc',
- 'BWEStandAlone/MatlabPlot.h',
- ],
- 'link_settings': {
- 'ldflags' : [
- '-L/opt/matlab2010a/bin/glnxa64',
- '-leng',
- '-lmx',
- '-Wl,-rpath,/opt/matlab2010a/bin/glnxa64',
- ],
- },
- 'defines': [
- 'MATLAB',
- ],
- 'conditions': [
- ['OS=="linux"', {
- 'cflags': [
- '-fexceptions', # enable exceptions
- ],
- },
- ],
- ],
- },
-
- {
- 'target_name': 'matlab_plotting_include',
- 'type': 'none',
- 'direct_dependent_settings': {
- 'include_dirs': [
- 'BWEStandAlone',
- ],
- },
- },
- ],
-}
diff --git a/webrtc/modules/rtp_rtcp/test/testAPI/test_api.cc b/webrtc/modules/rtp_rtcp/test/testAPI/test_api.cc
index 0270e55802..1d4d6d04a5 100644
--- a/webrtc/modules/rtp_rtcp/test/testAPI/test_api.cc
+++ b/webrtc/modules/rtp_rtcp/test/testAPI/test_api.cc
@@ -9,14 +9,14 @@
*/
#include "webrtc/modules/rtp_rtcp/test/testAPI/test_api.h"
-#include "webrtc/test/null_transport.h"
#include <algorithm>
#include <vector>
-using namespace webrtc;
+#include "webrtc/test/null_transport.h"
namespace webrtc {
+
void LoopBackTransport::SetSendModule(RtpRtcp* rtp_rtcp_module,
RTPPayloadRegistry* payload_registry,
RtpReceiver* receiver,
@@ -76,7 +76,6 @@ int32_t TestRtpReceiver::OnReceivedPayloadData(
payload_size_ = payload_size;
return 0;
}
-} // namespace webrtc
class RtpRtcpAPITest : public ::testing::Test {
protected:
@@ -187,3 +186,5 @@ TEST_F(RtpRtcpAPITest, RtxReceiver) {
rtx_header.payloadType = 0;
EXPECT_TRUE(rtp_payload_registry_->IsRtx(rtx_header));
}
+
+} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/test/testAPI/test_api.h b/webrtc/modules/rtp_rtcp/test/testAPI/test_api.h
index 73334a8b26..d8040f7902 100644
--- a/webrtc/modules/rtp_rtcp/test/testAPI/test_api.h
+++ b/webrtc/modules/rtp_rtcp/test/testAPI/test_api.h
@@ -7,16 +7,18 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
+#ifndef WEBRTC_MODULES_RTP_RTCP_TEST_TESTAPI_TEST_API_H_
+#define WEBRTC_MODULES_RTP_RTCP_TEST_TESTAPI_TEST_API_H_
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/common_types.h"
-#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/transport.h"
namespace webrtc {
@@ -68,3 +70,4 @@ class TestRtpReceiver : public NullRtpData {
};
} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_TEST_TESTAPI_TEST_API_H_
diff --git a/webrtc/modules/rtp_rtcp/test/testAPI/test_api_audio.cc b/webrtc/modules/rtp_rtcp/test/testAPI/test_api_audio.cc
index 745386d485..634969b311 100644
--- a/webrtc/modules/rtp_rtcp/test/testAPI/test_api_audio.cc
+++ b/webrtc/modules/rtp_rtcp/test/testAPI/test_api_audio.cc
@@ -15,12 +15,12 @@
#include "webrtc/modules/rtp_rtcp/test/testAPI/test_api.h"
#include "webrtc/common_types.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.h"
-using namespace webrtc;
-
+namespace webrtc {
+namespace {
#define test_rate 64000u
class VerifyingAudioReceiver : public NullRtpData {
@@ -64,7 +64,7 @@ class RTPCallback : public NullRtpFeedback {
int32_t OnInitializeDecoder(const int8_t payloadType,
const char payloadName[RTP_PAYLOAD_NAME_SIZE],
const int frequency,
- const uint8_t channels,
+ const size_t channels,
const uint32_t rate) override {
if (payloadType == 96) {
EXPECT_EQ(test_rate, rate) <<
@@ -165,7 +165,7 @@ TEST_F(RtpRtcpAudioTest, Basic) {
module1->SetStartTimestamp(test_timestamp);
// Test detection at the end of a DTMF tone.
- //EXPECT_EQ(0, module2->SetTelephoneEventForwardToDecoder(true));
+ // EXPECT_EQ(0, module2->SetTelephoneEventForwardToDecoder(true));
EXPECT_EQ(0, module1->SetSendingStatus(true));
@@ -241,7 +241,7 @@ TEST_F(RtpRtcpAudioTest, RED) {
EXPECT_EQ(0, module1->SetSendREDPayloadType(voice_codec.pltype));
int8_t red = 0;
- EXPECT_EQ(0, module1->SendREDPayloadType(red));
+ EXPECT_EQ(0, module1->SendREDPayloadType(&red));
EXPECT_EQ(voice_codec.pltype, red);
EXPECT_EQ(0, rtp_receiver1_->RegisterReceivePayload(
voice_codec.plname,
@@ -278,7 +278,7 @@ TEST_F(RtpRtcpAudioTest, RED) {
&fragmentation));
EXPECT_EQ(0, module1->SetSendREDPayloadType(-1));
- EXPECT_EQ(-1, module1->SendREDPayloadType(red));
+ EXPECT_EQ(-1, module1->SendREDPayloadType(&red));
}
TEST_F(RtpRtcpAudioTest, DTMF) {
@@ -334,7 +334,7 @@ TEST_F(RtpRtcpAudioTest, DTMF) {
   // Send RTP packets for 16 tones, 160 ms each, with a 100 ms pause in
   // between: 16 * 160 ms + 16 * 100 ms = 2560 ms + 1600 ms = 4160 ms.
- for (;timeStamp <= 250 * 160; timeStamp += 160) {
+ for (; timeStamp <= 250 * 160; timeStamp += 160) {
EXPECT_EQ(0, module1->SendOutgoingData(webrtc::kAudioFrameSpeech, 96,
timeStamp, -1, test, 4));
fake_clock.AdvanceTimeMilliseconds(20);
@@ -342,10 +342,13 @@ TEST_F(RtpRtcpAudioTest, DTMF) {
}
EXPECT_EQ(0, module1->SendTelephoneEventOutband(32, 9000, 10));
- for (;timeStamp <= 740 * 160; timeStamp += 160) {
+ for (; timeStamp <= 740 * 160; timeStamp += 160) {
EXPECT_EQ(0, module1->SendOutgoingData(webrtc::kAudioFrameSpeech, 96,
timeStamp, -1, test, 4));
fake_clock.AdvanceTimeMilliseconds(20);
module1->Process();
}
}
+
+} // namespace
+} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/test/testAPI/test_api_rtcp.cc b/webrtc/modules/rtp_rtcp/test/testAPI/test_api_rtcp.cc
index e9d81122b1..6c60bf1f6d 100644
--- a/webrtc/modules/rtp_rtcp/test/testAPI/test_api_rtcp.cc
+++ b/webrtc/modules/rtp_rtcp/test/testAPI/test_api_rtcp.cc
@@ -14,13 +14,14 @@
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/common_types.h"
-#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.h"
#include "webrtc/modules/rtp_rtcp/test/testAPI/test_api.h"
-using namespace webrtc;
+namespace webrtc {
+namespace {
const uint64_t kTestPictureId = 12345678;
const uint8_t kSliPictureId = 156;
@@ -29,30 +30,29 @@ class RtcpCallback : public RtcpIntraFrameObserver {
public:
void SetModule(RtpRtcp* module) {
_rtpRtcpModule = module;
- };
+ }
virtual void OnRTCPPacketTimeout(const int32_t id) {
}
virtual void OnLipSyncUpdate(const int32_t id,
- const int32_t audioVideoOffset) {
- };
- virtual void OnReceivedIntraFrameRequest(uint32_t ssrc) {
- };
+ const int32_t audioVideoOffset) {}
+ virtual void OnReceivedIntraFrameRequest(uint32_t ssrc) {}
virtual void OnReceivedSLI(uint32_t ssrc,
uint8_t pictureId) {
EXPECT_EQ(kSliPictureId & 0x3f, pictureId);
- };
+ }
virtual void OnReceivedRPSI(uint32_t ssrc,
uint64_t pictureId) {
EXPECT_EQ(kTestPictureId, pictureId);
- };
- virtual void OnLocalSsrcChanged(uint32_t old_ssrc, uint32_t new_ssrc) {};
+ }
+ virtual void OnLocalSsrcChanged(uint32_t old_ssrc, uint32_t new_ssrc) {}
+
private:
RtpRtcp* _rtpRtcpModule;
};
class TestRtpFeedback : public NullRtpFeedback {
public:
- TestRtpFeedback(RtpRtcp* rtp_rtcp) : rtp_rtcp_(rtp_rtcp) {}
+ explicit TestRtpFeedback(RtpRtcp* rtp_rtcp) : rtp_rtcp_(rtp_rtcp) {}
virtual ~TestRtpFeedback() {}
void OnIncomingSSRCChanged(const uint32_t ssrc) override {
@@ -266,3 +266,6 @@ TEST_F(RtpRtcpRtcpTest, RemoteRTCPStatRemote) {
EXPECT_EQ(test_sequence_number, report_blocks[0].extendedHighSeqNum);
EXPECT_EQ(0u, report_blocks[0].fractionLost);
}
+
+} // namespace
+} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/test/testAPI/test_api_video.cc b/webrtc/modules/rtp_rtcp/test/testAPI/test_api_video.cc
index 30a6a1c303..16ea540bd5 100644
--- a/webrtc/modules/rtp_rtcp/test/testAPI/test_api_video.cc
+++ b/webrtc/modules/rtp_rtcp/test/testAPI/test_api_video.cc
@@ -15,9 +15,9 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/common_types.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_video.h"
#include "webrtc/modules/rtp_rtcp/test/testAPI/test_api.h"
diff --git a/webrtc/modules/rtp_rtcp/test/testFec/average_residual_loss_xor_codes.h b/webrtc/modules/rtp_rtcp/test/testFec/average_residual_loss_xor_codes.h
index 2e8d676e47..6c233bba17 100644
--- a/webrtc/modules/rtp_rtcp/test/testFec/average_residual_loss_xor_codes.h
+++ b/webrtc/modules/rtp_rtcp/test/testFec/average_residual_loss_xor_codes.h
@@ -7,8 +7,10 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
+#ifndef WEBRTC_MODULES_RTP_RTCP_TEST_TESTFEC_AVERAGE_RESIDUAL_LOSS_XOR_CODES_H_
+#define WEBRTC_MODULES_RTP_RTCP_TEST_TESTFEC_AVERAGE_RESIDUAL_LOSS_XOR_CODES_H_
-namespace {
+namespace webrtc {
// Maximum number of media packets allowed in this test. The burst mask types
// are currently defined up to (kMaxMediaPacketsTest, kMaxMediaPacketsTest).
@@ -185,4 +187,5 @@ const float kMaxResidualLossBurstyMask[kNumberCodes] = {
0.009657f
};
-} // namespace
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_TEST_TESTFEC_AVERAGE_RESIDUAL_LOSS_XOR_CODES_H_
diff --git a/webrtc/modules/rtp_rtcp/test/testFec/test_fec.cc b/webrtc/modules/rtp_rtcp/test/testFec/test_fec.cc
index a8eafdd27e..b164b7e04c 100644
--- a/webrtc/modules/rtp_rtcp/test/testFec/test_fec.cc
+++ b/webrtc/modules/rtp_rtcp/test/testFec/test_fec.cc
@@ -22,43 +22,49 @@
#include <list>
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/rtp_rtcp/source/fec_private_tables_bursty.h"
+#include "webrtc/base/random.h"
+#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
#include "webrtc/modules/rtp_rtcp/source/forward_error_correction.h"
#include "webrtc/modules/rtp_rtcp/source/forward_error_correction_internal.h"
-
-#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
#include "webrtc/test/testsupport/fileutils.h"
-//#define VERBOSE_OUTPUT
+// #define VERBOSE_OUTPUT
namespace webrtc {
+namespace fec_private_tables {
+extern const uint8_t** kPacketMaskBurstyTbl[12];
+}
namespace test {
+using fec_private_tables::kPacketMaskBurstyTbl;
void ReceivePackets(
ForwardErrorCorrection::ReceivedPacketList* toDecodeList,
ForwardErrorCorrection::ReceivedPacketList* receivedPacketList,
- uint32_t numPacketsToDecode, float reorderRate, float duplicateRate) {
+ size_t numPacketsToDecode,
+ float reorderRate,
+ float duplicateRate,
+ Random* random) {
assert(toDecodeList->empty());
assert(numPacketsToDecode <= receivedPacketList->size());
ForwardErrorCorrection::ReceivedPacketList::iterator it;
- for (uint32_t i = 0; i < numPacketsToDecode; i++) {
+ for (size_t i = 0; i < numPacketsToDecode; i++) {
it = receivedPacketList->begin();
// Reorder packets.
- float randomVariable = static_cast<float>(rand()) / RAND_MAX;
+ float randomVariable = random->Rand<float>();
while (randomVariable < reorderRate) {
++it;
if (it == receivedPacketList->end()) {
--it;
break;
}
- randomVariable = static_cast<float>(rand()) / RAND_MAX;
+ randomVariable = random->Rand<float>();
}
ForwardErrorCorrection::ReceivedPacket* receivedPacket = *it;
toDecodeList->push_back(receivedPacket);
// Duplicate packets.
- randomVariable = static_cast<float>(rand()) / RAND_MAX;
+ randomVariable = random->Rand<float>();
while (randomVariable < duplicateRate) {
ForwardErrorCorrection::ReceivedPacket* duplicatePacket =
new ForwardErrorCorrection::ReceivedPacket;
@@ -69,7 +75,7 @@ void ReceivePackets(
duplicatePacket->pkt->length = receivedPacket->pkt->length;
toDecodeList->push_back(duplicatePacket);
- randomVariable = static_cast<float>(rand()) / RAND_MAX;
+ randomVariable = random->Rand<float>();
}
receivedPacketList->erase(it);
}
@@ -77,12 +83,8 @@ void ReceivePackets(
TEST(FecTest, FecTest) {
// TODO(marpan): Split this function into subroutines/helper functions.
- enum {
- kMaxNumberMediaPackets = 48
- };
- enum {
- kMaxNumberFecPackets = 48
- };
+ enum { kMaxNumberMediaPackets = 48 };
+ enum { kMaxNumberFecPackets = 48 };
const uint32_t kNumMaskBytesL0 = 2;
const uint32_t kNumMaskBytesL1 = 6;
@@ -91,15 +93,12 @@ TEST(FecTest, FecTest) {
const bool kUseUnequalProtection = true;
// FEC mask types.
- const FecMaskType kMaskTypes[] = { kFecMaskRandom, kFecMaskBursty };
+ const FecMaskType kMaskTypes[] = {kFecMaskRandom, kFecMaskBursty};
const int kNumFecMaskTypes = sizeof(kMaskTypes) / sizeof(*kMaskTypes);
- // TODO(pbos): Fix this. Hack to prevent a warning
- // ('-Wunneeded-internal-declaration') from clang.
- (void) kPacketMaskBurstyTbl;
-
// Maximum number of media packets allowed for the mask type.
- const uint16_t kMaxMediaPackets[] = {kMaxNumberMediaPackets,
+ const uint16_t kMaxMediaPackets[] = {
+ kMaxNumberMediaPackets,
sizeof(kPacketMaskBurstyTbl) / sizeof(*kPacketMaskBurstyTbl)};
ASSERT_EQ(12, kMaxMediaPackets[1]) << "Max media packets for bursty mode not "
@@ -115,7 +114,7 @@ TEST(FecTest, FecTest) {
ForwardErrorCorrection::Packet* mediaPacket = NULL;
// Running over only one loss rate to limit execution time.
- const float lossRate[] = { 0.5f };
+ const float lossRate[] = {0.5f};
const uint32_t lossRateSize = sizeof(lossRate) / sizeof(*lossRate);
const float reorderRate = 0.1f;
const float duplicateRate = 0.1f;
@@ -127,7 +126,7 @@ TEST(FecTest, FecTest) {
// Seed the random number generator, storing the seed to file in order to
// reproduce past results.
const unsigned int randomSeed = static_cast<unsigned int>(time(NULL));
- srand(randomSeed);
+ Random random(randomSeed);
std::string filename = webrtc::test::OutputPath() + "randomSeedLog.txt";
FILE* randomSeedFile = fopen(filename.c_str(), "a");
fprintf(randomSeedFile, "%u\n", randomSeed);
@@ -135,15 +134,13 @@ TEST(FecTest, FecTest) {
randomSeedFile = NULL;
uint16_t seqNum = 0;
- uint32_t timeStamp = static_cast<uint32_t>(rand());
- const uint32_t ssrc = static_cast<uint32_t>(rand());
+ uint32_t timeStamp = random.Rand<uint32_t>();
+ const uint32_t ssrc = random.Rand(1u, 0xfffffffe);
// Loop over the mask types: random and bursty.
for (int mask_type_idx = 0; mask_type_idx < kNumFecMaskTypes;
++mask_type_idx) {
-
for (uint32_t lossRateIdx = 0; lossRateIdx < lossRateSize; ++lossRateIdx) {
-
printf("Loss rate: %.2f, Mask type %d \n", lossRate[lossRateIdx],
mask_type_idx);
@@ -159,14 +156,12 @@ TEST(FecTest, FecTest) {
for (uint32_t numFecPackets = 1;
numFecPackets <= numMediaPackets && numFecPackets <= packetMaskMax;
numFecPackets++) {
-
// Loop over numImpPackets: usually <= (0.3*numMediaPackets).
// For this test we check up to ~ (numMediaPackets / 4).
uint32_t maxNumImpPackets = numMediaPackets / 4 + 1;
for (uint32_t numImpPackets = 0; numImpPackets <= maxNumImpPackets &&
- numImpPackets <= packetMaskMax;
+ numImpPackets <= packetMaskMax;
numImpPackets++) {
-
uint8_t protectionFactor =
static_cast<uint8_t>(numFecPackets * 255 / numMediaPackets);
@@ -181,10 +176,11 @@ TEST(FecTest, FecTest) {
mask_table, packetMask);
#ifdef VERBOSE_OUTPUT
- printf("%u media packets, %u FEC packets, %u numImpPackets, "
- "loss rate = %.2f \n",
- numMediaPackets, numFecPackets, numImpPackets,
- lossRate[lossRateIdx]);
+ printf(
+ "%u media packets, %u FEC packets, %u numImpPackets, "
+ "loss rate = %.2f \n",
+ numMediaPackets, numFecPackets, numImpPackets,
+ lossRate[lossRateIdx]);
printf("Packet mask matrix \n");
#endif
@@ -232,16 +228,15 @@ TEST(FecTest, FecTest) {
for (uint32_t i = 0; i < numMediaPackets; ++i) {
mediaPacket = new ForwardErrorCorrection::Packet;
mediaPacketList.push_back(mediaPacket);
- mediaPacket->length = static_cast<size_t>(
- (static_cast<float>(rand()) / RAND_MAX) *
- (IP_PACKET_SIZE - 12 - 28 -
- ForwardErrorCorrection::PacketOverhead()));
- if (mediaPacket->length < 12) {
- mediaPacket->length = 12;
- }
+ const uint32_t kMinPacketSize = 12;
+ const uint32_t kMaxPacketSize = static_cast<uint32_t>(
+ IP_PACKET_SIZE - 12 - 28 -
+ ForwardErrorCorrection::PacketOverhead());
+ mediaPacket->length = random.Rand(kMinPacketSize, kMaxPacketSize);
+
// Generate random values for the first 2 bytes.
- mediaPacket->data[0] = static_cast<uint8_t>(rand() % 256);
- mediaPacket->data[1] = static_cast<uint8_t>(rand() % 256);
+ mediaPacket->data[0] = random.Rand<uint8_t>();
+ mediaPacket->data[1] = random.Rand<uint8_t>();
// The first two bits are assumed to be 10 by the
// FEC encoder. In fact the FEC decoder will set the
@@ -266,7 +261,7 @@ TEST(FecTest, FecTest) {
ByteWriter<uint32_t>::WriteBigEndian(&mediaPacket->data[8], ssrc);
// Generate random values for payload
for (size_t j = 12; j < mediaPacket->length; ++j) {
- mediaPacket->data[j] = static_cast<uint8_t>(rand() % 256);
+ mediaPacket->data[j] = random.Rand<uint8_t>();
}
seqNum++;
}
@@ -289,8 +284,7 @@ TEST(FecTest, FecTest) {
while (mediaPacketListItem != mediaPacketList.end()) {
mediaPacket = *mediaPacketListItem;
// We want a value between 0 and 1.
- const float lossRandomVariable =
- (static_cast<float>(rand()) / (RAND_MAX));
+ const float lossRandomVariable = random.Rand<float>();
if (lossRandomVariable >= lossRate[lossRateIdx]) {
mediaLossMask[mediaPacketIdx] = 1;
@@ -315,8 +309,7 @@ TEST(FecTest, FecTest) {
uint32_t fecPacketIdx = 0;
while (fecPacketListItem != fecPacketList.end()) {
fecPacket = *fecPacketListItem;
- const float lossRandomVariable =
- (static_cast<float>(rand()) / (RAND_MAX));
+ const float lossRandomVariable = random.Rand<float>();
if (lossRandomVariable >= lossRate[lossRateIdx]) {
fecLossMask[fecPacketIdx] = 1;
receivedPacket = new ForwardErrorCorrection::ReceivedPacket;
@@ -387,18 +380,15 @@ TEST(FecTest, FecTest) {
// For error-checking frame completion.
bool fecPacketReceived = false;
while (!receivedPacketList.empty()) {
- uint32_t numPacketsToDecode = static_cast<uint32_t>(
- (static_cast<float>(rand()) / RAND_MAX) *
- receivedPacketList.size() + 0.5);
- if (numPacketsToDecode < 1) {
- numPacketsToDecode = 1;
- }
+ size_t numPacketsToDecode = random.Rand(
+ 1u, static_cast<uint32_t>(receivedPacketList.size()));
ReceivePackets(&toDecodeList, &receivedPacketList,
- numPacketsToDecode, reorderRate, duplicateRate);
+ numPacketsToDecode, reorderRate, duplicateRate,
+ &random);
if (fecPacketReceived == false) {
ForwardErrorCorrection::ReceivedPacketList::iterator
- toDecodeIt = toDecodeList.begin();
+ toDecodeIt = toDecodeList.begin();
while (toDecodeIt != toDecodeList.end()) {
receivedPacket = *toDecodeIt;
if (receivedPacket->is_fec) {
@@ -418,11 +408,11 @@ TEST(FecTest, FecTest) {
if (mediaLossMask[mediaPacketIdx] == 1) {
// Should have recovered this packet.
ForwardErrorCorrection::RecoveredPacketList::iterator
- recoveredPacketListItem = recoveredPacketList.begin();
+ recoveredPacketListItem = recoveredPacketList.begin();
- ASSERT_FALSE(
- recoveredPacketListItem == recoveredPacketList.end())
- << "Insufficient number of recovered packets.";
+ ASSERT_FALSE(recoveredPacketListItem ==
+ recoveredPacketList.end())
+ << "Insufficient number of recovered packets.";
mediaPacket = *mediaPacketListItem;
ForwardErrorCorrection::RecoveredPacket* recoveredPacket =
*recoveredPacketListItem;
@@ -462,7 +452,7 @@ TEST(FecTest, FecTest) {
// Delete received packets we didn't pass to DecodeFEC(), due to
// early frame completion.
ForwardErrorCorrection::ReceivedPacketList::iterator
- receivedPacketIt = receivedPacketList.begin();
+ receivedPacketIt = receivedPacketList.begin();
while (receivedPacketIt != receivedPacketList.end()) {
receivedPacket = *receivedPacketIt;
delete receivedPacket;
@@ -476,11 +466,11 @@ TEST(FecTest, FecTest) {
}
timeStamp += 90000 / 30;
} // loop over numImpPackets
- } // loop over FecPackets
- } // loop over numMediaPackets
+ } // loop over FecPackets
+ } // loop over numMediaPackets
delete[] packetMask;
} // loop over loss rates
- } // loop over mask types
+ } // loop over mask types
// Have DecodeFEC free allocated memory.
fec.ResetState(&recoveredPacketList);
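
The hunks above swap the C-library rand()/RAND_MAX idiom for the seeded webrtc::Random helper, which makes the test deterministic and removes the modulo and float-division boilerplate. A minimal sketch of the API as it is used here, assuming the webrtc/base/random.h header and an arbitrary fixed seed:

    #include <stdint.h>

    #include "webrtc/base/random.h"

    void RandomUsageSketch() {
      webrtc::Random random(0x12345678);      // illustrative fixed seed
      uint8_t byte = random.Rand<uint8_t>();  // uniform byte, as in the payload fill
      float p = random.Rand<float>();         // uniform in [0, 1), as in the loss draw
      uint32_t n = random.Rand(1u, 10u);      // uniform integer in [1, 10], inclusive
      (void)byte; (void)p; (void)n;
    }
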
diff --git a/webrtc/modules/rtp_rtcp/test/testFec/test_packet_masks_metrics.cc b/webrtc/modules/rtp_rtcp/test/testFec/test_packet_masks_metrics.cc
index 843a7f77f5..466214c740 100644
--- a/webrtc/modules/rtp_rtcp/test/testFec/test_packet_masks_metrics.cc
+++ b/webrtc/modules/rtp_rtcp/test/testFec/test_packet_masks_metrics.cc
@@ -59,13 +59,6 @@ enum { kMaxNumberMediaPackets = 48 };
// Maximum number of media packets allowed for each mask type.
const uint16_t kMaxMediaPackets[] = {kMaxNumberMediaPackets, 12};
-// Maximum number of media packets allowed in this test. The burst mask types
-// are currently defined up to (k=12,m=12).
-const int kMaxMediaPacketsTest = 12;
-
-// Maximum number of FEC codes considered in this test.
-const int kNumberCodes = kMaxMediaPacketsTest * (kMaxMediaPacketsTest + 1) / 2;
-
// Maximum gap size for characterizing the consecutiveness of the loss.
const int kMaxGapSize = 2 * kMaxMediaPacketsTest;
@@ -407,7 +400,7 @@ class FecPacketMaskMetricsTest : public ::testing::Test {
// Loop over all loss configurations for the symbol sequence of length
// |tot_num_packets|. In this version we process up to (k=12, m=12) codes,
// and get exact expressions for the residual loss.
- // TODO (marpan): For larger codes, loop over some random sample of loss
+ // TODO(marpan): For larger codes, loop over some random sample of loss
// configurations, sampling driven by the underlying statistical loss model
// (importance sampling).
@@ -427,7 +420,7 @@ class FecPacketMaskMetricsTest : public ::testing::Test {
// Map configuration number to a loss state.
for (int j = 0; j < tot_num_packets; j++) {
- state[j]=0; // Received state.
+ state[j] = 0; // Received state.
int bit_value = i >> (tot_num_packets - j - 1) & 1;
if (bit_value == 1) {
state[j] = 1; // Lost state.
@@ -860,9 +853,9 @@ TEST_F(FecPacketMaskMetricsTest, FecXorVsRS) {
EXPECT_GE(kMetricsXorBursty[code_index].average_residual_loss[k],
kMetricsReedSolomon[code_index].average_residual_loss[k]);
}
- // TODO (marpan): There are some cases (for high loss rates and/or
- // burst loss models) where XOR is better than RS. Is there some pattern
- // we can identify and enforce as a constraint?
+ // TODO(marpan): There are some cases (for high loss rates and/or
+ // burst loss models) where XOR is better than RS. Is there some pattern
+ // we can identify and enforce as a constraint?
}
}
}
@@ -874,7 +867,7 @@ TEST_F(FecPacketMaskMetricsTest, FecXorVsRS) {
TEST_F(FecPacketMaskMetricsTest, FecTrendXorVsRsLossRate) {
SetLossModels();
SetCodeParams();
- // TODO (marpan): Examine this further to see if the condition can be strictly
+ // TODO(marpan): Examine this further to see if the condition can be strictly
// satisfied (i.e., scale = 1.0) for all codes with different/better masks.
double scale = 0.90;
int num_loss_rates = sizeof(kAverageLossRate) /
@@ -898,7 +891,7 @@ TEST_F(FecPacketMaskMetricsTest, FecTrendXorVsRsLossRate) {
kMetricsXorRandom[code_index].average_residual_loss[k+1];
EXPECT_GE(diff_rs_xor_random_loss1, scale * diff_rs_xor_random_loss2);
}
- // TODO (marpan): Investigate the cases for the bursty mask where
+ // TODO(marpan): Investigate the cases for the bursty mask where
// this trend is not strictly satisfied.
}
}
@@ -937,7 +930,7 @@ TEST_F(FecPacketMaskMetricsTest, FecBehaviorViaProtectionLevelAndLength) {
EXPECT_LT(
kMetricsReedSolomon[code_index2].average_residual_loss[k],
kMetricsReedSolomon[code_index1].average_residual_loss[k]);
- // TODO (marpan): There are some corner cases where this is not
+ // TODO(marpan): There are some corner cases where this is not
// satisfied with the current packet masks. Look into updating
// these cases to see if this behavior should/can be satisfied,
// with overall lower residual loss for those XOR codes.
@@ -963,7 +956,7 @@ TEST_F(FecPacketMaskMetricsTest, FecVarianceBehaviorXorVsRs) {
SetCodeParams();
// The condition is not strictly satisfied with the current masks,
// i.e., for some codes, the variance of XOR may be slightly higher than RS.
- // TODO (marpan): Examine this further to see if the condition can be strictly
+ // TODO(marpan): Examine this further to see if the condition can be strictly
// satisfied (i.e., scale = 1.0) for all codes with different/better masks.
double scale = 0.95;
for (int code_index = 0; code_index < max_num_codes_; code_index++) {
@@ -998,7 +991,7 @@ TEST_F(FecPacketMaskMetricsTest, FecXorBurstyPerfectRecoveryConsecutiveLoss) {
// bursty mask type, for random loss models at low loss rates.
// The XOR codes with bursty mask types are generally better than the one with
// random mask type, for bursty loss models and/or high loss rates.
-// TODO (marpan): Enable this test when some of the packet masks are updated.
+// TODO(marpan): Enable this test when some of the packet masks are updated.
// Some isolated cases of the codes don't pass this currently.
/*
TEST_F(FecPacketMaskMetricsTest, FecXorRandomVsBursty) {
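
The loss-enumeration hunk above maps each configuration number onto a per-packet loss state by reading its bits most-significant-first. The same mapping, pulled out as a standalone sketch:

    #include <vector>

    // config enumerates all 2^tot_num_packets loss patterns; bit j, counted
    // from the most significant end, marks packet j as lost (1) or received (0).
    std::vector<int> LossStateFromConfig(int config, int tot_num_packets) {
      std::vector<int> state(tot_num_packets, 0);  // 0 = received
      for (int j = 0; j < tot_num_packets; ++j) {
        if ((config >> (tot_num_packets - j - 1)) & 1)
          state[j] = 1;                            // 1 = lost
      }
      return state;
    }
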
diff --git a/webrtc/modules/utility/BUILD.gn b/webrtc/modules/utility/BUILD.gn
index 163515c466..6704cd6d9a 100644
--- a/webrtc/modules/utility/BUILD.gn
+++ b/webrtc/modules/utility/BUILD.gn
@@ -10,12 +10,12 @@ import("../../build/webrtc.gni")
source_set("utility") {
sources = [
- "interface/audio_frame_operations.h",
- "interface/file_player.h",
- "interface/file_recorder.h",
- "interface/helpers_android.h",
- "interface/jvm_android.h",
- "interface/process_thread.h",
+ "include/audio_frame_operations.h",
+ "include/file_player.h",
+ "include/file_recorder.h",
+ "include/helpers_android.h",
+ "include/jvm_android.h",
+ "include/process_thread.h",
"source/audio_frame_operations.cc",
"source/coder.cc",
"source/coder.h",
diff --git a/webrtc/modules/utility/OWNERS b/webrtc/modules/utility/OWNERS
index 347d278614..65cb70c9b9 100644
--- a/webrtc/modules/utility/OWNERS
+++ b/webrtc/modules/utility/OWNERS
@@ -1,4 +1,9 @@
asapersson@webrtc.org
perkj@webrtc.org
+# These are for the common case of adding or renaming files. If you're making
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
+
per-file BUILD.gn=kjellander@webrtc.org

diff --git a/webrtc/modules/utility/include/audio_frame_operations.h b/webrtc/modules/utility/include/audio_frame_operations.h
new file mode 100644
index 0000000000..1551d86894
--- /dev/null
+++ b/webrtc/modules/utility/include/audio_frame_operations.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UTILITY_INCLUDE_AUDIO_FRAME_OPERATIONS_H_
+#define WEBRTC_MODULES_UTILITY_INCLUDE_AUDIO_FRAME_OPERATIONS_H_
+
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+class AudioFrame;
+
+// TODO(andrew): consolidate this with utility.h and audio_frame_manipulator.h.
+// Change reference parameters to pointers. Consider using a namespace rather
+// than a class.
+class AudioFrameOperations {
+ public:
+ // Upmixes mono |src_audio| to stereo |dst_audio|. This is an out-of-place
+ // operation, meaning |src_audio| and |dst_audio| must point to different
+ // buffers. It is the caller's responsibility to ensure that |dst_audio| is
+ // sufficiently large.
+ static void MonoToStereo(const int16_t* src_audio, size_t samples_per_channel,
+ int16_t* dst_audio);
+ // |frame.num_channels_| will be updated. This version checks for sufficient
+ // buffer size and that |num_channels_| is mono.
+ static int MonoToStereo(AudioFrame* frame);
+
+ // Downmixes stereo |src_audio| to mono |dst_audio|. This is an in-place
+ // operation, meaning |src_audio| and |dst_audio| may point to the same
+ // buffer.
+ static void StereoToMono(const int16_t* src_audio, size_t samples_per_channel,
+ int16_t* dst_audio);
+ // |frame.num_channels_| will be updated. This version checks that
+ // |num_channels_| is stereo.
+ static int StereoToMono(AudioFrame* frame);
+
+ // Swap the left and right channels of |frame|. Fails silently if |frame| is
+ // not stereo.
+ static void SwapStereoChannels(AudioFrame* frame);
+
+ // Zeros out the audio and sets |frame.energy| to zero.
+ static void Mute(AudioFrame& frame);
+
+ static int Scale(float left, float right, AudioFrame& frame);
+
+ static int ScaleWithSat(float scale, AudioFrame& frame);
+};
+
+} // namespace webrtc
+
+#endif // #ifndef WEBRTC_MODULES_UTILITY_INCLUDE_AUDIO_FRAME_OPERATIONS_H_
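
As a usage note for the header added above: the pointer-based MonoToStereo() is out-of-place and leaves buffer management to the caller. A hedged sketch, with illustrative buffer sizes:

    #include "webrtc/modules/utility/include/audio_frame_operations.h"

    void UpmixSketch() {
      const size_t kSamplesPerChannel = 160;   // e.g. 10 ms at 16 kHz
      int16_t mono[kSamplesPerChannel] = {0};
      int16_t stereo[2 * kSamplesPerChannel];  // distinct, 2x-sized destination
      webrtc::AudioFrameOperations::MonoToStereo(mono, kSamplesPerChannel,
                                                 stereo);
    }
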
diff --git a/webrtc/modules/utility/include/file_player.h b/webrtc/modules/utility/include/file_player.h
new file mode 100644
index 0000000000..4ca134a669
--- /dev/null
+++ b/webrtc/modules/utility/include/file_player.h
@@ -0,0 +1,111 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UTILITY_INCLUDE_FILE_PLAYER_H_
+#define WEBRTC_MODULES_UTILITY_INCLUDE_FILE_PLAYER_H_
+
+#include "webrtc/common_types.h"
+#include "webrtc/engine_configurations.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/typedefs.h"
+#include "webrtc/video_frame.h"
+
+namespace webrtc {
+class FileCallback;
+
+class FilePlayer
+{
+public:
+ // The largest decoded frame size in samples (60ms with 32kHz sample rate).
+ enum {MAX_AUDIO_BUFFER_IN_SAMPLES = 60*32};
+ enum {MAX_AUDIO_BUFFER_IN_BYTES = MAX_AUDIO_BUFFER_IN_SAMPLES*2};
+
+ // Note: will return NULL for unsupported formats.
+ static FilePlayer* CreateFilePlayer(const uint32_t instanceID,
+ const FileFormats fileFormat);
+
+ static void DestroyFilePlayer(FilePlayer* player);
+
+ // Read 10 ms of audio at |frequencyInHz| to |outBuffer|. |lengthInSamples|
+ // will be set to the number of samples read (not the number of samples per
+ // channel).
+ virtual int Get10msAudioFromFile(
+ int16_t* outBuffer,
+ size_t& lengthInSamples,
+ int frequencyInHz) = 0;
+
+ // Register callback for receiving file playing notifications.
+ virtual int32_t RegisterModuleFileCallback(
+ FileCallback* callback) = 0;
+
+ // API for playing audio from fileName to channel.
+ // Note: codecInst is used for pre-encoded files.
+ virtual int32_t StartPlayingFile(
+ const char* fileName,
+ bool loop,
+ uint32_t startPosition,
+ float volumeScaling,
+ uint32_t notification,
+ uint32_t stopPosition = 0,
+ const CodecInst* codecInst = NULL) = 0;
+
+ // Note: codecInst is used for pre-encoded files.
+ virtual int32_t StartPlayingFile(
+ InStream& sourceStream,
+ uint32_t startPosition,
+ float volumeScaling,
+ uint32_t notification,
+ uint32_t stopPosition = 0,
+ const CodecInst* codecInst = NULL) = 0;
+
+ virtual int32_t StopPlayingFile() = 0;
+
+ virtual bool IsPlayingFile() const = 0;
+
+ virtual int32_t GetPlayoutPosition(uint32_t& durationMs) = 0;
+
+ // Set audioCodec to the currently used audio codec.
+ virtual int32_t AudioCodec(CodecInst& audioCodec) const = 0;
+
+ virtual int32_t Frequency() const = 0;
+
+ // Note: scaleFactor is in the range [0.0, 2.0].
+ virtual int32_t SetAudioScaling(float scaleFactor) = 0;
+
+ // Return the time in ms until the next video frame should be pulled (by
+ // calling GetVideoFromFile(..)).
+ // Note: this API reads one video frame from the file. This means that it
+ // should be called exactly once per GetVideoFromFile(..) API call.
+ virtual int32_t TimeUntilNextVideoFrame() { return -1;}
+
+ virtual int32_t StartPlayingVideoFile(
+ const char* /*fileName*/,
+ bool /*loop*/,
+ bool /*videoOnly*/) { return -1;}
+
+ virtual int32_t video_codec_info(VideoCodec& /*videoCodec*/) const
+ {return -1;}
+
+ virtual int32_t GetVideoFromFile(VideoFrame& /*videoFrame*/) { return -1; }
+
+ // Same as GetVideoFromFile(), but videoFrame will have the resolution
+ // specified by outWidth and outHeight in pixels.
+ virtual int32_t GetVideoFromFile(VideoFrame& /*videoFrame*/,
+ const uint32_t /*outWidth*/,
+ const uint32_t /*outHeight*/) {
+ return -1;
+ }
+
+protected:
+ virtual ~FilePlayer() {}
+
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_UTILITY_INCLUDE_FILE_PLAYER_H_
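
A hedged lifecycle sketch for the FilePlayer interface above; the instance ID, file name, and format are illustrative assumptions (kFileFormatWavFile comes from webrtc/common_types.h):

    #include "webrtc/modules/utility/include/file_player.h"

    void PlayFileSketch() {
      webrtc::FilePlayer* player = webrtc::FilePlayer::CreateFilePlayer(
          1 /* instanceID */, webrtc::kFileFormatWavFile);
      if (player == NULL)  // NULL is returned for unsupported formats.
        return;
      if (player->StartPlayingFile("audio.wav", false /* loop */,
                                   0 /* startPosition */,
                                   1.0f /* volumeScaling */,
                                   0 /* notification */) == 0) {
        // ... pull audio with Get10msAudioFromFile() ...
        player->StopPlayingFile();
      }
      webrtc::FilePlayer::DestroyFilePlayer(player);
    }
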
diff --git a/webrtc/modules/utility/include/file_recorder.h b/webrtc/modules/utility/include/file_recorder.h
new file mode 100644
index 0000000000..09ed8ae350
--- /dev/null
+++ b/webrtc/modules/utility/include/file_recorder.h
@@ -0,0 +1,84 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UTILITY_INCLUDE_FILE_RECORDER_H_
+#define WEBRTC_MODULES_UTILITY_INCLUDE_FILE_RECORDER_H_
+
+#include "webrtc/common_types.h"
+#include "webrtc/engine_configurations.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/media_file/media_file_defines.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/typedefs.h"
+#include "webrtc/video_frame.h"
+
+namespace webrtc {
+
+class FileRecorder
+{
+public:
+
+ // Note: will return NULL for unsupported formats.
+ static FileRecorder* CreateFileRecorder(const uint32_t instanceID,
+ const FileFormats fileFormat);
+
+ static void DestroyFileRecorder(FileRecorder* recorder);
+
+ virtual int32_t RegisterModuleFileCallback(
+ FileCallback* callback) = 0;
+
+ virtual FileFormats RecordingFileFormat() const = 0;
+
+ virtual int32_t StartRecordingAudioFile(
+ const char* fileName,
+ const CodecInst& codecInst,
+ uint32_t notification) = 0;
+
+ virtual int32_t StartRecordingAudioFile(
+ OutStream& destStream,
+ const CodecInst& codecInst,
+ uint32_t notification) = 0;
+
+ // Stop recording.
+ // Note: this API is for both audio and video.
+ virtual int32_t StopRecording() = 0;
+
+ // Return true if recording.
+ // Note: this API is for both audio and video.
+ virtual bool IsRecording() const = 0;
+
+ virtual int32_t codec_info(CodecInst& codecInst) const = 0;
+
+ // Write frame to file. Frame should contain 10 ms of unencoded audio data.
+ virtual int32_t RecordAudioToFile(
+ const AudioFrame& frame,
+ const TickTime* playoutTS = NULL) = 0;
+
+ // Open/create the file specified by fileName for writing audio/video data
+ // (relative path is allowed). audioCodecInst specifies the encoding of the
+ // audio data. videoCodecInst specifies the encoding of the video data.
+ // Only video data will be recorded if videoOnly is true.
+ // Note: the file format is AVI.
+ virtual int32_t StartRecordingVideoFile(
+ const char* fileName,
+ const CodecInst& audioCodecInst,
+ const VideoCodec& videoCodecInst,
+ bool videoOnly = false) = 0;
+
+ // Record the video frame in videoFrame to AVI file.
+ virtual int32_t RecordVideoToFile(const VideoFrame& videoFrame) = 0;
+
+protected:
+ virtual ~FileRecorder() {}
+
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_UTILITY_INCLUDE_FILE_RECORDER_H_
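
A hedged sketch of the audio-recording path declared above; the L16/16 kHz codec settings are illustrative assumptions:

    #include <cstring>

    #include "webrtc/modules/utility/include/file_recorder.h"

    void RecordFileSketch() {
      webrtc::FileRecorder* recorder = webrtc::FileRecorder::CreateFileRecorder(
          1 /* instanceID */, webrtc::kFileFormatWavFile);
      if (recorder == NULL)  // NULL is returned for unsupported formats.
        return;
      webrtc::CodecInst codec;
      std::memset(&codec, 0, sizeof(codec));
      std::strncpy(codec.plname, "L16", sizeof(codec.plname) - 1);
      codec.plfreq = 16000;
      codec.pacsize = 160;  // 10 ms at 16 kHz
      codec.channels = 1;
      if (recorder->StartRecordingAudioFile("out.wav", codec,
                                            0 /* notification */) == 0) {
        // ... feed 10 ms AudioFrames through RecordAudioToFile() ...
        recorder->StopRecording();
      }
      webrtc::FileRecorder::DestroyFileRecorder(recorder);
    }
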
diff --git a/webrtc/modules/utility/include/helpers_android.h b/webrtc/modules/utility/include/helpers_android.h
new file mode 100644
index 0000000000..2840ca965e
--- /dev/null
+++ b/webrtc/modules/utility/include/helpers_android.h
@@ -0,0 +1,87 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UTILITY_INCLUDE_HELPERS_ANDROID_H_
+#define WEBRTC_MODULES_UTILITY_INCLUDE_HELPERS_ANDROID_H_
+
+#include <jni.h>
+#include <string>
+
+// Abort the process if |jni| has a Java exception pending.
+// TODO(henrika): merge with CHECK_JNI_EXCEPTION() in jni_helpers.h.
+#define CHECK_EXCEPTION(jni) \
+ RTC_CHECK(!jni->ExceptionCheck()) \
+ << (jni->ExceptionDescribe(), jni->ExceptionClear(), "")
+
+namespace webrtc {
+
+// Return a |JNIEnv*| usable on this thread or NULL if this thread is detached.
+JNIEnv* GetEnv(JavaVM* jvm);
+
+// Return a |jlong| that will correctly convert back to |ptr|. This is needed
+// because the alternative (of silently passing a 32-bit pointer to a vararg
+// function expecting a 64-bit param) picks up garbage in the high 32 bits.
+jlong PointerTojlong(void* ptr);
+
+// JNIEnv helper methods that wrap the API which uses the JNI interface
+// pointer (JNIEnv*). They allow us to RTC_CHECK success and verify that no
+// Java exception is thrown while calling the method.
+jmethodID GetMethodID(
+ JNIEnv* jni, jclass c, const char* name, const char* signature);
+
+jmethodID GetStaticMethodID(
+ JNIEnv* jni, jclass c, const char* name, const char* signature);
+
+jclass FindClass(JNIEnv* jni, const char* name);
+
+jobject NewGlobalRef(JNIEnv* jni, jobject o);
+
+void DeleteGlobalRef(JNIEnv* jni, jobject o);
+
+// Return thread ID as a string.
+std::string GetThreadId();
+
+// Return thread ID as string suitable for debug logging.
+std::string GetThreadInfo();
+
+// Attach thread to JVM if necessary and detach at scope end if originally
+// attached.
+class AttachThreadScoped {
+ public:
+ explicit AttachThreadScoped(JavaVM* jvm);
+ ~AttachThreadScoped();
+ JNIEnv* env();
+
+ private:
+ bool attached_;
+ JavaVM* jvm_;
+ JNIEnv* env_;
+};
+
+// Scoped holder for global Java refs.
+template<class T> // T is jclass, jobject, jintArray, etc.
+class ScopedGlobalRef {
+ public:
+ ScopedGlobalRef(JNIEnv* jni, T obj)
+ : jni_(jni), obj_(static_cast<T>(NewGlobalRef(jni, obj))) {}
+ ~ScopedGlobalRef() {
+ DeleteGlobalRef(jni_, obj_);
+ }
+ T operator*() const {
+ return obj_;
+ }
+ private:
+ JNIEnv* jni_;
+ T obj_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_UTILITY_INCLUDE_HELPERS_ANDROID_H_
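
The two RAII helpers declared above compose naturally: AttachThreadScoped guarantees a usable JNIEnv*, and ScopedGlobalRef keeps a reference alive past the local frame. A sketch, with java/lang/String as an arbitrary example class:

    #include "webrtc/modules/utility/include/helpers_android.h"

    void JniHelpersSketch(JavaVM* jvm) {
      webrtc::AttachThreadScoped ats(jvm);  // attaches if not yet attached
      JNIEnv* jni = ats.env();
      // Promote the local class ref to a global one, released at scope end.
      webrtc::ScopedGlobalRef<jclass> clazz(
          jni, webrtc::FindClass(jni, "java/lang/String"));
      jmethodID length = webrtc::GetMethodID(jni, *clazz, "length", "()I");
      (void)length;
    }
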
diff --git a/webrtc/modules/utility/include/helpers_ios.h b/webrtc/modules/utility/include/helpers_ios.h
new file mode 100644
index 0000000000..a5a07ace17
--- /dev/null
+++ b/webrtc/modules/utility/include/helpers_ios.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UTILITY_INCLUDE_HELPERS_IOS_H_
+#define WEBRTC_MODULES_UTILITY_INCLUDE_HELPERS_IOS_H_
+
+#if defined(WEBRTC_IOS)
+
+#include <string>
+
+namespace webrtc {
+namespace ios {
+
+bool CheckAndLogError(BOOL success, NSError* error);
+
+std::string StdStringFromNSString(NSString* nsString);
+
+// Return thread ID as a string.
+std::string GetThreadId();
+
+// Return thread ID as string suitable for debug logging.
+std::string GetThreadInfo();
+
+// Returns [NSThread currentThread] description as string.
+// Example: <NSThread: 0x170066d80>{number = 1, name = main}
+std::string GetCurrentThreadDescription();
+
+std::string GetAudioSessionCategory();
+
+// Returns the current name of the operating system.
+std::string GetSystemName();
+
+// Returns the current version of the operating system.
+std::string GetSystemVersion();
+
+// Returns the version of the operating system as a floating point value.
+float GetSystemVersionAsFloat();
+
+// Returns the device type.
+// Examples: "iPhone" and "iPod touch".
+std::string GetDeviceType();
+
+// Returns a more detailed device name.
+// Examples: "iPhone 5s (GSM)" and "iPhone 6 Plus".
+std::string GetDeviceName();
+
+} // namespace ios
+} // namespace webrtc
+
+#endif // defined(WEBRTC_IOS)
+
+#endif // WEBRTC_MODULES_UTILITY_INCLUDE_HELPERS_IOS_H_
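
A small sketch using the getters declared above (compiled only when WEBRTC_IOS is defined); combining them into one string is an illustrative choice:

    #if defined(WEBRTC_IOS)
    #include <string>

    #include "webrtc/modules/utility/include/helpers_ios.h"

    std::string DescribeDeviceSketch() {
      return webrtc::ios::GetDeviceName() + " (" + webrtc::ios::GetSystemName() +
             " " + webrtc::ios::GetSystemVersion() + ")";
    }
    #endif
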
diff --git a/webrtc/modules/utility/include/jvm_android.h b/webrtc/modules/utility/include/jvm_android.h
new file mode 100644
index 0000000000..f527dff632
--- /dev/null
+++ b/webrtc/modules/utility/include/jvm_android.h
@@ -0,0 +1,185 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UTILITY_INCLUDE_JVM_ANDROID_H_
+#define WEBRTC_MODULES_UTILITY_INCLUDE_JVM_ANDROID_H_
+
+#include <jni.h>
+#include <string>
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/thread_checker.h"
+#include "webrtc/modules/utility/include/helpers_android.h"
+
+namespace webrtc {
+
+// The JNI interface pointer (JNIEnv) is valid only in the current thread.
+// Should another thread need to access the Java VM, it must first call
+// AttachCurrentThread() to attach itself to the VM and obtain a JNI interface
+// pointer. The native thread remains attached to the VM until it calls
+// DetachCurrentThread() to detach.
+class AttachCurrentThreadIfNeeded {
+ public:
+ AttachCurrentThreadIfNeeded();
+ ~AttachCurrentThreadIfNeeded();
+
+ private:
+ rtc::ThreadChecker thread_checker_;
+ bool attached_;
+};
+
+// This class is created by the NativeRegistration class and is used to wrap
+// the actual Java object handle (jobject) on which we can call methods from
+// C++ into Java. See the example in JVM for more details.
+// TODO(henrika): extend support for type of function calls.
+class GlobalRef {
+ public:
+ GlobalRef(JNIEnv* jni, jobject object);
+ ~GlobalRef();
+
+ jboolean CallBooleanMethod(jmethodID methodID, ...);
+ jint CallIntMethod(jmethodID methodID, ...);
+ void CallVoidMethod(jmethodID methodID, ...);
+
+ private:
+ JNIEnv* const jni_;
+ const jobject j_object_;
+};
+
+// Wraps the jclass object on which we can call GetMethodId() functions to
+// query method IDs.
+class JavaClass {
+ public:
+ JavaClass(JNIEnv* jni, jclass clazz) : jni_(jni), j_class_(clazz) {}
+ ~JavaClass() {}
+
+ jmethodID GetMethodId(const char* name, const char* signature);
+ jmethodID GetStaticMethodId(const char* name, const char* signature);
+ jobject CallStaticObjectMethod(jmethodID methodID, ...);
+
+ protected:
+ JNIEnv* const jni_;
+ jclass const j_class_;
+};
+
+// Adds support for the NewObject factory method to the JavaClass class.
+// See the example in JVM for more details on how to use it.
+class NativeRegistration : public JavaClass {
+ public:
+ NativeRegistration(JNIEnv* jni, jclass clazz);
+ ~NativeRegistration();
+
+ rtc::scoped_ptr<GlobalRef> NewObject(
+ const char* name, const char* signature, ...);
+
+ private:
+ JNIEnv* const jni_;
+};
+
+// This class is created by the JVM class and is used to expose methods that
+// need the JNI interface pointer, but its main purpose is to create a
+// NativeRegistration object given the name of a Java class and a list of
+// native methods. See the example in JVM for more details.
+class JNIEnvironment {
+ public:
+ explicit JNIEnvironment(JNIEnv* jni);
+ ~JNIEnvironment();
+
+ // Registers native methods with the Java class specified by |name|.
+ // Note that the class name must be one of the names in the static
+ // |loaded_classes| array defined in jvm_android.cc.
+ // This method must be called on the construction thread.
+ rtc::scoped_ptr<NativeRegistration> RegisterNatives(
+ const char* name, const JNINativeMethod *methods, int num_methods);
+
+ // Converts from Java string to std::string.
+ // This method must be called on the construction thread.
+ std::string JavaToStdString(const jstring& j_string);
+
+ private:
+ rtc::ThreadChecker thread_checker_;
+ JNIEnv* const jni_;
+};
+
+// Main class for working with Java from C++ using JNI in WebRTC.
+//
+// Example usage:
+//
+// // At initialization (e.g. in JNI_OnLoad), call JVM::Initialize.
+// JNIEnv* jni = ::base::android::AttachCurrentThread();
+// JavaVM* jvm = NULL;
+// jni->GetJavaVM(&jvm);
+// jobject context = ::base::android::GetApplicationContext();
+// webrtc::JVM::Initialize(jvm, context);
+//
+// // Header (.h) file of example class called User.
+// rtc::scoped_ptr<JNIEnvironment> env;
+// rtc::scoped_ptr<NativeRegistration> reg;
+// rtc::scoped_ptr<GlobalRef> obj;
+//
+// // Construction (in .cc file) of User class.
+// User::User() {
+// // Calling thread must be attached to the JVM.
+// env = JVM::GetInstance()->environment();
+// reg = env->RegisterNatives("org/webrtc/WebRtcTest", ,);
+// obj = reg->NewObject("<init>", ,);
+// }
+//
+// // Each User method can now use |reg| and |obj| and call Java functions
+// // in WebRtcTest.java, e.g. boolean init() {}.
+// bool User::Foo() {
+// jmethodID id = reg->GetMethodId("init", "()Z");
+// return obj->CallBooleanMethod(id);
+// }
+//
+// // And finally, e.g. in JNI_OnUnLoad, call JVM::Uninitialize.
+// JVM::Uninitialize();
+class JVM {
+ public:
+ // Stores global handles to the Java VM interface and the application context.
+ // Should be called once on a thread that is attached to the JVM.
+ static void Initialize(JavaVM* jvm, jobject context);
+ // Clears handles stored in Initialize(). Must be called on same thread as
+ // Initialize().
+ static void Uninitialize();
+ // Gives access to the global Java VM interface pointer, which then can be
+ // used to create a valid JNIEnvironment object or to get a JavaClass object.
+ static JVM* GetInstance();
+
+ // Creates a JNIEnvironment object.
+ // This method returns a NULL pointer if AttachCurrentThread() has not been
+ // called successfully. Use the AttachCurrentThreadIfNeeded class if needed.
+ rtc::scoped_ptr<JNIEnvironment> environment();
+
+ // Returns a JavaClass object given class |name|.
+ // Note that the class name must be one of the names in the static
+ // |loaded_classes| array defined in jvm_android.cc.
+ // This method must be called on the construction thread.
+ JavaClass GetClass(const char* name);
+
+ // TODO(henrika): can we make these private?
+ JavaVM* jvm() const { return jvm_; }
+ jobject context() const { return context_; }
+
+ protected:
+ JVM(JavaVM* jvm, jobject context);
+ ~JVM();
+
+ private:
+ JNIEnv* jni() const { return GetEnv(jvm_); }
+
+ rtc::ThreadChecker thread_checker_;
+ JavaVM* const jvm_;
+ jobject context_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_UTILITY_INCLUDE_JVM_ANDROID_H_
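
Tying the header comment's example to the attachment helper: before calling environment() from a native worker thread, the thread has to be attached to the VM. A hedged sketch:

    #include "webrtc/modules/utility/include/jvm_android.h"

    void CallIntoJavaSketch() {
      // Attach this thread for the duration of the scope (the destructor
      // detaches again if this call did the attaching).
      webrtc::AttachCurrentThreadIfNeeded attach;
      rtc::scoped_ptr<webrtc::JNIEnvironment> env =
          webrtc::JVM::GetInstance()->environment();
      if (env.get() == NULL)  // NULL when attachment did not succeed.
        return;
      // ... RegisterNatives()/NewObject() as in the header example above ...
    }
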
diff --git a/webrtc/modules/utility/include/mock/mock_process_thread.h b/webrtc/modules/utility/include/mock/mock_process_thread.h
new file mode 100644
index 0000000000..56d92f4527
--- /dev/null
+++ b/webrtc/modules/utility/include/mock/mock_process_thread.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UTILITY_INCLUDE_MOCK_MOCK_PROCESS_THREAD_H_
+#define WEBRTC_MODULES_UTILITY_INCLUDE_MOCK_MOCK_PROCESS_THREAD_H_
+
+#include "webrtc/modules/utility/include/process_thread.h"
+
+#include "testing/gmock/include/gmock/gmock.h"
+
+namespace webrtc {
+
+class MockProcessThread : public ProcessThread {
+ public:
+ MOCK_METHOD0(Start, void());
+ MOCK_METHOD0(Stop, void());
+ MOCK_METHOD1(WakeUp, void(Module* module));
+ MOCK_METHOD1(PostTask, void(ProcessTask* task));
+ MOCK_METHOD1(RegisterModule, void(Module* module));
+ MOCK_METHOD1(DeRegisterModule, void(Module* module));
+
+ // MOCK_METHOD1 gets confused with mocking this method, so we work around it
+ // by overriding the method from the interface and forwarding the call to a
+ // mocked, simpler method.
+ void PostTask(rtc::scoped_ptr<ProcessTask> task) override {
+ PostTask(task.get());
+ }
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_UTILITY_INCLUDE_MOCK_MOCK_PROCESS_THREAD_H_
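
A hedged gmock usage sketch for the mock declared above; the test name and expectations are illustrative:

    #include "testing/gmock/include/gmock/gmock.h"
    #include "testing/gtest/include/gtest/gtest.h"
    #include "webrtc/modules/utility/include/mock/mock_process_thread.h"

    TEST(MockProcessThreadSketch, ForwardsModuleRegistration) {
      webrtc::MockProcessThread thread;
      EXPECT_CALL(thread, RegisterModule(testing::_));
      EXPECT_CALL(thread, DeRegisterModule(testing::_));
      thread.RegisterModule(NULL);   // NULL stands in for a real Module*
      thread.DeRegisterModule(NULL);
    }
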
diff --git a/webrtc/modules/utility/include/process_thread.h b/webrtc/modules/utility/include/process_thread.h
new file mode 100644
index 0000000000..285a5ea587
--- /dev/null
+++ b/webrtc/modules/utility/include/process_thread.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UTILITY_INCLUDE_PROCESS_THREAD_H_
+#define WEBRTC_MODULES_UTILITY_INCLUDE_PROCESS_THREAD_H_
+
+#include "webrtc/typedefs.h"
+#include "webrtc/base/scoped_ptr.h"
+
+namespace webrtc {
+class Module;
+
+class ProcessTask {
+ public:
+ ProcessTask() {}
+ virtual ~ProcessTask() {}
+
+ virtual void Run() = 0;
+};
+
+class ProcessThread {
+ public:
+ virtual ~ProcessThread();
+
+ static rtc::scoped_ptr<ProcessThread> Create(const char* thread_name);
+
+ // Starts the worker thread. Must be called from the construction thread.
+ virtual void Start() = 0;
+
+ // Stops the worker thread. Must be called from the construction thread.
+ virtual void Stop() = 0;
+
+ // Wakes the thread up to give a module a chance to do processing right
+ // away. This causes the worker thread to wake up and requery the specified
+ // module for when it should be called back. (Typically the module should
+ // return 0 from TimeUntilNextProcess on the worker thread at that point).
+ // Can be called on any thread.
+ virtual void WakeUp(Module* module) = 0;
+
+ // Queues a task object to run on the worker thread. Ownership of the
+ // task object is transferred to the ProcessThread and the object will
+ // either be deleted after running on the worker thread, or on the
+ // construction thread of the ProcessThread instance, if the task did not
+ // get a chance to run (e.g. posting the task while shutting down or when
+ // the thread never runs).
+ virtual void PostTask(rtc::scoped_ptr<ProcessTask> task) = 0;
+
+ // Adds a module that will start to receive callbacks on the worker thread.
+ // Can be called from any thread.
+ virtual void RegisterModule(Module* module) = 0;
+
+ // Removes a previously registered module.
+ // Can be called from any thread.
+ virtual void DeRegisterModule(Module* module) = 0;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_UTILITY_INCLUDE_PROCESS_THREAD_H_
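
A hedged lifecycle sketch for the interface above; the no-op task is an illustrative assumption:

    #include "webrtc/base/scoped_ptr.h"
    #include "webrtc/modules/utility/include/process_thread.h"

    class NoopTask : public webrtc::ProcessTask {
     public:
      void Run() override {}  // executed once on the worker thread
    };

    void ProcessThreadSketch() {
      rtc::scoped_ptr<webrtc::ProcessThread> thread =
          webrtc::ProcessThread::Create("ExampleThread");
      thread->Start();
      // Ownership of the task transfers to the ProcessThread.
      thread->PostTask(rtc::scoped_ptr<webrtc::ProcessTask>(new NoopTask()));
      thread->Stop();
    }
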
diff --git a/webrtc/modules/utility/interface/audio_frame_operations.h b/webrtc/modules/utility/interface/audio_frame_operations.h
deleted file mode 100644
index c2af68ab1b..0000000000
--- a/webrtc/modules/utility/interface/audio_frame_operations.h
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_VOICE_ENGINE_AUDIO_FRAME_OPERATIONS_H_
-#define WEBRTC_VOICE_ENGINE_AUDIO_FRAME_OPERATIONS_H_
-
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-class AudioFrame;
-
-// TODO(andrew): consolidate this with utility.h and audio_frame_manipulator.h.
-// Change reference parameters to pointers. Consider using a namespace rather
-// than a class.
-class AudioFrameOperations {
- public:
- // Upmixes mono |src_audio| to stereo |dst_audio|. This is an out-of-place
- // operation, meaning src_audio and dst_audio must point to different
- // buffers. It is the caller's responsibility to ensure that |dst_audio| is
- // sufficiently large.
- static void MonoToStereo(const int16_t* src_audio, size_t samples_per_channel,
- int16_t* dst_audio);
- // |frame.num_channels_| will be updated. This version checks for sufficient
- // buffer size and that |num_channels_| is mono.
- static int MonoToStereo(AudioFrame* frame);
-
- // Downmixes stereo |src_audio| to mono |dst_audio|. This is an in-place
- // operation, meaning |src_audio| and |dst_audio| may point to the same
- // buffer.
- static void StereoToMono(const int16_t* src_audio, size_t samples_per_channel,
- int16_t* dst_audio);
- // |frame.num_channels_| will be updated. This version checks that
- // |num_channels_| is stereo.
- static int StereoToMono(AudioFrame* frame);
-
- // Swap the left and right channels of |frame|. Fails silently if |frame| is
- // not stereo.
- static void SwapStereoChannels(AudioFrame* frame);
-
- // Zeros out the audio and sets |frame.energy| to zero.
- static void Mute(AudioFrame& frame);
-
- static int Scale(float left, float right, AudioFrame& frame);
-
- static int ScaleWithSat(float scale, AudioFrame& frame);
-};
-
-} // namespace webrtc
-
-#endif // #ifndef WEBRTC_VOICE_ENGINE_AUDIO_FRAME_OPERATIONS_H_
diff --git a/webrtc/modules/utility/interface/file_player.h b/webrtc/modules/utility/interface/file_player.h
deleted file mode 100644
index 44f03e475a..0000000000
--- a/webrtc/modules/utility/interface/file_player.h
+++ /dev/null
@@ -1,111 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_UTILITY_INTERFACE_FILE_PLAYER_H_
-#define WEBRTC_MODULES_UTILITY_INTERFACE_FILE_PLAYER_H_
-
-#include "webrtc/common_types.h"
-#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/typedefs.h"
-#include "webrtc/video_frame.h"
-
-namespace webrtc {
-class FileCallback;
-
-class FilePlayer
-{
-public:
- // The largest decoded frame size in samples (60ms with 32kHz sample rate).
- enum {MAX_AUDIO_BUFFER_IN_SAMPLES = 60*32};
- enum {MAX_AUDIO_BUFFER_IN_BYTES = MAX_AUDIO_BUFFER_IN_SAMPLES*2};
-
- // Note: will return NULL for unsupported formats.
- static FilePlayer* CreateFilePlayer(const uint32_t instanceID,
- const FileFormats fileFormat);
-
- static void DestroyFilePlayer(FilePlayer* player);
-
- // Read 10 ms of audio at |frequencyInHz| to |outBuffer|. |lengthInSamples|
- // will be set to the number of samples read (not the number of samples per
- // channel).
- virtual int Get10msAudioFromFile(
- int16_t* outBuffer,
- size_t& lengthInSamples,
- int frequencyInHz) = 0;
-
- // Register callback for receiving file playing notifications.
- virtual int32_t RegisterModuleFileCallback(
- FileCallback* callback) = 0;
-
- // API for playing audio from fileName to channel.
- // Note: codecInst is used for pre-encoded files.
- virtual int32_t StartPlayingFile(
- const char* fileName,
- bool loop,
- uint32_t startPosition,
- float volumeScaling,
- uint32_t notification,
- uint32_t stopPosition = 0,
- const CodecInst* codecInst = NULL) = 0;
-
- // Note: codecInst is used for pre-encoded files.
- virtual int32_t StartPlayingFile(
- InStream& sourceStream,
- uint32_t startPosition,
- float volumeScaling,
- uint32_t notification,
- uint32_t stopPosition = 0,
- const CodecInst* codecInst = NULL) = 0;
-
- virtual int32_t StopPlayingFile() = 0;
-
- virtual bool IsPlayingFile() const = 0;
-
- virtual int32_t GetPlayoutPosition(uint32_t& durationMs) = 0;
-
- // Set audioCodec to the currently used audio codec.
- virtual int32_t AudioCodec(CodecInst& audioCodec) const = 0;
-
- virtual int32_t Frequency() const = 0;
-
- // Note: scaleFactor is in the range [0.0 - 2.0]
- virtual int32_t SetAudioScaling(float scaleFactor) = 0;
-
- // Return the time in ms until next video frame should be pulled (by
- // calling GetVideoFromFile(..)).
- // Note: this API reads one video frame from file. This means that it should
- // be called exactly once per GetVideoFromFile(..) API call.
- virtual int32_t TimeUntilNextVideoFrame() { return -1;}
-
- virtual int32_t StartPlayingVideoFile(
- const char* /*fileName*/,
- bool /*loop*/,
- bool /*videoOnly*/) { return -1;}
-
- virtual int32_t video_codec_info(VideoCodec& /*videoCodec*/) const
- {return -1;}
-
- virtual int32_t GetVideoFromFile(VideoFrame& /*videoFrame*/) { return -1; }
-
- // Same as GetVideoFromFile(). videoFrame will have the resolution specified
- // by the width outWidth and height outHeight in pixels.
- virtual int32_t GetVideoFromFile(VideoFrame& /*videoFrame*/,
- const uint32_t /*outWidth*/,
- const uint32_t /*outHeight*/) {
- return -1;
- }
-
-protected:
- virtual ~FilePlayer() {}
-
-};
-} // namespace webrtc
-#endif // WEBRTC_MODULES_UTILITY_INTERFACE_FILE_PLAYER_H_
diff --git a/webrtc/modules/utility/interface/file_recorder.h b/webrtc/modules/utility/interface/file_recorder.h
deleted file mode 100644
index f2ce785368..0000000000
--- a/webrtc/modules/utility/interface/file_recorder.h
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_UTILITY_INTERFACE_FILE_RECORDER_H_
-#define WEBRTC_MODULES_UTILITY_INTERFACE_FILE_RECORDER_H_
-
-#include "webrtc/common_types.h"
-#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/media_file/interface/media_file_defines.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
-#include "webrtc/typedefs.h"
-#include "webrtc/video_frame.h"
-
-namespace webrtc {
-
-class FileRecorder
-{
-public:
-
- // Note: will return NULL for unsupported formats.
- static FileRecorder* CreateFileRecorder(const uint32_t instanceID,
- const FileFormats fileFormat);
-
- static void DestroyFileRecorder(FileRecorder* recorder);
-
- virtual int32_t RegisterModuleFileCallback(
- FileCallback* callback) = 0;
-
- virtual FileFormats RecordingFileFormat() const = 0;
-
- virtual int32_t StartRecordingAudioFile(
- const char* fileName,
- const CodecInst& codecInst,
- uint32_t notification) = 0;
-
- virtual int32_t StartRecordingAudioFile(
- OutStream& destStream,
- const CodecInst& codecInst,
- uint32_t notification) = 0;
-
- // Stop recording.
- // Note: this API is for both audio and video.
- virtual int32_t StopRecording() = 0;
-
- // Return true if recording.
- // Note: this API is for both audio and video.
- virtual bool IsRecording() const = 0;
-
- virtual int32_t codec_info(CodecInst& codecInst) const = 0;
-
- // Write frame to file. Frame should contain 10ms of un-ecoded audio data.
- virtual int32_t RecordAudioToFile(
- const AudioFrame& frame,
- const TickTime* playoutTS = NULL) = 0;
-
- // Open/create the file specified by fileName for writing audio/video data
- // (relative path is allowed). audioCodecInst specifies the encoding of the
- // audio data. videoCodecInst specifies the encoding of the video data.
- // Only video data will be recorded if videoOnly is true. amrFormat
- // specifies the amr/amrwb storage format.
- // Note: the file format is AVI.
- virtual int32_t StartRecordingVideoFile(
- const char* fileName,
- const CodecInst& audioCodecInst,
- const VideoCodec& videoCodecInst,
- bool videoOnly = false) = 0;
-
- // Record the video frame in videoFrame to AVI file.
- virtual int32_t RecordVideoToFile(const VideoFrame& videoFrame) = 0;
-
-protected:
- virtual ~FileRecorder() {}
-
-};
-} // namespace webrtc
-#endif // WEBRTC_MODULES_UTILITY_INTERFACE_FILE_RECORDER_H_
diff --git a/webrtc/modules/utility/interface/helpers_android.h b/webrtc/modules/utility/interface/helpers_android.h
deleted file mode 100644
index 5c73fe4566..0000000000
--- a/webrtc/modules/utility/interface/helpers_android.h
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_UTILITY_INTERFACE_HELPERS_ANDROID_H_
-#define WEBRTC_MODULES_UTILITY_INTERFACE_HELPERS_ANDROID_H_
-
-#include <jni.h>
-#include <string>
-
-// Abort the process if |jni| has a Java exception pending.
-// TODO(henrika): merge with CHECK_JNI_EXCEPTION() in jni_helpers.h.
-#define CHECK_EXCEPTION(jni) \
- RTC_CHECK(!jni->ExceptionCheck()) \
- << (jni->ExceptionDescribe(), jni->ExceptionClear(), "")
-
-namespace webrtc {
-
-// Return a |JNIEnv*| usable on this thread or NULL if this thread is detached.
-JNIEnv* GetEnv(JavaVM* jvm);
-
-// Return a |jlong| that will correctly convert back to |ptr|. This is needed
-// because the alternative (of silently passing a 32-bit pointer to a vararg
-// function expecting a 64-bit param) picks up garbage in the high 32 bits.
-jlong PointerTojlong(void* ptr);
-
-// JNIEnv-helper methods that wraps the API which uses the JNI interface
-// pointer (JNIEnv*). It allows us to RTC_CHECK success and that no Java
-// exception is thrown while calling the method.
-jmethodID GetMethodID(
- JNIEnv* jni, jclass c, const char* name, const char* signature);
-
-jmethodID GetStaticMethodID(
- JNIEnv* jni, jclass c, const char* name, const char* signature);
-
-jclass FindClass(JNIEnv* jni, const char* name);
-
-jobject NewGlobalRef(JNIEnv* jni, jobject o);
-
-void DeleteGlobalRef(JNIEnv* jni, jobject o);
-
-// Return thread ID as a string.
-std::string GetThreadId();
-
-// Return thread ID as string suitable for debug logging.
-std::string GetThreadInfo();
-
-// Attach thread to JVM if necessary and detach at scope end if originally
-// attached.
-class AttachThreadScoped {
- public:
- explicit AttachThreadScoped(JavaVM* jvm);
- ~AttachThreadScoped();
- JNIEnv* env();
-
- private:
- bool attached_;
- JavaVM* jvm_;
- JNIEnv* env_;
-};
-
-// Scoped holder for global Java refs.
-template<class T> // T is jclass, jobject, jintArray, etc.
-class ScopedGlobalRef {
- public:
- ScopedGlobalRef(JNIEnv* jni, T obj)
- : jni_(jni), obj_(static_cast<T>(NewGlobalRef(jni, obj))) {}
- ~ScopedGlobalRef() {
- DeleteGlobalRef(jni_, obj_);
- }
- T operator*() const {
- return obj_;
- }
- private:
- JNIEnv* jni_;
- T obj_;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_UTILITY_INTERFACE_HELPERS_ANDROID_H_
diff --git a/webrtc/modules/utility/interface/helpers_ios.h b/webrtc/modules/utility/interface/helpers_ios.h
deleted file mode 100644
index a5edee0279..0000000000
--- a/webrtc/modules/utility/interface/helpers_ios.h
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_UTILITY_INTERFACE_HELPERS_IOS_H_
-#define WEBRTC_MODULES_UTILITY_INTERFACE_HELPERS_IOS_H_
-
-#if defined(WEBRTC_IOS)
-
-#include <string>
-
-namespace webrtc {
-namespace ios {
-
-bool CheckAndLogError(BOOL success, NSError* error);
-
-std::string StdStringFromNSString(NSString* nsString);
-
-// Return thread ID as a string.
-std::string GetThreadId();
-
-// Return thread ID as string suitable for debug logging.
-std::string GetThreadInfo();
-
-// Returns [NSThread currentThread] description as string.
-// Example: <NSThread: 0x170066d80>{number = 1, name = main}
-std::string GetCurrentThreadDescription();
-
-std::string GetAudioSessionCategory();
-
-// Returns the current name of the operating system.
-std::string GetSystemName();
-
-// Returns the current version of the operating system.
-std::string GetSystemVersion();
-
-// Returns the version of the operating system as a floating point value.
-float GetSystemVersionAsFloat();
-
-// Returns the device type.
-// Examples: ”iPhone” and ”iPod touch”.
-std::string GetDeviceType();
-
-// Returns a more detailed device name.
-// Examples: "iPhone 5s (GSM)" and "iPhone 6 Plus".
-std::string GetDeviceName();
-
-} // namespace ios
-} // namespace webrtc
-
-#endif // defined(WEBRTC_IOS)
-
-#endif // WEBRTC_MODULES_UTILITY_INTERFACE_HELPERS_IOS_H_
diff --git a/webrtc/modules/utility/interface/jvm_android.h b/webrtc/modules/utility/interface/jvm_android.h
deleted file mode 100644
index 0744fdbf12..0000000000
--- a/webrtc/modules/utility/interface/jvm_android.h
+++ /dev/null
@@ -1,185 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_UTILITY_SOURCE_JVM_H_
-#define WEBRTC_MODULES_UTILITY_SOURCE_JVM_H_
-
-#include <jni.h>
-#include <string>
-
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/base/thread_checker.h"
-#include "webrtc/modules/utility/interface/helpers_android.h"
-
-namespace webrtc {
-
-// The JNI interface pointer (JNIEnv) is valid only in the current thread.
-// Should another thread need to access the Java VM, it must first call
-// AttachCurrentThread() to attach itself to the VM and obtain a JNI interface
-// pointer. The native thread remains attached to the VM until it calls
-// DetachCurrentThread() to detach.
-class AttachCurrentThreadIfNeeded {
- public:
- AttachCurrentThreadIfNeeded();
- ~AttachCurrentThreadIfNeeded();
-
- private:
- rtc::ThreadChecker thread_checker_;
- bool attached_;
-};
-
-// This class is created by the NativeRegistration class and is used to wrap
-// the actual Java object handle (jobject) on which we can call methods from
-// C++ in to Java. See example in JVM for more details.
-// TODO(henrika): extend support for type of function calls.
-class GlobalRef {
- public:
- GlobalRef(JNIEnv* jni, jobject object);
- ~GlobalRef();
-
- jboolean CallBooleanMethod(jmethodID methodID, ...);
- jint CallIntMethod(jmethodID methodID, ...);
- void CallVoidMethod(jmethodID methodID, ...);
-
- private:
- JNIEnv* const jni_;
- const jobject j_object_;
-};
-
-// Wraps the jclass object on which we can call GetMethodId() functions to
-// query method IDs.
-class JavaClass {
- public:
- JavaClass(JNIEnv* jni, jclass clazz) : jni_(jni), j_class_(clazz) {}
- ~JavaClass() {}
-
- jmethodID GetMethodId(const char* name, const char* signature);
- jmethodID GetStaticMethodId(const char* name, const char* signature);
- jobject CallStaticObjectMethod(jmethodID methodID, ...);
-
- protected:
- JNIEnv* const jni_;
- jclass const j_class_;
-};
-
-// Adds support of the NewObject factory method to the JavaClass class.
-// See example in JVM for more details on how to use it.
-class NativeRegistration : public JavaClass {
- public:
- NativeRegistration(JNIEnv* jni, jclass clazz);
- ~NativeRegistration();
-
- rtc::scoped_ptr<GlobalRef> NewObject(
- const char* name, const char* signature, ...);
-
- private:
- JNIEnv* const jni_;
-};
-
-// This class is created by the JVM class and is used to expose methods that
-// needs the JNI interface pointer but its main purpose is to create a
-// NativeRegistration object given name of a Java class and a list of native
-// methods. See example in JVM for more details.
-class JNIEnvironment {
- public:
- explicit JNIEnvironment(JNIEnv* jni);
- ~JNIEnvironment();
-
- // Registers native methods with the Java class specified by |name|.
- // Note that the class name must be one of the names in the static
- // |loaded_classes| array defined in jvm_android.cc.
- // This method must be called on the construction thread.
- rtc::scoped_ptr<NativeRegistration> RegisterNatives(
- const char* name, const JNINativeMethod *methods, int num_methods);
-
- // Converts from Java string to std::string.
- // This method must be called on the construction thread.
- std::string JavaToStdString(const jstring& j_string);
-
- private:
- rtc::ThreadChecker thread_checker_;
- JNIEnv* const jni_;
-};
-
-// Main class for working with Java from C++ using JNI in WebRTC.
-//
-// Example usage:
-//
-// // At initialization (e.g. in JNI_OnLoad), call JVM::Initialize.
-// JNIEnv* jni = ::base::android::AttachCurrentThread();
-// JavaVM* jvm = NULL;
-// jni->GetJavaVM(&jvm);
-// jobject context = ::base::android::GetApplicationContext();
-// webrtc::JVM::Initialize(jvm, context);
-//
-// // Header (.h) file of example class called User.
-// rtc::scoped_ptr<JNIEnvironment> env;
-// rtc::scoped_ptr<NativeRegistration> reg;
-// rtc::scoped_ptr<GlobalRef> obj;
-//
-// // Construction (in .cc file) of User class.
-// User::User() {
-// // Calling thread must be attached to the JVM.
-// env = JVM::GetInstance()->environment();
-// reg = env->RegisterNatives("org/webrtc/WebRtcTest", ,);
-// obj = reg->NewObject("<init>", ,);
-// }
-//
-// // Each User method can now use |reg| and |obj| and call Java functions
-// // in WebRtcTest.java, e.g. boolean init() {}.
-// bool User::Foo() {
-// jmethodID id = reg->GetMethodId("init", "()Z");
-// return obj->CallBooleanMethod(id);
-// }
-//
-// // And finally, e.g. in JNI_OnUnLoad, call JVM::Uninitialize.
-// JVM::Uninitialize();
-class JVM {
- public:
- // Stores global handles to the Java VM interface and the application context.
- // Should be called once on a thread that is attached to the JVM.
- static void Initialize(JavaVM* jvm, jobject context);
- // Clears handles stored in Initialize(). Must be called on same thread as
- // Initialize().
- static void Uninitialize();
- // Gives access to the global Java VM interface pointer, which then can be
- // used to create a valid JNIEnvironment object or to get a JavaClass object.
- static JVM* GetInstance();
-
- // Creates a JNIEnvironment object.
- // This method returns a NULL pointer if AttachCurrentThread() has not been
- // called successfully. Use the AttachCurrentThreadIfNeeded class if needed.
- rtc::scoped_ptr<JNIEnvironment> environment();
-
- // Returns a JavaClass object given class |name|.
- // Note that the class name must be one of the names in the static
- // |loaded_classes| array defined in jvm_android.cc.
- // This method must be called on the construction thread.
- JavaClass GetClass(const char* name);
-
- // TODO(henrika): can we make these private?
- JavaVM* jvm() const { return jvm_; }
- jobject context() const { return context_; }
-
- protected:
- JVM(JavaVM* jvm, jobject context);
- ~JVM();
-
- private:
- JNIEnv* jni() const { return GetEnv(jvm_); }
-
- rtc::ThreadChecker thread_checker_;
- JavaVM* const jvm_;
- jobject context_;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_UTILITY_SOURCE_JVM_H_
diff --git a/webrtc/modules/utility/interface/mock/mock_process_thread.h b/webrtc/modules/utility/interface/mock/mock_process_thread.h
deleted file mode 100644
index fd108a8354..0000000000
--- a/webrtc/modules/utility/interface/mock/mock_process_thread.h
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_UTILITY_INTERFACE_MOCK_PROCESS_THREAD_H_
-#define WEBRTC_MODULES_UTILITY_INTERFACE_MOCK_PROCESS_THREAD_H_
-
-#include "webrtc/modules/utility/interface/process_thread.h"
-
-#include "testing/gmock/include/gmock/gmock.h"
-
-namespace webrtc {
-
-class MockProcessThread : public ProcessThread {
- public:
- MOCK_METHOD0(Start, void());
- MOCK_METHOD0(Stop, void());
- MOCK_METHOD1(WakeUp, void(Module* module));
- MOCK_METHOD1(PostTask, void(ProcessTask* task));
- MOCK_METHOD1(RegisterModule, void(Module* module));
- MOCK_METHOD1(DeRegisterModule, void(Module* module));
-
- // MOCK_METHOD1 gets confused with mocking this method, so we work around it
- // by overriding the method from the interface and forwarding the call to a
- // mocked, simpler method.
- void PostTask(rtc::scoped_ptr<ProcessTask> task) override {
- PostTask(task.get());
- }
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_UTILITY_INTERFACE_MOCK_PROCESS_THREAD_H_
diff --git a/webrtc/modules/utility/interface/process_thread.h b/webrtc/modules/utility/interface/process_thread.h
deleted file mode 100644
index 451a5a301b..0000000000
--- a/webrtc/modules/utility/interface/process_thread.h
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_UTILITY_INTERFACE_PROCESS_THREAD_H_
-#define WEBRTC_MODULES_UTILITY_INTERFACE_PROCESS_THREAD_H_
-
-#include "webrtc/typedefs.h"
-#include "webrtc/base/scoped_ptr.h"
-
-namespace webrtc {
-class Module;
-
-class ProcessTask {
- public:
- ProcessTask() {}
- virtual ~ProcessTask() {}
-
- virtual void Run() = 0;
-};
-
-class ProcessThread {
- public:
- virtual ~ProcessThread();
-
- static rtc::scoped_ptr<ProcessThread> Create(const char* thread_name);
-
- // Starts the worker thread. Must be called from the construction thread.
- virtual void Start() = 0;
-
- // Stops the worker thread. Must be called from the construction thread.
- virtual void Stop() = 0;
-
- // Wakes the thread up to give a module a chance to do processing right
- // away. This causes the worker thread to wake up and requery the specified
- // module for when it should be called back. (Typically the module should
- // return 0 from TimeUntilNextProcess on the worker thread at that point).
- // Can be called on any thread.
- virtual void WakeUp(Module* module) = 0;
-
- // Queues a task object to run on the worker thread. Ownership of the
- // task object is transferred to the ProcessThread and the object will
- // either be deleted after running on the worker thread, or on the
- // construction thread of the ProcessThread instance, if the task did not
- // get a chance to run (e.g. posting the task while shutting down or when
- // the thread never runs).
- virtual void PostTask(rtc::scoped_ptr<ProcessTask> task) = 0;
-
- // Adds a module that will start to receive callbacks on the worker thread.
- // Can be called from any thread.
- virtual void RegisterModule(Module* module) = 0;
-
- // Removes a previously registered module.
- // Can be called from any thread.
- virtual void DeRegisterModule(Module* module) = 0;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_UTILITY_INTERFACE_PROCESS_THREAD_H_
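
The deleted interface (it re-lands under utility/include/ elsewhere in this change) documents that PostTask() transfers ownership of the task to the thread, which deletes it after running it. A hedged usage sketch based only on the declarations above; LogTask is an illustrative name:

#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/utility/include/process_thread.h"

namespace {

class LogTask : public webrtc::ProcessTask {
 public:
  void Run() override {
    // Runs once on the worker thread; the ProcessThread deletes the task
    // afterwards, so no manual cleanup is needed here.
  }
};

void PostExample() {
  rtc::scoped_ptr<webrtc::ProcessThread> thread =
      webrtc::ProcessThread::Create("ExampleThread");
  thread->Start();
  thread->PostTask(rtc::scoped_ptr<webrtc::ProcessTask>(new LogTask()));
  thread->Stop();
}

}  // namespace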
diff --git a/webrtc/modules/utility/source/audio_frame_operations.cc b/webrtc/modules/utility/source/audio_frame_operations.cc
index c07ca1fdf6..fe09d7972f 100644
--- a/webrtc/modules/utility/source/audio_frame_operations.cc
+++ b/webrtc/modules/utility/source/audio_frame_operations.cc
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/utility/interface/audio_frame_operations.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/utility/include/audio_frame_operations.h"
namespace webrtc {
diff --git a/webrtc/modules/utility/source/audio_frame_operations_unittest.cc b/webrtc/modules/utility/source/audio_frame_operations_unittest.cc
index c278cdddcd..fff8f4407b 100644
--- a/webrtc/modules/utility/source/audio_frame_operations_unittest.cc
+++ b/webrtc/modules/utility/source/audio_frame_operations_unittest.cc
@@ -10,8 +10,8 @@
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/utility/interface/audio_frame_operations.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/utility/include/audio_frame_operations.h"
namespace webrtc {
namespace {
diff --git a/webrtc/modules/utility/source/coder.cc b/webrtc/modules/utility/source/coder.cc
index 4ec5f9b4e2..18b690dc67 100644
--- a/webrtc/modules/utility/source/coder.cc
+++ b/webrtc/modules/utility/source/coder.cc
@@ -9,7 +9,7 @@
*/
#include "webrtc/common_types.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/utility/source/coder.h"
namespace webrtc {
diff --git a/webrtc/modules/utility/source/coder.h b/webrtc/modules/utility/source/coder.h
index 4270e9b380..abfa87efe1 100644
--- a/webrtc/modules/utility/source/coder.h
+++ b/webrtc/modules/utility/source/coder.h
@@ -13,7 +13,7 @@
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/common_types.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module.h"
#include "webrtc/typedefs.h"
namespace webrtc {
diff --git a/webrtc/modules/utility/source/file_player_impl.h b/webrtc/modules/utility/source/file_player_impl.h
index f411db9151..beb6379ff0 100644
--- a/webrtc/modules/utility/source/file_player_impl.h
+++ b/webrtc/modules/utility/source/file_player_impl.h
@@ -14,9 +14,9 @@
#include "webrtc/common_audio/resampler/include/resampler.h"
#include "webrtc/common_types.h"
#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/media_file/interface/media_file.h"
-#include "webrtc/modules/media_file/interface/media_file_defines.h"
-#include "webrtc/modules/utility/interface/file_player.h"
+#include "webrtc/modules/media_file/media_file.h"
+#include "webrtc/modules/media_file/media_file_defines.h"
+#include "webrtc/modules/utility/include/file_player.h"
#include "webrtc/modules/utility/source/coder.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/tick_util.h"
diff --git a/webrtc/modules/utility/source/file_player_unittests.cc b/webrtc/modules/utility/source/file_player_unittests.cc
index 4b65acdeef..58471e5e8d 100644
--- a/webrtc/modules/utility/source/file_player_unittests.cc
+++ b/webrtc/modules/utility/source/file_player_unittests.cc
@@ -10,7 +10,7 @@
// Unit tests for FilePlayer.
-#include "webrtc/modules/utility/interface/file_player.h"
+#include "webrtc/modules/utility/include/file_player.h"
#include <stdio.h>
#include <string>
@@ -20,7 +20,6 @@
#include "webrtc/base/md5digest.h"
#include "webrtc/base/stringencode.h"
#include "webrtc/test/testsupport/fileutils.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
DEFINE_bool(file_player_output, false, "Generate reference files.");
@@ -82,7 +81,12 @@ class FilePlayerTest : public ::testing::Test {
FILE* output_file_;
};
-TEST_F(FilePlayerTest, DISABLED_ON_IOS(PlayWavPcmuFile)) {
+#if defined(WEBRTC_IOS)
+#define MAYBE_PlayWavPcmuFile DISABLED_PlayWavPcmuFile
+#else
+#define MAYBE_PlayWavPcmuFile PlayWavPcmuFile
+#endif
+TEST_F(FilePlayerTest, MAYBE_PlayWavPcmuFile) {
const std::string kFileName =
test::ResourcePath("utility/encapsulated_pcmu_8khz", "wav");
// The file is longer than this, but keeping the output shorter limits the
@@ -93,7 +97,12 @@ TEST_F(FilePlayerTest, DISABLED_ON_IOS(PlayWavPcmuFile)) {
PlayFileAndCheck(kFileName, kRefChecksum, kOutputLengthMs);
}
-TEST_F(FilePlayerTest, DISABLED_ON_IOS(PlayWavPcm16File)) {
+#if defined(WEBRTC_IOS)
+#define MAYBE_PlayWavPcm16File DISABLED_PlayWavPcm16File
+#else
+#define MAYBE_PlayWavPcm16File PlayWavPcm16File
+#endif
+TEST_F(FilePlayerTest, MAYBE_PlayWavPcm16File) {
const std::string kFileName =
test::ResourcePath("utility/encapsulated_pcm16b_8khz", "wav");
// The file is longer than this, but keeping the output shorter limits the
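
The hunks above replace gtest_disable.h's DISABLED_ON_IOS wrapper with the stock gtest idiom: alias the test name to a DISABLED_-prefixed one on the affected platform. The shape of the idiom, with placeholder suite and test names:

#include "testing/gtest/include/gtest/gtest.h"

#if defined(WEBRTC_IOS)
#define MAYBE_MyTest DISABLED_MyTest
#else
#define MAYBE_MyTest MyTest
#endif
TEST(MySuite, MAYBE_MyTest) {
  // gtest skips any test whose final name starts with DISABLED_, so the
  // alias turns the test off only where the #if above matches.
}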
diff --git a/webrtc/modules/utility/source/file_recorder_impl.cc b/webrtc/modules/utility/source/file_recorder_impl.cc
index 13926deb4a..88b20eeac2 100644
--- a/webrtc/modules/utility/source/file_recorder_impl.cc
+++ b/webrtc/modules/utility/source/file_recorder_impl.cc
@@ -10,7 +10,7 @@
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/media_file/interface/media_file.h"
+#include "webrtc/modules/media_file/media_file.h"
#include "webrtc/modules/utility/source/file_recorder_impl.h"
#include "webrtc/system_wrappers/include/logging.h"
diff --git a/webrtc/modules/utility/source/file_recorder_impl.h b/webrtc/modules/utility/source/file_recorder_impl.h
index 8ea96bdad4..697d759375 100644
--- a/webrtc/modules/utility/source/file_recorder_impl.h
+++ b/webrtc/modules/utility/source/file_recorder_impl.h
@@ -17,16 +17,16 @@
#include <list>
+#include "webrtc/base/platform_thread.h"
#include "webrtc/common_audio/resampler/include/resampler.h"
#include "webrtc/common_types.h"
#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/media_file/interface/media_file.h"
-#include "webrtc/modules/media_file/interface/media_file_defines.h"
-#include "webrtc/modules/utility/interface/file_recorder.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/media_file/media_file.h"
+#include "webrtc/modules/media_file/media_file_defines.h"
+#include "webrtc/modules/utility/include/file_recorder.h"
#include "webrtc/modules/utility/source/coder.h"
#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/typedefs.h"
diff --git a/webrtc/modules/utility/source/helpers_android.cc b/webrtc/modules/utility/source/helpers_android.cc
index 25652f237e..aea35f8d5a 100644
--- a/webrtc/modules/utility/source/helpers_android.cc
+++ b/webrtc/modules/utility/source/helpers_android.cc
@@ -9,7 +9,7 @@
*/
#include "webrtc/base/checks.h"
-#include "webrtc/modules/utility/interface/helpers_android.h"
+#include "webrtc/modules/utility/include/helpers_android.h"
#include <android/log.h>
#include <assert.h>
diff --git a/webrtc/modules/utility/source/helpers_ios.mm b/webrtc/modules/utility/source/helpers_ios.mm
index 90b7c8f605..2d0ac098c1 100644
--- a/webrtc/modules/utility/source/helpers_ios.mm
+++ b/webrtc/modules/utility/source/helpers_ios.mm
@@ -18,7 +18,7 @@
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/utility/interface/helpers_ios.h"
+#include "webrtc/modules/utility/include/helpers_ios.h"
namespace webrtc {
namespace ios {
diff --git a/webrtc/modules/utility/source/jvm_android.cc b/webrtc/modules/utility/source/jvm_android.cc
index 648c1685ea..eb37fda040 100644
--- a/webrtc/modules/utility/source/jvm_android.cc
+++ b/webrtc/modules/utility/source/jvm_android.cc
@@ -10,7 +10,7 @@
#include <android/log.h>
-#include "webrtc/modules/utility/interface/jvm_android.h"
+#include "webrtc/modules/utility/include/jvm_android.h"
#include "webrtc/base/checks.h"
diff --git a/webrtc/modules/utility/source/process_thread_impl.cc b/webrtc/modules/utility/source/process_thread_impl.cc
index 04fa88739f..8cdf01634c 100644
--- a/webrtc/modules/utility/source/process_thread_impl.cc
+++ b/webrtc/modules/utility/source/process_thread_impl.cc
@@ -11,7 +11,7 @@
#include "webrtc/modules/utility/source/process_thread_impl.h"
#include "webrtc/base/checks.h"
-#include "webrtc/modules/interface/module.h"
+#include "webrtc/modules/include/module.h"
#include "webrtc/system_wrappers/include/logging.h"
#include "webrtc/system_wrappers/include/tick_util.h"
@@ -38,8 +38,7 @@ ProcessThread::~ProcessThread() {}
// static
rtc::scoped_ptr<ProcessThread> ProcessThread::Create(
const char* thread_name) {
- return rtc::scoped_ptr<ProcessThread>(new ProcessThreadImpl(thread_name))
- .Pass();
+ return rtc::scoped_ptr<ProcessThread>(new ProcessThreadImpl(thread_name));
}
ProcessThreadImpl::ProcessThreadImpl(const char* thread_name)
@@ -76,9 +75,9 @@ void ProcessThreadImpl::Start() {
m.module->ProcessThreadAttached(this);
}
- thread_ = ThreadWrapper::CreateThread(&ProcessThreadImpl::Run, this,
- thread_name_);
- RTC_CHECK(thread_->Start());
+ thread_.reset(
+ new rtc::PlatformThread(&ProcessThreadImpl::Run, this, thread_name_));
+ thread_->Start();
}
void ProcessThreadImpl::Stop() {
@@ -93,7 +92,7 @@ void ProcessThreadImpl::Stop() {
wake_up_->Set();
- RTC_CHECK(thread_->Stop());
+ thread_->Stop();
stop_ = false;
// TODO(tommi): Since DeRegisterModule is currently being called from
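
This file migrates from ThreadWrapper to rtc::PlatformThread: construction takes the run function, a context pointer, and a thread name, and Start()/Stop() no longer return a status to RTC_CHECK. A sketch of the new pattern following the call sites above; the bool-returning run function (true means run again) is carried over from ThreadWrapper and is an assumption here:

#include "webrtc/base/platform_thread.h"
#include "webrtc/base/scoped_ptr.h"

class Worker {
 public:
  void Start() {
    thread_.reset(new rtc::PlatformThread(&Worker::Run, this, "Worker"));
    thread_->Start();  // Returns void now; the old RTC_CHECK(Start()) is gone.
  }
  void Stop() { thread_->Stop(); }

 private:
  static bool Run(void* obj) {
    // Returning true asks to be called again; false ends the thread loop.
    return static_cast<Worker*>(obj)->Process();
  }
  bool Process() { return false; }
  rtc::scoped_ptr<rtc::PlatformThread> thread_;
};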
diff --git a/webrtc/modules/utility/source/process_thread_impl.h b/webrtc/modules/utility/source/process_thread_impl.h
index 4e5861b41e..1c0a0cdfdd 100644
--- a/webrtc/modules/utility/source/process_thread_impl.h
+++ b/webrtc/modules/utility/source/process_thread_impl.h
@@ -15,10 +15,10 @@
#include <queue>
#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/platform_thread.h"
#include "webrtc/base/thread_checker.h"
-#include "webrtc/modules/utility/interface/process_thread.h"
+#include "webrtc/modules/utility/include/process_thread.h"
#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -70,7 +70,8 @@ class ProcessThreadImpl : public ProcessThread {
rtc::ThreadChecker thread_checker_;
const rtc::scoped_ptr<EventWrapper> wake_up_;
- rtc::scoped_ptr<ThreadWrapper> thread_;
+ // TODO(pbos): Remove scoped_ptr and stop recreating the thread.
+ rtc::scoped_ptr<rtc::PlatformThread> thread_;
ModuleList modules_;
// TODO(tommi): Support delayed tasks.
diff --git a/webrtc/modules/utility/source/process_thread_impl_unittest.cc b/webrtc/modules/utility/source/process_thread_impl_unittest.cc
index e080545312..0b35fad7d2 100644
--- a/webrtc/modules/utility/source/process_thread_impl_unittest.cc
+++ b/webrtc/modules/utility/source/process_thread_impl_unittest.cc
@@ -8,9 +8,11 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <utility>
+
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/interface/module.h"
+#include "webrtc/modules/include/module.h"
#include "webrtc/modules/utility/source/process_thread_impl.h"
#include "webrtc/system_wrappers/include/tick_util.h"
@@ -251,8 +253,9 @@ TEST(ProcessThreadImpl, WakeUp) {
rtc::scoped_ptr<EventWrapper> called(EventWrapper::Create());
MockModule module;
- int64_t start_time = 0;
- int64_t called_time = 0;
+ int64_t start_time;
+ int64_t called_time;
+
// Ask for a callback after 1000ms.
// TimeUntilNextProcess will be called twice.
// The first time we use it to get the thread into a waiting state.
@@ -281,8 +284,6 @@ TEST(ProcessThreadImpl, WakeUp) {
EXPECT_CALL(module, ProcessThreadAttached(nullptr)).Times(1);
thread.Stop();
- ASSERT_GT(start_time, 0);
- ASSERT_GT(called_time, 0);
EXPECT_GE(called_time, start_time);
uint32_t diff = called_time - start_time;
// We should have been called back much quicker than 1sec.
@@ -296,7 +297,7 @@ TEST(ProcessThreadImpl, PostTask) {
rtc::scoped_ptr<EventWrapper> task_ran(EventWrapper::Create());
rtc::scoped_ptr<RaiseEventTask> task(new RaiseEventTask(task_ran.get()));
thread.Start();
- thread.PostTask(task.Pass());
+ thread.PostTask(std::move(task));
EXPECT_EQ(kEventSignaled, task_ran->Wait(100));
thread.Stop();
}
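
The test change above is part of the wider migration from the Chromium-style scoped_ptr::Pass() to C++11 std::move(), which is why <utility> is now included. A minimal before/after sketch:

#include <utility>
#include "webrtc/base/scoped_ptr.h"

void Consume(rtc::scoped_ptr<int> p) {}

void Caller() {
  rtc::scoped_ptr<int> p(new int(42));
  // Old style: Consume(p.Pass());
  Consume(std::move(p));  // C++11 equivalent of the deprecated Pass().
}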
diff --git a/webrtc/modules/utility/utility.gypi b/webrtc/modules/utility/utility.gypi
index 38c9e3ebd9..e5b0a4d9c0 100644
--- a/webrtc/modules/utility/utility.gypi
+++ b/webrtc/modules/utility/utility.gypi
@@ -18,13 +18,13 @@
'<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
],
'sources': [
- 'interface/audio_frame_operations.h',
- 'interface/file_player.h',
- 'interface/file_recorder.h',
- 'interface/helpers_android.h',
- 'interface/helpers_ios.h',
- 'interface/jvm_android.h',
- 'interface/process_thread.h',
+ 'include/audio_frame_operations.h',
+ 'include/file_player.h',
+ 'include/file_recorder.h',
+ 'include/helpers_android.h',
+ 'include/helpers_ios.h',
+ 'include/jvm_android.h',
+ 'include/process_thread.h',
'source/audio_frame_operations.cc',
'source/coder.cc',
'source/coder.h',
diff --git a/webrtc/modules/video_capture/BUILD.gn b/webrtc/modules/video_capture/BUILD.gn
index b0ed6f4e6c..78f5212950 100644
--- a/webrtc/modules/video_capture/BUILD.gn
+++ b/webrtc/modules/video_capture/BUILD.gn
@@ -16,11 +16,11 @@ source_set("video_capture_module") {
sources = [
"device_info_impl.cc",
"device_info_impl.h",
- "include/video_capture.h",
- "include/video_capture_defines.h",
- "include/video_capture_factory.h",
+ "video_capture.h",
"video_capture_config.h",
+ "video_capture_defines.h",
"video_capture_delay.h",
+ "video_capture_factory.h",
"video_capture_factory.cc",
"video_capture_impl.cc",
"video_capture_impl.h",
diff --git a/webrtc/modules/video_capture/device_info_impl.h b/webrtc/modules/video_capture/device_info_impl.h
index 420808bcee..44e7dd596b 100644
--- a/webrtc/modules/video_capture/device_info_impl.h
+++ b/webrtc/modules/video_capture/device_info_impl.h
@@ -13,7 +13,7 @@
#include <vector>
-#include "webrtc/modules/video_capture/include/video_capture.h"
+#include "webrtc/modules/video_capture/video_capture.h"
#include "webrtc/modules/video_capture/video_capture_delay.h"
#include "webrtc/system_wrappers/include/rw_lock_wrapper.h"
diff --git a/webrtc/modules/video_capture/include/video_capture.h b/webrtc/modules/video_capture/include/video_capture.h
deleted file mode 100644
index 09b4502115..0000000000
--- a/webrtc/modules/video_capture/include/video_capture.h
+++ /dev/null
@@ -1,160 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_INCLUDE_VIDEO_CAPTURE_H_
-#define WEBRTC_MODULES_VIDEO_CAPTURE_INCLUDE_VIDEO_CAPTURE_H_
-
-#include "webrtc/common_video/rotation.h"
-#include "webrtc/modules/interface/module.h"
-#include "webrtc/modules/video_capture/include/video_capture_defines.h"
-
-namespace webrtc {
-
-class VideoCaptureModule: public RefCountedModule {
- public:
- // Interface for receiving information about available camera devices.
- class DeviceInfo {
- public:
- virtual uint32_t NumberOfDevices() = 0;
-
- // Returns the available capture devices.
- // deviceNumber - Index of capture device.
- // deviceNameUTF8 - Friendly name of the capture device.
- // deviceUniqueIdUTF8 - Unique name of the capture device if it exist.
- // Otherwise same as deviceNameUTF8.
- // productUniqueIdUTF8 - Unique product id if it exist.
- // Null terminated otherwise.
- virtual int32_t GetDeviceName(
- uint32_t deviceNumber,
- char* deviceNameUTF8,
- uint32_t deviceNameLength,
- char* deviceUniqueIdUTF8,
- uint32_t deviceUniqueIdUTF8Length,
- char* productUniqueIdUTF8 = 0,
- uint32_t productUniqueIdUTF8Length = 0) = 0;
-
-
- // Returns the number of capabilities this device.
- virtual int32_t NumberOfCapabilities(
- const char* deviceUniqueIdUTF8) = 0;
-
- // Gets the capabilities of the named device.
- virtual int32_t GetCapability(
- const char* deviceUniqueIdUTF8,
- const uint32_t deviceCapabilityNumber,
- VideoCaptureCapability& capability) = 0;
-
- // Gets clockwise angle the captured frames should be rotated in order
- // to be displayed correctly on a normally rotated display.
- virtual int32_t GetOrientation(const char* deviceUniqueIdUTF8,
- VideoRotation& orientation) = 0;
-
- // Gets the capability that best matches the requested width, height and
- // frame rate.
- // Returns the deviceCapabilityNumber on success.
- virtual int32_t GetBestMatchedCapability(
- const char* deviceUniqueIdUTF8,
- const VideoCaptureCapability& requested,
- VideoCaptureCapability& resulting) = 0;
-
- // Display OS /capture device specific settings dialog
- virtual int32_t DisplayCaptureSettingsDialogBox(
- const char* deviceUniqueIdUTF8,
- const char* dialogTitleUTF8,
- void* parentWindow,
- uint32_t positionX,
- uint32_t positionY) = 0;
-
- virtual ~DeviceInfo() {}
- };
-
- class VideoCaptureEncodeInterface {
- public:
- virtual int32_t ConfigureEncoder(const VideoCodec& codec,
- uint32_t maxPayloadSize) = 0;
- // Inform the encoder about the new target bit rate.
- // - newBitRate : New target bit rate in Kbit/s.
- // - frameRate : The target frame rate.
- virtual int32_t SetRates(int32_t newBitRate, int32_t frameRate) = 0;
- // Inform the encoder about the packet loss and the round-trip time.
- // - packetLoss : Fraction lost
- // (loss rate in percent = 100 * packetLoss / 255).
- // - rtt : Round-trip time in milliseconds.
- virtual int32_t SetChannelParameters(uint32_t packetLoss, int64_t rtt) = 0;
-
- // Encode the next frame as key frame.
- virtual int32_t EncodeFrameType(const FrameType type) = 0;
- protected:
- virtual ~VideoCaptureEncodeInterface() {
- }
- };
-
- // Register capture data callback
- virtual void RegisterCaptureDataCallback(
- VideoCaptureDataCallback& dataCallback) = 0;
-
- // Remove capture data callback
- virtual void DeRegisterCaptureDataCallback() = 0;
-
- // Register capture callback.
- virtual void RegisterCaptureCallback(VideoCaptureFeedBack& callBack) = 0;
-
- // Remove capture callback.
- virtual void DeRegisterCaptureCallback() = 0;
-
- // Start capture device
- virtual int32_t StartCapture(
- const VideoCaptureCapability& capability) = 0;
-
- virtual int32_t StopCapture() = 0;
-
- // Returns the name of the device used by this module.
- virtual const char* CurrentDeviceName() const = 0;
-
- // Returns true if the capture device is running
- virtual bool CaptureStarted() = 0;
-
- // Gets the current configuration.
- virtual int32_t CaptureSettings(VideoCaptureCapability& settings) = 0;
-
- virtual void SetCaptureDelay(int32_t delayMS) = 0;
-
- // Returns the current CaptureDelay. Only valid when the camera is running.
- virtual int32_t CaptureDelay() = 0;
-
- // Set the rotation of the captured frames.
- // If the rotation is set to the same as returned by
- // DeviceInfo::GetOrientation the captured frames are
- // displayed correctly if rendered.
- virtual int32_t SetCaptureRotation(VideoRotation rotation) = 0;
-
- // Tells the capture module whether to apply the pending rotation. By default,
- // the rotation is applied and the generated frame is up right. When set to
- // false, generated frames will carry the rotation information from
- // SetCaptureRotation. Return value indicates whether this operation succeeds.
- virtual bool SetApplyRotation(bool enable) = 0;
-
- // Return whether the rotation is applied or left pending.
- virtual bool GetApplyRotation() = 0;
-
- // Gets a pointer to an encode interface if the capture device supports the
- // requested type and size. NULL otherwise.
- virtual VideoCaptureEncodeInterface* GetEncodeInterface(
- const VideoCodec& codec) = 0;
-
- virtual void EnableFrameRateCallback(const bool enable) = 0;
- virtual void EnableNoPictureAlarm(const bool enable) = 0;
-
-protected:
- virtual ~VideoCaptureModule() {};
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_VIDEO_CAPTURE_INCLUDE_VIDEO_CAPTURE_H_
diff --git a/webrtc/modules/video_capture/include/video_capture_defines.h b/webrtc/modules/video_capture/include/video_capture_defines.h
deleted file mode 100644
index 1dee4fa814..0000000000
--- a/webrtc/modules/video_capture/include/video_capture_defines.h
+++ /dev/null
@@ -1,118 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_INCLUDE_VIDEO_CAPTURE_DEFINES_H_
-#define WEBRTC_MODULES_VIDEO_CAPTURE_INCLUDE_VIDEO_CAPTURE_DEFINES_H_
-
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/typedefs.h"
-#include "webrtc/video_frame.h"
-
-namespace webrtc
-{
-// Defines
-#ifndef NULL
- #define NULL 0
-#endif
-
-enum {kVideoCaptureUniqueNameLength =1024}; //Max unique capture device name lenght
-enum {kVideoCaptureDeviceNameLength =256}; //Max capture device name lenght
-enum {kVideoCaptureProductIdLength =128}; //Max product id length
-
-struct VideoCaptureCapability
-{
- int32_t width;
- int32_t height;
- int32_t maxFPS;
- int32_t expectedCaptureDelay;
- RawVideoType rawType;
- VideoCodecType codecType;
- bool interlaced;
-
- VideoCaptureCapability()
- {
- width = 0;
- height = 0;
- maxFPS = 0;
- expectedCaptureDelay = 0;
- rawType = kVideoUnknown;
- codecType = kVideoCodecUnknown;
- interlaced = false;
- }
- ;
- bool operator!=(const VideoCaptureCapability &other) const
- {
- if (width != other.width)
- return true;
- if (height != other.height)
- return true;
- if (maxFPS != other.maxFPS)
- return true;
- if (rawType != other.rawType)
- return true;
- if (codecType != other.codecType)
- return true;
- if (interlaced != other.interlaced)
- return true;
- return false;
- }
- bool operator==(const VideoCaptureCapability &other) const
- {
- return !operator!=(other);
- }
-};
-
-enum VideoCaptureAlarm
-{
- Raised = 0,
- Cleared = 1
-};
-
-/* External Capture interface. Returned by Create
- and implemented by the capture module.
- */
-class VideoCaptureExternal
-{
-public:
- // |capture_time| must be specified in the NTP time format in milliseconds.
- virtual int32_t IncomingFrame(uint8_t* videoFrame,
- size_t videoFrameLength,
- const VideoCaptureCapability& frameInfo,
- int64_t captureTime = 0) = 0;
-protected:
- ~VideoCaptureExternal() {}
-};
-
-// Callback class to be implemented by module user
-class VideoCaptureDataCallback
-{
-public:
- virtual void OnIncomingCapturedFrame(const int32_t id,
- const VideoFrame& videoFrame) = 0;
- virtual void OnCaptureDelayChanged(const int32_t id,
- const int32_t delay) = 0;
-protected:
- virtual ~VideoCaptureDataCallback(){}
-};
-
-class VideoCaptureFeedBack
-{
-public:
- virtual void OnCaptureFrameRate(const int32_t id,
- const uint32_t frameRate) = 0;
- virtual void OnNoPictureAlarm(const int32_t id,
- const VideoCaptureAlarm alarm) = 0;
-protected:
- virtual ~VideoCaptureFeedBack(){}
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CAPTURE_INCLUDE_VIDEO_CAPTURE_DEFINES_H_
diff --git a/webrtc/modules/video_capture/include/video_capture_factory.h b/webrtc/modules/video_capture/include/video_capture_factory.h
deleted file mode 100644
index f78437d1a0..0000000000
--- a/webrtc/modules/video_capture/include/video_capture_factory.h
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// This file contains interfaces used for creating the VideoCaptureModule
-// and DeviceInfo.
-
-#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_INCLUDE_VIDEO_CAPTURE_FACTORY_H_
-#define WEBRTC_MODULES_VIDEO_CAPTURE_INCLUDE_VIDEO_CAPTURE_FACTORY_H_
-
-#include "webrtc/modules/video_capture/include/video_capture.h"
-
-namespace webrtc {
-
-class VideoCaptureFactory {
- public:
- // Create a video capture module object
- // id - unique identifier of this video capture module object.
- // deviceUniqueIdUTF8 - name of the device.
- // Available names can be found by using GetDeviceName
- static VideoCaptureModule* Create(const int32_t id,
- const char* deviceUniqueIdUTF8);
-
- // Create a video capture module object used for external capture.
- // id - unique identifier of this video capture module object
- // externalCapture - [out] interface to call when a new frame is captured.
- static VideoCaptureModule* Create(const int32_t id,
- VideoCaptureExternal*& externalCapture);
-
- static VideoCaptureModule::DeviceInfo* CreateDeviceInfo(
- const int32_t id);
-
- private:
- ~VideoCaptureFactory();
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CAPTURE_INCLUDE_VIDEO_CAPTURE_FACTORY_H_
diff --git a/webrtc/modules/video_capture/ios/device_info_ios_objc.h b/webrtc/modules/video_capture/ios/device_info_ios_objc.h
index 65444bedcb..d67b559972 100644
--- a/webrtc/modules/video_capture/ios/device_info_ios_objc.h
+++ b/webrtc/modules/video_capture/ios/device_info_ios_objc.h
@@ -13,7 +13,7 @@
#import <AVFoundation/AVFoundation.h>
-#include "webrtc/modules/video_capture/include/video_capture_defines.h"
+#include "webrtc/modules/video_capture/video_capture_defines.h"
@interface DeviceInfoIosObjC : NSObject
+ (int)captureDeviceCount;
diff --git a/webrtc/modules/video_capture/linux/video_capture_linux.cc b/webrtc/modules/video_capture/linux/video_capture_linux.cc
index fe99c7136f..401a69d8c1 100644
--- a/webrtc/modules/video_capture/linux/video_capture_linux.cc
+++ b/webrtc/modules/video_capture/linux/video_capture_linux.cc
@@ -280,10 +280,10 @@ int32_t VideoCaptureModuleV4L2::StartCapture(
//start capture thread;
if (!_captureThread)
{
- _captureThread = ThreadWrapper::CreateThread(
- VideoCaptureModuleV4L2::CaptureThread, this, "CaptureThread");
+ _captureThread.reset(new rtc::PlatformThread(
+ VideoCaptureModuleV4L2::CaptureThread, this, "CaptureThread"));
_captureThread->Start();
- _captureThread->SetPriority(kHighPriority);
+ _captureThread->SetPriority(rtc::kHighPriority);
}
// Needed to start UVC camera - from the uvcview application
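
Same PlatformThread migration as in process_thread_impl.cc, with one addition: the priority constant is now scoped as rtc::kHighPriority. A condensed sketch of the lazy-create-and-boost pattern above; CaptureLoop and EnsureCaptureThread are illustrative names:

#include "webrtc/base/platform_thread.h"
#include "webrtc/base/scoped_ptr.h"

static bool CaptureLoop(void* obj) { return false; }  // Placeholder run function.

void EnsureCaptureThread(rtc::scoped_ptr<rtc::PlatformThread>* thread,
                         void* context) {
  if (!*thread) {
    thread->reset(
        new rtc::PlatformThread(CaptureLoop, context, "CaptureThread"));
    (*thread)->Start();
    (*thread)->SetPriority(rtc::kHighPriority);  // Was unscoped kHighPriority.
  }
}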
diff --git a/webrtc/modules/video_capture/linux/video_capture_linux.h b/webrtc/modules/video_capture/linux/video_capture_linux.h
index 996f8e10ca..8172eb8d2a 100644
--- a/webrtc/modules/video_capture/linux/video_capture_linux.h
+++ b/webrtc/modules/video_capture/linux/video_capture_linux.h
@@ -11,9 +11,9 @@
#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_LINUX_VIDEO_CAPTURE_LINUX_H_
#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_LINUX_VIDEO_CAPTURE_LINUX_H_
+#include "webrtc/base/platform_thread.h"
#include "webrtc/common_types.h"
#include "webrtc/modules/video_capture/video_capture_impl.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
namespace webrtc
{
@@ -39,7 +39,8 @@ private:
bool AllocateVideoBuffers();
bool DeAllocateVideoBuffers();
- rtc::scoped_ptr<ThreadWrapper> _captureThread;
+ // TODO(pbos): Stop using scoped_ptr and resetting the thread.
+ rtc::scoped_ptr<rtc::PlatformThread> _captureThread;
CriticalSectionWrapper* _captureCritSect;
int32_t _deviceId;
diff --git a/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info.mm b/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info.mm
index 1251ecd830..0c0c6a1261 100644
--- a/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info.mm
+++ b/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info.mm
@@ -9,7 +9,7 @@
*/
#import "webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info_objc.h"
-#include "webrtc/modules/video_capture/include/video_capture.h"
+#include "webrtc/modules/video_capture/video_capture.h"
#include "webrtc/modules/video_capture/video_capture_config.h"
#include "webrtc/system_wrappers/include/trace.h"
diff --git a/webrtc/modules/video_capture/test/video_capture_unittest.cc b/webrtc/modules/video_capture/test/video_capture_unittest.cc
index 2b8786b0fe..45d2d2f241 100644
--- a/webrtc/modules/video_capture/test/video_capture_unittest.cc
+++ b/webrtc/modules/video_capture/test/video_capture_unittest.cc
@@ -17,13 +17,12 @@
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/utility/interface/process_thread.h"
-#include "webrtc/modules/video_capture/include/video_capture.h"
-#include "webrtc/modules/video_capture/include/video_capture_factory.h"
+#include "webrtc/modules/utility/include/process_thread.h"
+#include "webrtc/modules/video_capture/video_capture.h"
+#include "webrtc/modules/video_capture/video_capture_factory.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/sleep.h"
#include "webrtc/system_wrappers/include/tick_util.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
#include "webrtc/video_frame.h"
using rtc::scoped_ptr;
@@ -47,14 +46,14 @@ using webrtc::VideoCaptureModule;
SleepMs(5); \
res = (ex); \
} \
- } while (0);\
+ } while (0)
#define EXPECT_TRUE_WAIT(ex, timeout) \
do { \
bool res; \
WAIT_(ex, timeout, res); \
if (!res) EXPECT_TRUE(ex); \
- } while (0);
+ } while (0)
static const int kTimeOut = 5000;
@@ -275,7 +274,14 @@ class VideoCaptureTest : public testing::Test {
unsigned int number_of_devices_;
};
-TEST_F(VideoCaptureTest, CreateDelete) {
+#ifdef WEBRTC_MAC
+// Currently fails on Mac 64-bit, see
+// https://bugs.chromium.org/p/webrtc/issues/detail?id=5406
+#define MAYBE_CreateDelete DISABLED_CreateDelete
+#else
+#define MAYBE_CreateDelete CreateDelete
+#endif
+TEST_F(VideoCaptureTest, MAYBE_CreateDelete) {
for (int i = 0; i < 5; ++i) {
int64_t start_time = TickTime::MillisecondTimestamp();
TestVideoCaptureCallback capture_observer;
@@ -312,7 +318,14 @@ TEST_F(VideoCaptureTest, CreateDelete) {
}
}
-TEST_F(VideoCaptureTest, Capabilities) {
+#ifdef WEBRTC_MAC
+// Currently fails on Mac 64-bit, see
+// https://bugs.chromium.org/p/webrtc/issues/detail?id=5406
+#define MAYBE_Capabilities DISABLED_Capabilities
+#else
+#define MAYBE_Capabilities Capabilities
+#endif
+TEST_F(VideoCaptureTest, MAYBE_Capabilities) {
#ifdef WEBRTC_MAC
printf("Video capture capabilities are not supported on Mac.\n");
return;
@@ -479,7 +492,12 @@ TEST_F(VideoCaptureExternalTest, TestExternalCapture) {
// Test frame rate and no picture alarm.
// Flaky on Win32, see webrtc:3270.
-TEST_F(VideoCaptureExternalTest, DISABLED_ON_WIN(FrameRate)) {
+#if defined(WEBRTC_WIN)
+#define MAYBE_FrameRate DISABLED_FrameRate
+#else
+#define MAYBE_FrameRate FrameRate
+#endif
+TEST_F(VideoCaptureExternalTest, MAYBE_FrameRate) {
int64_t testTime = 3;
TickTime startTime = TickTime::Now();
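
Besides the MAYBE_ renames, the hunk above drops the trailing semicolon from the do { ... } while (0) macro bodies so each macro expands to exactly one statement and the caller supplies the terminator. Why that matters, with a hypothetical EXPECT_OK macro:

// With the ';' baked into the macro, the expansion below would close the
// if-statement early and orphan the else; moving it to the call site fixes
// that.
#define EXPECT_OK(x) \
  do {               \
    if (!(x))        \
      return;        \
  } while (0)

void Use(bool ok) {
  if (ok)
    EXPECT_OK(ok);  // Expands to one statement; this ';' completes it.
  else
    return;         // Syntax error here if the macro supplied its own ';'.
}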
diff --git a/webrtc/modules/video_capture/video_capture.gypi b/webrtc/modules/video_capture/video_capture.gypi
index f552df7758..c80f2bf5b5 100644
--- a/webrtc/modules/video_capture/video_capture.gypi
+++ b/webrtc/modules/video_capture/video_capture.gypi
@@ -23,11 +23,11 @@
'sources': [
'device_info_impl.cc',
'device_info_impl.h',
- 'include/video_capture.h',
- 'include/video_capture_defines.h',
- 'include/video_capture_factory.h',
+ 'video_capture.h',
'video_capture_config.h',
+ 'video_capture_defines.h',
'video_capture_delay.h',
+ 'video_capture_factory.h',
'video_capture_factory.cc',
'video_capture_impl.cc',
'video_capture_impl.h',
@@ -116,6 +116,23 @@
],
},
}], # win
+ ['OS=="win" and clang==1', {
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'AdditionalOptions': [
+ # Disable warnings failing when compiling with Clang on Windows.
+ # https://bugs.chromium.org/p/webrtc/issues/detail?id=5366
+ '-Wno-comment',
+ '-Wno-ignored-attributes',
+ '-Wno-microsoft-extra-qualification',
+ '-Wno-missing-braces',
+ '-Wno-overloaded-virtual',
+ '-Wno-reorder',
+ '-Wno-writable-strings',
+ ],
+ },
+ },
+ }],
['OS=="ios"', {
'sources': [
'ios/device_info_ios.h',
diff --git a/webrtc/modules/video_capture/video_capture.h b/webrtc/modules/video_capture/video_capture.h
new file mode 100644
index 0000000000..08d02211c7
--- /dev/null
+++ b/webrtc/modules/video_capture/video_capture.h
@@ -0,0 +1,160 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_H_
+
+#include "webrtc/common_video/rotation.h"
+#include "webrtc/modules/include/module.h"
+#include "webrtc/modules/video_capture/video_capture_defines.h"
+
+namespace webrtc {
+
+class VideoCaptureModule: public RefCountedModule {
+ public:
+ // Interface for receiving information about available camera devices.
+ class DeviceInfo {
+ public:
+ virtual uint32_t NumberOfDevices() = 0;
+
+ // Returns the available capture devices.
+ // deviceNumber - Index of capture device.
+ // deviceNameUTF8 - Friendly name of the capture device.
+ // deviceUniqueIdUTF8 - Unique name of the capture device if it exists.
+ // Otherwise same as deviceNameUTF8.
+ // productUniqueIdUTF8 - Unique product id if it exists.
+ // Null terminated otherwise.
+ virtual int32_t GetDeviceName(
+ uint32_t deviceNumber,
+ char* deviceNameUTF8,
+ uint32_t deviceNameLength,
+ char* deviceUniqueIdUTF8,
+ uint32_t deviceUniqueIdUTF8Length,
+ char* productUniqueIdUTF8 = 0,
+ uint32_t productUniqueIdUTF8Length = 0) = 0;
+
+
+ // Returns the number of capabilities of this device.
+ virtual int32_t NumberOfCapabilities(
+ const char* deviceUniqueIdUTF8) = 0;
+
+ // Gets the capabilities of the named device.
+ virtual int32_t GetCapability(
+ const char* deviceUniqueIdUTF8,
+ const uint32_t deviceCapabilityNumber,
+ VideoCaptureCapability& capability) = 0;
+
+ // Gets the clockwise angle the captured frames should be rotated in order
+ // to be displayed correctly on a normally rotated display.
+ virtual int32_t GetOrientation(const char* deviceUniqueIdUTF8,
+ VideoRotation& orientation) = 0;
+
+ // Gets the capability that best matches the requested width, height and
+ // frame rate.
+ // Returns the deviceCapabilityNumber on success.
+ virtual int32_t GetBestMatchedCapability(
+ const char* deviceUniqueIdUTF8,
+ const VideoCaptureCapability& requested,
+ VideoCaptureCapability& resulting) = 0;
+
+ // Display the OS / capture device specific settings dialog.
+ virtual int32_t DisplayCaptureSettingsDialogBox(
+ const char* deviceUniqueIdUTF8,
+ const char* dialogTitleUTF8,
+ void* parentWindow,
+ uint32_t positionX,
+ uint32_t positionY) = 0;
+
+ virtual ~DeviceInfo() {}
+ };
+
+ class VideoCaptureEncodeInterface {
+ public:
+ virtual int32_t ConfigureEncoder(const VideoCodec& codec,
+ uint32_t maxPayloadSize) = 0;
+ // Inform the encoder about the new target bit rate.
+ // - newBitRate : New target bit rate in Kbit/s.
+ // - frameRate : The target frame rate.
+ virtual int32_t SetRates(int32_t newBitRate, int32_t frameRate) = 0;
+ // Inform the encoder about the packet loss and the round-trip time.
+ // - packetLoss : Fraction lost
+ // (loss rate in percent = 100 * packetLoss / 255).
+ // - rtt : Round-trip time in milliseconds.
+ virtual int32_t SetChannelParameters(uint32_t packetLoss, int64_t rtt) = 0;
+
+ // Encode the next frame as key frame.
+ virtual int32_t EncodeFrameType(const FrameType type) = 0;
+ protected:
+ virtual ~VideoCaptureEncodeInterface() {
+ }
+ };
+
+ // Register capture data callback
+ virtual void RegisterCaptureDataCallback(
+ VideoCaptureDataCallback& dataCallback) = 0;
+
+ // Remove capture data callback
+ virtual void DeRegisterCaptureDataCallback() = 0;
+
+ // Register capture callback.
+ virtual void RegisterCaptureCallback(VideoCaptureFeedBack& callBack) = 0;
+
+ // Remove capture callback.
+ virtual void DeRegisterCaptureCallback() = 0;
+
+ // Start capture device
+ virtual int32_t StartCapture(
+ const VideoCaptureCapability& capability) = 0;
+
+ virtual int32_t StopCapture() = 0;
+
+ // Returns the name of the device used by this module.
+ virtual const char* CurrentDeviceName() const = 0;
+
+ // Returns true if the capture device is running
+ virtual bool CaptureStarted() = 0;
+
+ // Gets the current configuration.
+ virtual int32_t CaptureSettings(VideoCaptureCapability& settings) = 0;
+
+ virtual void SetCaptureDelay(int32_t delayMS) = 0;
+
+ // Returns the current CaptureDelay. Only valid when the camera is running.
+ virtual int32_t CaptureDelay() = 0;
+
+ // Set the rotation of the captured frames.
+ // If the rotation is set to the same as returned by
+ // DeviceInfo::GetOrientation the captured frames are
+ // displayed correctly if rendered.
+ virtual int32_t SetCaptureRotation(VideoRotation rotation) = 0;
+
+ // Tells the capture module whether to apply the pending rotation. By default,
+ // the rotation is applied and the generated frame is upright. When set to
+ // false, generated frames will carry the rotation information from
+ // SetCaptureRotation. Return value indicates whether this operation succeeds.
+ virtual bool SetApplyRotation(bool enable) = 0;
+
+ // Return whether the rotation is applied or left pending.
+ virtual bool GetApplyRotation() = 0;
+
+ // Gets a pointer to an encode interface if the capture device supports the
+ // requested type and size. NULL otherwise.
+ virtual VideoCaptureEncodeInterface* GetEncodeInterface(
+ const VideoCodec& codec) = 0;
+
+ virtual void EnableFrameRateCallback(const bool enable) = 0;
+ virtual void EnableNoPictureAlarm(const bool enable) = 0;
+
+protected:
+ virtual ~VideoCaptureModule() {}
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_H_
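
With the header now living at its new path, a hedged sketch of enumerating devices through the DeviceInfo interface above; the buffer sizes come from the constants in video_capture_defines.h, and the 0-means-success return convention is an assumption:

#include "webrtc/modules/video_capture/video_capture_factory.h"

void ListCaptureDevices() {
  webrtc::VideoCaptureModule::DeviceInfo* info =
      webrtc::VideoCaptureFactory::CreateDeviceInfo(0);
  if (!info)
    return;
  char name[webrtc::kVideoCaptureDeviceNameLength];
  char unique_id[webrtc::kVideoCaptureUniqueNameLength];
  for (uint32_t i = 0; i < info->NumberOfDevices(); ++i) {
    if (info->GetDeviceName(i, name, sizeof(name), unique_id,
                            sizeof(unique_id)) == 0) {
      // |name| is the friendly name; |unique_id| feeds Create() below.
    }
  }
  delete info;
}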
diff --git a/webrtc/modules/video_capture/video_capture_defines.h b/webrtc/modules/video_capture/video_capture_defines.h
new file mode 100644
index 0000000000..ef97ecab9d
--- /dev/null
+++ b/webrtc/modules/video_capture/video_capture_defines.h
@@ -0,0 +1,118 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_DEFINES_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_DEFINES_H_
+
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/typedefs.h"
+#include "webrtc/video_frame.h"
+
+namespace webrtc
+{
+// Defines
+#ifndef NULL
+ #define NULL 0
+#endif
+
+enum {kVideoCaptureUniqueNameLength =1024}; // Max unique capture device name length
+enum {kVideoCaptureDeviceNameLength =256}; // Max capture device name length
+enum {kVideoCaptureProductIdLength =128}; // Max product id length
+
+struct VideoCaptureCapability
+{
+ int32_t width;
+ int32_t height;
+ int32_t maxFPS;
+ int32_t expectedCaptureDelay;
+ RawVideoType rawType;
+ VideoCodecType codecType;
+ bool interlaced;
+
+ VideoCaptureCapability()
+ {
+ width = 0;
+ height = 0;
+ maxFPS = 0;
+ expectedCaptureDelay = 0;
+ rawType = kVideoUnknown;
+ codecType = kVideoCodecUnknown;
+ interlaced = false;
+ }
+
+ bool operator!=(const VideoCaptureCapability &other) const
+ {
+ if (width != other.width)
+ return true;
+ if (height != other.height)
+ return true;
+ if (maxFPS != other.maxFPS)
+ return true;
+ if (rawType != other.rawType)
+ return true;
+ if (codecType != other.codecType)
+ return true;
+ if (interlaced != other.interlaced)
+ return true;
+ return false;
+ }
+ bool operator==(const VideoCaptureCapability &other) const
+ {
+ return !operator!=(other);
+ }
+};
+
+enum VideoCaptureAlarm
+{
+ Raised = 0,
+ Cleared = 1
+};
+
+/* External Capture interface. Returned by Create
+ and implemented by the capture module.
+ */
+class VideoCaptureExternal
+{
+public:
+ // |capture_time| must be specified in the NTP time format in milliseconds.
+ virtual int32_t IncomingFrame(uint8_t* videoFrame,
+ size_t videoFrameLength,
+ const VideoCaptureCapability& frameInfo,
+ int64_t captureTime = 0) = 0;
+protected:
+ ~VideoCaptureExternal() {}
+};
+
+// Callback class to be implemented by module user
+class VideoCaptureDataCallback
+{
+public:
+ virtual void OnIncomingCapturedFrame(const int32_t id,
+ const VideoFrame& videoFrame) = 0;
+ virtual void OnCaptureDelayChanged(const int32_t id,
+ const int32_t delay) = 0;
+protected:
+ virtual ~VideoCaptureDataCallback(){}
+};
+
+class VideoCaptureFeedBack
+{
+public:
+ virtual void OnCaptureFrameRate(const int32_t id,
+ const uint32_t frameRate) = 0;
+ virtual void OnNoPictureAlarm(const int32_t id,
+ const VideoCaptureAlarm alarm) = 0;
+protected:
+ virtual ~VideoCaptureFeedBack(){}
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_DEFINES_H_
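
A minimal implementation of the VideoCaptureDataCallback interface above, as a module user would write it; FrameCounter is an illustrative name:

#include "webrtc/modules/video_capture/video_capture_defines.h"

class FrameCounter : public webrtc::VideoCaptureDataCallback {
 public:
  void OnIncomingCapturedFrame(const int32_t id,
                               const webrtc::VideoFrame& frame) override {
    ++frames_;  // Called on the capture thread for every delivered frame.
  }
  void OnCaptureDelayChanged(const int32_t id, const int32_t delay) override {}

  int frames_ = 0;
};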
diff --git a/webrtc/modules/video_capture/video_capture_factory.cc b/webrtc/modules/video_capture/video_capture_factory.cc
index f88f916ba4..618c08bac6 100644
--- a/webrtc/modules/video_capture/video_capture_factory.cc
+++ b/webrtc/modules/video_capture/video_capture_factory.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/video_capture/include/video_capture_factory.h"
+#include "webrtc/modules/video_capture/video_capture_factory.h"
#include "webrtc/modules/video_capture/video_capture_impl.h"
diff --git a/webrtc/modules/video_capture/video_capture_factory.h b/webrtc/modules/video_capture/video_capture_factory.h
new file mode 100644
index 0000000000..4765be1fde
--- /dev/null
+++ b/webrtc/modules/video_capture/video_capture_factory.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains interfaces used for creating the VideoCaptureModule
+// and DeviceInfo.
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_FACTORY_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_FACTORY_H_
+
+#include "webrtc/modules/video_capture/video_capture.h"
+
+namespace webrtc {
+
+class VideoCaptureFactory {
+ public:
+ // Create a video capture module object
+ // id - unique identifier of this video capture module object.
+ // deviceUniqueIdUTF8 - name of the device.
+ // Available names can be found by using GetDeviceName.
+ static VideoCaptureModule* Create(const int32_t id,
+ const char* deviceUniqueIdUTF8);
+
+ // Create a video capture module object used for external capture.
+ // id - unique identifier of this video capture module object
+ // externalCapture - [out] interface to call when a new frame is captured.
+ static VideoCaptureModule* Create(const int32_t id,
+ VideoCaptureExternal*& externalCapture);
+
+ static VideoCaptureModule::DeviceInfo* CreateDeviceInfo(
+ const int32_t id);
+
+ private:
+ ~VideoCaptureFactory();
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_FACTORY_H_
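
Tying the relocated factory and callbacks together, a hedged end-to-end sketch; the capability values are placeholders, and real code would hold the ref-counted module in a rtc::scoped_refptr as the unit test in this change does:

#include "webrtc/modules/video_capture/video_capture_factory.h"

void StartCamera(webrtc::VideoCaptureDataCallback* callback,
                 const char* unique_id) {
  webrtc::VideoCaptureModule* module =
      webrtc::VideoCaptureFactory::Create(0, unique_id);
  if (!module)
    return;
  module->RegisterCaptureDataCallback(*callback);
  webrtc::VideoCaptureCapability capability;
  capability.width = 640;  // Placeholder values; pick from GetCapability().
  capability.height = 480;
  capability.maxFPS = 30;
  capability.rawType = webrtc::kVideoI420;
  module->StartCapture(capability);
  // ... later:
  module->StopCapture();
  module->DeRegisterCaptureDataCallback();
}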
diff --git a/webrtc/modules/video_capture/video_capture_impl.cc b/webrtc/modules/video_capture/video_capture_impl.cc
index 4046181505..90730cd984 100644
--- a/webrtc/modules/video_capture/video_capture_impl.cc
+++ b/webrtc/modules/video_capture/video_capture_impl.cc
@@ -14,7 +14,7 @@
#include "webrtc/base/trace_event.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/video_capture/video_capture_config.h"
#include "webrtc/system_wrappers/include/clock.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
diff --git a/webrtc/modules/video_capture/video_capture_impl.h b/webrtc/modules/video_capture/video_capture_impl.h
index deb989c251..164421776c 100644
--- a/webrtc/modules/video_capture/video_capture_impl.h
+++ b/webrtc/modules/video_capture/video_capture_impl.h
@@ -17,7 +17,7 @@
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/common_video/rotation.h"
-#include "webrtc/modules/video_capture/include/video_capture.h"
+#include "webrtc/modules/video_capture/video_capture.h"
#include "webrtc/modules/video_capture/video_capture_config.h"
#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/video_frame.h"
diff --git a/webrtc/modules/video_capture/windows/sink_filter_ds.h b/webrtc/modules/video_capture/windows/sink_filter_ds.h
index 064cd9d7d3..6be74f69f9 100644
--- a/webrtc/modules/video_capture/windows/sink_filter_ds.h
+++ b/webrtc/modules/video_capture/windows/sink_filter_ds.h
@@ -13,7 +13,7 @@
#include <Streams.h> // Include base DS filter header files
-#include "webrtc/modules/video_capture/include/video_capture_defines.h"
+#include "webrtc/modules/video_capture/video_capture_defines.h"
namespace webrtc
{
diff --git a/webrtc/modules/video_coding/BUILD.gn b/webrtc/modules/video_coding/BUILD.gn
index 9e8cd47e53..32ac627ed2 100644
--- a/webrtc/modules/video_coding/BUILD.gn
+++ b/webrtc/modules/video_coding/BUILD.gn
@@ -10,57 +10,57 @@ import("../../build/webrtc.gni")
source_set("video_coding") {
sources = [
- "main/interface/video_coding.h",
- "main/interface/video_coding_defines.h",
- "main/source/codec_database.cc",
- "main/source/codec_database.h",
- "main/source/codec_timer.cc",
- "main/source/codec_timer.h",
- "main/source/content_metrics_processing.cc",
- "main/source/content_metrics_processing.h",
- "main/source/decoding_state.cc",
- "main/source/decoding_state.h",
- "main/source/encoded_frame.cc",
- "main/source/encoded_frame.h",
- "main/source/fec_tables_xor.h",
- "main/source/frame_buffer.cc",
- "main/source/frame_buffer.h",
- "main/source/generic_decoder.cc",
- "main/source/generic_decoder.h",
- "main/source/generic_encoder.cc",
- "main/source/generic_encoder.h",
- "main/source/inter_frame_delay.cc",
- "main/source/inter_frame_delay.h",
- "main/source/internal_defines.h",
- "main/source/jitter_buffer.cc",
- "main/source/jitter_buffer.h",
- "main/source/jitter_buffer_common.h",
- "main/source/jitter_estimator.cc",
- "main/source/jitter_estimator.h",
- "main/source/media_opt_util.cc",
- "main/source/media_opt_util.h",
- "main/source/media_optimization.cc",
- "main/source/media_optimization.h",
- "main/source/nack_fec_tables.h",
- "main/source/packet.cc",
- "main/source/packet.h",
- "main/source/qm_select.cc",
- "main/source/qm_select.h",
- "main/source/qm_select_data.h",
- "main/source/receiver.cc",
- "main/source/receiver.h",
- "main/source/rtt_filter.cc",
- "main/source/rtt_filter.h",
- "main/source/session_info.cc",
- "main/source/session_info.h",
- "main/source/timestamp_map.cc",
- "main/source/timestamp_map.h",
- "main/source/timing.cc",
- "main/source/timing.h",
- "main/source/video_coding_impl.cc",
- "main/source/video_coding_impl.h",
- "main/source/video_receiver.cc",
- "main/source/video_sender.cc",
+ "codec_database.cc",
+ "codec_database.h",
+ "codec_timer.cc",
+ "codec_timer.h",
+ "content_metrics_processing.cc",
+ "content_metrics_processing.h",
+ "decoding_state.cc",
+ "decoding_state.h",
+ "encoded_frame.cc",
+ "encoded_frame.h",
+ "fec_tables_xor.h",
+ "frame_buffer.cc",
+ "frame_buffer.h",
+ "generic_decoder.cc",
+ "generic_decoder.h",
+ "generic_encoder.cc",
+ "generic_encoder.h",
+ "include/video_coding.h",
+ "include/video_coding_defines.h",
+ "inter_frame_delay.cc",
+ "inter_frame_delay.h",
+ "internal_defines.h",
+ "jitter_buffer.cc",
+ "jitter_buffer.h",
+ "jitter_buffer_common.h",
+ "jitter_estimator.cc",
+ "jitter_estimator.h",
+ "media_opt_util.cc",
+ "media_opt_util.h",
+ "media_optimization.cc",
+ "media_optimization.h",
+ "nack_fec_tables.h",
+ "packet.cc",
+ "packet.h",
+ "qm_select.cc",
+ "qm_select.h",
+ "qm_select_data.h",
+ "receiver.cc",
+ "receiver.h",
+ "rtt_filter.cc",
+ "rtt_filter.h",
+ "session_info.cc",
+ "session_info.h",
+ "timestamp_map.cc",
+ "timestamp_map.h",
+ "timing.cc",
+ "timing.h",
+ "video_coding_impl.cc",
+ "video_coding_impl.h",
+ "video_receiver.cc",
+ "video_sender.cc",
]
configs += [ "../..:common_config" ]
@@ -94,14 +94,14 @@ source_set("video_coding") {
source_set("video_coding_utility") {
sources = [
"utility/frame_dropper.cc",
- "utility/include/frame_dropper.h",
- "utility/include/moving_average.h",
- "utility/include/qp_parser.h",
- "utility/include/quality_scaler.h",
- "utility/include/vp8_header_parser.h",
+ "utility/frame_dropper.h",
+ "utility/moving_average.h",
"utility/qp_parser.cc",
+ "utility/qp_parser.h",
"utility/quality_scaler.cc",
+ "utility/quality_scaler.h",
"utility/vp8_header_parser.cc",
+ "utility/vp8_header_parser.h",
]
configs += [ "../..:common_config" ]
@@ -136,6 +136,18 @@ source_set("webrtc_h264") {
deps = [
"../../system_wrappers",
]
+
+ if (use_third_party_h264) {
+ # Dependency added so that variables use_openh264 and ffmpeg_branding are
+ # recognized build arguments (avoid "Build argument has no effect" error).
+ # The variables and dependencies will be used for real as soon as
+ # https://codereview.webrtc.org/1306813009/ lands. In the meantime, the
+ # build arguments are to be used by waterfall/trybots.
+ deps += [
+ "//third_party/ffmpeg:ffmpeg",
+ "//third_party/openh264:encoder",
+ ]
+ }
}
# TODO(tkchin): Source set for webrtc_h264_video_toolbox. Currently not
@@ -209,19 +221,15 @@ source_set("webrtc_vp8") {
}
source_set("webrtc_vp9") {
- if (rtc_build_vp9) {
- sources = [
- "codecs/vp9/include/vp9.h",
- "codecs/vp9/vp9_frame_buffer_pool.cc",
- "codecs/vp9/vp9_frame_buffer_pool.h",
- "codecs/vp9/vp9_impl.cc",
- "codecs/vp9/vp9_impl.h",
- ]
- } else {
- sources = [
- "codecs/vp9/vp9_dummy_impl.cc",
- ]
- }
+ sources = [
+ "codecs/vp9/include/vp9.h",
+ "codecs/vp9/screenshare_layers.cc",
+ "codecs/vp9/screenshare_layers.h",
+ "codecs/vp9/vp9_frame_buffer_pool.cc",
+ "codecs/vp9/vp9_frame_buffer_pool.h",
+ "codecs/vp9/vp9_impl.cc",
+ "codecs/vp9/vp9_impl.h",
+ ]
configs += [ "../..:common_config" ]
public_configs = [ "../..:common_inherited_config" ]
diff --git a/webrtc/modules/video_coding/OWNERS b/webrtc/modules/video_coding/OWNERS
index f452c9ed83..389d632dfd 100644
--- a/webrtc/modules/video_coding/OWNERS
+++ b/webrtc/modules/video_coding/OWNERS
@@ -1,4 +1,9 @@
stefan@webrtc.org
marpan@webrtc.org
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
+
per-file BUILD.gn=kjellander@webrtc.org
diff --git a/webrtc/modules/video_coding/codec_database.cc b/webrtc/modules/video_coding/codec_database.cc
new file mode 100644
index 0000000000..1fae435bab
--- /dev/null
+++ b/webrtc/modules/video_coding/codec_database.cc
@@ -0,0 +1,616 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_coding/codec_database.h"
+
+#include <assert.h>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/engine_configurations.h"
+#include "webrtc/modules/video_coding/codecs/h264/include/h264.h"
+#include "webrtc/modules/video_coding/codecs/i420/include/i420.h"
+#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
+#include "webrtc/modules/video_coding/codecs/vp9/include/vp9.h"
+#include "webrtc/modules/video_coding/internal_defines.h"
+
+namespace {
+const size_t kDefaultPayloadSize = 1440;
+const uint8_t kDefaultPayloadType = 100;
+}
+
+namespace webrtc {
+
+VideoCodecVP8 VideoEncoder::GetDefaultVp8Settings() {
+ VideoCodecVP8 vp8_settings;
+ memset(&vp8_settings, 0, sizeof(vp8_settings));
+
+ vp8_settings.resilience = kResilientStream;
+ vp8_settings.numberOfTemporalLayers = 1;
+ vp8_settings.denoisingOn = true;
+ vp8_settings.errorConcealmentOn = false;
+ vp8_settings.automaticResizeOn = false;
+ vp8_settings.frameDroppingOn = true;
+ vp8_settings.keyFrameInterval = 3000;
+
+ return vp8_settings;
+}
+
+VideoCodecVP9 VideoEncoder::GetDefaultVp9Settings() {
+ VideoCodecVP9 vp9_settings;
+ memset(&vp9_settings, 0, sizeof(vp9_settings));
+
+ vp9_settings.resilience = 1;
+ vp9_settings.numberOfTemporalLayers = 1;
+ vp9_settings.denoisingOn = false;
+ vp9_settings.frameDroppingOn = true;
+ vp9_settings.keyFrameInterval = 3000;
+ vp9_settings.adaptiveQpMode = true;
+ vp9_settings.automaticResizeOn = true;
+ vp9_settings.numberOfSpatialLayers = 1;
+ vp9_settings.flexibleMode = false;
+ return vp9_settings;
+}
+
+VideoCodecH264 VideoEncoder::GetDefaultH264Settings() {
+ VideoCodecH264 h264_settings;
+ memset(&h264_settings, 0, sizeof(h264_settings));
+
+ h264_settings.profile = kProfileBase;
+ h264_settings.frameDroppingOn = true;
+ h264_settings.keyFrameInterval = 3000;
+ h264_settings.spsData = nullptr;
+ h264_settings.spsLen = 0;
+ h264_settings.ppsData = nullptr;
+ h264_settings.ppsLen = 0;
+
+ return h264_settings;
+}
+
+VCMDecoderMapItem::VCMDecoderMapItem(VideoCodec* settings,
+ int number_of_cores,
+ bool require_key_frame)
+ : settings(settings),
+ number_of_cores(number_of_cores),
+ require_key_frame(require_key_frame) {
+ assert(number_of_cores >= 0);
+}
+
+VCMExtDecoderMapItem::VCMExtDecoderMapItem(
+ VideoDecoder* external_decoder_instance,
+ uint8_t payload_type)
+ : payload_type(payload_type),
+ external_decoder_instance(external_decoder_instance) {}
+
+VCMCodecDataBase::VCMCodecDataBase(
+ VideoEncoderRateObserver* encoder_rate_observer,
+ VCMEncodedFrameCallback* encoded_frame_callback)
+ : number_of_cores_(0),
+ max_payload_size_(kDefaultPayloadSize),
+ periodic_key_frames_(false),
+ pending_encoder_reset_(true),
+ send_codec_(),
+ receive_codec_(),
+ encoder_payload_type_(0),
+ external_encoder_(nullptr),
+ internal_source_(false),
+ encoder_rate_observer_(encoder_rate_observer),
+ encoded_frame_callback_(encoded_frame_callback),
+ ptr_decoder_(nullptr),
+ dec_map_(),
+ dec_external_map_() {}
+
+VCMCodecDataBase::~VCMCodecDataBase() {
+ DeleteEncoder();
+ ReleaseDecoder(ptr_decoder_);
+ for (auto& kv : dec_map_)
+ delete kv.second;
+ for (auto& kv : dec_external_map_)
+ delete kv.second;
+}
+
+void VCMCodecDataBase::Codec(VideoCodecType codec_type, VideoCodec* settings) {
+ memset(settings, 0, sizeof(VideoCodec));
+ switch (codec_type) {
+ case kVideoCodecVP8:
+ strncpy(settings->plName, "VP8", 4);
+ settings->codecType = kVideoCodecVP8;
+ // 96 to 127 dynamic payload types for video codecs.
+ settings->plType = kDefaultPayloadType;
+ settings->startBitrate = kDefaultStartBitrateKbps;
+ settings->minBitrate = VCM_MIN_BITRATE;
+ settings->maxBitrate = 0;
+ settings->maxFramerate = VCM_DEFAULT_FRAME_RATE;
+ settings->width = VCM_DEFAULT_CODEC_WIDTH;
+ settings->height = VCM_DEFAULT_CODEC_HEIGHT;
+ settings->numberOfSimulcastStreams = 0;
+ settings->qpMax = 56;
+ settings->codecSpecific.VP8 = VideoEncoder::GetDefaultVp8Settings();
+ return;
+ case kVideoCodecVP9:
+ strncpy(settings->plName, "VP9", 4);
+ settings->codecType = kVideoCodecVP9;
+ // 96 to 127 dynamic payload types for video codecs.
+ settings->plType = kDefaultPayloadType;
+ settings->startBitrate = 100;
+ settings->minBitrate = VCM_MIN_BITRATE;
+ settings->maxBitrate = 0;
+ settings->maxFramerate = VCM_DEFAULT_FRAME_RATE;
+ settings->width = VCM_DEFAULT_CODEC_WIDTH;
+ settings->height = VCM_DEFAULT_CODEC_HEIGHT;
+ settings->numberOfSimulcastStreams = 0;
+ settings->qpMax = 56;
+ settings->codecSpecific.VP9 = VideoEncoder::GetDefaultVp9Settings();
+ return;
+ case kVideoCodecH264:
+ strncpy(settings->plName, "H264", 5);
+ settings->codecType = kVideoCodecH264;
+ // 96 to 127 dynamic payload types for video codecs.
+ settings->plType = kDefaultPayloadType;
+ settings->startBitrate = kDefaultStartBitrateKbps;
+ settings->minBitrate = VCM_MIN_BITRATE;
+ settings->maxBitrate = 0;
+ settings->maxFramerate = VCM_DEFAULT_FRAME_RATE;
+ settings->width = VCM_DEFAULT_CODEC_WIDTH;
+ settings->height = VCM_DEFAULT_CODEC_HEIGHT;
+ settings->numberOfSimulcastStreams = 0;
+ settings->qpMax = 56;
+ settings->codecSpecific.H264 = VideoEncoder::GetDefaultH264Settings();
+ return;
+ case kVideoCodecI420:
+ strncpy(settings->plName, "I420", 5);
+ settings->codecType = kVideoCodecI420;
+ // 96 to 127 dynamic payload types for video codecs.
+ settings->plType = kDefaultPayloadType;
+ // Bitrate needed for this size and framerate: raw I420 is 12 bits per
+ // pixel (3/2 bytes), and the result is in kbps.
+ settings->startBitrate = 3 * VCM_DEFAULT_CODEC_WIDTH *
+ VCM_DEFAULT_CODEC_HEIGHT * 8 *
+ VCM_DEFAULT_FRAME_RATE / 1000 / 2;
+ settings->maxBitrate = settings->startBitrate;
+ settings->maxFramerate = VCM_DEFAULT_FRAME_RATE;
+ settings->width = VCM_DEFAULT_CODEC_WIDTH;
+ settings->height = VCM_DEFAULT_CODEC_HEIGHT;
+ settings->minBitrate = VCM_MIN_BITRATE;
+ settings->numberOfSimulcastStreams = 0;
+ return;
+ case kVideoCodecRED:
+ case kVideoCodecULPFEC:
+ case kVideoCodecGeneric:
+ case kVideoCodecUnknown:
+ RTC_NOTREACHED();
+ return;
+ }
+}
+
+// Assumes at most one registered encoder; since only one is used at a time,
+// no more are needed.
+bool VCMCodecDataBase::SetSendCodec(const VideoCodec* send_codec,
+ int number_of_cores,
+ size_t max_payload_size) {
+ RTC_DCHECK(send_codec);
+ if (max_payload_size == 0) {
+ max_payload_size = kDefaultPayloadSize;
+ }
+ RTC_DCHECK_GE(number_of_cores, 1);
+ RTC_DCHECK_GE(send_codec->plType, 1);
+ // Make sure the start bit rate is sane...
+ RTC_DCHECK_LE(send_codec->startBitrate, 1000000u);
+ RTC_DCHECK(send_codec->codecType != kVideoCodecUnknown);
+ bool reset_required = pending_encoder_reset_;
+ if (number_of_cores_ != number_of_cores) {
+ number_of_cores_ = number_of_cores;
+ reset_required = true;
+ }
+ if (max_payload_size_ != max_payload_size) {
+ max_payload_size_ = max_payload_size;
+ reset_required = true;
+ }
+
+ VideoCodec new_send_codec;
+ memcpy(&new_send_codec, send_codec, sizeof(new_send_codec));
+
+ if (new_send_codec.maxBitrate == 0) {
+ // Max defaults to one bit per pixel per frame, in kbps.
+ new_send_codec.maxBitrate = (static_cast<int>(send_codec->height) *
+ static_cast<int>(send_codec->width) *
+ static_cast<int>(send_codec->maxFramerate)) /
+ 1000;
+ if (send_codec->startBitrate > new_send_codec.maxBitrate) {
+ // But if the user tries to set a higher start bit rate we will
+ // increase the max accordingly.
+ new_send_codec.maxBitrate = send_codec->startBitrate;
+ }
+ }
+
+ if (new_send_codec.startBitrate > new_send_codec.maxBitrate)
+ new_send_codec.startBitrate = new_send_codec.maxBitrate;
+
+ if (!reset_required) {
+ reset_required = RequiresEncoderReset(new_send_codec);
+ }
+
+ memcpy(&send_codec_, &new_send_codec, sizeof(send_codec_));
+
+ if (!reset_required) {
+ encoded_frame_callback_->SetPayloadType(send_codec_.plType);
+ return true;
+ }
+
+ // If an encoder already exists, destroy it and create a new one.
+ DeleteEncoder();
+ RTC_DCHECK_EQ(encoder_payload_type_, send_codec_.plType)
+ << "Encoder not registered for payload type " << send_codec_.plType;
+ ptr_encoder_.reset(
+ new VCMGenericEncoder(external_encoder_, encoder_rate_observer_,
+ encoded_frame_callback_, internal_source_));
+ encoded_frame_callback_->SetPayloadType(send_codec_.plType);
+ encoded_frame_callback_->SetInternalSource(internal_source_);
+ if (ptr_encoder_->InitEncode(&send_codec_, number_of_cores_,
+ max_payload_size_) < 0) {
+ LOG(LS_ERROR) << "Failed to initialize video encoder.";
+ DeleteEncoder();
+ return false;
+ }
+
+ // Intentionally don't check the return value: encoder registration shouldn't
+ // fail just because the codec doesn't support changing the periodic key
+ // frame setting.
+ ptr_encoder_->SetPeriodicKeyFrames(periodic_key_frames_);
+
+ pending_encoder_reset_ = false;
+
+ return true;
+}
+
+bool VCMCodecDataBase::SendCodec(VideoCodec* current_send_codec) const {
+ if (!ptr_encoder_) {
+ return false;
+ }
+ memcpy(current_send_codec, &send_codec_, sizeof(VideoCodec));
+ return true;
+}
+
+VideoCodecType VCMCodecDataBase::SendCodec() const {
+ if (!ptr_encoder_) {
+ return kVideoCodecUnknown;
+ }
+ return send_codec_.codecType;
+}
+
+bool VCMCodecDataBase::DeregisterExternalEncoder(uint8_t payload_type,
+ bool* was_send_codec) {
+ assert(was_send_codec);
+ *was_send_codec = false;
+ if (encoder_payload_type_ != payload_type) {
+ return false;
+ }
+ if (send_codec_.plType == payload_type) {
+ // De-register as send codec if needed.
+ DeleteEncoder();
+ memset(&send_codec_, 0, sizeof(VideoCodec));
+ *was_send_codec = true;
+ }
+ encoder_payload_type_ = 0;
+ external_encoder_ = nullptr;
+ internal_source_ = false;
+ return true;
+}
+
+void VCMCodecDataBase::RegisterExternalEncoder(VideoEncoder* external_encoder,
+ uint8_t payload_type,
+ bool internal_source) {
+ // Since only one encoder can be used at a given time, only one external
+ // encoder can be registered/used.
+ external_encoder_ = external_encoder;
+ encoder_payload_type_ = payload_type;
+ internal_source_ = internal_source;
+ pending_encoder_reset_ = true;
+}
+
+bool VCMCodecDataBase::RequiresEncoderReset(const VideoCodec& new_send_codec) {
+ if (!ptr_encoder_)
+ return true;
+
+ // Does not check startBitrate or maxFramerate
+ if (new_send_codec.codecType != send_codec_.codecType ||
+ strcmp(new_send_codec.plName, send_codec_.plName) != 0 ||
+ new_send_codec.plType != send_codec_.plType ||
+ new_send_codec.width != send_codec_.width ||
+ new_send_codec.height != send_codec_.height ||
+ new_send_codec.maxBitrate != send_codec_.maxBitrate ||
+ new_send_codec.minBitrate != send_codec_.minBitrate ||
+ new_send_codec.qpMax != send_codec_.qpMax ||
+ new_send_codec.numberOfSimulcastStreams !=
+ send_codec_.numberOfSimulcastStreams ||
+ new_send_codec.mode != send_codec_.mode ||
+ new_send_codec.extra_options != send_codec_.extra_options) {
+ return true;
+ }
+
+ switch (new_send_codec.codecType) {
+ case kVideoCodecVP8:
+ if (memcmp(&new_send_codec.codecSpecific.VP8,
+ &send_codec_.codecSpecific.VP8,
+ sizeof(new_send_codec.codecSpecific.VP8)) != 0) {
+ return true;
+ }
+ break;
+ case kVideoCodecVP9:
+ if (memcmp(&new_send_codec.codecSpecific.VP9,
+ &send_codec_.codecSpecific.VP9,
+ sizeof(new_send_codec.codecSpecific.VP9)) != 0) {
+ return true;
+ }
+ break;
+ case kVideoCodecH264:
+ if (memcmp(&new_send_codec.codecSpecific.H264,
+ &send_codec_.codecSpecific.H264,
+ sizeof(new_send_codec.codecSpecific.H264)) != 0) {
+ return true;
+ }
+ break;
+ case kVideoCodecGeneric:
+ break;
+ // Known codecs without payload-specifics
+ case kVideoCodecI420:
+ case kVideoCodecRED:
+ case kVideoCodecULPFEC:
+ break;
+ // Unknown codec type, reset just to be sure.
+ case kVideoCodecUnknown:
+ return true;
+ }
+
+ if (new_send_codec.numberOfSimulcastStreams > 0) {
+ for (unsigned char i = 0; i < new_send_codec.numberOfSimulcastStreams;
+ ++i) {
+ if (memcmp(&new_send_codec.simulcastStream[i],
+ &send_codec_.simulcastStream[i],
+ sizeof(new_send_codec.simulcastStream[i])) != 0) {
+ return true;
+ }
+ }
+ }
+ return false;
+}
+
+VCMGenericEncoder* VCMCodecDataBase::GetEncoder() {
+ return ptr_encoder_.get();
+}
+
+bool VCMCodecDataBase::SetPeriodicKeyFrames(bool enable) {
+ periodic_key_frames_ = enable;
+ if (ptr_encoder_) {
+ return (ptr_encoder_->SetPeriodicKeyFrames(periodic_key_frames_) == 0);
+ }
+ return true;
+}
+
+bool VCMCodecDataBase::DeregisterExternalDecoder(uint8_t payload_type) {
+ ExternalDecoderMap::iterator it = dec_external_map_.find(payload_type);
+ if (it == dec_external_map_.end()) {
+ // Not found
+ return false;
+ }
+ // We can't use payload_type to check whether the decoder is currently in
+ // use, because the payload type may be out of date (e.g. before we decode
+ // the first frame after RegisterReceiveCodec).
+ if (ptr_decoder_ != nullptr &&
+ ptr_decoder_->_decoder == (*it).second->external_decoder_instance) {
+ // Release it if it was registered and in use.
+ ReleaseDecoder(ptr_decoder_);
+ ptr_decoder_ = nullptr;
+ }
+ DeregisterReceiveCodec(payload_type);
+ delete it->second;
+ dec_external_map_.erase(it);
+ return true;
+}
+
+// Add the external decoder object to the list of external decoders.
+// Won't be registered as a receive codec until RegisterReceiveCodec is called.
+void VCMCodecDataBase::RegisterExternalDecoder(VideoDecoder* external_decoder,
+ uint8_t payload_type) {
+ // If the payload type is already registered, erase the old entry before
+ // inserting the new one.
+ VCMExtDecoderMapItem* ext_decoder =
+ new VCMExtDecoderMapItem(external_decoder, payload_type);
+ DeregisterExternalDecoder(payload_type);
+ dec_external_map_[payload_type] = ext_decoder;
+}
+
+bool VCMCodecDataBase::DecoderRegistered() const {
+ return !dec_map_.empty();
+}
+
+bool VCMCodecDataBase::RegisterReceiveCodec(const VideoCodec* receive_codec,
+ int number_of_cores,
+ bool require_key_frame) {
+ if (number_of_cores < 0) {
+ return false;
+ }
+ // If the payload type is already registered, erase the old entry before
+ // inserting the new one.
+ DeregisterReceiveCodec(receive_codec->plType);
+ if (receive_codec->codecType == kVideoCodecUnknown) {
+ return false;
+ }
+ VideoCodec* new_receive_codec = new VideoCodec(*receive_codec);
+ dec_map_[receive_codec->plType] = new VCMDecoderMapItem(
+ new_receive_codec, number_of_cores, require_key_frame);
+ return true;
+}
+
+bool VCMCodecDataBase::DeregisterReceiveCodec(uint8_t payload_type) {
+ DecoderMap::iterator it = dec_map_.find(payload_type);
+ if (it == dec_map_.end()) {
+ return false;
+ }
+ delete it->second;
+ dec_map_.erase(it);
+ if (receive_codec_.plType == payload_type) {
+ // This codec is currently in use.
+ memset(&receive_codec_, 0, sizeof(VideoCodec));
+ }
+ return true;
+}
+
+bool VCMCodecDataBase::ReceiveCodec(VideoCodec* current_receive_codec) const {
+ assert(current_receive_codec);
+ if (!ptr_decoder_) {
+ return false;
+ }
+ memcpy(current_receive_codec, &receive_codec_, sizeof(VideoCodec));
+ return true;
+}
+
+VideoCodecType VCMCodecDataBase::ReceiveCodec() const {
+ if (!ptr_decoder_) {
+ return kVideoCodecUnknown;
+ }
+ return receive_codec_.codecType;
+}
+
+VCMGenericDecoder* VCMCodecDataBase::GetDecoder(
+ const VCMEncodedFrame& frame,
+ VCMDecodedFrameCallback* decoded_frame_callback) {
+ uint8_t payload_type = frame.PayloadType();
+ if (payload_type == receive_codec_.plType || payload_type == 0) {
+ return ptr_decoder_;
+ }
+ // Check for an existing decoder; if one exists, release it.
+ if (ptr_decoder_) {
+ ReleaseDecoder(ptr_decoder_);
+ ptr_decoder_ = nullptr;
+ memset(&receive_codec_, 0, sizeof(VideoCodec));
+ }
+ ptr_decoder_ = CreateAndInitDecoder(frame, &receive_codec_);
+ if (!ptr_decoder_) {
+ return nullptr;
+ }
+ VCMReceiveCallback* callback = decoded_frame_callback->UserReceiveCallback();
+ if (callback)
+ callback->OnIncomingPayloadType(receive_codec_.plType);
+ if (ptr_decoder_->RegisterDecodeCompleteCallback(decoded_frame_callback) <
+ 0) {
+ ReleaseDecoder(ptr_decoder_);
+ ptr_decoder_ = nullptr;
+ memset(&receive_codec_, 0, sizeof(VideoCodec));
+ return nullptr;
+ }
+ return ptr_decoder_;
+}
+
+void VCMCodecDataBase::ReleaseDecoder(VCMGenericDecoder* decoder) const {
+ if (decoder) {
+ assert(decoder->_decoder);
+ decoder->Release();
+ if (!decoder->External()) {
+ delete decoder->_decoder;
+ }
+ delete decoder;
+ }
+}
+
+bool VCMCodecDataBase::PrefersLateDecoding() const {
+ if (!ptr_decoder_)
+ return true;
+ return ptr_decoder_->PrefersLateDecoding();
+}
+
+bool VCMCodecDataBase::MatchesCurrentResolution(int width, int height) const {
+ return send_codec_.width == width && send_codec_.height == height;
+}
+
+VCMGenericDecoder* VCMCodecDataBase::CreateAndInitDecoder(
+ const VCMEncodedFrame& frame,
+ VideoCodec* new_codec) const {
+ uint8_t payload_type = frame.PayloadType();
+ assert(new_codec);
+ const VCMDecoderMapItem* decoder_item = FindDecoderItem(payload_type);
+ if (!decoder_item) {
+ LOG(LS_ERROR) << "Can't find a decoder associated with payload type: "
+ << static_cast<int>(payload_type);
+ return nullptr;
+ }
+ VCMGenericDecoder* ptr_decoder = nullptr;
+ const VCMExtDecoderMapItem* external_dec_item =
+ FindExternalDecoderItem(payload_type);
+ if (external_dec_item) {
+ // External codec.
+ ptr_decoder = new VCMGenericDecoder(
+ external_dec_item->external_decoder_instance, true);
+ } else {
+ // Create decoder.
+ ptr_decoder = CreateDecoder(decoder_item->settings->codecType);
+ }
+ if (!ptr_decoder)
+ return nullptr;
+
+ // Copy over the input resolution to prevent codec reinitialization when the
+ // first frame has a different resolution than the database values. This is
+ // best effort, since there's no guarantee that width/height have been parsed
+ // yet (they may be zero).
+ if (frame.EncodedImage()._encodedWidth > 0 &&
+ frame.EncodedImage()._encodedHeight > 0) {
+ decoder_item->settings->width = frame.EncodedImage()._encodedWidth;
+ decoder_item->settings->height = frame.EncodedImage()._encodedHeight;
+ }
+ if (ptr_decoder->InitDecode(decoder_item->settings.get(),
+ decoder_item->number_of_cores) < 0) {
+ ReleaseDecoder(ptr_decoder);
+ return nullptr;
+ }
+ memcpy(new_codec, decoder_item->settings.get(), sizeof(VideoCodec));
+ return ptr_decoder;
+}
+
+void VCMCodecDataBase::DeleteEncoder() {
+ if (!ptr_encoder_)
+ return;
+ ptr_encoder_->Release();
+ ptr_encoder_.reset();
+}
+
+VCMGenericDecoder* VCMCodecDataBase::CreateDecoder(VideoCodecType type) const {
+ switch (type) {
+ case kVideoCodecVP8:
+ return new VCMGenericDecoder(VP8Decoder::Create());
+ case kVideoCodecVP9:
+ return new VCMGenericDecoder(VP9Decoder::Create());
+ case kVideoCodecI420:
+ return new VCMGenericDecoder(new I420Decoder());
+ case kVideoCodecH264:
+ if (H264Decoder::IsSupported()) {
+ return new VCMGenericDecoder(H264Decoder::Create());
+ }
+ break;
+ default:
+ break;
+ }
+ LOG(LS_WARNING) << "No internal decoder of this type exists.";
+ return nullptr;
+}
+
+const VCMDecoderMapItem* VCMCodecDataBase::FindDecoderItem(
+ uint8_t payload_type) const {
+ DecoderMap::const_iterator it = dec_map_.find(payload_type);
+ if (it != dec_map_.end()) {
+ return (*it).second;
+ }
+ return nullptr;
+}
+
+const VCMExtDecoderMapItem* VCMCodecDataBase::FindExternalDecoderItem(
+ uint8_t payload_type) const {
+ ExternalDecoderMap::const_iterator it = dec_external_map_.find(payload_type);
+ if (it != dec_external_map_.end()) {
+ return (*it).second;
+ }
+ return nullptr;
+}
+} // namespace webrtc
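A quick reviewer's sketch of the send-side flow the new codec_database.cc implements, assuming the surrounding WebRTC build; rate_observer, encoded_frame_callback, and external_encoder are caller-owned placeholders, not names from this patch:

    // Sketch only: minimal send-side wiring for VCMCodecDataBase.
    webrtc::VideoCodec codec;
    webrtc::VCMCodecDataBase::Codec(webrtc::kVideoCodecVP8, &codec);

    webrtc::VCMCodecDataBase db(rate_observer, encoded_frame_callback);
    // An encoder must be registered for codec.plType before SetSendCodec(),
    // otherwise the RTC_DCHECK_EQ on encoder_payload_type_ fires.
    db.RegisterExternalEncoder(external_encoder, codec.plType,
                               false /* internal_source */);
    bool ok = db.SetSendCodec(&codec, 1 /* number_of_cores */,
                              1440 /* max_payload_size */);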
diff --git a/webrtc/modules/video_coding/codec_database.h b/webrtc/modules/video_coding/codec_database.h
new file mode 100644
index 0000000000..62ec30a46e
--- /dev/null
+++ b/webrtc/modules/video_coding/codec_database.h
@@ -0,0 +1,167 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODEC_DATABASE_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODEC_DATABASE_H_
+
+#include <map>
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
+#include "webrtc/modules/video_coding/include/video_coding.h"
+#include "webrtc/modules/video_coding/generic_decoder.h"
+#include "webrtc/modules/video_coding/generic_encoder.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+struct VCMDecoderMapItem {
+ public:
+ VCMDecoderMapItem(VideoCodec* settings,
+ int number_of_cores,
+ bool require_key_frame);
+
+ rtc::scoped_ptr<VideoCodec> settings;
+ int number_of_cores;
+ bool require_key_frame;
+};
+
+struct VCMExtDecoderMapItem {
+ public:
+ VCMExtDecoderMapItem(VideoDecoder* external_decoder_instance,
+ uint8_t payload_type);
+
+ uint8_t payload_type;
+ VideoDecoder* external_decoder_instance;
+};
+
+class VCMCodecDataBase {
+ public:
+ VCMCodecDataBase(VideoEncoderRateObserver* encoder_rate_observer,
+ VCMEncodedFrameCallback* encoded_frame_callback);
+ ~VCMCodecDataBase();
+
+ // Sender Side
+ // Returns the default settings for the codec with type |codec_type|.
+ static void Codec(VideoCodecType codec_type, VideoCodec* settings);
+
+ // Sets the sender side codec and initializes the encoder given the
+ // VideoCodec struct.
+ // Returns true if the codec was successfully registered, false otherwise.
+ bool SetSendCodec(const VideoCodec* send_codec,
+ int number_of_cores,
+ size_t max_payload_size);
+
+ // Gets the current send codec. Relevant for internal codecs only.
+ // Returns true if there is a send codec, false otherwise.
+ bool SendCodec(VideoCodec* current_send_codec) const;
+
+ // Gets current send side codec type. Relevant for internal codecs only.
+ // Returns kVideoCodecUnknown if there is no send codec.
+ VideoCodecType SendCodec() const;
+
+ // Registers and initializes an external encoder object.
+ // |internal_source| should be set to true if the codec has an internal
+ // video source and doesn't need the user to provide it with frames via
+ // the Encode() method.
+ void RegisterExternalEncoder(VideoEncoder* external_encoder,
+ uint8_t payload_type,
+ bool internal_source);
+
+ // Deregisters an external encoder. Returns true if the encoder was
+ // found and deregistered, false otherwise. |was_send_codec| is set to true
+ // if the external encoder was the send codec before being deregistered.
+ bool DeregisterExternalEncoder(uint8_t payload_type, bool* was_send_codec);
+
+ VCMGenericEncoder* GetEncoder();
+
+ bool SetPeriodicKeyFrames(bool enable);
+
+ // Deregisters an external decoder object specified by |payload_type|.
+ bool DeregisterExternalDecoder(uint8_t payload_type);
+
+ // Registers an external decoder object to the payload type |payload_type|.
+ void RegisterExternalDecoder(VideoDecoder* external_decoder,
+ uint8_t payload_type);
+
+ bool DecoderRegistered() const;
+
+ bool RegisterReceiveCodec(const VideoCodec* receive_codec,
+ int number_of_cores,
+ bool require_key_frame);
+
+ bool DeregisterReceiveCodec(uint8_t payload_type);
+
+ // Gets the current receive side codec. Relevant for internal codecs only.
+ bool ReceiveCodec(VideoCodec* current_receive_codec) const;
+
+ // Gets the current receive side codec type. Relevant for internal codecs only.
+ VideoCodecType ReceiveCodec() const;
+
+ // Returns a decoder specified by |payload_type|. The decoded frame callback
+ // of the decoder is set to |decoded_frame_callback|. If no such decoder
+ // already exists, an instance will be created and initialized.
+ // NULL is returned if no decoder with the specified payload type was found
+ // and the function failed to create one.
+ VCMGenericDecoder* GetDecoder(
+ const VCMEncodedFrame& frame,
+ VCMDecodedFrameCallback* decoded_frame_callback);
+
+ // Releases |decoder| and deletes it. Internally created decoder instances
+ // are deleted as well; external decoder instances are left to their owner.
+ void ReleaseDecoder(VCMGenericDecoder* decoder) const;
+
+ // Returns true if the currently active decoder prefers to decode frames
+ // late. That means that frames must be decoded near the render timestamp.
+ bool PrefersLateDecoding() const;
+
+ bool MatchesCurrentResolution(int width, int height) const;
+
+ private:
+ typedef std::map<uint8_t, VCMDecoderMapItem*> DecoderMap;
+ typedef std::map<uint8_t, VCMExtDecoderMapItem*> ExternalDecoderMap;
+
+ VCMGenericDecoder* CreateAndInitDecoder(const VCMEncodedFrame& frame,
+ VideoCodec* new_codec) const;
+
+ // Determines whether a new codec has to be created or not.
+ // Checks every setting apart from maxFramerate and startBitrate.
+ bool RequiresEncoderReset(const VideoCodec& send_codec);
+
+ void DeleteEncoder();
+
+ // Creates an internal decoder given a codec type.
+ VCMGenericDecoder* CreateDecoder(VideoCodecType type) const;
+
+ const VCMDecoderMapItem* FindDecoderItem(uint8_t payload_type) const;
+
+ const VCMExtDecoderMapItem* FindExternalDecoderItem(
+ uint8_t payload_type) const;
+
+ int number_of_cores_;
+ size_t max_payload_size_;
+ bool periodic_key_frames_;
+ bool pending_encoder_reset_;
+ VideoCodec send_codec_;
+ VideoCodec receive_codec_;
+ uint8_t encoder_payload_type_;
+ VideoEncoder* external_encoder_;
+ bool internal_source_;
+ VideoEncoderRateObserver* const encoder_rate_observer_;
+ VCMEncodedFrameCallback* const encoded_frame_callback_;
+ rtc::scoped_ptr<VCMGenericEncoder> ptr_encoder_;
+ VCMGenericDecoder* ptr_decoder_;
+ DecoderMap dec_map_;
+ ExternalDecoderMap dec_external_map_;
+}; // VCMCodecDataBase
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODEC_DATABASE_H_
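Continuing the sketch above on the receive side (external_decoder, decoded_callback, and frame are again placeholders):

    // Sketch only: register a decoder, then let GetDecoder() lazily create
    // and initialize the generic wrapper on the first incoming frame.
    webrtc::VideoCodec recv_codec;
    webrtc::VCMCodecDataBase::Codec(webrtc::kVideoCodecVP8, &recv_codec);
    db.RegisterExternalDecoder(external_decoder, recv_codec.plType);
    db.RegisterReceiveCodec(&recv_codec, 1 /* number_of_cores */,
                            false /* require_key_frame */);
    webrtc::VCMGenericDecoder* decoder = db.GetDecoder(frame, decoded_callback);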
diff --git a/webrtc/modules/video_coding/codec_timer.cc b/webrtc/modules/video_coding/codec_timer.cc
new file mode 100644
index 0000000000..60add8fc4b
--- /dev/null
+++ b/webrtc/modules/video_coding/codec_timer.cc
@@ -0,0 +1,96 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_coding/codec_timer.h"
+
+#include <assert.h>
+
+namespace webrtc {
+
+// The first kIgnoredSampleCount samples will be ignored.
+static const int32_t kIgnoredSampleCount = 5;
+
+VCMCodecTimer::VCMCodecTimer()
+ : _filteredMax(0), _ignoredSampleCount(0), _shortMax(0), _history() {
+ Reset();
+}
+
+void VCMCodecTimer::Reset() {
+ _filteredMax = 0;
+ _ignoredSampleCount = 0;
+ _shortMax = 0;
+ for (int i = 0; i < MAX_HISTORY_SIZE; i++) {
+ _history[i].shortMax = 0;
+ _history[i].timeMs = -1;
+ }
+}
+
+// Update the max-value filter
+void VCMCodecTimer::MaxFilter(int32_t decodeTime, int64_t nowMs) {
+ if (_ignoredSampleCount >= kIgnoredSampleCount) {
+ UpdateMaxHistory(decodeTime, nowMs);
+ ProcessHistory(nowMs);
+ } else {
+ _ignoredSampleCount++;
+ }
+}
+
+void VCMCodecTimer::UpdateMaxHistory(int32_t decodeTime, int64_t now) {
+ if (_history[0].timeMs >= 0 && now - _history[0].timeMs < SHORT_FILTER_MS) {
+ if (decodeTime > _shortMax) {
+ _shortMax = decodeTime;
+ }
+ } else {
+ // Only add a new value to the history once per second.
+ if (_history[0].timeMs == -1) {
+ // First, no shift
+ _shortMax = decodeTime;
+ } else {
+ // Shift
+ for (int i = (MAX_HISTORY_SIZE - 2); i >= 0; i--) {
+ _history[i + 1].shortMax = _history[i].shortMax;
+ _history[i + 1].timeMs = _history[i].timeMs;
+ }
+ }
+ if (_shortMax == 0) {
+ _shortMax = decodeTime;
+ }
+
+ _history[0].shortMax = _shortMax;
+ _history[0].timeMs = now;
+ _shortMax = 0;
+ }
+}
+
+void VCMCodecTimer::ProcessHistory(int64_t nowMs) {
+ _filteredMax = _shortMax;
+ if (_history[0].timeMs == -1) {
+ return;
+ }
+ for (int i = 0; i < MAX_HISTORY_SIZE; i++) {
+ if (_history[i].timeMs == -1) {
+ break;
+ }
+ if (nowMs - _history[i].timeMs > MAX_HISTORY_SIZE * SHORT_FILTER_MS) {
+ // This sample (and all samples after it) is too old.
+ break;
+ }
+ if (_history[i].shortMax > _filteredMax) {
+ // This sample is the largest one so far in the history.
+ _filteredMax = _history[i].shortMax;
+ }
+ }
+}
+
+// Returns the maximum observed decode time within the filter window.
+int32_t VCMCodecTimer::RequiredDecodeTimeMs(FrameType /*frameType*/) const {
+ return _filteredMax;
+}
+} // namespace webrtc
diff --git a/webrtc/modules/video_coding/codec_timer.h b/webrtc/modules/video_coding/codec_timer.h
new file mode 100644
index 0000000000..8ebd82ab9c
--- /dev/null
+++ b/webrtc/modules/video_coding/codec_timer.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODEC_TIMER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODEC_TIMER_H_
+
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// MAX_HISTORY_SIZE * SHORT_FILTER_MS defines the window size in milliseconds
+#define MAX_HISTORY_SIZE 10
+#define SHORT_FILTER_MS 1000
+
+class VCMShortMaxSample {
+ public:
+ VCMShortMaxSample() : shortMax(0), timeMs(-1) {}
+
+ int32_t shortMax;
+ int64_t timeMs;
+};
+
+class VCMCodecTimer {
+ public:
+ VCMCodecTimer();
+
+ // Updates the max filtered decode time.
+ void MaxFilter(int32_t newDecodeTimeMs, int64_t nowMs);
+
+ // Empties the history of recorded samples.
+ void Reset();
+
+ // Get the required decode time in ms.
+ int32_t RequiredDecodeTimeMs(FrameType frameType) const;
+
+ private:
+ void UpdateMaxHistory(int32_t decodeTime, int64_t now);
+ void ProcessHistory(int64_t nowMs);
+
+ int32_t _filteredMax;
+ // The number of samples ignored so far.
+ int32_t _ignoredSampleCount;
+ int32_t _shortMax;
+ VCMShortMaxSample _history[MAX_HISTORY_SIZE];
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODEC_TIMER_H_
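How the timer behaves, as a sketch: the first five samples are swallowed (kIgnoredSampleCount), after which RequiredDecodeTimeMs() reports the max decode time seen across MAX_HISTORY_SIZE one-second buckets, i.e. a rolling 10 s window:

    webrtc::VCMCodecTimer timer;
    int64_t now_ms = 0;  // placeholder; would come from a webrtc::Clock
    for (int i = 0; i < 20; ++i) {
      int32_t decode_time_ms = (i == 12) ? 40 : 10;  // one slow frame
      timer.MaxFilter(decode_time_ms, now_ms + i * 33);
    }
    // Reports 40 here, the filtered max, until the slow sample ages out.
    int32_t required_ms = timer.RequiredDecodeTimeMs(webrtc::kVideoFrameDelta);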
diff --git a/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_decoder.cc b/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_decoder.cc
index 61ef80bbf1..6fee2e6f36 100644
--- a/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_decoder.cc
+++ b/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_decoder.cc
@@ -16,7 +16,7 @@
#include "libyuv/convert.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
-#include "webrtc/common_video/interface/video_frame_buffer.h"
+#include "webrtc/common_video/include/video_frame_buffer.h"
#include "webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_nalu.h"
#include "webrtc/video_frame.h"
@@ -106,8 +106,7 @@ namespace webrtc {
H264VideoToolboxDecoder::H264VideoToolboxDecoder()
: callback_(nullptr),
video_format_(nullptr),
- decompression_session_(nullptr) {
-}
+ decompression_session_(nullptr) {}
H264VideoToolboxDecoder::~H264VideoToolboxDecoder() {
DestroyDecompressionSession();
@@ -129,8 +128,7 @@ int H264VideoToolboxDecoder::Decode(
CMSampleBufferRef sample_buffer = nullptr;
if (!H264AnnexBBufferToCMSampleBuffer(input_image._buffer,
- input_image._length,
- video_format_,
+ input_image._length, video_format_,
&sample_buffer)) {
return WEBRTC_VIDEO_CODEC_ERROR;
}
@@ -206,11 +204,8 @@ int H264VideoToolboxDecoder::ResetDecompressionSession() {
int64_t nv12type = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
CFNumberRef pixel_format =
CFNumberCreate(nullptr, kCFNumberLongType, &nv12type);
- CFTypeRef values[attributes_size] = {
- kCFBooleanTrue,
- io_surface_value,
- pixel_format
- };
+ CFTypeRef values[attributes_size] = {kCFBooleanTrue, io_surface_value,
+ pixel_format};
CFDictionaryRef attributes =
internal::CreateCFDictionary(keys, values, attributes_size);
if (io_surface_value) {
@@ -266,6 +261,10 @@ void H264VideoToolboxDecoder::SetVideoFormat(
}
}
+const char* H264VideoToolboxDecoder::ImplementationName() const {
+ return "VideoToolbox";
+}
+
} // namespace webrtc
#endif // defined(WEBRTC_VIDEO_TOOLBOX_SUPPORTED)
diff --git a/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_decoder.h b/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_decoder.h
index f54ddb9efd..6d64307a82 100644
--- a/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_decoder.h
+++ b/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_decoder.h
@@ -45,6 +45,8 @@ class H264VideoToolboxDecoder : public H264Decoder {
int Reset() override;
+ const char* ImplementationName() const override;
+
private:
int ResetDecompressionSession();
void ConfigureDecompressionSession();
diff --git a/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.cc b/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.cc
index d677f8b812..7df4ec74ba 100644
--- a/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.cc
+++ b/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.cc
@@ -99,11 +99,7 @@ struct FrameEncodeParams {
int32_t h,
int64_t rtms,
uint32_t ts)
- : callback(cb),
- width(w),
- height(h),
- render_time_ms(rtms),
- timestamp(ts) {
+ : callback(cb), width(w), height(h), render_time_ms(rtms), timestamp(ts) {
if (csi) {
codec_specific_info = *csi;
} else {
@@ -146,9 +142,8 @@ bool CopyVideoFrameToPixelBuffer(const webrtc::VideoFrame& frame,
int ret = libyuv::I420ToNV12(
frame.buffer(webrtc::kYPlane), frame.stride(webrtc::kYPlane),
frame.buffer(webrtc::kUPlane), frame.stride(webrtc::kUPlane),
- frame.buffer(webrtc::kVPlane), frame.stride(webrtc::kVPlane),
- dst_y, dst_stride_y, dst_uv, dst_stride_uv,
- frame.width(), frame.height());
+ frame.buffer(webrtc::kVPlane), frame.stride(webrtc::kVPlane), dst_y,
+ dst_stride_y, dst_uv, dst_stride_uv, frame.width(), frame.height());
CVPixelBufferUnlockBaseAddress(pixel_buffer, 0);
if (ret) {
LOG(LS_ERROR) << "Error converting I420 VideoFrame to NV12 :" << ret;
@@ -188,10 +183,8 @@ void VTCompressionOutputCallback(void* encoder,
// TODO(tkchin): Allocate buffers through a pool.
rtc::scoped_ptr<rtc::Buffer> buffer(new rtc::Buffer());
rtc::scoped_ptr<webrtc::RTPFragmentationHeader> header;
- if (!H264CMSampleBufferToAnnexBBuffer(sample_buffer,
- is_keyframe,
- buffer.get(),
- header.accept())) {
+ if (!H264CMSampleBufferToAnnexBBuffer(sample_buffer, is_keyframe,
+ buffer.get(), header.accept())) {
return;
}
webrtc::EncodedImage frame(buffer->data(), buffer->size(), buffer->size());
@@ -215,8 +208,7 @@ void VTCompressionOutputCallback(void* encoder,
namespace webrtc {
H264VideoToolboxEncoder::H264VideoToolboxEncoder()
- : callback_(nullptr), compression_session_(nullptr) {
-}
+ : callback_(nullptr), compression_session_(nullptr) {}
H264VideoToolboxEncoder::~H264VideoToolboxEncoder() {
DestroyCompressionSession();
@@ -289,8 +281,8 @@ int H264VideoToolboxEncoder::Encode(
CMTimeMake(input_image.render_time_ms(), 1000);
CFDictionaryRef frame_properties = nullptr;
if (is_keyframe_required) {
- CFTypeRef keys[] = { kVTEncodeFrameOptionKey_ForceKeyFrame };
- CFTypeRef values[] = { kCFBooleanTrue };
+ CFTypeRef keys[] = {kVTEncodeFrameOptionKey_ForceKeyFrame};
+ CFTypeRef values[] = {kCFBooleanTrue};
frame_properties = internal::CreateCFDictionary(keys, values, 1);
}
rtc::scoped_ptr<internal::FrameEncodeParams> encode_params;
@@ -359,11 +351,8 @@ int H264VideoToolboxEncoder::ResetCompressionSession() {
int64_t nv12type = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
CFNumberRef pixel_format =
CFNumberCreate(nullptr, kCFNumberLongType, &nv12type);
- CFTypeRef values[attributes_size] = {
- kCFBooleanTrue,
- io_surface_value,
- pixel_format
- };
+ CFTypeRef values[attributes_size] = {kCFBooleanTrue, io_surface_value,
+ pixel_format};
CFDictionaryRef source_attributes =
internal::CreateCFDictionary(keys, values, attributes_size);
if (io_surface_value) {
@@ -376,15 +365,11 @@ int H264VideoToolboxEncoder::ResetCompressionSession() {
}
OSStatus status = VTCompressionSessionCreate(
nullptr, // use default allocator
- width_,
- height_,
- kCMVideoCodecType_H264,
+ width_, height_, kCMVideoCodecType_H264,
nullptr, // use default encoder
source_attributes,
nullptr, // use default compressed data allocator
- internal::VTCompressionOutputCallback,
- this,
- &compression_session_);
+ internal::VTCompressionOutputCallback, this, &compression_session_);
if (source_attributes) {
CFRelease(source_attributes);
source_attributes = nullptr;
@@ -434,6 +419,10 @@ void H264VideoToolboxEncoder::DestroyCompressionSession() {
}
}
+const char* H264VideoToolboxEncoder::ImplementationName() const {
+ return "VideoToolbox";
+}
+
} // namespace webrtc
#endif // defined(WEBRTC_VIDEO_TOOLBOX_SUPPORTED)
diff --git a/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.h b/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.h
index f4fb86fa04..269e0411b2 100644
--- a/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.h
+++ b/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.h
@@ -48,6 +48,8 @@ class H264VideoToolboxEncoder : public H264Encoder {
int Release() override;
+ const char* ImplementationName() const override;
+
private:
int ResetCompressionSession();
void ConfigureCompressionSession();
diff --git a/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_nalu.cc b/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_nalu.cc
index caca96d3d8..322c213f7b 100644
--- a/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_nalu.cc
+++ b/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_nalu.cc
@@ -154,11 +154,10 @@ bool H264CMSampleBufferToAnnexBBuffer(
return true;
}
-bool H264AnnexBBufferToCMSampleBuffer(
- const uint8_t* annexb_buffer,
- size_t annexb_buffer_size,
- CMVideoFormatDescriptionRef video_format,
- CMSampleBufferRef* out_sample_buffer) {
+bool H264AnnexBBufferToCMSampleBuffer(const uint8_t* annexb_buffer,
+ size_t annexb_buffer_size,
+ CMVideoFormatDescriptionRef video_format,
+ CMSampleBufferRef* out_sample_buffer) {
RTC_DCHECK(annexb_buffer);
RTC_DCHECK(out_sample_buffer);
*out_sample_buffer = nullptr;
diff --git a/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_nalu.h b/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_nalu.h
index 230dea94a0..31ef525816 100644
--- a/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_nalu.h
+++ b/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_nalu.h
@@ -9,8 +9,8 @@
*
*/
-#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_H264_H264_VIDEO_TOOLBOX_NALU_H
-#define WEBRTC_MODULES_VIDEO_CODING_CODECS_H264_H264_VIDEO_TOOLBOX_NALU_H
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_H264_H264_VIDEO_TOOLBOX_NALU_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_H264_H264_VIDEO_TOOLBOX_NALU_H_
#include "webrtc/modules/video_coding/codecs/h264/include/h264.h"
@@ -19,7 +19,7 @@
#include <CoreMedia/CoreMedia.h>
#include "webrtc/base/buffer.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
namespace webrtc {
@@ -39,11 +39,10 @@ bool H264CMSampleBufferToAnnexBBuffer(
// If |is_keyframe| is true then |video_format| is ignored since the format will
// be read from the buffer. Otherwise |video_format| must be provided.
// Caller is responsible for releasing the created sample buffer.
-bool H264AnnexBBufferToCMSampleBuffer(
- const uint8_t* annexb_buffer,
- size_t annexb_buffer_size,
- CMVideoFormatDescriptionRef video_format,
- CMSampleBufferRef* out_sample_buffer);
+bool H264AnnexBBufferToCMSampleBuffer(const uint8_t* annexb_buffer,
+ size_t annexb_buffer_size,
+ CMVideoFormatDescriptionRef video_format,
+ CMSampleBufferRef* out_sample_buffer);
// Helper class for reading NALUs from an RTP Annex B buffer.
class AnnexBBufferReader final {
@@ -97,4 +96,4 @@ class AvccBufferWriter final {
} // namespace webrtc
#endif // defined(WEBRTC_VIDEO_TOOLBOX_SUPPORTED)
-#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_H264_H264_VIDEO_TOOLBOX_NALU_H
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_H264_H264_VIDEO_TOOLBOX_NALU_H_
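For context on the reader/writer helpers in this header: Annex B delimits NAL units with start codes, which AnnexBBufferReader scans for. A self-contained sketch of the four-byte (00 00 00 01) form, deliberately simpler than the real class:

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Returns the payload offsets of NAL units that follow 4-byte Annex B
    // start codes. Sketch only; the real reader also tracks NALU lengths.
    std::vector<size_t> FindNaluOffsets(const uint8_t* buf, size_t size) {
      std::vector<size_t> offsets;
      for (size_t i = 0; i + 4 <= size; ++i) {
        if (buf[i] == 0 && buf[i + 1] == 0 && buf[i + 2] == 0 &&
            buf[i + 3] == 1) {
          offsets.push_back(i + 4);  // payload starts after the start code
        }
      }
      return offsets;
    }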
diff --git a/webrtc/modules/video_coding/codecs/h264/include/h264.h b/webrtc/modules/video_coding/codecs/h264/include/h264.h
index 3f52839a6c..50ca57c1c9 100644
--- a/webrtc/modules/video_coding/codecs/h264/include/h264.h
+++ b/webrtc/modules/video_coding/codecs/h264/include/h264.h
@@ -23,7 +23,7 @@
#endif // defined(WEBRTC_IOS) || defined(WEBRTC_MAC)
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
namespace webrtc {
diff --git a/webrtc/modules/video_coding/codecs/i420/i420.cc b/webrtc/modules/video_coding/codecs/i420/i420.cc
index cf546a07a1..7f06b4cf7d 100644
--- a/webrtc/modules/video_coding/codecs/i420/i420.cc
+++ b/webrtc/modules/video_coding/codecs/i420/i420.cc
@@ -21,20 +21,19 @@ const size_t kI420HeaderSize = 4;
namespace webrtc {
-I420Encoder::I420Encoder() : _inited(false), _encodedImage(),
- _encodedCompleteCallback(NULL) {
-}
+I420Encoder::I420Encoder()
+ : _inited(false), _encodedImage(), _encodedCompleteCallback(NULL) {}
I420Encoder::~I420Encoder() {
_inited = false;
- delete [] _encodedImage._buffer;
+ delete[] _encodedImage._buffer;
}
int I420Encoder::Release() {
// Should allocate an encoded frame and then release it here, for that we
// actually need an init flag.
if (_encodedImage._buffer != NULL) {
- delete [] _encodedImage._buffer;
+ delete[] _encodedImage._buffer;
_encodedImage._buffer = NULL;
}
_inited = false;
@@ -53,7 +52,7 @@ int I420Encoder::InitEncode(const VideoCodec* codecSettings,
// Allocating encoded memory.
if (_encodedImage._buffer != NULL) {
- delete [] _encodedImage._buffer;
+ delete[] _encodedImage._buffer;
_encodedImage._buffer = NULL;
_encodedImage._size = 0;
}
@@ -101,18 +100,18 @@ int I420Encoder::Encode(const VideoFrame& inputImage,
kI420HeaderSize;
if (_encodedImage._size > req_length) {
// Reallocate buffer.
- delete [] _encodedImage._buffer;
+ delete[] _encodedImage._buffer;
_encodedImage._buffer = new uint8_t[req_length];
_encodedImage._size = req_length;
}
- uint8_t *buffer = _encodedImage._buffer;
+ uint8_t* buffer = _encodedImage._buffer;
buffer = InsertHeader(buffer, width, height);
- int ret_length = ExtractBuffer(inputImage, req_length - kI420HeaderSize,
- buffer);
+ int ret_length =
+ ExtractBuffer(inputImage, req_length - kI420HeaderSize, buffer);
if (ret_length < 0)
return WEBRTC_VIDEO_CODEC_MEMORY;
_encodedImage._length = ret_length + kI420HeaderSize;
@@ -121,7 +120,8 @@ int I420Encoder::Encode(const VideoFrame& inputImage,
return WEBRTC_VIDEO_CODEC_OK;
}
-uint8_t* I420Encoder::InsertHeader(uint8_t *buffer, uint16_t width,
+uint8_t* I420Encoder::InsertHeader(uint8_t* buffer,
+ uint16_t width,
uint16_t height) {
*buffer++ = static_cast<uint8_t>(width >> 8);
*buffer++ = static_cast<uint8_t>(width & 0xFF);
@@ -130,30 +130,29 @@ uint8_t* I420Encoder::InsertHeader(uint8_t *buffer, uint16_t width,
return buffer;
}
-int
-I420Encoder::RegisterEncodeCompleteCallback(EncodedImageCallback* callback) {
+int I420Encoder::RegisterEncodeCompleteCallback(
+ EncodedImageCallback* callback) {
_encodedCompleteCallback = callback;
return WEBRTC_VIDEO_CODEC_OK;
}
-
-I420Decoder::I420Decoder() : _decodedImage(), _width(0), _height(0),
- _inited(false), _decodeCompleteCallback(NULL) {
-}
+I420Decoder::I420Decoder()
+ : _decodedImage(),
+ _width(0),
+ _height(0),
+ _inited(false),
+ _decodeCompleteCallback(NULL) {}
I420Decoder::~I420Decoder() {
Release();
}
-int
-I420Decoder::Reset() {
+int I420Decoder::Reset() {
return WEBRTC_VIDEO_CODEC_OK;
}
-
-int
-I420Decoder::InitDecode(const VideoCodec* codecSettings,
- int /*numberOfCores */) {
+int I420Decoder::InitDecode(const VideoCodec* codecSettings,
+ int /*numberOfCores */) {
if (codecSettings == NULL) {
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
} else if (codecSettings->width < 1 || codecSettings->height < 1) {
@@ -165,7 +164,8 @@ I420Decoder::InitDecode(const VideoCodec* codecSettings,
return WEBRTC_VIDEO_CODEC_OK;
}
-int I420Decoder::Decode(const EncodedImage& inputImage, bool /*missingFrames*/,
+int I420Decoder::Decode(const EncodedImage& inputImage,
+ bool /*missingFrames*/,
const RTPFragmentationHeader* /*fragmentation*/,
const CodecSpecificInfo* /*codecSpecificInfo*/,
int64_t /*renderTimeMs*/) {
@@ -203,8 +203,8 @@ int I420Decoder::Decode(const EncodedImage& inputImage, bool /*missingFrames*/,
}
// Set decoded image parameters.
int half_width = (_width + 1) / 2;
- _decodedImage.CreateEmptyFrame(_width, _height,
- _width, half_width, half_width);
+ _decodedImage.CreateEmptyFrame(_width, _height, _width, half_width,
+ half_width);
// Converting from buffer to plane representation.
int ret = ConvertToI420(kI420, buffer, 0, 0, _width, _height, 0,
kVideoRotation_0, &_decodedImage);
@@ -218,7 +218,8 @@ int I420Decoder::Decode(const EncodedImage& inputImage, bool /*missingFrames*/,
}
const uint8_t* I420Decoder::ExtractHeader(const uint8_t* buffer,
- uint16_t* width, uint16_t* height) {
+ uint16_t* width,
+ uint16_t* height) {
*width = static_cast<uint16_t>(*buffer++) << 8;
*width |= *buffer++;
*height = static_cast<uint16_t>(*buffer++) << 8;
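The 4-byte header InsertHeader() and ExtractHeader() agree on is simply width then height, each as a big-endian uint16_t. A self-contained illustration:

    #include <cstdint>

    // Mirrors the layout used above; e.g. 640x480 -> {0x02, 0x80, 0x01, 0xE0}.
    void WriteI420Header(uint8_t* buf, uint16_t width, uint16_t height) {
      buf[0] = static_cast<uint8_t>(width >> 8);
      buf[1] = static_cast<uint8_t>(width & 0xFF);
      buf[2] = static_cast<uint8_t>(height >> 8);
      buf[3] = static_cast<uint8_t>(height & 0xFF);
    }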
diff --git a/webrtc/modules/video_coding/codecs/i420/include/i420.h b/webrtc/modules/video_coding/codecs/i420/include/i420.h
index 8990ccf878..9f77845e96 100644
--- a/webrtc/modules/video_coding/codecs/i420/include/i420.h
+++ b/webrtc/modules/video_coding/codecs/i420/include/i420.h
@@ -8,12 +8,12 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_I420_MAIN_INTERFACE_I420_H_
-#define WEBRTC_MODULES_VIDEO_CODING_CODECS_I420_MAIN_INTERFACE_I420_H_
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_I420_INCLUDE_I420_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_I420_INCLUDE_I420_H_
#include <vector>
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -24,45 +24,45 @@ class I420Encoder : public VideoEncoder {
virtual ~I420Encoder();
-// Initialize the encoder with the information from the VideoCodec.
-//
-// Input:
-// - codecSettings : Codec settings.
-// - numberOfCores : Number of cores available for the encoder.
-// - maxPayloadSize : The maximum size each payload is allowed
-// to have. Usually MTU - overhead.
-//
-// Return value : WEBRTC_VIDEO_CODEC_OK if OK.
-// <0 - Error
+ // Initialize the encoder with the information from the VideoCodec.
+ //
+ // Input:
+ // - codecSettings : Codec settings.
+ // - numberOfCores : Number of cores available for the encoder.
+ // - maxPayloadSize : The maximum size each payload is allowed
+ // to have. Usually MTU - overhead.
+ //
+ // Return value : WEBRTC_VIDEO_CODEC_OK if OK.
+ // <0 - Error
int InitEncode(const VideoCodec* codecSettings,
int /*numberOfCores*/,
size_t /*maxPayloadSize*/) override;
-// "Encode" an I420 image (as a part of a video stream). The encoded image
-// will be returned to the user via the encode complete callback.
-//
-// Input:
-// - inputImage : Image to be encoded.
-// - codecSpecificInfo : Pointer to codec specific data.
-// - frameType : Frame type to be sent (Key /Delta).
-//
-// Return value : WEBRTC_VIDEO_CODEC_OK if OK.
-// <0 - Error
+ // "Encode" an I420 image (as a part of a video stream). The encoded image
+ // will be returned to the user via the encode complete callback.
+ //
+ // Input:
+ // - inputImage : Image to be encoded.
+ // - codecSpecificInfo : Pointer to codec specific data.
+ // - frameType : Frame type to be sent (Key /Delta).
+ //
+ // Return value : WEBRTC_VIDEO_CODEC_OK if OK.
+ // <0 - Error
int Encode(const VideoFrame& inputImage,
const CodecSpecificInfo* /*codecSpecificInfo*/,
const std::vector<FrameType>* /*frame_types*/) override;
-// Register an encode complete callback object.
-//
-// Input:
-// - callback : Callback object which handles encoded images.
-//
-// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+ // Register an encode complete callback object.
+ //
+ // Input:
+ // - callback : Callback object which handles encoded images.
+ //
+ // Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
int RegisterEncodeCompleteCallback(EncodedImageCallback* callback) override;
-// Free encoder memory.
-//
-// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+ // Free encoder memory.
+ //
+ // Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
int Release() override;
int SetRates(uint32_t /*newBitRate*/, uint32_t /*frameRate*/) override {
@@ -76,12 +76,13 @@ class I420Encoder : public VideoEncoder {
void OnDroppedFrame() override {}
private:
- static uint8_t* InsertHeader(uint8_t* buffer, uint16_t width,
+ static uint8_t* InsertHeader(uint8_t* buffer,
+ uint16_t width,
uint16_t height);
- bool _inited;
- EncodedImage _encodedImage;
- EncodedImageCallback* _encodedCompleteCallback;
+ bool _inited;
+ EncodedImage _encodedImage;
+ EncodedImageCallback* _encodedCompleteCallback;
}; // class I420Encoder
class I420Decoder : public VideoDecoder {
@@ -90,50 +91,50 @@ class I420Decoder : public VideoDecoder {
virtual ~I420Decoder();
-// Initialize the decoder.
-// The user must notify the codec of width and height values.
-//
-// Return value : WEBRTC_VIDEO_CODEC_OK.
-// <0 - Errors
+ // Initialize the decoder.
+ // The user must notify the codec of width and height values.
+ //
+ // Return value : WEBRTC_VIDEO_CODEC_OK.
+ // <0 - Errors
int InitDecode(const VideoCodec* codecSettings,
int /*numberOfCores*/) override;
-// Decode encoded image (as a part of a video stream). The decoded image
-// will be returned to the user through the decode complete callback.
-//
-// Input:
-// - inputImage : Encoded image to be decoded
-// - missingFrames : True if one or more frames have been lost
-// since the previous decode call.
-// - codecSpecificInfo : pointer to specific codec data
-// - renderTimeMs : Render time in Ms
-//
-// Return value : WEBRTC_VIDEO_CODEC_OK if OK
-// <0 - Error
+ // Decode encoded image (as a part of a video stream). The decoded image
+ // will be returned to the user through the decode complete callback.
+ //
+ // Input:
+ // - inputImage : Encoded image to be decoded
+ // - missingFrames : True if one or more frames have been lost
+ // since the previous decode call.
+ // - codecSpecificInfo : pointer to specific codec data
+ // - renderTimeMs : Render time in Ms
+ //
+ // Return value : WEBRTC_VIDEO_CODEC_OK if OK
+ // <0 - Error
int Decode(const EncodedImage& inputImage,
bool missingFrames,
const RTPFragmentationHeader* /*fragmentation*/,
const CodecSpecificInfo* /*codecSpecificInfo*/,
int64_t /*renderTimeMs*/) override;
-// Register a decode complete callback object.
-//
-// Input:
-// - callback : Callback object which handles decoded images.
-//
-// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+ // Register a decode complete callback object.
+ //
+ // Input:
+ // - callback : Callback object which handles decoded images.
+ //
+ // Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
int RegisterDecodeCompleteCallback(DecodedImageCallback* callback) override;
-// Free decoder memory.
-//
-// Return value : WEBRTC_VIDEO_CODEC_OK if OK.
-// <0 - Error
+ // Free decoder memory.
+ //
+ // Return value : WEBRTC_VIDEO_CODEC_OK if OK.
+ // <0 - Error
int Release() override;
-// Reset decoder state and prepare for a new call.
-//
-// Return value : WEBRTC_VIDEO_CODEC_OK.
-// <0 - Error
+ // Reset decoder state and prepare for a new call.
+ //
+ // Return value : WEBRTC_VIDEO_CODEC_OK.
+ // <0 - Error
int Reset() override;
private:
@@ -142,12 +143,12 @@ class I420Decoder : public VideoDecoder {
uint16_t* height);
VideoFrame _decodedImage;
- int _width;
- int _height;
- bool _inited;
- DecodedImageCallback* _decodeCompleteCallback;
+ int _width;
+ int _height;
+ bool _inited;
+ DecodedImageCallback* _decodeCompleteCallback;
}; // class I420Decoder
} // namespace webrtc
-#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_I420_MAIN_INTERFACE_I420_H_
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_I420_INCLUDE_I420_H_
diff --git a/webrtc/modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h b/webrtc/modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h
index 6c926d4794..d727e896ad 100644
--- a/webrtc/modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h
+++ b/webrtc/modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h
@@ -11,27 +11,32 @@
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_MOCK_MOCK_VIDEO_CODEC_INTERFACE_H_
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_MOCK_MOCK_VIDEO_CODEC_INTERFACE_H_
+#pragma message("WARNING: video_coding/codecs/interface is DEPRECATED; "
+ "use video_coding/include")
#include <string>
+#include <vector>
#include "testing/gmock/include/gmock/gmock.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/typedefs.h"
namespace webrtc {
class MockEncodedImageCallback : public EncodedImageCallback {
public:
- MOCK_METHOD3(Encoded, int32_t(const EncodedImage& encodedImage,
- const CodecSpecificInfo* codecSpecificInfo,
- const RTPFragmentationHeader* fragmentation));
+ MOCK_METHOD3(Encoded,
+ int32_t(const EncodedImage& encodedImage,
+ const CodecSpecificInfo* codecSpecificInfo,
+ const RTPFragmentationHeader* fragmentation));
};
class MockVideoEncoder : public VideoEncoder {
public:
- MOCK_CONST_METHOD2(Version, int32_t(int8_t *version, int32_t length));
- MOCK_METHOD3(InitEncode, int32_t(const VideoCodec* codecSettings,
- int32_t numberOfCores,
- size_t maxPayloadSize));
+ MOCK_CONST_METHOD2(Version, int32_t(int8_t* version, int32_t length));
+ MOCK_METHOD3(InitEncode,
+ int32_t(const VideoCodec* codecSettings,
+ int32_t numberOfCores,
+ size_t maxPayloadSize));
MOCK_METHOD3(Encode,
int32_t(const VideoFrame& inputImage,
const CodecSpecificInfo* codecSpecificInfo,
@@ -47,22 +52,24 @@ class MockVideoEncoder : public VideoEncoder {
class MockDecodedImageCallback : public DecodedImageCallback {
public:
- MOCK_METHOD1(Decoded, int32_t(VideoFrame& decodedImage));
+ MOCK_METHOD1(Decoded, int32_t(const VideoFrame& decodedImage));
+ MOCK_METHOD2(Decoded,
+ int32_t(const VideoFrame& decodedImage, int64_t decode_time_ms));
MOCK_METHOD1(ReceivedDecodedReferenceFrame,
int32_t(const uint64_t pictureId));
- MOCK_METHOD1(ReceivedDecodedFrame,
- int32_t(const uint64_t pictureId));
+ MOCK_METHOD1(ReceivedDecodedFrame, int32_t(const uint64_t pictureId));
};
class MockVideoDecoder : public VideoDecoder {
public:
- MOCK_METHOD2(InitDecode, int32_t(const VideoCodec* codecSettings,
- int32_t numberOfCores));
- MOCK_METHOD5(Decode, int32_t(const EncodedImage& inputImage,
- bool missingFrames,
- const RTPFragmentationHeader* fragmentation,
- const CodecSpecificInfo* codecSpecificInfo,
- int64_t renderTimeMs));
+ MOCK_METHOD2(InitDecode,
+ int32_t(const VideoCodec* codecSettings, int32_t numberOfCores));
+ MOCK_METHOD5(Decode,
+ int32_t(const EncodedImage& inputImage,
+ bool missingFrames,
+ const RTPFragmentationHeader* fragmentation,
+ const CodecSpecificInfo* codecSpecificInfo,
+ int64_t renderTimeMs));
MOCK_METHOD1(RegisterDecodeCompleteCallback,
int32_t(DecodedImageCallback* callback));
MOCK_METHOD0(Release, int32_t());
diff --git a/webrtc/modules/video_coding/codecs/interface/video_codec_interface.h b/webrtc/modules/video_coding/codecs/interface/video_codec_interface.h
index 6363ab7332..6bcfa909bd 100644
--- a/webrtc/modules/video_coding/codecs/interface/video_codec_interface.h
+++ b/webrtc/modules/video_coding/codecs/interface/video_codec_interface.h
@@ -8,23 +8,24 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_CODEC_INTERFACE_H
-#define WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_CODEC_INTERFACE_H
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_CODEC_INTERFACE_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_CODEC_INTERFACE_H_
+#pragma message("WARNING: video_coding/codecs/interface is DEPRECATED; "
+ "use video_coding/include")
#include <vector>
#include "webrtc/common_types.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_error_codes.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/video_coding/include/video_error_codes.h"
#include "webrtc/typedefs.h"
#include "webrtc/video_decoder.h"
#include "webrtc/video_encoder.h"
#include "webrtc/video_frame.h"
-namespace webrtc
-{
+namespace webrtc {
-class RTPFragmentationHeader; // forward declaration
+class RTPFragmentationHeader; // forward declaration
// Note: if any pointers are added to this struct, it must be fitted
// with a copy-constructor. See below.
@@ -68,6 +69,10 @@ struct CodecSpecificInfoVP9 {
uint16_t width[kMaxVp9NumberOfSpatialLayers];
uint16_t height[kMaxVp9NumberOfSpatialLayers];
GofInfoVP9 gof;
+
+ // Frame reference data.
+ uint8_t num_ref_pics;
+ uint8_t p_diff[kMaxVp9RefPics];
};
struct CodecSpecificInfoGeneric {
@@ -86,12 +91,11 @@ union CodecSpecificInfoUnion {
// Note: if any pointers are added to this struct or its sub-structs, it
// must be fitted with a copy-constructor. This is because it is copied
// in the copy-constructor of VCMEncodedFrame.
-struct CodecSpecificInfo
-{
- VideoCodecType codecType;
- CodecSpecificInfoUnion codecSpecific;
+struct CodecSpecificInfo {
+ VideoCodecType codecType;
+ CodecSpecificInfoUnion codecSpecific;
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_CODEC_INTERFACE_H
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_CODEC_INTERFACE_H_
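A sketch of how an encoder wrapper might populate the VP9 reference fields added in this hunk; the values are illustrative only:

    webrtc::CodecSpecificInfo info;
    info.codecType = webrtc::kVideoCodecVP9;
    webrtc::CodecSpecificInfoVP9& vp9 = info.codecSpecific.VP9;
    // (other VP9 fields omitted for brevity)
    vp9.num_ref_pics = 1;
    vp9.p_diff[0] = 1;  // this frame references the immediately preceding one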
diff --git a/webrtc/modules/video_coding/codecs/interface/video_error_codes.h b/webrtc/modules/video_coding/codecs/interface/video_error_codes.h
index 28e5a32d43..ea8829df80 100644
--- a/webrtc/modules/video_coding/codecs/interface/video_error_codes.h
+++ b/webrtc/modules/video_coding/codecs/interface/video_error_codes.h
@@ -8,8 +8,11 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_ERROR_CODES_H
-#define WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_ERROR_CODES_H
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_ERROR_CODES_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_ERROR_CODES_H_
+
+#pragma message("WARNING: video_coding/codecs/interface is DEPRECATED; "
+ "use video_coding/include")
// NOTE: in sync with video_coding_module_defines.h
@@ -29,4 +32,4 @@
#define WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE -13
#define WEBRTC_VIDEO_CODEC_TARGET_BITRATE_OVERSHOOT -14
-#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_ERROR_CODES_H
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_ERROR_CODES_H_
diff --git a/webrtc/modules/video_coding/codecs/test/packet_manipulator.cc b/webrtc/modules/video_coding/codecs/test/packet_manipulator.cc
index 36ba0e8272..b554b4e9ae 100644
--- a/webrtc/modules/video_coding/codecs/test/packet_manipulator.cc
+++ b/webrtc/modules/video_coding/codecs/test/packet_manipulator.cc
@@ -57,7 +57,7 @@ int PacketManipulatorImpl::ManipulatePackets(
active_burst_packets_--;
nbr_packets_dropped++;
} else if (RandomUniform() < config_.packet_loss_probability ||
- packet_loss_has_occurred) {
+ packet_loss_has_occurred) {
packet_loss_has_occurred = true;
nbr_packets_dropped++;
if (config_.packet_loss_mode == kBurst) {
@@ -91,9 +91,9 @@ inline double PacketManipulatorImpl::RandomUniform() {
// get the same behavior as long as we're using a fixed initial seed.
critsect_->Enter();
srand(random_seed_);
- random_seed_ = rand();
+ random_seed_ = rand(); // NOLINT (rand_r instead of rand)
critsect_->Leave();
- return (random_seed_ + 1.0)/(RAND_MAX + 1.0);
+ return (random_seed_ + 1.0) / (RAND_MAX + 1.0);
}
const char* PacketLossModeToStr(PacketLossMode e) {
@@ -109,4 +109,4 @@ const char* PacketLossModeToStr(PacketLossMode e) {
}
} // namespace test
-} // namespace webrtcc
+} // namespace webrtc
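Driving the manipulator in a test, sketched with fixture-owned placeholders (packet_reader, encoded_image):

    // Sketch only: uniform ~10% packet loss.
    webrtc::test::NetworkingConfig config;
    config.packet_loss_mode = webrtc::test::kUniform;
    config.packet_loss_probability = 0.1;
    // The config is held by const reference, so it must outlive the
    // manipulator.
    webrtc::test::PacketManipulatorImpl manipulator(&packet_reader, config,
                                                    false /* verbose */);
    int dropped = manipulator.ManipulatePackets(&encoded_image);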
diff --git a/webrtc/modules/video_coding/codecs/test/packet_manipulator.h b/webrtc/modules/video_coding/codecs/test/packet_manipulator.h
index 16a9dc22ef..3334be072b 100644
--- a/webrtc/modules/video_coding/codecs/test/packet_manipulator.h
+++ b/webrtc/modules/video_coding/codecs/test/packet_manipulator.h
@@ -13,7 +13,7 @@
#include <stdlib.h>
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/test/testsupport/packet_reader.h"
@@ -36,10 +36,11 @@ const char* PacketLossModeToStr(PacketLossMode e);
// scenarios caused by network interference.
struct NetworkingConfig {
NetworkingConfig()
- : packet_size_in_bytes(1500), max_payload_size_in_bytes(1440),
- packet_loss_mode(kUniform), packet_loss_probability(0.0),
- packet_loss_burst_length(1) {
- }
+ : packet_size_in_bytes(1500),
+ max_payload_size_in_bytes(1440),
+ packet_loss_mode(kUniform),
+ packet_loss_probability(0.0),
+ packet_loss_burst_length(1) {}
// Packet size in bytes. Default: 1500 bytes.
size_t packet_size_in_bytes;
@@ -93,9 +94,11 @@ class PacketManipulatorImpl : public PacketManipulator {
virtual ~PacketManipulatorImpl();
int ManipulatePackets(webrtc::EncodedImage* encoded_image) override;
virtual void InitializeRandomSeed(unsigned int seed);
+
protected:
// Returns a uniformly distributed random value between 0.0 and 1.0
virtual double RandomUniform();
+
private:
PacketReader* packet_reader_;
const NetworkingConfig& config_;
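The reflowed NetworkingConfig constructor keeps the documented defaults: 1500-byte packets (a typical Ethernet MTU), 1440-byte max payload, uniform loss with probability 0.0 and burst length 1. A hedged usage sketch with a trimmed stand-in struct; the burst interpretation in the comments is inferred from the ManipulatePackets() logic earlier, not stated by the header:

    #include <cstddef>

    enum PacketLossMode { kUniform, kBurst };

    struct NetworkingConfig {  // trimmed stand-in for the real struct
      size_t packet_size_in_bytes = 1500;
      size_t max_payload_size_in_bytes = 1440;
      PacketLossMode packet_loss_mode = kUniform;
      double packet_loss_probability = 0.0;
      int packet_loss_burst_length = 1;
    };

    int main() {
      NetworkingConfig config;                // defaults: no loss
      config.packet_loss_mode = kBurst;       // drop packets in runs
      config.packet_loss_probability = 0.05;  // a run starts ~5% of the time
      config.packet_loss_burst_length = 3;    // and then covers 3 packets
    }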
diff --git a/webrtc/modules/video_coding/codecs/test/packet_manipulator_unittest.cc b/webrtc/modules/video_coding/codecs/test/packet_manipulator_unittest.cc
index ace7bc0507..8c3d30dc0d 100644
--- a/webrtc/modules/video_coding/codecs/test/packet_manipulator_unittest.cc
+++ b/webrtc/modules/video_coding/codecs/test/packet_manipulator_unittest.cc
@@ -13,7 +13,7 @@
#include <queue>
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/modules/video_coding/codecs/test/predictive_packet_manipulator.h"
#include "webrtc/test/testsupport/unittest_utils.h"
#include "webrtc/typedefs.h"
@@ -25,7 +25,7 @@ const double kNeverDropProbability = 0.0;
const double kAlwaysDropProbability = 1.0;
const int kBurstLength = 1;
-class PacketManipulatorTest: public PacketRelatedTest {
+class PacketManipulatorTest : public PacketRelatedTest {
protected:
PacketReader packet_reader_;
EncodedImage image_;
@@ -50,19 +50,15 @@ class PacketManipulatorTest: public PacketRelatedTest {
virtual ~PacketManipulatorTest() {}
- void SetUp() {
- PacketRelatedTest::SetUp();
- }
+ void SetUp() { PacketRelatedTest::SetUp(); }
- void TearDown() {
- PacketRelatedTest::TearDown();
- }
+ void TearDown() { PacketRelatedTest::TearDown(); }
void VerifyPacketLoss(int expected_nbr_packets_dropped,
int actual_nbr_packets_dropped,
size_t expected_packet_data_length,
uint8_t* expected_packet_data,
- EncodedImage& actual_image) {
+ const EncodedImage& actual_image) {
EXPECT_EQ(expected_nbr_packets_dropped, actual_nbr_packets_dropped);
EXPECT_EQ(expected_packet_data_length, image_._length);
EXPECT_EQ(0, memcmp(expected_packet_data, actual_image._buffer,
@@ -75,10 +71,10 @@ TEST_F(PacketManipulatorTest, Constructor) {
}
TEST_F(PacketManipulatorTest, DropNone) {
- PacketManipulatorImpl manipulator(&packet_reader_, no_drop_config_, false);
+ PacketManipulatorImpl manipulator(&packet_reader_, no_drop_config_, false);
int nbr_packets_dropped = manipulator.ManipulatePackets(&image_);
- VerifyPacketLoss(0, nbr_packets_dropped, kPacketDataLength,
- packet_data_, image_);
+ VerifyPacketLoss(0, nbr_packets_dropped, kPacketDataLength, packet_data_,
+ image_);
}
TEST_F(PacketManipulatorTest, UniformDropNoneSmallFrame) {
@@ -87,15 +83,14 @@ TEST_F(PacketManipulatorTest, UniformDropNoneSmallFrame) {
PacketManipulatorImpl manipulator(&packet_reader_, no_drop_config_, false);
int nbr_packets_dropped = manipulator.ManipulatePackets(&image_);
- VerifyPacketLoss(0, nbr_packets_dropped, data_length,
- packet_data_, image_);
+ VerifyPacketLoss(0, nbr_packets_dropped, data_length, packet_data_, image_);
}
TEST_F(PacketManipulatorTest, UniformDropAll) {
PacketManipulatorImpl manipulator(&packet_reader_, drop_config_, false);
int nbr_packets_dropped = manipulator.ManipulatePackets(&image_);
- VerifyPacketLoss(kPacketDataNumberOfPackets, nbr_packets_dropped,
- 0, packet_data_, image_);
+ VerifyPacketLoss(kPacketDataNumberOfPackets, nbr_packets_dropped, 0,
+ packet_data_, image_);
}
// Use our customized test class to force the second packet to be lost
diff --git a/webrtc/modules/video_coding/codecs/test/predictive_packet_manipulator.cc b/webrtc/modules/video_coding/codecs/test/predictive_packet_manipulator.cc
index c92cfa48a7..9eba205a88 100644
--- a/webrtc/modules/video_coding/codecs/test/predictive_packet_manipulator.cc
+++ b/webrtc/modules/video_coding/codecs/test/predictive_packet_manipulator.cc
@@ -19,13 +19,11 @@ namespace webrtc {
namespace test {
PredictivePacketManipulator::PredictivePacketManipulator(
- PacketReader* packet_reader, const NetworkingConfig& config)
- : PacketManipulatorImpl(packet_reader, config, false) {
-}
-
-PredictivePacketManipulator::~PredictivePacketManipulator() {
-}
+ PacketReader* packet_reader,
+ const NetworkingConfig& config)
+ : PacketManipulatorImpl(packet_reader, config, false) {}
+PredictivePacketManipulator::~PredictivePacketManipulator() {}
void PredictivePacketManipulator::AddRandomResult(double result) {
assert(result >= 0.0 && result <= 1.0);
@@ -33,8 +31,9 @@ void PredictivePacketManipulator::AddRandomResult(double result) {
}
double PredictivePacketManipulator::RandomUniform() {
- if(random_results_.size() == 0u) {
- fprintf(stderr, "No more stored results, please make sure AddRandomResult()"
+ if (random_results_.size() == 0u) {
+ fprintf(stderr,
+ "No more stored results, please make sure AddRandomResult()"
"is called same amount of times you're going to invoke the "
"RandomUniform() function, i.e. once per packet.\n");
assert(false);
@@ -45,4 +44,4 @@ double PredictivePacketManipulator::RandomUniform() {
}
} // namespace test
-} // namespace webrtcc
+} // namespace webrtc
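PredictivePacketManipulator overrides RandomUniform() to pop caller-supplied values from a FIFO instead of drawing randomly, so a test can decide exactly which packets get "lost". The mechanism in isolation (names illustrative; the real class also prints the error message reflowed above):

    #include <cassert>
    #include <queue>

    // Test double: hands back pre-loaded "random" values in FIFO order.
    class PredictableRandom {
     public:
      void AddResult(double result) {
        assert(result >= 0.0 && result <= 1.0);
        results_.push(result);
      }
      double Next() {
        assert(!results_.empty());  // one AddResult() per expected draw
        double r = results_.front();
        results_.pop();
        return r;
      }

     private:
      std::queue<double> results_;
    };

    // With loss probability 0.5: 0.3 forces a drop, 0.9 forces a keep.
    int main() {
      PredictableRandom rng;
      rng.AddResult(0.3);
      rng.AddResult(0.9);
      assert(rng.Next() < 0.5);     // first packet dropped
      assert(!(rng.Next() < 0.5));  // second packet kept
    }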
diff --git a/webrtc/modules/video_coding/codecs/test/predictive_packet_manipulator.h b/webrtc/modules/video_coding/codecs/test/predictive_packet_manipulator.h
index 082712d870..45c7848c67 100644
--- a/webrtc/modules/video_coding/codecs/test/predictive_packet_manipulator.h
+++ b/webrtc/modules/video_coding/codecs/test/predictive_packet_manipulator.h
@@ -31,6 +31,7 @@ class PredictivePacketManipulator : public PacketManipulatorImpl {
// FIFO queue so they will be returned in the same order they were added.
// Result parameter must be 0.0 to 1.0.
void AddRandomResult(double result);
+
protected:
// Returns a uniformly distributed random value between 0.0 and 1.0
double RandomUniform() override;
diff --git a/webrtc/modules/video_coding/codecs/test/stats.cc b/webrtc/modules/video_coding/codecs/test/stats.cc
index f87407d223..478b2f4901 100644
--- a/webrtc/modules/video_coding/codecs/test/stats.cc
+++ b/webrtc/modules/video_coding/codecs/test/stats.cc
@@ -39,19 +39,19 @@ Stats::Stats() {}
Stats::~Stats() {}
bool LessForEncodeTime(const FrameStatistic& s1, const FrameStatistic& s2) {
- return s1.encode_time_in_us < s2.encode_time_in_us;
+ return s1.encode_time_in_us < s2.encode_time_in_us;
}
bool LessForDecodeTime(const FrameStatistic& s1, const FrameStatistic& s2) {
- return s1.decode_time_in_us < s2.decode_time_in_us;
+ return s1.decode_time_in_us < s2.decode_time_in_us;
}
bool LessForEncodedSize(const FrameStatistic& s1, const FrameStatistic& s2) {
- return s1.encoded_frame_length_in_bytes < s2.encoded_frame_length_in_bytes;
+ return s1.encoded_frame_length_in_bytes < s2.encoded_frame_length_in_bytes;
}
bool LessForBitRate(const FrameStatistic& s1, const FrameStatistic& s2) {
- return s1.bit_rate_in_kbps < s2.bit_rate_in_kbps;
+ return s1.bit_rate_in_kbps < s2.bit_rate_in_kbps;
}
FrameStatistic& Stats::NewFrame(int frame_number) {
@@ -78,8 +78,7 @@ void Stats::PrintSummary() {
size_t nbr_keyframes = 0;
size_t nbr_nonkeyframes = 0;
- for (FrameStatisticsIterator it = stats_.begin();
- it != stats_.end(); ++it) {
+ for (FrameStatisticsIterator it = stats_.begin(); it != stats_.end(); ++it) {
total_encoding_time_in_us += it->encode_time_in_us;
total_decoding_time_in_us += it->decode_time_in_us;
total_encoded_frames_lengths += it->encoded_frame_length_in_bytes;
@@ -96,15 +95,13 @@ void Stats::PrintSummary() {
// ENCODING
printf("Encoding time:\n");
- frame = std::min_element(stats_.begin(),
- stats_.end(), LessForEncodeTime);
- printf(" Min : %7d us (frame %d)\n",
- frame->encode_time_in_us, frame->frame_number);
+ frame = std::min_element(stats_.begin(), stats_.end(), LessForEncodeTime);
+ printf(" Min : %7d us (frame %d)\n", frame->encode_time_in_us,
+ frame->frame_number);
- frame = std::max_element(stats_.begin(),
- stats_.end(), LessForEncodeTime);
- printf(" Max : %7d us (frame %d)\n",
- frame->encode_time_in_us, frame->frame_number);
+ frame = std::max_element(stats_.begin(), stats_.end(), LessForEncodeTime);
+ printf(" Max : %7d us (frame %d)\n", frame->encode_time_in_us,
+ frame->frame_number);
printf(" Average : %7d us\n",
static_cast<int>(total_encoding_time_in_us / stats_.size()));
@@ -115,7 +112,7 @@ void Stats::PrintSummary() {
// failures)
std::vector<FrameStatistic> decoded_frames;
for (std::vector<FrameStatistic>::iterator it = stats_.begin();
- it != stats_.end(); ++it) {
+ it != stats_.end(); ++it) {
if (it->decoding_successful) {
decoded_frames.push_back(*it);
}
@@ -123,15 +120,15 @@ void Stats::PrintSummary() {
if (decoded_frames.size() == 0) {
printf("No successfully decoded frames exist in this statistics.\n");
} else {
- frame = std::min_element(decoded_frames.begin(),
- decoded_frames.end(), LessForDecodeTime);
- printf(" Min : %7d us (frame %d)\n",
- frame->decode_time_in_us, frame->frame_number);
+ frame = std::min_element(decoded_frames.begin(), decoded_frames.end(),
+ LessForDecodeTime);
+ printf(" Min : %7d us (frame %d)\n", frame->decode_time_in_us,
+ frame->frame_number);
- frame = std::max_element(decoded_frames.begin(),
- decoded_frames.end(), LessForDecodeTime);
- printf(" Max : %7d us (frame %d)\n",
- frame->decode_time_in_us, frame->frame_number);
+ frame = std::max_element(decoded_frames.begin(), decoded_frames.end(),
+ LessForDecodeTime);
+ printf(" Max : %7d us (frame %d)\n", frame->decode_time_in_us,
+ frame->frame_number);
printf(" Average : %7d us\n",
static_cast<int>(total_decoding_time_in_us / decoded_frames.size()));
@@ -141,13 +138,11 @@ void Stats::PrintSummary() {
// SIZE
printf("Frame sizes:\n");
- frame = std::min_element(stats_.begin(),
- stats_.end(), LessForEncodedSize);
+ frame = std::min_element(stats_.begin(), stats_.end(), LessForEncodedSize);
printf(" Min : %7" PRIuS " bytes (frame %d)\n",
frame->encoded_frame_length_in_bytes, frame->frame_number);
- frame = std::max_element(stats_.begin(),
- stats_.end(), LessForEncodedSize);
+ frame = std::max_element(stats_.begin(), stats_.end(), LessForEncodedSize);
printf(" Max : %7" PRIuS " bytes (frame %d)\n",
frame->encoded_frame_length_in_bytes, frame->frame_number);
@@ -167,21 +162,17 @@ void Stats::PrintSummary() {
// BIT RATE
printf("Bit rates:\n");
- frame = std::min_element(stats_.begin(),
- stats_.end(), LessForBitRate);
- printf(" Min bit rate: %7d kbps (frame %d)\n",
- frame->bit_rate_in_kbps, frame->frame_number);
+ frame = std::min_element(stats_.begin(), stats_.end(), LessForBitRate);
+ printf(" Min bit rate: %7d kbps (frame %d)\n", frame->bit_rate_in_kbps,
+ frame->frame_number);
- frame = std::max_element(stats_.begin(),
- stats_.end(), LessForBitRate);
- printf(" Max bit rate: %7d kbps (frame %d)\n",
- frame->bit_rate_in_kbps, frame->frame_number);
+ frame = std::max_element(stats_.begin(), stats_.end(), LessForBitRate);
+ printf(" Max bit rate: %7d kbps (frame %d)\n", frame->bit_rate_in_kbps,
+ frame->frame_number);
printf("\n");
- printf("Total encoding time : %7d ms.\n",
- total_encoding_time_in_us / 1000);
- printf("Total decoding time : %7d ms.\n",
- total_decoding_time_in_us / 1000);
+ printf("Total encoding time : %7d ms.\n", total_encoding_time_in_us / 1000);
+ printf("Total decoding time : %7d ms.\n", total_decoding_time_in_us / 1000);
printf("Total processing time: %7d ms.\n",
(total_encoding_time_in_us + total_decoding_time_in_us) / 1000);
}
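The summary code above pairs one comparator per metric with std::min_element/std::max_element over the same vector, rather than sorting. The pattern in a compilable nutshell:

    #include <algorithm>
    #include <cstdio>
    #include <vector>

    struct FrameStatistic {  // trimmed stand-in
      int frame_number;
      int encode_time_in_us;
    };

    bool LessForEncodeTime(const FrameStatistic& s1, const FrameStatistic& s2) {
      return s1.encode_time_in_us < s2.encode_time_in_us;
    }

    int main() {
      std::vector<FrameStatistic> stats = {{0, 120}, {1, 95}, {2, 140}};
      auto min_it = std::min_element(stats.begin(), stats.end(), LessForEncodeTime);
      auto max_it = std::max_element(stats.begin(), stats.end(), LessForEncodeTime);
      printf("Min: %d us (frame %d)\n", min_it->encode_time_in_us,
             min_it->frame_number);
      printf("Max: %d us (frame %d)\n", max_it->encode_time_in_us,
             max_it->frame_number);
    }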
diff --git a/webrtc/modules/video_coding/codecs/test/stats.h b/webrtc/modules/video_coding/codecs/test/stats.h
index 83ba108bb7..9092631ca1 100644
--- a/webrtc/modules/video_coding/codecs/test/stats.h
+++ b/webrtc/modules/video_coding/codecs/test/stats.h
@@ -13,7 +13,7 @@
#include <vector>
-#include "webrtc/common_video/interface/video_image.h"
+#include "webrtc/common_video/include/video_image.h"
namespace webrtc {
namespace test {
diff --git a/webrtc/modules/video_coding/codecs/test/stats_unittest.cc b/webrtc/modules/video_coding/codecs/test/stats_unittest.cc
index a2d27e71d6..0403ccfdb3 100644
--- a/webrtc/modules/video_coding/codecs/test/stats_unittest.cc
+++ b/webrtc/modules/video_coding/codecs/test/stats_unittest.cc
@@ -16,21 +16,15 @@
namespace webrtc {
namespace test {
-class StatsTest: public testing::Test {
+class StatsTest : public testing::Test {
protected:
- StatsTest() {
- }
+ StatsTest() {}
- virtual ~StatsTest() {
- }
+ virtual ~StatsTest() {}
- void SetUp() {
- stats_ = new Stats();
- }
+ void SetUp() { stats_ = new Stats(); }
- void TearDown() {
- delete stats_;
- }
+ void TearDown() { delete stats_; }
Stats* stats_;
};
diff --git a/webrtc/modules/video_coding/codecs/test/videoprocessor.cc b/webrtc/modules/video_coding/codecs/test/videoprocessor.cc
index c814dfe0e7..7376000bd5 100644
--- a/webrtc/modules/video_coding/codecs/test/videoprocessor.cc
+++ b/webrtc/modules/video_coding/codecs/test/videoprocessor.cc
@@ -93,14 +93,18 @@ bool VideoProcessorImpl::Init() {
int32_t register_result =
encoder_->RegisterEncodeCompleteCallback(encode_callback_);
if (register_result != WEBRTC_VIDEO_CODEC_OK) {
- fprintf(stderr, "Failed to register encode complete callback, return code: "
- "%d\n", register_result);
+ fprintf(stderr,
+ "Failed to register encode complete callback, return code: "
+ "%d\n",
+ register_result);
return false;
}
register_result = decoder_->RegisterDecodeCompleteCallback(decode_callback_);
if (register_result != WEBRTC_VIDEO_CODEC_OK) {
- fprintf(stderr, "Failed to register decode complete callback, return code: "
- "%d\n", register_result);
+ fprintf(stderr,
+ "Failed to register decode complete callback, return code: "
+ "%d\n",
+ register_result);
return false;
}
// Init the encoder and decoder
@@ -146,13 +150,14 @@ VideoProcessorImpl::~VideoProcessorImpl() {
delete decode_callback_;
}
-
void VideoProcessorImpl::SetRates(int bit_rate, int frame_rate) {
int set_rates_result = encoder_->SetRates(bit_rate, frame_rate);
assert(set_rates_result >= 0);
if (set_rates_result < 0) {
- fprintf(stderr, "Failed to update encoder with new rate %d, "
- "return code: %d\n", bit_rate, set_rates_result);
+ fprintf(stderr,
+ "Failed to update encoder with new rate %d, "
+ "return code: %d\n",
+ bit_rate, set_rates_result);
}
num_dropped_frames_ = 0;
num_spatial_resizes_ = 0;
@@ -175,7 +180,7 @@ int VideoProcessorImpl::NumberSpatialResizes() {
}
bool VideoProcessorImpl::ProcessFrame(int frame_number) {
- assert(frame_number >=0);
+ assert(frame_number >= 0);
if (!initialized_) {
fprintf(stderr, "Attempting to use uninitialized VideoProcessor!\n");
return false;
@@ -186,10 +191,8 @@ bool VideoProcessorImpl::ProcessFrame(int frame_number) {
}
if (frame_reader_->ReadFrame(source_buffer_)) {
// Copy the source frame to the newly read frame data.
- source_frame_.CreateFrame(source_buffer_,
- config_.codec_settings->width,
- config_.codec_settings->height,
- kVideoRotation_0);
+ source_frame_.CreateFrame(source_buffer_, config_.codec_settings->width,
+ config_.codec_settings->height, kVideoRotation_0);
// Ensure we have a new statistics data object we can fill:
FrameStatistic& stat = stats_->NewFrame(frame_number);
@@ -224,10 +227,10 @@ bool VideoProcessorImpl::ProcessFrame(int frame_number) {
void VideoProcessorImpl::FrameEncoded(const EncodedImage& encoded_image) {
// Timestamp is frame number, so this gives us #dropped frames.
- int num_dropped_from_prev_encode = encoded_image._timeStamp -
- prev_time_stamp_ - 1;
- num_dropped_frames_ += num_dropped_from_prev_encode;
- prev_time_stamp_ = encoded_image._timeStamp;
+ int num_dropped_from_prev_encode =
+ encoded_image._timeStamp - prev_time_stamp_ - 1;
+ num_dropped_frames_ += num_dropped_from_prev_encode;
+ prev_time_stamp_ = encoded_image._timeStamp;
if (num_dropped_from_prev_encode > 0) {
// For dropped frames, we write out the last decoded frame to avoid getting
// out of sync for the computation of PSNR and SSIM.
@@ -244,15 +247,16 @@ void VideoProcessorImpl::FrameEncoded(const EncodedImage& encoded_image) {
TickTime encode_stop = TickTime::Now();
int frame_number = encoded_image._timeStamp;
FrameStatistic& stat = stats_->stats_[frame_number];
- stat.encode_time_in_us = GetElapsedTimeMicroseconds(encode_start_,
- encode_stop);
+ stat.encode_time_in_us =
+ GetElapsedTimeMicroseconds(encode_start_, encode_stop);
stat.encoding_successful = true;
stat.encoded_frame_length_in_bytes = encoded_image._length;
stat.frame_number = encoded_image._timeStamp;
stat.frame_type = encoded_image._frameType;
stat.bit_rate_in_kbps = encoded_image._length * bit_rate_factor_;
- stat.total_packets = encoded_image._length /
- config_.networking_config.packet_size_in_bytes + 1;
+ stat.total_packets =
+ encoded_image._length / config_.networking_config.packet_size_in_bytes +
+ 1;
// Perform packet loss if the criterion is fulfilled:
bool exclude_this_frame = false;
@@ -280,7 +284,7 @@ void VideoProcessorImpl::FrameEncoded(const EncodedImage& encoded_image) {
copied_image._buffer = copied_buffer.get();
if (!exclude_this_frame) {
stat.packets_dropped =
- packet_manipulator_->ManipulatePackets(&copied_image);
+ packet_manipulator_->ManipulatePackets(&copied_image);
}
// Keep track of whether frames are lost due to packet loss so we can tell
@@ -305,26 +309,25 @@ void VideoProcessorImpl::FrameDecoded(const VideoFrame& image) {
int frame_number = image.timestamp();
// Report stats
FrameStatistic& stat = stats_->stats_[frame_number];
- stat.decode_time_in_us = GetElapsedTimeMicroseconds(decode_start_,
- decode_stop);
+ stat.decode_time_in_us =
+ GetElapsedTimeMicroseconds(decode_start_, decode_stop);
stat.decoding_successful = true;
// Check for resize action (either down or up):
if (static_cast<int>(image.width()) != last_encoder_frame_width_ ||
- static_cast<int>(image.height()) != last_encoder_frame_height_ ) {
+ static_cast<int>(image.height()) != last_encoder_frame_height_) {
++num_spatial_resizes_;
last_encoder_frame_width_ = image.width();
last_encoder_frame_height_ = image.height();
}
// Check if codec size is different from native/original size, and if so,
// upsample back to original size: needed for PSNR and SSIM computations.
- if (image.width() != config_.codec_settings->width ||
+ if (image.width() != config_.codec_settings->width ||
image.height() != config_.codec_settings->height) {
VideoFrame up_image;
- int ret_val = scaler_.Set(image.width(), image.height(),
- config_.codec_settings->width,
- config_.codec_settings->height,
- kI420, kI420, kScaleBilinear);
+ int ret_val = scaler_.Set(
+ image.width(), image.height(), config_.codec_settings->width,
+ config_.codec_settings->height, kI420, kI420, kScaleBilinear);
assert(ret_val >= 0);
if (ret_val < 0) {
fprintf(stderr, "Failed to set scalar for frame: %d, return code: %d\n",
@@ -366,7 +369,8 @@ void VideoProcessorImpl::FrameDecoded(const VideoFrame& image) {
}
int VideoProcessorImpl::GetElapsedTimeMicroseconds(
- const webrtc::TickTime& start, const webrtc::TickTime& stop) {
+ const webrtc::TickTime& start,
+ const webrtc::TickTime& stop) {
uint64_t encode_time = (stop - start).Microseconds();
assert(encode_time <
static_cast<unsigned int>(std::numeric_limits<int>::max()));
@@ -404,8 +408,7 @@ const char* VideoCodecTypeToStr(webrtc::VideoCodecType e) {
}
// Callbacks
-int32_t
-VideoProcessorImpl::VideoProcessorEncodeCompleteCallback::Encoded(
+int32_t VideoProcessorImpl::VideoProcessorEncodeCompleteCallback::Encoded(
const EncodedImage& encoded_image,
const webrtc::CodecSpecificInfo* codec_specific_info,
const webrtc::RTPFragmentationHeader* fragmentation) {
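One detail worth spelling out from FrameEncoded(): the harness sets each frame's timestamp to its source frame number, so the number of frames the encoder dropped between two outputs falls out of simple arithmetic on consecutive timestamps. Worked example under that assumption:

    #include <cstdio>

    // Timestamps are consecutive frame numbers, so the gap between two
    // successive encoded frames counts the frames dropped in between.
    int DroppedSincePrev(int prev_time_stamp, int current_time_stamp) {
      return current_time_stamp - prev_time_stamp - 1;
    }

    int main() {
      // Encoder emitted frame 4, then frame 7: frames 5 and 6 were dropped.
      printf("%d\n", DroppedSincePrev(4, 7));  // prints 2
    }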
diff --git a/webrtc/modules/video_coding/codecs/test/videoprocessor.h b/webrtc/modules/video_coding/codecs/test/videoprocessor.h
index 0b094ae73e..3ee08fd46a 100644
--- a/webrtc/modules/video_coding/codecs/test/videoprocessor.h
+++ b/webrtc/modules/video_coding/codecs/test/videoprocessor.h
@@ -13,9 +13,10 @@
#include <string>
+#include "webrtc/base/checks.h"
#include "webrtc/common_video/libyuv/include/scaler.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/modules/video_coding/codecs/test/packet_manipulator.h"
#include "webrtc/modules/video_coding/codecs/test/stats.h"
#include "webrtc/system_wrappers/include/tick_util.h"
@@ -242,12 +243,16 @@ class VideoProcessorImpl : public VideoProcessor {
// Callback class required to implement according to the VideoDecoder API.
class VideoProcessorDecodeCompleteCallback
- : public webrtc::DecodedImageCallback {
+ : public webrtc::DecodedImageCallback {
public:
- explicit VideoProcessorDecodeCompleteCallback(VideoProcessorImpl* vp)
- : video_processor_(vp) {
+ explicit VideoProcessorDecodeCompleteCallback(VideoProcessorImpl* vp)
+ : video_processor_(vp) {}
+ int32_t Decoded(webrtc::VideoFrame& image) override;
+ int32_t Decoded(webrtc::VideoFrame& image,
+ int64_t decode_time_ms) override {
+ RTC_NOTREACHED();
+ return -1;
}
- int32_t Decoded(webrtc::VideoFrame& image) override;
private:
VideoProcessorImpl* video_processor_;
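The decode callback now overrides both Decoded() overloads; the timing variant is stubbed with RTC_NOTREACHED() (from the newly included webrtc/base/checks.h) because this harness never reports decode time. A trimmed sketch of the shape, with local stand-ins for the WebRTC types:

    #include <cassert>
    #include <cstdint>

    #define RTC_NOTREACHED() assert(false)  // stand-in for webrtc/base/checks.h

    struct VideoFrame {};  // trimmed stand-in

    struct DecodedImageCallback {  // trimmed stand-in
      virtual ~DecodedImageCallback() {}
      virtual int32_t Decoded(VideoFrame& image) = 0;
      virtual int32_t Decoded(VideoFrame& image, int64_t decode_time_ms) = 0;
    };

    class TestDecodeCallback : public DecodedImageCallback {
     public:
      int32_t Decoded(VideoFrame& image) override { return 0; }
      // Unused in this harness; fail loudly if anything ever calls it.
      int32_t Decoded(VideoFrame& image, int64_t decode_time_ms) override {
        RTC_NOTREACHED();
        return -1;
      }
    };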
diff --git a/webrtc/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc b/webrtc/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc
index 3d6aedb22a..7b92616e1b 100644
--- a/webrtc/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc
+++ b/webrtc/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc
@@ -12,17 +12,16 @@
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/modules/video_coding/codecs/test/packet_manipulator.h"
#include "webrtc/modules/video_coding/codecs/test/videoprocessor.h"
#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
#include "webrtc/modules/video_coding/codecs/vp9/include/vp9.h"
#include "webrtc/modules/video_coding/codecs/vp8/include/vp8_common_types.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding.h"
+#include "webrtc/modules/video_coding/include/video_coding.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/test/testsupport/frame_reader.h"
#include "webrtc/test/testsupport/frame_writer.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
#include "webrtc/test/testsupport/metrics/video_metrics.h"
#include "webrtc/test/testsupport/packet_reader.h"
#include "webrtc/typedefs.h"
@@ -81,7 +80,6 @@ struct RateControlMetrics {
int num_key_frames;
};
-
// Sequence used is foreman (CIF): may be better to use VGA for resize test.
const int kCIFWidth = 352;
const int kCIFHeight = 288;
@@ -101,7 +99,7 @@ const float kScaleKeyFrameSize = 0.5f;
// dropping/spatial resize, and temporal layers. The limits for the rate
// control metrics are set to be fairly conservative, so failure should only
// happen when some significant regression or breakdown occurs.
-class VideoProcessorIntegrationTest: public testing::Test {
+class VideoProcessorIntegrationTest : public testing::Test {
protected:
VideoEncoder* encoder_;
VideoDecoder* decoder_;
@@ -148,7 +146,6 @@ class VideoProcessorIntegrationTest: public testing::Test {
bool frame_dropper_on_;
bool spatial_resize_on_;
-
VideoProcessorIntegrationTest() {}
virtual ~VideoProcessorIntegrationTest() {}
@@ -165,14 +162,13 @@ class VideoProcessorIntegrationTest: public testing::Test {
// CIF is currently used for all tests below.
// Setup the TestConfig struct for processing of a clip in CIF resolution.
- config_.input_filename =
- webrtc::test::ResourcePath("foreman_cif", "yuv");
+ config_.input_filename = webrtc::test::ResourcePath("foreman_cif", "yuv");
// Generate an output filename in a safe way.
config_.output_filename = webrtc::test::TempFilename(
webrtc::test::OutputPath(), "videoprocessor_integrationtest");
- config_.frame_length_in_bytes = CalcBufferSize(kI420,
- kCIFWidth, kCIFHeight);
+ config_.frame_length_in_bytes =
+ CalcBufferSize(kI420, kCIFWidth, kCIFHeight);
config_.verbose = false;
// Only allow encoder/decoder to use a single core, for predictability.
config_.use_single_core = true;
@@ -188,52 +184,46 @@ class VideoProcessorIntegrationTest: public testing::Test {
// These features may be set depending on the test.
switch (config_.codec_settings->codecType) {
- case kVideoCodecVP8:
- config_.codec_settings->codecSpecific.VP8.errorConcealmentOn =
- error_concealment_on_;
- config_.codec_settings->codecSpecific.VP8.denoisingOn =
- denoising_on_;
- config_.codec_settings->codecSpecific.VP8.numberOfTemporalLayers =
- num_temporal_layers_;
- config_.codec_settings->codecSpecific.VP8.frameDroppingOn =
- frame_dropper_on_;
- config_.codec_settings->codecSpecific.VP8.automaticResizeOn =
- spatial_resize_on_;
- config_.codec_settings->codecSpecific.VP8.keyFrameInterval =
- kBaseKeyFrameInterval;
- break;
- case kVideoCodecVP9:
- config_.codec_settings->codecSpecific.VP9.denoisingOn =
- denoising_on_;
- config_.codec_settings->codecSpecific.VP9.numberOfTemporalLayers =
- num_temporal_layers_;
- config_.codec_settings->codecSpecific.VP9.frameDroppingOn =
- frame_dropper_on_;
- config_.codec_settings->codecSpecific.VP9.automaticResizeOn =
- spatial_resize_on_;
- config_.codec_settings->codecSpecific.VP9.keyFrameInterval =
- kBaseKeyFrameInterval;
- break;
- default:
- assert(false);
- break;
- }
- frame_reader_ =
- new webrtc::test::FrameReaderImpl(config_.input_filename,
- config_.frame_length_in_bytes);
- frame_writer_ =
- new webrtc::test::FrameWriterImpl(config_.output_filename,
- config_.frame_length_in_bytes);
+ case kVideoCodecVP8:
+ config_.codec_settings->codecSpecific.VP8.errorConcealmentOn =
+ error_concealment_on_;
+ config_.codec_settings->codecSpecific.VP8.denoisingOn = denoising_on_;
+ config_.codec_settings->codecSpecific.VP8.numberOfTemporalLayers =
+ num_temporal_layers_;
+ config_.codec_settings->codecSpecific.VP8.frameDroppingOn =
+ frame_dropper_on_;
+ config_.codec_settings->codecSpecific.VP8.automaticResizeOn =
+ spatial_resize_on_;
+ config_.codec_settings->codecSpecific.VP8.keyFrameInterval =
+ kBaseKeyFrameInterval;
+ break;
+ case kVideoCodecVP9:
+ config_.codec_settings->codecSpecific.VP9.denoisingOn = denoising_on_;
+ config_.codec_settings->codecSpecific.VP9.numberOfTemporalLayers =
+ num_temporal_layers_;
+ config_.codec_settings->codecSpecific.VP9.frameDroppingOn =
+ frame_dropper_on_;
+ config_.codec_settings->codecSpecific.VP9.automaticResizeOn =
+ spatial_resize_on_;
+ config_.codec_settings->codecSpecific.VP9.keyFrameInterval =
+ kBaseKeyFrameInterval;
+ break;
+ default:
+ assert(false);
+ break;
+ }
+ frame_reader_ = new webrtc::test::FrameReaderImpl(
+ config_.input_filename, config_.frame_length_in_bytes);
+ frame_writer_ = new webrtc::test::FrameWriterImpl(
+ config_.output_filename, config_.frame_length_in_bytes);
ASSERT_TRUE(frame_reader_->Init());
ASSERT_TRUE(frame_writer_->Init());
packet_manipulator_ = new webrtc::test::PacketManipulatorImpl(
&packet_reader_, config_.networking_config, config_.verbose);
- processor_ = new webrtc::test::VideoProcessorImpl(encoder_, decoder_,
- frame_reader_,
- frame_writer_,
- packet_manipulator_,
- config_, &stats_);
+ processor_ = new webrtc::test::VideoProcessorImpl(
+ encoder_, decoder_, frame_reader_, frame_writer_, packet_manipulator_,
+ config_, &stats_);
ASSERT_TRUE(processor_->Init());
}
@@ -247,7 +237,7 @@ class VideoProcessorIntegrationTest: public testing::Test {
encoding_bitrate_[i] = 0.0f;
// Update layer per-frame-bandwidth.
per_frame_bandwidth_[i] = static_cast<float>(bit_rate_layer_[i]) /
- static_cast<float>(frame_rate_layer_[i]);
+ static_cast<float>(frame_rate_layer_[i]);
}
// Set maximum size of key frames, following setting in the VP8 wrapper.
float max_key_size = kScaleKeyFrameSize * kOptimalBufferSize * frame_rate_;
@@ -274,28 +264,28 @@ class VideoProcessorIntegrationTest: public testing::Test {
// Update rate mismatch relative to per-frame bandwidth for delta frames.
if (frame_type == kVideoFrameDelta) {
// TODO(marpan): Should we count dropped (zero size) frames in mismatch?
- sum_frame_size_mismatch_[layer_] += fabs(encoded_size_kbits -
- per_frame_bandwidth_[layer_]) /
- per_frame_bandwidth_[layer_];
+ sum_frame_size_mismatch_[layer_] +=
+ fabs(encoded_size_kbits - per_frame_bandwidth_[layer_]) /
+ per_frame_bandwidth_[layer_];
} else {
- float target_size = (frame_num == 1) ? target_size_key_frame_initial_ :
- target_size_key_frame_;
- sum_key_frame_size_mismatch_ += fabs(encoded_size_kbits - target_size) /
- target_size;
+ float target_size = (frame_num == 1) ? target_size_key_frame_initial_
+ : target_size_key_frame_;
+ sum_key_frame_size_mismatch_ +=
+ fabs(encoded_size_kbits - target_size) / target_size;
num_key_frames_ += 1;
}
sum_encoded_frame_size_[layer_] += encoded_size_kbits;
// Encoding bitrate per layer: from the start of the update/run to the
// current frame.
encoding_bitrate_[layer_] = sum_encoded_frame_size_[layer_] *
- frame_rate_layer_[layer_] /
- num_frames_per_update_[layer_];
+ frame_rate_layer_[layer_] /
+ num_frames_per_update_[layer_];
// Total encoding rate: from the start of the update/run to current frame.
sum_encoded_frame_size_total_ += encoded_size_kbits;
- encoding_bitrate_total_ = sum_encoded_frame_size_total_ * frame_rate_ /
- num_frames_total_;
- perc_encoding_rate_mismatch_ = 100 * fabs(encoding_bitrate_total_ -
- bit_rate_) / bit_rate_;
+ encoding_bitrate_total_ =
+ sum_encoded_frame_size_total_ * frame_rate_ / num_frames_total_;
+ perc_encoding_rate_mismatch_ =
+ 100 * fabs(encoding_bitrate_total_ - bit_rate_) / bit_rate_;
if (perc_encoding_rate_mismatch_ < kPercTargetvsActualMismatch &&
!encoding_rate_within_target_) {
num_frames_to_hit_target_ = num_frames_total_;
@@ -314,34 +304,38 @@ class VideoProcessorIntegrationTest: public testing::Test {
int num_key_frames) {
int num_dropped_frames = processor_->NumberDroppedFrames();
int num_resize_actions = processor_->NumberSpatialResizes();
- printf("For update #: %d,\n "
+ printf(
+ "For update #: %d,\n "
" Target Bitrate: %d,\n"
" Encoding bitrate: %f,\n"
" Frame rate: %d \n",
update_index, bit_rate_, encoding_bitrate_total_, frame_rate_);
- printf(" Number of frames to approach target rate = %d, \n"
- " Number of dropped frames = %d, \n"
- " Number of spatial resizes = %d, \n",
- num_frames_to_hit_target_, num_dropped_frames, num_resize_actions);
+ printf(
+ " Number of frames to approach target rate = %d, \n"
+ " Number of dropped frames = %d, \n"
+ " Number of spatial resizes = %d, \n",
+ num_frames_to_hit_target_, num_dropped_frames, num_resize_actions);
EXPECT_LE(perc_encoding_rate_mismatch_, max_encoding_rate_mismatch);
if (num_key_frames_ > 0) {
- int perc_key_frame_size_mismatch = 100 * sum_key_frame_size_mismatch_ /
- num_key_frames_;
- printf(" Number of Key frames: %d \n"
- " Key frame rate mismatch: %d \n",
- num_key_frames_, perc_key_frame_size_mismatch);
+ int perc_key_frame_size_mismatch =
+ 100 * sum_key_frame_size_mismatch_ / num_key_frames_;
+ printf(
+ " Number of Key frames: %d \n"
+ " Key frame rate mismatch: %d \n",
+ num_key_frames_, perc_key_frame_size_mismatch);
EXPECT_LE(perc_key_frame_size_mismatch, max_key_frame_size_mismatch);
}
printf("\n");
printf("Rates statistics for Layer data \n");
- for (int i = 0; i < num_temporal_layers_ ; i++) {
+ for (int i = 0; i < num_temporal_layers_; i++) {
printf("Layer #%d \n", i);
- int perc_frame_size_mismatch = 100 * sum_frame_size_mismatch_[i] /
- num_frames_per_update_[i];
- int perc_encoding_rate_mismatch = 100 * fabs(encoding_bitrate_[i] -
- bit_rate_layer_[i]) /
- bit_rate_layer_[i];
- printf(" Target Layer Bit rate: %f \n"
+ int perc_frame_size_mismatch =
+ 100 * sum_frame_size_mismatch_[i] / num_frames_per_update_[i];
+ int perc_encoding_rate_mismatch =
+ 100 * fabs(encoding_bitrate_[i] - bit_rate_layer_[i]) /
+ bit_rate_layer_[i];
+ printf(
+ " Target Layer Bit rate: %f \n"
" Layer frame rate: %f, \n"
" Layer per frame bandwidth: %f, \n"
" Layer Encoding bit rate: %f, \n"
@@ -366,13 +360,13 @@ class VideoProcessorIntegrationTest: public testing::Test {
if (num_temporal_layers_ == 1) {
layer_ = 0;
} else if (num_temporal_layers_ == 2) {
- // layer 0: 0 2 4 ...
- // layer 1: 1 3
- if (frame_number % 2 == 0) {
- layer_ = 0;
- } else {
- layer_ = 1;
- }
+ // layer 0: 0 2 4 ...
+ // layer 1: 1 3
+ if (frame_number % 2 == 0) {
+ layer_ = 0;
+ } else {
+ layer_ = 1;
+ }
} else if (num_temporal_layers_ == 3) {
// layer 0: 0 4 8 ...
// layer 1: 2 6
@@ -391,20 +385,20 @@ class VideoProcessorIntegrationTest: public testing::Test {
// Set the bitrate and frame rate per layer, for up to 3 layers.
void SetLayerRates() {
- assert(num_temporal_layers_<= 3);
+ assert(num_temporal_layers_ <= 3);
for (int i = 0; i < num_temporal_layers_; i++) {
float bit_rate_ratio =
kVp8LayerRateAlloction[num_temporal_layers_ - 1][i];
if (i > 0) {
- float bit_rate_delta_ratio = kVp8LayerRateAlloction
- [num_temporal_layers_ - 1][i] -
+ float bit_rate_delta_ratio =
+ kVp8LayerRateAlloction[num_temporal_layers_ - 1][i] -
kVp8LayerRateAlloction[num_temporal_layers_ - 1][i - 1];
bit_rate_layer_[i] = bit_rate_ * bit_rate_delta_ratio;
} else {
bit_rate_layer_[i] = bit_rate_ * bit_rate_ratio;
}
- frame_rate_layer_[i] = frame_rate_ / static_cast<float>(
- 1 << (num_temporal_layers_ - 1));
+ frame_rate_layer_[i] =
+ frame_rate_ / static_cast<float>(1 << (num_temporal_layers_ - 1));
}
if (num_temporal_layers_ == 3) {
frame_rate_layer_[2] = frame_rate_ / 2.0f;
@@ -437,12 +431,12 @@ class VideoProcessorIntegrationTest: public testing::Test {
spatial_resize_on_ = process.spatial_resize_on;
SetUpCodecConfig();
// Update the layers and the codec with the initial rates.
- bit_rate_ = rate_profile.target_bit_rate[0];
+ bit_rate_ = rate_profile.target_bit_rate[0];
frame_rate_ = rate_profile.input_frame_rate[0];
SetLayerRates();
// Set the initial target size for key frame.
- target_size_key_frame_initial_ = 0.5 * kInitialBufferSize *
- bit_rate_layer_[0];
+ target_size_key_frame_initial_ =
+ 0.5 * kInitialBufferSize * bit_rate_layer_[0];
processor_->SetRates(bit_rate_, frame_rate_);
// Process each frame, up to |num_frames|.
int num_frames = rate_profile.num_frames;
@@ -452,7 +446,7 @@ class VideoProcessorIntegrationTest: public testing::Test {
int frame_number = 0;
FrameType frame_type = kVideoFrameDelta;
while (processor_->ProcessFrame(frame_number) &&
- frame_number < num_frames) {
+ frame_number < num_frames) {
// Get the layer index for the frame |frame_number|.
LayerIndexForFrame(frame_number);
// Get the frame_type.
@@ -468,8 +462,7 @@ class VideoProcessorIntegrationTest: public testing::Test {
if (frame_number ==
rate_profile.frame_index_rate_update[update_index + 1]) {
VerifyRateControl(
- update_index,
- rc_metrics[update_index].max_key_frame_size_mismatch,
+ update_index, rc_metrics[update_index].max_key_frame_size_mismatch,
rc_metrics[update_index].max_delta_frame_size_mismatch,
rc_metrics[update_index].max_encoding_rate_mismatch,
rc_metrics[update_index].max_time_hit_target,
@@ -478,23 +471,22 @@ class VideoProcessorIntegrationTest: public testing::Test {
rc_metrics[update_index].num_key_frames);
// Update layer rates and the codec with new rates.
++update_index;
- bit_rate_ = rate_profile.target_bit_rate[update_index];
+ bit_rate_ = rate_profile.target_bit_rate[update_index];
frame_rate_ = rate_profile.input_frame_rate[update_index];
SetLayerRates();
- ResetRateControlMetrics(rate_profile.
- frame_index_rate_update[update_index + 1]);
+ ResetRateControlMetrics(
+ rate_profile.frame_index_rate_update[update_index + 1]);
processor_->SetRates(bit_rate_, frame_rate_);
}
}
- VerifyRateControl(
- update_index,
- rc_metrics[update_index].max_key_frame_size_mismatch,
- rc_metrics[update_index].max_delta_frame_size_mismatch,
- rc_metrics[update_index].max_encoding_rate_mismatch,
- rc_metrics[update_index].max_time_hit_target,
- rc_metrics[update_index].max_num_dropped_frames,
- rc_metrics[update_index].num_spatial_resizes,
- rc_metrics[update_index].num_key_frames);
+ VerifyRateControl(update_index,
+ rc_metrics[update_index].max_key_frame_size_mismatch,
+ rc_metrics[update_index].max_delta_frame_size_mismatch,
+ rc_metrics[update_index].max_encoding_rate_mismatch,
+ rc_metrics[update_index].max_time_hit_target,
+ rc_metrics[update_index].max_num_dropped_frames,
+ rc_metrics[update_index].num_spatial_resizes,
+ rc_metrics[update_index].num_key_frames);
EXPECT_EQ(num_frames, frame_number);
EXPECT_EQ(num_frames + 1, static_cast<int>(stats_.stats_.size()));
@@ -507,16 +499,14 @@ class VideoProcessorIntegrationTest: public testing::Test {
// TODO(marpan): should compute these quality metrics per SetRates update.
webrtc::test::QualityMetricsResult psnr_result, ssim_result;
- EXPECT_EQ(0, webrtc::test::I420MetricsFromFiles(
- config_.input_filename.c_str(),
- config_.output_filename.c_str(),
- config_.codec_settings->width,
- config_.codec_settings->height,
- &psnr_result,
- &ssim_result));
+ EXPECT_EQ(
+ 0, webrtc::test::I420MetricsFromFiles(
+ config_.input_filename.c_str(), config_.output_filename.c_str(),
+ config_.codec_settings->width, config_.codec_settings->height,
+ &psnr_result, &ssim_result));
printf("PSNR avg: %f, min: %f SSIM avg: %f, min: %f\n",
- psnr_result.average, psnr_result.min,
- ssim_result.average, ssim_result.min);
+ psnr_result.average, psnr_result.min, ssim_result.average,
+ ssim_result.min);
stats_.PrintSummary();
EXPECT_GT(psnr_result.average, quality_metrics.minimum_avg_psnr);
EXPECT_GT(psnr_result.min, quality_metrics.minimum_min_psnr);
@@ -549,7 +539,7 @@ void SetCodecParameters(CodecConfigPars* process_settings,
bool spatial_resize_on) {
process_settings->codec_type = codec_type;
process_settings->packet_loss = packet_loss;
- process_settings->key_frame_interval = key_frame_interval;
+ process_settings->key_frame_interval = key_frame_interval;
process_settings->num_temporal_layers = num_temporal_layers,
process_settings->error_concealment_on = error_concealment_on;
process_settings->denoising_on = denoising_on;
@@ -608,9 +598,7 @@ TEST_F(VideoProcessorIntegrationTest, Process0PercentPacketLossVP9) {
// Metrics for rate control.
RateControlMetrics rc_metrics[1];
SetRateControlMetrics(rc_metrics, 0, 0, 40, 20, 10, 20, 0, 1);
- ProcessFramesAndVerify(quality_metrics,
- rate_profile,
- process_settings,
+ ProcessFramesAndVerify(quality_metrics, rate_profile, process_settings,
rc_metrics);
}
@@ -632,13 +620,10 @@ TEST_F(VideoProcessorIntegrationTest, Process5PercentPacketLossVP9) {
// Metrics for rate control.
RateControlMetrics rc_metrics[1];
SetRateControlMetrics(rc_metrics, 0, 0, 40, 20, 10, 20, 0, 1);
- ProcessFramesAndVerify(quality_metrics,
- rate_profile,
- process_settings,
+ ProcessFramesAndVerify(quality_metrics, rate_profile, process_settings,
rc_metrics);
}
-
// VP9: Run with no packet loss, with varying bitrate (3 rate updates):
// low to high to medium. Check that quality and encoder response to the new
// target rate/per-frame bandwidth (for each rate update) is within limits.
@@ -657,15 +642,13 @@ TEST_F(VideoProcessorIntegrationTest, ProcessNoLossChangeBitRateVP9) {
false, true, false);
// Metrics for expected quality.
QualityMetrics quality_metrics;
- SetQualityMetrics(&quality_metrics, 35.9, 30.0, 0.90, 0.85);
+ SetQualityMetrics(&quality_metrics, 35.7, 30.0, 0.90, 0.85);
// Metrics for rate control.
RateControlMetrics rc_metrics[3];
SetRateControlMetrics(rc_metrics, 0, 0, 30, 20, 20, 30, 0, 1);
SetRateControlMetrics(rc_metrics, 1, 2, 0, 20, 20, 60, 0, 0);
SetRateControlMetrics(rc_metrics, 2, 0, 0, 25, 20, 40, 0, 0);
- ProcessFramesAndVerify(quality_metrics,
- rate_profile,
- process_settings,
+ ProcessFramesAndVerify(quality_metrics, rate_profile, process_settings,
rc_metrics);
}
@@ -695,12 +678,10 @@ TEST_F(VideoProcessorIntegrationTest,
SetQualityMetrics(&quality_metrics, 31.5, 18.0, 0.80, 0.44);
// Metrics for rate control.
RateControlMetrics rc_metrics[3];
- SetRateControlMetrics(rc_metrics, 0, 35, 50, 70, 15, 45, 0, 1);
+ SetRateControlMetrics(rc_metrics, 0, 38, 50, 75, 15, 45, 0, 1);
SetRateControlMetrics(rc_metrics, 1, 10, 0, 40, 10, 30, 0, 0);
SetRateControlMetrics(rc_metrics, 2, 5, 0, 30, 5, 20, 0, 0);
- ProcessFramesAndVerify(quality_metrics,
- rate_profile,
- process_settings,
+ ProcessFramesAndVerify(quality_metrics, rate_profile, process_settings,
rc_metrics);
}
@@ -721,19 +702,13 @@ TEST_F(VideoProcessorIntegrationTest, ProcessNoLossDenoiserOnVP9) {
// Metrics for rate control.
RateControlMetrics rc_metrics[1];
SetRateControlMetrics(rc_metrics, 0, 0, 40, 20, 10, 20, 0, 1);
- ProcessFramesAndVerify(quality_metrics,
- rate_profile,
- process_settings,
+ ProcessFramesAndVerify(quality_metrics, rate_profile, process_settings,
rc_metrics);
}
// Run with no packet loss, at low bitrate.
-// spatial_resize is on, and for this low bitrate expect two resizes during the
-// sequence; first resize is 3/4, second is 1/2 (from original).
+// spatial_resize is on; for this low bitrate, expect one resize in the sequence.
// Resize happens on delta frame. Expect only one key frame (first frame).
-// Disable for msan, see
-// https://code.google.com/p/webrtc/issues/detail?id=5110 for details.
-#if !defined(MEMORY_SANITIZER)
TEST_F(VideoProcessorIntegrationTest, ProcessNoLossSpatialResizeFrameDropVP9) {
config_.networking_config.packet_loss_probability = 0;
// Bitrate and frame rate profile.
@@ -743,20 +718,17 @@ TEST_F(VideoProcessorIntegrationTest, ProcessNoLossSpatialResizeFrameDropVP9) {
rate_profile.num_frames = kNbrFramesLong;
// Codec/network settings.
CodecConfigPars process_settings;
- SetCodecParameters(&process_settings, kVideoCodecVP9, 0.0f, -1,
- 1, false, false, true, true);
+ SetCodecParameters(&process_settings, kVideoCodecVP9, 0.0f, -1, 1, false,
+ false, true, true);
// Metrics for expected quality.
QualityMetrics quality_metrics;
- SetQualityMetrics(&quality_metrics, 25.0, 13.0, 0.70, 0.40);
+ SetQualityMetrics(&quality_metrics, 24.0, 13.0, 0.65, 0.37);
// Metrics for rate control.
RateControlMetrics rc_metrics[1];
- SetRateControlMetrics(rc_metrics, 0, 180, 70, 130, 15, 80, 2, 1);
- ProcessFramesAndVerify(quality_metrics,
- rate_profile,
- process_settings,
+ SetRateControlMetrics(rc_metrics, 0, 228, 70, 160, 15, 80, 1, 1);
+ ProcessFramesAndVerify(quality_metrics, rate_profile, process_settings,
rc_metrics);
}
-#endif
// TODO(marpan): Add temporal layer test for VP9, once changes are in
// vp9 wrapper for this.
@@ -780,9 +752,7 @@ TEST_F(VideoProcessorIntegrationTest, ProcessZeroPacketLoss) {
// Metrics for rate control.
RateControlMetrics rc_metrics[1];
SetRateControlMetrics(rc_metrics, 0, 0, 40, 20, 10, 15, 0, 1);
- ProcessFramesAndVerify(quality_metrics,
- rate_profile,
- process_settings,
+ ProcessFramesAndVerify(quality_metrics, rate_profile, process_settings,
rc_metrics);
}
@@ -804,9 +774,7 @@ TEST_F(VideoProcessorIntegrationTest, Process5PercentPacketLoss) {
// Metrics for rate control.
RateControlMetrics rc_metrics[1];
SetRateControlMetrics(rc_metrics, 0, 0, 40, 20, 10, 15, 0, 1);
- ProcessFramesAndVerify(quality_metrics,
- rate_profile,
- process_settings,
+ ProcessFramesAndVerify(quality_metrics, rate_profile, process_settings,
rc_metrics);
}
@@ -828,9 +796,7 @@ TEST_F(VideoProcessorIntegrationTest, Process10PercentPacketLoss) {
// Metrics for rate control.
RateControlMetrics rc_metrics[1];
SetRateControlMetrics(rc_metrics, 0, 0, 40, 20, 10, 15, 0, 1);
- ProcessFramesAndVerify(quality_metrics,
- rate_profile,
- process_settings,
+ ProcessFramesAndVerify(quality_metrics, rate_profile, process_settings,
rc_metrics);
}
@@ -847,8 +813,13 @@ TEST_F(VideoProcessorIntegrationTest, Process10PercentPacketLoss) {
// low to high to medium. Check that quality and encoder response to the new
// target rate/per-frame bandwidth (for each rate update) is within limits.
// One key frame (first frame only) in sequence.
-TEST_F(VideoProcessorIntegrationTest,
- DISABLED_ON_ANDROID(ProcessNoLossChangeBitRateVP8)) {
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_ProcessNoLossChangeBitRateVP8 \
+ DISABLED_ProcessNoLossChangeBitRateVP8
+#else
+#define MAYBE_ProcessNoLossChangeBitRateVP8 ProcessNoLossChangeBitRateVP8
+#endif
+TEST_F(VideoProcessorIntegrationTest, MAYBE_ProcessNoLossChangeBitRateVP8) {
// Bitrate and frame rate profile.
RateProfile rate_profile;
SetRateProfilePars(&rate_profile, 0, 200, 30, 0);
@@ -868,9 +839,7 @@ TEST_F(VideoProcessorIntegrationTest,
SetRateControlMetrics(rc_metrics, 0, 0, 45, 20, 10, 15, 0, 1);
SetRateControlMetrics(rc_metrics, 1, 0, 0, 25, 20, 10, 0, 0);
SetRateControlMetrics(rc_metrics, 2, 0, 0, 25, 15, 10, 0, 0);
- ProcessFramesAndVerify(quality_metrics,
- rate_profile,
- process_settings,
+ ProcessFramesAndVerify(quality_metrics, rate_profile, process_settings,
rc_metrics);
}
@@ -881,8 +850,15 @@ TEST_F(VideoProcessorIntegrationTest,
// for the rate control metrics can be lower. One key frame (first frame only).
// Note: quality after update should be higher but we currently compute quality
// metrics averaged over whole sequence run.
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_ProcessNoLossChangeFrameRateFrameDropVP8 \
+ DISABLED_ProcessNoLossChangeFrameRateFrameDropVP8
+#else
+#define MAYBE_ProcessNoLossChangeFrameRateFrameDropVP8 \
+ ProcessNoLossChangeFrameRateFrameDropVP8
+#endif
TEST_F(VideoProcessorIntegrationTest,
- DISABLED_ON_ANDROID(ProcessNoLossChangeFrameRateFrameDropVP8)) {
+ MAYBE_ProcessNoLossChangeFrameRateFrameDropVP8) {
config_.networking_config.packet_loss_probability = 0;
// Bitrate and frame rate profile.
RateProfile rate_profile;
@@ -903,16 +879,21 @@ TEST_F(VideoProcessorIntegrationTest,
SetRateControlMetrics(rc_metrics, 0, 40, 20, 75, 15, 60, 0, 1);
SetRateControlMetrics(rc_metrics, 1, 10, 0, 25, 10, 35, 0, 0);
SetRateControlMetrics(rc_metrics, 2, 0, 0, 20, 10, 15, 0, 0);
- ProcessFramesAndVerify(quality_metrics,
- rate_profile,
- process_settings,
+ ProcessFramesAndVerify(quality_metrics, rate_profile, process_settings,
rc_metrics);
}
// Run with no packet loss, at low bitrate. During this time we should've
// resized once. Expect 2 key frames generated (first and one for resize).
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_ProcessNoLossSpatialResizeFrameDropVP8 \
+ DISABLED_ProcessNoLossSpatialResizeFrameDropVP8
+#else
+#define MAYBE_ProcessNoLossSpatialResizeFrameDropVP8 \
+ ProcessNoLossSpatialResizeFrameDropVP8
+#endif
TEST_F(VideoProcessorIntegrationTest,
- DISABLED_ON_ANDROID(ProcessNoLossSpatialResizeFrameDropVP8)) {
+ MAYBE_ProcessNoLossSpatialResizeFrameDropVP8) {
config_.networking_config.packet_loss_probability = 0;
// Bitrate and frame rate profile.
RateProfile rate_profile;
@@ -921,17 +902,15 @@ TEST_F(VideoProcessorIntegrationTest,
rate_profile.num_frames = kNbrFramesLong;
// Codec/network settings.
CodecConfigPars process_settings;
- SetCodecParameters(&process_settings, kVideoCodecVP8, 0.0f, -1,
- 1, false, true, true, true);
+ SetCodecParameters(&process_settings, kVideoCodecVP8, 0.0f, -1, 1, false,
+ true, true, true);
// Metrics for expected quality.
QualityMetrics quality_metrics;
SetQualityMetrics(&quality_metrics, 25.0, 15.0, 0.70, 0.40);
// Metrics for rate control.
RateControlMetrics rc_metrics[1];
SetRateControlMetrics(rc_metrics, 0, 160, 60, 120, 20, 70, 1, 2);
- ProcessFramesAndVerify(quality_metrics,
- rate_profile,
- process_settings,
+ ProcessFramesAndVerify(quality_metrics, rate_profile, process_settings,
rc_metrics);
}
@@ -940,8 +919,13 @@ TEST_F(VideoProcessorIntegrationTest,
// encoding rate mismatch are applied to each layer.
// No dropped frames in this test, and internal spatial resizer is off.
// One key frame (first frame only) in sequence, so no spatial resizing.
-TEST_F(VideoProcessorIntegrationTest,
- DISABLED_ON_ANDROID(ProcessNoLossTemporalLayersVP8)) {
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_ProcessNoLossTemporalLayersVP8 \
+ DISABLED_ProcessNoLossTemporalLayersVP8
+#else
+#define MAYBE_ProcessNoLossTemporalLayersVP8 ProcessNoLossTemporalLayersVP8
+#endif
+TEST_F(VideoProcessorIntegrationTest, MAYBE_ProcessNoLossTemporalLayersVP8) {
config_.networking_config.packet_loss_probability = 0;
// Bitrate and frame rate profile.
RateProfile rate_profile;
@@ -960,9 +944,7 @@ TEST_F(VideoProcessorIntegrationTest,
RateControlMetrics rc_metrics[2];
SetRateControlMetrics(rc_metrics, 0, 0, 20, 30, 10, 10, 0, 1);
SetRateControlMetrics(rc_metrics, 1, 0, 0, 30, 15, 10, 0, 0);
- ProcessFramesAndVerify(quality_metrics,
- rate_profile,
- process_settings,
+ ProcessFramesAndVerify(quality_metrics, rate_profile, process_settings,
rc_metrics);
}
} // namespace webrtc
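gtest_disable.h and its DISABLED_ON_ANDROID() wrapper are gone from the include list; each test now defines a MAYBE_ alias so that on Android the test is compiled under a DISABLED_ prefix, which gtest registers but skips unless --gtest_also_run_disabled_tests is passed. The pattern on its own:

    #include "testing/gtest/include/gtest/gtest.h"

    #if defined(WEBRTC_ANDROID)
    #define MAYBE_SlowTest DISABLED_SlowTest
    #else
    #define MAYBE_SlowTest SlowTest
    #endif

    TEST(ExampleSuite, MAYBE_SlowTest) {
      // Runs everywhere except Android, where it builds but is skipped.
      EXPECT_EQ(4, 2 + 2);
    }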
diff --git a/webrtc/modules/video_coding/codecs/test/videoprocessor_unittest.cc b/webrtc/modules/video_coding/codecs/test/videoprocessor_unittest.cc
index 88b5467f1f..148d8dc74a 100644
--- a/webrtc/modules/video_coding/codecs/test/videoprocessor_unittest.cc
+++ b/webrtc/modules/video_coding/codecs/test/videoprocessor_unittest.cc
@@ -10,10 +10,10 @@
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h"
+#include "webrtc/modules/video_coding/include/mock/mock_video_codec_interface.h"
#include "webrtc/modules/video_coding/codecs/test/mock/mock_packet_manipulator.h"
#include "webrtc/modules/video_coding/codecs/test/videoprocessor.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding.h"
+#include "webrtc/modules/video_coding/include/video_coding.h"
#include "webrtc/test/testsupport/mock/mock_frame_reader.h"
#include "webrtc/test/testsupport/mock/mock_frame_writer.h"
#include "webrtc/test/testsupport/packet_reader.h"
@@ -29,7 +29,7 @@ namespace test {
// Very basic testing for VideoProcessor. It's mostly tested by running the
// video_quality_measurement program.
-class VideoProcessorTest: public testing::Test {
+class VideoProcessorTest : public testing::Test {
protected:
MockVideoEncoder encoder_mock_;
MockVideoDecoder decoder_mock_;
@@ -53,44 +53,34 @@ class VideoProcessorTest: public testing::Test {
void TearDown() {}
void ExpectInit() {
- EXPECT_CALL(encoder_mock_, InitEncode(_, _, _))
- .Times(1);
+ EXPECT_CALL(encoder_mock_, InitEncode(_, _, _)).Times(1);
EXPECT_CALL(encoder_mock_, RegisterEncodeCompleteCallback(_))
- .Times(AtLeast(1));
- EXPECT_CALL(decoder_mock_, InitDecode(_, _))
- .Times(1);
+ .Times(AtLeast(1));
+ EXPECT_CALL(decoder_mock_, InitDecode(_, _)).Times(1);
EXPECT_CALL(decoder_mock_, RegisterDecodeCompleteCallback(_))
- .Times(AtLeast(1));
- EXPECT_CALL(frame_reader_mock_, NumberOfFrames())
- .WillOnce(Return(1));
- EXPECT_CALL(frame_reader_mock_, FrameLength())
- .WillOnce(Return(152064));
+ .Times(AtLeast(1));
+ EXPECT_CALL(frame_reader_mock_, NumberOfFrames()).WillOnce(Return(1));
+ EXPECT_CALL(frame_reader_mock_, FrameLength()).WillOnce(Return(152064));
}
};
TEST_F(VideoProcessorTest, Init) {
ExpectInit();
- VideoProcessorImpl video_processor(&encoder_mock_, &decoder_mock_,
- &frame_reader_mock_,
- &frame_writer_mock_,
- &packet_manipulator_mock_, config_,
- &stats_);
+ VideoProcessorImpl video_processor(
+ &encoder_mock_, &decoder_mock_, &frame_reader_mock_, &frame_writer_mock_,
+ &packet_manipulator_mock_, config_, &stats_);
ASSERT_TRUE(video_processor.Init());
}
TEST_F(VideoProcessorTest, ProcessFrame) {
ExpectInit();
- EXPECT_CALL(encoder_mock_, Encode(_, _, _))
- .Times(1);
- EXPECT_CALL(frame_reader_mock_, ReadFrame(_))
- .WillOnce(Return(true));
+ EXPECT_CALL(encoder_mock_, Encode(_, _, _)).Times(1);
+ EXPECT_CALL(frame_reader_mock_, ReadFrame(_)).WillOnce(Return(true));
// Since we don't return any callback from the mock, the decoder will not
// be more than initialized...
- VideoProcessorImpl video_processor(&encoder_mock_, &decoder_mock_,
- &frame_reader_mock_,
- &frame_writer_mock_,
- &packet_manipulator_mock_, config_,
- &stats_);
+ VideoProcessorImpl video_processor(
+ &encoder_mock_, &decoder_mock_, &frame_reader_mock_, &frame_writer_mock_,
+ &packet_manipulator_mock_, config_, &stats_);
ASSERT_TRUE(video_processor.Init());
video_processor.ProcessFrame(0);
}
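The reflowed expectations are behavior-neutral: EXPECT_CALL(...).Times(1) still fails the test if the call count differs, and .WillOnce(Return(v)) still both expects one call and scripts its return value. A self-contained reminder with a hypothetical mock (the FrameSource interface is invented for the example):

    #include "testing/gmock/include/gmock/gmock.h"
    #include "testing/gtest/include/gtest/gtest.h"

    using ::testing::_;
    using ::testing::Return;

    class FrameSource {  // hypothetical interface, for illustration only
     public:
      virtual ~FrameSource() {}
      virtual int NumberOfFrames() = 0;
      virtual bool ReadFrame(void* buffer) = 0;
    };

    class MockFrameSource : public FrameSource {
     public:
      MOCK_METHOD0(NumberOfFrames, int());
      MOCK_METHOD1(ReadFrame, bool(void* buffer));
    };

    TEST(MockStyle, OneLineExpectations) {
      MockFrameSource source;
      EXPECT_CALL(source, NumberOfFrames()).WillOnce(Return(1));
      EXPECT_CALL(source, ReadFrame(_)).Times(1);
      EXPECT_EQ(1, source.NumberOfFrames());
      source.ReadFrame(nullptr);
    }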
diff --git a/webrtc/modules/video_coding/codecs/tools/video_quality_measurement.cc b/webrtc/modules/video_coding/codecs/tools/video_quality_measurement.cc
index 22be5a83cc..37fad483f7 100644
--- a/webrtc/modules/video_coding/codecs/tools/video_quality_measurement.cc
+++ b/webrtc/modules/video_coding/codecs/tools/video_quality_measurement.cc
@@ -16,7 +16,7 @@
#include <sys/stat.h> // To check for directory existence.
#ifndef S_ISDIR // Not defined in stat.h on Windows.
-#define S_ISDIR(mode) (((mode) & S_IFMT) == S_IFDIR)
+#define S_ISDIR(mode) (((mode)&S_IFMT) == S_IFDIR)
#endif
#include "gflags/gflags.h"
@@ -26,7 +26,7 @@
#include "webrtc/modules/video_coding/codecs/test/stats.h"
#include "webrtc/modules/video_coding/codecs/test/videoprocessor.h"
#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding.h"
+#include "webrtc/modules/video_coding/include/video_coding.h"
#include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/test/testsupport/frame_reader.h"
#include "webrtc/test/testsupport/frame_writer.h"
@@ -34,68 +34,102 @@
#include "webrtc/test/testsupport/packet_reader.h"
DEFINE_string(test_name, "Quality test", "The name of the test to run. ");
-DEFINE_string(test_description, "", "A more detailed description about what "
+DEFINE_string(test_description,
+ "",
+ "A more detailed description about what "
"the current test is about.");
-DEFINE_string(input_filename, "", "Input file. "
+DEFINE_string(input_filename,
+ "",
+ "Input file. "
"The source video file to be encoded and decoded. Must be in "
".yuv format");
DEFINE_int32(width, -1, "Width in pixels of the frames in the input file.");
DEFINE_int32(height, -1, "Height in pixels of the frames in the input file.");
-DEFINE_int32(framerate, 30, "Frame rate of the input file, in FPS "
+DEFINE_int32(framerate,
+ 30,
+ "Frame rate of the input file, in FPS "
"(frames-per-second). ");
-DEFINE_string(output_dir, ".", "Output directory. "
+DEFINE_string(output_dir,
+ ".",
+ "Output directory. "
"The directory where the output file will be put. Must already "
"exist.");
-DEFINE_bool(use_single_core, false, "Force using a single core. If set to "
+DEFINE_bool(use_single_core,
+ false,
+ "Force using a single core. If set to "
"true, only one core will be used for processing. Using a single "
"core is necessary to get a deterministic behavior for the"
"encoded frames - using multiple cores will produce different "
"encoded frames since multiple cores are competing to consume the "
"byte budget for each frame in parallel. If set to false, "
"the maximum detected number of cores will be used. ");
-DEFINE_bool(disable_fixed_random_seed , false, "Set this flag to disable the"
+DEFINE_bool(disable_fixed_random_seed,
+ false,
+ "Set this flag to disable the"
"usage of a fixed random seed for the random generator used "
"for packet loss. Disabling this will cause consecutive runs "
"loose packets at different locations, which is bad for "
"reproducibility.");
-DEFINE_string(output_filename, "", "Output file. "
+DEFINE_string(output_filename,
+ "",
+ "Output file. "
"The name of the output video file resulting of the processing "
"of the source file. By default this is the same name as the "
"input file with '_out' appended before the extension.");
DEFINE_int32(bitrate, 500, "Bit rate in kilobits/second.");
-DEFINE_int32(keyframe_interval, 0, "Forces a keyframe every Nth frame. "
+DEFINE_int32(keyframe_interval,
+ 0,
+ "Forces a keyframe every Nth frame. "
"0 means the encoder decides when to insert keyframes. Note that "
"the encoder may create a keyframe in other locations in addition "
"to the interval that is set using this parameter.");
-DEFINE_int32(temporal_layers, 0, "The number of temporal layers to use "
+DEFINE_int32(temporal_layers,
+ 0,
+ "The number of temporal layers to use "
"(VP8 specific codec setting). Must be 0-4.");
-DEFINE_int32(packet_size, 1500, "Simulated network packet size in bytes (MTU). "
+DEFINE_int32(packet_size,
+ 1500,
+ "Simulated network packet size in bytes (MTU). "
"Used for packet loss simulation.");
-DEFINE_int32(max_payload_size, 1440, "Max payload size in bytes for the "
+DEFINE_int32(max_payload_size,
+ 1440,
+ "Max payload size in bytes for the "
"encoder.");
-DEFINE_string(packet_loss_mode, "uniform", "Packet loss mode. Two different "
+DEFINE_string(packet_loss_mode,
+ "uniform",
+ "Packet loss mode. Two different "
"packet loss models are supported: uniform or burst. This "
"setting has no effect unless packet_loss_rate is >0. ");
-DEFINE_double(packet_loss_probability, 0.0, "Packet loss probability. A value "
+DEFINE_double(packet_loss_probability,
+ 0.0,
+ "Packet loss probability. A value "
"between 0.0 and 1.0 that defines the probability of a packet "
"being lost. 0.1 means 10% and so on.");
-DEFINE_int32(packet_loss_burst_length, 1, "Packet loss burst length. Defines "
+DEFINE_int32(packet_loss_burst_length,
+ 1,
+ "Packet loss burst length. Defines "
"how many packets will be lost in a burst when a packet has been "
"decided to be lost. Must be >=1.");
-DEFINE_bool(csv, false, "CSV output. Enabling this will output all frame "
+DEFINE_bool(csv,
+ false,
+ "CSV output. Enabling this will output all frame "
"statistics at the end of execution. Recommended to run combined "
"with --noverbose to avoid mixing output.");
-DEFINE_bool(python, false, "Python output. Enabling this will output all frame "
+DEFINE_bool(python,
+ false,
+ "Python output. Enabling this will output all frame "
"statistics as a Python script at the end of execution. "
"Recommended to run combine with --noverbose to avoid mixing "
"output.");
-DEFINE_bool(verbose, true, "Verbose mode. Prints a lot of debugging info. "
+DEFINE_bool(verbose,
+ true,
+ "Verbose mode. Prints a lot of debugging info. "
"Suitable for tracking progress but not for capturing output. "
"Disable with --noverbose flag.");
// Custom log method that only prints if the verbose flag is given.
// Supports all the standard printf parameters and formatting (just forwarded).
-int Log(const char *format, ...) {
+int Log(const char* format, ...) {
int result = 0;
if (FLAGS_verbose) {
va_list args;
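The hunk context stops inside Log(); for reference, a plausible completion, assuming the body simply forwards to vprintf as the comment above states (a sketch, not the patched source):

#include <cstdarg>
#include <cstdio>

static bool g_verbose = true;  // Stand-in for FLAGS_verbose.

// Prints only in verbose mode; forwards all printf-style arguments.
int Log(const char* format, ...) {
  int result = 0;
  if (g_verbose) {
    va_list args;
    va_start(args, format);
    result = vprintf(format, args);
    va_end(args);
  }
  return result;
}

int main() {
  return Log("Processed %d frames\n", 300) > 0 ? 0 : 1;
}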
@@ -132,9 +166,9 @@ int HandleCommandLineFlags(webrtc::test::TestConfig* config) {
// Verify the output dir exists.
struct stat dir_info;
if (!(stat(FLAGS_output_dir.c_str(), &dir_info) == 0 &&
- S_ISDIR(dir_info.st_mode))) {
+ S_ISDIR(dir_info.st_mode))) {
fprintf(stderr, "Cannot find output directory: %s\n",
- FLAGS_output_dir.c_str());
+ FLAGS_output_dir.c_str());
return 3;
}
config->output_dir = FLAGS_output_dir;
@@ -148,16 +182,16 @@ int HandleCommandLineFlags(webrtc::test::TestConfig* config) {
startIndex = 0;
}
FLAGS_output_filename =
- FLAGS_input_filename.substr(startIndex,
- FLAGS_input_filename.find_last_of(".")
- - startIndex) + "_out.yuv";
+ FLAGS_input_filename.substr(
+ startIndex, FLAGS_input_filename.find_last_of(".") - startIndex) +
+ "_out.yuv";
}
// Verify output file can be written.
if (FLAGS_output_dir == ".") {
config->output_filename = FLAGS_output_filename;
} else {
- config->output_filename = FLAGS_output_dir + "/"+ FLAGS_output_filename;
+ config->output_filename = FLAGS_output_dir + "/" + FLAGS_output_filename;
}
test_file = fopen(config->output_filename.c_str(), "wb");
if (test_file == NULL) {
@@ -232,27 +266,32 @@ int HandleCommandLineFlags(webrtc::test::TestConfig* config) {
// Check packet loss settings
if (FLAGS_packet_loss_mode != "uniform" &&
FLAGS_packet_loss_mode != "burst") {
- fprintf(stderr, "Unsupported packet loss mode, must be 'uniform' or "
+ fprintf(stderr,
+ "Unsupported packet loss mode, must be 'uniform' or "
"'burst'\n.");
return 10;
}
config->networking_config.packet_loss_mode = webrtc::test::kUniform;
if (FLAGS_packet_loss_mode == "burst") {
- config->networking_config.packet_loss_mode = webrtc::test::kBurst;
+ config->networking_config.packet_loss_mode = webrtc::test::kBurst;
}
if (FLAGS_packet_loss_probability < 0.0 ||
FLAGS_packet_loss_probability > 1.0) {
- fprintf(stderr, "Invalid packet loss probability. Must be 0.0 - 1.0, "
- "was: %f\n", FLAGS_packet_loss_probability);
+ fprintf(stderr,
+ "Invalid packet loss probability. Must be 0.0 - 1.0, "
+ "was: %f\n",
+ FLAGS_packet_loss_probability);
return 11;
}
config->networking_config.packet_loss_probability =
FLAGS_packet_loss_probability;
if (FLAGS_packet_loss_burst_length < 1) {
- fprintf(stderr, "Invalid packet loss burst length, must be >=1, "
- "was: %d\n", FLAGS_packet_loss_burst_length);
+ fprintf(stderr,
+ "Invalid packet loss burst length, must be >=1, "
+ "was: %d\n",
+ FLAGS_packet_loss_burst_length);
return 12;
}
config->networking_config.packet_loss_burst_length =
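The two loss models validated above can be sketched as follows (illustrative; the real PacketManipulator lives elsewhere in the tree and its exact interface is not shown in this patch):

#include <cstdio>
#include <random>

// Uniform: each packet is lost independently with probability p.
// Burst: a triggered loss drops burst_length consecutive packets.
struct LossSketch {
  double p = 0.1;
  int burst_length = 3;
  int remaining = 0;      // Packets left to drop in the current burst.
  std::mt19937 rng{42};   // Fixed seed -> reproducible, as the flag says.

  bool DropUniform() {
    return std::uniform_real_distribution<double>(0.0, 1.0)(rng) < p;
  }
  bool DropBurst() {
    if (remaining > 0) { --remaining; return true; }
    if (!DropUniform()) return false;
    remaining = burst_length - 1;  // This packet counts as the first loss.
    return true;
  }
};

int main() {
  LossSketch s;
  for (int i = 0; i < 20; ++i) std::printf("%d", s.DropBurst() ? 1 : 0);
  std::printf("\n");
}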
@@ -264,10 +303,9 @@ int HandleCommandLineFlags(webrtc::test::TestConfig* config) {
void CalculateSsimVideoMetrics(webrtc::test::TestConfig* config,
webrtc::test::QualityMetricsResult* result) {
Log("Calculating SSIM...\n");
- I420SSIMFromFiles(config->input_filename.c_str(),
- config->output_filename.c_str(),
- config->codec_settings->width,
- config->codec_settings->height, result);
+ I420SSIMFromFiles(
+ config->input_filename.c_str(), config->output_filename.c_str(),
+ config->codec_settings->width, config->codec_settings->height, result);
Log(" Average: %3.2f\n", result->average);
Log(" Min : %3.2f (frame %d)\n", result->min, result->min_frame_number);
Log(" Max : %3.2f (frame %d)\n", result->max, result->max_frame_number);
@@ -276,10 +314,9 @@ void CalculateSsimVideoMetrics(webrtc::test::TestConfig* config,
void CalculatePsnrVideoMetrics(webrtc::test::TestConfig* config,
webrtc::test::QualityMetricsResult* result) {
Log("Calculating PSNR...\n");
- I420PSNRFromFiles(config->input_filename.c_str(),
- config->output_filename.c_str(),
- config->codec_settings->width,
- config->codec_settings->height, result);
+ I420PSNRFromFiles(
+ config->input_filename.c_str(), config->output_filename.c_str(),
+ config->codec_settings->width, config->codec_settings->height, result);
Log(" Average: %3.2f\n", result->average);
Log(" Min : %3.2f (frame %d)\n", result->min, result->min_frame_number);
Log(" Max : %3.2f (frame %d)\n", result->max, result->max_frame_number);
@@ -309,9 +346,11 @@ void PrintConfigurationSummary(const webrtc::test::TestConfig& config) {
void PrintCsvOutput(const webrtc::test::Stats& stats,
const webrtc::test::QualityMetricsResult& ssim_result,
const webrtc::test::QualityMetricsResult& psnr_result) {
- Log("\nCSV output (recommended to run with --noverbose to skip the "
- "above output)\n");
- printf("frame_number encoding_successful decoding_successful "
+ Log(
+ "\nCSV output (recommended to run with --noverbose to skip the "
+ "above output)\n");
+ printf(
+ "frame_number encoding_successful decoding_successful "
"encode_return_code decode_return_code "
"encode_time_in_us decode_time_in_us "
"bit_rate_in_kbps encoded_frame_length_in_bytes frame_type "
@@ -322,22 +361,13 @@ void PrintCsvOutput(const webrtc::test::Stats& stats,
const webrtc::test::FrameStatistic& f = stats.stats_[i];
const webrtc::test::FrameResult& ssim = ssim_result.frames[i];
const webrtc::test::FrameResult& psnr = psnr_result.frames[i];
- printf("%4d, %d, %d, %2d, %2d, %6d, %6d, %5d, %7" PRIuS ", %d, %2d, %2"
- PRIuS ", %5.3f, %5.2f\n",
- f.frame_number,
- f.encoding_successful,
- f.decoding_successful,
- f.encode_return_code,
- f.decode_return_code,
- f.encode_time_in_us,
- f.decode_time_in_us,
- f.bit_rate_in_kbps,
- f.encoded_frame_length_in_bytes,
- f.frame_type,
- f.packets_dropped,
- f.total_packets,
- ssim.value,
- psnr.value);
+ printf("%4d, %d, %d, %2d, %2d, %6d, %6d, %5d, %7" PRIuS
+ ", %d, %2d, %2" PRIuS ", %5.3f, %5.2f\n",
+ f.frame_number, f.encoding_successful, f.decoding_successful,
+ f.encode_return_code, f.decode_return_code, f.encode_time_in_us,
+ f.decode_time_in_us, f.bit_rate_in_kbps,
+ f.encoded_frame_length_in_bytes, f.frame_type, f.packets_dropped,
+ f.total_packets, ssim.value, psnr.value);
}
}
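PRIuS in the format strings above is WebRTC's printf length macro for size_t, in the spirit of the <cinttypes> PRI* macros; clang-format splits the string literals around it, and adjacent literals concatenate back into a single format string. A stand-alone illustration (the fallback definition here is an assumption; the real macro comes from WebRTC's own headers):

#include <cstddef>
#include <cstdio>

#ifndef PRIuS
#define PRIuS "zu"  // Assumed fallback; platform-specific in WebRTC itself.
#endif

int main() {
  size_t encoded_length = 1440;
  // The macro can sit mid-format because adjacent literals concatenate.
  std::printf("encoded_frame_length_in_bytes: %" PRIuS "\n", encoded_length);
}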
@@ -345,91 +375,85 @@ void PrintPythonOutput(const webrtc::test::TestConfig& config,
const webrtc::test::Stats& stats,
const webrtc::test::QualityMetricsResult& ssim_result,
const webrtc::test::QualityMetricsResult& psnr_result) {
- Log("\nPython output (recommended to run with --noverbose to skip the "
- "above output)\n");
- printf("test_configuration = ["
- "{'name': 'name', 'value': '%s'},\n"
- "{'name': 'description', 'value': '%s'},\n"
- "{'name': 'test_number', 'value': '%d'},\n"
- "{'name': 'input_filename', 'value': '%s'},\n"
- "{'name': 'output_filename', 'value': '%s'},\n"
- "{'name': 'output_dir', 'value': '%s'},\n"
- "{'name': 'packet_size_in_bytes', 'value': '%" PRIuS "'},\n"
- "{'name': 'max_payload_size_in_bytes', 'value': '%" PRIuS "'},\n"
- "{'name': 'packet_loss_mode', 'value': '%s'},\n"
- "{'name': 'packet_loss_probability', 'value': '%f'},\n"
- "{'name': 'packet_loss_burst_length', 'value': '%d'},\n"
- "{'name': 'exclude_frame_types', 'value': '%s'},\n"
- "{'name': 'frame_length_in_bytes', 'value': '%" PRIuS "'},\n"
- "{'name': 'use_single_core', 'value': '%s'},\n"
- "{'name': 'keyframe_interval;', 'value': '%d'},\n"
- "{'name': 'video_codec_type', 'value': '%s'},\n"
- "{'name': 'width', 'value': '%d'},\n"
- "{'name': 'height', 'value': '%d'},\n"
- "{'name': 'bit_rate_in_kbps', 'value': '%d'},\n"
- "]\n",
- config.name.c_str(),
- config.description.c_str(),
- config.test_number,
- config.input_filename.c_str(),
- config.output_filename.c_str(),
- config.output_dir.c_str(),
- config.networking_config.packet_size_in_bytes,
- config.networking_config.max_payload_size_in_bytes,
- PacketLossModeToStr(config.networking_config.packet_loss_mode),
- config.networking_config.packet_loss_probability,
- config.networking_config.packet_loss_burst_length,
- ExcludeFrameTypesToStr(config.exclude_frame_types),
- config.frame_length_in_bytes,
- config.use_single_core ? "True " : "False",
- config.keyframe_interval,
- webrtc::test::VideoCodecTypeToStr(config.codec_settings->codecType),
- config.codec_settings->width,
- config.codec_settings->height,
- config.codec_settings->startBitrate);
- printf("frame_data_types = {"
- "'frame_number': ('number', 'Frame number'),\n"
- "'encoding_successful': ('boolean', 'Encoding successful?'),\n"
- "'decoding_successful': ('boolean', 'Decoding successful?'),\n"
- "'encode_time': ('number', 'Encode time (us)'),\n"
- "'decode_time': ('number', 'Decode time (us)'),\n"
- "'encode_return_code': ('number', 'Encode return code'),\n"
- "'decode_return_code': ('number', 'Decode return code'),\n"
- "'bit_rate': ('number', 'Bit rate (kbps)'),\n"
- "'encoded_frame_length': "
- "('number', 'Encoded frame length (bytes)'),\n"
- "'frame_type': ('string', 'Frame type'),\n"
- "'packets_dropped': ('number', 'Packets dropped'),\n"
- "'total_packets': ('number', 'Total packets'),\n"
- "'ssim': ('number', 'SSIM'),\n"
- "'psnr': ('number', 'PSNR (dB)'),\n"
- "}\n");
+ Log(
+ "\nPython output (recommended to run with --noverbose to skip the "
+ "above output)\n");
+ printf(
+ "test_configuration = ["
+ "{'name': 'name', 'value': '%s'},\n"
+ "{'name': 'description', 'value': '%s'},\n"
+ "{'name': 'test_number', 'value': '%d'},\n"
+ "{'name': 'input_filename', 'value': '%s'},\n"
+ "{'name': 'output_filename', 'value': '%s'},\n"
+ "{'name': 'output_dir', 'value': '%s'},\n"
+ "{'name': 'packet_size_in_bytes', 'value': '%" PRIuS
+ "'},\n"
+ "{'name': 'max_payload_size_in_bytes', 'value': '%" PRIuS
+ "'},\n"
+ "{'name': 'packet_loss_mode', 'value': '%s'},\n"
+ "{'name': 'packet_loss_probability', 'value': '%f'},\n"
+ "{'name': 'packet_loss_burst_length', 'value': '%d'},\n"
+ "{'name': 'exclude_frame_types', 'value': '%s'},\n"
+ "{'name': 'frame_length_in_bytes', 'value': '%" PRIuS
+ "'},\n"
+ "{'name': 'use_single_core', 'value': '%s'},\n"
+ "{'name': 'keyframe_interval;', 'value': '%d'},\n"
+ "{'name': 'video_codec_type', 'value': '%s'},\n"
+ "{'name': 'width', 'value': '%d'},\n"
+ "{'name': 'height', 'value': '%d'},\n"
+ "{'name': 'bit_rate_in_kbps', 'value': '%d'},\n"
+ "]\n",
+ config.name.c_str(), config.description.c_str(), config.test_number,
+ config.input_filename.c_str(), config.output_filename.c_str(),
+ config.output_dir.c_str(), config.networking_config.packet_size_in_bytes,
+ config.networking_config.max_payload_size_in_bytes,
+ PacketLossModeToStr(config.networking_config.packet_loss_mode),
+ config.networking_config.packet_loss_probability,
+ config.networking_config.packet_loss_burst_length,
+ ExcludeFrameTypesToStr(config.exclude_frame_types),
+ config.frame_length_in_bytes, config.use_single_core ? "True " : "False",
+ config.keyframe_interval,
+ webrtc::test::VideoCodecTypeToStr(config.codec_settings->codecType),
+ config.codec_settings->width, config.codec_settings->height,
+ config.codec_settings->startBitrate);
+ printf(
+ "frame_data_types = {"
+ "'frame_number': ('number', 'Frame number'),\n"
+ "'encoding_successful': ('boolean', 'Encoding successful?'),\n"
+ "'decoding_successful': ('boolean', 'Decoding successful?'),\n"
+ "'encode_time': ('number', 'Encode time (us)'),\n"
+ "'decode_time': ('number', 'Decode time (us)'),\n"
+ "'encode_return_code': ('number', 'Encode return code'),\n"
+ "'decode_return_code': ('number', 'Decode return code'),\n"
+ "'bit_rate': ('number', 'Bit rate (kbps)'),\n"
+ "'encoded_frame_length': "
+ "('number', 'Encoded frame length (bytes)'),\n"
+ "'frame_type': ('string', 'Frame type'),\n"
+ "'packets_dropped': ('number', 'Packets dropped'),\n"
+ "'total_packets': ('number', 'Total packets'),\n"
+ "'ssim': ('number', 'SSIM'),\n"
+ "'psnr': ('number', 'PSNR (dB)'),\n"
+ "}\n");
printf("frame_data = [");
for (unsigned int i = 0; i < stats.stats_.size(); ++i) {
const webrtc::test::FrameStatistic& f = stats.stats_[i];
const webrtc::test::FrameResult& ssim = ssim_result.frames[i];
const webrtc::test::FrameResult& psnr = psnr_result.frames[i];
- printf("{'frame_number': %d, "
- "'encoding_successful': %s, 'decoding_successful': %s, "
- "'encode_time': %d, 'decode_time': %d, "
- "'encode_return_code': %d, 'decode_return_code': %d, "
- "'bit_rate': %d, 'encoded_frame_length': %" PRIuS ", "
- "'frame_type': %s, 'packets_dropped': %d, "
- "'total_packets': %" PRIuS ", 'ssim': %f, 'psnr': %f},\n",
- f.frame_number,
- f.encoding_successful ? "True " : "False",
- f.decoding_successful ? "True " : "False",
- f.encode_time_in_us,
- f.decode_time_in_us,
- f.encode_return_code,
- f.decode_return_code,
- f.bit_rate_in_kbps,
- f.encoded_frame_length_in_bytes,
- f.frame_type == webrtc::kVideoFrameDelta ? "'Delta'" : "'Other'",
- f.packets_dropped,
- f.total_packets,
- ssim.value,
- psnr.value);
+ printf(
+ "{'frame_number': %d, "
+ "'encoding_successful': %s, 'decoding_successful': %s, "
+ "'encode_time': %d, 'decode_time': %d, "
+ "'encode_return_code': %d, 'decode_return_code': %d, "
+ "'bit_rate': %d, 'encoded_frame_length': %" PRIuS
+ ", "
+ "'frame_type': %s, 'packets_dropped': %d, "
+ "'total_packets': %" PRIuS ", 'ssim': %f, 'psnr': %f},\n",
+ f.frame_number, f.encoding_successful ? "True " : "False",
+ f.decoding_successful ? "True " : "False", f.encode_time_in_us,
+ f.decode_time_in_us, f.encode_return_code, f.decode_return_code,
+ f.bit_rate_in_kbps, f.encoded_frame_length_in_bytes,
+ f.frame_type == webrtc::kVideoFrameDelta ? "'Delta'" : "'Other'",
+ f.packets_dropped, f.total_packets, ssim.value, psnr.value);
}
printf("]\n");
}
@@ -438,10 +462,14 @@ void PrintPythonOutput(const webrtc::test::TestConfig& config,
// The input file must be in YUV format.
int main(int argc, char* argv[]) {
std::string program_name = argv[0];
- std::string usage = "Quality test application for video comparisons.\n"
- "Run " + program_name + " --helpshort for usage.\n"
- "Example usage:\n" + program_name +
- " --input_filename=filename.yuv --width=352 --height=288\n";
+ std::string usage =
+ "Quality test application for video comparisons.\n"
+ "Run " +
+ program_name +
+ " --helpshort for usage.\n"
+ "Example usage:\n" +
+ program_name +
+ " --input_filename=filename.yuv --width=352 --height=288\n";
google::SetUsageMessage(usage);
google::ParseCommandLineFlags(&argc, &argv, true);
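Matching the usage text built above, a typical invocation (binary name inferred from the source file name, input file hypothetical) would be:

  video_quality_measurement --input_filename=filename.yuv --width=352 --height=288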
@@ -478,10 +506,8 @@ int main(int argc, char* argv[]) {
packet_manipulator.InitializeRandomSeed(time(NULL));
}
webrtc::test::VideoProcessor* processor =
- new webrtc::test::VideoProcessorImpl(encoder, decoder,
- &frame_reader,
- &frame_writer,
- &packet_manipulator,
+ new webrtc::test::VideoProcessorImpl(encoder, decoder, &frame_reader,
+ &frame_writer, &packet_manipulator,
config, &stats);
processor->Init();
diff --git a/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.cc b/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.cc
index da6008ba3d..9226fa774c 100644
--- a/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.cc
@@ -13,8 +13,8 @@
#include <stdlib.h>
#include <string.h>
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/modules/video_coding/codecs/vp8/include/vp8_common_types.h"
#include "vpx/vpx_encoder.h"
@@ -41,7 +41,7 @@ int DefaultTemporalLayers::CurrentLayerId() const {
int index = pattern_idx_ % temporal_ids_length_;
assert(index >= 0);
return temporal_ids_[index];
- }
+}
bool DefaultTemporalLayers::ConfigureBitrates(int bitrateKbit,
int max_bitrate_kbit,
@@ -56,8 +56,7 @@ bool DefaultTemporalLayers::ConfigureBitrates(int bitrateKbit,
cfg->ts_periodicity = temporal_ids_length_;
cfg->ts_target_bitrate[0] = bitrateKbit;
cfg->ts_rate_decimator[0] = 1;
- memcpy(cfg->ts_layer_id,
- temporal_ids_,
+ memcpy(cfg->ts_layer_id, temporal_ids_,
sizeof(unsigned int) * temporal_ids_length_);
temporal_pattern_length_ = 1;
temporal_pattern_[0] = kTemporalUpdateLastRefAll;
@@ -74,8 +73,7 @@ bool DefaultTemporalLayers::ConfigureBitrates(int bitrateKbit,
cfg->ts_target_bitrate[1] = bitrateKbit;
cfg->ts_rate_decimator[0] = 2;
cfg->ts_rate_decimator[1] = 1;
- memcpy(cfg->ts_layer_id,
- temporal_ids_,
+ memcpy(cfg->ts_layer_id, temporal_ids_,
sizeof(unsigned int) * temporal_ids_length_);
temporal_pattern_length_ = 8;
temporal_pattern_[0] = kTemporalUpdateLastAndGoldenRefAltRef;
@@ -103,8 +101,7 @@ bool DefaultTemporalLayers::ConfigureBitrates(int bitrateKbit,
cfg->ts_rate_decimator[0] = 4;
cfg->ts_rate_decimator[1] = 2;
cfg->ts_rate_decimator[2] = 1;
- memcpy(cfg->ts_layer_id,
- temporal_ids_,
+ memcpy(cfg->ts_layer_id, temporal_ids_,
sizeof(unsigned int) * temporal_ids_length_);
temporal_pattern_length_ = 8;
temporal_pattern_[0] = kTemporalUpdateLastAndGoldenRefAltRef;
@@ -138,8 +135,7 @@ bool DefaultTemporalLayers::ConfigureBitrates(int bitrateKbit,
cfg->ts_rate_decimator[1] = 4;
cfg->ts_rate_decimator[2] = 2;
cfg->ts_rate_decimator[3] = 1;
- memcpy(cfg->ts_layer_id,
- temporal_ids_,
+ memcpy(cfg->ts_layer_id, temporal_ids_,
sizeof(unsigned int) * temporal_ids_length_);
temporal_pattern_length_ = 16;
temporal_pattern_[0] = kTemporalUpdateLast;
@@ -243,7 +239,7 @@ int DefaultTemporalLayers::EncodeFlags(uint32_t timestamp) {
void DefaultTemporalLayers::PopulateCodecSpecific(
bool base_layer_sync,
- CodecSpecificInfoVP8 *vp8_info,
+ CodecSpecificInfoVP8* vp8_info,
uint32_t timestamp) {
assert(number_of_temporal_layers_ > 0);
assert(0 < temporal_ids_length_);
@@ -254,8 +250,8 @@ void DefaultTemporalLayers::PopulateCodecSpecific(
vp8_info->tl0PicIdx = kNoTl0PicIdx;
} else {
if (base_layer_sync) {
- vp8_info->temporalIdx = 0;
- vp8_info->layerSync = true;
+ vp8_info->temporalIdx = 0;
+ vp8_info->layerSync = true;
} else {
vp8_info->temporalIdx = CurrentLayerId();
TemporalReferences temporal_reference =
@@ -267,7 +263,7 @@ void DefaultTemporalLayers::PopulateCodecSpecific(
kTemporalUpdateGoldenWithoutDependencyRefAltRef ||
temporal_reference == kTemporalUpdateNoneNoRefGoldenRefAltRef ||
(temporal_reference == kTemporalUpdateNone &&
- number_of_temporal_layers_ == 4)) {
+ number_of_temporal_layers_ == 4)) {
vp8_info->layerSync = true;
} else {
vp8_info->layerSync = false;
diff --git a/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers_unittest.cc b/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers_unittest.cc
index 34121cbcf6..461ba69a72 100644
--- a/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers_unittest.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers_unittest.cc
@@ -8,9 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.h"
#include "vpx/vpx_encoder.h"
@@ -19,47 +18,36 @@
namespace webrtc {
enum {
- kTemporalUpdateLast = VP8_EFLAG_NO_UPD_GF |
- VP8_EFLAG_NO_UPD_ARF |
+ kTemporalUpdateLast = VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF |
VP8_EFLAG_NO_REF_GF |
VP8_EFLAG_NO_REF_ARF,
- kTemporalUpdateGoldenWithoutDependency = VP8_EFLAG_NO_REF_GF |
- VP8_EFLAG_NO_REF_ARF |
- VP8_EFLAG_NO_UPD_ARF |
- VP8_EFLAG_NO_UPD_LAST,
- kTemporalUpdateGolden = VP8_EFLAG_NO_REF_ARF |
- VP8_EFLAG_NO_UPD_ARF |
- VP8_EFLAG_NO_UPD_LAST,
- kTemporalUpdateAltrefWithoutDependency = VP8_EFLAG_NO_REF_ARF |
- VP8_EFLAG_NO_REF_GF |
- VP8_EFLAG_NO_UPD_GF |
- VP8_EFLAG_NO_UPD_LAST,
- kTemporalUpdateAltref = VP8_EFLAG_NO_UPD_GF |
- VP8_EFLAG_NO_UPD_LAST,
- kTemporalUpdateNone = VP8_EFLAG_NO_UPD_GF |
- VP8_EFLAG_NO_UPD_ARF |
+ kTemporalUpdateGoldenWithoutDependency =
+ VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_ARF |
+ VP8_EFLAG_NO_UPD_LAST,
+ kTemporalUpdateGolden =
+ VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST,
+ kTemporalUpdateAltrefWithoutDependency =
+ VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_UPD_GF |
+ VP8_EFLAG_NO_UPD_LAST,
+ kTemporalUpdateAltref = VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_LAST,
+ kTemporalUpdateNone = VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF |
VP8_EFLAG_NO_UPD_LAST |
VP8_EFLAG_NO_UPD_ENTROPY,
- kTemporalUpdateNoneNoRefAltRef = VP8_EFLAG_NO_REF_ARF |
- VP8_EFLAG_NO_UPD_GF |
+ kTemporalUpdateNoneNoRefAltRef = VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_GF |
VP8_EFLAG_NO_UPD_ARF |
VP8_EFLAG_NO_UPD_LAST |
VP8_EFLAG_NO_UPD_ENTROPY,
- kTemporalUpdateNoneNoRefGolden = VP8_EFLAG_NO_REF_GF |
- VP8_EFLAG_NO_UPD_GF |
+ kTemporalUpdateNoneNoRefGolden = VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_UPD_GF |
VP8_EFLAG_NO_UPD_ARF |
VP8_EFLAG_NO_UPD_LAST |
VP8_EFLAG_NO_UPD_ENTROPY,
- kTemporalUpdateGoldenWithoutDependencyRefAltRef = VP8_EFLAG_NO_REF_GF |
- VP8_EFLAG_NO_UPD_ARF |
- VP8_EFLAG_NO_UPD_LAST,
- kTemporalUpdateGoldenRefAltRef = VP8_EFLAG_NO_UPD_ARF |
- VP8_EFLAG_NO_UPD_LAST,
- kTemporalUpdateLastRefAltRef = VP8_EFLAG_NO_UPD_GF |
- VP8_EFLAG_NO_UPD_ARF |
- VP8_EFLAG_NO_REF_GF,
- kTemporalUpdateLastAndGoldenRefAltRef = VP8_EFLAG_NO_UPD_ARF |
- VP8_EFLAG_NO_REF_GF,
+ kTemporalUpdateGoldenWithoutDependencyRefAltRef =
+ VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST,
+ kTemporalUpdateGoldenRefAltRef = VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST,
+ kTemporalUpdateLastRefAltRef =
+ VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_REF_GF,
+ kTemporalUpdateLastAndGoldenRefAltRef =
+ VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_REF_GF,
};
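The constants above compose libvpx per-frame encode flags by OR-ing VP8_EFLAG_* bits: NO_REF_* forbids reading a reference buffer, NO_UPD_* forbids writing one. A toy illustration of the composition and membership-test idiom (stand-in bit values; the real constants come from vpx/vp8cx.h):

#include <cstdio>

// Stand-in values for illustration only; include vpx/vp8cx.h in real code.
enum : int {
  VP8_EFLAG_NO_REF_GF  = 1 << 0,  // Don't read the golden frame.
  VP8_EFLAG_NO_REF_ARF = 1 << 1,  // Don't read the alt-ref frame.
  VP8_EFLAG_NO_UPD_GF  = 1 << 2,  // Don't write the golden frame.
  VP8_EFLAG_NO_UPD_ARF = 1 << 3,  // Don't write the alt-ref frame.
};

int main() {
  // "Update last only": neither read nor write golden/alt-ref.
  const int kTemporalUpdateLast = VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_REF_ARF |
                                  VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF;
  std::printf("golden excluded as reference: %s\n",
              (kTemporalUpdateLast & VP8_EFLAG_NO_REF_GF) ? "yes" : "no");
}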
TEST(TemporalLayersTest, 2Layers) {
@@ -68,29 +56,30 @@ TEST(TemporalLayersTest, 2Layers) {
CodecSpecificInfoVP8 vp8_info;
tl.ConfigureBitrates(500, 500, 30, &cfg);
- int expected_flags[16] = { kTemporalUpdateLastAndGoldenRefAltRef,
- kTemporalUpdateGoldenWithoutDependencyRefAltRef,
- kTemporalUpdateLastRefAltRef,
- kTemporalUpdateGoldenRefAltRef,
- kTemporalUpdateLastRefAltRef,
- kTemporalUpdateGoldenRefAltRef,
- kTemporalUpdateLastRefAltRef,
- kTemporalUpdateNone,
- kTemporalUpdateLastAndGoldenRefAltRef,
- kTemporalUpdateGoldenWithoutDependencyRefAltRef,
- kTemporalUpdateLastRefAltRef,
- kTemporalUpdateGoldenRefAltRef,
- kTemporalUpdateLastRefAltRef,
- kTemporalUpdateGoldenRefAltRef,
- kTemporalUpdateLastRefAltRef,
- kTemporalUpdateNone,
- };
- int expected_temporal_idx[16] =
- { 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1 };
-
- bool expected_layer_sync[16] =
- { false, true, false, false, false, false, false, false,
- false, true, false, false, false, false, false, false };
+ int expected_flags[16] = {
+ kTemporalUpdateLastAndGoldenRefAltRef,
+ kTemporalUpdateGoldenWithoutDependencyRefAltRef,
+ kTemporalUpdateLastRefAltRef,
+ kTemporalUpdateGoldenRefAltRef,
+ kTemporalUpdateLastRefAltRef,
+ kTemporalUpdateGoldenRefAltRef,
+ kTemporalUpdateLastRefAltRef,
+ kTemporalUpdateNone,
+ kTemporalUpdateLastAndGoldenRefAltRef,
+ kTemporalUpdateGoldenWithoutDependencyRefAltRef,
+ kTemporalUpdateLastRefAltRef,
+ kTemporalUpdateGoldenRefAltRef,
+ kTemporalUpdateLastRefAltRef,
+ kTemporalUpdateGoldenRefAltRef,
+ kTemporalUpdateLastRefAltRef,
+ kTemporalUpdateNone,
+ };
+ int expected_temporal_idx[16] = {0, 1, 0, 1, 0, 1, 0, 1,
+ 0, 1, 0, 1, 0, 1, 0, 1};
+
+ bool expected_layer_sync[16] = {false, true, false, false, false, false,
+ false, false, false, true, false, false,
+ false, false, false, false};
uint32_t timestamp = 0;
for (int i = 0; i < 16; ++i) {
@@ -108,29 +97,30 @@ TEST(TemporalLayersTest, 3Layers) {
CodecSpecificInfoVP8 vp8_info;
tl.ConfigureBitrates(500, 500, 30, &cfg);
- int expected_flags[16] = { kTemporalUpdateLastAndGoldenRefAltRef,
- kTemporalUpdateNoneNoRefGolden,
- kTemporalUpdateGoldenWithoutDependencyRefAltRef,
- kTemporalUpdateNone,
- kTemporalUpdateLastRefAltRef,
- kTemporalUpdateNone,
- kTemporalUpdateGoldenRefAltRef,
- kTemporalUpdateNone,
- kTemporalUpdateLastAndGoldenRefAltRef,
- kTemporalUpdateNoneNoRefGolden,
- kTemporalUpdateGoldenWithoutDependencyRefAltRef,
- kTemporalUpdateNone,
- kTemporalUpdateLastRefAltRef,
- kTemporalUpdateNone,
- kTemporalUpdateGoldenRefAltRef,
- kTemporalUpdateNone,
+ int expected_flags[16] = {
+ kTemporalUpdateLastAndGoldenRefAltRef,
+ kTemporalUpdateNoneNoRefGolden,
+ kTemporalUpdateGoldenWithoutDependencyRefAltRef,
+ kTemporalUpdateNone,
+ kTemporalUpdateLastRefAltRef,
+ kTemporalUpdateNone,
+ kTemporalUpdateGoldenRefAltRef,
+ kTemporalUpdateNone,
+ kTemporalUpdateLastAndGoldenRefAltRef,
+ kTemporalUpdateNoneNoRefGolden,
+ kTemporalUpdateGoldenWithoutDependencyRefAltRef,
+ kTemporalUpdateNone,
+ kTemporalUpdateLastRefAltRef,
+ kTemporalUpdateNone,
+ kTemporalUpdateGoldenRefAltRef,
+ kTemporalUpdateNone,
};
- int expected_temporal_idx[16] =
- { 0, 2, 1, 2, 0, 2, 1, 2, 0, 2, 1, 2, 0, 2, 1, 2 };
+ int expected_temporal_idx[16] = {0, 2, 1, 2, 0, 2, 1, 2,
+ 0, 2, 1, 2, 0, 2, 1, 2};
- bool expected_layer_sync[16] =
- { false, true, true, false, false, false, false, false,
- false, true, true, false, false, false, false, false };
+ bool expected_layer_sync[16] = {false, true, true, false, false, false,
+ false, false, false, true, true, false,
+ false, false, false, false};
unsigned int timestamp = 0;
for (int i = 0; i < 16; ++i) {
@@ -165,12 +155,12 @@ TEST(TemporalLayersTest, 4Layers) {
kTemporalUpdateAltref,
kTemporalUpdateNone,
};
- int expected_temporal_idx[16] =
- { 0, 3, 2, 3, 1, 3, 2, 3, 0, 3, 2, 3, 1, 3, 2, 3 };
+ int expected_temporal_idx[16] = {0, 3, 2, 3, 1, 3, 2, 3,
+ 0, 3, 2, 3, 1, 3, 2, 3};
- bool expected_layer_sync[16] =
- { false, true, true, true, true, true, false, true,
- false, true, false, true, false, true, false, true };
+ bool expected_layer_sync[16] = {false, true, true, true, true, true,
+ false, true, false, true, false, true,
+ false, true, false, true};
uint32_t timestamp = 0;
for (int i = 0; i < 16; ++i) {
@@ -198,8 +188,7 @@ TEST(TemporalLayersTest, KeyFrame) {
kTemporalUpdateGoldenRefAltRef,
kTemporalUpdateNone,
};
- int expected_temporal_idx[8] =
- { 0, 0, 0, 0, 0, 0, 0, 2};
+ int expected_temporal_idx[8] = {0, 0, 0, 0, 0, 0, 0, 2};
uint32_t timestamp = 0;
for (int i = 0; i < 7; ++i) {
diff --git a/webrtc/modules/video_coding/codecs/vp8/include/vp8.h b/webrtc/modules/video_coding/codecs/vp8/include/vp8.h
index f5dae471d2..dd3514235d 100644
--- a/webrtc/modules/video_coding/codecs/vp8/include/vp8.h
+++ b/webrtc/modules/video_coding/codecs/vp8/include/vp8.h
@@ -13,7 +13,7 @@
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_INCLUDE_VP8_H_
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_INCLUDE_VP8_H_
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
namespace webrtc {
@@ -21,16 +21,15 @@ class VP8Encoder : public VideoEncoder {
public:
static VP8Encoder* Create();
- virtual ~VP8Encoder() {};
+ virtual ~VP8Encoder() {}
}; // end of VP8Encoder class
-
class VP8Decoder : public VideoDecoder {
public:
static VP8Decoder* Create();
- virtual ~VP8Decoder() {};
+ virtual ~VP8Decoder() {}
}; // end of VP8Decoder class
} // namespace webrtc
-#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_INCLUDE_VP8_H_
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_INCLUDE_VP8_H_
diff --git a/webrtc/modules/video_coding/codecs/vp8/include/vp8_common_types.h b/webrtc/modules/video_coding/codecs/vp8/include/vp8_common_types.h
index c2cefdd94e..7a27e4429a 100644
--- a/webrtc/modules/video_coding/codecs/vp8/include/vp8_common_types.h
+++ b/webrtc/modules/video_coding/codecs/vp8/include/vp8_common_types.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_COMMON_TYPES_H_
-#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_COMMON_TYPES_H_
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_INCLUDE_VP8_COMMON_TYPES_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_INCLUDE_VP8_COMMON_TYPES_H_
#include "webrtc/common_types.h"
@@ -19,11 +19,11 @@ namespace webrtc {
// Values as required for the VP8 codec (accumulating).
static const float
kVp8LayerRateAlloction[kMaxTemporalStreams][kMaxTemporalStreams] = {
- {1.0f, 1.0f, 1.0f, 1.0f}, // 1 layer
- {0.6f, 1.0f, 1.0f, 1.0f}, // 2 layers {60%, 40%}
- {0.4f, 0.6f, 1.0f, 1.0f}, // 3 layers {40%, 20%, 40%}
- {0.25f, 0.4f, 0.6f, 1.0f} // 4 layers {25%, 15%, 20%, 40%}
+ {1.0f, 1.0f, 1.0f, 1.0f}, // 1 layer
+ {0.6f, 1.0f, 1.0f, 1.0f}, // 2 layers {60%, 40%}
+ {0.4f, 0.6f, 1.0f, 1.0f}, // 3 layers {40%, 20%, 40%}
+ {0.25f, 0.4f, 0.6f, 1.0f} // 4 layers {25%, 15%, 20%, 40%}
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_COMMON_TYPES_H_
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_INCLUDE_VP8_COMMON_TYPES_H_
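The table above is cumulative: within a row for n configured layers, entry i is the fraction of the total bitrate consumed by temporal layers 0..i, so the per-layer shares in the comments are adjacent differences. A quick check of that reading:

#include <cstdio>

static const float kRates[4][4] = {
    {1.0f, 1.0f, 1.0f, 1.0f},
    {0.6f, 1.0f, 1.0f, 1.0f},
    {0.4f, 0.6f, 1.0f, 1.0f},
    {0.25f, 0.4f, 0.6f, 1.0f},
};

int main() {
  const int layers = 4;  // Row 3 describes the 4-layer configuration.
  for (int i = 0; i < layers; ++i) {
    float share =
        kRates[layers - 1][i] - (i > 0 ? kRates[layers - 1][i - 1] : 0.0f);
    std::printf("layer %d: %.0f%%\n", i, share * 100);  // 25, 15, 20, 40.
  }
}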
diff --git a/webrtc/modules/video_coding/codecs/vp8/realtime_temporal_layers.cc b/webrtc/modules/video_coding/codecs/vp8/realtime_temporal_layers.cc
index 15b5af9200..d22601358f 100644
--- a/webrtc/modules/video_coding/codecs/vp8/realtime_temporal_layers.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/realtime_temporal_layers.cc
@@ -12,7 +12,7 @@
#include "vpx/vpx_encoder.h"
#include "vpx/vp8cx.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/modules/video_coding/codecs/vp8/include/vp8_common_types.h"
#include "webrtc/modules/video_coding/codecs/vp8/temporal_layers.h"
@@ -23,7 +23,8 @@ namespace webrtc {
namespace {
enum {
kTemporalUpdateLast = VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF |
- VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_REF_ARF,
+ VP8_EFLAG_NO_REF_GF |
+ VP8_EFLAG_NO_REF_ARF,
kTemporalUpdateGolden =
VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST,
@@ -37,13 +38,15 @@ enum {
kTemporalUpdateAltref | VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_REF_GF,
kTemporalUpdateNone = VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF |
- VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_ENTROPY,
+ VP8_EFLAG_NO_UPD_LAST |
+ VP8_EFLAG_NO_UPD_ENTROPY,
kTemporalUpdateNoneNoRefAltref = kTemporalUpdateNone | VP8_EFLAG_NO_REF_ARF,
kTemporalUpdateNoneNoRefGoldenRefAltRef =
VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF |
- VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_ENTROPY,
+ VP8_EFLAG_NO_UPD_LAST |
+ VP8_EFLAG_NO_UPD_ENTROPY,
kTemporalUpdateGoldenWithoutDependencyRefAltRef =
VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST,
@@ -133,12 +136,14 @@ class RealTimeTemporalLayers : public TemporalLayers {
layer_ids_length_ = sizeof(layer_ids) / sizeof(*layer_ids);
static const int encode_flags[] = {
- kTemporalUpdateLastAndGoldenRefAltRef,
- kTemporalUpdateGoldenWithoutDependencyRefAltRef,
- kTemporalUpdateLastRefAltRef, kTemporalUpdateGoldenRefAltRef,
- kTemporalUpdateLastRefAltRef, kTemporalUpdateGoldenRefAltRef,
- kTemporalUpdateLastRefAltRef, kTemporalUpdateNone
- };
+ kTemporalUpdateLastAndGoldenRefAltRef,
+ kTemporalUpdateGoldenWithoutDependencyRefAltRef,
+ kTemporalUpdateLastRefAltRef,
+ kTemporalUpdateGoldenRefAltRef,
+ kTemporalUpdateLastRefAltRef,
+ kTemporalUpdateGoldenRefAltRef,
+ kTemporalUpdateLastRefAltRef,
+ kTemporalUpdateNone};
encode_flags_length_ = sizeof(encode_flags) / sizeof(*layer_ids);
encode_flags_ = encode_flags;
@@ -153,12 +158,14 @@ class RealTimeTemporalLayers : public TemporalLayers {
layer_ids_length_ = sizeof(layer_ids) / sizeof(*layer_ids);
static const int encode_flags[] = {
- kTemporalUpdateLastAndGoldenRefAltRef,
- kTemporalUpdateNoneNoRefGoldenRefAltRef,
- kTemporalUpdateGoldenWithoutDependencyRefAltRef, kTemporalUpdateNone,
- kTemporalUpdateLastRefAltRef, kTemporalUpdateNone,
- kTemporalUpdateGoldenRefAltRef, kTemporalUpdateNone
- };
+ kTemporalUpdateLastAndGoldenRefAltRef,
+ kTemporalUpdateNoneNoRefGoldenRefAltRef,
+ kTemporalUpdateGoldenWithoutDependencyRefAltRef,
+ kTemporalUpdateNone,
+ kTemporalUpdateLastRefAltRef,
+ kTemporalUpdateNone,
+ kTemporalUpdateGoldenRefAltRef,
+ kTemporalUpdateNone};
encode_flags_length_ = sizeof(encode_flags) / sizeof(*layer_ids);
encode_flags_ = encode_flags;
@@ -172,8 +179,8 @@ class RealTimeTemporalLayers : public TemporalLayers {
assert(false);
return false;
}
- memcpy(
- cfg->ts_layer_id, layer_ids_, sizeof(unsigned int) * layer_ids_length_);
+ memcpy(cfg->ts_layer_id, layer_ids_,
+ sizeof(unsigned int) * layer_ids_length_);
return true;
}
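One detail the reformatting leaves untouched: encode_flags_length_ is computed as sizeof(encode_flags) / sizeof(*layer_ids) rather than sizeof(*encode_flags). Both element types are int, so the value happens to be correct, but the mismatched divisor is fragile. A function-template helper (a common alternative, sketched here) removes the divisor entirely:

#include <cstddef>
#include <cstdio>

// Deduces an array's element count at compile time; there is no divisor
// to get wrong, unlike sizeof(a) / sizeof(*b) with mismatched a and b.
template <typename T, size_t N>
constexpr size_t arraysize(const T (&)[N]) {
  return N;
}

int main() {
  static const int encode_flags[] = {1, 2, 3, 4, 5, 6, 7, 8};
  std::printf("%zu\n", arraysize(encode_flags));  // Prints 8.
}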
diff --git a/webrtc/modules/video_coding/codecs/vp8/reference_picture_selection.cc b/webrtc/modules/video_coding/codecs/vp8/reference_picture_selection.cc
index a922e35712..1838e32eb7 100644
--- a/webrtc/modules/video_coding/codecs/vp8/reference_picture_selection.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/reference_picture_selection.cc
@@ -25,8 +25,7 @@ ReferencePictureSelection::ReferencePictureSelection()
last_sent_ref_update_time_(0),
established_ref_picture_id_(0),
last_refresh_time_(0),
- rtt_(0) {
-}
+ rtt_(0) {}
void ReferencePictureSelection::Init() {
update_golden_next_ = true;
@@ -62,7 +61,8 @@ bool ReferencePictureSelection::ReceivedSLI(uint32_t now_ts) {
return send_refresh;
}
-int ReferencePictureSelection::EncodeFlags(int picture_id, bool send_refresh,
+int ReferencePictureSelection::EncodeFlags(int picture_id,
+ bool send_refresh,
uint32_t now_ts) {
int flags = 0;
// We can't refresh the decoder until we have established the key frame.
@@ -87,12 +87,12 @@ int ReferencePictureSelection::EncodeFlags(int picture_id, bool send_refresh,
received_ack_) {
flags |= VP8_EFLAG_NO_REF_LAST; // Don't reference the last frame.
if (update_golden_next_) {
- flags |= VP8_EFLAG_FORCE_GF; // Update the golden reference.
+ flags |= VP8_EFLAG_FORCE_GF; // Update the golden reference.
flags |= VP8_EFLAG_NO_UPD_ARF; // Don't update alt-ref.
- flags |= VP8_EFLAG_NO_REF_GF; // Don't reference the golden frame.
+ flags |= VP8_EFLAG_NO_REF_GF; // Don't reference the golden frame.
} else {
- flags |= VP8_EFLAG_FORCE_ARF; // Update the alt-ref reference.
- flags |= VP8_EFLAG_NO_UPD_GF; // Don't update the golden frame.
+ flags |= VP8_EFLAG_FORCE_ARF; // Update the alt-ref reference.
+ flags |= VP8_EFLAG_NO_UPD_GF; // Don't update the golden frame.
flags |= VP8_EFLAG_NO_REF_ARF; // Don't reference the alt-ref frame.
}
last_sent_ref_picture_id_ = picture_id;
@@ -103,9 +103,9 @@ int ReferencePictureSelection::EncodeFlags(int picture_id, bool send_refresh,
if (established_golden_)
flags |= VP8_EFLAG_NO_REF_ARF; // Don't reference the alt-ref frame.
else
- flags |= VP8_EFLAG_NO_REF_GF; // Don't reference the golden frame.
- flags |= VP8_EFLAG_NO_UPD_GF; // Don't update the golden frame.
- flags |= VP8_EFLAG_NO_UPD_ARF; // Don't update the alt-ref frame.
+ flags |= VP8_EFLAG_NO_REF_GF; // Don't reference the golden frame.
+ flags |= VP8_EFLAG_NO_UPD_GF; // Don't update the golden frame.
+ flags |= VP8_EFLAG_NO_UPD_ARF; // Don't update the alt-ref frame.
}
return flags;
}
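The comments above encode the RPS scheme: once a key frame is established, reference propagation alternates between the golden and alt-ref buffers, so one long-term buffer always holds the last acknowledged state. A toy model of the alternation (illustrative; the RTT and refresh-interval gating is omitted):

#include <cstdio>

int main() {
  bool update_golden_next = true;  // Mirrors update_golden_next_ above.
  for (int i = 0; i < 4; ++i) {
    std::printf("propagation %d via %s\n", i,
                update_golden_next ? "golden" : "alt-ref");
    update_golden_next = !update_golden_next;  // Flip after each propagation.
  }
}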
diff --git a/webrtc/modules/video_coding/codecs/vp8/reference_picture_selection_unittest.cc b/webrtc/modules/video_coding/codecs/vp8/reference_picture_selection_unittest.cc
index c6474e5bd1..742bb96e91 100644
--- a/webrtc/modules/video_coding/codecs/vp8/reference_picture_selection_unittest.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/reference_picture_selection_unittest.cc
@@ -22,25 +22,19 @@ static const uint32_t kMinUpdateInterval = 10;
// Should match the values set in reference_picture_selection.h
static const int kRtt = 10;
-static const int kNoPropagationGolden = VP8_EFLAG_NO_REF_ARF |
- VP8_EFLAG_NO_UPD_GF |
- VP8_EFLAG_NO_UPD_ARF;
-static const int kNoPropagationAltRef = VP8_EFLAG_NO_REF_GF |
- VP8_EFLAG_NO_UPD_GF |
- VP8_EFLAG_NO_UPD_ARF;
-static const int kPropagateGolden = VP8_EFLAG_FORCE_GF |
- VP8_EFLAG_NO_UPD_ARF |
- VP8_EFLAG_NO_REF_GF |
- VP8_EFLAG_NO_REF_LAST;
-static const int kPropagateAltRef = VP8_EFLAG_FORCE_ARF |
- VP8_EFLAG_NO_UPD_GF |
- VP8_EFLAG_NO_REF_ARF |
- VP8_EFLAG_NO_REF_LAST;
-static const int kRefreshFromGolden = VP8_EFLAG_NO_REF_LAST |
- VP8_EFLAG_NO_REF_ARF;
-static const int kRefreshFromAltRef = VP8_EFLAG_NO_REF_LAST |
- VP8_EFLAG_NO_REF_GF;
-
+static const int kNoPropagationGolden =
+ VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF;
+static const int kNoPropagationAltRef =
+ VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF;
+static const int kPropagateGolden = VP8_EFLAG_FORCE_GF | VP8_EFLAG_NO_UPD_ARF |
+ VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_REF_LAST;
+static const int kPropagateAltRef = VP8_EFLAG_FORCE_ARF | VP8_EFLAG_NO_UPD_GF |
+ VP8_EFLAG_NO_REF_ARF |
+ VP8_EFLAG_NO_REF_LAST;
+static const int kRefreshFromGolden =
+ VP8_EFLAG_NO_REF_LAST | VP8_EFLAG_NO_REF_ARF;
+static const int kRefreshFromAltRef =
+ VP8_EFLAG_NO_REF_LAST | VP8_EFLAG_NO_REF_GF;
class TestRPS : public ::testing::Test {
protected:
@@ -84,15 +78,15 @@ TEST_F(TestRPS, TestDecoderRefresh) {
EXPECT_EQ(rps_.ReceivedSLI(90 * time), true);
// Enough time have elapsed since the previous reference propagation, we will
// therefore get both a refresh from golden and a propagation of alt-ref.
- EXPECT_EQ(rps_.EncodeFlags(5, true, 90 * time), kRefreshFromGolden |
- kPropagateAltRef);
+ EXPECT_EQ(rps_.EncodeFlags(5, true, 90 * time),
+ kRefreshFromGolden | kPropagateAltRef);
rps_.ReceivedRPSI(5);
time += kRtt + 1;
// Enough time for a new refresh, but not enough time for a reference
// propagation.
EXPECT_EQ(rps_.ReceivedSLI(90 * time), true);
- EXPECT_EQ(rps_.EncodeFlags(6, true, 90 * time), kRefreshFromAltRef |
- kNoPropagationAltRef);
+ EXPECT_EQ(rps_.EncodeFlags(6, true, 90 * time),
+ kRefreshFromAltRef | kNoPropagationAltRef);
}
TEST_F(TestRPS, TestWrap) {
diff --git a/webrtc/modules/video_coding/codecs/vp8/screenshare_layers.cc b/webrtc/modules/video_coding/codecs/vp8/screenshare_layers.cc
index 0fbb2a6c40..536587a13e 100644
--- a/webrtc/modules/video_coding/codecs/vp8/screenshare_layers.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/screenshare_layers.cc
@@ -11,10 +11,12 @@
#include <stdlib.h>
+#include <algorithm>
+
#include "webrtc/base/checks.h"
#include "vpx/vpx_encoder.h"
#include "vpx/vp8cx.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
namespace webrtc {
@@ -188,7 +190,7 @@ void ScreenshareLayers::FrameEncoded(unsigned int size,
}
void ScreenshareLayers::PopulateCodecSpecific(bool base_layer_sync,
- CodecSpecificInfoVP8 *vp8_info,
+ CodecSpecificInfoVP8* vp8_info,
uint32_t timestamp) {
int64_t unwrapped_timestamp = time_wrap_handler_.Unwrap(timestamp);
if (number_of_temporal_layers_ == 1) {
diff --git a/webrtc/modules/video_coding/codecs/vp8/screenshare_layers.h b/webrtc/modules/video_coding/codecs/vp8/screenshare_layers.h
index 90a8b1b883..7628758209 100644
--- a/webrtc/modules/video_coding/codecs/vp8/screenshare_layers.h
+++ b/webrtc/modules/video_coding/codecs/vp8/screenshare_layers.h
@@ -15,7 +15,7 @@
#include "webrtc/base/timeutils.h"
#include "webrtc/modules/video_coding/codecs/vp8/temporal_layers.h"
-#include "webrtc/modules/video_coding/utility/include/frame_dropper.h"
+#include "webrtc/modules/video_coding/utility/frame_dropper.h"
#include "webrtc/typedefs.h"
namespace webrtc {
diff --git a/webrtc/modules/video_coding/codecs/vp8/screenshare_layers_unittest.cc b/webrtc/modules/video_coding/codecs/vp8/screenshare_layers_unittest.cc
index 628e336568..f31ed5e4d8 100644
--- a/webrtc/modules/video_coding/codecs/vp8/screenshare_layers_unittest.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/screenshare_layers_unittest.cc
@@ -12,9 +12,9 @@
#include "vpx/vpx_encoder.h"
#include "vpx/vp8cx.h"
#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/modules/video_coding/codecs/vp8/screenshare_layers.h"
-#include "webrtc/modules/video_coding/utility/include/mock/mock_frame_dropper.h"
+#include "webrtc/modules/video_coding/utility/mock/mock_frame_dropper.h"
using ::testing::_;
using ::testing::NiceMock;
diff --git a/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.cc b/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.cc
index 5dc4ac78f1..40e438f7e4 100644
--- a/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.cc
@@ -215,9 +215,7 @@ int SimulcastEncoderAdapter::InitEncode(const VideoCodec* inst,
}
VideoEncoder* encoder = factory_->Create();
- ret = encoder->InitEncode(&stream_codec,
- number_of_cores,
- max_payload_size);
+ ret = encoder->InitEncode(&stream_codec, number_of_cores, max_payload_size);
if (ret < 0) {
Release();
return ret;
@@ -284,35 +282,25 @@ int SimulcastEncoderAdapter::Encode(
// scale it to match what the encoder expects (below).
if ((dst_width == src_width && dst_height == src_height) ||
input_image.IsZeroSize()) {
- streaminfos_[stream_idx].encoder->Encode(input_image,
- codec_specific_info,
+ streaminfos_[stream_idx].encoder->Encode(input_image, codec_specific_info,
&stream_frame_types);
} else {
VideoFrame dst_frame;
// Making sure that destination frame is of sufficient size.
// Aligning stride values based on width.
- dst_frame.CreateEmptyFrame(dst_width, dst_height,
- dst_width, (dst_width + 1) / 2,
- (dst_width + 1) / 2);
- libyuv::I420Scale(input_image.buffer(kYPlane),
- input_image.stride(kYPlane),
- input_image.buffer(kUPlane),
- input_image.stride(kUPlane),
- input_image.buffer(kVPlane),
- input_image.stride(kVPlane),
- src_width, src_height,
- dst_frame.buffer(kYPlane),
- dst_frame.stride(kYPlane),
- dst_frame.buffer(kUPlane),
- dst_frame.stride(kUPlane),
- dst_frame.buffer(kVPlane),
- dst_frame.stride(kVPlane),
- dst_width, dst_height,
- libyuv::kFilterBilinear);
+ dst_frame.CreateEmptyFrame(dst_width, dst_height, dst_width,
+ (dst_width + 1) / 2, (dst_width + 1) / 2);
+ libyuv::I420Scale(
+ input_image.buffer(kYPlane), input_image.stride(kYPlane),
+ input_image.buffer(kUPlane), input_image.stride(kUPlane),
+ input_image.buffer(kVPlane), input_image.stride(kVPlane), src_width,
+ src_height, dst_frame.buffer(kYPlane), dst_frame.stride(kYPlane),
+ dst_frame.buffer(kUPlane), dst_frame.stride(kUPlane),
+ dst_frame.buffer(kVPlane), dst_frame.stride(kVPlane), dst_width,
+ dst_height, libyuv::kFilterBilinear);
dst_frame.set_timestamp(input_image.timestamp());
dst_frame.set_render_time_ms(input_image.render_time_ms());
- streaminfos_[stream_idx].encoder->Encode(dst_frame,
- codec_specific_info,
+ streaminfos_[stream_idx].encoder->Encode(dst_frame, codec_specific_info,
&stream_frame_types);
}
}
@@ -426,16 +414,17 @@ uint32_t SimulcastEncoderAdapter::GetStreamBitrate(
// current stream's |targetBitrate|, otherwise it's capped by |maxBitrate|.
if (stream_idx < codec_.numberOfSimulcastStreams - 1) {
unsigned int max_rate = codec_.simulcastStream[stream_idx].maxBitrate;
- if (new_bitrate_kbit >= SumStreamTargetBitrate(stream_idx + 1, codec_) +
- codec_.simulcastStream[stream_idx + 1].minBitrate) {
+ if (new_bitrate_kbit >=
+ SumStreamTargetBitrate(stream_idx + 1, codec_) +
+ codec_.simulcastStream[stream_idx + 1].minBitrate) {
max_rate = codec_.simulcastStream[stream_idx].targetBitrate;
}
return std::min(new_bitrate_kbit - sum_target_lower_streams, max_rate);
} else {
- // For the highest stream (highest resolution), the |targetBitRate| and
- // |maxBitrate| are not used. Any excess bitrate (above the targets of
- // all lower streams) is given to this (highest resolution) stream.
- return new_bitrate_kbit - sum_target_lower_streams;
+ // For the highest stream (highest resolution), the |targetBitRate| and
+ // |maxBitrate| are not used. Any excess bitrate (above the targets of
+ // all lower streams) is given to this (highest resolution) stream.
+ return new_bitrate_kbit - sum_target_lower_streams;
}
} else {
// Not enough bitrate for this stream.
@@ -507,4 +496,11 @@ bool SimulcastEncoderAdapter::SupportsNativeHandle() const {
return streaminfos_[0].encoder->SupportsNativeHandle();
}
+const char* SimulcastEncoderAdapter::ImplementationName() const {
+ // We should not be calling this method before streaminfos_ are configured.
+ RTC_DCHECK(!streaminfos_.empty());
+ // TODO(pbos): Support multiple implementation names for different encoders.
+ return streaminfos_[0].encoder->ImplementationName();
+}
+
} // namespace webrtc
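The GetStreamBitrate hunk above spells out the simulcast allocation policy: each stream below the top is capped (by targetBitrate, or maxBitrate while the next stream cannot yet be started), and any excess above the lower streams' targets flows to the highest-resolution stream. A simplified, self-contained sketch of that waterfall (the minimum-rate and stream-disabling logic is omitted):

#include <algorithm>
#include <cstdio>
#include <vector>

struct Stream { int target_kbps; };

// Rate for stream |idx| given |total| kbps, per the policy sketched above.
int StreamRate(const std::vector<Stream>& streams, size_t idx, int total) {
  int lower_targets = 0;
  for (size_t i = 0; i < idx; ++i) lower_targets += streams[i].target_kbps;
  int available = total - lower_targets;
  if (available <= 0) return 0;                     // Nothing left over.
  if (idx + 1 == streams.size()) return available;  // Top stream takes excess.
  return std::min(available, streams[idx].target_kbps);
}

int main() {
  std::vector<Stream> s = {{100}, {450}, {1000}};  // cf. kTargetBitrates below.
  for (size_t i = 0; i < s.size(); ++i)
    std::printf("stream %zu: %d kbps\n", i, StreamRate(s, i, 2000));
}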
diff --git a/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.h b/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.h
index afec024abc..05a96c7336 100644
--- a/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.h
+++ b/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.h
@@ -59,6 +59,7 @@ class SimulcastEncoderAdapter : public VP8Encoder {
int GetTargetFramerate() override;
bool SupportsNativeHandle() const override;
+ const char* ImplementationName() const override;
private:
struct StreamInfo {
@@ -71,8 +72,8 @@ class SimulcastEncoderAdapter : public VP8Encoder {
send_stream(true) {}
StreamInfo(VideoEncoder* encoder,
EncodedImageCallback* callback,
- unsigned short width,
- unsigned short height,
+ uint16_t width,
+ uint16_t height,
bool send_stream)
: encoder(encoder),
callback(callback),
@@ -83,8 +84,8 @@ class SimulcastEncoderAdapter : public VP8Encoder {
// Deleted by SimulcastEncoderAdapter::Release().
VideoEncoder* encoder;
EncodedImageCallback* callback;
- unsigned short width;
- unsigned short height;
+ uint16_t width;
+ uint16_t height;
bool key_frame_request;
bool send_stream;
};
@@ -118,4 +119,3 @@ class SimulcastEncoderAdapter : public VP8Encoder {
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_ENCODER_ADAPTER_H_
-
diff --git a/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter_unittest.cc b/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter_unittest.cc
index 218b5e2d1a..86b8e0b345 100644
--- a/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter_unittest.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter_unittest.cc
@@ -11,7 +11,7 @@
#include <vector>
#include "testing/gmock/include/gmock/gmock.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.h"
#include "webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h"
#include "webrtc/modules/video_coding/codecs/vp8/vp8_factory.h"
@@ -27,12 +27,10 @@ static VP8Encoder* CreateTestEncoderAdapter() {
class TestSimulcastEncoderAdapter : public TestVp8Simulcast {
public:
TestSimulcastEncoderAdapter()
- : TestVp8Simulcast(CreateTestEncoderAdapter(),
- VP8Decoder::Create()) {}
+ : TestVp8Simulcast(CreateTestEncoderAdapter(), VP8Decoder::Create()) {}
+
protected:
- virtual void SetUp() {
- TestVp8Simulcast::SetUp();
- }
+ virtual void SetUp() { TestVp8Simulcast::SetUp(); }
virtual void TearDown() {
TestVp8Simulcast::TearDown();
VP8EncoderFactoryConfig::set_use_simulcast_adapter(false);
@@ -97,8 +95,7 @@ TEST_F(TestSimulcastEncoderAdapter, TestSpatioTemporalLayers321PatternEncoder) {
// TODO(ronghuawu): Enable this test when SkipEncodingUnusedStreams option is
// implemented for SimulcastEncoderAdapter.
-TEST_F(TestSimulcastEncoderAdapter,
- DISABLED_TestSkipEncodingUnusedStreams) {
+TEST_F(TestSimulcastEncoderAdapter, DISABLED_TestSkipEncodingUnusedStreams) {
TestVp8Simulcast::TestSkipEncodingUnusedStreams();
}
@@ -127,23 +124,17 @@ class MockVideoEncoder : public VideoEncoder {
return 0;
}
- int32_t Release() override {
- return 0;
- }
+ int32_t Release() override { return 0; }
int32_t SetRates(uint32_t newBitRate, uint32_t frameRate) override {
return 0;
}
- MOCK_METHOD2(SetChannelParameters,
- int32_t(uint32_t packetLoss, int64_t rtt));
+ MOCK_METHOD2(SetChannelParameters, int32_t(uint32_t packetLoss, int64_t rtt));
- bool SupportsNativeHandle() const override {
- return supports_native_handle_;
- }
+ bool SupportsNativeHandle() const override { return supports_native_handle_; }
- virtual ~MockVideoEncoder() {
- }
+ virtual ~MockVideoEncoder() {}
const VideoCodec& codec() const { return codec_; }
@@ -200,7 +191,8 @@ class TestSimulcastEncoderAdapterFakeHelper {
EXPECT_TRUE(!factory_->encoders().empty());
for (size_t i = 0; i < factory_->encoders().size(); ++i) {
EXPECT_CALL(*factory_->encoders()[i],
- SetChannelParameters(packetLoss, rtt)).Times(1);
+ SetChannelParameters(packetLoss, rtt))
+ .Times(1);
}
}
@@ -249,8 +241,7 @@ class TestSimulcastEncoderAdapterFake : public ::testing::Test,
void SetupCodec() {
TestVp8Simulcast::DefaultSettings(
- &codec_,
- static_cast<const int*>(kTestTemporalLayerProfile));
+ &codec_, static_cast<const int*>(kTestTemporalLayerProfile));
EXPECT_EQ(0, adapter_->InitEncode(&codec_, 1, 1200));
adapter_->RegisterEncodeCompleteCallback(this);
}
diff --git a/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.cc b/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.cc
index 373a55237f..f23affee41 100644
--- a/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.cc
@@ -13,18 +13,14 @@
namespace webrtc {
namespace testing {
-class TestVp8Impl
- : public TestVp8Simulcast {
+class TestVp8Impl : public TestVp8Simulcast {
public:
TestVp8Impl()
- : TestVp8Simulcast(VP8Encoder::Create(), VP8Decoder::Create()) {}
+ : TestVp8Simulcast(VP8Encoder::Create(), VP8Decoder::Create()) {}
+
protected:
- virtual void SetUp() {
- TestVp8Simulcast::SetUp();
- }
- virtual void TearDown() {
- TestVp8Simulcast::TearDown();
- }
+ virtual void SetUp() { TestVp8Simulcast::SetUp(); }
+ virtual void TearDown() { TestVp8Simulcast::TearDown(); }
};
TEST_F(TestVp8Impl, TestKeyFrameRequestsOnAllStreams) {
diff --git a/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h b/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h
index e4fc986545..7a7a2c253b 100644
--- a/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h
+++ b/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h
@@ -14,10 +14,11 @@
#include <algorithm>
#include <vector>
+#include "webrtc/base/checks.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/common.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h"
+#include "webrtc/modules/video_coding/include/mock/mock_video_codec_interface.h"
#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
#include "webrtc/modules/video_coding/codecs/vp8/temporal_layers.h"
#include "webrtc/video_frame.h"
@@ -43,10 +44,8 @@ const int kMinBitrates[kNumberOfSimulcastStreams] = {50, 150, 600};
const int kTargetBitrates[kNumberOfSimulcastStreams] = {100, 450, 1000};
const int kDefaultTemporalLayerProfile[3] = {3, 3, 3};
-template<typename T> void SetExpectedValues3(T value0,
- T value1,
- T value2,
- T* expected_values) {
+template <typename T>
+void SetExpectedValues3(T value0, T value1, T value2, T* expected_values) {
expected_values[0] = value0;
expected_values[1] = value1;
expected_values[2] = value2;
@@ -54,15 +53,14 @@ template<typename T> void SetExpectedValues3(T value0,
class Vp8TestEncodedImageCallback : public EncodedImageCallback {
public:
- Vp8TestEncodedImageCallback()
- : picture_id_(-1) {
+ Vp8TestEncodedImageCallback() : picture_id_(-1) {
memset(temporal_layer_, -1, sizeof(temporal_layer_));
memset(layer_sync_, false, sizeof(layer_sync_));
}
~Vp8TestEncodedImageCallback() {
- delete [] encoded_key_frame_._buffer;
- delete [] encoded_frame_._buffer;
+ delete[] encoded_key_frame_._buffer;
+ delete[] encoded_frame_._buffer;
}
virtual int32_t Encoded(const EncodedImage& encoded_image,
@@ -71,22 +69,20 @@ class Vp8TestEncodedImageCallback : public EncodedImageCallback {
// Only store the base layer.
if (codec_specific_info->codecSpecific.VP8.simulcastIdx == 0) {
if (encoded_image._frameType == kVideoFrameKey) {
- delete [] encoded_key_frame_._buffer;
+ delete[] encoded_key_frame_._buffer;
encoded_key_frame_._buffer = new uint8_t[encoded_image._size];
encoded_key_frame_._size = encoded_image._size;
encoded_key_frame_._length = encoded_image._length;
encoded_key_frame_._frameType = kVideoFrameKey;
encoded_key_frame_._completeFrame = encoded_image._completeFrame;
- memcpy(encoded_key_frame_._buffer,
- encoded_image._buffer,
+ memcpy(encoded_key_frame_._buffer, encoded_image._buffer,
encoded_image._length);
} else {
- delete [] encoded_frame_._buffer;
+ delete[] encoded_frame_._buffer;
encoded_frame_._buffer = new uint8_t[encoded_image._size];
encoded_frame_._size = encoded_image._size;
encoded_frame_._length = encoded_image._length;
- memcpy(encoded_frame_._buffer,
- encoded_image._buffer,
+ memcpy(encoded_frame_._buffer, encoded_image._buffer,
encoded_image._length);
}
}
@@ -97,8 +93,10 @@ class Vp8TestEncodedImageCallback : public EncodedImageCallback {
codec_specific_info->codecSpecific.VP8.temporalIdx;
return 0;
}
- void GetLastEncodedFrameInfo(int* picture_id, int* temporal_layer,
- bool* layer_sync, int stream) {
+ void GetLastEncodedFrameInfo(int* picture_id,
+ int* temporal_layer,
+ bool* layer_sync,
+ int stream) {
*picture_id = picture_id_;
*temporal_layer = temporal_layer_[stream];
*layer_sync = layer_sync_[stream];
@@ -120,10 +118,8 @@ class Vp8TestEncodedImageCallback : public EncodedImageCallback {
class Vp8TestDecodedImageCallback : public DecodedImageCallback {
public:
- Vp8TestDecodedImageCallback()
- : decoded_frames_(0) {
- }
- virtual int32_t Decoded(VideoFrame& decoded_image) {
+ Vp8TestDecodedImageCallback() : decoded_frames_(0) {}
+ int32_t Decoded(VideoFrame& decoded_image) override {
for (int i = 0; i < decoded_image.width(); ++i) {
EXPECT_NEAR(kColorY, decoded_image.buffer(kYPlane)[i], 1);
}
@@ -136,9 +132,11 @@ class Vp8TestDecodedImageCallback : public DecodedImageCallback {
decoded_frames_++;
return 0;
}
- int DecodedFrames() {
- return decoded_frames_;
+ int32_t Decoded(VideoFrame& decoded_image, int64_t decode_time_ms) override {
+ RTC_NOTREACHED();
+ return -1;
}
+ int DecodedFrames() { return decoded_frames_; }
private:
int decoded_frames_;
@@ -161,8 +159,7 @@ class SkipEncodingUnusedStreamsTest {
std::vector<unsigned int> configured_bitrates;
for (std::vector<TemporalLayers*>::const_iterator it =
spy_factory->spying_layers_.begin();
- it != spy_factory->spying_layers_.end();
- ++it) {
+ it != spy_factory->spying_layers_.end(); ++it) {
configured_bitrates.push_back(
static_cast<SpyingTemporalLayers*>(*it)->configured_bitrate_);
}
@@ -185,8 +182,8 @@ class SkipEncodingUnusedStreamsTest {
int framerate,
vpx_codec_enc_cfg_t* cfg) override {
configured_bitrate_ = bitrate_kbit;
- return layers_->ConfigureBitrates(
- bitrate_kbit, max_bitrate_kbit, framerate, cfg);
+ return layers_->ConfigureBitrates(bitrate_kbit, max_bitrate_kbit,
+ framerate, cfg);
}
void PopulateCodecSpecific(bool base_layer_sync,
@@ -228,16 +225,15 @@ class SkipEncodingUnusedStreamsTest {
class TestVp8Simulcast : public ::testing::Test {
public:
TestVp8Simulcast(VP8Encoder* encoder, VP8Decoder* decoder)
- : encoder_(encoder),
- decoder_(decoder) {}
+ : encoder_(encoder), decoder_(decoder) {}
// Creates an VideoFrame from |plane_colors|.
static void CreateImage(VideoFrame* frame, int plane_colors[kNumOfPlanes]) {
for (int plane_num = 0; plane_num < kNumOfPlanes; ++plane_num) {
- int width = (plane_num != kYPlane ? (frame->width() + 1) / 2 :
- frame->width());
- int height = (plane_num != kYPlane ? (frame->height() + 1) / 2 :
- frame->height());
+ int width =
+ (plane_num != kYPlane ? (frame->width() + 1) / 2 : frame->width());
+ int height =
+ (plane_num != kYPlane ? (frame->height() + 1) / 2 : frame->height());
PlaneType plane_type = static_cast<PlaneType>(plane_num);
uint8_t* data = frame->buffer(plane_type);
// Setting allocated area to zero - setting only image size to
@@ -267,24 +263,15 @@ class TestVp8Simulcast : public ::testing::Test {
settings->height = kDefaultHeight;
settings->numberOfSimulcastStreams = kNumberOfSimulcastStreams;
ASSERT_EQ(3, kNumberOfSimulcastStreams);
- ConfigureStream(kDefaultWidth / 4, kDefaultHeight / 4,
- kMaxBitrates[0],
- kMinBitrates[0],
- kTargetBitrates[0],
- &settings->simulcastStream[0],
- temporal_layer_profile[0]);
- ConfigureStream(kDefaultWidth / 2, kDefaultHeight / 2,
- kMaxBitrates[1],
- kMinBitrates[1],
- kTargetBitrates[1],
- &settings->simulcastStream[1],
- temporal_layer_profile[1]);
- ConfigureStream(kDefaultWidth, kDefaultHeight,
- kMaxBitrates[2],
- kMinBitrates[2],
- kTargetBitrates[2],
- &settings->simulcastStream[2],
- temporal_layer_profile[2]);
+ ConfigureStream(kDefaultWidth / 4, kDefaultHeight / 4, kMaxBitrates[0],
+ kMinBitrates[0], kTargetBitrates[0],
+ &settings->simulcastStream[0], temporal_layer_profile[0]);
+ ConfigureStream(kDefaultWidth / 2, kDefaultHeight / 2, kMaxBitrates[1],
+ kMinBitrates[1], kTargetBitrates[1],
+ &settings->simulcastStream[1], temporal_layer_profile[1]);
+ ConfigureStream(kDefaultWidth, kDefaultHeight, kMaxBitrates[2],
+ kMinBitrates[2], kTargetBitrates[2],
+ &settings->simulcastStream[2], temporal_layer_profile[2]);
settings->codecSpecific.VP8.resilience = kResilientStream;
settings->codecSpecific.VP8.denoisingOn = true;
settings->codecSpecific.VP8.errorConcealmentOn = false;
@@ -312,9 +299,7 @@ class TestVp8Simulcast : public ::testing::Test {
}
protected:
- virtual void SetUp() {
- SetUpCodec(kDefaultTemporalLayerProfile);
- }
+ virtual void SetUp() { SetUpCodec(kDefaultTemporalLayerProfile); }
virtual void SetUpCodec(const int* temporal_layer_profile) {
encoder_->RegisterEncodeCompleteCallback(&encoder_callback_);
@@ -323,14 +308,14 @@ class TestVp8Simulcast : public ::testing::Test {
EXPECT_EQ(0, encoder_->InitEncode(&settings_, 1, 1200));
EXPECT_EQ(0, decoder_->InitDecode(&settings_, 1));
int half_width = (kDefaultWidth + 1) / 2;
- input_frame_.CreateEmptyFrame(kDefaultWidth, kDefaultHeight,
- kDefaultWidth, half_width, half_width);
+ input_frame_.CreateEmptyFrame(kDefaultWidth, kDefaultHeight, kDefaultWidth,
+ half_width, half_width);
memset(input_frame_.buffer(kYPlane), 0,
- input_frame_.allocated_size(kYPlane));
+ input_frame_.allocated_size(kYPlane));
memset(input_frame_.buffer(kUPlane), 0,
- input_frame_.allocated_size(kUPlane));
+ input_frame_.allocated_size(kUPlane));
memset(input_frame_.buffer(kVPlane), 0,
- input_frame_.allocated_size(kVPlane));
+ input_frame_.allocated_size(kVPlane));
}
virtual void TearDown() {
@@ -342,28 +327,34 @@ class TestVp8Simulcast : public ::testing::Test {
ASSERT_GE(expected_video_streams, 0);
ASSERT_LE(expected_video_streams, kNumberOfSimulcastStreams);
if (expected_video_streams >= 1) {
- EXPECT_CALL(encoder_callback_, Encoded(
- AllOf(Field(&EncodedImage::_frameType, frame_type),
- Field(&EncodedImage::_encodedWidth, kDefaultWidth / 4),
- Field(&EncodedImage::_encodedHeight, kDefaultHeight / 4)), _, _)
- )
+ EXPECT_CALL(
+ encoder_callback_,
+ Encoded(
+ AllOf(Field(&EncodedImage::_frameType, frame_type),
+ Field(&EncodedImage::_encodedWidth, kDefaultWidth / 4),
+ Field(&EncodedImage::_encodedHeight, kDefaultHeight / 4)),
+ _, _))
.Times(1)
.WillRepeatedly(Return(0));
}
if (expected_video_streams >= 2) {
- EXPECT_CALL(encoder_callback_, Encoded(
- AllOf(Field(&EncodedImage::_frameType, frame_type),
- Field(&EncodedImage::_encodedWidth, kDefaultWidth / 2),
- Field(&EncodedImage::_encodedHeight, kDefaultHeight / 2)), _, _)
- )
+ EXPECT_CALL(
+ encoder_callback_,
+ Encoded(
+ AllOf(Field(&EncodedImage::_frameType, frame_type),
+ Field(&EncodedImage::_encodedWidth, kDefaultWidth / 2),
+ Field(&EncodedImage::_encodedHeight, kDefaultHeight / 2)),
+ _, _))
.Times(1)
.WillRepeatedly(Return(0));
}
if (expected_video_streams >= 3) {
- EXPECT_CALL(encoder_callback_, Encoded(
- AllOf(Field(&EncodedImage::_frameType, frame_type),
- Field(&EncodedImage::_encodedWidth, kDefaultWidth),
- Field(&EncodedImage::_encodedHeight, kDefaultHeight)), _, _))
+ EXPECT_CALL(
+ encoder_callback_,
+ Encoded(AllOf(Field(&EncodedImage::_frameType, frame_type),
+ Field(&EncodedImage::_encodedWidth, kDefaultWidth),
+ Field(&EncodedImage::_encodedHeight, kDefaultHeight)),
+ _, _))
.Times(1)
.WillRepeatedly(Return(0));
}
@@ -477,8 +468,8 @@ class TestVp8Simulcast : public ::testing::Test {
void TestPaddingOneStreamTwoMaxedOut() {
// We are just below limit of sending third stream, so we should get
// first stream's rate maxed out at |targetBitrate|, second at |maxBitrate|.
- encoder_->SetRates(kTargetBitrates[0] + kTargetBitrates[1] +
- kMinBitrates[2] - 1, 30);
+ encoder_->SetRates(
+ kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] - 1, 30);
std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
kVideoFrameDelta);
ExpectStreams(kVideoFrameKey, 2);
@@ -491,8 +482,8 @@ class TestVp8Simulcast : public ::testing::Test {
void TestSendAllStreams() {
// We have just enough to send all streams.
- encoder_->SetRates(kTargetBitrates[0] + kTargetBitrates[1] +
- kMinBitrates[2], 30);
+ encoder_->SetRates(
+ kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2], 30);
std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
kVideoFrameDelta);
ExpectStreams(kVideoFrameKey, 3);
@@ -505,8 +496,7 @@ class TestVp8Simulcast : public ::testing::Test {
void TestDisablingStreams() {
// We should get three media streams.
- encoder_->SetRates(kMaxBitrates[0] + kMaxBitrates[1] +
- kMaxBitrates[2], 30);
+ encoder_->SetRates(kMaxBitrates[0] + kMaxBitrates[1] + kMaxBitrates[2], 30);
std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
kVideoFrameDelta);
ExpectStreams(kVideoFrameKey, 3);
@@ -517,8 +507,8 @@ class TestVp8Simulcast : public ::testing::Test {
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
// We should only get two streams and padding for one.
- encoder_->SetRates(kTargetBitrates[0] + kTargetBitrates[1] +
- kMinBitrates[2] / 2, 30);
+ encoder_->SetRates(
+ kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] / 2, 30);
ExpectStreams(kVideoFrameDelta, 2);
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
@@ -537,16 +527,16 @@ class TestVp8Simulcast : public ::testing::Test {
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
// We should only get two streams and padding for one.
- encoder_->SetRates(kTargetBitrates[0] + kTargetBitrates[1] +
- kMinBitrates[2] / 2, 30);
+ encoder_->SetRates(
+ kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] / 2, 30);
// We get a key frame because a new stream is being enabled.
ExpectStreams(kVideoFrameKey, 2);
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
// We should get all three streams.
- encoder_->SetRates(kTargetBitrates[0] + kTargetBitrates[1] +
- kTargetBitrates[2], 30);
+ encoder_->SetRates(
+ kTargetBitrates[0] + kTargetBitrates[1] + kTargetBitrates[2], 30);
// We get a key frame because a new stream is being enabled.
ExpectStreams(kVideoFrameKey, 3);
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
@@ -571,20 +561,20 @@ class TestVp8Simulcast : public ::testing::Test {
input_frame_.CreateEmptyFrame(settings_.width, settings_.height,
settings_.width, half_width, half_width);
memset(input_frame_.buffer(kYPlane), 0,
- input_frame_.allocated_size(kYPlane));
+ input_frame_.allocated_size(kYPlane));
memset(input_frame_.buffer(kUPlane), 0,
- input_frame_.allocated_size(kUPlane));
+ input_frame_.allocated_size(kUPlane));
memset(input_frame_.buffer(kVPlane), 0,
- input_frame_.allocated_size(kVPlane));
+ input_frame_.allocated_size(kVPlane));
// The for loop above did not set the bitrate of the highest layer.
- settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1].
- maxBitrate = 0;
+ settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1]
+ .maxBitrate = 0;
// The highest layer has to correspond to the non-simulcast resolution.
- settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1].
- width = settings_.width;
- settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1].
- height = settings_.height;
+ settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1].width =
+ settings_.width;
+ settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1].height =
+ settings_.height;
EXPECT_EQ(0, encoder_->InitEncode(&settings_, 1, 1200));
// Encode one frame and verify.
@@ -612,21 +602,17 @@ class TestVp8Simulcast : public ::testing::Test {
input_frame_.CreateEmptyFrame(settings_.width, settings_.height,
settings_.width, half_width, half_width);
memset(input_frame_.buffer(kYPlane), 0,
- input_frame_.allocated_size(kYPlane));
+ input_frame_.allocated_size(kYPlane));
memset(input_frame_.buffer(kUPlane), 0,
- input_frame_.allocated_size(kUPlane));
+ input_frame_.allocated_size(kUPlane));
memset(input_frame_.buffer(kVPlane), 0,
- input_frame_.allocated_size(kVPlane));
+ input_frame_.allocated_size(kVPlane));
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
}
- void TestSwitchingToOneStream() {
- SwitchingToOneStream(1024, 768);
- }
+ void TestSwitchingToOneStream() { SwitchingToOneStream(1024, 768); }
- void TestSwitchingToOneOddStream() {
- SwitchingToOneStream(1023, 769);
- }
+ void TestSwitchingToOneOddStream() { SwitchingToOneStream(1023, 769); }
void TestRPSIEncoder() {
Vp8TestEncodedImageCallback encoder_callback;
@@ -777,67 +763,55 @@ class TestVp8Simulcast : public ::testing::Test {
encoder_->RegisterEncodeCompleteCallback(&encoder_callback);
encoder_->SetRates(kMaxBitrates[2], 30); // To get all three streams.
- int expected_temporal_idx[3] = { -1, -1, -1};
+ int expected_temporal_idx[3] = {-1, -1, -1};
bool expected_layer_sync[3] = {false, false, false};
// First frame: #0.
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
SetExpectedValues3<int>(0, 0, 0, expected_temporal_idx);
SetExpectedValues3<bool>(true, true, true, expected_layer_sync);
- VerifyTemporalIdxAndSyncForAllSpatialLayers(&encoder_callback,
- expected_temporal_idx,
- expected_layer_sync,
- 3);
+ VerifyTemporalIdxAndSyncForAllSpatialLayers(
+ &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
// Next frame: #1.
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
SetExpectedValues3<int>(2, 2, 2, expected_temporal_idx);
SetExpectedValues3<bool>(true, true, true, expected_layer_sync);
- VerifyTemporalIdxAndSyncForAllSpatialLayers(&encoder_callback,
- expected_temporal_idx,
- expected_layer_sync,
- 3);
+ VerifyTemporalIdxAndSyncForAllSpatialLayers(
+ &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
// Next frame: #2.
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
SetExpectedValues3<int>(1, 1, 1, expected_temporal_idx);
SetExpectedValues3<bool>(true, true, true, expected_layer_sync);
- VerifyTemporalIdxAndSyncForAllSpatialLayers(&encoder_callback,
- expected_temporal_idx,
- expected_layer_sync,
- 3);
+ VerifyTemporalIdxAndSyncForAllSpatialLayers(
+ &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
// Next frame: #3.
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
SetExpectedValues3<int>(2, 2, 2, expected_temporal_idx);
SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
- VerifyTemporalIdxAndSyncForAllSpatialLayers(&encoder_callback,
- expected_temporal_idx,
- expected_layer_sync,
- 3);
+ VerifyTemporalIdxAndSyncForAllSpatialLayers(
+ &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
// Next frame: #4.
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
SetExpectedValues3<int>(0, 0, 0, expected_temporal_idx);
SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
- VerifyTemporalIdxAndSyncForAllSpatialLayers(&encoder_callback,
- expected_temporal_idx,
- expected_layer_sync,
- 3);
+ VerifyTemporalIdxAndSyncForAllSpatialLayers(
+ &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
// Next frame: #5.
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
SetExpectedValues3<int>(2, 2, 2, expected_temporal_idx);
SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
- VerifyTemporalIdxAndSyncForAllSpatialLayers(&encoder_callback,
- expected_temporal_idx,
- expected_layer_sync,
- 3);
+ VerifyTemporalIdxAndSyncForAllSpatialLayers(
+ &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
}
// Test the layer pattern and sync flag for various spatial-temporal patterns.
@@ -858,67 +832,55 @@ class TestVp8Simulcast : public ::testing::Test {
encoder_->RegisterEncodeCompleteCallback(&encoder_callback);
encoder_->SetRates(kMaxBitrates[2], 30); // To get all three streams.
- int expected_temporal_idx[3] = { -1, -1, -1};
+ int expected_temporal_idx[3] = {-1, -1, -1};
bool expected_layer_sync[3] = {false, false, false};
// First frame: #0.
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
SetExpectedValues3<int>(0, 0, 255, expected_temporal_idx);
SetExpectedValues3<bool>(true, true, false, expected_layer_sync);
- VerifyTemporalIdxAndSyncForAllSpatialLayers(&encoder_callback,
- expected_temporal_idx,
- expected_layer_sync,
- 3);
+ VerifyTemporalIdxAndSyncForAllSpatialLayers(
+ &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
// Next frame: #1.
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
SetExpectedValues3<int>(2, 1, 255, expected_temporal_idx);
SetExpectedValues3<bool>(true, true, false, expected_layer_sync);
- VerifyTemporalIdxAndSyncForAllSpatialLayers(&encoder_callback,
- expected_temporal_idx,
- expected_layer_sync,
- 3);
+ VerifyTemporalIdxAndSyncForAllSpatialLayers(
+ &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
// Next frame: #2.
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
SetExpectedValues3<int>(1, 0, 255, expected_temporal_idx);
SetExpectedValues3<bool>(true, false, false, expected_layer_sync);
- VerifyTemporalIdxAndSyncForAllSpatialLayers(&encoder_callback,
- expected_temporal_idx,
- expected_layer_sync,
- 3);
+ VerifyTemporalIdxAndSyncForAllSpatialLayers(
+ &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
// Next frame: #3.
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
SetExpectedValues3<int>(2, 1, 255, expected_temporal_idx);
SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
- VerifyTemporalIdxAndSyncForAllSpatialLayers(&encoder_callback,
- expected_temporal_idx,
- expected_layer_sync,
- 3);
+ VerifyTemporalIdxAndSyncForAllSpatialLayers(
+ &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
// Next frame: #4.
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
SetExpectedValues3<int>(0, 0, 255, expected_temporal_idx);
SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
- VerifyTemporalIdxAndSyncForAllSpatialLayers(&encoder_callback,
- expected_temporal_idx,
- expected_layer_sync,
- 3);
+ VerifyTemporalIdxAndSyncForAllSpatialLayers(
+ &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
// Next frame: #5.
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
SetExpectedValues3<int>(2, 1, 255, expected_temporal_idx);
SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
- VerifyTemporalIdxAndSyncForAllSpatialLayers(&encoder_callback,
- expected_temporal_idx,
- expected_layer_sync,
- 3);
+ VerifyTemporalIdxAndSyncForAllSpatialLayers(
+ &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
}
void TestStrideEncodeDecode() {
@@ -932,8 +894,8 @@ class TestVp8Simulcast : public ::testing::Test {
// 1. stride > width 2. stride_y != stride_uv/2
int stride_y = kDefaultWidth + 20;
int stride_uv = ((kDefaultWidth + 1) / 2) + 5;
- input_frame_.CreateEmptyFrame(kDefaultWidth, kDefaultHeight,
- stride_y, stride_uv, stride_uv);
+ input_frame_.CreateEmptyFrame(kDefaultWidth, kDefaultHeight, stride_y,
+ stride_uv, stride_uv);
// Set color.
int plane_offset[kNumOfPlanes];
plane_offset[kYPlane] = kColorY;
@@ -963,10 +925,9 @@ class TestVp8Simulcast : public ::testing::Test {
void TestSkipEncodingUnusedStreams() {
SkipEncodingUnusedStreamsTest test;
std::vector<unsigned int> configured_bitrate =
- test.RunTest(encoder_.get(),
- &settings_,
- 1); // Target bit rate 1, to force all streams but the
- // base one to be exceeding bandwidth constraints.
+ test.RunTest(encoder_.get(), &settings_,
+ 1); // Target bit rate 1, to force all streams but the
+ // base one to be exceeding bandwidth constraints.
EXPECT_EQ(static_cast<size_t>(kNumberOfSimulcastStreams),
configured_bitrate.size());
@@ -975,8 +936,7 @@ class TestVp8Simulcast : public ::testing::Test {
int stream = 0;
for (std::vector<unsigned int>::const_iterator it =
configured_bitrate.begin();
- it != configured_bitrate.end();
- ++it) {
+ it != configured_bitrate.end(); ++it) {
if (stream == 0) {
EXPECT_EQ(min_bitrate, *it);
} else {
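
The ExpectStreams() expectations above match individual EncodedImage members of a call argument using gmock's Field() matchers composed under AllOf(). A minimal self-contained sketch of that matcher pattern — Image, Sink, and OnImage below are illustrative stand-ins, not WebRTC types:

#include "gmock/gmock.h"
#include "gtest/gtest.h"

using ::testing::_;
using ::testing::AllOf;
using ::testing::Field;
using ::testing::Return;

struct Image {
  int width;
  int height;
};

class Sink {
 public:
  virtual ~Sink() {}
  virtual int OnImage(const Image& image, int flags) = 0;
};

class MockSink : public Sink {
 public:
  MOCK_METHOD2(OnImage, int(const Image& image, int flags));
};

TEST(FieldMatcherSketch, MatchesStructMembers) {
  MockSink sink;
  // Expect exactly one call whose Image argument has the given dimensions,
  // regardless of the second argument.
  EXPECT_CALL(sink, OnImage(AllOf(Field(&Image::width, 320),
                                  Field(&Image::height, 240)),
                            _))
      .Times(1)
      .WillRepeatedly(Return(0));
  Image img = {320, 240};
  sink.OnImage(img, 7);
}
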
diff --git a/webrtc/modules/video_coding/codecs/vp8/temporal_layers.h b/webrtc/modules/video_coding/codecs/vp8/temporal_layers.h
index 7607210d5c..47112c64aa 100644
--- a/webrtc/modules/video_coding/codecs/vp8/temporal_layers.h
+++ b/webrtc/modules/video_coding/codecs/vp8/temporal_layers.h
@@ -14,7 +14,8 @@
#include "vpx/vpx_encoder.h"
-#include "webrtc/common_video/interface/video_image.h"
+#include "webrtc/common.h"
+#include "webrtc/common_video/include/video_image.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -30,6 +31,8 @@ class TemporalLayers {
virtual ~Factory() {}
virtual TemporalLayers* Create(int temporal_layers,
uint8_t initial_tl0_pic_idx) const;
+ static const ConfigOptionID identifier =
+ ConfigOptionID::kTemporalLayersFactory;
};
virtual ~TemporalLayers() {}
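
The static identifier added to TemporalLayers::Factory is the hook that lets a Config-style container store and retrieve heterogeneous option types under one enum key. A rough sketch of that registry pattern, assuming each option type carries a unique enum key — OptionMap and the second enum value are invented for illustration:

#include <map>

enum class ConfigOptionID { kTemporalLayersFactory, kIllustrativeOther };

class OptionMap {
 public:
  // Returns the stored option of type T, or a default-constructed fallback.
  template <typename T>
  const T& Get() const {
    ConfigOptionID key = T::identifier;  // Local copy avoids ODR-use.
    std::map<ConfigOptionID, const void*>::const_iterator it =
        options_.find(key);
    if (it == options_.end()) {
      static const T fallback;
      return fallback;
    }
    return *static_cast<const T*>(it->second);
  }
  // Stores |value| under T's identifier; no ownership taken in this sketch.
  template <typename T>
  void Set(const T* value) {
    ConfigOptionID key = T::identifier;
    options_[key] = value;
  }

 private:
  std::map<ConfigOptionID, const void*> options_;
};

struct TemporalLayersFactoryOption {
  static const ConfigOptionID identifier =
      ConfigOptionID::kTemporalLayersFactory;
  int layers = 1;
};

// Usage: OptionMap m; TemporalLayersFactoryOption o; m.Set(&o);
// int n = m.Get<TemporalLayersFactoryOption>().layers;
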
diff --git a/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc b/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
index 5ec674f16a..c3d77da063 100644
--- a/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
@@ -11,12 +11,12 @@
#include <stdio.h>
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/checks.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/test/testsupport/fileutils.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
namespace webrtc {
@@ -78,7 +78,11 @@ class Vp8UnitTestDecodeCompleteCallback : public webrtc::DecodedImageCallback {
public:
explicit Vp8UnitTestDecodeCompleteCallback(VideoFrame* frame)
: decoded_frame_(frame), decode_complete(false) {}
- int Decoded(webrtc::VideoFrame& frame);
+ int32_t Decoded(VideoFrame& frame) override;
+ int32_t Decoded(VideoFrame& frame, int64_t decode_time_ms) override {
+ RTC_NOTREACHED();
+ return -1;
+ }
bool DecodeComplete();
private:
@@ -216,7 +220,12 @@ TEST_F(TestVp8Impl, EncoderParameterTest) {
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->InitDecode(&codec_inst_, 1));
}
-TEST_F(TestVp8Impl, DISABLED_ON_ANDROID(AlignedStrideEncodeDecode)) {
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_AlignedStrideEncodeDecode DISABLED_AlignedStrideEncodeDecode
+#else
+#define MAYBE_AlignedStrideEncodeDecode AlignedStrideEncodeDecode
+#endif
+TEST_F(TestVp8Impl, MAYBE_AlignedStrideEncodeDecode) {
SetUpEncodeDecode();
encoder_->Encode(input_frame_, NULL, NULL);
EXPECT_GT(WaitForEncodedFrame(), 0u);
@@ -232,7 +241,12 @@ TEST_F(TestVp8Impl, DISABLED_ON_ANDROID(AlignedStrideEncodeDecode)) {
EXPECT_EQ(kTestNtpTimeMs, decoded_frame_.ntp_time_ms());
}
-TEST_F(TestVp8Impl, DISABLED_ON_ANDROID(DecodeWithACompleteKeyFrame)) {
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_DecodeWithACompleteKeyFrame DISABLED_DecodeWithACompleteKeyFrame
+#else
+#define MAYBE_DecodeWithACompleteKeyFrame DecodeWithACompleteKeyFrame
+#endif
+TEST_F(TestVp8Impl, MAYBE_DecodeWithACompleteKeyFrame) {
SetUpEncodeDecode();
encoder_->Encode(input_frame_, NULL, NULL);
EXPECT_GT(WaitForEncodedFrame(), 0u);
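
The MAYBE_ blocks above replace the deleted gtest_disable.h helper with stock gtest machinery: a test whose name starts with DISABLED_ still compiles on every platform but is skipped at run time unless --gtest_also_run_disabled_tests is passed. The idiom in isolation, with SOME_PLATFORM as a placeholder for a macro like WEBRTC_ANDROID:

#include "gtest/gtest.h"

// Compile the test under its real name or under a DISABLED_ name,
// depending on the platform macro; gtest skips DISABLED_ tests by default.
#if defined(SOME_PLATFORM)
#define MAYBE_HeavyEncodeTest DISABLED_HeavyEncodeTest
#else
#define MAYBE_HeavyEncodeTest HeavyEncodeTest
#endif

TEST(CodecSmokeTest, MAYBE_HeavyEncodeTest) {
  // A real body would run the expensive encode path guarded here.
  EXPECT_TRUE(true);
}
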
diff --git a/webrtc/modules/video_coding/codecs/vp8/vp8_factory.h b/webrtc/modules/video_coding/codecs/vp8/vp8_factory.h
index 84745ea5a1..52f8aa30b8 100644
--- a/webrtc/modules/video_coding/codecs/vp8/vp8_factory.h
+++ b/webrtc/modules/video_coding/codecs/vp8/vp8_factory.h
@@ -32,4 +32,3 @@ class VP8EncoderFactoryConfig {
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_VP8_FACTORY_H_
-
diff --git a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
index 029ccd1f27..5a04f6a43d 100644
--- a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
@@ -16,7 +16,7 @@
#include <algorithm>
// NOTE(ajm): Path provided by gyp.
-#include "libyuv/scale.h" // NOLINT
+#include "libyuv/scale.h" // NOLINT
#include "libyuv/convert.h" // NOLINT
#include "webrtc/base/checks.h"
@@ -24,8 +24,8 @@
#include "webrtc/common.h"
#include "webrtc/common_types.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/modules/video_coding/codecs/vp8/include/vp8_common_types.h"
#include "webrtc/modules/video_coding/codecs/vp8/screenshare_layers.h"
#include "webrtc/modules/video_coding/codecs/vp8/temporal_layers.h"
@@ -68,10 +68,9 @@ std::vector<int> GetStreamBitratesKbps(const VideoCodec& codec,
std::vector<int> bitrates_kbps(codec.numberOfSimulcastStreams);
// Allocate min -> target bitrates as long as we have bitrate to spend.
size_t last_active_stream = 0;
- for (size_t i = 0;
- i < static_cast<size_t>(codec.numberOfSimulcastStreams) &&
- bitrate_to_allocate_kbps >=
- static_cast<int>(codec.simulcastStream[i].minBitrate);
+ for (size_t i = 0; i < static_cast<size_t>(codec.numberOfSimulcastStreams) &&
+ bitrate_to_allocate_kbps >=
+ static_cast<int>(codec.simulcastStream[i].minBitrate);
++i) {
last_active_stream = i;
int allocated_bitrate_kbps =
@@ -132,7 +131,7 @@ bool ValidSimulcastResolutions(const VideoCodec& codec, int num_streams) {
return true;
}
-int NumStreamsDisabled(std::vector<bool>& streams) {
+int NumStreamsDisabled(const std::vector<bool>& streams) {
int num_disabled = 0;
for (bool stream : streams) {
if (!stream)
@@ -183,7 +182,7 @@ int VP8EncoderImpl::Release() {
while (!encoded_images_.empty()) {
EncodedImage& image = encoded_images_.back();
- delete [] image._buffer;
+ delete[] image._buffer;
encoded_images_.pop_back();
}
while (!encoders_.empty()) {
@@ -289,10 +288,8 @@ int VP8EncoderImpl::SetRates(uint32_t new_bitrate_kbit,
target_bitrate = tl0_bitrate;
}
configurations_[i].rc_target_bitrate = target_bitrate;
- temporal_layers_[stream_idx]->ConfigureBitrates(target_bitrate,
- max_bitrate,
- framerate,
- &configurations_[i]);
+ temporal_layers_[stream_idx]->ConfigureBitrates(
+ target_bitrate, max_bitrate, framerate, &configurations_[i]);
if (vpx_codec_enc_config_set(&encoders_[i], &configurations_[i])) {
return WEBRTC_VIDEO_CODEC_ERROR;
}
@@ -301,6 +298,10 @@ int VP8EncoderImpl::SetRates(uint32_t new_bitrate_kbit,
return WEBRTC_VIDEO_CODEC_OK;
}
+const char* VP8EncoderImpl::ImplementationName() const {
+ return "libvpx";
+}
+
void VP8EncoderImpl::SetStreamState(bool send_stream,
int stream_idx) {
if (send_stream && !send_stream_[stream_idx]) {
@@ -311,8 +312,8 @@ void VP8EncoderImpl::SetStreamState(bool send_stream,
}
void VP8EncoderImpl::SetupTemporalLayers(int num_streams,
- int num_temporal_layers,
- const VideoCodec& codec) {
+ int num_temporal_layers,
+ const VideoCodec& codec) {
const Config default_options;
const TemporalLayers::Factory& tl_factory =
(codec.extra_options ? codec.extra_options : &default_options)
@@ -330,15 +331,16 @@ void VP8EncoderImpl::SetupTemporalLayers(int num_streams,
for (int i = 0; i < num_streams; ++i) {
// TODO(andresp): crash if layers is invalid.
int layers = codec.simulcastStream[i].numberOfTemporalLayers;
- if (layers < 1) layers = 1;
+ if (layers < 1)
+ layers = 1;
temporal_layers_.push_back(tl_factory.Create(layers, rand()));
}
}
}
int VP8EncoderImpl::InitEncode(const VideoCodec* inst,
- int number_of_cores,
- size_t /*maxPayloadSize */) {
+ int number_of_cores,
+ size_t /*maxPayloadSize */) {
if (inst == NULL) {
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
@@ -375,12 +377,13 @@ int VP8EncoderImpl::InitEncode(const VideoCodec* inst,
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
- int num_temporal_layers = doing_simulcast ?
- inst->simulcastStream[0].numberOfTemporalLayers :
- inst->codecSpecific.VP8.numberOfTemporalLayers;
+ int num_temporal_layers =
+ doing_simulcast ? inst->simulcastStream[0].numberOfTemporalLayers
+ : inst->codecSpecific.VP8.numberOfTemporalLayers;
// TODO(andresp): crash if num temporal layers is bananas.
- if (num_temporal_layers < 1) num_temporal_layers = 1;
+ if (num_temporal_layers < 1)
+ num_temporal_layers = 1;
SetupTemporalLayers(number_of_streams, num_temporal_layers, *inst);
feedback_mode_ = inst->codecSpecific.VP8.feedbackModeOn;
@@ -410,7 +413,7 @@ int VP8EncoderImpl::InitEncode(const VideoCodec* inst,
int idx = number_of_streams - 1;
for (int i = 0; i < (number_of_streams - 1); ++i, --idx) {
int gcd = GCD(inst->simulcastStream[idx].width,
- inst->simulcastStream[idx-1].width);
+ inst->simulcastStream[idx - 1].width);
downsampling_factors_[i].num = inst->simulcastStream[idx].width / gcd;
downsampling_factors_[i].den = inst->simulcastStream[idx - 1].width / gcd;
send_stream_[i] = false;
@@ -422,20 +425,20 @@ int VP8EncoderImpl::InitEncode(const VideoCodec* inst,
}
for (int i = 0; i < number_of_streams; ++i) {
// Random start, 16 bits is enough.
- picture_id_[i] = static_cast<uint16_t>(rand()) & 0x7FFF;
+ picture_id_[i] = static_cast<uint16_t>(rand()) & 0x7FFF; // NOLINT
last_key_frame_picture_id_[i] = -1;
// allocate memory for encoded image
if (encoded_images_[i]._buffer != NULL) {
- delete [] encoded_images_[i]._buffer;
+ delete[] encoded_images_[i]._buffer;
}
- encoded_images_[i]._size = CalcBufferSize(kI420,
- codec_.width, codec_.height);
+ encoded_images_[i]._size =
+ CalcBufferSize(kI420, codec_.width, codec_.height);
encoded_images_[i]._buffer = new uint8_t[encoded_images_[i]._size];
encoded_images_[i]._completeFrame = true;
}
// populate encoder configuration with default values
- if (vpx_codec_enc_config_default(vpx_codec_vp8_cx(),
- &configurations_[0], 0)) {
+ if (vpx_codec_enc_config_default(vpx_codec_vp8_cx(), &configurations_[0],
+ 0)) {
return WEBRTC_VIDEO_CODEC_ERROR;
}
// setting the time base of the codec
@@ -459,8 +462,8 @@ int VP8EncoderImpl::InitEncode(const VideoCodec* inst,
break;
case kResilientFrames:
#ifdef INDEPENDENT_PARTITIONS
- configurations_[0]-g_error_resilient = VPX_ERROR_RESILIENT_DEFAULT |
- VPX_ERROR_RESILIENT_PARTITIONS;
+ configurations_[0].g_error_resilient =
+ VPX_ERROR_RESILIENT_DEFAULT | VPX_ERROR_RESILIENT_PARTITIONS;
break;
#else
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; // Not supported
@@ -536,20 +539,18 @@ int VP8EncoderImpl::InitEncode(const VideoCodec* inst,
// Determine number of threads based on the image size and #cores.
// TODO(fbarchard): Consider number of Simulcast layers.
- configurations_[0].g_threads = NumberOfThreads(configurations_[0].g_w,
- configurations_[0].g_h,
- number_of_cores);
+ configurations_[0].g_threads = NumberOfThreads(
+ configurations_[0].g_w, configurations_[0].g_h, number_of_cores);
// Creating a wrapper to the image - setting image data to NULL.
// Actual pointer will be set in encode. Setting align to 1, as it
// is meaningless (no memory allocation is done here).
- vpx_img_wrap(&raw_images_[0], VPX_IMG_FMT_I420, inst->width, inst->height,
- 1, NULL);
+ vpx_img_wrap(&raw_images_[0], VPX_IMG_FMT_I420, inst->width, inst->height, 1,
+ NULL);
if (encoders_.size() == 1) {
configurations_[0].rc_target_bitrate = inst->startBitrate;
- temporal_layers_[0]->ConfigureBitrates(inst->startBitrate,
- inst->maxBitrate,
+ temporal_layers_[0]->ConfigureBitrates(inst->startBitrate, inst->maxBitrate,
inst->maxFramerate,
&configurations_[0]);
} else {
@@ -641,20 +642,15 @@ int VP8EncoderImpl::InitAndSetControlSettings() {
flags |= VPX_CODEC_USE_OUTPUT_PARTITION;
if (encoders_.size() > 1) {
- int error = vpx_codec_enc_init_multi(&encoders_[0],
- vpx_codec_vp8_cx(),
- &configurations_[0],
- encoders_.size(),
- flags,
- &downsampling_factors_[0]);
+ int error = vpx_codec_enc_init_multi(&encoders_[0], vpx_codec_vp8_cx(),
+ &configurations_[0], encoders_.size(),
+ flags, &downsampling_factors_[0]);
if (error) {
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
} else {
- if (vpx_codec_enc_init(&encoders_[0],
- vpx_codec_vp8_cx(),
- &configurations_[0],
- flags)) {
+ if (vpx_codec_enc_init(&encoders_[0], vpx_codec_vp8_cx(),
+ &configurations_[0], flags)) {
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
}
@@ -671,13 +667,13 @@ int VP8EncoderImpl::InitAndSetControlSettings() {
#else
denoiser_state = kDenoiserOnAdaptive;
#endif
- vpx_codec_control(&encoders_[0], VP8E_SET_NOISE_SENSITIVITY,
- codec_.codecSpecific.VP8.denoisingOn ?
- denoiser_state : kDenoiserOff);
+ vpx_codec_control(
+ &encoders_[0], VP8E_SET_NOISE_SENSITIVITY,
+ codec_.codecSpecific.VP8.denoisingOn ? denoiser_state : kDenoiserOff);
if (encoders_.size() > 2) {
- vpx_codec_control(&encoders_[1], VP8E_SET_NOISE_SENSITIVITY,
- codec_.codecSpecific.VP8.denoisingOn ?
- denoiser_state : kDenoiserOff);
+ vpx_codec_control(
+ &encoders_[1], VP8E_SET_NOISE_SENSITIVITY,
+ codec_.codecSpecific.VP8.denoisingOn ? denoiser_state : kDenoiserOff);
}
for (size_t i = 0; i < encoders_.size(); ++i) {
// Allow more screen content to be detected as static.
@@ -710,14 +706,12 @@ uint32_t VP8EncoderImpl::MaxIntraTarget(uint32_t optimalBuffersize) {
// Don't go below 3 times the per frame bandwidth.
const uint32_t minIntraTh = 300;
- return (targetPct < minIntraTh) ? minIntraTh: targetPct;
+ return (targetPct < minIntraTh) ? minIntraTh : targetPct;
}
int VP8EncoderImpl::Encode(const VideoFrame& frame,
const CodecSpecificInfo* codec_specific_info,
const std::vector<FrameType>* frame_types) {
- TRACE_EVENT1("webrtc", "VP8::Encode", "timestamp", frame.timestamp());
-
if (!inited_)
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
if (frame.IsZeroSize())
@@ -731,7 +725,7 @@ int VP8EncoderImpl::Encode(const VideoFrame& frame,
quality_scaler_enabled_ ? quality_scaler_.GetScaledFrame(frame) : frame;
if (quality_scaler_enabled_ && (input_image.width() != codec_.width ||
- input_image.height() != codec_.height)) {
+ input_image.height() != codec_.height)) {
int ret = UpdateCodecFrameSize(input_image);
if (ret < 0)
return ret;
@@ -747,11 +741,11 @@ int VP8EncoderImpl::Encode(const VideoFrame& frame,
// Image in vpx_image_t format.
// Input image is const. VP8's raw image is not defined as const.
raw_images_[0].planes[VPX_PLANE_Y] =
- const_cast<uint8_t*>(input_image.buffer(kYPlane));
+ const_cast<uint8_t*>(input_image.buffer(kYPlane));
raw_images_[0].planes[VPX_PLANE_U] =
- const_cast<uint8_t*>(input_image.buffer(kUPlane));
+ const_cast<uint8_t*>(input_image.buffer(kUPlane));
raw_images_[0].planes[VPX_PLANE_V] =
- const_cast<uint8_t*>(input_image.buffer(kVPlane));
+ const_cast<uint8_t*>(input_image.buffer(kVPlane));
raw_images_[0].stride[VPX_PLANE_Y] = input_image.stride(kYPlane);
raw_images_[0].stride[VPX_PLANE_U] = input_image.stride(kUPlane);
@@ -760,17 +754,17 @@ int VP8EncoderImpl::Encode(const VideoFrame& frame,
for (size_t i = 1; i < encoders_.size(); ++i) {
// Scale the image down a number of times by downsampling factor
libyuv::I420Scale(
- raw_images_[i-1].planes[VPX_PLANE_Y],
- raw_images_[i-1].stride[VPX_PLANE_Y],
- raw_images_[i-1].planes[VPX_PLANE_U],
- raw_images_[i-1].stride[VPX_PLANE_U],
- raw_images_[i-1].planes[VPX_PLANE_V],
- raw_images_[i-1].stride[VPX_PLANE_V],
- raw_images_[i-1].d_w, raw_images_[i-1].d_h,
- raw_images_[i].planes[VPX_PLANE_Y], raw_images_[i].stride[VPX_PLANE_Y],
- raw_images_[i].planes[VPX_PLANE_U], raw_images_[i].stride[VPX_PLANE_U],
- raw_images_[i].planes[VPX_PLANE_V], raw_images_[i].stride[VPX_PLANE_V],
- raw_images_[i].d_w, raw_images_[i].d_h, libyuv::kFilterBilinear);
+ raw_images_[i - 1].planes[VPX_PLANE_Y],
+ raw_images_[i - 1].stride[VPX_PLANE_Y],
+ raw_images_[i - 1].planes[VPX_PLANE_U],
+ raw_images_[i - 1].stride[VPX_PLANE_U],
+ raw_images_[i - 1].planes[VPX_PLANE_V],
+ raw_images_[i - 1].stride[VPX_PLANE_V], raw_images_[i - 1].d_w,
+ raw_images_[i - 1].d_h, raw_images_[i].planes[VPX_PLANE_Y],
+ raw_images_[i].stride[VPX_PLANE_Y], raw_images_[i].planes[VPX_PLANE_U],
+ raw_images_[i].stride[VPX_PLANE_U], raw_images_[i].planes[VPX_PLANE_V],
+ raw_images_[i].stride[VPX_PLANE_V], raw_images_[i].d_w,
+ raw_images_[i].d_h, libyuv::kFilterBilinear);
}
vpx_enc_frame_flags_t flags[kMaxSimulcastStreams];
for (size_t i = 0; i < encoders_.size(); ++i) {
@@ -805,8 +799,8 @@ int VP8EncoderImpl::Encode(const VideoFrame& frame,
if (send_key_frame) {
// Adapt the size of the key frame when in screenshare with 1 temporal
// layer.
- if (encoders_.size() == 1 && codec_.mode == kScreensharing
- && codec_.codecSpecific.VP8.numberOfTemporalLayers <= 1) {
+ if (encoders_.size() == 1 && codec_.mode == kScreensharing &&
+ codec_.codecSpecific.VP8.numberOfTemporalLayers <= 1) {
const uint32_t forceKeyFrameIntraTh = 100;
vpx_codec_control(&(encoders_[0]), VP8E_SET_MAX_INTRA_BITRATE_PCT,
forceKeyFrameIntraTh);
@@ -818,13 +812,12 @@ int VP8EncoderImpl::Encode(const VideoFrame& frame,
}
std::fill(key_frame_request_.begin(), key_frame_request_.end(), false);
} else if (codec_specific_info &&
- codec_specific_info->codecType == kVideoCodecVP8) {
+ codec_specific_info->codecType == kVideoCodecVP8) {
if (feedback_mode_) {
// Handle RPSI and SLI messages and set up the appropriate encode flags.
bool sendRefresh = false;
if (codec_specific_info->codecSpecific.VP8.hasReceivedRPSI) {
- rps_.ReceivedRPSI(
- codec_specific_info->codecSpecific.VP8.pictureIdRPSI);
+ rps_.ReceivedRPSI(codec_specific_info->codecSpecific.VP8.pictureIdRPSI);
}
if (codec_specific_info->codecSpecific.VP8.hasReceivedSLI) {
sendRefresh = rps_.ReceivedSLI(input_image.timestamp());
@@ -876,8 +869,7 @@ int VP8EncoderImpl::Encode(const VideoFrame& frame,
}
vpx_codec_control(&encoders_[i], VP8E_SET_FRAME_FLAGS, flags[stream_idx]);
- vpx_codec_control(&encoders_[i],
- VP8E_SET_TEMPORAL_LAYER_ID,
+ vpx_codec_control(&encoders_[i], VP8E_SET_TEMPORAL_LAYER_ID,
temporal_layers_[stream_idx]->CurrentLayerId());
}
// TODO(holmer): Ideally the duration should be the timestamp diff of this
@@ -895,7 +887,7 @@ int VP8EncoderImpl::Encode(const VideoFrame& frame,
// Reset specific intra frame thresholds, following the key frame.
if (send_key_frame) {
vpx_codec_control(&(encoders_[0]), VP8E_SET_MAX_INTRA_BITRATE_PCT,
- rc_max_intra_target_);
+ rc_max_intra_target_);
}
if (error)
return WEBRTC_VIDEO_CODEC_ERROR;
@@ -913,8 +905,7 @@ int VP8EncoderImpl::UpdateCodecFrameSize(const VideoFrame& input_image) {
codec_.simulcastStream[0].height = input_image.height();
}
// Update the cpu_speed setting for resolution change.
- vpx_codec_control(&(encoders_[0]),
- VP8E_SET_CPUUSED,
+ vpx_codec_control(&(encoders_[0]), VP8E_SET_CPUUSED,
SetCpuSpeed(codec_.width, codec_.height));
raw_images_[0].w = codec_.width;
raw_images_[0].h = codec_.height;
@@ -947,13 +938,12 @@ void VP8EncoderImpl::PopulateCodecSpecific(
}
vp8Info->simulcastIdx = stream_idx;
vp8Info->keyIdx = kNoKeyIdx; // TODO(hlundin) populate this
- vp8Info->nonReference = (pkt.data.frame.flags & VPX_FRAME_IS_DROPPABLE) ?
- true : false;
+ vp8Info->nonReference =
+ (pkt.data.frame.flags & VPX_FRAME_IS_DROPPABLE) ? true : false;
bool base_layer_sync_point = (pkt.data.frame.flags & VPX_FRAME_IS_KEY) ||
- only_predicting_from_key_frame;
+ only_predicting_from_key_frame;
temporal_layers_[stream_idx]->PopulateCodecSpecific(base_layer_sync_point,
- vp8Info,
- timestamp);
+ vp8Info, timestamp);
// Prepare next.
picture_id_[stream_idx] = (picture_id_[stream_idx] + 1) & 0x7FFF;
}
@@ -966,27 +956,26 @@ int VP8EncoderImpl::GetEncodedPartitions(const VideoFrame& input_image,
int stream_idx = static_cast<int>(encoders_.size()) - 1;
int result = WEBRTC_VIDEO_CODEC_OK;
for (size_t encoder_idx = 0; encoder_idx < encoders_.size();
- ++encoder_idx, --stream_idx) {
+ ++encoder_idx, --stream_idx) {
vpx_codec_iter_t iter = NULL;
int part_idx = 0;
encoded_images_[encoder_idx]._length = 0;
encoded_images_[encoder_idx]._frameType = kVideoFrameDelta;
RTPFragmentationHeader frag_info;
// token_partitions_ is number of bits used.
- frag_info.VerifyAndAllocateFragmentationHeader((1 << token_partitions_)
- + 1);
+ frag_info.VerifyAndAllocateFragmentationHeader((1 << token_partitions_) +
+ 1);
CodecSpecificInfo codec_specific;
- const vpx_codec_cx_pkt_t *pkt = NULL;
- while ((pkt = vpx_codec_get_cx_data(&encoders_[encoder_idx],
- &iter)) != NULL) {
+ const vpx_codec_cx_pkt_t* pkt = NULL;
+ while ((pkt = vpx_codec_get_cx_data(&encoders_[encoder_idx], &iter)) !=
+ NULL) {
switch (pkt->kind) {
case VPX_CODEC_CX_FRAME_PKT: {
uint32_t length = encoded_images_[encoder_idx]._length;
memcpy(&encoded_images_[encoder_idx]._buffer[length],
- pkt->data.frame.buf,
- pkt->data.frame.sz);
+ pkt->data.frame.buf, pkt->data.frame.sz);
frag_info.fragmentationOffset[part_idx] = length;
- frag_info.fragmentationLength[part_idx] = pkt->data.frame.sz;
+ frag_info.fragmentationLength[part_idx] = pkt->data.frame.sz;
frag_info.fragmentationPlType[part_idx] = 0; // not known here
frag_info.fragmentationTimeDiff[part_idx] = 0;
encoded_images_[encoder_idx]._length += pkt->data.frame.sz;
@@ -1063,7 +1052,6 @@ int VP8EncoderImpl::RegisterEncodeCompleteCallback(
return WEBRTC_VIDEO_CODEC_OK;
}
-
VP8DecoderImpl::VP8DecoderImpl()
: decode_complete_callback_(NULL),
inited_(false),
@@ -1075,8 +1063,7 @@ VP8DecoderImpl::VP8DecoderImpl()
propagation_cnt_(-1),
last_frame_width_(0),
last_frame_height_(0),
- key_frame_required_(true) {
-}
+ key_frame_required_(true) {}
VP8DecoderImpl::~VP8DecoderImpl() {
inited_ = true; // in order to do the actual release
@@ -1092,8 +1079,7 @@ int VP8DecoderImpl::Reset() {
return WEBRTC_VIDEO_CODEC_OK;
}
-int VP8DecoderImpl::InitDecode(const VideoCodec* inst,
- int number_of_cores) {
+int VP8DecoderImpl::InitDecode(const VideoCodec* inst, int number_of_cores) {
int ret_val = Release();
if (ret_val < 0) {
return ret_val;
@@ -1104,12 +1090,12 @@ int VP8DecoderImpl::InitDecode(const VideoCodec* inst,
if (inst && inst->codecType == kVideoCodecVP8) {
feedback_mode_ = inst->codecSpecific.VP8.feedbackModeOn;
}
- vpx_codec_dec_cfg_t cfg;
+ vpx_codec_dec_cfg_t cfg;
// Setting number of threads to a constant value (1)
cfg.threads = 1;
cfg.h = cfg.w = 0; // set after decode
-vpx_codec_flags_t flags = 0;
+ vpx_codec_flags_t flags = 0;
#if !defined(WEBRTC_ARCH_ARM) && !defined(WEBRTC_ARCH_ARM64)
flags = VPX_CODEC_USE_POSTPROC;
#ifdef INDEPENDENT_PARTITIONS
@@ -1134,10 +1120,10 @@ vpx_codec_flags_t flags = 0;
}
int VP8DecoderImpl::Decode(const EncodedImage& input_image,
- bool missing_frames,
- const RTPFragmentationHeader* fragmentation,
- const CodecSpecificInfo* codec_specific_info,
- int64_t /*render_time_ms*/) {
+ bool missing_frames,
+ const RTPFragmentationHeader* fragmentation,
+ const CodecSpecificInfo* codec_specific_info,
+ int64_t /*render_time_ms*/) {
if (!inited_) {
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
@@ -1188,9 +1174,9 @@ int VP8DecoderImpl::Decode(const EncodedImage& input_image,
if (input_image._frameType == kVideoFrameKey &&
input_image._completeFrame) {
propagation_cnt_ = -1;
- // Start count on first loss.
+ // Start count on first loss.
} else if ((!input_image._completeFrame || missing_frames) &&
- propagation_cnt_ == -1) {
+ propagation_cnt_ == -1) {
propagation_cnt_ = 0;
}
if (propagation_cnt_ >= 0) {
@@ -1242,15 +1228,15 @@ int VP8DecoderImpl::Decode(const EncodedImage& input_image,
if (input_image._frameType == kVideoFrameKey && input_image._buffer != NULL) {
const uint32_t bytes_to_copy = input_image._length;
if (last_keyframe_._size < bytes_to_copy) {
- delete [] last_keyframe_._buffer;
+ delete[] last_keyframe_._buffer;
last_keyframe_._buffer = NULL;
last_keyframe_._size = 0;
}
uint8_t* temp_buffer = last_keyframe_._buffer; // Save buffer ptr.
- uint32_t temp_size = last_keyframe_._size; // Save size.
- last_keyframe_ = input_image; // Shallow copy.
- last_keyframe_._buffer = temp_buffer; // Restore buffer ptr.
- last_keyframe_._size = temp_size; // Restore buffer size.
+ uint32_t temp_size = last_keyframe_._size; // Save size.
+ last_keyframe_ = input_image; // Shallow copy.
+ last_keyframe_._buffer = temp_buffer; // Restore buffer ptr.
+ last_keyframe_._size = temp_size; // Restore buffer size.
if (!last_keyframe_._buffer) {
// Allocate memory.
last_keyframe_._size = bytes_to_copy;
@@ -1300,7 +1286,8 @@ int VP8DecoderImpl::Decode(const EncodedImage& input_image,
}
if (picture_id > -1) {
if (((reference_updates & VP8_GOLD_FRAME) ||
- (reference_updates & VP8_ALTR_FRAME)) && !corrupted) {
+ (reference_updates & VP8_ALTR_FRAME)) &&
+ !corrupted) {
decode_complete_callback_->ReceivedDecodedReferenceFrame(picture_id);
}
decode_complete_callback_->ReceivedDecodedFrame(picture_id);
@@ -1323,14 +1310,10 @@ int VP8DecoderImpl::DecodePartitions(
const EncodedImage& input_image,
const RTPFragmentationHeader* fragmentation) {
for (int i = 0; i < fragmentation->fragmentationVectorSize; ++i) {
- const uint8_t* partition = input_image._buffer +
- fragmentation->fragmentationOffset[i];
- const uint32_t partition_length =
- fragmentation->fragmentationLength[i];
- if (vpx_codec_decode(decoder_,
- partition,
- partition_length,
- 0,
+ const uint8_t* partition =
+ input_image._buffer + fragmentation->fragmentationOffset[i];
+ const uint32_t partition_length = fragmentation->fragmentationLength[i];
+ if (vpx_codec_decode(decoder_, partition, partition_length, 0,
VPX_DL_REALTIME)) {
return WEBRTC_VIDEO_CODEC_ERROR;
}
@@ -1343,8 +1326,8 @@ int VP8DecoderImpl::DecodePartitions(
}
int VP8DecoderImpl::ReturnFrame(const vpx_image_t* img,
- uint32_t timestamp,
- int64_t ntp_time_ms) {
+ uint32_t timestamp,
+ int64_t ntp_time_ms) {
if (img == NULL) {
// Decoder OK and NULL image => No show frame
return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
@@ -1354,14 +1337,13 @@ int VP8DecoderImpl::ReturnFrame(const vpx_image_t* img,
// Allocate memory for decoded image.
VideoFrame decoded_image(buffer_pool_.CreateBuffer(img->d_w, img->d_h),
timestamp, 0, kVideoRotation_0);
- libyuv::I420Copy(
- img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y],
- img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U],
- img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V],
- decoded_image.buffer(kYPlane), decoded_image.stride(kYPlane),
- decoded_image.buffer(kUPlane), decoded_image.stride(kUPlane),
- decoded_image.buffer(kVPlane), decoded_image.stride(kVPlane),
- img->d_w, img->d_h);
+ libyuv::I420Copy(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y],
+ img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U],
+ img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V],
+ decoded_image.buffer(kYPlane), decoded_image.stride(kYPlane),
+ decoded_image.buffer(kUPlane), decoded_image.stride(kUPlane),
+ decoded_image.buffer(kVPlane), decoded_image.stride(kVPlane),
+ img->d_w, img->d_h);
decoded_image.set_ntp_time_ms(ntp_time_ms);
int ret = decode_complete_callback_->Decoded(decoded_image);
if (ret != 0)
@@ -1380,7 +1362,7 @@ int VP8DecoderImpl::RegisterDecodeCompleteCallback(
int VP8DecoderImpl::Release() {
if (last_keyframe_._buffer != NULL) {
- delete [] last_keyframe_._buffer;
+ delete[] last_keyframe_._buffer;
last_keyframe_._buffer = NULL;
}
if (decoder_ != NULL) {
@@ -1400,15 +1382,19 @@ int VP8DecoderImpl::Release() {
return WEBRTC_VIDEO_CODEC_OK;
}
+const char* VP8DecoderImpl::ImplementationName() const {
+ return "libvpx";
+}
+
int VP8DecoderImpl::CopyReference(VP8DecoderImpl* copy) {
// The type of frame to copy should be set in ref_frame_->frame_type
// before the call to this function.
- if (vpx_codec_control(decoder_, VP8_COPY_REFERENCE, ref_frame_)
- != VPX_CODEC_OK) {
+ if (vpx_codec_control(decoder_, VP8_COPY_REFERENCE, ref_frame_) !=
+ VPX_CODEC_OK) {
return -1;
}
- if (vpx_codec_control(copy->decoder_, VP8_SET_REFERENCE, ref_frame_)
- != VPX_CODEC_OK) {
+ if (vpx_codec_control(copy->decoder_, VP8_SET_REFERENCE, ref_frame_) !=
+ VPX_CODEC_OK) {
return -1;
}
return 0;
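
Behind the re-wrapped loop in GetStreamBitratesKbps sits the simulcast allocation policy: walk the streams low-to-high, keep a stream only while its minimum bitrate still fits, grant each active stream up to its target, and hand any remainder to the highest active stream. A simplified standalone sketch of that policy — StreamConfig and the cap-free remainder handling are illustrative; the real code also respects per-stream maximums:

#include <algorithm>
#include <vector>

struct StreamConfig {
  int min_bitrate_kbps;
  int target_bitrate_kbps;
};

// Allocates |total_kbps| across simulcast streams, lowest resolution first.
std::vector<int> AllocateSimulcastBitrates(
    const std::vector<StreamConfig>& streams, int total_kbps) {
  std::vector<int> rates(streams.size(), 0);
  if (streams.empty())
    return rates;
  int remaining = total_kbps;
  size_t last_active = 0;
  // A stream stays active only while its minimum still fits the budget.
  for (size_t i = 0;
       i < streams.size() && remaining >= streams[i].min_bitrate_kbps; ++i) {
    last_active = i;
    rates[i] = std::min(remaining, streams[i].target_bitrate_kbps);
    remaining -= rates[i];
  }
  // Leftover bits go to the highest stream that could be enabled.
  rates[last_active] += remaining;
  return rates;
}
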
diff --git a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.h b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.h
index ba14ed5841..9d5fb713a4 100644
--- a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.h
+++ b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.h
@@ -22,12 +22,12 @@
#include "vpx/vp8cx.h"
#include "vpx/vp8dx.h"
-#include "webrtc/common_video/interface/i420_buffer_pool.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "webrtc/common_video/include/i420_buffer_pool.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
#include "webrtc/modules/video_coding/codecs/vp8/reference_picture_selection.h"
-#include "webrtc/modules/video_coding/utility/include/frame_dropper.h"
-#include "webrtc/modules/video_coding/utility/include/quality_scaler.h"
+#include "webrtc/modules/video_coding/utility/frame_dropper.h"
+#include "webrtc/modules/video_coding/utility/quality_scaler.h"
#include "webrtc/video_frame.h"
namespace webrtc {
@@ -58,8 +58,11 @@ class VP8EncoderImpl : public VP8Encoder {
void OnDroppedFrame() override {}
+ const char* ImplementationName() const override;
+
private:
- void SetupTemporalLayers(int num_streams, int num_temporal_layers,
+ void SetupTemporalLayers(int num_streams,
+ int num_temporal_layers,
const VideoCodec& codec);
// Set the cpu_speed setting for encoder based on resolution and/or platform.
@@ -126,15 +129,17 @@ class VP8DecoderImpl : public VP8Decoder {
int InitDecode(const VideoCodec* inst, int number_of_cores) override;
int Decode(const EncodedImage& input_image,
- bool missing_frames,
- const RTPFragmentationHeader* fragmentation,
- const CodecSpecificInfo* codec_specific_info,
- int64_t /*render_time_ms*/) override;
+ bool missing_frames,
+ const RTPFragmentationHeader* fragmentation,
+ const CodecSpecificInfo* codec_specific_info,
+ int64_t /*render_time_ms*/) override;
int RegisterDecodeCompleteCallback(DecodedImageCallback* callback) override;
int Release() override;
int Reset() override;
+ const char* ImplementationName() const override;
+
private:
// Copy reference image from this _decoder to the _decoder in copyTo. Set
// which frame type to copy in _refFrame->frame_type before the call to
@@ -165,4 +170,3 @@ class VP8DecoderImpl : public VP8Decoder {
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_VP8_IMPL_H_
-
diff --git a/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc b/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc
index 5843d83fa7..9e546653db 100644
--- a/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc
@@ -1,4 +1,4 @@
- /*
+/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
@@ -9,8 +9,9 @@
*/
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/checks.h"
#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/common_video/interface/video_image.h"
+#include "webrtc/common_video/include/video_image.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
#include "webrtc/system_wrappers/include/tick_util.h"
@@ -22,8 +23,7 @@
class Vp8SequenceCoderEncodeCallback : public webrtc::EncodedImageCallback {
public:
explicit Vp8SequenceCoderEncodeCallback(FILE* encoded_file)
- : encoded_file_(encoded_file),
- encoded_bytes_(0) {}
+ : encoded_file_(encoded_file), encoded_bytes_(0) {}
~Vp8SequenceCoderEncodeCallback();
int Encoded(const webrtc::EncodedImage& encoded_image,
const webrtc::CodecSpecificInfo* codecSpecificInfo,
@@ -31,6 +31,7 @@ class Vp8SequenceCoderEncodeCallback : public webrtc::EncodedImageCallback {
// Returns the encoded image.
webrtc::EncodedImage encoded_image() { return encoded_image_; }
size_t encoded_bytes() { return encoded_bytes_; }
+
private:
webrtc::EncodedImage encoded_image_;
FILE* encoded_file_;
@@ -38,7 +39,7 @@ class Vp8SequenceCoderEncodeCallback : public webrtc::EncodedImageCallback {
};
Vp8SequenceCoderEncodeCallback::~Vp8SequenceCoderEncodeCallback() {
- delete [] encoded_image_._buffer;
+ delete[] encoded_image_._buffer;
encoded_image_._buffer = NULL;
}
int Vp8SequenceCoderEncodeCallback::Encoded(
@@ -46,7 +47,7 @@ int Vp8SequenceCoderEncodeCallback::Encoded(
const webrtc::CodecSpecificInfo* codecSpecificInfo,
const webrtc::RTPFragmentationHeader* fragmentation) {
if (encoded_image_._size < encoded_image._size) {
- delete [] encoded_image_._buffer;
+ delete[] encoded_image_._buffer;
encoded_image_._buffer = NULL;
encoded_image_._buffer = new uint8_t[encoded_image._size];
encoded_image_._size = encoded_image._size;
@@ -68,7 +69,11 @@ class Vp8SequenceCoderDecodeCallback : public webrtc::DecodedImageCallback {
public:
explicit Vp8SequenceCoderDecodeCallback(FILE* decoded_file)
: decoded_file_(decoded_file) {}
- int Decoded(webrtc::VideoFrame& frame);
+ int32_t Decoded(webrtc::VideoFrame& frame) override;
+ int32_t Decoded(webrtc::VideoFrame& frame, int64_t decode_time_ms) override {
+ RTC_NOTREACHED();
+ return -1;
+ }
bool DecodeComplete();
private:
@@ -80,16 +85,16 @@ int Vp8SequenceCoderDecodeCallback::Decoded(webrtc::VideoFrame& image) {
return 0;
}
-int SequenceCoder(webrtc::test::CommandLineParser& parser) {
- int width = strtol((parser.GetFlag("w")).c_str(), NULL, 10);
- int height = strtol((parser.GetFlag("h")).c_str(), NULL, 10);
- int framerate = strtol((parser.GetFlag("f")).c_str(), NULL, 10);
+int SequenceCoder(webrtc::test::CommandLineParser* parser) {
+ int width = strtol((parser->GetFlag("w")).c_str(), NULL, 10);
+ int height = strtol((parser->GetFlag("h")).c_str(), NULL, 10);
+ int framerate = strtol((parser->GetFlag("f")).c_str(), NULL, 10);
if (width <= 0 || height <= 0 || framerate <= 0) {
fprintf(stderr, "Error: Resolution cannot be <= 0!\n");
return -1;
}
- int target_bitrate = strtol((parser.GetFlag("b")).c_str(), NULL, 10);
+ int target_bitrate = strtol((parser->GetFlag("b")).c_str(), NULL, 10);
if (target_bitrate <= 0) {
fprintf(stderr, "Error: Bit-rate cannot be <= 0!\n");
return -1;
@@ -97,20 +102,20 @@ int SequenceCoder(webrtc::test::CommandLineParser& parser) {
// SetUp
// Open input file.
- std::string encoded_file_name = parser.GetFlag("encoded_file");
+ std::string encoded_file_name = parser->GetFlag("encoded_file");
FILE* encoded_file = fopen(encoded_file_name.c_str(), "wb");
if (encoded_file == NULL) {
fprintf(stderr, "Error: Cannot open encoded file\n");
return -1;
}
- std::string input_file_name = parser.GetFlag("input_file");
+ std::string input_file_name = parser->GetFlag("input_file");
FILE* input_file = fopen(input_file_name.c_str(), "rb");
if (input_file == NULL) {
fprintf(stderr, "Error: Cannot open input file\n");
return -1;
}
// Open output file.
- std::string output_file_name = parser.GetFlag("output_file");
+ std::string output_file_name = parser->GetFlag("output_file");
FILE* output_file = fopen(output_file_name.c_str(), "wb");
if (output_file == NULL) {
fprintf(stderr, "Error: Cannot open output file\n");
@@ -118,8 +123,8 @@ int SequenceCoder(webrtc::test::CommandLineParser& parser) {
}
// Get range of frames: will encode num_frames following start_frame).
- int start_frame = strtol((parser.GetFlag("start_frame")).c_str(), NULL, 10);
- int num_frames = strtol((parser.GetFlag("num_frames")).c_str(), NULL, 10);
+ int start_frame = strtol((parser->GetFlag("start_frame")).c_str(), NULL, 10);
+ int num_frames = strtol((parser->GetFlag("num_frames")).c_str(), NULL, 10);
// Codec SetUp.
webrtc::VideoCodec inst;
@@ -157,8 +162,8 @@ int SequenceCoder(webrtc::test::CommandLineParser& parser) {
int frames_processed = 0;
input_frame.CreateEmptyFrame(width, height, width, half_width, half_width);
while (!feof(input_file) &&
- (num_frames == -1 || frames_processed < num_frames)) {
- if (fread(frame_buffer.get(), 1, length, input_file) != length)
+ (num_frames == -1 || frames_processed < num_frames)) {
+ if (fread(frame_buffer.get(), 1, length, input_file) != length)
continue;
if (frame_cnt >= start_frame) {
webrtc::ConvertToI420(webrtc::kI420, frame_buffer.get(), 0, 0, width,
@@ -179,33 +184,35 @@ int SequenceCoder(webrtc::test::CommandLineParser& parser) {
printf("Actual bitrate: %f kbps\n", actual_bit_rate / 1000);
webrtc::test::QualityMetricsResult psnr_result, ssim_result;
EXPECT_EQ(0, webrtc::test::I420MetricsFromFiles(
- input_file_name.c_str(), output_file_name.c_str(),
- inst.width, inst.height,
- &psnr_result, &ssim_result));
+ input_file_name.c_str(), output_file_name.c_str(),
+ inst.width, inst.height, &psnr_result, &ssim_result));
printf("PSNR avg: %f[dB], min: %f[dB]\nSSIM avg: %f, min: %f\n",
- psnr_result.average, psnr_result.min,
- ssim_result.average, ssim_result.min);
+ psnr_result.average, psnr_result.min, ssim_result.average,
+ ssim_result.min);
return frame_cnt;
}
int main(int argc, char** argv) {
std::string program_name = argv[0];
- std::string usage = "Encode and decodes a video sequence, and writes"
- "results to a file.\n"
- "Example usage:\n" + program_name + " functionality"
- " --w=352 --h=288 --input_file=input.yuv --output_file=output.yuv "
- " Command line flags:\n"
- " - width(int): The width of the input file. Default: 352\n"
- " - height(int): The height of the input file. Default: 288\n"
- " - input_file(string): The YUV file to encode."
- " Default: foreman.yuv\n"
- " - encoded_file(string): The vp8 encoded file (encoder output)."
- " Default: vp8_encoded.vp8\n"
- " - output_file(string): The yuv decoded file (decoder output)."
- " Default: vp8_decoded.yuv\n."
- " - start_frame - frame number in which encoding will begin. Default: 0"
- " - num_frames - Number of frames to be processed. "
- " Default: -1 (entire sequence).";
+ std::string usage =
+ "Encode and decodes a video sequence, and writes"
+ "results to a file.\n"
+ "Example usage:\n" +
+ program_name +
+ " functionality"
+ " --w=352 --h=288 --input_file=input.yuv --output_file=output.yuv "
+ " Command line flags:\n"
+ " - width(int): The width of the input file. Default: 352\n"
+ " - height(int): The height of the input file. Default: 288\n"
+ " - input_file(string): The YUV file to encode."
+ " Default: foreman.yuv\n"
+ " - encoded_file(string): The vp8 encoded file (encoder output)."
+ " Default: vp8_encoded.vp8\n"
+ " - output_file(string): The yuv decoded file (decoder output)."
+ " Default: vp8_decoded.yuv\n."
+ " - start_frame - frame number in which encoding will begin. Default: 0"
+ " - num_frames - Number of frames to be processed. "
+ " Default: -1 (entire sequence).";
webrtc::test::CommandLineParser parser;
@@ -223,8 +230,8 @@ int main(int argc, char** argv) {
parser.SetFlag("output_file", webrtc::test::OutputPath() + "vp8_decoded.yuv");
parser.SetFlag("encoded_file",
webrtc::test::OutputPath() + "vp8_encoded.vp8");
- parser.SetFlag("input_file", webrtc::test::ResourcePath("foreman_cif",
- "yuv"));
+ parser.SetFlag("input_file",
+ webrtc::test::ResourcePath("foreman_cif", "yuv"));
parser.SetFlag("help", "false");
parser.ProcessFlags();
@@ -234,5 +241,5 @@ int main(int argc, char** argv) {
}
parser.PrintEnteredFlags();
- return SequenceCoder(parser);
+ return SequenceCoder(&parser);
}
diff --git a/webrtc/modules/video_coding/codecs/vp9/include/vp9.h b/webrtc/modules/video_coding/codecs/vp9/include/vp9.h
index cd77f72dcb..3bcbe46b3a 100644
--- a/webrtc/modules/video_coding/codecs/vp9/include/vp9.h
+++ b/webrtc/modules/video_coding/codecs/vp9/include/vp9.h
@@ -12,7 +12,7 @@
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP9_INCLUDE_VP9_H_
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP9_INCLUDE_VP9_H_
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
namespace webrtc {
@@ -23,7 +23,6 @@ class VP9Encoder : public VideoEncoder {
virtual ~VP9Encoder() {}
};
-
class VP9Decoder : public VideoDecoder {
public:
static VP9Decoder* Create();
diff --git a/webrtc/modules/video_coding/codecs/vp9/screenshare_layers.cc b/webrtc/modules/video_coding/codecs/vp9/screenshare_layers.cc
new file mode 100644
index 0000000000..c7ed78a192
--- /dev/null
+++ b/webrtc/modules/video_coding/codecs/vp9/screenshare_layers.cc
@@ -0,0 +1,93 @@
+/* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+*
+* Use of this source code is governed by a BSD-style license
+* that can be found in the LICENSE file in the root of the source
+* tree. An additional intellectual property rights grant can be found
+* in the file PATENTS. All contributing project authors may
+* be found in the AUTHORS file in the root of the source tree.
+*/
+
+#include <algorithm>
+#include "webrtc/modules/video_coding/codecs/vp9/screenshare_layers.h"
+#include "webrtc/base/checks.h"
+
+namespace webrtc {
+
+ScreenshareLayersVP9::ScreenshareLayersVP9(uint8_t num_layers)
+ : num_layers_(num_layers),
+ start_layer_(0),
+ last_timestamp_(0),
+ timestamp_initialized_(false) {
+ RTC_DCHECK_GT(num_layers, 0);
+ RTC_DCHECK_LE(num_layers, kMaxVp9NumberOfSpatialLayers);
+ memset(bits_used_, 0, sizeof(bits_used_));
+ memset(threshold_kbps_, 0, sizeof(threshold_kbps_));
+}
+
+uint8_t ScreenshareLayersVP9::GetStartLayer() const {
+ return start_layer_;
+}
+
+void ScreenshareLayersVP9::ConfigureBitrate(int threshold_kbps,
+ uint8_t layer_id) {
+ // The upper layer is always the layer we spill frames
+ // to when the bitrate becomes too high, therefore setting
+ // a max limit is not allowed. The top layer bitrate is
+ // never used either, so configuring it makes no difference.
+ RTC_DCHECK_LT(layer_id, num_layers_ - 1);
+ threshold_kbps_[layer_id] = threshold_kbps;
+}
+
+void ScreenshareLayersVP9::LayerFrameEncoded(unsigned int size_bytes,
+ uint8_t layer_id) {
+ RTC_DCHECK_LT(layer_id, num_layers_);
+ bits_used_[layer_id] += size_bytes * 8;
+}
+
+VP9EncoderImpl::SuperFrameRefSettings
+ScreenshareLayersVP9::GetSuperFrameSettings(uint32_t timestamp,
+ bool is_keyframe) {
+ VP9EncoderImpl::SuperFrameRefSettings settings;
+ if (!timestamp_initialized_) {
+ last_timestamp_ = timestamp;
+ timestamp_initialized_ = true;
+ }
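+ // |timestamp| is in 90 kHz RTP ticks, so time_diff is in milliseconds and
+ // time_diff * threshold_kbps (used below) is in bits (kbit/s * ms == bits).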
+ float time_diff = (timestamp - last_timestamp_) / 90.f;
+ float total_bits_used = 0;
+ float total_threshold_kbps = 0;
+ start_layer_ = 0;
+
+ // Up to (num_layers - 1) because we only have
+ // (num_layers - 1) thresholds to check.
+ for (int layer_id = 0; layer_id < num_layers_ - 1; ++layer_id) {
+ bits_used_[layer_id] = std::max(
+ 0.f, bits_used_[layer_id] - time_diff * threshold_kbps_[layer_id]);
+ total_bits_used += bits_used_[layer_id];
+ total_threshold_kbps += threshold_kbps_[layer_id];
+
+ // If this is a keyframe then there should be no
+ // references to any previous frames.
+ if (!is_keyframe) {
+ settings.layer[layer_id].ref_buf1 = layer_id;
+ if (total_bits_used > total_threshold_kbps * 1000)
+ start_layer_ = layer_id + 1;
+ }
+
+ settings.layer[layer_id].upd_buf = layer_id;
+ }
+ // Since the above loop does not iterate over the last layer, its
+ // reference has to be set after the loop. If this is a keyframe
+ // there should be no references to any previous frames.
+ if (!is_keyframe)
+ settings.layer[num_layers_ - 1].ref_buf1 = num_layers_ - 1;
+
+ settings.layer[num_layers_ - 1].upd_buf = num_layers_ - 1;
+ settings.is_keyframe = is_keyframe;
+ settings.start_layer = start_layer_;
+ settings.stop_layer = num_layers_ - 1;
+ last_timestamp_ = timestamp;
+ return settings;
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/video_coding/codecs/vp9/screenshare_layers.h b/webrtc/modules/video_coding/codecs/vp9/screenshare_layers.h
new file mode 100644
index 0000000000..5a901ae359
--- /dev/null
+++ b/webrtc/modules/video_coding/codecs/vp9/screenshare_layers.h
@@ -0,0 +1,66 @@
+/* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+*
+* Use of this source code is governed by a BSD-style license
+* that can be found in the LICENSE file in the root of the source
+* tree. An additional intellectual property rights grant can be found
+* in the file PATENTS. All contributing project authors may
+* be found in the AUTHORS file in the root of the source tree.
+*/
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP9_SCREENSHARE_LAYERS_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP9_SCREENSHARE_LAYERS_H_
+
+#include "webrtc/modules/video_coding/codecs/vp9/vp9_impl.h"
+
+namespace webrtc {
+
+class ScreenshareLayersVP9 {
+ public:
+ explicit ScreenshareLayersVP9(uint8_t num_layers);
+
+ // The target bitrate for layer with id layer_id.
+ void ConfigureBitrate(int threshold_kbps, uint8_t layer_id);
+
+ // The current start layer.
+ uint8_t GetStartLayer() const;
+
+ // Update the layer with the size of the layer frame.
+ void LayerFrameEncoded(unsigned int size_bytes, uint8_t layer_id);
+
+ // Get the layer settings for the next superframe.
+ //
+ // In short, each time GetSuperFrameSettings() is called the
+ // bitrate of every layer is calculated, and if the cumulative
+ // bitrate exceeds the configured cumulative bitrates
+ // (use ConfigureBitrate() to configure) up to and including that
+ // layer, then the resulting encoding settings for the
+ // superframe will only encode layers above that layer.
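+ //
+ // Illustrative sketch (hypothetical values; not prescribed usage):
+ //   ScreenshareLayersVP9 layers(2);
+ //   layers.ConfigureBitrate(100, 0);  // Spill to layer 1 above 100 kbps.
+ //   VP9EncoderImpl::SuperFrameRefSettings s =
+ //       layers.GetSuperFrameSettings(rtp_timestamp, false);
+ //   // s.start_layer == 1 while layer 0 is over its cumulative budget.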
+ VP9EncoderImpl::SuperFrameRefSettings GetSuperFrameSettings(
+ uint32_t timestamp,
+ bool is_keyframe);
+
+ private:
+ // The number of layers in use.
+ uint8_t num_layers_;
+
+ // The index of the first layer to encode.
+ uint8_t start_layer_;
+
+ // Cumulative target kbps for the different layers.
+ float threshold_kbps_[kMaxVp9NumberOfSpatialLayers - 1];
+
+ // How many bits have been used for a certain layer. Increased in
+ // LayerFrameEncoded() by the size of the encoded frame and decreased in
+ // GetSuperFrameSettings() depending on the time between frames.
+ float bits_used_[kMaxVp9NumberOfSpatialLayers];
+
+ // Timestamp of last frame.
+ uint32_t last_timestamp_;
+
+ // If the last_timestamp_ has been set.
+ bool timestamp_initialized_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP9_SCREENSHARE_LAYERS_H_
diff --git a/webrtc/modules/video_coding/codecs/vp9/screenshare_layers_unittest.cc b/webrtc/modules/video_coding/codecs/vp9/screenshare_layers_unittest.cc
new file mode 100644
index 0000000000..5eb7b237ac
--- /dev/null
+++ b/webrtc/modules/video_coding/codecs/vp9/screenshare_layers_unittest.cc
@@ -0,0 +1,323 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <limits>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "vpx/vp8cx.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/modules/video_coding/codecs/vp9/screenshare_layers.h"
+#include "webrtc/modules/video_coding/codecs/vp9/vp9_impl.h"
+#include "webrtc/system_wrappers/include/clock.h"
+
+namespace webrtc {
+
+typedef VP9EncoderImpl::SuperFrameRefSettings Settings;
+
+const uint32_t kTickFrequency = 90000;
+
+class ScreenshareLayerTestVP9 : public ::testing::Test {
+ protected:
+ ScreenshareLayerTestVP9() : clock_(0) {}
+ virtual ~ScreenshareLayerTestVP9() {}
+
+ void InitScreenshareLayers(int layers) {
+ layers_.reset(new ScreenshareLayersVP9(layers));
+ }
+
+ void ConfigureBitrateForLayer(int kbps, uint8_t layer_id) {
+ layers_->ConfigureBitrate(kbps, layer_id);
+ }
+
+ void AdvanceTime(int64_t milliseconds) {
+ clock_.AdvanceTimeMilliseconds(milliseconds);
+ }
+
+ void AddKilobitsToLayer(int kilobits, uint8_t layer_id) {
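+ // LayerFrameEncoded() takes the frame size in bytes, so convert
+ // kilobits -> bits -> bytes.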
+ layers_->LayerFrameEncoded(kilobits * 1000 / 8, layer_id);
+ }
+
+ void EqualRefsForLayer(const Settings& actual, uint8_t layer_id) {
+ EXPECT_EQ(expected_.layer[layer_id].upd_buf,
+ actual.layer[layer_id].upd_buf);
+ EXPECT_EQ(expected_.layer[layer_id].ref_buf1,
+ actual.layer[layer_id].ref_buf1);
+ EXPECT_EQ(expected_.layer[layer_id].ref_buf2,
+ actual.layer[layer_id].ref_buf2);
+ EXPECT_EQ(expected_.layer[layer_id].ref_buf3,
+ actual.layer[layer_id].ref_buf3);
+ }
+
+ void EqualRefs(const Settings& actual) {
+ for (unsigned int layer_id = 0; layer_id < kMaxVp9NumberOfSpatialLayers;
+ ++layer_id) {
+ EqualRefsForLayer(actual, layer_id);
+ }
+ }
+
+ void EqualStartStopKeyframe(const Settings& actual) {
+ EXPECT_EQ(expected_.start_layer, actual.start_layer);
+ EXPECT_EQ(expected_.stop_layer, actual.stop_layer);
+ EXPECT_EQ(expected_.is_keyframe, actual.is_keyframe);
+ }
+
+ // Check that the settings returned by GetSuperFrameSettings() are
+ // equal to the expected_ settings.
+ void EqualToExpected() {
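+ // Convert the simulated clock (in ms) to a 90 kHz RTP timestamp.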
+ uint32_t frame_timestamp_ =
+ clock_.TimeInMilliseconds() * (kTickFrequency / 1000);
+ Settings actual =
+ layers_->GetSuperFrameSettings(frame_timestamp_, expected_.is_keyframe);
+ EqualRefs(actual);
+ EqualStartStopKeyframe(actual);
+ }
+
+ Settings expected_;
+ SimulatedClock clock_;
+ rtc::scoped_ptr<ScreenshareLayersVP9> layers_;
+};
+
+TEST_F(ScreenshareLayerTestVP9, NoRefsOnKeyFrame) {
+ const int kNumLayers = kMaxVp9NumberOfSpatialLayers;
+ InitScreenshareLayers(kNumLayers);
+ expected_.start_layer = 0;
+ expected_.stop_layer = kNumLayers - 1;
+
+ for (int l = 0; l < kNumLayers; ++l) {
+ expected_.layer[l].upd_buf = l;
+ }
+ expected_.is_keyframe = true;
+ EqualToExpected();
+
+ for (int l = 0; l < kNumLayers; ++l) {
+ expected_.layer[l].ref_buf1 = l;
+ }
+ expected_.is_keyframe = false;
+ EqualToExpected();
+}
+
+// Test that it is not possible to send at a high bitrate (over the
+// threshold) just because a longer period of low bitrate preceded it.
+TEST_F(ScreenshareLayerTestVP9, DontAccumelateAvailableBitsOverTime) {
+ InitScreenshareLayers(2);
+ ConfigureBitrateForLayer(100, 0);
+
+ expected_.layer[0].upd_buf = 0;
+ expected_.layer[0].ref_buf1 = 0;
+ expected_.layer[1].upd_buf = 1;
+ expected_.layer[1].ref_buf1 = 1;
+ expected_.start_layer = 0;
+ expected_.stop_layer = 1;
+
+ // Send 10 frames at a low bitrate (50 kbps).
+ for (int i = 0; i < 10; ++i) {
+ AdvanceTime(200);
+ EqualToExpected();
+ AddKilobitsToLayer(10, 0);
+ }
+
+ AdvanceTime(200);
+ EqualToExpected();
+ AddKilobitsToLayer(301, 0);
+
+ // Send 10 frames at a high bitrate (200 kbps).
+ expected_.start_layer = 1;
+ for (int i = 0; i < 10; ++i) {
+ AdvanceTime(200);
+ EqualToExpected();
+ AddKilobitsToLayer(40, 1);
+ }
+}
+
+// Test that used bits are accumulated over layers, as they should be.
+TEST_F(ScreenshareLayerTestVP9, AccumelateUsedBitsOverLayers) {
+ const int kNumLayers = kMaxVp9NumberOfSpatialLayers;
+ InitScreenshareLayers(kNumLayers);
+ for (int l = 0; l < kNumLayers - 1; ++l)
+ ConfigureBitrateForLayer(100, l);
+ for (int l = 0; l < kNumLayers; ++l) {
+ expected_.layer[l].upd_buf = l;
+ expected_.layer[l].ref_buf1 = l;
+ }
+
+ expected_.start_layer = 0;
+ expected_.stop_layer = kNumLayers - 1;
+ EqualToExpected();
+
+ for (int layer = 0; layer < kNumLayers - 1; ++layer) {
+ expected_.start_layer = layer;
+ EqualToExpected();
+ AddKilobitsToLayer(101, layer);
+ }
+}
+
+// General testing of the bitrate controller.
+TEST_F(ScreenshareLayerTestVP9, 2LayerBitrate) {
+ InitScreenshareLayers(2);
+ ConfigureBitrateForLayer(100, 0);
+
+ expected_.layer[0].upd_buf = 0;
+ expected_.layer[1].upd_buf = 1;
+ expected_.layer[0].ref_buf1 = -1;
+ expected_.layer[1].ref_buf1 = -1;
+ expected_.start_layer = 0;
+ expected_.stop_layer = 1;
+
+ expected_.is_keyframe = true;
+ EqualToExpected();
+ AddKilobitsToLayer(100, 0);
+
+ expected_.layer[0].ref_buf1 = 0;
+ expected_.layer[1].ref_buf1 = 1;
+ expected_.is_keyframe = false;
+ AdvanceTime(199);
+ EqualToExpected();
+ AddKilobitsToLayer(100, 0);
+
+ expected_.start_layer = 1;
+ for (int frame = 0; frame < 3; ++frame) {
+ AdvanceTime(200);
+ EqualToExpected();
+ AddKilobitsToLayer(100, 1);
+ }
+
+ // Just before enough bits become available for L0 @0.999 seconds.
+ AdvanceTime(199);
+ EqualToExpected();
+ AddKilobitsToLayer(100, 1);
+
+ // Just after enough bits become available for L0 @1.0001 seconds.
+ expected_.start_layer = 0;
+ AdvanceTime(2);
+ EqualToExpected();
+ AddKilobitsToLayer(100, 0);
+
+ // Keyframes always encode all layers, even if it is over budget.
+ expected_.layer[0].ref_buf1 = -1;
+ expected_.layer[1].ref_buf1 = -1;
+ expected_.is_keyframe = true;
+ AdvanceTime(499);
+ EqualToExpected();
+ expected_.layer[0].ref_buf1 = 0;
+ expected_.layer[1].ref_buf1 = 1;
+ expected_.start_layer = 1;
+ expected_.is_keyframe = false;
+ EqualToExpected();
+ AddKilobitsToLayer(100, 0);
+
+ // 400 kb in L0 --> falls below the threshold at the 3 second mark.
+ // Just before that, @2.999 seconds.
+ expected_.is_keyframe = false;
+ AdvanceTime(1499);
+ EqualToExpected();
+ AddKilobitsToLayer(100, 1);
+
+ // Just after @3.001 seconds.
+ expected_.start_layer = 0;
+ AdvanceTime(2);
+ EqualToExpected();
+ AddKilobitsToLayer(100, 0);
+}
+
+// General testing of the bitrate controller.
+TEST_F(ScreenshareLayerTestVP9, 3LayerBitrate) {
+ InitScreenshareLayers(3);
+ ConfigureBitrateForLayer(100, 0);
+ ConfigureBitrateForLayer(100, 1);
+
+ for (int l = 0; l < 3; ++l) {
+ expected_.layer[l].upd_buf = l;
+ expected_.layer[l].ref_buf1 = l;
+ }
+ expected_.start_layer = 0;
+ expected_.stop_layer = 2;
+
+ EqualToExpected();
+ AddKilobitsToLayer(105, 0);
+ AddKilobitsToLayer(30, 1);
+
+ AdvanceTime(199);
+ EqualToExpected();
+ AddKilobitsToLayer(105, 0);
+ AddKilobitsToLayer(30, 1);
+
+ expected_.start_layer = 1;
+ AdvanceTime(200);
+ EqualToExpected();
+ AddKilobitsToLayer(130, 1);
+
+ expected_.start_layer = 2;
+ AdvanceTime(200);
+ EqualToExpected();
+
+ // 400 kb in L1 --> @1.0 second mark to fall below threshold.
+ // 210 kb in L0 --> @1.1 second mark to fall below threshold.
+ // Just before L1 @0.999 seconds.
+ AdvanceTime(399);
+ EqualToExpected();
+
+ // Just after L1 @1.001 seconds.
+ expected_.start_layer = 1;
+ AdvanceTime(2);
+ EqualToExpected();
+
+ // Just before L0 @1.099 seconds.
+ AdvanceTime(99);
+ EqualToExpected();
+
+ // Just after L0 @1.101 seconds.
+ expected_.start_layer = 0;
+ AdvanceTime(2);
+ EqualToExpected();
+
+ // @1.1 seconds
+ AdvanceTime(99);
+ EqualToExpected();
+ AddKilobitsToLayer(200, 1);
+
+ expected_.is_keyframe = true;
+ for (int l = 0; l < 3; ++l)
+ expected_.layer[l].ref_buf1 = -1;
+ AdvanceTime(200);
+ EqualToExpected();
+
+ expected_.is_keyframe = false;
+ expected_.start_layer = 2;
+ for (int l = 0; l < 3; ++l)
+ expected_.layer[l].ref_buf1 = l;
+ AdvanceTime(200);
+ EqualToExpected();
+}
+
+// Test that the bitrate calculations are
+// correct when the timestamp wraps.
+TEST_F(ScreenshareLayerTestVP9, TimestampWrap) {
+ InitScreenshareLayers(2);
+ ConfigureBitrateForLayer(100, 0);
+
+ expected_.layer[0].upd_buf = 0;
+ expected_.layer[0].ref_buf1 = 0;
+ expected_.layer[1].upd_buf = 1;
+ expected_.layer[1].ref_buf1 = 1;
+ expected_.start_layer = 0;
+ expected_.stop_layer = 1;
+
+ // Advance time to just before the timestamp wraps.
+ AdvanceTime(std::numeric_limits<uint32_t>::max() / (kTickFrequency / 1000));
+ EqualToExpected();
+ AddKilobitsToLayer(200, 0);
+
+ // Wrap. Unsigned 32-bit subtraction keeps the computed time delta
+ // correct across the wrap.
+ expected_.start_layer = 1;
+ AdvanceTime(1);
+ EqualToExpected();
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/video_coding/codecs/vp9/vp9.gyp b/webrtc/modules/video_coding/codecs/vp9/vp9.gyp
index 752521c5cb..8993d79bd7 100644
--- a/webrtc/modules/video_coding/codecs/vp9/vp9.gyp
+++ b/webrtc/modules/video_coding/codecs/vp9/vp9.gyp
@@ -14,30 +14,26 @@
{
'target_name': 'webrtc_vp9',
'type': 'static_library',
- 'dependencies': [
- '<(webrtc_root)/common_video/common_video.gyp:common_video',
- '<(webrtc_root)/modules/video_coding/utility/video_coding_utility.gyp:video_coding_utility',
- '<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
- ],
'conditions': [
['build_libvpx==1', {
'dependencies': [
'<(libvpx_dir)/libvpx.gyp:libvpx_new',
],
}],
- ['build_vp9==1', {
- 'sources': [
- 'include/vp9.h',
- 'vp9_frame_buffer_pool.cc',
- 'vp9_frame_buffer_pool.h',
- 'vp9_impl.cc',
- 'vp9_impl.h',
- ],
- }, {
- 'sources': [
- 'vp9_dummy_impl.cc',
- ],
- }],
+ ],
+ 'dependencies': [
+ '<(webrtc_root)/common_video/common_video.gyp:common_video',
+ '<(webrtc_root)/modules/video_coding/utility/video_coding_utility.gyp:video_coding_utility',
+ '<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
+ ],
+ 'sources': [
+ 'include/vp9.h',
+ 'screenshare_layers.cc',
+ 'screenshare_layers.h',
+ 'vp9_frame_buffer_pool.cc',
+ 'vp9_frame_buffer_pool.h',
+ 'vp9_impl.cc',
+ 'vp9_impl.h',
],
},
],
diff --git a/webrtc/modules/video_coding/codecs/vp9/vp9_dummy_impl.cc b/webrtc/modules/video_coding/codecs/vp9/vp9_dummy_impl.cc
deleted file mode 100644
index 491ccbe79c..0000000000
--- a/webrtc/modules/video_coding/codecs/vp9/vp9_dummy_impl.cc
+++ /dev/null
@@ -1,19 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- *
- */
-
-// This file contains an implementation of empty webrtc VP9 encoder/decoder
-// factories so it is possible to build webrtc without linking with vp9.
-#include "webrtc/modules/video_coding/codecs/vp9/vp9_impl.h"
-
-namespace webrtc {
-VP9Encoder* VP9Encoder::Create() { return nullptr; }
-VP9Decoder* VP9Decoder::Create() { return nullptr; }
-}
diff --git a/webrtc/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc b/webrtc/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc
index bedbe68ca8..62c05d34fa 100644
--- a/webrtc/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc
+++ b/webrtc/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc
@@ -16,7 +16,7 @@
#include "vpx/vpx_frame_buffer.h"
#include "webrtc/base/checks.h"
-#include "webrtc/system_wrappers/include/logging.h"
+#include "webrtc/base/logging.h"
namespace webrtc {
diff --git a/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc b/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
index 0ca7eeabe9..e554795519 100644
--- a/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
+++ b/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
@@ -21,36 +21,31 @@
#include "vpx/vp8cx.h"
#include "vpx/vp8dx.h"
-#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
+#include "webrtc/base/keep_ref_until_done.h"
+#include "webrtc/base/logging.h"
#include "webrtc/base/trace_event.h"
#include "webrtc/common.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/system_wrappers/include/logging.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/video_coding/codecs/vp9/screenshare_layers.h"
#include "webrtc/system_wrappers/include/tick_util.h"
-namespace {
-
-// VP9DecoderImpl::ReturnFrame helper function used with WrappedI420Buffer.
-static void WrappedI420BufferNoLongerUsedCb(
- webrtc::Vp9FrameBufferPool::Vp9FrameBuffer* img_buffer) {
- img_buffer->Release();
-}
-
-} // anonymous namespace
-
namespace webrtc {
// Only positive speeds, range for real-time coding currently is: 5 - 8.
// Lower means slower/better quality, higher means fastest/lower quality.
int GetCpuSpeed(int width, int height) {
+#if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64)
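+ // Always use the fastest (lowest-complexity, lower quality) setting on ARM.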
+ return 8;
+#else
// For smaller resolutions, use lower speed setting (get some coding gain at
// the cost of increased encoding complexity).
if (width * height <= 352 * 288)
return 5;
else
return 7;
+#endif
}
VP9Encoder* VP9Encoder::Create() {
@@ -59,7 +54,7 @@ VP9Encoder* VP9Encoder::Create() {
void VP9EncoderImpl::EncoderOutputCodedPacketCallback(vpx_codec_cx_pkt* pkt,
void* user_data) {
- VP9EncoderImpl* enc = (VP9EncoderImpl*)(user_data);
+ VP9EncoderImpl* enc = static_cast<VP9EncoderImpl*>(user_data);
enc->GetEncodedLayerFrame(pkt);
}
@@ -76,9 +71,12 @@ VP9EncoderImpl::VP9EncoderImpl()
raw_(NULL),
input_image_(NULL),
tl0_pic_idx_(0),
- gof_idx_(0),
+ frames_since_kf_(0),
num_temporal_layers_(0),
- num_spatial_layers_(0) {
+ num_spatial_layers_(0),
+ frames_encoded_(0),
+ // Use two spatial layers when screensharing with flexible mode.
+ spatial_layer_(new ScreenshareLayersVP9(2)) {
memset(&codec_, 0, sizeof(codec_));
uint32_t seed = static_cast<uint32_t>(TickTime::MillisecondTimestamp());
srand(seed);
@@ -90,7 +88,7 @@ VP9EncoderImpl::~VP9EncoderImpl() {
int VP9EncoderImpl::Release() {
if (encoded_image_._buffer != NULL) {
- delete [] encoded_image_._buffer;
+ delete[] encoded_image_._buffer;
encoded_image_._buffer = NULL;
}
if (encoder_ != NULL) {
@@ -112,42 +110,72 @@ int VP9EncoderImpl::Release() {
return WEBRTC_VIDEO_CODEC_OK;
}
+bool VP9EncoderImpl::ExplicitlyConfiguredSpatialLayers() const {
+ // We check target_bitrate_bps of the 0th layer to see if the spatial layers
+ // (i.e. bitrates) were explicitly configured.
+ return num_spatial_layers_ > 1 &&
+ codec_.spatialLayers[0].target_bitrate_bps > 0;
+}
+
bool VP9EncoderImpl::SetSvcRates() {
- float rate_ratio[VPX_MAX_LAYERS] = {0};
- float total = 0;
uint8_t i = 0;
- for (i = 0; i < num_spatial_layers_; ++i) {
- if (svc_internal_.svc_params.scaling_factor_num[i] <= 0 ||
- svc_internal_.svc_params.scaling_factor_den[i] <= 0) {
+ if (ExplicitlyConfiguredSpatialLayers()) {
+ if (num_temporal_layers_ > 1) {
+ LOG(LS_ERROR) << "Multiple temporal layers when manually specifying "
+ "spatial layers not implemented yet!";
return false;
}
- rate_ratio[i] = static_cast<float>(
- svc_internal_.svc_params.scaling_factor_num[i]) /
- svc_internal_.svc_params.scaling_factor_den[i];
- total += rate_ratio[i];
- }
-
- for (i = 0; i < num_spatial_layers_; ++i) {
- config_->ss_target_bitrate[i] = static_cast<unsigned int>(
- config_->rc_target_bitrate * rate_ratio[i] / total);
- if (num_temporal_layers_ == 1) {
- config_->layer_target_bitrate[i] = config_->ss_target_bitrate[i];
- } else if (num_temporal_layers_ == 2) {
- config_->layer_target_bitrate[i * num_temporal_layers_] =
- config_->ss_target_bitrate[i] * 2 / 3;
- config_->layer_target_bitrate[i * num_temporal_layers_ + 1] =
- config_->ss_target_bitrate[i];
- } else if (num_temporal_layers_ == 3) {
- config_->layer_target_bitrate[i * num_temporal_layers_] =
- config_->ss_target_bitrate[i] / 2;
- config_->layer_target_bitrate[i * num_temporal_layers_ + 1] =
- config_->layer_target_bitrate[i * num_temporal_layers_] +
- (config_->ss_target_bitrate[i] / 4);
- config_->layer_target_bitrate[i * num_temporal_layers_ + 2] =
- config_->ss_target_bitrate[i];
- } else {
- return false;
+ int total_bitrate_bps = 0;
+ for (i = 0; i < num_spatial_layers_; ++i)
+ total_bitrate_bps += codec_.spatialLayers[i].target_bitrate_bps;
+ // If the total bitrate now differs from what was specified initially,
+ // rescale the per-layer bitrates in the same ratio.
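+ // E.g. (hypothetical numbers): layers configured in the ratio 100:300
+ // with rc_target_bitrate == 200 kbps are rescaled to 50 and 150 kbps.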
+ for (i = 0; i < num_spatial_layers_; ++i) {
+ config_->ss_target_bitrate[i] = config_->layer_target_bitrate[i] =
+ static_cast<int>(static_cast<int64_t>(config_->rc_target_bitrate) *
+ codec_.spatialLayers[i].target_bitrate_bps /
+ total_bitrate_bps);
+ }
+ } else {
+ float rate_ratio[VPX_MAX_LAYERS] = {0};
+ float total = 0;
+
+ for (i = 0; i < num_spatial_layers_; ++i) {
+ if (svc_internal_.svc_params.scaling_factor_num[i] <= 0 ||
+ svc_internal_.svc_params.scaling_factor_den[i] <= 0) {
+ LOG(LS_ERROR) << "Scaling factors not specified!";
+ return false;
+ }
+ rate_ratio[i] =
+ static_cast<float>(svc_internal_.svc_params.scaling_factor_num[i]) /
+ svc_internal_.svc_params.scaling_factor_den[i];
+ total += rate_ratio[i];
+ }
+
+ for (i = 0; i < num_spatial_layers_; ++i) {
+ config_->ss_target_bitrate[i] = static_cast<unsigned int>(
+ config_->rc_target_bitrate * rate_ratio[i] / total);
+ if (num_temporal_layers_ == 1) {
+ config_->layer_target_bitrate[i] = config_->ss_target_bitrate[i];
+ } else if (num_temporal_layers_ == 2) {
+ config_->layer_target_bitrate[i * num_temporal_layers_] =
+ config_->ss_target_bitrate[i] * 2 / 3;
+ config_->layer_target_bitrate[i * num_temporal_layers_ + 1] =
+ config_->ss_target_bitrate[i];
+ } else if (num_temporal_layers_ == 3) {
+ config_->layer_target_bitrate[i * num_temporal_layers_] =
+ config_->ss_target_bitrate[i] / 2;
+ config_->layer_target_bitrate[i * num_temporal_layers_ + 1] =
+ config_->layer_target_bitrate[i * num_temporal_layers_] +
+ (config_->ss_target_bitrate[i] / 4);
+ config_->layer_target_bitrate[i * num_temporal_layers_ + 2] =
+ config_->ss_target_bitrate[i];
+ } else {
+ LOG(LS_ERROR) << "Unsupported number of temporal layers: "
+ << num_temporal_layers_;
+ return false;
+ }
}
}
@@ -178,6 +206,7 @@ int VP9EncoderImpl::SetRates(uint32_t new_bitrate_kbit,
}
config_->rc_target_bitrate = new_bitrate_kbit;
codec_.maxFramerate = new_framerate;
+ spatial_layer_->ConfigureBitrate(new_bitrate_kbit, 0);
if (!SetSvcRates()) {
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
@@ -216,6 +245,7 @@ int VP9EncoderImpl::InitEncode(const VideoCodec* inst,
if (inst->codecSpecific.VP9.numberOfSpatialLayers > 2) {
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
+
int retVal = Release();
if (retVal < 0) {
return retVal;
@@ -237,10 +267,10 @@ int VP9EncoderImpl::InitEncode(const VideoCodec* inst,
num_temporal_layers_ = 1;
// Random start 16 bits is enough.
- picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF;
+ picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF; // NOLINT
// Allocate memory for encoded image
if (encoded_image_._buffer != NULL) {
- delete [] encoded_image_._buffer;
+ delete[] encoded_image_._buffer;
}
encoded_image_._size = CalcBufferSize(kI420, codec_.width, codec_.height);
encoded_image_._buffer = new uint8_t[encoded_image_._size];
@@ -248,8 +278,8 @@ int VP9EncoderImpl::InitEncode(const VideoCodec* inst,
// Creating a wrapper to the image - setting image data to NULL. Actual
// pointer will be set in encode. Setting align to 1, as it is meaningless
// (actual memory is not allocated).
- raw_ = vpx_img_wrap(NULL, VPX_IMG_FMT_I420, codec_.width, codec_.height,
- 1, NULL);
+ raw_ = vpx_img_wrap(NULL, VPX_IMG_FMT_I420, codec_.width, codec_.height, 1,
+ NULL);
// Populate encoder configuration with default values.
if (vpx_codec_enc_config_default(vpx_codec_vp9_cx(), config_, 0)) {
return WEBRTC_VIDEO_CODEC_ERROR;
@@ -264,8 +294,8 @@ int VP9EncoderImpl::InitEncode(const VideoCodec* inst,
config_->g_lag_in_frames = 0; // 0- no frame lagging
config_->g_threads = 1;
// Rate control settings.
- config_->rc_dropframe_thresh = inst->codecSpecific.VP9.frameDroppingOn ?
- 30 : 0;
+ config_->rc_dropframe_thresh =
+ inst->codecSpecific.VP9.frameDroppingOn ? 30 : 0;
config_->rc_end_usage = VPX_CBR;
config_->g_pass = VPX_RC_ONE_PASS;
config_->rc_min_quantizer = 2;
@@ -277,24 +307,32 @@ int VP9EncoderImpl::InitEncode(const VideoCodec* inst,
config_->rc_buf_sz = 1000;
// Set the maximum target size of any key-frame.
rc_max_intra_target_ = MaxIntraTarget(config_->rc_buf_optimal_sz);
- if (inst->codecSpecific.VP9.keyFrameInterval > 0) {
+ if (inst->codecSpecific.VP9.keyFrameInterval > 0) {
config_->kf_mode = VPX_KF_AUTO;
config_->kf_max_dist = inst->codecSpecific.VP9.keyFrameInterval;
+ // Needs to be set (in SVC mode) to get the correct periodic key frame
+ // interval (has no effect in non-SVC mode).
+ config_->kf_min_dist = config_->kf_max_dist;
} else {
config_->kf_mode = VPX_KF_DISABLED;
}
- config_->rc_resize_allowed = inst->codecSpecific.VP9.automaticResizeOn ?
- 1 : 0;
+ config_->rc_resize_allowed =
+ inst->codecSpecific.VP9.automaticResizeOn ? 1 : 0;
// Determine number of threads based on the image size and #cores.
- config_->g_threads = NumberOfThreads(config_->g_w,
- config_->g_h,
- number_of_cores);
+ config_->g_threads =
+ NumberOfThreads(config_->g_w, config_->g_h, number_of_cores);
cpu_speed_ = GetCpuSpeed(config_->g_w, config_->g_h);
// TODO(asapersson): Check configuration of temporal switch up and increase
// pattern length.
- if (num_temporal_layers_ == 1) {
+ is_flexible_mode_ = inst->codecSpecific.VP9.flexibleMode;
+ if (is_flexible_mode_) {
+ config_->temporal_layering_mode = VP9E_TEMPORAL_LAYERING_MODE_BYPASS;
+ config_->ts_number_layers = num_temporal_layers_;
+ if (codec_.mode == kScreensharing)
+ spatial_layer_->ConfigureBitrate(inst->startBitrate, 0);
+ } else if (num_temporal_layers_ == 1) {
gof_.SetGofInfoVP9(kTemporalStructureMode1);
config_->temporal_layering_mode = VP9E_TEMPORAL_LAYERING_MODE_NOLAYERING;
config_->ts_number_layers = 1;
@@ -326,7 +364,7 @@ int VP9EncoderImpl::InitEncode(const VideoCodec* inst,
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
- tl0_pic_idx_ = static_cast<uint8_t>(rand());
+ tl0_pic_idx_ = static_cast<uint8_t>(rand()); // NOLINT
return InitAndSetControlSettings(inst);
}
@@ -347,16 +385,28 @@ int VP9EncoderImpl::NumberOfThreads(int width,
}
int VP9EncoderImpl::InitAndSetControlSettings(const VideoCodec* inst) {
- config_->ss_number_layers = num_spatial_layers_;
-
- int scaling_factor_num = 256;
- for (int i = num_spatial_layers_ - 1; i >= 0; --i) {
+ // Set QP-min/max per spatial and temporal layer.
+ int tot_num_layers = num_spatial_layers_ * num_temporal_layers_;
+ for (int i = 0; i < tot_num_layers; ++i) {
svc_internal_.svc_params.max_quantizers[i] = config_->rc_max_quantizer;
svc_internal_.svc_params.min_quantizers[i] = config_->rc_min_quantizer;
- // 1:2 scaling in each dimension.
- svc_internal_.svc_params.scaling_factor_num[i] = scaling_factor_num;
- svc_internal_.svc_params.scaling_factor_den[i] = 256;
- scaling_factor_num /= 2;
+ }
+ config_->ss_number_layers = num_spatial_layers_;
+ if (ExplicitlyConfiguredSpatialLayers()) {
+ for (int i = 0; i < num_spatial_layers_; ++i) {
+ const auto& layer = codec_.spatialLayers[i];
+ svc_internal_.svc_params.scaling_factor_num[i] = layer.scaling_factor_num;
+ svc_internal_.svc_params.scaling_factor_den[i] = layer.scaling_factor_den;
+ }
+ } else {
+ int scaling_factor_num = 256;
+ for (int i = num_spatial_layers_ - 1; i >= 0; --i) {
+ // 1:2 scaling in each dimension.
+ svc_internal_.svc_params.scaling_factor_num[i] = scaling_factor_num;
+ svc_internal_.svc_params.scaling_factor_den[i] = 256;
+ if (codec_.mode != kScreensharing)
+ scaling_factor_num /= 2;
+ }
}
if (!SetSvcRates()) {
@@ -381,8 +431,10 @@ int VP9EncoderImpl::InitAndSetControlSettings(const VideoCodec* inst) {
}
// Register callback for getting each spatial layer.
vpx_codec_priv_output_cx_pkt_cb_pair_t cbp = {
- VP9EncoderImpl::EncoderOutputCodedPacketCallback, (void*)(this)};
- vpx_codec_control(encoder_, VP9E_REGISTER_CX_CALLBACK, (void*)(&cbp));
+ VP9EncoderImpl::EncoderOutputCodedPacketCallback,
+ reinterpret_cast<void*>(this)};
+ vpx_codec_control(encoder_, VP9E_REGISTER_CX_CALLBACK,
+ reinterpret_cast<void*>(&cbp));
// Control function to set the number of column tiles in encoding a frame, in
// log2 unit: e.g., 0 = 1 tile column, 1 = 2 tile columns, 2 = 4 tile columns.
@@ -417,7 +469,7 @@ uint32_t VP9EncoderImpl::MaxIntraTarget(uint32_t optimal_buffer_size) {
optimal_buffer_size * scale_par * codec_.maxFramerate / 10;
// Don't go below 3 times the per frame bandwidth.
const uint32_t min_intra_size = 300;
- return (target_pct < min_intra_size) ? min_intra_size: target_pct;
+ return (target_pct < min_intra_size) ? min_intra_size : target_pct;
}
int VP9EncoderImpl::Encode(const VideoFrame& input_image,
@@ -455,12 +507,35 @@ int VP9EncoderImpl::Encode(const VideoFrame& input_image,
raw_->stride[VPX_PLANE_U] = input_image.stride(kUPlane);
raw_->stride[VPX_PLANE_V] = input_image.stride(kVPlane);
- int flags = 0;
+ vpx_enc_frame_flags_t flags = 0;
bool send_keyframe = (frame_type == kVideoFrameKey);
if (send_keyframe) {
// Key frame request from caller.
flags = VPX_EFLAG_FORCE_KF;
}
+
+ if (is_flexible_mode_) {
+ SuperFrameRefSettings settings;
+
+ // These structs are copied when calling vpx_codec_control,
+ // therefore it is ok for them to go out of scope.
+ vpx_svc_ref_frame_config enc_layer_conf;
+ vpx_svc_layer_id layer_id;
+
+ if (codec_.mode == kRealtimeVideo) {
+ // Real time video not yet implemented in flexible mode.
+ RTC_NOTREACHED();
+ } else {
+ settings = spatial_layer_->GetSuperFrameSettings(input_image.timestamp(),
+ send_keyframe);
+ }
+ enc_layer_conf = GenerateRefsAndFlags(settings);
+ layer_id.temporal_layer_id = 0;
+ layer_id.spatial_layer_id = settings.start_layer;
+ vpx_codec_control(encoder_, VP9E_SET_SVC_LAYER_ID, &layer_id);
+ vpx_codec_control(encoder_, VP9E_SET_SVC_REF_FRAME_CONFIG, &enc_layer_conf);
+ }
+
assert(codec_.maxFramerate > 0);
uint32_t duration = 90000 / codec_.maxFramerate;
if (vpx_codec_encode(encoder_, raw_, timestamp_, duration, flags,
@@ -473,12 +548,12 @@ int VP9EncoderImpl::Encode(const VideoFrame& input_image,
}
void VP9EncoderImpl::PopulateCodecSpecific(CodecSpecificInfo* codec_specific,
- const vpx_codec_cx_pkt& pkt,
- uint32_t timestamp) {
+ const vpx_codec_cx_pkt& pkt,
+ uint32_t timestamp) {
assert(codec_specific != NULL);
codec_specific->codecType = kVideoCodecVP9;
- CodecSpecificInfoVP9 *vp9_info = &(codec_specific->codecSpecific.VP9);
- // TODO(asapersson): Set correct values.
+ CodecSpecificInfoVP9* vp9_info = &(codec_specific->codecSpecific.VP9);
+ // TODO(asapersson): Set correct value.
vp9_info->inter_pic_predicted =
(pkt.data.frame.flags & VPX_FRAME_IS_KEY) ? false : true;
vp9_info->flexible_mode = codec_.codecSpecific.VP9.flexibleMode;
@@ -486,9 +561,6 @@ void VP9EncoderImpl::PopulateCodecSpecific(CodecSpecificInfo* codec_specific,
!codec_.codecSpecific.VP9.flexibleMode)
? true
: false;
- if (pkt.data.frame.flags & VPX_FRAME_IS_KEY) {
- gof_idx_ = 0;
- }
vpx_svc_layer_id_t layer_id = {0};
vpx_codec_control(encoder_, VP9E_GET_SVC_LAYER_ID, &layer_id);
@@ -511,25 +583,31 @@ void VP9EncoderImpl::PopulateCodecSpecific(CodecSpecificInfo* codec_specific,
vp9_info->ss_data_available = false;
}
- if (vp9_info->flexible_mode) {
- vp9_info->gof_idx = kNoGofIdx;
+ // TODO(asapersson): this info has to be obtained from the encoder.
+ vp9_info->temporal_up_switch = false;
+
+ bool is_first_frame = false;
+ if (is_flexible_mode_) {
+ is_first_frame =
+ layer_id.spatial_layer_id == spatial_layer_->GetStartLayer();
} else {
- vp9_info->gof_idx =
- static_cast<uint8_t>(gof_idx_++ % gof_.num_frames_in_gof);
+ is_first_frame = layer_id.spatial_layer_id == 0;
}
- // TODO(asapersson): this info has to be obtained from the encoder.
- vp9_info->temporal_up_switch = true;
-
- if (layer_id.spatial_layer_id == 0) {
+ if (is_first_frame) {
picture_id_ = (picture_id_ + 1) & 0x7FFF;
// TODO(asapersson): this info has to be obtained from the encoder.
vp9_info->inter_layer_predicted = false;
+ ++frames_since_kf_;
} else {
// TODO(asapersson): this info has to be obtained from the encoder.
vp9_info->inter_layer_predicted = true;
}
+ if (pkt.data.frame.flags & VPX_FRAME_IS_KEY) {
+ frames_since_kf_ = 0;
+ }
+
vp9_info->picture_id = picture_id_;
if (!vp9_info->flexible_mode) {
@@ -542,6 +620,20 @@ void VP9EncoderImpl::PopulateCodecSpecific(CodecSpecificInfo* codec_specific,
// Always populate this, so that the packetizer can properly set the marker
// bit.
vp9_info->num_spatial_layers = num_spatial_layers_;
+
+ vp9_info->num_ref_pics = 0;
+ if (vp9_info->flexible_mode) {
+ vp9_info->gof_idx = kNoGofIdx;
+ vp9_info->num_ref_pics = num_ref_pics_[layer_id.spatial_layer_id];
+ for (int i = 0; i < num_ref_pics_[layer_id.spatial_layer_id]; ++i) {
+ vp9_info->p_diff[i] = p_diff_[layer_id.spatial_layer_id][i];
+ }
+ } else {
+ vp9_info->gof_idx =
+ static_cast<uint8_t>(frames_since_kf_ % gof_.num_frames_in_gof);
+ vp9_info->temporal_up_switch = gof_.temporal_up_switch[vp9_info->gof_idx];
+ }
+
if (vp9_info->ss_data_available) {
vp9_info->spatial_layer_resolution_present = true;
for (size_t i = 0; i < vp9_info->num_spatial_layers; ++i) {
@@ -577,6 +669,14 @@ int VP9EncoderImpl::GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt) {
frag_info.fragmentationPlType[part_idx] = 0;
frag_info.fragmentationTimeDiff[part_idx] = 0;
encoded_image_._length += static_cast<uint32_t>(pkt->data.frame.sz);
+
+ vpx_svc_layer_id_t layer_id = {0};
+ vpx_codec_control(encoder_, VP9E_GET_SVC_LAYER_ID, &layer_id);
+ if (is_flexible_mode_ && codec_.mode == kScreensharing)
+ spatial_layer_->LayerFrameEncoded(
+ static_cast<unsigned int>(encoded_image_._length),
+ layer_id.spatial_layer_id);
+
assert(encoded_image_._length <= encoded_image_._size);
// End of frame.
@@ -598,6 +698,108 @@ int VP9EncoderImpl::GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt) {
return WEBRTC_VIDEO_CODEC_OK;
}
+vpx_svc_ref_frame_config VP9EncoderImpl::GenerateRefsAndFlags(
+ const SuperFrameRefSettings& settings) {
+ static const vpx_enc_frame_flags_t kAllFlags =
+ VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_REF_LAST |
+ VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_GF;
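+ // kAllFlags disables every reference and every buffer update; individual
+ // bits are cleared below as references/updates are assigned per layer.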
+ vpx_svc_ref_frame_config sf_conf = {};
+ if (settings.is_keyframe) {
+ // Used later on to make sure we don't make any invalid references.
+ memset(buffer_updated_at_frame_, -1, sizeof(buffer_updated_at_frame_));
+ for (int layer = settings.start_layer; layer <= settings.stop_layer;
+ ++layer) {
+ num_ref_pics_[layer] = 0;
+ buffer_updated_at_frame_[settings.layer[layer].upd_buf] = frames_encoded_;
+ // When encoding a keyframe only the alt_fb_idx is used
+ // to specify which layer ends up in which buffer.
+ sf_conf.alt_fb_idx[layer] = settings.layer[layer].upd_buf;
+ }
+ } else {
+ for (int layer_idx = settings.start_layer; layer_idx <= settings.stop_layer;
+ ++layer_idx) {
+ vpx_enc_frame_flags_t layer_flags = kAllFlags;
+ num_ref_pics_[layer_idx] = 0;
+ int8_t refs[3] = {settings.layer[layer_idx].ref_buf1,
+ settings.layer[layer_idx].ref_buf2,
+ settings.layer[layer_idx].ref_buf3};
+
+ for (unsigned int ref_idx = 0; ref_idx < kMaxVp9RefPics; ++ref_idx) {
+ if (refs[ref_idx] == -1)
+ continue;
+
+ RTC_DCHECK_GE(refs[ref_idx], 0);
+ RTC_DCHECK_LE(refs[ref_idx], 7);
+ // It is easier to clear bits from the all-flags mask than to
+ // build the flags up from 0.
+ switch (num_ref_pics_[layer_idx]) {
+ case 0: {
+ sf_conf.lst_fb_idx[layer_idx] = refs[ref_idx];
+ layer_flags &= ~VP8_EFLAG_NO_REF_LAST;
+ break;
+ }
+ case 1: {
+ sf_conf.gld_fb_idx[layer_idx] = refs[ref_idx];
+ layer_flags &= ~VP8_EFLAG_NO_REF_GF;
+ break;
+ }
+ case 2: {
+ sf_conf.alt_fb_idx[layer_idx] = refs[ref_idx];
+ layer_flags &= ~VP8_EFLAG_NO_REF_ARF;
+ break;
+ }
+ }
+ // Make sure we don't reference a buffer that hasn't been
+ // used at all or hasn't been used since a keyframe.
+ RTC_DCHECK_NE(buffer_updated_at_frame_[refs[ref_idx]], -1);
+
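+ // p_diff is the distance, in encoded superframes, back to the frame
+ // that last wrote this buffer; PopulateCodecSpecific() passes it on so
+ // the RTP packetizer can signal references in flexible mode.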
+ p_diff_[layer_idx][num_ref_pics_[layer_idx]] =
+ frames_encoded_ - buffer_updated_at_frame_[refs[ref_idx]];
+ num_ref_pics_[layer_idx]++;
+ }
+
+ bool upd_buf_same_as_a_ref = false;
+ if (settings.layer[layer_idx].upd_buf != -1) {
+ for (unsigned int ref_idx = 0; ref_idx < kMaxVp9RefPics; ++ref_idx) {
+ if (settings.layer[layer_idx].upd_buf == refs[ref_idx]) {
+ switch (ref_idx) {
+ case 0: {
+ layer_flags &= ~VP8_EFLAG_NO_UPD_LAST;
+ break;
+ }
+ case 1: {
+ layer_flags &= ~VP8_EFLAG_NO_UPD_GF;
+ break;
+ }
+ case 2: {
+ layer_flags &= ~VP8_EFLAG_NO_UPD_ARF;
+ break;
+ }
+ }
+ upd_buf_same_as_a_ref = true;
+ break;
+ }
+ }
+ if (!upd_buf_same_as_a_ref) {
+ // If we have three references and a buffer is specified to be
+ // updated, then that buffer must be the same as one of the
+ // three references.
+ RTC_CHECK_LT(num_ref_pics_[layer_idx], kMaxVp9RefPics);
+
+ sf_conf.alt_fb_idx[layer_idx] = settings.layer[layer_idx].upd_buf;
+ layer_flags ^= VP8_EFLAG_NO_UPD_ARF;
+ }
+
+ int updated_buffer = settings.layer[layer_idx].upd_buf;
+ buffer_updated_at_frame_[updated_buffer] = frames_encoded_;
+ sf_conf.frame_flags[layer_idx] = layer_flags;
+ }
+ }
+ }
+ ++frames_encoded_;
+ return sf_conf;
+}
+
int VP9EncoderImpl::SetChannelParameters(uint32_t packet_loss, int64_t rtt) {
return WEBRTC_VIDEO_CODEC_OK;
}
@@ -608,6 +810,10 @@ int VP9EncoderImpl::RegisterEncodeCompleteCallback(
return WEBRTC_VIDEO_CODEC_OK;
}
+const char* VP9EncoderImpl::ImplementationName() const {
+ return "libvpx";
+}
+
VP9Decoder* VP9Decoder::Create() {
return new VP9DecoderImpl();
}
@@ -652,7 +858,7 @@ int VP9DecoderImpl::InitDecode(const VideoCodec* inst, int number_of_cores) {
if (decoder_ == NULL) {
decoder_ = new vpx_codec_ctx_t;
}
- vpx_codec_dec_cfg_t cfg;
+ vpx_codec_dec_cfg_t cfg;
// Setting number of threads to a constant value (1)
cfg.threads = 1;
cfg.h = cfg.w = 0; // set after decode
@@ -705,10 +911,8 @@ int VP9DecoderImpl::Decode(const EncodedImage& input_image,
}
// During decode libvpx may get and release buffers from |frame_buffer_pool_|.
// In practice libvpx keeps a few (~3-4) buffers alive at a time.
- if (vpx_codec_decode(decoder_,
- buffer,
- static_cast<unsigned int>(input_image._length),
- 0,
+ if (vpx_codec_decode(decoder_, buffer,
+ static_cast<unsigned int>(input_image._length), 0,
VPX_DL_REALTIME)) {
return WEBRTC_VIDEO_CODEC_ERROR;
}
@@ -730,24 +934,22 @@ int VP9DecoderImpl::ReturnFrame(const vpx_image_t* img, uint32_t timestamp) {
}
// This buffer contains all of |img|'s image data, a reference counted
- // Vp9FrameBuffer. Performing AddRef/Release ensures it is not released and
- // recycled during use (libvpx is done with the buffers after a few
+ // Vp9FrameBuffer. (libvpx is done with the buffers after a few
// vpx_codec_decode calls or vpx_codec_destroy).
Vp9FrameBufferPool::Vp9FrameBuffer* img_buffer =
static_cast<Vp9FrameBufferPool::Vp9FrameBuffer*>(img->fb_priv);
- img_buffer->AddRef();
// The buffer can be used directly by the VideoFrame (without copy) by
// using a WrappedI420Buffer.
rtc::scoped_refptr<WrappedI420Buffer> img_wrapped_buffer(
new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
- img->d_w, img->d_h,
- img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y],
- img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U],
- img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V],
+ img->d_w, img->d_h, img->planes[VPX_PLANE_Y],
+ img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U],
+ img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V],
+ img->stride[VPX_PLANE_V],
// WrappedI420Buffer's mechanism for allowing the release of its frame
// buffer is through a callback function. This is where we should
// release |img_buffer|.
- rtc::Bind(&WrappedI420BufferNoLongerUsedCb, img_buffer)));
+ rtc::KeepRefUntilDone(img_buffer)));
VideoFrame decoded_image;
decoded_image.set_video_frame_buffer(img_wrapped_buffer);
@@ -781,4 +983,9 @@ int VP9DecoderImpl::Release() {
inited_ = false;
return WEBRTC_VIDEO_CODEC_OK;
}
+
+const char* VP9DecoderImpl::ImplementationName() const {
+ return "libvpx";
+}
+
} // namespace webrtc
diff --git a/webrtc/modules/video_coding/codecs/vp9/vp9_impl.h b/webrtc/modules/video_coding/codecs/vp9/vp9_impl.h
index f9c123079e..bfa4540304 100644
--- a/webrtc/modules/video_coding/codecs/vp9/vp9_impl.h
+++ b/webrtc/modules/video_coding/codecs/vp9/vp9_impl.h
@@ -9,8 +9,10 @@
*
*/
-#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP9_IMPL_H_
-#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP9_IMPL_H_
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP9_VP9_IMPL_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP9_VP9_IMPL_H_
+
+#include <vector>
#include "webrtc/modules/video_coding/codecs/vp9/include/vp9.h"
#include "webrtc/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h"
@@ -21,6 +23,8 @@
namespace webrtc {
+class ScreenshareLayersVP9;
+
class VP9EncoderImpl : public VP9Encoder {
public:
VP9EncoderImpl();
@@ -45,6 +49,22 @@ class VP9EncoderImpl : public VP9Encoder {
void OnDroppedFrame() override {}
+ const char* ImplementationName() const override;
+
+ struct LayerFrameRefSettings {
+ int8_t upd_buf = -1; // -1 - no update, 0..7 - update buffer 0..7
+ int8_t ref_buf1 = -1; // -1 - no reference, 0..7 - reference buffer 0..7
+ int8_t ref_buf2 = -1; // -1 - no reference, 0..7 - reference buffer 0..7
+ int8_t ref_buf3 = -1; // -1 - no reference, 0..7 - reference buffer 0..7
+ };
+
+ struct SuperFrameRefSettings {
+ LayerFrameRefSettings layer[kMaxVp9NumberOfSpatialLayers];
+ uint8_t start_layer = 0; // The first spatial layer to be encoded.
+ uint8_t stop_layer = 0; // The last spatial layer to be encoded.
+ bool is_keyframe = false;
+ };
+
private:
// Determine number of encoder threads to use.
int NumberOfThreads(int width, int height, int number_of_cores);
@@ -56,8 +76,18 @@ class VP9EncoderImpl : public VP9Encoder {
const vpx_codec_cx_pkt& pkt,
uint32_t timestamp);
+ bool ExplicitlyConfiguredSpatialLayers() const;
bool SetSvcRates();
+ // Used for flexible mode to set the flags and buffer references used
+ // by the encoder. Also calculates the references used by the RTP
+ // packetizer.
+ //
+ // Has to be called for every frame (keyframes included) to update the
+ // state used to calculate references.
+ vpx_svc_ref_frame_config GenerateRefsAndFlags(
+ const SuperFrameRefSettings& settings);
+
virtual int GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt);
// Callback function for outputting packets per spatial layer.
@@ -88,11 +118,18 @@ class VP9EncoderImpl : public VP9Encoder {
GofInfoVP9 gof_; // Contains each frame's temporal information for
// non-flexible mode.
uint8_t tl0_pic_idx_; // Only used in non-flexible mode.
- size_t gof_idx_; // Only used in non-flexible mode.
+ size_t frames_since_kf_;
uint8_t num_temporal_layers_;
uint8_t num_spatial_layers_;
-};
+ // Used for flexible mode.
+ bool is_flexible_mode_;
+ int64_t buffer_updated_at_frame_[kNumVp9Buffers];
+ int64_t frames_encoded_;
+ uint8_t num_ref_pics_[kMaxVp9NumberOfSpatialLayers];
+ uint8_t p_diff_[kMaxVp9NumberOfSpatialLayers][kMaxVp9RefPics];
+ rtc::scoped_ptr<ScreenshareLayersVP9> spatial_layer_;
+};
class VP9DecoderImpl : public VP9Decoder {
public:
@@ -114,6 +151,8 @@ class VP9DecoderImpl : public VP9Decoder {
int Reset() override;
+ const char* ImplementationName() const override;
+
private:
int ReturnFrame(const vpx_image_t* img, uint32_t timeStamp);
@@ -127,4 +166,4 @@ class VP9DecoderImpl : public VP9Decoder {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP9_IMPL_H_
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP9_VP9_IMPL_H_
diff --git a/webrtc/modules/video_coding/content_metrics_processing.cc b/webrtc/modules/video_coding/content_metrics_processing.cc
new file mode 100644
index 0000000000..0c3a6dbc6c
--- /dev/null
+++ b/webrtc/modules/video_coding/content_metrics_processing.cc
@@ -0,0 +1,124 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_coding/content_metrics_processing.h"
+
+#include <math.h>
+
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/video_coding/include/video_coding_defines.h"
+
+namespace webrtc {
+//////////////////////////////////
+/// VCMContentMetricsProcessing //
+//////////////////////////////////
+
+VCMContentMetricsProcessing::VCMContentMetricsProcessing()
+ : recursive_avg_factor_(1 / 150.0f), // matched to 30fps.
+ frame_cnt_uniform_avg_(0),
+ avg_motion_level_(0.0f),
+ avg_spatial_level_(0.0f) {
+ recursive_avg_ = new VideoContentMetrics();
+ uniform_avg_ = new VideoContentMetrics();
+}
+
+VCMContentMetricsProcessing::~VCMContentMetricsProcessing() {
+ delete recursive_avg_;
+ delete uniform_avg_;
+}
+
+int VCMContentMetricsProcessing::Reset() {
+ recursive_avg_->Reset();
+ uniform_avg_->Reset();
+ frame_cnt_uniform_avg_ = 0;
+ avg_motion_level_ = 0.0f;
+ avg_spatial_level_ = 0.0f;
+ return VCM_OK;
+}
+
+void VCMContentMetricsProcessing::UpdateFrameRate(uint32_t frameRate) {
+ // Update factor for recursive averaging.
+ recursive_avg_factor_ = static_cast<float>(1000.0f) /
+ static_cast<float>(frameRate * kQmMinIntervalMs);
+}
+
+VideoContentMetrics* VCMContentMetricsProcessing::LongTermAvgData() {
+ return recursive_avg_;
+}
+
+VideoContentMetrics* VCMContentMetricsProcessing::ShortTermAvgData() {
+ if (frame_cnt_uniform_avg_ == 0) {
+ return NULL;
+ }
+ // Two metrics are used: motion and spatial level.
+ uniform_avg_->motion_magnitude =
+ avg_motion_level_ / static_cast<float>(frame_cnt_uniform_avg_);
+ uniform_avg_->spatial_pred_err =
+ avg_spatial_level_ / static_cast<float>(frame_cnt_uniform_avg_);
+ return uniform_avg_;
+}
+
+void VCMContentMetricsProcessing::ResetShortTermAvgData() {
+ // Reset.
+ avg_motion_level_ = 0.0f;
+ avg_spatial_level_ = 0.0f;
+ frame_cnt_uniform_avg_ = 0;
+}
+
+int VCMContentMetricsProcessing::UpdateContentData(
+ const VideoContentMetrics* contentMetrics) {
+ if (contentMetrics == NULL) {
+ return VCM_OK;
+ }
+ return ProcessContent(contentMetrics);
+}
+
+int VCMContentMetricsProcessing::ProcessContent(
+ const VideoContentMetrics* contentMetrics) {
+ // Update the recursive averaged metrics: average is over longer window
+ // of time: over QmMinIntervalMs ms.
+ UpdateRecursiveAvg(contentMetrics);
+ // Update the uniform averaged metrics: average is over shorter window
+ // of time: based on ~RTCP reports.
+ UpdateUniformAvg(contentMetrics);
+ return VCM_OK;
+}
+
+void VCMContentMetricsProcessing::UpdateUniformAvg(
+ const VideoContentMetrics* contentMetrics) {
+ // Update frame counter.
+ frame_cnt_uniform_avg_ += 1;
+ // Update averaged metrics: motion and spatial level are used.
+ avg_motion_level_ += contentMetrics->motion_magnitude;
+ avg_spatial_level_ += contentMetrics->spatial_pred_err;
+ return;
+}
+
+void VCMContentMetricsProcessing::UpdateRecursiveAvg(
+ const VideoContentMetrics* contentMetrics) {
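+ // Each metric below is an exponentially weighted moving average:
+ // avg = (1 - a) * avg + a * sample, with a = recursive_avg_factor_.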
+ // Spatial metrics: 2x2, 1x2(H), 2x1(V).
+ recursive_avg_->spatial_pred_err =
+ (1 - recursive_avg_factor_) * recursive_avg_->spatial_pred_err +
+ recursive_avg_factor_ * contentMetrics->spatial_pred_err;
+
+ recursive_avg_->spatial_pred_err_h =
+ (1 - recursive_avg_factor_) * recursive_avg_->spatial_pred_err_h +
+ recursive_avg_factor_ * contentMetrics->spatial_pred_err_h;
+
+ recursive_avg_->spatial_pred_err_v =
+ (1 - recursive_avg_factor_) * recursive_avg_->spatial_pred_err_v +
+ recursive_avg_factor_ * contentMetrics->spatial_pred_err_v;
+
+ // Motion metric: Derived from NFD (normalized frame difference).
+ recursive_avg_->motion_magnitude =
+ (1 - recursive_avg_factor_) * recursive_avg_->motion_magnitude +
+ recursive_avg_factor_ * contentMetrics->motion_magnitude;
+}
+} // namespace webrtc
diff --git a/webrtc/modules/video_coding/content_metrics_processing.h b/webrtc/modules/video_coding/content_metrics_processing.h
new file mode 100644
index 0000000000..3f67ec19c9
--- /dev/null
+++ b/webrtc/modules/video_coding/content_metrics_processing.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CONTENT_METRICS_PROCESSING_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CONTENT_METRICS_PROCESSING_H_
+
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+struct VideoContentMetrics;
+
+// QM interval time (in ms)
+enum { kQmMinIntervalMs = 10000 };
+
+// Flag for NFD metric vs motion metric
+enum { kNfdMetric = 1 };
+
+/**********************************/
+/* Content Metrics Processing */
+/**********************************/
+class VCMContentMetricsProcessing {
+ public:
+ VCMContentMetricsProcessing();
+ ~VCMContentMetricsProcessing();
+
+ // Update class with latest metrics.
+ int UpdateContentData(const VideoContentMetrics* contentMetrics);
+
+ // Reset the short-term averaged content data.
+ void ResetShortTermAvgData();
+
+ // Initialize.
+ int Reset();
+
+ // Inform class of current frame rate.
+ void UpdateFrameRate(uint32_t frameRate);
+
+ // Returns the long-term averaged content data: recursive average over longer
+ // time scale.
+ VideoContentMetrics* LongTermAvgData();
+
+ // Returns the short-term averaged content data: uniform average over
+ // shorter time scale.
+ VideoContentMetrics* ShortTermAvgData();
+
+ private:
+ // Compute working average.
+ int ProcessContent(const VideoContentMetrics* contentMetrics);
+
+ // Update the recursive averaged metrics: longer time average (~5/10 secs).
+ void UpdateRecursiveAvg(const VideoContentMetrics* contentMetrics);
+
+ // Update the uniform averaged metrics: shorter time average (~RTCP report).
+ void UpdateUniformAvg(const VideoContentMetrics* contentMetrics);
+
+ VideoContentMetrics* recursive_avg_;
+ VideoContentMetrics* uniform_avg_;
+ float recursive_avg_factor_;
+ uint32_t frame_cnt_uniform_avg_;
+ float avg_motion_level_;
+ float avg_spatial_level_;
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_CODING_CONTENT_METRICS_PROCESSING_H_
diff --git a/webrtc/modules/video_coding/decoding_state.cc b/webrtc/modules/video_coding/decoding_state.cc
new file mode 100644
index 0000000000..89be9b66c1
--- /dev/null
+++ b/webrtc/modules/video_coding/decoding_state.cc
@@ -0,0 +1,285 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_coding/decoding_state.h"
+
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/video_coding/frame_buffer.h"
+#include "webrtc/modules/video_coding/jitter_buffer_common.h"
+#include "webrtc/modules/video_coding/packet.h"
+
+namespace webrtc {
+
+VCMDecodingState::VCMDecodingState()
+ : sequence_num_(0),
+ time_stamp_(0),
+ picture_id_(kNoPictureId),
+ temporal_id_(kNoTemporalIdx),
+ tl0_pic_id_(kNoTl0PicIdx),
+ full_sync_(true),
+ in_initial_state_(true) {
+ memset(frame_decoded_, 0, sizeof(frame_decoded_));
+}
+
+VCMDecodingState::~VCMDecodingState() {}
+
+void VCMDecodingState::Reset() {
+ // TODO(mikhal): Verify - we may not always want to reset the sync.
+ sequence_num_ = 0;
+ time_stamp_ = 0;
+ picture_id_ = kNoPictureId;
+ temporal_id_ = kNoTemporalIdx;
+ tl0_pic_id_ = kNoTl0PicIdx;
+ full_sync_ = true;
+ in_initial_state_ = true;
+ memset(frame_decoded_, 0, sizeof(frame_decoded_));
+}
+
+uint32_t VCMDecodingState::time_stamp() const {
+ return time_stamp_;
+}
+
+uint16_t VCMDecodingState::sequence_num() const {
+ return sequence_num_;
+}
+
+bool VCMDecodingState::IsOldFrame(const VCMFrameBuffer* frame) const {
+ assert(frame != NULL);
+ if (in_initial_state_)
+ return false;
+ return !IsNewerTimestamp(frame->TimeStamp(), time_stamp_);
+}
+
+bool VCMDecodingState::IsOldPacket(const VCMPacket* packet) const {
+ assert(packet != NULL);
+ if (in_initial_state_)
+ return false;
+ return !IsNewerTimestamp(packet->timestamp, time_stamp_);
+}
+
+void VCMDecodingState::SetState(const VCMFrameBuffer* frame) {
+ assert(frame != NULL && frame->GetHighSeqNum() >= 0);
+ if (!UsingFlexibleMode(frame))
+ UpdateSyncState(frame);
+ sequence_num_ = static_cast<uint16_t>(frame->GetHighSeqNum());
+ time_stamp_ = frame->TimeStamp();
+ picture_id_ = frame->PictureId();
+ temporal_id_ = frame->TemporalId();
+ tl0_pic_id_ = frame->Tl0PicId();
+
+ if (UsingFlexibleMode(frame)) {
+ uint16_t frame_index = picture_id_ % kFrameDecodedLength;
+ if (in_initial_state_) {
+ frame_decoded_cleared_to_ = frame_index;
+ } else if (frame->FrameType() == kVideoFrameKey) {
+ memset(frame_decoded_, 0, sizeof(frame_decoded_));
+ frame_decoded_cleared_to_ = frame_index;
+ } else {
+ if (AheadOfFramesDecodedClearedTo(frame_index)) {
+ while (frame_decoded_cleared_to_ != frame_index) {
+ frame_decoded_cleared_to_ =
+ (frame_decoded_cleared_to_ + 1) % kFrameDecodedLength;
+ frame_decoded_[frame_decoded_cleared_to_] = false;
+ }
+ }
+ }
+ frame_decoded_[frame_index] = true;
+ }
+
+ in_initial_state_ = false;
+}
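+// Worked example of the flexible-mode bookkeeping above (a sketch, using
+// kFrameDecodedLength == 128): if frame_decoded_cleared_to_ == 120 and a
+// delta frame arrives with picture id 130, then frame_index == 2, the loop
+// clears entries 121..127 and 0..2, and index 2 is then marked as decoded.
+// This prevents stale decode flags from the previous wrap being reused as
+// references.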
+
+void VCMDecodingState::CopyFrom(const VCMDecodingState& state) {
+ sequence_num_ = state.sequence_num_;
+ time_stamp_ = state.time_stamp_;
+ picture_id_ = state.picture_id_;
+ temporal_id_ = state.temporal_id_;
+ tl0_pic_id_ = state.tl0_pic_id_;
+ full_sync_ = state.full_sync_;
+ in_initial_state_ = state.in_initial_state_;
+ frame_decoded_cleared_to_ = state.frame_decoded_cleared_to_;
+ memcpy(frame_decoded_, state.frame_decoded_, sizeof(frame_decoded_));
+}
+
+bool VCMDecodingState::UpdateEmptyFrame(const VCMFrameBuffer* frame) {
+ bool empty_packet = frame->GetHighSeqNum() == frame->GetLowSeqNum();
+ if (in_initial_state_ && empty_packet) {
+ // Drop empty packets as long as we are in the initial state.
+ return true;
+ }
+ if ((empty_packet && ContinuousSeqNum(frame->GetHighSeqNum())) ||
+ ContinuousFrame(frame)) {
+ // Continuous empty packets or continuous frames can be dropped if we
+ // advance the sequence number.
+ sequence_num_ = frame->GetHighSeqNum();
+ time_stamp_ = frame->TimeStamp();
+ return true;
+ }
+ return false;
+}
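+// Example of the rule above (illustrative): in the initial state every empty
+// frame is swallowed; once decoding has started, an empty frame whose high
+// sequence number directly follows sequence_num_ simply advances the state,
+// so a later media frame still tests as continuous.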
+
+void VCMDecodingState::UpdateOldPacket(const VCMPacket* packet) {
+ assert(packet != NULL);
+ if (packet->timestamp == time_stamp_) {
+ // Late packet belonging to the last decoded frame - make sure we update the
+ // last decoded sequence number.
+ sequence_num_ = LatestSequenceNumber(packet->seqNum, sequence_num_);
+ }
+}
+
+void VCMDecodingState::SetSeqNum(uint16_t new_seq_num) {
+ sequence_num_ = new_seq_num;
+}
+
+bool VCMDecodingState::in_initial_state() const {
+ return in_initial_state_;
+}
+
+bool VCMDecodingState::full_sync() const {
+ return full_sync_;
+}
+
+void VCMDecodingState::UpdateSyncState(const VCMFrameBuffer* frame) {
+ if (in_initial_state_)
+ return;
+ if (frame->TemporalId() == kNoTemporalIdx ||
+ frame->Tl0PicId() == kNoTl0PicIdx) {
+ full_sync_ = true;
+ } else if (frame->FrameType() == kVideoFrameKey || frame->LayerSync()) {
+ full_sync_ = true;
+ } else if (full_sync_) {
+    // Verify that we are still in sync.
+    // Sync is broken when the layers are continuous but one of the other
+    // methods (PictureId or SeqNum) is not.
+ if (UsingPictureId(frame)) {
+ // First check for a valid tl0PicId.
+ if (frame->Tl0PicId() - tl0_pic_id_ > 1) {
+ full_sync_ = false;
+ } else {
+ full_sync_ = ContinuousPictureId(frame->PictureId());
+ }
+ } else {
+ full_sync_ =
+ ContinuousSeqNum(static_cast<uint16_t>(frame->GetLowSeqNum()));
+ }
+ }
+}
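+// Example of the tl0PicId gap check above (illustrative): with
+// tl0_pic_id_ == 3, a frame carrying Tl0PicId() == 5 implies a lost
+// base-layer frame, so full_sync_ is dropped even if the frame's picture id
+// happens to be continuous.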
+
+bool VCMDecodingState::ContinuousFrame(const VCMFrameBuffer* frame) const {
+ // Check continuity based on the following hierarchy:
+ // - Temporal layers (stop here if out of sync).
+ // - Picture Id when available.
+ // - Sequence numbers.
+  // Return false when in the initial state, unless the frame is a key frame.
+ // Note that when a method is not applicable it will return false.
+ assert(frame != NULL);
+ // A key frame is always considered continuous as it doesn't refer to any
+ // frames and therefore won't introduce any errors even if prior frames are
+ // missing.
+ if (frame->FrameType() == kVideoFrameKey)
+ return true;
+ // When in the initial state we always require a key frame to start decoding.
+ if (in_initial_state_)
+ return false;
+ if (ContinuousLayer(frame->TemporalId(), frame->Tl0PicId()))
+ return true;
+ // tl0picId is either not used, or should remain unchanged.
+ if (frame->Tl0PicId() != tl0_pic_id_)
+ return false;
+ // Base layers are not continuous or temporal layers are inactive.
+ // In the presence of temporal layers, check for Picture ID/sequence number
+ // continuity if sync can be restored by this frame.
+ if (!full_sync_ && !frame->LayerSync())
+ return false;
+ if (UsingPictureId(frame)) {
+ if (UsingFlexibleMode(frame)) {
+ return ContinuousFrameRefs(frame);
+ } else {
+ return ContinuousPictureId(frame->PictureId());
+ }
+ } else {
+ return ContinuousSeqNum(static_cast<uint16_t>(frame->GetLowSeqNum()));
+ }
+}
+
+bool VCMDecodingState::ContinuousPictureId(int picture_id) const {
+ int next_picture_id = picture_id_ + 1;
+ if (picture_id < picture_id_) {
+ // Wrap
+ if (picture_id_ >= 0x80) {
+ // 15 bits used for picture id
+ return ((next_picture_id & 0x7FFF) == picture_id);
+ } else {
+ // 7 bits used for picture id
+ return ((next_picture_id & 0x7F) == picture_id);
+ }
+ }
+ // No wrap
+ return (next_picture_id == picture_id);
+}
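+// Wrap examples for the check above (illustrative): with a 7-bit picture id,
+// picture_id_ == 0x7F is continued by 0 ((0x80 & 0x7F) == 0); with a 15-bit
+// id, picture_id_ == 0x7FFF is continued by 0 as well. The mask width is
+// inferred from the last id seen: ids at or above 0x80 imply 15 bits.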
+
+bool VCMDecodingState::ContinuousSeqNum(uint16_t seq_num) const {
+ return seq_num == static_cast<uint16_t>(sequence_num_ + 1);
+}
+
+bool VCMDecodingState::ContinuousLayer(int temporal_id, int tl0_pic_id) const {
+ // First, check if applicable.
+ if (temporal_id == kNoTemporalIdx || tl0_pic_id == kNoTl0PicIdx)
+ return false;
+ // If this is the first frame to use temporal layers, make sure we start
+ // from base.
+ else if (tl0_pic_id_ == kNoTl0PicIdx && temporal_id_ == kNoTemporalIdx &&
+ temporal_id == 0)
+ return true;
+
+ // Current implementation: Look for base layer continuity.
+ if (temporal_id != 0)
+ return false;
+ return (static_cast<uint8_t>(tl0_pic_id_ + 1) == tl0_pic_id);
+}
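+// Example of the base-layer wrap above (illustrative): tl0 picture ids are
+// 8-bit counters, so with tl0_pic_id_ == 0xFF the next continuous base layer
+// carries tl0_pic_id == 0, since static_cast<uint8_t>(0xFF + 1) == 0.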
+
+bool VCMDecodingState::ContinuousFrameRefs(const VCMFrameBuffer* frame) const {
+ uint8_t num_refs = frame->CodecSpecific()->codecSpecific.VP9.num_ref_pics;
+ for (uint8_t r = 0; r < num_refs; ++r) {
+ uint16_t frame_ref = frame->PictureId() -
+ frame->CodecSpecific()->codecSpecific.VP9.p_diff[r];
+ uint16_t frame_index = frame_ref % kFrameDecodedLength;
+ if (AheadOfFramesDecodedClearedTo(frame_index) ||
+ !frame_decoded_[frame_index]) {
+ return false;
+ }
+ }
+ return true;
+}
+
+bool VCMDecodingState::UsingPictureId(const VCMFrameBuffer* frame) const {
+ return (frame->PictureId() != kNoPictureId && picture_id_ != kNoPictureId);
+}
+
+bool VCMDecodingState::UsingFlexibleMode(const VCMFrameBuffer* frame) const {
+ return frame->CodecSpecific()->codecType == kVideoCodecVP9 &&
+ frame->CodecSpecific()->codecSpecific.VP9.flexible_mode;
+}
+
+// TODO(philipel): Change how this check works; it practically
+// limits the max p_diff to 64.
+bool VCMDecodingState::AheadOfFramesDecodedClearedTo(uint16_t index) const {
+ // No way of knowing for sure if we are actually ahead of
+ // frame_decoded_cleared_to_. We just make the assumption
+ // that we are not trying to reference back to a very old
+ // index, but instead are referencing a newer index.
+ uint16_t diff =
+ index > frame_decoded_cleared_to_
+ ? kFrameDecodedLength - (index - frame_decoded_cleared_to_)
+ : frame_decoded_cleared_to_ - index;
+ return diff > kFrameDecodedLength / 2;
+}
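+// Worked example of the half-ring heuristic above (a sketch, with
+// kFrameDecodedLength == 128): for frame_decoded_cleared_to_ == 10,
+// index == 20 gives diff == 128 - 10 == 118 > 64, so 20 is treated as ahead;
+// index == 5 gives diff == 5 <= 64, so 5 is treated as a reference back to
+// an already-cleared slot.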
+
+} // namespace webrtc
diff --git a/webrtc/modules/video_coding/decoding_state.h b/webrtc/modules/video_coding/decoding_state.h
new file mode 100644
index 0000000000..f4ea8ae081
--- /dev/null
+++ b/webrtc/modules/video_coding/decoding_state.h
@@ -0,0 +1,82 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_DECODING_STATE_H_
+#define WEBRTC_MODULES_VIDEO_CODING_DECODING_STATE_H_
+
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// Forward declarations
+class VCMFrameBuffer;
+class VCMPacket;
+
+class VCMDecodingState {
+ public:
+ // The max number of bits used to reference back
+ // to a previous frame when using flexible mode.
+ static const uint16_t kNumRefBits = 7;
+ static const uint16_t kFrameDecodedLength = 1 << kNumRefBits;
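+  // With kNumRefBits == 7 this yields a 128-entry ring buffer, so
+  // flexible-mode references are tracked modulo kFrameDecodedLength.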
+
+ VCMDecodingState();
+ ~VCMDecodingState();
+ // Check for old frame
+ bool IsOldFrame(const VCMFrameBuffer* frame) const;
+ // Check for old packet
+ bool IsOldPacket(const VCMPacket* packet) const;
+ // Check for frame continuity based on current decoded state. Use best method
+ // possible, i.e. temporal info, picture ID or sequence number.
+ bool ContinuousFrame(const VCMFrameBuffer* frame) const;
+ void SetState(const VCMFrameBuffer* frame);
+ void CopyFrom(const VCMDecodingState& state);
+ bool UpdateEmptyFrame(const VCMFrameBuffer* frame);
+ // Update the sequence number if the timestamp matches current state and the
+ // sequence number is higher than the current one. This accounts for packets
+ // arriving late.
+ void UpdateOldPacket(const VCMPacket* packet);
+ void SetSeqNum(uint16_t new_seq_num);
+ void Reset();
+ uint32_t time_stamp() const;
+ uint16_t sequence_num() const;
+  // Return true if in the initial state.
+ bool in_initial_state() const;
+ // Return true when sync is on - decode all layers.
+ bool full_sync() const;
+
+ private:
+ void UpdateSyncState(const VCMFrameBuffer* frame);
+ // Designated continuity functions
+ bool ContinuousPictureId(int picture_id) const;
+ bool ContinuousSeqNum(uint16_t seq_num) const;
+ bool ContinuousLayer(int temporal_id, int tl0_pic_id) const;
+ bool ContinuousFrameRefs(const VCMFrameBuffer* frame) const;
+ bool UsingPictureId(const VCMFrameBuffer* frame) const;
+ bool UsingFlexibleMode(const VCMFrameBuffer* frame) const;
+ bool AheadOfFramesDecodedClearedTo(uint16_t index) const;
+
+ // Keep state of last decoded frame.
+ // TODO(mikhal/stefan): create designated classes to handle these types.
+ uint16_t sequence_num_;
+ uint32_t time_stamp_;
+ int picture_id_;
+ int temporal_id_;
+ int tl0_pic_id_;
+ bool full_sync_; // Sync flag when temporal layers are used.
+ bool in_initial_state_;
+
+ // Used to check references in flexible mode.
+ bool frame_decoded_[kFrameDecodedLength];
+ uint16_t frame_decoded_cleared_to_;
+};
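+// A typical jitter-buffer interaction (a sketch, not prescriptive):
+//   VCMDecodingState state;
+//   if (!state.IsOldFrame(frame) && state.ContinuousFrame(frame))
+//     state.SetState(frame);  // Frame is decodable in order; advance state.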
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_DECODING_STATE_H_
diff --git a/webrtc/modules/video_coding/decoding_state_unittest.cc b/webrtc/modules/video_coding/decoding_state_unittest.cc
new file mode 100644
index 0000000000..5f5d0d38b1
--- /dev/null
+++ b/webrtc/modules/video_coding/decoding_state_unittest.cc
@@ -0,0 +1,699 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string.h>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/video_coding/decoding_state.h"
+#include "webrtc/modules/video_coding/frame_buffer.h"
+#include "webrtc/modules/video_coding/jitter_buffer_common.h"
+#include "webrtc/modules/video_coding/packet.h"
+
+namespace webrtc {
+
+TEST(TestDecodingState, Sanity) {
+ VCMDecodingState dec_state;
+ dec_state.Reset();
+ EXPECT_TRUE(dec_state.in_initial_state());
+ EXPECT_TRUE(dec_state.full_sync());
+}
+
+TEST(TestDecodingState, FrameContinuity) {
+ VCMDecodingState dec_state;
+  // Check that decisions are made based on the correct method.
+ VCMFrameBuffer frame;
+ VCMFrameBuffer frame_key;
+ VCMPacket packet;
+ packet.isFirstPacket = true;
+ packet.timestamp = 1;
+ packet.seqNum = 0xffff;
+ packet.frameType = kVideoFrameDelta;
+ packet.codecSpecificHeader.codec = kRtpVideoVp8;
+ packet.codecSpecificHeader.codecHeader.VP8.pictureId = 0x007F;
+ FrameData frame_data;
+ frame_data.rtt_ms = 0;
+ frame_data.rolling_average_packets_per_frame = -1;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ // Always start with a key frame.
+ dec_state.Reset();
+ EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
+ packet.frameType = kVideoFrameKey;
+ EXPECT_LE(0, frame_key.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_TRUE(dec_state.ContinuousFrame(&frame_key));
+ dec_state.SetState(&frame);
+ frame.Reset();
+ packet.frameType = kVideoFrameDelta;
+ // Use pictureId
+ packet.isFirstPacket = false;
+ packet.codecSpecificHeader.codecHeader.VP8.pictureId = 0x0002;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
+ frame.Reset();
+ packet.codecSpecificHeader.codecHeader.VP8.pictureId = 0;
+ packet.seqNum = 10;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+
+ // Use sequence numbers.
+ packet.codecSpecificHeader.codecHeader.VP8.pictureId = kNoPictureId;
+ frame.Reset();
+ packet.seqNum = dec_state.sequence_num() - 1u;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
+ frame.Reset();
+ packet.seqNum = dec_state.sequence_num() + 1u;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+  // Insert another packet into this frame.
+ packet.seqNum++;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ // Verify wrap.
+ EXPECT_LE(dec_state.sequence_num(), 0xffff);
+ EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+ dec_state.SetState(&frame);
+
+ // Insert packet with temporal info.
+ dec_state.Reset();
+ frame.Reset();
+ packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
+ packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
+ packet.codecSpecificHeader.codecHeader.VP8.pictureId = 0;
+ packet.seqNum = 1;
+ packet.timestamp = 1;
+ EXPECT_TRUE(dec_state.full_sync());
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ dec_state.SetState(&frame);
+ EXPECT_TRUE(dec_state.full_sync());
+ frame.Reset();
+ // 1 layer up - still good.
+ packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
+ packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 1;
+ packet.codecSpecificHeader.codecHeader.VP8.pictureId = 1;
+ packet.seqNum = 2;
+ packet.timestamp = 2;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+ dec_state.SetState(&frame);
+ EXPECT_TRUE(dec_state.full_sync());
+ frame.Reset();
+ // Lost non-base layer packet => should update sync parameter.
+ packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
+ packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 3;
+ packet.codecSpecificHeader.codecHeader.VP8.pictureId = 3;
+ packet.seqNum = 4;
+ packet.timestamp = 4;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
+  // Now insert the next non-base layer (belonging to the next tl0PicId).
+ frame.Reset();
+ packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 1;
+ packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 2;
+ packet.codecSpecificHeader.codecHeader.VP8.pictureId = 4;
+ packet.seqNum = 5;
+ packet.timestamp = 5;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ // Checking continuity and not updating the state - this should not trigger
+ // an update of sync state.
+ EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
+ EXPECT_TRUE(dec_state.full_sync());
+ // Next base layer (dropped interim non-base layers) - should update sync.
+ frame.Reset();
+ packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 1;
+ packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
+ packet.codecSpecificHeader.codecHeader.VP8.pictureId = 5;
+ packet.seqNum = 6;
+ packet.timestamp = 6;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+ dec_state.SetState(&frame);
+ EXPECT_FALSE(dec_state.full_sync());
+
+ // Check wrap for temporal layers.
+ frame.Reset();
+ packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0x00FF;
+ packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
+ packet.codecSpecificHeader.codecHeader.VP8.pictureId = 6;
+ packet.seqNum = 7;
+ packet.timestamp = 7;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ dec_state.SetState(&frame);
+ EXPECT_FALSE(dec_state.full_sync());
+ frame.Reset();
+ packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0x0000;
+ packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
+ packet.codecSpecificHeader.codecHeader.VP8.pictureId = 7;
+ packet.seqNum = 8;
+ packet.timestamp = 8;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+  // Once the state is set to this frame, the same frame is no longer
+  // continuous.
+ dec_state.SetState(&frame);
+ EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
+}
+
+TEST(TestDecodingState, UpdateOldPacket) {
+ VCMDecodingState dec_state;
+ // Update only if zero size and newer than previous.
+  // Should only update if the timestamps match.
+ VCMFrameBuffer frame;
+ VCMPacket packet;
+ packet.timestamp = 1;
+ packet.seqNum = 1;
+ packet.frameType = kVideoFrameDelta;
+ FrameData frame_data;
+ frame_data.rtt_ms = 0;
+ frame_data.rolling_average_packets_per_frame = -1;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ dec_state.SetState(&frame);
+ EXPECT_EQ(dec_state.sequence_num(), 1);
+ // Insert an empty packet that does not belong to the same frame.
+ // => Sequence num should be the same.
+ packet.timestamp = 2;
+ dec_state.UpdateOldPacket(&packet);
+ EXPECT_EQ(dec_state.sequence_num(), 1);
+ // Now insert empty packet belonging to the same frame.
+ packet.timestamp = 1;
+ packet.seqNum = 2;
+ packet.frameType = kEmptyFrame;
+ packet.sizeBytes = 0;
+ dec_state.UpdateOldPacket(&packet);
+ EXPECT_EQ(dec_state.sequence_num(), 2);
+ // Now insert delta packet belonging to the same frame.
+ packet.timestamp = 1;
+ packet.seqNum = 3;
+ packet.frameType = kVideoFrameDelta;
+ packet.sizeBytes = 1400;
+ dec_state.UpdateOldPacket(&packet);
+ EXPECT_EQ(dec_state.sequence_num(), 3);
+ // Insert a packet belonging to an older timestamp - should not update the
+ // sequence number.
+ packet.timestamp = 0;
+ packet.seqNum = 4;
+ packet.frameType = kEmptyFrame;
+ packet.sizeBytes = 0;
+ dec_state.UpdateOldPacket(&packet);
+ EXPECT_EQ(dec_state.sequence_num(), 3);
+}
+
+TEST(TestDecodingState, MultiLayerBehavior) {
+  // Identify sync/non-sync when there is more than one layer.
+ VCMDecodingState dec_state;
+ // Identify packets belonging to old frames/packets.
+ // Set state for current frames.
+ // tl0PicIdx 0, temporal id 0.
+ VCMFrameBuffer frame;
+ VCMPacket packet;
+ packet.frameType = kVideoFrameDelta;
+ packet.codecSpecificHeader.codec = kRtpVideoVp8;
+ packet.timestamp = 0;
+ packet.seqNum = 0;
+ packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
+ packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
+ packet.codecSpecificHeader.codecHeader.VP8.pictureId = 0;
+ FrameData frame_data;
+ frame_data.rtt_ms = 0;
+ frame_data.rolling_average_packets_per_frame = -1;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ dec_state.SetState(&frame);
+ // tl0PicIdx 0, temporal id 1.
+ frame.Reset();
+ packet.timestamp = 1;
+ packet.seqNum = 1;
+ packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
+ packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 1;
+ packet.codecSpecificHeader.codecHeader.VP8.pictureId = 1;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+ dec_state.SetState(&frame);
+ EXPECT_TRUE(dec_state.full_sync());
+ // Lost tl0PicIdx 0, temporal id 2.
+ // Insert tl0PicIdx 0, temporal id 3.
+ frame.Reset();
+ packet.timestamp = 3;
+ packet.seqNum = 3;
+ packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
+ packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 3;
+ packet.codecSpecificHeader.codecHeader.VP8.pictureId = 3;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
+ dec_state.SetState(&frame);
+ EXPECT_FALSE(dec_state.full_sync());
+ // Insert next base layer
+ frame.Reset();
+ packet.timestamp = 4;
+ packet.seqNum = 4;
+ packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 1;
+ packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
+ packet.codecSpecificHeader.codecHeader.VP8.pictureId = 4;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+ dec_state.SetState(&frame);
+ EXPECT_FALSE(dec_state.full_sync());
+ // Insert key frame - should update sync value.
+ // A key frame is always a base layer.
+ frame.Reset();
+ packet.frameType = kVideoFrameKey;
+ packet.isFirstPacket = 1;
+ packet.timestamp = 5;
+ packet.seqNum = 5;
+ packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 2;
+ packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
+ packet.codecSpecificHeader.codecHeader.VP8.pictureId = 5;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+ dec_state.SetState(&frame);
+ EXPECT_TRUE(dec_state.full_sync());
+ // After sync, a continuous PictureId is required
+  // (continuous base layer is not enough).
+ frame.Reset();
+ packet.frameType = kVideoFrameDelta;
+ packet.timestamp = 6;
+ packet.seqNum = 6;
+ packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 3;
+ packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
+ packet.codecSpecificHeader.codecHeader.VP8.pictureId = 6;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+ EXPECT_TRUE(dec_state.full_sync());
+ frame.Reset();
+ packet.frameType = kVideoFrameDelta;
+ packet.isFirstPacket = 1;
+ packet.timestamp = 8;
+ packet.seqNum = 8;
+ packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 4;
+ packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
+ packet.codecSpecificHeader.codecHeader.VP8.pictureId = 8;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
+ EXPECT_TRUE(dec_state.full_sync());
+ dec_state.SetState(&frame);
+ EXPECT_FALSE(dec_state.full_sync());
+
+ // Insert a non-ref frame - should update sync value.
+ frame.Reset();
+ packet.frameType = kVideoFrameDelta;
+ packet.isFirstPacket = 1;
+ packet.timestamp = 9;
+ packet.seqNum = 9;
+ packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 4;
+ packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 2;
+ packet.codecSpecificHeader.codecHeader.VP8.pictureId = 9;
+ packet.codecSpecificHeader.codecHeader.VP8.layerSync = true;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ dec_state.SetState(&frame);
+ EXPECT_TRUE(dec_state.full_sync());
+
+ // The following test will verify the sync flag behavior after a loss.
+ // Create the following pattern:
+  // Update base layer, lose packet 1 (sync flag on, layer 2), insert packet 3
+  // (sync flag on, layer 2); check continuity and sync flag after inserting
+  // packet 2 (sync flag on, layer 1).
+ // Base layer.
+ frame.Reset();
+ dec_state.Reset();
+ packet.frameType = kVideoFrameDelta;
+ packet.isFirstPacket = 1;
+ packet.markerBit = 1;
+ packet.timestamp = 0;
+ packet.seqNum = 0;
+ packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
+ packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
+ packet.codecSpecificHeader.codecHeader.VP8.pictureId = 0;
+ packet.codecSpecificHeader.codecHeader.VP8.layerSync = false;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ dec_state.SetState(&frame);
+ EXPECT_TRUE(dec_state.full_sync());
+ // Layer 2 - 2 packets (insert one, lose one).
+ frame.Reset();
+ packet.frameType = kVideoFrameDelta;
+ packet.isFirstPacket = 1;
+ packet.markerBit = 0;
+ packet.timestamp = 1;
+ packet.seqNum = 1;
+ packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
+ packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 2;
+ packet.codecSpecificHeader.codecHeader.VP8.pictureId = 1;
+ packet.codecSpecificHeader.codecHeader.VP8.layerSync = true;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+ // Layer 1
+ frame.Reset();
+ packet.frameType = kVideoFrameDelta;
+ packet.isFirstPacket = 1;
+ packet.markerBit = 1;
+ packet.timestamp = 2;
+ packet.seqNum = 3;
+ packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
+ packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 1;
+ packet.codecSpecificHeader.codecHeader.VP8.pictureId = 2;
+ packet.codecSpecificHeader.codecHeader.VP8.layerSync = true;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
+ EXPECT_TRUE(dec_state.full_sync());
+}
+
+TEST(TestDecodingState, DiscontinuousPicIdContinuousSeqNum) {
+ VCMDecodingState dec_state;
+ VCMFrameBuffer frame;
+ VCMPacket packet;
+ frame.Reset();
+ packet.frameType = kVideoFrameKey;
+ packet.codecSpecificHeader.codec = kRtpVideoVp8;
+ packet.timestamp = 0;
+ packet.seqNum = 0;
+ packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
+ packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
+ packet.codecSpecificHeader.codecHeader.VP8.pictureId = 0;
+ FrameData frame_data;
+ frame_data.rtt_ms = 0;
+ frame_data.rolling_average_packets_per_frame = -1;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ dec_state.SetState(&frame);
+ EXPECT_TRUE(dec_state.full_sync());
+
+  // Continuous sequence number but discontinuous picture id. This implies
+  // a loss, and we have to fall back to only decoding the base layer.
+ frame.Reset();
+ packet.frameType = kVideoFrameDelta;
+ packet.timestamp += 3000;
+ ++packet.seqNum;
+ packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 1;
+ packet.codecSpecificHeader.codecHeader.VP8.pictureId = 2;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
+ dec_state.SetState(&frame);
+ EXPECT_FALSE(dec_state.full_sync());
+}
+
+TEST(TestDecodingState, OldInput) {
+ VCMDecodingState dec_state;
+ // Identify packets belonging to old frames/packets.
+ // Set state for current frames.
+ VCMFrameBuffer frame;
+ VCMPacket packet;
+ packet.timestamp = 10;
+ packet.seqNum = 1;
+ FrameData frame_data;
+ frame_data.rtt_ms = 0;
+ frame_data.rolling_average_packets_per_frame = -1;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ dec_state.SetState(&frame);
+ packet.timestamp = 9;
+ EXPECT_TRUE(dec_state.IsOldPacket(&packet));
+ // Check for old frame
+ frame.Reset();
+ frame.InsertPacket(packet, 0, kNoErrors, frame_data);
+ EXPECT_TRUE(dec_state.IsOldFrame(&frame));
+}
+
+TEST(TestDecodingState, PictureIdRepeat) {
+ VCMDecodingState dec_state;
+ VCMFrameBuffer frame;
+ VCMPacket packet;
+ packet.frameType = kVideoFrameDelta;
+ packet.codecSpecificHeader.codec = kRtpVideoVp8;
+ packet.timestamp = 0;
+ packet.seqNum = 0;
+ packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
+ packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
+ packet.codecSpecificHeader.codecHeader.VP8.pictureId = 0;
+ FrameData frame_data;
+ frame_data.rtt_ms = 0;
+ frame_data.rolling_average_packets_per_frame = -1;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ dec_state.SetState(&frame);
+ // tl0PicIdx 0, temporal id 1.
+ frame.Reset();
+ ++packet.timestamp;
+ ++packet.seqNum;
+ packet.codecSpecificHeader.codecHeader.VP8.temporalIdx++;
+ packet.codecSpecificHeader.codecHeader.VP8.pictureId++;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+ frame.Reset();
+  // Test a gap in tl0PicIdx while the pictureId repeats.
+ packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx += 3;
+ packet.codecSpecificHeader.codecHeader.VP8.temporalIdx++;
+ packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 1;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
+}
+
+TEST(TestDecodingState, FrameContinuityFlexibleModeKeyFrame) {
+ VCMDecodingState dec_state;
+ VCMFrameBuffer frame;
+ VCMPacket packet;
+ packet.isFirstPacket = true;
+ packet.timestamp = 1;
+ packet.seqNum = 0xffff;
+ uint8_t data[] = "I need a data pointer for this test!";
+ packet.sizeBytes = sizeof(data);
+ packet.dataPtr = data;
+ packet.codecSpecificHeader.codec = kRtpVideoVp9;
+
+ RTPVideoHeaderVP9& vp9_hdr = packet.codecSpecificHeader.codecHeader.VP9;
+ vp9_hdr.picture_id = 10;
+ vp9_hdr.flexible_mode = true;
+
+ FrameData frame_data;
+ frame_data.rtt_ms = 0;
+ frame_data.rolling_average_packets_per_frame = -1;
+
+ // Key frame as first frame
+ packet.frameType = kVideoFrameKey;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+ dec_state.SetState(&frame);
+
+ // Key frame again
+ vp9_hdr.picture_id = 11;
+ frame.Reset();
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+ dec_state.SetState(&frame);
+
+ // Ref to 11, continuous
+ frame.Reset();
+ packet.frameType = kVideoFrameDelta;
+ vp9_hdr.picture_id = 12;
+ vp9_hdr.num_ref_pics = 1;
+ vp9_hdr.pid_diff[0] = 1;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+}
+
+TEST(TestDecodingState, FrameContinuityFlexibleModeOutOfOrderFrames) {
+ VCMDecodingState dec_state;
+ VCMFrameBuffer frame;
+ VCMPacket packet;
+ packet.isFirstPacket = true;
+ packet.timestamp = 1;
+ packet.seqNum = 0xffff;
+ uint8_t data[] = "I need a data pointer for this test!";
+ packet.sizeBytes = sizeof(data);
+ packet.dataPtr = data;
+ packet.codecSpecificHeader.codec = kRtpVideoVp9;
+
+ RTPVideoHeaderVP9& vp9_hdr = packet.codecSpecificHeader.codecHeader.VP9;
+ vp9_hdr.picture_id = 10;
+ vp9_hdr.flexible_mode = true;
+
+ FrameData frame_data;
+ frame_data.rtt_ms = 0;
+ frame_data.rolling_average_packets_per_frame = -1;
+
+ // Key frame as first frame
+ packet.frameType = kVideoFrameKey;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+ dec_state.SetState(&frame);
+
+ // Ref to 10, continuous
+ frame.Reset();
+ packet.frameType = kVideoFrameDelta;
+ vp9_hdr.picture_id = 15;
+ vp9_hdr.num_ref_pics = 1;
+ vp9_hdr.pid_diff[0] = 5;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+ dec_state.SetState(&frame);
+
+ // Out of order, last id 15, this id 12, ref to 10, continuous
+ frame.Reset();
+ vp9_hdr.picture_id = 12;
+ vp9_hdr.pid_diff[0] = 2;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+ dec_state.SetState(&frame);
+
+ // Ref 10, 12, 15, continuous
+ frame.Reset();
+ vp9_hdr.picture_id = 20;
+ vp9_hdr.num_ref_pics = 3;
+ vp9_hdr.pid_diff[0] = 10;
+ vp9_hdr.pid_diff[1] = 8;
+ vp9_hdr.pid_diff[2] = 5;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+}
+
+TEST(TestDecodingState, FrameContinuityFlexibleModeGeneral) {
+ VCMDecodingState dec_state;
+ VCMFrameBuffer frame;
+ VCMPacket packet;
+ packet.isFirstPacket = true;
+ packet.timestamp = 1;
+ packet.seqNum = 0xffff;
+ uint8_t data[] = "I need a data pointer for this test!";
+ packet.sizeBytes = sizeof(data);
+ packet.dataPtr = data;
+ packet.codecSpecificHeader.codec = kRtpVideoVp9;
+
+ RTPVideoHeaderVP9& vp9_hdr = packet.codecSpecificHeader.codecHeader.VP9;
+ vp9_hdr.picture_id = 10;
+ vp9_hdr.flexible_mode = true;
+
+ FrameData frame_data;
+ frame_data.rtt_ms = 0;
+ frame_data.rolling_average_packets_per_frame = -1;
+
+ // Key frame as first frame
+ packet.frameType = kVideoFrameKey;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+
+ // Delta frame as first frame
+ frame.Reset();
+ packet.frameType = kVideoFrameDelta;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
+
+ // Key frame then delta frame
+ frame.Reset();
+ packet.frameType = kVideoFrameKey;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ dec_state.SetState(&frame);
+ frame.Reset();
+ packet.frameType = kVideoFrameDelta;
+ vp9_hdr.num_ref_pics = 1;
+ vp9_hdr.picture_id = 15;
+ vp9_hdr.pid_diff[0] = 5;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+ dec_state.SetState(&frame);
+
+ // Ref to 11, not continuous
+ frame.Reset();
+ vp9_hdr.picture_id = 16;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
+
+ // Ref to 15, continuous
+ frame.Reset();
+ vp9_hdr.picture_id = 16;
+ vp9_hdr.pid_diff[0] = 1;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+ dec_state.SetState(&frame);
+
+ // Ref to 11 and 15, not continuous
+ frame.Reset();
+ vp9_hdr.picture_id = 20;
+ vp9_hdr.num_ref_pics = 2;
+ vp9_hdr.pid_diff[0] = 9;
+ vp9_hdr.pid_diff[1] = 5;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
+
+ // Ref to 10, 15 and 16, continuous
+ frame.Reset();
+ vp9_hdr.picture_id = 22;
+ vp9_hdr.num_ref_pics = 3;
+ vp9_hdr.pid_diff[0] = 12;
+ vp9_hdr.pid_diff[1] = 7;
+ vp9_hdr.pid_diff[2] = 6;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+ dec_state.SetState(&frame);
+
+ // Key Frame, continuous
+ frame.Reset();
+ packet.frameType = kVideoFrameKey;
+ vp9_hdr.picture_id = VCMDecodingState::kFrameDecodedLength - 2;
+ vp9_hdr.num_ref_pics = 0;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+ dec_state.SetState(&frame);
+
+ // Frame at last index, ref to KF, continuous
+ frame.Reset();
+ packet.frameType = kVideoFrameDelta;
+ vp9_hdr.picture_id = VCMDecodingState::kFrameDecodedLength - 1;
+ vp9_hdr.num_ref_pics = 1;
+ vp9_hdr.pid_diff[0] = 1;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+ dec_state.SetState(&frame);
+
+ // Frame after wrapping buffer length, ref to last index, continuous
+ frame.Reset();
+ vp9_hdr.picture_id = 0;
+ vp9_hdr.num_ref_pics = 1;
+ vp9_hdr.pid_diff[0] = 1;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+ dec_state.SetState(&frame);
+
+ // Frame after wrapping start frame, ref to 0, continuous
+ frame.Reset();
+ vp9_hdr.picture_id = 20;
+ vp9_hdr.num_ref_pics = 1;
+ vp9_hdr.pid_diff[0] = 20;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+ dec_state.SetState(&frame);
+
+ // Frame after wrapping start frame, ref to 10, not continuous
+ frame.Reset();
+ vp9_hdr.picture_id = 23;
+ vp9_hdr.num_ref_pics = 1;
+ vp9_hdr.pid_diff[0] = 13;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
+
+ // Key frame, continuous
+ frame.Reset();
+ packet.frameType = kVideoFrameKey;
+ vp9_hdr.picture_id = 25;
+ vp9_hdr.num_ref_pics = 0;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+ dec_state.SetState(&frame);
+
+ // Ref to KF, continuous
+ frame.Reset();
+ packet.frameType = kVideoFrameDelta;
+ vp9_hdr.picture_id = 26;
+ vp9_hdr.num_ref_pics = 1;
+ vp9_hdr.pid_diff[0] = 1;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+ dec_state.SetState(&frame);
+
+ // Ref to frame previous to KF, not continuous
+ frame.Reset();
+ vp9_hdr.picture_id = 30;
+ vp9_hdr.num_ref_pics = 1;
+ vp9_hdr.pid_diff[0] = 30;
+ EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
+ EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/video_coding/encoded_frame.cc b/webrtc/modules/video_coding/encoded_frame.cc
new file mode 100644
index 0000000000..261074ae73
--- /dev/null
+++ b/webrtc/modules/video_coding/encoded_frame.cc
@@ -0,0 +1,225 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_coding/include/video_coding_defines.h"
+#include "webrtc/modules/video_coding/encoded_frame.h"
+#include "webrtc/modules/video_coding/generic_encoder.h"
+#include "webrtc/modules/video_coding/jitter_buffer_common.h"
+
+namespace webrtc {
+
+VCMEncodedFrame::VCMEncodedFrame()
+ : webrtc::EncodedImage(),
+ _renderTimeMs(-1),
+ _payloadType(0),
+ _missingFrame(false),
+ _codec(kVideoCodecUnknown),
+ _fragmentation(),
+ _rotation(kVideoRotation_0),
+ _rotation_set(false) {
+ _codecSpecificInfo.codecType = kVideoCodecUnknown;
+}
+
+VCMEncodedFrame::VCMEncodedFrame(const webrtc::EncodedImage& rhs)
+ : webrtc::EncodedImage(rhs),
+ _renderTimeMs(-1),
+ _payloadType(0),
+ _missingFrame(false),
+ _codec(kVideoCodecUnknown),
+ _fragmentation(),
+ _rotation(kVideoRotation_0),
+ _rotation_set(false) {
+ _codecSpecificInfo.codecType = kVideoCodecUnknown;
+ _buffer = NULL;
+ _size = 0;
+ _length = 0;
+ if (rhs._buffer != NULL) {
+ VerifyAndAllocate(rhs._length);
+ memcpy(_buffer, rhs._buffer, rhs._length);
+ }
+}
+
+VCMEncodedFrame::VCMEncodedFrame(const VCMEncodedFrame& rhs)
+ : webrtc::EncodedImage(rhs),
+ _renderTimeMs(rhs._renderTimeMs),
+ _payloadType(rhs._payloadType),
+ _missingFrame(rhs._missingFrame),
+ _codecSpecificInfo(rhs._codecSpecificInfo),
+ _codec(rhs._codec),
+ _fragmentation(),
+ _rotation(rhs._rotation),
+ _rotation_set(rhs._rotation_set) {
+ _buffer = NULL;
+ _size = 0;
+ _length = 0;
+ if (rhs._buffer != NULL) {
+ VerifyAndAllocate(rhs._length);
+ memcpy(_buffer, rhs._buffer, rhs._length);
+ _length = rhs._length;
+ }
+ _fragmentation.CopyFrom(rhs._fragmentation);
+}
+
+VCMEncodedFrame::~VCMEncodedFrame() {
+ Free();
+}
+
+void VCMEncodedFrame::Free() {
+ Reset();
+ if (_buffer != NULL) {
+ delete[] _buffer;
+ _buffer = NULL;
+ }
+}
+
+void VCMEncodedFrame::Reset() {
+ _renderTimeMs = -1;
+ _timeStamp = 0;
+ _payloadType = 0;
+ _frameType = kVideoFrameDelta;
+ _encodedWidth = 0;
+ _encodedHeight = 0;
+ _completeFrame = false;
+ _missingFrame = false;
+ _length = 0;
+ _codecSpecificInfo.codecType = kVideoCodecUnknown;
+ _codec = kVideoCodecUnknown;
+ _rotation = kVideoRotation_0;
+ _rotation_set = false;
+}
+
+void VCMEncodedFrame::CopyCodecSpecific(const RTPVideoHeader* header) {
+ if (header) {
+ switch (header->codec) {
+ case kRtpVideoVp8: {
+ if (_codecSpecificInfo.codecType != kVideoCodecVP8) {
+ // This is the first packet for this frame.
+ _codecSpecificInfo.codecSpecific.VP8.pictureId = -1;
+ _codecSpecificInfo.codecSpecific.VP8.temporalIdx = 0;
+ _codecSpecificInfo.codecSpecific.VP8.layerSync = false;
+ _codecSpecificInfo.codecSpecific.VP8.keyIdx = -1;
+ _codecSpecificInfo.codecType = kVideoCodecVP8;
+ }
+ _codecSpecificInfo.codecSpecific.VP8.nonReference =
+ header->codecHeader.VP8.nonReference;
+ if (header->codecHeader.VP8.pictureId != kNoPictureId) {
+ _codecSpecificInfo.codecSpecific.VP8.pictureId =
+ header->codecHeader.VP8.pictureId;
+ }
+ if (header->codecHeader.VP8.temporalIdx != kNoTemporalIdx) {
+ _codecSpecificInfo.codecSpecific.VP8.temporalIdx =
+ header->codecHeader.VP8.temporalIdx;
+ _codecSpecificInfo.codecSpecific.VP8.layerSync =
+ header->codecHeader.VP8.layerSync;
+ }
+ if (header->codecHeader.VP8.keyIdx != kNoKeyIdx) {
+ _codecSpecificInfo.codecSpecific.VP8.keyIdx =
+ header->codecHeader.VP8.keyIdx;
+ }
+ break;
+ }
+ case kRtpVideoVp9: {
+ if (_codecSpecificInfo.codecType != kVideoCodecVP9) {
+ // This is the first packet for this frame.
+ _codecSpecificInfo.codecSpecific.VP9.picture_id = -1;
+ _codecSpecificInfo.codecSpecific.VP9.temporal_idx = 0;
+ _codecSpecificInfo.codecSpecific.VP9.spatial_idx = 0;
+ _codecSpecificInfo.codecSpecific.VP9.gof_idx = 0;
+ _codecSpecificInfo.codecSpecific.VP9.inter_layer_predicted = false;
+ _codecSpecificInfo.codecSpecific.VP9.tl0_pic_idx = -1;
+ _codecSpecificInfo.codecType = kVideoCodecVP9;
+ }
+ _codecSpecificInfo.codecSpecific.VP9.inter_pic_predicted =
+ header->codecHeader.VP9.inter_pic_predicted;
+ _codecSpecificInfo.codecSpecific.VP9.flexible_mode =
+ header->codecHeader.VP9.flexible_mode;
+ _codecSpecificInfo.codecSpecific.VP9.num_ref_pics =
+ header->codecHeader.VP9.num_ref_pics;
+ for (uint8_t r = 0; r < header->codecHeader.VP9.num_ref_pics; ++r) {
+ _codecSpecificInfo.codecSpecific.VP9.p_diff[r] =
+ header->codecHeader.VP9.pid_diff[r];
+ }
+ _codecSpecificInfo.codecSpecific.VP9.ss_data_available =
+ header->codecHeader.VP9.ss_data_available;
+ if (header->codecHeader.VP9.picture_id != kNoPictureId) {
+ _codecSpecificInfo.codecSpecific.VP9.picture_id =
+ header->codecHeader.VP9.picture_id;
+ }
+ if (header->codecHeader.VP9.tl0_pic_idx != kNoTl0PicIdx) {
+ _codecSpecificInfo.codecSpecific.VP9.tl0_pic_idx =
+ header->codecHeader.VP9.tl0_pic_idx;
+ }
+ if (header->codecHeader.VP9.temporal_idx != kNoTemporalIdx) {
+ _codecSpecificInfo.codecSpecific.VP9.temporal_idx =
+ header->codecHeader.VP9.temporal_idx;
+ _codecSpecificInfo.codecSpecific.VP9.temporal_up_switch =
+ header->codecHeader.VP9.temporal_up_switch;
+ }
+ if (header->codecHeader.VP9.spatial_idx != kNoSpatialIdx) {
+ _codecSpecificInfo.codecSpecific.VP9.spatial_idx =
+ header->codecHeader.VP9.spatial_idx;
+ _codecSpecificInfo.codecSpecific.VP9.inter_layer_predicted =
+ header->codecHeader.VP9.inter_layer_predicted;
+ }
+ if (header->codecHeader.VP9.gof_idx != kNoGofIdx) {
+ _codecSpecificInfo.codecSpecific.VP9.gof_idx =
+ header->codecHeader.VP9.gof_idx;
+ }
+ if (header->codecHeader.VP9.ss_data_available) {
+ _codecSpecificInfo.codecSpecific.VP9.num_spatial_layers =
+ header->codecHeader.VP9.num_spatial_layers;
+ _codecSpecificInfo.codecSpecific.VP9
+ .spatial_layer_resolution_present =
+ header->codecHeader.VP9.spatial_layer_resolution_present;
+ if (header->codecHeader.VP9.spatial_layer_resolution_present) {
+ for (size_t i = 0; i < header->codecHeader.VP9.num_spatial_layers;
+ ++i) {
+ _codecSpecificInfo.codecSpecific.VP9.width[i] =
+ header->codecHeader.VP9.width[i];
+ _codecSpecificInfo.codecSpecific.VP9.height[i] =
+ header->codecHeader.VP9.height[i];
+ }
+ }
+ _codecSpecificInfo.codecSpecific.VP9.gof.CopyGofInfoVP9(
+ header->codecHeader.VP9.gof);
+ }
+ break;
+ }
+ case kRtpVideoH264: {
+ _codecSpecificInfo.codecType = kVideoCodecH264;
+ break;
+ }
+ default: {
+ _codecSpecificInfo.codecType = kVideoCodecUnknown;
+ break;
+ }
+ }
+ }
+}
+
+const RTPFragmentationHeader* VCMEncodedFrame::FragmentationHeader() const {
+ return &_fragmentation;
+}
+
+void VCMEncodedFrame::VerifyAndAllocate(size_t minimumSize) {
+ if (minimumSize > _size) {
+    // Create a buffer of sufficient size.
+ uint8_t* newBuffer = new uint8_t[minimumSize];
+ if (_buffer) {
+      // Copy the old data into the new buffer.
+ memcpy(newBuffer, _buffer, _size);
+ delete[] _buffer;
+ }
+ _buffer = newBuffer;
+ _size = minimumSize;
+ }
+}
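+// Note on the reallocation above (illustrative): the buffer only grows.
+// Calling VerifyAndAllocate(1500) and then VerifyAndAllocate(100) leaves
+// _size at 1500; the second call is a no-op because 100 <= _size.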
+
+} // namespace webrtc
diff --git a/webrtc/modules/video_coding/encoded_frame.h b/webrtc/modules/video_coding/encoded_frame.h
new file mode 100644
index 0000000000..9034200980
--- /dev/null
+++ b/webrtc/modules/video_coding/encoded_frame.h
@@ -0,0 +1,132 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_ENCODED_FRAME_H_
+#define WEBRTC_MODULES_VIDEO_CODING_ENCODED_FRAME_H_
+
+#include <vector>
+
+#include "webrtc/common_types.h"
+#include "webrtc/common_video/include/video_image.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
+#include "webrtc/modules/video_coding/include/video_coding_defines.h"
+
+namespace webrtc {
+
+class VCMEncodedFrame : protected EncodedImage {
+ public:
+ VCMEncodedFrame();
+ explicit VCMEncodedFrame(const webrtc::EncodedImage& rhs);
+ VCMEncodedFrame(const VCMEncodedFrame& rhs);
+
+ ~VCMEncodedFrame();
+ /**
+   * Deletes the frame buffer and resets members to zero.
+ */
+ void Free();
+ /**
+ * Set render time in milliseconds
+ */
+ void SetRenderTime(const int64_t renderTimeMs) {
+ _renderTimeMs = renderTimeMs;
+ }
+
+ /**
+ * Set the encoded frame size
+ */
+ void SetEncodedSize(uint32_t width, uint32_t height) {
+ _encodedWidth = width;
+ _encodedHeight = height;
+ }
+ /**
+ * Get the encoded image
+ */
+ const webrtc::EncodedImage& EncodedImage() const {
+ return static_cast<const webrtc::EncodedImage&>(*this);
+ }
+ /**
+ * Get pointer to frame buffer
+ */
+ const uint8_t* Buffer() const { return _buffer; }
+ /**
+ * Get frame length
+ */
+ size_t Length() const { return _length; }
+ /**
+ * Get frame timestamp (90kHz)
+ */
+ uint32_t TimeStamp() const { return _timeStamp; }
+ /**
+ * Get render time in milliseconds
+ */
+ int64_t RenderTimeMs() const { return _renderTimeMs; }
+ /**
+ * Get frame type
+ */
+ webrtc::FrameType FrameType() const { return _frameType; }
+ /**
+ * Get frame rotation
+ */
+ VideoRotation rotation() const { return _rotation; }
+ /**
+ * True if this frame is complete, false otherwise
+ */
+ bool Complete() const { return _completeFrame; }
+ /**
+ * True if there's a frame missing before this frame
+ */
+ bool MissingFrame() const { return _missingFrame; }
+ /**
+ * Payload type of the encoded payload
+ */
+ uint8_t PayloadType() const { return _payloadType; }
+ /**
+ * Get codec specific info.
+ * The returned pointer is only valid as long as the VCMEncodedFrame
+ * is valid. Also, VCMEncodedFrame owns the pointer and will delete
+ * the object.
+ */
+ const CodecSpecificInfo* CodecSpecific() const { return &_codecSpecificInfo; }
+
+ const RTPFragmentationHeader* FragmentationHeader() const;
+
+ protected:
+ /**
+   * Verifies that the currently allocated buffer size is larger than or
+   * equal to the input size. If the current buffer size is smaller, a new
+   * allocation is made and the old buffer data is copied to the new buffer.
+   * The buffer size is updated to minimumSize.
+ */
+ void VerifyAndAllocate(size_t minimumSize);
+
+ void Reset();
+
+ void CopyCodecSpecific(const RTPVideoHeader* header);
+
+ int64_t _renderTimeMs;
+ uint8_t _payloadType;
+ bool _missingFrame;
+ CodecSpecificInfo _codecSpecificInfo;
+ webrtc::VideoCodecType _codec;
+ RTPFragmentationHeader _fragmentation;
+ VideoRotation _rotation;
+
+ // Video rotation is only set along with the last packet for each frame
+  // (same as marker bit). This |_rotation_set| is only for debugging purposes,
+ // to ensure we don't set it twice for a frame.
+ bool _rotation_set;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_ENCODED_FRAME_H_
diff --git a/webrtc/modules/video_coding/fec_tables_xor.h b/webrtc/modules/video_coding/fec_tables_xor.h
new file mode 100644
index 0000000000..fa5bd7bde4
--- /dev/null
+++ b/webrtc/modules/video_coding/fec_tables_xor.h
@@ -0,0 +1,459 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_FEC_TABLES_XOR_H_
+#define WEBRTC_MODULES_VIDEO_CODING_FEC_TABLES_XOR_H_
+
+// This is a private header for media_opt_util.cc.
+// It should not be included by other files.
+
+namespace webrtc {
+
+// Table for Protection factor (code rate) of delta frames, for the XOR FEC.
+// Input is the packet loss and an effective rate (bits/frame).
+// Output is array kCodeRateXORTable[k], where k = rate_i*129 + loss_j;
+// loss_j = 0, 1, ..., 128, and rate_i = 0, 1, ..., 49 (6450 = 50 * 129).
+static const int kSizeCodeRateXORTable = 6450;
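+// Lookup sketch (illustrative): for a rate index |rate_i| in [0, 49] and a
+// packet-loss value |loss_j| in [0, 128], the protection factor would be
+// read as:
+//   unsigned char protection = kCodeRateXORTable[rate_i * 129 + loss_j];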
+static const unsigned char kCodeRateXORTable[kSizeCodeRateXORTable] = {
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
+ 11, 11, 11, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39,
+ 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39,
+ 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39,
+ 39, 39, 39, 39, 39, 39, 51, 51, 51, 51, 51, 51, 51, 51, 51,
+ 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51,
+ 51, 51, 51, 51, 51, 51, 51, 51, 51, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 8, 8,
+ 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 30, 30, 30,
+ 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 56, 56, 56,
+ 56, 56, 56, 56, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65,
+ 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65,
+ 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65,
+ 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87,
+ 87, 87, 87, 87, 87, 87, 87, 87, 87, 78, 78, 78, 78, 78, 78,
+ 78, 78, 78, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 6, 6, 6, 23, 23, 23, 23, 23, 23, 23, 23, 23,
+ 23, 23, 23, 23, 23, 23, 44, 44, 44, 44, 44, 44, 50, 50, 50,
+ 50, 50, 50, 50, 50, 50, 68, 68, 68, 68, 68, 68, 68, 85, 85,
+ 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85,
+ 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85,
+ 85, 85, 85, 85, 85, 85, 85, 85, 85, 105, 105, 105, 105, 105, 105,
+ 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105,
+ 105, 105, 105, 88, 88, 88, 88, 88, 88, 88, 88, 88, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 5, 5, 5, 5, 5, 5, 19, 19, 19,
+ 36, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41,
+ 55, 55, 55, 55, 55, 55, 69, 69, 69, 69, 69, 69, 69, 69, 69,
+ 75, 75, 80, 80, 80, 80, 80, 97, 97, 97, 97, 97, 97, 97, 97,
+ 97, 97, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102,
+ 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102,
+ 102, 102, 102, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116,
+ 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 100, 100, 100,
+ 100, 100, 100, 100, 100, 100, 0, 0, 0, 0, 0, 0, 0, 0, 4,
+ 16, 16, 16, 16, 16, 16, 30, 35, 35, 47, 58, 58, 58, 58, 58,
+ 58, 58, 58, 58, 58, 58, 58, 58, 58, 63, 63, 63, 63, 63, 63,
+ 77, 77, 77, 77, 77, 77, 77, 82, 82, 82, 82, 94, 94, 94, 94,
+ 94, 105, 105, 105, 105, 110, 110, 110, 110, 110, 110, 122, 122, 122, 122,
+ 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122,
+ 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 115, 115, 115, 115, 115, 115, 115, 115, 115,
+ 0, 0, 0, 0, 0, 0, 0, 4, 14, 27, 27, 27, 27, 27, 31,
+ 41, 52, 52, 56, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
+ 69, 69, 69, 69, 69, 69, 69, 69, 69, 79, 79, 79, 79, 83, 83,
+ 83, 94, 94, 94, 94, 106, 106, 106, 106, 106, 115, 115, 115, 115, 125,
+ 125, 125, 125, 125, 125, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 0, 0, 0, 0, 3, 3,
+ 3, 17, 28, 38, 38, 38, 38, 38, 47, 51, 63, 63, 63, 72, 72,
+ 72, 72, 72, 72, 72, 76, 76, 76, 76, 80, 80, 80, 80, 80, 80,
+ 80, 80, 80, 84, 84, 84, 84, 93, 93, 93, 105, 105, 105, 105, 114,
+ 114, 114, 114, 114, 124, 124, 124, 124, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 0, 0, 0, 0, 12, 12, 12, 35, 43, 47, 47, 47,
+ 47, 47, 58, 58, 66, 66, 66, 70, 70, 70, 70, 70, 73, 73, 82,
+ 82, 82, 86, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94,
+ 94, 105, 105, 105, 114, 114, 114, 114, 117, 117, 117, 117, 117, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 0, 0, 0,
+ 0, 24, 24, 24, 49, 53, 53, 53, 53, 53, 53, 61, 61, 64, 64,
+ 64, 64, 70, 70, 70, 70, 78, 78, 88, 88, 88, 96, 106, 106, 106,
+ 106, 106, 106, 106, 106, 106, 106, 112, 112, 112, 120, 120, 120, 124, 124,
+ 124, 124, 124, 124, 124, 124, 124, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 0, 0, 0, 5, 36, 36, 36, 55, 55,
+ 55, 55, 55, 55, 55, 58, 58, 58, 58, 58, 64, 78, 78, 78, 78,
+ 87, 87, 94, 94, 94, 103, 110, 110, 110, 110, 110, 110, 110, 110, 116,
+ 116, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 0, 0, 0, 18, 43, 43, 43, 53, 53, 53, 53, 53, 53, 53, 53,
+ 58, 58, 58, 58, 71, 87, 87, 87, 87, 94, 94, 97, 97, 97, 109,
+ 111, 111, 111, 111, 111, 111, 111, 111, 125, 125, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 0, 0, 0, 31, 46, 46,
+ 46, 48, 48, 48, 48, 48, 48, 48, 48, 66, 66, 66, 66, 80, 93,
+ 93, 93, 93, 95, 95, 95, 95, 100, 115, 115, 115, 115, 115, 115, 115,
+ 115, 115, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 0, 0, 4, 40, 45, 45, 45, 45, 45, 45, 45, 45,
+ 49, 49, 49, 74, 74, 74, 74, 86, 90, 90, 90, 90, 95, 95, 95,
+ 95, 106, 120, 120, 120, 120, 120, 120, 120, 120, 120, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 0, 0, 14,
+ 42, 42, 42, 42, 42, 42, 42, 42, 46, 56, 56, 56, 80, 80, 80,
+ 80, 84, 84, 84, 84, 88, 99, 99, 99, 99, 111, 122, 122, 122, 122,
+ 122, 122, 122, 122, 122, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 0, 0, 26, 40, 40, 40, 40, 40, 40,
+ 40, 40, 54, 66, 66, 66, 80, 80, 80, 80, 80, 80, 80, 84, 94,
+ 106, 106, 106, 106, 116, 120, 120, 120, 120, 120, 120, 120, 120, 124, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 0, 3, 34, 38, 38, 38, 38, 38, 42, 42, 42, 63, 72, 72, 76,
+ 80, 80, 80, 80, 80, 80, 80, 89, 101, 114, 114, 114, 114, 118, 118,
+ 118, 118, 118, 118, 118, 118, 118, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 0, 12, 36, 36, 36, 36,
+ 36, 36, 49, 49, 49, 69, 73, 76, 86, 86, 86, 86, 86, 86, 86,
+ 86, 97, 109, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122,
+ 122, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 0, 22, 34, 34, 34, 34, 38, 38, 57, 57, 57, 69,
+ 73, 82, 92, 92, 92, 92, 92, 92, 96, 96, 104, 117, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 0, 29, 33,
+ 33, 33, 33, 44, 44, 62, 62, 62, 69, 77, 87, 95, 95, 95, 95,
+ 95, 95, 107, 107, 110, 120, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 0, 31, 31, 31, 31, 31, 51, 51, 62,
+ 65, 65, 73, 83, 91, 94, 94, 94, 94, 97, 97, 114, 114, 114, 122,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 0, 29, 29, 29, 29, 29, 56, 56, 59, 70, 70, 79, 86, 89, 89,
+ 89, 89, 89, 100, 100, 116, 116, 116, 122, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 0, 28, 28, 28, 28, 28,
+ 57, 57, 57, 76, 76, 83, 86, 86, 86, 86, 86, 89, 104, 104, 114,
+ 114, 114, 124, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 0, 27, 27, 27, 27, 30, 55, 55, 55, 80, 80, 83,
+ 86, 86, 86, 86, 86, 93, 108, 108, 111, 111, 111, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 0, 26, 26,
+ 26, 26, 36, 53, 53, 53, 80, 80, 80, 90, 90, 90, 90, 90, 98,
+ 107, 107, 107, 107, 107, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 0, 26, 26, 26, 28, 42, 52, 54, 54,
+ 78, 78, 78, 95, 95, 95, 97, 97, 104, 106, 106, 106, 106, 106, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 0, 24, 24, 24, 33, 47, 49, 58, 58, 74, 74, 74, 97, 97, 97,
+ 106, 106, 108, 108, 108, 108, 108, 108, 124, 124, 124, 124, 124, 124, 124,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 0, 24, 24, 24, 39, 48,
+ 50, 63, 63, 72, 74, 74, 96, 96, 96, 109, 111, 111, 111, 111, 111,
+ 111, 111, 119, 119, 122, 122, 122, 122, 122, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 0, 23, 23, 23, 43, 46, 54, 66, 66, 69, 77, 77,
+ 92, 92, 92, 105, 113, 113, 113, 113, 113, 113, 113, 115, 117, 123, 123,
+ 123, 123, 123, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 0, 22, 22,
+ 22, 44, 44, 59, 67, 67, 67, 81, 81, 89, 89, 89, 97, 112, 112,
+ 112, 112, 112, 112, 112, 112, 119, 126, 126, 126, 126, 126, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 0, 21, 21, 24, 43, 45, 63, 65, 65,
+ 67, 85, 85, 87, 87, 87, 91, 109, 109, 109, 111, 111, 111, 111, 111,
+ 123, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 0, 21, 21, 28, 42, 50, 63, 63, 66, 71, 85, 85, 85, 85, 87,
+ 92, 106, 106, 108, 114, 114, 114, 114, 114, 125, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 0, 20, 20, 34, 41, 54,
+ 62, 62, 69, 75, 82, 82, 82, 82, 92, 98, 105, 105, 110, 117, 117,
+ 117, 117, 117, 124, 124, 126, 126, 126, 126, 126, 126, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 0, 20, 20, 38, 40, 58, 60, 60, 73, 78, 80, 80,
+ 80, 80, 100, 105, 107, 107, 113, 118, 118, 118, 118, 118, 120, 120, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 0, 19, 21,
+ 38, 40, 58, 58, 60, 75, 77, 77, 77, 81, 81, 107, 109, 109, 109,
+ 114, 116, 116, 116, 116, 116, 116, 116, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 0, 18, 25, 37, 44, 56, 56, 63, 75,
+ 75, 75, 75, 88, 88, 111, 111, 111, 111, 112, 112, 112, 112, 112, 112,
+ 112, 114, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 0, 18, 30, 36, 48, 55, 55, 67, 73, 73, 73, 73, 97, 97, 110,
+ 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 116, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 0, 18, 34, 36, 52, 55,
+ 55, 70, 72, 73, 73, 73, 102, 104, 108, 108, 108, 108, 109, 109, 109,
+ 109, 109, 109, 109, 119, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 0, 17, 35, 35, 52, 59, 59, 70, 70, 76, 76, 76,
+ 99, 105, 105, 105, 105, 105, 111, 111, 111, 111, 111, 111, 111, 121, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 0, 17, 34,
+ 36, 51, 61, 62, 70, 70, 80, 80, 80, 93, 103, 103, 103, 103, 103,
+ 112, 112, 112, 112, 112, 116, 118, 124, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 0, 16, 33, 39, 50, 59, 65, 72, 72,
+ 82, 82, 82, 91, 100, 100, 100, 100, 100, 109, 109, 109, 109, 109, 121,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 0, 16, 32, 43, 48, 54, 66, 75, 75, 81, 83, 83, 92, 97, 97,
+ 97, 99, 99, 105, 105, 105, 105, 105, 123, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 0, 15, 31, 46, 47, 49,
+ 69, 77, 77, 81, 85, 85, 93, 95, 95, 95, 100, 100, 102, 102, 102,
+ 102, 102, 120, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 0, 15, 30, 46, 48, 48, 70, 75, 79, 82, 87, 87,
+ 92, 94, 94, 94, 103, 103, 103, 103, 103, 104, 104, 115, 120, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 0, 15, 30,
+ 45, 50, 50, 68, 70, 80, 85, 89, 89, 90, 95, 95, 95, 104, 104,
+ 104, 104, 104, 109, 109, 112, 114, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 0, 14, 29, 44, 54, 54, 64, 64, 83,
+ 87, 88, 88, 88, 98, 98, 98, 103, 103, 103, 103, 103, 113, 113, 113,
+ 113, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 0, 14, 29, 43, 56, 56, 61, 61, 84, 85, 88, 88, 88, 100, 100,
+ 100, 102, 102, 102, 102, 102, 113, 116, 116, 116, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 0, 14, 28, 42, 57, 57,
+ 62, 62, 80, 80, 91, 91, 91, 100, 100, 100, 100, 100, 100, 100, 100,
+ 109, 119, 119, 119, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 0, 14, 28, 42, 56, 56, 65, 66, 76, 76, 92, 92,
+ 92, 97, 97, 97, 101, 101, 101, 101, 101, 106, 121, 121, 121, 126, 126,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 0, 13, 27,
+ 41, 55, 55, 67, 72, 74, 74, 90, 90, 90, 91, 91, 91, 105, 105,
+ 105, 105, 105, 107, 122, 122, 122, 123, 123, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 0, 13, 27, 40, 54, 54, 67, 76, 76,
+ 76, 85, 85, 85, 85, 85, 85, 112, 112, 112, 112, 112, 112, 121, 121,
+ 121, 121, 121, 126, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+ 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127,
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_FEC_TABLES_XOR_H_
diff --git a/webrtc/modules/video_coding/frame_buffer.cc b/webrtc/modules/video_coding/frame_buffer.cc
new file mode 100644
index 0000000000..b6ddeda4e7
--- /dev/null
+++ b/webrtc/modules/video_coding/frame_buffer.cc
@@ -0,0 +1,270 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_coding/frame_buffer.h"
+
+#include <assert.h>
+#include <string.h>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/modules/video_coding/packet.h"
+
+namespace webrtc {
+
+VCMFrameBuffer::VCMFrameBuffer()
+ : _state(kStateEmpty), _nackCount(0), _latestPacketTimeMs(-1) {}
+
+VCMFrameBuffer::~VCMFrameBuffer() {}
+
+VCMFrameBuffer::VCMFrameBuffer(const VCMFrameBuffer& rhs)
+ : VCMEncodedFrame(rhs),
+ _state(rhs._state),
+ _sessionInfo(),
+ _nackCount(rhs._nackCount),
+ _latestPacketTimeMs(rhs._latestPacketTimeMs) {
+ _sessionInfo = rhs._sessionInfo;
+ _sessionInfo.UpdateDataPointers(rhs._buffer, _buffer);
+}
+
+webrtc::FrameType VCMFrameBuffer::FrameType() const {
+ return _sessionInfo.FrameType();
+}
+
+int32_t VCMFrameBuffer::GetLowSeqNum() const {
+ return _sessionInfo.LowSequenceNumber();
+}
+
+int32_t VCMFrameBuffer::GetHighSeqNum() const {
+ return _sessionInfo.HighSequenceNumber();
+}
+
+int VCMFrameBuffer::PictureId() const {
+ return _sessionInfo.PictureId();
+}
+
+int VCMFrameBuffer::TemporalId() const {
+ return _sessionInfo.TemporalId();
+}
+
+bool VCMFrameBuffer::LayerSync() const {
+ return _sessionInfo.LayerSync();
+}
+
+int VCMFrameBuffer::Tl0PicId() const {
+ return _sessionInfo.Tl0PicId();
+}
+
+bool VCMFrameBuffer::NonReference() const {
+ return _sessionInfo.NonReference();
+}
+
+void VCMFrameBuffer::SetGofInfo(const GofInfoVP9& gof_info, size_t idx) {
+ _sessionInfo.SetGofInfo(gof_info, idx);
+ // TODO(asapersson): Consider adding hdr->VP9.ref_picture_id for testing.
+ _codecSpecificInfo.codecSpecific.VP9.temporal_idx =
+ gof_info.temporal_idx[idx];
+ _codecSpecificInfo.codecSpecific.VP9.temporal_up_switch =
+ gof_info.temporal_up_switch[idx];
+}
+
+bool VCMFrameBuffer::IsSessionComplete() const {
+ return _sessionInfo.complete();
+}
+
+// Insert packet
+VCMFrameBufferEnum VCMFrameBuffer::InsertPacket(
+ const VCMPacket& packet,
+ int64_t timeInMs,
+ VCMDecodeErrorMode decode_error_mode,
+ const FrameData& frame_data) {
+ assert(!(NULL == packet.dataPtr && packet.sizeBytes > 0));
+ if (packet.dataPtr != NULL) {
+ _payloadType = packet.payloadType;
+ }
+
+ if (kStateEmpty == _state) {
+ // First packet (empty and/or media) inserted into this frame.
+    // Store some info and set some initial values.
+ _timeStamp = packet.timestamp;
+ // We only take the ntp timestamp of the first packet of a frame.
+ ntp_time_ms_ = packet.ntp_time_ms_;
+ _codec = packet.codec;
+ if (packet.frameType != kEmptyFrame) {
+      // First media packet.
+ SetState(kStateIncomplete);
+ }
+ }
+
+ uint32_t requiredSizeBytes =
+ Length() + packet.sizeBytes +
+ (packet.insertStartCode ? kH264StartCodeLengthBytes : 0);
+ if (requiredSizeBytes >= _size) {
+ const uint8_t* prevBuffer = _buffer;
+ const uint32_t increments =
+ requiredSizeBytes / kBufferIncStepSizeBytes +
+ (requiredSizeBytes % kBufferIncStepSizeBytes > 0);
+ const uint32_t newSize = _size + increments * kBufferIncStepSizeBytes;
+ if (newSize > kMaxJBFrameSizeBytes) {
+ LOG(LS_ERROR) << "Failed to insert packet due to frame being too "
+ "big.";
+ return kSizeError;
+ }
+ VerifyAndAllocate(newSize);
+ _sessionInfo.UpdateDataPointers(prevBuffer, _buffer);
+ }
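+  // Illustration of the growth arithmetic above, using a made-up step size
+  // (the real kBufferIncStepSizeBytes is defined elsewhere in the jitter
+  // buffer code): with requiredSizeBytes == 2500 and a step of 1000,
+  //   increments = 2500 / 1000 + (2500 % 1000 > 0) = 2 + 1 = 3,
+  // so the buffer grows by 3 * 1000 bytes. The (remainder > 0) term rounds
+  // the integer division up, ensuring newSize covers requiredSizeBytes.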
+
+ if (packet.width > 0 && packet.height > 0) {
+ _encodedWidth = packet.width;
+ _encodedHeight = packet.height;
+ }
+
+  // Don't copy payload-specific data for empty packets (e.g. padding packets).
+ if (packet.sizeBytes > 0)
+ CopyCodecSpecific(&packet.codecSpecificHeader);
+
+ int retVal =
+ _sessionInfo.InsertPacket(packet, _buffer, decode_error_mode, frame_data);
+ if (retVal == -1) {
+ return kSizeError;
+ } else if (retVal == -2) {
+ return kDuplicatePacket;
+ } else if (retVal == -3) {
+ return kOutOfBoundsPacket;
+ }
+  // Update the frame length with the number of bytes inserted.
+ _length = Length() + static_cast<uint32_t>(retVal);
+
+ _latestPacketTimeMs = timeInMs;
+
+ // http://www.etsi.org/deliver/etsi_ts/126100_126199/126114/12.07.00_60/
+ // ts_126114v120700p.pdf Section 7.4.5.
+ // The MTSI client shall add the payload bytes as defined in this clause
+ // onto the last RTP packet in each group of packets which make up a key
+ // frame (I-frame or IDR frame in H.264 (AVC), or an IRAP picture in H.265
+ // (HEVC)).
+ if (packet.markerBit) {
+ RTC_DCHECK(!_rotation_set);
+ _rotation = packet.codecSpecificHeader.rotation;
+ _rotation_set = true;
+ }
+
+ if (_sessionInfo.complete()) {
+ SetState(kStateComplete);
+ return kCompleteSession;
+ } else if (_sessionInfo.decodable()) {
+ SetState(kStateDecodable);
+ return kDecodableSession;
+ }
+ return kIncomplete;
+}
+
+int64_t VCMFrameBuffer::LatestPacketTimeMs() const {
+ return _latestPacketTimeMs;
+}
+
+void VCMFrameBuffer::IncrementNackCount() {
+ _nackCount++;
+}
+
+int16_t VCMFrameBuffer::GetNackCount() const {
+ return _nackCount;
+}
+
+bool VCMFrameBuffer::HaveFirstPacket() const {
+ return _sessionInfo.HaveFirstPacket();
+}
+
+bool VCMFrameBuffer::HaveLastPacket() const {
+ return _sessionInfo.HaveLastPacket();
+}
+
+int VCMFrameBuffer::NumPackets() const {
+ return _sessionInfo.NumPackets();
+}
+
+void VCMFrameBuffer::Reset() {
+ _length = 0;
+ _timeStamp = 0;
+ _sessionInfo.Reset();
+ _payloadType = 0;
+ _nackCount = 0;
+ _latestPacketTimeMs = -1;
+ _state = kStateEmpty;
+ VCMEncodedFrame::Reset();
+}
+
+// Set state of frame
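+// Valid transitions, matching the asserts below:
+//   kStateEmpty      -> kStateIncomplete, kStateDecodable or kStateComplete
+//   kStateIncomplete -> kStateDecodable or kStateComplete
+//   kStateDecodable  -> kStateComplete
+// kStateEmpty is only re-entered via Reset().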
+void VCMFrameBuffer::SetState(VCMFrameBufferStateEnum state) {
+ if (_state == state) {
+ return;
+ }
+ switch (state) {
+ case kStateIncomplete:
+      // We can only enter this state from kStateEmpty.
+      assert(_state == kStateEmpty);
+
+      // Do nothing; we received a packet.
+ break;
+
+ case kStateComplete:
+ assert(_state == kStateEmpty || _state == kStateIncomplete ||
+ _state == kStateDecodable);
+
+ break;
+
+ case kStateEmpty:
+ // Should only be set to empty through Reset().
+ assert(false);
+ break;
+
+ case kStateDecodable:
+ assert(_state == kStateEmpty || _state == kStateIncomplete);
+ break;
+ }
+ _state = state;
+}
+
+// Get current state of frame
+VCMFrameBufferStateEnum VCMFrameBuffer::GetState() const {
+ return _state;
+}
+
+// Get current state and timestamp of frame
+VCMFrameBufferStateEnum VCMFrameBuffer::GetState(uint32_t& timeStamp) const {
+ timeStamp = TimeStamp();
+ return GetState();
+}
+
+bool VCMFrameBuffer::IsRetransmitted() const {
+ return _sessionInfo.session_nack();
+}
+
+void VCMFrameBuffer::PrepareForDecode(bool continuous) {
+#ifdef INDEPENDENT_PARTITIONS
+ if (_codec == kVideoCodecVP8) {
+ _length = _sessionInfo.BuildVP8FragmentationHeader(_buffer, _length,
+ &_fragmentation);
+ } else {
+ size_t bytes_removed = _sessionInfo.MakeDecodable();
+ _length -= bytes_removed;
+ }
+#else
+ size_t bytes_removed = _sessionInfo.MakeDecodable();
+ _length -= bytes_removed;
+#endif
+ // Transfer frame information to EncodedFrame and create any codec
+ // specific information.
+ _frameType = _sessionInfo.FrameType();
+ _completeFrame = _sessionInfo.complete();
+ _missingFrame = !continuous;
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/video_coding/frame_buffer.h b/webrtc/modules/video_coding/frame_buffer.h
new file mode 100644
index 0000000000..f5a707efe4
--- /dev/null
+++ b/webrtc/modules/video_coding/frame_buffer.h
@@ -0,0 +1,92 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_FRAME_BUFFER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_FRAME_BUFFER_H_
+
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/video_coding/include/video_coding.h"
+#include "webrtc/modules/video_coding/encoded_frame.h"
+#include "webrtc/modules/video_coding/jitter_buffer_common.h"
+#include "webrtc/modules/video_coding/session_info.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+class VCMFrameBuffer : public VCMEncodedFrame {
+ public:
+ VCMFrameBuffer();
+ virtual ~VCMFrameBuffer();
+
+ VCMFrameBuffer(const VCMFrameBuffer& rhs);
+
+ virtual void Reset();
+
+ VCMFrameBufferEnum InsertPacket(const VCMPacket& packet,
+ int64_t timeInMs,
+ VCMDecodeErrorMode decode_error_mode,
+ const FrameData& frame_data);
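+  // Illustrative call sequence (based on how the return values are produced
+  // in frame_buffer.cc): InsertPacket() is called once per received packet
+  // until it returns kCompleteSession, or kDecodableSession when decoding
+  // with errors is allowed; PrepareForDecode() is then called before the
+  // frame is handed to the decoder.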
+
+ // State
+ // Get current state of frame
+ VCMFrameBufferStateEnum GetState() const;
+ // Get current state and timestamp of frame
+ VCMFrameBufferStateEnum GetState(uint32_t& timeStamp) const;
+ void PrepareForDecode(bool continuous);
+
+ bool IsRetransmitted() const;
+ bool IsSessionComplete() const;
+ bool HaveFirstPacket() const;
+ bool HaveLastPacket() const;
+ int NumPackets() const;
+  // Makes sure the session contains a decodable stream.
+ void MakeSessionDecodable();
+
+ // Sequence numbers
+ // Get lowest packet sequence number in frame
+ int32_t GetLowSeqNum() const;
+ // Get highest packet sequence number in frame
+ int32_t GetHighSeqNum() const;
+
+ int PictureId() const;
+ int TemporalId() const;
+ bool LayerSync() const;
+ int Tl0PicId() const;
+ bool NonReference() const;
+
+ void SetGofInfo(const GofInfoVP9& gof_info, size_t idx);
+
+ // Increments a counter to keep track of the number of packets of this frame
+ // which were NACKed before they arrived.
+ void IncrementNackCount();
+ // Returns the number of packets of this frame which were NACKed before they
+ // arrived.
+ int16_t GetNackCount() const;
+
+ int64_t LatestPacketTimeMs() const;
+
+ webrtc::FrameType FrameType() const;
+ void SetPreviousFrameLoss();
+
+ // The number of packets discarded because the decoder can't make use of them.
+ int NotDecodablePackets() const;
+
+ private:
+ void SetState(VCMFrameBufferStateEnum state); // Set state of frame
+
+ VCMFrameBufferStateEnum _state; // Current state of the frame
+ VCMSessionInfo _sessionInfo;
+ uint16_t _nackCount;
+ int64_t _latestPacketTimeMs;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_FRAME_BUFFER_H_
diff --git a/webrtc/modules/video_coding/generic_decoder.cc b/webrtc/modules/video_coding/generic_decoder.cc
new file mode 100644
index 0000000000..5cbe0f5ba0
--- /dev/null
+++ b/webrtc/modules/video_coding/generic_decoder.cc
@@ -0,0 +1,192 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/logging.h"
+#include "webrtc/base/trace_event.h"
+#include "webrtc/modules/video_coding/include/video_coding.h"
+#include "webrtc/modules/video_coding/generic_decoder.h"
+#include "webrtc/modules/video_coding/internal_defines.h"
+#include "webrtc/system_wrappers/include/clock.h"
+
+namespace webrtc {
+
+VCMDecodedFrameCallback::VCMDecodedFrameCallback(VCMTiming* timing,
+ Clock* clock)
+ : _critSect(CriticalSectionWrapper::CreateCriticalSection()),
+ _clock(clock),
+ _receiveCallback(NULL),
+ _timing(timing),
+ _timestampMap(kDecoderFrameMemoryLength),
+ _lastReceivedPictureID(0) {}
+
+VCMDecodedFrameCallback::~VCMDecodedFrameCallback() {
+ delete _critSect;
+}
+
+void VCMDecodedFrameCallback::SetUserReceiveCallback(
+ VCMReceiveCallback* receiveCallback) {
+ CriticalSectionScoped cs(_critSect);
+ _receiveCallback = receiveCallback;
+}
+
+VCMReceiveCallback* VCMDecodedFrameCallback::UserReceiveCallback() {
+ CriticalSectionScoped cs(_critSect);
+ return _receiveCallback;
+}
+
+int32_t VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage) {
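+  // -1 signals an unknown decode time; the two-argument overload below then
+  // derives it from the clock and the recorded decodeStartTimeMs.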
+ return Decoded(decodedImage, -1);
+}
+
+int32_t VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage,
+ int64_t decode_time_ms) {
+ TRACE_EVENT_INSTANT1("webrtc", "VCMDecodedFrameCallback::Decoded",
+ "timestamp", decodedImage.timestamp());
+ // TODO(holmer): We should improve this so that we can handle multiple
+ // callbacks from one call to Decode().
+ VCMFrameInformation* frameInfo;
+ VCMReceiveCallback* callback;
+ {
+ CriticalSectionScoped cs(_critSect);
+ frameInfo = _timestampMap.Pop(decodedImage.timestamp());
+ callback = _receiveCallback;
+ }
+
+ if (frameInfo == NULL) {
+ LOG(LS_WARNING) << "Too many frames backed up in the decoder, dropping "
+ "this one.";
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+
+ const int64_t now_ms = _clock->TimeInMilliseconds();
+ if (decode_time_ms < 0) {
+ decode_time_ms =
+ static_cast<int32_t>(now_ms - frameInfo->decodeStartTimeMs);
+ }
+ _timing->StopDecodeTimer(decodedImage.timestamp(), decode_time_ms, now_ms,
+ frameInfo->renderTimeMs);
+
+ if (callback != NULL) {
+ decodedImage.set_render_time_ms(frameInfo->renderTimeMs);
+ decodedImage.set_rotation(frameInfo->rotation);
+ callback->FrameToRender(decodedImage);
+ }
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t VCMDecodedFrameCallback::ReceivedDecodedReferenceFrame(
+ const uint64_t pictureId) {
+ CriticalSectionScoped cs(_critSect);
+ if (_receiveCallback != NULL) {
+ return _receiveCallback->ReceivedDecodedReferenceFrame(pictureId);
+ }
+ return -1;
+}
+
+int32_t VCMDecodedFrameCallback::ReceivedDecodedFrame(
+ const uint64_t pictureId) {
+ _lastReceivedPictureID = pictureId;
+ return 0;
+}
+
+uint64_t VCMDecodedFrameCallback::LastReceivedPictureID() const {
+ return _lastReceivedPictureID;
+}
+
+void VCMDecodedFrameCallback::OnDecoderImplementationName(
+ const char* implementation_name) {
+ CriticalSectionScoped cs(_critSect);
+ if (_receiveCallback)
+ _receiveCallback->OnDecoderImplementationName(implementation_name);
+}
+
+void VCMDecodedFrameCallback::Map(uint32_t timestamp,
+ VCMFrameInformation* frameInfo) {
+ CriticalSectionScoped cs(_critSect);
+ _timestampMap.Add(timestamp, frameInfo);
+}
+
+int32_t VCMDecodedFrameCallback::Pop(uint32_t timestamp) {
+ CriticalSectionScoped cs(_critSect);
+ if (_timestampMap.Pop(timestamp) == NULL) {
+ return VCM_GENERAL_ERROR;
+ }
+ return VCM_OK;
+}
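+
+// Illustrative sketch of the Map()/Pop() pairing (see VCMGenericDecoder
+// below):
+//   callback->Map(frame.TimeStamp(), &frame_info);  // before Decode()
+//   decoder->Decode(...);
+// On success the asynchronous Decoded() callback pops the entry by
+// timestamp; on a decode failure or a no-output return,
+// VCMGenericDecoder::Decode() calls callback->Pop() itself so the map entry
+// is not leaked.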
+
+VCMGenericDecoder::VCMGenericDecoder(VideoDecoder* decoder, bool isExternal)
+ : _callback(NULL),
+ _frameInfos(),
+ _nextFrameInfoIdx(0),
+ _decoder(decoder),
+ _codecType(kVideoCodecUnknown),
+ _isExternal(isExternal),
+ _keyFrameDecoded(false) {}
+
+VCMGenericDecoder::~VCMGenericDecoder() {}
+
+int32_t VCMGenericDecoder::InitDecode(const VideoCodec* settings,
+ int32_t numberOfCores) {
+ TRACE_EVENT0("webrtc", "VCMGenericDecoder::InitDecode");
+ _codecType = settings->codecType;
+
+ return _decoder->InitDecode(settings, numberOfCores);
+}
+
+int32_t VCMGenericDecoder::Decode(const VCMEncodedFrame& frame, int64_t nowMs) {
+ TRACE_EVENT1("webrtc", "VCMGenericDecoder::Decode", "timestamp",
+ frame.EncodedImage()._timeStamp);
+ _frameInfos[_nextFrameInfoIdx].decodeStartTimeMs = nowMs;
+ _frameInfos[_nextFrameInfoIdx].renderTimeMs = frame.RenderTimeMs();
+ _frameInfos[_nextFrameInfoIdx].rotation = frame.rotation();
+ _callback->Map(frame.TimeStamp(), &_frameInfos[_nextFrameInfoIdx]);
+
+ _nextFrameInfoIdx = (_nextFrameInfoIdx + 1) % kDecoderFrameMemoryLength;
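+  // _frameInfos is a fixed ring of kDecoderFrameMemoryLength (10) slots that
+  // is reused across calls; the matching _timestampMap entry is removed when
+  // the asynchronous Decoded() callback fires for this timestamp.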
+ int32_t ret = _decoder->Decode(frame.EncodedImage(), frame.MissingFrame(),
+ frame.FragmentationHeader(),
+ frame.CodecSpecific(), frame.RenderTimeMs());
+
+ _callback->OnDecoderImplementationName(_decoder->ImplementationName());
+ if (ret < WEBRTC_VIDEO_CODEC_OK) {
+ LOG(LS_WARNING) << "Failed to decode frame with timestamp "
+ << frame.TimeStamp() << ", error code: " << ret;
+ _callback->Pop(frame.TimeStamp());
+ return ret;
+ } else if (ret == WEBRTC_VIDEO_CODEC_NO_OUTPUT ||
+ ret == WEBRTC_VIDEO_CODEC_REQUEST_SLI) {
+ // No output
+ _callback->Pop(frame.TimeStamp());
+ }
+ return ret;
+}
+
+int32_t VCMGenericDecoder::Release() {
+ return _decoder->Release();
+}
+
+int32_t VCMGenericDecoder::Reset() {
+ return _decoder->Reset();
+}
+
+int32_t VCMGenericDecoder::RegisterDecodeCompleteCallback(
+ VCMDecodedFrameCallback* callback) {
+ _callback = callback;
+ return _decoder->RegisterDecodeCompleteCallback(callback);
+}
+
+bool VCMGenericDecoder::External() const {
+ return _isExternal;
+}
+
+bool VCMGenericDecoder::PrefersLateDecoding() const {
+ return _decoder->PrefersLateDecoding();
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/video_coding/generic_decoder.h b/webrtc/modules/video_coding/generic_decoder.h
new file mode 100644
index 0000000000..67ceabfc53
--- /dev/null
+++ b/webrtc/modules/video_coding/generic_decoder.h
@@ -0,0 +1,111 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_GENERIC_DECODER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_GENERIC_DECODER_H_
+
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
+#include "webrtc/modules/video_coding/encoded_frame.h"
+#include "webrtc/modules/video_coding/timestamp_map.h"
+#include "webrtc/modules/video_coding/timing.h"
+
+namespace webrtc {
+
+class VCMReceiveCallback;
+
+enum { kDecoderFrameMemoryLength = 10 };
+
+struct VCMFrameInformation {
+ int64_t renderTimeMs;
+ int64_t decodeStartTimeMs;
+ void* userData;
+ VideoRotation rotation;
+};
+
+class VCMDecodedFrameCallback : public DecodedImageCallback {
+ public:
+ VCMDecodedFrameCallback(VCMTiming* timing, Clock* clock);
+ virtual ~VCMDecodedFrameCallback();
+ void SetUserReceiveCallback(VCMReceiveCallback* receiveCallback);
+ VCMReceiveCallback* UserReceiveCallback();
+
+ virtual int32_t Decoded(VideoFrame& decodedImage); // NOLINT
+ virtual int32_t Decoded(VideoFrame& decodedImage, // NOLINT
+ int64_t decode_time_ms);
+ virtual int32_t ReceivedDecodedReferenceFrame(const uint64_t pictureId);
+ virtual int32_t ReceivedDecodedFrame(const uint64_t pictureId);
+
+ uint64_t LastReceivedPictureID() const;
+ void OnDecoderImplementationName(const char* implementation_name);
+
+ void Map(uint32_t timestamp, VCMFrameInformation* frameInfo);
+ int32_t Pop(uint32_t timestamp);
+
+ private:
+ // Protect |_receiveCallback| and |_timestampMap|.
+ CriticalSectionWrapper* _critSect;
+ Clock* _clock;
+ VCMReceiveCallback* _receiveCallback GUARDED_BY(_critSect);
+ VCMTiming* _timing;
+ VCMTimestampMap _timestampMap GUARDED_BY(_critSect);
+ uint64_t _lastReceivedPictureID;
+};
+
+class VCMGenericDecoder {
+ friend class VCMCodecDataBase;
+
+ public:
+ explicit VCMGenericDecoder(VideoDecoder* decoder, bool isExternal = false);
+ ~VCMGenericDecoder();
+
+ /**
+ * Initialize the decoder with the information from the VideoCodec
+ */
+ int32_t InitDecode(const VideoCodec* settings, int32_t numberOfCores);
+
+ /**
+   * Decode to a raw I420 frame.
+   *
+   * inputFrame : Reference to the encoded frame to decode.
+   * nowMs      : Current time in milliseconds, used for decode timing.
+ */
+ int32_t Decode(const VCMEncodedFrame& inputFrame, int64_t nowMs);
+
+ /**
+ * Free the decoder memory
+ */
+ int32_t Release();
+
+ /**
+ * Reset the decoder state, prepare for a new call
+ */
+ int32_t Reset();
+
+ /**
+ * Set decode callback. Deregistering while decoding is illegal.
+ */
+ int32_t RegisterDecodeCompleteCallback(VCMDecodedFrameCallback* callback);
+
+ bool External() const;
+ bool PrefersLateDecoding() const;
+
+ private:
+ VCMDecodedFrameCallback* _callback;
+ VCMFrameInformation _frameInfos[kDecoderFrameMemoryLength];
+ uint32_t _nextFrameInfoIdx;
+ VideoDecoder* const _decoder;
+ VideoCodecType _codecType;
+ bool _isExternal;
+ bool _keyFrameDecoded;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_GENERIC_DECODER_H_
diff --git a/webrtc/modules/video_coding/generic_encoder.cc b/webrtc/modules/video_coding/generic_encoder.cc
new file mode 100644
index 0000000000..c7444ce99f
--- /dev/null
+++ b/webrtc/modules/video_coding/generic_encoder.cc
@@ -0,0 +1,311 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_coding/generic_encoder.h"
+
+#include <vector>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/trace_event.h"
+#include "webrtc/engine_configurations.h"
+#include "webrtc/modules/video_coding/encoded_frame.h"
+#include "webrtc/modules/video_coding/media_optimization.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+
+namespace webrtc {
+namespace {
+// Copy codec-specific information from |info| into |rtp|. If no relevant
+// information is found in |info|, |rtp| is left unchanged.
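+// The caller (VCMEncodedFrameCallback::Encoded() below) zeroes the header
+// with memset() before calling this, so fields not written here stay zero.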
+void CopyCodecSpecific(const CodecSpecificInfo* info, RTPVideoHeader* rtp) {
+ RTC_DCHECK(info);
+ switch (info->codecType) {
+ case kVideoCodecVP8: {
+ rtp->codec = kRtpVideoVp8;
+ rtp->codecHeader.VP8.InitRTPVideoHeaderVP8();
+ rtp->codecHeader.VP8.pictureId = info->codecSpecific.VP8.pictureId;
+ rtp->codecHeader.VP8.nonReference = info->codecSpecific.VP8.nonReference;
+ rtp->codecHeader.VP8.temporalIdx = info->codecSpecific.VP8.temporalIdx;
+ rtp->codecHeader.VP8.layerSync = info->codecSpecific.VP8.layerSync;
+ rtp->codecHeader.VP8.tl0PicIdx = info->codecSpecific.VP8.tl0PicIdx;
+ rtp->codecHeader.VP8.keyIdx = info->codecSpecific.VP8.keyIdx;
+ rtp->simulcastIdx = info->codecSpecific.VP8.simulcastIdx;
+ return;
+ }
+ case kVideoCodecVP9: {
+ rtp->codec = kRtpVideoVp9;
+ rtp->codecHeader.VP9.InitRTPVideoHeaderVP9();
+ rtp->codecHeader.VP9.inter_pic_predicted =
+ info->codecSpecific.VP9.inter_pic_predicted;
+ rtp->codecHeader.VP9.flexible_mode =
+ info->codecSpecific.VP9.flexible_mode;
+ rtp->codecHeader.VP9.ss_data_available =
+ info->codecSpecific.VP9.ss_data_available;
+ rtp->codecHeader.VP9.picture_id = info->codecSpecific.VP9.picture_id;
+ rtp->codecHeader.VP9.tl0_pic_idx = info->codecSpecific.VP9.tl0_pic_idx;
+ rtp->codecHeader.VP9.temporal_idx = info->codecSpecific.VP9.temporal_idx;
+ rtp->codecHeader.VP9.spatial_idx = info->codecSpecific.VP9.spatial_idx;
+ rtp->codecHeader.VP9.temporal_up_switch =
+ info->codecSpecific.VP9.temporal_up_switch;
+ rtp->codecHeader.VP9.inter_layer_predicted =
+ info->codecSpecific.VP9.inter_layer_predicted;
+ rtp->codecHeader.VP9.gof_idx = info->codecSpecific.VP9.gof_idx;
+ rtp->codecHeader.VP9.num_spatial_layers =
+ info->codecSpecific.VP9.num_spatial_layers;
+
+ if (info->codecSpecific.VP9.ss_data_available) {
+ rtp->codecHeader.VP9.spatial_layer_resolution_present =
+ info->codecSpecific.VP9.spatial_layer_resolution_present;
+ if (info->codecSpecific.VP9.spatial_layer_resolution_present) {
+ for (size_t i = 0; i < info->codecSpecific.VP9.num_spatial_layers;
+ ++i) {
+ rtp->codecHeader.VP9.width[i] = info->codecSpecific.VP9.width[i];
+ rtp->codecHeader.VP9.height[i] = info->codecSpecific.VP9.height[i];
+ }
+ }
+ rtp->codecHeader.VP9.gof.CopyGofInfoVP9(info->codecSpecific.VP9.gof);
+ }
+
+ rtp->codecHeader.VP9.num_ref_pics = info->codecSpecific.VP9.num_ref_pics;
+ for (int i = 0; i < info->codecSpecific.VP9.num_ref_pics; ++i)
+ rtp->codecHeader.VP9.pid_diff[i] = info->codecSpecific.VP9.p_diff[i];
+ return;
+ }
+ case kVideoCodecH264:
+ rtp->codec = kRtpVideoH264;
+ return;
+ case kVideoCodecGeneric:
+ rtp->codec = kRtpVideoGeneric;
+ rtp->simulcastIdx = info->codecSpecific.generic.simulcast_idx;
+ return;
+ default:
+ return;
+ }
+}
+} // namespace
+
+// #define DEBUG_ENCODER_BIT_STREAM
+
+VCMGenericEncoder::VCMGenericEncoder(
+ VideoEncoder* encoder,
+ VideoEncoderRateObserver* rate_observer,
+ VCMEncodedFrameCallback* encoded_frame_callback,
+ bool internalSource)
+ : encoder_(encoder),
+ rate_observer_(rate_observer),
+ vcm_encoded_frame_callback_(encoded_frame_callback),
+ internal_source_(internalSource),
+ encoder_params_({0, 0, 0, 0}),
+ rotation_(kVideoRotation_0),
+ is_screenshare_(false) {}
+
+VCMGenericEncoder::~VCMGenericEncoder() {}
+
+int32_t VCMGenericEncoder::Release() {
+ return encoder_->Release();
+}
+
+int32_t VCMGenericEncoder::InitEncode(const VideoCodec* settings,
+ int32_t numberOfCores,
+ size_t maxPayloadSize) {
+ TRACE_EVENT0("webrtc", "VCMGenericEncoder::InitEncode");
+ {
+ rtc::CritScope lock(&params_lock_);
+ encoder_params_.target_bitrate = settings->startBitrate * 1000;
+ encoder_params_.input_frame_rate = settings->maxFramerate;
+ }
+
+ is_screenshare_ = settings->mode == VideoCodecMode::kScreensharing;
+ if (encoder_->InitEncode(settings, numberOfCores, maxPayloadSize) != 0) {
+ LOG(LS_ERROR) << "Failed to initialize the encoder associated with "
+ "payload name: "
+ << settings->plName;
+ return -1;
+ }
+ encoder_->RegisterEncodeCompleteCallback(vcm_encoded_frame_callback_);
+ return 0;
+}
+
+int32_t VCMGenericEncoder::Encode(const VideoFrame& inputFrame,
+ const CodecSpecificInfo* codecSpecificInfo,
+ const std::vector<FrameType>& frameTypes) {
+ TRACE_EVENT1("webrtc", "VCMGenericEncoder::Encode", "timestamp",
+ inputFrame.timestamp());
+
+ for (FrameType frame_type : frameTypes)
+ RTC_DCHECK(frame_type == kVideoFrameKey || frame_type == kVideoFrameDelta);
+
+ rotation_ = inputFrame.rotation();
+
+  // Keep track of the current frame rotation and apply it to the output of
+  // the encoder. This might not be exact, as the encoder could have a one
+  // frame delay, but it should be close enough.
+ // TODO(pbos): Map from timestamp, this is racy (even if rotation_ is locked
+ // properly, which it isn't). More than one frame may be in the pipeline.
+ vcm_encoded_frame_callback_->SetRotation(rotation_);
+
+ int32_t result = encoder_->Encode(inputFrame, codecSpecificInfo, &frameTypes);
+
+ if (vcm_encoded_frame_callback_) {
+ vcm_encoded_frame_callback_->SignalLastEncoderImplementationUsed(
+ encoder_->ImplementationName());
+ }
+
+ if (is_screenshare_ &&
+ result == WEBRTC_VIDEO_CODEC_TARGET_BITRATE_OVERSHOOT) {
+ // Target bitrate exceeded, encoder state has been reset - try again.
+ return encoder_->Encode(inputFrame, codecSpecificInfo, &frameTypes);
+ }
+
+ return result;
+}
+
+void VCMGenericEncoder::SetEncoderParameters(const EncoderParameters& params) {
+ bool channel_parameters_have_changed;
+ bool rates_have_changed;
+ {
+ rtc::CritScope lock(&params_lock_);
+ channel_parameters_have_changed =
+ params.loss_rate != encoder_params_.loss_rate ||
+ params.rtt != encoder_params_.rtt;
+ rates_have_changed =
+ params.target_bitrate != encoder_params_.target_bitrate ||
+ params.input_frame_rate != encoder_params_.input_frame_rate;
+ encoder_params_ = params;
+ }
+ if (channel_parameters_have_changed)
+ encoder_->SetChannelParameters(params.loss_rate, params.rtt);
+ if (rates_have_changed) {
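+    // (target_bitrate + 500) / 1000 rounds to the nearest kbps:
+    // e.g. 1499 bps -> 1 kbps, 1500 bps -> 2 kbps.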
+ uint32_t target_bitrate_kbps = (params.target_bitrate + 500) / 1000;
+ encoder_->SetRates(target_bitrate_kbps, params.input_frame_rate);
+ if (rate_observer_ != nullptr) {
+ rate_observer_->OnSetRates(params.target_bitrate,
+ params.input_frame_rate);
+ }
+ }
+}
+
+EncoderParameters VCMGenericEncoder::GetEncoderParameters() const {
+ rtc::CritScope lock(&params_lock_);
+ return encoder_params_;
+}
+
+int32_t VCMGenericEncoder::SetPeriodicKeyFrames(bool enable) {
+ return encoder_->SetPeriodicKeyFrames(enable);
+}
+
+int32_t VCMGenericEncoder::RequestFrame(
+ const std::vector<FrameType>& frame_types) {
+ VideoFrame image;
+ return encoder_->Encode(image, NULL, &frame_types);
+}
+
+bool VCMGenericEncoder::InternalSource() const {
+ return internal_source_;
+}
+
+void VCMGenericEncoder::OnDroppedFrame() {
+ encoder_->OnDroppedFrame();
+}
+
+bool VCMGenericEncoder::SupportsNativeHandle() const {
+ return encoder_->SupportsNativeHandle();
+}
+
+int VCMGenericEncoder::GetTargetFramerate() {
+ return encoder_->GetTargetFramerate();
+}
+
+/***************************
+ * Callback Implementation
+ ***************************/
+VCMEncodedFrameCallback::VCMEncodedFrameCallback(
+ EncodedImageCallback* post_encode_callback)
+ : send_callback_(),
+ _mediaOpt(NULL),
+ _payloadType(0),
+ _internalSource(false),
+ _rotation(kVideoRotation_0),
+ post_encode_callback_(post_encode_callback)
+#ifdef DEBUG_ENCODER_BIT_STREAM
+ ,
+ _bitStreamAfterEncoder(NULL)
+#endif
+{
+#ifdef DEBUG_ENCODER_BIT_STREAM
+ _bitStreamAfterEncoder = fopen("encoderBitStream.bit", "wb");
+#endif
+}
+
+VCMEncodedFrameCallback::~VCMEncodedFrameCallback() {
+#ifdef DEBUG_ENCODER_BIT_STREAM
+ fclose(_bitStreamAfterEncoder);
+#endif
+}
+
+int32_t VCMEncodedFrameCallback::SetTransportCallback(
+ VCMPacketizationCallback* transport) {
+ send_callback_ = transport;
+ return VCM_OK;
+}
+
+int32_t VCMEncodedFrameCallback::Encoded(
+ const EncodedImage& encoded_image,
+ const CodecSpecificInfo* codecSpecificInfo,
+ const RTPFragmentationHeader* fragmentationHeader) {
+ TRACE_EVENT_INSTANT1("webrtc", "VCMEncodedFrameCallback::Encoded",
+ "timestamp", encoded_image._timeStamp);
+ post_encode_callback_->Encoded(encoded_image, NULL, NULL);
+
+ if (send_callback_ == NULL) {
+ return VCM_UNINITIALIZED;
+ }
+
+#ifdef DEBUG_ENCODER_BIT_STREAM
+ if (_bitStreamAfterEncoder != NULL) {
+ fwrite(encoded_image._buffer, 1, encoded_image._length,
+ _bitStreamAfterEncoder);
+ }
+#endif
+
+ RTPVideoHeader rtpVideoHeader;
+ memset(&rtpVideoHeader, 0, sizeof(RTPVideoHeader));
+ RTPVideoHeader* rtpVideoHeaderPtr = &rtpVideoHeader;
+ if (codecSpecificInfo) {
+ CopyCodecSpecific(codecSpecificInfo, rtpVideoHeaderPtr);
+ }
+ rtpVideoHeader.rotation = _rotation;
+
+ int32_t callbackReturn = send_callback_->SendData(
+ _payloadType, encoded_image, *fragmentationHeader, rtpVideoHeaderPtr);
+ if (callbackReturn < 0) {
+ return callbackReturn;
+ }
+
+ if (_mediaOpt != NULL) {
+ _mediaOpt->UpdateWithEncodedData(encoded_image);
+ if (_internalSource)
+ return _mediaOpt->DropFrame(); // Signal to encoder to drop next frame.
+ }
+ return VCM_OK;
+}
+
+void VCMEncodedFrameCallback::SetMediaOpt(
+ media_optimization::MediaOptimization* mediaOpt) {
+ _mediaOpt = mediaOpt;
+}
+
+void VCMEncodedFrameCallback::SignalLastEncoderImplementationUsed(
+ const char* implementation_name) {
+ if (send_callback_)
+ send_callback_->OnEncoderImplementationName(implementation_name);
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/video_coding/generic_encoder.h b/webrtc/modules/video_coding/generic_encoder.h
new file mode 100644
index 0000000000..f739edb44f
--- /dev/null
+++ b/webrtc/modules/video_coding/generic_encoder.h
@@ -0,0 +1,149 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_GENERIC_ENCODER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_GENERIC_ENCODER_H_
+
+#include <stdio.h>
+#include <vector>
+
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
+#include "webrtc/modules/video_coding/include/video_coding_defines.h"
+
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/scoped_ptr.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+namespace media_optimization {
+class MediaOptimization;
+} // namespace media_optimization
+
+struct EncoderParameters {
+ uint32_t target_bitrate;
+ uint8_t loss_rate;
+ int64_t rtt;
+ uint32_t input_frame_rate;
+};
+
+/***************************************/
+/* VCMEncodedFrameCallback class */
+/***************************************/
+class VCMEncodedFrameCallback : public EncodedImageCallback {
+ public:
+ explicit VCMEncodedFrameCallback(
+ EncodedImageCallback* post_encode_callback);
+ virtual ~VCMEncodedFrameCallback();
+
+ /*
+ * Callback implementation - codec encode complete
+ */
+ int32_t Encoded(
+ const EncodedImage& encodedImage,
+ const CodecSpecificInfo* codecSpecificInfo = NULL,
+ const RTPFragmentationHeader* fragmentationHeader = NULL);
+ /*
+ * Register the transport callback used to deliver encoded data
+ */
+ int32_t SetTransportCallback(VCMPacketizationCallback* transport);
+ /**
+ * Set media optimization
+ */
+ void SetMediaOpt(media_optimization::MediaOptimization* mediaOpt);
+
+ void SetPayloadType(uint8_t payloadType) {
+ _payloadType = payloadType;
+ }
+
+ void SetInternalSource(bool internalSource) {
+ _internalSource = internalSource;
+ }
+
+ void SetRotation(VideoRotation rotation) { _rotation = rotation; }
+ void SignalLastEncoderImplementationUsed(
+ const char* encoder_implementation_name);
+
+ private:
+ VCMPacketizationCallback* send_callback_;
+ media_optimization::MediaOptimization* _mediaOpt;
+ uint8_t _payloadType;
+ bool _internalSource;
+ VideoRotation _rotation;
+
+ EncodedImageCallback* post_encode_callback_;
+
+#ifdef DEBUG_ENCODER_BIT_STREAM
+ FILE* _bitStreamAfterEncoder;
+#endif
+}; // end of VCMEncodedFrameCallback class
+
+/******************************/
+/* VCMGenericEncoder class */
+/******************************/
+class VCMGenericEncoder {
+ friend class VCMCodecDataBase;
+
+ public:
+ VCMGenericEncoder(VideoEncoder* encoder,
+ VideoEncoderRateObserver* rate_observer,
+ VCMEncodedFrameCallback* encoded_frame_callback,
+ bool internalSource);
+ ~VCMGenericEncoder();
+ /**
+ * Free encoder memory
+ */
+ int32_t Release();
+ /**
+ * Initialize the encoder with the information from the VideoCodec
+ */
+ int32_t InitEncode(const VideoCodec* settings,
+ int32_t numberOfCores,
+ size_t maxPayloadSize);
+ /**
+ * Encode raw image
+ * inputFrame : Frame containing raw image
+ * codecSpecificInfo : Specific codec data
+ * frameTypes : The requested frame types to encode
+ */
+ int32_t Encode(const VideoFrame& inputFrame,
+ const CodecSpecificInfo* codecSpecificInfo,
+ const std::vector<FrameType>& frameTypes);
+
+ void SetEncoderParameters(const EncoderParameters& params);
+ EncoderParameters GetEncoderParameters() const;
+
+ int32_t SetPeriodicKeyFrames(bool enable);
+
+ int32_t RequestFrame(const std::vector<FrameType>& frame_types);
+
+ bool InternalSource() const;
+
+ void OnDroppedFrame();
+
+ bool SupportsNativeHandle() const;
+
+ int GetTargetFramerate();
+
+ private:
+ VideoEncoder* const encoder_;
+ VideoEncoderRateObserver* const rate_observer_;
+ VCMEncodedFrameCallback* const vcm_encoded_frame_callback_;
+ const bool internal_source_;
+ mutable rtc::CriticalSection params_lock_;
+ EncoderParameters encoder_params_ GUARDED_BY(params_lock_);
+ VideoRotation rotation_;
+ bool is_screenshare_;
+}; // end of VCMGenericEncoder class
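+
+// Usage sketch (illustrative only; |encoder|, |rate_observer|,
+// |post_encode_cb| and |raw_frame| are assumed to be created elsewhere and
+// are not defined in this header):
+//
+//   VCMEncodedFrameCallback callback(post_encode_cb);
+//   VCMGenericEncoder generic(encoder, rate_observer, &callback, false);
+//   VideoCodec settings;
+//   VideoCodingModule::Codec(kVideoCodecVP8, &settings);
+//   generic.InitEncode(&settings, 1 /* cores */, 1200 /* max payload */);
+//   std::vector<FrameType> frame_types(1, kVideoFrameDelta);
+//   generic.Encode(raw_frame, NULL, frame_types);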
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_GENERIC_ENCODER_H_
diff --git a/webrtc/modules/video_coding/include/mock/mock_vcm_callbacks.h b/webrtc/modules/video_coding/include/mock/mock_vcm_callbacks.h
new file mode 100644
index 0000000000..0185dae333
--- /dev/null
+++ b/webrtc/modules/video_coding/include/mock/mock_vcm_callbacks.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_INCLUDE_MOCK_MOCK_VCM_CALLBACKS_H_
+#define WEBRTC_MODULES_VIDEO_CODING_INCLUDE_MOCK_MOCK_VCM_CALLBACKS_H_
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "webrtc/modules/video_coding/include/video_coding_defines.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+class MockVCMFrameTypeCallback : public VCMFrameTypeCallback {
+ public:
+ MOCK_METHOD0(RequestKeyFrame, int32_t());
+ MOCK_METHOD1(SliceLossIndicationRequest, int32_t(const uint64_t pictureId));
+};
+
+class MockPacketRequestCallback : public VCMPacketRequestCallback {
+ public:
+ MOCK_METHOD2(ResendPackets,
+ int32_t(const uint16_t* sequenceNumbers, uint16_t length));
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_INCLUDE_MOCK_MOCK_VCM_CALLBACKS_H_
diff --git a/webrtc/modules/video_coding/include/mock/mock_video_codec_interface.h b/webrtc/modules/video_coding/include/mock/mock_video_codec_interface.h
new file mode 100644
index 0000000000..9cb4a83535
--- /dev/null
+++ b/webrtc/modules/video_coding/include/mock/mock_video_codec_interface.h
@@ -0,0 +1,81 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_INCLUDE_MOCK_MOCK_VIDEO_CODEC_INTERFACE_H_
+#define WEBRTC_MODULES_VIDEO_CODING_INCLUDE_MOCK_MOCK_VIDEO_CODEC_INTERFACE_H_
+
+#include <string>
+#include <vector>
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+class MockEncodedImageCallback : public EncodedImageCallback {
+ public:
+ MOCK_METHOD3(Encoded,
+ int32_t(const EncodedImage& encodedImage,
+ const CodecSpecificInfo* codecSpecificInfo,
+ const RTPFragmentationHeader* fragmentation));
+};
+
+class MockVideoEncoder : public VideoEncoder {
+ public:
+ MOCK_CONST_METHOD2(Version, int32_t(int8_t* version, int32_t length));
+ MOCK_METHOD3(InitEncode,
+ int32_t(const VideoCodec* codecSettings,
+ int32_t numberOfCores,
+ size_t maxPayloadSize));
+ MOCK_METHOD3(Encode,
+ int32_t(const VideoFrame& inputImage,
+ const CodecSpecificInfo* codecSpecificInfo,
+ const std::vector<FrameType>* frame_types));
+ MOCK_METHOD1(RegisterEncodeCompleteCallback,
+ int32_t(EncodedImageCallback* callback));
+ MOCK_METHOD0(Release, int32_t());
+ MOCK_METHOD0(Reset, int32_t());
+ MOCK_METHOD2(SetChannelParameters, int32_t(uint32_t packetLoss, int64_t rtt));
+ MOCK_METHOD2(SetRates, int32_t(uint32_t newBitRate, uint32_t frameRate));
+ MOCK_METHOD1(SetPeriodicKeyFrames, int32_t(bool enable));
+};
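+
+// Test usage sketch (illustrative; assumes the usual gmock
+// using-declarations for testing::_ and testing::Return):
+//
+//   MockVideoEncoder encoder;
+//   EXPECT_CALL(encoder, InitEncode(_, _, _))
+//       .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK));
+//   EXPECT_CALL(encoder, Encode(_, _, _))
+//       .WillRepeatedly(Return(WEBRTC_VIDEO_CODEC_OK));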
+
+class MockDecodedImageCallback : public DecodedImageCallback {
+ public:
+ MOCK_METHOD1(Decoded, int32_t(VideoFrame& decodedImage)); // NOLINT
+ MOCK_METHOD2(Decoded,
+ int32_t(VideoFrame& decodedImage, // NOLINT
+ int64_t decode_time_ms));
+ MOCK_METHOD1(ReceivedDecodedReferenceFrame,
+ int32_t(const uint64_t pictureId));
+ MOCK_METHOD1(ReceivedDecodedFrame, int32_t(const uint64_t pictureId));
+};
+
+class MockVideoDecoder : public VideoDecoder {
+ public:
+ MOCK_METHOD2(InitDecode,
+ int32_t(const VideoCodec* codecSettings, int32_t numberOfCores));
+ MOCK_METHOD5(Decode,
+ int32_t(const EncodedImage& inputImage,
+ bool missingFrames,
+ const RTPFragmentationHeader* fragmentation,
+ const CodecSpecificInfo* codecSpecificInfo,
+ int64_t renderTimeMs));
+ MOCK_METHOD1(RegisterDecodeCompleteCallback,
+ int32_t(DecodedImageCallback* callback));
+ MOCK_METHOD0(Release, int32_t());
+ MOCK_METHOD0(Reset, int32_t());
+ MOCK_METHOD0(Copy, VideoDecoder*());
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_INCLUDE_MOCK_MOCK_VIDEO_CODEC_INTERFACE_H_
diff --git a/webrtc/modules/video_coding/include/video_codec_interface.h b/webrtc/modules/video_coding/include/video_codec_interface.h
new file mode 100644
index 0000000000..19303c0d67
--- /dev/null
+++ b/webrtc/modules/video_coding/include/video_codec_interface.h
@@ -0,0 +1,99 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_INCLUDE_VIDEO_CODEC_INTERFACE_H_
+#define WEBRTC_MODULES_VIDEO_CODING_INCLUDE_VIDEO_CODEC_INTERFACE_H_
+
+#include <vector>
+
+#include "webrtc/common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/video_coding/include/video_error_codes.h"
+#include "webrtc/typedefs.h"
+#include "webrtc/video_decoder.h"
+#include "webrtc/video_encoder.h"
+#include "webrtc/video_frame.h"
+
+namespace webrtc {
+
+class RTPFragmentationHeader; // forward declaration
+
+// Note: if any pointers are added to this struct, it must be fitted
+// with a copy-constructor. See below.
+struct CodecSpecificInfoVP8 {
+ bool hasReceivedSLI;
+ uint8_t pictureIdSLI;
+ bool hasReceivedRPSI;
+ uint64_t pictureIdRPSI;
+ int16_t pictureId; // Negative value to skip pictureId.
+ bool nonReference;
+ uint8_t simulcastIdx;
+ uint8_t temporalIdx;
+ bool layerSync;
+ int tl0PicIdx; // Negative value to skip tl0PicIdx.
+ int8_t keyIdx; // Negative value to skip keyIdx.
+};
+
+struct CodecSpecificInfoVP9 {
+ bool has_received_sli;
+ uint8_t picture_id_sli;
+ bool has_received_rpsi;
+ uint64_t picture_id_rpsi;
+ int16_t picture_id; // Negative value to skip pictureId.
+
+ bool inter_pic_predicted; // This layer frame is dependent on previously
+ // coded frame(s).
+ bool flexible_mode;
+ bool ss_data_available;
+
+ int tl0_pic_idx; // Negative value to skip tl0PicIdx.
+ uint8_t temporal_idx;
+ uint8_t spatial_idx;
+ bool temporal_up_switch;
+ bool inter_layer_predicted; // Frame is dependent on directly lower spatial
+ // layer frame.
+ uint8_t gof_idx;
+
+ // SS data.
+ size_t num_spatial_layers; // Always populated.
+ bool spatial_layer_resolution_present;
+ uint16_t width[kMaxVp9NumberOfSpatialLayers];
+ uint16_t height[kMaxVp9NumberOfSpatialLayers];
+ GofInfoVP9 gof;
+
+ // Frame reference data.
+ uint8_t num_ref_pics;
+ uint8_t p_diff[kMaxVp9RefPics];
+};
+
+struct CodecSpecificInfoGeneric {
+ uint8_t simulcast_idx;
+};
+
+struct CodecSpecificInfoH264 {};
+
+union CodecSpecificInfoUnion {
+ CodecSpecificInfoGeneric generic;
+ CodecSpecificInfoVP8 VP8;
+ CodecSpecificInfoVP9 VP9;
+ CodecSpecificInfoH264 H264;
+};
+
+// Note: if any pointers are added to this struct or its sub-structs, it
+// must be fitted with a copy-constructor. This is because it is copied
+// in the copy-constructor of VCMEncodedFrame.
+struct CodecSpecificInfo {
+ VideoCodecType codecType;
+ CodecSpecificInfoUnion codecSpecific;
+};
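+
+// Example of filling in this struct for a VP8 frame (values are
+// illustrative):
+//
+//   CodecSpecificInfo info;
+//   memset(&info, 0, sizeof(info));
+//   info.codecType = kVideoCodecVP8;
+//   info.codecSpecific.VP8.pictureId = -1;  // Negative: skip pictureId.
+//   info.codecSpecific.VP8.tl0PicIdx = -1;  // Negative: skip tl0PicIdx.
+//   info.codecSpecific.VP8.keyIdx = -1;     // Negative: skip keyIdx.
+//   info.codecSpecific.VP8.simulcastIdx = 0;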
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_INCLUDE_VIDEO_CODEC_INTERFACE_H_
diff --git a/webrtc/modules/video_coding/include/video_coding.h b/webrtc/modules/video_coding/include/video_coding.h
new file mode 100644
index 0000000000..c46896c823
--- /dev/null
+++ b/webrtc/modules/video_coding/include/video_coding.h
@@ -0,0 +1,519 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_INCLUDE_VIDEO_CODING_H_
+#define WEBRTC_MODULES_VIDEO_CODING_INCLUDE_VIDEO_CODING_H_
+
+#if defined(WEBRTC_WIN)
+// This is a workaround on Windows due to the fact that some Windows headers
+// define CreateEvent as a macro to either CreateEventW or CreateEventA.
+// This can cause problems since we use that name as well: it could be
+// declared one way here while another file that pulls in a Windows header
+// sees a different declaration, and implementing CreateEvent() then causes
+// compilation errors. So for consistency, we include the main Windows header
+// here.
+#include <windows.h>
+#endif
+
+#include "webrtc/modules/include/module.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/video_coding/include/video_coding_defines.h"
+#include "webrtc/system_wrappers/include/event_wrapper.h"
+#include "webrtc/video_frame.h"
+
+namespace webrtc {
+
+class Clock;
+class EncodedImageCallback;
+class VideoEncoder;
+class VideoDecoder;
+struct CodecSpecificInfo;
+
+class EventFactory {
+ public:
+ virtual ~EventFactory() {}
+
+ virtual EventWrapper* CreateEvent() = 0;
+};
+
+class EventFactoryImpl : public EventFactory {
+ public:
+ virtual ~EventFactoryImpl() {}
+
+ virtual EventWrapper* CreateEvent() { return EventWrapper::Create(); }
+};
+
+// Used to indicate which decode with errors mode should be used.
+enum VCMDecodeErrorMode {
+ kNoErrors, // Never decode with errors. Video will freeze
+ // if nack is disabled.
+ kSelectiveErrors, // Frames that are determined decodable in
+ // VCMSessionInfo may be decoded with missing
+ // packets. As not all incomplete frames will be
+ // decodable, video will freeze if nack is disabled.
+ kWithErrors // Release frames as needed. Errors may be
+ // introduced as some encoded frames may not be
+ // complete.
+};
+
+class VideoCodingModule : public Module {
+ public:
+ enum SenderNackMode { kNackNone, kNackAll, kNackSelective };
+
+ enum ReceiverRobustness { kNone, kHardNack, kSoftNack, kReferenceSelection };
+
+ static VideoCodingModule* Create(
+ Clock* clock,
+ VideoEncoderRateObserver* encoder_rate_observer,
+ VCMQMSettingsCallback* qm_settings_callback);
+
+ static VideoCodingModule* Create(Clock* clock, EventFactory* event_factory);
+
+ static void Destroy(VideoCodingModule* module);
+
+ // Get supported codec settings using codec type
+ //
+ // Input:
+ // - codecType : The codec type to get settings for
+ // - codec : Memory where the codec settings will be stored
+ static void Codec(VideoCodecType codecType, VideoCodec* codec);
+
+ /*
+ * Sender
+ */
+
+ // Registers a codec to be used for encoding. Calling this
+ // API multiple times overwrites any previously registered codecs.
+ //
+ // NOTE: Must be called on the thread that constructed the VCM instance.
+ //
+ // Input:
+ // - sendCodec : Settings for the codec to be registered.
+ // - numberOfCores : The number of cores the codec is allowed
+ // to use.
+ // - maxPayloadSize : The maximum size each payload is allowed
+ // to have. Usually MTU - overhead.
+ //
+ // Return value : VCM_OK, on success.
+ // < 0, on error.
+ virtual int32_t RegisterSendCodec(const VideoCodec* sendCodec,
+ uint32_t numberOfCores,
+ uint32_t maxPayloadSize) = 0;
+
+ // Register an external encoder object. This cannot be used together with
+ // external decoder callbacks.
+ //
+ // Input:
+ // - externalEncoder : Encoder object to be used for encoding frames
+ // inserted with the AddVideoFrame API.
+ // - payloadType : The payload type to which this encoder is bound.
+ //
+ // Return value : VCM_OK, on success.
+ // < 0, on error.
+ // TODO(pbos): Remove return type when unused elsewhere.
+ virtual int32_t RegisterExternalEncoder(VideoEncoder* externalEncoder,
+ uint8_t payloadType,
+ bool internalSource = false) = 0;
+
+ // API to get currently configured encoder target bitrate in bits/s.
+ //
+ // Return value : 0, on success.
+ // < 0, on error.
+ virtual int Bitrate(unsigned int* bitrate) const = 0;
+
+ // API to get currently configured encoder target frame rate.
+ //
+ // Return value : 0, on success.
+ // < 0, on error.
+ virtual int FrameRate(unsigned int* framerate) const = 0;
+
+ // Sets the parameters describing the send channel. These parameters are
+ // inputs to the Media Optimization inside the VCM and also specify the
+ // target bit rate for the encoder. Bit rate used by NACK should already be
+ // compensated for by the user.
+ //
+ // Input:
+ // - target_bitrate : The target bitrate for VCM in bits/s.
+ // - lossRate : Fractions of lost packets the past second.
+ // (loss rate in percent = 100 * packetLoss /
+ // 255)
+ // - rtt : Current round-trip time in ms.
+ //
+ // Return value : VCM_OK, on success.
+ // < 0, on error.
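+ //
+ // Example: a measured packet loss of 10% corresponds to
+ // lossRate = 255 * 10 / 100, i.e. about 26, since the loss fraction is
+ // expressed in units of 1/255 (100 * 26 / 255 is roughly 10.2%).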
+ virtual int32_t SetChannelParameters(uint32_t target_bitrate,
+ uint8_t lossRate,
+ int64_t rtt) = 0;
+
+ // Sets the parameters describing the receive channel. These parameters are
+ // inputs to the Media Optimization inside the VCM.
+ //
+ // Input:
+ // - rtt : Current round-trip time in ms.
+ //
+ // Return value : VCM_OK, on success.
+ // < 0, on error.
+ virtual int32_t SetReceiveChannelParameters(int64_t rtt) = 0;
+
+ // Register a transport callback which will be called to deliver the encoded
+ // data and side information.
+ //
+ // Input:
+ // - transport : The callback object to register.
+ //
+ // Return value : VCM_OK, on success.
+ // < 0, on error.
+ virtual int32_t RegisterTransportCallback(
+ VCMPacketizationCallback* transport) = 0;
+
+ // Register a video output information callback which will be called to
+ // deliver information about the video stream produced by the encoder, for
+ // instance the average frame rate and bit rate.
+ //
+ // Input:
+ // - outputInformation : The callback object to register.
+ //
+ // Return value : VCM_OK, on success.
+ // < 0, on error.
+ virtual int32_t RegisterSendStatisticsCallback(
+ VCMSendStatisticsCallback* sendStats) = 0;
+
+ // Register a video protection callback which will be called to deliver
+ // the requested FEC rate and NACK status (on/off).
+ //
+ // Input:
+ // - protection : The callback object to register.
+ //
+ // Return value : VCM_OK, on success.
+ // < 0, on error.
+ virtual int32_t RegisterProtectionCallback(
+ VCMProtectionCallback* protection) = 0;
+
+ // Enable or disable a video protection method.
+ //
+ // Input:
+ // - videoProtection : The method to enable or disable.
+ // - enable : True if the method should be enabled, false if
+ // it should be disabled.
+ //
+ // Return value : VCM_OK, on success.
+ // < 0, on error.
+ virtual int32_t SetVideoProtection(VCMVideoProtection videoProtection,
+ bool enable) = 0;
+
+ // Add one raw video frame to the encoder. This function does all the
+ // necessary processing, then decides what frame type to encode, or if the
+ // frame should be dropped. If the frame should be encoded it passes the
+ // frame to the encoder before it returns.
+ //
+ // Input:
+ // - videoFrame : Video frame to encode.
+ // - codecSpecificInfo : Extra codec information, e.g., pre-parsed
+ // in-band signaling.
+ //
+ // Return value : VCM_OK, on success.
+ // < 0, on error.
+ virtual int32_t AddVideoFrame(
+ const VideoFrame& videoFrame,
+ const VideoContentMetrics* contentMetrics = NULL,
+ const CodecSpecificInfo* codecSpecificInfo = NULL) = 0;
+
+ // Next frame encoded should be an intra frame (keyframe).
+ //
+ // Return value : VCM_OK, on success.
+ // < 0, on error.
+ virtual int32_t IntraFrameRequest(int stream_index) = 0;
+
+ // Enable or disable the frame dropper. Can be used to disable frame
+ // dropping when the encoder over-uses its bit rate. This API is designed to
+ // be used when the encoded frames are supposed to be stored to an AVI file,
+ // or when the I420 codec is used and the target bit rate shouldn't affect
+ // the frame rate.
+ //
+ // Input:
+ // - enable : True to enable the setting, false to disable it.
+ //
+ // Return value : VCM_OK, on success.
+ // < 0, on error.
+ virtual int32_t EnableFrameDropper(bool enable) = 0;
+
+ /*
+ * Receiver
+ */
+
+ // Register possible receive codecs; can be called multiple times for
+ // different codecs. The module will automatically switch between registered
+ // codecs depending on the payload type of incoming frames. The actual
+ // decoder will be created when needed.
+ //
+ // Input:
+ // - receiveCodec : Settings for the codec to be registered.
+ // - numberOfCores : Number of CPU cores that the decoder is allowed
+ // to use.
+ // - requireKeyFrame : Set this to true if you don't want any delta
+ // frames to be decoded until the first key frame
+ // has been decoded.
+ //
+ // Return value : VCM_OK, on success.
+ // < 0, on error.
+ virtual int32_t RegisterReceiveCodec(const VideoCodec* receiveCodec,
+ int32_t numberOfCores,
+ bool requireKeyFrame = false) = 0;
+
+ // Register an externally defined decoder/renderer object. Can be a decoder
+ // only or a decoder coupled with a renderer. Note that RegisterReceiveCodec
+ // must also be called for the decoder to be used for decoding incoming
+ // streams.
+ //
+ // Input:
+ // - externalDecoder : The external decoder/renderer object.
+ // - payloadType : The payload type to which this decoder should
+ // be registered.
+ //
+ virtual void RegisterExternalDecoder(VideoDecoder* externalDecoder,
+ uint8_t payloadType) = 0;
+
+ // Register a receive callback. Will be called whenever there is a new frame
+ // ready for rendering.
+ //
+ // Input:
+ // - receiveCallback : The callback object to be used by the
+ // module when a frame is ready for rendering.
+ // De-register with a NULL pointer.
+ //
+ // Return value : VCM_OK, on success.
+ // < 0, on error.
+ virtual int32_t RegisterReceiveCallback(
+ VCMReceiveCallback* receiveCallback) = 0;
+
+ // Register a receive statistics callback which will be called to deliver
+ // information about the video stream received by the receiving side of the
+ // VCM, for instance the average frame rate and bit rate.
+ //
+ // Input:
+ // - receiveStats : The callback object to register.
+ //
+ // Return value : VCM_OK, on success.
+ // < 0, on error.
+ virtual int32_t RegisterReceiveStatisticsCallback(
+ VCMReceiveStatisticsCallback* receiveStats) = 0;
+
+ // Register a decoder timing callback which will be called to deliver
+ // information about the timing of the decoder in the receiving side of the
+ // VCM, for instance the current and maximum frame decode latency.
+ //
+ // Input:
+ // - decoderTiming : The callback object to register.
+ //
+ // Return value : VCM_OK, on success.
+ // < 0, on error.
+ virtual int32_t RegisterDecoderTimingCallback(
+ VCMDecoderTimingCallback* decoderTiming) = 0;
+
+ // Register a frame type request callback. This callback will be called when
+ // the module needs to request specific frame types from the send side.
+ //
+ // Input:
+ // - frameTypeCallback : The callback object to be used by the
+ // module when requesting a specific type of
+ // frame from the send side.
+ // De-register with a NULL pointer.
+ //
+ // Return value : VCM_OK, on success.
+ // < 0, on error.
+ virtual int32_t RegisterFrameTypeCallback(
+ VCMFrameTypeCallback* frameTypeCallback) = 0;
+
+ // Registers a callback which is called whenever the receive side of the VCM
+ // encounters holes in the packet sequence and needs packets to be
+ // retransmitted.
+ //
+ // Input:
+ // - callback : The callback to be registered in the VCM.
+ //
+ // Return value : VCM_OK, on success.
+ // <0, on error.
+ virtual int32_t RegisterPacketRequestCallback(
+ VCMPacketRequestCallback* callback) = 0;
+
+ // Waits for the next frame in the jitter buffer to become complete (waits
+ // no longer than maxWaitTimeMs), then passes it to the decoder for decoding.
+ // Should be called as often as possible to get the most out of the decoder.
+ //
+ // Return value : VCM_OK, on success.
+ // < 0, on error.
+ virtual int32_t Decode(uint16_t maxWaitTimeMs = 200) = 0;
+
+ // Registers a callback which conveys the size of the render buffer.
+ virtual int RegisterRenderBufferSizeCallback(
+ VCMRenderBufferSizeCallback* callback) = 0;
+
+ // Reset the decoder state to the initial state.
+ //
+ // Return value : VCM_OK, on success.
+ // < 0, on error.
+ virtual int32_t ResetDecoder() = 0;
+
+ // API to get the codec which is currently used for decoding by the module.
+ //
+ // Input:
+ // - currentReceiveCodec : Memory where the current receive codec
+ // settings will be stored.
+ //
+ // Return value : VCM_OK, on success.
+ // < 0, on error.
+ virtual int32_t ReceiveCodec(VideoCodec* currentReceiveCodec) const = 0;
+
+ // API to get the codec type currently used for decoding by the module.
+ //
+ // Return value : codec type, on success.
+ // kVideoCodecUnknown, on error or if no receive codec is
+ // registered
+ virtual VideoCodecType ReceiveCodec() const = 0;
+
+ // Insert a parsed packet into the receiver side of the module. Will be
+ // placed in the jitter buffer waiting for the frame to become complete.
+ // Returns as soon as the packet has been placed in the jitter buffer.
+ //
+ // Input:
+ // - incomingPayload : Payload of the packet.
+ // - payloadLength : Length of the payload.
+ // - rtpInfo : The parsed header.
+ //
+ // Return value : VCM_OK, on success.
+ // < 0, on error.
+ virtual int32_t IncomingPacket(const uint8_t* incomingPayload,
+ size_t payloadLength,
+ const WebRtcRTPHeader& rtpInfo) = 0;
+
+ // Minimum playout delay (used for lip-sync). This is the minimum delay
+ // required to sync with audio. Not included in VideoCodingModule::Delay().
+ // Defaults to 0 ms.
+ //
+ // Input:
+ // - minPlayoutDelayMs : Minimum playout delay in ms.
+ //
+ // Return value : VCM_OK, on success.
+ // < 0, on error.
+ virtual int32_t SetMinimumPlayoutDelay(uint32_t minPlayoutDelayMs) = 0;
+
+ // Set the time required by the renderer to render a frame.
+ //
+ // Input:
+ // - timeMS : The time in ms required by the renderer to render a
+ // frame.
+ //
+ // Return value : VCM_OK, on success.
+ // < 0, on error.
+ virtual int32_t SetRenderDelay(uint32_t timeMS) = 0;
+
+ // The total delay desired by the VCM. Can be less than the minimum
+ // delay set with SetMinimumPlayoutDelay.
+ //
+ // Return value : Total delay in ms, on success.
+ // < 0, on error.
+ virtual int32_t Delay() const = 0;
+
+ // Returns the number of packets discarded by the jitter buffer due to being
+ // too late. This can include duplicated packets which arrived after the
+ // frame was sent to the decoder. Therefore packets which were prematurely
+ // NACKed will be counted.
+ virtual uint32_t DiscardedPackets() const = 0;
+
+ // Robustness APIs
+
+ // Set the receiver robustness mode. The mode decides how the receiver
+ // responds to losses in the stream. The type of counter-measure (soft or
+ // hard NACK, dual decoder, RPS, etc.) is selected through the
+ // robustnessMode parameter. The errorMode parameter decides if it is
+ // allowed to display frames corrupted by losses. Note that not all
+ // combinations of the two parameters are feasible. An error will be
+ // returned for invalid combinations.
+ // Input:
+ // - robustnessMode : selected robustness mode.
+ // - errorMode : selected error mode.
+ //
+ // Return value : VCM_OK, on success;
+ // < 0, on error.
+ virtual int SetReceiverRobustnessMode(ReceiverRobustness robustnessMode,
+ VCMDecodeErrorMode errorMode) = 0;
+
+ // Set the decode error mode. The mode decides which errors (if any) are
+ // allowed in decodable frames. Note that setting decode_error_mode to
+ // anything other than kWithErrors without enabling nack will cause
+ // long-term freezes (resulting from frequent key frame requests) if
+ // packet loss occurs.
+ virtual void SetDecodeErrorMode(VCMDecodeErrorMode decode_error_mode) = 0;
+
+ // Sets the maximum number of sequence numbers that we are allowed to NACK
+ // and the oldest sequence number that we will consider to NACK. If a
+ // sequence number older than |max_packet_age_to_nack| is missing
+ // a key frame will be requested. A key frame will also be requested if the
+ // time of incomplete or non-continuous frames in the jitter buffer is above
+ // |max_incomplete_time_ms|.
+ virtual void SetNackSettings(size_t max_nack_list_size,
+ int max_packet_age_to_nack,
+ int max_incomplete_time_ms) = 0;
+
+ // Setting a desired delay to the VCM receiver. Video rendering will be
+ // delayed by at least desired_delay_ms.
+ virtual int SetMinReceiverDelay(int desired_delay_ms) = 0;
+
+ // Lets the sender suspend video when the rate drops below an internal
+ // threshold, and resume when the rate goes back up above the threshold plus
+ // a hysteresis window.
+ virtual void SuspendBelowMinBitrate() = 0;
+
+ // Returns true if SuspendBelowMinBitrate is engaged and the video has been
+ // suspended due to bandwidth limitations; otherwise false.
+ virtual bool VideoSuspended() const = 0;
+
+ virtual void RegisterPreDecodeImageCallback(
+ EncodedImageCallback* observer) = 0;
+ virtual void RegisterPostEncodeImageCallback(
+ EncodedImageCallback* post_encode_callback) = 0;
+ // Releases pending decode calls, permitting faster thread shutdown.
+ virtual void TriggerDecoderShutdown() = 0;
+};
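+
+// Send-side usage sketch (illustrative; |clock|, |rate_observer|,
+// |qm_callback|, |transport| and |frame| are assumed to exist, and
+// VideoCodec::startBitrate is assumed to be in kbps):
+//
+//   VideoCodingModule* vcm =
+//       VideoCodingModule::Create(clock, rate_observer, qm_callback);
+//   VideoCodec codec;
+//   VideoCodingModule::Codec(kVideoCodecVP8, &codec);
+//   vcm->RegisterSendCodec(&codec, 1 /* cores */, 1200 /* max payload */);
+//   vcm->RegisterTransportCallback(transport);
+//   vcm->SetChannelParameters(codec.startBitrate * 1000 /* kbps -> bps */,
+//                             0 /* loss */, 100 /* rtt ms */);
+//   vcm->AddVideoFrame(frame);  // Once per captured frame.
+//   VideoCodingModule::Destroy(vcm);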
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_INCLUDE_VIDEO_CODING_H_
diff --git a/webrtc/modules/video_coding/include/video_coding_defines.h b/webrtc/modules/video_coding/include/video_coding_defines.h
new file mode 100644
index 0000000000..673a02b713
--- /dev/null
+++ b/webrtc/modules/video_coding/include/video_coding_defines.h
@@ -0,0 +1,198 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_INCLUDE_VIDEO_CODING_DEFINES_H_
+#define WEBRTC_MODULES_VIDEO_CODING_INCLUDE_VIDEO_CODING_DEFINES_H_
+
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/typedefs.h"
+#include "webrtc/video_frame.h"
+
+namespace webrtc {
+
+// Error codes
+#define VCM_FRAME_NOT_READY 3
+#define VCM_REQUEST_SLI 2
+#define VCM_MISSING_CALLBACK 1
+#define VCM_OK 0
+#define VCM_GENERAL_ERROR -1
+#define VCM_LEVEL_EXCEEDED -2
+#define VCM_MEMORY -3
+#define VCM_PARAMETER_ERROR -4
+#define VCM_UNKNOWN_PAYLOAD -5
+#define VCM_CODEC_ERROR -6
+#define VCM_UNINITIALIZED -7
+#define VCM_NO_CODEC_REGISTERED -8
+#define VCM_JITTER_BUFFER_ERROR -9
+#define VCM_OLD_PACKET_ERROR -10
+#define VCM_NO_FRAME_DECODED -11
+#define VCM_ERROR_REQUEST_SLI -12
+#define VCM_NOT_IMPLEMENTED -20
+
+enum { kDefaultStartBitrateKbps = 300 };
+
+enum VCMVideoProtection {
+ kProtectionNone,
+ kProtectionNack,
+ kProtectionFEC,
+ kProtectionNackFEC,
+};
+
+enum VCMTemporalDecimation {
+ kBitrateOverUseDecimation,
+};
+
+struct VCMFrameCount {
+ uint32_t numKeyFrames;
+ uint32_t numDeltaFrames;
+};
+
+// Callback class used for sending data ready to be packetized
+class VCMPacketizationCallback {
+ public:
+ virtual int32_t SendData(uint8_t payloadType,
+ const EncodedImage& encoded_image,
+ const RTPFragmentationHeader& fragmentationHeader,
+ const RTPVideoHeader* rtpVideoHdr) = 0;
+
+ virtual void OnEncoderImplementationName(const char* implementation_name) {}
+
+ protected:
+ virtual ~VCMPacketizationCallback() {}
+};
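+
+// Minimal implementation sketch (illustrative; |rtp_sender_| is a
+// hypothetical member that hands the data to an RTP sender):
+//
+//   class MyPacketizer : public VCMPacketizationCallback {
+//    public:
+//     int32_t SendData(uint8_t payload_type,
+//                      const EncodedImage& image,
+//                      const RTPFragmentationHeader& frag,
+//                      const RTPVideoHeader* header) override {
+//       return rtp_sender_->Send(payload_type, image, frag, header) ? 0 : -1;
+//     }
+//   };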
+
+// Callback class used for passing decoded frames which are ready to be
+// rendered.
+class VCMReceiveCallback {
+ public:
+ virtual int32_t FrameToRender(VideoFrame& videoFrame) = 0; // NOLINT
+ virtual int32_t ReceivedDecodedReferenceFrame(const uint64_t pictureId) {
+ return -1;
+ }
+ // Called when the current receive codec changes.
+ virtual void OnIncomingPayloadType(int payload_type) {}
+ virtual void OnDecoderImplementationName(const char* implementation_name) {}
+
+ protected:
+ virtual ~VCMReceiveCallback() {}
+};
+
+// Callback class used for informing the user of the bit rate and frame rate
+// produced by the encoder.
+class VCMSendStatisticsCallback {
+ public:
+ virtual int32_t SendStatistics(const uint32_t bitRate,
+ const uint32_t frameRate) = 0;
+
+ protected:
+ virtual ~VCMSendStatisticsCallback() {}
+};
+
+// Callback class used for informing the user of the incoming bit rate and
+// frame rate.
+class VCMReceiveStatisticsCallback {
+ public:
+ virtual void OnReceiveRatesUpdated(uint32_t bitRate, uint32_t frameRate) = 0;
+ virtual void OnDiscardedPacketsUpdated(int discarded_packets) = 0;
+ virtual void OnFrameCountsUpdated(const FrameCounts& frame_counts) = 0;
+
+ protected:
+ virtual ~VCMReceiveStatisticsCallback() {}
+};
+
+// Callback class used for informing the user of decode timing info.
+class VCMDecoderTimingCallback {
+ public:
+ virtual void OnDecoderTiming(int decode_ms,
+ int max_decode_ms,
+ int current_delay_ms,
+ int target_delay_ms,
+ int jitter_buffer_ms,
+ int min_playout_delay_ms,
+ int render_delay_ms) = 0;
+
+ protected:
+ virtual ~VCMDecoderTimingCallback() {}
+};
+
+// Callback class used for telling the user how to configure the FEC, and for
+// returning the rates sent during the last second to the VCM.
+class VCMProtectionCallback {
+ public:
+ virtual int ProtectionRequest(const FecProtectionParams* delta_params,
+ const FecProtectionParams* key_params,
+ uint32_t* sent_video_rate_bps,
+ uint32_t* sent_nack_rate_bps,
+ uint32_t* sent_fec_rate_bps) = 0;
+
+ protected:
+ virtual ~VCMProtectionCallback() {}
+};
+
+class VideoEncoderRateObserver {
+ public:
+ virtual ~VideoEncoderRateObserver() {}
+ virtual void OnSetRates(uint32_t bitrate_bps, int framerate) = 0;
+};
+
+// Callback class used for telling the user what frame type is needed to
+// continue decoding. Typically a key frame when the stream has been
+// corrupted in some way.
+class VCMFrameTypeCallback {
+ public:
+ virtual int32_t RequestKeyFrame() = 0;
+ virtual int32_t SliceLossIndicationRequest(const uint64_t pictureId) {
+ return -1;
+ }
+
+ protected:
+ virtual ~VCMFrameTypeCallback() {}
+};
+
+// Callback class used for telling the user which packet sequence numbers are
+// currently missing and need to be resent.
+class VCMPacketRequestCallback {
+ public:
+ virtual int32_t ResendPackets(const uint16_t* sequenceNumbers,
+ uint16_t length) = 0;
+
+ protected:
+ virtual ~VCMPacketRequestCallback() {}
+};
+
+// Callback used to inform the user of the desired resolution
+// as subscribed by Media Optimization (Quality Modes).
+class VCMQMSettingsCallback {
+ public:
+ virtual int32_t SetVideoQMSettings(const uint32_t frameRate,
+ const uint32_t width,
+ const uint32_t height) = 0;
+
+ virtual void SetTargetFramerate(int frame_rate) = 0;
+
+ protected:
+ virtual ~VCMQMSettingsCallback() {}
+};
+
+// Callback class used for telling the user about the size (in time) of the
+// render buffer, that is the size in time of the complete continuous frames.
+class VCMRenderBufferSizeCallback {
+ public:
+ virtual void RenderBufferSizeMs(int buffer_size_ms) = 0;
+
+ protected:
+ virtual ~VCMRenderBufferSizeCallback() {}
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_INCLUDE_VIDEO_CODING_DEFINES_H_
diff --git a/webrtc/modules/video_coding/include/video_error_codes.h b/webrtc/modules/video_coding/include/video_error_codes.h
new file mode 100644
index 0000000000..360aa87744
--- /dev/null
+++ b/webrtc/modules/video_coding/include/video_error_codes.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_INCLUDE_VIDEO_ERROR_CODES_H_
+#define WEBRTC_MODULES_VIDEO_CODING_INCLUDE_VIDEO_ERROR_CODES_H_
+
+// NOTE: in sync with video_coding_module_defines.h
+
+// Define return values
+
+#define WEBRTC_VIDEO_CODEC_REQUEST_SLI 2
+#define WEBRTC_VIDEO_CODEC_NO_OUTPUT 1
+#define WEBRTC_VIDEO_CODEC_OK 0
+#define WEBRTC_VIDEO_CODEC_ERROR -1
+#define WEBRTC_VIDEO_CODEC_LEVEL_EXCEEDED -2
+#define WEBRTC_VIDEO_CODEC_MEMORY -3
+#define WEBRTC_VIDEO_CODEC_ERR_PARAMETER -4
+#define WEBRTC_VIDEO_CODEC_ERR_SIZE -5
+#define WEBRTC_VIDEO_CODEC_TIMEOUT -6
+#define WEBRTC_VIDEO_CODEC_UNINITIALIZED -7
+#define WEBRTC_VIDEO_CODEC_ERR_REQUEST_SLI -12
+#define WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE -13
+#define WEBRTC_VIDEO_CODEC_TARGET_BITRATE_OVERSHOOT -14
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_INCLUDE_VIDEO_ERROR_CODES_H_
diff --git a/webrtc/modules/video_coding/inter_frame_delay.cc b/webrtc/modules/video_coding/inter_frame_delay.cc
new file mode 100644
index 0000000000..fb3b54d204
--- /dev/null
+++ b/webrtc/modules/video_coding/inter_frame_delay.cc
@@ -0,0 +1,107 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_coding/inter_frame_delay.h"
+
+namespace webrtc {
+
+VCMInterFrameDelay::VCMInterFrameDelay(int64_t currentWallClock) {
+ Reset(currentWallClock);
+}
+
+// Resets the delay estimate
+void VCMInterFrameDelay::Reset(int64_t currentWallClock) {
+ _zeroWallClock = currentWallClock;
+ _wrapArounds = 0;
+ _prevWallClock = 0;
+ _prevTimestamp = 0;
+ _dTS = 0;
+}
+
+// Calculates the delay of a frame with the given timestamp.
+// This method is called when the frame is complete.
+bool VCMInterFrameDelay::CalculateDelay(uint32_t timestamp,
+ int64_t* delay,
+ int64_t currentWallClock) {
+ if (_prevWallClock == 0) {
+ // First set of data, initialization, wait for next frame
+ _prevWallClock = currentWallClock;
+ _prevTimestamp = timestamp;
+ *delay = 0;
+ return true;
+ }
+
+ int32_t prevWrapArounds = _wrapArounds;
+ CheckForWrapArounds(timestamp);
+
+ // This will be -1 for backward wrap arounds and +1 for forward wrap arounds
+ int32_t wrapAroundsSincePrev = _wrapArounds - prevWrapArounds;
+
+ // Account for reordering in jitter variance estimate in the future?
+ // Note that this also captures incomplete frames which are grabbed
+ // for decoding after a later frame has been complete, i.e. real
+ // packet losses.
+ if ((wrapAroundsSincePrev == 0 && timestamp < _prevTimestamp) ||
+ wrapAroundsSincePrev < 0) {
+ *delay = 0;
+ return false;
+ }
+
+ // Compute the compensated timestamp difference and convert it to ms and
+ // round it to closest integer.
+ _dTS = static_cast<int64_t>(
+ (timestamp + wrapAroundsSincePrev * (static_cast<int64_t>(1) << 32) -
+ _prevTimestamp) /
+ 90.0 +
+ 0.5);
+
+ // frameDelay is the difference of dT and dTS -- i.e. the difference of
+ // the wall clock time difference and the timestamp difference between
+ // two following frames.
+ *delay = static_cast<int64_t>(currentWallClock - _prevWallClock - _dTS);
+
+ _prevTimestamp = timestamp;
+ _prevWallClock = currentWallClock;
+
+ return true;
+}
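+
+// Worked example: with 90 kHz RTP timestamps, two frames 3000 ticks apart
+// give _dTS = 3000 / 90.0 + 0.5, truncated to 33 ms. If the frames arrived
+// 40 ms apart in wall-clock time, the computed delay is 40 - 33 = 7 ms of
+// jitter.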
+
+// Returns the current difference between incoming timestamps
+uint32_t VCMInterFrameDelay::CurrentTimeStampDiffMs() const {
+ if (_dTS < 0) {
+ return 0;
+ }
+ return static_cast<uint32_t>(_dTS);
+}
+
+// Investigates if the timestamp clock has overflowed since the last
+// timestamp and keeps track of the number of wrap arounds since reset.
+void VCMInterFrameDelay::CheckForWrapArounds(uint32_t timestamp) {
+ if (timestamp < _prevTimestamp) {
+ // This difference will probably be less than -2^31 if we have had a wrap
+ // around (e.g. timestamp = 1, _prevTimestamp = 2^32 - 1). Since it is
+ // cast to a Word32, it should be positive.
+ if (static_cast<int32_t>(timestamp - _prevTimestamp) > 0) {
+ // Forward wrap around
+ _wrapArounds++;
+ }
+ // This difference will probably be less than -2^31 if we have had a
+ // backward wrap around. Since it is cast to a Word32, it should be
+ // positive.
+ } else if (static_cast<int32_t>(_prevTimestamp - timestamp) > 0) {
+ // Backward wrap around
+ _wrapArounds--;
+ }
+}
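+
+// Numeric trace: with _prevTimestamp = 0xFFFFFFFF and timestamp = 1, the
+// unsigned difference 1 - 0xFFFFFFFF wraps to 2, which is positive as an
+// int32_t, so _wrapArounds is incremented.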
+} // namespace webrtc
diff --git a/webrtc/modules/video_coding/inter_frame_delay.h b/webrtc/modules/video_coding/inter_frame_delay.h
new file mode 100644
index 0000000000..94b73908bb
--- /dev/null
+++ b/webrtc/modules/video_coding/inter_frame_delay.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_INTER_FRAME_DELAY_H_
+#define WEBRTC_MODULES_VIDEO_CODING_INTER_FRAME_DELAY_H_
+
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+class VCMInterFrameDelay {
+ public:
+ explicit VCMInterFrameDelay(int64_t currentWallClock);
+
+ // Resets the estimate to its initial state.
+ void Reset(int64_t currentWallClock);
+
+ // Calculates the delay of a frame with the given timestamp.
+ // This method is called when the frame is complete.
+ //
+ // Input:
+ // - timestamp : RTP timestamp of a received frame
+ // - *delay : Pointer to memory where the result should be
+ // stored
+ // - currentWallClock : The current time in milliseconds.
+ // Should be -1 for normal operation, only used
+ // for testing.
+ // Return value : true if OK, false when reordered timestamps
+ bool CalculateDelay(uint32_t timestamp,
+ int64_t* delay,
+ int64_t currentWallClock);
+
+ // Returns the current difference between incoming timestamps.
+ //
+ // Return value : Wrap-around compensated difference between
+ // incoming timestamps, in ms.
+ uint32_t CurrentTimeStampDiffMs() const;
+
+ private:
+ // Checks if the RTP timestamp counter has had a wrap around
+ // between the current and the previously received frame.
+ //
+ // Input:
+ // - timestamp : RTP timestamp of the current frame.
+ void CheckForWrapArounds(uint32_t timestamp);
+
+ int64_t _zeroWallClock; // Local timestamp of the first video packet received
+ int32_t _wrapArounds; // Number of wrapArounds detected
+ // The previous timestamp passed to the delay estimate
+ uint32_t _prevTimestamp;
+ // The previous wall clock timestamp used by the delay estimate
+ int64_t _prevWallClock;
+ // Wrap-around compensated difference between incoming timestamps
+ int64_t _dTS;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_INTER_FRAME_DELAY_H_
diff --git a/webrtc/modules/video_coding/internal_defines.h b/webrtc/modules/video_coding/internal_defines.h
new file mode 100644
index 0000000000..e225726dea
--- /dev/null
+++ b/webrtc/modules/video_coding/internal_defines.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_INTERNAL_DEFINES_H_
+#define WEBRTC_MODULES_VIDEO_CODING_INTERNAL_DEFINES_H_
+
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+#define MASK_32_BITS(x) (0xFFFFFFFF & (x))
+
+inline uint32_t MaskWord64ToUWord32(int64_t w64) {
+ return static_cast<uint32_t>(MASK_32_BITS(w64));
+}
+
+#define VCM_MAX(a, b) (((a) > (b)) ? (a) : (b))
+#define VCM_MIN(a, b) (((a) < (b)) ? (a) : (b))
+
+#define VCM_DEFAULT_CODEC_WIDTH 352
+#define VCM_DEFAULT_CODEC_HEIGHT 288
+#define VCM_DEFAULT_FRAME_RATE 30
+#define VCM_MIN_BITRATE 30
+#define VCM_FLUSH_INDICATOR 4
+
+#define VCM_NO_RECEIVER_ID 0
+
+inline int32_t VCMId(const int32_t vcmId, const int32_t receiverId = 0) {
+ return static_cast<int32_t>((vcmId << 16) + receiverId);
+}
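+
+// Example: VCMId(1, 2) packs to (1 << 16) + 2 = 0x00010002 = 65538.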
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_INTERNAL_DEFINES_H_
diff --git a/webrtc/modules/video_coding/jitter_buffer.cc b/webrtc/modules/video_coding/jitter_buffer.cc
new file mode 100644
index 0000000000..640bcb4f22
--- /dev/null
+++ b/webrtc/modules/video_coding/jitter_buffer.cc
@@ -0,0 +1,1346 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "webrtc/modules/video_coding/jitter_buffer.h"
+
+#include <assert.h>
+
+#include <algorithm>
+#include <utility>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/trace_event.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "webrtc/modules/video_coding/include/video_coding.h"
+#include "webrtc/modules/video_coding/frame_buffer.h"
+#include "webrtc/modules/video_coding/inter_frame_delay.h"
+#include "webrtc/modules/video_coding/internal_defines.h"
+#include "webrtc/modules/video_coding/jitter_buffer_common.h"
+#include "webrtc/modules/video_coding/jitter_estimator.h"
+#include "webrtc/modules/video_coding/packet.h"
+#include "webrtc/system_wrappers/include/clock.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/event_wrapper.h"
+#include "webrtc/system_wrappers/include/metrics.h"
+
+namespace webrtc {
+
+// Interval for cleaning up old SS data.
+static const uint32_t kSsCleanupIntervalSec = 60;
+
+// Use this rtt if no value has been reported.
+static const int64_t kDefaultRtt = 200;
+
+// Request a keyframe if no continuous frame has been received for this
+// number of milliseconds and NACKs are disabled.
+static const int64_t kMaxDiscontinuousFramesTime = 1000;
+
+typedef std::pair<uint32_t, VCMFrameBuffer*> FrameListPair;
+
+bool IsKeyFrame(FrameListPair pair) {
+ return pair.second->FrameType() == kVideoFrameKey;
+}
+
+bool HasNonEmptyState(FrameListPair pair) {
+ return pair.second->GetState() != kStateEmpty;
+}
+
+void FrameList::InsertFrame(VCMFrameBuffer* frame) {
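+ // rbegin().base() equals end(); using it as an insertion hint makes the
+ // common in-order arrival case amortized constant time.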
+ insert(rbegin().base(), FrameListPair(frame->TimeStamp(), frame));
+}
+
+VCMFrameBuffer* FrameList::PopFrame(uint32_t timestamp) {
+ FrameList::iterator it = find(timestamp);
+ if (it == end())
+ return NULL;
+ VCMFrameBuffer* frame = it->second;
+ erase(it);
+ return frame;
+}
+
+VCMFrameBuffer* FrameList::Front() const {
+ return begin()->second;
+}
+
+VCMFrameBuffer* FrameList::Back() const {
+ return rbegin()->second;
+}
+
+int FrameList::RecycleFramesUntilKeyFrame(FrameList::iterator* key_frame_it,
+ UnorderedFrameList* free_frames) {
+ int drop_count = 0;
+ FrameList::iterator it = begin();
+ while (!empty()) {
+ // Throw at least one frame.
+ it->second->Reset();
+ free_frames->push_back(it->second);
+ erase(it++);
+ ++drop_count;
+ if (it != end() && it->second->FrameType() == kVideoFrameKey) {
+ *key_frame_it = it;
+ return drop_count;
+ }
+ }
+ *key_frame_it = end();
+ return drop_count;
+}
+
+void FrameList::CleanUpOldOrEmptyFrames(VCMDecodingState* decoding_state,
+ UnorderedFrameList* free_frames) {
+ while (!empty()) {
+ VCMFrameBuffer* oldest_frame = Front();
+ bool remove_frame = false;
+ if (oldest_frame->GetState() == kStateEmpty && size() > 1) {
+ // This frame is empty, try to update the last decoded state and drop it
+ // if successful.
+ remove_frame = decoding_state->UpdateEmptyFrame(oldest_frame);
+ } else {
+ remove_frame = decoding_state->IsOldFrame(oldest_frame);
+ }
+ if (!remove_frame) {
+ break;
+ }
+ free_frames->push_back(oldest_frame);
+ TRACE_EVENT_INSTANT1("webrtc", "JB::OldOrEmptyFrameDropped", "timestamp",
+ oldest_frame->TimeStamp());
+ erase(begin());
+ }
+}
+
+void FrameList::Reset(UnorderedFrameList* free_frames) {
+ while (!empty()) {
+ begin()->second->Reset();
+ free_frames->push_back(begin()->second);
+ erase(begin());
+ }
+}
+
+bool Vp9SsMap::Insert(const VCMPacket& packet) {
+ if (!packet.codecSpecificHeader.codecHeader.VP9.ss_data_available)
+ return false;
+
+ ss_map_[packet.timestamp] = packet.codecSpecificHeader.codecHeader.VP9.gof;
+ return true;
+}
+
+void Vp9SsMap::Reset() {
+ ss_map_.clear();
+}
+
+bool Vp9SsMap::Find(uint32_t timestamp, SsMap::iterator* it_out) {
+ bool found = false;
+ for (SsMap::iterator it = ss_map_.begin(); it != ss_map_.end(); ++it) {
+ if (it->first == timestamp || IsNewerTimestamp(timestamp, it->first)) {
+ *it_out = it;
+ found = true;
+ }
+ }
+ return found;
+}
+
+void Vp9SsMap::RemoveOld(uint32_t timestamp) {
+ if (!TimeForCleanup(timestamp))
+ return;
+
+ SsMap::iterator it;
+ if (!Find(timestamp, &it))
+ return;
+
+ ss_map_.erase(ss_map_.begin(), it);
+ AdvanceFront(timestamp);
+}
+
+bool Vp9SsMap::TimeForCleanup(uint32_t timestamp) const {
+ if (ss_map_.empty() || !IsNewerTimestamp(timestamp, ss_map_.begin()->first))
+ return false;
+
+ uint32_t diff = timestamp - ss_map_.begin()->first;
+ return diff / kVideoPayloadTypeFrequency >= kSsCleanupIntervalSec;
+}
+
+void Vp9SsMap::AdvanceFront(uint32_t timestamp) {
+ RTC_DCHECK(!ss_map_.empty());
+ GofInfoVP9 gof = ss_map_.begin()->second;
+ ss_map_.erase(ss_map_.begin());
+ ss_map_[timestamp] = gof;
+}
+
+// TODO(asapersson): Update according to updates in RTP payload profile.
+bool Vp9SsMap::UpdatePacket(VCMPacket* packet) {
+ uint8_t gof_idx = packet->codecSpecificHeader.codecHeader.VP9.gof_idx;
+ if (gof_idx == kNoGofIdx)
+ return false; // No update needed.
+
+ SsMap::iterator it;
+ if (!Find(packet->timestamp, &it))
+ return false; // Corresponding SS not yet received.
+
+ if (gof_idx >= it->second.num_frames_in_gof)
+ return false; // Assume corresponding SS not yet received.
+
+ RTPVideoHeaderVP9* vp9 = &packet->codecSpecificHeader.codecHeader.VP9;
+ vp9->temporal_idx = it->second.temporal_idx[gof_idx];
+ vp9->temporal_up_switch = it->second.temporal_up_switch[gof_idx];
+
+ // TODO(asapersson): Set vp9.ref_picture_id[i] and add usage.
+ vp9->num_ref_pics = it->second.num_ref_pics[gof_idx];
+ for (uint8_t i = 0; i < it->second.num_ref_pics[gof_idx]; ++i) {
+ vp9->pid_diff[i] = it->second.pid_diff[gof_idx][i];
+ }
+ return true;
+}
+
+void Vp9SsMap::UpdateFrames(FrameList* frames) {
+ for (const auto& frame_it : *frames) {
+ uint8_t gof_idx =
+ frame_it.second->CodecSpecific()->codecSpecific.VP9.gof_idx;
+ if (gof_idx == kNoGofIdx) {
+ continue;
+ }
+ SsMap::iterator ss_it;
+ if (Find(frame_it.second->TimeStamp(), &ss_it)) {
+ if (gof_idx >= ss_it->second.num_frames_in_gof) {
+ continue; // Assume corresponding SS not yet received.
+ }
+ frame_it.second->SetGofInfo(ss_it->second, gof_idx);
+ }
+ }
+}
+
+VCMJitterBuffer::VCMJitterBuffer(Clock* clock,
+ rtc::scoped_ptr<EventWrapper> event)
+ : clock_(clock),
+ running_(false),
+ crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
+ frame_event_(std::move(event)),
+ max_number_of_frames_(kStartNumberOfFrames),
+ free_frames_(),
+ decodable_frames_(),
+ incomplete_frames_(),
+ last_decoded_state_(),
+ first_packet_since_reset_(true),
+ stats_callback_(NULL),
+ incoming_frame_rate_(0),
+ incoming_frame_count_(0),
+ time_last_incoming_frame_count_(0),
+ incoming_bit_count_(0),
+ incoming_bit_rate_(0),
+ num_consecutive_old_packets_(0),
+ num_packets_(0),
+ num_duplicated_packets_(0),
+ num_discarded_packets_(0),
+ time_first_packet_ms_(0),
+ jitter_estimate_(clock),
+ inter_frame_delay_(clock_->TimeInMilliseconds()),
+ rtt_ms_(kDefaultRtt),
+ nack_mode_(kNoNack),
+ low_rtt_nack_threshold_ms_(-1),
+ high_rtt_nack_threshold_ms_(-1),
+ missing_sequence_numbers_(SequenceNumberLessThan()),
+ max_nack_list_size_(0),
+ max_packet_age_to_nack_(0),
+ max_incomplete_time_ms_(0),
+ decode_error_mode_(kNoErrors),
+ average_packets_per_frame_(0.0f),
+ frame_counter_(0) {
+ for (int i = 0; i < kStartNumberOfFrames; i++)
+ free_frames_.push_back(new VCMFrameBuffer());
+}
+
+VCMJitterBuffer::~VCMJitterBuffer() {
+ Stop();
+ for (UnorderedFrameList::iterator it = free_frames_.begin();
+ it != free_frames_.end(); ++it) {
+ delete *it;
+ }
+ for (FrameList::iterator it = incomplete_frames_.begin();
+ it != incomplete_frames_.end(); ++it) {
+ delete it->second;
+ }
+ for (FrameList::iterator it = decodable_frames_.begin();
+ it != decodable_frames_.end(); ++it) {
+ delete it->second;
+ }
+ delete crit_sect_;
+}
+
+void VCMJitterBuffer::UpdateHistograms() {
+ if (num_packets_ <= 0 || !running_) {
+ return;
+ }
+ int64_t elapsed_sec =
+ (clock_->TimeInMilliseconds() - time_first_packet_ms_) / 1000;
+ if (elapsed_sec < metrics::kMinRunTimeInSeconds) {
+ return;
+ }
+
+ RTC_HISTOGRAM_PERCENTAGE_SPARSE("WebRTC.Video.DiscardedPacketsInPercent",
+ num_discarded_packets_ * 100 / num_packets_);
+ RTC_HISTOGRAM_PERCENTAGE_SPARSE("WebRTC.Video.DuplicatedPacketsInPercent",
+ num_duplicated_packets_ * 100 / num_packets_);
+
+ int total_frames =
+ receive_statistics_.key_frames + receive_statistics_.delta_frames;
+ if (total_frames > 0) {
+ RTC_HISTOGRAM_COUNTS_SPARSE_100(
+ "WebRTC.Video.CompleteFramesReceivedPerSecond",
+ static_cast<int>((total_frames / elapsed_sec) + 0.5f));
+ RTC_HISTOGRAM_COUNTS_SPARSE_1000(
+ "WebRTC.Video.KeyFramesReceivedInPermille",
+ static_cast<int>(
+ (receive_statistics_.key_frames * 1000.0f / total_frames) + 0.5f));
+ }
+}
+
+void VCMJitterBuffer::Start() {
+ CriticalSectionScoped cs(crit_sect_);
+ running_ = true;
+ incoming_frame_count_ = 0;
+ incoming_frame_rate_ = 0;
+ incoming_bit_count_ = 0;
+ incoming_bit_rate_ = 0;
+ time_last_incoming_frame_count_ = clock_->TimeInMilliseconds();
+ receive_statistics_ = FrameCounts();
+
+ num_consecutive_old_packets_ = 0;
+ num_packets_ = 0;
+ num_duplicated_packets_ = 0;
+ num_discarded_packets_ = 0;
+ time_first_packet_ms_ = 0;
+
+ // Start in a non-signaled state.
+ waiting_for_completion_.frame_size = 0;
+ waiting_for_completion_.timestamp = 0;
+ waiting_for_completion_.latest_packet_time = -1;
+ first_packet_since_reset_ = true;
+ rtt_ms_ = kDefaultRtt;
+ last_decoded_state_.Reset();
+}
+
+void VCMJitterBuffer::Stop() {
+ crit_sect_->Enter();
+ UpdateHistograms();
+ running_ = false;
+ last_decoded_state_.Reset();
+
+ // Make sure all frames are free and reset.
+ for (FrameList::iterator it = decodable_frames_.begin();
+ it != decodable_frames_.end(); ++it) {
+ free_frames_.push_back(it->second);
+ }
+ for (FrameList::iterator it = incomplete_frames_.begin();
+ it != incomplete_frames_.end(); ++it) {
+ free_frames_.push_back(it->second);
+ }
+ for (UnorderedFrameList::iterator it = free_frames_.begin();
+ it != free_frames_.end(); ++it) {
+ (*it)->Reset();
+ }
+ decodable_frames_.clear();
+ incomplete_frames_.clear();
+ crit_sect_->Leave();
+ // Make sure we wake up any threads waiting on these events.
+ frame_event_->Set();
+}
+
+bool VCMJitterBuffer::Running() const {
+ CriticalSectionScoped cs(crit_sect_);
+ return running_;
+}
+
+void VCMJitterBuffer::Flush() {
+ CriticalSectionScoped cs(crit_sect_);
+ decodable_frames_.Reset(&free_frames_);
+ incomplete_frames_.Reset(&free_frames_);
+ last_decoded_state_.Reset(); // TODO(mikhal): sync reset.
+ num_consecutive_old_packets_ = 0;
+ // Also reset the jitter and delay estimates
+ jitter_estimate_.Reset();
+ inter_frame_delay_.Reset(clock_->TimeInMilliseconds());
+ waiting_for_completion_.frame_size = 0;
+ waiting_for_completion_.timestamp = 0;
+ waiting_for_completion_.latest_packet_time = -1;
+ first_packet_since_reset_ = true;
+ missing_sequence_numbers_.clear();
+}
+
+// Get received key and delta frames
+FrameCounts VCMJitterBuffer::FrameStatistics() const {
+ CriticalSectionScoped cs(crit_sect_);
+ return receive_statistics_;
+}
+
+int VCMJitterBuffer::num_packets() const {
+ CriticalSectionScoped cs(crit_sect_);
+ return num_packets_;
+}
+
+int VCMJitterBuffer::num_duplicated_packets() const {
+ CriticalSectionScoped cs(crit_sect_);
+ return num_duplicated_packets_;
+}
+
+int VCMJitterBuffer::num_discarded_packets() const {
+ CriticalSectionScoped cs(crit_sect_);
+ return num_discarded_packets_;
+}
+
+// Calculate framerate and bitrate.
+void VCMJitterBuffer::IncomingRateStatistics(unsigned int* framerate,
+ unsigned int* bitrate) {
+ assert(framerate);
+ assert(bitrate);
+ CriticalSectionScoped cs(crit_sect_);
+ const int64_t now = clock_->TimeInMilliseconds();
+ int64_t diff = now - time_last_incoming_frame_count_;
+ if (diff < 1000 && incoming_frame_rate_ > 0 && incoming_bit_rate_ > 0) {
+ // Make sure we report something even though less than
+ // 1 second has passed since last update.
+ *framerate = incoming_frame_rate_;
+ *bitrate = incoming_bit_rate_;
+ } else if (incoming_frame_count_ != 0) {
+ // We have received frame(s) since last call to this function
+
+ // Prepare calculations
+ if (diff <= 0) {
+ diff = 1;
+ }
+ // We add 0.5f for rounding.
+ float rate = 0.5f + ((incoming_frame_count_ * 1000.0f) / diff);
+ if (rate < 1.0f) {
+ rate = 1.0f;
+ }
+
+ // Calculate frame rate
+ // Let r be rate.
+ // r(0) = 1000*framecount/delta_time.
+ // (I.e. frames per second since last calculation.)
+ // frame_rate = r(0)/2 + r(-1)/2
+ // (I.e. fr/s average this and the previous calculation.)
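+ // Worked example (hypothetical numbers): if the previous rate r(-1) was
+ // 30 fps and 28 frames arrived over the last 900 ms, then
+ // r(0) = 0.5 + 28 * 1000 / 900, truncated to 31, and the reported frame
+ // rate is (30 + 31) / 2 = 30 fps (integer division).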
+ *framerate = (incoming_frame_rate_ + static_cast<unsigned int>(rate)) / 2;
+ incoming_frame_rate_ = static_cast<unsigned int>(rate);
+
+ // Calculate bit rate
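+ // Note: 10 * ((100 * bits) / diff_ms) equals bits * 1000 / diff_ms (bps)
+ // at 10 bps resolution; the factoring presumably keeps the intermediate
+ // product smaller. E.g. 240000 bits over 1000 ms gives
+ // 10 * (24000000 / 1000) = 240000 bps (240 kbps).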
+ if (incoming_bit_count_ == 0) {
+ *bitrate = 0;
+ } else {
+ *bitrate =
+ 10 * ((100 * incoming_bit_count_) / static_cast<unsigned int>(diff));
+ }
+ incoming_bit_rate_ = *bitrate;
+
+ // Reset count
+ incoming_frame_count_ = 0;
+ incoming_bit_count_ = 0;
+ time_last_incoming_frame_count_ = now;
+
+ } else {
+ // No frames since last call
+ time_last_incoming_frame_count_ = clock_->TimeInMilliseconds();
+ *framerate = 0;
+ *bitrate = 0;
+ incoming_frame_rate_ = 0;
+ incoming_bit_rate_ = 0;
+ }
+}
+
+// Answers the question: will the packet sequence be complete if the next
+// frame is grabbed for decoding right now? That is, have we lost a frame
+// between the last decoded frame and the next, or is the next frame missing
+// one or more packets?
+bool VCMJitterBuffer::CompleteSequenceWithNextFrame() {
+ CriticalSectionScoped cs(crit_sect_);
+ // Find the oldest frame ready for the decoder; check sequence number and size.
+ CleanUpOldOrEmptyFrames();
+ if (!decodable_frames_.empty()) {
+ if (decodable_frames_.Front()->GetState() == kStateComplete) {
+ return true;
+ }
+ } else if (incomplete_frames_.size() <= 1) {
+ // Frame not ready to be decoded.
+ return true;
+ }
+ return false;
+}
+
+// Returns immediately if a complete frame is available, otherwise waits up to
+// |max_wait_time_ms| ms for one to arrive; |max_wait_time_ms| is decided by
+// the caller.
+bool VCMJitterBuffer::NextCompleteTimestamp(uint32_t max_wait_time_ms,
+ uint32_t* timestamp) {
+ crit_sect_->Enter();
+ if (!running_) {
+ crit_sect_->Leave();
+ return false;
+ }
+ CleanUpOldOrEmptyFrames();
+
+ if (decodable_frames_.empty() ||
+ decodable_frames_.Front()->GetState() != kStateComplete) {
+ const int64_t end_wait_time_ms =
+ clock_->TimeInMilliseconds() + max_wait_time_ms;
+ int64_t wait_time_ms = max_wait_time_ms;
+ while (wait_time_ms > 0) {
+ crit_sect_->Leave();
+ const EventTypeWrapper ret =
+ frame_event_->Wait(static_cast<uint32_t>(wait_time_ms));
+ crit_sect_->Enter();
+ if (ret == kEventSignaled) {
+ // Are we shutting down the jitter buffer?
+ if (!running_) {
+ crit_sect_->Leave();
+ return false;
+ }
+ // Find the oldest frame ready for the decoder.
+ CleanUpOldOrEmptyFrames();
+ if (decodable_frames_.empty() ||
+ decodable_frames_.Front()->GetState() != kStateComplete) {
+ wait_time_ms = end_wait_time_ms - clock_->TimeInMilliseconds();
+ } else {
+ break;
+ }
+ } else {
+ break;
+ }
+ }
+ }
+ if (decodable_frames_.empty() ||
+ decodable_frames_.Front()->GetState() != kStateComplete) {
+ crit_sect_->Leave();
+ return false;
+ }
+ *timestamp = decodable_frames_.Front()->TimeStamp();
+ crit_sect_->Leave();
+ return true;
+}
+
+bool VCMJitterBuffer::NextMaybeIncompleteTimestamp(uint32_t* timestamp) {
+ CriticalSectionScoped cs(crit_sect_);
+ if (!running_) {
+ return false;
+ }
+ if (decode_error_mode_ == kNoErrors) {
+ // No point to continue, as we are not decoding with errors.
+ return false;
+ }
+
+ CleanUpOldOrEmptyFrames();
+
+ VCMFrameBuffer* oldest_frame;
+ if (decodable_frames_.empty()) {
+ if (nack_mode_ != kNoNack || incomplete_frames_.size() <= 1) {
+ return false;
+ }
+ oldest_frame = incomplete_frames_.Front();
+ // Frame will only be removed from buffer if it is complete (or decodable).
+ if (oldest_frame->GetState() < kStateComplete) {
+ return false;
+ }
+ } else {
+ oldest_frame = decodable_frames_.Front();
+ // If we have exactly one frame in the buffer, release it only if it is
+ // complete. We know decodable_frames_ is not empty due to the previous
+ // check.
+ if (decodable_frames_.size() == 1 && incomplete_frames_.empty() &&
+ oldest_frame->GetState() != kStateComplete) {
+ return false;
+ }
+ }
+
+ *timestamp = oldest_frame->TimeStamp();
+ return true;
+}
+
+VCMEncodedFrame* VCMJitterBuffer::ExtractAndSetDecode(uint32_t timestamp) {
+ CriticalSectionScoped cs(crit_sect_);
+ if (!running_) {
+ return NULL;
+ }
+ // Extract the frame with the desired timestamp.
+ VCMFrameBuffer* frame = decodable_frames_.PopFrame(timestamp);
+ bool continuous = true;
+ if (!frame) {
+ frame = incomplete_frames_.PopFrame(timestamp);
+ if (frame)
+ continuous = last_decoded_state_.ContinuousFrame(frame);
+ else
+ return NULL;
+ }
+ TRACE_EVENT_ASYNC_STEP0("webrtc", "Video", timestamp, "Extract");
+ // Frame pulled out from jitter buffer, update the jitter estimate.
+ const bool retransmitted = (frame->GetNackCount() > 0);
+ if (retransmitted) {
+ jitter_estimate_.FrameNacked();
+ } else if (frame->Length() > 0) {
+ // Ignore retransmitted and empty frames.
+ if (waiting_for_completion_.latest_packet_time >= 0) {
+ UpdateJitterEstimate(waiting_for_completion_, true);
+ }
+ if (frame->GetState() == kStateComplete) {
+ UpdateJitterEstimate(*frame, false);
+ } else {
+ // Wait for this one to get complete.
+ waiting_for_completion_.frame_size = frame->Length();
+ waiting_for_completion_.latest_packet_time = frame->LatestPacketTimeMs();
+ waiting_for_completion_.timestamp = frame->TimeStamp();
+ }
+ }
+
+ // The state must be changed to decoding before cleaning up zero sized
+ // frames to avoid empty frames being cleaned up and then given to the
+ // decoder. Propagates the missing_frame bit.
+ frame->PrepareForDecode(continuous);
+
+ // We have a frame - update the last decoded state and nack list.
+ last_decoded_state_.SetState(frame);
+ DropPacketsFromNackList(last_decoded_state_.sequence_num());
+
+ if (frame->IsSessionComplete())
+ UpdateAveragePacketsPerFrame(frame->NumPackets());
+
+ return frame;
+}
+
+// Release frame when done with decoding. Should never be used to release
+// frames from within the jitter buffer.
+void VCMJitterBuffer::ReleaseFrame(VCMEncodedFrame* frame) {
+ CriticalSectionScoped cs(crit_sect_);
+ VCMFrameBuffer* frame_buffer = static_cast<VCMFrameBuffer*>(frame);
+ if (frame_buffer) {
+ free_frames_.push_back(frame_buffer);
+ }
+}
+
+// Gets frame to use for this timestamp. If no match, get empty frame.
+VCMFrameBufferEnum VCMJitterBuffer::GetFrame(const VCMPacket& packet,
+ VCMFrameBuffer** frame,
+ FrameList** frame_list) {
+ *frame = incomplete_frames_.PopFrame(packet.timestamp);
+ if (*frame != NULL) {
+ *frame_list = &incomplete_frames_;
+ return kNoError;
+ }
+ *frame = decodable_frames_.PopFrame(packet.timestamp);
+ if (*frame != NULL) {
+ *frame_list = &decodable_frames_;
+ return kNoError;
+ }
+
+ *frame_list = NULL;
+ // No match, return empty frame.
+ *frame = GetEmptyFrame();
+ if (*frame == NULL) {
+ // No free frame! Try to reclaim some...
+ LOG(LS_WARNING) << "Unable to get empty frame; Recycling.";
+ bool found_key_frame = RecycleFramesUntilKeyFrame();
+ *frame = GetEmptyFrame();
+ assert(*frame);
+ if (!found_key_frame) {
+ free_frames_.push_back(*frame);
+ return kFlushIndicator;
+ }
+ }
+ (*frame)->Reset();
+ return kNoError;
+}
+
+int64_t VCMJitterBuffer::LastPacketTime(const VCMEncodedFrame* frame,
+ bool* retransmitted) const {
+ assert(retransmitted);
+ CriticalSectionScoped cs(crit_sect_);
+ const VCMFrameBuffer* frame_buffer =
+ static_cast<const VCMFrameBuffer*>(frame);
+ *retransmitted = (frame_buffer->GetNackCount() > 0);
+ return frame_buffer->LatestPacketTimeMs();
+}
+
+VCMFrameBufferEnum VCMJitterBuffer::InsertPacket(const VCMPacket& packet,
+ bool* retransmitted) {
+ CriticalSectionScoped cs(crit_sect_);
+
+ ++num_packets_;
+ if (num_packets_ == 1) {
+ time_first_packet_ms_ = clock_->TimeInMilliseconds();
+ }
+ // Does this packet belong to an old frame?
+ if (last_decoded_state_.IsOldPacket(&packet)) {
+ // Account only for media packets.
+ if (packet.sizeBytes > 0) {
+ num_discarded_packets_++;
+ num_consecutive_old_packets_++;
+ if (stats_callback_ != NULL)
+ stats_callback_->OnDiscardedPacketsUpdated(num_discarded_packets_);
+ }
+ // Update last decoded sequence number if the packet arrived late and
+ // belongs to a frame with a timestamp equal to the last decoded
+ // timestamp.
+ last_decoded_state_.UpdateOldPacket(&packet);
+ DropPacketsFromNackList(last_decoded_state_.sequence_num());
+
+ // Also see if this old packet made more incomplete frames continuous.
+ FindAndInsertContinuousFramesWithState(last_decoded_state_);
+
+ if (num_consecutive_old_packets_ > kMaxConsecutiveOldPackets) {
+ LOG(LS_WARNING)
+ << num_consecutive_old_packets_
+ << " consecutive old packets received. Flushing the jitter buffer.";
+ Flush();
+ return kFlushIndicator;
+ }
+ return kOldPacket;
+ }
+
+ num_consecutive_old_packets_ = 0;
+
+ VCMFrameBuffer* frame;
+ FrameList* frame_list;
+ const VCMFrameBufferEnum error = GetFrame(packet, &frame, &frame_list);
+ if (error != kNoError)
+ return error;
+
+ int64_t now_ms = clock_->TimeInMilliseconds();
+ // We are keeping track of the first and latest seq numbers, and
+ // the number of wraps to be able to calculate how many packets we expect.
+ if (first_packet_since_reset_) {
+ // Now it's time to start estimating jitter
+ // reset the delay estimate.
+ inter_frame_delay_.Reset(now_ms);
+ }
+
+ // Empty packets may bias the jitter estimate (lacking size component),
+ // therefore don't let empty packets trigger the following updates:
+ if (packet.frameType != kEmptyFrame) {
+ if (waiting_for_completion_.timestamp == packet.timestamp) {
+ // This can get bad if we have a lot of duplicate packets,
+ // we will then count some packet multiple times.
+ waiting_for_completion_.frame_size += packet.sizeBytes;
+ waiting_for_completion_.latest_packet_time = now_ms;
+ } else if (waiting_for_completion_.latest_packet_time >= 0 &&
+ waiting_for_completion_.latest_packet_time + 2000 <= now_ms) {
+ // A packet should never be more than two seconds late
+ UpdateJitterEstimate(waiting_for_completion_, true);
+ waiting_for_completion_.latest_packet_time = -1;
+ waiting_for_completion_.frame_size = 0;
+ waiting_for_completion_.timestamp = 0;
+ }
+ }
+
+ VCMFrameBufferStateEnum previous_state = frame->GetState();
+ // Insert packet.
+ FrameData frame_data;
+ frame_data.rtt_ms = rtt_ms_;
+ frame_data.rolling_average_packets_per_frame = average_packets_per_frame_;
+ VCMFrameBufferEnum buffer_state =
+ frame->InsertPacket(packet, now_ms, decode_error_mode_, frame_data);
+
+ if (previous_state != kStateComplete) {
+ TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Video", frame->TimeStamp(), "timestamp",
+ frame->TimeStamp());
+ }
+
+ if (buffer_state > 0) {
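+ // |sizeBytes| << 3 converts the payload size from bytes to bits.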
+ incoming_bit_count_ += packet.sizeBytes << 3;
+ if (first_packet_since_reset_) {
+ latest_received_sequence_number_ = packet.seqNum;
+ first_packet_since_reset_ = false;
+ } else {
+ if (IsPacketRetransmitted(packet)) {
+ frame->IncrementNackCount();
+ }
+ if (!UpdateNackList(packet.seqNum) &&
+ packet.frameType != kVideoFrameKey) {
+ buffer_state = kFlushIndicator;
+ }
+
+ latest_received_sequence_number_ =
+ LatestSequenceNumber(latest_received_sequence_number_, packet.seqNum);
+ }
+ }
+
+ // Is the frame already in the decodable list?
+ bool continuous = IsContinuous(*frame);
+ switch (buffer_state) {
+ case kGeneralError:
+ case kTimeStampError:
+ case kSizeError: {
+ free_frames_.push_back(frame);
+ break;
+ }
+ case kCompleteSession: {
+ if (previous_state != kStateDecodable &&
+ previous_state != kStateComplete) {
+ CountFrame(*frame);
+ if (continuous) {
+ // Signal that we have a complete session.
+ frame_event_->Set();
+ }
+ }
+ FALLTHROUGH();
+ }
+ // Note: There is no break here - continuing to kDecodableSession.
+ case kDecodableSession: {
+ *retransmitted = (frame->GetNackCount() > 0);
+ if (continuous) {
+ decodable_frames_.InsertFrame(frame);
+ FindAndInsertContinuousFrames(*frame);
+ } else {
+ incomplete_frames_.InsertFrame(frame);
+ // If NACKs are enabled, keyframes are triggered by |GetNackList|.
+ if (nack_mode_ == kNoNack &&
+ NonContinuousOrIncompleteDuration() >
+ 90 * kMaxDiscontinuousFramesTime) {
+ return kFlushIndicator;
+ }
+ }
+ break;
+ }
+ case kIncomplete: {
+ if (frame->GetState() == kStateEmpty &&
+ last_decoded_state_.UpdateEmptyFrame(frame)) {
+ free_frames_.push_back(frame);
+ return kNoError;
+ } else {
+ incomplete_frames_.InsertFrame(frame);
+ // If NACKs are enabled, keyframes are triggered by |GetNackList|.
+ if (nack_mode_ == kNoNack &&
+ NonContinuousOrIncompleteDuration() >
+ 90 * kMaxDiscontinuousFramesTime) {
+ return kFlushIndicator;
+ }
+ }
+ break;
+ }
+ case kNoError:
+ case kOutOfBoundsPacket:
+ case kDuplicatePacket: {
+ // Put back the frame where it came from.
+ if (frame_list != NULL) {
+ frame_list->InsertFrame(frame);
+ } else {
+ free_frames_.push_back(frame);
+ }
+ ++num_duplicated_packets_;
+ break;
+ }
+ case kFlushIndicator:
+ free_frames_.push_back(frame);
+ return kFlushIndicator;
+ default:
+ assert(false);
+ }
+ return buffer_state;
+}
+
+bool VCMJitterBuffer::IsContinuousInState(
+ const VCMFrameBuffer& frame,
+ const VCMDecodingState& decoding_state) const {
+ // Is this frame (complete or decodable) and continuous?
+ // kStateDecodable will never be set when decode_error_mode_ is false
+ // as SessionInfo determines this state based on the error mode (and frame
+ // completeness).
+ return (frame.GetState() == kStateComplete ||
+ frame.GetState() == kStateDecodable) &&
+ decoding_state.ContinuousFrame(&frame);
+}
+
+bool VCMJitterBuffer::IsContinuous(const VCMFrameBuffer& frame) const {
+ if (IsContinuousInState(frame, last_decoded_state_)) {
+ return true;
+ }
+ VCMDecodingState decoding_state;
+ decoding_state.CopyFrom(last_decoded_state_);
+ for (FrameList::const_iterator it = decodable_frames_.begin();
+ it != decodable_frames_.end(); ++it) {
+ VCMFrameBuffer* decodable_frame = it->second;
+ if (IsNewerTimestamp(decodable_frame->TimeStamp(), frame.TimeStamp())) {
+ break;
+ }
+ decoding_state.SetState(decodable_frame);
+ if (IsContinuousInState(frame, decoding_state)) {
+ return true;
+ }
+ }
+ return false;
+}
+
+void VCMJitterBuffer::FindAndInsertContinuousFrames(
+ const VCMFrameBuffer& new_frame) {
+ VCMDecodingState decoding_state;
+ decoding_state.CopyFrom(last_decoded_state_);
+ decoding_state.SetState(&new_frame);
+ FindAndInsertContinuousFramesWithState(decoding_state);
+}
+
+void VCMJitterBuffer::FindAndInsertContinuousFramesWithState(
+ const VCMDecodingState& original_decoded_state) {
+ // Copy original_decoded_state so we can move the state forward with each
+ // decodable frame we find.
+ VCMDecodingState decoding_state;
+ decoding_state.CopyFrom(original_decoded_state);
+
+ // When temporal layers are available, we search for a complete or decodable
+ // frame until we hit one of the following:
+ // 1. Continuous base or sync layer.
+ // 2. The end of the list was reached.
+ for (FrameList::iterator it = incomplete_frames_.begin();
+ it != incomplete_frames_.end();) {
+ VCMFrameBuffer* frame = it->second;
+ if (IsNewerTimestamp(original_decoded_state.time_stamp(),
+ frame->TimeStamp())) {
+ ++it;
+ continue;
+ }
+ if (IsContinuousInState(*frame, decoding_state)) {
+ decodable_frames_.InsertFrame(frame);
+ incomplete_frames_.erase(it++);
+ decoding_state.SetState(frame);
+ } else if (frame->TemporalId() <= 0) {
+ break;
+ } else {
+ ++it;
+ }
+ }
+}
+
+uint32_t VCMJitterBuffer::EstimatedJitterMs() {
+ CriticalSectionScoped cs(crit_sect_);
+ // Compute RTT multiplier for estimation.
+ // low_rtt_nack_threshold_ms_ == -1 means no FEC.
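+ // For example (hypothetical values): with low_rtt_nack_threshold_ms_ = 20
+ // and rtt_ms_ = 50, rtt_mult becomes 0 and the returned estimate excludes
+ // the extra delay budget for waiting on retransmissions.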
+ double rtt_mult = 1.0f;
+ if (low_rtt_nack_threshold_ms_ >= 0 &&
+ rtt_ms_ >= low_rtt_nack_threshold_ms_) {
+ // For RTTs above low_rtt_nack_threshold_ms_ we don't apply extra delay
+ // when waiting for retransmissions.
+ rtt_mult = 0.0f;
+ }
+ return jitter_estimate_.GetJitterEstimate(rtt_mult);
+}
+
+void VCMJitterBuffer::UpdateRtt(int64_t rtt_ms) {
+ CriticalSectionScoped cs(crit_sect_);
+ rtt_ms_ = rtt_ms;
+ jitter_estimate_.UpdateRtt(rtt_ms);
+}
+
+void VCMJitterBuffer::SetNackMode(VCMNackMode mode,
+ int64_t low_rtt_nack_threshold_ms,
+ int64_t high_rtt_nack_threshold_ms) {
+ CriticalSectionScoped cs(crit_sect_);
+ nack_mode_ = mode;
+ if (mode == kNoNack) {
+ missing_sequence_numbers_.clear();
+ }
+ assert(low_rtt_nack_threshold_ms >= -1 && high_rtt_nack_threshold_ms >= -1);
+ assert(high_rtt_nack_threshold_ms == -1 ||
+ low_rtt_nack_threshold_ms <= high_rtt_nack_threshold_ms);
+ assert(low_rtt_nack_threshold_ms > -1 || high_rtt_nack_threshold_ms == -1);
+ low_rtt_nack_threshold_ms_ = low_rtt_nack_threshold_ms;
+ high_rtt_nack_threshold_ms_ = high_rtt_nack_threshold_ms;
+ // Don't set a high start rtt if high_rtt_nack_threshold_ms_ is used, to not
+ // disable NACK in |kNack| mode.
+ if (rtt_ms_ == kDefaultRtt && high_rtt_nack_threshold_ms_ != -1) {
+ rtt_ms_ = 0;
+ }
+ if (!WaitForRetransmissions()) {
+ jitter_estimate_.ResetNackCount();
+ }
+}
+
+void VCMJitterBuffer::SetNackSettings(size_t max_nack_list_size,
+ int max_packet_age_to_nack,
+ int max_incomplete_time_ms) {
+ CriticalSectionScoped cs(crit_sect_);
+ assert(max_packet_age_to_nack >= 0);
+ assert(max_incomplete_time_ms >= 0);
+ max_nack_list_size_ = max_nack_list_size;
+ max_packet_age_to_nack_ = max_packet_age_to_nack;
+ max_incomplete_time_ms_ = max_incomplete_time_ms;
+}
+
+VCMNackMode VCMJitterBuffer::nack_mode() const {
+ CriticalSectionScoped cs(crit_sect_);
+ return nack_mode_;
+}
+
+int VCMJitterBuffer::NonContinuousOrIncompleteDuration() {
+ if (incomplete_frames_.empty()) {
+ return 0;
+ }
+ uint32_t start_timestamp = incomplete_frames_.Front()->TimeStamp();
+ if (!decodable_frames_.empty()) {
+ start_timestamp = decodable_frames_.Back()->TimeStamp();
+ }
+ return incomplete_frames_.Back()->TimeStamp() - start_timestamp;
+}
+
+uint16_t VCMJitterBuffer::EstimatedLowSequenceNumber(
+ const VCMFrameBuffer& frame) const {
+ assert(frame.GetLowSeqNum() >= 0);
+ if (frame.HaveFirstPacket())
+ return frame.GetLowSeqNum();
+
+ // This estimate is not accurate if more than one packet with lower sequence
+ // number is lost.
+ return frame.GetLowSeqNum() - 1;
+}
+
+std::vector<uint16_t> VCMJitterBuffer::GetNackList(bool* request_key_frame) {
+ CriticalSectionScoped cs(crit_sect_);
+ *request_key_frame = false;
+ if (nack_mode_ == kNoNack) {
+ return std::vector<uint16_t>();
+ }
+ if (last_decoded_state_.in_initial_state()) {
+ VCMFrameBuffer* next_frame = NextFrame();
+ const bool first_frame_is_key = next_frame &&
+ next_frame->FrameType() == kVideoFrameKey &&
+ next_frame->HaveFirstPacket();
+ if (!first_frame_is_key) {
+ bool have_non_empty_frame =
+ decodable_frames_.end() != find_if(decodable_frames_.begin(),
+ decodable_frames_.end(),
+ HasNonEmptyState);
+ if (!have_non_empty_frame) {
+ have_non_empty_frame =
+ incomplete_frames_.end() != find_if(incomplete_frames_.begin(),
+ incomplete_frames_.end(),
+ HasNonEmptyState);
+ }
+ bool found_key_frame = RecycleFramesUntilKeyFrame();
+ if (!found_key_frame) {
+ *request_key_frame = have_non_empty_frame;
+ return std::vector<uint16_t>();
+ }
+ }
+ }
+ if (TooLargeNackList()) {
+ *request_key_frame = !HandleTooLargeNackList();
+ }
+ if (max_incomplete_time_ms_ > 0) {
+ int non_continuous_incomplete_duration =
+ NonContinuousOrIncompleteDuration();
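+ // RTP video timestamps tick at 90 kHz while |max_incomplete_time_ms_| is
+ // in milliseconds, hence the factor of 90 below (e.g. 3000 ms corresponds
+ // to 270000 timestamp ticks).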
+ if (non_continuous_incomplete_duration > 90 * max_incomplete_time_ms_) {
+ LOG_F(LS_WARNING) << "Too long non-decodable duration: "
+ << non_continuous_incomplete_duration << " > "
+ << 90 * max_incomplete_time_ms_;
+ FrameList::reverse_iterator rit = find_if(
+ incomplete_frames_.rbegin(), incomplete_frames_.rend(), IsKeyFrame);
+ if (rit == incomplete_frames_.rend()) {
+ // Request a key frame if we don't have one already.
+ *request_key_frame = true;
+ return std::vector<uint16_t>();
+ } else {
+ // Skip to the last key frame. If it's incomplete we will start
+ // NACKing it.
+ // Note that the estimated low sequence number is correct for VP8
+ // streams because only the first packet of a key frame is marked.
+ last_decoded_state_.Reset();
+ DropPacketsFromNackList(EstimatedLowSequenceNumber(*rit->second));
+ }
+ }
+ }
+ std::vector<uint16_t> nack_list(missing_sequence_numbers_.begin(),
+ missing_sequence_numbers_.end());
+ return nack_list;
+}
+
+void VCMJitterBuffer::SetDecodeErrorMode(VCMDecodeErrorMode error_mode) {
+ CriticalSectionScoped cs(crit_sect_);
+ decode_error_mode_ = error_mode;
+}
+
+VCMFrameBuffer* VCMJitterBuffer::NextFrame() const {
+ if (!decodable_frames_.empty())
+ return decodable_frames_.Front();
+ if (!incomplete_frames_.empty())
+ return incomplete_frames_.Front();
+ return NULL;
+}
+
+bool VCMJitterBuffer::UpdateNackList(uint16_t sequence_number) {
+ if (nack_mode_ == kNoNack) {
+ return true;
+ }
+ // Make sure we don't add packets which are already too old to be decoded.
+ if (!last_decoded_state_.in_initial_state()) {
+ latest_received_sequence_number_ = LatestSequenceNumber(
+ latest_received_sequence_number_, last_decoded_state_.sequence_num());
+ }
+ if (IsNewerSequenceNumber(sequence_number,
+ latest_received_sequence_number_)) {
+ // Push any missing sequence numbers to the NACK list.
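+ // For example, if the latest received sequence number was 100 and
+ // |sequence_number| is 105, the numbers 101-104 are inserted (the loop is
+ // wrap-safe via IsNewerSequenceNumber).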
+ for (uint16_t i = latest_received_sequence_number_ + 1;
+ IsNewerSequenceNumber(sequence_number, i); ++i) {
+ missing_sequence_numbers_.insert(missing_sequence_numbers_.end(), i);
+ TRACE_EVENT_INSTANT1(TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"), "AddNack",
+ "seqnum", i);
+ }
+ if (TooLargeNackList() && !HandleTooLargeNackList()) {
+ LOG(LS_WARNING) << "Requesting key frame due to too large NACK list.";
+ return false;
+ }
+ if (MissingTooOldPacket(sequence_number) &&
+ !HandleTooOldPackets(sequence_number)) {
+ LOG(LS_WARNING) << "Requesting key frame due to missing too old packets";
+ return false;
+ }
+ } else {
+ missing_sequence_numbers_.erase(sequence_number);
+ TRACE_EVENT_INSTANT1(TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"), "RemoveNack",
+ "seqnum", sequence_number);
+ }
+ return true;
+}
+
+bool VCMJitterBuffer::TooLargeNackList() const {
+ return missing_sequence_numbers_.size() > max_nack_list_size_;
+}
+
+bool VCMJitterBuffer::HandleTooLargeNackList() {
+ // Recycle frames until the NACK list is small enough. It is likely cheaper to
+ // request a key frame than to retransmit this many missing packets.
+ LOG_F(LS_WARNING) << "NACK list has grown too large: "
+ << missing_sequence_numbers_.size() << " > "
+ << max_nack_list_size_;
+ bool key_frame_found = false;
+ while (TooLargeNackList()) {
+ key_frame_found = RecycleFramesUntilKeyFrame();
+ }
+ return key_frame_found;
+}
+
+bool VCMJitterBuffer::MissingTooOldPacket(
+ uint16_t latest_sequence_number) const {
+ if (missing_sequence_numbers_.empty()) {
+ return false;
+ }
+ const uint16_t age_of_oldest_missing_packet =
+ latest_sequence_number - *missing_sequence_numbers_.begin();
+ // Recycle frames if the NACK list contains too old sequence numbers as
+ // the packets may have already been dropped by the sender.
+ return age_of_oldest_missing_packet > max_packet_age_to_nack_;
+}
+
+bool VCMJitterBuffer::HandleTooOldPackets(uint16_t latest_sequence_number) {
+ bool key_frame_found = false;
+ const uint16_t age_of_oldest_missing_packet =
+ latest_sequence_number - *missing_sequence_numbers_.begin();
+ LOG_F(LS_WARNING) << "NACK list contains too old sequence numbers: "
+ << age_of_oldest_missing_packet << " > "
+ << max_packet_age_to_nack_;
+ while (MissingTooOldPacket(latest_sequence_number)) {
+ key_frame_found = RecycleFramesUntilKeyFrame();
+ }
+ return key_frame_found;
+}
+
+void VCMJitterBuffer::DropPacketsFromNackList(
+ uint16_t last_decoded_sequence_number) {
+ // Erase all sequence numbers from the NACK list which we won't need any
+ // longer.
+ missing_sequence_numbers_.erase(
+ missing_sequence_numbers_.begin(),
+ missing_sequence_numbers_.upper_bound(last_decoded_sequence_number));
+}
+
+int64_t VCMJitterBuffer::LastDecodedTimestamp() const {
+ CriticalSectionScoped cs(crit_sect_);
+ return last_decoded_state_.time_stamp();
+}
+
+void VCMJitterBuffer::RenderBufferSize(uint32_t* timestamp_start,
+ uint32_t* timestamp_end) {
+ CriticalSectionScoped cs(crit_sect_);
+ CleanUpOldOrEmptyFrames();
+ *timestamp_start = 0;
+ *timestamp_end = 0;
+ if (decodable_frames_.empty()) {
+ return;
+ }
+ *timestamp_start = decodable_frames_.Front()->TimeStamp();
+ *timestamp_end = decodable_frames_.Back()->TimeStamp();
+}
+
+void VCMJitterBuffer::RegisterStatsCallback(
+ VCMReceiveStatisticsCallback* callback) {
+ CriticalSectionScoped cs(crit_sect_);
+ stats_callback_ = callback;
+}
+
+VCMFrameBuffer* VCMJitterBuffer::GetEmptyFrame() {
+ if (free_frames_.empty()) {
+ if (!TryToIncreaseJitterBufferSize()) {
+ return NULL;
+ }
+ }
+ VCMFrameBuffer* frame = free_frames_.front();
+ free_frames_.pop_front();
+ return frame;
+}
+
+bool VCMJitterBuffer::TryToIncreaseJitterBufferSize() {
+ if (max_number_of_frames_ >= kMaxNumberOfFrames)
+ return false;
+ free_frames_.push_back(new VCMFrameBuffer());
+ ++max_number_of_frames_;
+ TRACE_COUNTER1("webrtc", "JBMaxFrames", max_number_of_frames_);
+ return true;
+}
+
+// Recycle oldest frames up to a key frame, used if jitter buffer is completely
+// full.
+bool VCMJitterBuffer::RecycleFramesUntilKeyFrame() {
+ // First release incomplete frames, and only release decodable frames if there
+ // are no incomplete ones.
+ FrameList::iterator key_frame_it;
+ bool key_frame_found = false;
+ int dropped_frames = 0;
+ dropped_frames += incomplete_frames_.RecycleFramesUntilKeyFrame(
+ &key_frame_it, &free_frames_);
+ key_frame_found = key_frame_it != incomplete_frames_.end();
+ if (dropped_frames == 0) {
+ dropped_frames += decodable_frames_.RecycleFramesUntilKeyFrame(
+ &key_frame_it, &free_frames_);
+ key_frame_found = key_frame_it != decodable_frames_.end();
+ }
+ TRACE_EVENT_INSTANT0("webrtc", "JB::RecycleFramesUntilKeyFrame");
+ if (key_frame_found) {
+ LOG(LS_INFO) << "Found key frame while dropping frames.";
+ // Reset last decoded state to make sure the next frame decoded is a key
+ // frame, and start NACKing from here.
+ last_decoded_state_.Reset();
+ DropPacketsFromNackList(EstimatedLowSequenceNumber(*key_frame_it->second));
+ } else if (decodable_frames_.empty()) {
+ // All frames dropped. Reset the decoding state and clear missing sequence
+ // numbers as we're starting fresh.
+ last_decoded_state_.Reset();
+ missing_sequence_numbers_.clear();
+ }
+ return key_frame_found;
+}
+
+// Must be called under the critical section |crit_sect_|.
+void VCMJitterBuffer::CountFrame(const VCMFrameBuffer& frame) {
+ incoming_frame_count_++;
+
+ if (frame.FrameType() == kVideoFrameKey) {
+ TRACE_EVENT_ASYNC_STEP0("webrtc", "Video", frame.TimeStamp(),
+ "KeyComplete");
+ } else {
+ TRACE_EVENT_ASYNC_STEP0("webrtc", "Video", frame.TimeStamp(),
+ "DeltaComplete");
+ }
+
+ // Update receive statistics. All layers are counted, so with layered
+ // coding the sum of key and delta frames may differ from the number of
+ // complete frames.
+ if (frame.IsSessionComplete()) {
+ if (frame.FrameType() == kVideoFrameKey) {
+ ++receive_statistics_.key_frames;
+ } else {
+ ++receive_statistics_.delta_frames;
+ }
+ if (stats_callback_ != NULL)
+ stats_callback_->OnFrameCountsUpdated(receive_statistics_);
+ }
+}
+
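+// This is an exponentially weighted moving average. As a sketch with the
+// constants from jitter_buffer_common.h: once past kFastConvergeThreshold
+// frames, an average of 10 packets/frame and a new frame of 15 packets
+// yields 10 * (1 - 0.2) + 15 * 0.2 = 11 packets/frame.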
+void VCMJitterBuffer::UpdateAveragePacketsPerFrame(int current_number_packets) {
+ if (frame_counter_ > kFastConvergeThreshold) {
+ average_packets_per_frame_ =
+ average_packets_per_frame_ * (1 - kNormalConvergeMultiplier) +
+ current_number_packets * kNormalConvergeMultiplier;
+ } else if (frame_counter_ > 0) {
+ average_packets_per_frame_ =
+ average_packets_per_frame_ * (1 - kFastConvergeMultiplier) +
+ current_number_packets * kFastConvergeMultiplier;
+ frame_counter_++;
+ } else {
+ average_packets_per_frame_ = current_number_packets;
+ frame_counter_++;
+ }
+}
+
+// Must be called under the critical section |crit_sect_|.
+void VCMJitterBuffer::CleanUpOldOrEmptyFrames() {
+ decodable_frames_.CleanUpOldOrEmptyFrames(&last_decoded_state_,
+ &free_frames_);
+ incomplete_frames_.CleanUpOldOrEmptyFrames(&last_decoded_state_,
+ &free_frames_);
+ if (!last_decoded_state_.in_initial_state()) {
+ DropPacketsFromNackList(last_decoded_state_.sequence_num());
+ }
+}
+
+// Must be called from within |crit_sect_|.
+bool VCMJitterBuffer::IsPacketRetransmitted(const VCMPacket& packet) const {
+ return missing_sequence_numbers_.find(packet.seqNum) !=
+ missing_sequence_numbers_.end();
+}
+
+// Must be called under the critical section |crit_sect_|. Should never be
+// called with retransmitted frames, they must be filtered out before this
+// function is called.
+void VCMJitterBuffer::UpdateJitterEstimate(const VCMJitterSample& sample,
+ bool incomplete_frame) {
+ if (sample.latest_packet_time == -1) {
+ return;
+ }
+ UpdateJitterEstimate(sample.latest_packet_time, sample.timestamp,
+ sample.frame_size, incomplete_frame);
+}
+
+// Must be called under the critical section |crit_sect_|. Should never be
+// called with retransmitted frames, they must be filtered out before this
+// function is called.
+void VCMJitterBuffer::UpdateJitterEstimate(const VCMFrameBuffer& frame,
+ bool incomplete_frame) {
+ if (frame.LatestPacketTimeMs() == -1) {
+ return;
+ }
+ // No retransmitted frames should be a part of the jitter
+ // estimate.
+ UpdateJitterEstimate(frame.LatestPacketTimeMs(), frame.TimeStamp(),
+ frame.Length(), incomplete_frame);
+}
+
+// Must be called under the critical section |crit_sect_|. Should never be
+// called with retransmitted frames, they must be filtered out before this
+// function is called.
+void VCMJitterBuffer::UpdateJitterEstimate(int64_t latest_packet_time_ms,
+ uint32_t timestamp,
+ unsigned int frame_size,
+ bool incomplete_frame) {
+ if (latest_packet_time_ms == -1) {
+ return;
+ }
+ int64_t frame_delay;
+ bool not_reordered = inter_frame_delay_.CalculateDelay(
+ timestamp, &frame_delay, latest_packet_time_ms);
+ // Filter out frames which have been reordered in time by the network
+ if (not_reordered) {
+ // Update the jitter estimate with the new samples
+ jitter_estimate_.UpdateEstimate(frame_delay, frame_size, incomplete_frame);
+ }
+}
+
+bool VCMJitterBuffer::WaitForRetransmissions() {
+ if (nack_mode_ == kNoNack) {
+ // NACK disabled -> don't wait for retransmissions.
+ return false;
+ }
+ // Evaluate if the RTT is higher than |high_rtt_nack_threshold_ms_|, and in
+ // that case we don't wait for retransmissions.
+ if (high_rtt_nack_threshold_ms_ >= 0 &&
+ rtt_ms_ >= high_rtt_nack_threshold_ms_) {
+ return false;
+ }
+ return true;
+}
+} // namespace webrtc
diff --git a/webrtc/modules/video_coding/jitter_buffer.h b/webrtc/modules/video_coding/jitter_buffer.h
new file mode 100644
index 0000000000..01e27752d2
--- /dev/null
+++ b/webrtc/modules/video_coding/jitter_buffer.h
@@ -0,0 +1,389 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_JITTER_BUFFER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_JITTER_BUFFER_H_
+
+#include <list>
+#include <map>
+#include <set>
+#include <vector>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/thread_annotations.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/video_coding/include/video_coding.h"
+#include "webrtc/modules/video_coding/include/video_coding_defines.h"
+#include "webrtc/modules/video_coding/decoding_state.h"
+#include "webrtc/modules/video_coding/inter_frame_delay.h"
+#include "webrtc/modules/video_coding/jitter_buffer_common.h"
+#include "webrtc/modules/video_coding/jitter_estimator.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+enum VCMNackMode { kNack, kNoNack };
+
+// forward declarations
+class Clock;
+class EventFactory;
+class EventWrapper;
+class VCMFrameBuffer;
+class VCMPacket;
+class VCMEncodedFrame;
+
+typedef std::list<VCMFrameBuffer*> UnorderedFrameList;
+
+struct VCMJitterSample {
+ VCMJitterSample() : timestamp(0), frame_size(0), latest_packet_time(-1) {}
+ uint32_t timestamp;
+ uint32_t frame_size;
+ int64_t latest_packet_time;
+};
+
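+// Orders timestamps modulo 2^32 so that comparisons survive RTP timestamp
+// wraparound; e.g. timestamp 0x00000005 sorts after 0xFFFFFFFF.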
+class TimestampLessThan {
+ public:
+ bool operator()(uint32_t timestamp1, uint32_t timestamp2) const {
+ return IsNewerTimestamp(timestamp2, timestamp1);
+ }
+};
+
+class FrameList
+ : public std::map<uint32_t, VCMFrameBuffer*, TimestampLessThan> {
+ public:
+ void InsertFrame(VCMFrameBuffer* frame);
+ VCMFrameBuffer* PopFrame(uint32_t timestamp);
+ VCMFrameBuffer* Front() const;
+ VCMFrameBuffer* Back() const;
+ int RecycleFramesUntilKeyFrame(FrameList::iterator* key_frame_it,
+ UnorderedFrameList* free_frames);
+ void CleanUpOldOrEmptyFrames(VCMDecodingState* decoding_state,
+ UnorderedFrameList* free_frames);
+ void Reset(UnorderedFrameList* free_frames);
+};
+
+class Vp9SsMap {
+ public:
+ typedef std::map<uint32_t, GofInfoVP9, TimestampLessThan> SsMap;
+ bool Insert(const VCMPacket& packet);
+ void Reset();
+
+ // Removes SS data that are older than |timestamp|.
+ // The |timestamp| should be an old timestamp, i.e. packets with older
+ // timestamps should no longer be inserted.
+ void RemoveOld(uint32_t timestamp);
+
+ bool UpdatePacket(VCMPacket* packet);
+ void UpdateFrames(FrameList* frames);
+
+ // Public for testing.
+ // Finds the corresponding SS data for the input |timestamp|; returns true
+ // and sets |it| to point at it on success.
+ bool Find(uint32_t timestamp, SsMap::iterator* it);
+
+ private:
+ // These two functions are called by RemoveOld.
+ // Checks if it is time to do a clean up (done each kSsCleanupIntervalSec).
+ bool TimeForCleanup(uint32_t timestamp) const;
+
+ // Advances the oldest SS data to handle timestamp wrap in cases where SS
+ // data are received very seldom (e.g. only once at the beginning, and a
+ // second time when IsNewerTimestamp is not true).
+ void AdvanceFront(uint32_t timestamp);
+
+ SsMap ss_map_;
+};
+
+class VCMJitterBuffer {
+ public:
+ VCMJitterBuffer(Clock* clock, rtc::scoped_ptr<EventWrapper> event);
+
+ ~VCMJitterBuffer();
+
+ // Initializes and starts jitter buffer.
+ void Start();
+
+ // Signals all internal events and stops the jitter buffer.
+ void Stop();
+
+ // Returns true if the jitter buffer is running.
+ bool Running() const;
+
+ // Empty the jitter buffer of all its data.
+ void Flush();
+
+ // Get the number of received frames, by type, since the jitter buffer
+ // was started.
+ FrameCounts FrameStatistics() const;
+
+ // The number of packets discarded by the jitter buffer because the decoder
+ // won't be able to decode them.
+ int num_not_decodable_packets() const;
+
+ // Gets number of packets received.
+ int num_packets() const;
+
+ // Gets number of duplicated packets received.
+ int num_duplicated_packets() const;
+
+ // Gets number of packets discarded by the jitter buffer.
+ int num_discarded_packets() const;
+
+ // Statistics: calculate frame and bit rates.
+ void IncomingRateStatistics(unsigned int* framerate, unsigned int* bitrate);
+
+ // Checks if the packet sequence will be complete if the next frame would be
+ // grabbed for decoding. That is, if a frame has been lost between the
+ // last decoded frame and the next, or if the next frame is missing one
+ // or more packets.
+ bool CompleteSequenceWithNextFrame();
+
+ // Wait |max_wait_time_ms| for a complete frame to arrive.
+ // Returns true once such a frame is found, and sets |timestamp| to the
+ // frame's timestamp. Otherwise returns false.
+ bool NextCompleteTimestamp(uint32_t max_wait_time_ms, uint32_t* timestamp);
+
+ // Locates a frame for decoding (even an incomplete one) without delay.
+ // Returns true once such a frame is found, and sets |timestamp| to the
+ // frame's timestamp. Otherwise returns false.
+ bool NextMaybeIncompleteTimestamp(uint32_t* timestamp);
+
+ // Extract frame corresponding to input timestamp.
+ // Frame will be set to a decoding state.
+ VCMEncodedFrame* ExtractAndSetDecode(uint32_t timestamp);
+
+ // Releases a frame returned from the jitter buffer, should be called when
+ // done with decoding.
+ void ReleaseFrame(VCMEncodedFrame* frame);
+
+ // Returns the time in ms when the latest packet was inserted into the frame.
+ // |retransmitted| is set to true if any of the packets belonging to the
+ // frame have been retransmitted.
+ int64_t LastPacketTime(const VCMEncodedFrame* frame,
+ bool* retransmitted) const;
+
+ // Inserts a packet into a frame returned from GetFrame().
+ // If the return value is <= 0, |frame| is invalidated and the pointer must
+ // be dropped after this function returns.
+ VCMFrameBufferEnum InsertPacket(const VCMPacket& packet, bool* retransmitted);
+
+ // Returns the estimated jitter in milliseconds.
+ uint32_t EstimatedJitterMs();
+
+ // Updates the round-trip time estimate.
+ void UpdateRtt(int64_t rtt_ms);
+
+ // Set the NACK mode. |high_rtt_nack_threshold_ms| is an RTT threshold in ms
+ // above which NACK will be disabled if the NACK mode is |kNack|, -1 meaning
+ // that NACK is always enabled in the |kNack| mode.
+ // |low_rtt_nack_threshold_ms| is an RTT threshold in ms below which we expect
+ // to rely on NACK only, and therefore are using larger buffers to have time
+ // to wait for retransmissions.
+ void SetNackMode(VCMNackMode mode,
+ int64_t low_rtt_nack_threshold_ms,
+ int64_t high_rtt_nack_threshold_ms);
+
+ void SetNackSettings(size_t max_nack_list_size,
+ int max_packet_age_to_nack,
+ int max_incomplete_time_ms);
+
+ // Returns the current NACK mode.
+ VCMNackMode nack_mode() const;
+
+ // Returns a list of the sequence numbers currently missing.
+ std::vector<uint16_t> GetNackList(bool* request_key_frame);
+
+ // Sets the decode error mode. Should not be changed in the middle of a
+ // session. Changes will not influence frames already in the buffer.
+ void SetDecodeErrorMode(VCMDecodeErrorMode error_mode);
+ int64_t LastDecodedTimestamp() const;
+ VCMDecodeErrorMode decode_error_mode() const { return decode_error_mode_; }
+
+ // Used to compute time of complete continuous frames. Returns the timestamps
+ // corresponding to the start and end of the continuous complete buffer.
+ void RenderBufferSize(uint32_t* timestamp_start, uint32_t* timestamp_end);
+
+ void RegisterStatsCallback(VCMReceiveStatisticsCallback* callback);
+
+ private:
+ class SequenceNumberLessThan {
+ public:
+ bool operator()(const uint16_t& sequence_number1,
+ const uint16_t& sequence_number2) const {
+ return IsNewerSequenceNumber(sequence_number2, sequence_number1);
+ }
+ };
+ typedef std::set<uint16_t, SequenceNumberLessThan> SequenceNumberSet;
+
+ // Gets the frame assigned to the timestamp of the packet. May recycle
+ // existing frames if no free frames are available. Returns an error code on
+ // failure, or kNoError on success. |frame_list| is set to the list the
+ // packet was in, or NULL if it was not in a FrameList (a new frame).
+ VCMFrameBufferEnum GetFrame(const VCMPacket& packet,
+ VCMFrameBuffer** frame,
+ FrameList** frame_list)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ // Returns true if |frame| is continuous in |decoding_state|, not taking
+ // decodable frames into account.
+ bool IsContinuousInState(const VCMFrameBuffer& frame,
+ const VCMDecodingState& decoding_state) const
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ // Returns true if |frame| is continuous in the |last_decoded_state_|, taking
+ // all decodable frames into account.
+ bool IsContinuous(const VCMFrameBuffer& frame) const
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ // Looks for frames in |incomplete_frames_| which are continuous in the
+ // provided |decoded_state|. Starts the search from the timestamp of
+ // |decoded_state|.
+ void FindAndInsertContinuousFramesWithState(
+ const VCMDecodingState& decoded_state)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ // Looks for frames in |incomplete_frames_| which are continuous in
+ // |last_decoded_state_| taking all decodable frames into account. Starts
+ // the search from |new_frame|.
+ void FindAndInsertContinuousFrames(const VCMFrameBuffer& new_frame)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ VCMFrameBuffer* NextFrame() const EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ // Returns true if the NACK list was updated to cover sequence numbers up to
+ // |sequence_number|. If false, a key frame is needed to get into a state
+ // where we can continue decoding.
+ bool UpdateNackList(uint16_t sequence_number)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ bool TooLargeNackList() const;
+ // Returns true if the NACK list was reduced without problem. If false, a
+ // key frame is needed to get into a state where we can continue decoding.
+ bool HandleTooLargeNackList() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ bool MissingTooOldPacket(uint16_t latest_sequence_number) const
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ // Returns true if the too-old packets were successfully removed from the
+ // NACK list. If false, a key frame is needed to get into a state where we
+ // can continue decoding.
+ bool HandleTooOldPackets(uint16_t latest_sequence_number)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ // Drops all packets in the NACK list up until |last_decoded_sequence_number|.
+ void DropPacketsFromNackList(uint16_t last_decoded_sequence_number);
+
+ void ReleaseFrameIfNotDecoding(VCMFrameBuffer* frame);
+
+ // Gets an empty frame, creating a new frame if necessary (i.e. increases
+ // jitter buffer size).
+ VCMFrameBuffer* GetEmptyFrame() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ // Attempts to increase the size of the jitter buffer. Returns true on
+ // success, false otherwise.
+ bool TryToIncreaseJitterBufferSize() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ // Recycles oldest frames until a key frame is found. Used if jitter buffer is
+ // completely full. Returns true if a key frame was found.
+ bool RecycleFramesUntilKeyFrame() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ // Updates the frame statistics.
+ // Counts only complete frames, so decodable incomplete frames will not be
+ // counted.
+ void CountFrame(const VCMFrameBuffer& frame)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ // Update rolling average of packets per frame.
+ void UpdateAveragePacketsPerFrame(int current_number_packets);
+
+ // Removes old and empty frames from the jitter buffer's frame lists.
+ // Should only be called prior to actual use.
+ void CleanUpOldOrEmptyFrames() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ // Returns true if |packet| is likely to have been retransmitted.
+ bool IsPacketRetransmitted(const VCMPacket& packet) const;
+
+ // The following three functions update the jitter estimate with the
+ // payload size, receive time and RTP timestamp of a frame.
+ void UpdateJitterEstimate(const VCMJitterSample& sample,
+ bool incomplete_frame);
+ void UpdateJitterEstimate(const VCMFrameBuffer& frame, bool incomplete_frame);
+ void UpdateJitterEstimate(int64_t latest_packet_time_ms,
+ uint32_t timestamp,
+ unsigned int frame_size,
+ bool incomplete_frame);
+
+ // Returns true if we should wait for retransmissions, false otherwise.
+ bool WaitForRetransmissions();
+
+ int NonContinuousOrIncompleteDuration() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ uint16_t EstimatedLowSequenceNumber(const VCMFrameBuffer& frame) const;
+
+ void UpdateHistograms() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ Clock* clock_;
+ // If we are running (have started) or not.
+ bool running_;
+ CriticalSectionWrapper* crit_sect_;
+ // Event to signal when we have a frame ready for decoder.
+ rtc::scoped_ptr<EventWrapper> frame_event_;
+ // Number of allocated frames.
+ int max_number_of_frames_;
+ UnorderedFrameList free_frames_ GUARDED_BY(crit_sect_);
+ FrameList decodable_frames_ GUARDED_BY(crit_sect_);
+ FrameList incomplete_frames_ GUARDED_BY(crit_sect_);
+ VCMDecodingState last_decoded_state_ GUARDED_BY(crit_sect_);
+ bool first_packet_since_reset_;
+
+ // Statistics.
+ VCMReceiveStatisticsCallback* stats_callback_ GUARDED_BY(crit_sect_);
+ // Frame counts for each type (key, delta, ...)
+ FrameCounts receive_statistics_;
+ // Latest calculated frame rates of incoming stream.
+ unsigned int incoming_frame_rate_;
+ unsigned int incoming_frame_count_;
+ int64_t time_last_incoming_frame_count_;
+ unsigned int incoming_bit_count_;
+ unsigned int incoming_bit_rate_;
+ // Number of frames in a row that have been too old.
+ int num_consecutive_old_frames_;
+ // Number of packets in a row that have been too old.
+ int num_consecutive_old_packets_;
+ // Number of packets received.
+ int num_packets_ GUARDED_BY(crit_sect_);
+ // Number of duplicated packets received.
+ int num_duplicated_packets_ GUARDED_BY(crit_sect_);
+ // Number of packets discarded by the jitter buffer.
+ int num_discarded_packets_ GUARDED_BY(crit_sect_);
+ // Time when first packet is received.
+ int64_t time_first_packet_ms_ GUARDED_BY(crit_sect_);
+
+ // Jitter estimation.
+ // Filter for estimating jitter.
+ VCMJitterEstimator jitter_estimate_;
+ // Calculates network delays used for jitter calculations.
+ VCMInterFrameDelay inter_frame_delay_;
+ VCMJitterSample waiting_for_completion_;
+ int64_t rtt_ms_;
+
+ // NACK and retransmissions.
+ VCMNackMode nack_mode_;
+ int64_t low_rtt_nack_threshold_ms_;
+ int64_t high_rtt_nack_threshold_ms_;
+ // Holds the internal NACK list (the missing sequence numbers).
+ SequenceNumberSet missing_sequence_numbers_;
+ uint16_t latest_received_sequence_number_;
+ size_t max_nack_list_size_;
+ int max_packet_age_to_nack_; // Measured in sequence numbers.
+ int max_incomplete_time_ms_;
+
+ VCMDecodeErrorMode decode_error_mode_;
+ // Estimated rolling average of packets per frame
+ float average_packets_per_frame_;
+ // average_packets_per_frame converges fast if we have fewer than this many
+ // frames.
+ int frame_counter_;
+ RTC_DISALLOW_COPY_AND_ASSIGN(VCMJitterBuffer);
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_JITTER_BUFFER_H_
diff --git a/webrtc/modules/video_coding/jitter_buffer_common.h b/webrtc/modules/video_coding/jitter_buffer_common.h
new file mode 100644
index 0000000000..65356f1d1b
--- /dev/null
+++ b/webrtc/modules/video_coding/jitter_buffer_common.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_JITTER_BUFFER_COMMON_H_
+#define WEBRTC_MODULES_VIDEO_CODING_JITTER_BUFFER_COMMON_H_
+
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// Used to estimate rolling average of packets per frame.
+static const float kFastConvergeMultiplier = 0.4f;
+static const float kNormalConvergeMultiplier = 0.2f;
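+// For reference: with the fast multiplier 0.4f, an initial estimate's weight
+// decays to 0.6^5 (about 8%) after five frames; see kFastConvergeThreshold
+// below.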
+
+enum { kMaxNumberOfFrames = 300 };
+enum { kStartNumberOfFrames = 6 };
+enum { kMaxVideoDelayMs = 10000 };
+enum { kPacketsPerFrameMultiplier = 5 };
+enum { kFastConvergeThreshold = 5 };
+
+enum VCMJitterBufferEnum {
+ kMaxConsecutiveOldFrames = 60,
+ kMaxConsecutiveOldPackets = 300,
+ // TODO(sprang): Reduce this limit once codecs don't sometimes wildly
+ // overshoot bitrate target.
+ kMaxPacketsInSession = 1400, // Allows ~2MB frames.
+ kBufferIncStepSizeBytes = 30000, // >20 packets.
+ kMaxJBFrameSizeBytes = 4000000 // Sanity limit: don't go above 4 MB.
+};
+
+enum VCMFrameBufferEnum {
+ kOutOfBoundsPacket = -7,
+ kNotInitialized = -6,
+ kOldPacket = -5,
+ kGeneralError = -4,
+ kFlushIndicator = -3, // Indicator that a flush has occurred.
+ kTimeStampError = -2,
+ kSizeError = -1,
+ kNoError = 0,
+ kIncomplete = 1, // Frame incomplete.
+ kCompleteSession = 3, // At least one layer in the frame is complete.
+ kDecodableSession = 4, // Frame incomplete, but ready to be decoded.
+ kDuplicatePacket = 5 // We're receiving a duplicate packet.
+};
+
+enum VCMFrameBufferStateEnum {
+ kStateEmpty, // Frame popped by the RTP receiver.
+ kStateIncomplete, // Frame that has one or more packets stored.
+ kStateComplete, // Frame that has all of its packets.
+ kStateDecodable // Hybrid mode - frame can be decoded.
+};
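+// Typical lifecycle for the states above (sketch): a frame starts as
+// kStateEmpty, becomes kStateIncomplete when a media packet is stored, and
+// kStateComplete once all of its packets have arrived (or kStateDecodable in
+// hybrid error mode).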
+
+enum { kH264StartCodeLengthBytes = 4 };
+
+// Used to indicate whether a received packet contains a complete NALU (or
+// equivalent).
+enum VCMNaluCompleteness {
+ kNaluUnset = 0, // Packet has not been filled.
+ kNaluComplete = 1, // Packet can be decoded as is.
+ kNaluStart, // Packet contains the beginning of a NALU.
+ kNaluIncomplete, // Packet is neither the beginning nor the end of a NALU.
+ kNaluEnd, // Packet is the end of a NALU.
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_JITTER_BUFFER_COMMON_H_
diff --git a/webrtc/modules/video_coding/jitter_buffer_unittest.cc b/webrtc/modules/video_coding/jitter_buffer_unittest.cc
new file mode 100644
index 0000000000..8abc1b5471
--- /dev/null
+++ b/webrtc/modules/video_coding/jitter_buffer_unittest.cc
@@ -0,0 +1,2571 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string.h>
+
+#include <list>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/video_coding/frame_buffer.h"
+#include "webrtc/modules/video_coding/jitter_buffer.h"
+#include "webrtc/modules/video_coding/media_opt_util.h"
+#include "webrtc/modules/video_coding/packet.h"
+#include "webrtc/modules/video_coding/test/stream_generator.h"
+#include "webrtc/modules/video_coding/test/test_util.h"
+#include "webrtc/system_wrappers/include/clock.h"
+#include "webrtc/system_wrappers/include/metrics.h"
+#include "webrtc/test/histogram.h"
+
+namespace webrtc {
+
+namespace {
+const uint32_t kProcessIntervalSec = 60;
+} // namespace
+
+class Vp9SsMapTest : public ::testing::Test {
+ protected:
+ Vp9SsMapTest() : packet_(data_, 1400, 1234, 1, true) {}
+
+ virtual void SetUp() {
+ packet_.isFirstPacket = true;
+ packet_.markerBit = true;
+ packet_.frameType = kVideoFrameKey;
+ packet_.codec = kVideoCodecVP9;
+ packet_.codecSpecificHeader.codec = kRtpVideoVp9;
+ packet_.codecSpecificHeader.codecHeader.VP9.flexible_mode = false;
+ packet_.codecSpecificHeader.codecHeader.VP9.gof_idx = 0;
+ packet_.codecSpecificHeader.codecHeader.VP9.temporal_idx = kNoTemporalIdx;
+ packet_.codecSpecificHeader.codecHeader.VP9.temporal_up_switch = false;
+ packet_.codecSpecificHeader.codecHeader.VP9.ss_data_available = true;
+ packet_.codecSpecificHeader.codecHeader.VP9.gof.SetGofInfoVP9(
+ kTemporalStructureMode3); // kTemporalStructureMode3: 0-2-1-2..
+ }
+
+ Vp9SsMap map_;
+ uint8_t data_[1500];
+ VCMPacket packet_;
+};
+
+TEST_F(Vp9SsMapTest, Insert) {
+ EXPECT_TRUE(map_.Insert(packet_));
+}
+
+TEST_F(Vp9SsMapTest, Insert_NoSsData) {
+ packet_.codecSpecificHeader.codecHeader.VP9.ss_data_available = false;
+ EXPECT_FALSE(map_.Insert(packet_));
+}
+
+TEST_F(Vp9SsMapTest, Find) {
+ EXPECT_TRUE(map_.Insert(packet_));
+ Vp9SsMap::SsMap::iterator it;
+ EXPECT_TRUE(map_.Find(packet_.timestamp, &it));
+ EXPECT_EQ(packet_.timestamp, it->first);
+}
+
+TEST_F(Vp9SsMapTest, Find_WithWrap) {
+ const uint32_t kSsTimestamp1 = 0xFFFFFFFF;
+ const uint32_t kSsTimestamp2 = 100;
+ packet_.timestamp = kSsTimestamp1;
+ EXPECT_TRUE(map_.Insert(packet_));
+ packet_.timestamp = kSsTimestamp2;
+ EXPECT_TRUE(map_.Insert(packet_));
+ Vp9SsMap::SsMap::iterator it;
+ EXPECT_FALSE(map_.Find(kSsTimestamp1 - 1, &it));
+ EXPECT_TRUE(map_.Find(kSsTimestamp1, &it));
+ EXPECT_EQ(kSsTimestamp1, it->first);
+ EXPECT_TRUE(map_.Find(0, &it));
+ EXPECT_EQ(kSsTimestamp1, it->first);
+ EXPECT_TRUE(map_.Find(kSsTimestamp2 - 1, &it));
+ EXPECT_EQ(kSsTimestamp1, it->first);
+ EXPECT_TRUE(map_.Find(kSsTimestamp2, &it));
+ EXPECT_EQ(kSsTimestamp2, it->first);
+ EXPECT_TRUE(map_.Find(kSsTimestamp2 + 1, &it));
+ EXPECT_EQ(kSsTimestamp2, it->first);
+}
+
+TEST_F(Vp9SsMapTest, Reset) {
+ EXPECT_TRUE(map_.Insert(packet_));
+ Vp9SsMap::SsMap::iterator it;
+ EXPECT_TRUE(map_.Find(packet_.timestamp, &it));
+ EXPECT_EQ(packet_.timestamp, it->first);
+
+ map_.Reset();
+ EXPECT_FALSE(map_.Find(packet_.timestamp, &it));
+}
+
+TEST_F(Vp9SsMapTest, RemoveOld) {
+ Vp9SsMap::SsMap::iterator it;
+ const uint32_t kSsTimestamp1 = 10000;
+ packet_.timestamp = kSsTimestamp1;
+ EXPECT_TRUE(map_.Insert(packet_));
+
+ const uint32_t kTimestamp = kSsTimestamp1 + kProcessIntervalSec * 90000;
+ map_.RemoveOld(kTimestamp - 1); // Interval not passed.
+ EXPECT_TRUE(map_.Find(kSsTimestamp1, &it)); // Should not have been removed.
+
+ map_.RemoveOld(kTimestamp);
+ EXPECT_FALSE(map_.Find(kSsTimestamp1, &it));
+ EXPECT_TRUE(map_.Find(kTimestamp, &it));
+ EXPECT_EQ(kTimestamp, it->first);
+}
+
+TEST_F(Vp9SsMapTest, RemoveOld_WithWrap) {
+ Vp9SsMap::SsMap::iterator it;
+ const uint32_t kSsTimestamp1 = 0xFFFFFFFF - kProcessIntervalSec * 90000;
+ const uint32_t kSsTimestamp2 = 10;
+ const uint32_t kSsTimestamp3 = 1000;
+ packet_.timestamp = kSsTimestamp1;
+ EXPECT_TRUE(map_.Insert(packet_));
+ packet_.timestamp = kSsTimestamp2;
+ EXPECT_TRUE(map_.Insert(packet_));
+ packet_.timestamp = kSsTimestamp3;
+ EXPECT_TRUE(map_.Insert(packet_));
+
+ map_.RemoveOld(kSsTimestamp3);
+ EXPECT_FALSE(map_.Find(kSsTimestamp1, &it));
+ EXPECT_FALSE(map_.Find(kSsTimestamp2, &it));
+ EXPECT_TRUE(map_.Find(kSsTimestamp3, &it));
+}
+
+TEST_F(Vp9SsMapTest, UpdatePacket_NoSsData) {
+ packet_.codecSpecificHeader.codecHeader.VP9.gof_idx = 0;
+ EXPECT_FALSE(map_.UpdatePacket(&packet_));
+}
+
+TEST_F(Vp9SsMapTest, UpdatePacket_NoGofIdx) {
+ EXPECT_TRUE(map_.Insert(packet_));
+ packet_.codecSpecificHeader.codecHeader.VP9.gof_idx = kNoGofIdx;
+ EXPECT_FALSE(map_.UpdatePacket(&packet_));
+}
+
+TEST_F(Vp9SsMapTest, UpdatePacket_InvalidGofIdx) {
+ EXPECT_TRUE(map_.Insert(packet_));
+ packet_.codecSpecificHeader.codecHeader.VP9.gof_idx = 4;
+ EXPECT_FALSE(map_.UpdatePacket(&packet_));
+}
+
+TEST_F(Vp9SsMapTest, UpdatePacket) {
+ EXPECT_TRUE(map_.Insert(packet_)); // kTemporalStructureMode3: 0-2-1-2..
+
+ packet_.codecSpecificHeader.codecHeader.VP9.gof_idx = 0;
+ EXPECT_TRUE(map_.UpdatePacket(&packet_));
+ EXPECT_EQ(0, packet_.codecSpecificHeader.codecHeader.VP9.temporal_idx);
+ EXPECT_FALSE(packet_.codecSpecificHeader.codecHeader.VP9.temporal_up_switch);
+ EXPECT_EQ(1U, packet_.codecSpecificHeader.codecHeader.VP9.num_ref_pics);
+ EXPECT_EQ(4, packet_.codecSpecificHeader.codecHeader.VP9.pid_diff[0]);
+
+ packet_.codecSpecificHeader.codecHeader.VP9.gof_idx = 1;
+ EXPECT_TRUE(map_.UpdatePacket(&packet_));
+ EXPECT_EQ(2, packet_.codecSpecificHeader.codecHeader.VP9.temporal_idx);
+ EXPECT_TRUE(packet_.codecSpecificHeader.codecHeader.VP9.temporal_up_switch);
+ EXPECT_EQ(1U, packet_.codecSpecificHeader.codecHeader.VP9.num_ref_pics);
+ EXPECT_EQ(1, packet_.codecSpecificHeader.codecHeader.VP9.pid_diff[0]);
+
+ packet_.codecSpecificHeader.codecHeader.VP9.gof_idx = 2;
+ EXPECT_TRUE(map_.UpdatePacket(&packet_));
+ EXPECT_EQ(1, packet_.codecSpecificHeader.codecHeader.VP9.temporal_idx);
+ EXPECT_TRUE(packet_.codecSpecificHeader.codecHeader.VP9.temporal_up_switch);
+ EXPECT_EQ(1U, packet_.codecSpecificHeader.codecHeader.VP9.num_ref_pics);
+ EXPECT_EQ(2, packet_.codecSpecificHeader.codecHeader.VP9.pid_diff[0]);
+
+ packet_.codecSpecificHeader.codecHeader.VP9.gof_idx = 3;
+ EXPECT_TRUE(map_.UpdatePacket(&packet_));
+ EXPECT_EQ(2, packet_.codecSpecificHeader.codecHeader.VP9.temporal_idx);
+ EXPECT_FALSE(packet_.codecSpecificHeader.codecHeader.VP9.temporal_up_switch);
+ EXPECT_EQ(2U, packet_.codecSpecificHeader.codecHeader.VP9.num_ref_pics);
+ EXPECT_EQ(1, packet_.codecSpecificHeader.codecHeader.VP9.pid_diff[0]);
+ EXPECT_EQ(2, packet_.codecSpecificHeader.codecHeader.VP9.pid_diff[1]);
+}
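+
+// For reference, the gof_idx -> (temporal_idx, up_switch, pid_diff) mapping
+// that the expectations above encode for kTemporalStructureMode3 (0-2-1-2):
+//   gof_idx 0: tid 0, no up-switch, pid_diff {4}
+//   gof_idx 1: tid 2, up-switch,    pid_diff {1}
+//   gof_idx 2: tid 1, up-switch,    pid_diff {2}
+//   gof_idx 3: tid 2, no up-switch, pid_diff {1, 2}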
+
+class TestBasicJitterBuffer : public ::testing::Test {
+ protected:
+ virtual void SetUp() {
+ clock_.reset(new SimulatedClock(0));
+ jitter_buffer_.reset(new VCMJitterBuffer(
+ clock_.get(),
+ rtc::scoped_ptr<EventWrapper>(event_factory_.CreateEvent())));
+ jitter_buffer_->Start();
+ seq_num_ = 1234;
+ timestamp_ = 0;
+ size_ = 1400;
+ // Data vector - 0, 0, 0x80, 3, 4, 5, 6, 7, 8, 9, 0, 0, 0x80, 3....
+ data_[0] = 0;
+ data_[1] = 0;
+ data_[2] = 0x80;
+ int count = 3;
+ for (unsigned int i = 3; i < sizeof(data_) - 3; ++i) {
+ data_[i] = count;
+ count++;
+ if (count == 10) {
+ data_[i + 1] = 0;
+ data_[i + 2] = 0;
+ data_[i + 3] = 0x80;
+ count = 3;
+ i += 3;
+ }
+ }
+ packet_.reset(new VCMPacket(data_, size_, seq_num_, timestamp_, true));
+ }
+
+ VCMEncodedFrame* DecodeCompleteFrame() {
+ uint32_t timestamp = 0;
+ bool found_frame = jitter_buffer_->NextCompleteTimestamp(10, &timestamp);
+ if (!found_frame)
+ return NULL;
+ VCMEncodedFrame* frame = jitter_buffer_->ExtractAndSetDecode(timestamp);
+ return frame;
+ }
+
+ VCMEncodedFrame* DecodeIncompleteFrame() {
+ uint32_t timestamp = 0;
+ bool found_frame = jitter_buffer_->NextMaybeIncompleteTimestamp(&timestamp);
+ if (!found_frame)
+ return NULL;
+ VCMEncodedFrame* frame = jitter_buffer_->ExtractAndSetDecode(timestamp);
+ return frame;
+ }
+
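+ // Verifies that |frame_out| has the expected length and that its payload
+ // follows the generated data pattern, accounting for inserted start codes
+ // (0, 0, 0, 1) when |startCode| is true.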
+ void CheckOutFrame(VCMEncodedFrame* frame_out,
+ unsigned int size,
+ bool startCode) {
+ ASSERT_TRUE(frame_out);
+
+ const uint8_t* outData = frame_out->Buffer();
+ unsigned int i = 0;
+
+ if (startCode) {
+ EXPECT_EQ(0, outData[0]);
+ EXPECT_EQ(0, outData[1]);
+ EXPECT_EQ(0, outData[2]);
+ EXPECT_EQ(1, outData[3]);
+ i += 4;
+ }
+
+ EXPECT_EQ(size, frame_out->Length());
+ int count = 3;
+ for (; i < size; i++) {
+ if (outData[i] == 0 && outData[i + 1] == 0 && outData[i + 2] == 0x80) {
+ i += 2;
+ } else if (startCode && outData[i] == 0 && outData[i + 1] == 0) {
+ EXPECT_EQ(0, outData[0]);
+ EXPECT_EQ(0, outData[1]);
+ EXPECT_EQ(0, outData[2]);
+ EXPECT_EQ(1, outData[3]);
+ i += 3;
+ } else {
+ EXPECT_EQ(count, outData[i]);
+ count++;
+ if (count == 10) {
+ count = 3;
+ }
+ }
+ }
+ }
+
+ uint16_t seq_num_;
+ uint32_t timestamp_;
+ int size_;
+ uint8_t data_[1500];
+ rtc::scoped_ptr<VCMPacket> packet_;
+ rtc::scoped_ptr<SimulatedClock> clock_;
+ NullEventFactory event_factory_;
+ rtc::scoped_ptr<VCMJitterBuffer> jitter_buffer_;
+};
+
+class TestRunningJitterBuffer : public ::testing::Test {
+ protected:
+ enum { kDataBufferSize = 10 };
+
+ virtual void SetUp() {
+ clock_.reset(new SimulatedClock(0));
+ max_nack_list_size_ = 150;
+ oldest_packet_to_nack_ = 250;
+ jitter_buffer_ = new VCMJitterBuffer(
+ clock_.get(),
+ rtc::scoped_ptr<EventWrapper>(event_factory_.CreateEvent()));
+ stream_generator_ = new StreamGenerator(0, clock_->TimeInMilliseconds());
+ jitter_buffer_->Start();
+ jitter_buffer_->SetNackSettings(max_nack_list_size_, oldest_packet_to_nack_,
+ 0);
+ memset(data_buffer_, 0, kDataBufferSize);
+ }
+
+ virtual void TearDown() {
+ jitter_buffer_->Stop();
+ delete stream_generator_;
+ delete jitter_buffer_;
+ }
+
+ VCMFrameBufferEnum InsertPacketAndPop(int index) {
+ VCMPacket packet;
+ packet.dataPtr = data_buffer_;
+ bool packet_available = stream_generator_->PopPacket(&packet, index);
+ EXPECT_TRUE(packet_available);
+ if (!packet_available)
+ return kGeneralError; // Return here to avoid crashes below.
+ bool retransmitted = false;
+ return jitter_buffer_->InsertPacket(packet, &retransmitted);
+ }
+
+ VCMFrameBufferEnum InsertPacket(int index) {
+ VCMPacket packet;
+ packet.dataPtr = data_buffer_;
+ bool packet_available = stream_generator_->GetPacket(&packet, index);
+ EXPECT_TRUE(packet_available);
+ if (!packet_available)
+ return kGeneralError; // Return here to avoid crashes below.
+ bool retransmitted = false;
+ return jitter_buffer_->InsertPacket(packet, &retransmitted);
+ }
+
+ VCMFrameBufferEnum InsertFrame(FrameType frame_type) {
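+ // A media frame gets one media packet and no empty packets; a kEmptyFrame
+ // gets a single empty packet instead.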
+ stream_generator_->GenerateFrame(
+ frame_type, (frame_type != kEmptyFrame) ? 1 : 0,
+ (frame_type == kEmptyFrame) ? 1 : 0, clock_->TimeInMilliseconds());
+ VCMFrameBufferEnum ret = InsertPacketAndPop(0);
+ clock_->AdvanceTimeMilliseconds(kDefaultFramePeriodMs);
+ return ret;
+ }
+
+ VCMFrameBufferEnum InsertFrames(int num_frames, FrameType frame_type) {
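+ // Returns the last error encountered, or the result of the last insert if
+ // no error occurred.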
+ VCMFrameBufferEnum ret_for_all = kNoError;
+ for (int i = 0; i < num_frames; ++i) {
+ VCMFrameBufferEnum ret = InsertFrame(frame_type);
+ if (ret < kNoError) {
+ ret_for_all = ret;
+ } else if (ret_for_all >= kNoError) {
+ ret_for_all = ret;
+ }
+ }
+ return ret_for_all;
+ }
+
+ void DropFrame(int num_packets) {
+ stream_generator_->GenerateFrame(kVideoFrameDelta, num_packets, 0,
+ clock_->TimeInMilliseconds());
+ for (int i = 0; i < num_packets; ++i)
+ stream_generator_->DropLastPacket();
+ clock_->AdvanceTimeMilliseconds(kDefaultFramePeriodMs);
+ }
+
+ bool DecodeCompleteFrame() {
+ uint32_t timestamp = 0;
+ bool found_frame = jitter_buffer_->NextCompleteTimestamp(0, &timestamp);
+ if (!found_frame)
+ return false;
+
+ VCMEncodedFrame* frame = jitter_buffer_->ExtractAndSetDecode(timestamp);
+ bool ret = (frame != NULL);
+ jitter_buffer_->ReleaseFrame(frame);
+ return ret;
+ }
+
+ bool DecodeIncompleteFrame() {
+ uint32_t timestamp = 0;
+ bool found_frame = jitter_buffer_->NextMaybeIncompleteTimestamp(&timestamp);
+ if (!found_frame)
+ return false;
+ VCMEncodedFrame* frame = jitter_buffer_->ExtractAndSetDecode(timestamp);
+ bool ret = (frame != NULL);
+ jitter_buffer_->ReleaseFrame(frame);
+ return ret;
+ }
+
+ VCMJitterBuffer* jitter_buffer_;
+ StreamGenerator* stream_generator_;
+ rtc::scoped_ptr<SimulatedClock> clock_;
+ NullEventFactory event_factory_;
+ size_t max_nack_list_size_;
+ int oldest_packet_to_nack_;
+ uint8_t data_buffer_[kDataBufferSize];
+};
+
+class TestJitterBufferNack : public TestRunningJitterBuffer {
+ protected:
+ virtual void SetUp() {
+ TestRunningJitterBuffer::SetUp();
+ jitter_buffer_->SetNackMode(kNack, -1, -1);
+ }
+
+ virtual void TearDown() { TestRunningJitterBuffer::TearDown(); }
+};
+
+TEST_F(TestBasicJitterBuffer, StopRunning) {
+ jitter_buffer_->Stop();
+ EXPECT_TRUE(NULL == DecodeCompleteFrame());
+ EXPECT_TRUE(NULL == DecodeIncompleteFrame());
+ jitter_buffer_->Start();
+ // Allow selective errors.
+ jitter_buffer_->SetDecodeErrorMode(kSelectiveErrors);
+
+ // No packets inserted.
+ EXPECT_TRUE(NULL == DecodeCompleteFrame());
+ EXPECT_TRUE(NULL == DecodeIncompleteFrame());
+
+ // Allow decoding with errors.
+ jitter_buffer_->SetDecodeErrorMode(kWithErrors);
+
+ // No packets inserted.
+ EXPECT_TRUE(NULL == DecodeCompleteFrame());
+ EXPECT_TRUE(NULL == DecodeIncompleteFrame());
+}
+
+TEST_F(TestBasicJitterBuffer, SinglePacketFrame) {
+ // Always start with a complete key frame when not allowing errors.
+ jitter_buffer_->SetDecodeErrorMode(kNoErrors);
+ packet_->frameType = kVideoFrameKey;
+ packet_->isFirstPacket = true;
+ packet_->markerBit = true;
+ packet_->timestamp += 123 * 90;
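+ // RTP video timestamps use a 90 kHz clock, so 123 ms is 123 * 90 ticks.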
+
+ // Insert the packet to the jitter buffer and get a frame.
+ bool retransmitted = false;
+ EXPECT_EQ(kCompleteSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ VCMEncodedFrame* frame_out = DecodeCompleteFrame();
+ CheckOutFrame(frame_out, size_, false);
+ EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
+ jitter_buffer_->ReleaseFrame(frame_out);
+}
+
+TEST_F(TestBasicJitterBuffer, VerifyHistogramStats) {
+ test::ClearHistograms();
+ // Always start with a complete key frame when not allowing errors.
+ jitter_buffer_->SetDecodeErrorMode(kNoErrors);
+ packet_->frameType = kVideoFrameKey;
+ packet_->isFirstPacket = true;
+ packet_->markerBit = true;
+ packet_->timestamp += 123 * 90;
+
+ // Insert single packet frame to the jitter buffer and get a frame.
+ bool retransmitted = false;
+ EXPECT_EQ(kCompleteSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ VCMEncodedFrame* frame_out = DecodeCompleteFrame();
+ CheckOutFrame(frame_out, size_, false);
+ EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
+ jitter_buffer_->ReleaseFrame(frame_out);
+
+ // Verify that histograms are updated when the jitter buffer is stopped.
+ clock_->AdvanceTimeMilliseconds(metrics::kMinRunTimeInSeconds * 1000);
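+ // Stats are only reported once the minimum run time has elapsed.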
+ jitter_buffer_->Stop();
+ EXPECT_EQ(
+ 0, test::LastHistogramSample("WebRTC.Video.DiscardedPacketsInPercent"));
+ EXPECT_EQ(
+ 0, test::LastHistogramSample("WebRTC.Video.DuplicatedPacketsInPercent"));
+ EXPECT_NE(-1, test::LastHistogramSample(
+ "WebRTC.Video.CompleteFramesReceivedPerSecond"));
+ EXPECT_EQ(1000, test::LastHistogramSample(
+ "WebRTC.Video.KeyFramesReceivedInPermille"));
+
+ // Verify that histograms are not updated if stop is called again.
+ jitter_buffer_->Stop();
+ EXPECT_EQ(
+ 1, test::NumHistogramSamples("WebRTC.Video.DiscardedPacketsInPercent"));
+ EXPECT_EQ(
+ 1, test::NumHistogramSamples("WebRTC.Video.DuplicatedPacketsInPercent"));
+ EXPECT_EQ(1, test::NumHistogramSamples(
+ "WebRTC.Video.CompleteFramesReceivedPerSecond"));
+ EXPECT_EQ(
+ 1, test::NumHistogramSamples("WebRTC.Video.KeyFramesReceivedInPermille"));
+}
+
+TEST_F(TestBasicJitterBuffer, DualPacketFrame) {
+ packet_->frameType = kVideoFrameKey;
+ packet_->isFirstPacket = true;
+ packet_->markerBit = false;
+
+ bool retransmitted = false;
+ EXPECT_EQ(kIncomplete,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ VCMEncodedFrame* frame_out = DecodeCompleteFrame();
+ // Should not be complete.
+ EXPECT_TRUE(frame_out == NULL);
+
+ ++seq_num_;
+ packet_->isFirstPacket = false;
+ packet_->markerBit = true;
+ packet_->seqNum = seq_num_;
+
+ EXPECT_EQ(kCompleteSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ frame_out = DecodeCompleteFrame();
+ CheckOutFrame(frame_out, 2 * size_, false);
+
+ EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
+ jitter_buffer_->ReleaseFrame(frame_out);
+}
+
+TEST_F(TestBasicJitterBuffer, 100PacketKeyFrame) {
+ packet_->frameType = kVideoFrameKey;
+ packet_->isFirstPacket = true;
+ packet_->markerBit = false;
+
+ bool retransmitted = false;
+ EXPECT_EQ(kIncomplete,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ VCMEncodedFrame* frame_out = DecodeCompleteFrame();
+
+ // Frame should not be complete.
+ EXPECT_TRUE(frame_out == NULL);
+
+ // Insert 98 more packets.
+ int loop = 0;
+ do {
+ seq_num_++;
+ packet_->isFirstPacket = false;
+ packet_->markerBit = false;
+ packet_->seqNum = seq_num_;
+
+ EXPECT_EQ(kIncomplete,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ loop++;
+ } while (loop < 98);
+
+ // Insert last packet.
+ ++seq_num_;
+ packet_->isFirstPacket = false;
+ packet_->markerBit = true;
+ packet_->seqNum = seq_num_;
+
+ EXPECT_EQ(kCompleteSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ frame_out = DecodeCompleteFrame();
+
+ CheckOutFrame(frame_out, 100 * size_, false);
+ EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
+ jitter_buffer_->ReleaseFrame(frame_out);
+}
+
+TEST_F(TestBasicJitterBuffer, 100PacketDeltaFrame) {
+ // Always start with a complete key frame.
+ packet_->frameType = kVideoFrameKey;
+ packet_->isFirstPacket = true;
+ packet_->markerBit = true;
+
+ bool retransmitted = false;
+ EXPECT_EQ(kCompleteSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ VCMEncodedFrame* frame_out = DecodeCompleteFrame();
+ EXPECT_FALSE(frame_out == NULL);
+ jitter_buffer_->ReleaseFrame(frame_out);
+
+ ++seq_num_;
+ packet_->seqNum = seq_num_;
+ packet_->markerBit = false;
+ packet_->frameType = kVideoFrameDelta;
+ packet_->timestamp += 33 * 90;
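+ // Advance one frame interval: 33 ms at the 90 kHz RTP clock.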
+
+ EXPECT_EQ(kIncomplete,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ frame_out = DecodeCompleteFrame();
+
+ // Frame should not be complete.
+ EXPECT_TRUE(frame_out == NULL);
+
+ packet_->isFirstPacket = false;
+ // Insert 98 more packets.
+ int loop = 0;
+ do {
+ ++seq_num_;
+ packet_->seqNum = seq_num_;
+
+ // Insert a packet into a frame.
+ EXPECT_EQ(kIncomplete,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ loop++;
+ } while (loop < 98);
+
+ // Insert the last packet.
+ ++seq_num_;
+ packet_->isFirstPacket = false;
+ packet_->markerBit = true;
+ packet_->seqNum = seq_num_;
+
+ EXPECT_EQ(kCompleteSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ frame_out = DecodeCompleteFrame();
+
+ CheckOutFrame(frame_out, 100 * size_, false);
+ EXPECT_EQ(kVideoFrameDelta, frame_out->FrameType());
+ jitter_buffer_->ReleaseFrame(frame_out);
+}
+
+TEST_F(TestBasicJitterBuffer, PacketReorderingReverseOrder) {
+ // Insert the "first" packet last.
+ seq_num_ += 100;
+ packet_->frameType = kVideoFrameKey;
+ packet_->isFirstPacket = false;
+ packet_->markerBit = true;
+ packet_->seqNum = seq_num_;
+ packet_->timestamp = timestamp_;
+
+ bool retransmitted = false;
+ EXPECT_EQ(kIncomplete,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ VCMEncodedFrame* frame_out = DecodeCompleteFrame();
+
+ EXPECT_TRUE(frame_out == NULL);
+
+ // Insert 98 packets.
+ int loop = 0;
+ do {
+ seq_num_--;
+ packet_->isFirstPacket = false;
+ packet_->markerBit = false;
+ packet_->seqNum = seq_num_;
+
+ EXPECT_EQ(kIncomplete,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ loop++;
+ } while (loop < 98);
+
+ // Insert the last packet.
+ seq_num_--;
+ packet_->isFirstPacket = true;
+ packet_->markerBit = false;
+ packet_->seqNum = seq_num_;
+
+ EXPECT_EQ(kCompleteSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ frame_out = DecodeCompleteFrame();
+
+ CheckOutFrame(frame_out, 100 * size_, false);
+
+ EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
+ jitter_buffer_->ReleaseFrame(frame_out);
+}
+
+TEST_F(TestBasicJitterBuffer, FrameReordering2Frames2PacketsEach) {
+ packet_->frameType = kVideoFrameDelta;
+ packet_->isFirstPacket = true;
+ packet_->markerBit = false;
+
+ bool retransmitted = false;
+ EXPECT_EQ(kIncomplete,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ VCMEncodedFrame* frame_out = DecodeCompleteFrame();
+
+ EXPECT_TRUE(frame_out == NULL);
+
+ seq_num_++;
+ packet_->isFirstPacket = false;
+ packet_->markerBit = true;
+ packet_->seqNum = seq_num_;
+
+ EXPECT_EQ(kCompleteSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ // Check that we fail to get a frame, since the seqnums are not continuous.
+ frame_out = DecodeCompleteFrame();
+ EXPECT_TRUE(frame_out == NULL);
+
+ seq_num_ -= 3;
+ timestamp_ -= 33 * 90;
+ packet_->frameType = kVideoFrameKey;
+ packet_->isFirstPacket = true;
+ packet_->markerBit = false;
+ packet_->seqNum = seq_num_;
+ packet_->timestamp = timestamp_;
+
+ EXPECT_EQ(kIncomplete,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ frame_out = DecodeCompleteFrame();
+
+ // It should not be complete.
+ EXPECT_TRUE(frame_out == NULL);
+
+ seq_num_++;
+ packet_->isFirstPacket = false;
+ packet_->markerBit = true;
+ packet_->seqNum = seq_num_;
+
+ EXPECT_EQ(kCompleteSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ frame_out = DecodeCompleteFrame();
+ CheckOutFrame(frame_out, 2 * size_, false);
+ EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
+ jitter_buffer_->ReleaseFrame(frame_out);
+
+ frame_out = DecodeCompleteFrame();
+ CheckOutFrame(frame_out, 2 * size_, false);
+ EXPECT_EQ(kVideoFrameDelta, frame_out->FrameType());
+ jitter_buffer_->ReleaseFrame(frame_out);
+}
+
+TEST_F(TestBasicJitterBuffer, TestReorderingWithPadding) {
+ packet_->frameType = kVideoFrameKey;
+ packet_->isFirstPacket = true;
+ packet_->markerBit = true;
+
+ // Send in an initial good packet/frame (Frame A) to start things off.
+ bool retransmitted = false;
+ EXPECT_EQ(kCompleteSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ VCMEncodedFrame* frame_out = DecodeCompleteFrame();
+ EXPECT_TRUE(frame_out != NULL);
+ jitter_buffer_->ReleaseFrame(frame_out);
+
+ // Now send in a complete delta frame (Frame C), but with a sequence number
+ // gap. No pic index either, so no temporal scalability cheating :)
+ packet_->frameType = kVideoFrameDelta;
+ // Leave a gap of 2 sequence numbers and two frames.
+ packet_->seqNum = seq_num_ + 3;
+ packet_->timestamp = timestamp_ + (66 * 90);
+ // Still isFirst = marker = true.
+ // Session should be complete (frame is complete), but there's nothing to
+ // decode yet.
+ EXPECT_EQ(kCompleteSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ frame_out = DecodeCompleteFrame();
+ EXPECT_TRUE(frame_out == NULL);
+
+ // Now send in a complete delta frame (Frame B) that is continuous from A, but
+ // doesn't fill the full gap to C. The rest of the gap is going to be padding.
+ packet_->seqNum = seq_num_ + 1;
+ packet_->timestamp = timestamp_ + (33 * 90);
+ // Still isFirst = marker = true.
+ EXPECT_EQ(kCompleteSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ frame_out = DecodeCompleteFrame();
+ EXPECT_TRUE(frame_out != NULL);
+ jitter_buffer_->ReleaseFrame(frame_out);
+
+ // But Frame C isn't continuous yet.
+ frame_out = DecodeCompleteFrame();
+ EXPECT_TRUE(frame_out == NULL);
+
+ // Add in the padding. These are empty packets (data length is 0) with no
+ // marker bit and matching the timestamp of Frame B.
+ VCMPacket empty_packet(data_, 0, seq_num_ + 2, timestamp_ + (33 * 90), false);
+ EXPECT_EQ(kOldPacket,
+ jitter_buffer_->InsertPacket(empty_packet, &retransmitted));
+ empty_packet.seqNum += 1;
+ EXPECT_EQ(kOldPacket,
+ jitter_buffer_->InsertPacket(empty_packet, &retransmitted));
+
+ // But now Frame C should be ready!
+ frame_out = DecodeCompleteFrame();
+ EXPECT_TRUE(frame_out != NULL);
+ jitter_buffer_->ReleaseFrame(frame_out);
+}
+
+TEST_F(TestBasicJitterBuffer, DuplicatePackets) {
+ packet_->frameType = kVideoFrameKey;
+ packet_->isFirstPacket = true;
+ packet_->markerBit = false;
+ packet_->seqNum = seq_num_;
+ packet_->timestamp = timestamp_;
+ EXPECT_EQ(0, jitter_buffer_->num_packets());
+ EXPECT_EQ(0, jitter_buffer_->num_duplicated_packets());
+
+ bool retransmitted = false;
+ EXPECT_EQ(kIncomplete,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ VCMEncodedFrame* frame_out = DecodeCompleteFrame();
+
+ EXPECT_TRUE(frame_out == NULL);
+ EXPECT_EQ(1, jitter_buffer_->num_packets());
+ EXPECT_EQ(0, jitter_buffer_->num_duplicated_packets());
+
+ // Insert the same packet again; it should be detected as a duplicate.
+ EXPECT_EQ(kDuplicatePacket,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ EXPECT_EQ(2, jitter_buffer_->num_packets());
+ EXPECT_EQ(1, jitter_buffer_->num_duplicated_packets());
+
+ seq_num_++;
+ packet_->seqNum = seq_num_;
+ packet_->markerBit = true;
+ packet_->isFirstPacket = false;
+
+ EXPECT_EQ(kCompleteSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ frame_out = DecodeCompleteFrame();
+ ASSERT_TRUE(frame_out != NULL);
+ CheckOutFrame(frame_out, 2 * size_, false);
+
+ EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
+ EXPECT_EQ(3, jitter_buffer_->num_packets());
+ EXPECT_EQ(1, jitter_buffer_->num_duplicated_packets());
+ jitter_buffer_->ReleaseFrame(frame_out);
+}
+
+TEST_F(TestBasicJitterBuffer, DuplicatePreviousDeltaFramePacket) {
+ packet_->frameType = kVideoFrameKey;
+ packet_->isFirstPacket = true;
+ packet_->markerBit = true;
+ packet_->seqNum = seq_num_;
+ packet_->timestamp = timestamp_;
+ jitter_buffer_->SetDecodeErrorMode(kNoErrors);
+ EXPECT_EQ(0, jitter_buffer_->num_packets());
+ EXPECT_EQ(0, jitter_buffer_->num_duplicated_packets());
+
+ bool retransmitted = false;
+ // Insert first complete frame.
+ EXPECT_EQ(kCompleteSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ VCMEncodedFrame* frame_out = DecodeCompleteFrame();
+ ASSERT_TRUE(frame_out != NULL);
+ CheckOutFrame(frame_out, size_, false);
+ EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
+ jitter_buffer_->ReleaseFrame(frame_out);
+
+ // Insert 3 delta frames.
+ for (uint16_t i = 1; i <= 3; ++i) {
+ packet_->seqNum = seq_num_ + i;
+ packet_->timestamp = timestamp_ + (i * 33) * 90;
+ packet_->frameType = kVideoFrameDelta;
+ EXPECT_EQ(kCompleteSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ EXPECT_EQ(i + 1, jitter_buffer_->num_packets());
+ EXPECT_EQ(0, jitter_buffer_->num_duplicated_packets());
+ }
+
+ // Retransmit second delta frame.
+ packet_->seqNum = seq_num_ + 2;
+ packet_->timestamp = timestamp_ + 66 * 90;
+
+ EXPECT_EQ(kDuplicatePacket,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ EXPECT_EQ(5, jitter_buffer_->num_packets());
+ EXPECT_EQ(1, jitter_buffer_->num_duplicated_packets());
+
+ // Should be able to decode 3 delta frames, key frame already decoded.
+ for (size_t i = 0; i < 3; ++i) {
+ frame_out = DecodeCompleteFrame();
+ ASSERT_TRUE(frame_out != NULL);
+ CheckOutFrame(frame_out, size_, false);
+ EXPECT_EQ(kVideoFrameDelta, frame_out->FrameType());
+ jitter_buffer_->ReleaseFrame(frame_out);
+ }
+}
+
+TEST_F(TestBasicJitterBuffer, TestSkipForwardVp9) {
+ // Verify that JB skips forward to next base layer frame.
+ // -------------------------------------------------
+ // | 65485 | 65486 | 65487 | 65488 | 65489 | ...
+ // | pid:5 | pid:6 | pid:7 | pid:8 | pid:9 | ...
+ // | tid:0 | tid:2 | tid:1 | tid:2 | tid:0 | ...
+ // | ss | x | x | x | |
+ // -------------------------------------------------
+ // |<----------tl0idx:200--------->|<---tl0idx:201---
+
+ bool re = false;
+ packet_->codec = kVideoCodecVP9;
+ packet_->codecSpecificHeader.codec = kRtpVideoVp9;
+ packet_->isFirstPacket = true;
+ packet_->markerBit = true;
+ packet_->codecSpecificHeader.codecHeader.VP9.flexible_mode = false;
+ packet_->codecSpecificHeader.codecHeader.VP9.spatial_idx = 0;
+ packet_->codecSpecificHeader.codecHeader.VP9.beginning_of_frame = true;
+ packet_->codecSpecificHeader.codecHeader.VP9.end_of_frame = true;
+ packet_->codecSpecificHeader.codecHeader.VP9.temporal_up_switch = false;
+
+ packet_->seqNum = 65485;
+ packet_->timestamp = 1000;
+ packet_->frameType = kVideoFrameKey;
+ packet_->codecSpecificHeader.codecHeader.VP9.picture_id = 5;
+ packet_->codecSpecificHeader.codecHeader.VP9.tl0_pic_idx = 200;
+ packet_->codecSpecificHeader.codecHeader.VP9.temporal_idx = 0;
+ packet_->codecSpecificHeader.codecHeader.VP9.ss_data_available = true;
+ packet_->codecSpecificHeader.codecHeader.VP9.gof.SetGofInfoVP9(
+ kTemporalStructureMode3); // kTemporalStructureMode3: 0-2-1-2..
+ EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_, &re));
+
+ // Insert next temporal layer 0.
+ packet_->seqNum = 65489;
+ packet_->timestamp = 13000;
+ packet_->frameType = kVideoFrameDelta;
+ packet_->codecSpecificHeader.codecHeader.VP9.picture_id = 9;
+ packet_->codecSpecificHeader.codecHeader.VP9.tl0_pic_idx = 201;
+ packet_->codecSpecificHeader.codecHeader.VP9.temporal_idx = 0;
+ packet_->codecSpecificHeader.codecHeader.VP9.ss_data_available = false;
+ EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_, &re));
+
+ VCMEncodedFrame* frame_out = DecodeCompleteFrame();
+ EXPECT_EQ(1000U, frame_out->TimeStamp());
+ EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
+ jitter_buffer_->ReleaseFrame(frame_out);
+
+ frame_out = DecodeCompleteFrame();
+ EXPECT_EQ(13000U, frame_out->TimeStamp());
+ EXPECT_EQ(kVideoFrameDelta, frame_out->FrameType());
+ jitter_buffer_->ReleaseFrame(frame_out);
+}
+
+TEST_F(TestBasicJitterBuffer, ReorderedVp9SsData_3TlLayers) {
+ // Verify that frames are updated with SS data when the SS packet is
+ // reordered.
+ // --------------------------------
+ // | 65486 | 65487 | 65485 |...
+ // | pid:6 | pid:7 | pid:5 |...
+ // | tid:2 | tid:1 | tid:0 |...
+ // | | | ss |
+ // --------------------------------
+ // |<--------tl0idx:200--------->|
+
+ bool re = false;
+ packet_->codec = kVideoCodecVP9;
+ packet_->codecSpecificHeader.codec = kRtpVideoVp9;
+ packet_->isFirstPacket = true;
+ packet_->markerBit = true;
+ packet_->codecSpecificHeader.codecHeader.VP9.flexible_mode = false;
+ packet_->codecSpecificHeader.codecHeader.VP9.spatial_idx = 0;
+ packet_->codecSpecificHeader.codecHeader.VP9.beginning_of_frame = true;
+ packet_->codecSpecificHeader.codecHeader.VP9.end_of_frame = true;
+ packet_->codecSpecificHeader.codecHeader.VP9.tl0_pic_idx = 200;
+
+ packet_->seqNum = 65486;
+ packet_->timestamp = 6000;
+ packet_->frameType = kVideoFrameDelta;
+ packet_->codecSpecificHeader.codecHeader.VP9.picture_id = 6;
+ packet_->codecSpecificHeader.codecHeader.VP9.temporal_idx = 2;
+ packet_->codecSpecificHeader.codecHeader.VP9.temporal_up_switch = true;
+ EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_, &re));
+
+ packet_->seqNum = 65487;
+ packet_->timestamp = 9000;
+ packet_->frameType = kVideoFrameDelta;
+ packet_->codecSpecificHeader.codecHeader.VP9.picture_id = 7;
+ packet_->codecSpecificHeader.codecHeader.VP9.temporal_idx = 1;
+ packet_->codecSpecificHeader.codecHeader.VP9.temporal_up_switch = true;
+ EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_, &re));
+
+ // Insert first frame with SS data.
+ packet_->seqNum = 65485;
+ packet_->timestamp = 3000;
+ packet_->frameType = kVideoFrameKey;
+ packet_->width = 352;
+ packet_->height = 288;
+ packet_->codecSpecificHeader.codecHeader.VP9.picture_id = 5;
+ packet_->codecSpecificHeader.codecHeader.VP9.temporal_idx = 0;
+ packet_->codecSpecificHeader.codecHeader.VP9.temporal_up_switch = false;
+ packet_->codecSpecificHeader.codecHeader.VP9.ss_data_available = true;
+ packet_->codecSpecificHeader.codecHeader.VP9.gof.SetGofInfoVP9(
+ kTemporalStructureMode3); // kTemporalStructureMode3: 0-2-1-2..
+ EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_, &re));
+
+ VCMEncodedFrame* frame_out = DecodeCompleteFrame();
+ EXPECT_EQ(3000U, frame_out->TimeStamp());
+ EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
+ EXPECT_EQ(0, frame_out->CodecSpecific()->codecSpecific.VP9.temporal_idx);
+ EXPECT_FALSE(
+ frame_out->CodecSpecific()->codecSpecific.VP9.temporal_up_switch);
+ jitter_buffer_->ReleaseFrame(frame_out);
+
+ frame_out = DecodeCompleteFrame();
+ EXPECT_EQ(6000U, frame_out->TimeStamp());
+ EXPECT_EQ(kVideoFrameDelta, frame_out->FrameType());
+ EXPECT_EQ(2, frame_out->CodecSpecific()->codecSpecific.VP9.temporal_idx);
+ EXPECT_TRUE(frame_out->CodecSpecific()->codecSpecific.VP9.temporal_up_switch);
+ jitter_buffer_->ReleaseFrame(frame_out);
+
+ frame_out = DecodeCompleteFrame();
+ EXPECT_EQ(9000U, frame_out->TimeStamp());
+ EXPECT_EQ(kVideoFrameDelta, frame_out->FrameType());
+ EXPECT_EQ(1, frame_out->CodecSpecific()->codecSpecific.VP9.temporal_idx);
+ EXPECT_TRUE(frame_out->CodecSpecific()->codecSpecific.VP9.temporal_up_switch);
+ jitter_buffer_->ReleaseFrame(frame_out);
+}
+
+TEST_F(TestBasicJitterBuffer, ReorderedVp9SsData_2Tl2SLayers) {
+ // Verify that frames are updated with SS data when the SS packet is
+ // reordered.
+ // -----------------------------------------
+ // | 65486 | 65487 | 65485 | 65484 |...
+ // | pid:6 | pid:6 | pid:5 | pid:5 |...
+ // | tid:1 | tid:1 | tid:0 | tid:0 |...
+ // | sid:0 | sid:1 | sid:1 | sid:0 |...
+ // | t:6000 | t:6000 | t:3000 | t:3000 |
+ // | | | | ss |
+ // -----------------------------------------
+ // |<-----------tl0idx:200------------>|
+
+ bool re = false;
+ packet_->codec = kVideoCodecVP9;
+ packet_->codecSpecificHeader.codec = kRtpVideoVp9;
+ packet_->codecSpecificHeader.codecHeader.VP9.flexible_mode = false;
+ packet_->codecSpecificHeader.codecHeader.VP9.beginning_of_frame = true;
+ packet_->codecSpecificHeader.codecHeader.VP9.end_of_frame = true;
+ packet_->codecSpecificHeader.codecHeader.VP9.tl0_pic_idx = 200;
+
+ packet_->isFirstPacket = true;
+ packet_->markerBit = false;
+ packet_->seqNum = 65486;
+ packet_->timestamp = 6000;
+ packet_->frameType = kVideoFrameDelta;
+ packet_->codecSpecificHeader.codecHeader.VP9.spatial_idx = 0;
+ packet_->codecSpecificHeader.codecHeader.VP9.picture_id = 6;
+ packet_->codecSpecificHeader.codecHeader.VP9.temporal_idx = 1;
+ packet_->codecSpecificHeader.codecHeader.VP9.temporal_up_switch = true;
+ EXPECT_EQ(kIncomplete, jitter_buffer_->InsertPacket(*packet_, &re));
+
+ packet_->isFirstPacket = false;
+ packet_->markerBit = true;
+ packet_->seqNum = 65487;
+ packet_->frameType = kVideoFrameDelta;
+ packet_->codecSpecificHeader.codecHeader.VP9.spatial_idx = 1;
+ packet_->codecSpecificHeader.codecHeader.VP9.picture_id = 6;
+ packet_->codecSpecificHeader.codecHeader.VP9.temporal_idx = 1;
+ packet_->codecSpecificHeader.codecHeader.VP9.temporal_up_switch = true;
+ EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_, &re));
+
+ packet_->isFirstPacket = false;
+ packet_->markerBit = true;
+ packet_->seqNum = 65485;
+ packet_->timestamp = 3000;
+ packet_->frameType = kVideoFrameKey;
+ packet_->codecSpecificHeader.codecHeader.VP9.spatial_idx = 1;
+ packet_->codecSpecificHeader.codecHeader.VP9.picture_id = 5;
+ packet_->codecSpecificHeader.codecHeader.VP9.temporal_idx = 0;
+ packet_->codecSpecificHeader.codecHeader.VP9.temporal_up_switch = false;
+ EXPECT_EQ(kIncomplete, jitter_buffer_->InsertPacket(*packet_, &re));
+
+ // Insert first frame with SS data.
+ packet_->isFirstPacket = true;
+ packet_->markerBit = false;
+ packet_->seqNum = 65484;
+ packet_->frameType = kVideoFrameKey;
+ packet_->width = 352;
+ packet_->height = 288;
+ packet_->codecSpecificHeader.codecHeader.VP9.spatial_idx = 0;
+ packet_->codecSpecificHeader.codecHeader.VP9.picture_id = 5;
+ packet_->codecSpecificHeader.codecHeader.VP9.temporal_idx = 0;
+ packet_->codecSpecificHeader.codecHeader.VP9.temporal_up_switch = false;
+ packet_->codecSpecificHeader.codecHeader.VP9.ss_data_available = true;
+ packet_->codecSpecificHeader.codecHeader.VP9.gof.SetGofInfoVP9(
+ kTemporalStructureMode2); // kTemporalStructureMode2: 0-1-0-1..
+ EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_, &re));
+
+ VCMEncodedFrame* frame_out = DecodeCompleteFrame();
+ EXPECT_EQ(3000U, frame_out->TimeStamp());
+ EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
+ EXPECT_EQ(0, frame_out->CodecSpecific()->codecSpecific.VP9.temporal_idx);
+ EXPECT_FALSE(
+ frame_out->CodecSpecific()->codecSpecific.VP9.temporal_up_switch);
+ jitter_buffer_->ReleaseFrame(frame_out);
+
+ frame_out = DecodeCompleteFrame();
+ EXPECT_EQ(6000U, frame_out->TimeStamp());
+ EXPECT_EQ(kVideoFrameDelta, frame_out->FrameType());
+ EXPECT_EQ(1, frame_out->CodecSpecific()->codecSpecific.VP9.temporal_idx);
+ EXPECT_TRUE(frame_out->CodecSpecific()->codecSpecific.VP9.temporal_up_switch);
+ jitter_buffer_->ReleaseFrame(frame_out);
+}
+
+TEST_F(TestBasicJitterBuffer, H264InsertStartCode) {
+ packet_->frameType = kVideoFrameKey;
+ packet_->isFirstPacket = true;
+ packet_->markerBit = false;
+ packet_->seqNum = seq_num_;
+ packet_->timestamp = timestamp_;
+ packet_->insertStartCode = true;
+
+ bool retransmitted = false;
+ EXPECT_EQ(kIncomplete,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ VCMEncodedFrame* frame_out = DecodeCompleteFrame();
+
+ // Frame should not be complete.
+ EXPECT_TRUE(frame_out == NULL);
+
+ seq_num_++;
+ packet_->isFirstPacket = false;
+ packet_->markerBit = true;
+ packet_->seqNum = seq_num_;
+
+ EXPECT_EQ(kCompleteSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ frame_out = DecodeCompleteFrame();
+ CheckOutFrame(frame_out, size_ * 2 + 4 * 2, true);
+ EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
+ jitter_buffer_->ReleaseFrame(frame_out);
+}
+
+// Test threshold conditions of decodable state.
+TEST_F(TestBasicJitterBuffer, PacketLossWithSelectiveErrorsThresholdCheck) {
+ jitter_buffer_->SetDecodeErrorMode(kSelectiveErrors);
+ // Always start with a key frame. Use 10 packets to test Decodable State
+ // boundaries.
+ packet_->frameType = kVideoFrameKey;
+ packet_->isFirstPacket = true;
+ packet_->markerBit = false;
+ packet_->seqNum = seq_num_;
+ packet_->timestamp = timestamp_;
+
+ bool retransmitted = false;
+ EXPECT_EQ(kIncomplete,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ uint32_t timestamp = 0;
+ EXPECT_FALSE(jitter_buffer_->NextCompleteTimestamp(0, &timestamp));
+ EXPECT_FALSE(jitter_buffer_->NextMaybeIncompleteTimestamp(&timestamp));
+
+ packet_->isFirstPacket = false;
+ for (int i = 1; i < 9; ++i) {
+ packet_->seqNum++;
+ EXPECT_EQ(kIncomplete,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ EXPECT_FALSE(jitter_buffer_->NextCompleteTimestamp(0, &timestamp));
+ EXPECT_FALSE(jitter_buffer_->NextMaybeIncompleteTimestamp(&timestamp));
+ }
+
+ // Last packet.
+ packet_->markerBit = true;
+ packet_->seqNum++;
+
+ EXPECT_EQ(kCompleteSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ VCMEncodedFrame* frame_out = DecodeCompleteFrame();
+ CheckOutFrame(frame_out, 10 * size_, false);
+ EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
+ jitter_buffer_->ReleaseFrame(frame_out);
+
+ // An incomplete frame can only be decoded once a subsequent frame has begun
+ // to arrive. Insert packet in distant frame for this purpose.
+ packet_->frameType = kVideoFrameDelta;
+ packet_->isFirstPacket = true;
+ packet_->markerBit = false;
+ packet_->seqNum += 100;
+ packet_->timestamp += 33 * 90 * 8;
+
+ EXPECT_EQ(kDecodableSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ EXPECT_FALSE(jitter_buffer_->NextCompleteTimestamp(0, &timestamp));
+ EXPECT_FALSE(jitter_buffer_->NextMaybeIncompleteTimestamp(&timestamp));
+
+ // Insert second frame
+ packet_->seqNum -= 99;
+ packet_->timestamp -= 33 * 90 * 7;
+
+ EXPECT_EQ(kDecodableSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ EXPECT_FALSE(jitter_buffer_->NextCompleteTimestamp(0, &timestamp));
+ EXPECT_TRUE(jitter_buffer_->NextMaybeIncompleteTimestamp(&timestamp));
+
+ packet_->isFirstPacket = false;
+ for (int i = 1; i < 8; ++i) {
+ packet_->seqNum++;
+ EXPECT_EQ(kDecodableSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ EXPECT_FALSE(jitter_buffer_->NextCompleteTimestamp(0, &timestamp));
+ EXPECT_TRUE(jitter_buffer_->NextMaybeIncompleteTimestamp(&timestamp));
+ }
+
+ packet_->seqNum++;
+ EXPECT_EQ(kDecodableSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ EXPECT_FALSE(jitter_buffer_->NextCompleteTimestamp(0, &timestamp));
+ EXPECT_TRUE(jitter_buffer_->NextMaybeIncompleteTimestamp(&timestamp));
+
+ frame_out = DecodeIncompleteFrame();
+ ASSERT_FALSE(NULL == frame_out);
+ CheckOutFrame(frame_out, 9 * size_, false);
+ EXPECT_EQ(kVideoFrameDelta, frame_out->FrameType());
+ jitter_buffer_->ReleaseFrame(frame_out);
+
+ packet_->markerBit = true;
+ packet_->seqNum++;
+ EXPECT_EQ(kOldPacket, jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+}
+
+// Make sure the first packet is present before a frame can be decoded.
+TEST_F(TestBasicJitterBuffer, PacketLossWithSelectiveErrorsIncompleteKey) {
+ jitter_buffer_->SetDecodeErrorMode(kSelectiveErrors);
+ // Always start with a key frame.
+ packet_->frameType = kVideoFrameKey;
+ packet_->isFirstPacket = true;
+ packet_->markerBit = true;
+ packet_->seqNum = seq_num_;
+ packet_->timestamp = timestamp_;
+
+ bool retransmitted = false;
+ EXPECT_EQ(kCompleteSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ VCMEncodedFrame* frame_out = DecodeCompleteFrame();
+ CheckOutFrame(frame_out, size_, false);
+ EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
+ jitter_buffer_->ReleaseFrame(frame_out);
+
+ // An incomplete frame can only be decoded once a subsequent frame has begun
+ // to arrive. Insert packet in distant frame for this purpose.
+ packet_->frameType = kVideoFrameDelta;
+ packet_->isFirstPacket = false;
+ packet_->markerBit = false;
+ packet_->seqNum += 100;
+ packet_->timestamp += 33 * 90 * 8;
+ EXPECT_EQ(kIncomplete,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ uint32_t timestamp;
+ EXPECT_FALSE(jitter_buffer_->NextCompleteTimestamp(0, &timestamp));
+ EXPECT_FALSE(jitter_buffer_->NextMaybeIncompleteTimestamp(&timestamp));
+
+ // Insert second frame - an incomplete key frame.
+ packet_->frameType = kVideoFrameKey;
+ packet_->isFirstPacket = true;
+ packet_->seqNum -= 99;
+ packet_->timestamp -= 33 * 90 * 7;
+
+ EXPECT_EQ(kIncomplete,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ EXPECT_FALSE(jitter_buffer_->NextCompleteTimestamp(0, &timestamp));
+ EXPECT_FALSE(jitter_buffer_->NextMaybeIncompleteTimestamp(&timestamp));
+
+ // Insert a few more packets. Make sure we're waiting for the key frame to be
+ // complete.
+ packet_->isFirstPacket = false;
+ for (int i = 1; i < 5; ++i) {
+ packet_->seqNum++;
+ EXPECT_EQ(kIncomplete,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ EXPECT_FALSE(jitter_buffer_->NextCompleteTimestamp(0, &timestamp));
+ EXPECT_FALSE(jitter_buffer_->NextMaybeIncompleteTimestamp(&timestamp));
+ }
+
+ // Complete key frame.
+ packet_->markerBit = true;
+ packet_->seqNum++;
+ EXPECT_EQ(kCompleteSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ frame_out = DecodeCompleteFrame();
+ CheckOutFrame(frame_out, 6 * size_, false);
+ EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
+ jitter_buffer_->ReleaseFrame(frame_out);
+}
+
+// Make sure the first packet is present before a frame can be decoded.
+TEST_F(TestBasicJitterBuffer, PacketLossWithSelectiveErrorsMissingFirstPacket) {
+ jitter_buffer_->SetDecodeErrorMode(kSelectiveErrors);
+ // Always start with a key frame.
+ packet_->frameType = kVideoFrameKey;
+ packet_->isFirstPacket = true;
+ packet_->markerBit = true;
+ packet_->seqNum = seq_num_;
+ packet_->timestamp = timestamp_;
+
+ bool retransmitted = false;
+ EXPECT_EQ(kCompleteSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ VCMEncodedFrame* frame_out = DecodeCompleteFrame();
+ CheckOutFrame(frame_out, size_, false);
+ EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
+ jitter_buffer_->ReleaseFrame(frame_out);
+
+ // An incomplete frame can only be decoded once a subsequent frame has begun
+ // to arrive. Insert packet in distant frame for this purpose.
+ packet_->frameType = kVideoFrameDelta;
+ packet_->isFirstPacket = false;
+ packet_->markerBit = false;
+ packet_->seqNum += 100;
+ packet_->timestamp += 33 * 90 * 8;
+ EXPECT_EQ(kIncomplete,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ uint32_t timestamp;
+ EXPECT_FALSE(jitter_buffer_->NextCompleteTimestamp(0, &timestamp));
+ EXPECT_FALSE(jitter_buffer_->NextMaybeIncompleteTimestamp(&timestamp));
+
+ // Insert second frame with the first packet missing. Make sure we're waiting
+ // for the key frame to be complete.
+ packet_->seqNum -= 98;
+ packet_->timestamp -= 33 * 90 * 7;
+
+ EXPECT_EQ(kIncomplete,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ EXPECT_FALSE(jitter_buffer_->NextCompleteTimestamp(0, &timestamp));
+ EXPECT_FALSE(jitter_buffer_->NextMaybeIncompleteTimestamp(&timestamp));
+
+ for (int i = 0; i < 5; ++i) {
+ packet_->seqNum++;
+ EXPECT_EQ(kIncomplete,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ EXPECT_FALSE(jitter_buffer_->NextCompleteTimestamp(0, &timestamp));
+ EXPECT_FALSE(jitter_buffer_->NextMaybeIncompleteTimestamp(&timestamp));
+ }
+
+ // Add first packet. Frame should now be decodable, but incomplete.
+ packet_->isFirstPacket = true;
+ packet_->seqNum -= 6;
+ EXPECT_EQ(kDecodableSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ EXPECT_FALSE(jitter_buffer_->NextCompleteTimestamp(0, &timestamp));
+ EXPECT_TRUE(jitter_buffer_->NextMaybeIncompleteTimestamp(&timestamp));
+
+ frame_out = DecodeIncompleteFrame();
+ CheckOutFrame(frame_out, 7 * size_, false);
+ EXPECT_EQ(kVideoFrameDelta, frame_out->FrameType());
+ jitter_buffer_->ReleaseFrame(frame_out);
+}
+
+TEST_F(TestBasicJitterBuffer, DiscontinuousStreamWhenDecodingWithErrors) {
+ // Will use one packet per frame.
+ jitter_buffer_->SetDecodeErrorMode(kWithErrors);
+ packet_->frameType = kVideoFrameKey;
+ packet_->isFirstPacket = true;
+ packet_->markerBit = true;
+ packet_->seqNum = seq_num_;
+ packet_->timestamp = timestamp_;
+ bool retransmitted = false;
+ EXPECT_EQ(kCompleteSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ uint32_t next_timestamp;
+ EXPECT_TRUE(jitter_buffer_->NextCompleteTimestamp(0, &next_timestamp));
+ EXPECT_EQ(packet_->timestamp, next_timestamp);
+ VCMEncodedFrame* frame = jitter_buffer_->ExtractAndSetDecode(next_timestamp);
+ EXPECT_TRUE(frame != NULL);
+ jitter_buffer_->ReleaseFrame(frame);
+
+ // Drop a complete frame.
+ timestamp_ += 2 * 33 * 90;
+ seq_num_ += 2;
+ packet_->frameType = kVideoFrameDelta;
+ packet_->isFirstPacket = true;
+ packet_->markerBit = false;
+ packet_->seqNum = seq_num_;
+ packet_->timestamp = timestamp_;
+ EXPECT_EQ(kDecodableSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ // Insert a packet (so the previous one will be released).
+ timestamp_ += 33 * 90;
+ seq_num_ += 2;
+ packet_->frameType = kVideoFrameDelta;
+ packet_->isFirstPacket = true;
+ packet_->markerBit = false;
+ packet_->seqNum = seq_num_;
+ packet_->timestamp = timestamp_;
+ EXPECT_EQ(kDecodableSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ EXPECT_FALSE(jitter_buffer_->NextCompleteTimestamp(0, &next_timestamp));
+ EXPECT_TRUE(jitter_buffer_->NextMaybeIncompleteTimestamp(&next_timestamp));
+ EXPECT_EQ(packet_->timestamp - 33 * 90, next_timestamp);
+}
+
+TEST_F(TestBasicJitterBuffer, PacketLoss) {
+ // Verify the statistics for missing and non-decodable packets.
+ // Insert 10 frames consisting of 4 packets each and remove one packet from
+ // each of them. The last packet is an empty (non-media) packet.
+
+ // Select a start seqNum that triggers a tricky wrap situation. The JB will
+ // only output (incomplete) frames if the next one has started to arrive.
+ // Start by inserting one key frame.
+ jitter_buffer_->SetDecodeErrorMode(kWithErrors);
+ seq_num_ = 0xffff - 4;
+ seq_num_++;
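+ // Starting near 0xffff makes the 16-bit seqnum wrap early in the test.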
+ packet_->frameType = kVideoFrameKey;
+ packet_->isFirstPacket = true;
+ packet_->markerBit = false;
+ packet_->seqNum = seq_num_;
+ packet_->timestamp = timestamp_;
+ packet_->completeNALU = kNaluStart;
+
+ bool retransmitted = false;
+ EXPECT_EQ(kDecodableSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ for (int i = 0; i < 11; ++i) {
+ webrtc::FrameType frametype = kVideoFrameDelta;
+ seq_num_++;
+ timestamp_ += 33 * 90;
+ packet_->frameType = frametype;
+ packet_->isFirstPacket = true;
+ packet_->markerBit = false;
+ packet_->seqNum = seq_num_;
+ packet_->timestamp = timestamp_;
+ packet_->completeNALU = kNaluStart;
+
+ EXPECT_EQ(kDecodableSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ VCMEncodedFrame* frame_out = DecodeCompleteFrame();
+
+ // Should not be complete.
+ EXPECT_TRUE(frame_out == NULL);
+
+ seq_num_ += 2;
+ packet_->isFirstPacket = false;
+ packet_->markerBit = true;
+ packet_->seqNum = seq_num_;
+ packet_->completeNALU = kNaluEnd;
+
+ EXPECT_EQ(jitter_buffer_->InsertPacket(*packet_, &retransmitted),
+ kDecodableSession);
+
+ // Insert an empty (non-media) packet.
+ seq_num_++;
+ packet_->isFirstPacket = false;
+ packet_->markerBit = false;
+ packet_->seqNum = seq_num_;
+ packet_->completeNALU = kNaluEnd;
+ packet_->frameType = kEmptyFrame;
+
+ EXPECT_EQ(jitter_buffer_->InsertPacket(*packet_, &retransmitted),
+ kDecodableSession);
+ frame_out = DecodeIncompleteFrame();
+
+ // One of the packets has been discarded by the jitter buffer.
+ // Last frame can't be extracted yet.
+ if (i < 10) {
+ CheckOutFrame(frame_out, size_, false);
+
+ if (i == 0) {
+ EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
+ } else {
+ EXPECT_EQ(frametype, frame_out->FrameType());
+ }
+ EXPECT_FALSE(frame_out->Complete());
+ EXPECT_FALSE(frame_out->MissingFrame());
+ }
+
+ jitter_buffer_->ReleaseFrame(frame_out);
+ }
+
+ // Insert 3 old packets and verify that we get 3 discarded packets.
+ // Match the value to the latest timestamp actually decoded.
+ timestamp_ -= 33 * 90;
+ packet_->timestamp = timestamp_ - 1000;
+
+ EXPECT_EQ(kOldPacket, jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ packet_->timestamp = timestamp_ - 500;
+
+ EXPECT_EQ(kOldPacket, jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ packet_->timestamp = timestamp_ - 100;
+
+ EXPECT_EQ(kOldPacket, jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ EXPECT_EQ(3, jitter_buffer_->num_discarded_packets());
+
+ jitter_buffer_->Flush();
+
+ // This statistic shouldn't be reset by a flush.
+ EXPECT_EQ(3, jitter_buffer_->num_discarded_packets());
+}
+
+TEST_F(TestBasicJitterBuffer, DeltaFrame100PacketsWithSeqNumWrap) {
+ seq_num_ = 0xfff0;
+ packet_->frameType = kVideoFrameKey;
+ packet_->isFirstPacket = true;
+ packet_->markerBit = false;
+ packet_->seqNum = seq_num_;
+ packet_->timestamp = timestamp_;
+
+ bool retransmitted = false;
+ EXPECT_EQ(kIncomplete,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ VCMEncodedFrame* frame_out = DecodeCompleteFrame();
+
+ EXPECT_TRUE(frame_out == NULL);
+
+ int loop = 0;
+ do {
+ seq_num_++;
+ packet_->isFirstPacket = false;
+ packet_->markerBit = false;
+ packet_->seqNum = seq_num_;
+
+ EXPECT_EQ(kIncomplete,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ frame_out = DecodeCompleteFrame();
+
+ EXPECT_TRUE(frame_out == NULL);
+
+ loop++;
+ } while (loop < 98);
+
+ seq_num_++;
+ packet_->isFirstPacket = false;
+ packet_->markerBit = true;
+ packet_->seqNum = seq_num_;
+
+ EXPECT_EQ(kCompleteSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ frame_out = DecodeCompleteFrame();
+
+ CheckOutFrame(frame_out, 100 * size_, false);
+
+ EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
+ jitter_buffer_->ReleaseFrame(frame_out);
+}
+
+TEST_F(TestBasicJitterBuffer, PacketReorderingReverseWithNegSeqNumWrap) {
+ // Insert "first" packet last seqnum.
+ seq_num_ = 10;
+ packet_->frameType = kVideoFrameKey;
+ packet_->isFirstPacket = false;
+ packet_->markerBit = true;
+ packet_->seqNum = seq_num_;
+
+ bool retransmitted = false;
+ EXPECT_EQ(kIncomplete,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ VCMEncodedFrame* frame_out = DecodeCompleteFrame();
+
+ // Should not be complete.
+ EXPECT_TRUE(frame_out == NULL);
+
+ // Insert 98 frames.
+ int loop = 0;
+ do {
+ seq_num_--;
+ packet_->isFirstPacket = false;
+ packet_->markerBit = false;
+ packet_->seqNum = seq_num_;
+
+ EXPECT_EQ(kIncomplete,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ frame_out = DecodeCompleteFrame();
+
+ EXPECT_TRUE(frame_out == NULL);
+
+ loop++;
+ } while (loop < 98);
+
+ // Insert last packet.
+ seq_num_--;
+ packet_->isFirstPacket = true;
+ packet_->markerBit = false;
+ packet_->seqNum = seq_num_;
+
+ EXPECT_EQ(kCompleteSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ frame_out = DecodeCompleteFrame();
+ CheckOutFrame(frame_out, 100 * size_, false);
+ EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
+ jitter_buffer_->ReleaseFrame(frame_out);
+}
+
+TEST_F(TestBasicJitterBuffer, TestInsertOldFrame) {
+ // ------- -------
+ // | 2 | | 1 |
+ // ------- -------
+ // t = 3000 t = 2000
+ seq_num_ = 2;
+ timestamp_ = 3000;
+ packet_->frameType = kVideoFrameKey;
+ packet_->isFirstPacket = true;
+ packet_->markerBit = true;
+ packet_->timestamp = timestamp_;
+ packet_->seqNum = seq_num_;
+
+ bool retransmitted = false;
+ EXPECT_EQ(kCompleteSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ VCMEncodedFrame* frame_out = DecodeCompleteFrame();
+ EXPECT_EQ(3000u, frame_out->TimeStamp());
+ CheckOutFrame(frame_out, size_, false);
+ EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
+ jitter_buffer_->ReleaseFrame(frame_out);
+
+ seq_num_--;
+ timestamp_ = 2000;
+ packet_->frameType = kVideoFrameDelta;
+ packet_->isFirstPacket = true;
+ packet_->markerBit = true;
+ packet_->seqNum = seq_num_;
+ packet_->timestamp = timestamp_;
+
+ EXPECT_EQ(kOldPacket, jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+}
+
+TEST_F(TestBasicJitterBuffer, TestInsertOldFrameWithSeqNumWrap) {
+ // ------- -------
+ // | 2 | | 1 |
+ // ------- -------
+ // t = 3000 t = 0xffffff00
+
+ seq_num_ = 2;
+ timestamp_ = 3000;
+ packet_->frameType = kVideoFrameKey;
+ packet_->isFirstPacket = true;
+ packet_->markerBit = true;
+ packet_->seqNum = seq_num_;
+ packet_->timestamp = timestamp_;
+
+ bool retransmitted = false;
+ EXPECT_EQ(kCompleteSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ VCMEncodedFrame* frame_out = DecodeCompleteFrame();
+ EXPECT_EQ(timestamp_, frame_out->TimeStamp());
+
+ CheckOutFrame(frame_out, size_, false);
+
+ EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
+
+ jitter_buffer_->ReleaseFrame(frame_out);
+
+ seq_num_--;
+ timestamp_ = 0xffffff00;
+ packet_->frameType = kVideoFrameDelta;
+ packet_->isFirstPacket = true;
+ packet_->markerBit = true;
+ packet_->seqNum = seq_num_;
+ packet_->timestamp = timestamp_;
+
+ // This timestamp is old.
+ EXPECT_EQ(kOldPacket, jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+}
+
+TEST_F(TestBasicJitterBuffer, TimestampWrap) {
+ // --------------- ---------------
+ // | 1 | 2 | | 3 | 4 |
+ // --------------- ---------------
+ // t = 0xffffff00 t = 33*90
+
+ timestamp_ = 0xffffff00;
+ packet_->frameType = kVideoFrameKey;
+ packet_->isFirstPacket = true;
+ packet_->markerBit = false;
+ packet_->seqNum = seq_num_;
+ packet_->timestamp = timestamp_;
+
+ bool retransmitted = false;
+ EXPECT_EQ(kIncomplete,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ VCMEncodedFrame* frame_out = DecodeCompleteFrame();
+ EXPECT_TRUE(frame_out == NULL);
+
+ seq_num_++;
+ packet_->isFirstPacket = false;
+ packet_->markerBit = true;
+ packet_->seqNum = seq_num_;
+
+ EXPECT_EQ(kCompleteSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ frame_out = DecodeCompleteFrame();
+ CheckOutFrame(frame_out, 2 * size_, false);
+ jitter_buffer_->ReleaseFrame(frame_out);
+
+ seq_num_++;
+ timestamp_ += 33 * 90;
+ packet_->frameType = kVideoFrameDelta;
+ packet_->isFirstPacket = true;
+ packet_->markerBit = false;
+ packet_->seqNum = seq_num_;
+ packet_->timestamp = timestamp_;
+
+ EXPECT_EQ(kIncomplete,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ frame_out = DecodeCompleteFrame();
+ EXPECT_TRUE(frame_out == NULL);
+
+ seq_num_++;
+ packet_->isFirstPacket = false;
+ packet_->markerBit = true;
+ packet_->seqNum = seq_num_;
+
+ EXPECT_EQ(kCompleteSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ frame_out = DecodeCompleteFrame();
+ CheckOutFrame(frame_out, 2 * size_, false);
+ EXPECT_EQ(kVideoFrameDelta, frame_out->FrameType());
+ jitter_buffer_->ReleaseFrame(frame_out);
+}
+
+TEST_F(TestBasicJitterBuffer, 2FrameWithTimestampWrap) {
+ // ------- -------
+ // | 1 | | 2 |
+ // ------- -------
+ // t = 0xffffff00 t = 2700
+
+ timestamp_ = 0xffffff00;
+ packet_->frameType = kVideoFrameKey;
+ packet_->isFirstPacket = true;
+ packet_->markerBit = true;
+ packet_->timestamp = timestamp_;
+
+ bool retransmitted = false;
+ // Insert first frame (session will be complete).
+ EXPECT_EQ(kCompleteSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ // Insert next frame.
+ seq_num_++;
+ timestamp_ = 2700;
+ packet_->frameType = kVideoFrameDelta;
+ packet_->isFirstPacket = true;
+ packet_->markerBit = true;
+ packet_->seqNum = seq_num_;
+ packet_->timestamp = timestamp_;
+
+ EXPECT_EQ(kCompleteSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ VCMEncodedFrame* frame_out = DecodeCompleteFrame();
+ EXPECT_EQ(0xffffff00, frame_out->TimeStamp());
+ CheckOutFrame(frame_out, size_, false);
+ EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
+ jitter_buffer_->ReleaseFrame(frame_out);
+
+ VCMEncodedFrame* frame_out2 = DecodeCompleteFrame();
+ EXPECT_EQ(2700u, frame_out2->TimeStamp());
+ CheckOutFrame(frame_out2, size_, false);
+ EXPECT_EQ(kVideoFrameDelta, frame_out2->FrameType());
+ jitter_buffer_->ReleaseFrame(frame_out2);
+}
+
+TEST_F(TestBasicJitterBuffer, Insert2FramesReOrderedWithTimestampWrap) {
+ // ------- -------
+ // | 2 | | 1 |
+ // ------- -------
+ // t = 2700 t = 0xffffff00
+
+ seq_num_ = 2;
+ timestamp_ = 2700;
+ packet_->frameType = kVideoFrameDelta;
+ packet_->isFirstPacket = true;
+ packet_->markerBit = true;
+ packet_->seqNum = seq_num_;
+ packet_->timestamp = timestamp_;
+
+ bool retransmitted = false;
+ EXPECT_EQ(kCompleteSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ // Insert second frame
+ seq_num_--;
+ timestamp_ = 0xffffff00;
+ packet_->frameType = kVideoFrameKey;
+ packet_->isFirstPacket = true;
+ packet_->markerBit = true;
+ packet_->seqNum = seq_num_;
+ packet_->timestamp = timestamp_;
+
+ EXPECT_EQ(kCompleteSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ VCMEncodedFrame* frame_out = DecodeCompleteFrame();
+ EXPECT_EQ(0xffffff00, frame_out->TimeStamp());
+ CheckOutFrame(frame_out, size_, false);
+ EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
+ jitter_buffer_->ReleaseFrame(frame_out);
+
+ VCMEncodedFrame* frame_out2 = DecodeCompleteFrame();
+ EXPECT_EQ(2700u, frame_out2->TimeStamp());
+ CheckOutFrame(frame_out2, size_, false);
+ EXPECT_EQ(kVideoFrameDelta, frame_out2->FrameType());
+ jitter_buffer_->ReleaseFrame(frame_out2);
+}
+
+TEST_F(TestBasicJitterBuffer, DeltaFrameWithMoreThanMaxNumberOfPackets) {
+ int loop = 0;
+ bool retransmitted = false;
+ // Insert kMaxPacketsInSession packets into the frame.
+ do {
+ seq_num_++;
+ packet_->isFirstPacket = false;
+ packet_->markerBit = false;
+ packet_->seqNum = seq_num_;
+
+ EXPECT_EQ(kIncomplete,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ loop++;
+ } while (loop < kMaxPacketsInSession);
+
+ // Max number of packets inserted.
+ // Insert one more packet.
+ seq_num_++;
+ packet_->isFirstPacket = false;
+ packet_->markerBit = true;
+ packet_->seqNum = seq_num_;
+
+ // Insert the packet -> frame recycled.
+ EXPECT_EQ(kSizeError, jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ EXPECT_TRUE(NULL == DecodeCompleteFrame());
+}
+
+TEST_F(TestBasicJitterBuffer, ExceedNumOfFrameWithSeqNumWrap) {
+ // Test filling the JB with more than the max number of frames (50 delta
+ // frames + 51 key frames) with a wrap in seq_num_.
+ //
+ // --------------------------------------------------------------
+ // | 65485 | 65486 | 65487 | .... | 65535 | 0 | 1 | 2 | .....| 50 |
+ // --------------------------------------------------------------
+ // |<-----------delta frames------------->|<------key frames----->|
+
+ // Make sure the jitter buffer doesn't request a key frame after too many
+ // non-decodable frames.
+ jitter_buffer_->SetNackMode(kNack, -1, -1);
+ jitter_buffer_->SetNackSettings(kMaxNumberOfFrames, kMaxNumberOfFrames, 0);
+
+ int loop = 0;
+ seq_num_ = 65485;
+ uint32_t first_key_frame_timestamp = 0;
+ bool retransmitted = false;
+ // Insert kMaxNumberOfFrames frames.
+ do {
+ timestamp_ += 33 * 90;
+ seq_num_++;
+ packet_->isFirstPacket = true;
+ packet_->markerBit = true;
+ packet_->seqNum = seq_num_;
+ packet_->timestamp = timestamp_;
+
+ if (loop == 50) {
+ first_key_frame_timestamp = packet_->timestamp;
+ packet_->frameType = kVideoFrameKey;
+ }
+
+ // Insert frame.
+ EXPECT_EQ(kCompleteSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ loop++;
+ } while (loop < kMaxNumberOfFrames);
+
+ // Max number of frames inserted.
+
+ // Insert one more frame.
+ timestamp_ += 33 * 90;
+ seq_num_++;
+ packet_->isFirstPacket = true;
+ packet_->markerBit = true;
+ packet_->seqNum = seq_num_;
+ packet_->timestamp = timestamp_;
+
+ // Now there is no free frame; frames will be recycled until the first key
+ // frame.
+ EXPECT_EQ(kFlushIndicator,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ VCMEncodedFrame* frame_out = DecodeCompleteFrame();
+ EXPECT_EQ(first_key_frame_timestamp, frame_out->TimeStamp());
+ CheckOutFrame(frame_out, size_, false);
+ EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
+ jitter_buffer_->ReleaseFrame(frame_out);
+}
+
+TEST_F(TestBasicJitterBuffer, EmptyLastFrame) {
+ jitter_buffer_->SetDecodeErrorMode(kWithErrors);
+ seq_num_ = 3;
+  // Insert one empty packet per frame. The JB should never return the last
+  // timestamp inserted, and should only return empty frames in the presence
+  // of subsequent frames.
+ int maxSize = 1000;
+ bool retransmitted = false;
+ for (int i = 0; i < maxSize + 10; i++) {
+ timestamp_ += 33 * 90;
+ seq_num_++;
+ packet_->isFirstPacket = false;
+ packet_->markerBit = false;
+ packet_->seqNum = seq_num_;
+ packet_->timestamp = timestamp_;
+ packet_->frameType = kEmptyFrame;
+
+ EXPECT_EQ(kNoError, jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ VCMEncodedFrame* testFrame = DecodeIncompleteFrame();
+ // Timestamp should never be the last TS inserted.
+ if (testFrame != NULL) {
+ EXPECT_TRUE(testFrame->TimeStamp() < timestamp_);
+ jitter_buffer_->ReleaseFrame(testFrame);
+ }
+ }
+}
+
+TEST_F(TestBasicJitterBuffer, H264IncompleteNalu) {
+ jitter_buffer_->SetNackMode(kNoNack, -1, -1);
+ jitter_buffer_->SetDecodeErrorMode(kWithErrors);
+ ++seq_num_;
+ timestamp_ += 33 * 90;
+ int insertedLength = 0;
+ packet_->seqNum = seq_num_;
+ packet_->timestamp = timestamp_;
+ packet_->frameType = kVideoFrameKey;
+ packet_->isFirstPacket = true;
+ packet_->completeNALU = kNaluStart;
+ packet_->markerBit = false;
+ bool retransmitted = false;
+
+ EXPECT_EQ(kDecodableSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ seq_num_ += 2; // Skip one packet.
+ packet_->seqNum = seq_num_;
+ packet_->frameType = kVideoFrameKey;
+ packet_->isFirstPacket = false;
+ packet_->completeNALU = kNaluIncomplete;
+ packet_->markerBit = false;
+
+ EXPECT_EQ(kDecodableSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ seq_num_++;
+ packet_->seqNum = seq_num_;
+ packet_->frameType = kVideoFrameKey;
+ packet_->isFirstPacket = false;
+ packet_->completeNALU = kNaluEnd;
+ packet_->markerBit = false;
+
+ EXPECT_EQ(kDecodableSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ seq_num_++;
+ packet_->seqNum = seq_num_;
+ packet_->completeNALU = kNaluComplete;
+ packet_->markerBit = true; // Last packet.
+ EXPECT_EQ(kDecodableSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+  // The JB will only output (incomplete) frames if a packet belonging to a
+  // subsequent frame was already inserted. Insert one packet of a subsequent
+  // frame. Place a high timestamp so the JB always has a next frame
+  // (otherwise, for every inserted frame we would need to take care of the
+  // next frame as well).
+ packet_->seqNum = 1;
+ packet_->timestamp = timestamp_ + 33 * 90 * 10;
+ packet_->frameType = kVideoFrameDelta;
+ packet_->isFirstPacket = false;
+ packet_->completeNALU = kNaluStart;
+ packet_->markerBit = false;
+
+ EXPECT_EQ(kDecodableSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ VCMEncodedFrame* frame_out = DecodeIncompleteFrame();
+
+  // We can decode everything from a NALU until a packet has been lost.
+  // Thus we can decode the first packet of the first NALU and the second
+  // NALU, which consists of one packet.
+ CheckOutFrame(frame_out, packet_->sizeBytes * 2, false);
+ jitter_buffer_->ReleaseFrame(frame_out);
+
+ // Test reordered start frame + 1 lost.
+ seq_num_ += 2; // Re-order 1 frame.
+ timestamp_ += 33 * 90;
+ insertedLength = 0;
+
+ packet_->seqNum = seq_num_;
+ packet_->timestamp = timestamp_;
+ packet_->frameType = kVideoFrameKey;
+ packet_->isFirstPacket = false;
+ packet_->completeNALU = kNaluEnd;
+ packet_->markerBit = false;
+ EXPECT_EQ(kDecodableSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ insertedLength += packet_->sizeBytes; // This packet should be decoded.
+ seq_num_--;
+ packet_->seqNum = seq_num_;
+ packet_->timestamp = timestamp_;
+ packet_->frameType = kVideoFrameKey;
+ packet_->isFirstPacket = true;
+ packet_->completeNALU = kNaluStart;
+ packet_->markerBit = false;
+
+ EXPECT_EQ(kDecodableSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ insertedLength += packet_->sizeBytes; // This packet should be decoded.
+
+ seq_num_ += 3; // One packet drop.
+ packet_->seqNum = seq_num_;
+ packet_->timestamp = timestamp_;
+ packet_->frameType = kVideoFrameKey;
+ packet_->isFirstPacket = false;
+ packet_->completeNALU = kNaluComplete;
+ packet_->markerBit = false;
+ EXPECT_EQ(kDecodableSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ insertedLength += packet_->sizeBytes; // This packet should be decoded.
+ seq_num_++;
+ packet_->seqNum = seq_num_;
+ packet_->timestamp = timestamp_;
+ packet_->frameType = kVideoFrameKey;
+ packet_->isFirstPacket = false;
+ packet_->completeNALU = kNaluStart;
+ packet_->markerBit = false;
+ EXPECT_EQ(kDecodableSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ // This packet should be decoded since it's the beginning of a NAL.
+ insertedLength += packet_->sizeBytes;
+
+ seq_num_ += 2;
+ packet_->seqNum = seq_num_;
+ packet_->timestamp = timestamp_;
+ packet_->frameType = kVideoFrameKey;
+ packet_->isFirstPacket = false;
+ packet_->completeNALU = kNaluEnd;
+ packet_->markerBit = true;
+ EXPECT_EQ(kDecodableSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+  // This packet should not be decoded because it is an incomplete NALU when
+  // it is the last packet of the frame.
+ frame_out = DecodeIncompleteFrame();
+ // Only last NALU is complete.
+ CheckOutFrame(frame_out, insertedLength, false);
+ jitter_buffer_->ReleaseFrame(frame_out);
+
+ // Test to insert empty packet.
+ seq_num_++;
+ timestamp_ += 33 * 90;
+ VCMPacket emptypacket(data_, 0, seq_num_, timestamp_, true);
+ emptypacket.seqNum = seq_num_;
+ emptypacket.timestamp = timestamp_;
+ emptypacket.frameType = kVideoFrameKey;
+ emptypacket.isFirstPacket = true;
+ emptypacket.completeNALU = kNaluComplete;
+ emptypacket.markerBit = true;
+ EXPECT_EQ(kCompleteSession,
+ jitter_buffer_->InsertPacket(emptypacket, &retransmitted));
+
+ // Will be sent to the decoder, as a packet belonging to a subsequent frame
+ // has arrived.
+ frame_out = DecodeIncompleteFrame();
+ EXPECT_TRUE(frame_out != NULL);
+ jitter_buffer_->ReleaseFrame(frame_out);
+
+ // Test that a frame can include an empty packet.
+ seq_num_++;
+ timestamp_ += 33 * 90;
+
+ packet_->seqNum = seq_num_;
+ packet_->timestamp = timestamp_;
+ packet_->frameType = kVideoFrameKey;
+ packet_->isFirstPacket = true;
+ packet_->completeNALU = kNaluComplete;
+ packet_->markerBit = false;
+
+ EXPECT_EQ(kDecodableSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ seq_num_++;
+ emptypacket.seqNum = seq_num_;
+ emptypacket.timestamp = timestamp_;
+ emptypacket.frameType = kVideoFrameKey;
+ emptypacket.isFirstPacket = true;
+ emptypacket.completeNALU = kNaluComplete;
+ emptypacket.markerBit = true;
+ EXPECT_EQ(kCompleteSession,
+ jitter_buffer_->InsertPacket(emptypacket, &retransmitted));
+
+ frame_out = DecodeCompleteFrame();
+  // The frame contains only the media packet's payload; the empty packet
+  // adds nothing.
+ CheckOutFrame(frame_out, packet_->sizeBytes, false);
+ jitter_buffer_->ReleaseFrame(frame_out);
+}
+
+TEST_F(TestBasicJitterBuffer, NextFrameWhenIncomplete) {
+  // Test that we cannot get incomplete frames from the JB if we haven't
+  // received the marker bit, unless we have received a packet from a later
+  // timestamp.
+ jitter_buffer_->SetDecodeErrorMode(kWithErrors);
+ // Start with a complete key frame - insert and decode.
+ packet_->frameType = kVideoFrameKey;
+ packet_->isFirstPacket = true;
+ packet_->markerBit = true;
+ bool retransmitted = false;
+
+ EXPECT_EQ(kCompleteSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+ VCMEncodedFrame* frame_out = DecodeCompleteFrame();
+ EXPECT_TRUE(frame_out != NULL);
+ jitter_buffer_->ReleaseFrame(frame_out);
+
+ packet_->seqNum += 2;
+ packet_->timestamp += 33 * 90;
+ packet_->frameType = kVideoFrameDelta;
+ packet_->isFirstPacket = false;
+ packet_->markerBit = false;
+
+ EXPECT_EQ(kDecodableSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ frame_out = DecodeIncompleteFrame();
+ EXPECT_TRUE(frame_out == NULL);
+
+ packet_->seqNum += 2;
+ packet_->timestamp += 33 * 90;
+ packet_->isFirstPacket = true;
+
+ EXPECT_EQ(kDecodableSession,
+ jitter_buffer_->InsertPacket(*packet_, &retransmitted));
+
+ frame_out = DecodeIncompleteFrame();
+ CheckOutFrame(frame_out, packet_->sizeBytes, false);
+ jitter_buffer_->ReleaseFrame(frame_out);
+}
+
+TEST_F(TestRunningJitterBuffer, Full) {
+  // Make sure the jitter buffer doesn't request a key frame after too many
+  // non-decodable frames.
+ jitter_buffer_->SetNackMode(kNack, -1, -1);
+ jitter_buffer_->SetNackSettings(kMaxNumberOfFrames, kMaxNumberOfFrames, 0);
+ // Insert a key frame and decode it.
+ EXPECT_GE(InsertFrame(kVideoFrameKey), kNoError);
+ EXPECT_TRUE(DecodeCompleteFrame());
+ DropFrame(1);
+ // Fill the jitter buffer.
+ EXPECT_GE(InsertFrames(kMaxNumberOfFrames, kVideoFrameDelta), kNoError);
+ // Make sure we can't decode these frames.
+ EXPECT_FALSE(DecodeCompleteFrame());
+ // This frame will make the jitter buffer recycle frames until a key frame.
+ // Since none is found it will have to wait until the next key frame before
+ // decoding.
+ EXPECT_EQ(kFlushIndicator, InsertFrame(kVideoFrameDelta));
+ EXPECT_FALSE(DecodeCompleteFrame());
+}
+
+TEST_F(TestRunningJitterBuffer, EmptyPackets) {
+ // Make sure a frame can get complete even though empty packets are missing.
+ stream_generator_->GenerateFrame(kVideoFrameKey, 3, 3,
+ clock_->TimeInMilliseconds());
+ bool request_key_frame = false;
+ // Insert empty packet.
+ EXPECT_EQ(kNoError, InsertPacketAndPop(4));
+ EXPECT_FALSE(request_key_frame);
+ // Insert 3 media packets.
+ EXPECT_EQ(kIncomplete, InsertPacketAndPop(0));
+ EXPECT_FALSE(request_key_frame);
+ EXPECT_EQ(kIncomplete, InsertPacketAndPop(0));
+ EXPECT_FALSE(request_key_frame);
+ EXPECT_EQ(kCompleteSession, InsertPacketAndPop(0));
+ EXPECT_FALSE(request_key_frame);
+ // Insert empty packet.
+ EXPECT_EQ(kCompleteSession, InsertPacketAndPop(0));
+ EXPECT_FALSE(request_key_frame);
+}
+
+TEST_F(TestRunningJitterBuffer, StatisticsTest) {
+ FrameCounts frame_stats(jitter_buffer_->FrameStatistics());
+ EXPECT_EQ(0, frame_stats.delta_frames);
+ EXPECT_EQ(0, frame_stats.key_frames);
+
+ uint32_t framerate = 0;
+ uint32_t bitrate = 0;
+ jitter_buffer_->IncomingRateStatistics(&framerate, &bitrate);
+ EXPECT_EQ(0u, framerate);
+ EXPECT_EQ(0u, bitrate);
+
+ // Insert a couple of key and delta frames.
+ InsertFrame(kVideoFrameKey);
+ InsertFrame(kVideoFrameDelta);
+ InsertFrame(kVideoFrameDelta);
+ InsertFrame(kVideoFrameKey);
+ InsertFrame(kVideoFrameDelta);
+  // Decode some of them to make sure the statistics don't depend on frames
+  // being decoded.
+ EXPECT_TRUE(DecodeCompleteFrame());
+ EXPECT_TRUE(DecodeCompleteFrame());
+ frame_stats = jitter_buffer_->FrameStatistics();
+ EXPECT_EQ(3, frame_stats.delta_frames);
+ EXPECT_EQ(2, frame_stats.key_frames);
+
+ // Insert 20 more frames to get estimates of bitrate and framerate over
+ // 1 second.
+ for (int i = 0; i < 20; ++i) {
+ InsertFrame(kVideoFrameDelta);
+ }
+ jitter_buffer_->IncomingRateStatistics(&framerate, &bitrate);
+ // TODO(holmer): The current implementation returns the average of the last
+ // two framerate calculations, which is why it takes two calls to reach the
+ // actual framerate. This should be fixed.
+ EXPECT_EQ(kDefaultFrameRate / 2u, framerate);
+ EXPECT_EQ(kDefaultBitrateKbps, bitrate);
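+  // (The first calculation averages the initial 0 estimate with the current
+  // framerate, hence the kDefaultFrameRate / 2 expectation above.)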
+ // Insert 25 more frames to get estimates of bitrate and framerate over
+ // 2 seconds.
+ for (int i = 0; i < 25; ++i) {
+ InsertFrame(kVideoFrameDelta);
+ }
+ jitter_buffer_->IncomingRateStatistics(&framerate, &bitrate);
+ EXPECT_EQ(kDefaultFrameRate, framerate);
+ EXPECT_EQ(kDefaultBitrateKbps, bitrate);
+}
+
+TEST_F(TestRunningJitterBuffer, SkipToKeyFrame) {
+ // Insert delta frames.
+ EXPECT_GE(InsertFrames(5, kVideoFrameDelta), kNoError);
+ // Can't decode without a key frame.
+ EXPECT_FALSE(DecodeCompleteFrame());
+ InsertFrame(kVideoFrameKey);
+ // Skip to the next key frame.
+ EXPECT_TRUE(DecodeCompleteFrame());
+}
+
+TEST_F(TestRunningJitterBuffer, DontSkipToKeyFrameIfDecodable) {
+ InsertFrame(kVideoFrameKey);
+ EXPECT_TRUE(DecodeCompleteFrame());
+ const int kNumDeltaFrames = 5;
+ EXPECT_GE(InsertFrames(kNumDeltaFrames, kVideoFrameDelta), kNoError);
+ InsertFrame(kVideoFrameKey);
+ for (int i = 0; i < kNumDeltaFrames + 1; ++i) {
+ EXPECT_TRUE(DecodeCompleteFrame());
+ }
+}
+
+TEST_F(TestRunningJitterBuffer, KeyDeltaKeyDelta) {
+ InsertFrame(kVideoFrameKey);
+ EXPECT_TRUE(DecodeCompleteFrame());
+ const int kNumDeltaFrames = 5;
+ EXPECT_GE(InsertFrames(kNumDeltaFrames, kVideoFrameDelta), kNoError);
+ InsertFrame(kVideoFrameKey);
+ EXPECT_GE(InsertFrames(kNumDeltaFrames, kVideoFrameDelta), kNoError);
+ InsertFrame(kVideoFrameKey);
+ for (int i = 0; i < 2 * (kNumDeltaFrames + 1); ++i) {
+ EXPECT_TRUE(DecodeCompleteFrame());
+ }
+}
+
+TEST_F(TestRunningJitterBuffer, TwoPacketsNonContinuous) {
+ InsertFrame(kVideoFrameKey);
+ EXPECT_TRUE(DecodeCompleteFrame());
+ stream_generator_->GenerateFrame(kVideoFrameDelta, 1, 0,
+ clock_->TimeInMilliseconds());
+ clock_->AdvanceTimeMilliseconds(kDefaultFramePeriodMs);
+ stream_generator_->GenerateFrame(kVideoFrameDelta, 2, 0,
+ clock_->TimeInMilliseconds());
+ EXPECT_EQ(kIncomplete, InsertPacketAndPop(1));
+ EXPECT_EQ(kCompleteSession, InsertPacketAndPop(1));
+ EXPECT_FALSE(DecodeCompleteFrame());
+ EXPECT_EQ(kCompleteSession, InsertPacketAndPop(0));
+ EXPECT_TRUE(DecodeCompleteFrame());
+ EXPECT_TRUE(DecodeCompleteFrame());
+}
+
+TEST_F(TestJitterBufferNack, EmptyPackets) {
+  // Make sure empty packets don't clog the jitter buffer.
+ jitter_buffer_->SetNackMode(kNack, media_optimization::kLowRttNackMs, -1);
+ EXPECT_GE(InsertFrames(kMaxNumberOfFrames, kEmptyFrame), kNoError);
+ InsertFrame(kVideoFrameKey);
+ EXPECT_TRUE(DecodeCompleteFrame());
+}
+
+TEST_F(TestJitterBufferNack, NackTooOldPackets) {
+ // Insert a key frame and decode it.
+ EXPECT_GE(InsertFrame(kVideoFrameKey), kNoError);
+ EXPECT_TRUE(DecodeCompleteFrame());
+
+  // Drop one frame and insert |oldest_packet_to_nack_| + 1 frames to trigger
+  // NACKing a packet that is too old.
+ DropFrame(1);
+ // Insert a frame which should trigger a recycle until the next key frame.
+ EXPECT_EQ(kFlushIndicator,
+ InsertFrames(oldest_packet_to_nack_ + 1, kVideoFrameDelta));
+ EXPECT_FALSE(DecodeCompleteFrame());
+
+ bool request_key_frame = false;
+ std::vector<uint16_t> nack_list =
+ jitter_buffer_->GetNackList(&request_key_frame);
+ // No key frame will be requested since the jitter buffer is empty.
+ EXPECT_FALSE(request_key_frame);
+ EXPECT_EQ(0u, nack_list.size());
+
+ EXPECT_GE(InsertFrame(kVideoFrameDelta), kNoError);
+ // Waiting for a key frame.
+ EXPECT_FALSE(DecodeCompleteFrame());
+ EXPECT_FALSE(DecodeIncompleteFrame());
+
+ // The next complete continuous frame isn't a key frame, but we're waiting
+ // for one.
+ EXPECT_FALSE(DecodeCompleteFrame());
+ EXPECT_GE(InsertFrame(kVideoFrameKey), kNoError);
+ // Skipping ahead to the key frame.
+ EXPECT_TRUE(DecodeCompleteFrame());
+}
+
+TEST_F(TestJitterBufferNack, NackLargeJitterBuffer) {
+ // Insert a key frame and decode it.
+ EXPECT_GE(InsertFrame(kVideoFrameKey), kNoError);
+ EXPECT_TRUE(DecodeCompleteFrame());
+
+ // Insert a frame which should trigger a recycle until the next key frame.
+ EXPECT_GE(InsertFrames(oldest_packet_to_nack_, kVideoFrameDelta), kNoError);
+
+ bool request_key_frame = false;
+ std::vector<uint16_t> nack_list =
+ jitter_buffer_->GetNackList(&request_key_frame);
+ // Verify that the jitter buffer does not request a key frame.
+ EXPECT_FALSE(request_key_frame);
+ // Verify that no packets are NACKed.
+ EXPECT_EQ(0u, nack_list.size());
+ // Verify that we can decode the next frame.
+ EXPECT_TRUE(DecodeCompleteFrame());
+}
+
+TEST_F(TestJitterBufferNack, NackListFull) {
+ // Insert a key frame and decode it.
+ EXPECT_GE(InsertFrame(kVideoFrameKey), kNoError);
+ EXPECT_TRUE(DecodeCompleteFrame());
+
+  // Drop |max_nack_list_size_| + 1 frames to overflow the NACK list.
+ DropFrame(max_nack_list_size_ + 1);
+ // Insert a frame which should trigger a recycle until the next key frame.
+ EXPECT_EQ(kFlushIndicator, InsertFrame(kVideoFrameDelta));
+ EXPECT_FALSE(DecodeCompleteFrame());
+
+ bool request_key_frame = false;
+ jitter_buffer_->GetNackList(&request_key_frame);
+ // The jitter buffer is empty, so we won't request key frames until we get a
+ // packet.
+ EXPECT_FALSE(request_key_frame);
+
+ EXPECT_GE(InsertFrame(kVideoFrameDelta), kNoError);
+  // Now that there is a packet in the jitter buffer again, a key frame is
+  // requested since the inserted frame is not a key frame.
+  jitter_buffer_->GetNackList(&request_key_frame);
+ EXPECT_TRUE(request_key_frame);
+ // The next complete continuous frame isn't a key frame, but we're waiting
+ // for one.
+ EXPECT_FALSE(DecodeCompleteFrame());
+ EXPECT_FALSE(DecodeIncompleteFrame());
+ EXPECT_GE(InsertFrame(kVideoFrameKey), kNoError);
+ // Skipping ahead to the key frame.
+ EXPECT_TRUE(DecodeCompleteFrame());
+}
+
+TEST_F(TestJitterBufferNack, NoNackListReturnedBeforeFirstDecode) {
+ DropFrame(10);
+ // Insert a frame and try to generate a NACK list. Shouldn't get one.
+ EXPECT_GE(InsertFrame(kVideoFrameDelta), kNoError);
+ bool request_key_frame = false;
+ std::vector<uint16_t> nack_list =
+ jitter_buffer_->GetNackList(&request_key_frame);
+ // No list generated, and a key frame request is signaled.
+ EXPECT_EQ(0u, nack_list.size());
+ EXPECT_TRUE(request_key_frame);
+}
+
+TEST_F(TestJitterBufferNack, NackListBuiltBeforeFirstDecode) {
+ stream_generator_->Init(0, clock_->TimeInMilliseconds());
+ InsertFrame(kVideoFrameKey);
+ stream_generator_->GenerateFrame(kVideoFrameDelta, 2, 0,
+ clock_->TimeInMilliseconds());
+ stream_generator_->NextPacket(NULL); // Drop packet.
+ EXPECT_EQ(kIncomplete, InsertPacketAndPop(0));
+ EXPECT_TRUE(DecodeCompleteFrame());
+ bool extended = false;
+ std::vector<uint16_t> nack_list = jitter_buffer_->GetNackList(&extended);
+ EXPECT_EQ(1u, nack_list.size());
+}
+
+TEST_F(TestJitterBufferNack, VerifyRetransmittedFlag) {
+ stream_generator_->Init(0, clock_->TimeInMilliseconds());
+ stream_generator_->GenerateFrame(kVideoFrameKey, 3, 0,
+ clock_->TimeInMilliseconds());
+ VCMPacket packet;
+ stream_generator_->PopPacket(&packet, 0);
+ bool retransmitted = false;
+ EXPECT_EQ(kIncomplete, jitter_buffer_->InsertPacket(packet, &retransmitted));
+ EXPECT_FALSE(retransmitted);
+ // Drop second packet.
+ stream_generator_->PopPacket(&packet, 1);
+ EXPECT_EQ(kIncomplete, jitter_buffer_->InsertPacket(packet, &retransmitted));
+ EXPECT_FALSE(retransmitted);
+ EXPECT_FALSE(DecodeCompleteFrame());
+ bool extended = false;
+ std::vector<uint16_t> nack_list = jitter_buffer_->GetNackList(&extended);
+ EXPECT_EQ(1u, nack_list.size());
+ stream_generator_->PopPacket(&packet, 0);
+ EXPECT_EQ(packet.seqNum, nack_list[0]);
+ EXPECT_EQ(kCompleteSession,
+ jitter_buffer_->InsertPacket(packet, &retransmitted));
+ EXPECT_TRUE(retransmitted);
+ EXPECT_TRUE(DecodeCompleteFrame());
+}
+
+TEST_F(TestJitterBufferNack, UseNackToRecoverFirstKeyFrame) {
+ stream_generator_->Init(0, clock_->TimeInMilliseconds());
+ stream_generator_->GenerateFrame(kVideoFrameKey, 3, 0,
+ clock_->TimeInMilliseconds());
+ EXPECT_EQ(kIncomplete, InsertPacketAndPop(0));
+ // Drop second packet.
+ EXPECT_EQ(kIncomplete, InsertPacketAndPop(1));
+ EXPECT_FALSE(DecodeCompleteFrame());
+ bool extended = false;
+ std::vector<uint16_t> nack_list = jitter_buffer_->GetNackList(&extended);
+ EXPECT_EQ(1u, nack_list.size());
+ VCMPacket packet;
+ stream_generator_->GetPacket(&packet, 0);
+ EXPECT_EQ(packet.seqNum, nack_list[0]);
+}
+
+TEST_F(TestJitterBufferNack, UseNackToRecoverFirstKeyFrameSecondInQueue) {
+ VCMPacket packet;
+ stream_generator_->Init(0, clock_->TimeInMilliseconds());
+ // First frame is delta.
+ stream_generator_->GenerateFrame(kVideoFrameDelta, 3, 0,
+ clock_->TimeInMilliseconds());
+ EXPECT_EQ(kIncomplete, InsertPacketAndPop(0));
+ // Drop second packet in frame.
+ ASSERT_TRUE(stream_generator_->PopPacket(&packet, 0));
+ EXPECT_EQ(kIncomplete, InsertPacketAndPop(0));
+ // Second frame is key.
+ stream_generator_->GenerateFrame(kVideoFrameKey, 3, 0,
+ clock_->TimeInMilliseconds() + 10);
+ EXPECT_EQ(kIncomplete, InsertPacketAndPop(0));
+ // Drop second packet in frame.
+ EXPECT_EQ(kIncomplete, InsertPacketAndPop(1));
+ EXPECT_FALSE(DecodeCompleteFrame());
+ bool extended = false;
+ std::vector<uint16_t> nack_list = jitter_buffer_->GetNackList(&extended);
+ EXPECT_EQ(1u, nack_list.size());
+ stream_generator_->GetPacket(&packet, 0);
+ EXPECT_EQ(packet.seqNum, nack_list[0]);
+}
+
+TEST_F(TestJitterBufferNack, NormalOperation) {
+ EXPECT_EQ(kNack, jitter_buffer_->nack_mode());
+ jitter_buffer_->SetDecodeErrorMode(kWithErrors);
+
+ EXPECT_GE(InsertFrame(kVideoFrameKey), kNoError);
+ EXPECT_TRUE(DecodeIncompleteFrame());
+
+ // ----------------------------------------------------------------
+ // | 1 | 2 | .. | 8 | 9 | x | 11 | 12 | .. | 19 | x | 21 | .. | 100 |
+ // ----------------------------------------------------------------
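+  // Every 10th sequence number is dropped below, so the expected NACK list
+  // is {10, 20, ..., 90}.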
+ stream_generator_->GenerateFrame(kVideoFrameKey, 100, 0,
+ clock_->TimeInMilliseconds());
+ clock_->AdvanceTimeMilliseconds(kDefaultFramePeriodMs);
+ EXPECT_EQ(kDecodableSession, InsertPacketAndPop(0));
+ // Verify that the frame is incomplete.
+ EXPECT_FALSE(DecodeCompleteFrame());
+ while (stream_generator_->PacketsRemaining() > 1) {
+ if (stream_generator_->NextSequenceNumber() % 10 != 0) {
+ EXPECT_EQ(kDecodableSession, InsertPacketAndPop(0));
+ } else {
+ stream_generator_->NextPacket(NULL); // Drop packet
+ }
+ }
+ EXPECT_EQ(kDecodableSession, InsertPacketAndPop(0));
+ EXPECT_EQ(0, stream_generator_->PacketsRemaining());
+ EXPECT_FALSE(DecodeCompleteFrame());
+ EXPECT_FALSE(DecodeIncompleteFrame());
+ bool request_key_frame = false;
+ std::vector<uint16_t> nack_list =
+ jitter_buffer_->GetNackList(&request_key_frame);
+ // Verify the NACK list.
+ const size_t kExpectedNackSize = 9;
+ ASSERT_EQ(kExpectedNackSize, nack_list.size());
+ for (size_t i = 0; i < nack_list.size(); ++i)
+ EXPECT_EQ((1 + i) * 10, nack_list[i]);
+}
+
+TEST_F(TestJitterBufferNack, NormalOperationWrap) {
+ bool request_key_frame = false;
+ // ------- ------------------------------------------------------------
+ // | 65532 | | 65533 | 65534 | 65535 | x | 1 | .. | 9 | x | 11 |.....| 96 |
+ // ------- ------------------------------------------------------------
+ stream_generator_->Init(65532, clock_->TimeInMilliseconds());
+ InsertFrame(kVideoFrameKey);
+ EXPECT_FALSE(request_key_frame);
+ EXPECT_TRUE(DecodeCompleteFrame());
+ stream_generator_->GenerateFrame(kVideoFrameDelta, 100, 0,
+ clock_->TimeInMilliseconds());
+ EXPECT_EQ(kIncomplete, InsertPacketAndPop(0));
+ while (stream_generator_->PacketsRemaining() > 1) {
+ if (stream_generator_->NextSequenceNumber() % 10 != 0) {
+ EXPECT_EQ(kIncomplete, InsertPacketAndPop(0));
+ EXPECT_FALSE(request_key_frame);
+ } else {
+ stream_generator_->NextPacket(NULL); // Drop packet
+ }
+ }
+ EXPECT_EQ(kIncomplete, InsertPacketAndPop(0));
+ EXPECT_FALSE(request_key_frame);
+ EXPECT_EQ(0, stream_generator_->PacketsRemaining());
+ EXPECT_FALSE(DecodeCompleteFrame());
+ EXPECT_FALSE(DecodeCompleteFrame());
+ bool extended = false;
+ std::vector<uint16_t> nack_list = jitter_buffer_->GetNackList(&extended);
+ // Verify the NACK list.
+ const size_t kExpectedNackSize = 10;
+ ASSERT_EQ(kExpectedNackSize, nack_list.size());
+ for (size_t i = 0; i < nack_list.size(); ++i)
+ EXPECT_EQ(i * 10, nack_list[i]);
+}
+
+TEST_F(TestJitterBufferNack, NormalOperationWrap2) {
+ bool request_key_frame = false;
+ // -----------------------------------
+ // | 65532 | 65533 | 65534 | x | 0 | 1 |
+ // -----------------------------------
+ stream_generator_->Init(65532, clock_->TimeInMilliseconds());
+ InsertFrame(kVideoFrameKey);
+ EXPECT_FALSE(request_key_frame);
+ EXPECT_TRUE(DecodeCompleteFrame());
+ stream_generator_->GenerateFrame(kVideoFrameDelta, 1, 0,
+ clock_->TimeInMilliseconds());
+ clock_->AdvanceTimeMilliseconds(kDefaultFramePeriodMs);
+ for (int i = 0; i < 5; ++i) {
+ if (stream_generator_->NextSequenceNumber() != 65535) {
+ EXPECT_EQ(kCompleteSession, InsertPacketAndPop(0));
+ EXPECT_FALSE(request_key_frame);
+ } else {
+ stream_generator_->NextPacket(NULL); // Drop packet
+ }
+ stream_generator_->GenerateFrame(kVideoFrameDelta, 1, 0,
+ clock_->TimeInMilliseconds());
+ clock_->AdvanceTimeMilliseconds(kDefaultFramePeriodMs);
+ }
+ EXPECT_EQ(kCompleteSession, InsertPacketAndPop(0));
+ EXPECT_FALSE(request_key_frame);
+ bool extended = false;
+ std::vector<uint16_t> nack_list = jitter_buffer_->GetNackList(&extended);
+ // Verify the NACK list.
+ ASSERT_EQ(1u, nack_list.size());
+ EXPECT_EQ(65535, nack_list[0]);
+}
+
+TEST_F(TestJitterBufferNack, ResetByFutureKeyFrameDoesntError) {
+ stream_generator_->Init(0, clock_->TimeInMilliseconds());
+ InsertFrame(kVideoFrameKey);
+ EXPECT_TRUE(DecodeCompleteFrame());
+ bool extended = false;
+ std::vector<uint16_t> nack_list = jitter_buffer_->GetNackList(&extended);
+ EXPECT_EQ(0u, nack_list.size());
+
+  // Far-into-the-future video frame, could be caused by resetting the encoder
+  // or otherwise restarting. This should not fail with an error when the
+  // packet is a key frame, even if the whole NACK list needs to be flushed.
+ stream_generator_->Init(10000, clock_->TimeInMilliseconds());
+ clock_->AdvanceTimeMilliseconds(kDefaultFramePeriodMs);
+ InsertFrame(kVideoFrameKey);
+ EXPECT_TRUE(DecodeCompleteFrame());
+ nack_list = jitter_buffer_->GetNackList(&extended);
+ EXPECT_EQ(0u, nack_list.size());
+
+ // Stream should be decodable from this point.
+ clock_->AdvanceTimeMilliseconds(kDefaultFramePeriodMs);
+ InsertFrame(kVideoFrameDelta);
+ EXPECT_TRUE(DecodeCompleteFrame());
+ nack_list = jitter_buffer_->GetNackList(&extended);
+ EXPECT_EQ(0u, nack_list.size());
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/video_coding/jitter_estimator.cc b/webrtc/modules/video_coding/jitter_estimator.cc
new file mode 100644
index 0000000000..8270c60e01
--- /dev/null
+++ b/webrtc/modules/video_coding/jitter_estimator.cc
@@ -0,0 +1,443 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_coding/jitter_estimator.h"
+
+#include <assert.h>
+#include <math.h>
+#include <stdlib.h>
+#include <string.h>
+#include <string>
+
+#include "webrtc/modules/video_coding/internal_defines.h"
+#include "webrtc/modules/video_coding/rtt_filter.h"
+#include "webrtc/system_wrappers/include/clock.h"
+#include "webrtc/system_wrappers/include/field_trial.h"
+
+namespace webrtc {
+
+enum { kStartupDelaySamples = 30 };
+enum { kFsAccuStartupSamples = 5 };
+enum { kMaxFramerateEstimate = 200 };
+
+VCMJitterEstimator::VCMJitterEstimator(const Clock* clock,
+ int32_t vcmId,
+ int32_t receiverId)
+ : _vcmId(vcmId),
+ _receiverId(receiverId),
+ _phi(0.97),
+ _psi(0.9999),
+ _alphaCountMax(400),
+ _thetaLow(0.000001),
+ _nackLimit(3),
+ _numStdDevDelayOutlier(15),
+ _numStdDevFrameSizeOutlier(3),
+ _noiseStdDevs(2.33), // ~Less than 1% chance
+ // (look up in normal distribution table)...
+ _noiseStdDevOffset(30.0), // ...of getting 30 ms freezes
+ _rttFilter(),
+ fps_counter_(30), // TODO(sprang): Use an estimator with limit based on
+ // time, rather than number of samples.
+ low_rate_experiment_(kInit),
+ clock_(clock) {
+ Reset();
+}
+
+VCMJitterEstimator::~VCMJitterEstimator() {}
+
+VCMJitterEstimator& VCMJitterEstimator::operator=(
+ const VCMJitterEstimator& rhs) {
+ if (this != &rhs) {
+ memcpy(_thetaCov, rhs._thetaCov, sizeof(_thetaCov));
+ memcpy(_Qcov, rhs._Qcov, sizeof(_Qcov));
+
+ _vcmId = rhs._vcmId;
+ _receiverId = rhs._receiverId;
+ _avgFrameSize = rhs._avgFrameSize;
+ _varFrameSize = rhs._varFrameSize;
+ _maxFrameSize = rhs._maxFrameSize;
+ _fsSum = rhs._fsSum;
+ _fsCount = rhs._fsCount;
+ _lastUpdateT = rhs._lastUpdateT;
+ _prevEstimate = rhs._prevEstimate;
+ _prevFrameSize = rhs._prevFrameSize;
+ _avgNoise = rhs._avgNoise;
+ _alphaCount = rhs._alphaCount;
+ _filterJitterEstimate = rhs._filterJitterEstimate;
+ _startupCount = rhs._startupCount;
+ _latestNackTimestamp = rhs._latestNackTimestamp;
+ _nackCount = rhs._nackCount;
+ _rttFilter = rhs._rttFilter;
+ }
+ return *this;
+}
+
+// Resets the JitterEstimate
+void VCMJitterEstimator::Reset() {
+ _theta[0] = 1 / (512e3 / 8);
+ _theta[1] = 0;
+ _varNoise = 4.0;
+
+ _thetaCov[0][0] = 1e-4;
+ _thetaCov[1][1] = 1e2;
+ _thetaCov[0][1] = _thetaCov[1][0] = 0;
+ _Qcov[0][0] = 2.5e-10;
+ _Qcov[1][1] = 1e-10;
+ _Qcov[0][1] = _Qcov[1][0] = 0;
+ _avgFrameSize = 500;
+ _maxFrameSize = 500;
+ _varFrameSize = 100;
+ _lastUpdateT = -1;
+ _prevEstimate = -1.0;
+ _prevFrameSize = 0;
+ _avgNoise = 0.0;
+ _alphaCount = 1;
+ _filterJitterEstimate = 0.0;
+ _latestNackTimestamp = 0;
+ _nackCount = 0;
+ _fsSum = 0;
+ _fsCount = 0;
+ _startupCount = 0;
+ _rttFilter.Reset();
+ fps_counter_.Reset();
+}
+
+void VCMJitterEstimator::ResetNackCount() {
+ _nackCount = 0;
+}
+
+// Updates the estimates with the new measurements
+void VCMJitterEstimator::UpdateEstimate(int64_t frameDelayMS,
+ uint32_t frameSizeBytes,
+ bool incompleteFrame /* = false */) {
+ if (frameSizeBytes == 0) {
+ return;
+ }
+ int deltaFS = frameSizeBytes - _prevFrameSize;
+ if (_fsCount < kFsAccuStartupSamples) {
+ _fsSum += frameSizeBytes;
+ _fsCount++;
+ } else if (_fsCount == kFsAccuStartupSamples) {
+    // Give the frame size filter a starting value based on the accumulated
+    // startup samples.
+ _avgFrameSize = static_cast<double>(_fsSum) / static_cast<double>(_fsCount);
+ _fsCount++;
+ }
+ if (!incompleteFrame || frameSizeBytes > _avgFrameSize) {
+ double avgFrameSize = _phi * _avgFrameSize + (1 - _phi) * frameSizeBytes;
+ if (frameSizeBytes < _avgFrameSize + 2 * sqrt(_varFrameSize)) {
+ // Only update the average frame size if this sample wasn't a
+ // key frame
+ _avgFrameSize = avgFrameSize;
+ }
+    // Update the variance anyway since we want to capture cases where we
+    // only get key frames.
+ _varFrameSize = VCM_MAX(_phi * _varFrameSize +
+ (1 - _phi) * (frameSizeBytes - avgFrameSize) *
+ (frameSizeBytes - avgFrameSize),
+ 1.0);
+ }
+
+ // Update max frameSize estimate
+ _maxFrameSize =
+ VCM_MAX(_psi * _maxFrameSize, static_cast<double>(frameSizeBytes));
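+  // (_psi < 1, so the tracked maximum decays slowly toward the sizes of
+  // the frames actually being received.)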
+
+ if (_prevFrameSize == 0) {
+ _prevFrameSize = frameSizeBytes;
+ return;
+ }
+ _prevFrameSize = frameSizeBytes;
+
+ // Only update the Kalman filter if the sample is not considered
+ // an extreme outlier. Even if it is an extreme outlier from a
+ // delay point of view, if the frame size also is large the
+ // deviation is probably due to an incorrect line slope.
+ double deviation = DeviationFromExpectedDelay(frameDelayMS, deltaFS);
+
+ if (fabs(deviation) < _numStdDevDelayOutlier * sqrt(_varNoise) ||
+ frameSizeBytes >
+ _avgFrameSize + _numStdDevFrameSizeOutlier * sqrt(_varFrameSize)) {
+ // Update the variance of the deviation from the
+ // line given by the Kalman filter
+ EstimateRandomJitter(deviation, incompleteFrame);
+    // Prevent updating with frames which have been congested by a large
+    // frame, and therefore arrive almost at the same time as that frame.
+    // This can occur when we receive a large frame (key frame) which has
+    // been delayed. The next frame is of normal size (delta frame), and
+    // thus deltaFS will be << 0. This removes all frame samples which
+    // arrive after a key frame.
+ if ((!incompleteFrame || deviation >= 0.0) &&
+ static_cast<double>(deltaFS) > -0.25 * _maxFrameSize) {
+ // Update the Kalman filter with the new data
+ KalmanEstimateChannel(frameDelayMS, deltaFS);
+ }
+ } else {
+ int nStdDev =
+ (deviation >= 0) ? _numStdDevDelayOutlier : -_numStdDevDelayOutlier;
+ EstimateRandomJitter(nStdDev * sqrt(_varNoise), incompleteFrame);
+ }
+ // Post process the total estimated jitter
+ if (_startupCount >= kStartupDelaySamples) {
+ PostProcessEstimate();
+ } else {
+ _startupCount++;
+ }
+}
+
+// Updates the nack/packet ratio
+void VCMJitterEstimator::FrameNacked() {
+  // Wait until _nackLimit retransmissions have been received,
+  // then always add ~1 RTT delay.
+  // TODO(holmer): Should we ever remove the additional delay if the
+  // packet losses seem to have stopped? We could for instance scale
+  // the number of RTTs to add with the amount of retransmissions in a given
+  // time interval, or similar.
+ if (_nackCount < _nackLimit) {
+ _nackCount++;
+ }
+}
+
+// Updates Kalman estimate of the channel
+// The caller is expected to sanity check the inputs.
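+//
+// The channel is modeled as the line
+//   frameDelayMS ~ _theta[0] * deltaFSBytes + _theta[1],
+// i.e. _theta[0] is the delay-per-byte slope and _theta[1] the offset; the
+// Kalman filter below tracks _theta with process noise covariance _Qcov.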
+void VCMJitterEstimator::KalmanEstimateChannel(int64_t frameDelayMS,
+ int32_t deltaFSBytes) {
+ double Mh[2];
+ double hMh_sigma;
+ double kalmanGain[2];
+ double measureRes;
+ double t00, t01;
+
+ // Kalman filtering
+
+ // Prediction
+ // M = M + Q
+ _thetaCov[0][0] += _Qcov[0][0];
+ _thetaCov[0][1] += _Qcov[0][1];
+ _thetaCov[1][0] += _Qcov[1][0];
+ _thetaCov[1][1] += _Qcov[1][1];
+
+ // Kalman gain
+ // K = M*h'/(sigma2n + h*M*h') = M*h'/(1 + h*M*h')
+ // h = [dFS 1]
+ // Mh = M*h'
+ // hMh_sigma = h*M*h' + R
+ Mh[0] = _thetaCov[0][0] * deltaFSBytes + _thetaCov[0][1];
+ Mh[1] = _thetaCov[1][0] * deltaFSBytes + _thetaCov[1][1];
+ // sigma weights measurements with a small deltaFS as noisy and
+ // measurements with large deltaFS as good
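+  // With the constants below, sigma ranges from ~301 * sqrt(_varNoise) at
+  // deltaFSBytes == 0 down to ~111 * sqrt(_varNoise) when |deltaFSBytes|
+  // equals _maxFrameSize.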
+ if (_maxFrameSize < 1.0) {
+ return;
+ }
+ double sigma = (300.0 * exp(-fabs(static_cast<double>(deltaFSBytes)) /
+ (1e0 * _maxFrameSize)) +
+ 1) *
+ sqrt(_varNoise);
+ if (sigma < 1.0) {
+ sigma = 1.0;
+ }
+ hMh_sigma = deltaFSBytes * Mh[0] + Mh[1] + sigma;
+ if ((hMh_sigma < 1e-9 && hMh_sigma >= 0) ||
+ (hMh_sigma > -1e-9 && hMh_sigma <= 0)) {
+ assert(false);
+ return;
+ }
+ kalmanGain[0] = Mh[0] / hMh_sigma;
+ kalmanGain[1] = Mh[1] / hMh_sigma;
+
+ // Correction
+ // theta = theta + K*(dT - h*theta)
+ measureRes = frameDelayMS - (deltaFSBytes * _theta[0] + _theta[1]);
+ _theta[0] += kalmanGain[0] * measureRes;
+ _theta[1] += kalmanGain[1] * measureRes;
+
+ if (_theta[0] < _thetaLow) {
+ _theta[0] = _thetaLow;
+ }
+
+ // M = (I - K*h)*M
+ t00 = _thetaCov[0][0];
+ t01 = _thetaCov[0][1];
+ _thetaCov[0][0] = (1 - kalmanGain[0] * deltaFSBytes) * t00 -
+ kalmanGain[0] * _thetaCov[1][0];
+ _thetaCov[0][1] = (1 - kalmanGain[0] * deltaFSBytes) * t01 -
+ kalmanGain[0] * _thetaCov[1][1];
+ _thetaCov[1][0] = _thetaCov[1][0] * (1 - kalmanGain[1]) -
+ kalmanGain[1] * deltaFSBytes * t00;
+ _thetaCov[1][1] = _thetaCov[1][1] * (1 - kalmanGain[1]) -
+ kalmanGain[1] * deltaFSBytes * t01;
+
+ // Covariance matrix, must be positive semi-definite
+ assert(_thetaCov[0][0] + _thetaCov[1][1] >= 0 &&
+ _thetaCov[0][0] * _thetaCov[1][1] -
+ _thetaCov[0][1] * _thetaCov[1][0] >=
+ 0 &&
+ _thetaCov[0][0] >= 0);
+}
+
+// Calculate difference in delay between a sample and the
+// expected delay estimated by the Kalman filter
+double VCMJitterEstimator::DeviationFromExpectedDelay(
+ int64_t frameDelayMS,
+ int32_t deltaFSBytes) const {
+ return frameDelayMS - (_theta[0] * deltaFSBytes + _theta[1]);
+}
+
+// Estimates the random jitter by calculating the variance of the
+// sample distance from the line given by theta.
+void VCMJitterEstimator::EstimateRandomJitter(double d_dT,
+ bool incompleteFrame) {
+ uint64_t now = clock_->TimeInMicroseconds();
+ if (_lastUpdateT != -1) {
+ fps_counter_.AddSample(now - _lastUpdateT);
+ }
+ _lastUpdateT = now;
+
+ if (_alphaCount == 0) {
+ assert(false);
+ return;
+ }
+ double alpha =
+ static_cast<double>(_alphaCount - 1) / static_cast<double>(_alphaCount);
+ _alphaCount++;
+ if (_alphaCount > _alphaCountMax)
+ _alphaCount = _alphaCountMax;
+
+ if (LowRateExperimentEnabled()) {
+    // To avoid a low frame rate stream reacting more slowly to changes,
+    // scale the alpha weight relative to a 30 fps stream.
+ double fps = GetFrameRate();
+ if (fps > 0.0) {
+ double rate_scale = 30.0 / fps;
+ // At startup, there can be a lot of noise in the fps estimate.
+ // Interpolate rate_scale linearly, from 1.0 at sample #1, to 30.0 / fps
+ // at sample #kStartupDelaySamples.
+ if (_alphaCount < kStartupDelaySamples) {
+ rate_scale =
+ (_alphaCount * rate_scale + (kStartupDelaySamples - _alphaCount)) /
+ kStartupDelaySamples;
+ }
+ alpha = pow(alpha, rate_scale);
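+      // alpha lies in (0, 1), so raising it to rate_scale > 1 (fps < 30)
+      // lowers it, weighting new samples more heavily for low-rate streams.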
+ }
+ }
+
+ double avgNoise = alpha * _avgNoise + (1 - alpha) * d_dT;
+ double varNoise =
+ alpha * _varNoise + (1 - alpha) * (d_dT - _avgNoise) * (d_dT - _avgNoise);
+ if (!incompleteFrame || varNoise > _varNoise) {
+ _avgNoise = avgNoise;
+ _varNoise = varNoise;
+ }
+ if (_varNoise < 1.0) {
+    // Keep the variance at or above 1.0; if it approached zero, every
+    // sample would look like an outlier and the filter would get stuck.
+ _varNoise = 1.0;
+ }
+}
+
+double VCMJitterEstimator::NoiseThreshold() const {
+ double noiseThreshold = _noiseStdDevs * sqrt(_varNoise) - _noiseStdDevOffset;
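+  // E.g. with _noiseStdDevs = 2.33 and _noiseStdDevOffset = 30.0, _varNoise
+  // must exceed roughly 177 ms^2 before this threshold rises above the
+  // 1 ms floor enforced below.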
+ if (noiseThreshold < 1.0) {
+ noiseThreshold = 1.0;
+ }
+ return noiseThreshold;
+}
+
+// Calculates the current jitter estimate from the filtered estimates
+double VCMJitterEstimator::CalculateEstimate() {
+ double ret = _theta[0] * (_maxFrameSize - _avgFrameSize) + NoiseThreshold();
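+  // _theta[0] is the estimated delay per byte, so the first term is the
+  // extra transfer time of a max-size frame relative to an average one.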
+
+ // A very low estimate (or negative) is neglected
+ if (ret < 1.0) {
+ if (_prevEstimate <= 0.01) {
+ ret = 1.0;
+ } else {
+ ret = _prevEstimate;
+ }
+ }
+ if (ret > 10000.0) { // Sanity
+ ret = 10000.0;
+ }
+ _prevEstimate = ret;
+ return ret;
+}
+
+void VCMJitterEstimator::PostProcessEstimate() {
+ _filterJitterEstimate = CalculateEstimate();
+}
+
+void VCMJitterEstimator::UpdateRtt(int64_t rttMs) {
+ _rttFilter.Update(rttMs);
+}
+
+void VCMJitterEstimator::UpdateMaxFrameSize(uint32_t frameSizeBytes) {
+ if (_maxFrameSize < frameSizeBytes) {
+ _maxFrameSize = frameSizeBytes;
+ }
+}
+
+// Returns the current filtered estimate if available,
+// otherwise tries to calculate an estimate.
+int VCMJitterEstimator::GetJitterEstimate(double rttMultiplier) {
+ double jitterMS = CalculateEstimate() + OPERATING_SYSTEM_JITTER;
+ if (_filterJitterEstimate > jitterMS)
+ jitterMS = _filterJitterEstimate;
+ if (_nackCount >= _nackLimit)
+ jitterMS += _rttFilter.RttMs() * rttMultiplier;
+
+ if (LowRateExperimentEnabled()) {
+ static const double kJitterScaleLowThreshold = 5.0;
+ static const double kJitterScaleHighThreshold = 10.0;
+ double fps = GetFrameRate();
+ // Ignore jitter for very low fps streams.
+ if (fps < kJitterScaleLowThreshold) {
+ if (fps == 0.0) {
+ return jitterMS;
+ }
+ return 0;
+ }
+
+ // Semi-low frame rate; scale by factor linearly interpolated from 0.0 at
+ // kJitterScaleLowThreshold to 1.0 at kJitterScaleHighThreshold.
+ if (fps < kJitterScaleHighThreshold) {
+ jitterMS =
+ (1.0 / (kJitterScaleHighThreshold - kJitterScaleLowThreshold)) *
+ (fps - kJitterScaleLowThreshold) * jitterMS;
+ }
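+    // E.g. at 7.5 fps the scale factor is (7.5 - 5.0) / (10.0 - 5.0) = 0.5.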
+ }
+
+  return static_cast<int>(jitterMS + 0.5);
+}
+
+bool VCMJitterEstimator::LowRateExperimentEnabled() {
+ if (low_rate_experiment_ == kInit) {
+ std::string group =
+ webrtc::field_trial::FindFullName("WebRTC-ReducedJitterDelay");
+ if (group == "Disabled") {
+ low_rate_experiment_ = kDisabled;
+ } else {
+ low_rate_experiment_ = kEnabled;
+ }
+ }
+  return low_rate_experiment_ == kEnabled;
+}
+
+double VCMJitterEstimator::GetFrameRate() const {
+ if (fps_counter_.count() == 0)
+ return 0;
+
+ double fps = 1000000.0 / fps_counter_.ComputeMean();
+ // Sanity check.
+ assert(fps >= 0.0);
+ if (fps > kMaxFramerateEstimate) {
+ fps = kMaxFramerateEstimate;
+ }
+ return fps;
+}
+} // namespace webrtc
diff --git a/webrtc/modules/video_coding/jitter_estimator.h b/webrtc/modules/video_coding/jitter_estimator.h
new file mode 100644
index 0000000000..a7b4b3e3df
--- /dev/null
+++ b/webrtc/modules/video_coding/jitter_estimator.h
@@ -0,0 +1,170 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_JITTER_ESTIMATOR_H_
+#define WEBRTC_MODULES_VIDEO_CODING_JITTER_ESTIMATOR_H_
+
+#include "webrtc/base/rollingaccumulator.h"
+#include "webrtc/modules/video_coding/rtt_filter.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+class Clock;
+
+class VCMJitterEstimator {
+ public:
+ VCMJitterEstimator(const Clock* clock,
+ int32_t vcmId = 0,
+ int32_t receiverId = 0);
+ virtual ~VCMJitterEstimator();
+ VCMJitterEstimator& operator=(const VCMJitterEstimator& rhs);
+
+ // Resets the estimate to the initial state
+ void Reset();
+ void ResetNackCount();
+
+ // Updates the jitter estimate with the new data.
+ //
+ // Input:
+  //          - frameDelayMS : Delay-delta calculated by UTILDelayEstimate
+  //                           in milliseconds.
+  //          - frameSizeBytes : Frame size of the current frame.
+  //          - incompleteFrame : Flags if the frame is used to update the
+  //                              estimate before it was complete.
+  //                              Default is false.
+ void UpdateEstimate(int64_t frameDelayMS,
+ uint32_t frameSizeBytes,
+ bool incompleteFrame = false);
+
+  // Returns the current jitter estimate in milliseconds and also adds an
+  // RTT dependent term in case of retransmission.
+ // Input:
+ // - rttMultiplier : RTT param multiplier (when applicable).
+ //
+ // Return value : Jitter estimate in milliseconds
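+  //
+  // A minimal usage sketch (hypothetical caller; the variable names are
+  // invented here):
+  //   VCMJitterEstimator estimator(Clock::GetRealTimeClock());
+  //   estimator.UpdateEstimate(frame_delay_ms, frame_size_bytes);
+  //   int jitter_ms = estimator.GetJitterEstimate(1.0);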
+ int GetJitterEstimate(double rttMultiplier);
+
+ // Updates the nack counter.
+ void FrameNacked();
+
+ // Updates the RTT filter.
+ //
+ // Input:
+ // - rttMs : RTT in ms
+ void UpdateRtt(int64_t rttMs);
+
+ void UpdateMaxFrameSize(uint32_t frameSizeBytes);
+
+  // A constant describing the additional delay on the receiving side that is
+  // not accounted for by the jitter buffer or the decoding delay estimate.
+ static const uint32_t OPERATING_SYSTEM_JITTER = 10;
+
+ protected:
+ // These are protected for better testing possibilities
+ double _theta[2]; // Estimated line parameters (slope, offset)
+ double _varNoise; // Variance of the time-deviation from the line
+
+ virtual bool LowRateExperimentEnabled();
+
+ private:
+ // Updates the Kalman filter for the line describing
+ // the frame size dependent jitter.
+ //
+ // Input:
+ // - frameDelayMS : Delay-delta calculated by UTILDelayEstimate in
+ // milliseconds
+  //          - deltaFSBytes : Frame size delta, i.e. frame size at time T
+  //                           minus frame size at time T-1.
+ void KalmanEstimateChannel(int64_t frameDelayMS, int32_t deltaFSBytes);
+
+ // Updates the random jitter estimate, i.e. the variance
+ // of the time deviations from the line given by the Kalman filter.
+ //
+ // Input:
+  //          - d_dT : The deviation from the Kalman estimate.
+  //          - incompleteFrame : True if the frame used to update the
+  //                              estimate was incomplete.
+ void EstimateRandomJitter(double d_dT, bool incompleteFrame);
+
+ double NoiseThreshold() const;
+
+ // Calculates the current jitter estimate.
+ //
+ // Return value : The current jitter estimate in milliseconds
+ double CalculateEstimate();
+
+ // Post process the calculated estimate
+ void PostProcessEstimate();
+
+ // Calculates the difference in delay between a sample and the
+ // expected delay estimated by the Kalman filter.
+ //
+ // Input:
+ // - frameDelayMS : Delay-delta calculated by UTILDelayEstimate in
+ // milliseconds
+  //          - deltaFSBytes : Frame size delta, i.e. frame size at time T
+  //                           minus frame size at time T-1.
+ //
+ // Return value : The difference in milliseconds
+ double DeviationFromExpectedDelay(int64_t frameDelayMS,
+ int32_t deltaFSBytes) const;
+
+ double GetFrameRate() const;
+
+ // Constants, filter parameters
+ int32_t _vcmId;
+ int32_t _receiverId;
+ const double _phi;
+ const double _psi;
+ const uint32_t _alphaCountMax;
+ const double _thetaLow;
+ const uint32_t _nackLimit;
+ const int32_t _numStdDevDelayOutlier;
+ const int32_t _numStdDevFrameSizeOutlier;
+ const double _noiseStdDevs;
+ const double _noiseStdDevOffset;
+
+ double _thetaCov[2][2]; // Estimate covariance
+ double _Qcov[2][2]; // Process noise covariance
+ double _avgFrameSize; // Average frame size
+ double _varFrameSize; // Frame size variance
+ double _maxFrameSize; // Largest frame size received (descending
+ // with a factor _psi)
+ uint32_t _fsSum;
+ uint32_t _fsCount;
+
+ int64_t _lastUpdateT;
+ double _prevEstimate; // The previously returned jitter estimate
+ uint32_t _prevFrameSize; // Frame size of the previous frame
+ double _avgNoise; // Average of the random jitter
+ uint32_t _alphaCount;
+ double _filterJitterEstimate; // The filtered sum of jitter estimates
+
+ uint32_t _startupCount;
+
+ int64_t
+ _latestNackTimestamp; // Timestamp in ms when the latest nack was seen
+ uint32_t _nackCount; // Keeps track of the number of nacks received,
+ // but never goes above _nackLimit
+ VCMRttFilter _rttFilter;
+
+ rtc::RollingAccumulator<uint64_t> fps_counter_;
+ enum ExperimentFlag { kInit, kEnabled, kDisabled };
+ ExperimentFlag low_rate_experiment_;
+ const Clock* clock_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_JITTER_ESTIMATOR_H_
diff --git a/webrtc/modules/video_coding/jitter_estimator_tests.cc b/webrtc/modules/video_coding/jitter_estimator_tests.cc
new file mode 100644
index 0000000000..3d46ce2bcd
--- /dev/null
+++ b/webrtc/modules/video_coding/jitter_estimator_tests.cc
@@ -0,0 +1,160 @@
+/* Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_coding/jitter_estimator.h"
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/system_wrappers/include/clock.h"
+
+namespace webrtc {
+
+class TestEstimator : public VCMJitterEstimator {
+ public:
+ explicit TestEstimator(bool exp_enabled)
+ : VCMJitterEstimator(&fake_clock_, 0, 0),
+ fake_clock_(0),
+ exp_enabled_(exp_enabled) {}
+
+ virtual bool LowRateExperimentEnabled() { return exp_enabled_; }
+
+ void AdvanceClock(int64_t microseconds) {
+ fake_clock_.AdvanceTimeMicroseconds(microseconds);
+ }
+
+ private:
+ SimulatedClock fake_clock_;
+ const bool exp_enabled_;
+};
+
+class TestVCMJitterEstimator : public ::testing::Test {
+ protected:
+ TestVCMJitterEstimator()
+ : regular_estimator_(false), low_rate_estimator_(true) {}
+
+ virtual void SetUp() { regular_estimator_.Reset(); }
+
+ TestEstimator regular_estimator_;
+ TestEstimator low_rate_estimator_;
+};
+
+// Generates some simple test data in the form of a sawtooth wave.
+class ValueGenerator {
+ public:
+  explicit ValueGenerator(int32_t amplitude)
+      : amplitude_(amplitude), counter_(0) {}
+ virtual ~ValueGenerator() {}
+
+ int64_t Delay() { return ((counter_ % 11) - 5) * amplitude_; }
+
+ uint32_t FrameSize() { return 1000 + Delay(); }
+
+ void Advance() { ++counter_; }
+
+ private:
+ const int32_t amplitude_;
+ int64_t counter_;
+};
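+// (With amplitude 10, Delay() steps through -50, -40, ..., 40, 50 and then
+// repeats, so FrameSize() oscillates between 950 and 1050 bytes.)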
+
+// 5 fps, disable jitter delay altogether.
+TEST_F(TestVCMJitterEstimator, TestLowRate) {
+ ValueGenerator gen(10);
+ uint64_t time_delta = 1000000 / 5;
+ for (int i = 0; i < 60; ++i) {
+ regular_estimator_.UpdateEstimate(gen.Delay(), gen.FrameSize());
+ regular_estimator_.AdvanceClock(time_delta);
+ low_rate_estimator_.UpdateEstimate(gen.Delay(), gen.FrameSize());
+ low_rate_estimator_.AdvanceClock(time_delta);
+ EXPECT_GT(regular_estimator_.GetJitterEstimate(0), 0);
+ if (i > 2)
+ EXPECT_EQ(low_rate_estimator_.GetJitterEstimate(0), 0);
+ gen.Advance();
+ }
+}
+
+// 8 fps; the steady-state estimate should lie in the interpolated interval
+// between 0 and the value given by the previous method.
+TEST_F(TestVCMJitterEstimator, TestMidRate) {
+ ValueGenerator gen(10);
+ uint64_t time_delta = 1000000 / 8;
+ for (int i = 0; i < 60; ++i) {
+ regular_estimator_.UpdateEstimate(gen.Delay(), gen.FrameSize());
+ regular_estimator_.AdvanceClock(time_delta);
+ low_rate_estimator_.UpdateEstimate(gen.Delay(), gen.FrameSize());
+ low_rate_estimator_.AdvanceClock(time_delta);
+ EXPECT_GT(regular_estimator_.GetJitterEstimate(0), 0);
+ EXPECT_GT(low_rate_estimator_.GetJitterEstimate(0), 0);
+ EXPECT_GE(regular_estimator_.GetJitterEstimate(0),
+ low_rate_estimator_.GetJitterEstimate(0));
+ gen.Advance();
+ }
+}
+
+// 30 fps; the steady-state estimate should be the same as with the previous
+// method.
+TEST_F(TestVCMJitterEstimator, TestHighRate) {
+ ValueGenerator gen(10);
+ uint64_t time_delta = 1000000 / 30;
+ for (int i = 0; i < 60; ++i) {
+ regular_estimator_.UpdateEstimate(gen.Delay(), gen.FrameSize());
+ regular_estimator_.AdvanceClock(time_delta);
+ low_rate_estimator_.UpdateEstimate(gen.Delay(), gen.FrameSize());
+ low_rate_estimator_.AdvanceClock(time_delta);
+ EXPECT_EQ(regular_estimator_.GetJitterEstimate(0),
+ low_rate_estimator_.GetJitterEstimate(0));
+ gen.Advance();
+ }
+}
+
+// 10 fps, high jitter then low jitter. The low rate estimator should
+// converge faster to the low noise estimate.
+TEST_F(TestVCMJitterEstimator, TestConvergence) {
+ // Reach a steady state with high noise.
+ ValueGenerator gen(50);
+ uint64_t time_delta = 1000000 / 10;
+ for (int i = 0; i < 100; ++i) {
+ regular_estimator_.UpdateEstimate(gen.Delay(), gen.FrameSize());
+ regular_estimator_.AdvanceClock(time_delta * 2);
+ low_rate_estimator_.UpdateEstimate(gen.Delay(), gen.FrameSize());
+ low_rate_estimator_.AdvanceClock(time_delta * 2);
+ gen.Advance();
+ }
+
+ int threshold = regular_estimator_.GetJitterEstimate(0) / 2;
+
+ // New generator with zero noise.
+ ValueGenerator low_gen(0);
+ int regular_iterations = 0;
+ int low_rate_iterations = 0;
+ for (int i = 0; i < 500; ++i) {
+ if (regular_iterations == 0) {
+ regular_estimator_.UpdateEstimate(low_gen.Delay(), low_gen.FrameSize());
+ regular_estimator_.AdvanceClock(time_delta);
+ if (regular_estimator_.GetJitterEstimate(0) < threshold) {
+ regular_iterations = i;
+ }
+ }
+
+ if (low_rate_iterations == 0) {
+ low_rate_estimator_.UpdateEstimate(low_gen.Delay(), low_gen.FrameSize());
+ low_rate_estimator_.AdvanceClock(time_delta);
+ if (low_rate_estimator_.GetJitterEstimate(0) < threshold) {
+ low_rate_iterations = i;
+ }
+ }
+
+ if (regular_iterations != 0 && low_rate_iterations != 0) {
+ break;
+ }
+
+    low_gen.Advance();
+ }
+
+ EXPECT_NE(regular_iterations, 0);
+ EXPECT_NE(low_rate_iterations, 0);
+ EXPECT_LE(low_rate_iterations, regular_iterations);
+}
+}  // namespace webrtc
diff --git a/webrtc/modules/video_coding/main/interface/mock/mock_vcm_callbacks.h b/webrtc/modules/video_coding/main/interface/mock/mock_vcm_callbacks.h
deleted file mode 100644
index 302d4a3a13..0000000000
--- a/webrtc/modules/video_coding/main/interface/mock/mock_vcm_callbacks.h
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_MAIN_INTERFACE_MOCK_MOCK_VCM_CALLBACKS_H_
-#define WEBRTC_MODULES_VIDEO_CODING_MAIN_INTERFACE_MOCK_MOCK_VCM_CALLBACKS_H_
-
-#include "testing/gmock/include/gmock/gmock.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding_defines.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-class MockVCMFrameTypeCallback : public VCMFrameTypeCallback {
- public:
- MOCK_METHOD0(RequestKeyFrame, int32_t());
- MOCK_METHOD1(SliceLossIndicationRequest,
- int32_t(const uint64_t pictureId));
-};
-
-class MockPacketRequestCallback : public VCMPacketRequestCallback {
- public:
- MOCK_METHOD2(ResendPackets, int32_t(const uint16_t* sequenceNumbers,
- uint16_t length));
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_MAIN_INTERFACE_MOCK_MOCK_VCM_CALLBACKS_H_
diff --git a/webrtc/modules/video_coding/main/interface/video_coding.h b/webrtc/modules/video_coding/main/interface/video_coding.h
deleted file mode 100644
index 67f7b635cb..0000000000
--- a/webrtc/modules/video_coding/main/interface/video_coding.h
+++ /dev/null
@@ -1,544 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_INTERFACE_VIDEO_CODING_H_
-#define WEBRTC_MODULES_INTERFACE_VIDEO_CODING_H_
-
-#if defined(WEBRTC_WIN)
-// This is a workaround on Windows due to the fact that some Windows
-// headers define CreateEvent as a macro to either CreateEventW or CreateEventA.
-// This can cause problems since we use that name as well and could
-// declare them as one thing here whereas in another place a windows header
-// may have been included and then implementing CreateEvent() causes compilation
-// errors. So for consistency, we include the main windows header here.
-#include <windows.h>
-#endif
-
-#include "webrtc/modules/interface/module.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding_defines.h"
-#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/video_frame.h"
-
-namespace webrtc
-{
-
-class Clock;
-class EncodedImageCallback;
-class VideoEncoder;
-class VideoDecoder;
-struct CodecSpecificInfo;
-
-class EventFactory {
- public:
- virtual ~EventFactory() {}
-
- virtual EventWrapper* CreateEvent() = 0;
-};
-
-class EventFactoryImpl : public EventFactory {
- public:
- virtual ~EventFactoryImpl() {}
-
- virtual EventWrapper* CreateEvent() {
- return EventWrapper::Create();
- }
-};
-
-// Used to indicate which decode with errors mode should be used.
-enum VCMDecodeErrorMode {
- kNoErrors, // Never decode with errors. Video will freeze
- // if nack is disabled.
- kSelectiveErrors, // Frames that are determined decodable in
- // VCMSessionInfo may be decoded with missing
- // packets. As not all incomplete frames will be
- // decodable, video will freeze if nack is disabled.
- kWithErrors // Release frames as needed. Errors may be
- // introduced as some encoded frames may not be
- // complete.
-};
-
-class VideoCodingModule : public Module
-{
-public:
- enum SenderNackMode {
- kNackNone,
- kNackAll,
- kNackSelective
- };
-
- enum ReceiverRobustness {
- kNone,
- kHardNack,
- kSoftNack,
- kReferenceSelection
- };
-
- static VideoCodingModule* Create(
- Clock* clock,
- VideoEncoderRateObserver* encoder_rate_observer,
- VCMQMSettingsCallback* qm_settings_callback);
-
- static VideoCodingModule* Create(Clock* clock, EventFactory* event_factory);
-
- static void Destroy(VideoCodingModule* module);
-
- // Get number of supported codecs
- //
- // Return value : Number of supported codecs
- static uint8_t NumberOfCodecs();
-
- // Get supported codec settings with using id
- //
- // Input:
- // - listId : Id or index of the codec to look up
- // - codec : Memory where the codec settings will be stored
- //
- // Return value : VCM_OK, on success
- // VCM_PARAMETER_ERROR if codec not supported or id too high
- static int32_t Codec(const uint8_t listId, VideoCodec* codec);
-
- // Get supported codec settings using codec type
- //
- // Input:
- // - codecType : The codec type to get settings for
- // - codec : Memory where the codec settings will be stored
- //
- // Return value : VCM_OK, on success
- // VCM_PARAMETER_ERROR if codec not supported
- static int32_t Codec(VideoCodecType codecType, VideoCodec* codec);
-
- /*
- * Sender
- */
-
- // Registers a codec to be used for encoding. Calling this
- // API multiple times overwrites any previously registered codecs.
- //
- // NOTE: Must be called on the thread that constructed the VCM instance.
- //
- // Input:
- // - sendCodec : Settings for the codec to be registered.
- // - numberOfCores : The number of cores the codec is allowed
- // to use.
- // - maxPayloadSize : The maximum size each payload is allowed
- // to have. Usually MTU - overhead.
- //
- // Return value : VCM_OK, on success.
- // < 0, on error.
- virtual int32_t RegisterSendCodec(const VideoCodec* sendCodec,
- uint32_t numberOfCores,
- uint32_t maxPayloadSize) = 0;
-
- // Get the current send codec in use.
- //
-    // If a codec has not been set yet, the |plType| field of the return value
-    // will be 0 and |plName| empty.
- //
- // NOTE: This method intentionally does not hold locks and minimizes data
- // copying. It must be called on the thread where the VCM was constructed.
- virtual const VideoCodec& GetSendCodec() const = 0;
-
- // DEPRECATED: Use GetSendCodec() instead.
- //
- // API to get the current send codec in use.
- //
- // Input:
- // - currentSendCodec : Address where the sendCodec will be written.
- //
- // Return value : VCM_OK, on success.
- // < 0, on error.
- //
- // NOTE: The returned codec information is not guaranteed to be current when
- // the call returns. This method acquires a lock that is aligned with
- // video encoding, so it should be assumed to be allowed to block for
- // several milliseconds.
- virtual int32_t SendCodec(VideoCodec* currentSendCodec) const = 0;
-
- // DEPRECATED: Use GetSendCodec() instead.
- //
- // API to get the current send codec type
- //
- // Return value : Codec type, on success.
- // kVideoCodecUnknown, on error or if no send codec is set
- // NOTE: Same notes apply as for SendCodec() above.
- virtual VideoCodecType SendCodec() const = 0;
-
- // Register an external encoder object. This can not be used together with
- // external decoder callbacks.
- //
- // Input:
- // - externalEncoder : Encoder object to be used for encoding frames inserted
- // with the AddVideoFrame API.
-    //          - payloadType : The payload type to which this encoder is bound.
- //
- // Return value : VCM_OK, on success.
- // < 0, on error.
- virtual int32_t RegisterExternalEncoder(VideoEncoder* externalEncoder,
- uint8_t payloadType,
- bool internalSource = false) = 0;
-
- // API to get currently configured encoder target bitrate in bits/s.
- //
- // Return value : 0, on success.
- // < 0, on error.
- virtual int Bitrate(unsigned int* bitrate) const = 0;
-
- // API to get currently configured encoder target frame rate.
- //
- // Return value : 0, on success.
- // < 0, on error.
- virtual int FrameRate(unsigned int* framerate) const = 0;
-
- // Sets the parameters describing the send channel. These parameters are inputs to the
- // Media Optimization inside the VCM and also specifies the target bit rate for the
- // encoder. Bit rate used by NACK should already be compensated for by the user.
- //
- // Input:
- // - target_bitrate : The target bitrate for VCM in bits/s.
- // - lossRate : Fractions of lost packets the past second.
- // (loss rate in percent = 100 * packetLoss / 255)
- // - rtt : Current round-trip time in ms.
- //
- // Return value : VCM_OK, on success.
- // < 0, on error.
- virtual int32_t SetChannelParameters(uint32_t target_bitrate,
- uint8_t lossRate,
- int64_t rtt) = 0;
-
- // Sets the parameters describing the receive channel. These parameters are inputs to the
- // Media Optimization inside the VCM.
- //
- // Input:
-    //          - rtt : Current round-trip time in ms.
- //
- // Return value : VCM_OK, on success.
- // < 0, on error.
- virtual int32_t SetReceiveChannelParameters(int64_t rtt) = 0;
-
- // Register a transport callback which will be called to deliver the encoded data and
- // side information.
- //
- // Input:
- // - transport : The callback object to register.
- //
- // Return value : VCM_OK, on success.
- // < 0, on error.
- virtual int32_t RegisterTransportCallback(VCMPacketizationCallback* transport) = 0;
-
- // Register video output information callback which will be called to deliver information
- // about the video stream produced by the encoder, for instance the average frame rate and
- // bit rate.
- //
- // Input:
-    //          - sendStats : The callback object to register.
- //
- // Return value : VCM_OK, on success.
- // < 0, on error.
- virtual int32_t RegisterSendStatisticsCallback(
- VCMSendStatisticsCallback* sendStats) = 0;
-
- // Register a video protection callback which will be called to deliver
- // the requested FEC rate and NACK status (on/off).
- //
- // Input:
- // - protection : The callback object to register.
- //
- // Return value : VCM_OK, on success.
- // < 0, on error.
- virtual int32_t RegisterProtectionCallback(VCMProtectionCallback* protection) = 0;
-
- // Enable or disable a video protection method.
- //
- // Input:
- // - videoProtection : The method to enable or disable.
- // - enable : True if the method should be enabled, false if
- // it should be disabled.
- //
- // Return value : VCM_OK, on success.
- // < 0, on error.
- virtual int32_t SetVideoProtection(VCMVideoProtection videoProtection,
- bool enable) = 0;
-
- // Add one raw video frame to the encoder. This function does all the necessary
- // processing, then decides what frame type to encode, or if the frame should be
- // dropped. If the frame should be encoded it passes the frame to the encoder
- // before it returns.
- //
- // Input:
- // - videoFrame : Video frame to encode.
- // - codecSpecificInfo : Extra codec information, e.g., pre-parsed in-band signaling.
- //
- // Return value : VCM_OK, on success.
- // < 0, on error.
- virtual int32_t AddVideoFrame(
- const VideoFrame& videoFrame,
- const VideoContentMetrics* contentMetrics = NULL,
- const CodecSpecificInfo* codecSpecificInfo = NULL) = 0;
-
- // Next frame encoded should be an intra frame (keyframe).
- //
- // Return value : VCM_OK, on success.
- // < 0, on error.
- virtual int32_t IntraFrameRequest(int stream_index) = 0;
-
-    // Enables/disables the frame dropper. Can be used to disable frame dropping
-    // when the encoder overshoots its target bit rate. This API is designed for
-    // cases where the encoded frames are supposed to be stored to an AVI file, or
-    // when the I420 codec is used and the target bit rate shouldn't affect the
-    // frame rate.
- //
- // Input:
- // - enable : True to enable the setting, false to disable it.
- //
- // Return value : VCM_OK, on success.
- // < 0, on error.
- virtual int32_t EnableFrameDropper(bool enable) = 0;
-
-
- /*
- * Receiver
- */
-
-    // Registers possible receive codecs; can be called multiple times for different codecs.
- // The module will automatically switch between registered codecs depending on the
- // payload type of incoming frames. The actual decoder will be created when needed.
- //
- // Input:
- // - receiveCodec : Settings for the codec to be registered.
- // - numberOfCores : Number of CPU cores that the decoder is allowed to use.
- // - requireKeyFrame : Set this to true if you don't want any delta frames
- // to be decoded until the first key frame has been decoded.
- //
- // Return value : VCM_OK, on success.
- // < 0, on error.
- virtual int32_t RegisterReceiveCodec(const VideoCodec* receiveCodec,
- int32_t numberOfCores,
- bool requireKeyFrame = false) = 0;
-
-    // Register an externally defined decoder/renderer object. Can be a decoder only
-    // or a decoder coupled with a renderer. Note that RegisterReceiveCodec must also
-    // be called for this decoder to be used for decoding incoming streams.
- //
- // Input:
- // - externalDecoder : The external decoder/renderer object.
- // - payloadType : The payload type which this decoder should be
- // registered to.
-    //          - internalRenderTiming : True if the internal renderer (if any) of the decoder
-    //                                   object is able to render frames at a given time in ms.
- //
- // Return value : VCM_OK, on success.
- // < 0, on error.
- virtual int32_t RegisterExternalDecoder(VideoDecoder* externalDecoder,
- uint8_t payloadType,
- bool internalRenderTiming) = 0;
-
- // Register a receive callback. Will be called whenever there is a new frame ready
- // for rendering.
- //
- // Input:
- // - receiveCallback : The callback object to be used by the module when a
- // frame is ready for rendering.
- // De-register with a NULL pointer.
- //
- // Return value : VCM_OK, on success.
- // < 0, on error.
- virtual int32_t RegisterReceiveCallback(VCMReceiveCallback* receiveCallback) = 0;
-
- // Register a receive statistics callback which will be called to deliver information
- // about the video stream received by the receiving side of the VCM, for instance the
- // average frame rate and bit rate.
- //
- // Input:
- // - receiveStats : The callback object to register.
- //
- // Return value : VCM_OK, on success.
- // < 0, on error.
- virtual int32_t RegisterReceiveStatisticsCallback(
- VCMReceiveStatisticsCallback* receiveStats) = 0;
-
- // Register a decoder timing callback which will be called to deliver
- // information about the timing of the decoder in the receiving side of the
- // VCM, for instance the current and maximum frame decode latency.
- //
- // Input:
- // - decoderTiming : The callback object to register.
- //
- // Return value : VCM_OK, on success.
- // < 0, on error.
- virtual int32_t RegisterDecoderTimingCallback(
- VCMDecoderTimingCallback* decoderTiming) = 0;
-
- // Register a frame type request callback. This callback will be called when the
- // module needs to request specific frame types from the send side.
- //
- // Input:
- // - frameTypeCallback : The callback object to be used by the module when
- // requesting a specific type of frame from the send side.
- // De-register with a NULL pointer.
- //
- // Return value : VCM_OK, on success.
- // < 0, on error.
- virtual int32_t RegisterFrameTypeCallback(
- VCMFrameTypeCallback* frameTypeCallback) = 0;
-
- // Registers a callback which is called whenever the receive side of the VCM
- // encounters holes in the packet sequence and needs packets to be retransmitted.
- //
- // Input:
- // - callback : The callback to be registered in the VCM.
- //
- // Return value : VCM_OK, on success.
- // <0, on error.
- virtual int32_t RegisterPacketRequestCallback(
- VCMPacketRequestCallback* callback) = 0;
-
- // Waits for the next frame in the jitter buffer to become complete
- // (waits no longer than maxWaitTimeMs), then passes it to the decoder for decoding.
- // Should be called as often as possible to get the most out of the decoder.
- //
- // Return value : VCM_OK, on success.
- // < 0, on error.
- virtual int32_t Decode(uint16_t maxWaitTimeMs = 200) = 0;
-
- // Registers a callback which conveys the size of the render buffer.
- virtual int RegisterRenderBufferSizeCallback(
- VCMRenderBufferSizeCallback* callback) = 0;
-
- // Reset the decoder state to the initial state.
- //
- // Return value : VCM_OK, on success.
- // < 0, on error.
- virtual int32_t ResetDecoder() = 0;
-
- // API to get the codec which is currently used for decoding by the module.
- //
- // Input:
-    //          - currentReceiveCodec : Memory where the current receive codec settings will be written.
- //
- // Return value : VCM_OK, on success.
- // < 0, on error.
- virtual int32_t ReceiveCodec(VideoCodec* currentReceiveCodec) const = 0;
-
- // API to get the codec type currently used for decoding by the module.
- //
-    // Return value      : codec type, on success.
- // kVideoCodecUnknown, on error or if no receive codec is registered
- virtual VideoCodecType ReceiveCodec() const = 0;
-
- // Insert a parsed packet into the receiver side of the module. Will be placed in the
- // jitter buffer waiting for the frame to become complete. Returns as soon as the packet
- // has been placed in the jitter buffer.
- //
- // Input:
- // - incomingPayload : Payload of the packet.
- // - payloadLength : Length of the payload.
- // - rtpInfo : The parsed header.
- //
- // Return value : VCM_OK, on success.
- // < 0, on error.
- virtual int32_t IncomingPacket(const uint8_t* incomingPayload,
- size_t payloadLength,
- const WebRtcRTPHeader& rtpInfo) = 0;
-
-    // Minimum playout delay (used for lip-sync). This is the minimum delay required
-    // to sync with audio. Not included in VideoCodingModule::Delay().
-    // Defaults to 0 ms.
- //
- // Input:
- // - minPlayoutDelayMs : Additional delay in ms.
- //
- // Return value : VCM_OK, on success.
- // < 0, on error.
- virtual int32_t SetMinimumPlayoutDelay(uint32_t minPlayoutDelayMs) = 0;
-
- // Set the time required by the renderer to render a frame.
- //
- // Input:
- // - timeMS : The time in ms required by the renderer to render a frame.
- //
- // Return value : VCM_OK, on success.
- // < 0, on error.
- virtual int32_t SetRenderDelay(uint32_t timeMS) = 0;
-
- // The total delay desired by the VCM. Can be less than the minimum
- // delay set with SetMinimumPlayoutDelay.
- //
- // Return value : Total delay in ms, on success.
- // < 0, on error.
- virtual int32_t Delay() const = 0;
-
-    // Returns the number of packets discarded by the jitter buffer due to being
-    // too late. This can include duplicated packets which arrived after the
-    // frame was sent to the decoder, so retransmissions triggered by premature
-    // NACKs are counted as well.
- virtual uint32_t DiscardedPackets() const = 0;
-
-
- // Robustness APIs
-
- // Set the receiver robustness mode. The mode decides how the receiver
- // responds to losses in the stream. The type of counter-measure (soft or
- // hard NACK, dual decoder, RPS, etc.) is selected through the
- // robustnessMode parameter. The errorMode parameter decides if it is
- // allowed to display frames corrupted by losses. Note that not all
- // combinations of the two parameters are feasible. An error will be
- // returned for invalid combinations.
- // Input:
- // - robustnessMode : selected robustness mode.
- // - errorMode : selected error mode.
- //
- // Return value : VCM_OK, on success;
- // < 0, on error.
- virtual int SetReceiverRobustnessMode(ReceiverRobustness robustnessMode,
- VCMDecodeErrorMode errorMode) = 0;
-
- // Set the decode error mode. The mode decides which errors (if any) are
- // allowed in decodable frames. Note that setting decode_error_mode to
- // anything other than kWithErrors without enabling nack will cause
- // long-term freezes (resulting from frequent key frame requests) if
- // packet loss occurs.
- virtual void SetDecodeErrorMode(VCMDecodeErrorMode decode_error_mode) = 0;
-
- // Sets the maximum number of sequence numbers that we are allowed to NACK
- // and the oldest sequence number that we will consider to NACK. If a
- // sequence number older than |max_packet_age_to_nack| is missing
- // a key frame will be requested. A key frame will also be requested if the
- // time of incomplete or non-continuous frames in the jitter buffer is above
- // |max_incomplete_time_ms|.
- virtual void SetNackSettings(size_t max_nack_list_size,
- int max_packet_age_to_nack,
- int max_incomplete_time_ms) = 0;
-
- // Setting a desired delay to the VCM receiver. Video rendering will be
- // delayed by at least desired_delay_ms.
- virtual int SetMinReceiverDelay(int desired_delay_ms) = 0;
-
-    // Lets the sender suspend video when the rate drops below an internal
-    // threshold, and resume when the rate goes back up above that threshold
-    // plus a hysteresis window. Neither value is a parameter of this call.
- virtual void SuspendBelowMinBitrate() = 0;
-
- // Returns true if SuspendBelowMinBitrate is engaged and the video has been
- // suspended due to bandwidth limitations; otherwise false.
- virtual bool VideoSuspended() const = 0;
-
- virtual void RegisterPreDecodeImageCallback(
- EncodedImageCallback* observer) = 0;
- virtual void RegisterPostEncodeImageCallback(
- EncodedImageCallback* post_encode_callback) = 0;
- // Releases pending decode calls, permitting faster thread shutdown.
- virtual void TriggerDecoderShutdown() = 0;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_INTERFACE_VIDEO_CODING_H_
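Taken together, the sender half of the interface is typically driven as in the following sketch. It is illustrative only: the clock, observers, and frame are assumed to be supplied by the embedding application, and real code should check every return value against VCM_OK.

#include "webrtc/modules/video_coding/main/interface/video_coding.h"

void SenderSketch(webrtc::Clock* clock,
                  webrtc::VideoEncoderRateObserver* rate_observer,
                  webrtc::VCMQMSettingsCallback* qm_callback,
                  const webrtc::VideoFrame& frame) {
  webrtc::VideoCodingModule* vcm =
      webrtc::VideoCodingModule::Create(clock, rate_observer, qm_callback);

  // Register the default VP8 settings as the send codec.
  webrtc::VideoCodec codec;
  webrtc::VideoCodingModule::Codec(webrtc::kVideoCodecVP8, &codec);
  vcm->RegisterSendCodec(&codec, /* numberOfCores */ 1,
                         /* maxPayloadSize */ 1440);

  // Feed the module a bandwidth estimate and a raw frame to encode.
  vcm->SetChannelParameters(/* target_bitrate */ 300000,
                            /* lossRate */ 0,
                            /* rtt */ 100);
  vcm->AddVideoFrame(frame);

  webrtc::VideoCodingModule::Destroy(vcm);
}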
diff --git a/webrtc/modules/video_coding/main/interface/video_coding_defines.h b/webrtc/modules/video_coding/main/interface/video_coding_defines.h
deleted file mode 100644
index fd38d64415..0000000000
--- a/webrtc/modules/video_coding/main/interface/video_coding_defines.h
+++ /dev/null
@@ -1,201 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_INTERFACE_VIDEO_CODING_DEFINES_H_
-#define WEBRTC_MODULES_INTERFACE_VIDEO_CODING_DEFINES_H_
-
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/typedefs.h"
-#include "webrtc/video_frame.h"
-
-namespace webrtc {
-
-// Error codes
-#define VCM_FRAME_NOT_READY 3
-#define VCM_REQUEST_SLI 2
-#define VCM_MISSING_CALLBACK 1
-#define VCM_OK 0
-#define VCM_GENERAL_ERROR -1
-#define VCM_LEVEL_EXCEEDED -2
-#define VCM_MEMORY -3
-#define VCM_PARAMETER_ERROR -4
-#define VCM_UNKNOWN_PAYLOAD -5
-#define VCM_CODEC_ERROR -6
-#define VCM_UNINITIALIZED -7
-#define VCM_NO_CODEC_REGISTERED -8
-#define VCM_JITTER_BUFFER_ERROR -9
-#define VCM_OLD_PACKET_ERROR -10
-#define VCM_NO_FRAME_DECODED -11
-#define VCM_ERROR_REQUEST_SLI -12
-#define VCM_NOT_IMPLEMENTED -20
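By convention callers treat VCM_OK as success, negative codes as hard errors, and the small positive codes as non-fatal status. A plausible checking pattern, assuming |vcm| is a configured VideoCodingModule and that Decode() surfaces these codes:

int32_t ret = vcm->Decode(/* maxWaitTimeMs */ 50);
if (ret == VCM_OK) {
  // A complete frame was decoded and handed to the receive callback.
} else if (ret == VCM_FRAME_NOT_READY) {
  // Non-fatal: no complete frame arrived within the wait window.
} else if (ret < 0) {
  // Hard error, e.g. VCM_UNINITIALIZED or VCM_NO_CODEC_REGISTERED.
}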
-
-enum { kDefaultStartBitrateKbps = 300 };
-
-enum VCMVideoProtection {
- kProtectionNone,
- kProtectionNack,
- kProtectionFEC,
- kProtectionNackFEC,
-};
-
-enum VCMTemporalDecimation {
- kBitrateOverUseDecimation,
-};
-
-struct VCMFrameCount {
- uint32_t numKeyFrames;
- uint32_t numDeltaFrames;
-};
-
-// Callback class used for sending data ready to be packetized
-class VCMPacketizationCallback {
- public:
- virtual int32_t SendData(uint8_t payloadType,
- const EncodedImage& encoded_image,
- const RTPFragmentationHeader& fragmentationHeader,
- const RTPVideoHeader* rtpVideoHdr) = 0;
-
- protected:
- virtual ~VCMPacketizationCallback() {
- }
-};
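Implementing the interface only requires overriding SendData. A hypothetical forwarding implementation (the class name is illustrative; the actual RTP packetization is left out):

class ForwardingPacketizationCallback : public webrtc::VCMPacketizationCallback {
 public:
  virtual int32_t SendData(
      uint8_t payloadType,
      const webrtc::EncodedImage& encoded_image,
      const webrtc::RTPFragmentationHeader& fragmentationHeader,
      const webrtc::RTPVideoHeader* rtpVideoHdr) {
    // A real implementation would split the encoded payload into RTP
    // packets according to fragmentationHeader and hand them to the
    // transport. Returning 0 (VCM_OK) signals success to the VCM.
    return 0;
  }
};

Such an object is registered through VideoCodingModule::RegisterTransportCallback().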
-
-// Callback class used for passing decoded frames which are ready to be rendered.
-class VCMReceiveCallback {
- public:
- virtual int32_t FrameToRender(VideoFrame& videoFrame) = 0;
- virtual int32_t ReceivedDecodedReferenceFrame(
- const uint64_t pictureId) {
- return -1;
- }
- // Called when the current receive codec changes.
- virtual void OnIncomingPayloadType(int payload_type) {}
-
- protected:
- virtual ~VCMReceiveCallback() {
- }
-};
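A correspondingly minimal receive callback; this sketch merely counts frames, where a real implementation would queue them for rendering:

class CountingReceiveCallback : public webrtc::VCMReceiveCallback {
 public:
  CountingReceiveCallback() : frames_(0) {}
  virtual int32_t FrameToRender(webrtc::VideoFrame& videoFrame) {
    ++frames_;  // Stand-in for handing videoFrame to a renderer.
    return 0;   // 0 == VCM_OK.
  }
 private:
  int frames_;
};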
-
-// Callback class used for informing the user of the bit rate and frame rate produced by the
-// encoder.
-class VCMSendStatisticsCallback {
- public:
- virtual int32_t SendStatistics(const uint32_t bitRate,
- const uint32_t frameRate) = 0;
-
- protected:
- virtual ~VCMSendStatisticsCallback() {
- }
-};
-
-// Callback class used for informing the user of the incoming bit rate and frame rate.
-class VCMReceiveStatisticsCallback {
- public:
- virtual void OnReceiveRatesUpdated(uint32_t bitRate, uint32_t frameRate) = 0;
- virtual void OnDiscardedPacketsUpdated(int discarded_packets) = 0;
- virtual void OnFrameCountsUpdated(const FrameCounts& frame_counts) = 0;
-
- protected:
- virtual ~VCMReceiveStatisticsCallback() {
- }
-};
-
-// Callback class used for informing the user of decode timing info.
-class VCMDecoderTimingCallback {
- public:
- virtual void OnDecoderTiming(int decode_ms,
- int max_decode_ms,
- int current_delay_ms,
- int target_delay_ms,
- int jitter_buffer_ms,
- int min_playout_delay_ms,
- int render_delay_ms) = 0;
-
- protected:
- virtual ~VCMDecoderTimingCallback() {}
-};
-
-// Callback class used for telling the user how to configure the FEC;
-// the rates sent during the last second are returned to the VCM.
-class VCMProtectionCallback {
- public:
- virtual int ProtectionRequest(const FecProtectionParams* delta_params,
- const FecProtectionParams* key_params,
- uint32_t* sent_video_rate_bps,
- uint32_t* sent_nack_rate_bps,
- uint32_t* sent_fec_rate_bps) = 0;
-
- protected:
- virtual ~VCMProtectionCallback() {
- }
-};
-
-class VideoEncoderRateObserver {
- public:
- virtual ~VideoEncoderRateObserver() {}
- virtual void OnSetRates(uint32_t bitrate_bps, int framerate) = 0;
-};
-
-// Callback class used for telling the user which frame type is needed to continue
-// decoding. Typically a key frame when the stream has been corrupted in some way.
-class VCMFrameTypeCallback {
- public:
- virtual int32_t RequestKeyFrame() = 0;
- virtual int32_t SliceLossIndicationRequest(
- const uint64_t pictureId) {
- return -1;
- }
-
- protected:
- virtual ~VCMFrameTypeCallback() {
- }
-};
-
-// Callback class used for telling the user about which packet sequence numbers are currently
-// missing and need to be resent.
-class VCMPacketRequestCallback {
- public:
- virtual int32_t ResendPackets(const uint16_t* sequenceNumbers,
- uint16_t length) = 0;
-
- protected:
- virtual ~VCMPacketRequestCallback() {
- }
-};
-
-// Callback used to inform the user of the desired resolution
-// as subscribed by Media Optimization (Quality Modes).
-class VCMQMSettingsCallback {
- public:
- virtual int32_t SetVideoQMSettings(const uint32_t frameRate,
- const uint32_t width,
- const uint32_t height) = 0;
-
- virtual void SetTargetFramerate(int frame_rate) = 0;
-
- protected:
- virtual ~VCMQMSettingsCallback() {
- }
-};
-
-// Callback class used for telling the user the size (in time) of the render
-// buffer, that is, how many milliseconds of complete, continuous frames it holds.
-class VCMRenderBufferSizeCallback {
- public:
- virtual void RenderBufferSizeMs(int buffer_size_ms) = 0;
-
- protected:
- virtual ~VCMRenderBufferSizeCallback() {
- }
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_INTERFACE_VIDEO_CODING_DEFINES_H_
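On the receive side, callbacks like the sketches above plug into the VideoCodingModule as follows. This is an outline only: |vcm|, |payload|, |payload_length|, and |rtp_header| are assumed to come from the embedding RTP stack.

webrtc::VideoCodec codec;
webrtc::VideoCodingModule::Codec(webrtc::kVideoCodecVP8, &codec);
vcm->RegisterReceiveCodec(&codec, /* numberOfCores */ 1);

CountingReceiveCallback render_callback;
vcm->RegisterReceiveCallback(&render_callback);

// For every parsed RTP packet:
vcm->IncomingPacket(payload, payload_length, rtp_header);

// On a dedicated decode thread, called as often as possible:
vcm->Decode(/* maxWaitTimeMs */ 50);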
diff --git a/webrtc/modules/video_coding/main/source/OWNERS b/webrtc/modules/video_coding/main/source/OWNERS
deleted file mode 100644
index 3ee6b4bf5f..0000000000
--- a/webrtc/modules/video_coding/main/source/OWNERS
+++ /dev/null
@@ -1,5 +0,0 @@
-
-# These are for the common case of adding or renaming files. If you're doing
-# structural changes, please get a review from a reviewer in this file.
-per-file *.gyp=*
-per-file *.gypi=*
diff --git a/webrtc/modules/video_coding/main/source/codec_database.cc b/webrtc/modules/video_coding/main/source/codec_database.cc
deleted file mode 100644
index bfdc609e3c..0000000000
--- a/webrtc/modules/video_coding/main/source/codec_database.cc
+++ /dev/null
@@ -1,687 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_coding/main/source/codec_database.h"
-
-#include <assert.h>
-
-#include "webrtc/base/checks.h"
-#include "webrtc/engine_configurations.h"
-#ifdef VIDEOCODEC_H264
-#include "webrtc/modules/video_coding/codecs/h264/include/h264.h"
-#endif
-#ifdef VIDEOCODEC_I420
-#include "webrtc/modules/video_coding/codecs/i420/include/i420.h"
-#endif
-#ifdef VIDEOCODEC_VP8
-#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
-#endif
-#ifdef VIDEOCODEC_VP9
-#include "webrtc/modules/video_coding/codecs/vp9/include/vp9.h"
-#endif
-#include "webrtc/modules/video_coding/main/source/internal_defines.h"
-#include "webrtc/system_wrappers/include/logging.h"
-
-namespace {
-const size_t kDefaultPayloadSize = 1440;
-const uint8_t kDefaultPayloadType = 100;
-}
-
-namespace webrtc {
-
-VideoCodecVP8 VideoEncoder::GetDefaultVp8Settings() {
- VideoCodecVP8 vp8_settings;
- memset(&vp8_settings, 0, sizeof(vp8_settings));
-
- vp8_settings.resilience = kResilientStream;
- vp8_settings.numberOfTemporalLayers = 1;
- vp8_settings.denoisingOn = true;
- vp8_settings.errorConcealmentOn = false;
- vp8_settings.automaticResizeOn = false;
- vp8_settings.frameDroppingOn = true;
- vp8_settings.keyFrameInterval = 3000;
-
- return vp8_settings;
-}
-
-VideoCodecVP9 VideoEncoder::GetDefaultVp9Settings() {
- VideoCodecVP9 vp9_settings;
- memset(&vp9_settings, 0, sizeof(vp9_settings));
-
- vp9_settings.resilience = 1;
- vp9_settings.numberOfTemporalLayers = 1;
- vp9_settings.denoisingOn = false;
- vp9_settings.frameDroppingOn = true;
- vp9_settings.keyFrameInterval = 3000;
- vp9_settings.adaptiveQpMode = true;
- vp9_settings.automaticResizeOn = true;
- vp9_settings.numberOfSpatialLayers = 1;
- vp9_settings.flexibleMode = false;
- return vp9_settings;
-}
-
-VideoCodecH264 VideoEncoder::GetDefaultH264Settings() {
- VideoCodecH264 h264_settings;
- memset(&h264_settings, 0, sizeof(h264_settings));
-
- h264_settings.profile = kProfileBase;
- h264_settings.frameDroppingOn = true;
- h264_settings.keyFrameInterval = 3000;
- h264_settings.spsData = NULL;
- h264_settings.spsLen = 0;
- h264_settings.ppsData = NULL;
- h264_settings.ppsLen = 0;
-
- return h264_settings;
-}
-
-VCMDecoderMapItem::VCMDecoderMapItem(VideoCodec* settings,
- int number_of_cores,
- bool require_key_frame)
- : settings(settings),
- number_of_cores(number_of_cores),
- require_key_frame(require_key_frame) {
- assert(number_of_cores >= 0);
-}
-
-VCMExtDecoderMapItem::VCMExtDecoderMapItem(
- VideoDecoder* external_decoder_instance,
- uint8_t payload_type,
- bool internal_render_timing)
- : payload_type(payload_type),
- external_decoder_instance(external_decoder_instance),
- internal_render_timing(internal_render_timing) {
-}
-
-VCMCodecDataBase::VCMCodecDataBase(
- VideoEncoderRateObserver* encoder_rate_observer,
- VCMEncodedFrameCallback* encoded_frame_callback)
- : number_of_cores_(0),
- max_payload_size_(kDefaultPayloadSize),
- periodic_key_frames_(false),
- pending_encoder_reset_(true),
- send_codec_(),
- receive_codec_(),
- encoder_payload_type_(0),
- external_encoder_(NULL),
- internal_source_(false),
- encoder_rate_observer_(encoder_rate_observer),
- encoded_frame_callback_(encoded_frame_callback),
- ptr_decoder_(NULL),
- dec_map_(),
- dec_external_map_() {}
-
-VCMCodecDataBase::~VCMCodecDataBase() {
- ResetSender();
- ResetReceiver();
-}
-
-int VCMCodecDataBase::NumberOfCodecs() {
- return VCM_NUM_VIDEO_CODECS_AVAILABLE;
-}
-
-bool VCMCodecDataBase::Codec(int list_id,
- VideoCodec* settings) {
- if (!settings) {
- return false;
- }
- if (list_id >= VCM_NUM_VIDEO_CODECS_AVAILABLE) {
- return false;
- }
- memset(settings, 0, sizeof(VideoCodec));
- switch (list_id) {
-#ifdef VIDEOCODEC_VP8
- case VCM_VP8_IDX: {
- strncpy(settings->plName, "VP8", 4);
- settings->codecType = kVideoCodecVP8;
- // 96 to 127 dynamic payload types for video codecs.
- settings->plType = kDefaultPayloadType;
- settings->startBitrate = kDefaultStartBitrateKbps;
- settings->minBitrate = VCM_MIN_BITRATE;
- settings->maxBitrate = 0;
- settings->maxFramerate = VCM_DEFAULT_FRAME_RATE;
- settings->width = VCM_DEFAULT_CODEC_WIDTH;
- settings->height = VCM_DEFAULT_CODEC_HEIGHT;
- settings->numberOfSimulcastStreams = 0;
- settings->qpMax = 56;
- settings->codecSpecific.VP8 = VideoEncoder::GetDefaultVp8Settings();
- return true;
- }
-#endif
-#ifdef VIDEOCODEC_VP9
- case VCM_VP9_IDX: {
- strncpy(settings->plName, "VP9", 4);
- settings->codecType = kVideoCodecVP9;
- // 96 to 127 dynamic payload types for video codecs.
- settings->plType = kDefaultPayloadType;
- settings->startBitrate = 100;
- settings->minBitrate = VCM_MIN_BITRATE;
- settings->maxBitrate = 0;
- settings->maxFramerate = VCM_DEFAULT_FRAME_RATE;
- settings->width = VCM_DEFAULT_CODEC_WIDTH;
- settings->height = VCM_DEFAULT_CODEC_HEIGHT;
- settings->numberOfSimulcastStreams = 0;
- settings->qpMax = 56;
- settings->codecSpecific.VP9 = VideoEncoder::GetDefaultVp9Settings();
- return true;
- }
-#endif
-#ifdef VIDEOCODEC_H264
- case VCM_H264_IDX: {
- strncpy(settings->plName, "H264", 5);
- settings->codecType = kVideoCodecH264;
- // 96 to 127 dynamic payload types for video codecs.
- settings->plType = kDefaultPayloadType;
- settings->startBitrate = kDefaultStartBitrateKbps;
- settings->minBitrate = VCM_MIN_BITRATE;
- settings->maxBitrate = 0;
- settings->maxFramerate = VCM_DEFAULT_FRAME_RATE;
- settings->width = VCM_DEFAULT_CODEC_WIDTH;
- settings->height = VCM_DEFAULT_CODEC_HEIGHT;
- settings->numberOfSimulcastStreams = 0;
- settings->qpMax = 56;
- settings->codecSpecific.H264 = VideoEncoder::GetDefaultH264Settings();
- return true;
- }
-#endif
-#ifdef VIDEOCODEC_I420
- case VCM_I420_IDX: {
- strncpy(settings->plName, "I420", 5);
- settings->codecType = kVideoCodecI420;
- // 96 to 127 dynamic payload types for video codecs.
- settings->plType = kDefaultPayloadType;
- // Bitrate needed for this size and framerate.
- settings->startBitrate = 3 * VCM_DEFAULT_CODEC_WIDTH *
- VCM_DEFAULT_CODEC_HEIGHT * 8 *
- VCM_DEFAULT_FRAME_RATE / 1000 / 2;
- settings->maxBitrate = settings->startBitrate;
- settings->maxFramerate = VCM_DEFAULT_FRAME_RATE;
- settings->width = VCM_DEFAULT_CODEC_WIDTH;
- settings->height = VCM_DEFAULT_CODEC_HEIGHT;
- settings->minBitrate = VCM_MIN_BITRATE;
- settings->numberOfSimulcastStreams = 0;
- return true;
- }
-#endif
- default: {
- return false;
- }
- }
-}
-
-bool VCMCodecDataBase::Codec(VideoCodecType codec_type,
- VideoCodec* settings) {
- for (int i = 0; i < VCMCodecDataBase::NumberOfCodecs(); i++) {
- const bool ret = VCMCodecDataBase::Codec(i, settings);
- if (!ret) {
- return false;
- }
- if (codec_type == settings->codecType) {
- return true;
- }
- }
- return false;
-}
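The two static Codec() overloads compose: the type-based lookup is a linear scan over the id-based one, so enumerating whatever was compiled in reduces to a loop. A sketch:

#include <stdio.h>

void ListSupportedCodecs() {
  for (int i = 0; i < webrtc::VCMCodecDataBase::NumberOfCodecs(); ++i) {
    webrtc::VideoCodec settings;
    if (webrtc::VCMCodecDataBase::Codec(i, &settings))
      printf("codec %d: %s\n", i, settings.plName);
  }
}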
-
-void VCMCodecDataBase::ResetSender() {
- DeleteEncoder();
- periodic_key_frames_ = false;
-}
-
-// Assumes at most one registered encoder; since only one is used at a time,
-// there is no need for more.
-bool VCMCodecDataBase::SetSendCodec(const VideoCodec* send_codec,
- int number_of_cores,
- size_t max_payload_size) {
- RTC_DCHECK(send_codec);
- if (max_payload_size == 0) {
- max_payload_size = kDefaultPayloadSize;
- }
- RTC_DCHECK_GE(number_of_cores, 1);
- RTC_DCHECK_GE(send_codec->plType, 1);
- // Make sure the start bit rate is sane...
- RTC_DCHECK_LE(send_codec->startBitrate, 1000000u);
- RTC_DCHECK(send_codec->codecType != kVideoCodecUnknown);
- bool reset_required = pending_encoder_reset_;
- if (number_of_cores_ != number_of_cores) {
- number_of_cores_ = number_of_cores;
- reset_required = true;
- }
- if (max_payload_size_ != max_payload_size) {
- max_payload_size_ = max_payload_size;
- reset_required = true;
- }
-
- VideoCodec new_send_codec;
- memcpy(&new_send_codec, send_codec, sizeof(new_send_codec));
-
- if (new_send_codec.maxBitrate == 0) {
- // max is one bit per pixel
- new_send_codec.maxBitrate = (static_cast<int>(send_codec->height) *
- static_cast<int>(send_codec->width) *
- static_cast<int>(send_codec->maxFramerate)) / 1000;
- if (send_codec->startBitrate > new_send_codec.maxBitrate) {
- // But if the user tries to set a higher start bit rate we will
- // increase the max accordingly.
- new_send_codec.maxBitrate = send_codec->startBitrate;
- }
- }
-
- if (new_send_codec.startBitrate > new_send_codec.maxBitrate)
- new_send_codec.startBitrate = new_send_codec.maxBitrate;
-
- if (!reset_required) {
- reset_required = RequiresEncoderReset(new_send_codec);
- }
-
- memcpy(&send_codec_, &new_send_codec, sizeof(send_codec_));
-
- if (!reset_required) {
- encoded_frame_callback_->SetPayloadType(send_codec_.plType);
- return true;
- }
-
- // If encoder exists, will destroy it and create new one.
- DeleteEncoder();
- RTC_DCHECK_EQ(encoder_payload_type_, send_codec_.plType)
- << "Encoder not registered for payload type " << send_codec_.plType;
- ptr_encoder_.reset(
- new VCMGenericEncoder(external_encoder_, encoder_rate_observer_,
- encoded_frame_callback_, internal_source_));
- encoded_frame_callback_->SetPayloadType(send_codec_.plType);
- encoded_frame_callback_->SetInternalSource(internal_source_);
- if (ptr_encoder_->InitEncode(&send_codec_, number_of_cores_,
- max_payload_size_) < 0) {
- LOG(LS_ERROR) << "Failed to initialize video encoder.";
- DeleteEncoder();
- return false;
- }
-
- // Intentionally don't check return value since the encoder registration
- // shouldn't fail because the codec doesn't support changing the periodic key
- // frame setting.
- ptr_encoder_->SetPeriodicKeyFrames(periodic_key_frames_);
-
- pending_encoder_reset_ = false;
-
- return true;
-}
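For scale, the one-bit-per-pixel cap above works out to width * height * maxFramerate / 1000 kbps: a 640x480 stream at 30 fps gets maxBitrate = 640 * 480 * 30 / 1000 = 9216 kbps, and a startBitrate above that simply raises the cap to match.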
-
-bool VCMCodecDataBase::SendCodec(VideoCodec* current_send_codec) const {
- if (!ptr_encoder_) {
- return false;
- }
- memcpy(current_send_codec, &send_codec_, sizeof(VideoCodec));
- return true;
-}
-
-VideoCodecType VCMCodecDataBase::SendCodec() const {
- if (!ptr_encoder_) {
- return kVideoCodecUnknown;
- }
- return send_codec_.codecType;
-}
-
-bool VCMCodecDataBase::DeregisterExternalEncoder(
- uint8_t payload_type, bool* was_send_codec) {
- assert(was_send_codec);
- *was_send_codec = false;
- if (encoder_payload_type_ != payload_type) {
- return false;
- }
- if (send_codec_.plType == payload_type) {
- // De-register as send codec if needed.
- DeleteEncoder();
- memset(&send_codec_, 0, sizeof(VideoCodec));
- *was_send_codec = true;
- }
- encoder_payload_type_ = 0;
- external_encoder_ = NULL;
- internal_source_ = false;
- return true;
-}
-
-void VCMCodecDataBase::RegisterExternalEncoder(
- VideoEncoder* external_encoder,
- uint8_t payload_type,
- bool internal_source) {
- // Since only one encoder can be used at a given time, only one external
- // encoder can be registered/used.
- external_encoder_ = external_encoder;
- encoder_payload_type_ = payload_type;
- internal_source_ = internal_source;
- pending_encoder_reset_ = true;
-}
-
-bool VCMCodecDataBase::RequiresEncoderReset(const VideoCodec& new_send_codec) {
- if (ptr_encoder_ == NULL) {
- return true;
- }
-
- // Does not check startBitrate or maxFramerate
- if (new_send_codec.codecType != send_codec_.codecType ||
- strcmp(new_send_codec.plName, send_codec_.plName) != 0 ||
- new_send_codec.plType != send_codec_.plType ||
- new_send_codec.width != send_codec_.width ||
- new_send_codec.height != send_codec_.height ||
- new_send_codec.maxBitrate != send_codec_.maxBitrate ||
- new_send_codec.minBitrate != send_codec_.minBitrate ||
- new_send_codec.qpMax != send_codec_.qpMax ||
- new_send_codec.numberOfSimulcastStreams !=
- send_codec_.numberOfSimulcastStreams ||
- new_send_codec.mode != send_codec_.mode ||
- new_send_codec.extra_options != send_codec_.extra_options) {
- return true;
- }
-
- switch (new_send_codec.codecType) {
- case kVideoCodecVP8:
- if (memcmp(&new_send_codec.codecSpecific.VP8,
- &send_codec_.codecSpecific.VP8,
- sizeof(new_send_codec.codecSpecific.VP8)) != 0) {
- return true;
- }
- break;
- case kVideoCodecVP9:
- if (memcmp(&new_send_codec.codecSpecific.VP9,
- &send_codec_.codecSpecific.VP9,
- sizeof(new_send_codec.codecSpecific.VP9)) != 0) {
- return true;
- }
- break;
- case kVideoCodecH264:
- if (memcmp(&new_send_codec.codecSpecific.H264,
- &send_codec_.codecSpecific.H264,
- sizeof(new_send_codec.codecSpecific.H264)) != 0) {
- return true;
- }
- break;
- case kVideoCodecGeneric:
- break;
- // Known codecs without payload-specifics
- case kVideoCodecI420:
- case kVideoCodecRED:
- case kVideoCodecULPFEC:
- break;
- // Unknown codec type, reset just to be sure.
- case kVideoCodecUnknown:
- return true;
- }
-
- if (new_send_codec.numberOfSimulcastStreams > 0) {
- for (unsigned char i = 0; i < new_send_codec.numberOfSimulcastStreams;
- ++i) {
- if (memcmp(&new_send_codec.simulcastStream[i],
- &send_codec_.simulcastStream[i],
- sizeof(new_send_codec.simulcastStream[i])) !=
- 0) {
- return true;
- }
- }
- }
- return false;
-}
-
-VCMGenericEncoder* VCMCodecDataBase::GetEncoder() {
- return ptr_encoder_.get();
-}
-
-bool VCMCodecDataBase::SetPeriodicKeyFrames(bool enable) {
- periodic_key_frames_ = enable;
- if (ptr_encoder_) {
- return (ptr_encoder_->SetPeriodicKeyFrames(periodic_key_frames_) == 0);
- }
- return true;
-}
-
-void VCMCodecDataBase::ResetReceiver() {
- ReleaseDecoder(ptr_decoder_);
- ptr_decoder_ = NULL;
- memset(&receive_codec_, 0, sizeof(VideoCodec));
- while (!dec_map_.empty()) {
- DecoderMap::iterator it = dec_map_.begin();
- delete (*it).second;
- dec_map_.erase(it);
- }
- while (!dec_external_map_.empty()) {
- ExternalDecoderMap::iterator external_it = dec_external_map_.begin();
- delete (*external_it).second;
- dec_external_map_.erase(external_it);
- }
-}
-
-bool VCMCodecDataBase::DeregisterExternalDecoder(uint8_t payload_type) {
- ExternalDecoderMap::iterator it = dec_external_map_.find(payload_type);
- if (it == dec_external_map_.end()) {
- // Not found
- return false;
- }
- // We can't use payload_type to check if the decoder is currently in use,
- // because payload type may be out of date (e.g. before we decode the first
- // frame after RegisterReceiveCodec)
- if (ptr_decoder_ != NULL &&
- &ptr_decoder_->_decoder == (*it).second->external_decoder_instance) {
- // Release it if it was registered and in use.
- ReleaseDecoder(ptr_decoder_);
- ptr_decoder_ = NULL;
- }
- DeregisterReceiveCodec(payload_type);
- delete (*it).second;
- dec_external_map_.erase(it);
- return true;
-}
-
-// Add the external decoder object to the list of external decoders.
-// Won't be registered as a receive codec until RegisterReceiveCodec is called.
-bool VCMCodecDataBase::RegisterExternalDecoder(
- VideoDecoder* external_decoder,
- uint8_t payload_type,
- bool internal_render_timing) {
-  // If this payload type is already registered, erase the old entry and insert the new one.
- VCMExtDecoderMapItem* ext_decoder = new VCMExtDecoderMapItem(
- external_decoder, payload_type, internal_render_timing);
- if (!ext_decoder) {
- return false;
- }
- DeregisterExternalDecoder(payload_type);
- dec_external_map_[payload_type] = ext_decoder;
- return true;
-}
-
-bool VCMCodecDataBase::DecoderRegistered() const {
- return !dec_map_.empty();
-}
-
-bool VCMCodecDataBase::RegisterReceiveCodec(
- const VideoCodec* receive_codec,
- int number_of_cores,
- bool require_key_frame) {
- if (number_of_cores < 0) {
- return false;
- }
-  // If this payload type is already registered, erase the old entry and insert the new one.
- DeregisterReceiveCodec(receive_codec->plType);
- if (receive_codec->codecType == kVideoCodecUnknown) {
- return false;
- }
- VideoCodec* new_receive_codec = new VideoCodec(*receive_codec);
- dec_map_[receive_codec->plType] = new VCMDecoderMapItem(new_receive_codec,
- number_of_cores,
- require_key_frame);
- return true;
-}
-
-bool VCMCodecDataBase::DeregisterReceiveCodec(
- uint8_t payload_type) {
- DecoderMap::iterator it = dec_map_.find(payload_type);
- if (it == dec_map_.end()) {
- return false;
- }
- VCMDecoderMapItem* dec_item = (*it).second;
- delete dec_item;
- dec_map_.erase(it);
- if (receive_codec_.plType == payload_type) {
- // This codec is currently in use.
- memset(&receive_codec_, 0, sizeof(VideoCodec));
- }
- return true;
-}
-
-bool VCMCodecDataBase::ReceiveCodec(VideoCodec* current_receive_codec) const {
- assert(current_receive_codec);
- if (!ptr_decoder_) {
- return false;
- }
- memcpy(current_receive_codec, &receive_codec_, sizeof(VideoCodec));
- return true;
-}
-
-VideoCodecType VCMCodecDataBase::ReceiveCodec() const {
- if (!ptr_decoder_) {
- return kVideoCodecUnknown;
- }
- return receive_codec_.codecType;
-}
-
-VCMGenericDecoder* VCMCodecDataBase::GetDecoder(
- uint8_t payload_type, VCMDecodedFrameCallback* decoded_frame_callback) {
- if (payload_type == receive_codec_.plType || payload_type == 0) {
- return ptr_decoder_;
- }
-  // Another decoder is currently active; release it before creating a new one.
- if (ptr_decoder_) {
- ReleaseDecoder(ptr_decoder_);
- ptr_decoder_ = NULL;
- memset(&receive_codec_, 0, sizeof(VideoCodec));
- }
- ptr_decoder_ = CreateAndInitDecoder(payload_type, &receive_codec_);
- if (!ptr_decoder_) {
- return NULL;
- }
- VCMReceiveCallback* callback = decoded_frame_callback->UserReceiveCallback();
- if (callback) callback->OnIncomingPayloadType(receive_codec_.plType);
- if (ptr_decoder_->RegisterDecodeCompleteCallback(decoded_frame_callback)
- < 0) {
- ReleaseDecoder(ptr_decoder_);
- ptr_decoder_ = NULL;
- memset(&receive_codec_, 0, sizeof(VideoCodec));
- return NULL;
- }
- return ptr_decoder_;
-}
-
-void VCMCodecDataBase::ReleaseDecoder(VCMGenericDecoder* decoder) const {
- if (decoder) {
- assert(&decoder->_decoder);
- decoder->Release();
- if (!decoder->External()) {
- delete &decoder->_decoder;
- }
- delete decoder;
- }
-}
-
-bool VCMCodecDataBase::SupportsRenderScheduling() const {
- const VCMExtDecoderMapItem* ext_item = FindExternalDecoderItem(
- receive_codec_.plType);
- if (ext_item == nullptr)
- return true;
- return ext_item->internal_render_timing;
-}
-
-bool VCMCodecDataBase::MatchesCurrentResolution(int width, int height) const {
- return send_codec_.width == width && send_codec_.height == height;
-}
-
-VCMGenericDecoder* VCMCodecDataBase::CreateAndInitDecoder(
- uint8_t payload_type,
- VideoCodec* new_codec) const {
- assert(new_codec);
- const VCMDecoderMapItem* decoder_item = FindDecoderItem(payload_type);
- if (!decoder_item) {
- LOG(LS_ERROR) << "Can't find a decoder associated with payload type: "
- << static_cast<int>(payload_type);
- return NULL;
- }
- VCMGenericDecoder* ptr_decoder = NULL;
- const VCMExtDecoderMapItem* external_dec_item =
- FindExternalDecoderItem(payload_type);
- if (external_dec_item) {
- // External codec.
- ptr_decoder = new VCMGenericDecoder(
- *external_dec_item->external_decoder_instance, true);
- } else {
- // Create decoder.
- ptr_decoder = CreateDecoder(decoder_item->settings->codecType);
- }
- if (!ptr_decoder)
- return NULL;
-
- if (ptr_decoder->InitDecode(decoder_item->settings.get(),
- decoder_item->number_of_cores) < 0) {
- ReleaseDecoder(ptr_decoder);
- return NULL;
- }
- memcpy(new_codec, decoder_item->settings.get(), sizeof(VideoCodec));
- return ptr_decoder;
-}
-
-void VCMCodecDataBase::DeleteEncoder() {
- if (!ptr_encoder_)
- return;
- ptr_encoder_->Release();
- ptr_encoder_.reset();
-}
-
-VCMGenericDecoder* VCMCodecDataBase::CreateDecoder(VideoCodecType type) const {
- switch (type) {
-#ifdef VIDEOCODEC_VP8
- case kVideoCodecVP8:
- return new VCMGenericDecoder(*(VP8Decoder::Create()));
-#endif
-#ifdef VIDEOCODEC_VP9
- case kVideoCodecVP9:
- return new VCMGenericDecoder(*(VP9Decoder::Create()));
-#endif
-#ifdef VIDEOCODEC_I420
- case kVideoCodecI420:
- return new VCMGenericDecoder(*(new I420Decoder));
-#endif
-#ifdef VIDEOCODEC_H264
- case kVideoCodecH264:
- if (H264Decoder::IsSupported()) {
- return new VCMGenericDecoder(*(H264Decoder::Create()));
- }
- break;
-#endif
- default:
- break;
- }
- LOG(LS_WARNING) << "No internal decoder of this type exists.";
- return NULL;
-}
-
-const VCMDecoderMapItem* VCMCodecDataBase::FindDecoderItem(
- uint8_t payload_type) const {
- DecoderMap::const_iterator it = dec_map_.find(payload_type);
- if (it != dec_map_.end()) {
- return (*it).second;
- }
- return NULL;
-}
-
-const VCMExtDecoderMapItem* VCMCodecDataBase::FindExternalDecoderItem(
- uint8_t payload_type) const {
- ExternalDecoderMap::const_iterator it = dec_external_map_.find(payload_type);
- if (it != dec_external_map_.end()) {
- return (*it).second;
- }
- return NULL;
-}
-} // namespace webrtc
diff --git a/webrtc/modules/video_coding/main/source/codec_database.h b/webrtc/modules/video_coding/main/source/codec_database.h
deleted file mode 100644
index 93aa9c3ba8..0000000000
--- a/webrtc/modules/video_coding/main/source/codec_database.h
+++ /dev/null
@@ -1,184 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_CODEC_DATABASE_H_
-#define WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_CODEC_DATABASE_H_
-
-#include <map>
-
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding.h"
-#include "webrtc/modules/video_coding/main/source/generic_decoder.h"
-#include "webrtc/modules/video_coding/main/source/generic_encoder.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-struct VCMDecoderMapItem {
- public:
- VCMDecoderMapItem(VideoCodec* settings,
- int number_of_cores,
- bool require_key_frame);
-
- rtc::scoped_ptr<VideoCodec> settings;
- int number_of_cores;
- bool require_key_frame;
-};
-
-struct VCMExtDecoderMapItem {
- public:
- VCMExtDecoderMapItem(VideoDecoder* external_decoder_instance,
- uint8_t payload_type,
- bool internal_render_timing);
-
- uint8_t payload_type;
- VideoDecoder* external_decoder_instance;
- bool internal_render_timing;
-};
-
-class VCMCodecDataBase {
- public:
- VCMCodecDataBase(VideoEncoderRateObserver* encoder_rate_observer,
- VCMEncodedFrameCallback* encoded_frame_callback);
- ~VCMCodecDataBase();
-
- // Sender Side
-  // Returns the number of supported codecs.
- static int NumberOfCodecs();
-
- // Returns the default settings for the codec with id |list_id|.
- static bool Codec(int list_id, VideoCodec* settings);
-
- // Returns the default settings for the codec with type |codec_type|.
- static bool Codec(VideoCodecType codec_type, VideoCodec* settings);
-
- void ResetSender();
-
- // Sets the sender side codec and initiates the desired codec given the
- // VideoCodec struct.
- // Returns true if the codec was successfully registered, false otherwise.
- bool SetSendCodec(const VideoCodec* send_codec,
- int number_of_cores,
- size_t max_payload_size);
-
- // Gets the current send codec. Relevant for internal codecs only.
- // Returns true if there is a send codec, false otherwise.
- bool SendCodec(VideoCodec* current_send_codec) const;
-
- // Gets current send side codec type. Relevant for internal codecs only.
- // Returns kVideoCodecUnknown if there is no send codec.
- VideoCodecType SendCodec() const;
-
- // Registers and initializes an external encoder object.
- // |internal_source| should be set to true if the codec has an internal
- // video source and doesn't need the user to provide it with frames via
- // the Encode() method.
- void RegisterExternalEncoder(VideoEncoder* external_encoder,
- uint8_t payload_type,
- bool internal_source);
-
- // Deregisters an external encoder. Returns true if the encoder was
- // found and deregistered, false otherwise. |was_send_codec| is set to true
- // if the external encoder was the send codec before being deregistered.
- bool DeregisterExternalEncoder(uint8_t payload_type, bool* was_send_codec);
-
- VCMGenericEncoder* GetEncoder();
-
- bool SetPeriodicKeyFrames(bool enable);
-
- // Receiver Side
- void ResetReceiver();
-
- // Deregisters an external decoder object specified by |payload_type|.
- bool DeregisterExternalDecoder(uint8_t payload_type);
-
- // Registers an external decoder object to the payload type |payload_type|.
- // |internal_render_timing| is set to true if the |external_decoder| has
- // built in rendering which is able to obey the render timestamps of the
- // encoded frames.
- bool RegisterExternalDecoder(VideoDecoder* external_decoder,
- uint8_t payload_type,
- bool internal_render_timing);
-
- bool DecoderRegistered() const;
-
- bool RegisterReceiveCodec(const VideoCodec* receive_codec,
- int number_of_cores,
- bool require_key_frame);
-
- bool DeregisterReceiveCodec(uint8_t payload_type);
-
- // Get current receive side codec. Relevant for internal codecs only.
- bool ReceiveCodec(VideoCodec* current_receive_codec) const;
-
- // Get current receive side codec type. Relevant for internal codecs only.
- VideoCodecType ReceiveCodec() const;
-
- // Returns a decoder specified by |payload_type|. The decoded frame callback
- // of the encoder is set to |decoded_frame_callback|. If no such decoder
- // already exists an instance will be created and initialized.
- // NULL is returned if no encoder with the specified payload type was found
- // and the function failed to create one.
- VCMGenericDecoder* GetDecoder(
- uint8_t payload_type, VCMDecodedFrameCallback* decoded_frame_callback);
-
-  // Deletes the memory of the decoder instance |decoder|, releasing the
-  // underlying decoder object (which is also deleted if internally created).
- void ReleaseDecoder(VCMGenericDecoder* decoder) const;
-
- // Returns true if the currently active decoder supports render scheduling,
- // that is, it is able to render frames according to the render timestamp of
- // the encoded frames.
- bool SupportsRenderScheduling() const;
-
- bool MatchesCurrentResolution(int width, int height) const;
-
- private:
- typedef std::map<uint8_t, VCMDecoderMapItem*> DecoderMap;
- typedef std::map<uint8_t, VCMExtDecoderMapItem*> ExternalDecoderMap;
-
- VCMGenericDecoder* CreateAndInitDecoder(uint8_t payload_type,
- VideoCodec* new_codec) const;
-
- // Determines whether a new codec has to be created or not.
- // Checks every setting apart from maxFramerate and startBitrate.
- bool RequiresEncoderReset(const VideoCodec& send_codec);
-
- void DeleteEncoder();
-
- // Create an internal Decoder given a codec type
- VCMGenericDecoder* CreateDecoder(VideoCodecType type) const;
-
- const VCMDecoderMapItem* FindDecoderItem(uint8_t payload_type) const;
-
- const VCMExtDecoderMapItem* FindExternalDecoderItem(
- uint8_t payload_type) const;
-
- int number_of_cores_;
- size_t max_payload_size_;
- bool periodic_key_frames_;
- bool pending_encoder_reset_;
- VideoCodec send_codec_;
- VideoCodec receive_codec_;
- uint8_t encoder_payload_type_;
- VideoEncoder* external_encoder_;
- bool internal_source_;
- VideoEncoderRateObserver* const encoder_rate_observer_;
- VCMEncodedFrameCallback* const encoded_frame_callback_;
- rtc::scoped_ptr<VCMGenericEncoder> ptr_encoder_;
- VCMGenericDecoder* ptr_decoder_;
- DecoderMap dec_map_;
- ExternalDecoderMap dec_external_map_;
-}; // VCMCodecDataBase
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_CODEC_DATABASE_H_
diff --git a/webrtc/modules/video_coding/main/source/codec_timer.cc b/webrtc/modules/video_coding/main/source/codec_timer.cc
deleted file mode 100644
index a462258813..0000000000
--- a/webrtc/modules/video_coding/main/source/codec_timer.cc
+++ /dev/null
@@ -1,136 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_coding/main/source/codec_timer.h"
-
-#include <assert.h>
-
-namespace webrtc
-{
-
-// The first kIgnoredSampleCount samples will be ignored.
-static const int32_t kIgnoredSampleCount = 5;
-
-VCMCodecTimer::VCMCodecTimer()
-:
-_filteredMax(0),
-_ignoredSampleCount(0),
-_shortMax(0),
-_history()
-{
- Reset();
-}
-
-int32_t VCMCodecTimer::StopTimer(int64_t startTimeMs, int64_t nowMs)
-{
- const int32_t timeDiff = static_cast<int32_t>(nowMs - startTimeMs);
- MaxFilter(timeDiff, nowMs);
- return timeDiff;
-}
-
-void VCMCodecTimer::Reset()
-{
- _filteredMax = 0;
- _ignoredSampleCount = 0;
- _shortMax = 0;
- for (int i=0; i < MAX_HISTORY_SIZE; i++)
- {
- _history[i].shortMax = 0;
- _history[i].timeMs = -1;
- }
-}
-
-// Update the max-value filter
-void VCMCodecTimer::MaxFilter(int32_t decodeTime, int64_t nowMs)
-{
- if (_ignoredSampleCount >= kIgnoredSampleCount)
- {
- UpdateMaxHistory(decodeTime, nowMs);
- ProcessHistory(nowMs);
- }
- else
- {
- _ignoredSampleCount++;
- }
-}
-
-void
-VCMCodecTimer::UpdateMaxHistory(int32_t decodeTime, int64_t now)
-{
- if (_history[0].timeMs >= 0 &&
- now - _history[0].timeMs < SHORT_FILTER_MS)
- {
- if (decodeTime > _shortMax)
- {
- _shortMax = decodeTime;
- }
- }
- else
- {
- // Only add a new value to the history once a second
- if(_history[0].timeMs == -1)
- {
- // First, no shift
- _shortMax = decodeTime;
- }
- else
- {
- // Shift
- for(int i = (MAX_HISTORY_SIZE - 2); i >= 0 ; i--)
- {
- _history[i+1].shortMax = _history[i].shortMax;
- _history[i+1].timeMs = _history[i].timeMs;
- }
- }
- if (_shortMax == 0)
- {
- _shortMax = decodeTime;
- }
-
- _history[0].shortMax = _shortMax;
- _history[0].timeMs = now;
- _shortMax = 0;
- }
-}
-
-void
-VCMCodecTimer::ProcessHistory(int64_t nowMs)
-{
- _filteredMax = _shortMax;
- if (_history[0].timeMs == -1)
- {
- return;
- }
- for (int i=0; i < MAX_HISTORY_SIZE; i++)
- {
- if (_history[i].timeMs == -1)
- {
- break;
- }
- if (nowMs - _history[i].timeMs > MAX_HISTORY_SIZE * SHORT_FILTER_MS)
- {
- // This sample (and all samples after this) is too old
- break;
- }
- if (_history[i].shortMax > _filteredMax)
- {
-            // This sample is the largest one seen so far in the history
- _filteredMax = _history[i].shortMax;
- }
- }
-}
-
-// Get the maximum observed time within a time window
-int32_t VCMCodecTimer::RequiredDecodeTimeMs(FrameType /*frameType*/) const
-{
- return _filteredMax;
-}
-
-}  // namespace webrtc
diff --git a/webrtc/modules/video_coding/main/source/codec_timer.h b/webrtc/modules/video_coding/main/source/codec_timer.h
deleted file mode 100644
index 9268e8d817..0000000000
--- a/webrtc/modules/video_coding/main/source/codec_timer.h
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_CODEC_TIMER_H_
-#define WEBRTC_MODULES_VIDEO_CODING_CODEC_TIMER_H_
-
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc
-{
-
-// MAX_HISTORY_SIZE * SHORT_FILTER_MS defines the window size in milliseconds
-#define MAX_HISTORY_SIZE 10
-#define SHORT_FILTER_MS 1000
-
-class VCMShortMaxSample
-{
-public:
-    VCMShortMaxSample() : shortMax(0), timeMs(-1) {}
-
- int32_t shortMax;
- int64_t timeMs;
-};
-
-class VCMCodecTimer
-{
-public:
- VCMCodecTimer();
-
- // Updates and returns the max filtered decode time.
- int32_t StopTimer(int64_t startTimeMs, int64_t nowMs);
-
- // Empty the list of timers.
- void Reset();
-
- // Get the required decode time in ms.
- int32_t RequiredDecodeTimeMs(FrameType frameType) const;
-
-private:
- void UpdateMaxHistory(int32_t decodeTime, int64_t now);
- void MaxFilter(int32_t newTime, int64_t nowMs);
- void ProcessHistory(int64_t nowMs);
-
- int32_t _filteredMax;
- // The number of samples ignored so far.
- int32_t _ignoredSampleCount;
- int32_t _shortMax;
- VCMShortMaxSample _history[MAX_HISTORY_SIZE];
-
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_CODEC_TIMER_H_
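The intended usage is to bracket each decode call and then query the filtered maximum. A sketch, assuming NowMs() stands in for the caller's millisecond clock and that kVideoFrameDelta is the delta-frame value of the FrameType enum from module_common_types.h:

webrtc::VCMCodecTimer timer;

int64_t start_ms = NowMs();
// ... decode one frame here ...
timer.StopTimer(start_ms, NowMs());

// Worst-case decode time observed within the ~10 second window
// (MAX_HISTORY_SIZE * SHORT_FILTER_MS), used for delay budgeting.
int32_t required_ms = timer.RequiredDecodeTimeMs(webrtc::kVideoFrameDelta);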
diff --git a/webrtc/modules/video_coding/main/source/content_metrics_processing.cc b/webrtc/modules/video_coding/main/source/content_metrics_processing.cc
deleted file mode 100644
index 757ffb0e46..0000000000
--- a/webrtc/modules/video_coding/main/source/content_metrics_processing.cc
+++ /dev/null
@@ -1,125 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_coding/main/source/content_metrics_processing.h"
-
-#include <math.h>
-
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding_defines.h"
-
-namespace webrtc {
-//////////////////////////////////
-/// VCMContentMetricsProcessing //
-//////////////////////////////////
-
-VCMContentMetricsProcessing::VCMContentMetricsProcessing()
- : recursive_avg_factor_(1 / 150.0f), // matched to 30fps.
- frame_cnt_uniform_avg_(0),
- avg_motion_level_(0.0f),
- avg_spatial_level_(0.0f) {
- recursive_avg_ = new VideoContentMetrics();
- uniform_avg_ = new VideoContentMetrics();
-}
-
-VCMContentMetricsProcessing::~VCMContentMetricsProcessing() {
- delete recursive_avg_;
- delete uniform_avg_;
-}
-
-int VCMContentMetricsProcessing::Reset() {
- recursive_avg_->Reset();
- uniform_avg_->Reset();
- frame_cnt_uniform_avg_ = 0;
- avg_motion_level_ = 0.0f;
- avg_spatial_level_ = 0.0f;
- return VCM_OK;
-}
-
-void VCMContentMetricsProcessing::UpdateFrameRate(uint32_t frameRate) {
- // Update factor for recursive averaging.
- recursive_avg_factor_ = static_cast<float> (1000.0f) /
- static_cast<float>(frameRate * kQmMinIntervalMs);
-}
-
-VideoContentMetrics* VCMContentMetricsProcessing::LongTermAvgData() {
- return recursive_avg_;
-}
-
-VideoContentMetrics* VCMContentMetricsProcessing::ShortTermAvgData() {
- if (frame_cnt_uniform_avg_ == 0) {
- return NULL;
- }
- // Two metrics are used: motion and spatial level.
- uniform_avg_->motion_magnitude = avg_motion_level_ /
- static_cast<float>(frame_cnt_uniform_avg_);
- uniform_avg_->spatial_pred_err = avg_spatial_level_ /
- static_cast<float>(frame_cnt_uniform_avg_);
- return uniform_avg_;
-}
-
-void VCMContentMetricsProcessing::ResetShortTermAvgData() {
- // Reset.
- avg_motion_level_ = 0.0f;
- avg_spatial_level_ = 0.0f;
- frame_cnt_uniform_avg_ = 0;
-}
-
-int VCMContentMetricsProcessing::UpdateContentData(
- const VideoContentMetrics *contentMetrics) {
- if (contentMetrics == NULL) {
- return VCM_OK;
- }
- return ProcessContent(contentMetrics);
-}
-
-int VCMContentMetricsProcessing::ProcessContent(
- const VideoContentMetrics *contentMetrics) {
- // Update the recursive averaged metrics: average is over longer window
- // of time: over QmMinIntervalMs ms.
- UpdateRecursiveAvg(contentMetrics);
- // Update the uniform averaged metrics: average is over shorter window
- // of time: based on ~RTCP reports.
- UpdateUniformAvg(contentMetrics);
- return VCM_OK;
-}
-
-void VCMContentMetricsProcessing::UpdateUniformAvg(
- const VideoContentMetrics *contentMetrics) {
- // Update frame counter.
- frame_cnt_uniform_avg_ += 1;
- // Update averaged metrics: motion and spatial level are used.
- avg_motion_level_ += contentMetrics->motion_magnitude;
- avg_spatial_level_ += contentMetrics->spatial_pred_err;
- return;
-}
-
-void VCMContentMetricsProcessing::UpdateRecursiveAvg(
- const VideoContentMetrics *contentMetrics) {
-
- // Spatial metrics: 2x2, 1x2(H), 2x1(V).
- recursive_avg_->spatial_pred_err = (1 - recursive_avg_factor_) *
- recursive_avg_->spatial_pred_err +
- recursive_avg_factor_ * contentMetrics->spatial_pred_err;
-
- recursive_avg_->spatial_pred_err_h = (1 - recursive_avg_factor_) *
- recursive_avg_->spatial_pred_err_h +
- recursive_avg_factor_ * contentMetrics->spatial_pred_err_h;
-
- recursive_avg_->spatial_pred_err_v = (1 - recursive_avg_factor_) *
- recursive_avg_->spatial_pred_err_v +
- recursive_avg_factor_ * contentMetrics->spatial_pred_err_v;
-
- // Motion metric: Derived from NFD (normalized frame difference).
- recursive_avg_->motion_magnitude = (1 - recursive_avg_factor_) *
- recursive_avg_->motion_magnitude +
- recursive_avg_factor_ * contentMetrics->motion_magnitude;
-}
-} // namespace
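
UpdateRecursiveAvg() in the deleted file above is a plain exponential moving average: each metric is blended as avg = (1 - alpha) * avg + alpha * sample, where alpha is recursive_avg_factor_, set by UpdateFrameRate() to 1000 / (frameRate * kQmMinIntervalMs). A standalone sketch of the same update step (hypothetical helper name):

// Exponential moving average step, as applied to each spatial/motion metric.
float RecursiveAvgStep(float prev_avg, float sample, float alpha) {
  return (1.0f - alpha) * prev_avg + alpha * sample;
}
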
diff --git a/webrtc/modules/video_coding/main/source/content_metrics_processing.h b/webrtc/modules/video_coding/main/source/content_metrics_processing.h
deleted file mode 100644
index 3517f757d4..0000000000
--- a/webrtc/modules/video_coding/main/source/content_metrics_processing.h
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_CONTENT_METRICS_PROCESSING_H_
-#define WEBRTC_MODULES_VIDEO_CODING_CONTENT_METRICS_PROCESSING_H_
-
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-struct VideoContentMetrics;
-
-// QM interval time (in ms)
-enum {
- kQmMinIntervalMs = 10000
-};
-
-// Flag for NFD metric vs motion metric
-enum {
- kNfdMetric = 1
-};
-
-/**********************************/
-/* Content Metrics Processing */
-/**********************************/
-class VCMContentMetricsProcessing {
- public:
- VCMContentMetricsProcessing();
- ~VCMContentMetricsProcessing();
-
- // Update class with latest metrics.
- int UpdateContentData(const VideoContentMetrics *contentMetrics);
-
- // Reset the short-term averaged content data.
- void ResetShortTermAvgData();
-
- // Initialize.
- int Reset();
-
- // Inform class of current frame rate.
- void UpdateFrameRate(uint32_t frameRate);
-
- // Returns the long-term averaged content data: recursive average over longer
- // time scale.
- VideoContentMetrics* LongTermAvgData();
-
- // Returns the short-term averaged content data: uniform average over
-  // shorter time scale.
- VideoContentMetrics* ShortTermAvgData();
-
- private:
- // Compute working average.
- int ProcessContent(const VideoContentMetrics *contentMetrics);
-
- // Update the recursive averaged metrics: longer time average (~5/10 secs).
- void UpdateRecursiveAvg(const VideoContentMetrics *contentMetrics);
-
- // Update the uniform averaged metrics: shorter time average (~RTCP report).
- void UpdateUniformAvg(const VideoContentMetrics *contentMetrics);
-
- VideoContentMetrics* recursive_avg_;
- VideoContentMetrics* uniform_avg_;
- float recursive_avg_factor_;
- uint32_t frame_cnt_uniform_avg_;
- float avg_motion_level_;
- float avg_spatial_level_;
-};
-} // namespace webrtc
-#endif // WEBRTC_MODULES_VIDEO_CODING_CONTENT_METRICS_PROCESSING_H_
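
By contrast, the short-term path declared above is a uniform mean: UpdateUniformAvg() accumulates per-frame sums and ShortTermAvgData() divides by frame_cnt_uniform_avg_, returning NULL when no frames have been seen. A sketch with the same zero-count guard (hypothetical name; 0 stands in for the NULL return):

#include <cstdint>

// Arithmetic mean of an accumulated per-frame sum.
float UniformAvg(float accumulated_sum, uint32_t frame_count) {
  return frame_count == 0 ? 0.0f
                          : accumulated_sum / static_cast<float>(frame_count);
}
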
diff --git a/webrtc/modules/video_coding/main/source/decoding_state.cc b/webrtc/modules/video_coding/main/source/decoding_state.cc
deleted file mode 100644
index cc92f1c83f..0000000000
--- a/webrtc/modules/video_coding/main/source/decoding_state.cc
+++ /dev/null
@@ -1,223 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_coding/main/source/decoding_state.h"
-
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/video_coding/main/source/frame_buffer.h"
-#include "webrtc/modules/video_coding/main/source/jitter_buffer_common.h"
-#include "webrtc/modules/video_coding/main/source/packet.h"
-
-namespace webrtc {
-
-VCMDecodingState::VCMDecodingState()
- : sequence_num_(0),
- time_stamp_(0),
- picture_id_(kNoPictureId),
- temporal_id_(kNoTemporalIdx),
- tl0_pic_id_(kNoTl0PicIdx),
- full_sync_(true),
- in_initial_state_(true) {}
-
-VCMDecodingState::~VCMDecodingState() {}
-
-void VCMDecodingState::Reset() {
-  // TODO(mikhal): Verify - we may not always want to reset the sync.
- sequence_num_ = 0;
- time_stamp_ = 0;
- picture_id_ = kNoPictureId;
- temporal_id_ = kNoTemporalIdx;
- tl0_pic_id_ = kNoTl0PicIdx;
- full_sync_ = true;
- in_initial_state_ = true;
-}
-
-uint32_t VCMDecodingState::time_stamp() const {
- return time_stamp_;
-}
-
-uint16_t VCMDecodingState::sequence_num() const {
- return sequence_num_;
-}
-
-bool VCMDecodingState::IsOldFrame(const VCMFrameBuffer* frame) const {
- assert(frame != NULL);
- if (in_initial_state_)
- return false;
- return !IsNewerTimestamp(frame->TimeStamp(), time_stamp_);
-}
-
-bool VCMDecodingState::IsOldPacket(const VCMPacket* packet) const {
- assert(packet != NULL);
- if (in_initial_state_)
- return false;
- return !IsNewerTimestamp(packet->timestamp, time_stamp_);
-}
-
-void VCMDecodingState::SetState(const VCMFrameBuffer* frame) {
- assert(frame != NULL && frame->GetHighSeqNum() >= 0);
- UpdateSyncState(frame);
- sequence_num_ = static_cast<uint16_t>(frame->GetHighSeqNum());
- time_stamp_ = frame->TimeStamp();
- picture_id_ = frame->PictureId();
- temporal_id_ = frame->TemporalId();
- tl0_pic_id_ = frame->Tl0PicId();
- in_initial_state_ = false;
-}
-
-void VCMDecodingState::CopyFrom(const VCMDecodingState& state) {
- sequence_num_ = state.sequence_num_;
- time_stamp_ = state.time_stamp_;
- picture_id_ = state.picture_id_;
- temporal_id_ = state.temporal_id_;
- tl0_pic_id_ = state.tl0_pic_id_;
- full_sync_ = state.full_sync_;
- in_initial_state_ = state.in_initial_state_;
-}
-
-bool VCMDecodingState::UpdateEmptyFrame(const VCMFrameBuffer* frame) {
- bool empty_packet = frame->GetHighSeqNum() == frame->GetLowSeqNum();
- if (in_initial_state_ && empty_packet) {
- // Drop empty packets as long as we are in the initial state.
- return true;
- }
- if ((empty_packet && ContinuousSeqNum(frame->GetHighSeqNum())) ||
- ContinuousFrame(frame)) {
- // Continuous empty packets or continuous frames can be dropped if we
- // advance the sequence number.
- sequence_num_ = frame->GetHighSeqNum();
- time_stamp_ = frame->TimeStamp();
- return true;
- }
- return false;
-}
-
-void VCMDecodingState::UpdateOldPacket(const VCMPacket* packet) {
- assert(packet != NULL);
- if (packet->timestamp == time_stamp_) {
- // Late packet belonging to the last decoded frame - make sure we update the
- // last decoded sequence number.
- sequence_num_ = LatestSequenceNumber(packet->seqNum, sequence_num_);
- }
-}
-
-void VCMDecodingState::SetSeqNum(uint16_t new_seq_num) {
- sequence_num_ = new_seq_num;
-}
-
-bool VCMDecodingState::in_initial_state() const {
- return in_initial_state_;
-}
-
-bool VCMDecodingState::full_sync() const {
- return full_sync_;
-}
-
-void VCMDecodingState::UpdateSyncState(const VCMFrameBuffer* frame) {
- if (in_initial_state_)
- return;
- if (frame->TemporalId() == kNoTemporalIdx ||
- frame->Tl0PicId() == kNoTl0PicIdx) {
- full_sync_ = true;
- } else if (frame->FrameType() == kVideoFrameKey || frame->LayerSync()) {
- full_sync_ = true;
- } else if (full_sync_) {
- // Verify that we are still in sync.
- // Sync will be broken if continuity is true for layers but not for the
- // other methods (PictureId and SeqNum).
- if (UsingPictureId(frame)) {
- // First check for a valid tl0PicId.
- if (frame->Tl0PicId() - tl0_pic_id_ > 1) {
- full_sync_ = false;
- } else {
- full_sync_ = ContinuousPictureId(frame->PictureId());
- }
- } else {
- full_sync_ = ContinuousSeqNum(static_cast<uint16_t>(
- frame->GetLowSeqNum()));
- }
- }
-}
-
-bool VCMDecodingState::ContinuousFrame(const VCMFrameBuffer* frame) const {
- // Check continuity based on the following hierarchy:
- // - Temporal layers (stop here if out of sync).
- // - Picture Id when available.
- // - Sequence numbers.
- // Return true when in initial state.
- // Note that when a method is not applicable it will return false.
- assert(frame != NULL);
- // A key frame is always considered continuous as it doesn't refer to any
- // frames and therefore won't introduce any errors even if prior frames are
- // missing.
- if (frame->FrameType() == kVideoFrameKey)
- return true;
- // When in the initial state we always require a key frame to start decoding.
- if (in_initial_state_)
- return false;
- if (ContinuousLayer(frame->TemporalId(), frame->Tl0PicId()))
- return true;
- // tl0picId is either not used, or should remain unchanged.
- if (frame->Tl0PicId() != tl0_pic_id_)
- return false;
- // Base layers are not continuous or temporal layers are inactive.
- // In the presence of temporal layers, check for Picture ID/sequence number
- // continuity if sync can be restored by this frame.
- if (!full_sync_ && !frame->LayerSync())
- return false;
- if (UsingPictureId(frame)) {
- return ContinuousPictureId(frame->PictureId());
- } else {
- return ContinuousSeqNum(static_cast<uint16_t>(frame->GetLowSeqNum()));
- }
-}
-
-bool VCMDecodingState::ContinuousPictureId(int picture_id) const {
- int next_picture_id = picture_id_ + 1;
- if (picture_id < picture_id_) {
- // Wrap
- if (picture_id_ >= 0x80) {
- // 15 bits used for picture id
- return ((next_picture_id & 0x7FFF) == picture_id);
- } else {
- // 7 bits used for picture id
- return ((next_picture_id & 0x7F) == picture_id);
- }
- }
- // No wrap
- return (next_picture_id == picture_id);
-}
-
-bool VCMDecodingState::ContinuousSeqNum(uint16_t seq_num) const {
- return seq_num == static_cast<uint16_t>(sequence_num_ + 1);
-}
-
-bool VCMDecodingState::ContinuousLayer(int temporal_id,
- int tl0_pic_id) const {
- // First, check if applicable.
- if (temporal_id == kNoTemporalIdx || tl0_pic_id == kNoTl0PicIdx)
- return false;
- // If this is the first frame to use temporal layers, make sure we start
- // from base.
- else if (tl0_pic_id_ == kNoTl0PicIdx && temporal_id_ == kNoTemporalIdx &&
- temporal_id == 0)
- return true;
-
- // Current implementation: Look for base layer continuity.
- if (temporal_id != 0)
- return false;
- return (static_cast<uint8_t>(tl0_pic_id_ + 1) == tl0_pic_id);
-}
-
-bool VCMDecodingState::UsingPictureId(const VCMFrameBuffer* frame) const {
- return (frame->PictureId() != kNoPictureId && picture_id_ != kNoPictureId);
-}
-
-} // namespace webrtc
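
The subtle part of the deleted decoding_state.cc above is ContinuousPictureId(): VP8 picture IDs come in 7-bit and 15-bit variants, so after a wrap the expected successor must be masked to the right width. A standalone sketch of the same check (hypothetical free function); e.g. last_id = 0x7F with 7-bit IDs makes incoming_id = 0 continuous:

// True if incoming_id is exactly last_id + 1 modulo the picture-id width:
// 15-bit if last_id >= 0x80 (it cannot be a 7-bit id then), else 7-bit.
bool IsNextPictureId(int last_id, int incoming_id) {
  int next = last_id + 1;
  if (incoming_id < last_id) {  // Wrapped around.
    int mask = (last_id >= 0x80) ? 0x7FFF : 0x7F;
    return (next & mask) == incoming_id;
  }
  return next == incoming_id;
}
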
diff --git a/webrtc/modules/video_coding/main/source/decoding_state.h b/webrtc/modules/video_coding/main/source/decoding_state.h
deleted file mode 100644
index 99ee335195..0000000000
--- a/webrtc/modules/video_coding/main/source/decoding_state.h
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_DECODING_STATE_H_
-#define WEBRTC_MODULES_VIDEO_CODING_DECODING_STATE_H_
-
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-// Forward declarations
-class VCMFrameBuffer;
-class VCMPacket;
-
-class VCMDecodingState {
- public:
- VCMDecodingState();
- ~VCMDecodingState();
- // Check for old frame
- bool IsOldFrame(const VCMFrameBuffer* frame) const;
- // Check for old packet
- bool IsOldPacket(const VCMPacket* packet) const;
- // Check for frame continuity based on current decoded state. Use best method
- // possible, i.e. temporal info, picture ID or sequence number.
- bool ContinuousFrame(const VCMFrameBuffer* frame) const;
- void SetState(const VCMFrameBuffer* frame);
- void CopyFrom(const VCMDecodingState& state);
- bool UpdateEmptyFrame(const VCMFrameBuffer* frame);
- // Update the sequence number if the timestamp matches current state and the
- // sequence number is higher than the current one. This accounts for packets
- // arriving late.
- void UpdateOldPacket(const VCMPacket* packet);
- void SetSeqNum(uint16_t new_seq_num);
- void Reset();
- uint32_t time_stamp() const;
- uint16_t sequence_num() const;
- // Return true if at initial state.
- bool in_initial_state() const;
- // Return true when sync is on - decode all layers.
- bool full_sync() const;
-
- private:
- void UpdateSyncState(const VCMFrameBuffer* frame);
- // Designated continuity functions
- bool ContinuousPictureId(int picture_id) const;
- bool ContinuousSeqNum(uint16_t seq_num) const;
- bool ContinuousLayer(int temporal_id, int tl0_pic_id) const;
- bool UsingPictureId(const VCMFrameBuffer* frame) const;
-
- // Keep state of last decoded frame.
- // TODO(mikhal/stefan): create designated classes to handle these types.
- uint16_t sequence_num_;
- uint32_t time_stamp_;
- int picture_id_;
- int temporal_id_;
- int tl0_pic_id_;
- bool full_sync_; // Sync flag when temporal layers are used.
- bool in_initial_state_;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_DECODING_STATE_H_
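
ContinuousSeqNum(), declared above, relies on unsigned wrap-around: casting sequence_num_ + 1 to uint16_t turns 0xFFFF into 0 with no special case. A sketch (hypothetical name):

#include <cstdint>

// True when incoming is the immediate successor of last, including the
// 0xFFFF -> 0 wrap that uint16_t arithmetic gives for free.
bool IsNextSeqNum(uint16_t last, uint16_t incoming) {
  return incoming == static_cast<uint16_t>(last + 1);
}
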
diff --git a/webrtc/modules/video_coding/main/source/decoding_state_unittest.cc b/webrtc/modules/video_coding/main/source/decoding_state_unittest.cc
deleted file mode 100644
index feae701a65..0000000000
--- a/webrtc/modules/video_coding/main/source/decoding_state_unittest.cc
+++ /dev/null
@@ -1,449 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <string.h>
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/video_coding/main/source/decoding_state.h"
-#include "webrtc/modules/video_coding/main/source/frame_buffer.h"
-#include "webrtc/modules/video_coding/main/source/jitter_buffer_common.h"
-#include "webrtc/modules/video_coding/main/source/packet.h"
-
-namespace webrtc {
-
-TEST(TestDecodingState, Sanity) {
- VCMDecodingState dec_state;
- dec_state.Reset();
- EXPECT_TRUE(dec_state.in_initial_state());
- EXPECT_TRUE(dec_state.full_sync());
-}
-
-TEST(TestDecodingState, FrameContinuity) {
- VCMDecodingState dec_state;
-  // Check that the continuity decision is based on the correct method.
- VCMFrameBuffer frame;
- VCMFrameBuffer frame_key;
- VCMPacket packet;
- packet.isFirstPacket = true;
- packet.timestamp = 1;
- packet.seqNum = 0xffff;
- packet.frameType = kVideoFrameDelta;
- packet.codecSpecificHeader.codec = kRtpVideoVp8;
- packet.codecSpecificHeader.codecHeader.VP8.pictureId = 0x007F;
- FrameData frame_data;
- frame_data.rtt_ms = 0;
- frame_data.rolling_average_packets_per_frame = -1;
- EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
- // Always start with a key frame.
- dec_state.Reset();
- EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
- packet.frameType = kVideoFrameKey;
- EXPECT_LE(0, frame_key.InsertPacket(packet, 0, kNoErrors, frame_data));
- EXPECT_TRUE(dec_state.ContinuousFrame(&frame_key));
- dec_state.SetState(&frame);
- frame.Reset();
- packet.frameType = kVideoFrameDelta;
- // Use pictureId
- packet.isFirstPacket = false;
- packet.codecSpecificHeader.codecHeader.VP8.pictureId = 0x0002;
- EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
- EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
- frame.Reset();
- packet.codecSpecificHeader.codecHeader.VP8.pictureId = 0;
- packet.seqNum = 10;
- EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
- EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
-
- // Use sequence numbers.
- packet.codecSpecificHeader.codecHeader.VP8.pictureId = kNoPictureId;
- frame.Reset();
- packet.seqNum = dec_state.sequence_num() - 1u;
- EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
- EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
- frame.Reset();
- packet.seqNum = dec_state.sequence_num() + 1u;
- EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
- // Insert another packet to this frame
- packet.seqNum++;
- EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
- // Verify wrap.
- EXPECT_LE(dec_state.sequence_num(), 0xffff);
- EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
- dec_state.SetState(&frame);
-
- // Insert packet with temporal info.
- dec_state.Reset();
- frame.Reset();
- packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
- packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
- packet.codecSpecificHeader.codecHeader.VP8.pictureId = 0;
- packet.seqNum = 1;
- packet.timestamp = 1;
- EXPECT_TRUE(dec_state.full_sync());
- EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
- dec_state.SetState(&frame);
- EXPECT_TRUE(dec_state.full_sync());
- frame.Reset();
- // 1 layer up - still good.
- packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
- packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 1;
- packet.codecSpecificHeader.codecHeader.VP8.pictureId = 1;
- packet.seqNum = 2;
- packet.timestamp = 2;
- EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
- EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
- dec_state.SetState(&frame);
- EXPECT_TRUE(dec_state.full_sync());
- frame.Reset();
- // Lost non-base layer packet => should update sync parameter.
- packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
- packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 3;
- packet.codecSpecificHeader.codecHeader.VP8.pictureId = 3;
- packet.seqNum = 4;
- packet.timestamp = 4;
- EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
- EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
- // Now insert the next non-base layer (belonging to a next tl0PicId).
- frame.Reset();
- packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 1;
- packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 2;
- packet.codecSpecificHeader.codecHeader.VP8.pictureId = 4;
- packet.seqNum = 5;
- packet.timestamp = 5;
- EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
- // Checking continuity and not updating the state - this should not trigger
- // an update of sync state.
- EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
- EXPECT_TRUE(dec_state.full_sync());
- // Next base layer (dropped interim non-base layers) - should update sync.
- frame.Reset();
- packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 1;
- packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
- packet.codecSpecificHeader.codecHeader.VP8.pictureId = 5;
- packet.seqNum = 6;
- packet.timestamp = 6;
- EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
- EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
- dec_state.SetState(&frame);
- EXPECT_FALSE(dec_state.full_sync());
-
- // Check wrap for temporal layers.
- frame.Reset();
- packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0x00FF;
- packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
- packet.codecSpecificHeader.codecHeader.VP8.pictureId = 6;
- packet.seqNum = 7;
- packet.timestamp = 7;
- EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
- dec_state.SetState(&frame);
- EXPECT_FALSE(dec_state.full_sync());
- frame.Reset();
- packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0x0000;
- packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
- packet.codecSpecificHeader.codecHeader.VP8.pictureId = 7;
- packet.seqNum = 8;
- packet.timestamp = 8;
- EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
- EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
-  // Once the state is set to this frame, the same frame is no longer
-  // continuous.
- dec_state.SetState(&frame);
- EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
-}
-
-TEST(TestDecodingState, UpdateOldPacket) {
- VCMDecodingState dec_state;
- // Update only if zero size and newer than previous.
-  // Should only update if the timestamps match.
- VCMFrameBuffer frame;
- VCMPacket packet;
- packet.timestamp = 1;
- packet.seqNum = 1;
- packet.frameType = kVideoFrameDelta;
- FrameData frame_data;
- frame_data.rtt_ms = 0;
- frame_data.rolling_average_packets_per_frame = -1;
- EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
- dec_state.SetState(&frame);
- EXPECT_EQ(dec_state.sequence_num(), 1);
- // Insert an empty packet that does not belong to the same frame.
- // => Sequence num should be the same.
- packet.timestamp = 2;
- dec_state.UpdateOldPacket(&packet);
- EXPECT_EQ(dec_state.sequence_num(), 1);
- // Now insert empty packet belonging to the same frame.
- packet.timestamp = 1;
- packet.seqNum = 2;
- packet.frameType = kEmptyFrame;
- packet.sizeBytes = 0;
- dec_state.UpdateOldPacket(&packet);
- EXPECT_EQ(dec_state.sequence_num(), 2);
- // Now insert delta packet belonging to the same frame.
- packet.timestamp = 1;
- packet.seqNum = 3;
- packet.frameType = kVideoFrameDelta;
- packet.sizeBytes = 1400;
- dec_state.UpdateOldPacket(&packet);
- EXPECT_EQ(dec_state.sequence_num(), 3);
- // Insert a packet belonging to an older timestamp - should not update the
- // sequence number.
- packet.timestamp = 0;
- packet.seqNum = 4;
- packet.frameType = kEmptyFrame;
- packet.sizeBytes = 0;
- dec_state.UpdateOldPacket(&packet);
- EXPECT_EQ(dec_state.sequence_num(), 3);
-}
-
-TEST(TestDecodingState, MultiLayerBehavior) {
- // Identify sync/non-sync when more than one layer.
- VCMDecodingState dec_state;
- // Identify packets belonging to old frames/packets.
- // Set state for current frames.
- // tl0PicIdx 0, temporal id 0.
- VCMFrameBuffer frame;
- VCMPacket packet;
- packet.frameType = kVideoFrameDelta;
- packet.codecSpecificHeader.codec = kRtpVideoVp8;
- packet.timestamp = 0;
- packet.seqNum = 0;
- packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
- packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
- packet.codecSpecificHeader.codecHeader.VP8.pictureId = 0;
- FrameData frame_data;
- frame_data.rtt_ms = 0;
- frame_data.rolling_average_packets_per_frame = -1;
- EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
- dec_state.SetState(&frame);
- // tl0PicIdx 0, temporal id 1.
- frame.Reset();
- packet.timestamp = 1;
- packet.seqNum = 1;
- packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
- packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 1;
- packet.codecSpecificHeader.codecHeader.VP8.pictureId = 1;
- EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
- EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
- dec_state.SetState(&frame);
- EXPECT_TRUE(dec_state.full_sync());
- // Lost tl0PicIdx 0, temporal id 2.
- // Insert tl0PicIdx 0, temporal id 3.
- frame.Reset();
- packet.timestamp = 3;
- packet.seqNum = 3;
- packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
- packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 3;
- packet.codecSpecificHeader.codecHeader.VP8.pictureId = 3;
- EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
- EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
- dec_state.SetState(&frame);
- EXPECT_FALSE(dec_state.full_sync());
- // Insert next base layer
- frame.Reset();
- packet.timestamp = 4;
- packet.seqNum = 4;
- packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 1;
- packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
- packet.codecSpecificHeader.codecHeader.VP8.pictureId = 4;
- EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
- EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
- dec_state.SetState(&frame);
- EXPECT_FALSE(dec_state.full_sync());
- // Insert key frame - should update sync value.
- // A key frame is always a base layer.
- frame.Reset();
- packet.frameType = kVideoFrameKey;
- packet.isFirstPacket = 1;
- packet.timestamp = 5;
- packet.seqNum = 5;
- packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 2;
- packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
- packet.codecSpecificHeader.codecHeader.VP8.pictureId = 5;
- EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
- EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
- dec_state.SetState(&frame);
- EXPECT_TRUE(dec_state.full_sync());
- // After sync, a continuous PictureId is required
-  // (a continuous base layer is not enough).
- frame.Reset();
- packet.frameType = kVideoFrameDelta;
- packet.timestamp = 6;
- packet.seqNum = 6;
- packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 3;
- packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
- packet.codecSpecificHeader.codecHeader.VP8.pictureId = 6;
- EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
- EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
- EXPECT_TRUE(dec_state.full_sync());
- frame.Reset();
- packet.frameType = kVideoFrameDelta;
- packet.isFirstPacket = 1;
- packet.timestamp = 8;
- packet.seqNum = 8;
- packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 4;
- packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
- packet.codecSpecificHeader.codecHeader.VP8.pictureId = 8;
- EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
- EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
- EXPECT_TRUE(dec_state.full_sync());
- dec_state.SetState(&frame);
- EXPECT_FALSE(dec_state.full_sync());
-
- // Insert a non-ref frame - should update sync value.
- frame.Reset();
- packet.frameType = kVideoFrameDelta;
- packet.isFirstPacket = 1;
- packet.timestamp = 9;
- packet.seqNum = 9;
- packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 4;
- packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 2;
- packet.codecSpecificHeader.codecHeader.VP8.pictureId = 9;
- packet.codecSpecificHeader.codecHeader.VP8.layerSync = true;
- EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
- dec_state.SetState(&frame);
- EXPECT_TRUE(dec_state.full_sync());
-
- // The following test will verify the sync flag behavior after a loss.
- // Create the following pattern:
- // Update base layer, lose packet 1 (sync flag on, layer 2), insert packet 3
- // (sync flag on, layer 2) check continuity and sync flag after inserting
- // packet 2 (sync flag on, layer 1).
- // Base layer.
- frame.Reset();
- dec_state.Reset();
- packet.frameType = kVideoFrameDelta;
- packet.isFirstPacket = 1;
- packet.markerBit = 1;
- packet.timestamp = 0;
- packet.seqNum = 0;
- packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
- packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
- packet.codecSpecificHeader.codecHeader.VP8.pictureId = 0;
- packet.codecSpecificHeader.codecHeader.VP8.layerSync = false;
- EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
- dec_state.SetState(&frame);
- EXPECT_TRUE(dec_state.full_sync());
- // Layer 2 - 2 packets (insert one, lose one).
- frame.Reset();
- packet.frameType = kVideoFrameDelta;
- packet.isFirstPacket = 1;
- packet.markerBit = 0;
- packet.timestamp = 1;
- packet.seqNum = 1;
- packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
- packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 2;
- packet.codecSpecificHeader.codecHeader.VP8.pictureId = 1;
- packet.codecSpecificHeader.codecHeader.VP8.layerSync = true;
- EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
- EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
- // Layer 1
- frame.Reset();
- packet.frameType = kVideoFrameDelta;
- packet.isFirstPacket = 1;
- packet.markerBit = 1;
- packet.timestamp = 2;
- packet.seqNum = 3;
- packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
- packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 1;
- packet.codecSpecificHeader.codecHeader.VP8.pictureId = 2;
- packet.codecSpecificHeader.codecHeader.VP8.layerSync = true;
- EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
- EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
- EXPECT_TRUE(dec_state.full_sync());
-}
-
-TEST(TestDecodingState, DiscontinuousPicIdContinuousSeqNum) {
- VCMDecodingState dec_state;
- VCMFrameBuffer frame;
- VCMPacket packet;
- frame.Reset();
- packet.frameType = kVideoFrameKey;
- packet.codecSpecificHeader.codec = kRtpVideoVp8;
- packet.timestamp = 0;
- packet.seqNum = 0;
- packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
- packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
- packet.codecSpecificHeader.codecHeader.VP8.pictureId = 0;
- FrameData frame_data;
- frame_data.rtt_ms = 0;
- frame_data.rolling_average_packets_per_frame = -1;
- EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
- dec_state.SetState(&frame);
- EXPECT_TRUE(dec_state.full_sync());
-
-  // Continuous sequence number but discontinuous picture id. This implies
-  // a loss and we have to fall back to only decoding the base layer.
- frame.Reset();
- packet.frameType = kVideoFrameDelta;
- packet.timestamp += 3000;
- ++packet.seqNum;
- packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 1;
- packet.codecSpecificHeader.codecHeader.VP8.pictureId = 2;
- EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
- EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
- dec_state.SetState(&frame);
- EXPECT_FALSE(dec_state.full_sync());
-}
-
-TEST(TestDecodingState, OldInput) {
- VCMDecodingState dec_state;
- // Identify packets belonging to old frames/packets.
- // Set state for current frames.
- VCMFrameBuffer frame;
- VCMPacket packet;
- packet.timestamp = 10;
- packet.seqNum = 1;
- FrameData frame_data;
- frame_data.rtt_ms = 0;
- frame_data.rolling_average_packets_per_frame = -1;
- EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
- dec_state.SetState(&frame);
- packet.timestamp = 9;
- EXPECT_TRUE(dec_state.IsOldPacket(&packet));
- // Check for old frame
- frame.Reset();
- frame.InsertPacket(packet, 0, kNoErrors, frame_data);
- EXPECT_TRUE(dec_state.IsOldFrame(&frame));
-}
-
-TEST(TestDecodingState, PictureIdRepeat) {
- VCMDecodingState dec_state;
- VCMFrameBuffer frame;
- VCMPacket packet;
- packet.frameType = kVideoFrameDelta;
- packet.codecSpecificHeader.codec = kRtpVideoVp8;
- packet.timestamp = 0;
- packet.seqNum = 0;
- packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
- packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
- packet.codecSpecificHeader.codecHeader.VP8.pictureId = 0;
- FrameData frame_data;
- frame_data.rtt_ms = 0;
- frame_data.rolling_average_packets_per_frame = -1;
- EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
- dec_state.SetState(&frame);
- // tl0PicIdx 0, temporal id 1.
- frame.Reset();
- ++packet.timestamp;
- ++packet.seqNum;
- packet.codecSpecificHeader.codecHeader.VP8.temporalIdx++;
- packet.codecSpecificHeader.codecHeader.VP8.pictureId++;
- EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
- EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
- frame.Reset();
-  // Testing only a gap in tl0PicIdx when tl0PicIdx is continuous.
- packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx += 3;
- packet.codecSpecificHeader.codecHeader.VP8.temporalIdx++;
- packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 1;
- EXPECT_LE(0, frame.InsertPacket(packet, 0, kNoErrors, frame_data));
- EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
-}
-
-} // namespace webrtc
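
The "Check wrap for temporal layers" steps above (tl0PicIdx 0x00FF followed by 0x0000) exercise the uint8_t arithmetic inside ContinuousLayer(): tl0PicIdx is 8 bits on the wire, so the successor check wraps the same way. A sketch (hypothetical name):

#include <cstdint>

// Base-layer continuity as in ContinuousLayer(): the cast makes
// last = 0x00FF, incoming = 0x0000 compare as consecutive.
bool IsNextTl0PicIdx(int last, int incoming) {
  return static_cast<uint8_t>(last + 1) == incoming;
}
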
diff --git a/webrtc/modules/video_coding/main/source/encoded_frame.cc b/webrtc/modules/video_coding/main/source/encoded_frame.cc
deleted file mode 100644
index d86704d632..0000000000
--- a/webrtc/modules/video_coding/main/source/encoded_frame.cc
+++ /dev/null
@@ -1,229 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_coding/main/interface/video_coding_defines.h"
-#include "webrtc/modules/video_coding/main/source/encoded_frame.h"
-#include "webrtc/modules/video_coding/main/source/generic_encoder.h"
-#include "webrtc/modules/video_coding/main/source/jitter_buffer_common.h"
-
-namespace webrtc {
-
-VCMEncodedFrame::VCMEncodedFrame()
- : webrtc::EncodedImage(),
- _renderTimeMs(-1),
- _payloadType(0),
- _missingFrame(false),
- _codec(kVideoCodecUnknown),
- _fragmentation(),
- _rotation(kVideoRotation_0),
- _rotation_set(false) {
- _codecSpecificInfo.codecType = kVideoCodecUnknown;
-}
-
-VCMEncodedFrame::VCMEncodedFrame(const webrtc::EncodedImage& rhs)
- : webrtc::EncodedImage(rhs),
- _renderTimeMs(-1),
- _payloadType(0),
- _missingFrame(false),
- _codec(kVideoCodecUnknown),
- _fragmentation(),
- _rotation(kVideoRotation_0),
- _rotation_set(false) {
- _codecSpecificInfo.codecType = kVideoCodecUnknown;
- _buffer = NULL;
- _size = 0;
- _length = 0;
- if (rhs._buffer != NULL)
- {
- VerifyAndAllocate(rhs._length);
- memcpy(_buffer, rhs._buffer, rhs._length);
- }
-}
-
-VCMEncodedFrame::VCMEncodedFrame(const VCMEncodedFrame& rhs)
- : webrtc::EncodedImage(rhs),
- _renderTimeMs(rhs._renderTimeMs),
- _payloadType(rhs._payloadType),
- _missingFrame(rhs._missingFrame),
- _codecSpecificInfo(rhs._codecSpecificInfo),
- _codec(rhs._codec),
- _fragmentation(),
- _rotation(rhs._rotation),
- _rotation_set(rhs._rotation_set) {
- _buffer = NULL;
- _size = 0;
- _length = 0;
- if (rhs._buffer != NULL)
- {
- VerifyAndAllocate(rhs._length);
- memcpy(_buffer, rhs._buffer, rhs._length);
- _length = rhs._length;
- }
- _fragmentation.CopyFrom(rhs._fragmentation);
-}
-
-VCMEncodedFrame::~VCMEncodedFrame()
-{
- Free();
-}
-
-void VCMEncodedFrame::Free()
-{
- Reset();
- if (_buffer != NULL)
- {
- delete [] _buffer;
- _buffer = NULL;
- }
-}
-
-void VCMEncodedFrame::Reset()
-{
- _renderTimeMs = -1;
- _timeStamp = 0;
- _payloadType = 0;
- _frameType = kVideoFrameDelta;
- _encodedWidth = 0;
- _encodedHeight = 0;
- _completeFrame = false;
- _missingFrame = false;
- _length = 0;
- _codecSpecificInfo.codecType = kVideoCodecUnknown;
- _codec = kVideoCodecUnknown;
- _rotation = kVideoRotation_0;
- _rotation_set = false;
-}
-
-void VCMEncodedFrame::CopyCodecSpecific(const RTPVideoHeader* header)
-{
- if (header) {
- switch (header->codec) {
- case kRtpVideoVp8: {
- if (_codecSpecificInfo.codecType != kVideoCodecVP8) {
- // This is the first packet for this frame.
- _codecSpecificInfo.codecSpecific.VP8.pictureId = -1;
- _codecSpecificInfo.codecSpecific.VP8.temporalIdx = 0;
- _codecSpecificInfo.codecSpecific.VP8.layerSync = false;
- _codecSpecificInfo.codecSpecific.VP8.keyIdx = -1;
- _codecSpecificInfo.codecType = kVideoCodecVP8;
- }
- _codecSpecificInfo.codecSpecific.VP8.nonReference =
- header->codecHeader.VP8.nonReference;
- if (header->codecHeader.VP8.pictureId != kNoPictureId) {
- _codecSpecificInfo.codecSpecific.VP8.pictureId =
- header->codecHeader.VP8.pictureId;
- }
- if (header->codecHeader.VP8.temporalIdx != kNoTemporalIdx) {
- _codecSpecificInfo.codecSpecific.VP8.temporalIdx =
- header->codecHeader.VP8.temporalIdx;
- _codecSpecificInfo.codecSpecific.VP8.layerSync =
- header->codecHeader.VP8.layerSync;
- }
- if (header->codecHeader.VP8.keyIdx != kNoKeyIdx) {
- _codecSpecificInfo.codecSpecific.VP8.keyIdx =
- header->codecHeader.VP8.keyIdx;
- }
- break;
- }
- case kRtpVideoVp9: {
- if (_codecSpecificInfo.codecType != kVideoCodecVP9) {
- // This is the first packet for this frame.
- _codecSpecificInfo.codecSpecific.VP9.picture_id = -1;
- _codecSpecificInfo.codecSpecific.VP9.temporal_idx = 0;
- _codecSpecificInfo.codecSpecific.VP9.spatial_idx = 0;
- _codecSpecificInfo.codecSpecific.VP9.gof_idx = 0;
- _codecSpecificInfo.codecSpecific.VP9.inter_layer_predicted = false;
- _codecSpecificInfo.codecSpecific.VP9.tl0_pic_idx = -1;
- _codecSpecificInfo.codecType = kVideoCodecVP9;
- }
- _codecSpecificInfo.codecSpecific.VP9.inter_pic_predicted =
- header->codecHeader.VP9.inter_pic_predicted;
- _codecSpecificInfo.codecSpecific.VP9.flexible_mode =
- header->codecHeader.VP9.flexible_mode;
- _codecSpecificInfo.codecSpecific.VP9.ss_data_available =
- header->codecHeader.VP9.ss_data_available;
- if (header->codecHeader.VP9.picture_id != kNoPictureId) {
- _codecSpecificInfo.codecSpecific.VP9.picture_id =
- header->codecHeader.VP9.picture_id;
- }
- if (header->codecHeader.VP9.tl0_pic_idx != kNoTl0PicIdx) {
- _codecSpecificInfo.codecSpecific.VP9.tl0_pic_idx =
- header->codecHeader.VP9.tl0_pic_idx;
- }
- if (header->codecHeader.VP9.temporal_idx != kNoTemporalIdx) {
- _codecSpecificInfo.codecSpecific.VP9.temporal_idx =
- header->codecHeader.VP9.temporal_idx;
- _codecSpecificInfo.codecSpecific.VP9.temporal_up_switch =
- header->codecHeader.VP9.temporal_up_switch;
- }
- if (header->codecHeader.VP9.spatial_idx != kNoSpatialIdx) {
- _codecSpecificInfo.codecSpecific.VP9.spatial_idx =
- header->codecHeader.VP9.spatial_idx;
- _codecSpecificInfo.codecSpecific.VP9.inter_layer_predicted =
- header->codecHeader.VP9.inter_layer_predicted;
- }
- if (header->codecHeader.VP9.gof_idx != kNoGofIdx) {
- _codecSpecificInfo.codecSpecific.VP9.gof_idx =
- header->codecHeader.VP9.gof_idx;
- }
- if (header->codecHeader.VP9.ss_data_available) {
- _codecSpecificInfo.codecSpecific.VP9.num_spatial_layers =
- header->codecHeader.VP9.num_spatial_layers;
- _codecSpecificInfo.codecSpecific.VP9
- .spatial_layer_resolution_present =
- header->codecHeader.VP9.spatial_layer_resolution_present;
- if (header->codecHeader.VP9.spatial_layer_resolution_present) {
- for (size_t i = 0; i < header->codecHeader.VP9.num_spatial_layers;
- ++i) {
- _codecSpecificInfo.codecSpecific.VP9.width[i] =
- header->codecHeader.VP9.width[i];
- _codecSpecificInfo.codecSpecific.VP9.height[i] =
- header->codecHeader.VP9.height[i];
- }
- }
- _codecSpecificInfo.codecSpecific.VP9.gof.CopyGofInfoVP9(
- header->codecHeader.VP9.gof);
- }
- break;
- }
- case kRtpVideoH264: {
- _codecSpecificInfo.codecType = kVideoCodecH264;
- break;
- }
- default: {
- _codecSpecificInfo.codecType = kVideoCodecUnknown;
- break;
- }
- }
- }
-}
-
-const RTPFragmentationHeader* VCMEncodedFrame::FragmentationHeader() const {
- return &_fragmentation;
-}
-
-void VCMEncodedFrame::VerifyAndAllocate(size_t minimumSize)
-{
- if(minimumSize > _size)
- {
- // create buffer of sufficient size
- uint8_t* newBuffer = new uint8_t[minimumSize];
- if(_buffer)
- {
- // copy old data
- memcpy(newBuffer, _buffer, _size);
- delete [] _buffer;
- }
- _buffer = newBuffer;
- _size = minimumSize;
- }
-}
-
-} // namespace webrtc
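
VerifyAndAllocate() above is the classic grow-only buffer: when the requested size exceeds the current capacity, allocate a larger array, copy the old bytes across, and never shrink. A minimal standalone sketch of the pattern (hypothetical signature, raw pointers as in the original):

#include <cstddef>
#include <cstdint>
#include <cstring>

// Grow buf to at least min_size, preserving existing contents; a no-op when
// the current capacity already suffices.
void GrowBuffer(uint8_t*& buf, size_t& capacity, size_t min_size) {
  if (min_size <= capacity)
    return;
  uint8_t* bigger = new uint8_t[min_size];
  if (buf != nullptr) {
    std::memcpy(bigger, buf, capacity);
    delete[] buf;
  }
  buf = bigger;
  capacity = min_size;
}
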
diff --git a/webrtc/modules/video_coding/main/source/encoded_frame.h b/webrtc/modules/video_coding/main/source/encoded_frame.h
deleted file mode 100644
index 608578c35d..0000000000
--- a/webrtc/modules/video_coding/main/source/encoded_frame.h
+++ /dev/null
@@ -1,127 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_ENCODED_FRAME_H_
-#define WEBRTC_MODULES_VIDEO_CODING_ENCODED_FRAME_H_
-
-#include <vector>
-
-#include "webrtc/common_types.h"
-#include "webrtc/common_video/interface/video_image.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding_defines.h"
-
-namespace webrtc
-{
-
-class VCMEncodedFrame : protected EncodedImage
-{
-public:
- VCMEncodedFrame();
- VCMEncodedFrame(const webrtc::EncodedImage& rhs);
- VCMEncodedFrame(const VCMEncodedFrame& rhs);
-
- ~VCMEncodedFrame();
- /**
-  * Deletes the frame buffer and resets members to zero
- */
- void Free();
- /**
- * Set render time in milliseconds
- */
- void SetRenderTime(const int64_t renderTimeMs) {_renderTimeMs = renderTimeMs;}
-
- /**
- * Set the encoded frame size
- */
- void SetEncodedSize(uint32_t width, uint32_t height)
- { _encodedWidth = width; _encodedHeight = height; }
- /**
- * Get the encoded image
- */
- const webrtc::EncodedImage& EncodedImage() const
- { return static_cast<const webrtc::EncodedImage&>(*this); }
- /**
- * Get pointer to frame buffer
- */
- const uint8_t* Buffer() const {return _buffer;}
- /**
- * Get frame length
- */
- size_t Length() const {return _length;}
- /**
- * Get frame timestamp (90kHz)
- */
- uint32_t TimeStamp() const {return _timeStamp;}
- /**
- * Get render time in milliseconds
- */
- int64_t RenderTimeMs() const {return _renderTimeMs;}
- /**
- * Get frame type
- */
- webrtc::FrameType FrameType() const { return _frameType; }
- /**
- * Get frame rotation
- */
- VideoRotation rotation() const { return _rotation; }
- /**
- * True if this frame is complete, false otherwise
- */
- bool Complete() const { return _completeFrame; }
- /**
- * True if there's a frame missing before this frame
- */
- bool MissingFrame() const { return _missingFrame; }
- /**
- * Payload type of the encoded payload
- */
- uint8_t PayloadType() const { return _payloadType; }
- /**
- * Get codec specific info.
- * The returned pointer is only valid as long as the VCMEncodedFrame
- * is valid. Also, VCMEncodedFrame owns the pointer and will delete
- * the object.
- */
- const CodecSpecificInfo* CodecSpecific() const {return &_codecSpecificInfo;}
-
- const RTPFragmentationHeader* FragmentationHeader() const;
-
-protected:
- /**
- * Verifies that current allocated buffer size is larger than or equal to the input size.
- * If the current buffer size is smaller, a new allocation is made and the old buffer data
- * is copied to the new buffer.
- * Buffer size is updated to minimumSize.
- */
- void VerifyAndAllocate(size_t minimumSize);
-
- void Reset();
-
- void CopyCodecSpecific(const RTPVideoHeader* header);
-
- int64_t _renderTimeMs;
- uint8_t _payloadType;
- bool _missingFrame;
- CodecSpecificInfo _codecSpecificInfo;
- webrtc::VideoCodecType _codec;
- RTPFragmentationHeader _fragmentation;
- VideoRotation _rotation;
-
- // Video rotation is only set along with the last packet for each frame
- // (same as marker bit). This |_rotation_set| is only for debugging purpose
- // to ensure we don't set it twice for a frame.
- bool _rotation_set;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_ENCODED_FRAME_H_
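
The next deleted file, fec_tables_xor.h, flattens a 2-D protection-factor table into a single array: per its own comment, entry k = rate_i * 129 + loss_j with loss_j in [0, 128], giving 6450 / 129 = 50 rate rows. A sketch of the corresponding lookup (hypothetical helper; the deleted header names media_opt_util.cc as the real consumer):

// Protection factor (code rate) for a rate index and a packet-loss index,
// read from the flat 6450-entry table declared in the file below.
unsigned char XorCodeRate(const unsigned char* table, int rate_i, int loss_j) {
  return table[rate_i * 129 + loss_j];  // 129 loss entries per rate row.
}
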
diff --git a/webrtc/modules/video_coding/main/source/fec_tables_xor.h b/webrtc/modules/video_coding/main/source/fec_tables_xor.h
deleted file mode 100644
index 28c67b4565..0000000000
--- a/webrtc/modules/video_coding/main/source/fec_tables_xor.h
+++ /dev/null
@@ -1,6481 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_SOURCE_FEC_TABLES_XOR_H_
-#define WEBRTC_MODULES_VIDEO_CODING_SOURCE_FEC_TABLES_XOR_H_
-
-// This is a private header for media_opt_util.cc.
-// It should not be included by other files.
-
-namespace webrtc {
-
-// Table for Protection factor (code rate) of delta frames, for the XOR FEC.
-// Input is the packet loss and an effective rate (bits/frame).
-// Output is array kCodeRateXORTable[k], where k = rate_i*129 + loss_j;
-// loss_j = 0,1,..128, and rate_i varies over some range.
-static const int kSizeCodeRateXORTable = 6450;
-static const unsigned char kCodeRateXORTable[kSizeCodeRateXORTable] = {
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-11,
-11,
-11,
-11,
-11,
-11,
-11,
-11,
-11,
-11,
-11,
-11,
-11,
-11,
-11,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-39,
-51,
-51,
-51,
-51,
-51,
-51,
-51,
-51,
-51,
-51,
-51,
-51,
-51,
-51,
-51,
-51,
-51,
-51,
-51,
-51,
-51,
-51,
-51,
-51,
-51,
-51,
-51,
-51,
-51,
-51,
-51,
-51,
-51,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-8,
-8,
-8,
-8,
-8,
-8,
-8,
-8,
-8,
-8,
-8,
-8,
-8,
-8,
-8,
-30,
-30,
-30,
-30,
-30,
-30,
-30,
-30,
-30,
-30,
-30,
-30,
-30,
-30,
-30,
-56,
-56,
-56,
-56,
-56,
-56,
-56,
-65,
-65,
-65,
-65,
-65,
-65,
-65,
-65,
-65,
-65,
-65,
-65,
-65,
-65,
-65,
-65,
-65,
-65,
-65,
-65,
-65,
-65,
-65,
-65,
-65,
-65,
-65,
-65,
-65,
-65,
-65,
-65,
-65,
-65,
-65,
-65,
-65,
-65,
-65,
-65,
-65,
-87,
-87,
-87,
-87,
-87,
-87,
-87,
-87,
-87,
-87,
-87,
-87,
-87,
-87,
-87,
-87,
-87,
-87,
-87,
-87,
-87,
-87,
-87,
-87,
-78,
-78,
-78,
-78,
-78,
-78,
-78,
-78,
-78,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-6,
-6,
-6,
-23,
-23,
-23,
-23,
-23,
-23,
-23,
-23,
-23,
-23,
-23,
-23,
-23,
-23,
-23,
-44,
-44,
-44,
-44,
-44,
-44,
-50,
-50,
-50,
-50,
-50,
-50,
-50,
-50,
-50,
-68,
-68,
-68,
-68,
-68,
-68,
-68,
-85,
-85,
-85,
-85,
-85,
-85,
-85,
-85,
-85,
-85,
-85,
-85,
-85,
-85,
-85,
-85,
-85,
-85,
-85,
-85,
-85,
-85,
-85,
-85,
-85,
-85,
-85,
-85,
-85,
-85,
-85,
-85,
-85,
-85,
-85,
-85,
-85,
-85,
-85,
-85,
-85,
-105,
-105,
-105,
-105,
-105,
-105,
-105,
-105,
-105,
-105,
-105,
-105,
-105,
-105,
-105,
-105,
-105,
-105,
-105,
-105,
-105,
-105,
-105,
-105,
-88,
-88,
-88,
-88,
-88,
-88,
-88,
-88,
-88,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-5,
-5,
-5,
-5,
-5,
-5,
-19,
-19,
-19,
-36,
-41,
-41,
-41,
-41,
-41,
-41,
-41,
-41,
-41,
-41,
-41,
-41,
-41,
-41,
-55,
-55,
-55,
-55,
-55,
-55,
-69,
-69,
-69,
-69,
-69,
-69,
-69,
-69,
-69,
-75,
-75,
-80,
-80,
-80,
-80,
-80,
-97,
-97,
-97,
-97,
-97,
-97,
-97,
-97,
-97,
-97,
-102,
-102,
-102,
-102,
-102,
-102,
-102,
-102,
-102,
-102,
-102,
-102,
-102,
-102,
-102,
-102,
-102,
-102,
-102,
-102,
-102,
-102,
-102,
-102,
-102,
-102,
-102,
-102,
-102,
-102,
-102,
-116,
-116,
-116,
-116,
-116,
-116,
-116,
-116,
-116,
-116,
-116,
-116,
-116,
-116,
-116,
-116,
-116,
-116,
-116,
-116,
-116,
-116,
-116,
-116,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-4,
-16,
-16,
-16,
-16,
-16,
-16,
-30,
-35,
-35,
-47,
-58,
-58,
-58,
-58,
-58,
-58,
-58,
-58,
-58,
-58,
-58,
-58,
-58,
-58,
-63,
-63,
-63,
-63,
-63,
-63,
-77,
-77,
-77,
-77,
-77,
-77,
-77,
-82,
-82,
-82,
-82,
-94,
-94,
-94,
-94,
-94,
-105,
-105,
-105,
-105,
-110,
-110,
-110,
-110,
-110,
-110,
-122,
-122,
-122,
-122,
-122,
-122,
-122,
-122,
-122,
-122,
-122,
-122,
-122,
-122,
-122,
-122,
-122,
-122,
-122,
-122,
-122,
-122,
-122,
-122,
-122,
-122,
-122,
-122,
-122,
-122,
-122,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-115,
-115,
-115,
-115,
-115,
-115,
-115,
-115,
-115,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-4,
-14,
-27,
-27,
-27,
-27,
-27,
-31,
-41,
-52,
-52,
-56,
-69,
-69,
-69,
-69,
-69,
-69,
-69,
-69,
-69,
-69,
-69,
-69,
-69,
-69,
-69,
-69,
-69,
-69,
-69,
-69,
-79,
-79,
-79,
-79,
-83,
-83,
-83,
-94,
-94,
-94,
-94,
-106,
-106,
-106,
-106,
-106,
-115,
-115,
-115,
-115,
-125,
-125,
-125,
-125,
-125,
-125,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-0,
-0,
-0,
-3,
-3,
-3,
-17,
-28,
-38,
-38,
-38,
-38,
-38,
-47,
-51,
-63,
-63,
-63,
-72,
-72,
-72,
-72,
-72,
-72,
-72,
-76,
-76,
-76,
-76,
-80,
-80,
-80,
-80,
-80,
-80,
-80,
-80,
-80,
-84,
-84,
-84,
-84,
-93,
-93,
-93,
-105,
-105,
-105,
-105,
-114,
-114,
-114,
-114,
-114,
-124,
-124,
-124,
-124,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-0,
-0,
-0,
-12,
-12,
-12,
-35,
-43,
-47,
-47,
-47,
-47,
-47,
-58,
-58,
-66,
-66,
-66,
-70,
-70,
-70,
-70,
-70,
-73,
-73,
-82,
-82,
-82,
-86,
-94,
-94,
-94,
-94,
-94,
-94,
-94,
-94,
-94,
-94,
-94,
-94,
-94,
-105,
-105,
-105,
-114,
-114,
-114,
-114,
-117,
-117,
-117,
-117,
-117,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-0,
-0,
-0,
-24,
-24,
-24,
-49,
-53,
-53,
-53,
-53,
-53,
-53,
-61,
-61,
-64,
-64,
-64,
-64,
-70,
-70,
-70,
-70,
-78,
-78,
-88,
-88,
-88,
-96,
-106,
-106,
-106,
-106,
-106,
-106,
-106,
-106,
-106,
-106,
-112,
-112,
-112,
-120,
-120,
-120,
-124,
-124,
-124,
-124,
-124,
-124,
-124,
-124,
-124,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-0,
-0,
-5,
-36,
-36,
-36,
-55,
-55,
-55,
-55,
-55,
-55,
-55,
-58,
-58,
-58,
-58,
-58,
-64,
-78,
-78,
-78,
-78,
-87,
-87,
-94,
-94,
-94,
-103,
-110,
-110,
-110,
-110,
-110,
-110,
-110,
-110,
-116,
-116,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-0,
-0,
-18,
-43,
-43,
-43,
-53,
-53,
-53,
-53,
-53,
-53,
-53,
-53,
-58,
-58,
-58,
-58,
-71,
-87,
-87,
-87,
-87,
-94,
-94,
-97,
-97,
-97,
-109,
-111,
-111,
-111,
-111,
-111,
-111,
-111,
-111,
-125,
-125,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-0,
-0,
-31,
-46,
-46,
-46,
-48,
-48,
-48,
-48,
-48,
-48,
-48,
-48,
-66,
-66,
-66,
-66,
-80,
-93,
-93,
-93,
-93,
-95,
-95,
-95,
-95,
-100,
-115,
-115,
-115,
-115,
-115,
-115,
-115,
-115,
-115,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-0,
-4,
-40,
-45,
-45,
-45,
-45,
-45,
-45,
-45,
-45,
-49,
-49,
-49,
-74,
-74,
-74,
-74,
-86,
-90,
-90,
-90,
-90,
-95,
-95,
-95,
-95,
-106,
-120,
-120,
-120,
-120,
-120,
-120,
-120,
-120,
-120,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-0,
-14,
-42,
-42,
-42,
-42,
-42,
-42,
-42,
-42,
-46,
-56,
-56,
-56,
-80,
-80,
-80,
-80,
-84,
-84,
-84,
-84,
-88,
-99,
-99,
-99,
-99,
-111,
-122,
-122,
-122,
-122,
-122,
-122,
-122,
-122,
-122,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-0,
-26,
-40,
-40,
-40,
-40,
-40,
-40,
-40,
-40,
-54,
-66,
-66,
-66,
-80,
-80,
-80,
-80,
-80,
-80,
-80,
-84,
-94,
-106,
-106,
-106,
-106,
-116,
-120,
-120,
-120,
-120,
-120,
-120,
-120,
-120,
-124,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-3,
-34,
-38,
-38,
-38,
-38,
-38,
-42,
-42,
-42,
-63,
-72,
-72,
-76,
-80,
-80,
-80,
-80,
-80,
-80,
-80,
-89,
-101,
-114,
-114,
-114,
-114,
-118,
-118,
-118,
-118,
-118,
-118,
-118,
-118,
-118,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-12,
-36,
-36,
-36,
-36,
-36,
-36,
-49,
-49,
-49,
-69,
-73,
-76,
-86,
-86,
-86,
-86,
-86,
-86,
-86,
-86,
-97,
-109,
-122,
-122,
-122,
-122,
-122,
-122,
-122,
-122,
-122,
-122,
-122,
-122,
-122,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-22,
-34,
-34,
-34,
-34,
-38,
-38,
-57,
-57,
-57,
-69,
-73,
-82,
-92,
-92,
-92,
-92,
-92,
-92,
-96,
-96,
-104,
-117,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-29,
-33,
-33,
-33,
-33,
-44,
-44,
-62,
-62,
-62,
-69,
-77,
-87,
-95,
-95,
-95,
-95,
-95,
-95,
-107,
-107,
-110,
-120,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-31,
-31,
-31,
-31,
-31,
-51,
-51,
-62,
-65,
-65,
-73,
-83,
-91,
-94,
-94,
-94,
-94,
-97,
-97,
-114,
-114,
-114,
-122,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-29,
-29,
-29,
-29,
-29,
-56,
-56,
-59,
-70,
-70,
-79,
-86,
-89,
-89,
-89,
-89,
-89,
-100,
-100,
-116,
-116,
-116,
-122,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-28,
-28,
-28,
-28,
-28,
-57,
-57,
-57,
-76,
-76,
-83,
-86,
-86,
-86,
-86,
-86,
-89,
-104,
-104,
-114,
-114,
-114,
-124,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-27,
-27,
-27,
-27,
-30,
-55,
-55,
-55,
-80,
-80,
-83,
-86,
-86,
-86,
-86,
-86,
-93,
-108,
-108,
-111,
-111,
-111,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-26,
-26,
-26,
-26,
-36,
-53,
-53,
-53,
-80,
-80,
-80,
-90,
-90,
-90,
-90,
-90,
-98,
-107,
-107,
-107,
-107,
-107,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-26,
-26,
-26,
-28,
-42,
-52,
-54,
-54,
-78,
-78,
-78,
-95,
-95,
-95,
-97,
-97,
-104,
-106,
-106,
-106,
-106,
-106,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-24,
-24,
-24,
-33,
-47,
-49,
-58,
-58,
-74,
-74,
-74,
-97,
-97,
-97,
-106,
-106,
-108,
-108,
-108,
-108,
-108,
-108,
-124,
-124,
-124,
-124,
-124,
-124,
-124,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-24,
-24,
-24,
-39,
-48,
-50,
-63,
-63,
-72,
-74,
-74,
-96,
-96,
-96,
-109,
-111,
-111,
-111,
-111,
-111,
-111,
-111,
-119,
-119,
-122,
-122,
-122,
-122,
-122,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-23,
-23,
-23,
-43,
-46,
-54,
-66,
-66,
-69,
-77,
-77,
-92,
-92,
-92,
-105,
-113,
-113,
-113,
-113,
-113,
-113,
-113,
-115,
-117,
-123,
-123,
-123,
-123,
-123,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-22,
-22,
-22,
-44,
-44,
-59,
-67,
-67,
-67,
-81,
-81,
-89,
-89,
-89,
-97,
-112,
-112,
-112,
-112,
-112,
-112,
-112,
-112,
-119,
-126,
-126,
-126,
-126,
-126,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-21,
-21,
-24,
-43,
-45,
-63,
-65,
-65,
-67,
-85,
-85,
-87,
-87,
-87,
-91,
-109,
-109,
-109,
-111,
-111,
-111,
-111,
-111,
-123,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-21,
-21,
-28,
-42,
-50,
-63,
-63,
-66,
-71,
-85,
-85,
-85,
-85,
-87,
-92,
-106,
-106,
-108,
-114,
-114,
-114,
-114,
-114,
-125,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-20,
-20,
-34,
-41,
-54,
-62,
-62,
-69,
-75,
-82,
-82,
-82,
-82,
-92,
-98,
-105,
-105,
-110,
-117,
-117,
-117,
-117,
-117,
-124,
-124,
-126,
-126,
-126,
-126,
-126,
-126,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-20,
-20,
-38,
-40,
-58,
-60,
-60,
-73,
-78,
-80,
-80,
-80,
-80,
-100,
-105,
-107,
-107,
-113,
-118,
-118,
-118,
-118,
-118,
-120,
-120,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-19,
-21,
-38,
-40,
-58,
-58,
-60,
-75,
-77,
-77,
-77,
-81,
-81,
-107,
-109,
-109,
-109,
-114,
-116,
-116,
-116,
-116,
-116,
-116,
-116,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-18,
-25,
-37,
-44,
-56,
-56,
-63,
-75,
-75,
-75,
-75,
-88,
-88,
-111,
-111,
-111,
-111,
-112,
-112,
-112,
-112,
-112,
-112,
-112,
-114,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-18,
-30,
-36,
-48,
-55,
-55,
-67,
-73,
-73,
-73,
-73,
-97,
-97,
-110,
-110,
-110,
-110,
-110,
-110,
-110,
-110,
-110,
-110,
-110,
-116,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-18,
-34,
-36,
-52,
-55,
-55,
-70,
-72,
-73,
-73,
-73,
-102,
-104,
-108,
-108,
-108,
-108,
-109,
-109,
-109,
-109,
-109,
-109,
-109,
-119,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-17,
-35,
-35,
-52,
-59,
-59,
-70,
-70,
-76,
-76,
-76,
-99,
-105,
-105,
-105,
-105,
-105,
-111,
-111,
-111,
-111,
-111,
-111,
-111,
-121,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-17,
-34,
-36,
-51,
-61,
-62,
-70,
-70,
-80,
-80,
-80,
-93,
-103,
-103,
-103,
-103,
-103,
-112,
-112,
-112,
-112,
-112,
-116,
-118,
-124,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-16,
-33,
-39,
-50,
-59,
-65,
-72,
-72,
-82,
-82,
-82,
-91,
-100,
-100,
-100,
-100,
-100,
-109,
-109,
-109,
-109,
-109,
-121,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-16,
-32,
-43,
-48,
-54,
-66,
-75,
-75,
-81,
-83,
-83,
-92,
-97,
-97,
-97,
-99,
-99,
-105,
-105,
-105,
-105,
-105,
-123,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-15,
-31,
-46,
-47,
-49,
-69,
-77,
-77,
-81,
-85,
-85,
-93,
-95,
-95,
-95,
-100,
-100,
-102,
-102,
-102,
-102,
-102,
-120,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-15,
-30,
-46,
-48,
-48,
-70,
-75,
-79,
-82,
-87,
-87,
-92,
-94,
-94,
-94,
-103,
-103,
-103,
-103,
-103,
-104,
-104,
-115,
-120,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-15,
-30,
-45,
-50,
-50,
-68,
-70,
-80,
-85,
-89,
-89,
-90,
-95,
-95,
-95,
-104,
-104,
-104,
-104,
-104,
-109,
-109,
-112,
-114,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-14,
-29,
-44,
-54,
-54,
-64,
-64,
-83,
-87,
-88,
-88,
-88,
-98,
-98,
-98,
-103,
-103,
-103,
-103,
-103,
-113,
-113,
-113,
-113,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-14,
-29,
-43,
-56,
-56,
-61,
-61,
-84,
-85,
-88,
-88,
-88,
-100,
-100,
-100,
-102,
-102,
-102,
-102,
-102,
-113,
-116,
-116,
-116,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-14,
-28,
-42,
-57,
-57,
-62,
-62,
-80,
-80,
-91,
-91,
-91,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-100,
-109,
-119,
-119,
-119,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-14,
-28,
-42,
-56,
-56,
-65,
-66,
-76,
-76,
-92,
-92,
-92,
-97,
-97,
-97,
-101,
-101,
-101,
-101,
-101,
-106,
-121,
-121,
-121,
-126,
-126,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-13,
-27,
-41,
-55,
-55,
-67,
-72,
-74,
-74,
-90,
-90,
-90,
-91,
-91,
-91,
-105,
-105,
-105,
-105,
-105,
-107,
-122,
-122,
-122,
-123,
-123,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-0,
-13,
-27,
-40,
-54,
-54,
-67,
-76,
-76,
-76,
-85,
-85,
-85,
-85,
-85,
-85,
-112,
-112,
-112,
-112,
-112,
-112,
-121,
-121,
-121,
-121,
-121,
-126,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-127,
-
-
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_SOURCE_FEC_TABLES_XOR_H_
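
The hunk above removes the tail of a precomputed FEC lookup table in which every entry is clamped to 127. As a minimal sketch of how such a flattened table is typically consumed (assuming hypothetical dimensions, bucket meanings, and names, since the table's declaration is not visible in this hunk), one input such as an effective packet-loss bucket selects the row and another such as a bitrate bucket selects the column:

#include <algorithm>
#include <cstdint>

namespace {

// All names and dimensions below are illustrative assumptions; the deleted
// table spanned thousands of entries.
constexpr int kLossBuckets = 3;
constexpr int kRateBuckets = 4;
constexpr uint8_t kMaxFactor = 127;  // Matches the clamp visible in the hunk.

constexpr uint8_t kDemoTable[kLossBuckets * kRateBuckets] = {
    0, 18, 43, 127,
    0, 31, 66, 127,
    0, 40, 90, 127,
};

// Clamps both indices into range and reads the flattened row-major table.
uint8_t LookupProtectionFactor(int loss_bucket, int rate_bucket) {
  loss_bucket = std::min(std::max(loss_bucket, 0), kLossBuckets - 1);
  rate_bucket = std::min(std::max(rate_bucket, 0), kRateBuckets - 1);
  return std::min(kDemoTable[loss_bucket * kRateBuckets + rate_bucket],
                  kMaxFactor);
}

}  // namespace
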
diff --git a/webrtc/modules/video_coding/main/source/frame_buffer.cc b/webrtc/modules/video_coding/main/source/frame_buffer.cc
deleted file mode 100644
index 5b6680ec61..0000000000
--- a/webrtc/modules/video_coding/main/source/frame_buffer.cc
+++ /dev/null
@@ -1,297 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_coding/main/source/frame_buffer.h"
-
-#include <assert.h>
-#include <string.h>
-
-#include "webrtc/base/checks.h"
-#include "webrtc/modules/video_coding/main/source/packet.h"
-#include "webrtc/system_wrappers/include/logging.h"
-
-namespace webrtc {
-
-VCMFrameBuffer::VCMFrameBuffer()
- :
- _state(kStateEmpty),
- _nackCount(0),
- _latestPacketTimeMs(-1) {
-}
-
-VCMFrameBuffer::~VCMFrameBuffer() {
-}
-
-VCMFrameBuffer::VCMFrameBuffer(const VCMFrameBuffer& rhs)
-:
-VCMEncodedFrame(rhs),
-_state(rhs._state),
-_sessionInfo(),
-_nackCount(rhs._nackCount),
-_latestPacketTimeMs(rhs._latestPacketTimeMs) {
- _sessionInfo = rhs._sessionInfo;
- _sessionInfo.UpdateDataPointers(rhs._buffer, _buffer);
-}
-
-webrtc::FrameType
-VCMFrameBuffer::FrameType() const {
- return _sessionInfo.FrameType();
-}
-
-int32_t
-VCMFrameBuffer::GetLowSeqNum() const {
- return _sessionInfo.LowSequenceNumber();
-}
-
-int32_t
-VCMFrameBuffer::GetHighSeqNum() const {
- return _sessionInfo.HighSequenceNumber();
-}
-
-int VCMFrameBuffer::PictureId() const {
- return _sessionInfo.PictureId();
-}
-
-int VCMFrameBuffer::TemporalId() const {
- return _sessionInfo.TemporalId();
-}
-
-bool VCMFrameBuffer::LayerSync() const {
- return _sessionInfo.LayerSync();
-}
-
-int VCMFrameBuffer::Tl0PicId() const {
- return _sessionInfo.Tl0PicId();
-}
-
-bool VCMFrameBuffer::NonReference() const {
- return _sessionInfo.NonReference();
-}
-
-void VCMFrameBuffer::SetGofInfo(const GofInfoVP9& gof_info, size_t idx) {
- _sessionInfo.SetGofInfo(gof_info, idx);
- // TODO(asapersson): Consider adding hdr->VP9.ref_picture_id for testing.
- _codecSpecificInfo.codecSpecific.VP9.temporal_idx =
- gof_info.temporal_idx[idx];
- _codecSpecificInfo.codecSpecific.VP9.temporal_up_switch =
- gof_info.temporal_up_switch[idx];
-}
-
-bool
-VCMFrameBuffer::IsSessionComplete() const {
- return _sessionInfo.complete();
-}
-
-// Insert packet
-VCMFrameBufferEnum
-VCMFrameBuffer::InsertPacket(const VCMPacket& packet,
- int64_t timeInMs,
- VCMDecodeErrorMode decode_error_mode,
- const FrameData& frame_data) {
- assert(!(NULL == packet.dataPtr && packet.sizeBytes > 0));
- if (packet.dataPtr != NULL) {
- _payloadType = packet.payloadType;
- }
-
- if (kStateEmpty == _state) {
- // First packet (empty and/or media) inserted into this frame.
-    // Store some info and set some initial values.
- _timeStamp = packet.timestamp;
-    // We only take the NTP timestamp of the first packet of a frame.
- ntp_time_ms_ = packet.ntp_time_ms_;
- _codec = packet.codec;
- if (packet.frameType != kEmptyFrame) {
- // first media packet
- SetState(kStateIncomplete);
- }
- }
-
- uint32_t requiredSizeBytes = Length() + packet.sizeBytes +
- (packet.insertStartCode ? kH264StartCodeLengthBytes : 0);
- if (requiredSizeBytes >= _size) {
- const uint8_t* prevBuffer = _buffer;
- const uint32_t increments = requiredSizeBytes /
- kBufferIncStepSizeBytes +
- (requiredSizeBytes %
- kBufferIncStepSizeBytes > 0);
- const uint32_t newSize = _size +
- increments * kBufferIncStepSizeBytes;
- if (newSize > kMaxJBFrameSizeBytes) {
- LOG(LS_ERROR) << "Failed to insert packet due to frame being too "
- "big.";
- return kSizeError;
- }
- VerifyAndAllocate(newSize);
- _sessionInfo.UpdateDataPointers(prevBuffer, _buffer);
- }
-
- if (packet.width > 0 && packet.height > 0) {
- _encodedWidth = packet.width;
- _encodedHeight = packet.height;
- }
-
-  // Don't copy payload-specific data for empty packets (e.g. padding packets).
- if (packet.sizeBytes > 0)
- CopyCodecSpecific(&packet.codecSpecificHeader);
-
- int retVal = _sessionInfo.InsertPacket(packet, _buffer,
- decode_error_mode,
- frame_data);
- if (retVal == -1) {
- return kSizeError;
- } else if (retVal == -2) {
- return kDuplicatePacket;
- } else if (retVal == -3) {
- return kOutOfBoundsPacket;
- }
- // update length
- _length = Length() + static_cast<uint32_t>(retVal);
-
- _latestPacketTimeMs = timeInMs;
-
- // http://www.etsi.org/deliver/etsi_ts/126100_126199/126114/12.07.00_60/
- // ts_126114v120700p.pdf Section 7.4.5.
- // The MTSI client shall add the payload bytes as defined in this clause
- // onto the last RTP packet in each group of packets which make up a key
- // frame (I-frame or IDR frame in H.264 (AVC), or an IRAP picture in H.265
- // (HEVC)).
- if (packet.markerBit) {
- RTC_DCHECK(!_rotation_set);
- _rotation = packet.codecSpecificHeader.rotation;
- _rotation_set = true;
- }
-
- if (_sessionInfo.complete()) {
- SetState(kStateComplete);
- return kCompleteSession;
- } else if (_sessionInfo.decodable()) {
- SetState(kStateDecodable);
- return kDecodableSession;
- }
- return kIncomplete;
-}
-
-int64_t
-VCMFrameBuffer::LatestPacketTimeMs() const {
- return _latestPacketTimeMs;
-}
-
-void
-VCMFrameBuffer::IncrementNackCount() {
- _nackCount++;
-}
-
-int16_t
-VCMFrameBuffer::GetNackCount() const {
- return _nackCount;
-}
-
-bool
-VCMFrameBuffer::HaveFirstPacket() const {
- return _sessionInfo.HaveFirstPacket();
-}
-
-bool
-VCMFrameBuffer::HaveLastPacket() const {
- return _sessionInfo.HaveLastPacket();
-}
-
-int
-VCMFrameBuffer::NumPackets() const {
- return _sessionInfo.NumPackets();
-}
-
-void
-VCMFrameBuffer::Reset() {
- _length = 0;
- _timeStamp = 0;
- _sessionInfo.Reset();
- _payloadType = 0;
- _nackCount = 0;
- _latestPacketTimeMs = -1;
- _state = kStateEmpty;
- VCMEncodedFrame::Reset();
-}
-
-// Set state of frame
-void
-VCMFrameBuffer::SetState(VCMFrameBufferStateEnum state) {
- if (_state == state) {
- return;
- }
- switch (state) {
- case kStateIncomplete:
-      // We can go to this state only from kStateEmpty.
- assert(_state == kStateEmpty);
-
-      // Do nothing; we received a packet.
- break;
-
- case kStateComplete:
- assert(_state == kStateEmpty ||
- _state == kStateIncomplete ||
- _state == kStateDecodable);
-
- break;
-
- case kStateEmpty:
- // Should only be set to empty through Reset().
- assert(false);
- break;
-
- case kStateDecodable:
- assert(_state == kStateEmpty ||
- _state == kStateIncomplete);
- break;
- }
- _state = state;
-}
-
-// Get current state of frame
-VCMFrameBufferStateEnum
-VCMFrameBuffer::GetState() const {
- return _state;
-}
-
-// Get current state of frame
-VCMFrameBufferStateEnum
-VCMFrameBuffer::GetState(uint32_t& timeStamp) const {
- timeStamp = TimeStamp();
- return GetState();
-}
-
-bool
-VCMFrameBuffer::IsRetransmitted() const {
- return _sessionInfo.session_nack();
-}
-
-void
-VCMFrameBuffer::PrepareForDecode(bool continuous) {
-#ifdef INDEPENDENT_PARTITIONS
- if (_codec == kVideoCodecVP8) {
- _length =
- _sessionInfo.BuildVP8FragmentationHeader(_buffer, _length,
- &_fragmentation);
- } else {
- size_t bytes_removed = _sessionInfo.MakeDecodable();
- _length -= bytes_removed;
- }
-#else
- size_t bytes_removed = _sessionInfo.MakeDecodable();
- _length -= bytes_removed;
-#endif
- // Transfer frame information to EncodedFrame and create any codec
- // specific information.
- _frameType = _sessionInfo.FrameType();
- _completeFrame = _sessionInfo.complete();
- _missingFrame = !continuous;
-}
-
-} // namespace webrtc
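
InsertPacket() above grows the frame's buffer in whole increments of kBufferIncStepSizeBytes and rejects frames that would exceed kMaxJBFrameSizeBytes. A self-contained sketch of that arithmetic follows; the two constant values are illustrative stand-ins, as the authoritative definitions lived elsewhere in the deleted tree:

#include <cstdint>

// Illustrative values; the deleted tree defined these elsewhere.
constexpr uint32_t kBufferIncStepSizeBytes = 3000;
constexpr uint32_t kMaxJBFrameSizeBytes = 4000000;

// Mirrors the growth arithmetic in VCMFrameBuffer::InsertPacket(): the new
// size is the old size plus enough whole increments to cover the required
// size. Returns 0 when the frame would exceed the jitter-buffer cap.
uint32_t GrownBufferSize(uint32_t current_size, uint32_t required_size) {
  const uint32_t increments =
      required_size / kBufferIncStepSizeBytes +
      (required_size % kBufferIncStepSizeBytes > 0 ? 1 : 0);
  const uint32_t new_size =
      current_size + increments * kBufferIncStepSizeBytes;
  return new_size > kMaxJBFrameSizeBytes ? 0 : new_size;
}
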
diff --git a/webrtc/modules/video_coding/main/source/frame_buffer.h b/webrtc/modules/video_coding/main/source/frame_buffer.h
deleted file mode 100644
index ab4ff6574e..0000000000
--- a/webrtc/modules/video_coding/main/source/frame_buffer.h
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_FRAME_BUFFER_H_
-#define WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_FRAME_BUFFER_H_
-
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding.h"
-#include "webrtc/modules/video_coding/main/source/encoded_frame.h"
-#include "webrtc/modules/video_coding/main/source/jitter_buffer_common.h"
-#include "webrtc/modules/video_coding/main/source/session_info.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-class VCMFrameBuffer : public VCMEncodedFrame {
- public:
- VCMFrameBuffer();
- virtual ~VCMFrameBuffer();
-
- VCMFrameBuffer(const VCMFrameBuffer& rhs);
-
- virtual void Reset();
-
- VCMFrameBufferEnum InsertPacket(const VCMPacket& packet,
- int64_t timeInMs,
- VCMDecodeErrorMode decode_error_mode,
- const FrameData& frame_data);
-
- // State
- // Get current state of frame
- VCMFrameBufferStateEnum GetState() const;
- // Get current state and timestamp of frame
- VCMFrameBufferStateEnum GetState(uint32_t& timeStamp) const;
- void PrepareForDecode(bool continuous);
-
- bool IsRetransmitted() const;
- bool IsSessionComplete() const;
- bool HaveFirstPacket() const;
- bool HaveLastPacket() const;
- int NumPackets() const;
-  // Makes sure the session contains a decodable stream.
- void MakeSessionDecodable();
-
- // Sequence numbers
- // Get lowest packet sequence number in frame
- int32_t GetLowSeqNum() const;
- // Get highest packet sequence number in frame
- int32_t GetHighSeqNum() const;
-
- int PictureId() const;
- int TemporalId() const;
- bool LayerSync() const;
- int Tl0PicId() const;
- bool NonReference() const;
-
- void SetGofInfo(const GofInfoVP9& gof_info, size_t idx);
-
- // Increments a counter to keep track of the number of packets of this frame
- // which were NACKed before they arrived.
- void IncrementNackCount();
- // Returns the number of packets of this frame which were NACKed before they
- // arrived.
- int16_t GetNackCount() const;
-
- int64_t LatestPacketTimeMs() const;
-
- webrtc::FrameType FrameType() const;
- void SetPreviousFrameLoss();
-
- // The number of packets discarded because the decoder can't make use of them.
- int NotDecodablePackets() const;
-
- private:
- void SetState(VCMFrameBufferStateEnum state); // Set state of frame
-
- VCMFrameBufferStateEnum _state; // Current state of the frame
- VCMSessionInfo _sessionInfo;
- uint16_t _nackCount;
- int64_t _latestPacketTimeMs;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_FRAME_BUFFER_H_
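
The deleted frame buffer moves through the states declared here under rules that SetState() in frame_buffer.cc enforces with assert()s. The predicate below restates those rules in one place; the enum names mirror the deleted sources (the enum itself was declared in jitter_buffer_common.h), while the helper function is an illustrative addition:

// Enum values as used by the deleted frame buffer.
enum VCMFrameBufferStateEnum {
  kStateEmpty,
  kStateIncomplete,
  kStateComplete,
  kStateDecodable
};

// Illustrative helper: returns whether SetState() above would accept the
// transition (it enforced these rules with assert()s).
bool IsValidTransition(VCMFrameBufferStateEnum from,
                       VCMFrameBufferStateEnum to) {
  if (from == to)
    return true;  // SetState() returns early on a no-op.
  switch (to) {
    case kStateIncomplete:
      return from == kStateEmpty;
    case kStateComplete:
      return from == kStateEmpty || from == kStateIncomplete ||
             from == kStateDecodable;
    case kStateDecodable:
      return from == kStateEmpty || from == kStateIncomplete;
    case kStateEmpty:
      return false;  // Only reachable through Reset().
  }
  return false;
}
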
diff --git a/webrtc/modules/video_coding/main/source/generic_decoder.cc b/webrtc/modules/video_coding/main/source/generic_decoder.cc
deleted file mode 100644
index 8b2d3974de..0000000000
--- a/webrtc/modules/video_coding/main/source/generic_decoder.cc
+++ /dev/null
@@ -1,198 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_coding/main/interface/video_coding.h"
-#include "webrtc/modules/video_coding/main/source/generic_decoder.h"
-#include "webrtc/modules/video_coding/main/source/internal_defines.h"
-#include "webrtc/system_wrappers/include/clock.h"
-#include "webrtc/system_wrappers/include/logging.h"
-
-namespace webrtc {
-
-VCMDecodedFrameCallback::VCMDecodedFrameCallback(VCMTiming& timing,
- Clock* clock)
-:
-_critSect(CriticalSectionWrapper::CreateCriticalSection()),
-_clock(clock),
-_receiveCallback(NULL),
-_timing(timing),
-_timestampMap(kDecoderFrameMemoryLength),
-_lastReceivedPictureID(0)
-{
-}
-
-VCMDecodedFrameCallback::~VCMDecodedFrameCallback()
-{
- delete _critSect;
-}
-
-void VCMDecodedFrameCallback::SetUserReceiveCallback(
- VCMReceiveCallback* receiveCallback)
-{
- CriticalSectionScoped cs(_critSect);
- _receiveCallback = receiveCallback;
-}
-
-VCMReceiveCallback* VCMDecodedFrameCallback::UserReceiveCallback()
-{
- CriticalSectionScoped cs(_critSect);
- return _receiveCallback;
-}
-
-int32_t VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage) {
- // TODO(holmer): We should improve this so that we can handle multiple
- // callbacks from one call to Decode().
- VCMFrameInformation* frameInfo;
- VCMReceiveCallback* callback;
- {
- CriticalSectionScoped cs(_critSect);
- frameInfo = _timestampMap.Pop(decodedImage.timestamp());
- callback = _receiveCallback;
- }
-
- if (frameInfo == NULL) {
- LOG(LS_WARNING) << "Too many frames backed up in the decoder, dropping "
- "this one.";
- return WEBRTC_VIDEO_CODEC_OK;
- }
-
- _timing.StopDecodeTimer(
- decodedImage.timestamp(),
- frameInfo->decodeStartTimeMs,
- _clock->TimeInMilliseconds(),
- frameInfo->renderTimeMs);
-
- if (callback != NULL)
- {
- decodedImage.set_render_time_ms(frameInfo->renderTimeMs);
- decodedImage.set_rotation(frameInfo->rotation);
- callback->FrameToRender(decodedImage);
- }
- return WEBRTC_VIDEO_CODEC_OK;
-}
-
-int32_t
-VCMDecodedFrameCallback::ReceivedDecodedReferenceFrame(
- const uint64_t pictureId)
-{
- CriticalSectionScoped cs(_critSect);
- if (_receiveCallback != NULL)
- {
- return _receiveCallback->ReceivedDecodedReferenceFrame(pictureId);
- }
- return -1;
-}
-
-int32_t
-VCMDecodedFrameCallback::ReceivedDecodedFrame(const uint64_t pictureId)
-{
- _lastReceivedPictureID = pictureId;
- return 0;
-}
-
-uint64_t VCMDecodedFrameCallback::LastReceivedPictureID() const
-{
- return _lastReceivedPictureID;
-}
-
-void VCMDecodedFrameCallback::Map(uint32_t timestamp,
- VCMFrameInformation* frameInfo) {
- CriticalSectionScoped cs(_critSect);
- _timestampMap.Add(timestamp, frameInfo);
-}
-
-int32_t VCMDecodedFrameCallback::Pop(uint32_t timestamp)
-{
- CriticalSectionScoped cs(_critSect);
- if (_timestampMap.Pop(timestamp) == NULL)
- {
- return VCM_GENERAL_ERROR;
- }
- return VCM_OK;
-}
-
-VCMGenericDecoder::VCMGenericDecoder(VideoDecoder& decoder, bool isExternal)
-:
-_callback(NULL),
-_frameInfos(),
-_nextFrameInfoIdx(0),
-_decoder(decoder),
-_codecType(kVideoCodecUnknown),
-_isExternal(isExternal),
-_keyFrameDecoded(false)
-{
-}
-
-VCMGenericDecoder::~VCMGenericDecoder()
-{
-}
-
-int32_t VCMGenericDecoder::InitDecode(const VideoCodec* settings,
- int32_t numberOfCores)
-{
- _codecType = settings->codecType;
-
- return _decoder.InitDecode(settings, numberOfCores);
-}
-
-int32_t VCMGenericDecoder::Decode(const VCMEncodedFrame& frame,
- int64_t nowMs)
-{
- _frameInfos[_nextFrameInfoIdx].decodeStartTimeMs = nowMs;
- _frameInfos[_nextFrameInfoIdx].renderTimeMs = frame.RenderTimeMs();
- _frameInfos[_nextFrameInfoIdx].rotation = frame.rotation();
- _callback->Map(frame.TimeStamp(), &_frameInfos[_nextFrameInfoIdx]);
-
- _nextFrameInfoIdx = (_nextFrameInfoIdx + 1) % kDecoderFrameMemoryLength;
- int32_t ret = _decoder.Decode(frame.EncodedImage(),
- frame.MissingFrame(),
- frame.FragmentationHeader(),
- frame.CodecSpecific(),
- frame.RenderTimeMs());
-
- if (ret < WEBRTC_VIDEO_CODEC_OK)
- {
- LOG(LS_WARNING) << "Failed to decode frame with timestamp "
- << frame.TimeStamp() << ", error code: " << ret;
- _callback->Pop(frame.TimeStamp());
- return ret;
- }
- else if (ret == WEBRTC_VIDEO_CODEC_NO_OUTPUT ||
- ret == WEBRTC_VIDEO_CODEC_REQUEST_SLI)
- {
- // No output
- _callback->Pop(frame.TimeStamp());
- }
- return ret;
-}
-
-int32_t
-VCMGenericDecoder::Release()
-{
- return _decoder.Release();
-}
-
-int32_t VCMGenericDecoder::Reset()
-{
- return _decoder.Reset();
-}
-
-int32_t VCMGenericDecoder::RegisterDecodeCompleteCallback(VCMDecodedFrameCallback* callback)
-{
- _callback = callback;
- return _decoder.RegisterDecodeCompleteCallback(callback);
-}
-
-bool VCMGenericDecoder::External() const
-{
- return _isExternal;
-}
-
-} // namespace webrtc
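
Decode() above registers per-frame metadata under the frame's RTP timestamp before handing the frame to the decoder, and Decoded() pops that entry to recover the render time and rotation. A minimal sketch of that bookkeeping, with std::map standing in for the deleted VCMTimestampMap and the locking omitted:

#include <cstdint>
#include <map>

// Reduced stand-in for VCMFrameInformation.
struct FrameInfo {
  int64_t render_time_ms;
  int64_t decode_start_time_ms;
};

class TimestampMapSketch {
 public:
  // Called before Decode(): remember the metadata for this timestamp.
  void Map(uint32_t timestamp, const FrameInfo& info) {
    map_[timestamp] = info;
  }
  // Called from the decode-complete callback: returns true and fills |info|
  // if the timestamp was registered, mirroring Pop() above.
  bool Pop(uint32_t timestamp, FrameInfo* info) {
    auto it = map_.find(timestamp);
    if (it == map_.end())
      return false;
    *info = it->second;
    map_.erase(it);
    return true;
  }

 private:
  std::map<uint32_t, FrameInfo> map_;
};
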
diff --git a/webrtc/modules/video_coding/main/source/generic_decoder.h b/webrtc/modules/video_coding/main/source/generic_decoder.h
deleted file mode 100644
index 09929e64f4..0000000000
--- a/webrtc/modules/video_coding/main/source/generic_decoder.h
+++ /dev/null
@@ -1,112 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_GENERIC_DECODER_H_
-#define WEBRTC_MODULES_VIDEO_CODING_GENERIC_DECODER_H_
-
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
-#include "webrtc/modules/video_coding/main/source/encoded_frame.h"
-#include "webrtc/modules/video_coding/main/source/timestamp_map.h"
-#include "webrtc/modules/video_coding/main/source/timing.h"
-
-namespace webrtc
-{
-
-class VCMReceiveCallback;
-
-enum { kDecoderFrameMemoryLength = 10 };
-
-struct VCMFrameInformation
-{
- int64_t renderTimeMs;
- int64_t decodeStartTimeMs;
- void* userData;
- VideoRotation rotation;
-};
-
-class VCMDecodedFrameCallback : public DecodedImageCallback
-{
-public:
- VCMDecodedFrameCallback(VCMTiming& timing, Clock* clock);
- virtual ~VCMDecodedFrameCallback();
- void SetUserReceiveCallback(VCMReceiveCallback* receiveCallback);
- VCMReceiveCallback* UserReceiveCallback();
-
- virtual int32_t Decoded(VideoFrame& decodedImage);
- virtual int32_t ReceivedDecodedReferenceFrame(const uint64_t pictureId);
- virtual int32_t ReceivedDecodedFrame(const uint64_t pictureId);
-
- uint64_t LastReceivedPictureID() const;
-
- void Map(uint32_t timestamp, VCMFrameInformation* frameInfo);
- int32_t Pop(uint32_t timestamp);
-
-private:
- // Protect |_receiveCallback| and |_timestampMap|.
- CriticalSectionWrapper* _critSect;
- Clock* _clock;
- VCMReceiveCallback* _receiveCallback; // Guarded by |_critSect|.
- VCMTiming& _timing;
- VCMTimestampMap _timestampMap; // Guarded by |_critSect|.
- uint64_t _lastReceivedPictureID;
-};
-
-
-class VCMGenericDecoder
-{
- friend class VCMCodecDataBase;
-public:
- VCMGenericDecoder(VideoDecoder& decoder, bool isExternal = false);
- ~VCMGenericDecoder();
-
- /**
- * Initialize the decoder with the information from the VideoCodec
- */
- int32_t InitDecode(const VideoCodec* settings,
- int32_t numberOfCores);
-
- /**
-    * Decode to a raw I420 frame.
-    *
-    * inputFrame : reference to the encoded video frame
- */
- int32_t Decode(const VCMEncodedFrame& inputFrame, int64_t nowMs);
-
- /**
- * Free the decoder memory
- */
- int32_t Release();
-
- /**
- * Reset the decoder state, prepare for a new call
- */
- int32_t Reset();
-
- /**
- * Set decode callback. Deregistering while decoding is illegal.
- */
- int32_t RegisterDecodeCompleteCallback(VCMDecodedFrameCallback* callback);
-
- bool External() const;
-
-private:
- VCMDecodedFrameCallback* _callback;
- VCMFrameInformation _frameInfos[kDecoderFrameMemoryLength];
- uint32_t _nextFrameInfoIdx;
- VideoDecoder& _decoder;
- VideoCodecType _codecType;
- bool _isExternal;
- bool _keyFrameDecoded;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_GENERIC_DECODER_H_
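
VCMGenericDecoder keeps its VCMFrameInformation entries in a fixed array of kDecoderFrameMemoryLength slots and advances the write index modulo that length on every Decode() call, which is why the decode-complete path can find no entry once too many frames are in flight. A sketch of that ring, with the slot contents reduced to two fields for brevity:

#include <cstdint>

enum { kDecoderFrameMemoryLength = 10 };  // As in generic_decoder.h above.

// Reduced stand-in for VCMFrameInformation.
struct FrameSlot {
  int64_t decode_start_time_ms;
  int64_t render_time_ms;
};

class FrameInfoRing {
 public:
  // Hands out the next slot, wrapping around after kDecoderFrameMemoryLength
  // claims, exactly like the _nextFrameInfoIdx arithmetic above.
  FrameSlot* Claim() {
    FrameSlot* slot = &slots_[next_idx_];
    next_idx_ = (next_idx_ + 1) % kDecoderFrameMemoryLength;
    return slot;
  }

 private:
  FrameSlot slots_[kDecoderFrameMemoryLength];
  uint32_t next_idx_ = 0;
};
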
diff --git a/webrtc/modules/video_coding/main/source/generic_encoder.cc b/webrtc/modules/video_coding/main/source/generic_encoder.cc
deleted file mode 100644
index de196040f0..0000000000
--- a/webrtc/modules/video_coding/main/source/generic_encoder.cc
+++ /dev/null
@@ -1,298 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/base/checks.h"
-#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/video_coding/main/source/encoded_frame.h"
-#include "webrtc/modules/video_coding/main/source/generic_encoder.h"
-#include "webrtc/modules/video_coding/main/source/media_optimization.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/logging.h"
-
-namespace webrtc {
-namespace {
-// Map information from info into rtp. If no relevant information is found
-// in info, rtp is left unchanged.
-void CopyCodecSpecific(const CodecSpecificInfo* info, RTPVideoHeader* rtp) {
- RTC_DCHECK(info);
- switch (info->codecType) {
- case kVideoCodecVP8: {
- rtp->codec = kRtpVideoVp8;
- rtp->codecHeader.VP8.InitRTPVideoHeaderVP8();
- rtp->codecHeader.VP8.pictureId = info->codecSpecific.VP8.pictureId;
- rtp->codecHeader.VP8.nonReference =
- info->codecSpecific.VP8.nonReference;
- rtp->codecHeader.VP8.temporalIdx = info->codecSpecific.VP8.temporalIdx;
- rtp->codecHeader.VP8.layerSync = info->codecSpecific.VP8.layerSync;
- rtp->codecHeader.VP8.tl0PicIdx = info->codecSpecific.VP8.tl0PicIdx;
- rtp->codecHeader.VP8.keyIdx = info->codecSpecific.VP8.keyIdx;
- rtp->simulcastIdx = info->codecSpecific.VP8.simulcastIdx;
- return;
- }
- case kVideoCodecVP9: {
- rtp->codec = kRtpVideoVp9;
- rtp->codecHeader.VP9.InitRTPVideoHeaderVP9();
- rtp->codecHeader.VP9.inter_pic_predicted =
- info->codecSpecific.VP9.inter_pic_predicted;
- rtp->codecHeader.VP9.flexible_mode =
- info->codecSpecific.VP9.flexible_mode;
- rtp->codecHeader.VP9.ss_data_available =
- info->codecSpecific.VP9.ss_data_available;
- rtp->codecHeader.VP9.picture_id = info->codecSpecific.VP9.picture_id;
- rtp->codecHeader.VP9.tl0_pic_idx = info->codecSpecific.VP9.tl0_pic_idx;
- rtp->codecHeader.VP9.temporal_idx = info->codecSpecific.VP9.temporal_idx;
- rtp->codecHeader.VP9.spatial_idx = info->codecSpecific.VP9.spatial_idx;
- rtp->codecHeader.VP9.temporal_up_switch =
- info->codecSpecific.VP9.temporal_up_switch;
- rtp->codecHeader.VP9.inter_layer_predicted =
- info->codecSpecific.VP9.inter_layer_predicted;
- rtp->codecHeader.VP9.gof_idx = info->codecSpecific.VP9.gof_idx;
-
- // Packetizer needs to know the number of spatial layers to correctly set
- // the marker bit, even when the number won't be written in the packet.
- rtp->codecHeader.VP9.num_spatial_layers =
- info->codecSpecific.VP9.num_spatial_layers;
- if (info->codecSpecific.VP9.ss_data_available) {
- rtp->codecHeader.VP9.spatial_layer_resolution_present =
- info->codecSpecific.VP9.spatial_layer_resolution_present;
- if (info->codecSpecific.VP9.spatial_layer_resolution_present) {
- for (size_t i = 0; i < info->codecSpecific.VP9.num_spatial_layers;
- ++i) {
- rtp->codecHeader.VP9.width[i] = info->codecSpecific.VP9.width[i];
- rtp->codecHeader.VP9.height[i] = info->codecSpecific.VP9.height[i];
- }
- }
- rtp->codecHeader.VP9.gof.CopyGofInfoVP9(info->codecSpecific.VP9.gof);
- }
- return;
- }
- case kVideoCodecH264:
- rtp->codec = kRtpVideoH264;
- return;
- case kVideoCodecGeneric:
- rtp->codec = kRtpVideoGeneric;
- rtp->simulcastIdx = info->codecSpecific.generic.simulcast_idx;
- return;
- default:
- return;
- }
-}
-} // namespace
-
-//#define DEBUG_ENCODER_BIT_STREAM
-
-VCMGenericEncoder::VCMGenericEncoder(
- VideoEncoder* encoder,
- VideoEncoderRateObserver* rate_observer,
- VCMEncodedFrameCallback* encoded_frame_callback,
- bool internalSource)
- : encoder_(encoder),
- rate_observer_(rate_observer),
- vcm_encoded_frame_callback_(encoded_frame_callback),
- internal_source_(internalSource),
- encoder_params_({0, 0, 0, 0}),
- rotation_(kVideoRotation_0),
- is_screenshare_(false) {}
-
-VCMGenericEncoder::~VCMGenericEncoder() {}
-
-int32_t VCMGenericEncoder::Release() {
- return encoder_->Release();
-}
-
-int32_t VCMGenericEncoder::InitEncode(const VideoCodec* settings,
- int32_t numberOfCores,
- size_t maxPayloadSize) {
- {
- rtc::CritScope lock(&params_lock_);
- encoder_params_.target_bitrate = settings->startBitrate * 1000;
- encoder_params_.input_frame_rate = settings->maxFramerate;
- }
-
- is_screenshare_ = settings->mode == VideoCodecMode::kScreensharing;
- if (encoder_->InitEncode(settings, numberOfCores, maxPayloadSize) != 0) {
- LOG(LS_ERROR) << "Failed to initialize the encoder associated with "
- "payload name: "
- << settings->plName;
- return -1;
- }
- encoder_->RegisterEncodeCompleteCallback(vcm_encoded_frame_callback_);
- return 0;
-}
-
-int32_t VCMGenericEncoder::Encode(const VideoFrame& inputFrame,
- const CodecSpecificInfo* codecSpecificInfo,
- const std::vector<FrameType>& frameTypes) {
- for (FrameType frame_type : frameTypes)
- RTC_DCHECK(frame_type == kVideoFrameKey || frame_type == kVideoFrameDelta);
-
- rotation_ = inputFrame.rotation();
-
-  // Keep track of the current frame rotation and apply it to the output of
-  // the encoder. This might not be exact, as the encoder could have a
-  // one-frame delay, but it should be close enough.
- // TODO(pbos): Map from timestamp, this is racy (even if rotation_ is locked
- // properly, which it isn't). More than one frame may be in the pipeline.
- vcm_encoded_frame_callback_->SetRotation(rotation_);
-
- int32_t result = encoder_->Encode(inputFrame, codecSpecificInfo, &frameTypes);
- if (is_screenshare_ &&
- result == WEBRTC_VIDEO_CODEC_TARGET_BITRATE_OVERSHOOT) {
- // Target bitrate exceeded, encoder state has been reset - try again.
- return encoder_->Encode(inputFrame, codecSpecificInfo, &frameTypes);
- }
-
- return result;
-}
-
-void VCMGenericEncoder::SetEncoderParameters(const EncoderParameters& params) {
- bool channel_parameters_have_changed;
- bool rates_have_changed;
- {
- rtc::CritScope lock(&params_lock_);
- channel_parameters_have_changed =
- params.loss_rate != encoder_params_.loss_rate ||
- params.rtt != encoder_params_.rtt;
- rates_have_changed =
- params.target_bitrate != encoder_params_.target_bitrate ||
- params.input_frame_rate != encoder_params_.input_frame_rate;
- encoder_params_ = params;
- }
- if (channel_parameters_have_changed)
- encoder_->SetChannelParameters(params.loss_rate, params.rtt);
- if (rates_have_changed) {
- uint32_t target_bitrate_kbps = (params.target_bitrate + 500) / 1000;
- encoder_->SetRates(target_bitrate_kbps, params.input_frame_rate);
- if (rate_observer_ != nullptr) {
- rate_observer_->OnSetRates(params.target_bitrate,
- params.input_frame_rate);
- }
- }
-}
-
-EncoderParameters VCMGenericEncoder::GetEncoderParameters() const {
- rtc::CritScope lock(&params_lock_);
- return encoder_params_;
-}
-
-int32_t
-VCMGenericEncoder::SetPeriodicKeyFrames(bool enable)
-{
- return encoder_->SetPeriodicKeyFrames(enable);
-}
-
-int32_t VCMGenericEncoder::RequestFrame(
- const std::vector<FrameType>& frame_types) {
- VideoFrame image;
- return encoder_->Encode(image, NULL, &frame_types);
-}
-
-bool
-VCMGenericEncoder::InternalSource() const
-{
- return internal_source_;
-}
-
-void VCMGenericEncoder::OnDroppedFrame() {
- encoder_->OnDroppedFrame();
-}
-
-bool VCMGenericEncoder::SupportsNativeHandle() const {
- return encoder_->SupportsNativeHandle();
-}
-
-int VCMGenericEncoder::GetTargetFramerate() {
- return encoder_->GetTargetFramerate();
-}
-
- /***************************
- * Callback Implementation
- ***************************/
-VCMEncodedFrameCallback::VCMEncodedFrameCallback(
- EncodedImageCallback* post_encode_callback)
- : _sendCallback(),
- _mediaOpt(NULL),
- _payloadType(0),
- _internalSource(false),
- _rotation(kVideoRotation_0),
- post_encode_callback_(post_encode_callback)
-#ifdef DEBUG_ENCODER_BIT_STREAM
- ,
- _bitStreamAfterEncoder(NULL)
-#endif
-{
-#ifdef DEBUG_ENCODER_BIT_STREAM
- _bitStreamAfterEncoder = fopen("encoderBitStream.bit", "wb");
-#endif
-}
-
-VCMEncodedFrameCallback::~VCMEncodedFrameCallback()
-{
-#ifdef DEBUG_ENCODER_BIT_STREAM
- fclose(_bitStreamAfterEncoder);
-#endif
-}
-
-int32_t
-VCMEncodedFrameCallback::SetTransportCallback(VCMPacketizationCallback* transport)
-{
- _sendCallback = transport;
- return VCM_OK;
-}
-
-int32_t VCMEncodedFrameCallback::Encoded(
- const EncodedImage& encodedImage,
- const CodecSpecificInfo* codecSpecificInfo,
- const RTPFragmentationHeader* fragmentationHeader) {
- RTC_DCHECK(encodedImage._frameType == kVideoFrameKey ||
- encodedImage._frameType == kVideoFrameDelta);
- post_encode_callback_->Encoded(encodedImage, NULL, NULL);
-
- if (_sendCallback == NULL) {
- return VCM_UNINITIALIZED;
- }
-
-#ifdef DEBUG_ENCODER_BIT_STREAM
- if (_bitStreamAfterEncoder != NULL) {
- fwrite(encodedImage._buffer, 1, encodedImage._length,
- _bitStreamAfterEncoder);
- }
-#endif
-
- RTPVideoHeader rtpVideoHeader;
- memset(&rtpVideoHeader, 0, sizeof(RTPVideoHeader));
- RTPVideoHeader* rtpVideoHeaderPtr = &rtpVideoHeader;
- if (codecSpecificInfo) {
- CopyCodecSpecific(codecSpecificInfo, rtpVideoHeaderPtr);
- }
- rtpVideoHeader.rotation = _rotation;
-
- int32_t callbackReturn = _sendCallback->SendData(
- _payloadType, encodedImage, *fragmentationHeader, rtpVideoHeaderPtr);
- if (callbackReturn < 0) {
- return callbackReturn;
- }
-
- if (_mediaOpt != NULL) {
- _mediaOpt->UpdateWithEncodedData(encodedImage);
- if (_internalSource)
- return _mediaOpt->DropFrame(); // Signal to encoder to drop next frame.
- }
- return VCM_OK;
-}
-
-void VCMEncodedFrameCallback::SetMediaOpt(
-    media_optimization::MediaOptimization* mediaOpt) {
-  _mediaOpt = mediaOpt;
-}
-
-} // namespace webrtc
diff --git a/webrtc/modules/video_coding/main/source/generic_encoder.h b/webrtc/modules/video_coding/main/source/generic_encoder.h
deleted file mode 100644
index 3a7132860f..0000000000
--- a/webrtc/modules/video_coding/main/source/generic_encoder.h
+++ /dev/null
@@ -1,142 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_GENERIC_ENCODER_H_
-#define WEBRTC_MODULES_VIDEO_CODING_GENERIC_ENCODER_H_
-
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding_defines.h"
-
-#include <stdio.h>
-
-#include "webrtc/base/criticalsection.h"
-#include "webrtc/base/scoped_ptr.h"
-
-namespace webrtc {
-class CriticalSectionWrapper;
-
-namespace media_optimization {
-class MediaOptimization;
-} // namespace media_optimization
-
-struct EncoderParameters {
- uint32_t target_bitrate;
- uint8_t loss_rate;
- int64_t rtt;
- uint32_t input_frame_rate;
-};
-
-/*************************************/
-/*   VCMEncodedFrameCallback class   */
-/*************************************/
-class VCMEncodedFrameCallback : public EncodedImageCallback
-{
-public:
- VCMEncodedFrameCallback(EncodedImageCallback* post_encode_callback);
- virtual ~VCMEncodedFrameCallback();
-
- /*
- * Callback implementation - codec encode complete
- */
- int32_t Encoded(
- const EncodedImage& encodedImage,
- const CodecSpecificInfo* codecSpecificInfo = NULL,
- const RTPFragmentationHeader* fragmentationHeader = NULL);
-    /*
-    * Set the transport (packetization) callback that receives encoded frames
-    */
- int32_t SetTransportCallback(VCMPacketizationCallback* transport);
-    /**
-    * Set media optimization
-    */
-    void SetMediaOpt(media_optimization::MediaOptimization* mediaOpt);
-
-    void SetPayloadType(uint8_t payloadType) { _payloadType = payloadType; }
-    void SetInternalSource(bool internalSource) { _internalSource = internalSource; }
-
- void SetRotation(VideoRotation rotation) { _rotation = rotation; }
-
-private:
- VCMPacketizationCallback* _sendCallback;
- media_optimization::MediaOptimization* _mediaOpt;
- uint8_t _payloadType;
- bool _internalSource;
- VideoRotation _rotation;
-
- EncodedImageCallback* post_encode_callback_;
-
-#ifdef DEBUG_ENCODER_BIT_STREAM
- FILE* _bitStreamAfterEncoder;
-#endif
-};  // end of VCMEncodedFrameCallback class
-
-
-/******************************/
-/* VCMGenericEncoder class */
-/******************************/
-class VCMGenericEncoder
-{
- friend class VCMCodecDataBase;
-public:
- VCMGenericEncoder(VideoEncoder* encoder,
- VideoEncoderRateObserver* rate_observer,
- VCMEncodedFrameCallback* encoded_frame_callback,
- bool internalSource);
- ~VCMGenericEncoder();
- /**
- * Free encoder memory
- */
- int32_t Release();
- /**
- * Initialize the encoder with the information from the VideoCodec
- */
- int32_t InitEncode(const VideoCodec* settings,
- int32_t numberOfCores,
- size_t maxPayloadSize);
- /**
- * Encode raw image
- * inputFrame : Frame containing raw image
- * codecSpecificInfo : Specific codec data
-    * frameTypes : The requested frame types to encode
- */
- int32_t Encode(const VideoFrame& inputFrame,
- const CodecSpecificInfo* codecSpecificInfo,
- const std::vector<FrameType>& frameTypes);
-
- void SetEncoderParameters(const EncoderParameters& params);
- EncoderParameters GetEncoderParameters() const;
-
- int32_t SetPeriodicKeyFrames(bool enable);
-
- int32_t RequestFrame(const std::vector<FrameType>& frame_types);
-
- bool InternalSource() const;
-
- void OnDroppedFrame();
-
- bool SupportsNativeHandle() const;
-
- int GetTargetFramerate();
-
-private:
- VideoEncoder* const encoder_;
- VideoEncoderRateObserver* const rate_observer_;
- VCMEncodedFrameCallback* const vcm_encoded_frame_callback_;
- const bool internal_source_;
- mutable rtc::CriticalSection params_lock_;
- EncoderParameters encoder_params_ GUARDED_BY(params_lock_);
- VideoRotation rotation_;
- bool is_screenshare_;
-}; // end of VCMGenericEncoder class
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_GENERIC_ENCODER_H_
diff --git a/webrtc/modules/video_coding/main/source/inter_frame_delay.cc b/webrtc/modules/video_coding/main/source/inter_frame_delay.cc
deleted file mode 100644
index 4786917e16..0000000000
--- a/webrtc/modules/video_coding/main/source/inter_frame_delay.cc
+++ /dev/null
@@ -1,114 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_coding/main/source/inter_frame_delay.h"
-
-namespace webrtc {
-
-VCMInterFrameDelay::VCMInterFrameDelay(int64_t currentWallClock)
-{
- Reset(currentWallClock);
-}
-
-// Resets the delay estimate
-void
-VCMInterFrameDelay::Reset(int64_t currentWallClock)
-{
- _zeroWallClock = currentWallClock;
- _wrapArounds = 0;
- _prevWallClock = 0;
- _prevTimestamp = 0;
- _dTS = 0;
-}
-
-// Calculates the delay of a frame with the given timestamp.
-// This method is called when the frame is complete.
-bool
-VCMInterFrameDelay::CalculateDelay(uint32_t timestamp,
- int64_t *delay,
- int64_t currentWallClock)
-{
- if (_prevWallClock == 0)
- {
- // First set of data, initialization, wait for next frame
- _prevWallClock = currentWallClock;
- _prevTimestamp = timestamp;
- *delay = 0;
- return true;
- }
-
- int32_t prevWrapArounds = _wrapArounds;
- CheckForWrapArounds(timestamp);
-
- // This will be -1 for backward wrap arounds and +1 for forward wrap arounds
- int32_t wrapAroundsSincePrev = _wrapArounds - prevWrapArounds;
-
- // Account for reordering in jitter variance estimate in the future?
- // Note that this also captures incomplete frames which are grabbed
- // for decoding after a later frame has been complete, i.e. real
- // packet losses.
- if ((wrapAroundsSincePrev == 0 && timestamp < _prevTimestamp) || wrapAroundsSincePrev < 0)
- {
- *delay = 0;
- return false;
- }
-
- // Compute the compensated timestamp difference and convert it to ms and
- // round it to closest integer.
- _dTS = static_cast<int64_t>((timestamp + wrapAroundsSincePrev *
- (static_cast<int64_t>(1)<<32) - _prevTimestamp) / 90.0 + 0.5);
-
-    // frameDelay is the difference between dT and dTS -- i.e. the difference
-    // between the wall clock time delta and the timestamp delta of two
-    // consecutive frames.
- *delay = static_cast<int64_t>(currentWallClock - _prevWallClock - _dTS);
-
- _prevTimestamp = timestamp;
- _prevWallClock = currentWallClock;
-
- return true;
-}
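
With the 90 kHz RTP video clock, dividing the wrap-compensated timestamp difference by 90 yields the media-time spacing in milliseconds, and the reported delay is how much more wall-clock time elapsed than that spacing predicts. A worked example with made-up numbers:

```cpp
#include <cstdint>
#include <iostream>

int main() {
  // Two frames 3000 RTP ticks apart (90 kHz clock -> ~33 ms of media time),
  // received 40 ms apart on the wall clock, with no timestamp wrap.
  uint32_t prev_ts = 90000, ts = 93000;
  int64_t prev_wall_ms = 1000, wall_ms = 1040;
  int32_t wraps_since_prev = 0;

  // Same arithmetic as CalculateDelay(): compensate for wraps, convert the
  // tick difference to ms, and round to the closest integer.
  int64_t dTS = static_cast<int64_t>(
      (ts + wraps_since_prev * (static_cast<int64_t>(1) << 32) - prev_ts) /
          90.0 +
      0.5);                                      // 3000 / 90 -> 33 ms
  int64_t delay = wall_ms - prev_wall_ms - dTS;  // 40 - 33 = 7 ms of jitter

  std::cout << "dTS=" << dTS << " ms, delay=" << delay << " ms\n";
}
```
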
-
-// Returns the current difference between incoming timestamps
-uint32_t VCMInterFrameDelay::CurrentTimeStampDiffMs() const
-{
- if (_dTS < 0)
- {
- return 0;
- }
- return static_cast<uint32_t>(_dTS);
-}
-
-// Checks whether the timestamp clock has wrapped around since the last
-// timestamp, and keeps track of the number of wrap-arounds since reset.
-void
-VCMInterFrameDelay::CheckForWrapArounds(uint32_t timestamp)
-{
- if (timestamp < _prevTimestamp)
- {
-        // This difference will probably be less than -2^31 if we have had a
-        // wrap around (e.g. timestamp = 1, _prevTimestamp = 2^32 - 1). Since
-        // it is cast to an int32_t it will wrap and become positive.
- if (static_cast<int32_t>(timestamp - _prevTimestamp) > 0)
- {
- // Forward wrap around
- _wrapArounds++;
- }
- }
-    // This difference will probably be less than -2^31 if we have had a
-    // backward wrap around. Since it is cast to an int32_t it will wrap and
-    // become positive.
- else if (static_cast<int32_t>(_prevTimestamp - timestamp) > 0)
- {
- // Backward wrap around
- _wrapArounds--;
- }
-}
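
The detection relies on unsigned subtraction wrapping modulo 2^32: after a genuine forward wrap the small positive residue survives the cast to int32_t, while ordinary reordering stays negative. A quick demonstration:

```cpp
#include <cstdint>
#include <iostream>

int main() {
  // Forward wrap: previous timestamp just below 2^32, new one just past zero.
  uint32_t prev = 0xFFFFFFFFu;  // 2^32 - 1
  uint32_t ts = 1;
  // Unsigned subtraction wraps modulo 2^32: 1 - (2^32 - 1) == 2.
  std::cout << static_cast<int32_t>(ts - prev) << "\n";  // 2 > 0: wrap

  // Plain reordering: a slightly older timestamp yields a negative value.
  prev = 1000;
  ts = 900;
  std::cout << static_cast<int32_t>(ts - prev) << "\n";  // -100: no wrap
}
```
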
-
-}  // namespace webrtc
diff --git a/webrtc/modules/video_coding/main/source/inter_frame_delay.h b/webrtc/modules/video_coding/main/source/inter_frame_delay.h
deleted file mode 100644
index 58b326ae96..0000000000
--- a/webrtc/modules/video_coding/main/source/inter_frame_delay.h
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_INTER_FRAME_DELAY_H_
-#define WEBRTC_MODULES_VIDEO_CODING_INTER_FRAME_DELAY_H_
-
-#include "webrtc/typedefs.h"
-
-namespace webrtc
-{
-
-class VCMInterFrameDelay
-{
-public:
- VCMInterFrameDelay(int64_t currentWallClock);
-
-    // Resets the estimate; internal state is zeroed and |currentWallClock|
-    // becomes the new wall clock origin.
- void Reset(int64_t currentWallClock);
-
- // Calculates the delay of a frame with the given timestamp.
- // This method is called when the frame is complete.
- //
- // Input:
- // - timestamp : RTP timestamp of a received frame
- // - *delay : Pointer to memory where the result should be stored
-    //          - currentWallClock : The current time in milliseconds;
-    //                               in practice the receive time of the
-    //                               frame's latest packet.
- // Return value : true if OK, false when reordered timestamps
- bool CalculateDelay(uint32_t timestamp,
- int64_t *delay,
- int64_t currentWallClock);
-
- // Returns the current difference between incoming timestamps
- //
- // Return value : Wrap-around compensated difference between incoming
- // timestamps.
- uint32_t CurrentTimeStampDiffMs() const;
-
-private:
-    // Checks whether the RTP timestamp counter has wrapped around
-    // between the current and the previously received frame.
- //
- // Input:
-    //          - timestamp : RTP timestamp of the current frame.
- void CheckForWrapArounds(uint32_t timestamp);
-
- int64_t _zeroWallClock; // Local timestamp of the first video packet received
- int32_t _wrapArounds; // Number of wrapArounds detected
- // The previous timestamp passed to the delay estimate
- uint32_t _prevTimestamp;
- // The previous wall clock timestamp used by the delay estimate
- int64_t _prevWallClock;
- // Wrap-around compensated difference between incoming timestamps
- int64_t _dTS;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_INTER_FRAME_DELAY_H_
diff --git a/webrtc/modules/video_coding/main/source/internal_defines.h b/webrtc/modules/video_coding/main/source/internal_defines.h
deleted file mode 100644
index adc940f20d..0000000000
--- a/webrtc/modules/video_coding/main/source/internal_defines.h
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_SOURCE_INTERNAL_DEFINES_H_
-#define WEBRTC_MODULES_VIDEO_CODING_SOURCE_INTERNAL_DEFINES_H_
-
-#include "webrtc/typedefs.h"
-
-namespace webrtc
-{
-
-#define MASK_32_BITS(x) (0xFFFFFFFF & (x))
-
-inline uint32_t MaskWord64ToUWord32(int64_t w64)
-{
- return static_cast<uint32_t>(MASK_32_BITS(w64));
-}
-
-#define VCM_MAX(a, b) (((a) > (b)) ? (a) : (b))
-#define VCM_MIN(a, b) (((a) < (b)) ? (a) : (b))
-
-#define VCM_DEFAULT_CODEC_WIDTH 352
-#define VCM_DEFAULT_CODEC_HEIGHT 288
-#define VCM_DEFAULT_FRAME_RATE 30
-#define VCM_MIN_BITRATE 30
-#define VCM_FLUSH_INDICATOR 4
-
-// Helper macros for creating the static codec list
-#define VCM_NO_CODEC_IDX -1
-#ifdef VIDEOCODEC_VP8
- #define VCM_VP8_IDX (VCM_NO_CODEC_IDX + 1)
-#else
- #define VCM_VP8_IDX VCM_NO_CODEC_IDX
-#endif
-#ifdef VIDEOCODEC_VP9
- #define VCM_VP9_IDX (VCM_VP8_IDX + 1)
-#else
- #define VCM_VP9_IDX VCM_VP8_IDX
-#endif
-#ifdef VIDEOCODEC_H264
- #define VCM_H264_IDX (VCM_VP9_IDX + 1)
-#else
- #define VCM_H264_IDX VCM_VP9_IDX
-#endif
-#ifdef VIDEOCODEC_I420
- #define VCM_I420_IDX (VCM_H264_IDX + 1)
-#else
- #define VCM_I420_IDX VCM_H264_IDX
-#endif
-#define VCM_NUM_VIDEO_CODECS_AVAILABLE (VCM_I420_IDX + 1)
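
Each #ifdef either claims the next index or aliases its predecessor's, so enabled codecs end up densely numbered from 0 and VCM_NUM_VIDEO_CODECS_AVAILABLE counts only compiled-in codecs. The following self-contained check replays the chain for a hypothetical build with only VP8 and H264 enabled:

```cpp
// Hypothetical build: only VP8 and H264 compiled in (the I420 link of the
// chain is omitted for brevity; it works the same way).
#define VIDEOCODEC_VP8
#define VIDEOCODEC_H264

#define VCM_NO_CODEC_IDX -1
#ifdef VIDEOCODEC_VP8
#define VCM_VP8_IDX (VCM_NO_CODEC_IDX + 1)
#else
#define VCM_VP8_IDX VCM_NO_CODEC_IDX
#endif
#ifdef VIDEOCODEC_VP9
#define VCM_VP9_IDX (VCM_VP8_IDX + 1)
#else
#define VCM_VP9_IDX VCM_VP8_IDX
#endif
#ifdef VIDEOCODEC_H264
#define VCM_H264_IDX (VCM_VP9_IDX + 1)
#else
#define VCM_H264_IDX VCM_VP9_IDX
#endif
#define VCM_NUM_VIDEO_CODECS_AVAILABLE (VCM_H264_IDX + 1)

static_assert(VCM_VP8_IDX == 0, "VP8 takes the first slot");
static_assert(VCM_VP9_IDX == 0, "disabled VP9 aliases its predecessor");
static_assert(VCM_H264_IDX == 1, "H264 takes the next free slot");
static_assert(VCM_NUM_VIDEO_CODECS_AVAILABLE == 2, "only enabled codecs count");

int main() { return 0; }
```
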
-
-#define VCM_NO_RECEIVER_ID 0
-
-inline int32_t VCMId(const int32_t vcmId, const int32_t receiverId = 0)
-{
- return static_cast<int32_t>((vcmId << 16) + receiverId);
-}
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_SOURCE_INTERNAL_DEFINES_H_
diff --git a/webrtc/modules/video_coding/main/source/jitter_buffer.cc b/webrtc/modules/video_coding/main/source/jitter_buffer.cc
deleted file mode 100644
index bfdd7867d9..0000000000
--- a/webrtc/modules/video_coding/main/source/jitter_buffer.cc
+++ /dev/null
@@ -1,1339 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-#include "webrtc/modules/video_coding/main/source/jitter_buffer.h"
-
-#include <assert.h>
-
-#include <algorithm>
-#include <utility>
-
-#include "webrtc/base/checks.h"
-#include "webrtc/base/trace_event.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding.h"
-#include "webrtc/modules/video_coding/main/source/frame_buffer.h"
-#include "webrtc/modules/video_coding/main/source/inter_frame_delay.h"
-#include "webrtc/modules/video_coding/main/source/internal_defines.h"
-#include "webrtc/modules/video_coding/main/source/jitter_buffer_common.h"
-#include "webrtc/modules/video_coding/main/source/jitter_estimator.h"
-#include "webrtc/modules/video_coding/main/source/packet.h"
-#include "webrtc/system_wrappers/include/clock.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/system_wrappers/include/logging.h"
-#include "webrtc/system_wrappers/include/metrics.h"
-
-namespace webrtc {
-
-// Interval for updating SS data.
-static const uint32_t kSsCleanupIntervalSec = 60;
-
-// Use this rtt if no value has been reported.
-static const int64_t kDefaultRtt = 200;
-
-typedef std::pair<uint32_t, VCMFrameBuffer*> FrameListPair;
-
-bool IsKeyFrame(FrameListPair pair) {
- return pair.second->FrameType() == kVideoFrameKey;
-}
-
-bool HasNonEmptyState(FrameListPair pair) {
- return pair.second->GetState() != kStateEmpty;
-}
-
-void FrameList::InsertFrame(VCMFrameBuffer* frame) {
- insert(rbegin().base(), FrameListPair(frame->TimeStamp(), frame));
-}
-
-VCMFrameBuffer* FrameList::PopFrame(uint32_t timestamp) {
- FrameList::iterator it = find(timestamp);
- if (it == end())
- return NULL;
- VCMFrameBuffer* frame = it->second;
- erase(it);
- return frame;
-}
-
-VCMFrameBuffer* FrameList::Front() const {
- return begin()->second;
-}
-
-VCMFrameBuffer* FrameList::Back() const {
- return rbegin()->second;
-}
-
-int FrameList::RecycleFramesUntilKeyFrame(FrameList::iterator* key_frame_it,
- UnorderedFrameList* free_frames) {
- int drop_count = 0;
- FrameList::iterator it = begin();
- while (!empty()) {
- // Throw at least one frame.
- it->second->Reset();
- free_frames->push_back(it->second);
- erase(it++);
- ++drop_count;
- if (it != end() && it->second->FrameType() == kVideoFrameKey) {
- *key_frame_it = it;
- return drop_count;
- }
- }
- *key_frame_it = end();
- return drop_count;
-}
-
-void FrameList::CleanUpOldOrEmptyFrames(VCMDecodingState* decoding_state,
- UnorderedFrameList* free_frames) {
- while (!empty()) {
- VCMFrameBuffer* oldest_frame = Front();
- bool remove_frame = false;
- if (oldest_frame->GetState() == kStateEmpty && size() > 1) {
- // This frame is empty, try to update the last decoded state and drop it
- // if successful.
- remove_frame = decoding_state->UpdateEmptyFrame(oldest_frame);
- } else {
- remove_frame = decoding_state->IsOldFrame(oldest_frame);
- }
- if (!remove_frame) {
- break;
- }
- free_frames->push_back(oldest_frame);
- TRACE_EVENT_INSTANT1("webrtc", "JB::OldOrEmptyFrameDropped", "timestamp",
- oldest_frame->TimeStamp());
- erase(begin());
- }
-}
-
-void FrameList::Reset(UnorderedFrameList* free_frames) {
- while (!empty()) {
- begin()->second->Reset();
- free_frames->push_back(begin()->second);
- erase(begin());
- }
-}
-
-bool Vp9SsMap::Insert(const VCMPacket& packet) {
- if (!packet.codecSpecificHeader.codecHeader.VP9.ss_data_available)
- return false;
-
- ss_map_[packet.timestamp] = packet.codecSpecificHeader.codecHeader.VP9.gof;
- return true;
-}
-
-void Vp9SsMap::Reset() {
- ss_map_.clear();
-}
-
-bool Vp9SsMap::Find(uint32_t timestamp, SsMap::iterator* it_out) {
- bool found = false;
- for (SsMap::iterator it = ss_map_.begin(); it != ss_map_.end(); ++it) {
- if (it->first == timestamp || IsNewerTimestamp(timestamp, it->first)) {
- *it_out = it;
- found = true;
- }
- }
- return found;
-}
-
-void Vp9SsMap::RemoveOld(uint32_t timestamp) {
- if (!TimeForCleanup(timestamp))
- return;
-
- SsMap::iterator it;
- if (!Find(timestamp, &it))
- return;
-
- ss_map_.erase(ss_map_.begin(), it);
- AdvanceFront(timestamp);
-}
-
-bool Vp9SsMap::TimeForCleanup(uint32_t timestamp) const {
- if (ss_map_.empty() || !IsNewerTimestamp(timestamp, ss_map_.begin()->first))
- return false;
-
- uint32_t diff = timestamp - ss_map_.begin()->first;
- return diff / kVideoPayloadTypeFrequency >= kSsCleanupIntervalSec;
-}
-
-void Vp9SsMap::AdvanceFront(uint32_t timestamp) {
- RTC_DCHECK(!ss_map_.empty());
- GofInfoVP9 gof = ss_map_.begin()->second;
- ss_map_.erase(ss_map_.begin());
- ss_map_[timestamp] = gof;
-}
-
-bool Vp9SsMap::UpdatePacket(VCMPacket* packet) {
- uint8_t gof_idx = packet->codecSpecificHeader.codecHeader.VP9.gof_idx;
- if (gof_idx == kNoGofIdx)
- return false; // No update needed.
-
- SsMap::iterator it;
- if (!Find(packet->timestamp, &it))
- return false; // Corresponding SS not yet received.
-
- if (gof_idx >= it->second.num_frames_in_gof)
- return false; // Assume corresponding SS not yet received.
-
- RTPVideoHeaderVP9* vp9 = &packet->codecSpecificHeader.codecHeader.VP9;
- vp9->temporal_idx = it->second.temporal_idx[gof_idx];
- vp9->temporal_up_switch = it->second.temporal_up_switch[gof_idx];
-
- // TODO(asapersson): Set vp9.ref_picture_id[i] and add usage.
- vp9->num_ref_pics = it->second.num_ref_pics[gof_idx];
- for (size_t i = 0; i < it->second.num_ref_pics[gof_idx]; ++i) {
- vp9->pid_diff[i] = it->second.pid_diff[gof_idx][i];
- }
- return true;
-}
-
-void Vp9SsMap::UpdateFrames(FrameList* frames) {
- for (const auto& frame_it : *frames) {
- uint8_t gof_idx =
- frame_it.second->CodecSpecific()->codecSpecific.VP9.gof_idx;
- if (gof_idx == kNoGofIdx) {
- continue;
- }
- SsMap::iterator ss_it;
- if (Find(frame_it.second->TimeStamp(), &ss_it)) {
- if (gof_idx >= ss_it->second.num_frames_in_gof) {
- continue; // Assume corresponding SS not yet received.
- }
- frame_it.second->SetGofInfo(ss_it->second, gof_idx);
- }
- }
-}
-
-VCMJitterBuffer::VCMJitterBuffer(Clock* clock,
- rtc::scoped_ptr<EventWrapper> event)
- : clock_(clock),
- running_(false),
- crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- frame_event_(event.Pass()),
- max_number_of_frames_(kStartNumberOfFrames),
- free_frames_(),
- decodable_frames_(),
- incomplete_frames_(),
- last_decoded_state_(),
- first_packet_since_reset_(true),
- stats_callback_(NULL),
- incoming_frame_rate_(0),
- incoming_frame_count_(0),
- time_last_incoming_frame_count_(0),
- incoming_bit_count_(0),
- incoming_bit_rate_(0),
- num_consecutive_old_packets_(0),
- num_packets_(0),
- num_duplicated_packets_(0),
- num_discarded_packets_(0),
- time_first_packet_ms_(0),
- jitter_estimate_(clock),
- inter_frame_delay_(clock_->TimeInMilliseconds()),
- rtt_ms_(kDefaultRtt),
- nack_mode_(kNoNack),
- low_rtt_nack_threshold_ms_(-1),
- high_rtt_nack_threshold_ms_(-1),
- missing_sequence_numbers_(SequenceNumberLessThan()),
- max_nack_list_size_(0),
- max_packet_age_to_nack_(0),
- max_incomplete_time_ms_(0),
- decode_error_mode_(kNoErrors),
- average_packets_per_frame_(0.0f),
- frame_counter_(0) {
- for (int i = 0; i < kStartNumberOfFrames; i++)
- free_frames_.push_back(new VCMFrameBuffer());
-}
-
-VCMJitterBuffer::~VCMJitterBuffer() {
- Stop();
- for (UnorderedFrameList::iterator it = free_frames_.begin();
- it != free_frames_.end(); ++it) {
- delete *it;
- }
- for (FrameList::iterator it = incomplete_frames_.begin();
- it != incomplete_frames_.end(); ++it) {
- delete it->second;
- }
- for (FrameList::iterator it = decodable_frames_.begin();
- it != decodable_frames_.end(); ++it) {
- delete it->second;
- }
- delete crit_sect_;
-}
-
-void VCMJitterBuffer::UpdateHistograms() {
- if (num_packets_ <= 0 || !running_) {
- return;
- }
- int64_t elapsed_sec =
- (clock_->TimeInMilliseconds() - time_first_packet_ms_) / 1000;
- if (elapsed_sec < metrics::kMinRunTimeInSeconds) {
- return;
- }
-
- RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.DiscardedPacketsInPercent",
- num_discarded_packets_ * 100 / num_packets_);
- RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.DuplicatedPacketsInPercent",
- num_duplicated_packets_ * 100 / num_packets_);
-
- int total_frames =
- receive_statistics_.key_frames + receive_statistics_.delta_frames;
- if (total_frames > 0) {
- RTC_HISTOGRAM_COUNTS_100("WebRTC.Video.CompleteFramesReceivedPerSecond",
- static_cast<int>((total_frames / elapsed_sec) + 0.5f));
- RTC_HISTOGRAM_COUNTS_1000(
- "WebRTC.Video.KeyFramesReceivedInPermille",
- static_cast<int>(
- (receive_statistics_.key_frames * 1000.0f / total_frames) + 0.5f));
- }
-}
-
-void VCMJitterBuffer::Start() {
- CriticalSectionScoped cs(crit_sect_);
- running_ = true;
- incoming_frame_count_ = 0;
- incoming_frame_rate_ = 0;
- incoming_bit_count_ = 0;
- incoming_bit_rate_ = 0;
- time_last_incoming_frame_count_ = clock_->TimeInMilliseconds();
- receive_statistics_ = FrameCounts();
-
- num_consecutive_old_packets_ = 0;
- num_packets_ = 0;
- num_duplicated_packets_ = 0;
- num_discarded_packets_ = 0;
- time_first_packet_ms_ = 0;
-
- // Start in a non-signaled state.
- waiting_for_completion_.frame_size = 0;
- waiting_for_completion_.timestamp = 0;
- waiting_for_completion_.latest_packet_time = -1;
- first_packet_since_reset_ = true;
- rtt_ms_ = kDefaultRtt;
- last_decoded_state_.Reset();
- vp9_ss_map_.Reset();
-}
-
-void VCMJitterBuffer::Stop() {
- crit_sect_->Enter();
- UpdateHistograms();
- running_ = false;
- last_decoded_state_.Reset();
- vp9_ss_map_.Reset();
-
- // Make sure all frames are free and reset.
- for (FrameList::iterator it = decodable_frames_.begin();
- it != decodable_frames_.end(); ++it) {
- free_frames_.push_back(it->second);
- }
- for (FrameList::iterator it = incomplete_frames_.begin();
- it != incomplete_frames_.end(); ++it) {
- free_frames_.push_back(it->second);
- }
- for (UnorderedFrameList::iterator it = free_frames_.begin();
- it != free_frames_.end(); ++it) {
- (*it)->Reset();
- }
- decodable_frames_.clear();
- incomplete_frames_.clear();
- crit_sect_->Leave();
- // Make sure we wake up any threads waiting on these events.
- frame_event_->Set();
-}
-
-bool VCMJitterBuffer::Running() const {
- CriticalSectionScoped cs(crit_sect_);
- return running_;
-}
-
-void VCMJitterBuffer::Flush() {
- CriticalSectionScoped cs(crit_sect_);
- decodable_frames_.Reset(&free_frames_);
- incomplete_frames_.Reset(&free_frames_);
- last_decoded_state_.Reset(); // TODO(mikhal): sync reset.
- vp9_ss_map_.Reset();
- num_consecutive_old_packets_ = 0;
- // Also reset the jitter and delay estimates
- jitter_estimate_.Reset();
- inter_frame_delay_.Reset(clock_->TimeInMilliseconds());
- waiting_for_completion_.frame_size = 0;
- waiting_for_completion_.timestamp = 0;
- waiting_for_completion_.latest_packet_time = -1;
- first_packet_since_reset_ = true;
- missing_sequence_numbers_.clear();
-}
-
-// Get received key and delta frames
-FrameCounts VCMJitterBuffer::FrameStatistics() const {
- CriticalSectionScoped cs(crit_sect_);
- return receive_statistics_;
-}
-
-int VCMJitterBuffer::num_packets() const {
- CriticalSectionScoped cs(crit_sect_);
- return num_packets_;
-}
-
-int VCMJitterBuffer::num_duplicated_packets() const {
- CriticalSectionScoped cs(crit_sect_);
- return num_duplicated_packets_;
-}
-
-int VCMJitterBuffer::num_discarded_packets() const {
- CriticalSectionScoped cs(crit_sect_);
- return num_discarded_packets_;
-}
-
-// Calculate framerate and bitrate.
-void VCMJitterBuffer::IncomingRateStatistics(unsigned int* framerate,
- unsigned int* bitrate) {
- assert(framerate);
- assert(bitrate);
- CriticalSectionScoped cs(crit_sect_);
- const int64_t now = clock_->TimeInMilliseconds();
- int64_t diff = now - time_last_incoming_frame_count_;
- if (diff < 1000 && incoming_frame_rate_ > 0 && incoming_bit_rate_ > 0) {
- // Make sure we report something even though less than
- // 1 second has passed since last update.
- *framerate = incoming_frame_rate_;
- *bitrate = incoming_bit_rate_;
- } else if (incoming_frame_count_ != 0) {
- // We have received frame(s) since last call to this function
-
- // Prepare calculations
- if (diff <= 0) {
- diff = 1;
- }
- // we add 0.5f for rounding
- float rate = 0.5f + ((incoming_frame_count_ * 1000.0f) / diff);
- if (rate < 1.0f) {
- rate = 1.0f;
- }
-
- // Calculate frame rate
- // Let r be rate.
- // r(0) = 1000*framecount/delta_time.
- // (I.e. frames per second since last calculation.)
- // frame_rate = r(0)/2 + r(-1)/2
- // (I.e. fr/s average this and the previous calculation.)
- *framerate = (incoming_frame_rate_ + static_cast<unsigned int>(rate)) / 2;
- incoming_frame_rate_ = static_cast<unsigned int>(rate);
-
- // Calculate bit rate
- if (incoming_bit_count_ == 0) {
- *bitrate = 0;
- } else {
- *bitrate = 10 * ((100 * incoming_bit_count_) /
- static_cast<unsigned int>(diff));
- }
- incoming_bit_rate_ = *bitrate;
-
- // Reset count
- incoming_frame_count_ = 0;
- incoming_bit_count_ = 0;
- time_last_incoming_frame_count_ = now;
-
- } else {
- // No frames since last call
- time_last_incoming_frame_count_ = clock_->TimeInMilliseconds();
- *framerate = 0;
- *bitrate = 0;
- incoming_frame_rate_ = 0;
- incoming_bit_rate_ = 0;
- }
-}
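
The reported frame rate is a two-tap average of the current and previous measurement, and the bitrate expression 10 * ((100 * bits) / diff_ms) is an integer-only spelling of bits-per-millisecond times 1000, i.e. bits per second. A worked example with illustrative numbers:

```cpp
#include <iostream>

int main() {
  // 15 frames and 600000 bits received over the last 500 ms; the previous
  // frame-rate measurement was 24 fps.
  unsigned int frame_count = 15, bit_count = 600000, diff_ms = 500;
  unsigned int prev_frame_rate = 24;

  // Instantaneous rate, rounded: 0.5 + 15 * 1000 / 500 = 30.5 -> 30 fps.
  float rate = 0.5f + (frame_count * 1000.0f) / diff_ms;
  // r(0)/2 + r(-1)/2: average this and the previous measurement.
  unsigned int framerate =
      (prev_frame_rate + static_cast<unsigned int>(rate)) / 2;  // 27 fps

  // 10 * (100 * bits) / ms == (bits / ms) * 1000 == bits per second.
  unsigned int bitrate = 10 * ((100 * bit_count) / diff_ms);  // 1200000 bps

  std::cout << framerate << " fps, " << bitrate << " bps\n";
}
```
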
-
-// Answers the question:
-// Will the packet sequence be complete if the next frame is grabbed for
-// decoding right now? That is, have we lost a frame between the last decoded
-// frame and the next, or is the next frame missing one or more packets?
-bool VCMJitterBuffer::CompleteSequenceWithNextFrame() {
- CriticalSectionScoped cs(crit_sect_);
- // Finding oldest frame ready for decoder, check sequence number and size
- CleanUpOldOrEmptyFrames();
- if (!decodable_frames_.empty()) {
- if (decodable_frames_.Front()->GetState() == kStateComplete) {
- return true;
- }
- } else if (incomplete_frames_.size() <= 1) {
- // Frame not ready to be decoded.
- return true;
- }
- return false;
-}
-
-// Returns immediately if a complete frame is available; otherwise waits up to
-// |max_wait_time_ms| (decided by the caller) for one to arrive.
-bool VCMJitterBuffer::NextCompleteTimestamp(
- uint32_t max_wait_time_ms, uint32_t* timestamp) {
- crit_sect_->Enter();
- if (!running_) {
- crit_sect_->Leave();
- return false;
- }
- CleanUpOldOrEmptyFrames();
-
- if (decodable_frames_.empty() ||
- decodable_frames_.Front()->GetState() != kStateComplete) {
- const int64_t end_wait_time_ms = clock_->TimeInMilliseconds() +
- max_wait_time_ms;
- int64_t wait_time_ms = max_wait_time_ms;
- while (wait_time_ms > 0) {
- crit_sect_->Leave();
- const EventTypeWrapper ret =
- frame_event_->Wait(static_cast<uint32_t>(wait_time_ms));
- crit_sect_->Enter();
- if (ret == kEventSignaled) {
- // Are we shutting down the jitter buffer?
- if (!running_) {
- crit_sect_->Leave();
- return false;
- }
- // Finding oldest frame ready for decoder.
- CleanUpOldOrEmptyFrames();
- if (decodable_frames_.empty() ||
- decodable_frames_.Front()->GetState() != kStateComplete) {
- wait_time_ms = end_wait_time_ms - clock_->TimeInMilliseconds();
- } else {
- break;
- }
- } else {
- break;
- }
- }
- }
- if (decodable_frames_.empty() ||
- decodable_frames_.Front()->GetState() != kStateComplete) {
- crit_sect_->Leave();
- return false;
- }
- *timestamp = decodable_frames_.Front()->TimeStamp();
- crit_sect_->Leave();
- return true;
-}
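
A typical consumer drives this from its decode thread: wait (bounded) for the next complete timestamp, extract that frame, decode, and hand the buffer back with ReleaseFrame(). The loop below is an illustrative caller sketch against the API in this file, not code from the module:

```cpp
#include "webrtc/modules/video_coding/main/source/jitter_buffer.h"

// Illustrative decode-thread loop; shutdown and error handling elided.
void DecodeLoop(webrtc::VCMJitterBuffer* jitter_buffer) {
  const uint32_t kMaxWaitMs = 200;
  for (;;) {
    uint32_t timestamp = 0;
    if (!jitter_buffer->NextCompleteTimestamp(kMaxWaitMs, &timestamp))
      continue;  // Timed out (or buffer stopped); poll again.
    webrtc::VCMEncodedFrame* frame =
        jitter_buffer->ExtractAndSetDecode(timestamp);
    if (frame == NULL)
      continue;  // Frame vanished between the two calls.
    // ... decode |frame| ...
    jitter_buffer->ReleaseFrame(frame);  // Return the buffer to the pool.
  }
}
```
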
-
-bool VCMJitterBuffer::NextMaybeIncompleteTimestamp(uint32_t* timestamp) {
- CriticalSectionScoped cs(crit_sect_);
- if (!running_) {
- return false;
- }
- if (decode_error_mode_ == kNoErrors) {
- // No point to continue, as we are not decoding with errors.
- return false;
- }
-
- CleanUpOldOrEmptyFrames();
-
- if (decodable_frames_.empty()) {
- return false;
- }
- VCMFrameBuffer* oldest_frame = decodable_frames_.Front();
- // If we have exactly one frame in the buffer, release it only if it is
- // complete. We know decodable_frames_ is not empty due to the previous
- // check.
- if (decodable_frames_.size() == 1 && incomplete_frames_.empty()
- && oldest_frame->GetState() != kStateComplete) {
- return false;
- }
-
- *timestamp = oldest_frame->TimeStamp();
- return true;
-}
-
-VCMEncodedFrame* VCMJitterBuffer::ExtractAndSetDecode(uint32_t timestamp) {
- CriticalSectionScoped cs(crit_sect_);
- if (!running_) {
- return NULL;
- }
- // Extract the frame with the desired timestamp.
- VCMFrameBuffer* frame = decodable_frames_.PopFrame(timestamp);
- bool continuous = true;
- if (!frame) {
- frame = incomplete_frames_.PopFrame(timestamp);
- if (frame)
- continuous = last_decoded_state_.ContinuousFrame(frame);
- else
- return NULL;
- }
- TRACE_EVENT_ASYNC_STEP0("webrtc", "Video", timestamp, "Extract");
- // Frame pulled out from jitter buffer, update the jitter estimate.
- const bool retransmitted = (frame->GetNackCount() > 0);
- if (retransmitted) {
- jitter_estimate_.FrameNacked();
- } else if (frame->Length() > 0) {
- // Ignore retransmitted and empty frames.
- if (waiting_for_completion_.latest_packet_time >= 0) {
- UpdateJitterEstimate(waiting_for_completion_, true);
- }
- if (frame->GetState() == kStateComplete) {
- UpdateJitterEstimate(*frame, false);
- } else {
- // Wait for this one to get complete.
- waiting_for_completion_.frame_size = frame->Length();
- waiting_for_completion_.latest_packet_time =
- frame->LatestPacketTimeMs();
- waiting_for_completion_.timestamp = frame->TimeStamp();
- }
- }
-
- // The state must be changed to decoding before cleaning up zero sized
- // frames to avoid empty frames being cleaned up and then given to the
- // decoder. Propagates the missing_frame bit.
- frame->PrepareForDecode(continuous);
-
- // We have a frame - update the last decoded state and nack list.
- last_decoded_state_.SetState(frame);
- DropPacketsFromNackList(last_decoded_state_.sequence_num());
-
- if ((*frame).IsSessionComplete())
- UpdateAveragePacketsPerFrame(frame->NumPackets());
-
- return frame;
-}
-
-// Release frame when done with decoding. Should never be used to release
-// frames from within the jitter buffer.
-void VCMJitterBuffer::ReleaseFrame(VCMEncodedFrame* frame) {
- CriticalSectionScoped cs(crit_sect_);
- VCMFrameBuffer* frame_buffer = static_cast<VCMFrameBuffer*>(frame);
- if (frame_buffer) {
- free_frames_.push_back(frame_buffer);
- }
-}
-
-// Gets frame to use for this timestamp. If no match, get empty frame.
-VCMFrameBufferEnum VCMJitterBuffer::GetFrame(const VCMPacket& packet,
- VCMFrameBuffer** frame,
- FrameList** frame_list) {
- *frame = incomplete_frames_.PopFrame(packet.timestamp);
- if (*frame != NULL) {
- *frame_list = &incomplete_frames_;
- return kNoError;
- }
- *frame = decodable_frames_.PopFrame(packet.timestamp);
- if (*frame != NULL) {
- *frame_list = &decodable_frames_;
- return kNoError;
- }
-
- *frame_list = NULL;
- // No match, return empty frame.
- *frame = GetEmptyFrame();
- if (*frame == NULL) {
- // No free frame! Try to reclaim some...
- LOG(LS_WARNING) << "Unable to get empty frame; Recycling.";
- bool found_key_frame = RecycleFramesUntilKeyFrame();
- *frame = GetEmptyFrame();
- assert(*frame);
- if (!found_key_frame) {
- free_frames_.push_back(*frame);
- return kFlushIndicator;
- }
- }
- (*frame)->Reset();
- return kNoError;
-}
-
-int64_t VCMJitterBuffer::LastPacketTime(const VCMEncodedFrame* frame,
- bool* retransmitted) const {
- assert(retransmitted);
- CriticalSectionScoped cs(crit_sect_);
- const VCMFrameBuffer* frame_buffer =
- static_cast<const VCMFrameBuffer*>(frame);
- *retransmitted = (frame_buffer->GetNackCount() > 0);
- return frame_buffer->LatestPacketTimeMs();
-}
-
-VCMFrameBufferEnum VCMJitterBuffer::InsertPacket(const VCMPacket& packet,
- bool* retransmitted) {
- CriticalSectionScoped cs(crit_sect_);
-
- ++num_packets_;
- if (num_packets_ == 1) {
- time_first_packet_ms_ = clock_->TimeInMilliseconds();
- }
- // Does this packet belong to an old frame?
- if (last_decoded_state_.IsOldPacket(&packet)) {
- // Account only for media packets.
- if (packet.sizeBytes > 0) {
- num_discarded_packets_++;
- num_consecutive_old_packets_++;
- if (stats_callback_ != NULL)
- stats_callback_->OnDiscardedPacketsUpdated(num_discarded_packets_);
- }
- // Update last decoded sequence number if the packet arrived late and
- // belongs to a frame with a timestamp equal to the last decoded
- // timestamp.
- last_decoded_state_.UpdateOldPacket(&packet);
- DropPacketsFromNackList(last_decoded_state_.sequence_num());
-
- // Also see if this old packet made more incomplete frames continuous.
- FindAndInsertContinuousFramesWithState(last_decoded_state_);
-
- if (num_consecutive_old_packets_ > kMaxConsecutiveOldPackets) {
- LOG(LS_WARNING)
- << num_consecutive_old_packets_
- << " consecutive old packets received. Flushing the jitter buffer.";
- Flush();
- return kFlushIndicator;
- }
- return kOldPacket;
- }
-
- num_consecutive_old_packets_ = 0;
-
- if (packet.codec == kVideoCodecVP9) {
- if (packet.codecSpecificHeader.codecHeader.VP9.flexible_mode) {
- // TODO(asapersson): Add support for flexible mode.
- return kGeneralError;
- }
- if (!packet.codecSpecificHeader.codecHeader.VP9.flexible_mode) {
- if (vp9_ss_map_.Insert(packet))
- vp9_ss_map_.UpdateFrames(&incomplete_frames_);
-
- vp9_ss_map_.UpdatePacket(const_cast<VCMPacket*>(&packet));
- }
- if (!last_decoded_state_.in_initial_state())
- vp9_ss_map_.RemoveOld(last_decoded_state_.time_stamp());
- }
-
- VCMFrameBuffer* frame;
- FrameList* frame_list;
- const VCMFrameBufferEnum error = GetFrame(packet, &frame, &frame_list);
- if (error != kNoError)
- return error;
-
- int64_t now_ms = clock_->TimeInMilliseconds();
- // We are keeping track of the first and latest seq numbers, and
- // the number of wraps to be able to calculate how many packets we expect.
- if (first_packet_since_reset_) {
- // Now it's time to start estimating jitter
- // reset the delay estimate.
- inter_frame_delay_.Reset(now_ms);
- }
-
-  // Empty packets may bias the jitter estimate (lacking size component),
-  // so don't let empty packets trigger the following updates:
- if (packet.frameType != kEmptyFrame) {
- if (waiting_for_completion_.timestamp == packet.timestamp) {
-      // This can get bad if we have a lot of duplicate packets, as we will
-      // then count some packets multiple times.
- waiting_for_completion_.frame_size += packet.sizeBytes;
- waiting_for_completion_.latest_packet_time = now_ms;
- } else if (waiting_for_completion_.latest_packet_time >= 0 &&
- waiting_for_completion_.latest_packet_time + 2000 <= now_ms) {
- // A packet should never be more than two seconds late
- UpdateJitterEstimate(waiting_for_completion_, true);
- waiting_for_completion_.latest_packet_time = -1;
- waiting_for_completion_.frame_size = 0;
- waiting_for_completion_.timestamp = 0;
- }
- }
-
- VCMFrameBufferStateEnum previous_state = frame->GetState();
- // Insert packet.
- FrameData frame_data;
- frame_data.rtt_ms = rtt_ms_;
- frame_data.rolling_average_packets_per_frame = average_packets_per_frame_;
- VCMFrameBufferEnum buffer_state =
- frame->InsertPacket(packet, now_ms, decode_error_mode_, frame_data);
-
- if (previous_state != kStateComplete) {
- TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Video", frame->TimeStamp(),
- "timestamp", frame->TimeStamp());
- }
-
- if (buffer_state > 0) {
- incoming_bit_count_ += packet.sizeBytes << 3;
- if (first_packet_since_reset_) {
- latest_received_sequence_number_ = packet.seqNum;
- first_packet_since_reset_ = false;
- } else {
- if (IsPacketRetransmitted(packet)) {
- frame->IncrementNackCount();
- }
- if (!UpdateNackList(packet.seqNum) &&
- packet.frameType != kVideoFrameKey) {
- buffer_state = kFlushIndicator;
- }
-
- latest_received_sequence_number_ = LatestSequenceNumber(
- latest_received_sequence_number_, packet.seqNum);
- }
- }
-
- // Is the frame already in the decodable list?
- bool continuous = IsContinuous(*frame);
- switch (buffer_state) {
- case kGeneralError:
- case kTimeStampError:
- case kSizeError: {
- free_frames_.push_back(frame);
- break;
- }
- case kCompleteSession: {
- if (previous_state != kStateDecodable &&
- previous_state != kStateComplete) {
- CountFrame(*frame);
- if (continuous) {
- // Signal that we have a complete session.
- frame_event_->Set();
- }
- }
- FALLTHROUGH();
- }
- // Note: There is no break here - continuing to kDecodableSession.
- case kDecodableSession: {
- *retransmitted = (frame->GetNackCount() > 0);
- if (continuous) {
- decodable_frames_.InsertFrame(frame);
- FindAndInsertContinuousFrames(*frame);
- } else {
- incomplete_frames_.InsertFrame(frame);
- }
- break;
- }
- case kIncomplete: {
- if (frame->GetState() == kStateEmpty &&
- last_decoded_state_.UpdateEmptyFrame(frame)) {
- free_frames_.push_back(frame);
- return kNoError;
- } else {
- incomplete_frames_.InsertFrame(frame);
- }
- break;
- }
- case kNoError:
- case kOutOfBoundsPacket:
- case kDuplicatePacket: {
- // Put back the frame where it came from.
- if (frame_list != NULL) {
- frame_list->InsertFrame(frame);
- } else {
- free_frames_.push_back(frame);
- }
- ++num_duplicated_packets_;
- break;
- }
- case kFlushIndicator:
- free_frames_.push_back(frame);
- return kFlushIndicator;
- default: assert(false);
- }
- return buffer_state;
-}
-
-bool VCMJitterBuffer::IsContinuousInState(const VCMFrameBuffer& frame,
- const VCMDecodingState& decoding_state) const {
- if (decode_error_mode_ == kWithErrors)
- return true;
- // Is this frame (complete or decodable) and continuous?
- // kStateDecodable will never be set when decode_error_mode_ is false
- // as SessionInfo determines this state based on the error mode (and frame
- // completeness).
- return (frame.GetState() == kStateComplete ||
- frame.GetState() == kStateDecodable) &&
- decoding_state.ContinuousFrame(&frame);
-}
-
-bool VCMJitterBuffer::IsContinuous(const VCMFrameBuffer& frame) const {
- if (IsContinuousInState(frame, last_decoded_state_)) {
- return true;
- }
- VCMDecodingState decoding_state;
- decoding_state.CopyFrom(last_decoded_state_);
- for (FrameList::const_iterator it = decodable_frames_.begin();
- it != decodable_frames_.end(); ++it) {
- VCMFrameBuffer* decodable_frame = it->second;
- if (IsNewerTimestamp(decodable_frame->TimeStamp(), frame.TimeStamp())) {
- break;
- }
- decoding_state.SetState(decodable_frame);
- if (IsContinuousInState(frame, decoding_state)) {
- return true;
- }
- }
- return false;
-}
-
-void VCMJitterBuffer::FindAndInsertContinuousFrames(
- const VCMFrameBuffer& new_frame) {
- VCMDecodingState decoding_state;
- decoding_state.CopyFrom(last_decoded_state_);
- decoding_state.SetState(&new_frame);
- FindAndInsertContinuousFramesWithState(decoding_state);
-}
-
-void VCMJitterBuffer::FindAndInsertContinuousFramesWithState(
- const VCMDecodingState& original_decoded_state) {
- // Copy original_decoded_state so we can move the state forward with each
- // decodable frame we find.
- VCMDecodingState decoding_state;
- decoding_state.CopyFrom(original_decoded_state);
-
- // When temporal layers are available, we search for a complete or decodable
- // frame until we hit one of the following:
- // 1. Continuous base or sync layer.
- // 2. The end of the list was reached.
- for (FrameList::iterator it = incomplete_frames_.begin();
- it != incomplete_frames_.end();) {
- VCMFrameBuffer* frame = it->second;
- if (IsNewerTimestamp(original_decoded_state.time_stamp(),
- frame->TimeStamp())) {
- ++it;
- continue;
- }
- if (IsContinuousInState(*frame, decoding_state)) {
- decodable_frames_.InsertFrame(frame);
- incomplete_frames_.erase(it++);
- decoding_state.SetState(frame);
- } else if (frame->TemporalId() <= 0) {
- break;
- } else {
- ++it;
- }
- }
-}
-
-uint32_t VCMJitterBuffer::EstimatedJitterMs() {
- CriticalSectionScoped cs(crit_sect_);
- // Compute RTT multiplier for estimation.
-  // low_rtt_nack_threshold_ms_ == -1 means no FEC.
- double rtt_mult = 1.0f;
- if (low_rtt_nack_threshold_ms_ >= 0 &&
- rtt_ms_ >= low_rtt_nack_threshold_ms_) {
- // For RTTs above low_rtt_nack_threshold_ms_ we don't apply extra delay
- // when waiting for retransmissions.
- rtt_mult = 0.0f;
- }
- return jitter_estimate_.GetJitterEstimate(rtt_mult);
-}
-
-void VCMJitterBuffer::UpdateRtt(int64_t rtt_ms) {
- CriticalSectionScoped cs(crit_sect_);
- rtt_ms_ = rtt_ms;
- jitter_estimate_.UpdateRtt(rtt_ms);
-}
-
-void VCMJitterBuffer::SetNackMode(VCMNackMode mode,
- int64_t low_rtt_nack_threshold_ms,
- int64_t high_rtt_nack_threshold_ms) {
- CriticalSectionScoped cs(crit_sect_);
- nack_mode_ = mode;
- if (mode == kNoNack) {
- missing_sequence_numbers_.clear();
- }
- assert(low_rtt_nack_threshold_ms >= -1 && high_rtt_nack_threshold_ms >= -1);
- assert(high_rtt_nack_threshold_ms == -1 ||
- low_rtt_nack_threshold_ms <= high_rtt_nack_threshold_ms);
- assert(low_rtt_nack_threshold_ms > -1 || high_rtt_nack_threshold_ms == -1);
- low_rtt_nack_threshold_ms_ = low_rtt_nack_threshold_ms;
- high_rtt_nack_threshold_ms_ = high_rtt_nack_threshold_ms;
-  // Don't set a high start RTT if high_rtt_nack_threshold_ms_ is used, so
-  // that NACK is not disabled in |kNack| mode.
- if (rtt_ms_ == kDefaultRtt && high_rtt_nack_threshold_ms_ != -1) {
- rtt_ms_ = 0;
- }
- if (!WaitForRetransmissions()) {
- jitter_estimate_.ResetNackCount();
- }
-}
-
-void VCMJitterBuffer::SetNackSettings(size_t max_nack_list_size,
- int max_packet_age_to_nack,
- int max_incomplete_time_ms) {
- CriticalSectionScoped cs(crit_sect_);
- assert(max_packet_age_to_nack >= 0);
-  assert(max_incomplete_time_ms >= 0);
- max_nack_list_size_ = max_nack_list_size;
- max_packet_age_to_nack_ = max_packet_age_to_nack;
- max_incomplete_time_ms_ = max_incomplete_time_ms;
-}
-
-VCMNackMode VCMJitterBuffer::nack_mode() const {
- CriticalSectionScoped cs(crit_sect_);
- return nack_mode_;
-}
-
-int VCMJitterBuffer::NonContinuousOrIncompleteDuration() {
- if (incomplete_frames_.empty()) {
- return 0;
- }
- uint32_t start_timestamp = incomplete_frames_.Front()->TimeStamp();
- if (!decodable_frames_.empty()) {
- start_timestamp = decodable_frames_.Back()->TimeStamp();
- }
- return incomplete_frames_.Back()->TimeStamp() - start_timestamp;
-}
-
-uint16_t VCMJitterBuffer::EstimatedLowSequenceNumber(
- const VCMFrameBuffer& frame) const {
- assert(frame.GetLowSeqNum() >= 0);
- if (frame.HaveFirstPacket())
- return frame.GetLowSeqNum();
-
- // This estimate is not accurate if more than one packet with lower sequence
- // number is lost.
- return frame.GetLowSeqNum() - 1;
-}
-
-std::vector<uint16_t> VCMJitterBuffer::GetNackList(bool* request_key_frame) {
- CriticalSectionScoped cs(crit_sect_);
- *request_key_frame = false;
- if (nack_mode_ == kNoNack) {
- return std::vector<uint16_t>();
- }
- if (last_decoded_state_.in_initial_state()) {
- VCMFrameBuffer* next_frame = NextFrame();
- const bool first_frame_is_key = next_frame &&
- next_frame->FrameType() == kVideoFrameKey &&
- next_frame->HaveFirstPacket();
- if (!first_frame_is_key) {
- bool have_non_empty_frame = decodable_frames_.end() != find_if(
- decodable_frames_.begin(), decodable_frames_.end(),
- HasNonEmptyState);
- if (!have_non_empty_frame) {
- have_non_empty_frame = incomplete_frames_.end() != find_if(
- incomplete_frames_.begin(), incomplete_frames_.end(),
- HasNonEmptyState);
- }
- bool found_key_frame = RecycleFramesUntilKeyFrame();
- if (!found_key_frame) {
- *request_key_frame = have_non_empty_frame;
- return std::vector<uint16_t>();
- }
- }
- }
- if (TooLargeNackList()) {
- *request_key_frame = !HandleTooLargeNackList();
- }
- if (max_incomplete_time_ms_ > 0) {
- int non_continuous_incomplete_duration =
- NonContinuousOrIncompleteDuration();
- if (non_continuous_incomplete_duration > 90 * max_incomplete_time_ms_) {
- LOG_F(LS_WARNING) << "Too long non-decodable duration: "
- << non_continuous_incomplete_duration << " > "
- << 90 * max_incomplete_time_ms_;
- FrameList::reverse_iterator rit = find_if(incomplete_frames_.rbegin(),
- incomplete_frames_.rend(), IsKeyFrame);
- if (rit == incomplete_frames_.rend()) {
- // Request a key frame if we don't have one already.
- *request_key_frame = true;
- return std::vector<uint16_t>();
- } else {
- // Skip to the last key frame. If it's incomplete we will start
- // NACKing it.
- // Note that the estimated low sequence number is correct for VP8
- // streams because only the first packet of a key frame is marked.
- last_decoded_state_.Reset();
- DropPacketsFromNackList(EstimatedLowSequenceNumber(*rit->second));
- }
- }
- }
- std::vector<uint16_t> nack_list(missing_sequence_numbers_.begin(),
- missing_sequence_numbers_.end());
- return nack_list;
-}
-
-void VCMJitterBuffer::SetDecodeErrorMode(VCMDecodeErrorMode error_mode) {
- CriticalSectionScoped cs(crit_sect_);
- decode_error_mode_ = error_mode;
-}
-
-VCMFrameBuffer* VCMJitterBuffer::NextFrame() const {
- if (!decodable_frames_.empty())
- return decodable_frames_.Front();
- if (!incomplete_frames_.empty())
- return incomplete_frames_.Front();
- return NULL;
-}
-
-bool VCMJitterBuffer::UpdateNackList(uint16_t sequence_number) {
- if (nack_mode_ == kNoNack) {
- return true;
- }
- // Make sure we don't add packets which are already too old to be decoded.
- if (!last_decoded_state_.in_initial_state()) {
- latest_received_sequence_number_ = LatestSequenceNumber(
- latest_received_sequence_number_,
- last_decoded_state_.sequence_num());
- }
- if (IsNewerSequenceNumber(sequence_number,
- latest_received_sequence_number_)) {
- // Push any missing sequence numbers to the NACK list.
- for (uint16_t i = latest_received_sequence_number_ + 1;
- IsNewerSequenceNumber(sequence_number, i); ++i) {
- missing_sequence_numbers_.insert(missing_sequence_numbers_.end(), i);
- TRACE_EVENT_INSTANT1(TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"), "AddNack",
- "seqnum", i);
- }
- if (TooLargeNackList() && !HandleTooLargeNackList()) {
- LOG(LS_WARNING) << "Requesting key frame due to too large NACK list.";
- return false;
- }
- if (MissingTooOldPacket(sequence_number) &&
- !HandleTooOldPackets(sequence_number)) {
- LOG(LS_WARNING) << "Requesting key frame due to missing too old packets";
- return false;
- }
- } else {
- missing_sequence_numbers_.erase(sequence_number);
- TRACE_EVENT_INSTANT1(TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"), "RemoveNack",
- "seqnum", sequence_number);
- }
- return true;
-}
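
The fill loop depends on wrap-aware sequence-number comparison, so it also works across the 65535 -> 0 boundary. A standalone sketch of just that step; the IsNewer() helper is assumed to match the semantics of webrtc's IsNewerSequenceNumber, and a plain std::set stands in for the wrap-ordered NACK list:

```cpp
#include <cstdint>
#include <iostream>
#include <set>

// Wrap-aware "newer" test: a is newer than b if the forward distance
// (mod 2^16) is below 2^15. Assumed to match IsNewerSequenceNumber.
bool IsNewer(uint16_t a, uint16_t b) {
  return a != b && static_cast<uint16_t>(a - b) < 0x8000;
}

int main() {
  std::set<uint16_t> missing;  // The real list uses a wrap-aware ordering.
  uint16_t latest_received = 65534;
  uint16_t incoming = 2;  // Arrives just after a sequence-number wrap.

  // Same shape as the fill loop in UpdateNackList(): NACK every sequence
  // number between the previously latest packet and the new one.
  for (uint16_t i = latest_received + 1; IsNewer(incoming, i); ++i)
    missing.insert(i);

  for (uint16_t seq : missing)
    std::cout << seq << " ";  // Prints: 0 1 65535
  std::cout << "\n";
}
```
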
-
-bool VCMJitterBuffer::TooLargeNackList() const {
- return missing_sequence_numbers_.size() > max_nack_list_size_;
-}
-
-bool VCMJitterBuffer::HandleTooLargeNackList() {
- // Recycle frames until the NACK list is small enough. It is likely cheaper to
- // request a key frame than to retransmit this many missing packets.
- LOG_F(LS_WARNING) << "NACK list has grown too large: "
- << missing_sequence_numbers_.size() << " > "
- << max_nack_list_size_;
- bool key_frame_found = false;
- while (TooLargeNackList()) {
- key_frame_found = RecycleFramesUntilKeyFrame();
- }
- return key_frame_found;
-}
-
-bool VCMJitterBuffer::MissingTooOldPacket(
- uint16_t latest_sequence_number) const {
- if (missing_sequence_numbers_.empty()) {
- return false;
- }
- const uint16_t age_of_oldest_missing_packet = latest_sequence_number -
- *missing_sequence_numbers_.begin();
- // Recycle frames if the NACK list contains too old sequence numbers as
- // the packets may have already been dropped by the sender.
- return age_of_oldest_missing_packet > max_packet_age_to_nack_;
-}
-
-bool VCMJitterBuffer::HandleTooOldPackets(uint16_t latest_sequence_number) {
- bool key_frame_found = false;
- const uint16_t age_of_oldest_missing_packet = latest_sequence_number -
- *missing_sequence_numbers_.begin();
- LOG_F(LS_WARNING) << "NACK list contains too old sequence numbers: "
- << age_of_oldest_missing_packet << " > "
- << max_packet_age_to_nack_;
- while (MissingTooOldPacket(latest_sequence_number)) {
- key_frame_found = RecycleFramesUntilKeyFrame();
- }
- return key_frame_found;
-}
-
-void VCMJitterBuffer::DropPacketsFromNackList(
- uint16_t last_decoded_sequence_number) {
- // Erase all sequence numbers from the NACK list which we won't need any
- // longer.
- missing_sequence_numbers_.erase(missing_sequence_numbers_.begin(),
- missing_sequence_numbers_.upper_bound(
- last_decoded_sequence_number));
-}
-
-int64_t VCMJitterBuffer::LastDecodedTimestamp() const {
- CriticalSectionScoped cs(crit_sect_);
- return last_decoded_state_.time_stamp();
-}
-
-void VCMJitterBuffer::RenderBufferSize(uint32_t* timestamp_start,
- uint32_t* timestamp_end) {
- CriticalSectionScoped cs(crit_sect_);
- CleanUpOldOrEmptyFrames();
- *timestamp_start = 0;
- *timestamp_end = 0;
- if (decodable_frames_.empty()) {
- return;
- }
- *timestamp_start = decodable_frames_.Front()->TimeStamp();
- *timestamp_end = decodable_frames_.Back()->TimeStamp();
-}
-
-void VCMJitterBuffer::RegisterStatsCallback(
- VCMReceiveStatisticsCallback* callback) {
- CriticalSectionScoped cs(crit_sect_);
- stats_callback_ = callback;
-}
-
-VCMFrameBuffer* VCMJitterBuffer::GetEmptyFrame() {
- if (free_frames_.empty()) {
- if (!TryToIncreaseJitterBufferSize()) {
- return NULL;
- }
- }
- VCMFrameBuffer* frame = free_frames_.front();
- free_frames_.pop_front();
- return frame;
-}
-
-bool VCMJitterBuffer::TryToIncreaseJitterBufferSize() {
- if (max_number_of_frames_ >= kMaxNumberOfFrames)
- return false;
- free_frames_.push_back(new VCMFrameBuffer());
- ++max_number_of_frames_;
- TRACE_COUNTER1("webrtc", "JBMaxFrames", max_number_of_frames_);
- return true;
-}
-
-// Recycle oldest frames up to a key frame, used if jitter buffer is completely
-// full.
-bool VCMJitterBuffer::RecycleFramesUntilKeyFrame() {
- // First release incomplete frames, and only release decodable frames if there
- // are no incomplete ones.
- FrameList::iterator key_frame_it;
- bool key_frame_found = false;
- int dropped_frames = 0;
- dropped_frames += incomplete_frames_.RecycleFramesUntilKeyFrame(
- &key_frame_it, &free_frames_);
- key_frame_found = key_frame_it != incomplete_frames_.end();
- if (dropped_frames == 0) {
- dropped_frames += decodable_frames_.RecycleFramesUntilKeyFrame(
- &key_frame_it, &free_frames_);
- key_frame_found = key_frame_it != decodable_frames_.end();
- }
- TRACE_EVENT_INSTANT0("webrtc", "JB::RecycleFramesUntilKeyFrame");
- if (key_frame_found) {
- LOG(LS_INFO) << "Found key frame while dropping frames.";
- // Reset last decoded state to make sure the next frame decoded is a key
- // frame, and start NACKing from here.
- last_decoded_state_.Reset();
- DropPacketsFromNackList(EstimatedLowSequenceNumber(*key_frame_it->second));
- } else if (decodable_frames_.empty()) {
- // All frames dropped. Reset the decoding state and clear missing sequence
- // numbers as we're starting fresh.
- last_decoded_state_.Reset();
- missing_sequence_numbers_.clear();
- }
- return key_frame_found;
-}
-
-// Must be called under the critical section |crit_sect_|.
-void VCMJitterBuffer::CountFrame(const VCMFrameBuffer& frame) {
- incoming_frame_count_++;
-
- if (frame.FrameType() == kVideoFrameKey) {
- TRACE_EVENT_ASYNC_STEP0("webrtc", "Video",
- frame.TimeStamp(), "KeyComplete");
- } else {
- TRACE_EVENT_ASYNC_STEP0("webrtc", "Video",
- frame.TimeStamp(), "DeltaComplete");
- }
-
-  // Update receive statistics. We count all layers, so when layers are used
-  // the sum of all key and delta frames may differ from the frame count.
- if (frame.IsSessionComplete()) {
- if (frame.FrameType() == kVideoFrameKey) {
- ++receive_statistics_.key_frames;
- } else {
- ++receive_statistics_.delta_frames;
- }
- if (stats_callback_ != NULL)
- stats_callback_->OnFrameCountsUpdated(receive_statistics_);
- }
-}
-
-void VCMJitterBuffer::UpdateAveragePacketsPerFrame(int current_number_packets) {
- if (frame_counter_ > kFastConvergeThreshold) {
- average_packets_per_frame_ = average_packets_per_frame_
- * (1 - kNormalConvergeMultiplier)
- + current_number_packets * kNormalConvergeMultiplier;
- } else if (frame_counter_ > 0) {
- average_packets_per_frame_ = average_packets_per_frame_
- * (1 - kFastConvergeMultiplier)
- + current_number_packets * kFastConvergeMultiplier;
- frame_counter_++;
- } else {
- average_packets_per_frame_ = current_number_packets;
- frame_counter_++;
- }
-}
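
This is an exponential moving average with two smoothing regimes: a fast multiplier while the estimate warms up over the first few frames, then a slow one. The threshold and multipliers are defined in jitter_buffer_common.h and are not shown in this diff, so the constants below are purely illustrative:

```cpp
#include <iostream>

int main() {
  // Illustrative constants; the real values live in jitter_buffer_common.h.
  const int kFastConvergeThreshold = 5;
  const float kFastConvergeMultiplier = 0.4f;
  const float kNormalConvergeMultiplier = 0.04f;

  float average = 0.0f;
  int frame_counter = 0;
  const int packets_per_frame[] = {4, 6, 5, 5, 7, 5, 5, 5, 5, 5};

  for (int packets : packets_per_frame) {
    if (frame_counter > kFastConvergeThreshold) {
      // Settled: each new sample gets a small weight.
      average = average * (1 - kNormalConvergeMultiplier) +
                packets * kNormalConvergeMultiplier;
    } else if (frame_counter > 0) {
      // Warming up: converge quickly toward the observed rate.
      average = average * (1 - kFastConvergeMultiplier) +
                packets * kFastConvergeMultiplier;
      frame_counter++;
    } else {
      average = packets;  // The first frame seeds the estimate.
      frame_counter++;
    }
  }
  std::cout << "average packets/frame ~= " << average << "\n";
}
```
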
-
-// Must be called under the critical section |crit_sect_|.
-void VCMJitterBuffer::CleanUpOldOrEmptyFrames() {
- decodable_frames_.CleanUpOldOrEmptyFrames(&last_decoded_state_,
- &free_frames_);
- incomplete_frames_.CleanUpOldOrEmptyFrames(&last_decoded_state_,
- &free_frames_);
- if (!last_decoded_state_.in_initial_state()) {
- DropPacketsFromNackList(last_decoded_state_.sequence_num());
- }
-}
-
-// Must be called from within |crit_sect_|.
-bool VCMJitterBuffer::IsPacketRetransmitted(const VCMPacket& packet) const {
- return missing_sequence_numbers_.find(packet.seqNum) !=
- missing_sequence_numbers_.end();
-}
-
-// Must be called under the critical section |crit_sect_|. Should never be
-// called with retransmitted frames, they must be filtered out before this
-// function is called.
-void VCMJitterBuffer::UpdateJitterEstimate(const VCMJitterSample& sample,
- bool incomplete_frame) {
- if (sample.latest_packet_time == -1) {
- return;
- }
- UpdateJitterEstimate(sample.latest_packet_time, sample.timestamp,
- sample.frame_size, incomplete_frame);
-}
-
-// Must be called under the critical section crit_sect_. Should never be
-// called with retransmitted frames, they must be filtered out before this
-// function is called.
-void VCMJitterBuffer::UpdateJitterEstimate(const VCMFrameBuffer& frame,
- bool incomplete_frame) {
- if (frame.LatestPacketTimeMs() == -1) {
- return;
- }
- // No retransmitted frames should be a part of the jitter
- // estimate.
- UpdateJitterEstimate(frame.LatestPacketTimeMs(), frame.TimeStamp(),
- frame.Length(), incomplete_frame);
-}
-
-// Must be called under the critical section |crit_sect_|. Should never be
-// called with retransmitted frames, they must be filtered out before this
-// function is called.
-void VCMJitterBuffer::UpdateJitterEstimate(
- int64_t latest_packet_time_ms,
- uint32_t timestamp,
- unsigned int frame_size,
- bool incomplete_frame) {
- if (latest_packet_time_ms == -1) {
- return;
- }
- int64_t frame_delay;
- bool not_reordered = inter_frame_delay_.CalculateDelay(timestamp,
- &frame_delay,
- latest_packet_time_ms);
- // Filter out frames which have been reordered in time by the network
- if (not_reordered) {
- // Update the jitter estimate with the new samples
- jitter_estimate_.UpdateEstimate(frame_delay, frame_size, incomplete_frame);
- }
-}
-
-bool VCMJitterBuffer::WaitForRetransmissions() {
- if (nack_mode_ == kNoNack) {
- // NACK disabled -> don't wait for retransmissions.
- return false;
- }
- // Evaluate if the RTT is higher than |high_rtt_nack_threshold_ms_|, and in
- // that case we don't wait for retransmissions.
- if (high_rtt_nack_threshold_ms_ >= 0 &&
- rtt_ms_ >= high_rtt_nack_threshold_ms_) {
- return false;
- }
- return true;
-}
-} // namespace webrtc
diff --git a/webrtc/modules/video_coding/main/source/jitter_buffer.h b/webrtc/modules/video_coding/main/source/jitter_buffer.h
deleted file mode 100644
index f4a3638f7d..0000000000
--- a/webrtc/modules/video_coding/main/source/jitter_buffer.h
+++ /dev/null
@@ -1,396 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_JITTER_BUFFER_H_
-#define WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_JITTER_BUFFER_H_
-
-#include <list>
-#include <map>
-#include <set>
-#include <vector>
-
-#include "webrtc/base/constructormagic.h"
-#include "webrtc/base/thread_annotations.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding_defines.h"
-#include "webrtc/modules/video_coding/main/source/decoding_state.h"
-#include "webrtc/modules/video_coding/main/source/inter_frame_delay.h"
-#include "webrtc/modules/video_coding/main/source/jitter_buffer_common.h"
-#include "webrtc/modules/video_coding/main/source/jitter_estimator.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-enum VCMNackMode {
- kNack,
- kNoNack
-};
-
-// Forward declarations.
-class Clock;
-class EventFactory;
-class EventWrapper;
-class VCMFrameBuffer;
-class VCMPacket;
-class VCMEncodedFrame;
-
-typedef std::list<VCMFrameBuffer*> UnorderedFrameList;
-
-struct VCMJitterSample {
- VCMJitterSample() : timestamp(0), frame_size(0), latest_packet_time(-1) {}
- uint32_t timestamp;
- uint32_t frame_size;
- int64_t latest_packet_time;
-};
-
-class TimestampLessThan {
- public:
- bool operator() (uint32_t timestamp1,
- uint32_t timestamp2) const {
- return IsNewerTimestamp(timestamp2, timestamp1);
- }
-};
-
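-// Illustrative note, not part of the original source: IsNewerTimestamp() is
-// wrap-aware, so this comparator keeps FrameList correctly ordered across a
-// 32-bit timestamp wrap. For example, with keys 0xFFFFFFFF and 100, 100 is
-// treated as newer and 0xFFFFFFFF sorts first.
-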
-class FrameList
- : public std::map<uint32_t, VCMFrameBuffer*, TimestampLessThan> {
- public:
- void InsertFrame(VCMFrameBuffer* frame);
- VCMFrameBuffer* PopFrame(uint32_t timestamp);
- VCMFrameBuffer* Front() const;
- VCMFrameBuffer* Back() const;
- int RecycleFramesUntilKeyFrame(FrameList::iterator* key_frame_it,
- UnorderedFrameList* free_frames);
- void CleanUpOldOrEmptyFrames(VCMDecodingState* decoding_state,
- UnorderedFrameList* free_frames);
- void Reset(UnorderedFrameList* free_frames);
-};
-
-class Vp9SsMap {
- public:
- typedef std::map<uint32_t, GofInfoVP9, TimestampLessThan> SsMap;
- bool Insert(const VCMPacket& packet);
- void Reset();
-
- // Removes SS data that are older than |timestamp|.
- // The |timestamp| should be an old timestamp, i.e. packets with older
- // timestamps should no longer be inserted.
- void RemoveOld(uint32_t timestamp);
-
- bool UpdatePacket(VCMPacket* packet);
- void UpdateFrames(FrameList* frames);
-
- // Public for testing.
- // Finds the corresponding SS data for the input |timestamp|; returns true
- // and sets |it| to point at the entry if found.
- bool Find(uint32_t timestamp, SsMap::iterator* it);
-
- private:
- // These two functions are called by RemoveOld.
- // Checks if it is time to do a cleanup (done every kSsCleanupIntervalSec).
- bool TimeForCleanup(uint32_t timestamp) const;
-
- // Advances the oldest SS data to handle timestamp wrap in cases where SS
- // data are received very seldom (e.g. only once at the beginning, with the
- // second arriving only after IsNewerTimestamp is no longer true).
- void AdvanceFront(uint32_t timestamp);
-
- SsMap ss_map_;
-};
-
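-// Illustrative usage, not part of the original source, mirroring the
-// Vp9SsMapTest cases removed further down in this change:
-//   Vp9SsMap ss_map;
-//   ss_map.Insert(ss_packet); // A packet with VP9.ss_data_available set.
-//   ss_map.UpdatePacket(&packet); // Fills in temporal_idx etc. from gof_idx.
-//   ss_map.RemoveOld(old_timestamp); // Drops entries older than the input.
-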
-class VCMJitterBuffer {
- public:
- VCMJitterBuffer(Clock* clock, rtc::scoped_ptr<EventWrapper> event);
-
- ~VCMJitterBuffer();
-
- // Initializes and starts the jitter buffer.
- void Start();
-
- // Signals all internal events and stops the jitter buffer.
- void Stop();
-
- // Returns true if the jitter buffer is running.
- bool Running() const;
-
- // Empties the jitter buffer of all its data.
- void Flush();
-
- // Gets the number of received frames, by type, since the jitter buffer
- // was started.
- FrameCounts FrameStatistics() const;
-
- // The number of packets discarded by the jitter buffer because the decoder
- // won't be able to decode them.
- int num_not_decodable_packets() const;
-
- // Gets number of packets received.
- int num_packets() const;
-
- // Gets number of duplicated packets received.
- int num_duplicated_packets() const;
-
- // Gets number of packets discarded by the jitter buffer.
- int num_discarded_packets() const;
-
- // Statistics: calculates frame and bit rates.
- void IncomingRateStatistics(unsigned int* framerate,
- unsigned int* bitrate);
-
- // Checks if the packet sequence will be complete if the next frame is
- // grabbed for decoding. That is, checks that no frame has been lost between
- // the last decoded frame and the next, and that the next frame is not
- // missing one or more packets.
- bool CompleteSequenceWithNextFrame();
-
- // Waits up to |max_wait_time_ms| ms for a complete frame to arrive. If such
- // a frame is found, returns true and sets |timestamp| to the frame's
- // timestamp. Otherwise, returns false.
- bool NextCompleteTimestamp(uint32_t max_wait_time_ms, uint32_t* timestamp);
-
- // Locates a frame for decoding (even an incomplete one) without delay. If
- // such a frame is found, returns true and sets |timestamp| to the frame's
- // timestamp. Otherwise, returns false.
- bool NextMaybeIncompleteTimestamp(uint32_t* timestamp);
-
- // Extracts the frame corresponding to |timestamp|.
- // The frame will be set to a decoding state.
- VCMEncodedFrame* ExtractAndSetDecode(uint32_t timestamp);
-
- // Releases a frame returned from the jitter buffer; should be called when
- // decoding is done.
- void ReleaseFrame(VCMEncodedFrame* frame);
-
- // Returns the time in ms when the latest packet was inserted into the frame.
- // |retransmitted| is set to true if any of the packets belonging to the
- // frame have been retransmitted.
- int64_t LastPacketTime(const VCMEncodedFrame* frame,
- bool* retransmitted) const;
-
- // Inserts a packet into a frame returned from GetFrame().
- // If the return value is <= 0, |frame| is invalidated and the pointer must
- // be dropped after this function returns.
- VCMFrameBufferEnum InsertPacket(const VCMPacket& packet,
- bool* retransmitted);
-
- // Returns the estimated jitter in milliseconds.
- uint32_t EstimatedJitterMs();
-
- // Updates the round-trip time estimate.
- void UpdateRtt(int64_t rtt_ms);
-
- // Sets the NACK mode. |high_rtt_nack_threshold_ms| is an RTT threshold in
- // ms above which NACK will be disabled if the NACK mode is |kNack|; -1
- // means that NACK is always enabled in the |kNack| mode.
- // |low_rtt_nack_threshold_ms| is an RTT threshold in ms below which we
- // expect to rely on NACK only, and therefore use larger buffers to have
- // time to wait for retransmissions.
- void SetNackMode(VCMNackMode mode, int64_t low_rtt_nack_threshold_ms,
- int64_t high_rtt_nack_threshold_ms);
-
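- // Illustrative usage, not part of the original source: the NACK unit tests
- // removed below enable NACK with both RTT thresholds disabled:
- //   jitter_buffer->SetNackMode(kNack, -1, -1);
-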
- void SetNackSettings(size_t max_nack_list_size,
- int max_packet_age_to_nack,
- int max_incomplete_time_ms);
-
- // Returns the current NACK mode.
- VCMNackMode nack_mode() const;
-
- // Returns a list of the sequence numbers currently missing.
- std::vector<uint16_t> GetNackList(bool* request_key_frame);
-
- // Sets the decode error mode. Should not be changed in the middle of a
- // session; changes will not influence frames already in the buffer.
- void SetDecodeErrorMode(VCMDecodeErrorMode error_mode);
- int64_t LastDecodedTimestamp() const;
- VCMDecodeErrorMode decode_error_mode() const { return decode_error_mode_; }
-
- // Used to compute time of complete continuous frames. Returns the timestamps
- // corresponding to the start and end of the continuous complete buffer.
- void RenderBufferSize(uint32_t* timestamp_start, uint32_t* timestamp_end);
-
- void RegisterStatsCallback(VCMReceiveStatisticsCallback* callback);
-
- private:
- class SequenceNumberLessThan {
- public:
- bool operator() (const uint16_t& sequence_number1,
- const uint16_t& sequence_number2) const {
- return IsNewerSequenceNumber(sequence_number2, sequence_number1);
- }
- };
- typedef std::set<uint16_t, SequenceNumberLessThan> SequenceNumberSet;
-
- // Gets the frame assigned to the timestamp of the packet. May recycle
- // existing frames if no free frames are available. Returns an error code on
- // failure, or kNoError on success. |frame_list| is set to the list the
- // frame was in, or NULL if it was not in a FrameList (i.e. a new frame).
- VCMFrameBufferEnum GetFrame(const VCMPacket& packet,
- VCMFrameBuffer** frame,
- FrameList** frame_list)
- EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
-
- // Returns true if |frame| is continuous in |decoding_state|, not taking
- // decodable frames into account.
- bool IsContinuousInState(const VCMFrameBuffer& frame,
- const VCMDecodingState& decoding_state) const
- EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
- // Returns true if |frame| is continuous in the |last_decoded_state_|, taking
- // all decodable frames into account.
- bool IsContinuous(const VCMFrameBuffer& frame) const
- EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
- // Looks for frames in |incomplete_frames_| which are continuous in the
- // provided |decoded_state|. Starts the search from the timestamp of
- // |decoded_state|.
- void FindAndInsertContinuousFramesWithState(
- const VCMDecodingState& decoded_state)
- EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
- // Looks for frames in |incomplete_frames_| which are continuous in
- // |last_decoded_state_| taking all decodable frames into account. Starts
- // the search from |new_frame|.
- void FindAndInsertContinuousFrames(const VCMFrameBuffer& new_frame)
- EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
- VCMFrameBuffer* NextFrame() const EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
- // Returns true if the NACK list was updated to cover sequence numbers up to
- // |sequence_number|. If false, a key frame is needed to get into a state
- // where we can continue decoding.
- bool UpdateNackList(uint16_t sequence_number)
- EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
- bool TooLargeNackList() const;
- // Returns true if the NACK list was reduced without problems. If false, a
- // key frame is needed to get into a state where we can continue decoding.
- bool HandleTooLargeNackList() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
- bool MissingTooOldPacket(uint16_t latest_sequence_number) const
- EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
- // Returns true if the too-old packets were successfully removed from the
- // NACK list. If false, a key frame is needed to get into a state where we
- // can continue decoding.
- bool HandleTooOldPackets(uint16_t latest_sequence_number)
- EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
- // Drops all packets in the NACK list up until |last_decoded_sequence_number|.
- void DropPacketsFromNackList(uint16_t last_decoded_sequence_number);
-
- void ReleaseFrameIfNotDecoding(VCMFrameBuffer* frame);
-
- // Gets an empty frame, creating a new frame if necessary (i.e. increasing
- // the jitter buffer size).
- VCMFrameBuffer* GetEmptyFrame() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
-
- // Attempts to increase the size of the jitter buffer. Returns true on
- // success, false otherwise.
- bool TryToIncreaseJitterBufferSize() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
-
- // Recycles the oldest frames until a key frame is found. Used if the jitter
- // buffer is completely full. Returns true if a key frame was found.
- bool RecycleFramesUntilKeyFrame() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
-
- // Updates the frame statistics.
- // Counts only complete frames, so decodable incomplete frames will not be
- // counted.
- void CountFrame(const VCMFrameBuffer& frame)
- EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
-
- // Updates the rolling average of packets per frame.
- void UpdateAveragePacketsPerFrame(int current_number_packets);
-
- // Removes old/empty frames from the jitter buffer's frame lists.
- // Should only be called prior to actual use.
- void CleanUpOldOrEmptyFrames() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
-
- // Returns true if |packet| is likely to have been retransmitted.
- bool IsPacketRetransmitted(const VCMPacket& packet) const;
-
- // The following three functions update the jitter estimate with the
- // payload size, receive time and RTP timestamp of a frame.
- void UpdateJitterEstimate(const VCMJitterSample& sample,
- bool incomplete_frame);
- void UpdateJitterEstimate(const VCMFrameBuffer& frame, bool incomplete_frame);
- void UpdateJitterEstimate(int64_t latest_packet_time_ms,
- uint32_t timestamp,
- unsigned int frame_size,
- bool incomplete_frame);
-
- // Returns true if we should wait for retransmissions, false otherwise.
- bool WaitForRetransmissions();
-
- int NonContinuousOrIncompleteDuration() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
-
- uint16_t EstimatedLowSequenceNumber(const VCMFrameBuffer& frame) const;
-
- void UpdateHistograms() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
-
- Clock* clock_;
- // Whether we are running (have started) or not.
- bool running_;
- CriticalSectionWrapper* crit_sect_;
- // Event to signal when we have a frame ready for the decoder.
- rtc::scoped_ptr<EventWrapper> frame_event_;
- // Number of allocated frames.
- int max_number_of_frames_;
- UnorderedFrameList free_frames_ GUARDED_BY(crit_sect_);
- FrameList decodable_frames_ GUARDED_BY(crit_sect_);
- FrameList incomplete_frames_ GUARDED_BY(crit_sect_);
- VCMDecodingState last_decoded_state_ GUARDED_BY(crit_sect_);
- bool first_packet_since_reset_;
- // Contains scalability structure data for VP9.
- Vp9SsMap vp9_ss_map_ GUARDED_BY(crit_sect_);
-
- // Statistics.
- VCMReceiveStatisticsCallback* stats_callback_ GUARDED_BY(crit_sect_);
- // Frame counts for each type (key, delta, ...)
- FrameCounts receive_statistics_;
- // Latest calculated frame rates of incoming stream.
- unsigned int incoming_frame_rate_;
- unsigned int incoming_frame_count_;
- int64_t time_last_incoming_frame_count_;
- unsigned int incoming_bit_count_;
- unsigned int incoming_bit_rate_;
- // Number of frames in a row that have been too old.
- int num_consecutive_old_frames_;
- // Number of packets in a row that have been too old.
- int num_consecutive_old_packets_;
- // Number of packets received.
- int num_packets_ GUARDED_BY(crit_sect_);
- // Number of duplicated packets received.
- int num_duplicated_packets_ GUARDED_BY(crit_sect_);
- // Number of packets discarded by the jitter buffer.
- int num_discarded_packets_ GUARDED_BY(crit_sect_);
- // Time when the first packet was received.
- int64_t time_first_packet_ms_ GUARDED_BY(crit_sect_);
-
- // Jitter estimation.
- // Filter for estimating jitter.
- VCMJitterEstimator jitter_estimate_;
- // Calculates network delays used for jitter calculations.
- VCMInterFrameDelay inter_frame_delay_;
- VCMJitterSample waiting_for_completion_;
- int64_t rtt_ms_;
-
- // NACK and retransmissions.
- VCMNackMode nack_mode_;
- int64_t low_rtt_nack_threshold_ms_;
- int64_t high_rtt_nack_threshold_ms_;
- // Holds the internal NACK list (the missing sequence numbers).
- SequenceNumberSet missing_sequence_numbers_;
- uint16_t latest_received_sequence_number_;
- size_t max_nack_list_size_;
- int max_packet_age_to_nack_; // Measured in sequence numbers.
- int max_incomplete_time_ms_;
-
- VCMDecodeErrorMode decode_error_mode_;
- // Estimated rolling average of packets per frame.
- float average_packets_per_frame_;
- // |average_packets_per_frame_| converges fast if we have fewer than this
- // many frames.
- int frame_counter_;
- RTC_DISALLOW_COPY_AND_ASSIGN(VCMJitterBuffer);
-};
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_JITTER_BUFFER_H_
diff --git a/webrtc/modules/video_coding/main/source/jitter_buffer_common.h b/webrtc/modules/video_coding/main/source/jitter_buffer_common.h
deleted file mode 100644
index 97af78087a..0000000000
--- a/webrtc/modules/video_coding/main/source/jitter_buffer_common.h
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_JITTER_BUFFER_COMMON_H_
-#define WEBRTC_MODULES_VIDEO_CODING_JITTER_BUFFER_COMMON_H_
-
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-// Used to estimate the rolling average of packets per frame.
-static const float kFastConvergeMultiplier = 0.4f;
-static const float kNormalConvergeMultiplier = 0.2f;
-
-enum { kMaxNumberOfFrames = 300 };
-enum { kStartNumberOfFrames = 6 };
-enum { kMaxVideoDelayMs = 10000 };
-enum { kPacketsPerFrameMultiplier = 5 };
-enum { kFastConvergeThreshold = 5 };
-
-enum VCMJitterBufferEnum {
- kMaxConsecutiveOldFrames = 60,
- kMaxConsecutiveOldPackets = 300,
- // TODO(sprang): Reduce this limit once codecs don't sometimes wildly
- // overshoot bitrate target.
- kMaxPacketsInSession = 1400, // Allows ~2MB frames.
- kBufferIncStepSizeBytes = 30000, // >20 packets.
- kMaxJBFrameSizeBytes = 4000000 // Sanity: don't go above 4 MB.
-};
-
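-// Illustrative arithmetic, not part of the original source: with ~1400-byte
-// packets, kMaxPacketsInSession = 1400 allows frames of roughly
-// 1400 * 1400 bytes, about 2 MB, and kBufferIncStepSizeBytes = 30000 grows a
-// frame buffer by 30000 / 1400, about 21 packets per step, matching the
-// comments above.
-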
-enum VCMFrameBufferEnum {
- kOutOfBoundsPacket = -7,
- kNotInitialized = -6,
- kOldPacket = -5,
- kGeneralError = -4,
- kFlushIndicator = -3, // Indicator that a flush has occurred.
- kTimeStampError = -2,
- kSizeError = -1,
- kNoError = 0,
- kIncomplete = 1, // Frame incomplete.
- kCompleteSession = 3, // At least one layer in the frame is complete.
- kDecodableSession = 4, // Frame incomplete, but ready to be decoded.
- kDuplicatePacket = 5 // We're receiving a duplicate packet.
-};
-
-enum VCMFrameBufferStateEnum {
- kStateEmpty, // Frame popped by the RTP receiver.
- kStateIncomplete, // Frame that has one or more packets stored.
- kStateComplete, // Frame that has all packets.
- kStateDecodable // Hybrid mode: the frame can be decoded.
-};
-
-enum { kH264StartCodeLengthBytes = 4 };
-
-// Used to indicate if a received packet contains a complete NALU (or
-// equivalent).
-enum VCMNaluCompleteness {
- kNaluUnset = 0, // Packet has not been filled.
- kNaluComplete = 1, // Packet can be decoded as is.
- kNaluStart, // Packet contains the beginning of a NALU.
- kNaluIncomplete, // Packet is neither the beginning nor the end of a NALU.
- kNaluEnd, // Packet is the end of a NALU.
-};
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_JITTER_BUFFER_COMMON_H_
diff --git a/webrtc/modules/video_coding/main/source/jitter_buffer_unittest.cc b/webrtc/modules/video_coding/main/source/jitter_buffer_unittest.cc
deleted file mode 100644
index d6c6d4985b..0000000000
--- a/webrtc/modules/video_coding/main/source/jitter_buffer_unittest.cc
+++ /dev/null
@@ -1,2575 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <string.h>
-
-#include <list>
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/video_coding/main/source/frame_buffer.h"
-#include "webrtc/modules/video_coding/main/source/jitter_buffer.h"
-#include "webrtc/modules/video_coding/main/source/media_opt_util.h"
-#include "webrtc/modules/video_coding/main/source/packet.h"
-#include "webrtc/modules/video_coding/main/source/test/stream_generator.h"
-#include "webrtc/modules/video_coding/main/test/test_util.h"
-#include "webrtc/system_wrappers/include/clock.h"
-#include "webrtc/system_wrappers/include/metrics.h"
-#include "webrtc/test/histogram.h"
-
-namespace webrtc {
-
-namespace {
- const uint32_t kProcessIntervalSec = 60;
-} // namespace
-
-class Vp9SsMapTest : public ::testing::Test {
- protected:
- Vp9SsMapTest()
- : packet_(data_, 1400, 1234, 1, true) {}
-
- virtual void SetUp() {
- packet_.isFirstPacket = true;
- packet_.markerBit = true;
- packet_.frameType = kVideoFrameKey;
- packet_.codec = kVideoCodecVP9;
- packet_.codecSpecificHeader.codec = kRtpVideoVp9;
- packet_.codecSpecificHeader.codecHeader.VP9.flexible_mode = false;
- packet_.codecSpecificHeader.codecHeader.VP9.gof_idx = 0;
- packet_.codecSpecificHeader.codecHeader.VP9.temporal_idx = kNoTemporalIdx;
- packet_.codecSpecificHeader.codecHeader.VP9.temporal_up_switch = false;
- packet_.codecSpecificHeader.codecHeader.VP9.ss_data_available = true;
- packet_.codecSpecificHeader.codecHeader.VP9.gof.SetGofInfoVP9(
- kTemporalStructureMode3); // kTemporalStructureMode3: 0-2-1-2..
- }
-
- Vp9SsMap map_;
- uint8_t data_[1500];
- VCMPacket packet_;
-};
-
-TEST_F(Vp9SsMapTest, Insert) {
- EXPECT_TRUE(map_.Insert(packet_));
-}
-
-TEST_F(Vp9SsMapTest, Insert_NoSsData) {
- packet_.codecSpecificHeader.codecHeader.VP9.ss_data_available = false;
- EXPECT_FALSE(map_.Insert(packet_));
-}
-
-TEST_F(Vp9SsMapTest, Find) {
- EXPECT_TRUE(map_.Insert(packet_));
- Vp9SsMap::SsMap::iterator it;
- EXPECT_TRUE(map_.Find(packet_.timestamp, &it));
- EXPECT_EQ(packet_.timestamp, it->first);
-}
-
-TEST_F(Vp9SsMapTest, Find_WithWrap) {
- const uint32_t kSsTimestamp1 = 0xFFFFFFFF;
- const uint32_t kSsTimestamp2 = 100;
- packet_.timestamp = kSsTimestamp1;
- EXPECT_TRUE(map_.Insert(packet_));
- packet_.timestamp = kSsTimestamp2;
- EXPECT_TRUE(map_.Insert(packet_));
- Vp9SsMap::SsMap::iterator it;
- EXPECT_FALSE(map_.Find(kSsTimestamp1 - 1, &it));
- EXPECT_TRUE(map_.Find(kSsTimestamp1, &it));
- EXPECT_EQ(kSsTimestamp1, it->first);
- EXPECT_TRUE(map_.Find(0, &it));
- EXPECT_EQ(kSsTimestamp1, it->first);
- EXPECT_TRUE(map_.Find(kSsTimestamp2 - 1, &it));
- EXPECT_EQ(kSsTimestamp1, it->first);
- EXPECT_TRUE(map_.Find(kSsTimestamp2, &it));
- EXPECT_EQ(kSsTimestamp2, it->first);
- EXPECT_TRUE(map_.Find(kSsTimestamp2 + 1, &it));
- EXPECT_EQ(kSsTimestamp2, it->first);
-}
-
-TEST_F(Vp9SsMapTest, Reset) {
- EXPECT_TRUE(map_.Insert(packet_));
- Vp9SsMap::SsMap::iterator it;
- EXPECT_TRUE(map_.Find(packet_.timestamp, &it));
- EXPECT_EQ(packet_.timestamp, it->first);
-
- map_.Reset();
- EXPECT_FALSE(map_.Find(packet_.timestamp, &it));
-}
-
-TEST_F(Vp9SsMapTest, RemoveOld) {
- Vp9SsMap::SsMap::iterator it;
- const uint32_t kSsTimestamp1 = 10000;
- packet_.timestamp = kSsTimestamp1;
- EXPECT_TRUE(map_.Insert(packet_));
-
- const uint32_t kTimestamp = kSsTimestamp1 + kProcessIntervalSec * 90000;
- map_.RemoveOld(kTimestamp - 1); // Interval has not passed.
- EXPECT_TRUE(map_.Find(kSsTimestamp1, &it)); // Should not have been removed.
-
- map_.RemoveOld(kTimestamp);
- EXPECT_FALSE(map_.Find(kSsTimestamp1, &it));
- EXPECT_TRUE(map_.Find(kTimestamp, &it));
- EXPECT_EQ(kTimestamp, it->first);
-}
-
-TEST_F(Vp9SsMapTest, RemoveOld_WithWrap) {
- Vp9SsMap::SsMap::iterator it;
- const uint32_t kSsTimestamp1 = 0xFFFFFFFF - kProcessIntervalSec * 90000;
- const uint32_t kSsTimestamp2 = 10;
- const uint32_t kSsTimestamp3 = 1000;
- packet_.timestamp = kSsTimestamp1;
- EXPECT_TRUE(map_.Insert(packet_));
- packet_.timestamp = kSsTimestamp2;
- EXPECT_TRUE(map_.Insert(packet_));
- packet_.timestamp = kSsTimestamp3;
- EXPECT_TRUE(map_.Insert(packet_));
-
- map_.RemoveOld(kSsTimestamp3);
- EXPECT_FALSE(map_.Find(kSsTimestamp1, &it));
- EXPECT_FALSE(map_.Find(kSsTimestamp2, &it));
- EXPECT_TRUE(map_.Find(kSsTimestamp3, &it));
-}
-
-TEST_F(Vp9SsMapTest, UpdatePacket_NoSsData) {
- packet_.codecSpecificHeader.codecHeader.VP9.gof_idx = 0;
- EXPECT_FALSE(map_.UpdatePacket(&packet_));
-}
-
-TEST_F(Vp9SsMapTest, UpdatePacket_NoGofIdx) {
- EXPECT_TRUE(map_.Insert(packet_));
- packet_.codecSpecificHeader.codecHeader.VP9.gof_idx = kNoGofIdx;
- EXPECT_FALSE(map_.UpdatePacket(&packet_));
-}
-
-TEST_F(Vp9SsMapTest, UpdatePacket_InvalidGofIdx) {
- EXPECT_TRUE(map_.Insert(packet_));
- packet_.codecSpecificHeader.codecHeader.VP9.gof_idx = 4;
- EXPECT_FALSE(map_.UpdatePacket(&packet_));
-}
-
-TEST_F(Vp9SsMapTest, UpdatePacket) {
- EXPECT_TRUE(map_.Insert(packet_)); // kTemporalStructureMode3: 0-2-1-2..
-
- packet_.codecSpecificHeader.codecHeader.VP9.gof_idx = 0;
- EXPECT_TRUE(map_.UpdatePacket(&packet_));
- EXPECT_EQ(0, packet_.codecSpecificHeader.codecHeader.VP9.temporal_idx);
- EXPECT_FALSE(packet_.codecSpecificHeader.codecHeader.VP9.temporal_up_switch);
- EXPECT_EQ(1U, packet_.codecSpecificHeader.codecHeader.VP9.num_ref_pics);
- EXPECT_EQ(4, packet_.codecSpecificHeader.codecHeader.VP9.pid_diff[0]);
-
- packet_.codecSpecificHeader.codecHeader.VP9.gof_idx = 1;
- EXPECT_TRUE(map_.UpdatePacket(&packet_));
- EXPECT_EQ(2, packet_.codecSpecificHeader.codecHeader.VP9.temporal_idx);
- EXPECT_TRUE(packet_.codecSpecificHeader.codecHeader.VP9.temporal_up_switch);
- EXPECT_EQ(1U, packet_.codecSpecificHeader.codecHeader.VP9.num_ref_pics);
- EXPECT_EQ(1, packet_.codecSpecificHeader.codecHeader.VP9.pid_diff[0]);
-
- packet_.codecSpecificHeader.codecHeader.VP9.gof_idx = 2;
- EXPECT_TRUE(map_.UpdatePacket(&packet_));
- EXPECT_EQ(1, packet_.codecSpecificHeader.codecHeader.VP9.temporal_idx);
- EXPECT_TRUE(packet_.codecSpecificHeader.codecHeader.VP9.temporal_up_switch);
- EXPECT_EQ(1U, packet_.codecSpecificHeader.codecHeader.VP9.num_ref_pics);
- EXPECT_EQ(2, packet_.codecSpecificHeader.codecHeader.VP9.pid_diff[0]);
-
- packet_.codecSpecificHeader.codecHeader.VP9.gof_idx = 3;
- EXPECT_TRUE(map_.UpdatePacket(&packet_));
- EXPECT_EQ(2, packet_.codecSpecificHeader.codecHeader.VP9.temporal_idx);
- EXPECT_FALSE(packet_.codecSpecificHeader.codecHeader.VP9.temporal_up_switch);
- EXPECT_EQ(2U, packet_.codecSpecificHeader.codecHeader.VP9.num_ref_pics);
- EXPECT_EQ(1, packet_.codecSpecificHeader.codecHeader.VP9.pid_diff[0]);
- EXPECT_EQ(2, packet_.codecSpecificHeader.codecHeader.VP9.pid_diff[1]);
-}
-
-class TestBasicJitterBuffer : public ::testing::Test {
- protected:
- virtual void SetUp() {
- clock_.reset(new SimulatedClock(0));
- jitter_buffer_.reset(new VCMJitterBuffer(
- clock_.get(),
- rtc::scoped_ptr<EventWrapper>(event_factory_.CreateEvent())));
- jitter_buffer_->Start();
- seq_num_ = 1234;
- timestamp_ = 0;
- size_ = 1400;
- // Data vector - 0, 0, 0x80, 3, 4, 5, 6, 7, 8, 9, 0, 0, 0x80, 3....
- data_[0] = 0;
- data_[1] = 0;
- data_[2] = 0x80;
- int count = 3;
- for (unsigned int i = 3; i < sizeof(data_) - 3; ++i) {
- data_[i] = count;
- count++;
- if (count == 10) {
- data_[i + 1] = 0;
- data_[i + 2] = 0;
- data_[i + 3] = 0x80;
- count = 3;
- i += 3;
- }
- }
- packet_.reset(new VCMPacket(data_, size_, seq_num_, timestamp_, true));
- }
-
- VCMEncodedFrame* DecodeCompleteFrame() {
- uint32_t timestamp = 0;
- bool found_frame = jitter_buffer_->NextCompleteTimestamp(10, &timestamp);
- if (!found_frame)
- return NULL;
- VCMEncodedFrame* frame = jitter_buffer_->ExtractAndSetDecode(timestamp);
- return frame;
- }
-
- VCMEncodedFrame* DecodeIncompleteFrame() {
- uint32_t timestamp = 0;
- bool found_frame = jitter_buffer_->NextMaybeIncompleteTimestamp(&timestamp);
- if (!found_frame)
- return NULL;
- VCMEncodedFrame* frame = jitter_buffer_->ExtractAndSetDecode(timestamp);
- return frame;
- }
-
- void CheckOutFrame(VCMEncodedFrame* frame_out,
- unsigned int size,
- bool startCode) {
- ASSERT_TRUE(frame_out);
-
- const uint8_t* outData = frame_out->Buffer();
- unsigned int i = 0;
-
- if (startCode) {
- EXPECT_EQ(0, outData[0]);
- EXPECT_EQ(0, outData[1]);
- EXPECT_EQ(0, outData[2]);
- EXPECT_EQ(1, outData[3]);
- i += 4;
- }
-
- EXPECT_EQ(size, frame_out->Length());
- int count = 3;
- for (; i < size; i++) {
- if (outData[i] == 0 && outData[i + 1] == 0 && outData[i + 2] == 0x80) {
- i += 2;
- } else if (startCode && outData[i] == 0 && outData[i + 1] == 0) {
- EXPECT_EQ(0, outData[0]);
- EXPECT_EQ(0, outData[1]);
- EXPECT_EQ(0, outData[2]);
- EXPECT_EQ(1, outData[3]);
- i += 3;
- } else {
- EXPECT_EQ(count, outData[i]);
- count++;
- if (count == 10) {
- count = 3;
- }
- }
- }
- }
-
- uint16_t seq_num_;
- uint32_t timestamp_;
- int size_;
- uint8_t data_[1500];
- rtc::scoped_ptr<VCMPacket> packet_;
- rtc::scoped_ptr<SimulatedClock> clock_;
- NullEventFactory event_factory_;
- rtc::scoped_ptr<VCMJitterBuffer> jitter_buffer_;
-};
-
-
-class TestRunningJitterBuffer : public ::testing::Test {
- protected:
- enum { kDataBufferSize = 10 };
-
- virtual void SetUp() {
- clock_.reset(new SimulatedClock(0));
- max_nack_list_size_ = 150;
- oldest_packet_to_nack_ = 250;
- jitter_buffer_ = new VCMJitterBuffer(
- clock_.get(),
- rtc::scoped_ptr<EventWrapper>(event_factory_.CreateEvent()));
- stream_generator_ = new StreamGenerator(0, clock_->TimeInMilliseconds());
- jitter_buffer_->Start();
- jitter_buffer_->SetNackSettings(max_nack_list_size_,
- oldest_packet_to_nack_, 0);
- memset(data_buffer_, 0, kDataBufferSize);
- }
-
- virtual void TearDown() {
- jitter_buffer_->Stop();
- delete stream_generator_;
- delete jitter_buffer_;
- }
-
- VCMFrameBufferEnum InsertPacketAndPop(int index) {
- VCMPacket packet;
- packet.dataPtr = data_buffer_;
- bool packet_available = stream_generator_->PopPacket(&packet, index);
- EXPECT_TRUE(packet_available);
- if (!packet_available)
- return kGeneralError; // Return here to avoid crashes below.
- bool retransmitted = false;
- return jitter_buffer_->InsertPacket(packet, &retransmitted);
- }
-
- VCMFrameBufferEnum InsertPacket(int index) {
- VCMPacket packet;
- packet.dataPtr = data_buffer_;
- bool packet_available = stream_generator_->GetPacket(&packet, index);
- EXPECT_TRUE(packet_available);
- if (!packet_available)
- return kGeneralError; // Return here to avoid crashes below.
- bool retransmitted = false;
- return jitter_buffer_->InsertPacket(packet, &retransmitted);
- }
-
- VCMFrameBufferEnum InsertFrame(FrameType frame_type) {
- stream_generator_->GenerateFrame(
- frame_type, (frame_type != kEmptyFrame) ? 1 : 0,
- (frame_type == kEmptyFrame) ? 1 : 0, clock_->TimeInMilliseconds());
- VCMFrameBufferEnum ret = InsertPacketAndPop(0);
- clock_->AdvanceTimeMilliseconds(kDefaultFramePeriodMs);
- return ret;
- }
-
- VCMFrameBufferEnum InsertFrames(int num_frames, FrameType frame_type) {
- VCMFrameBufferEnum ret_for_all = kNoError;
- for (int i = 0; i < num_frames; ++i) {
- VCMFrameBufferEnum ret = InsertFrame(frame_type);
- if (ret < kNoError) {
- ret_for_all = ret;
- } else if (ret_for_all >= kNoError) {
- ret_for_all = ret;
- }
- }
- return ret_for_all;
- }
-
- void DropFrame(int num_packets) {
- stream_generator_->GenerateFrame(kVideoFrameDelta, num_packets, 0,
- clock_->TimeInMilliseconds());
- for (int i = 0; i < num_packets; ++i)
- stream_generator_->DropLastPacket();
- clock_->AdvanceTimeMilliseconds(kDefaultFramePeriodMs);
- }
-
- bool DecodeCompleteFrame() {
- uint32_t timestamp = 0;
- bool found_frame = jitter_buffer_->NextCompleteTimestamp(0, &timestamp);
- if (!found_frame)
- return false;
-
- VCMEncodedFrame* frame = jitter_buffer_->ExtractAndSetDecode(timestamp);
- bool ret = (frame != NULL);
- jitter_buffer_->ReleaseFrame(frame);
- return ret;
- }
-
- bool DecodeIncompleteFrame() {
- uint32_t timestamp = 0;
- bool found_frame = jitter_buffer_->NextMaybeIncompleteTimestamp(&timestamp);
- if (!found_frame)
- return false;
- VCMEncodedFrame* frame = jitter_buffer_->ExtractAndSetDecode(timestamp);
- bool ret = (frame != NULL);
- jitter_buffer_->ReleaseFrame(frame);
- return ret;
- }
-
- VCMJitterBuffer* jitter_buffer_;
- StreamGenerator* stream_generator_;
- rtc::scoped_ptr<SimulatedClock> clock_;
- NullEventFactory event_factory_;
- size_t max_nack_list_size_;
- int oldest_packet_to_nack_;
- uint8_t data_buffer_[kDataBufferSize];
-};
-
-class TestJitterBufferNack : public TestRunningJitterBuffer {
- protected:
- virtual void SetUp() {
- TestRunningJitterBuffer::SetUp();
- jitter_buffer_->SetNackMode(kNack, -1, -1);
- }
-
- virtual void TearDown() {
- TestRunningJitterBuffer::TearDown();
- }
-};
-
-TEST_F(TestBasicJitterBuffer, StopRunning) {
- jitter_buffer_->Stop();
- EXPECT_TRUE(NULL == DecodeCompleteFrame());
- EXPECT_TRUE(NULL == DecodeIncompleteFrame());
- jitter_buffer_->Start();
- // Allow selective errors.
- jitter_buffer_->SetDecodeErrorMode(kSelectiveErrors);
-
- // No packets inserted.
- EXPECT_TRUE(NULL == DecodeCompleteFrame());
- EXPECT_TRUE(NULL == DecodeIncompleteFrame());
-
- // Allow decoding with errors.
- jitter_buffer_->SetDecodeErrorMode(kWithErrors);
-
- // No packets inserted.
- EXPECT_TRUE(NULL == DecodeCompleteFrame());
- EXPECT_TRUE(NULL == DecodeIncompleteFrame());
-}
-
-TEST_F(TestBasicJitterBuffer, SinglePacketFrame) {
- // Always start with a complete key frame when not allowing errors.
- jitter_buffer_->SetDecodeErrorMode(kNoErrors);
- packet_->frameType = kVideoFrameKey;
- packet_->isFirstPacket = true;
- packet_->markerBit = true;
- packet_->timestamp += 123 * 90;
-
- // Insert the packet to the jitter buffer and get a frame.
- bool retransmitted = false;
- EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- VCMEncodedFrame* frame_out = DecodeCompleteFrame();
- CheckOutFrame(frame_out, size_, false);
- EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
- jitter_buffer_->ReleaseFrame(frame_out);
-}
-
-TEST_F(TestBasicJitterBuffer, VerifyHistogramStats) {
- test::ClearHistograms();
- // Always start with a complete key frame when not allowing errors.
- jitter_buffer_->SetDecodeErrorMode(kNoErrors);
- packet_->frameType = kVideoFrameKey;
- packet_->isFirstPacket = true;
- packet_->markerBit = true;
- packet_->timestamp += 123 * 90;
-
- // Insert single packet frame to the jitter buffer and get a frame.
- bool retransmitted = false;
- EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- VCMEncodedFrame* frame_out = DecodeCompleteFrame();
- CheckOutFrame(frame_out, size_, false);
- EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
- jitter_buffer_->ReleaseFrame(frame_out);
-
- // Verify that histograms are updated when the jitter buffer is stopped.
- clock_->AdvanceTimeMilliseconds(metrics::kMinRunTimeInSeconds * 1000);
- jitter_buffer_->Stop();
- EXPECT_EQ(0, test::LastHistogramSample(
- "WebRTC.Video.DiscardedPacketsInPercent"));
- EXPECT_EQ(0, test::LastHistogramSample(
- "WebRTC.Video.DuplicatedPacketsInPercent"));
- EXPECT_NE(-1, test::LastHistogramSample(
- "WebRTC.Video.CompleteFramesReceivedPerSecond"));
- EXPECT_EQ(1000, test::LastHistogramSample(
- "WebRTC.Video.KeyFramesReceivedInPermille"));
-
- // Verify that histograms are not updated if stop is called again.
- jitter_buffer_->Stop();
- EXPECT_EQ(1, test::NumHistogramSamples(
- "WebRTC.Video.DiscardedPacketsInPercent"));
- EXPECT_EQ(1, test::NumHistogramSamples(
- "WebRTC.Video.DuplicatedPacketsInPercent"));
- EXPECT_EQ(1, test::NumHistogramSamples(
- "WebRTC.Video.CompleteFramesReceivedPerSecond"));
- EXPECT_EQ(1, test::NumHistogramSamples(
- "WebRTC.Video.KeyFramesReceivedInPermille"));
-}
-
-TEST_F(TestBasicJitterBuffer, DualPacketFrame) {
- packet_->frameType = kVideoFrameKey;
- packet_->isFirstPacket = true;
- packet_->markerBit = false;
-
- bool retransmitted = false;
- EXPECT_EQ(kIncomplete, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- VCMEncodedFrame* frame_out = DecodeCompleteFrame();
- // Should not be complete.
- EXPECT_TRUE(frame_out == NULL);
-
- ++seq_num_;
- packet_->isFirstPacket = false;
- packet_->markerBit = true;
- packet_->seqNum = seq_num_;
-
- EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- frame_out = DecodeCompleteFrame();
- CheckOutFrame(frame_out, 2 * size_, false);
-
- EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
- jitter_buffer_->ReleaseFrame(frame_out);
-}
-
-TEST_F(TestBasicJitterBuffer, 100PacketKeyFrame) {
- packet_->frameType = kVideoFrameKey;
- packet_->isFirstPacket = true;
- packet_->markerBit = false;
-
- bool retransmitted = false;
- EXPECT_EQ(kIncomplete, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- VCMEncodedFrame* frame_out = DecodeCompleteFrame();
-
- // Frame should not be complete.
- EXPECT_TRUE(frame_out == NULL);
-
- // Insert 98 packets.
- int loop = 0;
- do {
- seq_num_++;
- packet_->isFirstPacket = false;
- packet_->markerBit = false;
- packet_->seqNum = seq_num_;
-
- EXPECT_EQ(kIncomplete, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- loop++;
- } while (loop < 98);
-
- // Insert last packet.
- ++seq_num_;
- packet_->isFirstPacket = false;
- packet_->markerBit = true;
- packet_->seqNum = seq_num_;
-
- EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- frame_out = DecodeCompleteFrame();
-
- CheckOutFrame(frame_out, 100 * size_, false);
- EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
- jitter_buffer_->ReleaseFrame(frame_out);
-}
-
-TEST_F(TestBasicJitterBuffer, 100PacketDeltaFrame) {
- // Always start with a complete key frame.
- packet_->frameType = kVideoFrameKey;
- packet_->isFirstPacket = true;
- packet_->markerBit = true;
-
- bool retransmitted = false;
- EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- VCMEncodedFrame* frame_out = DecodeCompleteFrame();
- EXPECT_FALSE(frame_out == NULL);
- jitter_buffer_->ReleaseFrame(frame_out);
-
- ++seq_num_;
- packet_->seqNum = seq_num_;
- packet_->markerBit = false;
- packet_->frameType = kVideoFrameDelta;
- packet_->timestamp += 33 * 90;
-
- EXPECT_EQ(kIncomplete, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- frame_out = DecodeCompleteFrame();
-
- // Frame should not be complete.
- EXPECT_TRUE(frame_out == NULL);
-
- packet_->isFirstPacket = false;
- // Insert 98 packets.
- int loop = 0;
- do {
- ++seq_num_;
- packet_->seqNum = seq_num_;
-
- // Insert a packet into a frame.
- EXPECT_EQ(kIncomplete, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- loop++;
- } while (loop < 98);
-
- // Insert the last packet.
- ++seq_num_;
- packet_->isFirstPacket = false;
- packet_->markerBit = true;
- packet_->seqNum = seq_num_;
-
- EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- frame_out = DecodeCompleteFrame();
-
- CheckOutFrame(frame_out, 100 * size_, false);
- EXPECT_EQ(kVideoFrameDelta, frame_out->FrameType());
- jitter_buffer_->ReleaseFrame(frame_out);
-}
-
-TEST_F(TestBasicJitterBuffer, PacketReorderingReverseOrder) {
- // Insert the "first" packet last.
- seq_num_ += 100;
- packet_->frameType = kVideoFrameKey;
- packet_->isFirstPacket = false;
- packet_->markerBit = true;
- packet_->seqNum = seq_num_;
- packet_->timestamp = timestamp_;
-
- bool retransmitted = false;
- EXPECT_EQ(kIncomplete, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- VCMEncodedFrame* frame_out = DecodeCompleteFrame();
-
- EXPECT_TRUE(frame_out == NULL);
-
- // Insert 98 packets.
- int loop = 0;
- do {
- seq_num_--;
- packet_->isFirstPacket = false;
- packet_->markerBit = false;
- packet_->seqNum = seq_num_;
-
- EXPECT_EQ(kIncomplete, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- loop++;
- } while (loop < 98);
-
- // Insert the last packet.
- seq_num_--;
- packet_->isFirstPacket = true;
- packet_->markerBit = false;
- packet_->seqNum = seq_num_;
-
- EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- frame_out = DecodeCompleteFrame();
-
- CheckOutFrame(frame_out, 100 * size_, false);
-
- EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
- jitter_buffer_->ReleaseFrame(frame_out);
-}
-
-TEST_F(TestBasicJitterBuffer, FrameReordering2Frames2PacketsEach) {
- packet_->frameType = kVideoFrameDelta;
- packet_->isFirstPacket = true;
- packet_->markerBit = false;
-
- bool retransmitted = false;
- EXPECT_EQ(kIncomplete, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- VCMEncodedFrame* frame_out = DecodeCompleteFrame();
-
- EXPECT_TRUE(frame_out == NULL);
-
- seq_num_++;
- packet_->isFirstPacket = false;
- packet_->markerBit = true;
- packet_->seqNum = seq_num_;
-
- EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- // Check that we fail to get a frame since the sequence number is not
- // continuous.
- frame_out = DecodeCompleteFrame();
- EXPECT_TRUE(frame_out == NULL);
-
- seq_num_ -= 3;
- timestamp_ -= 33*90;
- packet_->frameType = kVideoFrameKey;
- packet_->isFirstPacket = true;
- packet_->markerBit = false;
- packet_->seqNum = seq_num_;
- packet_->timestamp = timestamp_;
-
- EXPECT_EQ(kIncomplete, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- frame_out = DecodeCompleteFrame();
-
- // It should not be complete.
- EXPECT_TRUE(frame_out == NULL);
-
- seq_num_++;
- packet_->isFirstPacket = false;
- packet_->markerBit = true;
- packet_->seqNum = seq_num_;
-
- EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- frame_out = DecodeCompleteFrame();
- CheckOutFrame(frame_out, 2 * size_, false);
- EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
- jitter_buffer_->ReleaseFrame(frame_out);
-
- frame_out = DecodeCompleteFrame();
- CheckOutFrame(frame_out, 2 * size_, false);
- EXPECT_EQ(kVideoFrameDelta, frame_out->FrameType());
- jitter_buffer_->ReleaseFrame(frame_out);
-}
-
-TEST_F(TestBasicJitterBuffer, TestReorderingWithPadding) {
- packet_->frameType = kVideoFrameKey;
- packet_->isFirstPacket = true;
- packet_->markerBit = true;
-
- // Send in an initial good packet/frame (Frame A) to start things off.
- bool retransmitted = false;
- EXPECT_EQ(kCompleteSession,
- jitter_buffer_->InsertPacket(*packet_, &retransmitted));
- VCMEncodedFrame* frame_out = DecodeCompleteFrame();
- EXPECT_TRUE(frame_out != NULL);
- jitter_buffer_->ReleaseFrame(frame_out);
-
- // Now send in a complete delta frame (Frame C), but with a sequence number
- // gap. No pic index either, so no temporal scalability cheating :)
- packet_->frameType = kVideoFrameDelta;
- // Leave a gap of 2 sequence numbers and two frames.
- packet_->seqNum = seq_num_ + 3;
- packet_->timestamp = timestamp_ + (66 * 90);
- // Still isFirst = marker = true.
- // Session should be complete (frame is complete), but there's nothing to
- // decode yet.
- EXPECT_EQ(kCompleteSession,
- jitter_buffer_->InsertPacket(*packet_, &retransmitted));
- frame_out = DecodeCompleteFrame();
- EXPECT_TRUE(frame_out == NULL);
-
- // Now send in a complete delta frame (Frame B) that is continuous from A, but
- // doesn't fill the full gap to C. The rest of the gap is going to be padding.
- packet_->seqNum = seq_num_ + 1;
- packet_->timestamp = timestamp_ + (33 * 90);
- // Still isFirst = marker = true.
- EXPECT_EQ(kCompleteSession,
- jitter_buffer_->InsertPacket(*packet_, &retransmitted));
- frame_out = DecodeCompleteFrame();
- EXPECT_TRUE(frame_out != NULL);
- jitter_buffer_->ReleaseFrame(frame_out);
-
- // But Frame C isn't continuous yet.
- frame_out = DecodeCompleteFrame();
- EXPECT_TRUE(frame_out == NULL);
-
- // Add in the padding. These are empty packets (data length is 0) with no
- // marker bit and matching the timestamp of Frame B.
- VCMPacket empty_packet(data_, 0, seq_num_ + 2, timestamp_ + (33 * 90), false);
- EXPECT_EQ(kOldPacket,
- jitter_buffer_->InsertPacket(empty_packet, &retransmitted));
- empty_packet.seqNum += 1;
- EXPECT_EQ(kOldPacket,
- jitter_buffer_->InsertPacket(empty_packet, &retransmitted));
-
- // But now Frame C should be ready!
- frame_out = DecodeCompleteFrame();
- EXPECT_TRUE(frame_out != NULL);
- jitter_buffer_->ReleaseFrame(frame_out);
-}
-
-TEST_F(TestBasicJitterBuffer, DuplicatePackets) {
- packet_->frameType = kVideoFrameKey;
- packet_->isFirstPacket = true;
- packet_->markerBit = false;
- packet_->seqNum = seq_num_;
- packet_->timestamp = timestamp_;
- EXPECT_EQ(0, jitter_buffer_->num_packets());
- EXPECT_EQ(0, jitter_buffer_->num_duplicated_packets());
-
- bool retransmitted = false;
- EXPECT_EQ(kIncomplete, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- VCMEncodedFrame* frame_out = DecodeCompleteFrame();
-
- EXPECT_TRUE(frame_out == NULL);
- EXPECT_EQ(1, jitter_buffer_->num_packets());
- EXPECT_EQ(0, jitter_buffer_->num_duplicated_packets());
-
- // Insert a packet into a frame.
- EXPECT_EQ(kDuplicatePacket, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- EXPECT_EQ(2, jitter_buffer_->num_packets());
- EXPECT_EQ(1, jitter_buffer_->num_duplicated_packets());
-
- seq_num_++;
- packet_->seqNum = seq_num_;
- packet_->markerBit = true;
- packet_->isFirstPacket = false;
-
- EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- frame_out = DecodeCompleteFrame();
- ASSERT_TRUE(frame_out != NULL);
- CheckOutFrame(frame_out, 2 * size_, false);
-
- EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
- EXPECT_EQ(3, jitter_buffer_->num_packets());
- EXPECT_EQ(1, jitter_buffer_->num_duplicated_packets());
- jitter_buffer_->ReleaseFrame(frame_out);
-}
-
-TEST_F(TestBasicJitterBuffer, DuplicatePreviousDeltaFramePacket) {
- packet_->frameType = kVideoFrameKey;
- packet_->isFirstPacket = true;
- packet_->markerBit = true;
- packet_->seqNum = seq_num_;
- packet_->timestamp = timestamp_;
- jitter_buffer_->SetDecodeErrorMode(kNoErrors);
- EXPECT_EQ(0, jitter_buffer_->num_packets());
- EXPECT_EQ(0, jitter_buffer_->num_duplicated_packets());
-
- bool retransmitted = false;
- // Insert first complete frame.
- EXPECT_EQ(kCompleteSession,
- jitter_buffer_->InsertPacket(*packet_, &retransmitted));
-
- VCMEncodedFrame* frame_out = DecodeCompleteFrame();
- ASSERT_TRUE(frame_out != NULL);
- CheckOutFrame(frame_out, size_, false);
- EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
- jitter_buffer_->ReleaseFrame(frame_out);
-
- // Insert 3 delta frames.
- for (uint16_t i = 1; i <= 3; ++i) {
- packet_->seqNum = seq_num_ + i;
- packet_->timestamp = timestamp_ + (i * 33) * 90;
- packet_->frameType = kVideoFrameDelta;
- EXPECT_EQ(kCompleteSession,
- jitter_buffer_->InsertPacket(*packet_, &retransmitted));
- EXPECT_EQ(i + 1, jitter_buffer_->num_packets());
- EXPECT_EQ(0, jitter_buffer_->num_duplicated_packets());
- }
-
- // Retransmit second delta frame.
- packet_->seqNum = seq_num_ + 2;
- packet_->timestamp = timestamp_ + 66 * 90;
-
- EXPECT_EQ(kDuplicatePacket,
- jitter_buffer_->InsertPacket(*packet_, &retransmitted));
-
- EXPECT_EQ(5, jitter_buffer_->num_packets());
- EXPECT_EQ(1, jitter_buffer_->num_duplicated_packets());
-
- // Should be able to decode 3 delta frames, key frame already decoded.
- for (size_t i = 0; i < 3; ++i) {
- frame_out = DecodeCompleteFrame();
- ASSERT_TRUE(frame_out != NULL);
- CheckOutFrame(frame_out, size_, false);
- EXPECT_EQ(kVideoFrameDelta, frame_out->FrameType());
- jitter_buffer_->ReleaseFrame(frame_out);
- }
-}
-
-TEST_F(TestBasicJitterBuffer, TestSkipForwardVp9) {
- // Verify that JB skips forward to next base layer frame.
- // -------------------------------------------------
- // | 65485 | 65486 | 65487 | 65488 | 65489 | ...
- // | pid:5 | pid:6 | pid:7 | pid:8 | pid:9 | ...
- // | tid:0 | tid:2 | tid:1 | tid:2 | tid:0 | ...
- // | ss | x | x | x | |
- // -------------------------------------------------
- // |<----------tl0idx:200--------->|<---tl0idx:201---
-
- bool re = false;
- packet_->codec = kVideoCodecVP9;
- packet_->codecSpecificHeader.codec = kRtpVideoVp9;
- packet_->isFirstPacket = true;
- packet_->markerBit = true;
- packet_->codecSpecificHeader.codecHeader.VP9.flexible_mode = false;
- packet_->codecSpecificHeader.codecHeader.VP9.spatial_idx = 0;
- packet_->codecSpecificHeader.codecHeader.VP9.beginning_of_frame = true;
- packet_->codecSpecificHeader.codecHeader.VP9.end_of_frame = true;
- packet_->codecSpecificHeader.codecHeader.VP9.temporal_idx = kNoTemporalIdx;
- packet_->codecSpecificHeader.codecHeader.VP9.temporal_up_switch = false;
-
- packet_->seqNum = 65485;
- packet_->timestamp = 1000;
- packet_->frameType = kVideoFrameKey;
- packet_->codecSpecificHeader.codecHeader.VP9.picture_id = 5;
- packet_->codecSpecificHeader.codecHeader.VP9.tl0_pic_idx = 200;
- packet_->codecSpecificHeader.codecHeader.VP9.gof_idx = 0;
- packet_->codecSpecificHeader.codecHeader.VP9.ss_data_available = true;
- packet_->codecSpecificHeader.codecHeader.VP9.gof.SetGofInfoVP9(
- kTemporalStructureMode3); // kTemporalStructureMode3: 0-2-1-2..
- EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_, &re));
-
- // Insert next temporal layer 0.
- packet_->seqNum = 65489;
- packet_->timestamp = 13000;
- packet_->frameType = kVideoFrameDelta;
- packet_->codecSpecificHeader.codecHeader.VP9.picture_id = 9;
- packet_->codecSpecificHeader.codecHeader.VP9.tl0_pic_idx = 201;
- packet_->codecSpecificHeader.codecHeader.VP9.gof_idx = 0;
- packet_->codecSpecificHeader.codecHeader.VP9.ss_data_available = false;
- EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_, &re));
-
- VCMEncodedFrame* frame_out = DecodeCompleteFrame();
- EXPECT_EQ(1000U, frame_out->TimeStamp());
- EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
- jitter_buffer_->ReleaseFrame(frame_out);
-
- frame_out = DecodeCompleteFrame();
- EXPECT_EQ(13000U, frame_out->TimeStamp());
- EXPECT_EQ(kVideoFrameDelta, frame_out->FrameType());
- jitter_buffer_->ReleaseFrame(frame_out);
-}
-
-TEST_F(TestBasicJitterBuffer, ReorderedVp9SsData_3TlLayers) {
- // Verify that frames are updated with SS data when SS packet is reordered.
- // --------------------------------
- // | 65486 | 65487 | 65485 |...
- // | pid:6 | pid:7 | pid:5 |...
- // | tid:2 | tid:1 | tid:0 |...
- // | | | ss |
- // --------------------------------
- // |<--------tl0idx:200--------->|
-
- bool re = false;
- packet_->codec = kVideoCodecVP9;
- packet_->codecSpecificHeader.codec = kRtpVideoVp9;
- packet_->isFirstPacket = true;
- packet_->markerBit = true;
- packet_->codecSpecificHeader.codecHeader.VP9.flexible_mode = false;
- packet_->codecSpecificHeader.codecHeader.VP9.spatial_idx = 0;
- packet_->codecSpecificHeader.codecHeader.VP9.beginning_of_frame = true;
- packet_->codecSpecificHeader.codecHeader.VP9.end_of_frame = true;
- packet_->codecSpecificHeader.codecHeader.VP9.temporal_idx = kNoTemporalIdx;
- packet_->codecSpecificHeader.codecHeader.VP9.temporal_up_switch = false;
- packet_->codecSpecificHeader.codecHeader.VP9.tl0_pic_idx = 200;
-
- packet_->seqNum = 65486;
- packet_->timestamp = 6000;
- packet_->frameType = kVideoFrameDelta;
- packet_->codecSpecificHeader.codecHeader.VP9.picture_id = 6;
- packet_->codecSpecificHeader.codecHeader.VP9.gof_idx = 1;
- EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_, &re));
-
- packet_->seqNum = 65487;
- packet_->timestamp = 9000;
- packet_->frameType = kVideoFrameDelta;
- packet_->codecSpecificHeader.codecHeader.VP9.picture_id = 7;
- packet_->codecSpecificHeader.codecHeader.VP9.gof_idx = 2;
- EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_, &re));
-
- // Insert first frame with SS data.
- packet_->seqNum = 65485;
- packet_->timestamp = 3000;
- packet_->frameType = kVideoFrameKey;
- packet_->width = 352;
- packet_->height = 288;
- packet_->codecSpecificHeader.codecHeader.VP9.picture_id = 5;
- packet_->codecSpecificHeader.codecHeader.VP9.gof_idx = 0;
- packet_->codecSpecificHeader.codecHeader.VP9.ss_data_available = true;
- packet_->codecSpecificHeader.codecHeader.VP9.gof.SetGofInfoVP9(
- kTemporalStructureMode3); // kTemporalStructureMode3: 0-2-1-2..
- EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_, &re));
-
- VCMEncodedFrame* frame_out = DecodeCompleteFrame();
- EXPECT_EQ(3000U, frame_out->TimeStamp());
- EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
- EXPECT_EQ(0, frame_out->CodecSpecific()->codecSpecific.VP9.temporal_idx);
- EXPECT_FALSE(
- frame_out->CodecSpecific()->codecSpecific.VP9.temporal_up_switch);
- jitter_buffer_->ReleaseFrame(frame_out);
-
- frame_out = DecodeCompleteFrame();
- EXPECT_EQ(6000U, frame_out->TimeStamp());
- EXPECT_EQ(kVideoFrameDelta, frame_out->FrameType());
- EXPECT_EQ(2, frame_out->CodecSpecific()->codecSpecific.VP9.temporal_idx);
- EXPECT_TRUE(frame_out->CodecSpecific()->codecSpecific.VP9.temporal_up_switch);
- jitter_buffer_->ReleaseFrame(frame_out);
-
- frame_out = DecodeCompleteFrame();
- EXPECT_EQ(9000U, frame_out->TimeStamp());
- EXPECT_EQ(kVideoFrameDelta, frame_out->FrameType());
- EXPECT_EQ(1, frame_out->CodecSpecific()->codecSpecific.VP9.temporal_idx);
- EXPECT_TRUE(frame_out->CodecSpecific()->codecSpecific.VP9.temporal_up_switch);
- jitter_buffer_->ReleaseFrame(frame_out);
-}
-
-TEST_F(TestBasicJitterBuffer, ReorderedVp9SsData_2Tl2SLayers) {
- // Verify that frames are updated with SS data when SS packet is reordered.
- // -----------------------------------------
- // | 65486 | 65487 | 65485 | 65484 |...
- // | pid:6 | pid:6 | pid:5 | pid:5 |...
- // | tid:1 | tid:1 | tid:0 | tid:0 |...
- // | sid:0 | sid:1 | sid:1 | sid:0 |...
- // | t:6000 | t:6000 | t:3000 | t:3000 |
- // | | | | ss |
- // -----------------------------------------
- // |<-----------tl0idx:200------------>|
-
- bool re = false;
- packet_->codec = kVideoCodecVP9;
- packet_->codecSpecificHeader.codec = kRtpVideoVp9;
- packet_->codecSpecificHeader.codecHeader.VP9.flexible_mode = false;
- packet_->codecSpecificHeader.codecHeader.VP9.beginning_of_frame = true;
- packet_->codecSpecificHeader.codecHeader.VP9.end_of_frame = true;
- packet_->codecSpecificHeader.codecHeader.VP9.temporal_idx = kNoTemporalIdx;
- packet_->codecSpecificHeader.codecHeader.VP9.temporal_up_switch = false;
- packet_->codecSpecificHeader.codecHeader.VP9.tl0_pic_idx = 200;
-
- packet_->isFirstPacket = true;
- packet_->markerBit = false;
- packet_->seqNum = 65486;
- packet_->timestamp = 6000;
- packet_->frameType = kVideoFrameDelta;
- packet_->codecSpecificHeader.codecHeader.VP9.spatial_idx = 0;
- packet_->codecSpecificHeader.codecHeader.VP9.picture_id = 6;
- packet_->codecSpecificHeader.codecHeader.VP9.gof_idx = 1;
- EXPECT_EQ(kIncomplete, jitter_buffer_->InsertPacket(*packet_, &re));
-
- packet_->isFirstPacket = false;
- packet_->markerBit = true;
- packet_->seqNum = 65487;
- packet_->frameType = kVideoFrameDelta;
- packet_->codecSpecificHeader.codecHeader.VP9.spatial_idx = 1;
- packet_->codecSpecificHeader.codecHeader.VP9.picture_id = 6;
- packet_->codecSpecificHeader.codecHeader.VP9.gof_idx = 1;
- EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_, &re));
-
- packet_->isFirstPacket = false;
- packet_->markerBit = true;
- packet_->seqNum = 65485;
- packet_->timestamp = 3000;
- packet_->frameType = kVideoFrameKey;
- packet_->codecSpecificHeader.codecHeader.VP9.spatial_idx = 1;
- packet_->codecSpecificHeader.codecHeader.VP9.picture_id = 5;
- packet_->codecSpecificHeader.codecHeader.VP9.gof_idx = 0;
- EXPECT_EQ(kIncomplete, jitter_buffer_->InsertPacket(*packet_, &re));
-
- // Insert first frame with SS data.
- packet_->isFirstPacket = true;
- packet_->markerBit = false;
- packet_->seqNum = 65484;
- packet_->frameType = kVideoFrameKey;
- packet_->width = 352;
- packet_->height = 288;
- packet_->codecSpecificHeader.codecHeader.VP9.spatial_idx = 0;
- packet_->codecSpecificHeader.codecHeader.VP9.picture_id = 5;
- packet_->codecSpecificHeader.codecHeader.VP9.gof_idx = 0;
- packet_->codecSpecificHeader.codecHeader.VP9.ss_data_available = true;
- packet_->codecSpecificHeader.codecHeader.VP9.gof.SetGofInfoVP9(
- kTemporalStructureMode2); // kTemporalStructureMode2: 0-1-0-1..
- EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_, &re));
-
- VCMEncodedFrame* frame_out = DecodeCompleteFrame();
- EXPECT_EQ(3000U, frame_out->TimeStamp());
- EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
- EXPECT_EQ(0, frame_out->CodecSpecific()->codecSpecific.VP9.temporal_idx);
- EXPECT_FALSE(
- frame_out->CodecSpecific()->codecSpecific.VP9.temporal_up_switch);
- jitter_buffer_->ReleaseFrame(frame_out);
-
- frame_out = DecodeCompleteFrame();
- EXPECT_EQ(6000U, frame_out->TimeStamp());
- EXPECT_EQ(kVideoFrameDelta, frame_out->FrameType());
- EXPECT_EQ(1, frame_out->CodecSpecific()->codecSpecific.VP9.temporal_idx);
- EXPECT_TRUE(frame_out->CodecSpecific()->codecSpecific.VP9.temporal_up_switch);
- jitter_buffer_->ReleaseFrame(frame_out);
-}
-
-TEST_F(TestBasicJitterBuffer, H264InsertStartCode) {
- packet_->frameType = kVideoFrameKey;
- packet_->isFirstPacket = true;
- packet_->markerBit = false;
- packet_->seqNum = seq_num_;
- packet_->timestamp = timestamp_;
- packet_->insertStartCode = true;
-
- bool retransmitted = false;
- EXPECT_EQ(kIncomplete, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- VCMEncodedFrame* frame_out = DecodeCompleteFrame();
-
- // Frame should not be complete.
- EXPECT_TRUE(frame_out == NULL);
-
- seq_num_++;
- packet_->isFirstPacket = false;
- packet_->markerBit = true;
- packet_->seqNum = seq_num_;
-
- EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- frame_out = DecodeCompleteFrame();
- CheckOutFrame(frame_out, size_ * 2 + 4 * 2, true);
- EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
- jitter_buffer_->ReleaseFrame(frame_out);
-}
-
-// Test threshold conditions of decodable state.
-TEST_F(TestBasicJitterBuffer, PacketLossWithSelectiveErrorsThresholdCheck) {
- jitter_buffer_->SetDecodeErrorMode(kSelectiveErrors);
- // Always start with a key frame. Use 10 packets to test Decodable State
- // boundaries.
- packet_->frameType = kVideoFrameKey;
- packet_->isFirstPacket = true;
- packet_->markerBit = false;
- packet_->seqNum = seq_num_;
- packet_->timestamp = timestamp_;
-
- bool retransmitted = false;
- EXPECT_EQ(kIncomplete, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- uint32_t timestamp = 0;
- EXPECT_FALSE(jitter_buffer_->NextCompleteTimestamp(0, &timestamp));
- EXPECT_FALSE(jitter_buffer_->NextMaybeIncompleteTimestamp(&timestamp));
-
- packet_->isFirstPacket = false;
- for (int i = 1; i < 9; ++i) {
- packet_->seqNum++;
- EXPECT_EQ(kIncomplete, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- EXPECT_FALSE(jitter_buffer_->NextCompleteTimestamp(0, &timestamp));
- EXPECT_FALSE(jitter_buffer_->NextMaybeIncompleteTimestamp(&timestamp));
- }
-
- // Last packet.
- packet_->markerBit = true;
- packet_->seqNum++;
-
- EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- VCMEncodedFrame* frame_out = DecodeCompleteFrame();
- CheckOutFrame(frame_out, 10 * size_, false);
- EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
- jitter_buffer_->ReleaseFrame(frame_out);
-
- // An incomplete frame can only be decoded once a subsequent frame has begun
- // to arrive. Insert a packet in a distant frame for this purpose.
- packet_->frameType = kVideoFrameDelta;
- packet_->isFirstPacket = true;
- packet_->markerBit = false;
- packet_->seqNum += 100;
- packet_->timestamp += 33 * 90 * 8;
-
- EXPECT_EQ(kDecodableSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- EXPECT_FALSE(jitter_buffer_->NextCompleteTimestamp(0, &timestamp));
- EXPECT_FALSE(jitter_buffer_->NextMaybeIncompleteTimestamp(&timestamp));
-
- // Insert second frame
- packet_->seqNum -= 99;
- packet_->timestamp -= 33 * 90 * 7;
-
- EXPECT_EQ(kDecodableSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- EXPECT_FALSE(jitter_buffer_->NextCompleteTimestamp(0, &timestamp));
- EXPECT_TRUE(jitter_buffer_->NextMaybeIncompleteTimestamp(&timestamp));
-
- packet_->isFirstPacket = false;
- for (int i = 1; i < 8; ++i) {
- packet_->seqNum++;
- EXPECT_EQ(kDecodableSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- EXPECT_FALSE(jitter_buffer_->NextCompleteTimestamp(0, &timestamp));
- EXPECT_TRUE(jitter_buffer_->NextMaybeIncompleteTimestamp(&timestamp));
- }
-
- packet_->seqNum++;
- EXPECT_EQ(kDecodableSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- EXPECT_FALSE(jitter_buffer_->NextCompleteTimestamp(0, &timestamp));
- EXPECT_TRUE(jitter_buffer_->NextMaybeIncompleteTimestamp(&timestamp));
-
- frame_out = DecodeIncompleteFrame();
- ASSERT_FALSE(NULL == frame_out);
- CheckOutFrame(frame_out, 9 * size_, false);
- EXPECT_EQ(kVideoFrameDelta, frame_out->FrameType());
- jitter_buffer_->ReleaseFrame(frame_out);
-
- packet_->markerBit = true;
- packet_->seqNum++;
- EXPECT_EQ(kOldPacket, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-}
-
-// Make sure first packet is present before a frame can be decoded.
-TEST_F(TestBasicJitterBuffer, PacketLossWithSelectiveErrorsIncompleteKey) {
- jitter_buffer_->SetDecodeErrorMode(kSelectiveErrors);
- // Always start with a key frame.
- packet_->frameType = kVideoFrameKey;
- packet_->isFirstPacket = true;
- packet_->markerBit = true;
- packet_->seqNum = seq_num_;
- packet_->timestamp = timestamp_;
-
- bool retransmitted = false;
- EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- VCMEncodedFrame* frame_out = DecodeCompleteFrame();
- CheckOutFrame(frame_out, size_, false);
- EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
- jitter_buffer_->ReleaseFrame(frame_out);
-
- // An incomplete frame can only be decoded once a subsequent frame has begun
- // to arrive. Insert a packet in a distant frame for this purpose.
- packet_->frameType = kVideoFrameDelta;
- packet_->isFirstPacket = false;
- packet_->markerBit = false;
- packet_->seqNum += 100;
- packet_->timestamp += 33*90*8;
- EXPECT_EQ(kIncomplete, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- uint32_t timestamp;
- EXPECT_FALSE(jitter_buffer_->NextCompleteTimestamp(0, &timestamp));
- EXPECT_FALSE(jitter_buffer_->NextMaybeIncompleteTimestamp(&timestamp));
-
- // Insert second frame - an incomplete key frame.
- packet_->frameType = kVideoFrameKey;
- packet_->isFirstPacket = true;
- packet_->seqNum -= 99;
- packet_->timestamp -= 33*90*7;
-
- EXPECT_EQ(kIncomplete, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- EXPECT_FALSE(jitter_buffer_->NextCompleteTimestamp(0, &timestamp));
- EXPECT_FALSE(jitter_buffer_->NextMaybeIncompleteTimestamp(&timestamp));
-
- // Insert a few more packets. Make sure we're waiting for the key frame to be
- // complete.
- packet_->isFirstPacket = false;
- for (int i = 1; i < 5; ++i) {
- packet_->seqNum++;
- EXPECT_EQ(kIncomplete, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- EXPECT_FALSE(jitter_buffer_->NextCompleteTimestamp(0, &timestamp));
- EXPECT_FALSE(jitter_buffer_->NextMaybeIncompleteTimestamp(&timestamp));
- }
-
- // Complete key frame.
- packet_->markerBit = true;
- packet_->seqNum++;
- EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- frame_out = DecodeCompleteFrame();
- CheckOutFrame(frame_out, 6 * size_, false);
- EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
- jitter_buffer_->ReleaseFrame(frame_out);
-}
-
-// Make sure first packet is present before a frame can be decoded.
-TEST_F(TestBasicJitterBuffer, PacketLossWithSelectiveErrorsMissingFirstPacket) {
- jitter_buffer_->SetDecodeErrorMode(kSelectiveErrors);
- // Always start with a key frame.
- packet_->frameType = kVideoFrameKey;
- packet_->isFirstPacket = true;
- packet_->markerBit = true;
- packet_->seqNum = seq_num_;
- packet_->timestamp = timestamp_;
-
- bool retransmitted = false;
- EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- VCMEncodedFrame* frame_out = DecodeCompleteFrame();
- CheckOutFrame(frame_out, size_, false);
- EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
- jitter_buffer_->ReleaseFrame(frame_out);
-
- // An incomplete frame can only be decoded once a subsequent frame has begun
- // to arrive. Insert a packet in a distant frame for this purpose.
- packet_->frameType = kVideoFrameDelta;
- packet_->isFirstPacket = false;
- packet_->markerBit = false;
- packet_->seqNum += 100;
- packet_->timestamp += 33*90*8;
- EXPECT_EQ(kIncomplete, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- uint32_t timestamp;
- EXPECT_FALSE(jitter_buffer_->NextCompleteTimestamp(0, &timestamp));
- EXPECT_FALSE(jitter_buffer_->NextMaybeIncompleteTimestamp(&timestamp));
-
- // Insert second frame with the first packet missing. Make sure we're waiting
- // for the key frame to be complete.
- packet_->seqNum -= 98;
- packet_->timestamp -= 33*90*7;
-
- EXPECT_EQ(kIncomplete, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- EXPECT_FALSE(jitter_buffer_->NextCompleteTimestamp(0, &timestamp));
- EXPECT_FALSE(jitter_buffer_->NextMaybeIncompleteTimestamp(&timestamp));
-
- for (int i = 0; i < 5; ++i) {
- packet_->seqNum++;
- EXPECT_EQ(kIncomplete, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- EXPECT_FALSE(jitter_buffer_->NextCompleteTimestamp(0, &timestamp));
- EXPECT_FALSE(jitter_buffer_->NextMaybeIncompleteTimestamp(&timestamp));
- }
-
- // Add first packet. Frame should now be decodable, but incomplete.
- packet_->isFirstPacket = true;
- packet_->seqNum -= 6;
- EXPECT_EQ(kDecodableSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- EXPECT_FALSE(jitter_buffer_->NextCompleteTimestamp(0, &timestamp));
- EXPECT_TRUE(jitter_buffer_->NextMaybeIncompleteTimestamp(&timestamp));
-
- frame_out = DecodeIncompleteFrame();
- CheckOutFrame(frame_out, 7 * size_, false);
- EXPECT_EQ(kVideoFrameDelta, frame_out->FrameType());
- jitter_buffer_->ReleaseFrame(frame_out);
-}
-
-TEST_F(TestBasicJitterBuffer, DiscontinuousStreamWhenDecodingWithErrors) {
- // Will use one packet per frame.
- jitter_buffer_->SetDecodeErrorMode(kWithErrors);
- packet_->frameType = kVideoFrameKey;
- packet_->isFirstPacket = true;
- packet_->markerBit = true;
- packet_->seqNum = seq_num_;
- packet_->timestamp = timestamp_;
- bool retransmitted = false;
- EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- uint32_t next_timestamp;
- EXPECT_TRUE(jitter_buffer_->NextCompleteTimestamp(0, &next_timestamp));
- EXPECT_EQ(packet_->timestamp, next_timestamp);
- VCMEncodedFrame* frame = jitter_buffer_->ExtractAndSetDecode(next_timestamp);
- EXPECT_TRUE(frame != NULL);
- jitter_buffer_->ReleaseFrame(frame);
-
- // Drop a complete frame.
- timestamp_ += 2 * 33 * 90;
- seq_num_ += 2;
- packet_->frameType = kVideoFrameDelta;
- packet_->isFirstPacket = true;
- packet_->markerBit = false;
- packet_->seqNum = seq_num_;
- packet_->timestamp = timestamp_;
- EXPECT_EQ(kDecodableSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- // Insert a packet (so the previous one will be released).
- timestamp_ += 33 * 90;
- seq_num_ += 2;
- packet_->frameType = kVideoFrameDelta;
- packet_->isFirstPacket = true;
- packet_->markerBit = false;
- packet_->seqNum = seq_num_;
- packet_->timestamp = timestamp_;
- EXPECT_EQ(kDecodableSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- EXPECT_FALSE(jitter_buffer_->NextCompleteTimestamp(0, &next_timestamp));
- EXPECT_TRUE(jitter_buffer_->NextMaybeIncompleteTimestamp(&next_timestamp));
- EXPECT_EQ(packet_->timestamp - 33 * 90, next_timestamp);
-}
-
-TEST_F(TestBasicJitterBuffer, PacketLoss) {
- // Verify the missing-packets and not-decodable-packets statistics.
- // Insert 10 frames, each consisting of 4 packets, and remove one packet
- // from each of them. The last packet is an empty (non-media) packet.
-
- // Select a start seqNum which triggers a difficult wrap situation.
- // The JB will only output (incomplete) frames if the next one has started
- // to arrive. Start by inserting one frame (key).
- jitter_buffer_->SetDecodeErrorMode(kWithErrors);
- seq_num_ = 0xffff - 4;
- seq_num_++;
- packet_->frameType = kVideoFrameKey;
- packet_->isFirstPacket = true;
- packet_->markerBit = false;
- packet_->seqNum = seq_num_;
- packet_->timestamp = timestamp_;
- packet_->completeNALU = kNaluStart;
-
- bool retransmitted = false;
- EXPECT_EQ(kDecodableSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- for (int i = 0; i < 11; ++i) {
- webrtc::FrameType frametype = kVideoFrameDelta;
- seq_num_++;
- timestamp_ += 33*90;
- packet_->frameType = frametype;
- packet_->isFirstPacket = true;
- packet_->markerBit = false;
- packet_->seqNum = seq_num_;
- packet_->timestamp = timestamp_;
- packet_->completeNALU = kNaluStart;
-
- EXPECT_EQ(kDecodableSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- VCMEncodedFrame* frame_out = DecodeCompleteFrame();
-
- // Should not be complete.
- EXPECT_TRUE(frame_out == NULL);
-
- seq_num_ += 2;
- packet_->isFirstPacket = false;
- packet_->markerBit = true;
- packet_->seqNum = seq_num_;
- packet_->completeNALU = kNaluEnd;
-
- EXPECT_EQ(jitter_buffer_->InsertPacket(*packet_, &retransmitted),
- kDecodableSession);
-
- // Insert an empty (non-media) packet.
- seq_num_++;
- packet_->isFirstPacket = false;
- packet_->markerBit = false;
- packet_->seqNum = seq_num_;
- packet_->completeNALU = kNaluEnd;
- packet_->frameType = kEmptyFrame;
-
- EXPECT_EQ(jitter_buffer_->InsertPacket(*packet_, &retransmitted),
- kDecodableSession);
- frame_out = DecodeIncompleteFrame();
-
- // One of the packets has been discarded by the jitter buffer.
- // Last frame can't be extracted yet.
- if (i < 10) {
- CheckOutFrame(frame_out, size_, false);
-
- if (i == 0) {
- EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
- } else {
- EXPECT_EQ(frametype, frame_out->FrameType());
- }
- EXPECT_FALSE(frame_out->Complete());
- EXPECT_FALSE(frame_out->MissingFrame());
- }
-
- jitter_buffer_->ReleaseFrame(frame_out);
- }
-
- // Insert 3 old packets and verify that we have 3 discarded packets.
- // Adjust the timestamp to match the latest actually decoded timestamp.
- timestamp_ -= 33 * 90;
- packet_->timestamp = timestamp_ - 1000;
-
- EXPECT_EQ(kOldPacket, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- packet_->timestamp = timestamp_ - 500;
-
- EXPECT_EQ(kOldPacket, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- packet_->timestamp = timestamp_ - 100;
-
- EXPECT_EQ(kOldPacket, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- EXPECT_EQ(3, jitter_buffer_->num_discarded_packets());
-
- jitter_buffer_->Flush();
-
- // This statistic shouldn't be reset by a flush.
- EXPECT_EQ(3, jitter_buffer_->num_discarded_packets());
-}
-
-TEST_F(TestBasicJitterBuffer, DeltaFrame100PacketsWithSeqNumWrap) {
- seq_num_ = 0xfff0;
- packet_->frameType = kVideoFrameKey;
- packet_->isFirstPacket = true;
- packet_->markerBit = false;
- packet_->seqNum = seq_num_;
- packet_->timestamp = timestamp_;
-
- bool retransmitted = false;
- EXPECT_EQ(kIncomplete, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- VCMEncodedFrame* frame_out = DecodeCompleteFrame();
-
- EXPECT_TRUE(frame_out == NULL);
-
- int loop = 0;
- do {
- seq_num_++;
- packet_->isFirstPacket = false;
- packet_->markerBit = false;
- packet_->seqNum = seq_num_;
-
- EXPECT_EQ(kIncomplete, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- frame_out = DecodeCompleteFrame();
-
- EXPECT_TRUE(frame_out == NULL);
-
- loop++;
- } while (loop < 98);
-
- seq_num_++;
- packet_->isFirstPacket = false;
- packet_->markerBit = true;
- packet_->seqNum = seq_num_;
-
- EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- frame_out = DecodeCompleteFrame();
-
- CheckOutFrame(frame_out, 100 * size_, false);
-
- EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
- jitter_buffer_->ReleaseFrame(frame_out);
-}
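-
-// The wrap handling exercised above reduces to serial-number arithmetic on
-// uint16_t: a sketch (hypothetical helper) showing why e.g. 0x0005 counts as
-// newer than 0xfff0 once the sequence number wraps.
-static bool SeqNumNewerThan(uint16_t a, uint16_t b) {
-  // Unsigned subtraction wraps mod 2^16; the cast maps the circular distance
-  // into [-32768, 32767], so small forward distances come out positive.
-  return static_cast<int16_t>(a - b) > 0;
-}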
-
-TEST_F(TestBasicJitterBuffer, PacketReorderingReverseWithNegSeqNumWrap) {
- // Insert "first" packet last seqnum.
- seq_num_ = 10;
- packet_->frameType = kVideoFrameKey;
- packet_->isFirstPacket = false;
- packet_->markerBit = true;
- packet_->seqNum = seq_num_;
-
- bool retransmitted = false;
- EXPECT_EQ(kIncomplete, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- VCMEncodedFrame* frame_out = DecodeCompleteFrame();
-
- // Should not be complete.
- EXPECT_TRUE(frame_out == NULL);
-
- // Insert 98 packets.
- int loop = 0;
- do {
- seq_num_--;
- packet_->isFirstPacket = false;
- packet_->markerBit = false;
- packet_->seqNum = seq_num_;
-
- EXPECT_EQ(kIncomplete, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- frame_out = DecodeCompleteFrame();
-
- EXPECT_TRUE(frame_out == NULL);
-
- loop++;
- } while (loop < 98);
-
- // Insert last packet.
- seq_num_--;
- packet_->isFirstPacket = true;
- packet_->markerBit = false;
- packet_->seqNum = seq_num_;
-
- EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- frame_out = DecodeCompleteFrame();
- CheckOutFrame(frame_out, 100 * size_, false);
- EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
- jitter_buffer_->ReleaseFrame(frame_out);
-}
-
-TEST_F(TestBasicJitterBuffer, TestInsertOldFrame) {
- // ------- -------
- // | 2 | | 1 |
- // ------- -------
- // t = 3000 t = 2000
- seq_num_ = 2;
- timestamp_ = 3000;
- packet_->frameType = kVideoFrameKey;
- packet_->isFirstPacket = true;
- packet_->markerBit = true;
- packet_->timestamp = timestamp_;
- packet_->seqNum = seq_num_;
-
- bool retransmitted = false;
- EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- VCMEncodedFrame* frame_out = DecodeCompleteFrame();
- EXPECT_EQ(3000u, frame_out->TimeStamp());
- CheckOutFrame(frame_out, size_, false);
- EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
- jitter_buffer_->ReleaseFrame(frame_out);
-
- seq_num_--;
- timestamp_ = 2000;
- packet_->frameType = kVideoFrameDelta;
- packet_->isFirstPacket = true;
- packet_->markerBit = true;
- packet_->seqNum = seq_num_;
- packet_->timestamp = timestamp_;
-
- EXPECT_EQ(kOldPacket, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-}
-
-TEST_F(TestBasicJitterBuffer, TestInsertOldFrameWithSeqNumWrap) {
- // ------- -------
- // | 2 | | 1 |
- // ------- -------
- // t = 3000 t = 0xffffff00
-
- seq_num_ = 2;
- timestamp_ = 3000;
- packet_->frameType = kVideoFrameKey;
- packet_->isFirstPacket = true;
- packet_->markerBit = true;
- packet_->seqNum = seq_num_;
- packet_->timestamp = timestamp_;
-
- bool retransmitted = false;
- EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- VCMEncodedFrame* frame_out = DecodeCompleteFrame();
- EXPECT_EQ(timestamp_, frame_out->TimeStamp());
-
- CheckOutFrame(frame_out, size_, false);
-
- EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
-
- jitter_buffer_->ReleaseFrame(frame_out);
-
- seq_num_--;
- timestamp_ = 0xffffff00;
- packet_->frameType = kVideoFrameDelta;
- packet_->isFirstPacket = true;
- packet_->markerBit = true;
- packet_->seqNum = seq_num_;
- packet_->timestamp = timestamp_;
-
- // This timestamp is old.
- EXPECT_EQ(kOldPacket, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-}
-
-TEST_F(TestBasicJitterBuffer, TimestampWrap) {
- // --------------- ---------------
- // | 1 | 2 | | 3 | 4 |
- // --------------- ---------------
- // t = 0xffffff00 t = 33*90
-
- timestamp_ = 0xffffff00;
- packet_->frameType = kVideoFrameKey;
- packet_->isFirstPacket = true;
- packet_->markerBit = false;
- packet_->seqNum = seq_num_;
- packet_->timestamp = timestamp_;
-
- bool retransmitted = false;
- EXPECT_EQ(kIncomplete, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- VCMEncodedFrame* frame_out = DecodeCompleteFrame();
- EXPECT_TRUE(frame_out == NULL);
-
- seq_num_++;
- packet_->isFirstPacket = false;
- packet_->markerBit = true;
- packet_->seqNum = seq_num_;
-
- EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- frame_out = DecodeCompleteFrame();
- CheckOutFrame(frame_out, 2 * size_, false);
- jitter_buffer_->ReleaseFrame(frame_out);
-
- seq_num_++;
- timestamp_ += 33*90;
- packet_->frameType = kVideoFrameDelta;
- packet_->isFirstPacket = true;
- packet_->markerBit = false;
- packet_->seqNum = seq_num_;
- packet_->timestamp = timestamp_;
-
- EXPECT_EQ(kIncomplete, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- frame_out = DecodeCompleteFrame();
- EXPECT_TRUE(frame_out == NULL);
-
- seq_num_++;
- packet_->isFirstPacket = false;
- packet_->markerBit = true;
- packet_->seqNum = seq_num_;
-
- EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- frame_out = DecodeCompleteFrame();
- CheckOutFrame(frame_out, 2 * size_, false);
- EXPECT_EQ(kVideoFrameDelta, frame_out->FrameType());
- jitter_buffer_->ReleaseFrame(frame_out);
-}
-
-TEST_F(TestBasicJitterBuffer, 2FrameWithTimestampWrap) {
- // ------- -------
- // | 1 | | 2 |
- // ------- -------
- // t = 0xffffff00 t = 2700
-
- timestamp_ = 0xffffff00;
- packet_->frameType = kVideoFrameKey;
- packet_->isFirstPacket = true;
- packet_->markerBit = true;
- packet_->timestamp = timestamp_;
-
- bool retransmitted = false;
- // Insert first frame (session will be complete).
- EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- // Insert next frame.
- seq_num_++;
- timestamp_ = 2700;
- packet_->frameType = kVideoFrameDelta;
- packet_->isFirstPacket = true;
- packet_->markerBit = true;
- packet_->seqNum = seq_num_;
- packet_->timestamp = timestamp_;
-
- EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- VCMEncodedFrame* frame_out = DecodeCompleteFrame();
- EXPECT_EQ(0xffffff00, frame_out->TimeStamp());
- CheckOutFrame(frame_out, size_, false);
- EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
- jitter_buffer_->ReleaseFrame(frame_out);
-
- VCMEncodedFrame* frame_out2 = DecodeCompleteFrame();
- EXPECT_EQ(2700u, frame_out2->TimeStamp());
- CheckOutFrame(frame_out2, size_, false);
- EXPECT_EQ(kVideoFrameDelta, frame_out2->FrameType());
- jitter_buffer_->ReleaseFrame(frame_out2);
-}
-
-TEST_F(TestBasicJitterBuffer, Insert2FramesReOrderedWithTimestampWrap) {
- // ------- -------
- // | 2 | | 1 |
- // ------- -------
- // t = 2700 t = 0xffffff00
-
- seq_num_ = 2;
- timestamp_ = 2700;
- packet_->frameType = kVideoFrameDelta;
- packet_->isFirstPacket = true;
- packet_->markerBit = true;
- packet_->seqNum = seq_num_;
- packet_->timestamp = timestamp_;
-
- bool retransmitted = false;
- EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- // Insert second frame
- seq_num_--;
- timestamp_ = 0xffffff00;
- packet_->frameType = kVideoFrameKey;
- packet_->isFirstPacket = true;
- packet_->markerBit = true;
- packet_->seqNum = seq_num_;
- packet_->timestamp = timestamp_;
-
- EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- VCMEncodedFrame* frame_out = DecodeCompleteFrame();
- EXPECT_EQ(0xffffff00, frame_out->TimeStamp());
- CheckOutFrame(frame_out, size_, false);
- EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
- jitter_buffer_->ReleaseFrame(frame_out);
-
- VCMEncodedFrame* frame_out2 = DecodeCompleteFrame();
- EXPECT_EQ(2700u, frame_out2->TimeStamp());
- CheckOutFrame(frame_out2, size_, false);
- EXPECT_EQ(kVideoFrameDelta, frame_out2->FrameType());
- jitter_buffer_->ReleaseFrame(frame_out2);
-}
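-
-// The same serial arithmetic, widened to the 32-bit RTP timestamp, is what
-// orders 0xffffff00 before 2700 in the two tests above (a sketch; the real
-// logic lives in helpers like IsNewerTimestamp()).
-static bool TimestampNewerThan(uint32_t a, uint32_t b) {
-  return static_cast<int32_t>(a - b) > 0;
-}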
-
-TEST_F(TestBasicJitterBuffer, DeltaFrameWithMoreThanMaxNumberOfPackets) {
- int loop = 0;
- bool retransmitted = false;
- // Insert kMaxPacketsInSession packets into the frame.
- do {
- seq_num_++;
- packet_->isFirstPacket = false;
- packet_->markerBit = false;
- packet_->seqNum = seq_num_;
-
- EXPECT_EQ(kIncomplete, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- loop++;
- } while (loop < kMaxPacketsInSession);
-
- // Max number of packets inserted.
- // Insert one more packet.
- seq_num_++;
- packet_->isFirstPacket = false;
- packet_->markerBit = true;
- packet_->seqNum = seq_num_;
-
- // Insert the packet -> frame recycled.
- EXPECT_EQ(kSizeError, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- EXPECT_TRUE(NULL == DecodeCompleteFrame());
-}
-
-TEST_F(TestBasicJitterBuffer, ExceedNumOfFrameWithSeqNumWrap) {
- // Fill the JB with more than the max number of frames (50 delta frames +
- // 51 key frames) with a wrap in seq_num_.
- //
- // --------------------------------------------------------------
- // | 65485 | 65486 | 65487 | .... | 65535 | 0 | 1 | 2 | .....| 50 |
- // --------------------------------------------------------------
- // |<-----------delta frames------------->|<------key frames----->|
-
- int loop = 0;
- seq_num_ = 65485;
- uint32_t first_key_frame_timestamp = 0;
- bool retransmitted = false;
- // Insert kMaxNumberOfFrames frames.
- do {
- timestamp_ += 33*90;
- seq_num_++;
- packet_->isFirstPacket = true;
- packet_->markerBit = true;
- packet_->seqNum = seq_num_;
- packet_->timestamp = timestamp_;
-
- if (loop == 50) {
- first_key_frame_timestamp = packet_->timestamp;
- packet_->frameType = kVideoFrameKey;
- }
-
- // Insert frame.
- EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- loop++;
- } while (loop < kMaxNumberOfFrames);
-
- // Max number of frames inserted.
-
- // Insert one more frame.
- timestamp_ += 33*90;
- seq_num_++;
- packet_->isFirstPacket = true;
- packet_->markerBit = true;
- packet_->seqNum = seq_num_;
- packet_->timestamp = timestamp_;
-
- // Now, no free frame - frames will be recycled until first key frame.
- EXPECT_EQ(kFlushIndicator,
- jitter_buffer_->InsertPacket(*packet_, &retransmitted));
-
- VCMEncodedFrame* frame_out = DecodeCompleteFrame();
- EXPECT_EQ(first_key_frame_timestamp, frame_out->TimeStamp());
- CheckOutFrame(frame_out, size_, false);
- EXPECT_EQ(kVideoFrameKey, frame_out->FrameType());
- jitter_buffer_->ReleaseFrame(frame_out);
-}
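-
-// A sketch of the recycling behavior verified above: when no free frame is
-// available, buffered frames are dropped from the front until a key frame is
-// found, which becomes the next decodable frame. (Hypothetical helper, using
-// the FrameType enum already used in these tests.)
-static void RecycleUntilKeyFrame(std::deque<FrameType>* frames) {
-  while (!frames->empty() && frames->front() != kVideoFrameKey)
-    frames->pop_front();
-}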
-
-TEST_F(TestBasicJitterBuffer, EmptyLastFrame) {
- jitter_buffer_->SetDecodeErrorMode(kWithErrors);
- seq_num_ = 3;
- // Insert one empty packet per frame; the JB should never return the last
- // inserted timestamp, and should only return empty frames once subsequent
- // frames are present.
- int maxSize = 1000;
- bool retransmitted = false;
- for (int i = 0; i < maxSize + 10; i++) {
- timestamp_ += 33 * 90;
- seq_num_++;
- packet_->isFirstPacket = false;
- packet_->markerBit = false;
- packet_->seqNum = seq_num_;
- packet_->timestamp = timestamp_;
- packet_->frameType = kEmptyFrame;
-
- EXPECT_EQ(kNoError, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- VCMEncodedFrame* testFrame = DecodeIncompleteFrame();
- // Timestamp should never be the last TS inserted.
- if (testFrame != NULL) {
- EXPECT_TRUE(testFrame->TimeStamp() < timestamp_);
- jitter_buffer_->ReleaseFrame(testFrame);
- }
- }
-}
-
-TEST_F(TestBasicJitterBuffer, H264IncompleteNalu) {
- jitter_buffer_->SetNackMode(kNoNack, -1, -1);
- jitter_buffer_->SetDecodeErrorMode(kWithErrors);
- ++seq_num_;
- timestamp_ += 33 * 90;
- int insertedLength = 0;
- packet_->seqNum = seq_num_;
- packet_->timestamp = timestamp_;
- packet_->frameType = kVideoFrameKey;
- packet_->isFirstPacket = true;
- packet_->completeNALU = kNaluStart;
- packet_->markerBit = false;
- bool retransmitted = false;
-
- EXPECT_EQ(kDecodableSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- seq_num_ += 2; // Skip one packet.
- packet_->seqNum = seq_num_;
- packet_->frameType = kVideoFrameKey;
- packet_->isFirstPacket = false;
- packet_->completeNALU = kNaluIncomplete;
- packet_->markerBit = false;
-
- EXPECT_EQ(kDecodableSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- seq_num_++;
- packet_->seqNum = seq_num_;
- packet_->frameType = kVideoFrameKey;
- packet_->isFirstPacket = false;
- packet_->completeNALU = kNaluEnd;
- packet_->markerBit = false;
-
- EXPECT_EQ(kDecodableSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- seq_num_++;
- packet_->seqNum = seq_num_;
- packet_->completeNALU = kNaluComplete;
- packet_->markerBit = true; // Last packet.
- EXPECT_EQ(kDecodableSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- // The JB will only output (incomplete) frames if a packet belonging to a
- // subsequent frame was already inserted. Insert one packet of a subsequent
- // frame, placing a high timestamp so the JB always has a next frame
- // (otherwise, for every inserted frame we would need to take care of the
- // next frame as well).
- packet_->seqNum = 1;
- packet_->timestamp = timestamp_ + 33 * 90 * 10;
- packet_->frameType = kVideoFrameDelta;
- packet_->isFirstPacket = false;
- packet_->completeNALU = kNaluStart;
- packet_->markerBit = false;
-
- EXPECT_EQ(kDecodableSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- VCMEncodedFrame* frame_out = DecodeIncompleteFrame();
-
- // We can decode everything from a NALU until a packet has been lost.
- // Thus we can decode the first packet of the first NALU and the second NALU
- // which consists of one packet.
- CheckOutFrame(frame_out, packet_->sizeBytes * 2, false);
- jitter_buffer_->ReleaseFrame(frame_out);
-
- // Test reordered start frame + 1 lost.
- seq_num_ += 2; // Re-order 1 frame.
- timestamp_ += 33*90;
- insertedLength = 0;
-
- packet_->seqNum = seq_num_;
- packet_->timestamp = timestamp_;
- packet_->frameType = kVideoFrameKey;
- packet_->isFirstPacket = false;
- packet_->completeNALU = kNaluEnd;
- packet_->markerBit = false;
- EXPECT_EQ(kDecodableSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- insertedLength += packet_->sizeBytes; // This packet should be decoded.
- seq_num_--;
- packet_->seqNum = seq_num_;
- packet_->timestamp = timestamp_;
- packet_->frameType = kVideoFrameKey;
- packet_->isFirstPacket = true;
- packet_->completeNALU = kNaluStart;
- packet_->markerBit = false;
-
- EXPECT_EQ(kDecodableSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- insertedLength += packet_->sizeBytes; // This packet should be decoded.
-
- seq_num_ += 3; // One packet drop.
- packet_->seqNum = seq_num_;
- packet_->timestamp = timestamp_;
- packet_->frameType = kVideoFrameKey;
- packet_->isFirstPacket = false;
- packet_->completeNALU = kNaluComplete;
- packet_->markerBit = false;
- EXPECT_EQ(kDecodableSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- insertedLength += packet_->sizeBytes; // This packet should be decoded.
- seq_num_++;
- packet_->seqNum = seq_num_;
- packet_->timestamp = timestamp_;
- packet_->frameType = kVideoFrameKey;
- packet_->isFirstPacket = false;
- packet_->completeNALU = kNaluStart;
- packet_->markerBit = false;
- EXPECT_EQ(kDecodableSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- // This packet should be decoded since it's the beginning of a NAL.
- insertedLength += packet_->sizeBytes;
-
- seq_num_ += 2;
- packet_->seqNum = seq_num_;
- packet_->timestamp = timestamp_;
- packet_->frameType = kVideoFrameKey;
- packet_->isFirstPacket = false;
- packet_->completeNALU = kNaluEnd;
- packet_->markerBit = true;
- EXPECT_EQ(kDecodableSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- // This packet should not be decoded because it is an incomplete NAL if it
- // is the last.
- frame_out = DecodeIncompleteFrame();
- // Only last NALU is complete.
- CheckOutFrame(frame_out, insertedLength, false);
- jitter_buffer_->ReleaseFrame(frame_out);
-
- // Test inserting an empty packet.
- seq_num_++;
- timestamp_ += 33 * 90;
- VCMPacket emptypacket(data_, 0, seq_num_, timestamp_, true);
- emptypacket.seqNum = seq_num_;
- emptypacket.timestamp = timestamp_;
- emptypacket.frameType = kVideoFrameKey;
- emptypacket.isFirstPacket = true;
- emptypacket.completeNALU = kNaluComplete;
- emptypacket.markerBit = true;
- EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(emptypacket,
- &retransmitted));
-
- // Will be sent to the decoder, as a packet belonging to a subsequent frame
- // has arrived.
- frame_out = DecodeIncompleteFrame();
- EXPECT_TRUE(frame_out != NULL);
- jitter_buffer_->ReleaseFrame(frame_out);
-
- // Test that a frame can include an empty packet.
- seq_num_++;
- timestamp_ += 33 * 90;
-
- packet_->seqNum = seq_num_;
- packet_->timestamp = timestamp_;
- packet_->frameType = kVideoFrameKey;
- packet_->isFirstPacket = true;
- packet_->completeNALU = kNaluComplete;
- packet_->markerBit = false;
-
- EXPECT_EQ(kDecodableSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- seq_num_++;
- emptypacket.seqNum = seq_num_;
- emptypacket.timestamp = timestamp_;
- emptypacket.frameType = kVideoFrameKey;
- emptypacket.isFirstPacket = true;
- emptypacket.completeNALU = kNaluComplete;
- emptypacket.markerBit = true;
- EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(emptypacket,
- &retransmitted));
-
- frame_out = DecodeCompleteFrame();
- // Only last NALU is complete.
- CheckOutFrame(frame_out, packet_->sizeBytes, false);
- jitter_buffer_->ReleaseFrame(frame_out);
-}
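-
-// A sketch of the decodability rule the H264 expectations above rely on:
-// within a frame, bytes are decodable from the start of a NALU up to the
-// first lost packet, and decoding can only resume at the next NALU start.
-// (Hypothetical types; the real bookkeeping lives in VCMSessionInfo.)
-struct SketchPacket {
-  bool nalu_start;        // Packet begins a NALU (kNaluStart/kNaluComplete).
-  bool preceded_by_loss;  // The packet before this one was never received.
-  size_t bytes;
-};
-static size_t DecodableBytes(const std::vector<SketchPacket>& packets) {
-  size_t decodable = 0;
-  bool in_decodable_run = false;
-  for (const SketchPacket& packet : packets) {
-    if (packet.nalu_start)
-      in_decodable_run = true;   // Decoding can restart at a NALU start.
-    else if (packet.preceded_by_loss)
-      in_decodable_run = false;  // A gap breaks the current NALU.
-    if (in_decodable_run)
-      decodable += packet.bytes;
-  }
-  return decodable;
-}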
-
-TEST_F(TestBasicJitterBuffer, NextFrameWhenIncomplete) {
- // Test that we cannot get incomplete frames from the JB if we haven't
- // received the marker bit, unless we have received a packet from a later
- // timestamp.
- jitter_buffer_->SetDecodeErrorMode(kWithErrors);
- // Start with a complete key frame - insert and decode.
- packet_->frameType = kVideoFrameKey;
- packet_->isFirstPacket = true;
- packet_->markerBit = true;
- bool retransmitted = false;
-
- EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
- VCMEncodedFrame* frame_out = DecodeCompleteFrame();
- EXPECT_TRUE(frame_out != NULL);
- jitter_buffer_->ReleaseFrame(frame_out);
-
- packet_->seqNum += 2;
- packet_->timestamp += 33 * 90;
- packet_->frameType = kVideoFrameDelta;
- packet_->isFirstPacket = false;
- packet_->markerBit = false;
-
- EXPECT_EQ(kDecodableSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- frame_out = DecodeIncompleteFrame();
- EXPECT_TRUE(frame_out == NULL);
-
- packet_->seqNum += 2;
- packet_->timestamp += 33 * 90;
- packet_->isFirstPacket = true;
-
- EXPECT_EQ(kDecodableSession, jitter_buffer_->InsertPacket(*packet_,
- &retransmitted));
-
- frame_out = DecodeIncompleteFrame();
- CheckOutFrame(frame_out, packet_->sizeBytes, false);
- jitter_buffer_->ReleaseFrame(frame_out);
-}
-
-TEST_F(TestRunningJitterBuffer, Full) {
- // Insert a key frame and decode it.
- EXPECT_GE(InsertFrame(kVideoFrameKey), kNoError);
- EXPECT_TRUE(DecodeCompleteFrame());
- DropFrame(1);
- // Fill the jitter buffer.
- EXPECT_GE(InsertFrames(kMaxNumberOfFrames, kVideoFrameDelta), kNoError);
- // Make sure we can't decode these frames.
- EXPECT_FALSE(DecodeCompleteFrame());
- // This frame will make the jitter buffer recycle frames until a key frame.
- // Since none is found it will have to wait until the next key frame before
- // decoding.
- EXPECT_EQ(kFlushIndicator, InsertFrame(kVideoFrameDelta));
- EXPECT_FALSE(DecodeCompleteFrame());
-}
-
-TEST_F(TestRunningJitterBuffer, EmptyPackets) {
- // Make sure a frame can become complete even though empty packets are
- // missing.
- stream_generator_->GenerateFrame(kVideoFrameKey, 3, 3,
- clock_->TimeInMilliseconds());
- bool request_key_frame = false;
- // Insert empty packet.
- EXPECT_EQ(kNoError, InsertPacketAndPop(4));
- EXPECT_FALSE(request_key_frame);
- // Insert 3 media packets.
- EXPECT_EQ(kIncomplete, InsertPacketAndPop(0));
- EXPECT_FALSE(request_key_frame);
- EXPECT_EQ(kIncomplete, InsertPacketAndPop(0));
- EXPECT_FALSE(request_key_frame);
- EXPECT_EQ(kCompleteSession, InsertPacketAndPop(0));
- EXPECT_FALSE(request_key_frame);
- // Insert empty packet.
- EXPECT_EQ(kCompleteSession, InsertPacketAndPop(0));
- EXPECT_FALSE(request_key_frame);
-}
-
-TEST_F(TestRunningJitterBuffer, StatisticsTest) {
- FrameCounts frame_stats(jitter_buffer_->FrameStatistics());
- EXPECT_EQ(0, frame_stats.delta_frames);
- EXPECT_EQ(0, frame_stats.key_frames);
-
- uint32_t framerate = 0;
- uint32_t bitrate = 0;
- jitter_buffer_->IncomingRateStatistics(&framerate, &bitrate);
- EXPECT_EQ(0u, framerate);
- EXPECT_EQ(0u, bitrate);
-
- // Insert a couple of key and delta frames.
- InsertFrame(kVideoFrameKey);
- InsertFrame(kVideoFrameDelta);
- InsertFrame(kVideoFrameDelta);
- InsertFrame(kVideoFrameKey);
- InsertFrame(kVideoFrameDelta);
- // Decode some of them to make sure the statistics don't depend on frames
- // being decoded.
- EXPECT_TRUE(DecodeCompleteFrame());
- EXPECT_TRUE(DecodeCompleteFrame());
- frame_stats = jitter_buffer_->FrameStatistics();
- EXPECT_EQ(3, frame_stats.delta_frames);
- EXPECT_EQ(2, frame_stats.key_frames);
-
- // Insert 20 more frames to get estimates of bitrate and framerate over
- // 1 second.
- for (int i = 0; i < 20; ++i) {
- InsertFrame(kVideoFrameDelta);
- }
- jitter_buffer_->IncomingRateStatistics(&framerate, &bitrate);
- // TODO(holmer): The current implementation returns the average of the last
- // two framerate calculations, which is why it takes two calls to reach the
- // actual framerate. This should be fixed.
- EXPECT_EQ(kDefaultFrameRate / 2u, framerate);
- EXPECT_EQ(kDefaultBitrateKbps, bitrate);
- // Insert 25 more frames to get estimates of bitrate and framerate over
- // 2 seconds.
- for (int i = 0; i < 25; ++i) {
- InsertFrame(kVideoFrameDelta);
- }
- jitter_buffer_->IncomingRateStatistics(&framerate, &bitrate);
- EXPECT_EQ(kDefaultFrameRate, framerate);
- EXPECT_EQ(kDefaultBitrateKbps, bitrate);
-}
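-
-// The timestamp arithmetic used throughout these tests, spelled out: frames
-// are spaced 33 ms apart on the 90 kHz RTP video clock, i.e. 33 * 90 = 2970
-// ticks per frame, which is roughly the 30 fps the estimate above converges
-// to. (Hypothetical helper, for illustration only.)
-static uint32_t RtpTicksPerFrame(int fps) {
-  const int kRtpTicksPerMs = 90;         // 90 kHz RTP clock.
-  return (1000 / fps) * kRtpTicksPerMs;  // fps = 30 -> 33 * 90 ticks.
-}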
-
-TEST_F(TestRunningJitterBuffer, SkipToKeyFrame) {
- // Insert delta frames.
- EXPECT_GE(InsertFrames(5, kVideoFrameDelta), kNoError);
- // Can't decode without a key frame.
- EXPECT_FALSE(DecodeCompleteFrame());
- InsertFrame(kVideoFrameKey);
- // Skip to the next key frame.
- EXPECT_TRUE(DecodeCompleteFrame());
-}
-
-TEST_F(TestRunningJitterBuffer, DontSkipToKeyFrameIfDecodable) {
- InsertFrame(kVideoFrameKey);
- EXPECT_TRUE(DecodeCompleteFrame());
- const int kNumDeltaFrames = 5;
- EXPECT_GE(InsertFrames(kNumDeltaFrames, kVideoFrameDelta), kNoError);
- InsertFrame(kVideoFrameKey);
- for (int i = 0; i < kNumDeltaFrames + 1; ++i) {
- EXPECT_TRUE(DecodeCompleteFrame());
- }
-}
-
-TEST_F(TestRunningJitterBuffer, KeyDeltaKeyDelta) {
- InsertFrame(kVideoFrameKey);
- EXPECT_TRUE(DecodeCompleteFrame());
- const int kNumDeltaFrames = 5;
- EXPECT_GE(InsertFrames(kNumDeltaFrames, kVideoFrameDelta), kNoError);
- InsertFrame(kVideoFrameKey);
- EXPECT_GE(InsertFrames(kNumDeltaFrames, kVideoFrameDelta), kNoError);
- InsertFrame(kVideoFrameKey);
- for (int i = 0; i < 2 * (kNumDeltaFrames + 1); ++i) {
- EXPECT_TRUE(DecodeCompleteFrame());
- }
-}
-
-TEST_F(TestRunningJitterBuffer, TwoPacketsNonContinuous) {
- InsertFrame(kVideoFrameKey);
- EXPECT_TRUE(DecodeCompleteFrame());
- stream_generator_->GenerateFrame(kVideoFrameDelta, 1, 0,
- clock_->TimeInMilliseconds());
- clock_->AdvanceTimeMilliseconds(kDefaultFramePeriodMs);
- stream_generator_->GenerateFrame(kVideoFrameDelta, 2, 0,
- clock_->TimeInMilliseconds());
- EXPECT_EQ(kIncomplete, InsertPacketAndPop(1));
- EXPECT_EQ(kCompleteSession, InsertPacketAndPop(1));
- EXPECT_FALSE(DecodeCompleteFrame());
- EXPECT_EQ(kCompleteSession, InsertPacketAndPop(0));
- EXPECT_TRUE(DecodeCompleteFrame());
- EXPECT_TRUE(DecodeCompleteFrame());
-}
-
-TEST_F(TestJitterBufferNack, EmptyPackets) {
- // Make sure empty packets don't clog the jitter buffer.
- jitter_buffer_->SetNackMode(kNack, media_optimization::kLowRttNackMs, -1);
- EXPECT_GE(InsertFrames(kMaxNumberOfFrames, kEmptyFrame), kNoError);
- InsertFrame(kVideoFrameKey);
- EXPECT_TRUE(DecodeCompleteFrame());
-}
-
-TEST_F(TestJitterBufferNack, NackTooOldPackets) {
- // Insert a key frame and decode it.
- EXPECT_GE(InsertFrame(kVideoFrameKey), kNoError);
- EXPECT_TRUE(DecodeCompleteFrame());
-
- // Drop one frame and insert |kNackHistoryLength| frames to trigger NACKing
- // a packet that is too old.
- DropFrame(1);
- // Insert a frame which should trigger a recycle until the next key frame.
- EXPECT_EQ(kFlushIndicator, InsertFrames(oldest_packet_to_nack_ + 1,
- kVideoFrameDelta));
- EXPECT_FALSE(DecodeCompleteFrame());
-
- bool request_key_frame = false;
- std::vector<uint16_t> nack_list =
- jitter_buffer_->GetNackList(&request_key_frame);
- // No key frame will be requested since the jitter buffer is empty.
- EXPECT_FALSE(request_key_frame);
- EXPECT_EQ(0u, nack_list.size());
-
- EXPECT_GE(InsertFrame(kVideoFrameDelta), kNoError);
- // Waiting for a key frame.
- EXPECT_FALSE(DecodeCompleteFrame());
- EXPECT_FALSE(DecodeIncompleteFrame());
-
- // The next complete continuous frame isn't a key frame, but we're waiting
- // for one.
- EXPECT_FALSE(DecodeCompleteFrame());
- EXPECT_GE(InsertFrame(kVideoFrameKey), kNoError);
- // Skipping ahead to the key frame.
- EXPECT_TRUE(DecodeCompleteFrame());
-}
-
-TEST_F(TestJitterBufferNack, NackLargeJitterBuffer) {
- // Insert a key frame and decode it.
- EXPECT_GE(InsertFrame(kVideoFrameKey), kNoError);
- EXPECT_TRUE(DecodeCompleteFrame());
-
- // Insert a long run of delta frames (nothing is dropped, so nothing should
- // be recycled or NACKed).
- EXPECT_GE(InsertFrames(oldest_packet_to_nack_, kVideoFrameDelta), kNoError);
-
- bool request_key_frame = false;
- std::vector<uint16_t> nack_list =
- jitter_buffer_->GetNackList(&request_key_frame);
- // Verify that the jitter buffer does not request a key frame.
- EXPECT_FALSE(request_key_frame);
- // Verify that no packets are NACKed.
- EXPECT_EQ(0u, nack_list.size());
- // Verify that we can decode the next frame.
- EXPECT_TRUE(DecodeCompleteFrame());
-}
-
-TEST_F(TestJitterBufferNack, NackListFull) {
- // Insert a key frame and decode it.
- EXPECT_GE(InsertFrame(kVideoFrameKey), kNoError);
- EXPECT_TRUE(DecodeCompleteFrame());
-
- // Drop |max_nack_list_size_| + 1 frames to overflow the NACK list.
- DropFrame(max_nack_list_size_ + 1);
- // Insert a frame which should trigger a recycle until the next key frame.
- EXPECT_EQ(kFlushIndicator, InsertFrame(kVideoFrameDelta));
- EXPECT_FALSE(DecodeCompleteFrame());
-
- bool request_key_frame = false;
- jitter_buffer_->GetNackList(&request_key_frame);
- // The jitter buffer is empty, so we won't request key frames until we get a
- // packet.
- EXPECT_FALSE(request_key_frame);
-
- EXPECT_GE(InsertFrame(kVideoFrameDelta), kNoError);
- // Now that we have a packet in the jitter buffer, a key frame will be
- // requested since the newly inserted frame isn't a key frame.
- jitter_buffer_->GetNackList(&request_key_frame);
- EXPECT_TRUE(request_key_frame);
- // The next complete continuous frame isn't a key frame, but we're waiting
- // for one.
- EXPECT_FALSE(DecodeCompleteFrame());
- EXPECT_FALSE(DecodeIncompleteFrame());
- EXPECT_GE(InsertFrame(kVideoFrameKey), kNoError);
- // Skipping ahead to the key frame.
- EXPECT_TRUE(DecodeCompleteFrame());
-}
-
-TEST_F(TestJitterBufferNack, NoNackListReturnedBeforeFirstDecode) {
- DropFrame(10);
- // Insert a frame and try to generate a NACK list. Shouldn't get one.
- EXPECT_GE(InsertFrame(kVideoFrameDelta), kNoError);
- bool request_key_frame = false;
- std::vector<uint16_t> nack_list =
- jitter_buffer_->GetNackList(&request_key_frame);
- // No list generated, and a key frame request is signaled.
- EXPECT_EQ(0u, nack_list.size());
- EXPECT_TRUE(request_key_frame);
-}
-
-TEST_F(TestJitterBufferNack, NackListBuiltBeforeFirstDecode) {
- stream_generator_->Init(0, clock_->TimeInMilliseconds());
- InsertFrame(kVideoFrameKey);
- stream_generator_->GenerateFrame(kVideoFrameDelta, 2, 0,
- clock_->TimeInMilliseconds());
- stream_generator_->NextPacket(NULL); // Drop packet.
- EXPECT_EQ(kIncomplete, InsertPacketAndPop(0));
- EXPECT_TRUE(DecodeCompleteFrame());
- bool extended = false;
- std::vector<uint16_t> nack_list = jitter_buffer_->GetNackList(&extended);
- EXPECT_EQ(1u, nack_list.size());
-}
-
-TEST_F(TestJitterBufferNack, VerifyRetransmittedFlag) {
- stream_generator_->Init(0, clock_->TimeInMilliseconds());
- stream_generator_->GenerateFrame(kVideoFrameKey, 3, 0,
- clock_->TimeInMilliseconds());
- VCMPacket packet;
- stream_generator_->PopPacket(&packet, 0);
- bool retransmitted = false;
- EXPECT_EQ(kIncomplete, jitter_buffer_->InsertPacket(packet, &retransmitted));
- EXPECT_FALSE(retransmitted);
- // Drop second packet.
- stream_generator_->PopPacket(&packet, 1);
- EXPECT_EQ(kIncomplete, jitter_buffer_->InsertPacket(packet, &retransmitted));
- EXPECT_FALSE(retransmitted);
- EXPECT_FALSE(DecodeCompleteFrame());
- bool extended = false;
- std::vector<uint16_t> nack_list = jitter_buffer_->GetNackList(&extended);
- EXPECT_EQ(1u, nack_list.size());
- stream_generator_->PopPacket(&packet, 0);
- EXPECT_EQ(packet.seqNum, nack_list[0]);
- EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(packet,
- &retransmitted));
- EXPECT_TRUE(retransmitted);
- EXPECT_TRUE(DecodeCompleteFrame());
-}
-
-TEST_F(TestJitterBufferNack, UseNackToRecoverFirstKeyFrame) {
- stream_generator_->Init(0, clock_->TimeInMilliseconds());
- stream_generator_->GenerateFrame(kVideoFrameKey, 3, 0,
- clock_->TimeInMilliseconds());
- EXPECT_EQ(kIncomplete, InsertPacketAndPop(0));
- // Drop second packet.
- EXPECT_EQ(kIncomplete, InsertPacketAndPop(1));
- EXPECT_FALSE(DecodeCompleteFrame());
- bool extended = false;
- std::vector<uint16_t> nack_list = jitter_buffer_->GetNackList(&extended);
- EXPECT_EQ(1u, nack_list.size());
- VCMPacket packet;
- stream_generator_->GetPacket(&packet, 0);
- EXPECT_EQ(packet.seqNum, nack_list[0]);
-}
-
-TEST_F(TestJitterBufferNack, UseNackToRecoverFirstKeyFrameSecondInQueue) {
- VCMPacket packet;
- stream_generator_->Init(0, clock_->TimeInMilliseconds());
- // First frame is delta.
- stream_generator_->GenerateFrame(kVideoFrameDelta, 3, 0,
- clock_->TimeInMilliseconds());
- EXPECT_EQ(kIncomplete, InsertPacketAndPop(0));
- // Drop second packet in frame.
- ASSERT_TRUE(stream_generator_->PopPacket(&packet, 0));
- EXPECT_EQ(kIncomplete, InsertPacketAndPop(0));
- // Second frame is key.
- stream_generator_->GenerateFrame(kVideoFrameKey, 3, 0,
- clock_->TimeInMilliseconds() + 10);
- EXPECT_EQ(kIncomplete, InsertPacketAndPop(0));
- // Drop second packet in frame.
- EXPECT_EQ(kIncomplete, InsertPacketAndPop(1));
- EXPECT_FALSE(DecodeCompleteFrame());
- bool extended = false;
- std::vector<uint16_t> nack_list = jitter_buffer_->GetNackList(&extended);
- EXPECT_EQ(1u, nack_list.size());
- stream_generator_->GetPacket(&packet, 0);
- EXPECT_EQ(packet.seqNum, nack_list[0]);
-}
-
-TEST_F(TestJitterBufferNack, NormalOperation) {
- EXPECT_EQ(kNack, jitter_buffer_->nack_mode());
- jitter_buffer_->SetDecodeErrorMode(kWithErrors);
-
- EXPECT_GE(InsertFrame(kVideoFrameKey), kNoError);
- EXPECT_TRUE(DecodeIncompleteFrame());
-
- // ----------------------------------------------------------------
- // | 1 | 2 | .. | 8 | 9 | x | 11 | 12 | .. | 19 | x | 21 | .. | 100 |
- // ----------------------------------------------------------------
- stream_generator_->GenerateFrame(kVideoFrameKey, 100, 0,
- clock_->TimeInMilliseconds());
- clock_->AdvanceTimeMilliseconds(kDefaultFramePeriodMs);
- EXPECT_EQ(kDecodableSession, InsertPacketAndPop(0));
- // Verify that the frame is incomplete.
- EXPECT_FALSE(DecodeCompleteFrame());
- while (stream_generator_->PacketsRemaining() > 1) {
- if (stream_generator_->NextSequenceNumber() % 10 != 0) {
- EXPECT_EQ(kDecodableSession, InsertPacketAndPop(0));
- } else {
- stream_generator_->NextPacket(NULL); // Drop packet
- }
- }
- EXPECT_EQ(kDecodableSession, InsertPacketAndPop(0));
- EXPECT_EQ(0, stream_generator_->PacketsRemaining());
- EXPECT_FALSE(DecodeCompleteFrame());
- EXPECT_FALSE(DecodeIncompleteFrame());
- bool request_key_frame = false;
- std::vector<uint16_t> nack_list =
- jitter_buffer_->GetNackList(&request_key_frame);
- // Verify the NACK list.
- const size_t kExpectedNackSize = 9;
- ASSERT_EQ(kExpectedNackSize, nack_list.size());
- for (size_t i = 0; i < nack_list.size(); ++i)
- EXPECT_EQ((1 + i) * 10, nack_list[i]);
-}
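-
-// A sketch (hypothetical helper) of how the NACK lists above are formed:
-// every sequence number in the gap between the last and the newly received
-// packet is a retransmission candidate, with uint16_t increment handling the
-// wrap exercised in the tests below.
-static std::vector<uint16_t> BuildNackList(uint16_t last_received,
-                                           uint16_t newly_received) {
-  std::vector<uint16_t> nack_list;
-  for (uint16_t seq = static_cast<uint16_t>(last_received + 1);
-       seq != newly_received; ++seq) {
-    nack_list.push_back(seq);
-  }
-  return nack_list;
-}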
-
-TEST_F(TestJitterBufferNack, NormalOperationWrap) {
- bool request_key_frame = false;
- // ------- ------------------------------------------------------------
- // | 65532 | | 65533 | 65534 | 65535 | x | 1 | .. | 9 | x | 11 |.....| 96 |
- // ------- ------------------------------------------------------------
- stream_generator_->Init(65532, clock_->TimeInMilliseconds());
- InsertFrame(kVideoFrameKey);
- EXPECT_FALSE(request_key_frame);
- EXPECT_TRUE(DecodeCompleteFrame());
- stream_generator_->GenerateFrame(kVideoFrameDelta, 100, 0,
- clock_->TimeInMilliseconds());
- EXPECT_EQ(kIncomplete, InsertPacketAndPop(0));
- while (stream_generator_->PacketsRemaining() > 1) {
- if (stream_generator_->NextSequenceNumber() % 10 != 0) {
- EXPECT_EQ(kIncomplete, InsertPacketAndPop(0));
- EXPECT_FALSE(request_key_frame);
- } else {
- stream_generator_->NextPacket(NULL); // Drop packet
- }
- }
- EXPECT_EQ(kIncomplete, InsertPacketAndPop(0));
- EXPECT_FALSE(request_key_frame);
- EXPECT_EQ(0, stream_generator_->PacketsRemaining());
- EXPECT_FALSE(DecodeCompleteFrame());
- EXPECT_FALSE(DecodeCompleteFrame());
- bool extended = false;
- std::vector<uint16_t> nack_list = jitter_buffer_->GetNackList(&extended);
- // Verify the NACK list.
- const size_t kExpectedNackSize = 10;
- ASSERT_EQ(kExpectedNackSize, nack_list.size());
- for (size_t i = 0; i < nack_list.size(); ++i)
- EXPECT_EQ(i * 10, nack_list[i]);
-}
-
-TEST_F(TestJitterBufferNack, NormalOperationWrap2) {
- bool request_key_frame = false;
- // -----------------------------------
- // | 65532 | 65533 | 65534 | x | 0 | 1 |
- // -----------------------------------
- stream_generator_->Init(65532, clock_->TimeInMilliseconds());
- InsertFrame(kVideoFrameKey);
- EXPECT_FALSE(request_key_frame);
- EXPECT_TRUE(DecodeCompleteFrame());
- stream_generator_->GenerateFrame(kVideoFrameDelta, 1, 0,
- clock_->TimeInMilliseconds());
- clock_->AdvanceTimeMilliseconds(kDefaultFramePeriodMs);
- for (int i = 0; i < 5; ++i) {
- if (stream_generator_->NextSequenceNumber() != 65535) {
- EXPECT_EQ(kCompleteSession, InsertPacketAndPop(0));
- EXPECT_FALSE(request_key_frame);
- } else {
- stream_generator_->NextPacket(NULL); // Drop packet
- }
- stream_generator_->GenerateFrame(kVideoFrameDelta, 1, 0,
- clock_->TimeInMilliseconds());
- clock_->AdvanceTimeMilliseconds(kDefaultFramePeriodMs);
- }
- EXPECT_EQ(kCompleteSession, InsertPacketAndPop(0));
- EXPECT_FALSE(request_key_frame);
- bool extended = false;
- std::vector<uint16_t> nack_list = jitter_buffer_->GetNackList(&extended);
- // Verify the NACK list.
- ASSERT_EQ(1u, nack_list.size());
- EXPECT_EQ(65535, nack_list[0]);
-}
-
-TEST_F(TestJitterBufferNack, ResetByFutureKeyFrameDoesntError) {
- stream_generator_->Init(0, clock_->TimeInMilliseconds());
- InsertFrame(kVideoFrameKey);
- EXPECT_TRUE(DecodeCompleteFrame());
- bool extended = false;
- std::vector<uint16_t> nack_list = jitter_buffer_->GetNackList(&extended);
- EXPECT_EQ(0u, nack_list.size());
-
- // A far-into-the-future video frame could be caused by resetting the
- // encoder or otherwise restarting. This should not cause an error when the
- // packet is a keyframe, even if the whole NACK list needs to be flushed.
- stream_generator_->Init(10000, clock_->TimeInMilliseconds());
- clock_->AdvanceTimeMilliseconds(kDefaultFramePeriodMs);
- InsertFrame(kVideoFrameKey);
- EXPECT_TRUE(DecodeCompleteFrame());
- nack_list = jitter_buffer_->GetNackList(&extended);
- EXPECT_EQ(0u, nack_list.size());
-
- // Stream should be decodable from this point.
- clock_->AdvanceTimeMilliseconds(kDefaultFramePeriodMs);
- InsertFrame(kVideoFrameDelta);
- EXPECT_TRUE(DecodeCompleteFrame());
- nack_list = jitter_buffer_->GetNackList(&extended);
- EXPECT_EQ(0u, nack_list.size());
-}
-
-} // namespace webrtc
diff --git a/webrtc/modules/video_coding/main/source/jitter_estimator.cc b/webrtc/modules/video_coding/main/source/jitter_estimator.cc
deleted file mode 100644
index 5894c88d72..0000000000
--- a/webrtc/modules/video_coding/main/source/jitter_estimator.cc
+++ /dev/null
@@ -1,482 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_coding/main/source/internal_defines.h"
-#include "webrtc/modules/video_coding/main/source/jitter_estimator.h"
-#include "webrtc/modules/video_coding/main/source/rtt_filter.h"
-#include "webrtc/system_wrappers/include/clock.h"
-#include "webrtc/system_wrappers/include/field_trial.h"
-
-#include <assert.h>
-#include <math.h>
-#include <stdlib.h>
-#include <string.h>
-
-namespace webrtc {
-
-enum { kStartupDelaySamples = 30 };
-enum { kFsAccuStartupSamples = 5 };
-enum { kMaxFramerateEstimate = 200 };
-
-VCMJitterEstimator::VCMJitterEstimator(const Clock* clock,
- int32_t vcmId,
- int32_t receiverId)
- : _vcmId(vcmId),
- _receiverId(receiverId),
- _phi(0.97),
- _psi(0.9999),
- _alphaCountMax(400),
- _thetaLow(0.000001),
- _nackLimit(3),
- _numStdDevDelayOutlier(15),
- _numStdDevFrameSizeOutlier(3),
- _noiseStdDevs(2.33), // ~Less than 1% chance
- // (look up in normal distribution table)...
- _noiseStdDevOffset(30.0), // ...of getting 30 ms freezes
- _rttFilter(),
- fps_counter_(30), // TODO(sprang): Use an estimator with limit based on
- // time, rather than number of samples.
- low_rate_experiment_(kInit),
- clock_(clock) {
- Reset();
-}
-
-VCMJitterEstimator::~VCMJitterEstimator() {
-}
-
-VCMJitterEstimator&
-VCMJitterEstimator::operator=(const VCMJitterEstimator& rhs)
-{
- if (this != &rhs)
- {
- memcpy(_thetaCov, rhs._thetaCov, sizeof(_thetaCov));
- memcpy(_Qcov, rhs._Qcov, sizeof(_Qcov));
-
- _vcmId = rhs._vcmId;
- _receiverId = rhs._receiverId;
- _avgFrameSize = rhs._avgFrameSize;
- _varFrameSize = rhs._varFrameSize;
- _maxFrameSize = rhs._maxFrameSize;
- _fsSum = rhs._fsSum;
- _fsCount = rhs._fsCount;
- _lastUpdateT = rhs._lastUpdateT;
- _prevEstimate = rhs._prevEstimate;
- _prevFrameSize = rhs._prevFrameSize;
- _avgNoise = rhs._avgNoise;
- _alphaCount = rhs._alphaCount;
- _filterJitterEstimate = rhs._filterJitterEstimate;
- _startupCount = rhs._startupCount;
- _latestNackTimestamp = rhs._latestNackTimestamp;
- _nackCount = rhs._nackCount;
- _rttFilter = rhs._rttFilter;
- }
- return *this;
-}
-
- // Resets the jitter estimator.
-void
-VCMJitterEstimator::Reset()
-{
- _theta[0] = 1/(512e3/8);
- _theta[1] = 0;
- _varNoise = 4.0;
-
- _thetaCov[0][0] = 1e-4;
- _thetaCov[1][1] = 1e2;
- _thetaCov[0][1] = _thetaCov[1][0] = 0;
- _Qcov[0][0] = 2.5e-10;
- _Qcov[1][1] = 1e-10;
- _Qcov[0][1] = _Qcov[1][0] = 0;
- _avgFrameSize = 500;
- _maxFrameSize = 500;
- _varFrameSize = 100;
- _lastUpdateT = -1;
- _prevEstimate = -1.0;
- _prevFrameSize = 0;
- _avgNoise = 0.0;
- _alphaCount = 1;
- _filterJitterEstimate = 0.0;
- _latestNackTimestamp = 0;
- _nackCount = 0;
- _fsSum = 0;
- _fsCount = 0;
- _startupCount = 0;
- _rttFilter.Reset();
- fps_counter_.Reset();
-}
-
-void
-VCMJitterEstimator::ResetNackCount()
-{
- _nackCount = 0;
-}
-
-// Updates the estimates with the new measurements
-void
-VCMJitterEstimator::UpdateEstimate(int64_t frameDelayMS, uint32_t frameSizeBytes,
- bool incompleteFrame /* = false */)
-{
- if (frameSizeBytes == 0)
- {
- return;
- }
- int deltaFS = frameSizeBytes - _prevFrameSize;
- if (_fsCount < kFsAccuStartupSamples)
- {
- _fsSum += frameSizeBytes;
- _fsCount++;
- }
- else if (_fsCount == kFsAccuStartupSamples)
- {
- // Seed the frame size filter with the average of the first
- // kFsAccuStartupSamples samples.
- _avgFrameSize = static_cast<double>(_fsSum) /
- static_cast<double>(_fsCount);
- _fsCount++;
- }
- if (!incompleteFrame || frameSizeBytes > _avgFrameSize)
- {
- double avgFrameSize = _phi * _avgFrameSize +
- (1 - _phi) * frameSizeBytes;
- if (frameSizeBytes < _avgFrameSize + 2 * sqrt(_varFrameSize))
- {
- // Only update the average frame size if this sample wasn't a
- // key frame
- _avgFrameSize = avgFrameSize;
- }
- // Update the variance anyway since we want to capture cases where we only get
- // key frames.
- _varFrameSize = VCM_MAX(_phi * _varFrameSize + (1 - _phi) *
- (frameSizeBytes - avgFrameSize) *
- (frameSizeBytes - avgFrameSize), 1.0);
- }
-
- // Update max frameSize estimate
- _maxFrameSize = VCM_MAX(_psi * _maxFrameSize, static_cast<double>(frameSizeBytes));
-
- if (_prevFrameSize == 0)
- {
- _prevFrameSize = frameSizeBytes;
- return;
- }
- _prevFrameSize = frameSizeBytes;
-
- // Only update the Kalman filter if the sample is not considered
- // an extreme outlier. Even if it is an extreme outlier from a
- // delay point of view, if the frame size also is large the
- // deviation is probably due to an incorrect line slope.
- double deviation = DeviationFromExpectedDelay(frameDelayMS, deltaFS);
-
- if (fabs(deviation) < _numStdDevDelayOutlier * sqrt(_varNoise) ||
- frameSizeBytes > _avgFrameSize + _numStdDevFrameSizeOutlier * sqrt(_varFrameSize))
- {
- // Update the variance of the deviation from the
- // line given by the Kalman filter
- EstimateRandomJitter(deviation, incompleteFrame);
- // Prevent updating with frames which have been congested by a large
- // frame, and therefore arrive almost at the same time as that frame.
- // This can occur when we receive a large frame (key frame) which
- // has been delayed. The next frame is of normal size (delta frame),
- // and thus deltaFS will be << 0. This removes all frame samples
- // which arrive after a key frame.
- if ((!incompleteFrame || deviation >= 0.0) &&
- static_cast<double>(deltaFS) > - 0.25 * _maxFrameSize)
- {
- // Update the Kalman filter with the new data
- KalmanEstimateChannel(frameDelayMS, deltaFS);
- }
- }
- else
- {
- int nStdDev = (deviation >= 0) ? _numStdDevDelayOutlier : -_numStdDevDelayOutlier;
- EstimateRandomJitter(nStdDev * sqrt(_varNoise), incompleteFrame);
- }
- // Post process the total estimated jitter
- if (_startupCount >= kStartupDelaySamples)
- {
- PostProcessEstimate();
- }
- else
- {
- _startupCount++;
- }
-}
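-
-// A sketch of the recursion EstimateRandomJitter() applies to the deviation
-// samples fed in above: the standard exponentially weighted mean/variance
-// update. (Assumed form; in the real code the effective alpha also depends
-// on the rate scaling of the low-rate experiment further down.)
-static void UpdateNoiseStats(double d, double alpha,
-                             double* avg_noise, double* var_noise) {
-  *avg_noise = alpha * *avg_noise + (1 - alpha) * d;
-  double diff = d - *avg_noise;
-  *var_noise = alpha * *var_noise + (1 - alpha) * diff * diff;
-}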
-
-// Updates the nack/packet ratio
-void
-VCMJitterEstimator::FrameNacked()
-{
- // Wait until _nackLimit retransmissions have been received,
- // then always add ~1 RTT delay.
- // TODO(holmer): Should we ever remove the additional delay if the
- // packet losses seem to have stopped? We could for instance scale
- // the number of RTTs to add with the amount of retransmissions in a given
- // time interval, or similar.
- if (_nackCount < _nackLimit)
- {
- _nackCount++;
- }
-}
-
-// Updates Kalman estimate of the channel
-// The caller is expected to sanity check the inputs.
-void
-VCMJitterEstimator::KalmanEstimateChannel(int64_t frameDelayMS,
- int32_t deltaFSBytes)
-{
- double Mh[2];
- double hMh_sigma;
- double kalmanGain[2];
- double measureRes;
- double t00, t01;
-
- // Kalman filtering
-
- // Prediction
- // M = M + Q
- _thetaCov[0][0] += _Qcov[0][0];
- _thetaCov[0][1] += _Qcov[0][1];
- _thetaCov[1][0] += _Qcov[1][0];
- _thetaCov[1][1] += _Qcov[1][1];
-
- // Kalman gain
- // K = M*h'/(sigma2n + h*M*h') = M*h'/(1 + h*M*h')
- // h = [dFS 1]
- // Mh = M*h'
- // hMh_sigma = h*M*h' + R
- Mh[0] = _thetaCov[0][0] * deltaFSBytes + _thetaCov[0][1];
- Mh[1] = _thetaCov[1][0] * deltaFSBytes + _thetaCov[1][1];
- // sigma weights measurements with a small deltaFS as noisy and
- // measurements with a large deltaFS as reliable.
- if (_maxFrameSize < 1.0)
- {
- return;
- }
- double sigma = (300.0 * exp(-fabs(static_cast<double>(deltaFSBytes)) /
- (1e0 * _maxFrameSize)) + 1) * sqrt(_varNoise);
- if (sigma < 1.0)
- {
- sigma = 1.0;
- }
- hMh_sigma = deltaFSBytes * Mh[0] + Mh[1] + sigma;
- if (fabs(hMh_sigma) < 1e-9)
- {
- assert(false);
- return;
- }
- kalmanGain[0] = Mh[0] / hMh_sigma;
- kalmanGain[1] = Mh[1] / hMh_sigma;
-
- // Correction
- // theta = theta + K*(dT - h*theta)
- measureRes = frameDelayMS - (deltaFSBytes * _theta[0] + _theta[1]);
- _theta[0] += kalmanGain[0] * measureRes;
- _theta[1] += kalmanGain[1] * measureRes;
-
- if (_theta[0] < _thetaLow)
- {
- _theta[0] = _thetaLow;
- }
-
- // M = (I - K*h)*M
- t00 = _thetaCov[0][0];
- t01 = _thetaCov[0][1];
- _thetaCov[0][0] = (1 - kalmanGain[0] * deltaFSBytes) * t00 -
- kalmanGain[0] * _thetaCov[1][0];
- _thetaCov[0][1] = (1 - kalmanGain[0] * deltaFSBytes) * t01 -
- kalmanGain[0] * _thetaCov[1][1];
- _thetaCov[1][0] = _thetaCov[1][0] * (1 - kalmanGain[1]) -
- kalmanGain[1] * deltaFSBytes * t00;
- _thetaCov[1][1] = _thetaCov[1][1] * (1 - kalmanGain[1]) -
- kalmanGain[1] * deltaFSBytes * t01;
-
- // Covariance matrix, must be positive semi-definite
- assert(_thetaCov[0][0] + _thetaCov[1][1] >= 0 &&
- _thetaCov[0][0] * _thetaCov[1][1] - _thetaCov[0][1] * _thetaCov[1][0] >= 0 &&
- _thetaCov[0][0] >= 0);
-}
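-
-// A minimal self-contained sketch of the same two-state recursion (an
-// illustrative rewrite, not code from this file; sigma stands for the
-// measurement noise computed above):
-//
-//   void KalmanStep(double theta[2], double M[2][2], const double Q[2][2],
-//                   double dT, double dFS, double sigma) {
-//     // Prediction: M = M + Q.
-//     for (int i = 0; i < 2; ++i)
-//       for (int j = 0; j < 2; ++j)
-//         M[i][j] += Q[i][j];
-//     // Gain: K = M*h' / (h*M*h' + sigma), with h = [dFS 1].
-//     double Mh[2] = {M[0][0] * dFS + M[0][1], M[1][0] * dFS + M[1][1]};
-//     double S = dFS * Mh[0] + Mh[1] + sigma;
-//     double K[2] = {Mh[0] / S, Mh[1] / S};
-//     // Correction: theta = theta + K*(dT - h*theta).
-//     double res = dT - (dFS * theta[0] + theta[1]);
-//     theta[0] += K[0] * res;
-//     theta[1] += K[1] * res;
-//     // Covariance update: M = (I - K*h)*M.
-//     double t00 = M[0][0], t01 = M[0][1];
-//     M[0][0] = (1 - K[0] * dFS) * t00 - K[0] * M[1][0];
-//     M[0][1] = (1 - K[0] * dFS) * t01 - K[0] * M[1][1];
-//     M[1][0] = (1 - K[1]) * M[1][0] - K[1] * dFS * t00;
-//     M[1][1] = (1 - K[1]) * M[1][1] - K[1] * dFS * t01;
-//   }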
-
-// Calculate difference in delay between a sample and the
-// expected delay estimated by the Kalman filter
-double
-VCMJitterEstimator::DeviationFromExpectedDelay(int64_t frameDelayMS,
- int32_t deltaFSBytes) const
-{
- return frameDelayMS - (_theta[0] * deltaFSBytes + _theta[1]);
-}
-
-// Estimates the random jitter by calculating the variance of the
-// sample distance from the line given by theta.
-void VCMJitterEstimator::EstimateRandomJitter(double d_dT,
- bool incompleteFrame) {
- uint64_t now = clock_->TimeInMicroseconds();
- if (_lastUpdateT != -1) {
- fps_counter_.AddSample(now - _lastUpdateT);
- }
- _lastUpdateT = now;
-
- if (_alphaCount == 0) {
- assert(false);
- return;
- }
- double alpha =
- static_cast<double>(_alphaCount - 1) / static_cast<double>(_alphaCount);
- _alphaCount++;
- if (_alphaCount > _alphaCountMax)
- _alphaCount = _alphaCountMax;
-
- if (LowRateExperimentEnabled()) {
- // To avoid a low frame rate stream reacting more slowly to changes,
- // scale the alpha weight relative to a 30 fps stream.
- double fps = GetFrameRate();
- if (fps > 0.0) {
- double rate_scale = 30.0 / fps;
- // At startup, there can be a lot of noise in the fps estimate.
- // Interpolate rate_scale linearly, from 1.0 at sample #1, to 30.0 / fps
- // at sample #kStartupDelaySamples.
- if (_alphaCount < kStartupDelaySamples) {
- rate_scale =
- (_alphaCount * rate_scale + (kStartupDelaySamples - _alphaCount)) /
- kStartupDelaySamples;
- }
- alpha = pow(alpha, rate_scale);
- }
- }
-
- double avgNoise = alpha * _avgNoise + (1 - alpha) * d_dT;
- double varNoise =
- alpha * _varNoise + (1 - alpha) * (d_dT - _avgNoise) * (d_dT - _avgNoise);
- if (!incompleteFrame || varNoise > _varNoise) {
- _avgNoise = avgNoise;
- _varNoise = varNoise;
- }
- if (_varNoise < 1.0) {
- // The variance should never be zero, since then we might get
- // stuck considering all samples to be outliers.
- _varNoise = 1.0;
- }
-}
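-
-// The update above is an exponentially weighted moving average; a compact
-// sketch of the same step (illustrative only):
-//
-//   void EwmaStep(double* avg, double* var, double sample, double alpha) {
-//     double d = sample - *avg;  // Deviation from the current mean.
-//     *avg = alpha * *avg + (1 - alpha) * sample;
-//     *var = alpha * *var + (1 - alpha) * d * d;
-//   }
-//
-// With alpha = (n - 1) / n the filter acts like a running average over the
-// first n samples, then saturates at (_alphaCountMax - 1) / _alphaCountMax.
-// The pow(alpha, rate_scale) rescaling above keeps the effective time
-// constant roughly constant in wall-clock terms: at fps = 10, rate_scale = 3,
-// so alpha^3 forgets per sample what a 30 fps stream forgets over three.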
-
-double
-VCMJitterEstimator::NoiseThreshold() const
-{
- double noiseThreshold = _noiseStdDevs * sqrt(_varNoise) - _noiseStdDevOffset;
- if (noiseThreshold < 1.0)
- {
- noiseThreshold = 1.0;
- }
- return noiseThreshold;
-}
-
-// Calculates the current jitter estimate from the filtered estimates
-double
-VCMJitterEstimator::CalculateEstimate()
-{
- double ret = _theta[0] * (_maxFrameSize - _avgFrameSize) + NoiseThreshold();
-
- // A very low (or negative) estimate is ignored.
- if (ret < 1.0) {
- if (_prevEstimate <= 0.01)
- {
- ret = 1.0;
- }
- else
- {
- ret = _prevEstimate;
- }
- }
- if (ret > 10000.0) // Sanity
- {
- ret = 10000.0;
- }
- _prevEstimate = ret;
- return ret;
-}
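-
-// The estimate combines the two jitter sources: the deterministic term
-// _theta[0] * (_maxFrameSize - _avgFrameSize) is the extra queuing delay a
-// maximally sized frame adds over an average one, given the estimated
-// channel slope in ms/byte, and NoiseThreshold() covers the random delay
-// variation. E.g. (illustrative): a slope of 0.005 ms/byte, max frame
-// 25000 bytes, average 5000 bytes and a 6 ms noise threshold give
-// 0.005 * 20000 + 6 = 106 ms.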
-
-void
-VCMJitterEstimator::PostProcessEstimate()
-{
- _filterJitterEstimate = CalculateEstimate();
-}
-
-void
-VCMJitterEstimator::UpdateRtt(int64_t rttMs)
-{
- _rttFilter.Update(rttMs);
-}
-
-void
-VCMJitterEstimator::UpdateMaxFrameSize(uint32_t frameSizeBytes)
-{
- if (_maxFrameSize < frameSizeBytes)
- {
- _maxFrameSize = frameSizeBytes;
- }
-}
-
-// Returns the current filtered estimate if available,
-// otherwise tries to calculate an estimate.
-int VCMJitterEstimator::GetJitterEstimate(double rttMultiplier) {
- double jitterMS = CalculateEstimate() + OPERATING_SYSTEM_JITTER;
- if (_filterJitterEstimate > jitterMS)
- jitterMS = _filterJitterEstimate;
- if (_nackCount >= _nackLimit)
- jitterMS += _rttFilter.RttMs() * rttMultiplier;
-
- if (LowRateExperimentEnabled()) {
- static const double kJitterScaleLowThreshold = 5.0;
- static const double kJitterScaleHighThreshold = 10.0;
- double fps = GetFrameRate();
- // Ignore jitter for very low fps streams.
- if (fps < kJitterScaleLowThreshold) {
- if (fps == 0.0) {
- return jitterMS;
- }
- return 0;
- }
-
- // Semi-low frame rate; scale by factor linearly interpolated from 0.0 at
- // kJitterScaleLowThreshold to 1.0 at kJitterScaleHighThreshold.
- if (fps < kJitterScaleHighThreshold) {
- jitterMS =
- (1.0 / (kJitterScaleHighThreshold - kJitterScaleLowThreshold)) *
- (fps - kJitterScaleLowThreshold) * jitterMS;
- }
- }
-
- return static_cast<int>(jitterMS + 0.5);
-}
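-
-// Worked example of the low-rate scaling above (illustrative): with the 5
-// and 10 fps thresholds, a 7.5 fps stream scales jitterMS by
-// (1 / (10 - 5)) * (7.5 - 5) = 0.5. Below 5 fps the jitter term is dropped
-// entirely, except for the fps == 0 startup case, where jitterMS is
-// returned unscaled.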
-
-bool VCMJitterEstimator::LowRateExperimentEnabled() {
- if (low_rate_experiment_ == kInit) {
- std::string group =
- webrtc::field_trial::FindFullName("WebRTC-ReducedJitterDelay");
- if (group == "Disabled") {
- low_rate_experiment_ = kDisabled;
- } else {
- low_rate_experiment_ = kEnabled;
- }
- }
- return low_rate_experiment_ == kEnabled;
-}
-
-double VCMJitterEstimator::GetFrameRate() const {
- if (fps_counter_.count() == 0)
- return 0;
-
- double fps = 1000000.0 / fps_counter_.ComputeMean();
- // Sanity check.
- assert(fps >= 0.0);
- if (fps > kMaxFramerateEstimate) {
- fps = kMaxFramerateEstimate;
- }
- return fps;
-}
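-
-// fps_counter_ accumulates inter-frame arrival times in microseconds, so a
-// mean of e.g. 33333 us maps to 1000000.0 / 33333 ~= 30 fps, clamped to
-// kMaxFramerateEstimate.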
-
-} // namespace webrtc
diff --git a/webrtc/modules/video_coding/main/source/jitter_estimator.h b/webrtc/modules/video_coding/main/source/jitter_estimator.h
deleted file mode 100644
index 46ed67ba1d..0000000000
--- a/webrtc/modules/video_coding/main/source/jitter_estimator.h
+++ /dev/null
@@ -1,165 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_JITTER_ESTIMATOR_H_
-#define WEBRTC_MODULES_VIDEO_CODING_JITTER_ESTIMATOR_H_
-
-#include "webrtc/base/rollingaccumulator.h"
-#include "webrtc/modules/video_coding/main/source/rtt_filter.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc
-{
-
-class Clock;
-
-class VCMJitterEstimator
-{
-public:
- VCMJitterEstimator(const Clock* clock,
- int32_t vcmId = 0,
- int32_t receiverId = 0);
- virtual ~VCMJitterEstimator();
- VCMJitterEstimator& operator=(const VCMJitterEstimator& rhs);
-
- // Resets the estimate to the initial state
- void Reset();
- void ResetNackCount();
-
- // Updates the jitter estimate with the new data.
- //
- // Input:
- // - frameDelay : Delay-delta calculated by UTILDelayEstimate in milliseconds
- // - frameSize : Frame size of the current frame.
- // - incompleteFrame : Flags if the frame is used to update the estimate before it
- // was complete. Default is false.
- void UpdateEstimate(int64_t frameDelayMS,
- uint32_t frameSizeBytes,
- bool incompleteFrame = false);
-
- // Returns the current jitter estimate in milliseconds and also
- // adds an RTT dependent term in cases of retransmission.
- // Input:
- // - rttMultiplier : RTT param multiplier (when applicable).
- //
- // Return value : Jitter estimate in milliseconds
- int GetJitterEstimate(double rttMultiplier);
-
- // Updates the nack counter.
- void FrameNacked();
-
- // Updates the RTT filter.
- //
- // Input:
- // - rttMs : RTT in ms
- void UpdateRtt(int64_t rttMs);
-
- void UpdateMaxFrameSize(uint32_t frameSizeBytes);
-
- // A constant describing additional delay on the receiving side that
- // is accounted for neither by the jitter buffer nor by the decoding
- // delay estimate.
- static const uint32_t OPERATING_SYSTEM_JITTER = 10;
-
-protected:
- // These are protected for better testing possibilities
- double _theta[2]; // Estimated line parameters (slope, offset)
- double _varNoise; // Variance of the time-deviation from the line
-
- virtual bool LowRateExperimentEnabled();
-
-private:
- // Updates the Kalman filter for the line describing
- // the frame size dependent jitter.
- //
- // Input:
- // - frameDelayMS : Delay-delta calculated by UTILDelayEstimate in milliseconds
- // - deltaFSBytes : Frame size delta, i.e.
- // : frame size at time T minus frame size at time T-1
- void KalmanEstimateChannel(int64_t frameDelayMS, int32_t deltaFSBytes);
-
- // Updates the random jitter estimate, i.e. the variance
- // of the time deviations from the line given by the Kalman filter.
- //
- // Input:
- // - d_dT : The deviation from the Kalman estimate
- // - incompleteFrame : True if the frame used to update the
- // estimate was incomplete
- void EstimateRandomJitter(double d_dT, bool incompleteFrame);
-
- double NoiseThreshold() const;
-
- // Calculates the current jitter estimate.
- //
- // Return value : The current jitter estimate in milliseconds
- double CalculateEstimate();
-
- // Post process the calculated estimate
- void PostProcessEstimate();
-
- // Calculates the difference in delay between a sample and the
- // expected delay estimated by the Kalman filter.
- //
- // Input:
- // - frameDelayMS : Delay-delta calculated by UTILDelayEstimate in milliseconds
- // - deltaFS : Frame size delta, i.e. frame size at time
- // T minus frame size at time T-1
- //
- // Return value : The difference in milliseconds
- double DeviationFromExpectedDelay(int64_t frameDelayMS,
- int32_t deltaFSBytes) const;
-
- double GetFrameRate() const;
-
- // Constants, filter parameters
- int32_t _vcmId;
- int32_t _receiverId;
- const double _phi;
- const double _psi;
- const uint32_t _alphaCountMax;
- const double _thetaLow;
- const uint32_t _nackLimit;
- const int32_t _numStdDevDelayOutlier;
- const int32_t _numStdDevFrameSizeOutlier;
- const double _noiseStdDevs;
- const double _noiseStdDevOffset;
-
- double _thetaCov[2][2]; // Estimate covariance
- double _Qcov[2][2]; // Process noise covariance
- double _avgFrameSize; // Average frame size
- double _varFrameSize; // Frame size variance
- double _maxFrameSize; // Largest frame size received (decaying
- // by a factor _psi)
- uint32_t _fsSum;
- uint32_t _fsCount;
-
- int64_t _lastUpdateT;
- double _prevEstimate; // The previously returned jitter estimate
- uint32_t _prevFrameSize; // Frame size of the previous frame
- double _avgNoise; // Average of the random jitter
- uint32_t _alphaCount;
- double _filterJitterEstimate; // The filtered sum of jitter estimates
-
- uint32_t _startupCount;
-
- int64_t _latestNackTimestamp; // Timestamp in ms when the latest nack was seen
- uint32_t _nackCount; // Keeps track of the number of nacks received,
- // but never goes above _nackLimit
- VCMRttFilter _rttFilter;
-
- rtc::RollingAccumulator<uint64_t> fps_counter_;
- enum ExperimentFlag { kInit, kEnabled, kDisabled };
- ExperimentFlag low_rate_experiment_;
- const Clock* clock_;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_JITTER_ESTIMATOR_H_
diff --git a/webrtc/modules/video_coding/main/source/jitter_estimator_tests.cc b/webrtc/modules/video_coding/main/source/jitter_estimator_tests.cc
deleted file mode 100644
index c69c4bcdad..0000000000
--- a/webrtc/modules/video_coding/main/source/jitter_estimator_tests.cc
+++ /dev/null
@@ -1,160 +0,0 @@
-/* Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_coding/main/source/jitter_estimator.h"
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/system_wrappers/include/clock.h"
-
-namespace webrtc {
-
-class TestEstimator : public VCMJitterEstimator {
- public:
- explicit TestEstimator(bool exp_enabled)
- : VCMJitterEstimator(&fake_clock_, 0, 0),
- fake_clock_(0),
- exp_enabled_(exp_enabled) {}
-
- virtual bool LowRateExperimentEnabled() { return exp_enabled_; }
-
- void AdvanceClock(int64_t microseconds) {
- fake_clock_.AdvanceTimeMicroseconds(microseconds);
- }
-
- private:
- SimulatedClock fake_clock_;
- const bool exp_enabled_;
-};
-
-class TestVCMJitterEstimator : public ::testing::Test {
- protected:
- TestVCMJitterEstimator()
- : regular_estimator_(false), low_rate_estimator_(true) {}
-
- virtual void SetUp() { regular_estimator_.Reset(); }
-
- TestEstimator regular_estimator_;
- TestEstimator low_rate_estimator_;
-};
-
-// Generates some simple test data in the form of a sawtooth wave.
-class ValueGenerator {
- public:
- ValueGenerator(int32_t amplitude) : amplitude_(amplitude), counter_(0) {}
- virtual ~ValueGenerator() {}
-
- int64_t Delay() { return ((counter_ % 11) - 5) * amplitude_; }
-
- uint32_t FrameSize() { return 1000 + Delay(); }
-
- void Advance() { ++counter_; }
-
- private:
- const int32_t amplitude_;
- int64_t counter_;
-};
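-
-// For amplitude 10 the generator cycles Delay() through
-// {-50, -40, ..., 0, ..., 40, 50} ms with period 11 and FrameSize() through
-// {950, ..., 1050} bytes, i.e. a deterministic, zero-mean jitter source.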
-
-// 5 fps, disable jitter delay altogether.
-TEST_F(TestVCMJitterEstimator, TestLowRate) {
- ValueGenerator gen(10);
- uint64_t time_delta = 1000000 / 5;
- for (int i = 0; i < 60; ++i) {
- regular_estimator_.UpdateEstimate(gen.Delay(), gen.FrameSize());
- regular_estimator_.AdvanceClock(time_delta);
- low_rate_estimator_.UpdateEstimate(gen.Delay(), gen.FrameSize());
- low_rate_estimator_.AdvanceClock(time_delta);
- EXPECT_GT(regular_estimator_.GetJitterEstimate(0), 0);
- if (i > 2)
- EXPECT_EQ(low_rate_estimator_.GetJitterEstimate(0), 0);
- gen.Advance();
- }
-}
-
-// 8 fps, steady state estimate should be in the interval interpolated
-// between 0 and the value of the previous method.
-TEST_F(TestVCMJitterEstimator, TestMidRate) {
- ValueGenerator gen(10);
- uint64_t time_delta = 1000000 / 8;
- for (int i = 0; i < 60; ++i) {
- regular_estimator_.UpdateEstimate(gen.Delay(), gen.FrameSize());
- regular_estimator_.AdvanceClock(time_delta);
- low_rate_estimator_.UpdateEstimate(gen.Delay(), gen.FrameSize());
- low_rate_estimator_.AdvanceClock(time_delta);
- EXPECT_GT(regular_estimator_.GetJitterEstimate(0), 0);
- EXPECT_GT(low_rate_estimator_.GetJitterEstimate(0), 0);
- EXPECT_GE(regular_estimator_.GetJitterEstimate(0),
- low_rate_estimator_.GetJitterEstimate(0));
- gen.Advance();
- }
-}
-
-// 30 fps, steady state estimate should be the same as with the previous
-// method.
-TEST_F(TestVCMJitterEstimator, TestHighRate) {
- ValueGenerator gen(10);
- uint64_t time_delta = 1000000 / 30;
- for (int i = 0; i < 60; ++i) {
- regular_estimator_.UpdateEstimate(gen.Delay(), gen.FrameSize());
- regular_estimator_.AdvanceClock(time_delta);
- low_rate_estimator_.UpdateEstimate(gen.Delay(), gen.FrameSize());
- low_rate_estimator_.AdvanceClock(time_delta);
- EXPECT_EQ(regular_estimator_.GetJitterEstimate(0),
- low_rate_estimator_.GetJitterEstimate(0));
- gen.Advance();
- }
-}
-
-// 10 fps, high jitter then low jitter. Low rate estimator should converge
-// faster to low noise estimate.
-TEST_F(TestVCMJitterEstimator, TestConvergence) {
- // Reach a steady state with high noise.
- ValueGenerator gen(50);
- uint64_t time_delta = 1000000 / 10;
- for (int i = 0; i < 100; ++i) {
- regular_estimator_.UpdateEstimate(gen.Delay(), gen.FrameSize());
- regular_estimator_.AdvanceClock(time_delta * 2);
- low_rate_estimator_.UpdateEstimate(gen.Delay(), gen.FrameSize());
- low_rate_estimator_.AdvanceClock(time_delta * 2);
- gen.Advance();
- }
-
- int threshold = regular_estimator_.GetJitterEstimate(0) / 2;
-
- // New generator with zero noise.
- ValueGenerator low_gen(0);
- int regular_iterations = 0;
- int low_rate_iterations = 0;
- for (int i = 0; i < 500; ++i) {
- if (regular_iterations == 0) {
- regular_estimator_.UpdateEstimate(low_gen.Delay(), low_gen.FrameSize());
- regular_estimator_.AdvanceClock(time_delta);
- if (regular_estimator_.GetJitterEstimate(0) < threshold) {
- regular_iterations = i;
- }
- }
-
- if (low_rate_iterations == 0) {
- low_rate_estimator_.UpdateEstimate(low_gen.Delay(), low_gen.FrameSize());
- low_rate_estimator_.AdvanceClock(time_delta);
- if (low_rate_estimator_.GetJitterEstimate(0) < threshold) {
- low_rate_iterations = i;
- }
- }
-
- if (regular_iterations != 0 && low_rate_iterations != 0) {
- break;
- }
-
- gen.Advance();
- }
-
- EXPECT_NE(regular_iterations, 0);
- EXPECT_NE(low_rate_iterations, 0);
- EXPECT_LE(low_rate_iterations, regular_iterations);
-}
-} // namespace webrtc
diff --git a/webrtc/modules/video_coding/main/source/media_opt_util.cc b/webrtc/modules/video_coding/main/source/media_opt_util.cc
deleted file mode 100644
index 51decbed97..0000000000
--- a/webrtc/modules/video_coding/main/source/media_opt_util.cc
+++ /dev/null
@@ -1,774 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_coding/main/source/media_opt_util.h"
-
-#include <algorithm>
-#include <float.h>
-#include <limits.h>
-#include <math.h>
-
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/video_coding/codecs/vp8/include/vp8_common_types.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding_defines.h"
-#include "webrtc/modules/video_coding/main/source/fec_tables_xor.h"
-#include "webrtc/modules/video_coding/main/source/nack_fec_tables.h"
-
-namespace webrtc {
-// Max value of loss rates in off-line model
-static const int kPacketLossMax = 129;
-
-namespace media_optimization {
-
-VCMProtectionMethod::VCMProtectionMethod()
- : _effectivePacketLoss(0),
- _protectionFactorK(0),
- _protectionFactorD(0),
- _scaleProtKey(2.0f),
- _maxPayloadSize(1460),
- _qmRobustness(new VCMQmRobustness()),
- _useUepProtectionK(false),
- _useUepProtectionD(true),
- _corrFecCost(1.0),
- _type(kNone) {
-}
-
-VCMProtectionMethod::~VCMProtectionMethod()
-{
- delete _qmRobustness;
-}
-void
-VCMProtectionMethod::UpdateContentMetrics(const
- VideoContentMetrics* contentMetrics)
-{
- _qmRobustness->UpdateContent(contentMetrics);
-}
-
-VCMNackFecMethod::VCMNackFecMethod(int64_t lowRttNackThresholdMs,
- int64_t highRttNackThresholdMs)
- : VCMFecMethod(),
- _lowRttNackMs(lowRttNackThresholdMs),
- _highRttNackMs(highRttNackThresholdMs),
- _maxFramesFec(1) {
- assert(lowRttNackThresholdMs >= -1 && highRttNackThresholdMs >= -1);
- assert(highRttNackThresholdMs == -1 ||
- lowRttNackThresholdMs <= highRttNackThresholdMs);
- assert(lowRttNackThresholdMs > -1 || highRttNackThresholdMs == -1);
- _type = kNackFec;
-}
-
-VCMNackFecMethod::~VCMNackFecMethod()
-{
- //
-}
-bool
-VCMNackFecMethod::ProtectionFactor(const VCMProtectionParameters* parameters)
-{
- // Hybrid Nack FEC has three operational modes:
- // 1. Low RTT (below kLowRttNackMs) - Nack only: Set FEC rate
- // (_protectionFactorD) to zero. -1 means no FEC.
- // 2. High RTT (above _highRttNackMs) - FEC Only: Keep FEC factors.
- // -1 means always allow NACK.
- // 3. Medium RTT values - Hybrid mode: We will only nack the
- // residual following the decoding of the FEC (refer to JB logic). FEC
- // delta protection factor will be adjusted based on the RTT.
-
- // Otherwise: we count on FEC; if the RTT is below a threshold, then we
- // nack the residual, based on a decision made in the JB.
-
- // Compute the protection factors
- VCMFecMethod::ProtectionFactor(parameters);
- if (_lowRttNackMs == -1 || parameters->rtt < _lowRttNackMs)
- {
- _protectionFactorD = 0;
- VCMFecMethod::UpdateProtectionFactorD(_protectionFactorD);
- }
-
- // When in Hybrid mode (RTT range), adjust FEC rates based on the
- // RTT (NACK effectiveness) - adjustment factor is in the range [0,1].
- else if (_highRttNackMs == -1 || parameters->rtt < _highRttNackMs)
- {
- // TODO(mikhal): Disabling adjustment temporarily.
- // uint16_t rttIndex = (uint16_t) parameters->rtt;
- float adjustRtt = 1.0f;// (float)VCMNackFecTable[rttIndex] / 100.0f;
-
- // Adjust FEC with NACK on (for delta frame only)
- // table depends on RTT relative to rttMax (NACK Threshold)
- _protectionFactorD = static_cast<uint8_t>
- (adjustRtt *
- static_cast<float>(_protectionFactorD));
- // update FEC rates after applying adjustment
- VCMFecMethod::UpdateProtectionFactorD(_protectionFactorD);
- }
-
- return true;
-}
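-
-// E.g. (illustrative): constructed as VCMNackFecMethod(kLowRttNackMs, -1),
-// i.e. a 20 ms low threshold and no high threshold, an RTT of 10 ms zeroes
-// the delta FEC rate (mode 1), while any RTT >= 20 ms takes the hybrid
-// branch (mode 3), since a high threshold of -1 always allows NACK.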
-
-int VCMNackFecMethod::ComputeMaxFramesFec(
- const VCMProtectionParameters* parameters) {
- if (parameters->numLayers > 2) {
- // For more than 2 temporal layers we will only have FEC on the base layer,
- // and the base layers will be pretty far apart. Therefore we force one
- // frame FEC.
- return 1;
- }
- // We set the max number of frames to base the FEC on so that on average
- // we will have complete frames in one RTT. Note that this is an upper
- // bound, and that the actual number of frames used for FEC is decided by the
- // RTP module based on the actual number of packets and the protection factor.
- float base_layer_framerate = parameters->frameRate /
- static_cast<float>(1 << (parameters->numLayers - 1));
- int max_frames_fec = std::max(static_cast<int>(
- 2.0f * base_layer_framerate * parameters->rtt /
- 1000.0f + 0.5f), 1);
- // |kUpperLimitFramesFec| is the upper limit on how many frames we
- // allow any FEC to be based on.
- if (max_frames_fec > kUpperLimitFramesFec) {
- max_frames_fec = kUpperLimitFramesFec;
- }
- return max_frames_fec;
-}
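-
-// Worked example (illustrative): frameRate = 30 and numLayers = 2 give a
-// base layer of 30 / 2 = 15 fps; with rtt = 100 ms,
-// max_frames_fec = max(int(2 * 15 * 0.1 + 0.5), 1) = 3, i.e. FEC may span
-// the frames expected within one RTT (doubled), capped at
-// kUpperLimitFramesFec.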
-
-int VCMNackFecMethod::MaxFramesFec() const {
- return _maxFramesFec;
-}
-
-bool VCMNackFecMethod::BitRateTooLowForFec(
- const VCMProtectionParameters* parameters) {
- // Bitrate below which we turn off FEC, regardless of reported packet loss.
- // The condition should depend on resolution and content. For now, use a
- // threshold on bytes per frame, with some adjustment for the frame size.
- // The condition for turning off FEC is also based on other factors,
- // such as |_numLayers|, |_maxFramesFec|, and |_rtt|.
- int estimate_bytes_per_frame = 1000 * BitsPerFrame(parameters) / 8;
- int max_bytes_per_frame = kMaxBytesPerFrameForFec;
- int num_pixels = parameters->codecWidth * parameters->codecHeight;
- if (num_pixels <= 352 * 288) {
- max_bytes_per_frame = kMaxBytesPerFrameForFecLow;
- } else if (num_pixels > 640 * 480) {
- max_bytes_per_frame = kMaxBytesPerFrameForFecHigh;
- }
- // TODO (marpan): add condition based on maximum frames used for FEC,
- // and expand condition based on frame size.
- // Max round trip time threshold in ms.
- const int64_t kMaxRttTurnOffFec = 200;
- if (estimate_bytes_per_frame < max_bytes_per_frame &&
- parameters->numLayers < 3 &&
- parameters->rtt < kMaxRttTurnOffFec) {
- return true;
- }
- return false;
-}
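-
-// Worked example (illustrative, assuming the single-layer VP8 rate
-// allocation is 1.0): a one-layer VGA stream at 150 kbps and 30 fps gives
-// BitsPerFrame = 5 kbits, i.e. 1000 * 5 / 8 = 625 bytes per frame;
-// 625 < kMaxBytesPerFrameForFec (700) with numLayers < 3 and rtt < 200 ms,
-// so this returns true and the caller zeroes both protection factors.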
-
-bool
-VCMNackFecMethod::EffectivePacketLoss(const VCMProtectionParameters* parameters)
-{
- // Set the effective packet loss for encoder (based on FEC code).
- // Compute the effective packet loss and residual packet loss due to FEC.
- VCMFecMethod::EffectivePacketLoss(parameters);
- return true;
-}
-
-bool
-VCMNackFecMethod::UpdateParameters(const VCMProtectionParameters* parameters)
-{
- ProtectionFactor(parameters);
- EffectivePacketLoss(parameters);
- _maxFramesFec = ComputeMaxFramesFec(parameters);
- if (BitRateTooLowForFec(parameters)) {
- _protectionFactorK = 0;
- _protectionFactorD = 0;
- }
-
- // The protection/FEC rates obtained above are defined relative to the total
- // number of packets (total rate: source + FEC). FEC in the RTP module
- // assumes the protection factor is defined relative to the source number of
- // packets, so we convert the factor to reduce the mismatch between
- // mediaOpt's rate and the actual one.
- _protectionFactorK = VCMFecMethod::ConvertFECRate(_protectionFactorK);
- _protectionFactorD = VCMFecMethod::ConvertFECRate(_protectionFactorD);
-
- return true;
-}
-
-VCMNackMethod::VCMNackMethod():
-VCMProtectionMethod()
-{
- _type = kNack;
-}
-
-VCMNackMethod::~VCMNackMethod()
-{
- //
-}
-
-bool
-VCMNackMethod::EffectivePacketLoss(const VCMProtectionParameters* parameter)
-{
- // Effective Packet Loss, NA in current version.
- _effectivePacketLoss = 0;
- return true;
-}
-
-bool
-VCMNackMethod::UpdateParameters(const VCMProtectionParameters* parameters)
-{
- // Compute the effective packet loss
- EffectivePacketLoss(parameters);
-
- // nackCost = (bitRate - nackCost) * (lossPr)
- return true;
-}
-
-VCMFecMethod::VCMFecMethod():
-VCMProtectionMethod()
-{
- _type = kFec;
-}
-VCMFecMethod::~VCMFecMethod()
-{
- //
-}
-
-uint8_t
-VCMFecMethod::BoostCodeRateKey(uint8_t packetFrameDelta,
- uint8_t packetFrameKey) const
-{
- uint8_t boostRateKey = 2;
- // Default: ratio scales the FEC protection up for I frames
- uint8_t ratio = 1;
-
- if (packetFrameDelta > 0)
- {
- ratio = (uint8_t) (packetFrameKey / packetFrameDelta);
- }
- ratio = VCM_MAX(boostRateKey, ratio);
-
- return ratio;
-}
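-
-// E.g. packetFrameDelta = 2 and packetFrameKey = 10 give ratio = 5, so key
-// frames get five times the delta-frame protection; the floor of 2 applies
-// whenever key frames are not much larger than delta frames.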
-
-uint8_t
-VCMFecMethod::ConvertFECRate(uint8_t codeRateRTP) const
-{
- return static_cast<uint8_t> (VCM_MIN(255,(0.5 + 255.0 * codeRateRTP /
- (float)(255 - codeRateRTP))));
-}
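-
-// The conversion rescales a rate defined against the total (source + FEC)
-// packet count to one defined against source packets only:
-// f_src = 255 * f_tot / (255 - f_tot). E.g. f_tot = 85 (one FEC packet per
-// three total) maps to 255 * 85 / 170 = 128, i.e. one FEC packet per two
-// source packets.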
-
-// Update FEC with protectionFactorD
-void
-VCMFecMethod::UpdateProtectionFactorD(uint8_t protectionFactorD)
-{
- _protectionFactorD = protectionFactorD;
-}
-
-// Update FEC with protectionFactorK
-void
-VCMFecMethod::UpdateProtectionFactorK(uint8_t protectionFactorK)
-{
- _protectionFactorK = protectionFactorK;
-}
-
-bool
-VCMFecMethod::ProtectionFactor(const VCMProtectionParameters* parameters)
-{
- // FEC PROTECTION SETTINGS: varies with packet loss and bitrate
-
- // No protection if (filtered) packetLoss is 0
- uint8_t packetLoss = (uint8_t) (255 * parameters->lossPr);
- if (packetLoss == 0)
- {
- _protectionFactorK = 0;
- _protectionFactorD = 0;
- return true;
- }
-
- // Parameters for FEC setting:
- // first partition size, thresholds, table parameters, spatial resolution factor.
-
- // First partition protection: ~ 20%
- uint8_t firstPartitionProt = (uint8_t) (255 * 0.20);
-
- // Minimum protection level needed to generate one FEC packet for one
- // source packet/frame (in RTP sender)
- uint8_t minProtLevelFec = 85;
-
- // Threshold on packetLoss and bitRate/frameRate (=average #packets),
- // above which we allocate protection to cover at least the first partition.
- uint8_t lossThr = 0;
- uint8_t packetNumThr = 1;
-
- // Parameters for range of rate index of table.
- const uint8_t ratePar1 = 5;
- const uint8_t ratePar2 = 49;
-
- // Spatial resolution size, relative to a reference size.
- float spatialSizeToRef = static_cast<float>
- (parameters->codecWidth * parameters->codecHeight) /
- (static_cast<float>(704 * 576));
- // resolnFac: This parameter will generally increase/decrease the FEC rate
- // (for fixed bitRate and packetLoss) based on system size.
- // Use a smaller exponent (< 1) to control/soften system size effect.
- const float resolnFac = 1.0 / powf(spatialSizeToRef, 0.3f);
-
- const int bitRatePerFrame = BitsPerFrame(parameters);
-
-
- // Average number of packets per frame (source and fec):
- const uint8_t avgTotPackets = 1 + (uint8_t)
- ((float) bitRatePerFrame * 1000.0
- / (float) (8.0 * _maxPayloadSize) + 0.5);
-
- // FEC rate parameters: for P and I frame
- uint8_t codeRateDelta = 0;
- uint8_t codeRateKey = 0;
-
- // Get index for table: the FEC protection depends on an effective rate.
- // The range on the rate index corresponds to rates (bps)
- // from ~200k to ~8000k, for 30fps
- const uint16_t effRateFecTable = static_cast<uint16_t>
- (resolnFac * bitRatePerFrame);
- uint8_t rateIndexTable =
- (uint8_t) VCM_MAX(VCM_MIN((effRateFecTable - ratePar1) /
- ratePar1, ratePar2), 0);
-
- // Restrict the packet loss range to 50%:
- // current tables are defined only up to 50%.
- if (packetLoss >= kPacketLossMax)
- {
- packetLoss = kPacketLossMax - 1;
- }
- uint16_t indexTable = rateIndexTable * kPacketLossMax + packetLoss;
-
- // Check on table index
- assert(indexTable < kSizeCodeRateXORTable);
-
- // Protection factor for P frame
- codeRateDelta = kCodeRateXORTable[indexTable];
-
- if (packetLoss > lossThr && avgTotPackets > packetNumThr)
- {
- // Set a minimum based on first partition size.
- if (codeRateDelta < firstPartitionProt)
- {
- codeRateDelta = firstPartitionProt;
- }
- }
-
- // Check limit on amount of protection for P frame; 50% is max.
- if (codeRateDelta >= kPacketLossMax)
- {
- codeRateDelta = kPacketLossMax - 1;
- }
-
- float adjustFec = 1.0f;
- // Avoid additional adjustments when layers are active.
- // TODO(mikhal/marco): Update adjustment based on layer info.
- if (parameters->numLayers == 1)
- {
- adjustFec = _qmRobustness->AdjustFecFactor(codeRateDelta,
- parameters->bitRate,
- parameters->frameRate,
- parameters->rtt,
- packetLoss);
- }
-
- codeRateDelta = static_cast<uint8_t>(codeRateDelta * adjustFec);
-
- // For Key frame:
- // Effectively at a higher rate, so we scale/boost the rate
- // The boost factor may depend on several factors: ratio of packet
- // number of I to P frames, how much protection placed on P frames, etc.
- const uint8_t packetFrameDelta = (uint8_t)
- (0.5 + parameters->packetsPerFrame);
- const uint8_t packetFrameKey = (uint8_t)
- (0.5 + parameters->packetsPerFrameKey);
- const uint8_t boostKey = BoostCodeRateKey(packetFrameDelta,
- packetFrameKey);
-
- rateIndexTable = (uint8_t) VCM_MAX(VCM_MIN(
- 1 + (boostKey * effRateFecTable - ratePar1) /
- ratePar1,ratePar2),0);
- uint16_t indexTableKey = rateIndexTable * kPacketLossMax + packetLoss;
-
- // Clamp strictly below the table size so the assert below holds.
- indexTableKey = VCM_MIN(indexTableKey, kSizeCodeRateXORTable - 1);
-
- // Check on table index
- assert(indexTableKey < kSizeCodeRateXORTable);
-
- // Protection factor for I frame
- codeRateKey = kCodeRateXORTable[indexTableKey];
-
- // Boosting for Key frame.
- int boostKeyProt = _scaleProtKey * codeRateDelta;
- if (boostKeyProt >= kPacketLossMax)
- {
- boostKeyProt = kPacketLossMax - 1;
- }
-
- // Make sure I frame protection is at least larger than P frame protection,
- // and at least as high as filtered packet loss.
- codeRateKey = static_cast<uint8_t> (VCM_MAX(packetLoss,
- VCM_MAX(boostKeyProt, codeRateKey)));
-
- // Check limit on amount of protection for I frame: 50% is max.
- if (codeRateKey >= kPacketLossMax)
- {
- codeRateKey = kPacketLossMax - 1;
- }
-
- _protectionFactorK = codeRateKey;
- _protectionFactorD = codeRateDelta;
-
- // Generally there is a rate mis-match between the FEC cost estimated
- // in mediaOpt and the actual FEC cost sent out in the RTP module.
- // This is more significant at low rates (small # of source packets), where
- // the granularity of the FEC decreases. In this case, non-zero protection
- // in mediaOpt may generate 0 FEC packets in the RTP sender (since the
- // actual #FEC is based on rounding off protectionFactor on the actual
- // source packet number). The correction factor (_corrFecCost) attempts to
- // correct this, at least for cases of low rates and low protection levels.
-
- float numPacketsFl = 1.0f + ((float) bitRatePerFrame * 1000.0
- / (float) (8.0 * _maxPayloadSize) + 0.5);
-
- const float estNumFecGen = 0.5f + static_cast<float> (_protectionFactorD *
- numPacketsFl / 255.0f);
-
-
- // We reduce cost factor (which will reduce overhead for FEC and
- // hybrid method) and not the protectionFactor.
- _corrFecCost = 1.0f;
- if (estNumFecGen < 1.1f && _protectionFactorD < minProtLevelFec)
- {
- _corrFecCost = 0.5f;
- }
- if (estNumFecGen < 0.9f && _protectionFactorD < minProtLevelFec)
- {
- _corrFecCost = 0.0f;
- }
-
- // TODO (marpan): Set the UEP protection on/off for Key and Delta frames
- _useUepProtectionK = _qmRobustness->SetUepProtection(codeRateKey,
- parameters->bitRate,
- packetLoss,
- 0);
-
- _useUepProtectionD = _qmRobustness->SetUepProtection(codeRateDelta,
- parameters->bitRate,
- packetLoss,
- 1);
-
- // DONE WITH FEC PROTECTION SETTINGS
- return true;
-}
-
-int VCMFecMethod::BitsPerFrame(const VCMProtectionParameters* parameters) {
- // When temporal layers are available FEC will only be applied on the base
- // layer.
- const float bitRateRatio =
- kVp8LayerRateAlloction[parameters->numLayers - 1][0];
- float frameRateRatio = powf(1 / 2.0, parameters->numLayers - 1);
- float bitRate = parameters->bitRate * bitRateRatio;
- float frameRate = parameters->frameRate * frameRateRatio;
-
- // TODO(mikhal): Update factor following testing.
- float adjustmentFactor = 1;
-
- // Average bits per frame (units of kbits)
- return static_cast<int>(adjustmentFactor * bitRate / frameRate);
-}
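-
-// Worked example (illustrative, assuming a base-layer allocation of 0.6 in
-// kVp8LayerRateAlloction for two layers): bitRate = 500 kbps, frameRate = 30
-// and numLayers = 2 give 500 * 0.6 = 300 kbps at 30 * 0.5 = 15 fps, i.e.
-// 20 kbits per base-layer frame.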
-
-bool
-VCMFecMethod::EffectivePacketLoss(const VCMProtectionParameters* parameters)
-{
- // Effective packet loss to encoder is based on RPL (residual packet loss)
- // this is a soft setting based on degree of FEC protection
- // RPL = received/input packet loss - average_FEC_recovery
- // note: received/input packet loss may be filtered based on FilteredLoss
-
- // Effective Packet Loss, NA in current version.
- _effectivePacketLoss = 0;
-
- return true;
-}
-
-bool
-VCMFecMethod::UpdateParameters(const VCMProtectionParameters* parameters)
-{
- // Compute the protection factor
- ProtectionFactor(parameters);
-
- // Compute the effective packet loss
- EffectivePacketLoss(parameters);
-
- // The protection/FEC rates obtained above are defined relative to the total
- // number of packets (total rate: source + FEC). FEC in the RTP module
- // assumes the protection factor is defined relative to the source number of
- // packets, so we convert the factor to reduce the mismatch between the
- // mediaOpt suggested rate and the actual rate.
- _protectionFactorK = ConvertFECRate(_protectionFactorK);
- _protectionFactorD = ConvertFECRate(_protectionFactorD);
-
- return true;
-}
-VCMLossProtectionLogic::VCMLossProtectionLogic(int64_t nowMs):
-_currentParameters(),
-_rtt(0),
-_lossPr(0.0f),
-_bitRate(0.0f),
-_frameRate(0.0f),
-_keyFrameSize(0.0f),
-_fecRateKey(0),
-_fecRateDelta(0),
-_lastPrUpdateT(0),
-_lossPr255(0.9999f),
-_lossPrHistory(),
-_shortMaxLossPr255(0),
-_packetsPerFrame(0.9999f),
-_packetsPerFrameKey(0.9999f),
-_codecWidth(0),
-_codecHeight(0),
-_numLayers(1)
-{
- Reset(nowMs);
-}
-
-VCMLossProtectionLogic::~VCMLossProtectionLogic()
-{
- Release();
-}
-
-void VCMLossProtectionLogic::SetMethod(
- enum VCMProtectionMethodEnum newMethodType) {
- if (_selectedMethod && _selectedMethod->Type() == newMethodType)
- return;
-
- switch(newMethodType) {
- case kNack:
- _selectedMethod.reset(new VCMNackMethod());
- break;
- case kFec:
- _selectedMethod.reset(new VCMFecMethod());
- break;
- case kNackFec:
- _selectedMethod.reset(new VCMNackFecMethod(kLowRttNackMs, -1));
- break;
- case kNone:
- _selectedMethod.reset();
- break;
- }
- UpdateMethod();
-}
-
-void
-VCMLossProtectionLogic::UpdateRtt(int64_t rtt)
-{
- _rtt = rtt;
-}
-
-void
-VCMLossProtectionLogic::UpdateMaxLossHistory(uint8_t lossPr255,
- int64_t now)
-{
- if (_lossPrHistory[0].timeMs >= 0 &&
- now - _lossPrHistory[0].timeMs < kLossPrShortFilterWinMs)
- {
- if (lossPr255 > _shortMaxLossPr255)
- {
- _shortMaxLossPr255 = lossPr255;
- }
- }
- else
- {
- // Only add a new value to the history once a second
- if (_lossPrHistory[0].timeMs == -1)
- {
- // First, no shift
- _shortMaxLossPr255 = lossPr255;
- }
- else
- {
- // Shift
- for (int32_t i = (kLossPrHistorySize - 2); i >= 0; i--)
- {
- _lossPrHistory[i + 1].lossPr255 = _lossPrHistory[i].lossPr255;
- _lossPrHistory[i + 1].timeMs = _lossPrHistory[i].timeMs;
- }
- }
- if (_shortMaxLossPr255 == 0)
- {
- _shortMaxLossPr255 = lossPr255;
- }
-
- _lossPrHistory[0].lossPr255 = _shortMaxLossPr255;
- _lossPrHistory[0].timeMs = now;
- _shortMaxLossPr255 = 0;
- }
-}
-
-uint8_t
-VCMLossProtectionLogic::MaxFilteredLossPr(int64_t nowMs) const
-{
- uint8_t maxFound = _shortMaxLossPr255;
- if (_lossPrHistory[0].timeMs == -1)
- {
- return maxFound;
- }
- for (int32_t i = 0; i < kLossPrHistorySize; i++)
- {
- if (_lossPrHistory[i].timeMs == -1)
- {
- break;
- }
- if (nowMs - _lossPrHistory[i].timeMs >
- kLossPrHistorySize * kLossPrShortFilterWinMs)
- {
- // This sample (and all samples after this) is too old
- break;
- }
- if (_lossPrHistory[i].lossPr255 > maxFound)
- {
- // This sample is the largest one this far into the history
- maxFound = _lossPrHistory[i].lossPr255;
- }
- }
- return maxFound;
-}
-
-uint8_t VCMLossProtectionLogic::FilteredLoss(
- int64_t nowMs,
- FilterPacketLossMode filter_mode,
- uint8_t lossPr255) {
-
- // Update the max window filter.
- UpdateMaxLossHistory(lossPr255, nowMs);
-
- // Update the recursive average filter.
- _lossPr255.Apply(static_cast<float> (nowMs - _lastPrUpdateT),
- static_cast<float> (lossPr255));
- _lastPrUpdateT = nowMs;
-
- // Filtered loss: default is received loss (no filtering).
- uint8_t filtered_loss = lossPr255;
-
- switch (filter_mode) {
- case kNoFilter:
- break;
- case kAvgFilter:
- filtered_loss = static_cast<uint8_t>(_lossPr255.filtered() + 0.5);
- break;
- case kMaxFilter:
- filtered_loss = MaxFilteredLossPr(nowMs);
- break;
- }
-
- return filtered_loss;
-}
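-
-// E.g. (illustrative): for loss reports of 10, 80 and 10 (out of 255)
-// arriving one second apart, kNoFilter returns the latest report (10),
-// kAvgFilter a recursively smoothed value in between, and kMaxFilter the
-// maximum over the ten-second window (80), the most conservative of the
-// three.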
-
-void
-VCMLossProtectionLogic::UpdateFilteredLossPr(uint8_t packetLossEnc)
-{
- _lossPr = (float) packetLossEnc / (float) 255.0;
-}
-
-void
-VCMLossProtectionLogic::UpdateBitRate(float bitRate)
-{
- _bitRate = bitRate;
-}
-
-void
-VCMLossProtectionLogic::UpdatePacketsPerFrame(float nPackets, int64_t nowMs)
-{
- _packetsPerFrame.Apply(static_cast<float>(nowMs - _lastPacketPerFrameUpdateT),
- nPackets);
- _lastPacketPerFrameUpdateT = nowMs;
-}
-
-void
-VCMLossProtectionLogic::UpdatePacketsPerFrameKey(float nPackets, int64_t nowMs)
-{
- _packetsPerFrameKey.Apply(static_cast<float>(nowMs -
- _lastPacketPerFrameUpdateTKey), nPackets);
- _lastPacketPerFrameUpdateTKey = nowMs;
-}
-
-void
-VCMLossProtectionLogic::UpdateKeyFrameSize(float keyFrameSize)
-{
- _keyFrameSize = keyFrameSize;
-}
-
-void
-VCMLossProtectionLogic::UpdateFrameSize(uint16_t width,
- uint16_t height)
-{
- _codecWidth = width;
- _codecHeight = height;
-}
-
-void VCMLossProtectionLogic::UpdateNumLayers(int numLayers) {
- _numLayers = (numLayers == 0) ? 1 : numLayers;
-}
-
-bool
-VCMLossProtectionLogic::UpdateMethod()
-{
- if (!_selectedMethod)
- return false;
- _currentParameters.rtt = _rtt;
- _currentParameters.lossPr = _lossPr;
- _currentParameters.bitRate = _bitRate;
- _currentParameters.frameRate = _frameRate; // rename actual frame rate?
- _currentParameters.keyFrameSize = _keyFrameSize;
- _currentParameters.fecRateDelta = _fecRateDelta;
- _currentParameters.fecRateKey = _fecRateKey;
- _currentParameters.packetsPerFrame = _packetsPerFrame.filtered();
- _currentParameters.packetsPerFrameKey = _packetsPerFrameKey.filtered();
- _currentParameters.codecWidth = _codecWidth;
- _currentParameters.codecHeight = _codecHeight;
- _currentParameters.numLayers = _numLayers;
- return _selectedMethod->UpdateParameters(&_currentParameters);
-}
-
-VCMProtectionMethod*
-VCMLossProtectionLogic::SelectedMethod() const
-{
- return _selectedMethod.get();
-}
-
-VCMProtectionMethodEnum VCMLossProtectionLogic::SelectedType() const {
- return _selectedMethod ? _selectedMethod->Type() : kNone;
-}
-
-void
-VCMLossProtectionLogic::Reset(int64_t nowMs)
-{
- _lastPrUpdateT = nowMs;
- _lastPacketPerFrameUpdateT = nowMs;
- _lastPacketPerFrameUpdateTKey = nowMs;
- _lossPr255.Reset(0.9999f);
- _packetsPerFrame.Reset(0.9999f);
- _fecRateDelta = _fecRateKey = 0;
- for (int32_t i = 0; i < kLossPrHistorySize; i++)
- {
- _lossPrHistory[i].lossPr255 = 0;
- _lossPrHistory[i].timeMs = -1;
- }
- _shortMaxLossPr255 = 0;
- Release();
-}
-
-void VCMLossProtectionLogic::Release() {
- _selectedMethod.reset();
-}
-
-} // namespace media_optimization
-} // namespace webrtc
diff --git a/webrtc/modules/video_coding/main/source/media_opt_util.h b/webrtc/modules/video_coding/main/source/media_opt_util.h
deleted file mode 100644
index 2085bbcde9..0000000000
--- a/webrtc/modules/video_coding/main/source/media_opt_util.h
+++ /dev/null
@@ -1,364 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_MEDIA_OPT_UTIL_H_
-#define WEBRTC_MODULES_VIDEO_CODING_MEDIA_OPT_UTIL_H_
-
-#include <math.h>
-#include <stdlib.h>
-
-#include "webrtc/base/exp_filter.h"
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/video_coding/main/source/internal_defines.h"
-#include "webrtc/modules/video_coding/main/source/qm_select.h"
-#include "webrtc/system_wrappers/include/trace.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-namespace media_optimization {
-
-// Number of time periods used for (max) window filter for packet loss
-// TODO (marpan): set reasonable window size for filtered packet loss,
-// adjustment should be based on logged/real data of loss stats/correlation.
-enum { kLossPrHistorySize = 10 };
-
-// 1000 ms, total filter length is (kLossPrHistorySize * 1000) ms
-enum { kLossPrShortFilterWinMs = 1000 };
-
-// The type of filter used on the received packet loss reports.
-enum FilterPacketLossMode {
- kNoFilter, // No filtering on received loss.
- kAvgFilter, // Recursive average filter.
- kMaxFilter // Max-window filter, over the time interval of:
- // (kLossPrHistorySize * kLossPrShortFilterWinMs) ms.
-};
-
-// Thresholds for hybrid NACK/FEC
-// common to media optimization and the jitter buffer.
-const int64_t kLowRttNackMs = 20;
-
-struct VCMProtectionParameters
-{
- VCMProtectionParameters() : rtt(0), lossPr(0.0f), bitRate(0.0f),
- packetsPerFrame(0.0f), packetsPerFrameKey(0.0f), frameRate(0.0f),
- keyFrameSize(0.0f), fecRateDelta(0), fecRateKey(0),
- codecWidth(0), codecHeight(0),
- numLayers(1)
- {}
-
- int64_t rtt;
- float lossPr;
- float bitRate;
- float packetsPerFrame;
- float packetsPerFrameKey;
- float frameRate;
- float keyFrameSize;
- uint8_t fecRateDelta;
- uint8_t fecRateKey;
- uint16_t codecWidth;
- uint16_t codecHeight;
- int numLayers;
-};
-
-
-/******************************/
-/* VCMProtectionMethod class */
-/******************************/
-
-enum VCMProtectionMethodEnum
-{
- kNack,
- kFec,
- kNackFec,
- kNone
-};
-
-class VCMLossProbabilitySample
-{
-public:
- VCMLossProbabilitySample() : lossPr255(0), timeMs(-1) {};
-
- uint8_t lossPr255;
- int64_t timeMs;
-};
-
-
-class VCMProtectionMethod
-{
-public:
- VCMProtectionMethod();
- virtual ~VCMProtectionMethod();
-
- // Updates the efficiency of the method using the parameters provided
- //
- // Input:
- // - parameters : Parameters used to calculate efficiency
- //
- // Return value : True if this method is recommended in
- // the given conditions.
- virtual bool UpdateParameters(const VCMProtectionParameters* parameters) = 0;
-
- // Returns the protection type
- //
- // Return value : The protection type
- enum VCMProtectionMethodEnum Type() const { return _type; }
-
- // Returns the effective packet loss for ER, required by this protection method
- //
- // Return value : Required effective packet loss
- virtual uint8_t RequiredPacketLossER() { return _effectivePacketLoss; }
-
- // Extracts the FEC protection factor for Key frame, required by this protection method
- //
- // Return value : Required protectionFactor for Key frame
- virtual uint8_t RequiredProtectionFactorK() { return _protectionFactorK; }
-
- // Extracts the FEC protection factor for Delta frame, required by this protection method
- //
- // Return value : Required protectionFactor for delta frame
- virtual uint8_t RequiredProtectionFactorD() { return _protectionFactorD; }
-
- // Extracts whether the FEC Unequal protection (UEP) is used for Key frame.
- //
- // Return value : Required Unequal protection on/off state.
- virtual bool RequiredUepProtectionK() { return _useUepProtectionK; }
-
- // Extracts whether the FEC Unequal protection (UEP) is used for Delta frame.
- //
- // Return value : Required Unequal protection on/off state.
- virtual bool RequiredUepProtectionD() { return _useUepProtectionD; }
-
- virtual int MaxFramesFec() const { return 1; }
-
- // Updates content metrics
- void UpdateContentMetrics(const VideoContentMetrics* contentMetrics);
-
-protected:
-
- uint8_t _effectivePacketLoss;
- uint8_t _protectionFactorK;
- uint8_t _protectionFactorD;
- // Estimation of residual loss after the FEC
- float _scaleProtKey;
- int32_t _maxPayloadSize;
-
- VCMQmRobustness* _qmRobustness;
- bool _useUepProtectionK;
- bool _useUepProtectionD;
- float _corrFecCost;
- enum VCMProtectionMethodEnum _type;
-};
-
-class VCMNackMethod : public VCMProtectionMethod
-{
-public:
- VCMNackMethod();
- virtual ~VCMNackMethod();
- virtual bool UpdateParameters(const VCMProtectionParameters* parameters);
- // Get the effective packet loss
- bool EffectivePacketLoss(const VCMProtectionParameters* parameter);
-};
-
-class VCMFecMethod : public VCMProtectionMethod
-{
-public:
- VCMFecMethod();
- virtual ~VCMFecMethod();
- virtual bool UpdateParameters(const VCMProtectionParameters* parameters);
- // Get the effective packet loss for ER
- bool EffectivePacketLoss(const VCMProtectionParameters* parameters);
- // Get the FEC protection factors
- bool ProtectionFactor(const VCMProtectionParameters* parameters);
- // Get the boost for key frame protection
- uint8_t BoostCodeRateKey(uint8_t packetFrameDelta,
- uint8_t packetFrameKey) const;
- // Convert the rates: defined relative to total# packets or source# packets
- uint8_t ConvertFECRate(uint8_t codeRate) const;
- // Get the average effective recovery from FEC: for random loss model
- float AvgRecoveryFEC(const VCMProtectionParameters* parameters) const;
- // Update FEC with protectionFactorD
- void UpdateProtectionFactorD(uint8_t protectionFactorD);
- // Update FEC with protectionFactorK
- void UpdateProtectionFactorK(uint8_t protectionFactorK);
- // Compute the bits per frame. Account for temporal layers when applicable.
- int BitsPerFrame(const VCMProtectionParameters* parameters);
-
-protected:
- enum { kUpperLimitFramesFec = 6 };
- // Thresholds values for the bytes/frame and round trip time, below which we
- // may turn off FEC, depending on |_numLayers| and |_maxFramesFec|.
- // Max bytes/frame for VGA, corresponds to ~140k at 25fps.
- enum { kMaxBytesPerFrameForFec = 700 };
- // Max bytes/frame for CIF and lower: corresponds to ~80k at 25fps.
- enum { kMaxBytesPerFrameForFecLow = 400 };
- // Max bytes/frame for frame size larger than VGA, ~200k at 25fps.
- enum { kMaxBytesPerFrameForFecHigh = 1000 };
-};
-
-
-class VCMNackFecMethod : public VCMFecMethod
-{
-public:
- VCMNackFecMethod(int64_t lowRttNackThresholdMs,
- int64_t highRttNackThresholdMs);
- virtual ~VCMNackFecMethod();
- virtual bool UpdateParameters(const VCMProtectionParameters* parameters);
- // Get the effective packet loss for ER
- bool EffectivePacketLoss(const VCMProtectionParameters* parameters);
- // Get the protection factors
- bool ProtectionFactor(const VCMProtectionParameters* parameters);
- // Get the max number of frames the FEC is allowed to be based on.
- int MaxFramesFec() const;
- // Turn off the FEC based on low bitrate and other factors.
- bool BitRateTooLowForFec(const VCMProtectionParameters* parameters);
-private:
- int ComputeMaxFramesFec(const VCMProtectionParameters* parameters);
-
- int64_t _lowRttNackMs;
- int64_t _highRttNackMs;
- int _maxFramesFec;
-};
-
-class VCMLossProtectionLogic
-{
-public:
- VCMLossProtectionLogic(int64_t nowMs);
- ~VCMLossProtectionLogic();
-
- // Set the protection method to be used
- //
- // Input:
- // - newMethodType : New requested protection method type. If one
- // is already set, it will be deleted and replaced
- void SetMethod(VCMProtectionMethodEnum newMethodType);
-
- // Update the round-trip time
- //
- // Input:
- // - rtt : Round-trip time in milliseconds.
- void UpdateRtt(int64_t rtt);
-
- // Update the filtered packet loss.
- //
- // Input:
- // - packetLossEnc : The reported packet loss filtered
- // (max window or average)
- void UpdateFilteredLossPr(uint8_t packetLossEnc);
-
- // Update the current target bit rate.
- //
- // Input:
- // - bitRate : The current target bit rate in kbits/s
- void UpdateBitRate(float bitRate);
-
- // Update the number of packets per frame estimate, for delta frames
- //
- // Input:
- // - nPackets : Number of packets in the latest sent frame.
- void UpdatePacketsPerFrame(float nPackets, int64_t nowMs);
-
- // Update the number of packets per frame estimate, for key frames
- //
- // Input:
- // - nPackets : Number of packets in the latest sent frame.
- void UpdatePacketsPerFrameKey(float nPackets, int64_t nowMs);
-
- // Update the keyFrameSize estimate
- //
- // Input:
- // - keyFrameSize : The size of the latest sent key frame.
- void UpdateKeyFrameSize(float keyFrameSize);
-
- // Update the frame rate
- //
- // Input:
- // - frameRate : The current target frame rate.
- void UpdateFrameRate(float frameRate) { _frameRate = frameRate; }
-
- // Update the frame size
- //
- // Input:
- // - width : The codec frame width.
- // - height : The codec frame height.
- void UpdateFrameSize(uint16_t width, uint16_t height);
-
- // Update the number of active layers
- //
- // Input:
- // - numLayers : Number of layers used.
- void UpdateNumLayers(int numLayers);
-
- // The amount of packet loss to cover with FEC.
- //
- // Input:
- // - fecRateKey : Packet loss to cover with FEC when
- // sending key frames.
- // - fecRateDelta : Packet loss to cover with FEC when
- // sending delta frames.
- void UpdateFECRates(uint8_t fecRateKey, uint8_t fecRateDelta)
- { _fecRateKey = fecRateKey;
- _fecRateDelta = fecRateDelta; }
-
- // Update the protection methods with the current VCMProtectionParameters
- // and set the requested protection settings.
- // Return value : Returns true on update
- bool UpdateMethod();
-
- // Returns the method currently selected.
- //
- // Return value : The protection method currently selected.
- VCMProtectionMethod* SelectedMethod() const;
-
- // Return the protection type of the currently selected method
- VCMProtectionMethodEnum SelectedType() const;
-
- // Updates the filtered loss for the average and max window packet loss,
- // and returns the filtered loss probability in the interval [0, 255].
- // The returned filtered loss value depends on the parameter |filter_mode|.
- // The input parameter |lossPr255| is the received packet loss.
-
- // Return value : The filtered loss probability
- uint8_t FilteredLoss(int64_t nowMs, FilterPacketLossMode filter_mode,
- uint8_t lossPr255);
-
- void Reset(int64_t nowMs);
-
- void Release();
-
-private:
- // Updates the max-window loss history with the latest loss report.
- void UpdateMaxLossHistory(uint8_t lossPr255, int64_t now);
- uint8_t MaxFilteredLossPr(int64_t nowMs) const;
- rtc::scoped_ptr<VCMProtectionMethod> _selectedMethod;
- VCMProtectionParameters _currentParameters;
- int64_t _rtt;
- float _lossPr;
- float _bitRate;
- float _frameRate;
- float _keyFrameSize;
- uint8_t _fecRateKey;
- uint8_t _fecRateDelta;
- int64_t _lastPrUpdateT;
- int64_t _lastPacketPerFrameUpdateT;
- int64_t _lastPacketPerFrameUpdateTKey;
- rtc::ExpFilter _lossPr255;
- VCMLossProbabilitySample _lossPrHistory[kLossPrHistorySize];
- uint8_t _shortMaxLossPr255;
- rtc::ExpFilter _packetsPerFrame;
- rtc::ExpFilter _packetsPerFrameKey;
- uint16_t _codecWidth;
- uint16_t _codecHeight;
- int _numLayers;
-};
-
-} // namespace media_optimization
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_MEDIA_OPT_UTIL_H_
diff --git a/webrtc/modules/video_coding/main/source/media_optimization.cc b/webrtc/modules/video_coding/main/source/media_optimization.cc
deleted file mode 100644
index cc73d3803d..0000000000
--- a/webrtc/modules/video_coding/main/source/media_optimization.cc
+++ /dev/null
@@ -1,648 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_coding/main/source/media_optimization.h"
-
-#include "webrtc/modules/video_coding/main/source/content_metrics_processing.h"
-#include "webrtc/modules/video_coding/main/source/qm_select.h"
-#include "webrtc/modules/video_coding/utility/include/frame_dropper.h"
-#include "webrtc/system_wrappers/include/clock.h"
-#include "webrtc/system_wrappers/include/logging.h"
-
-namespace webrtc {
-namespace media_optimization {
-namespace {
-void UpdateProtectionCallback(
- VCMProtectionMethod* selected_method,
- uint32_t* video_rate_bps,
- uint32_t* nack_overhead_rate_bps,
- uint32_t* fec_overhead_rate_bps,
- VCMProtectionCallback* video_protection_callback) {
- FecProtectionParams delta_fec_params;
- FecProtectionParams key_fec_params;
- // Get the FEC code rate for Key frames (set to 0 when NA).
- key_fec_params.fec_rate = selected_method->RequiredProtectionFactorK();
-
- // Get the FEC code rate for Delta frames (set to 0 when NA).
- delta_fec_params.fec_rate = selected_method->RequiredProtectionFactorD();
-
- // Get the FEC-UEP protection status for Key frames: UEP on/off.
- key_fec_params.use_uep_protection = selected_method->RequiredUepProtectionK();
-
- // Get the FEC-UEP protection status for Delta frames: UEP on/off.
- delta_fec_params.use_uep_protection =
- selected_method->RequiredUepProtectionD();
-
- // The RTP module currently requires the same |max_fec_frames| for both
- // key and delta frames.
- delta_fec_params.max_fec_frames = selected_method->MaxFramesFec();
- key_fec_params.max_fec_frames = selected_method->MaxFramesFec();
-
- // Set the FEC packet mask type. |kFecMaskBursty| is more effective for
- // consecutive losses and little/no packet re-ordering. As we currently
- // do not have feedback data on the degree of correlated losses and packet
- // re-ordering, we keep default setting to |kFecMaskRandom| for now.
- delta_fec_params.fec_mask_type = kFecMaskRandom;
- key_fec_params.fec_mask_type = kFecMaskRandom;
-
- // TODO(Marco): Pass FEC protection values per layer.
- video_protection_callback->ProtectionRequest(&delta_fec_params,
- &key_fec_params,
- video_rate_bps,
- nack_overhead_rate_bps,
- fec_overhead_rate_bps);
-}
-} // namespace
-
-struct MediaOptimization::EncodedFrameSample {
- EncodedFrameSample(size_t size_bytes,
- uint32_t timestamp,
- int64_t time_complete_ms)
- : size_bytes(size_bytes),
- timestamp(timestamp),
- time_complete_ms(time_complete_ms) {}
-
- size_t size_bytes;
- uint32_t timestamp;
- int64_t time_complete_ms;
-};
-
-MediaOptimization::MediaOptimization(Clock* clock)
- : crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- clock_(clock),
- max_bit_rate_(0),
- send_codec_type_(kVideoCodecUnknown),
- codec_width_(0),
- codec_height_(0),
- user_frame_rate_(0),
- frame_dropper_(new FrameDropper),
- loss_prot_logic_(
- new VCMLossProtectionLogic(clock_->TimeInMilliseconds())),
- fraction_lost_(0),
- send_statistics_zero_encode_(0),
- max_payload_size_(1460),
- video_target_bitrate_(0),
- incoming_frame_rate_(0),
- enable_qm_(false),
- encoded_frame_samples_(),
- avg_sent_bit_rate_bps_(0),
- avg_sent_framerate_(0),
- key_frame_cnt_(0),
- delta_frame_cnt_(0),
- content_(new VCMContentMetricsProcessing()),
- qm_resolution_(new VCMQmResolution()),
- last_qm_update_time_(0),
- last_change_time_(0),
- num_layers_(0),
- suspension_enabled_(false),
- video_suspended_(false),
- suspension_threshold_bps_(0),
- suspension_window_bps_(0) {
- memset(send_statistics_, 0, sizeof(send_statistics_));
- memset(incoming_frame_times_, -1, sizeof(incoming_frame_times_));
-}
-
-MediaOptimization::~MediaOptimization(void) {
- loss_prot_logic_->Release();
-}
-
-void MediaOptimization::Reset() {
- CriticalSectionScoped lock(crit_sect_.get());
- SetEncodingDataInternal(
- kVideoCodecUnknown, 0, 0, 0, 0, 0, 0, max_payload_size_);
- memset(incoming_frame_times_, -1, sizeof(incoming_frame_times_));
- incoming_frame_rate_ = 0.0;
- frame_dropper_->Reset();
- loss_prot_logic_->Reset(clock_->TimeInMilliseconds());
- frame_dropper_->SetRates(0, 0);
- content_->Reset();
- qm_resolution_->Reset();
- loss_prot_logic_->UpdateFrameRate(incoming_frame_rate_);
- loss_prot_logic_->Reset(clock_->TimeInMilliseconds());
- send_statistics_zero_encode_ = 0;
- video_target_bitrate_ = 0;
- codec_width_ = 0;
- codec_height_ = 0;
- user_frame_rate_ = 0;
- key_frame_cnt_ = 0;
- delta_frame_cnt_ = 0;
- last_qm_update_time_ = 0;
- last_change_time_ = 0;
- encoded_frame_samples_.clear();
- avg_sent_bit_rate_bps_ = 0;
- num_layers_ = 1;
-}
-
-void MediaOptimization::SetEncodingData(VideoCodecType send_codec_type,
- int32_t max_bit_rate,
- uint32_t target_bitrate,
- uint16_t width,
- uint16_t height,
- uint32_t frame_rate,
- int num_layers,
- int32_t mtu) {
- CriticalSectionScoped lock(crit_sect_.get());
- SetEncodingDataInternal(send_codec_type,
- max_bit_rate,
- frame_rate,
- target_bitrate,
- width,
- height,
- num_layers,
- mtu);
-}
-
-void MediaOptimization::SetEncodingDataInternal(VideoCodecType send_codec_type,
- int32_t max_bit_rate,
- uint32_t frame_rate,
- uint32_t target_bitrate,
- uint16_t width,
- uint16_t height,
- int num_layers,
- int32_t mtu) {
- // Everything codec specific should be reset here since this means the codec
- // has changed. If the native dimension values have changed, the change was
- // either user initiated or QM initiated; which one can only be determined
- // after the first frame has been processed.
- last_change_time_ = clock_->TimeInMilliseconds();
- content_->Reset();
- content_->UpdateFrameRate(frame_rate);
-
- max_bit_rate_ = max_bit_rate;
- send_codec_type_ = send_codec_type;
- video_target_bitrate_ = target_bitrate;
- float target_bitrate_kbps = static_cast<float>(target_bitrate) / 1000.0f;
- loss_prot_logic_->UpdateBitRate(target_bitrate_kbps);
- loss_prot_logic_->UpdateFrameRate(static_cast<float>(frame_rate));
- loss_prot_logic_->UpdateFrameSize(width, height);
- loss_prot_logic_->UpdateNumLayers(num_layers);
- frame_dropper_->Reset();
- frame_dropper_->SetRates(target_bitrate_kbps, static_cast<float>(frame_rate));
- user_frame_rate_ = static_cast<float>(frame_rate);
- codec_width_ = width;
- codec_height_ = height;
- num_layers_ = (num_layers <= 1) ? 1 : num_layers; // The input can be zero.
- max_payload_size_ = mtu;
- qm_resolution_->Initialize(target_bitrate_kbps,
- user_frame_rate_,
- codec_width_,
- codec_height_,
- num_layers_);
-}
-
-uint32_t MediaOptimization::SetTargetRates(
- uint32_t target_bitrate,
- uint8_t fraction_lost,
- int64_t round_trip_time_ms,
- VCMProtectionCallback* protection_callback,
- VCMQMSettingsCallback* qmsettings_callback) {
- CriticalSectionScoped lock(crit_sect_.get());
- VCMProtectionMethod* selected_method = loss_prot_logic_->SelectedMethod();
- float target_bitrate_kbps = static_cast<float>(target_bitrate) / 1000.0f;
- loss_prot_logic_->UpdateBitRate(target_bitrate_kbps);
- loss_prot_logic_->UpdateRtt(round_trip_time_ms);
-
- // Get frame rate for encoder: this is the actual/sent frame rate.
- float actual_frame_rate = SentFrameRateInternal();
-
- // Sanity check.
- if (actual_frame_rate < 1.0) {
- actual_frame_rate = 1.0;
- }
-
- // Update frame rate for the loss protection logic class: frame rate should
- // be the actual/sent rate.
- loss_prot_logic_->UpdateFrameRate(actual_frame_rate);
-
- fraction_lost_ = fraction_lost;
-
- // Returns the filtered packet loss, used for the protection setting.
- // The filtered loss may be the received loss (no filter), or some
- // filtered value (average or max window filter).
- // Use max window filter for now.
- FilterPacketLossMode filter_mode = kMaxFilter;
- uint8_t packet_loss_enc = loss_prot_logic_->FilteredLoss(
- clock_->TimeInMilliseconds(), filter_mode, fraction_lost);
-
- // For now use the filtered loss for computing the robustness settings.
- loss_prot_logic_->UpdateFilteredLossPr(packet_loss_enc);
-
- // Rate cost of the protection methods.
- float protection_overhead_rate = 0.0f;
-
- // Update protection settings, when applicable.
- float sent_video_rate_kbps = 0.0f;
- if (loss_prot_logic_->SelectedType() != kNone) {
- // Update protection method with content metrics.
- selected_method->UpdateContentMetrics(content_->ShortTermAvgData());
-
- // UpdateMethod() computes the robustness settings for the given
- // protection method and its overhead cost; the protection method
- // itself is set by the user via SetVideoProtection.
- loss_prot_logic_->UpdateMethod();
-
- // Update protection callback with protection settings.
- uint32_t sent_video_rate_bps = 0;
- uint32_t sent_nack_rate_bps = 0;
- uint32_t sent_fec_rate_bps = 0;
- // Get the bit cost of protection method, based on the amount of
- // overhead data actually transmitted (including headers) the last
- // second.
- if (protection_callback) {
- UpdateProtectionCallback(selected_method,
- &sent_video_rate_bps,
- &sent_nack_rate_bps,
- &sent_fec_rate_bps,
- protection_callback);
- }
- uint32_t sent_total_rate_bps =
- sent_video_rate_bps + sent_nack_rate_bps + sent_fec_rate_bps;
- // Estimate the protection overhead of the next second by assuming its
- // share of the total sent rate stays the same.
- if (sent_total_rate_bps > 0) {
- protection_overhead_rate =
- static_cast<float>(sent_nack_rate_bps + sent_fec_rate_bps) /
- sent_total_rate_bps;
- }
- // Cap the overhead estimate to 50%.
- if (protection_overhead_rate > 0.5)
- protection_overhead_rate = 0.5;
-
- // Get the effective packet loss for encoder ER when applicable. Should be
- // passed to encoder via fraction_lost.
- packet_loss_enc = selected_method->RequiredPacketLossER();
- sent_video_rate_kbps = static_cast<float>(sent_video_rate_bps) / 1000.0f;
- }
-
- // Source coding rate: total rate - protection overhead.
- video_target_bitrate_ = target_bitrate * (1.0 - protection_overhead_rate);
-
- // Cap target video bitrate to codec maximum.
- if (max_bit_rate_ > 0 && video_target_bitrate_ > max_bit_rate_) {
- video_target_bitrate_ = max_bit_rate_;
- }
-
- // Update encoding rates following protection settings.
- float target_video_bitrate_kbps =
- static_cast<float>(video_target_bitrate_) / 1000.0f;
- frame_dropper_->SetRates(target_video_bitrate_kbps, incoming_frame_rate_);
-
- if (enable_qm_ && qmsettings_callback) {
- // Update QM with rates.
- qm_resolution_->UpdateRates(target_video_bitrate_kbps,
- sent_video_rate_kbps,
- incoming_frame_rate_,
- fraction_lost_);
- // Check for QM selection.
- bool select_qm = CheckStatusForQMchange();
- if (select_qm) {
- SelectQuality(qmsettings_callback);
- }
- // Reset the short-term averaged content data.
- content_->ResetShortTermAvgData();
- }
-
- CheckSuspendConditions();
-
- return video_target_bitrate_;
-}
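
A self-contained sketch of the rate arithmetic in SetTargetRates above, under the same rules (overhead is the NACK+FEC share of the last second's sent rate, capped at 50%; the source rate is capped at the codec maximum). The function name is illustrative:

#include <algorithm>
#include <cstdint>

uint32_t SourceBitrateBps(uint32_t target_bps,
                          uint32_t sent_video_bps,
                          uint32_t sent_nack_bps,
                          uint32_t sent_fec_bps,
                          uint32_t codec_max_bps) {
  const uint32_t total = sent_video_bps + sent_nack_bps + sent_fec_bps;
  float overhead = total > 0
      ? static_cast<float>(sent_nack_bps + sent_fec_bps) / total
      : 0.0f;
  overhead = std::min(overhead, 0.5f);  // Cap the overhead estimate to 50%.
  uint32_t source = static_cast<uint32_t>(target_bps * (1.0f - overhead));
  if (codec_max_bps > 0 && source > codec_max_bps)
    source = codec_max_bps;
  return source;
}
// Example matching the unit test further below: a 130 kbps target with a FEC
// rate equal to the video rate caps overhead at 0.5, giving 65 kbps.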
-
-void MediaOptimization::SetProtectionMethod(VCMProtectionMethodEnum method) {
- CriticalSectionScoped lock(crit_sect_.get());
- loss_prot_logic_->SetMethod(method);
-}
-
-uint32_t MediaOptimization::InputFrameRate() {
- CriticalSectionScoped lock(crit_sect_.get());
- return InputFrameRateInternal();
-}
-
-uint32_t MediaOptimization::InputFrameRateInternal() {
- ProcessIncomingFrameRate(clock_->TimeInMilliseconds());
- return uint32_t(incoming_frame_rate_ + 0.5f);
-}
-
-uint32_t MediaOptimization::SentFrameRate() {
- CriticalSectionScoped lock(crit_sect_.get());
- return SentFrameRateInternal();
-}
-
-uint32_t MediaOptimization::SentFrameRateInternal() {
- PurgeOldFrameSamples(clock_->TimeInMilliseconds());
- UpdateSentFramerate();
- return avg_sent_framerate_;
-}
-
-uint32_t MediaOptimization::SentBitRate() {
- CriticalSectionScoped lock(crit_sect_.get());
- const int64_t now_ms = clock_->TimeInMilliseconds();
- PurgeOldFrameSamples(now_ms);
- UpdateSentBitrate(now_ms);
- return avg_sent_bit_rate_bps_;
-}
-
-int32_t MediaOptimization::UpdateWithEncodedData(
- const EncodedImage& encoded_image) {
- size_t encoded_length = encoded_image._length;
- uint32_t timestamp = encoded_image._timeStamp;
- CriticalSectionScoped lock(crit_sect_.get());
- const int64_t now_ms = clock_->TimeInMilliseconds();
- PurgeOldFrameSamples(now_ms);
- if (encoded_frame_samples_.size() > 0 &&
- encoded_frame_samples_.back().timestamp == timestamp) {
- // Frames having the same timestamp are generated from the same input
- // frame. We don't want to double count them; instead we only add to
- // size_bytes and refresh the completion time.
- encoded_frame_samples_.back().size_bytes += encoded_length;
- encoded_frame_samples_.back().time_complete_ms = now_ms;
- } else {
- encoded_frame_samples_.push_back(
- EncodedFrameSample(encoded_length, timestamp, now_ms));
- }
- UpdateSentBitrate(now_ms);
- UpdateSentFramerate();
- if (encoded_length > 0) {
- const bool delta_frame = encoded_image._frameType != kVideoFrameKey;
-
- frame_dropper_->Fill(encoded_length, delta_frame);
- if (max_payload_size_ > 0 && encoded_length > 0) {
- const float min_packets_per_frame =
- encoded_length / static_cast<float>(max_payload_size_);
- if (delta_frame) {
- loss_prot_logic_->UpdatePacketsPerFrame(min_packets_per_frame,
- clock_->TimeInMilliseconds());
- } else {
- loss_prot_logic_->UpdatePacketsPerFrameKey(
- min_packets_per_frame, clock_->TimeInMilliseconds());
- }
-
- if (enable_qm_) {
- // Update quality select with encoded length.
- qm_resolution_->UpdateEncodedSize(encoded_length);
- }
- }
- if (!delta_frame && encoded_length > 0) {
- loss_prot_logic_->UpdateKeyFrameSize(static_cast<float>(encoded_length));
- }
-
- // Updating counters.
- if (delta_frame) {
- delta_frame_cnt_++;
- } else {
- key_frame_cnt_++;
- }
- }
-
- return VCM_OK;
-}
-
-void MediaOptimization::EnableQM(bool enable) {
- CriticalSectionScoped lock(crit_sect_.get());
- enable_qm_ = enable;
-}
-
-void MediaOptimization::EnableFrameDropper(bool enable) {
- CriticalSectionScoped lock(crit_sect_.get());
- frame_dropper_->Enable(enable);
-}
-
-void MediaOptimization::SuspendBelowMinBitrate(int threshold_bps,
- int window_bps) {
- CriticalSectionScoped lock(crit_sect_.get());
- assert(threshold_bps > 0 && window_bps >= 0);
- suspension_threshold_bps_ = threshold_bps;
- suspension_window_bps_ = window_bps;
- suspension_enabled_ = true;
- video_suspended_ = false;
-}
-
-bool MediaOptimization::IsVideoSuspended() const {
- CriticalSectionScoped lock(crit_sect_.get());
- return video_suspended_;
-}
-
-bool MediaOptimization::DropFrame() {
- CriticalSectionScoped lock(crit_sect_.get());
- UpdateIncomingFrameRate();
- // Leak appropriate number of bytes.
- frame_dropper_->Leak((uint32_t)(InputFrameRateInternal() + 0.5f));
- if (video_suspended_) {
- return true; // Drop all frames when muted.
- }
- return frame_dropper_->DropFrame();
-}
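
DropFrame() drives a leaky-bucket style dropper: UpdateWithEncodedData() fills the bucket with each frame's bits and Leak() drains it at the target rate. A minimal sketch of that idea, assuming a simplified model (the real FrameDropper in webrtc/modules/video_coding/utility also smooths rates and treats key frames specially):

// Minimal leaky-bucket dropper sketch; not the actual FrameDropper.
class LeakyBucketDropper {
 public:
  explicit LeakyBucketDropper(float target_bitrate_kbps)
      : target_kbps_(target_bitrate_kbps), level_kbits_(0.0f) {}

  // Account for one encoded frame's bits.
  void Fill(size_t frame_bytes) {
    level_kbits_ += 8.0f * frame_bytes / 1000.0f;
  }

  // Drain one frame interval's worth of budget at the given frame rate.
  void Leak(float framerate_fps) {
    if (framerate_fps > 0.0f)
      level_kbits_ -= target_kbps_ / framerate_fps;
    if (level_kbits_ < 0.0f)
      level_kbits_ = 0.0f;
  }

  // Drop while the bucket holds more than one frame interval of budget.
  bool DropFrame(float framerate_fps) const {
    return framerate_fps > 0.0f &&
           level_kbits_ > target_kbps_ / framerate_fps;
  }

 private:
  float target_kbps_;
  float level_kbits_;
};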
-
-void MediaOptimization::UpdateContentData(
- const VideoContentMetrics* content_metrics) {
- CriticalSectionScoped lock(crit_sect_.get());
- // Updating content metrics.
- if (content_metrics == NULL) {
- // Disable QM if metrics are NULL.
- enable_qm_ = false;
- qm_resolution_->Reset();
- } else {
- content_->UpdateContentData(content_metrics);
- }
-}
-
-void MediaOptimization::UpdateIncomingFrameRate() {
- int64_t now = clock_->TimeInMilliseconds();
- if (incoming_frame_times_[0] == 0) {
- // No shifting if this is the first time.
- } else {
- // Shift all times one step.
- for (int32_t i = (kFrameCountHistorySize - 2); i >= 0; i--) {
- incoming_frame_times_[i + 1] = incoming_frame_times_[i];
- }
- }
- incoming_frame_times_[0] = now;
- ProcessIncomingFrameRate(now);
-}
-
-int32_t MediaOptimization::SelectQuality(
- VCMQMSettingsCallback* video_qmsettings_callback) {
- // Reset quantities for QM select.
- qm_resolution_->ResetQM();
-
- // Update QM with long-term averaged content metrics.
- qm_resolution_->UpdateContent(content_->LongTermAvgData());
-
- // Select quality mode.
- VCMResolutionScale* qm = NULL;
- int32_t ret = qm_resolution_->SelectResolution(&qm);
- if (ret < 0) {
- return ret;
- }
-
- // Check for updates to spatial/temporal modes.
- QMUpdate(qm, video_qmsettings_callback);
-
- // Reset all the rate and related frame-counter quantities.
- qm_resolution_->ResetRates();
-
- // Reset counters.
- last_qm_update_time_ = clock_->TimeInMilliseconds();
-
- // Reset content metrics.
- content_->Reset();
-
- return VCM_OK;
-}
-
-void MediaOptimization::PurgeOldFrameSamples(int64_t now_ms) {
- while (!encoded_frame_samples_.empty()) {
- if (now_ms - encoded_frame_samples_.front().time_complete_ms >
- kBitrateAverageWinMs) {
- encoded_frame_samples_.pop_front();
- } else {
- break;
- }
- }
-}
-
-void MediaOptimization::UpdateSentBitrate(int64_t now_ms) {
- if (encoded_frame_samples_.empty()) {
- avg_sent_bit_rate_bps_ = 0;
- return;
- }
- size_t framesize_sum = 0;
- for (FrameSampleList::iterator it = encoded_frame_samples_.begin();
- it != encoded_frame_samples_.end();
- ++it) {
- framesize_sum += it->size_bytes;
- }
- float denom = static_cast<float>(
- now_ms - encoded_frame_samples_.front().time_complete_ms);
- if (denom >= 1.0f) {
- avg_sent_bit_rate_bps_ =
- static_cast<uint32_t>(framesize_sum * 8.0f * 1000.0f / denom + 0.5f);
- } else {
- avg_sent_bit_rate_bps_ = framesize_sum * 8;
- }
-}
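
As a quick check of the formula above: 30 samples of 1,500 bytes whose oldest completion time is 1,000 ms old give 30 * 1500 * 8 * 1000 / 1000 = 360,000 bps. The same computation over a plain container, with an illustrative name:

#include <cstddef>
#include <cstdint>
#include <numeric>
#include <vector>

uint32_t AvgSentBitrateBps(const std::vector<size_t>& frame_bytes,
                           int64_t window_ms) {
  const size_t sum = std::accumulate(frame_bytes.begin(), frame_bytes.end(),
                                     static_cast<size_t>(0));
  if (window_ms < 1)
    return static_cast<uint32_t>(sum * 8);  // Degenerate window.
  return static_cast<uint32_t>(sum * 8.0f * 1000.0f / window_ms + 0.5f);
}
// AvgSentBitrateBps(std::vector<size_t>(30, 1500), 1000) == 360000.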
-
-void MediaOptimization::UpdateSentFramerate() {
- if (encoded_frame_samples_.size() <= 1) {
- avg_sent_framerate_ = encoded_frame_samples_.size();
- return;
- }
- int denom = encoded_frame_samples_.back().timestamp -
- encoded_frame_samples_.front().timestamp;
- if (denom > 0) {
- avg_sent_framerate_ =
- (90000 * (encoded_frame_samples_.size() - 1) + denom / 2) / denom;
- } else {
- avg_sent_framerate_ = encoded_frame_samples_.size();
- }
-}
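
The division above converts an RTP timestamp span (90 kHz video clock) into frames per second, with +denom/2 rounding the integer division to the nearest value. A standalone sketch with an illustrative name:

#include <cstddef>
#include <cstdint>

uint32_t SentFps(uint32_t first_ts, uint32_t last_ts, size_t num_samples) {
  const uint32_t denom = last_ts - first_ts;  // RTP ticks at 90 kHz.
  if (denom == 0 || num_samples <= 1)
    return static_cast<uint32_t>(num_samples);
  // +denom/2 rounds to the nearest fps value.
  return static_cast<uint32_t>(
      (90000 * (num_samples - 1) + denom / 2) / denom);
}
// SentFps(0, 90000, 31) == 30: 31 frames spanning exactly one second.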
-
-bool MediaOptimization::QMUpdate(
- VCMResolutionScale* qm,
- VCMQMSettingsCallback* video_qmsettings_callback) {
- // Check for no change.
- if (!qm->change_resolution_spatial && !qm->change_resolution_temporal) {
- return false;
- }
-
- // Check for change in frame rate.
- if (qm->change_resolution_temporal) {
- incoming_frame_rate_ = qm->frame_rate;
- // Reset frame rate estimate.
- memset(incoming_frame_times_, -1, sizeof(incoming_frame_times_));
- }
-
- // Check for change in frame size.
- if (qm->change_resolution_spatial) {
- codec_width_ = qm->codec_width;
- codec_height_ = qm->codec_height;
- }
-
- LOG(LS_INFO) << "Media optimizer requests the video resolution to be changed "
- "to " << qm->codec_width << "x" << qm->codec_height << "@"
- << qm->frame_rate;
-
- // Update VPM with new target frame rate and frame size.
- // Note: use |qm->frame_rate| instead of |_incoming_frame_rate| for updating
- // target frame rate in VPM frame dropper. The quantity |_incoming_frame_rate|
- // will vary/fluctuate, and since we don't want to change the state of the
- // VPM frame dropper, unless a temporal action was selected, we use the
- // quantity |qm->frame_rate| for updating.
- video_qmsettings_callback->SetVideoQMSettings(
- qm->frame_rate, codec_width_, codec_height_);
- content_->UpdateFrameRate(qm->frame_rate);
- qm_resolution_->UpdateCodecParameters(
- qm->frame_rate, codec_width_, codec_height_);
- return true;
-}
-
-// Check timing constraints and look for significant change in:
-// (1) scene content,
-// (2) target bit rate.
-bool MediaOptimization::CheckStatusForQMchange() {
- bool status = true;
-
- // Check that we do not call QMSelect too often, and that we waited some
- // time (to sample the metrics) since the last_change_time event.
- // last_change_time is the time when the user changed size/rate/frame rate
- // (via SetEncodingData).
- int64_t now = clock_->TimeInMilliseconds();
- if ((now - last_qm_update_time_) < kQmMinIntervalMs ||
- (now - last_change_time_) < kQmMinIntervalMs) {
- status = false;
- }
-
- return status;
-}
-
-// Allowing VCM to keep track of incoming frame rate.
-void MediaOptimization::ProcessIncomingFrameRate(int64_t now) {
- int32_t num = 0;
- int32_t nr_of_frames = 0;
- for (num = 1; num < (kFrameCountHistorySize - 1); ++num) {
- if (incoming_frame_times_[num] <= 0 ||
- // Don't use data older than 2 seconds.
- now - incoming_frame_times_[num] > kFrameHistoryWinMs) {
- break;
- } else {
- nr_of_frames++;
- }
- }
- if (num > 1) {
- const int64_t diff =
- incoming_frame_times_[0] - incoming_frame_times_[num - 1];
- incoming_frame_rate_ = 0.0; // Default: no estimate unless diff > 0 below.
- if (diff > 0) {
- incoming_frame_rate_ = nr_of_frames * 1000.0f / static_cast<float>(diff);
- }
- }
-}
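
The loop above counts frames whose arrival times fall inside the 2-second history window and divides by the elapsed span between the newest and oldest counted arrivals. The same estimate over a plain container, names illustrative:

#include <cstddef>
#include <cstdint>
#include <vector>

// times must be ordered newest first, like incoming_frame_times_ above.
float WindowedFps(const std::vector<int64_t>& times_newest_first,
                  int64_t now_ms, int64_t window_ms) {
  size_t num = 1;
  size_t frames_in_window = 0;
  while (num < times_newest_first.size() &&
         times_newest_first[num] > 0 &&
         now_ms - times_newest_first[num] <= window_ms) {
    ++frames_in_window;
    ++num;
  }
  if (num <= 1)
    return 0.0f;  // No estimate available.
  const int64_t span_ms =
      times_newest_first[0] - times_newest_first[num - 1];
  return span_ms > 0 ? frames_in_window * 1000.0f / span_ms : 0.0f;
}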
-
-void MediaOptimization::CheckSuspendConditions() {
- // Check conditions for SuspendBelowMinBitrate. |video_target_bitrate_| is in
- // bps.
- if (suspension_enabled_) {
- if (!video_suspended_) {
- // Check if we just went below the threshold.
- if (video_target_bitrate_ < suspension_threshold_bps_) {
- video_suspended_ = true;
- }
- } else {
- // Video is already suspended. Check if we just went over the threshold
- // with a margin.
- if (video_target_bitrate_ >
- suspension_threshold_bps_ + suspension_window_bps_) {
- video_suspended_ = false;
- }
- }
- }
-}
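
This is plain hysteresis: suspend when the target falls below the threshold, resume only once it exceeds threshold plus window. A compact sketch using the 50/10 kbps values from the unit test further below:

struct SuspendState {
  SuspendState(int threshold_bps, int window_bps)
      : threshold_bps_(threshold_bps), window_bps_(window_bps) {}

  void Update(int target_bps) {
    if (!suspended_ && target_bps < threshold_bps_)
      suspended_ = true;  // Just went below the threshold.
    else if (suspended_ && target_bps > threshold_bps_ + window_bps_)
      suspended_ = false;  // Went above threshold plus margin.
  }
  bool suspended() const { return suspended_; }

 private:
  const int threshold_bps_;
  const int window_bps_;
  bool suspended_ = false;
};
// SuspendState s(50000, 10000): Update(49000) suspends; Update(55000)
// keeps it suspended; Update(61000) resumes.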
-
-} // namespace media_optimization
-} // namespace webrtc
diff --git a/webrtc/modules/video_coding/main/source/media_optimization.h b/webrtc/modules/video_coding/main/source/media_optimization.h
deleted file mode 100644
index c4feeff743..0000000000
--- a/webrtc/modules/video_coding/main/source/media_optimization.h
+++ /dev/null
@@ -1,180 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_MEDIA_OPTIMIZATION_H_
-#define WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_MEDIA_OPTIMIZATION_H_
-
-#include <list>
-
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding.h"
-#include "webrtc/modules/video_coding/main/source/media_opt_util.h"
-#include "webrtc/modules/video_coding/main/source/qm_select.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-
-namespace webrtc {
-
-// Forward declarations.
-class Clock;
-class FrameDropper;
-class VCMContentMetricsProcessing;
-
-namespace media_optimization {
-
-class MediaOptimization {
- public:
- explicit MediaOptimization(Clock* clock);
- ~MediaOptimization();
-
- // TODO(andresp): Can Reset and SetEncodingData be done at construction time
- // only?
- void Reset();
-
- // Informs media optimization of initial encoding state.
- void SetEncodingData(VideoCodecType send_codec_type,
- int32_t max_bit_rate,
- uint32_t bit_rate,
- uint16_t width,
- uint16_t height,
- uint32_t frame_rate,
- int num_temporal_layers,
- int32_t mtu);
-
- // Sets target rates for the encoder given the channel parameters.
- // Inputs: target bitrate - the encoder target bitrate in bits/s.
- // fraction_lost - packet loss rate in % in the network.
- // round_trip_time_ms - round trip time in milliseconds.
- // min_bit_rate - the bit rate of the end-point with lowest rate.
- // max_bit_rate - the bit rate of the end-point with highest rate.
- // TODO(andresp): Find if the callbacks can be triggered only after releasing
- // an internal critical section.
- uint32_t SetTargetRates(uint32_t target_bitrate,
- uint8_t fraction_lost,
- int64_t round_trip_time_ms,
- VCMProtectionCallback* protection_callback,
- VCMQMSettingsCallback* qmsettings_callback);
-
- void SetProtectionMethod(VCMProtectionMethodEnum method);
- void EnableQM(bool enable);
- void EnableFrameDropper(bool enable);
-
- // Lets the sender suspend video when the rate drops below
- // |threshold_bps|, and turns back on when the rate goes back up above
- // |threshold_bps| + |window_bps|.
- void SuspendBelowMinBitrate(int threshold_bps, int window_bps);
- bool IsVideoSuspended() const;
-
- bool DropFrame();
-
- void UpdateContentData(const VideoContentMetrics* content_metrics);
-
- // Informs Media Optimization of encoded output.
- int32_t UpdateWithEncodedData(const EncodedImage& encoded_image);
-
- // InputFrameRate 0 = no frame rate estimate available.
- uint32_t InputFrameRate();
- uint32_t SentFrameRate();
- uint32_t SentBitRate();
-
- private:
- enum {
- kFrameCountHistorySize = 90
- };
- enum {
- kFrameHistoryWinMs = 2000
- };
- enum {
- kBitrateAverageWinMs = 1000
- };
-
- struct EncodedFrameSample;
- typedef std::list<EncodedFrameSample> FrameSampleList;
-
- void UpdateIncomingFrameRate() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
- void PurgeOldFrameSamples(int64_t now_ms)
- EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
- void UpdateSentBitrate(int64_t now_ms) EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
- void UpdateSentFramerate() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
-
- // Computes new Quality Mode.
- int32_t SelectQuality(VCMQMSettingsCallback* qmsettings_callback)
- EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
-
- // Verifies if QM settings differ from default, i.e. if an update is required.
- // Computes actual values, as will be sent to the encoder.
- bool QMUpdate(VCMResolutionScale* qm,
- VCMQMSettingsCallback* qmsettings_callback)
- EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
-
- // Checks if we should make a QM change. Return true if yes, false otherwise.
- bool CheckStatusForQMchange() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
-
- void ProcessIncomingFrameRate(int64_t now)
- EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
-
- // Checks conditions for suspending the video. The method compares
- // |video_target_bitrate_| with the threshold values for suspension, and
- // changes the state of |video_suspended_| accordingly.
- void CheckSuspendConditions() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
-
- void SetEncodingDataInternal(VideoCodecType send_codec_type,
- int32_t max_bit_rate,
- uint32_t frame_rate,
- uint32_t bit_rate,
- uint16_t width,
- uint16_t height,
- int num_temporal_layers,
- int32_t mtu)
- EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
-
- uint32_t InputFrameRateInternal() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
-
- uint32_t SentFrameRateInternal() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
-
- // Protect all members.
- rtc::scoped_ptr<CriticalSectionWrapper> crit_sect_;
-
- Clock* clock_ GUARDED_BY(crit_sect_);
- int32_t max_bit_rate_ GUARDED_BY(crit_sect_);
- VideoCodecType send_codec_type_ GUARDED_BY(crit_sect_);
- uint16_t codec_width_ GUARDED_BY(crit_sect_);
- uint16_t codec_height_ GUARDED_BY(crit_sect_);
- float user_frame_rate_ GUARDED_BY(crit_sect_);
- rtc::scoped_ptr<FrameDropper> frame_dropper_ GUARDED_BY(crit_sect_);
- rtc::scoped_ptr<VCMLossProtectionLogic> loss_prot_logic_
- GUARDED_BY(crit_sect_);
- uint8_t fraction_lost_ GUARDED_BY(crit_sect_);
- uint32_t send_statistics_[4] GUARDED_BY(crit_sect_);
- uint32_t send_statistics_zero_encode_ GUARDED_BY(crit_sect_);
- int32_t max_payload_size_ GUARDED_BY(crit_sect_);
- int video_target_bitrate_ GUARDED_BY(crit_sect_);
- float incoming_frame_rate_ GUARDED_BY(crit_sect_);
- int64_t incoming_frame_times_[kFrameCountHistorySize] GUARDED_BY(crit_sect_);
- bool enable_qm_ GUARDED_BY(crit_sect_);
- std::list<EncodedFrameSample> encoded_frame_samples_ GUARDED_BY(crit_sect_);
- uint32_t avg_sent_bit_rate_bps_ GUARDED_BY(crit_sect_);
- uint32_t avg_sent_framerate_ GUARDED_BY(crit_sect_);
- uint32_t key_frame_cnt_ GUARDED_BY(crit_sect_);
- uint32_t delta_frame_cnt_ GUARDED_BY(crit_sect_);
- rtc::scoped_ptr<VCMContentMetricsProcessing> content_ GUARDED_BY(crit_sect_);
- rtc::scoped_ptr<VCMQmResolution> qm_resolution_ GUARDED_BY(crit_sect_);
- int64_t last_qm_update_time_ GUARDED_BY(crit_sect_);
- int64_t last_change_time_ GUARDED_BY(crit_sect_); // Content/user triggered.
- int num_layers_ GUARDED_BY(crit_sect_);
- bool suspension_enabled_ GUARDED_BY(crit_sect_);
- bool video_suspended_ GUARDED_BY(crit_sect_);
- int suspension_threshold_bps_ GUARDED_BY(crit_sect_);
- int suspension_window_bps_ GUARDED_BY(crit_sect_);
-};
-} // namespace media_optimization
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_MEDIA_OPTIMIZATION_H_
diff --git a/webrtc/modules/video_coding/main/source/media_optimization_unittest.cc b/webrtc/modules/video_coding/main/source/media_optimization_unittest.cc
deleted file mode 100644
index be528d9932..0000000000
--- a/webrtc/modules/video_coding/main/source/media_optimization_unittest.cc
+++ /dev/null
@@ -1,155 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/video_coding/main/source/media_optimization.h"
-#include "webrtc/system_wrappers/include/clock.h"
-
-namespace webrtc {
-namespace media_optimization {
-
-class TestMediaOptimization : public ::testing::Test {
- protected:
- enum {
- kSampleRate = 90000 // RTP timestamps per second.
- };
-
- // Note: the simulated clock starts at 1 second, since parts of webrtc use 0
- // as a special case (e.g. frame rate in media optimization).
- TestMediaOptimization()
- : clock_(1000),
- media_opt_(&clock_),
- frame_time_ms_(33),
- next_timestamp_(0) {}
-
- // This method mimics what happens in VideoSender::AddVideoFrame.
- void AddFrameAndAdvanceTime(uint32_t bitrate_bps, bool expect_frame_drop) {
- bool frame_dropped = media_opt_.DropFrame();
- EXPECT_EQ(expect_frame_drop, frame_dropped);
- if (!frame_dropped) {
- size_t bytes_per_frame = bitrate_bps * frame_time_ms_ / (8 * 1000);
- EncodedImage encoded_image;
- encoded_image._length = bytes_per_frame;
- encoded_image._timeStamp = next_timestamp_;
- encoded_image._frameType = kVideoFrameKey;
- ASSERT_EQ(VCM_OK, media_opt_.UpdateWithEncodedData(encoded_image));
- }
- next_timestamp_ += frame_time_ms_ * kSampleRate / 1000;
- clock_.AdvanceTimeMilliseconds(frame_time_ms_);
- }
-
- SimulatedClock clock_;
- MediaOptimization media_opt_;
- int frame_time_ms_;
- uint32_t next_timestamp_;
-};
-
-
-TEST_F(TestMediaOptimization, VerifyMuting) {
- // Enable video suspension with these limits.
- // Suspend the video when the rate is below 50 kbps and resume when it gets
- // above 50 + 10 kbps again.
- const uint32_t kThresholdBps = 50000;
- const uint32_t kWindowBps = 10000;
- media_opt_.SuspendBelowMinBitrate(kThresholdBps, kWindowBps);
-
- // The video should not be suspended from the start.
- EXPECT_FALSE(media_opt_.IsVideoSuspended());
-
- uint32_t target_bitrate_kbps = 100;
- media_opt_.SetTargetRates(target_bitrate_kbps * 1000,
- 0, // Lossrate.
- 100, // RTT in ms.
- nullptr, nullptr);
- media_opt_.EnableFrameDropper(true);
- for (int time = 0; time < 2000; time += frame_time_ms_) {
- ASSERT_NO_FATAL_FAILURE(AddFrameAndAdvanceTime(target_bitrate_kbps, false));
- }
-
- // Set the target rate below the limit for muting.
- media_opt_.SetTargetRates(kThresholdBps - 1000,
- 0, // Lossrate.
- 100, // RTT in ms.
- nullptr, nullptr);
- // Expect the muter to engage immediately and stay muted.
- // Test during 2 seconds.
- for (int time = 0; time < 2000; time += frame_time_ms_) {
- EXPECT_TRUE(media_opt_.IsVideoSuspended());
- ASSERT_NO_FATAL_FAILURE(AddFrameAndAdvanceTime(target_bitrate_kbps, true));
- }
-
- // Set the target above the limit for muting, but not above the
- // limit + window.
- media_opt_.SetTargetRates(kThresholdBps + 1000,
- 0, // Lossrate.
- 100, // RTT in ms.
- nullptr, nullptr);
- // Expect the muter to stay muted.
- // Test during 2 seconds.
- for (int time = 0; time < 2000; time += frame_time_ms_) {
- EXPECT_TRUE(media_opt_.IsVideoSuspended());
- ASSERT_NO_FATAL_FAILURE(AddFrameAndAdvanceTime(target_bitrate_kbps, true));
- }
-
- // Set the target above limit + window.
- media_opt_.SetTargetRates(kThresholdBps + kWindowBps + 1000,
- 0, // Lossrate.
- 100, // RTT in ms.
- nullptr, nullptr);
- // Expect the muter to disengage immediately.
- // Test during 2 seconds.
- for (int time = 0; time < 2000; time += frame_time_ms_) {
- EXPECT_FALSE(media_opt_.IsVideoSuspended());
- ASSERT_NO_FATAL_FAILURE(
- AddFrameAndAdvanceTime((kThresholdBps + kWindowBps) / 1000, false));
- }
-}
-
-TEST_F(TestMediaOptimization, ProtectsUsingFecBitrateAboveCodecMax) {
- static const int kCodecBitrateBps = 100000;
- static const int kMaxBitrateBps = 130000;
-
- class ProtectionCallback : public VCMProtectionCallback {
- int ProtectionRequest(const FecProtectionParams* delta_params,
- const FecProtectionParams* key_params,
- uint32_t* sent_video_rate_bps,
- uint32_t* sent_nack_rate_bps,
- uint32_t* sent_fec_rate_bps) override {
- *sent_video_rate_bps = kCodecBitrateBps;
- *sent_nack_rate_bps = 0;
- *sent_fec_rate_bps = fec_rate_bps_;
- return 0;
- }
-
- public:
- uint32_t fec_rate_bps_;
- } protection_callback;
-
- media_opt_.SetProtectionMethod(kFec);
- media_opt_.SetEncodingData(kVideoCodecVP8, kCodecBitrateBps, kCodecBitrateBps,
- 640, 480, 30, 1, 1000);
-
- // Using 10% of codec bitrate for FEC, should still be able to use all of it.
- protection_callback.fec_rate_bps_ = kCodecBitrateBps / 10;
- uint32_t target_bitrate = media_opt_.SetTargetRates(
- kMaxBitrateBps, 0, 0, &protection_callback, nullptr);
-
- EXPECT_EQ(kCodecBitrateBps, static_cast<int>(target_bitrate));
-
- // Using as much FEC rate as codec bitrate, the new target rate should be
- // shared equally, but capped at half of max (the 50% overhead ceiling).
- protection_callback.fec_rate_bps_ = kCodecBitrateBps;
- target_bitrate = media_opt_.SetTargetRates(kMaxBitrateBps, 128, 100,
- &protection_callback, nullptr);
- EXPECT_EQ(kMaxBitrateBps / 2, static_cast<int>(target_bitrate));
-}
-
-} // namespace media_optimization
-} // namespace webrtc
diff --git a/webrtc/modules/video_coding/main/source/nack_fec_tables.h b/webrtc/modules/video_coding/main/source/nack_fec_tables.h
deleted file mode 100644
index b82bb1b4ba..0000000000
--- a/webrtc/modules/video_coding/main/source/nack_fec_tables.h
+++ /dev/null
@@ -1,126 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_SOURCE_NACK_FEC_TABLES_H_
-#define WEBRTC_MODULES_VIDEO_CODING_SOURCE_NACK_FEC_TABLES_H_
-
-namespace webrtc
-{
-
-// Table for adjusting FEC rate for NACK/FEC protection method
-// Table values are built as a sigmoid function, ranging from 0 to 100, based on
-// the HybridNackTH values defined in media_opt_util.h.
-const uint16_t VCMNackFecTable[100] = {
-  0,   0,   0,   0,   0,   0,   0,   0,   0,   0,
-  0,   1,   1,   1,   1,   1,   2,   2,   2,   3,
-  3,   4,   5,   6,   7,   9,  10,  12,  15,  18,
- 21,  24,  28,  32,  37,  41,  46,  51,  56,  61,
- 66,  70,  74,  78,  81,  84,  86,  89,  90,  92,
- 93,  95,  95,  96,  97,  97,  98,  98,  99,  99,
- 99,  99,  99,  99, 100, 100, 100, 100, 100, 100,
-100, 100, 100, 100, 100, 100, 100, 100, 100, 100,
-100, 100, 100, 100, 100, 100, 100, 100, 100, 100,
-100, 100, 100, 100, 100, 100, 100, 100, 100, 100,
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_SOURCE_NACK_FEC_TABLES_H_
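
A hedged sketch of a lookup into the table above; the actual indexing is performed by the hybrid NACK/FEC protection method in media_opt_util.cc and may transform the loss value first, so the helper name and clamping here are assumptions:

#include <algorithm>
#include <cstdint>

// Hypothetical helper: map a filtered packet-loss index to the 0-100
// FEC-rate adjustment, clamping to the table bounds.
uint16_t NackFecAdjustment(uint8_t loss_index) {
  return VCMNackFecTable[std::min<int>(loss_index, 99)];
}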
diff --git a/webrtc/modules/video_coding/main/source/packet.cc b/webrtc/modules/video_coding/main/source/packet.cc
deleted file mode 100644
index fd5a6abb8c..0000000000
--- a/webrtc/modules/video_coding/main/source/packet.cc
+++ /dev/null
@@ -1,154 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/video_coding/main/source/packet.h"
-
-#include <assert.h>
-
-namespace webrtc {
-
-VCMPacket::VCMPacket()
- : payloadType(0),
- timestamp(0),
- ntp_time_ms_(0),
- seqNum(0),
- dataPtr(NULL),
- sizeBytes(0),
- markerBit(false),
- frameType(kEmptyFrame),
- codec(kVideoCodecUnknown),
- isFirstPacket(false),
- completeNALU(kNaluUnset),
- insertStartCode(false),
- width(0),
- height(0),
- codecSpecificHeader() {}
-
-VCMPacket::VCMPacket(const uint8_t* ptr,
- const size_t size,
- const WebRtcRTPHeader& rtpHeader) :
- payloadType(rtpHeader.header.payloadType),
- timestamp(rtpHeader.header.timestamp),
- ntp_time_ms_(rtpHeader.ntp_time_ms),
- seqNum(rtpHeader.header.sequenceNumber),
- dataPtr(ptr),
- sizeBytes(size),
- markerBit(rtpHeader.header.markerBit),
-
- frameType(rtpHeader.frameType),
- codec(kVideoCodecUnknown),
- isFirstPacket(rtpHeader.type.Video.isFirstPacket),
- completeNALU(kNaluComplete),
- insertStartCode(false),
- width(rtpHeader.type.Video.width),
- height(rtpHeader.type.Video.height),
- codecSpecificHeader(rtpHeader.type.Video)
-{
- CopyCodecSpecifics(rtpHeader.type.Video);
-}
-
-VCMPacket::VCMPacket(const uint8_t* ptr,
- size_t size,
- uint16_t seq,
- uint32_t ts,
- bool mBit) :
- payloadType(0),
- timestamp(ts),
- ntp_time_ms_(0),
- seqNum(seq),
- dataPtr(ptr),
- sizeBytes(size),
- markerBit(mBit),
-
- frameType(kVideoFrameDelta),
- codec(kVideoCodecUnknown),
- isFirstPacket(false),
- completeNALU(kNaluComplete),
- insertStartCode(false),
- width(0),
- height(0),
- codecSpecificHeader()
-{}
-
-void VCMPacket::Reset() {
- payloadType = 0;
- timestamp = 0;
- ntp_time_ms_ = 0;
- seqNum = 0;
- dataPtr = NULL;
- sizeBytes = 0;
- markerBit = false;
- frameType = kEmptyFrame;
- codec = kVideoCodecUnknown;
- isFirstPacket = false;
- completeNALU = kNaluUnset;
- insertStartCode = false;
- width = 0;
- height = 0;
- memset(&codecSpecificHeader, 0, sizeof(RTPVideoHeader));
-}
-
-void VCMPacket::CopyCodecSpecifics(const RTPVideoHeader& videoHeader) {
- if (markerBit) {
- codecSpecificHeader.rotation = videoHeader.rotation;
- }
- switch (videoHeader.codec) {
- case kRtpVideoVp8:
- // Handle all packets within a frame as depending on the previous packet.
- // TODO(holmer): This should be changed to make fragments independent
- // when the VP8 RTP receiver supports fragments.
- if (isFirstPacket && markerBit)
- completeNALU = kNaluComplete;
- else if (isFirstPacket)
- completeNALU = kNaluStart;
- else if (markerBit)
- completeNALU = kNaluEnd;
- else
- completeNALU = kNaluIncomplete;
-
- codec = kVideoCodecVP8;
- return;
- case kRtpVideoVp9:
- if (isFirstPacket && markerBit)
- completeNALU = kNaluComplete;
- else if (isFirstPacket)
- completeNALU = kNaluStart;
- else if (markerBit)
- completeNALU = kNaluEnd;
- else
- completeNALU = kNaluIncomplete;
-
- codec = kVideoCodecVP9;
- return;
- case kRtpVideoH264:
- isFirstPacket = videoHeader.isFirstPacket;
- if (isFirstPacket)
- insertStartCode = true;
-
- if (isFirstPacket && markerBit) {
- completeNALU = kNaluComplete;
- } else if (isFirstPacket) {
- completeNALU = kNaluStart;
- } else if (markerBit) {
- completeNALU = kNaluEnd;
- } else {
- completeNALU = kNaluIncomplete;
- }
- codec = kVideoCodecH264;
- return;
- case kRtpVideoGeneric:
- case kRtpVideoNone:
- codec = kVideoCodecUnknown;
- return;
- }
-}
-
-} // namespace webrtc
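
CopyCodecSpecifics() repeats the same four-way (isFirstPacket, markerBit) mapping for VP8, VP9, and H.264. A refactoring sketch of that shared mapping (not code that existed in this file; the kNalu* values come from jitter_buffer_common.h, included via packet.h):

VCMNaluCompleteness NaluCompleteness(bool is_first_packet, bool marker_bit) {
  if (is_first_packet && marker_bit)
    return kNaluComplete;  // Single-packet frame.
  if (is_first_packet)
    return kNaluStart;     // First packet of a multi-packet frame.
  if (marker_bit)
    return kNaluEnd;       // Last packet of a multi-packet frame.
  return kNaluIncomplete;  // Packet in the middle of a frame.
}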
diff --git a/webrtc/modules/video_coding/main/source/packet.h b/webrtc/modules/video_coding/main/source/packet.h
deleted file mode 100644
index 80bf532502..0000000000
--- a/webrtc/modules/video_coding/main/source/packet.h
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_PACKET_H_
-#define WEBRTC_MODULES_VIDEO_CODING_PACKET_H_
-
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/video_coding/main/source/jitter_buffer_common.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-class VCMPacket {
-public:
- VCMPacket();
- VCMPacket(const uint8_t* ptr,
- const size_t size,
- const WebRtcRTPHeader& rtpHeader);
- VCMPacket(const uint8_t* ptr,
- size_t size,
- uint16_t seqNum,
- uint32_t timestamp,
- bool markerBit);
-
- void Reset();
-
- uint8_t payloadType;
- uint32_t timestamp;
- // NTP time of the capture time in local timebase in milliseconds.
- int64_t ntp_time_ms_;
- uint16_t seqNum;
- const uint8_t* dataPtr;
- size_t sizeBytes;
- bool markerBit;
-
- FrameType frameType;
- VideoCodecType codec;
-
- bool isFirstPacket; // Is this the first packet in a frame?
- VCMNaluCompleteness completeNALU; // Default is kNaluIncomplete.
- bool insertStartCode; // True if a start code should be inserted before this
- // packet.
- int width;
- int height;
- RTPVideoHeader codecSpecificHeader;
-
-protected:
- void CopyCodecSpecifics(const RTPVideoHeader& videoHeader);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_VIDEO_CODING_PACKET_H_
diff --git a/webrtc/modules/video_coding/main/source/qm_select.cc b/webrtc/modules/video_coding/main/source/qm_select.cc
deleted file mode 100644
index e86d0755c0..0000000000
--- a/webrtc/modules/video_coding/main/source/qm_select.cc
+++ /dev/null
@@ -1,958 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_coding/main/source/qm_select.h"
-
-#include <math.h>
-
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding_defines.h"
-#include "webrtc/modules/video_coding/main/source/internal_defines.h"
-#include "webrtc/modules/video_coding/main/source/qm_select_data.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-namespace webrtc {
-
-// QM-METHOD class
-
-VCMQmMethod::VCMQmMethod()
- : content_metrics_(NULL),
- width_(0),
- height_(0),
- user_frame_rate_(0.0f),
- native_width_(0),
- native_height_(0),
- native_frame_rate_(0.0f),
- image_type_(kVGA),
- framerate_level_(kFrameRateHigh),
- init_(false) {
- ResetQM();
-}
-
-VCMQmMethod::~VCMQmMethod() {
-}
-
-void VCMQmMethod::ResetQM() {
- aspect_ratio_ = 1.0f;
- motion_.Reset();
- spatial_.Reset();
- content_class_ = 0;
-}
-
-uint8_t VCMQmMethod::ComputeContentClass() {
- ComputeMotionNFD();
- ComputeSpatial();
- return content_class_ = 3 * motion_.level + spatial_.level;
-}
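
The returned index packs two three-level classifications into a single value in [0, 8]; tables later in this file (kSpatialAction, kTemporalAction) are indexed by it plus 9 times a rate class. A one-line sketch, assuming the motion and spatial levels are numbered 0 through 2 (the exact enum ordering lives in qm_select.h):

// Sketch: the 3x3 content grid behind the class index.
inline int ContentClassIndex(int motion_level, int spatial_level) {
  return 3 * motion_level + spatial_level;  // Values 0..8.
}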
-
-void VCMQmMethod::UpdateContent(const VideoContentMetrics* contentMetrics) {
- content_metrics_ = contentMetrics;
-}
-
-void VCMQmMethod::ComputeMotionNFD() {
- if (content_metrics_) {
- motion_.value = content_metrics_->motion_magnitude;
- }
- // Determine motion level.
- if (motion_.value < kLowMotionNfd) {
- motion_.level = kLow;
- } else if (motion_.value > kHighMotionNfd) {
- motion_.level = kHigh;
- } else {
- motion_.level = kDefault;
- }
-}
-
-void VCMQmMethod::ComputeSpatial() {
- float spatial_err = 0.0;
- float spatial_err_h = 0.0;
- float spatial_err_v = 0.0;
- if (content_metrics_) {
- spatial_err = content_metrics_->spatial_pred_err;
- spatial_err_h = content_metrics_->spatial_pred_err_h;
- spatial_err_v = content_metrics_->spatial_pred_err_v;
- }
- // Spatial measure: take average of 3 prediction errors.
- spatial_.value = (spatial_err + spatial_err_h + spatial_err_v) / 3.0f;
-
- // Reduce thresholds for large scenes/higher pixel correlation.
- float scale2 = image_type_ > kVGA ? kScaleTexture : 1.0;
-
- if (spatial_.value > scale2 * kHighTexture) {
- spatial_.level = kHigh;
- } else if (spatial_.value < scale2 * kLowTexture) {
- spatial_.level = kLow;
- } else {
- spatial_.level = kDefault;
- }
-}
-
-ImageType VCMQmMethod::GetImageType(uint16_t width,
- uint16_t height) {
- // Get the image type for the encoder frame size.
- uint32_t image_size = width * height;
- if (image_size == kSizeOfImageType[kQCIF]) {
- return kQCIF;
- } else if (image_size == kSizeOfImageType[kHCIF]) {
- return kHCIF;
- } else if (image_size == kSizeOfImageType[kQVGA]) {
- return kQVGA;
- } else if (image_size == kSizeOfImageType[kCIF]) {
- return kCIF;
- } else if (image_size == kSizeOfImageType[kHVGA]) {
- return kHVGA;
- } else if (image_size == kSizeOfImageType[kVGA]) {
- return kVGA;
- } else if (image_size == kSizeOfImageType[kQFULLHD]) {
- return kQFULLHD;
- } else if (image_size == kSizeOfImageType[kWHD]) {
- return kWHD;
- } else if (image_size == kSizeOfImageType[kFULLHD]) {
- return kFULLHD;
- } else {
- // No exact match, find the closest one.
- return FindClosestImageType(width, height);
- }
-}
-
-ImageType VCMQmMethod::FindClosestImageType(uint16_t width, uint16_t height) {
- float size = static_cast<float>(width * height);
- float min = size;
- int isel = 0;
- for (int i = 0; i < kNumImageTypes; ++i) {
- float dist = fabs(size - kSizeOfImageType[i]);
- if (dist < min) {
- min = dist;
- isel = i;
- }
- }
- return static_cast<ImageType>(isel);
-}
-
-FrameRateLevelClass VCMQmMethod::FrameRateLevel(float avg_framerate) {
- if (avg_framerate <= kLowFrameRate) {
- return kFrameRateLow;
- } else if (avg_framerate <= kMiddleFrameRate) {
- return kFrameRateMiddle1;
- } else if (avg_framerate <= kHighFrameRate) {
- return kFrameRateMiddle2;
- } else {
- return kFrameRateHigh;
- }
-}
-
-// RESOLUTION CLASS
-
-VCMQmResolution::VCMQmResolution()
- : qm_(new VCMResolutionScale()) {
- Reset();
-}
-
-VCMQmResolution::~VCMQmResolution() {
- delete qm_;
-}
-
-void VCMQmResolution::ResetRates() {
- sum_target_rate_ = 0.0f;
- sum_incoming_framerate_ = 0.0f;
- sum_rate_MM_ = 0.0f;
- sum_rate_MM_sgn_ = 0.0f;
- sum_packet_loss_ = 0.0f;
- buffer_level_ = kInitBufferLevel * target_bitrate_;
- frame_cnt_ = 0;
- frame_cnt_delta_ = 0;
- low_buffer_cnt_ = 0;
- update_rate_cnt_ = 0;
-}
-
-void VCMQmResolution::ResetDownSamplingState() {
- state_dec_factor_spatial_ = 1.0;
- state_dec_factor_temporal_ = 1.0;
- for (int i = 0; i < kDownActionHistorySize; i++) {
- down_action_history_[i].spatial = kNoChangeSpatial;
- down_action_history_[i].temporal = kNoChangeTemporal;
- }
-}
-
-void VCMQmResolution::Reset() {
- target_bitrate_ = 0.0f;
- incoming_framerate_ = 0.0f;
- buffer_level_ = 0.0f;
- per_frame_bandwidth_ = 0.0f;
- avg_target_rate_ = 0.0f;
- avg_incoming_framerate_ = 0.0f;
- avg_ratio_buffer_low_ = 0.0f;
- avg_rate_mismatch_ = 0.0f;
- avg_rate_mismatch_sgn_ = 0.0f;
- avg_packet_loss_ = 0.0f;
- encoder_state_ = kStableEncoding;
- num_layers_ = 1;
- ResetRates();
- ResetDownSamplingState();
- ResetQM();
-}
-
-EncoderState VCMQmResolution::GetEncoderState() {
- return encoder_state_;
-}
-
-// Initialize state after re-initializing the encoder,
-// i.e., after SetEncodingData() in mediaOpt.
-int VCMQmResolution::Initialize(float bitrate,
- float user_framerate,
- uint16_t width,
- uint16_t height,
- int num_layers) {
- if (user_framerate == 0.0f || width == 0 || height == 0) {
- return VCM_PARAMETER_ERROR;
- }
- Reset();
- target_bitrate_ = bitrate;
- incoming_framerate_ = user_framerate;
- UpdateCodecParameters(user_framerate, width, height);
- native_width_ = width;
- native_height_ = height;
- native_frame_rate_ = user_framerate;
- num_layers_ = num_layers;
- // Initial buffer level.
- buffer_level_ = kInitBufferLevel * target_bitrate_;
- // Per-frame bandwidth.
- per_frame_bandwidth_ = target_bitrate_ / user_framerate;
- init_ = true;
- return VCM_OK;
-}
-
-void VCMQmResolution::UpdateCodecParameters(float frame_rate, uint16_t width,
- uint16_t height) {
- width_ = width;
- height_ = height;
- // |user_frame_rate_| is the target frame rate used by the VPM frame dropper.
- user_frame_rate_ = frame_rate;
- image_type_ = GetImageType(width, height);
-}
-
-// Update rate data after every encoded frame.
-void VCMQmResolution::UpdateEncodedSize(size_t encoded_size) {
- frame_cnt_++;
- // Convert to Kbps.
- float encoded_size_kbits = 8.0f * static_cast<float>(encoded_size) / 1000.0f;
-
- // Update the buffer level:
- // Note this is not the actual encoder buffer level.
- // |buffer_level_| is reset to an initial value after SelectResolution is
- // called, and does not account for frame dropping by encoder or VCM.
- buffer_level_ += per_frame_bandwidth_ - encoded_size_kbits;
-
- // Counter for occurrences of low buffer level:
- // low/negative values means encoder is likely dropping frames.
- if (buffer_level_ <= kPercBufferThr * kInitBufferLevel * target_bitrate_) {
- low_buffer_cnt_++;
- }
-}
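
The buffer model above is virtual bookkeeping in kbits: each frame adds its per-frame budget and subtracts its actual size. For example, at 300 kbps and 30 fps the per-frame bandwidth is 10 kbits, so a 12-kbit (1,500-byte) frame drains the level by 2 kbits. A compact sketch, names illustrative:

#include <cstddef>

// Virtual encoder-buffer bookkeeping (units: kbits), as maintained above.
struct VirtualBuffer {
  float per_frame_kbits;  // target_kbps / incoming_fps.
  float level_kbits;      // Starts at kInitBufferLevel * target_kbps.

  // Returns true when the level is low, i.e. the encoder likely drops frames.
  bool AddFrame(size_t encoded_bytes, float low_threshold_kbits) {
    level_kbits += per_frame_kbits - 8.0f * encoded_bytes / 1000.0f;
    return level_kbits <= low_threshold_kbits;
  }
};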
-
-// Update various quantities after SetTargetRates in MediaOpt.
-void VCMQmResolution::UpdateRates(float target_bitrate,
- float encoder_sent_rate,
- float incoming_framerate,
- uint8_t packet_loss) {
- // Sum the target bitrate: this is the encoder rate from previous update
- // (~1sec), i.e., before the update for the next ~1sec.
- sum_target_rate_ += target_bitrate_;
- update_rate_cnt_++;
-
- // Sum the received (from RTCP reports) packet loss rates.
- sum_packet_loss_ += static_cast<float>(packet_loss / 255.0);
-
- // Sum the sequence rate mismatch:
- // Mismatch here is based on the difference between the target rate
- // used (in previous ~1sec) and the average actual encoding rate measured
- // at previous ~1sec.
- float diff = target_bitrate_ - encoder_sent_rate;
- if (target_bitrate_ > 0.0)
- sum_rate_MM_ += fabs(diff) / target_bitrate_;
- int sgnDiff = diff > 0 ? 1 : (diff < 0 ? -1 : 0);
- // To check for consistent under(+)/over_shooting(-) of target rate.
- sum_rate_MM_sgn_ += sgnDiff;
-
- // Update with the current new target and frame rate:
- // these values are ones the encoder will use for the current/next ~1sec.
- target_bitrate_ = target_bitrate;
- incoming_framerate_ = incoming_framerate;
- sum_incoming_framerate_ += incoming_framerate_;
- // Update the per_frame_bandwidth:
- // this is the per_frame_bw for the current/next ~1sec.
- per_frame_bandwidth_ = 0.0f;
- if (incoming_framerate_ > 0.0f) {
- per_frame_bandwidth_ = target_bitrate_ / incoming_framerate_;
- }
-}
-
-// Select the resolution factors: frame size and frame rate change (qm scales).
-// Selection is for going down in resolution, or for going back up
-// (if a previous down-sampling action was taken).
-
-// In the current version the following constraints are imposed:
-// 1) We only allow for one action, either down or up, at a given time.
-// 2) The possible down-sampling actions are: spatial by 1/2x1/2, 3/4x3/4;
-// temporal/frame rate reduction by 1/2 and 2/3.
-// 3) The action for going back up is the reverse of last (spatial or temporal)
-// down-sampling action. The list of down-sampling actions from the
-// Initialize() state are kept in |down_action_history_|.
-// 4) The total amount of down-sampling (spatial and/or temporal) from the
-// Initialize() state (native resolution) is limited by various factors.
-int VCMQmResolution::SelectResolution(VCMResolutionScale** qm) {
- if (!init_) {
- return VCM_UNINITIALIZED;
- }
- if (content_metrics_ == NULL) {
- Reset();
- *qm = qm_;
- return VCM_OK;
- }
-
- // Check conditions on down-sampling state.
- assert(state_dec_factor_spatial_ >= 1.0f);
- assert(state_dec_factor_temporal_ >= 1.0f);
- assert(state_dec_factor_spatial_ <= kMaxSpatialDown);
- assert(state_dec_factor_temporal_ <= kMaxTempDown);
- assert(state_dec_factor_temporal_ * state_dec_factor_spatial_ <=
- kMaxTotalDown);
-
- // Compute content class for selection.
- content_class_ = ComputeContentClass();
- // Compute various rate quantities for selection.
- ComputeRatesForSelection();
-
- // Get the encoder state.
- ComputeEncoderState();
-
- // Default settings: no action.
- SetDefaultAction();
- *qm = qm_;
-
- // Check for going back up in resolution, if we have had some down-sampling
- // relative to native state in Initialize().
- if (down_action_history_[0].spatial != kNoChangeSpatial ||
- down_action_history_[0].temporal != kNoChangeTemporal) {
- if (GoingUpResolution()) {
- *qm = qm_;
- return VCM_OK;
- }
- }
-
- // Check for going down in resolution.
- if (GoingDownResolution()) {
- *qm = qm_;
- return VCM_OK;
- }
- return VCM_OK;
-}
-
-void VCMQmResolution::SetDefaultAction() {
- qm_->codec_width = width_;
- qm_->codec_height = height_;
- qm_->frame_rate = user_frame_rate_;
- qm_->change_resolution_spatial = false;
- qm_->change_resolution_temporal = false;
- qm_->spatial_width_fact = 1.0f;
- qm_->spatial_height_fact = 1.0f;
- qm_->temporal_fact = 1.0f;
- action_.spatial = kNoChangeSpatial;
- action_.temporal = kNoChangeTemporal;
-}
-
-void VCMQmResolution::ComputeRatesForSelection() {
- avg_target_rate_ = 0.0f;
- avg_incoming_framerate_ = 0.0f;
- avg_ratio_buffer_low_ = 0.0f;
- avg_rate_mismatch_ = 0.0f;
- avg_rate_mismatch_sgn_ = 0.0f;
- avg_packet_loss_ = 0.0f;
- if (frame_cnt_ > 0) {
- avg_ratio_buffer_low_ = static_cast<float>(low_buffer_cnt_) /
- static_cast<float>(frame_cnt_);
- }
- if (update_rate_cnt_ > 0) {
- avg_rate_mismatch_ = static_cast<float>(sum_rate_MM_) /
- static_cast<float>(update_rate_cnt_);
- avg_rate_mismatch_sgn_ = static_cast<float>(sum_rate_MM_sgn_) /
- static_cast<float>(update_rate_cnt_);
- avg_target_rate_ = static_cast<float>(sum_target_rate_) /
- static_cast<float>(update_rate_cnt_);
- avg_incoming_framerate_ = static_cast<float>(sum_incoming_framerate_) /
- static_cast<float>(update_rate_cnt_);
- avg_packet_loss_ = static_cast<float>(sum_packet_loss_) /
- static_cast<float>(update_rate_cnt_);
- }
- // For selection we may want to weight some quantities more heavily
- // with the current (i.e., next ~1sec) rate values.
- avg_target_rate_ = kWeightRate * avg_target_rate_ +
- (1.0 - kWeightRate) * target_bitrate_;
- avg_incoming_framerate_ = kWeightRate * avg_incoming_framerate_ +
- (1.0 - kWeightRate) * incoming_framerate_;
- // Use base layer frame rate for temporal layers: this will favor spatial.
- assert(num_layers_ > 0);
- framerate_level_ = FrameRateLevel(
- avg_incoming_framerate_ / static_cast<float>(1 << (num_layers_ - 1)));
-}
-
-void VCMQmResolution::ComputeEncoderState() {
- // Default.
- encoder_state_ = kStableEncoding;
-
- // Assign stressed state if:
- // 1) occurrences of low buffer levels is high, or
- // 2) rate mis-match is high, and consistent over-shooting by encoder.
- if ((avg_ratio_buffer_low_ > kMaxBufferLow) ||
- ((avg_rate_mismatch_ > kMaxRateMisMatch) &&
- (avg_rate_mismatch_sgn_ < -kRateOverShoot))) {
- encoder_state_ = kStressedEncoding;
- }
- // Assign easy state if:
- // 1) rate mis-match is high, and
- // 2) consistent under-shooting by encoder.
- if ((avg_rate_mismatch_ > kMaxRateMisMatch) &&
- (avg_rate_mismatch_sgn_ > kRateUnderShoot)) {
- encoder_state_ = kEasyEncoding;
- }
-}
-
-bool VCMQmResolution::GoingUpResolution() {
- // For going up, we check for undoing the previous down-sampling action.
-
- float fac_width = kFactorWidthSpatial[down_action_history_[0].spatial];
- float fac_height = kFactorHeightSpatial[down_action_history_[0].spatial];
- float fac_temp = kFactorTemporal[down_action_history_[0].temporal];
- // For going up spatially, we allow for going up by 3/4x3/4 at each stage.
- // So if the last spatial action was 1/2x1/2 it would be undone in 2 stages.
- // Modify the fac_width/height for this case.
- if (down_action_history_[0].spatial == kOneQuarterSpatialUniform) {
- fac_width = kFactorWidthSpatial[kOneQuarterSpatialUniform] /
- kFactorWidthSpatial[kOneHalfSpatialUniform];
- fac_height = kFactorHeightSpatial[kOneQuarterSpatialUniform] /
- kFactorHeightSpatial[kOneHalfSpatialUniform];
- }
-
- // Check if we should go up both spatially and temporally.
- if (down_action_history_[0].spatial != kNoChangeSpatial &&
- down_action_history_[0].temporal != kNoChangeTemporal) {
- if (ConditionForGoingUp(fac_width, fac_height, fac_temp,
- kTransRateScaleUpSpatialTemp)) {
- action_.spatial = down_action_history_[0].spatial;
- action_.temporal = down_action_history_[0].temporal;
- UpdateDownsamplingState(kUpResolution);
- return true;
- }
- }
- // Check if we should go up either spatially or temporally.
- bool selected_up_spatial = false;
- bool selected_up_temporal = false;
- if (down_action_history_[0].spatial != kNoChangeSpatial) {
- selected_up_spatial = ConditionForGoingUp(fac_width, fac_height, 1.0f,
- kTransRateScaleUpSpatial);
- }
- if (down_action_history_[0].temporal != kNoChangeTemporal) {
- selected_up_temporal = ConditionForGoingUp(1.0f, 1.0f, fac_temp,
- kTransRateScaleUpTemp);
- }
- if (selected_up_spatial && !selected_up_temporal) {
- action_.spatial = down_action_history_[0].spatial;
- action_.temporal = kNoChangeTemporal;
- UpdateDownsamplingState(kUpResolution);
- return true;
- } else if (!selected_up_spatial && selected_up_temporal) {
- action_.spatial = kNoChangeSpatial;
- action_.temporal = down_action_history_[0].temporal;
- UpdateDownsamplingState(kUpResolution);
- return true;
- } else if (selected_up_spatial && selected_up_temporal) {
- PickSpatialOrTemporal();
- UpdateDownsamplingState(kUpResolution);
- return true;
- }
- return false;
-}
-
-bool VCMQmResolution::ConditionForGoingUp(float fac_width,
- float fac_height,
- float fac_temp,
- float scale_fac) {
- float estimated_transition_rate_up = GetTransitionRate(fac_width, fac_height,
- fac_temp, scale_fac);
- // Go back up if:
- // 1) target rate is above threshold and current encoder state is stable, or
- // 2) encoder state is easy (encoder is significantly under-shooting target).
- if (((avg_target_rate_ > estimated_transition_rate_up) &&
- (encoder_state_ == kStableEncoding)) ||
- (encoder_state_ == kEasyEncoding)) {
- return true;
- } else {
- return false;
- }
-}
-
-bool VCMQmResolution::GoingDownResolution() {
- float estimated_transition_rate_down =
- GetTransitionRate(1.0f, 1.0f, 1.0f, 1.0f);
- float max_rate = kFrameRateFac[framerate_level_] * kMaxRateQm[image_type_];
- // Resolution reduction if:
- // (1) target rate is below transition rate, or
- // (2) encoder is in stressed state and target rate below a max threshold.
- if ((avg_target_rate_ < estimated_transition_rate_down ) ||
- (encoder_state_ == kStressedEncoding && avg_target_rate_ < max_rate)) {
- // Get the down-sampling action: based on content class, and how low
- // average target rate is relative to transition rate.
- uint8_t spatial_fact =
- kSpatialAction[content_class_ +
- 9 * RateClass(estimated_transition_rate_down)];
- uint8_t temp_fact =
- kTemporalAction[content_class_ +
- 9 * RateClass(estimated_transition_rate_down)];
-
- switch (spatial_fact) {
- case 4: {
- action_.spatial = kOneQuarterSpatialUniform;
- break;
- }
- case 2: {
- action_.spatial = kOneHalfSpatialUniform;
- break;
- }
- case 1: {
- action_.spatial = kNoChangeSpatial;
- break;
- }
- default: {
- assert(false);
- }
- }
- switch (temp_fact) {
- case 3: {
- action_.temporal = kTwoThirdsTemporal;
- break;
- }
- case 2: {
- action_.temporal = kOneHalfTemporal;
- break;
- }
- case 1: {
- action_.temporal = kNoChangeTemporal;
- break;
- }
- default: {
- assert(false);
- }
- }
- // Only allow for one action (spatial or temporal) at a given time.
- assert(action_.temporal == kNoChangeTemporal ||
- action_.spatial == kNoChangeSpatial);
-
- // Adjust cases not captured in tables, mainly based on frame rate, and
- // also check for odd frame sizes.
- AdjustAction();
-
- // Update down-sampling state.
- if (action_.spatial != kNoChangeSpatial ||
- action_.temporal != kNoChangeTemporal) {
- UpdateDownsamplingState(kDownResolution);
- return true;
- }
- }
- return false;
-}
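To make the table lookup above concrete (assuming the content class is encoded as 3 * motion level + spatial level, which matches the class values expected in the unit tests below): high motion with low spatial texture gives content class 3, so at rate class 0 the lookup index is 3 + 9 * 0 = 3, where kSpatialAction[3] = 4 (select 1/2x1/2 spatial) and kTemporalAction[3] = 1 (no temporal change).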
-
-float VCMQmResolution::GetTransitionRate(float fac_width,
- float fac_height,
- float fac_temp,
- float scale_fac) {
- ImageType image_type = GetImageType(
- static_cast<uint16_t>(fac_width * width_),
- static_cast<uint16_t>(fac_height * height_));
-
- FrameRateLevelClass framerate_level =
- FrameRateLevel(fac_temp * avg_incoming_framerate_);
- // If we are checking for going up temporally, and the current temporal
- // down-action is the only one left in the history, then use the native
- // frame rate.
- if (down_action_history_[1].temporal == kNoChangeTemporal &&
- fac_temp > 1.0f) {
- framerate_level = FrameRateLevel(native_frame_rate_);
- }
-
- // The maximum rate below which down-sampling is allowed:
- // nominal values based on the image format (frame size and frame rate).
- float max_rate = kFrameRateFac[framerate_level] * kMaxRateQm[image_type];
-
- uint8_t image_class = image_type > kVGA ? 1 : 0;
- uint8_t table_index = image_class * 9 + content_class_;
- // Scale factor for the down-sampling transition threshold,
- // based on the content class and the image size.
- float scale_trans_rate = kScaleTransRateQm[table_index];
- // Threshold bitrate for the resolution action.
- return static_cast<float>(scale_fac * scale_trans_rate * max_rate);
-}
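A worked example under assumed conditions: for a VGA frame (kMaxRateQm[kVGA] = 400 kbps), a high frame-rate level (kFrameRateFac = 1.0f), a content class whose kScaleTransRateQm entry is 0.60f, and scale_fac = 1.0f (the down-sampling check), the returned transition rate is 1.0 * 0.60 * 1.0 * 400 = 240 kbps; an average target rate below that value favors down-sampling in GoingDownResolution().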
-
-void VCMQmResolution::UpdateDownsamplingState(UpDownAction up_down) {
- if (up_down == kUpResolution) {
- qm_->spatial_width_fact = 1.0f / kFactorWidthSpatial[action_.spatial];
- qm_->spatial_height_fact = 1.0f / kFactorHeightSpatial[action_.spatial];
- // If last spatial action was 1/2x1/2, we undo it in two steps, so the
- // spatial scale factor in this first step is modified as (4.0/3.0 / 2.0).
- if (action_.spatial == kOneQuarterSpatialUniform) {
- qm_->spatial_width_fact =
- 1.0f * kFactorWidthSpatial[kOneHalfSpatialUniform] /
- kFactorWidthSpatial[kOneQuarterSpatialUniform];
- qm_->spatial_height_fact =
- 1.0f * kFactorHeightSpatial[kOneHalfSpatialUniform] /
- kFactorHeightSpatial[kOneQuarterSpatialUniform];
- }
- qm_->temporal_fact = 1.0f / kFactorTemporal[action_.temporal];
- RemoveLastDownAction();
- } else if (up_down == kDownResolution) {
- ConstrainAmountOfDownSampling();
- ConvertSpatialFractionalToWhole();
- qm_->spatial_width_fact = kFactorWidthSpatial[action_.spatial];
- qm_->spatial_height_fact = kFactorHeightSpatial[action_.spatial];
- qm_->temporal_fact = kFactorTemporal[action_.temporal];
- InsertLatestDownAction();
- } else {
- // This function should only be called if either the Up or Down action
- // has been selected.
- assert(false);
- }
- UpdateCodecResolution();
- state_dec_factor_spatial_ = state_dec_factor_spatial_ *
- qm_->spatial_width_fact * qm_->spatial_height_fact;
- state_dec_factor_temporal_ = state_dec_factor_temporal_ * qm_->temporal_fact;
-}
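For example, one 3/4x3/4 down-action multiplies |state_dec_factor_spatial_| by (4/3) * (4/3) = 16/9 ≈ 1.78, a second one raises it to ≈ 3.16, and a single 1/2x1/2 action corresponds to a factor of 4.0. These accumulated factors are what ConstrainAmountOfDownSampling() later checks against kMaxSpatialDown, kMaxTempDown and kMaxTotalDown.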
-
-void VCMQmResolution::UpdateCodecResolution() {
- if (action_.spatial != kNoChangeSpatial) {
- qm_->change_resolution_spatial = true;
- qm_->codec_width = static_cast<uint16_t>(width_ /
- qm_->spatial_width_fact + 0.5f);
- qm_->codec_height = static_cast<uint16_t>(height_ /
- qm_->spatial_height_fact + 0.5f);
- // Size should not exceed native sizes.
- assert(qm_->codec_width <= native_width_);
- assert(qm_->codec_height <= native_height_);
- // New sizes should be multiple of 2, otherwise spatial should not have
- // been selected.
- assert(qm_->codec_width % 2 == 0);
- assert(qm_->codec_height % 2 == 0);
- }
- if (action_.temporal != kNoChangeTemporal) {
- qm_->change_resolution_temporal = true;
- // Update the frame rate based on the average incoming frame rate.
- qm_->frame_rate = avg_incoming_framerate_ / qm_->temporal_fact + 0.5f;
- if (down_action_history_[0].temporal == kNoChangeTemporal) {
- // When we undo the last temporal-down action, make sure we go back up
- // to the native frame rate. Since the incoming frame rate may
- // fluctuate over time, |avg_incoming_framerate_| scaled back up may
- // be smaller than |native_frame_rate_|.
- qm_->frame_rate = native_frame_rate_;
- }
- }
-}
-
-uint8_t VCMQmResolution::RateClass(float transition_rate) {
- return avg_target_rate_ < (kFacLowRate * transition_rate) ? 0 :
- (avg_target_rate_ >= transition_rate ? 2 : 1);
-}
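For instance, with kFacLowRate = 0.5f and a transition rate of 240 kbps, an average target rate below 120 kbps maps to rate class 0, a rate from 120 up to (but not including) 240 kbps to class 1, and 240 kbps or above to class 2.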
-
-// TODO(marpan): Would be better to capture these frame rate adjustments by
-// extending the table data (qm_select_data.h).
-void VCMQmResolution::AdjustAction() {
- // If the spatial level is in the default state (neither low nor high),
- // the motion level is not high, and a spatial action was selected, switch
- // to a 2/3 frame rate reduction if the average incoming frame rate is high.
- if (spatial_.level == kDefault && motion_.level != kHigh &&
- action_.spatial != kNoChangeSpatial &&
- framerate_level_ == kFrameRateHigh) {
- action_.spatial = kNoChangeSpatial;
- action_.temporal = kTwoThirdsTemporal;
- }
- // If both motion and spatial level are low, and temporal down action was
- // selected, switch to spatial 3/4x3/4 if the frame rate is not above the
- // lower middle level (|kFrameRateMiddle1|).
- if (motion_.level == kLow && spatial_.level == kLow &&
- framerate_level_ <= kFrameRateMiddle1 &&
- action_.temporal != kNoChangeTemporal) {
- action_.spatial = kOneHalfSpatialUniform;
- action_.temporal = kNoChangeTemporal;
- }
- // If spatial action is selected, and there has been too much spatial
- // reduction already (i.e., 1/4), then switch to temporal action if the
- // average frame rate is not low.
- if (action_.spatial != kNoChangeSpatial &&
- down_action_history_[0].spatial == kOneQuarterSpatialUniform &&
- framerate_level_ != kFrameRateLow) {
- action_.spatial = kNoChangeSpatial;
- action_.temporal = kTwoThirdsTemporal;
- }
- // Never use temporal action if number of temporal layers is above 2.
- if (num_layers_ > 2) {
- if (action_.temporal != kNoChangeTemporal) {
- action_.spatial = kOneHalfSpatialUniform;
- }
- action_.temporal = kNoChangeTemporal;
- }
- // If spatial action was selected, we need to make sure the frame sizes
- // are multiples of two. Otherwise switch to 2/3 temporal.
- if (action_.spatial != kNoChangeSpatial &&
- !EvenFrameSize()) {
- action_.spatial = kNoChangeSpatial;
- // Only one action (spatial or temporal) is allowed at a given time, so the
- // temporal action must be kNoChangeTemporal here; it is safe to switch to
- // a 2/3 temporal reduction.
- action_.temporal = kTwoThirdsTemporal;
- }
-}
-
-void VCMQmResolution::ConvertSpatialFractionalToWhole() {
- // If 3/4 spatial is selected, check if there has been another 3/4,
- // and if so, combine them into 1/2. 1/2 scaling is more efficient than 9/16.
- // Note we define 3/4x3/4 spatial as kOneHalfSpatialUniform.
- if (action_.spatial == kOneHalfSpatialUniform) {
- bool found = false;
- int isel = kDownActionHistorySize;
- for (int i = 0; i < kDownActionHistorySize; ++i) {
- if (down_action_history_[i].spatial == kOneHalfSpatialUniform) {
- isel = i;
- found = true;
- break;
- }
- }
- if (found) {
- action_.spatial = kOneQuarterSpatialUniform;
- state_dec_factor_spatial_ = state_dec_factor_spatial_ /
- (kFactorWidthSpatial[kOneHalfSpatialUniform] *
- kFactorHeightSpatial[kOneHalfSpatialUniform]);
- // Check if switching to 1/2x1/2 (=1/4) spatial is allowed.
- ConstrainAmountOfDownSampling();
- if (action_.spatial == kNoChangeSpatial) {
- // Not allowed. Go back to 3/4x3/4 spatial.
- action_.spatial = kOneHalfSpatialUniform;
- state_dec_factor_spatial_ = state_dec_factor_spatial_ *
- kFactorWidthSpatial[kOneHalfSpatialUniform] *
- kFactorHeightSpatial[kOneHalfSpatialUniform];
- } else {
- // Switching is allowed. Remove 3/4x3/4 from the history, and update
- // the frame size.
- for (int i = isel; i < kDownActionHistorySize - 1; ++i) {
- down_action_history_[i].spatial =
- down_action_history_[i + 1].spatial;
- }
- width_ = width_ * kFactorWidthSpatial[kOneHalfSpatialUniform];
- height_ = height_ * kFactorHeightSpatial[kOneHalfSpatialUniform];
- }
- }
- }
-}
-
-// Returns false if the new frame sizes, under the current spatial action,
-// are not multiples of two.
-bool VCMQmResolution::EvenFrameSize() {
- if (action_.spatial == kOneHalfSpatialUniform) {
- if ((width_ * 3 / 4) % 2 != 0 || (height_ * 3 / 4) % 2 != 0) {
- return false;
- }
- } else if (action_.spatial == kOneQuarterSpatialUniform) {
- if ((width_ * 1 / 2) % 2 != 0 || (height_ * 1 / 2) % 2 != 0) {
- return false;
- }
- }
- return true;
-}
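For example, a 3/4x3/4 action on a 180x120 frame would produce a width of 180 * 3 / 4 = 135 pixels, which is odd, so EvenFrameSize() returns false and AdjustAction() falls back to a 2/3 temporal reduction instead.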
-
-void VCMQmResolution::InsertLatestDownAction() {
- if (action_.spatial != kNoChangeSpatial) {
- for (int i = kDownActionHistorySize - 1; i > 0; --i) {
- down_action_history_[i].spatial = down_action_history_[i - 1].spatial;
- }
- down_action_history_[0].spatial = action_.spatial;
- }
- if (action_.temporal != kNoChangeTemporal) {
- for (int i = kDownActionHistorySize - 1; i > 0; --i) {
- down_action_history_[i].temporal = down_action_history_[i - 1].temporal;
- }
- down_action_history_[0].temporal = action_.temporal;
- }
-}
-
-void VCMQmResolution::RemoveLastDownAction() {
- if (action_.spatial != kNoChangeSpatial) {
- // If the last spatial action was 1/2x1/2 we replace it with 3/4x3/4.
- if (action_.spatial == kOneQuarterSpatialUniform) {
- down_action_history_[0].spatial = kOneHalfSpatialUniform;
- } else {
- for (int i = 0; i < kDownActionHistorySize - 1; ++i) {
- down_action_history_[i].spatial = down_action_history_[i + 1].spatial;
- }
- down_action_history_[kDownActionHistorySize - 1].spatial =
- kNoChangeSpatial;
- }
- }
- if (action_.temporal != kNoChangeTemporal) {
- for (int i = 0; i < kDownActionHistorySize - 1; ++i) {
- down_action_history_[i].temporal = down_action_history_[i + 1].temporal;
- }
- down_action_history_[kDownActionHistorySize - 1].temporal =
- kNoChangeTemporal;
- }
-}
-
-void VCMQmResolution::ConstrainAmountOfDownSampling() {
- // Sanity checks on down-sampling selection:
- // override the settings for too small image size and/or frame rate.
- // Also check the limit on current down-sampling states.
-
- float spatial_width_fact = kFactorWidthSpatial[action_.spatial];
- float spatial_height_fact = kFactorHeightSpatial[action_.spatial];
- float temporal_fact = kFactorTemporal[action_.temporal];
- float new_dec_factor_spatial = state_dec_factor_spatial_ *
- spatial_width_fact * spatial_height_fact;
- float new_dec_factor_temp = state_dec_factor_temporal_ * temporal_fact;
-
- // No spatial down-sampling if the current frame size is too small (at or
- // below |kMinImageSize|), or if the amount of spatial down-sampling would
- // exceed the maximum spatial down-action.
- if ((width_ * height_) <= kMinImageSize ||
- new_dec_factor_spatial > kMaxSpatialDown) {
- action_.spatial = kNoChangeSpatial;
- new_dec_factor_spatial = state_dec_factor_spatial_;
- }
- // No frame rate reduction if the average frame rate is at or below the
- // minimum (|kMinFrameRate|), or if the amount of temporal down-sampling
- // would exceed the maximum temporal down-action.
- if (avg_incoming_framerate_ <= kMinFrameRate ||
- new_dec_factor_temp > kMaxTempDown) {
- action_.temporal = kNoChangeTemporal;
- new_dec_factor_temp = state_dec_factor_temporal_;
- }
- // Check if the total (spatial-temporal) down-action is above maximum allowed,
- // if so, disallow the current selected down-action.
- if (new_dec_factor_spatial * new_dec_factor_temp > kMaxTotalDown) {
- if (action_.spatial != kNoChangeSpatial) {
- action_.spatial = kNoChangeSpatial;
- } else if (action_.temporal != kNoChangeTemporal) {
- action_.temporal = kNoChangeTemporal;
- } else {
- // We only allow for one action (spatial or temporal) at a given time, so
- // either spatial or temporal action is selected when this function is
- // called. If the selected action is disallowed from one of the above
- // 2 prior conditions (on spatial & temporal max down-action), then this
- // condition "total down-action > |kMaxTotalDown|" would not be entered.
- assert(false);
- }
- }
-}
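A worked example (with kMaxSpatialDown = 8.0f): starting from a 1/2x1/2 state (|state_dec_factor_spatial_| = 4.0), a further 1/2x1/2 request would raise the spatial factor to 16.0 and is therefore cancelled, while a 3/4x3/4 request (factor 4.0 * 16/9 ≈ 7.1) would still be allowed.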
-
-void VCMQmResolution::PickSpatialOrTemporal() {
- // Pick the one that has had the most down-sampling thus far.
- if (state_dec_factor_spatial_ > state_dec_factor_temporal_) {
- action_.spatial = down_action_history_[0].spatial;
- action_.temporal = kNoChangeTemporal;
- } else {
- action_.spatial = kNoChangeSpatial;
- action_.temporal = down_action_history_[0].temporal;
- }
-}
-
-// TODO(marpan): Update when we allow for directional spatial down-sampling.
-void VCMQmResolution::SelectSpatialDirectionMode(float transition_rate) {
- // Default is 4/3x4/3.
- // For bit rates well below the transitional rate, we select 2x2.
- if (avg_target_rate_ < transition_rate * kRateRedSpatial2X2) {
- qm_->spatial_width_fact = 2.0f;
- qm_->spatial_height_fact = 2.0f;
- }
- // Otherwise check prediction errors and aspect ratio.
- float spatial_err = 0.0f;
- float spatial_err_h = 0.0f;
- float spatial_err_v = 0.0f;
- if (content_metrics_) {
- spatial_err = content_metrics_->spatial_pred_err;
- spatial_err_h = content_metrics_->spatial_pred_err_h;
- spatial_err_v = content_metrics_->spatial_pred_err_v;
- }
-
- // Favor 1x2 if aspect_ratio is 16:9.
- if (aspect_ratio_ >= 16.0f / 9.0f) {
- // Check if 1x2 has lowest prediction error.
- if (spatial_err_h < spatial_err && spatial_err_h < spatial_err_v) {
- qm_->spatial_width_fact = 2.0f;
- qm_->spatial_height_fact = 1.0f;
- }
- }
- // Check for 4/3x4/3 selection: favor 2x2 over 1x2 and 2x1.
- if (spatial_err < spatial_err_h * (1.0f + kSpatialErr2x2VsHoriz) &&
- spatial_err < spatial_err_v * (1.0f + kSpatialErr2X2VsVert)) {
- qm_->spatial_width_fact = 4.0f / 3.0f;
- qm_->spatial_height_fact = 4.0f / 3.0f;
- }
- // Check for 2x1 selection.
- if (spatial_err_v < spatial_err_h * (1.0f - kSpatialErrVertVsHoriz) &&
- spatial_err_v < spatial_err * (1.0f - kSpatialErr2X2VsVert)) {
- qm_->spatial_width_fact = 1.0f;
- qm_->spatial_height_fact = 2.0f;
- }
-}
-
-// ROBUSTNESS CLASS
-
-VCMQmRobustness::VCMQmRobustness() {
- Reset();
-}
-
-VCMQmRobustness::~VCMQmRobustness() {
-}
-
-void VCMQmRobustness::Reset() {
- prev_total_rate_ = 0.0f;
- prev_rtt_time_ = 0;
- prev_packet_loss_ = 0;
- prev_code_rate_delta_ = 0;
- ResetQM();
-}
-
-// Adjust the FEC rate based on the content and the network state
-// (packet loss rate, total rate/bandwidth, round trip time).
-// Note that packetLoss here is the filtered loss value.
-float VCMQmRobustness::AdjustFecFactor(uint8_t code_rate_delta,
- float total_rate,
- float framerate,
- int64_t rtt_time,
- uint8_t packet_loss) {
- // Default: no adjustment
- float adjust_fec = 1.0f;
- if (content_metrics_ == NULL) {
- return adjust_fec;
- }
- // Compute class state of the content.
- ComputeMotionNFD();
- ComputeSpatial();
-
- // TODO(marpan): Set FEC adjustment factor.
-
- // Keep track of previous values of network state:
- // adjustment may be also based on pattern of changes in network state.
- prev_total_rate_ = total_rate;
- prev_rtt_time_ = rtt_time;
- prev_packet_loss_ = packet_loss;
- prev_code_rate_delta_ = code_rate_delta;
- return adjust_fec;
-}
-
-// Set the UEP (unequal-protection across packets) on/off for the FEC.
-bool VCMQmRobustness::SetUepProtection(uint8_t code_rate_delta,
- float total_rate,
- uint8_t packet_loss,
- bool frame_type) {
- // Default.
- return false;
-}
-} // namespace webrtc
diff --git a/webrtc/modules/video_coding/main/source/qm_select.h b/webrtc/modules/video_coding/main/source/qm_select.h
deleted file mode 100644
index 079e7f8879..0000000000
--- a/webrtc/modules/video_coding/main/source/qm_select.h
+++ /dev/null
@@ -1,373 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_QM_SELECT_H_
-#define WEBRTC_MODULES_VIDEO_CODING_QM_SELECT_H_
-
-#include "webrtc/common_types.h"
-#include "webrtc/typedefs.h"
-
-/******************************************************/
-/* Quality Modes: Resolution and Robustness settings */
-/******************************************************/
-
-namespace webrtc {
-struct VideoContentMetrics;
-
-struct VCMResolutionScale {
- VCMResolutionScale()
- : codec_width(640),
- codec_height(480),
- frame_rate(30.0f),
- spatial_width_fact(1.0f),
- spatial_height_fact(1.0f),
- temporal_fact(1.0f),
- change_resolution_spatial(false),
- change_resolution_temporal(false) {
- }
- uint16_t codec_width;
- uint16_t codec_height;
- float frame_rate;
- float spatial_width_fact;
- float spatial_height_fact;
- float temporal_fact;
- bool change_resolution_spatial;
- bool change_resolution_temporal;
-};
-
-enum ImageType {
- kQCIF = 0, // 176x144
- kHCIF, // 264x216 = half(~3/4x3/4) CIF.
- kQVGA, // 320x240 = quarter VGA.
- kCIF, // 352x288
- kHVGA, // 480x360 = half(~3/4x3/4) VGA.
- kVGA, // 640x480
- kQFULLHD, // 960x540 = quarter FULLHD, and half(~3/4x3/4) WHD.
- kWHD, // 1280x720
- kFULLHD, // 1920x1080
- kNumImageTypes
-};
-
-const uint32_t kSizeOfImageType[kNumImageTypes] =
-{ 25344, 57024, 76800, 101376, 172800, 307200, 518400, 921600, 2073600 };
-
-enum FrameRateLevelClass {
- kFrameRateLow,
- kFrameRateMiddle1,
- kFrameRateMiddle2,
- kFrameRateHigh
-};
-
-enum ContentLevelClass {
- kLow,
- kHigh,
- kDefault
-};
-
-struct VCMContFeature {
- VCMContFeature()
- : value(0.0f),
- level(kDefault) {
- }
- void Reset() {
- value = 0.0f;
- level = kDefault;
- }
- float value;
- ContentLevelClass level;
-};
-
-enum UpDownAction {
- kUpResolution,
- kDownResolution
-};
-
-enum SpatialAction {
- kNoChangeSpatial,
- kOneHalfSpatialUniform, // 3/4 x 3/4: 9/16 ~1/2 pixel reduction.
- kOneQuarterSpatialUniform, // 1/2 x 1/2: 1/4 pixel reduction.
- kNumModesSpatial
-};
-
-enum TemporalAction {
- kNoChangeTemporal,
- kTwoThirdsTemporal, // 2/3 frame rate reduction
- kOneHalfTemporal, // 1/2 frame rate reduction
- kNumModesTemporal
-};
-
-struct ResolutionAction {
- ResolutionAction()
- : spatial(kNoChangeSpatial),
- temporal(kNoChangeTemporal) {
- }
- SpatialAction spatial;
- TemporalAction temporal;
-};
-
-// Down-sampling factors for spatial (width and height), and temporal.
-const float kFactorWidthSpatial[kNumModesSpatial] =
- { 1.0f, 4.0f / 3.0f, 2.0f };
-
-const float kFactorHeightSpatial[kNumModesSpatial] =
- { 1.0f, 4.0f / 3.0f, 2.0f };
-
-const float kFactorTemporal[kNumModesTemporal] =
- { 1.0f, 1.5f, 2.0f };
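Note how the factors relate to the action names: kOneHalfSpatialUniform scales each dimension by 3/4 (table factor 4/3), keeping (3/4)^2 = 9/16 ≈ 1/2 of the pixels, while kOneQuarterSpatialUniform scales each dimension by 1/2 (factor 2.0), keeping 1/4 of the pixels.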
-
-enum EncoderState {
- kStableEncoding, // Low rate mis-match, stable buffer levels.
- kStressedEncoding, // Significant over-shooting of target rate,
- // buffer under-flow, etc.
- kEasyEncoding // Significant under-shooting of target rate.
-};
-
-// QmMethod class: main class for resolution and robustness settings
-
-class VCMQmMethod {
- public:
- VCMQmMethod();
- virtual ~VCMQmMethod();
-
- // Reset values
- void ResetQM();
- virtual void Reset() = 0;
-
- // Compute content class.
- uint8_t ComputeContentClass();
-
- // Update with the content metrics.
- void UpdateContent(const VideoContentMetrics* content_metrics);
-
- // Compute spatial texture magnitude and level.
- // Spatial texture is a spatial prediction error measure.
- void ComputeSpatial();
-
- // Compute motion magnitude and level for NFD metric.
- // NFD is normalized frame difference (normalized by spatial variance).
- void ComputeMotionNFD();
-
- // Get the imageType (CIF, VGA, HD, etc) for the system width/height.
- ImageType GetImageType(uint16_t width, uint16_t height);
-
- // Return the closest image type.
- ImageType FindClosestImageType(uint16_t width, uint16_t height);
-
- // Get the frame rate level.
- FrameRateLevelClass FrameRateLevel(float frame_rate);
-
- protected:
- // Content Data.
- const VideoContentMetrics* content_metrics_;
-
- // Encoder frame sizes and native frame sizes.
- uint16_t width_;
- uint16_t height_;
- float user_frame_rate_;
- uint16_t native_width_;
- uint16_t native_height_;
- float native_frame_rate_;
- float aspect_ratio_;
- // Image type and frame rate level, for the current encoder resolution.
- ImageType image_type_;
- FrameRateLevelClass framerate_level_;
- // Content class data.
- VCMContFeature motion_;
- VCMContFeature spatial_;
- uint8_t content_class_;
- bool init_;
-};
-
-// Resolution settings class
-
-class VCMQmResolution : public VCMQmMethod {
- public:
- VCMQmResolution();
- virtual ~VCMQmResolution();
-
- // Reset all quantities.
- virtual void Reset();
-
- // Reset rate quantities and counters after every SelectResolution() call.
- void ResetRates();
-
- // Reset down-sampling state.
- void ResetDownSamplingState();
-
- // Get the encoder state.
- EncoderState GetEncoderState();
-
- // Initialize after SetEncodingData in media_opt.
- int Initialize(float bitrate,
- float user_framerate,
- uint16_t width,
- uint16_t height,
- int num_layers);
-
- // Update the encoder frame size.
- void UpdateCodecParameters(float frame_rate, uint16_t width, uint16_t height);
-
- // Update with actual bit rate (size of the latest encoded frame)
- // and frame type, after every encoded frame.
- void UpdateEncodedSize(size_t encoded_size);
-
- // Update with new target bitrate, actual encoder sent rate, frame_rate,
- // loss rate: every ~1 sec from SetTargetRates in media_opt.
- void UpdateRates(float target_bitrate,
- float encoder_sent_rate,
- float incoming_framerate,
- uint8_t packet_loss);
-
- // Extract the ST (spatio-temporal) resolution action.
- // Input: |qm|: pointer to the resolution-scale pointer; on output it is
- // set to the selected spatial and/or temporal scale change.
- int SelectResolution(VCMResolutionScale** qm);
-
- private:
- // Set the default resolution action.
- void SetDefaultAction();
-
- // Compute rates for the selection of down-sampling action.
- void ComputeRatesForSelection();
-
- // Compute the encoder state.
- void ComputeEncoderState();
-
- // Return true if the action is to go back up in resolution.
- bool GoingUpResolution();
-
- // Return true if the action is to go down in resolution.
- bool GoingDownResolution();
-
- // Check the condition for going up in resolution by the scale factors:
- // |fac_width|, |fac_height|, |fac_temp|.
- // |scale_fac| is a scale factor for the transition rate.
- bool ConditionForGoingUp(float fac_width,
- float fac_height,
- float fac_temp,
- float scale_fac);
-
- // Get the bitrate threshold for the resolution action.
- // The case |fac_width| = |fac_height| = |fac_temp| = 1 is for the
- // down-sampling action. |scale_fac| is a scale factor for the transition
- // rate.
- float GetTransitionRate(float fac_width,
- float fac_height,
- float fac_temp,
- float scale_fac);
-
- // Update the down-sampling state.
- void UpdateDownsamplingState(UpDownAction up_down);
-
- // Update the codec frame size and frame rate.
- void UpdateCodecResolution();
-
- // Return a state based on the average target rate relative to the
- // transition rate.
- uint8_t RateClass(float transition_rate);
-
- // Adjust the action selected from the table.
- void AdjustAction();
-
- // Convert 2 stages of 3/4 (=9/16) spatial decimation to 1/2.
- void ConvertSpatialFractionalToWhole();
-
- // Returns true if the new frame sizes, under the selected spatial action,
- // are of even size.
- bool EvenFrameSize();
-
- // Insert latest down-sampling action into the history list.
- void InsertLatestDownAction();
-
- // Remove the last (first element) down-sampling action from the list.
- void RemoveLastDownAction();
-
- // Check constraints on the amount of down-sampling allowed.
- void ConstrainAmountOfDownSampling();
-
- // For going up in resolution: pick spatial or temporal action,
- // if both actions were separately selected.
- void PickSpatialOrTemporal();
-
- // Select the directional (1x2 or 2x1) spatial down-sampling action.
- void SelectSpatialDirectionMode(float transition_rate);
-
- enum { kDownActionHistorySize = 10 };
-
- VCMResolutionScale* qm_;
- // Encoder rate control parameters.
- float target_bitrate_;
- float incoming_framerate_;
- float per_frame_bandwidth_;
- float buffer_level_;
-
- // Data accumulated every ~1sec from MediaOpt.
- float sum_target_rate_;
- float sum_incoming_framerate_;
- float sum_rate_MM_;
- float sum_rate_MM_sgn_;
- float sum_packet_loss_;
- // Counters.
- uint32_t frame_cnt_;
- uint32_t frame_cnt_delta_;
- uint32_t update_rate_cnt_;
- uint32_t low_buffer_cnt_;
-
- // Resolution state parameters.
- float state_dec_factor_spatial_;
- float state_dec_factor_temporal_;
-
- // Quantities used for selection.
- float avg_target_rate_;
- float avg_incoming_framerate_;
- float avg_ratio_buffer_low_;
- float avg_rate_mismatch_;
- float avg_rate_mismatch_sgn_;
- float avg_packet_loss_;
- EncoderState encoder_state_;
- ResolutionAction action_;
- // Short history of the down-sampling actions from the Initialize() state.
- // This is needed for going up in resolution. Since the total amount of
- // down-sampling is constrained, the length of the list need not be
- // large: i.e., (4/3) ^{kDownActionHistorySize} <= kMaxDownSample.
- ResolutionAction down_action_history_[kDownActionHistorySize];
- int num_layers_;
-};
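A minimal usage sketch of this interface (the call sequence is inferred from the comments above and from the unit tests below; the rates, sizes and the encoder-reconfiguration step are illustrative assumptions, not part of the original file):

  #include "webrtc/modules/video_coding/main/source/qm_select.h"

  // Hypothetical driver; all numeric values are made up for illustration.
  void RunQmSelectionOnce(webrtc::VideoContentMetrics* content_metrics,
                          size_t encoded_bytes) {
    webrtc::VCMQmResolution qm_resolution;
    // 300 kbps target, 30 fps, VGA native size, 1 temporal layer.
    qm_resolution.Initialize(300.0f, 30.0f, 640, 480, 1);
    qm_resolution.UpdateCodecParameters(30.0f, 640, 480);
    // After every encoded frame:
    qm_resolution.UpdateEncodedSize(encoded_bytes);
    // Every ~1 sec: target rate, encoder sent rate, frame rate, loss.
    qm_resolution.UpdateRates(300.0f, 320.0f, 30.0f, 10);
    qm_resolution.UpdateContent(content_metrics);
    webrtc::VCMResolutionScale* qm_scale = NULL;
    if (qm_resolution.SelectResolution(&qm_scale) == 0 &&
        (qm_scale->change_resolution_spatial ||
         qm_scale->change_resolution_temporal)) {
      // Reconfigure the encoder with qm_scale->codec_width,
      // qm_scale->codec_height and qm_scale->frame_rate.
    }
  }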
-
-// Robustness settings class.
-
-class VCMQmRobustness : public VCMQmMethod {
- public:
- VCMQmRobustness();
- ~VCMQmRobustness();
-
- virtual void Reset();
-
- // Adjust FEC rate based on content: every ~1 sec from SetTargetRates.
- // Returns an adjustment factor.
- float AdjustFecFactor(uint8_t code_rate_delta,
- float total_rate,
- float framerate,
- int64_t rtt_time,
- uint8_t packet_loss);
-
- // Set the UEP protection on/off.
- bool SetUepProtection(uint8_t code_rate_delta,
- float total_rate,
- uint8_t packet_loss,
- bool frame_type);
-
- private:
- // Previous state of network parameters.
- float prev_total_rate_;
- int64_t prev_rtt_time_;
- uint8_t prev_packet_loss_;
- uint8_t prev_code_rate_delta_;
-};
-} // namespace webrtc
-#endif // WEBRTC_MODULES_VIDEO_CODING_QM_SELECT_H_
diff --git a/webrtc/modules/video_coding/main/source/qm_select_data.h b/webrtc/modules/video_coding/main/source/qm_select_data.h
deleted file mode 100644
index dc6bce4811..0000000000
--- a/webrtc/modules/video_coding/main/source/qm_select_data.h
+++ /dev/null
@@ -1,227 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_SOURCE_QM_SELECT_DATA_H_
-#define WEBRTC_MODULES_VIDEO_CODING_SOURCE_QM_SELECT_DATA_H_
-
-/***************************************************************
- * qm_select_data.h
- * This file includes parameters for content-aware media optimization.
- ***************************************************************/
-
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-//
-// PARAMETERS FOR RESOLUTION ADAPTATION
-//
-
-// Initial level of buffer in secs.
-const float kInitBufferLevel = 0.5f;
-
-// Threshold (fraction of the max buffer size) below which the buffer level
-// is considered too low (underflow).
-const float kPercBufferThr = 0.10f;
-
-// Threshold on the occurrences of low buffer levels.
-const float kMaxBufferLow = 0.30f;
-
-// Threshold on rate mismatch.
-const float kMaxRateMisMatch = 0.5f;
-
-// Threshold on amount of under/over encoder shooting.
-const float kRateOverShoot = 0.75f;
-const float kRateUnderShoot = 0.75f;
-
-// Factor to favor weighting the average rates with the current/last data.
-const float kWeightRate = 0.70f;
-
-// Factor for transitional rate for going back up in resolution.
-const float kTransRateScaleUpSpatial = 1.25f;
-const float kTransRateScaleUpTemp = 1.25f;
-const float kTransRateScaleUpSpatialTemp = 1.25f;
-
-// Threshold on packet loss rate, above which favor resolution reduction.
-const float kPacketLossThr = 0.1f;
-
-// Factor for reducing transitional bitrate under packet loss.
-const float kPacketLossRateFac = 1.0f;
-
-// Maximum possible transitional rate for down-sampling:
-// (units in kbps), for 30fps.
-const uint16_t kMaxRateQm[9] = {
- 0, // QCIF
- 50, // kHCIF
- 125, // kQVGA
- 200, // CIF
- 280, // HVGA
- 400, // VGA
- 700, // QFULLHD
- 1000, // WHD
- 1500 // FULLHD
-};
-
-// Frame rate scale for maximum transition rate.
-const float kFrameRateFac[4] = {
- 0.5f, // Low
- 0.7f, // Middle level 1
- 0.85f, // Middle level 2
- 1.0f, // High
-};
-
-// Scale for the transitional rate, based on content class:
-// motion = L/H/D, spatial = L/H/D, for the low, high, middle (default) levels.
-const float kScaleTransRateQm[18] = {
- // VGA and lower
- 0.40f, // L, L
- 0.50f, // L, H
- 0.40f, // L, D
- 0.60f, // H, L
- 0.60f, // H, H
- 0.60f, // H, D
- 0.50f, // D, L
- 0.50f, // D, H
- 0.50f, // D, D
-
- // over VGA
- 0.40f, // L, L
- 0.50f, // L, H
- 0.40f, // L, D
- 0.60f, // H, L
- 0.60f, // H, H
- 0.60f, // H, D
- 0.50f, // D, L
- 0.50f, // D, H
- 0.50f, // D, D
-};
-
-// Threshold on the target rate relative to transitional rate.
-const float kFacLowRate = 0.5f;
-
-// Action for down-sampling:
-// motion=L/H/D,spatial==L/H/D, for low, high, middle levels;
-// rate = 0/1/2, for target rate state relative to transition rate.
-const uint8_t kSpatialAction[27] = {
-// rateClass = 0:
- 1, // L, L
- 1, // L, H
- 1, // L, D
- 4, // H, L
- 1, // H, H
- 4, // H, D
- 4, // D, L
- 1, // D, H
- 2, // D, D
-
-// rateClass = 1:
- 1, // L, L
- 1, // L, H
- 1, // L, D
- 2, // H, L
- 1, // H, H
- 2, // H, D
- 2, // D, L
- 1, // D, H
- 2, // D, D
-
-// rateClass = 2:
- 1, // L, L
- 1, // L, H
- 1, // L, D
- 2, // H, L
- 1, // H, H
- 2, // H, D
- 2, // D, L
- 1, // D, H
- 2, // D, D
-};
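The 27 entries above (and in kTemporalAction below) are indexed as content_class + 9 * rate_class, with rows ordered motion-major; assuming the content class is 3 * motion level + spatial level (levels: L = 0, H = 1, D = 2), which matches the class values expected in qm_select_unittest.cc, high motion with low spatial texture (class 3) selects entry 3 of each rate-class block.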
-
-const uint8_t kTemporalAction[27] = {
-// rateClass = 0:
- 3, // L, L
- 2, // L, H
- 2, // L, D
- 1, // H, L
- 3, // H, H
- 1, // H, D
- 1, // D, L
- 2, // D, H
- 1, // D, D
-
-// rateClass = 1:
- 3, // L, L
- 3, // L, H
- 3, // L, D
- 1, // H, L
- 3, // H, H
- 1, // H, D
- 1, // D, L
- 3, // D, H
- 1, // D, D
-
-// rateClass = 2:
- 1, // L, L
- 3, // L, H
- 3, // L, D
- 1, // H, L
- 3, // H, H
- 1, // H, D
- 1, // D, L
- 3, // D, H
- 1, // D, D
-};
-
-// Control the total amount of down-sampling allowed.
-const float kMaxSpatialDown = 8.0f;
-const float kMaxTempDown = 3.0f;
-const float kMaxTotalDown = 9.0f;
-
-// Minimum image size for a spatial down-sampling.
-const int kMinImageSize = 176 * 144;
-
-// Minimum frame rate for temporal down-sampling:
-// no frame rate reduction if the incoming frame rate is <= kMinFrameRate.
-const int kMinFrameRate = 8;
-
-//
-// PARAMETERS FOR FEC ADJUSTMENT: TODO (marpan)
-//
-
-//
-// PARAMETERS FOR SETTING LOW/HIGH STATES OF CONTENT METRICS:
-//
-
-// Thresholds for frame rate:
-const int kLowFrameRate = 10;
-const int kMiddleFrameRate = 15;
-const int kHighFrameRate = 25;
-
-// Thresholds for motion: motion level is from NFD.
-const float kHighMotionNfd = 0.075f;
-const float kLowMotionNfd = 0.03f;
-
-// Thresholds for spatial prediction error:
-// this is applied on the average of (2x2,1x2,2x1).
-const float kHighTexture = 0.035f;
-const float kLowTexture = 0.020f;
-
-// Used to reduce thresholds for larger/HD scenes: correction factor since
-// higher correlation in HD scenes means lower spatial prediction error.
-const float kScaleTexture = 0.9f;
-
-// Percentage reduction in transitional bitrate for 2x2 selected over 1x2/2x1.
-const float kRateRedSpatial2X2 = 0.6f;
-
-const float kSpatialErr2x2VsHoriz = 0.1f; // percentage to favor 2x2 over H
-const float kSpatialErr2X2VsVert = 0.1f; // percentage to favor 2x2 over V
-const float kSpatialErrVertVsHoriz = 0.1f; // percentage to favor H over V
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_SOURCE_QM_SELECT_DATA_H_
diff --git a/webrtc/modules/video_coding/main/source/qm_select_unittest.cc b/webrtc/modules/video_coding/main/source/qm_select_unittest.cc
deleted file mode 100644
index 6abc0d3099..0000000000
--- a/webrtc/modules/video_coding/main/source/qm_select_unittest.cc
+++ /dev/null
@@ -1,1311 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file includes unit tests for the VCMQmResolution class,
- * in particular for the selection of spatial and/or temporal down-sampling.
- */
-
-#include "testing/gtest/include/gtest/gtest.h"
-
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/video_coding/main/source/qm_select.h"
-
-namespace webrtc {
-
-// Representative values of the content metrics for the low/high/medium
-// (default) states, based on the parameter settings in qm_select_data.h.
-const float kSpatialLow = 0.01f;
-const float kSpatialMedium = 0.03f;
-const float kSpatialHigh = 0.1f;
-const float kTemporalLow = 0.01f;
-const float kTemporalMedium = 0.06f;
-const float kTemporalHigh = 0.1f;
-
-class QmSelectTest : public ::testing::Test {
- protected:
- QmSelectTest()
- : qm_resolution_(new VCMQmResolution()),
- content_metrics_(new VideoContentMetrics()),
- qm_scale_(NULL) {
- }
- VCMQmResolution* qm_resolution_;
- VideoContentMetrics* content_metrics_;
- VCMResolutionScale* qm_scale_;
-
- void InitQmNativeData(float initial_bit_rate,
- int user_frame_rate,
- int native_width,
- int native_height,
- int num_layers);
-
- void UpdateQmEncodedFrame(size_t* encoded_size, size_t num_updates);
-
- void UpdateQmRateData(int* target_rate,
- int* encoder_sent_rate,
- int* incoming_frame_rate,
- uint8_t* fraction_lost,
- int num_updates);
-
- void UpdateQmContentData(float motion_metric,
- float spatial_metric,
- float spatial_metric_horiz,
- float spatial_metric_vert);
-
- bool IsSelectedActionCorrect(VCMResolutionScale* qm_scale,
- float fac_width,
- float fac_height,
- float fac_temp,
- uint16_t new_width,
- uint16_t new_height,
- float new_frame_rate);
-
- void TearDown() {
- delete qm_resolution_;
- delete content_metrics_;
- }
-};
-
-TEST_F(QmSelectTest, HandleInputs) {
- // Expect parameter error. Initialize with invalid inputs.
- EXPECT_EQ(-4, qm_resolution_->Initialize(1000, 0, 640, 480, 1));
- EXPECT_EQ(-4, qm_resolution_->Initialize(1000, 30, 640, 0, 1));
- EXPECT_EQ(-4, qm_resolution_->Initialize(1000, 30, 0, 480, 1));
-
- // Expect uninitialized error: no valid initialization before selection.
- EXPECT_EQ(-7, qm_resolution_->SelectResolution(&qm_scale_));
-
- VideoContentMetrics* content_metrics = NULL;
- EXPECT_EQ(0, qm_resolution_->Initialize(1000, 30, 640, 480, 1));
- qm_resolution_->UpdateContent(content_metrics);
- // Content metrics are NULL: Expect success and no down-sampling action.
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0, 1.0, 1.0, 640, 480,
- 30.0f));
-}
-
-// TODO(marpan): Add a test for number of temporal layers > 1.
-
-// No down-sampling action at high rates.
-TEST_F(QmSelectTest, NoActionHighRate) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(800, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {800, 800, 800};
- int encoder_sent_rate[] = {800, 800, 800};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- UpdateQmContentData(kTemporalLow, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(0, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480,
- 30.0f));
-}
-
-// Rate is well below the transition rate, so a down-sampling action is taken;
-// which action is selected depends on the content state.
-TEST_F(QmSelectTest, DownActionLowRate) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(50, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {50, 50, 50};
- int encoder_sent_rate[] = {50, 50, 50};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, low spatial: 2x2 spatial expected.
- UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240,
- 30.0f));
-
- qm_resolution_->ResetDownSamplingState();
- // Low motion, low spatial: 2/3 temporal is expected.
- UpdateQmContentData(kTemporalLow, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(0, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 640, 480,
- 20.5f));
-
- qm_resolution_->ResetDownSamplingState();
- // Medium motion, low spatial: 2x2 spatial expected.
- UpdateQmContentData(kTemporalMedium, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(6, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240,
- 30.0f));
-
- qm_resolution_->ResetDownSamplingState();
- // High motion, high spatial: 2/3 temporal expected.
- UpdateQmContentData(kTemporalHigh, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(4, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 640, 480,
- 20.5f));
-
- qm_resolution_->ResetDownSamplingState();
- // Low motion, high spatial: 1/2 temporal expected.
- UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f, 640, 480,
- 15.5f));
-
- qm_resolution_->ResetDownSamplingState();
- // Medium motion, high spatial: 1/2 temporal expected.
- UpdateQmContentData(kTemporalMedium, kSpatialHigh, kSpatialHigh,
- kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(7, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f, 640, 480,
- 15.5f));
-
- qm_resolution_->ResetDownSamplingState();
- // High motion, medium spatial: 2x2 spatial expected.
- UpdateQmContentData(kTemporalHigh, kSpatialMedium, kSpatialMedium,
- kSpatialMedium);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(5, qm_resolution_->ComputeContentClass());
- // Target frame rate for frame dropper should be the same as previous == 15.
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240,
- 30.0f));
-
- qm_resolution_->ResetDownSamplingState();
- // Low motion, medium spatial: high frame rate, so 1/2 temporal expected.
- UpdateQmContentData(kTemporalLow, kSpatialMedium, kSpatialMedium,
- kSpatialMedium);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(2, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f, 640, 480,
- 15.5f));
-
- qm_resolution_->ResetDownSamplingState();
- // Medium motion, medium spatial: high frame rate, so 2/3 temporal expected.
- UpdateQmContentData(kTemporalMedium, kSpatialMedium, kSpatialMedium,
- kSpatialMedium);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(8, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 640, 480,
- 20.5f));
-}
-
-// Rate mis-match is high and we have over-shooting: since the target rate is
-// below the max for down-sampling, a down-sampling action is selected.
-TEST_F(QmSelectTest, DownActionHighRateMMOvershoot) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(300, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {300, 300, 300};
- int encoder_sent_rate[] = {900, 900, 900};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, low spatial.
- UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStressedEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 4.0f / 3.0f, 4.0f / 3.0f,
- 1.0f, 480, 360, 30.0f));
-
- qm_resolution_->ResetDownSamplingState();
- // Low motion, high spatial
- UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 640, 480,
- 20.5f));
-}
-
-// Rate mis-match is high and the target rate is below the max for
-// down-sampling, but since we have consistent under-shooting, no
-// down-sampling action is taken.
-TEST_F(QmSelectTest, NoActionHighRateMMUndershoot) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(300, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {300, 300, 300};
- int encoder_sent_rate[] = {100, 100, 100};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, low spatial.
- UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kEasyEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480,
- 30.0f));
-
- qm_resolution_->ResetDownSamplingState();
- // Low motion, high spatial
- UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480,
- 30.0f));
-}
-
-// Buffer is underflowing, and target rate is below max for down-sampling,
-// so action is taken.
-TEST_F(QmSelectTest, DownActionBufferUnderflow) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(300, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update with encoded size over a number of frames.
- // per-frame bandwidth = 15 = 450/30: simulate (decoder) buffer underflow:
- size_t encoded_size[] = {200, 100, 50, 30, 60, 40, 20, 30, 20, 40};
- UpdateQmEncodedFrame(encoded_size, GTEST_ARRAY_SIZE_(encoded_size));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {300, 300, 300};
- int encoder_sent_rate[] = {450, 450, 450};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, low spatial.
- UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStressedEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 4.0f / 3.0f, 4.0f / 3.0f,
- 1.0f, 480, 360, 30.0f));
-
- qm_resolution_->ResetDownSamplingState();
- // Low motion, high spatial
- UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 640, 480,
- 20.5f));
-}
-
-// Target rate is below max for down-sampling, but buffer level is stable,
-// so no action is taken.
-TEST_F(QmSelectTest, NoActionBufferStable) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(350, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update with encoded size over a number of frames.
- // per-frame bandwidth = 15 = 450/30: simulate stable (decoder) buffer levels.
- size_t encoded_size[] = {40, 10, 10, 16, 18, 20, 17, 20, 16, 15};
- UpdateQmEncodedFrame(encoded_size, GTEST_ARRAY_SIZE_(encoded_size));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {350, 350, 350};
- int encoder_sent_rate[] = {350, 450, 450};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, low spatial.
- UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480,
- 30.0f));
-
- qm_resolution_->ResetDownSamplingState();
- // Low motion, high spatial
- UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480,
- 30.0f));
-}
-
-// Very low rate, but no spatial down-sampling below some size (QCIF).
-TEST_F(QmSelectTest, LimitDownSpatialAction) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(10, 30, 176, 144, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 176;
- uint16_t codec_height = 144;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(0, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {10, 10, 10};
- int encoder_sent_rate[] = {10, 10, 10};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, low spatial.
- UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 176, 144,
- 30.0f));
-}
-
-// Very low rate, but no frame reduction below some frame_rate (8fps).
-TEST_F(QmSelectTest, LimitDownTemporalAction) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(10, 8, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(8.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {10, 10, 10};
- int encoder_sent_rate[] = {10, 10, 10};
- int incoming_frame_rate[] = {8, 8, 8};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Low motion, medium spatial.
- UpdateQmContentData(kTemporalLow, kSpatialMedium, kSpatialMedium,
- kSpatialMedium);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(2, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480,
- 8.0f));
-}
-
-// Two stages: spatial down-sample and then back up spatially,
-// as the rate has increased.
-TEST_F(QmSelectTest, 2StageDownSpatialUpSpatial) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(50, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {50, 50, 50};
- int encoder_sent_rate[] = {50, 50, 50};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, low spatial.
- UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240,
- 30.0f));
-
- // Reset and go up in rate: expected to go back up, in 2 stages of 3/4.
- qm_resolution_->ResetRates();
- qm_resolution_->UpdateCodecParameters(30.0f, 320, 240);
- EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
- // Update rates for a sequence of intervals.
- int target_rate2[] = {400, 400, 400, 400, 400};
- int encoder_sent_rate2[] = {400, 400, 400, 400, 400};
- int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
- uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
- fraction_lost2, 5);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- float scale = (4.0f / 3.0f) / 2.0f;
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, scale, scale, 1.0f, 480, 360,
- 30.0f));
-
- qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
- EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 3.0f / 4.0f, 3.0f / 4.0f, 1.0f,
- 640, 480, 30.0f));
-}
-
-// Two stages: spatial down-sample and then back up spatially, since encoder
-// is under-shooting target even though rate has not increased much.
-TEST_F(QmSelectTest, 2StageDownSpatialUpSpatialUndershoot) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(50, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {50, 50, 50};
- int encoder_sent_rate[] = {50, 50, 50};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, low spatial.
- UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240,
- 30.0f));
-
- // Reset rates and simulate an under-shooting scenario: expect to go back up.
- // Goes up spatially in two stages for 1/2x1/2 down-sampling.
- // Goes up spatially in two stages for 1/2x1/2 down-sampling.
- qm_resolution_->ResetRates();
- qm_resolution_->UpdateCodecParameters(30.0f, 320, 240);
- EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
- // Update rates for a sequence of intervals.
- int target_rate2[] = {200, 200, 200, 200, 200};
- int encoder_sent_rate2[] = {50, 50, 50, 50, 50};
- int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
- uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
- fraction_lost2, 5);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(kEasyEncoding, qm_resolution_->GetEncoderState());
- float scale = (4.0f / 3.0f) / 2.0f;
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, scale, scale, 1.0f, 480, 360,
- 30.0f));
-
- qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
- EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 3.0f / 4.0f, 3.0f / 4.0f, 1.0f,
- 640, 480, 30.0f));
-}
-
-// Two stages: spatial down-sample and then no action to go up,
-// as encoding rate mis-match is too high.
-TEST_F(QmSelectTest, 2StageDownSpatialNoActionUp) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(50, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {50, 50, 50};
- int encoder_sent_rate[] = {50, 50, 50};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, low spatial.
- UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240,
- 30.0f));
-
- // Reset and simulate large rate mis-match: expect no action to go back up.
- qm_resolution_->ResetRates();
- qm_resolution_->UpdateCodecParameters(30.0f, 320, 240);
- EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
- // Update rates for a sequence of intervals.
- int target_rate2[] = {400, 400, 400, 400, 400};
- int encoder_sent_rate2[] = {1000, 1000, 1000, 1000, 1000};
- int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
- uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
- fraction_lost2, 5);
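- // A sent rate well above target marks the encoder as stressed
- // (kStressedEncoding), so no up-sample action is taken.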
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(kStressedEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 320, 240,
- 30.0f));
-}
-
-// Two stages: temporally down-sample and then back up temporally,
-// as rate has increased.
-TEST_F(QmSelectTest, 2StageDownTemporalUpTemporal) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(50, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {50, 50, 50};
- int encoder_sent_rate[] = {50, 50, 50};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Low motion, high spatial.
- UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f, 640, 480,
- 15.5f));
-
- // Reset rates and go up in rate: expect to go back up.
- qm_resolution_->ResetRates();
- // Update rates for a sequence of intervals.
- int target_rate2[] = {400, 400, 400, 400, 400};
- int encoder_sent_rate2[] = {400, 400, 400, 400, 400};
- int incoming_frame_rate2[] = {15, 15, 15, 15, 15};
- uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
- fraction_lost2, 5);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 0.5f, 640, 480,
- 30.0f));
-}
-
-// Two stages: temporal down-sample and then back up temporally, since encoder
-// is under-shooting target even though rate has not increased much.
-TEST_F(QmSelectTest, 2StageDownTemporalUpTemporalUndershoot) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(50, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {50, 50, 50};
- int encoder_sent_rate[] = {50, 50, 50};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Low motion, high spatial.
- UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f, 640, 480,
- 15.5f));
-
- // Reset rates and simulate an under-shooting scenario: expect to go back up.
- qm_resolution_->ResetRates();
- // Update rates for a sequence of intervals.
- int target_rate2[] = {150, 150, 150, 150, 150};
- int encoder_sent_rate2[] = {50, 50, 50, 50, 50};
- int incoming_frame_rate2[] = {15, 15, 15, 15, 15};
- uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
- fraction_lost2, 5);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(kEasyEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 0.5f, 640, 480,
- 30.0f));
-}
-
-// Two stages: temporal down-sample and then no action to go up,
-// as encoding rate mis-match is too high.
-TEST_F(QmSelectTest, 2StageDownTemporalNoActionUp) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(50, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {50, 50, 50};
- int encoder_sent_rate[] = {50, 50, 50};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Low motion, high spatial.
- UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f, 640, 480,
- 15.5f));
-
- // Reset and simulate large rate mis-match: expect no action to go back up.
- qm_resolution_->UpdateCodecParameters(15.0f, codec_width, codec_height);
- qm_resolution_->ResetRates();
- // Update rates for a sequence of intervals.
- int target_rate2[] = {600, 600, 600, 600, 600};
- int encoder_sent_rate2[] = {1000, 1000, 1000, 1000, 1000};
- int incoming_frame_rate2[] = {15, 15, 15, 15, 15};
- uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
- fraction_lost2, 5);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(kStressedEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480,
- 15.0f));
-}
-
-// 3 stages: spatial down-sample, followed by temporal down-sample,
-// and then go up to full state, as encoding rate has increased.
-TEST_F(QmSelectTest, 3StageDownSpatialTemporalUpSpatialTemporal) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(80, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {80, 80, 80};
- int encoder_sent_rate[] = {80, 80, 80};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, low spatial.
- UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240,
- 30.0f));
-
- // Lower the rates and change the content data: expect temporal down-sample.
- qm_resolution_->UpdateCodecParameters(30.0f, 320, 240);
- EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
-
- // Reset rates and go lower in rate.
- qm_resolution_->ResetRates();
- int target_rate2[] = {40, 40, 40, 40, 40};
- int encoder_sent_rate2[] = {40, 40, 40, 40, 40};
- int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
- uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
- fraction_lost2, 5);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Low motion, high spatial.
- UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 320, 240,
- 20.5f));
-
- // Reset rates and go high up in rate: expect to go back up both spatially
- // and temporally. The 1/2x1/2 spatial is undone in two stages.
- qm_resolution_->ResetRates();
- // Update rates for a sequence of intervals.
- int target_rate3[] = {1000, 1000, 1000, 1000, 1000};
- int encoder_sent_rate3[] = {1000, 1000, 1000, 1000, 1000};
- int incoming_frame_rate3[] = {20, 20, 20, 20, 20};
- uint8_t fraction_lost3[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate3, encoder_sent_rate3, incoming_frame_rate3,
- fraction_lost3, 5);
-
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- float scale = (4.0f / 3.0f) / 2.0f;
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, scale, scale, 2.0f / 3.0f,
- 480, 360, 30.0f));
-
- qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
- EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 3.0f / 4.0f, 3.0f / 4.0f, 1.0f,
- 640, 480, 30.0f));
-}
-
-// No further down-sampling once the total down-sampling exceeds some amount.
-TEST_F(QmSelectTest, NoActionTooMuchDownSampling) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(150, 30, 1280, 720, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 1280;
- uint16_t codec_height = 720;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(7, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {150, 150, 150};
- int encoder_sent_rate[] = {150, 150, 150};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, low spatial.
- UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 640, 360,
- 30.0f));
-
- // Reset and lower rates to get another spatial action (3/4x3/4).
- // Lower the frame rate for spatial to be selected again.
- qm_resolution_->ResetRates();
- qm_resolution_->UpdateCodecParameters(10.0f, 640, 360);
- EXPECT_EQ(4, qm_resolution_->GetImageType(640, 360));
- // Update rates for a sequence of intervals.
- int target_rate2[] = {70, 70, 70, 70, 70};
- int encoder_sent_rate2[] = {70, 70, 70, 70, 70};
- int incoming_frame_rate2[] = {10, 10, 10, 10, 10};
- uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
- fraction_lost2, 5);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, medium spatial.
- UpdateQmContentData(kTemporalHigh, kSpatialMedium, kSpatialMedium,
- kSpatialMedium);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(5, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 4.0f / 3.0f, 4.0f / 3.0f,
- 1.0f, 480, 270, 10.0f));
-
- // Reset and go to very low rate: no action should be taken,
- // we went down too much already.
- qm_resolution_->ResetRates();
- qm_resolution_->UpdateCodecParameters(10.0f, 480, 270);
- EXPECT_EQ(3, qm_resolution_->GetImageType(480, 270));
- // Update rates for a sequence of intervals.
- int target_rate3[] = {10, 10, 10, 10, 10};
- int encoder_sent_rate3[] = {10, 10, 10, 10, 10};
- int incoming_frame_rate3[] = {10, 10, 10, 10, 10};
- uint8_t fraction_lost3[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate3, encoder_sent_rate3, incoming_frame_rate3,
- fraction_lost3, 5);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(5, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 480, 270,
- 10.0f));
-}
-
-// Multiple down-sampling stages and then undo all of them.
-// Spatial down-sample 3/4x3/4, followed by temporal down-sample 2/3,
-// followed by spatial 3/4x3/4. Then go up to full state,
-// as encoding rate has increased.
-TEST_F(QmSelectTest, MultipleStagesCheckActionHistory1) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(150, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Go down spatial 3/4x3/4.
- // Update rates for a sequence of intervals.
- int target_rate[] = {150, 150, 150};
- int encoder_sent_rate[] = {150, 150, 150};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Medium motion, low spatial.
- UpdateQmContentData(kTemporalMedium, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(6, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 4.0f / 3.0f, 4.0f / 3.0f,
- 1.0f, 480, 360, 30.0f));
- // Go down 2/3 temporal.
- qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
- EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
- qm_resolution_->ResetRates();
- int target_rate2[] = {100, 100, 100, 100, 100};
- int encoder_sent_rate2[] = {100, 100, 100, 100, 100};
- int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
- uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
- fraction_lost2, 5);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Low motion, high spatial.
- UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 480, 360,
- 20.5f));
-
- // Go down 3/4x3/4 spatial:
- qm_resolution_->UpdateCodecParameters(20.0f, 480, 360);
- qm_resolution_->ResetRates();
- int target_rate3[] = {80, 80, 80, 80, 80};
- int encoder_sent_rate3[] = {80, 80, 80, 80, 80};
- int incoming_frame_rate3[] = {20, 20, 20, 20, 20};
- uint8_t fraction_lost3[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate3, encoder_sent_rate3, incoming_frame_rate3,
- fraction_lost3, 5);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, low spatial.
- UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- // The two spatial actions of 3/4x3/4 are converted to 1/2x1/2,
- // so scale factor is 2.0.
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240,
- 20.0f));
-
- // Reset rates and go high up in rate: expect to go back up both 1/2x1/2
- // spatially and 1/2 temporally. Spatial undoing is done in 2 stages.
- qm_resolution_->UpdateCodecParameters(15.0f, 320, 240);
- EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
- qm_resolution_->ResetRates();
- // Update rates for a sequence of intervals.
- int target_rate4[] = {1000, 1000, 1000, 1000, 1000};
- int encoder_sent_rate4[] = {1000, 1000, 1000, 1000, 1000};
- int incoming_frame_rate4[] = {15, 15, 15, 15, 15};
- uint8_t fraction_lost4[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate4, encoder_sent_rate4, incoming_frame_rate4,
- fraction_lost4, 5);
-
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- float scale = (4.0f / 3.0f) / 2.0f;
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, scale, scale, 2.0f / 3.0f, 480,
- 360, 30.0f));
-
- qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
- EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 3.0f / 4.0f, 3.0f / 4.0f, 1.0f,
- 640, 480, 30.0f));
-}
-
-// Multiple down-sampling and up-sample stages, with partial undoing.
-// Spatial down-sample 1/2x1/2, followed by temporal down-sample 2/3, undo the
-// temporal, then another temporal, and then undo both spatial and temporal.
-TEST_F(QmSelectTest, MultipleStagesCheckActionHistory2) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(80, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Go down 1/2x1/2 spatial.
- // Update rates for a sequence of intervals.
- int target_rate[] = {80, 80, 80};
- int encoder_sent_rate[] = {80, 80, 80};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Medium motion, low spatial.
- UpdateQmContentData(kTemporalMedium, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(6, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240,
- 30.0f));
-
- // Go down 2/3 temporal.
- qm_resolution_->UpdateCodecParameters(30.0f, 320, 240);
- EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
- qm_resolution_->ResetRates();
- int target_rate2[] = {40, 40, 40, 40, 40};
- int encoder_sent_rate2[] = {40, 40, 40, 40, 40};
- int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
- uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
- fraction_lost2, 5);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Medium motion, high spatial.
- UpdateQmContentData(kTemporalMedium, kSpatialHigh, kSpatialHigh,
- kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(7, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 320, 240,
- 20.5f));
-
- // Go up 2/3 temporally.
- qm_resolution_->UpdateCodecParameters(20.0f, 320, 240);
- qm_resolution_->ResetRates();
- // Update rates for a sequence of intervals.
- int target_rate3[] = {150, 150, 150, 150, 150};
- int encoder_sent_rate3[] = {150, 150, 150, 150, 150};
- int incoming_frame_rate3[] = {20, 20, 20, 20, 20};
- uint8_t fraction_lost3[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate3, encoder_sent_rate3, incoming_frame_rate3,
- fraction_lost3, 5);
-
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(7, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f / 3.0f, 320,
- 240, 30.0f));
-
- // Go down 2/3 temporal.
- qm_resolution_->UpdateCodecParameters(30.0f, 320, 240);
- EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
- qm_resolution_->ResetRates();
- int target_rate4[] = {40, 40, 40, 40, 40};
- int encoder_sent_rate4[] = {40, 40, 40, 40, 40};
- int incoming_frame_rate4[] = {30, 30, 30, 30, 30};
- uint8_t fraction_lost4[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate4, encoder_sent_rate4, incoming_frame_rate4,
- fraction_lost4, 5);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Low motion, high spatial.
- UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 320, 240,
- 20.5f));
-
- // Go up spatial and temporal. Spatial undoing is done in 2 stages.
- qm_resolution_->UpdateCodecParameters(20.5f, 320, 240);
- qm_resolution_->ResetRates();
- // Update rates for a sequence of intervals.
- int target_rate5[] = {1000, 1000, 1000, 1000, 1000};
- int encoder_sent_rate5[] = {1000, 1000, 1000, 1000, 1000};
- int incoming_frame_rate5[] = {20, 20, 20, 20, 20};
- uint8_t fraction_lost5[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate5, encoder_sent_rate5, incoming_frame_rate5,
- fraction_lost5, 5);
-
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- float scale = (4.0f / 3.0f) / 2.0f;
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, scale, scale, 2.0f / 3.0f,
- 480, 360, 30.0f));
-
- qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
- EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 3.0f / 4.0f, 3.0f / 4.0f, 1.0f,
- 640, 480, 30.0f));
-}
-
-// Multiple down-sampling and up-sample stages, with partial undoing.
-// Spatial down-sample 3/4x3/4, followed by temporal down-sample 2/3,
-// undo the temporal 2/3, and then undo the spatial.
-TEST_F(QmSelectTest, MultipleStagesCheckActionHistory3) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(100, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Go down 3/4x3/4 spatial.
- // Update rates for a sequence of intervals.
- int target_rate[] = {100, 100, 100};
- int encoder_sent_rate[] = {100, 100, 100};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Medium motion, low spatial.
- UpdateQmContentData(kTemporalMedium, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(6, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 4.0f / 3.0f, 4.0f / 3.0f,
- 1.0f, 480, 360, 30.0f));
-
- // Go down 2/3 temporal.
- qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
- EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
- qm_resolution_->ResetRates();
- int target_rate2[] = {100, 100, 100, 100, 100};
- int encoder_sent_rate2[] = {100, 100, 100, 100, 100};
- int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
- uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
- fraction_lost2, 5);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Low motion, high spatial.
- UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 480, 360,
- 20.5f));
-
- // Go up 2/3 temporal.
- qm_resolution_->UpdateCodecParameters(20.5f, 480, 360);
- qm_resolution_->ResetRates();
- // Update rates for a sequence of intervals.
- int target_rate3[] = {250, 250, 250, 250, 250};
- int encoder_sent_rate3[] = {250, 250, 250, 250, 250};
- int incoming_frame_rate3[] = {20, 20, 20, 20, 20};
- uint8_t fraction_lost3[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate3, encoder_sent_rate3, incoming_frame_rate3,
- fraction_lost3, 5);
-
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f / 3.0f, 480,
- 360, 30.0f));
-
- // Go up spatial.
- qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
- EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
- qm_resolution_->ResetRates();
- int target_rate4[] = {500, 500, 500, 500, 500};
- int encoder_sent_rate4[] = {500, 500, 500, 500, 500};
- int incoming_frame_rate4[] = {30, 30, 30, 30, 30};
- uint8_t fraction_lost4[] = {30, 30, 30, 30, 30};
- UpdateQmRateData(target_rate4, encoder_sent_rate4, incoming_frame_rate4,
- fraction_lost4, 5);
-
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 3.0f / 4.0f, 3.0f / 4.0f,
- 1.0f, 640, 480, 30.0f));
-}
-
-// Two stages of 3/4x3/4 converted to one stage of 1/2x1/2.
-TEST_F(QmSelectTest, ConvertThreeQuartersToOneHalf) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(150, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Go down 3/4x3/4 spatial.
- // Update rates for a sequence of intervals.
- int target_rate[] = {150, 150, 150};
- int encoder_sent_rate[] = {150, 150, 150};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Medium motion, low spatial.
- UpdateQmContentData(kTemporalMedium, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(6, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 4.0f / 3.0f, 4.0f / 3.0f,
- 1.0f, 480, 360, 30.0f));
-
- // Set rates to go down another 3/4 spatial. Should be converted to 1/2.
- qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
- EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
- qm_resolution_->ResetRates();
- int target_rate2[] = {100, 100, 100, 100, 100};
- int encoder_sent_rate2[] = {100, 100, 100, 100, 100};
- int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
- uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
- fraction_lost2, 5);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Medium motion, low spatial.
- UpdateQmContentData(kTemporalMedium, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(6, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240,
- 30.0f));
-}
-
-void QmSelectTest::InitQmNativeData(float initial_bit_rate,
- int user_frame_rate,
- int native_width,
- int native_height,
- int num_layers) {
- EXPECT_EQ(0, qm_resolution_->Initialize(initial_bit_rate,
- user_frame_rate,
- native_width,
- native_height,
- num_layers));
-}
-
-void QmSelectTest::UpdateQmContentData(float motion_metric,
- float spatial_metric,
- float spatial_metric_horiz,
- float spatial_metric_vert) {
- content_metrics_->motion_magnitude = motion_metric;
- content_metrics_->spatial_pred_err = spatial_metric;
- content_metrics_->spatial_pred_err_h = spatial_metric_horiz;
- content_metrics_->spatial_pred_err_v = spatial_metric_vert;
- qm_resolution_->UpdateContent(content_metrics_);
-}
-
-void QmSelectTest::UpdateQmEncodedFrame(size_t* encoded_size,
- size_t num_updates) {
- for (size_t i = 0; i < num_updates; ++i) {
- // Convert the update from kbits to bytes (1000 bits per kbit, 8 per byte).
- size_t encoded_size_update = 1000 * encoded_size[i] / 8;
- qm_resolution_->UpdateEncodedSize(encoded_size_update);
- }
-}
-
-void QmSelectTest::UpdateQmRateData(int* target_rate,
- int* encoder_sent_rate,
- int* incoming_frame_rate,
- uint8_t* fraction_lost,
- int num_updates) {
- for (int i = 0; i < num_updates; ++i) {
- float target_rate_update = target_rate[i];
- float encoder_sent_rate_update = encoder_sent_rate[i];
- float incoming_frame_rate_update = incoming_frame_rate[i];
- uint8_t fraction_lost_update = fraction_lost[i];
- qm_resolution_->UpdateRates(target_rate_update,
- encoder_sent_rate_update,
- incoming_frame_rate_update,
- fraction_lost_update);
- }
-}
-
-// Check if the selected action from the QmResolution class is the same
-// as the expected scales from |fac_width|, |fac_height|, |fac_temp|.
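-// Note: exact float equality is used below, on the assumption that the
-// expected factors are computed with the same expressions the QmResolution
-// implementation uses.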
-bool QmSelectTest::IsSelectedActionCorrect(VCMResolutionScale* qm_scale,
- float fac_width,
- float fac_height,
- float fac_temp,
- uint16_t new_width,
- uint16_t new_height,
- float new_frame_rate) {
- return qm_scale->spatial_width_fact == fac_width &&
-        qm_scale->spatial_height_fact == fac_height &&
-        qm_scale->temporal_fact == fac_temp &&
-        qm_scale->codec_width == new_width &&
-        qm_scale->codec_height == new_height &&
-        qm_scale->frame_rate == new_frame_rate;
-}
-} // namespace webrtc
diff --git a/webrtc/modules/video_coding/main/source/receiver.cc b/webrtc/modules/video_coding/main/source/receiver.cc
deleted file mode 100644
index 0707a9c3cd..0000000000
--- a/webrtc/modules/video_coding/main/source/receiver.cc
+++ /dev/null
@@ -1,268 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_coding/main/source/receiver.h"
-
-#include <assert.h>
-
-#include <cstdlib>
-
-#include "webrtc/base/trace_event.h"
-#include "webrtc/modules/video_coding/main/source/encoded_frame.h"
-#include "webrtc/modules/video_coding/main/source/internal_defines.h"
-#include "webrtc/modules/video_coding/main/source/media_opt_util.h"
-#include "webrtc/system_wrappers/include/clock.h"
-#include "webrtc/system_wrappers/include/logging.h"
-
-namespace webrtc {
-
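-// Upper bound on the receiver delay that can be requested via
-// SetMinReceiverDelay(), in milliseconds.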
-enum { kMaxReceiverDelayMs = 10000 };
-
-VCMReceiver::VCMReceiver(VCMTiming* timing,
- Clock* clock,
- EventFactory* event_factory)
- : VCMReceiver(timing,
- clock,
- rtc::scoped_ptr<EventWrapper>(event_factory->CreateEvent()),
- rtc::scoped_ptr<EventWrapper>(event_factory->CreateEvent())) {
-}
-
-VCMReceiver::VCMReceiver(VCMTiming* timing,
- Clock* clock,
- rtc::scoped_ptr<EventWrapper> receiver_event,
- rtc::scoped_ptr<EventWrapper> jitter_buffer_event)
- : crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- clock_(clock),
- jitter_buffer_(clock_, jitter_buffer_event.Pass()),
- timing_(timing),
- render_wait_event_(receiver_event.Pass()),
- max_video_delay_ms_(kMaxVideoDelayMs) {
- Reset();
-}
-
-VCMReceiver::~VCMReceiver() {
- render_wait_event_->Set();
- delete crit_sect_;
-}
-
-void VCMReceiver::Reset() {
- CriticalSectionScoped cs(crit_sect_);
- if (!jitter_buffer_.Running()) {
- jitter_buffer_.Start();
- } else {
- jitter_buffer_.Flush();
- }
-}
-
-void VCMReceiver::UpdateRtt(int64_t rtt) {
- jitter_buffer_.UpdateRtt(rtt);
-}
-
-int32_t VCMReceiver::InsertPacket(const VCMPacket& packet,
- uint16_t frame_width,
- uint16_t frame_height) {
- // Insert the packet into the jitter buffer. The packet can either be empty or
- // contain media at this point.
- bool retransmitted = false;
- const VCMFrameBufferEnum ret = jitter_buffer_.InsertPacket(packet,
- &retransmitted);
- if (ret == kOldPacket) {
- return VCM_OK;
- } else if (ret == kFlushIndicator) {
- return VCM_FLUSH_INDICATOR;
- } else if (ret < 0) {
- return VCM_JITTER_BUFFER_ERROR;
- }
- if (ret == kCompleteSession && !retransmitted) {
- // We don't want to include timestamps which have suffered from
- // retransmission here, since we compensate with extra retransmission
- // delay within the jitter estimate.
- timing_->IncomingTimestamp(packet.timestamp, clock_->TimeInMilliseconds());
- }
- return VCM_OK;
-}
-
-void VCMReceiver::TriggerDecoderShutdown() {
- jitter_buffer_.Stop();
- render_wait_event_->Set();
-}
-
-VCMEncodedFrame* VCMReceiver::FrameForDecoding(uint16_t max_wait_time_ms,
- int64_t& next_render_time_ms,
- bool render_timing) {
- const int64_t start_time_ms = clock_->TimeInMilliseconds();
- uint32_t frame_timestamp = 0;
- // Exhaust wait time to get a complete frame for decoding.
- bool found_frame = jitter_buffer_.NextCompleteTimestamp(
- max_wait_time_ms, &frame_timestamp);
-
- if (!found_frame)
- found_frame = jitter_buffer_.NextMaybeIncompleteTimestamp(&frame_timestamp);
-
- if (!found_frame)
- return NULL;
-
- // We have a frame - Set timing and render timestamp.
- timing_->SetJitterDelay(jitter_buffer_.EstimatedJitterMs());
- const int64_t now_ms = clock_->TimeInMilliseconds();
- timing_->UpdateCurrentDelay(frame_timestamp);
- next_render_time_ms = timing_->RenderTimeMs(frame_timestamp, now_ms);
- // Check render timing.
- bool timing_error = false;
- // Assume that render timing errors are due to changes in the video stream.
- if (next_render_time_ms < 0) {
- timing_error = true;
- } else if (std::abs(next_render_time_ms - now_ms) > max_video_delay_ms_) {
- int frame_delay = static_cast<int>(std::abs(next_render_time_ms - now_ms));
- LOG(LS_WARNING) << "A frame about to be decoded is out of the configured "
- << "delay bounds (" << frame_delay << " > "
- << max_video_delay_ms_
- << "). Resetting the video jitter buffer.";
- timing_error = true;
- } else if (static_cast<int>(timing_->TargetVideoDelay()) >
- max_video_delay_ms_) {
- LOG(LS_WARNING) << "The video target delay has grown larger than "
- << max_video_delay_ms_ << " ms. Resetting jitter buffer.";
- timing_error = true;
- }
-
- if (timing_error) {
- // Timing error => reset timing and flush the jitter buffer.
- jitter_buffer_.Flush();
- timing_->Reset();
- return NULL;
- }
-
- if (!render_timing) {
- // Decode frame as close as possible to the render timestamp.
- const int32_t available_wait_time = max_wait_time_ms -
- static_cast<int32_t>(clock_->TimeInMilliseconds() - start_time_ms);
- uint16_t new_max_wait_time = static_cast<uint16_t>(
- VCM_MAX(available_wait_time, 0));
- uint32_t wait_time_ms = timing_->MaxWaitingTime(
- next_render_time_ms, clock_->TimeInMilliseconds());
- if (new_max_wait_time < wait_time_ms) {
- // We're not allowed to wait until the frame is supposed to be rendered.
- // Wait as long as we're allowed (to avoid busy looping) and then return
- // NULL. The next call to this function might return the frame.
- render_wait_event_->Wait(new_max_wait_time);
- return NULL;
- }
- // Wait until it's time to render.
- render_wait_event_->Wait(wait_time_ms);
- }
-
- // Extract the frame from the jitter buffer and set the render time.
- VCMEncodedFrame* frame = jitter_buffer_.ExtractAndSetDecode(frame_timestamp);
- if (frame == NULL) {
- return NULL;
- }
- frame->SetRenderTime(next_render_time_ms);
- TRACE_EVENT_ASYNC_STEP1("webrtc", "Video", frame->TimeStamp(),
- "SetRenderTS", "render_time", next_render_time_ms);
- if (!frame->Complete()) {
- // Update stats for incomplete frames.
- bool retransmitted = false;
- const int64_t last_packet_time_ms =
- jitter_buffer_.LastPacketTime(frame, &retransmitted);
- if (last_packet_time_ms >= 0 && !retransmitted) {
- // We don't want to include timestamps which have suffered from
- // retransmission here, since we compensate with extra retransmission
- // delay within the jitter estimate.
- timing_->IncomingTimestamp(frame_timestamp, last_packet_time_ms);
- }
- }
- return frame;
-}
-
-void VCMReceiver::ReleaseFrame(VCMEncodedFrame* frame) {
- jitter_buffer_.ReleaseFrame(frame);
-}
-
-void VCMReceiver::ReceiveStatistics(uint32_t* bitrate,
- uint32_t* framerate) {
- assert(bitrate);
- assert(framerate);
- jitter_buffer_.IncomingRateStatistics(framerate, bitrate);
-}
-
-uint32_t VCMReceiver::DiscardedPackets() const {
- return jitter_buffer_.num_discarded_packets();
-}
-
-void VCMReceiver::SetNackMode(VCMNackMode nackMode,
- int64_t low_rtt_nack_threshold_ms,
- int64_t high_rtt_nack_threshold_ms) {
- CriticalSectionScoped cs(crit_sect_);
- // Default to always having NACK enabled in hybrid mode.
- jitter_buffer_.SetNackMode(nackMode, low_rtt_nack_threshold_ms,
- high_rtt_nack_threshold_ms);
-}
-
-void VCMReceiver::SetNackSettings(size_t max_nack_list_size,
- int max_packet_age_to_nack,
- int max_incomplete_time_ms) {
- jitter_buffer_.SetNackSettings(max_nack_list_size,
- max_packet_age_to_nack,
- max_incomplete_time_ms);
-}
-
-VCMNackMode VCMReceiver::NackMode() const {
- CriticalSectionScoped cs(crit_sect_);
- return jitter_buffer_.nack_mode();
-}
-
-std::vector<uint16_t> VCMReceiver::NackList(bool* request_key_frame) {
- return jitter_buffer_.GetNackList(request_key_frame);
-}
-
-void VCMReceiver::SetDecodeErrorMode(VCMDecodeErrorMode decode_error_mode) {
- jitter_buffer_.SetDecodeErrorMode(decode_error_mode);
-}
-
-VCMDecodeErrorMode VCMReceiver::DecodeErrorMode() const {
- return jitter_buffer_.decode_error_mode();
-}
-
-int VCMReceiver::SetMinReceiverDelay(int desired_delay_ms) {
- CriticalSectionScoped cs(crit_sect_);
- if (desired_delay_ms < 0 || desired_delay_ms > kMaxReceiverDelayMs) {
- return -1;
- }
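- // The total allowed video delay is the requested delay on top of the
- // default maximum.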
- max_video_delay_ms_ = desired_delay_ms + kMaxVideoDelayMs;
- // Initialize timing to the desired delay.
- timing_->set_min_playout_delay(desired_delay_ms);
- return 0;
-}
-
-int VCMReceiver::RenderBufferSizeMs() {
- uint32_t timestamp_start = 0u;
- uint32_t timestamp_end = 0u;
- // Render timestamps are computed just prior to decoding. Therefore this is
- // only an estimate based on frames' timestamps and current timing state.
- jitter_buffer_.RenderBufferSize(&timestamp_start, &timestamp_end);
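- // Equal start and end timestamps indicate an empty render buffer.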
- if (timestamp_start == timestamp_end) {
- return 0;
- }
- // Update timing.
- const int64_t now_ms = clock_->TimeInMilliseconds();
- timing_->SetJitterDelay(jitter_buffer_.EstimatedJitterMs());
- // Get render timestamps.
- uint32_t render_start = timing_->RenderTimeMs(timestamp_start, now_ms);
- uint32_t render_end = timing_->RenderTimeMs(timestamp_end, now_ms);
- return render_end - render_start;
-}
-
-void VCMReceiver::RegisterStatsCallback(
- VCMReceiveStatisticsCallback* callback) {
- jitter_buffer_.RegisterStatsCallback(callback);
-}
-
-} // namespace webrtc
diff --git a/webrtc/modules/video_coding/main/source/receiver.h b/webrtc/modules/video_coding/main/source/receiver.h
deleted file mode 100644
index e2515d438f..0000000000
--- a/webrtc/modules/video_coding/main/source/receiver.h
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_RECEIVER_H_
-#define WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_RECEIVER_H_
-
-#include "webrtc/modules/video_coding/main/source/jitter_buffer.h"
-#include "webrtc/modules/video_coding/main/source/packet.h"
-#include "webrtc/modules/video_coding/main/source/timing.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding_defines.h"
-
-namespace webrtc {
-
-class Clock;
-class VCMEncodedFrame;
-
-class VCMReceiver {
- public:
- VCMReceiver(VCMTiming* timing,
- Clock* clock,
- EventFactory* event_factory);
-
- // Using this constructor, you can supply a separate event for the jitter
- // buffer. Useful for unit tests when you want to simulate incoming
- // packets, in which case the jitter buffer's wait event is different from
- // that of VCMReceiver itself.
- VCMReceiver(VCMTiming* timing,
- Clock* clock,
- rtc::scoped_ptr<EventWrapper> receiver_event,
- rtc::scoped_ptr<EventWrapper> jitter_buffer_event);
-
- ~VCMReceiver();
-
- void Reset();
- void UpdateRtt(int64_t rtt);
- int32_t InsertPacket(const VCMPacket& packet,
- uint16_t frame_width,
- uint16_t frame_height);
- VCMEncodedFrame* FrameForDecoding(uint16_t max_wait_time_ms,
- int64_t& next_render_time_ms,
- bool render_timing = true);
- void ReleaseFrame(VCMEncodedFrame* frame);
- void ReceiveStatistics(uint32_t* bitrate, uint32_t* framerate);
- uint32_t DiscardedPackets() const;
-
- // NACK.
- void SetNackMode(VCMNackMode nackMode,
- int64_t low_rtt_nack_threshold_ms,
- int64_t high_rtt_nack_threshold_ms);
- void SetNackSettings(size_t max_nack_list_size,
- int max_packet_age_to_nack,
- int max_incomplete_time_ms);
- VCMNackMode NackMode() const;
- std::vector<uint16_t> NackList(bool* request_key_frame);
-
- // Receiver video delay.
- int SetMinReceiverDelay(int desired_delay_ms);
-
- // Decoding with errors.
- void SetDecodeErrorMode(VCMDecodeErrorMode decode_error_mode);
- VCMDecodeErrorMode DecodeErrorMode() const;
-
- // Returns size in time (milliseconds) of complete continuous frames in the
- // jitter buffer. The render time is estimated based on the render delay at
- // the time this function is called.
- int RenderBufferSizeMs();
-
- void RegisterStatsCallback(VCMReceiveStatisticsCallback* callback);
-
- void TriggerDecoderShutdown();
-
- private:
- CriticalSectionWrapper* crit_sect_;
- Clock* const clock_;
- VCMJitterBuffer jitter_buffer_;
- VCMTiming* timing_;
- rtc::scoped_ptr<EventWrapper> render_wait_event_;
- int max_video_delay_ms_;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_RECEIVER_H_
diff --git a/webrtc/modules/video_coding/main/source/receiver_unittest.cc b/webrtc/modules/video_coding/main/source/receiver_unittest.cc
deleted file mode 100644
index 359b241e72..0000000000
--- a/webrtc/modules/video_coding/main/source/receiver_unittest.cc
+++ /dev/null
@@ -1,526 +0,0 @@
-/* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <string.h>
-
-#include <list>
-#include <queue>
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/checks.h"
-#include "webrtc/modules/video_coding/main/source/packet.h"
-#include "webrtc/modules/video_coding/main/source/receiver.h"
-#include "webrtc/modules/video_coding/main/source/test/stream_generator.h"
-#include "webrtc/modules/video_coding/main/source/timing.h"
-#include "webrtc/modules/video_coding/main/test/test_util.h"
-#include "webrtc/system_wrappers/include/clock.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-
-namespace webrtc {
-
-class TestVCMReceiver : public ::testing::Test {
- protected:
- enum { kWidth = 640 };
- enum { kHeight = 480 };
-
- TestVCMReceiver()
-     : clock_(new SimulatedClock(0)),
-       timing_(clock_.get()),
-       receiver_(&timing_, clock_.get(), &event_factory_) {
-   stream_generator_.reset(
-       new StreamGenerator(0, clock_->TimeInMilliseconds()));
- }
-
- virtual void SetUp() {
- receiver_.Reset();
- }
-
- int32_t InsertPacket(int index) {
- VCMPacket packet;
- bool packet_available = stream_generator_->GetPacket(&packet, index);
- EXPECT_TRUE(packet_available);
- if (!packet_available)
- return kGeneralError; // Return here to avoid crashes below.
- return receiver_.InsertPacket(packet, kWidth, kHeight);
- }
-
- int32_t InsertPacketAndPop(int index) {
- VCMPacket packet;
- bool packet_available = stream_generator_->PopPacket(&packet, index);
- EXPECT_TRUE(packet_available);
- if (!packet_available)
- return kGeneralError; // Return here to avoid crashes below.
- return receiver_.InsertPacket(packet, kWidth, kHeight);
- }
-
- int32_t InsertFrame(FrameType frame_type, bool complete) {
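- // Media frames are generated with one packet when complete and two when
- // not (the second packet is dropped below); empty frames carry a single
- // empty packet.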
- int num_of_packets = complete ? 1 : 2;
- stream_generator_->GenerateFrame(
- frame_type, (frame_type != kEmptyFrame) ? num_of_packets : 0,
- (frame_type == kEmptyFrame) ? 1 : 0, clock_->TimeInMilliseconds());
- int32_t ret = InsertPacketAndPop(0);
- if (!complete) {
- // Drop the second packet.
- VCMPacket packet;
- stream_generator_->PopPacket(&packet, 0);
- }
- clock_->AdvanceTimeMilliseconds(kDefaultFramePeriodMs);
- return ret;
- }
-
- bool DecodeNextFrame() {
- int64_t render_time_ms = 0;
- VCMEncodedFrame* frame =
- receiver_.FrameForDecoding(0, render_time_ms, false);
- if (!frame)
- return false;
- receiver_.ReleaseFrame(frame);
- return true;
- }
-
- rtc::scoped_ptr<SimulatedClock> clock_;
- VCMTiming timing_;
- NullEventFactory event_factory_;
- VCMReceiver receiver_;
- rtc::scoped_ptr<StreamGenerator> stream_generator_;
-};
-
-TEST_F(TestVCMReceiver, RenderBufferSize_AllComplete) {
- EXPECT_EQ(0, receiver_.RenderBufferSizeMs());
- EXPECT_GE(InsertFrame(kVideoFrameKey, true), kNoError);
- int num_of_frames = 10;
- for (int i = 0; i < num_of_frames; ++i) {
- EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
- }
- EXPECT_EQ(num_of_frames * kDefaultFramePeriodMs,
- receiver_.RenderBufferSizeMs());
-}
-
-TEST_F(TestVCMReceiver, RenderBufferSize_SkipToKeyFrame) {
- EXPECT_EQ(0, receiver_.RenderBufferSizeMs());
- const int kNumOfNonDecodableFrames = 2;
- for (int i = 0; i < kNumOfNonDecodableFrames; ++i) {
- EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
- }
- const int kNumOfFrames = 10;
- EXPECT_GE(InsertFrame(kVideoFrameKey, true), kNoError);
- for (int i = 0; i < kNumOfFrames - 1; ++i) {
- EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
- }
- EXPECT_EQ((kNumOfFrames - 1) * kDefaultFramePeriodMs,
- receiver_.RenderBufferSizeMs());
-}
-
-TEST_F(TestVCMReceiver, RenderBufferSize_NotAllComplete) {
- EXPECT_EQ(0, receiver_.RenderBufferSizeMs());
- EXPECT_GE(InsertFrame(kVideoFrameKey, true), kNoError);
- int num_of_frames = 10;
- for (int i = 0; i < num_of_frames; ++i) {
- EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
- }
- num_of_frames++;
- EXPECT_GE(InsertFrame(kVideoFrameDelta, false), kNoError);
- for (int i = 0; i < num_of_frames; ++i) {
- EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
- }
- EXPECT_EQ((num_of_frames - 1) * kDefaultFramePeriodMs,
- receiver_.RenderBufferSizeMs());
-}
-
-TEST_F(TestVCMReceiver, RenderBufferSize_NoKeyFrame) {
- EXPECT_EQ(0, receiver_.RenderBufferSizeMs());
- int num_of_frames = 10;
- for (int i = 0; i < num_of_frames; ++i) {
- EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
- }
- int64_t next_render_time_ms = 0;
- VCMEncodedFrame* frame = receiver_.FrameForDecoding(10, next_render_time_ms);
- EXPECT_TRUE(frame == NULL);
- receiver_.ReleaseFrame(frame);
- EXPECT_GE(InsertFrame(kVideoFrameDelta, false), kNoError);
- for (int i = 0; i < num_of_frames; ++i) {
- EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
- }
- EXPECT_EQ(0, receiver_.RenderBufferSizeMs());
-}
-
-TEST_F(TestVCMReceiver, NonDecodableDuration_Empty) {
- // Enable NACK, with no RTT thresholds for disabling retransmission delay.
- receiver_.SetNackMode(kNack, -1, -1);
- const size_t kMaxNackListSize = 1000;
- const int kMaxPacketAgeToNack = 1000;
- const int kMaxNonDecodableDuration = 500;
- const int kMinDelayMs = 500;
- receiver_.SetNackSettings(kMaxNackListSize, kMaxPacketAgeToNack,
- kMaxNonDecodableDuration);
- EXPECT_GE(InsertFrame(kVideoFrameKey, true), kNoError);
- // Advance time until it's time to decode the key frame.
- clock_->AdvanceTimeMilliseconds(kMinDelayMs);
- EXPECT_TRUE(DecodeNextFrame());
- bool request_key_frame = false;
- std::vector<uint16_t> nack_list = receiver_.NackList(&request_key_frame);
- EXPECT_FALSE(request_key_frame);
-}
-
-TEST_F(TestVCMReceiver, NonDecodableDuration_NoKeyFrame) {
- // Enable NACK, with no RTT thresholds for disabling retransmission delay.
- receiver_.SetNackMode(kNack, -1, -1);
- const size_t kMaxNackListSize = 1000;
- const int kMaxPacketAgeToNack = 1000;
- const int kMaxNonDecodableDuration = 500;
- receiver_.SetNackSettings(kMaxNackListSize, kMaxPacketAgeToNack,
- kMaxNonDecodableDuration);
- const int kNumFrames = kDefaultFrameRate * kMaxNonDecodableDuration / 1000;
- for (int i = 0; i < kNumFrames; ++i) {
- EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
- }
- bool request_key_frame = false;
- std::vector<uint16_t> nack_list = receiver_.NackList(&request_key_frame);
- EXPECT_TRUE(request_key_frame);
-}
-
-TEST_F(TestVCMReceiver, NonDecodableDuration_OneIncomplete) {
- // Enable NACK, with no RTT thresholds for disabling retransmission delay.
- receiver_.SetNackMode(kNack, -1, -1);
- const size_t kMaxNackListSize = 1000;
- const int kMaxPacketAgeToNack = 1000;
- const int kMaxNonDecodableDuration = 500;
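- // Number of frames spanning the non-decodable window, rounded to nearest.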
- const int kMaxNonDecodableDurationFrames = (kDefaultFrameRate *
- kMaxNonDecodableDuration + 500) / 1000;
- const int kMinDelayMs = 500;
- receiver_.SetNackSettings(kMaxNackListSize, kMaxPacketAgeToNack,
- kMaxNonDecodableDuration);
- receiver_.SetMinReceiverDelay(kMinDelayMs);
- int64_t key_frame_inserted = clock_->TimeInMilliseconds();
- EXPECT_GE(InsertFrame(kVideoFrameKey, true), kNoError);
- // Insert an incomplete frame.
- EXPECT_GE(InsertFrame(kVideoFrameDelta, false), kNoError);
- // Insert enough frames to have too long non-decodable sequence.
- for (int i = 0; i < kMaxNonDecodableDurationFrames; ++i) {
- EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
- }
- // Advance time until it's time to decode the key frame.
- clock_->AdvanceTimeMilliseconds(kMinDelayMs - clock_->TimeInMilliseconds() -
- key_frame_inserted);
- EXPECT_TRUE(DecodeNextFrame());
- // Make sure we get a key frame request.
- bool request_key_frame = false;
- std::vector<uint16_t> nack_list = receiver_.NackList(&request_key_frame);
- EXPECT_TRUE(request_key_frame);
-}
-
-TEST_F(TestVCMReceiver, NonDecodableDuration_NoTrigger) {
- // Enable NACK, with no RTT thresholds for disabling retransmission delay.
- receiver_.SetNackMode(kNack, -1, -1);
- const size_t kMaxNackListSize = 1000;
- const int kMaxPacketAgeToNack = 1000;
- const int kMaxNonDecodableDuration = 500;
- const int kMaxNonDecodableDurationFrames = (kDefaultFrameRate *
- kMaxNonDecodableDuration + 500) / 1000;
- const int kMinDelayMs = 500;
- receiver_.SetNackSettings(kMaxNackListSize, kMaxPacketAgeToNack,
- kMaxNonDecodableDuration);
- receiver_.SetMinReceiverDelay(kMinDelayMs);
- int64_t key_frame_inserted = clock_->TimeInMilliseconds();
- EXPECT_GE(InsertFrame(kVideoFrameKey, true), kNoError);
- // Insert an incomplete frame.
- EXPECT_GE(InsertFrame(kVideoFrameDelta, false), kNoError);
- // Insert all but one frame to not trigger a key frame request due to
- // too long duration of non-decodable frames.
- for (int i = 0; i < kMaxNonDecodableDurationFrames - 1; ++i) {
- EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
- }
- // Advance time until it's time to decode the key frame.
- clock_->AdvanceTimeMilliseconds(kMinDelayMs - clock_->TimeInMilliseconds() -
- key_frame_inserted);
- EXPECT_TRUE(DecodeNextFrame());
- // Make sure we don't get a key frame request since we haven't generated
- // enough frames.
- bool request_key_frame = false;
- std::vector<uint16_t> nack_list = receiver_.NackList(&request_key_frame);
- EXPECT_FALSE(request_key_frame);
-}
-
-TEST_F(TestVCMReceiver, NonDecodableDuration_NoTrigger2) {
- // Enable NACK, with no RTT thresholds for disabling retransmission delay.
- receiver_.SetNackMode(kNack, -1, -1);
- const size_t kMaxNackListSize = 1000;
- const int kMaxPacketAgeToNack = 1000;
- const int kMaxNonDecodableDuration = 500;
- const int kMaxNonDecodableDurationFrames = (kDefaultFrameRate *
- kMaxNonDecodableDuration + 500) / 1000;
- const int kMinDelayMs = 500;
- receiver_.SetNackSettings(kMaxNackListSize, kMaxPacketAgeToNack,
- kMaxNonDecodableDuration);
- receiver_.SetMinReceiverDelay(kMinDelayMs);
- int64_t key_frame_inserted = clock_->TimeInMilliseconds();
- EXPECT_GE(InsertFrame(kVideoFrameKey, true), kNoError);
- // Insert enough frames to have too long non-decodable sequence, except that
- // we don't have any losses.
- for (int i = 0; i < kMaxNonDecodableDurationFrames; ++i) {
- EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
- }
- // Insert an incomplete frame.
- EXPECT_GE(InsertFrame(kVideoFrameDelta, false), kNoError);
- // Advance time until it's time to decode the key frame.
- clock_->AdvanceTimeMilliseconds(kMinDelayMs - clock_->TimeInMilliseconds() -
- key_frame_inserted);
- EXPECT_TRUE(DecodeNextFrame());
- // Make sure we don't get a key frame request since the non-decodable duration
- // is only one frame.
- bool request_key_frame = false;
- std::vector<uint16_t> nack_list = receiver_.NackList(&request_key_frame);
- EXPECT_FALSE(request_key_frame);
-}
-
-TEST_F(TestVCMReceiver, NonDecodableDuration_KeyFrameAfterIncompleteFrames) {
- // Enable NACK, with no RTT thresholds for disabling retransmission delay.
- receiver_.SetNackMode(kNack, -1, -1);
- const size_t kMaxNackListSize = 1000;
- const int kMaxPacketAgeToNack = 1000;
- const int kMaxNonDecodableDuration = 500;
- const int kMaxNonDecodableDurationFrames = (kDefaultFrameRate *
- kMaxNonDecodableDuration + 500) / 1000;
- const int kMinDelayMs = 500;
- receiver_.SetNackSettings(kMaxNackListSize, kMaxPacketAgeToNack,
- kMaxNonDecodableDuration);
- receiver_.SetMinReceiverDelay(kMinDelayMs);
- int64_t key_frame_inserted = clock_->TimeInMilliseconds();
- EXPECT_GE(InsertFrame(kVideoFrameKey, true), kNoError);
- // Insert an incomplete frame.
- EXPECT_GE(InsertFrame(kVideoFrameDelta, false), kNoError);
- // Insert enough frames to span a too-long non-decodable duration.
- for (int i = 0; i < kMaxNonDecodableDurationFrames; ++i) {
- EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
- }
- EXPECT_GE(InsertFrame(kVideoFrameKey, true), kNoError);
- // Advance time until it's time to decode the key frame.
- clock_->AdvanceTimeMilliseconds(kMinDelayMs - clock_->TimeInMilliseconds() -
- key_frame_inserted);
- EXPECT_TRUE(DecodeNextFrame());
- // Make sure we don't get a key frame request since we have a key frame
- // in the list.
- bool request_key_frame = false;
- std::vector<uint16_t> nack_list = receiver_.NackList(&request_key_frame);
- EXPECT_FALSE(request_key_frame);
-}
-
- // A simulated clock that, as time elapses, inserts frames into the jitter
- // buffer according to the timestamps set up in advance via SetFrames().
-class SimulatedClockWithFrames : public SimulatedClock {
- public:
- SimulatedClockWithFrames(StreamGenerator* stream_generator,
- VCMReceiver* receiver)
- : SimulatedClock(0),
- stream_generator_(stream_generator),
- receiver_(receiver) {}
- virtual ~SimulatedClockWithFrames() {}
-
- // If |stop_on_frame| is true and the next frame arrives between now and
- // now + |milliseconds|, the clock will be advanced to the arrival time of
- // the next frame.
- // Otherwise, the clock will be advanced by |milliseconds|.
- //
- // In both cases, a frame is inserted into the jitter buffer at the instant
- // when the clock time is timestamps_.front().arrive_time.
- //
- // Returns true if some frame arrives between now and now + |milliseconds|.
- bool AdvanceTimeMilliseconds(int64_t milliseconds, bool stop_on_frame) {
- return AdvanceTimeMicroseconds(milliseconds * 1000, stop_on_frame);
- }
-
- bool AdvanceTimeMicroseconds(int64_t microseconds, bool stop_on_frame) {
- int64_t start_time = TimeInMicroseconds();
- int64_t end_time = start_time + microseconds;
- bool frame_injected = false;
- while (!timestamps_.empty() &&
- timestamps_.front().arrive_time <= end_time) {
- RTC_DCHECK(timestamps_.front().arrive_time >= start_time);
-
- SimulatedClock::AdvanceTimeMicroseconds(timestamps_.front().arrive_time -
- TimeInMicroseconds());
- GenerateAndInsertFrame((timestamps_.front().render_time + 500) / 1000);
- timestamps_.pop();
- frame_injected = true;
-
- if (stop_on_frame)
- return frame_injected;
- }
-
- if (TimeInMicroseconds() < end_time) {
- SimulatedClock::AdvanceTimeMicroseconds(end_time - TimeInMicroseconds());
- }
- return frame_injected;
- }
-
- // Input timestamps are in milliseconds. |arrive_timestamps| must be
- // positive and in increasing order; they determine when frames are
- // inserted into the jitter buffer.
- // |render_timestamps| are the timestamps carried on the frames.
- void SetFrames(const int64_t* arrive_timestamps,
- const int64_t* render_timestamps,
- size_t size) {
- int64_t previous_arrive_timestamp = 0;
- for (size_t i = 0; i < size; i++) {
- RTC_CHECK(arrive_timestamps[i] >= previous_arrive_timestamp);
- timestamps_.push(TimestampPair(arrive_timestamps[i] * 1000,
- render_timestamps[i] * 1000));
- previous_arrive_timestamp = arrive_timestamps[i];
- }
- }
-
- private:
- struct TimestampPair {
- TimestampPair(int64_t arrive_timestamp, int64_t render_timestamp)
- : arrive_time(arrive_timestamp), render_time(render_timestamp) {}
-
- int64_t arrive_time;
- int64_t render_time;
- };
-
- void GenerateAndInsertFrame(int64_t render_timestamp_ms) {
- VCMPacket packet;
- stream_generator_->GenerateFrame(FrameType::kVideoFrameKey,
- 1, // media packets
- 0, // empty packets
- render_timestamp_ms);
-
- bool packet_available = stream_generator_->PopPacket(&packet, 0);
- EXPECT_TRUE(packet_available);
- if (!packet_available)
- return; // Return here to avoid crashes below.
- receiver_->InsertPacket(packet, 640, 480);
- }
-
- std::queue<TimestampPair> timestamps_;
- StreamGenerator* stream_generator_;
- VCMReceiver* receiver_;
-};
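-
- // A minimal usage sketch of SimulatedClockWithFrames (not from the original
- // file; |stream_generator| and |receiver| are assumed to be set up as in the
- // fixture below):
- //
- //   SimulatedClockWithFrames clock(&stream_generator, &receiver);
- //   const int64_t arrive_ms[] = {10, 50, 90};
- //   const int64_t render_ms[] = {0, 40, 80};
- //   clock.SetFrames(arrive_ms, render_ms, 3);
- //   // Advances to t = 10 ms and inserts the first frame.
- //   clock.AdvanceTimeMilliseconds(100, /*stop_on_frame=*/true);
- //   // Advances to t = 110 ms, inserting the remaining two frames.
- //   clock.AdvanceTimeMilliseconds(100, /*stop_on_frame=*/false);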
-
- // An event that drives a SimulatedClockWithFrames. A Wait() call does one
- // of the following:
- // 1. If |stop_on_frame| is true, the clock is advanced to the exact instant
- // the first frame arrives and the frame is inserted into the jitter
- // buffer, or the clock is advanced to now + |max_time| if no frame arrives
- // in that window.
- // 2. If |stop_on_frame| is false, the clock is advanced to now + |max_time|,
- // and all frames arriving between now and now + |max_time| are inserted
- // into the jitter buffer.
- //
- // This is used to simulate the jitter buffer receiving packets from the
- // network as time elapses.
-
-class FrameInjectEvent : public EventWrapper {
- public:
- FrameInjectEvent(SimulatedClockWithFrames* clock, bool stop_on_frame)
- : clock_(clock), stop_on_frame_(stop_on_frame) {}
-
- bool Set() override { return true; }
-
- EventTypeWrapper Wait(unsigned long max_time) override {
- if (clock_->AdvanceTimeMilliseconds(max_time, stop_on_frame_) &&
- stop_on_frame_) {
- return EventTypeWrapper::kEventSignaled;
- } else {
- return EventTypeWrapper::kEventTimeout;
- }
- }
-
- private:
- SimulatedClockWithFrames* clock_;
- bool stop_on_frame_;
-};
-
-class VCMReceiverTimingTest : public ::testing::Test {
- protected:
- VCMReceiverTimingTest()
- : clock_(&stream_generator_, &receiver_),
- stream_generator_(0, clock_.TimeInMilliseconds()),
- timing_(&clock_),
- receiver_(
- &timing_,
- &clock_,
- rtc::scoped_ptr<EventWrapper>(new FrameInjectEvent(&clock_, false)),
- rtc::scoped_ptr<EventWrapper>(
- new FrameInjectEvent(&clock_, true))) {}
-
- virtual void SetUp() { receiver_.Reset(); }
-
- SimulatedClockWithFrames clock_;
- StreamGenerator stream_generator_;
- VCMTiming timing_;
- VCMReceiver receiver_;
-};
-
- // Test whether VCMReceiver::FrameForDecoding handles the parameter
- // |max_wait_time_ms| correctly:
- // 1. The function should never take more than |max_wait_time_ms| to execute.
- // 2. If the function returns before now + |max_wait_time_ms|, a frame must
- // have been returned.
-TEST_F(VCMReceiverTimingTest, FrameForDecoding) {
- const size_t kNumFrames = 100;
- const int kFramePeriod = 40;
- int64_t arrive_timestamps[kNumFrames];
- int64_t render_timestamps[kNumFrames];
- int64_t next_render_time;
-
- // Construct test samples.
- // render_timestamps are the timestamps stored in the Frame;
- // arrive_timestamps controls when the Frame packet got received.
- for (size_t i = 0; i < kNumFrames; i++) {
- // The nominal frame rate is 25 Hz (one frame per kFramePeriod = 40 ms),
- // but we add a small deviation to arrive_timestamps to mimic network
- // jitter.
- arrive_timestamps[i] =
- (i + 1) * kFramePeriod + (i % 10) * ((i % 2) ? 1 : -1);
- render_timestamps[i] = (i + 1) * kFramePeriod;
- }
-
- clock_.SetFrames(arrive_timestamps, render_timestamps, kNumFrames);
-
- // Record how many frames we finally get out of the receiver.
- size_t num_frames_return = 0;
-
- const int64_t kMaxWaitTime = 30;
-
- // Ideally, we should get back all frames passed in via SetFrames.
- // If FrameForDecoding erroneously drops frames, this loop never completes
- // and we rely on the build bot's timeout to kill the test.
- while (num_frames_return < kNumFrames) {
- int64_t start_time = clock_.TimeInMilliseconds();
- VCMEncodedFrame* frame =
- receiver_.FrameForDecoding(kMaxWaitTime, next_render_time, false);
- int64_t end_time = clock_.TimeInMilliseconds();
-
- // In any case, FrameForDecoding should not wait longer than kMaxWaitTime.
- // If we did not get a frame, it must have waited for exactly kMaxWaitTime.
- // (Given the samples constructed above, there is no timing error, so the
- // only way it returns NULL is by running out of time.)
- if (frame) {
- receiver_.ReleaseFrame(frame);
- ++num_frames_return;
- EXPECT_GE(kMaxWaitTime, end_time - start_time);
- } else {
- EXPECT_EQ(kMaxWaitTime, end_time - start_time);
- }
- }
-}
-
-} // namespace webrtc
diff --git a/webrtc/modules/video_coding/main/source/rtt_filter.cc b/webrtc/modules/video_coding/main/source/rtt_filter.cc
deleted file mode 100644
index 5742e8fa89..0000000000
--- a/webrtc/modules/video_coding/main/source/rtt_filter.cc
+++ /dev/null
@@ -1,202 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_coding/main/source/internal_defines.h"
-#include "webrtc/modules/video_coding/main/source/rtt_filter.h"
-
-#include <math.h>
-#include <stdlib.h>
-#include <string.h>
-
-namespace webrtc {
-
-VCMRttFilter::VCMRttFilter()
- : _filtFactMax(35),
- _jumpStdDevs(2.5),
- _driftStdDevs(3.5),
- _detectThreshold(kMaxDriftJumpCount) {
- Reset();
-}
-
-VCMRttFilter&
-VCMRttFilter::operator=(const VCMRttFilter& rhs)
-{
- if (this != &rhs)
- {
- _gotNonZeroUpdate = rhs._gotNonZeroUpdate;
- _avgRtt = rhs._avgRtt;
- _varRtt = rhs._varRtt;
- _maxRtt = rhs._maxRtt;
- _filtFactCount = rhs._filtFactCount;
- _jumpCount = rhs._jumpCount;
- _driftCount = rhs._driftCount;
- memcpy(_jumpBuf, rhs._jumpBuf, sizeof(_jumpBuf));
- memcpy(_driftBuf, rhs._driftBuf, sizeof(_driftBuf));
- }
- return *this;
-}
-
-void
-VCMRttFilter::Reset()
-{
- _gotNonZeroUpdate = false;
- _avgRtt = 0;
- _varRtt = 0;
- _maxRtt = 0;
- _filtFactCount = 1;
- _jumpCount = 0;
- _driftCount = 0;
- // Zero the whole buffers, not just kMaxDriftJumpCount bytes of them.
- memset(_jumpBuf, 0, sizeof(_jumpBuf));
- memset(_driftBuf, 0, sizeof(_driftBuf));
-}
-
-void
-VCMRttFilter::Update(int64_t rttMs)
-{
- if (!_gotNonZeroUpdate)
- {
- if (rttMs == 0)
- {
- return;
- }
- _gotNonZeroUpdate = true;
- }
-
- // Sanity check
- if (rttMs > 3000)
- {
- rttMs = 3000;
- }
-
- double filtFactor = 0;
- if (_filtFactCount > 1)
- {
- filtFactor = static_cast<double>(_filtFactCount - 1) / _filtFactCount;
- }
- _filtFactCount++;
- if (_filtFactCount > _filtFactMax)
- {
- // This prevents filtFactor from going above
- // (_filtFactMax - 1) / _filtFactMax,
- // e.g., _filtFactMax = 50 => filtFactor = 49/50 = 0.98
- _filtFactCount = _filtFactMax;
- }
- double oldAvg = _avgRtt;
- double oldVar = _varRtt;
- _avgRtt = filtFactor * _avgRtt + (1 - filtFactor) * rttMs;
- _varRtt = filtFactor * _varRtt + (1 - filtFactor) *
- (rttMs - _avgRtt) * (rttMs - _avgRtt);
- _maxRtt = VCM_MAX(rttMs, _maxRtt);
- if (!JumpDetection(rttMs) || !DriftDetection(rttMs))
- {
- // In some cases we don't want to update the statistics
- _avgRtt = oldAvg;
- _varRtt = oldVar;
- }
-}
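-
- // A worked trace of the filter above with hypothetical samples of 100 ms,
- // 200 ms and 100 ms (the jump and drift checks pass for these values):
- //   n=1: filtFactor = 0   -> _avgRtt = 100
- //   n=2: filtFactor = 1/2 -> _avgRtt = 0.5 * 100 + 0.5 * 200 = 150
- //   n=3: filtFactor = 2/3 -> _avgRtt = (2/3) * 150 + (1/3) * 100 ~ 133
- // Until _filtFactCount reaches _filtFactMax the filter is a plain running
- // average; after that it becomes an exponential filter with factor
- // (_filtFactMax - 1) / _filtFactMax.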
-
-bool
-VCMRttFilter::JumpDetection(int64_t rttMs)
-{
- double diffFromAvg = _avgRtt - rttMs;
- if (fabs(diffFromAvg) > _jumpStdDevs * sqrt(_varRtt))
- {
- int diffSign = (diffFromAvg >= 0) ? 1 : -1;
- int jumpCountSign = (_jumpCount >= 0) ? 1 : -1;
- if (diffSign != jumpCountSign)
- {
- // Since the signs differ, the samples currently in
- // the buffer are useless, as they represent a
- // jump in a different direction.
- _jumpCount = 0;
- }
- if (abs(_jumpCount) < kMaxDriftJumpCount)
- {
- // Update the buffer used for the short-time
- // statistics.
- // The sign of the diff is used for updating the counter, since
- // we want to use the same buffer for keeping track of both
- // downward and upward RTT jumps.
- _jumpBuf[abs(_jumpCount)] = rttMs;
- _jumpCount += diffSign;
- }
- if (abs(_jumpCount) >= _detectThreshold)
- {
- // Detected an RTT jump
- ShortRttFilter(_jumpBuf, abs(_jumpCount));
- _filtFactCount = _detectThreshold + 1;
- _jumpCount = 0;
- }
- else
- {
- return false;
- }
- }
- else
- {
- _jumpCount = 0;
- }
- return true;
-}
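-
- // Example (hypothetical numbers): with _jumpStdDevs = 2.5 and a current
- // standard deviation sqrt(_varRtt) of 10 ms, any sample more than 25 ms
- // away from _avgRtt counts towards _jumpCount; _detectThreshold (= 5)
- // consecutive such samples in the same direction reset the average to the
- // short-time statistics of those samples.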
-
-bool
-VCMRttFilter::DriftDetection(int64_t rttMs)
-{
- if (_maxRtt - _avgRtt > _driftStdDevs * sqrt(_varRtt))
- {
- if (_driftCount < kMaxDriftJumpCount)
- {
- // Update the buffer used for the short time
- // statistics.
- _driftBuf[_driftCount] = rttMs;
- _driftCount++;
- }
- if (_driftCount >= _detectThreshold)
- {
- // Detected an RTT drift
- ShortRttFilter(_driftBuf, _driftCount);
- _filtFactCount = _detectThreshold + 1;
- _driftCount = 0;
- }
- }
- else
- {
- _driftCount = 0;
- }
- return true;
-}
-
-void
-VCMRttFilter::ShortRttFilter(int64_t* buf, uint32_t length)
-{
- if (length == 0)
- {
- return;
- }
- _maxRtt = 0;
- _avgRtt = 0;
- for (uint32_t i=0; i < length; i++)
- {
- if (buf[i] > _maxRtt)
- {
- _maxRtt = buf[i];
- }
- _avgRtt += buf[i];
- }
- _avgRtt = _avgRtt / static_cast<double>(length);
-}
-
-int64_t
-VCMRttFilter::RttMs() const
-{
- return static_cast<int64_t>(_maxRtt + 0.5);
-}
-
-} // namespace webrtc
diff --git a/webrtc/modules/video_coding/main/source/rtt_filter.h b/webrtc/modules/video_coding/main/source/rtt_filter.h
deleted file mode 100644
index 9e14a1ab39..0000000000
--- a/webrtc/modules/video_coding/main/source/rtt_filter.h
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_RTT_FILTER_H_
-#define WEBRTC_MODULES_VIDEO_CODING_RTT_FILTER_H_
-
-#include "webrtc/typedefs.h"
-
-namespace webrtc
-{
-
-class VCMRttFilter
-{
-public:
- VCMRttFilter();
-
- VCMRttFilter& operator=(const VCMRttFilter& rhs);
-
- // Resets the filter.
- void Reset();
- // Updates the filter with a new sample.
- void Update(int64_t rttMs);
- // A getter function for the current RTT level in ms.
- int64_t RttMs() const;
-
-private:
- // The size of the drift and jump memory buffers
- // and thus also the detection threshold for these
- // detectors in number of samples.
- enum { kMaxDriftJumpCount = 5 };
- // Detects RTT jumps by comparing the difference between
- // samples and the average to the standard deviation.
- // Returns true if the long-time statistics should be updated
- // and false otherwise.
- bool JumpDetection(int64_t rttMs);
- // Detects RTT drifts by comparing the difference between
- // the max and the average to the standard deviation.
- // Returns true if the long-time statistics should be updated
- // and false otherwise.
- bool DriftDetection(int64_t rttMs);
- // Computes the short-time average and maximum of the buffer |buf|.
- void ShortRttFilter(int64_t* buf, uint32_t length);
-
- bool _gotNonZeroUpdate;
- double _avgRtt;
- double _varRtt;
- int64_t _maxRtt;
- uint32_t _filtFactCount;
- const uint32_t _filtFactMax;
- const double _jumpStdDevs;
- const double _driftStdDevs;
- int32_t _jumpCount;
- int32_t _driftCount;
- const int32_t _detectThreshold;
- int64_t _jumpBuf[kMaxDriftJumpCount];
- int64_t _driftBuf[kMaxDriftJumpCount];
-};
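-
- // A minimal usage sketch (not part of the original header); |rtt_samples|
- // is a hypothetical container of measured RTTs in milliseconds:
- //
- //   VCMRttFilter filter;
- //   for (int64_t sample_ms : rtt_samples)
- //     filter.Update(sample_ms);
- //   int64_t rtt_estimate_ms = filter.RttMs();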
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_RTT_FILTER_H_
diff --git a/webrtc/modules/video_coding/main/source/session_info.cc b/webrtc/modules/video_coding/main/source/session_info.cc
deleted file mode 100644
index 9a1bc54e52..0000000000
--- a/webrtc/modules/video_coding/main/source/session_info.cc
+++ /dev/null
@@ -1,580 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_coding/main/source/session_info.h"
-
-#include "webrtc/modules/video_coding/main/source/packet.h"
-#include "webrtc/system_wrappers/include/logging.h"
-
-namespace webrtc {
-
-namespace {
-
-uint16_t BufferToUWord16(const uint8_t* dataBuffer) {
- return (dataBuffer[0] << 8) | dataBuffer[1];
-}
-
-} // namespace
-
-VCMSessionInfo::VCMSessionInfo()
- : session_nack_(false),
- complete_(false),
- decodable_(false),
- frame_type_(kVideoFrameDelta),
- packets_(),
- empty_seq_num_low_(-1),
- empty_seq_num_high_(-1),
- first_packet_seq_num_(-1),
- last_packet_seq_num_(-1) {
-}
-
-void VCMSessionInfo::UpdateDataPointers(const uint8_t* old_base_ptr,
- const uint8_t* new_base_ptr) {
- for (PacketIterator it = packets_.begin(); it != packets_.end(); ++it)
- if ((*it).dataPtr != NULL) {
- assert(old_base_ptr != NULL && new_base_ptr != NULL);
- (*it).dataPtr = new_base_ptr + ((*it).dataPtr - old_base_ptr);
- }
-}
-
-int VCMSessionInfo::LowSequenceNumber() const {
- if (packets_.empty())
- return empty_seq_num_low_;
- return packets_.front().seqNum;
-}
-
-int VCMSessionInfo::HighSequenceNumber() const {
- if (packets_.empty())
- return empty_seq_num_high_;
- if (empty_seq_num_high_ == -1)
- return packets_.back().seqNum;
- return LatestSequenceNumber(packets_.back().seqNum, empty_seq_num_high_);
-}
-
-int VCMSessionInfo::PictureId() const {
- if (packets_.empty())
- return kNoPictureId;
- if (packets_.front().codecSpecificHeader.codec == kRtpVideoVp8) {
- return packets_.front().codecSpecificHeader.codecHeader.VP8.pictureId;
- } else if (packets_.front().codecSpecificHeader.codec == kRtpVideoVp9) {
- return packets_.front().codecSpecificHeader.codecHeader.VP9.picture_id;
- } else {
- return kNoPictureId;
- }
-}
-
-int VCMSessionInfo::TemporalId() const {
- if (packets_.empty())
- return kNoTemporalIdx;
- if (packets_.front().codecSpecificHeader.codec == kRtpVideoVp8) {
- return packets_.front().codecSpecificHeader.codecHeader.VP8.temporalIdx;
- } else if (packets_.front().codecSpecificHeader.codec == kRtpVideoVp9) {
- return packets_.front().codecSpecificHeader.codecHeader.VP9.temporal_idx;
- } else {
- return kNoTemporalIdx;
- }
-}
-
-bool VCMSessionInfo::LayerSync() const {
- if (packets_.empty())
- return false;
- if (packets_.front().codecSpecificHeader.codec == kRtpVideoVp8) {
- return packets_.front().codecSpecificHeader.codecHeader.VP8.layerSync;
- } else if (packets_.front().codecSpecificHeader.codec == kRtpVideoVp9) {
- return
- packets_.front().codecSpecificHeader.codecHeader.VP9.temporal_up_switch;
- } else {
- return false;
- }
-}
-
-int VCMSessionInfo::Tl0PicId() const {
- if (packets_.empty())
- return kNoTl0PicIdx;
- if (packets_.front().codecSpecificHeader.codec == kRtpVideoVp8) {
- return packets_.front().codecSpecificHeader.codecHeader.VP8.tl0PicIdx;
- } else if (packets_.front().codecSpecificHeader.codec == kRtpVideoVp9) {
- return packets_.front().codecSpecificHeader.codecHeader.VP9.tl0_pic_idx;
- } else {
- return kNoTl0PicIdx;
- }
-}
-
-bool VCMSessionInfo::NonReference() const {
- if (packets_.empty() ||
- packets_.front().codecSpecificHeader.codec != kRtpVideoVp8)
- return false;
- return packets_.front().codecSpecificHeader.codecHeader.VP8.nonReference;
-}
-
-void VCMSessionInfo::SetGofInfo(const GofInfoVP9& gof_info, size_t idx) {
- if (packets_.empty() ||
- packets_.front().codecSpecificHeader.codec != kRtpVideoVp9 ||
- packets_.front().codecSpecificHeader.codecHeader.VP9.flexible_mode) {
- return;
- }
- packets_.front().codecSpecificHeader.codecHeader.VP9.temporal_idx =
- gof_info.temporal_idx[idx];
- packets_.front().codecSpecificHeader.codecHeader.VP9.temporal_up_switch =
- gof_info.temporal_up_switch[idx];
- packets_.front().codecSpecificHeader.codecHeader.VP9.num_ref_pics =
- gof_info.num_ref_pics[idx];
- for (size_t i = 0; i < gof_info.num_ref_pics[idx]; ++i) {
- packets_.front().codecSpecificHeader.codecHeader.VP9.pid_diff[i] =
- gof_info.pid_diff[idx][i];
- }
-}
-
-void VCMSessionInfo::Reset() {
- session_nack_ = false;
- complete_ = false;
- decodable_ = false;
- frame_type_ = kVideoFrameDelta;
- packets_.clear();
- empty_seq_num_low_ = -1;
- empty_seq_num_high_ = -1;
- first_packet_seq_num_ = -1;
- last_packet_seq_num_ = -1;
-}
-
-size_t VCMSessionInfo::SessionLength() const {
- size_t length = 0;
- for (PacketIteratorConst it = packets_.begin(); it != packets_.end(); ++it)
- length += (*it).sizeBytes;
- return length;
-}
-
-int VCMSessionInfo::NumPackets() const {
- return packets_.size();
-}
-
-size_t VCMSessionInfo::InsertBuffer(uint8_t* frame_buffer,
- PacketIterator packet_it) {
- VCMPacket& packet = *packet_it;
- PacketIterator it;
-
- // Calculate the offset into the frame buffer for this packet.
- size_t offset = 0;
- for (it = packets_.begin(); it != packet_it; ++it)
- offset += (*it).sizeBytes;
-
- // Set the data pointer to point to the start of this packet in the
- // frame buffer.
- const uint8_t* packet_buffer = packet.dataPtr;
- packet.dataPtr = frame_buffer + offset;
-
- // We handle H.264 STAP-A packets in a special way as we need to remove the
- // two length bytes between each NAL unit, and potentially add start codes.
- // TODO(pbos): Remove H264 parsing from this step and use a fragmentation
- // header supplied by the H264 depacketizer.
- const size_t kH264NALHeaderLengthInBytes = 1;
- const size_t kLengthFieldLength = 2;
- if (packet.codecSpecificHeader.codec == kRtpVideoH264 &&
- packet.codecSpecificHeader.codecHeader.H264.packetization_type ==
- kH264StapA) {
- size_t required_length = 0;
- const uint8_t* nalu_ptr = packet_buffer + kH264NALHeaderLengthInBytes;
- while (nalu_ptr < packet_buffer + packet.sizeBytes) {
- size_t length = BufferToUWord16(nalu_ptr);
- required_length +=
- length + (packet.insertStartCode ? kH264StartCodeLengthBytes : 0);
- nalu_ptr += kLengthFieldLength + length;
- }
- ShiftSubsequentPackets(packet_it, required_length);
- nalu_ptr = packet_buffer + kH264NALHeaderLengthInBytes;
- uint8_t* frame_buffer_ptr = frame_buffer + offset;
- while (nalu_ptr < packet_buffer + packet.sizeBytes) {
- size_t length = BufferToUWord16(nalu_ptr);
- nalu_ptr += kLengthFieldLength;
- frame_buffer_ptr += Insert(nalu_ptr,
- length,
- packet.insertStartCode,
- const_cast<uint8_t*>(frame_buffer_ptr));
- nalu_ptr += length;
- }
- packet.sizeBytes = required_length;
- return packet.sizeBytes;
- }
- ShiftSubsequentPackets(
- packet_it,
- packet.sizeBytes +
- (packet.insertStartCode ? kH264StartCodeLengthBytes : 0));
-
- packet.sizeBytes = Insert(packet_buffer,
- packet.sizeBytes,
- packet.insertStartCode,
- const_cast<uint8_t*>(packet.dataPtr));
- return packet.sizeBytes;
-}
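-
- // A worked STAP-A example with a hypothetical payload (not from the
- // original file): a STAP-A packet carrying two NAL units of 2 and 3 bytes,
- //   [hdr][00 02][A B][00 03][C D E],
- // with insertStartCode set, is rewritten in the frame buffer as
- //   [00 00 00 01][A B][00 00 00 01][C D E],
- // so required_length = (2 + 4) + (3 + 4) = 13 bytes.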
-
-size_t VCMSessionInfo::Insert(const uint8_t* buffer,
- size_t length,
- bool insert_start_code,
- uint8_t* frame_buffer) {
- if (insert_start_code) {
- const unsigned char startCode[] = {0, 0, 0, 1};
- memcpy(frame_buffer, startCode, kH264StartCodeLengthBytes);
- }
- memcpy(frame_buffer + (insert_start_code ? kH264StartCodeLengthBytes : 0),
- buffer,
- length);
- length += (insert_start_code ? kH264StartCodeLengthBytes : 0);
-
- return length;
-}
-
-void VCMSessionInfo::ShiftSubsequentPackets(PacketIterator it,
- int steps_to_shift) {
- ++it;
- if (it == packets_.end())
- return;
- uint8_t* first_packet_ptr = const_cast<uint8_t*>((*it).dataPtr);
- int shift_length = 0;
- // Calculate the total move length and move the data pointers in advance.
- for (; it != packets_.end(); ++it) {
- shift_length += (*it).sizeBytes;
- if ((*it).dataPtr != NULL)
- (*it).dataPtr += steps_to_shift;
- }
- memmove(first_packet_ptr + steps_to_shift, first_packet_ptr, shift_length);
-}
-
-void VCMSessionInfo::UpdateCompleteSession() {
- if (HaveFirstPacket() && HaveLastPacket()) {
- // Do we have all the packets in this session?
- bool complete_session = true;
- PacketIterator it = packets_.begin();
- PacketIterator prev_it = it;
- ++it;
- for (; it != packets_.end(); ++it) {
- if (!InSequence(it, prev_it)) {
- complete_session = false;
- break;
- }
- prev_it = it;
- }
- complete_ = complete_session;
- }
-}
-
-void VCMSessionInfo::UpdateDecodableSession(const FrameData& frame_data) {
- // Irrelevant if the session is already complete or decodable.
- if (complete_ || decodable_)
- return;
- // TODO(agalusza): Account for bursty loss.
- // TODO(agalusza): Refine these values to better approximate optimal ones.
- // Do not decode frames if the RTT is lower than this.
- const int64_t kRttThreshold = 100;
- // Do not decode frames if the number of packets is between these two
- // fractions of the average number of packets per frame.
- const float kLowPacketPercentageThreshold = 0.2f;
- const float kHighPacketPercentageThreshold = 0.8f;
- if (frame_data.rtt_ms < kRttThreshold
- || frame_type_ == kVideoFrameKey
- || !HaveFirstPacket()
- || (NumPackets() <= kHighPacketPercentageThreshold
- * frame_data.rolling_average_packets_per_frame
- && NumPackets() > kLowPacketPercentageThreshold
- * frame_data.rolling_average_packets_per_frame))
- return;
-
- decodable_ = true;
-}
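-
- // A worked example of the thresholds above: with rtt_ms = 150 and a rolling
- // average of 10 packets per frame, a delta frame that has its first packet
- // is marked decodable when it holds at most 0.2 * 10 = 2 packets (a small
- // frame) or more than 0.8 * 10 = 8 packets (a nearly complete frame);
- // frames with 3 to 8 packets stay non-decodable.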
-
-bool VCMSessionInfo::complete() const {
- return complete_;
-}
-
-bool VCMSessionInfo::decodable() const {
- return decodable_;
-}
-
-// Find the end of the NAL unit which the packet pointed to by |packet_it|
-// belongs to. Returns an iterator to the last packet of the frame if the end
-// of the NAL unit wasn't found.
-VCMSessionInfo::PacketIterator VCMSessionInfo::FindNaluEnd(
- PacketIterator packet_it) const {
- if ((*packet_it).completeNALU == kNaluEnd ||
- (*packet_it).completeNALU == kNaluComplete) {
- return packet_it;
- }
- // Find the end of the NAL unit.
- for (; packet_it != packets_.end(); ++packet_it) {
- if (((*packet_it).completeNALU == kNaluComplete &&
- (*packet_it).sizeBytes > 0) ||
- // Found next NALU.
- (*packet_it).completeNALU == kNaluStart)
- return --packet_it;
- if ((*packet_it).completeNALU == kNaluEnd)
- return packet_it;
- }
- // The end wasn't found.
- return --packet_it;
-}
-
-size_t VCMSessionInfo::DeletePacketData(PacketIterator start,
- PacketIterator end) {
- size_t bytes_to_delete = 0; // The number of bytes to delete.
- PacketIterator packet_after_end = end;
- ++packet_after_end;
-
- // Get the number of bytes to delete.
- // Clear the size of these packets.
- for (PacketIterator it = start; it != packet_after_end; ++it) {
- bytes_to_delete += (*it).sizeBytes;
- (*it).sizeBytes = 0;
- (*it).dataPtr = NULL;
- }
- if (bytes_to_delete > 0)
- ShiftSubsequentPackets(end, -static_cast<int>(bytes_to_delete));
- return bytes_to_delete;
-}
-
-size_t VCMSessionInfo::BuildVP8FragmentationHeader(
- uint8_t* frame_buffer,
- size_t frame_buffer_length,
- RTPFragmentationHeader* fragmentation) {
- size_t new_length = 0;
- // Allocate space for max number of partitions
- fragmentation->VerifyAndAllocateFragmentationHeader(kMaxVP8Partitions);
- fragmentation->fragmentationVectorSize = 0;
- memset(fragmentation->fragmentationLength, 0,
- kMaxVP8Partitions * sizeof(size_t));
- if (packets_.empty())
- return new_length;
- PacketIterator it = FindNextPartitionBeginning(packets_.begin());
- while (it != packets_.end()) {
- const int partition_id =
- (*it).codecSpecificHeader.codecHeader.VP8.partitionId;
- PacketIterator partition_end = FindPartitionEnd(it);
- fragmentation->fragmentationOffset[partition_id] =
- (*it).dataPtr - frame_buffer;
- assert(fragmentation->fragmentationOffset[partition_id] <
- frame_buffer_length);
- fragmentation->fragmentationLength[partition_id] =
- (*partition_end).dataPtr + (*partition_end).sizeBytes - (*it).dataPtr;
- assert(fragmentation->fragmentationLength[partition_id] <=
- frame_buffer_length);
- new_length += fragmentation->fragmentationLength[partition_id];
- ++partition_end;
- it = FindNextPartitionBeginning(partition_end);
- if (partition_id + 1 > fragmentation->fragmentationVectorSize)
- fragmentation->fragmentationVectorSize = partition_id + 1;
- }
- // Set all empty fragments to start where the previous fragment ends,
- // and have zero length.
- if (fragmentation->fragmentationLength[0] == 0)
- fragmentation->fragmentationOffset[0] = 0;
- for (int i = 1; i < fragmentation->fragmentationVectorSize; ++i) {
- if (fragmentation->fragmentationLength[i] == 0)
- fragmentation->fragmentationOffset[i] =
- fragmentation->fragmentationOffset[i - 1] +
- fragmentation->fragmentationLength[i - 1];
- assert(i == 0 ||
- fragmentation->fragmentationOffset[i] >=
- fragmentation->fragmentationOffset[i - 1]);
- }
- assert(new_length <= frame_buffer_length);
- return new_length;
-}
-
-VCMSessionInfo::PacketIterator VCMSessionInfo::FindNextPartitionBeginning(
- PacketIterator it) const {
- while (it != packets_.end()) {
- if ((*it).codecSpecificHeader.codecHeader.VP8.beginningOfPartition) {
- return it;
- }
- ++it;
- }
- return it;
-}
-
-VCMSessionInfo::PacketIterator VCMSessionInfo::FindPartitionEnd(
- PacketIterator it) const {
- assert((*it).codec == kVideoCodecVP8);
- PacketIterator prev_it = it;
- const int partition_id =
- (*it).codecSpecificHeader.codecHeader.VP8.partitionId;
- while (it != packets_.end()) {
- bool beginning =
- (*it).codecSpecificHeader.codecHeader.VP8.beginningOfPartition;
- int current_partition_id =
- (*it).codecSpecificHeader.codecHeader.VP8.partitionId;
- bool packet_loss_found = (!beginning && !InSequence(it, prev_it));
- if (packet_loss_found ||
- (beginning && current_partition_id != partition_id)) {
- // Missing packet; the previous packet was the last in sequence.
- return prev_it;
- }
- prev_it = it;
- ++it;
- }
- return prev_it;
-}
-
-bool VCMSessionInfo::InSequence(const PacketIterator& packet_it,
- const PacketIterator& prev_packet_it) {
- // If the two iterators are pointing to the same packet they are considered
- // to be in sequence.
- return (packet_it == prev_packet_it ||
- (static_cast<uint16_t>((*prev_packet_it).seqNum + 1) ==
- (*packet_it).seqNum));
-}
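-
- // Example: sequence numbers wrap modulo 2^16, so a packet with seqNum 0x0000
- // directly follows one with seqNum 0xFFFF:
- // static_cast<uint16_t>(0xFFFF + 1) == 0x0000, hence InSequence() is true.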
-
-size_t VCMSessionInfo::MakeDecodable() {
- size_t return_length = 0;
- if (packets_.empty()) {
- return 0;
- }
- PacketIterator it = packets_.begin();
- // Make sure we remove the first NAL unit if it's not decodable.
- if ((*it).completeNALU == kNaluIncomplete ||
- (*it).completeNALU == kNaluEnd) {
- PacketIterator nalu_end = FindNaluEnd(it);
- return_length += DeletePacketData(it, nalu_end);
- it = nalu_end;
- }
- PacketIterator prev_it = it;
- // Take care of the rest of the NAL units.
- for (; it != packets_.end(); ++it) {
- bool start_of_nalu = ((*it).completeNALU == kNaluStart ||
- (*it).completeNALU == kNaluComplete);
- if (!start_of_nalu && !InSequence(it, prev_it)) {
- // Found a sequence number gap due to packet loss.
- PacketIterator nalu_end = FindNaluEnd(it);
- return_length += DeletePacketData(it, nalu_end);
- it = nalu_end;
- }
- prev_it = it;
- }
- return return_length;
-}
-
-void VCMSessionInfo::SetNotDecodableIfIncomplete() {
- // We don't need to check for completeness first because the two are
- // orthogonal. If complete_ is true, decodable_ is irrelevant.
- decodable_ = false;
-}
-
-bool VCMSessionInfo::HaveFirstPacket() const {
- return !packets_.empty() && (first_packet_seq_num_ != -1);
-}
-
-bool VCMSessionInfo::HaveLastPacket() const {
- return !packets_.empty() && (last_packet_seq_num_ != -1);
-}
-
-bool VCMSessionInfo::session_nack() const {
- return session_nack_;
-}
-
-int VCMSessionInfo::InsertPacket(const VCMPacket& packet,
- uint8_t* frame_buffer,
- VCMDecodeErrorMode decode_error_mode,
- const FrameData& frame_data) {
- if (packet.frameType == kEmptyFrame) {
- // Update sequence number of an empty packet.
- // Only media packets are inserted into the packet list.
- InformOfEmptyPacket(packet.seqNum);
- return 0;
- }
-
- if (packets_.size() == kMaxPacketsInSession) {
- LOG(LS_ERROR) << "Max number of packets per frame has been reached.";
- return -1;
- }
-
- // Find the position of this packet in the packet list in sequence number
- // order and insert it. Loop over the list in reverse order.
- ReversePacketIterator rit = packets_.rbegin();
- for (; rit != packets_.rend(); ++rit)
- if (LatestSequenceNumber(packet.seqNum, (*rit).seqNum) == packet.seqNum)
- break;
-
- // Check for duplicate packets.
- if (rit != packets_.rend() &&
- (*rit).seqNum == packet.seqNum && (*rit).sizeBytes > 0)
- return -2;
-
- if (packet.codec == kVideoCodecH264) {
- frame_type_ = packet.frameType;
- if (packet.isFirstPacket &&
- (first_packet_seq_num_ == -1 ||
- IsNewerSequenceNumber(first_packet_seq_num_, packet.seqNum))) {
- first_packet_seq_num_ = packet.seqNum;
- }
- if (packet.markerBit &&
- (last_packet_seq_num_ == -1 ||
- IsNewerSequenceNumber(packet.seqNum, last_packet_seq_num_))) {
- last_packet_seq_num_ = packet.seqNum;
- }
- } else {
- // Only insert media packets between the first and last packets (when
- // available). The check is placed here to properly account for duplicate
- // packets.
- // Check if this is the first packet (only valid for some codecs); it
- // should only be set for one packet per session.
- if (packet.isFirstPacket && first_packet_seq_num_ == -1) {
- // The first packet in a frame signals the frame type.
- frame_type_ = packet.frameType;
- // Store the sequence number for the first packet.
- first_packet_seq_num_ = static_cast<int>(packet.seqNum);
- } else if (first_packet_seq_num_ != -1 &&
- IsNewerSequenceNumber(first_packet_seq_num_, packet.seqNum)) {
- LOG(LS_WARNING) << "Received packet with a sequence number which is out "
- "of frame boundaries";
- return -3;
- } else if (frame_type_ == kEmptyFrame && packet.frameType != kEmptyFrame) {
- // Update the frame type with the type of the first media packet.
- // TODO(mikhal): Can this trigger?
- frame_type_ = packet.frameType;
- }
-
- // Track the marker bit; it should only be set for one packet per session.
- if (packet.markerBit && last_packet_seq_num_ == -1) {
- last_packet_seq_num_ = static_cast<int>(packet.seqNum);
- } else if (last_packet_seq_num_ != -1 &&
- IsNewerSequenceNumber(packet.seqNum, last_packet_seq_num_)) {
- LOG(LS_WARNING) << "Received packet with a sequence number which is out "
- "of frame boundaries";
- return -3;
- }
- }
-
- // The insert operation invalidates the iterator |rit|.
- PacketIterator packet_list_it = packets_.insert(rit.base(), packet);
-
- size_t returnLength = InsertBuffer(frame_buffer, packet_list_it);
- UpdateCompleteSession();
- if (decode_error_mode == kWithErrors)
- decodable_ = true;
- else if (decode_error_mode == kSelectiveErrors)
- UpdateDecodableSession(frame_data);
- return static_cast<int>(returnLength);
-}
-
-void VCMSessionInfo::InformOfEmptyPacket(uint16_t seq_num) {
- // Empty packets may be FEC or filler packets. They are sequential and
- // follow the data packets; therefore, we only need to keep track of the
- // high and low sequence numbers and may assume that the packets in between
- // are empty packets belonging to the same frame (timestamp).
- if (empty_seq_num_high_ == -1)
- empty_seq_num_high_ = seq_num;
- else
- empty_seq_num_high_ = LatestSequenceNumber(seq_num, empty_seq_num_high_);
- if (empty_seq_num_low_ == -1 || IsNewerSequenceNumber(empty_seq_num_low_,
- seq_num))
- empty_seq_num_low_ = seq_num;
-}
-
-} // namespace webrtc
diff --git a/webrtc/modules/video_coding/main/source/session_info.h b/webrtc/modules/video_coding/main/source/session_info.h
deleted file mode 100644
index 88071e19d5..0000000000
--- a/webrtc/modules/video_coding/main/source/session_info.h
+++ /dev/null
@@ -1,172 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_SESSION_INFO_H_
-#define WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_SESSION_INFO_H_
-
-#include <list>
-
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding.h"
-#include "webrtc/modules/video_coding/main/source/packet.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-// Used to pass data from jitter buffer to session info.
-// This data is then used in determining whether a frame is decodable.
-struct FrameData {
- int64_t rtt_ms;
- float rolling_average_packets_per_frame;
-};
-
-class VCMSessionInfo {
- public:
- VCMSessionInfo();
-
- void UpdateDataPointers(const uint8_t* old_base_ptr,
- const uint8_t* new_base_ptr);
- // NACK - Building the NACK lists.
- // Build hard NACK list: Zero out all entries in list up to and including
- // _lowSeqNum.
- int BuildHardNackList(int* seq_num_list,
- int seq_num_list_length,
- int nack_seq_nums_index);
-
- // Build soft NACK list: Zero out only a subset of the packets, discard
- // empty packets.
- int BuildSoftNackList(int* seq_num_list,
- int seq_num_list_length,
- int nack_seq_nums_index,
- int rtt_ms);
- void Reset();
- int InsertPacket(const VCMPacket& packet,
- uint8_t* frame_buffer,
- VCMDecodeErrorMode enable_decodable_state,
- const FrameData& frame_data);
- bool complete() const;
- bool decodable() const;
-
- // Builds fragmentation headers for VP8, each fragment being a decodable
- // VP8 partition. Returns the total number of bytes which are decodable. Is
- // used instead of MakeDecodable for VP8.
- size_t BuildVP8FragmentationHeader(uint8_t* frame_buffer,
- size_t frame_buffer_length,
- RTPFragmentationHeader* fragmentation);
-
- // Makes the frame decodable, i.e., leaves only decodable NALUs. All
- // non-decodable NALUs are deleted and packet data is moved in
- // memory to remove any empty space.
- // Returns the number of bytes deleted from the session.
- size_t MakeDecodable();
-
- // Sets decodable_ to false.
- // Used by the dual decoder. After the mode is changed to kNoErrors from
- // kWithErrors or kSelectiveErrors, any states that have been marked
- // decodable and are not complete are marked as non-decodable.
- void SetNotDecodableIfIncomplete();
-
- size_t SessionLength() const;
- int NumPackets() const;
- bool HaveFirstPacket() const;
- bool HaveLastPacket() const;
- bool session_nack() const;
- webrtc::FrameType FrameType() const { return frame_type_; }
- int LowSequenceNumber() const;
-
- // Returns highest sequence number, media or empty.
- int HighSequenceNumber() const;
- int PictureId() const;
- int TemporalId() const;
- bool LayerSync() const;
- int Tl0PicId() const;
- bool NonReference() const;
-
- void SetGofInfo(const GofInfoVP9& gof_info, size_t idx);
-
- // The number of packets discarded because the decoder can't make use of
- // them.
- int packets_not_decodable() const;
-
- private:
- enum { kMaxVP8Partitions = 9 };
-
- typedef std::list<VCMPacket> PacketList;
- typedef PacketList::iterator PacketIterator;
- typedef PacketList::const_iterator PacketIteratorConst;
- typedef PacketList::reverse_iterator ReversePacketIterator;
-
- void InformOfEmptyPacket(uint16_t seq_num);
-
- // Finds the packet at the beginning of the next VP8 partition. If
- // none is found, the returned iterator points to |packets_.end()|.
- // |it| is expected to point to the last packet of the previous partition,
- // or to the first packet of the frame.
- PacketIterator FindNextPartitionBeginning(PacketIterator it) const;
-
- // Returns an iterator pointing to the last packet of the partition pointed to
- // by |it|.
- PacketIterator FindPartitionEnd(PacketIterator it) const;
- static bool InSequence(const PacketIterator& it,
- const PacketIterator& prev_it);
- size_t InsertBuffer(uint8_t* frame_buffer,
- PacketIterator packetIterator);
- size_t Insert(const uint8_t* buffer,
- size_t length,
- bool insert_start_code,
- uint8_t* frame_buffer);
- void ShiftSubsequentPackets(PacketIterator it, int steps_to_shift);
- PacketIterator FindNaluEnd(PacketIterator packet_iter) const;
- // Deletes the data of all packets between |start| and |end|, inclusively.
- // Note that this function doesn't delete the actual packets.
- size_t DeletePacketData(PacketIterator start,
- PacketIterator end);
- void UpdateCompleteSession();
-
- // When enabled, determines whether the session is decodable, i.e.
- // incomplete but would still be sent to the decoder.
- // Note: the definition assumes random loss.
- // A frame is defined to be decodable when all of the following hold:
- // - The round trip time is higher than the threshold.
- // - It is not a key frame.
- // - It has the first packet: in VP8 the first packet contains all or part
- // of the first partition, which holds the most relevant information for
- // decoding.
- // - Either more than the upper threshold of the average number of packets
- // per frame is present, or less than the lower threshold is present,
- // which suggests a small frame. Such a frame is unlikely to contain many
- // motion vectors, so having the first packet will likely suffice. Once we
- // have more than the lower threshold of the frame, we know that the frame
- // is medium or large-sized.
- void UpdateDecodableSession(const FrameData& frame_data);
-
- // If this session has been NACKed by the jitter buffer.
- bool session_nack_;
- bool complete_;
- bool decodable_;
- webrtc::FrameType frame_type_;
- // Packets in this frame.
- PacketList packets_;
- int empty_seq_num_low_;
- int empty_seq_num_high_;
-
- // The following two variables correspond to the first and last media packets
- // in a session defined by the first packet flag and the marker bit.
- // They are not necessarily equal to the front and back packets, as packets
- // may enter out of order.
- // TODO(mikhal): Refactor the list to use a map.
- int first_packet_seq_num_;
- int last_packet_seq_num_;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_SESSION_INFO_H_
diff --git a/webrtc/modules/video_coding/main/source/session_info_unittest.cc b/webrtc/modules/video_coding/main/source/session_info_unittest.cc
deleted file mode 100644
index 58c352d3fc..0000000000
--- a/webrtc/modules/video_coding/main/source/session_info_unittest.cc
+++ /dev/null
@@ -1,1064 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <string.h>
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/video_coding/main/source/packet.h"
-#include "webrtc/modules/video_coding/main/source/session_info.h"
-
-namespace webrtc {
-
-class TestSessionInfo : public ::testing::Test {
- protected:
- virtual void SetUp() {
- memset(packet_buffer_, 0, sizeof(packet_buffer_));
- memset(frame_buffer_, 0, sizeof(frame_buffer_));
- session_.Reset();
- packet_.Reset();
- packet_.frameType = kVideoFrameDelta;
- packet_.sizeBytes = packet_buffer_size();
- packet_.dataPtr = packet_buffer_;
- packet_.seqNum = 0;
- packet_.timestamp = 0;
- frame_data.rtt_ms = 0;
- frame_data.rolling_average_packets_per_frame = -1;
- }
-
- void FillPacket(uint8_t start_value) {
- for (size_t i = 0; i < packet_buffer_size(); ++i)
- packet_buffer_[i] = start_value + i;
- }
-
- void VerifyPacket(uint8_t* start_ptr, uint8_t start_value) {
- for (size_t j = 0; j < packet_buffer_size(); ++j) {
- ASSERT_EQ(start_value + j, start_ptr[j]);
- }
- }
-
- size_t packet_buffer_size() const {
- return sizeof(packet_buffer_) / sizeof(packet_buffer_[0]);
- }
- size_t frame_buffer_size() const {
- return sizeof(frame_buffer_) / sizeof(frame_buffer_[0]);
- }
-
- enum { kPacketBufferSize = 10 };
-
- uint8_t packet_buffer_[kPacketBufferSize];
- uint8_t frame_buffer_[10 * kPacketBufferSize];
-
- VCMSessionInfo session_;
- VCMPacket packet_;
- FrameData frame_data;
-};
-
-class TestVP8Partitions : public TestSessionInfo {
- protected:
- enum { kMaxVP8Partitions = 9 };
-
- virtual void SetUp() {
- TestSessionInfo::SetUp();
- vp8_header_ = &packet_header_.type.Video.codecHeader.VP8;
- packet_header_.frameType = kVideoFrameDelta;
- packet_header_.type.Video.codec = kRtpVideoVp8;
- vp8_header_->InitRTPVideoHeaderVP8();
- fragmentation_.VerifyAndAllocateFragmentationHeader(kMaxVP8Partitions);
- }
-
- bool VerifyPartition(int partition_id,
- int packets_expected,
- int start_value) {
- EXPECT_EQ(packets_expected * packet_buffer_size(),
- fragmentation_.fragmentationLength[partition_id]);
- for (int i = 0; i < packets_expected; ++i) {
- size_t packet_index = fragmentation_.fragmentationOffset[partition_id] +
- i * packet_buffer_size();
- if (packet_index + packet_buffer_size() > frame_buffer_size())
- return false;
- VerifyPacket(frame_buffer_ + packet_index, start_value + i);
- }
- return true;
- }
-
- WebRtcRTPHeader packet_header_;
- RTPVideoHeaderVP8* vp8_header_;
- RTPFragmentationHeader fragmentation_;
-};
-
-class TestNalUnits : public TestSessionInfo {
- protected:
- virtual void SetUp() {
- TestSessionInfo::SetUp();
- packet_.codec = kVideoCodecVP8;
- }
-
- bool VerifyNalu(int offset, int packets_expected, int start_value) {
- EXPECT_GE(session_.SessionLength(),
- packets_expected * packet_buffer_size());
- for (int i = 0; i < packets_expected; ++i) {
- int packet_index = (offset + i) * packet_buffer_size();
- VerifyPacket(frame_buffer_ + packet_index, start_value + i);
- }
- return true;
- }
-};
-
-class TestNackList : public TestSessionInfo {
- protected:
- static const size_t kMaxSeqNumListLength = 30;
-
- virtual void SetUp() {
- TestSessionInfo::SetUp();
- seq_num_list_length_ = 0;
- memset(seq_num_list_, 0, sizeof(seq_num_list_));
- }
-
- void BuildSeqNumList(uint16_t low,
- uint16_t high) {
- size_t i = 0;
- while (low != high + 1) {
- EXPECT_LT(i, kMaxSeqNumListLength);
- if (i >= kMaxSeqNumListLength) {
- seq_num_list_length_ = kMaxSeqNumListLength;
- return;
- }
- seq_num_list_[i] = low;
- low++;
- i++;
- }
- seq_num_list_length_ = i;
- }
-
- void VerifyAll(int value) {
- for (int i = 0; i < seq_num_list_length_; ++i)
- EXPECT_EQ(seq_num_list_[i], value);
- }
-
- int seq_num_list_[kMaxSeqNumListLength];
- int seq_num_list_length_;
-};
-
-TEST_F(TestSessionInfo, TestSimpleAPIs) {
- packet_.isFirstPacket = true;
- packet_.seqNum = 0xFFFE;
- packet_.sizeBytes = packet_buffer_size();
- packet_.frameType = kVideoFrameKey;
- FillPacket(0);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
- kNoErrors, frame_data)));
- EXPECT_FALSE(session_.HaveLastPacket());
- EXPECT_EQ(kVideoFrameKey, session_.FrameType());
-
- packet_.isFirstPacket = false;
- packet_.markerBit = true;
- packet_.seqNum += 1;
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
- kNoErrors, frame_data)));
- EXPECT_TRUE(session_.HaveLastPacket());
- EXPECT_EQ(packet_.seqNum, session_.HighSequenceNumber());
- EXPECT_EQ(0xFFFE, session_.LowSequenceNumber());
-
- // Insert an empty packet, which will become the new high sequence number.
- // To make things harder, we make sure a sequence-number wrap occurs here.
- packet_.isFirstPacket = false;
- packet_.markerBit = true;
- packet_.seqNum = 2;
- packet_.sizeBytes = 0;
- packet_.frameType = kEmptyFrame;
- EXPECT_EQ(0,
- session_.InsertPacket(packet_,
- frame_buffer_,
- kNoErrors,
- frame_data));
- EXPECT_EQ(packet_.seqNum, session_.HighSequenceNumber());
-}
-
-TEST_F(TestSessionInfo, NormalOperation) {
- packet_.seqNum = 0xFFFF;
- packet_.isFirstPacket = true;
- packet_.markerBit = false;
- FillPacket(0);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
- kNoErrors, frame_data)));
-
- packet_.isFirstPacket = false;
- for (int i = 1; i < 9; ++i) {
- packet_.seqNum += 1;
- FillPacket(i);
- ASSERT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
- kNoErrors,
- frame_data)));
- }
-
- packet_.seqNum += 1;
- packet_.markerBit = true;
- FillPacket(9);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
- kNoErrors, frame_data)));
-
- EXPECT_EQ(10 * packet_buffer_size(), session_.SessionLength());
- for (int i = 0; i < 10; ++i) {
- SCOPED_TRACE("Calling VerifyPacket");
- VerifyPacket(frame_buffer_ + i * packet_buffer_size(), i);
- }
-}
-
-TEST_F(TestSessionInfo, ErrorsEqualDecodableState) {
- packet_.seqNum = 0xFFFF;
- packet_.isFirstPacket = false;
- packet_.markerBit = false;
- FillPacket(3);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
- kWithErrors,
- frame_data)));
- EXPECT_TRUE(session_.decodable());
-}
-
-TEST_F(TestSessionInfo, SelectiveDecodableState) {
- packet_.seqNum = 0xFFFF;
- packet_.isFirstPacket = false;
- packet_.markerBit = false;
- FillPacket(1);
- frame_data.rolling_average_packets_per_frame = 11;
- frame_data.rtt_ms = 150;
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
- kSelectiveErrors,
- frame_data)));
- EXPECT_FALSE(session_.decodable());
-
- packet_.seqNum -= 1;
- FillPacket(0);
- packet_.isFirstPacket = true;
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
- kSelectiveErrors,
- frame_data)));
- EXPECT_TRUE(session_.decodable());
-
- packet_.isFirstPacket = false;
- packet_.seqNum += 1;
- for (int i = 2; i < 8; ++i) {
- packet_.seqNum += 1;
- FillPacket(i);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
- kSelectiveErrors,
- frame_data)));
- EXPECT_TRUE(session_.decodable());
- }
-
- packet_.seqNum += 1;
- FillPacket(8);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
- kSelectiveErrors,
- frame_data)));
- EXPECT_TRUE(session_.decodable());
-}
-
-TEST_F(TestSessionInfo, OutOfBoundsPackets1PacketFrame) {
- packet_.seqNum = 0x0001;
- packet_.isFirstPacket = true;
- packet_.markerBit = true;
- FillPacket(1);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
- kNoErrors, frame_data)));
-
- packet_.seqNum = 0x0004;
- packet_.isFirstPacket = true;
- packet_.markerBit = true;
- FillPacket(1);
- EXPECT_EQ(-3, session_.InsertPacket(packet_,
- frame_buffer_,
- kNoErrors,
- frame_data));
- packet_.seqNum = 0x0000;
- packet_.isFirstPacket = false;
- packet_.markerBit = false;
- FillPacket(1);
- EXPECT_EQ(-3, session_.InsertPacket(packet_,
- frame_buffer_,
- kNoErrors,
- frame_data));
-}
-
-TEST_F(TestSessionInfo, SetMarkerBitOnce) {
- packet_.seqNum = 0x0005;
- packet_.isFirstPacket = false;
- packet_.markerBit = true;
- FillPacket(1);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
- kNoErrors, frame_data)));
- ++packet_.seqNum;
- packet_.isFirstPacket = true;
- packet_.markerBit = true;
- FillPacket(1);
- EXPECT_EQ(-3, session_.InsertPacket(packet_,
- frame_buffer_,
- kNoErrors,
- frame_data));
-}
-
-TEST_F(TestSessionInfo, OutOfBoundsPacketsBase) {
- // Allow packets in the range 5-6.
- packet_.seqNum = 0x0005;
- packet_.isFirstPacket = true;
- packet_.markerBit = false;
- FillPacket(1);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
- kNoErrors, frame_data)));
- // Insert an older packet with the first-packet flag set.
- packet_.seqNum = 0x0004;
- packet_.isFirstPacket = true;
- packet_.markerBit = true;
- FillPacket(1);
- EXPECT_EQ(-3, session_.InsertPacket(packet_,
- frame_buffer_,
- kNoErrors,
- frame_data));
- packet_.seqNum = 0x0006;
- packet_.isFirstPacket = true;
- packet_.markerBit = true;
- FillPacket(1);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
- kNoErrors, frame_data)));
- packet_.seqNum = 0x0008;
- packet_.isFirstPacket = false;
- packet_.markerBit = true;
- FillPacket(1);
- EXPECT_EQ(-3, session_.InsertPacket(packet_,
- frame_buffer_,
- kNoErrors,
- frame_data));
-}
-
-TEST_F(TestSessionInfo, OutOfBoundsPacketsWrap) {
- packet_.seqNum = 0xFFFE;
- packet_.isFirstPacket = true;
- packet_.markerBit = false;
- FillPacket(1);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
- kNoErrors, frame_data)));
-
- packet_.seqNum = 0x0004;
- packet_.isFirstPacket = false;
- packet_.markerBit = true;
- FillPacket(1);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
- kNoErrors, frame_data)));
- packet_.seqNum = 0x0002;
- packet_.isFirstPacket = false;
- packet_.markerBit = false;
- FillPacket(1);
- ASSERT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
- kNoErrors, frame_data)));
- packet_.seqNum = 0xFFF0;
- packet_.isFirstPacket = false;
- packet_.markerBit = false;
- FillPacket(1);
- EXPECT_EQ(-3,
- session_.InsertPacket(packet_,
- frame_buffer_,
- kNoErrors,
- frame_data));
- packet_.seqNum = 0x0006;
- packet_.isFirstPacket = false;
- packet_.markerBit = false;
- FillPacket(1);
- EXPECT_EQ(-3,
- session_.InsertPacket(packet_,
- frame_buffer_,
- kNoErrors,
- frame_data));
-}
-
-TEST_F(TestSessionInfo, OutOfBoundsOutOfOrder) {
- // Insert out-of-bounds regular packets, and then the first and last packets.
- // Verify that the correct bounds are maintained.
- packet_.seqNum = 0x0003;
- packet_.isFirstPacket = false;
- packet_.markerBit = false;
- FillPacket(1);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
- kNoErrors, frame_data)));
- // Insert a newer packet with the first-packet flag set.
- packet_.seqNum = 0x0005;
- packet_.isFirstPacket = true;
- packet_.markerBit = false;
- FillPacket(1);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
- kNoErrors, frame_data)));
- packet_.seqNum = 0x0004;
- packet_.isFirstPacket = false;
- packet_.markerBit = false;
- FillPacket(1);
- EXPECT_EQ(-3, session_.InsertPacket(packet_,
- frame_buffer_,
- kNoErrors,
- frame_data));
- packet_.seqNum = 0x0010;
- packet_.isFirstPacket = false;
- packet_.markerBit = false;
- FillPacket(1);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
- kNoErrors, frame_data)));
- packet_.seqNum = 0x0008;
- packet_.isFirstPacket = false;
- packet_.markerBit = true;
- FillPacket(1);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
- kNoErrors, frame_data)));
-
- packet_.seqNum = 0x0009;
- packet_.isFirstPacket = false;
- packet_.markerBit = false;
- FillPacket(1);
- EXPECT_EQ(-3, session_.InsertPacket(packet_,
- frame_buffer_,
- kNoErrors,
- frame_data));
-}
-
-TEST_F(TestVP8Partitions, TwoPartitionsOneLoss) {
- // Partition 0 | Partition 1
- // [ 0 ] [ 2 ] | [ 3 ]
- packet_header_.type.Video.isFirstPacket = true;
- vp8_header_->beginningOfPartition = true;
- vp8_header_->partitionId = 0;
- packet_header_.header.markerBit = false;
- packet_header_.header.sequenceNumber = 0;
- FillPacket(0);
- VCMPacket* packet = new VCMPacket(packet_buffer_, packet_buffer_size(),
- packet_header_);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
- kNoErrors, frame_data)));
- delete packet;
-
- packet_header_.type.Video.isFirstPacket = false;
- vp8_header_->partitionId = 0;
- vp8_header_->beginningOfPartition = false;
- packet_header_.header.markerBit = false;
- packet_header_.header.sequenceNumber += 2;
- FillPacket(2);
- packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
- kNoErrors, frame_data)));
- delete packet;
-
- packet_header_.type.Video.isFirstPacket = false;
- vp8_header_->partitionId = 1;
- vp8_header_->beginningOfPartition = true;
- packet_header_.header.markerBit = true;
- packet_header_.header.sequenceNumber += 1;
- FillPacket(3);
- packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
- kNoErrors, frame_data)));
- delete packet;
-
- // One packet should be removed (end of partition 0).
- EXPECT_EQ(2 * packet_buffer_size(),
- session_.BuildVP8FragmentationHeader(
- frame_buffer_, frame_buffer_size(), &fragmentation_));
- SCOPED_TRACE("Calling VerifyPartition");
- EXPECT_TRUE(VerifyPartition(0, 1, 0));
- SCOPED_TRACE("Calling VerifyPartition");
- EXPECT_TRUE(VerifyPartition(1, 1, 3));
-}
-
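VerifyPartition() itself is not part of this hunk, but the call sites read naturally as (partition index, expected packet count, fill value of the partition's first packet), where FillPacket(v) fills a packet's payload with the byte v. A plausible reconstruction, offered only as a reading aid and likely differing in detail from the real helper:

  // Sketch reconstructed from the call sites above.
  bool VerifyPartitionSketch(const webrtc::RTPFragmentationHeader& frag,
                             const uint8_t* frame_buffer,
                             size_t packet_size,
                             int partition_id,
                             int packets_expected,
                             int start_value) {
    if (frag.fragmentationLength[partition_id] !=
        packets_expected * packet_size)
      return false;
    const uint8_t* partition =
        frame_buffer + frag.fragmentationOffset[partition_id];
    for (int i = 0; i < packets_expected; ++i) {
      for (size_t j = 0; j < packet_size; ++j) {
        // Packet i of this partition was filled with start_value + i.
        if (partition[i * packet_size + j] !=
            static_cast<uint8_t>(start_value + i))
          return false;
      }
    }
    return true;
  }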
-TEST_F(TestVP8Partitions, TwoPartitionsOneLoss2) {
- // Partition 0 | Partition 1
- // [ 1 ] [ 2 ] | [ 3 ] [ 5 ]
- packet_header_.type.Video.isFirstPacket = true;
- vp8_header_->beginningOfPartition = true;
- vp8_header_->partitionId = 0;
- packet_header_.header.markerBit = false;
- packet_header_.header.sequenceNumber = 1;
- FillPacket(1);
- VCMPacket* packet = new VCMPacket(packet_buffer_, packet_buffer_size(),
- packet_header_);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
- kNoErrors, frame_data)));
- delete packet;
-
- packet_header_.type.Video.isFirstPacket = false;
- vp8_header_->partitionId = 0;
- vp8_header_->beginningOfPartition = false;
- packet_header_.header.markerBit = false;
- packet_header_.header.sequenceNumber += 1;
- FillPacket(2);
- packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
- kNoErrors, frame_data)));
- delete packet;
-
- packet_header_.type.Video.isFirstPacket = false;
- vp8_header_->partitionId = 1;
- vp8_header_->beginningOfPartition = true;
- packet_header_.header.markerBit = false;
- packet_header_.header.sequenceNumber += 1;
- FillPacket(3);
- packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
- kNoErrors, frame_data)));
- delete packet;
-
- packet_header_.type.Video.isFirstPacket = false;
- vp8_header_->partitionId = 1;
- vp8_header_->beginningOfPartition = false;
- packet_header_.header.markerBit = true;
- packet_header_.header.sequenceNumber += 2;
- FillPacket(5);
- packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
- kNoErrors, frame_data)));
- delete packet;
-
- // One packet should be removed (the tail of partition 1); 3 packets left.
- EXPECT_EQ(3 * packet_buffer_size(),
- session_.BuildVP8FragmentationHeader(
- frame_buffer_, frame_buffer_size(), &fragmentation_));
- SCOPED_TRACE("Calling VerifyPartition");
- EXPECT_TRUE(VerifyPartition(0, 2, 1));
- SCOPED_TRACE("Calling VerifyPartition");
- EXPECT_TRUE(VerifyPartition(1, 1, 3));
-}
-
-TEST_F(TestVP8Partitions, TwoPartitionsNoLossWrap) {
- // Partition 0 | Partition 1
- // [ fffd ] [ fffe ] | [ ffff ] [ 0 ]
- packet_header_.type.Video.isFirstPacket = true;
- vp8_header_->beginningOfPartition = true;
- vp8_header_->partitionId = 0;
- packet_header_.header.markerBit = false;
- packet_header_.header.sequenceNumber = 0xfffd;
- FillPacket(0);
- VCMPacket* packet = new VCMPacket(packet_buffer_, packet_buffer_size(),
- packet_header_);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
- kNoErrors, frame_data)));
- delete packet;
-
- packet_header_.type.Video.isFirstPacket = false;
- vp8_header_->partitionId = 0;
- vp8_header_->beginningOfPartition = false;
- packet_header_.header.markerBit = false;
- packet_header_.header.sequenceNumber += 1;
- FillPacket(1);
- packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
- kNoErrors, frame_data)));
- delete packet;
-
- packet_header_.type.Video.isFirstPacket = false;
- vp8_header_->partitionId = 1;
- vp8_header_->beginningOfPartition = true;
- packet_header_.header.markerBit = false;
- packet_header_.header.sequenceNumber += 1;
- FillPacket(2);
- packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
- kNoErrors, frame_data)));
- delete packet;
-
- packet_header_.type.Video.isFirstPacket = false;
- vp8_header_->partitionId = 1;
- vp8_header_->beginningOfPartition = false;
- packet_header_.header.markerBit = true;
- packet_header_.header.sequenceNumber += 1;
- FillPacket(3);
- packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
- kNoErrors, frame_data)));
- delete packet;
-
- // No packet should be removed.
- EXPECT_EQ(4 * packet_buffer_size(),
- session_.BuildVP8FragmentationHeader(
- frame_buffer_, frame_buffer_size(), &fragmentation_));
- SCOPED_TRACE("Calling VerifyPartition");
- EXPECT_TRUE(VerifyPartition(0, 2, 0));
- SCOPED_TRACE("Calling VerifyPartition");
- EXPECT_TRUE(VerifyPartition(1, 2, 2));
-}
-
-TEST_F(TestVP8Partitions, TwoPartitionsLossWrap) {
- // Partition 0 | Partition 1
- // [ fffd ] [ fffe ] | [ ffff ] [ 1 ]
- packet_header_.type.Video.isFirstPacket = true;
- vp8_header_->beginningOfPartition = true;
- vp8_header_->partitionId = 0;
- packet_header_.header.markerBit = false;
- packet_header_.header.sequenceNumber = 0xfffd;
- FillPacket(0);
- VCMPacket* packet = new VCMPacket(packet_buffer_, packet_buffer_size(),
- packet_header_);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
- kNoErrors, frame_data)));
- delete packet;
-
- packet_header_.type.Video.isFirstPacket = false;
- vp8_header_->partitionId = 0;
- vp8_header_->beginningOfPartition = false;
- packet_header_.header.markerBit = false;
- packet_header_.header.sequenceNumber += 1;
- FillPacket(1);
- packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
- kNoErrors, frame_data)));
- delete packet;
-
- packet_header_.type.Video.isFirstPacket = false;
- vp8_header_->partitionId = 1;
- vp8_header_->beginningOfPartition = true;
- packet_header_.header.markerBit = false;
- packet_header_.header.sequenceNumber += 1;
- FillPacket(2);
- packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
- kNoErrors, frame_data)));
- delete packet;
-
- packet_header_.type.Video.isFirstPacket = false;
- vp8_header_->partitionId = 1;
- vp8_header_->beginningOfPartition = false;
- packet_header_.header.markerBit = true;
- packet_header_.header.sequenceNumber += 2;
- FillPacket(3);
- packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
- kNoErrors, frame_data)));
- delete packet;
-
- // One packet should be removed from the last partition.
- EXPECT_EQ(3 * packet_buffer_size(),
- session_.BuildVP8FragmentationHeader(
- frame_buffer_, frame_buffer_size(), &fragmentation_));
- SCOPED_TRACE("Calling VerifyPartition");
- EXPECT_TRUE(VerifyPartition(0, 2, 0));
- SCOPED_TRACE("Calling VerifyPartition");
- EXPECT_TRUE(VerifyPartition(1, 1, 2));
-}
-
-
-TEST_F(TestVP8Partitions, ThreePartitionsOneMissing) {
- // Partition 0 |Partition 1 | Partition 2
- // [ 1 ] [ 2 ] |            | [ 5 ] [ 6 ]
- packet_header_.type.Video.isFirstPacket = true;
- vp8_header_->beginningOfPartition = true;
- vp8_header_->partitionId = 0;
- packet_header_.header.markerBit = false;
- packet_header_.header.sequenceNumber = 1;
- FillPacket(1);
- VCMPacket* packet = new VCMPacket(packet_buffer_, packet_buffer_size(),
- packet_header_);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
- kNoErrors, frame_data)));
- delete packet;
-
- packet_header_.type.Video.isFirstPacket = false;
- vp8_header_->partitionId = 0;
- vp8_header_->beginningOfPartition = false;
- packet_header_.header.markerBit = false;
- packet_header_.header.sequenceNumber += 1;
- FillPacket(2);
- packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
- kNoErrors, frame_data)));
- delete packet;
-
- packet_header_.type.Video.isFirstPacket = false;
- vp8_header_->partitionId = 2;
- vp8_header_->beginningOfPartition = true;
- packet_header_.header.markerBit = false;
- packet_header_.header.sequenceNumber += 3;
- FillPacket(5);
- packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
- kNoErrors, frame_data)));
- delete packet;
-
- packet_header_.type.Video.isFirstPacket = false;
- vp8_header_->partitionId = 2;
- vp8_header_->beginningOfPartition = false;
- packet_header_.header.markerBit = true;
- packet_header_.header.sequenceNumber += 1;
- FillPacket(6);
- packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
- kNoErrors, frame_data)));
- delete packet;
-
- // No packet should be removed.
- EXPECT_EQ(4 * packet_buffer_size(),
- session_.BuildVP8FragmentationHeader(
- frame_buffer_, frame_buffer_size(), &fragmentation_));
- SCOPED_TRACE("Calling VerifyPartition");
- EXPECT_TRUE(VerifyPartition(0, 2, 1));
- SCOPED_TRACE("Calling VerifyPartition");
- EXPECT_TRUE(VerifyPartition(2, 2, 5));
-}
-
-TEST_F(TestVP8Partitions, ThreePartitionsLossInSecond) {
- // Partition 0 |Partition 1 | Partition 2
- // [ 1 ] [ 2 ] | [ 4 ] [ 5 ] | [ 6 ] [ 7 ]
- packet_header_.type.Video.isFirstPacket = true;
- vp8_header_->beginningOfPartition = true;
- vp8_header_->partitionId = 0;
- packet_header_.header.markerBit = false;
- packet_header_.header.sequenceNumber = 1;
- FillPacket(1);
- VCMPacket* packet = new VCMPacket(packet_buffer_, packet_buffer_size(),
- packet_header_);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
- kNoErrors, frame_data)));
- delete packet;
-
- packet_header_.type.Video.isFirstPacket = false;
- vp8_header_->partitionId = 0;
- vp8_header_->beginningOfPartition = false;
- packet_header_.header.markerBit = false;
- packet_header_.header.sequenceNumber += 1;
- FillPacket(2);
- packet = new VCMPacket(packet_buffer_, packet_buffer_size(),
- packet_header_);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
- kNoErrors, frame_data)));
- delete packet;
-
- packet_header_.type.Video.isFirstPacket = false;
- vp8_header_->partitionId = 1;
- vp8_header_->beginningOfPartition = false;
- packet_header_.header.markerBit = false;
- packet_header_.header.sequenceNumber += 2;
- FillPacket(4);
- packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
- kNoErrors, frame_data)));
- delete packet;
-
- packet_header_.type.Video.isFirstPacket = false;
- vp8_header_->partitionId = 1;
- vp8_header_->beginningOfPartition = false;
- packet_header_.header.markerBit = false;
- packet_header_.header.sequenceNumber += 1;
- FillPacket(5);
- packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
- kNoErrors, frame_data)));
- delete packet;
-
- packet_header_.type.Video.isFirstPacket = false;
- vp8_header_->partitionId = 2;
- vp8_header_->beginningOfPartition = true;
- packet_header_.header.markerBit = false;
- packet_header_.header.sequenceNumber += 1;
- FillPacket(6);
- packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
- kNoErrors, frame_data)));
- delete packet;
-
- packet_header_.type.Video.isFirstPacket = false;
- vp8_header_->partitionId = 2;
- vp8_header_->beginningOfPartition = false;
- packet_header_.header.markerBit = true;
- packet_header_.header.sequenceNumber += 1;
- FillPacket(7);
- packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
- kNoErrors, frame_data)));
- delete packet;
-
- // Two partitions are left; two packets were removed from the second one.
- EXPECT_EQ(4 * packet_buffer_size(),
- session_.BuildVP8FragmentationHeader(
- frame_buffer_, frame_buffer_size(), &fragmentation_));
- SCOPED_TRACE("Calling VerifyPartition");
- EXPECT_TRUE(VerifyPartition(0, 2, 1));
- SCOPED_TRACE("Calling VerifyPartition");
- EXPECT_TRUE(VerifyPartition(2, 2, 6));
-}
-
-TEST_F(TestVP8Partitions, AggregationOverTwoPackets) {
- // Partition 0 | Partition 1 | Partition 2
- // [ 0 | ] [ 1 ] | [ 2 ]
- packet_header_.type.Video.isFirstPacket = true;
- vp8_header_->beginningOfPartition = true;
- vp8_header_->partitionId = 0;
- packet_header_.header.markerBit = false;
- packet_header_.header.sequenceNumber = 0;
- FillPacket(0);
- VCMPacket* packet = new VCMPacket(packet_buffer_, packet_buffer_size(),
- packet_header_);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
- kNoErrors, frame_data)));
- delete packet;
-
- packet_header_.type.Video.isFirstPacket = false;
- vp8_header_->partitionId = 1;
- vp8_header_->beginningOfPartition = false;
- packet_header_.header.markerBit = false;
- packet_header_.header.sequenceNumber += 1;
- FillPacket(1);
- packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
- kNoErrors, frame_data)));
- delete packet;
-
- packet_header_.type.Video.isFirstPacket = false;
- vp8_header_->partitionId = 2;
- vp8_header_->beginningOfPartition = true;
- packet_header_.header.markerBit = true;
- packet_header_.header.sequenceNumber += 1;
- FillPacket(2);
- packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
- kNoErrors, frame_data)));
- delete packet;
-
- // No packets removed.
- EXPECT_EQ(3 * packet_buffer_size(),
- session_.BuildVP8FragmentationHeader(
- frame_buffer_, frame_buffer_size(), &fragmentation_));
- SCOPED_TRACE("Calling VerifyPartition");
- EXPECT_TRUE(VerifyPartition(0, 2, 0));
- // This partition is aggregated in partition 0
- SCOPED_TRACE("Calling VerifyPartition");
- EXPECT_TRUE(VerifyPartition(1, 0, 0));
- SCOPED_TRACE("Calling VerifyPartition");
- EXPECT_TRUE(VerifyPartition(2, 1, 2));
-}
-
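The final three expectations show how aggregation is reported: packet 0 carries the whole of partition 0 plus the start of partition 1, so partition 1 is folded into partition 0's fragment and shows up with zero length (VerifyPartition(1, 0, 0)). The header built above can be inspected directly; fragmentationVectorSize, fragmentationOffset and fragmentationLength are the real RTPFragmentationHeader members, while the dump function itself is only illustrative:

  #include <cstdio>
  #include "webrtc/modules/interface/module_common_types.h"

  // Prints one line per fragment of the VP8 fragmentation header.
  void DumpFragmentation(const webrtc::RTPFragmentationHeader& frag) {
    for (size_t i = 0; i < frag.fragmentationVectorSize; ++i) {
      std::printf("partition %zu: offset=%zu length=%zu\n", i,
                  static_cast<size_t>(frag.fragmentationOffset[i]),
                  static_cast<size_t>(frag.fragmentationLength[i]));
    }
  }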
-TEST_F(TestNalUnits, OnlyReceivedEmptyPacket) {
- packet_.isFirstPacket = false;
- packet_.completeNALU = kNaluComplete;
- packet_.frameType = kEmptyFrame;
- packet_.sizeBytes = 0;
- packet_.seqNum = 0;
- packet_.markerBit = false;
- EXPECT_EQ(0, session_.InsertPacket(packet_,
- frame_buffer_,
- kNoErrors,
- frame_data));
-
- EXPECT_EQ(0U, session_.MakeDecodable());
- EXPECT_EQ(0U, session_.SessionLength());
-}
-
-TEST_F(TestNalUnits, OneIsolatedNaluLoss) {
- packet_.isFirstPacket = true;
- packet_.completeNALU = kNaluComplete;
- packet_.seqNum = 0;
- packet_.markerBit = false;
- FillPacket(0);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
- kNoErrors, frame_data)));
-
- packet_.isFirstPacket = false;
- packet_.completeNALU = kNaluComplete;
- packet_.seqNum += 2;
- packet_.markerBit = true;
- FillPacket(2);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
- kNoErrors, frame_data)));
-
- EXPECT_EQ(0U, session_.MakeDecodable());
- EXPECT_EQ(2 * packet_buffer_size(), session_.SessionLength());
- SCOPED_TRACE("Calling VerifyNalu");
- EXPECT_TRUE(VerifyNalu(0, 1, 0));
- SCOPED_TRACE("Calling VerifyNalu");
- EXPECT_TRUE(VerifyNalu(1, 1, 2));
-}
-
-TEST_F(TestNalUnits, LossInMiddleOfNalu) {
- packet_.isFirstPacket = true;
- packet_.completeNALU = kNaluComplete;
- packet_.seqNum = 0;
- packet_.markerBit = false;
- FillPacket(0);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
- kNoErrors, frame_data)));
-
- packet_.isFirstPacket = false;
- packet_.completeNALU = kNaluEnd;
- packet_.seqNum += 2;
- packet_.markerBit = true;
- FillPacket(2);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
- kNoErrors, frame_data)));
-
- EXPECT_EQ(packet_buffer_size(), session_.MakeDecodable());
- EXPECT_EQ(packet_buffer_size(), session_.SessionLength());
- SCOPED_TRACE("Calling VerifyNalu");
- EXPECT_TRUE(VerifyNalu(0, 1, 0));
-}
-
-TEST_F(TestNalUnits, StartAndEndOfLastNalUnitLost) {
- packet_.isFirstPacket = true;
- packet_.completeNALU = kNaluComplete;
- packet_.seqNum = 0;
- packet_.markerBit = false;
- FillPacket(0);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
- kNoErrors, frame_data)));
-
- packet_.isFirstPacket = false;
- packet_.completeNALU = kNaluIncomplete;
- packet_.seqNum += 2;
- packet_.markerBit = false;
- FillPacket(1);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
- kNoErrors, frame_data)));
-
- EXPECT_EQ(packet_buffer_size(), session_.MakeDecodable());
- EXPECT_EQ(packet_buffer_size(), session_.SessionLength());
- SCOPED_TRACE("Calling VerifyNalu");
- EXPECT_TRUE(VerifyNalu(0, 1, 0));
-}
-
-TEST_F(TestNalUnits, ReorderWrapNoLoss) {
- packet_.seqNum = 0xFFFF;
- packet_.isFirstPacket = false;
- packet_.completeNALU = kNaluIncomplete;
- packet_.seqNum += 1;
- packet_.markerBit = false;
- FillPacket(1);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
- kNoErrors, frame_data)));
-
- packet_.isFirstPacket = true;
- packet_.completeNALU = kNaluComplete;
- packet_.seqNum -= 1;
- packet_.markerBit = false;
- FillPacket(0);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
- kNoErrors, frame_data)));
-
- packet_.isFirstPacket = false;
- packet_.completeNALU = kNaluEnd;
- packet_.seqNum += 2;
- packet_.markerBit = true;
- FillPacket(2);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
- kNoErrors, frame_data)));
-
- EXPECT_EQ(0U, session_.MakeDecodable());
- EXPECT_EQ(3 * packet_buffer_size(), session_.SessionLength());
- SCOPED_TRACE("Calling VerifyNalu");
- EXPECT_TRUE(VerifyNalu(0, 1, 0));
-}
-
-TEST_F(TestNalUnits, WrapLosses) {
- packet_.seqNum = 0xFFFF;
- packet_.isFirstPacket = false;
- packet_.completeNALU = kNaluIncomplete;
- packet_.markerBit = false;
- FillPacket(1);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
- kNoErrors, frame_data)));
-
- packet_.isFirstPacket = false;
- packet_.completeNALU = kNaluEnd;
- packet_.seqNum += 2;
- packet_.markerBit = true;
- FillPacket(2);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
- kNoErrors, frame_data)));
-
- EXPECT_EQ(2 * packet_buffer_size(), session_.MakeDecodable());
- EXPECT_EQ(0U, session_.SessionLength());
-}
-
-TEST_F(TestNalUnits, ReorderWrapLosses) {
- packet_.seqNum = 0xFFFF;
-
- packet_.isFirstPacket = false;
- packet_.completeNALU = kNaluEnd;
- packet_.seqNum += 2;
- packet_.markerBit = true;
- FillPacket(2);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
- kNoErrors, frame_data)));
-
- packet_.seqNum -= 2;
- packet_.isFirstPacket = false;
- packet_.completeNALU = kNaluIncomplete;
- packet_.markerBit = false;
- FillPacket(1);
- EXPECT_EQ(packet_buffer_size(),
- static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
- kNoErrors, frame_data)));
-
- EXPECT_EQ(2 * packet_buffer_size(), session_.MakeDecodable());
- EXPECT_EQ(0U, session_.SessionLength());
-}
-
-} // namespace webrtc
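Taken together, the NAL-unit tests document MakeDecodable()'s contract: it returns the number of bytes stripped so that only decodable NAL units remain, and SessionLength() reports what is left. A loss between two complete NAL units costs nothing (OneIsolatedNaluLoss strips 0 bytes), a loss inside a NAL unit discards that unit's remaining packets (LossInMiddleOfNalu strips one packet's worth), and when every surviving packet belongs to a broken unit the whole session is emptied (WrapLosses and ReorderWrapLosses strip both packets).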
diff --git a/webrtc/modules/video_coding/main/source/test/stream_generator.cc b/webrtc/modules/video_coding/main/source/test/stream_generator.cc
deleted file mode 100644
index b365d96dc0..0000000000
--- a/webrtc/modules/video_coding/main/source/test/stream_generator.cc
+++ /dev/null
@@ -1,127 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_coding/main/source/test/stream_generator.h"
-
-#include <string.h>
-
-#include <list>
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/video_coding/main/source/packet.h"
-#include "webrtc/modules/video_coding/main/test/test_util.h"
-#include "webrtc/system_wrappers/include/clock.h"
-
-namespace webrtc {
-
-StreamGenerator::StreamGenerator(uint16_t start_seq_num, int64_t current_time)
- : packets_(), sequence_number_(start_seq_num), start_time_(current_time) {
-}
-
-void StreamGenerator::Init(uint16_t start_seq_num, int64_t current_time) {
- packets_.clear();
- sequence_number_ = start_seq_num;
- start_time_ = current_time;
- memset(packet_buffer_, 0, sizeof(packet_buffer_));
-}
-
-void StreamGenerator::GenerateFrame(FrameType type,
- int num_media_packets,
- int num_empty_packets,
- int64_t time_ms) {
- uint32_t timestamp = 90 * (time_ms - start_time_);
- for (int i = 0; i < num_media_packets; ++i) {
- const int packet_size =
- (kFrameSize + num_media_packets / 2) / num_media_packets;
- bool marker_bit = (i == num_media_packets - 1);
- packets_.push_back(GeneratePacket(
- sequence_number_, timestamp, packet_size, (i == 0), marker_bit, type));
- ++sequence_number_;
- }
- for (int i = 0; i < num_empty_packets; ++i) {
- packets_.push_back(GeneratePacket(sequence_number_, timestamp, 0, false,
- false, kEmptyFrame));
- ++sequence_number_;
- }
-}
-
-VCMPacket StreamGenerator::GeneratePacket(uint16_t sequence_number,
- uint32_t timestamp,
- unsigned int size,
- bool first_packet,
- bool marker_bit,
- FrameType type) {
- EXPECT_LT(size, kMaxPacketSize);
- VCMPacket packet;
- packet.seqNum = sequence_number;
- packet.timestamp = timestamp;
- packet.frameType = type;
- packet.isFirstPacket = first_packet;
- packet.markerBit = marker_bit;
- packet.sizeBytes = size;
- packet.dataPtr = packet_buffer_;
- if (packet.isFirstPacket)
- packet.completeNALU = kNaluStart;
- else if (packet.markerBit)
- packet.completeNALU = kNaluEnd;
- else
- packet.completeNALU = kNaluIncomplete;
- return packet;
-}
-
-bool StreamGenerator::PopPacket(VCMPacket* packet, int index) {
- std::list<VCMPacket>::iterator it = GetPacketIterator(index);
- if (it == packets_.end())
- return false;
- if (packet)
- *packet = (*it);
- packets_.erase(it);
- return true;
-}
-
-bool StreamGenerator::GetPacket(VCMPacket* packet, int index) {
- std::list<VCMPacket>::iterator it = GetPacketIterator(index);
- if (it == packets_.end())
- return false;
- if (packet)
- *packet = (*it);
- return true;
-}
-
-bool StreamGenerator::NextPacket(VCMPacket* packet) {
- if (packets_.empty())
- return false;
- if (packet != NULL)
- *packet = packets_.front();
- packets_.pop_front();
- return true;
-}
-
-void StreamGenerator::DropLastPacket() { packets_.pop_back(); }
-
-uint16_t StreamGenerator::NextSequenceNumber() const {
- if (packets_.empty())
- return sequence_number_;
- return packets_.front().seqNum;
-}
-
-int StreamGenerator::PacketsRemaining() const { return packets_.size(); }
-
-std::list<VCMPacket>::iterator StreamGenerator::GetPacketIterator(int index) {
- std::list<VCMPacket>::iterator it = packets_.begin();
- for (int i = 0; i < index; ++i) {
- ++it;
- if (it == packets_.end())
- break;
- }
- return it;
-}
-
-} // namespace webrtc
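For orientation, a typical use of the generator in a jitter-buffer test looks roughly like the following (a minimal sketch; the zero start values and the single key frame are arbitrary choices, and kVideoFrameKey comes from the common FrameType enum):

  #include "webrtc/modules/video_coding/main/source/test/stream_generator.h"
  #include "webrtc/system_wrappers/include/clock.h"

  void ExampleStreamGeneratorUse() {
    webrtc::SimulatedClock clock(0);
    webrtc::StreamGenerator stream(/*start_seq_num=*/0,
                                   clock.TimeInMilliseconds());
    // One key frame split over three media packets, no empty packets.
    stream.GenerateFrame(webrtc::kVideoFrameKey, 3, 0,
                         clock.TimeInMilliseconds());
    webrtc::VCMPacket packet;
    while (stream.NextPacket(&packet)) {
      // Feed |packet| to the component under test, e.g. a jitter buffer.
    }
  }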
diff --git a/webrtc/modules/video_coding/main/source/test/stream_generator.h b/webrtc/modules/video_coding/main/source/test/stream_generator.h
deleted file mode 100644
index 7902d16706..0000000000
--- a/webrtc/modules/video_coding/main/source/test/stream_generator.h
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-#ifndef WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_TEST_STREAM_GENERATOR_H_
-#define WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_TEST_STREAM_GENERATOR_H_
-
-#include <list>
-
-#include "webrtc/modules/video_coding/main/source/packet.h"
-#include "webrtc/modules/video_coding/main/test/test_util.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-const unsigned int kDefaultBitrateKbps = 1000;
-const unsigned int kDefaultFrameRate = 25;
-const unsigned int kMaxPacketSize = 1500;
-const unsigned int kFrameSize =
- (kDefaultBitrateKbps + kDefaultFrameRate * 4) / (kDefaultFrameRate * 8);
-const int kDefaultFramePeriodMs = 1000 / kDefaultFrameRate;
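With these defaults the integer arithmetic gives kFrameSize = (1000 + 25 * 4) / (25 * 8) = 1100 / 200 = 5; the kDefaultFrameRate * 4 term is half of the kDefaultFrameRate * 8 divisor, so the division rounds to the nearest integer instead of truncating.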
-
-class StreamGenerator {
- public:
- StreamGenerator(uint16_t start_seq_num, int64_t current_time);
- void Init(uint16_t start_seq_num, int64_t current_time);
-
- // |time_ms| is the timestamp to put on the frame, in milliseconds.
- // GenerateFrame translates |time_ms| into a 90 kHz RTP timestamp and
- // stamps the frame with it.
- void GenerateFrame(FrameType type,
- int num_media_packets,
- int num_empty_packets,
- int64_t time_ms);
-
- bool PopPacket(VCMPacket* packet, int index);
- void DropLastPacket();
-
- bool GetPacket(VCMPacket* packet, int index);
-
- bool NextPacket(VCMPacket* packet);
-
- uint16_t NextSequenceNumber() const;
-
- int PacketsRemaining() const;
-
- private:
- VCMPacket GeneratePacket(uint16_t sequence_number,
- uint32_t timestamp,
- unsigned int size,
- bool first_packet,
- bool marker_bit,
- FrameType type);
-
- std::list<VCMPacket>::iterator GetPacketIterator(int index);
-
- std::list<VCMPacket> packets_;
- uint16_t sequence_number_;
- int64_t start_time_;
- uint8_t packet_buffer_[kMaxPacketSize];
-
- RTC_DISALLOW_COPY_AND_ASSIGN(StreamGenerator);
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_TEST_STREAM_GENERATOR_H_
diff --git a/webrtc/modules/video_coding/main/source/timestamp_map.cc b/webrtc/modules/video_coding/main/source/timestamp_map.cc
deleted file mode 100644
index c68a5af7ba..0000000000
--- a/webrtc/modules/video_coding/main/source/timestamp_map.cc
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <assert.h>
-#include <stdlib.h>
-
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/video_coding/main/source/timestamp_map.h"
-
-namespace webrtc {
-
-VCMTimestampMap::VCMTimestampMap(size_t capacity)
- : ring_buffer_(new TimestampDataTuple[capacity]),
- capacity_(capacity),
- next_add_idx_(0),
- next_pop_idx_(0) {
-}
-
-VCMTimestampMap::~VCMTimestampMap() {
-}
-
-void VCMTimestampMap::Add(uint32_t timestamp, VCMFrameInformation* data) {
- ring_buffer_[next_add_idx_].timestamp = timestamp;
- ring_buffer_[next_add_idx_].data = data;
- next_add_idx_ = (next_add_idx_ + 1) % capacity_;
-
- if (next_add_idx_ == next_pop_idx_) {
- // Circular list full; forget oldest entry.
- next_pop_idx_ = (next_pop_idx_ + 1) % capacity_;
- }
-}
-
-VCMFrameInformation* VCMTimestampMap::Pop(uint32_t timestamp) {
- while (!IsEmpty()) {
- if (ring_buffer_[next_pop_idx_].timestamp == timestamp) {
- // Found start time for this timestamp.
- VCMFrameInformation* data = ring_buffer_[next_pop_idx_].data;
- ring_buffer_[next_pop_idx_].data = nullptr;
- next_pop_idx_ = (next_pop_idx_ + 1) % capacity_;
- return data;
- } else if (IsNewerTimestamp(ring_buffer_[next_pop_idx_].timestamp,
- timestamp)) {
- // The timestamp we are looking for is not in the list.
- return nullptr;
- }
-
- // Not in this position, check next (and forget this position).
- next_pop_idx_ = (next_pop_idx_ + 1) % capacity_;
- }
-
- // Could not find matching timestamp in list.
- return nullptr;
-}
-
-bool VCMTimestampMap::IsEmpty() const {
- return (next_add_idx_ == next_pop_idx_);
-}
-} // namespace webrtc
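The map behaves as a timestamp-ordered queue over a fixed ring: Add() overwrites the oldest entry once the buffer is full, and Pop() walks forward from the oldest entry, discarding anything older than the requested timestamp and returning nullptr without consuming when it reaches a newer one. A minimal usage sketch (the capacity and timestamp are arbitrary; VCMFrameInformation's definition lives elsewhere in the module, so it is passed in here):

  void ExampleTimestampMapUse(webrtc::VCMFrameInformation* info) {
    webrtc::VCMTimestampMap map(/*capacity=*/10);
    map.Add(/*timestamp=*/3000, info);  // Recorded when decode starts.
    // ... the decoder later returns a frame carrying the same timestamp ...
    webrtc::VCMFrameInformation* found = map.Pop(3000);
    // found == info; any entries with older timestamps were dropped
    // during the walk.
  }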
diff --git a/webrtc/modules/video_coding/main/source/timestamp_map.h b/webrtc/modules/video_coding/main/source/timestamp_map.h
deleted file mode 100644
index 3d6f1bca0f..0000000000
--- a/webrtc/modules/video_coding/main/source/timestamp_map.h
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_TIMESTAMP_MAP_H_
-#define WEBRTC_MODULES_VIDEO_CODING_TIMESTAMP_MAP_H_
-
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-struct VCMFrameInformation;
-
-class VCMTimestampMap {
- public:
- explicit VCMTimestampMap(size_t capacity);
- ~VCMTimestampMap();
-
- // Empty the map.
- void Reset();
-
- void Add(uint32_t timestamp, VCMFrameInformation* data);
- VCMFrameInformation* Pop(uint32_t timestamp);
-
- private:
- struct TimestampDataTuple {
- uint32_t timestamp;
- VCMFrameInformation* data;
- };
- bool IsEmpty() const;
-
- rtc::scoped_ptr<TimestampDataTuple[]> ring_buffer_;
- const size_t capacity_;
- size_t next_add_idx_;
- size_t next_pop_idx_;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_TIMESTAMP_MAP_H_
diff --git a/webrtc/modules/video_coding/main/source/timing.cc b/webrtc/modules/video_coding/main/source/timing.cc
deleted file mode 100644
index 8d59135876..0000000000
--- a/webrtc/modules/video_coding/main/source/timing.cc
+++ /dev/null
@@ -1,279 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_coding/main/source/timing.h"
-
-#include "webrtc/modules/video_coding/main/source/internal_defines.h"
-#include "webrtc/modules/video_coding/main/source/jitter_buffer_common.h"
-#include "webrtc/system_wrappers/include/clock.h"
-#include "webrtc/system_wrappers/include/metrics.h"
-#include "webrtc/system_wrappers/include/timestamp_extrapolator.h"
-
-
-namespace webrtc {
-
-VCMTiming::VCMTiming(Clock* clock,
- VCMTiming* master_timing)
- : crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- clock_(clock),
- master_(false),
- ts_extrapolator_(),
- codec_timer_(),
- render_delay_ms_(kDefaultRenderDelayMs),
- min_playout_delay_ms_(0),
- jitter_delay_ms_(0),
- current_delay_ms_(0),
- last_decode_ms_(0),
- prev_frame_timestamp_(0),
- num_decoded_frames_(0),
- num_delayed_decoded_frames_(0),
- first_decoded_frame_ms_(-1),
- sum_missed_render_deadline_ms_(0) {
- if (master_timing == NULL) {
- master_ = true;
- ts_extrapolator_ = new TimestampExtrapolator(clock_->TimeInMilliseconds());
- } else {
- ts_extrapolator_ = master_timing->ts_extrapolator_;
- }
-}
-
-VCMTiming::~VCMTiming() {
- UpdateHistograms();
- if (master_) {
- delete ts_extrapolator_;
- }
- delete crit_sect_;
-}
-
-void VCMTiming::UpdateHistograms() const {
- CriticalSectionScoped cs(crit_sect_);
- if (num_decoded_frames_ == 0) {
- return;
- }
- int64_t elapsed_sec =
- (clock_->TimeInMilliseconds() - first_decoded_frame_ms_) / 1000;
- if (elapsed_sec < metrics::kMinRunTimeInSeconds) {
- return;
- }
- RTC_HISTOGRAM_COUNTS_100("WebRTC.Video.DecodedFramesPerSecond",
- static_cast<int>((num_decoded_frames_ / elapsed_sec) + 0.5f));
- RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.DelayedFramesToRenderer",
- num_delayed_decoded_frames_ * 100 / num_decoded_frames_);
- if (num_delayed_decoded_frames_ > 0) {
- RTC_HISTOGRAM_COUNTS_1000(
- "WebRTC.Video.DelayedFramesToRenderer_AvgDelayInMs",
- sum_missed_render_deadline_ms_ / num_delayed_decoded_frames_);
- }
-}
-
-void VCMTiming::Reset() {
- CriticalSectionScoped cs(crit_sect_);
- ts_extrapolator_->Reset(clock_->TimeInMilliseconds());
- codec_timer_.Reset();
- render_delay_ms_ = kDefaultRenderDelayMs;
- min_playout_delay_ms_ = 0;
- jitter_delay_ms_ = 0;
- current_delay_ms_ = 0;
- prev_frame_timestamp_ = 0;
-}
-
-void VCMTiming::ResetDecodeTime() {
- CriticalSectionScoped lock(crit_sect_);
- codec_timer_.Reset();
-}
-
-void VCMTiming::set_render_delay(uint32_t render_delay_ms) {
- CriticalSectionScoped cs(crit_sect_);
- render_delay_ms_ = render_delay_ms;
-}
-
-void VCMTiming::set_min_playout_delay(uint32_t min_playout_delay_ms) {
- CriticalSectionScoped cs(crit_sect_);
- min_playout_delay_ms_ = min_playout_delay_ms;
-}
-
-void VCMTiming::SetJitterDelay(uint32_t jitter_delay_ms) {
- CriticalSectionScoped cs(crit_sect_);
- if (jitter_delay_ms != jitter_delay_ms_) {
- jitter_delay_ms_ = jitter_delay_ms;
- // When in initial state, set current delay to minimum delay.
- if (current_delay_ms_ == 0) {
- current_delay_ms_ = jitter_delay_ms_;
- }
- }
-}
-
-void VCMTiming::UpdateCurrentDelay(uint32_t frame_timestamp) {
- CriticalSectionScoped cs(crit_sect_);
- uint32_t target_delay_ms = TargetDelayInternal();
-
- if (current_delay_ms_ == 0) {
- // Not initialized, set current delay to target.
- current_delay_ms_ = target_delay_ms;
- } else if (target_delay_ms != current_delay_ms_) {
- int64_t delay_diff_ms = static_cast<int64_t>(target_delay_ms) -
- current_delay_ms_;
- // Never change the delay by more than 100 ms per second. If we change
- // the delay in too-large steps we get noticeable freezes. By limiting
- // the change we can instead increase the delay in smaller steps, which
- // is experienced as the video playing in slow motion. When lowering the
- // delay the video is played at a faster pace.
- int64_t max_change_ms = 0;
- if (frame_timestamp < 0x0000ffff && prev_frame_timestamp_ > 0xffff0000) {
- // wrap
- max_change_ms = kDelayMaxChangeMsPerS * (frame_timestamp +
- (static_cast<int64_t>(1) << 32) - prev_frame_timestamp_) / 90000;
- } else {
- max_change_ms = kDelayMaxChangeMsPerS *
- (frame_timestamp - prev_frame_timestamp_) / 90000;
- }
- if (max_change_ms <= 0) {
- // Any change smaller than 1 ms is truncated and will be postponed to
- // a later call. A negative change is caused by reordering and should
- // be ignored.
- return;
- }
- delay_diff_ms = std::max(delay_diff_ms, -max_change_ms);
- delay_diff_ms = std::min(delay_diff_ms, max_change_ms);
-
- current_delay_ms_ = current_delay_ms_ + static_cast<int32_t>(delay_diff_ms);
- }
- prev_frame_timestamp_ = frame_timestamp;
-}
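Worked example: for frames one second apart at 90 kHz, frame_timestamp - prev_frame_timestamp_ is 90000, so max_change_ms = 100 * 90000 / 90000 = 100 ms; a requested delay increase of 110 ms is therefore applied as 100 ms now and the remaining 10 ms on a later call, which is exactly what the unit test further below checks. Across a timestamp wrap, the 2^32 correction keeps the elapsed-time term positive.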
-
-void VCMTiming::UpdateCurrentDelay(int64_t render_time_ms,
- int64_t actual_decode_time_ms) {
- CriticalSectionScoped cs(crit_sect_);
- uint32_t target_delay_ms = TargetDelayInternal();
- int64_t delayed_ms = actual_decode_time_ms -
- (render_time_ms - MaxDecodeTimeMs() - render_delay_ms_);
- if (delayed_ms < 0) {
- return;
- }
- if (current_delay_ms_ + delayed_ms <= target_delay_ms) {
- current_delay_ms_ += static_cast<uint32_t>(delayed_ms);
- } else {
- current_delay_ms_ = target_delay_ms;
- }
-}
-
-int32_t VCMTiming::StopDecodeTimer(uint32_t time_stamp,
- int64_t start_time_ms,
- int64_t now_ms,
- int64_t render_time_ms) {
- CriticalSectionScoped cs(crit_sect_);
- int32_t time_diff_ms = codec_timer_.StopTimer(start_time_ms, now_ms);
- assert(time_diff_ms >= 0);
- last_decode_ms_ = time_diff_ms;
-
- // Update stats.
- ++num_decoded_frames_;
- if (num_decoded_frames_ == 1) {
- first_decoded_frame_ms_ = now_ms;
- }
- int time_until_rendering_ms = render_time_ms - render_delay_ms_ - now_ms;
- if (time_until_rendering_ms < 0) {
- sum_missed_render_deadline_ms_ += -time_until_rendering_ms;
- ++num_delayed_decoded_frames_;
- }
- return 0;
-}
-
-void VCMTiming::IncomingTimestamp(uint32_t time_stamp, int64_t now_ms) {
- CriticalSectionScoped cs(crit_sect_);
- ts_extrapolator_->Update(now_ms, time_stamp);
-}
-
-int64_t VCMTiming::RenderTimeMs(uint32_t frame_timestamp, int64_t now_ms)
- const {
- CriticalSectionScoped cs(crit_sect_);
- const int64_t render_time_ms = RenderTimeMsInternal(frame_timestamp, now_ms);
- return render_time_ms;
-}
-
-int64_t VCMTiming::RenderTimeMsInternal(uint32_t frame_timestamp,
- int64_t now_ms) const {
- int64_t estimated_complete_time_ms =
- ts_extrapolator_->ExtrapolateLocalTime(frame_timestamp);
- if (estimated_complete_time_ms == -1) {
- estimated_complete_time_ms = now_ms;
- }
-
- // Make sure that we have at least the playout delay.
- uint32_t actual_delay = std::max(current_delay_ms_, min_playout_delay_ms_);
- return estimated_complete_time_ms + actual_delay;
-}
-
-// Must be called from inside a critical section.
-int32_t VCMTiming::MaxDecodeTimeMs(FrameType frame_type /*= kVideoFrameDelta*/)
- const {
- const int32_t decode_time_ms = codec_timer_.RequiredDecodeTimeMs(frame_type);
- assert(decode_time_ms >= 0);
- return decode_time_ms;
-}
-
-uint32_t VCMTiming::MaxWaitingTime(int64_t render_time_ms, int64_t now_ms)
- const {
- CriticalSectionScoped cs(crit_sect_);
-
- const int64_t max_wait_time_ms = render_time_ms - now_ms -
- MaxDecodeTimeMs() - render_delay_ms_;
-
- if (max_wait_time_ms < 0) {
- return 0;
- }
- return static_cast<uint32_t>(max_wait_time_ms);
-}
-
-bool VCMTiming::EnoughTimeToDecode(uint32_t available_processing_time_ms)
- const {
- CriticalSectionScoped cs(crit_sect_);
- int32_t max_decode_time_ms = MaxDecodeTimeMs();
- if (max_decode_time_ms < 0) {
- // Haven't decoded any frames yet, try decoding one to get an estimate
- // of the decode time.
- return true;
- } else if (max_decode_time_ms == 0) {
- // The decode time is less than 1 ms; set it to 1 for now since we
- // don't have any better precision. Count ticks later?
- max_decode_time_ms = 1;
- }
- return static_cast<int32_t>(available_processing_time_ms) -
- max_decode_time_ms > 0;
-}
-
-uint32_t VCMTiming::TargetVideoDelay() const {
- CriticalSectionScoped cs(crit_sect_);
- return TargetDelayInternal();
-}
-
-uint32_t VCMTiming::TargetDelayInternal() const {
- return std::max(min_playout_delay_ms_,
- jitter_delay_ms_ + MaxDecodeTimeMs() + render_delay_ms_);
-}
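In the numbers used by the unit test below: with a jitter delay of 130 ms, a decode time of 10 ms, a render delay of 10 ms and a minimum playout delay of 200 ms, the target is max(200, 130 + 10 + 10) = 200 ms, and the maximum waiting time is 200 - 10 - 10 = 180 ms.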
-
-void VCMTiming::GetTimings(int* decode_ms,
- int* max_decode_ms,
- int* current_delay_ms,
- int* target_delay_ms,
- int* jitter_buffer_ms,
- int* min_playout_delay_ms,
- int* render_delay_ms) const {
- CriticalSectionScoped cs(crit_sect_);
- *decode_ms = last_decode_ms_;
- *max_decode_ms = MaxDecodeTimeMs();
- *current_delay_ms = current_delay_ms_;
- *target_delay_ms = TargetDelayInternal();
- *jitter_buffer_ms = jitter_delay_ms_;
- *min_playout_delay_ms = min_playout_delay_ms_;
- *render_delay_ms = render_delay_ms_;
-}
-
-} // namespace webrtc
diff --git a/webrtc/modules/video_coding/main/source/timing.h b/webrtc/modules/video_coding/main/source/timing.h
deleted file mode 100644
index d3b8fa673f..0000000000
--- a/webrtc/modules/video_coding/main/source/timing.h
+++ /dev/null
@@ -1,127 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_TIMING_H_
-#define WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_TIMING_H_
-
-#include "webrtc/base/thread_annotations.h"
-#include "webrtc/modules/video_coding/main/source/codec_timer.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-class Clock;
-class TimestampExtrapolator;
-
-class VCMTiming {
- public:
- // The primary timing component should be passed
- // if this is the dual timing component.
- VCMTiming(Clock* clock,
- VCMTiming* master_timing = NULL);
- ~VCMTiming();
-
- // Resets the timing to the initial state.
- void Reset();
- void ResetDecodeTime();
-
- // Set the amount of time needed to render an image. Defaults to 10 ms.
- void set_render_delay(uint32_t render_delay_ms);
-
- // Set the minimum time the video must be delayed on the receiver to
- // get the desired jitter buffer level.
- void SetJitterDelay(uint32_t required_delay_ms);
-
- // Set the minimum playout delay required to sync video with audio.
- void set_min_playout_delay(uint32_t min_playout_delay);
-
- // Increases or decreases the current delay to get closer to the target delay.
- // Calculates how long it has been since the previous call to this function,
- // and increases/decreases the delay in proportion to the time difference.
- void UpdateCurrentDelay(uint32_t frame_timestamp);
-
- // Increases or decreases the current delay to get closer to the target delay.
- // Given the actual decode time in ms and the render time in ms for a frame,
- // this function calculates how late the frame is and increases the delay
- // accordingly.
- void UpdateCurrentDelay(int64_t render_time_ms,
- int64_t actual_decode_time_ms);
-
- // Stops the decoder timer, should be called when the decoder returns a frame
- // or when the decoded frame callback is called.
- int32_t StopDecodeTimer(uint32_t time_stamp,
- int64_t start_time_ms,
- int64_t now_ms,
- int64_t render_time_ms);
-
- // Used to report that a frame is passed to decoding. Updates the timestamp
- // filter which is used to map between timestamps and receiver system time.
- void IncomingTimestamp(uint32_t time_stamp, int64_t last_packet_time_ms);
- // Returns the receiver system time when the frame with timestamp
- // frame_timestamp should be rendered, assuming that the system time currently
- // is now_ms.
- int64_t RenderTimeMs(uint32_t frame_timestamp, int64_t now_ms) const;
-
- // Returns the maximum time in ms that we can wait for a frame to become
- // complete before we must pass it to the decoder.
- uint32_t MaxWaitingTime(int64_t render_time_ms, int64_t now_ms) const;
-
- // Returns the current target delay which is required delay + decode time +
- // render delay.
- uint32_t TargetVideoDelay() const;
-
- // Calculates whether or not there is enough time to decode a frame given a
- // certain amount of processing time.
- bool EnoughTimeToDecode(uint32_t available_processing_time_ms) const;
-
- // Return current timing information.
- void GetTimings(int* decode_ms,
- int* max_decode_ms,
- int* current_delay_ms,
- int* target_delay_ms,
- int* jitter_buffer_ms,
- int* min_playout_delay_ms,
- int* render_delay_ms) const;
-
- enum { kDefaultRenderDelayMs = 10 };
- enum { kDelayMaxChangeMsPerS = 100 };
-
- protected:
- int32_t MaxDecodeTimeMs(FrameType frame_type = kVideoFrameDelta) const
- EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
- int64_t RenderTimeMsInternal(uint32_t frame_timestamp, int64_t now_ms) const
- EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
- uint32_t TargetDelayInternal() const EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
-
- private:
- void UpdateHistograms() const;
-
- CriticalSectionWrapper* crit_sect_;
- Clock* const clock_;
- bool master_ GUARDED_BY(crit_sect_);
- TimestampExtrapolator* ts_extrapolator_ GUARDED_BY(crit_sect_);
- VCMCodecTimer codec_timer_ GUARDED_BY(crit_sect_);
- uint32_t render_delay_ms_ GUARDED_BY(crit_sect_);
- uint32_t min_playout_delay_ms_ GUARDED_BY(crit_sect_);
- uint32_t jitter_delay_ms_ GUARDED_BY(crit_sect_);
- uint32_t current_delay_ms_ GUARDED_BY(crit_sect_);
- int last_decode_ms_ GUARDED_BY(crit_sect_);
- uint32_t prev_frame_timestamp_ GUARDED_BY(crit_sect_);
-
- // Statistics.
- size_t num_decoded_frames_ GUARDED_BY(crit_sect_);
- size_t num_delayed_decoded_frames_ GUARDED_BY(crit_sect_);
- int64_t first_decoded_frame_ms_ GUARDED_BY(crit_sect_);
- uint64_t sum_missed_render_deadline_ms_ GUARDED_BY(crit_sect_);
-};
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_TIMING_H_
diff --git a/webrtc/modules/video_coding/main/source/timing_unittest.cc b/webrtc/modules/video_coding/main/source/timing_unittest.cc
deleted file mode 100644
index 694a600c2a..0000000000
--- a/webrtc/modules/video_coding/main/source/timing_unittest.cc
+++ /dev/null
@@ -1,147 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <math.h>
-#include <stdio.h>
-#include <stdlib.h>
-
-#include "testing/gtest/include/gtest/gtest.h"
-
-#include "webrtc/modules/video_coding/main/interface/video_coding.h"
-#include "webrtc/modules/video_coding/main/source/internal_defines.h"
-#include "webrtc/modules/video_coding/main/source/timing.h"
-#include "webrtc/modules/video_coding/main/test/test_util.h"
-#include "webrtc/system_wrappers/include/clock.h"
-#include "webrtc/system_wrappers/include/trace.h"
-#include "webrtc/test/testsupport/fileutils.h"
-
-namespace webrtc {
-
-TEST(ReceiverTiming, Tests) {
- SimulatedClock clock(0);
- VCMTiming timing(&clock);
- uint32_t waitTime = 0;
- uint32_t jitterDelayMs = 0;
- uint32_t maxDecodeTimeMs = 0;
- uint32_t timeStamp = 0;
-
- timing.Reset();
-
- timing.UpdateCurrentDelay(timeStamp);
-
- timing.Reset();
-
- timing.IncomingTimestamp(timeStamp, clock.TimeInMilliseconds());
- jitterDelayMs = 20;
- timing.SetJitterDelay(jitterDelayMs);
- timing.UpdateCurrentDelay(timeStamp);
- timing.set_render_delay(0);
- waitTime = timing.MaxWaitingTime(
- timing.RenderTimeMs(timeStamp, clock.TimeInMilliseconds()),
- clock.TimeInMilliseconds());
- // First update initializes the render time. Since we have no decode delay
- // we get waitTime = renderTime - now - renderDelay = jitter.
- EXPECT_EQ(jitterDelayMs, waitTime);
-
- jitterDelayMs += VCMTiming::kDelayMaxChangeMsPerS + 10;
- timeStamp += 90000;
- clock.AdvanceTimeMilliseconds(1000);
- timing.SetJitterDelay(jitterDelayMs);
- timing.UpdateCurrentDelay(timeStamp);
- waitTime = timing.MaxWaitingTime(timing.RenderTimeMs(
- timeStamp, clock.TimeInMilliseconds()), clock.TimeInMilliseconds());
- // Since we gradually increase the delay we only get 100 ms every second.
- EXPECT_EQ(jitterDelayMs - 10, waitTime);
-
- timeStamp += 90000;
- clock.AdvanceTimeMilliseconds(1000);
- timing.UpdateCurrentDelay(timeStamp);
- waitTime = timing.MaxWaitingTime(
- timing.RenderTimeMs(timeStamp, clock.TimeInMilliseconds()),
- clock.TimeInMilliseconds());
- EXPECT_EQ(waitTime, jitterDelayMs);
-
- // Feed 300 incoming frames without jitter and verify that this gives the
- // exact wait time.
- for (int i = 0; i < 300; i++) {
- clock.AdvanceTimeMilliseconds(1000 / 25);
- timeStamp += 90000 / 25;
- timing.IncomingTimestamp(timeStamp, clock.TimeInMilliseconds());
- }
- timing.UpdateCurrentDelay(timeStamp);
- waitTime = timing.MaxWaitingTime(
- timing.RenderTimeMs(timeStamp, clock.TimeInMilliseconds()),
- clock.TimeInMilliseconds());
- EXPECT_EQ(waitTime, jitterDelayMs);
-
- // Add decode time estimates.
- for (int i = 0; i < 10; i++) {
- int64_t startTimeMs = clock.TimeInMilliseconds();
- clock.AdvanceTimeMilliseconds(10);
- timing.StopDecodeTimer(timeStamp, startTimeMs,
- clock.TimeInMilliseconds(), timing.RenderTimeMs(
- timeStamp, clock.TimeInMilliseconds()));
- timeStamp += 90000 / 25;
- clock.AdvanceTimeMilliseconds(1000 / 25 - 10);
- timing.IncomingTimestamp(timeStamp, clock.TimeInMilliseconds());
- }
- maxDecodeTimeMs = 10;
- timing.SetJitterDelay(jitterDelayMs);
- clock.AdvanceTimeMilliseconds(1000);
- timeStamp += 90000;
- timing.UpdateCurrentDelay(timeStamp);
- waitTime = timing.MaxWaitingTime(
- timing.RenderTimeMs(timeStamp, clock.TimeInMilliseconds()),
- clock.TimeInMilliseconds());
- EXPECT_EQ(waitTime, jitterDelayMs);
-
- uint32_t minTotalDelayMs = 200;
- timing.set_min_playout_delay(minTotalDelayMs);
- clock.AdvanceTimeMilliseconds(5000);
- timeStamp += 5*90000;
- timing.UpdateCurrentDelay(timeStamp);
- const int kRenderDelayMs = 10;
- timing.set_render_delay(kRenderDelayMs);
- waitTime = timing.MaxWaitingTime(
- timing.RenderTimeMs(timeStamp, clock.TimeInMilliseconds()),
- clock.TimeInMilliseconds());
- // We should have at least minTotalDelayMs - decodeTime (10) - renderDelay
- // (10) left to wait.
- EXPECT_EQ(waitTime, minTotalDelayMs - maxDecodeTimeMs - kRenderDelayMs);
- // The total video delay should be equal to the min total delay.
- EXPECT_EQ(minTotalDelayMs, timing.TargetVideoDelay());
-
- // Reset playout delay.
- timing.set_min_playout_delay(0);
- clock.AdvanceTimeMilliseconds(5000);
- timeStamp += 5*90000;
- timing.UpdateCurrentDelay(timeStamp);
-}
-
-TEST(ReceiverTiming, WrapAround) {
- const int kFramerate = 25;
- SimulatedClock clock(0);
- VCMTiming timing(&clock);
- // Provoke a wrap-around. The fourth frame will have wrapped at 25 fps.
- uint32_t timestamp = 0xFFFFFFFFu - 3 * 90000 / kFramerate;
- for (int i = 0; i < 4; ++i) {
- timing.IncomingTimestamp(timestamp, clock.TimeInMilliseconds());
- clock.AdvanceTimeMilliseconds(1000 / kFramerate);
- timestamp += 90000 / kFramerate;
- int64_t render_time = timing.RenderTimeMs(0xFFFFFFFFu,
- clock.TimeInMilliseconds());
- EXPECT_EQ(3 * 1000 / kFramerate, render_time);
- render_time = timing.RenderTimeMs(89u, // One second later in 90 kHz.
- clock.TimeInMilliseconds());
- EXPECT_EQ(3 * 1000 / kFramerate + 1, render_time);
- }
-}
-
-} // namespace webrtc
diff --git a/webrtc/modules/video_coding/main/source/video_coding_impl.cc b/webrtc/modules/video_coding/main/source/video_coding_impl.cc
deleted file mode 100644
index b0a6754cbd..0000000000
--- a/webrtc/modules/video_coding/main/source/video_coding_impl.cc
+++ /dev/null
@@ -1,359 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/common_types.h"
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
-#include "webrtc/modules/video_coding/main/source/encoded_frame.h"
-#include "webrtc/modules/video_coding/main/source/jitter_buffer.h"
-#include "webrtc/modules/video_coding/main/source/packet.h"
-#include "webrtc/modules/video_coding/main/source/video_coding_impl.h"
-#include "webrtc/system_wrappers/include/clock.h"
-
-namespace webrtc {
-namespace vcm {
-
-int64_t
-VCMProcessTimer::Period() const {
- return _periodMs;
-}
-
-int64_t
-VCMProcessTimer::TimeUntilProcess() const {
- const int64_t time_since_process = _clock->TimeInMilliseconds() - _latestMs;
- const int64_t time_until_process = _periodMs - time_since_process;
- return std::max<int64_t>(time_until_process, 0);
-}
-
-void
-VCMProcessTimer::Processed() {
- _latestMs = _clock->TimeInMilliseconds();
-}
-} // namespace vcm
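VCMProcessTimer implements the usual module-thread pattern: TimeUntilProcess() returns how long to wait before the next period elapses (clamped at zero), and Processed() restarts the period from the current time. A driving loop might look like this sketch (DoPeriodicWork and SleepMs are hypothetical helpers; the module exposes the same idea through TimeUntilNextProcess()/Process() below):

  void DriveTimer(webrtc::vcm::VCMProcessTimer* timer, bool* running) {
    while (*running) {
      int64_t wait_ms = timer->TimeUntilProcess();
      if (wait_ms == 0) {
        DoPeriodicWork();    // Whatever work the period gates.
        timer->Processed();  // Restart the period from "now".
      } else {
        SleepMs(wait_ms);    // Hypothetical sleep helper.
      }
    }
  }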
-
-namespace {
-// This wrapper provides a way to modify the callback without the need to expose
-// a register method all the way down to the function calling it.
-class EncodedImageCallbackWrapper : public EncodedImageCallback {
- public:
- EncodedImageCallbackWrapper()
- : cs_(CriticalSectionWrapper::CreateCriticalSection()), callback_(NULL) {}
-
- virtual ~EncodedImageCallbackWrapper() {}
-
- void Register(EncodedImageCallback* callback) {
- CriticalSectionScoped cs(cs_.get());
- callback_ = callback;
- }
-
- // TODO(andresp): Change to void as return value is ignored.
- virtual int32_t Encoded(const EncodedImage& encoded_image,
- const CodecSpecificInfo* codec_specific_info,
- const RTPFragmentationHeader* fragmentation) {
- CriticalSectionScoped cs(cs_.get());
- if (callback_)
- return callback_->Encoded(
- encoded_image, codec_specific_info, fragmentation);
- return 0;
- }
-
- private:
- rtc::scoped_ptr<CriticalSectionWrapper> cs_;
- EncodedImageCallback* callback_ GUARDED_BY(cs_);
-};
-
-class VideoCodingModuleImpl : public VideoCodingModule {
- public:
- VideoCodingModuleImpl(Clock* clock,
- EventFactory* event_factory,
- bool owns_event_factory,
- VideoEncoderRateObserver* encoder_rate_observer,
- VCMQMSettingsCallback* qm_settings_callback)
- : VideoCodingModule(),
- sender_(new vcm::VideoSender(clock,
- &post_encode_callback_,
- encoder_rate_observer,
- qm_settings_callback)),
- receiver_(new vcm::VideoReceiver(clock, event_factory)),
- own_event_factory_(owns_event_factory ? event_factory : NULL) {}
-
- virtual ~VideoCodingModuleImpl() {
- sender_.reset();
- receiver_.reset();
- own_event_factory_.reset();
- }
-
- int64_t TimeUntilNextProcess() override {
- int64_t sender_time = sender_->TimeUntilNextProcess();
- int64_t receiver_time = receiver_->TimeUntilNextProcess();
- assert(sender_time >= 0);
- assert(receiver_time >= 0);
- return VCM_MIN(sender_time, receiver_time);
- }
-
- int32_t Process() override {
- int32_t sender_return = sender_->Process();
- int32_t receiver_return = receiver_->Process();
- if (sender_return != VCM_OK)
- return sender_return;
- return receiver_return;
- }
-
- int32_t RegisterSendCodec(const VideoCodec* sendCodec,
- uint32_t numberOfCores,
- uint32_t maxPayloadSize) override {
- return sender_->RegisterSendCodec(sendCodec, numberOfCores, maxPayloadSize);
- }
-
- const VideoCodec& GetSendCodec() const override {
- return sender_->GetSendCodec();
- }
-
- // DEPRECATED.
- int32_t SendCodec(VideoCodec* currentSendCodec) const override {
- return sender_->SendCodecBlocking(currentSendCodec);
- }
-
- // DEPRECATED.
- VideoCodecType SendCodec() const override {
- return sender_->SendCodecBlocking();
- }
-
- int32_t RegisterExternalEncoder(VideoEncoder* externalEncoder,
- uint8_t payloadType,
- bool internalSource) override {
- return sender_->RegisterExternalEncoder(
- externalEncoder, payloadType, internalSource);
- }
-
- int Bitrate(unsigned int* bitrate) const override {
- return sender_->Bitrate(bitrate);
- }
-
- int FrameRate(unsigned int* framerate) const override {
- return sender_->FrameRate(framerate);
- }
-
- int32_t SetChannelParameters(uint32_t target_bitrate, // bits/s.
- uint8_t lossRate,
- int64_t rtt) override {
- return sender_->SetChannelParameters(target_bitrate, lossRate, rtt);
- }
-
- int32_t RegisterTransportCallback(
- VCMPacketizationCallback* transport) override {
- return sender_->RegisterTransportCallback(transport);
- }
-
- int32_t RegisterSendStatisticsCallback(
- VCMSendStatisticsCallback* sendStats) override {
- return sender_->RegisterSendStatisticsCallback(sendStats);
- }
-
- int32_t RegisterProtectionCallback(
- VCMProtectionCallback* protection) override {
- return sender_->RegisterProtectionCallback(protection);
- }
-
- int32_t SetVideoProtection(VCMVideoProtection videoProtection,
- bool enable) override {
- // TODO(pbos): Remove enable from receive-side protection modes as well.
- if (enable)
- sender_->SetVideoProtection(videoProtection);
- return receiver_->SetVideoProtection(videoProtection, enable);
- }
-
- int32_t AddVideoFrame(const VideoFrame& videoFrame,
- const VideoContentMetrics* contentMetrics,
- const CodecSpecificInfo* codecSpecificInfo) override {
- return sender_->AddVideoFrame(
- videoFrame, contentMetrics, codecSpecificInfo);
- }
-
- int32_t IntraFrameRequest(int stream_index) override {
- return sender_->IntraFrameRequest(stream_index);
- }
-
- int32_t EnableFrameDropper(bool enable) override {
- return sender_->EnableFrameDropper(enable);
- }
-
- void SuspendBelowMinBitrate() override {
- return sender_->SuspendBelowMinBitrate();
- }
-
- bool VideoSuspended() const override { return sender_->VideoSuspended(); }
-
- int32_t RegisterReceiveCodec(const VideoCodec* receiveCodec,
- int32_t numberOfCores,
- bool requireKeyFrame) override {
- return receiver_->RegisterReceiveCodec(
- receiveCodec, numberOfCores, requireKeyFrame);
- }
-
- int32_t RegisterExternalDecoder(VideoDecoder* externalDecoder,
- uint8_t payloadType,
- bool internalRenderTiming) override {
- return receiver_->RegisterExternalDecoder(
- externalDecoder, payloadType, internalRenderTiming);
- }
-
- int32_t RegisterReceiveCallback(
- VCMReceiveCallback* receiveCallback) override {
- return receiver_->RegisterReceiveCallback(receiveCallback);
- }
-
- int32_t RegisterReceiveStatisticsCallback(
- VCMReceiveStatisticsCallback* receiveStats) override {
- return receiver_->RegisterReceiveStatisticsCallback(receiveStats);
- }
-
- int32_t RegisterDecoderTimingCallback(
- VCMDecoderTimingCallback* decoderTiming) override {
- return receiver_->RegisterDecoderTimingCallback(decoderTiming);
- }
-
- int32_t RegisterFrameTypeCallback(
- VCMFrameTypeCallback* frameTypeCallback) override {
- return receiver_->RegisterFrameTypeCallback(frameTypeCallback);
- }
-
- int32_t RegisterPacketRequestCallback(
- VCMPacketRequestCallback* callback) override {
- return receiver_->RegisterPacketRequestCallback(callback);
- }
-
- int RegisterRenderBufferSizeCallback(
- VCMRenderBufferSizeCallback* callback) override {
- return receiver_->RegisterRenderBufferSizeCallback(callback);
- }
-
- int32_t Decode(uint16_t maxWaitTimeMs) override {
- return receiver_->Decode(maxWaitTimeMs);
- }
-
- int32_t ResetDecoder() override { return receiver_->ResetDecoder(); }
-
- int32_t ReceiveCodec(VideoCodec* currentReceiveCodec) const override {
- return receiver_->ReceiveCodec(currentReceiveCodec);
- }
-
- VideoCodecType ReceiveCodec() const override {
- return receiver_->ReceiveCodec();
- }
-
- int32_t IncomingPacket(const uint8_t* incomingPayload,
- size_t payloadLength,
- const WebRtcRTPHeader& rtpInfo) override {
- return receiver_->IncomingPacket(incomingPayload, payloadLength, rtpInfo);
- }
-
- int32_t SetMinimumPlayoutDelay(uint32_t minPlayoutDelayMs) override {
- return receiver_->SetMinimumPlayoutDelay(minPlayoutDelayMs);
- }
-
- int32_t SetRenderDelay(uint32_t timeMS) override {
- return receiver_->SetRenderDelay(timeMS);
- }
-
- int32_t Delay() const override { return receiver_->Delay(); }
-
- uint32_t DiscardedPackets() const override {
- return receiver_->DiscardedPackets();
- }
-
- int SetReceiverRobustnessMode(ReceiverRobustness robustnessMode,
- VCMDecodeErrorMode errorMode) override {
- return receiver_->SetReceiverRobustnessMode(robustnessMode, errorMode);
- }
-
- void SetNackSettings(size_t max_nack_list_size,
- int max_packet_age_to_nack,
- int max_incomplete_time_ms) override {
- return receiver_->SetNackSettings(
- max_nack_list_size, max_packet_age_to_nack, max_incomplete_time_ms);
- }
-
- void SetDecodeErrorMode(VCMDecodeErrorMode decode_error_mode) override {
- return receiver_->SetDecodeErrorMode(decode_error_mode);
- }
-
- int SetMinReceiverDelay(int desired_delay_ms) override {
- return receiver_->SetMinReceiverDelay(desired_delay_ms);
- }
-
- int32_t SetReceiveChannelParameters(int64_t rtt) override {
- return receiver_->SetReceiveChannelParameters(rtt);
- }
-
- void RegisterPreDecodeImageCallback(EncodedImageCallback* observer) override {
- receiver_->RegisterPreDecodeImageCallback(observer);
- }
-
- void RegisterPostEncodeImageCallback(
- EncodedImageCallback* observer) override {
- post_encode_callback_.Register(observer);
- }
-
- void TriggerDecoderShutdown() override {
- receiver_->TriggerDecoderShutdown();
- }
-
- private:
- EncodedImageCallbackWrapper post_encode_callback_;
- // TODO(tommi): Change sender_ and receiver_ to be non-pointer members
- // (construction would then be 1 alloc instead of 3).
- rtc::scoped_ptr<vcm::VideoSender> sender_;
- rtc::scoped_ptr<vcm::VideoReceiver> receiver_;
- rtc::scoped_ptr<EventFactory> own_event_factory_;
-};
-} // namespace
-
-uint8_t VideoCodingModule::NumberOfCodecs() {
- return VCMCodecDataBase::NumberOfCodecs();
-}
-
-int32_t VideoCodingModule::Codec(uint8_t listId, VideoCodec* codec) {
- if (codec == NULL) {
- return VCM_PARAMETER_ERROR;
- }
- return VCMCodecDataBase::Codec(listId, codec) ? 0 : -1;
-}
-
-int32_t VideoCodingModule::Codec(VideoCodecType codecType, VideoCodec* codec) {
- if (codec == NULL) {
- return VCM_PARAMETER_ERROR;
- }
- return VCMCodecDataBase::Codec(codecType, codec) ? 0 : -1;
-}
-
-VideoCodingModule* VideoCodingModule::Create(
- Clock* clock,
- VideoEncoderRateObserver* encoder_rate_observer,
- VCMQMSettingsCallback* qm_settings_callback) {
- return new VideoCodingModuleImpl(clock, new EventFactoryImpl, true,
- encoder_rate_observer, qm_settings_callback);
-}
-
-VideoCodingModule* VideoCodingModule::Create(
- Clock* clock,
- EventFactory* event_factory) {
- assert(clock);
- assert(event_factory);
- return new VideoCodingModuleImpl(clock, event_factory, false, nullptr,
- nullptr);
-}
-
-void VideoCodingModule::Destroy(VideoCodingModule* module) {
- if (module != NULL) {
- delete static_cast<VideoCodingModuleImpl*>(module);
- }
-}
-} // namespace webrtc
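For orientation (the driver side never appears in this diff): a host thread
would pump the combined module with TimeUntilNextProcess()/Process(). The
sketch below is illustrative only; RunModuleLoop and keep_running are
hypothetical names, while the VCM calls and webrtc::SleepMs are assumed to
match the signatures in these files and in system_wrappers/include/sleep.h.

  #include "webrtc/modules/video_coding/main/interface/video_coding.h"
  #include "webrtc/system_wrappers/include/clock.h"
  #include "webrtc/system_wrappers/include/sleep.h"

  void RunModuleLoop(webrtc::Clock* clock, volatile bool* keep_running) {
    // Combined sender+receiver module; no rate observer or QM callback.
    webrtc::VideoCodingModule* vcm =
        webrtc::VideoCodingModule::Create(clock, nullptr, nullptr);
    while (*keep_running) {
      // Sleep until the earlier of the sender/receiver timers expires.
      int64_t wait_ms = vcm->TimeUntilNextProcess();
      if (wait_ms > 0)
        webrtc::SleepMs(static_cast<int>(wait_ms));
      vcm->Process();  // Runs the periodic sender and receiver work.
    }
    webrtc::VideoCodingModule::Destroy(vcm);
  }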
diff --git a/webrtc/modules/video_coding/main/source/video_coding_impl.h b/webrtc/modules/video_coding/main/source/video_coding_impl.h
deleted file mode 100644
index 57f38dad13..0000000000
--- a/webrtc/modules/video_coding/main/source/video_coding_impl.h
+++ /dev/null
@@ -1,237 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_VIDEO_CODING_IMPL_H_
-#define WEBRTC_MODULES_VIDEO_CODING_VIDEO_CODING_IMPL_H_
-
-#include "webrtc/modules/video_coding/main/interface/video_coding.h"
-
-#include <vector>
-
-#include "webrtc/base/thread_annotations.h"
-#include "webrtc/base/thread_checker.h"
-#include "webrtc/modules/video_coding/main/source/codec_database.h"
-#include "webrtc/modules/video_coding/main/source/frame_buffer.h"
-#include "webrtc/modules/video_coding/main/source/generic_decoder.h"
-#include "webrtc/modules/video_coding/main/source/generic_encoder.h"
-#include "webrtc/modules/video_coding/main/source/jitter_buffer.h"
-#include "webrtc/modules/video_coding/main/source/media_optimization.h"
-#include "webrtc/modules/video_coding/main/source/receiver.h"
-#include "webrtc/modules/video_coding/main/source/timing.h"
-#include "webrtc/modules/video_coding/utility/include/qp_parser.h"
-#include "webrtc/system_wrappers/include/clock.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-
-namespace webrtc {
-
-class EncodedFrameObserver;
-
-namespace vcm {
-
-class VCMProcessTimer {
- public:
- VCMProcessTimer(int64_t periodMs, Clock* clock)
- : _clock(clock),
- _periodMs(periodMs),
- _latestMs(_clock->TimeInMilliseconds()) {}
- int64_t Period() const;
- int64_t TimeUntilProcess() const;
- void Processed();
-
- private:
- Clock* _clock;
- int64_t _periodMs;
- int64_t _latestMs;
-};
-
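A note on VCMProcessTimer: the Process() implementations later in this diff
poll it with the pattern sketched below (illustrative only; the timer stands
for any of the timer members declared in this header).

  void PollTimer(webrtc::vcm::VCMProcessTimer* timer) {
    if (timer->TimeUntilProcess() == 0) {
      timer->Processed();  // Marks this period handled and re-arms the timer.
      // ... the periodic work guarded by this timer goes here ...
    }
  }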
-class VideoSender {
- public:
- typedef VideoCodingModule::SenderNackMode SenderNackMode;
-
- VideoSender(Clock* clock,
- EncodedImageCallback* post_encode_callback,
- VideoEncoderRateObserver* encoder_rate_observer,
- VCMQMSettingsCallback* qm_settings_callback);
-
- ~VideoSender();
-
- // Register the send codec to be used.
- // This method must be called on the construction thread.
- int32_t RegisterSendCodec(const VideoCodec* sendCodec,
- uint32_t numberOfCores,
- uint32_t maxPayloadSize);
- // Non-blocking access to the currently active send codec configuration.
- // Must be called from the same thread as the VideoSender instance was
- // created on.
- const VideoCodec& GetSendCodec() const;
-
- // Get a copy of the currently configured send codec.
- // This method acquires a lock to copy the current configuration out,
- // so it can block, and the returned information is not guaranteed to be
- // accurate by the time it returns. Prefer GetSendCodec() and make
- // decisions about the current codec on that thread instead.
- int32_t SendCodecBlocking(VideoCodec* currentSendCodec) const;
-
- // Same as SendCodecBlocking. Try to use GetSendCodec() instead.
- VideoCodecType SendCodecBlocking() const;
-
- int32_t RegisterExternalEncoder(VideoEncoder* externalEncoder,
- uint8_t payloadType,
- bool internalSource);
-
- int Bitrate(unsigned int* bitrate) const;
- int FrameRate(unsigned int* framerate) const;
-
- int32_t SetChannelParameters(uint32_t target_bitrate, // bits/s.
- uint8_t lossRate,
- int64_t rtt);
-
- int32_t RegisterTransportCallback(VCMPacketizationCallback* transport);
- int32_t RegisterSendStatisticsCallback(VCMSendStatisticsCallback* sendStats);
- int32_t RegisterProtectionCallback(VCMProtectionCallback* protection);
- void SetVideoProtection(VCMVideoProtection videoProtection);
-
- int32_t AddVideoFrame(const VideoFrame& videoFrame,
- const VideoContentMetrics* _contentMetrics,
- const CodecSpecificInfo* codecSpecificInfo);
-
- int32_t IntraFrameRequest(int stream_index);
- int32_t EnableFrameDropper(bool enable);
-
- void SuspendBelowMinBitrate();
- bool VideoSuspended() const;
-
- int64_t TimeUntilNextProcess();
- int32_t Process();
-
- private:
- void SetEncoderParameters(EncoderParameters params)
- EXCLUSIVE_LOCKS_REQUIRED(send_crit_);
-
- Clock* const clock_;
-
- rtc::scoped_ptr<CriticalSectionWrapper> process_crit_sect_;
- mutable rtc::CriticalSection send_crit_;
- VCMGenericEncoder* _encoder;
- VCMEncodedFrameCallback _encodedFrameCallback;
- std::vector<FrameType> _nextFrameTypes;
- media_optimization::MediaOptimization _mediaOpt;
- VCMSendStatisticsCallback* _sendStatsCallback GUARDED_BY(process_crit_sect_);
- VCMCodecDataBase _codecDataBase GUARDED_BY(send_crit_);
- bool frame_dropper_enabled_ GUARDED_BY(send_crit_);
- VCMProcessTimer _sendStatsTimer;
-
- // Must be accessed on the construction thread of VideoSender.
- VideoCodec current_codec_;
- rtc::ThreadChecker main_thread_;
-
- VCMQMSettingsCallback* const qm_settings_callback_;
- VCMProtectionCallback* protection_callback_;
-
- rtc::CriticalSection params_lock_;
- EncoderParameters encoder_params_ GUARDED_BY(params_lock_);
-};
-
-class VideoReceiver {
- public:
- typedef VideoCodingModule::ReceiverRobustness ReceiverRobustness;
-
- VideoReceiver(Clock* clock, EventFactory* event_factory);
- ~VideoReceiver();
-
- int32_t RegisterReceiveCodec(const VideoCodec* receiveCodec,
- int32_t numberOfCores,
- bool requireKeyFrame);
-
- int32_t RegisterExternalDecoder(VideoDecoder* externalDecoder,
- uint8_t payloadType,
- bool internalRenderTiming);
- int32_t RegisterReceiveCallback(VCMReceiveCallback* receiveCallback);
- int32_t RegisterReceiveStatisticsCallback(
- VCMReceiveStatisticsCallback* receiveStats);
- int32_t RegisterDecoderTimingCallback(
- VCMDecoderTimingCallback* decoderTiming);
- int32_t RegisterFrameTypeCallback(VCMFrameTypeCallback* frameTypeCallback);
- int32_t RegisterPacketRequestCallback(VCMPacketRequestCallback* callback);
- int RegisterRenderBufferSizeCallback(VCMRenderBufferSizeCallback* callback);
-
- int32_t Decode(uint16_t maxWaitTimeMs);
- int32_t ResetDecoder();
-
- int32_t ReceiveCodec(VideoCodec* currentReceiveCodec) const;
- VideoCodecType ReceiveCodec() const;
-
- int32_t IncomingPacket(const uint8_t* incomingPayload,
- size_t payloadLength,
- const WebRtcRTPHeader& rtpInfo);
- int32_t SetMinimumPlayoutDelay(uint32_t minPlayoutDelayMs);
- int32_t SetRenderDelay(uint32_t timeMS);
- int32_t Delay() const;
- uint32_t DiscardedPackets() const;
-
- int SetReceiverRobustnessMode(ReceiverRobustness robustnessMode,
- VCMDecodeErrorMode errorMode);
- void SetNackSettings(size_t max_nack_list_size,
- int max_packet_age_to_nack,
- int max_incomplete_time_ms);
-
- void SetDecodeErrorMode(VCMDecodeErrorMode decode_error_mode);
- int SetMinReceiverDelay(int desired_delay_ms);
-
- int32_t SetReceiveChannelParameters(int64_t rtt);
- int32_t SetVideoProtection(VCMVideoProtection videoProtection, bool enable);
-
- int64_t TimeUntilNextProcess();
- int32_t Process();
-
- void RegisterPreDecodeImageCallback(EncodedImageCallback* observer);
- void TriggerDecoderShutdown();
-
- protected:
- int32_t Decode(const webrtc::VCMEncodedFrame& frame)
- EXCLUSIVE_LOCKS_REQUIRED(_receiveCritSect);
- int32_t RequestKeyFrame();
- int32_t RequestSliceLossIndication(const uint64_t pictureID) const;
-
- private:
- Clock* const clock_;
- rtc::scoped_ptr<CriticalSectionWrapper> process_crit_sect_;
- CriticalSectionWrapper* _receiveCritSect;
- VCMTiming _timing;
- VCMReceiver _receiver;
- VCMDecodedFrameCallback _decodedFrameCallback;
- VCMFrameTypeCallback* _frameTypeCallback GUARDED_BY(process_crit_sect_);
- VCMReceiveStatisticsCallback* _receiveStatsCallback
- GUARDED_BY(process_crit_sect_);
- VCMDecoderTimingCallback* _decoderTimingCallback
- GUARDED_BY(process_crit_sect_);
- VCMPacketRequestCallback* _packetRequestCallback
- GUARDED_BY(process_crit_sect_);
- VCMRenderBufferSizeCallback* render_buffer_callback_
- GUARDED_BY(process_crit_sect_);
- VCMGenericDecoder* _decoder;
-#ifdef DEBUG_DECODER_BIT_STREAM
- FILE* _bitStreamBeforeDecoder;
-#endif
- VCMFrameBuffer _frameFromFile;
- bool _scheduleKeyRequest GUARDED_BY(process_crit_sect_);
- size_t max_nack_list_size_ GUARDED_BY(process_crit_sect_);
- EncodedImageCallback* pre_decode_image_callback_ GUARDED_BY(_receiveCritSect);
-
- VCMCodecDataBase _codecDataBase GUARDED_BY(_receiveCritSect);
- VCMProcessTimer _receiveStatsTimer;
- VCMProcessTimer _retransmissionTimer;
- VCMProcessTimer _keyRequestTimer;
- QpParser qp_parser_;
-};
-
-} // namespace vcm
-} // namespace webrtc
-#endif // WEBRTC_MODULES_VIDEO_CODING_VIDEO_CODING_IMPL_H_
diff --git a/webrtc/modules/video_coding/main/source/video_coding_robustness_unittest.cc b/webrtc/modules/video_coding/main/source/video_coding_robustness_unittest.cc
deleted file mode 100644
index ac6e16bd80..0000000000
--- a/webrtc/modules/video_coding/main/source/video_coding_robustness_unittest.cc
+++ /dev/null
@@ -1,238 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "testing/gmock/include/gmock/gmock.h"
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h"
-#include "webrtc/modules/video_coding/main/interface/mock/mock_vcm_callbacks.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding.h"
-#include "webrtc/modules/video_coding/main/test/test_util.h"
-#include "webrtc/system_wrappers/include/clock.h"
-
-namespace webrtc {
-
-using ::testing::Return;
-using ::testing::_;
-using ::testing::ElementsAre;
-using ::testing::AllOf;
-using ::testing::Args;
-using ::testing::Field;
-using ::testing::Pointee;
-using ::testing::NiceMock;
-using ::testing::Sequence;
-
-class VCMRobustnessTest : public ::testing::Test {
- protected:
- static const size_t kPayloadLen = 10;
-
- virtual void SetUp() {
- clock_.reset(new SimulatedClock(0));
- ASSERT_TRUE(clock_.get() != NULL);
- vcm_ = VideoCodingModule::Create(clock_.get(), &event_factory_);
- ASSERT_TRUE(vcm_ != NULL);
- const size_t kMaxNackListSize = 250;
- const int kMaxPacketAgeToNack = 450;
- vcm_->SetNackSettings(kMaxNackListSize, kMaxPacketAgeToNack, 0);
- ASSERT_EQ(0, vcm_->RegisterFrameTypeCallback(&frame_type_callback_));
- ASSERT_EQ(0, vcm_->RegisterPacketRequestCallback(&request_callback_));
- ASSERT_EQ(VCM_OK, vcm_->Codec(kVideoCodecVP8, &video_codec_));
- ASSERT_EQ(VCM_OK, vcm_->RegisterReceiveCodec(&video_codec_, 1));
- ASSERT_EQ(VCM_OK, vcm_->RegisterExternalDecoder(&decoder_,
- video_codec_.plType,
- true));
- }
-
- virtual void TearDown() {
- VideoCodingModule::Destroy(vcm_);
- }
-
- void InsertPacket(uint32_t timestamp,
- uint16_t seq_no,
- bool first,
- bool marker_bit,
- FrameType frame_type) {
- const uint8_t payload[kPayloadLen] = {0};
- WebRtcRTPHeader rtp_info;
- memset(&rtp_info, 0, sizeof(rtp_info));
- rtp_info.frameType = frame_type;
- rtp_info.header.timestamp = timestamp;
- rtp_info.header.sequenceNumber = seq_no;
- rtp_info.header.markerBit = marker_bit;
- rtp_info.header.payloadType = video_codec_.plType;
- rtp_info.type.Video.codec = kRtpVideoVp8;
- rtp_info.type.Video.codecHeader.VP8.InitRTPVideoHeaderVP8();
- rtp_info.type.Video.isFirstPacket = first;
-
- ASSERT_EQ(VCM_OK, vcm_->IncomingPacket(payload, kPayloadLen, rtp_info));
- }
-
- VideoCodingModule* vcm_;
- VideoCodec video_codec_;
- MockVCMFrameTypeCallback frame_type_callback_;
- MockPacketRequestCallback request_callback_;
- NiceMock<MockVideoDecoder> decoder_;
- NiceMock<MockVideoDecoder> decoderCopy_;
- rtc::scoped_ptr<SimulatedClock> clock_;
- NullEventFactory event_factory_;
-};
-
-TEST_F(VCMRobustnessTest, TestHardNack) {
- Sequence s;
- EXPECT_CALL(request_callback_, ResendPackets(_, 2))
- .With(Args<0, 1>(ElementsAre(6, 7)))
- .Times(1);
- for (int ts = 0; ts <= 6000; ts += 3000) {
- EXPECT_CALL(decoder_, Decode(AllOf(Field(&EncodedImage::_timeStamp, ts),
- Field(&EncodedImage::_length,
- kPayloadLen * 3),
- Field(&EncodedImage::_completeFrame,
- true)),
- false, _, _, _))
- .Times(1)
- .InSequence(s);
- }
-
- ASSERT_EQ(VCM_OK, vcm_->SetReceiverRobustnessMode(
- VideoCodingModule::kHardNack,
- kNoErrors));
-
- InsertPacket(0, 0, true, false, kVideoFrameKey);
- InsertPacket(0, 1, false, false, kVideoFrameKey);
- InsertPacket(0, 2, false, true, kVideoFrameKey);
- clock_->AdvanceTimeMilliseconds(1000 / 30);
-
- InsertPacket(3000, 3, true, false, kVideoFrameDelta);
- InsertPacket(3000, 4, false, false, kVideoFrameDelta);
- InsertPacket(3000, 5, false, true, kVideoFrameDelta);
- clock_->AdvanceTimeMilliseconds(1000 / 30);
-
- ASSERT_EQ(VCM_OK, vcm_->Decode(0));
- ASSERT_EQ(VCM_OK, vcm_->Decode(0));
- ASSERT_EQ(VCM_FRAME_NOT_READY, vcm_->Decode(0));
-
- clock_->AdvanceTimeMilliseconds(10);
-
- ASSERT_EQ(VCM_OK, vcm_->Process());
-
- ASSERT_EQ(VCM_FRAME_NOT_READY, vcm_->Decode(0));
-
- InsertPacket(6000, 8, false, true, kVideoFrameDelta);
- clock_->AdvanceTimeMilliseconds(10);
- ASSERT_EQ(VCM_OK, vcm_->Process());
-
- ASSERT_EQ(VCM_FRAME_NOT_READY, vcm_->Decode(0));
-
- InsertPacket(6000, 6, true, false, kVideoFrameDelta);
- InsertPacket(6000, 7, false, false, kVideoFrameDelta);
- clock_->AdvanceTimeMilliseconds(10);
- ASSERT_EQ(VCM_OK, vcm_->Process());
-
- ASSERT_EQ(VCM_OK, vcm_->Decode(0));
-}
-
-TEST_F(VCMRobustnessTest, TestHardNackNoneDecoded) {
- EXPECT_CALL(request_callback_, ResendPackets(_, _))
- .Times(0);
- EXPECT_CALL(frame_type_callback_, RequestKeyFrame())
- .Times(1);
-
- ASSERT_EQ(VCM_OK, vcm_->SetReceiverRobustnessMode(
- VideoCodingModule::kHardNack,
- kNoErrors));
-
- InsertPacket(3000, 3, true, false, kVideoFrameDelta);
- InsertPacket(3000, 4, false, false, kVideoFrameDelta);
- InsertPacket(3000, 5, false, true, kVideoFrameDelta);
-
- EXPECT_EQ(VCM_FRAME_NOT_READY, vcm_->Decode(0));
- ASSERT_EQ(VCM_OK, vcm_->Process());
-
- clock_->AdvanceTimeMilliseconds(10);
-
- EXPECT_EQ(VCM_FRAME_NOT_READY, vcm_->Decode(0));
- ASSERT_EQ(VCM_OK, vcm_->Process());
-}
-
-TEST_F(VCMRobustnessTest, TestModeNoneWithErrors) {
- EXPECT_CALL(decoder_, InitDecode(_, _)).Times(1);
- EXPECT_CALL(decoder_, Release()).Times(1);
- Sequence s1;
- EXPECT_CALL(request_callback_, ResendPackets(_, 1))
- .With(Args<0, 1>(ElementsAre(4)))
- .Times(0);
-
- EXPECT_CALL(decoder_, Copy())
- .Times(0);
- EXPECT_CALL(decoderCopy_, Copy())
- .Times(0);
-
- // Decode operations
- EXPECT_CALL(decoder_, Decode(AllOf(Field(&EncodedImage::_timeStamp, 0),
- Field(&EncodedImage::_completeFrame,
- true)),
- false, _, _, _))
- .Times(1)
- .InSequence(s1);
- EXPECT_CALL(decoder_, Decode(AllOf(Field(&EncodedImage::_timeStamp, 3000),
- Field(&EncodedImage::_completeFrame,
- false)),
- false, _, _, _))
- .Times(1)
- .InSequence(s1);
- EXPECT_CALL(decoder_, Decode(AllOf(Field(&EncodedImage::_timeStamp, 6000),
- Field(&EncodedImage::_completeFrame,
- true)),
- false, _, _, _))
- .Times(1)
- .InSequence(s1);
- EXPECT_CALL(decoder_, Decode(AllOf(Field(&EncodedImage::_timeStamp, 9000),
- Field(&EncodedImage::_completeFrame,
- true)),
- false, _, _, _))
- .Times(1)
- .InSequence(s1);
-
- ASSERT_EQ(VCM_OK, vcm_->SetReceiverRobustnessMode(
- VideoCodingModule::kNone,
- kWithErrors));
-
- InsertPacket(0, 0, true, false, kVideoFrameKey);
- InsertPacket(0, 1, false, false, kVideoFrameKey);
- InsertPacket(0, 2, false, true, kVideoFrameKey);
- EXPECT_EQ(VCM_OK, vcm_->Decode(0)); // Decode timestamp 0.
- EXPECT_EQ(VCM_OK, vcm_->Process()); // Expect no NACK list.
-
- clock_->AdvanceTimeMilliseconds(33);
- InsertPacket(3000, 3, true, false, kVideoFrameDelta);
- // Packet 4 missing
- InsertPacket(3000, 5, false, true, kVideoFrameDelta);
- EXPECT_EQ(VCM_FRAME_NOT_READY, vcm_->Decode(0));
- EXPECT_EQ(VCM_OK, vcm_->Process()); // Expect no NACK list.
-
- clock_->AdvanceTimeMilliseconds(33);
- InsertPacket(6000, 6, true, false, kVideoFrameDelta);
- InsertPacket(6000, 7, false, false, kVideoFrameDelta);
- InsertPacket(6000, 8, false, true, kVideoFrameDelta);
- EXPECT_EQ(VCM_OK, vcm_->Decode(0)); // Decode timestamp 3000 incomplete.
- EXPECT_EQ(VCM_OK, vcm_->Process()); // Expect no NACK list.
-
- clock_->AdvanceTimeMilliseconds(10);
- EXPECT_EQ(VCM_OK, vcm_->Decode(0)); // Decode timestamp 6000 complete.
- EXPECT_EQ(VCM_OK, vcm_->Process()); // Expect no NACK list.
-
- clock_->AdvanceTimeMilliseconds(23);
- InsertPacket(3000, 4, false, false, kVideoFrameDelta);
-
- InsertPacket(9000, 9, true, false, kVideoFrameDelta);
- InsertPacket(9000, 10, false, false, kVideoFrameDelta);
- InsertPacket(9000, 11, false, true, kVideoFrameDelta);
- EXPECT_EQ(VCM_OK, vcm_->Decode(0)); // Decode timestamp 9000 complete.
-}
-} // namespace webrtc
diff --git a/webrtc/modules/video_coding/main/source/video_receiver.cc b/webrtc/modules/video_coding/main/source/video_receiver.cc
deleted file mode 100644
index 77c069cf2d..0000000000
--- a/webrtc/modules/video_coding/main/source/video_receiver.cc
+++ /dev/null
@@ -1,578 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/base/checks.h"
-#include "webrtc/base/trace_event.h"
-#include "webrtc/common_types.h"
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
-#include "webrtc/modules/video_coding/main/source/encoded_frame.h"
-#include "webrtc/modules/video_coding/main/source/jitter_buffer.h"
-#include "webrtc/modules/video_coding/main/source/packet.h"
-#include "webrtc/modules/video_coding/main/source/video_coding_impl.h"
-#include "webrtc/system_wrappers/include/clock.h"
-#include "webrtc/system_wrappers/include/logging.h"
-
-// #define DEBUG_DECODER_BIT_STREAM
-
-namespace webrtc {
-namespace vcm {
-
-VideoReceiver::VideoReceiver(Clock* clock, EventFactory* event_factory)
- : clock_(clock),
- process_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- _receiveCritSect(CriticalSectionWrapper::CreateCriticalSection()),
- _timing(clock_),
- _receiver(&_timing, clock_, event_factory),
- _decodedFrameCallback(_timing, clock_),
- _frameTypeCallback(NULL),
- _receiveStatsCallback(NULL),
- _decoderTimingCallback(NULL),
- _packetRequestCallback(NULL),
- render_buffer_callback_(NULL),
- _decoder(NULL),
-#ifdef DEBUG_DECODER_BIT_STREAM
- _bitStreamBeforeDecoder(NULL),
-#endif
- _frameFromFile(),
- _scheduleKeyRequest(false),
- max_nack_list_size_(0),
- pre_decode_image_callback_(NULL),
- _codecDataBase(nullptr, nullptr),
- _receiveStatsTimer(1000, clock_),
- _retransmissionTimer(10, clock_),
- _keyRequestTimer(500, clock_) {
- assert(clock_);
-#ifdef DEBUG_DECODER_BIT_STREAM
- _bitStreamBeforeDecoder = fopen("decoderBitStream.bit", "wb");
-#endif
-}
-
-VideoReceiver::~VideoReceiver() {
- delete _receiveCritSect;
-#ifdef DEBUG_DECODER_BIT_STREAM
- fclose(_bitStreamBeforeDecoder);
-#endif
-}
-
-int32_t VideoReceiver::Process() {
- int32_t returnValue = VCM_OK;
-
- // Receive-side statistics
- if (_receiveStatsTimer.TimeUntilProcess() == 0) {
- _receiveStatsTimer.Processed();
- CriticalSectionScoped cs(process_crit_sect_.get());
- if (_receiveStatsCallback != NULL) {
- uint32_t bitRate;
- uint32_t frameRate;
- _receiver.ReceiveStatistics(&bitRate, &frameRate);
- _receiveStatsCallback->OnReceiveRatesUpdated(bitRate, frameRate);
- }
-
- if (_decoderTimingCallback != NULL) {
- int decode_ms;
- int max_decode_ms;
- int current_delay_ms;
- int target_delay_ms;
- int jitter_buffer_ms;
- int min_playout_delay_ms;
- int render_delay_ms;
- _timing.GetTimings(&decode_ms,
- &max_decode_ms,
- &current_delay_ms,
- &target_delay_ms,
- &jitter_buffer_ms,
- &min_playout_delay_ms,
- &render_delay_ms);
- _decoderTimingCallback->OnDecoderTiming(decode_ms,
- max_decode_ms,
- current_delay_ms,
- target_delay_ms,
- jitter_buffer_ms,
- min_playout_delay_ms,
- render_delay_ms);
- }
-
- // Size of render buffer.
- if (render_buffer_callback_) {
- int buffer_size_ms = _receiver.RenderBufferSizeMs();
- render_buffer_callback_->RenderBufferSizeMs(buffer_size_ms);
- }
- }
-
- // Key frame requests
- if (_keyRequestTimer.TimeUntilProcess() == 0) {
- _keyRequestTimer.Processed();
- bool request_key_frame = false;
- {
- CriticalSectionScoped cs(process_crit_sect_.get());
- request_key_frame = _scheduleKeyRequest && _frameTypeCallback != NULL;
- }
- if (request_key_frame) {
- const int32_t ret = RequestKeyFrame();
- if (ret != VCM_OK && returnValue == VCM_OK) {
- returnValue = ret;
- }
- }
- }
-
- // Packet retransmission requests
- // TODO(holmer): Add API for changing Process interval and make sure it's
- // disabled when NACK is off.
- if (_retransmissionTimer.TimeUntilProcess() == 0) {
- _retransmissionTimer.Processed();
- bool callback_registered = false;
- uint16_t length;
- {
- CriticalSectionScoped cs(process_crit_sect_.get());
- length = max_nack_list_size_;
- callback_registered = _packetRequestCallback != NULL;
- }
- if (callback_registered && length > 0) {
- // Collect sequence numbers from the default receiver.
- bool request_key_frame = false;
- std::vector<uint16_t> nackList = _receiver.NackList(&request_key_frame);
- int32_t ret = VCM_OK;
- if (request_key_frame) {
- ret = RequestKeyFrame();
- if (ret != VCM_OK && returnValue == VCM_OK) {
- returnValue = ret;
- }
- }
- if (ret == VCM_OK && !nackList.empty()) {
- CriticalSectionScoped cs(process_crit_sect_.get());
- if (_packetRequestCallback != NULL) {
- _packetRequestCallback->ResendPackets(&nackList[0], nackList.size());
- }
- }
- }
- }
-
- return returnValue;
-}
-
-int64_t VideoReceiver::TimeUntilNextProcess() {
- int64_t timeUntilNextProcess = _receiveStatsTimer.TimeUntilProcess();
- if (_receiver.NackMode() != kNoNack) {
- // We need a Process call more often if we are relying on
- // retransmissions.
- timeUntilNextProcess =
- VCM_MIN(timeUntilNextProcess, _retransmissionTimer.TimeUntilProcess());
- }
- timeUntilNextProcess =
- VCM_MIN(timeUntilNextProcess, _keyRequestTimer.TimeUntilProcess());
-
- return timeUntilNextProcess;
-}
-
-int32_t VideoReceiver::SetReceiveChannelParameters(int64_t rtt) {
- CriticalSectionScoped receiveCs(_receiveCritSect);
- _receiver.UpdateRtt(rtt);
- return 0;
-}
-
-// Enable or disable a video protection method.
-// Note: This API should be deprecated, as it does not offer a distinction
-// between the protection method and decoding with or without errors. If that
-// distinction is needed, use SetReceiverRobustnessMode instead.
-int32_t VideoReceiver::SetVideoProtection(VCMVideoProtection videoProtection,
- bool enable) {
- // By default, do not decode with errors.
- _receiver.SetDecodeErrorMode(kNoErrors);
- switch (videoProtection) {
- case kProtectionNack: {
- RTC_DCHECK(enable);
- _receiver.SetNackMode(kNack, -1, -1);
- break;
- }
-
- case kProtectionNackFEC: {
- CriticalSectionScoped cs(_receiveCritSect);
- RTC_DCHECK(enable);
- _receiver.SetNackMode(kNack, media_optimization::kLowRttNackMs, -1);
- _receiver.SetDecodeErrorMode(kNoErrors);
- break;
- }
- case kProtectionFEC:
- case kProtectionNone:
- // No receiver-side protection.
- RTC_DCHECK(enable);
- _receiver.SetNackMode(kNoNack, -1, -1);
- _receiver.SetDecodeErrorMode(kWithErrors);
- break;
- }
- return VCM_OK;
-}
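Illustrative, per the deprecation note above: a receiver that wants NACK with
strict no-error decoding would use the robustness API instead, e.g.

  receiver.SetReceiverRobustnessMode(webrtc::VideoCodingModule::kHardNack,
                                     webrtc::kNoErrors);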
-
-// Register a receive callback. Will be called whenever there is a new frame
-// ready for rendering.
-int32_t VideoReceiver::RegisterReceiveCallback(
- VCMReceiveCallback* receiveCallback) {
- CriticalSectionScoped cs(_receiveCritSect);
- _decodedFrameCallback.SetUserReceiveCallback(receiveCallback);
- return VCM_OK;
-}
-
-int32_t VideoReceiver::RegisterReceiveStatisticsCallback(
- VCMReceiveStatisticsCallback* receiveStats) {
- CriticalSectionScoped cs(process_crit_sect_.get());
- _receiver.RegisterStatsCallback(receiveStats);
- _receiveStatsCallback = receiveStats;
- return VCM_OK;
-}
-
-int32_t VideoReceiver::RegisterDecoderTimingCallback(
- VCMDecoderTimingCallback* decoderTiming) {
- CriticalSectionScoped cs(process_crit_sect_.get());
- _decoderTimingCallback = decoderTiming;
- return VCM_OK;
-}
-
-// Register an externally defined decoder/render object.
-// Can be a decoder only or a decoder coupled with a renderer.
-int32_t VideoReceiver::RegisterExternalDecoder(VideoDecoder* externalDecoder,
- uint8_t payloadType,
- bool internalRenderTiming) {
- CriticalSectionScoped cs(_receiveCritSect);
- if (externalDecoder == NULL) {
- // Make sure the VCM updates the decoder next time it decodes.
- _decoder = NULL;
- return _codecDataBase.DeregisterExternalDecoder(payloadType) ? 0 : -1;
- }
- return _codecDataBase.RegisterExternalDecoder(
- externalDecoder, payloadType, internalRenderTiming)
- ? 0
- : -1;
-}
-
-// Register a frame type request callback.
-int32_t VideoReceiver::RegisterFrameTypeCallback(
- VCMFrameTypeCallback* frameTypeCallback) {
- CriticalSectionScoped cs(process_crit_sect_.get());
- _frameTypeCallback = frameTypeCallback;
- return VCM_OK;
-}
-
-int32_t VideoReceiver::RegisterPacketRequestCallback(
- VCMPacketRequestCallback* callback) {
- CriticalSectionScoped cs(process_crit_sect_.get());
- _packetRequestCallback = callback;
- return VCM_OK;
-}
-
-int VideoReceiver::RegisterRenderBufferSizeCallback(
- VCMRenderBufferSizeCallback* callback) {
- CriticalSectionScoped cs(process_crit_sect_.get());
- render_buffer_callback_ = callback;
- return VCM_OK;
-}
-
-void VideoReceiver::TriggerDecoderShutdown() {
- _receiver.TriggerDecoderShutdown();
-}
-
-// Decode next frame, blocking.
-// Should be called as often as possible to get the most out of the decoder.
-int32_t VideoReceiver::Decode(uint16_t maxWaitTimeMs) {
- int64_t nextRenderTimeMs;
- bool supports_render_scheduling;
- {
- CriticalSectionScoped cs(_receiveCritSect);
- supports_render_scheduling = _codecDataBase.SupportsRenderScheduling();
- }
-
- VCMEncodedFrame* frame = _receiver.FrameForDecoding(
- maxWaitTimeMs, nextRenderTimeMs, supports_render_scheduling);
-
- if (frame == NULL) {
- return VCM_FRAME_NOT_READY;
- } else {
- CriticalSectionScoped cs(_receiveCritSect);
-
- // If this frame was too late, we should adjust the delay accordingly
- _timing.UpdateCurrentDelay(frame->RenderTimeMs(),
- clock_->TimeInMilliseconds());
-
- if (pre_decode_image_callback_) {
- EncodedImage encoded_image(frame->EncodedImage());
- int qp = -1;
- if (qp_parser_.GetQp(*frame, &qp)) {
- encoded_image.qp_ = qp;
- }
- pre_decode_image_callback_->Encoded(
- encoded_image, frame->CodecSpecific(), NULL);
- }
-
-#ifdef DEBUG_DECODER_BIT_STREAM
- if (_bitStreamBeforeDecoder != NULL) {
- // Write bit stream to file for debugging purposes
- if (fwrite(
- frame->Buffer(), 1, frame->Length(), _bitStreamBeforeDecoder) !=
- frame->Length()) {
- return -1;
- }
- }
-#endif
- const int32_t ret = Decode(*frame);
- _receiver.ReleaseFrame(frame);
- frame = NULL;
- if (ret != VCM_OK) {
- return ret;
- }
- }
- return VCM_OK;
-}
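A sketch of the decode-thread loop implied by the comment above; running is a
hypothetical flag and the 50 ms wait is an arbitrary example value.

  while (running) {
    const int32_t ret = receiver.Decode(50);  // Block up to 50 ms for a frame.
    if (ret != VCM_OK && ret != VCM_FRAME_NOT_READY)
      break;  // A real error, e.g. VCM_NO_CODEC_REGISTERED.
    // VCM_FRAME_NOT_READY just means no complete frame yet; try again.
  }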
-
-int32_t VideoReceiver::RequestSliceLossIndication(
- const uint64_t pictureID) const {
- TRACE_EVENT1("webrtc", "RequestSLI", "picture_id", pictureID);
- CriticalSectionScoped cs(process_crit_sect_.get());
- if (_frameTypeCallback != NULL) {
- const int32_t ret =
- _frameTypeCallback->SliceLossIndicationRequest(pictureID);
- if (ret < 0) {
- return ret;
- }
- } else {
- return VCM_MISSING_CALLBACK;
- }
- return VCM_OK;
-}
-
-int32_t VideoReceiver::RequestKeyFrame() {
- TRACE_EVENT0("webrtc", "RequestKeyFrame");
- CriticalSectionScoped process_cs(process_crit_sect_.get());
- if (_frameTypeCallback != NULL) {
- const int32_t ret = _frameTypeCallback->RequestKeyFrame();
- if (ret < 0) {
- return ret;
- }
- _scheduleKeyRequest = false;
- } else {
- return VCM_MISSING_CALLBACK;
- }
- return VCM_OK;
-}
-
-// Must be called from inside the receive side critical section.
-int32_t VideoReceiver::Decode(const VCMEncodedFrame& frame) {
- TRACE_EVENT_ASYNC_STEP1("webrtc",
- "Video",
- frame.TimeStamp(),
- "Decode",
- "type",
- frame.FrameType());
- // Change decoder if payload type has changed
- const bool renderTimingBefore = _codecDataBase.SupportsRenderScheduling();
- _decoder =
- _codecDataBase.GetDecoder(frame.PayloadType(), &_decodedFrameCallback);
- if (renderTimingBefore != _codecDataBase.SupportsRenderScheduling()) {
- // Make sure we reset the decode time estimate since it will
- // be zero for codecs without render timing.
- _timing.ResetDecodeTime();
- }
- if (_decoder == NULL) {
- return VCM_NO_CODEC_REGISTERED;
- }
- // Decode a frame
- int32_t ret = _decoder->Decode(frame, clock_->TimeInMilliseconds());
-
- // Check for failed decoding, run frame type request callback if needed.
- bool request_key_frame = false;
- if (ret < 0) {
- if (ret == VCM_ERROR_REQUEST_SLI) {
- return RequestSliceLossIndication(
- _decodedFrameCallback.LastReceivedPictureID() + 1);
- } else {
- request_key_frame = true;
- }
- } else if (ret == VCM_REQUEST_SLI) {
- ret = RequestSliceLossIndication(
- _decodedFrameCallback.LastReceivedPictureID() + 1);
- }
- if (!frame.Complete() || frame.MissingFrame()) {
- request_key_frame = true;
- ret = VCM_OK;
- }
- if (request_key_frame) {
- CriticalSectionScoped cs(process_crit_sect_.get());
- _scheduleKeyRequest = true;
- }
- TRACE_EVENT_ASYNC_END0("webrtc", "Video", frame.TimeStamp());
- return ret;
-}
-
-// Reset the decoder state
-int32_t VideoReceiver::ResetDecoder() {
- bool reset_key_request = false;
- {
- CriticalSectionScoped cs(_receiveCritSect);
- if (_decoder != NULL) {
- _receiver.Reset();
- _timing.Reset();
- reset_key_request = true;
- _decoder->Reset();
- }
- }
- if (reset_key_request) {
- CriticalSectionScoped cs(process_crit_sect_.get());
- _scheduleKeyRequest = false;
- }
- return VCM_OK;
-}
-
-// Register possible receive codecs; can be called multiple times.
-int32_t VideoReceiver::RegisterReceiveCodec(const VideoCodec* receiveCodec,
- int32_t numberOfCores,
- bool requireKeyFrame) {
- CriticalSectionScoped cs(_receiveCritSect);
- if (receiveCodec == NULL) {
- return VCM_PARAMETER_ERROR;
- }
- if (!_codecDataBase.RegisterReceiveCodec(
- receiveCodec, numberOfCores, requireKeyFrame)) {
- return -1;
- }
- return 0;
-}
-
-// Get current received codec
-int32_t VideoReceiver::ReceiveCodec(VideoCodec* currentReceiveCodec) const {
- CriticalSectionScoped cs(_receiveCritSect);
- if (currentReceiveCodec == NULL) {
- return VCM_PARAMETER_ERROR;
- }
- return _codecDataBase.ReceiveCodec(currentReceiveCodec) ? 0 : -1;
-}
-
-// Get current received codec
-VideoCodecType VideoReceiver::ReceiveCodec() const {
- CriticalSectionScoped cs(_receiveCritSect);
- return _codecDataBase.ReceiveCodec();
-}
-
-// Incoming packet from the network, parsed and ready for decode; non-blocking.
-int32_t VideoReceiver::IncomingPacket(const uint8_t* incomingPayload,
- size_t payloadLength,
- const WebRtcRTPHeader& rtpInfo) {
- if (rtpInfo.frameType == kVideoFrameKey) {
- TRACE_EVENT1("webrtc",
- "VCM::PacketKeyFrame",
- "seqnum",
- rtpInfo.header.sequenceNumber);
- }
- if (incomingPayload == NULL) {
- // The jitter buffer doesn't handle non-zero payload lengths for packets
- // without payload.
- // TODO(holmer): We should fix this in the jitter buffer.
- payloadLength = 0;
- }
- const VCMPacket packet(incomingPayload, payloadLength, rtpInfo);
- int32_t ret = _receiver.InsertPacket(packet, rtpInfo.type.Video.width,
- rtpInfo.type.Video.height);
- // TODO(holmer): Investigate if this somehow should use the key frame
- // request scheduling to throttle the requests.
- if (ret == VCM_FLUSH_INDICATOR) {
- RequestKeyFrame();
- ResetDecoder();
- } else if (ret < 0) {
- return ret;
- }
- return VCM_OK;
-}
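Illustrative hand-off from an RTP depacketizer, matching the signature above;
rtp_header is assumed to come from the RTP receiver's parser.

  if (receiver.IncomingPacket(payload, payload_length, rtp_header) != VCM_OK) {
    // Negative InsertPacket results are propagated here; the flush/key-frame
    // case is already handled inside IncomingPacket and returns VCM_OK.
  }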
-
-// Minimum playout delay (used for lip-sync). This is the minimum delay
-// required to sync with audio. It is not included in
-// VideoCodingModule::Delay(). Defaults to 0 ms.
-int32_t VideoReceiver::SetMinimumPlayoutDelay(uint32_t minPlayoutDelayMs) {
- _timing.set_min_playout_delay(minPlayoutDelayMs);
- return VCM_OK;
-}
-
-// The estimated delay caused by rendering. Defaults to
-// kDefaultRenderDelayMs = 10 ms.
-int32_t VideoReceiver::SetRenderDelay(uint32_t timeMS) {
- _timing.set_render_delay(timeMS);
- return VCM_OK;
-}
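Example lip-sync configuration using the two setters above; the 150 ms figure
is an arbitrary illustration, not a recommendation.

  receiver.SetMinimumPlayoutDelay(150);  // Hold video to match audio delay.
  receiver.SetRenderDelay(10);           // Equals kDefaultRenderDelayMs.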
-
-// Current video delay
-int32_t VideoReceiver::Delay() const { return _timing.TargetVideoDelay(); }
-
-uint32_t VideoReceiver::DiscardedPackets() const {
- return _receiver.DiscardedPackets();
-}
-
-int VideoReceiver::SetReceiverRobustnessMode(
- ReceiverRobustness robustnessMode,
- VCMDecodeErrorMode decode_error_mode) {
- CriticalSectionScoped cs(_receiveCritSect);
- switch (robustnessMode) {
- case VideoCodingModule::kNone:
- _receiver.SetNackMode(kNoNack, -1, -1);
- break;
- case VideoCodingModule::kHardNack:
- // Always wait for retransmissions (except when decoding with errors).
- _receiver.SetNackMode(kNack, -1, -1);
- break;
- case VideoCodingModule::kSoftNack:
-#if 1
- assert(false); // TODO(hlundin): Not completed.
- return VCM_NOT_IMPLEMENTED;
-#else
- // Enable hybrid NACK/FEC. Always wait for retransmissions and don't add
- // extra delay when RTT is above kLowRttNackMs.
- _receiver.SetNackMode(kNack, media_optimization::kLowRttNackMs, -1);
- break;
-#endif
- case VideoCodingModule::kReferenceSelection:
-#if 1
- assert(false); // TODO(hlundin): Not completed.
- return VCM_NOT_IMPLEMENTED;
-#else
- if (decode_error_mode == kNoErrors) {
- return VCM_PARAMETER_ERROR;
- }
- _receiver.SetNackMode(kNoNack, -1, -1);
- break;
-#endif
- }
- _receiver.SetDecodeErrorMode(decode_error_mode);
- return VCM_OK;
-}
-
-void VideoReceiver::SetDecodeErrorMode(VCMDecodeErrorMode decode_error_mode) {
- CriticalSectionScoped cs(_receiveCritSect);
- _receiver.SetDecodeErrorMode(decode_error_mode);
-}
-
-void VideoReceiver::SetNackSettings(size_t max_nack_list_size,
- int max_packet_age_to_nack,
- int max_incomplete_time_ms) {
- if (max_nack_list_size != 0) {
- CriticalSectionScoped process_cs(process_crit_sect_.get());
- max_nack_list_size_ = max_nack_list_size;
- }
- _receiver.SetNackSettings(
- max_nack_list_size, max_packet_age_to_nack, max_incomplete_time_ms);
-}
-
-int VideoReceiver::SetMinReceiverDelay(int desired_delay_ms) {
- return _receiver.SetMinReceiverDelay(desired_delay_ms);
-}
-
-void VideoReceiver::RegisterPreDecodeImageCallback(
- EncodedImageCallback* observer) {
- CriticalSectionScoped cs(_receiveCritSect);
- pre_decode_image_callback_ = observer;
-}
-
-} // namespace vcm
-} // namespace webrtc
diff --git a/webrtc/modules/video_coding/main/source/video_receiver_unittest.cc b/webrtc/modules/video_coding/main/source/video_receiver_unittest.cc
deleted file mode 100644
index 75ea29a1ec..0000000000
--- a/webrtc/modules/video_coding/main/source/video_receiver_unittest.cc
+++ /dev/null
@@ -1,211 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <vector>
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h"
-#include "webrtc/modules/video_coding/main/interface/mock/mock_vcm_callbacks.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding.h"
-#include "webrtc/modules/video_coding/main/source/video_coding_impl.h"
-#include "webrtc/modules/video_coding/main/test/test_util.h"
-#include "webrtc/system_wrappers/include/clock.h"
-
-using ::testing::_;
-using ::testing::NiceMock;
-
-namespace webrtc {
-namespace vcm {
-namespace {
-
-class TestVideoReceiver : public ::testing::Test {
- protected:
- static const int kUnusedPayloadType = 10;
-
- TestVideoReceiver() : clock_(0) {}
-
- virtual void SetUp() {
- receiver_.reset(new VideoReceiver(&clock_, &event_factory_));
- EXPECT_EQ(0, receiver_->RegisterExternalDecoder(&decoder_,
- kUnusedPayloadType, true));
- const size_t kMaxNackListSize = 250;
- const int kMaxPacketAgeToNack = 450;
- receiver_->SetNackSettings(kMaxNackListSize, kMaxPacketAgeToNack, 0);
-
- memset(&settings_, 0, sizeof(settings_));
- EXPECT_EQ(0, VideoCodingModule::Codec(kVideoCodecVP8, &settings_));
- settings_.plType = kUnusedPayloadType; // Use the mocked encoder.
- EXPECT_EQ(0, receiver_->RegisterReceiveCodec(&settings_, 1, true));
- }
-
- void InsertAndVerifyPaddingFrame(const uint8_t* payload,
- WebRtcRTPHeader* header) {
- ASSERT_TRUE(header != NULL);
- for (int j = 0; j < 5; ++j) {
- // Padding only packets are passed to the VCM with payload size 0.
- EXPECT_EQ(0, receiver_->IncomingPacket(payload, 0, *header));
- ++header->header.sequenceNumber;
- }
- EXPECT_EQ(0, receiver_->Process());
- EXPECT_CALL(decoder_, Decode(_, _, _, _, _)).Times(0);
- EXPECT_EQ(VCM_FRAME_NOT_READY, receiver_->Decode(0));
- }
-
- void InsertAndVerifyDecodableFrame(const uint8_t* payload,
- size_t length,
- WebRtcRTPHeader* header) {
- ASSERT_TRUE(header != NULL);
- EXPECT_EQ(0, receiver_->IncomingPacket(payload, length, *header));
- ++header->header.sequenceNumber;
- EXPECT_CALL(packet_request_callback_, ResendPackets(_, _)).Times(0);
- EXPECT_EQ(0, receiver_->Process());
- EXPECT_CALL(decoder_, Decode(_, _, _, _, _)).Times(1);
- EXPECT_EQ(0, receiver_->Decode(0));
- }
-
- SimulatedClock clock_;
- NullEventFactory event_factory_;
- VideoCodec settings_;
- NiceMock<MockVideoDecoder> decoder_;
- NiceMock<MockPacketRequestCallback> packet_request_callback_;
-
- rtc::scoped_ptr<VideoReceiver> receiver_;
-};
-
-TEST_F(TestVideoReceiver, PaddingOnlyFrames) {
- EXPECT_EQ(0, receiver_->SetVideoProtection(kProtectionNack, true));
- EXPECT_EQ(
- 0, receiver_->RegisterPacketRequestCallback(&packet_request_callback_));
- const size_t kPaddingSize = 220;
- const uint8_t payload[kPaddingSize] = {0};
- WebRtcRTPHeader header;
- memset(&header, 0, sizeof(header));
- header.frameType = kEmptyFrame;
- header.header.markerBit = false;
- header.header.paddingLength = kPaddingSize;
- header.header.payloadType = kUnusedPayloadType;
- header.header.ssrc = 1;
- header.header.headerLength = 12;
- header.type.Video.codec = kRtpVideoVp8;
- for (int i = 0; i < 10; ++i) {
- EXPECT_CALL(packet_request_callback_, ResendPackets(_, _)).Times(0);
- InsertAndVerifyPaddingFrame(payload, &header);
- clock_.AdvanceTimeMilliseconds(33);
- header.header.timestamp += 3000;
- }
-}
-
-TEST_F(TestVideoReceiver, PaddingOnlyFramesWithLosses) {
- EXPECT_EQ(0, receiver_->SetVideoProtection(kProtectionNack, true));
- EXPECT_EQ(
- 0, receiver_->RegisterPacketRequestCallback(&packet_request_callback_));
- const size_t kFrameSize = 1200;
- const size_t kPaddingSize = 220;
- const uint8_t payload[kFrameSize] = {0};
- WebRtcRTPHeader header;
- memset(&header, 0, sizeof(header));
- header.frameType = kEmptyFrame;
- header.header.markerBit = false;
- header.header.paddingLength = kPaddingSize;
- header.header.payloadType = kUnusedPayloadType;
- header.header.ssrc = 1;
- header.header.headerLength = 12;
- header.type.Video.codec = kRtpVideoVp8;
- // Insert one video frame to get one frame decoded.
- header.frameType = kVideoFrameKey;
- header.type.Video.isFirstPacket = true;
- header.header.markerBit = true;
- InsertAndVerifyDecodableFrame(payload, kFrameSize, &header);
- clock_.AdvanceTimeMilliseconds(33);
- header.header.timestamp += 3000;
-
- header.frameType = kEmptyFrame;
- header.type.Video.isFirstPacket = false;
- header.header.markerBit = false;
- // Insert padding frames.
- for (int i = 0; i < 10; ++i) {
- // Lose one packet from the 6th frame.
- if (i == 5) {
- ++header.header.sequenceNumber;
- }
- // Lose the 4th frame.
- if (i == 3) {
- header.header.sequenceNumber += 5;
- } else {
- if (i > 3 && i < 5) {
- EXPECT_CALL(packet_request_callback_, ResendPackets(_, 5)).Times(1);
- } else if (i >= 5) {
- EXPECT_CALL(packet_request_callback_, ResendPackets(_, 6)).Times(1);
- } else {
- EXPECT_CALL(packet_request_callback_, ResendPackets(_, _)).Times(0);
- }
- InsertAndVerifyPaddingFrame(payload, &header);
- }
- clock_.AdvanceTimeMilliseconds(33);
- header.header.timestamp += 3000;
- }
-}
-
-TEST_F(TestVideoReceiver, PaddingOnlyAndVideo) {
- EXPECT_EQ(0, receiver_->SetVideoProtection(kProtectionNack, true));
- EXPECT_EQ(
- 0, receiver_->RegisterPacketRequestCallback(&packet_request_callback_));
- const size_t kFrameSize = 1200;
- const size_t kPaddingSize = 220;
- const uint8_t payload[kFrameSize] = {0};
- WebRtcRTPHeader header;
- memset(&header, 0, sizeof(header));
- header.frameType = kEmptyFrame;
- header.type.Video.isFirstPacket = false;
- header.header.markerBit = false;
- header.header.paddingLength = kPaddingSize;
- header.header.payloadType = kUnusedPayloadType;
- header.header.ssrc = 1;
- header.header.headerLength = 12;
- header.type.Video.codec = kRtpVideoVp8;
- header.type.Video.codecHeader.VP8.pictureId = -1;
- header.type.Video.codecHeader.VP8.tl0PicIdx = -1;
- for (int i = 0; i < 3; ++i) {
- // Insert 2 video frames.
- for (int j = 0; j < 2; ++j) {
- if (i == 0 && j == 0) // First frame should be a key frame.
- header.frameType = kVideoFrameKey;
- else
- header.frameType = kVideoFrameDelta;
- header.type.Video.isFirstPacket = true;
- header.header.markerBit = true;
- InsertAndVerifyDecodableFrame(payload, kFrameSize, &header);
- clock_.AdvanceTimeMilliseconds(33);
- header.header.timestamp += 3000;
- }
-
- // Insert 2 padding only frames.
- header.frameType = kEmptyFrame;
- header.type.Video.isFirstPacket = false;
- header.header.markerBit = false;
- for (int j = 0; j < 2; ++j) {
- InsertAndVerifyPaddingFrame(payload, &header);
- clock_.AdvanceTimeMilliseconds(33);
- header.header.timestamp += 3000;
- }
- }
-}
-
-TEST_F(TestVideoReceiver, ReceiverDelay) {
- EXPECT_EQ(0, receiver_->SetMinReceiverDelay(0));
- EXPECT_EQ(0, receiver_->SetMinReceiverDelay(5000));
- EXPECT_EQ(-1, receiver_->SetMinReceiverDelay(-100));
- EXPECT_EQ(-1, receiver_->SetMinReceiverDelay(10010));
-}
-
-} // namespace
-} // namespace vcm
-} // namespace webrtc
diff --git a/webrtc/modules/video_coding/main/source/video_sender.cc b/webrtc/modules/video_coding/main/source/video_sender.cc
deleted file mode 100644
index 98230b1e9e..0000000000
--- a/webrtc/modules/video_coding/main/source/video_sender.cc
+++ /dev/null
@@ -1,376 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/common_types.h"
-
-#include <algorithm> // std::max
-
-#include "webrtc/base/checks.h"
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
-#include "webrtc/modules/video_coding/main/source/encoded_frame.h"
-#include "webrtc/modules/video_coding/main/source/video_coding_impl.h"
-#include "webrtc/modules/video_coding/utility/include/quality_scaler.h"
-#include "webrtc/system_wrappers/include/clock.h"
-#include "webrtc/system_wrappers/include/logging.h"
-
-namespace webrtc {
-namespace vcm {
-
-VideoSender::VideoSender(Clock* clock,
- EncodedImageCallback* post_encode_callback,
- VideoEncoderRateObserver* encoder_rate_observer,
- VCMQMSettingsCallback* qm_settings_callback)
- : clock_(clock),
- process_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- _encoder(nullptr),
- _encodedFrameCallback(post_encode_callback),
- _nextFrameTypes(1, kVideoFrameDelta),
- _mediaOpt(clock_),
- _sendStatsCallback(nullptr),
- _codecDataBase(encoder_rate_observer, &_encodedFrameCallback),
- frame_dropper_enabled_(true),
- _sendStatsTimer(1000, clock_),
- current_codec_(),
- qm_settings_callback_(qm_settings_callback),
- protection_callback_(nullptr),
- encoder_params_({0, 0, 0, 0}) {
- // Allow VideoSender to be created on one thread but used on another, post
- // construction. This is currently how this class is being used by at least
- // one external project (diffractor).
- _mediaOpt.EnableQM(qm_settings_callback_ != nullptr);
- _mediaOpt.Reset();
- main_thread_.DetachFromThread();
-}
-
-VideoSender::~VideoSender() {}
-
-int32_t VideoSender::Process() {
- int32_t returnValue = VCM_OK;
-
- if (_sendStatsTimer.TimeUntilProcess() == 0) {
- _sendStatsTimer.Processed();
- CriticalSectionScoped cs(process_crit_sect_.get());
- if (_sendStatsCallback != nullptr) {
- uint32_t bitRate = _mediaOpt.SentBitRate();
- uint32_t frameRate = _mediaOpt.SentFrameRate();
- _sendStatsCallback->SendStatistics(bitRate, frameRate);
- }
- }
-
- {
- rtc::CritScope cs(&params_lock_);
- // Force an encoder parameters update, so that incoming frame rate is
- // updated even if bandwidth hasn't changed.
- encoder_params_.input_frame_rate = _mediaOpt.InputFrameRate();
- }
-
- return returnValue;
-}
-
-int64_t VideoSender::TimeUntilNextProcess() {
- return _sendStatsTimer.TimeUntilProcess();
-}
-
-// Register the send codec to be used.
-int32_t VideoSender::RegisterSendCodec(const VideoCodec* sendCodec,
- uint32_t numberOfCores,
- uint32_t maxPayloadSize) {
- RTC_DCHECK(main_thread_.CalledOnValidThread());
- rtc::CritScope lock(&send_crit_);
- if (sendCodec == nullptr) {
- return VCM_PARAMETER_ERROR;
- }
-
- bool ret =
- _codecDataBase.SetSendCodec(sendCodec, numberOfCores, maxPayloadSize);
-
- // Update encoder regardless of result to make sure that we're not holding on
- // to a deleted instance.
- _encoder = _codecDataBase.GetEncoder();
- // Cache the current codec here so it can be fetched from this thread
- // without requiring the send_crit_ lock.
- current_codec_ = *sendCodec;
-
- if (!ret) {
- LOG(LS_ERROR) << "Failed to initialize set encoder with payload name '"
- << sendCodec->plName << "'.";
- return VCM_CODEC_ERROR;
- }
-
- int numLayers;
- if (sendCodec->codecType == kVideoCodecVP8) {
- numLayers = sendCodec->codecSpecific.VP8.numberOfTemporalLayers;
- } else if (sendCodec->codecType == kVideoCodecVP9) {
- numLayers = sendCodec->codecSpecific.VP9.numberOfTemporalLayers;
- } else {
- numLayers = 1;
- }
-
- // Disable the frame dropper when screensharing with temporal layers.
- bool disable_frame_dropper =
- numLayers > 1 && sendCodec->mode == kScreensharing;
- if (disable_frame_dropper) {
- _mediaOpt.EnableFrameDropper(false);
- } else if (frame_dropper_enabled_) {
- _mediaOpt.EnableFrameDropper(true);
- }
- _nextFrameTypes.clear();
- _nextFrameTypes.resize(VCM_MAX(sendCodec->numberOfSimulcastStreams, 1),
- kVideoFrameDelta);
-
- _mediaOpt.SetEncodingData(sendCodec->codecType,
- sendCodec->maxBitrate * 1000,
- sendCodec->startBitrate * 1000,
- sendCodec->width,
- sendCodec->height,
- sendCodec->maxFramerate,
- numLayers,
- maxPayloadSize);
- return VCM_OK;
-}
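Illustrative send-side setup: fill a VideoCodec with VP8 defaults via
VideoCodingModule::Codec() (defined earlier in this diff), then register it.
The resolution, bitrates and payload size are arbitrary example values; note
that startBitrate/maxBitrate are in kbps, per the *1000 scaling above.

  webrtc::VideoCodec codec;
  if (webrtc::VideoCodingModule::Codec(webrtc::kVideoCodecVP8, &codec) == 0) {
    codec.width = 640;
    codec.height = 480;
    codec.startBitrate = 300;  // kbps.
    codec.maxBitrate = 1000;   // kbps.
    sender.RegisterSendCodec(&codec, 1 /* cores */, 1200 /* max payload */);
  }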
-
-const VideoCodec& VideoSender::GetSendCodec() const {
- RTC_DCHECK(main_thread_.CalledOnValidThread());
- return current_codec_;
-}
-
-int32_t VideoSender::SendCodecBlocking(VideoCodec* currentSendCodec) const {
- rtc::CritScope lock(&send_crit_);
- if (currentSendCodec == nullptr) {
- return VCM_PARAMETER_ERROR;
- }
- return _codecDataBase.SendCodec(currentSendCodec) ? 0 : -1;
-}
-
-VideoCodecType VideoSender::SendCodecBlocking() const {
- rtc::CritScope lock(&send_crit_);
- return _codecDataBase.SendCodec();
-}
-
-// Register an external encoder object.
-// This cannot be used together with external encoder callbacks.
-int32_t VideoSender::RegisterExternalEncoder(VideoEncoder* externalEncoder,
- uint8_t payloadType,
- bool internalSource /*= false*/) {
- RTC_DCHECK(main_thread_.CalledOnValidThread());
-
- rtc::CritScope lock(&send_crit_);
-
- if (externalEncoder == nullptr) {
- bool wasSendCodec = false;
- const bool ret =
- _codecDataBase.DeregisterExternalEncoder(payloadType, &wasSendCodec);
- if (wasSendCodec) {
- // Make sure the VCM doesn't use the de-registered codec
- _encoder = nullptr;
- }
- return ret ? 0 : -1;
- }
- _codecDataBase.RegisterExternalEncoder(
- externalEncoder, payloadType, internalSource);
- return 0;
-}
-
-// Get encode bitrate
-int VideoSender::Bitrate(unsigned int* bitrate) const {
- RTC_DCHECK(main_thread_.CalledOnValidThread());
- // Since we're running on the thread that's the only thread known to modify
- // the value of _encoder, we don't need to grab the lock here.
-
- if (!_encoder)
- return VCM_UNINITIALIZED;
- *bitrate = _encoder->GetEncoderParameters().target_bitrate;
- return 0;
-}
-
-// Get encode frame rate
-int VideoSender::FrameRate(unsigned int* framerate) const {
- RTC_DCHECK(main_thread_.CalledOnValidThread());
- // Since we're running on the thread that's the only thread known to modify
- // the value of _encoder, we don't need to grab the lock here.
-
- if (!_encoder)
- return VCM_UNINITIALIZED;
-
- *framerate = _encoder->GetEncoderParameters().input_frame_rate;
- return 0;
-}
-
-int32_t VideoSender::SetChannelParameters(uint32_t target_bitrate,
- uint8_t lossRate,
- int64_t rtt) {
- uint32_t target_rate =
- _mediaOpt.SetTargetRates(target_bitrate, lossRate, rtt,
- protection_callback_, qm_settings_callback_);
-
- uint32_t input_frame_rate = _mediaOpt.InputFrameRate();
-
- rtc::CritScope cs(&params_lock_);
- encoder_params_ = {target_rate, lossRate, rtt, input_frame_rate};
-
- return VCM_OK;
-}
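Illustrative: a bandwidth-estimator observer forwarding a fresh estimate into
the sender via the setter above. All numbers are example values; the loss
argument is assumed to be the RTCP fraction-lost byte (0-255).

  sender.SetChannelParameters(500000 /* bits/s */, 13 /* ~5% loss */,
                              80 /* rtt ms */);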
-
-void VideoSender::SetEncoderParameters(EncoderParameters params) {
- if (params.target_bitrate == 0)
- return;
-
- if (params.input_frame_rate == 0) {
- // No frame rate estimate available, use default.
- params.input_frame_rate = current_codec_.maxFramerate;
- }
- if (_encoder != nullptr)
- _encoder->SetEncoderParameters(params);
-}
-
-int32_t VideoSender::RegisterTransportCallback(
- VCMPacketizationCallback* transport) {
- rtc::CritScope lock(&send_crit_);
- _encodedFrameCallback.SetMediaOpt(&_mediaOpt);
- _encodedFrameCallback.SetTransportCallback(transport);
- return VCM_OK;
-}
-
-// Register a video output information callback, which will be called to
-// deliver information about the video stream produced by the encoder, for
-// instance the average frame rate and bit rate.
-int32_t VideoSender::RegisterSendStatisticsCallback(
- VCMSendStatisticsCallback* sendStats) {
- CriticalSectionScoped cs(process_crit_sect_.get());
- _sendStatsCallback = sendStats;
- return VCM_OK;
-}
-
-// Register a video protection callback which will be called to deliver the
-// requested FEC rate and NACK status (on/off).
-// Note: this callback is assumed to be registered only once, and before it
-// is used by this class.
-int32_t VideoSender::RegisterProtectionCallback(
- VCMProtectionCallback* protection_callback) {
- RTC_DCHECK(protection_callback == nullptr || protection_callback_ == nullptr);
- protection_callback_ = protection_callback;
- return VCM_OK;
-}
-
-// Enable or disable a video protection method.
-void VideoSender::SetVideoProtection(VCMVideoProtection videoProtection) {
- rtc::CritScope lock(&send_crit_);
- switch (videoProtection) {
- case kProtectionNone:
- _mediaOpt.SetProtectionMethod(media_optimization::kNone);
- break;
- case kProtectionNack:
- _mediaOpt.SetProtectionMethod(media_optimization::kNack);
- break;
- case kProtectionNackFEC:
- _mediaOpt.SetProtectionMethod(media_optimization::kNackFec);
- break;
- case kProtectionFEC:
- _mediaOpt.SetProtectionMethod(media_optimization::kFec);
- break;
- }
-}
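-
-// Illustrative only: each VCMVideoProtection value above maps directly onto
-// a media_optimization protection method, so enabling NACK-based protection
-// from calling code is simply:
-//
-//   sender->SetVideoProtection(kProtectionNack);
-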
-// Add one raw video frame to the encoder, blocking.
-int32_t VideoSender::AddVideoFrame(const VideoFrame& videoFrame,
- const VideoContentMetrics* contentMetrics,
- const CodecSpecificInfo* codecSpecificInfo) {
- EncoderParameters encoder_params;
- {
- rtc::CritScope lock(&params_lock_);
- encoder_params = encoder_params_;
- }
- rtc::CritScope lock(&send_crit_);
- if (_encoder == nullptr)
- return VCM_UNINITIALIZED;
- SetEncoderParameters(encoder_params);
- // TODO(holmer): Add support for dropping frames per stream. Currently we
- // only have one frame dropper for all streams.
- if (_nextFrameTypes[0] == kEmptyFrame) {
- return VCM_OK;
- }
- if (_mediaOpt.DropFrame()) {
- _encoder->OnDroppedFrame();
- return VCM_OK;
- }
- _mediaOpt.UpdateContentData(contentMetrics);
- // TODO(pbos): Make sure setting send codec is synchronized with video
- // processing so frame size always matches.
- if (!_codecDataBase.MatchesCurrentResolution(videoFrame.width(),
- videoFrame.height())) {
- LOG(LS_ERROR) << "Incoming frame doesn't match set resolution. Dropping.";
- return VCM_PARAMETER_ERROR;
- }
- VideoFrame converted_frame = videoFrame;
- if (converted_frame.native_handle() && !_encoder->SupportsNativeHandle()) {
- // This module only supports software encoding.
- // TODO(pbos): Offload conversion from the encoder thread.
- converted_frame = converted_frame.ConvertNativeToI420Frame();
- RTC_CHECK(!converted_frame.IsZeroSize())
- << "Frame conversion failed, won't be able to encode frame.";
- }
- int32_t ret =
- _encoder->Encode(converted_frame, codecSpecificInfo, _nextFrameTypes);
- if (ret < 0) {
- LOG(LS_ERROR) << "Failed to encode frame. Error code: " << ret;
- return ret;
- }
- for (size_t i = 0; i < _nextFrameTypes.size(); ++i) {
- _nextFrameTypes[i] = kVideoFrameDelta; // Default frame type.
- }
- if (qm_settings_callback_)
- qm_settings_callback_->SetTargetFramerate(_encoder->GetTargetFramerate());
- return VCM_OK;
-}
-
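-// A minimal caller sketch (illustrative; "frame" stands for a captured
-// VideoFrame): a non-OK return from AddVideoFrame means the sender is
-// uninitialized, the frame resolution does not match the configured codec,
-// or encoding failed; frames dropped by the rate controller still return
-// VCM_OK.
-//
-//   if (sender->AddVideoFrame(frame, nullptr, nullptr) != VCM_OK) {
-//     // Handle rejected frame.
-//   }
-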
-int32_t VideoSender::IntraFrameRequest(int stream_index) {
- rtc::CritScope lock(&send_crit_);
- if (stream_index < 0 ||
- static_cast<unsigned int>(stream_index) >= _nextFrameTypes.size()) {
- return -1;
- }
- _nextFrameTypes[stream_index] = kVideoFrameKey;
- if (_encoder != nullptr && _encoder->InternalSource()) {
-    // Try to request the frame if we have an external encoder with an
-    // internal source, since AddVideoFrame will never be called.
- if (_encoder->RequestFrame(_nextFrameTypes) == WEBRTC_VIDEO_CODEC_OK) {
- _nextFrameTypes[stream_index] = kVideoFrameDelta;
- }
- }
- return VCM_OK;
-}
-
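-// Illustrative only: forcing a key frame on simulcast stream 0. For a
-// regular encoder the request is latched and honored by the next
-// AddVideoFrame() call; an internal-source encoder is asked directly via
-// RequestFrame(), as above.
-//
-//   sender->IntraFrameRequest(0);
-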
-int32_t VideoSender::EnableFrameDropper(bool enable) {
- rtc::CritScope lock(&send_crit_);
- frame_dropper_enabled_ = enable;
- _mediaOpt.EnableFrameDropper(enable);
- return VCM_OK;
-}
-
-void VideoSender::SuspendBelowMinBitrate() {
- RTC_DCHECK(main_thread_.CalledOnValidThread());
- int threshold_bps;
- if (current_codec_.numberOfSimulcastStreams == 0) {
- threshold_bps = current_codec_.minBitrate * 1000;
- } else {
- threshold_bps = current_codec_.simulcastStream[0].minBitrate * 1000;
- }
-  // Set the hysteresis window to 10% of the threshold, but to at least
-  // 10 kbps.
- int window_bps = std::max(threshold_bps / 10, 10000);
- _mediaOpt.SuspendBelowMinBitrate(threshold_bps, window_bps);
-}
-
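-// Worked example for the computation above: a single-stream codec with
-// minBitrate = 300 kbps gives threshold_bps = 300000 and
-// window_bps = max(300000 / 10, 10000) = 30000, i.e. suspend below 300 kbps
-// and (assuming the usual hysteresis semantics) resume above ~330 kbps.
-// With minBitrate = 50 kbps the floor applies: max(5000, 10000) = 10000.
-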
-bool VideoSender::VideoSuspended() const {
- rtc::CritScope lock(&send_crit_);
- return _mediaOpt.IsVideoSuspended();
-}
-} // namespace vcm
-} // namespace webrtc
diff --git a/webrtc/modules/video_coding/main/source/video_sender_unittest.cc b/webrtc/modules/video_coding/main/source/video_sender_unittest.cc
deleted file mode 100644
index e9c8bd79b6..0000000000
--- a/webrtc/modules/video_coding/main/source/video_sender_unittest.cc
+++ /dev/null
@@ -1,494 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <vector>
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/common.h"
-#include "webrtc/modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h"
-#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
-#include "webrtc/modules/video_coding/codecs/vp8/include/vp8_common_types.h"
-#include "webrtc/modules/video_coding/codecs/vp8/temporal_layers.h"
-#include "webrtc/modules/video_coding/main/interface/mock/mock_vcm_callbacks.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding.h"
-#include "webrtc/modules/video_coding/main/source/video_coding_impl.h"
-#include "webrtc/modules/video_coding/main/test/test_util.h"
-#include "webrtc/system_wrappers/include/clock.h"
-#include "webrtc/test/frame_generator.h"
-#include "webrtc/test/testsupport/fileutils.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
-
-using ::testing::_;
-using ::testing::AllOf;
-using ::testing::ElementsAre;
-using ::testing::ElementsAreArray;
-using ::testing::Field;
-using ::testing::NiceMock;
-using ::testing::Pointee;
-using ::testing::Return;
-using ::testing::FloatEq;
-using std::vector;
-using webrtc::test::FrameGenerator;
-
-namespace webrtc {
-namespace vcm {
-namespace {
-enum {
- kMaxNumberOfTemporalLayers = 3
-};
-
-struct Vp8StreamInfo {
- float framerate_fps[kMaxNumberOfTemporalLayers];
- int bitrate_kbps[kMaxNumberOfTemporalLayers];
-};
-
-MATCHER_P(MatchesVp8StreamInfo, expected, "") {
- bool res = true;
- for (int tl = 0; tl < kMaxNumberOfTemporalLayers; ++tl) {
- if (fabs(expected.framerate_fps[tl] - arg.framerate_fps[tl]) > 0.5) {
- *result_listener << " framerate_fps[" << tl
- << "] = " << arg.framerate_fps[tl] << " (expected "
- << expected.framerate_fps[tl] << ") ";
- res = false;
- }
- if (abs(expected.bitrate_kbps[tl] - arg.bitrate_kbps[tl]) > 10) {
- *result_listener << " bitrate_kbps[" << tl
- << "] = " << arg.bitrate_kbps[tl] << " (expected "
- << expected.bitrate_kbps[tl] << ") ";
- res = false;
- }
- }
- return res;
-}
-
-class EmptyFrameGenerator : public FrameGenerator {
- public:
- EmptyFrameGenerator(int width, int height) : width_(width), height_(height) {}
- VideoFrame* NextFrame() override {
- frame_.reset(new VideoFrame());
- frame_->CreateEmptyFrame(width_, height_, width_, (width_ + 1) / 2,
- (width_ + 1) / 2);
- return frame_.get();
- }
-
- private:
- const int width_;
- const int height_;
- rtc::scoped_ptr<VideoFrame> frame_;
-};
-
-class PacketizationCallback : public VCMPacketizationCallback {
- public:
- PacketizationCallback(Clock* clock)
- : clock_(clock), start_time_ms_(clock_->TimeInMilliseconds()) {}
-
- virtual ~PacketizationCallback() {}
-
- int32_t SendData(uint8_t payload_type,
- const EncodedImage& encoded_image,
- const RTPFragmentationHeader& fragmentation_header,
- const RTPVideoHeader* rtp_video_header) override {
- assert(rtp_video_header);
- frame_data_.push_back(FrameData(encoded_image._length, *rtp_video_header));
- return 0;
- }
-
- void Reset() {
- frame_data_.clear();
- start_time_ms_ = clock_->TimeInMilliseconds();
- }
-
- float FramerateFpsWithinTemporalLayer(int temporal_layer) {
- return CountFramesWithinTemporalLayer(temporal_layer) *
- (1000.0 / interval_ms());
- }
-
- float BitrateKbpsWithinTemporalLayer(int temporal_layer) {
- return SumPayloadBytesWithinTemporalLayer(temporal_layer) * 8.0 /
- interval_ms();
- }
-
- Vp8StreamInfo CalculateVp8StreamInfo() {
- Vp8StreamInfo info;
- for (int tl = 0; tl < 3; ++tl) {
- info.framerate_fps[tl] = FramerateFpsWithinTemporalLayer(tl);
- info.bitrate_kbps[tl] = BitrateKbpsWithinTemporalLayer(tl);
- }
- return info;
- }
-
- private:
- struct FrameData {
- FrameData() {}
-
- FrameData(size_t payload_size, const RTPVideoHeader& rtp_video_header)
- : payload_size(payload_size), rtp_video_header(rtp_video_header) {}
-
- size_t payload_size;
- RTPVideoHeader rtp_video_header;
- };
-
- int64_t interval_ms() {
- int64_t diff = (clock_->TimeInMilliseconds() - start_time_ms_);
- EXPECT_GT(diff, 0);
- return diff;
- }
-
- int CountFramesWithinTemporalLayer(int temporal_layer) {
- int frames = 0;
- for (size_t i = 0; i < frame_data_.size(); ++i) {
- EXPECT_EQ(kRtpVideoVp8, frame_data_[i].rtp_video_header.codec);
- const uint8_t temporal_idx =
- frame_data_[i].rtp_video_header.codecHeader.VP8.temporalIdx;
- if (temporal_idx <= temporal_layer || temporal_idx == kNoTemporalIdx)
- frames++;
- }
- return frames;
- }
-
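-  // Note on the accounting here and in SumPayloadBytesWithinTemporalLayer
-  // below: temporal layers are cumulative, so a frame with temporalIdx 0
-  // counts toward layers 0, 1 and 2, while a frame with temporalIdx 2 counts
-  // only toward layer 2. A classic 3-layer stream at 30 fps therefore yields
-  // 7.5 / 15 / 30 fps for layers 0 / 1 / 2.
-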
- size_t SumPayloadBytesWithinTemporalLayer(int temporal_layer) {
- size_t payload_size = 0;
- for (size_t i = 0; i < frame_data_.size(); ++i) {
- EXPECT_EQ(kRtpVideoVp8, frame_data_[i].rtp_video_header.codec);
- const uint8_t temporal_idx =
- frame_data_[i].rtp_video_header.codecHeader.VP8.temporalIdx;
- if (temporal_idx <= temporal_layer || temporal_idx == kNoTemporalIdx)
- payload_size += frame_data_[i].payload_size;
- }
- return payload_size;
- }
-
- Clock* clock_;
- int64_t start_time_ms_;
- vector<FrameData> frame_data_;
-};
-
-class TestVideoSender : public ::testing::Test {
- protected:
-  // Note: the simulated clock starts at 1 second, since parts of webrtc use
-  // 0 as a special case (e.g. frame rate in media optimization).
- TestVideoSender() : clock_(1000), packetization_callback_(&clock_) {}
-
- void SetUp() override {
- sender_.reset(
- new VideoSender(&clock_, &post_encode_callback_, nullptr, nullptr));
- EXPECT_EQ(0, sender_->RegisterTransportCallback(&packetization_callback_));
- }
-
- void AddFrame() {
- assert(generator_.get());
- sender_->AddVideoFrame(*generator_->NextFrame(), NULL, NULL);
- }
-
- SimulatedClock clock_;
- PacketizationCallback packetization_callback_;
- MockEncodedImageCallback post_encode_callback_;
- // Used by subclassing tests, need to outlive sender_.
- rtc::scoped_ptr<VideoEncoder> encoder_;
- rtc::scoped_ptr<VideoSender> sender_;
- rtc::scoped_ptr<FrameGenerator> generator_;
-};
-
-class TestVideoSenderWithMockEncoder : public TestVideoSender {
- protected:
- static const int kDefaultWidth = 1280;
- static const int kDefaultHeight = 720;
- static const int kNumberOfStreams = 3;
- static const int kNumberOfLayers = 3;
- static const int kUnusedPayloadType = 10;
-
- void SetUp() override {
- TestVideoSender::SetUp();
- EXPECT_EQ(
- 0,
- sender_->RegisterExternalEncoder(&encoder_, kUnusedPayloadType, false));
- memset(&settings_, 0, sizeof(settings_));
- EXPECT_EQ(0, VideoCodingModule::Codec(kVideoCodecVP8, &settings_));
- settings_.numberOfSimulcastStreams = kNumberOfStreams;
- ConfigureStream(kDefaultWidth / 4,
- kDefaultHeight / 4,
- 100,
- &settings_.simulcastStream[0]);
- ConfigureStream(kDefaultWidth / 2,
- kDefaultHeight / 2,
- 500,
- &settings_.simulcastStream[1]);
- ConfigureStream(
- kDefaultWidth, kDefaultHeight, 1200, &settings_.simulcastStream[2]);
- settings_.plType = kUnusedPayloadType; // Use the mocked encoder.
- generator_.reset(
- new EmptyFrameGenerator(settings_.width, settings_.height));
- EXPECT_EQ(0, sender_->RegisterSendCodec(&settings_, 1, 1200));
- }
-
- void TearDown() override { sender_.reset(); }
-
- void ExpectIntraRequest(int stream) {
- if (stream == -1) {
- // No intra request expected.
- EXPECT_CALL(
- encoder_,
- Encode(_, _, Pointee(ElementsAre(kVideoFrameDelta, kVideoFrameDelta,
- kVideoFrameDelta))))
- .Times(1)
- .WillRepeatedly(Return(0));
- return;
- }
- assert(stream >= 0);
- assert(stream < kNumberOfStreams);
- std::vector<FrameType> frame_types(kNumberOfStreams, kVideoFrameDelta);
- frame_types[stream] = kVideoFrameKey;
- EXPECT_CALL(
- encoder_,
- Encode(_,
- _,
- Pointee(ElementsAreArray(&frame_types[0], frame_types.size()))))
- .Times(1).WillRepeatedly(Return(0));
- }
-
- static void ConfigureStream(int width,
- int height,
- int max_bitrate,
- SimulcastStream* stream) {
- assert(stream);
- stream->width = width;
- stream->height = height;
- stream->maxBitrate = max_bitrate;
- stream->numberOfTemporalLayers = kNumberOfLayers;
- stream->qpMax = 45;
- }
-
- VideoCodec settings_;
- NiceMock<MockVideoEncoder> encoder_;
-};
-
-TEST_F(TestVideoSenderWithMockEncoder, TestIntraRequests) {
- EXPECT_EQ(0, sender_->IntraFrameRequest(0));
- ExpectIntraRequest(0);
- AddFrame();
- ExpectIntraRequest(-1);
- AddFrame();
-
- EXPECT_EQ(0, sender_->IntraFrameRequest(1));
- ExpectIntraRequest(1);
- AddFrame();
- ExpectIntraRequest(-1);
- AddFrame();
-
- EXPECT_EQ(0, sender_->IntraFrameRequest(2));
- ExpectIntraRequest(2);
- AddFrame();
- ExpectIntraRequest(-1);
- AddFrame();
-
- EXPECT_EQ(-1, sender_->IntraFrameRequest(3));
- ExpectIntraRequest(-1);
- AddFrame();
-
- EXPECT_EQ(-1, sender_->IntraFrameRequest(-1));
- ExpectIntraRequest(-1);
- AddFrame();
-}
-
-TEST_F(TestVideoSenderWithMockEncoder, TestIntraRequestsInternalCapture) {
- // De-register current external encoder.
- EXPECT_EQ(0,
- sender_->RegisterExternalEncoder(NULL, kUnusedPayloadType, false));
- // Register encoder with internal capture.
- EXPECT_EQ(
- 0, sender_->RegisterExternalEncoder(&encoder_, kUnusedPayloadType, true));
- EXPECT_EQ(0, sender_->RegisterSendCodec(&settings_, 1, 1200));
- ExpectIntraRequest(0);
- EXPECT_EQ(0, sender_->IntraFrameRequest(0));
- ExpectIntraRequest(1);
- EXPECT_EQ(0, sender_->IntraFrameRequest(1));
- ExpectIntraRequest(2);
- EXPECT_EQ(0, sender_->IntraFrameRequest(2));
- // No requests expected since these indices are out of bounds.
- EXPECT_EQ(-1, sender_->IntraFrameRequest(3));
- EXPECT_EQ(-1, sender_->IntraFrameRequest(-1));
-}
-
-TEST_F(TestVideoSenderWithMockEncoder, EncoderFramerateUpdatedViaProcess) {
- sender_->SetChannelParameters(settings_.startBitrate * 1000, 0, 200);
- const int64_t kRateStatsWindowMs = 2000;
- const uint32_t kInputFps = 20;
- int64_t start_time = clock_.TimeInMilliseconds();
- while (clock_.TimeInMilliseconds() < start_time + kRateStatsWindowMs) {
- AddFrame();
- clock_.AdvanceTimeMilliseconds(1000 / kInputFps);
- }
- EXPECT_CALL(encoder_, SetRates(_, kInputFps)).Times(1).WillOnce(Return(0));
- sender_->Process();
- AddFrame();
-}
-
-TEST_F(TestVideoSenderWithMockEncoder,
- NoRedundantSetChannelParameterOrSetRatesCalls) {
- const uint8_t kLossRate = 4;
- const uint8_t kRtt = 200;
- const int64_t kRateStatsWindowMs = 2000;
- const uint32_t kInputFps = 20;
- int64_t start_time = clock_.TimeInMilliseconds();
-  // Expect an initial call to SetChannelParameters. Rates are initialized
-  // through InitEncode, so no additional call is expected before the
-  // framerate (or bitrate) changes.
- EXPECT_CALL(encoder_, SetChannelParameters(kLossRate, kRtt))
- .Times(1)
- .WillOnce(Return(0));
- sender_->SetChannelParameters(settings_.startBitrate * 1000, kLossRate, kRtt);
- while (clock_.TimeInMilliseconds() < start_time + kRateStatsWindowMs) {
- AddFrame();
- clock_.AdvanceTimeMilliseconds(1000 / kInputFps);
- }
-  // After Process(), the input framerate should be updated, but not the
-  // channel parameters, since they are the same as before.
- EXPECT_CALL(encoder_, SetRates(_, kInputFps)).Times(1).WillOnce(Return(0));
- sender_->Process();
- AddFrame();
-  // A call to SetChannelParameters with a changed bitrate should trigger
-  // encoder SetRates, but not encoder SetChannelParameters (whose inputs
-  // are unchanged).
- EXPECT_CALL(encoder_, SetRates(2 * settings_.startBitrate, kInputFps))
- .Times(1)
- .WillOnce(Return(0));
- sender_->SetChannelParameters(2 * settings_.startBitrate * 1000, kLossRate,
- kRtt);
- AddFrame();
-}
-
-class TestVideoSenderWithVp8 : public TestVideoSender {
- public:
- TestVideoSenderWithVp8()
- : codec_bitrate_kbps_(300), available_bitrate_kbps_(1000) {}
-
- void SetUp() override {
- TestVideoSender::SetUp();
-
- const char* input_video = "foreman_cif";
- const int width = 352;
- const int height = 288;
- generator_.reset(FrameGenerator::CreateFromYuvFile(
- std::vector<std::string>(1, test::ResourcePath(input_video, "yuv")),
- width, height, 1));
-
- codec_ = MakeVp8VideoCodec(width, height, 3);
- codec_.minBitrate = 10;
- codec_.startBitrate = codec_bitrate_kbps_;
- codec_.maxBitrate = codec_bitrate_kbps_;
- encoder_.reset(VP8Encoder::Create());
- ASSERT_EQ(0, sender_->RegisterExternalEncoder(encoder_.get(), codec_.plType,
- false));
- EXPECT_EQ(0, sender_->RegisterSendCodec(&codec_, 1, 1200));
- }
-
- static VideoCodec MakeVp8VideoCodec(int width,
- int height,
- int temporal_layers) {
- VideoCodec codec;
- memset(&codec, 0, sizeof(codec));
- EXPECT_EQ(0, VideoCodingModule::Codec(kVideoCodecVP8, &codec));
- codec.width = width;
- codec.height = height;
- codec.codecSpecific.VP8.numberOfTemporalLayers = temporal_layers;
- return codec;
- }
-
- void InsertFrames(float framerate, float seconds) {
- for (int i = 0; i < seconds * framerate; ++i) {
- clock_.AdvanceTimeMilliseconds(1000.0f / framerate);
- EXPECT_CALL(post_encode_callback_, Encoded(_, NULL, NULL))
- .WillOnce(Return(0));
- AddFrame();
- // SetChannelParameters needs to be called frequently to propagate
- // framerate from the media optimization into the encoder.
-      // Note: SetChannelParameters fails if fewer than 2 frames are in the
-      // buffer, since it will fail to calculate the framerate.
- if (i != 0) {
- EXPECT_EQ(VCM_OK, sender_->SetChannelParameters(
- available_bitrate_kbps_ * 1000, 0, 200));
- }
- }
- }
-
- Vp8StreamInfo SimulateWithFramerate(float framerate) {
- const float short_simulation_interval = 5.0;
- const float long_simulation_interval = 10.0;
-    // It appears that this 5-second simulation is needed to allow the
-    // bitrate and framerate to stabilize.
- InsertFrames(framerate, short_simulation_interval);
- packetization_callback_.Reset();
-
- InsertFrames(framerate, long_simulation_interval);
- return packetization_callback_.CalculateVp8StreamInfo();
- }
-
- protected:
- VideoCodec codec_;
- int codec_bitrate_kbps_;
- int available_bitrate_kbps_;
-};
-
-TEST_F(TestVideoSenderWithVp8,
- DISABLED_ON_IOS(DISABLED_ON_ANDROID(FixedTemporalLayersStrategy))) {
- const int low_b = codec_bitrate_kbps_ * kVp8LayerRateAlloction[2][0];
- const int mid_b = codec_bitrate_kbps_ * kVp8LayerRateAlloction[2][1];
- const int high_b = codec_bitrate_kbps_ * kVp8LayerRateAlloction[2][2];
- {
- Vp8StreamInfo expected = {{7.5, 15.0, 30.0}, {low_b, mid_b, high_b}};
- EXPECT_THAT(SimulateWithFramerate(30.0), MatchesVp8StreamInfo(expected));
- }
- {
- Vp8StreamInfo expected = {{3.75, 7.5, 15.0}, {low_b, mid_b, high_b}};
- EXPECT_THAT(SimulateWithFramerate(15.0), MatchesVp8StreamInfo(expected));
- }
-}
-
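-// Worked numbers for the expectations above, assuming the usual cumulative
-// VP8 three-layer allocation of 0.6 / 0.8 / 1.0: with
-// codec_bitrate_kbps_ = 300, low_b = 180, mid_b = 240 and high_b = 300 kbps.
-// Halving the input framerate halves every layer's framerate but leaves the
-// bitrate split unchanged.
-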
-TEST_F(TestVideoSenderWithVp8,
- DISABLED_ON_IOS(DISABLED_ON_ANDROID(RealTimeTemporalLayersStrategy))) {
- Config extra_options;
- extra_options.Set<TemporalLayers::Factory>(
- new RealTimeTemporalLayersFactory());
- VideoCodec codec = MakeVp8VideoCodec(352, 288, 3);
- codec.extra_options = &extra_options;
- codec.minBitrate = 10;
- codec.startBitrate = codec_bitrate_kbps_;
- codec.maxBitrate = codec_bitrate_kbps_;
- EXPECT_EQ(0, sender_->RegisterSendCodec(&codec, 1, 1200));
-
- const int low_b = codec_bitrate_kbps_ * 0.4;
- const int mid_b = codec_bitrate_kbps_ * 0.6;
- const int high_b = codec_bitrate_kbps_;
-
- {
- Vp8StreamInfo expected = {{7.5, 15.0, 30.0}, {low_b, mid_b, high_b}};
- EXPECT_THAT(SimulateWithFramerate(30.0), MatchesVp8StreamInfo(expected));
- }
- {
- Vp8StreamInfo expected = {{5.0, 10.0, 20.0}, {low_b, mid_b, high_b}};
- EXPECT_THAT(SimulateWithFramerate(20.0), MatchesVp8StreamInfo(expected));
- }
- {
- Vp8StreamInfo expected = {{7.5, 15.0, 15.0}, {mid_b, high_b, high_b}};
- EXPECT_THAT(SimulateWithFramerate(15.0), MatchesVp8StreamInfo(expected));
- }
- {
- Vp8StreamInfo expected = {{5.0, 10.0, 10.0}, {mid_b, high_b, high_b}};
- EXPECT_THAT(SimulateWithFramerate(10.0), MatchesVp8StreamInfo(expected));
- }
- {
- // TODO(andresp): Find out why this fails with framerate = 7.5
- Vp8StreamInfo expected = {{7.0, 7.0, 7.0}, {high_b, high_b, high_b}};
- EXPECT_THAT(SimulateWithFramerate(7.0), MatchesVp8StreamInfo(expected));
- }
-}
-} // namespace
-} // namespace vcm
-} // namespace webrtc
diff --git a/webrtc/modules/video_coding/main/test/receiver_tests.h b/webrtc/modules/video_coding/main/test/receiver_tests.h
deleted file mode 100644
index 6d7b7beeb5..0000000000
--- a/webrtc/modules/video_coding/main/test/receiver_tests.h
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_RECEIVER_TESTS_H_
-#define WEBRTC_MODULES_VIDEO_CODING_TEST_RECEIVER_TESTS_H_
-
-#include "webrtc/common_types.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding.h"
-#include "webrtc/modules/video_coding/main/test/test_util.h"
-#include "webrtc/modules/video_coding/main/test/video_source.h"
-#include "webrtc/typedefs.h"
-
-#include <stdio.h>
-#include <string>
-
-class RtpDataCallback : public webrtc::NullRtpData {
- public:
- RtpDataCallback(webrtc::VideoCodingModule* vcm) : vcm_(vcm) {}
- virtual ~RtpDataCallback() {}
-
- int32_t OnReceivedPayloadData(
- const uint8_t* payload_data,
- const size_t payload_size,
- const webrtc::WebRtcRTPHeader* rtp_header) override {
- return vcm_->IncomingPacket(payload_data, payload_size, *rtp_header);
- }
-
- private:
- webrtc::VideoCodingModule* vcm_;
-};
-
-int RtpPlay(const CmdArgs& args);
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_TEST_RECEIVER_TESTS_H_
diff --git a/webrtc/modules/video_coding/main/test/release_test.h b/webrtc/modules/video_coding/main/test/release_test.h
deleted file mode 100644
index e90dcaef01..0000000000
--- a/webrtc/modules/video_coding/main/test/release_test.h
+++ /dev/null
@@ -1,17 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef RELEASE_TEST_H
-#define RELEASE_TEST_H
-
-int ReleaseTest();
-int ReleaseTestPart2();
-
-#endif
diff --git a/webrtc/modules/video_coding/main/test/rtp_player.cc b/webrtc/modules/video_coding/main/test/rtp_player.cc
deleted file mode 100644
index 6717cf227d..0000000000
--- a/webrtc/modules/video_coding/main/test/rtp_player.cc
+++ /dev/null
@@ -1,493 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_coding/main/test/rtp_player.h"
-
-#include <stdio.h>
-
-#include <map>
-
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
-#include "webrtc/modules/video_coding/main/source/internal_defines.h"
-#include "webrtc/modules/video_coding/main/test/test_util.h"
-#include "webrtc/system_wrappers/include/clock.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/test/rtp_file_reader.h"
-
-#if 1
-# define DEBUG_LOG1(text, arg)
-#else
-# define DEBUG_LOG1(text, arg) (printf(text "\n", arg))
-#endif
-
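-// The "#if 1" above ships with debug logging compiled out; flipping it to
-// "#if 0" makes, e.g., DEBUG_LOG1("Registering handler for ssrc=%08x", ssrc)
-// expand to a printf with a trailing newline instead of to nothing.
-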
-namespace webrtc {
-namespace rtpplayer {
-
-enum {
- kMaxPacketBufferSize = 4096,
- kDefaultTransmissionTimeOffsetExtensionId = 2
-};
-
-class RawRtpPacket {
- public:
- RawRtpPacket(const uint8_t* data, size_t length, uint32_t ssrc,
- uint16_t seq_num)
- : data_(new uint8_t[length]),
- length_(length),
- resend_time_ms_(-1),
- ssrc_(ssrc),
- seq_num_(seq_num) {
- assert(data);
- memcpy(data_.get(), data, length_);
- }
-
- const uint8_t* data() const { return data_.get(); }
- size_t length() const { return length_; }
- int64_t resend_time_ms() const { return resend_time_ms_; }
- void set_resend_time_ms(int64_t timeMs) { resend_time_ms_ = timeMs; }
- uint32_t ssrc() const { return ssrc_; }
- uint16_t seq_num() const { return seq_num_; }
-
- private:
- rtc::scoped_ptr<uint8_t[]> data_;
- size_t length_;
- int64_t resend_time_ms_;
- uint32_t ssrc_;
- uint16_t seq_num_;
-
- RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(RawRtpPacket);
-};
-
-class LostPackets {
- public:
- LostPackets(Clock* clock, int64_t rtt_ms)
- : crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- debug_file_(fopen("PacketLossDebug.txt", "w")),
- loss_count_(0),
- packets_(),
- clock_(clock),
- rtt_ms_(rtt_ms) {
- assert(clock);
- }
-
- ~LostPackets() {
- if (debug_file_) {
- fclose(debug_file_);
- debug_file_ = NULL;
- }
- while (!packets_.empty()) {
- delete packets_.back();
- packets_.pop_back();
- }
- }
-
- void AddPacket(RawRtpPacket* packet) {
- assert(packet);
- printf("Throw: %08x:%u\n", packet->ssrc(), packet->seq_num());
- CriticalSectionScoped cs(crit_sect_.get());
- if (debug_file_) {
- fprintf(debug_file_, "%u Lost packet: %u\n", loss_count_,
- packet->seq_num());
- }
- packets_.push_back(packet);
- loss_count_++;
- }
-
-  void SetResendTime(uint32_t ssrc, uint16_t resendSeqNum) {
- int64_t resend_time_ms = clock_->TimeInMilliseconds() + rtt_ms_;
- int64_t now_ms = clock_->TimeInMilliseconds();
- CriticalSectionScoped cs(crit_sect_.get());
- for (RtpPacketIterator it = packets_.begin(); it != packets_.end(); ++it) {
- RawRtpPacket* packet = *it;
- if (ssrc == packet->ssrc() && resendSeqNum == packet->seq_num() &&
- packet->resend_time_ms() + 10 < now_ms) {
- if (debug_file_) {
- fprintf(debug_file_, "Resend %u at %u\n", packet->seq_num(),
- MaskWord64ToUWord32(resend_time_ms));
- }
- packet->set_resend_time_ms(resend_time_ms);
- return;
- }
- }
- // We may get here since the captured stream may itself be missing packets.
- }
-
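-  // Illustrative timeline for SetResendTime(): with rtt_ms_ = 100, a NACK
-  // arriving at t = 5000 ms schedules the matching lost packet for resend at
-  // t = 5100 ms; repeated NACKs for the same packet are ignored until at
-  // least 10 ms past the previously scheduled resend time, per the
-  // resend_time_ms() + 10 < now_ms guard above.
-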
- RawRtpPacket* NextPacketToResend(int64_t time_now) {
- CriticalSectionScoped cs(crit_sect_.get());
- for (RtpPacketIterator it = packets_.begin(); it != packets_.end(); ++it) {
- RawRtpPacket* packet = *it;
- if (time_now >= packet->resend_time_ms() &&
- packet->resend_time_ms() != -1) {
- packets_.erase(it);
- return packet;
- }
- }
- return NULL;
- }
-
- int NumberOfPacketsToResend() const {
- CriticalSectionScoped cs(crit_sect_.get());
- int count = 0;
- for (ConstRtpPacketIterator it = packets_.begin(); it != packets_.end();
- ++it) {
- if ((*it)->resend_time_ms() >= 0) {
- count++;
- }
- }
- return count;
- }
-
- void LogPacketResent(RawRtpPacket* packet) {
- int64_t now_ms = clock_->TimeInMilliseconds();
- CriticalSectionScoped cs(crit_sect_.get());
- if (debug_file_) {
- fprintf(debug_file_, "Resent %u at %u\n", packet->seq_num(),
- MaskWord64ToUWord32(now_ms));
- }
- }
-
- void Print() const {
- CriticalSectionScoped cs(crit_sect_.get());
- printf("Lost packets: %u\n", loss_count_);
- printf("Packets waiting to be resent: %d\n", NumberOfPacketsToResend());
- printf("Packets still lost: %zd\n", packets_.size());
- printf("Sequence numbers:\n");
- for (ConstRtpPacketIterator it = packets_.begin(); it != packets_.end();
- ++it) {
- printf("%u, ", (*it)->seq_num());
- }
- printf("\n");
- }
-
- private:
- typedef std::vector<RawRtpPacket*> RtpPacketList;
- typedef RtpPacketList::iterator RtpPacketIterator;
- typedef RtpPacketList::const_iterator ConstRtpPacketIterator;
-
- rtc::scoped_ptr<CriticalSectionWrapper> crit_sect_;
- FILE* debug_file_;
- int loss_count_;
- RtpPacketList packets_;
- Clock* clock_;
- int64_t rtt_ms_;
-
- RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(LostPackets);
-};
-
-class SsrcHandlers {
- public:
- SsrcHandlers(PayloadSinkFactoryInterface* payload_sink_factory,
- const PayloadTypes& payload_types)
- : payload_sink_factory_(payload_sink_factory),
- payload_types_(payload_types),
- handlers_() {
- assert(payload_sink_factory);
- }
-
- ~SsrcHandlers() {
- while (!handlers_.empty()) {
- delete handlers_.begin()->second;
- handlers_.erase(handlers_.begin());
- }
- }
-
- int RegisterSsrc(uint32_t ssrc, LostPackets* lost_packets, Clock* clock) {
- if (handlers_.count(ssrc) > 0) {
- return 0;
- }
- DEBUG_LOG1("Registering handler for ssrc=%08x", ssrc);
-
- rtc::scoped_ptr<Handler> handler(
- new Handler(ssrc, payload_types_, lost_packets));
- handler->payload_sink_.reset(payload_sink_factory_->Create(handler.get()));
- if (handler->payload_sink_.get() == NULL) {
- return -1;
- }
-
- RtpRtcp::Configuration configuration;
- configuration.clock = clock;
- configuration.audio = false;
- handler->rtp_module_.reset(RtpReceiver::CreateVideoReceiver(
- configuration.clock, handler->payload_sink_.get(), NULL,
- handler->rtp_payload_registry_.get()));
- if (handler->rtp_module_.get() == NULL) {
- return -1;
- }
-
- handler->rtp_module_->SetNACKStatus(kNackOff);
- handler->rtp_header_parser_->RegisterRtpHeaderExtension(
- kRtpExtensionTransmissionTimeOffset,
- kDefaultTransmissionTimeOffsetExtensionId);
-
- for (PayloadTypesIterator it = payload_types_.begin();
- it != payload_types_.end(); ++it) {
- VideoCodec codec;
- memset(&codec, 0, sizeof(codec));
- strncpy(codec.plName, it->name().c_str(), sizeof(codec.plName)-1);
- codec.plType = it->payload_type();
- codec.codecType = it->codec_type();
- if (handler->rtp_module_->RegisterReceivePayload(codec.plName,
- codec.plType,
- 90000,
- 0,
- codec.maxBitrate) < 0) {
- return -1;
- }
- }
-
- handlers_[ssrc] = handler.release();
- return 0;
- }
-
- void IncomingPacket(const uint8_t* data, size_t length) {
- for (HandlerMapIt it = handlers_.begin(); it != handlers_.end(); ++it) {
- if (!it->second->rtp_header_parser_->IsRtcp(data, length)) {
- RTPHeader header;
- it->second->rtp_header_parser_->Parse(data, length, &header);
- PayloadUnion payload_specific;
- it->second->rtp_payload_registry_->GetPayloadSpecifics(
- header.payloadType, &payload_specific);
- it->second->rtp_module_->IncomingRtpPacket(header, data, length,
- payload_specific, true);
- }
- }
- }
-
- private:
- class Handler : public RtpStreamInterface {
- public:
- Handler(uint32_t ssrc, const PayloadTypes& payload_types,
- LostPackets* lost_packets)
- : rtp_header_parser_(RtpHeaderParser::Create()),
- rtp_payload_registry_(new RTPPayloadRegistry(
- RTPPayloadStrategy::CreateStrategy(false))),
- rtp_module_(),
- payload_sink_(),
- ssrc_(ssrc),
- payload_types_(payload_types),
- lost_packets_(lost_packets) {
- assert(lost_packets);
- }
- virtual ~Handler() {}
-
- virtual void ResendPackets(const uint16_t* sequence_numbers,
- uint16_t length) {
- assert(sequence_numbers);
- for (uint16_t i = 0; i < length; i++) {
- lost_packets_->SetResendTime(ssrc_, sequence_numbers[i]);
- }
- }
-
- virtual uint32_t ssrc() const { return ssrc_; }
- virtual const PayloadTypes& payload_types() const {
- return payload_types_;
- }
-
- rtc::scoped_ptr<RtpHeaderParser> rtp_header_parser_;
- rtc::scoped_ptr<RTPPayloadRegistry> rtp_payload_registry_;
- rtc::scoped_ptr<RtpReceiver> rtp_module_;
- rtc::scoped_ptr<PayloadSinkInterface> payload_sink_;
-
- private:
- uint32_t ssrc_;
- const PayloadTypes& payload_types_;
- LostPackets* lost_packets_;
-
- RTC_DISALLOW_COPY_AND_ASSIGN(Handler);
- };
-
- typedef std::map<uint32_t, Handler*> HandlerMap;
- typedef std::map<uint32_t, Handler*>::iterator HandlerMapIt;
-
- PayloadSinkFactoryInterface* payload_sink_factory_;
- PayloadTypes payload_types_;
- HandlerMap handlers_;
-
- RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(SsrcHandlers);
-};
-
-class RtpPlayerImpl : public RtpPlayerInterface {
- public:
- RtpPlayerImpl(PayloadSinkFactoryInterface* payload_sink_factory,
- const PayloadTypes& payload_types,
- Clock* clock,
- rtc::scoped_ptr<test::RtpFileReader>* packet_source,
- float loss_rate,
- int64_t rtt_ms,
- bool reordering)
- : ssrc_handlers_(payload_sink_factory, payload_types),
- clock_(clock),
- next_rtp_time_(0),
- first_packet_(true),
- first_packet_rtp_time_(0),
- first_packet_time_ms_(0),
- loss_rate_(loss_rate),
- lost_packets_(clock, rtt_ms),
- resend_packet_count_(0),
- no_loss_startup_(100),
- end_of_file_(false),
-        reordering_(reordering),
- reorder_buffer_() {
- assert(clock);
- assert(packet_source);
- assert(packet_source->get());
- packet_source_.swap(*packet_source);
- srand(321);
- }
-
- virtual ~RtpPlayerImpl() {}
-
- virtual int NextPacket(int64_t time_now) {
- // Send any packets ready to be resent.
- for (RawRtpPacket* packet = lost_packets_.NextPacketToResend(time_now);
- packet != NULL;
- packet = lost_packets_.NextPacketToResend(time_now)) {
- int ret = SendPacket(packet->data(), packet->length());
- if (ret > 0) {
- printf("Resend: %08x:%u\n", packet->ssrc(), packet->seq_num());
- lost_packets_.LogPacketResent(packet);
- resend_packet_count_++;
- }
- delete packet;
- if (ret < 0) {
- return ret;
- }
- }
-
- // Send any packets from packet source.
- if (!end_of_file_ && (TimeUntilNextPacket() == 0 || first_packet_)) {
- if (first_packet_) {
- if (!packet_source_->NextPacket(&next_packet_))
- return 0;
- first_packet_rtp_time_ = next_packet_.time_ms;
- first_packet_time_ms_ = clock_->TimeInMilliseconds();
- first_packet_ = false;
- }
-
- if (reordering_ && reorder_buffer_.get() == NULL) {
- reorder_buffer_.reset(
- new RawRtpPacket(next_packet_.data, next_packet_.length, 0, 0));
- return 0;
- }
- int ret = SendPacket(next_packet_.data, next_packet_.length);
- if (reorder_buffer_.get()) {
- SendPacket(reorder_buffer_->data(), reorder_buffer_->length());
- reorder_buffer_.reset(NULL);
- }
- if (ret < 0) {
- return ret;
- }
-
- if (!packet_source_->NextPacket(&next_packet_)) {
- end_of_file_ = true;
- return 0;
-      } else if (next_packet_.length == 0) {
- return 0;
- }
- }
-
- if (end_of_file_ && lost_packets_.NumberOfPacketsToResend() == 0) {
- return 1;
- }
- return 0;
- }
-
- virtual uint32_t TimeUntilNextPacket() const {
- int64_t time_left = (next_rtp_time_ - first_packet_rtp_time_) -
- (clock_->TimeInMilliseconds() - first_packet_time_ms_);
- if (time_left < 0) {
- return 0;
- }
- return static_cast<uint32_t>(time_left);
- }
-
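-  // Pacing arithmetic, by example: if the next packet's RTP capture time is
-  // 2000 ms after the first packet's, but only 1500 ms of wall-clock time
-  // have elapsed since playback started, TimeUntilNextPacket() returns
-  // 2000 - 1500 = 500 ms; once the wall clock catches up it returns 0 and
-  // NextPacket() may send.
-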
- virtual void Print() const {
- printf("Resent packets: %u\n", resend_packet_count_);
- lost_packets_.Print();
- }
-
- private:
- int SendPacket(const uint8_t* data, size_t length) {
- assert(data);
- assert(length > 0);
-
- rtc::scoped_ptr<RtpHeaderParser> rtp_header_parser(
- RtpHeaderParser::Create());
- if (!rtp_header_parser->IsRtcp(data, length)) {
- RTPHeader header;
- if (!rtp_header_parser->Parse(data, length, &header)) {
- return -1;
- }
- uint32_t ssrc = header.ssrc;
- if (ssrc_handlers_.RegisterSsrc(ssrc, &lost_packets_, clock_) < 0) {
- DEBUG_LOG1("Unable to register ssrc: %d", ssrc);
- return -1;
- }
-
- if (no_loss_startup_ > 0) {
- no_loss_startup_--;
-      } else if ((rand() + 1.0) / (RAND_MAX + 1.0) < loss_rate_) {
- uint16_t seq_num = header.sequenceNumber;
- lost_packets_.AddPacket(new RawRtpPacket(data, length, ssrc, seq_num));
- DEBUG_LOG1("Dropped packet: %d!", header.header.sequenceNumber);
- return 0;
- }
- }
-
- ssrc_handlers_.IncomingPacket(data, length);
- return 1;
- }
-
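-  // Loss-model arithmetic: (rand() + 1.0) / (RAND_MAX + 1.0) is uniform on
-  // (0, 1], so with loss_rate_ = 0.05 roughly 5% of packets are dropped once
-  // the first no_loss_startup_ (100) packets have passed through untouched.
-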
- SsrcHandlers ssrc_handlers_;
- Clock* clock_;
- rtc::scoped_ptr<test::RtpFileReader> packet_source_;
- test::RtpPacket next_packet_;
- uint32_t next_rtp_time_;
- bool first_packet_;
- int64_t first_packet_rtp_time_;
- int64_t first_packet_time_ms_;
- float loss_rate_;
- LostPackets lost_packets_;
- uint32_t resend_packet_count_;
- uint32_t no_loss_startup_;
- bool end_of_file_;
- bool reordering_;
- rtc::scoped_ptr<RawRtpPacket> reorder_buffer_;
-
- RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(RtpPlayerImpl);
-};
-
-RtpPlayerInterface* Create(const std::string& input_filename,
- PayloadSinkFactoryInterface* payload_sink_factory, Clock* clock,
- const PayloadTypes& payload_types, float loss_rate, int64_t rtt_ms,
- bool reordering) {
- rtc::scoped_ptr<test::RtpFileReader> packet_source(
- test::RtpFileReader::Create(test::RtpFileReader::kRtpDump,
- input_filename));
- if (packet_source.get() == NULL) {
- packet_source.reset(test::RtpFileReader::Create(test::RtpFileReader::kPcap,
- input_filename));
- if (packet_source.get() == NULL) {
- return NULL;
- }
- }
-
- rtc::scoped_ptr<RtpPlayerImpl> impl(
- new RtpPlayerImpl(payload_sink_factory, payload_types, clock,
- &packet_source, loss_rate, rtt_ms, reordering));
- return impl.release();
-}
-} // namespace rtpplayer
-} // namespace webrtc
diff --git a/webrtc/modules/video_coding/main/test/rtp_player.h b/webrtc/modules/video_coding/main/test/rtp_player.h
deleted file mode 100644
index 7459231416..0000000000
--- a/webrtc/modules/video_coding/main/test/rtp_player.h
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_RTP_PLAYER_H_
-#define WEBRTC_MODULES_VIDEO_CODING_TEST_RTP_PLAYER_H_
-
-#include <string>
-#include <vector>
-
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding_defines.h"
-
-namespace webrtc {
-class Clock;
-
-namespace rtpplayer {
-
-class PayloadCodecTuple {
- public:
- PayloadCodecTuple(uint8_t payload_type, const std::string& codec_name,
- VideoCodecType codec_type)
- : name_(codec_name),
- payload_type_(payload_type),
- codec_type_(codec_type) {
- }
-
- const std::string& name() const { return name_; }
- uint8_t payload_type() const { return payload_type_; }
- VideoCodecType codec_type() const { return codec_type_; }
-
- private:
- std::string name_;
- uint8_t payload_type_;
- VideoCodecType codec_type_;
-};
-
-typedef std::vector<PayloadCodecTuple> PayloadTypes;
-typedef std::vector<PayloadCodecTuple>::const_iterator PayloadTypesIterator;
-
-// Implemented by RtpPlayer and given to the client as a means to retrieve
-// information about a specific RTP stream.
-class RtpStreamInterface {
- public:
- virtual ~RtpStreamInterface() {}
-
- // Ask for missing packets to be resent.
- virtual void ResendPackets(const uint16_t* sequence_numbers,
- uint16_t length) = 0;
-
- virtual uint32_t ssrc() const = 0;
- virtual const PayloadTypes& payload_types() const = 0;
-};
-
-// Implemented by a sink. Wraps RtpData because its d-tor is protected.
-class PayloadSinkInterface : public RtpData {
- public:
- virtual ~PayloadSinkInterface() {}
-};
-
-// Implemented to provide a sink for RTP data, such as hooking up a VCM to
-// the incoming RTP stream.
-class PayloadSinkFactoryInterface {
- public:
- virtual ~PayloadSinkFactoryInterface() {}
-
-  // Returns NULL if it fails to create a sink. 'stream' is guaranteed to
-  // live at least as long as the returned RtpData. The returned object is
-  // owned by the caller (RtpPlayer).
- virtual PayloadSinkInterface* Create(RtpStreamInterface* stream) = 0;
-};
-
-// The client's view of an RtpPlayer.
-class RtpPlayerInterface {
- public:
- virtual ~RtpPlayerInterface() {}
-
- virtual int NextPacket(int64_t timeNow) = 0;
- virtual uint32_t TimeUntilNextPacket() const = 0;
- virtual void Print() const = 0;
-};
-
-RtpPlayerInterface* Create(const std::string& inputFilename,
- PayloadSinkFactoryInterface* payloadSinkFactory, Clock* clock,
- const PayloadTypes& payload_types, float lossRate, int64_t rttMs,
- bool reordering);
-
-} // namespace rtpplayer
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_TEST_RTP_PLAYER_H_
diff --git a/webrtc/modules/video_coding/main/test/test_util.cc b/webrtc/modules/video_coding/main/test/test_util.cc
deleted file mode 100644
index cd858da288..0000000000
--- a/webrtc/modules/video_coding/main/test/test_util.cc
+++ /dev/null
@@ -1,139 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_coding/main/test/test_util.h"
-
-#include <assert.h>
-#include <math.h>
-
-#include <iomanip>
-#include <sstream>
-
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/video_coding/main/source/internal_defines.h"
-#include "webrtc/test/testsupport/fileutils.h"
-
-CmdArgs::CmdArgs()
- : codecName("VP8"),
- codecType(webrtc::kVideoCodecVP8),
- width(352),
- height(288),
- rtt(0),
- inputFile(webrtc::test::ProjectRootPath() + "/resources/foreman_cif.yuv"),
- outputFile(webrtc::test::OutputPath() +
- "video_coding_test_output_352x288.yuv") {
-}
-
-namespace {
-
-void SplitFilename(const std::string& filename, std::string* basename,
- std::string* extension) {
- assert(basename);
- assert(extension);
-
- std::string::size_type idx;
- idx = filename.rfind('.');
-
-  if (idx != std::string::npos) {
- *basename = filename.substr(0, idx);
- *extension = filename.substr(idx + 1);
- } else {
- *basename = filename;
- *extension = "";
- }
-}
-
-std::string AppendWidthHeightCount(const std::string& filename, int width,
- int height, int count) {
- std::string basename;
- std::string extension;
- SplitFilename(filename, &basename, &extension);
- std::stringstream ss;
- ss << basename << "_" << count << "." << width << "_" << height << "." <<
- extension;
- return ss.str();
-}
-
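-// Example: AppendWidthHeightCount("out.yuv", 352, 288, 0) splits the name
-// into basename "out" and extension "yuv" and returns "out_0.352_288.yuv".
-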
-} // namespace
-
-FileOutputFrameReceiver::FileOutputFrameReceiver(
- const std::string& base_out_filename, uint32_t ssrc)
- : out_filename_(),
- out_file_(NULL),
- timing_file_(NULL),
- width_(0),
- height_(0),
- count_(0) {
- std::string basename;
- std::string extension;
- if (base_out_filename.empty()) {
- basename = webrtc::test::OutputPath() + "rtp_decoded";
- extension = "yuv";
- } else {
- SplitFilename(base_out_filename, &basename, &extension);
- }
- std::stringstream ss;
- ss << basename << "_" << std::hex << std::setw(8) << std::setfill('0') <<
- ssrc << "." << extension;
- out_filename_ = ss.str();
-}
-
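-// Example: with base_out_filename "decoded.yuv" and ssrc 0x1234, frames are
-// written to "decoded_00001234.yuv" (the ssrc zero-padded to eight hex
-// digits by the setw/setfill manipulators above).
-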
-FileOutputFrameReceiver::~FileOutputFrameReceiver() {
- if (timing_file_ != NULL) {
- fclose(timing_file_);
- }
- if (out_file_ != NULL) {
- fclose(out_file_);
- }
-}
-
-int32_t FileOutputFrameReceiver::FrameToRender(
- webrtc::VideoFrame& video_frame) {
- if (timing_file_ == NULL) {
- std::string basename;
- std::string extension;
- SplitFilename(out_filename_, &basename, &extension);
- timing_file_ = fopen((basename + "_renderTiming.txt").c_str(), "w");
- if (timing_file_ == NULL) {
- return -1;
- }
- }
- if (out_file_ == NULL || video_frame.width() != width_ ||
- video_frame.height() != height_) {
- if (out_file_) {
- fclose(out_file_);
- }
- printf("New size: %dx%d\n", video_frame.width(), video_frame.height());
- width_ = video_frame.width();
- height_ = video_frame.height();
- std::string filename_with_width_height = AppendWidthHeightCount(
- out_filename_, width_, height_, count_);
- ++count_;
- out_file_ = fopen(filename_with_width_height.c_str(), "wb");
- if (out_file_ == NULL) {
- return -1;
- }
- }
- fprintf(timing_file_, "%u, %u\n", video_frame.timestamp(),
- webrtc::MaskWord64ToUWord32(video_frame.render_time_ms()));
- if (PrintVideoFrame(video_frame, out_file_) < 0) {
- return -1;
- }
- return 0;
-}
-
-webrtc::RtpVideoCodecTypes ConvertCodecType(const char* plname) {
-  if (strncmp(plname, "VP8", 3) == 0) {
- return webrtc::kRtpVideoVp8;
- } else {
- // Default value.
- return webrtc::kRtpVideoGeneric;
- }
-}
diff --git a/webrtc/modules/video_coding/main/test/test_util.h b/webrtc/modules/video_coding/main/test/test_util.h
deleted file mode 100644
index 27f66fe011..0000000000
--- a/webrtc/modules/video_coding/main/test/test_util.h
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_TEST_UTIL_H_
-#define WEBRTC_MODULES_VIDEO_CODING_TEST_TEST_UTIL_H_
-
-/*
- * General declarations used throughout VCM offline tests.
- */
-
-#include <string>
-
-#include "webrtc/base/constructormagic.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding.h"
-#include "webrtc/system_wrappers/include/event_wrapper.h"
-
-enum { kMaxNackListSize = 250 };
-enum { kMaxPacketAgeToNack = 450 };
-
-class NullEvent : public webrtc::EventWrapper {
- public:
- virtual ~NullEvent() {}
-
- virtual bool Set() { return true; }
-
- virtual bool Reset() { return true; }
-
- virtual webrtc::EventTypeWrapper Wait(unsigned long max_time) {
- return webrtc::kEventTimeout;
- }
-
- virtual bool StartTimer(bool periodic, unsigned long time) { return true; }
-
- virtual bool StopTimer() { return true; }
-};
-
-class NullEventFactory : public webrtc::EventFactory {
- public:
- virtual ~NullEventFactory() {}
-
- virtual webrtc::EventWrapper* CreateEvent() {
- return new NullEvent;
- }
-};
-
-class FileOutputFrameReceiver : public webrtc::VCMReceiveCallback {
- public:
- FileOutputFrameReceiver(const std::string& base_out_filename, uint32_t ssrc);
- virtual ~FileOutputFrameReceiver();
-
- // VCMReceiveCallback
- virtual int32_t FrameToRender(webrtc::VideoFrame& video_frame);
-
- private:
- std::string out_filename_;
- FILE* out_file_;
- FILE* timing_file_;
- int width_;
- int height_;
- int count_;
-
- RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(FileOutputFrameReceiver);
-};
-
-class CmdArgs {
- public:
- CmdArgs();
-
- std::string codecName;
- webrtc::VideoCodecType codecType;
- int width;
- int height;
- int rtt;
- std::string inputFile;
- std::string outputFile;
-};
-
-#endif
diff --git a/webrtc/modules/video_coding/main/test/tester_main.cc b/webrtc/modules/video_coding/main/test/tester_main.cc
deleted file mode 100644
index 2885f00bd5..0000000000
--- a/webrtc/modules/video_coding/main/test/tester_main.cc
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-
-#include <stdlib.h>
-#include <string.h>
-
-#include "gflags/gflags.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding.h"
-#include "webrtc/modules/video_coding/main/test/receiver_tests.h"
-#include "webrtc/test/testsupport/fileutils.h"
-
-DEFINE_string(codec, "VP8", "Codec to use (VP8 or I420).");
-DEFINE_int32(width, 352, "Width in pixels of the frames in the input file.");
-DEFINE_int32(height, 288, "Height in pixels of the frames in the input file.");
-DEFINE_int32(rtt, 0, "RTT (round-trip time), in milliseconds.");
-DEFINE_string(input_filename, webrtc::test::ProjectRootPath() +
- "/resources/foreman_cif.yuv", "Input file.");
-DEFINE_string(output_filename, webrtc::test::OutputPath() +
- "video_coding_test_output_352x288.yuv", "Output file.");
-
-using namespace webrtc;
-
-/*
- * Build with EVENT_DEBUG defined to run the tests with simulated events.
- */
-
-int vcmMacrosTests = 0;
-int vcmMacrosErrors = 0;
-
-int ParseArguments(CmdArgs& args) {
- args.width = FLAGS_width;
- args.height = FLAGS_height;
- if (args.width < 1 || args.height < 1) {
- return -1;
- }
- args.codecName = FLAGS_codec;
- if (args.codecName == "VP8") {
- args.codecType = kVideoCodecVP8;
- } else if (args.codecName == "VP9") {
- args.codecType = kVideoCodecVP9;
- } else if (args.codecName == "I420") {
- args.codecType = kVideoCodecI420;
- } else {
- printf("Invalid codec: %s\n", args.codecName.c_str());
- return -1;
- }
- args.inputFile = FLAGS_input_filename;
- args.outputFile = FLAGS_output_filename;
- args.rtt = FLAGS_rtt;
- return 0;
-}
-
-int main(int argc, char **argv) {
- // Initialize WebRTC fileutils.h so paths to resources can be resolved.
- webrtc::test::SetExecutablePath(argv[0]);
- google::ParseCommandLineFlags(&argc, &argv, true);
-
- CmdArgs args;
- if (ParseArguments(args) != 0) {
- printf("Unable to parse input arguments\n");
- return -1;
- }
-
- printf("Running video coding tests...\n");
- return RtpPlay(args);
-}
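-
-// Typical invocation (illustrative; the binary name depends on the build
-// target):
-//
-//   ./video_coding_test --codec=VP8 --width=352 --height=288 --rtt=100 \
-//       --input_filename=/path/to/capture.rtp
-//
-// where the input file is an rtpdump or pcap capture, per rtp_player.cc.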
diff --git a/webrtc/modules/video_coding/main/test/vcm_payload_sink_factory.cc b/webrtc/modules/video_coding/main/test/vcm_payload_sink_factory.cc
deleted file mode 100644
index 2d874cd1bd..0000000000
--- a/webrtc/modules/video_coding/main/test/vcm_payload_sink_factory.cc
+++ /dev/null
@@ -1,210 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_coding/main/test/vcm_payload_sink_factory.h"
-
-#include <assert.h>
-
-#include <algorithm>
-
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
-#include "webrtc/modules/video_coding/main/test/test_util.h"
-#include "webrtc/system_wrappers/include/clock.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-
-namespace webrtc {
-namespace rtpplayer {
-
-class VcmPayloadSinkFactory::VcmPayloadSink
- : public PayloadSinkInterface,
- public VCMPacketRequestCallback {
- public:
- VcmPayloadSink(VcmPayloadSinkFactory* factory,
- RtpStreamInterface* stream,
- rtc::scoped_ptr<VideoCodingModule>* vcm,
- rtc::scoped_ptr<FileOutputFrameReceiver>* frame_receiver)
- : factory_(factory), stream_(stream), vcm_(), frame_receiver_() {
- assert(factory);
- assert(stream);
- assert(vcm);
- assert(vcm->get());
- assert(frame_receiver);
- assert(frame_receiver->get());
- vcm_.swap(*vcm);
- frame_receiver_.swap(*frame_receiver);
- vcm_->RegisterPacketRequestCallback(this);
- vcm_->RegisterReceiveCallback(frame_receiver_.get());
- }
-
- virtual ~VcmPayloadSink() {
- factory_->Remove(this);
- }
-
- // PayloadSinkInterface
- int32_t OnReceivedPayloadData(const uint8_t* payload_data,
- const size_t payload_size,
- const WebRtcRTPHeader* rtp_header) override {
- return vcm_->IncomingPacket(payload_data, payload_size, *rtp_header);
- }
-
- bool OnRecoveredPacket(const uint8_t* packet, size_t packet_length) override {
- // We currently don't handle FEC.
- return true;
- }
-
- // VCMPacketRequestCallback
- int32_t ResendPackets(const uint16_t* sequence_numbers,
- uint16_t length) override {
- stream_->ResendPackets(sequence_numbers, length);
- return 0;
- }
-
- int DecodeAndProcess(bool should_decode, bool decode_dual_frame) {
- if (should_decode) {
- if (vcm_->Decode() < 0) {
- return -1;
- }
- }
- return Process() ? 0 : -1;
- }
-
- bool Process() {
- if (vcm_->TimeUntilNextProcess() <= 0) {
- if (vcm_->Process() < 0) {
- return false;
- }
- }
- return true;
- }
-
- bool Decode() {
- vcm_->Decode(10000);
- return true;
- }
-
- private:
- VcmPayloadSinkFactory* factory_;
- RtpStreamInterface* stream_;
- rtc::scoped_ptr<VideoCodingModule> vcm_;
- rtc::scoped_ptr<FileOutputFrameReceiver> frame_receiver_;
-
- RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(VcmPayloadSink);
-};
-
-VcmPayloadSinkFactory::VcmPayloadSinkFactory(
- const std::string& base_out_filename,
- Clock* clock,
- bool protection_enabled,
- VCMVideoProtection protection_method,
- int64_t rtt_ms,
- uint32_t render_delay_ms,
- uint32_t min_playout_delay_ms)
- : base_out_filename_(base_out_filename),
- clock_(clock),
- protection_enabled_(protection_enabled),
- protection_method_(protection_method),
- rtt_ms_(rtt_ms),
- render_delay_ms_(render_delay_ms),
- min_playout_delay_ms_(min_playout_delay_ms),
- null_event_factory_(new NullEventFactory()),
- crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- sinks_() {
- assert(clock);
- assert(crit_sect_.get());
-}
-
-VcmPayloadSinkFactory::~VcmPayloadSinkFactory() {
- assert(sinks_.empty());
-}
-
-PayloadSinkInterface* VcmPayloadSinkFactory::Create(
- RtpStreamInterface* stream) {
- assert(stream);
- CriticalSectionScoped cs(crit_sect_.get());
-
- rtc::scoped_ptr<VideoCodingModule> vcm(
- VideoCodingModule::Create(clock_, null_event_factory_.get()));
- if (vcm.get() == NULL) {
- return NULL;
- }
-
- const PayloadTypes& plt = stream->payload_types();
- for (PayloadTypesIterator it = plt.begin(); it != plt.end();
- ++it) {
- if (it->codec_type() != kVideoCodecULPFEC &&
- it->codec_type() != kVideoCodecRED) {
- VideoCodec codec;
- if (VideoCodingModule::Codec(it->codec_type(), &codec) < 0) {
- return NULL;
- }
- codec.plType = it->payload_type();
- if (vcm->RegisterReceiveCodec(&codec, 1) < 0) {
- return NULL;
- }
- }
- }
-
- vcm->SetChannelParameters(0, 0, rtt_ms_);
- vcm->SetVideoProtection(protection_method_, protection_enabled_);
- vcm->SetRenderDelay(render_delay_ms_);
- vcm->SetMinimumPlayoutDelay(min_playout_delay_ms_);
- vcm->SetNackSettings(kMaxNackListSize, kMaxPacketAgeToNack, 0);
-
- rtc::scoped_ptr<FileOutputFrameReceiver> frame_receiver(
- new FileOutputFrameReceiver(base_out_filename_, stream->ssrc()));
- rtc::scoped_ptr<VcmPayloadSink> sink(
- new VcmPayloadSink(this, stream, &vcm, &frame_receiver));
-
- sinks_.push_back(sink.get());
- return sink.release();
-}
-
-int VcmPayloadSinkFactory::DecodeAndProcessAll(bool decode_dual_frame) {
- CriticalSectionScoped cs(crit_sect_.get());
- assert(clock_);
- bool should_decode = (clock_->TimeInMilliseconds() % 5) == 0;
- for (Sinks::iterator it = sinks_.begin(); it != sinks_.end(); ++it) {
- if ((*it)->DecodeAndProcess(should_decode, decode_dual_frame) < 0) {
- return -1;
- }
- }
- return 0;
-}
-
-bool VcmPayloadSinkFactory::ProcessAll() {
- CriticalSectionScoped cs(crit_sect_.get());
- for (Sinks::iterator it = sinks_.begin(); it != sinks_.end(); ++it) {
- if (!(*it)->Process()) {
- return false;
- }
- }
- return true;
-}
-
-bool VcmPayloadSinkFactory::DecodeAll() {
- CriticalSectionScoped cs(crit_sect_.get());
- for (Sinks::iterator it = sinks_.begin(); it != sinks_.end(); ++it) {
- if (!(*it)->Decode()) {
- return false;
- }
- }
- return true;
-}
-
-void VcmPayloadSinkFactory::Remove(VcmPayloadSink* sink) {
- assert(sink);
- CriticalSectionScoped cs(crit_sect_.get());
- Sinks::iterator it = std::find(sinks_.begin(), sinks_.end(), sink);
- assert(it != sinks_.end());
- sinks_.erase(it);
-}
-
-} // namespace rtpplayer
-} // namespace webrtc
diff --git a/webrtc/modules/video_coding/main/test/vcm_payload_sink_factory.h b/webrtc/modules/video_coding/main/test/vcm_payload_sink_factory.h
deleted file mode 100644
index ec94bdc382..0000000000
--- a/webrtc/modules/video_coding/main/test/vcm_payload_sink_factory.h
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <string>
-#include <vector>
-
-#include "webrtc/base/constructormagic.h"
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding_defines.h"
-#include "webrtc/modules/video_coding/main/test/rtp_player.h"
-
-class NullEventFactory;
-
-namespace webrtc {
-class Clock;
-class CriticalSectionWrapper;
-
-namespace rtpplayer {
-class VcmPayloadSinkFactory : public PayloadSinkFactoryInterface {
- public:
- VcmPayloadSinkFactory(const std::string& base_out_filename,
- Clock* clock, bool protection_enabled,
- VCMVideoProtection protection_method,
- int64_t rtt_ms, uint32_t render_delay_ms,
- uint32_t min_playout_delay_ms);
- virtual ~VcmPayloadSinkFactory();
-
- // PayloadSinkFactoryInterface
- virtual PayloadSinkInterface* Create(RtpStreamInterface* stream);
-
- int DecodeAndProcessAll(bool decode_dual_frame);
- bool ProcessAll();
- bool DecodeAll();
-
- private:
- class VcmPayloadSink;
- friend class VcmPayloadSink;
- typedef std::vector<VcmPayloadSink*> Sinks;
-
- void Remove(VcmPayloadSink* sink);
-
- std::string base_out_filename_;
- Clock* clock_;
- bool protection_enabled_;
- VCMVideoProtection protection_method_;
- int64_t rtt_ms_;
- uint32_t render_delay_ms_;
- uint32_t min_playout_delay_ms_;
- rtc::scoped_ptr<NullEventFactory> null_event_factory_;
- rtc::scoped_ptr<CriticalSectionWrapper> crit_sect_;
- Sinks sinks_;
-
- RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(VcmPayloadSinkFactory);
-};
-} // namespace rtpplayer
-} // namespace webrtc
diff --git a/webrtc/modules/video_coding/main/test/video_rtp_play.cc b/webrtc/modules/video_coding/main/test/video_rtp_play.cc
deleted file mode 100644
index 8460601bf5..0000000000
--- a/webrtc/modules/video_coding/main/test/video_rtp_play.cc
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_coding/main/test/receiver_tests.h"
-#include "webrtc/modules/video_coding/main/test/vcm_payload_sink_factory.h"
-#include "webrtc/system_wrappers/include/trace.h"
-#include "webrtc/test/testsupport/fileutils.h"
-
-namespace {
-
-const bool kConfigProtectionEnabled = true;
-const webrtc::VCMVideoProtection kConfigProtectionMethod =
- webrtc::kProtectionNack;
-const float kConfigLossRate = 0.0f;
-const bool kConfigReordering = false;
-const int64_t kConfigRttMs = 0;
-const uint32_t kConfigRenderDelayMs = 0;
-const uint32_t kConfigMinPlayoutDelayMs = 0;
-const int64_t kConfigMaxRuntimeMs = -1;
-const uint8_t kDefaultUlpFecPayloadType = 97;
-const uint8_t kDefaultRedPayloadType = 96;
-const uint8_t kDefaultVp8PayloadType = 100;
-} // namespace
-
-int RtpPlay(const CmdArgs& args) {
- std::string trace_file = webrtc::test::OutputPath() + "receiverTestTrace.txt";
- webrtc::Trace::CreateTrace();
- webrtc::Trace::SetTraceFile(trace_file.c_str());
- webrtc::Trace::set_level_filter(webrtc::kTraceAll);
-
- webrtc::rtpplayer::PayloadTypes payload_types;
- payload_types.push_back(webrtc::rtpplayer::PayloadCodecTuple(
- kDefaultUlpFecPayloadType, "ULPFEC", webrtc::kVideoCodecULPFEC));
- payload_types.push_back(webrtc::rtpplayer::PayloadCodecTuple(
- kDefaultRedPayloadType, "RED", webrtc::kVideoCodecRED));
- payload_types.push_back(webrtc::rtpplayer::PayloadCodecTuple(
- kDefaultVp8PayloadType, "VP8", webrtc::kVideoCodecVP8));
-
- std::string output_file = args.outputFile;
- if (output_file.empty())
- output_file = webrtc::test::OutputPath() + "RtpPlay_decoded.yuv";
-
- webrtc::SimulatedClock clock(0);
- webrtc::rtpplayer::VcmPayloadSinkFactory factory(output_file, &clock,
- kConfigProtectionEnabled, kConfigProtectionMethod, kConfigRttMs,
- kConfigRenderDelayMs, kConfigMinPlayoutDelayMs);
- rtc::scoped_ptr<webrtc::rtpplayer::RtpPlayerInterface> rtp_player(
- webrtc::rtpplayer::Create(args.inputFile, &factory, &clock, payload_types,
- kConfigLossRate, kConfigRttMs,
- kConfigReordering));
- if (rtp_player.get() == NULL) {
- return -1;
- }
-
- int ret = 0;
- while ((ret = rtp_player->NextPacket(clock.TimeInMilliseconds())) == 0) {
- ret = factory.DecodeAndProcessAll(true);
- if (ret < 0 || (kConfigMaxRuntimeMs > -1 &&
- clock.TimeInMilliseconds() >= kConfigMaxRuntimeMs)) {
- break;
- }
- clock.AdvanceTimeMilliseconds(1);
- }
-
- rtp_player->Print();
-
- switch (ret) {
- case 1:
- printf("Success\n");
- return 0;
- case -1:
- printf("Failed\n");
- return -1;
- case 0:
- printf("Timeout\n");
- return -1;
- }
-
- webrtc::Trace::ReturnTrace();
- return 0;
-}
diff --git a/webrtc/modules/video_coding/main/test/video_source.h b/webrtc/modules/video_coding/main/test/video_source.h
deleted file mode 100644
index 05deb4a39b..0000000000
--- a/webrtc/modules/video_coding/main/test/video_source.h
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_VIDEO_SOURCE_H_
-#define WEBRTC_MODULES_VIDEO_CODING_TEST_VIDEO_SOURCE_H_
-
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/typedefs.h"
-
-#include <string>
-
-enum VideoSize
- {
- kUndefined,
- kSQCIF, // 128*96 = 12 288
- kQQVGA, // 160*120 = 19 200
- kQCIF, // 176*144 = 25 344
- kCGA, // 320*200 = 64 000
- kQVGA, // 320*240 = 76 800
- kSIF, // 352*240 = 84 480
- kWQVGA, // 400*240 = 96 000
- kCIF, // 352*288 = 101 376
- kW288p, // 512*288 = 147 456 (WCIF)
- k448p, // 576*448 = 281 088
- kVGA, // 640*480 = 307 200
- k432p, // 720*432 = 311 040
- kW432p, // 768*432 = 331 776
- k4SIF, // 704*480 = 337 920
- kW448p, // 768*448 = 344 064
- kNTSC, // 720*480 = 345 600
- kFW448p, // 800*448 = 358 400
- kWVGA, // 800*480 = 384 000
- k4CIF, // 704*576 = 405 504
- kSVGA, // 800*600 = 480 000
- kW544p, // 960*544 = 522 240
- kW576p, // 1024*576 = 589 824 (W4CIF)
- kHD, // 960*720 = 691 200
- kXGA, // 1024*768 = 786 432
- kWHD, // 1280*720 = 921 600
- kFullHD, // 1440*1080 = 1 555 200
- kWFullHD, // 1920*1080 = 2 073 600
-
- kNumberOfVideoSizes
- };
-
-
-class VideoSource
-{
-public:
- VideoSource();
- VideoSource(std::string fileName, VideoSize size, float frameRate, webrtc::VideoType type = webrtc::kI420);
- VideoSource(std::string fileName, uint16_t width, uint16_t height,
- float frameRate = 30, webrtc::VideoType type = webrtc::kI420);
-
- std::string GetFileName() const { return _fileName; }
- uint16_t GetWidth() const { return _width; }
- uint16_t GetHeight() const { return _height; }
- webrtc::VideoType GetType() const { return _type; }
- float GetFrameRate() const { return _frameRate; }
- int GetWidthHeight( VideoSize size);
-
- // Returns the filename with the path (including the leading slash) removed.
- std::string GetName() const;
-
- size_t GetFrameLength() const;
-
-private:
- std::string _fileName;
- uint16_t _width;
- uint16_t _height;
- webrtc::VideoType _type;
- float _frameRate;
-};
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_TEST_VIDEO_SOURCE_H_
diff --git a/webrtc/modules/video_coding/media_opt_util.cc b/webrtc/modules/video_coding/media_opt_util.cc
new file mode 100644
index 0000000000..d57e9c8dd2
--- /dev/null
+++ b/webrtc/modules/video_coding/media_opt_util.cc
@@ -0,0 +1,682 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_coding/media_opt_util.h"
+
+#include <float.h>
+#include <limits.h>
+#include <math.h>
+
+#include <algorithm>
+
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/video_coding/codecs/vp8/include/vp8_common_types.h"
+#include "webrtc/modules/video_coding/include/video_coding_defines.h"
+#include "webrtc/modules/video_coding/fec_tables_xor.h"
+#include "webrtc/modules/video_coding/nack_fec_tables.h"
+
+namespace webrtc {
+// Max value of loss rates in off-line model
+static const int kPacketLossMax = 129;
+
+namespace media_optimization {
+
+VCMProtectionMethod::VCMProtectionMethod()
+ : _effectivePacketLoss(0),
+ _protectionFactorK(0),
+ _protectionFactorD(0),
+ _scaleProtKey(2.0f),
+ _maxPayloadSize(1460),
+ _qmRobustness(new VCMQmRobustness()),
+ _useUepProtectionK(false),
+ _useUepProtectionD(true),
+ _corrFecCost(1.0),
+ _type(kNone) {}
+
+VCMProtectionMethod::~VCMProtectionMethod() {
+ delete _qmRobustness;
+}
+void VCMProtectionMethod::UpdateContentMetrics(
+ const VideoContentMetrics* contentMetrics) {
+ _qmRobustness->UpdateContent(contentMetrics);
+}
+
+VCMNackFecMethod::VCMNackFecMethod(int64_t lowRttNackThresholdMs,
+ int64_t highRttNackThresholdMs)
+ : VCMFecMethod(),
+ _lowRttNackMs(lowRttNackThresholdMs),
+ _highRttNackMs(highRttNackThresholdMs),
+ _maxFramesFec(1) {
+ assert(lowRttNackThresholdMs >= -1 && highRttNackThresholdMs >= -1);
+ assert(highRttNackThresholdMs == -1 ||
+ lowRttNackThresholdMs <= highRttNackThresholdMs);
+ assert(lowRttNackThresholdMs > -1 || highRttNackThresholdMs == -1);
+ _type = kNackFec;
+}
+
+VCMNackFecMethod::~VCMNackFecMethod() {
+ //
+}
+bool VCMNackFecMethod::ProtectionFactor(
+ const VCMProtectionParameters* parameters) {
+ // Hybrid Nack FEC has three operational modes:
+ // 1. Low RTT (below kLowRttNackMs) - Nack only: Set FEC rate
+ // (_protectionFactorD) to zero. -1 means no FEC.
+ // 2. High RTT (above _highRttNackMs) - FEC Only: Keep FEC factors.
+ // -1 means always allow NACK.
+ // 3. Medium RTT values - Hybrid mode: We will only nack the
+ // residual following the decoding of the FEC (refer to JB logic). FEC
+ // delta protection factor will be adjusted based on the RTT.
+
+ // Otherwise: we count on FEC; if the RTT is below a threshold, then we
+ // nack the residual, based on a decision made in the JB.
+
+ // Compute the protection factors
+ VCMFecMethod::ProtectionFactor(parameters);
+ if (_lowRttNackMs == -1 || parameters->rtt < _lowRttNackMs) {
+ _protectionFactorD = 0;
+ VCMFecMethod::UpdateProtectionFactorD(_protectionFactorD);
+
+ // When in Hybrid mode (RTT range), adjust FEC rates based on the
+ // RTT (NACK effectiveness) - adjustment factor is in the range [0,1].
+ } else if (_highRttNackMs == -1 || parameters->rtt < _highRttNackMs) {
+ // TODO(mikhal): Disabling adjustment temporarily.
+ // uint16_t rttIndex = (uint16_t) parameters->rtt;
+ float adjustRtt = 1.0f; // (float)VCMNackFecTable[rttIndex] / 100.0f;
+
+ // Adjust FEC with NACK on (for delta frame only)
+ // table depends on RTT relative to rttMax (NACK Threshold)
+ _protectionFactorD = static_cast<uint8_t>(
+ adjustRtt * static_cast<float>(_protectionFactorD));
+ // update FEC rates after applying adjustment
+ VCMFecMethod::UpdateProtectionFactorD(_protectionFactorD);
+ }
+
+ return true;
+}
+
+int VCMNackFecMethod::ComputeMaxFramesFec(
+ const VCMProtectionParameters* parameters) {
+ if (parameters->numLayers > 2) {
+ // For more than 2 temporal layers we will only have FEC on the base layer,
+ // and the base layers will be pretty far apart. Therefore we force one
+ // frame FEC.
+ return 1;
+ }
+ // We set the max number of frames to base the FEC on so that on average
+ // we will have complete frames in one RTT. Note that this is an upper
+ // bound, and that the actual number of frames used for FEC is decided by the
+ // RTP module based on the actual number of packets and the protection factor.
+ float base_layer_framerate =
+ parameters->frameRate /
+ static_cast<float>(1 << (parameters->numLayers - 1));
+ int max_frames_fec = std::max(
+ static_cast<int>(2.0f * base_layer_framerate * parameters->rtt / 1000.0f +
+ 0.5f),
+ 1);
+ // |kUpperLimitFramesFec| is the upper limit on how many frames we
+ // allow any FEC to be based on.
+ if (max_frames_fec > kUpperLimitFramesFec) {
+ max_frames_fec = kUpperLimitFramesFec;
+ }
+ return max_frames_fec;
+}
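A worked example of the bound above, as a standalone sketch (the function name and driver are illustrative, not WebRTC API): with two temporal layers at 30 fps, the base layer runs at 15 fps, so a 100 ms RTT lets FEC span at most three frames, well under the kUpperLimitFramesFec clamp of 6.

#include <algorithm>
#include <cstdint>
#include <cstdio>

// Mirrors ComputeMaxFramesFec(): FEC may span as many base-layer frames as
// fit in two round-trip times, clamped to [1, kUpperLimitFramesFec].
int MaxFramesFecSketch(float frame_rate, int num_layers, int64_t rtt_ms) {
  const int kUpperLimitFramesFec = 6;
  if (num_layers > 2)
    return 1;  // FEC only covers the base layer; force single-frame FEC.
  float base_layer_framerate =
      frame_rate / static_cast<float>(1 << (num_layers - 1));
  int max_frames_fec = std::max(
      static_cast<int>(2.0f * base_layer_framerate * rtt_ms / 1000.0f + 0.5f),
      1);
  return std::min(max_frames_fec, kUpperLimitFramesFec);
}

int main() {
  // 30 fps, 2 layers -> 15 fps base layer; 100 ms RTT -> round(3.0) -> 3.
  std::printf("%d\n", MaxFramesFecSketch(30.0f, 2, 100));  // prints 3
}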
+
+int VCMNackFecMethod::MaxFramesFec() const {
+ return _maxFramesFec;
+}
+
+bool VCMNackFecMethod::BitRateTooLowForFec(
+ const VCMProtectionParameters* parameters) {
+ // Bitrate below which we turn off FEC, regardless of reported packet loss.
+  // The condition should depend on resolution and content. For now, use a
+  // threshold on bytes per frame, with some adjustment for frame size.
+ // The condition for turning off FEC is also based on other factors,
+ // such as |_numLayers|, |_maxFramesFec|, and |_rtt|.
+ int estimate_bytes_per_frame = 1000 * BitsPerFrame(parameters) / 8;
+ int max_bytes_per_frame = kMaxBytesPerFrameForFec;
+ int num_pixels = parameters->codecWidth * parameters->codecHeight;
+ if (num_pixels <= 352 * 288) {
+ max_bytes_per_frame = kMaxBytesPerFrameForFecLow;
+ } else if (num_pixels > 640 * 480) {
+ max_bytes_per_frame = kMaxBytesPerFrameForFecHigh;
+ }
+ // TODO(marpan): add condition based on maximum frames used for FEC,
+ // and expand condition based on frame size.
+ // Max round trip time threshold in ms.
+ const int64_t kMaxRttTurnOffFec = 200;
+ if (estimate_bytes_per_frame < max_bytes_per_frame &&
+ parameters->numLayers < 3 && parameters->rtt < kMaxRttTurnOffFec) {
+ return true;
+ }
+ return false;
+}
+
+bool VCMNackFecMethod::EffectivePacketLoss(
+ const VCMProtectionParameters* parameters) {
+ // Set the effective packet loss for encoder (based on FEC code).
+ // Compute the effective packet loss and residual packet loss due to FEC.
+ VCMFecMethod::EffectivePacketLoss(parameters);
+ return true;
+}
+
+bool VCMNackFecMethod::UpdateParameters(
+ const VCMProtectionParameters* parameters) {
+ ProtectionFactor(parameters);
+ EffectivePacketLoss(parameters);
+ _maxFramesFec = ComputeMaxFramesFec(parameters);
+ if (BitRateTooLowForFec(parameters)) {
+ _protectionFactorK = 0;
+ _protectionFactorD = 0;
+ }
+
+  // Protection/fec rates obtained above are defined relative to the total
+  // number of packets (total rate: source + fec). The FEC in the RTP module
+  // assumes the protection factor is defined relative to the source number
+  // of packets, so we convert the factor to reduce the mismatch between
+  // mediaOpt's rate and the actual one.
+ _protectionFactorK = VCMFecMethod::ConvertFECRate(_protectionFactorK);
+ _protectionFactorD = VCMFecMethod::ConvertFECRate(_protectionFactorD);
+
+ return true;
+}
+
+VCMNackMethod::VCMNackMethod() : VCMProtectionMethod() {
+ _type = kNack;
+}
+
+VCMNackMethod::~VCMNackMethod() {
+ //
+}
+
+bool VCMNackMethod::EffectivePacketLoss(
+ const VCMProtectionParameters* parameter) {
+ // Effective Packet Loss, NA in current version.
+ _effectivePacketLoss = 0;
+ return true;
+}
+
+bool VCMNackMethod::UpdateParameters(
+ const VCMProtectionParameters* parameters) {
+ // Compute the effective packet loss
+ EffectivePacketLoss(parameters);
+
+ // nackCost = (bitRate - nackCost) * (lossPr)
+ return true;
+}
+
+VCMFecMethod::VCMFecMethod() : VCMProtectionMethod() {
+ _type = kFec;
+}
+VCMFecMethod::~VCMFecMethod() {
+ //
+}
+
+uint8_t VCMFecMethod::BoostCodeRateKey(uint8_t packetFrameDelta,
+ uint8_t packetFrameKey) const {
+ uint8_t boostRateKey = 2;
+ // Default: ratio scales the FEC protection up for I frames
+ uint8_t ratio = 1;
+
+ if (packetFrameDelta > 0) {
+ ratio = (int8_t)(packetFrameKey / packetFrameDelta);
+ }
+ ratio = VCM_MAX(boostRateKey, ratio);
+
+ return ratio;
+}
+
+uint8_t VCMFecMethod::ConvertFECRate(uint8_t codeRateRTP) const {
+ return static_cast<uint8_t>(VCM_MIN(
+ 255,
+ (0.5 + 255.0 * codeRateRTP / static_cast<float>(255 - codeRateRTP))));
+}
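The conversion above can be read as: a protection factor f (out of 255) defined over the total packet count (source + FEC) corresponds to 255*f/(255-f) over source packets only. A standalone mirror, with illustrative names:

#include <algorithm>
#include <cstdint>
#include <cstdio>

// Mirrors ConvertFECRate(): re-express a FEC rate given relative to the
// total packet count as a rate relative to source packets, capped at 255.
uint8_t ConvertFecRateSketch(uint8_t rate_over_total) {
  return static_cast<uint8_t>(std::min(
      255.0, 0.5 + 255.0 * rate_over_total / (255.0 - rate_over_total)));
}

int main() {
  // If a third of all sent packets are FEC (85/255), the FEC stream is half
  // the size of the source stream (128/255).
  std::printf("%d\n", ConvertFecRateSketch(85));  // prints 128
}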
+
+// Update FEC with protectionFactorD
+void VCMFecMethod::UpdateProtectionFactorD(uint8_t protectionFactorD) {
+ _protectionFactorD = protectionFactorD;
+}
+
+// Update FEC with protectionFactorK
+void VCMFecMethod::UpdateProtectionFactorK(uint8_t protectionFactorK) {
+ _protectionFactorK = protectionFactorK;
+}
+
+bool VCMFecMethod::ProtectionFactor(const VCMProtectionParameters* parameters) {
+ // FEC PROTECTION SETTINGS: varies with packet loss and bitrate
+
+ // No protection if (filtered) packetLoss is 0
+ uint8_t packetLoss = (uint8_t)(255 * parameters->lossPr);
+ if (packetLoss == 0) {
+ _protectionFactorK = 0;
+ _protectionFactorD = 0;
+ return true;
+ }
+
+ // Parameters for FEC setting:
+  // first partition size, thresholds, table parameters, spatial resolution
+  // factor.
+
+ // First partition protection: ~ 20%
+ uint8_t firstPartitionProt = (uint8_t)(255 * 0.20);
+
+ // Minimum protection level needed to generate one FEC packet for one
+ // source packet/frame (in RTP sender)
+ uint8_t minProtLevelFec = 85;
+
+  // Threshold on packetLoss and bitRate/frameRate (=average #packets),
+ // above which we allocate protection to cover at least first partition.
+ uint8_t lossThr = 0;
+ uint8_t packetNumThr = 1;
+
+ // Parameters for range of rate index of table.
+ const uint8_t ratePar1 = 5;
+ const uint8_t ratePar2 = 49;
+
+ // Spatial resolution size, relative to a reference size.
+ float spatialSizeToRef =
+ static_cast<float>(parameters->codecWidth * parameters->codecHeight) /
+ (static_cast<float>(704 * 576));
+ // resolnFac: This parameter will generally increase/decrease the FEC rate
+ // (for fixed bitRate and packetLoss) based on system size.
+ // Use a smaller exponent (< 1) to control/soften system size effect.
+ const float resolnFac = 1.0 / powf(spatialSizeToRef, 0.3f);
+
+ const int bitRatePerFrame = BitsPerFrame(parameters);
+
+ // Average number of packets per frame (source and fec):
+ const uint8_t avgTotPackets =
+ 1 + (uint8_t)(static_cast<float>(bitRatePerFrame) * 1000.0 /
+ static_cast<float>(8.0 * _maxPayloadSize) +
+ 0.5);
+
+ // FEC rate parameters: for P and I frame
+ uint8_t codeRateDelta = 0;
+ uint8_t codeRateKey = 0;
+
+ // Get index for table: the FEC protection depends on an effective rate.
+ // The range on the rate index corresponds to rates (bps)
+ // from ~200k to ~8000k, for 30fps
+ const uint16_t effRateFecTable =
+ static_cast<uint16_t>(resolnFac * bitRatePerFrame);
+ uint8_t rateIndexTable = (uint8_t)VCM_MAX(
+ VCM_MIN((effRateFecTable - ratePar1) / ratePar1, ratePar2), 0);
+
+ // Restrict packet loss range to 50:
+ // current tables defined only up to 50%
+ if (packetLoss >= kPacketLossMax) {
+ packetLoss = kPacketLossMax - 1;
+ }
+ uint16_t indexTable = rateIndexTable * kPacketLossMax + packetLoss;
+
+ // Check on table index
+ assert(indexTable < kSizeCodeRateXORTable);
+
+ // Protection factor for P frame
+ codeRateDelta = kCodeRateXORTable[indexTable];
+
+ if (packetLoss > lossThr && avgTotPackets > packetNumThr) {
+ // Set a minimum based on first partition size.
+ if (codeRateDelta < firstPartitionProt) {
+ codeRateDelta = firstPartitionProt;
+ }
+ }
+
+ // Check limit on amount of protection for P frame; 50% is max.
+ if (codeRateDelta >= kPacketLossMax) {
+ codeRateDelta = kPacketLossMax - 1;
+ }
+
+ float adjustFec = 1.0f;
+ // Avoid additional adjustments when layers are active.
+  // TODO(mikhal/marco): Update adjustment based on layer info.
+ if (parameters->numLayers == 1) {
+ adjustFec = _qmRobustness->AdjustFecFactor(
+ codeRateDelta, parameters->bitRate, parameters->frameRate,
+ parameters->rtt, packetLoss);
+ }
+
+ codeRateDelta = static_cast<uint8_t>(codeRateDelta * adjustFec);
+
+ // For Key frame:
+ // Effectively at a higher rate, so we scale/boost the rate
+ // The boost factor may depend on several factors: ratio of packet
+ // number of I to P frames, how much protection placed on P frames, etc.
+ const uint8_t packetFrameDelta = (uint8_t)(0.5 + parameters->packetsPerFrame);
+ const uint8_t packetFrameKey =
+ (uint8_t)(0.5 + parameters->packetsPerFrameKey);
+ const uint8_t boostKey = BoostCodeRateKey(packetFrameDelta, packetFrameKey);
+
+ rateIndexTable = (uint8_t)VCM_MAX(
+ VCM_MIN(1 + (boostKey * effRateFecTable - ratePar1) / ratePar1, ratePar2),
+ 0);
+ uint16_t indexTableKey = rateIndexTable * kPacketLossMax + packetLoss;
+
+  // Clamp below the table size so the lookup (and assert) stay in bounds.
+  indexTableKey = VCM_MIN(indexTableKey, kSizeCodeRateXORTable - 1);
+
+ // Check on table index
+ assert(indexTableKey < kSizeCodeRateXORTable);
+
+ // Protection factor for I frame
+ codeRateKey = kCodeRateXORTable[indexTableKey];
+
+ // Boosting for Key frame.
+ int boostKeyProt = _scaleProtKey * codeRateDelta;
+ if (boostKeyProt >= kPacketLossMax) {
+ boostKeyProt = kPacketLossMax - 1;
+ }
+
+ // Make sure I frame protection is at least larger than P frame protection,
+ // and at least as high as filtered packet loss.
+ codeRateKey = static_cast<uint8_t>(
+ VCM_MAX(packetLoss, VCM_MAX(boostKeyProt, codeRateKey)));
+
+ // Check limit on amount of protection for I frame: 50% is max.
+ if (codeRateKey >= kPacketLossMax) {
+ codeRateKey = kPacketLossMax - 1;
+ }
+
+ _protectionFactorK = codeRateKey;
+ _protectionFactorD = codeRateDelta;
+
+  // Generally there is a rate mismatch between the FEC cost estimated
+  // in mediaOpt and the actual FEC cost sent out in the RTP module.
+  // This is more significant at low rates (small # of source packets), where
+  // the granularity of the FEC decreases. In this case, non-zero protection
+  // in mediaOpt may generate 0 FEC packets in the RTP sender (since the
+  // actual #FEC is based on rounding off protectionFactor on the actual
+  // source packet number). The correction factor (_corrFecCost) attempts to
+  // correct this, at least for cases of low rates and low protection levels.
+
+ float numPacketsFl = 1.0f + (static_cast<float>(bitRatePerFrame) * 1000.0 /
+ static_cast<float>(8.0 * _maxPayloadSize) +
+ 0.5);
+
+ const float estNumFecGen =
+ 0.5f + static_cast<float>(_protectionFactorD * numPacketsFl / 255.0f);
+
+ // We reduce cost factor (which will reduce overhead for FEC and
+ // hybrid method) and not the protectionFactor.
+ _corrFecCost = 1.0f;
+ if (estNumFecGen < 1.1f && _protectionFactorD < minProtLevelFec) {
+ _corrFecCost = 0.5f;
+ }
+ if (estNumFecGen < 0.9f && _protectionFactorD < minProtLevelFec) {
+ _corrFecCost = 0.0f;
+ }
+
+ // TODO(marpan): Set the UEP protection on/off for Key and Delta frames
+ _useUepProtectionK = _qmRobustness->SetUepProtection(
+ codeRateKey, parameters->bitRate, packetLoss, 0);
+
+ _useUepProtectionD = _qmRobustness->SetUepProtection(
+ codeRateDelta, parameters->bitRate, packetLoss, 1);
+
+ // DONE WITH FEC PROTECTION SETTINGS
+ return true;
+}
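For reference, the table lookup in the middle of this function reduces to the small calculation below (a sketch; constants copied from above, and the table shape of 50 rate rows by 129 loss columns is inferred from the clamps, not quoted from fec_tables_xor.h):

#include <algorithm>
#include <cstdint>

// Sketch of the kCodeRateXORTable indexing used in ProtectionFactor().
uint16_t FecTableIndexSketch(int eff_rate, int packet_loss) {
  const int kRatePar1 = 5, kRatePar2 = 49;  // rate-index range parameters
  const int kPacketLossMaxLocal = 129;      // loss axis of the table
  int rate_index =
      std::max(std::min((eff_rate - kRatePar1) / kRatePar1, kRatePar2), 0);
  // Tables are only defined up to ~50% loss; clamp the loss axis.
  packet_loss = std::min(packet_loss, kPacketLossMaxLocal - 1);
  // Row-major lookup: one row of 129 loss entries per rate index.
  return static_cast<uint16_t>(rate_index * kPacketLossMaxLocal + packet_loss);
}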
+
+int VCMFecMethod::BitsPerFrame(const VCMProtectionParameters* parameters) {
+ // When temporal layers are available FEC will only be applied on the base
+ // layer.
+ const float bitRateRatio =
+ kVp8LayerRateAlloction[parameters->numLayers - 1][0];
+ float frameRateRatio = powf(1 / 2.0, parameters->numLayers - 1);
+ float bitRate = parameters->bitRate * bitRateRatio;
+ float frameRate = parameters->frameRate * frameRateRatio;
+
+ // TODO(mikhal): Update factor following testing.
+ float adjustmentFactor = 1;
+
+ // Average bits per frame (units of kbits)
+ return static_cast<int>(adjustmentFactor * bitRate / frameRate);
+}
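A worked example for the allocation above, as a standalone sketch (the 0.6 base-layer share is assumed here to match kVp8LayerRateAlloction for two layers; treat it as an illustrative value): at 600 kbps and 30 fps with two layers, the base layer carries 0.6 x 600 kbps at 15 fps, i.e. 24 kbits per frame.

#include <cmath>
#include <cstdio>

// Mirrors BitsPerFrame(): rate scaled by the base-layer share, frame rate
// halved once per extra temporal layer.
int BitsPerFrameSketch(float bitrate_kbps, float frame_rate, int num_layers,
                       float base_layer_rate_share) {
  float frame_rate_ratio = std::pow(0.5f, num_layers - 1);
  return static_cast<int>(base_layer_rate_share * bitrate_kbps /
                          (frame_rate * frame_rate_ratio));
}

int main() {
  std::printf("%d\n", BitsPerFrameSketch(600.0f, 30.0f, 2, 0.6f));  // 24 kbits
}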
+
+bool VCMFecMethod::EffectivePacketLoss(
+ const VCMProtectionParameters* parameters) {
+ // Effective packet loss to encoder is based on RPL (residual packet loss)
+ // this is a soft setting based on degree of FEC protection
+ // RPL = received/input packet loss - average_FEC_recovery
+ // note: received/input packet loss may be filtered based on FilteredLoss
+
+ // Effective Packet Loss, NA in current version.
+ _effectivePacketLoss = 0;
+
+ return true;
+}
+
+bool VCMFecMethod::UpdateParameters(const VCMProtectionParameters* parameters) {
+ // Compute the protection factor
+ ProtectionFactor(parameters);
+
+ // Compute the effective packet loss
+ EffectivePacketLoss(parameters);
+
+  // Protection/fec rates obtained above are defined relative to the total
+  // number of packets (total rate: source + fec). The FEC in the RTP module
+  // assumes the protection factor is defined relative to the source number
+  // of packets, so we convert the factor to reduce the mismatch between
+  // mediaOpt's suggested rate and the actual rate.
+ _protectionFactorK = ConvertFECRate(_protectionFactorK);
+ _protectionFactorD = ConvertFECRate(_protectionFactorD);
+
+ return true;
+}
+VCMLossProtectionLogic::VCMLossProtectionLogic(int64_t nowMs)
+ : _currentParameters(),
+ _rtt(0),
+ _lossPr(0.0f),
+ _bitRate(0.0f),
+ _frameRate(0.0f),
+ _keyFrameSize(0.0f),
+ _fecRateKey(0),
+ _fecRateDelta(0),
+ _lastPrUpdateT(0),
+ _lossPr255(0.9999f),
+ _lossPrHistory(),
+ _shortMaxLossPr255(0),
+ _packetsPerFrame(0.9999f),
+ _packetsPerFrameKey(0.9999f),
+ _codecWidth(0),
+ _codecHeight(0),
+ _numLayers(1) {
+ Reset(nowMs);
+}
+
+VCMLossProtectionLogic::~VCMLossProtectionLogic() {
+ Release();
+}
+
+void VCMLossProtectionLogic::SetMethod(
+ enum VCMProtectionMethodEnum newMethodType) {
+ if (_selectedMethod && _selectedMethod->Type() == newMethodType)
+ return;
+
+ switch (newMethodType) {
+ case kNack:
+ _selectedMethod.reset(new VCMNackMethod());
+ break;
+ case kFec:
+ _selectedMethod.reset(new VCMFecMethod());
+ break;
+ case kNackFec:
+ _selectedMethod.reset(new VCMNackFecMethod(kLowRttNackMs, -1));
+ break;
+ case kNone:
+ _selectedMethod.reset();
+ break;
+ }
+ UpdateMethod();
+}
+
+void VCMLossProtectionLogic::UpdateRtt(int64_t rtt) {
+ _rtt = rtt;
+}
+
+void VCMLossProtectionLogic::UpdateMaxLossHistory(uint8_t lossPr255,
+ int64_t now) {
+ if (_lossPrHistory[0].timeMs >= 0 &&
+ now - _lossPrHistory[0].timeMs < kLossPrShortFilterWinMs) {
+ if (lossPr255 > _shortMaxLossPr255) {
+ _shortMaxLossPr255 = lossPr255;
+ }
+ } else {
+ // Only add a new value to the history once a second
+ if (_lossPrHistory[0].timeMs == -1) {
+ // First, no shift
+ _shortMaxLossPr255 = lossPr255;
+ } else {
+ // Shift
+ for (int32_t i = (kLossPrHistorySize - 2); i >= 0; i--) {
+ _lossPrHistory[i + 1].lossPr255 = _lossPrHistory[i].lossPr255;
+ _lossPrHistory[i + 1].timeMs = _lossPrHistory[i].timeMs;
+ }
+ }
+ if (_shortMaxLossPr255 == 0) {
+ _shortMaxLossPr255 = lossPr255;
+ }
+
+ _lossPrHistory[0].lossPr255 = _shortMaxLossPr255;
+ _lossPrHistory[0].timeMs = now;
+ _shortMaxLossPr255 = 0;
+ }
+}
+
+uint8_t VCMLossProtectionLogic::MaxFilteredLossPr(int64_t nowMs) const {
+ uint8_t maxFound = _shortMaxLossPr255;
+ if (_lossPrHistory[0].timeMs == -1) {
+ return maxFound;
+ }
+ for (int32_t i = 0; i < kLossPrHistorySize; i++) {
+ if (_lossPrHistory[i].timeMs == -1) {
+ break;
+ }
+ if (nowMs - _lossPrHistory[i].timeMs >
+ kLossPrHistorySize * kLossPrShortFilterWinMs) {
+ // This sample (and all samples after this) is too old
+ break;
+ }
+ if (_lossPrHistory[i].lossPr255 > maxFound) {
+ // This sample is the largest one this far into the history
+ maxFound = _lossPrHistory[i].lossPr255;
+ }
+ }
+ return maxFound;
+}
+
+uint8_t VCMLossProtectionLogic::FilteredLoss(int64_t nowMs,
+ FilterPacketLossMode filter_mode,
+ uint8_t lossPr255) {
+ // Update the max window filter.
+ UpdateMaxLossHistory(lossPr255, nowMs);
+
+ // Update the recursive average filter.
+ _lossPr255.Apply(static_cast<float>(nowMs - _lastPrUpdateT),
+ static_cast<float>(lossPr255));
+ _lastPrUpdateT = nowMs;
+
+ // Filtered loss: default is received loss (no filtering).
+ uint8_t filtered_loss = lossPr255;
+
+ switch (filter_mode) {
+ case kNoFilter:
+ break;
+ case kAvgFilter:
+ filtered_loss = static_cast<uint8_t>(_lossPr255.filtered() + 0.5);
+ break;
+ case kMaxFilter:
+ filtered_loss = MaxFilteredLossPr(nowMs);
+ break;
+ }
+
+ return filtered_loss;
+}
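A minimal usage sketch of the filter above (assuming a VCMLossProtectionLogic instance constructed as declared in the header added later in this change): feed each received loss report in, and let the max-window mode return the worst one-second loss maximum seen over the last kLossPrHistorySize seconds (ten seconds with the defaults).

#include "webrtc/modules/video_coding/media_opt_util.h"

// Sketch: drive FilteredLoss() with the max-window mode, as the media
// optimization code does when computing robustness settings.
uint8_t MaxWindowLossSketch(
    webrtc::media_optimization::VCMLossProtectionLogic* logic,
    int64_t now_ms,
    uint8_t reported_loss_255) {
  return logic->FilteredLoss(now_ms,
                             webrtc::media_optimization::kMaxFilter,
                             reported_loss_255);
}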
+
+void VCMLossProtectionLogic::UpdateFilteredLossPr(uint8_t packetLossEnc) {
+ _lossPr = static_cast<float>(packetLossEnc) / 255.0;
+}
+
+void VCMLossProtectionLogic::UpdateBitRate(float bitRate) {
+ _bitRate = bitRate;
+}
+
+void VCMLossProtectionLogic::UpdatePacketsPerFrame(float nPackets,
+ int64_t nowMs) {
+ _packetsPerFrame.Apply(static_cast<float>(nowMs - _lastPacketPerFrameUpdateT),
+ nPackets);
+ _lastPacketPerFrameUpdateT = nowMs;
+}
+
+void VCMLossProtectionLogic::UpdatePacketsPerFrameKey(float nPackets,
+ int64_t nowMs) {
+ _packetsPerFrameKey.Apply(
+ static_cast<float>(nowMs - _lastPacketPerFrameUpdateTKey), nPackets);
+ _lastPacketPerFrameUpdateTKey = nowMs;
+}
+
+void VCMLossProtectionLogic::UpdateKeyFrameSize(float keyFrameSize) {
+ _keyFrameSize = keyFrameSize;
+}
+
+void VCMLossProtectionLogic::UpdateFrameSize(uint16_t width, uint16_t height) {
+ _codecWidth = width;
+ _codecHeight = height;
+}
+
+void VCMLossProtectionLogic::UpdateNumLayers(int numLayers) {
+ _numLayers = (numLayers == 0) ? 1 : numLayers;
+}
+
+bool VCMLossProtectionLogic::UpdateMethod() {
+ if (!_selectedMethod)
+ return false;
+ _currentParameters.rtt = _rtt;
+ _currentParameters.lossPr = _lossPr;
+ _currentParameters.bitRate = _bitRate;
+ _currentParameters.frameRate = _frameRate; // rename actual frame rate?
+ _currentParameters.keyFrameSize = _keyFrameSize;
+ _currentParameters.fecRateDelta = _fecRateDelta;
+ _currentParameters.fecRateKey = _fecRateKey;
+ _currentParameters.packetsPerFrame = _packetsPerFrame.filtered();
+ _currentParameters.packetsPerFrameKey = _packetsPerFrameKey.filtered();
+ _currentParameters.codecWidth = _codecWidth;
+ _currentParameters.codecHeight = _codecHeight;
+ _currentParameters.numLayers = _numLayers;
+ return _selectedMethod->UpdateParameters(&_currentParameters);
+}
+
+VCMProtectionMethod* VCMLossProtectionLogic::SelectedMethod() const {
+ return _selectedMethod.get();
+}
+
+VCMProtectionMethodEnum VCMLossProtectionLogic::SelectedType() const {
+ return _selectedMethod ? _selectedMethod->Type() : kNone;
+}
+
+void VCMLossProtectionLogic::Reset(int64_t nowMs) {
+ _lastPrUpdateT = nowMs;
+ _lastPacketPerFrameUpdateT = nowMs;
+ _lastPacketPerFrameUpdateTKey = nowMs;
+ _lossPr255.Reset(0.9999f);
+ _packetsPerFrame.Reset(0.9999f);
+ _fecRateDelta = _fecRateKey = 0;
+ for (int32_t i = 0; i < kLossPrHistorySize; i++) {
+ _lossPrHistory[i].lossPr255 = 0;
+ _lossPrHistory[i].timeMs = -1;
+ }
+ _shortMaxLossPr255 = 0;
+ Release();
+}
+
+void VCMLossProtectionLogic::Release() {
+ _selectedMethod.reset();
+}
+
+} // namespace media_optimization
+} // namespace webrtc
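Taken together, a typical driving sequence for this class looks roughly like the sketch below (interface as declared in media_opt_util.h in the next hunk; the values are illustrative and error handling is omitted):

#include "webrtc/modules/video_coding/media_opt_util.h"

using namespace webrtc::media_optimization;

void ProtectionUpdateSketch(VCMLossProtectionLogic* logic, int64_t now_ms) {
  logic->SetMethod(kNackFec);      // hybrid NACK/FEC
  logic->UpdateRtt(100);           // round-trip time in milliseconds
  logic->UpdateBitRate(600.0f);    // target bit rate in kbps
  logic->UpdateFrameRate(30.0f);   // actual/sent frame rate
  logic->UpdateFrameSize(640, 480);
  logic->UpdateNumLayers(1);
  // Filter the raw loss report (26/255 is roughly 10% loss).
  uint8_t loss = logic->FilteredLoss(now_ms, kMaxFilter, 26);
  logic->UpdateFilteredLossPr(loss);
  if (logic->UpdateMethod()) {
    VCMProtectionMethod* method = logic->SelectedMethod();
    // Per-255 FEC factors the RTP module would apply to key/delta frames.
    uint8_t fec_key = method->RequiredProtectionFactorK();
    uint8_t fec_delta = method->RequiredProtectionFactorD();
    (void)fec_key;
    (void)fec_delta;
  }
}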
diff --git a/webrtc/modules/video_coding/media_opt_util.h b/webrtc/modules/video_coding/media_opt_util.h
new file mode 100644
index 0000000000..a016a03eab
--- /dev/null
+++ b/webrtc/modules/video_coding/media_opt_util.h
@@ -0,0 +1,361 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_MEDIA_OPT_UTIL_H_
+#define WEBRTC_MODULES_VIDEO_CODING_MEDIA_OPT_UTIL_H_
+
+#include <math.h>
+#include <stdlib.h>
+
+#include "webrtc/base/exp_filter.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/video_coding/internal_defines.h"
+#include "webrtc/modules/video_coding/qm_select.h"
+#include "webrtc/system_wrappers/include/trace.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+namespace media_optimization {
+
+// Number of time periods used for (max) window filter for packet loss
+// TODO(marpan): set reasonable window size for filtered packet loss,
+// adjustment should be based on logged/real data of loss stats/correlation.
+enum { kLossPrHistorySize = 10 };
+
+// 1000 ms, total filter length is (kLossPrHistorySize * 1000) ms
+enum { kLossPrShortFilterWinMs = 1000 };
+
+// The type of filter used on the received packet loss reports.
+enum FilterPacketLossMode {
+ kNoFilter, // No filtering on received loss.
+ kAvgFilter, // Recursive average filter.
+ kMaxFilter // Max-window filter, over the time interval of:
+ // (kLossPrHistorySize * kLossPrShortFilterWinMs) ms.
+};
+
+// Thresholds for hybrid NACK/FEC
+// common to media optimization and the jitter buffer.
+const int64_t kLowRttNackMs = 20;
+
+struct VCMProtectionParameters {
+ VCMProtectionParameters()
+ : rtt(0),
+ lossPr(0.0f),
+ bitRate(0.0f),
+ packetsPerFrame(0.0f),
+ packetsPerFrameKey(0.0f),
+ frameRate(0.0f),
+ keyFrameSize(0.0f),
+ fecRateDelta(0),
+ fecRateKey(0),
+ codecWidth(0),
+ codecHeight(0),
+ numLayers(1) {}
+
+ int64_t rtt;
+ float lossPr;
+ float bitRate;
+ float packetsPerFrame;
+ float packetsPerFrameKey;
+ float frameRate;
+ float keyFrameSize;
+ uint8_t fecRateDelta;
+ uint8_t fecRateKey;
+ uint16_t codecWidth;
+ uint16_t codecHeight;
+ int numLayers;
+};
+
+/******************************/
+/* VCMProtectionMethod class */
+/******************************/
+
+enum VCMProtectionMethodEnum { kNack, kFec, kNackFec, kNone };
+
+class VCMLossProbabilitySample {
+ public:
+ VCMLossProbabilitySample() : lossPr255(0), timeMs(-1) {}
+
+ uint8_t lossPr255;
+ int64_t timeMs;
+};
+
+class VCMProtectionMethod {
+ public:
+ VCMProtectionMethod();
+ virtual ~VCMProtectionMethod();
+
+ // Updates the efficiency of the method using the parameters provided
+ //
+ // Input:
+ // - parameters : Parameters used to calculate efficiency
+ //
+ // Return value : True if this method is recommended in
+ // the given conditions.
+ virtual bool UpdateParameters(const VCMProtectionParameters* parameters) = 0;
+
+ // Returns the protection type
+ //
+ // Return value : The protection type
+ enum VCMProtectionMethodEnum Type() const { return _type; }
+
+ // Returns the effective packet loss for ER, required by this protection
+ // method
+ //
+ // Return value : Required effective packet loss
+ virtual uint8_t RequiredPacketLossER() { return _effectivePacketLoss; }
+
+ // Extracts the FEC protection factor for Key frame, required by this
+ // protection method
+ //
+ // Return value : Required protectionFactor for Key frame
+ virtual uint8_t RequiredProtectionFactorK() { return _protectionFactorK; }
+
+ // Extracts the FEC protection factor for Delta frame, required by this
+ // protection method
+ //
+ // Return value : Required protectionFactor for delta frame
+ virtual uint8_t RequiredProtectionFactorD() { return _protectionFactorD; }
+
+ // Extracts whether the FEC Unequal protection (UEP) is used for Key frame.
+ //
+ // Return value : Required Unequal protection on/off state.
+ virtual bool RequiredUepProtectionK() { return _useUepProtectionK; }
+
+  // Extracts whether the FEC Unequal protection (UEP) is used for Delta
+ // frame.
+ //
+ // Return value : Required Unequal protection on/off state.
+ virtual bool RequiredUepProtectionD() { return _useUepProtectionD; }
+
+ virtual int MaxFramesFec() const { return 1; }
+
+ // Updates content metrics
+ void UpdateContentMetrics(const VideoContentMetrics* contentMetrics);
+
+ protected:
+ uint8_t _effectivePacketLoss;
+ uint8_t _protectionFactorK;
+ uint8_t _protectionFactorD;
+  // Scale for boosting the protection of key frames relative to delta.
+ float _scaleProtKey;
+ int32_t _maxPayloadSize;
+
+ VCMQmRobustness* _qmRobustness;
+ bool _useUepProtectionK;
+ bool _useUepProtectionD;
+ float _corrFecCost;
+ enum VCMProtectionMethodEnum _type;
+};
+
+class VCMNackMethod : public VCMProtectionMethod {
+ public:
+ VCMNackMethod();
+ virtual ~VCMNackMethod();
+ virtual bool UpdateParameters(const VCMProtectionParameters* parameters);
+ // Get the effective packet loss
+ bool EffectivePacketLoss(const VCMProtectionParameters* parameter);
+};
+
+class VCMFecMethod : public VCMProtectionMethod {
+ public:
+ VCMFecMethod();
+ virtual ~VCMFecMethod();
+ virtual bool UpdateParameters(const VCMProtectionParameters* parameters);
+ // Get the effective packet loss for ER
+ bool EffectivePacketLoss(const VCMProtectionParameters* parameters);
+ // Get the FEC protection factors
+ bool ProtectionFactor(const VCMProtectionParameters* parameters);
+ // Get the boost for key frame protection
+ uint8_t BoostCodeRateKey(uint8_t packetFrameDelta,
+ uint8_t packetFrameKey) const;
+  // Convert the rate from one defined relative to the total packet count
+  // (source + FEC) to one defined relative to the source packets only.
+ uint8_t ConvertFECRate(uint8_t codeRate) const;
+ // Get the average effective recovery from FEC: for random loss model
+ float AvgRecoveryFEC(const VCMProtectionParameters* parameters) const;
+ // Update FEC with protectionFactorD
+ void UpdateProtectionFactorD(uint8_t protectionFactorD);
+ // Update FEC with protectionFactorK
+ void UpdateProtectionFactorK(uint8_t protectionFactorK);
+ // Compute the bits per frame. Account for temporal layers when applicable.
+ int BitsPerFrame(const VCMProtectionParameters* parameters);
+
+ protected:
+ enum { kUpperLimitFramesFec = 6 };
+ // Thresholds values for the bytes/frame and round trip time, below which we
+ // may turn off FEC, depending on |_numLayers| and |_maxFramesFec|.
+ // Max bytes/frame for VGA, corresponds to ~140k at 25fps.
+ enum { kMaxBytesPerFrameForFec = 700 };
+ // Max bytes/frame for CIF and lower: corresponds to ~80k at 25fps.
+ enum { kMaxBytesPerFrameForFecLow = 400 };
+ // Max bytes/frame for frame size larger than VGA, ~200k at 25fps.
+ enum { kMaxBytesPerFrameForFecHigh = 1000 };
+};
+
+class VCMNackFecMethod : public VCMFecMethod {
+ public:
+ VCMNackFecMethod(int64_t lowRttNackThresholdMs,
+ int64_t highRttNackThresholdMs);
+ virtual ~VCMNackFecMethod();
+ virtual bool UpdateParameters(const VCMProtectionParameters* parameters);
+ // Get the effective packet loss for ER
+ bool EffectivePacketLoss(const VCMProtectionParameters* parameters);
+ // Get the protection factors
+ bool ProtectionFactor(const VCMProtectionParameters* parameters);
+ // Get the max number of frames the FEC is allowed to be based on.
+ int MaxFramesFec() const;
+ // Turn off the FEC based on low bitrate and other factors.
+ bool BitRateTooLowForFec(const VCMProtectionParameters* parameters);
+
+ private:
+ int ComputeMaxFramesFec(const VCMProtectionParameters* parameters);
+
+ int64_t _lowRttNackMs;
+ int64_t _highRttNackMs;
+ int _maxFramesFec;
+};
+
+class VCMLossProtectionLogic {
+ public:
+ explicit VCMLossProtectionLogic(int64_t nowMs);
+ ~VCMLossProtectionLogic();
+
+ // Set the protection method to be used
+ //
+ // Input:
+ // - newMethodType : New requested protection method type. If one
+ // is already set, it will be deleted and replaced
+ void SetMethod(VCMProtectionMethodEnum newMethodType);
+
+ // Update the round-trip time
+ //
+ // Input:
+  //          - rtt : Round-trip time in milliseconds.
+ void UpdateRtt(int64_t rtt);
+
+ // Update the filtered packet loss.
+ //
+ // Input:
+ // - packetLossEnc : The reported packet loss filtered
+ // (max window or average)
+ void UpdateFilteredLossPr(uint8_t packetLossEnc);
+
+ // Update the current target bit rate.
+ //
+ // Input:
+ // - bitRate : The current target bit rate in kbits/s
+ void UpdateBitRate(float bitRate);
+
+ // Update the number of packets per frame estimate, for delta frames
+ //
+ // Input:
+ // - nPackets : Number of packets in the latest sent frame.
+ void UpdatePacketsPerFrame(float nPackets, int64_t nowMs);
+
+ // Update the number of packets per frame estimate, for key frames
+ //
+ // Input:
+  //          - nPackets : Number of packets in the latest sent frame.
+ void UpdatePacketsPerFrameKey(float nPackets, int64_t nowMs);
+
+ // Update the keyFrameSize estimate
+ //
+ // Input:
+ // - keyFrameSize : The size of the latest sent key frame.
+ void UpdateKeyFrameSize(float keyFrameSize);
+
+ // Update the frame rate
+ //
+ // Input:
+ // - frameRate : The current target frame rate.
+ void UpdateFrameRate(float frameRate) { _frameRate = frameRate; }
+
+ // Update the frame size
+ //
+ // Input:
+ // - width : The codec frame width.
+ // - height : The codec frame height.
+ void UpdateFrameSize(uint16_t width, uint16_t height);
+
+ // Update the number of active layers
+ //
+ // Input:
+ // - numLayers : Number of layers used.
+ void UpdateNumLayers(int numLayers);
+
+  // The amount of packet loss to cover with FEC.
+  //
+  // Input:
+  //          - fecRateKey : Packet loss to cover with FEC when
+  //                              sending key frames.
+  //          - fecRateDelta : Packet loss to cover with FEC when
+  //                              sending delta frames.
+ void UpdateFECRates(uint8_t fecRateKey, uint8_t fecRateDelta) {
+ _fecRateKey = fecRateKey;
+ _fecRateDelta = fecRateDelta;
+ }
+
+ // Update the protection methods with the current VCMProtectionParameters
+ // and set the requested protection settings.
+ // Return value : Returns true on update
+ bool UpdateMethod();
+
+ // Returns the method currently selected.
+ //
+ // Return value : The protection method currently selected.
+ VCMProtectionMethod* SelectedMethod() const;
+
+ // Return the protection type of the currently selected method
+ VCMProtectionMethodEnum SelectedType() const;
+
+ // Updates the filtered loss for the average and max window packet loss,
+ // and returns the filtered loss probability in the interval [0, 255].
+ // The returned filtered loss value depends on the parameter |filter_mode|.
+ // The input parameter |lossPr255| is the received packet loss.
+
+ // Return value : The filtered loss probability
+ uint8_t FilteredLoss(int64_t nowMs,
+ FilterPacketLossMode filter_mode,
+ uint8_t lossPr255);
+
+ void Reset(int64_t nowMs);
+
+ void Release();
+
+ private:
+  // Updates the max-window loss history with the latest loss report.
+ void UpdateMaxLossHistory(uint8_t lossPr255, int64_t now);
+ uint8_t MaxFilteredLossPr(int64_t nowMs) const;
+ rtc::scoped_ptr<VCMProtectionMethod> _selectedMethod;
+ VCMProtectionParameters _currentParameters;
+ int64_t _rtt;
+ float _lossPr;
+ float _bitRate;
+ float _frameRate;
+ float _keyFrameSize;
+ uint8_t _fecRateKey;
+ uint8_t _fecRateDelta;
+ int64_t _lastPrUpdateT;
+ int64_t _lastPacketPerFrameUpdateT;
+ int64_t _lastPacketPerFrameUpdateTKey;
+ rtc::ExpFilter _lossPr255;
+ VCMLossProbabilitySample _lossPrHistory[kLossPrHistorySize];
+ uint8_t _shortMaxLossPr255;
+ rtc::ExpFilter _packetsPerFrame;
+ rtc::ExpFilter _packetsPerFrameKey;
+ uint16_t _codecWidth;
+ uint16_t _codecHeight;
+ int _numLayers;
+};
+
+} // namespace media_optimization
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_MEDIA_OPT_UTIL_H_
diff --git a/webrtc/modules/video_coding/media_optimization.cc b/webrtc/modules/video_coding/media_optimization.cc
new file mode 100644
index 0000000000..a234a06f9b
--- /dev/null
+++ b/webrtc/modules/video_coding/media_optimization.cc
@@ -0,0 +1,633 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_coding/media_optimization.h"
+
+#include "webrtc/base/logging.h"
+#include "webrtc/modules/video_coding/content_metrics_processing.h"
+#include "webrtc/modules/video_coding/qm_select.h"
+#include "webrtc/modules/video_coding/utility/frame_dropper.h"
+#include "webrtc/system_wrappers/include/clock.h"
+
+namespace webrtc {
+namespace media_optimization {
+namespace {
+void UpdateProtectionCallback(
+ VCMProtectionMethod* selected_method,
+ uint32_t* video_rate_bps,
+ uint32_t* nack_overhead_rate_bps,
+ uint32_t* fec_overhead_rate_bps,
+ VCMProtectionCallback* video_protection_callback) {
+ FecProtectionParams delta_fec_params;
+ FecProtectionParams key_fec_params;
+ // Get the FEC code rate for Key frames (set to 0 when NA).
+ key_fec_params.fec_rate = selected_method->RequiredProtectionFactorK();
+
+ // Get the FEC code rate for Delta frames (set to 0 when NA).
+ delta_fec_params.fec_rate = selected_method->RequiredProtectionFactorD();
+
+ // Get the FEC-UEP protection status for Key frames: UEP on/off.
+ key_fec_params.use_uep_protection = selected_method->RequiredUepProtectionK();
+
+ // Get the FEC-UEP protection status for Delta frames: UEP on/off.
+ delta_fec_params.use_uep_protection =
+ selected_method->RequiredUepProtectionD();
+
+ // The RTP module currently requires the same |max_fec_frames| for both
+ // key and delta frames.
+ delta_fec_params.max_fec_frames = selected_method->MaxFramesFec();
+ key_fec_params.max_fec_frames = selected_method->MaxFramesFec();
+
+ // Set the FEC packet mask type. |kFecMaskBursty| is more effective for
+ // consecutive losses and little/no packet re-ordering. As we currently
+ // do not have feedback data on the degree of correlated losses and packet
+ // re-ordering, we keep default setting to |kFecMaskRandom| for now.
+ delta_fec_params.fec_mask_type = kFecMaskRandom;
+ key_fec_params.fec_mask_type = kFecMaskRandom;
+
+ // TODO(Marco): Pass FEC protection values per layer.
+ video_protection_callback->ProtectionRequest(
+ &delta_fec_params, &key_fec_params, video_rate_bps,
+ nack_overhead_rate_bps, fec_overhead_rate_bps);
+}
+} // namespace
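The callback handed in above is expected to look roughly like the following. This is a sketch only: the ProtectionRequest signature is inferred from the call site in UpdateProtectionCallback(), not quoted from the interface header.

// Hypothetical receiver of the protection settings; a real implementation
// would configure the RTP sender and report back last-second send rates.
class SketchProtectionCallback : public webrtc::VCMProtectionCallback {
 public:
  int ProtectionRequest(const webrtc::FecProtectionParams* delta_fec_params,
                        const webrtc::FecProtectionParams* key_fec_params,
                        uint32_t* sent_video_rate_bps,
                        uint32_t* sent_nack_rate_bps,
                        uint32_t* sent_fec_rate_bps) override {
    // Apply delta_fec_params/key_fec_params to the FEC generator here.
    *sent_video_rate_bps = 0;  // fill in measured rates instead of zeros
    *sent_nack_rate_bps = 0;
    *sent_fec_rate_bps = 0;
    return 0;
  }
};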
+
+struct MediaOptimization::EncodedFrameSample {
+ EncodedFrameSample(size_t size_bytes,
+ uint32_t timestamp,
+ int64_t time_complete_ms)
+ : size_bytes(size_bytes),
+ timestamp(timestamp),
+ time_complete_ms(time_complete_ms) {}
+
+ size_t size_bytes;
+ uint32_t timestamp;
+ int64_t time_complete_ms;
+};
+
+MediaOptimization::MediaOptimization(Clock* clock)
+ : crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
+ clock_(clock),
+ max_bit_rate_(0),
+ send_codec_type_(kVideoCodecUnknown),
+ codec_width_(0),
+ codec_height_(0),
+ user_frame_rate_(0),
+ frame_dropper_(new FrameDropper),
+ loss_prot_logic_(
+ new VCMLossProtectionLogic(clock_->TimeInMilliseconds())),
+ fraction_lost_(0),
+ send_statistics_zero_encode_(0),
+ max_payload_size_(1460),
+ video_target_bitrate_(0),
+ incoming_frame_rate_(0),
+ enable_qm_(false),
+ encoded_frame_samples_(),
+ avg_sent_bit_rate_bps_(0),
+ avg_sent_framerate_(0),
+ key_frame_cnt_(0),
+ delta_frame_cnt_(0),
+ content_(new VCMContentMetricsProcessing()),
+ qm_resolution_(new VCMQmResolution()),
+ last_qm_update_time_(0),
+ last_change_time_(0),
+ num_layers_(0),
+ suspension_enabled_(false),
+ video_suspended_(false),
+ suspension_threshold_bps_(0),
+ suspension_window_bps_(0) {
+ memset(send_statistics_, 0, sizeof(send_statistics_));
+ memset(incoming_frame_times_, -1, sizeof(incoming_frame_times_));
+}
+
+MediaOptimization::~MediaOptimization(void) {
+ loss_prot_logic_->Release();
+}
+
+void MediaOptimization::Reset() {
+ CriticalSectionScoped lock(crit_sect_.get());
+ SetEncodingDataInternal(kVideoCodecUnknown, 0, 0, 0, 0, 0, 0,
+ max_payload_size_);
+ memset(incoming_frame_times_, -1, sizeof(incoming_frame_times_));
+ incoming_frame_rate_ = 0.0;
+ frame_dropper_->Reset();
+ loss_prot_logic_->Reset(clock_->TimeInMilliseconds());
+ frame_dropper_->SetRates(0, 0);
+ content_->Reset();
+ qm_resolution_->Reset();
+ loss_prot_logic_->UpdateFrameRate(incoming_frame_rate_);
+ loss_prot_logic_->Reset(clock_->TimeInMilliseconds());
+ send_statistics_zero_encode_ = 0;
+ video_target_bitrate_ = 0;
+ codec_width_ = 0;
+ codec_height_ = 0;
+ user_frame_rate_ = 0;
+ key_frame_cnt_ = 0;
+ delta_frame_cnt_ = 0;
+ last_qm_update_time_ = 0;
+ last_change_time_ = 0;
+ encoded_frame_samples_.clear();
+ avg_sent_bit_rate_bps_ = 0;
+ num_layers_ = 1;
+}
+
+void MediaOptimization::SetEncodingData(VideoCodecType send_codec_type,
+ int32_t max_bit_rate,
+ uint32_t target_bitrate,
+ uint16_t width,
+ uint16_t height,
+ uint32_t frame_rate,
+ int num_layers,
+ int32_t mtu) {
+ CriticalSectionScoped lock(crit_sect_.get());
+ SetEncodingDataInternal(send_codec_type, max_bit_rate, frame_rate,
+ target_bitrate, width, height, num_layers, mtu);
+}
+
+void MediaOptimization::SetEncodingDataInternal(VideoCodecType send_codec_type,
+ int32_t max_bit_rate,
+ uint32_t frame_rate,
+ uint32_t target_bitrate,
+ uint16_t width,
+ uint16_t height,
+ int num_layers,
+ int32_t mtu) {
+  // Everything codec specific should be reset here since this means the codec
+  // has changed. If native dimension values have changed, the change was
+  // initiated either by the user or by QM; which one can only be determined
+  // after the first frame has been processed.
+ last_change_time_ = clock_->TimeInMilliseconds();
+ content_->Reset();
+ content_->UpdateFrameRate(frame_rate);
+
+ max_bit_rate_ = max_bit_rate;
+ send_codec_type_ = send_codec_type;
+ video_target_bitrate_ = target_bitrate;
+ float target_bitrate_kbps = static_cast<float>(target_bitrate) / 1000.0f;
+ loss_prot_logic_->UpdateBitRate(target_bitrate_kbps);
+ loss_prot_logic_->UpdateFrameRate(static_cast<float>(frame_rate));
+ loss_prot_logic_->UpdateFrameSize(width, height);
+ loss_prot_logic_->UpdateNumLayers(num_layers);
+ frame_dropper_->Reset();
+ frame_dropper_->SetRates(target_bitrate_kbps, static_cast<float>(frame_rate));
+ user_frame_rate_ = static_cast<float>(frame_rate);
+ codec_width_ = width;
+ codec_height_ = height;
+ num_layers_ = (num_layers <= 1) ? 1 : num_layers; // Can also be zero.
+ max_payload_size_ = mtu;
+ qm_resolution_->Initialize(target_bitrate_kbps, user_frame_rate_,
+ codec_width_, codec_height_, num_layers_);
+}
+
+uint32_t MediaOptimization::SetTargetRates(
+ uint32_t target_bitrate,
+ uint8_t fraction_lost,
+ int64_t round_trip_time_ms,
+ VCMProtectionCallback* protection_callback,
+ VCMQMSettingsCallback* qmsettings_callback) {
+ CriticalSectionScoped lock(crit_sect_.get());
+ VCMProtectionMethod* selected_method = loss_prot_logic_->SelectedMethod();
+ float target_bitrate_kbps = static_cast<float>(target_bitrate) / 1000.0f;
+ loss_prot_logic_->UpdateBitRate(target_bitrate_kbps);
+ loss_prot_logic_->UpdateRtt(round_trip_time_ms);
+
+ // Get frame rate for encoder: this is the actual/sent frame rate.
+ float actual_frame_rate = SentFrameRateInternal();
+
+ // Sanity check.
+ if (actual_frame_rate < 1.0) {
+ actual_frame_rate = 1.0;
+ }
+
+ // Update frame rate for the loss protection logic class: frame rate should
+ // be the actual/sent rate.
+ loss_prot_logic_->UpdateFrameRate(actual_frame_rate);
+
+ fraction_lost_ = fraction_lost;
+
+ // Returns the filtered packet loss, used for the protection setting.
+ // The filtered loss may be the received loss (no filter), or some
+ // filtered value (average or max window filter).
+ // Use max window filter for now.
+ FilterPacketLossMode filter_mode = kMaxFilter;
+ uint8_t packet_loss_enc = loss_prot_logic_->FilteredLoss(
+ clock_->TimeInMilliseconds(), filter_mode, fraction_lost);
+
+ // For now use the filtered loss for computing the robustness settings.
+ loss_prot_logic_->UpdateFilteredLossPr(packet_loss_enc);
+
+ // Rate cost of the protection methods.
+ float protection_overhead_rate = 0.0f;
+
+ // Update protection settings, when applicable.
+ float sent_video_rate_kbps = 0.0f;
+ if (loss_prot_logic_->SelectedType() != kNone) {
+ // Update protection method with content metrics.
+ selected_method->UpdateContentMetrics(content_->ShortTermAvgData());
+
+    // UpdateMethod() computes the robustness settings and the overhead cost
+    // for the given protection method; the method itself is set by the user
+    // via SetVideoProtection.
+ loss_prot_logic_->UpdateMethod();
+
+ // Update protection callback with protection settings.
+ uint32_t sent_video_rate_bps = 0;
+ uint32_t sent_nack_rate_bps = 0;
+ uint32_t sent_fec_rate_bps = 0;
+ // Get the bit cost of protection method, based on the amount of
+ // overhead data actually transmitted (including headers) the last
+ // second.
+ if (protection_callback) {
+ UpdateProtectionCallback(selected_method, &sent_video_rate_bps,
+ &sent_nack_rate_bps, &sent_fec_rate_bps,
+ protection_callback);
+ }
+ uint32_t sent_total_rate_bps =
+ sent_video_rate_bps + sent_nack_rate_bps + sent_fec_rate_bps;
+    // Estimate the overhead of the next second as the same fraction of the
+    // total sent rate as was measured over the last second.
+ if (sent_total_rate_bps > 0) {
+ protection_overhead_rate =
+ static_cast<float>(sent_nack_rate_bps + sent_fec_rate_bps) /
+ sent_total_rate_bps;
+ }
+ // Cap the overhead estimate to 50%.
+ if (protection_overhead_rate > 0.5)
+ protection_overhead_rate = 0.5;
+
+ // Get the effective packet loss for encoder ER when applicable. Should be
+ // passed to encoder via fraction_lost.
+ packet_loss_enc = selected_method->RequiredPacketLossER();
+ sent_video_rate_kbps = static_cast<float>(sent_video_rate_bps) / 1000.0f;
+ }
+
+ // Source coding rate: total rate - protection overhead.
+ video_target_bitrate_ = target_bitrate * (1.0 - protection_overhead_rate);
+
+ // Cap target video bitrate to codec maximum.
+ if (max_bit_rate_ > 0 && video_target_bitrate_ > max_bit_rate_) {
+ video_target_bitrate_ = max_bit_rate_;
+ }
+
+ // Update encoding rates following protection settings.
+ float target_video_bitrate_kbps =
+ static_cast<float>(video_target_bitrate_) / 1000.0f;
+ frame_dropper_->SetRates(target_video_bitrate_kbps, incoming_frame_rate_);
+
+ if (enable_qm_ && qmsettings_callback) {
+ // Update QM with rates.
+ qm_resolution_->UpdateRates(target_video_bitrate_kbps, sent_video_rate_kbps,
+ incoming_frame_rate_, fraction_lost_);
+ // Check for QM selection.
+ bool select_qm = CheckStatusForQMchange();
+ if (select_qm) {
+ SelectQuality(qmsettings_callback);
+ }
+ // Reset the short-term averaged content data.
+ content_->ResetShortTermAvgData();
+ }
+
+ CheckSuspendConditions();
+
+ return video_target_bitrate_;
+}
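Worked numbers for the overhead deduction above, as a standalone mirror with invented names: if the last second carried 500 kbps of video plus 40 kbps of NACK and 60 kbps of FEC, the overhead fraction is 100/600 (about 0.167), so a 1 Mbps allocation leaves roughly 833 kbps for source coding.

#include <algorithm>
#include <cstdint>
#include <cstdio>

// Mirrors the protection-overhead arithmetic in SetTargetRates().
uint32_t SourceTargetSketch(uint32_t target_bps, uint32_t video_bps,
                            uint32_t nack_bps, uint32_t fec_bps) {
  uint32_t total_bps = video_bps + nack_bps + fec_bps;
  float overhead =
      total_bps > 0 ? static_cast<float>(nack_bps + fec_bps) / total_bps
                    : 0.0f;
  overhead = std::min(overhead, 0.5f);  // cap the estimate at 50%
  return static_cast<uint32_t>(target_bps * (1.0f - overhead));
}

int main() {
  std::printf("%u\n", static_cast<unsigned>(
                          SourceTargetSketch(1000000, 500000, 40000, 60000)));
}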
+
+void MediaOptimization::SetProtectionMethod(VCMProtectionMethodEnum method) {
+ CriticalSectionScoped lock(crit_sect_.get());
+ loss_prot_logic_->SetMethod(method);
+}
+
+uint32_t MediaOptimization::InputFrameRate() {
+ CriticalSectionScoped lock(crit_sect_.get());
+ return InputFrameRateInternal();
+}
+
+uint32_t MediaOptimization::InputFrameRateInternal() {
+ ProcessIncomingFrameRate(clock_->TimeInMilliseconds());
+ return uint32_t(incoming_frame_rate_ + 0.5f);
+}
+
+uint32_t MediaOptimization::SentFrameRate() {
+ CriticalSectionScoped lock(crit_sect_.get());
+ return SentFrameRateInternal();
+}
+
+uint32_t MediaOptimization::SentFrameRateInternal() {
+ PurgeOldFrameSamples(clock_->TimeInMilliseconds());
+ UpdateSentFramerate();
+ return avg_sent_framerate_;
+}
+
+uint32_t MediaOptimization::SentBitRate() {
+ CriticalSectionScoped lock(crit_sect_.get());
+ const int64_t now_ms = clock_->TimeInMilliseconds();
+ PurgeOldFrameSamples(now_ms);
+ UpdateSentBitrate(now_ms);
+ return avg_sent_bit_rate_bps_;
+}
+
+int32_t MediaOptimization::UpdateWithEncodedData(
+ const EncodedImage& encoded_image) {
+ size_t encoded_length = encoded_image._length;
+ uint32_t timestamp = encoded_image._timeStamp;
+ CriticalSectionScoped lock(crit_sect_.get());
+ const int64_t now_ms = clock_->TimeInMilliseconds();
+ PurgeOldFrameSamples(now_ms);
+ if (encoded_frame_samples_.size() > 0 &&
+ encoded_frame_samples_.back().timestamp == timestamp) {
+ // Frames having the same timestamp are generated from the same input
+ // frame. We don't want to double count them, but only increment the
+ // size_bytes.
+ encoded_frame_samples_.back().size_bytes += encoded_length;
+ encoded_frame_samples_.back().time_complete_ms = now_ms;
+ } else {
+ encoded_frame_samples_.push_back(
+ EncodedFrameSample(encoded_length, timestamp, now_ms));
+ }
+ UpdateSentBitrate(now_ms);
+ UpdateSentFramerate();
+ if (encoded_length > 0) {
+ const bool delta_frame = encoded_image._frameType != kVideoFrameKey;
+
+ frame_dropper_->Fill(encoded_length, delta_frame);
+ if (max_payload_size_ > 0 && encoded_length > 0) {
+ const float min_packets_per_frame =
+ encoded_length / static_cast<float>(max_payload_size_);
+ if (delta_frame) {
+ loss_prot_logic_->UpdatePacketsPerFrame(min_packets_per_frame,
+ clock_->TimeInMilliseconds());
+ } else {
+ loss_prot_logic_->UpdatePacketsPerFrameKey(
+ min_packets_per_frame, clock_->TimeInMilliseconds());
+ }
+
+ if (enable_qm_) {
+ // Update quality select with encoded length.
+ qm_resolution_->UpdateEncodedSize(encoded_length);
+ }
+ }
+ if (!delta_frame && encoded_length > 0) {
+ loss_prot_logic_->UpdateKeyFrameSize(static_cast<float>(encoded_length));
+ }
+
+    // Update the key/delta frame counters.
+ if (delta_frame) {
+ delta_frame_cnt_++;
+ } else {
+ key_frame_cnt_++;
+ }
+ }
+
+ return VCM_OK;
+}
+
+void MediaOptimization::EnableQM(bool enable) {
+ CriticalSectionScoped lock(crit_sect_.get());
+ enable_qm_ = enable;
+}
+
+void MediaOptimization::EnableFrameDropper(bool enable) {
+ CriticalSectionScoped lock(crit_sect_.get());
+ frame_dropper_->Enable(enable);
+}
+
+void MediaOptimization::SuspendBelowMinBitrate(int threshold_bps,
+ int window_bps) {
+ CriticalSectionScoped lock(crit_sect_.get());
+ assert(threshold_bps > 0 && window_bps >= 0);
+ suspension_threshold_bps_ = threshold_bps;
+ suspension_window_bps_ = window_bps;
+ suspension_enabled_ = true;
+ video_suspended_ = false;
+}
+
+bool MediaOptimization::IsVideoSuspended() const {
+ CriticalSectionScoped lock(crit_sect_.get());
+ return video_suspended_;
+}
+
+bool MediaOptimization::DropFrame() {
+ CriticalSectionScoped lock(crit_sect_.get());
+ UpdateIncomingFrameRate();
+  // Leak the appropriate number of bytes from the frame dropper's bucket.
+  frame_dropper_->Leak(static_cast<uint32_t>(InputFrameRateInternal() + 0.5f));
+ if (video_suspended_) {
+ return true; // Drop all frames when muted.
+ }
+ return frame_dropper_->DropFrame();
+}
+
+void MediaOptimization::UpdateContentData(
+ const VideoContentMetrics* content_metrics) {
+ CriticalSectionScoped lock(crit_sect_.get());
+  // Update content metrics.
+ if (content_metrics == NULL) {
+ // Disable QM if metrics are NULL.
+ enable_qm_ = false;
+ qm_resolution_->Reset();
+ } else {
+ content_->UpdateContentData(content_metrics);
+ }
+}
+
+void MediaOptimization::UpdateIncomingFrameRate() {
+ int64_t now = clock_->TimeInMilliseconds();
+  // No shifting is needed the very first time (all entries are still zero).
+  if (incoming_frame_times_[0] != 0) {
+    // Shift all times one step.
+    for (int32_t i = (kFrameCountHistorySize - 2); i >= 0; i--) {
+      incoming_frame_times_[i + 1] = incoming_frame_times_[i];
+    }
+  }
+ incoming_frame_times_[0] = now;
+ ProcessIncomingFrameRate(now);
+}
+
+int32_t MediaOptimization::SelectQuality(
+ VCMQMSettingsCallback* video_qmsettings_callback) {
+ // Reset quantities for QM select.
+ qm_resolution_->ResetQM();
+
+  // Update QM with long-term averaged content metrics.
+ qm_resolution_->UpdateContent(content_->LongTermAvgData());
+
+ // Select quality mode.
+ VCMResolutionScale* qm = NULL;
+ int32_t ret = qm_resolution_->SelectResolution(&qm);
+ if (ret < 0) {
+ return ret;
+ }
+
+ // Check for updates to spatial/temporal modes.
+ QMUpdate(qm, video_qmsettings_callback);
+
+  // Reset all rate and related frame-counter quantities.
+ qm_resolution_->ResetRates();
+
+ // Reset counters.
+ last_qm_update_time_ = clock_->TimeInMilliseconds();
+
+ // Reset content metrics.
+ content_->Reset();
+
+ return VCM_OK;
+}
+
+void MediaOptimization::PurgeOldFrameSamples(int64_t now_ms) {
+ while (!encoded_frame_samples_.empty()) {
+ if (now_ms - encoded_frame_samples_.front().time_complete_ms >
+ kBitrateAverageWinMs) {
+ encoded_frame_samples_.pop_front();
+ } else {
+ break;
+ }
+ }
+}
+
+void MediaOptimization::UpdateSentBitrate(int64_t now_ms) {
+ if (encoded_frame_samples_.empty()) {
+ avg_sent_bit_rate_bps_ = 0;
+ return;
+ }
+ size_t framesize_sum = 0;
+ for (FrameSampleList::iterator it = encoded_frame_samples_.begin();
+ it != encoded_frame_samples_.end(); ++it) {
+ framesize_sum += it->size_bytes;
+ }
+ float denom = static_cast<float>(
+ now_ms - encoded_frame_samples_.front().time_complete_ms);
+ if (denom >= 1.0f) {
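+    // Average bitrate over the window: bytes * 8 bits/byte * 1000 ms/s,
+    // divided by the window length in ms, rounded to the nearest bps.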
+ avg_sent_bit_rate_bps_ =
+ static_cast<uint32_t>(framesize_sum * 8.0f * 1000.0f / denom + 0.5f);
+ } else {
+ avg_sent_bit_rate_bps_ = framesize_sum * 8;
+ }
+}
+
+void MediaOptimization::UpdateSentFramerate() {
+ if (encoded_frame_samples_.size() <= 1) {
+ avg_sent_framerate_ = encoded_frame_samples_.size();
+ return;
+ }
+ int denom = encoded_frame_samples_.back().timestamp -
+ encoded_frame_samples_.front().timestamp;
+ if (denom > 0) {
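+    // Timestamps are in 90 kHz RTP units; adding denom / 2 rounds the
+    // result to the nearest frame rate in fps.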
+ avg_sent_framerate_ =
+ (90000 * (encoded_frame_samples_.size() - 1) + denom / 2) / denom;
+ } else {
+ avg_sent_framerate_ = encoded_frame_samples_.size();
+ }
+}
+
+bool MediaOptimization::QMUpdate(
+ VCMResolutionScale* qm,
+ VCMQMSettingsCallback* video_qmsettings_callback) {
+ // Check for no change.
+ if (!qm->change_resolution_spatial && !qm->change_resolution_temporal) {
+ return false;
+ }
+
+ // Check for change in frame rate.
+ if (qm->change_resolution_temporal) {
+ incoming_frame_rate_ = qm->frame_rate;
+ // Reset frame rate estimate.
+ memset(incoming_frame_times_, -1, sizeof(incoming_frame_times_));
+ }
+
+ // Check for change in frame size.
+ if (qm->change_resolution_spatial) {
+ codec_width_ = qm->codec_width;
+ codec_height_ = qm->codec_height;
+ }
+
+ LOG(LS_INFO) << "Media optimizer requests the video resolution to be changed "
+ "to "
+ << qm->codec_width << "x" << qm->codec_height << "@"
+ << qm->frame_rate;
+
+ // Update VPM with new target frame rate and frame size.
+  // Note: use |qm->frame_rate| instead of |incoming_frame_rate_| for updating
+  // the target frame rate in the VPM frame dropper. |incoming_frame_rate_|
+  // will vary/fluctuate, and since we don't want to change the state of the
+  // VPM frame dropper unless a temporal action was selected, we use
+  // |qm->frame_rate| for the update.
+ video_qmsettings_callback->SetVideoQMSettings(qm->frame_rate, codec_width_,
+ codec_height_);
+ content_->UpdateFrameRate(qm->frame_rate);
+ qm_resolution_->UpdateCodecParameters(qm->frame_rate, codec_width_,
+ codec_height_);
+ return true;
+}
+
+// Check timing constraints and look for significant change in:
+// (1) scene content,
+// (2) target bit rate.
+bool MediaOptimization::CheckStatusForQMchange() {
+ bool status = true;
+
+  // Check that we do not call QMSelect too often, and that we waited some
+  // time (to sample the metrics) since |last_change_time_|, which is the time
+  // when the user last changed the size/rate/frame rate (via SetEncodingData).
+ int64_t now = clock_->TimeInMilliseconds();
+ if ((now - last_qm_update_time_) < kQmMinIntervalMs ||
+ (now - last_change_time_) < kQmMinIntervalMs) {
+ status = false;
+ }
+
+ return status;
+}
+
+// Allows the VCM to keep track of the incoming frame rate.
+void MediaOptimization::ProcessIncomingFrameRate(int64_t now) {
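+  // Count the frames received within the history window and derive the rate
+  // from the time elapsed between the newest and the oldest counted sample.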
+ int32_t num = 0;
+ int32_t nr_of_frames = 0;
+ for (num = 1; num < (kFrameCountHistorySize - 1); ++num) {
+ if (incoming_frame_times_[num] <= 0 ||
+        // Don't use data older than |kFrameHistoryWinMs| (2 seconds).
+ now - incoming_frame_times_[num] > kFrameHistoryWinMs) {
+ break;
+ } else {
+ nr_of_frames++;
+ }
+ }
+ if (num > 1) {
+ const int64_t diff =
+ incoming_frame_times_[0] - incoming_frame_times_[num - 1];
+ incoming_frame_rate_ = 0.0; // No frame rate estimate available.
+ if (diff > 0) {
+ incoming_frame_rate_ = nr_of_frames * 1000.0f / static_cast<float>(diff);
+ }
+ }
+}
+
+void MediaOptimization::CheckSuspendConditions() {
+ // Check conditions for SuspendBelowMinBitrate. |video_target_bitrate_| is in
+ // bps.
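+  // |suspension_window_bps_| adds hysteresis so that the suspended state
+  // does not toggle on small fluctuations around the threshold.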
+ if (suspension_enabled_) {
+ if (!video_suspended_) {
+ // Check if we just went below the threshold.
+ if (video_target_bitrate_ < suspension_threshold_bps_) {
+ video_suspended_ = true;
+ }
+ } else {
+ // Video is already suspended. Check if we just went over the threshold
+ // with a margin.
+ if (video_target_bitrate_ >
+ suspension_threshold_bps_ + suspension_window_bps_) {
+ video_suspended_ = false;
+ }
+ }
+ }
+}
+
+} // namespace media_optimization
+} // namespace webrtc
diff --git a/webrtc/modules/video_coding/media_optimization.h b/webrtc/modules/video_coding/media_optimization.h
new file mode 100644
index 0000000000..54389bf5b5
--- /dev/null
+++ b/webrtc/modules/video_coding/media_optimization.h
@@ -0,0 +1,174 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_MEDIA_OPTIMIZATION_H_
+#define WEBRTC_MODULES_VIDEO_CODING_MEDIA_OPTIMIZATION_H_
+
+#include <list>
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/video_coding/include/video_coding.h"
+#include "webrtc/modules/video_coding/media_opt_util.h"
+#include "webrtc/modules/video_coding/qm_select.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+
+namespace webrtc {
+
+// Forward declarations.
+class Clock;
+class FrameDropper;
+class VCMContentMetricsProcessing;
+
+namespace media_optimization {
+
+class MediaOptimization {
+ public:
+ explicit MediaOptimization(Clock* clock);
+ ~MediaOptimization();
+
+ // TODO(andresp): Can Reset and SetEncodingData be done at construction time
+ // only?
+ void Reset();
+
+ // Informs media optimization of initial encoding state.
+ void SetEncodingData(VideoCodecType send_codec_type,
+ int32_t max_bit_rate,
+ uint32_t bit_rate,
+ uint16_t width,
+ uint16_t height,
+ uint32_t frame_rate,
+ int num_temporal_layers,
+ int32_t mtu);
+
+  // Sets target rates for the encoder given the channel parameters.
+  // Inputs: target_bitrate - the encoder target bitrate in bits/s.
+  //         fraction_lost - packet loss rate, as an RTCP fraction-lost
+  //                         value (0-255).
+  //         round_trip_time_ms - round trip time in milliseconds.
+ // TODO(andresp): Find if the callbacks can be triggered only after releasing
+ // an internal critical section.
+ uint32_t SetTargetRates(uint32_t target_bitrate,
+ uint8_t fraction_lost,
+ int64_t round_trip_time_ms,
+ VCMProtectionCallback* protection_callback,
+ VCMQMSettingsCallback* qmsettings_callback);
+
+ void SetProtectionMethod(VCMProtectionMethodEnum method);
+ void EnableQM(bool enable);
+ void EnableFrameDropper(bool enable);
+
+ // Lets the sender suspend video when the rate drops below
+ // |threshold_bps|, and turns back on when the rate goes back up above
+ // |threshold_bps| + |window_bps|.
+ void SuspendBelowMinBitrate(int threshold_bps, int window_bps);
+ bool IsVideoSuspended() const;
+
+ bool DropFrame();
+
+ void UpdateContentData(const VideoContentMetrics* content_metrics);
+
+ // Informs Media Optimization of encoded output.
+ int32_t UpdateWithEncodedData(const EncodedImage& encoded_image);
+
+  // Returns the input frame rate; 0 means no estimate is available.
+ uint32_t InputFrameRate();
+ uint32_t SentFrameRate();
+ uint32_t SentBitRate();
+
+ private:
+ enum { kFrameCountHistorySize = 90 };
+ enum { kFrameHistoryWinMs = 2000 };
+ enum { kBitrateAverageWinMs = 1000 };
+
+ struct EncodedFrameSample;
+ typedef std::list<EncodedFrameSample> FrameSampleList;
+
+ void UpdateIncomingFrameRate() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ void PurgeOldFrameSamples(int64_t now_ms)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ void UpdateSentBitrate(int64_t now_ms) EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ void UpdateSentFramerate() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ // Computes new Quality Mode.
+ int32_t SelectQuality(VCMQMSettingsCallback* qmsettings_callback)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ // Verifies if QM settings differ from default, i.e. if an update is required.
+ // Computes actual values, as will be sent to the encoder.
+ bool QMUpdate(VCMResolutionScale* qm,
+ VCMQMSettingsCallback* qmsettings_callback)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+  // Checks if we should make a QM change. Returns true if yes, false
+  // otherwise.
+ bool CheckStatusForQMchange() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ void ProcessIncomingFrameRate(int64_t now)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ // Checks conditions for suspending the video. The method compares
+ // |video_target_bitrate_| with the threshold values for suspension, and
+ // changes the state of |video_suspended_| accordingly.
+ void CheckSuspendConditions() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ void SetEncodingDataInternal(VideoCodecType send_codec_type,
+ int32_t max_bit_rate,
+ uint32_t frame_rate,
+ uint32_t bit_rate,
+ uint16_t width,
+ uint16_t height,
+ int num_temporal_layers,
+ int32_t mtu)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ uint32_t InputFrameRateInternal() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ uint32_t SentFrameRateInternal() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ // Protect all members.
+ rtc::scoped_ptr<CriticalSectionWrapper> crit_sect_;
+
+ Clock* clock_ GUARDED_BY(crit_sect_);
+ int32_t max_bit_rate_ GUARDED_BY(crit_sect_);
+ VideoCodecType send_codec_type_ GUARDED_BY(crit_sect_);
+ uint16_t codec_width_ GUARDED_BY(crit_sect_);
+ uint16_t codec_height_ GUARDED_BY(crit_sect_);
+ float user_frame_rate_ GUARDED_BY(crit_sect_);
+ rtc::scoped_ptr<FrameDropper> frame_dropper_ GUARDED_BY(crit_sect_);
+ rtc::scoped_ptr<VCMLossProtectionLogic> loss_prot_logic_
+ GUARDED_BY(crit_sect_);
+ uint8_t fraction_lost_ GUARDED_BY(crit_sect_);
+ uint32_t send_statistics_[4] GUARDED_BY(crit_sect_);
+ uint32_t send_statistics_zero_encode_ GUARDED_BY(crit_sect_);
+ int32_t max_payload_size_ GUARDED_BY(crit_sect_);
+ int video_target_bitrate_ GUARDED_BY(crit_sect_);
+ float incoming_frame_rate_ GUARDED_BY(crit_sect_);
+ int64_t incoming_frame_times_[kFrameCountHistorySize] GUARDED_BY(crit_sect_);
+ bool enable_qm_ GUARDED_BY(crit_sect_);
+ std::list<EncodedFrameSample> encoded_frame_samples_ GUARDED_BY(crit_sect_);
+ uint32_t avg_sent_bit_rate_bps_ GUARDED_BY(crit_sect_);
+ uint32_t avg_sent_framerate_ GUARDED_BY(crit_sect_);
+ uint32_t key_frame_cnt_ GUARDED_BY(crit_sect_);
+ uint32_t delta_frame_cnt_ GUARDED_BY(crit_sect_);
+ rtc::scoped_ptr<VCMContentMetricsProcessing> content_ GUARDED_BY(crit_sect_);
+ rtc::scoped_ptr<VCMQmResolution> qm_resolution_ GUARDED_BY(crit_sect_);
+ int64_t last_qm_update_time_ GUARDED_BY(crit_sect_);
+ int64_t last_change_time_ GUARDED_BY(crit_sect_); // Content/user triggered.
+ int num_layers_ GUARDED_BY(crit_sect_);
+ bool suspension_enabled_ GUARDED_BY(crit_sect_);
+ bool video_suspended_ GUARDED_BY(crit_sect_);
+ int suspension_threshold_bps_ GUARDED_BY(crit_sect_);
+ int suspension_window_bps_ GUARDED_BY(crit_sect_);
+};
+} // namespace media_optimization
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_MEDIA_OPTIMIZATION_H_
diff --git a/webrtc/modules/video_coding/media_optimization_unittest.cc b/webrtc/modules/video_coding/media_optimization_unittest.cc
new file mode 100644
index 0000000000..3f8ac5d075
--- /dev/null
+++ b/webrtc/modules/video_coding/media_optimization_unittest.cc
@@ -0,0 +1,154 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/video_coding/media_optimization.h"
+#include "webrtc/system_wrappers/include/clock.h"
+
+namespace webrtc {
+namespace media_optimization {
+
+class TestMediaOptimization : public ::testing::Test {
+ protected:
+ enum {
+ kSampleRate = 90000 // RTP timestamps per second.
+ };
+
+  // Note: the simulated clock starts at 1 second, since parts of webrtc use 0
+  // as a special case (e.g. frame rate in media optimization).
+ TestMediaOptimization()
+ : clock_(1000),
+ media_opt_(&clock_),
+ frame_time_ms_(33),
+ next_timestamp_(0) {}
+
+ // This method mimics what happens in VideoSender::AddVideoFrame.
+ void AddFrameAndAdvanceTime(uint32_t bitrate_bps, bool expect_frame_drop) {
+ bool frame_dropped = media_opt_.DropFrame();
+ EXPECT_EQ(expect_frame_drop, frame_dropped);
+ if (!frame_dropped) {
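+      // Convert the target bitrate into an expected encoded frame size:
+      // bits/s * ms / (8 bits/byte * 1000 ms/s) = bytes per frame.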
+ size_t bytes_per_frame = bitrate_bps * frame_time_ms_ / (8 * 1000);
+ EncodedImage encoded_image;
+ encoded_image._length = bytes_per_frame;
+ encoded_image._timeStamp = next_timestamp_;
+ encoded_image._frameType = kVideoFrameKey;
+ ASSERT_EQ(VCM_OK, media_opt_.UpdateWithEncodedData(encoded_image));
+ }
+ next_timestamp_ += frame_time_ms_ * kSampleRate / 1000;
+ clock_.AdvanceTimeMilliseconds(frame_time_ms_);
+ }
+
+ SimulatedClock clock_;
+ MediaOptimization media_opt_;
+ int frame_time_ms_;
+ uint32_t next_timestamp_;
+};
+
+TEST_F(TestMediaOptimization, VerifyMuting) {
+ // Enable video suspension with these limits.
+ // Suspend the video when the rate is below 50 kbps and resume when it gets
+ // above 50 + 10 kbps again.
+ const uint32_t kThresholdBps = 50000;
+ const uint32_t kWindowBps = 10000;
+ media_opt_.SuspendBelowMinBitrate(kThresholdBps, kWindowBps);
+
+ // The video should not be suspended from the start.
+ EXPECT_FALSE(media_opt_.IsVideoSuspended());
+
+ uint32_t target_bitrate_kbps = 100;
+ media_opt_.SetTargetRates(target_bitrate_kbps * 1000,
+ 0, // Lossrate.
+ 100, // RTT in ms.
+ nullptr, nullptr);
+ media_opt_.EnableFrameDropper(true);
+ for (int time = 0; time < 2000; time += frame_time_ms_) {
+ ASSERT_NO_FATAL_FAILURE(AddFrameAndAdvanceTime(target_bitrate_kbps, false));
+ }
+
+ // Set the target rate below the limit for muting.
+ media_opt_.SetTargetRates(kThresholdBps - 1000,
+ 0, // Lossrate.
+ 100, // RTT in ms.
+ nullptr, nullptr);
+ // Expect the muter to engage immediately and stay muted.
+ // Test during 2 seconds.
+ for (int time = 0; time < 2000; time += frame_time_ms_) {
+ EXPECT_TRUE(media_opt_.IsVideoSuspended());
+ ASSERT_NO_FATAL_FAILURE(AddFrameAndAdvanceTime(target_bitrate_kbps, true));
+ }
+
+ // Set the target above the limit for muting, but not above the
+ // limit + window.
+ media_opt_.SetTargetRates(kThresholdBps + 1000,
+ 0, // Lossrate.
+ 100, // RTT in ms.
+ nullptr, nullptr);
+ // Expect the muter to stay muted.
+ // Test during 2 seconds.
+ for (int time = 0; time < 2000; time += frame_time_ms_) {
+ EXPECT_TRUE(media_opt_.IsVideoSuspended());
+ ASSERT_NO_FATAL_FAILURE(AddFrameAndAdvanceTime(target_bitrate_kbps, true));
+ }
+
+ // Set the target above limit + window.
+ media_opt_.SetTargetRates(kThresholdBps + kWindowBps + 1000,
+ 0, // Lossrate.
+ 100, // RTT in ms.
+ nullptr, nullptr);
+ // Expect the muter to disengage immediately.
+ // Test during 2 seconds.
+ for (int time = 0; time < 2000; time += frame_time_ms_) {
+ EXPECT_FALSE(media_opt_.IsVideoSuspended());
+ ASSERT_NO_FATAL_FAILURE(
+ AddFrameAndAdvanceTime((kThresholdBps + kWindowBps) / 1000, false));
+ }
+}
+
+TEST_F(TestMediaOptimization, ProtectsUsingFecBitrateAboveCodecMax) {
+ static const int kCodecBitrateBps = 100000;
+ static const int kMaxBitrateBps = 130000;
+
+ class ProtectionCallback : public VCMProtectionCallback {
+ int ProtectionRequest(const FecProtectionParams* delta_params,
+ const FecProtectionParams* key_params,
+ uint32_t* sent_video_rate_bps,
+ uint32_t* sent_nack_rate_bps,
+ uint32_t* sent_fec_rate_bps) override {
+ *sent_video_rate_bps = kCodecBitrateBps;
+ *sent_nack_rate_bps = 0;
+ *sent_fec_rate_bps = fec_rate_bps_;
+ return 0;
+ }
+
+ public:
+ uint32_t fec_rate_bps_;
+ } protection_callback;
+
+ media_opt_.SetProtectionMethod(kFec);
+ media_opt_.SetEncodingData(kVideoCodecVP8, kCodecBitrateBps, kCodecBitrateBps,
+ 640, 480, 30, 1, 1000);
+
+  // With 10% of the codec bitrate spent on FEC, the video should still be
+  // able to use its full codec bitrate.
+ protection_callback.fec_rate_bps_ = kCodecBitrateBps / 10;
+ uint32_t target_bitrate = media_opt_.SetTargetRates(
+ kMaxBitrateBps, 0, 0, &protection_callback, nullptr);
+
+ EXPECT_EQ(kCodecBitrateBps, static_cast<int>(target_bitrate));
+
+  // With the FEC rate as large as the codec bitrate, the new target rate
+  // should be shared equally between the two, so the video rate is only half
+  // of the max (since that ceiling is hit).
+ protection_callback.fec_rate_bps_ = kCodecBitrateBps;
+ target_bitrate = media_opt_.SetTargetRates(kMaxBitrateBps, 128, 100,
+ &protection_callback, nullptr);
+ EXPECT_EQ(kMaxBitrateBps / 2, static_cast<int>(target_bitrate));
+}
+
+} // namespace media_optimization
+} // namespace webrtc
diff --git a/webrtc/modules/video_coding/nack_fec_tables.h b/webrtc/modules/video_coding/nack_fec_tables.h
new file mode 100644
index 0000000000..f9f5ad97ac
--- /dev/null
+++ b/webrtc/modules/video_coding/nack_fec_tables.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_NACK_FEC_TABLES_H_
+#define WEBRTC_MODULES_VIDEO_CODING_NACK_FEC_TABLES_H_
+
+namespace webrtc {
+
+// Table for adjusting the FEC rate for the NACK/FEC protection method.
+// Table values are built as a sigmoid function, ranging from 0 to 100, based
+// on the HybridNackTH values defined in media_opt_util.h.
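+// Entries are in percent (0 to 100) and are used by the hybrid NACK/FEC
+// method (see media_opt_util.h) to adjust its FEC rate.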
+const uint16_t VCMNackFecTable[100] = {
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1,
+ 1, 2, 2, 2, 3, 3, 4, 5, 6, 7, 9, 10, 12, 15, 18,
+ 21, 24, 28, 32, 37, 41, 46, 51, 56, 61, 66, 70, 74, 78, 81,
+ 84, 86, 89, 90, 92, 93, 95, 95, 96, 97, 97, 98, 98, 99, 99,
+ 99, 99, 99, 99, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100,
+ 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100,
+ 100, 100, 100, 100, 100, 100, 100, 100, 100, 100,
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_NACK_FEC_TABLES_H_
diff --git a/webrtc/modules/video_coding/packet.cc b/webrtc/modules/video_coding/packet.cc
new file mode 100644
index 0000000000..e25de2ed6c
--- /dev/null
+++ b/webrtc/modules/video_coding/packet.cc
@@ -0,0 +1,153 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_coding/packet.h"
+
+#include <assert.h>
+
+#include "webrtc/modules/include/module_common_types.h"
+
+namespace webrtc {
+
+VCMPacket::VCMPacket()
+ : payloadType(0),
+ timestamp(0),
+ ntp_time_ms_(0),
+ seqNum(0),
+ dataPtr(NULL),
+ sizeBytes(0),
+ markerBit(false),
+ frameType(kEmptyFrame),
+ codec(kVideoCodecUnknown),
+ isFirstPacket(false),
+ completeNALU(kNaluUnset),
+ insertStartCode(false),
+ width(0),
+ height(0),
+ codecSpecificHeader() {}
+
+VCMPacket::VCMPacket(const uint8_t* ptr,
+ const size_t size,
+ const WebRtcRTPHeader& rtpHeader)
+ : payloadType(rtpHeader.header.payloadType),
+ timestamp(rtpHeader.header.timestamp),
+ ntp_time_ms_(rtpHeader.ntp_time_ms),
+ seqNum(rtpHeader.header.sequenceNumber),
+ dataPtr(ptr),
+ sizeBytes(size),
+ markerBit(rtpHeader.header.markerBit),
+
+ frameType(rtpHeader.frameType),
+ codec(kVideoCodecUnknown),
+ isFirstPacket(rtpHeader.type.Video.isFirstPacket),
+ completeNALU(kNaluComplete),
+ insertStartCode(false),
+ width(rtpHeader.type.Video.width),
+ height(rtpHeader.type.Video.height),
+ codecSpecificHeader(rtpHeader.type.Video) {
+ CopyCodecSpecifics(rtpHeader.type.Video);
+}
+
+VCMPacket::VCMPacket(const uint8_t* ptr,
+ size_t size,
+ uint16_t seq,
+ uint32_t ts,
+ bool mBit)
+ : payloadType(0),
+ timestamp(ts),
+ ntp_time_ms_(0),
+ seqNum(seq),
+ dataPtr(ptr),
+ sizeBytes(size),
+ markerBit(mBit),
+
+ frameType(kVideoFrameDelta),
+ codec(kVideoCodecUnknown),
+ isFirstPacket(false),
+ completeNALU(kNaluComplete),
+ insertStartCode(false),
+ width(0),
+ height(0),
+ codecSpecificHeader() {}
+
+void VCMPacket::Reset() {
+ payloadType = 0;
+ timestamp = 0;
+ ntp_time_ms_ = 0;
+ seqNum = 0;
+ dataPtr = NULL;
+ sizeBytes = 0;
+ markerBit = false;
+ frameType = kEmptyFrame;
+ codec = kVideoCodecUnknown;
+ isFirstPacket = false;
+ completeNALU = kNaluUnset;
+ insertStartCode = false;
+ width = 0;
+ height = 0;
+ memset(&codecSpecificHeader, 0, sizeof(RTPVideoHeader));
+}
+
+void VCMPacket::CopyCodecSpecifics(const RTPVideoHeader& videoHeader) {
+ if (markerBit) {
+ codecSpecificHeader.rotation = videoHeader.rotation;
+ }
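+  // Derive NALU completeness from the packet's position within the frame:
+  // first + marker => complete, first only => start, marker only => end,
+  // otherwise => incomplete.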
+ switch (videoHeader.codec) {
+ case kRtpVideoVp8:
+      // Handle all packets within a frame as depending on the previous
+      // packet.
+      // TODO(holmer): This should be changed to make fragments independent
+      // when the VP8 RTP receiver supports fragments.
+ if (isFirstPacket && markerBit)
+ completeNALU = kNaluComplete;
+ else if (isFirstPacket)
+ completeNALU = kNaluStart;
+ else if (markerBit)
+ completeNALU = kNaluEnd;
+ else
+ completeNALU = kNaluIncomplete;
+
+ codec = kVideoCodecVP8;
+ return;
+ case kRtpVideoVp9:
+ if (isFirstPacket && markerBit)
+ completeNALU = kNaluComplete;
+ else if (isFirstPacket)
+ completeNALU = kNaluStart;
+ else if (markerBit)
+ completeNALU = kNaluEnd;
+ else
+ completeNALU = kNaluIncomplete;
+
+ codec = kVideoCodecVP9;
+ return;
+ case kRtpVideoH264:
+ isFirstPacket = videoHeader.isFirstPacket;
+ if (isFirstPacket)
+ insertStartCode = true;
+
+ if (isFirstPacket && markerBit) {
+ completeNALU = kNaluComplete;
+ } else if (isFirstPacket) {
+ completeNALU = kNaluStart;
+ } else if (markerBit) {
+ completeNALU = kNaluEnd;
+ } else {
+ completeNALU = kNaluIncomplete;
+ }
+ codec = kVideoCodecH264;
+ return;
+ case kRtpVideoGeneric:
+ case kRtpVideoNone:
+ codec = kVideoCodecUnknown;
+ return;
+ }
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/video_coding/packet.h b/webrtc/modules/video_coding/packet.h
new file mode 100644
index 0000000000..b77c1df039
--- /dev/null
+++ b/webrtc/modules/video_coding/packet.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_PACKET_H_
+#define WEBRTC_MODULES_VIDEO_CODING_PACKET_H_
+
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/video_coding/jitter_buffer_common.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+class VCMPacket {
+ public:
+ VCMPacket();
+ VCMPacket(const uint8_t* ptr,
+ const size_t size,
+ const WebRtcRTPHeader& rtpHeader);
+ VCMPacket(const uint8_t* ptr,
+ size_t size,
+ uint16_t seqNum,
+ uint32_t timestamp,
+ bool markerBit);
+
+ void Reset();
+
+ uint8_t payloadType;
+ uint32_t timestamp;
+ // NTP time of the capture time in local timebase in milliseconds.
+ int64_t ntp_time_ms_;
+ uint16_t seqNum;
+ const uint8_t* dataPtr;
+ size_t sizeBytes;
+ bool markerBit;
+
+ FrameType frameType;
+ VideoCodecType codec;
+
+  bool isFirstPacket;  // True if this is the first packet of a frame.
+ VCMNaluCompleteness completeNALU; // Default is kNaluIncomplete.
+ bool insertStartCode; // True if a start code should be inserted before this
+ // packet.
+ int width;
+ int height;
+ RTPVideoHeader codecSpecificHeader;
+
+ protected:
+ void CopyCodecSpecifics(const RTPVideoHeader& videoHeader);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_CODING_PACKET_H_
diff --git a/webrtc/modules/video_coding/qm_select.cc b/webrtc/modules/video_coding/qm_select.cc
new file mode 100644
index 0000000000..9da42bb33c
--- /dev/null
+++ b/webrtc/modules/video_coding/qm_select.cc
@@ -0,0 +1,953 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_coding/qm_select.h"
+
+#include <math.h>
+
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/video_coding/include/video_coding_defines.h"
+#include "webrtc/modules/video_coding/internal_defines.h"
+#include "webrtc/modules/video_coding/qm_select_data.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+namespace webrtc {
+
+// QM-METHOD class
+
+VCMQmMethod::VCMQmMethod()
+ : content_metrics_(NULL),
+ width_(0),
+ height_(0),
+ user_frame_rate_(0.0f),
+ native_width_(0),
+ native_height_(0),
+ native_frame_rate_(0.0f),
+ image_type_(kVGA),
+ framerate_level_(kFrameRateHigh),
+ init_(false) {
+ ResetQM();
+}
+
+VCMQmMethod::~VCMQmMethod() {}
+
+void VCMQmMethod::ResetQM() {
+ aspect_ratio_ = 1.0f;
+ motion_.Reset();
+ spatial_.Reset();
+ content_class_ = 0;
+}
+
+uint8_t VCMQmMethod::ComputeContentClass() {
+ ComputeMotionNFD();
+ ComputeSpatial();
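+  // Map the motion and spatial levels (three values each) onto one of the
+  // nine content classes used to index the QM selection tables.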
+ return content_class_ = 3 * motion_.level + spatial_.level;
+}
+
+void VCMQmMethod::UpdateContent(const VideoContentMetrics* contentMetrics) {
+ content_metrics_ = contentMetrics;
+}
+
+void VCMQmMethod::ComputeMotionNFD() {
+ if (content_metrics_) {
+ motion_.value = content_metrics_->motion_magnitude;
+ }
+ // Determine motion level.
+ if (motion_.value < kLowMotionNfd) {
+ motion_.level = kLow;
+ } else if (motion_.value > kHighMotionNfd) {
+ motion_.level = kHigh;
+ } else {
+ motion_.level = kDefault;
+ }
+}
+
+void VCMQmMethod::ComputeSpatial() {
+  float spatial_err = 0.0f;
+  float spatial_err_h = 0.0f;
+  float spatial_err_v = 0.0f;
+ if (content_metrics_) {
+ spatial_err = content_metrics_->spatial_pred_err;
+ spatial_err_h = content_metrics_->spatial_pred_err_h;
+ spatial_err_v = content_metrics_->spatial_pred_err_v;
+ }
+ // Spatial measure: take average of 3 prediction errors.
+ spatial_.value = (spatial_err + spatial_err_h + spatial_err_v) / 3.0f;
+
+ // Reduce thresholds for large scenes/higher pixel correlation.
+  float scale2 = image_type_ > kVGA ? kScaleTexture : 1.0f;
+
+ if (spatial_.value > scale2 * kHighTexture) {
+ spatial_.level = kHigh;
+ } else if (spatial_.value < scale2 * kLowTexture) {
+ spatial_.level = kLow;
+ } else {
+ spatial_.level = kDefault;
+ }
+}
+
+ImageType VCMQmMethod::GetImageType(uint16_t width, uint16_t height) {
+ // Get the image type for the encoder frame size.
+ uint32_t image_size = width * height;
+ if (image_size == kSizeOfImageType[kQCIF]) {
+ return kQCIF;
+ } else if (image_size == kSizeOfImageType[kHCIF]) {
+ return kHCIF;
+ } else if (image_size == kSizeOfImageType[kQVGA]) {
+ return kQVGA;
+ } else if (image_size == kSizeOfImageType[kCIF]) {
+ return kCIF;
+ } else if (image_size == kSizeOfImageType[kHVGA]) {
+ return kHVGA;
+ } else if (image_size == kSizeOfImageType[kVGA]) {
+ return kVGA;
+ } else if (image_size == kSizeOfImageType[kQFULLHD]) {
+ return kQFULLHD;
+ } else if (image_size == kSizeOfImageType[kWHD]) {
+ return kWHD;
+ } else if (image_size == kSizeOfImageType[kFULLHD]) {
+ return kFULLHD;
+ } else {
+    // No exact match; find the closest one.
+ return FindClosestImageType(width, height);
+ }
+}
+
+ImageType VCMQmMethod::FindClosestImageType(uint16_t width, uint16_t height) {
+ float size = static_cast<float>(width * height);
+ float min = size;
+ int isel = 0;
+ for (int i = 0; i < kNumImageTypes; ++i) {
+ float dist = fabs(size - kSizeOfImageType[i]);
+ if (dist < min) {
+ min = dist;
+ isel = i;
+ }
+ }
+ return static_cast<ImageType>(isel);
+}
+
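+// Buckets the average frame rate into one of the four levels used by the
+// rate tables (e.g. |kFrameRateFac|).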
+FrameRateLevelClass VCMQmMethod::FrameRateLevel(float avg_framerate) {
+ if (avg_framerate <= kLowFrameRate) {
+ return kFrameRateLow;
+ } else if (avg_framerate <= kMiddleFrameRate) {
+ return kFrameRateMiddle1;
+ } else if (avg_framerate <= kHighFrameRate) {
+ return kFrameRateMiddle2;
+ } else {
+ return kFrameRateHigh;
+ }
+}
+
+// RESOLUTION CLASS
+
+VCMQmResolution::VCMQmResolution() : qm_(new VCMResolutionScale()) {
+ Reset();
+}
+
+VCMQmResolution::~VCMQmResolution() {
+ delete qm_;
+}
+
+void VCMQmResolution::ResetRates() {
+ sum_target_rate_ = 0.0f;
+ sum_incoming_framerate_ = 0.0f;
+ sum_rate_MM_ = 0.0f;
+ sum_rate_MM_sgn_ = 0.0f;
+ sum_packet_loss_ = 0.0f;
+ buffer_level_ = kInitBufferLevel * target_bitrate_;
+ frame_cnt_ = 0;
+ frame_cnt_delta_ = 0;
+ low_buffer_cnt_ = 0;
+ update_rate_cnt_ = 0;
+}
+
+void VCMQmResolution::ResetDownSamplingState() {
+ state_dec_factor_spatial_ = 1.0;
+ state_dec_factor_temporal_ = 1.0;
+ for (int i = 0; i < kDownActionHistorySize; i++) {
+ down_action_history_[i].spatial = kNoChangeSpatial;
+ down_action_history_[i].temporal = kNoChangeTemporal;
+ }
+}
+
+void VCMQmResolution::Reset() {
+ target_bitrate_ = 0.0f;
+ incoming_framerate_ = 0.0f;
+ buffer_level_ = 0.0f;
+ per_frame_bandwidth_ = 0.0f;
+ avg_target_rate_ = 0.0f;
+ avg_incoming_framerate_ = 0.0f;
+ avg_ratio_buffer_low_ = 0.0f;
+ avg_rate_mismatch_ = 0.0f;
+ avg_rate_mismatch_sgn_ = 0.0f;
+ avg_packet_loss_ = 0.0f;
+ encoder_state_ = kStableEncoding;
+ num_layers_ = 1;
+ ResetRates();
+ ResetDownSamplingState();
+ ResetQM();
+}
+
+EncoderState VCMQmResolution::GetEncoderState() {
+ return encoder_state_;
+}
+
+// Initialize state after re-initializing the encoder,
+// i.e., after SetEncodingData() in mediaOpt.
+int VCMQmResolution::Initialize(float bitrate,
+ float user_framerate,
+ uint16_t width,
+ uint16_t height,
+ int num_layers) {
+ if (user_framerate == 0.0f || width == 0 || height == 0) {
+ return VCM_PARAMETER_ERROR;
+ }
+ Reset();
+ target_bitrate_ = bitrate;
+ incoming_framerate_ = user_framerate;
+ UpdateCodecParameters(user_framerate, width, height);
+ native_width_ = width;
+ native_height_ = height;
+ native_frame_rate_ = user_framerate;
+ num_layers_ = num_layers;
+ // Initial buffer level.
+ buffer_level_ = kInitBufferLevel * target_bitrate_;
+ // Per-frame bandwidth.
+ per_frame_bandwidth_ = target_bitrate_ / user_framerate;
+ init_ = true;
+ return VCM_OK;
+}
+
+void VCMQmResolution::UpdateCodecParameters(float frame_rate,
+ uint16_t width,
+ uint16_t height) {
+ width_ = width;
+ height_ = height;
+  // |user_frame_rate_| is the target frame rate for the VPM frame dropper.
+ user_frame_rate_ = frame_rate;
+ image_type_ = GetImageType(width, height);
+}
+
+// Update rate data after every encoded frame.
+void VCMQmResolution::UpdateEncodedSize(size_t encoded_size) {
+ frame_cnt_++;
+ // Convert to Kbps.
+ float encoded_size_kbits = 8.0f * static_cast<float>(encoded_size) / 1000.0f;
+
+ // Update the buffer level:
+ // Note this is not the actual encoder buffer level.
+ // |buffer_level_| is reset to an initial value after SelectResolution is
+ // called, and does not account for frame dropping by encoder or VCM.
+ buffer_level_ += per_frame_bandwidth_ - encoded_size_kbits;
+
+  // Counter for occurrences of low buffer level:
+  // low/negative values mean the encoder is likely dropping frames.
+ if (buffer_level_ <= kPercBufferThr * kInitBufferLevel * target_bitrate_) {
+ low_buffer_cnt_++;
+ }
+}
+
+// Update various quantities after SetTargetRates in MediaOpt.
+void VCMQmResolution::UpdateRates(float target_bitrate,
+ float encoder_sent_rate,
+ float incoming_framerate,
+ uint8_t packet_loss) {
+  // Sum the target bitrate: this is the encoder rate from the previous update
+  // (~1 sec), i.e., before the update for the next ~1 sec.
+ sum_target_rate_ += target_bitrate_;
+ update_rate_cnt_++;
+
+ // Sum the received (from RTCP reports) packet loss rates.
+ sum_packet_loss_ += static_cast<float>(packet_loss / 255.0);
+
+ // Sum the sequence rate mismatch:
+ // Mismatch here is based on the difference between the target rate
+ // used (in previous ~1sec) and the average actual encoding rate measured
+ // at previous ~1sec.
+ float diff = target_bitrate_ - encoder_sent_rate;
+ if (target_bitrate_ > 0.0)
+ sum_rate_MM_ += fabs(diff) / target_bitrate_;
+  int sgn_diff = diff > 0 ? 1 : (diff < 0 ? -1 : 0);
+  // Track consistent under-shooting (+) / over-shooting (-) of the target
+  // rate.
+  sum_rate_MM_sgn_ += sgn_diff;
+
+ // Update with the current new target and frame rate:
+ // these values are ones the encoder will use for the current/next ~1sec.
+ target_bitrate_ = target_bitrate;
+ incoming_framerate_ = incoming_framerate;
+ sum_incoming_framerate_ += incoming_framerate_;
+ // Update the per_frame_bandwidth:
+ // this is the per_frame_bw for the current/next ~1sec.
+ per_frame_bandwidth_ = 0.0f;
+ if (incoming_framerate_ > 0.0f) {
+ per_frame_bandwidth_ = target_bitrate_ / incoming_framerate_;
+ }
+}
+
+// Select the resolution factors: frame size and frame rate change (qm scales).
+// Selection is for going down in resolution, or for going back up
+// (if a previous down-sampling action was taken).
+
+// In the current version the following constraints are imposed:
+// 1) We only allow for one action, either down or up, at a given time.
+// 2) The possible down-sampling actions are: spatial by 1/2x1/2, 3/4x3/4;
+// temporal/frame rate reduction by 1/2 and 2/3.
+// 3) The action for going back up is the reverse of last (spatial or temporal)
+// down-sampling action. The list of down-sampling actions from the
+// Initialize() state are kept in |down_action_history_|.
+// 4) The total amount of down-sampling (spatial and/or temporal) from the
+// Initialize() state (native resolution) is limited by various factors.
+int VCMQmResolution::SelectResolution(VCMResolutionScale** qm) {
+ if (!init_) {
+ return VCM_UNINITIALIZED;
+ }
+ if (content_metrics_ == NULL) {
+ Reset();
+ *qm = qm_;
+ return VCM_OK;
+ }
+
+ // Check conditions on down-sampling state.
+ assert(state_dec_factor_spatial_ >= 1.0f);
+ assert(state_dec_factor_temporal_ >= 1.0f);
+ assert(state_dec_factor_spatial_ <= kMaxSpatialDown);
+ assert(state_dec_factor_temporal_ <= kMaxTempDown);
+ assert(state_dec_factor_temporal_ * state_dec_factor_spatial_ <=
+ kMaxTotalDown);
+
+ // Compute content class for selection.
+ content_class_ = ComputeContentClass();
+ // Compute various rate quantities for selection.
+ ComputeRatesForSelection();
+
+ // Get the encoder state.
+ ComputeEncoderState();
+
+ // Default settings: no action.
+ SetDefaultAction();
+ *qm = qm_;
+
+ // Check for going back up in resolution, if we have had some down-sampling
+ // relative to native state in Initialize().
+ if (down_action_history_[0].spatial != kNoChangeSpatial ||
+ down_action_history_[0].temporal != kNoChangeTemporal) {
+ if (GoingUpResolution()) {
+ *qm = qm_;
+ return VCM_OK;
+ }
+ }
+
+ // Check for going down in resolution.
+ if (GoingDownResolution()) {
+ *qm = qm_;
+ return VCM_OK;
+ }
+ return VCM_OK;
+}
+
+void VCMQmResolution::SetDefaultAction() {
+ qm_->codec_width = width_;
+ qm_->codec_height = height_;
+ qm_->frame_rate = user_frame_rate_;
+ qm_->change_resolution_spatial = false;
+ qm_->change_resolution_temporal = false;
+ qm_->spatial_width_fact = 1.0f;
+ qm_->spatial_height_fact = 1.0f;
+ qm_->temporal_fact = 1.0f;
+ action_.spatial = kNoChangeSpatial;
+ action_.temporal = kNoChangeTemporal;
+}
+
+void VCMQmResolution::ComputeRatesForSelection() {
+ avg_target_rate_ = 0.0f;
+ avg_incoming_framerate_ = 0.0f;
+ avg_ratio_buffer_low_ = 0.0f;
+ avg_rate_mismatch_ = 0.0f;
+ avg_rate_mismatch_sgn_ = 0.0f;
+ avg_packet_loss_ = 0.0f;
+ if (frame_cnt_ > 0) {
+ avg_ratio_buffer_low_ =
+ static_cast<float>(low_buffer_cnt_) / static_cast<float>(frame_cnt_);
+ }
+ if (update_rate_cnt_ > 0) {
+ avg_rate_mismatch_ =
+ static_cast<float>(sum_rate_MM_) / static_cast<float>(update_rate_cnt_);
+ avg_rate_mismatch_sgn_ = static_cast<float>(sum_rate_MM_sgn_) /
+ static_cast<float>(update_rate_cnt_);
+ avg_target_rate_ = static_cast<float>(sum_target_rate_) /
+ static_cast<float>(update_rate_cnt_);
+ avg_incoming_framerate_ = static_cast<float>(sum_incoming_framerate_) /
+ static_cast<float>(update_rate_cnt_);
+ avg_packet_loss_ = static_cast<float>(sum_packet_loss_) /
+ static_cast<float>(update_rate_cnt_);
+ }
+ // For selection we may want to weight some quantities more heavily
+ // with the current (i.e., next ~1sec) rate values.
+ avg_target_rate_ =
+ kWeightRate * avg_target_rate_ + (1.0 - kWeightRate) * target_bitrate_;
+ avg_incoming_framerate_ = kWeightRate * avg_incoming_framerate_ +
+ (1.0 - kWeightRate) * incoming_framerate_;
+ // Use base layer frame rate for temporal layers: this will favor spatial.
+ assert(num_layers_ > 0);
+ framerate_level_ = FrameRateLevel(avg_incoming_framerate_ /
+ static_cast<float>(1 << (num_layers_ - 1)));
+}
+
+void VCMQmResolution::ComputeEncoderState() {
+ // Default.
+ encoder_state_ = kStableEncoding;
+
+ // Assign stressed state if:
+ // 1) occurrences of low buffer levels is high, or
+ // 2) rate mis-match is high, and consistent over-shooting by encoder.
+ if ((avg_ratio_buffer_low_ > kMaxBufferLow) ||
+ ((avg_rate_mismatch_ > kMaxRateMisMatch) &&
+ (avg_rate_mismatch_sgn_ < -kRateOverShoot))) {
+ encoder_state_ = kStressedEncoding;
+ }
+ // Assign easy state if:
+ // 1) rate mis-match is high, and
+ // 2) consistent under-shooting by encoder.
+ if ((avg_rate_mismatch_ > kMaxRateMisMatch) &&
+ (avg_rate_mismatch_sgn_ > kRateUnderShoot)) {
+ encoder_state_ = kEasyEncoding;
+ }
+}
+
+bool VCMQmResolution::GoingUpResolution() {
+ // For going up, we check for undoing the previous down-sampling action.
+
+ float fac_width = kFactorWidthSpatial[down_action_history_[0].spatial];
+ float fac_height = kFactorHeightSpatial[down_action_history_[0].spatial];
+ float fac_temp = kFactorTemporal[down_action_history_[0].temporal];
+ // For going up spatially, we allow for going up by 3/4x3/4 at each stage.
+ // So if the last spatial action was 1/2x1/2 it would be undone in 2 stages.
+ // Modify the fac_width/height for this case.
+ if (down_action_history_[0].spatial == kOneQuarterSpatialUniform) {
+ fac_width = kFactorWidthSpatial[kOneQuarterSpatialUniform] /
+ kFactorWidthSpatial[kOneHalfSpatialUniform];
+ fac_height = kFactorHeightSpatial[kOneQuarterSpatialUniform] /
+ kFactorHeightSpatial[kOneHalfSpatialUniform];
+ }
+
+ // Check if we should go up both spatially and temporally.
+ if (down_action_history_[0].spatial != kNoChangeSpatial &&
+ down_action_history_[0].temporal != kNoChangeTemporal) {
+ if (ConditionForGoingUp(fac_width, fac_height, fac_temp,
+ kTransRateScaleUpSpatialTemp)) {
+ action_.spatial = down_action_history_[0].spatial;
+ action_.temporal = down_action_history_[0].temporal;
+ UpdateDownsamplingState(kUpResolution);
+ return true;
+ }
+ }
+ // Check if we should go up either spatially or temporally.
+ bool selected_up_spatial = false;
+ bool selected_up_temporal = false;
+ if (down_action_history_[0].spatial != kNoChangeSpatial) {
+ selected_up_spatial = ConditionForGoingUp(fac_width, fac_height, 1.0f,
+ kTransRateScaleUpSpatial);
+ }
+ if (down_action_history_[0].temporal != kNoChangeTemporal) {
+ selected_up_temporal =
+ ConditionForGoingUp(1.0f, 1.0f, fac_temp, kTransRateScaleUpTemp);
+ }
+ if (selected_up_spatial && !selected_up_temporal) {
+ action_.spatial = down_action_history_[0].spatial;
+ action_.temporal = kNoChangeTemporal;
+ UpdateDownsamplingState(kUpResolution);
+ return true;
+ } else if (!selected_up_spatial && selected_up_temporal) {
+ action_.spatial = kNoChangeSpatial;
+ action_.temporal = down_action_history_[0].temporal;
+ UpdateDownsamplingState(kUpResolution);
+ return true;
+ } else if (selected_up_spatial && selected_up_temporal) {
+ PickSpatialOrTemporal();
+ UpdateDownsamplingState(kUpResolution);
+ return true;
+ }
+ return false;
+}
+
+bool VCMQmResolution::ConditionForGoingUp(float fac_width,
+ float fac_height,
+ float fac_temp,
+ float scale_fac) {
+ float estimated_transition_rate_up =
+ GetTransitionRate(fac_width, fac_height, fac_temp, scale_fac);
+ // Go back up if:
+ // 1) target rate is above threshold and current encoder state is stable, or
+ // 2) encoder state is easy (encoder is significantly under-shooting target).
+  return ((avg_target_rate_ > estimated_transition_rate_up) &&
+          (encoder_state_ == kStableEncoding)) ||
+         (encoder_state_ == kEasyEncoding);
+}
+
+bool VCMQmResolution::GoingDownResolution() {
+ float estimated_transition_rate_down =
+ GetTransitionRate(1.0f, 1.0f, 1.0f, 1.0f);
+ float max_rate = kFrameRateFac[framerate_level_] * kMaxRateQm[image_type_];
+ // Resolution reduction if:
+ // (1) target rate is below transition rate, or
+ // (2) encoder is in stressed state and target rate below a max threshold.
+ if ((avg_target_rate_ < estimated_transition_rate_down) ||
+ (encoder_state_ == kStressedEncoding && avg_target_rate_ < max_rate)) {
+ // Get the down-sampling action: based on content class, and how low
+ // average target rate is relative to transition rate.
+ uint8_t spatial_fact =
+ kSpatialAction[content_class_ +
+ 9 * RateClass(estimated_transition_rate_down)];
+ uint8_t temp_fact =
+ kTemporalAction[content_class_ +
+ 9 * RateClass(estimated_transition_rate_down)];
+
+ switch (spatial_fact) {
+ case 4: {
+ action_.spatial = kOneQuarterSpatialUniform;
+ break;
+ }
+ case 2: {
+ action_.spatial = kOneHalfSpatialUniform;
+ break;
+ }
+ case 1: {
+ action_.spatial = kNoChangeSpatial;
+ break;
+ }
+ default: { assert(false); }
+ }
+ switch (temp_fact) {
+ case 3: {
+ action_.temporal = kTwoThirdsTemporal;
+ break;
+ }
+ case 2: {
+ action_.temporal = kOneHalfTemporal;
+ break;
+ }
+ case 1: {
+ action_.temporal = kNoChangeTemporal;
+ break;
+ }
+ default: { assert(false); }
+ }
+ // Only allow for one action (spatial or temporal) at a given time.
+ assert(action_.temporal == kNoChangeTemporal ||
+ action_.spatial == kNoChangeSpatial);
+
+ // Adjust cases not captured in tables, mainly based on frame rate, and
+ // also check for odd frame sizes.
+ AdjustAction();
+
+ // Update down-sampling state.
+ if (action_.spatial != kNoChangeSpatial ||
+ action_.temporal != kNoChangeTemporal) {
+ UpdateDownsamplingState(kDownResolution);
+ return true;
+ }
+ }
+ return false;
+}
+
+float VCMQmResolution::GetTransitionRate(float fac_width,
+ float fac_height,
+ float fac_temp,
+ float scale_fac) {
+ ImageType image_type =
+ GetImageType(static_cast<uint16_t>(fac_width * width_),
+ static_cast<uint16_t>(fac_height * height_));
+
+ FrameRateLevelClass framerate_level =
+ FrameRateLevel(fac_temp * avg_incoming_framerate_);
+ // If we are checking for going up temporally, and this is the last
+ // temporal action, then use native frame rate.
+ if (down_action_history_[1].temporal == kNoChangeTemporal &&
+ fac_temp > 1.0f) {
+ framerate_level = FrameRateLevel(native_frame_rate_);
+ }
+
+ // The maximum allowed rate below which down-sampling is allowed:
+ // Nominal values based on image format (frame size and frame rate).
+ float max_rate = kFrameRateFac[framerate_level] * kMaxRateQm[image_type];
+
+ uint8_t image_class = image_type > kVGA ? 1 : 0;
+ uint8_t table_index = image_class * 9 + content_class_;
+  // Scale factor for the down-sampling transition threshold:
+  // based on the content class and the image size.
+  float scale_trans_rate = kScaleTransRateQm[table_index];
+  // Threshold bitrate for resolution action.
+  return static_cast<float>(scale_fac * scale_trans_rate * max_rate);
+}
+
+void VCMQmResolution::UpdateDownsamplingState(UpDownAction up_down) {
+ if (up_down == kUpResolution) {
+ qm_->spatial_width_fact = 1.0f / kFactorWidthSpatial[action_.spatial];
+ qm_->spatial_height_fact = 1.0f / kFactorHeightSpatial[action_.spatial];
+ // If last spatial action was 1/2x1/2, we undo it in two steps, so the
+ // spatial scale factor in this first step is modified as (4.0/3.0 / 2.0).
+ if (action_.spatial == kOneQuarterSpatialUniform) {
+ qm_->spatial_width_fact = 1.0f *
+ kFactorWidthSpatial[kOneHalfSpatialUniform] /
+ kFactorWidthSpatial[kOneQuarterSpatialUniform];
+ qm_->spatial_height_fact =
+ 1.0f * kFactorHeightSpatial[kOneHalfSpatialUniform] /
+ kFactorHeightSpatial[kOneQuarterSpatialUniform];
+ }
+ qm_->temporal_fact = 1.0f / kFactorTemporal[action_.temporal];
+ RemoveLastDownAction();
+ } else if (up_down == kDownResolution) {
+ ConstrainAmountOfDownSampling();
+ ConvertSpatialFractionalToWhole();
+ qm_->spatial_width_fact = kFactorWidthSpatial[action_.spatial];
+ qm_->spatial_height_fact = kFactorHeightSpatial[action_.spatial];
+ qm_->temporal_fact = kFactorTemporal[action_.temporal];
+ InsertLatestDownAction();
+ } else {
+ // This function should only be called if either the Up or Down action
+ // has been selected.
+ assert(false);
+ }
+ UpdateCodecResolution();
+ state_dec_factor_spatial_ = state_dec_factor_spatial_ *
+ qm_->spatial_width_fact *
+ qm_->spatial_height_fact;
+ state_dec_factor_temporal_ = state_dec_factor_temporal_ * qm_->temporal_fact;
+}
+
+void VCMQmResolution::UpdateCodecResolution() {
+ if (action_.spatial != kNoChangeSpatial) {
+ qm_->change_resolution_spatial = true;
+ qm_->codec_width =
+ static_cast<uint16_t>(width_ / qm_->spatial_width_fact + 0.5f);
+ qm_->codec_height =
+ static_cast<uint16_t>(height_ / qm_->spatial_height_fact + 0.5f);
+ // Size should not exceed native sizes.
+ assert(qm_->codec_width <= native_width_);
+ assert(qm_->codec_height <= native_height_);
+ // New sizes should be multiple of 2, otherwise spatial should not have
+ // been selected.
+ assert(qm_->codec_width % 2 == 0);
+ assert(qm_->codec_height % 2 == 0);
+ }
+ if (action_.temporal != kNoChangeTemporal) {
+ qm_->change_resolution_temporal = true;
+ // Update the frame rate based on the average incoming frame rate.
+ qm_->frame_rate = avg_incoming_framerate_ / qm_->temporal_fact + 0.5f;
+    if (down_action_history_[0].temporal == kNoChangeTemporal) {
+      // When we undo the last temporal-down action, make sure we go back up
+      // to the native frame rate. Since the incoming frame rate may
+      // fluctuate over time, |avg_incoming_framerate_| scaled back up may
+      // be smaller than |native_frame_rate_|.
+      qm_->frame_rate = native_frame_rate_;
+ }
+ }
+}
+
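+// Classifies |avg_target_rate_| relative to |transition_rate| into three
+// bands: 0 = well below, 1 = in between, 2 = at or above.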
+uint8_t VCMQmResolution::RateClass(float transition_rate) {
+ return avg_target_rate_ < (kFacLowRate * transition_rate)
+ ? 0
+ : (avg_target_rate_ >= transition_rate ? 2 : 1);
+}
+
+// TODO(marpan): Would be better to capture these frame rate adjustments by
+// extending the table data (qm_select_data.h).
+void VCMQmResolution::AdjustAction() {
+  // If the spatial level is in the default state (neither low nor high), the
+  // motion level is not high, and a spatial action was selected, switch to a
+  // 2/3 frame rate reduction if the average incoming frame rate is high.
+ if (spatial_.level == kDefault && motion_.level != kHigh &&
+ action_.spatial != kNoChangeSpatial &&
+ framerate_level_ == kFrameRateHigh) {
+ action_.spatial = kNoChangeSpatial;
+ action_.temporal = kTwoThirdsTemporal;
+ }
+ // If both motion and spatial level are low, and temporal down action was
+ // selected, switch to spatial 3/4x3/4 if the frame rate is not above the
+ // lower middle level (|kFrameRateMiddle1|).
+ if (motion_.level == kLow && spatial_.level == kLow &&
+ framerate_level_ <= kFrameRateMiddle1 &&
+ action_.temporal != kNoChangeTemporal) {
+ action_.spatial = kOneHalfSpatialUniform;
+ action_.temporal = kNoChangeTemporal;
+ }
+ // If spatial action is selected, and there has been too much spatial
+ // reduction already (i.e., 1/4), then switch to temporal action if the
+ // average frame rate is not low.
+ if (action_.spatial != kNoChangeSpatial &&
+ down_action_history_[0].spatial == kOneQuarterSpatialUniform &&
+ framerate_level_ != kFrameRateLow) {
+ action_.spatial = kNoChangeSpatial;
+ action_.temporal = kTwoThirdsTemporal;
+ }
+ // Never use temporal action if number of temporal layers is above 2.
+ if (num_layers_ > 2) {
+ if (action_.temporal != kNoChangeTemporal) {
+ action_.spatial = kOneHalfSpatialUniform;
+ }
+ action_.temporal = kNoChangeTemporal;
+ }
+ // If spatial action was selected, we need to make sure the frame sizes
+ // are multiples of two. Otherwise switch to 2/3 temporal.
+ if (action_.spatial != kNoChangeSpatial && !EvenFrameSize()) {
+ action_.spatial = kNoChangeSpatial;
+    // Only one action (spatial or temporal) is allowed at a given time; the
+    // spatial action was cleared above, so the temporal action can be set
+    // here.
+ action_.temporal = kTwoThirdsTemporal;
+ }
+}
+
+void VCMQmResolution::ConvertSpatialFractionalToWhole() {
+ // If 3/4 spatial is selected, check if there has been another 3/4,
+ // and if so, combine them into 1/2. 1/2 scaling is more efficient than 9/16.
+ // Note we define 3/4x3/4 spatial as kOneHalfSpatialUniform.
+ if (action_.spatial == kOneHalfSpatialUniform) {
+ bool found = false;
+ int isel = kDownActionHistorySize;
+ for (int i = 0; i < kDownActionHistorySize; ++i) {
+ if (down_action_history_[i].spatial == kOneHalfSpatialUniform) {
+ isel = i;
+ found = true;
+ break;
+ }
+ }
+ if (found) {
+ action_.spatial = kOneQuarterSpatialUniform;
+ state_dec_factor_spatial_ =
+ state_dec_factor_spatial_ /
+ (kFactorWidthSpatial[kOneHalfSpatialUniform] *
+ kFactorHeightSpatial[kOneHalfSpatialUniform]);
+ // Check if switching to 1/2x1/2 (=1/4) spatial is allowed.
+ ConstrainAmountOfDownSampling();
+ if (action_.spatial == kNoChangeSpatial) {
+ // Not allowed. Go back to 3/4x3/4 spatial.
+ action_.spatial = kOneHalfSpatialUniform;
+ state_dec_factor_spatial_ =
+ state_dec_factor_spatial_ *
+ kFactorWidthSpatial[kOneHalfSpatialUniform] *
+ kFactorHeightSpatial[kOneHalfSpatialUniform];
+ } else {
+ // Switching is allowed. Remove 3/4x3/4 from the history, and update
+ // the frame size.
+ for (int i = isel; i < kDownActionHistorySize - 1; ++i) {
+ down_action_history_[i].spatial = down_action_history_[i + 1].spatial;
+ }
+ width_ = width_ * kFactorWidthSpatial[kOneHalfSpatialUniform];
+ height_ = height_ * kFactorHeightSpatial[kOneHalfSpatialUniform];
+ }
+ }
+ }
+}
+
+// Returns false if the new frame sizes, under the current spatial action,
+// are not multiples of two.
+bool VCMQmResolution::EvenFrameSize() {
+ if (action_.spatial == kOneHalfSpatialUniform) {
+ if ((width_ * 3 / 4) % 2 != 0 || (height_ * 3 / 4) % 2 != 0) {
+ return false;
+ }
+ } else if (action_.spatial == kOneQuarterSpatialUniform) {
+ if ((width_ * 1 / 2) % 2 != 0 || (height_ * 1 / 2) % 2 != 0) {
+ return false;
+ }
+ }
+ return true;
+}
+
+void VCMQmResolution::InsertLatestDownAction() {
+ if (action_.spatial != kNoChangeSpatial) {
+ for (int i = kDownActionHistorySize - 1; i > 0; --i) {
+ down_action_history_[i].spatial = down_action_history_[i - 1].spatial;
+ }
+ down_action_history_[0].spatial = action_.spatial;
+ }
+ if (action_.temporal != kNoChangeTemporal) {
+ for (int i = kDownActionHistorySize - 1; i > 0; --i) {
+ down_action_history_[i].temporal = down_action_history_[i - 1].temporal;
+ }
+ down_action_history_[0].temporal = action_.temporal;
+ }
+}
+
+void VCMQmResolution::RemoveLastDownAction() {
+ if (action_.spatial != kNoChangeSpatial) {
+ // If the last spatial action was 1/2x1/2 we replace it with 3/4x3/4.
+ if (action_.spatial == kOneQuarterSpatialUniform) {
+ down_action_history_[0].spatial = kOneHalfSpatialUniform;
+ } else {
+ for (int i = 0; i < kDownActionHistorySize - 1; ++i) {
+ down_action_history_[i].spatial = down_action_history_[i + 1].spatial;
+ }
+ down_action_history_[kDownActionHistorySize - 1].spatial =
+ kNoChangeSpatial;
+ }
+ }
+ if (action_.temporal != kNoChangeTemporal) {
+ for (int i = 0; i < kDownActionHistorySize - 1; ++i) {
+ down_action_history_[i].temporal = down_action_history_[i + 1].temporal;
+ }
+ down_action_history_[kDownActionHistorySize - 1].temporal =
+ kNoChangeTemporal;
+ }
+}
+
+void VCMQmResolution::ConstrainAmountOfDownSampling() {
+ // Sanity checks on down-sampling selection:
+ // override the settings for too small image size and/or frame rate.
+ // Also check the limit on current down-sampling states.
+
+ float spatial_width_fact = kFactorWidthSpatial[action_.spatial];
+ float spatial_height_fact = kFactorHeightSpatial[action_.spatial];
+ float temporal_fact = kFactorTemporal[action_.temporal];
+ float new_dec_factor_spatial =
+ state_dec_factor_spatial_ * spatial_width_fact * spatial_height_fact;
+ float new_dec_factor_temp = state_dec_factor_temporal_ * temporal_fact;
+
+ // No spatial sampling if current frame size is too small, or if the
+ // amount of spatial down-sampling is above maximum spatial down-action.
+ if ((width_ * height_) <= kMinImageSize ||
+ new_dec_factor_spatial > kMaxSpatialDown) {
+ action_.spatial = kNoChangeSpatial;
+ new_dec_factor_spatial = state_dec_factor_spatial_;
+ }
+ // No frame rate reduction if the average frame rate is at or below the
+ // minimum frame rate (|kMinFrameRate|), or if the amount of temporal
+ // down-sampling is above the maximum temporal down-action.
+ if (avg_incoming_framerate_ <= kMinFrameRate ||
+ new_dec_factor_temp > kMaxTempDown) {
+ action_.temporal = kNoChangeTemporal;
+ new_dec_factor_temp = state_dec_factor_temporal_;
+ }
+ // Check if the total (spatial-temporal) down-action is above maximum allowed,
+ // if so, disallow the current selected down-action.
+ if (new_dec_factor_spatial * new_dec_factor_temp > kMaxTotalDown) {
+ if (action_.spatial != kNoChangeSpatial) {
+ action_.spatial = kNoChangeSpatial;
+ } else if (action_.temporal != kNoChangeTemporal) {
+ action_.temporal = kNoChangeTemporal;
+ } else {
+ // We only allow for one action (spatial or temporal) at a given time, so
+ // either spatial or temporal action is selected when this function is
+ // called. If the selected action is disallowed from one of the above
+ // 2 prior conditions (on spatial & temporal max down-action), then this
+ // condition "total down-action > |kMaxTotalDown|" would not be entered.
+ assert(false);
+ }
+ }
+}
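To make the accounting above concrete (a worked example, not part of the original change): with one 1/2x1/2 action already applied, state_dec_factor_spatial_ is 2.0 * 2.0 = 4.0. A newly selected 3/4x3/4 action gives new_dec_factor_spatial = 4.0 * (4/3) * (4/3) ≈ 7.1, which stays under kMaxSpatialDown = 8.0 and survives, whereas a second 1/2x1/2 would give 16.0 and be reset to kNoChangeSpatial.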
+
+void VCMQmResolution::PickSpatialOrTemporal() {
+ // Pick the one that has had the most down-sampling thus far.
+ if (state_dec_factor_spatial_ > state_dec_factor_temporal_) {
+ action_.spatial = down_action_history_[0].spatial;
+ action_.temporal = kNoChangeTemporal;
+ } else {
+ action_.spatial = kNoChangeSpatial;
+ action_.temporal = down_action_history_[0].temporal;
+ }
+}
+
+// TODO(marpan): Update when we allow for directional spatial down-sampling.
+void VCMQmResolution::SelectSpatialDirectionMode(float transition_rate) {
+ // The default is 4/3x4/3.
+ // For bit rates well below the transitional rate, we select 2x2.
+ if (avg_target_rate_ < transition_rate * kRateRedSpatial2X2) {
+ qm_->spatial_width_fact = 2.0f;
+ qm_->spatial_height_fact = 2.0f;
+ }
+ // Otherwise check prediction errors and aspect ratio.
+ float spatial_err = 0.0f;
+ float spatial_err_h = 0.0f;
+ float spatial_err_v = 0.0f;
+ if (content_metrics_) {
+ spatial_err = content_metrics_->spatial_pred_err;
+ spatial_err_h = content_metrics_->spatial_pred_err_h;
+ spatial_err_v = content_metrics_->spatial_pred_err_v;
+ }
+
+ // Favor 1x2 if aspect_ratio is 16:9.
+ if (aspect_ratio_ >= 16.0f / 9.0f) {
+ // Check if 1x2 has lowest prediction error.
+ if (spatial_err_h < spatial_err && spatial_err_h < spatial_err_v) {
+ qm_->spatial_width_fact = 2.0f;
+ qm_->spatial_height_fact = 1.0f;
+ }
+ }
+ // Check for 4/3x4/3 selection: favor 2x2 over 1x2 and 2x1.
+ if (spatial_err < spatial_err_h * (1.0f + kSpatialErr2x2VsHoriz) &&
+ spatial_err < spatial_err_v * (1.0f + kSpatialErr2X2VsVert)) {
+ qm_->spatial_width_fact = 4.0f / 3.0f;
+ qm_->spatial_height_fact = 4.0f / 3.0f;
+ }
+ // Check for 2x1 selection.
+ if (spatial_err_v < spatial_err_h * (1.0f - kSpatialErrVertVsHoriz) &&
+ spatial_err_v < spatial_err * (1.0f - kSpatialErr2X2VsVert)) {
+ qm_->spatial_width_fact = 1.0f;
+ qm_->spatial_height_fact = 2.0f;
+ }
+}
+
+// ROBUSTNESS CLASS
+
+VCMQmRobustness::VCMQmRobustness() {
+ Reset();
+}
+
+VCMQmRobustness::~VCMQmRobustness() {}
+
+void VCMQmRobustness::Reset() {
+ prev_total_rate_ = 0.0f;
+ prev_rtt_time_ = 0;
+ prev_packet_loss_ = 0;
+ prev_code_rate_delta_ = 0;
+ ResetQM();
+}
+
+// Adjust the FEC rate based on the content and the network state
+// (packet loss rate, total rate/bandwidth, round trip time).
+// Note that |packet_loss| here is the filtered loss value.
+float VCMQmRobustness::AdjustFecFactor(uint8_t code_rate_delta,
+ float total_rate,
+ float framerate,
+ int64_t rtt_time,
+ uint8_t packet_loss) {
+ // Default: no adjustment
+ float adjust_fec = 1.0f;
+ if (content_metrics_ == NULL) {
+ return adjust_fec;
+ }
+ // Compute class state of the content.
+ ComputeMotionNFD();
+ ComputeSpatial();
+
+ // TODO(marpan): Set FEC adjustment factor.
+
+ // Keep track of previous values of network state:
+ // adjustment may be also based on pattern of changes in network state.
+ prev_total_rate_ = total_rate;
+ prev_rtt_time_ = rtt_time;
+ prev_packet_loss_ = packet_loss;
+ prev_code_rate_delta_ = code_rate_delta;
+ return adjust_fec;
+}
+
+// Set the UEP (unequal-protection across packets) on/off for the FEC.
+bool VCMQmRobustness::SetUepProtection(uint8_t code_rate_delta,
+ float total_rate,
+ uint8_t packet_loss,
+ bool frame_type) {
+ // Default.
+ return false;
+}
+} // namespace webrtc
diff --git a/webrtc/modules/video_coding/qm_select.h b/webrtc/modules/video_coding/qm_select.h
new file mode 100644
index 0000000000..764b5ed8e3
--- /dev/null
+++ b/webrtc/modules/video_coding/qm_select.h
@@ -0,0 +1,356 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_QM_SELECT_H_
+#define WEBRTC_MODULES_VIDEO_CODING_QM_SELECT_H_
+
+#include "webrtc/common_types.h"
+#include "webrtc/typedefs.h"
+
+/******************************************************/
+/* Quality Modes: Resolution and Robustness settings */
+/******************************************************/
+
+namespace webrtc {
+struct VideoContentMetrics;
+
+struct VCMResolutionScale {
+ VCMResolutionScale()
+ : codec_width(640),
+ codec_height(480),
+ frame_rate(30.0f),
+ spatial_width_fact(1.0f),
+ spatial_height_fact(1.0f),
+ temporal_fact(1.0f),
+ change_resolution_spatial(false),
+ change_resolution_temporal(false) {}
+ uint16_t codec_width;
+ uint16_t codec_height;
+ float frame_rate;
+ float spatial_width_fact;
+ float spatial_height_fact;
+ float temporal_fact;
+ bool change_resolution_spatial;
+ bool change_resolution_temporal;
+};
+
+enum ImageType {
+ kQCIF = 0, // 176x144
+ kHCIF, // 264x216 = half(~3/4x3/4) CIF.
+ kQVGA, // 320x240 = quarter VGA.
+ kCIF, // 352x288
+ kHVGA, // 480x360 = half(~3/4x3/4) VGA.
+ kVGA, // 640x480
+ kQFULLHD, // 960x540 = quarter FULLHD, and half(~3/4x3/4) WHD.
+ kWHD, // 1280x720
+ kFULLHD, // 1920x1080
+ kNumImageTypes
+};
+
+const uint32_t kSizeOfImageType[kNumImageTypes] = {
+ 25344, 57024, 76800, 101376, 172800, 307200, 518400, 921600, 2073600};
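As a consistency note (editor's illustration, not part of the original change), each entry is simply width * height for the corresponding ImageType, which a few static_asserts can document:

// Sketch: the table holds pixel counts for the ImageType resolutions.
static_assert(176 * 144 == 25344, "kQCIF");
static_assert(264 * 216 == 57024, "kHCIF");
static_assert(960 * 540 == 518400, "kQFULLHD");
static_assert(1920 * 1080 == 2073600, "kFULLHD");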
+
+enum FrameRateLevelClass {
+ kFrameRateLow,
+ kFrameRateMiddle1,
+ kFrameRateMiddle2,
+ kFrameRateHigh
+};
+
+enum ContentLevelClass { kLow, kHigh, kDefault };
+
+struct VCMContFeature {
+ VCMContFeature() : value(0.0f), level(kDefault) {}
+ void Reset() {
+ value = 0.0f;
+ level = kDefault;
+ }
+ float value;
+ ContentLevelClass level;
+};
+
+enum UpDownAction { kUpResolution, kDownResolution };
+
+enum SpatialAction {
+ kNoChangeSpatial,
+ kOneHalfSpatialUniform, // 3/4 x 3/4: 9/16 ~1/2 pixel reduction.
+ kOneQuarterSpatialUniform, // 1/2 x 1/2: 1/4 pixel reduction.
+ kNumModesSpatial
+};
+
+enum TemporalAction {
+ kNoChangeTemporal,
+ kTwoThirdsTemporal, // 2/3 frame rate reduction.
+ kOneHalfTemporal, // 1/2 frame rate reduction.
+ kNumModesTemporal
+};
+
+struct ResolutionAction {
+ ResolutionAction() : spatial(kNoChangeSpatial), temporal(kNoChangeTemporal) {}
+ SpatialAction spatial;
+ TemporalAction temporal;
+};
+
+// Down-sampling factors for spatial (width and height), and temporal.
+const float kFactorWidthSpatial[kNumModesSpatial] = {1.0f, 4.0f / 3.0f, 2.0f};
+
+const float kFactorHeightSpatial[kNumModesSpatial] = {1.0f, 4.0f / 3.0f, 2.0f};
+
+const float kFactorTemporal[kNumModesTemporal] = {1.0f, 1.5f, 2.0f};
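The entries read as down-sampling divisors: the down direction divides the frame size or frame rate by the factor, and the up direction multiplies, as in ConvertSpatialFractionalToWhole() in qm_select.cc. A minimal sketch (editor's illustration; the local names and sample values are hypothetical):

#include <cstdio>

int main() {
  // Mirrors kFactorWidthSpatial / kFactorTemporal above.
  const float kWidthFactor[3] = {1.0f, 4.0f / 3.0f, 2.0f};
  const float kTempFactor[3] = {1.0f, 1.5f, 2.0f};
  // kOneHalfSpatialUniform (index 1): 640 / (4/3) = 480, i.e. 3/4 width.
  std::printf("%.1f\n", 640.0f / kWidthFactor[1]);
  // kTwoThirdsTemporal (index 1): 30 / 1.5 = 20 fps.
  std::printf("%.1f\n", 30.0f / kTempFactor[1]);
  return 0;
}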
+
+enum EncoderState {
+ kStableEncoding, // Low rate mis-match, stable buffer levels.
+ kStressedEncoding, // Significant over-shooting of target rate,
+ // buffer under-flow, etc.
+ kEasyEncoding // Significant under-shooting of target rate.
+};
+
+// QmMethod class: main class for resolution and robustness settings
+
+class VCMQmMethod {
+ public:
+ VCMQmMethod();
+ virtual ~VCMQmMethod();
+
+ // Reset values
+ void ResetQM();
+ virtual void Reset() = 0;
+
+ // Compute content class.
+ uint8_t ComputeContentClass();
+
+ // Update with the content metrics.
+ void UpdateContent(const VideoContentMetrics* content_metrics);
+
+ // Compute spatial texture magnitude and level.
+ // Spatial texture is a spatial prediction error measure.
+ void ComputeSpatial();
+
+ // Compute motion magnitude and level for NFD metric.
+ // NFD is normalized frame difference (normalized by spatial variance).
+ void ComputeMotionNFD();
+
+ // Get the imageType (CIF, VGA, HD, etc) for the system width/height.
+ ImageType GetImageType(uint16_t width, uint16_t height);
+
+ // Return the closest image type.
+ ImageType FindClosestImageType(uint16_t width, uint16_t height);
+
+ // Get the frame rate level.
+ FrameRateLevelClass FrameRateLevel(float frame_rate);
+
+ protected:
+ // Content Data.
+ const VideoContentMetrics* content_metrics_;
+
+ // Encoder frame sizes and native frame sizes.
+ uint16_t width_;
+ uint16_t height_;
+ float user_frame_rate_;
+ uint16_t native_width_;
+ uint16_t native_height_;
+ float native_frame_rate_;
+ float aspect_ratio_;
+ // Image type and frame rate level, for the current encoder resolution.
+ ImageType image_type_;
+ FrameRateLevelClass framerate_level_;
+ // Content class data.
+ VCMContFeature motion_;
+ VCMContFeature spatial_;
+ uint8_t content_class_;
+ bool init_;
+};
+
+// Resolution settings class
+
+class VCMQmResolution : public VCMQmMethod {
+ public:
+ VCMQmResolution();
+ virtual ~VCMQmResolution();
+
+ // Reset all quantities.
+ virtual void Reset();
+
+ // Reset rate quantities and counters after every SelectResolution() call.
+ void ResetRates();
+
+ // Reset down-sampling state.
+ void ResetDownSamplingState();
+
+ // Get the encoder state.
+ EncoderState GetEncoderState();
+
+ // Initialize after SetEncodingData in media_opt.
+ int Initialize(float bitrate,
+ float user_framerate,
+ uint16_t width,
+ uint16_t height,
+ int num_layers);
+
+ // Update the encoder frame size.
+ void UpdateCodecParameters(float frame_rate, uint16_t width, uint16_t height);
+
+ // Update with the actual bit rate (i.e., the size of the latest encoded
+ // frame), after every encoded frame.
+ void UpdateEncodedSize(size_t encoded_size);
+
+ // Update with new target bitrate, actual encoder sent rate, frame_rate,
+ // loss rate: every ~1 sec from SetTargetRates in media_opt.
+ void UpdateRates(float target_bitrate,
+ float encoder_sent_rate,
+ float incoming_framerate,
+ uint8_t packet_loss);
+
+ // Extract ST (spatio-temporal) resolution action.
+ // Inputs: qm: Reference to the quality modes pointer.
+ // Output: the spatial and/or temporal scale change.
+ int SelectResolution(VCMResolutionScale** qm);
+
+ private:
+ // Set the default resolution action.
+ void SetDefaultAction();
+
+ // Compute rates for the selection of down-sampling action.
+ void ComputeRatesForSelection();
+
+ // Compute the encoder state.
+ void ComputeEncoderState();
+
+ // Return true if the action is to go back up in resolution.
+ bool GoingUpResolution();
+
+ // Return true if the action is to go down in resolution.
+ bool GoingDownResolution();
+
+ // Check the condition for going up in resolution by the scale factors:
+ // |fac_width|, |fac_height|, |fac_temp|.
+ // |scale_fac| is a scale factor for the transition rate.
+ bool ConditionForGoingUp(float fac_width,
+ float fac_height,
+ float fac_temp,
+ float scale_fac);
+
+ // Get the bitrate threshold for the resolution action.
+ // The case |fac_width| = |fac_height| = |fac_temp| == 1 is for the
+ // down-sampling action.
+ // |scale_fac| is a scale factor for the transition rate.
+ float GetTransitionRate(float fac_width,
+ float fac_height,
+ float fac_temp,
+ float scale_fac);
+
+ // Update the down-sampling state.
+ void UpdateDownsamplingState(UpDownAction up_down);
+
+ // Update the codec frame size and frame rate.
+ void UpdateCodecResolution();
+
+ // Return a state based on the average target rate relative to the
+ // transition rate.
+ uint8_t RateClass(float transition_rate);
+
+ // Adjust the action selected from the table.
+ void AdjustAction();
+
+ // Convert 2 stages of 3/4 (=9/16) spatial decimation to 1/2.
+ void ConvertSpatialFractionalToWhole();
+
+ // Returns true if the new frame sizes, under the selected spatial action,
+ // are of even size.
+ bool EvenFrameSize();
+
+ // Insert latest down-sampling action into the history list.
+ void InsertLatestDownAction();
+
+ // Remove the last (first element) down-sampling action from the list.
+ void RemoveLastDownAction();
+
+ // Check constraints on the amount of down-sampling allowed.
+ void ConstrainAmountOfDownSampling();
+
+ // For going up in resolution: pick spatial or temporal action,
+ // if both actions were separately selected.
+ void PickSpatialOrTemporal();
+
+ // Select the directional (1x2 or 2x1) spatial down-sampling action.
+ void SelectSpatialDirectionMode(float transition_rate);
+
+ enum { kDownActionHistorySize = 10 };
+
+ VCMResolutionScale* qm_;
+ // Encoder rate control parameters.
+ float target_bitrate_;
+ float incoming_framerate_;
+ float per_frame_bandwidth_;
+ float buffer_level_;
+
+ // Data accumulated every ~1sec from MediaOpt.
+ float sum_target_rate_;
+ float sum_incoming_framerate_;
+ float sum_rate_MM_;
+ float sum_rate_MM_sgn_;
+ float sum_packet_loss_;
+ // Counters.
+ uint32_t frame_cnt_;
+ uint32_t frame_cnt_delta_;
+ uint32_t update_rate_cnt_;
+ uint32_t low_buffer_cnt_;
+
+ // Resolution state parameters.
+ float state_dec_factor_spatial_;
+ float state_dec_factor_temporal_;
+
+ // Quantities used for selection.
+ float avg_target_rate_;
+ float avg_incoming_framerate_;
+ float avg_ratio_buffer_low_;
+ float avg_rate_mismatch_;
+ float avg_rate_mismatch_sgn_;
+ float avg_packet_loss_;
+ EncoderState encoder_state_;
+ ResolutionAction action_;
+ // Short history of the down-sampling actions from the Initialize() state.
+ // This is needed for going up in resolution. Since the total amount of
+ // down-sampling is constrained, the length of the list need not be
+ // large: i.e., (4/3)^{kDownActionHistorySize} <= kMaxDownSample.
+ ResolutionAction down_action_history_[kDownActionHistorySize];
+ int num_layers_;
+};
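A minimal driver for the class above, mirroring the call sequence the unit tests in qm_select_unittest.cc exercise (initialize, feed codec parameters and ~1-second rate samples, update content metrics, then ask for an action). A sketch only; the wrapper name and sample values are hypothetical:

#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/video_coding/qm_select.h"

// Returns true if a spatial and/or temporal resolution change was selected.
bool RunQmSelection(webrtc::VCMQmResolution* qm,
                    const webrtc::VideoContentMetrics* metrics) {
  webrtc::VCMResolutionScale* scale = NULL;
  if (qm->Initialize(300.0f, 30.0f, 640, 480, 1) != 0)  // kbps, fps, WxH.
    return false;
  qm->UpdateCodecParameters(30.0f, 640, 480);
  for (int i = 0; i < 3; ++i) {
    // Every ~1 sec: target rate, encoder sent rate, incoming fps, loss.
    qm->UpdateRates(300.0f, 300.0f, 30.0f, 10);
  }
  qm->UpdateContent(metrics);
  if (qm->SelectResolution(&scale) != 0)
    return false;
  return scale->change_resolution_spatial || scale->change_resolution_temporal;
}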
+
+// Robustness settings class.
+
+class VCMQmRobustness : public VCMQmMethod {
+ public:
+ VCMQmRobustness();
+ ~VCMQmRobustness();
+
+ virtual void Reset();
+
+ // Adjust FEC rate based on content: every ~1 sec from SetTargetRates.
+ // Returns an adjustment factor.
+ float AdjustFecFactor(uint8_t code_rate_delta,
+ float total_rate,
+ float framerate,
+ int64_t rtt_time,
+ uint8_t packet_loss);
+
+ // Set the UEP protection on/off.
+ bool SetUepProtection(uint8_t code_rate_delta,
+ float total_rate,
+ uint8_t packet_loss,
+ bool frame_type);
+
+ private:
+ // Previous state of network parameters.
+ float prev_total_rate_;
+ int64_t prev_rtt_time_;
+ uint8_t prev_packet_loss_;
+ uint8_t prev_code_rate_delta_;
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_CODING_QM_SELECT_H_
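For the robustness side, the calling pattern is already fixed by the signatures above, even though the adjustment currently comes back as 1.0 per the TODO in AdjustFecFactor(). A sketch with a hypothetical wrapper name and illustrative argument values:

#include "webrtc/modules/video_coding/qm_select.h"

// Query the content-aware FEC scale (currently always 1.0, per the TODO).
float FecScaleForContent(webrtc::VCMQmRobustness* qm,
                         const webrtc::VideoContentMetrics* metrics) {
  qm->UpdateContent(metrics);
  // code_rate_delta, total rate (kbps), frame rate, RTT (ms), filtered loss.
  return qm->AdjustFecFactor(10, 500.0f, 30.0f, 100, 5);
}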
diff --git a/webrtc/modules/video_coding/qm_select_data.h b/webrtc/modules/video_coding/qm_select_data.h
new file mode 100644
index 0000000000..49190ef53b
--- /dev/null
+++ b/webrtc/modules/video_coding/qm_select_data.h
@@ -0,0 +1,227 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_QM_SELECT_DATA_H_
+#define WEBRTC_MODULES_VIDEO_CODING_QM_SELECT_DATA_H_
+
+/***************************************************************
+ * qm_select_data.h
+ * This file includes parameters for content-aware media optimization.
+ ****************************************************************/
+
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+//
+// PARAMETERS FOR RESOLUTION ADAPTATION
+//
+
+// Initial level of buffer in secs.
+const float kInitBufferLevel = 0.5f;
+
+// Fraction of the (max) buffer size below which the buffer level is
+// considered too low (underflow).
+const float kPercBufferThr = 0.10f;
+
+// Threshold on the occurrences of low buffer levels.
+const float kMaxBufferLow = 0.30f;
+
+// Threshold on rate mismatch.
+const float kMaxRateMisMatch = 0.5f;
+
+// Thresholds on the amount of encoder under/over-shooting.
+const float kRateOverShoot = 0.75f;
+const float kRateUnderShoot = 0.75f;
+
+// Factor to weight the average rates toward the current/last data samples.
+const float kWeightRate = 0.70f;
+
+// Factor for transitional rate for going back up in resolution.
+const float kTransRateScaleUpSpatial = 1.25f;
+const float kTransRateScaleUpTemp = 1.25f;
+const float kTransRateScaleUpSpatialTemp = 1.25f;
+
+// Threshold on the packet loss rate, above which we favor resolution
+// reduction.
+const float kPacketLossThr = 0.1f;
+
+// Factor for reducing transitional bitrate under packet loss.
+const float kPacketLossRateFac = 1.0f;
+
+// Maximum possible transitional rate for down-sampling:
+// (units in kbps), for 30fps.
+const uint16_t kMaxRateQm[9] = {
+ 0, // QCIF
+ 50, // kHCIF
+ 125, // kQVGA
+ 200, // CIF
+ 280, // HVGA
+ 400, // VGA
+ 700, // QFULLHD
+ 1000, // WHD
+ 1500 // FULLHD
+};
+
+// Frame rate scale for maximum transition rate.
+const float kFrameRateFac[4] = {
+ 0.5f, // Low
+ 0.7f, // Middle level 1
+ 0.85f, // Middle level 2
+ 1.0f, // High
+};
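Reading the two tables together (a plausible sketch only; the authoritative combination lives in VCMQmResolution::GetTransitionRate(), earlier in this patch): kMaxRateQm gives the 30 fps ceiling and kFrameRateFac scales it down at lower frame-rate levels.

// Sketch: transitional ceiling for VGA at a low frame-rate level.
const float kVgaLowFpsCeiling =
    400.0f /* kMaxRateQm[kVGA] */ * 0.5f /* kFrameRateFac[kFrameRateLow] */;
// -> 200 kbps, before any content-class scaling from kScaleTransRateQm.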
+
+// Scale for transitional rate, based on content class:
+// motion = L/H/D, spatial = L/H/D, for the low, high, and middle (default)
+// levels.
+const float kScaleTransRateQm[18] = {
+ // VGA and lower
+ 0.40f, // L, L
+ 0.50f, // L, H
+ 0.40f, // L, D
+ 0.60f, // H, L
+ 0.60f, // H, H
+ 0.60f, // H, D
+ 0.50f, // D, L
+ 0.50f, // D, D
+ 0.50f, // D, H
+
+ // over VGA
+ 0.40f, // L, L
+ 0.50f, // L, H
+ 0.40f, // L, D
+ 0.60f, // H, L
+ 0.60f, // H, H
+ 0.60f, // H, D
+ 0.50f, // D, L
+ 0.50f, // D, D
+ 0.50f, // D, H
+};
+
+// Threshold on the target rate relative to transitional rate.
+const float kFacLowRate = 0.5f;
+
+// Action for down-sampling:
+// motion = L/H/D, spatial = L/H/D, for the low, high, and middle levels;
+// rate = 0/1/2, for the target rate state relative to the transition rate.
+const uint8_t kSpatialAction[27] = {
+ // rateClass = 0:
+ 1, // L, L
+ 1, // L, H
+ 1, // L, D
+ 4, // H, L
+ 1, // H, H
+ 4, // H, D
+ 4, // D, L
+ 1, // D, H
+ 2, // D, D
+
+ // rateClass = 1:
+ 1, // L, L
+ 1, // L, H
+ 1, // L, D
+ 2, // H, L
+ 1, // H, H
+ 2, // H, D
+ 2, // D, L
+ 1, // D, H
+ 2, // D, D
+
+ // rateClass = 2:
+ 1, // L, L
+ 1, // L, H
+ 1, // L, D
+ 2, // H, L
+ 1, // H, H
+ 2, // H, D
+ 2, // D, L
+ 1, // D, H
+ 2, // D, D
+};
+
+const uint8_t kTemporalAction[27] = {
+ // rateClass = 0:
+ 3, // L, L
+ 2, // L, H
+ 2, // L, D
+ 1, // H, L
+ 3, // H, H
+ 1, // H, D
+ 1, // D, L
+ 2, // D, H
+ 1, // D, D
+
+ // rateClass = 1:
+ 3, // L, L
+ 3, // L, H
+ 3, // L, D
+ 1, // H, L
+ 3, // H, H
+ 1, // H, D
+ 1, // D, L
+ 3, // D, H
+ 1, // D, D
+
+ // rateClass = 2:
+ 1, // L, L
+ 3, // L, H
+ 3, // L, D
+ 1, // H, L
+ 3, // H, H
+ 1, // H, D
+ 1, // D, L
+ 3, // D, H
+ 1, // D, D
+};
+
+// Control the total amount of down-sampling allowed.
+const float kMaxSpatialDown = 8.0f;
+const float kMaxTempDown = 3.0f;
+const float kMaxTotalDown = 9.0f;
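Worked out (editor's note, not part of the original change): the spatial cap of 8 admits 1/2x1/2 (factor 4) followed by 3/4x3/4 (factor 16/9, product ≈ 7.1), but not two 1/2x1/2 stages (product 16); the temporal cap of 3 is met exactly by a 2/3 plus a 1/2 frame rate reduction (1.5 * 2 = 3); and the joint cap of 9 means a near-maximal spatial state leaves no headroom for any temporal action (7.1 * 1.5 ≈ 10.7 > 9).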
+
+// Minimum image size for a spatial down-sampling.
+const int kMinImageSize = 176 * 144;
+
+// Minimum frame rate for temporal down-sampling:
+// no frame rate reduction if the incoming frame rate is <= |kMinFrameRate|.
+const int kMinFrameRate = 8;
+
+//
+// PARAMETERS FOR FEC ADJUSTMENT: TODO (marpan)
+//
+
+//
+// PARAMETERS FOR SETTING LOW/HIGH STATES OF CONTENT METRICS:
+//
+
+// Thresholds for frame rate:
+const int kLowFrameRate = 10;
+const int kMiddleFrameRate = 15;
+const int kHighFrameRate = 25;
+
+// Thresholds for motion: motion level is from NFD.
+const float kHighMotionNfd = 0.075f;
+const float kLowMotionNfd = 0.03f;
+
+// Thresholds for spatial prediction error:
+// this is applied on the average of (2x2,1x2,2x1).
+const float kHighTexture = 0.035f;
+const float kLowTexture = 0.020f;
+
+// Used to reduce thresholds for larger/HD scenes: correction factor since
+// higher correlation in HD scenes means lower spatial prediction error.
+const float kScaleTexture = 0.9f;
+
+// Percentage reduction in transitional bitrate for 2x2 selected over 1x2/2x1.
+const float kRateRedSpatial2X2 = 0.6f;
+
+const float kSpatialErr2x2VsHoriz = 0.1f; // percentage to favor 2x2 over H
+const float kSpatialErr2X2VsVert = 0.1f; // percentage to favor 2x2 over V
+const float kSpatialErrVertVsHoriz = 0.1f; // percentage to favor H over V
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_QM_SELECT_DATA_H_
diff --git a/webrtc/modules/video_coding/qm_select_unittest.cc b/webrtc/modules/video_coding/qm_select_unittest.cc
new file mode 100644
index 0000000000..f8542ec676
--- /dev/null
+++ b/webrtc/modules/video_coding/qm_select_unittest.cc
@@ -0,0 +1,1307 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file includes unit tests for the VCMQmResolution class,
+ * in particular the selection of spatial and/or temporal down-sampling.
+ */
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/video_coding/qm_select.h"
+
+namespace webrtc {
+
+// Representative values of the content metrics for the low/high/medium
+// (default) states, based on the parameter settings in qm_select_data.h.
+const float kSpatialLow = 0.01f;
+const float kSpatialMedium = 0.03f;
+const float kSpatialHigh = 0.1f;
+const float kTemporalLow = 0.01f;
+const float kTemporalMedium = 0.06f;
+const float kTemporalHigh = 0.1f;
+
+class QmSelectTest : public ::testing::Test {
+ protected:
+ QmSelectTest()
+ : qm_resolution_(new VCMQmResolution()),
+ content_metrics_(new VideoContentMetrics()),
+ qm_scale_(NULL) {}
+ VCMQmResolution* qm_resolution_;
+ VideoContentMetrics* content_metrics_;
+ VCMResolutionScale* qm_scale_;
+
+ void InitQmNativeData(float initial_bit_rate,
+ int user_frame_rate,
+ int native_width,
+ int native_height,
+ int num_layers);
+
+ void UpdateQmEncodedFrame(size_t* encoded_size, size_t num_updates);
+
+ void UpdateQmRateData(int* target_rate,
+ int* encoder_sent_rate,
+ int* incoming_frame_rate,
+ uint8_t* fraction_lost,
+ int num_updates);
+
+ void UpdateQmContentData(float motion_metric,
+ float spatial_metric,
+ float spatial_metric_horiz,
+ float spatial_metric_vert);
+
+ bool IsSelectedActionCorrect(VCMResolutionScale* qm_scale,
+ float fac_width,
+ float fac_height,
+ float fac_temp,
+ uint16_t new_width,
+ uint16_t new_height,
+ float new_frame_rate);
+
+ void TearDown() {
+ delete qm_resolution_;
+ delete content_metrics_;
+ }
+};
+
+TEST_F(QmSelectTest, HandleInputs) {
+ // Expect parameter error. Initialize with invalid inputs.
+ EXPECT_EQ(-4, qm_resolution_->Initialize(1000, 0, 640, 480, 1));
+ EXPECT_EQ(-4, qm_resolution_->Initialize(1000, 30, 640, 0, 1));
+ EXPECT_EQ(-4, qm_resolution_->Initialize(1000, 30, 0, 480, 1));
+
+ // Expect uninitialized error: no valid initialization before selection.
+ EXPECT_EQ(-7, qm_resolution_->SelectResolution(&qm_scale_));
+
+ VideoContentMetrics* content_metrics = NULL;
+ EXPECT_EQ(0, qm_resolution_->Initialize(1000, 30, 640, 480, 1));
+ qm_resolution_->UpdateContent(content_metrics);
+ // Content metrics are NULL: Expect success and no down-sampling action.
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 1.0, 1.0, 1.0, 640, 480, 30.0f));
+}
+
+// TODO(marpan): Add a test for number of temporal layers > 1.
+
+// No down-sampling action at high rates.
+TEST_F(QmSelectTest, NoActionHighRate) {
+ // Initialize with bitrate, frame rate, native system width/height, and
+ // number of temporal layers.
+ InitQmNativeData(800, 30, 640, 480, 1);
+
+ // Update with encoder frame size.
+ uint16_t codec_width = 640;
+ uint16_t codec_height = 480;
+ qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+ EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+ // Update rates for a sequence of intervals.
+ int target_rate[] = {800, 800, 800};
+ int encoder_sent_rate[] = {800, 800, 800};
+ int incoming_frame_rate[] = {30, 30, 30};
+ uint8_t fraction_lost[] = {10, 10, 10};
+ UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+ fraction_lost, 3);
+
+ // Update content: motion level, and 3 spatial prediction errors.
+ UpdateQmContentData(kTemporalLow, kSpatialLow, kSpatialLow, kSpatialLow);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(0, qm_resolution_->ComputeContentClass());
+ EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480, 30.0f));
+}
+
+// Rate is well below transition, down-sampling action is taken,
+// depending on the content state.
+TEST_F(QmSelectTest, DownActionLowRate) {
+ // Initialize with bitrate, frame rate, native system width/height, and
+ // number of temporal layers.
+ InitQmNativeData(50, 30, 640, 480, 1);
+
+ // Update with encoder frame size.
+ uint16_t codec_width = 640;
+ uint16_t codec_height = 480;
+ qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+ EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+ // Update rates for a sequence of intervals.
+ int target_rate[] = {50, 50, 50};
+ int encoder_sent_rate[] = {50, 50, 50};
+ int incoming_frame_rate[] = {30, 30, 30};
+ uint8_t fraction_lost[] = {10, 10, 10};
+ UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+ fraction_lost, 3);
+
+ // Update content: motion level, and 3 spatial prediction errors.
+ // High motion, low spatial: 2x2 spatial expected.
+ UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+ EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240, 30.0f));
+
+ qm_resolution_->ResetDownSamplingState();
+ // Low motion, low spatial: 2/3 temporal is expected.
+ UpdateQmContentData(kTemporalLow, kSpatialLow, kSpatialLow, kSpatialLow);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(0, qm_resolution_->ComputeContentClass());
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 640, 480, 20.5f));
+
+ qm_resolution_->ResetDownSamplingState();
+ // Medium motion, low spatial: 2x2 spatial expected.
+ UpdateQmContentData(kTemporalMedium, kSpatialLow, kSpatialLow, kSpatialLow);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(6, qm_resolution_->ComputeContentClass());
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240, 30.0f));
+
+ qm_resolution_->ResetDownSamplingState();
+ // High motion, high spatial: 2/3 temporal expected.
+ UpdateQmContentData(kTemporalHigh, kSpatialHigh, kSpatialHigh, kSpatialHigh);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(4, qm_resolution_->ComputeContentClass());
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 640, 480, 20.5f));
+
+ qm_resolution_->ResetDownSamplingState();
+ // Low motion, high spatial: 1/2 temporal expected.
+ UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f, 640, 480, 15.5f));
+
+ qm_resolution_->ResetDownSamplingState();
+ // Medium motion, high spatial: 1/2 temporal expected.
+ UpdateQmContentData(kTemporalMedium, kSpatialHigh, kSpatialHigh,
+ kSpatialHigh);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(7, qm_resolution_->ComputeContentClass());
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f, 640, 480, 15.5f));
+
+ qm_resolution_->ResetDownSamplingState();
+ // High motion, medium spatial: 2x2 spatial expected.
+ UpdateQmContentData(kTemporalHigh, kSpatialMedium, kSpatialMedium,
+ kSpatialMedium);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(5, qm_resolution_->ComputeContentClass());
+ // Target frame rate for frame dropper should be the same as previous == 15.
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240, 30.0f));
+
+ qm_resolution_->ResetDownSamplingState();
+ // Low motion, medium spatial: high frame rate, so 1/2 temporal expected.
+ UpdateQmContentData(kTemporalLow, kSpatialMedium, kSpatialMedium,
+ kSpatialMedium);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(2, qm_resolution_->ComputeContentClass());
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f, 640, 480, 15.5f));
+
+ qm_resolution_->ResetDownSamplingState();
+ // Medium motion, medium spatial: high frame rate, so 2/3 temporal expected.
+ UpdateQmContentData(kTemporalMedium, kSpatialMedium, kSpatialMedium,
+ kSpatialMedium);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(8, qm_resolution_->ComputeContentClass());
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 640, 480, 20.5f));
+}
+
+// Rate mis-match is high, and we have over-shooting. Since the target rate
+// is below the max for down-sampling, down-sampling is selected.
+TEST_F(QmSelectTest, DownActionHighRateMMOvershoot) {
+ // Initialize with bitrate, frame rate, native system width/height, and
+ // number of temporal layers.
+ InitQmNativeData(300, 30, 640, 480, 1);
+
+ // Update with encoder frame size.
+ uint16_t codec_width = 640;
+ uint16_t codec_height = 480;
+ qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+ EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+ // Update rates for a sequence of intervals.
+ int target_rate[] = {300, 300, 300};
+ int encoder_sent_rate[] = {900, 900, 900};
+ int incoming_frame_rate[] = {30, 30, 30};
+ uint8_t fraction_lost[] = {10, 10, 10};
+ UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+ fraction_lost, 3);
+
+ // Update content: motion level, and 3 spatial prediction errors.
+ // High motion, low spatial.
+ UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+ EXPECT_EQ(kStressedEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 4.0f / 3.0f, 4.0f / 3.0f, 1.0f,
+ 480, 360, 30.0f));
+
+ qm_resolution_->ResetDownSamplingState();
+ // Low motion, high spatial
+ UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 640, 480, 20.5f));
+}
+
+// Rate mis-match is high and the target rate is below the max for
+// down-sampling, but since we have consistent under-shooting, no
+// down-sampling action is taken.
+TEST_F(QmSelectTest, NoActionHighRateMMUndershoot) {
+ // Initialize with bitrate, frame rate, native system width/height, and
+ // number of temporal layers.
+ InitQmNativeData(300, 30, 640, 480, 1);
+
+ // Update with encoder frame size.
+ uint16_t codec_width = 640;
+ uint16_t codec_height = 480;
+ qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+ EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+ // Update rates for a sequence of intervals.
+ int target_rate[] = {300, 300, 300};
+ int encoder_sent_rate[] = {100, 100, 100};
+ int incoming_frame_rate[] = {30, 30, 30};
+ uint8_t fraction_lost[] = {10, 10, 10};
+ UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+ fraction_lost, 3);
+
+ // Update content: motion level, and 3 spatial prediction errors.
+ // High motion, low spatial.
+ UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+ EXPECT_EQ(kEasyEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480, 30.0f));
+
+ qm_resolution_->ResetDownSamplingState();
+ // Low motion, high spatial
+ UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480, 30.0f));
+}
+
+// Buffer is underflowing, and target rate is below max for down-sampling,
+// so action is taken.
+TEST_F(QmSelectTest, DownActionBufferUnderflow) {
+ // Initialize with bitrate, frame rate, native system width/height, and
+ // number of temporal layers.
+ InitQmNativeData(300, 30, 640, 480, 1);
+
+ // Update with encoder frame size.
+ uint16_t codec_width = 640;
+ uint16_t codec_height = 480;
+ qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+ EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+ // Update with encoded size over a number of frames.
+ // Per-frame bandwidth = 15 = 450/30: simulate (decoder) buffer underflow.
+ size_t encoded_size[] = {200, 100, 50, 30, 60, 40, 20, 30, 20, 40};
+ UpdateQmEncodedFrame(encoded_size, GTEST_ARRAY_SIZE_(encoded_size));
+
+ // Update rates for a sequence of intervals.
+ int target_rate[] = {300, 300, 300};
+ int encoder_sent_rate[] = {450, 450, 450};
+ int incoming_frame_rate[] = {30, 30, 30};
+ uint8_t fraction_lost[] = {10, 10, 10};
+ UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+ fraction_lost, 3);
+
+ // Update content: motion level, and 3 spatial prediction errors.
+ // High motion, low spatial.
+ UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+ EXPECT_EQ(kStressedEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 4.0f / 3.0f, 4.0f / 3.0f, 1.0f,
+ 480, 360, 30.0f));
+
+ qm_resolution_->ResetDownSamplingState();
+ // Low motion, high spatial
+ UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 640, 480, 20.5f));
+}
+
+// Target rate is below max for down-sampling, but buffer level is stable,
+// so no action is taken.
+TEST_F(QmSelectTest, NoActionBufferStable) {
+ // Initialize with bitrate, frame rate, native system width/height, and
+ // number of temporal layers.
+ InitQmNativeData(350, 30, 640, 480, 1);
+
+ // Update with encoder frame size.
+ uint16_t codec_width = 640;
+ uint16_t codec_height = 480;
+ qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+ EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+ // Update with encoded size over a number of frames.
+ // per-frame bandwidth = 15 = 450/30: simulate stable (decoder) buffer levels.
+ size_t encoded_size[] = {40, 10, 10, 16, 18, 20, 17, 20, 16, 15};
+ UpdateQmEncodedFrame(encoded_size, GTEST_ARRAY_SIZE_(encoded_size));
+
+ // Update rates for a sequence of intervals.
+ int target_rate[] = {350, 350, 350};
+ int encoder_sent_rate[] = {350, 450, 450};
+ int incoming_frame_rate[] = {30, 30, 30};
+ uint8_t fraction_lost[] = {10, 10, 10};
+ UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+ fraction_lost, 3);
+
+ // Update content: motion level, and 3 spatial prediction errors.
+ // High motion, low spatial.
+ UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+ EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480, 30.0f));
+
+ qm_resolution_->ResetDownSamplingState();
+ // Low motion, high spatial
+ UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480, 30.0f));
+}
+
+// Very low rate, but no spatial down-sampling below some size (QCIF).
+TEST_F(QmSelectTest, LimitDownSpatialAction) {
+ // Initialize with bitrate, frame rate, native system width/height, and
+ // number of temporal layers.
+ InitQmNativeData(10, 30, 176, 144, 1);
+
+ // Update with encoder frame size.
+ uint16_t codec_width = 176;
+ uint16_t codec_height = 144;
+ qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+ EXPECT_EQ(0, qm_resolution_->GetImageType(codec_width, codec_height));
+
+ // Update rates for a sequence of intervals.
+ int target_rate[] = {10, 10, 10};
+ int encoder_sent_rate[] = {10, 10, 10};
+ int incoming_frame_rate[] = {30, 30, 30};
+ uint8_t fraction_lost[] = {10, 10, 10};
+ UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+ fraction_lost, 3);
+
+ // Update content: motion level, and 3 spatial prediction errors.
+ // High motion, low spatial.
+ UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+ EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 176, 144, 30.0f));
+}
+
+// Very low rate, but no frame rate reduction below some frame rate (8 fps).
+TEST_F(QmSelectTest, LimitDownTemporalAction) {
+ // Initialize with bitrate, frame rate, native system width/height, and
+ // number of temporal layers.
+ InitQmNativeData(10, 8, 640, 480, 1);
+
+ // Update with encoder frame size.
+ uint16_t codec_width = 640;
+ uint16_t codec_height = 480;
+ qm_resolution_->UpdateCodecParameters(8.0f, codec_width, codec_height);
+ EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+ // Update rates for a sequence of intervals.
+ int target_rate[] = {10, 10, 10};
+ int encoder_sent_rate[] = {10, 10, 10};
+ int incoming_frame_rate[] = {8, 8, 8};
+ uint8_t fraction_lost[] = {10, 10, 10};
+ UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+ fraction_lost, 3);
+
+ // Update content: motion level, and 3 spatial prediction errors.
+ // Low motion, medium spatial.
+ UpdateQmContentData(kTemporalLow, kSpatialMedium, kSpatialMedium,
+ kSpatialMedium);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(2, qm_resolution_->ComputeContentClass());
+ EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480, 8.0f));
+}
+
+// Two stages: spatial down-sample and then back up spatially,
+// as the rate has increased.
+TEST_F(QmSelectTest, 2StageDownSpatialUpSpatial) {
+ // Initialize with bitrate, frame rate, native system width/height, and
+ // number of temporal layers.
+ InitQmNativeData(50, 30, 640, 480, 1);
+
+ // Update with encoder frame size.
+ uint16_t codec_width = 640;
+ uint16_t codec_height = 480;
+ qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+ EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+ // Update rates for a sequence of intervals.
+ int target_rate[] = {50, 50, 50};
+ int encoder_sent_rate[] = {50, 50, 50};
+ int incoming_frame_rate[] = {30, 30, 30};
+ uint8_t fraction_lost[] = {10, 10, 10};
+ UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+ fraction_lost, 3);
+
+ // Update content: motion level, and 3 spatial prediction errors.
+ // High motion, low spatial.
+ UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+ EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240, 30.0f));
+
+ // Reset and go up in rate: expected to go back up, in 2 stages of 3/4.
+ qm_resolution_->ResetRates();
+ qm_resolution_->UpdateCodecParameters(30.0f, 320, 240);
+ EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
+ // Update rates for a sequence of intervals.
+ int target_rate2[] = {400, 400, 400, 400, 400};
+ int encoder_sent_rate2[] = {400, 400, 400, 400, 400};
+ int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
+ uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
+ UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
+ fraction_lost2, 5);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+ float scale = (4.0f / 3.0f) / 2.0f;
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, scale, scale, 1.0f, 480, 360, 30.0f));
+
+ qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
+ EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 3.0f / 4.0f, 3.0f / 4.0f, 1.0f,
+ 640, 480, 30.0f));
+}
+
+// Two stages: spatial down-sample and then back up spatially, since the
+// encoder is under-shooting the target even though the rate has not
+// increased much.
+TEST_F(QmSelectTest, 2StageDownSpatialUpSpatialUndershoot) {
+ // Initialize with bitrate, frame rate, native system width/height, and
+ // number of temporal layers.
+ InitQmNativeData(50, 30, 640, 480, 1);
+
+ // Update with encoder frame size.
+ uint16_t codec_width = 640;
+ uint16_t codec_height = 480;
+ qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+ EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+ // Update rates for a sequence of intervals.
+ int target_rate[] = {50, 50, 50};
+ int encoder_sent_rate[] = {50, 50, 50};
+ int incoming_frame_rate[] = {30, 30, 30};
+ uint8_t fraction_lost[] = {10, 10, 10};
+ UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+ fraction_lost, 3);
+
+ // Update content: motion level, and 3 spatial prediction errors.
+ // High motion, low spatial.
+ UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+ EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240, 30.0f));
+
+ // Reset rates and simulate an under-shooting scenario: expect to go back up.
+ // Goes up spatially in two stages for 1/2x1/2 down-sampling.
+ qm_resolution_->ResetRates();
+ qm_resolution_->UpdateCodecParameters(30.0f, 320, 240);
+ EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
+ // Update rates for a sequence of intervals.
+ int target_rate2[] = {200, 200, 200, 200, 200};
+ int encoder_sent_rate2[] = {50, 50, 50, 50, 50};
+ int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
+ uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
+ UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
+ fraction_lost2, 5);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(kEasyEncoding, qm_resolution_->GetEncoderState());
+ float scale = (4.0f / 3.0f) / 2.0f;
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, scale, scale, 1.0f, 480, 360, 30.0f));
+
+ qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
+ EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 3.0f / 4.0f, 3.0f / 4.0f, 1.0f,
+ 640, 480, 30.0f));
+}
+
+// Two stages: spatial down-sample and then no action to go up,
+// as the encoding rate mis-match is too high.
+TEST_F(QmSelectTest, 2StageDownSpatialNoActionUp) {
+ // Initialize with bitrate, frame rate, native system width/height, and
+ // number of temporal layers.
+ InitQmNativeData(50, 30, 640, 480, 1);
+
+ // Update with encoder frame size.
+ uint16_t codec_width = 640;
+ uint16_t codec_height = 480;
+ qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+ EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+ // Update rates for a sequence of intervals.
+ int target_rate[] = {50, 50, 50};
+ int encoder_sent_rate[] = {50, 50, 50};
+ int incoming_frame_rate[] = {30, 30, 30};
+ uint8_t fraction_lost[] = {10, 10, 10};
+ UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+ fraction_lost, 3);
+
+ // Update content: motion level, and 3 spatial prediction errors.
+ // High motion, low spatial.
+ UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+ EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240, 30.0f));
+
+ // Reset and simulate large rate mis-match: expect no action to go back up.
+ qm_resolution_->ResetRates();
+ qm_resolution_->UpdateCodecParameters(30.0f, 320, 240);
+ EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
+ // Update rates for a sequence of intervals.
+ int target_rate2[] = {400, 400, 400, 400, 400};
+ int encoder_sent_rate2[] = {1000, 1000, 1000, 1000, 1000};
+ int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
+ uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
+ UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
+ fraction_lost2, 5);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(kStressedEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 320, 240, 30.0f));
+}
+
+// Two stages: temporally down-sample and then back up temporally,
+// as the rate has increased.
+TEST_F(QmSelectTest, 2StageDownTemporalUpTemporal) {
+ // Initialize with bitrate, frame rate, native system width/height, and
+ // number of temporal layers.
+ InitQmNativeData(50, 30, 640, 480, 1);
+
+ // Update with encoder frame size.
+ uint16_t codec_width = 640;
+ uint16_t codec_height = 480;
+ qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+ EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+ // Update rates for a sequence of intervals.
+ int target_rate[] = {50, 50, 50};
+ int encoder_sent_rate[] = {50, 50, 50};
+ int incoming_frame_rate[] = {30, 30, 30};
+ uint8_t fraction_lost[] = {10, 10, 10};
+ UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+ fraction_lost, 3);
+
+ // Update content: motion level, and 3 spatial prediction errors.
+ // Low motion, high spatial.
+ UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+ EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f, 640, 480, 15.5f));
+
+ // Reset rates and go up in rate: expect to go back up.
+ qm_resolution_->ResetRates();
+ // Update rates for a sequence of intervals.
+ int target_rate2[] = {400, 400, 400, 400, 400};
+ int encoder_sent_rate2[] = {400, 400, 400, 400, 400};
+ int incoming_frame_rate2[] = {15, 15, 15, 15, 15};
+ uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
+ UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
+ fraction_lost2, 5);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 0.5f, 640, 480, 30.0f));
+}
+
+// Two stages: temporal down-sample and then back up temporally, since the
+// encoder is under-shooting the target even though the rate has not
+// increased much.
+TEST_F(QmSelectTest, 2StageDownTemporalUpTemporalUndershoot) {
+ // Initialize with bitrate, frame rate, native system width/height, and
+ // number of temporal layers.
+ InitQmNativeData(50, 30, 640, 480, 1);
+
+ // Update with encoder frame size.
+ uint16_t codec_width = 640;
+ uint16_t codec_height = 480;
+ qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+ EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+ // Update rates for a sequence of intervals.
+ int target_rate[] = {50, 50, 50};
+ int encoder_sent_rate[] = {50, 50, 50};
+ int incoming_frame_rate[] = {30, 30, 30};
+ uint8_t fraction_lost[] = {10, 10, 10};
+ UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+ fraction_lost, 3);
+
+ // Update content: motion level, and 3 spatial prediction errors.
+ // Low motion, high spatial.
+ UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+ EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f, 640, 480, 15.5f));
+
+ // Reset rates and simulate an under-shooting scenario: expect to go back up.
+ qm_resolution_->ResetRates();
+ // Update rates for a sequence of intervals.
+ int target_rate2[] = {150, 150, 150, 150, 150};
+ int encoder_sent_rate2[] = {50, 50, 50, 50, 50};
+ int incoming_frame_rate2[] = {15, 15, 15, 15, 15};
+ uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
+ UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
+ fraction_lost2, 5);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(kEasyEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 0.5f, 640, 480, 30.0f));
+}
+
+// Two stages: temporal down-sample and then no action to go up,
+// as the encoding rate mis-match is too high.
+TEST_F(QmSelectTest, 2StageDownTemporalNoActionUp) {
+ // Initialize with bitrate, frame rate, native system width/height, and
+ // number of temporal layers.
+ InitQmNativeData(50, 30, 640, 480, 1);
+
+ // Update with encoder frame size.
+ uint16_t codec_width = 640;
+ uint16_t codec_height = 480;
+ qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+ EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+ // Update rates for a sequence of intervals.
+ int target_rate[] = {50, 50, 50};
+ int encoder_sent_rate[] = {50, 50, 50};
+ int incoming_frame_rate[] = {30, 30, 30};
+ uint8_t fraction_lost[] = {10, 10, 10};
+ UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+ fraction_lost, 3);
+
+ // Update content: motion level, and 3 spatial prediction errors.
+ // Low motion, high spatial.
+ UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+ EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1, 1, 2, 640, 480, 15.5f));
+
+ // Reset and simulate large rate mis-match: expect no action to go back up.
+ qm_resolution_->UpdateCodecParameters(15.0f, codec_width, codec_height);
+ qm_resolution_->ResetRates();
+ // Update rates for a sequence of intervals.
+ int target_rate2[] = {600, 600, 600, 600, 600};
+ int encoder_sent_rate2[] = {1000, 1000, 1000, 1000, 1000};
+ int incoming_frame_rate2[] = {15, 15, 15, 15, 15};
+ uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
+ UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
+ fraction_lost2, 5);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(kStressedEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480, 15.0f));
+}
+
+// 3 stages: spatial down-sample, followed by temporal down-sample,
+// and then go up to the full state, as the encoding rate has increased.
+TEST_F(QmSelectTest, 3StageDownSpatialTemporalUpSpatialTemporal) {
+ // Initialize with bitrate, frame rate, native system width/height, and
+ // number of temporal layers.
+ InitQmNativeData(80, 30, 640, 480, 1);
+
+ // Update with encoder frame size.
+ uint16_t codec_width = 640;
+ uint16_t codec_height = 480;
+ qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+ EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+ // Update rates for a sequence of intervals.
+ int target_rate[] = {80, 80, 80};
+ int encoder_sent_rate[] = {80, 80, 80};
+ int incoming_frame_rate[] = {30, 30, 30};
+ uint8_t fraction_lost[] = {10, 10, 10};
+ UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+ fraction_lost, 3);
+
+ // Update content: motion level, and 3 spatial prediction errors.
+ // High motion, low spatial.
+ UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+ EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240, 30.0f));
+
+ // Change content data: expect temporal down-sample.
+ qm_resolution_->UpdateCodecParameters(30.0f, 320, 240);
+ EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
+
+ // Reset rates and go lower in rate.
+ qm_resolution_->ResetRates();
+ int target_rate2[] = {40, 40, 40, 40, 40};
+ int encoder_sent_rate2[] = {40, 40, 40, 40, 40};
+ int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
+ uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
+ UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
+ fraction_lost2, 5);
+
+ // Update content: motion level, and 3 spatial prediction errors.
+ // Low motion, high spatial.
+ UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+ EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 320, 240, 20.5f));
+
+ // Reset rates and go high up in rate: expect to go back up both spatially
+ // and temporally. The 1/2x1/2 spatial is undone in two stages.
+ qm_resolution_->ResetRates();
+ // Update rates for a sequence of intervals.
+ int target_rate3[] = {1000, 1000, 1000, 1000, 1000};
+ int encoder_sent_rate3[] = {1000, 1000, 1000, 1000, 1000};
+ int incoming_frame_rate3[] = {20, 20, 20, 20, 20};
+ uint8_t fraction_lost3[] = {10, 10, 10, 10, 10};
+ UpdateQmRateData(target_rate3, encoder_sent_rate3, incoming_frame_rate3,
+ fraction_lost3, 5);
+
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+ EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+ float scale = (4.0f / 3.0f) / 2.0f;
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, scale, scale, 2.0f / 3.0f, 480,
+ 360, 30.0f));
+
+ qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
+ EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 3.0f / 4.0f, 3.0f / 4.0f, 1.0f,
+ 640, 480, 30.0f));
+}
+
+// No down-sampling below some total amount.
+TEST_F(QmSelectTest, NoActionTooMuchDownSampling) {
+ // Initialize with bitrate, frame rate, native system width/height, and
+ // number of temporal layers.
+ InitQmNativeData(150, 30, 1280, 720, 1);
+
+ // Update with encoder frame size.
+ uint16_t codec_width = 1280;
+ uint16_t codec_height = 720;
+ qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+ EXPECT_EQ(7, qm_resolution_->GetImageType(codec_width, codec_height));
+
+ // Update rates for a sequence of intervals.
+ int target_rate[] = {150, 150, 150};
+ int encoder_sent_rate[] = {150, 150, 150};
+ int incoming_frame_rate[] = {30, 30, 30};
+ uint8_t fraction_lost[] = {10, 10, 10};
+ UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+ fraction_lost, 3);
+
+ // Update content: motion level, and 3 spatial prediction errors.
+ // High motion, low spatial.
+ UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+ EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 640, 360, 30.0f));
+
+ // Reset and lower rates to get another spatial action (3/4x3/4).
+ // Lower the frame rate for spatial to be selected again.
+ qm_resolution_->ResetRates();
+ qm_resolution_->UpdateCodecParameters(10.0f, 640, 360);
+ EXPECT_EQ(4, qm_resolution_->GetImageType(640, 360));
+ // Update rates for a sequence of intervals.
+ int target_rate2[] = {70, 70, 70, 70, 70};
+ int encoder_sent_rate2[] = {70, 70, 70, 70, 70};
+ int incoming_frame_rate2[] = {10, 10, 10, 10, 10};
+ uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
+ UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
+ fraction_lost2, 5);
+
+ // Update content: motion level, and 3 spatial prediction errors.
+ // High motion, medium spatial.
+ UpdateQmContentData(kTemporalHigh, kSpatialMedium, kSpatialMedium,
+ kSpatialMedium);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(5, qm_resolution_->ComputeContentClass());
+ EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 4.0f / 3.0f, 4.0f / 3.0f, 1.0f,
+ 480, 270, 10.0f));
+
+ // Reset and go to very low rate: no action should be taken,
+ // we went down too much already.
+ qm_resolution_->ResetRates();
+ qm_resolution_->UpdateCodecParameters(10.0f, 480, 270);
+ EXPECT_EQ(3, qm_resolution_->GetImageType(480, 270));
+ // Update rates for a sequence of intervals.
+ int target_rate3[] = {10, 10, 10, 10, 10};
+ int encoder_sent_rate3[] = {10, 10, 10, 10, 10};
+ int incoming_frame_rate3[] = {10, 10, 10, 10, 10};
+ uint8_t fraction_lost3[] = {10, 10, 10, 10, 10};
+ UpdateQmRateData(target_rate3, encoder_sent_rate3, incoming_frame_rate3,
+ fraction_lost3, 5);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(5, qm_resolution_->ComputeContentClass());
+ EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 480, 270, 10.0f));
+}
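+
+// At this point the sequence has gone 1280x720 -> 640x360 -> 480x270, a total
+// spatial reduction of (8/3)x(8/3) from the native size. The expectation
+// above treats this as past the down-sampling cap, so the selected action is
+// a no-op (all factors 1.0 and the resolution unchanged).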
+
+// Multiple down-sampling stages and then undo all of them.
+// Spatial down-sample 3/4x3/4, followed by temporal down-sample 2/3,
+// followed by spatial 3/4x3/4. Then go up to full state,
+// as encoding rate has increased.
+TEST_F(QmSelectTest, MultipleStagesCheckActionHistory1) {
+ // Initialize with bitrate, frame rate, native system width/height, and
+ // number of temporal layers.
+ InitQmNativeData(150, 30, 640, 480, 1);
+
+ // Update with encoder frame size.
+ uint16_t codec_width = 640;
+ uint16_t codec_height = 480;
+ qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+ EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+ // Go down spatial 3/4x3/4.
+ // Update rates for a sequence of intervals.
+ int target_rate[] = {150, 150, 150};
+ int encoder_sent_rate[] = {150, 150, 150};
+ int incoming_frame_rate[] = {30, 30, 30};
+ uint8_t fraction_lost[] = {10, 10, 10};
+ UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+ fraction_lost, 3);
+
+ // Update content: motion level, and 3 spatial prediction errors.
+ // Medium motion, low spatial.
+ UpdateQmContentData(kTemporalMedium, kSpatialLow, kSpatialLow, kSpatialLow);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(6, qm_resolution_->ComputeContentClass());
+ EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 4.0f / 3.0f, 4.0f / 3.0f, 1.0f,
+ 480, 360, 30.0f));
+ // Go down 2/3 temporal.
+ qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
+ EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
+ qm_resolution_->ResetRates();
+ int target_rate2[] = {100, 100, 100, 100, 100};
+ int encoder_sent_rate2[] = {100, 100, 100, 100, 100};
+ int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
+ uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
+ UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
+ fraction_lost2, 5);
+
+ // Update content: motion level, and 3 spatial prediction errors.
+ // Low motion, high spatial.
+ UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+ EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 480, 360, 20.5f));
+
+ // Go down 3/4x3/4 spatial:
+ qm_resolution_->UpdateCodecParameters(20.0f, 480, 360);
+ qm_resolution_->ResetRates();
+ int target_rate3[] = {80, 80, 80, 80, 80};
+ int encoder_sent_rate3[] = {80, 80, 80, 80, 80};
+ int incoming_frame_rate3[] = {20, 20, 20, 20, 20};
+ uint8_t fraction_lost3[] = {10, 10, 10, 10, 10};
+ UpdateQmRateData(target_rate3, encoder_sent_rate3, incoming_frame_rate3,
+ fraction_lost3, 5);
+
+ // Update content: motion level, and 3 spatial prediction errors.
+ // High motion, low spatial.
+ UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+ EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+ // The two spatial actions of 3/4x3/4 are converted to 1/2x1/2,
+ // so scale factor is 2.0.
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240, 20.0f));
+
+  // Reset rates and go high up in rate: expect to go up 1/2x1/2 spatially
+  // and 1/2 temporally. The spatial undoing is done in two stages.
+ qm_resolution_->UpdateCodecParameters(15.0f, 320, 240);
+ EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
+ qm_resolution_->ResetRates();
+ // Update rates for a sequence of intervals.
+ int target_rate4[] = {1000, 1000, 1000, 1000, 1000};
+ int encoder_sent_rate4[] = {1000, 1000, 1000, 1000, 1000};
+ int incoming_frame_rate4[] = {15, 15, 15, 15, 15};
+ uint8_t fraction_lost4[] = {10, 10, 10, 10, 10};
+ UpdateQmRateData(target_rate4, encoder_sent_rate4, incoming_frame_rate4,
+ fraction_lost4, 5);
+
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+ EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+ float scale = (4.0f / 3.0f) / 2.0f;
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, scale, scale, 2.0f / 3.0f, 480,
+ 360, 30.0f));
+
+ qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
+ EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 3.0f / 4.0f, 3.0f / 4.0f, 1.0f,
+ 640, 480, 30.0f));
+}
+
+// Multiple down-sampling and up-sample stages, with partial undoing.
+// Spatial down-sample 1/2x1/2, followed by temporal down-sample 2/3, undo the
+// temporal, then another temporal, and then undo both spatial and temporal.
+TEST_F(QmSelectTest, MultipleStagesCheckActionHistory2) {
+ // Initialize with bitrate, frame rate, native system width/height, and
+ // number of temporal layers.
+ InitQmNativeData(80, 30, 640, 480, 1);
+
+ // Update with encoder frame size.
+ uint16_t codec_width = 640;
+ uint16_t codec_height = 480;
+ qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+ EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+ // Go down 1/2x1/2 spatial.
+ // Update rates for a sequence of intervals.
+ int target_rate[] = {80, 80, 80};
+ int encoder_sent_rate[] = {80, 80, 80};
+ int incoming_frame_rate[] = {30, 30, 30};
+ uint8_t fraction_lost[] = {10, 10, 10};
+ UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+ fraction_lost, 3);
+
+ // Update content: motion level, and 3 spatial prediction errors.
+ // Medium motion, low spatial.
+ UpdateQmContentData(kTemporalMedium, kSpatialLow, kSpatialLow, kSpatialLow);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(6, qm_resolution_->ComputeContentClass());
+ EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240, 30.0f));
+
+ // Go down 2/3 temporal.
+ qm_resolution_->UpdateCodecParameters(30.0f, 320, 240);
+ EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
+ qm_resolution_->ResetRates();
+ int target_rate2[] = {40, 40, 40, 40, 40};
+ int encoder_sent_rate2[] = {40, 40, 40, 40, 40};
+ int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
+ uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
+ UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
+ fraction_lost2, 5);
+
+ // Update content: motion level, and 3 spatial prediction errors.
+ // Medium motion, high spatial.
+ UpdateQmContentData(kTemporalMedium, kSpatialHigh, kSpatialHigh,
+ kSpatialHigh);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(7, qm_resolution_->ComputeContentClass());
+ EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 320, 240, 20.5f));
+
+ // Go up 2/3 temporally.
+ qm_resolution_->UpdateCodecParameters(20.0f, 320, 240);
+ qm_resolution_->ResetRates();
+ // Update rates for a sequence of intervals.
+ int target_rate3[] = {150, 150, 150, 150, 150};
+ int encoder_sent_rate3[] = {150, 150, 150, 150, 150};
+ int incoming_frame_rate3[] = {20, 20, 20, 20, 20};
+ uint8_t fraction_lost3[] = {10, 10, 10, 10, 10};
+ UpdateQmRateData(target_rate3, encoder_sent_rate3, incoming_frame_rate3,
+ fraction_lost3, 5);
+
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(7, qm_resolution_->ComputeContentClass());
+ EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f / 3.0f, 320,
+ 240, 30.0f));
+
+ // Go down 2/3 temporal.
+ qm_resolution_->UpdateCodecParameters(30.0f, 320, 240);
+ EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
+ qm_resolution_->ResetRates();
+ int target_rate4[] = {40, 40, 40, 40, 40};
+ int encoder_sent_rate4[] = {40, 40, 40, 40, 40};
+ int incoming_frame_rate4[] = {30, 30, 30, 30, 30};
+ uint8_t fraction_lost4[] = {10, 10, 10, 10, 10};
+ UpdateQmRateData(target_rate4, encoder_sent_rate4, incoming_frame_rate4,
+ fraction_lost4, 5);
+
+ // Update content: motion level, and 3 spatial prediction errors.
+ // Low motion, high spatial.
+ UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+ EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 320, 240, 20.5f));
+
+ // Go up spatial and temporal. Spatial undoing is done in 2 stages.
+ qm_resolution_->UpdateCodecParameters(20.5f, 320, 240);
+ qm_resolution_->ResetRates();
+ // Update rates for a sequence of intervals.
+ int target_rate5[] = {1000, 1000, 1000, 1000, 1000};
+ int encoder_sent_rate5[] = {1000, 1000, 1000, 1000, 1000};
+ int incoming_frame_rate5[] = {20, 20, 20, 20, 20};
+ uint8_t fraction_lost5[] = {10, 10, 10, 10, 10};
+ UpdateQmRateData(target_rate5, encoder_sent_rate5, incoming_frame_rate5,
+ fraction_lost5, 5);
+
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ float scale = (4.0f / 3.0f) / 2.0f;
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, scale, scale, 2.0f / 3.0f, 480,
+ 360, 30.0f));
+
+ qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
+ EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 3.0f / 4.0f, 3.0f / 4.0f, 1.0f,
+ 640, 480, 30.0f));
+}
+
+// Multiple down-sampling and up-sample stages, with partial undoing.
+// Spatial down-sample 3/4x3/4, followed by temporal down-sample 2/3,
+// undo the temporal 2/3, and then undo the spatial.
+TEST_F(QmSelectTest, MultipleStagesCheckActionHistory3) {
+ // Initialize with bitrate, frame rate, native system width/height, and
+ // number of temporal layers.
+ InitQmNativeData(100, 30, 640, 480, 1);
+
+ // Update with encoder frame size.
+ uint16_t codec_width = 640;
+ uint16_t codec_height = 480;
+ qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+ EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+ // Go down 3/4x3/4 spatial.
+ // Update rates for a sequence of intervals.
+ int target_rate[] = {100, 100, 100};
+ int encoder_sent_rate[] = {100, 100, 100};
+ int incoming_frame_rate[] = {30, 30, 30};
+ uint8_t fraction_lost[] = {10, 10, 10};
+ UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+ fraction_lost, 3);
+
+ // Update content: motion level, and 3 spatial prediction errors.
+ // Medium motion, low spatial.
+ UpdateQmContentData(kTemporalMedium, kSpatialLow, kSpatialLow, kSpatialLow);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(6, qm_resolution_->ComputeContentClass());
+ EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 4.0f / 3.0f, 4.0f / 3.0f, 1.0f,
+ 480, 360, 30.0f));
+
+ // Go down 2/3 temporal.
+ qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
+ EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
+ qm_resolution_->ResetRates();
+ int target_rate2[] = {100, 100, 100, 100, 100};
+ int encoder_sent_rate2[] = {100, 100, 100, 100, 100};
+ int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
+ uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
+ UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
+ fraction_lost2, 5);
+
+ // Update content: motion level, and 3 spatial prediction errors.
+ // Low motion, high spatial.
+ UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+ EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 480, 360, 20.5f));
+
+ // Go up 2/3 temporal.
+ qm_resolution_->UpdateCodecParameters(20.5f, 480, 360);
+ qm_resolution_->ResetRates();
+ // Update rates for a sequence of intervals.
+ int target_rate3[] = {250, 250, 250, 250, 250};
+ int encoder_sent_rate3[] = {250, 250, 250, 250, 250};
+  int incoming_frame_rate3[] = {20, 20, 20, 20, 20};
+ uint8_t fraction_lost3[] = {10, 10, 10, 10, 10};
+ UpdateQmRateData(target_rate3, encoder_sent_rate3, incoming_frame_rate3,
+ fraction_lost3, 5);
+
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+ EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f / 3.0f, 480,
+ 360, 30.0f));
+
+ // Go up spatial.
+ qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
+ EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
+ qm_resolution_->ResetRates();
+ int target_rate4[] = {500, 500, 500, 500, 500};
+ int encoder_sent_rate4[] = {500, 500, 500, 500, 500};
+ int incoming_frame_rate4[] = {30, 30, 30, 30, 30};
+ uint8_t fraction_lost4[] = {30, 30, 30, 30, 30};
+ UpdateQmRateData(target_rate4, encoder_sent_rate4, incoming_frame_rate4,
+ fraction_lost4, 5);
+
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 3.0f / 4.0f, 3.0f / 4.0f, 1.0f,
+ 640, 480, 30.0f));
+}
+
+// Two stages of 3/4x3/4 converted to one stage of 1/2x1/2.
+TEST_F(QmSelectTest, ConvertThreeQuartersToOneHalf) {
+ // Initialize with bitrate, frame rate, native system width/height, and
+ // number of temporal layers.
+ InitQmNativeData(150, 30, 640, 480, 1);
+
+ // Update with encoder frame size.
+ uint16_t codec_width = 640;
+ uint16_t codec_height = 480;
+ qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+ EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+ // Go down 3/4x3/4 spatial.
+ // Update rates for a sequence of intervals.
+ int target_rate[] = {150, 150, 150};
+ int encoder_sent_rate[] = {150, 150, 150};
+ int incoming_frame_rate[] = {30, 30, 30};
+ uint8_t fraction_lost[] = {10, 10, 10};
+ UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+ fraction_lost, 3);
+
+ // Update content: motion level, and 3 spatial prediction errors.
+ // Medium motion, low spatial.
+ UpdateQmContentData(kTemporalMedium, kSpatialLow, kSpatialLow, kSpatialLow);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(6, qm_resolution_->ComputeContentClass());
+ EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 4.0f / 3.0f, 4.0f / 3.0f, 1.0f,
+ 480, 360, 30.0f));
+
+  // Set rates to go down another 3/4 spatial. Should be converted to 1/2x1/2.
+ qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
+ EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
+ qm_resolution_->ResetRates();
+ int target_rate2[] = {100, 100, 100, 100, 100};
+ int encoder_sent_rate2[] = {100, 100, 100, 100, 100};
+ int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
+ uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
+ UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
+ fraction_lost2, 5);
+
+ // Update content: motion level, and 3 spatial prediction errors.
+ // Medium motion, low spatial.
+ UpdateQmContentData(kTemporalMedium, kSpatialLow, kSpatialLow, kSpatialLow);
+ EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+ EXPECT_EQ(6, qm_resolution_->ComputeContentClass());
+ EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+ EXPECT_TRUE(
+ IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240, 30.0f));
+}
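+
+// Two consecutive 3/4x3/4 stages would leave 9/16 of each dimension; the
+// selector instead snaps them to a single 1/2x1/2 stage (see the comment in
+// MultipleStagesCheckActionHistory1), which is why the expected factors above
+// are 2.0 rather than 16/9.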
+
+void QmSelectTest::InitQmNativeData(float initial_bit_rate,
+ int user_frame_rate,
+ int native_width,
+ int native_height,
+ int num_layers) {
+ EXPECT_EQ(
+ 0, qm_resolution_->Initialize(initial_bit_rate, user_frame_rate,
+ native_width, native_height, num_layers));
+}
+
+void QmSelectTest::UpdateQmContentData(float motion_metric,
+ float spatial_metric,
+ float spatial_metric_horiz,
+ float spatial_metric_vert) {
+ content_metrics_->motion_magnitude = motion_metric;
+ content_metrics_->spatial_pred_err = spatial_metric;
+ content_metrics_->spatial_pred_err_h = spatial_metric_horiz;
+ content_metrics_->spatial_pred_err_v = spatial_metric_vert;
+ qm_resolution_->UpdateContent(content_metrics_);
+}
+
+void QmSelectTest::UpdateQmEncodedFrame(size_t* encoded_size,
+ size_t num_updates) {
+ for (size_t i = 0; i < num_updates; ++i) {
+    // Convert from kbits to bytes.
+ size_t encoded_size_update = 1000 * encoded_size[i] / 8;
+ qm_resolution_->UpdateEncodedSize(encoded_size_update);
+ }
+}
+
+void QmSelectTest::UpdateQmRateData(int* target_rate,
+ int* encoder_sent_rate,
+ int* incoming_frame_rate,
+ uint8_t* fraction_lost,
+ int num_updates) {
+ for (int i = 0; i < num_updates; ++i) {
+ float target_rate_update = target_rate[i];
+ float encoder_sent_rate_update = encoder_sent_rate[i];
+ float incoming_frame_rate_update = incoming_frame_rate[i];
+ uint8_t fraction_lost_update = fraction_lost[i];
+ qm_resolution_->UpdateRates(target_rate_update, encoder_sent_rate_update,
+ incoming_frame_rate_update,
+ fraction_lost_update);
+ }
+}
+
+// Check if the selected action from the QmResolution class matches the
+// expected scales |fac_width|, |fac_height|, and |fac_temp|.
+bool QmSelectTest::IsSelectedActionCorrect(VCMResolutionScale* qm_scale,
+ float fac_width,
+ float fac_height,
+ float fac_temp,
+ uint16_t new_width,
+ uint16_t new_height,
+ float new_frame_rate) {
+  return qm_scale->spatial_width_fact == fac_width &&
+         qm_scale->spatial_height_fact == fac_height &&
+         qm_scale->temporal_fact == fac_temp &&
+         qm_scale->codec_width == new_width &&
+         qm_scale->codec_height == new_height &&
+         qm_scale->frame_rate == new_frame_rate;
+}
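+
+// The exact float comparisons above are intentional: the expected factors are
+// built from the same float expressions (e.g. 4.0f / 3.0f) that the selector
+// presumably uses, so a mismatch signals a genuinely different action rather
+// than rounding noise.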
+} // namespace webrtc
diff --git a/webrtc/modules/video_coding/receiver.cc b/webrtc/modules/video_coding/receiver.cc
new file mode 100644
index 0000000000..fa2a2dca29
--- /dev/null
+++ b/webrtc/modules/video_coding/receiver.cc
@@ -0,0 +1,269 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_coding/receiver.h"
+
+#include <assert.h>
+
+#include <cstdlib>
+#include <utility>
+#include <vector>
+
+#include "webrtc/base/logging.h"
+#include "webrtc/base/trace_event.h"
+#include "webrtc/modules/video_coding/encoded_frame.h"
+#include "webrtc/modules/video_coding/internal_defines.h"
+#include "webrtc/modules/video_coding/media_opt_util.h"
+#include "webrtc/system_wrappers/include/clock.h"
+
+namespace webrtc {
+
+enum { kMaxReceiverDelayMs = 10000 };
+
+VCMReceiver::VCMReceiver(VCMTiming* timing,
+ Clock* clock,
+ EventFactory* event_factory)
+ : VCMReceiver(timing,
+ clock,
+ rtc::scoped_ptr<EventWrapper>(event_factory->CreateEvent()),
+ rtc::scoped_ptr<EventWrapper>(event_factory->CreateEvent())) {
+}
+
+VCMReceiver::VCMReceiver(VCMTiming* timing,
+ Clock* clock,
+ rtc::scoped_ptr<EventWrapper> receiver_event,
+ rtc::scoped_ptr<EventWrapper> jitter_buffer_event)
+ : crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
+ clock_(clock),
+ jitter_buffer_(clock_, std::move(jitter_buffer_event)),
+ timing_(timing),
+ render_wait_event_(std::move(receiver_event)),
+ max_video_delay_ms_(kMaxVideoDelayMs) {
+ Reset();
+}
+
+VCMReceiver::~VCMReceiver() {
+ render_wait_event_->Set();
+ delete crit_sect_;
+}
+
+void VCMReceiver::Reset() {
+ CriticalSectionScoped cs(crit_sect_);
+ if (!jitter_buffer_.Running()) {
+ jitter_buffer_.Start();
+ } else {
+ jitter_buffer_.Flush();
+ }
+}
+
+void VCMReceiver::UpdateRtt(int64_t rtt) {
+ jitter_buffer_.UpdateRtt(rtt);
+}
+
+int32_t VCMReceiver::InsertPacket(const VCMPacket& packet,
+ uint16_t frame_width,
+ uint16_t frame_height) {
+ // Insert the packet into the jitter buffer. The packet can either be empty or
+ // contain media at this point.
+ bool retransmitted = false;
+ const VCMFrameBufferEnum ret =
+ jitter_buffer_.InsertPacket(packet, &retransmitted);
+ if (ret == kOldPacket) {
+ return VCM_OK;
+ } else if (ret == kFlushIndicator) {
+ return VCM_FLUSH_INDICATOR;
+ } else if (ret < 0) {
+ return VCM_JITTER_BUFFER_ERROR;
+ }
+ if (ret == kCompleteSession && !retransmitted) {
+ // We don't want to include timestamps which have suffered from
+ // retransmission here, since we compensate with extra retransmission
+ // delay within the jitter estimate.
+ timing_->IncomingTimestamp(packet.timestamp, clock_->TimeInMilliseconds());
+ }
+ return VCM_OK;
+}
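+
+// A sketch of how a caller might react to the return codes above
+// (RequestKeyFrame() and ReportError() are illustrative placeholders, not
+// part of this API):
+//   int32_t ret = receiver->InsertPacket(packet, width, height);
+//   if (ret == VCM_FLUSH_INDICATOR)
+//     RequestKeyFrame();  // Jitter buffer was flushed; resynchronize.
+//   else if (ret < 0)
+//     ReportError(ret);   // E.g. VCM_JITTER_BUFFER_ERROR.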
+
+void VCMReceiver::TriggerDecoderShutdown() {
+ jitter_buffer_.Stop();
+ render_wait_event_->Set();
+}
+
+VCMEncodedFrame* VCMReceiver::FrameForDecoding(uint16_t max_wait_time_ms,
+ int64_t* next_render_time_ms,
+ bool prefer_late_decoding) {
+ const int64_t start_time_ms = clock_->TimeInMilliseconds();
+ uint32_t frame_timestamp = 0;
+ // Exhaust wait time to get a complete frame for decoding.
+ bool found_frame =
+ jitter_buffer_.NextCompleteTimestamp(max_wait_time_ms, &frame_timestamp);
+
+ if (!found_frame)
+ found_frame = jitter_buffer_.NextMaybeIncompleteTimestamp(&frame_timestamp);
+
+ if (!found_frame)
+ return NULL;
+
+ // We have a frame - Set timing and render timestamp.
+ timing_->SetJitterDelay(jitter_buffer_.EstimatedJitterMs());
+ const int64_t now_ms = clock_->TimeInMilliseconds();
+ timing_->UpdateCurrentDelay(frame_timestamp);
+ *next_render_time_ms = timing_->RenderTimeMs(frame_timestamp, now_ms);
+ // Check render timing.
+ bool timing_error = false;
+ // Assume that render timing errors are due to changes in the video stream.
+ if (*next_render_time_ms < 0) {
+ timing_error = true;
+ } else if (std::abs(*next_render_time_ms - now_ms) > max_video_delay_ms_) {
+ int frame_delay = static_cast<int>(std::abs(*next_render_time_ms - now_ms));
+ LOG(LS_WARNING) << "A frame about to be decoded is out of the configured "
+ << "delay bounds (" << frame_delay << " > "
+ << max_video_delay_ms_
+ << "). Resetting the video jitter buffer.";
+ timing_error = true;
+ } else if (static_cast<int>(timing_->TargetVideoDelay()) >
+ max_video_delay_ms_) {
+ LOG(LS_WARNING) << "The video target delay has grown larger than "
+ << max_video_delay_ms_ << " ms. Resetting jitter buffer.";
+ timing_error = true;
+ }
+
+ if (timing_error) {
+ // Timing error => reset timing and flush the jitter buffer.
+ jitter_buffer_.Flush();
+ timing_->Reset();
+ return NULL;
+ }
+
+ if (prefer_late_decoding) {
+ // Decode frame as close as possible to the render timestamp.
+ const int32_t available_wait_time =
+ max_wait_time_ms -
+ static_cast<int32_t>(clock_->TimeInMilliseconds() - start_time_ms);
+ uint16_t new_max_wait_time =
+ static_cast<uint16_t>(VCM_MAX(available_wait_time, 0));
+ uint32_t wait_time_ms = timing_->MaxWaitingTime(
+ *next_render_time_ms, clock_->TimeInMilliseconds());
+ if (new_max_wait_time < wait_time_ms) {
+      // We're not allowed to wait until the frame is supposed to be rendered;
+      // wait as long as we're allowed (to avoid busy looping) and then return
+      // NULL. The next call to this function might return the frame.
+ render_wait_event_->Wait(new_max_wait_time);
+ return NULL;
+ }
+ // Wait until it's time to render.
+ render_wait_event_->Wait(wait_time_ms);
+ }
+
+ // Extract the frame from the jitter buffer and set the render time.
+ VCMEncodedFrame* frame = jitter_buffer_.ExtractAndSetDecode(frame_timestamp);
+ if (frame == NULL) {
+ return NULL;
+ }
+ frame->SetRenderTime(*next_render_time_ms);
+ TRACE_EVENT_ASYNC_STEP1("webrtc", "Video", frame->TimeStamp(), "SetRenderTS",
+ "render_time", *next_render_time_ms);
+ if (!frame->Complete()) {
+ // Update stats for incomplete frames.
+ bool retransmitted = false;
+ const int64_t last_packet_time_ms =
+ jitter_buffer_.LastPacketTime(frame, &retransmitted);
+ if (last_packet_time_ms >= 0 && !retransmitted) {
+ // We don't want to include timestamps which have suffered from
+ // retransmission here, since we compensate with extra retransmission
+ // delay within the jitter estimate.
+ timing_->IncomingTimestamp(frame_timestamp, last_packet_time_ms);
+ }
+ }
+ return frame;
+}
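+
+// A minimal decode-loop sketch around FrameForDecoding (Decode() is an
+// illustrative placeholder for the caller's decoder):
+//   int64_t render_time_ms = 0;
+//   VCMEncodedFrame* frame =
+//       receiver.FrameForDecoding(kMaxWaitMs, &render_time_ms, true);
+//   if (frame != NULL) {
+//     Decode(*frame);                // Runs close to the render deadline.
+//     receiver.ReleaseFrame(frame);  // Extracted frames must be released.
+//   }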
+
+void VCMReceiver::ReleaseFrame(VCMEncodedFrame* frame) {
+ jitter_buffer_.ReleaseFrame(frame);
+}
+
+void VCMReceiver::ReceiveStatistics(uint32_t* bitrate, uint32_t* framerate) {
+ assert(bitrate);
+ assert(framerate);
+ jitter_buffer_.IncomingRateStatistics(framerate, bitrate);
+}
+
+uint32_t VCMReceiver::DiscardedPackets() const {
+ return jitter_buffer_.num_discarded_packets();
+}
+
+void VCMReceiver::SetNackMode(VCMNackMode nackMode,
+ int64_t low_rtt_nack_threshold_ms,
+ int64_t high_rtt_nack_threshold_ms) {
+ CriticalSectionScoped cs(crit_sect_);
+ // Default to always having NACK enabled in hybrid mode.
+ jitter_buffer_.SetNackMode(nackMode, low_rtt_nack_threshold_ms,
+ high_rtt_nack_threshold_ms);
+}
+
+void VCMReceiver::SetNackSettings(size_t max_nack_list_size,
+ int max_packet_age_to_nack,
+ int max_incomplete_time_ms) {
+ jitter_buffer_.SetNackSettings(max_nack_list_size, max_packet_age_to_nack,
+ max_incomplete_time_ms);
+}
+
+VCMNackMode VCMReceiver::NackMode() const {
+ CriticalSectionScoped cs(crit_sect_);
+ return jitter_buffer_.nack_mode();
+}
+
+std::vector<uint16_t> VCMReceiver::NackList(bool* request_key_frame) {
+ return jitter_buffer_.GetNackList(request_key_frame);
+}
+
+void VCMReceiver::SetDecodeErrorMode(VCMDecodeErrorMode decode_error_mode) {
+ jitter_buffer_.SetDecodeErrorMode(decode_error_mode);
+}
+
+VCMDecodeErrorMode VCMReceiver::DecodeErrorMode() const {
+ return jitter_buffer_.decode_error_mode();
+}
+
+int VCMReceiver::SetMinReceiverDelay(int desired_delay_ms) {
+ CriticalSectionScoped cs(crit_sect_);
+ if (desired_delay_ms < 0 || desired_delay_ms > kMaxReceiverDelayMs) {
+ return -1;
+ }
+ max_video_delay_ms_ = desired_delay_ms + kMaxVideoDelayMs;
+ // Initializing timing to the desired delay.
+ timing_->set_min_playout_delay(desired_delay_ms);
+ return 0;
+}
+
+int VCMReceiver::RenderBufferSizeMs() {
+ uint32_t timestamp_start = 0u;
+ uint32_t timestamp_end = 0u;
+ // Render timestamps are computed just prior to decoding. Therefore this is
+ // only an estimate based on frames' timestamps and current timing state.
+ jitter_buffer_.RenderBufferSize(&timestamp_start, &timestamp_end);
+ if (timestamp_start == timestamp_end) {
+ return 0;
+ }
+ // Update timing.
+ const int64_t now_ms = clock_->TimeInMilliseconds();
+ timing_->SetJitterDelay(jitter_buffer_.EstimatedJitterMs());
+ // Get render timestamps.
+ uint32_t render_start = timing_->RenderTimeMs(timestamp_start, now_ms);
+ uint32_t render_end = timing_->RenderTimeMs(timestamp_end, now_ms);
+ return render_end - render_start;
+}
+
+void VCMReceiver::RegisterStatsCallback(
+ VCMReceiveStatisticsCallback* callback) {
+ jitter_buffer_.RegisterStatsCallback(callback);
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/video_coding/receiver.h b/webrtc/modules/video_coding/receiver.h
new file mode 100644
index 0000000000..ff0eef8a6a
--- /dev/null
+++ b/webrtc/modules/video_coding/receiver.h
@@ -0,0 +1,92 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_RECEIVER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_RECEIVER_H_
+
+#include <vector>
+
+#include "webrtc/modules/video_coding/jitter_buffer.h"
+#include "webrtc/modules/video_coding/packet.h"
+#include "webrtc/modules/video_coding/timing.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/modules/video_coding/include/video_coding.h"
+#include "webrtc/modules/video_coding/include/video_coding_defines.h"
+
+namespace webrtc {
+
+class Clock;
+class VCMEncodedFrame;
+
+class VCMReceiver {
+ public:
+ VCMReceiver(VCMTiming* timing, Clock* clock, EventFactory* event_factory);
+
+  // Using this constructor, you can specify separate events for the receiver
+  // and the jitter buffer. Useful for unit tests when you want to simulate
+  // incoming packets, in which case the jitter buffer's wait event is
+  // different from that of VCMReceiver itself.
+ VCMReceiver(VCMTiming* timing,
+ Clock* clock,
+ rtc::scoped_ptr<EventWrapper> receiver_event,
+ rtc::scoped_ptr<EventWrapper> jitter_buffer_event);
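+
+  // Illustrative construction with per-buffer events, mirroring the usage in
+  // receiver_unittest.cc:
+  //   VCMReceiver receiver(
+  //       &timing, &clock,
+  //       rtc::scoped_ptr<EventWrapper>(new FrameInjectEvent(&clock, false)),
+  //       rtc::scoped_ptr<EventWrapper>(new FrameInjectEvent(&clock, true)));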
+
+ ~VCMReceiver();
+
+ void Reset();
+ void UpdateRtt(int64_t rtt);
+ int32_t InsertPacket(const VCMPacket& packet,
+ uint16_t frame_width,
+ uint16_t frame_height);
+ VCMEncodedFrame* FrameForDecoding(uint16_t max_wait_time_ms,
+ int64_t* next_render_time_ms,
+ bool prefer_late_decoding);
+ void ReleaseFrame(VCMEncodedFrame* frame);
+ void ReceiveStatistics(uint32_t* bitrate, uint32_t* framerate);
+ uint32_t DiscardedPackets() const;
+
+ // NACK.
+ void SetNackMode(VCMNackMode nackMode,
+ int64_t low_rtt_nack_threshold_ms,
+ int64_t high_rtt_nack_threshold_ms);
+ void SetNackSettings(size_t max_nack_list_size,
+ int max_packet_age_to_nack,
+ int max_incomplete_time_ms);
+ VCMNackMode NackMode() const;
+ std::vector<uint16_t> NackList(bool* request_key_frame);
+
+ // Receiver video delay.
+ int SetMinReceiverDelay(int desired_delay_ms);
+
+ // Decoding with errors.
+ void SetDecodeErrorMode(VCMDecodeErrorMode decode_error_mode);
+ VCMDecodeErrorMode DecodeErrorMode() const;
+
+ // Returns size in time (milliseconds) of complete continuous frames in the
+ // jitter buffer. The render time is estimated based on the render delay at
+ // the time this function is called.
+ int RenderBufferSizeMs();
+
+ void RegisterStatsCallback(VCMReceiveStatisticsCallback* callback);
+
+ void TriggerDecoderShutdown();
+
+ private:
+ CriticalSectionWrapper* crit_sect_;
+ Clock* const clock_;
+ VCMJitterBuffer jitter_buffer_;
+ VCMTiming* timing_;
+ rtc::scoped_ptr<EventWrapper> render_wait_event_;
+ int max_video_delay_ms_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_RECEIVER_H_
diff --git a/webrtc/modules/video_coding/receiver_unittest.cc b/webrtc/modules/video_coding/receiver_unittest.cc
new file mode 100644
index 0000000000..1f3a144bad
--- /dev/null
+++ b/webrtc/modules/video_coding/receiver_unittest.cc
@@ -0,0 +1,575 @@
+/* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string.h>
+
+#include <list>
+#include <queue>
+#include <vector>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/modules/video_coding/encoded_frame.h"
+#include "webrtc/modules/video_coding/packet.h"
+#include "webrtc/modules/video_coding/receiver.h"
+#include "webrtc/modules/video_coding/test/stream_generator.h"
+#include "webrtc/modules/video_coding/timing.h"
+#include "webrtc/modules/video_coding/test/test_util.h"
+#include "webrtc/system_wrappers/include/clock.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+
+namespace webrtc {
+
+class TestVCMReceiver : public ::testing::Test {
+ protected:
+ enum { kWidth = 640 };
+ enum { kHeight = 480 };
+
+ TestVCMReceiver()
+ : clock_(new SimulatedClock(0)),
+ timing_(clock_.get()),
+ receiver_(&timing_, clock_.get(), &event_factory_) {
+ stream_generator_.reset(
+ new StreamGenerator(0, clock_->TimeInMilliseconds()));
+ }
+
+ virtual void SetUp() { receiver_.Reset(); }
+
+ int32_t InsertPacket(int index) {
+ VCMPacket packet;
+ bool packet_available = stream_generator_->GetPacket(&packet, index);
+ EXPECT_TRUE(packet_available);
+ if (!packet_available)
+ return kGeneralError; // Return here to avoid crashes below.
+ return receiver_.InsertPacket(packet, kWidth, kHeight);
+ }
+
+ int32_t InsertPacketAndPop(int index) {
+ VCMPacket packet;
+ bool packet_available = stream_generator_->PopPacket(&packet, index);
+ EXPECT_TRUE(packet_available);
+ if (!packet_available)
+ return kGeneralError; // Return here to avoid crashes below.
+ return receiver_.InsertPacket(packet, kWidth, kHeight);
+ }
+
+ int32_t InsertFrame(FrameType frame_type, bool complete) {
+ int num_of_packets = complete ? 1 : 2;
+ stream_generator_->GenerateFrame(
+ frame_type, (frame_type != kEmptyFrame) ? num_of_packets : 0,
+ (frame_type == kEmptyFrame) ? 1 : 0, clock_->TimeInMilliseconds());
+ int32_t ret = InsertPacketAndPop(0);
+ if (!complete) {
+ // Drop the second packet.
+ VCMPacket packet;
+ stream_generator_->PopPacket(&packet, 0);
+ }
+ clock_->AdvanceTimeMilliseconds(kDefaultFramePeriodMs);
+ return ret;
+ }
+
+ bool DecodeNextFrame() {
+ int64_t render_time_ms = 0;
+ VCMEncodedFrame* frame =
+ receiver_.FrameForDecoding(0, &render_time_ms, false);
+ if (!frame)
+ return false;
+ receiver_.ReleaseFrame(frame);
+ return true;
+ }
+
+ rtc::scoped_ptr<SimulatedClock> clock_;
+ VCMTiming timing_;
+ NullEventFactory event_factory_;
+ VCMReceiver receiver_;
+ rtc::scoped_ptr<StreamGenerator> stream_generator_;
+};
+
+TEST_F(TestVCMReceiver, RenderBufferSize_AllComplete) {
+ EXPECT_EQ(0, receiver_.RenderBufferSizeMs());
+ EXPECT_GE(InsertFrame(kVideoFrameKey, true), kNoError);
+ int num_of_frames = 10;
+ for (int i = 0; i < num_of_frames; ++i) {
+ EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
+ }
+ EXPECT_EQ(num_of_frames * kDefaultFramePeriodMs,
+ receiver_.RenderBufferSizeMs());
+}
+
+TEST_F(TestVCMReceiver, RenderBufferSize_SkipToKeyFrame) {
+ EXPECT_EQ(0, receiver_.RenderBufferSizeMs());
+ const int kNumOfNonDecodableFrames = 2;
+ for (int i = 0; i < kNumOfNonDecodableFrames; ++i) {
+ EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
+ }
+ const int kNumOfFrames = 10;
+ EXPECT_GE(InsertFrame(kVideoFrameKey, true), kNoError);
+ for (int i = 0; i < kNumOfFrames - 1; ++i) {
+ EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
+ }
+ EXPECT_EQ((kNumOfFrames - 1) * kDefaultFramePeriodMs,
+ receiver_.RenderBufferSizeMs());
+}
+
+TEST_F(TestVCMReceiver, RenderBufferSize_NotAllComplete) {
+ EXPECT_EQ(0, receiver_.RenderBufferSizeMs());
+ EXPECT_GE(InsertFrame(kVideoFrameKey, true), kNoError);
+ int num_of_frames = 10;
+ for (int i = 0; i < num_of_frames; ++i) {
+ EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
+ }
+ num_of_frames++;
+ EXPECT_GE(InsertFrame(kVideoFrameDelta, false), kNoError);
+ for (int i = 0; i < num_of_frames; ++i) {
+ EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
+ }
+ EXPECT_EQ((num_of_frames - 1) * kDefaultFramePeriodMs,
+ receiver_.RenderBufferSizeMs());
+}
+
+TEST_F(TestVCMReceiver, RenderBufferSize_NoKeyFrame) {
+ EXPECT_EQ(0, receiver_.RenderBufferSizeMs());
+ int num_of_frames = 10;
+ for (int i = 0; i < num_of_frames; ++i) {
+ EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
+ }
+ int64_t next_render_time_ms = 0;
+ VCMEncodedFrame* frame =
+ receiver_.FrameForDecoding(10, &next_render_time_ms, false);
+ EXPECT_TRUE(frame == NULL);
+ receiver_.ReleaseFrame(frame);
+ EXPECT_GE(InsertFrame(kVideoFrameDelta, false), kNoError);
+ for (int i = 0; i < num_of_frames; ++i) {
+ EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
+ }
+ EXPECT_EQ(0, receiver_.RenderBufferSizeMs());
+}
+
+TEST_F(TestVCMReceiver, NonDecodableDuration_Empty) {
+  // Enable NACK, with no RTT thresholds for disabling retransmission delay.
+ receiver_.SetNackMode(kNack, -1, -1);
+ const size_t kMaxNackListSize = 1000;
+ const int kMaxPacketAgeToNack = 1000;
+ const int kMaxNonDecodableDuration = 500;
+ const int kMinDelayMs = 500;
+ receiver_.SetNackSettings(kMaxNackListSize, kMaxPacketAgeToNack,
+ kMaxNonDecodableDuration);
+ EXPECT_GE(InsertFrame(kVideoFrameKey, true), kNoError);
+ // Advance time until it's time to decode the key frame.
+ clock_->AdvanceTimeMilliseconds(kMinDelayMs);
+ EXPECT_TRUE(DecodeNextFrame());
+ bool request_key_frame = false;
+ std::vector<uint16_t> nack_list = receiver_.NackList(&request_key_frame);
+ EXPECT_FALSE(request_key_frame);
+}
+
+TEST_F(TestVCMReceiver, NonDecodableDuration_NoKeyFrame) {
+  // Enable NACK, with no RTT thresholds for disabling retransmission delay.
+ receiver_.SetNackMode(kNack, -1, -1);
+ const size_t kMaxNackListSize = 1000;
+ const int kMaxPacketAgeToNack = 1000;
+ const int kMaxNonDecodableDuration = 500;
+ receiver_.SetNackSettings(kMaxNackListSize, kMaxPacketAgeToNack,
+ kMaxNonDecodableDuration);
+ const int kNumFrames = kDefaultFrameRate * kMaxNonDecodableDuration / 1000;
+ for (int i = 0; i < kNumFrames; ++i) {
+ EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
+ }
+ bool request_key_frame = false;
+ std::vector<uint16_t> nack_list = receiver_.NackList(&request_key_frame);
+ EXPECT_TRUE(request_key_frame);
+}
+
+TEST_F(TestVCMReceiver, NonDecodableDuration_OneIncomplete) {
+  // Enable NACK, with no RTT thresholds for disabling retransmission delay.
+ receiver_.SetNackMode(kNack, -1, -1);
+ const size_t kMaxNackListSize = 1000;
+ const int kMaxPacketAgeToNack = 1000;
+ const int kMaxNonDecodableDuration = 500;
+ const int kMaxNonDecodableDurationFrames =
+ (kDefaultFrameRate * kMaxNonDecodableDuration + 500) / 1000;
+ const int kMinDelayMs = 500;
+ receiver_.SetNackSettings(kMaxNackListSize, kMaxPacketAgeToNack,
+ kMaxNonDecodableDuration);
+ receiver_.SetMinReceiverDelay(kMinDelayMs);
+ int64_t key_frame_inserted = clock_->TimeInMilliseconds();
+ EXPECT_GE(InsertFrame(kVideoFrameKey, true), kNoError);
+ // Insert an incomplete frame.
+ EXPECT_GE(InsertFrame(kVideoFrameDelta, false), kNoError);
+ // Insert enough frames to have too long non-decodable sequence.
+ for (int i = 0; i < kMaxNonDecodableDurationFrames; ++i) {
+ EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
+ }
+ // Advance time until it's time to decode the key frame.
+ clock_->AdvanceTimeMilliseconds(kMinDelayMs - clock_->TimeInMilliseconds() -
+ key_frame_inserted);
+ EXPECT_TRUE(DecodeNextFrame());
+ // Make sure we get a key frame request.
+ bool request_key_frame = false;
+ std::vector<uint16_t> nack_list = receiver_.NackList(&request_key_frame);
+ EXPECT_TRUE(request_key_frame);
+}
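+
+// Note on kMaxNonDecodableDurationFrames above: it rounds the 500 ms
+// non-decodable window to whole frames; e.g. with a default frame rate of
+// 30 fps, (30 * 500 + 500) / 1000 = 15 frames.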
+
+TEST_F(TestVCMReceiver, NonDecodableDuration_NoTrigger) {
+  // Enable NACK, with no RTT thresholds for disabling retransmission delay.
+ receiver_.SetNackMode(kNack, -1, -1);
+ const size_t kMaxNackListSize = 1000;
+ const int kMaxPacketAgeToNack = 1000;
+ const int kMaxNonDecodableDuration = 500;
+ const int kMaxNonDecodableDurationFrames =
+ (kDefaultFrameRate * kMaxNonDecodableDuration + 500) / 1000;
+ const int kMinDelayMs = 500;
+ receiver_.SetNackSettings(kMaxNackListSize, kMaxPacketAgeToNack,
+ kMaxNonDecodableDuration);
+ receiver_.SetMinReceiverDelay(kMinDelayMs);
+ int64_t key_frame_inserted = clock_->TimeInMilliseconds();
+ EXPECT_GE(InsertFrame(kVideoFrameKey, true), kNoError);
+ // Insert an incomplete frame.
+ EXPECT_GE(InsertFrame(kVideoFrameDelta, false), kNoError);
+  // Insert all but one of the frames needed to trigger a key frame request
+  // due to too long a duration of non-decodable frames.
+ for (int i = 0; i < kMaxNonDecodableDurationFrames - 1; ++i) {
+ EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
+ }
+ // Advance time until it's time to decode the key frame.
+ clock_->AdvanceTimeMilliseconds(kMinDelayMs - clock_->TimeInMilliseconds() -
+ key_frame_inserted);
+ EXPECT_TRUE(DecodeNextFrame());
+ // Make sure we don't get a key frame request since we haven't generated
+ // enough frames.
+ bool request_key_frame = false;
+ std::vector<uint16_t> nack_list = receiver_.NackList(&request_key_frame);
+ EXPECT_FALSE(request_key_frame);
+}
+
+TEST_F(TestVCMReceiver, NonDecodableDuration_NoTrigger2) {
+  // Enable NACK, with no RTT thresholds for disabling retransmission delay.
+ receiver_.SetNackMode(kNack, -1, -1);
+ const size_t kMaxNackListSize = 1000;
+ const int kMaxPacketAgeToNack = 1000;
+ const int kMaxNonDecodableDuration = 500;
+ const int kMaxNonDecodableDurationFrames =
+ (kDefaultFrameRate * kMaxNonDecodableDuration + 500) / 1000;
+ const int kMinDelayMs = 500;
+ receiver_.SetNackSettings(kMaxNackListSize, kMaxPacketAgeToNack,
+ kMaxNonDecodableDuration);
+ receiver_.SetMinReceiverDelay(kMinDelayMs);
+ int64_t key_frame_inserted = clock_->TimeInMilliseconds();
+ EXPECT_GE(InsertFrame(kVideoFrameKey, true), kNoError);
+ // Insert enough frames to have too long non-decodable sequence, except that
+ // we don't have any losses.
+ for (int i = 0; i < kMaxNonDecodableDurationFrames; ++i) {
+ EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
+ }
+ // Insert an incomplete frame.
+ EXPECT_GE(InsertFrame(kVideoFrameDelta, false), kNoError);
+ // Advance time until it's time to decode the key frame.
+ clock_->AdvanceTimeMilliseconds(kMinDelayMs - clock_->TimeInMilliseconds() -
+ key_frame_inserted);
+ EXPECT_TRUE(DecodeNextFrame());
+ // Make sure we don't get a key frame request since the non-decodable duration
+ // is only one frame.
+ bool request_key_frame = false;
+ std::vector<uint16_t> nack_list = receiver_.NackList(&request_key_frame);
+ EXPECT_FALSE(request_key_frame);
+}
+
+TEST_F(TestVCMReceiver, NonDecodableDuration_KeyFrameAfterIncompleteFrames) {
+  // Enable NACK, with no RTT thresholds for disabling retransmission delay.
+ receiver_.SetNackMode(kNack, -1, -1);
+ const size_t kMaxNackListSize = 1000;
+ const int kMaxPacketAgeToNack = 1000;
+ const int kMaxNonDecodableDuration = 500;
+ const int kMaxNonDecodableDurationFrames =
+ (kDefaultFrameRate * kMaxNonDecodableDuration + 500) / 1000;
+ const int kMinDelayMs = 500;
+ receiver_.SetNackSettings(kMaxNackListSize, kMaxPacketAgeToNack,
+ kMaxNonDecodableDuration);
+ receiver_.SetMinReceiverDelay(kMinDelayMs);
+ int64_t key_frame_inserted = clock_->TimeInMilliseconds();
+ EXPECT_GE(InsertFrame(kVideoFrameKey, true), kNoError);
+ // Insert an incomplete frame.
+ EXPECT_GE(InsertFrame(kVideoFrameDelta, false), kNoError);
+ // Insert enough frames to have too long non-decodable sequence.
+ for (int i = 0; i < kMaxNonDecodableDurationFrames; ++i) {
+ EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
+ }
+ EXPECT_GE(InsertFrame(kVideoFrameKey, true), kNoError);
+ // Advance time until it's time to decode the key frame.
+ clock_->AdvanceTimeMilliseconds(kMinDelayMs - clock_->TimeInMilliseconds() -
+ key_frame_inserted);
+ EXPECT_TRUE(DecodeNextFrame());
+ // Make sure we don't get a key frame request since we have a key frame
+ // in the list.
+ bool request_key_frame = false;
+ std::vector<uint16_t> nack_list = receiver_.NackList(&request_key_frame);
+ EXPECT_FALSE(request_key_frame);
+}
+
+// A simulated clock that, as time elapses, inserts frames into the jitter
+// buffer, based on its initial settings.
+class SimulatedClockWithFrames : public SimulatedClock {
+ public:
+ SimulatedClockWithFrames(StreamGenerator* stream_generator,
+ VCMReceiver* receiver)
+ : SimulatedClock(0),
+ stream_generator_(stream_generator),
+ receiver_(receiver) {}
+ virtual ~SimulatedClockWithFrames() {}
+
+  // If |stop_on_frame| is true and the next frame arrives between now and
+  // now + |milliseconds|, the clock will be advanced to the arrival time of
+  // the next frame.
+  // Otherwise, the clock will be advanced by |milliseconds|.
+ //
+ // For both cases, a frame will be inserted into the jitter buffer at the
+ // instant when the clock time is timestamps_.front().arrive_time.
+ //
+ // Return true if some frame arrives between now and now+|milliseconds|.
+ bool AdvanceTimeMilliseconds(int64_t milliseconds, bool stop_on_frame) {
+ return AdvanceTimeMicroseconds(milliseconds * 1000, stop_on_frame);
+ }
+
+ bool AdvanceTimeMicroseconds(int64_t microseconds, bool stop_on_frame) {
+ int64_t start_time = TimeInMicroseconds();
+ int64_t end_time = start_time + microseconds;
+ bool frame_injected = false;
+ while (!timestamps_.empty() &&
+ timestamps_.front().arrive_time <= end_time) {
+ RTC_DCHECK(timestamps_.front().arrive_time >= start_time);
+
+ SimulatedClock::AdvanceTimeMicroseconds(timestamps_.front().arrive_time -
+ TimeInMicroseconds());
+ GenerateAndInsertFrame((timestamps_.front().render_time + 500) / 1000);
+ timestamps_.pop();
+ frame_injected = true;
+
+ if (stop_on_frame)
+ return frame_injected;
+ }
+
+ if (TimeInMicroseconds() < end_time) {
+ SimulatedClock::AdvanceTimeMicroseconds(end_time - TimeInMicroseconds());
+ }
+ return frame_injected;
+ }
+
+  // Input timestamps are in milliseconds, and |arrive_timestamps| must be
+  // positive and in increasing order.
+  // |arrive_timestamps| determine when we are going to insert frames into the
+  // jitter buffer.
+  // |render_timestamps| are the timestamps carried by the frames.
+ void SetFrames(const int64_t* arrive_timestamps,
+ const int64_t* render_timestamps,
+ size_t size) {
+ int64_t previous_arrive_timestamp = 0;
+ for (size_t i = 0; i < size; i++) {
+ RTC_CHECK(arrive_timestamps[i] >= previous_arrive_timestamp);
+ timestamps_.push(TimestampPair(arrive_timestamps[i] * 1000,
+ render_timestamps[i] * 1000));
+ previous_arrive_timestamp = arrive_timestamps[i];
+ }
+ }
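+
+  // Example usage from the tests below (kFramePeriod = 40):
+  //   arrive_timestamps[i] =
+  //       (i + 1) * kFramePeriod + (i % 10) * ((i % 2) ? 1 : -1);
+  //   render_timestamps[i] = (i + 1) * kFramePeriod;
+  //   clock.SetFrames(arrive_timestamps, render_timestamps, kNumFrames);
+  // Advancing the clock then injects roughly one frame every 40 ms.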
+
+ private:
+ struct TimestampPair {
+ TimestampPair(int64_t arrive_timestamp, int64_t render_timestamp)
+ : arrive_time(arrive_timestamp), render_time(render_timestamp) {}
+
+ int64_t arrive_time;
+ int64_t render_time;
+ };
+
+ void GenerateAndInsertFrame(int64_t render_timestamp_ms) {
+ VCMPacket packet;
+ stream_generator_->GenerateFrame(FrameType::kVideoFrameKey,
+ 1, // media packets
+ 0, // empty packets
+ render_timestamp_ms);
+
+ bool packet_available = stream_generator_->PopPacket(&packet, 0);
+ EXPECT_TRUE(packet_available);
+ if (!packet_available)
+ return; // Return here to avoid crashes below.
+ receiver_->InsertPacket(packet, 640, 480);
+ }
+
+ std::queue<TimestampPair> timestamps_;
+ StreamGenerator* stream_generator_;
+ VCMReceiver* receiver_;
+};
+
+// Uses a SimulatedClockWithFrames. A Wait call will do either of these:
+// 1. If |stop_on_frame| is true, the clock will be advanced to the exact
+// instant that the first frame arrives and the frame will be inserted into
+// the jitter buffer, or the clock will be advanced to now + |max_time| if no
+// frame arrives in the window.
+// 2. If |stop_on_frame| is false, the clock will be advanced to
+// now + |max_time|, and all the frames arriving between now and
+// now + |max_time| will be inserted into the jitter buffer.
+//
+// This is used to simulate the jitter buffer getting packets from the
+// internet as time elapses.
+
+class FrameInjectEvent : public EventWrapper {
+ public:
+ FrameInjectEvent(SimulatedClockWithFrames* clock, bool stop_on_frame)
+ : clock_(clock), stop_on_frame_(stop_on_frame) {}
+
+ bool Set() override { return true; }
+
+ EventTypeWrapper Wait(unsigned long max_time) override { // NOLINT
+ if (clock_->AdvanceTimeMilliseconds(max_time, stop_on_frame_) &&
+ stop_on_frame_) {
+ return EventTypeWrapper::kEventSignaled;
+ } else {
+ return EventTypeWrapper::kEventTimeout;
+ }
+ }
+
+ private:
+ SimulatedClockWithFrames* clock_;
+ bool stop_on_frame_;
+};
+
+class VCMReceiverTimingTest : public ::testing::Test {
+ protected:
+ VCMReceiverTimingTest()
+
+ : clock_(&stream_generator_, &receiver_),
+ stream_generator_(0, clock_.TimeInMilliseconds()),
+ timing_(&clock_),
+ receiver_(
+ &timing_,
+ &clock_,
+ rtc::scoped_ptr<EventWrapper>(new FrameInjectEvent(&clock_, false)),
+ rtc::scoped_ptr<EventWrapper>(
+ new FrameInjectEvent(&clock_, true))) {}
+
+ virtual void SetUp() { receiver_.Reset(); }
+
+ SimulatedClockWithFrames clock_;
+ StreamGenerator stream_generator_;
+ VCMTiming timing_;
+ VCMReceiver receiver_;
+};
+
+// Test whether VCMReceiver::FrameForDecoding handles parameter
+// |max_wait_time_ms| correctly:
+// 1. The function execution should never take more than |max_wait_time_ms|.
+// 2. If the function exits before now + |max_wait_time_ms|, a frame must be
+// returned.
+TEST_F(VCMReceiverTimingTest, FrameForDecoding) {
+ const size_t kNumFrames = 100;
+ const int kFramePeriod = 40;
+ int64_t arrive_timestamps[kNumFrames];
+ int64_t render_timestamps[kNumFrames];
+ int64_t next_render_time;
+
+ // Construct test samples.
+ // render_timestamps are the timestamps stored in the Frame;
+ // arrive_timestamps controls when the Frame packet got received.
+ for (size_t i = 0; i < kNumFrames; i++) {
+ // Preset frame rate to 25Hz.
+ // But we add a reasonable deviation to arrive_timestamps to mimic Internet
+ // fluctuation.
+ arrive_timestamps[i] =
+ (i + 1) * kFramePeriod + (i % 10) * ((i % 2) ? 1 : -1);
+ render_timestamps[i] = (i + 1) * kFramePeriod;
+ }
+
+ clock_.SetFrames(arrive_timestamps, render_timestamps, kNumFrames);
+
+ // Record how many frames we finally get out of the receiver.
+ size_t num_frames_return = 0;
+
+ const int64_t kMaxWaitTime = 30;
+
+  // Ideally, we should get all frames that we input via SetFrames.
+  // In the case that FrameForDecoding drops frames by error, we rely on the
+  // build bot to kill the test when it hangs.
+ while (num_frames_return < kNumFrames) {
+ int64_t start_time = clock_.TimeInMilliseconds();
+ VCMEncodedFrame* frame =
+ receiver_.FrameForDecoding(kMaxWaitTime, &next_render_time, false);
+ int64_t end_time = clock_.TimeInMilliseconds();
+
+    // In any case, FrameForDecoding should not wait longer than
+    // max_wait_time.
+    // If we did not get a frame, it should have been waiting for exactly
+    // max_wait_time. (By the test samples constructed above, we are sure
+    // there is no timing error, so the only case in which it returns NULL is
+    // that it ran out of time.)
+ if (frame) {
+ receiver_.ReleaseFrame(frame);
+ ++num_frames_return;
+ EXPECT_GE(kMaxWaitTime, end_time - start_time);
+ } else {
+ EXPECT_EQ(kMaxWaitTime, end_time - start_time);
+ }
+ }
+}
+
+// Test whether VCMReceiver::FrameForDecoding handles parameter
+// |prefer_late_decoding| and |max_wait_time_ms| correctly:
+// 1. The function execution should never take more than |max_wait_time_ms|.
+// 2. If the function exits before now + |max_wait_time_ms|, a frame must be
+// returned and the end time must be equal to the render timestamp - delay
+// for decoding and rendering.
+TEST_F(VCMReceiverTimingTest, FrameForDecodingPreferLateDecoding) {
+ const size_t kNumFrames = 100;
+ const int kFramePeriod = 40;
+
+ int64_t arrive_timestamps[kNumFrames];
+ int64_t render_timestamps[kNumFrames];
+ int64_t next_render_time;
+
+ int render_delay_ms;
+ int max_decode_ms;
+ int dummy;
+ timing_.GetTimings(&dummy, &max_decode_ms, &dummy, &dummy, &dummy, &dummy,
+ &render_delay_ms);
+
+ // Construct test samples.
+ // render_timestamps are the timestamps stored in the Frame;
+ // arrive_timestamps controls when the Frame packet got received.
+ for (size_t i = 0; i < kNumFrames; i++) {
+ // Preset frame rate to 25Hz.
+ // But we add a reasonable deviation to arrive_timestamps to mimic Internet
+ // fluctuation.
+ arrive_timestamps[i] =
+ (i + 1) * kFramePeriod + (i % 10) * ((i % 2) ? 1 : -1);
+ render_timestamps[i] = (i + 1) * kFramePeriod;
+ }
+
+ clock_.SetFrames(arrive_timestamps, render_timestamps, kNumFrames);
+
+ // Record how many frames we finally get out of the receiver.
+ size_t num_frames_return = 0;
+ const int64_t kMaxWaitTime = 30;
+ bool prefer_late_decoding = true;
+ while (num_frames_return < kNumFrames) {
+ int64_t start_time = clock_.TimeInMilliseconds();
+
+ VCMEncodedFrame* frame = receiver_.FrameForDecoding(
+ kMaxWaitTime, &next_render_time, prefer_late_decoding);
+ int64_t end_time = clock_.TimeInMilliseconds();
+ if (frame) {
+ EXPECT_EQ(frame->RenderTimeMs() - max_decode_ms - render_delay_ms,
+ end_time);
+ receiver_.ReleaseFrame(frame);
+ ++num_frames_return;
+ } else {
+ EXPECT_EQ(kMaxWaitTime, end_time - start_time);
+ }
+ }
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/video_coding/rtt_filter.cc b/webrtc/modules/video_coding/rtt_filter.cc
new file mode 100644
index 0000000000..742f70f1c1
--- /dev/null
+++ b/webrtc/modules/video_coding/rtt_filter.cc
@@ -0,0 +1,165 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_coding/rtt_filter.h"
+
+#include <math.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "webrtc/modules/video_coding/internal_defines.h"
+
+namespace webrtc {
+
+VCMRttFilter::VCMRttFilter()
+ : _filtFactMax(35),
+ _jumpStdDevs(2.5),
+ _driftStdDevs(3.5),
+ _detectThreshold(kMaxDriftJumpCount) {
+ Reset();
+}
+
+VCMRttFilter& VCMRttFilter::operator=(const VCMRttFilter& rhs) {
+ if (this != &rhs) {
+ _gotNonZeroUpdate = rhs._gotNonZeroUpdate;
+ _avgRtt = rhs._avgRtt;
+ _varRtt = rhs._varRtt;
+ _maxRtt = rhs._maxRtt;
+ _filtFactCount = rhs._filtFactCount;
+ _jumpCount = rhs._jumpCount;
+ _driftCount = rhs._driftCount;
+ memcpy(_jumpBuf, rhs._jumpBuf, sizeof(_jumpBuf));
+ memcpy(_driftBuf, rhs._driftBuf, sizeof(_driftBuf));
+ }
+ return *this;
+}
+
+void VCMRttFilter::Reset() {
+ _gotNonZeroUpdate = false;
+ _avgRtt = 0;
+ _varRtt = 0;
+ _maxRtt = 0;
+ _filtFactCount = 1;
+ _jumpCount = 0;
+ _driftCount = 0;
+ memset(_jumpBuf, 0, sizeof(_jumpBuf));
+ memset(_driftBuf, 0, sizeof(_driftBuf));
+}
+
+void VCMRttFilter::Update(int64_t rttMs) {
+ if (!_gotNonZeroUpdate) {
+ if (rttMs == 0) {
+ return;
+ }
+ _gotNonZeroUpdate = true;
+ }
+
+ // Sanity check
+ if (rttMs > 3000) {
+ rttMs = 3000;
+ }
+
+ double filtFactor = 0;
+ if (_filtFactCount > 1) {
+ filtFactor = static_cast<double>(_filtFactCount - 1) / _filtFactCount;
+ }
+ _filtFactCount++;
+ if (_filtFactCount > _filtFactMax) {
+ // This prevents filtFactor from going above
+ // (_filtFactMax - 1) / _filtFactMax,
+ // e.g., _filtFactMax = 35 (the constructor default) => filtFactor = 34/35 ~ 0.97
+ _filtFactCount = _filtFactMax;
+ }
+ double oldAvg = _avgRtt;
+ double oldVar = _varRtt;
+ _avgRtt = filtFactor * _avgRtt + (1 - filtFactor) * rttMs;
+ _varRtt = filtFactor * _varRtt +
+ (1 - filtFactor) * (rttMs - _avgRtt) * (rttMs - _avgRtt);
+ _maxRtt = VCM_MAX(rttMs, _maxRtt);
+ if (!JumpDetection(rttMs) || !DriftDetection(rttMs)) {
+ // In some cases we don't want to update the statistics.
+ _avgRtt = oldAvg;
+ _varRtt = oldVar;
+ }
+}
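+
+// The update above is a standard exponentially weighted moving average: with
+// f = filtFactor,
+//   avg_n = f * avg_(n-1) + (1 - f) * rtt_n
+//   var_n = f * var_(n-1) + (1 - f) * (rtt_n - avg_n)^2
+// so a larger _filtFactMax gives the filter a longer memory.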
+
+bool VCMRttFilter::JumpDetection(int64_t rttMs) {
+ double diffFromAvg = _avgRtt - rttMs;
+ if (fabs(diffFromAvg) > _jumpStdDevs * sqrt(_varRtt)) {
+ int diffSign = (diffFromAvg >= 0) ? 1 : -1;
+ int jumpCountSign = (_jumpCount >= 0) ? 1 : -1;
+ if (diffSign != jumpCountSign) {
+ // Since the signs differ, the samples currently
+ // in the buffer are useless, as they represent a
+ // jump in a different direction.
+ _jumpCount = 0;
+ }
+ if (abs(_jumpCount) < kMaxDriftJumpCount) {
+ // Update the buffer used for the short time
+ // statistics.
+ // The sign of the diff is used for updating the counter since
+ // we want to use the same buffer for keeping track of when
+ // the RTT jumps down and up.
+ _jumpBuf[abs(_jumpCount)] = rttMs;
+ _jumpCount += diffSign;
+ }
+ if (abs(_jumpCount) >= _detectThreshold) {
+ // Detected an RTT jump
+ ShortRttFilter(_jumpBuf, abs(_jumpCount));
+ _filtFactCount = _detectThreshold + 1;
+ _jumpCount = 0;
+ } else {
+ return false;
+ }
+ } else {
+ _jumpCount = 0;
+ }
+ return true;
+}
+
+bool VCMRttFilter::DriftDetection(int64_t rttMs) {
+ if (_maxRtt - _avgRtt > _driftStdDevs * sqrt(_varRtt)) {
+ if (_driftCount < kMaxDriftJumpCount) {
+ // Update the buffer used for the short time
+ // statistics.
+ _driftBuf[_driftCount] = rttMs;
+ _driftCount++;
+ }
+ if (_driftCount >= _detectThreshold) {
+ // Detected an RTT drift
+ ShortRttFilter(_driftBuf, _driftCount);
+ _filtFactCount = _detectThreshold + 1;
+ _driftCount = 0;
+ }
+ } else {
+ _driftCount = 0;
+ }
+ return true;
+}
+
+void VCMRttFilter::ShortRttFilter(int64_t* buf, uint32_t length) {
+ if (length == 0) {
+ return;
+ }
+ _maxRtt = 0;
+ _avgRtt = 0;
+ for (uint32_t i = 0; i < length; i++) {
+ if (buf[i] > _maxRtt) {
+ _maxRtt = buf[i];
+ }
+ _avgRtt += buf[i];
+ }
+ _avgRtt = _avgRtt / static_cast<double>(length);
+}
+
+int64_t VCMRttFilter::RttMs() const {
+ return static_cast<int64_t>(_maxRtt + 0.5);
+}
+} // namespace webrtc
diff --git a/webrtc/modules/video_coding/rtt_filter.h b/webrtc/modules/video_coding/rtt_filter.h
new file mode 100644
index 0000000000..f5de532cfc
--- /dev/null
+++ b/webrtc/modules/video_coding/rtt_filter.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_RTT_FILTER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_RTT_FILTER_H_
+
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+class VCMRttFilter {
+ public:
+ VCMRttFilter();
+
+ VCMRttFilter& operator=(const VCMRttFilter& rhs);
+
+ // Resets the filter.
+ void Reset();
+ // Updates the filter with a new sample.
+ void Update(int64_t rttMs);
+ // A getter function for the current RTT level in ms.
+ int64_t RttMs() const;
+
+ private:
+ // The size of the drift and jump memory buffers
+ // and thus also the detection threshold for these
+ // detectors in number of samples.
+ enum { kMaxDriftJumpCount = 5 };
+ // Detects RTT jumps by comparing the difference between
+ // samples and the average to the standard deviation.
+ // Returns true if the long-term statistics should be updated
+ // and false otherwise.
+ bool JumpDetection(int64_t rttMs);
+ // Detects RTT drifts by comparing the difference between
+ // the max and the average to the standard deviation.
+ // Returns true if the long-term statistics should be updated
+ // and false otherwise.
+ bool DriftDetection(int64_t rttMs);
+ // Computes the short time average and maximum of the vector buf.
+ void ShortRttFilter(int64_t* buf, uint32_t length);
+
+ bool _gotNonZeroUpdate;
+ double _avgRtt;
+ double _varRtt;
+ int64_t _maxRtt;
+ uint32_t _filtFactCount;
+ const uint32_t _filtFactMax;
+ const double _jumpStdDevs;
+ const double _driftStdDevs;
+ int32_t _jumpCount;
+ int32_t _driftCount;
+ const int32_t _detectThreshold;
+ int64_t _jumpBuf[kMaxDriftJumpCount];
+ int64_t _driftBuf[kMaxDriftJumpCount];
+};
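+
+// A minimal usage sketch: feed the filter RTT samples as they are measured
+// and read back the filtered estimate, e.g.:
+//   VCMRttFilter filter;
+//   filter.Update(120);  // RTT sample in ms.
+//   filter.Update(95);
+//   int64_t current_rtt_ms = filter.RttMs();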
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_RTT_FILTER_H_
diff --git a/webrtc/modules/video_coding/session_info.cc b/webrtc/modules/video_coding/session_info.cc
new file mode 100644
index 0000000000..8701098639
--- /dev/null
+++ b/webrtc/modules/video_coding/session_info.cc
@@ -0,0 +1,569 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_coding/session_info.h"
+
+#include "webrtc/base/logging.h"
+#include "webrtc/modules/video_coding/packet.h"
+
+namespace webrtc {
+
+namespace {
+
+// Reads a 16-bit big-endian value from |dataBuffer|.
+uint16_t BufferToUWord16(const uint8_t* dataBuffer) {
+ return (dataBuffer[0] << 8) | dataBuffer[1];
+}
+
+} // namespace
+
+VCMSessionInfo::VCMSessionInfo()
+ : session_nack_(false),
+ complete_(false),
+ decodable_(false),
+ frame_type_(kVideoFrameDelta),
+ packets_(),
+ empty_seq_num_low_(-1),
+ empty_seq_num_high_(-1),
+ first_packet_seq_num_(-1),
+ last_packet_seq_num_(-1) {}
+
+void VCMSessionInfo::UpdateDataPointers(const uint8_t* old_base_ptr,
+ const uint8_t* new_base_ptr) {
+ for (PacketIterator it = packets_.begin(); it != packets_.end(); ++it)
+ if ((*it).dataPtr != NULL) {
+ assert(old_base_ptr != NULL && new_base_ptr != NULL);
+ (*it).dataPtr = new_base_ptr + ((*it).dataPtr - old_base_ptr);
+ }
+}
+
+int VCMSessionInfo::LowSequenceNumber() const {
+ if (packets_.empty())
+ return empty_seq_num_low_;
+ return packets_.front().seqNum;
+}
+
+int VCMSessionInfo::HighSequenceNumber() const {
+ if (packets_.empty())
+ return empty_seq_num_high_;
+ if (empty_seq_num_high_ == -1)
+ return packets_.back().seqNum;
+ return LatestSequenceNumber(packets_.back().seqNum, empty_seq_num_high_);
+}
+
+int VCMSessionInfo::PictureId() const {
+ if (packets_.empty())
+ return kNoPictureId;
+ if (packets_.front().codecSpecificHeader.codec == kRtpVideoVp8) {
+ return packets_.front().codecSpecificHeader.codecHeader.VP8.pictureId;
+ } else if (packets_.front().codecSpecificHeader.codec == kRtpVideoVp9) {
+ return packets_.front().codecSpecificHeader.codecHeader.VP9.picture_id;
+ } else {
+ return kNoPictureId;
+ }
+}
+
+int VCMSessionInfo::TemporalId() const {
+ if (packets_.empty())
+ return kNoTemporalIdx;
+ if (packets_.front().codecSpecificHeader.codec == kRtpVideoVp8) {
+ return packets_.front().codecSpecificHeader.codecHeader.VP8.temporalIdx;
+ } else if (packets_.front().codecSpecificHeader.codec == kRtpVideoVp9) {
+ return packets_.front().codecSpecificHeader.codecHeader.VP9.temporal_idx;
+ } else {
+ return kNoTemporalIdx;
+ }
+}
+
+bool VCMSessionInfo::LayerSync() const {
+ if (packets_.empty())
+ return false;
+ if (packets_.front().codecSpecificHeader.codec == kRtpVideoVp8) {
+ return packets_.front().codecSpecificHeader.codecHeader.VP8.layerSync;
+ } else if (packets_.front().codecSpecificHeader.codec == kRtpVideoVp9) {
+ return packets_.front()
+ .codecSpecificHeader.codecHeader.VP9.temporal_up_switch;
+ } else {
+ return false;
+ }
+}
+
+int VCMSessionInfo::Tl0PicId() const {
+ if (packets_.empty())
+ return kNoTl0PicIdx;
+ if (packets_.front().codecSpecificHeader.codec == kRtpVideoVp8) {
+ return packets_.front().codecSpecificHeader.codecHeader.VP8.tl0PicIdx;
+ } else if (packets_.front().codecSpecificHeader.codec == kRtpVideoVp9) {
+ return packets_.front().codecSpecificHeader.codecHeader.VP9.tl0_pic_idx;
+ } else {
+ return kNoTl0PicIdx;
+ }
+}
+
+bool VCMSessionInfo::NonReference() const {
+ if (packets_.empty() ||
+ packets_.front().codecSpecificHeader.codec != kRtpVideoVp8)
+ return false;
+ return packets_.front().codecSpecificHeader.codecHeader.VP8.nonReference;
+}
+
+void VCMSessionInfo::SetGofInfo(const GofInfoVP9& gof_info, size_t idx) {
+ if (packets_.empty() ||
+ packets_.front().codecSpecificHeader.codec != kRtpVideoVp9 ||
+ packets_.front().codecSpecificHeader.codecHeader.VP9.flexible_mode) {
+ return;
+ }
+ packets_.front().codecSpecificHeader.codecHeader.VP9.temporal_idx =
+ gof_info.temporal_idx[idx];
+ packets_.front().codecSpecificHeader.codecHeader.VP9.temporal_up_switch =
+ gof_info.temporal_up_switch[idx];
+ packets_.front().codecSpecificHeader.codecHeader.VP9.num_ref_pics =
+ gof_info.num_ref_pics[idx];
+ for (uint8_t i = 0; i < gof_info.num_ref_pics[idx]; ++i) {
+ packets_.front().codecSpecificHeader.codecHeader.VP9.pid_diff[i] =
+ gof_info.pid_diff[idx][i];
+ }
+}
+
+void VCMSessionInfo::Reset() {
+ session_nack_ = false;
+ complete_ = false;
+ decodable_ = false;
+ frame_type_ = kVideoFrameDelta;
+ packets_.clear();
+ empty_seq_num_low_ = -1;
+ empty_seq_num_high_ = -1;
+ first_packet_seq_num_ = -1;
+ last_packet_seq_num_ = -1;
+}
+
+size_t VCMSessionInfo::SessionLength() const {
+ size_t length = 0;
+ for (PacketIteratorConst it = packets_.begin(); it != packets_.end(); ++it)
+ length += (*it).sizeBytes;
+ return length;
+}
+
+int VCMSessionInfo::NumPackets() const {
+ return packets_.size();
+}
+
+size_t VCMSessionInfo::InsertBuffer(uint8_t* frame_buffer,
+ PacketIterator packet_it) {
+ VCMPacket& packet = *packet_it;
+ PacketIterator it;
+
+ // Calculate the offset into the frame buffer for this packet.
+ size_t offset = 0;
+ for (it = packets_.begin(); it != packet_it; ++it)
+ offset += (*it).sizeBytes;
+
+ // Set the data pointer to point to the start of this packet in the
+ // frame buffer.
+ const uint8_t* packet_buffer = packet.dataPtr;
+ packet.dataPtr = frame_buffer + offset;
+
+ // We handle H.264 STAP-A packets in a special way as we need to remove the
+ // two length bytes between each NAL unit, and potentially add start codes.
+ // TODO(pbos): Remove H264 parsing from this step and use a fragmentation
+ // header supplied by the H264 depacketizer.
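+ // A STAP-A payload is laid out as
+ //   [STAP-A NAL header (1 byte)][size 1 (2 bytes)][NALU 1][size 2][NALU 2]...
+ // and is rewritten below as
+ //   [start code][NALU 1][start code][NALU 2]...
+ // where start codes are only inserted when |packet.insertStartCode| is set.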
+ const size_t kH264NALHeaderLengthInBytes = 1;
+ const size_t kLengthFieldLength = 2;
+ if (packet.codecSpecificHeader.codec == kRtpVideoH264 &&
+ packet.codecSpecificHeader.codecHeader.H264.packetization_type ==
+ kH264StapA) {
+ size_t required_length = 0;
+ const uint8_t* nalu_ptr = packet_buffer + kH264NALHeaderLengthInBytes;
+ while (nalu_ptr < packet_buffer + packet.sizeBytes) {
+ size_t length = BufferToUWord16(nalu_ptr);
+ required_length +=
+ length + (packet.insertStartCode ? kH264StartCodeLengthBytes : 0);
+ nalu_ptr += kLengthFieldLength + length;
+ }
+ ShiftSubsequentPackets(packet_it, required_length);
+ nalu_ptr = packet_buffer + kH264NALHeaderLengthInBytes;
+ uint8_t* frame_buffer_ptr = frame_buffer + offset;
+ while (nalu_ptr < packet_buffer + packet.sizeBytes) {
+ size_t length = BufferToUWord16(nalu_ptr);
+ nalu_ptr += kLengthFieldLength;
+ frame_buffer_ptr +=
+ Insert(nalu_ptr, length, packet.insertStartCode, frame_buffer_ptr);
+ nalu_ptr += length;
+ }
+ packet.sizeBytes = required_length;
+ return packet.sizeBytes;
+ }
+ ShiftSubsequentPackets(
+ packet_it, packet.sizeBytes +
+ (packet.insertStartCode ? kH264StartCodeLengthBytes : 0));
+
+ packet.sizeBytes =
+ Insert(packet_buffer, packet.sizeBytes, packet.insertStartCode,
+ const_cast<uint8_t*>(packet.dataPtr));
+ return packet.sizeBytes;
+}
+
+size_t VCMSessionInfo::Insert(const uint8_t* buffer,
+ size_t length,
+ bool insert_start_code,
+ uint8_t* frame_buffer) {
+ if (insert_start_code) {
+ const unsigned char startCode[] = {0, 0, 0, 1};
+ memcpy(frame_buffer, startCode, kH264StartCodeLengthBytes);
+ }
+ memcpy(frame_buffer + (insert_start_code ? kH264StartCodeLengthBytes : 0),
+ buffer, length);
+ length += (insert_start_code ? kH264StartCodeLengthBytes : 0);
+
+ return length;
+}
+
+void VCMSessionInfo::ShiftSubsequentPackets(PacketIterator it,
+ int steps_to_shift) {
+ ++it;
+ if (it == packets_.end())
+ return;
+ uint8_t* first_packet_ptr = const_cast<uint8_t*>((*it).dataPtr);
+ int shift_length = 0;
+ // Calculate the total move length and move the data pointers in advance.
+ for (; it != packets_.end(); ++it) {
+ shift_length += (*it).sizeBytes;
+ if ((*it).dataPtr != NULL)
+ (*it).dataPtr += steps_to_shift;
+ }
+ memmove(first_packet_ptr + steps_to_shift, first_packet_ptr, shift_length);
+}
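+
+// For example, when a 10-byte packet is inserted in front of packets B and C,
+// their payloads are memmoved 10 bytes forward in the frame buffer and their
+// data pointers are advanced accordingly, leaving room for the new payload; a
+// negative |steps_to_shift| compacts the buffer after packet data is deleted.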
+
+void VCMSessionInfo::UpdateCompleteSession() {
+ if (HaveFirstPacket() && HaveLastPacket()) {
+ // Do we have all the packets in this session?
+ bool complete_session = true;
+ PacketIterator it = packets_.begin();
+ PacketIterator prev_it = it;
+ ++it;
+ for (; it != packets_.end(); ++it) {
+ if (!InSequence(it, prev_it)) {
+ complete_session = false;
+ break;
+ }
+ prev_it = it;
+ }
+ complete_ = complete_session;
+ }
+}
+
+void VCMSessionInfo::UpdateDecodableSession(const FrameData& frame_data) {
+ // Irrelevant if session is already complete or decodable
+ if (complete_ || decodable_)
+ return;
+ // TODO(agalusza): Account for bursty loss.
+ // TODO(agalusza): Refine these values to better approximate optimal ones.
+ // Do not decode frames if the RTT is lower than this.
+ const int64_t kRttThreshold = 100;
+ // Do not decode frames if the number of packets is between these two
+ // thresholds.
+ const float kLowPacketPercentageThreshold = 0.2f;
+ const float kHighPacketPercentageThreshold = 0.8f;
+ if (frame_data.rtt_ms < kRttThreshold || frame_type_ == kVideoFrameKey ||
+ !HaveFirstPacket() ||
+ (NumPackets() <= kHighPacketPercentageThreshold *
+ frame_data.rolling_average_packets_per_frame &&
+ NumPackets() > kLowPacketPercentageThreshold *
+ frame_data.rolling_average_packets_per_frame))
+ return;
+
+ decodable_ = true;
+}
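+
+// For example, with a rolling average of 10 packets per frame, a delta frame
+// that has its first packet and an RTT of at least 100 ms is marked decodable
+// once at most 2 (<= 0.2 * 10) or more than 8 (> 0.8 * 10) of its packets
+// have been received.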
+
+bool VCMSessionInfo::complete() const {
+ return complete_;
+}
+
+bool VCMSessionInfo::decodable() const {
+ return decodable_;
+}
+
+// Find the end of the NAL unit which the packet pointed to by |packet_it|
+// belongs to. Returns an iterator to the last packet of the frame if the end
+// of the NAL unit wasn't found.
+VCMSessionInfo::PacketIterator VCMSessionInfo::FindNaluEnd(
+ PacketIterator packet_it) const {
+ if ((*packet_it).completeNALU == kNaluEnd ||
+ (*packet_it).completeNALU == kNaluComplete) {
+ return packet_it;
+ }
+ // Find the end of the NAL unit.
+ for (; packet_it != packets_.end(); ++packet_it) {
+ if (((*packet_it).completeNALU == kNaluComplete &&
+ (*packet_it).sizeBytes > 0) ||
+ // Found next NALU.
+ (*packet_it).completeNALU == kNaluStart)
+ return --packet_it;
+ if ((*packet_it).completeNALU == kNaluEnd)
+ return packet_it;
+ }
+ // The end wasn't found.
+ return --packet_it;
+}
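+
+// For example, a NAL unit split across three packets is marked kNaluStart,
+// kNaluIncomplete, kNaluEnd, while a NAL unit carried in a single packet is
+// marked kNaluComplete.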
+
+size_t VCMSessionInfo::DeletePacketData(PacketIterator start,
+ PacketIterator end) {
+ size_t bytes_to_delete = 0; // The number of bytes to delete.
+ PacketIterator packet_after_end = end;
+ ++packet_after_end;
+
+ // Get the number of bytes to delete.
+ // Clear the size of these packets.
+ for (PacketIterator it = start; it != packet_after_end; ++it) {
+ bytes_to_delete += (*it).sizeBytes;
+ (*it).sizeBytes = 0;
+ (*it).dataPtr = NULL;
+ }
+ if (bytes_to_delete > 0)
+ ShiftSubsequentPackets(end, -static_cast<int>(bytes_to_delete));
+ return bytes_to_delete;
+}
+
+size_t VCMSessionInfo::BuildVP8FragmentationHeader(
+ uint8_t* frame_buffer,
+ size_t frame_buffer_length,
+ RTPFragmentationHeader* fragmentation) {
+ size_t new_length = 0;
+ // Allocate space for max number of partitions
+ fragmentation->VerifyAndAllocateFragmentationHeader(kMaxVP8Partitions);
+ fragmentation->fragmentationVectorSize = 0;
+ memset(fragmentation->fragmentationLength, 0,
+ kMaxVP8Partitions * sizeof(size_t));
+ if (packets_.empty())
+ return new_length;
+ PacketIterator it = FindNextPartitionBeginning(packets_.begin());
+ while (it != packets_.end()) {
+ const int partition_id =
+ (*it).codecSpecificHeader.codecHeader.VP8.partitionId;
+ PacketIterator partition_end = FindPartitionEnd(it);
+ fragmentation->fragmentationOffset[partition_id] =
+ (*it).dataPtr - frame_buffer;
+ assert(fragmentation->fragmentationOffset[partition_id] <
+ frame_buffer_length);
+ fragmentation->fragmentationLength[partition_id] =
+ (*partition_end).dataPtr + (*partition_end).sizeBytes - (*it).dataPtr;
+ assert(fragmentation->fragmentationLength[partition_id] <=
+ frame_buffer_length);
+ new_length += fragmentation->fragmentationLength[partition_id];
+ ++partition_end;
+ it = FindNextPartitionBeginning(partition_end);
+ if (partition_id + 1 > fragmentation->fragmentationVectorSize)
+ fragmentation->fragmentationVectorSize = partition_id + 1;
+ }
+ // Set all empty fragments to start where the previous fragment ends,
+ // and have zero length.
+ if (fragmentation->fragmentationLength[0] == 0)
+ fragmentation->fragmentationOffset[0] = 0;
+ for (int i = 1; i < fragmentation->fragmentationVectorSize; ++i) {
+ if (fragmentation->fragmentationLength[i] == 0)
+ fragmentation->fragmentationOffset[i] =
+ fragmentation->fragmentationOffset[i - 1] +
+ fragmentation->fragmentationLength[i - 1];
+ assert(i == 0 ||
+ fragmentation->fragmentationOffset[i] >=
+ fragmentation->fragmentationOffset[i - 1]);
+ }
+ assert(new_length <= frame_buffer_length);
+ return new_length;
+}
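+
+// For example, a frame with two 10-byte packets in partition 0 and one
+// 10-byte packet in partition 1 yields fragmentationOffset = {0, 20},
+// fragmentationLength = {20, 10} and a fragmentationVectorSize of 2.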
+
+VCMSessionInfo::PacketIterator VCMSessionInfo::FindNextPartitionBeginning(
+ PacketIterator it) const {
+ while (it != packets_.end()) {
+ if ((*it).codecSpecificHeader.codecHeader.VP8.beginningOfPartition) {
+ return it;
+ }
+ ++it;
+ }
+ return it;
+}
+
+VCMSessionInfo::PacketIterator VCMSessionInfo::FindPartitionEnd(
+ PacketIterator it) const {
+ assert((*it).codec == kVideoCodecVP8);
+ PacketIterator prev_it = it;
+ const int partition_id =
+ (*it).codecSpecificHeader.codecHeader.VP8.partitionId;
+ while (it != packets_.end()) {
+ bool beginning =
+ (*it).codecSpecificHeader.codecHeader.VP8.beginningOfPartition;
+ int current_partition_id =
+ (*it).codecSpecificHeader.codecHeader.VP8.partitionId;
+ bool packet_loss_found = (!beginning && !InSequence(it, prev_it));
+ if (packet_loss_found ||
+ (beginning && current_partition_id != partition_id)) {
+ // Missing packet, the previous packet was the last in sequence.
+ return prev_it;
+ }
+ prev_it = it;
+ ++it;
+ }
+ return prev_it;
+}
+
+bool VCMSessionInfo::InSequence(const PacketIterator& packet_it,
+ const PacketIterator& prev_packet_it) {
+ // If the two iterators are pointing to the same packet they are considered
+ // to be in sequence.
+ return (packet_it == prev_packet_it ||
+ (static_cast<uint16_t>((*prev_packet_it).seqNum + 1) ==
+ (*packet_it).seqNum));
+}
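+
+// For example, a packet with seqNum 0x0000 is in sequence after a packet with
+// seqNum 0xFFFF: the uint16_t cast wraps 0xFFFF + 1 back to 0x0000.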
+
+size_t VCMSessionInfo::MakeDecodable() {
+ size_t return_length = 0;
+ if (packets_.empty()) {
+ return 0;
+ }
+ PacketIterator it = packets_.begin();
+ // Make sure we remove the first NAL unit if it's not decodable.
+ if ((*it).completeNALU == kNaluIncomplete || (*it).completeNALU == kNaluEnd) {
+ PacketIterator nalu_end = FindNaluEnd(it);
+ return_length += DeletePacketData(it, nalu_end);
+ it = nalu_end;
+ }
+ PacketIterator prev_it = it;
+ // Take care of the rest of the NAL units.
+ for (; it != packets_.end(); ++it) {
+ bool start_of_nalu = ((*it).completeNALU == kNaluStart ||
+ (*it).completeNALU == kNaluComplete);
+ if (!start_of_nalu && !InSequence(it, prev_it)) {
+ // Found a sequence number gap due to packet loss.
+ PacketIterator nalu_end = FindNaluEnd(it);
+ return_length += DeletePacketData(it, nalu_end);
+ it = nalu_end;
+ }
+ prev_it = it;
+ }
+ return return_length;
+}
+
+void VCMSessionInfo::SetNotDecodableIfIncomplete() {
+ // We don't need to check for completeness first because the two are
+ // orthogonal. If complete_ is true, decodable_ is irrelevant.
+ decodable_ = false;
+}
+
+bool VCMSessionInfo::HaveFirstPacket() const {
+ return !packets_.empty() && (first_packet_seq_num_ != -1);
+}
+
+bool VCMSessionInfo::HaveLastPacket() const {
+ return !packets_.empty() && (last_packet_seq_num_ != -1);
+}
+
+bool VCMSessionInfo::session_nack() const {
+ return session_nack_;
+}
+
+int VCMSessionInfo::InsertPacket(const VCMPacket& packet,
+ uint8_t* frame_buffer,
+ VCMDecodeErrorMode decode_error_mode,
+ const FrameData& frame_data) {
+ if (packet.frameType == kEmptyFrame) {
+ // Update sequence number of an empty packet.
+ // Only media packets are inserted into the packet list.
+ InformOfEmptyPacket(packet.seqNum);
+ return 0;
+ }
+
+ if (packets_.size() == kMaxPacketsInSession) {
+ LOG(LS_ERROR) << "Max number of packets per frame has been reached.";
+ return -1;
+ }
+
+ // Find the position of this packet in the packet list in sequence number
+ // order and insert it. Loop over the list in reverse order.
+ ReversePacketIterator rit = packets_.rbegin();
+ for (; rit != packets_.rend(); ++rit)
+ if (LatestSequenceNumber(packet.seqNum, (*rit).seqNum) == packet.seqNum)
+ break;
+
+ // Check for duplicate packets.
+ if (rit != packets_.rend() && (*rit).seqNum == packet.seqNum &&
+ (*rit).sizeBytes > 0)
+ return -2;
+
+ if (packet.codec == kVideoCodecH264) {
+ frame_type_ = packet.frameType;
+ if (packet.isFirstPacket &&
+ (first_packet_seq_num_ == -1 ||
+ IsNewerSequenceNumber(first_packet_seq_num_, packet.seqNum))) {
+ first_packet_seq_num_ = packet.seqNum;
+ }
+ if (packet.markerBit &&
+ (last_packet_seq_num_ == -1 ||
+ IsNewerSequenceNumber(packet.seqNum, last_packet_seq_num_))) {
+ last_packet_seq_num_ = packet.seqNum;
+ }
+ } else {
+ // Only insert media packets between the first and last packets (when
+ // available).
+ // The check is placed here so that duplicate packets are properly
+ // accounted for.
+ // Check whether this is the first packet (only valid for some codecs);
+ // it should only be set for one packet per session.
+ if (packet.isFirstPacket && first_packet_seq_num_ == -1) {
+ // The first packet in a frame signals the frame type.
+ frame_type_ = packet.frameType;
+ // Store the sequence number for the first packet.
+ first_packet_seq_num_ = static_cast<int>(packet.seqNum);
+ } else if (first_packet_seq_num_ != -1 &&
+ IsNewerSequenceNumber(first_packet_seq_num_, packet.seqNum)) {
+ LOG(LS_WARNING) << "Received packet with a sequence number which is out "
+ "of frame boundaries";
+ return -3;
+ } else if (frame_type_ == kEmptyFrame && packet.frameType != kEmptyFrame) {
+ // Update the frame type with the type of the first media packet.
+ // TODO(mikhal): Can this trigger?
+ frame_type_ = packet.frameType;
+ }
+
+ // Track the marker bit, should only be set for one packet per session.
+ if (packet.markerBit && last_packet_seq_num_ == -1) {
+ last_packet_seq_num_ = static_cast<int>(packet.seqNum);
+ } else if (last_packet_seq_num_ != -1 &&
+ IsNewerSequenceNumber(packet.seqNum, last_packet_seq_num_)) {
+ LOG(LS_WARNING) << "Received packet with a sequence number which is out "
+ "of frame boundaries";
+ return -3;
+ }
+ }
+
+ // The insert operation effectively invalidates |rit|: a reverse iterator
+ // dereferences the element before its base, which after the insert is the
+ // newly inserted packet.
+ PacketIterator packet_list_it = packets_.insert(rit.base(), packet);
+
+ size_t returnLength = InsertBuffer(frame_buffer, packet_list_it);
+ UpdateCompleteSession();
+ if (decode_error_mode == kWithErrors)
+ decodable_ = true;
+ else if (decode_error_mode == kSelectiveErrors)
+ UpdateDecodableSession(frame_data);
+ return static_cast<int>(returnLength);
+}
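+
+// A note on the return values above: InsertPacket returns the number of
+// bytes inserted on success (0 for empty packets), -1 when the per-frame
+// packet limit is reached, -2 for duplicate packets, and -3 for packets
+// outside the frame boundaries.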
+
+void VCMSessionInfo::InformOfEmptyPacket(uint16_t seq_num) {
+ // Empty packets may be FEC or filler packets. They are sequential and
+ // follow the data packets; therefore, we only need to keep track of the
+ // high and low sequence numbers and may assume that the packets in between
+ // are empty packets belonging to the same frame (timestamp).
+ if (empty_seq_num_high_ == -1)
+ empty_seq_num_high_ = seq_num;
+ else
+ empty_seq_num_high_ = LatestSequenceNumber(seq_num, empty_seq_num_high_);
+ if (empty_seq_num_low_ == -1 ||
+ IsNewerSequenceNumber(empty_seq_num_low_, seq_num))
+ empty_seq_num_low_ = seq_num;
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/video_coding/session_info.h b/webrtc/modules/video_coding/session_info.h
new file mode 100644
index 0000000000..e9ff25166d
--- /dev/null
+++ b/webrtc/modules/video_coding/session_info.h
@@ -0,0 +1,170 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_SESSION_INFO_H_
+#define WEBRTC_MODULES_VIDEO_CODING_SESSION_INFO_H_
+
+#include <list>
+
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/video_coding/include/video_coding.h"
+#include "webrtc/modules/video_coding/packet.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+// Used to pass data from jitter buffer to session info.
+// This data is then used in determining whether a frame is decodable.
+struct FrameData {
+ int64_t rtt_ms;
+ float rolling_average_packets_per_frame;
+};
+
+class VCMSessionInfo {
+ public:
+ VCMSessionInfo();
+
+ void UpdateDataPointers(const uint8_t* old_base_ptr,
+ const uint8_t* new_base_ptr);
+ // NACK - Building the NACK lists.
+ // Build hard NACK list: Zero out all entries in list up to and including
+ // _lowSeqNum.
+ int BuildHardNackList(int* seq_num_list,
+ int seq_num_list_length,
+ int nack_seq_nums_index);
+
+ // Build soft NACK list: Zero out only a subset of the packets, discard
+ // empty packets.
+ int BuildSoftNackList(int* seq_num_list,
+ int seq_num_list_length,
+ int nack_seq_nums_index,
+ int rtt_ms);
+ void Reset();
+ int InsertPacket(const VCMPacket& packet,
+ uint8_t* frame_buffer,
+ VCMDecodeErrorMode enable_decodable_state,
+ const FrameData& frame_data);
+ bool complete() const;
+ bool decodable() const;
+
+ // Builds fragmentation headers for VP8, each fragment being a decodable
+ // VP8 partition. Returns the total number of bytes which are decodable. Is
+ // used instead of MakeDecodable for VP8.
+ size_t BuildVP8FragmentationHeader(uint8_t* frame_buffer,
+ size_t frame_buffer_length,
+ RTPFragmentationHeader* fragmentation);
+
+ // Makes the frame decodable, i.e., leaves only decodable NALUs. All
+ // non-decodable NALUs are deleted and the remaining packets are moved in
+ // memory to remove any empty space.
+ // Returns the number of bytes deleted from the session.
+ size_t MakeDecodable();
+
+ // Sets decodable_ to false.
+ // Used by the dual decoder. After the mode is changed to kNoErrors from
+ // kWithErrors or kSelectiveErrors, any states that have been marked
+ // decodable and are not complete are marked as non-decodable.
+ void SetNotDecodableIfIncomplete();
+
+ size_t SessionLength() const;
+ int NumPackets() const;
+ bool HaveFirstPacket() const;
+ bool HaveLastPacket() const;
+ bool session_nack() const;
+ webrtc::FrameType FrameType() const { return frame_type_; }
+ int LowSequenceNumber() const;
+
+ // Returns highest sequence number, media or empty.
+ int HighSequenceNumber() const;
+ int PictureId() const;
+ int TemporalId() const;
+ bool LayerSync() const;
+ int Tl0PicId() const;
+ bool NonReference() const;
+
+ void SetGofInfo(const GofInfoVP9& gof_info, size_t idx);
+
+ // The number of packets discarded because the decoder can't make use of
+ // them.
+ int packets_not_decodable() const;
+
+ private:
+ enum { kMaxVP8Partitions = 9 };
+
+ typedef std::list<VCMPacket> PacketList;
+ typedef PacketList::iterator PacketIterator;
+ typedef PacketList::const_iterator PacketIteratorConst;
+ typedef PacketList::reverse_iterator ReversePacketIterator;
+
+ void InformOfEmptyPacket(uint16_t seq_num);
+
+ // Finds the packet at the beginning of the next VP8 partition. If
+ // none is found the returned iterator points to |packets_.end()|.
+ // |it| is expected to point to the last packet of the previous partition,
+ // or to the first packet of the frame.
+ PacketIterator FindNextPartitionBeginning(PacketIterator it) const;
+
+ // Returns an iterator pointing to the last packet of the partition pointed to
+ // by |it|.
+ PacketIterator FindPartitionEnd(PacketIterator it) const;
+ static bool InSequence(const PacketIterator& it,
+ const PacketIterator& prev_it);
+ size_t InsertBuffer(uint8_t* frame_buffer, PacketIterator packetIterator);
+ size_t Insert(const uint8_t* buffer,
+ size_t length,
+ bool insert_start_code,
+ uint8_t* frame_buffer);
+ void ShiftSubsequentPackets(PacketIterator it, int steps_to_shift);
+ PacketIterator FindNaluEnd(PacketIterator packet_iter) const;
+ // Deletes the data of all packets between |start| and |end|, inclusively.
+ // Note that this function doesn't delete the actual packets.
+ size_t DeletePacketData(PacketIterator start, PacketIterator end);
+ void UpdateCompleteSession();
+
+ // When enabled, determines whether the session is decodable, i.e.,
+ // incomplete but still worth sending to the decoder.
+ // Note: the definition assumes random loss.
+ // A frame is defined to be decodable when:
+ //  - The round trip time is higher than the threshold.
+ //  - It is not a key frame.
+ //  - It has the first packet: in VP8 the first packet contains all or part
+ //    of the first partition, which consists of the most relevant
+ //    information for decoding.
+ //  - Either more than the upper threshold of the average number of packets
+ //    per frame is present, or less than the lower threshold of the average
+ //    number of packets per frame is present: the latter suggests a small
+ //    frame. Such a frame is unlikely to contain many motion vectors, so
+ //    having the first packet will likely suffice. Once we have more than
+ //    the lower threshold of the frame, we know that the frame is medium or
+ //    large-sized.
+ void UpdateDecodableSession(const FrameData& frame_data);
+
+ // If this session has been NACKed by the jitter buffer.
+ bool session_nack_;
+ bool complete_;
+ bool decodable_;
+ webrtc::FrameType frame_type_;
+ // Packets in this frame.
+ PacketList packets_;
+ int empty_seq_num_low_;
+ int empty_seq_num_high_;
+
+ // The following two variables correspond to the first and last media packets
+ // in a session defined by the first packet flag and the marker bit.
+ // They are not necessarily equal to the front and back packets, as packets
+ // may enter out of order.
+ // TODO(mikhal): Refactor the list to use a map.
+ int first_packet_seq_num_;
+ int last_packet_seq_num_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_SESSION_INFO_H_
diff --git a/webrtc/modules/video_coding/session_info_unittest.cc b/webrtc/modules/video_coding/session_info_unittest.cc
new file mode 100644
index 0000000000..4019d63a5f
--- /dev/null
+++ b/webrtc/modules/video_coding/session_info_unittest.cc
@@ -0,0 +1,1030 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string.h>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/video_coding/packet.h"
+#include "webrtc/modules/video_coding/session_info.h"
+
+namespace webrtc {
+
+class TestSessionInfo : public ::testing::Test {
+ protected:
+ virtual void SetUp() {
+ memset(packet_buffer_, 0, sizeof(packet_buffer_));
+ memset(frame_buffer_, 0, sizeof(frame_buffer_));
+ session_.Reset();
+ packet_.Reset();
+ packet_.frameType = kVideoFrameDelta;
+ packet_.sizeBytes = packet_buffer_size();
+ packet_.dataPtr = packet_buffer_;
+ packet_.seqNum = 0;
+ packet_.timestamp = 0;
+ frame_data.rtt_ms = 0;
+ frame_data.rolling_average_packets_per_frame = -1;
+ }
+
+ void FillPacket(uint8_t start_value) {
+ for (size_t i = 0; i < packet_buffer_size(); ++i)
+ packet_buffer_[i] = start_value + i;
+ }
+
+ void VerifyPacket(uint8_t* start_ptr, uint8_t start_value) {
+ for (size_t j = 0; j < packet_buffer_size(); ++j) {
+ ASSERT_EQ(start_value + j, start_ptr[j]);
+ }
+ }
+
+ size_t packet_buffer_size() const {
+ return sizeof(packet_buffer_) / sizeof(packet_buffer_[0]);
+ }
+ size_t frame_buffer_size() const {
+ return sizeof(frame_buffer_) / sizeof(frame_buffer_[0]);
+ }
+
+ enum { kPacketBufferSize = 10 };
+
+ uint8_t packet_buffer_[kPacketBufferSize];
+ uint8_t frame_buffer_[10 * kPacketBufferSize];
+
+ VCMSessionInfo session_;
+ VCMPacket packet_;
+ FrameData frame_data;
+};
+
+class TestVP8Partitions : public TestSessionInfo {
+ protected:
+ enum { kMaxVP8Partitions = 9 };
+
+ virtual void SetUp() {
+ TestSessionInfo::SetUp();
+ vp8_header_ = &packet_header_.type.Video.codecHeader.VP8;
+ packet_header_.frameType = kVideoFrameDelta;
+ packet_header_.type.Video.codec = kRtpVideoVp8;
+ vp8_header_->InitRTPVideoHeaderVP8();
+ fragmentation_.VerifyAndAllocateFragmentationHeader(kMaxVP8Partitions);
+ }
+
+ bool VerifyPartition(int partition_id,
+ int packets_expected,
+ int start_value) {
+ EXPECT_EQ(packets_expected * packet_buffer_size(),
+ fragmentation_.fragmentationLength[partition_id]);
+ for (int i = 0; i < packets_expected; ++i) {
+ size_t packet_index = fragmentation_.fragmentationOffset[partition_id] +
+ i * packet_buffer_size();
+ if (packet_index + packet_buffer_size() > frame_buffer_size())
+ return false;
+ VerifyPacket(frame_buffer_ + packet_index, start_value + i);
+ }
+ return true;
+ }
+
+ WebRtcRTPHeader packet_header_;
+ RTPVideoHeaderVP8* vp8_header_;
+ RTPFragmentationHeader fragmentation_;
+};
+
+class TestNalUnits : public TestSessionInfo {
+ protected:
+ virtual void SetUp() {
+ TestSessionInfo::SetUp();
+ packet_.codec = kVideoCodecVP8;
+ }
+
+ bool VerifyNalu(int offset, int packets_expected, int start_value) {
+ EXPECT_GE(session_.SessionLength(),
+ packets_expected * packet_buffer_size());
+ for (int i = 0; i < packets_expected; ++i) {
+ int packet_index = (offset + i) * packet_buffer_size();
+ VerifyPacket(frame_buffer_ + packet_index, start_value + i);
+ }
+ return true;
+ }
+};
+
+class TestNackList : public TestSessionInfo {
+ protected:
+ static const size_t kMaxSeqNumListLength = 30;
+
+ virtual void SetUp() {
+ TestSessionInfo::SetUp();
+ seq_num_list_length_ = 0;
+ memset(seq_num_list_, 0, sizeof(seq_num_list_));
+ }
+
+ void BuildSeqNumList(uint16_t low, uint16_t high) {
+ size_t i = 0;
+ while (low != high + 1) {
+ EXPECT_LT(i, kMaxSeqNumListLength);
+ if (i >= kMaxSeqNumListLength) {
+ seq_num_list_length_ = kMaxSeqNumListLength;
+ return;
+ }
+ seq_num_list_[i] = low;
+ low++;
+ i++;
+ }
+ seq_num_list_length_ = i;
+ }
+
+ void VerifyAll(int value) {
+ for (int i = 0; i < seq_num_list_length_; ++i)
+ EXPECT_EQ(seq_num_list_[i], value);
+ }
+
+ int seq_num_list_[kMaxSeqNumListLength];
+ int seq_num_list_length_;
+};
+
+TEST_F(TestSessionInfo, TestSimpleAPIs) {
+ packet_.isFirstPacket = true;
+ packet_.seqNum = 0xFFFE;
+ packet_.sizeBytes = packet_buffer_size();
+ packet_.frameType = kVideoFrameKey;
+ FillPacket(0);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
+ kNoErrors, frame_data)));
+ EXPECT_FALSE(session_.HaveLastPacket());
+ EXPECT_EQ(kVideoFrameKey, session_.FrameType());
+
+ packet_.isFirstPacket = false;
+ packet_.markerBit = true;
+ packet_.seqNum += 1;
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
+ kNoErrors, frame_data)));
+ EXPECT_TRUE(session_.HaveLastPacket());
+ EXPECT_EQ(packet_.seqNum, session_.HighSequenceNumber());
+ EXPECT_EQ(0xFFFE, session_.LowSequenceNumber());
+
+ // Insert an empty packet, which will become the new high sequence number.
+ // To make things more difficult, we make sure to have a wrap here.
+ packet_.isFirstPacket = false;
+ packet_.markerBit = true;
+ packet_.seqNum = 2;
+ packet_.sizeBytes = 0;
+ packet_.frameType = kEmptyFrame;
+ EXPECT_EQ(
+ 0, session_.InsertPacket(packet_, frame_buffer_, kNoErrors, frame_data));
+ EXPECT_EQ(packet_.seqNum, session_.HighSequenceNumber());
+}
+
+TEST_F(TestSessionInfo, NormalOperation) {
+ packet_.seqNum = 0xFFFF;
+ packet_.isFirstPacket = true;
+ packet_.markerBit = false;
+ FillPacket(0);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
+ kNoErrors, frame_data)));
+
+ packet_.isFirstPacket = false;
+ for (int i = 1; i < 9; ++i) {
+ packet_.seqNum += 1;
+ FillPacket(i);
+ ASSERT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(
+ packet_, frame_buffer_, kNoErrors, frame_data)));
+ }
+
+ packet_.seqNum += 1;
+ packet_.markerBit = true;
+ FillPacket(9);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
+ kNoErrors, frame_data)));
+
+ EXPECT_EQ(10 * packet_buffer_size(), session_.SessionLength());
+ for (int i = 0; i < 10; ++i) {
+ SCOPED_TRACE("Calling VerifyPacket");
+ VerifyPacket(frame_buffer_ + i * packet_buffer_size(), i);
+ }
+}
+
+TEST_F(TestSessionInfo, ErrorsEqualDecodableState) {
+ packet_.seqNum = 0xFFFF;
+ packet_.isFirstPacket = false;
+ packet_.markerBit = false;
+ FillPacket(3);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(
+ packet_, frame_buffer_, kWithErrors, frame_data)));
+ EXPECT_TRUE(session_.decodable());
+}
+
+TEST_F(TestSessionInfo, SelectiveDecodableState) {
+ packet_.seqNum = 0xFFFF;
+ packet_.isFirstPacket = false;
+ packet_.markerBit = false;
+ FillPacket(1);
+ frame_data.rolling_average_packets_per_frame = 11;
+ frame_data.rtt_ms = 150;
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(
+ packet_, frame_buffer_, kSelectiveErrors, frame_data)));
+ EXPECT_FALSE(session_.decodable());
+
+ packet_.seqNum -= 1;
+ FillPacket(0);
+ packet_.isFirstPacket = true;
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(
+ packet_, frame_buffer_, kSelectiveErrors, frame_data)));
+ EXPECT_TRUE(session_.decodable());
+
+ packet_.isFirstPacket = false;
+ packet_.seqNum += 1;
+ for (int i = 2; i < 8; ++i) {
+ packet_.seqNum += 1;
+ FillPacket(i);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(
+ packet_, frame_buffer_, kSelectiveErrors, frame_data)));
+ EXPECT_TRUE(session_.decodable());
+ }
+
+ packet_.seqNum += 1;
+ FillPacket(8);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(
+ packet_, frame_buffer_, kSelectiveErrors, frame_data)));
+ EXPECT_TRUE(session_.decodable());
+}
+
+TEST_F(TestSessionInfo, OutOfBoundsPackets1PacketFrame) {
+ packet_.seqNum = 0x0001;
+ packet_.isFirstPacket = true;
+ packet_.markerBit = true;
+ FillPacket(1);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
+ kNoErrors, frame_data)));
+
+ packet_.seqNum = 0x0004;
+ packet_.isFirstPacket = true;
+ packet_.markerBit = true;
+ FillPacket(1);
+ EXPECT_EQ(
+ -3, session_.InsertPacket(packet_, frame_buffer_, kNoErrors, frame_data));
+ packet_.seqNum = 0x0000;
+ packet_.isFirstPacket = false;
+ packet_.markerBit = false;
+ FillPacket(1);
+ EXPECT_EQ(
+ -3, session_.InsertPacket(packet_, frame_buffer_, kNoErrors, frame_data));
+}
+
+TEST_F(TestSessionInfo, SetMarkerBitOnce) {
+ packet_.seqNum = 0x0005;
+ packet_.isFirstPacket = false;
+ packet_.markerBit = true;
+ FillPacket(1);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
+ kNoErrors, frame_data)));
+ ++packet_.seqNum;
+ packet_.isFirstPacket = true;
+ packet_.markerBit = true;
+ FillPacket(1);
+ EXPECT_EQ(
+ -3, session_.InsertPacket(packet_, frame_buffer_, kNoErrors, frame_data));
+}
+
+TEST_F(TestSessionInfo, OutOfBoundsPacketsBase) {
+ // Allow packets in the range 5-6.
+ packet_.seqNum = 0x0005;
+ packet_.isFirstPacket = true;
+ packet_.markerBit = false;
+ FillPacket(1);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
+ kNoErrors, frame_data)));
+ // Insert an older packet with a first packet set.
+ packet_.seqNum = 0x0004;
+ packet_.isFirstPacket = true;
+ packet_.markerBit = true;
+ FillPacket(1);
+ EXPECT_EQ(
+ -3, session_.InsertPacket(packet_, frame_buffer_, kNoErrors, frame_data));
+ packet_.seqNum = 0x0006;
+ packet_.isFirstPacket = true;
+ packet_.markerBit = true;
+ FillPacket(1);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
+ kNoErrors, frame_data)));
+ packet_.seqNum = 0x0008;
+ packet_.isFirstPacket = false;
+ packet_.markerBit = true;
+ FillPacket(1);
+ EXPECT_EQ(
+ -3, session_.InsertPacket(packet_, frame_buffer_, kNoErrors, frame_data));
+}
+
+TEST_F(TestSessionInfo, OutOfBoundsPacketsWrap) {
+ packet_.seqNum = 0xFFFE;
+ packet_.isFirstPacket = true;
+ packet_.markerBit = false;
+ FillPacket(1);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
+ kNoErrors, frame_data)));
+
+ packet_.seqNum = 0x0004;
+ packet_.isFirstPacket = false;
+ packet_.markerBit = true;
+ FillPacket(1);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
+ kNoErrors, frame_data)));
+ packet_.seqNum = 0x0002;
+ packet_.isFirstPacket = false;
+ packet_.markerBit = false;
+ FillPacket(1);
+ ASSERT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
+ kNoErrors, frame_data)));
+ packet_.seqNum = 0xFFF0;
+ packet_.isFirstPacket = false;
+ packet_.markerBit = false;
+ FillPacket(1);
+ EXPECT_EQ(
+ -3, session_.InsertPacket(packet_, frame_buffer_, kNoErrors, frame_data));
+ packet_.seqNum = 0x0006;
+ packet_.isFirstPacket = false;
+ packet_.markerBit = false;
+ FillPacket(1);
+ EXPECT_EQ(
+ -3, session_.InsertPacket(packet_, frame_buffer_, kNoErrors, frame_data));
+}
+
+TEST_F(TestSessionInfo, OutOfBoundsOutOfOrder) {
+ // Insert out-of-bounds regular packets, and then the first and last
+ // packets. Verify that the correct bounds are maintained.
+ packet_.seqNum = 0x0003;
+ packet_.isFirstPacket = false;
+ packet_.markerBit = false;
+ FillPacket(1);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
+ kNoErrors, frame_data)));
+ // Insert a newer packet with the first packet flag set.
+ packet_.seqNum = 0x0005;
+ packet_.isFirstPacket = true;
+ packet_.markerBit = false;
+ FillPacket(1);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
+ kNoErrors, frame_data)));
+ packet_.seqNum = 0x0004;
+ packet_.isFirstPacket = false;
+ packet_.markerBit = false;
+ FillPacket(1);
+ EXPECT_EQ(
+ -3, session_.InsertPacket(packet_, frame_buffer_, kNoErrors, frame_data));
+ packet_.seqNum = 0x0010;
+ packet_.isFirstPacket = false;
+ packet_.markerBit = false;
+ FillPacket(1);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
+ kNoErrors, frame_data)));
+ packet_.seqNum = 0x0008;
+ packet_.isFirstPacket = false;
+ packet_.markerBit = true;
+ FillPacket(1);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
+ kNoErrors, frame_data)));
+
+ packet_.seqNum = 0x0009;
+ packet_.isFirstPacket = false;
+ packet_.markerBit = false;
+ FillPacket(1);
+ EXPECT_EQ(
+ -3, session_.InsertPacket(packet_, frame_buffer_, kNoErrors, frame_data));
+}
+
+TEST_F(TestVP8Partitions, TwoPartitionsOneLoss) {
+ // Partition 0 | Partition 1
+ // [ 0 ] [ 2 ] | [ 3 ]
+ packet_header_.type.Video.isFirstPacket = true;
+ vp8_header_->beginningOfPartition = true;
+ vp8_header_->partitionId = 0;
+ packet_header_.header.markerBit = false;
+ packet_header_.header.sequenceNumber = 0;
+ FillPacket(0);
+ VCMPacket* packet =
+ new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
+ kNoErrors, frame_data)));
+ delete packet;
+
+ packet_header_.type.Video.isFirstPacket = false;
+ vp8_header_->partitionId = 0;
+ vp8_header_->beginningOfPartition = false;
+ packet_header_.header.markerBit = false;
+ packet_header_.header.sequenceNumber += 2;
+ FillPacket(2);
+ packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
+ kNoErrors, frame_data)));
+ delete packet;
+
+ packet_header_.type.Video.isFirstPacket = false;
+ vp8_header_->partitionId = 1;
+ vp8_header_->beginningOfPartition = true;
+ packet_header_.header.markerBit = true;
+ packet_header_.header.sequenceNumber += 1;
+ FillPacket(3);
+ packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
+ kNoErrors, frame_data)));
+ delete packet;
+
+ // One packet should be removed (end of partition 0).
+ EXPECT_EQ(2 * packet_buffer_size(),
+ session_.BuildVP8FragmentationHeader(
+ frame_buffer_, frame_buffer_size(), &fragmentation_));
+ SCOPED_TRACE("Calling VerifyPartition");
+ EXPECT_TRUE(VerifyPartition(0, 1, 0));
+ SCOPED_TRACE("Calling VerifyPartition");
+ EXPECT_TRUE(VerifyPartition(1, 1, 3));
+}
+
+TEST_F(TestVP8Partitions, TwoPartitionsOneLoss2) {
+ // Partition 0 | Partition 1
+ // [ 1 ] [ 2 ] | [ 3 ] [ 5 ]
+ packet_header_.type.Video.isFirstPacket = true;
+ vp8_header_->beginningOfPartition = true;
+ vp8_header_->partitionId = 0;
+ packet_header_.header.markerBit = false;
+ packet_header_.header.sequenceNumber = 1;
+ FillPacket(1);
+ VCMPacket* packet =
+ new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
+ kNoErrors, frame_data)));
+ delete packet;
+
+ packet_header_.type.Video.isFirstPacket = false;
+ vp8_header_->partitionId = 0;
+ vp8_header_->beginningOfPartition = false;
+ packet_header_.header.markerBit = false;
+ packet_header_.header.sequenceNumber += 1;
+ FillPacket(2);
+ packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
+ kNoErrors, frame_data)));
+ delete packet;
+
+ packet_header_.type.Video.isFirstPacket = false;
+ vp8_header_->partitionId = 1;
+ vp8_header_->beginningOfPartition = true;
+ packet_header_.header.markerBit = false;
+ packet_header_.header.sequenceNumber += 1;
+ FillPacket(3);
+ packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
+ kNoErrors, frame_data)));
+ delete packet;
+
+ packet_header_.type.Video.isFirstPacket = false;
+ vp8_header_->partitionId = 1;
+ vp8_header_->beginningOfPartition = false;
+ packet_header_.header.markerBit = true;
+ packet_header_.header.sequenceNumber += 2;
+ FillPacket(5);
+ packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
+ kNoErrors, frame_data)));
+ delete packet;
+
+ // One packet should be removed (end of partition 1), 3 left.
+ EXPECT_EQ(3 * packet_buffer_size(),
+ session_.BuildVP8FragmentationHeader(
+ frame_buffer_, frame_buffer_size(), &fragmentation_));
+ SCOPED_TRACE("Calling VerifyPartition");
+ EXPECT_TRUE(VerifyPartition(0, 2, 1));
+ SCOPED_TRACE("Calling VerifyPartition");
+ EXPECT_TRUE(VerifyPartition(1, 1, 3));
+}
+
+TEST_F(TestVP8Partitions, TwoPartitionsNoLossWrap) {
+ // Partition 0 | Partition 1
+ // [ fffd ] [ fffe ] | [ ffff ] [ 0 ]
+ packet_header_.type.Video.isFirstPacket = true;
+ vp8_header_->beginningOfPartition = true;
+ vp8_header_->partitionId = 0;
+ packet_header_.header.markerBit = false;
+ packet_header_.header.sequenceNumber = 0xfffd;
+ FillPacket(0);
+ VCMPacket* packet =
+ new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
+ kNoErrors, frame_data)));
+ delete packet;
+
+ packet_header_.type.Video.isFirstPacket = false;
+ vp8_header_->partitionId = 0;
+ vp8_header_->beginningOfPartition = false;
+ packet_header_.header.markerBit = false;
+ packet_header_.header.sequenceNumber += 1;
+ FillPacket(1);
+ packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
+ kNoErrors, frame_data)));
+ delete packet;
+
+ packet_header_.type.Video.isFirstPacket = false;
+ vp8_header_->partitionId = 1;
+ vp8_header_->beginningOfPartition = true;
+ packet_header_.header.markerBit = false;
+ packet_header_.header.sequenceNumber += 1;
+ FillPacket(2);
+ packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
+ kNoErrors, frame_data)));
+ delete packet;
+
+ packet_header_.type.Video.isFirstPacket = false;
+ vp8_header_->partitionId = 1;
+ vp8_header_->beginningOfPartition = false;
+ packet_header_.header.markerBit = true;
+ packet_header_.header.sequenceNumber += 1;
+ FillPacket(3);
+ packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
+ kNoErrors, frame_data)));
+ delete packet;
+
+ // No packet should be removed.
+ EXPECT_EQ(4 * packet_buffer_size(),
+ session_.BuildVP8FragmentationHeader(
+ frame_buffer_, frame_buffer_size(), &fragmentation_));
+ SCOPED_TRACE("Calling VerifyPartition");
+ EXPECT_TRUE(VerifyPartition(0, 2, 0));
+ SCOPED_TRACE("Calling VerifyPartition");
+ EXPECT_TRUE(VerifyPartition(1, 2, 2));
+}
+
+TEST_F(TestVP8Partitions, TwoPartitionsLossWrap) {
+ // Partition 0 | Partition 1
+ // [ fffd ] [ fffe ] | [ ffff ] [ 1 ]
+ packet_header_.type.Video.isFirstPacket = true;
+ vp8_header_->beginningOfPartition = true;
+ vp8_header_->partitionId = 0;
+ packet_header_.header.markerBit = false;
+ packet_header_.header.sequenceNumber = 0xfffd;
+ FillPacket(0);
+ VCMPacket* packet =
+ new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
+ kNoErrors, frame_data)));
+ delete packet;
+
+ packet_header_.type.Video.isFirstPacket = false;
+ vp8_header_->partitionId = 0;
+ vp8_header_->beginningOfPartition = false;
+ packet_header_.header.markerBit = false;
+ packet_header_.header.sequenceNumber += 1;
+ FillPacket(1);
+ packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
+ kNoErrors, frame_data)));
+ delete packet;
+
+ packet_header_.type.Video.isFirstPacket = false;
+ vp8_header_->partitionId = 1;
+ vp8_header_->beginningOfPartition = true;
+ packet_header_.header.markerBit = false;
+ packet_header_.header.sequenceNumber += 1;
+ FillPacket(2);
+ packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
+ kNoErrors, frame_data)));
+ delete packet;
+
+ packet_header_.type.Video.isFirstPacket = false;
+ vp8_header_->partitionId = 1;
+ vp8_header_->beginningOfPartition = false;
+ packet_header_.header.markerBit = true;
+ packet_header_.header.sequenceNumber += 2;
+ FillPacket(3);
+ packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
+ kNoErrors, frame_data)));
+ delete packet;
+
+ // One packet should be removed from the last partition.
+ EXPECT_EQ(3 * packet_buffer_size(),
+ session_.BuildVP8FragmentationHeader(
+ frame_buffer_, frame_buffer_size(), &fragmentation_));
+ SCOPED_TRACE("Calling VerifyPartition");
+ EXPECT_TRUE(VerifyPartition(0, 2, 0));
+ SCOPED_TRACE("Calling VerifyPartition");
+ EXPECT_TRUE(VerifyPartition(1, 1, 2));
+}
+
+TEST_F(TestVP8Partitions, ThreePartitionsOneMissing) {
+ // Partition 0 | Partition 1 | Partition 2
+ // [ 1 ] [ 2 ] |             | [ 5 ] [ 6 ]
+ packet_header_.type.Video.isFirstPacket = true;
+ vp8_header_->beginningOfPartition = true;
+ vp8_header_->partitionId = 0;
+ packet_header_.header.markerBit = false;
+ packet_header_.header.sequenceNumber = 1;
+ FillPacket(1);
+ VCMPacket* packet =
+ new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
+ kNoErrors, frame_data)));
+ delete packet;
+
+ packet_header_.type.Video.isFirstPacket = false;
+ vp8_header_->partitionId = 0;
+ vp8_header_->beginningOfPartition = false;
+ packet_header_.header.markerBit = false;
+ packet_header_.header.sequenceNumber += 1;
+ FillPacket(2);
+ packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
+ kNoErrors, frame_data)));
+ delete packet;
+
+ packet_header_.type.Video.isFirstPacket = false;
+ vp8_header_->partitionId = 2;
+ vp8_header_->beginningOfPartition = true;
+ packet_header_.header.markerBit = false;
+ packet_header_.header.sequenceNumber += 3;
+ FillPacket(5);
+ packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
+ kNoErrors, frame_data)));
+ delete packet;
+
+ packet_header_.type.Video.isFirstPacket = false;
+ vp8_header_->partitionId = 2;
+ vp8_header_->beginningOfPartition = false;
+ packet_header_.header.markerBit = true;
+ packet_header_.header.sequenceNumber += 1;
+ FillPacket(6);
+ packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
+ kNoErrors, frame_data)));
+ delete packet;
+
+ // No packet should be removed.
+ EXPECT_EQ(4 * packet_buffer_size(),
+ session_.BuildVP8FragmentationHeader(
+ frame_buffer_, frame_buffer_size(), &fragmentation_));
+ SCOPED_TRACE("Calling VerifyPartition");
+ EXPECT_TRUE(VerifyPartition(0, 2, 1));
+ SCOPED_TRACE("Calling VerifyPartition");
+ EXPECT_TRUE(VerifyPartition(2, 2, 5));
+}
+
+TEST_F(TestVP8Partitions, ThreePartitionsLossInSecond) {
+ // Partition 0 |Partition 1 | Partition 2
+ // [ 1 ] [ 2 ] | [ 4 ] [ 5 ] | [ 6 ] [ 7 ]
+ packet_header_.type.Video.isFirstPacket = true;
+ vp8_header_->beginningOfPartition = true;
+ vp8_header_->partitionId = 0;
+ packet_header_.header.markerBit = false;
+ packet_header_.header.sequenceNumber = 1;
+ FillPacket(1);
+ VCMPacket* packet =
+ new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
+ kNoErrors, frame_data)));
+ delete packet;
+
+ packet_header_.type.Video.isFirstPacket = false;
+ vp8_header_->partitionId = 0;
+ vp8_header_->beginningOfPartition = false;
+ packet_header_.header.markerBit = false;
+ packet_header_.header.sequenceNumber += 1;
+ FillPacket(2);
+ packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
+ kNoErrors, frame_data)));
+ delete packet;
+
+ packet_header_.type.Video.isFirstPacket = false;
+ vp8_header_->partitionId = 1;
+ vp8_header_->beginningOfPartition = false;
+ packet_header_.header.markerBit = false;
+ packet_header_.header.sequenceNumber += 2;
+ FillPacket(4);
+ packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
+ kNoErrors, frame_data)));
+ delete packet;
+
+ packet_header_.type.Video.isFirstPacket = false;
+ vp8_header_->partitionId = 1;
+ vp8_header_->beginningOfPartition = false;
+ packet_header_.header.markerBit = false;
+ packet_header_.header.sequenceNumber += 1;
+ FillPacket(5);
+ packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
+ kNoErrors, frame_data)));
+ delete packet;
+
+ packet_header_.type.Video.isFirstPacket = false;
+ vp8_header_->partitionId = 2;
+ vp8_header_->beginningOfPartition = true;
+ packet_header_.header.markerBit = false;
+ packet_header_.header.sequenceNumber += 1;
+ FillPacket(6);
+ packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
+ kNoErrors, frame_data)));
+ delete packet;
+
+ packet_header_.type.Video.isFirstPacket = false;
+ vp8_header_->partitionId = 2;
+ vp8_header_->beginningOfPartition = false;
+ packet_header_.header.markerBit = true;
+ packet_header_.header.sequenceNumber += 1;
+ FillPacket(7);
+ packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
+ kNoErrors, frame_data)));
+ delete packet;
+
+ // 2 partitions left. 2 packets removed from the second partition.
+ EXPECT_EQ(4 * packet_buffer_size(),
+ session_.BuildVP8FragmentationHeader(
+ frame_buffer_, frame_buffer_size(), &fragmentation_));
+ SCOPED_TRACE("Calling VerifyPartition");
+ EXPECT_TRUE(VerifyPartition(0, 2, 1));
+ SCOPED_TRACE("Calling VerifyPartition");
+ EXPECT_TRUE(VerifyPartition(2, 2, 6));
+}
+
+TEST_F(TestVP8Partitions, AggregationOverTwoPackets) {
+ // Partition 0 | Partition 1 | Partition 2
+ // [ 0 | ] [ 1 ] | [ 2 ]
+ packet_header_.type.Video.isFirstPacket = true;
+ vp8_header_->beginningOfPartition = true;
+ vp8_header_->partitionId = 0;
+ packet_header_.header.markerBit = false;
+ packet_header_.header.sequenceNumber = 0;
+ FillPacket(0);
+ VCMPacket* packet =
+ new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
+ kNoErrors, frame_data)));
+ delete packet;
+
+ packet_header_.type.Video.isFirstPacket = false;
+ vp8_header_->partitionId = 1;
+ vp8_header_->beginningOfPartition = false;
+ packet_header_.header.markerBit = false;
+ packet_header_.header.sequenceNumber += 1;
+ FillPacket(1);
+ packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
+ kNoErrors, frame_data)));
+ delete packet;
+
+ packet_header_.type.Video.isFirstPacket = false;
+ vp8_header_->partitionId = 2;
+ vp8_header_->beginningOfPartition = true;
+ packet_header_.header.markerBit = true;
+ packet_header_.header.sequenceNumber += 1;
+ FillPacket(2);
+ packet = new VCMPacket(packet_buffer_, packet_buffer_size(), packet_header_);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(*packet, frame_buffer_,
+ kNoErrors, frame_data)));
+ delete packet;
+
+ // No packets removed.
+ EXPECT_EQ(3 * packet_buffer_size(),
+ session_.BuildVP8FragmentationHeader(
+ frame_buffer_, frame_buffer_size(), &fragmentation_));
+ SCOPED_TRACE("Calling VerifyPartition");
+ EXPECT_TRUE(VerifyPartition(0, 2, 0));
+ // This partition is aggregated with partition 0.
+ SCOPED_TRACE("Calling VerifyPartition");
+ EXPECT_TRUE(VerifyPartition(1, 0, 0));
+ SCOPED_TRACE("Calling VerifyPartition");
+ EXPECT_TRUE(VerifyPartition(2, 1, 2));
+}
+
+TEST_F(TestNalUnits, OnlyReceivedEmptyPacket) {
+ packet_.isFirstPacket = false;
+ packet_.completeNALU = kNaluComplete;
+ packet_.frameType = kEmptyFrame;
+ packet_.sizeBytes = 0;
+ packet_.seqNum = 0;
+ packet_.markerBit = false;
+ EXPECT_EQ(
+ 0, session_.InsertPacket(packet_, frame_buffer_, kNoErrors, frame_data));
+
+ EXPECT_EQ(0U, session_.MakeDecodable());
+ EXPECT_EQ(0U, session_.SessionLength());
+}
+
+TEST_F(TestNalUnits, OneIsolatedNaluLoss) {
+ packet_.isFirstPacket = true;
+ packet_.completeNALU = kNaluComplete;
+ packet_.seqNum = 0;
+ packet_.markerBit = false;
+ FillPacket(0);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
+ kNoErrors, frame_data)));
+
+ packet_.isFirstPacket = false;
+ packet_.completeNALU = kNaluComplete;
+ packet_.seqNum += 2;
+ packet_.markerBit = true;
+ FillPacket(2);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
+ kNoErrors, frame_data)));
+
+ EXPECT_EQ(0U, session_.MakeDecodable());
+ EXPECT_EQ(2 * packet_buffer_size(), session_.SessionLength());
+ SCOPED_TRACE("Calling VerifyNalu");
+ EXPECT_TRUE(VerifyNalu(0, 1, 0));
+ SCOPED_TRACE("Calling VerifyNalu");
+ EXPECT_TRUE(VerifyNalu(1, 1, 2));
+}
+
+TEST_F(TestNalUnits, LossInMiddleOfNalu) {
+ packet_.isFirstPacket = true;
+ packet_.completeNALU = kNaluComplete;
+ packet_.seqNum = 0;
+ packet_.markerBit = false;
+ FillPacket(0);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
+ kNoErrors, frame_data)));
+
+ packet_.isFirstPacket = false;
+ packet_.completeNALU = kNaluEnd;
+ packet_.seqNum += 2;
+ packet_.markerBit = true;
+ FillPacket(2);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
+ kNoErrors, frame_data)));
+
+ EXPECT_EQ(packet_buffer_size(), session_.MakeDecodable());
+ EXPECT_EQ(packet_buffer_size(), session_.SessionLength());
+ SCOPED_TRACE("Calling VerifyNalu");
+ EXPECT_TRUE(VerifyNalu(0, 1, 0));
+}
+
+TEST_F(TestNalUnits, StartAndEndOfLastNalUnitLost) {
+ packet_.isFirstPacket = true;
+ packet_.completeNALU = kNaluComplete;
+ packet_.seqNum = 0;
+ packet_.markerBit = false;
+ FillPacket(0);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
+ kNoErrors, frame_data)));
+
+ packet_.isFirstPacket = false;
+ packet_.completeNALU = kNaluIncomplete;
+ packet_.seqNum += 2;
+ packet_.markerBit = false;
+ FillPacket(1);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
+ kNoErrors, frame_data)));
+
+ EXPECT_EQ(packet_buffer_size(), session_.MakeDecodable());
+ EXPECT_EQ(packet_buffer_size(), session_.SessionLength());
+ SCOPED_TRACE("Calling VerifyNalu");
+ EXPECT_TRUE(VerifyNalu(0, 1, 0));
+}
+
+TEST_F(TestNalUnits, ReorderWrapNoLoss) {
+ packet_.seqNum = 0xFFFF;
+ packet_.isFirstPacket = false;
+ packet_.completeNALU = kNaluIncomplete;
+ packet_.seqNum += 1;
+ packet_.markerBit = false;
+ FillPacket(1);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
+ kNoErrors, frame_data)));
+
+ packet_.isFirstPacket = true;
+ packet_.completeNALU = kNaluComplete;
+ packet_.seqNum -= 1;
+ packet_.markerBit = false;
+ FillPacket(0);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
+ kNoErrors, frame_data)));
+
+ packet_.isFirstPacket = false;
+ packet_.completeNALU = kNaluEnd;
+ packet_.seqNum += 2;
+ packet_.markerBit = true;
+ FillPacket(2);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
+ kNoErrors, frame_data)));
+
+ EXPECT_EQ(0U, session_.MakeDecodable());
+ EXPECT_EQ(3 * packet_buffer_size(), session_.SessionLength());
+ SCOPED_TRACE("Calling VerifyNalu");
+ EXPECT_TRUE(VerifyNalu(0, 1, 0));
+}
+
+TEST_F(TestNalUnits, WrapLosses) {
+ packet_.seqNum = 0xFFFF;
+ packet_.isFirstPacket = false;
+ packet_.completeNALU = kNaluIncomplete;
+ packet_.markerBit = false;
+ FillPacket(1);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
+ kNoErrors, frame_data)));
+
+ packet_.isFirstPacket = false;
+ packet_.completeNALU = kNaluEnd;
+ packet_.seqNum += 2;
+ packet_.markerBit = true;
+ FillPacket(2);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
+ kNoErrors, frame_data)));
+
+ EXPECT_EQ(2 * packet_buffer_size(), session_.MakeDecodable());
+ EXPECT_EQ(0U, session_.SessionLength());
+}
+
+TEST_F(TestNalUnits, ReorderWrapLosses) {
+ packet_.seqNum = 0xFFFF;
+
+ packet_.isFirstPacket = false;
+ packet_.completeNALU = kNaluEnd;
+ packet_.seqNum += 2;
+ packet_.markerBit = true;
+ FillPacket(2);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
+ kNoErrors, frame_data)));
+
+ packet_.seqNum -= 2;
+ packet_.isFirstPacket = false;
+ packet_.completeNALU = kNaluIncomplete;
+ packet_.markerBit = false;
+ FillPacket(1);
+ EXPECT_EQ(packet_buffer_size(),
+ static_cast<size_t>(session_.InsertPacket(packet_, frame_buffer_,
+ kNoErrors, frame_data)));
+
+ EXPECT_EQ(2 * packet_buffer_size(), session_.MakeDecodable());
+ EXPECT_EQ(0U, session_.SessionLength());
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/video_coding/main/test/plotJitterEstimate.m b/webrtc/modules/video_coding/test/plotJitterEstimate.m
index d6185f55da..d6185f55da 100644
--- a/webrtc/modules/video_coding/main/test/plotJitterEstimate.m
+++ b/webrtc/modules/video_coding/test/plotJitterEstimate.m
diff --git a/webrtc/modules/video_coding/main/test/plotReceiveTrace.m b/webrtc/modules/video_coding/test/plotReceiveTrace.m
index 4d262aa165..4d262aa165 100644
--- a/webrtc/modules/video_coding/main/test/plotReceiveTrace.m
+++ b/webrtc/modules/video_coding/test/plotReceiveTrace.m
diff --git a/webrtc/modules/video_coding/main/test/plotTimingTest.m b/webrtc/modules/video_coding/test/plotTimingTest.m
index 52a6f303cd..52a6f303cd 100644
--- a/webrtc/modules/video_coding/main/test/plotTimingTest.m
+++ b/webrtc/modules/video_coding/test/plotTimingTest.m
diff --git a/webrtc/modules/video_coding/test/receiver_tests.h b/webrtc/modules/video_coding/test/receiver_tests.h
new file mode 100644
index 0000000000..d6bac07392
--- /dev/null
+++ b/webrtc/modules/video_coding/test/receiver_tests.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_RECEIVER_TESTS_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TEST_RECEIVER_TESTS_H_
+
+#include <stdio.h>
+#include <string>
+
+#include "webrtc/common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "webrtc/modules/video_coding/include/video_coding.h"
+#include "webrtc/modules/video_coding/test/test_util.h"
+#include "webrtc/modules/video_coding/test/video_source.h"
+#include "webrtc/typedefs.h"
+
+class RtpDataCallback : public webrtc::NullRtpData {
+ public:
+ explicit RtpDataCallback(webrtc::VideoCodingModule* vcm) : vcm_(vcm) {}
+ virtual ~RtpDataCallback() {}
+
+ int32_t OnReceivedPayloadData(
+ const uint8_t* payload_data,
+ const size_t payload_size,
+ const webrtc::WebRtcRTPHeader* rtp_header) override {
+ return vcm_->IncomingPacket(payload_data, payload_size, *rtp_header);
+ }
+
+ private:
+ webrtc::VideoCodingModule* vcm_;
+};
+
+int RtpPlay(const CmdArgs& args);
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_TEST_RECEIVER_TESTS_H_
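
RtpDataCallback above is the glue between RTP depacketization and the VCM. A hedged sketch of how such a callback is typically wired to an RtpReceiver (mirroring SsrcHandlers::RegisterSsrc in rtp_player.cc later in this change; the WireUpReceiver helper and its local lifetimes are illustrative assumptions, not part of the patch):

#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
#include "webrtc/modules/video_coding/test/receiver_tests.h"
#include "webrtc/system_wrappers/include/clock.h"

// Hypothetical helper for illustration only.
void WireUpReceiver(webrtc::VideoCodingModule* vcm,
                    webrtc::RTPPayloadRegistry* registry) {
  RtpDataCallback callback(vcm);
  rtc::scoped_ptr<webrtc::RtpReceiver> receiver(
      webrtc::RtpReceiver::CreateVideoReceiver(
          webrtc::Clock::GetRealTimeClock(), &callback, NULL, registry));
  // Every depacketized payload handed to |receiver| now reaches
  // vcm->IncomingPacket() via RtpDataCallback::OnReceivedPayloadData().
  // In real code |callback| and |receiver| must outlive this scope.
}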
diff --git a/webrtc/modules/video_coding/test/release_test.h b/webrtc/modules/video_coding/test/release_test.h
new file mode 100644
index 0000000000..ab9b2159d9
--- /dev/null
+++ b/webrtc/modules/video_coding/test/release_test.h
@@ -0,0 +1,17 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_RELEASE_TEST_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TEST_RELEASE_TEST_H_
+
+int ReleaseTest();
+int ReleaseTestPart2();
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_TEST_RELEASE_TEST_H_
diff --git a/webrtc/modules/video_coding/test/rtp_player.cc b/webrtc/modules/video_coding/test/rtp_player.cc
new file mode 100644
index 0000000000..9b6490618c
--- /dev/null
+++ b/webrtc/modules/video_coding/test/rtp_player.cc
@@ -0,0 +1,492 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_coding/test/rtp_player.h"
+
+#include <stdio.h>
+
+#include <map>
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "webrtc/modules/video_coding/internal_defines.h"
+#include "webrtc/modules/video_coding/test/test_util.h"
+#include "webrtc/system_wrappers/include/clock.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/test/rtp_file_reader.h"
+
+#if 1
+#define DEBUG_LOG1(text, arg)
+#else
+#define DEBUG_LOG1(text, arg) (printf(text "\n", arg))
+#endif
+
+namespace webrtc {
+namespace rtpplayer {
+
+enum {
+ kMaxPacketBufferSize = 4096,
+ kDefaultTransmissionTimeOffsetExtensionId = 2
+};
+
+class RawRtpPacket {
+ public:
+ RawRtpPacket(const uint8_t* data,
+ size_t length,
+ uint32_t ssrc,
+ uint16_t seq_num)
+ : data_(new uint8_t[length]),
+ length_(length),
+ resend_time_ms_(-1),
+ ssrc_(ssrc),
+ seq_num_(seq_num) {
+ assert(data);
+ memcpy(data_.get(), data, length_);
+ }
+
+ const uint8_t* data() const { return data_.get(); }
+ size_t length() const { return length_; }
+ int64_t resend_time_ms() const { return resend_time_ms_; }
+ void set_resend_time_ms(int64_t timeMs) { resend_time_ms_ = timeMs; }
+ uint32_t ssrc() const { return ssrc_; }
+ uint16_t seq_num() const { return seq_num_; }
+
+ private:
+ rtc::scoped_ptr<uint8_t[]> data_;
+ size_t length_;
+ int64_t resend_time_ms_;
+ uint32_t ssrc_;
+ uint16_t seq_num_;
+
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(RawRtpPacket);
+};
+
+class LostPackets {
+ public:
+ LostPackets(Clock* clock, int64_t rtt_ms)
+ : crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
+ debug_file_(fopen("PacketLossDebug.txt", "w")),
+ loss_count_(0),
+ packets_(),
+ clock_(clock),
+ rtt_ms_(rtt_ms) {
+ assert(clock);
+ }
+
+ ~LostPackets() {
+ if (debug_file_) {
+ fclose(debug_file_);
+ debug_file_ = NULL;
+ }
+ while (!packets_.empty()) {
+ delete packets_.back();
+ packets_.pop_back();
+ }
+ }
+
+ void AddPacket(RawRtpPacket* packet) {
+ assert(packet);
+ printf("Throw: %08x:%u\n", packet->ssrc(), packet->seq_num());
+ CriticalSectionScoped cs(crit_sect_.get());
+ if (debug_file_) {
+ fprintf(debug_file_, "%u Lost packet: %u\n", loss_count_,
+ packet->seq_num());
+ }
+ packets_.push_back(packet);
+ loss_count_++;
+ }
+
+ void SetResendTime(uint32_t ssrc, uint16_t resendSeqNum) {
+ int64_t resend_time_ms = clock_->TimeInMilliseconds() + rtt_ms_;
+ int64_t now_ms = clock_->TimeInMilliseconds();
+ CriticalSectionScoped cs(crit_sect_.get());
+ for (RtpPacketIterator it = packets_.begin(); it != packets_.end(); ++it) {
+ RawRtpPacket* packet = *it;
+ if (ssrc == packet->ssrc() && resendSeqNum == packet->seq_num() &&
+ packet->resend_time_ms() + 10 < now_ms) {
+ if (debug_file_) {
+ fprintf(debug_file_, "Resend %u at %u\n", packet->seq_num(),
+ MaskWord64ToUWord32(resend_time_ms));
+ }
+ packet->set_resend_time_ms(resend_time_ms);
+ return;
+ }
+ }
+ // We may get here since the captured stream may itself be missing packets.
+ }
+
+ RawRtpPacket* NextPacketToResend(int64_t time_now) {
+ CriticalSectionScoped cs(crit_sect_.get());
+ for (RtpPacketIterator it = packets_.begin(); it != packets_.end(); ++it) {
+ RawRtpPacket* packet = *it;
+ if (time_now >= packet->resend_time_ms() &&
+ packet->resend_time_ms() != -1) {
+ packets_.erase(it);
+ return packet;
+ }
+ }
+ return NULL;
+ }
+
+ int NumberOfPacketsToResend() const {
+ CriticalSectionScoped cs(crit_sect_.get());
+ int count = 0;
+ for (ConstRtpPacketIterator it = packets_.begin(); it != packets_.end();
+ ++it) {
+ if ((*it)->resend_time_ms() >= 0) {
+ count++;
+ }
+ }
+ return count;
+ }
+
+ void LogPacketResent(RawRtpPacket* packet) {
+ int64_t now_ms = clock_->TimeInMilliseconds();
+ CriticalSectionScoped cs(crit_sect_.get());
+ if (debug_file_) {
+ fprintf(debug_file_, "Resent %u at %u\n", packet->seq_num(),
+ MaskWord64ToUWord32(now_ms));
+ }
+ }
+
+ void Print() const {
+ CriticalSectionScoped cs(crit_sect_.get());
+ printf("Lost packets: %u\n", loss_count_);
+ printf("Packets waiting to be resent: %d\n", NumberOfPacketsToResend());
+ printf("Packets still lost: %zd\n", packets_.size());
+ printf("Sequence numbers:\n");
+ for (ConstRtpPacketIterator it = packets_.begin(); it != packets_.end();
+ ++it) {
+ printf("%u, ", (*it)->seq_num());
+ }
+ printf("\n");
+ }
+
+ private:
+ typedef std::vector<RawRtpPacket*> RtpPacketList;
+ typedef RtpPacketList::iterator RtpPacketIterator;
+ typedef RtpPacketList::const_iterator ConstRtpPacketIterator;
+
+ rtc::scoped_ptr<CriticalSectionWrapper> crit_sect_;
+ FILE* debug_file_;
+ int loss_count_;
+ RtpPacketList packets_;
+ Clock* clock_;
+ int64_t rtt_ms_;
+
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(LostPackets);
+};
+
+class SsrcHandlers {
+ public:
+ SsrcHandlers(PayloadSinkFactoryInterface* payload_sink_factory,
+ const PayloadTypes& payload_types)
+ : payload_sink_factory_(payload_sink_factory),
+ payload_types_(payload_types),
+ handlers_() {
+ assert(payload_sink_factory);
+ }
+
+ ~SsrcHandlers() {
+ while (!handlers_.empty()) {
+ delete handlers_.begin()->second;
+ handlers_.erase(handlers_.begin());
+ }
+ }
+
+ int RegisterSsrc(uint32_t ssrc, LostPackets* lost_packets, Clock* clock) {
+ if (handlers_.count(ssrc) > 0) {
+ return 0;
+ }
+ DEBUG_LOG1("Registering handler for ssrc=%08x", ssrc);
+
+ rtc::scoped_ptr<Handler> handler(
+ new Handler(ssrc, payload_types_, lost_packets));
+ handler->payload_sink_.reset(payload_sink_factory_->Create(handler.get()));
+ if (handler->payload_sink_.get() == NULL) {
+ return -1;
+ }
+
+ RtpRtcp::Configuration configuration;
+ configuration.clock = clock;
+ configuration.audio = false;
+ handler->rtp_module_.reset(RtpReceiver::CreateVideoReceiver(
+ configuration.clock, handler->payload_sink_.get(), NULL,
+ handler->rtp_payload_registry_.get()));
+ if (handler->rtp_module_.get() == NULL) {
+ return -1;
+ }
+
+ handler->rtp_module_->SetNACKStatus(kNackOff);
+ handler->rtp_header_parser_->RegisterRtpHeaderExtension(
+ kRtpExtensionTransmissionTimeOffset,
+ kDefaultTransmissionTimeOffsetExtensionId);
+
+ for (PayloadTypesIterator it = payload_types_.begin();
+ it != payload_types_.end(); ++it) {
+ VideoCodec codec;
+ memset(&codec, 0, sizeof(codec));
+ strncpy(codec.plName, it->name().c_str(), sizeof(codec.plName) - 1);
+ codec.plType = it->payload_type();
+ codec.codecType = it->codec_type();
+ if (handler->rtp_module_->RegisterReceivePayload(
+ codec.plName, codec.plType, 90000, 0, codec.maxBitrate) < 0) {
+ return -1;
+ }
+ }
+
+ handlers_[ssrc] = handler.release();
+ return 0;
+ }
+
+ void IncomingPacket(const uint8_t* data, size_t length) {
+ for (HandlerMapIt it = handlers_.begin(); it != handlers_.end(); ++it) {
+ if (!it->second->rtp_header_parser_->IsRtcp(data, length)) {
+ RTPHeader header;
+ it->second->rtp_header_parser_->Parse(data, length, &header);
+ PayloadUnion payload_specific;
+ it->second->rtp_payload_registry_->GetPayloadSpecifics(
+ header.payloadType, &payload_specific);
+ it->second->rtp_module_->IncomingRtpPacket(header, data, length,
+ payload_specific, true);
+ }
+ }
+ }
+
+ private:
+ class Handler : public RtpStreamInterface {
+ public:
+ Handler(uint32_t ssrc,
+ const PayloadTypes& payload_types,
+ LostPackets* lost_packets)
+ : rtp_header_parser_(RtpHeaderParser::Create()),
+ rtp_payload_registry_(new RTPPayloadRegistry(
+ RTPPayloadStrategy::CreateStrategy(false))),
+ rtp_module_(),
+ payload_sink_(),
+ ssrc_(ssrc),
+ payload_types_(payload_types),
+ lost_packets_(lost_packets) {
+ assert(lost_packets);
+ }
+ virtual ~Handler() {}
+
+ virtual void ResendPackets(const uint16_t* sequence_numbers,
+ uint16_t length) {
+ assert(sequence_numbers);
+ for (uint16_t i = 0; i < length; i++) {
+ lost_packets_->SetResendTime(ssrc_, sequence_numbers[i]);
+ }
+ }
+
+ virtual uint32_t ssrc() const { return ssrc_; }
+ virtual const PayloadTypes& payload_types() const { return payload_types_; }
+
+ rtc::scoped_ptr<RtpHeaderParser> rtp_header_parser_;
+ rtc::scoped_ptr<RTPPayloadRegistry> rtp_payload_registry_;
+ rtc::scoped_ptr<RtpReceiver> rtp_module_;
+ rtc::scoped_ptr<PayloadSinkInterface> payload_sink_;
+
+ private:
+ uint32_t ssrc_;
+ const PayloadTypes& payload_types_;
+ LostPackets* lost_packets_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(Handler);
+ };
+
+ typedef std::map<uint32_t, Handler*> HandlerMap;
+ typedef std::map<uint32_t, Handler*>::iterator HandlerMapIt;
+
+ PayloadSinkFactoryInterface* payload_sink_factory_;
+ PayloadTypes payload_types_;
+ HandlerMap handlers_;
+
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(SsrcHandlers);
+};
+
+class RtpPlayerImpl : public RtpPlayerInterface {
+ public:
+ RtpPlayerImpl(PayloadSinkFactoryInterface* payload_sink_factory,
+ const PayloadTypes& payload_types,
+ Clock* clock,
+ rtc::scoped_ptr<test::RtpFileReader>* packet_source,
+ float loss_rate,
+ int64_t rtt_ms,
+ bool reordering)
+ : ssrc_handlers_(payload_sink_factory, payload_types),
+ clock_(clock),
+ next_rtp_time_(0),
+ first_packet_(true),
+ first_packet_rtp_time_(0),
+ first_packet_time_ms_(0),
+ loss_rate_(loss_rate),
+ lost_packets_(clock, rtt_ms),
+ resend_packet_count_(0),
+ no_loss_startup_(100),
+ end_of_file_(false),
+ reordering_(reordering),
+ reorder_buffer_() {
+ assert(clock);
+ assert(packet_source);
+ assert(packet_source->get());
+ packet_source_.swap(*packet_source);
+ srand(321);
+ }
+
+ virtual ~RtpPlayerImpl() {}
+
+ virtual int NextPacket(int64_t time_now) {
+ // Send any packets ready to be resent.
+ for (RawRtpPacket* packet = lost_packets_.NextPacketToResend(time_now);
+ packet != NULL; packet = lost_packets_.NextPacketToResend(time_now)) {
+ int ret = SendPacket(packet->data(), packet->length());
+ if (ret > 0) {
+ printf("Resend: %08x:%u\n", packet->ssrc(), packet->seq_num());
+ lost_packets_.LogPacketResent(packet);
+ resend_packet_count_++;
+ }
+ delete packet;
+ if (ret < 0) {
+ return ret;
+ }
+ }
+
+ // Send any packets from packet source.
+ if (!end_of_file_ && (TimeUntilNextPacket() == 0 || first_packet_)) {
+ if (first_packet_) {
+ if (!packet_source_->NextPacket(&next_packet_))
+ return 0;
+ first_packet_rtp_time_ = next_packet_.time_ms;
+ first_packet_time_ms_ = clock_->TimeInMilliseconds();
+ first_packet_ = false;
+ }
+
+ if (reordering_ && reorder_buffer_.get() == NULL) {
+ reorder_buffer_.reset(
+ new RawRtpPacket(next_packet_.data, next_packet_.length, 0, 0));
+ return 0;
+ }
+ int ret = SendPacket(next_packet_.data, next_packet_.length);
+ if (reorder_buffer_.get()) {
+ SendPacket(reorder_buffer_->data(), reorder_buffer_->length());
+ reorder_buffer_.reset(NULL);
+ }
+ if (ret < 0) {
+ return ret;
+ }
+
+ if (!packet_source_->NextPacket(&next_packet_)) {
+ end_of_file_ = true;
+ return 0;
+ } else if (next_packet_.length == 0) {
+ return 0;
+ }
+ // Keep pacing against the capture times recorded in the file.
+ next_rtp_time_ = static_cast<uint32_t>(next_packet_.time_ms);
+ }
+
+ if (end_of_file_ && lost_packets_.NumberOfPacketsToResend() == 0) {
+ return 1;
+ }
+ return 0;
+ }
+
+ virtual uint32_t TimeUntilNextPacket() const {
+ int64_t time_left = (next_rtp_time_ - first_packet_rtp_time_) -
+ (clock_->TimeInMilliseconds() - first_packet_time_ms_);
+ if (time_left < 0) {
+ return 0;
+ }
+ return static_cast<uint32_t>(time_left);
+ }
+
+ virtual void Print() const {
+ printf("Resent packets: %u\n", resend_packet_count_);
+ lost_packets_.Print();
+ }
+
+ private:
+ int SendPacket(const uint8_t* data, size_t length) {
+ assert(data);
+ assert(length > 0);
+
+ rtc::scoped_ptr<RtpHeaderParser> rtp_header_parser(
+ RtpHeaderParser::Create());
+ if (!rtp_header_parser->IsRtcp(data, length)) {
+ RTPHeader header;
+ if (!rtp_header_parser->Parse(data, length, &header)) {
+ return -1;
+ }
+ uint32_t ssrc = header.ssrc;
+ if (ssrc_handlers_.RegisterSsrc(ssrc, &lost_packets_, clock_) < 0) {
+ DEBUG_LOG1("Unable to register ssrc: %d", ssrc);
+ return -1;
+ }
+
+ if (no_loss_startup_ > 0) {
+ no_loss_startup_--;
+ } else if ((rand() + 1.0) / (RAND_MAX + 1.0) < loss_rate_) { // NOLINT
+ uint16_t seq_num = header.sequenceNumber;
+ lost_packets_.AddPacket(new RawRtpPacket(data, length, ssrc, seq_num));
+ DEBUG_LOG1("Dropped packet: %d!", header.header.sequenceNumber);
+ return 0;
+ }
+ }
+
+ ssrc_handlers_.IncomingPacket(data, length);
+ return 1;
+ }
+
+ SsrcHandlers ssrc_handlers_;
+ Clock* clock_;
+ rtc::scoped_ptr<test::RtpFileReader> packet_source_;
+ test::RtpPacket next_packet_;
+ uint32_t next_rtp_time_;
+ bool first_packet_;
+ int64_t first_packet_rtp_time_;
+ int64_t first_packet_time_ms_;
+ float loss_rate_;
+ LostPackets lost_packets_;
+ uint32_t resend_packet_count_;
+ uint32_t no_loss_startup_;
+ bool end_of_file_;
+ bool reordering_;
+ rtc::scoped_ptr<RawRtpPacket> reorder_buffer_;
+
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(RtpPlayerImpl);
+};
+
+RtpPlayerInterface* Create(const std::string& input_filename,
+ PayloadSinkFactoryInterface* payload_sink_factory,
+ Clock* clock,
+ const PayloadTypes& payload_types,
+ float loss_rate,
+ int64_t rtt_ms,
+ bool reordering) {
+ rtc::scoped_ptr<test::RtpFileReader> packet_source(
+ test::RtpFileReader::Create(test::RtpFileReader::kRtpDump,
+ input_filename));
+ if (packet_source.get() == NULL) {
+ packet_source.reset(test::RtpFileReader::Create(test::RtpFileReader::kPcap,
+ input_filename));
+ if (packet_source.get() == NULL) {
+ return NULL;
+ }
+ }
+
+ rtc::scoped_ptr<RtpPlayerImpl> impl(
+ new RtpPlayerImpl(payload_sink_factory, payload_types, clock,
+ &packet_source, loss_rate, rtt_ms, reordering));
+ return impl.release();
+}
+} // namespace rtpplayer
+} // namespace webrtc
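
A note on the loss model in RtpPlayerImpl::SendPacket() above: (rand() + 1.0) / (RAND_MAX + 1.0) is uniform on (0, 1], so once the 100-packet no-loss startup window has passed each packet is dropped with probability loss_rate_. A standalone sketch of just that drop test (the main() harness below is illustrative, not part of the patch):

#include <cstdio>
#include <cstdlib>

int main() {
  const float loss_rate = 0.1f;  // 10% simulated loss.
  srand(321);                    // Same fixed seed RtpPlayerImpl uses.
  int dropped = 0;
  const int kPackets = 100000;
  for (int i = 0; i < kPackets; ++i) {
    // Uniform draw on (0, 1] compared against the target loss rate.
    if ((rand() + 1.0) / (RAND_MAX + 1.0) < loss_rate)
      ++dropped;
  }
  printf("Dropped %d of %d packets (~%.1f%%)\n", dropped, kPackets,
         100.0 * dropped / kPackets);
  return 0;
}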
diff --git a/webrtc/modules/video_coding/test/rtp_player.h b/webrtc/modules/video_coding/test/rtp_player.h
new file mode 100644
index 0000000000..e50fb9ac70
--- /dev/null
+++ b/webrtc/modules/video_coding/test/rtp_player.h
@@ -0,0 +1,100 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_RTP_PLAYER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TEST_RTP_PLAYER_H_
+
+#include <string>
+#include <vector>
+
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "webrtc/modules/video_coding/include/video_coding_defines.h"
+
+namespace webrtc {
+class Clock;
+
+namespace rtpplayer {
+
+class PayloadCodecTuple {
+ public:
+ PayloadCodecTuple(uint8_t payload_type,
+ const std::string& codec_name,
+ VideoCodecType codec_type)
+ : name_(codec_name),
+ payload_type_(payload_type),
+ codec_type_(codec_type) {}
+
+ const std::string& name() const { return name_; }
+ uint8_t payload_type() const { return payload_type_; }
+ VideoCodecType codec_type() const { return codec_type_; }
+
+ private:
+ std::string name_;
+ uint8_t payload_type_;
+ VideoCodecType codec_type_;
+};
+
+typedef std::vector<PayloadCodecTuple> PayloadTypes;
+typedef std::vector<PayloadCodecTuple>::const_iterator PayloadTypesIterator;
+
+// Implemented by RtpPlayer and given to client as a means to retrieve
+// information about a specific RTP stream.
+class RtpStreamInterface {
+ public:
+ virtual ~RtpStreamInterface() {}
+
+ // Ask for missing packets to be resent.
+ virtual void ResendPackets(const uint16_t* sequence_numbers,
+ uint16_t length) = 0;
+
+ virtual uint32_t ssrc() const = 0;
+ virtual const PayloadTypes& payload_types() const = 0;
+};
+
+// Implemented by a sink. Wraps RtpData because its destructor is protected.
+class PayloadSinkInterface : public RtpData {
+ public:
+ virtual ~PayloadSinkInterface() {}
+};
+
+// Implemented to provide a sink for RTP data, such as hooking up a VCM to
+// the incoming RTP stream.
+class PayloadSinkFactoryInterface {
+ public:
+ virtual ~PayloadSinkFactoryInterface() {}
+
+ // Returns NULL if the sink could not be created. |stream| is guaranteed
+ // to outlive the returned sink. The returned object is owned by the
+ // caller (RtpPlayer).
+ virtual PayloadSinkInterface* Create(RtpStreamInterface* stream) = 0;
+};
+
+// The client's view of an RtpPlayer.
+class RtpPlayerInterface {
+ public:
+ virtual ~RtpPlayerInterface() {}
+
+ virtual int NextPacket(int64_t timeNow) = 0;
+ virtual uint32_t TimeUntilNextPacket() const = 0;
+ virtual void Print() const = 0;
+};
+
+RtpPlayerInterface* Create(const std::string& inputFilename,
+ PayloadSinkFactoryInterface* payloadSinkFactory,
+ Clock* clock,
+ const PayloadTypes& payload_types,
+ float lossRate,
+ int64_t rttMs,
+ bool reordering);
+
+} // namespace rtpplayer
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_TEST_RTP_PLAYER_H_
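
A minimal sketch of implementing PayloadSinkFactoryInterface, assuming a hypothetical DiscardingSink that swallows all payloads; VcmPayloadSinkFactory later in this change is the real, VCM-backed implementation:

#include "webrtc/modules/video_coding/test/rtp_player.h"

namespace {

class DiscardingSink : public webrtc::rtpplayer::PayloadSinkInterface {
 public:
  int32_t OnReceivedPayloadData(
      const uint8_t* payload_data,
      const size_t payload_size,
      const webrtc::WebRtcRTPHeader* rtp_header) override {
    return 0;  // Swallow the payload; a real sink would feed a decoder.
  }
  bool OnRecoveredPacket(const uint8_t* packet,
                         size_t packet_length) override {
    return true;  // No FEC handling.
  }
};

class DiscardingSinkFactory
    : public webrtc::rtpplayer::PayloadSinkFactoryInterface {
 public:
  webrtc::rtpplayer::PayloadSinkInterface* Create(
      webrtc::rtpplayer::RtpStreamInterface* stream) override {
    return new DiscardingSink();  // Ownership passes to the caller (RtpPlayer).
  }
};

}  // namespace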
diff --git a/webrtc/modules/video_coding/test/stream_generator.cc b/webrtc/modules/video_coding/test/stream_generator.cc
new file mode 100644
index 0000000000..167d55faff
--- /dev/null
+++ b/webrtc/modules/video_coding/test/stream_generator.cc
@@ -0,0 +1,130 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_coding/test/stream_generator.h"
+
+#include <string.h>
+
+#include <list>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/video_coding/packet.h"
+#include "webrtc/modules/video_coding/test/test_util.h"
+#include "webrtc/system_wrappers/include/clock.h"
+
+namespace webrtc {
+
+StreamGenerator::StreamGenerator(uint16_t start_seq_num, int64_t current_time)
+ : packets_(), sequence_number_(start_seq_num), start_time_(current_time) {}
+
+void StreamGenerator::Init(uint16_t start_seq_num, int64_t current_time) {
+ packets_.clear();
+ sequence_number_ = start_seq_num;
+ start_time_ = current_time;
+ memset(packet_buffer_, 0, sizeof(packet_buffer_));
+}
+
+void StreamGenerator::GenerateFrame(FrameType type,
+ int num_media_packets,
+ int num_empty_packets,
+ int64_t time_ms) {
+ uint32_t timestamp = 90 * (time_ms - start_time_);
+ for (int i = 0; i < num_media_packets; ++i) {
+ const int packet_size =
+ (kFrameSize + num_media_packets / 2) / num_media_packets;
+ bool marker_bit = (i == num_media_packets - 1);
+ packets_.push_back(GeneratePacket(sequence_number_, timestamp, packet_size,
+ (i == 0), marker_bit, type));
+ ++sequence_number_;
+ }
+ for (int i = 0; i < num_empty_packets; ++i) {
+ packets_.push_back(GeneratePacket(sequence_number_, timestamp, 0, false,
+ false, kEmptyFrame));
+ ++sequence_number_;
+ }
+}
+
+VCMPacket StreamGenerator::GeneratePacket(uint16_t sequence_number,
+ uint32_t timestamp,
+ unsigned int size,
+ bool first_packet,
+ bool marker_bit,
+ FrameType type) {
+ EXPECT_LT(size, kMaxPacketSize);
+ VCMPacket packet;
+ packet.seqNum = sequence_number;
+ packet.timestamp = timestamp;
+ packet.frameType = type;
+ packet.isFirstPacket = first_packet;
+ packet.markerBit = marker_bit;
+ packet.sizeBytes = size;
+ packet.dataPtr = packet_buffer_;
+ if (packet.isFirstPacket)
+ packet.completeNALU = kNaluStart;
+ else if (packet.markerBit)
+ packet.completeNALU = kNaluEnd;
+ else
+ packet.completeNALU = kNaluIncomplete;
+ return packet;
+}
+
+bool StreamGenerator::PopPacket(VCMPacket* packet, int index) {
+ std::list<VCMPacket>::iterator it = GetPacketIterator(index);
+ if (it == packets_.end())
+ return false;
+ if (packet)
+ *packet = (*it);
+ packets_.erase(it);
+ return true;
+}
+
+bool StreamGenerator::GetPacket(VCMPacket* packet, int index) {
+ std::list<VCMPacket>::iterator it = GetPacketIterator(index);
+ if (it == packets_.end())
+ return false;
+ if (packet)
+ *packet = (*it);
+ return true;
+}
+
+bool StreamGenerator::NextPacket(VCMPacket* packet) {
+ if (packets_.empty())
+ return false;
+ if (packet != NULL)
+ *packet = packets_.front();
+ packets_.pop_front();
+ return true;
+}
+
+void StreamGenerator::DropLastPacket() {
+ packets_.pop_back();
+}
+
+uint16_t StreamGenerator::NextSequenceNumber() const {
+ if (packets_.empty())
+ return sequence_number_;
+ return packets_.front().seqNum;
+}
+
+int StreamGenerator::PacketsRemaining() const {
+ return packets_.size();
+}
+
+std::list<VCMPacket>::iterator StreamGenerator::GetPacketIterator(int index) {
+ std::list<VCMPacket>::iterator it = packets_.begin();
+ for (int i = 0; i < index; ++i) {
+ ++it;
+ if (it == packets_.end())
+ break;
+ }
+ return it;
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/video_coding/test/stream_generator.h b/webrtc/modules/video_coding/test/stream_generator.h
new file mode 100644
index 0000000000..36b26db92e
--- /dev/null
+++ b/webrtc/modules/video_coding/test/stream_generator.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_STREAM_GENERATOR_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TEST_STREAM_GENERATOR_H_
+
+#include <list>
+
+#include "webrtc/modules/video_coding/packet.h"
+#include "webrtc/modules/video_coding/test/test_util.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+const unsigned int kDefaultBitrateKbps = 1000;
+const unsigned int kDefaultFrameRate = 25;
+const unsigned int kMaxPacketSize = 1500;
+const unsigned int kFrameSize =
+ (kDefaultBitrateKbps + kDefaultFrameRate * 4) / (kDefaultFrameRate * 8);
+const int kDefaultFramePeriodMs = 1000 / kDefaultFrameRate;
+
+class StreamGenerator {
+ public:
+ StreamGenerator(uint16_t start_seq_num, int64_t current_time);
+ void Init(uint16_t start_seq_num, int64_t current_time);
+
+ // |time_ms| is the capture time to stamp on the frame, in milliseconds.
+ // GenerateFrame() translates |time_ms| into a 90 kHz RTP timestamp and
+ // puts it on the frame.
+ void GenerateFrame(FrameType type,
+ int num_media_packets,
+ int num_empty_packets,
+ int64_t time_ms);
+
+ bool PopPacket(VCMPacket* packet, int index);
+ void DropLastPacket();
+
+ bool GetPacket(VCMPacket* packet, int index);
+
+ bool NextPacket(VCMPacket* packet);
+
+ uint16_t NextSequenceNumber() const;
+
+ int PacketsRemaining() const;
+
+ private:
+ VCMPacket GeneratePacket(uint16_t sequence_number,
+ uint32_t timestamp,
+ unsigned int size,
+ bool first_packet,
+ bool marker_bit,
+ FrameType type);
+
+ std::list<VCMPacket>::iterator GetPacketIterator(int index);
+
+ std::list<VCMPacket> packets_;
+ uint16_t sequence_number_;
+ int64_t start_time_;
+ uint8_t packet_buffer_[kMaxPacketSize];
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(StreamGenerator);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_TEST_STREAM_GENERATOR_H_
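
A short usage sketch of StreamGenerator (the SketchStreamGeneratorUsage helper is an illustrative assumption; values follow the 90 kHz conversion documented above, so a frame at 40 ms carries RTP timestamp 3600):

#include "webrtc/modules/video_coding/test/stream_generator.h"

void SketchStreamGeneratorUsage() {
  webrtc::StreamGenerator stream(/* start_seq_num= */ 0, /* current_time= */ 0);
  stream.GenerateFrame(webrtc::kVideoFrameKey, /* num_media_packets= */ 3,
                       /* num_empty_packets= */ 0, /* time_ms= */ 40);
  webrtc::VCMPacket packet;
  while (stream.NextPacket(&packet)) {
    // packet.timestamp == 90 * 40 == 3600; seqNum runs 0, 1, 2 and only the
    // last media packet has markerBit set.
  }
}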
diff --git a/webrtc/modules/video_coding/main/test/subfigure.m b/webrtc/modules/video_coding/test/subfigure.m
index eadfcb69bd..eadfcb69bd 100644
--- a/webrtc/modules/video_coding/main/test/subfigure.m
+++ b/webrtc/modules/video_coding/test/subfigure.m
diff --git a/webrtc/modules/video_coding/test/test_util.cc b/webrtc/modules/video_coding/test/test_util.cc
new file mode 100644
index 0000000000..7ff663e395
--- /dev/null
+++ b/webrtc/modules/video_coding/test/test_util.cc
@@ -0,0 +1,142 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_coding/test/test_util.h"
+
+#include <assert.h>
+#include <math.h>
+
+#include <iomanip>
+#include <sstream>
+
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/modules/video_coding/internal_defines.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+CmdArgs::CmdArgs()
+ : codecName("VP8"),
+ codecType(webrtc::kVideoCodecVP8),
+ width(352),
+ height(288),
+ rtt(0),
+ inputFile(webrtc::test::ProjectRootPath() + "/resources/foreman_cif.yuv"),
+ outputFile(webrtc::test::OutputPath() +
+ "video_coding_test_output_352x288.yuv") {}
+
+namespace {
+
+void SplitFilename(const std::string& filename,
+ std::string* basename,
+ std::string* extension) {
+ assert(basename);
+ assert(extension);
+
+ std::string::size_type idx;
+ idx = filename.rfind('.');
+
+ if (idx != std::string::npos) {
+ *basename = filename.substr(0, idx);
+ *extension = filename.substr(idx + 1);
+ } else {
+ *basename = filename;
+ *extension = "";
+ }
+}
+
+std::string AppendWidthHeightCount(const std::string& filename,
+ int width,
+ int height,
+ int count) {
+ std::string basename;
+ std::string extension;
+ SplitFilename(filename, &basename, &extension);
+ std::stringstream ss;
+ ss << basename << "_" << count << "." << width << "_" << height << "."
+ << extension;
+ return ss.str();
+}
+
+} // namespace
+
+FileOutputFrameReceiver::FileOutputFrameReceiver(
+ const std::string& base_out_filename,
+ uint32_t ssrc)
+ : out_filename_(),
+ out_file_(NULL),
+ timing_file_(NULL),
+ width_(0),
+ height_(0),
+ count_(0) {
+ std::string basename;
+ std::string extension;
+ if (base_out_filename.empty()) {
+ basename = webrtc::test::OutputPath() + "rtp_decoded";
+ extension = "yuv";
+ } else {
+ SplitFilename(base_out_filename, &basename, &extension);
+ }
+ std::stringstream ss;
+ ss << basename << "_" << std::hex << std::setw(8) << std::setfill('0') << ssrc
+ << "." << extension;
+ out_filename_ = ss.str();
+}
+
+FileOutputFrameReceiver::~FileOutputFrameReceiver() {
+ if (timing_file_ != NULL) {
+ fclose(timing_file_);
+ }
+ if (out_file_ != NULL) {
+ fclose(out_file_);
+ }
+}
+
+int32_t FileOutputFrameReceiver::FrameToRender(
+ webrtc::VideoFrame& video_frame) {
+ if (timing_file_ == NULL) {
+ std::string basename;
+ std::string extension;
+ SplitFilename(out_filename_, &basename, &extension);
+ timing_file_ = fopen((basename + "_renderTiming.txt").c_str(), "w");
+ if (timing_file_ == NULL) {
+ return -1;
+ }
+ }
+ if (out_file_ == NULL || video_frame.width() != width_ ||
+ video_frame.height() != height_) {
+ if (out_file_) {
+ fclose(out_file_);
+ }
+ printf("New size: %dx%d\n", video_frame.width(), video_frame.height());
+ width_ = video_frame.width();
+ height_ = video_frame.height();
+ std::string filename_with_width_height =
+ AppendWidthHeightCount(out_filename_, width_, height_, count_);
+ ++count_;
+ out_file_ = fopen(filename_with_width_height.c_str(), "wb");
+ if (out_file_ == NULL) {
+ return -1;
+ }
+ }
+ fprintf(timing_file_, "%u, %u\n", video_frame.timestamp(),
+ webrtc::MaskWord64ToUWord32(video_frame.render_time_ms()));
+ if (PrintVideoFrame(video_frame, out_file_) < 0) {
+ return -1;
+ }
+ return 0;
+}
+
+webrtc::RtpVideoCodecTypes ConvertCodecType(const char* plname) {
+ if (strncmp(plname, "VP8", 3) == 0) {
+ return webrtc::kRtpVideoVp8;
+ } else {
+ // Default value.
+ return webrtc::kRtpVideoGeneric;
+ }
+}
diff --git a/webrtc/modules/video_coding/test/test_util.h b/webrtc/modules/video_coding/test/test_util.h
new file mode 100644
index 0000000000..45b88b9b50
--- /dev/null
+++ b/webrtc/modules/video_coding/test/test_util.h
@@ -0,0 +1,86 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_TEST_UTIL_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TEST_TEST_UTIL_H_
+
+/*
+ * General declarations used throughout the VCM offline tests.
+ */
+
+#include <string>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/video_coding/include/video_coding.h"
+#include "webrtc/system_wrappers/include/event_wrapper.h"
+
+enum { kMaxNackListSize = 250 };
+enum { kMaxPacketAgeToNack = 450 };
+
+class NullEvent : public webrtc::EventWrapper {
+ public:
+ virtual ~NullEvent() {}
+
+ virtual bool Set() { return true; }
+
+ virtual bool Reset() { return true; }
+
+ virtual webrtc::EventTypeWrapper Wait(unsigned long max_time) { // NOLINT
+ return webrtc::kEventTimeout;
+ }
+
+ virtual bool StartTimer(bool periodic, unsigned long time) { // NOLINT
+ return true;
+ }
+
+ virtual bool StopTimer() { return true; }
+};
+
+class NullEventFactory : public webrtc::EventFactory {
+ public:
+ virtual ~NullEventFactory() {}
+
+ virtual webrtc::EventWrapper* CreateEvent() { return new NullEvent; }
+};
+
+class FileOutputFrameReceiver : public webrtc::VCMReceiveCallback {
+ public:
+ FileOutputFrameReceiver(const std::string& base_out_filename, uint32_t ssrc);
+ virtual ~FileOutputFrameReceiver();
+
+ // VCMReceiveCallback
+ virtual int32_t FrameToRender(webrtc::VideoFrame& video_frame); // NOLINT
+
+ private:
+ std::string out_filename_;
+ FILE* out_file_;
+ FILE* timing_file_;
+ int width_;
+ int height_;
+ int count_;
+
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(FileOutputFrameReceiver);
+};
+
+class CmdArgs {
+ public:
+ CmdArgs();
+
+ std::string codecName;
+ webrtc::VideoCodecType codecType;
+ int width;
+ int height;
+ int rtt;
+ std::string inputFile;
+ std::string outputFile;
+};
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_TEST_TEST_UTIL_H_
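
FileOutputFrameReceiver (implemented in test_util.cc above) appends the SSRC in zero-padded hex at construction, then re-opens the output with the dimensions and a counter when the first frame, or a resolution change, arrives. A hedged usage sketch (SketchReceiverNaming is a hypothetical helper):

#include "webrtc/modules/video_coding/test/test_util.h"

void SketchReceiverNaming() {
  // Internally targets "decoded_12345678.yuv"; once a 352x288 frame is
  // rendered, the file actually written is "decoded_12345678_0.352_288.yuv".
  FileOutputFrameReceiver receiver("decoded.yuv", 0x12345678);
}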
diff --git a/webrtc/modules/video_coding/test/tester_main.cc b/webrtc/modules/video_coding/test/tester_main.cc
new file mode 100644
index 0000000000..33ca82007d
--- /dev/null
+++ b/webrtc/modules/video_coding/test/tester_main.cc
@@ -0,0 +1,78 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdlib.h>
+#include <string.h>
+
+#include "gflags/gflags.h"
+#include "webrtc/modules/video_coding/include/video_coding.h"
+#include "webrtc/modules/video_coding/test/receiver_tests.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+DEFINE_string(codec, "VP8", "Codec to use (VP8 or I420).");
+DEFINE_int32(width, 352, "Width in pixels of the frames in the input file.");
+DEFINE_int32(height, 288, "Height in pixels of the frames in the input file.");
+DEFINE_int32(rtt, 0, "RTT (round-trip time), in milliseconds.");
+DEFINE_string(input_filename,
+ webrtc::test::ProjectRootPath() + "/resources/foreman_cif.yuv",
+ "Input file.");
+DEFINE_string(output_filename,
+ webrtc::test::OutputPath() +
+ "video_coding_test_output_352x288.yuv",
+ "Output file.");
+
+namespace webrtc {
+
+/*
+ * Build with EVENT_DEBUG defined
+ * to run the tests with simulated events.
+ */
+
+int vcmMacrosTests = 0;
+int vcmMacrosErrors = 0;
+
+int ParseArguments(CmdArgs* args) {
+ args->width = FLAGS_width;
+ args->height = FLAGS_height;
+ if (args->width < 1 || args->height < 1) {
+ return -1;
+ }
+ args->codecName = FLAGS_codec;
+ if (args->codecName == "VP8") {
+ args->codecType = kVideoCodecVP8;
+ } else if (args->codecName == "VP9") {
+ args->codecType = kVideoCodecVP9;
+ } else if (args->codecName == "I420") {
+ args->codecType = kVideoCodecI420;
+ } else {
+ printf("Invalid codec: %s\n", args->codecName.c_str());
+ return -1;
+ }
+ args->inputFile = FLAGS_input_filename;
+ args->outputFile = FLAGS_output_filename;
+ args->rtt = FLAGS_rtt;
+ return 0;
+}
+} // namespace webrtc
+
+int main(int argc, char** argv) {
+ // Initialize WebRTC fileutils.h so paths to resources can be resolved.
+ webrtc::test::SetExecutablePath(argv[0]);
+ google::ParseCommandLineFlags(&argc, &argv, true);
+
+ CmdArgs args;
+ if (webrtc::ParseArguments(&args) != 0) {
+ printf("Unable to parse input arguments\n");
+ return -1;
+ }
+
+ printf("Running video coding tests...\n");
+ return RtpPlay(args);
+}
diff --git a/webrtc/modules/video_coding/test/vcm_payload_sink_factory.cc b/webrtc/modules/video_coding/test/vcm_payload_sink_factory.cc
new file mode 100644
index 0000000000..c9ec372f41
--- /dev/null
+++ b/webrtc/modules/video_coding/test/vcm_payload_sink_factory.cc
@@ -0,0 +1,204 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_coding/test/vcm_payload_sink_factory.h"
+
+#include <assert.h>
+
+#include <algorithm>
+
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "webrtc/modules/video_coding/test/test_util.h"
+#include "webrtc/system_wrappers/include/clock.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+
+namespace webrtc {
+namespace rtpplayer {
+
+class VcmPayloadSinkFactory::VcmPayloadSink : public PayloadSinkInterface,
+ public VCMPacketRequestCallback {
+ public:
+ VcmPayloadSink(VcmPayloadSinkFactory* factory,
+ RtpStreamInterface* stream,
+ rtc::scoped_ptr<VideoCodingModule>* vcm,
+ rtc::scoped_ptr<FileOutputFrameReceiver>* frame_receiver)
+ : factory_(factory), stream_(stream), vcm_(), frame_receiver_() {
+ assert(factory);
+ assert(stream);
+ assert(vcm);
+ assert(vcm->get());
+ assert(frame_receiver);
+ assert(frame_receiver->get());
+ vcm_.swap(*vcm);
+ frame_receiver_.swap(*frame_receiver);
+ vcm_->RegisterPacketRequestCallback(this);
+ vcm_->RegisterReceiveCallback(frame_receiver_.get());
+ }
+
+ virtual ~VcmPayloadSink() { factory_->Remove(this); }
+
+ // PayloadSinkInterface
+ int32_t OnReceivedPayloadData(const uint8_t* payload_data,
+ const size_t payload_size,
+ const WebRtcRTPHeader* rtp_header) override {
+ return vcm_->IncomingPacket(payload_data, payload_size, *rtp_header);
+ }
+
+ bool OnRecoveredPacket(const uint8_t* packet, size_t packet_length) override {
+ // We currently don't handle FEC.
+ return true;
+ }
+
+ // VCMPacketRequestCallback
+ int32_t ResendPackets(const uint16_t* sequence_numbers,
+ uint16_t length) override {
+ stream_->ResendPackets(sequence_numbers, length);
+ return 0;
+ }
+
+ int DecodeAndProcess(bool should_decode, bool decode_dual_frame) {
+ if (should_decode) {
+ if (vcm_->Decode() < 0) {
+ return -1;
+ }
+ }
+ return Process() ? 0 : -1;
+ }
+
+ bool Process() {
+ if (vcm_->TimeUntilNextProcess() <= 0) {
+ if (vcm_->Process() < 0) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ bool Decode() {
+ vcm_->Decode(10000);
+ return true;
+ }
+
+ private:
+ VcmPayloadSinkFactory* factory_;
+ RtpStreamInterface* stream_;
+ rtc::scoped_ptr<VideoCodingModule> vcm_;
+ rtc::scoped_ptr<FileOutputFrameReceiver> frame_receiver_;
+
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(VcmPayloadSink);
+};
+
+VcmPayloadSinkFactory::VcmPayloadSinkFactory(
+ const std::string& base_out_filename,
+ Clock* clock,
+ bool protection_enabled,
+ VCMVideoProtection protection_method,
+ int64_t rtt_ms,
+ uint32_t render_delay_ms,
+ uint32_t min_playout_delay_ms)
+ : base_out_filename_(base_out_filename),
+ clock_(clock),
+ protection_enabled_(protection_enabled),
+ protection_method_(protection_method),
+ rtt_ms_(rtt_ms),
+ render_delay_ms_(render_delay_ms),
+ min_playout_delay_ms_(min_playout_delay_ms),
+ null_event_factory_(new NullEventFactory()),
+ crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
+ sinks_() {
+ assert(clock);
+ assert(crit_sect_.get());
+}
+
+VcmPayloadSinkFactory::~VcmPayloadSinkFactory() {
+ assert(sinks_.empty());
+}
+
+PayloadSinkInterface* VcmPayloadSinkFactory::Create(
+ RtpStreamInterface* stream) {
+ assert(stream);
+ CriticalSectionScoped cs(crit_sect_.get());
+
+ rtc::scoped_ptr<VideoCodingModule> vcm(
+ VideoCodingModule::Create(clock_, null_event_factory_.get()));
+ if (vcm.get() == NULL) {
+ return NULL;
+ }
+
+ const PayloadTypes& plt = stream->payload_types();
+ for (PayloadTypesIterator it = plt.begin(); it != plt.end(); ++it) {
+ if (it->codec_type() != kVideoCodecULPFEC &&
+ it->codec_type() != kVideoCodecRED) {
+ VideoCodec codec;
+ VideoCodingModule::Codec(it->codec_type(), &codec);
+ codec.plType = it->payload_type();
+ if (vcm->RegisterReceiveCodec(&codec, 1) < 0) {
+ return NULL;
+ }
+ }
+ }
+
+ vcm->SetChannelParameters(0, 0, rtt_ms_);
+ vcm->SetVideoProtection(protection_method_, protection_enabled_);
+ vcm->SetRenderDelay(render_delay_ms_);
+ vcm->SetMinimumPlayoutDelay(min_playout_delay_ms_);
+ vcm->SetNackSettings(kMaxNackListSize, kMaxPacketAgeToNack, 0);
+
+ rtc::scoped_ptr<FileOutputFrameReceiver> frame_receiver(
+ new FileOutputFrameReceiver(base_out_filename_, stream->ssrc()));
+ rtc::scoped_ptr<VcmPayloadSink> sink(
+ new VcmPayloadSink(this, stream, &vcm, &frame_receiver));
+
+ sinks_.push_back(sink.get());
+ return sink.release();
+}
+
+int VcmPayloadSinkFactory::DecodeAndProcessAll(bool decode_dual_frame) {
+ CriticalSectionScoped cs(crit_sect_.get());
+ assert(clock_);
+ bool should_decode = (clock_->TimeInMilliseconds() % 5) == 0;
+ for (Sinks::iterator it = sinks_.begin(); it != sinks_.end(); ++it) {
+ if ((*it)->DecodeAndProcess(should_decode, decode_dual_frame) < 0) {
+ return -1;
+ }
+ }
+ return 0;
+}
+
+bool VcmPayloadSinkFactory::ProcessAll() {
+ CriticalSectionScoped cs(crit_sect_.get());
+ for (Sinks::iterator it = sinks_.begin(); it != sinks_.end(); ++it) {
+ if (!(*it)->Process()) {
+ return false;
+ }
+ }
+ return true;
+}
+
+bool VcmPayloadSinkFactory::DecodeAll() {
+ CriticalSectionScoped cs(crit_sect_.get());
+ for (Sinks::iterator it = sinks_.begin(); it != sinks_.end(); ++it) {
+ if (!(*it)->Decode()) {
+ return false;
+ }
+ }
+ return true;
+}
+
+void VcmPayloadSinkFactory::Remove(VcmPayloadSink* sink) {
+ assert(sink);
+ CriticalSectionScoped cs(crit_sect_.get());
+ Sinks::iterator it = std::find(sinks_.begin(), sinks_.end(), sink);
+ assert(it != sinks_.end());
+ sinks_.erase(it);
+}
+
+} // namespace rtpplayer
+} // namespace webrtc
diff --git a/webrtc/modules/video_coding/test/vcm_payload_sink_factory.h b/webrtc/modules/video_coding/test/vcm_payload_sink_factory.h
new file mode 100644
index 0000000000..dae53b0c08
--- /dev/null
+++ b/webrtc/modules/video_coding/test/vcm_payload_sink_factory.h
@@ -0,0 +1,70 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_VCM_PAYLOAD_SINK_FACTORY_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TEST_VCM_PAYLOAD_SINK_FACTORY_H_
+
+#include <string>
+#include <vector>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/video_coding/include/video_coding_defines.h"
+#include "webrtc/modules/video_coding/test/rtp_player.h"
+
+class NullEventFactory;
+
+namespace webrtc {
+class Clock;
+class CriticalSectionWrapper;
+
+namespace rtpplayer {
+class VcmPayloadSinkFactory : public PayloadSinkFactoryInterface {
+ public:
+ VcmPayloadSinkFactory(const std::string& base_out_filename,
+ Clock* clock,
+ bool protection_enabled,
+ VCMVideoProtection protection_method,
+ int64_t rtt_ms,
+ uint32_t render_delay_ms,
+ uint32_t min_playout_delay_ms);
+ virtual ~VcmPayloadSinkFactory();
+
+ // PayloadSinkFactoryInterface
+ virtual PayloadSinkInterface* Create(RtpStreamInterface* stream);
+
+ int DecodeAndProcessAll(bool decode_dual_frame);
+ bool ProcessAll();
+ bool DecodeAll();
+
+ private:
+ class VcmPayloadSink;
+ friend class VcmPayloadSink;
+ typedef std::vector<VcmPayloadSink*> Sinks;
+
+ void Remove(VcmPayloadSink* sink);
+
+ std::string base_out_filename_;
+ Clock* clock_;
+ bool protection_enabled_;
+ VCMVideoProtection protection_method_;
+ int64_t rtt_ms_;
+ uint32_t render_delay_ms_;
+ uint32_t min_playout_delay_ms_;
+ rtc::scoped_ptr<NullEventFactory> null_event_factory_;
+ rtc::scoped_ptr<CriticalSectionWrapper> crit_sect_;
+ Sinks sinks_;
+
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(VcmPayloadSinkFactory);
+};
+} // namespace rtpplayer
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_TEST_VCM_PAYLOAD_SINK_FACTORY_H_
diff --git a/webrtc/modules/video_coding/test/video_rtp_play.cc b/webrtc/modules/video_coding/test/video_rtp_play.cc
new file mode 100644
index 0000000000..cb092e381e
--- /dev/null
+++ b/webrtc/modules/video_coding/test/video_rtp_play.cc
@@ -0,0 +1,88 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_coding/test/receiver_tests.h"
+#include "webrtc/modules/video_coding/test/vcm_payload_sink_factory.h"
+#include "webrtc/system_wrappers/include/trace.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+namespace {
+
+const bool kConfigProtectionEnabled = true;
+const webrtc::VCMVideoProtection kConfigProtectionMethod =
+ webrtc::kProtectionNack;
+const float kConfigLossRate = 0.0f;
+const bool kConfigReordering = false;
+const int64_t kConfigRttMs = 0;
+const uint32_t kConfigRenderDelayMs = 0;
+const uint32_t kConfigMinPlayoutDelayMs = 0;
+const int64_t kConfigMaxRuntimeMs = -1;
+const uint8_t kDefaultUlpFecPayloadType = 97;
+const uint8_t kDefaultRedPayloadType = 96;
+const uint8_t kDefaultVp8PayloadType = 100;
+} // namespace
+
+int RtpPlay(const CmdArgs& args) {
+ std::string trace_file = webrtc::test::OutputPath() + "receiverTestTrace.txt";
+ webrtc::Trace::CreateTrace();
+ webrtc::Trace::SetTraceFile(trace_file.c_str());
+ webrtc::Trace::set_level_filter(webrtc::kTraceAll);
+
+ webrtc::rtpplayer::PayloadTypes payload_types;
+ payload_types.push_back(webrtc::rtpplayer::PayloadCodecTuple(
+ kDefaultUlpFecPayloadType, "ULPFEC", webrtc::kVideoCodecULPFEC));
+ payload_types.push_back(webrtc::rtpplayer::PayloadCodecTuple(
+ kDefaultRedPayloadType, "RED", webrtc::kVideoCodecRED));
+ payload_types.push_back(webrtc::rtpplayer::PayloadCodecTuple(
+ kDefaultVp8PayloadType, "VP8", webrtc::kVideoCodecVP8));
+
+ std::string output_file = args.outputFile;
+ if (output_file.empty())
+ output_file = webrtc::test::OutputPath() + "RtpPlay_decoded.yuv";
+
+ webrtc::SimulatedClock clock(0);
+ webrtc::rtpplayer::VcmPayloadSinkFactory factory(
+ output_file, &clock, kConfigProtectionEnabled, kConfigProtectionMethod,
+ kConfigRttMs, kConfigRenderDelayMs, kConfigMinPlayoutDelayMs);
+ rtc::scoped_ptr<webrtc::rtpplayer::RtpPlayerInterface> rtp_player(
+ webrtc::rtpplayer::Create(args.inputFile, &factory, &clock, payload_types,
+ kConfigLossRate, kConfigRttMs,
+ kConfigReordering));
+ if (rtp_player.get() == NULL) {
+ return -1;
+ }
+
+ int ret = 0;
+ while ((ret = rtp_player->NextPacket(clock.TimeInMilliseconds())) == 0) {
+ ret = factory.DecodeAndProcessAll(true);
+ if (ret < 0 || (kConfigMaxRuntimeMs > -1 &&
+ clock.TimeInMilliseconds() >= kConfigMaxRuntimeMs)) {
+ break;
+ }
+ clock.AdvanceTimeMilliseconds(1);
+ }
+
+  rtp_player->Print();
+  webrtc::Trace::ReturnTrace();
+
+  switch (ret) {
+    case 1:
+      printf("Success\n");
+      return 0;
+    case -1:
+      printf("Failed\n");
+      return -1;
+    case 0:
+      printf("Timeout\n");
+      return -1;
+  }
+
+  return 0;
+}
diff --git a/webrtc/modules/video_coding/test/video_source.h b/webrtc/modules/video_coding/test/video_source.h
new file mode 100644
index 0000000000..19d7f50b26
--- /dev/null
+++ b/webrtc/modules/video_coding/test/video_source.h
@@ -0,0 +1,85 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_VIDEO_SOURCE_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TEST_VIDEO_SOURCE_H_
+
+#include <string>
+
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/typedefs.h"
+
+enum VideoSize {
+ kUndefined,
+ kSQCIF, // 128*96 = 12 288
+ kQQVGA, // 160*120 = 19 200
+ kQCIF, // 176*144 = 25 344
+ kCGA, // 320*200 = 64 000
+ kQVGA, // 320*240 = 76 800
+ kSIF, // 352*240 = 84 480
+ kWQVGA, // 400*240 = 96 000
+ kCIF, // 352*288 = 101 376
+ kW288p, // 512*288 = 147 456 (WCIF)
+ k448p, // 576*448 = 281 088
+ kVGA, // 640*480 = 307 200
+ k432p, // 720*432 = 311 040
+ kW432p, // 768*432 = 331 776
+ k4SIF, // 704*480 = 337 920
+ kW448p, // 768*448 = 344 064
+ kNTSC, // 720*480 = 345 600
+ kFW448p, // 800*448 = 358 400
+ kWVGA, // 800*480 = 384 000
+ k4CIF, // 704*576 = 405 504
+ kSVGA, // 800*600 = 480 000
+ kW544p, // 960*544 = 522 240
+ kW576p, // 1024*576 = 589 824 (W4CIF)
+ kHD, // 960*720 = 691 200
+ kXGA, // 1024*768 = 786 432
+ kWHD, // 1280*720 = 921 600
+ kFullHD, // 1440*1080 = 1 555 200
+ kWFullHD, // 1920*1080 = 2 073 600
+
+ kNumberOfVideoSizes
+};
+
+class VideoSource {
+ public:
+ VideoSource();
+ VideoSource(std::string fileName,
+ VideoSize size,
+ float frameRate,
+ webrtc::VideoType type = webrtc::kI420);
+ VideoSource(std::string fileName,
+ uint16_t width,
+ uint16_t height,
+ float frameRate = 30,
+ webrtc::VideoType type = webrtc::kI420);
+
+ std::string GetFileName() const { return _fileName; }
+ uint16_t GetWidth() const { return _width; }
+ uint16_t GetHeight() const { return _height; }
+ webrtc::VideoType GetType() const { return _type; }
+ float GetFrameRate() const { return _frameRate; }
+ int GetWidthHeight(VideoSize size);
+
+ // Returns the filename with the path (including the leading slash) removed.
+ std::string GetName() const;
+
+ size_t GetFrameLength() const;
+
+ private:
+ std::string _fileName;
+ uint16_t _width;
+ uint16_t _height;
+ webrtc::VideoType _type;
+ float _frameRate;
+};
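+
+// Illustrative note: for the default webrtc::kI420 type a frame occupies
+// width * height * 3 / 2 bytes (e.g. 640x480 -> 460800 bytes), which is the
+// value GetFrameLength() is expected to report.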
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_TEST_VIDEO_SOURCE_H_
diff --git a/webrtc/modules/video_coding/timestamp_map.cc b/webrtc/modules/video_coding/timestamp_map.cc
new file mode 100644
index 0000000000..97d2777658
--- /dev/null
+++ b/webrtc/modules/video_coding/timestamp_map.cc
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <assert.h>
+#include <stdlib.h>
+
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/video_coding/timestamp_map.h"
+
+namespace webrtc {
+
+VCMTimestampMap::VCMTimestampMap(size_t capacity)
+ : ring_buffer_(new TimestampDataTuple[capacity]),
+ capacity_(capacity),
+ next_add_idx_(0),
+ next_pop_idx_(0) {}
+
+VCMTimestampMap::~VCMTimestampMap() {}
+
+void VCMTimestampMap::Add(uint32_t timestamp, VCMFrameInformation* data) {
+ ring_buffer_[next_add_idx_].timestamp = timestamp;
+ ring_buffer_[next_add_idx_].data = data;
+ next_add_idx_ = (next_add_idx_ + 1) % capacity_;
+
+ if (next_add_idx_ == next_pop_idx_) {
+ // Circular list full; forget oldest entry.
+ next_pop_idx_ = (next_pop_idx_ + 1) % capacity_;
+ }
+}
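+
+// Illustrative consequence of the overwrite above: with capacity 3, adding
+// timestamps 1, 2 and 3 already evicts the entry for 1 (the ring buffer
+// holds at most capacity - 1 live entries), so Pop(1) returns nullptr while
+// Pop(3) still succeeds.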
+
+VCMFrameInformation* VCMTimestampMap::Pop(uint32_t timestamp) {
+ while (!IsEmpty()) {
+ if (ring_buffer_[next_pop_idx_].timestamp == timestamp) {
+ // Found start time for this timestamp.
+ VCMFrameInformation* data = ring_buffer_[next_pop_idx_].data;
+ ring_buffer_[next_pop_idx_].data = nullptr;
+ next_pop_idx_ = (next_pop_idx_ + 1) % capacity_;
+ return data;
+ } else if (IsNewerTimestamp(ring_buffer_[next_pop_idx_].timestamp,
+ timestamp)) {
+ // The timestamp we are looking for is not in the list.
+ return nullptr;
+ }
+
+ // Not in this position, check next (and forget this position).
+ next_pop_idx_ = (next_pop_idx_ + 1) % capacity_;
+ }
+
+ // Could not find matching timestamp in list.
+ return nullptr;
+}
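+
+// Note that entries older than the requested timestamp are forgotten during
+// the search above, so a second Pop() with the same timestamp returns
+// nullptr.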
+
+bool VCMTimestampMap::IsEmpty() const {
+ return (next_add_idx_ == next_pop_idx_);
+}
+} // namespace webrtc
diff --git a/webrtc/modules/video_coding/timestamp_map.h b/webrtc/modules/video_coding/timestamp_map.h
new file mode 100644
index 0000000000..435d05895c
--- /dev/null
+++ b/webrtc/modules/video_coding/timestamp_map.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TIMESTAMP_MAP_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TIMESTAMP_MAP_H_
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+struct VCMFrameInformation;
+
+class VCMTimestampMap {
+ public:
+ explicit VCMTimestampMap(size_t capacity);
+ ~VCMTimestampMap();
+
+ // Empty the map.
+ void Reset();
+
+ void Add(uint32_t timestamp, VCMFrameInformation* data);
+ VCMFrameInformation* Pop(uint32_t timestamp);
+
+ private:
+ struct TimestampDataTuple {
+ uint32_t timestamp;
+ VCMFrameInformation* data;
+ };
+ bool IsEmpty() const;
+
+ rtc::scoped_ptr<TimestampDataTuple[]> ring_buffer_;
+ const size_t capacity_;
+ size_t next_add_idx_;
+ size_t next_pop_idx_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_TIMESTAMP_MAP_H_
diff --git a/webrtc/modules/video_coding/timing.cc b/webrtc/modules/video_coding/timing.cc
new file mode 100644
index 0000000000..08dc307524
--- /dev/null
+++ b/webrtc/modules/video_coding/timing.cc
@@ -0,0 +1,284 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_coding/timing.h"
+
+#include <algorithm>
+
+#include "webrtc/modules/video_coding/internal_defines.h"
+#include "webrtc/modules/video_coding/jitter_buffer_common.h"
+#include "webrtc/system_wrappers/include/clock.h"
+#include "webrtc/system_wrappers/include/metrics.h"
+#include "webrtc/system_wrappers/include/timestamp_extrapolator.h"
+
+namespace webrtc {
+
+VCMTiming::VCMTiming(Clock* clock, VCMTiming* master_timing)
+ : crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
+ clock_(clock),
+ master_(false),
+ ts_extrapolator_(),
+ codec_timer_(),
+ render_delay_ms_(kDefaultRenderDelayMs),
+ min_playout_delay_ms_(0),
+ jitter_delay_ms_(0),
+ current_delay_ms_(0),
+ last_decode_ms_(0),
+ prev_frame_timestamp_(0),
+ num_decoded_frames_(0),
+ num_delayed_decoded_frames_(0),
+ first_decoded_frame_ms_(-1),
+ sum_missed_render_deadline_ms_(0) {
+ if (master_timing == NULL) {
+ master_ = true;
+ ts_extrapolator_ = new TimestampExtrapolator(clock_->TimeInMilliseconds());
+ } else {
+ ts_extrapolator_ = master_timing->ts_extrapolator_;
+ }
+}
+
+VCMTiming::~VCMTiming() {
+ UpdateHistograms();
+ if (master_) {
+ delete ts_extrapolator_;
+ }
+ delete crit_sect_;
+}
+
+void VCMTiming::UpdateHistograms() const {
+ CriticalSectionScoped cs(crit_sect_);
+ if (num_decoded_frames_ == 0) {
+ return;
+ }
+ int64_t elapsed_sec =
+ (clock_->TimeInMilliseconds() - first_decoded_frame_ms_) / 1000;
+ if (elapsed_sec < metrics::kMinRunTimeInSeconds) {
+ return;
+ }
+ RTC_HISTOGRAM_COUNTS_SPARSE_100(
+ "WebRTC.Video.DecodedFramesPerSecond",
+ static_cast<int>((num_decoded_frames_ / elapsed_sec) + 0.5f));
+ RTC_HISTOGRAM_PERCENTAGE_SPARSE(
+ "WebRTC.Video.DelayedFramesToRenderer",
+ num_delayed_decoded_frames_ * 100 / num_decoded_frames_);
+ if (num_delayed_decoded_frames_ > 0) {
+ RTC_HISTOGRAM_COUNTS_SPARSE_1000(
+ "WebRTC.Video.DelayedFramesToRenderer_AvgDelayInMs",
+ sum_missed_render_deadline_ms_ / num_delayed_decoded_frames_);
+ }
+}
+
+void VCMTiming::Reset() {
+ CriticalSectionScoped cs(crit_sect_);
+ ts_extrapolator_->Reset(clock_->TimeInMilliseconds());
+ codec_timer_.Reset();
+ render_delay_ms_ = kDefaultRenderDelayMs;
+ min_playout_delay_ms_ = 0;
+ jitter_delay_ms_ = 0;
+ current_delay_ms_ = 0;
+ prev_frame_timestamp_ = 0;
+}
+
+void VCMTiming::ResetDecodeTime() {
+ CriticalSectionScoped lock(crit_sect_);
+ codec_timer_.Reset();
+}
+
+void VCMTiming::set_render_delay(uint32_t render_delay_ms) {
+ CriticalSectionScoped cs(crit_sect_);
+ render_delay_ms_ = render_delay_ms;
+}
+
+void VCMTiming::set_min_playout_delay(uint32_t min_playout_delay_ms) {
+ CriticalSectionScoped cs(crit_sect_);
+ min_playout_delay_ms_ = min_playout_delay_ms;
+}
+
+void VCMTiming::SetJitterDelay(uint32_t jitter_delay_ms) {
+ CriticalSectionScoped cs(crit_sect_);
+ if (jitter_delay_ms != jitter_delay_ms_) {
+ jitter_delay_ms_ = jitter_delay_ms;
+ // When in initial state, set current delay to minimum delay.
+ if (current_delay_ms_ == 0) {
+ current_delay_ms_ = jitter_delay_ms_;
+ }
+ }
+}
+
+void VCMTiming::UpdateCurrentDelay(uint32_t frame_timestamp) {
+ CriticalSectionScoped cs(crit_sect_);
+ uint32_t target_delay_ms = TargetDelayInternal();
+
+ if (current_delay_ms_ == 0) {
+ // Not initialized, set current delay to target.
+ current_delay_ms_ = target_delay_ms;
+ } else if (target_delay_ms != current_delay_ms_) {
+ int64_t delay_diff_ms =
+ static_cast<int64_t>(target_delay_ms) - current_delay_ms_;
+    // Never change the delay by more than 100 ms per second. If we change
+    // the delay in too large steps we will get noticeable freezes. By
+    // limiting the change we can instead increase the delay in smaller
+    // steps, which is experienced as the video playing in slow motion.
+    // When lowering the delay the video is played at a faster pace.
+ int64_t max_change_ms = 0;
+ if (frame_timestamp < 0x0000ffff && prev_frame_timestamp_ > 0xffff0000) {
+ // wrap
+ max_change_ms = kDelayMaxChangeMsPerS *
+ (frame_timestamp + (static_cast<int64_t>(1) << 32) -
+ prev_frame_timestamp_) /
+ 90000;
+ } else {
+ max_change_ms = kDelayMaxChangeMsPerS *
+ (frame_timestamp - prev_frame_timestamp_) / 90000;
+ }
+ if (max_change_ms <= 0) {
+      // Any change of less than 1 ms is truncated and postponed to a later
+      // frame. A negative change is caused by reordering and is ignored.
+ return;
+ }
+ delay_diff_ms = std::max(delay_diff_ms, -max_change_ms);
+ delay_diff_ms = std::min(delay_diff_ms, max_change_ms);
+
+ current_delay_ms_ = current_delay_ms_ + static_cast<int32_t>(delay_diff_ms);
+ }
+ prev_frame_timestamp_ = frame_timestamp;
+}
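+
+// Worked example of the clamp above (illustrative): with frames one second
+// apart (90000 ticks at 90 kHz), max_change_ms = 100 * 90000 / 90000 = 100,
+// so a 250 ms increase of the target delay is spread over three such
+// updates.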
+
+void VCMTiming::UpdateCurrentDelay(int64_t render_time_ms,
+ int64_t actual_decode_time_ms) {
+ CriticalSectionScoped cs(crit_sect_);
+ uint32_t target_delay_ms = TargetDelayInternal();
+ int64_t delayed_ms = actual_decode_time_ms -
+ (render_time_ms - MaxDecodeTimeMs() - render_delay_ms_);
+ if (delayed_ms < 0) {
+ return;
+ }
+ if (current_delay_ms_ + delayed_ms <= target_delay_ms) {
+ current_delay_ms_ += static_cast<uint32_t>(delayed_ms);
+ } else {
+ current_delay_ms_ = target_delay_ms;
+ }
+}
+
+int32_t VCMTiming::StopDecodeTimer(uint32_t time_stamp,
+ int32_t decode_time_ms,
+ int64_t now_ms,
+ int64_t render_time_ms) {
+ CriticalSectionScoped cs(crit_sect_);
+ codec_timer_.MaxFilter(decode_time_ms, now_ms);
+ assert(decode_time_ms >= 0);
+ last_decode_ms_ = decode_time_ms;
+
+ // Update stats.
+ ++num_decoded_frames_;
+ if (num_decoded_frames_ == 1) {
+ first_decoded_frame_ms_ = now_ms;
+ }
+ int time_until_rendering_ms = render_time_ms - render_delay_ms_ - now_ms;
+ if (time_until_rendering_ms < 0) {
+ sum_missed_render_deadline_ms_ += -time_until_rendering_ms;
+ ++num_delayed_decoded_frames_;
+ }
+ return 0;
+}
+
+void VCMTiming::IncomingTimestamp(uint32_t time_stamp, int64_t now_ms) {
+ CriticalSectionScoped cs(crit_sect_);
+ ts_extrapolator_->Update(now_ms, time_stamp);
+}
+
+int64_t VCMTiming::RenderTimeMs(uint32_t frame_timestamp,
+ int64_t now_ms) const {
+ CriticalSectionScoped cs(crit_sect_);
+ const int64_t render_time_ms = RenderTimeMsInternal(frame_timestamp, now_ms);
+ return render_time_ms;
+}
+
+int64_t VCMTiming::RenderTimeMsInternal(uint32_t frame_timestamp,
+ int64_t now_ms) const {
+ int64_t estimated_complete_time_ms =
+ ts_extrapolator_->ExtrapolateLocalTime(frame_timestamp);
+ if (estimated_complete_time_ms == -1) {
+ estimated_complete_time_ms = now_ms;
+ }
+
+ // Make sure that we have at least the playout delay.
+ uint32_t actual_delay = std::max(current_delay_ms_, min_playout_delay_ms_);
+ return estimated_complete_time_ms + actual_delay;
+}
+
+// Must be called from inside a critical section.
+int32_t VCMTiming::MaxDecodeTimeMs(
+ FrameType frame_type /*= kVideoFrameDelta*/) const {
+ const int32_t decode_time_ms = codec_timer_.RequiredDecodeTimeMs(frame_type);
+ assert(decode_time_ms >= 0);
+ return decode_time_ms;
+}
+
+uint32_t VCMTiming::MaxWaitingTime(int64_t render_time_ms,
+ int64_t now_ms) const {
+ CriticalSectionScoped cs(crit_sect_);
+
+ const int64_t max_wait_time_ms =
+ render_time_ms - now_ms - MaxDecodeTimeMs() - render_delay_ms_;
+
+ if (max_wait_time_ms < 0) {
+ return 0;
+ }
+ return static_cast<uint32_t>(max_wait_time_ms);
+}
+
+bool VCMTiming::EnoughTimeToDecode(
+ uint32_t available_processing_time_ms) const {
+ CriticalSectionScoped cs(crit_sect_);
+ int32_t max_decode_time_ms = MaxDecodeTimeMs();
+ if (max_decode_time_ms < 0) {
+ // Haven't decoded any frames yet, try decoding one to get an estimate
+ // of the decode time.
+ return true;
+ } else if (max_decode_time_ms == 0) {
+    // The decode time is less than 1 ms; use 1 ms for now since we don't
+    // have any better precision. Count ticks later?
+ max_decode_time_ms = 1;
+ }
+ return static_cast<int32_t>(available_processing_time_ms) -
+ max_decode_time_ms >
+ 0;
+}
+
+uint32_t VCMTiming::TargetVideoDelay() const {
+ CriticalSectionScoped cs(crit_sect_);
+ return TargetDelayInternal();
+}
+
+uint32_t VCMTiming::TargetDelayInternal() const {
+ return std::max(min_playout_delay_ms_,
+ jitter_delay_ms_ + MaxDecodeTimeMs() + render_delay_ms_);
+}
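+
+// Illustrative numbers: with min_playout_delay = 0, jitter_delay = 20 ms, a
+// measured max decode time of 10 ms and the default 10 ms render delay, the
+// target delay is 20 + 10 + 10 = 40 ms.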
+
+void VCMTiming::GetTimings(int* decode_ms,
+ int* max_decode_ms,
+ int* current_delay_ms,
+ int* target_delay_ms,
+ int* jitter_buffer_ms,
+ int* min_playout_delay_ms,
+ int* render_delay_ms) const {
+ CriticalSectionScoped cs(crit_sect_);
+ *decode_ms = last_decode_ms_;
+ *max_decode_ms = MaxDecodeTimeMs();
+ *current_delay_ms = current_delay_ms_;
+ *target_delay_ms = TargetDelayInternal();
+ *jitter_buffer_ms = jitter_delay_ms_;
+ *min_playout_delay_ms = min_playout_delay_ms_;
+ *render_delay_ms = render_delay_ms_;
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/video_coding/timing.h b/webrtc/modules/video_coding/timing.h
new file mode 100644
index 0000000000..a4d0cf4543
--- /dev/null
+++ b/webrtc/modules/video_coding/timing.h
@@ -0,0 +1,126 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TIMING_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TIMING_H_
+
+#include "webrtc/base/thread_annotations.h"
+#include "webrtc/modules/video_coding/codec_timer.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+class Clock;
+class TimestampExtrapolator;
+
+class VCMTiming {
+ public:
+  // Pass the primary timing component if this instance is the dual timing
+  // component; pass NULL (the default) to create the primary component.
+ explicit VCMTiming(Clock* clock, VCMTiming* master_timing = NULL);
+ ~VCMTiming();
+
+ // Resets the timing to the initial state.
+ void Reset();
+ void ResetDecodeTime();
+
+ // Set the amount of time needed to render an image. Defaults to 10 ms.
+ void set_render_delay(uint32_t render_delay_ms);
+
+ // Set the minimum time the video must be delayed on the receiver to
+ // get the desired jitter buffer level.
+ void SetJitterDelay(uint32_t required_delay_ms);
+
+ // Set the minimum playout delay required to sync video with audio.
+ void set_min_playout_delay(uint32_t min_playout_delay);
+
+ // Increases or decreases the current delay to get closer to the target delay.
+ // Calculates how long it has been since the previous call to this function,
+ // and increases/decreases the delay in proportion to the time difference.
+ void UpdateCurrentDelay(uint32_t frame_timestamp);
+
+ // Increases or decreases the current delay to get closer to the target delay.
+ // Given the actual decode time in ms and the render time in ms for a frame,
+ // this function calculates how late the frame is and increases the delay
+ // accordingly.
+ void UpdateCurrentDelay(int64_t render_time_ms,
+ int64_t actual_decode_time_ms);
+
+ // Stops the decoder timer, should be called when the decoder returns a frame
+ // or when the decoded frame callback is called.
+ int32_t StopDecodeTimer(uint32_t time_stamp,
+ int32_t decode_time_ms,
+ int64_t now_ms,
+ int64_t render_time_ms);
+
+ // Used to report that a frame is passed to decoding. Updates the timestamp
+ // filter which is used to map between timestamps and receiver system time.
+ void IncomingTimestamp(uint32_t time_stamp, int64_t last_packet_time_ms);
+ // Returns the receiver system time when the frame with timestamp
+ // frame_timestamp should be rendered, assuming that the system time currently
+ // is now_ms.
+ int64_t RenderTimeMs(uint32_t frame_timestamp, int64_t now_ms) const;
+
+ // Returns the maximum time in ms that we can wait for a frame to become
+ // complete before we must pass it to the decoder.
+ uint32_t MaxWaitingTime(int64_t render_time_ms, int64_t now_ms) const;
+
+ // Returns the current target delay which is required delay + decode time +
+ // render delay.
+ uint32_t TargetVideoDelay() const;
+
+ // Calculates whether or not there is enough time to decode a frame given a
+ // certain amount of processing time.
+ bool EnoughTimeToDecode(uint32_t available_processing_time_ms) const;
+
+ // Return current timing information.
+ void GetTimings(int* decode_ms,
+ int* max_decode_ms,
+ int* current_delay_ms,
+ int* target_delay_ms,
+ int* jitter_buffer_ms,
+ int* min_playout_delay_ms,
+ int* render_delay_ms) const;
+
+ enum { kDefaultRenderDelayMs = 10 };
+ enum { kDelayMaxChangeMsPerS = 100 };
+
+ protected:
+ int32_t MaxDecodeTimeMs(FrameType frame_type = kVideoFrameDelta) const
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ int64_t RenderTimeMsInternal(uint32_t frame_timestamp, int64_t now_ms) const
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ uint32_t TargetDelayInternal() const EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ private:
+ void UpdateHistograms() const;
+
+ CriticalSectionWrapper* crit_sect_;
+ Clock* const clock_;
+ bool master_ GUARDED_BY(crit_sect_);
+ TimestampExtrapolator* ts_extrapolator_ GUARDED_BY(crit_sect_);
+ VCMCodecTimer codec_timer_ GUARDED_BY(crit_sect_);
+ uint32_t render_delay_ms_ GUARDED_BY(crit_sect_);
+ uint32_t min_playout_delay_ms_ GUARDED_BY(crit_sect_);
+ uint32_t jitter_delay_ms_ GUARDED_BY(crit_sect_);
+ uint32_t current_delay_ms_ GUARDED_BY(crit_sect_);
+ int last_decode_ms_ GUARDED_BY(crit_sect_);
+ uint32_t prev_frame_timestamp_ GUARDED_BY(crit_sect_);
+
+ // Statistics.
+ size_t num_decoded_frames_ GUARDED_BY(crit_sect_);
+ size_t num_delayed_decoded_frames_ GUARDED_BY(crit_sect_);
+ int64_t first_decoded_frame_ms_ GUARDED_BY(crit_sect_);
+ uint64_t sum_missed_render_deadline_ms_ GUARDED_BY(crit_sect_);
+};
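+
+// Minimal usage sketch (illustrative; the values follow timing_unittest.cc):
+//
+//   SimulatedClock clock(0);
+//   VCMTiming timing(&clock);
+//   timing.SetJitterDelay(20);
+//   timing.IncomingTimestamp(rtp_timestamp, clock.TimeInMilliseconds());
+//   timing.UpdateCurrentDelay(rtp_timestamp);
+//   int64_t render_ms =
+//       timing.RenderTimeMs(rtp_timestamp, clock.TimeInMilliseconds());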
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_TIMING_H_
diff --git a/webrtc/modules/video_coding/timing_unittest.cc b/webrtc/modules/video_coding/timing_unittest.cc
new file mode 100644
index 0000000000..2e8df83683
--- /dev/null
+++ b/webrtc/modules/video_coding/timing_unittest.cc
@@ -0,0 +1,149 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <math.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+#include "webrtc/modules/video_coding/include/video_coding.h"
+#include "webrtc/modules/video_coding/internal_defines.h"
+#include "webrtc/modules/video_coding/timing.h"
+#include "webrtc/modules/video_coding/test/test_util.h"
+#include "webrtc/system_wrappers/include/clock.h"
+#include "webrtc/system_wrappers/include/trace.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+namespace webrtc {
+
+TEST(ReceiverTiming, Tests) {
+ SimulatedClock clock(0);
+ VCMTiming timing(&clock);
+ uint32_t waitTime = 0;
+ uint32_t jitterDelayMs = 0;
+ uint32_t maxDecodeTimeMs = 0;
+ uint32_t timeStamp = 0;
+
+ timing.Reset();
+
+ timing.UpdateCurrentDelay(timeStamp);
+
+ timing.Reset();
+
+ timing.IncomingTimestamp(timeStamp, clock.TimeInMilliseconds());
+ jitterDelayMs = 20;
+ timing.SetJitterDelay(jitterDelayMs);
+ timing.UpdateCurrentDelay(timeStamp);
+ timing.set_render_delay(0);
+ waitTime = timing.MaxWaitingTime(
+ timing.RenderTimeMs(timeStamp, clock.TimeInMilliseconds()),
+ clock.TimeInMilliseconds());
+ // First update initializes the render time. Since we have no decode delay
+ // we get waitTime = renderTime - now - renderDelay = jitter.
+ EXPECT_EQ(jitterDelayMs, waitTime);
+
+ jitterDelayMs += VCMTiming::kDelayMaxChangeMsPerS + 10;
+ timeStamp += 90000;
+ clock.AdvanceTimeMilliseconds(1000);
+ timing.SetJitterDelay(jitterDelayMs);
+ timing.UpdateCurrentDelay(timeStamp);
+ waitTime = timing.MaxWaitingTime(
+ timing.RenderTimeMs(timeStamp, clock.TimeInMilliseconds()),
+ clock.TimeInMilliseconds());
+ // Since we gradually increase the delay we only get 100 ms every second.
+ EXPECT_EQ(jitterDelayMs - 10, waitTime);
+
+ timeStamp += 90000;
+ clock.AdvanceTimeMilliseconds(1000);
+ timing.UpdateCurrentDelay(timeStamp);
+ waitTime = timing.MaxWaitingTime(
+ timing.RenderTimeMs(timeStamp, clock.TimeInMilliseconds()),
+ clock.TimeInMilliseconds());
+ EXPECT_EQ(waitTime, jitterDelayMs);
+
+ // 300 incoming frames without jitter, verify that this gives the exact wait
+ // time.
+ for (int i = 0; i < 300; i++) {
+ clock.AdvanceTimeMilliseconds(1000 / 25);
+ timeStamp += 90000 / 25;
+ timing.IncomingTimestamp(timeStamp, clock.TimeInMilliseconds());
+ }
+ timing.UpdateCurrentDelay(timeStamp);
+ waitTime = timing.MaxWaitingTime(
+ timing.RenderTimeMs(timeStamp, clock.TimeInMilliseconds()),
+ clock.TimeInMilliseconds());
+ EXPECT_EQ(waitTime, jitterDelayMs);
+
+ // Add decode time estimates.
+ for (int i = 0; i < 10; i++) {
+ int64_t startTimeMs = clock.TimeInMilliseconds();
+ clock.AdvanceTimeMilliseconds(10);
+ timing.StopDecodeTimer(
+ timeStamp, clock.TimeInMilliseconds() - startTimeMs,
+ clock.TimeInMilliseconds(),
+ timing.RenderTimeMs(timeStamp, clock.TimeInMilliseconds()));
+ timeStamp += 90000 / 25;
+ clock.AdvanceTimeMilliseconds(1000 / 25 - 10);
+ timing.IncomingTimestamp(timeStamp, clock.TimeInMilliseconds());
+ }
+ maxDecodeTimeMs = 10;
+ timing.SetJitterDelay(jitterDelayMs);
+ clock.AdvanceTimeMilliseconds(1000);
+ timeStamp += 90000;
+ timing.UpdateCurrentDelay(timeStamp);
+ waitTime = timing.MaxWaitingTime(
+ timing.RenderTimeMs(timeStamp, clock.TimeInMilliseconds()),
+ clock.TimeInMilliseconds());
+ EXPECT_EQ(waitTime, jitterDelayMs);
+
+ uint32_t minTotalDelayMs = 200;
+ timing.set_min_playout_delay(minTotalDelayMs);
+ clock.AdvanceTimeMilliseconds(5000);
+ timeStamp += 5 * 90000;
+ timing.UpdateCurrentDelay(timeStamp);
+ const int kRenderDelayMs = 10;
+ timing.set_render_delay(kRenderDelayMs);
+ waitTime = timing.MaxWaitingTime(
+ timing.RenderTimeMs(timeStamp, clock.TimeInMilliseconds()),
+ clock.TimeInMilliseconds());
+  // The wait time should be minTotalDelayMs - the decode time (10 ms) - the
+  // render delay (10 ms).
+ EXPECT_EQ(waitTime, minTotalDelayMs - maxDecodeTimeMs - kRenderDelayMs);
+ // The total video delay should be equal to the min total delay.
+ EXPECT_EQ(minTotalDelayMs, timing.TargetVideoDelay());
+
+ // Reset playout delay.
+ timing.set_min_playout_delay(0);
+ clock.AdvanceTimeMilliseconds(5000);
+ timeStamp += 5 * 90000;
+ timing.UpdateCurrentDelay(timeStamp);
+}
+
+TEST(ReceiverTiming, WrapAround) {
+ const int kFramerate = 25;
+ SimulatedClock clock(0);
+ VCMTiming timing(&clock);
+  // Provoke a wrap-around. The fourth frame will have wrapped at 25 fps.
+ uint32_t timestamp = 0xFFFFFFFFu - 3 * 90000 / kFramerate;
+ for (int i = 0; i < 4; ++i) {
+ timing.IncomingTimestamp(timestamp, clock.TimeInMilliseconds());
+ clock.AdvanceTimeMilliseconds(1000 / kFramerate);
+ timestamp += 90000 / kFramerate;
+ int64_t render_time =
+ timing.RenderTimeMs(0xFFFFFFFFu, clock.TimeInMilliseconds());
+ EXPECT_EQ(3 * 1000 / kFramerate, render_time);
+    render_time = timing.RenderTimeMs(89u,  // One millisecond later in 90 kHz.
+ clock.TimeInMilliseconds());
+ EXPECT_EQ(3 * 1000 / kFramerate + 1, render_time);
+ }
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/video_coding/utility/frame_dropper.cc b/webrtc/modules/video_coding/utility/frame_dropper.cc
index 5262c5b88a..a0aa67be4e 100644
--- a/webrtc/modules/video_coding/utility/frame_dropper.cc
+++ b/webrtc/modules/video_coding/utility/frame_dropper.cc
@@ -8,12 +8,11 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/video_coding/utility/include/frame_dropper.h"
+#include "webrtc/modules/video_coding/utility/frame_dropper.h"
#include "webrtc/system_wrappers/include/trace.h"
-namespace webrtc
-{
+namespace webrtc {
const float kDefaultKeyFrameSizeAvgKBits = 0.9f;
const float kDefaultKeyFrameRatio = 0.99f;
@@ -22,339 +21,266 @@ const float kDefaultDropRatioMax = 0.96f;
const float kDefaultMaxTimeToDropFrames = 4.0f; // In seconds.
FrameDropper::FrameDropper()
-:
-_keyFrameSizeAvgKbits(kDefaultKeyFrameSizeAvgKBits),
-_keyFrameRatio(kDefaultKeyFrameRatio),
-_dropRatio(kDefaultDropRatioAlpha, kDefaultDropRatioMax),
-_enabled(true),
-_max_time_drops(kDefaultMaxTimeToDropFrames)
-{
- Reset();
+ : _keyFrameSizeAvgKbits(kDefaultKeyFrameSizeAvgKBits),
+ _keyFrameRatio(kDefaultKeyFrameRatio),
+ _dropRatio(kDefaultDropRatioAlpha, kDefaultDropRatioMax),
+ _enabled(true),
+ _max_time_drops(kDefaultMaxTimeToDropFrames) {
+ Reset();
}
FrameDropper::FrameDropper(float max_time_drops)
-:
-_keyFrameSizeAvgKbits(kDefaultKeyFrameSizeAvgKBits),
-_keyFrameRatio(kDefaultKeyFrameRatio),
-_dropRatio(kDefaultDropRatioAlpha, kDefaultDropRatioMax),
-_enabled(true),
-_max_time_drops(max_time_drops)
-{
- Reset();
+ : _keyFrameSizeAvgKbits(kDefaultKeyFrameSizeAvgKBits),
+ _keyFrameRatio(kDefaultKeyFrameRatio),
+ _dropRatio(kDefaultDropRatioAlpha, kDefaultDropRatioMax),
+ _enabled(true),
+ _max_time_drops(max_time_drops) {
+ Reset();
}
-void
-FrameDropper::Reset()
-{
- _keyFrameRatio.Reset(0.99f);
- _keyFrameRatio.Apply(1.0f, 1.0f/300.0f); // 1 key frame every 10th second in 30 fps
- _keyFrameSizeAvgKbits.Reset(0.9f);
- _keyFrameCount = 0;
- _accumulator = 0.0f;
- _accumulatorMax = 150.0f; // assume 300 kb/s and 0.5 s window
- _targetBitRate = 300.0f;
- _incoming_frame_rate = 30;
- _keyFrameSpreadFrames = 0.5f * _incoming_frame_rate;
- _dropNext = false;
- _dropRatio.Reset(0.9f);
- _dropRatio.Apply(0.0f, 0.0f); // Initialize to 0
- _dropCount = 0;
- _windowSize = 0.5f;
- _wasBelowMax = true;
- _fastMode = false; // start with normal (non-aggressive) mode
- // Cap for the encoder buffer level/accumulator, in secs.
- _cap_buffer_size = 3.0f;
- // Cap on maximum amount of dropped frames between kept frames, in secs.
- _max_time_drops = 4.0f;
+void FrameDropper::Reset() {
+ _keyFrameRatio.Reset(0.99f);
+ _keyFrameRatio.Apply(
+ 1.0f, 1.0f / 300.0f); // 1 key frame every 10th second in 30 fps
+ _keyFrameSizeAvgKbits.Reset(0.9f);
+ _keyFrameCount = 0;
+ _accumulator = 0.0f;
+ _accumulatorMax = 150.0f; // assume 300 kb/s and 0.5 s window
+ _targetBitRate = 300.0f;
+ _incoming_frame_rate = 30;
+ _keyFrameSpreadFrames = 0.5f * _incoming_frame_rate;
+ _dropNext = false;
+ _dropRatio.Reset(0.9f);
+ _dropRatio.Apply(0.0f, 0.0f); // Initialize to 0
+ _dropCount = 0;
+ _windowSize = 0.5f;
+ _wasBelowMax = true;
+ _fastMode = false; // start with normal (non-aggressive) mode
+ // Cap for the encoder buffer level/accumulator, in secs.
+ _cap_buffer_size = 3.0f;
+ // Cap on maximum amount of dropped frames between kept frames, in secs.
+ _max_time_drops = 4.0f;
}
-void
-FrameDropper::Enable(bool enable)
-{
- _enabled = enable;
+void FrameDropper::Enable(bool enable) {
+ _enabled = enable;
}
-void
-FrameDropper::Fill(size_t frameSizeBytes, bool deltaFrame)
-{
- if (!_enabled)
- {
- return;
- }
- float frameSizeKbits = 8.0f * static_cast<float>(frameSizeBytes) / 1000.0f;
- if (!deltaFrame && !_fastMode) // fast mode does not treat key-frames any different
- {
- _keyFrameSizeAvgKbits.Apply(1, frameSizeKbits);
- _keyFrameRatio.Apply(1.0, 1.0);
- if (frameSizeKbits > _keyFrameSizeAvgKbits.filtered())
- {
- // Remove the average key frame size since we
- // compensate for key frames when adding delta
- // frames.
- frameSizeKbits -= _keyFrameSizeAvgKbits.filtered();
- }
- else
- {
- // Shouldn't be negative, so zero is the lower bound.
- frameSizeKbits = 0;
- }
- if (_keyFrameRatio.filtered() > 1e-5 &&
- 1 / _keyFrameRatio.filtered() < _keyFrameSpreadFrames)
- {
- // We are sending key frames more often than our upper bound for
- // how much we allow the key frame compensation to be spread
- // out in time. Therefor we must use the key frame ratio rather
- // than keyFrameSpreadFrames.
- _keyFrameCount =
- static_cast<int32_t>(1 / _keyFrameRatio.filtered() + 0.5);
- }
- else
- {
- // Compensate for the key frame the following frames
- _keyFrameCount = static_cast<int32_t>(_keyFrameSpreadFrames + 0.5);
- }
+void FrameDropper::Fill(size_t frameSizeBytes, bool deltaFrame) {
+ if (!_enabled) {
+ return;
+ }
+ float frameSizeKbits = 8.0f * static_cast<float>(frameSizeBytes) / 1000.0f;
+  if (!deltaFrame &&
+      !_fastMode) {  // Fast mode does not treat key frames any differently.
+ _keyFrameSizeAvgKbits.Apply(1, frameSizeKbits);
+ _keyFrameRatio.Apply(1.0, 1.0);
+ if (frameSizeKbits > _keyFrameSizeAvgKbits.filtered()) {
+ // Remove the average key frame size since we
+ // compensate for key frames when adding delta
+ // frames.
+ frameSizeKbits -= _keyFrameSizeAvgKbits.filtered();
+ } else {
+ // Shouldn't be negative, so zero is the lower bound.
+ frameSizeKbits = 0;
}
- else
- {
- // Decrease the keyFrameRatio
- _keyFrameRatio.Apply(1.0, 0.0);
+ if (_keyFrameRatio.filtered() > 1e-5 &&
+ 1 / _keyFrameRatio.filtered() < _keyFrameSpreadFrames) {
+ // We are sending key frames more often than our upper bound for
+ // how much we allow the key frame compensation to be spread
+      // out in time. Therefore we must use the key frame ratio rather
+ // than keyFrameSpreadFrames.
+ _keyFrameCount =
+ static_cast<int32_t>(1 / _keyFrameRatio.filtered() + 0.5);
+ } else {
+ // Compensate for the key frame the following frames
+ _keyFrameCount = static_cast<int32_t>(_keyFrameSpreadFrames + 0.5);
}
- // Change the level of the accumulator (bucket)
- _accumulator += frameSizeKbits;
- CapAccumulator();
+ } else {
+ // Decrease the keyFrameRatio
+ _keyFrameRatio.Apply(1.0, 0.0);
+ }
+ // Change the level of the accumulator (bucket)
+ _accumulator += frameSizeKbits;
+ CapAccumulator();
}
-void
-FrameDropper::Leak(uint32_t inputFrameRate)
-{
- if (!_enabled)
- {
- return;
- }
- if (inputFrameRate < 1)
- {
- return;
- }
- if (_targetBitRate < 0.0f)
- {
- return;
- }
- _keyFrameSpreadFrames = 0.5f * inputFrameRate;
- // T is the expected bits per frame (target). If all frames were the same size,
- // we would get T bits per frame. Notice that T is also weighted to be able to
- // force a lower frame rate if wanted.
- float T = _targetBitRate / inputFrameRate;
- if (_keyFrameCount > 0)
- {
- // Perform the key frame compensation
- if (_keyFrameRatio.filtered() > 0 &&
- 1 / _keyFrameRatio.filtered() < _keyFrameSpreadFrames)
- {
- T -= _keyFrameSizeAvgKbits.filtered() * _keyFrameRatio.filtered();
- }
- else
- {
- T -= _keyFrameSizeAvgKbits.filtered() / _keyFrameSpreadFrames;
- }
- _keyFrameCount--;
- }
- _accumulator -= T;
- if (_accumulator < 0.0f)
- {
- _accumulator = 0.0f;
+void FrameDropper::Leak(uint32_t inputFrameRate) {
+ if (!_enabled) {
+ return;
+ }
+ if (inputFrameRate < 1) {
+ return;
+ }
+ if (_targetBitRate < 0.0f) {
+ return;
+ }
+ _keyFrameSpreadFrames = 0.5f * inputFrameRate;
+  // T is the expected bits per frame (target). If all frames were the same
+  // size, we would get T bits per frame. Notice that T is also weighted to
+  // be able to force a lower frame rate if wanted.
+ float T = _targetBitRate / inputFrameRate;
+ if (_keyFrameCount > 0) {
+ // Perform the key frame compensation
+ if (_keyFrameRatio.filtered() > 0 &&
+ 1 / _keyFrameRatio.filtered() < _keyFrameSpreadFrames) {
+ T -= _keyFrameSizeAvgKbits.filtered() * _keyFrameRatio.filtered();
+ } else {
+ T -= _keyFrameSizeAvgKbits.filtered() / _keyFrameSpreadFrames;
}
- UpdateRatio();
+ _keyFrameCount--;
+ }
+ _accumulator -= T;
+ if (_accumulator < 0.0f) {
+ _accumulator = 0.0f;
+ }
+ UpdateRatio();
}
-void
-FrameDropper::UpdateNack(uint32_t nackBytes)
-{
- if (!_enabled)
- {
- return;
- }
- _accumulator += static_cast<float>(nackBytes) * 8.0f / 1000.0f;
+void FrameDropper::UpdateNack(uint32_t nackBytes) {
+ if (!_enabled) {
+ return;
+ }
+ _accumulator += static_cast<float>(nackBytes) * 8.0f / 1000.0f;
}
-void
-FrameDropper::FillBucket(float inKbits, float outKbits)
-{
- _accumulator += (inKbits - outKbits);
+void FrameDropper::FillBucket(float inKbits, float outKbits) {
+ _accumulator += (inKbits - outKbits);
}
-void
-FrameDropper::UpdateRatio()
-{
- if (_accumulator > 1.3f * _accumulatorMax)
- {
- // Too far above accumulator max, react faster
- _dropRatio.UpdateBase(0.8f);
+void FrameDropper::UpdateRatio() {
+ if (_accumulator > 1.3f * _accumulatorMax) {
+ // Too far above accumulator max, react faster
+ _dropRatio.UpdateBase(0.8f);
+ } else {
+ // Go back to normal reaction
+ _dropRatio.UpdateBase(0.9f);
+ }
+ if (_accumulator > _accumulatorMax) {
+ // We are above accumulator max, and should ideally
+ // drop a frame. Increase the dropRatio and drop
+ // the frame later.
+ if (_wasBelowMax) {
+ _dropNext = true;
}
- else
- {
- // Go back to normal reaction
- _dropRatio.UpdateBase(0.9f);
+ if (_fastMode) {
+ // always drop in aggressive mode
+ _dropNext = true;
}
- if (_accumulator > _accumulatorMax)
- {
- // We are above accumulator max, and should ideally
- // drop a frame. Increase the dropRatio and drop
- // the frame later.
- if (_wasBelowMax)
- {
- _dropNext = true;
- }
- if (_fastMode)
- {
- // always drop in aggressive mode
- _dropNext = true;
- }
- _dropRatio.Apply(1.0f, 1.0f);
- _dropRatio.UpdateBase(0.9f);
- }
- else
- {
- _dropRatio.Apply(1.0f, 0.0f);
- }
- _wasBelowMax = _accumulator < _accumulatorMax;
+ _dropRatio.Apply(1.0f, 1.0f);
+ _dropRatio.UpdateBase(0.9f);
+ } else {
+ _dropRatio.Apply(1.0f, 0.0f);
+ }
+ _wasBelowMax = _accumulator < _accumulatorMax;
}
-// This function signals when to drop frames to the caller. It makes use of the dropRatio
+// This function signals when to drop frames to the caller. It makes use of
+// the dropRatio
// to smooth out the drops over time.
-bool
-FrameDropper::DropFrame()
-{
- if (!_enabled)
- {
- return false;
+bool FrameDropper::DropFrame() {
+ if (!_enabled) {
+ return false;
+ }
+ if (_dropNext) {
+ _dropNext = false;
+ _dropCount = 0;
+ }
+
+ if (_dropRatio.filtered() >= 0.5f) { // Drops per keep
+ // limit is the number of frames we should drop between each kept frame
+ // to keep our drop ratio. limit is positive in this case.
+ float denom = 1.0f - _dropRatio.filtered();
+ if (denom < 1e-5) {
+ denom = 1e-5f;
+ }
+ int32_t limit = static_cast<int32_t>(1.0f / denom - 1.0f + 0.5f);
+ // Put a bound on the max amount of dropped frames between each kept
+ // frame, in terms of frame rate and window size (secs).
+ int max_limit = static_cast<int>(_incoming_frame_rate * _max_time_drops);
+ if (limit > max_limit) {
+ limit = max_limit;
}
- if (_dropNext)
- {
- _dropNext = false;
+ if (_dropCount < 0) {
+ // Reset the _dropCount since it was negative and should be positive.
+ if (_dropRatio.filtered() > 0.4f) {
+ _dropCount = -_dropCount;
+ } else {
_dropCount = 0;
+ }
}
-
- if (_dropRatio.filtered() >= 0.5f) // Drops per keep
- {
- // limit is the number of frames we should drop between each kept frame
- // to keep our drop ratio. limit is positive in this case.
- float denom = 1.0f - _dropRatio.filtered();
- if (denom < 1e-5)
- {
- denom = (float)1e-5;
- }
- int32_t limit = static_cast<int32_t>(1.0f / denom - 1.0f + 0.5f);
- // Put a bound on the max amount of dropped frames between each kept
- // frame, in terms of frame rate and window size (secs).
- int max_limit = static_cast<int>(_incoming_frame_rate *
- _max_time_drops);
- if (limit > max_limit) {
- limit = max_limit;
- }
- if (_dropCount < 0)
- {
- // Reset the _dropCount since it was negative and should be positive.
- if (_dropRatio.filtered() > 0.4f)
- {
- _dropCount = -_dropCount;
- }
- else
- {
- _dropCount = 0;
- }
- }
- if (_dropCount < limit)
- {
- // As long we are below the limit we should drop frames.
- _dropCount++;
- return true;
- }
- else
- {
- // Only when we reset _dropCount a frame should be kept.
- _dropCount = 0;
- return false;
- }
+ if (_dropCount < limit) {
+      // As long as we are below the limit we should drop frames.
+ _dropCount++;
+ return true;
+ } else {
+ // Only when we reset _dropCount a frame should be kept.
+ _dropCount = 0;
+ return false;
}
- else if (_dropRatio.filtered() > 0.0f &&
- _dropRatio.filtered() < 0.5f) // Keeps per drop
- {
- // limit is the number of frames we should keep between each drop
- // in order to keep the drop ratio. limit is negative in this case,
- // and the _dropCount is also negative.
- float denom = _dropRatio.filtered();
- if (denom < 1e-5)
- {
- denom = (float)1e-5;
- }
- int32_t limit = -static_cast<int32_t>(1.0f / denom - 1.0f + 0.5f);
- if (_dropCount > 0)
- {
- // Reset the _dropCount since we have a positive
- // _dropCount, and it should be negative.
- if (_dropRatio.filtered() < 0.6f)
- {
- _dropCount = -_dropCount;
- }
- else
- {
- _dropCount = 0;
- }
- }
- if (_dropCount > limit)
- {
- if (_dropCount == 0)
- {
- // Drop frames when we reset _dropCount.
- _dropCount--;
- return true;
- }
- else
- {
- // Keep frames as long as we haven't reached limit.
- _dropCount--;
- return false;
- }
- }
- else
- {
- _dropCount = 0;
- return false;
- }
+ } else if (_dropRatio.filtered() > 0.0f &&
+ _dropRatio.filtered() < 0.5f) { // Keeps per drop
+ // limit is the number of frames we should keep between each drop
+ // in order to keep the drop ratio. limit is negative in this case,
+ // and the _dropCount is also negative.
+ float denom = _dropRatio.filtered();
+ if (denom < 1e-5) {
+ denom = 1e-5f;
}
- _dropCount = 0;
- return false;
+ int32_t limit = -static_cast<int32_t>(1.0f / denom - 1.0f + 0.5f);
+ if (_dropCount > 0) {
+ // Reset the _dropCount since we have a positive
+ // _dropCount, and it should be negative.
+ if (_dropRatio.filtered() < 0.6f) {
+ _dropCount = -_dropCount;
+ } else {
+ _dropCount = 0;
+ }
+ }
+ if (_dropCount > limit) {
+ if (_dropCount == 0) {
+ // Drop frames when we reset _dropCount.
+ _dropCount--;
+ return true;
+ } else {
+ // Keep frames as long as we haven't reached limit.
+ _dropCount--;
+ return false;
+ }
+ } else {
+ _dropCount = 0;
+ return false;
+ }
+ }
+ _dropCount = 0;
+ return false;
- // A simpler version, unfiltered and quicker
- //bool dropNext = _dropNext;
- //_dropNext = false;
- //return dropNext;
+ // A simpler version, unfiltered and quicker
+ // bool dropNext = _dropNext;
+ // _dropNext = false;
+ // return dropNext;
}
-void
-FrameDropper::SetRates(float bitRate, float incoming_frame_rate)
-{
- // Bit rate of -1 means infinite bandwidth.
- _accumulatorMax = bitRate * _windowSize; // bitRate * windowSize (in seconds)
- if (_targetBitRate > 0.0f && bitRate < _targetBitRate && _accumulator > _accumulatorMax)
- {
- // Rescale the accumulator level if the accumulator max decreases
- _accumulator = bitRate / _targetBitRate * _accumulator;
- }
- _targetBitRate = bitRate;
- CapAccumulator();
- _incoming_frame_rate = incoming_frame_rate;
+void FrameDropper::SetRates(float bitRate, float incoming_frame_rate) {
+ // Bit rate of -1 means infinite bandwidth.
+ _accumulatorMax = bitRate * _windowSize; // bitRate * windowSize (in seconds)
+ if (_targetBitRate > 0.0f && bitRate < _targetBitRate &&
+ _accumulator > _accumulatorMax) {
+ // Rescale the accumulator level if the accumulator max decreases
+ _accumulator = bitRate / _targetBitRate * _accumulator;
+ }
+ _targetBitRate = bitRate;
+ CapAccumulator();
+ _incoming_frame_rate = incoming_frame_rate;
}
-float
-FrameDropper::ActualFrameRate(uint32_t inputFrameRate) const
-{
- if (!_enabled)
- {
- return static_cast<float>(inputFrameRate);
- }
- return inputFrameRate * (1.0f - _dropRatio.filtered());
+float FrameDropper::ActualFrameRate(uint32_t inputFrameRate) const {
+ if (!_enabled) {
+ return static_cast<float>(inputFrameRate);
+ }
+ return inputFrameRate * (1.0f - _dropRatio.filtered());
}
// Put a cap on the accumulator, i.e., don't let it grow beyond some level.
@@ -366,5 +292,4 @@ void FrameDropper::CapAccumulator() {
_accumulator = max_accumulator;
}
}
-
-}
+} // namespace webrtc
diff --git a/webrtc/modules/video_coding/utility/frame_dropper.h b/webrtc/modules/video_coding/utility/frame_dropper.h
new file mode 100644
index 0000000000..7ec85ea880
--- /dev/null
+++ b/webrtc/modules/video_coding/utility/frame_dropper.h
@@ -0,0 +1,96 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_UTILITY_FRAME_DROPPER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_UTILITY_FRAME_DROPPER_H_
+
+#include <cstddef>
+
+#include "webrtc/base/exp_filter.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// The FrameDropper implements a variant of the leaky bucket algorithm,
+// keeping track of when to drop frames to avoid bit rate overuse when the
+// encoder can't keep to its target bit rate.
+class FrameDropper {
+ public:
+ FrameDropper();
+ explicit FrameDropper(float max_time_drops);
+ virtual ~FrameDropper() {}
+
+  // Resets the FrameDropper to its initial state, restoring all internal
+  // filters and counters to their default values.
+ virtual void Reset();
+
+ virtual void Enable(bool enable);
+  // Decides whether it's time to drop a frame in order to reach the target
+  // frame rate. Must be called for every frame.
+ //
+ // Return value : True if we should drop the current frame
+ virtual bool DropFrame();
+ // Updates the FrameDropper with the size of the latest encoded
+ // frame. The FrameDropper calculates a new drop ratio (can be
+ // seen as the probability to drop a frame) and updates its
+ // internal statistics.
+ //
+ // Input:
+ // - frameSizeBytes : The size of the latest frame
+ // returned from the encoder.
+  //          - deltaFrame        : True if the encoder returned
+  //                                a delta frame, false for a key frame.
+ virtual void Fill(size_t frameSizeBytes, bool deltaFrame);
+
+ virtual void Leak(uint32_t inputFrameRate);
+
+ void UpdateNack(uint32_t nackBytes);
+
+ // Sets the target bit rate and the frame rate produced by
+ // the camera.
+ //
+ // Input:
+ // - bitRate : The target bit rate
+ virtual void SetRates(float bitRate, float incoming_frame_rate);
+
+ // Return value : The current average frame rate produced
+ // if the DropFrame() function is used as
+ // instruction of when to drop frames.
+ virtual float ActualFrameRate(uint32_t inputFrameRate) const;
+
+ private:
+ void FillBucket(float inKbits, float outKbits);
+ void UpdateRatio();
+ void CapAccumulator();
+
+ rtc::ExpFilter _keyFrameSizeAvgKbits;
+ rtc::ExpFilter _keyFrameRatio;
+ float _keyFrameSpreadFrames;
+ int32_t _keyFrameCount;
+ float _accumulator;
+ float _accumulatorMax;
+ float _targetBitRate;
+ bool _dropNext;
+ rtc::ExpFilter _dropRatio;
+ int32_t _dropCount;
+ float _windowSize;
+ float _incoming_frame_rate;
+ bool _wasBelowMax;
+ bool _enabled;
+ bool _fastMode;
+ float _cap_buffer_size;
+ float _max_time_drops;
+};  // end of FrameDropper class
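+
+// Typical per-frame loop (illustrative sketch; EncodeFrame() stands in for a
+// caller-provided encoder and is not part of this API):
+//
+//   FrameDropper dropper;
+//   dropper.SetRates(300.0f, 30.0f);  // 300 kbps target, 30 fps input.
+//   dropper.Leak(30);                 // Drain the leaky bucket each frame.
+//   if (!dropper.DropFrame()) {
+//     size_t bytes = EncodeFrame();
+//     dropper.Fill(bytes, /*deltaFrame=*/true);
+//   }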
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_UTILITY_FRAME_DROPPER_H_
diff --git a/webrtc/modules/video_coding/utility/include/frame_dropper.h b/webrtc/modules/video_coding/utility/include/frame_dropper.h
deleted file mode 100644
index 2b78a7264f..0000000000
--- a/webrtc/modules/video_coding/utility/include/frame_dropper.h
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_UTILITY_INCLUDE_FRAME_DROPPER_H_
-#define WEBRTC_MODULES_VIDEO_CODING_UTILITY_INCLUDE_FRAME_DROPPER_H_
-
-#include <cstddef>
-
-#include "webrtc/base/exp_filter.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc
-{
-
-// The Frame Dropper implements a variant of the leaky bucket algorithm
-// for keeping track of when to drop frames to avoid bit rate
-// over use when the encoder can't keep its bit rate.
-class FrameDropper
-{
-public:
- FrameDropper();
- explicit FrameDropper(float max_time_drops);
- virtual ~FrameDropper() {}
-
- // Resets the FrameDropper to its initial state.
- // This means that the frameRateWeight is set to its
- // default value as well.
- virtual void Reset();
-
- virtual void Enable(bool enable);
- // Answers the question if it's time to drop a frame
- // if we want to reach a given frame rate. Must be
- // called for every frame.
- //
- // Return value : True if we should drop the current frame
- virtual bool DropFrame();
- // Updates the FrameDropper with the size of the latest encoded
- // frame. The FrameDropper calculates a new drop ratio (can be
- // seen as the probability to drop a frame) and updates its
- // internal statistics.
- //
- // Input:
- // - frameSizeBytes : The size of the latest frame
- // returned from the encoder.
- // - deltaFrame : True if the encoder returned
- // a key frame.
- virtual void Fill(size_t frameSizeBytes, bool deltaFrame);
-
- virtual void Leak(uint32_t inputFrameRate);
-
- void UpdateNack(uint32_t nackBytes);
-
- // Sets the target bit rate and the frame rate produced by
- // the camera.
- //
- // Input:
- // - bitRate : The target bit rate
- virtual void SetRates(float bitRate, float incoming_frame_rate);
-
- // Return value : The current average frame rate produced
- // if the DropFrame() function is used as
- // instruction of when to drop frames.
- virtual float ActualFrameRate(uint32_t inputFrameRate) const;
-
-private:
- void FillBucket(float inKbits, float outKbits);
- void UpdateRatio();
- void CapAccumulator();
-
- rtc::ExpFilter _keyFrameSizeAvgKbits;
- rtc::ExpFilter _keyFrameRatio;
- float _keyFrameSpreadFrames;
- int32_t _keyFrameCount;
- float _accumulator;
- float _accumulatorMax;
- float _targetBitRate;
- bool _dropNext;
- rtc::ExpFilter _dropRatio;
- int32_t _dropCount;
- float _windowSize;
- float _incoming_frame_rate;
- bool _wasBelowMax;
- bool _enabled;
- bool _fastMode;
- float _cap_buffer_size;
- float _max_time_drops;
-}; // end of VCMFrameDropper class
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_UTILITY_INCLUDE_FRAME_DROPPER_H_
diff --git a/webrtc/modules/video_coding/utility/include/mock/mock_frame_dropper.h b/webrtc/modules/video_coding/utility/include/mock/mock_frame_dropper.h
deleted file mode 100644
index 1e31e5442a..0000000000
--- a/webrtc/modules/video_coding/utility/include/mock/mock_frame_dropper.h
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-#ifndef WEBRTC_MODULES_VIDEO_CODING_UTILITY_INCLUDE_MOCK_MOCK_FRAME_DROPPER_H_
-#define WEBRTC_MODULES_VIDEO_CODING_UTILITY_INCLUDE_MOCK_MOCK_FRAME_DROPPER_H_
-
-#include <string>
-
-#include "testing/gmock/include/gmock/gmock.h"
-#include "webrtc/modules/video_coding/utility/include/frame_dropper.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-class MockFrameDropper : public FrameDropper {
- public:
- MOCK_METHOD0(Reset,
- void());
- MOCK_METHOD1(Enable,
- void(bool enable));
- MOCK_METHOD0(DropFrame,
- bool());
- MOCK_METHOD2(Fill,
- void(size_t frameSizeBytes, bool deltaFrame));
- MOCK_METHOD1(Leak,
- void(uint32_t inputFrameRate));
- MOCK_METHOD2(SetRates,
- void(float bitRate, float incoming_frame_rate));
- MOCK_CONST_METHOD1(ActualFrameRate,
- float(uint32_t inputFrameRate));
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_UTILITY_INCLUDE_MOCK_MOCK_FRAME_DROPPER_H_
diff --git a/webrtc/modules/video_coding/utility/include/moving_average.h b/webrtc/modules/video_coding/utility/include/moving_average.h
deleted file mode 100644
index 49c42c4ed4..0000000000
--- a/webrtc/modules/video_coding/utility/include/moving_average.h
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_UTILITY_MOVING_AVERAGE_H_
-#define WEBRTC_MODULES_VIDEO_CODING_UTILITY_MOVING_AVERAGE_H_
-
-#include <list>
-
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-template<class T>
-class MovingAverage {
- public:
- MovingAverage();
- void AddSample(T sample);
- bool GetAverage(size_t num_samples, T* average);
- void Reset();
- int size();
-
- private:
- T sum_;
- std::list<T> samples_;
-};
-
-template<class T>
-MovingAverage<T>::MovingAverage() : sum_(static_cast<T>(0)) {
-}
-
-template<class T>
-void MovingAverage<T>::AddSample(T sample) {
- samples_.push_back(sample);
- sum_ += sample;
-}
-
-template<class T>
-bool MovingAverage<T>::GetAverage(size_t num_samples, T* avg) {
- if (num_samples > samples_.size())
- return false;
-
- // Remove old samples.
- while (num_samples < samples_.size()) {
- sum_ -= samples_.front();
- samples_.pop_front();
- }
-
- *avg = sum_ / static_cast<T>(num_samples);
- return true;
-}
-
-template<class T>
-void MovingAverage<T>::Reset() {
- sum_ = static_cast<T>(0);
- samples_.clear();
-}
-
-template<class T>
-int MovingAverage<T>::size() {
- return samples_.size();
-}
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_MOVING_AVERAGE_SCALER_H_
diff --git a/webrtc/modules/video_coding/utility/include/qp_parser.h b/webrtc/modules/video_coding/utility/include/qp_parser.h
deleted file mode 100644
index 805b37b45c..0000000000
--- a/webrtc/modules/video_coding/utility/include/qp_parser.h
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_UTILITY_QP_PARSER_H_
-#define WEBRTC_MODULES_VIDEO_CODING_UTILITY_QP_PARSER_H_
-
-#include "webrtc/modules/video_coding/main/source/encoded_frame.h"
-
-namespace webrtc {
-
-class QpParser {
- public:
- QpParser() {}
- ~QpParser() {}
-
- // Parses an encoded |frame| and extracts the |qp|.
- // Returns true on success, false otherwise.
- bool GetQp(const VCMEncodedFrame& frame, int* qp);
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_UTILITY_QP_PARSER_H_
diff --git a/webrtc/modules/video_coding/utility/include/quality_scaler.h b/webrtc/modules/video_coding/utility/include/quality_scaler.h
deleted file mode 100644
index 29a1496c05..0000000000
--- a/webrtc/modules/video_coding/utility/include/quality_scaler.h
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_UTILITY_QUALITY_SCALER_H_
-#define WEBRTC_MODULES_VIDEO_CODING_UTILITY_QUALITY_SCALER_H_
-
-#include "webrtc/common_video/libyuv/include/scaler.h"
-#include "webrtc/modules/video_coding/utility/include/moving_average.h"
-
-namespace webrtc {
-class QualityScaler {
- public:
- static const int kDefaultLowQpDenominator;
- static const int kDefaultMinDownscaleDimension;
- struct Resolution {
- int width;
- int height;
- };
-
- QualityScaler();
- void Init(int low_qp_threshold,
- int high_qp_threshold,
- bool use_framerate_reduction);
- void SetMinResolution(int min_width, int min_height);
- void ReportFramerate(int framerate);
- void ReportQP(int qp);
- void ReportDroppedFrame();
- void Reset(int framerate, int bitrate, int width, int height);
- void OnEncodeFrame(const VideoFrame& frame);
- Resolution GetScaledResolution() const;
- const VideoFrame& GetScaledFrame(const VideoFrame& frame);
- int GetTargetFramerate() const;
- int downscale_shift() const { return downscale_shift_; }
-
- private:
- void AdjustScale(bool up);
- void ClearSamples();
-
- Scaler scaler_;
- VideoFrame scaled_frame_;
-
- size_t num_samples_;
- int framerate_;
- int target_framerate_;
- int low_qp_threshold_;
- int high_qp_threshold_;
- MovingAverage<int> framedrop_percent_;
- MovingAverage<int> average_qp_;
- Resolution res_;
-
- int downscale_shift_;
- int framerate_down_;
- bool use_framerate_reduction_;
- int min_width_;
- int min_height_;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_UTILITY_QUALITY_SCALER_H_
diff --git a/webrtc/modules/video_coding/utility/include/vp8_header_parser.h b/webrtc/modules/video_coding/utility/include/vp8_header_parser.h
deleted file mode 100644
index 88796ecd0e..0000000000
--- a/webrtc/modules/video_coding/utility/include/vp8_header_parser.h
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_UTILITY_VP8_PARSE_HEADER_H_
-#define WEBRTC_MODULES_VIDEO_CODING_UTILITY_VP8_PARSE_HEADER_H_
-
-namespace webrtc {
-
-namespace vp8 {
-
-enum {
- MB_FEATURE_TREE_PROBS = 3,
- NUM_MB_SEGMENTS = 4,
- NUM_REF_LF_DELTAS = 4,
- NUM_MODE_LF_DELTAS = 4,
-};
-
-typedef struct VP8BitReader VP8BitReader;
-struct VP8BitReader {
- // Boolean decoder.
- uint32_t value_; // Current value.
- uint32_t range_; // Current range minus 1. In [127, 254] interval.
- int bits_; // Number of valid bits left.
- // Read buffer.
- const uint8_t* buf_; // Next byte to be read.
- const uint8_t* buf_end_; // End of read buffer.
- int eof_; // True if input is exhausted.
-};
-
-const uint8_t kVP8Log2Range[128] = {
- 7, 6, 6, 5, 5, 5, 5, 4, 4, 4, 4, 4, 4, 4, 4,
- 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
- 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 0
-};
-
-// range = ((range - 1) << kVP8Log2Range[range]) + 1
-const uint8_t kVP8NewRange[128] = {
- 127, 127, 191, 127, 159, 191, 223, 127,
- 143, 159, 175, 191, 207, 223, 239, 127,
- 135, 143, 151, 159, 167, 175, 183, 191,
- 199, 207, 215, 223, 231, 239, 247, 127,
- 131, 135, 139, 143, 147, 151, 155, 159,
- 163, 167, 171, 175, 179, 183, 187, 191,
- 195, 199, 203, 207, 211, 215, 219, 223,
- 227, 231, 235, 239, 243, 247, 251, 127,
- 129, 131, 133, 135, 137, 139, 141, 143,
- 145, 147, 149, 151, 153, 155, 157, 159,
- 161, 163, 165, 167, 169, 171, 173, 175,
- 177, 179, 181, 183, 185, 187, 189, 191,
- 193, 195, 197, 199, 201, 203, 205, 207,
- 209, 211, 213, 215, 217, 219, 221, 223,
- 225, 227, 229, 231, 233, 235, 237, 239,
- 241, 243, 245, 247, 249, 251, 253, 127
-};
-
-// Gets the QP, QP range: [0, 127].
-// Returns true on success, false otherwise.
-bool GetQp(const uint8_t* buf, size_t length, int* qp);
-
-} // namespace vp8
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_UTILITY_VP8_PARSE_HEADER_H_
diff --git a/webrtc/modules/video_coding/utility/mock/mock_frame_dropper.h b/webrtc/modules/video_coding/utility/mock/mock_frame_dropper.h
new file mode 100644
index 0000000000..b68a4b8d5d
--- /dev/null
+++ b/webrtc/modules/video_coding/utility/mock/mock_frame_dropper.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef WEBRTC_MODULES_VIDEO_CODING_UTILITY_MOCK_MOCK_FRAME_DROPPER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_UTILITY_MOCK_MOCK_FRAME_DROPPER_H_
+
+#include <string>
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "webrtc/modules/video_coding/utility/frame_dropper.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+class MockFrameDropper : public FrameDropper {
+ public:
+ MOCK_METHOD0(Reset, void());
+ MOCK_METHOD1(Enable, void(bool enable));
+ MOCK_METHOD0(DropFrame, bool());
+ MOCK_METHOD2(Fill, void(size_t frameSizeBytes, bool deltaFrame));
+ MOCK_METHOD1(Leak, void(uint32_t inputFrameRate));
+ MOCK_METHOD2(SetRates, void(float bitRate, float incoming_frame_rate));
+ MOCK_CONST_METHOD1(ActualFrameRate, float(uint32_t inputFrameRate));
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_UTILITY_MOCK_MOCK_FRAME_DROPPER_H_
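
Note (reviewer sketch, not part of this diff): the mock above is consumed in the usual gmock way. Everything named below besides MockFrameDropper and its mocked methods is hypothetical, including the encoder-side unit under test.

  MockFrameDropper dropper;
  EXPECT_CALL(dropper, DropFrame()).WillOnce(::testing::Return(false));
  EXPECT_CALL(dropper, Fill(1000u, true)).Times(1);
  // Hypothetical injection point on some unit under test:
  // encoder.SetFrameDropperForTest(&dropper);
  // ... exercise the unit so it consults the dropper ...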
diff --git a/webrtc/modules/video_coding/utility/moving_average.h b/webrtc/modules/video_coding/utility/moving_average.h
new file mode 100644
index 0000000000..494bfd51fb
--- /dev/null
+++ b/webrtc/modules/video_coding/utility/moving_average.h
@@ -0,0 +1,71 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_UTILITY_MOVING_AVERAGE_H_
+#define WEBRTC_MODULES_VIDEO_CODING_UTILITY_MOVING_AVERAGE_H_
+
+#include <list>
+
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+template <class T>
+class MovingAverage {
+ public:
+ MovingAverage();
+ void AddSample(T sample);
+ bool GetAverage(size_t num_samples, T* average);
+ void Reset();
+ int size();
+
+ private:
+ T sum_;
+ std::list<T> samples_;
+};
+
+template <class T>
+MovingAverage<T>::MovingAverage()
+ : sum_(static_cast<T>(0)) {}
+
+template <class T>
+void MovingAverage<T>::AddSample(T sample) {
+ samples_.push_back(sample);
+ sum_ += sample;
+}
+
+template <class T>
+bool MovingAverage<T>::GetAverage(size_t num_samples, T* avg) {
+ if (num_samples > samples_.size())
+ return false;
+
+ // Remove old samples.
+ while (num_samples < samples_.size()) {
+ sum_ -= samples_.front();
+ samples_.pop_front();
+ }
+
+ *avg = sum_ / static_cast<T>(num_samples);
+ return true;
+}
+
+template <class T>
+void MovingAverage<T>::Reset() {
+ sum_ = static_cast<T>(0);
+ samples_.clear();
+}
+
+template <class T>
+int MovingAverage<T>::size() {
+ return samples_.size();
+}
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_UTILITY_MOVING_AVERAGE_H_
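
Note (reviewer sketch, not part of this diff): GetAverage(n, &avg) first discards all but the newest n samples and then averages what is left, so the caller picks the window size at read time; with an integral T the division truncates. For example:

  webrtc::MovingAverage<int> qp;
  qp.AddSample(30);
  qp.AddSample(34);
  qp.AddSample(38);
  int avg = 0;
  if (qp.GetAverage(2, &avg)) {
    // avg == 36: only the two newest samples (34 and 38) remain;
    // the oldest sample (30) has been dropped from the list.
  }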
diff --git a/webrtc/modules/video_coding/utility/qp_parser.cc b/webrtc/modules/video_coding/utility/qp_parser.cc
index 62ce31351e..0916cb0094 100644
--- a/webrtc/modules/video_coding/utility/qp_parser.cc
+++ b/webrtc/modules/video_coding/utility/qp_parser.cc
@@ -8,10 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/video_coding/utility/include/qp_parser.h"
+#include "webrtc/modules/video_coding/utility/qp_parser.h"
#include "webrtc/common_types.h"
-#include "webrtc/modules/video_coding/utility/include/vp8_header_parser.h"
+#include "webrtc/modules/video_coding/utility/vp8_header_parser.h"
namespace webrtc {
diff --git a/webrtc/modules/video_coding/utility/qp_parser.h b/webrtc/modules/video_coding/utility/qp_parser.h
new file mode 100644
index 0000000000..0b644ef61c
--- /dev/null
+++ b/webrtc/modules/video_coding/utility/qp_parser.h
@@ -0,0 +1,30 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_UTILITY_QP_PARSER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_UTILITY_QP_PARSER_H_
+
+#include "webrtc/modules/video_coding/encoded_frame.h"
+
+namespace webrtc {
+
+class QpParser {
+ public:
+ QpParser() {}
+ ~QpParser() {}
+
+ // Parses an encoded |frame| and extracts the |qp|.
+ // Returns true on success, false otherwise.
+ bool GetQp(const VCMEncodedFrame& frame, int* qp);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_UTILITY_QP_PARSER_H_
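
Note (reviewer sketch, not part of this diff): judging by the qp_parser.cc includes updated above, GetQp() delegates to vp8::GetQp(), so only VP8 payloads yield a QP here. |encoded_frame| below is a hypothetical VCMEncodedFrame:

  webrtc::QpParser parser;
  int qp = -1;
  if (parser.GetQp(encoded_frame, &qp)) {
    // qp is in [0, 127] for VP8.
  }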
diff --git a/webrtc/modules/video_coding/utility/quality_scaler.cc b/webrtc/modules/video_coding/utility/quality_scaler.cc
index ec7715230e..76bf9f5b03 100644
--- a/webrtc/modules/video_coding/utility/quality_scaler.cc
+++ b/webrtc/modules/video_coding/utility/quality_scaler.cc
@@ -7,7 +7,7 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/video_coding/utility/include/quality_scaler.h"
+#include "webrtc/modules/video_coding/utility/quality_scaler.h"
namespace webrtc {
@@ -26,8 +26,7 @@ QualityScaler::QualityScaler()
downscale_shift_(0),
framerate_down_(false),
min_width_(kDefaultMinDownscaleDimension),
- min_height_(kDefaultMinDownscaleDimension) {
-}
+ min_height_(kDefaultMinDownscaleDimension) {}
void QualityScaler::Init(int low_qp_threshold,
int high_qp_threshold,
@@ -91,7 +90,7 @@ void QualityScaler::OnEncodeFrame(const VideoFrame& frame) {
AdjustScale(false);
}
} else if (average_qp_.GetAverage(num_samples_, &avg_qp) &&
- avg_qp <= low_qp_threshold_) {
+ avg_qp <= low_qp_threshold_) {
if (use_framerate_reduction_ && framerate_down_) {
target_framerate_ = -1;
framerate_down_ = false;
@@ -104,7 +103,7 @@ void QualityScaler::OnEncodeFrame(const VideoFrame& frame) {
assert(downscale_shift_ >= 0);
for (int shift = downscale_shift_;
shift > 0 && (res_.width / 2 >= min_width_) &&
- (res_.height / 2 >= min_height_);
+ (res_.height / 2 >= min_height_);
--shift) {
res_.width /= 2;
res_.height /= 2;
@@ -124,13 +123,8 @@ const VideoFrame& QualityScaler::GetScaledFrame(const VideoFrame& frame) {
if (res.width == frame.width())
return frame;
- scaler_.Set(frame.width(),
- frame.height(),
- res.width,
- res.height,
- kI420,
- kI420,
- kScaleBox);
+ scaler_.Set(frame.width(), frame.height(), res.width, res.height, kI420,
+ kI420, kScaleBox);
if (scaler_.Scale(frame, &scaled_frame_) != 0)
return frame;
diff --git a/webrtc/modules/video_coding/utility/quality_scaler.h b/webrtc/modules/video_coding/utility/quality_scaler.h
new file mode 100644
index 0000000000..a1233cca51
--- /dev/null
+++ b/webrtc/modules/video_coding/utility/quality_scaler.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_UTILITY_QUALITY_SCALER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_UTILITY_QUALITY_SCALER_H_
+
+#include "webrtc/common_video/libyuv/include/scaler.h"
+#include "webrtc/modules/video_coding/utility/moving_average.h"
+
+namespace webrtc {
+class QualityScaler {
+ public:
+ static const int kDefaultLowQpDenominator;
+ static const int kDefaultMinDownscaleDimension;
+ struct Resolution {
+ int width;
+ int height;
+ };
+
+ QualityScaler();
+ void Init(int low_qp_threshold,
+ int high_qp_threshold,
+ bool use_framerate_reduction);
+ void SetMinResolution(int min_width, int min_height);
+ void ReportFramerate(int framerate);
+ void ReportQP(int qp);
+ void ReportDroppedFrame();
+ void Reset(int framerate, int bitrate, int width, int height);
+ void OnEncodeFrame(const VideoFrame& frame);
+ Resolution GetScaledResolution() const;
+ const VideoFrame& GetScaledFrame(const VideoFrame& frame);
+ int GetTargetFramerate() const;
+ int downscale_shift() const { return downscale_shift_; }
+
+ private:
+ void AdjustScale(bool up);
+ void ClearSamples();
+
+ Scaler scaler_;
+ VideoFrame scaled_frame_;
+
+ size_t num_samples_;
+ int framerate_;
+ int target_framerate_;
+ int low_qp_threshold_;
+ int high_qp_threshold_;
+ MovingAverage<int> framedrop_percent_;
+ MovingAverage<int> average_qp_;
+ Resolution res_;
+
+ int downscale_shift_;
+ int framerate_down_;
+ bool use_framerate_reduction_;
+ int min_width_;
+ int min_height_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_UTILITY_QUALITY_SCALER_H_
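
Note (reviewer sketch, not part of this diff): typical wiring inside an encoder wrapper, using only methods declared above. The thresholds are illustrative values, not the class defaults, and |input_frame|, |encode_succeeded| and |qp| are hypothetical:

  webrtc::QualityScaler scaler;
  scaler.Init(/* low_qp_threshold= */ 32, /* high_qp_threshold= */ 48,
              /* use_framerate_reduction= */ false);
  scaler.ReportFramerate(30);

  // Per frame, before encoding:
  scaler.OnEncodeFrame(input_frame);  // input_frame is a webrtc::VideoFrame.
  const webrtc::VideoFrame& to_encode = scaler.GetScaledFrame(input_frame);

  // Per frame, after encoding:
  if (encode_succeeded)
    scaler.ReportQP(qp);          // feeds average_qp_
  else
    scaler.ReportDroppedFrame();  // feeds framedrop_percent_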
diff --git a/webrtc/modules/video_coding/utility/quality_scaler_unittest.cc b/webrtc/modules/video_coding/utility/quality_scaler_unittest.cc
index 2ce1107472..bad73a748c 100644
--- a/webrtc/modules/video_coding/utility/quality_scaler_unittest.cc
+++ b/webrtc/modules/video_coding/utility/quality_scaler_unittest.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/video_coding/utility/include/quality_scaler.h"
+#include "webrtc/modules/video_coding/utility/quality_scaler.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -33,6 +33,7 @@ class QualityScalerTest : public ::testing::Test {
int width;
int height;
};
+
protected:
enum ScaleDirection {
kKeepScaleAtHighQp,
@@ -43,8 +44,8 @@ class QualityScalerTest : public ::testing::Test {
enum BadQualityMetric { kDropFrame, kReportLowQP };
QualityScalerTest() {
- input_frame_.CreateEmptyFrame(
- kWidth, kHeight, kWidth, kHalfWidth, kHalfWidth);
+ input_frame_.CreateEmptyFrame(kWidth, kHeight, kWidth, kHalfWidth,
+ kHalfWidth);
qs_.Init(kMaxQp / QualityScaler::kDefaultLowQpDenominator, kHighQp, false);
qs_.ReportFramerate(kFramerate);
qs_.OnEncodeFrame(input_frame_);
@@ -97,7 +98,8 @@ class QualityScalerTest : public ::testing::Test {
int num_second,
int initial_framerate);
- void VerifyQualityAdaptation(int initial_framerate, int seconds,
+ void VerifyQualityAdaptation(int initial_framerate,
+ int seconds,
bool expect_spatial_resize,
bool expect_framerate_reduction);
@@ -183,8 +185,8 @@ TEST_F(QualityScalerTest, DoesNotDownscaleAfterHalfFramedrop) {
void QualityScalerTest::ContinuouslyDownscalesByHalfDimensionsAndBackUp() {
const int initial_min_dimension = input_frame_.width() < input_frame_.height()
- ? input_frame_.width()
- : input_frame_.height();
+ ? input_frame_.width()
+ : input_frame_.height();
int min_dimension = initial_min_dimension;
int current_shift = 0;
// Drop all frames to force-trigger downscaling.
@@ -229,14 +231,14 @@ TEST_F(QualityScalerTest,
const int kOddWidth = 517;
const int kHalfOddWidth = (kOddWidth + 1) / 2;
const int kOddHeight = 1239;
- input_frame_.CreateEmptyFrame(
- kOddWidth, kOddHeight, kOddWidth, kHalfOddWidth, kHalfOddWidth);
+ input_frame_.CreateEmptyFrame(kOddWidth, kOddHeight, kOddWidth, kHalfOddWidth,
+ kHalfOddWidth);
ContinuouslyDownscalesByHalfDimensionsAndBackUp();
}
void QualityScalerTest::DoesNotDownscaleFrameDimensions(int width, int height) {
- input_frame_.CreateEmptyFrame(
- width, height, width, (width + 1) / 2, (width + 1) / 2);
+ input_frame_.CreateEmptyFrame(width, height, width, (width + 1) / 2,
+ (width + 1) / 2);
for (int i = 0; i < kFramerate * kNumSeconds; ++i) {
qs_.ReportDroppedFrame();
@@ -259,7 +261,9 @@ TEST_F(QualityScalerTest, DoesNotDownscaleFrom1Px) {
}
QualityScalerTest::Resolution QualityScalerTest::TriggerResolutionChange(
- BadQualityMetric dropframe_lowqp, int num_second, int initial_framerate) {
+ BadQualityMetric dropframe_lowqp,
+ int num_second,
+ int initial_framerate) {
QualityScalerTest::Resolution res;
res.framerate = initial_framerate;
qs_.OnEncodeFrame(input_frame_);
@@ -288,7 +292,9 @@ QualityScalerTest::Resolution QualityScalerTest::TriggerResolutionChange(
}
void QualityScalerTest::VerifyQualityAdaptation(
- int initial_framerate, int seconds, bool expect_spatial_resize,
+ int initial_framerate,
+ int seconds,
+ bool expect_spatial_resize,
bool expect_framerate_reduction) {
const int kDisabledBadQpThreshold = kMaxQp + 1;
qs_.Init(kMaxQp / QualityScaler::kDefaultLowQpDenominator,
@@ -298,8 +304,8 @@ void QualityScalerTest::VerifyQualityAdaptation(
int init_height = qs_.GetScaledResolution().height;
// Test reducing framerate by dropping frame continuously.
- QualityScalerTest::Resolution res = TriggerResolutionChange(
- kDropFrame, seconds, initial_framerate);
+ QualityScalerTest::Resolution res =
+ TriggerResolutionChange(kDropFrame, seconds, initial_framerate);
if (expect_framerate_reduction) {
EXPECT_LT(res.framerate, initial_framerate);
diff --git a/webrtc/modules/video_coding/utility/video_coding_utility.gyp b/webrtc/modules/video_coding/utility/video_coding_utility.gyp
index f0764bb7bf..42cbb3d4e0 100644
--- a/webrtc/modules/video_coding/utility/video_coding_utility.gyp
+++ b/webrtc/modules/video_coding/utility/video_coding_utility.gyp
@@ -19,14 +19,14 @@
],
'sources': [
'frame_dropper.cc',
- 'include/frame_dropper.h',
- 'include/moving_average.h',
- 'include/qp_parser.h',
- 'include/quality_scaler.h',
- 'include/vp8_header_parser.h',
+ 'frame_dropper.h',
+ 'moving_average.h',
'qp_parser.cc',
+ 'qp_parser.h',
'quality_scaler.cc',
+ 'quality_scaler.h',
'vp8_header_parser.cc',
+ 'vp8_header_parser.h',
],
},
], # targets
diff --git a/webrtc/modules/video_coding/utility/vp8_header_parser.cc b/webrtc/modules/video_coding/utility/vp8_header_parser.cc
index dc5a0e5d15..631385d0f2 100644
--- a/webrtc/modules/video_coding/utility/vp8_header_parser.cc
+++ b/webrtc/modules/video_coding/utility/vp8_header_parser.cc
@@ -7,12 +7,9 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
-#include <stdint.h>
-#include <stdio.h>
+#include "webrtc/modules/video_coding/utility/vp8_header_parser.h"
-#include "webrtc/modules/video_coding/utility/include/vp8_header_parser.h"
-
-#include "webrtc/system_wrappers/include/logging.h"
+#include "webrtc/base/logging.h"
namespace webrtc {
@@ -46,12 +43,12 @@ static void VP8LoadNewBytes(VP8BitReader* const br) {
const uint32_t in_bits = *(const uint32_t*)(br->buf_);
br->buf_ += BITS >> 3;
#if defined(WEBRTC_ARCH_BIG_ENDIAN)
- bits = static_cast<uint32_t>(in_bits);
- if (BITS != 8 * sizeof(uint32_t))
- bits >>= (8 * sizeof(uint32_t) - BITS);
+ bits = static_cast<uint32_t>(in_bits);
+ if (BITS != 8 * sizeof(uint32_t))
+ bits >>= (8 * sizeof(uint32_t) - BITS);
#else
- bits = BSwap32(in_bits);
- bits >>= 32 - BITS;
+ bits = BSwap32(in_bits);
+ bits >>= 32 - BITS;
#endif
br->value_ = bits | (br->value_ << BITS);
br->bits_ += BITS;
@@ -63,12 +60,12 @@ static void VP8LoadNewBytes(VP8BitReader* const br) {
static void VP8InitBitReader(VP8BitReader* const br,
const uint8_t* const start,
const uint8_t* const end) {
- br->range_ = 255 - 1;
- br->buf_ = start;
+ br->range_ = 255 - 1;
+ br->buf_ = start;
br->buf_end_ = end;
- br->value_ = 0;
- br->bits_ = -8; // To load the very first 8bits.
- br->eof_ = 0;
+ br->value_ = 0;
+  br->bits_ = -8;  // To load the very first 8 bits.

+ br->eof_ = 0;
VP8LoadNewBytes(br);
}
@@ -125,7 +122,7 @@ static void ParseSegmentHeader(VP8BitReader* br) {
int s;
VP8Get(br);
for (s = 0; s < NUM_MB_SEGMENTS; ++s) {
- VP8Get(br) ? VP8GetSignedValue(br, 7) : 0;
+ VP8Get(br) ? VP8GetSignedValue(br, 7) : 0;
}
for (s = 0; s < NUM_MB_SEGMENTS; ++s) {
VP8Get(br) ? VP8GetSignedValue(br, 6) : 0;
diff --git a/webrtc/modules/video_coding/utility/vp8_header_parser.h b/webrtc/modules/video_coding/utility/vp8_header_parser.h
new file mode 100644
index 0000000000..b0c684c578
--- /dev/null
+++ b/webrtc/modules/video_coding/utility/vp8_header_parser.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_UTILITY_VP8_HEADER_PARSER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_UTILITY_VP8_HEADER_PARSER_H_
+
+#include <stdint.h>
+#include <stdio.h>
+
+namespace webrtc {
+
+namespace vp8 {
+
+enum {
+ MB_FEATURE_TREE_PROBS = 3,
+ NUM_MB_SEGMENTS = 4,
+ NUM_REF_LF_DELTAS = 4,
+ NUM_MODE_LF_DELTAS = 4,
+};
+
+typedef struct VP8BitReader VP8BitReader;
+struct VP8BitReader {
+ // Boolean decoder.
+ uint32_t value_; // Current value.
+ uint32_t range_; // Current range minus 1. In [127, 254] interval.
+ int bits_; // Number of valid bits left.
+ // Read buffer.
+ const uint8_t* buf_; // Next byte to be read.
+ const uint8_t* buf_end_; // End of read buffer.
+ int eof_; // True if input is exhausted.
+};
+
+const uint8_t kVP8Log2Range[128] = {
+ 7, 6, 6, 5, 5, 5, 5, 4, 4, 4, 4, 4, 4, 4, 4, 3, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0};
+
+// range = ((range - 1) << kVP8Log2Range[range]) + 1
+const uint8_t kVP8NewRange[128] = {
+ 127, 127, 191, 127, 159, 191, 223, 127, 143, 159, 175, 191, 207, 223, 239,
+ 127, 135, 143, 151, 159, 167, 175, 183, 191, 199, 207, 215, 223, 231, 239,
+ 247, 127, 131, 135, 139, 143, 147, 151, 155, 159, 163, 167, 171, 175, 179,
+ 183, 187, 191, 195, 199, 203, 207, 211, 215, 219, 223, 227, 231, 235, 239,
+ 243, 247, 251, 127, 129, 131, 133, 135, 137, 139, 141, 143, 145, 147, 149,
+ 151, 153, 155, 157, 159, 161, 163, 165, 167, 169, 171, 173, 175, 177, 179,
+ 181, 183, 185, 187, 189, 191, 193, 195, 197, 199, 201, 203, 205, 207, 209,
+ 211, 213, 215, 217, 219, 221, 223, 225, 227, 229, 231, 233, 235, 237, 239,
+ 241, 243, 245, 247, 249, 251, 253, 127};
+
+// Gets the QP, QP range: [0, 127].
+// Returns true on success, false otherwise.
+bool GetQp(const uint8_t* buf, size_t length, int* qp);
+
+} // namespace vp8
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_UTILITY_VP8_HEADER_PARSER_H_
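
Note (reviewer sketch, not part of this diff): the two tables drive the boolean decoder's renormalization step. Reading the values directly, they appear to satisfy kVP8NewRange[r] == ((r + 1) << kVP8Log2Range[r]) - 1, where r is the stored range_ (the true range minus one); the recurrence in the comment above reads as a looser mnemonic for the same relation. A quick in-tree sanity check:

  #include <assert.h>

  #include "webrtc/modules/video_coding/utility/vp8_header_parser.h"

  void CheckVp8RangeTables() {
    for (int r = 0; r < 128; ++r) {
      assert(webrtc::vp8::kVP8NewRange[r] ==
             ((r + 1) << webrtc::vp8::kVP8Log2Range[r]) - 1);
    }
  }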
diff --git a/webrtc/modules/video_coding/video_coding.gypi b/webrtc/modules/video_coding/video_coding.gypi
index b292e0ae31..438d8f1c1f 100644
--- a/webrtc/modules/video_coding/video_coding.gypi
+++ b/webrtc/modules/video_coding/video_coding.gypi
@@ -22,61 +22,61 @@
],
'sources': [
# interfaces
- 'main/interface/video_coding.h',
- 'main/interface/video_coding_defines.h',
+ 'include/video_coding.h',
+ 'include/video_coding_defines.h',
# headers
- 'main/source/codec_database.h',
- 'main/source/codec_timer.h',
- 'main/source/content_metrics_processing.h',
- 'main/source/decoding_state.h',
- 'main/source/encoded_frame.h',
- 'main/source/fec_tables_xor.h',
- 'main/source/frame_buffer.h',
- 'main/source/generic_decoder.h',
- 'main/source/generic_encoder.h',
- 'main/source/inter_frame_delay.h',
- 'main/source/internal_defines.h',
- 'main/source/jitter_buffer.h',
- 'main/source/jitter_buffer_common.h',
- 'main/source/jitter_estimator.h',
- 'main/source/media_opt_util.h',
- 'main/source/media_optimization.h',
- 'main/source/nack_fec_tables.h',
- 'main/source/packet.h',
- 'main/source/qm_select_data.h',
- 'main/source/qm_select.h',
- 'main/source/receiver.h',
- 'main/source/rtt_filter.h',
- 'main/source/session_info.h',
- 'main/source/timestamp_map.h',
- 'main/source/timing.h',
- 'main/source/video_coding_impl.h',
+ 'codec_database.h',
+ 'codec_timer.h',
+ 'content_metrics_processing.h',
+ 'decoding_state.h',
+ 'encoded_frame.h',
+ 'fec_tables_xor.h',
+ 'frame_buffer.h',
+ 'generic_decoder.h',
+ 'generic_encoder.h',
+ 'inter_frame_delay.h',
+ 'internal_defines.h',
+ 'jitter_buffer.h',
+ 'jitter_buffer_common.h',
+ 'jitter_estimator.h',
+ 'media_opt_util.h',
+ 'media_optimization.h',
+ 'nack_fec_tables.h',
+ 'packet.h',
+ 'qm_select_data.h',
+ 'qm_select.h',
+ 'receiver.h',
+ 'rtt_filter.h',
+ 'session_info.h',
+ 'timestamp_map.h',
+ 'timing.h',
+ 'video_coding_impl.h',
# sources
- 'main/source/codec_database.cc',
- 'main/source/codec_timer.cc',
- 'main/source/content_metrics_processing.cc',
- 'main/source/decoding_state.cc',
- 'main/source/encoded_frame.cc',
- 'main/source/frame_buffer.cc',
- 'main/source/generic_decoder.cc',
- 'main/source/generic_encoder.cc',
- 'main/source/inter_frame_delay.cc',
- 'main/source/jitter_buffer.cc',
- 'main/source/jitter_estimator.cc',
- 'main/source/media_opt_util.cc',
- 'main/source/media_optimization.cc',
- 'main/source/packet.cc',
- 'main/source/qm_select.cc',
- 'main/source/receiver.cc',
- 'main/source/rtt_filter.cc',
- 'main/source/session_info.cc',
- 'main/source/timestamp_map.cc',
- 'main/source/timing.cc',
- 'main/source/video_coding_impl.cc',
- 'main/source/video_sender.cc',
- 'main/source/video_receiver.cc',
+ 'codec_database.cc',
+ 'codec_timer.cc',
+ 'content_metrics_processing.cc',
+ 'decoding_state.cc',
+ 'encoded_frame.cc',
+ 'frame_buffer.cc',
+ 'generic_decoder.cc',
+ 'generic_encoder.cc',
+ 'inter_frame_delay.cc',
+ 'jitter_buffer.cc',
+ 'jitter_estimator.cc',
+ 'media_opt_util.cc',
+ 'media_optimization.cc',
+ 'packet.cc',
+ 'qm_select.cc',
+ 'receiver.cc',
+ 'rtt_filter.cc',
+ 'session_info.cc',
+ 'timestamp_map.cc',
+ 'timing.cc',
+ 'video_coding_impl.cc',
+ 'video_sender.cc',
+ 'video_receiver.cc',
], # source
# TODO(jschuh): Bug 1348: fix size_t to int truncations.
'msvs_disabled_warnings': [ 4267, ],
diff --git a/webrtc/modules/video_coding/video_coding_impl.cc b/webrtc/modules/video_coding/video_coding_impl.cc
new file mode 100644
index 0000000000..1e26a7e243
--- /dev/null
+++ b/webrtc/modules/video_coding/video_coding_impl.cc
@@ -0,0 +1,320 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_coding/video_coding_impl.h"
+
+#include <algorithm>
+
+#include "webrtc/common_types.h"
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
+#include "webrtc/modules/video_coding/encoded_frame.h"
+#include "webrtc/modules/video_coding/jitter_buffer.h"
+#include "webrtc/modules/video_coding/packet.h"
+#include "webrtc/system_wrappers/include/clock.h"
+
+namespace webrtc {
+namespace vcm {
+
+int64_t VCMProcessTimer::Period() const {
+ return _periodMs;
+}
+
+int64_t VCMProcessTimer::TimeUntilProcess() const {
+ const int64_t time_since_process = _clock->TimeInMilliseconds() - _latestMs;
+ const int64_t time_until_process = _periodMs - time_since_process;
+ return std::max<int64_t>(time_until_process, 0);
+}
+
+void VCMProcessTimer::Processed() {
+ _latestMs = _clock->TimeInMilliseconds();
+}
+} // namespace vcm
+
+namespace {
+// This wrapper provides a way to modify the callback without the need to expose
+// a register method all the way down to the function calling it.
+class EncodedImageCallbackWrapper : public EncodedImageCallback {
+ public:
+ EncodedImageCallbackWrapper()
+ : cs_(CriticalSectionWrapper::CreateCriticalSection()), callback_(NULL) {}
+
+ virtual ~EncodedImageCallbackWrapper() {}
+
+ void Register(EncodedImageCallback* callback) {
+ CriticalSectionScoped cs(cs_.get());
+ callback_ = callback;
+ }
+
+ // TODO(andresp): Change to void as return value is ignored.
+ virtual int32_t Encoded(const EncodedImage& encoded_image,
+ const CodecSpecificInfo* codec_specific_info,
+ const RTPFragmentationHeader* fragmentation) {
+ CriticalSectionScoped cs(cs_.get());
+ if (callback_)
+ return callback_->Encoded(encoded_image, codec_specific_info,
+ fragmentation);
+ return 0;
+ }
+
+ private:
+ rtc::scoped_ptr<CriticalSectionWrapper> cs_;
+ EncodedImageCallback* callback_ GUARDED_BY(cs_);
+};
+
+class VideoCodingModuleImpl : public VideoCodingModule {
+ public:
+ VideoCodingModuleImpl(Clock* clock,
+ EventFactory* event_factory,
+ bool owns_event_factory,
+ VideoEncoderRateObserver* encoder_rate_observer,
+ VCMQMSettingsCallback* qm_settings_callback)
+ : VideoCodingModule(),
+ sender_(clock,
+ &post_encode_callback_,
+ encoder_rate_observer,
+ qm_settings_callback),
+ receiver_(clock, event_factory),
+ own_event_factory_(owns_event_factory ? event_factory : NULL) {}
+
+ virtual ~VideoCodingModuleImpl() { own_event_factory_.reset(); }
+
+ int64_t TimeUntilNextProcess() override {
+ int64_t sender_time = sender_.TimeUntilNextProcess();
+ int64_t receiver_time = receiver_.TimeUntilNextProcess();
+ assert(sender_time >= 0);
+ assert(receiver_time >= 0);
+ return VCM_MIN(sender_time, receiver_time);
+ }
+
+ int32_t Process() override {
+ int32_t sender_return = sender_.Process();
+ int32_t receiver_return = receiver_.Process();
+ if (sender_return != VCM_OK)
+ return sender_return;
+ return receiver_return;
+ }
+
+ int32_t RegisterSendCodec(const VideoCodec* sendCodec,
+ uint32_t numberOfCores,
+ uint32_t maxPayloadSize) override {
+ return sender_.RegisterSendCodec(sendCodec, numberOfCores, maxPayloadSize);
+ }
+
+ int32_t RegisterExternalEncoder(VideoEncoder* externalEncoder,
+ uint8_t payloadType,
+ bool internalSource) override {
+ sender_.RegisterExternalEncoder(externalEncoder, payloadType,
+ internalSource);
+ return 0;
+ }
+
+ int Bitrate(unsigned int* bitrate) const override {
+ return sender_.Bitrate(bitrate);
+ }
+
+ int FrameRate(unsigned int* framerate) const override {
+ return sender_.FrameRate(framerate);
+ }
+
+ int32_t SetChannelParameters(uint32_t target_bitrate, // bits/s.
+ uint8_t lossRate,
+ int64_t rtt) override {
+ return sender_.SetChannelParameters(target_bitrate, lossRate, rtt);
+ }
+
+ int32_t RegisterTransportCallback(
+ VCMPacketizationCallback* transport) override {
+ return sender_.RegisterTransportCallback(transport);
+ }
+
+ int32_t RegisterSendStatisticsCallback(
+ VCMSendStatisticsCallback* sendStats) override {
+ return sender_.RegisterSendStatisticsCallback(sendStats);
+ }
+
+ int32_t RegisterProtectionCallback(
+ VCMProtectionCallback* protection) override {
+ return sender_.RegisterProtectionCallback(protection);
+ }
+
+ int32_t SetVideoProtection(VCMVideoProtection videoProtection,
+ bool enable) override {
+ // TODO(pbos): Remove enable from receive-side protection modes as well.
+ if (enable)
+ sender_.SetVideoProtection(videoProtection);
+ return receiver_.SetVideoProtection(videoProtection, enable);
+ }
+
+ int32_t AddVideoFrame(const VideoFrame& videoFrame,
+ const VideoContentMetrics* contentMetrics,
+ const CodecSpecificInfo* codecSpecificInfo) override {
+ return sender_.AddVideoFrame(videoFrame, contentMetrics, codecSpecificInfo);
+ }
+
+ int32_t IntraFrameRequest(int stream_index) override {
+ return sender_.IntraFrameRequest(stream_index);
+ }
+
+ int32_t EnableFrameDropper(bool enable) override {
+ return sender_.EnableFrameDropper(enable);
+ }
+
+ void SuspendBelowMinBitrate() override {
+ return sender_.SuspendBelowMinBitrate();
+ }
+
+ bool VideoSuspended() const override { return sender_.VideoSuspended(); }
+
+ int32_t RegisterReceiveCodec(const VideoCodec* receiveCodec,
+ int32_t numberOfCores,
+ bool requireKeyFrame) override {
+ return receiver_.RegisterReceiveCodec(receiveCodec, numberOfCores,
+ requireKeyFrame);
+ }
+
+ void RegisterExternalDecoder(VideoDecoder* externalDecoder,
+ uint8_t payloadType) override {
+ receiver_.RegisterExternalDecoder(externalDecoder, payloadType);
+ }
+
+ int32_t RegisterReceiveCallback(
+ VCMReceiveCallback* receiveCallback) override {
+ return receiver_.RegisterReceiveCallback(receiveCallback);
+ }
+
+ int32_t RegisterReceiveStatisticsCallback(
+ VCMReceiveStatisticsCallback* receiveStats) override {
+ return receiver_.RegisterReceiveStatisticsCallback(receiveStats);
+ }
+
+ int32_t RegisterDecoderTimingCallback(
+ VCMDecoderTimingCallback* decoderTiming) override {
+ return receiver_.RegisterDecoderTimingCallback(decoderTiming);
+ }
+
+ int32_t RegisterFrameTypeCallback(
+ VCMFrameTypeCallback* frameTypeCallback) override {
+ return receiver_.RegisterFrameTypeCallback(frameTypeCallback);
+ }
+
+ int32_t RegisterPacketRequestCallback(
+ VCMPacketRequestCallback* callback) override {
+ return receiver_.RegisterPacketRequestCallback(callback);
+ }
+
+ int RegisterRenderBufferSizeCallback(
+ VCMRenderBufferSizeCallback* callback) override {
+ return receiver_.RegisterRenderBufferSizeCallback(callback);
+ }
+
+ int32_t Decode(uint16_t maxWaitTimeMs) override {
+ return receiver_.Decode(maxWaitTimeMs);
+ }
+
+ int32_t ResetDecoder() override { return receiver_.ResetDecoder(); }
+
+ int32_t ReceiveCodec(VideoCodec* currentReceiveCodec) const override {
+ return receiver_.ReceiveCodec(currentReceiveCodec);
+ }
+
+ VideoCodecType ReceiveCodec() const override {
+ return receiver_.ReceiveCodec();
+ }
+
+ int32_t IncomingPacket(const uint8_t* incomingPayload,
+ size_t payloadLength,
+ const WebRtcRTPHeader& rtpInfo) override {
+ return receiver_.IncomingPacket(incomingPayload, payloadLength, rtpInfo);
+ }
+
+ int32_t SetMinimumPlayoutDelay(uint32_t minPlayoutDelayMs) override {
+ return receiver_.SetMinimumPlayoutDelay(minPlayoutDelayMs);
+ }
+
+ int32_t SetRenderDelay(uint32_t timeMS) override {
+ return receiver_.SetRenderDelay(timeMS);
+ }
+
+ int32_t Delay() const override { return receiver_.Delay(); }
+
+ uint32_t DiscardedPackets() const override {
+ return receiver_.DiscardedPackets();
+ }
+
+ int SetReceiverRobustnessMode(ReceiverRobustness robustnessMode,
+ VCMDecodeErrorMode errorMode) override {
+ return receiver_.SetReceiverRobustnessMode(robustnessMode, errorMode);
+ }
+
+ void SetNackSettings(size_t max_nack_list_size,
+ int max_packet_age_to_nack,
+ int max_incomplete_time_ms) override {
+ return receiver_.SetNackSettings(max_nack_list_size, max_packet_age_to_nack,
+ max_incomplete_time_ms);
+ }
+
+ void SetDecodeErrorMode(VCMDecodeErrorMode decode_error_mode) override {
+ return receiver_.SetDecodeErrorMode(decode_error_mode);
+ }
+
+ int SetMinReceiverDelay(int desired_delay_ms) override {
+ return receiver_.SetMinReceiverDelay(desired_delay_ms);
+ }
+
+ int32_t SetReceiveChannelParameters(int64_t rtt) override {
+ return receiver_.SetReceiveChannelParameters(rtt);
+ }
+
+ void RegisterPreDecodeImageCallback(EncodedImageCallback* observer) override {
+ receiver_.RegisterPreDecodeImageCallback(observer);
+ }
+
+ void RegisterPostEncodeImageCallback(
+ EncodedImageCallback* observer) override {
+ post_encode_callback_.Register(observer);
+ }
+
+ void TriggerDecoderShutdown() override { receiver_.TriggerDecoderShutdown(); }
+
+ private:
+ EncodedImageCallbackWrapper post_encode_callback_;
+ vcm::VideoSender sender_;
+ vcm::VideoReceiver receiver_;
+ rtc::scoped_ptr<EventFactory> own_event_factory_;
+};
+} // namespace
+
+void VideoCodingModule::Codec(VideoCodecType codecType, VideoCodec* codec) {
+ VCMCodecDataBase::Codec(codecType, codec);
+}
+
+VideoCodingModule* VideoCodingModule::Create(
+ Clock* clock,
+ VideoEncoderRateObserver* encoder_rate_observer,
+ VCMQMSettingsCallback* qm_settings_callback) {
+ return new VideoCodingModuleImpl(clock, new EventFactoryImpl, true,
+ encoder_rate_observer, qm_settings_callback);
+}
+
+VideoCodingModule* VideoCodingModule::Create(Clock* clock,
+ EventFactory* event_factory) {
+ assert(clock);
+ assert(event_factory);
+ return new VideoCodingModuleImpl(clock, event_factory, false, nullptr,
+ nullptr);
+}
+
+void VideoCodingModule::Destroy(VideoCodingModule* module) {
+ if (module != NULL) {
+ delete static_cast<VideoCodingModuleImpl*>(module);
+ }
+}
+} // namespace webrtc
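
Note (reviewer sketch, not part of this diff): receive-side lifecycle as the two-argument factory above expects it. The caller keeps ownership of the event factory and is responsible for pumping Process(); |running| is a hypothetical loop condition:

  webrtc::SimulatedClock clock(0);
  webrtc::EventFactoryImpl event_factory;
  webrtc::VideoCodingModule* vcm =
      webrtc::VideoCodingModule::Create(&clock, &event_factory);
  while (running) {
    if (vcm->TimeUntilNextProcess() == 0)
      vcm->Process();
    // ... feed vcm->IncomingPacket() and call vcm->Decode() ...
  }
  webrtc::VideoCodingModule::Destroy(vcm);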
diff --git a/webrtc/modules/video_coding/video_coding_impl.h b/webrtc/modules/video_coding/video_coding_impl.h
new file mode 100644
index 0000000000..f105fa9c18
--- /dev/null
+++ b/webrtc/modules/video_coding/video_coding_impl.h
@@ -0,0 +1,222 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_VIDEO_CODING_IMPL_H_
+#define WEBRTC_MODULES_VIDEO_CODING_VIDEO_CODING_IMPL_H_
+
+#include "webrtc/modules/video_coding/include/video_coding.h"
+
+#include <vector>
+
+#include "webrtc/base/thread_annotations.h"
+#include "webrtc/base/thread_checker.h"
+#include "webrtc/modules/video_coding/codec_database.h"
+#include "webrtc/modules/video_coding/frame_buffer.h"
+#include "webrtc/modules/video_coding/generic_decoder.h"
+#include "webrtc/modules/video_coding/generic_encoder.h"
+#include "webrtc/modules/video_coding/jitter_buffer.h"
+#include "webrtc/modules/video_coding/media_optimization.h"
+#include "webrtc/modules/video_coding/receiver.h"
+#include "webrtc/modules/video_coding/timing.h"
+#include "webrtc/modules/video_coding/utility/qp_parser.h"
+#include "webrtc/system_wrappers/include/clock.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+
+namespace webrtc {
+
+class EncodedFrameObserver;
+
+namespace vcm {
+
+class VCMProcessTimer {
+ public:
+ VCMProcessTimer(int64_t periodMs, Clock* clock)
+ : _clock(clock),
+ _periodMs(periodMs),
+ _latestMs(_clock->TimeInMilliseconds()) {}
+ int64_t Period() const;
+ int64_t TimeUntilProcess() const;
+ void Processed();
+
+ private:
+ Clock* _clock;
+ int64_t _periodMs;
+ int64_t _latestMs;
+};
+
+class VideoSender {
+ public:
+ typedef VideoCodingModule::SenderNackMode SenderNackMode;
+
+ VideoSender(Clock* clock,
+ EncodedImageCallback* post_encode_callback,
+ VideoEncoderRateObserver* encoder_rate_observer,
+ VCMQMSettingsCallback* qm_settings_callback);
+
+ ~VideoSender();
+
+ // Register the send codec to be used.
+ // This method must be called on the construction thread.
+ int32_t RegisterSendCodec(const VideoCodec* sendCodec,
+ uint32_t numberOfCores,
+ uint32_t maxPayloadSize);
+
+ void RegisterExternalEncoder(VideoEncoder* externalEncoder,
+ uint8_t payloadType,
+ bool internalSource);
+
+ int Bitrate(unsigned int* bitrate) const;
+ int FrameRate(unsigned int* framerate) const;
+
+ int32_t SetChannelParameters(uint32_t target_bitrate, // bits/s.
+ uint8_t lossRate,
+ int64_t rtt);
+
+ int32_t RegisterTransportCallback(VCMPacketizationCallback* transport);
+ int32_t RegisterSendStatisticsCallback(VCMSendStatisticsCallback* sendStats);
+ int32_t RegisterProtectionCallback(VCMProtectionCallback* protection);
+ void SetVideoProtection(VCMVideoProtection videoProtection);
+
+ int32_t AddVideoFrame(const VideoFrame& videoFrame,
+ const VideoContentMetrics* _contentMetrics,
+ const CodecSpecificInfo* codecSpecificInfo);
+
+ int32_t IntraFrameRequest(int stream_index);
+ int32_t EnableFrameDropper(bool enable);
+
+ void SuspendBelowMinBitrate();
+ bool VideoSuspended() const;
+
+ int64_t TimeUntilNextProcess();
+ int32_t Process();
+
+ private:
+ void SetEncoderParameters(EncoderParameters params)
+ EXCLUSIVE_LOCKS_REQUIRED(send_crit_);
+
+ Clock* const clock_;
+
+ rtc::scoped_ptr<CriticalSectionWrapper> process_crit_sect_;
+ mutable rtc::CriticalSection send_crit_;
+ VCMGenericEncoder* _encoder;
+ VCMEncodedFrameCallback _encodedFrameCallback;
+ std::vector<FrameType> _nextFrameTypes;
+ media_optimization::MediaOptimization _mediaOpt;
+ VCMSendStatisticsCallback* _sendStatsCallback GUARDED_BY(process_crit_sect_);
+ VCMCodecDataBase _codecDataBase GUARDED_BY(send_crit_);
+ bool frame_dropper_enabled_ GUARDED_BY(send_crit_);
+ VCMProcessTimer _sendStatsTimer;
+
+ // Must be accessed on the construction thread of VideoSender.
+ VideoCodec current_codec_;
+ rtc::ThreadChecker main_thread_;
+
+ VCMQMSettingsCallback* const qm_settings_callback_;
+ VCMProtectionCallback* protection_callback_;
+
+ rtc::CriticalSection params_lock_;
+ EncoderParameters encoder_params_ GUARDED_BY(params_lock_);
+};
+
+class VideoReceiver {
+ public:
+ typedef VideoCodingModule::ReceiverRobustness ReceiverRobustness;
+
+ VideoReceiver(Clock* clock, EventFactory* event_factory);
+ ~VideoReceiver();
+
+ int32_t RegisterReceiveCodec(const VideoCodec* receiveCodec,
+ int32_t numberOfCores,
+ bool requireKeyFrame);
+
+ void RegisterExternalDecoder(VideoDecoder* externalDecoder,
+ uint8_t payloadType);
+ int32_t RegisterReceiveCallback(VCMReceiveCallback* receiveCallback);
+ int32_t RegisterReceiveStatisticsCallback(
+ VCMReceiveStatisticsCallback* receiveStats);
+ int32_t RegisterDecoderTimingCallback(
+ VCMDecoderTimingCallback* decoderTiming);
+ int32_t RegisterFrameTypeCallback(VCMFrameTypeCallback* frameTypeCallback);
+ int32_t RegisterPacketRequestCallback(VCMPacketRequestCallback* callback);
+ int RegisterRenderBufferSizeCallback(VCMRenderBufferSizeCallback* callback);
+
+ int32_t Decode(uint16_t maxWaitTimeMs);
+ int32_t ResetDecoder();
+
+ int32_t ReceiveCodec(VideoCodec* currentReceiveCodec) const;
+ VideoCodecType ReceiveCodec() const;
+
+ int32_t IncomingPacket(const uint8_t* incomingPayload,
+ size_t payloadLength,
+ const WebRtcRTPHeader& rtpInfo);
+ int32_t SetMinimumPlayoutDelay(uint32_t minPlayoutDelayMs);
+ int32_t SetRenderDelay(uint32_t timeMS);
+ int32_t Delay() const;
+ uint32_t DiscardedPackets() const;
+
+ int SetReceiverRobustnessMode(ReceiverRobustness robustnessMode,
+ VCMDecodeErrorMode errorMode);
+ void SetNackSettings(size_t max_nack_list_size,
+ int max_packet_age_to_nack,
+ int max_incomplete_time_ms);
+
+ void SetDecodeErrorMode(VCMDecodeErrorMode decode_error_mode);
+ int SetMinReceiverDelay(int desired_delay_ms);
+
+ int32_t SetReceiveChannelParameters(int64_t rtt);
+ int32_t SetVideoProtection(VCMVideoProtection videoProtection, bool enable);
+
+ int64_t TimeUntilNextProcess();
+ int32_t Process();
+
+ void RegisterPreDecodeImageCallback(EncodedImageCallback* observer);
+ void TriggerDecoderShutdown();
+
+ protected:
+ int32_t Decode(const webrtc::VCMEncodedFrame& frame)
+ EXCLUSIVE_LOCKS_REQUIRED(_receiveCritSect);
+ int32_t RequestKeyFrame();
+ int32_t RequestSliceLossIndication(const uint64_t pictureID) const;
+
+ private:
+ Clock* const clock_;
+ rtc::scoped_ptr<CriticalSectionWrapper> process_crit_sect_;
+ CriticalSectionWrapper* _receiveCritSect;
+ VCMTiming _timing;
+ VCMReceiver _receiver;
+ VCMDecodedFrameCallback _decodedFrameCallback;
+ VCMFrameTypeCallback* _frameTypeCallback GUARDED_BY(process_crit_sect_);
+ VCMReceiveStatisticsCallback* _receiveStatsCallback
+ GUARDED_BY(process_crit_sect_);
+ VCMDecoderTimingCallback* _decoderTimingCallback
+ GUARDED_BY(process_crit_sect_);
+ VCMPacketRequestCallback* _packetRequestCallback
+ GUARDED_BY(process_crit_sect_);
+ VCMRenderBufferSizeCallback* render_buffer_callback_
+ GUARDED_BY(process_crit_sect_);
+ VCMGenericDecoder* _decoder;
+#ifdef DEBUG_DECODER_BIT_STREAM
+ FILE* _bitStreamBeforeDecoder;
+#endif
+ VCMFrameBuffer _frameFromFile;
+ bool _scheduleKeyRequest GUARDED_BY(process_crit_sect_);
+ size_t max_nack_list_size_ GUARDED_BY(process_crit_sect_);
+ EncodedImageCallback* pre_decode_image_callback_ GUARDED_BY(_receiveCritSect);
+
+ VCMCodecDataBase _codecDataBase GUARDED_BY(_receiveCritSect);
+ VCMProcessTimer _receiveStatsTimer;
+ VCMProcessTimer _retransmissionTimer;
+ VCMProcessTimer _keyRequestTimer;
+ QpParser qp_parser_;
+};
+
+} // namespace vcm
+} // namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_CODING_VIDEO_CODING_IMPL_H_
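
Note (reviewer sketch, not part of this diff): VCMProcessTimer is a plain deadline helper with no callback of its own; Process() implementations poll it, as VideoReceiver::Process() does further down in this change. The pattern, with |clock| a hypothetical webrtc::Clock*:

  webrtc::vcm::VCMProcessTimer stats_timer(/* periodMs= */ 1000, clock);
  if (stats_timer.TimeUntilProcess() == 0) {
    stats_timer.Processed();  // Re-arms the 1000 ms period.
    // ... do the periodic work ...
  }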
diff --git a/webrtc/modules/video_coding/video_coding_robustness_unittest.cc b/webrtc/modules/video_coding/video_coding_robustness_unittest.cc
new file mode 100644
index 0000000000..dd6565d505
--- /dev/null
+++ b/webrtc/modules/video_coding/video_coding_robustness_unittest.cc
@@ -0,0 +1,226 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/video_coding/include/mock/mock_video_codec_interface.h"
+#include "webrtc/modules/video_coding/include/mock/mock_vcm_callbacks.h"
+#include "webrtc/modules/video_coding/include/video_coding.h"
+#include "webrtc/modules/video_coding/test/test_util.h"
+#include "webrtc/system_wrappers/include/clock.h"
+
+namespace webrtc {
+
+using ::testing::Return;
+using ::testing::_;
+using ::testing::ElementsAre;
+using ::testing::AllOf;
+using ::testing::Args;
+using ::testing::Field;
+using ::testing::Pointee;
+using ::testing::NiceMock;
+using ::testing::Sequence;
+
+class VCMRobustnessTest : public ::testing::Test {
+ protected:
+ static const size_t kPayloadLen = 10;
+
+ virtual void SetUp() {
+ clock_.reset(new SimulatedClock(0));
+ ASSERT_TRUE(clock_.get() != NULL);
+ vcm_ = VideoCodingModule::Create(clock_.get(), &event_factory_);
+ ASSERT_TRUE(vcm_ != NULL);
+ const size_t kMaxNackListSize = 250;
+ const int kMaxPacketAgeToNack = 450;
+ vcm_->SetNackSettings(kMaxNackListSize, kMaxPacketAgeToNack, 0);
+ ASSERT_EQ(0, vcm_->RegisterFrameTypeCallback(&frame_type_callback_));
+ ASSERT_EQ(0, vcm_->RegisterPacketRequestCallback(&request_callback_));
+ VideoCodingModule::Codec(kVideoCodecVP8, &video_codec_);
+ ASSERT_EQ(VCM_OK, vcm_->RegisterReceiveCodec(&video_codec_, 1));
+ vcm_->RegisterExternalDecoder(&decoder_, video_codec_.plType);
+ }
+
+ virtual void TearDown() { VideoCodingModule::Destroy(vcm_); }
+
+ void InsertPacket(uint32_t timestamp,
+ uint16_t seq_no,
+ bool first,
+ bool marker_bit,
+ FrameType frame_type) {
+ const uint8_t payload[kPayloadLen] = {0};
+ WebRtcRTPHeader rtp_info;
+ memset(&rtp_info, 0, sizeof(rtp_info));
+ rtp_info.frameType = frame_type;
+ rtp_info.header.timestamp = timestamp;
+ rtp_info.header.sequenceNumber = seq_no;
+ rtp_info.header.markerBit = marker_bit;
+ rtp_info.header.payloadType = video_codec_.plType;
+ rtp_info.type.Video.codec = kRtpVideoVp8;
+ rtp_info.type.Video.codecHeader.VP8.InitRTPVideoHeaderVP8();
+ rtp_info.type.Video.isFirstPacket = first;
+
+ ASSERT_EQ(VCM_OK, vcm_->IncomingPacket(payload, kPayloadLen, rtp_info));
+ }
+
+ VideoCodingModule* vcm_;
+ VideoCodec video_codec_;
+ MockVCMFrameTypeCallback frame_type_callback_;
+ MockPacketRequestCallback request_callback_;
+ NiceMock<MockVideoDecoder> decoder_;
+ NiceMock<MockVideoDecoder> decoderCopy_;
+ rtc::scoped_ptr<SimulatedClock> clock_;
+ NullEventFactory event_factory_;
+};
+
+TEST_F(VCMRobustnessTest, TestHardNack) {
+ Sequence s;
+ EXPECT_CALL(request_callback_, ResendPackets(_, 2))
+ .With(Args<0, 1>(ElementsAre(6, 7)))
+ .Times(1);
+ for (int ts = 0; ts <= 6000; ts += 3000) {
+ EXPECT_CALL(decoder_,
+ Decode(AllOf(Field(&EncodedImage::_timeStamp, ts),
+ Field(&EncodedImage::_length, kPayloadLen * 3),
+ Field(&EncodedImage::_completeFrame, true)),
+ false, _, _, _))
+ .Times(1)
+ .InSequence(s);
+ }
+
+ ASSERT_EQ(VCM_OK, vcm_->SetReceiverRobustnessMode(
+ VideoCodingModule::kHardNack, kNoErrors));
+
+ InsertPacket(0, 0, true, false, kVideoFrameKey);
+ InsertPacket(0, 1, false, false, kVideoFrameKey);
+ InsertPacket(0, 2, false, true, kVideoFrameKey);
+ clock_->AdvanceTimeMilliseconds(1000 / 30);
+
+ InsertPacket(3000, 3, true, false, kVideoFrameDelta);
+ InsertPacket(3000, 4, false, false, kVideoFrameDelta);
+ InsertPacket(3000, 5, false, true, kVideoFrameDelta);
+ clock_->AdvanceTimeMilliseconds(1000 / 30);
+
+ ASSERT_EQ(VCM_OK, vcm_->Decode(0));
+ ASSERT_EQ(VCM_OK, vcm_->Decode(0));
+ ASSERT_EQ(VCM_FRAME_NOT_READY, vcm_->Decode(0));
+
+ clock_->AdvanceTimeMilliseconds(10);
+
+ ASSERT_EQ(VCM_OK, vcm_->Process());
+
+ ASSERT_EQ(VCM_FRAME_NOT_READY, vcm_->Decode(0));
+
+ InsertPacket(6000, 8, false, true, kVideoFrameDelta);
+ clock_->AdvanceTimeMilliseconds(10);
+ ASSERT_EQ(VCM_OK, vcm_->Process());
+
+ ASSERT_EQ(VCM_FRAME_NOT_READY, vcm_->Decode(0));
+
+ InsertPacket(6000, 6, true, false, kVideoFrameDelta);
+ InsertPacket(6000, 7, false, false, kVideoFrameDelta);
+ clock_->AdvanceTimeMilliseconds(10);
+ ASSERT_EQ(VCM_OK, vcm_->Process());
+
+ ASSERT_EQ(VCM_OK, vcm_->Decode(0));
+}
+
+TEST_F(VCMRobustnessTest, TestHardNackNoneDecoded) {
+ EXPECT_CALL(request_callback_, ResendPackets(_, _)).Times(0);
+ EXPECT_CALL(frame_type_callback_, RequestKeyFrame()).Times(1);
+
+ ASSERT_EQ(VCM_OK, vcm_->SetReceiverRobustnessMode(
+ VideoCodingModule::kHardNack, kNoErrors));
+
+ InsertPacket(3000, 3, true, false, kVideoFrameDelta);
+ InsertPacket(3000, 4, false, false, kVideoFrameDelta);
+ InsertPacket(3000, 5, false, true, kVideoFrameDelta);
+
+ EXPECT_EQ(VCM_FRAME_NOT_READY, vcm_->Decode(0));
+ ASSERT_EQ(VCM_OK, vcm_->Process());
+
+ clock_->AdvanceTimeMilliseconds(10);
+
+ EXPECT_EQ(VCM_FRAME_NOT_READY, vcm_->Decode(0));
+ ASSERT_EQ(VCM_OK, vcm_->Process());
+}
+
+TEST_F(VCMRobustnessTest, TestModeNoneWithErrors) {
+ EXPECT_CALL(decoder_, InitDecode(_, _)).Times(1);
+ EXPECT_CALL(decoder_, Release()).Times(1);
+ Sequence s1;
+ EXPECT_CALL(request_callback_, ResendPackets(_, 1))
+ .With(Args<0, 1>(ElementsAre(4)))
+ .Times(0);
+
+ EXPECT_CALL(decoder_, Copy()).Times(0);
+ EXPECT_CALL(decoderCopy_, Copy()).Times(0);
+
+ // Decode operations
+ EXPECT_CALL(decoder_,
+ Decode(AllOf(Field(&EncodedImage::_timeStamp, 0),
+ Field(&EncodedImage::_completeFrame, true)),
+ false, _, _, _))
+ .Times(1)
+ .InSequence(s1);
+ EXPECT_CALL(decoder_,
+ Decode(AllOf(Field(&EncodedImage::_timeStamp, 3000),
+ Field(&EncodedImage::_completeFrame, false)),
+ false, _, _, _))
+ .Times(1)
+ .InSequence(s1);
+ EXPECT_CALL(decoder_,
+ Decode(AllOf(Field(&EncodedImage::_timeStamp, 6000),
+ Field(&EncodedImage::_completeFrame, true)),
+ false, _, _, _))
+ .Times(1)
+ .InSequence(s1);
+ EXPECT_CALL(decoder_,
+ Decode(AllOf(Field(&EncodedImage::_timeStamp, 9000),
+ Field(&EncodedImage::_completeFrame, true)),
+ false, _, _, _))
+ .Times(1)
+ .InSequence(s1);
+
+ ASSERT_EQ(VCM_OK, vcm_->SetReceiverRobustnessMode(VideoCodingModule::kNone,
+ kWithErrors));
+
+ InsertPacket(0, 0, true, false, kVideoFrameKey);
+ InsertPacket(0, 1, false, false, kVideoFrameKey);
+ InsertPacket(0, 2, false, true, kVideoFrameKey);
+ EXPECT_EQ(VCM_OK, vcm_->Decode(33)); // Decode timestamp 0.
+ EXPECT_EQ(VCM_OK, vcm_->Process()); // Expect no NACK list.
+
+ clock_->AdvanceTimeMilliseconds(33);
+ InsertPacket(3000, 3, true, false, kVideoFrameDelta);
+ // Packet 4 missing
+ InsertPacket(3000, 5, false, true, kVideoFrameDelta);
+ EXPECT_EQ(VCM_FRAME_NOT_READY, vcm_->Decode(0));
+ EXPECT_EQ(VCM_OK, vcm_->Process()); // Expect no NACK list.
+
+ clock_->AdvanceTimeMilliseconds(33);
+ InsertPacket(6000, 6, true, false, kVideoFrameDelta);
+ InsertPacket(6000, 7, false, false, kVideoFrameDelta);
+ InsertPacket(6000, 8, false, true, kVideoFrameDelta);
+ EXPECT_EQ(VCM_OK, vcm_->Decode(0)); // Decode timestamp 3000 incomplete.
+ EXPECT_EQ(VCM_OK, vcm_->Process()); // Expect no NACK list.
+
+ clock_->AdvanceTimeMilliseconds(10);
+ EXPECT_EQ(VCM_OK, vcm_->Decode(23)); // Decode timestamp 6000 complete.
+ EXPECT_EQ(VCM_OK, vcm_->Process()); // Expect no NACK list.
+
+ clock_->AdvanceTimeMilliseconds(23);
+ InsertPacket(3000, 4, false, false, kVideoFrameDelta);
+
+ InsertPacket(9000, 9, true, false, kVideoFrameDelta);
+ InsertPacket(9000, 10, false, false, kVideoFrameDelta);
+ InsertPacket(9000, 11, false, true, kVideoFrameDelta);
+ EXPECT_EQ(VCM_OK, vcm_->Decode(33)); // Decode timestamp 9000 complete.
+}
+} // namespace webrtc
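
Note (reviewer sketch, not part of this diff): the Args<0, 1> idiom used in these tests tuples a call's first two arguments, here the sequence-number array and its length, so a container matcher can check the actual NACK list contents. For instance, a hypothetical expectation for a single missing packet with sequence number 4 would read:

  EXPECT_CALL(request_callback_, ResendPackets(_, 1))
      .With(Args<0, 1>(ElementsAre(4)));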
diff --git a/webrtc/modules/video_coding/video_coding_test.gypi b/webrtc/modules/video_coding/video_coding_test.gypi
index 5d720ebb63..fc2fec6c98 100644
--- a/webrtc/modules/video_coding/video_coding_test.gypi
+++ b/webrtc/modules/video_coding/video_coding_test.gypi
@@ -19,16 +19,16 @@
],
'sources': [
# headers
- 'main/test/receiver_tests.h',
- 'main/test/rtp_player.h',
- 'main/test/vcm_payload_sink_factory.h',
+ 'test/receiver_tests.h',
+ 'test/rtp_player.h',
+ 'test/vcm_payload_sink_factory.h',
# sources
- 'main/test/rtp_player.cc',
- 'main/test/test_util.cc',
- 'main/test/tester_main.cc',
- 'main/test/vcm_payload_sink_factory.cc',
- 'main/test/video_rtp_play.cc',
+ 'test/rtp_player.cc',
+ 'test/test_util.cc',
+ 'test/tester_main.cc',
+ 'test/vcm_payload_sink_factory.cc',
+ 'test/video_rtp_play.cc',
], # sources
},
],
diff --git a/webrtc/modules/video_coding/video_receiver.cc b/webrtc/modules/video_coding/video_receiver.cc
new file mode 100644
index 0000000000..02c0da8f48
--- /dev/null
+++ b/webrtc/modules/video_coding/video_receiver.cc
@@ -0,0 +1,549 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/trace_event.h"
+#include "webrtc/common_types.h"
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
+#include "webrtc/modules/video_coding/encoded_frame.h"
+#include "webrtc/modules/video_coding/jitter_buffer.h"
+#include "webrtc/modules/video_coding/packet.h"
+#include "webrtc/modules/video_coding/video_coding_impl.h"
+#include "webrtc/system_wrappers/include/clock.h"
+
+// #define DEBUG_DECODER_BIT_STREAM
+
+namespace webrtc {
+namespace vcm {
+
+VideoReceiver::VideoReceiver(Clock* clock, EventFactory* event_factory)
+ : clock_(clock),
+ process_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
+ _receiveCritSect(CriticalSectionWrapper::CreateCriticalSection()),
+ _timing(clock_),
+ _receiver(&_timing, clock_, event_factory),
+ _decodedFrameCallback(&_timing, clock_),
+ _frameTypeCallback(NULL),
+ _receiveStatsCallback(NULL),
+ _decoderTimingCallback(NULL),
+ _packetRequestCallback(NULL),
+ render_buffer_callback_(NULL),
+ _decoder(NULL),
+#ifdef DEBUG_DECODER_BIT_STREAM
+ _bitStreamBeforeDecoder(NULL),
+#endif
+ _frameFromFile(),
+ _scheduleKeyRequest(false),
+ max_nack_list_size_(0),
+ pre_decode_image_callback_(NULL),
+ _codecDataBase(nullptr, nullptr),
+ _receiveStatsTimer(1000, clock_),
+ _retransmissionTimer(10, clock_),
+ _keyRequestTimer(500, clock_) {
+ assert(clock_);
+#ifdef DEBUG_DECODER_BIT_STREAM
+ _bitStreamBeforeDecoder = fopen("decoderBitStream.bit", "wb");
+#endif
+}
+
+VideoReceiver::~VideoReceiver() {
+ delete _receiveCritSect;
+#ifdef DEBUG_DECODER_BIT_STREAM
+ fclose(_bitStreamBeforeDecoder);
+#endif
+}
+
+int32_t VideoReceiver::Process() {
+ int32_t returnValue = VCM_OK;
+
+ // Receive-side statistics
+ if (_receiveStatsTimer.TimeUntilProcess() == 0) {
+ _receiveStatsTimer.Processed();
+ CriticalSectionScoped cs(process_crit_sect_.get());
+ if (_receiveStatsCallback != NULL) {
+ uint32_t bitRate;
+ uint32_t frameRate;
+ _receiver.ReceiveStatistics(&bitRate, &frameRate);
+ _receiveStatsCallback->OnReceiveRatesUpdated(bitRate, frameRate);
+ }
+
+ if (_decoderTimingCallback != NULL) {
+ int decode_ms;
+ int max_decode_ms;
+ int current_delay_ms;
+ int target_delay_ms;
+ int jitter_buffer_ms;
+ int min_playout_delay_ms;
+ int render_delay_ms;
+ _timing.GetTimings(&decode_ms, &max_decode_ms, &current_delay_ms,
+ &target_delay_ms, &jitter_buffer_ms,
+ &min_playout_delay_ms, &render_delay_ms);
+ _decoderTimingCallback->OnDecoderTiming(
+ decode_ms, max_decode_ms, current_delay_ms, target_delay_ms,
+ jitter_buffer_ms, min_playout_delay_ms, render_delay_ms);
+ }
+
+ // Size of render buffer.
+ if (render_buffer_callback_) {
+ int buffer_size_ms = _receiver.RenderBufferSizeMs();
+ render_buffer_callback_->RenderBufferSizeMs(buffer_size_ms);
+ }
+ }
+
+ // Key frame requests
+ if (_keyRequestTimer.TimeUntilProcess() == 0) {
+ _keyRequestTimer.Processed();
+ bool request_key_frame = false;
+ {
+ CriticalSectionScoped cs(process_crit_sect_.get());
+ request_key_frame = _scheduleKeyRequest && _frameTypeCallback != NULL;
+ }
+ if (request_key_frame) {
+ const int32_t ret = RequestKeyFrame();
+ if (ret != VCM_OK && returnValue == VCM_OK) {
+ returnValue = ret;
+ }
+ }
+ }
+
+ // Packet retransmission requests
+ // TODO(holmer): Add API for changing Process interval and make sure it's
+ // disabled when NACK is off.
+ if (_retransmissionTimer.TimeUntilProcess() == 0) {
+ _retransmissionTimer.Processed();
+ bool callback_registered = false;
+ uint16_t length;
+ {
+ CriticalSectionScoped cs(process_crit_sect_.get());
+ length = max_nack_list_size_;
+ callback_registered = _packetRequestCallback != NULL;
+ }
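+ // The lock above is held only to snapshot the settings; the NACK list is
+ // built without it, and the callback is invoked under a fresh lock below.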
+ if (callback_registered && length > 0) {
+ // Collect sequence numbers from the default receiver.
+ bool request_key_frame = false;
+ std::vector<uint16_t> nackList = _receiver.NackList(&request_key_frame);
+ int32_t ret = VCM_OK;
+ if (request_key_frame) {
+ ret = RequestKeyFrame();
+ if (ret != VCM_OK && returnValue == VCM_OK) {
+ returnValue = ret;
+ }
+ }
+ if (ret == VCM_OK && !nackList.empty()) {
+ CriticalSectionScoped cs(process_crit_sect_.get());
+ if (_packetRequestCallback != NULL) {
+ _packetRequestCallback->ResendPackets(&nackList[0], nackList.size());
+ }
+ }
+ }
+ }
+
+ return returnValue;
+}
+
+int64_t VideoReceiver::TimeUntilNextProcess() {
+ int64_t timeUntilNextProcess = _receiveStatsTimer.TimeUntilProcess();
+ if (_receiver.NackMode() != kNoNack) {
+ // We need a Process call more often if we are relying on
+ // retransmissions.
+ timeUntilNextProcess =
+ VCM_MIN(timeUntilNextProcess, _retransmissionTimer.TimeUntilProcess());
+ }
+ timeUntilNextProcess =
+ VCM_MIN(timeUntilNextProcess, _keyRequestTimer.TimeUntilProcess());
+
+ return timeUntilNextProcess;
+}
+
+int32_t VideoReceiver::SetReceiveChannelParameters(int64_t rtt) {
+ CriticalSectionScoped receiveCs(_receiveCritSect);
+ _receiver.UpdateRtt(rtt);
+ return 0;
+}
+
+// Enable or disable a video protection method.
+// Note: This API should be deprecated, as it does not distinguish between
+// the protection method and decoding with or without errors. If that
+// distinction is needed, use SetReceiverRobustnessMode instead.
+int32_t VideoReceiver::SetVideoProtection(VCMVideoProtection videoProtection,
+ bool enable) {
+ // By default, do not decode with errors.
+ _receiver.SetDecodeErrorMode(kNoErrors);
+ switch (videoProtection) {
+ case kProtectionNack: {
+ RTC_DCHECK(enable);
+ _receiver.SetNackMode(kNack, -1, -1);
+ break;
+ }
+
+ case kProtectionNackFEC: {
+ CriticalSectionScoped cs(_receiveCritSect);
+ RTC_DCHECK(enable);
+ _receiver.SetNackMode(kNack, media_optimization::kLowRttNackMs, -1);
+ _receiver.SetDecodeErrorMode(kNoErrors);
+ break;
+ }
+ case kProtectionFEC:
+ case kProtectionNone:
+ // No receiver-side protection.
+ RTC_DCHECK(enable);
+ _receiver.SetNackMode(kNoNack, -1, -1);
+ _receiver.SetDecodeErrorMode(kWithErrors);
+ break;
+ }
+ return VCM_OK;
+}
+
+// Register a receive callback. Will be called whenever there is a new frame
+// ready for rendering.
+int32_t VideoReceiver::RegisterReceiveCallback(
+ VCMReceiveCallback* receiveCallback) {
+ CriticalSectionScoped cs(_receiveCritSect);
+ _decodedFrameCallback.SetUserReceiveCallback(receiveCallback);
+ return VCM_OK;
+}
+
+int32_t VideoReceiver::RegisterReceiveStatisticsCallback(
+ VCMReceiveStatisticsCallback* receiveStats) {
+ CriticalSectionScoped cs(process_crit_sect_.get());
+ _receiver.RegisterStatsCallback(receiveStats);
+ _receiveStatsCallback = receiveStats;
+ return VCM_OK;
+}
+
+int32_t VideoReceiver::RegisterDecoderTimingCallback(
+ VCMDecoderTimingCallback* decoderTiming) {
+ CriticalSectionScoped cs(process_crit_sect_.get());
+ _decoderTimingCallback = decoderTiming;
+ return VCM_OK;
+}
+
+// Register an externally defined decoder object.
+void VideoReceiver::RegisterExternalDecoder(VideoDecoder* externalDecoder,
+ uint8_t payloadType) {
+ CriticalSectionScoped cs(_receiveCritSect);
+ if (externalDecoder == NULL) {
+ // Make sure the VCM updates the decoder next time it decodes.
+ _decoder = NULL;
+ RTC_CHECK(_codecDataBase.DeregisterExternalDecoder(payloadType));
+ return;
+ }
+ _codecDataBase.RegisterExternalDecoder(externalDecoder, payloadType);
+}
+
+// Register a frame type request callback.
+int32_t VideoReceiver::RegisterFrameTypeCallback(
+ VCMFrameTypeCallback* frameTypeCallback) {
+ CriticalSectionScoped cs(process_crit_sect_.get());
+ _frameTypeCallback = frameTypeCallback;
+ return VCM_OK;
+}
+
+int32_t VideoReceiver::RegisterPacketRequestCallback(
+ VCMPacketRequestCallback* callback) {
+ CriticalSectionScoped cs(process_crit_sect_.get());
+ _packetRequestCallback = callback;
+ return VCM_OK;
+}
+
+int VideoReceiver::RegisterRenderBufferSizeCallback(
+ VCMRenderBufferSizeCallback* callback) {
+ CriticalSectionScoped cs(process_crit_sect_.get());
+ render_buffer_callback_ = callback;
+ return VCM_OK;
+}
+
+void VideoReceiver::TriggerDecoderShutdown() {
+ _receiver.TriggerDecoderShutdown();
+}
+
+// Decode next frame, blocking.
+// Should be called as often as possible to get the most out of the decoder.
+int32_t VideoReceiver::Decode(uint16_t maxWaitTimeMs) {
+ int64_t nextRenderTimeMs;
+ bool prefer_late_decoding = false;
+ {
+ CriticalSectionScoped cs(_receiveCritSect);
+ prefer_late_decoding = _codecDataBase.PrefersLateDecoding();
+ }
+
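+ // Blocks for up to |maxWaitTimeMs| while waiting for a decodable frame.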
+ VCMEncodedFrame* frame = _receiver.FrameForDecoding(
+ maxWaitTimeMs, &nextRenderTimeMs, prefer_late_decoding);
+
+ if (!frame)
+ return VCM_FRAME_NOT_READY;
+
+ CriticalSectionScoped cs(_receiveCritSect);
+
+ // If this frame was too late, we should adjust the delay accordingly
+ _timing.UpdateCurrentDelay(frame->RenderTimeMs(),
+ clock_->TimeInMilliseconds());
+
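+ // Hand the pre-decode observer a copy of the encoded image, annotated with
+ // the parsed QP when available.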
+ if (pre_decode_image_callback_) {
+ EncodedImage encoded_image(frame->EncodedImage());
+ int qp = -1;
+ if (qp_parser_.GetQp(*frame, &qp)) {
+ encoded_image.qp_ = qp;
+ }
+ pre_decode_image_callback_->Encoded(encoded_image, frame->CodecSpecific(),
+ NULL);
+ }
+
+#ifdef DEBUG_DECODER_BIT_STREAM
+ if (_bitStreamBeforeDecoder != NULL) {
+ // Write bit stream to file for debugging purposes
+ if (fwrite(frame->Buffer(), 1, frame->Length(), _bitStreamBeforeDecoder) !=
+ frame->Length()) {
+ return -1;
+ }
+ }
+#endif
+ const int32_t ret = Decode(*frame);
+ _receiver.ReleaseFrame(frame);
+ return ret;
+}
+
+int32_t VideoReceiver::RequestSliceLossIndication(
+ const uint64_t pictureID) const {
+ TRACE_EVENT1("webrtc", "RequestSLI", "picture_id", pictureID);
+ CriticalSectionScoped cs(process_crit_sect_.get());
+ if (_frameTypeCallback != NULL) {
+ const int32_t ret =
+ _frameTypeCallback->SliceLossIndicationRequest(pictureID);
+ if (ret < 0) {
+ return ret;
+ }
+ } else {
+ return VCM_MISSING_CALLBACK;
+ }
+ return VCM_OK;
+}
+
+int32_t VideoReceiver::RequestKeyFrame() {
+ TRACE_EVENT0("webrtc", "RequestKeyFrame");
+ CriticalSectionScoped process_cs(process_crit_sect_.get());
+ if (_frameTypeCallback != NULL) {
+ const int32_t ret = _frameTypeCallback->RequestKeyFrame();
+ if (ret < 0) {
+ return ret;
+ }
+ _scheduleKeyRequest = false;
+ } else {
+ return VCM_MISSING_CALLBACK;
+ }
+ return VCM_OK;
+}
+
+// Must be called from inside the receive-side critical section.
+int32_t VideoReceiver::Decode(const VCMEncodedFrame& frame) {
+ TRACE_EVENT_ASYNC_STEP1("webrtc", "Video", frame.TimeStamp(), "Decode",
+ "type", frame.FrameType());
+ // Change decoder if payload type has changed
+ _decoder = _codecDataBase.GetDecoder(frame, &_decodedFrameCallback);
+ if (_decoder == NULL) {
+ return VCM_NO_CODEC_REGISTERED;
+ }
+ // Decode a frame
+ int32_t ret = _decoder->Decode(frame, clock_->TimeInMilliseconds());
+
+ // Check for failed decoding, run frame type request callback if needed.
+ bool request_key_frame = false;
+ if (ret < 0) {
+ if (ret == VCM_ERROR_REQUEST_SLI) {
+ return RequestSliceLossIndication(
+ _decodedFrameCallback.LastReceivedPictureID() + 1);
+ } else {
+ request_key_frame = true;
+ }
+ } else if (ret == VCM_REQUEST_SLI) {
+ ret = RequestSliceLossIndication(
+ _decodedFrameCallback.LastReceivedPictureID() + 1);
+ }
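+ // An incomplete frame (or one with missing references) still counts as a
+ // successful decode, but schedule a key frame request to recover.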
+ if (!frame.Complete() || frame.MissingFrame()) {
+ request_key_frame = true;
+ ret = VCM_OK;
+ }
+ if (request_key_frame) {
+ CriticalSectionScoped cs(process_crit_sect_.get());
+ _scheduleKeyRequest = true;
+ }
+ TRACE_EVENT_ASYNC_END0("webrtc", "Video", frame.TimeStamp());
+ return ret;
+}
+
+// Reset the decoder state
+int32_t VideoReceiver::ResetDecoder() {
+ bool reset_key_request = false;
+ {
+ CriticalSectionScoped cs(_receiveCritSect);
+ if (_decoder != NULL) {
+ _receiver.Reset();
+ _timing.Reset();
+ reset_key_request = true;
+ _decoder->Reset();
+ }
+ }
+ if (reset_key_request) {
+ CriticalSectionScoped cs(process_crit_sect_.get());
+ _scheduleKeyRequest = false;
+ }
+ return VCM_OK;
+}
+
+// Register possible receive codecs, can be called multiple times
+int32_t VideoReceiver::RegisterReceiveCodec(const VideoCodec* receiveCodec,
+ int32_t numberOfCores,
+ bool requireKeyFrame) {
+ CriticalSectionScoped cs(_receiveCritSect);
+ if (receiveCodec == NULL) {
+ return VCM_PARAMETER_ERROR;
+ }
+ if (!_codecDataBase.RegisterReceiveCodec(receiveCodec, numberOfCores,
+ requireKeyFrame)) {
+ return -1;
+ }
+ return 0;
+}
+
+// Get current received codec
+int32_t VideoReceiver::ReceiveCodec(VideoCodec* currentReceiveCodec) const {
+ CriticalSectionScoped cs(_receiveCritSect);
+ if (currentReceiveCodec == NULL) {
+ return VCM_PARAMETER_ERROR;
+ }
+ return _codecDataBase.ReceiveCodec(currentReceiveCodec) ? 0 : -1;
+}
+
+// Get current received codec
+VideoCodecType VideoReceiver::ReceiveCodec() const {
+ CriticalSectionScoped cs(_receiveCritSect);
+ return _codecDataBase.ReceiveCodec();
+}
+
+// Incoming packet from the network, parsed and ready for decoding; non-blocking.
+int32_t VideoReceiver::IncomingPacket(const uint8_t* incomingPayload,
+ size_t payloadLength,
+ const WebRtcRTPHeader& rtpInfo) {
+ if (rtpInfo.frameType == kVideoFrameKey) {
+ TRACE_EVENT1("webrtc", "VCM::PacketKeyFrame", "seqnum",
+ rtpInfo.header.sequenceNumber);
+ }
+ if (incomingPayload == NULL) {
+ // The jitter buffer doesn't handle non-zero payload lengths for packets
+ // without payload.
+ // TODO(holmer): We should fix this in the jitter buffer.
+ payloadLength = 0;
+ }
+ const VCMPacket packet(incomingPayload, payloadLength, rtpInfo);
+ int32_t ret = _receiver.InsertPacket(packet, rtpInfo.type.Video.width,
+ rtpInfo.type.Video.height);
+ // TODO(holmer): Investigate if this somehow should use the key frame
+ // request scheduling to throttle the requests.
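+ // A flush indicator means the jitter buffer was flushed; request a key
+ // frame and reset the decoder to resynchronize.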
+ if (ret == VCM_FLUSH_INDICATOR) {
+ RequestKeyFrame();
+ ResetDecoder();
+ } else if (ret < 0) {
+ return ret;
+ }
+ return VCM_OK;
+}
+
+// Minimum playout delay (used for lip-sync). This is the minimum delay
+// required to sync with audio. Not included in VideoCodingModule::Delay().
+// Defaults to 0 ms.
+int32_t VideoReceiver::SetMinimumPlayoutDelay(uint32_t minPlayoutDelayMs) {
+ _timing.set_min_playout_delay(minPlayoutDelayMs);
+ return VCM_OK;
+}
+
+// The estimated delay caused by rendering, defaults to
+// kDefaultRenderDelayMs = 10 ms
+int32_t VideoReceiver::SetRenderDelay(uint32_t timeMS) {
+ _timing.set_render_delay(timeMS);
+ return VCM_OK;
+}
+
+// Current video delay
+int32_t VideoReceiver::Delay() const {
+ return _timing.TargetVideoDelay();
+}
+
+uint32_t VideoReceiver::DiscardedPackets() const {
+ return _receiver.DiscardedPackets();
+}
+
+int VideoReceiver::SetReceiverRobustnessMode(
+ ReceiverRobustness robustnessMode,
+ VCMDecodeErrorMode decode_error_mode) {
+ CriticalSectionScoped cs(_receiveCritSect);
+ switch (robustnessMode) {
+ case VideoCodingModule::kNone:
+ _receiver.SetNackMode(kNoNack, -1, -1);
+ break;
+ case VideoCodingModule::kHardNack:
+ // Always wait for retransmissions (except when decoding with errors).
+ _receiver.SetNackMode(kNack, -1, -1);
+ break;
+ case VideoCodingModule::kSoftNack:
+#if 1
+ assert(false); // TODO(hlundin): Not completed.
+ return VCM_NOT_IMPLEMENTED;
+#else
+ // Enable hybrid NACK/FEC. Always wait for retransmissions and don't add
+ // extra delay when RTT is above kLowRttNackMs.
+ _receiver.SetNackMode(kNack, media_optimization::kLowRttNackMs, -1);
+ break;
+#endif
+ case VideoCodingModule::kReferenceSelection:
+#if 1
+ assert(false); // TODO(hlundin): Not completed.
+ return VCM_NOT_IMPLEMENTED;
+#else
+ if (decode_error_mode == kNoErrors) {
+ return VCM_PARAMETER_ERROR;
+ }
+ _receiver.SetNackMode(kNoNack, -1, -1);
+ break;
+#endif
+ }
+ _receiver.SetDecodeErrorMode(decode_error_mode);
+ return VCM_OK;
+}
+
+void VideoReceiver::SetDecodeErrorMode(VCMDecodeErrorMode decode_error_mode) {
+ CriticalSectionScoped cs(_receiveCritSect);
+ _receiver.SetDecodeErrorMode(decode_error_mode);
+}
+
+void VideoReceiver::SetNackSettings(size_t max_nack_list_size,
+ int max_packet_age_to_nack,
+ int max_incomplete_time_ms) {
+ if (max_nack_list_size != 0) {
+ CriticalSectionScoped process_cs(process_crit_sect_.get());
+ max_nack_list_size_ = max_nack_list_size;
+ }
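+ // A zero |max_nack_list_size| leaves the cached limit unchanged; the
+ // receiver below still gets the full settings update.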
+ _receiver.SetNackSettings(max_nack_list_size, max_packet_age_to_nack,
+ max_incomplete_time_ms);
+}
+
+int VideoReceiver::SetMinReceiverDelay(int desired_delay_ms) {
+ return _receiver.SetMinReceiverDelay(desired_delay_ms);
+}
+
+void VideoReceiver::RegisterPreDecodeImageCallback(
+ EncodedImageCallback* observer) {
+ CriticalSectionScoped cs(_receiveCritSect);
+ pre_decode_image_callback_ = observer;
+}
+
+} // namespace vcm
+} // namespace webrtc
diff --git a/webrtc/modules/video_coding/video_receiver_unittest.cc b/webrtc/modules/video_coding/video_receiver_unittest.cc
new file mode 100644
index 0000000000..820ce9ae2d
--- /dev/null
+++ b/webrtc/modules/video_coding/video_receiver_unittest.cc
@@ -0,0 +1,209 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <vector>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/video_coding/include/mock/mock_video_codec_interface.h"
+#include "webrtc/modules/video_coding/include/mock/mock_vcm_callbacks.h"
+#include "webrtc/modules/video_coding/include/video_coding.h"
+#include "webrtc/modules/video_coding/video_coding_impl.h"
+#include "webrtc/modules/video_coding/test/test_util.h"
+#include "webrtc/system_wrappers/include/clock.h"
+
+using ::testing::_;
+using ::testing::NiceMock;
+
+namespace webrtc {
+namespace vcm {
+namespace {
+
+class TestVideoReceiver : public ::testing::Test {
+ protected:
+ static const int kUnusedPayloadType = 10;
+
+ TestVideoReceiver() : clock_(0) {}
+
+ virtual void SetUp() {
+ receiver_.reset(new VideoReceiver(&clock_, &event_factory_));
+ receiver_->RegisterExternalDecoder(&decoder_, kUnusedPayloadType);
+ const size_t kMaxNackListSize = 250;
+ const int kMaxPacketAgeToNack = 450;
+ receiver_->SetNackSettings(kMaxNackListSize, kMaxPacketAgeToNack, 0);
+
+ VideoCodingModule::Codec(kVideoCodecVP8, &settings_);
+ settings_.plType = kUnusedPayloadType; // Use the mocked encoder.
+ EXPECT_EQ(0, receiver_->RegisterReceiveCodec(&settings_, 1, true));
+ }
+
+ void InsertAndVerifyPaddingFrame(const uint8_t* payload,
+ WebRtcRTPHeader* header) {
+ ASSERT_TRUE(header != NULL);
+ for (int j = 0; j < 5; ++j) {
+ // Padding only packets are passed to the VCM with payload size 0.
+ EXPECT_EQ(0, receiver_->IncomingPacket(payload, 0, *header));
+ ++header->header.sequenceNumber;
+ }
+ EXPECT_EQ(0, receiver_->Process());
+ EXPECT_CALL(decoder_, Decode(_, _, _, _, _)).Times(0);
+ EXPECT_EQ(VCM_FRAME_NOT_READY, receiver_->Decode(100));
+ }
+
+ void InsertAndVerifyDecodableFrame(const uint8_t* payload,
+ size_t length,
+ WebRtcRTPHeader* header) {
+ ASSERT_TRUE(header != NULL);
+ EXPECT_EQ(0, receiver_->IncomingPacket(payload, length, *header));
+ ++header->header.sequenceNumber;
+ EXPECT_CALL(packet_request_callback_, ResendPackets(_, _)).Times(0);
+ EXPECT_EQ(0, receiver_->Process());
+ EXPECT_CALL(decoder_, Decode(_, _, _, _, _)).Times(1);
+ EXPECT_EQ(0, receiver_->Decode(100));
+ }
+
+ SimulatedClock clock_;
+ NullEventFactory event_factory_;
+ VideoCodec settings_;
+ NiceMock<MockVideoDecoder> decoder_;
+ NiceMock<MockPacketRequestCallback> packet_request_callback_;
+
+ rtc::scoped_ptr<VideoReceiver> receiver_;
+};
+
+TEST_F(TestVideoReceiver, PaddingOnlyFrames) {
+ EXPECT_EQ(0, receiver_->SetVideoProtection(kProtectionNack, true));
+ EXPECT_EQ(
+ 0, receiver_->RegisterPacketRequestCallback(&packet_request_callback_));
+ const size_t kPaddingSize = 220;
+ const uint8_t payload[kPaddingSize] = {0};
+ WebRtcRTPHeader header;
+ memset(&header, 0, sizeof(header));
+ header.frameType = kEmptyFrame;
+ header.header.markerBit = false;
+ header.header.paddingLength = kPaddingSize;
+ header.header.payloadType = kUnusedPayloadType;
+ header.header.ssrc = 1;
+ header.header.headerLength = 12;
+ header.type.Video.codec = kRtpVideoVp8;
+ for (int i = 0; i < 10; ++i) {
+ EXPECT_CALL(packet_request_callback_, ResendPackets(_, _)).Times(0);
+ InsertAndVerifyPaddingFrame(payload, &header);
+ clock_.AdvanceTimeMilliseconds(33);
+ header.header.timestamp += 3000;
+ }
+}
+
+TEST_F(TestVideoReceiver, PaddingOnlyFramesWithLosses) {
+ EXPECT_EQ(0, receiver_->SetVideoProtection(kProtectionNack, true));
+ EXPECT_EQ(
+ 0, receiver_->RegisterPacketRequestCallback(&packet_request_callback_));
+ const size_t kFrameSize = 1200;
+ const size_t kPaddingSize = 220;
+ const uint8_t payload[kFrameSize] = {0};
+ WebRtcRTPHeader header;
+ memset(&header, 0, sizeof(header));
+ header.frameType = kEmptyFrame;
+ header.header.markerBit = false;
+ header.header.paddingLength = kPaddingSize;
+ header.header.payloadType = kUnusedPayloadType;
+ header.header.ssrc = 1;
+ header.header.headerLength = 12;
+ header.type.Video.codec = kRtpVideoVp8;
+ // Insert one video frame to get one frame decoded.
+ header.frameType = kVideoFrameKey;
+ header.type.Video.isFirstPacket = true;
+ header.header.markerBit = true;
+ InsertAndVerifyDecodableFrame(payload, kFrameSize, &header);
+ clock_.AdvanceTimeMilliseconds(33);
+ header.header.timestamp += 3000;
+
+ header.frameType = kEmptyFrame;
+ header.type.Video.isFirstPacket = false;
+ header.header.markerBit = false;
+ // Insert padding frames.
+ for (int i = 0; i < 10; ++i) {
+ // Lose one packet from the 6th frame.
+ if (i == 5) {
+ ++header.header.sequenceNumber;
+ }
+ // Lose the 4th frame.
+ if (i == 3) {
+ header.header.sequenceNumber += 5;
+ } else {
+ if (i > 3 && i < 5) {
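+ // The lost 4th frame consisted of five packets, so expect a NACK list of
+ // five sequence numbers.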
+ EXPECT_CALL(packet_request_callback_, ResendPackets(_, 5)).Times(1);
+ } else if (i >= 5) {
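+ // One additional packet was lost from the 6th frame, growing the NACK
+ // list to six sequence numbers.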
+ EXPECT_CALL(packet_request_callback_, ResendPackets(_, 6)).Times(1);
+ } else {
+ EXPECT_CALL(packet_request_callback_, ResendPackets(_, _)).Times(0);
+ }
+ InsertAndVerifyPaddingFrame(payload, &header);
+ }
+ clock_.AdvanceTimeMilliseconds(33);
+ header.header.timestamp += 3000;
+ }
+}
+
+TEST_F(TestVideoReceiver, PaddingOnlyAndVideo) {
+ EXPECT_EQ(0, receiver_->SetVideoProtection(kProtectionNack, true));
+ EXPECT_EQ(
+ 0, receiver_->RegisterPacketRequestCallback(&packet_request_callback_));
+ const size_t kFrameSize = 1200;
+ const size_t kPaddingSize = 220;
+ const uint8_t payload[kFrameSize] = {0};
+ WebRtcRTPHeader header;
+ memset(&header, 0, sizeof(header));
+ header.frameType = kEmptyFrame;
+ header.type.Video.isFirstPacket = false;
+ header.header.markerBit = false;
+ header.header.paddingLength = kPaddingSize;
+ header.header.payloadType = kUnusedPayloadType;
+ header.header.ssrc = 1;
+ header.header.headerLength = 12;
+ header.type.Video.codec = kRtpVideoVp8;
+ header.type.Video.codecHeader.VP8.pictureId = -1;
+ header.type.Video.codecHeader.VP8.tl0PicIdx = -1;
+ for (int i = 0; i < 3; ++i) {
+ // Insert 2 video frames.
+ for (int j = 0; j < 2; ++j) {
+ if (i == 0 && j == 0) // First frame should be a key frame.
+ header.frameType = kVideoFrameKey;
+ else
+ header.frameType = kVideoFrameDelta;
+ header.type.Video.isFirstPacket = true;
+ header.header.markerBit = true;
+ InsertAndVerifyDecodableFrame(payload, kFrameSize, &header);
+ clock_.AdvanceTimeMilliseconds(33);
+ header.header.timestamp += 3000;
+ }
+
+ // Insert 2 padding only frames.
+ header.frameType = kEmptyFrame;
+ header.type.Video.isFirstPacket = false;
+ header.header.markerBit = false;
+ for (int j = 0; j < 2; ++j) {
+ InsertAndVerifyPaddingFrame(payload, &header);
+ clock_.AdvanceTimeMilliseconds(33);
+ header.header.timestamp += 3000;
+ }
+ }
+}
+
+TEST_F(TestVideoReceiver, ReceiverDelay) {
+ EXPECT_EQ(0, receiver_->SetMinReceiverDelay(0));
+ EXPECT_EQ(0, receiver_->SetMinReceiverDelay(5000));
+ EXPECT_EQ(-1, receiver_->SetMinReceiverDelay(-100));
+ EXPECT_EQ(-1, receiver_->SetMinReceiverDelay(10010));
+}
+
+} // namespace
+} // namespace vcm
+} // namespace webrtc
diff --git a/webrtc/modules/video_coding/video_sender.cc b/webrtc/modules/video_coding/video_sender.cc
new file mode 100644
index 0000000000..ac901f95b9
--- /dev/null
+++ b/webrtc/modules/video_coding/video_sender.cc
@@ -0,0 +1,352 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+#include <algorithm> // std::max
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/common_types.h"
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
+#include "webrtc/modules/video_coding/encoded_frame.h"
+#include "webrtc/modules/video_coding/utility/quality_scaler.h"
+#include "webrtc/modules/video_coding/video_coding_impl.h"
+#include "webrtc/system_wrappers/include/clock.h"
+
+namespace webrtc {
+namespace vcm {
+
+VideoSender::VideoSender(Clock* clock,
+ EncodedImageCallback* post_encode_callback,
+ VideoEncoderRateObserver* encoder_rate_observer,
+ VCMQMSettingsCallback* qm_settings_callback)
+ : clock_(clock),
+ process_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
+ _encoder(nullptr),
+ _encodedFrameCallback(post_encode_callback),
+ _nextFrameTypes(1, kVideoFrameDelta),
+ _mediaOpt(clock_),
+ _sendStatsCallback(nullptr),
+ _codecDataBase(encoder_rate_observer, &_encodedFrameCallback),
+ frame_dropper_enabled_(true),
+ _sendStatsTimer(1000, clock_),
+ current_codec_(),
+ qm_settings_callback_(qm_settings_callback),
+ protection_callback_(nullptr),
+ encoder_params_({0, 0, 0, 0}) {
+ // Allow VideoSender to be created on one thread but used on another, post
+ // construction. This is currently how this class is being used by at least
+ // one external project (diffractor).
+ _mediaOpt.EnableQM(qm_settings_callback_ != nullptr);
+ _mediaOpt.Reset();
+ main_thread_.DetachFromThread();
+}
+
+VideoSender::~VideoSender() {}
+
+int32_t VideoSender::Process() {
+ int32_t returnValue = VCM_OK;
+
+ if (_sendStatsTimer.TimeUntilProcess() == 0) {
+ _sendStatsTimer.Processed();
+ CriticalSectionScoped cs(process_crit_sect_.get());
+ if (_sendStatsCallback != nullptr) {
+ uint32_t bitRate = _mediaOpt.SentBitRate();
+ uint32_t frameRate = _mediaOpt.SentFrameRate();
+ _sendStatsCallback->SendStatistics(bitRate, frameRate);
+ }
+ }
+
+ {
+ rtc::CritScope cs(&params_lock_);
+ // Force an encoder parameters update, so that incoming frame rate is
+ // updated even if bandwidth hasn't changed.
+ encoder_params_.input_frame_rate = _mediaOpt.InputFrameRate();
+ }
+
+ return returnValue;
+}
+
+int64_t VideoSender::TimeUntilNextProcess() {
+ return _sendStatsTimer.TimeUntilProcess();
+}
+
+// Register the send codec to be used.
+int32_t VideoSender::RegisterSendCodec(const VideoCodec* sendCodec,
+ uint32_t numberOfCores,
+ uint32_t maxPayloadSize) {
+ RTC_DCHECK(main_thread_.CalledOnValidThread());
+ rtc::CritScope lock(&send_crit_);
+ if (sendCodec == nullptr) {
+ return VCM_PARAMETER_ERROR;
+ }
+
+ bool ret =
+ _codecDataBase.SetSendCodec(sendCodec, numberOfCores, maxPayloadSize);
+
+ // Update encoder regardless of result to make sure that we're not holding on
+ // to a deleted instance.
+ _encoder = _codecDataBase.GetEncoder();
+ // Cache the current codec here so it can be fetched from this thread
+ // without requiring the _sendCritSect lock.
+ current_codec_ = *sendCodec;
+
+ if (!ret) {
+ LOG(LS_ERROR) << "Failed to initialize encoder with payload name '"
+ << sendCodec->plName << "'.";
+ return VCM_CODEC_ERROR;
+ }
+
+ int numLayers;
+ if (sendCodec->codecType == kVideoCodecVP8) {
+ numLayers = sendCodec->codecSpecific.VP8.numberOfTemporalLayers;
+ } else if (sendCodec->codecType == kVideoCodecVP9) {
+ numLayers = sendCodec->codecSpecific.VP9.numberOfTemporalLayers;
+ } else {
+ numLayers = 1;
+ }
+
+ // If we have screensharing and we have layers, we disable frame dropper.
+ bool disable_frame_dropper =
+ numLayers > 1 && sendCodec->mode == kScreensharing;
+ if (disable_frame_dropper) {
+ _mediaOpt.EnableFrameDropper(false);
+ } else if (frame_dropper_enabled_) {
+ _mediaOpt.EnableFrameDropper(true);
+ }
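+ // Track one pending frame type per simulcast stream (at least one),
+ // defaulting to delta frames.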
+ _nextFrameTypes.clear();
+ _nextFrameTypes.resize(VCM_MAX(sendCodec->numberOfSimulcastStreams, 1),
+ kVideoFrameDelta);
+
+ _mediaOpt.SetEncodingData(sendCodec->codecType, sendCodec->maxBitrate * 1000,
+ sendCodec->startBitrate * 1000, sendCodec->width,
+ sendCodec->height, sendCodec->maxFramerate,
+ numLayers, maxPayloadSize);
+ return VCM_OK;
+}
+
+// Register an external encoder object.
+// This cannot be used together with external encoder callbacks.
+void VideoSender::RegisterExternalEncoder(VideoEncoder* externalEncoder,
+ uint8_t payloadType,
+ bool internalSource /*= false*/) {
+ RTC_DCHECK(main_thread_.CalledOnValidThread());
+
+ rtc::CritScope lock(&send_crit_);
+
+ if (externalEncoder == nullptr) {
+ bool wasSendCodec = false;
+ RTC_CHECK(
+ _codecDataBase.DeregisterExternalEncoder(payloadType, &wasSendCodec));
+ if (wasSendCodec) {
+ // Make sure the VCM doesn't use the de-registered codec
+ _encoder = nullptr;
+ }
+ return;
+ }
+ _codecDataBase.RegisterExternalEncoder(externalEncoder, payloadType,
+ internalSource);
+}
+
+// Get encode bitrate
+int VideoSender::Bitrate(unsigned int* bitrate) const {
+ RTC_DCHECK(main_thread_.CalledOnValidThread());
+ // Since we're running on the thread that's the only thread known to modify
+ // the value of _encoder, we don't need to grab the lock here.
+
+ if (!_encoder)
+ return VCM_UNINITIALIZED;
+ *bitrate = _encoder->GetEncoderParameters().target_bitrate;
+ return 0;
+}
+
+// Get encode frame rate
+int VideoSender::FrameRate(unsigned int* framerate) const {
+ RTC_DCHECK(main_thread_.CalledOnValidThread());
+ // Since we're running on the thread that's the only thread known to modify
+ // the value of _encoder, we don't need to grab the lock here.
+
+ if (!_encoder)
+ return VCM_UNINITIALIZED;
+
+ *framerate = _encoder->GetEncoderParameters().input_frame_rate;
+ return 0;
+}
+
+int32_t VideoSender::SetChannelParameters(uint32_t target_bitrate,
+ uint8_t lossRate,
+ int64_t rtt) {
+ uint32_t target_rate =
+ _mediaOpt.SetTargetRates(target_bitrate, lossRate, rtt,
+ protection_callback_, qm_settings_callback_);
+
+ uint32_t input_frame_rate = _mediaOpt.InputFrameRate();
+
+ rtc::CritScope cs(&params_lock_);
+ encoder_params_ = {target_rate, lossRate, rtt, input_frame_rate};
+
+ return VCM_OK;
+}
+
+void VideoSender::SetEncoderParameters(EncoderParameters params) {
+ if (params.target_bitrate == 0)
+ return;
+
+ if (params.input_frame_rate == 0) {
+ // No frame rate estimate available, use default.
+ params.input_frame_rate = current_codec_.maxFramerate;
+ }
+ if (_encoder != nullptr)
+ _encoder->SetEncoderParameters(params);
+}
+
+int32_t VideoSender::RegisterTransportCallback(
+ VCMPacketizationCallback* transport) {
+ rtc::CritScope lock(&send_crit_);
+ _encodedFrameCallback.SetMediaOpt(&_mediaOpt);
+ _encodedFrameCallback.SetTransportCallback(transport);
+ return VCM_OK;
+}
+
+// Register video output information callback which will be called to deliver
+// information about the video stream produced by the encoder, for instance the
+// average frame rate and bit rate.
+int32_t VideoSender::RegisterSendStatisticsCallback(
+ VCMSendStatisticsCallback* sendStats) {
+ CriticalSectionScoped cs(process_crit_sect_.get());
+ _sendStatsCallback = sendStats;
+ return VCM_OK;
+}
+
+// Register a video protection callback which will be called to deliver the
+// requested FEC rate and NACK status (on/off).
+// Note: this callback is assumed to only be registered once and before it is
+// used in this class.
+int32_t VideoSender::RegisterProtectionCallback(
+ VCMProtectionCallback* protection_callback) {
+ RTC_DCHECK(protection_callback == nullptr || protection_callback_ == nullptr);
+ protection_callback_ = protection_callback;
+ return VCM_OK;
+}
+
+// Enable or disable a video protection method.
+void VideoSender::SetVideoProtection(VCMVideoProtection videoProtection) {
+ rtc::CritScope lock(&send_crit_);
+ switch (videoProtection) {
+ case kProtectionNone:
+ _mediaOpt.SetProtectionMethod(media_optimization::kNone);
+ break;
+ case kProtectionNack:
+ _mediaOpt.SetProtectionMethod(media_optimization::kNack);
+ break;
+ case kProtectionNackFEC:
+ _mediaOpt.SetProtectionMethod(media_optimization::kNackFec);
+ break;
+ case kProtectionFEC:
+ _mediaOpt.SetProtectionMethod(media_optimization::kFec);
+ break;
+ }
+}
+
+// Add one raw video frame to the encoder, blocking.
+int32_t VideoSender::AddVideoFrame(const VideoFrame& videoFrame,
+ const VideoContentMetrics* contentMetrics,
+ const CodecSpecificInfo* codecSpecificInfo) {
+ EncoderParameters encoder_params;
+ {
+ rtc::CritScope lock(&params_lock_);
+ encoder_params = encoder_params_;
+ }
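+ // Parameters were snapshotted above so that params_lock_ and send_crit_
+ // are never held at the same time.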
+ rtc::CritScope lock(&send_crit_);
+ if (_encoder == nullptr)
+ return VCM_UNINITIALIZED;
+ SetEncoderParameters(encoder_params);
+ // TODO(holmer): Add support for dropping frames per stream. Currently we
+ // only have one frame dropper for all streams.
+ if (_nextFrameTypes[0] == kEmptyFrame) {
+ return VCM_OK;
+ }
+ if (_mediaOpt.DropFrame()) {
+ _encoder->OnDroppedFrame();
+ return VCM_OK;
+ }
+ _mediaOpt.UpdateContentData(contentMetrics);
+ // TODO(pbos): Make sure setting send codec is synchronized with video
+ // processing so frame size always matches.
+ if (!_codecDataBase.MatchesCurrentResolution(videoFrame.width(),
+ videoFrame.height())) {
+ LOG(LS_ERROR) << "Incoming frame doesn't match set resolution. Dropping.";
+ return VCM_PARAMETER_ERROR;
+ }
+ VideoFrame converted_frame = videoFrame;
+ if (converted_frame.native_handle() && !_encoder->SupportsNativeHandle()) {
+ // This module only supports software encoding.
+ // TODO(pbos): Offload conversion from the encoder thread.
+ converted_frame = converted_frame.ConvertNativeToI420Frame();
+ RTC_CHECK(!converted_frame.IsZeroSize())
+ << "Frame conversion failed, won't be able to encode frame.";
+ }
+ int32_t ret =
+ _encoder->Encode(converted_frame, codecSpecificInfo, _nextFrameTypes);
+ if (ret < 0) {
+ LOG(LS_ERROR) << "Failed to encode frame. Error code: " << ret;
+ return ret;
+ }
+ for (size_t i = 0; i < _nextFrameTypes.size(); ++i) {
+ _nextFrameTypes[i] = kVideoFrameDelta; // Default frame type.
+ }
+ if (qm_settings_callback_)
+ qm_settings_callback_->SetTargetFramerate(_encoder->GetTargetFramerate());
+ return VCM_OK;
+}
+
+int32_t VideoSender::IntraFrameRequest(int stream_index) {
+ rtc::CritScope lock(&send_crit_);
+ if (stream_index < 0 ||
+ static_cast<unsigned int>(stream_index) >= _nextFrameTypes.size()) {
+ return -1;
+ }
+ _nextFrameTypes[stream_index] = kVideoFrameKey;
+ if (_encoder != nullptr && _encoder->InternalSource()) {
+ // Try to request the frame if we have an external encoder with
+ // internal source, since AddVideoFrame will never be called.
+ if (_encoder->RequestFrame(_nextFrameTypes) == WEBRTC_VIDEO_CODEC_OK) {
+ _nextFrameTypes[stream_index] = kVideoFrameDelta;
+ }
+ }
+ return VCM_OK;
+}
+
+int32_t VideoSender::EnableFrameDropper(bool enable) {
+ rtc::CritScope lock(&send_crit_);
+ frame_dropper_enabled_ = enable;
+ _mediaOpt.EnableFrameDropper(enable);
+ return VCM_OK;
+}
+
+void VideoSender::SuspendBelowMinBitrate() {
+ RTC_DCHECK(main_thread_.CalledOnValidThread());
+ int threshold_bps;
+ if (current_codec_.numberOfSimulcastStreams == 0) {
+ threshold_bps = current_codec_.minBitrate * 1000;
+ } else {
+ threshold_bps = current_codec_.simulcastStream[0].minBitrate * 1000;
+ }
+ // Set the hysteresis window to 10% of the threshold, but at least
+ // 10 kbps.
+ int window_bps = std::max(threshold_bps / 10, 10000);
+ _mediaOpt.SuspendBelowMinBitrate(threshold_bps, window_bps);
+}
+
+bool VideoSender::VideoSuspended() const {
+ return _mediaOpt.IsVideoSuspended();
+}
+} // namespace vcm
+} // namespace webrtc
diff --git a/webrtc/modules/video_coding/video_sender_unittest.cc b/webrtc/modules/video_coding/video_sender_unittest.cc
new file mode 100644
index 0000000000..741c7b7a60
--- /dev/null
+++ b/webrtc/modules/video_coding/video_sender_unittest.cc
@@ -0,0 +1,488 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <vector>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/common.h"
+#include "webrtc/modules/video_coding/include/mock/mock_video_codec_interface.h"
+#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
+#include "webrtc/modules/video_coding/codecs/vp8/include/vp8_common_types.h"
+#include "webrtc/modules/video_coding/codecs/vp8/temporal_layers.h"
+#include "webrtc/modules/video_coding/include/mock/mock_vcm_callbacks.h"
+#include "webrtc/modules/video_coding/include/video_coding.h"
+#include "webrtc/modules/video_coding/video_coding_impl.h"
+#include "webrtc/modules/video_coding/test/test_util.h"
+#include "webrtc/system_wrappers/include/clock.h"
+#include "webrtc/test/frame_generator.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+using ::testing::_;
+using ::testing::AllOf;
+using ::testing::ElementsAre;
+using ::testing::ElementsAreArray;
+using ::testing::Field;
+using ::testing::NiceMock;
+using ::testing::Pointee;
+using ::testing::Return;
+using ::testing::FloatEq;
+using std::vector;
+using webrtc::test::FrameGenerator;
+
+namespace webrtc {
+namespace vcm {
+namespace {
+enum { kMaxNumberOfTemporalLayers = 3 };
+
+struct Vp8StreamInfo {
+ float framerate_fps[kMaxNumberOfTemporalLayers];
+ int bitrate_kbps[kMaxNumberOfTemporalLayers];
+};
+
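+// Matches a Vp8StreamInfo within 0.5 fps and 10 kbps per temporal layer.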
+MATCHER_P(MatchesVp8StreamInfo, expected, "") {
+ bool res = true;
+ for (int tl = 0; tl < kMaxNumberOfTemporalLayers; ++tl) {
+ if (fabs(expected.framerate_fps[tl] - arg.framerate_fps[tl]) > 0.5) {
+ *result_listener << " framerate_fps[" << tl
+ << "] = " << arg.framerate_fps[tl] << " (expected "
+ << expected.framerate_fps[tl] << ") ";
+ res = false;
+ }
+ if (abs(expected.bitrate_kbps[tl] - arg.bitrate_kbps[tl]) > 10) {
+ *result_listener << " bitrate_kbps[" << tl
+ << "] = " << arg.bitrate_kbps[tl] << " (expected "
+ << expected.bitrate_kbps[tl] << ") ";
+ res = false;
+ }
+ }
+ return res;
+}
+
+class EmptyFrameGenerator : public FrameGenerator {
+ public:
+ EmptyFrameGenerator(int width, int height) : width_(width), height_(height) {}
+ VideoFrame* NextFrame() override {
+ frame_.reset(new VideoFrame());
+ frame_->CreateEmptyFrame(width_, height_, width_, (width_ + 1) / 2,
+ (width_ + 1) / 2);
+ return frame_.get();
+ }
+
+ private:
+ const int width_;
+ const int height_;
+ rtc::scoped_ptr<VideoFrame> frame_;
+};
+
+class PacketizationCallback : public VCMPacketizationCallback {
+ public:
+ explicit PacketizationCallback(Clock* clock)
+ : clock_(clock), start_time_ms_(clock_->TimeInMilliseconds()) {}
+
+ virtual ~PacketizationCallback() {}
+
+ int32_t SendData(uint8_t payload_type,
+ const EncodedImage& encoded_image,
+ const RTPFragmentationHeader& fragmentation_header,
+ const RTPVideoHeader* rtp_video_header) override {
+ assert(rtp_video_header);
+ frame_data_.push_back(FrameData(encoded_image._length, *rtp_video_header));
+ return 0;
+ }
+
+ void Reset() {
+ frame_data_.clear();
+ start_time_ms_ = clock_->TimeInMilliseconds();
+ }
+
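+ // Layers are cumulative: frames with a temporal index <= |temporal_layer|
+ // (or with no temporal index at all) count towards that layer.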
+ float FramerateFpsWithinTemporalLayer(int temporal_layer) {
+ return CountFramesWithinTemporalLayer(temporal_layer) *
+ (1000.0 / interval_ms());
+ }
+
+ float BitrateKbpsWithinTemporalLayer(int temporal_layer) {
+ return SumPayloadBytesWithinTemporalLayer(temporal_layer) * 8.0 /
+ interval_ms();
+ }
+
+ Vp8StreamInfo CalculateVp8StreamInfo() {
+ Vp8StreamInfo info;
+ for (int tl = 0; tl < 3; ++tl) {
+ info.framerate_fps[tl] = FramerateFpsWithinTemporalLayer(tl);
+ info.bitrate_kbps[tl] = BitrateKbpsWithinTemporalLayer(tl);
+ }
+ return info;
+ }
+
+ private:
+ struct FrameData {
+ FrameData() {}
+
+ FrameData(size_t payload_size, const RTPVideoHeader& rtp_video_header)
+ : payload_size(payload_size), rtp_video_header(rtp_video_header) {}
+
+ size_t payload_size;
+ RTPVideoHeader rtp_video_header;
+ };
+
+ int64_t interval_ms() {
+ int64_t diff = (clock_->TimeInMilliseconds() - start_time_ms_);
+ EXPECT_GT(diff, 0);
+ return diff;
+ }
+
+ int CountFramesWithinTemporalLayer(int temporal_layer) {
+ int frames = 0;
+ for (size_t i = 0; i < frame_data_.size(); ++i) {
+ EXPECT_EQ(kRtpVideoVp8, frame_data_[i].rtp_video_header.codec);
+ const uint8_t temporal_idx =
+ frame_data_[i].rtp_video_header.codecHeader.VP8.temporalIdx;
+ if (temporal_idx <= temporal_layer || temporal_idx == kNoTemporalIdx)
+ frames++;
+ }
+ return frames;
+ }
+
+ size_t SumPayloadBytesWithinTemporalLayer(int temporal_layer) {
+ size_t payload_size = 0;
+ for (size_t i = 0; i < frame_data_.size(); ++i) {
+ EXPECT_EQ(kRtpVideoVp8, frame_data_[i].rtp_video_header.codec);
+ const uint8_t temporal_idx =
+ frame_data_[i].rtp_video_header.codecHeader.VP8.temporalIdx;
+ if (temporal_idx <= temporal_layer || temporal_idx == kNoTemporalIdx)
+ payload_size += frame_data_[i].payload_size;
+ }
+ return payload_size;
+ }
+
+ Clock* clock_;
+ int64_t start_time_ms_;
+ vector<FrameData> frame_data_;
+};
+
+class TestVideoSender : public ::testing::Test {
+ protected:
+ // Note: simulated clock starts at 1 second, since parts of webrtc use 0 as
+ // a special case (e.g. frame rate in media optimization).
+ TestVideoSender() : clock_(1000), packetization_callback_(&clock_) {}
+
+ void SetUp() override {
+ sender_.reset(
+ new VideoSender(&clock_, &post_encode_callback_, nullptr, nullptr));
+ EXPECT_EQ(0, sender_->RegisterTransportCallback(&packetization_callback_));
+ }
+
+ void AddFrame() {
+ assert(generator_.get());
+ sender_->AddVideoFrame(*generator_->NextFrame(), NULL, NULL);
+ }
+
+ SimulatedClock clock_;
+ PacketizationCallback packetization_callback_;
+ MockEncodedImageCallback post_encode_callback_;
+ // Used by subclassing tests, need to outlive sender_.
+ rtc::scoped_ptr<VideoEncoder> encoder_;
+ rtc::scoped_ptr<VideoSender> sender_;
+ rtc::scoped_ptr<FrameGenerator> generator_;
+};
+
+class TestVideoSenderWithMockEncoder : public TestVideoSender {
+ protected:
+ static const int kDefaultWidth = 1280;
+ static const int kDefaultHeight = 720;
+ static const int kNumberOfStreams = 3;
+ static const int kNumberOfLayers = 3;
+ static const int kUnusedPayloadType = 10;
+
+ void SetUp() override {
+ TestVideoSender::SetUp();
+ sender_->RegisterExternalEncoder(&encoder_, kUnusedPayloadType, false);
+ VideoCodingModule::Codec(kVideoCodecVP8, &settings_);
+ settings_.numberOfSimulcastStreams = kNumberOfStreams;
+ ConfigureStream(kDefaultWidth / 4, kDefaultHeight / 4, 100,
+ &settings_.simulcastStream[0]);
+ ConfigureStream(kDefaultWidth / 2, kDefaultHeight / 2, 500,
+ &settings_.simulcastStream[1]);
+ ConfigureStream(kDefaultWidth, kDefaultHeight, 1200,
+ &settings_.simulcastStream[2]);
+ settings_.plType = kUnusedPayloadType; // Use the mocked encoder.
+ generator_.reset(
+ new EmptyFrameGenerator(settings_.width, settings_.height));
+ EXPECT_EQ(0, sender_->RegisterSendCodec(&settings_, 1, 1200));
+ }
+
+ void TearDown() override { sender_.reset(); }
+
+ void ExpectIntraRequest(int stream) {
+ if (stream == -1) {
+ // No intra request expected.
+ EXPECT_CALL(
+ encoder_,
+ Encode(_, _, Pointee(ElementsAre(kVideoFrameDelta, kVideoFrameDelta,
+ kVideoFrameDelta))))
+ .Times(1)
+ .WillRepeatedly(Return(0));
+ return;
+ }
+ assert(stream >= 0);
+ assert(stream < kNumberOfStreams);
+ std::vector<FrameType> frame_types(kNumberOfStreams, kVideoFrameDelta);
+ frame_types[stream] = kVideoFrameKey;
+ EXPECT_CALL(encoder_,
+ Encode(_, _, Pointee(ElementsAreArray(&frame_types[0],
+ frame_types.size()))))
+ .Times(1)
+ .WillRepeatedly(Return(0));
+ }
+
+ static void ConfigureStream(int width,
+ int height,
+ int max_bitrate,
+ SimulcastStream* stream) {
+ assert(stream);
+ stream->width = width;
+ stream->height = height;
+ stream->maxBitrate = max_bitrate;
+ stream->numberOfTemporalLayers = kNumberOfLayers;
+ stream->qpMax = 45;
+ }
+
+ VideoCodec settings_;
+ NiceMock<MockVideoEncoder> encoder_;
+};
+
+TEST_F(TestVideoSenderWithMockEncoder, TestIntraRequests) {
+ EXPECT_EQ(0, sender_->IntraFrameRequest(0));
+ ExpectIntraRequest(0);
+ AddFrame();
+ ExpectIntraRequest(-1);
+ AddFrame();
+
+ EXPECT_EQ(0, sender_->IntraFrameRequest(1));
+ ExpectIntraRequest(1);
+ AddFrame();
+ ExpectIntraRequest(-1);
+ AddFrame();
+
+ EXPECT_EQ(0, sender_->IntraFrameRequest(2));
+ ExpectIntraRequest(2);
+ AddFrame();
+ ExpectIntraRequest(-1);
+ AddFrame();
+
+ EXPECT_EQ(-1, sender_->IntraFrameRequest(3));
+ ExpectIntraRequest(-1);
+ AddFrame();
+
+ EXPECT_EQ(-1, sender_->IntraFrameRequest(-1));
+ ExpectIntraRequest(-1);
+ AddFrame();
+}
+
+TEST_F(TestVideoSenderWithMockEncoder, TestIntraRequestsInternalCapture) {
+ // De-register current external encoder.
+ sender_->RegisterExternalEncoder(nullptr, kUnusedPayloadType, false);
+ // Register encoder with internal capture.
+ sender_->RegisterExternalEncoder(&encoder_, kUnusedPayloadType, true);
+ EXPECT_EQ(0, sender_->RegisterSendCodec(&settings_, 1, 1200));
+ ExpectIntraRequest(0);
+ EXPECT_EQ(0, sender_->IntraFrameRequest(0));
+ ExpectIntraRequest(1);
+ EXPECT_EQ(0, sender_->IntraFrameRequest(1));
+ ExpectIntraRequest(2);
+ EXPECT_EQ(0, sender_->IntraFrameRequest(2));
+ // No requests expected since these indices are out of bounds.
+ EXPECT_EQ(-1, sender_->IntraFrameRequest(3));
+ EXPECT_EQ(-1, sender_->IntraFrameRequest(-1));
+}
+
+TEST_F(TestVideoSenderWithMockEncoder, EncoderFramerateUpdatedViaProcess) {
+ sender_->SetChannelParameters(settings_.startBitrate * 1000, 0, 200);
+ const int64_t kRateStatsWindowMs = 2000;
+ const uint32_t kInputFps = 20;
+ int64_t start_time = clock_.TimeInMilliseconds();
+ while (clock_.TimeInMilliseconds() < start_time + kRateStatsWindowMs) {
+ AddFrame();
+ clock_.AdvanceTimeMilliseconds(1000 / kInputFps);
+ }
+ EXPECT_CALL(encoder_, SetRates(_, kInputFps)).Times(1).WillOnce(Return(0));
+ sender_->Process();
+ AddFrame();
+}
+
+TEST_F(TestVideoSenderWithMockEncoder,
+ NoRedundantSetChannelParameterOrSetRatesCalls) {
+ const uint8_t kLossRate = 4;
+ const uint8_t kRtt = 200;
+ const int64_t kRateStatsWindowMs = 2000;
+ const uint32_t kInputFps = 20;
+ int64_t start_time = clock_.TimeInMilliseconds();
+ // Expect an initial call to SetChannelParameters. Rates are initialized
+ // through InitEncode, so no additional call is expected before the framerate
+ // (or bitrate) changes.
+ EXPECT_CALL(encoder_, SetChannelParameters(kLossRate, kRtt))
+ .Times(1)
+ .WillOnce(Return(0));
+ sender_->SetChannelParameters(settings_.startBitrate * 1000, kLossRate, kRtt);
+ while (clock_.TimeInMilliseconds() < start_time + kRateStatsWindowMs) {
+ AddFrame();
+ clock_.AdvanceTimeMilliseconds(1000 / kInputFps);
+ }
+ // After process, input framerate should be updated but not ChannelParameters
+ // as they are the same as before.
+ EXPECT_CALL(encoder_, SetRates(_, kInputFps)).Times(1).WillOnce(Return(0));
+ sender_->Process();
+ AddFrame();
+ // A call to SetChannelParameters with a changed bitrate should trigger
+ // encoder SetRates, but not encoder SetChannelParameters (which is
+ // unchanged).
+ EXPECT_CALL(encoder_, SetRates(2 * settings_.startBitrate, kInputFps))
+ .Times(1)
+ .WillOnce(Return(0));
+ sender_->SetChannelParameters(2 * settings_.startBitrate * 1000, kLossRate,
+ kRtt);
+ AddFrame();
+}
+
+class TestVideoSenderWithVp8 : public TestVideoSender {
+ public:
+ TestVideoSenderWithVp8()
+ : codec_bitrate_kbps_(300), available_bitrate_kbps_(1000) {}
+
+ void SetUp() override {
+ TestVideoSender::SetUp();
+
+ const char* input_video = "foreman_cif";
+ const int width = 352;
+ const int height = 288;
+ generator_.reset(FrameGenerator::CreateFromYuvFile(
+ std::vector<std::string>(1, test::ResourcePath(input_video, "yuv")),
+ width, height, 1));
+
+ codec_ = MakeVp8VideoCodec(width, height, 3);
+ codec_.minBitrate = 10;
+ codec_.startBitrate = codec_bitrate_kbps_;
+ codec_.maxBitrate = codec_bitrate_kbps_;
+ encoder_.reset(VP8Encoder::Create());
+ sender_->RegisterExternalEncoder(encoder_.get(), codec_.plType, false);
+ EXPECT_EQ(0, sender_->RegisterSendCodec(&codec_, 1, 1200));
+ }
+
+ static VideoCodec MakeVp8VideoCodec(int width,
+ int height,
+ int temporal_layers) {
+ VideoCodec codec;
+ VideoCodingModule::Codec(kVideoCodecVP8, &codec);
+ codec.width = width;
+ codec.height = height;
+ codec.codecSpecific.VP8.numberOfTemporalLayers = temporal_layers;
+ return codec;
+ }
+
+ void InsertFrames(float framerate, float seconds) {
+ for (int i = 0; i < seconds * framerate; ++i) {
+ clock_.AdvanceTimeMilliseconds(1000.0f / framerate);
+ EXPECT_CALL(post_encode_callback_, Encoded(_, NULL, NULL))
+ .WillOnce(Return(0));
+ AddFrame();
+ // SetChannelParameters needs to be called frequently to propagate
+ // framerate from the media optimization into the encoder.
+ // Note: SetChannelParameters fails if fewer than 2 frames are in the
+ // buffer, since it cannot calculate the framerate from a single frame.
+ if (i != 0) {
+ EXPECT_EQ(VCM_OK, sender_->SetChannelParameters(
+ available_bitrate_kbps_ * 1000, 0, 200));
+ }
+ }
+ }
+
+ Vp8StreamInfo SimulateWithFramerate(float framerate) {
+ const float short_simulation_interval = 5.0;
+ const float long_simulation_interval = 10.0;
+ // It appears that these 5 seconds of simulation are needed to allow
+ // bitrate and framerate to stabilize.
+ InsertFrames(framerate, short_simulation_interval);
+ packetization_callback_.Reset();
+
+ InsertFrames(framerate, long_simulation_interval);
+ return packetization_callback_.CalculateVp8StreamInfo();
+ }
+
+ protected:
+ VideoCodec codec_;
+ int codec_bitrate_kbps_;
+ int available_bitrate_kbps_;
+};
+
+#if defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS)
+#define MAYBE_FixedTemporalLayersStrategy DISABLED_FixedTemporalLayersStrategy
+#else
+#define MAYBE_FixedTemporalLayersStrategy FixedTemporalLayersStrategy
+#endif
+TEST_F(TestVideoSenderWithVp8, MAYBE_FixedTemporalLayersStrategy) {
+ const int low_b = codec_bitrate_kbps_ * kVp8LayerRateAlloction[2][0];
+ const int mid_b = codec_bitrate_kbps_ * kVp8LayerRateAlloction[2][1];
+ const int high_b = codec_bitrate_kbps_ * kVp8LayerRateAlloction[2][2];
+ {
+ Vp8StreamInfo expected = {{7.5, 15.0, 30.0}, {low_b, mid_b, high_b}};
+ EXPECT_THAT(SimulateWithFramerate(30.0), MatchesVp8StreamInfo(expected));
+ }
+ {
+ Vp8StreamInfo expected = {{3.75, 7.5, 15.0}, {low_b, mid_b, high_b}};
+ EXPECT_THAT(SimulateWithFramerate(15.0), MatchesVp8StreamInfo(expected));
+ }
+}
+
+#if defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS)
+#define MAYBE_RealTimeTemporalLayersStrategy \
+ DISABLED_RealTimeTemporalLayersStrategy
+#else
+#define MAYBE_RealTimeTemporalLayersStrategy RealTimeTemporalLayersStrategy
+#endif
+TEST_F(TestVideoSenderWithVp8, MAYBE_RealTimeTemporalLayersStrategy) {
+ Config extra_options;
+ extra_options.Set<TemporalLayers::Factory>(
+ new RealTimeTemporalLayersFactory());
+ VideoCodec codec = MakeVp8VideoCodec(352, 288, 3);
+ codec.extra_options = &extra_options;
+ codec.minBitrate = 10;
+ codec.startBitrate = codec_bitrate_kbps_;
+ codec.maxBitrate = codec_bitrate_kbps_;
+ EXPECT_EQ(0, sender_->RegisterSendCodec(&codec, 1, 1200));
+
+ const int low_b = codec_bitrate_kbps_ * 0.4;
+ const int mid_b = codec_bitrate_kbps_ * 0.6;
+ const int high_b = codec_bitrate_kbps_;
+
+ {
+ Vp8StreamInfo expected = {{7.5, 15.0, 30.0}, {low_b, mid_b, high_b}};
+ EXPECT_THAT(SimulateWithFramerate(30.0), MatchesVp8StreamInfo(expected));
+ }
+ {
+ Vp8StreamInfo expected = {{5.0, 10.0, 20.0}, {low_b, mid_b, high_b}};
+ EXPECT_THAT(SimulateWithFramerate(20.0), MatchesVp8StreamInfo(expected));
+ }
+ {
+ Vp8StreamInfo expected = {{7.5, 15.0, 15.0}, {mid_b, high_b, high_b}};
+ EXPECT_THAT(SimulateWithFramerate(15.0), MatchesVp8StreamInfo(expected));
+ }
+ {
+ Vp8StreamInfo expected = {{5.0, 10.0, 10.0}, {mid_b, high_b, high_b}};
+ EXPECT_THAT(SimulateWithFramerate(10.0), MatchesVp8StreamInfo(expected));
+ }
+ {
+ // TODO(andresp): Find out why this fails with framerate = 7.5
+ Vp8StreamInfo expected = {{7.0, 7.0, 7.0}, {high_b, high_b, high_b}};
+ EXPECT_THAT(SimulateWithFramerate(7.0), MatchesVp8StreamInfo(expected));
+ }
+}
+} // namespace
+} // namespace vcm
+} // namespace webrtc
diff --git a/webrtc/modules/video_processing/BUILD.gn b/webrtc/modules/video_processing/BUILD.gn
index 00d2911eef..6d411edda1 100644
--- a/webrtc/modules/video_processing/BUILD.gn
+++ b/webrtc/modules/video_processing/BUILD.gn
@@ -6,30 +6,37 @@
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
+import("//build/config/arm.gni")
import("../../build/webrtc.gni")
build_video_processing_sse2 = current_cpu == "x86" || current_cpu == "x64"
source_set("video_processing") {
sources = [
- "main/interface/video_processing.h",
- "main/interface/video_processing_defines.h",
- "main/source/brighten.cc",
- "main/source/brighten.h",
- "main/source/brightness_detection.cc",
- "main/source/brightness_detection.h",
- "main/source/content_analysis.cc",
- "main/source/content_analysis.h",
- "main/source/deflickering.cc",
- "main/source/deflickering.h",
- "main/source/frame_preprocessor.cc",
- "main/source/frame_preprocessor.h",
- "main/source/spatial_resampler.cc",
- "main/source/spatial_resampler.h",
- "main/source/video_decimator.cc",
- "main/source/video_decimator.h",
- "main/source/video_processing_impl.cc",
- "main/source/video_processing_impl.h",
+ "brightness_detection.cc",
+ "brightness_detection.h",
+ "content_analysis.cc",
+ "content_analysis.h",
+ "deflickering.cc",
+ "deflickering.h",
+ "frame_preprocessor.cc",
+ "frame_preprocessor.h",
+ "include/video_processing.h",
+ "include/video_processing_defines.h",
+ "spatial_resampler.cc",
+ "spatial_resampler.h",
+ "util/denoiser_filter.cc",
+ "util/denoiser_filter.h",
+ "util/denoiser_filter_c.cc",
+ "util/denoiser_filter_c.h",
+ "util/skin_detection.cc",
+ "util/skin_detection.h",
+ "video_decimator.cc",
+ "video_decimator.h",
+ "video_denoiser.cc",
+ "video_denoiser.h",
+ "video_processing_impl.cc",
+ "video_processing_impl.h",
]
deps = [
@@ -41,6 +48,9 @@ source_set("video_processing") {
if (build_video_processing_sse2) {
deps += [ ":video_processing_sse2" ]
}
+ if (rtc_build_with_neon) {
+ deps += [ ":video_processing_neon" ]
+ }
configs += [ "../..:common_config" ]
public_configs = [ "../..:common_inherited_config" ]
@@ -55,7 +65,9 @@ source_set("video_processing") {
if (build_video_processing_sse2) {
source_set("video_processing_sse2") {
sources = [
- "main/source/content_analysis_sse2.cc",
+ "content_analysis_sse2.cc",
+ "util/denoiser_filter_sse2.cc",
+ "util/denoiser_filter_sse2.h",
]
configs += [ "../..:common_config" ]
@@ -72,3 +84,18 @@ if (build_video_processing_sse2) {
}
}
}
+
+if (rtc_build_with_neon) {
+ source_set("video_processing_neon") {
+ sources = [
+ "util/denoiser_filter_neon.cc",
+ "util/denoiser_filter_neon.h",
+ ]
+ if (current_cpu != "arm64") {
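+ # Enable NEON explicitly on 32-bit ARM; arm64 has it unconditionally.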
+ configs -= [ "//build/config/compiler:compiler_arm_fpu" ]
+ cflags = [ "-mfpu=neon" ]
+ }
+ configs += [ "../..:common_config" ]
+ public_configs = [ "../..:common_inherited_config" ]
+ }
+}
diff --git a/webrtc/modules/video_processing/OWNERS b/webrtc/modules/video_processing/OWNERS
index f452c9ed83..389d632dfd 100644
--- a/webrtc/modules/video_processing/OWNERS
+++ b/webrtc/modules/video_processing/OWNERS
@@ -1,4 +1,9 @@
stefan@webrtc.org
marpan@webrtc.org
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
+
per-file BUILD.gn=kjellander@webrtc.org
diff --git a/webrtc/modules/video_processing/brightness_detection.cc b/webrtc/modules/video_processing/brightness_detection.cc
new file mode 100644
index 0000000000..7455cf9759
--- /dev/null
+++ b/webrtc/modules/video_processing/brightness_detection.cc
@@ -0,0 +1,136 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_processing/brightness_detection.h"
+
+#include <math.h>
+
+#include "webrtc/modules/video_processing/include/video_processing.h"
+
+namespace webrtc {
+
+VPMBrightnessDetection::VPMBrightnessDetection() {
+ Reset();
+}
+
+VPMBrightnessDetection::~VPMBrightnessDetection() {}
+
+void VPMBrightnessDetection::Reset() {
+ frame_cnt_bright_ = 0;
+ frame_cnt_dark_ = 0;
+}
+
+int32_t VPMBrightnessDetection::ProcessFrame(
+ const VideoFrame& frame,
+ const VideoProcessing::FrameStats& stats) {
+ if (frame.IsZeroSize()) {
+ return VPM_PARAMETER_ERROR;
+ }
+ int width = frame.width();
+ int height = frame.height();
+
+ if (!VideoProcessing::ValidFrameStats(stats)) {
+ return VPM_PARAMETER_ERROR;
+ }
+
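+ // A warning fires only after more than |frame_cnt_alarm| consecutive dark
+ // or bright frames.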
+ const uint8_t frame_cnt_alarm = 2;
+
+ // Get proportion in lowest bins.
+ uint8_t low_th = 20;
+ float prop_low = 0;
+ for (uint32_t i = 0; i < low_th; i++) {
+ prop_low += stats.hist[i];
+ }
+ prop_low /= stats.num_pixels;
+
+ // Get proportion in highest bins.
+ uint8_t high_th = 230;
+ float prop_high = 0;
+ for (uint32_t i = high_th; i < 256; i++) {
+ prop_high += stats.hist[i];
+ }
+ prop_high /= stats.num_pixels;
+
+ if (prop_high < 0.4) {
+ if (stats.mean < 90 || stats.mean > 170) {
+ // Standard deviation of Y
+ const uint8_t* buffer = frame.buffer(kYPlane);
+ float std_y = 0;
+ for (int h = 0; h < height; h += (1 << stats.sub_sampling_factor)) {
+ int row = h * width;
+ for (int w = 0; w < width; w += (1 << stats.sub_sampling_factor)) {
+ std_y +=
+ (buffer[w + row] - stats.mean) * (buffer[w + row] - stats.mean);
+ }
+ }
+ std_y = sqrt(std_y / stats.num_pixels);
+
+ // Get percentiles.
+ uint32_t sum = 0;
+ uint32_t median_y = 140;
+ uint32_t perc05 = 0;
+ uint32_t perc95 = 255;
+ float pos_perc05 = stats.num_pixels * 0.05f;
+ float pos_median = stats.num_pixels * 0.5f;
+ float pos_perc95 = stats.num_pixels * 0.95f;
+ for (uint32_t i = 0; i < 256; i++) {
+ sum += stats.hist[i];
+ if (sum < pos_perc05)
+ perc05 = i; // 5th perc.
+ if (sum < pos_median)
+ median_y = i; // 50th perc.
+ if (sum < pos_perc95)
+ perc95 = i; // 95th perc.
+ else
+ break;
+ }
+
+ // Check if image is too dark
+ if ((std_y < 55) && (perc05 < 50)) {
+ if (median_y < 60 || stats.mean < 80 || perc95 < 130 ||
+ prop_low > 0.20) {
+ frame_cnt_dark_++;
+ } else {
+ frame_cnt_dark_ = 0;
+ }
+ } else {
+ frame_cnt_dark_ = 0;
+ }
+
+ // Check if image is too bright
+ if ((std_y < 52) && (perc95 > 200) && (median_y > 160)) {
+ if (median_y > 185 || stats.mean > 185 || perc05 > 140 ||
+ prop_high > 0.25) {
+ frame_cnt_bright_++;
+ } else {
+ frame_cnt_bright_ = 0;
+ }
+ } else {
+ frame_cnt_bright_ = 0;
+ }
+ } else {
+ frame_cnt_dark_ = 0;
+ frame_cnt_bright_ = 0;
+ }
+ } else {
+ frame_cnt_bright_++;
+ frame_cnt_dark_ = 0;
+ }
+
+ if (frame_cnt_dark_ > frame_cnt_alarm) {
+ return VideoProcessing::kDarkWarning;
+ } else if (frame_cnt_bright_ > frame_cnt_alarm) {
+ return VideoProcessing::kBrightWarning;
+ } else {
+ return VideoProcessing::kNoWarning;
+ }
+}
+
+} // namespace webrtc
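
For reference, the percentile extraction in ProcessFrame() above is a single
cumulative walk over the 256-bin luma histogram: the running sum is compared
against the 5th/50th/95th-percentile pixel counts, and each index is recorded
until its threshold is crossed. A minimal standalone sketch, with a toy
histogram (the values are illustrative, not WebRTC code):

#include <stdint.h>
#include <stdio.h>

int main() {
  uint32_t hist[256] = {0};
  uint32_t num_pixels = 1000;
  // Toy luma distribution: uniform over bins 100..199.
  for (int i = 100; i < 200; ++i) hist[i] = 10;
  uint32_t sum = 0, perc05 = 0, median = 0, perc95 = 0;
  for (uint32_t i = 0; i < 256; ++i) {
    sum += hist[i];
    if (sum < num_pixels * 0.05f) perc05 = i;   // 5th percentile.
    if (sum < num_pixels * 0.5f)  median = i;   // 50th percentile.
    if (sum < num_pixels * 0.95f) perc95 = i;   // 95th percentile.
    else break;                                 // Nothing more to update.
  }
  printf("p05=%u median=%u p95=%u\n", perc05, median, perc95);
  return 0;
}
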
diff --git a/webrtc/modules/video_processing/brightness_detection.h b/webrtc/modules/video_processing/brightness_detection.h
new file mode 100644
index 0000000000..78a7ac5e0b
--- /dev/null
+++ b/webrtc/modules/video_processing/brightness_detection.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_BRIGHTNESS_DETECTION_H_
+#define WEBRTC_MODULES_VIDEO_PROCESSING_BRIGHTNESS_DETECTION_H_
+
+#include "webrtc/modules/video_processing/include/video_processing.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+class VPMBrightnessDetection {
+ public:
+ VPMBrightnessDetection();
+ ~VPMBrightnessDetection();
+
+ void Reset();
+ int32_t ProcessFrame(const VideoFrame& frame,
+ const VideoProcessing::FrameStats& stats);
+
+ private:
+ uint32_t frame_cnt_bright_;
+ uint32_t frame_cnt_dark_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_PROCESSING_BRIGHTNESS_DETECTION_H_
diff --git a/webrtc/modules/video_processing/content_analysis.cc b/webrtc/modules/video_processing/content_analysis.cc
new file mode 100644
index 0000000000..54c04da466
--- /dev/null
+++ b/webrtc/modules/video_processing/content_analysis.cc
@@ -0,0 +1,281 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "webrtc/modules/video_processing/content_analysis.h"
+
+#include <math.h>
+#include <stdlib.h>
+
+#include "webrtc/system_wrappers/include/cpu_features_wrapper.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+
+namespace webrtc {
+
+VPMContentAnalysis::VPMContentAnalysis(bool runtime_cpu_detection)
+ : orig_frame_(NULL),
+ prev_frame_(NULL),
+ width_(0),
+ height_(0),
+ skip_num_(1),
+ border_(8),
+ motion_magnitude_(0.0f),
+ spatial_pred_err_(0.0f),
+ spatial_pred_err_h_(0.0f),
+ spatial_pred_err_v_(0.0f),
+ first_frame_(true),
+ ca_Init_(false),
+ content_metrics_(NULL) {
+ ComputeSpatialMetrics = &VPMContentAnalysis::ComputeSpatialMetrics_C;
+ TemporalDiffMetric = &VPMContentAnalysis::TemporalDiffMetric_C;
+
+ if (runtime_cpu_detection) {
+#if defined(WEBRTC_ARCH_X86_FAMILY)
+ if (WebRtc_GetCPUInfo(kSSE2)) {
+ ComputeSpatialMetrics = &VPMContentAnalysis::ComputeSpatialMetrics_SSE2;
+ TemporalDiffMetric = &VPMContentAnalysis::TemporalDiffMetric_SSE2;
+ }
+#endif
+ }
+ Release();
+}
+
+VPMContentAnalysis::~VPMContentAnalysis() {
+ Release();
+}
+
+VideoContentMetrics* VPMContentAnalysis::ComputeContentMetrics(
+ const VideoFrame& inputFrame) {
+ if (inputFrame.IsZeroSize())
+ return NULL;
+
+ // Init if needed (native dimension change).
+ if (width_ != inputFrame.width() || height_ != inputFrame.height()) {
+ if (VPM_OK != Initialize(inputFrame.width(), inputFrame.height()))
+ return NULL;
+ }
+ // Only interested in the Y plane.
+ orig_frame_ = inputFrame.buffer(kYPlane);
+
+ // Compute spatial metrics: 3 spatial prediction errors.
+ (this->*ComputeSpatialMetrics)();
+
+ // Compute motion metrics
+ if (first_frame_ == false)
+ ComputeMotionMetrics();
+
+ // Saving current frame as previous one: Y only.
+ memcpy(prev_frame_, orig_frame_, width_ * height_);
+
+ first_frame_ = false;
+ ca_Init_ = true;
+
+ return ContentMetrics();
+}
+
+int32_t VPMContentAnalysis::Release() {
+ if (content_metrics_ != NULL) {
+ delete content_metrics_;
+ content_metrics_ = NULL;
+ }
+
+ if (prev_frame_ != NULL) {
+ delete[] prev_frame_;
+ prev_frame_ = NULL;
+ }
+
+ width_ = 0;
+ height_ = 0;
+ first_frame_ = true;
+
+ return VPM_OK;
+}
+
+int32_t VPMContentAnalysis::Initialize(int width, int height) {
+ width_ = width;
+ height_ = height;
+ first_frame_ = true;
+
+ // Skip parameter: number of skipped rows (for complexity reduction).
+ // The temporal metric currently also uses it for column reduction.
+ skip_num_ = 1;
+
+ // Use skip_num_ = 2 for 4CIF and WHD.
+ if ((height_ >= 576) && (width_ >= 704)) {
+ skip_num_ = 2;
+ }
+ // Use skip_num_ = 4 for full-HD images.
+ if ((height_ >= 1080) && (width_ >= 1920)) {
+ skip_num_ = 4;
+ }
+
+ if (content_metrics_ != NULL) {
+ delete content_metrics_;
+ }
+
+ if (prev_frame_ != NULL) {
+ delete[] prev_frame_;
+ }
+
+ // The spatial metrics skip a border of 8 pixels and the minimum processing
+ // block size is 16 pixels, so make sure the width and height support this.
+ if (width_ <= 32 || height_ <= 32) {
+ ca_Init_ = false;
+ return VPM_PARAMETER_ERROR;
+ }
+
+ content_metrics_ = new VideoContentMetrics();
+ if (content_metrics_ == NULL) {
+ return VPM_MEMORY;
+ }
+
+ prev_frame_ = new uint8_t[width_ * height_]; // Y only.
+ if (prev_frame_ == NULL)
+ return VPM_MEMORY;
+
+ return VPM_OK;
+}
+
+// Compute motion metrics: magnitude over non-zero motion vectors,
+// and size of zero cluster
+int32_t VPMContentAnalysis::ComputeMotionMetrics() {
+ // Motion metric: only one is derived, from the normalized
+ // (MAD) temporal difference.
+ (this->*TemporalDiffMetric)();
+ return VPM_OK;
+}
+
+// Normalized temporal difference (MAD): used as a motion level metric
+// Normalize MAD by spatial contrast: images with more contrast
+// (pixel variance) likely have larger temporal difference
+// To reduce complexity, we compute the metric for a reduced set of points.
+int32_t VPMContentAnalysis::TemporalDiffMetric_C() {
+ // size of original frame
+ int sizei = height_;
+ int sizej = width_;
+ uint32_t tempDiffSum = 0;
+ uint32_t pixelSum = 0;
+ uint64_t pixelSqSum = 0;
+
+ uint32_t num_pixels = 0; // Counter for # of pixels.
+ const int width_end = ((width_ - 2 * border_) & -16) + border_;
+
+ for (int i = border_; i < sizei - border_; i += skip_num_) {
+ for (int j = border_; j < width_end; j++) {
+ num_pixels += 1;
+ int ssn = i * sizej + j;
+
+ uint8_t currPixel = orig_frame_[ssn];
+ uint8_t prevPixel = prev_frame_[ssn];
+
+ tempDiffSum +=
+ static_cast<uint32_t>(abs(static_cast<int16_t>(currPixel - prevPixel)));
+ pixelSum += static_cast<uint32_t>(currPixel);
+ pixelSqSum += static_cast<uint64_t>(currPixel * currPixel);
+ }
+ }
+
+ // Default.
+ motion_magnitude_ = 0.0f;
+
+ if (tempDiffSum == 0)
+ return VPM_OK;
+
+ // Normalize over all pixels.
+ float const tempDiffAvg =
+ static_cast<float>(tempDiffSum) / static_cast<float>(num_pixels);
+ float const pixelSumAvg =
+ static_cast<float>(pixelSum) / static_cast<float>(num_pixels);
+ float const pixelSqSumAvg =
+ static_cast<float>(pixelSqSum) / static_cast<float>(num_pixels);
+ float contrast = pixelSqSumAvg - (pixelSumAvg * pixelSumAvg);
+
+ if (contrast > 0.0) {
+ contrast = sqrt(contrast);
+ motion_magnitude_ = tempDiffAvg / contrast;
+ }
+ return VPM_OK;
+}
+
+// Compute spatial metrics:
+// To reduce complexity, we compute the metric for a reduced set of points.
+// The spatial metrics are rough estimates of the prediction error cost for
+// each QM spatial mode: 2x2,1x2,2x1
+// The metrics are a simple estimate of the up-sampling prediction error,
+// estimated assuming sub-sampling for decimation (no filtering),
+// and up-sampling back up with simple bilinear interpolation.
+int32_t VPMContentAnalysis::ComputeSpatialMetrics_C() {
+ const int sizei = height_;
+ const int sizej = width_;
+
+ // Pixel mean square average: used to normalize the spatial metrics.
+ uint32_t pixelMSA = 0;
+
+ uint32_t spatialErrSum = 0;
+ uint32_t spatialErrVSum = 0;
+ uint32_t spatialErrHSum = 0;
+
+ // make sure work section is a multiple of 16
+ const int width_end = ((sizej - 2 * border_) & -16) + border_;
+
+ for (int i = border_; i < sizei - border_; i += skip_num_) {
+ for (int j = border_; j < width_end; j++) {
+ int ssn1 = i * sizej + j;
+ int ssn2 = (i + 1) * sizej + j; // bottom
+ int ssn3 = (i - 1) * sizej + j; // top
+ int ssn4 = i * sizej + j + 1; // right
+ int ssn5 = i * sizej + j - 1; // left
+
+ uint16_t refPixel1 = orig_frame_[ssn1] << 1;
+ uint16_t refPixel2 = orig_frame_[ssn1] << 2;
+
+ uint8_t bottPixel = orig_frame_[ssn2];
+ uint8_t topPixel = orig_frame_[ssn3];
+ uint8_t rightPixel = orig_frame_[ssn4];
+ uint8_t leftPixel = orig_frame_[ssn5];
+
+ spatialErrSum += static_cast<uint32_t>(abs(static_cast<int16_t>(
+ refPixel2 - static_cast<uint16_t>(bottPixel + topPixel + leftPixel +
+ rightPixel))));
+ spatialErrVSum += static_cast<uint32_t>(abs(static_cast<int16_t>(
+ refPixel1 - static_cast<uint16_t>(bottPixel + topPixel))));
+ spatialErrHSum += static_cast<uint32_t>(abs(static_cast<int16_t>(
+ refPixel1 - static_cast<uint16_t>(leftPixel + rightPixel))));
+ pixelMSA += orig_frame_[ssn1];
+ }
+ }
+
+ // Normalize over all pixels.
+ const float spatialErr = static_cast<float>(spatialErrSum >> 2);
+ const float spatialErrH = static_cast<float>(spatialErrHSum >> 1);
+ const float spatialErrV = static_cast<float>(spatialErrVSum >> 1);
+ const float norm = static_cast<float>(pixelMSA);
+
+ // 2X2:
+ spatial_pred_err_ = spatialErr / norm;
+ // 1X2:
+ spatial_pred_err_h_ = spatialErrH / norm;
+ // 2X1:
+ spatial_pred_err_v_ = spatialErrV / norm;
+ return VPM_OK;
+}
+
+VideoContentMetrics* VPMContentAnalysis::ContentMetrics() {
+ if (ca_Init_ == false)
+ return NULL;
+
+ content_metrics_->spatial_pred_err = spatial_pred_err_;
+ content_metrics_->spatial_pred_err_h = spatial_pred_err_h_;
+ content_metrics_->spatial_pred_err_v = spatial_pred_err_v_;
+ // Motion metric: normalized temporal difference (MAD).
+ content_metrics_->motion_magnitude = motion_magnitude_;
+
+ return content_metrics_;
+}
+
+} // namespace webrtc
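
The 2x2 spatial prediction error computed by ComputeSpatialMetrics_C() above
measures how poorly each pixel is predicted by its four neighbors:
|4*c - (top + bottom + left + right)|, summed over interior pixels and
normalized by the pixel sum. A minimal scalar sketch, using a 1-pixel border
instead of the module's 8 and a synthetic ramp plane; a linear ramp is
perfectly predicted by its neighbors, so the metric comes out zero here:

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

int main() {
  const int w = 8, h = 8;
  uint8_t y[64];
  for (int i = 0; i < 64; ++i)
    y[i] = static_cast<uint8_t>(16 * (i % 8));  // Horizontal luma ramp.
  uint32_t err = 0, msa = 0;
  for (int i = 1; i < h - 1; ++i) {
    for (int j = 1; j < w - 1; ++j) {
      int c = y[i * w + j];
      int nb = y[(i - 1) * w + j] + y[(i + 1) * w + j] +
               y[i * w + j - 1] + y[i * w + j + 1];
      err += abs(4 * c - nb);  // 2x2 prediction error at this pixel.
      msa += c;                // Pixel sum used for normalization.
    }
  }
  printf("spatial_pred_err = %f\n", (err / 4.0) / msa);  // 0 for a ramp.
  return 0;
}
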
diff --git a/webrtc/modules/video_processing/content_analysis.h b/webrtc/modules/video_processing/content_analysis.h
new file mode 100644
index 0000000000..d3a11bd091
--- /dev/null
+++ b/webrtc/modules/video_processing/content_analysis.h
@@ -0,0 +1,87 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_CONTENT_ANALYSIS_H_
+#define WEBRTC_MODULES_VIDEO_PROCESSING_CONTENT_ANALYSIS_H_
+
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/video_processing/include/video_processing_defines.h"
+#include "webrtc/typedefs.h"
+#include "webrtc/video_frame.h"
+
+namespace webrtc {
+
+class VPMContentAnalysis {
+ public:
+ // When |runtime_cpu_detection| is true, runtime selection of an optimized
+ // code path is allowed.
+ explicit VPMContentAnalysis(bool runtime_cpu_detection);
+ ~VPMContentAnalysis();
+
+ // Initialize ContentAnalysis - should be called prior to
+ // extractContentFeature
+ // Inputs: width, height
+ // Return value: 0 if OK, negative value upon error
+ int32_t Initialize(int width, int height);
+
+ // Extract content Feature - main function of ContentAnalysis
+ // Input: new frame
+ // Return value: pointer to structure containing content Analysis
+ // metrics or NULL value upon error
+ VideoContentMetrics* ComputeContentMetrics(const VideoFrame& inputFrame);
+
+ // Release all allocated memory
+ // Output: 0 if OK, negative value upon error
+ int32_t Release();
+
+ private:
+ // return motion metrics
+ VideoContentMetrics* ContentMetrics();
+
+ // Normalized temporal difference metric: for motion magnitude
+ typedef int32_t (VPMContentAnalysis::*TemporalDiffMetricFunc)();
+ TemporalDiffMetricFunc TemporalDiffMetric;
+ int32_t TemporalDiffMetric_C();
+
+ // Motion metric method: call 2 metrics (magnitude and size)
+ int32_t ComputeMotionMetrics();
+
+ // Spatial metric method: computes the 3 frame-average spatial
+ // prediction errors (1x2,2x1,2x2)
+ typedef int32_t (VPMContentAnalysis::*ComputeSpatialMetricsFunc)();
+ ComputeSpatialMetricsFunc ComputeSpatialMetrics;
+ int32_t ComputeSpatialMetrics_C();
+
+#if defined(WEBRTC_ARCH_X86_FAMILY)
+ int32_t ComputeSpatialMetrics_SSE2();
+ int32_t TemporalDiffMetric_SSE2();
+#endif
+
+ const uint8_t* orig_frame_;
+ uint8_t* prev_frame_;
+ int width_;
+ int height_;
+ int skip_num_;
+ int border_;
+
+ // Content Metrics: Stores the local average of the metrics.
+ float motion_magnitude_; // motion class
+ float spatial_pred_err_; // spatial class
+ float spatial_pred_err_h_; // spatial class
+ float spatial_pred_err_v_; // spatial class
+ bool first_frame_;
+ bool ca_Init_;
+
+ VideoContentMetrics* content_metrics_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_PROCESSING_CONTENT_ANALYSIS_H_
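
The ComputeSpatialMetrics and TemporalDiffMetric members above are pointers
to member functions, installed once at construction: the C path first, then
an optimized path if runtime CPU detection allows it. A minimal sketch of
that dispatch pattern; the class and method names here are illustrative
only, not WebRTC API:

#include <cstdio>

class Analyzer {
 public:
  explicit Analyzer(bool fast_path_supported) {
    metric_ = &Analyzer::MetricC;              // Safe default.
    if (fast_path_supported)
      metric_ = &Analyzer::MetricSimd;         // Swap in optimized path.
  }
  int Run() { return (this->*metric_)(); }     // Call through member pointer.

 private:
  typedef int (Analyzer::*MetricFunc)();
  int MetricC() { std::printf("scalar path\n"); return 0; }
  int MetricSimd() { std::printf("SIMD path\n"); return 0; }
  MetricFunc metric_;
};

int main() {
  return Analyzer(/*fast_path_supported=*/true).Run();  // Prints "SIMD path".
}
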
diff --git a/webrtc/modules/video_processing/content_analysis_sse2.cc b/webrtc/modules/video_processing/content_analysis_sse2.cc
new file mode 100644
index 0000000000..7a60a89b45
--- /dev/null
+++ b/webrtc/modules/video_processing/content_analysis_sse2.cc
@@ -0,0 +1,271 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_processing/content_analysis.h"
+
+#include <emmintrin.h>
+#include <math.h>
+
+namespace webrtc {
+
+int32_t VPMContentAnalysis::TemporalDiffMetric_SSE2() {
+ uint32_t num_pixels = 0; // counter for # of pixels
+ const uint8_t* imgBufO = orig_frame_ + border_ * width_ + border_;
+ const uint8_t* imgBufP = prev_frame_ + border_ * width_ + border_;
+
+ const int32_t width_end = ((width_ - 2 * border_) & -16) + border_;
+
+ __m128i sad_64 = _mm_setzero_si128();
+ __m128i sum_64 = _mm_setzero_si128();
+ __m128i sqsum_64 = _mm_setzero_si128();
+ const __m128i z = _mm_setzero_si128();
+
+ for (uint16_t i = 0; i < (height_ - 2 * border_); i += skip_num_) {
+ __m128i sqsum_32 = _mm_setzero_si128();
+
+ const uint8_t* lineO = imgBufO;
+ const uint8_t* lineP = imgBufP;
+
+ // Work on 16 pixels at a time. For HD content with a width of 1920
+ // this loop will run ~120 times (depending on border). The maximum for
+ // abs(o-p) and sum(o) per byte is 255. _mm_sad_epu8 produces 2 64-bit
+ // results which are then accumulated. There is no chance of
+ // rollover for these two accumulators.
+ // o*o will have a maximum of 255*255 = 65025. This would roll over
+ // a 16-bit accumulator (already 2*65025 > 65535), but fits easily in a
+ // 32-bit accumulator.
+ for (uint16_t j = 0; j < width_end - border_; j += 16) {
+ const __m128i o = _mm_loadu_si128((__m128i*)(lineO));
+ const __m128i p = _mm_loadu_si128((__m128i*)(lineP));
+
+ lineO += 16;
+ lineP += 16;
+
+ // Abs pixel difference between frames.
+ sad_64 = _mm_add_epi64(sad_64, _mm_sad_epu8(o, p));
+
+ // sum of all pixels in frame
+ sum_64 = _mm_add_epi64(sum_64, _mm_sad_epu8(o, z));
+
+ // Squared sum of all pixels in frame.
+ const __m128i olo = _mm_unpacklo_epi8(o, z);
+ const __m128i ohi = _mm_unpackhi_epi8(o, z);
+
+ const __m128i sqsum_32_lo = _mm_madd_epi16(olo, olo);
+ const __m128i sqsum_32_hi = _mm_madd_epi16(ohi, ohi);
+
+ sqsum_32 = _mm_add_epi32(sqsum_32, sqsum_32_lo);
+ sqsum_32 = _mm_add_epi32(sqsum_32, sqsum_32_hi);
+ }
+
+ // Add to 64 bit running sum as to not roll over.
+ sqsum_64 =
+ _mm_add_epi64(sqsum_64, _mm_add_epi64(_mm_unpackhi_epi32(sqsum_32, z),
+ _mm_unpacklo_epi32(sqsum_32, z)));
+
+ imgBufO += width_ * skip_num_;
+ imgBufP += width_ * skip_num_;
+ num_pixels += (width_end - border_);
+ }
+
+ __m128i sad_final_128;
+ __m128i sum_final_128;
+ __m128i sqsum_final_128;
+
+ // Bring sums out of vector registers and into integer register
+ // domain, summing them along the way.
+ _mm_store_si128(&sad_final_128, sad_64);
+ _mm_store_si128(&sum_final_128, sum_64);
+ _mm_store_si128(&sqsum_final_128, sqsum_64);
+
+ uint64_t* sad_final_64 = reinterpret_cast<uint64_t*>(&sad_final_128);
+ uint64_t* sum_final_64 = reinterpret_cast<uint64_t*>(&sum_final_128);
+ uint64_t* sqsum_final_64 = reinterpret_cast<uint64_t*>(&sqsum_final_128);
+
+ const uint32_t pixelSum = sum_final_64[0] + sum_final_64[1];
+ const uint64_t pixelSqSum = sqsum_final_64[0] + sqsum_final_64[1];
+ const uint32_t tempDiffSum = sad_final_64[0] + sad_final_64[1];
+
+ // Default.
+ motion_magnitude_ = 0.0f;
+
+ if (tempDiffSum == 0)
+ return VPM_OK;
+
+ // Normalize over all pixels.
+ const float tempDiffAvg =
+ static_cast<float>(tempDiffSum) / static_cast<float>(num_pixels);
+ const float pixelSumAvg =
+ static_cast<float>(pixelSum) / static_cast<float>(num_pixels);
+ const float pixelSqSumAvg =
+ static_cast<float>(pixelSqSum) / static_cast<float>(num_pixels);
+ float contrast = pixelSqSumAvg - (pixelSumAvg * pixelSumAvg);
+
+ if (contrast > 0.0) {
+ contrast = sqrt(contrast);
+ motion_magnitude_ = tempDiffAvg / contrast;
+ }
+
+ return VPM_OK;
+}
+
+int32_t VPMContentAnalysis::ComputeSpatialMetrics_SSE2() {
+ const uint8_t* imgBuf = orig_frame_ + border_ * width_;
+ const int32_t width_end = ((width_ - 2 * border_) & -16) + border_;
+
+ __m128i se_32 = _mm_setzero_si128();
+ __m128i sev_32 = _mm_setzero_si128();
+ __m128i seh_32 = _mm_setzero_si128();
+ __m128i msa_32 = _mm_setzero_si128();
+ const __m128i z = _mm_setzero_si128();
+
+ // Error is accumulated as a 32-bit value. HD content with a height of
+ // 1080 lines corresponds to about 67 macroblock rows. If the 16-bit row
+ // value is maxed out at 65535 for every row, 65535 * 1080 = 70777800,
+ // which will not roll over a 32-bit accumulator.
+ // skip_num_ is also used to reduce the number of rows
+ for (int32_t i = 0; i < (height_ - 2 * border_); i += skip_num_) {
+ __m128i se_16 = _mm_setzero_si128();
+ __m128i sev_16 = _mm_setzero_si128();
+ __m128i seh_16 = _mm_setzero_si128();
+ __m128i msa_16 = _mm_setzero_si128();
+
+ // Row error is accumulated as a 16-bit value, in 8 parallel
+ // accumulators. The max value of a 16-bit number is 65535. HD content
+ // (1080p) has a width of 1920, i.e. 120 macroblocks, and one
+ // macroblock is processed per iteration. The absolute max error at a
+ // point would be abs(0 - (255 + 255 + 255 + 255)), which equals 1020,
+ // and 120*1020 = 122400, so a specially crafted image could roll over,
+ // although the probability of hitting this is quite low on well
+ // behaved content. border_ could also be adjusted to concentrate on
+ // just the center of the images for an HD capture in order to reduce
+ // the possibility of rollover.
+ const uint8_t* lineTop = imgBuf - width_ + border_;
+ const uint8_t* lineCen = imgBuf + border_;
+ const uint8_t* lineBot = imgBuf + width_ + border_;
+
+ for (int32_t j = 0; j < width_end - border_; j += 16) {
+ const __m128i t = _mm_loadu_si128((__m128i*)(lineTop));
+ const __m128i l = _mm_loadu_si128((__m128i*)(lineCen - 1));
+ const __m128i c = _mm_loadu_si128((__m128i*)(lineCen));
+ const __m128i r = _mm_loadu_si128((__m128i*)(lineCen + 1));
+ const __m128i b = _mm_loadu_si128((__m128i*)(lineBot));
+
+ lineTop += 16;
+ lineCen += 16;
+ lineBot += 16;
+
+ // center pixel unpacked
+ __m128i clo = _mm_unpacklo_epi8(c, z);
+ __m128i chi = _mm_unpackhi_epi8(c, z);
+
+ // left right pixels unpacked and added together
+ const __m128i lrlo =
+ _mm_add_epi16(_mm_unpacklo_epi8(l, z), _mm_unpacklo_epi8(r, z));
+ const __m128i lrhi =
+ _mm_add_epi16(_mm_unpackhi_epi8(l, z), _mm_unpackhi_epi8(r, z));
+
+ // top & bottom pixels unpacked and added together
+ const __m128i tblo =
+ _mm_add_epi16(_mm_unpacklo_epi8(t, z), _mm_unpacklo_epi8(b, z));
+ const __m128i tbhi =
+ _mm_add_epi16(_mm_unpackhi_epi8(t, z), _mm_unpackhi_epi8(b, z));
+
+ // running sum of all pixels
+ msa_16 = _mm_add_epi16(msa_16, _mm_add_epi16(chi, clo));
+
+ clo = _mm_slli_epi16(clo, 1);
+ chi = _mm_slli_epi16(chi, 1);
+ const __m128i sevtlo = _mm_subs_epi16(clo, tblo);
+ const __m128i sevthi = _mm_subs_epi16(chi, tbhi);
+ const __m128i sehtlo = _mm_subs_epi16(clo, lrlo);
+ const __m128i sehthi = _mm_subs_epi16(chi, lrhi);
+
+ clo = _mm_slli_epi16(clo, 1);
+ chi = _mm_slli_epi16(chi, 1);
+ const __m128i setlo = _mm_subs_epi16(clo, _mm_add_epi16(lrlo, tblo));
+ const __m128i sethi = _mm_subs_epi16(chi, _mm_add_epi16(lrhi, tbhi));
+
+ // Add to 16 bit running sum
+ se_16 =
+ _mm_add_epi16(se_16, _mm_max_epi16(setlo, _mm_subs_epi16(z, setlo)));
+ se_16 =
+ _mm_add_epi16(se_16, _mm_max_epi16(sethi, _mm_subs_epi16(z, sethi)));
+ sev_16 = _mm_add_epi16(sev_16,
+ _mm_max_epi16(sevtlo, _mm_subs_epi16(z, sevtlo)));
+ sev_16 = _mm_add_epi16(sev_16,
+ _mm_max_epi16(sevthi, _mm_subs_epi16(z, sevthi)));
+ seh_16 = _mm_add_epi16(seh_16,
+ _mm_max_epi16(sehtlo, _mm_subs_epi16(z, sehtlo)));
+ seh_16 = _mm_add_epi16(seh_16,
+ _mm_max_epi16(sehthi, _mm_subs_epi16(z, sehthi)));
+ }
+
+ // Add to 32 bit running sum as to not roll over.
+ se_32 = _mm_add_epi32(se_32, _mm_add_epi32(_mm_unpackhi_epi16(se_16, z),
+ _mm_unpacklo_epi16(se_16, z)));
+ sev_32 =
+ _mm_add_epi32(sev_32, _mm_add_epi32(_mm_unpackhi_epi16(sev_16, z),
+ _mm_unpacklo_epi16(sev_16, z)));
+ seh_32 =
+ _mm_add_epi32(seh_32, _mm_add_epi32(_mm_unpackhi_epi16(seh_16, z),
+ _mm_unpacklo_epi16(seh_16, z)));
+ msa_32 =
+ _mm_add_epi32(msa_32, _mm_add_epi32(_mm_unpackhi_epi16(msa_16, z),
+ _mm_unpacklo_epi16(msa_16, z)));
+
+ imgBuf += width_ * skip_num_;
+ }
+
+ __m128i se_128;
+ __m128i sev_128;
+ __m128i seh_128;
+ __m128i msa_128;
+
+ // Bring sums out of vector registers and into integer register
+ // domain, summing them along the way.
+ _mm_store_si128(&se_128, _mm_add_epi64(_mm_unpackhi_epi32(se_32, z),
+ _mm_unpacklo_epi32(se_32, z)));
+ _mm_store_si128(&sev_128, _mm_add_epi64(_mm_unpackhi_epi32(sev_32, z),
+ _mm_unpacklo_epi32(sev_32, z)));
+ _mm_store_si128(&seh_128, _mm_add_epi64(_mm_unpackhi_epi32(seh_32, z),
+ _mm_unpacklo_epi32(seh_32, z)));
+ _mm_store_si128(&msa_128, _mm_add_epi64(_mm_unpackhi_epi32(msa_32, z),
+ _mm_unpacklo_epi32(msa_32, z)));
+
+ uint64_t* se_64 = reinterpret_cast<uint64_t*>(&se_128);
+ uint64_t* sev_64 = reinterpret_cast<uint64_t*>(&sev_128);
+ uint64_t* seh_64 = reinterpret_cast<uint64_t*>(&seh_128);
+ uint64_t* msa_64 = reinterpret_cast<uint64_t*>(&msa_128);
+
+ const uint32_t spatialErrSum = se_64[0] + se_64[1];
+ const uint32_t spatialErrVSum = sev_64[0] + sev_64[1];
+ const uint32_t spatialErrHSum = seh_64[0] + seh_64[1];
+ const uint32_t pixelMSA = msa_64[0] + msa_64[1];
+
+ // Normalize over all pixels.
+ const float spatialErr = static_cast<float>(spatialErrSum >> 2);
+ const float spatialErrH = static_cast<float>(spatialErrHSum >> 1);
+ const float spatialErrV = static_cast<float>(spatialErrVSum >> 1);
+ const float norm = static_cast<float>(pixelMSA);
+
+ // 2X2:
+ spatial_pred_err_ = spatialErr / norm;
+
+ // 1X2:
+ spatial_pred_err_h_ = spatialErrH / norm;
+
+ // 2X1:
+ spatial_pred_err_v_ = spatialErrV / norm;
+
+ return VPM_OK;
+}
+
+} // namespace webrtc
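
The SAD accumulation above leans on _mm_sad_epu8 producing two 64-bit partial
sums per 16-byte block, which is why no rollover handling is needed for the
difference and pixel sums. A minimal standalone sketch of that reduction
(x86 with SSE2 assumed; the input arrays are illustrative stand-ins for one
16-pixel slice of the current and previous frames):

#include <emmintrin.h>
#include <stdint.h>
#include <stdio.h>

int main() {
  uint8_t cur[16], prev[16];
  for (int i = 0; i < 16; ++i) {
    cur[i] = static_cast<uint8_t>(10 * i);
    prev[i] = static_cast<uint8_t>(10 * i + 3);  // Constant difference of 3.
  }
  const __m128i o = _mm_loadu_si128(reinterpret_cast<const __m128i*>(cur));
  const __m128i p = _mm_loadu_si128(reinterpret_cast<const __m128i*>(prev));
  // _mm_sad_epu8 yields two 64-bit lanes, each the SAD of 8 byte pairs.
  const __m128i sad = _mm_sad_epu8(o, p);
  uint64_t lanes[2];
  _mm_storeu_si128(reinterpret_cast<__m128i*>(lanes), sad);
  printf("SAD = %llu\n",
         (unsigned long long)(lanes[0] + lanes[1]));  // Prints "SAD = 48".
  return 0;
}
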
diff --git a/webrtc/modules/video_processing/deflickering.cc b/webrtc/modules/video_processing/deflickering.cc
new file mode 100644
index 0000000000..0e936ce9b7
--- /dev/null
+++ b/webrtc/modules/video_processing/deflickering.cc
@@ -0,0 +1,402 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_processing/deflickering.h"
+
+#include <math.h>
+#include <stdlib.h>
+
+#include "webrtc/base/logging.h"
+#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
+#include "webrtc/system_wrappers/include/sort.h"
+
+namespace webrtc {
+
+// Detection constants
+// (Q4) Maximum allowed deviation for detection.
+enum { kFrequencyDeviation = 39 };
+// (Q4) Minimum frequency that can be detected.
+enum { kMinFrequencyToDetect = 32 };
+// Number of flickers before we accept detection
+enum { kNumFlickerBeforeDetect = 2 };
+enum { kmean_valueScaling = 4 }; // (Q4) In power of 2
+// Dead-zone region in terms of pixel values
+enum { kZeroCrossingDeadzone = 10 };
+// Deflickering constants.
+// Compute the quantiles over 1 / DownsamplingFactor of the image.
+enum { kDownsamplingFactor = 8 };
+enum { kLog2OfDownsamplingFactor = 3 };
+
+// To generate in Matlab:
+// >> probUW16 = round(2^11 *
+// [0.05,0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,0.95,0.97]);
+// >> fprintf('%d, ', probUW16)
+// Resolution reduced to avoid overflow when multiplying with the
+// (potentially) large number of pixels.
+const uint16_t VPMDeflickering::prob_uw16_[kNumProbs] = {
+ 102, 205, 410, 614, 819, 1024,
+ 1229, 1434, 1638, 1843, 1946, 1987}; // <Q11>
+
+// To generate in Matlab:
+// >> numQuants = 14; maxOnlyLength = 5;
+// >> weightUW16 = round(2^15 *
+// [linspace(0.5, 1.0, numQuants - maxOnlyLength)]);
+// >> fprintf('%d, %d,\n ', weightUW16);
+const uint16_t VPMDeflickering::weight_uw16_[kNumQuants - kMaxOnlyLength] = {
+ 16384, 18432, 20480, 22528, 24576, 26624, 28672, 30720, 32768}; // <Q15>
+
+VPMDeflickering::VPMDeflickering() {
+ Reset();
+}
+
+VPMDeflickering::~VPMDeflickering() {}
+
+void VPMDeflickering::Reset() {
+ mean_buffer_length_ = 0;
+ detection_state_ = 0;
+ frame_rate_ = 0;
+
+ memset(mean_buffer_, 0, sizeof(int32_t) * kMeanBufferLength);
+ memset(timestamp_buffer_, 0, sizeof(int32_t) * kMeanBufferLength);
+
+ // Initialize the history with a uniformly distributed histogram.
+ quant_hist_uw8_[0][0] = 0;
+ quant_hist_uw8_[0][kNumQuants - 1] = 255;
+ for (int32_t i = 0; i < kNumProbs; i++) {
+ // Unsigned round. <Q0>
+ quant_hist_uw8_[0][i + 1] =
+ static_cast<uint8_t>((prob_uw16_[i] * 255 + (1 << 10)) >> 11);
+ }
+
+ for (int32_t i = 1; i < kFrameHistory_size; i++) {
+ memcpy(quant_hist_uw8_[i], quant_hist_uw8_[0],
+ sizeof(uint8_t) * kNumQuants);
+ }
+}
+
+int32_t VPMDeflickering::ProcessFrame(VideoFrame* frame,
+ VideoProcessing::FrameStats* stats) {
+ assert(frame);
+ uint32_t frame_memory;
+ uint8_t quant_uw8[kNumQuants];
+ uint8_t maxquant_uw8[kNumQuants];
+ uint8_t minquant_uw8[kNumQuants];
+ uint16_t target_quant_uw16[kNumQuants];
+ uint16_t increment_uw16;
+ uint8_t map_uw8[256];
+
+ uint16_t tmp_uw16;
+ uint32_t tmp_uw32;
+ int width = frame->width();
+ int height = frame->height();
+
+ if (frame->IsZeroSize()) {
+ return VPM_GENERAL_ERROR;
+ }
+
+ // Stricter height check due to subsampling size calculation below.
+ if (height < 2) {
+ LOG(LS_ERROR) << "Invalid frame size.";
+ return VPM_GENERAL_ERROR;
+ }
+
+ if (!VideoProcessing::ValidFrameStats(*stats)) {
+ return VPM_GENERAL_ERROR;
+ }
+
+ if (PreDetection(frame->timestamp(), *stats) == -1)
+ return VPM_GENERAL_ERROR;
+
+ // Flicker detection
+ int32_t det_flicker = DetectFlicker();
+ if (det_flicker < 0) {
+ return VPM_GENERAL_ERROR;
+ } else if (det_flicker != 1) {
+ return 0;
+ }
+
+ // Size of luminance component.
+ const uint32_t y_size = height * width;
+
+ const uint32_t y_sub_size =
+ width * (((height - 1) >> kLog2OfDownsamplingFactor) + 1);
+ uint8_t* y_sorted = new uint8_t[y_sub_size];
+ uint32_t sort_row_idx = 0;
+ for (int i = 0; i < height; i += kDownsamplingFactor) {
+ memcpy(y_sorted + sort_row_idx * width, frame->buffer(kYPlane) + i * width,
+ width);
+ sort_row_idx++;
+ }
+
+ webrtc::Sort(y_sorted, y_sub_size, webrtc::TYPE_UWord8);
+
+ uint32_t prob_idx_uw32 = 0;
+ quant_uw8[0] = 0;
+ quant_uw8[kNumQuants - 1] = 255;
+
+ // Ensure we won't get an overflow below.
+ // In practice, the number of subsampled pixels will not become this large.
+ if (y_sub_size > (1 << 21) - 1) {
+ LOG(LS_ERROR) << "Subsampled number of pixels too large.";
+ delete[] y_sorted; // Avoid leaking the subsampled copy on early return.
+ return -1;
+ }
+
+ for (int32_t i = 0; i < kNumProbs; i++) {
+ // <Q0>.
+ prob_idx_uw32 = WEBRTC_SPL_UMUL_32_16(y_sub_size, prob_uw16_[i]) >> 11;
+ quant_uw8[i + 1] = y_sorted[prob_idx_uw32];
+ }
+
+ delete[] y_sorted;
+ y_sorted = NULL;
+
+ // Shift history for new frame.
+ memmove(quant_hist_uw8_[1], quant_hist_uw8_[0],
+ (kFrameHistory_size - 1) * kNumQuants * sizeof(uint8_t));
+ // Store current frame in history.
+ memcpy(quant_hist_uw8_[0], quant_uw8, kNumQuants * sizeof(uint8_t));
+
+ // We use a frame memory equal to the ceiling of half the frame rate to
+ // ensure we capture an entire period of flicker.
+ frame_memory = (frame_rate_ + (1 << 5)) >> 5; // Unsigned ceiling. <Q0>
+ // frame_rate_ in Q4.
+ if (frame_memory > kFrameHistory_size) {
+ frame_memory = kFrameHistory_size;
+ }
+
+ // Get maximum and minimum.
+ for (int32_t i = 0; i < kNumQuants; i++) {
+ maxquant_uw8[i] = 0;
+ minquant_uw8[i] = 255;
+ for (uint32_t j = 0; j < frame_memory; j++) {
+ if (quant_hist_uw8_[j][i] > maxquant_uw8[i]) {
+ maxquant_uw8[i] = quant_hist_uw8_[j][i];
+ }
+
+ if (quant_hist_uw8_[j][i] < minquant_uw8[i]) {
+ minquant_uw8[i] = quant_hist_uw8_[j][i];
+ }
+ }
+ }
+
+ // Get target quantiles.
+ for (int32_t i = 0; i < kNumQuants - kMaxOnlyLength; i++) {
+ // target = w * maxquant_uw8 + (1 - w) * minquant_uw8
+ // Weights w = |weight_uw16_| are in Q15, hence the final output has to be
+ // right shifted by 8 to end up in Q7.
+ target_quant_uw16[i] = static_cast<uint16_t>(
+ (weight_uw16_[i] * maxquant_uw8[i] +
+ ((1 << 15) - weight_uw16_[i]) * minquant_uw8[i]) >>
+ 8); // <Q7>
+ }
+
+ for (int32_t i = kNumQuants - kMaxOnlyLength; i < kNumQuants; i++) {
+ target_quant_uw16[i] = ((uint16_t)maxquant_uw8[i]) << 7;
+ }
+
+ // Compute the map from input to output pixels.
+ uint16_t map_uw16; // <Q7>
+ for (int32_t i = 1; i < kNumQuants; i++) {
+ // As quant and targetQuant are limited to UWord8, it's safe to use Q7 here.
+ tmp_uw32 =
+ static_cast<uint32_t>(target_quant_uw16[i] - target_quant_uw16[i - 1]);
+ tmp_uw16 = static_cast<uint16_t>(quant_uw8[i] - quant_uw8[i - 1]); // <Q0>
+
+ if (tmp_uw16 > 0) {
+ increment_uw16 =
+ static_cast<uint16_t>(WebRtcSpl_DivU32U16(tmp_uw32,
+ tmp_uw16)); // <Q7>
+ } else {
+ // The value is irrelevant; the loop below will only iterate once.
+ increment_uw16 = 0;
+ }
+
+ map_uw16 = target_quant_uw16[i - 1];
+ for (uint32_t j = quant_uw8[i - 1]; j < (uint32_t)(quant_uw8[i] + 1); j++) {
+ // Unsigned round. <Q0>
+ map_uw8[j] = (uint8_t)((map_uw16 + (1 << 6)) >> 7);
+ map_uw16 += increment_uw16;
+ }
+ }
+
+ // Map to the output frame.
+ uint8_t* buffer = frame->buffer(kYPlane);
+ for (uint32_t i = 0; i < y_size; i++) {
+ buffer[i] = map_uw8[buffer[i]];
+ }
+
+ // Frame was altered, so reset stats.
+ VideoProcessing::ClearFrameStats(stats);
+
+ return VPM_OK;
+}
+
+/**
+ Performs some pre-detection operations. Must be called before
+ DetectFlicker().
+
+ \param[in] timestamp Timestamp of the current frame.
+ \param[in] stats Statistics of the current frame.
+
+ \return 0: Success\n
+ 2: Detection not possible because the flickering frequency is too
+ close to zero.\n
+ -1: Error
+*/
+int32_t VPMDeflickering::PreDetection(
+ const uint32_t timestamp,
+ const VideoProcessing::FrameStats& stats) {
+ int32_t mean_val; // Mean value of frame (Q4)
+ uint32_t frame_rate = 0;
+ int32_t meanBufferLength; // Temp variable.
+
+ mean_val = ((stats.sum << kmean_valueScaling) / stats.num_pixels);
+ // Update mean value buffer.
+ // This should be done even though we might end up in an unreliable detection.
+ memmove(mean_buffer_ + 1, mean_buffer_,
+ (kMeanBufferLength - 1) * sizeof(int32_t));
+ mean_buffer_[0] = mean_val;
+
+ // Update timestamp buffer.
+ // This should be done even though we might end up in an unreliable detection.
+ memmove(timestamp_buffer_ + 1, timestamp_buffer_,
+ (kMeanBufferLength - 1) * sizeof(uint32_t));
+ timestamp_buffer_[0] = timestamp;
+
+ /* Compute current frame rate (Q4) */
+ if (timestamp_buffer_[kMeanBufferLength - 1] != 0) {
+ frame_rate = ((90000 << 4) * (kMeanBufferLength - 1));
+ frame_rate /=
+ (timestamp_buffer_[0] - timestamp_buffer_[kMeanBufferLength - 1]);
+ } else if (timestamp_buffer_[1] != 0) {
+ frame_rate = (90000 << 4) / (timestamp_buffer_[0] - timestamp_buffer_[1]);
+ }
+
+ /* Determine required size of mean value buffer (mean_buffer_length_) */
+ if (frame_rate == 0) {
+ meanBufferLength = 1;
+ } else {
+ meanBufferLength =
+ (kNumFlickerBeforeDetect * frame_rate) / kMinFrequencyToDetect;
+ }
+ /* Sanity check of buffer length */
+ if (meanBufferLength >= kMeanBufferLength) {
+ /* Too long buffer. The flickering frequency is too close to zero, which
+ * makes the estimation unreliable.
+ */
+ mean_buffer_length_ = 0;
+ return 2;
+ }
+ mean_buffer_length_ = meanBufferLength;
+
+ if ((timestamp_buffer_[mean_buffer_length_ - 1] != 0) &&
+ (mean_buffer_length_ != 1)) {
+ frame_rate = ((90000 << 4) * (mean_buffer_length_ - 1));
+ frame_rate /=
+ (timestamp_buffer_[0] - timestamp_buffer_[mean_buffer_length_ - 1]);
+ } else if (timestamp_buffer_[1] != 0) {
+ frame_rate = (90000 << 4) / (timestamp_buffer_[0] - timestamp_buffer_[1]);
+ }
+ frame_rate_ = frame_rate;
+
+ return VPM_OK;
+}
+
+/**
+ This function detects flicker in the video stream. As a side effect the
+ mean value buffer is updated with the new mean value.
+
+ \return 0: No flickering detected\n
+ 1: Flickering detected\n
+ 2: Detection not possible due to an unreliable frequency interval\n
+ -1: Error
+*/
+int32_t VPMDeflickering::DetectFlicker() {
+ uint32_t i;
+ int32_t freqEst; // (Q4) Frequency estimate to base detection upon
+ int32_t ret_val = -1;
+
+ /* Sanity check for mean_buffer_length_ */
+ if (mean_buffer_length_ < 2) {
+ /* Not possible to estimate frequency */
+ return 2;
+ }
+ // Count zero crossings with a dead zone to be robust against noise. If the
+ // noise std is 2 pixels, this corresponds to about a 95% confidence interval.
+ int32_t deadzone = (kZeroCrossingDeadzone << kmean_valueScaling); // Q4
+ int32_t meanOfBuffer = 0; // Mean value of mean value buffer.
+ int32_t numZeros = 0; // Number of zeros that cross the dead-zone.
+ int32_t cntState = 0; // State variable for zero crossing regions.
+ int32_t cntStateOld = 0; // Previous state for zero crossing regions.
+
+ for (i = 0; i < mean_buffer_length_; i++) {
+ meanOfBuffer += mean_buffer_[i];
+ }
+ meanOfBuffer += (mean_buffer_length_ >> 1); // Rounding, not truncation.
+ meanOfBuffer /= mean_buffer_length_;
+
+ // Count zero crossings.
+ cntStateOld = (mean_buffer_[0] >= (meanOfBuffer + deadzone));
+ cntStateOld -= (mean_buffer_[0] <= (meanOfBuffer - deadzone));
+ for (i = 1; i < mean_buffer_length_; i++) {
+ cntState = (mean_buffer_[i] >= (meanOfBuffer + deadzone));
+ cntState -= (mean_buffer_[i] <= (meanOfBuffer - deadzone));
+ if (cntStateOld == 0) {
+ cntStateOld = -cntState;
+ }
+ if (((cntState + cntStateOld) == 0) && (cntState != 0)) {
+ numZeros++;
+ cntStateOld = cntState;
+ }
+ }
+ // END count zero crossings.
+
+ /* Frequency estimation according to:
+ * freqEst = numZeros * frame_rate / 2 / mean_buffer_length_;
+ *
+ * Resolution is set to Q4
+ */
+ freqEst = ((numZeros * 90000) << 3);
+ freqEst /=
+ (timestamp_buffer_[0] - timestamp_buffer_[mean_buffer_length_ - 1]);
+
+ /* Translate frequency estimate to regions close to 100 and 120 Hz */
+ uint8_t freqState = 0; // Current translation state;
+ // (0) Not in interval,
+ // (1) Within valid interval,
+ // (2) Out of range
+ int32_t freqAlias = freqEst;
+ if (freqEst > kMinFrequencyToDetect) {
+ uint8_t aliasState = 1;
+ while (freqState == 0) {
+ /* Increase frequency */
+ freqAlias += (aliasState * frame_rate_);
+ freqAlias += ((freqEst << 1) * (1 - (aliasState << 1)));
+ /* Compute state */
+ freqState = (abs(freqAlias - (100 << 4)) <= kFrequencyDeviation);
+ freqState += (abs(freqAlias - (120 << 4)) <= kFrequencyDeviation);
+ freqState += 2 * (freqAlias > ((120 << 4) + kFrequencyDeviation));
+ /* Switch alias state */
+ aliasState++;
+ aliasState &= 0x01;
+ }
+ }
+ /* Is frequency estimate within detection region? */
+ if (freqState == 1) {
+ ret_val = 1;
+ } else if (freqState == 0) {
+ ret_val = 2;
+ } else {
+ ret_val = 0;
+ }
+ return ret_val;
+}
+
+} // namespace webrtc
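
The Q4 frame-rate estimate in PreDetection() above divides
(90000 << 4) * (n - 1) by the span of the 90 kHz RTP timestamps across the
mean-value buffer, with the newest timestamp first. A small worked sketch
with made-up timestamps (30 fps spacing, i.e. 3000 ticks per frame):

#include <stdint.h>
#include <stdio.h>

int main() {
  const uint32_t ts[3] = {9000, 6000, 3000};  // Newest first.
  const uint32_t n = 3;
  // rate_q4 = (90000 << 4) * (n - 1) / (ts[0] - ts[n - 1]), in Q4.
  uint32_t rate_q4 = ((90000u << 4) * (n - 1)) / (ts[0] - ts[n - 1]);
  printf("frame rate = %.2f fps (Q4 value %u)\n", rate_q4 / 16.0, rate_q4);
  return 0;  // Prints 30.00 fps (Q4 value 480).
}
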
diff --git a/webrtc/modules/video_processing/deflickering.h b/webrtc/modules/video_processing/deflickering.h
new file mode 100644
index 0000000000..3ff2723aba
--- /dev/null
+++ b/webrtc/modules/video_processing/deflickering.h
@@ -0,0 +1,55 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_DEFLICKERING_H_
+#define WEBRTC_MODULES_VIDEO_PROCESSING_DEFLICKERING_H_
+
+#include <string.h> // NULL
+
+#include "webrtc/modules/video_processing/include/video_processing.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+class VPMDeflickering {
+ public:
+ VPMDeflickering();
+ ~VPMDeflickering();
+
+ void Reset();
+ int32_t ProcessFrame(VideoFrame* frame, VideoProcessing::FrameStats* stats);
+
+ private:
+ int32_t PreDetection(uint32_t timestamp,
+ const VideoProcessing::FrameStats& stats);
+
+ int32_t DetectFlicker();
+
+ enum { kMeanBufferLength = 32 };
+ enum { kFrameHistory_size = 15 };
+ enum { kNumProbs = 12 };
+ enum { kNumQuants = kNumProbs + 2 };
+ enum { kMaxOnlyLength = 5 };
+
+ uint32_t mean_buffer_length_;
+ uint8_t detection_state_; // 0: No flickering
+ // 1: Flickering detected
+ // 2: In flickering
+ int32_t mean_buffer_[kMeanBufferLength];
+ uint32_t timestamp_buffer_[kMeanBufferLength];
+ uint32_t frame_rate_;
+ static const uint16_t prob_uw16_[kNumProbs];
+ static const uint16_t weight_uw16_[kNumQuants - kMaxOnlyLength];
+ uint8_t quant_hist_uw8_[kFrameHistory_size][kNumQuants];
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_PROCESSING_DEFLICKERING_H_
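
The remapping step in ProcessFrame() above reduces to building a 256-entry
lookup table from the target quantiles and applying it to every luma sample.
A minimal sketch of the LUT application with a toy identity-plus-offset
table; the real table comes from the quantile interpolation loop:

#include <stdint.h>
#include <stdio.h>

int main() {
  uint8_t map[256];
  for (int i = 0; i < 256; ++i)
    map[i] = static_cast<uint8_t>(i < 246 ? i + 10 : 255);  // Toy mapping.
  uint8_t luma[8] = {0, 16, 32, 64, 128, 200, 250, 255};
  for (int i = 0; i < 8; ++i) luma[i] = map[luma[i]];  // In-place remap.
  for (int i = 0; i < 8; ++i) printf("%u ", luma[i]);
  printf("\n");  // Prints: 10 26 42 74 138 210 255 255
  return 0;
}
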
diff --git a/webrtc/modules/video_processing/frame_preprocessor.cc b/webrtc/modules/video_processing/frame_preprocessor.cc
new file mode 100644
index 0000000000..6778a597be
--- /dev/null
+++ b/webrtc/modules/video_processing/frame_preprocessor.cc
@@ -0,0 +1,141 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_processing/frame_preprocessor.h"
+
+#include "webrtc/modules/video_processing/video_denoiser.h"
+
+namespace webrtc {
+
+VPMFramePreprocessor::VPMFramePreprocessor()
+ : content_metrics_(nullptr),
+ resampled_frame_(),
+ enable_ca_(false),
+ frame_cnt_(0) {
+ spatial_resampler_ = new VPMSimpleSpatialResampler();
+ ca_ = new VPMContentAnalysis(true);
+ vd_ = new VPMVideoDecimator();
+}
+
+VPMFramePreprocessor::~VPMFramePreprocessor() {
+ Reset();
+ delete ca_;
+ delete vd_;
+ delete spatial_resampler_;
+}
+
+void VPMFramePreprocessor::Reset() {
+ ca_->Release();
+ vd_->Reset();
+ content_metrics_ = nullptr;
+ spatial_resampler_->Reset();
+ enable_ca_ = false;
+ frame_cnt_ = 0;
+}
+
+void VPMFramePreprocessor::EnableTemporalDecimation(bool enable) {
+ vd_->EnableTemporalDecimation(enable);
+}
+
+void VPMFramePreprocessor::EnableContentAnalysis(bool enable) {
+ enable_ca_ = enable;
+}
+
+void VPMFramePreprocessor::SetInputFrameResampleMode(
+ VideoFrameResampling resampling_mode) {
+ spatial_resampler_->SetInputFrameResampleMode(resampling_mode);
+}
+
+int32_t VPMFramePreprocessor::SetTargetResolution(uint32_t width,
+ uint32_t height,
+ uint32_t frame_rate) {
+ if ((width == 0) || (height == 0) || (frame_rate == 0)) {
+ return VPM_PARAMETER_ERROR;
+ }
+ int32_t ret_val = spatial_resampler_->SetTargetFrameSize(width, height);
+
+ if (ret_val < 0)
+ return ret_val;
+
+ vd_->SetTargetFramerate(frame_rate);
+ return VPM_OK;
+}
+
+void VPMFramePreprocessor::SetTargetFramerate(int frame_rate) {
+ if (frame_rate == -1) {
+ vd_->EnableTemporalDecimation(false);
+ } else {
+ vd_->EnableTemporalDecimation(true);
+ vd_->SetTargetFramerate(frame_rate);
+ }
+}
+
+void VPMFramePreprocessor::UpdateIncomingframe_rate() {
+ vd_->UpdateIncomingframe_rate();
+}
+
+uint32_t VPMFramePreprocessor::GetDecimatedFrameRate() {
+ return vd_->GetDecimatedFrameRate();
+}
+
+uint32_t VPMFramePreprocessor::GetDecimatedWidth() const {
+ return spatial_resampler_->TargetWidth();
+}
+
+uint32_t VPMFramePreprocessor::GetDecimatedHeight() const {
+ return spatial_resampler_->TargetHeight();
+}
+
+void VPMFramePreprocessor::EnableDenosing(bool enable) {
+ if (enable) {
+ denoiser_.reset(new VideoDenoiser(true));
+ } else {
+ denoiser_.reset();
+ }
+}
+
+const VideoFrame* VPMFramePreprocessor::PreprocessFrame(
+ const VideoFrame& frame) {
+ if (frame.IsZeroSize()) {
+ return nullptr;
+ }
+
+ vd_->UpdateIncomingframe_rate();
+ if (vd_->DropFrame()) {
+ return nullptr;
+ }
+
+ const VideoFrame* current_frame = &frame;
+ if (denoiser_) {
+ denoiser_->DenoiseFrame(*current_frame, &denoised_frame_);
+ current_frame = &denoised_frame_;
+ }
+
+ if (spatial_resampler_->ApplyResample(current_frame->width(),
+ current_frame->height())) {
+ if (spatial_resampler_->ResampleFrame(*current_frame, &resampled_frame_) !=
+ VPM_OK) {
+ return nullptr;
+ }
+ current_frame = &resampled_frame_;
+ }
+
+ // Perform content analysis on the frame to be encoded.
+ if (enable_ca_ && frame_cnt_ % kSkipFrameCA == 0) {
+ // Compute new metrics every |kSkipFrameCA| frames, starting with
+ // the first frame.
+ content_metrics_ = ca_->ComputeContentMetrics(*current_frame);
+ }
+ ++frame_cnt_;
+ return current_frame;
+}
+
+VideoContentMetrics* VPMFramePreprocessor::GetContentMetrics() const {
+ return content_metrics_;
+}
+
+} // namespace webrtc
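
PreprocessFrame() above threads a single |current_frame| pointer through the
optional denoise and resample stages, redirecting it to a stage's output
frame only when that stage actually runs; the encoder then sees the last
stage that executed. A minimal sketch of that hand-off with stand-in types
(none of these names are WebRTC classes):

#include <stdio.h>

struct Frame { const char* tag; };

int main() {
  Frame input = {"input"}, denoised = {"denoised"}, resampled = {"resampled"};
  bool denoise_enabled = true, resample_needed = false;
  const Frame* current = &input;
  if (denoise_enabled) current = &denoised;   // DenoiseFrame(input, &denoised)
  if (resample_needed) current = &resampled;  // ResampleFrame(*current, ...)
  printf("encode sees: %s\n", current->tag);  // Prints "denoised".
  return 0;
}
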
diff --git a/webrtc/modules/video_processing/frame_preprocessor.h b/webrtc/modules/video_processing/frame_preprocessor.h
new file mode 100644
index 0000000000..5bdc576f37
--- /dev/null
+++ b/webrtc/modules/video_processing/frame_preprocessor.h
@@ -0,0 +1,84 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_FRAME_PREPROCESSOR_H_
+#define WEBRTC_MODULES_VIDEO_PROCESSING_FRAME_PREPROCESSOR_H_
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/video_processing/include/video_processing.h"
+#include "webrtc/modules/video_processing/content_analysis.h"
+#include "webrtc/modules/video_processing/spatial_resampler.h"
+#include "webrtc/modules/video_processing/video_decimator.h"
+#include "webrtc/typedefs.h"
+#include "webrtc/video_frame.h"
+
+namespace webrtc {
+
+class VideoDenoiser;
+
+// All pointers/members in this class are assumed to be protected by the class
+// owner.
+class VPMFramePreprocessor {
+ public:
+ VPMFramePreprocessor();
+ ~VPMFramePreprocessor();
+
+ void Reset();
+
+ // Enable temporal decimation.
+ void EnableTemporalDecimation(bool enable);
+
+ void SetInputFrameResampleMode(VideoFrameResampling resampling_mode);
+
+ // Enable content analysis.
+ void EnableContentAnalysis(bool enable);
+
+ // Set target resolution: frame rate and dimension.
+ int32_t SetTargetResolution(uint32_t width,
+ uint32_t height,
+ uint32_t frame_rate);
+
+ // Set target frame rate.
+ void SetTargetFramerate(int frame_rate);
+
+ // Update incoming frame rate/dimension.
+ void UpdateIncomingframe_rate();
+
+ int32_t updateIncomingFrameSize(uint32_t width, uint32_t height);
+
+ // Get decimated values: frame rate/dimension.
+ uint32_t GetDecimatedFrameRate();
+ uint32_t GetDecimatedWidth() const;
+ uint32_t GetDecimatedHeight() const;
+
+ // Preprocess output:
+ void EnableDenosing(bool enable);
+ const VideoFrame* PreprocessFrame(const VideoFrame& frame);
+ VideoContentMetrics* GetContentMetrics() const;
+
+ private:
+ // The content does not change so much every frame, so to reduce complexity
+ // we can compute new content metrics every |kSkipFrameCA| frames.
+ enum { kSkipFrameCA = 2 };
+
+ VideoContentMetrics* content_metrics_;
+ VideoFrame denoised_frame_;
+ VideoFrame resampled_frame_;
+ VPMSpatialResampler* spatial_resampler_;
+ VPMContentAnalysis* ca_;
+ VPMVideoDecimator* vd_;
+ rtc::scoped_ptr<VideoDenoiser> denoiser_;
+ bool enable_ca_;
+ uint32_t frame_cnt_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_PROCESSING_FRAME_PREPROCESSOR_H_
diff --git a/webrtc/modules/video_processing/include/video_processing.h b/webrtc/modules/video_processing/include/video_processing.h
new file mode 100644
index 0000000000..a8d6358887
--- /dev/null
+++ b/webrtc/modules/video_processing/include/video_processing.h
@@ -0,0 +1,102 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_INCLUDE_VIDEO_PROCESSING_H_
+#define WEBRTC_MODULES_VIDEO_PROCESSING_INCLUDE_VIDEO_PROCESSING_H_
+
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/video_processing/include/video_processing_defines.h"
+#include "webrtc/video_frame.h"
+
+// The module is largely intended to process video streams, except for the
+// functionality provided by static functions, which operates independently of
+// previous frames. It is recommended, but not required, that a unique instance
+// be used for each concurrently processed stream. Similarly, it is recommended
+// to call Reset() before switching to a new stream, but this is not absolutely
+// required.
+//
+// The module provides basic thread safety by permitting only a single function
+// to execute concurrently.
+
+namespace webrtc {
+
+class VideoProcessing {
+ public:
+ struct FrameStats {
+ uint32_t hist[256]; // Frame histogram.
+ uint32_t mean;
+ uint32_t sum;
+ uint32_t num_pixels;
+ uint32_t sub_sampling_factor; // Sub-sampling factor, in powers of 2.
+ };
+
+ enum BrightnessWarning { kNoWarning, kDarkWarning, kBrightWarning };
+
+ static VideoProcessing* Create();
+ virtual ~VideoProcessing() {}
+
+ // Retrieves statistics for the input frame. This function must be used to
+ // prepare a FrameStats struct for use in certain VPM functions.
+ static void GetFrameStats(const VideoFrame& frame, FrameStats* stats);
+
+ // Checks the validity of a FrameStats struct. Currently, valid implies only
+ // that it has changed from its initialized state.
+ static bool ValidFrameStats(const FrameStats& stats);
+
+ static void ClearFrameStats(FrameStats* stats);
+
+ // Increases/decreases the luminance value. 'delta' can be in the range {}
+ static void Brighten(int delta, VideoFrame* frame);
+
+ // Detects and removes camera flicker from a video stream. Every frame from
+ // the stream must be passed in. A frame will only be altered if flicker has
+ // been detected. Has a fixed-point implementation.
+ // Frame statistics provided by GetFrameStats(). On return the stats will
+ // be reset to zero if the frame was altered. Call GetFrameStats() again
+ // if the statistics for the altered frame are required.
+ virtual int32_t Deflickering(VideoFrame* frame, FrameStats* stats) = 0;
+
+ // Detects if a video frame is excessively bright or dark. Returns a
+ // warning if this is the case. Multiple frames should be passed in before
+ // expecting a warning. Has a floating-point implementation.
+ virtual int32_t BrightnessDetection(const VideoFrame& frame,
+ const FrameStats& stats) = 0;
+
+ // The following functions refer to the pre-processor unit within VPM. The
+ // pre-processor performs spatial/temporal decimation and content analysis on
+ // the frames prior to encoding.
+
+ // Enable/disable temporal decimation
+ virtual void EnableTemporalDecimation(bool enable) = 0;
+
+ virtual int32_t SetTargetResolution(uint32_t width,
+ uint32_t height,
+ uint32_t frame_rate) = 0;
+
+ virtual void SetTargetFramerate(int frame_rate) = 0;
+
+ virtual uint32_t GetDecimatedFrameRate() = 0;
+ virtual uint32_t GetDecimatedWidth() const = 0;
+ virtual uint32_t GetDecimatedHeight() const = 0;
+
+ // Set the spatial resampling settings of the VPM according to
+ // VideoFrameResampling.
+ virtual void SetInputFrameResampleMode(
+ VideoFrameResampling resampling_mode) = 0;
+
+ virtual void EnableDenosing(bool enable) = 0;
+ virtual const VideoFrame* PreprocessFrame(const VideoFrame& frame) = 0;
+
+ virtual VideoContentMetrics* GetContentMetrics() const = 0;
+ virtual void EnableContentAnalysis(bool enable) = 0;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_PROCESSING_INCLUDE_VIDEO_PROCESSING_H_
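
Per the comment on GetFrameStats() above, a FrameStats struct must be
prepared before functions such as Deflickering() or BrightnessDetection()
can use it. A minimal sketch of how such a struct relates the histogram,
sum, pixel count, and mean; the field names mirror
VideoProcessing::FrameStats, while the flat 8x8 plane and the absence of
sub-sampling are illustrative simplifications:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

struct Stats {
  uint32_t hist[256];
  uint32_t mean;
  uint32_t sum;
  uint32_t num_pixels;
};

int main() {
  uint8_t plane[64];
  memset(plane, 128, sizeof(plane));  // Mid-gray frame.
  Stats s;
  memset(&s, 0, sizeof(s));
  for (int i = 0; i < 64; ++i) {
    s.hist[plane[i]]++;   // One histogram bin per luma value.
    s.sum += plane[i];
    s.num_pixels++;
  }
  s.mean = s.sum / s.num_pixels;
  printf("mean=%u sum=%u num_pixels=%u\n", s.mean, s.sum, s.num_pixels);
  return 0;  // Prints mean=128 sum=8192 num_pixels=64.
}
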
diff --git a/webrtc/modules/video_processing/include/video_processing_defines.h b/webrtc/modules/video_processing/include/video_processing_defines.h
new file mode 100644
index 0000000000..9cc71bde27
--- /dev/null
+++ b/webrtc/modules/video_processing/include/video_processing_defines.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * video_processing_defines.h
+ * This header file includes the definitions used in the video processor module
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_INCLUDE_VIDEO_PROCESSING_DEFINES_H_
+#define WEBRTC_MODULES_VIDEO_PROCESSING_INCLUDE_VIDEO_PROCESSING_DEFINES_H_
+
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// Error codes
+#define VPM_OK 0
+#define VPM_GENERAL_ERROR -1
+#define VPM_MEMORY -2
+#define VPM_PARAMETER_ERROR -3
+#define VPM_SCALE_ERROR -4
+#define VPM_UNINITIALIZED -5
+#define VPM_UNIMPLEMENTED -6
+
+enum VideoFrameResampling {
+ kNoRescaling, // Disables rescaling.
+ kFastRescaling, // Point filter.
+ kBiLinear, // Bi-linear interpolation.
+ kBox, // Box interpolation.
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_PROCESSING_INCLUDE_VIDEO_PROCESSING_DEFINES_H_
diff --git a/webrtc/modules/video_processing/main/interface/video_processing.h b/webrtc/modules/video_processing/main/interface/video_processing.h
deleted file mode 100644
index 30af99fb8e..0000000000
--- a/webrtc/modules/video_processing/main/interface/video_processing.h
+++ /dev/null
@@ -1,270 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * video_processing.h
- * This header file contains the API required for the video
- * processing module class.
- */
-
-
-#ifndef WEBRTC_MODULES_INTERFACE_VIDEO_PROCESSING_H
-#define WEBRTC_MODULES_INTERFACE_VIDEO_PROCESSING_H
-
-#include "webrtc/modules/interface/module.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/video_processing/main/interface/video_processing_defines.h"
-#include "webrtc/video_frame.h"
-
-/**
- The module is largely intended to process video streams, except functionality
- provided by static functions which operate independent of previous frames. It
- is recommended, but not required that a unique instance be used for each
- concurrently processed stream. Similarly, it is recommended to call Reset()
- before switching to a new stream, but this is not absolutely required.
-
- The module provides basic thread safety by permitting only a single function
- to execute concurrently.
-*/
-
-namespace webrtc {
-
-class VideoProcessingModule : public Module {
- public:
- /**
- Structure to hold frame statistics. Populate it with GetFrameStats().
- */
- struct FrameStats {
- FrameStats() :
- mean(0),
- sum(0),
- num_pixels(0),
- subSamplWidth(0),
- subSamplHeight(0) {
- memset(hist, 0, sizeof(hist));
- }
-
- uint32_t hist[256]; // FRame histogram.
- uint32_t mean; // Frame Mean value.
- uint32_t sum; // Sum of frame.
- uint32_t num_pixels; // Number of pixels.
- uint8_t subSamplWidth; // Subsampling rate of width in powers of 2.
- uint8_t subSamplHeight; // Subsampling rate of height in powers of 2.
-};
-
- /**
- Specifies the warning types returned by BrightnessDetection().
- */
- enum BrightnessWarning {
- kNoWarning, // Frame has acceptable brightness.
- kDarkWarning, // Frame is too dark.
- kBrightWarning // Frame is too bright.
- };
-
- /*
- Creates a VPM object.
-
- \param[in] id
- Unique identifier of this object.
-
- \return Pointer to a VPM object.
- */
- static VideoProcessingModule* Create();
-
- /**
- Destroys a VPM object.
-
- \param[in] module
- Pointer to the VPM object to destroy.
- */
- static void Destroy(VideoProcessingModule* module);
-
- /**
- Not supported.
- */
- int64_t TimeUntilNextProcess() override { return -1; }
-
- /**
- Not supported.
- */
- int32_t Process() override { return -1; }
-
- /**
- Resets all processing components to their initial states. This should be
- called whenever a new video stream is started.
- */
- virtual void Reset() = 0;
-
- /**
- Retrieves statistics for the input frame. This function must be used to
- prepare a FrameStats struct for use in certain VPM functions.
-
- \param[out] stats
- The frame statistics will be stored here on return.
-
- \param[in] frame
- Reference to the video frame.
-
- \return 0 on success, -1 on failure.
- */
- static int32_t GetFrameStats(FrameStats* stats, const VideoFrame& frame);
-
- /**
- Checks the validity of a FrameStats struct. Currently, valid implies only
- that the struct has changed from its initialized state.
-
- \param[in] stats
- Frame statistics.
-
- \return True on valid stats, false on invalid stats.
- */
- static bool ValidFrameStats(const FrameStats& stats);
-
- /**
- Returns a FrameStats struct to its initialized state.
-
- \param[in,out] stats
- Frame statistics.
- */
- static void ClearFrameStats(FrameStats* stats);
-
- /**
- Increases/decreases the luminance value.
-
- \param[in,out] frame
- Pointer to the video frame.
-
- \param[in] delta
- The amount to change the luminance value of every single pixel.
- May also be negative.
-
- \return 0 on success, -1 on failure.
- */
- static int32_t Brighten(VideoFrame* frame, int delta);
-
- /**
- Detects and removes camera flicker from a video stream. Every frame from
- the stream must be passed in. A frame will only be altered if flicker has
- been detected. Has a fixed-point implementation.
-
- \param[in,out] frame
- Pointer to the video frame.
-
- \param[in,out] stats
- Frame statistics provided by GetFrameStats(). On return the stats will
- be reset to zero if the frame was altered. Call GetFrameStats() again
- if the statistics for the altered frame are required.
-
- \return 0 on success, -1 on failure.
- */
- virtual int32_t Deflickering(VideoFrame* frame, FrameStats* stats) = 0;
-
- /**
- Detects if a video frame is excessively bright or dark. Returns a
- warning if this is the case. Multiple frames should be passed in before
- expecting a warning. Has a floating-point implementation.
-
- \param[in] frame
- Pointer to the video frame.
-
- \param[in] stats
- Frame statistics provided by GetFrameStats().
-
- \return A member of BrightnessWarning on success, -1 on error.
- */
- virtual int32_t BrightnessDetection(const VideoFrame& frame,
- const FrameStats& stats) = 0;
-
- /**
- The following functions refer to the pre-processor unit within VPM. The
- pre-processor performs spatial/temporal decimation and content analysis on
- the frames prior to encoding.
- */
-
- /**
- Enable/disable temporal decimation.
-
- \param[in] enable  When true, temporal decimation is enabled.
- */
- virtual void EnableTemporalDecimation(bool enable) = 0;
-
- /**
- Set target resolution.
-
- \param[in] width
- Target width.
-
- \param[in] height
- Target height.
-
- \param[in] frame_rate
- Target frame rate.
-
- \return VPM_OK on success, a negative value on error (see error codes).
-
- */
- virtual int32_t SetTargetResolution(uint32_t width,
- uint32_t height,
- uint32_t frame_rate) = 0;
-
- virtual void SetTargetFramerate(int frame_rate) {}
-
- /**
- Get decimated (target) frame rate.
- */
- virtual uint32_t Decimatedframe_rate() = 0;
-
- /**
- Get decimated (target) frame width.
- */
- virtual uint32_t DecimatedWidth() const = 0;
-
- /**
- Get decimated (target) frame height.
- */
- virtual uint32_t DecimatedHeight() const = 0;
-
- /**
- Set the spatial resampling settings of the VPM. The resampler may either be
- disabled or set to one of the following modes:
- scaling to a dimension close to the target, followed by crop/pad.
-
- \param[in] resampling_mode
- Set resampling mode (a member of VideoFrameResampling)
- */
- virtual void SetInputFrameResampleMode(VideoFrameResampling
- resampling_mode) = 0;
-
- /**
- Get the processed (decimated) frame.
-
- \param[in] frame  Reference to the video frame.
- \param[out] processed_frame  Double pointer to the processed frame. If no
- processing is required, *processed_frame will be NULL.
-
- \return VPM_OK on success, a negative value on error (see error codes)
- */
- virtual int32_t PreprocessFrame(const VideoFrame& frame,
- VideoFrame** processed_frame) = 0;
-
- /**
- Return content metrics for the last processed frame.
- */
- virtual VideoContentMetrics* ContentMetrics() const = 0;
-
- /**
- Enable/disable content analysis.
- */
- virtual void EnableContentAnalysis(bool enable) = 0;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_INTERFACE_VIDEO_PROCESSING_H
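
For reference, the interface deleted above was driven roughly as follows. This is a minimal usage sketch, not part of the deleted file; it assumes the (since-removed) implementation is still linked and that |frame| points at a populated webrtc::VideoFrame.

// Minimal sketch, assuming the removed module is still linked.
#include "webrtc/modules/video_processing/main/interface/video_processing.h"

void SketchProcessStream(webrtc::VideoFrame* frame) {  // |frame| assumed valid.
  webrtc::VideoProcessingModule* vpm = webrtc::VideoProcessingModule::Create();

  webrtc::VideoProcessingModule::FrameStats stats;
  if (webrtc::VideoProcessingModule::GetFrameStats(&stats, *frame) == 0 &&
      webrtc::VideoProcessingModule::ValidFrameStats(stats)) {
    vpm->Deflickering(frame, &stats);  // May alter |frame| and reset |stats|.
    // kNoWarning, kDarkWarning or kBrightWarning on success, -1 on error.
    int32_t warning = vpm->BrightnessDetection(*frame, stats);
    (void)warning;
  }

  vpm->Reset();  // Recommended before switching to a new stream.
  webrtc::VideoProcessingModule::Destroy(vpm);
}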
diff --git a/webrtc/modules/video_processing/main/interface/video_processing_defines.h b/webrtc/modules/video_processing/main/interface/video_processing_defines.h
deleted file mode 100644
index 93a0658966..0000000000
--- a/webrtc/modules/video_processing/main/interface/video_processing_defines.h
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * video_processing_defines.h
- * This header file includes the definitions used in the video processing module
- */
-
-#ifndef WEBRTC_MODULES_INTERFACE_VIDEO_PROCESSING_DEFINES_H
-#define WEBRTC_MODULES_INTERFACE_VIDEO_PROCESSING_DEFINES_H
-
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-// Error codes
-#define VPM_OK 0
-#define VPM_GENERAL_ERROR -1
-#define VPM_MEMORY -2
-#define VPM_PARAMETER_ERROR -3
-#define VPM_SCALE_ERROR -4
-#define VPM_UNINITIALIZED -5
-#define VPM_UNIMPLEMENTED -6
-
-enum VideoFrameResampling {
- kNoRescaling, // Disables rescaling.
- kFastRescaling, // Point filter.
- kBiLinear, // Bi-linear interpolation.
- kBox, // Box interpolation.
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_INTERFACE_VIDEO_PROCESSING_DEFINES_H
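
Not part of the deleted file: one way a caller might have mapped the VPM_* codes above to readable strings, sketched here for illustration only.

// Illustrative sketch; assumes video_processing_defines.h (above) is included.
#include <stdint.h>

const char* VpmErrorToString(int32_t code) {
  switch (code) {
    case VPM_OK:              return "ok";
    case VPM_GENERAL_ERROR:   return "general error";
    case VPM_MEMORY:          return "out of memory";
    case VPM_PARAMETER_ERROR: return "bad parameter";
    case VPM_SCALE_ERROR:     return "scaling failed";
    case VPM_UNINITIALIZED:   return "uninitialized";
    case VPM_UNIMPLEMENTED:   return "unimplemented";
    default:                  return "unknown";
  }
}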
diff --git a/webrtc/modules/video_processing/main/source/OWNERS b/webrtc/modules/video_processing/main/source/OWNERS
deleted file mode 100644
index 3ee6b4bf5f..0000000000
--- a/webrtc/modules/video_processing/main/source/OWNERS
+++ /dev/null
@@ -1,5 +0,0 @@
-
-# These are for the common case of adding or renaming files. If you're doing
-# structural changes, please get a review from a reviewer in this file.
-per-file *.gyp=*
-per-file *.gypi=*
diff --git a/webrtc/modules/video_processing/main/source/brighten.cc b/webrtc/modules/video_processing/main/source/brighten.cc
deleted file mode 100644
index 1fe813e7b0..0000000000
--- a/webrtc/modules/video_processing/main/source/brighten.cc
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_processing/main/source/brighten.h"
-
-#include <stdlib.h>
-
-namespace webrtc {
-namespace VideoProcessing {
-
-int32_t Brighten(VideoFrame* frame, int delta) {
- assert(frame);
- if (frame->IsZeroSize()) {
- return VPM_PARAMETER_ERROR;
- }
- if (frame->width() <= 0 || frame->height() <= 0) {
- return VPM_PARAMETER_ERROR;
- }
-
- int num_pixels = frame->width() * frame->height();
-
- int look_up[256];
- for (int i = 0; i < 256; i++) {
- int val = i + delta;
- look_up[i] = (val < 0) ? 0 : ((val > 255) ? 255 : val);  // Clamp to [0, 255].
- }
-
- uint8_t* temp_ptr = frame->buffer(kYPlane);
-
- for (int i = 0; i < num_pixels; i++) {
- *temp_ptr = static_cast<uint8_t>(look_up[*temp_ptr]);
- temp_ptr++;
- }
- return VPM_OK;
-}
-
-} // namespace VideoProcessing
-} // namespace webrtc
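
The lookup table above trades 256 precomputed clamps for one table read per pixel. (Note the clamp expression has been corrected above: the original fell through to the unclamped value for negative inputs.) A standalone restatement with boundary values worked out, as a sketch that is not part of the file:

#include <algorithm>
#include <cstdint>

// Sketch: the per-pixel clamp from the table above, in scalar form.
uint8_t BrightenPixel(uint8_t y, int delta) {
  return static_cast<uint8_t>(std::min(255, std::max(0, y + delta)));
}
// BrightenPixel(230, 40) == 255 (clamped up); BrightenPixel(20, -40) == 0.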
diff --git a/webrtc/modules/video_processing/main/source/brighten.h b/webrtc/modules/video_processing/main/source/brighten.h
deleted file mode 100644
index 151d7a3b51..0000000000
--- a/webrtc/modules/video_processing/main/source/brighten.h
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef MODULES_VIDEO_PROCESSING_MAIN_SOURCE_BRIGHTEN_H_
-#define MODULES_VIDEO_PROCESSING_MAIN_SOURCE_BRIGHTEN_H_
-
-#include "webrtc/modules/video_processing/main/interface/video_processing.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-namespace VideoProcessing {
-
-int32_t Brighten(VideoFrame* frame, int delta);
-
-} // namespace VideoProcessing
-} // namespace webrtc
-
-#endif // MODULES_VIDEO_PROCESSING_MAIN_SOURCE_BRIGHTEN_H_
diff --git a/webrtc/modules/video_processing/main/source/brightness_detection.cc b/webrtc/modules/video_processing/main/source/brightness_detection.cc
deleted file mode 100644
index bae225b3b0..0000000000
--- a/webrtc/modules/video_processing/main/source/brightness_detection.cc
+++ /dev/null
@@ -1,133 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_processing/main/interface/video_processing.h"
-#include "webrtc/modules/video_processing/main/source/brightness_detection.h"
-
-#include <math.h>
-
-namespace webrtc {
-
-VPMBrightnessDetection::VPMBrightnessDetection() {
- Reset();
-}
-
-VPMBrightnessDetection::~VPMBrightnessDetection() {}
-
-void VPMBrightnessDetection::Reset() {
- frame_cnt_bright_ = 0;
- frame_cnt_dark_ = 0;
-}
-
-int32_t VPMBrightnessDetection::ProcessFrame(
- const VideoFrame& frame,
- const VideoProcessingModule::FrameStats& stats) {
- if (frame.IsZeroSize()) {
- return VPM_PARAMETER_ERROR;
- }
- int width = frame.width();
- int height = frame.height();
-
- if (!VideoProcessingModule::ValidFrameStats(stats)) {
- return VPM_PARAMETER_ERROR;
- }
-
- const uint8_t frame_cnt_alarm = 2;
-
- // Get proportion in lowest bins.
- uint8_t low_th = 20;
- float prop_low = 0;
- for (uint32_t i = 0; i < low_th; i++) {
- prop_low += stats.hist[i];
- }
- prop_low /= stats.num_pixels;
-
- // Get proportion in highest bins.
- uint8_t high_th = 230;
- float prop_high = 0;
- for (uint32_t i = high_th; i < 256; i++) {
- prop_high += stats.hist[i];
- }
- prop_high /= stats.num_pixels;
-
- if (prop_high < 0.4) {
- if (stats.mean < 90 || stats.mean > 170) {
- // Standard deviation of Y
- const uint8_t* buffer = frame.buffer(kYPlane);
- float std_y = 0;
- for (int h = 0; h < height; h += (1 << stats.subSamplHeight)) {
- int row = h*width;
- for (int w = 0; w < width; w += (1 << stats.subSamplWidth)) {
- std_y += (buffer[w + row] - stats.mean) * (buffer[w + row] -
- stats.mean);
- }
- }
- std_y = sqrt(std_y / stats.num_pixels);
-
- // Get percentiles.
- uint32_t sum = 0;
- uint32_t median_y = 140;
- uint32_t perc05 = 0;
- uint32_t perc95 = 255;
- float pos_perc05 = stats.num_pixels * 0.05f;
- float pos_median = stats.num_pixels * 0.5f;
- float pos_perc95 = stats.num_pixels * 0.95f;
- for (uint32_t i = 0; i < 256; i++) {
- sum += stats.hist[i];
- if (sum < pos_perc05) perc05 = i;  // 5th percentile.
- if (sum < pos_median) median_y = i;  // 50th percentile.
- if (sum < pos_perc95)
- perc95 = i;  // 95th percentile.
- else
- break;
- }
-
- // Check if image is too dark
- if ((std_y < 55) && (perc05 < 50)) {
- if (median_y < 60 || stats.mean < 80 || perc95 < 130 ||
- prop_low > 0.20) {
- frame_cnt_dark_++;
- } else {
- frame_cnt_dark_ = 0;
- }
- } else {
- frame_cnt_dark_ = 0;
- }
-
- // Check if image is too bright
- if ((std_y < 52) && (perc95 > 200) && (median_y > 160)) {
- if (median_y > 185 || stats.mean > 185 || perc05 > 140 ||
- prop_high > 0.25) {
- frame_cnt_bright_++;
- } else {
- frame_cnt_bright_ = 0;
- }
- } else {
- frame_cnt_bright_ = 0;
- }
- } else {
- frame_cnt_dark_ = 0;
- frame_cnt_bright_ = 0;
- }
- } else {
- frame_cnt_bright_++;
- frame_cnt_dark_ = 0;
- }
-
- if (frame_cnt_dark_ > frame_cnt_alarm) {
- return VideoProcessingModule::kDarkWarning;
- } else if (frame_cnt_bright_ > frame_cnt_alarm) {
- return VideoProcessingModule::kBrightWarning;
- } else {
- return VideoProcessingModule::kNoWarning;
- }
-}
-
-} // namespace webrtc
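
The percentile extraction above is a single cumulative pass over the 256-bin luminance histogram. Restated standalone as a sketch that is not part of the file (names are illustrative):

#include <cstdint>

struct Percentiles { uint32_t p05, p50, p95; };

// Sketch: mirrors the cumulative scan in ProcessFrame() above, including
// its default values (median 140, 95th percentile 255).
Percentiles FromHistogram(const uint32_t hist[256], uint32_t num_pixels) {
  Percentiles p = {0, 140, 255};
  uint32_t sum = 0;
  const float pos05 = num_pixels * 0.05f;
  const float pos50 = num_pixels * 0.5f;
  const float pos95 = num_pixels * 0.95f;
  for (uint32_t i = 0; i < 256; ++i) {
    sum += hist[i];
    if (sum < pos05) p.p05 = i;
    if (sum < pos50) p.p50 = i;
    if (sum < pos95)
      p.p95 = i;
    else
      break;  // Past the 95th percentile; later bins cannot lower it.
  }
  return p;
}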
diff --git a/webrtc/modules/video_processing/main/source/brightness_detection.h b/webrtc/modules/video_processing/main/source/brightness_detection.h
deleted file mode 100644
index 48532b4a20..0000000000
--- a/webrtc/modules/video_processing/main/source/brightness_detection.h
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * brightness_detection.h
- */
-#ifndef MODULES_VIDEO_PROCESSING_MAIN_SOURCE_BRIGHTNESS_DETECTION_H
-#define MODULES_VIDEO_PROCESSING_MAIN_SOURCE_BRIGHTNESS_DETECTION_H
-#include "webrtc/modules/video_processing/main/interface/video_processing.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-class VPMBrightnessDetection {
- public:
- VPMBrightnessDetection();
- ~VPMBrightnessDetection();
-
- void Reset();
- int32_t ProcessFrame(const VideoFrame& frame,
- const VideoProcessingModule::FrameStats& stats);
-
- private:
- uint32_t frame_cnt_bright_;
- uint32_t frame_cnt_dark_;
-};
-
-} // namespace webrtc
-
-#endif // MODULES_VIDEO_PROCESSING_MAIN_SOURCE_BRIGHTNESS_DETECTION_H
diff --git a/webrtc/modules/video_processing/main/source/content_analysis.cc b/webrtc/modules/video_processing/main/source/content_analysis.cc
deleted file mode 100644
index d29db27408..0000000000
--- a/webrtc/modules/video_processing/main/source/content_analysis.cc
+++ /dev/null
@@ -1,274 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-#include "webrtc/modules/video_processing/main/source/content_analysis.h"
-
-#include <math.h>
-#include <stdlib.h>
-
-#include "webrtc/system_wrappers/include/cpu_features_wrapper.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
-
-namespace webrtc {
-
-VPMContentAnalysis::VPMContentAnalysis(bool runtime_cpu_detection)
- : orig_frame_(NULL),
- prev_frame_(NULL),
- width_(0),
- height_(0),
- skip_num_(1),
- border_(8),
- motion_magnitude_(0.0f),
- spatial_pred_err_(0.0f),
- spatial_pred_err_h_(0.0f),
- spatial_pred_err_v_(0.0f),
- first_frame_(true),
- ca_Init_(false),
- content_metrics_(NULL) {
- ComputeSpatialMetrics = &VPMContentAnalysis::ComputeSpatialMetrics_C;
- TemporalDiffMetric = &VPMContentAnalysis::TemporalDiffMetric_C;
-
- if (runtime_cpu_detection) {
-#if defined(WEBRTC_ARCH_X86_FAMILY)
- if (WebRtc_GetCPUInfo(kSSE2)) {
- ComputeSpatialMetrics = &VPMContentAnalysis::ComputeSpatialMetrics_SSE2;
- TemporalDiffMetric = &VPMContentAnalysis::TemporalDiffMetric_SSE2;
- }
-#endif
- }
- Release();
-}
-
-VPMContentAnalysis::~VPMContentAnalysis() {
- Release();
-}
-
-VideoContentMetrics* VPMContentAnalysis::ComputeContentMetrics(
- const VideoFrame& inputFrame) {
- if (inputFrame.IsZeroSize())
- return NULL;
-
- // Init if needed (native dimension change).
- if (width_ != inputFrame.width() || height_ != inputFrame.height()) {
- if (VPM_OK != Initialize(inputFrame.width(), inputFrame.height()))
- return NULL;
- }
- // Only interested in the Y plane.
- orig_frame_ = inputFrame.buffer(kYPlane);
-
- // Compute spatial metrics: 3 spatial prediction errors.
- (this->*ComputeSpatialMetrics)();
-
- // Compute motion metrics
- if (first_frame_ == false)
- ComputeMotionMetrics();
-
- // Saving current frame as previous one: Y only.
- memcpy(prev_frame_, orig_frame_, width_ * height_);
-
- first_frame_ = false;
- ca_Init_ = true;
-
- return ContentMetrics();
-}
-
-int32_t VPMContentAnalysis::Release() {
- if (content_metrics_ != NULL) {
- delete content_metrics_;
- content_metrics_ = NULL;
- }
-
- if (prev_frame_ != NULL) {
- delete [] prev_frame_;
- prev_frame_ = NULL;
- }
-
- width_ = 0;
- height_ = 0;
- first_frame_ = true;
-
- return VPM_OK;
-}
-
-int32_t VPMContentAnalysis::Initialize(int width, int height) {
- width_ = width;
- height_ = height;
- first_frame_ = true;
-
- // Skip parameter: number of skipped rows, for complexity reduction. The
- // temporal metric currently also uses it for column reduction.
- skip_num_ = 1;
-
- // Use skip_num_ = 2 for 4CIF and WHD.
- if ((height_ >= 576) && (width_ >= 704)) {
- skip_num_ = 2;
- }
- // Use skip_num_ = 4 for full-HD images.
- if ((height_ >= 1080) && (width_ >= 1920)) {
- skip_num_ = 4;
- }
-
- if (content_metrics_ != NULL) {
- delete content_metrics_;
- }
-
- if (prev_frame_ != NULL) {
- delete [] prev_frame_;
- }
-
- // The spatial metrics do not work within a border of 8 pixels; the minimum
- // processing block size is 16 pixels, so make sure the width and height
- // support this.
- if (width_ <= 32 || height_ <= 32) {
- ca_Init_ = false;
- return VPM_PARAMETER_ERROR;
- }
-
- content_metrics_ = new VideoContentMetrics();
- if (content_metrics_ == NULL) {
- return VPM_MEMORY;
- }
-
- prev_frame_ = new uint8_t[width_ * height_]; // Y only.
- if (prev_frame_ == NULL) return VPM_MEMORY;
-
- return VPM_OK;
-}
-
-
-// Compute motion metrics: magnitude over non-zero motion vectors,
-// and size of zero cluster
-int32_t VPMContentAnalysis::ComputeMotionMetrics() {
- // Motion metrics: only one is derived from normalized
- // (MAD) temporal difference
- (this->*TemporalDiffMetric)();
- return VPM_OK;
-}
-
-// Normalized temporal difference (MAD): used as a motion level metric
-// Normalize MAD by spatial contrast: images with more contrast
-// (pixel variance) likely have larger temporal difference
-// To reduce complexity, we compute the metric for a reduced set of points.
-int32_t VPMContentAnalysis::TemporalDiffMetric_C() {
- // size of original frame
- int sizei = height_;
- int sizej = width_;
- uint32_t tempDiffSum = 0;
- uint32_t pixelSum = 0;
- uint64_t pixelSqSum = 0;
-
- uint32_t num_pixels = 0; // Counter for # of pixels.
- const int width_end = ((width_ - 2*border_) & -16) + border_;
-
- for (int i = border_; i < sizei - border_; i += skip_num_) {
- for (int j = border_; j < width_end; j++) {
- num_pixels += 1;
- int ssn = i * sizej + j;
-
- uint8_t currPixel = orig_frame_[ssn];
- uint8_t prevPixel = prev_frame_[ssn];
-
- tempDiffSum += (uint32_t)abs((int16_t)(currPixel - prevPixel));
- pixelSum += (uint32_t) currPixel;
- pixelSqSum += (uint64_t) (currPixel * currPixel);
- }
- }
-
- // Default.
- motion_magnitude_ = 0.0f;
-
- if (tempDiffSum == 0) return VPM_OK;
-
- // Normalize over all pixels.
- float const tempDiffAvg = (float)tempDiffSum / (float)(num_pixels);
- float const pixelSumAvg = (float)pixelSum / (float)(num_pixels);
- float const pixelSqSumAvg = (float)pixelSqSum / (float)(num_pixels);
- float contrast = pixelSqSumAvg - (pixelSumAvg * pixelSumAvg);
-
- if (contrast > 0.0) {
- contrast = sqrt(contrast);
- motion_magnitude_ = tempDiffAvg/contrast;
- }
- return VPM_OK;
-}
-
-// Compute spatial metrics:
-// To reduce complexity, we compute the metric for a reduced set of points.
-// The spatial metrics are rough estimates of the prediction error cost for
-// each QM spatial mode: 2x2,1x2,2x1
-// The metrics are a simple estimate of the up-sampling prediction error,
-// estimated assuming sub-sampling for decimation (no filtering),
-// and up-sampling back up with simple bilinear interpolation.
-int32_t VPMContentAnalysis::ComputeSpatialMetrics_C() {
- const int sizei = height_;
- const int sizej = width_;
-
- // Pixel mean square average: used to normalize the spatial metrics.
- uint32_t pixelMSA = 0;
-
- uint32_t spatialErrSum = 0;
- uint32_t spatialErrVSum = 0;
- uint32_t spatialErrHSum = 0;
-
- // Make sure the work section is a multiple of 16.
- const int width_end = ((sizej - 2*border_) & -16) + border_;
-
- for (int i = border_; i < sizei - border_; i += skip_num_) {
- for (int j = border_; j < width_end; j++) {
- int ssn1= i * sizej + j;
- int ssn2 = (i + 1) * sizej + j; // bottom
- int ssn3 = (i - 1) * sizej + j; // top
- int ssn4 = i * sizej + j + 1; // right
- int ssn5 = i * sizej + j - 1; // left
-
- uint16_t refPixel1 = orig_frame_[ssn1] << 1;
- uint16_t refPixel2 = orig_frame_[ssn1] << 2;
-
- uint8_t bottPixel = orig_frame_[ssn2];
- uint8_t topPixel = orig_frame_[ssn3];
- uint8_t rightPixel = orig_frame_[ssn4];
- uint8_t leftPixel = orig_frame_[ssn5];
-
- spatialErrSum += (uint32_t) abs((int16_t)(refPixel2
- - (uint16_t)(bottPixel + topPixel + leftPixel + rightPixel)));
- spatialErrVSum += (uint32_t) abs((int16_t)(refPixel1
- - (uint16_t)(bottPixel + topPixel)));
- spatialErrHSum += (uint32_t) abs((int16_t)(refPixel1
- - (uint16_t)(leftPixel + rightPixel)));
- pixelMSA += orig_frame_[ssn1];
- }
- }
-
- // Normalize over all pixels.
- const float spatialErr = (float)(spatialErrSum >> 2);
- const float spatialErrH = (float)(spatialErrHSum >> 1);
- const float spatialErrV = (float)(spatialErrVSum >> 1);
- const float norm = (float)pixelMSA;
-
- // 2X2:
- spatial_pred_err_ = spatialErr / norm;
- // 1X2:
- spatial_pred_err_h_ = spatialErrH / norm;
- // 2X1:
- spatial_pred_err_v_ = spatialErrV / norm;
- return VPM_OK;
-}
-
-VideoContentMetrics* VPMContentAnalysis::ContentMetrics() {
- if (ca_Init_ == false) return NULL;
-
- content_metrics_->spatial_pred_err = spatial_pred_err_;
- content_metrics_->spatial_pred_err_h = spatial_pred_err_h_;
- content_metrics_->spatial_pred_err_v = spatial_pred_err_v_;
- // Motion metric: normalized temporal difference (MAD).
- content_metrics_->motion_magnitude = motion_magnitude_;
-
- return content_metrics_;
-}
-
-} // namespace webrtc
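
Numerically, TemporalDiffMetric_C above reduces to normalizing the mean absolute difference (MAD) by the pixel standard deviation (contrast). A scalar restatement, not in the file:

#include <cmath>
#include <cstdint>

// Sketch: motion magnitude = MAD / contrast, with
// contrast = sqrt(E[x^2] - E[x]^2) over the sampled pixels.
float MotionMagnitude(uint32_t temp_diff_sum, uint32_t pixel_sum,
                      uint64_t pixel_sq_sum, uint32_t num_pixels) {
  if (num_pixels == 0 || temp_diff_sum == 0) return 0.0f;
  const float mad = static_cast<float>(temp_diff_sum) / num_pixels;
  const float mean = static_cast<float>(pixel_sum) / num_pixels;
  const float mean_sq = static_cast<float>(pixel_sq_sum) / num_pixels;
  const float variance = mean_sq - mean * mean;
  return (variance > 0.0f) ? mad / std::sqrt(variance) : 0.0f;
}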
diff --git a/webrtc/modules/video_processing/main/source/content_analysis.h b/webrtc/modules/video_processing/main/source/content_analysis.h
deleted file mode 100644
index 510c1b4a55..0000000000
--- a/webrtc/modules/video_processing/main/source/content_analysis.h
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_CONTENT_ANALYSIS_H
-#define WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_CONTENT_ANALYSIS_H
-
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/video_processing/main/interface/video_processing_defines.h"
-#include "webrtc/typedefs.h"
-#include "webrtc/video_frame.h"
-
-namespace webrtc {
-
-class VPMContentAnalysis {
- public:
- // When |runtime_cpu_detection| is true, runtime selection of an optimized
- // code path is allowed.
- explicit VPMContentAnalysis(bool runtime_cpu_detection);
- ~VPMContentAnalysis();
-
- // Initialize ContentAnalysis - should be called prior to
- // ComputeContentMetrics().
- // Inputs: width, height
- // Return value: 0 if OK, negative value upon error
- int32_t Initialize(int width, int height);
-
- // Extract content features - the main function of ContentAnalysis.
- // Input: new frame
- // Return value: pointer to a structure containing the content analysis
- //               metrics, or NULL upon error
- VideoContentMetrics* ComputeContentMetrics(const VideoFrame& inputFrame);
-
- // Release all allocated memory.
- // Return value: 0 if OK, negative value upon error
- int32_t Release();
-
- private:
- // return motion metrics
- VideoContentMetrics* ContentMetrics();
-
- // Normalized temporal difference metric: for motion magnitude
- typedef int32_t (VPMContentAnalysis::*TemporalDiffMetricFunc)();
- TemporalDiffMetricFunc TemporalDiffMetric;
- int32_t TemporalDiffMetric_C();
-
- // Motion metric method: call 2 metrics (magnitude and size)
- int32_t ComputeMotionMetrics();
-
- // Spatial metric method: computes the 3 frame-average spatial
- // prediction errors (1x2,2x1,2x2)
- typedef int32_t (VPMContentAnalysis::*ComputeSpatialMetricsFunc)();
- ComputeSpatialMetricsFunc ComputeSpatialMetrics;
- int32_t ComputeSpatialMetrics_C();
-
-#if defined(WEBRTC_ARCH_X86_FAMILY)
- int32_t ComputeSpatialMetrics_SSE2();
- int32_t TemporalDiffMetric_SSE2();
-#endif
-
- const uint8_t* orig_frame_;
- uint8_t* prev_frame_;
- int width_;
- int height_;
- int skip_num_;
- int border_;
-
- // Content Metrics: Stores the local average of the metrics.
- float motion_magnitude_; // motion class
- float spatial_pred_err_; // spatial class
- float spatial_pred_err_h_; // spatial class
- float spatial_pred_err_v_; // spatial class
- bool first_frame_;
- bool ca_Init_;
-
- VideoContentMetrics* content_metrics_;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_CONTENT_ANALYSIS_H
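
A hypothetical driver for the class declared above, as a sketch only (assumes the header is included):

#include "webrtc/modules/video_processing/main/source/content_analysis.h"

// Sketch: ComputeContentMetrics() re-initializes internally when the native
// dimensions change, so a caller can simply feed frames in a loop.
void SketchAnalyze(webrtc::VPMContentAnalysis* ca,
                   const webrtc::VideoFrame& frame) {
  webrtc::VideoContentMetrics* metrics = ca->ComputeContentMetrics(frame);
  if (metrics != NULL) {
    // metrics->motion_magnitude and the spatial_pred_err fields are now
    // up to date for the last processed frame.
  }
}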
diff --git a/webrtc/modules/video_processing/main/source/content_analysis_sse2.cc b/webrtc/modules/video_processing/main/source/content_analysis_sse2.cc
deleted file mode 100644
index 17b64ff280..0000000000
--- a/webrtc/modules/video_processing/main/source/content_analysis_sse2.cc
+++ /dev/null
@@ -1,264 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_processing/main/source/content_analysis.h"
-
-#include <emmintrin.h>
-#include <math.h>
-
-namespace webrtc {
-
-int32_t VPMContentAnalysis::TemporalDiffMetric_SSE2() {
- uint32_t num_pixels = 0; // counter for # of pixels
- const uint8_t* imgBufO = orig_frame_ + border_*width_ + border_;
- const uint8_t* imgBufP = prev_frame_ + border_*width_ + border_;
-
- const int32_t width_end = ((width_ - 2*border_) & -16) + border_;
-
- __m128i sad_64 = _mm_setzero_si128();
- __m128i sum_64 = _mm_setzero_si128();
- __m128i sqsum_64 = _mm_setzero_si128();
- const __m128i z = _mm_setzero_si128();
-
- for (uint16_t i = 0; i < (height_ - 2*border_); i += skip_num_) {
- __m128i sqsum_32 = _mm_setzero_si128();
-
- const uint8_t *lineO = imgBufO;
- const uint8_t *lineP = imgBufP;
-
- // Work on 16 pixels at a time. For HD content with a width of 1920
- // this loop will run ~67 times (depending on border). Maximum for
- // abs(o-p) and sum(o) will be 255. _mm_sad_epu8 produces 2 64 bit
- // results which are then accumulated. There is no chance of
- // rollover for these two accumulators.
- // o*o will have a maximum of 255*255 = 65025. This will roll over
- // a 16 bit accumulator as 67*65025 > 65535, but will fit in a
- // 32 bit accumulator.
- for (uint16_t j = 0; j < width_end - border_; j += 16) {
- const __m128i o = _mm_loadu_si128((__m128i*)(lineO));
- const __m128i p = _mm_loadu_si128((__m128i*)(lineP));
-
- lineO += 16;
- lineP += 16;
-
- // Abs pixel difference between frames.
- sad_64 = _mm_add_epi64 (sad_64, _mm_sad_epu8(o, p));
-
- // sum of all pixels in frame
- sum_64 = _mm_add_epi64 (sum_64, _mm_sad_epu8(o, z));
-
- // Squared sum of all pixels in frame.
- const __m128i olo = _mm_unpacklo_epi8(o,z);
- const __m128i ohi = _mm_unpackhi_epi8(o,z);
-
- const __m128i sqsum_32_lo = _mm_madd_epi16(olo, olo);
- const __m128i sqsum_32_hi = _mm_madd_epi16(ohi, ohi);
-
- sqsum_32 = _mm_add_epi32(sqsum_32, sqsum_32_lo);
- sqsum_32 = _mm_add_epi32(sqsum_32, sqsum_32_hi);
- }
-
- // Add to 64 bit running sum as to not roll over.
- sqsum_64 = _mm_add_epi64(sqsum_64,
- _mm_add_epi64(_mm_unpackhi_epi32(sqsum_32,z),
- _mm_unpacklo_epi32(sqsum_32,z)));
-
- imgBufO += width_ * skip_num_;
- imgBufP += width_ * skip_num_;
- num_pixels += (width_end - border_);
- }
-
- __m128i sad_final_128;
- __m128i sum_final_128;
- __m128i sqsum_final_128;
-
- // Bring sums out of vector registers and into integer register
- // domain, summing them along the way.
- _mm_store_si128 (&sad_final_128, sad_64);
- _mm_store_si128 (&sum_final_128, sum_64);
- _mm_store_si128 (&sqsum_final_128, sqsum_64);
-
- uint64_t *sad_final_64 = reinterpret_cast<uint64_t*>(&sad_final_128);
- uint64_t *sum_final_64 = reinterpret_cast<uint64_t*>(&sum_final_128);
- uint64_t *sqsum_final_64 = reinterpret_cast<uint64_t*>(&sqsum_final_128);
-
- const uint32_t pixelSum = sum_final_64[0] + sum_final_64[1];
- const uint64_t pixelSqSum = sqsum_final_64[0] + sqsum_final_64[1];
- const uint32_t tempDiffSum = sad_final_64[0] + sad_final_64[1];
-
- // Default.
- motion_magnitude_ = 0.0f;
-
- if (tempDiffSum == 0) return VPM_OK;
-
- // Normalize over all pixels.
- const float tempDiffAvg = (float)tempDiffSum / (float)(num_pixels);
- const float pixelSumAvg = (float)pixelSum / (float)(num_pixels);
- const float pixelSqSumAvg = (float)pixelSqSum / (float)(num_pixels);
- float contrast = pixelSqSumAvg - (pixelSumAvg * pixelSumAvg);
-
- if (contrast > 0.0) {
- contrast = sqrt(contrast);
- motion_magnitude_ = tempDiffAvg/contrast;
- }
-
- return VPM_OK;
-}
-
-int32_t VPMContentAnalysis::ComputeSpatialMetrics_SSE2() {
- const uint8_t* imgBuf = orig_frame_ + border_*width_;
- const int32_t width_end = ((width_ - 2 * border_) & -16) + border_;
-
- __m128i se_32 = _mm_setzero_si128();
- __m128i sev_32 = _mm_setzero_si128();
- __m128i seh_32 = _mm_setzero_si128();
- __m128i msa_32 = _mm_setzero_si128();
- const __m128i z = _mm_setzero_si128();
-
- // Error is accumulated as a 32 bit value. Looking at HD content with a
- // height of 1080 lines, or about 67 macro blocks. If the 16 bit row
- // value is maxed out at 65535 for every row, 65535*1080 = 70777800, which
- // will not roll over a 32 bit accumulator.
- // skip_num_ is also used to reduce the number of rows
- for (int32_t i = 0; i < (height_ - 2*border_); i += skip_num_) {
- __m128i se_16 = _mm_setzero_si128();
- __m128i sev_16 = _mm_setzero_si128();
- __m128i seh_16 = _mm_setzero_si128();
- __m128i msa_16 = _mm_setzero_si128();
-
- // Row error is accumulated as a 16 bit value. There are 8
- // accumulators. The max value of a 16 bit number is 65535. HD content
- // (1080p) has a width of 1920 pixels, i.e. 120 macroblocks.
- // One macroblock is processed at a time. The absolute max error at
- // a point would be abs(0-255+255+255+255), which equals 1020.
- // 120*1020 = 122400. The probability of hitting this is quite low
- // on well behaved content. A specially crafted image could roll over.
- // border_ could also be adjusted to concentrate on just the center of
- // the images for an HD capture in order to reduce the possibility of
- // rollover.
- const uint8_t *lineTop = imgBuf - width_ + border_;
- const uint8_t *lineCen = imgBuf + border_;
- const uint8_t *lineBot = imgBuf + width_ + border_;
-
- for (int32_t j = 0; j < width_end - border_; j += 16) {
- const __m128i t = _mm_loadu_si128((__m128i*)(lineTop));
- const __m128i l = _mm_loadu_si128((__m128i*)(lineCen - 1));
- const __m128i c = _mm_loadu_si128((__m128i*)(lineCen));
- const __m128i r = _mm_loadu_si128((__m128i*)(lineCen + 1));
- const __m128i b = _mm_loadu_si128((__m128i*)(lineBot));
-
- lineTop += 16;
- lineCen += 16;
- lineBot += 16;
-
- // center pixel unpacked
- __m128i clo = _mm_unpacklo_epi8(c,z);
- __m128i chi = _mm_unpackhi_epi8(c,z);
-
- // left right pixels unpacked and added together
- const __m128i lrlo = _mm_add_epi16(_mm_unpacklo_epi8(l,z),
- _mm_unpacklo_epi8(r,z));
- const __m128i lrhi = _mm_add_epi16(_mm_unpackhi_epi8(l,z),
- _mm_unpackhi_epi8(r,z));
-
- // top & bottom pixels unpacked and added together
- const __m128i tblo = _mm_add_epi16(_mm_unpacklo_epi8(t,z),
- _mm_unpacklo_epi8(b,z));
- const __m128i tbhi = _mm_add_epi16(_mm_unpackhi_epi8(t,z),
- _mm_unpackhi_epi8(b,z));
-
- // running sum of all pixels
- msa_16 = _mm_add_epi16(msa_16, _mm_add_epi16(chi, clo));
-
- clo = _mm_slli_epi16(clo, 1);
- chi = _mm_slli_epi16(chi, 1);
- const __m128i sevtlo = _mm_subs_epi16(clo, tblo);
- const __m128i sevthi = _mm_subs_epi16(chi, tbhi);
- const __m128i sehtlo = _mm_subs_epi16(clo, lrlo);
- const __m128i sehthi = _mm_subs_epi16(chi, lrhi);
-
- clo = _mm_slli_epi16(clo, 1);
- chi = _mm_slli_epi16(chi, 1);
- const __m128i setlo = _mm_subs_epi16(clo, _mm_add_epi16(lrlo, tblo));
- const __m128i sethi = _mm_subs_epi16(chi, _mm_add_epi16(lrhi, tbhi));
-
- // Add to 16 bit running sum
- se_16 = _mm_add_epi16(se_16, _mm_max_epi16(setlo,
- _mm_subs_epi16(z, setlo)));
- se_16 = _mm_add_epi16(se_16, _mm_max_epi16(sethi,
- _mm_subs_epi16(z, sethi)));
- sev_16 = _mm_add_epi16(sev_16, _mm_max_epi16(sevtlo,
- _mm_subs_epi16(z, sevtlo)));
- sev_16 = _mm_add_epi16(sev_16, _mm_max_epi16(sevthi,
- _mm_subs_epi16(z, sevthi)));
- seh_16 = _mm_add_epi16(seh_16, _mm_max_epi16(sehtlo,
- _mm_subs_epi16(z, sehtlo)));
- seh_16 = _mm_add_epi16(seh_16, _mm_max_epi16(sehthi,
- _mm_subs_epi16(z, sehthi)));
- }
-
- // Add to 32 bit running sum as to not roll over.
- se_32 = _mm_add_epi32(se_32, _mm_add_epi32(_mm_unpackhi_epi16(se_16,z),
- _mm_unpacklo_epi16(se_16,z)));
- sev_32 = _mm_add_epi32(sev_32, _mm_add_epi32(_mm_unpackhi_epi16(sev_16,z),
- _mm_unpacklo_epi16(sev_16,z)));
- seh_32 = _mm_add_epi32(seh_32, _mm_add_epi32(_mm_unpackhi_epi16(seh_16,z),
- _mm_unpacklo_epi16(seh_16,z)));
- msa_32 = _mm_add_epi32(msa_32, _mm_add_epi32(_mm_unpackhi_epi16(msa_16,z),
- _mm_unpacklo_epi16(msa_16,z)));
-
- imgBuf += width_ * skip_num_;
- }
-
- __m128i se_128;
- __m128i sev_128;
- __m128i seh_128;
- __m128i msa_128;
-
- // Bring sums out of vector registers and into integer register
- // domain, summing them along the way.
- _mm_store_si128 (&se_128, _mm_add_epi64(_mm_unpackhi_epi32(se_32,z),
- _mm_unpacklo_epi32(se_32,z)));
- _mm_store_si128 (&sev_128, _mm_add_epi64(_mm_unpackhi_epi32(sev_32,z),
- _mm_unpacklo_epi32(sev_32,z)));
- _mm_store_si128 (&seh_128, _mm_add_epi64(_mm_unpackhi_epi32(seh_32,z),
- _mm_unpacklo_epi32(seh_32,z)));
- _mm_store_si128 (&msa_128, _mm_add_epi64(_mm_unpackhi_epi32(msa_32,z),
- _mm_unpacklo_epi32(msa_32,z)));
-
- uint64_t *se_64 = reinterpret_cast<uint64_t*>(&se_128);
- uint64_t *sev_64 = reinterpret_cast<uint64_t*>(&sev_128);
- uint64_t *seh_64 = reinterpret_cast<uint64_t*>(&seh_128);
- uint64_t *msa_64 = reinterpret_cast<uint64_t*>(&msa_128);
-
- const uint32_t spatialErrSum = se_64[0] + se_64[1];
- const uint32_t spatialErrVSum = sev_64[0] + sev_64[1];
- const uint32_t spatialErrHSum = seh_64[0] + seh_64[1];
- const uint32_t pixelMSA = msa_64[0] + msa_64[1];
-
- // Normalize over all pixels.
- const float spatialErr = (float)(spatialErrSum >> 2);
- const float spatialErrH = (float)(spatialErrHSum >> 1);
- const float spatialErrV = (float)(spatialErrVSum >> 1);
- const float norm = (float)pixelMSA;
-
- // 2X2:
- spatial_pred_err_ = spatialErr / norm;
-
- // 1X2:
- spatial_pred_err_h_ = spatialErrH / norm;
-
- // 2X1:
- spatial_pred_err_v_ = spatialErrV / norm;
-
- return VPM_OK;
-}
-
-} // namespace webrtc
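
The accumulation pattern above hinges on _mm_sad_epu8, which collapses sixteen absolute byte differences into two 64-bit lanes. Reduced to a single step, as a sketch that is not part of the file:

#include <emmintrin.h>
#include <stdint.h>

// Sketch: sum of absolute differences over one 16-byte block.
uint64_t Sad16(const uint8_t* a, const uint8_t* b) {
  const __m128i va = _mm_loadu_si128(reinterpret_cast<const __m128i*>(a));
  const __m128i vb = _mm_loadu_si128(reinterpret_cast<const __m128i*>(b));
  const __m128i sad = _mm_sad_epu8(va, vb);  // Two 64-bit partial sums.
  uint64_t lanes[2];
  _mm_storeu_si128(reinterpret_cast<__m128i*>(lanes), sad);
  return lanes[0] + lanes[1];
}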
diff --git a/webrtc/modules/video_processing/main/source/deflickering.cc b/webrtc/modules/video_processing/main/source/deflickering.cc
deleted file mode 100644
index 19bc641ac9..0000000000
--- a/webrtc/modules/video_processing/main/source/deflickering.cc
+++ /dev/null
@@ -1,398 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_processing/main/source/deflickering.h"
-
-#include <math.h>
-#include <stdlib.h>
-
-#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
-#include "webrtc/system_wrappers/include/logging.h"
-#include "webrtc/system_wrappers/include/sort.h"
-
-namespace webrtc {
-
-// Detection constants
-// (Q4) Maximum allowed deviation for detection.
-enum { kFrequencyDeviation = 39 };
-// (Q4) Minimum frequency that can be detected.
-enum { kMinFrequencyToDetect = 32 };
-// Number of flickers before we accept detection
-enum { kNumFlickerBeforeDetect = 2 };
-enum { kmean_valueScaling = 4 }; // (Q4) In power of 2
-// Dead-zone region in terms of pixel values
-enum { kZeroCrossingDeadzone = 10 };
-// Deflickering constants.
-// Compute the quantiles over 1 / DownsamplingFactor of the image.
-enum { kDownsamplingFactor = 8 };
-enum { kLog2OfDownsamplingFactor = 3 };
-
-// To generate in Matlab:
-// >> probUW16 = round(2^11 *
-// [0.05,0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,0.95,0.97]);
-// >> fprintf('%d, ', probUW16)
-// Resolution reduced to avoid overflow when multiplying with the
-// (potentially) large number of pixels.
-const uint16_t VPMDeflickering::prob_uw16_[kNumProbs] = {102, 205, 410, 614,
- 819, 1024, 1229, 1434, 1638, 1843, 1946, 1987}; // <Q11>
-
-// To generate in Matlab:
-// >> numQuants = 14; maxOnlyLength = 5;
-// >> weightUW16 = round(2^15 *
-// [linspace(0.5, 1.0, numQuants - maxOnlyLength)]);
-// >> fprintf('%d, %d,\n ', weightUW16);
-const uint16_t VPMDeflickering::weight_uw16_[kNumQuants - kMaxOnlyLength] =
- {16384, 18432, 20480, 22528, 24576, 26624, 28672, 30720, 32768}; // <Q15>
-
-VPMDeflickering::VPMDeflickering() {
- Reset();
-}
-
-VPMDeflickering::~VPMDeflickering() {}
-
-void VPMDeflickering::Reset() {
- mean_buffer_length_ = 0;
- detection_state_ = 0;
- frame_rate_ = 0;
-
- memset(mean_buffer_, 0, sizeof(int32_t) * kMeanBufferLength);
- memset(timestamp_buffer_, 0, sizeof(int32_t) * kMeanBufferLength);
-
- // Initialize the history with a uniformly distributed histogram.
- quant_hist_uw8_[0][0] = 0;
- quant_hist_uw8_[0][kNumQuants - 1] = 255;
- for (int32_t i = 0; i < kNumProbs; i++) {
- // Unsigned round. <Q0>
- quant_hist_uw8_[0][i + 1] = static_cast<uint8_t>(
- (prob_uw16_[i] * 255 + (1 << 10)) >> 11);
- }
-
- for (int32_t i = 1; i < kFrameHistory_size; i++) {
- memcpy(quant_hist_uw8_[i], quant_hist_uw8_[0],
- sizeof(uint8_t) * kNumQuants);
- }
-}
-
-int32_t VPMDeflickering::ProcessFrame(
- VideoFrame* frame,
- VideoProcessingModule::FrameStats* stats) {
- assert(frame);
- uint32_t frame_memory;
- uint8_t quant_uw8[kNumQuants];
- uint8_t maxquant_uw8[kNumQuants];
- uint8_t minquant_uw8[kNumQuants];
- uint16_t target_quant_uw16[kNumQuants];
- uint16_t increment_uw16;
- uint8_t map_uw8[256];
-
- uint16_t tmp_uw16;
- uint32_t tmp_uw32;
- int width = frame->width();
- int height = frame->height();
-
- if (frame->IsZeroSize()) {
- return VPM_GENERAL_ERROR;
- }
-
- // Stricter height check due to subsampling size calculation below.
- if (height < 2) {
- LOG(LS_ERROR) << "Invalid frame size.";
- return VPM_GENERAL_ERROR;
- }
-
- if (!VideoProcessingModule::ValidFrameStats(*stats)) {
- return VPM_GENERAL_ERROR;
- }
-
- if (PreDetection(frame->timestamp(), *stats) == -1) return VPM_GENERAL_ERROR;
-
- // Flicker detection
- int32_t det_flicker = DetectFlicker();
- if (det_flicker < 0) {
- return VPM_GENERAL_ERROR;
- } else if (det_flicker != 1) {
- return 0;
- }
-
- // Size of luminance component.
- const uint32_t y_size = height * width;
-
- const uint32_t y_sub_size = width * (((height - 1) >>
- kLog2OfDownsamplingFactor) + 1);
- uint8_t* y_sorted = new uint8_t[y_sub_size];
- uint32_t sort_row_idx = 0;
- for (int i = 0; i < height; i += kDownsamplingFactor) {
- memcpy(y_sorted + sort_row_idx * width,
- frame->buffer(kYPlane) + i * width, width);
- sort_row_idx++;
- }
-
- webrtc::Sort(y_sorted, y_sub_size, webrtc::TYPE_UWord8);
-
- uint32_t prob_idx_uw32 = 0;
- quant_uw8[0] = 0;
- quant_uw8[kNumQuants - 1] = 255;
-
- // Ensure we won't get an overflow below.
- // In practice, the number of subsampled pixels will not become this large.
- if (y_sub_size > (1 << 21) - 1) {
- LOG(LS_ERROR) << "Subsampled number of pixels too large.";
- delete [] y_sorted;  // Avoid leaking the subsample buffer.
- return -1;
- }
-
- for (int32_t i = 0; i < kNumProbs; i++) {
- // <Q0>.
- prob_idx_uw32 = WEBRTC_SPL_UMUL_32_16(y_sub_size, prob_uw16_[i]) >> 11;
- quant_uw8[i + 1] = y_sorted[prob_idx_uw32];
- }
-
- delete [] y_sorted;
- y_sorted = NULL;
-
- // Shift history for new frame.
- memmove(quant_hist_uw8_[1], quant_hist_uw8_[0],
- (kFrameHistory_size - 1) * kNumQuants * sizeof(uint8_t));
- // Store current frame in history.
- memcpy(quant_hist_uw8_[0], quant_uw8, kNumQuants * sizeof(uint8_t));
-
- // We use a frame memory equal to the ceiling of half the frame rate to
- // ensure we capture an entire period of flicker.
- frame_memory = (frame_rate_ + (1 << 5)) >> 5; // Unsigned ceiling. <Q0>
- // frame_rate_ in Q4.
- if (frame_memory > kFrameHistory_size) {
- frame_memory = kFrameHistory_size;
- }
-
- // Get maximum and minimum.
- for (int32_t i = 0; i < kNumQuants; i++) {
- maxquant_uw8[i] = 0;
- minquant_uw8[i] = 255;
- for (uint32_t j = 0; j < frame_memory; j++) {
- if (quant_hist_uw8_[j][i] > maxquant_uw8[i]) {
- maxquant_uw8[i] = quant_hist_uw8_[j][i];
- }
-
- if (quant_hist_uw8_[j][i] < minquant_uw8[i]) {
- minquant_uw8[i] = quant_hist_uw8_[j][i];
- }
- }
- }
-
- // Get target quantiles.
- for (int32_t i = 0; i < kNumQuants - kMaxOnlyLength; i++) {
- // target = w * maxquant_uw8 + (1 - w) * minquant_uw8
- // Weights w = |weight_uw16_| are in Q15, hence the final output has to be
- // right shifted by 8 to end up in Q7.
- target_quant_uw16[i] = static_cast<uint16_t>((
- weight_uw16_[i] * maxquant_uw8[i] +
- ((1 << 15) - weight_uw16_[i]) * minquant_uw8[i]) >> 8); // <Q7>
- }
-
- for (int32_t i = kNumQuants - kMaxOnlyLength; i < kNumQuants; i++) {
- target_quant_uw16[i] = ((uint16_t)maxquant_uw8[i]) << 7;
- }
-
- // Compute the map from input to output pixels.
- uint16_t map_uw16;  // <Q7>
- for (int32_t i = 1; i < kNumQuants; i++) {
- // As quant and targetQuant are limited to UWord8, it's safe to use Q7 here.
- tmp_uw32 = static_cast<uint32_t>(target_quant_uw16[i] -
- target_quant_uw16[i - 1]);
- tmp_uw16 = static_cast<uint16_t>(quant_uw8[i] - quant_uw8[i - 1]); // <Q0>
-
- if (tmp_uw16 > 0) {
- increment_uw16 = static_cast<uint16_t>(WebRtcSpl_DivU32U16(tmp_uw32,
- tmp_uw16)); // <Q7>
- } else {
- // The value is irrelevant; the loop below will only iterate once.
- increment_uw16 = 0;
- }
-
- map_uw16 = target_quant_uw16[i - 1];
- for (uint32_t j = quant_uw8[i - 1]; j < (uint32_t)(quant_uw8[i] + 1); j++) {
- // Unsigned round. <Q0>
- map_uw8[j] = (uint8_t)((map_uw16 + (1 << 6)) >> 7);
- map_uw16 += increment_uw16;
- }
- }
-
- // Map to the output frame.
- uint8_t* buffer = frame->buffer(kYPlane);
- for (uint32_t i = 0; i < y_size; i++) {
- buffer[i] = map_uw8[buffer[i]];
- }
-
- // Frame was altered, so reset stats.
- VideoProcessingModule::ClearFrameStats(stats);
-
- return VPM_OK;
-}
-
-/**
- Performs some pre-detection operations. Must be called before
- DetectFlicker().
-
- \param[in] timestamp Timestamp of the current frame.
- \param[in] stats Statistics of the current frame.
-
- \return 0: Success\n
- 2: Detection not possible due to flickering frequency too close to
- zero.\n
- -1: Error
-*/
-int32_t VPMDeflickering::PreDetection(const uint32_t timestamp,
- const VideoProcessingModule::FrameStats& stats) {
- int32_t mean_val; // Mean value of frame (Q4)
- uint32_t frame_rate = 0;
- int32_t meanBufferLength; // Temp variable.
-
- mean_val = ((stats.sum << kmean_valueScaling) / stats.num_pixels);
- // Update mean value buffer.
- // This should be done even though we might end up in an unreliable detection.
- memmove(mean_buffer_ + 1, mean_buffer_,
- (kMeanBufferLength - 1) * sizeof(int32_t));
- mean_buffer_[0] = mean_val;
-
- // Update timestamp buffer.
- // This should be done even though we might end up in an unreliable detection.
- memmove(timestamp_buffer_ + 1, timestamp_buffer_, (kMeanBufferLength - 1) *
- sizeof(uint32_t));
- timestamp_buffer_[0] = timestamp;
-
- /* Compute current frame rate (Q4) */
- if (timestamp_buffer_[kMeanBufferLength - 1] != 0) {
- frame_rate = ((90000 << 4) * (kMeanBufferLength - 1));
- frame_rate /=
- (timestamp_buffer_[0] - timestamp_buffer_[kMeanBufferLength - 1]);
- } else if (timestamp_buffer_[1] != 0) {
- frame_rate = (90000 << 4) / (timestamp_buffer_[0] - timestamp_buffer_[1]);
- }
-
- /* Determine required size of mean value buffer (mean_buffer_length_) */
- if (frame_rate == 0) {
- meanBufferLength = 1;
- } else {
- meanBufferLength =
- (kNumFlickerBeforeDetect * frame_rate) / kMinFrequencyToDetect;
- }
- /* Sanity check of buffer length */
- if (meanBufferLength >= kMeanBufferLength) {
- /* Too long buffer. The flickering frequency is too close to zero, which
- * makes the estimation unreliable.
- */
- mean_buffer_length_ = 0;
- return 2;
- }
- mean_buffer_length_ = meanBufferLength;
-
- if ((timestamp_buffer_[mean_buffer_length_ - 1] != 0) &&
- (mean_buffer_length_ != 1)) {
- frame_rate = ((90000 << 4) * (mean_buffer_length_ - 1));
- frame_rate /=
- (timestamp_buffer_[0] - timestamp_buffer_[mean_buffer_length_ - 1]);
- } else if (timestamp_buffer_[1] != 0) {
- frame_rate = (90000 << 4) / (timestamp_buffer_[0] - timestamp_buffer_[1]);
- }
- frame_rate_ = frame_rate;
-
- return VPM_OK;
-}
-
-/**
- This function detects flicker in the video stream. As a side effect the
- mean value buffer is updated with the new mean value.
-
- \return 0: No flickering detected\n
- 1: Flickering detected\n
- 2: Detection not possible due to unreliable frequency interval\n
- -1: Error
-*/
-int32_t VPMDeflickering::DetectFlicker() {
- uint32_t i;
- int32_t freqEst; // (Q4) Frequency estimate to base detection upon
- int32_t ret_val = -1;
-
- /* Sanity check for mean_buffer_length_ */
- if (mean_buffer_length_ < 2) {
- /* Not possible to estimate frequency */
- return 2;
- }
- // Count zero crossings with a dead zone to be robust against noise. If the
- // noise std is 2 pixels, this corresponds to about a 95% confidence
- // interval.
- int32_t deadzone = (kZeroCrossingDeadzone << kmean_valueScaling); // Q4
- int32_t meanOfBuffer = 0; // Mean value of mean value buffer.
- int32_t numZeros = 0; // Number of zeros that cross the dead-zone.
- int32_t cntState = 0; // State variable for zero crossing regions.
- int32_t cntStateOld = 0; // Previous state for zero crossing regions.
-
- for (i = 0; i < mean_buffer_length_; i++) {
- meanOfBuffer += mean_buffer_[i];
- }
- meanOfBuffer += (mean_buffer_length_ >> 1); // Rounding, not truncation.
- meanOfBuffer /= mean_buffer_length_;
-
- // Count zero crossings.
- cntStateOld = (mean_buffer_[0] >= (meanOfBuffer + deadzone));
- cntStateOld -= (mean_buffer_[0] <= (meanOfBuffer - deadzone));
- for (i = 1; i < mean_buffer_length_; i++) {
- cntState = (mean_buffer_[i] >= (meanOfBuffer + deadzone));
- cntState -= (mean_buffer_[i] <= (meanOfBuffer - deadzone));
- if (cntStateOld == 0) {
- cntStateOld = -cntState;
- }
- if (((cntState + cntStateOld) == 0) && (cntState != 0)) {
- numZeros++;
- cntStateOld = cntState;
- }
- }
- // END count zero crossings.
-
- /* Frequency estimation according to:
- * freqEst = numZeros * frame_rate / 2 / mean_buffer_length_;
- *
- * Resolution is set to Q4
- */
- freqEst = ((numZeros * 90000) << 3);
- freqEst /=
- (timestamp_buffer_[0] - timestamp_buffer_[mean_buffer_length_ - 1]);
-
- /* Translate frequency estimate to regions close to 100 and 120 Hz */
- uint8_t freqState = 0;  // Current translation state:
- // (0) Not in interval,
- // (1) Within valid interval,
- // (2) Out of range
- int32_t freqAlias = freqEst;
- if (freqEst > kMinFrequencyToDetect) {
- uint8_t aliasState = 1;
- while (freqState == 0) {
- /* Increase frequency */
- freqAlias += (aliasState * frame_rate_);
- freqAlias += ((freqEst << 1) * (1 - (aliasState << 1)));
- /* Compute state */
- freqState = (abs(freqAlias - (100 << 4)) <= kFrequencyDeviation);
- freqState += (abs(freqAlias - (120 << 4)) <= kFrequencyDeviation);
- freqState += 2 * (freqAlias > ((120 << 4) + kFrequencyDeviation));
- /* Switch alias state */
- aliasState++;
- aliasState &= 0x01;
- }
- }
- /* Is frequency estimate within detection region? */
- if (freqState == 1) {
- ret_val = 1;
- } else if (freqState == 0) {
- ret_val = 2;
- } else {
- ret_val = 0;
- }
- return ret_val;
-}
-
-} // namespace webrtc
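
The target-quantile step above is a fixed-point blend, target = w*max + (1 - w)*min with w in Q15, right-shifted by 8 so the result lands in Q7. In scalar form, as a sketch that is not part of the file:

#include <stdint.h>

// Sketch: Q15 weights blend two Q0 quantiles into a Q7 target.
uint16_t BlendQ15ToQ7(uint16_t w_q15, uint8_t max_q0, uint8_t min_q0) {
  const uint32_t blended = static_cast<uint32_t>(w_q15) * max_q0 +
                           ((1u << 15) - w_q15) * min_q0;  // Q15 products.
  return static_cast<uint16_t>(blended >> 8);  // Q15 -> Q7.
}
// Example: BlendQ15ToQ7(16384, 200, 100) == 19200, i.e. 150.0 in Q7 (the
// midpoint, since w = 16384 is 0.5 in Q15).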
diff --git a/webrtc/modules/video_processing/main/source/deflickering.h b/webrtc/modules/video_processing/main/source/deflickering.h
deleted file mode 100644
index 36e6845d71..0000000000
--- a/webrtc/modules/video_processing/main/source/deflickering.h
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_DEFLICKERING_H_
-#define WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_DEFLICKERING_H_
-
-#include <string.h> // NULL
-
-#include "webrtc/modules/video_processing/main/interface/video_processing.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-class VPMDeflickering {
- public:
- VPMDeflickering();
- ~VPMDeflickering();
-
- void Reset();
- int32_t ProcessFrame(VideoFrame* frame,
- VideoProcessingModule::FrameStats* stats);
-
- private:
- int32_t PreDetection(uint32_t timestamp,
- const VideoProcessingModule::FrameStats& stats);
-
- int32_t DetectFlicker();
-
- enum { kMeanBufferLength = 32 };
- enum { kFrameHistory_size = 15 };
- enum { kNumProbs = 12 };
- enum { kNumQuants = kNumProbs + 2 };
- enum { kMaxOnlyLength = 5 };
-
- uint32_t mean_buffer_length_;
- uint8_t detection_state_; // 0: No flickering
- // 1: Flickering detected
- // 2: In flickering
- int32_t mean_buffer_[kMeanBufferLength];
- uint32_t timestamp_buffer_[kMeanBufferLength];
- uint32_t frame_rate_;
- static const uint16_t prob_uw16_[kNumProbs];
- static const uint16_t weight_uw16_[kNumQuants - kMaxOnlyLength];
- uint8_t quant_hist_uw8_[kFrameHistory_size][kNumQuants];
-};
-
-} // namespace webrtc
-
-#endif  // WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_DEFLICKERING_H_
diff --git a/webrtc/modules/video_processing/main/source/frame_preprocessor.cc b/webrtc/modules/video_processing/main/source/frame_preprocessor.cc
deleted file mode 100644
index a9d77c2e0c..0000000000
--- a/webrtc/modules/video_processing/main/source/frame_preprocessor.cc
+++ /dev/null
@@ -1,136 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_processing/main/source/frame_preprocessor.h"
-
-namespace webrtc {
-
-VPMFramePreprocessor::VPMFramePreprocessor()
- : content_metrics_(NULL),
- resampled_frame_(),
- enable_ca_(false),
- frame_cnt_(0) {
- spatial_resampler_ = new VPMSimpleSpatialResampler();
- ca_ = new VPMContentAnalysis(true);
- vd_ = new VPMVideoDecimator();
-}
-
-VPMFramePreprocessor::~VPMFramePreprocessor() {
- Reset();
- delete spatial_resampler_;
- delete ca_;
- delete vd_;
-}
-
-void VPMFramePreprocessor::Reset() {
- ca_->Release();
- vd_->Reset();
- content_metrics_ = NULL;
- spatial_resampler_->Reset();
- enable_ca_ = false;
- frame_cnt_ = 0;
-}
-
-void VPMFramePreprocessor::EnableTemporalDecimation(bool enable) {
- vd_->EnableTemporalDecimation(enable);
-}
-
-void VPMFramePreprocessor::EnableContentAnalysis(bool enable) {
- enable_ca_ = enable;
-}
-
-void VPMFramePreprocessor::SetInputFrameResampleMode(
- VideoFrameResampling resampling_mode) {
- spatial_resampler_->SetInputFrameResampleMode(resampling_mode);
-}
-
-int32_t VPMFramePreprocessor::SetTargetResolution(
- uint32_t width, uint32_t height, uint32_t frame_rate) {
- if ((width == 0) || (height == 0) || (frame_rate == 0)) {
- return VPM_PARAMETER_ERROR;
- }
- int32_t ret_val = 0;
- ret_val = spatial_resampler_->SetTargetFrameSize(width, height);
-
- if (ret_val < 0) return ret_val;
-
- vd_->SetTargetFramerate(frame_rate);
- return VPM_OK;
-}
-
-void VPMFramePreprocessor::SetTargetFramerate(int frame_rate) {
- if (frame_rate == -1) {
- vd_->EnableTemporalDecimation(false);
- } else {
- vd_->EnableTemporalDecimation(true);
- vd_->SetTargetFramerate(frame_rate);
- }
-}
-
-void VPMFramePreprocessor::UpdateIncomingframe_rate() {
- vd_->UpdateIncomingframe_rate();
-}
-
-uint32_t VPMFramePreprocessor::Decimatedframe_rate() {
- return vd_->Decimatedframe_rate();
-}
-
-
-uint32_t VPMFramePreprocessor::DecimatedWidth() const {
- return spatial_resampler_->TargetWidth();
-}
-
-
-uint32_t VPMFramePreprocessor::DecimatedHeight() const {
- return spatial_resampler_->TargetHeight();
-}
-
-int32_t VPMFramePreprocessor::PreprocessFrame(const VideoFrame& frame,
- VideoFrame** processed_frame) {
- if (frame.IsZeroSize()) {
- return VPM_PARAMETER_ERROR;
- }
-
- vd_->UpdateIncomingframe_rate();
-
- if (vd_->DropFrame()) {
- return 1;  // Drop one frame.
- }
-
- // Resize the incoming frame if needed; otherwise *processed_frame remains
- // NULL. We are not allowed to resample the input frame (we must make a
- // copy of it).
- *processed_frame = NULL;
- if (spatial_resampler_->ApplyResample(frame.width(), frame.height())) {
- int32_t ret = spatial_resampler_->ResampleFrame(frame, &resampled_frame_);
- if (ret != VPM_OK) return ret;
- *processed_frame = &resampled_frame_;
- }
-
- // Perform content analysis on the frame to be encoded.
- if (enable_ca_) {
- // Compute new metrics every |kSkipFrameCA| frames, starting with
- // the first frame.
- if (frame_cnt_ % kSkipFrameCA == 0) {
- if (*processed_frame == NULL) {
- content_metrics_ = ca_->ComputeContentMetrics(frame);
- } else {
- content_metrics_ = ca_->ComputeContentMetrics(resampled_frame_);
- }
- }
- ++frame_cnt_;
- }
- return VPM_OK;
-}
-
-VideoContentMetrics* VPMFramePreprocessor::ContentMetrics() const {
- return content_metrics_;
-}
-
-}  // namespace webrtc
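
PreprocessFrame above yields a tri-state result: 1 to drop the frame, VPM_OK with *processed_frame left NULL when the input can be encoded as-is, or VPM_OK with *processed_frame pointing at the resampled copy. A hypothetical caller, sketched under the assumption that the preprocessor headers are included and it was configured elsewhere:

// Sketch only: how a caller consumes PreprocessFrame's tri-state result.
int32_t SketchEncodePath(webrtc::VPMFramePreprocessor* preprocessor,
                         const webrtc::VideoFrame& frame) {
  webrtc::VideoFrame* processed = NULL;
  const int32_t ret = preprocessor->PreprocessFrame(frame, &processed);
  if (ret == 1) return 0;         // Frame dropped by temporal decimation.
  if (ret != VPM_OK) return ret;  // Propagate errors.
  // If no resampling was needed, |processed| stays NULL: encode |frame|.
  const webrtc::VideoFrame& to_encode =
      (processed != NULL) ? *processed : frame;
  (void)to_encode;  // Hand off to the encoder here.
  return VPM_OK;
}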
diff --git a/webrtc/modules/video_processing/main/source/frame_preprocessor.h b/webrtc/modules/video_processing/main/source/frame_preprocessor.h
deleted file mode 100644
index 895e457cc6..0000000000
--- a/webrtc/modules/video_processing/main/source/frame_preprocessor.h
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * frame_preprocessor.h
- */
-#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_FRAME_PREPROCESSOR_H
-#define WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_FRAME_PREPROCESSOR_H
-
-#include "webrtc/modules/video_processing/main/interface/video_processing.h"
-#include "webrtc/modules/video_processing/main/source/content_analysis.h"
-#include "webrtc/modules/video_processing/main/source/spatial_resampler.h"
-#include "webrtc/modules/video_processing/main/source/video_decimator.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-class VPMFramePreprocessor {
- public:
- VPMFramePreprocessor();
- ~VPMFramePreprocessor();
-
- void Reset();
-
- // Enable temporal decimation.
- void EnableTemporalDecimation(bool enable);
-
- void SetInputFrameResampleMode(VideoFrameResampling resampling_mode);
-
- // Enable content analysis.
- void EnableContentAnalysis(bool enable);
-
- // Set target resolution: frame rate and dimension.
- int32_t SetTargetResolution(uint32_t width, uint32_t height,
- uint32_t frame_rate);
-
- // Set target frame rate.
- void SetTargetFramerate(int frame_rate);
-
- // Update incoming frame rate/dimension.
- void UpdateIncomingframe_rate();
-
- int32_t updateIncomingFrameSize(uint32_t width, uint32_t height);
-
- // Set decimated values: frame rate/dimension.
- uint32_t Decimatedframe_rate();
- uint32_t DecimatedWidth() const;
- uint32_t DecimatedHeight() const;
-
- // Preprocess output:
- int32_t PreprocessFrame(const VideoFrame& frame,
- VideoFrame** processed_frame);
- VideoContentMetrics* ContentMetrics() const;
-
- private:
- // The content does not change so much every frame, so to reduce complexity
- // we can compute new content metrics every |kSkipFrameCA| frames.
- enum { kSkipFrameCA = 2 };
-
- VideoContentMetrics* content_metrics_;
- VideoFrame resampled_frame_;
- VPMSpatialResampler* spatial_resampler_;
- VPMContentAnalysis* ca_;
- VPMVideoDecimator* vd_;
- bool enable_ca_;
- int frame_cnt_;
-
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_FRAME_PREPROCESSOR_H
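
The |kSkipFrameCA| comment in the class above describes a simple cadence: content metrics are recomputed only on every second frame, starting with frame 0. A one-function sketch of that schedule, with the constant value taken from the header:

```cpp
// Content metrics are refreshed on frames 0, 2, 4, ... when kSkipFrameCA == 2.
enum { kSkipFrameCA = 2 };

bool ShouldRefreshContentMetrics(int frame_cnt) {
  return frame_cnt % kSkipFrameCA == 0;
}
```
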
diff --git a/webrtc/modules/video_processing/main/source/spatial_resampler.cc b/webrtc/modules/video_processing/main/source/spatial_resampler.cc
deleted file mode 100644
index 9360e68b41..0000000000
--- a/webrtc/modules/video_processing/main/source/spatial_resampler.cc
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_processing/main/source/spatial_resampler.h"
-
-
-namespace webrtc {
-
-VPMSimpleSpatialResampler::VPMSimpleSpatialResampler()
- : resampling_mode_(kFastRescaling),
- target_width_(0),
- target_height_(0),
- scaler_() {}
-
-VPMSimpleSpatialResampler::~VPMSimpleSpatialResampler() {}
-
-
-int32_t VPMSimpleSpatialResampler::SetTargetFrameSize(int32_t width,
- int32_t height) {
- if (resampling_mode_ == kNoRescaling) return VPM_OK;
-
- if (width < 1 || height < 1) return VPM_PARAMETER_ERROR;
-
- target_width_ = width;
- target_height_ = height;
-
- return VPM_OK;
-}
-
-void VPMSimpleSpatialResampler::SetInputFrameResampleMode(
- VideoFrameResampling resampling_mode) {
- resampling_mode_ = resampling_mode;
-}
-
-void VPMSimpleSpatialResampler::Reset() {
- resampling_mode_ = kFastRescaling;
- target_width_ = 0;
- target_height_ = 0;
-}
-
-int32_t VPMSimpleSpatialResampler::ResampleFrame(const VideoFrame& inFrame,
- VideoFrame* outFrame) {
- // Don't copy if frame remains as is.
- if (resampling_mode_ == kNoRescaling)
- return VPM_OK;
- // Check if re-sampling is needed
- else if ((inFrame.width() == target_width_) &&
- (inFrame.height() == target_height_)) {
- return VPM_OK;
- }
-
- // Setting scaler
- // TODO(mikhal/marpan): Should we allow for setting the filter mode in
- // scaler_.Set() with |resampling_mode_|?
- int ret_val = 0;
- ret_val = scaler_.Set(inFrame.width(), inFrame.height(),
- target_width_, target_height_, kI420, kI420, kScaleBox);
- if (ret_val < 0)
- return ret_val;
-
- ret_val = scaler_.Scale(inFrame, outFrame);
-
- // Setting time parameters to the output frame.
- // Timestamp will be reset in Scale call above, so we should set it after.
- outFrame->set_timestamp(inFrame.timestamp());
- outFrame->set_render_time_ms(inFrame.render_time_ms());
-
- if (ret_val == 0)
- return VPM_OK;
- else
- return VPM_SCALE_ERROR;
-}
-
-int32_t VPMSimpleSpatialResampler::TargetHeight() {
- return target_height_;
-}
-
-int32_t VPMSimpleSpatialResampler::TargetWidth() {
- return target_width_;
-}
-
-bool VPMSimpleSpatialResampler::ApplyResample(int32_t width,
- int32_t height) {
- if ((width == target_width_ && height == target_height_) ||
- resampling_mode_ == kNoRescaling)
- return false;
- else
- return true;
-}
-
-} // namespace webrtc
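
Typical use of the resampler pairs ApplyResample() as a cheap pre-check with ResampleFrame(), which re-applies the input timestamps that scaler_.Scale() resets. A minimal sketch, assuming a 640x480 target (the helper name is ours):

```cpp
#include "webrtc/modules/video_processing/main/source/spatial_resampler.h"

// Sketch only: downscale |in| to 640x480 when its size differs.
int32_t DownscaleIfNeeded(const webrtc::VideoFrame& in,
                          webrtc::VideoFrame* out) {
  webrtc::VPMSimpleSpatialResampler resampler;
  const int32_t ret = resampler.SetTargetFrameSize(640, 480);
  if (ret != VPM_OK)
    return ret;
  if (!resampler.ApplyResample(in.width(), in.height()))
    return VPM_OK;  // Already at target size, or rescaling disabled.
  return resampler.ResampleFrame(in, out);  // Preserves timestamp/render time.
}
```
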
diff --git a/webrtc/modules/video_processing/main/source/spatial_resampler.h b/webrtc/modules/video_processing/main/source/spatial_resampler.h
deleted file mode 100644
index f965a40a83..0000000000
--- a/webrtc/modules/video_processing/main/source/spatial_resampler.h
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_SPATIAL_RESAMPLER_H
-#define WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_SPATIAL_RESAMPLER_H
-
-#include "webrtc/typedefs.h"
-
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/video_processing/main/interface/video_processing_defines.h"
-
-#include "webrtc/common_video/libyuv/include/scaler.h"
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-
-namespace webrtc {
-
-class VPMSpatialResampler {
- public:
- virtual ~VPMSpatialResampler() {};
- virtual int32_t SetTargetFrameSize(int32_t width, int32_t height) = 0;
- virtual void SetInputFrameResampleMode(VideoFrameResampling
- resampling_mode) = 0;
- virtual void Reset() = 0;
- virtual int32_t ResampleFrame(const VideoFrame& inFrame,
- VideoFrame* outFrame) = 0;
- virtual int32_t TargetWidth() = 0;
- virtual int32_t TargetHeight() = 0;
- virtual bool ApplyResample(int32_t width, int32_t height) = 0;
-};
-
-class VPMSimpleSpatialResampler : public VPMSpatialResampler {
- public:
- VPMSimpleSpatialResampler();
- ~VPMSimpleSpatialResampler();
- virtual int32_t SetTargetFrameSize(int32_t width, int32_t height);
- virtual void SetInputFrameResampleMode(VideoFrameResampling resampling_mode);
- virtual void Reset();
- virtual int32_t ResampleFrame(const VideoFrame& inFrame,
- VideoFrame* outFrame);
- virtual int32_t TargetWidth();
- virtual int32_t TargetHeight();
- virtual bool ApplyResample(int32_t width, int32_t height);
-
- private:
-
- VideoFrameResampling resampling_mode_;
- int32_t target_width_;
- int32_t target_height_;
- Scaler scaler_;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_SPATIAL_RESAMPLER_H
diff --git a/webrtc/modules/video_processing/main/source/video_decimator.cc b/webrtc/modules/video_processing/main/source/video_decimator.cc
deleted file mode 100644
index 34c29c1677..0000000000
--- a/webrtc/modules/video_processing/main/source/video_decimator.cc
+++ /dev/null
@@ -1,146 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/base/checks.h"
-#include "webrtc/modules/video_processing/main/interface/video_processing.h"
-#include "webrtc/modules/video_processing/main/source/video_decimator.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
-
-#define VD_MIN(a, b) (((a) < (b)) ? (a) : (b))
-
-namespace webrtc {
-
-VPMVideoDecimator::VPMVideoDecimator() {
- Reset();
-}
-
-VPMVideoDecimator::~VPMVideoDecimator() {}
-
-void VPMVideoDecimator::Reset() {
- overshoot_modifier_ = 0;
- drop_count_ = 0;
- keep_count_ = 0;
- target_frame_rate_ = 30;
- incoming_frame_rate_ = 0.0f;
- memset(incoming_frame_times_, 0, sizeof(incoming_frame_times_));
- enable_temporal_decimation_ = true;
-}
-
-void VPMVideoDecimator::EnableTemporalDecimation(bool enable) {
- enable_temporal_decimation_ = enable;
-}
-
-void VPMVideoDecimator::SetTargetFramerate(int frame_rate) {
- RTC_DCHECK(frame_rate);
- target_frame_rate_ = frame_rate;
-}
-
-bool VPMVideoDecimator::DropFrame() {
- if (!enable_temporal_decimation_) return false;
-
- if (incoming_frame_rate_ <= 0) return false;
-
- const uint32_t incomingframe_rate =
- static_cast<uint32_t>(incoming_frame_rate_ + 0.5f);
-
- if (target_frame_rate_ == 0) return true;
-
- bool drop = false;
- if (incomingframe_rate > target_frame_rate_) {
- int32_t overshoot =
- overshoot_modifier_ + (incomingframe_rate - target_frame_rate_);
- if (overshoot < 0) {
- overshoot = 0;
- overshoot_modifier_ = 0;
- }
-
- if (overshoot && 2 * overshoot < (int32_t) incomingframe_rate) {
- if (drop_count_) { // Just got here so drop to be sure.
- drop_count_ = 0;
- return true;
- }
- const uint32_t dropVar = incomingframe_rate / overshoot;
-
- if (keep_count_ >= dropVar) {
- drop = true;
- overshoot_modifier_ = -((int32_t) incomingframe_rate % overshoot) / 3;
- keep_count_ = 1;
- } else {
- keep_count_++;
- }
- } else {
- keep_count_ = 0;
- const uint32_t dropVar = overshoot / target_frame_rate_;
- if (drop_count_ < dropVar) {
- drop = true;
- drop_count_++;
- } else {
- overshoot_modifier_ = overshoot % target_frame_rate_;
- drop = false;
- drop_count_ = 0;
- }
- }
- }
- return drop;
-}
-
-
-uint32_t VPMVideoDecimator::Decimatedframe_rate() {
- ProcessIncomingframe_rate(TickTime::MillisecondTimestamp());
- if (!enable_temporal_decimation_) {
- return static_cast<uint32_t>(incoming_frame_rate_ + 0.5f);
- }
- return VD_MIN(target_frame_rate_,
- static_cast<uint32_t>(incoming_frame_rate_ + 0.5f));
-}
-
-uint32_t VPMVideoDecimator::Inputframe_rate() {
- ProcessIncomingframe_rate(TickTime::MillisecondTimestamp());
- return static_cast<uint32_t>(incoming_frame_rate_ + 0.5f);
-}
-
-void VPMVideoDecimator::UpdateIncomingframe_rate() {
- int64_t now = TickTime::MillisecondTimestamp();
- if (incoming_frame_times_[0] == 0) {
- // First frame; nothing to shift yet.
- } else {
- // Shift.
- for (int i = kFrameCountHistory_size - 2; i >= 0; i--) {
- incoming_frame_times_[i+1] = incoming_frame_times_[i];
- }
- }
- incoming_frame_times_[0] = now;
- ProcessIncomingframe_rate(now);
-}
-
-void VPMVideoDecimator::ProcessIncomingframe_rate(int64_t now) {
- int32_t num = 0;
- int32_t nrOfFrames = 0;
- for (num = 1; num < (kFrameCountHistory_size - 1); num++) {
- // Don't use data older than 2 seconds.
- if (incoming_frame_times_[num] <= 0 ||
- now - incoming_frame_times_[num] > kFrameHistoryWindowMs) {
- break;
- } else {
- nrOfFrames++;
- }
- }
- if (num > 1) {
- int64_t diff = now - incoming_frame_times_[num-1];
- incoming_frame_rate_ = 1.0;
- if (diff > 0) {
- incoming_frame_rate_ = nrOfFrames * 1000.0f / static_cast<float>(diff);
- }
- } else {
- incoming_frame_rate_ = static_cast<float>(nrOfFrames);
- }
-}
-
-} // namespace webrtc
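
The estimate in ProcessIncomingframe_rate() is a sliding-window average: it counts the timestamps no older than kFrameHistoryWindowMs (2000 ms) and divides the intervals they form by the span they cover, so 31 timestamps spanning 1000 ms yield 30 * 1000 / 1000 = 30 fps. A standalone sketch of the same computation, assuming a plain millisecond clock and newest-first ordering as in |incoming_frame_times_| (names are ours):

```cpp
#include <stddef.h>
#include <stdint.h>

// Windowed frame-rate estimate: times_ms[0] is the newest sample.
float EstimateFps(const int64_t* times_ms, size_t count, int64_t now_ms) {
  size_t used = 0;
  while (used < count && times_ms[used] > 0 &&
         now_ms - times_ms[used] <= 2000) {
    ++used;  // Sample is inside the 2-second history window.
  }
  if (used < 2)
    return static_cast<float>(used);  // Too little history for a rate.
  const int64_t span_ms = now_ms - times_ms[used - 1];
  if (span_ms <= 0)
    return 1.0f;
  return (used - 1) * 1000.0f / static_cast<float>(span_ms);
}
```
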
diff --git a/webrtc/modules/video_processing/main/source/video_decimator.h b/webrtc/modules/video_processing/main/source/video_decimator.h
deleted file mode 100644
index 3d4573caf8..0000000000
--- a/webrtc/modules/video_processing/main/source/video_decimator.h
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_VIDEO_DECIMATOR_H
-#define WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_VIDEO_DECIMATOR_H
-
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-class VPMVideoDecimator {
- public:
- VPMVideoDecimator();
- ~VPMVideoDecimator();
-
- void Reset();
-
- void EnableTemporalDecimation(bool enable);
-
- void SetTargetFramerate(int frame_rate);
-
- bool DropFrame();
-
- void UpdateIncomingframe_rate();
-
- // Get Decimated Frame Rate/Dimensions.
- uint32_t Decimatedframe_rate();
-
- // Get input frame rate.
- uint32_t Inputframe_rate();
-
- private:
- void ProcessIncomingframe_rate(int64_t now);
-
- enum { kFrameCountHistory_size = 90};
- enum { kFrameHistoryWindowMs = 2000};
-
- // Temporal decimation.
- int32_t overshoot_modifier_;
- uint32_t drop_count_;
- uint32_t keep_count_;
- uint32_t target_frame_rate_;
- float incoming_frame_rate_;
- int64_t incoming_frame_times_[kFrameCountHistory_size];
- bool enable_temporal_decimation_;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_VIDEO_DECIMATOR_H
diff --git a/webrtc/modules/video_processing/main/source/video_processing_impl.cc b/webrtc/modules/video_processing/main/source/video_processing_impl.cc
deleted file mode 100644
index eaaf14f6ad..0000000000
--- a/webrtc/modules/video_processing/main/source/video_processing_impl.cc
+++ /dev/null
@@ -1,183 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-
-#include "webrtc/modules/video_processing/main/source/video_processing_impl.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/logging.h"
-
-#include <assert.h>
-
-namespace webrtc {
-
-namespace {
-void SetSubSampling(VideoProcessingModule::FrameStats* stats,
- const int32_t width,
- const int32_t height) {
- if (width * height >= 640 * 480) {
- stats->subSamplWidth = 3;
- stats->subSamplHeight = 3;
- } else if (width * height >= 352 * 288) {
- stats->subSamplWidth = 2;
- stats->subSamplHeight = 2;
- } else if (width * height >= 176 * 144) {
- stats->subSamplWidth = 1;
- stats->subSamplHeight = 1;
- } else {
- stats->subSamplWidth = 0;
- stats->subSamplHeight = 0;
- }
-}
-} // namespace
-
-VideoProcessingModule* VideoProcessingModule::Create() {
- return new VideoProcessingModuleImpl();
-}
-
-void VideoProcessingModule::Destroy(VideoProcessingModule* module) {
- if (module)
- delete static_cast<VideoProcessingModuleImpl*>(module);
-}
-
-VideoProcessingModuleImpl::VideoProcessingModuleImpl() {}
-VideoProcessingModuleImpl::~VideoProcessingModuleImpl() {}
-
-void VideoProcessingModuleImpl::Reset() {
- rtc::CritScope mutex(&mutex_);
- deflickering_.Reset();
- brightness_detection_.Reset();
- frame_pre_processor_.Reset();
-}
-
-int32_t VideoProcessingModule::GetFrameStats(FrameStats* stats,
- const VideoFrame& frame) {
- if (frame.IsZeroSize()) {
- LOG(LS_ERROR) << "Zero size frame.";
- return VPM_PARAMETER_ERROR;
- }
-
- int width = frame.width();
- int height = frame.height();
-
- ClearFrameStats(stats); // The histogram needs to be zeroed out.
- SetSubSampling(stats, width, height);
-
- const uint8_t* buffer = frame.buffer(kYPlane);
- // Compute histogram and sum of frame
- for (int i = 0; i < height; i += (1 << stats->subSamplHeight)) {
- int k = i * width;
- for (int j = 0; j < width; j += (1 << stats->subSamplWidth)) {
- stats->hist[buffer[k + j]]++;
- stats->sum += buffer[k + j];
- }
- }
-
- stats->num_pixels = (width * height) / ((1 << stats->subSamplWidth) *
- (1 << stats->subSamplHeight));
- assert(stats->num_pixels > 0);
-
- // Compute mean value of frame
- stats->mean = stats->sum / stats->num_pixels;
-
- return VPM_OK;
-}
-
-bool VideoProcessingModule::ValidFrameStats(const FrameStats& stats) {
- if (stats.num_pixels == 0) {
- LOG(LS_WARNING) << "Invalid frame stats.";
- return false;
- }
- return true;
-}
-
-void VideoProcessingModule::ClearFrameStats(FrameStats* stats) {
- stats->mean = 0;
- stats->sum = 0;
- stats->num_pixels = 0;
- stats->subSamplWidth = 0;
- stats->subSamplHeight = 0;
- memset(stats->hist, 0, sizeof(stats->hist));
-}
-
-int32_t VideoProcessingModule::Brighten(VideoFrame* frame, int delta) {
- return VideoProcessing::Brighten(frame, delta);
-}
-
-int32_t VideoProcessingModuleImpl::Deflickering(VideoFrame* frame,
- FrameStats* stats) {
- rtc::CritScope mutex(&mutex_);
- return deflickering_.ProcessFrame(frame, stats);
-}
-
-int32_t VideoProcessingModuleImpl::BrightnessDetection(
- const VideoFrame& frame,
- const FrameStats& stats) {
- rtc::CritScope mutex(&mutex_);
- return brightness_detection_.ProcessFrame(frame, stats);
-}
-
-
-void VideoProcessingModuleImpl::EnableTemporalDecimation(bool enable) {
- rtc::CritScope mutex(&mutex_);
- frame_pre_processor_.EnableTemporalDecimation(enable);
-}
-
-
-void VideoProcessingModuleImpl::SetInputFrameResampleMode(VideoFrameResampling
- resampling_mode) {
- rtc::CritScope cs(&mutex_);
- frame_pre_processor_.SetInputFrameResampleMode(resampling_mode);
-}
-
-int32_t VideoProcessingModuleImpl::SetTargetResolution(uint32_t width,
- uint32_t height,
- uint32_t frame_rate) {
- rtc::CritScope cs(&mutex_);
- return frame_pre_processor_.SetTargetResolution(width, height, frame_rate);
-}
-
-void VideoProcessingModuleImpl::SetTargetFramerate(int frame_rate) {
- rtc::CritScope cs(&mutex_);
- frame_pre_processor_.SetTargetFramerate(frame_rate);
-}
-
-uint32_t VideoProcessingModuleImpl::Decimatedframe_rate() {
- rtc::CritScope cs(&mutex_);
- return frame_pre_processor_.Decimatedframe_rate();
-}
-
-uint32_t VideoProcessingModuleImpl::DecimatedWidth() const {
- rtc::CritScope cs(&mutex_);
- return frame_pre_processor_.DecimatedWidth();
-}
-
-uint32_t VideoProcessingModuleImpl::DecimatedHeight() const {
- rtc::CritScope cs(&mutex_);
- return frame_pre_processor_.DecimatedHeight();
-}
-
-int32_t VideoProcessingModuleImpl::PreprocessFrame(
- const VideoFrame& frame,
- VideoFrame** processed_frame) {
- rtc::CritScope mutex(&mutex_);
- return frame_pre_processor_.PreprocessFrame(frame, processed_frame);
-}
-
-VideoContentMetrics* VideoProcessingModuleImpl::ContentMetrics() const {
- rtc::CritScope mutex(&mutex_);
- return frame_pre_processor_.ContentMetrics();
-}
-
-void VideoProcessingModuleImpl::EnableContentAnalysis(bool enable) {
- rtc::CritScope mutex(&mutex_);
- frame_pre_processor_.EnableContentAnalysis(enable);
-}
-
-} // namespace webrtc
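
In GetFrameStats() above, SetSubSampling() picks a power-of-two stride per dimension (3 for VGA and larger, 2 for CIF, 1 for QCIF, 0 below that), so for a CIF frame every fourth pixel in each direction is read and num_pixels = 352 * 288 / (4 * 4) = 6336. A quick arithmetic check:

```cpp
#include <cassert>

// Sub-sampling arithmetic for a CIF (352x288) frame.
int main() {
  const int width = 352;
  const int height = 288;
  const int sub_w = 2;  // Chosen by SetSubSampling() for CIF.
  const int sub_h = 2;
  const int sampled = (width * height) / ((1 << sub_w) * (1 << sub_h));
  assert(sampled == 6336);  // 101376 / 16.
  return 0;
}
```
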
diff --git a/webrtc/modules/video_processing/main/source/video_processing_impl.h b/webrtc/modules/video_processing/main/source/video_processing_impl.h
deleted file mode 100644
index fed5197f49..0000000000
--- a/webrtc/modules/video_processing/main/source/video_processing_impl.h
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULE_VIDEO_PROCESSING_IMPL_H
-#define WEBRTC_MODULE_VIDEO_PROCESSING_IMPL_H
-
-#include "webrtc/base/criticalsection.h"
-#include "webrtc/modules/video_processing/main/interface/video_processing.h"
-#include "webrtc/modules/video_processing/main/source/brighten.h"
-#include "webrtc/modules/video_processing/main/source/brightness_detection.h"
-#include "webrtc/modules/video_processing/main/source/deflickering.h"
-#include "webrtc/modules/video_processing/main/source/frame_preprocessor.h"
-
-namespace webrtc {
-class CriticalSectionWrapper;
-
-class VideoProcessingModuleImpl : public VideoProcessingModule {
- public:
- VideoProcessingModuleImpl();
- ~VideoProcessingModuleImpl() override;
-
- void Reset() override;
-
- int32_t Deflickering(VideoFrame* frame, FrameStats* stats) override;
-
- int32_t BrightnessDetection(const VideoFrame& frame,
- const FrameStats& stats) override;
-
- // Frame pre-processor functions
-
- // Enable temporal decimation
- void EnableTemporalDecimation(bool enable) override;
-
- void SetInputFrameResampleMode(VideoFrameResampling resampling_mode) override;
-
- // Enable content analysis
- void EnableContentAnalysis(bool enable) override;
-
- // Set Target Resolution: frame rate and dimension
- int32_t SetTargetResolution(uint32_t width,
- uint32_t height,
- uint32_t frame_rate) override;
-
- void SetTargetFramerate(int frame_rate) override;
-
- // Get decimated values: frame rate/dimension
- uint32_t Decimatedframe_rate() override;
- uint32_t DecimatedWidth() const override;
- uint32_t DecimatedHeight() const override;
-
- // Preprocess:
- // Pre-process incoming frame: Sample when needed and compute content
- // metrics when enabled.
- // If no resampling takes place - processed_frame is set to NULL.
- int32_t PreprocessFrame(const VideoFrame& frame,
- VideoFrame** processed_frame) override;
- VideoContentMetrics* ContentMetrics() const override;
-
- private:
- mutable rtc::CriticalSection mutex_;
- VPMDeflickering deflickering_ GUARDED_BY(mutex_);
- VPMBrightnessDetection brightness_detection_;
- VPMFramePreprocessor frame_pre_processor_;
-};
-
-}  // namespace webrtc
-
-#endif  // WEBRTC_MODULE_VIDEO_PROCESSING_IMPL_H
diff --git a/webrtc/modules/video_processing/main/test/unit_test/brightness_detection_test.cc b/webrtc/modules/video_processing/main/test/unit_test/brightness_detection_test.cc
deleted file mode 100644
index 4d0de3ac98..0000000000
--- a/webrtc/modules/video_processing/main/test/unit_test/brightness_detection_test.cc
+++ /dev/null
@@ -1,121 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/video_processing/main/interface/video_processing.h"
-#include "webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
-
-using namespace webrtc;
-
-TEST_F(VideoProcessingModuleTest, DISABLED_ON_IOS(BrightnessDetection))
-{
- uint32_t frameNum = 0;
- int32_t brightnessWarning = 0;
- uint32_t warningCount = 0;
- rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
- while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
- frame_length_)
- {
- EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_,
- height_, 0, kVideoRotation_0, &video_frame_));
- frameNum++;
- VideoProcessingModule::FrameStats stats;
- ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_));
- ASSERT_GE(brightnessWarning = vpm_->BrightnessDetection(video_frame_,
- stats), 0);
- if (brightnessWarning != VideoProcessingModule::kNoWarning)
- {
- warningCount++;
- }
- }
- ASSERT_NE(0, feof(source_file_)) << "Error reading source file";
-
- // Expect few warnings
- float warningProportion = static_cast<float>(warningCount) / frameNum * 100;
- printf("\nWarning proportions:\n");
- printf("Stock foreman: %.1f %%\n", warningProportion);
- EXPECT_LT(warningProportion, 10);
-
- rewind(source_file_);
- frameNum = 0;
- warningCount = 0;
- while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
- frame_length_ &&
- frameNum < 300)
- {
- EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_,
- height_, 0, kVideoRotation_0, &video_frame_));
- frameNum++;
-
- uint8_t* frame = video_frame_.buffer(kYPlane);
- uint32_t yTmp = 0;
- for (int yIdx = 0; yIdx < width_ * height_; yIdx++)
- {
- yTmp = frame[yIdx] << 1;
- if (yTmp > 255)
- {
- yTmp = 255;
- }
- frame[yIdx] = static_cast<uint8_t>(yTmp);
- }
-
- VideoProcessingModule::FrameStats stats;
- ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_));
- ASSERT_GE(brightnessWarning = vpm_->BrightnessDetection(video_frame_,
- stats), 0);
- EXPECT_NE(VideoProcessingModule::kDarkWarning, brightnessWarning);
- if (brightnessWarning == VideoProcessingModule::kBrightWarning)
- {
- warningCount++;
- }
- }
- ASSERT_NE(0, feof(source_file_)) << "Error reading source file";
-
- // Expect many brightness warnings
- warningProportion = static_cast<float>(warningCount) / frameNum * 100;
- printf("Bright foreman: %.1f %%\n", warningProportion);
- EXPECT_GT(warningProportion, 95);
-
- rewind(source_file_);
- frameNum = 0;
- warningCount = 0;
- while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
- frame_length_ && frameNum < 300)
- {
- EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_,
- height_, 0, kVideoRotation_0, &video_frame_));
- frameNum++;
-
- uint8_t* y_plane = video_frame_.buffer(kYPlane);
- int32_t yTmp = 0;
- for (int yIdx = 0; yIdx < width_ * height_; yIdx++)
- {
- yTmp = y_plane[yIdx] >> 1;
- y_plane[yIdx] = static_cast<uint8_t>(yTmp);
- }
-
- VideoProcessingModule::FrameStats stats;
- ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_));
- ASSERT_GE(brightnessWarning = vpm_->BrightnessDetection(video_frame_,
- stats), 0);
- EXPECT_NE(VideoProcessingModule::kBrightWarning, brightnessWarning);
- if (brightnessWarning == VideoProcessingModule::kDarkWarning)
- {
- warningCount++;
- }
- }
- ASSERT_NE(0, feof(source_file_)) << "Error reading source file";
-
- // Expect many darkness warnings
- warningProportion = static_cast<float>(warningCount) / frameNum * 100;
- printf("Dark foreman: %.1f %%\n\n", warningProportion);
- EXPECT_GT(warningProportion, 90);
-}
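
The two per-pixel transforms the test applies to the Y plane, written out as standalone helpers (names are ours): doubling with clipping to 255 drives the bright-warning pass, halving drives the dark-warning pass.

```cpp
#include <stdint.h>

// Doubling with clipping (bright-warning pass).
inline uint8_t Brighten2x(uint8_t y) {
  const uint32_t doubled = static_cast<uint32_t>(y) << 1;
  return doubled > 255 ? 255 : static_cast<uint8_t>(doubled);
}

// Halving (dark-warning pass).
inline uint8_t DarkenHalf(uint8_t y) {
  return static_cast<uint8_t>(y >> 1);
}
```
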
diff --git a/webrtc/modules/video_processing/main/test/unit_test/content_metrics_test.cc b/webrtc/modules/video_processing/main/test/unit_test/content_metrics_test.cc
deleted file mode 100644
index d9c1309d9b..0000000000
--- a/webrtc/modules/video_processing/main/test/unit_test/content_metrics_test.cc
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/video_processing/main/interface/video_processing.h"
-#include "webrtc/modules/video_processing/main/source/content_analysis.h"
-#include "webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
-
-namespace webrtc {
-
-TEST_F(VideoProcessingModuleTest, DISABLED_ON_IOS(ContentAnalysis)) {
- VPMContentAnalysis ca__c(false);
- VPMContentAnalysis ca__sse(true);
- VideoContentMetrics *_cM_c, *_cM_SSE;
-
- ca__c.Initialize(width_, height_);
- ca__sse.Initialize(width_, height_);
-
- rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
- while (fread(video_buffer.get(), 1, frame_length_, source_file_)
- == frame_length_) {
- // Using ConvertToI420 to add stride to the image.
- EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
- 0, kVideoRotation_0, &video_frame_));
- _cM_c = ca__c.ComputeContentMetrics(video_frame_);
- _cM_SSE = ca__sse.ComputeContentMetrics(video_frame_);
-
- ASSERT_EQ(_cM_c->spatial_pred_err, _cM_SSE->spatial_pred_err);
- ASSERT_EQ(_cM_c->spatial_pred_err_v, _cM_SSE->spatial_pred_err_v);
- ASSERT_EQ(_cM_c->spatial_pred_err_h, _cM_SSE->spatial_pred_err_h);
- ASSERT_EQ(_cM_c->motion_magnitude, _cM_SSE->motion_magnitude);
- }
- ASSERT_NE(0, feof(source_file_)) << "Error reading source file";
-}
-
-} // namespace webrtc
diff --git a/webrtc/modules/video_processing/main/test/unit_test/createTable.m b/webrtc/modules/video_processing/main/test/unit_test/createTable.m
deleted file mode 100644
index 2c7fb522f6..0000000000
--- a/webrtc/modules/video_processing/main/test/unit_test/createTable.m
+++ /dev/null
@@ -1,179 +0,0 @@
-% Create the color enhancement look-up table and write it to
-% file colorEnhancementTable.h. Copy the contents of that file into
-% the source file for the color enhancement function.
-
-clear
-close all
-
-
-% First, define the color enhancement in a normalized domain
-
-% Compander function is defined in three radial zones.
-% 1. From 0 to radius r0, the compander function
-% is a second-order polynomial intersecting the points (0,0)
-% and (r0, r0), and with a slope B in (0,0).
-% 2. From r0 to r1, the compander is a third-order polynomial
-% intersecting the points (r0, r0) and (r1, r1), and with the
-% same slope as the first part in the point (r0, r0) and slope
-% equal to 1 in (r1, r1).
-% 3. For radii larger than r1, the compander function is the
-% unity scale function (no scaling at all).
-
-r0=0.07; % Dead zone radius (must be > 0)
-r1=0.6; % Enhancement zone radius (must be > r0 and < 1)
-B=0.2; % initial slope of compander function (between 0 and 1)
-
-x0=linspace(0,r0).'; % zone 1
-x1=linspace(r0,r1).'; % zone 2
-x2=linspace(r1,1).'; % zone 3
-
-A=(1-B)/r0;
-f0=A*x0.^2+B*x0; % compander function in zone 1
-
-% equation system for finding second zone parameters
-M=[r0^3 r0^2 r0 1;
- 3*r0^2 2*r0 1 0;
- 3*r1^2 2*r1 1 0;
- r1^3 r1^2 r1 1];
-m=[A*r0^2+B*r0; 2*A*r0+B; 1; r1];
-% solve equations
-theta=M\m;
-
-% compander function in zone 2
-f1=[x1.^3 x1.^2 x1 ones(size(x1))]*theta;
-
-x=[x0; x1; x2];
-f=[f0; f1; x2];
-
-% plot it
-figure(1)
-plot(x,f,x,x,':')
-xlabel('Normalized radius')
-ylabel('Modified radius')
-
-
-% Now, create the look-up table in the integer color space
-[U,V]=meshgrid(0:255, 0:255); % U-V space
-U0=U;
-V0=V;
-
-% Conversion matrix from normalized YUV to RGB
-T=[1 0 1.13983; 1 -0.39465 -0.58060; 1 2.03211 0];
-Ylum=0.5;
-
-figure(2)
-Z(:,:,1)=Ylum + (U-127)/256*T(1,2) + (V-127)/256*T(1,3);
-Z(:,:,2)=Ylum + (U-127)/256*T(2,2) + (V-127)/256*T(2,3);
-Z(:,:,3)=Ylum + (U-127)/256*T(3,2) + (V-127)/256*T(3,3);
-Z=max(Z,0);
-Z=min(Z,1);
-subplot(121)
-image(Z);
-axis square
-axis off
-set(gcf,'color','k')
-
-R = sqrt((U-127).^2 + (V-127).^2);
-Rnorm = R/127;
-RnormMod = Rnorm;
-RnormMod(RnormMod==0)=1; % avoid division by zero
-
-% find indices to pixels in dead-zone (zone 1)
-ix=find(Rnorm<=r0);
-scaleMatrix = (A*Rnorm(ix).^2 + B*Rnorm(ix))./RnormMod(ix);
-U(ix)=(U(ix)-127).*scaleMatrix+127;
-V(ix)=(V(ix)-127).*scaleMatrix+127;
-
-% find indices to pixels in zone 2
-ix=find(Rnorm>r0 & Rnorm<=r1);
-scaleMatrix = (theta(1)*Rnorm(ix).^3 + theta(2)*Rnorm(ix).^2 + ...
- theta(3)*Rnorm(ix) + theta(4)) ./ RnormMod(ix);
-U(ix)=(U(ix)-127).*scaleMatrix + 127;
-V(ix)=(V(ix)-127).*scaleMatrix + 127;
-
-% round to integer values and saturate
-U=round(U);
-V=round(V);
-U=max(min(U,255),0);
-V=max(min(V,255),0);
-
-Z(:,:,1)=Ylum + (U-127)/256*T(1,2) + (V-127)/256*T(1,3);
-Z(:,:,2)=Ylum + (U-127)/256*T(2,2) + (V-127)/256*T(2,3);
-Z(:,:,3)=Ylum + (U-127)/256*T(3,2) + (V-127)/256*T(3,3);
-Z=max(Z,0);
-Z=min(Z,1);
-subplot(122)
-image(Z);
-axis square
-axis off
-
-figure(3)
-subplot(121)
-mesh(U-U0)
-subplot(122)
-mesh(V-V0)
-
-
-
-% Last, write to file
-% Write only one matrix, since U=V'
-
-fid = fopen('../out/Debug/colorEnhancementTable.h','wt');
-if fid==-1
- error('Cannot open file colorEnhancementTable.h');
-end
-
-fprintf(fid,'//colorEnhancementTable.h\n\n');
-fprintf(fid,'//Copy the constant table to the appropriate header file.\n\n');
-
-fprintf(fid,'//Table created with Matlab script createTable.m\n\n');
-fprintf(fid,'//Usage:\n');
-fprintf(fid,'// Umod=colorTable[U][V]\n');
-fprintf(fid,'// Vmod=colorTable[V][U]\n');
-
-fprintf(fid,'static unsigned char colorTable[%i][%i] = {\n', size(U,1), size(U,2));
-
-for u=1:size(U,2)
- fprintf(fid,' {%i', U(1,u));
- for v=2:size(U,1)
- fprintf(fid,', %i', U(v,u));
- end
- fprintf(fid,'}');
- if u<size(U,2)
- fprintf(fid,',');
- end
- fprintf(fid,'\n');
-end
-fprintf(fid,'};\n\n');
-fclose(fid);
-fprintf('done');
-
-
-answ=input('Create test vector (takes some time...)? y/n : ','s');
-if answ ~= 'y'
- return
-end
-
-% Also, create test vectors
-
-% Read test file foreman.yuv
-fprintf('Reading test file...')
-[y,u,v]=readYUV420file('../out/Debug/testFiles/foreman_cif.yuv',352,288);
-fprintf(' done\n');
-unew=uint8(zeros(size(u)));
-vnew=uint8(zeros(size(v)));
-
-% traverse all frames
-for k=1:size(y,3)
- fprintf('Frame %i\n', k);
- for r=1:size(u,1)
- for c=1:size(u,2)
- unew(r,c,k) = uint8(U(double(v(r,c,k))+1, double(u(r,c,k))+1));
- vnew(r,c,k) = uint8(V(double(v(r,c,k))+1, double(u(r,c,k))+1));
- end
- end
-end
-
-fprintf('\nWriting modified test file...')
-writeYUV420file('../out/Debug/foremanColorEnhanced.yuv',y,unew,vnew);
-fprintf(' done\n');
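
Restating the script's three compander zones in closed form, with r_0, r_1 and B as set above, A = (1 - B)/r_0, and theta the solution of the 4x4 boundary system (value and slope continuous at r_0, value r_1 and slope 1 at r_1):

```latex
f(r) =
\begin{cases}
A r^{2} + B r, & 0 \le r \le r_{0}, \qquad A = \dfrac{1 - B}{r_{0}},\\[4pt]
\theta_{1} r^{3} + \theta_{2} r^{2} + \theta_{3} r + \theta_{4}, & r_{0} < r \le r_{1},\\[4pt]
r, & r_{1} < r \le 1.
\end{cases}
```

Note that the zone-1 polynomial automatically satisfies f(r_0) = (1 - B) r_0 + B r_0 = r_0, which is what makes the piecewise curve continuous there.
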
diff --git a/webrtc/modules/video_processing/main/test/unit_test/deflickering_test.cc b/webrtc/modules/video_processing/main/test/unit_test/deflickering_test.cc
deleted file mode 100644
index 83d09ef486..0000000000
--- a/webrtc/modules/video_processing/main/test/unit_test/deflickering_test.cc
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <stdio.h>
-#include <stdlib.h>
-
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/video_processing/main/interface/video_processing.h"
-#include "webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
-#include "webrtc/test/testsupport/fileutils.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
-
-namespace webrtc {
-
-TEST_F(VideoProcessingModuleTest, DISABLED_ON_IOS(Deflickering))
-{
- enum { NumRuns = 30 };
- uint32_t frameNum = 0;
- const uint32_t frame_rate = 15;
-
- int64_t min_runtime = 0;
- int64_t avg_runtime = 0;
-
- // Close automatically opened Foreman.
- fclose(source_file_);
- const std::string input_file =
- webrtc::test::ResourcePath("deflicker_before_cif_short", "yuv");
- source_file_ = fopen(input_file.c_str(), "rb");
- ASSERT_TRUE(source_file_ != NULL) <<
- "Cannot read input file: " << input_file << "\n";
-
- const std::string output_file =
- webrtc::test::OutputPath() + "deflicker_output_cif_short.yuv";
- FILE* deflickerFile = fopen(output_file.c_str(), "wb");
- ASSERT_TRUE(deflickerFile != NULL) <<
- "Could not open output file: " << output_file << "\n";
-
- printf("\nRun time [us / frame]:\n");
- rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
- for (uint32_t run_idx = 0; run_idx < NumRuns; run_idx++)
- {
- TickTime t0;
- TickTime t1;
- TickInterval acc_ticks;
- uint32_t timeStamp = 1;
-
- frameNum = 0;
- while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
- frame_length_)
- {
- frameNum++;
- EXPECT_EQ(
- 0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_,
- height_, 0, kVideoRotation_0, &video_frame_));
- video_frame_.set_timestamp(timeStamp);
-
- t0 = TickTime::Now();
- VideoProcessingModule::FrameStats stats;
- ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_));
- ASSERT_EQ(0, vpm_->Deflickering(&video_frame_, &stats));
- t1 = TickTime::Now();
- acc_ticks += (t1 - t0);
-
- if (run_idx == 0)
- {
- if (PrintVideoFrame(video_frame_, deflickerFile) < 0) {
- return;
- }
- }
- timeStamp += (90000 / frame_rate);
- }
- ASSERT_NE(0, feof(source_file_)) << "Error reading source file";
-
- printf("%u\n", static_cast<int>(acc_ticks.Microseconds() / frameNum));
- if (acc_ticks.Microseconds() < min_runtime || run_idx == 0)
- {
- min_runtime = acc_ticks.Microseconds();
- }
- avg_runtime += acc_ticks.Microseconds();
-
- rewind(source_file_);
- }
- ASSERT_EQ(0, fclose(deflickerFile));
- // TODO(kjellander): Add verification of deflicker output file.
-
- printf("\nAverage run time = %d us / frame\n",
- static_cast<int>(avg_runtime / frameNum / NumRuns));
- printf("Min run time = %d us / frame\n\n",
- static_cast<int>(min_runtime / frameNum));
-}
-
-} // namespace webrtc
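
The timestamp stride in the loop above comes from the 90 kHz RTP video clock: at the test's frame rate of 15, each frame advances the timestamp by 90000 / 15 = 6000 ticks. As a helper (ours, for illustration):

```cpp
#include <stdint.h>

// 90 kHz RTP video clock: timestamp ticks to advance per frame.
inline uint32_t RtpTicksPerFrame(uint32_t frame_rate) {
  return 90000 / frame_rate;  // 6000 ticks at the test's 15 fps.
}
```
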
diff --git a/webrtc/modules/video_processing/main/test/unit_test/readYUV420file.m b/webrtc/modules/video_processing/main/test/unit_test/readYUV420file.m
deleted file mode 100644
index 03013efd3a..0000000000
--- a/webrtc/modules/video_processing/main/test/unit_test/readYUV420file.m
+++ /dev/null
@@ -1,45 +0,0 @@
-function [Y,U,V] = readYUV420file(filename, width, height)
-% [Y,U,V] = readYUV420file(filename, width, height)
-
-fid = fopen(filename,'rb');
-if fid==-1
- error(['Cannot open file ' filename]);
-end
-
-% Number of pixels per image
-nPx=width*height;
-
-% nPx bytes luminance, nPx/4 bytes U, nPx/4 bytes V
-frameSizeBytes = nPx*1.5;
-
-% determine file length in bytes
-fseek(fid,0,'eof'); % move to end of file
-fileLen=ftell(fid); % number of bytes
-fseek(fid,0,'bof'); % rewind to start
-
-% calculate number of frames
-numFrames = floor(fileLen/frameSizeBytes);
-
-Y=uint8(zeros(height,width,numFrames));
-U=uint8(zeros(height/2,width/2,numFrames));
-V=uint8(zeros(height/2,width/2,numFrames));
-
-[X,nBytes]=fread(fid, frameSizeBytes, 'uchar');
-
-for k=1:numFrames
-
- % Store luminance
- Y(:,:,k)=uint8(reshape(X(1:nPx), width, height).');
-
- % Store U channel
- U(:,:,k)=uint8(reshape(X(nPx + (1:nPx/4)), width/2, height/2).');
-
- % Store V channel
- V(:,:,k)=uint8(reshape(X(nPx + nPx/4 + (1:nPx/4)), width/2, height/2).');
-
- % Read next frame
- [X,nBytes]=fread(fid, frameSizeBytes, 'uchar');
-end
-
-
-fclose(fid);
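
The reader assumes the standard I420 layout: a full-resolution Y plane followed by quarter-resolution U and V planes, 1.5 * width * height bytes per frame. Equivalent plane offsets in C++ (a sketch assuming even dimensions):

```cpp
#include <cstddef>

// Byte offsets of the I420 planes within one frame buffer.
struct I420Offsets {
  size_t y;
  size_t u;
  size_t v;
  size_t frame_size;
};

I420Offsets ComputeI420Offsets(int width, int height) {
  I420Offsets o;
  o.y = 0;
  o.u = static_cast<size_t>(width) * height;  // Y plane: width * height bytes.
  o.v = o.u + o.u / 4;                        // U plane: a quarter of that.
  o.frame_size = o.u + o.u / 2;               // Total: 1.5 * width * height.
  return o;
}
```
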
diff --git a/webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.cc b/webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.cc
deleted file mode 100644
index 11ccc4891b..0000000000
--- a/webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.cc
+++ /dev/null
@@ -1,390 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.h"
-
-#include <string>
-
-#include <gflags/gflags.h>
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
-#include "webrtc/test/testsupport/fileutils.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
-
-namespace webrtc {
-
-namespace {
-
-// Define command line flag 'gen_files' (default value: false).
-DEFINE_bool(gen_files, false, "Output files for visual inspection.");
-
-} // namespace
-
-static void PreprocessFrameAndVerify(const VideoFrame& source,
- int target_width,
- int target_height,
- VideoProcessingModule* vpm,
- VideoFrame** out_frame);
-static void CropFrame(const uint8_t* source_data,
- int source_width,
- int source_height,
- int offset_x,
- int offset_y,
- int cropped_width,
- int cropped_height,
- VideoFrame* cropped_frame);
-// The |source_data| is cropped and scaled to |target_width| x |target_height|,
-// and then scaled back to the expected cropped size. |expected_psnr| is used to
-// verify basic quality; it is set roughly 0.05-0.1 dB below the PSNR actually
-// measured under the same conditions.
-static void TestSize(const VideoFrame& source_frame,
- const VideoFrame& cropped_source_frame,
- int target_width,
- int target_height,
- double expected_psnr,
- VideoProcessingModule* vpm);
-static bool CompareFrames(const webrtc::VideoFrame& frame1,
- const webrtc::VideoFrame& frame2);
-static void WriteProcessedFrameForVisualInspection(const VideoFrame& source,
- const VideoFrame& processed);
-
-VideoProcessingModuleTest::VideoProcessingModuleTest()
- : vpm_(NULL),
- source_file_(NULL),
- width_(352),
- half_width_((width_ + 1) / 2),
- height_(288),
- size_y_(width_ * height_),
- size_uv_(half_width_ * ((height_ + 1) / 2)),
- frame_length_(CalcBufferSize(kI420, width_, height_)) {}
-
-void VideoProcessingModuleTest::SetUp() {
- vpm_ = VideoProcessingModule::Create();
- ASSERT_TRUE(vpm_ != NULL);
-
- ASSERT_EQ(0, video_frame_.CreateEmptyFrame(width_, height_, width_,
- half_width_, half_width_));
- // Clear video frame so DrMemory/Valgrind will allow reads of the buffer.
- memset(video_frame_.buffer(kYPlane), 0, video_frame_.allocated_size(kYPlane));
- memset(video_frame_.buffer(kUPlane), 0, video_frame_.allocated_size(kUPlane));
- memset(video_frame_.buffer(kVPlane), 0, video_frame_.allocated_size(kVPlane));
- const std::string video_file =
- webrtc::test::ResourcePath("foreman_cif", "yuv");
- source_file_ = fopen(video_file.c_str(),"rb");
- ASSERT_TRUE(source_file_ != NULL) <<
- "Cannot read source file: " + video_file + "\n";
-}
-
-void VideoProcessingModuleTest::TearDown() {
- if (source_file_ != NULL) {
- ASSERT_EQ(0, fclose(source_file_));
- }
- source_file_ = NULL;
-
- if (vpm_ != NULL) {
- VideoProcessingModule::Destroy(vpm_);
- }
- vpm_ = NULL;
-}
-
-TEST_F(VideoProcessingModuleTest, DISABLED_ON_IOS(HandleNullBuffer)) {
- // TODO(mikhal/stefan): Do we need this one?
- VideoProcessingModule::FrameStats stats;
- // Video frame with unallocated buffer.
- VideoFrame videoFrame;
-
- EXPECT_EQ(-3, vpm_->GetFrameStats(&stats, videoFrame));
-
- EXPECT_EQ(-1, vpm_->Deflickering(&videoFrame, &stats));
-
- EXPECT_EQ(-3, vpm_->BrightnessDetection(videoFrame, stats));
-}
-
-TEST_F(VideoProcessingModuleTest, DISABLED_ON_IOS(HandleBadStats)) {
- VideoProcessingModule::FrameStats stats;
- rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
- ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_,
- source_file_));
- EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
- 0, kVideoRotation_0, &video_frame_));
-
- EXPECT_EQ(-1, vpm_->Deflickering(&video_frame_, &stats));
-
- EXPECT_EQ(-3, vpm_->BrightnessDetection(video_frame_, stats));
-}
-
-TEST_F(VideoProcessingModuleTest, DISABLED_ON_IOS(IdenticalResultsAfterReset)) {
- VideoFrame video_frame2;
- VideoProcessingModule::FrameStats stats;
- // Only testing non-static functions here.
- rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
- ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_,
- source_file_));
- EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
- 0, kVideoRotation_0, &video_frame_));
- ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_));
- ASSERT_EQ(0, video_frame2.CopyFrame(video_frame_));
- ASSERT_EQ(0, vpm_->Deflickering(&video_frame_, &stats));
- vpm_->Reset();
- // Retrieve frame stats again in case Deflickering() has zeroed them.
- ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame2));
- ASSERT_EQ(0, vpm_->Deflickering(&video_frame2, &stats));
- EXPECT_TRUE(CompareFrames(video_frame_, video_frame2));
-
- ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_,
- source_file_));
- EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
- 0, kVideoRotation_0, &video_frame_));
- ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_));
- video_frame2.CopyFrame(video_frame_);
- ASSERT_EQ(0, vpm_->BrightnessDetection(video_frame_, stats));
- vpm_->Reset();
- ASSERT_EQ(0, vpm_->BrightnessDetection(video_frame2, stats));
- EXPECT_TRUE(CompareFrames(video_frame_, video_frame2));
-}
-
-TEST_F(VideoProcessingModuleTest, DISABLED_ON_IOS(FrameStats)) {
- VideoProcessingModule::FrameStats stats;
- rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
- ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_,
- source_file_));
- EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
- 0, kVideoRotation_0, &video_frame_));
-
- EXPECT_FALSE(vpm_->ValidFrameStats(stats));
- EXPECT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_));
- EXPECT_TRUE(vpm_->ValidFrameStats(stats));
-
- printf("\nFrameStats\n");
- printf("mean: %u\nnum_pixels: %u\nsubSamplWidth: "
- "%u\nsumSamplHeight: %u\nsum: %u\n\n",
- static_cast<unsigned int>(stats.mean),
- static_cast<unsigned int>(stats.num_pixels),
- static_cast<unsigned int>(stats.subSamplHeight),
- static_cast<unsigned int>(stats.subSamplWidth),
- static_cast<unsigned int>(stats.sum));
-
- vpm_->ClearFrameStats(&stats);
- EXPECT_FALSE(vpm_->ValidFrameStats(stats));
-}
-
-TEST_F(VideoProcessingModuleTest, DISABLED_ON_IOS(PreprocessorLogic)) {
- // Disable temporal sampling (frame dropping).
- vpm_->EnableTemporalDecimation(false);
- int resolution = 100;
- EXPECT_EQ(VPM_OK, vpm_->SetTargetResolution(resolution, resolution, 15));
- EXPECT_EQ(VPM_OK, vpm_->SetTargetResolution(resolution, resolution, 30));
- // Disable spatial sampling.
- vpm_->SetInputFrameResampleMode(kNoRescaling);
- EXPECT_EQ(VPM_OK, vpm_->SetTargetResolution(resolution, resolution, 30));
- VideoFrame* out_frame = NULL;
- // Set rescaling => output frame != NULL.
- vpm_->SetInputFrameResampleMode(kFastRescaling);
- PreprocessFrameAndVerify(video_frame_, resolution, resolution, vpm_,
- &out_frame);
- // No rescaling => output frame == NULL.
- vpm_->SetInputFrameResampleMode(kNoRescaling);
- EXPECT_EQ(VPM_OK, vpm_->PreprocessFrame(video_frame_, &out_frame));
- EXPECT_TRUE(out_frame == NULL);
-}
-
-TEST_F(VideoProcessingModuleTest, DISABLED_ON_IOS(Resampler)) {
- enum { NumRuns = 1 };
-
- int64_t min_runtime = 0;
- int64_t total_runtime = 0;
-
- rewind(source_file_);
- ASSERT_TRUE(source_file_ != NULL) <<
- "Cannot read input file \n";
-
- // Content analysis is not needed here.
- vpm_->EnableContentAnalysis(false);
- // No temporal decimation.
- vpm_->EnableTemporalDecimation(false);
-
- // Reading test frame
- rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
- ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_,
- source_file_));
- // Using ConvertToI420 to add stride to the image.
- EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
- 0, kVideoRotation_0, &video_frame_));
- // Cropped source frame that will contain the expected visible region.
- VideoFrame cropped_source_frame;
- cropped_source_frame.CopyFrame(video_frame_);
-
- for (uint32_t run_idx = 0; run_idx < NumRuns; run_idx++) {
- // Initiate test timer.
- const TickTime time_start = TickTime::Now();
-
- // Init the sourceFrame with a timestamp.
- video_frame_.set_render_time_ms(time_start.MillisecondTimestamp());
- video_frame_.set_timestamp(time_start.MillisecondTimestamp() * 90);
-
- // Test scaling to different sizes: source is of |width|/|height| = 352/288.
- // Pure scaling:
- TestSize(video_frame_, video_frame_, width_ / 4, height_ / 4, 25.2, vpm_);
- TestSize(video_frame_, video_frame_, width_ / 2, height_ / 2, 28.1, vpm_);
- // No resampling:
- TestSize(video_frame_, video_frame_, width_, height_, -1, vpm_);
- TestSize(video_frame_, video_frame_, 2 * width_, 2 * height_, 32.2, vpm_);
-
- // Scaling and cropping. The cropped source frame is the largest center
- // aligned region that can be used from the source while preserving aspect
- // ratio.
- CropFrame(video_buffer.get(), width_, height_, 0, 56, 352, 176,
- &cropped_source_frame);
- TestSize(video_frame_, cropped_source_frame, 100, 50, 24.0, vpm_);
-
- CropFrame(video_buffer.get(), width_, height_, 0, 30, 352, 225,
- &cropped_source_frame);
- TestSize(video_frame_, cropped_source_frame, 400, 256, 31.3, vpm_);
-
- CropFrame(video_buffer.get(), width_, height_, 68, 0, 216, 288,
- &cropped_source_frame);
- TestSize(video_frame_, cropped_source_frame, 480, 640, 32.15, vpm_);
-
- CropFrame(video_buffer.get(), width_, height_, 0, 12, 352, 264,
- &cropped_source_frame);
- TestSize(video_frame_, cropped_source_frame, 960, 720, 32.2, vpm_);
-
- CropFrame(video_buffer.get(), width_, height_, 0, 44, 352, 198,
- &cropped_source_frame);
- TestSize(video_frame_, cropped_source_frame, 1280, 720, 32.15, vpm_);
-
- // Upsampling to odd size.
- CropFrame(video_buffer.get(), width_, height_, 0, 26, 352, 233,
- &cropped_source_frame);
- TestSize(video_frame_, cropped_source_frame, 501, 333, 32.05, vpm_);
- // Downsample to odd size.
- CropFrame(video_buffer.get(), width_, height_, 0, 34, 352, 219,
- &cropped_source_frame);
- TestSize(video_frame_, cropped_source_frame, 281, 175, 29.3, vpm_);
-
- // Stop timer.
- const int64_t runtime = (TickTime::Now() - time_start).Microseconds();
- if (runtime < min_runtime || run_idx == 0) {
- min_runtime = runtime;
- }
- total_runtime += runtime;
- }
-
- printf("\nAverage run time = %d us / frame\n",
- static_cast<int>(total_runtime));
- printf("Min run time = %d us / frame\n\n",
- static_cast<int>(min_runtime));
-}
-
-void PreprocessFrameAndVerify(const VideoFrame& source,
- int target_width,
- int target_height,
- VideoProcessingModule* vpm,
- VideoFrame** out_frame) {
- ASSERT_EQ(VPM_OK, vpm->SetTargetResolution(target_width, target_height, 30));
- ASSERT_EQ(VPM_OK, vpm->PreprocessFrame(source, out_frame));
-
- // If no resizing is needed, expect NULL.
- if (target_width == source.width() && target_height == source.height()) {
- EXPECT_EQ(NULL, *out_frame);
- return;
- }
-
- // Verify the resampled frame.
- EXPECT_TRUE(*out_frame != NULL);
- EXPECT_EQ(source.render_time_ms(), (*out_frame)->render_time_ms());
- EXPECT_EQ(source.timestamp(), (*out_frame)->timestamp());
- EXPECT_EQ(target_width, (*out_frame)->width());
- EXPECT_EQ(target_height, (*out_frame)->height());
-}
-
-void CropFrame(const uint8_t* source_data,
- int source_width,
- int source_height,
- int offset_x,
- int offset_y,
- int cropped_width,
- int cropped_height,
- VideoFrame* cropped_frame) {
- cropped_frame->CreateEmptyFrame(cropped_width, cropped_height, cropped_width,
- (cropped_width + 1) / 2,
- (cropped_width + 1) / 2);
- EXPECT_EQ(0,
- ConvertToI420(kI420, source_data, offset_x, offset_y, source_width,
- source_height, 0, kVideoRotation_0, cropped_frame));
-}
-
-void TestSize(const VideoFrame& source_frame,
- const VideoFrame& cropped_source_frame,
- int target_width,
- int target_height,
- double expected_psnr,
- VideoProcessingModule* vpm) {
- // Resample source_frame to out_frame.
- VideoFrame* out_frame = NULL;
- vpm->SetInputFrameResampleMode(kBox);
- PreprocessFrameAndVerify(source_frame, target_width, target_height, vpm,
- &out_frame);
- if (out_frame == NULL)
- return;
- WriteProcessedFrameForVisualInspection(source_frame, *out_frame);
-
- // Scale |resampled_source_frame| back to the source scale.
- VideoFrame resampled_source_frame;
- resampled_source_frame.CopyFrame(*out_frame);
- PreprocessFrameAndVerify(resampled_source_frame, cropped_source_frame.width(),
- cropped_source_frame.height(), vpm, &out_frame);
- WriteProcessedFrameForVisualInspection(resampled_source_frame, *out_frame);
-
- // Compute PSNR against the cropped source frame and check expectation.
- double psnr = I420PSNR(&cropped_source_frame, out_frame);
- EXPECT_GT(psnr, expected_psnr);
- printf("PSNR: %f. PSNR is between source of size %d %d, and a modified "
- "source which is scaled down/up to: %d %d, and back to source size \n",
- psnr, source_frame.width(), source_frame.height(),
- target_width, target_height);
-}
-
-bool CompareFrames(const webrtc::VideoFrame& frame1,
- const webrtc::VideoFrame& frame2) {
- for (int plane = 0; plane < webrtc::kNumOfPlanes; ++plane) {
- webrtc::PlaneType plane_type = static_cast<webrtc::PlaneType>(plane);
- int allocated_size1 = frame1.allocated_size(plane_type);
- int allocated_size2 = frame2.allocated_size(plane_type);
- if (allocated_size1 != allocated_size2)
- return false;
- const uint8_t* plane_buffer1 = frame1.buffer(plane_type);
- const uint8_t* plane_buffer2 = frame2.buffer(plane_type);
- if (memcmp(plane_buffer1, plane_buffer2, allocated_size1))
- return false;
- }
- return true;
-}
-
-void WriteProcessedFrameForVisualInspection(const VideoFrame& source,
- const VideoFrame& processed) {
- // Skip if writing to files is not enabled.
- if (!FLAGS_gen_files)
- return;
- // Write the processed frame to file for visual inspection.
- std::ostringstream filename;
- filename << webrtc::test::OutputPath() << "Resampler_from_" << source.width()
- << "x" << source.height() << "_to_" << processed.width() << "x"
- << processed.height() << "_30Hz_P420.yuv";
- std::cout << "Watch " << filename.str() << " and verify that it is okay."
- << std::endl;
- FILE* stand_alone_file = fopen(filename.str().c_str(), "wb");
- if (PrintVideoFrame(processed, stand_alone_file) < 0)
- std::cerr << "Failed to write: " << filename.str() << std::endl;
- if (stand_alone_file)
- fclose(stand_alone_file);
-}
-
-} // namespace webrtc
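
The TestSize() expectations above are plain 8-bit PSNR comparisons between the cropped source and the round-tripped frame; for reference, the metric computed by I420PSNR() follows the standard definition

```latex
\mathrm{PSNR} = 10 \log_{10}\!\frac{255^{2}}{\mathrm{MSE}},
\qquad
\mathrm{MSE} = \frac{1}{N}\sum_{i=1}^{N}\bigl(x_i - \hat{x}_i\bigr)^{2},
```

where the MSE is accumulated over the samples of the two compared frames.
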
diff --git a/webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.h b/webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.h
deleted file mode 100644
index 4a4fda41e6..0000000000
--- a/webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.h
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_TEST_UNIT_TEST_VIDEO_PROCESSING_UNITTEST_H
-#define WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_TEST_UNIT_TEST_VIDEO_PROCESSING_UNITTEST_H
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/video_processing/main/interface/video_processing.h"
-#include "webrtc/system_wrappers/include/trace.h"
-#include "webrtc/test/testsupport/fileutils.h"
-
-namespace webrtc {
-
-class VideoProcessingModuleTest : public ::testing::Test {
- protected:
- VideoProcessingModuleTest();
- virtual void SetUp();
- virtual void TearDown();
- static void SetUpTestCase() {
- Trace::CreateTrace();
- std::string trace_file = webrtc::test::OutputPath() + "VPMTrace.txt";
- ASSERT_EQ(0, Trace::SetTraceFile(trace_file.c_str()));
- }
- static void TearDownTestCase() {
- Trace::ReturnTrace();
- }
- VideoProcessingModule* vpm_;
- FILE* source_file_;
- VideoFrame video_frame_;
- const int width_;
- const int half_width_;
- const int height_;
- const int size_y_;
- const int size_uv_;
- const size_t frame_length_;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_TEST_UNIT_TEST_VIDEO_PROCESSING_UNITTEST_H
diff --git a/webrtc/modules/video_processing/main/test/unit_test/writeYUV420file.m b/webrtc/modules/video_processing/main/test/unit_test/writeYUV420file.m
deleted file mode 100644
index 69a8808338..0000000000
--- a/webrtc/modules/video_processing/main/test/unit_test/writeYUV420file.m
+++ /dev/null
@@ -1,22 +0,0 @@
-function writeYUV420file(filename, Y, U, V)
-% writeYUV420file(filename, Y, U, V)
-
-fid = fopen(filename,'wb');
-if fid==-1
- error(['Cannot open file ' filename]);
-end
-
-numFrames=size(Y,3);
-
-for k=1:numFrames
- % Write luminance
- fwrite(fid,uint8(Y(:,:,k).'), 'uchar');
-
- % Write U channel
- fwrite(fid,uint8(U(:,:,k).'), 'uchar');
-
- % Write V channel
- fwrite(fid,uint8(V(:,:,k).'), 'uchar');
-end
-
-fclose(fid);
diff --git a/webrtc/modules/video_processing/spatial_resampler.cc b/webrtc/modules/video_processing/spatial_resampler.cc
new file mode 100644
index 0000000000..cdbe0efac1
--- /dev/null
+++ b/webrtc/modules/video_processing/spatial_resampler.cc
@@ -0,0 +1,97 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_processing/spatial_resampler.h"
+
+namespace webrtc {
+
+VPMSimpleSpatialResampler::VPMSimpleSpatialResampler()
+ : resampling_mode_(kFastRescaling),
+ target_width_(0),
+ target_height_(0),
+ scaler_() {}
+
+VPMSimpleSpatialResampler::~VPMSimpleSpatialResampler() {}
+
+int32_t VPMSimpleSpatialResampler::SetTargetFrameSize(int32_t width,
+ int32_t height) {
+ if (resampling_mode_ == kNoRescaling)
+ return VPM_OK;
+
+ if (width < 1 || height < 1)
+ return VPM_PARAMETER_ERROR;
+
+ target_width_ = width;
+ target_height_ = height;
+
+ return VPM_OK;
+}
+
+void VPMSimpleSpatialResampler::SetInputFrameResampleMode(
+ VideoFrameResampling resampling_mode) {
+ resampling_mode_ = resampling_mode;
+}
+
+void VPMSimpleSpatialResampler::Reset() {
+ resampling_mode_ = kFastRescaling;
+ target_width_ = 0;
+ target_height_ = 0;
+}
+
+int32_t VPMSimpleSpatialResampler::ResampleFrame(const VideoFrame& inFrame,
+ VideoFrame* outFrame) {
+  // Don't copy if the frame remains as is.
+  if (resampling_mode_ == kNoRescaling) {
+    return VPM_OK;
+  } else if ((inFrame.width() == target_width_) &&
+             (inFrame.height() == target_height_)) {
+    // No resampling is needed: the frame is already at the target size.
+    return VPM_OK;
+  }
+
+  // Set up the scaler.
+  // TODO(mikhal/marpan): Should we allow for setting the filter mode in
+  // _scale.Set() with |resampling_mode_|?
+  int ret_val = scaler_.Set(inFrame.width(), inFrame.height(), target_width_,
+                            target_height_, kI420, kI420, kScaleBox);
+ if (ret_val < 0)
+ return ret_val;
+
+ ret_val = scaler_.Scale(inFrame, outFrame);
+
+  // Set the time parameters on the output frame.
+  // The Scale() call above resets the timestamp, so it must be set afterwards.
+ outFrame->set_timestamp(inFrame.timestamp());
+ outFrame->set_render_time_ms(inFrame.render_time_ms());
+
+ if (ret_val == 0)
+ return VPM_OK;
+ else
+ return VPM_SCALE_ERROR;
+}
+
+int32_t VPMSimpleSpatialResampler::TargetHeight() {
+ return target_height_;
+}
+
+int32_t VPMSimpleSpatialResampler::TargetWidth() {
+ return target_width_;
+}
+
+bool VPMSimpleSpatialResampler::ApplyResample(int32_t width, int32_t height) {
+  return !((width == target_width_ && height == target_height_) ||
+           resampling_mode_ == kNoRescaling);
+}
+
+} // namespace webrtc
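
For orientation, a minimal caller-side sketch of the resampler added above. This is a hypothetical helper, not part of the commit; it assumes only the names declared in this diff (VPMSimpleSpatialResampler, VideoFrame, VPM_OK, kFastRescaling):

    #include "webrtc/modules/video_processing/spatial_resampler.h"

    namespace webrtc {

    // Downscale |in| to QVGA when needed; leaves |out| untouched otherwise.
    int32_t DownscaleToQvga(const VideoFrame& in, VideoFrame* out) {
      VPMSimpleSpatialResampler resampler;
      resampler.SetInputFrameResampleMode(kFastRescaling);
      int32_t ret = resampler.SetTargetFrameSize(320, 240);
      if (ret != VPM_OK)
        return ret;
      // ApplyResample() is false if the frame is already 320x240 or
      // rescaling is disabled; ResampleFrame() would be a no-op then.
      if (!resampler.ApplyResample(in.width(), in.height()))
        return VPM_OK;
      return resampler.ResampleFrame(in, out);
    }

    }  // namespace webrtc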
diff --git a/webrtc/modules/video_processing/spatial_resampler.h b/webrtc/modules/video_processing/spatial_resampler.h
new file mode 100644
index 0000000000..51820e24e5
--- /dev/null
+++ b/webrtc/modules/video_processing/spatial_resampler.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_SPATIAL_RESAMPLER_H_
+#define WEBRTC_MODULES_VIDEO_PROCESSING_SPATIAL_RESAMPLER_H_
+
+#include "webrtc/typedefs.h"
+
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/video_processing/include/video_processing_defines.h"
+
+#include "webrtc/common_video/libyuv/include/scaler.h"
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+
+namespace webrtc {
+
+class VPMSpatialResampler {
+ public:
+ virtual ~VPMSpatialResampler() {}
+ virtual int32_t SetTargetFrameSize(int32_t width, int32_t height) = 0;
+ virtual void SetInputFrameResampleMode(
+ VideoFrameResampling resampling_mode) = 0;
+ virtual void Reset() = 0;
+ virtual int32_t ResampleFrame(const VideoFrame& inFrame,
+ VideoFrame* outFrame) = 0;
+ virtual int32_t TargetWidth() = 0;
+ virtual int32_t TargetHeight() = 0;
+ virtual bool ApplyResample(int32_t width, int32_t height) = 0;
+};
+
+class VPMSimpleSpatialResampler : public VPMSpatialResampler {
+ public:
+ VPMSimpleSpatialResampler();
+ ~VPMSimpleSpatialResampler();
+ virtual int32_t SetTargetFrameSize(int32_t width, int32_t height);
+ virtual void SetInputFrameResampleMode(VideoFrameResampling resampling_mode);
+ virtual void Reset();
+ virtual int32_t ResampleFrame(const VideoFrame& inFrame,
+ VideoFrame* outFrame);
+ virtual int32_t TargetWidth();
+ virtual int32_t TargetHeight();
+ virtual bool ApplyResample(int32_t width, int32_t height);
+
+ private:
+ VideoFrameResampling resampling_mode_;
+ int32_t target_width_;
+ int32_t target_height_;
+ Scaler scaler_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_PROCESSING_SPATIAL_RESAMPLER_H_
diff --git a/webrtc/modules/video_processing/test/brightness_detection_test.cc b/webrtc/modules/video_processing/test/brightness_detection_test.cc
new file mode 100644
index 0000000000..669bb183e5
--- /dev/null
+++ b/webrtc/modules/video_processing/test/brightness_detection_test.cc
@@ -0,0 +1,120 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/modules/video_processing/include/video_processing.h"
+#include "webrtc/modules/video_processing/test/video_processing_unittest.h"
+
+namespace webrtc {
+
+#if defined(WEBRTC_IOS)
+#define MAYBE_BrightnessDetection DISABLED_BrightnessDetection
+#else
+#define MAYBE_BrightnessDetection BrightnessDetection
+#endif
+TEST_F(VideoProcessingTest, MAYBE_BrightnessDetection) {
+ uint32_t frameNum = 0;
+ int32_t brightnessWarning = 0;
+ uint32_t warningCount = 0;
+ rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
+ while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
+ frame_length_) {
+ EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
+ 0, kVideoRotation_0, &video_frame_));
+ frameNum++;
+ VideoProcessing::FrameStats stats;
+ vp_->GetFrameStats(video_frame_, &stats);
+ EXPECT_GT(stats.num_pixels, 0u);
+ ASSERT_GE(brightnessWarning = vp_->BrightnessDetection(video_frame_, stats),
+ 0);
+ if (brightnessWarning != VideoProcessing::kNoWarning) {
+ warningCount++;
+ }
+ }
+ ASSERT_NE(0, feof(source_file_)) << "Error reading source file";
+
+ // Expect few warnings
+ float warningProportion = static_cast<float>(warningCount) / frameNum * 100;
+ printf("\nWarning proportions:\n");
+ printf("Stock foreman: %.1f %%\n", warningProportion);
+ EXPECT_LT(warningProportion, 10);
+
+ rewind(source_file_);
+ frameNum = 0;
+ warningCount = 0;
+ while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
+ frame_length_ &&
+ frameNum < 300) {
+ EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
+ 0, kVideoRotation_0, &video_frame_));
+ frameNum++;
+
+ uint8_t* frame = video_frame_.buffer(kYPlane);
+ uint32_t yTmp = 0;
+ for (int yIdx = 0; yIdx < width_ * height_; yIdx++) {
+ yTmp = frame[yIdx] << 1;
+ if (yTmp > 255) {
+ yTmp = 255;
+ }
+ frame[yIdx] = static_cast<uint8_t>(yTmp);
+ }
+
+ VideoProcessing::FrameStats stats;
+ vp_->GetFrameStats(video_frame_, &stats);
+ EXPECT_GT(stats.num_pixels, 0u);
+ ASSERT_GE(brightnessWarning = vp_->BrightnessDetection(video_frame_, stats),
+ 0);
+ EXPECT_NE(VideoProcessing::kDarkWarning, brightnessWarning);
+ if (brightnessWarning == VideoProcessing::kBrightWarning) {
+ warningCount++;
+ }
+ }
+ ASSERT_NE(0, feof(source_file_)) << "Error reading source file";
+
+ // Expect many brightness warnings
+ warningProportion = static_cast<float>(warningCount) / frameNum * 100;
+ printf("Bright foreman: %.1f %%\n", warningProportion);
+ EXPECT_GT(warningProportion, 95);
+
+ rewind(source_file_);
+ frameNum = 0;
+ warningCount = 0;
+ while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
+ frame_length_ &&
+ frameNum < 300) {
+ EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
+ 0, kVideoRotation_0, &video_frame_));
+ frameNum++;
+
+ uint8_t* y_plane = video_frame_.buffer(kYPlane);
+ int32_t yTmp = 0;
+ for (int yIdx = 0; yIdx < width_ * height_; yIdx++) {
+ yTmp = y_plane[yIdx] >> 1;
+ y_plane[yIdx] = static_cast<uint8_t>(yTmp);
+ }
+
+ VideoProcessing::FrameStats stats;
+ vp_->GetFrameStats(video_frame_, &stats);
+ EXPECT_GT(stats.num_pixels, 0u);
+ ASSERT_GE(brightnessWarning = vp_->BrightnessDetection(video_frame_, stats),
+ 0);
+ EXPECT_NE(VideoProcessing::kBrightWarning, brightnessWarning);
+ if (brightnessWarning == VideoProcessing::kDarkWarning) {
+ warningCount++;
+ }
+ }
+ ASSERT_NE(0, feof(source_file_)) << "Error reading source file";
+
+ // Expect many darkness warnings
+ warningProportion = static_cast<float>(warningCount) / frameNum * 100;
+ printf("Dark foreman: %.1f %%\n\n", warningProportion);
+ EXPECT_GT(warningProportion, 90);
+}
+} // namespace webrtc
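
The brighten/darken passes in the test above are saturating gains on the luma plane. As a standalone sketch (hypothetical helper, not in the commit), the same transform is:

    #include <algorithm>
    #include <cstdint>

    // Scale each luma sample by |num|/|den| and clamp to [0, 255].
    // A gain of 2/1 reproduces the "bright" pass above, 1/2 the "dark" pass.
    void ApplyLumaGain(uint8_t* y_plane, int num_pixels, int num, int den) {
      for (int i = 0; i < num_pixels; ++i) {
        const int v = y_plane[i] * num / den;
        y_plane[i] = static_cast<uint8_t>(std::min(255, std::max(0, v)));
      }
    }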
diff --git a/webrtc/modules/video_processing/test/content_metrics_test.cc b/webrtc/modules/video_processing/test/content_metrics_test.cc
new file mode 100644
index 0000000000..782f9cff59
--- /dev/null
+++ b/webrtc/modules/video_processing/test/content_metrics_test.cc
@@ -0,0 +1,48 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/modules/video_processing/include/video_processing.h"
+#include "webrtc/modules/video_processing/content_analysis.h"
+#include "webrtc/modules/video_processing/test/video_processing_unittest.h"
+
+namespace webrtc {
+
+#if defined(WEBRTC_IOS)
+TEST_F(VideoProcessingTest, DISABLED_ContentAnalysis) {
+#else
+TEST_F(VideoProcessingTest, ContentAnalysis) {
+#endif
+  VPMContentAnalysis ca_c(false);
+  VPMContentAnalysis ca_sse(true);
+  VideoContentMetrics* cm_c;
+  VideoContentMetrics* cm_sse;
+
+  ca_c.Initialize(width_, height_);
+  ca_sse.Initialize(width_, height_);
+
+ rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
+ while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
+ frame_length_) {
+ // Using ConvertToI420 to add stride to the image.
+ EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
+ 0, kVideoRotation_0, &video_frame_));
+    cm_c = ca_c.ComputeContentMetrics(video_frame_);
+    cm_sse = ca_sse.ComputeContentMetrics(video_frame_);
+
+    ASSERT_EQ(cm_c->spatial_pred_err, cm_sse->spatial_pred_err);
+    ASSERT_EQ(cm_c->spatial_pred_err_v, cm_sse->spatial_pred_err_v);
+    ASSERT_EQ(cm_c->spatial_pred_err_h, cm_sse->spatial_pred_err_h);
+    ASSERT_EQ(cm_c->motion_magnitude, cm_sse->motion_magnitude);
+ }
+ ASSERT_NE(0, feof(source_file_)) << "Error reading source file";
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/video_processing/test/createTable.m b/webrtc/modules/video_processing/test/createTable.m
new file mode 100644
index 0000000000..fe8777ee71
--- /dev/null
+++ b/webrtc/modules/video_processing/test/createTable.m
@@ -0,0 +1,179 @@
+% Create the color enhancement look-up table and write it to
+% file colorEnhancementTable.h. Copy the contents of that file into
+% the source file for the color enhancement function.
+
+clear
+close all
+
+
+% First, define the color enhancement in a normalized domain
+
+% Compander function is defined in three radial zones.
+% 1. From 0 to radius r0, the compander function
+% is a second-order polynomial intersecting the points (0,0)
+% and (r0, r0), and with a slope B in (0,0).
+% 2. From r0 to r1, the compander is a third-order polynomial
+% intersecting the points (r0, r0) and (r1, r1), and with the
+% same slope as the first part in the point (r0, r0) and slope
+% equal to 1 in (r1, r1).
+% 3. For radii larger than r1, the compander function is the
+% unity scale function (no scaling at all).
+
+r0=0.07; % Dead zone radius (must be > 0)
+r1=0.6; % Enhancement zone radius (must be > r0 and < 1)
+B=0.2; % initial slope of compander function (between 0 and 1)
+
+x0=linspace(0,r0).'; % zone 1
+x1=linspace(r0,r1).'; % zone 2
+x2=linspace(r1,1).'; % zone 3
+
+A=(1-B)/r0;
+f0=A*x0.^2+B*x0; % compander function in zone 1
+
+% equation system for finding second zone parameters
+M=[r0^3 r0^2 r0 1;
+ 3*r0^2 2*r0 1 0;
+ 3*r1^2 2*r1 1 0;
+ r1^3 r1^2 r1 1];
+m=[A*r0^2+B*r0; 2*A*r0+B; 1; r1];
+% solve equations
+theta=M\m;
+
+% compander function in zone 2
+f1=[x1.^3 x1.^2 x1 ones(size(x1))]*theta;
+
+x=[x0; x1; x2];
+f=[f0; f1; x2];
+
+% plot it
+figure(1)
+plot(x,f,x,x,':')
+xlabel('Normalized radius')
+ylabel('Modified radius')
+
+
+% Now, create the look-up table in the integer color space
+[U,V]=meshgrid(0:255, 0:255); % U-V space
+U0=U;
+V0=V;
+
+% Conversion matrix from normalized YUV to RGB
+T=[1 0 1.13983; 1 -0.39465 -0.58060; 1 2.03211 0];
+Ylum=0.5;
+
+figure(2)
+Z(:,:,1)=Ylum + (U-127)/256*T(1,2) + (V-127)/256*T(1,3);
+Z(:,:,2)=Ylum + (U-127)/256*T(2,2) + (V-127)/256*T(2,3);
+Z(:,:,3)=Ylum + (U-127)/256*T(3,2) + (V-127)/256*T(3,3);
+Z=max(Z,0);
+Z=min(Z,1);
+subplot(121)
+image(Z);
+axis square
+axis off
+set(gcf,'color','k')
+
+R = sqrt((U-127).^2 + (V-127).^2);
+Rnorm = R/127;
+RnormMod = Rnorm;
+RnormMod(RnormMod==0)=1; % avoid division by zero
+
+% find indices to pixels in dead-zone (zone 1)
+ix=find(Rnorm<=r0);
+scaleMatrix = (A*Rnorm(ix).^2 + B*Rnorm(ix))./RnormMod(ix);
+U(ix)=(U(ix)-127).*scaleMatrix+127;
+V(ix)=(V(ix)-127).*scaleMatrix+127;
+
+% find indices to pixels in zone 2
+ix=find(Rnorm>r0 & Rnorm<=r1);
+scaleMatrix = (theta(1)*Rnorm(ix).^3 + theta(2)*Rnorm(ix).^2 + ...
+ theta(3)*Rnorm(ix) + theta(4)) ./ RnormMod(ix);
+U(ix)=(U(ix)-127).*scaleMatrix + 127;
+V(ix)=(V(ix)-127).*scaleMatrix + 127;
+
+% round to integer values and saturate
+U=round(U);
+V=round(V);
+U=max(min(U,255),0);
+V=max(min(V,255),0);
+
+Z(:,:,1)=Ylum + (U-127)/256*T(1,2) + (V-127)/256*T(1,3);
+Z(:,:,2)=Ylum + (U-127)/256*T(2,2) + (V-127)/256*T(2,3);
+Z(:,:,3)=Ylum + (U-127)/256*T(3,2) + (V-127)/256*T(3,3);
+Z=max(Z,0);
+Z=min(Z,1);
+subplot(122)
+image(Z);
+axis square
+axis off
+
+figure(3)
+subplot(121)
+mesh(U-U0)
+subplot(122)
+mesh(V-V0)
+
+
+
+% Last, write to file
+% Write only one matrix, since U=V'
+
+fid = fopen('../out/Debug/colorEnhancementTable.h','wt');
+if fid==-1
+  error('Cannot open file colorEnhancementTable.h');
+end
+
+fprintf(fid,'//colorEnhancementTable.h\n\n');
+fprintf(fid,'//Copy the constant table to the appropriate header file.\n\n');
+
+fprintf(fid,'//Table created with Matlab script createTable.m\n\n');
+fprintf(fid,'//Usage:\n');
+fprintf(fid,'// Umod=colorTable[U][V]\n');
+fprintf(fid,'// Vmod=colorTable[V][U]\n');
+
+fprintf(fid,'static unsigned char colorTable[%i][%i] = {\n', size(U,1), size(U,2));
+
+for u=1:size(U,2)
+ fprintf(fid,' {%i', U(1,u));
+ for v=2:size(U,1)
+ fprintf(fid,', %i', U(v,u));
+ end
+ fprintf(fid,'}');
+ if u<size(U,2)
+ fprintf(fid,',');
+ end
+ fprintf(fid,'\n');
+end
+fprintf(fid,'};\n\n');
+fclose(fid);
+fprintf('done');
+
+
+answ=input('Create test vector (takes some time...)? y/n : ','s');
+if answ ~= 'y'
+ return
+end
+
+% Also, create test vectors
+
+% Read test file foreman_cif.yuv
+fprintf('Reading test file...')
+[y,u,v]=readYUV420file('../out/Debug/testFiles/foreman_cif.yuv',352,288);
+fprintf(' done\n');
+unew=uint8(zeros(size(u)));
+vnew=uint8(zeros(size(v)));
+
+% traverse all frames
+for k=1:size(y,3)
+ fprintf('Frame %i\n', k);
+ for r=1:size(u,1)
+ for c=1:size(u,2)
+ unew(r,c,k) = uint8(U(double(v(r,c,k))+1, double(u(r,c,k))+1));
+ vnew(r,c,k) = uint8(V(double(v(r,c,k))+1, double(u(r,c,k))+1));
+ end
+ end
+end
+
+fprintf('\nWriting modified test file...')
+writeYUV420file('../out/Debug/foremanColorEnhanced.yuv',y,unew,vnew);
+fprintf(' done\n');
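
The system M\m solved above pins down the zone-2 cubic f(r) = theta_1 r^3 + theta_2 r^2 + theta_3 r + theta_4 through four constraints: value and slope continuity with the zone-1 parabola at r_0, and identity value with unit slope at r_1. In LaTeX form:

    f(r_0) = A r_0^2 + B r_0, \qquad f'(r_0) = 2 A r_0 + B, \qquad
    f'(r_1) = 1, \qquad f(r_1) = r_1, \qquad A = \frac{1 - B}{r_0}.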
diff --git a/webrtc/modules/video_processing/test/deflickering_test.cc b/webrtc/modules/video_processing/test/deflickering_test.cc
new file mode 100644
index 0000000000..5410015b06
--- /dev/null
+++ b/webrtc/modules/video_processing/test/deflickering_test.cc
@@ -0,0 +1,98 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <stdlib.h>
+
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/modules/video_processing/include/video_processing.h"
+#include "webrtc/modules/video_processing/test/video_processing_unittest.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+namespace webrtc {
+
+#if defined(WEBRTC_IOS)
+TEST_F(VideoProcessingTest, DISABLED_Deflickering) {
+#else
+TEST_F(VideoProcessingTest, Deflickering) {
+#endif
+ enum { NumRuns = 30 };
+ uint32_t frameNum = 0;
+ const uint32_t frame_rate = 15;
+
+ int64_t min_runtime = 0;
+ int64_t avg_runtime = 0;
+
+ // Close automatically opened Foreman.
+ fclose(source_file_);
+ const std::string input_file =
+ webrtc::test::ResourcePath("deflicker_before_cif_short", "yuv");
+ source_file_ = fopen(input_file.c_str(), "rb");
+ ASSERT_TRUE(source_file_ != NULL) << "Cannot read input file: " << input_file
+ << "\n";
+
+ const std::string output_file =
+ webrtc::test::OutputPath() + "deflicker_output_cif_short.yuv";
+ FILE* deflickerFile = fopen(output_file.c_str(), "wb");
+ ASSERT_TRUE(deflickerFile != NULL)
+ << "Could not open output file: " << output_file << "\n";
+
+ printf("\nRun time [us / frame]:\n");
+ rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
+ for (uint32_t run_idx = 0; run_idx < NumRuns; run_idx++) {
+ TickTime t0;
+ TickTime t1;
+ TickInterval acc_ticks;
+ uint32_t timeStamp = 1;
+
+ frameNum = 0;
+ while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
+ frame_length_) {
+ frameNum++;
+ EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_,
+ height_, 0, kVideoRotation_0, &video_frame_));
+ video_frame_.set_timestamp(timeStamp);
+
+ t0 = TickTime::Now();
+ VideoProcessing::FrameStats stats;
+ vp_->GetFrameStats(video_frame_, &stats);
+ EXPECT_GT(stats.num_pixels, 0u);
+ ASSERT_EQ(0, vp_->Deflickering(&video_frame_, &stats));
+ t1 = TickTime::Now();
+ acc_ticks += (t1 - t0);
+
+ if (run_idx == 0) {
+ if (PrintVideoFrame(video_frame_, deflickerFile) < 0) {
+ return;
+ }
+ }
+ timeStamp += (90000 / frame_rate);
+ }
+ ASSERT_NE(0, feof(source_file_)) << "Error reading source file";
+
+    printf("%d\n", static_cast<int>(acc_ticks.Microseconds() / frameNum));
+ if (acc_ticks.Microseconds() < min_runtime || run_idx == 0) {
+ min_runtime = acc_ticks.Microseconds();
+ }
+ avg_runtime += acc_ticks.Microseconds();
+
+ rewind(source_file_);
+ }
+ ASSERT_EQ(0, fclose(deflickerFile));
+ // TODO(kjellander): Add verification of deflicker output file.
+
+ printf("\nAverage run time = %d us / frame\n",
+ static_cast<int>(avg_runtime / frameNum / NumRuns));
+ printf("Min run time = %d us / frame\n\n",
+ static_cast<int>(min_runtime / frameNum));
+}
+
+} // namespace webrtc
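
The timestamp bookkeeping in the loop above follows the 90 kHz RTP video clock (at 15 fps, 90000 / 15 = 6000 ticks per frame). A one-line sketch of the same arithmetic (hypothetical helper, not in the commit):

    #include <cstdint>

    // Advance an RTP timestamp by one frame interval on the 90 kHz clock.
    uint32_t NextRtpTimestamp(uint32_t timestamp, uint32_t frame_rate_fps) {
      return timestamp + 90000 / frame_rate_fps;
    }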
diff --git a/webrtc/modules/video_processing/test/denoiser_test.cc b/webrtc/modules/video_processing/test/denoiser_test.cc
new file mode 100644
index 0000000000..551a77617d
--- /dev/null
+++ b/webrtc/modules/video_processing/test/denoiser_test.cc
@@ -0,0 +1,156 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string.h>
+
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/modules/video_processing/include/video_processing.h"
+#include "webrtc/modules/video_processing/test/video_processing_unittest.h"
+#include "webrtc/modules/video_processing/video_denoiser.h"
+
+namespace webrtc {
+
+TEST_F(VideoProcessingTest, CopyMem) {
+ rtc::scoped_ptr<DenoiserFilter> df_c(DenoiserFilter::Create(false));
+ rtc::scoped_ptr<DenoiserFilter> df_sse_neon(DenoiserFilter::Create(true));
+ uint8_t src[16 * 16], dst[16 * 16];
+ for (int i = 0; i < 16; ++i) {
+ for (int j = 0; j < 16; ++j) {
+ src[i * 16 + j] = i * 16 + j;
+ }
+ }
+
+ memset(dst, 0, 8 * 8);
+ df_c->CopyMem8x8(src, 8, dst, 8);
+ EXPECT_EQ(0, memcmp(src, dst, 8 * 8));
+
+ memset(dst, 0, 16 * 16);
+ df_c->CopyMem16x16(src, 16, dst, 16);
+ EXPECT_EQ(0, memcmp(src, dst, 16 * 16));
+
+ memset(dst, 0, 8 * 8);
+  df_sse_neon->CopyMem8x8(src, 8, dst, 8);
+ EXPECT_EQ(0, memcmp(src, dst, 8 * 8));
+
+ memset(dst, 0, 16 * 16);
+ df_sse_neon->CopyMem16x16(src, 16, dst, 16);
+ EXPECT_EQ(0, memcmp(src, dst, 16 * 16));
+}
+
+TEST_F(VideoProcessingTest, Variance) {
+ rtc::scoped_ptr<DenoiserFilter> df_c(DenoiserFilter::Create(false));
+ rtc::scoped_ptr<DenoiserFilter> df_sse_neon(DenoiserFilter::Create(true));
+ uint8_t src[16 * 16], dst[16 * 16];
+ uint32_t sum = 0, sse = 0, var;
+ for (int i = 0; i < 16; ++i) {
+ for (int j = 0; j < 16; ++j) {
+ src[i * 16 + j] = i * 16 + j;
+ }
+ }
+  // Compute the reference variance of the 16x8 sub-block that Variance16x8
+  // samples (every other row, since it doubles the stride); hence i * 32 + j.
+ for (int i = 0; i < 8; ++i) {
+ for (int j = 0; j < 16; ++j) {
+ sum += (i * 32 + j);
+ sse += (i * 32 + j) * (i * 32 + j);
+ }
+ }
+ var = sse - ((sum * sum) >> 7);
+ memset(dst, 0, 16 * 16);
+ EXPECT_EQ(var, df_c->Variance16x8(src, 16, dst, 16, &sse));
+ EXPECT_EQ(var, df_sse_neon->Variance16x8(src, 16, dst, 16, &sse));
+}
+
+TEST_F(VideoProcessingTest, MbDenoise) {
+ rtc::scoped_ptr<DenoiserFilter> df_c(DenoiserFilter::Create(false));
+ rtc::scoped_ptr<DenoiserFilter> df_sse_neon(DenoiserFilter::Create(true));
+ uint8_t running_src[16 * 16], src[16 * 16], dst[16 * 16], dst_ref[16 * 16];
+
+ // Test case: |diff| <= |3 + shift_inc1|
+ for (int i = 0; i < 16; ++i) {
+ for (int j = 0; j < 16; ++j) {
+ running_src[i * 16 + j] = i * 11 + j;
+ src[i * 16 + j] = i * 11 + j + 2;
+ dst_ref[i * 16 + j] = running_src[i * 16 + j];
+ }
+ }
+ memset(dst, 0, 16 * 16);
+ df_c->MbDenoise(running_src, 16, dst, 16, src, 16, 0, 1);
+ EXPECT_EQ(0, memcmp(dst, dst_ref, 16 * 16));
+
+ // Test case: |diff| >= |4 + shift_inc1|
+ for (int i = 0; i < 16; ++i) {
+ for (int j = 0; j < 16; ++j) {
+ running_src[i * 16 + j] = i * 11 + j;
+ src[i * 16 + j] = i * 11 + j + 5;
+ dst_ref[i * 16 + j] = src[i * 16 + j] - 2;
+ }
+ }
+ memset(dst, 0, 16 * 16);
+ df_c->MbDenoise(running_src, 16, dst, 16, src, 16, 0, 1);
+ EXPECT_EQ(0, memcmp(dst, dst_ref, 16 * 16));
+ memset(dst, 0, 16 * 16);
+ df_sse_neon->MbDenoise(running_src, 16, dst, 16, src, 16, 0, 1);
+ EXPECT_EQ(0, memcmp(dst, dst_ref, 16 * 16));
+
+ // Test case: |diff| >= 8
+ for (int i = 0; i < 16; ++i) {
+ for (int j = 0; j < 16; ++j) {
+ running_src[i * 16 + j] = i * 11 + j;
+ src[i * 16 + j] = i * 11 + j + 8;
+ dst_ref[i * 16 + j] = src[i * 16 + j] - 6;
+ }
+ }
+ memset(dst, 0, 16 * 16);
+ df_c->MbDenoise(running_src, 16, dst, 16, src, 16, 0, 1);
+ EXPECT_EQ(0, memcmp(dst, dst_ref, 16 * 16));
+ memset(dst, 0, 16 * 16);
+ df_sse_neon->MbDenoise(running_src, 16, dst, 16, src, 16, 0, 1);
+ EXPECT_EQ(0, memcmp(dst, dst_ref, 16 * 16));
+
+ // Test case: |diff| > 15
+ for (int i = 0; i < 16; ++i) {
+ for (int j = 0; j < 16; ++j) {
+ running_src[i * 16 + j] = i * 11 + j;
+ src[i * 16 + j] = i * 11 + j + 16;
+ }
+ }
+ memset(dst, 0, 16 * 16);
+ DenoiserDecision decision =
+ df_c->MbDenoise(running_src, 16, dst, 16, src, 16, 0, 1);
+ EXPECT_EQ(COPY_BLOCK, decision);
+ decision = df_sse_neon->MbDenoise(running_src, 16, dst, 16, src, 16, 0, 1);
+ EXPECT_EQ(COPY_BLOCK, decision);
+}
+
+TEST_F(VideoProcessingTest, Denoiser) {
+ // Create pure C denoiser.
+ VideoDenoiser denoiser_c(false);
+ // Create SSE or NEON denoiser.
+ VideoDenoiser denoiser_sse_neon(true);
+ VideoFrame denoised_frame_c;
+ VideoFrame denoised_frame_sse_neon;
+
+ rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
+ while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
+ frame_length_) {
+ // Using ConvertToI420 to add stride to the image.
+ EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
+ 0, kVideoRotation_0, &video_frame_));
+
+ denoiser_c.DenoiseFrame(video_frame_, &denoised_frame_c);
+ denoiser_sse_neon.DenoiseFrame(video_frame_, &denoised_frame_sse_neon);
+
+ // Denoising results should be the same for C and SSE/NEON denoiser.
+    ASSERT_TRUE(denoised_frame_c.EqualsFrame(denoised_frame_sse_neon));
+ }
+ ASSERT_NE(0, feof(source_file_)) << "Error reading source file";
+}
+
+} // namespace webrtc
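
The expected value in the Variance test above follows the usual sum / sum-of-squares identity over the 128 sampled pixels: var = sse - sum^2 / 128, where the samples come from every other row of the 16x16 block because Variance16x8 doubles the stride. A reference sketch (hypothetical helper mirroring the C implementation later in this diff):

    #include <cstdint>

    uint32_t ReferenceVariance16x8(const uint8_t* a, int a_stride,
                                   const uint8_t* b, int b_stride,
                                   uint32_t* sse) {
      int64_t sum = 0;
      *sse = 0;
      for (int i = 0; i < 8; ++i) {
        for (int j = 0; j < 16; ++j) {
          const int diff = a[j] - b[j];
          sum += diff;
          *sse += diff * diff;
        }
        a += 2 * a_stride;  // Skip every other row.
        b += 2 * b_stride;
      }
      // >> 7 divides by the 16 * 8 = 128 sampled pixels.
      return *sse - static_cast<uint32_t>((sum * sum) >> 7);
    }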
diff --git a/webrtc/modules/video_processing/test/readYUV420file.m b/webrtc/modules/video_processing/test/readYUV420file.m
new file mode 100644
index 0000000000..f409820283
--- /dev/null
+++ b/webrtc/modules/video_processing/test/readYUV420file.m
@@ -0,0 +1,45 @@
+function [Y,U,V] = readYUV420file(filename, width, height)
+% [Y,U,V] = readYUV420file(filename, width, height)
+
+fid = fopen(filename,'rb');
+if fid==-1
+ error(['Cannot open file ' filename]);
+end
+
+% Number of pixels per image
+nPx=width*height;
+
+% nPx bytes luminance, nPx/4 bytes U, nPx/4 bytes V
+frameSizeBytes = nPx*1.5;
+
+% measure file length in bytes
+fseek(fid,0,'eof'); % move to end of file
+fileLen=ftell(fid); % number of bytes
+fseek(fid,0,'bof'); % rewind to start
+
+% calculate number of frames
+numFrames = floor(fileLen/frameSizeBytes);
+
+Y=uint8(zeros(height,width,numFrames));
+U=uint8(zeros(height/2,width/2,numFrames));
+V=uint8(zeros(height/2,width/2,numFrames));
+
+[X,nBytes]=fread(fid, frameSizeBytes, 'uchar');
+
+for k=1:numFrames
+
+ % Store luminance
+ Y(:,:,k)=uint8(reshape(X(1:nPx), width, height).');
+
+ % Store U channel
+ U(:,:,k)=uint8(reshape(X(nPx + (1:nPx/4)), width/2, height/2).');
+
+ % Store V channel
+ V(:,:,k)=uint8(reshape(X(nPx + nPx/4 + (1:nPx/4)), width/2, height/2).');
+
+ % Read next frame
+ [X,nBytes]=fread(fid, frameSizeBytes, 'uchar');
+end
+
+
+fclose(fid);
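
The reader above assumes packed planar I420: a width x height luma plane followed by two (width/2) x (height/2) chroma planes, 1.5 bytes per pixel in total. The same offsets in C++ (hypothetical sketch, not in the commit):

    #include <cstddef>

    // Byte offsets of the Y, U and V planes inside one packed I420 frame.
    struct I420Offsets {
      size_t y, u, v, frame_size;
    };

    I420Offsets ComputeI420Offsets(int width, int height) {
      const size_t y_size = static_cast<size_t>(width) * height;
      const size_t uv_size = y_size / 4;
      return {0, y_size, y_size + uv_size, y_size + 2 * uv_size};
    }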
diff --git a/webrtc/modules/video_processing/test/video_processing_unittest.cc b/webrtc/modules/video_processing/test/video_processing_unittest.cc
new file mode 100644
index 0000000000..2fd8fb6673
--- /dev/null
+++ b/webrtc/modules/video_processing/test/video_processing_unittest.cc
@@ -0,0 +1,415 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_processing/test/video_processing_unittest.h"
+
+#include <gflags/gflags.h>
+
+#include <string>
+
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+namespace webrtc {
+
+namespace {
+
+// Define command line flag 'gen_files' (default value: false).
+DEFINE_bool(gen_files, false, "Output files for visual inspection.");
+
+} // namespace
+
+static void PreprocessFrameAndVerify(const VideoFrame& source,
+ int target_width,
+ int target_height,
+ VideoProcessing* vpm,
+                                     const VideoFrame** out_frame);
+static void CropFrame(const uint8_t* source_data,
+ int source_width,
+ int source_height,
+ int offset_x,
+ int offset_y,
+ int cropped_width,
+ int cropped_height,
+ VideoFrame* cropped_frame);
+// The |source_data| is cropped and scaled to |target_width| x |target_height|,
+// and then scaled back to the expected cropped size. |expected_psnr| is used
+// to verify basic quality, and is set roughly 0.05-0.1 dB below the PSNR
+// actually measured under the same conditions.
+static void TestSize(const VideoFrame& source_frame,
+ const VideoFrame& cropped_source_frame,
+ int target_width,
+ int target_height,
+ double expected_psnr,
+ VideoProcessing* vpm);
+static bool CompareFrames(const webrtc::VideoFrame& frame1,
+ const webrtc::VideoFrame& frame2);
+static void WriteProcessedFrameForVisualInspection(const VideoFrame& source,
+ const VideoFrame& processed);
+
+VideoProcessingTest::VideoProcessingTest()
+ : vp_(NULL),
+ source_file_(NULL),
+ width_(352),
+ half_width_((width_ + 1) / 2),
+ height_(288),
+ size_y_(width_ * height_),
+ size_uv_(half_width_ * ((height_ + 1) / 2)),
+ frame_length_(CalcBufferSize(kI420, width_, height_)) {}
+
+void VideoProcessingTest::SetUp() {
+ vp_ = VideoProcessing::Create();
+ ASSERT_TRUE(vp_ != NULL);
+
+ ASSERT_EQ(0, video_frame_.CreateEmptyFrame(width_, height_, width_,
+ half_width_, half_width_));
+ // Clear video frame so DrMemory/Valgrind will allow reads of the buffer.
+ memset(video_frame_.buffer(kYPlane), 0, video_frame_.allocated_size(kYPlane));
+ memset(video_frame_.buffer(kUPlane), 0, video_frame_.allocated_size(kUPlane));
+ memset(video_frame_.buffer(kVPlane), 0, video_frame_.allocated_size(kVPlane));
+ const std::string video_file =
+ webrtc::test::ResourcePath("foreman_cif", "yuv");
+ source_file_ = fopen(video_file.c_str(), "rb");
+ ASSERT_TRUE(source_file_ != NULL)
+ << "Cannot read source file: " + video_file + "\n";
+}
+
+void VideoProcessingTest::TearDown() {
+ if (source_file_ != NULL) {
+ ASSERT_EQ(0, fclose(source_file_));
+ }
+ source_file_ = NULL;
+ delete vp_;
+ vp_ = NULL;
+}
+
+#if defined(WEBRTC_IOS)
+TEST_F(VideoProcessingTest, DISABLED_HandleNullBuffer) {
+#else
+TEST_F(VideoProcessingTest, HandleNullBuffer) {
+#endif
+ // TODO(mikhal/stefan): Do we need this one?
+ VideoProcessing::FrameStats stats;
+ // Video frame with unallocated buffer.
+ VideoFrame videoFrame;
+
+ vp_->GetFrameStats(videoFrame, &stats);
+ EXPECT_EQ(stats.num_pixels, 0u);
+
+ EXPECT_EQ(-1, vp_->Deflickering(&videoFrame, &stats));
+
+ EXPECT_EQ(-3, vp_->BrightnessDetection(videoFrame, stats));
+}
+
+#if defined(WEBRTC_IOS)
+TEST_F(VideoProcessingTest, DISABLED_HandleBadStats) {
+#else
+TEST_F(VideoProcessingTest, HandleBadStats) {
+#endif
+ VideoProcessing::FrameStats stats;
+ vp_->ClearFrameStats(&stats);
+ rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
+ ASSERT_EQ(frame_length_,
+ fread(video_buffer.get(), 1, frame_length_, source_file_));
+ EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
+ 0, kVideoRotation_0, &video_frame_));
+
+ EXPECT_EQ(-1, vp_->Deflickering(&video_frame_, &stats));
+
+ EXPECT_EQ(-3, vp_->BrightnessDetection(video_frame_, stats));
+}
+
+#if defined(WEBRTC_IOS)
+TEST_F(VideoProcessingTest, DISABLED_IdenticalResultsAfterReset) {
+#else
+TEST_F(VideoProcessingTest, IdenticalResultsAfterReset) {
+#endif
+ VideoFrame video_frame2;
+ VideoProcessing::FrameStats stats;
+ // Only testing non-static functions here.
+ rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
+ ASSERT_EQ(frame_length_,
+ fread(video_buffer.get(), 1, frame_length_, source_file_));
+ EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
+ 0, kVideoRotation_0, &video_frame_));
+ vp_->GetFrameStats(video_frame_, &stats);
+ EXPECT_GT(stats.num_pixels, 0u);
+ ASSERT_EQ(0, video_frame2.CopyFrame(video_frame_));
+ ASSERT_EQ(0, vp_->Deflickering(&video_frame_, &stats));
+
+ // Retrieve frame stats again in case Deflickering() has zeroed them.
+ vp_->GetFrameStats(video_frame2, &stats);
+ EXPECT_GT(stats.num_pixels, 0u);
+ ASSERT_EQ(0, vp_->Deflickering(&video_frame2, &stats));
+ EXPECT_TRUE(CompareFrames(video_frame_, video_frame2));
+
+ ASSERT_EQ(frame_length_,
+ fread(video_buffer.get(), 1, frame_length_, source_file_));
+ EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
+ 0, kVideoRotation_0, &video_frame_));
+ vp_->GetFrameStats(video_frame_, &stats);
+ EXPECT_GT(stats.num_pixels, 0u);
+ video_frame2.CopyFrame(video_frame_);
+ ASSERT_EQ(0, vp_->BrightnessDetection(video_frame_, stats));
+
+ ASSERT_EQ(0, vp_->BrightnessDetection(video_frame2, stats));
+ EXPECT_TRUE(CompareFrames(video_frame_, video_frame2));
+}
+
+#if defined(WEBRTC_IOS)
+TEST_F(VideoProcessingTest, DISABLED_FrameStats) {
+#else
+TEST_F(VideoProcessingTest, FrameStats) {
+#endif
+ VideoProcessing::FrameStats stats;
+ vp_->ClearFrameStats(&stats);
+ rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
+ ASSERT_EQ(frame_length_,
+ fread(video_buffer.get(), 1, frame_length_, source_file_));
+ EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
+ 0, kVideoRotation_0, &video_frame_));
+
+ EXPECT_FALSE(vp_->ValidFrameStats(stats));
+ vp_->GetFrameStats(video_frame_, &stats);
+ EXPECT_GT(stats.num_pixels, 0u);
+ EXPECT_TRUE(vp_->ValidFrameStats(stats));
+
+ printf("\nFrameStats\n");
+ printf("mean: %u\nnum_pixels: %u\nsubSamplFactor: %u\nsum: %u\n\n",
+ static_cast<unsigned int>(stats.mean),
+ static_cast<unsigned int>(stats.num_pixels),
+ static_cast<unsigned int>(stats.sub_sampling_factor),
+ static_cast<unsigned int>(stats.sum));
+
+ vp_->ClearFrameStats(&stats);
+ EXPECT_FALSE(vp_->ValidFrameStats(stats));
+}
+
+#if defined(WEBRTC_IOS)
+TEST_F(VideoProcessingTest, DISABLED_PreprocessorLogic) {
+#else
+TEST_F(VideoProcessingTest, PreprocessorLogic) {
+#endif
+ // Disable temporal sampling (frame dropping).
+ vp_->EnableTemporalDecimation(false);
+ int resolution = 100;
+ EXPECT_EQ(VPM_OK, vp_->SetTargetResolution(resolution, resolution, 15));
+ EXPECT_EQ(VPM_OK, vp_->SetTargetResolution(resolution, resolution, 30));
+ // Disable spatial sampling.
+ vp_->SetInputFrameResampleMode(kNoRescaling);
+ EXPECT_EQ(VPM_OK, vp_->SetTargetResolution(resolution, resolution, 30));
+  const VideoFrame* out_frame = NULL;
+  // Set rescaling => output frame != NULL.
+  vp_->SetInputFrameResampleMode(kFastRescaling);
+  PreprocessFrameAndVerify(video_frame_, resolution, resolution, vp_,
+                           &out_frame);
+  // No rescaling => the original frame is returned (still non-null).
+ vp_->SetInputFrameResampleMode(kNoRescaling);
+ EXPECT_TRUE(vp_->PreprocessFrame(video_frame_) != nullptr);
+}
+
+#if defined(WEBRTC_IOS)
+TEST_F(VideoProcessingTest, DISABLED_Resampler) {
+#else
+TEST_F(VideoProcessingTest, Resampler) {
+#endif
+ enum { NumRuns = 1 };
+
+ int64_t min_runtime = 0;
+ int64_t total_runtime = 0;
+
+  ASSERT_TRUE(source_file_ != NULL) << "Cannot read input file \n";
+  rewind(source_file_);
+
+  // Content analysis is not needed here.
+  vp_->EnableContentAnalysis(false);
+  // No temporal decimation.
+  vp_->EnableTemporalDecimation(false);
+
+  // Read one test frame.
+ rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
+ ASSERT_EQ(frame_length_,
+ fread(video_buffer.get(), 1, frame_length_, source_file_));
+ // Using ConvertToI420 to add stride to the image.
+ EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
+ 0, kVideoRotation_0, &video_frame_));
+ // Cropped source frame that will contain the expected visible region.
+ VideoFrame cropped_source_frame;
+ cropped_source_frame.CopyFrame(video_frame_);
+
+ for (uint32_t run_idx = 0; run_idx < NumRuns; run_idx++) {
+ // Initiate test timer.
+ const TickTime time_start = TickTime::Now();
+
+ // Init the sourceFrame with a timestamp.
+ video_frame_.set_render_time_ms(time_start.MillisecondTimestamp());
+ video_frame_.set_timestamp(time_start.MillisecondTimestamp() * 90);
+
+ // Test scaling to different sizes: source is of |width|/|height| = 352/288.
+ // Pure scaling:
+ TestSize(video_frame_, video_frame_, width_ / 4, height_ / 4, 25.2, vp_);
+ TestSize(video_frame_, video_frame_, width_ / 2, height_ / 2, 28.1, vp_);
+ // No resampling:
+ TestSize(video_frame_, video_frame_, width_, height_, -1, vp_);
+ TestSize(video_frame_, video_frame_, 2 * width_, 2 * height_, 32.2, vp_);
+
+ // Scaling and cropping. The cropped source frame is the largest center
+ // aligned region that can be used from the source while preserving aspect
+ // ratio.
+ CropFrame(video_buffer.get(), width_, height_, 0, 56, 352, 176,
+ &cropped_source_frame);
+ TestSize(video_frame_, cropped_source_frame, 100, 50, 24.0, vp_);
+
+ CropFrame(video_buffer.get(), width_, height_, 0, 30, 352, 225,
+ &cropped_source_frame);
+ TestSize(video_frame_, cropped_source_frame, 400, 256, 31.3, vp_);
+
+ CropFrame(video_buffer.get(), width_, height_, 68, 0, 216, 288,
+ &cropped_source_frame);
+ TestSize(video_frame_, cropped_source_frame, 480, 640, 32.15, vp_);
+
+ CropFrame(video_buffer.get(), width_, height_, 0, 12, 352, 264,
+ &cropped_source_frame);
+ TestSize(video_frame_, cropped_source_frame, 960, 720, 32.2, vp_);
+
+ CropFrame(video_buffer.get(), width_, height_, 0, 44, 352, 198,
+ &cropped_source_frame);
+ TestSize(video_frame_, cropped_source_frame, 1280, 720, 32.15, vp_);
+
+ // Upsampling to odd size.
+ CropFrame(video_buffer.get(), width_, height_, 0, 26, 352, 233,
+ &cropped_source_frame);
+ TestSize(video_frame_, cropped_source_frame, 501, 333, 32.05, vp_);
+ // Downsample to odd size.
+ CropFrame(video_buffer.get(), width_, height_, 0, 34, 352, 219,
+ &cropped_source_frame);
+ TestSize(video_frame_, cropped_source_frame, 281, 175, 29.3, vp_);
+
+ // Stop timer.
+ const int64_t runtime = (TickTime::Now() - time_start).Microseconds();
+ if (runtime < min_runtime || run_idx == 0) {
+ min_runtime = runtime;
+ }
+ total_runtime += runtime;
+ }
+
+  printf("\nAverage run time = %d us / frame\n",
+         static_cast<int>(total_runtime / NumRuns));
+ printf("Min run time = %d us / frame\n\n", static_cast<int>(min_runtime));
+}
+
+void PreprocessFrameAndVerify(const VideoFrame& source,
+                              int target_width,
+                              int target_height,
+                              VideoProcessing* vpm,
+                              const VideoFrame** out_frame) {
+  ASSERT_EQ(VPM_OK, vpm->SetTargetResolution(target_width, target_height, 30));
+  *out_frame = vpm->PreprocessFrame(source);
+  EXPECT_TRUE(*out_frame != nullptr);
+
+ // If no resizing is needed, expect the original frame.
+ if (target_width == source.width() && target_height == source.height()) {
+    EXPECT_EQ(&source, *out_frame);
+ return;
+ }
+
+ // Verify the resampled frame.
+  EXPECT_TRUE(*out_frame != NULL);
+  EXPECT_EQ(source.render_time_ms(), (*out_frame)->render_time_ms());
+  EXPECT_EQ(source.timestamp(), (*out_frame)->timestamp());
+  EXPECT_EQ(target_width, (*out_frame)->width());
+  EXPECT_EQ(target_height, (*out_frame)->height());
+}
+
+void CropFrame(const uint8_t* source_data,
+ int source_width,
+ int source_height,
+ int offset_x,
+ int offset_y,
+ int cropped_width,
+ int cropped_height,
+ VideoFrame* cropped_frame) {
+ cropped_frame->CreateEmptyFrame(cropped_width, cropped_height, cropped_width,
+ (cropped_width + 1) / 2,
+ (cropped_width + 1) / 2);
+ EXPECT_EQ(0,
+ ConvertToI420(kI420, source_data, offset_x, offset_y, source_width,
+ source_height, 0, kVideoRotation_0, cropped_frame));
+}
+
+void TestSize(const VideoFrame& source_frame,
+ const VideoFrame& cropped_source_frame,
+ int target_width,
+ int target_height,
+ double expected_psnr,
+ VideoProcessing* vpm) {
+  // Resample source_frame to out_frame.
+  const VideoFrame* out_frame = NULL;
+  vpm->SetInputFrameResampleMode(kBox);
+  PreprocessFrameAndVerify(source_frame, target_width, target_height, vpm,
+                           &out_frame);
+ if (out_frame == NULL)
+ return;
+ WriteProcessedFrameForVisualInspection(source_frame, *out_frame);
+
+ // Scale |resampled_source_frame| back to the source scale.
+ VideoFrame resampled_source_frame;
+ resampled_source_frame.CopyFrame(*out_frame);
+  PreprocessFrameAndVerify(resampled_source_frame, cropped_source_frame.width(),
+                           cropped_source_frame.height(), vpm, &out_frame);
+ WriteProcessedFrameForVisualInspection(resampled_source_frame, *out_frame);
+
+ // Compute PSNR against the cropped source frame and check expectation.
+ double psnr = I420PSNR(&cropped_source_frame, out_frame);
+ EXPECT_GT(psnr, expected_psnr);
+ printf(
+ "PSNR: %f. PSNR is between source of size %d %d, and a modified "
+ "source which is scaled down/up to: %d %d, and back to source size \n",
+ psnr, source_frame.width(), source_frame.height(), target_width,
+ target_height);
+}
+
+bool CompareFrames(const webrtc::VideoFrame& frame1,
+ const webrtc::VideoFrame& frame2) {
+ for (int plane = 0; plane < webrtc::kNumOfPlanes; plane++) {
+ webrtc::PlaneType plane_type = static_cast<webrtc::PlaneType>(plane);
+ int allocated_size1 = frame1.allocated_size(plane_type);
+ int allocated_size2 = frame2.allocated_size(plane_type);
+ if (allocated_size1 != allocated_size2)
+ return false;
+ const uint8_t* plane_buffer1 = frame1.buffer(plane_type);
+ const uint8_t* plane_buffer2 = frame2.buffer(plane_type);
+ if (memcmp(plane_buffer1, plane_buffer2, allocated_size1))
+ return false;
+ }
+ return true;
+}
+
+void WriteProcessedFrameForVisualInspection(const VideoFrame& source,
+ const VideoFrame& processed) {
+ // Skip if writing to files is not enabled.
+ if (!FLAGS_gen_files)
+ return;
+ // Write the processed frame to file for visual inspection.
+ std::ostringstream filename;
+ filename << webrtc::test::OutputPath() << "Resampler_from_" << source.width()
+ << "x" << source.height() << "_to_" << processed.width() << "x"
+ << processed.height() << "_30Hz_P420.yuv";
+ std::cout << "Watch " << filename.str() << " and verify that it is okay."
+ << std::endl;
+  FILE* stand_alone_file = fopen(filename.str().c_str(), "wb");
+  if (stand_alone_file == NULL) {
+    std::cerr << "Failed to open: " << filename.str() << std::endl;
+    return;
+  }
+  if (PrintVideoFrame(processed, stand_alone_file) < 0)
+    std::cerr << "Failed to write: " << filename.str() << std::endl;
+  fclose(stand_alone_file);
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/video_processing/test/video_processing_unittest.h b/webrtc/modules/video_processing/test/video_processing_unittest.h
new file mode 100644
index 0000000000..3433c6ca86
--- /dev/null
+++ b/webrtc/modules/video_processing/test/video_processing_unittest.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_TEST_VIDEO_PROCESSING_UNITTEST_H_
+#define WEBRTC_MODULES_VIDEO_PROCESSING_TEST_VIDEO_PROCESSING_UNITTEST_H_
+
+#include <string>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/video_processing/include/video_processing.h"
+#include "webrtc/system_wrappers/include/trace.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+namespace webrtc {
+
+class VideoProcessingTest : public ::testing::Test {
+ protected:
+ VideoProcessingTest();
+ virtual void SetUp();
+ virtual void TearDown();
+ static void SetUpTestCase() {
+ Trace::CreateTrace();
+ std::string trace_file = webrtc::test::OutputPath() + "VPMTrace.txt";
+ ASSERT_EQ(0, Trace::SetTraceFile(trace_file.c_str()));
+ }
+ static void TearDownTestCase() { Trace::ReturnTrace(); }
+ VideoProcessing* vp_;
+ FILE* source_file_;
+ VideoFrame video_frame_;
+ const int width_;
+ const int half_width_;
+ const int height_;
+ const int size_y_;
+ const int size_uv_;
+ const size_t frame_length_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_PROCESSING_TEST_VIDEO_PROCESSING_UNITTEST_H_
diff --git a/webrtc/modules/video_processing/test/writeYUV420file.m b/webrtc/modules/video_processing/test/writeYUV420file.m
new file mode 100644
index 0000000000..359445009b
--- /dev/null
+++ b/webrtc/modules/video_processing/test/writeYUV420file.m
@@ -0,0 +1,22 @@
+function writeYUV420file(filename, Y, U, V)
+% writeYUV420file(filename, Y, U, V)
+
+fid = fopen(filename,'wb');
+if fid==-1
+ error(['Cannot open file ' filename]);
+end
+
+numFrames=size(Y,3);
+
+for k=1:numFrames
+ % Write luminance
+ fwrite(fid,uint8(Y(:,:,k).'), 'uchar');
+
+ % Write U channel
+ fwrite(fid,uint8(U(:,:,k).'), 'uchar');
+
+ % Write V channel
+ fwrite(fid,uint8(V(:,:,k).'), 'uchar');
+end
+
+fclose(fid);
diff --git a/webrtc/modules/video_processing/util/denoiser_filter.cc b/webrtc/modules/video_processing/util/denoiser_filter.cc
new file mode 100644
index 0000000000..fbc2435cb5
--- /dev/null
+++ b/webrtc/modules/video_processing/util/denoiser_filter.cc
@@ -0,0 +1,54 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/checks.h"
+#include "webrtc/modules/video_processing/util/denoiser_filter.h"
+#include "webrtc/modules/video_processing/util/denoiser_filter_c.h"
+#include "webrtc/modules/video_processing/util/denoiser_filter_neon.h"
+#include "webrtc/modules/video_processing/util/denoiser_filter_sse2.h"
+#include "webrtc/system_wrappers/include/cpu_features_wrapper.h"
+
+namespace webrtc {
+
+const int kMotionMagnitudeThreshold = 8 * 3;
+const int kSumDiffThreshold = 16 * 16 * 2;
+const int kSumDiffThresholdHigh = 600;
+
+rtc::scoped_ptr<DenoiserFilter> DenoiserFilter::Create(
+ bool runtime_cpu_detection) {
+ rtc::scoped_ptr<DenoiserFilter> filter;
+
+ if (runtime_cpu_detection) {
+// If we know the minimum architecture at compile time, avoid CPU detection.
+#if defined(WEBRTC_ARCH_X86_FAMILY)
+ // x86 CPU detection required.
+ if (WebRtc_GetCPUInfo(kSSE2)) {
+ filter.reset(new DenoiserFilterSSE2());
+ } else {
+ filter.reset(new DenoiserFilterC());
+ }
+#elif defined(WEBRTC_DETECT_NEON)
+ if (WebRtc_GetCPUFeaturesARM() & kCPUFeatureNEON) {
+ filter.reset(new DenoiserFilterNEON());
+ } else {
+ filter.reset(new DenoiserFilterC());
+ }
+#else
+ filter.reset(new DenoiserFilterC());
+#endif
+ } else {
+ filter.reset(new DenoiserFilterC());
+ }
+
+ RTC_DCHECK(filter.get() != nullptr);
+ return filter;
+}
+
+} // namespace webrtc
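
A minimal sketch of how the factory above is used (hypothetical snippet, not in the commit; the tests earlier in this diff make the same calls):

    #include "webrtc/modules/video_processing/util/denoiser_filter.h"

    namespace webrtc {

    void CopyBlockExample(const uint8_t* src, uint8_t* dst, int stride) {
      // true => pick SSE2/NEON at runtime when available, else the C path.
      rtc::scoped_ptr<DenoiserFilter> filter(DenoiserFilter::Create(true));
      filter->CopyMem16x16(src, stride, dst, stride);
    }

    }  // namespace webrtc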
diff --git a/webrtc/modules/video_processing/util/denoiser_filter.h b/webrtc/modules/video_processing/util/denoiser_filter.h
new file mode 100644
index 0000000000..5d5a61c59c
--- /dev/null
+++ b/webrtc/modules/video_processing/util/denoiser_filter.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_UTIL_DENOISER_FILTER_H_
+#define WEBRTC_MODULES_VIDEO_PROCESSING_UTIL_DENOISER_FILTER_H_
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/video_processing/include/video_processing_defines.h"
+
+namespace webrtc {
+
+extern const int kMotionMagnitudeThreshold;
+extern const int kSumDiffThreshold;
+extern const int kSumDiffThresholdHigh;
+
+enum DenoiserDecision { COPY_BLOCK, FILTER_BLOCK };
+struct DenoiseMetrics {
+ uint32_t var;
+ uint32_t sad;
+ uint8_t denoise;
+ bool is_skin;
+};
+
+class DenoiserFilter {
+ public:
+ static rtc::scoped_ptr<DenoiserFilter> Create(bool runtime_cpu_detection);
+
+ virtual ~DenoiserFilter() {}
+
+ virtual void CopyMem16x16(const uint8_t* src,
+ int src_stride,
+ uint8_t* dst,
+ int dst_stride) = 0;
+ virtual void CopyMem8x8(const uint8_t* src,
+ int src_stride,
+ uint8_t* dst,
+ int dst_stride) = 0;
+ virtual uint32_t Variance16x8(const uint8_t* a,
+ int a_stride,
+ const uint8_t* b,
+ int b_stride,
+ unsigned int* sse) = 0;
+ virtual DenoiserDecision MbDenoise(uint8_t* mc_running_avg_y,
+ int mc_avg_y_stride,
+ uint8_t* running_avg_y,
+ int avg_y_stride,
+ const uint8_t* sig,
+ int sig_stride,
+ uint8_t motion_magnitude,
+ int increase_denoising) = 0;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_PROCESSING_UTIL_DENOISER_FILTER_H_
diff --git a/webrtc/modules/video_processing/util/denoiser_filter_c.cc b/webrtc/modules/video_processing/util/denoiser_filter_c.cc
new file mode 100644
index 0000000000..6323980e18
--- /dev/null
+++ b/webrtc/modules/video_processing/util/denoiser_filter_c.cc
@@ -0,0 +1,194 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdlib.h>
+#include <string.h>
+
+#include "webrtc/modules/video_processing/util/denoiser_filter_c.h"
+
+namespace webrtc {
+
+void DenoiserFilterC::CopyMem16x16(const uint8_t* src,
+ int src_stride,
+ uint8_t* dst,
+ int dst_stride) {
+ for (int i = 0; i < 16; i++) {
+ memcpy(dst, src, 16);
+ src += src_stride;
+ dst += dst_stride;
+ }
+}
+
+void DenoiserFilterC::CopyMem8x8(const uint8_t* src,
+ int src_stride,
+ uint8_t* dst,
+ int dst_stride) {
+ for (int i = 0; i < 8; i++) {
+ memcpy(dst, src, 8);
+ src += src_stride;
+ dst += dst_stride;
+ }
+}
+
+uint32_t DenoiserFilterC::Variance16x8(const uint8_t* a,
+ int a_stride,
+ const uint8_t* b,
+ int b_stride,
+ uint32_t* sse) {
+ int sum = 0;
+ *sse = 0;
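+  // Double the strides: only every other row of the 16x16 block is sampled.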
+ a_stride <<= 1;
+ b_stride <<= 1;
+
+ for (int i = 0; i < 8; i++) {
+ for (int j = 0; j < 16; j++) {
+ const int diff = a[j] - b[j];
+ sum += diff;
+ *sse += diff * diff;
+ }
+
+ a += a_stride;
+ b += b_stride;
+ }
+ return *sse - ((static_cast<int64_t>(sum) * sum) >> 7);
+}
+
+DenoiserDecision DenoiserFilterC::MbDenoise(uint8_t* mc_running_avg_y,
+ int mc_avg_y_stride,
+ uint8_t* running_avg_y,
+ int avg_y_stride,
+ const uint8_t* sig,
+ int sig_stride,
+ uint8_t motion_magnitude,
+ int increase_denoising) {
+ int sum_diff_thresh = 0;
+ int sum_diff = 0;
+ int adj_val[3] = {3, 4, 6};
+ int shift_inc1 = 0;
+ int shift_inc2 = 1;
+ int col_sum[16] = {0};
+ if (motion_magnitude <= kMotionMagnitudeThreshold) {
+ if (increase_denoising) {
+ shift_inc1 = 1;
+ shift_inc2 = 2;
+ }
+ adj_val[0] += shift_inc2;
+ adj_val[1] += shift_inc2;
+ adj_val[2] += shift_inc2;
+ }
+
+ for (int r = 0; r < 16; ++r) {
+ for (int c = 0; c < 16; ++c) {
+ int diff = 0;
+ int adjustment = 0;
+ int absdiff = 0;
+
+ diff = mc_running_avg_y[c] - sig[c];
+ absdiff = abs(diff);
+
+      // When |diff| <= |3 + shift_inc1|, use the pixel value from the
+      // last denoised frame.
+ if (absdiff <= 3 + shift_inc1) {
+ running_avg_y[c] = mc_running_avg_y[c];
+ col_sum[c] += diff;
+ } else {
+ if (absdiff >= 4 + shift_inc1 && absdiff <= 7)
+ adjustment = adj_val[0];
+ else if (absdiff >= 8 && absdiff <= 15)
+ adjustment = adj_val[1];
+ else
+ adjustment = adj_val[2];
+
+ if (diff > 0) {
+ if ((sig[c] + adjustment) > 255)
+ running_avg_y[c] = 255;
+ else
+ running_avg_y[c] = sig[c] + adjustment;
+
+ col_sum[c] += adjustment;
+ } else {
+ if ((sig[c] - adjustment) < 0)
+ running_avg_y[c] = 0;
+ else
+ running_avg_y[c] = sig[c] - adjustment;
+
+ col_sum[c] -= adjustment;
+ }
+ }
+ }
+
+ // Update pointers for next iteration.
+ sig += sig_stride;
+ mc_running_avg_y += mc_avg_y_stride;
+ running_avg_y += avg_y_stride;
+ }
+
+ for (int c = 0; c < 16; ++c) {
+ if (col_sum[c] >= 128) {
+ col_sum[c] = 127;
+ }
+ sum_diff += col_sum[c];
+ }
+
+ sum_diff_thresh = kSumDiffThreshold;
+ if (increase_denoising)
+ sum_diff_thresh = kSumDiffThresholdHigh;
+ if (abs(sum_diff) > sum_diff_thresh) {
+ int delta = ((abs(sum_diff) - sum_diff_thresh) >> 8) + 1;
+ // Only apply the adjustment for max delta up to 3.
+ if (delta < 4) {
+ sig -= sig_stride * 16;
+ mc_running_avg_y -= mc_avg_y_stride * 16;
+ running_avg_y -= avg_y_stride * 16;
+ for (int r = 0; r < 16; ++r) {
+ for (int c = 0; c < 16; ++c) {
+ int diff = mc_running_avg_y[c] - sig[c];
+ int adjustment = abs(diff);
+ if (adjustment > delta)
+ adjustment = delta;
+ if (diff > 0) {
+ // Bring denoised signal down.
+ if (running_avg_y[c] - adjustment < 0)
+ running_avg_y[c] = 0;
+ else
+ running_avg_y[c] = running_avg_y[c] - adjustment;
+ col_sum[c] -= adjustment;
+ } else if (diff < 0) {
+ // Bring denoised signal up.
+ if (running_avg_y[c] + adjustment > 255)
+ running_avg_y[c] = 255;
+ else
+ running_avg_y[c] = running_avg_y[c] + adjustment;
+ col_sum[c] += adjustment;
+ }
+ }
+ sig += sig_stride;
+ mc_running_avg_y += mc_avg_y_stride;
+ running_avg_y += avg_y_stride;
+ }
+
+ sum_diff = 0;
+ for (int c = 0; c < 16; ++c) {
+ if (col_sum[c] >= 128) {
+ col_sum[c] = 127;
+ }
+ sum_diff += col_sum[c];
+ }
+
+ if (abs(sum_diff) > sum_diff_thresh)
+ return COPY_BLOCK;
+ } else {
+ return COPY_BLOCK;
+ }
+ }
+
+ return FILTER_BLOCK;
+}
+
+} // namespace webrtc
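
The per-pixel rule implemented above, with d = mc_running_avg_y[c] - sig[c], s1 = shift_inc1 and s2 = shift_inc2 (the s2 increments are only applied when motion_magnitude <= kMotionMagnitudeThreshold), summarizes in LaTeX as:

    \mathrm{adj}(|d|) =
    \begin{cases}
      \text{copy the running average}, & |d| \le 3 + s_1 \\
      3 + s_2, & 4 + s_1 \le |d| \le 7 \\
      4 + s_2, & 8 \le |d| \le 15 \\
      6 + s_2, & |d| \ge 16
    \end{cases}

The sign of d decides whether the adjustment is added to or subtracted from sig[c]; the clamped column sums then drive the FILTER_BLOCK / COPY_BLOCK decision.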
diff --git a/webrtc/modules/video_processing/util/denoiser_filter_c.h b/webrtc/modules/video_processing/util/denoiser_filter_c.h
new file mode 100644
index 0000000000..fe46ac38ec
--- /dev/null
+++ b/webrtc/modules/video_processing/util/denoiser_filter_c.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_UTIL_DENOISER_FILTER_C_H_
+#define WEBRTC_MODULES_VIDEO_PROCESSING_UTIL_DENOISER_FILTER_C_H_
+
+#include "webrtc/modules/video_processing/util/denoiser_filter.h"
+
+namespace webrtc {
+
+class DenoiserFilterC : public DenoiserFilter {
+ public:
+ DenoiserFilterC() {}
+ void CopyMem16x16(const uint8_t* src,
+ int src_stride,
+ uint8_t* dst,
+ int dst_stride) override;
+ void CopyMem8x8(const uint8_t* src,
+ int src_stride,
+ uint8_t* dst,
+ int dst_stride) override;
+ uint32_t Variance16x8(const uint8_t* a,
+ int a_stride,
+ const uint8_t* b,
+ int b_stride,
+ unsigned int* sse) override;
+ DenoiserDecision MbDenoise(uint8_t* mc_running_avg_y,
+ int mc_avg_y_stride,
+ uint8_t* running_avg_y,
+ int avg_y_stride,
+ const uint8_t* sig,
+ int sig_stride,
+ uint8_t motion_magnitude,
+ int increase_denoising) override;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_PROCESSING_UTIL_DENOISER_FILTER_C_H_
diff --git a/webrtc/modules/video_processing/util/denoiser_filter_neon.cc b/webrtc/modules/video_processing/util/denoiser_filter_neon.cc
new file mode 100644
index 0000000000..b522bf002b
--- /dev/null
+++ b/webrtc/modules/video_processing/util/denoiser_filter_neon.cc
@@ -0,0 +1,283 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <arm_neon.h>
+
+#include "webrtc/modules/video_processing/util/denoiser_filter_neon.h"
+
+namespace webrtc {
+
+static int HorizontalAddS16x8(const int16x8_t v_16x8) {
+ const int32x4_t a = vpaddlq_s16(v_16x8);
+ const int64x2_t b = vpaddlq_s32(a);
+ const int32x2_t c = vadd_s32(vreinterpret_s32_s64(vget_low_s64(b)),
+ vreinterpret_s32_s64(vget_high_s64(b)));
+ return vget_lane_s32(c, 0);
+}
+
+static int HorizontalAddS32x4(const int32x4_t v_32x4) {
+ const int64x2_t b = vpaddlq_s32(v_32x4);
+ const int32x2_t c = vadd_s32(vreinterpret_s32_s64(vget_low_s64(b)),
+ vreinterpret_s32_s64(vget_high_s64(b)));
+ return vget_lane_s32(c, 0);
+}
+
+static void VarianceNeonW8(const uint8_t* a,
+ int a_stride,
+ const uint8_t* b,
+ int b_stride,
+ int w,
+ int h,
+ uint32_t* sse,
+ int64_t* sum) {
+ int16x8_t v_sum = vdupq_n_s16(0);
+ int32x4_t v_sse_lo = vdupq_n_s32(0);
+ int32x4_t v_sse_hi = vdupq_n_s32(0);
+
+ for (int i = 0; i < h; ++i) {
+ for (int j = 0; j < w; j += 8) {
+ const uint8x8_t v_a = vld1_u8(&a[j]);
+ const uint8x8_t v_b = vld1_u8(&b[j]);
+ const uint16x8_t v_diff = vsubl_u8(v_a, v_b);
+ const int16x8_t sv_diff = vreinterpretq_s16_u16(v_diff);
+ v_sum = vaddq_s16(v_sum, sv_diff);
+ v_sse_lo =
+ vmlal_s16(v_sse_lo, vget_low_s16(sv_diff), vget_low_s16(sv_diff));
+ v_sse_hi =
+ vmlal_s16(v_sse_hi, vget_high_s16(sv_diff), vget_high_s16(sv_diff));
+ }
+ a += a_stride;
+ b += b_stride;
+ }
+
+ *sum = HorizontalAddS16x8(v_sum);
+ *sse =
+ static_cast<uint32_t>(HorizontalAddS32x4(vaddq_s32(v_sse_lo, v_sse_hi)));
+}
+
+void DenoiserFilterNEON::CopyMem16x16(const uint8_t* src,
+ int src_stride,
+ uint8_t* dst,
+ int dst_stride) {
+ uint8x16_t qtmp;
+ for (int r = 0; r < 16; r++) {
+ qtmp = vld1q_u8(src);
+ vst1q_u8(dst, qtmp);
+ src += src_stride;
+ dst += dst_stride;
+ }
+}
+
+void DenoiserFilterNEON::CopyMem8x8(const uint8_t* src,
+ int src_stride,
+ uint8_t* dst,
+ int dst_stride) {
+ uint8x8_t vtmp;
+
+ for (int r = 0; r < 8; r++) {
+ vtmp = vld1_u8(src);
+ vst1_u8(dst, vtmp);
+ src += src_stride;
+ dst += dst_stride;
+ }
+}
+
+uint32_t DenoiserFilterNEON::Variance16x8(const uint8_t* a,
+ int a_stride,
+ const uint8_t* b,
+ int b_stride,
+ uint32_t* sse) {
+ int64_t sum = 0;
+ VarianceNeonW8(a, a_stride << 1, b, b_stride << 1, 16, 8, sse, &sum);
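+  // The strides are doubled, so 16x8 pixels are sampled from every other
+  // row of the 16x16 block; 16 * 8 = 128 = 2^7, hence the sum^2 >> 7 term
+  // (N times the variance: SSE - sum^2 / N).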
+ return *sse - ((sum * sum) >> 7);
+}
+
+DenoiserDecision DenoiserFilterNEON::MbDenoise(uint8_t* mc_running_avg_y,
+ int mc_running_avg_y_stride,
+ uint8_t* running_avg_y,
+ int running_avg_y_stride,
+ const uint8_t* sig,
+ int sig_stride,
+ uint8_t motion_magnitude,
+ int increase_denoising) {
+  // If motion_magnitude is small, make the denoiser more aggressive by
+  // increasing the adjustment for each level: the level 1 adjustment is
+  // increased while the deltas between levels stay the same.
+ int shift_inc =
+ (increase_denoising && motion_magnitude <= kMotionMagnitudeThreshold) ? 1
+ : 0;
+ const uint8x16_t v_level1_adjustment = vmovq_n_u8(
+ (motion_magnitude <= kMotionMagnitudeThreshold) ? 4 + shift_inc : 3);
+ const uint8x16_t v_delta_level_1_and_2 = vdupq_n_u8(1);
+ const uint8x16_t v_delta_level_2_and_3 = vdupq_n_u8(2);
+ const uint8x16_t v_level1_threshold = vmovq_n_u8(4 + shift_inc);
+ const uint8x16_t v_level2_threshold = vdupq_n_u8(8);
+ const uint8x16_t v_level3_threshold = vdupq_n_u8(16);
+ int64x2_t v_sum_diff_total = vdupq_n_s64(0);
+
+ // Go over lines.
+ for (int r = 0; r < 16; ++r) {
+ // Load inputs.
+ const uint8x16_t v_sig = vld1q_u8(sig);
+ const uint8x16_t v_mc_running_avg_y = vld1q_u8(mc_running_avg_y);
+
+ // Calculate absolute difference and sign masks.
+ const uint8x16_t v_abs_diff = vabdq_u8(v_sig, v_mc_running_avg_y);
+ const uint8x16_t v_diff_pos_mask = vcltq_u8(v_sig, v_mc_running_avg_y);
+ const uint8x16_t v_diff_neg_mask = vcgtq_u8(v_sig, v_mc_running_avg_y);
+
+    // Figure out which level the absolute difference puts us in.
+ const uint8x16_t v_level1_mask = vcleq_u8(v_level1_threshold, v_abs_diff);
+ const uint8x16_t v_level2_mask = vcleq_u8(v_level2_threshold, v_abs_diff);
+ const uint8x16_t v_level3_mask = vcleq_u8(v_level3_threshold, v_abs_diff);
+
+ // Calculate absolute adjustments for level 1, 2 and 3.
+ const uint8x16_t v_level2_adjustment =
+ vandq_u8(v_level2_mask, v_delta_level_1_and_2);
+ const uint8x16_t v_level3_adjustment =
+ vandq_u8(v_level3_mask, v_delta_level_2_and_3);
+ const uint8x16_t v_level1and2_adjustment =
+ vaddq_u8(v_level1_adjustment, v_level2_adjustment);
+ const uint8x16_t v_level1and2and3_adjustment =
+ vaddq_u8(v_level1and2_adjustment, v_level3_adjustment);
+
+    // Select the absolute adjustment: the absolute difference itself in
+    // level 0, otherwise the combined value for levels 1, 2 and 3.
+ const uint8x16_t v_abs_adjustment =
+ vbslq_u8(v_level1_mask, v_level1and2and3_adjustment, v_abs_diff);
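+    // Example (small motion_magnitude, shift_inc == 0): an absolute diff
+    // of 10 passes the level 1 and level 2 thresholds, giving 4 + 1 = 5;
+    // a diff of 20 passes all three, giving 4 + 1 + 2 = 7; a diff of 3
+    // stays in level 0 and the diff itself is used.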
+
+ // Calculate positive and negative adjustments. Apply them to the signal
+ // and accumulate them. Adjustments are less than eight and the maximum
+ // sum of them (7 * 16) can fit in a signed char.
+ const uint8x16_t v_pos_adjustment =
+ vandq_u8(v_diff_pos_mask, v_abs_adjustment);
+ const uint8x16_t v_neg_adjustment =
+ vandq_u8(v_diff_neg_mask, v_abs_adjustment);
+
+ uint8x16_t v_running_avg_y = vqaddq_u8(v_sig, v_pos_adjustment);
+ v_running_avg_y = vqsubq_u8(v_running_avg_y, v_neg_adjustment);
+
+ // Store results.
+ vst1q_u8(running_avg_y, v_running_avg_y);
+
+ // Sum all the accumulators to have the sum of all pixel differences
+ // for this macroblock.
+ {
+ const int8x16_t v_sum_diff =
+ vqsubq_s8(vreinterpretq_s8_u8(v_pos_adjustment),
+ vreinterpretq_s8_u8(v_neg_adjustment));
+ const int16x8_t fe_dc_ba_98_76_54_32_10 = vpaddlq_s8(v_sum_diff);
+ const int32x4_t fedc_ba98_7654_3210 =
+ vpaddlq_s16(fe_dc_ba_98_76_54_32_10);
+ const int64x2_t fedcba98_76543210 = vpaddlq_s32(fedc_ba98_7654_3210);
+
+ v_sum_diff_total = vqaddq_s64(v_sum_diff_total, fedcba98_76543210);
+ }
+
+ // Update pointers for next iteration.
+ sig += sig_stride;
+ mc_running_avg_y += mc_running_avg_y_stride;
+ running_avg_y += running_avg_y_stride;
+ }
+
+  // Too many adjustments => copy the block.
+ {
+ int64x1_t x = vqadd_s64(vget_high_s64(v_sum_diff_total),
+ vget_low_s64(v_sum_diff_total));
+ int sum_diff = vget_lane_s32(vabs_s32(vreinterpret_s32_s64(x)), 0);
+ int sum_diff_thresh = kSumDiffThreshold;
+
+ if (increase_denoising)
+ sum_diff_thresh = kSumDiffThresholdHigh;
+ if (sum_diff > sum_diff_thresh) {
+      // Before returning to copy the block (i.e., apply no denoising),
+      // check if we can still apply some (weaker) temporal filtering to
+      // this block, which would otherwise not be denoised at all. The
+      // simplest option is to apply an additional adjustment to
+      // running_avg_y to bring it closer to sig. The adjustment is capped
+      // by a maximum delta, and chosen such that in most cases the
+      // resulting sum_diff will be within the acceptable range given by
+      // sum_diff_thresh.
+
+ // The delta is set by the excess of absolute pixel diff over the
+ // threshold.
+ int delta = ((sum_diff - sum_diff_thresh) >> 8) + 1;
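+      // E.g., an excess of 300 over the threshold gives
+      // delta = (300 >> 8) + 1 = 2.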
+ // Only apply the adjustment for max delta up to 3.
+ if (delta < 4) {
+ const uint8x16_t k_delta = vmovq_n_u8(delta);
+ sig -= sig_stride * 16;
+ mc_running_avg_y -= mc_running_avg_y_stride * 16;
+ running_avg_y -= running_avg_y_stride * 16;
+ for (int r = 0; r < 16; ++r) {
+ uint8x16_t v_running_avg_y = vld1q_u8(running_avg_y);
+ const uint8x16_t v_sig = vld1q_u8(sig);
+ const uint8x16_t v_mc_running_avg_y = vld1q_u8(mc_running_avg_y);
+
+ // Calculate absolute difference and sign masks.
+ const uint8x16_t v_abs_diff = vabdq_u8(v_sig, v_mc_running_avg_y);
+ const uint8x16_t v_diff_pos_mask =
+ vcltq_u8(v_sig, v_mc_running_avg_y);
+ const uint8x16_t v_diff_neg_mask =
+ vcgtq_u8(v_sig, v_mc_running_avg_y);
+ // Clamp absolute difference to delta to get the adjustment.
+ const uint8x16_t v_abs_adjustment = vminq_u8(v_abs_diff, (k_delta));
+
+ const uint8x16_t v_pos_adjustment =
+ vandq_u8(v_diff_pos_mask, v_abs_adjustment);
+ const uint8x16_t v_neg_adjustment =
+ vandq_u8(v_diff_neg_mask, v_abs_adjustment);
+
+ v_running_avg_y = vqsubq_u8(v_running_avg_y, v_pos_adjustment);
+ v_running_avg_y = vqaddq_u8(v_running_avg_y, v_neg_adjustment);
+
+ // Store results.
+ vst1q_u8(running_avg_y, v_running_avg_y);
+
+ {
+ const int8x16_t v_sum_diff =
+ vqsubq_s8(vreinterpretq_s8_u8(v_neg_adjustment),
+ vreinterpretq_s8_u8(v_pos_adjustment));
+
+ const int16x8_t fe_dc_ba_98_76_54_32_10 = vpaddlq_s8(v_sum_diff);
+ const int32x4_t fedc_ba98_7654_3210 =
+ vpaddlq_s16(fe_dc_ba_98_76_54_32_10);
+ const int64x2_t fedcba98_76543210 =
+ vpaddlq_s32(fedc_ba98_7654_3210);
+
+ v_sum_diff_total = vqaddq_s64(v_sum_diff_total, fedcba98_76543210);
+ }
+ // Update pointers for next iteration.
+ sig += sig_stride;
+ mc_running_avg_y += mc_running_avg_y_stride;
+ running_avg_y += running_avg_y_stride;
+ }
+ {
+ // Update the sum of all pixel differences of this MB.
+ x = vqadd_s64(vget_high_s64(v_sum_diff_total),
+ vget_low_s64(v_sum_diff_total));
+ sum_diff = vget_lane_s32(vabs_s32(vreinterpret_s32_s64(x)), 0);
+
+ if (sum_diff > sum_diff_thresh) {
+ return COPY_BLOCK;
+ }
+ }
+ } else {
+ return COPY_BLOCK;
+ }
+ }
+ }
+
+ // Tell above level that block was filtered.
+ running_avg_y -= running_avg_y_stride * 16;
+ sig -= sig_stride * 16;
+
+ return FILTER_BLOCK;
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/video_processing/util/denoiser_filter_neon.h b/webrtc/modules/video_processing/util/denoiser_filter_neon.h
new file mode 100644
index 0000000000..bc87ba788e
--- /dev/null
+++ b/webrtc/modules/video_processing/util/denoiser_filter_neon.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_UTIL_DENOISER_FILTER_NEON_H_
+#define WEBRTC_MODULES_VIDEO_PROCESSING_UTIL_DENOISER_FILTER_NEON_H_
+
+#include "webrtc/modules/video_processing/util/denoiser_filter.h"
+
+namespace webrtc {
+
+class DenoiserFilterNEON : public DenoiserFilter {
+ public:
+ DenoiserFilterNEON() {}
+ void CopyMem16x16(const uint8_t* src,
+ int src_stride,
+ uint8_t* dst,
+ int dst_stride) override;
+ void CopyMem8x8(const uint8_t* src,
+ int src_stride,
+ uint8_t* dst,
+ int dst_stride) override;
+ uint32_t Variance16x8(const uint8_t* a,
+ int a_stride,
+ const uint8_t* b,
+ int b_stride,
+ unsigned int* sse) override;
+ DenoiserDecision MbDenoise(uint8_t* mc_running_avg_y,
+ int mc_avg_y_stride,
+ uint8_t* running_avg_y,
+ int avg_y_stride,
+ const uint8_t* sig,
+ int sig_stride,
+ uint8_t motion_magnitude,
+ int increase_denoising) override;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_PROCESSING_UTIL_DENOISER_FILTER_NEON_H_
diff --git a/webrtc/modules/video_processing/util/denoiser_filter_sse2.cc b/webrtc/modules/video_processing/util/denoiser_filter_sse2.cc
new file mode 100644
index 0000000000..903d7b1ec6
--- /dev/null
+++ b/webrtc/modules/video_processing/util/denoiser_filter_sse2.cc
@@ -0,0 +1,280 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <emmintrin.h>
+
+#include "webrtc/modules/video_processing/util/denoiser_filter_sse2.h"
+
+namespace webrtc {
+
+static void Get8x8varSse2(const uint8_t* src,
+ int src_stride,
+ const uint8_t* ref,
+ int ref_stride,
+ unsigned int* sse,
+ int* sum) {
+ const __m128i zero = _mm_setzero_si128();
+ __m128i vsum = _mm_setzero_si128();
+ __m128i vsse = _mm_setzero_si128();
+
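+  // Two rows per iteration: _mm_madd_epi16 multiplies the 16-bit diffs
+  // pairwise and adds adjacent products into 32-bit lanes for the SSE.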
+ for (int i = 0; i < 8; i += 2) {
+ const __m128i src0 = _mm_unpacklo_epi8(
+ _mm_loadl_epi64((const __m128i*)(src + i * src_stride)), zero);
+ const __m128i ref0 = _mm_unpacklo_epi8(
+ _mm_loadl_epi64((const __m128i*)(ref + i * ref_stride)), zero);
+ const __m128i diff0 = _mm_sub_epi16(src0, ref0);
+
+ const __m128i src1 = _mm_unpacklo_epi8(
+ _mm_loadl_epi64((const __m128i*)(src + (i + 1) * src_stride)), zero);
+ const __m128i ref1 = _mm_unpacklo_epi8(
+ _mm_loadl_epi64((const __m128i*)(ref + (i + 1) * ref_stride)), zero);
+ const __m128i diff1 = _mm_sub_epi16(src1, ref1);
+
+ vsum = _mm_add_epi16(vsum, diff0);
+ vsum = _mm_add_epi16(vsum, diff1);
+ vsse = _mm_add_epi32(vsse, _mm_madd_epi16(diff0, diff0));
+ vsse = _mm_add_epi32(vsse, _mm_madd_epi16(diff1, diff1));
+ }
+
+ // sum
+ vsum = _mm_add_epi16(vsum, _mm_srli_si128(vsum, 8));
+ vsum = _mm_add_epi16(vsum, _mm_srli_si128(vsum, 4));
+ vsum = _mm_add_epi16(vsum, _mm_srli_si128(vsum, 2));
+ *sum = static_cast<int16_t>(_mm_extract_epi16(vsum, 0));
+
+ // sse
+ vsse = _mm_add_epi32(vsse, _mm_srli_si128(vsse, 8));
+ vsse = _mm_add_epi32(vsse, _mm_srli_si128(vsse, 4));
+ *sse = _mm_cvtsi128_si32(vsse);
+}
+
+static void VarianceSSE2(const unsigned char* src,
+ int src_stride,
+ const unsigned char* ref,
+ int ref_stride,
+ int w,
+ int h,
+ uint32_t* sse,
+ int64_t* sum,
+ int block_size) {
+ *sse = 0;
+ *sum = 0;
+
+ for (int i = 0; i < h; i += block_size) {
+ for (int j = 0; j < w; j += block_size) {
+ uint32_t sse0 = 0;
+ int32_t sum0 = 0;
+
+ Get8x8varSse2(src + src_stride * i + j, src_stride,
+ ref + ref_stride * i + j, ref_stride, &sse0, &sum0);
+ *sse += sse0;
+ *sum += sum0;
+ }
+ }
+}
+
+// Compute the sum of all pixel differences of this MB.
+static uint32_t AbsSumDiff16x1(__m128i acc_diff) {
+ const __m128i k_1 = _mm_set1_epi16(1);
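+  // Sign-extend the signed 8-bit lanes to 16 bits: unpacking acc_diff with
+  // itself duplicates each byte into both halves of a 16-bit lane, and the
+  // arithmetic shift right by 8 keeps the sign-extended high copy.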
+ const __m128i acc_diff_lo =
+ _mm_srai_epi16(_mm_unpacklo_epi8(acc_diff, acc_diff), 8);
+ const __m128i acc_diff_hi =
+ _mm_srai_epi16(_mm_unpackhi_epi8(acc_diff, acc_diff), 8);
+ const __m128i acc_diff_16 = _mm_add_epi16(acc_diff_lo, acc_diff_hi);
+ const __m128i hg_fe_dc_ba = _mm_madd_epi16(acc_diff_16, k_1);
+ const __m128i hgfe_dcba =
+ _mm_add_epi32(hg_fe_dc_ba, _mm_srli_si128(hg_fe_dc_ba, 8));
+ const __m128i hgfedcba =
+ _mm_add_epi32(hgfe_dcba, _mm_srli_si128(hgfe_dcba, 4));
+ unsigned int sum_diff = abs(_mm_cvtsi128_si32(hgfedcba));
+
+ return sum_diff;
+}
+
+// TODO(jackychen): Optimize this function using SSE2.
+void DenoiserFilterSSE2::CopyMem16x16(const uint8_t* src,
+ int src_stride,
+ uint8_t* dst,
+ int dst_stride) {
+ for (int i = 0; i < 16; i++) {
+ memcpy(dst, src, 16);
+ src += src_stride;
+ dst += dst_stride;
+ }
+}
+
+// TODO(jackychen): Optimize this function using SSE2.
+void DenoiserFilterSSE2::CopyMem8x8(const uint8_t* src,
+ int src_stride,
+ uint8_t* dst,
+ int dst_stride) {
+ for (int i = 0; i < 8; i++) {
+ memcpy(dst, src, 8);
+ src += src_stride;
+ dst += dst_stride;
+ }
+}
+
+uint32_t DenoiserFilterSSE2::Variance16x8(const uint8_t* src,
+ int src_stride,
+ const uint8_t* ref,
+ int ref_stride,
+ uint32_t* sse) {
+ int64_t sum = 0;
+ VarianceSSE2(src, src_stride << 1, ref, ref_stride << 1, 16, 8, sse, &sum, 8);
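+  // The strides are doubled, so 16x8 pixels are sampled from every other
+  // row of the 16x16 block; 16 * 8 = 128 = 2^7, hence the sum^2 >> 7 term.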
+ return *sse - ((sum * sum) >> 7);
+}
+
+DenoiserDecision DenoiserFilterSSE2::MbDenoise(uint8_t* mc_running_avg_y,
+ int mc_avg_y_stride,
+ uint8_t* running_avg_y,
+ int avg_y_stride,
+ const uint8_t* sig,
+ int sig_stride,
+ uint8_t motion_magnitude,
+ int increase_denoising) {
+ int shift_inc =
+ (increase_denoising && motion_magnitude <= kMotionMagnitudeThreshold) ? 1
+ : 0;
+ __m128i acc_diff = _mm_setzero_si128();
+ const __m128i k_0 = _mm_setzero_si128();
+ const __m128i k_4 = _mm_set1_epi8(4 + shift_inc);
+ const __m128i k_8 = _mm_set1_epi8(8);
+ const __m128i k_16 = _mm_set1_epi8(16);
+ // Modify each level's adjustment according to motion_magnitude.
+ const __m128i l3 = _mm_set1_epi8(
+ (motion_magnitude <= kMotionMagnitudeThreshold) ? 7 + shift_inc : 6);
+ // Difference between level 3 and level 2 is 2.
+ const __m128i l32 = _mm_set1_epi8(2);
+ // Difference between level 2 and level 1 is 1.
+ const __m128i l21 = _mm_set1_epi8(1);
+
+ for (int r = 0; r < 16; ++r) {
+ // Calculate differences.
+ const __m128i v_sig =
+ _mm_loadu_si128(reinterpret_cast<const __m128i*>(&sig[0]));
+ const __m128i v_mc_running_avg_y =
+ _mm_loadu_si128(reinterpret_cast<__m128i*>(&mc_running_avg_y[0]));
+ __m128i v_running_avg_y;
+ const __m128i pdiff = _mm_subs_epu8(v_mc_running_avg_y, v_sig);
+ const __m128i ndiff = _mm_subs_epu8(v_sig, v_mc_running_avg_y);
+ // Obtain the sign. FF if diff is negative.
+ const __m128i diff_sign = _mm_cmpeq_epi8(pdiff, k_0);
+    // Clamp the absolute difference to 16 before deriving the masks. Doing
+    // this allows us to use _mm_cmpgt_epi8, which operates on signed bytes.
+ const __m128i clamped_absdiff =
+ _mm_min_epu8(_mm_or_si128(pdiff, ndiff), k_16);
+ // Get masks for l2 l1 and l0 adjustments.
+ const __m128i mask2 = _mm_cmpgt_epi8(k_16, clamped_absdiff);
+ const __m128i mask1 = _mm_cmpgt_epi8(k_8, clamped_absdiff);
+ const __m128i mask0 = _mm_cmpgt_epi8(k_4, clamped_absdiff);
+ // Get adjustments for l2, l1, and l0.
+ __m128i adj2 = _mm_and_si128(mask2, l32);
+ const __m128i adj1 = _mm_and_si128(mask1, l21);
+ const __m128i adj0 = _mm_and_si128(mask0, clamped_absdiff);
+ __m128i adj, padj, nadj;
+
+ // Combine the adjustments and get absolute adjustments.
+ adj2 = _mm_add_epi8(adj2, adj1);
+ adj = _mm_sub_epi8(l3, adj2);
+ adj = _mm_andnot_si128(mask0, adj);
+ adj = _mm_or_si128(adj, adj0);
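+    // Example (small motion_magnitude, shift_inc == 0, so l3 == 7): a
+    // clamped diff of 10 sets only mask2, giving adj = 7 - 2 = 5; a diff
+    // clamped to 16 sets no mask, giving 7; a diff of 3 sets mask0 and the
+    // diff itself is used.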
+
+ // Restore the sign and get positive and negative adjustments.
+ padj = _mm_andnot_si128(diff_sign, adj);
+ nadj = _mm_and_si128(diff_sign, adj);
+
+ // Calculate filtered value.
+ v_running_avg_y = _mm_adds_epu8(v_sig, padj);
+ v_running_avg_y = _mm_subs_epu8(v_running_avg_y, nadj);
+ _mm_storeu_si128(reinterpret_cast<__m128i*>(running_avg_y),
+ v_running_avg_y);
+
+    // Adjustments are <= 7, so each element in acc_diff fits in a signed
+    // char.
+ acc_diff = _mm_adds_epi8(acc_diff, padj);
+ acc_diff = _mm_subs_epi8(acc_diff, nadj);
+
+ // Update pointers for next iteration.
+ sig += sig_stride;
+ mc_running_avg_y += mc_avg_y_stride;
+ running_avg_y += avg_y_stride;
+ }
+
+ {
+ // Compute the sum of all pixel differences of this MB.
+ unsigned int abs_sum_diff = AbsSumDiff16x1(acc_diff);
+ unsigned int sum_diff_thresh = kSumDiffThreshold;
+ if (increase_denoising)
+ sum_diff_thresh = kSumDiffThresholdHigh;
+ if (abs_sum_diff > sum_diff_thresh) {
+ // Before returning to copy the block (i.e., apply no denoising),
+ // check if we can still apply some (weaker) temporal filtering to
+      // this block, which would otherwise not be denoised at all. The
+      // simplest option is to apply an additional adjustment to
+      // running_avg_y to bring it closer to sig. The adjustment is capped
+      // by a maximum delta, and chosen such that in most cases the
+      // resulting sum_diff will be within the acceptable range given by
+      // sum_diff_thresh.
+
+ // The delta is set by the excess of absolute pixel diff over the
+ // threshold.
+ int delta = ((abs_sum_diff - sum_diff_thresh) >> 8) + 1;
+ // Only apply the adjustment for max delta up to 3.
+ if (delta < 4) {
+ const __m128i k_delta = _mm_set1_epi8(delta);
+ sig -= sig_stride * 16;
+ mc_running_avg_y -= mc_avg_y_stride * 16;
+ running_avg_y -= avg_y_stride * 16;
+ for (int r = 0; r < 16; ++r) {
+ __m128i v_running_avg_y =
+ _mm_loadu_si128(reinterpret_cast<__m128i*>(&running_avg_y[0]));
+ // Calculate differences.
+ const __m128i v_sig =
+ _mm_loadu_si128(reinterpret_cast<const __m128i*>(&sig[0]));
+ const __m128i v_mc_running_avg_y =
+ _mm_loadu_si128(reinterpret_cast<__m128i*>(&mc_running_avg_y[0]));
+ const __m128i pdiff = _mm_subs_epu8(v_mc_running_avg_y, v_sig);
+ const __m128i ndiff = _mm_subs_epu8(v_sig, v_mc_running_avg_y);
+ // Obtain the sign. FF if diff is negative.
+ const __m128i diff_sign = _mm_cmpeq_epi8(pdiff, k_0);
+ // Clamp absolute difference to delta to get the adjustment.
+ const __m128i adj = _mm_min_epu8(_mm_or_si128(pdiff, ndiff), k_delta);
+ // Restore the sign and get positive and negative adjustments.
+ __m128i padj, nadj;
+ padj = _mm_andnot_si128(diff_sign, adj);
+ nadj = _mm_and_si128(diff_sign, adj);
+ // Calculate filtered value.
+ v_running_avg_y = _mm_subs_epu8(v_running_avg_y, padj);
+ v_running_avg_y = _mm_adds_epu8(v_running_avg_y, nadj);
+ _mm_storeu_si128(reinterpret_cast<__m128i*>(running_avg_y),
+ v_running_avg_y);
+
+ // Accumulate the adjustments.
+ acc_diff = _mm_subs_epi8(acc_diff, padj);
+ acc_diff = _mm_adds_epi8(acc_diff, nadj);
+
+ // Update pointers for next iteration.
+ sig += sig_stride;
+ mc_running_avg_y += mc_avg_y_stride;
+ running_avg_y += avg_y_stride;
+ }
+ abs_sum_diff = AbsSumDiff16x1(acc_diff);
+ if (abs_sum_diff > sum_diff_thresh) {
+ return COPY_BLOCK;
+ }
+ } else {
+ return COPY_BLOCK;
+ }
+ }
+ }
+ return FILTER_BLOCK;
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/video_processing/util/denoiser_filter_sse2.h b/webrtc/modules/video_processing/util/denoiser_filter_sse2.h
new file mode 100644
index 0000000000..31d8510902
--- /dev/null
+++ b/webrtc/modules/video_processing/util/denoiser_filter_sse2.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_UTIL_DENOISER_FILTER_SSE2_H_
+#define WEBRTC_MODULES_VIDEO_PROCESSING_UTIL_DENOISER_FILTER_SSE2_H_
+
+#include "webrtc/modules/video_processing/util/denoiser_filter.h"
+
+namespace webrtc {
+
+class DenoiserFilterSSE2 : public DenoiserFilter {
+ public:
+ DenoiserFilterSSE2() {}
+ void CopyMem16x16(const uint8_t* src,
+ int src_stride,
+ uint8_t* dst,
+ int dst_stride) override;
+ void CopyMem8x8(const uint8_t* src,
+ int src_stride,
+ uint8_t* dst,
+ int dst_stride) override;
+ uint32_t Variance16x8(const uint8_t* a,
+ int a_stride,
+ const uint8_t* b,
+ int b_stride,
+ unsigned int* sse) override;
+ DenoiserDecision MbDenoise(uint8_t* mc_running_avg_y,
+ int mc_avg_y_stride,
+ uint8_t* running_avg_y,
+ int avg_y_stride,
+ const uint8_t* sig,
+ int sig_stride,
+ uint8_t motion_magnitude,
+ int increase_denoising) override;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_PROCESSING_UTIL_DENOISER_FILTER_SSE2_H_
diff --git a/webrtc/modules/video_processing/util/skin_detection.cc b/webrtc/modules/video_processing/util/skin_detection.cc
new file mode 100644
index 0000000000..bf631ce2f6
--- /dev/null
+++ b/webrtc/modules/video_processing/util/skin_detection.cc
@@ -0,0 +1,65 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <limits.h>
+#include <math.h>
+
+#include "webrtc/modules/video_processing/util/skin_detection.h"
+
+namespace webrtc {
+
+// Fixed-point skin color model parameters.
+static const int skin_mean[2] = {7463, 9614}; // q6
+static const int skin_inv_cov[4] = {4107, 1663, 1663, 2157}; // q16
+static const int skin_threshold = 1570636; // q18
+
+// Thresholds on luminance.
+static const int y_low = 20;
+static const int y_high = 220;
+
+// Evaluates the Mahalanobis distance measure for the input CbCr values.
+static int EvaluateSkinColorDifference(int cb, int cr) {
+ const int cb_q6 = cb << 6;
+ const int cr_q6 = cr << 6;
+ const int cb_diff_q12 = (cb_q6 - skin_mean[0]) * (cb_q6 - skin_mean[0]);
+ const int cbcr_diff_q12 = (cb_q6 - skin_mean[0]) * (cr_q6 - skin_mean[1]);
+ const int cr_diff_q12 = (cr_q6 - skin_mean[1]) * (cr_q6 - skin_mean[1]);
+ const int cb_diff_q2 = (cb_diff_q12 + (1 << 9)) >> 10;
+ const int cbcr_diff_q2 = (cbcr_diff_q12 + (1 << 9)) >> 10;
+ const int cr_diff_q2 = (cr_diff_q12 + (1 << 9)) >> 10;
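+  // Q-format bookkeeping: the Q6 inputs give Q12 products; the rounding
+  // shift by 10 yields Q2, and multiplying by the Q16 inverse covariance
+  // gives Q18, matching skin_threshold.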
+ const int skin_diff =
+ skin_inv_cov[0] * cb_diff_q2 + skin_inv_cov[1] * cbcr_diff_q2 +
+ skin_inv_cov[2] * cbcr_diff_q2 + skin_inv_cov[3] * cr_diff_q2;
+ return skin_diff;
+}
+
+bool MbHasSkinColor(const uint8_t* y_src,
+ const uint8_t* u_src,
+ const uint8_t* v_src,
+ const int stride_y,
+ const int stride_u,
+ const int stride_v,
+ const int mb_row,
+ const int mb_col) {
+ const uint8_t* y = y_src + ((mb_row << 4) + 8) * stride_y + (mb_col << 4) + 8;
+ const uint8_t* u = u_src + ((mb_row << 3) + 4) * stride_u + (mb_col << 3) + 4;
+ const uint8_t* v = v_src + ((mb_row << 3) + 4) * stride_v + (mb_col << 3) + 4;
+ // Use 2x2 average of center pixel to compute skin area.
+ uint8_t y_avg = (*y + *(y + 1) + *(y + stride_y) + *(y + stride_y + 1)) >> 2;
+ uint8_t u_avg = (*u + *(u + 1) + *(u + stride_u) + *(u + stride_u + 1)) >> 2;
+ uint8_t v_avg = (*v + *(v + 1) + *(v + stride_v) + *(v + stride_v + 1)) >> 2;
+ // Ignore MB with too high or low brightness.
+ if (y_avg < y_low || y_avg > y_high)
+ return false;
+ else
+ return (EvaluateSkinColorDifference(u_avg, v_avg) < skin_threshold);
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/video_processing/util/skin_detection.h b/webrtc/modules/video_processing/util/skin_detection.h
new file mode 100755
index 0000000000..561c03c425
--- /dev/null
+++ b/webrtc/modules/video_processing/util/skin_detection.h
@@ -0,0 +1,28 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_UTIL_SKIN_DETECTION_H_
+#define WEBRTC_MODULES_VIDEO_PROCESSING_UTIL_SKIN_DETECTION_H_
+
+#include <stdint.h>
+
+namespace webrtc {
+
+bool MbHasSkinColor(const uint8_t* y_src,
+ const uint8_t* u_src,
+ const uint8_t* v_src,
+ const int stride_y,
+ const int stride_u,
+ const int stride_v,
+ const int mb_row,
+ const int mb_col);
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_PROCESSING_UTIL_SKIN_DETECTION_H_
diff --git a/webrtc/modules/video_processing/video_decimator.cc b/webrtc/modules/video_processing/video_decimator.cc
new file mode 100644
index 0000000000..63e347b026
--- /dev/null
+++ b/webrtc/modules/video_processing/video_decimator.cc
@@ -0,0 +1,148 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/checks.h"
+#include "webrtc/modules/video_processing/include/video_processing.h"
+#include "webrtc/modules/video_processing/video_decimator.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+
+#define VD_MIN(a, b) ((a) < (b)) ? (a) : (b)
+
+namespace webrtc {
+
+VPMVideoDecimator::VPMVideoDecimator() {
+ Reset();
+}
+
+VPMVideoDecimator::~VPMVideoDecimator() {}
+
+void VPMVideoDecimator::Reset() {
+ overshoot_modifier_ = 0;
+ drop_count_ = 0;
+ keep_count_ = 0;
+ target_frame_rate_ = 30;
+ incoming_frame_rate_ = 0.0f;
+ memset(incoming_frame_times_, 0, sizeof(incoming_frame_times_));
+ enable_temporal_decimation_ = true;
+}
+
+void VPMVideoDecimator::EnableTemporalDecimation(bool enable) {
+ enable_temporal_decimation_ = enable;
+}
+
+void VPMVideoDecimator::SetTargetFramerate(int frame_rate) {
+ RTC_DCHECK(frame_rate);
+ target_frame_rate_ = frame_rate;
+}
+
+bool VPMVideoDecimator::DropFrame() {
+ if (!enable_temporal_decimation_)
+ return false;
+
+ if (incoming_frame_rate_ <= 0)
+ return false;
+
+ const uint32_t incomingframe_rate =
+ static_cast<uint32_t>(incoming_frame_rate_ + 0.5f);
+
+ if (target_frame_rate_ == 0)
+ return true;
+
+ bool drop = false;
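+  // E.g., with ~30 fps incoming and a 20 fps target, overshoot is 10 and
+  // 2 * 10 < 30, so dropVar = 30 / 10 = 3: roughly every third frame is
+  // dropped.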
+ if (incomingframe_rate > target_frame_rate_) {
+ int32_t overshoot =
+ overshoot_modifier_ + (incomingframe_rate - target_frame_rate_);
+ if (overshoot < 0) {
+ overshoot = 0;
+ overshoot_modifier_ = 0;
+ }
+
+ if (overshoot && 2 * overshoot < (int32_t)incomingframe_rate) {
+ if (drop_count_) { // Just got here so drop to be sure.
+ drop_count_ = 0;
+ return true;
+ }
+ const uint32_t dropVar = incomingframe_rate / overshoot;
+
+ if (keep_count_ >= dropVar) {
+ drop = true;
+ overshoot_modifier_ = -((int32_t)incomingframe_rate % overshoot) / 3;
+ keep_count_ = 1;
+ } else {
+ keep_count_++;
+ }
+ } else {
+ keep_count_ = 0;
+ const uint32_t dropVar = overshoot / target_frame_rate_;
+ if (drop_count_ < dropVar) {
+ drop = true;
+ drop_count_++;
+ } else {
+ overshoot_modifier_ = overshoot % target_frame_rate_;
+ drop = false;
+ drop_count_ = 0;
+ }
+ }
+ }
+ return drop;
+}
+
+uint32_t VPMVideoDecimator::GetDecimatedFrameRate() {
+ ProcessIncomingframe_rate(TickTime::MillisecondTimestamp());
+ if (!enable_temporal_decimation_) {
+ return static_cast<uint32_t>(incoming_frame_rate_ + 0.5f);
+ }
+ return VD_MIN(target_frame_rate_,
+ static_cast<uint32_t>(incoming_frame_rate_ + 0.5f));
+}
+
+uint32_t VPMVideoDecimator::Inputframe_rate() {
+ ProcessIncomingframe_rate(TickTime::MillisecondTimestamp());
+ return static_cast<uint32_t>(incoming_frame_rate_ + 0.5f);
+}
+
+void VPMVideoDecimator::UpdateIncomingframe_rate() {
+ int64_t now = TickTime::MillisecondTimestamp();
+ if (incoming_frame_times_[0] == 0) {
+    // First frame; no shift of the history needed.
+ } else {
+    // Shift the history to make room for the new timestamp.
+ for (int i = kFrameCountHistory_size - 2; i >= 0; i--) {
+ incoming_frame_times_[i + 1] = incoming_frame_times_[i];
+ }
+ }
+ incoming_frame_times_[0] = now;
+ ProcessIncomingframe_rate(now);
+}
+
+void VPMVideoDecimator::ProcessIncomingframe_rate(int64_t now) {
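+  // Estimate the incoming rate from the timestamp history: count the frames
+  // within the 2-second window and divide by the time they span, e.g. 30
+  // frames spanning 1000 ms gives 30 fps.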
+ int32_t num = 0;
+ int32_t nrOfFrames = 0;
+ for (num = 1; num < (kFrameCountHistory_size - 1); num++) {
+      // Don't use data older than 2 seconds.
+ if (incoming_frame_times_[num] <= 0 ||
+ now - incoming_frame_times_[num] > kFrameHistoryWindowMs) {
+ break;
+ } else {
+ nrOfFrames++;
+ }
+ }
+ if (num > 1) {
+ int64_t diff = now - incoming_frame_times_[num - 1];
+ incoming_frame_rate_ = 1.0;
+ if (diff > 0) {
+ incoming_frame_rate_ = nrOfFrames * 1000.0f / static_cast<float>(diff);
+ }
+ } else {
+ incoming_frame_rate_ = static_cast<float>(nrOfFrames);
+ }
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/video_processing/video_decimator.h b/webrtc/modules/video_processing/video_decimator.h
new file mode 100644
index 0000000000..1b871df8c3
--- /dev/null
+++ b/webrtc/modules/video_processing/video_decimator.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_VIDEO_DECIMATOR_H_
+#define WEBRTC_MODULES_VIDEO_PROCESSING_VIDEO_DECIMATOR_H_
+
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+class VPMVideoDecimator {
+ public:
+ VPMVideoDecimator();
+ ~VPMVideoDecimator();
+
+ void Reset();
+
+ void EnableTemporalDecimation(bool enable);
+
+ void SetTargetFramerate(int frame_rate);
+
+ bool DropFrame();
+
+ void UpdateIncomingframe_rate();
+
+ // Get Decimated Frame Rate/Dimensions.
+ uint32_t GetDecimatedFrameRate();
+
+ // Get input frame rate.
+ uint32_t Inputframe_rate();
+
+ private:
+ void ProcessIncomingframe_rate(int64_t now);
+
+ enum { kFrameCountHistory_size = 90 };
+ enum { kFrameHistoryWindowMs = 2000 };
+
+ // Temporal decimation.
+ int32_t overshoot_modifier_;
+ uint32_t drop_count_;
+ uint32_t keep_count_;
+ uint32_t target_frame_rate_;
+ float incoming_frame_rate_;
+ int64_t incoming_frame_times_[kFrameCountHistory_size];
+ bool enable_temporal_decimation_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_PROCESSING_VIDEO_DECIMATOR_H_
diff --git a/webrtc/modules/video_processing/video_denoiser.cc b/webrtc/modules/video_processing/video_denoiser.cc
new file mode 100644
index 0000000000..4902a89491
--- /dev/null
+++ b/webrtc/modules/video_processing/video_denoiser.cc
@@ -0,0 +1,147 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "webrtc/common_video/libyuv/include/scaler.h"
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/modules/video_processing/video_denoiser.h"
+
+namespace webrtc {
+
+VideoDenoiser::VideoDenoiser(bool runtime_cpu_detection)
+ : width_(0),
+ height_(0),
+ filter_(DenoiserFilter::Create(runtime_cpu_detection)) {}
+
+void VideoDenoiser::TrailingReduction(int mb_rows,
+ int mb_cols,
+ const uint8_t* y_src,
+ int stride_y,
+ uint8_t* y_dst) {
+ for (int mb_row = 1; mb_row < mb_rows - 1; ++mb_row) {
+ for (int mb_col = 1; mb_col < mb_cols - 1; ++mb_col) {
+ int mb_index = mb_row * mb_cols + mb_col;
+ uint8_t* mb_dst = y_dst + (mb_row << 4) * stride_y + (mb_col << 4);
+ const uint8_t* mb_src = y_src + (mb_row << 4) * stride_y + (mb_col << 4);
+      // If the number of denoised neighbors is less than a threshold, do
+      // NOT denoise the block. A different threshold is used for skin MBs,
+      // and the change of denoising status does not propagate.
+ if (metrics_[mb_index].is_skin) {
+        // The threshold is higher (more strict) for non-skin MBs, where
+        // trailing artifacts usually happen.
+ if (metrics_[mb_index].denoise &&
+ metrics_[mb_index + 1].denoise + metrics_[mb_index - 1].denoise +
+ metrics_[mb_index + mb_cols].denoise +
+ metrics_[mb_index - mb_cols].denoise <=
+ 2) {
+ metrics_[mb_index].denoise = 0;
+ filter_->CopyMem16x16(mb_src, stride_y, mb_dst, stride_y);
+ }
+ } else if (metrics_[mb_index].denoise &&
+ metrics_[mb_index + 1].denoise +
+ metrics_[mb_index - 1].denoise +
+ metrics_[mb_index + mb_cols + 1].denoise +
+ metrics_[mb_index + mb_cols - 1].denoise +
+ metrics_[mb_index - mb_cols + 1].denoise +
+ metrics_[mb_index - mb_cols - 1].denoise +
+ metrics_[mb_index + mb_cols].denoise +
+ metrics_[mb_index - mb_cols].denoise <=
+ 7) {
+ filter_->CopyMem16x16(mb_src, stride_y, mb_dst, stride_y);
+ }
+ }
+ }
+}
+
+void VideoDenoiser::DenoiseFrame(const VideoFrame& frame,
+ VideoFrame* denoised_frame) {
+ int stride_y = frame.stride(kYPlane);
+ int stride_u = frame.stride(kUPlane);
+ int stride_v = frame.stride(kVPlane);
+  // If the previous width or height differs from the current frame's, skip
+  // denoising for the current frame.
+ if (width_ != frame.width() || height_ != frame.height()) {
+ width_ = frame.width();
+ height_ = frame.height();
+ denoised_frame->CreateFrame(frame.buffer(kYPlane), frame.buffer(kUPlane),
+ frame.buffer(kVPlane), width_, height_,
+ stride_y, stride_u, stride_v);
+ // Setting time parameters to the output frame.
+ denoised_frame->set_timestamp(frame.timestamp());
+ denoised_frame->set_render_time_ms(frame.render_time_ms());
+ return;
+ }
+ // For 16x16 block.
+ int mb_cols = width_ >> 4;
+ int mb_rows = height_ >> 4;
+ if (metrics_.get() == nullptr)
+ metrics_.reset(new DenoiseMetrics[mb_cols * mb_rows]());
+ // Denoise on Y plane.
+ uint8_t* y_dst = denoised_frame->buffer(kYPlane);
+ uint8_t* u_dst = denoised_frame->buffer(kUPlane);
+ uint8_t* v_dst = denoised_frame->buffer(kVPlane);
+ const uint8_t* y_src = frame.buffer(kYPlane);
+ const uint8_t* u_src = frame.buffer(kUPlane);
+ const uint8_t* v_src = frame.buffer(kVPlane);
+ // Temporary buffer to store denoising result.
+ uint8_t y_tmp[16 * 16] = {0};
+ for (int mb_row = 0; mb_row < mb_rows; ++mb_row) {
+ for (int mb_col = 0; mb_col < mb_cols; ++mb_col) {
+ const uint8_t* mb_src = y_src + (mb_row << 4) * stride_y + (mb_col << 4);
+ uint8_t* mb_dst = y_dst + (mb_row << 4) * stride_y + (mb_col << 4);
+ int mb_index = mb_row * mb_cols + mb_col;
+ // Denoise each MB at the very start and save the result to a temporary
+ // buffer.
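+      // Here mb_dst (the previously denoised output) serves as the running
+      // average and y_tmp receives the candidate denoised result.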
+ if (filter_->MbDenoise(mb_dst, stride_y, y_tmp, 16, mb_src, stride_y, 0,
+ 1) == FILTER_BLOCK) {
+ uint32_t thr_var = 0;
+ // Save var and sad to the buffer.
+ metrics_[mb_index].var = filter_->Variance16x8(
+ mb_dst, stride_y, y_tmp, 16, &metrics_[mb_index].sad);
+ // Get skin map.
+ metrics_[mb_index].is_skin = MbHasSkinColor(
+ y_src, u_src, v_src, stride_y, stride_u, stride_v, mb_row, mb_col);
+        // The variance threshold differs for skin and non-skin MBs.
+        // Skin MBs use a small threshold to reduce blockiness.
+ thr_var = metrics_[mb_index].is_skin ? 128 : 12 * 128;
+ if (metrics_[mb_index].var > thr_var) {
+ metrics_[mb_index].denoise = 0;
+ // Use the source MB.
+ filter_->CopyMem16x16(mb_src, stride_y, mb_dst, stride_y);
+ } else {
+ metrics_[mb_index].denoise = 1;
+ // Use the denoised MB.
+ filter_->CopyMem16x16(y_tmp, 16, mb_dst, stride_y);
+ }
+ } else {
+ metrics_[mb_index].denoise = 0;
+ filter_->CopyMem16x16(mb_src, stride_y, mb_dst, stride_y);
+ }
+ // Copy source U/V plane.
+ const uint8_t* mb_src_u =
+ u_src + (mb_row << 3) * stride_u + (mb_col << 3);
+ const uint8_t* mb_src_v =
+ v_src + (mb_row << 3) * stride_v + (mb_col << 3);
+ uint8_t* mb_dst_u = u_dst + (mb_row << 3) * stride_u + (mb_col << 3);
+ uint8_t* mb_dst_v = v_dst + (mb_row << 3) * stride_v + (mb_col << 3);
+ filter_->CopyMem8x8(mb_src_u, stride_u, mb_dst_u, stride_u);
+ filter_->CopyMem8x8(mb_src_v, stride_v, mb_dst_v, stride_v);
+ }
+ }
+  // Second round: reduce trailing artifacts and blockiness by referring to
+  // the neighbors' denoising status.
+ TrailingReduction(mb_rows, mb_cols, y_src, stride_y, y_dst);
+
+ // Setting time parameters to the output frame.
+ denoised_frame->set_timestamp(frame.timestamp());
+ denoised_frame->set_render_time_ms(frame.render_time_ms());
+ return;
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/video_processing/video_denoiser.h b/webrtc/modules/video_processing/video_denoiser.h
new file mode 100644
index 0000000000..107a15ca07
--- /dev/null
+++ b/webrtc/modules/video_processing/video_denoiser.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_VIDEO_DENOISER_H_
+#define WEBRTC_MODULES_VIDEO_PROCESSING_VIDEO_DENOISER_H_
+
+#include "webrtc/modules/video_processing/util/denoiser_filter.h"
+#include "webrtc/modules/video_processing/util/skin_detection.h"
+
+namespace webrtc {
+
+class VideoDenoiser {
+ public:
+ explicit VideoDenoiser(bool runtime_cpu_detection);
+ void DenoiseFrame(const VideoFrame& frame, VideoFrame* denoised_frame);
+
+ private:
+ void TrailingReduction(int mb_rows,
+ int mb_cols,
+ const uint8_t* y_src,
+ int stride_y,
+ uint8_t* y_dst);
+ int width_;
+ int height_;
+ rtc::scoped_ptr<DenoiseMetrics[]> metrics_;
+ rtc::scoped_ptr<DenoiserFilter> filter_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_PROCESSING_VIDEO_DENOISER_H_
diff --git a/webrtc/modules/video_processing/video_processing.gypi b/webrtc/modules/video_processing/video_processing.gypi
index 5827a5b1a6..7418c455a2 100644
--- a/webrtc/modules/video_processing/video_processing.gypi
+++ b/webrtc/modules/video_processing/video_processing.gypi
@@ -18,29 +18,38 @@
'<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
],
'sources': [
- 'main/interface/video_processing.h',
- 'main/interface/video_processing_defines.h',
- 'main/source/brighten.cc',
- 'main/source/brighten.h',
- 'main/source/brightness_detection.cc',
- 'main/source/brightness_detection.h',
- 'main/source/content_analysis.cc',
- 'main/source/content_analysis.h',
- 'main/source/deflickering.cc',
- 'main/source/deflickering.h',
- 'main/source/frame_preprocessor.cc',
- 'main/source/frame_preprocessor.h',
- 'main/source/spatial_resampler.cc',
- 'main/source/spatial_resampler.h',
- 'main/source/video_decimator.cc',
- 'main/source/video_decimator.h',
- 'main/source/video_processing_impl.cc',
- 'main/source/video_processing_impl.h',
+ 'include/video_processing.h',
+ 'include/video_processing_defines.h',
+ 'brightness_detection.cc',
+ 'brightness_detection.h',
+ 'content_analysis.cc',
+ 'content_analysis.h',
+ 'deflickering.cc',
+ 'deflickering.h',
+ 'frame_preprocessor.cc',
+ 'frame_preprocessor.h',
+ 'spatial_resampler.cc',
+ 'spatial_resampler.h',
+ 'video_decimator.cc',
+ 'video_decimator.h',
+ 'video_processing_impl.cc',
+ 'video_processing_impl.h',
+ 'video_denoiser.cc',
+ 'video_denoiser.h',
+ 'util/denoiser_filter.cc',
+ 'util/denoiser_filter.h',
+ 'util/denoiser_filter_c.cc',
+ 'util/denoiser_filter_c.h',
+ 'util/skin_detection.cc',
+ 'util/skin_detection.h',
],
'conditions': [
['target_arch=="ia32" or target_arch=="x64"', {
'dependencies': [ 'video_processing_sse2', ],
}],
+ ['target_arch=="arm" or target_arch == "arm64"', {
+ 'dependencies': [ 'video_processing_neon', ],
+ }],
],
},
],
@@ -51,7 +60,9 @@
'target_name': 'video_processing_sse2',
'type': 'static_library',
'sources': [
- 'main/source/content_analysis_sse2.cc',
+ 'content_analysis_sse2.cc',
+ 'util/denoiser_filter_sse2.cc',
+ 'util/denoiser_filter_sse2.h',
],
'conditions': [
['os_posix==1 and OS!="mac"', {
@@ -66,6 +77,19 @@
},
],
}],
+ ['target_arch=="arm" or target_arch == "arm64"', {
+ 'targets': [
+ {
+ 'target_name': 'video_processing_neon',
+ 'type': 'static_library',
+ 'includes': [ '../../build/arm_neon.gypi', ],
+ 'sources': [
+ 'util/denoiser_filter_neon.cc',
+ 'util/denoiser_filter_neon.h',
+ ],
+ },
+ ],
+ }],
],
}
diff --git a/webrtc/modules/video_processing/video_processing_impl.cc b/webrtc/modules/video_processing/video_processing_impl.cc
new file mode 100644
index 0000000000..f34886f10f
--- /dev/null
+++ b/webrtc/modules/video_processing/video_processing_impl.cc
@@ -0,0 +1,179 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_processing/video_processing_impl.h"
+
+#include <assert.h>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+
+namespace webrtc {
+
+namespace {
+
+int GetSubSamplingFactor(int width, int height) {
+ if (width * height >= 640 * 480) {
+ return 3;
+ } else if (width * height >= 352 * 288) {
+ return 2;
+ } else if (width * height >= 176 * 144) {
+ return 1;
+ } else {
+ return 0;
+ }
+}
+} // namespace
+
+VideoProcessing* VideoProcessing::Create() {
+ return new VideoProcessingImpl();
+}
+
+VideoProcessingImpl::VideoProcessingImpl() {}
+VideoProcessingImpl::~VideoProcessingImpl() {}
+
+void VideoProcessing::GetFrameStats(const VideoFrame& frame,
+ FrameStats* stats) {
+ ClearFrameStats(stats); // The histogram needs to be zeroed out.
+ if (frame.IsZeroSize()) {
+ return;
+ }
+
+ int width = frame.width();
+ int height = frame.height();
+ stats->sub_sampling_factor = GetSubSamplingFactor(width, height);
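+  // E.g., for VGA-size frames and larger the factor is 3, so every 8th
+  // pixel in each dimension contributes to the histogram.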
+
+ const uint8_t* buffer = frame.buffer(kYPlane);
+  // Compute the histogram and sum of the frame.
+ for (int i = 0; i < height; i += (1 << stats->sub_sampling_factor)) {
+ int k = i * width;
+ for (int j = 0; j < width; j += (1 << stats->sub_sampling_factor)) {
+ stats->hist[buffer[k + j]]++;
+ stats->sum += buffer[k + j];
+ }
+ }
+
+ stats->num_pixels = (width * height) / ((1 << stats->sub_sampling_factor) *
+ (1 << stats->sub_sampling_factor));
+ assert(stats->num_pixels > 0);
+
+  // Compute the mean value of the frame.
+ stats->mean = stats->sum / stats->num_pixels;
+}
+
+bool VideoProcessing::ValidFrameStats(const FrameStats& stats) {
+ if (stats.num_pixels == 0) {
+ LOG(LS_WARNING) << "Invalid frame stats.";
+ return false;
+ }
+ return true;
+}
+
+void VideoProcessing::ClearFrameStats(FrameStats* stats) {
+ stats->mean = 0;
+ stats->sum = 0;
+ stats->num_pixels = 0;
+ stats->sub_sampling_factor = 0;
+ memset(stats->hist, 0, sizeof(stats->hist));
+}
+
+void VideoProcessing::Brighten(int delta, VideoFrame* frame) {
+ RTC_DCHECK(!frame->IsZeroSize());
+ RTC_DCHECK(frame->width() > 0);
+ RTC_DCHECK(frame->height() > 0);
+
+ int num_pixels = frame->width() * frame->height();
+
+  int look_up[256];
+  for (int i = 0; i < 256; i++) {
+    // Clamp i + delta to [0, 255] on both ends.
+    int val = i + delta;
+    look_up[i] = (val < 0) ? 0 : ((val > 255) ? 255 : val);
+  }
+
+ uint8_t* temp_ptr = frame->buffer(kYPlane);
+ for (int i = 0; i < num_pixels; i++) {
+ *temp_ptr = static_cast<uint8_t>(look_up[*temp_ptr]);
+ temp_ptr++;
+ }
+}
+
+int32_t VideoProcessingImpl::Deflickering(VideoFrame* frame,
+ FrameStats* stats) {
+ rtc::CritScope mutex(&mutex_);
+ return deflickering_.ProcessFrame(frame, stats);
+}
+
+int32_t VideoProcessingImpl::BrightnessDetection(const VideoFrame& frame,
+ const FrameStats& stats) {
+ rtc::CritScope mutex(&mutex_);
+ return brightness_detection_.ProcessFrame(frame, stats);
+}
+
+void VideoProcessingImpl::EnableTemporalDecimation(bool enable) {
+ rtc::CritScope mutex(&mutex_);
+ frame_pre_processor_.EnableTemporalDecimation(enable);
+}
+
+void VideoProcessingImpl::SetInputFrameResampleMode(
+ VideoFrameResampling resampling_mode) {
+ rtc::CritScope cs(&mutex_);
+ frame_pre_processor_.SetInputFrameResampleMode(resampling_mode);
+}
+
+int32_t VideoProcessingImpl::SetTargetResolution(uint32_t width,
+ uint32_t height,
+ uint32_t frame_rate) {
+ rtc::CritScope cs(&mutex_);
+ return frame_pre_processor_.SetTargetResolution(width, height, frame_rate);
+}
+
+void VideoProcessingImpl::SetTargetFramerate(int frame_rate) {
+ rtc::CritScope cs(&mutex_);
+ frame_pre_processor_.SetTargetFramerate(frame_rate);
+}
+
+uint32_t VideoProcessingImpl::GetDecimatedFrameRate() {
+ rtc::CritScope cs(&mutex_);
+ return frame_pre_processor_.GetDecimatedFrameRate();
+}
+
+uint32_t VideoProcessingImpl::GetDecimatedWidth() const {
+ rtc::CritScope cs(&mutex_);
+ return frame_pre_processor_.GetDecimatedWidth();
+}
+
+uint32_t VideoProcessingImpl::GetDecimatedHeight() const {
+ rtc::CritScope cs(&mutex_);
+ return frame_pre_processor_.GetDecimatedHeight();
+}
+
+void VideoProcessingImpl::EnableDenosing(bool enable) {
+ rtc::CritScope cs(&mutex_);
+ frame_pre_processor_.EnableDenosing(enable);
+}
+
+const VideoFrame* VideoProcessingImpl::PreprocessFrame(
+ const VideoFrame& frame) {
+ rtc::CritScope mutex(&mutex_);
+ return frame_pre_processor_.PreprocessFrame(frame);
+}
+
+VideoContentMetrics* VideoProcessingImpl::GetContentMetrics() const {
+ rtc::CritScope mutex(&mutex_);
+ return frame_pre_processor_.GetContentMetrics();
+}
+
+void VideoProcessingImpl::EnableContentAnalysis(bool enable) {
+ rtc::CritScope mutex(&mutex_);
+ frame_pre_processor_.EnableContentAnalysis(enable);
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/video_processing/video_processing_impl.h b/webrtc/modules/video_processing/video_processing_impl.h
new file mode 100644
index 0000000000..edbaba12fa
--- /dev/null
+++ b/webrtc/modules/video_processing/video_processing_impl.h
@@ -0,0 +1,55 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_VIDEO_PROCESSING_IMPL_H_
+#define WEBRTC_MODULES_VIDEO_PROCESSING_VIDEO_PROCESSING_IMPL_H_
+
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/modules/video_processing/include/video_processing.h"
+#include "webrtc/modules/video_processing/brightness_detection.h"
+#include "webrtc/modules/video_processing/deflickering.h"
+#include "webrtc/modules/video_processing/frame_preprocessor.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+class VideoProcessingImpl : public VideoProcessing {
+ public:
+ VideoProcessingImpl();
+ ~VideoProcessingImpl() override;
+
+ // Implements VideoProcessing.
+ int32_t Deflickering(VideoFrame* frame, FrameStats* stats) override;
+ int32_t BrightnessDetection(const VideoFrame& frame,
+ const FrameStats& stats) override;
+ void EnableTemporalDecimation(bool enable) override;
+ void SetInputFrameResampleMode(VideoFrameResampling resampling_mode) override;
+ void EnableContentAnalysis(bool enable) override;
+ int32_t SetTargetResolution(uint32_t width,
+ uint32_t height,
+ uint32_t frame_rate) override;
+ void SetTargetFramerate(int frame_rate) override;
+ uint32_t GetDecimatedFrameRate() override;
+ uint32_t GetDecimatedWidth() const override;
+ uint32_t GetDecimatedHeight() const override;
+ void EnableDenosing(bool enable) override;
+ const VideoFrame* PreprocessFrame(const VideoFrame& frame) override;
+ VideoContentMetrics* GetContentMetrics() const override;
+
+ private:
+ mutable rtc::CriticalSection mutex_;
+ VPMDeflickering deflickering_ GUARDED_BY(mutex_);
+ VPMBrightnessDetection brightness_detection_;
+ VPMFramePreprocessor frame_pre_processor_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_PROCESSING_VIDEO_PROCESSING_IMPL_H_
diff --git a/webrtc/modules/video_render/BUILD.gn b/webrtc/modules/video_render/BUILD.gn
index 80f23870aa..0771bd7080 100644
--- a/webrtc/modules/video_render/BUILD.gn
+++ b/webrtc/modules/video_render/BUILD.gn
@@ -13,8 +13,8 @@ source_set("video_render_module") {
"external/video_render_external_impl.cc",
"external/video_render_external_impl.h",
"i_video_render.h",
- "include/video_render.h",
- "include/video_render_defines.h",
+ "video_render.h",
+ "video_render_defines.h",
"video_render_impl.h",
]
diff --git a/webrtc/modules/video_render/android/video_render_android_impl.cc b/webrtc/modules/video_render/android/video_render_android_impl.cc
index c647501963..9affb23d99 100644
--- a/webrtc/modules/video_render/android/video_render_android_impl.cc
+++ b/webrtc/modules/video_render/android/video_render_android_impl.cc
@@ -141,18 +141,13 @@ int32_t VideoRenderAndroid::StartRender() {
return 0;
}
- _javaRenderThread = ThreadWrapper::CreateThread(JavaRenderThreadFun, this,
- "AndroidRenderThread");
+ _javaRenderThread.reset(new rtc::PlatformThread(JavaRenderThreadFun, this,
+ "AndroidRenderThread"));
- if (_javaRenderThread->Start())
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
- "%s: thread started", __FUNCTION__);
- else {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: Could not start send thread", __FUNCTION__);
- return -1;
- }
- _javaRenderThread->SetPriority(kRealtimePriority);
+ _javaRenderThread->Start();
+ WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: thread started",
+ __FUNCTION__);
+ _javaRenderThread->SetPriority(rtc::kRealtimePriority);
return 0;
}
diff --git a/webrtc/modules/video_render/android/video_render_android_impl.h b/webrtc/modules/video_render/android/video_render_android_impl.h
index 34950db7d1..e5b7de4643 100644
--- a/webrtc/modules/video_render/android/video_render_android_impl.h
+++ b/webrtc/modules/video_render/android/video_render_android_impl.h
@@ -15,8 +15,8 @@
#include <map>
+#include "webrtc/base/platform_thread.h"
#include "webrtc/modules/video_render/i_video_render.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
namespace webrtc {
@@ -144,7 +144,8 @@ class VideoRenderAndroid: IVideoRender {
EventWrapper& _javaRenderEvent;
int64_t _lastJavaRenderEvent;
JNIEnv* _javaRenderJniEnv; // JNIEnv for the java render thread.
- rtc::scoped_ptr<ThreadWrapper> _javaRenderThread;
+ // TODO(pbos): Remove scoped_ptr and use the member directly.
+ rtc::scoped_ptr<rtc::PlatformThread> _javaRenderThread;
};
} // namespace webrtc
diff --git a/webrtc/modules/video_render/android/video_render_android_native_opengl2.h b/webrtc/modules/video_render/android/video_render_android_native_opengl2.h
index b748b2dd47..8be247b834 100644
--- a/webrtc/modules/video_render/android/video_render_android_native_opengl2.h
+++ b/webrtc/modules/video_render/android/video_render_android_native_opengl2.h
@@ -15,7 +15,7 @@
#include "webrtc/modules/video_render/android/video_render_android_impl.h"
#include "webrtc/modules/video_render/android/video_render_opengles20.h"
-#include "webrtc/modules/video_render/include/video_render_defines.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
namespace webrtc {
diff --git a/webrtc/modules/video_render/android/video_render_android_surface_view.h b/webrtc/modules/video_render/android/video_render_android_surface_view.h
index 480e8b5106..0f029b54f3 100644
--- a/webrtc/modules/video_render/android/video_render_android_surface_view.h
+++ b/webrtc/modules/video_render/android/video_render_android_surface_view.h
@@ -14,7 +14,7 @@
#include <jni.h>
#include "webrtc/modules/video_render/android/video_render_android_impl.h"
-#include "webrtc/modules/video_render/include/video_render_defines.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
namespace webrtc {
diff --git a/webrtc/modules/video_render/android/video_render_opengles20.h b/webrtc/modules/video_render/android/video_render_opengles20.h
index 44ab4c04bc..57e2a10d42 100644
--- a/webrtc/modules/video_render/android/video_render_opengles20.h
+++ b/webrtc/modules/video_render/android/video_render_opengles20.h
@@ -11,7 +11,7 @@
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_OPENGLES20_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_OPENGLES20_H_
-#include "webrtc/modules/video_render/include/video_render_defines.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
diff --git a/webrtc/modules/video_render/external/video_render_external_impl.h b/webrtc/modules/video_render/external/video_render_external_impl.h
index 9230e60acc..a8b663fff7 100644
--- a/webrtc/modules/video_render/external/video_render_external_impl.h
+++ b/webrtc/modules/video_render/external/video_render_external_impl.h
@@ -11,7 +11,7 @@
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_EXTERNAL_VIDEO_RENDER_EXTERNAL_IMPL_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_EXTERNAL_VIDEO_RENDER_EXTERNAL_IMPL_H_
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/video_render/i_video_render.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
diff --git a/webrtc/modules/video_render/i_video_render.h b/webrtc/modules/video_render/i_video_render.h
index ff1cce782e..e6ec7a4680 100644
--- a/webrtc/modules/video_render/i_video_render.h
+++ b/webrtc/modules/video_render/i_video_render.h
@@ -11,7 +11,7 @@
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_I_VIDEO_RENDER_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_I_VIDEO_RENDER_H_
-#include "webrtc/modules/video_render/include/video_render.h"
+#include "webrtc/modules/video_render/video_render.h"
namespace webrtc {
diff --git a/webrtc/modules/video_render/include/video_render.h b/webrtc/modules/video_render/include/video_render.h
deleted file mode 100644
index 51fcce10c3..0000000000
--- a/webrtc/modules/video_render/include/video_render.h
+++ /dev/null
@@ -1,268 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_INTERFACE_VIDEO_RENDER_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_INTERFACE_VIDEO_RENDER_H_
-
-/*
- * video_render.h
- *
- * This header file together with module.h and module_common_types.h
- * contains all of the APIs that are needed for using the video render
- * module class.
- *
- */
-
-#include "webrtc/modules/interface/module.h"
-#include "webrtc/modules/video_render/include/video_render_defines.h"
-
-namespace webrtc {
-
-// Class definitions
-class VideoRender: public Module
-{
-public:
- /*
- * Create a video render module object
- *
- * id - unique identifier of this video render module object
- * window - pointer to the window to render to
- * fullscreen - true if this is a fullscreen renderer
- * videoRenderType - type of renderer to create
- */
- static VideoRender
- * CreateVideoRender(
- const int32_t id,
- void* window,
- const bool fullscreen,
- const VideoRenderType videoRenderType =
- kRenderDefault);
-
- /*
- * Destroy a video render module object
- *
- * module - object to destroy
- */
- static void DestroyVideoRender(VideoRender* module);
-
- int64_t TimeUntilNextProcess() override = 0;
- int32_t Process() override = 0;
-
- /**************************************************************************
- *
- * Window functions
- *
- ***************************************************************************/
-
- /*
- * Get window for this renderer
- */
- virtual void* Window() = 0;
-
- /*
- * Change render window
- *
- * window - the new render window, assuming same type as originally created.
- */
- virtual int32_t ChangeWindow(void* window) = 0;
-
- /**************************************************************************
- *
- * Incoming Streams
- *
- ***************************************************************************/
-
- /*
- * Add incoming render stream
- *
- * streamID - id of the stream to add
- * zOrder - relative render order for the streams, 0 = on top
- * left - position of the stream in the window, [0.0f, 1.0f]
- * top - position of the stream in the window, [0.0f, 1.0f]
- * right - position of the stream in the window, [0.0f, 1.0f]
- * bottom - position of the stream in the window, [0.0f, 1.0f]
- *
- * Return - callback class to use for delivering new frames to render.
- */
- virtual VideoRenderCallback
- * AddIncomingRenderStream(const uint32_t streamId,
- const uint32_t zOrder,
- const float left, const float top,
- const float right, const float bottom) = 0;
- /*
- * Delete incoming render stream
- *
- * streamID - id of the stream to delete
- */
- virtual int32_t
- DeleteIncomingRenderStream(const uint32_t streamId) = 0;
-
- /*
- * Add incoming render callback, used for external rendering
- *
- * streamID - id of the stream the callback is used for
- * renderObject - the VideoRenderCallback to use for this stream, NULL to remove
- *
- * Return - callback class to use for delivering new frames to render.
- */
- virtual int32_t
- AddExternalRenderCallback(const uint32_t streamId,
- VideoRenderCallback* renderObject) = 0;
-
- /*
- * Get the properties for an incoming render stream
- *
- * streamID - [in] id of the stream to get properties for
- * zOrder - [out] relative render order for the streams, 0 = on top
- * left - [out] position of the stream in the window, [0.0f, 1.0f]
- * top - [out] position of the stream in the window, [0.0f, 1.0f]
- * right - [out] position of the stream in the window, [0.0f, 1.0f]
- * bottom - [out] position of the stream in the window, [0.0f, 1.0f]
- */
- virtual int32_t
- GetIncomingRenderStreamProperties(const uint32_t streamId,
- uint32_t& zOrder,
- float& left, float& top,
- float& right, float& bottom) const = 0;
- /*
- * The incoming frame rate to the module, not the rate rendered in the window.
- */
- virtual uint32_t
- GetIncomingFrameRate(const uint32_t streamId) = 0;
-
- /*
- * Returns the number of incoming streams added to this render module
- */
- virtual uint32_t GetNumIncomingRenderStreams() const = 0;
-
- /*
- * Returns true if this render module has the streamId added, false otherwise.
- */
- virtual bool
- HasIncomingRenderStream(const uint32_t streamId) const = 0;
-
- /*
- * Registers a callback to get raw images at the same time as they are sent
- * to the renderer. To be used for external rendering.
- */
- virtual int32_t
- RegisterRawFrameCallback(const uint32_t streamId,
- VideoRenderCallback* callbackObj) = 0;
-
- /**************************************************************************
- *
- * Start/Stop
- *
- ***************************************************************************/
-
- /*
- * Starts rendering the specified stream
- */
- virtual int32_t StartRender(const uint32_t streamId) = 0;
-
- /*
- * Stops the renderer
- */
- virtual int32_t StopRender(const uint32_t streamId) = 0;
-
- /*
- * Resets the renderer
- * No streams are removed. The state should be as after AddStream was called.
- */
- virtual int32_t ResetRender() = 0;
-
- /**************************************************************************
- *
- * Properties
- *
- ***************************************************************************/
-
- /*
- * Returns the preferred render video type
- */
- virtual RawVideoType PreferredVideoType() const = 0;
-
- /*
- * Returns true if the renderer is in fullscreen mode, otherwise false.
- */
- virtual bool IsFullScreen() = 0;
-
- /*
- * Gets screen resolution in pixels
- */
- virtual int32_t
- GetScreenResolution(uint32_t& screenWidth,
- uint32_t& screenHeight) const = 0;
-
- /*
- * Get the actual render rate for this stream, i.e. the rendered frame
- * rate, not the rate of frames delivered to the renderer.
- */
- virtual uint32_t RenderFrameRate(const uint32_t streamId) = 0;
-
- /*
- * Set cropping of incoming stream
- */
- virtual int32_t SetStreamCropping(const uint32_t streamId,
- const float left,
- const float top,
- const float right,
- const float bottom) = 0;
-
- /*
- * re-configure renderer
- */
-
- // Set the expected time needed by the graphics card or external renderer,
- // i.e. frames will be released for rendering |delay_ms| before set render
- // time in the video frame.
- virtual int32_t SetExpectedRenderDelay(uint32_t stream_id,
- int32_t delay_ms) = 0;
-
- virtual int32_t ConfigureRenderer(const uint32_t streamId,
- const unsigned int zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom) = 0;
-
- virtual int32_t SetTransparentBackground(const bool enable) = 0;
-
- virtual int32_t FullScreenRender(void* window, const bool enable) = 0;
-
- virtual int32_t SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left, const float top,
- const float right, const float bottom) = 0;
-
- virtual int32_t SetText(const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left, const float top,
- const float right, const float bottom) = 0;
-
- /*
- * Set a start image. The image is rendered before the first image has been delivered
- */
- virtual int32_t SetStartImage(const uint32_t streamId,
- const VideoFrame& videoFrame) = 0;
-
- /*
- * Set a timeout image. The image is rendered if no video frame has been delivered
- */
- virtual int32_t SetTimeoutImage(const uint32_t streamId,
- const VideoFrame& videoFrame,
- const uint32_t timeout) = 0;
-};
-} // namespace webrtc
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_INTERFACE_VIDEO_RENDER_H_
diff --git a/webrtc/modules/video_render/include/video_render_defines.h b/webrtc/modules/video_render/include/video_render_defines.h
deleted file mode 100644
index f8f48035ea..0000000000
--- a/webrtc/modules/video_render/include/video_render_defines.h
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_INTERFACE_VIDEO_RENDER_DEFINES_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_INTERFACE_VIDEO_RENDER_DEFINES_H_
-
-#include "webrtc/common_types.h"
-#include "webrtc/common_video/interface/incoming_video_stream.h"
-#include "webrtc/modules/interface/module_common_types.h"
-
-namespace webrtc
-{
-// Defines
-#ifndef NULL
-#define NULL 0
-#endif
-
-// Enums
-enum VideoRenderType
-{
- kRenderExternal = 0, // External
- kRenderWindows = 1, // Windows
- kRenderCocoa = 2, // Mac
- kRenderCarbon = 3,
- kRenderiOS = 4, // iPhone
- kRenderAndroid = 5, // Android
- kRenderX11 = 6, // Linux
- kRenderDefault
-};
-
-// Runtime errors
-enum VideoRenderError
-{
- kRenderShutDown = 0,
- kRenderPerformanceAlarm = 1
-};
-
-// Feedback class to be implemented by module user
-class VideoRenderFeedback
-{
-public:
- virtual void OnRenderError(const int32_t streamId,
- const VideoRenderError error) = 0;
-
-protected:
- virtual ~VideoRenderFeedback()
- {
- }
-};
-
-// Mobile enums
-enum StretchMode
-{
- kStretchToInsideEdge = 1,
- kStretchToOutsideEdge = 2,
- kStretchMatchWidth = 3,
- kStretchMatchHeight = 4,
- kStretchNone = 5
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_INTERFACE_VIDEO_RENDER_DEFINES_H_
diff --git a/webrtc/modules/video_render/ios/open_gles20.h b/webrtc/modules/video_render/ios/open_gles20.h
index f74955fee4..880ddb5231 100644
--- a/webrtc/modules/video_render/ios/open_gles20.h
+++ b/webrtc/modules/video_render/ios/open_gles20.h
@@ -13,7 +13,7 @@
#include <OpenGLES/ES2/glext.h>
-#include "webrtc/modules/video_render/include/video_render_defines.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
/*
* This OpenGles20 is the class of renderer for VideoFrame into a GLES 2.0
diff --git a/webrtc/modules/video_render/ios/video_render_ios_channel.h b/webrtc/modules/video_render/ios/video_render_ios_channel.h
index 375a5ee719..a15ba393dc 100644
--- a/webrtc/modules/video_render/ios/video_render_ios_channel.h
+++ b/webrtc/modules/video_render/ios/video_render_ios_channel.h
@@ -11,7 +11,7 @@
#ifndef WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_CHANNEL_H_
#define WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_CHANNEL_H_
-#include "webrtc/modules/video_render/include/video_render_defines.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
#include "webrtc/modules/video_render/ios/video_render_ios_view.h"
namespace webrtc {
diff --git a/webrtc/modules/video_render/ios/video_render_ios_gles20.h b/webrtc/modules/video_render/ios/video_render_ios_gles20.h
index b6da12aab8..d703630d92 100644
--- a/webrtc/modules/video_render/ios/video_render_ios_gles20.h
+++ b/webrtc/modules/video_render/ios/video_render_ios_gles20.h
@@ -14,10 +14,10 @@
#include <list>
#include <map>
+#include "webrtc/base/platform_thread.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/video_render/ios/video_render_ios_channel.h"
#include "webrtc/modules/video_render/ios/video_render_ios_view.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
namespace webrtc {
@@ -64,7 +64,8 @@ class VideoRenderIosGles20 {
private:
rtc::scoped_ptr<CriticalSectionWrapper> gles_crit_sec_;
EventTimerWrapper* screen_update_event_;
- rtc::scoped_ptr<ThreadWrapper> screen_update_thread_;
+ // TODO(pbos): Remove scoped_ptr and use member directly.
+ rtc::scoped_ptr<rtc::PlatformThread> screen_update_thread_;
VideoRenderIosView* view_;
Rect window_rect_;
diff --git a/webrtc/modules/video_render/ios/video_render_ios_gles20.mm b/webrtc/modules/video_render/ios/video_render_ios_gles20.mm
index 3a276d6030..6ad5db8b8c 100644
--- a/webrtc/modules/video_render/ios/video_render_ios_gles20.mm
+++ b/webrtc/modules/video_render/ios/video_render_ios_gles20.mm
@@ -32,15 +32,15 @@ VideoRenderIosGles20::VideoRenderIosGles20(VideoRenderIosView* view,
z_order_to_channel_(),
gles_context_([view context]),
is_rendering_(true) {
- screen_update_thread_ = ThreadWrapper::CreateThread(
- ScreenUpdateThreadProc, this, "ScreenUpdateGles20");
+ screen_update_thread_.reset(new rtc::PlatformThread(
+ ScreenUpdateThreadProc, this, "ScreenUpdateGles20"));
screen_update_event_ = EventTimerWrapper::Create();
GetWindowRect(window_rect_);
}
VideoRenderIosGles20::~VideoRenderIosGles20() {
// Signal event to exit thread, then delete it
- ThreadWrapper* thread_wrapper = screen_update_thread_.release();
+ rtc::PlatformThread* thread_wrapper = screen_update_thread_.release();
if (thread_wrapper) {
screen_update_event_->Set();
@@ -83,7 +83,7 @@ int VideoRenderIosGles20::Init() {
}
screen_update_thread_->Start();
- screen_update_thread_->SetPriority(kRealtimePriority);
+ screen_update_thread_->SetPriority(rtc::kRealtimePriority);
// Start the event triggering the render process
unsigned int monitor_freq = 60;
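
The renderer threads on iOS, Mac, and Windows all migrate from webrtc::ThreadWrapper to rtc::PlatformThread following one shape. A condensed sketch of that shape, restricted to the calls visible in these hunks (the class name is illustrative; per the TODOs, the scoped_ptr indirection is itself slated for removal):

#include "webrtc/base/platform_thread.h"
#include "webrtc/base/scoped_ptr.h"

class ScreenUpdater {
 public:
  ScreenUpdater() {
    // Was: thread_ = ThreadWrapper::CreateThread(Run, this, "ScreenUpdate");
    thread_.reset(new rtc::PlatformThread(Run, this, "ScreenUpdate"));
  }
  void Start() {
    thread_->Start();  // PlatformThread::Start() returns void; no FALSE checks.
    thread_->SetPriority(rtc::kRealtimePriority);  // priority enum now in rtc::.
  }
  ~ScreenUpdater() {
    rtc::PlatformThread* thread = thread_.release();
    if (thread) {
      thread->Stop();  // Stop() no longer reports failure, so delete always.
      delete thread;
    }
  }

 private:
  static bool Run(void* obj) {
    // Real renderers repaint here; returning true keeps the thread looping.
    return true;
  }
  rtc::scoped_ptr<rtc::PlatformThread> thread_;
};
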
diff --git a/webrtc/modules/video_render/linux/video_x11_channel.h b/webrtc/modules/video_render/linux/video_x11_channel.h
index 8cbd2a27d1..6eb402e12e 100644
--- a/webrtc/modules/video_render/linux/video_x11_channel.h
+++ b/webrtc/modules/video_render/linux/video_x11_channel.h
@@ -13,7 +13,7 @@
#include <sys/shm.h>
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/video_render/include/video_render_defines.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
#include <X11/Xlib.h>
#include <X11/Xutil.h>
diff --git a/webrtc/modules/video_render/linux/video_x11_render.h b/webrtc/modules/video_render/linux/video_x11_render.h
index 265ef7cfab..23b83bd67b 100644
--- a/webrtc/modules/video_render/linux/video_x11_render.h
+++ b/webrtc/modules/video_render/linux/video_x11_render.h
@@ -11,7 +11,7 @@
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_RENDER_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_RENDER_H_
-#include "webrtc/modules/video_render/include/video_render_defines.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
#include <X11/Xlib.h>
#include <map>
diff --git a/webrtc/modules/video_render/mac/video_render_agl.cc b/webrtc/modules/video_render/mac/video_render_agl.cc
index dc157d597b..3243563b2b 100644
--- a/webrtc/modules/video_render/mac/video_render_agl.cc
+++ b/webrtc/modules/video_render/mac/video_render_agl.cc
@@ -395,8 +395,8 @@ _renderingIsPaused( false),
{
//WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s");
- _screenUpdateThread = ThreadWrapper::CreateThread(
- ScreenUpdateThreadProc, this, "ScreenUpdate");
+ _screenUpdateThread.reset(
+ new rtc::PlatformThread(ScreenUpdateThreadProc, this, "ScreenUpdate"));
_screenUpdateEvent = EventWrapper::Create();
if(!IsValidWindowPtr(_windowRef))
@@ -512,8 +512,8 @@ _renderingIsPaused( false),
//WEBRTC_TRACE(kTraceDebug, "%s:%d Constructor", __FUNCTION__, __LINE__);
// _renderCritSec = CriticalSectionWrapper::CreateCriticalSection();
- _screenUpdateThread = ThreadWrapper::CreateThread(
- ScreenUpdateThreadProc, this, "ScreenUpdateThread");
+ _screenUpdateThread.reset(new rtc::PlatformThread(
+ ScreenUpdateThreadProc, this, "ScreenUpdateThread"));
_screenUpdateEvent = EventWrapper::Create();
GetWindowRect(_windowRect);
@@ -677,7 +677,7 @@ VideoRenderAGL::~VideoRenderAGL()
#endif
// Signal event to exit thread, then delete it
- ThreadWrapper* tmpPtr = _screenUpdateThread.release();
+ rtc::PlatformThread* tmpPtr = _screenUpdateThread.release();
if (tmpPtr)
{
@@ -739,7 +739,7 @@ int VideoRenderAGL::Init()
return -1;
}
_screenUpdateThread->Start();
- _screenUpdateThread->SetPriority(kRealtimePriority);
+ _screenUpdateThread->SetPriority(rtc::kRealtimePriority);
// Start the event triggering the render process
unsigned int monitorFreq = 60;
@@ -856,7 +856,7 @@ int VideoRenderAGL::DeleteAGLChannel(int channel)
int VideoRenderAGL::StopThread()
{
CriticalSectionScoped cs(&_renderCritSec);
- ThreadWrapper* tmpPtr = _screenUpdateThread.release();
+ rtc::PlatformThread* tmpPtr = _screenUpdateThread.release();
if (tmpPtr)
{
@@ -1880,7 +1880,7 @@ int32_t VideoRenderAGL::StartRender()
UnlockAGLCntx();
return -1;
}
- _screenUpdateThread->SetPriority(kRealtimePriority);
+ _screenUpdateThread->SetPriority(rtc::kRealtimePriority);
if(FALSE == _screenUpdateEvent->StartTimer(true, 1000/MONITOR_FREQ))
{
//WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to start screenUpdateEvent", __FUNCTION__, __LINE__);
@@ -1891,8 +1891,8 @@ int32_t VideoRenderAGL::StartRender()
return 0;
}
- _screenUpdateThread = ThreadWrapper::CreateThread(ScreenUpdateThreadProc,
- this, "ScreenUpdate");
+ _screenUpdateThread.reset(
+ new rtc::PlatformThread(ScreenUpdateThreadProc, this, "ScreenUpdate"));
_screenUpdateEvent = EventWrapper::Create();
if (!_screenUpdateThread)
@@ -1903,14 +1903,13 @@ int32_t VideoRenderAGL::StartRender()
}
_screenUpdateThread->Start();
- _screenUpdateThread->SetPriority(kRealtimePriority);
+ _screenUpdateThread->SetPriority(rtc::kRealtimePriority);
_screenUpdateEvent->StartTimer(true, 1000/MONITOR_FREQ);
//WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Started screenUpdateThread", __FUNCTION__, __LINE__);
UnlockAGLCntx();
return 0;
-
}
int32_t VideoRenderAGL::StopRender()
diff --git a/webrtc/modules/video_render/mac/video_render_agl.h b/webrtc/modules/video_render/mac/video_render_agl.h
index 8710228754..e1da8faf83 100644
--- a/webrtc/modules/video_render/mac/video_render_agl.h
+++ b/webrtc/modules/video_render/mac/video_render_agl.h
@@ -15,8 +15,8 @@
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_AGL_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_AGL_H_
-#include "webrtc/modules/video_render/include/video_render_defines.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
+#include "webrtc/base/platform_thread.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
#define NEW_HIVIEW_PARENT_EVENT_HANDLER 1
#define NEW_HIVIEW_EVENT_HANDLER 1
@@ -142,7 +142,8 @@ class VideoRenderAGL {
bool _fullScreen;
int _id;
webrtc::CriticalSectionWrapper& _renderCritSec;
- rtc::scoped_ptr<webrtc::ThreadWrapper> _screenUpdateThread;
+ // TODO(pbos): Remove scoped_ptr and use PlatformThread directly.
+ rtc::scoped_ptr<rtc::PlatformThread> _screenUpdateThread;
webrtc::EventWrapper* _screenUpdateEvent;
bool _isHIViewRef;
AGLContext _aglContext;
diff --git a/webrtc/modules/video_render/mac/video_render_nsopengl.h b/webrtc/modules/video_render/mac/video_render_nsopengl.h
index 5dab4d266f..a888b68a97 100644
--- a/webrtc/modules/video_render/mac/video_render_nsopengl.h
+++ b/webrtc/modules/video_render/mac/video_render_nsopengl.h
@@ -23,16 +23,19 @@
#include <map>
#include "webrtc/base/thread_annotations.h"
-#include "webrtc/modules/video_render/include/video_render_defines.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
#import "webrtc/modules/video_render/mac/cocoa_full_screen_window.h"
#import "webrtc/modules/video_render/mac/cocoa_render_view.h"
class Trace;
+namespace rtc {
+class PlatformThread;
+} // namespace rtc
+
namespace webrtc {
class EventTimerWrapper;
-class ThreadWrapper;
class VideoRenderNSOpenGL;
class CriticalSectionWrapper;
@@ -166,7 +169,8 @@ private: // variables
bool _fullScreen;
int _id;
CriticalSectionWrapper& _nsglContextCritSec;
- rtc::scoped_ptr<ThreadWrapper> _screenUpdateThread;
+ // TODO(pbos): Remove scoped_ptr and use PlatformThread directly.
+ rtc::scoped_ptr<rtc::PlatformThread> _screenUpdateThread;
EventTimerWrapper* _screenUpdateEvent;
NSOpenGLContext* _nsglContext;
NSOpenGLContext* _nsglFullScreenContext;
diff --git a/webrtc/modules/video_render/mac/video_render_nsopengl.mm b/webrtc/modules/video_render/mac/video_render_nsopengl.mm
index b5150eb668..b7683a96af 100644
--- a/webrtc/modules/video_render/mac/video_render_nsopengl.mm
+++ b/webrtc/modules/video_render/mac/video_render_nsopengl.mm
@@ -11,11 +11,11 @@
#include "webrtc/engine_configurations.h"
#if defined(COCOA_RENDERING)
+#include "webrtc/base/platform_thread.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/video_render/mac/video_render_nsopengl.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h"
namespace webrtc {
@@ -378,8 +378,8 @@ _renderingIsPaused (FALSE),
_windowRefSuperView(NULL),
_windowRefSuperViewFrame(NSMakeRect(0,0,0,0))
{
- _screenUpdateThread = ThreadWrapper::CreateThread(ScreenUpdateThreadProc,
- this, "ScreenUpdateNSOpenGL");
+ _screenUpdateThread.reset(new rtc::PlatformThread(
+ ScreenUpdateThreadProc, this, "ScreenUpdateNSOpenGL"));
}
int VideoRenderNSOpenGL::ChangeWindow(CocoaRenderView* newWindowRef)
@@ -427,15 +427,15 @@ int32_t VideoRenderNSOpenGL::StartRender()
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "Restarting screenUpdateThread");
// we already have the thread. Most likely StopRender() was called and they were paused
- if(FALSE == _screenUpdateThread->Start() ||
- FALSE == _screenUpdateEvent->StartTimer(true, 1000/MONITOR_FREQ))
- {
+ _screenUpdateThread->Start();
+ if (FALSE ==
+ _screenUpdateEvent->StartTimer(true, 1000 / MONITOR_FREQ)) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "Failed to restart screenUpdateThread or screenUpdateEvent");
UnlockAGLCntx();
return -1;
}
- _screenUpdateThread->SetPriority(kRealtimePriority);
+ _screenUpdateThread->SetPriority(rtc::kRealtimePriority);
UnlockAGLCntx();
return 0;
@@ -471,8 +471,8 @@ int32_t VideoRenderNSOpenGL::StopRender()
return 0;
}
- if(FALSE == _screenUpdateThread->Stop() || FALSE == _screenUpdateEvent->StopTimer())
- {
+ _screenUpdateThread->Stop();
+ if (FALSE == _screenUpdateEvent->StopTimer()) {
_renderingIsPaused = FALSE;
UnlockAGLCntx();
@@ -657,17 +657,15 @@ VideoRenderNSOpenGL::~VideoRenderNSOpenGL()
}
// Signal event to exit thread, then delete it
- ThreadWrapper* tmpPtr = _screenUpdateThread.release();
+ rtc::PlatformThread* tmpPtr = _screenUpdateThread.release();
if (tmpPtr)
{
_screenUpdateEvent->Set();
_screenUpdateEvent->StopTimer();
- if (tmpPtr->Stop())
- {
- delete tmpPtr;
- }
+ tmpPtr->Stop();
+ delete tmpPtr;
delete _screenUpdateEvent;
_screenUpdateEvent = NULL;
}
@@ -716,7 +714,7 @@ int VideoRenderNSOpenGL::Init()
}
_screenUpdateThread->Start();
- _screenUpdateThread->SetPriority(kRealtimePriority);
+ _screenUpdateThread->SetPriority(rtc::kRealtimePriority);
// Start the event triggering the render process
unsigned int monitorFreq = 60;
@@ -864,17 +862,15 @@ int32_t VideoRenderNSOpenGL::GetChannelProperties(const uint16_t streamId,
int VideoRenderNSOpenGL::StopThread()
{
- ThreadWrapper* tmpPtr = _screenUpdateThread.release();
+ rtc::PlatformThread* tmpPtr = _screenUpdateThread.release();
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
"%s Stopping thread ", __FUNCTION__, tmpPtr);
if (tmpPtr)
{
_screenUpdateEvent->Set();
- if (tmpPtr->Stop())
- {
- delete tmpPtr;
- }
+ tmpPtr->Stop();
+ delete tmpPtr;
}
delete _screenUpdateEvent;
diff --git a/webrtc/modules/video_render/test/testAPI/testAPI.cc b/webrtc/modules/video_render/test/testAPI/testAPI.cc
index 256d031c0c..cea2f6b56f 100644
--- a/webrtc/modules/video_render/test/testAPI/testAPI.cc
+++ b/webrtc/modules/video_render/test/testAPI/testAPI.cc
@@ -32,10 +32,10 @@
#endif
#include "webrtc/common_types.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/utility/interface/process_thread.h"
-#include "webrtc/modules/video_render/include/video_render.h"
-#include "webrtc/modules/video_render/include/video_render_defines.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/utility/include/process_thread.h"
+#include "webrtc/modules/video_render/video_render.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
#include "webrtc/system_wrappers/include/sleep.h"
#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/system_wrappers/include/trace.h"
@@ -244,7 +244,7 @@ int WebRtcCreateWindow(Window *outWindow, Display **outDisplay, int winNum, int
return 0;
}
-#endif // LINUX
+#endif // WEBRTC_LINUX
// Note: Mac code is in testApi_mac.mm.
diff --git a/webrtc/modules/video_render/test/testAPI/testAPI.h b/webrtc/modules/video_render/test/testAPI/testAPI.h
index 8b14e84931..0655a5b434 100644
--- a/webrtc/modules/video_render/test/testAPI/testAPI.h
+++ b/webrtc/modules/video_render/test/testAPI/testAPI.h
@@ -11,7 +11,7 @@
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_TEST_TESTAPI_TESTAPI_H
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_TEST_TESTAPI_TESTAPI_H
-#include "webrtc/modules/video_render/include/video_render_defines.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
void RunVideoRenderTests(void* window, webrtc::VideoRenderType windowType);
diff --git a/webrtc/modules/video_render/test/testAPI/testAPI_mac.mm b/webrtc/modules/video_render/test/testAPI/testAPI_mac.mm
index dd57397c73..dfee4c7298 100644
--- a/webrtc/modules/video_render/test/testAPI/testAPI_mac.mm
+++ b/webrtc/modules/video_render/test/testAPI/testAPI_mac.mm
@@ -20,10 +20,10 @@
#import "webrtc/modules/video_render/mac/cocoa_render_view.h"
#include "webrtc/common_types.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/utility/interface/process_thread.h"
-#include "webrtc/modules/video_render/include/video_render.h"
-#include "webrtc/modules/video_render/include/video_render_defines.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/utility/include/process_thread.h"
+#include "webrtc/modules/video_render/video_render.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/system_wrappers/include/trace.h"
diff --git a/webrtc/modules/video_render/video_render.gypi b/webrtc/modules/video_render/video_render.gypi
index 63f69b0a63..e8cc03a4b0 100644
--- a/webrtc/modules/video_render/video_render.gypi
+++ b/webrtc/modules/video_render/video_render.gypi
@@ -25,8 +25,8 @@
'external/video_render_external_impl.cc',
'external/video_render_external_impl.h',
'i_video_render.h',
- 'include/video_render.h',
- 'include/video_render_defines.h',
+ 'video_render.h',
+ 'video_render_defines.h',
'video_render_impl.h',
],
},
@@ -149,13 +149,28 @@
'<(directx_sdk_path)/Include',
],
}],
+ ['OS=="win" and clang==1', {
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'AdditionalOptions': [
+ # Disable warnings failing when compiling with Clang on Windows.
+ # https://bugs.chromium.org/p/webrtc/issues/detail?id=5366
+ '-Wno-comment',
+ '-Wno-reorder',
+ '-Wno-unused-value',
+ '-Wno-unused-private-field',
+ ],
+ },
+ },
+ }],
] # conditions
},
],
}], # build_with_chromium==0
- ['include_tests==1', {
+ ['include_tests==1 and OS!="ios"', {
'targets': [
{
+ # Does not compile on iOS: webrtc:4755.
'target_name': 'video_render_tests',
'type': 'executable',
'dependencies': [
@@ -197,7 +212,7 @@
] # conditions
}, # video_render_module_test
], # targets
- }], # include_tests==1
+ }], # include_tests==1 and OS!=ios
], # conditions
}
diff --git a/webrtc/modules/video_render/video_render.h b/webrtc/modules/video_render/video_render.h
new file mode 100644
index 0000000000..a193a187e7
--- /dev/null
+++ b/webrtc/modules/video_render/video_render.h
@@ -0,0 +1,268 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_H_
+
+/*
+ * video_render.h
+ *
+ * This header file together with module.h and module_common_types.h
+ * contains all of the APIs that are needed for using the video render
+ * module class.
+ *
+ */
+
+#include "webrtc/modules/include/module.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
+
+namespace webrtc {
+
+// Class definitions
+class VideoRender: public Module
+{
+public:
+ /*
+ * Create a video render module object
+ *
+ * id - unique identifier of this video render module object
+ * window - pointer to the window to render to
+ * fullscreen - true if this is a fullscreen renderer
+ * videoRenderType - type of renderer to create
+ */
+ static VideoRender
+ * CreateVideoRender(
+ const int32_t id,
+ void* window,
+ const bool fullscreen,
+ const VideoRenderType videoRenderType =
+ kRenderDefault);
+
+ /*
+ * Destroy a video render module object
+ *
+ * module - object to destroy
+ */
+ static void DestroyVideoRender(VideoRender* module);
+
+ int64_t TimeUntilNextProcess() override = 0;
+ int32_t Process() override = 0;
+
+ /**************************************************************************
+ *
+ * Window functions
+ *
+ ***************************************************************************/
+
+ /*
+ * Get window for this renderer
+ */
+ virtual void* Window() = 0;
+
+ /*
+ * Change render window
+ *
+ * window - the new render window, assuming same type as originally created.
+ */
+ virtual int32_t ChangeWindow(void* window) = 0;
+
+ /**************************************************************************
+ *
+ * Incoming Streams
+ *
+ ***************************************************************************/
+
+ /*
+ * Add incoming render stream
+ *
+ * streamID - id of the stream to add
+ * zOrder - relative render order for the streams, 0 = on top
+ * left - position of the stream in the window, [0.0f, 1.0f]
+ * top - position of the stream in the window, [0.0f, 1.0f]
+ * right - position of the stream in the window, [0.0f, 1.0f]
+ * bottom - position of the stream in the window, [0.0f, 1.0f]
+ *
+ * Return - callback class to use for delivering new frames to render.
+ */
+ virtual VideoRenderCallback
+ * AddIncomingRenderStream(const uint32_t streamId,
+ const uint32_t zOrder,
+ const float left, const float top,
+ const float right, const float bottom) = 0;
+ /*
+ * Delete incoming render stream
+ *
+ * streamID - id of the stream to delete
+ */
+ virtual int32_t
+ DeleteIncomingRenderStream(const uint32_t streamId) = 0;
+
+ /*
+ * Add incoming render callback, used for external rendering
+ *
+ * streamID - id of the stream the callback is used for
+ * renderObject - the VideoRenderCallback to use for this stream, NULL to remove
+ *
+ * Return - callback class to use for delivering new frames to render.
+ */
+ virtual int32_t
+ AddExternalRenderCallback(const uint32_t streamId,
+ VideoRenderCallback* renderObject) = 0;
+
+ /*
+ * Get the properties for an incoming render stream
+ *
+ * streamID - [in] id of the stream to get properties for
+ * zOrder - [out] relative render order for the streams, 0 = on top
+ * left - [out] position of the stream in the window, [0.0f, 1.0f]
+ * top - [out] position of the stream in the window, [0.0f, 1.0f]
+ * right - [out] position of the stream in the window, [0.0f, 1.0f]
+ * bottom - [out] position of the stream in the window, [0.0f, 1.0f]
+ */
+ virtual int32_t
+ GetIncomingRenderStreamProperties(const uint32_t streamId,
+ uint32_t& zOrder,
+ float& left, float& top,
+ float& right, float& bottom) const = 0;
+ /*
+ * The incoming frame rate to the module, not the rate rendered in the window.
+ */
+ virtual uint32_t
+ GetIncomingFrameRate(const uint32_t streamId) = 0;
+
+ /*
+ * Returns the number of incoming streams added to this render module
+ */
+ virtual uint32_t GetNumIncomingRenderStreams() const = 0;
+
+ /*
+ * Returns true if this render module has the streamId added, false otherwise.
+ */
+ virtual bool
+ HasIncomingRenderStream(const uint32_t streamId) const = 0;
+
+ /*
+ * Registers a callback to get raw images at the same time as they are sent
+ * to the renderer. To be used for external rendering.
+ */
+ virtual int32_t
+ RegisterRawFrameCallback(const uint32_t streamId,
+ VideoRenderCallback* callbackObj) = 0;
+
+ /**************************************************************************
+ *
+ * Start/Stop
+ *
+ ***************************************************************************/
+
+ /*
+ * Starts rendering the specified stream
+ */
+ virtual int32_t StartRender(const uint32_t streamId) = 0;
+
+ /*
+ * Stops the renderer
+ */
+ virtual int32_t StopRender(const uint32_t streamId) = 0;
+
+ /*
+ * Resets the renderer
+ * No streams are removed. The state should be as after AddStream was called.
+ */
+ virtual int32_t ResetRender() = 0;
+
+ /**************************************************************************
+ *
+ * Properties
+ *
+ ***************************************************************************/
+
+ /*
+ * Returns the preferred render video type
+ */
+ virtual RawVideoType PreferredVideoType() const = 0;
+
+ /*
+ * Returns true if the renderer is in fullscreen mode, otherwise false.
+ */
+ virtual bool IsFullScreen() = 0;
+
+ /*
+ * Gets screen resolution in pixels
+ */
+ virtual int32_t
+ GetScreenResolution(uint32_t& screenWidth,
+ uint32_t& screenHeight) const = 0;
+
+ /*
+ * Get the actual render rate for this stream, i.e. the rendered frame
+ * rate, not the rate of frames delivered to the renderer.
+ */
+ virtual uint32_t RenderFrameRate(const uint32_t streamId) = 0;
+
+ /*
+ * Set cropping of incoming stream
+ */
+ virtual int32_t SetStreamCropping(const uint32_t streamId,
+ const float left,
+ const float top,
+ const float right,
+ const float bottom) = 0;
+
+ /*
+ * re-configure renderer
+ */
+
+ // Set the expected time needed by the graphics card or external renderer,
+ // i.e. frames will be released for rendering |delay_ms| before set render
+ // time in the video frame.
+ virtual int32_t SetExpectedRenderDelay(uint32_t stream_id,
+ int32_t delay_ms) = 0;
+
+ virtual int32_t ConfigureRenderer(const uint32_t streamId,
+ const unsigned int zOrder,
+ const float left,
+ const float top,
+ const float right,
+ const float bottom) = 0;
+
+ virtual int32_t SetTransparentBackground(const bool enable) = 0;
+
+ virtual int32_t FullScreenRender(void* window, const bool enable) = 0;
+
+ virtual int32_t SetBitmap(const void* bitMap,
+ const uint8_t pictureId,
+ const void* colorKey,
+ const float left, const float top,
+ const float right, const float bottom) = 0;
+
+ virtual int32_t SetText(const uint8_t textId,
+ const uint8_t* text,
+ const int32_t textLength,
+ const uint32_t textColorRef,
+ const uint32_t backgroundColorRef,
+ const float left, const float top,
+ const float right, const float bottom) = 0;
+
+ /*
+ * Set a start image. The image is rendered before the first image has been delivered
+ */
+ virtual int32_t SetStartImage(const uint32_t streamId,
+ const VideoFrame& videoFrame) = 0;
+
+ /*
+ * Set a timeout image. The image is rendered if no video frame has been delivered
+ */
+ virtual int32_t SetTimeoutImage(const uint32_t streamId,
+ const VideoFrame& videoFrame,
+ const uint32_t timeout) = 0;
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_H_
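
The header is re-added verbatim at its new path; only the include guard changes. For orientation, a minimal usage sketch built from the declarations above — |native_window| and the ids are placeholders, and error handling is elided:

#include "webrtc/modules/video_render/video_render.h"

void RenderOneStream(void* native_window) {
  webrtc::VideoRender* render = webrtc::VideoRender::CreateVideoRender(
      0 /* id */, native_window, false /* fullscreen */,
      webrtc::kRenderDefault);

  // Full-window stream at the top of the z-order. The returned callback is
  // the sink that the decode pipeline feeds with frames to render.
  webrtc::VideoRenderCallback* sink = render->AddIncomingRenderStream(
      1 /* streamId */, 0 /* zOrder */, 0.0f, 0.0f, 1.0f, 1.0f);
  (void)sink;  // frames would be delivered through |sink|

  render->StartRender(1);
  // ... deliver frames ...
  render->StopRender(1);
  render->DeleteIncomingRenderStream(1);
  webrtc::VideoRender::DestroyVideoRender(render);
}
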
diff --git a/webrtc/modules/video_render/video_render_defines.h b/webrtc/modules/video_render/video_render_defines.h
new file mode 100644
index 0000000000..999707cb6e
--- /dev/null
+++ b/webrtc/modules/video_render/video_render_defines.h
@@ -0,0 +1,70 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_DEFINES_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_DEFINES_H_
+
+#include "webrtc/common_types.h"
+#include "webrtc/common_video/include/incoming_video_stream.h"
+#include "webrtc/modules/include/module_common_types.h"
+
+namespace webrtc
+{
+// Defines
+#ifndef NULL
+#define NULL 0
+#endif
+
+// Enums
+enum VideoRenderType
+{
+ kRenderExternal = 0, // External
+ kRenderWindows = 1, // Windows
+ kRenderCocoa = 2, // Mac
+ kRenderCarbon = 3,
+ kRenderiOS = 4, // iPhone
+ kRenderAndroid = 5, // Android
+ kRenderX11 = 6, // Linux
+ kRenderDefault
+};
+
+// Runtime errors
+enum VideoRenderError
+{
+ kRenderShutDown = 0,
+ kRenderPerformanceAlarm = 1
+};
+
+// Feedback class to be implemented by module user
+class VideoRenderFeedback
+{
+public:
+ virtual void OnRenderError(const int32_t streamId,
+ const VideoRenderError error) = 0;
+
+protected:
+ virtual ~VideoRenderFeedback()
+ {
+ }
+};
+
+// Mobile enums
+enum StretchMode
+{
+ kStretchToInsideEdge = 1,
+ kStretchToOutsideEdge = 2,
+ kStretchMatchWidth = 3,
+ kStretchMatchHeight = 4,
+ kStretchNone = 5
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_DEFINES_H_
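
video_render_defines.h moves the same way. The one extension point it declares is the feedback interface; a sketch of a module user implementing it (the class name is invented for illustration):

#include "webrtc/modules/video_render/video_render_defines.h"

class LoggingRenderFeedback : public webrtc::VideoRenderFeedback {
 public:
  void OnRenderError(const int32_t streamId,
                     const webrtc::VideoRenderError error) override {
    if (error == webrtc::kRenderPerformanceAlarm) {
      // Renderer is falling behind; e.g. throttle delivery for |streamId|.
    } else {  // webrtc::kRenderShutDown
      // Renderer went away; stop delivering frames.
    }
  }
};
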
diff --git a/webrtc/modules/video_render/video_render_impl.cc b/webrtc/modules/video_render/video_render_impl.cc
index 32ec2b0e75..d2a074b4c4 100644
--- a/webrtc/modules/video_render/video_render_impl.cc
+++ b/webrtc/modules/video_render/video_render_impl.cc
@@ -10,11 +10,11 @@
#include <assert.h>
-#include "webrtc/common_video/interface/incoming_video_stream.h"
+#include "webrtc/common_video/include/incoming_video_stream.h"
#include "webrtc/engine_configurations.h"
#include "webrtc/modules/video_render/external/video_render_external_impl.h"
#include "webrtc/modules/video_render/i_video_render.h"
-#include "webrtc/modules/video_render/include/video_render_defines.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
#include "webrtc/modules/video_render/video_render_impl.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h"
@@ -197,7 +197,8 @@ ModuleVideoRenderImpl::AddIncomingRenderStream(const uint32_t streamId,
}
// Create platform-independent code
- IncomingVideoStream* ptrIncomingStream = new IncomingVideoStream(streamId);
+ IncomingVideoStream* ptrIncomingStream =
+ new IncomingVideoStream(streamId, false);
ptrIncomingStream->SetRenderCallback(ptrRenderCallback);
VideoRenderCallback* moduleCallback = ptrIncomingStream->ModuleCallback();
diff --git a/webrtc/modules/video_render/video_render_impl.h b/webrtc/modules/video_render/video_render_impl.h
index cc00897cc3..ce93cea6b5 100644
--- a/webrtc/modules/video_render/video_render_impl.h
+++ b/webrtc/modules/video_render/video_render_impl.h
@@ -14,7 +14,7 @@
#include <map>
#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/video_render/include/video_render.h"
+#include "webrtc/modules/video_render/video_render.h"
namespace webrtc {
class CriticalSectionWrapper;
diff --git a/webrtc/modules/video_render/video_render_internal_impl.cc b/webrtc/modules/video_render/video_render_internal_impl.cc
index 09ebc7e548..1fed26e9c4 100644
--- a/webrtc/modules/video_render/video_render_internal_impl.cc
+++ b/webrtc/modules/video_render/video_render_internal_impl.cc
@@ -10,10 +10,10 @@
#include <assert.h>
-#include "webrtc/common_video/interface/incoming_video_stream.h"
+#include "webrtc/common_video/include/incoming_video_stream.h"
#include "webrtc/engine_configurations.h"
#include "webrtc/modules/video_render/i_video_render.h"
-#include "webrtc/modules/video_render/include/video_render_defines.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
#include "webrtc/modules/video_render/video_render_impl.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h"
@@ -420,7 +420,8 @@ ModuleVideoRenderImpl::AddIncomingRenderStream(const uint32_t streamId,
}
// Create platform-independent code
- IncomingVideoStream* ptrIncomingStream = new IncomingVideoStream(streamId);
+ IncomingVideoStream* ptrIncomingStream =
+ new IncomingVideoStream(streamId, false);
ptrIncomingStream->SetRenderCallback(ptrRenderCallback);
VideoRenderCallback* moduleCallback = ptrIncomingStream->ModuleCallback();
diff --git a/webrtc/modules/video_render/windows/i_video_render_win.h b/webrtc/modules/video_render/windows/i_video_render_win.h
index 56731e3770..6dbb4fd3cb 100644
--- a/webrtc/modules/video_render/windows/i_video_render_win.h
+++ b/webrtc/modules/video_render/windows/i_video_render_win.h
@@ -11,7 +11,7 @@
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_I_VIDEO_RENDER_WIN_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_I_VIDEO_RENDER_WIN_H_
-#include "webrtc/modules/video_render/include/video_render.h"
+#include "webrtc/modules/video_render/video_render.h"
namespace webrtc {
diff --git a/webrtc/modules/video_render/windows/video_render_direct3d9.cc b/webrtc/modules/video_render/windows/video_render_direct3d9.cc
index 24dd0efddb..83835aebb8 100644
--- a/webrtc/modules/video_render/windows/video_render_direct3d9.cc
+++ b/webrtc/modules/video_render/windows/video_render_direct3d9.cc
@@ -294,8 +294,8 @@ VideoRenderDirect3D9::VideoRenderDirect3D9(Trace* trace,
_totalMemory(0),
_availableMemory(0)
{
- _screenUpdateThread = ThreadWrapper::CreateThread(
- ScreenUpdateThreadProc, this, "ScreenUpdateThread");
+ _screenUpdateThread.reset(new rtc::PlatformThread(
+ ScreenUpdateThreadProc, this, "ScreenUpdateThread"));
_screenUpdateEvent = EventTimerWrapper::Create();
SetRect(&_originalHwndRect, 0, 0, 0, 0);
}
@@ -305,7 +305,7 @@ VideoRenderDirect3D9::~VideoRenderDirect3D9()
//NOTE: we should not enter CriticalSection in here!
// Signal event to exit thread, then delete it
- ThreadWrapper* tmpPtr = _screenUpdateThread.release();
+ rtc::PlatformThread* tmpPtr = _screenUpdateThread.release();
if (tmpPtr)
{
_screenUpdateEvent->Set();
@@ -546,7 +546,7 @@ int32_t VideoRenderDirect3D9::Init()
return -1;
}
_screenUpdateThread->Start();
- _screenUpdateThread->SetPriority(kRealtimePriority);
+ _screenUpdateThread->SetPriority(rtc::kRealtimePriority);
// Start the event triggering the render process
unsigned int monitorFreq = 60;
diff --git a/webrtc/modules/video_render/windows/video_render_direct3d9.h b/webrtc/modules/video_render/windows/video_render_direct3d9.h
index c8f6639d9a..5a1f207934 100644
--- a/webrtc/modules/video_render/windows/video_render_direct3d9.h
+++ b/webrtc/modules/video_render/windows/video_render_direct3d9.h
@@ -19,8 +19,8 @@
#include <Map>
// Added
-#include "webrtc/modules/video_render/include/video_render_defines.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
+#include "webrtc/base/platform_thread.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
#pragma comment(lib, "d3d9.lib") // located in DirectX SDK
@@ -203,7 +203,8 @@ private:
CriticalSectionWrapper& _refD3DCritsect;
Trace* _trace;
- rtc::scoped_ptr<ThreadWrapper> _screenUpdateThread;
+ // TODO(pbos): Remove scoped_ptr and use PlatformThread directly.
+ rtc::scoped_ptr<rtc::PlatformThread> _screenUpdateThread;
EventTimerWrapper* _screenUpdateEvent;
HWND _hWnd;
diff --git a/webrtc/p2p/OWNERS b/webrtc/p2p/OWNERS
index 9a527df143..0f00d1aa48 100644
--- a/webrtc/p2p/OWNERS
+++ b/webrtc/p2p/OWNERS
@@ -9,4 +9,9 @@ pthatcher@webrtc.org
sergeyu@chromium.org
tommi@webrtc.org
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
+
per-file BUILD.gn=kjellander@webrtc.org
diff --git a/webrtc/p2p/base/candidate.h b/webrtc/p2p/base/candidate.h
index 3f0ea43cde..ac7acabf05 100644
--- a/webrtc/p2p/base/candidate.h
+++ b/webrtc/p2p/base/candidate.h
@@ -105,6 +105,7 @@ class Candidate {
std::min(prio_val, static_cast<uint64_t>(UINT_MAX)));
}
+ // TODO(honghaiz): Change to usernameFragment or ufrag.
const std::string & username() const { return username_; }
void set_username(const std::string & username) { username_ = username; }
diff --git a/webrtc/p2p/base/dtlstransport.h b/webrtc/p2p/base/dtlstransport.h
index e9a1ae2ada..9f2903e1d7 100644
--- a/webrtc/p2p/base/dtlstransport.h
+++ b/webrtc/p2p/base/dtlstransport.h
@@ -35,7 +35,7 @@ class DtlsTransport : public Base {
: Base(name, allocator),
certificate_(certificate),
secure_role_(rtc::SSL_CLIENT),
- ssl_max_version_(rtc::SSL_PROTOCOL_DTLS_10) {}
+ ssl_max_version_(rtc::SSL_PROTOCOL_DTLS_12) {}
~DtlsTransport() {
Base::DestroyAllChannels();
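
Both the transport here and the channel wrapper below bump their default ssl_max_version_ from DTLS 1.0 to DTLS 1.2. The cap is a ceiling, not a floor, so a 1.0-only peer still negotiates 1.0; a caller that needs the old ceiling would have to lower the maximum explicitly. A sketch — the setter name is an assumption about the surrounding transport API, since this diff only shows the member default:

void PinLegacyDtls(cricket::DtlsTransportChannelWrapper* channel) {
  // Hypothetical opt-out restoring the previous default of DTLS 1.0.
  channel->SetSslMaxProtocolVersion(rtc::SSL_PROTOCOL_DTLS_10);
}
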
diff --git a/webrtc/p2p/base/dtlstransportchannel.cc b/webrtc/p2p/base/dtlstransportchannel.cc
index 0c063e0323..d6b5bce723 100644
--- a/webrtc/p2p/base/dtlstransportchannel.cc
+++ b/webrtc/p2p/base/dtlstransportchannel.cc
@@ -8,6 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <utility>
+
#include "webrtc/p2p/base/dtlstransportchannel.h"
#include "webrtc/p2p/base/common.h"
@@ -95,7 +97,7 @@ DtlsTransportChannelWrapper::DtlsTransportChannelWrapper(
channel_(channel),
downward_(NULL),
ssl_role_(rtc::SSL_CLIENT),
- ssl_max_version_(rtc::SSL_PROTOCOL_DTLS_10) {
+ ssl_max_version_(rtc::SSL_PROTOCOL_DTLS_12) {
channel_->SignalWritableState.connect(this,
&DtlsTransportChannelWrapper::OnWritableState);
channel_->SignalReadPacket.connect(this,
@@ -199,6 +201,8 @@ bool DtlsTransportChannelWrapper::SetRemoteFingerprint(
size_t digest_len) {
rtc::Buffer remote_fingerprint_value(digest, digest_len);
+ // Once we have the local certificate, the same remote fingerprint can be set
+ // multiple times.
if (dtls_active_ && remote_fingerprint_value_ == remote_fingerprint_value &&
!digest_alg.empty()) {
// This may happen during renegotiation.
@@ -206,28 +210,36 @@ bool DtlsTransportChannelWrapper::SetRemoteFingerprint(
return true;
}
- // Allow SetRemoteFingerprint with a NULL digest even if SetLocalCertificate
- // hasn't been called.
- if (dtls_ || (!dtls_active_ && !digest_alg.empty())) {
- LOG_J(LS_ERROR, this) << "Can't set DTLS remote settings in this state.";
- return false;
- }
-
+ // If the other side doesn't support DTLS, turn off |dtls_active_|.
if (digest_alg.empty()) {
+ RTC_DCHECK(!digest_len);
LOG_J(LS_INFO, this) << "Other side didn't support DTLS.";
dtls_active_ = false;
return true;
}
+ // Otherwise, we must have a local certificate before setting remote
+ // fingerprint.
+ if (!dtls_active_) {
+ LOG_J(LS_ERROR, this) << "Can't set DTLS remote settings in this state.";
+ return false;
+ }
+
// At this point we know we are doing DTLS
- remote_fingerprint_value_ = remote_fingerprint_value.Pass();
+ remote_fingerprint_value_ = std::move(remote_fingerprint_value);
remote_fingerprint_algorithm_ = digest_alg;
+ bool reconnect = dtls_;
+
if (!SetupDtls()) {
set_dtls_state(DTLS_TRANSPORT_FAILED);
return false;
}
+ if (reconnect) {
+ Reconnect();
+ }
+
return true;
}
@@ -267,7 +279,7 @@ bool DtlsTransportChannelWrapper::SetupDtls() {
// Set up DTLS-SRTP, if it's been enabled.
if (!srtp_ciphers_.empty()) {
- if (!dtls_->SetDtlsSrtpCiphers(srtp_ciphers_)) {
+ if (!dtls_->SetDtlsSrtpCryptoSuites(srtp_ciphers_)) {
LOG_J(LS_ERROR, this) << "Couldn't set DTLS-SRTP ciphers.";
return false;
}
@@ -279,11 +291,10 @@ bool DtlsTransportChannelWrapper::SetupDtls() {
return true;
}
-bool DtlsTransportChannelWrapper::SetSrtpCiphers(
- const std::vector<std::string>& ciphers) {
- if (srtp_ciphers_ == ciphers) {
+bool DtlsTransportChannelWrapper::SetSrtpCryptoSuites(
+ const std::vector<int>& ciphers) {
+ if (srtp_ciphers_ == ciphers)
return true;
- }
if (dtls_state() == DTLS_TRANSPORT_CONNECTING) {
LOG(LS_WARNING) << "Ignoring new SRTP ciphers while DTLS is negotiating";
@@ -294,18 +305,18 @@ bool DtlsTransportChannelWrapper::SetSrtpCiphers(
// We don't support DTLS renegotiation currently. If the new set of SRTP
// ciphers differs from what's currently in use, we will not use it.
// So for now, let's be happy (or sad) with a warning message.
- std::string current_srtp_cipher;
- if (!dtls_->GetDtlsSrtpCipher(&current_srtp_cipher)) {
+ int current_srtp_cipher;
+ if (!dtls_->GetDtlsSrtpCryptoSuite(&current_srtp_cipher)) {
LOG(LS_ERROR) << "Failed to get the current SRTP cipher for DTLS channel";
return false;
}
- const std::vector<std::string>::const_iterator iter =
+ const std::vector<int>::const_iterator iter =
std::find(ciphers.begin(), ciphers.end(), current_srtp_cipher);
if (iter == ciphers.end()) {
std::string requested_str;
for (size_t i = 0; i < ciphers.size(); ++i) {
requested_str.append(" ");
- requested_str.append(ciphers[i]);
+ requested_str.append(rtc::SrtpCryptoSuiteToName(ciphers[i]));
requested_str.append(" ");
}
LOG(LS_WARNING) << "Ignoring new set of SRTP ciphers, as DTLS "
@@ -324,12 +335,12 @@ bool DtlsTransportChannelWrapper::SetSrtpCiphers(
return true;
}
-bool DtlsTransportChannelWrapper::GetSrtpCryptoSuite(std::string* cipher) {
+bool DtlsTransportChannelWrapper::GetSrtpCryptoSuite(int* cipher) {
if (dtls_state() != DTLS_TRANSPORT_CONNECTED) {
return false;
}
- return dtls_->GetDtlsSrtpCipher(cipher);
+ return dtls_->GetDtlsSrtpCryptoSuite(cipher);
}
@@ -617,4 +628,12 @@ void DtlsTransportChannelWrapper::OnConnectionRemoved(
SignalConnectionRemoved(this);
}
+void DtlsTransportChannelWrapper::Reconnect() {
+ set_dtls_state(DTLS_TRANSPORT_NEW);
+ set_writable(false);
+ if (channel_->writable()) {
+ OnWritableState(channel_);
+ }
+}
+
} // namespace cricket
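
The SRTP negotiation API switches from string cipher names to integer crypto-suite IDs end to end: SetSrtpCiphers(vector<string>) becomes SetSrtpCryptoSuites(vector<int>), and GetSrtpCryptoSuite now yields an int. A caller-side sketch using only names that appear in this diff (GetSrtpCryptoSuite returns false until the channel reaches DTLS_TRANSPORT_CONNECTED):

#include <vector>

#include "webrtc/base/logging.h"

void NegotiateSrtp(cricket::DtlsTransportChannelWrapper* channel) {
  std::vector<int> suites;
  suites.push_back(rtc::SRTP_AES128_CM_SHA1_80);  // was "AES_CM_128_HMAC_SHA1_80"
  channel->SetSrtpCryptoSuites(suites);  // must be called before DTLS starts

  int suite;
  if (channel->GetSrtpCryptoSuite(&suite)) {
    LOG(LS_INFO) << "Negotiated SRTP suite: "
                 << rtc::SrtpCryptoSuiteToName(suite);
  }
}
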
diff --git a/webrtc/p2p/base/dtlstransportchannel.h b/webrtc/p2p/base/dtlstransportchannel.h
index 41e081b7fe..955b963a36 100644
--- a/webrtc/p2p/base/dtlstransportchannel.h
+++ b/webrtc/p2p/base/dtlstransportchannel.h
@@ -126,10 +126,10 @@ class DtlsTransportChannelWrapper : public TransportChannelImpl {
// Set up the ciphers to use for DTLS-SRTP. If this method is not called
// before DTLS starts, or |ciphers| is empty, SRTP keys won't be negotiated.
// This method should be called before SetupDtls.
- bool SetSrtpCiphers(const std::vector<std::string>& ciphers) override;
+ bool SetSrtpCryptoSuites(const std::vector<int>& ciphers) override;
// Find out which DTLS-SRTP cipher was negotiated
- bool GetSrtpCryptoSuite(std::string* cipher) override;
+ bool GetSrtpCryptoSuite(int* cipher) override;
bool GetSslRole(rtc::SSLRole* role) const override;
bool SetSslRole(rtc::SSLRole role) override;
@@ -216,6 +216,7 @@ class DtlsTransportChannelWrapper : public TransportChannelImpl {
void OnRoleConflict(TransportChannelImpl* channel);
void OnRouteChange(TransportChannel* channel, const Candidate& candidate);
void OnConnectionRemoved(TransportChannelImpl* channel);
+ void Reconnect();
Transport* transport_; // The transport_ that created us.
rtc::Thread* worker_thread_; // Everything should occur on this thread.
@@ -223,7 +224,7 @@ class DtlsTransportChannelWrapper : public TransportChannelImpl {
TransportChannelImpl* const channel_;
rtc::scoped_ptr<rtc::SSLStreamAdapter> dtls_; // The DTLS stream
StreamInterfaceChannel* downward_; // Wrapper for channel_, owned by dtls_.
- std::vector<std::string> srtp_ciphers_; // SRTP ciphers to use with DTLS.
+ std::vector<int> srtp_ciphers_; // SRTP ciphers to use with DTLS.
bool dtls_active_ = false;
rtc::scoped_refptr<rtc::RTCCertificate> local_certificate_;
rtc::SSLRole ssl_role_;
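
The unittest changes that follow disable a batch of DTLS tests under MemorySanitizer via gtest's MAYBE_/DISABLED_ idiom; the recurring pattern, shown once (webrtc:5381 tracks the underlying MSan failure):

#if defined(MEMORY_SANITIZER)
// Fails under MemorySanitizer:
// https://code.google.com/p/webrtc/issues/detail?id=5381
#define MAYBE_TestTransferDtls DISABLED_TestTransferDtls
#else
#define MAYBE_TestTransferDtls TestTransferDtls
#endif

// gtest still compiles a DISABLED_-prefixed test but skips running it, so the
// body keeps building on every platform:
TEST_F(DtlsTransportChannelTest, MAYBE_TestTransferDtls) { /* ... */ }
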
diff --git a/webrtc/p2p/base/dtlstransportchannel_unittest.cc b/webrtc/p2p/base/dtlstransportchannel_unittest.cc
index 07e3b87847..f5d42f3c6e 100644
--- a/webrtc/p2p/base/dtlstransportchannel_unittest.cc
+++ b/webrtc/p2p/base/dtlstransportchannel_unittest.cc
@@ -28,7 +28,6 @@
return; \
}
-static const char AES_CM_128_HMAC_SHA1_80[] = "AES_CM_128_HMAC_SHA1_80";
static const char kIceUfrag1[] = "TESTICEUFRAG0001";
static const char kIcePwd1[] = "TESTICEPWD00000000000001";
static const size_t kPacketNumOffset = 8;
@@ -49,14 +48,14 @@ class DtlsTestClient : public sigslot::has_slots<> {
: name_(name),
packet_size_(0),
use_dtls_srtp_(false),
- ssl_max_version_(rtc::SSL_PROTOCOL_DTLS_10),
+ ssl_max_version_(rtc::SSL_PROTOCOL_DTLS_12),
negotiated_dtls_(false),
received_dtls_client_hello_(false),
received_dtls_server_hello_(false) {}
void CreateCertificate(rtc::KeyType key_type) {
- certificate_ = rtc::RTCCertificate::Create(
- rtc::scoped_ptr<rtc::SSLIdentity>(
- rtc::SSLIdentity::Generate(name_, key_type)).Pass());
+ certificate_ =
+ rtc::RTCCertificate::Create(rtc::scoped_ptr<rtc::SSLIdentity>(
+ rtc::SSLIdentity::Generate(name_, key_type)));
}
const rtc::scoped_refptr<rtc::RTCCertificate>& certificate() {
return certificate_;
@@ -150,9 +149,9 @@ class DtlsTestClient : public sigslot::has_slots<> {
// SRTP ciphers will be set only in the beginning.
for (std::vector<cricket::DtlsTransportChannelWrapper*>::iterator it =
channels_.begin(); it != channels_.end(); ++it) {
- std::vector<std::string> ciphers;
- ciphers.push_back(AES_CM_128_HMAC_SHA1_80);
- ASSERT_TRUE((*it)->SetSrtpCiphers(ciphers));
+ std::vector<int> ciphers;
+ ciphers.push_back(rtc::SRTP_AES128_CM_SHA1_80);
+ ASSERT_TRUE((*it)->SetSrtpCryptoSuites(ciphers));
}
}
@@ -215,16 +214,16 @@ class DtlsTestClient : public sigslot::has_slots<> {
}
}
- void CheckSrtp(const std::string& expected_cipher) {
+ void CheckSrtp(int expected_crypto_suite) {
for (std::vector<cricket::DtlsTransportChannelWrapper*>::iterator it =
channels_.begin(); it != channels_.end(); ++it) {
- std::string cipher;
+ int crypto_suite;
- bool rv = (*it)->GetSrtpCryptoSuite(&cipher);
- if (negotiated_dtls_ && !expected_cipher.empty()) {
+ bool rv = (*it)->GetSrtpCryptoSuite(&crypto_suite);
+ if (negotiated_dtls_ && expected_crypto_suite) {
ASSERT_TRUE(rv);
- ASSERT_EQ(cipher, expected_cipher);
+ ASSERT_EQ(crypto_suite, expected_crypto_suite);
} else {
ASSERT_FALSE(rv);
}
@@ -401,7 +400,7 @@ class DtlsTransportChannelTest : public testing::Test {
channel_ct_(1),
use_dtls_(false),
use_dtls_srtp_(false),
- ssl_expected_version_(rtc::SSL_PROTOCOL_DTLS_10) {}
+ ssl_expected_version_(rtc::SSL_PROTOCOL_DTLS_12) {}
void SetChannelCount(size_t channel_ct) {
channel_ct_ = static_cast<int>(channel_ct);
@@ -469,11 +468,11 @@ class DtlsTransportChannelTest : public testing::Test {
// Check that we negotiated the right ciphers.
if (use_dtls_srtp_) {
- client1_.CheckSrtp(AES_CM_128_HMAC_SHA1_80);
- client2_.CheckSrtp(AES_CM_128_HMAC_SHA1_80);
+ client1_.CheckSrtp(rtc::SRTP_AES128_CM_SHA1_80);
+ client2_.CheckSrtp(rtc::SRTP_AES128_CM_SHA1_80);
} else {
- client1_.CheckSrtp("");
- client2_.CheckSrtp("");
+ client1_.CheckSrtp(rtc::SRTP_INVALID_CRYPTO_SUITE);
+ client2_.CheckSrtp(rtc::SRTP_INVALID_CRYPTO_SUITE);
}
client1_.CheckSsl(rtc::SSLStreamAdapter::GetDefaultSslCipherForTest(
ssl_expected_version_, rtc::KT_DEFAULT));
@@ -601,16 +600,30 @@ TEST_F(DtlsTransportChannelTest, TestTransferSrtpTwoChannels) {
TestTransfer(1, 1000, 100, true);
}
+#if defined(MEMORY_SANITIZER)
+// Fails under MemorySanitizer:
+// See https://code.google.com/p/webrtc/issues/detail?id=5381.
+#define MAYBE_TestTransferDtls DISABLED_TestTransferDtls
+#else
+#define MAYBE_TestTransferDtls TestTransferDtls
+#endif
// Connect with DTLS, and transfer some data.
-TEST_F(DtlsTransportChannelTest, TestTransferDtls) {
+TEST_F(DtlsTransportChannelTest, MAYBE_TestTransferDtls) {
MAYBE_SKIP_TEST(HaveDtls);
PrepareDtls(true, true, rtc::KT_DEFAULT);
ASSERT_TRUE(Connect());
TestTransfer(0, 1000, 100, false);
}
+#if defined(MEMORY_SANITIZER)
+// Fails under MemorySanitizer:
+// See https://code.google.com/p/webrtc/issues/detail?id=5381.
+#define MAYBE_TestTransferDtlsTwoChannels DISABLED_TestTransferDtlsTwoChannels
+#else
+#define MAYBE_TestTransferDtlsTwoChannels TestTransferDtlsTwoChannels
+#endif
// Create two channels with DTLS, and transfer some data.
-TEST_F(DtlsTransportChannelTest, TestTransferDtlsTwoChannels) {
+TEST_F(DtlsTransportChannelTest, MAYBE_TestTransferDtlsTwoChannels) {
MAYBE_SKIP_TEST(HaveDtls);
SetChannelCount(2);
PrepareDtls(true, true, rtc::KT_DEFAULT);
@@ -642,8 +655,15 @@ TEST_F(DtlsTransportChannelTest, TestDtls12None) {
ASSERT_TRUE(Connect());
}
+#if defined(MEMORY_SANITIZER)
+// Fails under MemorySanitizer:
+// See https://code.google.com/p/webrtc/issues/detail?id=5381.
+#define MAYBE_TestDtls12Both DISABLED_TestDtls12Both
+#else
+#define MAYBE_TestDtls12Both TestDtls12Both
+#endif
// Create two channels with DTLS 1.2 and check ciphers.
-TEST_F(DtlsTransportChannelTest, TestDtls12Both) {
+TEST_F(DtlsTransportChannelTest, MAYBE_TestDtls12Both) {
MAYBE_SKIP_TEST(HaveDtls);
SetChannelCount(2);
PrepareDtls(true, true, rtc::KT_DEFAULT);
@@ -669,8 +689,15 @@ TEST_F(DtlsTransportChannelTest, TestDtls12Client2) {
ASSERT_TRUE(Connect());
}
+#if defined(MEMORY_SANITIZER)
+// Fails under MemorySanitizer:
+// See https://code.google.com/p/webrtc/issues/detail?id=5381.
+#define MAYBE_TestTransferDtlsSrtp DISABLED_TestTransferDtlsSrtp
+#else
+#define MAYBE_TestTransferDtlsSrtp TestTransferDtlsSrtp
+#endif
// Connect with DTLS, negotiate DTLS-SRTP, and transfer SRTP using bypass.
-TEST_F(DtlsTransportChannelTest, TestTransferDtlsSrtp) {
+TEST_F(DtlsTransportChannelTest, MAYBE_TestTransferDtlsSrtp) {
MAYBE_SKIP_TEST(HaveDtlsSrtp);
PrepareDtls(true, true, rtc::KT_DEFAULT);
PrepareDtlsSrtp(true, true);
@@ -678,9 +705,18 @@ TEST_F(DtlsTransportChannelTest, TestTransferDtlsSrtp) {
TestTransfer(0, 1000, 100, true);
}
+#if defined(MEMORY_SANITIZER)
+// Fails under MemorySanitizer:
+// See https://code.google.com/p/webrtc/issues/detail?id=5381.
+#define MAYBE_TestTransferDtlsInvalidSrtpPacket \
+ DISABLED_TestTransferDtlsInvalidSrtpPacket
+#else
+#define MAYBE_TestTransferDtlsInvalidSrtpPacket \
+ TestTransferDtlsInvalidSrtpPacket
+#endif
// Connect with DTLS-SRTP, transfer an invalid SRTP packet, and expect -1 to
// be returned.
-TEST_F(DtlsTransportChannelTest, TestTransferDtlsInvalidSrtpPacket) {
+TEST_F(DtlsTransportChannelTest, MAYBE_TestTransferDtlsInvalidSrtpPacket) {
MAYBE_SKIP_TEST(HaveDtls);
PrepareDtls(true, true, rtc::KT_DEFAULT);
PrepareDtlsSrtp(true, true);
@@ -689,24 +725,47 @@ TEST_F(DtlsTransportChannelTest, TestTransferDtlsInvalidSrtpPacket) {
ASSERT_EQ(-1, result);
}
+#if defined(MEMORY_SANITIZER)
+// Fails under MemorySanitizer:
+// See https://code.google.com/p/webrtc/issues/detail?id=5381.
+#define MAYBE_TestTransferDtlsSrtpRejected DISABLED_TestTransferDtlsSrtpRejected
+#else
+#define MAYBE_TestTransferDtlsSrtpRejected TestTransferDtlsSrtpRejected
+#endif
// Connect with DTLS. A does DTLS-SRTP but B does not.
-TEST_F(DtlsTransportChannelTest, TestTransferDtlsSrtpRejected) {
+TEST_F(DtlsTransportChannelTest, MAYBE_TestTransferDtlsSrtpRejected) {
MAYBE_SKIP_TEST(HaveDtlsSrtp);
PrepareDtls(true, true, rtc::KT_DEFAULT);
PrepareDtlsSrtp(true, false);
ASSERT_TRUE(Connect());
}
+#if defined(MEMORY_SANITIZER)
+// Fails under MemorySanitizer:
+// See https://code.google.com/p/webrtc/issues/detail?id=5381.
+#define MAYBE_TestTransferDtlsSrtpNotOffered \
+ DISABLED_TestTransferDtlsSrtpNotOffered
+#else
+#define MAYBE_TestTransferDtlsSrtpNotOffered TestTransferDtlsSrtpNotOffered
+#endif
// Connect with DTLS. B does DTLS-SRTP but A does not.
-TEST_F(DtlsTransportChannelTest, TestTransferDtlsSrtpNotOffered) {
+TEST_F(DtlsTransportChannelTest, MAYBE_TestTransferDtlsSrtpNotOffered) {
MAYBE_SKIP_TEST(HaveDtlsSrtp);
PrepareDtls(true, true, rtc::KT_DEFAULT);
PrepareDtlsSrtp(false, true);
ASSERT_TRUE(Connect());
}
+#if defined(MEMORY_SANITIZER)
+// Fails under MemorySanitizer:
+// See https://code.google.com/p/webrtc/issues/detail?id=5381.
+#define MAYBE_TestTransferDtlsSrtpTwoChannels \
+ DISABLED_TestTransferDtlsSrtpTwoChannels
+#else
+#define MAYBE_TestTransferDtlsSrtpTwoChannels TestTransferDtlsSrtpTwoChannels
+#endif
// Create two channels with DTLS, negotiate DTLS-SRTP, and transfer bypass SRTP.
-TEST_F(DtlsTransportChannelTest, TestTransferDtlsSrtpTwoChannels) {
+TEST_F(DtlsTransportChannelTest, MAYBE_TestTransferDtlsSrtpTwoChannels) {
MAYBE_SKIP_TEST(HaveDtlsSrtp);
SetChannelCount(2);
PrepareDtls(true, true, rtc::KT_DEFAULT);
@@ -716,8 +775,15 @@ TEST_F(DtlsTransportChannelTest, TestTransferDtlsSrtpTwoChannels) {
TestTransfer(1, 1000, 100, true);
}
+#if defined(MEMORY_SANITIZER)
+// Fails under MemorySanitizer:
+// See https://code.google.com/p/webrtc/issues/detail?id=5381.
+#define MAYBE_TestTransferDtlsSrtpDemux DISABLED_TestTransferDtlsSrtpDemux
+#else
+#define MAYBE_TestTransferDtlsSrtpDemux TestTransferDtlsSrtpDemux
+#endif
// Create a single channel with DTLS, and send normal data and SRTP data on it.
-TEST_F(DtlsTransportChannelTest, TestTransferDtlsSrtpDemux) {
+TEST_F(DtlsTransportChannelTest, MAYBE_TestTransferDtlsSrtpDemux) {
MAYBE_SKIP_TEST(HaveDtlsSrtp);
PrepareDtls(true, true, rtc::KT_DEFAULT);
PrepareDtlsSrtp(true, true);
@@ -726,8 +792,17 @@ TEST_F(DtlsTransportChannelTest, TestTransferDtlsSrtpDemux) {
TestTransfer(0, 1000, 100, true);
}
+#if defined(MEMORY_SANITIZER)
+// Fails under MemorySanitizer:
+// See https://code.google.com/p/webrtc/issues/detail?id=5381.
+#define MAYBE_TestTransferDtlsAnswererIsPassive \
+ DISABLED_TestTransferDtlsAnswererIsPassive
+#else
+#define MAYBE_TestTransferDtlsAnswererIsPassive \
+ TestTransferDtlsAnswererIsPassive
+#endif
// Testing when the remote is passive.
-TEST_F(DtlsTransportChannelTest, TestTransferDtlsAnswererIsPassive) {
+TEST_F(DtlsTransportChannelTest, MAYBE_TestTransferDtlsAnswererIsPassive) {
MAYBE_SKIP_TEST(HaveDtlsSrtp);
SetChannelCount(2);
PrepareDtls(true, true, rtc::KT_DEFAULT);
@@ -752,9 +827,16 @@ TEST_F(DtlsTransportChannelTest, TestDtlsSetupWithLegacyAsAnswerer) {
EXPECT_EQ(rtc::SSL_CLIENT, channel2_role);
}
+#if defined(MEMORY_SANITIZER)
+// Fails under MemorySanitizer:
+// See https://code.google.com/p/webrtc/issues/detail?id=5381.
+#define MAYBE_TestDtlsReOfferFromOfferer DISABLED_TestDtlsReOfferFromOfferer
+#else
+#define MAYBE_TestDtlsReOfferFromOfferer TestDtlsReOfferFromOfferer
+#endif
// Testing re-offer/answer after the session is established. Roles will be
// kept the same as in the previous negotiation.
-TEST_F(DtlsTransportChannelTest, TestDtlsReOfferFromOfferer) {
+TEST_F(DtlsTransportChannelTest, MAYBE_TestDtlsReOfferFromOfferer) {
MAYBE_SKIP_TEST(HaveDtlsSrtp);
SetChannelCount(2);
PrepareDtls(true, true, rtc::KT_DEFAULT);
@@ -771,7 +853,14 @@ TEST_F(DtlsTransportChannelTest, TestDtlsReOfferFromOfferer) {
TestTransfer(1, 1000, 100, true);
}
-TEST_F(DtlsTransportChannelTest, TestDtlsReOfferFromAnswerer) {
+#if defined(MEMORY_SANITIZER)
+// Fails under MemorySanitizer:
+// See https://code.google.com/p/webrtc/issues/detail?id=5381.
+#define MAYBE_TestDtlsReOfferFromAnswerer DISABLED_TestDtlsReOfferFromAnswerer
+#else
+#define MAYBE_TestDtlsReOfferFromAnswerer TestDtlsReOfferFromAnswerer
+#endif
+TEST_F(DtlsTransportChannelTest, MAYBE_TestDtlsReOfferFromAnswerer) {
MAYBE_SKIP_TEST(HaveDtlsSrtp);
SetChannelCount(2);
PrepareDtls(true, true, rtc::KT_DEFAULT);
@@ -788,8 +877,15 @@ TEST_F(DtlsTransportChannelTest, TestDtlsReOfferFromAnswerer) {
TestTransfer(1, 1000, 100, true);
}
+#if defined(MEMORY_SANITIZER)
+// Fails under MemorySanitizer:
+// See https://code.google.com/p/webrtc/issues/detail?id=5381.
+#define MAYBE_TestDtlsRoleReversal DISABLED_TestDtlsRoleReversal
+#else
+#define MAYBE_TestDtlsRoleReversal TestDtlsRoleReversal
+#endif
// Test that any change in role after the initial setup will result in failure.
-TEST_F(DtlsTransportChannelTest, TestDtlsRoleReversal) {
+TEST_F(DtlsTransportChannelTest, MAYBE_TestDtlsRoleReversal) {
MAYBE_SKIP_TEST(HaveDtlsSrtp);
SetChannelCount(2);
PrepareDtls(true, true, rtc::KT_DEFAULT);
@@ -803,9 +899,18 @@ TEST_F(DtlsTransportChannelTest, TestDtlsRoleReversal) {
NF_REOFFER | NF_EXPECT_FAILURE);
}
+#if defined(MEMORY_SANITIZER)
+// Fails under MemorySanitizer:
+// See https://code.google.com/p/webrtc/issues/detail?id=5381.
+#define MAYBE_TestDtlsReOfferWithDifferentSetupAttr \
+ DISABLED_TestDtlsReOfferWithDifferentSetupAttr
+#else
+#define MAYBE_TestDtlsReOfferWithDifferentSetupAttr \
+ TestDtlsReOfferWithDifferentSetupAttr
+#endif
// Test that using different setup attributes that result in the same SSL
// role as in the initial negotiation will result in success.
-TEST_F(DtlsTransportChannelTest, TestDtlsReOfferWithDifferentSetupAttr) {
+TEST_F(DtlsTransportChannelTest, MAYBE_TestDtlsReOfferWithDifferentSetupAttr) {
MAYBE_SKIP_TEST(HaveDtlsSrtp);
SetChannelCount(2);
PrepareDtls(true, true, rtc::KT_DEFAULT);
@@ -865,8 +970,15 @@ TEST_F(DtlsTransportChannelTest, TestCertificatesBeforeConnect) {
ASSERT_FALSE(remote_cert2 != NULL);
}
+#if defined(MEMORY_SANITIZER)
+// Fails under MemorySanitizer:
+// See https://code.google.com/p/webrtc/issues/detail?id=5381.
+#define MAYBE_TestCertificatesAfterConnect DISABLED_TestCertificatesAfterConnect
+#else
+#define MAYBE_TestCertificatesAfterConnect TestCertificatesAfterConnect
+#endif
// Test certificate state after the connection is established.
-TEST_F(DtlsTransportChannelTest, TestCertificatesAfterConnect) {
+TEST_F(DtlsTransportChannelTest, MAYBE_TestCertificatesAfterConnect) {
MAYBE_SKIP_TEST(HaveDtls);
PrepareDtls(true, true, rtc::KT_DEFAULT);
ASSERT_TRUE(Connect());
diff --git a/webrtc/p2p/base/faketransportcontroller.h b/webrtc/p2p/base/faketransportcontroller.h
index 3e656fa4a3..65c59be98d 100644
--- a/webrtc/p2p/base/faketransportcontroller.h
+++ b/webrtc/p2p/base/faketransportcontroller.h
@@ -242,20 +242,20 @@ class FakeTransportChannel : public TransportChannelImpl,
bool IsDtlsActive() const override { return do_dtls_; }
- bool SetSrtpCiphers(const std::vector<std::string>& ciphers) override {
+ bool SetSrtpCryptoSuites(const std::vector<int>& ciphers) override {
srtp_ciphers_ = ciphers;
return true;
}
- bool GetSrtpCryptoSuite(std::string* cipher) override {
- if (!chosen_srtp_cipher_.empty()) {
- *cipher = chosen_srtp_cipher_;
+ bool GetSrtpCryptoSuite(int* crypto_suite) override {
+ if (chosen_crypto_suite_ != rtc::SRTP_INVALID_CRYPTO_SUITE) {
+ *crypto_suite = chosen_crypto_suite_;
return true;
}
return false;
}
- bool GetSslCipherSuite(int* cipher) override { return false; }
+ bool GetSslCipherSuite(int* cipher_suite) override { return false; }
rtc::scoped_refptr<rtc::RTCCertificate> GetLocalCertificate() const {
return local_cert_;
@@ -275,7 +275,7 @@ class FakeTransportChannel : public TransportChannelImpl,
bool use_context,
uint8_t* result,
size_t result_len) override {
- if (!chosen_srtp_cipher_.empty()) {
+ if (chosen_crypto_suite_ != rtc::SRTP_INVALID_CRYPTO_SUITE) {
memset(result, 0xff, result_len);
return true;
}
@@ -284,14 +284,13 @@ class FakeTransportChannel : public TransportChannelImpl,
}
void NegotiateSrtpCiphers() {
- for (std::vector<std::string>::const_iterator it1 = srtp_ciphers_.begin();
+ for (std::vector<int>::const_iterator it1 = srtp_ciphers_.begin();
it1 != srtp_ciphers_.end(); ++it1) {
- for (std::vector<std::string>::const_iterator it2 =
- dest_->srtp_ciphers_.begin();
+ for (std::vector<int>::const_iterator it2 = dest_->srtp_ciphers_.begin();
it2 != dest_->srtp_ciphers_.end(); ++it2) {
if (*it1 == *it2) {
- chosen_srtp_cipher_ = *it1;
- dest_->chosen_srtp_cipher_ = *it2;
+ chosen_crypto_suite_ = *it1;
+ dest_->chosen_crypto_suite_ = *it2;
return;
}
}
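// The negotiation above is a first-match intersection over the two suite
// lists, now expressed with ints instead of cipher-name strings. A compact
// equivalent sketch using <algorithm>; kInvalidSuite stands in for
// rtc::SRTP_INVALID_CRYPTO_SUITE and this helper is illustrative, not part
// of the patch:
#include <algorithm>
#include <vector>

constexpr int kInvalidSuite = 0;

int NegotiateSuite(const std::vector<int>& ours,
                   const std::vector<int>& theirs) {
  // First element of |ours| that also appears in |theirs|, matching the
  // iteration order of the nested loops above.
  auto it = std::find_first_of(ours.begin(), ours.end(),
                               theirs.begin(), theirs.end());
  return it == ours.end() ? kInvalidSuite : *it;
}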
@@ -322,8 +321,8 @@ class FakeTransportChannel : public TransportChannelImpl,
rtc::scoped_refptr<rtc::RTCCertificate> local_cert_;
rtc::FakeSSLCertificate* remote_cert_ = nullptr;
bool do_dtls_ = false;
- std::vector<std::string> srtp_ciphers_;
- std::string chosen_srtp_cipher_;
+ std::vector<int> srtp_ciphers_;
+ int chosen_crypto_suite_ = rtc::SRTP_INVALID_CRYPTO_SUITE;
int receiving_timeout_ = -1;
bool gather_continually_ = false;
IceRole role_ = ICEROLE_UNKNOWN;
@@ -333,7 +332,7 @@ class FakeTransportChannel : public TransportChannelImpl,
std::string remote_ice_ufrag_;
std::string remote_ice_pwd_;
IceMode remote_ice_mode_ = ICEMODE_FULL;
- rtc::SSLProtocolVersion ssl_max_version_ = rtc::SSL_PROTOCOL_DTLS_10;
+ rtc::SSLProtocolVersion ssl_max_version_ = rtc::SSL_PROTOCOL_DTLS_12;
rtc::SSLFingerprint dtls_fingerprint_;
rtc::SSLRole ssl_role_ = rtc::SSL_CLIENT;
size_t connection_count_ = 0;
@@ -454,7 +453,7 @@ class FakeTransport : public Transport {
FakeTransport* dest_ = nullptr;
bool async_ = false;
rtc::scoped_refptr<rtc::RTCCertificate> certificate_;
- rtc::SSLProtocolVersion ssl_max_version_ = rtc::SSL_PROTOCOL_DTLS_10;
+ rtc::SSLProtocolVersion ssl_max_version_ = rtc::SSL_PROTOCOL_DTLS_12;
};
// Fake TransportController class, which can be passed into a BaseChannel object
diff --git a/webrtc/p2p/base/p2ptransportchannel.cc b/webrtc/p2p/base/p2ptransportchannel.cc
index 623085f9a8..952cfab747 100644
--- a/webrtc/p2p/base/p2ptransportchannel.cc
+++ b/webrtc/p2p/base/p2ptransportchannel.cc
@@ -187,6 +187,7 @@ namespace cricket {
// well on a 28.8K modem, which is the slowest connection on which the voice
// quality is reasonable at all.
static const uint32_t PING_PACKET_SIZE = 60 * 8;
+// TODO(honghaiz): Change the word DELAY to INTERVAL whenever appropriate.
// STRONG_PING_DELAY (480ms) is applied when the best connection is both
// writable and receiving.
static const uint32_t STRONG_PING_DELAY = 1000 * PING_PACKET_SIZE / 1000;
@@ -201,7 +202,6 @@ static const uint32_t MAX_CURRENT_STRONG_DELAY = 900;
static const int MIN_CHECK_RECEIVING_DELAY = 50; // ms
-
P2PTransportChannel::P2PTransportChannel(const std::string& transport_name,
int component,
P2PTransport* transport,
@@ -215,14 +215,13 @@ P2PTransportChannel::P2PTransportChannel(const std::string& transport_name,
best_connection_(NULL),
pending_best_connection_(NULL),
sort_dirty_(false),
- was_writable_(false),
remote_ice_mode_(ICEMODE_FULL),
ice_role_(ICEROLE_UNKNOWN),
tiebreaker_(0),
- remote_candidate_generation_(0),
gathering_state_(kIceGatheringNew),
check_receiving_delay_(MIN_CHECK_RECEIVING_DELAY * 5),
- receiving_timeout_(MIN_CHECK_RECEIVING_DELAY * 50) {
+ receiving_timeout_(MIN_CHECK_RECEIVING_DELAY * 50),
+ backup_connection_ping_interval_(0) {
uint32_t weak_ping_delay = ::strtoul(
webrtc::field_trial::FindFullName("WebRTC-StunInterPacketDelay").c_str(),
nullptr, 10);
@@ -241,6 +240,8 @@ P2PTransportChannel::~P2PTransportChannel() {
// Add the allocator session to our list so that we know which sessions
// are still active.
void P2PTransportChannel::AddAllocatorSession(PortAllocatorSession* session) {
+ ASSERT(worker_thread_ == rtc::Thread::Current());
+
session->set_generation(static_cast<uint32_t>(allocator_sessions_.size()));
allocator_sessions_.push_back(session);
@@ -295,9 +296,13 @@ void P2PTransportChannel::SetIceTiebreaker(uint64_t tiebreaker) {
tiebreaker_ = tiebreaker;
}
+TransportChannelState P2PTransportChannel::GetState() const {
+ return state_;
+}
+
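// GetState() now returns a value cached in state_, with the recomputation
// moved into ComputeState() and triggered from UpdateState() (see below).
// A minimal sketch of this cache-and-recompute split, with illustrative
// names:
class CachedState {
 public:
  int Get() const { return state_; }     // Cheap, consistent between updates.
  void Update() { state_ = Compute(); }  // Run after a batch of changes.
 private:
  int Compute() const { return 42; }     // Stand-in for ComputeState().
  int state_ = 0;
};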
// A channel is considered ICE completed once there is at most one active
// connection per network and at least one active connection.
-TransportChannelState P2PTransportChannel::GetState() const {
+TransportChannelState P2PTransportChannel::ComputeState() const {
if (!had_connection_) {
return TransportChannelState::STATE_INIT;
}
@@ -341,25 +346,23 @@ void P2PTransportChannel::SetIceCredentials(const std::string& ice_ufrag,
void P2PTransportChannel::SetRemoteIceCredentials(const std::string& ice_ufrag,
const std::string& ice_pwd) {
ASSERT(worker_thread_ == rtc::Thread::Current());
- bool ice_restart = false;
- if (!remote_ice_ufrag_.empty() && !remote_ice_pwd_.empty()) {
- ice_restart = (remote_ice_ufrag_ != ice_ufrag) ||
- (remote_ice_pwd_!= ice_pwd);
+ IceParameters* current_ice = remote_ice();
+ IceParameters new_ice(ice_ufrag, ice_pwd);
+ if (!current_ice || *current_ice != new_ice) {
+ // Keep the ICE credentials so that newer connections
+ // are prioritized over the older ones.
+ remote_ice_parameters_.push_back(new_ice);
+ }
+
+ // Update the pwd of remote candidate if needed.
+ for (RemoteCandidate& candidate : remote_candidates_) {
+ if (candidate.username() == ice_ufrag && candidate.password().empty()) {
+ candidate.set_password(ice_pwd);
+ }
}
-
- remote_ice_ufrag_ = ice_ufrag;
- remote_ice_pwd_ = ice_pwd;
-
// We need to update the credentials for any peer reflexive candidates.
- std::vector<Connection*>::iterator it = connections_.begin();
- for (; it != connections_.end(); ++it) {
- (*it)->MaybeSetRemoteIceCredentials(ice_ufrag, ice_pwd);
- }
-
- if (ice_restart) {
- // We need to keep track of the remote ice restart so newer
- // connections are prioritized over the older.
- ++remote_candidate_generation_;
+ for (Connection* conn : connections_) {
+ conn->MaybeSetRemoteIceCredentials(ice_ufrag, ice_pwd);
}
}
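// The rewrite above replaces the old restart counter with a history of remote
// ICE credentials: a new entry is appended only when it differs from the
// latest one, so the vector index doubles as the generation. A behavioral
// sketch, assuming the IceParameters struct from p2ptransportchannel.h (the
// helper name is illustrative):
void MaybeAppend(std::vector<IceParameters>& history,
                 const IceParameters& incoming) {
  if (history.empty() || history.back() != incoming) {
    history.push_back(incoming);  // Becomes the newest generation.
  }
}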
@@ -371,18 +374,26 @@ void P2PTransportChannel::SetIceConfig(const IceConfig& config) {
gather_continually_ = config.gather_continually;
LOG(LS_INFO) << "Set gather_continually to " << gather_continually_;
- if (config.receiving_timeout_ms < 0) {
- return;
+ if (config.backup_connection_ping_interval >= 0 &&
+ backup_connection_ping_interval_ !=
+ config.backup_connection_ping_interval) {
+ backup_connection_ping_interval_ = config.backup_connection_ping_interval;
+ LOG(LS_INFO) << "Set backup connection ping interval to "
+ << backup_connection_ping_interval_ << " milliseconds.";
}
- receiving_timeout_ = config.receiving_timeout_ms;
- check_receiving_delay_ =
- std::max(MIN_CHECK_RECEIVING_DELAY, receiving_timeout_ / 10);
- for (Connection* connection : connections_) {
- connection->set_receiving_timeout(receiving_timeout_);
+ if (config.receiving_timeout_ms >= 0 &&
+ receiving_timeout_ != config.receiving_timeout_ms) {
+ receiving_timeout_ = config.receiving_timeout_ms;
+ check_receiving_delay_ =
+ std::max(MIN_CHECK_RECEIVING_DELAY, receiving_timeout_ / 10);
+
+ for (Connection* connection : connections_) {
+ connection->set_receiving_timeout(receiving_timeout_);
+ }
+ LOG(LS_INFO) << "Set ICE receiving timeout to " << receiving_timeout_
+ << " milliseconds";
}
- LOG(LS_INFO) << "Set ICE receiving timeout to " << receiving_timeout_
- << " milliseconds";
}
// Go into the state of processing candidates, and running in general
@@ -519,11 +530,17 @@ void P2PTransportChannel::OnUnknownAddress(
}
}
+ uint32_t remote_generation = 0;
// The STUN binding request may arrive after setRemoteDescription and before
// adding remote candidate, so we need to set the password to the shared
// password if the user name matches.
- if (remote_password.empty() && remote_username == remote_ice_ufrag_) {
- remote_password = remote_ice_pwd_;
+ if (remote_password.empty()) {
+ const IceParameters* ice_param =
+ FindRemoteIceFromUfrag(remote_username, &remote_generation);
+ // Note: if not found, the remote_generation will still be 0.
+ if (ice_param != nullptr) {
+ remote_password = ice_param->pwd;
+ }
}
Candidate remote_candidate;
@@ -555,9 +572,9 @@ void P2PTransportChannel::OnUnknownAddress(
// If the source transport address of the request does not match any
// existing remote candidates, it represents a new peer reflexive remote
// candidate.
- remote_candidate =
- Candidate(component(), ProtoToString(proto), address, 0,
- remote_username, remote_password, PRFLX_PORT_TYPE, 0U, "");
+ remote_candidate = Candidate(component(), ProtoToString(proto), address, 0,
+ remote_username, remote_password,
+ PRFLX_PORT_TYPE, remote_generation, "");
// From RFC 5245, section-7.2.1.3:
// The foundation of the candidate is set to an arbitrary value, different
@@ -604,14 +621,7 @@ void P2PTransportChannel::OnUnknownAddress(
<< (remote_candidate_is_new ? "peer reflexive" : "resurrected")
<< " candidate: " << remote_candidate.ToString();
AddConnection(connection);
- connection->ReceivedPing();
-
- bool received_use_candidate =
- stun_msg->GetByteString(STUN_ATTR_USE_CANDIDATE) != nullptr;
- if (received_use_candidate && ice_role_ == ICEROLE_CONTROLLED) {
- connection->set_nominated(true);
- OnNominated(connection);
- }
+ connection->HandleBindingRequest(stun_msg);
// Update the list of connections since we just added another. We do this
// after sending the response since it could (in principle) delete the
@@ -624,6 +634,21 @@ void P2PTransportChannel::OnRoleConflict(PortInterface* port) {
// from Transport.
}
+const IceParameters* P2PTransportChannel::FindRemoteIceFromUfrag(
+ const std::string& ufrag,
+ uint32_t* generation) {
+ const auto& params = remote_ice_parameters_;
+ auto it = std::find_if(
+ params.rbegin(), params.rend(),
+ [ufrag](const IceParameters& param) { return param.ufrag == ufrag; });
+ if (it == params.rend()) {
+ // Not found.
+ return nullptr;
+ }
+ *generation = params.rend() - it - 1;
+ return &(*it);
+}
+
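// FindRemoteIceFromUfrag searches newest-to-oldest and converts the reverse
// position back to a forward index with rend() - it - 1; that index is the
// generation. Searching from the back means a ufrag reused across generations
// resolves to the newest one. A tiny worked example (illustrative, not patch
// code):
#include <algorithm>
#include <string>
#include <vector>

size_t GenerationOf(const std::vector<std::string>& ufrags,
                    const std::string& ufrag) {
  auto it = std::find(ufrags.rbegin(), ufrags.rend(), ufrag);
  return it == ufrags.rend() ? ufrags.size()            // Unknown: future.
                             : ufrags.rend() - it - 1;  // Found: its index.
}
// GenerationOf({"u0", "u1", "u2"}, "u1") == 1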
void P2PTransportChannel::OnNominated(Connection* conn) {
ASSERT(worker_thread_ == rtc::Thread::Current());
ASSERT(ice_role_ == ICEROLE_CONTROLLED);
@@ -648,19 +673,39 @@ void P2PTransportChannel::OnNominated(Connection* conn) {
void P2PTransportChannel::AddRemoteCandidate(const Candidate& candidate) {
ASSERT(worker_thread_ == rtc::Thread::Current());
- uint32_t generation = candidate.generation();
- // Network may not guarantee the order of the candidate delivery. If a
- // remote candidate with an older generation arrives, drop it.
- if (generation != 0 && generation < remote_candidate_generation_) {
- LOG(LS_WARNING) << "Dropping a remote candidate because its generation "
- << generation
- << " is lower than the current remote generation "
- << remote_candidate_generation_;
+ uint32_t generation = GetRemoteCandidateGeneration(candidate);
+ // If a remote candidate with a previous generation arrives, drop it.
+ if (generation < remote_ice_generation()) {
+ LOG(LS_WARNING) << "Dropping a remote candidate because its ufrag "
+ << candidate.username()
+ << " indicates it was for a previous generation.";
return;
}
+ Candidate new_remote_candidate(candidate);
+ new_remote_candidate.set_generation(generation);
+ // ICE candidates don't need to have username and password set, but
+ // the code below this (specifically, ConnectionRequest::Prepare in
+ // port.cc) uses the remote candidate's username. So, we set it
+ // here.
+ if (remote_ice()) {
+ if (candidate.username().empty()) {
+ new_remote_candidate.set_username(remote_ice()->ufrag);
+ }
+ if (new_remote_candidate.username() == remote_ice()->ufrag) {
+ if (candidate.password().empty()) {
+ new_remote_candidate.set_password(remote_ice()->pwd);
+ }
+ } else {
+ // The candidate belongs to the next generation. Its pwd will be set
+ // when the new remote ICE credentials arrive.
+ LOG(LS_WARNING) << "A remote candidate arrives with an unknown ufrag: "
+ << candidate.username();
+ }
+ }
+
// Create connections to this remote candidate.
- CreateConnections(candidate, NULL);
+ CreateConnections(new_remote_candidate, NULL);
// Resort the connections list, which may have new elements.
SortConnections();
@@ -673,20 +718,6 @@ bool P2PTransportChannel::CreateConnections(const Candidate& remote_candidate,
PortInterface* origin_port) {
ASSERT(worker_thread_ == rtc::Thread::Current());
- Candidate new_remote_candidate(remote_candidate);
- new_remote_candidate.set_generation(
- GetRemoteCandidateGeneration(remote_candidate));
- // ICE candidates don't need to have username and password set, but
- // the code below this (specifically, ConnectionRequest::Prepare in
- // port.cc) uses the remote candidates's username. So, we set it
- // here.
- if (remote_candidate.username().empty()) {
- new_remote_candidate.set_username(remote_ice_ufrag_);
- }
- if (remote_candidate.password().empty()) {
- new_remote_candidate.set_password(remote_ice_pwd_);
- }
-
// If we've already seen the new remote candidate (in the current candidate
// generation), then we shouldn't try creating connections for it.
// We either already have a connection for it, or we previously created one
@@ -695,7 +726,7 @@ bool P2PTransportChannel::CreateConnections(const Candidate& remote_candidate,
// immediately be re-pruned, churning the network for no purpose.
// This only applies to candidates received over signaling (i.e. origin_port
// is NULL).
- if (!origin_port && IsDuplicateRemoteCandidate(new_remote_candidate)) {
+ if (!origin_port && IsDuplicateRemoteCandidate(remote_candidate)) {
// return true to indicate success, without creating any new connections.
return true;
}
@@ -708,7 +739,7 @@ bool P2PTransportChannel::CreateConnections(const Candidate& remote_candidate,
bool created = false;
std::vector<PortInterface *>::reverse_iterator it;
for (it = ports_.rbegin(); it != ports_.rend(); ++it) {
- if (CreateConnection(*it, new_remote_candidate, origin_port)) {
+ if (CreateConnection(*it, remote_candidate, origin_port)) {
if (*it == origin_port)
created = true;
}
@@ -716,12 +747,12 @@ bool P2PTransportChannel::CreateConnections(const Candidate& remote_candidate,
if ((origin_port != NULL) &&
std::find(ports_.begin(), ports_.end(), origin_port) == ports_.end()) {
- if (CreateConnection(origin_port, new_remote_candidate, origin_port))
+ if (CreateConnection(origin_port, remote_candidate, origin_port))
created = true;
}
// Remember this remote candidate so that we can add it to future ports.
- RememberRemoteCandidate(new_remote_candidate, origin_port);
+ RememberRemoteCandidate(remote_candidate, origin_port);
return created;
}
@@ -731,6 +762,9 @@ bool P2PTransportChannel::CreateConnections(const Candidate& remote_candidate,
bool P2PTransportChannel::CreateConnection(PortInterface* port,
const Candidate& remote_candidate,
PortInterface* origin_port) {
+ if (!port->SupportsProtocol(remote_candidate.protocol())) {
+ return false;
+ }
// Look for an existing connection with this remote address. If one is not
// found, then we can create a new connection for this address.
Connection* connection = port->GetConnection(remote_candidate.address());
@@ -777,11 +811,21 @@ bool P2PTransportChannel::FindConnection(
uint32_t P2PTransportChannel::GetRemoteCandidateGeneration(
const Candidate& candidate) {
- // We need to keep track of the remote ice restart so newer
- // connections are prioritized over the older.
- ASSERT(candidate.generation() == 0 ||
- candidate.generation() == remote_candidate_generation_);
- return remote_candidate_generation_;
+ // If the candidate has a ufrag, use it to find the generation.
+ if (!candidate.username().empty()) {
+ uint32_t generation = 0;
+ if (!FindRemoteIceFromUfrag(candidate.username(), &generation)) {
+ // If the ufrag is not found, assume the next/future generation.
+ generation = static_cast<uint32_t>(remote_ice_parameters_.size());
+ }
+ return generation;
+ }
+ // If candidate generation is set, use that.
+ if (candidate.generation() > 0) {
+ return candidate.generation();
+ }
+ // Otherwise, assume the generation from remote ice parameters.
+ return remote_ice_generation();
}
// Check if remote candidate is already cached.
@@ -990,17 +1034,13 @@ void P2PTransportChannel::SortConnections() {
// Now update the writable state of the channel with the information we have
// so far.
- if (best_connection_ && best_connection_->writable()) {
- HandleWritable();
- } else if (all_connections_timedout) {
+ if (all_connections_timedout) {
HandleAllTimedOut();
- } else {
- HandleNotWritable();
}
// Update the state of this channel. This method is called whenever the
// state of any connection changes, so this is a good place to do this.
- UpdateChannelState();
+ UpdateState();
}
Connection* P2PTransportChannel::best_nominated_connection() const {
@@ -1060,13 +1100,17 @@ void P2PTransportChannel::SwitchBestConnectionTo(Connection* conn) {
}
}
-void P2PTransportChannel::UpdateChannelState() {
- // The Handle* functions already set the writable state. We'll just double-
- // check it here.
+// Warning: UpdateState should eventually be called whenever a connection
+// is added, deleted, or the write state of any connection changes so that the
+// transport controller will get the up-to-date channel state. However, it
+// should not be called too often; in the case that multiple connection states
+// change, it should be called after all the connection states have changed. For
+// example, we call this at the end of SortConnections.
+void P2PTransportChannel::UpdateState() {
+ state_ = ComputeState();
+
bool writable = best_connection_ && best_connection_->writable();
- ASSERT(writable == this->writable());
- if (writable != this->writable())
- LOG(LS_ERROR) << "UpdateChannelState: writable state mismatch";
+ set_writable(writable);
bool receiving = false;
for (const Connection* connection : connections_) {
@@ -1078,11 +1122,8 @@ void P2PTransportChannel::UpdateChannelState() {
set_receiving(receiving);
}
-// We checked the status of our connections and we had at least one that
-// was writable, go into the writable state.
-void P2PTransportChannel::HandleWritable() {
- ASSERT(worker_thread_ == rtc::Thread::Current());
- if (writable()) {
+void P2PTransportChannel::MaybeStopPortAllocatorSessions() {
+ if (!IsGettingPorts()) {
return;
}
@@ -1098,18 +1139,6 @@ void P2PTransportChannel::HandleWritable() {
}
session->StopGettingPorts();
}
-
- was_writable_ = true;
- set_writable(true);
-}
-
-// Notify upper layer about channel not writable state, if it was before.
-void P2PTransportChannel::HandleNotWritable() {
- ASSERT(worker_thread_ == rtc::Thread::Current());
- if (was_writable_) {
- was_writable_ = false;
- set_writable(false);
- }
}
// If all connections timed out, delete them all.
@@ -1179,10 +1208,17 @@ void P2PTransportChannel::OnCheckAndPing() {
thread()->PostDelayed(check_delay, this, MSG_CHECK_AND_PING);
}
+// A connection is considered a backup connection if the channel state is
+// completed, the connection is not the best connection, and it is active.
+bool P2PTransportChannel::IsBackupConnection(Connection* conn) const {
+ return state_ == STATE_COMPLETED && conn != best_connection_ &&
+ conn->active();
+}
+
// Is the connection in a state for us to even consider pinging the other side?
// We consider a connection pingable even if it's not connected because that's
// how a TCP connection is kicked into reconnecting on the active side.
-bool P2PTransportChannel::IsPingable(Connection* conn) {
+bool P2PTransportChannel::IsPingable(Connection* conn, uint32_t now) {
const Candidate& remote = conn->remote_candidate();
// We should never get this far with an empty remote ufrag.
ASSERT(!remote.username().empty());
@@ -1198,9 +1234,18 @@ bool P2PTransportChannel::IsPingable(Connection* conn) {
return false;
}
- // If the channel is weak, ping all candidates. Otherwise, we only
- // want to ping connections that have not timed out on writing.
- return weak() || conn->write_state() != Connection::STATE_WRITE_TIMEOUT;
+ // If the channel is weakly connected, ping all connections.
+ if (weak()) {
+ return true;
+ }
+
+ // Always ping active connections regardless of whether the channel is
+ // completed, but backup connections are pinged at a slower rate.
+ if (IsBackupConnection(conn)) {
+ return (now >= conn->last_ping_response_received() +
+ backup_connection_ping_interval_);
+ }
+ return conn->active();
}
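// The backup branch above throttles pings by requiring a full
// backup_connection_ping_interval_ to elapse since the last ping response.
// Condensed, with times in milliseconds to match the uint32_t clocks used in
// this file (the free function is illustrative):
bool BackupPingDue(uint32_t now, uint32_t last_response_ms,
                   uint32_t interval_ms) {
  // E.g. last response at 10000 ms with a 2000 ms interval: due at >= 12000.
  return now >= last_response_ms + interval_ms;
}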
// Returns the next pingable connection to ping. This will be the oldest
@@ -1224,7 +1269,7 @@ Connection* P2PTransportChannel::FindNextPingableConnection() {
Connection* oldest_needing_triggered_check = nullptr;
Connection* oldest = nullptr;
for (Connection* conn : connections_) {
- if (!IsPingable(conn)) {
+ if (!IsPingable(conn, now)) {
continue;
}
bool needs_triggered_check =
@@ -1291,6 +1336,14 @@ void P2PTransportChannel::OnConnectionStateChange(Connection* connection) {
}
}
+ // May stop the allocator session when at least one connection becomes
+ // strongly connected after starting to get ports. It is not enough to check
+ // that the connection becomes weakly connected because the connection may be
+ // changing from (writable, receiving) to (writable, not receiving).
+ if (!connection->weak()) {
+ MaybeStopPortAllocatorSessions();
+ }
+
// We have to unroll the stack before doing this because we may be changing
// the state of connections while sorting.
RequestSort();
@@ -1328,6 +1381,9 @@ void P2PTransportChannel::OnConnectionDestroyed(Connection* connection) {
RequestSort();
}
+ UpdateState();
+ // SignalConnectionRemoved should be called after the channel state is
+ // updated because the receiver of the event may access the channel state.
SignalConnectionRemoved(this);
}
@@ -1368,8 +1424,7 @@ void P2PTransportChannel::OnReadPacket(Connection* connection,
}
}
-void P2PTransportChannel::OnSentPacket(PortInterface* port,
- const rtc::SentPacket& sent_packet) {
+void P2PTransportChannel::OnSentPacket(const rtc::SentPacket& sent_packet) {
ASSERT(worker_thread_ == rtc::Thread::Current());
SignalSentPacket(this, sent_packet);
diff --git a/webrtc/p2p/base/p2ptransportchannel.h b/webrtc/p2p/base/p2ptransportchannel.h
index 9efb96c42d..f2e9315343 100644
--- a/webrtc/p2p/base/p2ptransportchannel.h
+++ b/webrtc/p2p/base/p2ptransportchannel.h
@@ -36,6 +36,18 @@ namespace cricket {
extern const uint32_t WEAK_PING_DELAY;
+struct IceParameters {
+ std::string ufrag;
+ std::string pwd;
+ IceParameters(const std::string& ice_ufrag, const std::string& ice_pwd)
+ : ufrag(ice_ufrag), pwd(ice_pwd) {}
+
+ bool operator==(const IceParameters& other) {
+ return ufrag == other.ufrag && pwd == other.pwd;
+ }
+ bool operator!=(const IceParameters& other) { return !(*this == other); }
+};
+
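// A note on the comparison operators above: they work as written, but an
// equivalent, slightly more idiomatic sketch would mark them const so they
// can also be called on const references:
//   bool operator==(const IceParameters& other) const {
//     return ufrag == other.ufrag && pwd == other.pwd;
//   }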
// Adds the port on which the candidate originated.
class RemoteCandidate : public Candidate {
public:
@@ -108,12 +120,12 @@ class P2PTransportChannel : public TransportChannelImpl,
bool SetSslRole(rtc::SSLRole role) override { return false; }
// Set up the ciphers to use for DTLS-SRTP.
- bool SetSrtpCiphers(const std::vector<std::string>& ciphers) override {
+ bool SetSrtpCryptoSuites(const std::vector<int>& ciphers) override {
return false;
}
// Find out which DTLS-SRTP cipher was negotiated.
- bool GetSrtpCryptoSuite(std::string* cipher) override { return false; }
+ bool GetSrtpCryptoSuite(int* cipher) override { return false; }
// Find out which DTLS cipher was negotiated.
bool GetSslCipherSuite(int* cipher) override { return false; }
@@ -161,12 +173,20 @@ class P2PTransportChannel : public TransportChannelImpl,
// Public for unit tests.
const std::vector<Connection*>& connections() const { return connections_; }
- private:
- rtc::Thread* thread() { return worker_thread_; }
+ // Public for unit tests.
PortAllocatorSession* allocator_session() {
return allocator_sessions_.back();
}
+ // Public for unit tests.
+ const std::vector<RemoteCandidate>& remote_candidates() const {
+ return remote_candidates_;
+ }
+
+ private:
+ rtc::Thread* thread() { return worker_thread_; }
+ bool IsGettingPorts() { return allocator_session()->IsGettingPorts(); }
+
// A transport channel is weak if the current best connection is either
// not receiving or not writable, or if there is no best connection at all.
bool weak() const;
@@ -174,10 +194,10 @@ class P2PTransportChannel : public TransportChannelImpl,
void RequestSort();
void SortConnections();
void SwitchBestConnectionTo(Connection* conn);
- void UpdateChannelState();
- void HandleWritable();
- void HandleNotWritable();
+ void UpdateState();
void HandleAllTimedOut();
+ void MaybeStopPortAllocatorSessions();
+ TransportChannelState ComputeState() const;
Connection* GetBestConnectionOnNetwork(rtc::Network* network) const;
bool CreateConnections(const Candidate& remote_candidate,
@@ -191,7 +211,7 @@ class P2PTransportChannel : public TransportChannelImpl,
bool IsDuplicateRemoteCandidate(const Candidate& candidate);
void RememberRemoteCandidate(const Candidate& remote_candidate,
PortInterface* origin_port);
- bool IsPingable(Connection* conn);
+ bool IsPingable(Connection* conn, uint32_t now);
void PingConnection(Connection* conn);
void AddAllocatorSession(PortAllocatorSession* session);
void AddConnection(Connection* connection);
@@ -212,7 +232,7 @@ class P2PTransportChannel : public TransportChannelImpl,
void OnConnectionStateChange(Connection* connection);
void OnReadPacket(Connection *connection, const char *data, size_t len,
const rtc::PacketTime& packet_time);
- void OnSentPacket(PortInterface* port, const rtc::SentPacket& sent_packet);
+ void OnSentPacket(const rtc::SentPacket& sent_packet);
void OnReadyToSend(Connection* connection);
void OnConnectionDestroyed(Connection *connection);
@@ -224,6 +244,25 @@ class P2PTransportChannel : public TransportChannelImpl,
void PruneConnections();
Connection* best_nominated_connection() const;
+ bool IsBackupConnection(Connection* conn) const;
+
+ // Returns the latest remote ICE parameters or nullptr if there are no remote
+ // ICE parameters yet.
+ IceParameters* remote_ice() {
+ return remote_ice_parameters_.empty() ? nullptr
+ : &remote_ice_parameters_.back();
+ }
+ // Returns the remote IceParameters and generation that match |ufrag|
+ // if found, and returns nullptr otherwise.
+ const IceParameters* FindRemoteIceFromUfrag(const std::string& ufrag,
+ uint32_t* generation);
+ // Returns the index of the latest remote ICE parameters, or 0 if no remote
+ // ICE parameters have been received.
+ uint32_t remote_ice_generation() {
+ return remote_ice_parameters_.empty()
+ ? 0
+ : static_cast<uint32_t>(remote_ice_parameters_.size() - 1);
+ }
P2PTransport* transport_;
PortAllocator* allocator_;
@@ -239,25 +278,24 @@ class P2PTransportChannel : public TransportChannelImpl,
Connection* pending_best_connection_;
std::vector<RemoteCandidate> remote_candidates_;
bool sort_dirty_; // indicates whether another sort is needed right now
- bool was_writable_;
bool had_connection_ = false; // if connections_ has ever been nonempty
typedef std::map<rtc::Socket::Option, int> OptionMap;
OptionMap options_;
std::string ice_ufrag_;
std::string ice_pwd_;
- std::string remote_ice_ufrag_;
- std::string remote_ice_pwd_;
+ std::vector<IceParameters> remote_ice_parameters_;
IceMode remote_ice_mode_;
IceRole ice_role_;
uint64_t tiebreaker_;
- uint32_t remote_candidate_generation_;
IceGatheringState gathering_state_;
int check_receiving_delay_;
int receiving_timeout_;
+ int backup_connection_ping_interval_;
uint32_t last_ping_sent_ms_ = 0;
bool gather_continually_ = false;
int weak_ping_delay_ = WEAK_PING_DELAY;
+ TransportChannelState state_ = TransportChannelState::STATE_INIT;
RTC_DISALLOW_COPY_AND_ASSIGN(P2PTransportChannel);
};
diff --git a/webrtc/p2p/base/p2ptransportchannel_unittest.cc b/webrtc/p2p/base/p2ptransportchannel_unittest.cc
index 37cda7c661..90ddd43714 100644
--- a/webrtc/p2p/base/p2ptransportchannel_unittest.cc
+++ b/webrtc/p2p/base/p2ptransportchannel_unittest.cc
@@ -101,10 +101,12 @@ enum {
};
static cricket::IceConfig CreateIceConfig(int receiving_timeout_ms,
- bool gather_continually) {
+ bool gather_continually,
+ int backup_ping_interval = -1) {
cricket::IceConfig config;
config.receiving_timeout_ms = receiving_timeout_ms;
config.gather_continually = gather_continually;
+ config.backup_connection_ping_interval = backup_ping_interval;
return config;
}
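// The new default argument keeps existing call sites unchanged: -1 means
// "leave the backup ping interval unset", because SetIceConfig only applies
// values >= 0 (see the p2ptransportchannel.cc hunk above). Illustrative
// usage, assuming the cricket declarations already included by this file:
void ConfigureForTest(cricket::P2PTransportChannel* ch) {
  ch->SetIceConfig(CreateIceConfig(1000, false));        // Timeout only.
  ch->SetIceConfig(CreateIceConfig(2000, false, 2000));  // 2 s backup pings.
}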
@@ -650,6 +652,21 @@ class P2PTransportChannelTestBase : public testing::Test,
GetEndpoint(endpoint)->save_candidates_ = true;
}
+ // TCP candidates have to be verified at the time they are generated.
+ void VerifySavedTcpCandidates(int endpoint, const std::string& tcptype) {
+ for (auto& data : GetEndpoint(endpoint)->saved_candidates_) {
+ EXPECT_EQ(data->candidate.protocol(), cricket::TCP_PROTOCOL_NAME);
+ EXPECT_EQ(data->candidate.tcptype(), tcptype);
+ if (data->candidate.tcptype() == cricket::TCPTYPE_ACTIVE_STR) {
+ EXPECT_EQ(data->candidate.address().port(), cricket::DISCARD_PORT);
+ } else if (data->candidate.tcptype() == cricket::TCPTYPE_PASSIVE_STR) {
+ EXPECT_NE(data->candidate.address().port(), cricket::DISCARD_PORT);
+ } else {
+ FAIL() << "Unknown tcptype: " << data->candidate.tcptype();
+ }
+ }
+ }
+
void ResumeCandidates(int endpoint) {
Endpoint* ed = GetEndpoint(endpoint);
std::vector<CandidateData*>::iterator it = ed->saved_candidates_.begin();
@@ -825,12 +842,12 @@ class P2PTransportChannelTest : public P2PTransportChannelTestBase {
rtc::SocketAddress(), rtc::SocketAddress(),
rtc::SocketAddress()));
- cricket::RelayServerConfig relay_server(cricket::RELAY_TURN);
- relay_server.credentials = kRelayCredentials;
- relay_server.ports.push_back(
+ cricket::RelayServerConfig turn_server(cricket::RELAY_TURN);
+ turn_server.credentials = kRelayCredentials;
+ turn_server.ports.push_back(
cricket::ProtocolAddress(kTurnUdpIntAddr, cricket::PROTO_UDP, false));
- GetEndpoint(0)->allocator_->AddRelay(relay_server);
- GetEndpoint(1)->allocator_->AddRelay(relay_server);
+ GetEndpoint(0)->allocator_->AddTurnServer(turn_server);
+ GetEndpoint(1)->allocator_->AddTurnServer(turn_server);
int delay = kMinimumStepDelay;
ConfigureEndpoint(0, config1);
@@ -1290,8 +1307,19 @@ TEST_F(P2PTransportChannelTest, TestTcpConnectionsFromActiveToPassive) {
SetAllowTcpListen(0, true); // actpass.
SetAllowTcpListen(1, false); // active.
+ // Pause candidates so we can verify the candidate properties.
+ PauseCandidates(0);
+ PauseCandidates(1);
CreateChannels(1);
+ // Verify TCP candidates.
+ VerifySavedTcpCandidates(0, cricket::TCPTYPE_PASSIVE_STR);
+ VerifySavedTcpCandidates(1, cricket::TCPTYPE_ACTIVE_STR);
+
+ // Resume candidates.
+ ResumeCandidates(0);
+ ResumeCandidates(1);
+
EXPECT_TRUE_WAIT(ep1_ch1()->receiving() && ep1_ch1()->writable() &&
ep2_ch1()->receiving() && ep2_ch1()->writable(),
1000);
@@ -1300,12 +1328,6 @@ TEST_F(P2PTransportChannelTest, TestTcpConnectionsFromActiveToPassive) {
LocalCandidate(ep1_ch1())->address().EqualIPs(kPublicAddrs[0]) &&
RemoteCandidate(ep1_ch1())->address().EqualIPs(kPublicAddrs[1]));
- std::string kTcpProtocol = "tcp";
- EXPECT_EQ(kTcpProtocol, RemoteCandidate(ep1_ch1())->protocol());
- EXPECT_EQ(kTcpProtocol, LocalCandidate(ep1_ch1())->protocol());
- EXPECT_EQ(kTcpProtocol, RemoteCandidate(ep2_ch1())->protocol());
- EXPECT_EQ(kTcpProtocol, LocalCandidate(ep2_ch1())->protocol());
-
TestSendRecv(1);
DestroyChannels();
}
@@ -1539,19 +1561,19 @@ TEST_F(P2PTransportChannelMultihomedTest, TestFailoverControlledSide) {
// Create channels and let them go writable, as usual.
CreateChannels(1);
- // Make the receiving timeout shorter for testing.
- cricket::IceConfig config = CreateIceConfig(1000, false);
- ep1_ch1()->SetIceConfig(config);
- ep2_ch1()->SetIceConfig(config);
-
- EXPECT_TRUE_WAIT(ep1_ch1()->receiving() && ep1_ch1()->writable() &&
- ep2_ch1()->receiving() && ep2_ch1()->writable(),
- 1000);
+ EXPECT_TRUE_WAIT_MARGIN(ep1_ch1()->receiving() && ep1_ch1()->writable() &&
+ ep2_ch1()->receiving() && ep2_ch1()->writable(),
+ 1000, 1000);
EXPECT_TRUE(
ep1_ch1()->best_connection() && ep2_ch1()->best_connection() &&
LocalCandidate(ep1_ch1())->address().EqualIPs(kPublicAddrs[0]) &&
RemoteCandidate(ep1_ch1())->address().EqualIPs(kPublicAddrs[1]));
+ // Make the receiving timeout shorter for testing.
+ cricket::IceConfig config = CreateIceConfig(1000, false);
+ ep1_ch1()->SetIceConfig(config);
+ ep2_ch1()->SetIceConfig(config);
+
// Blackhole any traffic to or from the public addrs.
LOG(LS_INFO) << "Failing over...";
fw()->AddRule(false, rtc::FP_ANY, rtc::FD_ANY, kPublicAddrs[1]);
@@ -1591,18 +1613,19 @@ TEST_F(P2PTransportChannelMultihomedTest, TestFailoverControllingSide) {
// Create channels and let them go writable, as usual.
CreateChannels(1);
- // Make the receiving timeout shorter for testing.
- cricket::IceConfig config = CreateIceConfig(1000, false);
- ep1_ch1()->SetIceConfig(config);
- ep2_ch1()->SetIceConfig(config);
- EXPECT_TRUE_WAIT(ep1_ch1()->receiving() && ep1_ch1()->writable() &&
- ep2_ch1()->receiving() && ep2_ch1()->writable(),
- 1000);
+ EXPECT_TRUE_WAIT_MARGIN(ep1_ch1()->receiving() && ep1_ch1()->writable() &&
+ ep2_ch1()->receiving() && ep2_ch1()->writable(),
+ 1000, 1000);
EXPECT_TRUE(
ep1_ch1()->best_connection() && ep2_ch1()->best_connection() &&
LocalCandidate(ep1_ch1())->address().EqualIPs(kPublicAddrs[0]) &&
RemoteCandidate(ep1_ch1())->address().EqualIPs(kPublicAddrs[1]));
+ // Make the receiving timeout shorter for testing.
+ cricket::IceConfig config = CreateIceConfig(1000, false);
+ ep1_ch1()->SetIceConfig(config);
+ ep2_ch1()->SetIceConfig(config);
+
// Blackhole any traffic to or from the public addrs.
LOG(LS_INFO) << "Failing over...";
fw()->AddRule(false, rtc::FP_ANY, rtc::FD_ANY, kPublicAddrs[0]);
@@ -1627,6 +1650,43 @@ TEST_F(P2PTransportChannelMultihomedTest, TestFailoverControllingSide) {
DestroyChannels();
}
+// Test that the backup connection is pinged at a rate no faster than
+// what was configured.
+TEST_F(P2PTransportChannelMultihomedTest, TestPingBackupConnectionRate) {
+ AddAddress(0, kPublicAddrs[0]);
+ // Adding an alternate address makes sure |kPublicAddrs| has a higher
+ // priority than the others. This is due to the FakeNetwork::AddInterface
+ // method.
+ AddAddress(1, kAlternateAddrs[1]);
+ AddAddress(1, kPublicAddrs[1]);
+
+ // Use only local ports for simplicity.
+ SetAllocatorFlags(0, kOnlyLocalPorts);
+ SetAllocatorFlags(1, kOnlyLocalPorts);
+
+ // Create channels and let them go writable, as usual.
+ CreateChannels(1);
+ EXPECT_TRUE_WAIT_MARGIN(ep1_ch1()->receiving() && ep1_ch1()->writable() &&
+ ep2_ch1()->receiving() && ep2_ch1()->writable(),
+ 1000, 1000);
+ int backup_ping_interval = 2000;
+ ep2_ch1()->SetIceConfig(CreateIceConfig(2000, false, backup_ping_interval));
+ // After the state becomes COMPLETED, the backup connection will be pinged
+ // once every |backup_ping_interval| milliseconds.
+ ASSERT_TRUE_WAIT(ep2_ch1()->GetState() == cricket::STATE_COMPLETED, 1000);
+ const std::vector<cricket::Connection*>& connections =
+ ep2_ch1()->connections();
+ ASSERT_EQ(2U, connections.size());
+ cricket::Connection* backup_conn = connections[1];
+ EXPECT_TRUE_WAIT(backup_conn->writable(), 3000);
+ uint32_t last_ping_response_ms = backup_conn->last_ping_response_received();
+ EXPECT_TRUE_WAIT(
+ last_ping_response_ms < backup_conn->last_ping_response_received(), 5000);
+ int time_elapsed =
+ backup_conn->last_ping_response_received() - last_ping_response_ms;
+ LOG(LS_INFO) << "Time elapsed: " << time_elapsed;
+ EXPECT_GE(time_elapsed, backup_ping_interval);
+}
+
TEST_F(P2PTransportChannelMultihomedTest, TestGetState) {
AddAddress(0, kAlternateAddrs[0]);
AddAddress(0, kPublicAddrs[0]);
@@ -1707,12 +1767,14 @@ class P2PTransportChannelPingTest : public testing::Test,
cricket::Candidate CreateCandidate(const std::string& ip,
int port,
- int priority) {
+ int priority,
+ const std::string& ufrag = "") {
cricket::Candidate c;
c.set_address(rtc::SocketAddress(ip, port));
c.set_component(1);
c.set_protocol(cricket::UDP_PROTOCOL_NAME);
c.set_priority(priority);
+ c.set_username(ufrag);
return c;
}
@@ -1796,6 +1858,62 @@ TEST_F(P2PTransportChannelPingTest, TestNoTriggeredChecksWhenWritable) {
EXPECT_EQ(conn2, ch.FindNextPingableConnection());
}
+// Test adding remote candidates with different ufrags. If a remote candidate
+// is added with an old ufrag, it will be discarded. If it is added with a
+// ufrag that was not seen before, it will be used to create connections,
+// although the ICE pwd in the remote candidate will only be set once the
+// matching credentials arrive. If a candidate is added with the current ICE
+// ufrag, its pwd and generation will be set properly.
+TEST_F(P2PTransportChannelPingTest, TestAddRemoteCandidateWithVariousUfrags) {
+ cricket::FakePortAllocator pa(rtc::Thread::Current(), nullptr);
+ cricket::P2PTransportChannel ch("add candidate", 1, nullptr, &pa);
+ PrepareChannel(&ch);
+ ch.Connect();
+ ch.MaybeStartGathering();
+ // Add a candidate with a future ufrag.
+ ch.AddRemoteCandidate(CreateCandidate("1.1.1.1", 1, 1, kIceUfrag[2]));
+ cricket::Connection* conn1 = WaitForConnectionTo(&ch, "1.1.1.1", 1);
+ ASSERT_TRUE(conn1 != nullptr);
+ const cricket::Candidate& candidate = conn1->remote_candidate();
+ EXPECT_EQ(kIceUfrag[2], candidate.username());
+ EXPECT_TRUE(candidate.password().empty());
+ EXPECT_TRUE(ch.FindNextPingableConnection() == nullptr);
+
+ // Set the remote credentials with the "future" ufrag.
+ // This should set the ICE pwd in the remote candidate of |conn1|, making
+ // it pingable.
+ ch.SetRemoteIceCredentials(kIceUfrag[2], kIcePwd[2]);
+ EXPECT_EQ(kIceUfrag[2], candidate.username());
+ EXPECT_EQ(kIcePwd[2], candidate.password());
+ EXPECT_EQ(conn1, ch.FindNextPingableConnection());
+
+ // Add a candidate with an old ufrag. No connection will be created.
+ ch.AddRemoteCandidate(CreateCandidate("2.2.2.2", 2, 2, kIceUfrag[1]));
+ rtc::Thread::Current()->ProcessMessages(500);
+ EXPECT_TRUE(GetConnectionTo(&ch, "2.2.2.2", 2) == nullptr);
+
+ // Add a candidate with the current ufrag, its pwd and generation will be
+ // assigned, even if the generation is not set.
+ ch.AddRemoteCandidate(CreateCandidate("3.3.3.3", 3, 0, kIceUfrag[2]));
+ cricket::Connection* conn3 = nullptr;
+ ASSERT_TRUE_WAIT((conn3 = GetConnectionTo(&ch, "3.3.3.3", 3)) != nullptr,
+ 3000);
+ const cricket::Candidate& new_candidate = conn3->remote_candidate();
+ EXPECT_EQ(kIcePwd[2], new_candidate.password());
+ EXPECT_EQ(1U, new_candidate.generation());
+
+ // Check that the pwd of all remote candidates are properly assigned.
+ for (const cricket::RemoteCandidate& candidate : ch.remote_candidates()) {
+ EXPECT_TRUE(candidate.username() == kIceUfrag[1] ||
+ candidate.username() == kIceUfrag[2]);
+ if (candidate.username() == kIceUfrag[1]) {
+ EXPECT_EQ(kIcePwd[1], candidate.password());
+ } else if (candidate.username() == kIceUfrag[2]) {
+ EXPECT_EQ(kIcePwd[2], candidate.password());
+ }
+ }
+}
+
TEST_F(P2PTransportChannelPingTest, ConnectionResurrection) {
cricket::FakePortAllocator pa(rtc::Thread::Current(), nullptr);
cricket::P2PTransportChannel ch("connection resurrection", 1, nullptr, &pa);
@@ -1868,7 +1986,7 @@ TEST_F(P2PTransportChannelPingTest, TestReceivingStateChange) {
conn1->ReceivedPing();
conn1->OnReadPacket("ABC", 3, rtc::CreatePacketTime(0));
- EXPECT_TRUE_WAIT(ch.best_connection() != nullptr, 1000)
+ EXPECT_TRUE_WAIT(ch.best_connection() != nullptr, 1000);
EXPECT_TRUE_WAIT(ch.receiving(), 1000);
EXPECT_TRUE_WAIT(!ch.receiving(), 1000);
}
@@ -1932,7 +2050,8 @@ TEST_F(P2PTransportChannelPingTest, TestSelectConnectionBeforeNomination) {
// The controlled side will select a connection as the "best connection" based
// on requests from an unknown address before the controlling side nominates
// a connection, and will nominate a connection from an unknown address if the
-// request contains the use_candidate attribute.
+// request contains the use_candidate attribute. It will also send back a
+// ping response and set the ICE pwd in the remote candidate appropriately.
TEST_F(P2PTransportChannelPingTest, TestSelectConnectionFromUnknownAddress) {
cricket::FakePortAllocator pa(rtc::Thread::Current(), nullptr);
cricket::P2PTransportChannel ch("receiving state change", 1, nullptr, &pa);
@@ -1948,14 +2067,16 @@ TEST_F(P2PTransportChannelPingTest, TestSelectConnectionFromUnknownAddress) {
uint32_t prflx_priority = cricket::ICE_TYPE_PREFERENCE_PRFLX << 24;
request.AddAttribute(new cricket::StunUInt32Attribute(
cricket::STUN_ATTR_PRIORITY, prflx_priority));
- cricket::Port* port = GetPort(&ch);
+ cricket::TestUDPPort* port = static_cast<cricket::TestUDPPort*>(GetPort(&ch));
port->SignalUnknownAddress(port, rtc::SocketAddress("1.1.1.1", 1),
cricket::PROTO_UDP, &request, kIceUfrag[1], false);
cricket::Connection* conn1 = WaitForConnectionTo(&ch, "1.1.1.1", 1);
ASSERT_TRUE(conn1 != nullptr);
+ EXPECT_TRUE(port->sent_binding_response());
EXPECT_EQ(conn1, ch.best_connection());
conn1->ReceivedPingResponse();
EXPECT_EQ(conn1, ch.best_connection());
+ port->set_sent_binding_response(false);
// Another connection is nominated via use_candidate.
ch.AddRemoteCandidate(CreateCandidate("2.2.2.2", 2, 1));
@@ -1977,8 +2098,10 @@ TEST_F(P2PTransportChannelPingTest, TestSelectConnectionFromUnknownAddress) {
cricket::PROTO_UDP, &request, kIceUfrag[1], false);
cricket::Connection* conn3 = WaitForConnectionTo(&ch, "3.3.3.3", 3);
ASSERT_TRUE(conn3 != nullptr);
+ EXPECT_TRUE(port->sent_binding_response());
conn3->ReceivedPingResponse(); // Become writable.
EXPECT_EQ(conn2, ch.best_connection());
+ port->set_sent_binding_response(false);
// However if the request contains use_candidate attribute, it will be
// selected as the best connection.
@@ -1988,10 +2111,23 @@ TEST_F(P2PTransportChannelPingTest, TestSelectConnectionFromUnknownAddress) {
cricket::PROTO_UDP, &request, kIceUfrag[1], false);
cricket::Connection* conn4 = WaitForConnectionTo(&ch, "4.4.4.4", 4);
ASSERT_TRUE(conn4 != nullptr);
+ EXPECT_TRUE(port->sent_binding_response());
// conn4 is not the best connection yet because it is not writable.
EXPECT_EQ(conn2, ch.best_connection());
conn4->ReceivedPingResponse(); // Become writable.
EXPECT_EQ(conn4, ch.best_connection());
+
+ // Test the case where the request from an unknown address carries a ufrag
+ // from an old generation.
+ port->set_sent_binding_response(false);
+ ch.SetRemoteIceCredentials(kIceUfrag[2], kIcePwd[2]);
+ ch.SetRemoteIceCredentials(kIceUfrag[3], kIcePwd[3]);
+ port->SignalUnknownAddress(port, rtc::SocketAddress("5.5.5.5", 5),
+ cricket::PROTO_UDP, &request, kIceUfrag[2], false);
+ cricket::Connection* conn5 = WaitForConnectionTo(&ch, "5.5.5.5", 5);
+ ASSERT_TRUE(conn5 != nullptr);
+ EXPECT_TRUE(port->sent_binding_response());
+ EXPECT_EQ(kIcePwd[2], conn5->remote_candidate().password());
}
// The controlled side will select a connection as the "best connection"
@@ -2114,7 +2250,9 @@ TEST_F(P2PTransportChannelPingTest, TestGetState) {
EXPECT_TRUE_WAIT(conn2->pruned(), 1000);
EXPECT_EQ(cricket::TransportChannelState::STATE_COMPLETED, ch.GetState());
conn1->Prune(); // All connections are pruned.
- EXPECT_EQ(cricket::TransportChannelState::STATE_FAILED, ch.GetState());
+ // Need to wait until the channel state is updated.
+ EXPECT_EQ_WAIT(cricket::TransportChannelState::STATE_FAILED, ch.GetState(),
+ 1000);
}
// Test that when a low-priority connection is pruned, it is not deleted
@@ -2190,3 +2328,31 @@ TEST_F(P2PTransportChannelPingTest, TestDeleteConnectionsIfAllWriteTimedout) {
conn3->Prune();
EXPECT_TRUE_WAIT(ch.connections().empty(), 1000);
}
+
+// Test that after a port allocator session is started, it will be stopped
+// when a new connection becomes writable and receiving. Also test that this
+// holds even if the transport channel never lost writability.
+TEST_F(P2PTransportChannelPingTest, TestStopPortAllocatorSessions) {
+ cricket::FakePortAllocator pa(rtc::Thread::Current(), nullptr);
+ cricket::P2PTransportChannel ch("test channel", 1, nullptr, &pa);
+ PrepareChannel(&ch);
+ ch.SetIceConfig(CreateIceConfig(2000, false));
+ ch.Connect();
+ ch.MaybeStartGathering();
+ ch.AddRemoteCandidate(CreateCandidate("1.1.1.1", 1, 100));
+ cricket::Connection* conn1 = WaitForConnectionTo(&ch, "1.1.1.1", 1);
+ ASSERT_TRUE(conn1 != nullptr);
+ conn1->ReceivedPingResponse(); // Becomes writable and receiving
+ EXPECT_TRUE(!ch.allocator_session()->IsGettingPorts());
+
+ // Restart gathering even if the transport channel is still writable.
+ // It should stop getting ports after a new connection becomes strongly
+ // connected.
+ ch.SetIceCredentials(kIceUfrag[1], kIcePwd[1]);
+ ch.MaybeStartGathering();
+ ch.AddRemoteCandidate(CreateCandidate("2.2.2.2", 2, 100));
+ cricket::Connection* conn2 = WaitForConnectionTo(&ch, "2.2.2.2", 2);
+ ASSERT_TRUE(conn2 != nullptr);
+ conn2->ReceivedPingResponse(); // Becomes writable and receiving
+ EXPECT_TRUE(!ch.allocator_session()->IsGettingPorts());
+}
diff --git a/webrtc/p2p/base/port.cc b/webrtc/p2p/base/port.cc
index d34b05f8e9..9dd5c83fed 100644
--- a/webrtc/p2p/base/port.cc
+++ b/webrtc/p2p/base/port.cc
@@ -310,10 +310,6 @@ void Port::OnReadPacket(
}
}
-void Port::OnSentPacket(const rtc::SentPacket& sent_packet) {
- PortInterface::SignalSentPacket(this, sent_packet);
-}
-
void Port::OnReadyToSend() {
AddressMap::iterator iter = connections_.begin();
for (; iter != connections_.end(); ++iter) {
@@ -567,10 +563,6 @@ void Port::SendBindingResponse(StunMessage* request,
response.AddMessageIntegrity(password_);
response.AddFingerprint();
- // The fact that we received a successful request means that this connection
- // (if one exists) should now be receiving.
- Connection* conn = GetConnection(addr);
-
// Send the response message.
rtc::ByteBuffer buf;
response.Write(&buf);
@@ -585,6 +577,7 @@ void Port::SendBindingResponse(StunMessage* request,
} else {
// Log at LS_INFO if we send a stun ping response on an unwritable
// connection.
+ Connection* conn = GetConnection(addr);
rtc::LoggingSeverity sev = (conn && !conn->writable()) ?
rtc::LS_INFO : rtc::LS_VERBOSE;
LOG_JV(sev, this)
@@ -592,10 +585,6 @@ void Port::SendBindingResponse(StunMessage* request,
<< ", to=" << addr.ToSensitiveString()
<< ", id=" << rtc::hex_encode(response.transaction_id());
}
-
- ASSERT(conn != NULL);
- if (conn)
- conn->ReceivedPing();
}
void Port::SendBindingErrorResponse(StunMessage* request,
@@ -924,29 +913,7 @@ void Connection::OnReadPacket(
<< ", id=" << rtc::hex_encode(msg->transaction_id());
if (remote_ufrag == remote_candidate_.username()) {
- // Check for role conflicts.
- if (!port_->MaybeIceRoleConflict(addr, msg.get(), remote_ufrag)) {
- // Received conflicting role from the peer.
- LOG(LS_INFO) << "Received conflicting role from the peer.";
- return;
- }
-
- // Incoming, validated stun request from remote peer.
- // This call will also set the connection receiving.
- port_->SendBindingResponse(msg.get(), addr);
-
- // If timed out sending writability checks, start up again
- if (!pruned_ && (write_state_ == STATE_WRITE_TIMEOUT))
- set_write_state(STATE_WRITE_INIT);
-
- if (port_->GetIceRole() == ICEROLE_CONTROLLED) {
- const StunByteStringAttribute* use_candidate_attr =
- msg->GetByteString(STUN_ATTR_USE_CANDIDATE);
- if (use_candidate_attr) {
- set_nominated(true);
- SignalNominated(this);
- }
- }
+ HandleBindingRequest(msg.get());
} else {
// The packet had the right local username, but the remote username
// was not the right one for the remote address.
@@ -986,6 +953,37 @@ void Connection::OnReadPacket(
}
}
+void Connection::HandleBindingRequest(IceMessage* msg) {
+ // This connection should now be receiving.
+ ReceivedPing();
+
+ const rtc::SocketAddress& remote_addr = remote_candidate_.address();
+ const std::string& remote_ufrag = remote_candidate_.username();
+ // Check for role conflicts.
+ if (!port_->MaybeIceRoleConflict(remote_addr, msg, remote_ufrag)) {
+ // Received conflicting role from the peer.
+ LOG(LS_INFO) << "Received conflicting role from the peer.";
+ return;
+ }
+
+ // This is a validated stun request from remote peer.
+ port_->SendBindingResponse(msg, remote_addr);
+
+ // If writability checks previously timed out, start them up again.
+ if (!pruned_ && write_state_ == STATE_WRITE_TIMEOUT) {
+ set_write_state(STATE_WRITE_INIT);
+ }
+
+ if (port_->GetIceRole() == ICEROLE_CONTROLLED) {
+ const StunByteStringAttribute* use_candidate_attr =
+ msg->GetByteString(STUN_ATTR_USE_CANDIDATE);
+ if (use_candidate_attr) {
+ set_nominated(true);
+ SignalNominated(this);
+ }
+ }
+}
+
void Connection::OnReadyToSend() {
if (write_state_ == STATE_WRITABLE) {
SignalReadyToSend(this);
@@ -1006,6 +1004,11 @@ void Connection::Destroy() {
port_->thread()->Post(this, MSG_DELETE);
}
+void Connection::FailAndDestroy() {
+ set_state(Connection::STATE_FAILED);
+ Destroy();
+}
+
void Connection::PrintPingsSinceLastResponse(std::string* s, size_t max) {
std::ostringstream oss;
oss << std::boolalpha;
@@ -1117,25 +1120,28 @@ void Connection::ReceivedPingResponse() {
}
bool Connection::dead(uint32_t now) const {
- if (now < (time_created_ms_ + MIN_CONNECTION_LIFETIME)) {
- // A connection that hasn't passed its minimum lifetime is still alive.
- // We do this to prevent connections from being pruned too quickly
- // during a network change event when two networks would be up
- // simultaneously but only for a brief period.
- return false;
+ if (last_received() > 0) {
+ // If it has ever received anything, we keep it alive until it hasn't
+ // received anything for DEAD_CONNECTION_RECEIVE_TIMEOUT. This covers the
+ // normal case of a successfully used connection that stops working. This
+ // also allows a remote peer to continue pinging over a locally inactive
+ // (pruned) connection.
+ return (now > (last_received() + DEAD_CONNECTION_RECEIVE_TIMEOUT));
}
- if (receiving_) {
- // A connection that is receiving is alive.
+ if (active()) {
+ // If it has never received anything, keep it alive as long as it is
+ // actively pinging and not pruned. Otherwise, the connection might be
+ // deleted before it has a chance to ping. This is the normal case for a
+ // new connection that is pinging but hasn't received anything yet.
return false;
}
- // A connection is alive until it is inactive.
- return !active();
-
- // TODO(honghaiz): Move from using the write state to using the receiving
- // state with something like the following:
- // return (now > (last_received() + DEAD_CONNECTION_RECEIVE_TIMEOUT));
+ // If it has never received anything and is not actively pinging (pruned), we
+ // keep it around for at least MIN_CONNECTION_LIFETIME to prevent connections
+ // from being pruned too quickly during a network change event when two
+ // networks would be up simultaneously but only for a brief period.
+ return now > (time_created_ms_ + MIN_CONNECTION_LIFETIME);
}
std::string Connection::ToDebugId() const {
@@ -1248,8 +1254,7 @@ void Connection::OnConnectionRequestErrorResponse(ConnectionRequest* request,
// This is not a valid connection.
LOG_J(LS_ERROR, this) << "Received STUN error response, code="
<< error_code << "; killing connection";
- set_state(STATE_FAILED);
- Destroy();
+ FailAndDestroy();
}
}
@@ -1302,13 +1307,13 @@ void Connection::OnMessage(rtc::Message *pmsg) {
delete this;
}
-uint32_t Connection::last_received() {
+uint32_t Connection::last_received() const {
return std::max(last_data_received_,
std::max(last_ping_received_, last_ping_response_received_));
}
size_t Connection::recv_bytes_second() {
- return recv_rate_tracker_.ComputeRate();
+ return round(recv_rate_tracker_.ComputeRate());
}
size_t Connection::recv_total_bytes() {
@@ -1316,7 +1321,7 @@ size_t Connection::recv_total_bytes() {
}
size_t Connection::sent_bytes_second() {
- return send_rate_tracker_.ComputeRate();
+ return round(send_rate_tracker_.ComputeRate());
}
size_t Connection::sent_total_bytes() {
@@ -1396,10 +1401,10 @@ void Connection::MaybeAddPrflxCandidate(ConnectionRequest* request,
SignalStateChange(this);
}
-ProxyConnection::ProxyConnection(Port* port, size_t index,
- const Candidate& candidate)
- : Connection(port, index, candidate), error_(0) {
-}
+ProxyConnection::ProxyConnection(Port* port,
+ size_t index,
+ const Candidate& remote_candidate)
+ : Connection(port, index, remote_candidate) {}
int ProxyConnection::Send(const void* data, size_t size,
const rtc::PacketOptions& options) {
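// Editorial note (not part of the diff): the new Connection::dead() policy
// above reduces to a pure function of a few timestamps. A minimal sketch in
// plain C++, assuming last_received == 0 means "never received anything":

#include <cstdint>

const uint32_t MIN_CONNECTION_LIFETIME = 10 * 1000;          // 10 seconds.
const uint32_t DEAD_CONNECTION_RECEIVE_TIMEOUT = 30 * 1000;  // 30 seconds.

bool IsDead(uint32_t now, uint32_t created_ms, uint32_t last_received,
            bool active) {
  if (last_received > 0) {
    // Ever received something: dead only after a long receive gap.
    return now > last_received + DEAD_CONNECTION_RECEIVE_TIMEOUT;
  }
  if (active) {
    // Never received, but still pinging and not pruned: keep it alive.
    return false;
  }
  // Never received and pruned: grant a minimum lifetime, then let it go.
  return now > created_ms + MIN_CONNECTION_LIFETIME;
}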
diff --git a/webrtc/p2p/base/port.h b/webrtc/p2p/base/port.h
index 01c45f26d8..436b1e7faa 100644
--- a/webrtc/p2p/base/port.h
+++ b/webrtc/p2p/base/port.h
@@ -54,6 +54,10 @@ extern const char TCPTYPE_SIMOPEN_STR[];
// it.
const uint32_t MIN_CONNECTION_LIFETIME = 10 * 1000; // 10 seconds.
+// A connection will be declared dead if it has not received anything for this
+// long.
+const uint32_t DEAD_CONNECTION_RECEIVE_TIMEOUT = 30 * 1000; // 30 seconds.
+
// The timeout duration when a connection does not receive anything.
const uint32_t WEAK_CONNECTION_RECEIVE_TIMEOUT = 2500; // 2.5 seconds
@@ -276,7 +280,11 @@ class Port : public PortInterface, public rtc::MessageHandler,
const std::string& remote_ufrag);
// Called when a packet has been sent to the socket.
- void OnSentPacket(const rtc::SentPacket& sent_packet);
+ // This is made pure virtual to notify subclasses of Port that they MUST
+  // listen to AsyncPacketSocket::SignalSentPacket and then re-emit it via
+  // PortInterface::SignalSentPacket.
+ virtual void OnSentPacket(rtc::AsyncPacketSocket* socket,
+ const rtc::SentPacket& sent_packet) = 0;
// Called when the socket is currently able to send.
void OnReadyToSend();
@@ -442,7 +450,6 @@ class Connection : public rtc::MessageHandler,
bool connected() const { return connected_; }
bool weak() const { return !(writable() && receiving() && connected()); }
bool active() const {
- // TODO(honghaiz): Move from using |write_state_| to using |pruned_|.
return write_state_ != STATE_WRITE_TIMEOUT;
}
// A connection is dead if it can be safely deleted.
@@ -510,6 +517,9 @@ class Connection : public rtc::MessageHandler,
// Makes the connection go away.
void Destroy();
+ // Makes the connection go away, in a failed state.
+ void FailAndDestroy();
+
// Checks that the state of this connection is up-to-date. The argument is
// the current time, which is compared against various timeouts.
void UpdateState(uint32_t now);
@@ -518,11 +528,16 @@ class Connection : public rtc::MessageHandler,
uint32_t last_ping_sent() const { return last_ping_sent_; }
void Ping(uint32_t now);
void ReceivedPingResponse();
+ uint32_t last_ping_response_received() const {
+ return last_ping_response_received_;
+ }
// Called whenever a valid ping is received on this connection. This is
// public because the connection intercepts the first ping for us.
uint32_t last_ping_received() const { return last_ping_received_; }
void ReceivedPing();
+ // Handles the binding request; sends a response if this is a valid request.
+ void HandleBindingRequest(IceMessage* msg);
// Debugging description of this connection
std::string ToDebugId() const;
@@ -557,7 +572,7 @@ class Connection : public rtc::MessageHandler,
// Returns the last received time of any data, stun request, or stun
// response in milliseconds
- uint32_t last_received();
+ uint32_t last_received() const;
protected:
enum { MSG_DELETE = 0, MSG_FIRST_AVAILABLE };
@@ -628,17 +643,18 @@ class Connection : public rtc::MessageHandler,
friend class ConnectionRequest;
};
-// ProxyConnection defers all the interesting work to the port
+// ProxyConnection defers all the interesting work to the port.
class ProxyConnection : public Connection {
public:
- ProxyConnection(Port* port, size_t index, const Candidate& candidate);
+ ProxyConnection(Port* port, size_t index, const Candidate& remote_candidate);
- virtual int Send(const void* data, size_t size,
- const rtc::PacketOptions& options);
- virtual int GetError() { return error_; }
+ int Send(const void* data,
+ size_t size,
+ const rtc::PacketOptions& options) override;
+ int GetError() override { return error_; }
private:
- int error_;
+ int error_ = 0;
};
} // namespace cricket
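// Editorial note: OnSentPacket is now pure virtual on Port, so every subclass
// must forward its socket's sent-packet notifications to the port-level
// signal. The pattern, as UDPPort, TCPPort and TurnPort implement it elsewhere
// in this change (a sketch inside a hypothetical subclass):
//
//   void OnSentPacket(rtc::AsyncPacketSocket* socket,
//                     const rtc::SentPacket& sent_packet) override {
//     PortInterface::SignalSentPacket(sent_packet);
//   }
//
// together with connecting the socket during setup:
//
//   socket_->SignalSentPacket.connect(this, &MyPort::OnSentPacket);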
diff --git a/webrtc/p2p/base/port_unittest.cc b/webrtc/p2p/base/port_unittest.cc
index 4a4ed32456..449021ad9f 100644
--- a/webrtc/p2p/base/port_unittest.cc
+++ b/webrtc/p2p/base/port_unittest.cc
@@ -17,6 +17,7 @@
#include "webrtc/p2p/base/testturnserver.h"
#include "webrtc/p2p/base/transport.h"
#include "webrtc/p2p/base/turnport.h"
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/crc32.h"
#include "webrtc/base/gunit.h"
#include "webrtc/base/helpers.h"
@@ -140,6 +141,10 @@ class TestPort : public Port {
ICE_TYPE_PREFERENCE_HOST, 0, true);
}
+ virtual bool SupportsProtocol(const std::string& protocol) const {
+ return true;
+ }
+
// Exposed for testing candidate building.
void AddCandidateAddress(const rtc::SocketAddress& addr) {
AddAddress(addr, addr, rtc::SocketAddress(), "udp", "", "", Type(),
@@ -199,9 +204,13 @@ class TestPort : public Port {
}
private:
+ void OnSentPacket(rtc::AsyncPacketSocket* socket,
+ const rtc::SentPacket& sent_packet) {
+ PortInterface::SignalSentPacket(sent_packet);
+ }
rtc::scoped_ptr<ByteBuffer> last_stun_buf_;
rtc::scoped_ptr<IceMessage> last_stun_msg_;
- int type_preference_;
+ int type_preference_ = 0;
};
class TestChannel : public sigslot::has_slots<> {
@@ -456,9 +465,8 @@ class PortTest : public testing::Test, public sigslot::has_slots<> {
}
UDPPort* CreateUdpPort(const SocketAddress& addr,
PacketSocketFactory* socket_factory) {
- return UDPPort::Create(main_, socket_factory, &network_,
- addr.ipaddr(), 0, 0, username_, password_,
- std::string(), false);
+ return UDPPort::Create(main_, socket_factory, &network_, addr.ipaddr(), 0,
+ 0, username_, password_, std::string(), true);
}
TCPPort* CreateTcpPort(const SocketAddress& addr) {
return CreateTcpPort(addr, &socket_factory_);
@@ -1235,6 +1243,58 @@ TEST_F(PortTest, TestSslTcpToSslTcpRelay) {
}
*/
+// Test that a connection will be dead and deleted if
+// i) it has never received anything for MIN_CONNECTION_LIFETIME milliseconds
+// since it was created, or
+// ii) it has not received anything for DEAD_CONNECTION_RECEIVE_TIMEOUT
+// milliseconds since last receiving.
+TEST_F(PortTest, TestConnectionDead) {
+ UDPPort* port1 = CreateUdpPort(kLocalAddr1);
+ UDPPort* port2 = CreateUdpPort(kLocalAddr2);
+ TestChannel ch1(port1);
+ TestChannel ch2(port2);
+ // Acquire address.
+ ch1.Start();
+ ch2.Start();
+ ASSERT_EQ_WAIT(1, ch1.complete_count(), kTimeout);
+ ASSERT_EQ_WAIT(1, ch2.complete_count(), kTimeout);
+
+  // Test the case where the connection has never received anything.
+ uint32_t before_created = rtc::Time();
+ ch1.CreateConnection(GetCandidate(port2));
+ uint32_t after_created = rtc::Time();
+ Connection* conn = ch1.conn();
+ ASSERT(conn != nullptr);
+ // It is not dead if it is after MIN_CONNECTION_LIFETIME but not pruned.
+ conn->UpdateState(after_created + MIN_CONNECTION_LIFETIME + 1);
+ rtc::Thread::Current()->ProcessMessages(0);
+ EXPECT_TRUE(ch1.conn() != nullptr);
+ // It is not dead if it is before MIN_CONNECTION_LIFETIME and pruned.
+ conn->UpdateState(before_created + MIN_CONNECTION_LIFETIME - 1);
+ conn->Prune();
+ rtc::Thread::Current()->ProcessMessages(0);
+ EXPECT_TRUE(ch1.conn() != nullptr);
+ // It will be dead after MIN_CONNECTION_LIFETIME and pruned.
+ conn->UpdateState(after_created + MIN_CONNECTION_LIFETIME + 1);
+ EXPECT_TRUE_WAIT(ch1.conn() == nullptr, kTimeout);
+
+  // Test the case where the connection has received something.
+ // Create a connection again and receive a ping.
+ ch1.CreateConnection(GetCandidate(port2));
+ conn = ch1.conn();
+ ASSERT(conn != nullptr);
+ uint32_t before_last_receiving = rtc::Time();
+ conn->ReceivedPing();
+ uint32_t after_last_receiving = rtc::Time();
+  // The connection will be dead after DEAD_CONNECTION_RECEIVE_TIMEOUT.
+ conn->UpdateState(
+ before_last_receiving + DEAD_CONNECTION_RECEIVE_TIMEOUT - 1);
+ rtc::Thread::Current()->ProcessMessages(100);
+ EXPECT_TRUE(ch1.conn() != nullptr);
+ conn->UpdateState(after_last_receiving + DEAD_CONNECTION_RECEIVE_TIMEOUT + 1);
+ EXPECT_TRUE_WAIT(ch1.conn() == nullptr, kTimeout);
+}
+
// This test case verifies standard ICE features in STUN messages. Currently it
// verifies Message Integrity attribute in STUN messages and username in STUN
// binding request will have colon (":") between remote and local username.
@@ -2224,7 +2284,7 @@ TEST_F(PortTest, TestWritableState) {
// Data should be unsendable until the connection is accepted.
char data[] = "abcd";
- int data_size = ARRAY_SIZE(data);
+ int data_size = arraysize(data);
rtc::PacketOptions options;
EXPECT_EQ(SOCKET_ERROR, ch1.conn()->Send(data, data_size, options));
@@ -2450,3 +2510,24 @@ TEST_F(PortTest, TestControlledToControllingNotDestroyed) {
rtc::Thread::Current()->ProcessMessages(kTimeout);
EXPECT_FALSE(destroyed());
}
+
+TEST_F(PortTest, TestSupportsProtocol) {
+ rtc::scoped_ptr<Port> udp_port(CreateUdpPort(kLocalAddr1));
+ EXPECT_TRUE(udp_port->SupportsProtocol(UDP_PROTOCOL_NAME));
+ EXPECT_FALSE(udp_port->SupportsProtocol(TCP_PROTOCOL_NAME));
+
+ rtc::scoped_ptr<Port> stun_port(
+ CreateStunPort(kLocalAddr1, nat_socket_factory1()));
+ EXPECT_TRUE(stun_port->SupportsProtocol(UDP_PROTOCOL_NAME));
+ EXPECT_FALSE(stun_port->SupportsProtocol(TCP_PROTOCOL_NAME));
+
+ rtc::scoped_ptr<Port> tcp_port(CreateTcpPort(kLocalAddr1));
+ EXPECT_TRUE(tcp_port->SupportsProtocol(TCP_PROTOCOL_NAME));
+ EXPECT_TRUE(tcp_port->SupportsProtocol(SSLTCP_PROTOCOL_NAME));
+ EXPECT_FALSE(tcp_port->SupportsProtocol(UDP_PROTOCOL_NAME));
+
+ rtc::scoped_ptr<Port> turn_port(
+ CreateTurnPort(kLocalAddr1, nat_socket_factory1(), PROTO_UDP, PROTO_UDP));
+ EXPECT_TRUE(turn_port->SupportsProtocol(UDP_PROTOCOL_NAME));
+ EXPECT_FALSE(turn_port->SupportsProtocol(TCP_PROTOCOL_NAME));
+}
diff --git a/webrtc/p2p/base/portallocator.h b/webrtc/p2p/base/portallocator.h
index 4f8ec2fbe6..6fb79b065e 100644
--- a/webrtc/p2p/base/portallocator.h
+++ b/webrtc/p2p/base/portallocator.h
@@ -14,6 +14,7 @@
#include <string>
#include <vector>
+#include "webrtc/p2p/base/port.h"
#include "webrtc/p2p/base/portinterface.h"
#include "webrtc/base/helpers.h"
#include "webrtc/base/proxyinfo.h"
@@ -46,10 +47,14 @@ enum {
PORTALLOCATOR_ENABLE_SHARED_UFRAG = 0x80,
PORTALLOCATOR_ENABLE_SHARED_SOCKET = 0x100,
PORTALLOCATOR_ENABLE_STUN_RETRANSMIT_ATTRIBUTE = 0x200,
+ // When specified, we'll only allocate the STUN candidate for the public
+  // interface as seen by regular HTTP traffic and the HOST candidate associated
+ // with the default local interface.
PORTALLOCATOR_DISABLE_ADAPTER_ENUMERATION = 0x400,
- // When specified, a loopback candidate will be generated if
- // PORTALLOCATOR_DISABLE_ADAPTER_ENUMERATION is specified.
- PORTALLOCATOR_ENABLE_LOCALHOST_CANDIDATE = 0x800,
+ // When specified along with PORTALLOCATOR_DISABLE_ADAPTER_ENUMERATION, the
+ // default local candidate mentioned above will not be allocated. Only the
+ // STUN candidate will be.
+ PORTALLOCATOR_DISABLE_DEFAULT_LOCAL_CANDIDATE = 0x800,
// Disallow use of UDP when connecting to a relay server. Since proxy servers
// usually don't handle UDP, using UDP will leak the IP address.
PORTALLOCATOR_DISABLE_UDP_RELAY = 0x1000,
@@ -71,6 +76,38 @@ enum {
CF_ALL = 0x7,
};
+// TODO(deadbeef): Rename to TurnCredentials (and username to ufrag).
+struct RelayCredentials {
+ RelayCredentials() {}
+ RelayCredentials(const std::string& username, const std::string& password)
+ : username(username), password(password) {}
+
+ std::string username;
+ std::string password;
+};
+
+typedef std::vector<ProtocolAddress> PortList;
+// TODO(deadbeef): Rename to TurnServerConfig.
+struct RelayServerConfig {
+ RelayServerConfig(RelayType type) : type(type), priority(0) {}
+
+ RelayServerConfig(const std::string& address,
+ int port,
+ const std::string& username,
+ const std::string& password,
+ ProtocolType proto,
+ bool secure)
+ : type(RELAY_TURN), credentials(username, password) {
+ ports.push_back(
+ ProtocolAddress(rtc::SocketAddress(address, port), proto, secure));
+ }
+
+ RelayType type;
+ PortList ports;
+ RelayCredentials credentials;
+ int priority;
+};
+
class PortAllocatorSession : public sigslot::has_slots<> {
public:
// Content name passed in mostly for logging and debugging.
@@ -137,6 +174,18 @@ class PortAllocator : public sigslot::has_slots<> {
}
virtual ~PortAllocator() {}
+ // Set STUN and TURN servers to be used in future sessions.
+ virtual void SetIceServers(
+ const ServerAddresses& stun_servers,
+ const std::vector<RelayServerConfig>& turn_servers) = 0;
+
+ // Sets the network types to ignore.
+ // Values are defined by the AdapterType enum.
+ // For instance, calling this with
+ // ADAPTER_TYPE_ETHERNET | ADAPTER_TYPE_LOOPBACK will ignore Ethernet and
+ // loopback interfaces.
+ virtual void SetNetworkIgnoreMask(int network_ignore_mask) = 0;
+
PortAllocatorSession* CreateSession(
const std::string& sid,
const std::string& content_name,
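// Editorial note: a sketch of feeding the new RelayServerConfig through the
// pure-virtual SetIceServers(). The server addresses and credentials are
// placeholders, not values from this change:

#include <vector>
#include "webrtc/p2p/base/portallocator.h"

void ConfigureIceServers(cricket::PortAllocator* allocator) {
  cricket::ServerAddresses stun_servers;
  stun_servers.insert(rtc::SocketAddress("stun.example.org", 3478));

  std::vector<cricket::RelayServerConfig> turn_servers;
  turn_servers.push_back(cricket::RelayServerConfig(
      "turn.example.org", 3478, "user", "pass", cricket::PROTO_UDP,
      false /* secure */));

  allocator->SetIceServers(stun_servers, turn_servers);
}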
diff --git a/webrtc/p2p/base/portinterface.h b/webrtc/p2p/base/portinterface.h
index 0f77036ac1..e83879f3b7 100644
--- a/webrtc/p2p/base/portinterface.h
+++ b/webrtc/p2p/base/portinterface.h
@@ -53,6 +53,8 @@ class PortInterface {
virtual bool SharedSocket() const = 0;
+ virtual bool SupportsProtocol(const std::string& protocol) const = 0;
+
// PrepareAddress will attempt to get an address for this port that other
// clients can send to. It may take some time before the address is ready.
// Once it is ready, we will send SignalAddressReady. If errors are
@@ -114,7 +116,7 @@ class PortInterface {
const rtc::SocketAddress&> SignalReadPacket;
// Emitted each time a packet is sent on this port.
- sigslot::signal2<PortInterface*, const rtc::SentPacket&> SignalSentPacket;
+ sigslot::signal1<const rtc::SentPacket&> SignalSentPacket;
virtual std::string ToString() const = 0;
diff --git a/webrtc/p2p/base/pseudotcp.cc b/webrtc/p2p/base/pseudotcp.cc
index 5f035ca652..6281315dc1 100644
--- a/webrtc/p2p/base/pseudotcp.cc
+++ b/webrtc/p2p/base/pseudotcp.cc
@@ -16,6 +16,7 @@
#include <algorithm>
#include <set>
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/basictypes.h"
#include "webrtc/base/bytebuffer.h"
#include "webrtc/base/byteorder.h"
@@ -187,7 +188,7 @@ void ReportStats() {
char buffer[256];
size_t len = 0;
for (int i = 0; i < S_NUM_STATS; ++i) {
- len += rtc::sprintfn(buffer, ARRAY_SIZE(buffer), "%s%s:%d",
+ len += rtc::sprintfn(buffer, arraysize(buffer), "%s%s:%d",
(i == 0) ? "" : ",", STAT_NAMES[i], g_stats[i]);
g_stats[i] = 0;
}
diff --git a/webrtc/p2p/base/relayport.cc b/webrtc/p2p/base/relayport.cc
index 88adcf2f88..19883a3121 100644
--- a/webrtc/p2p/base/relayport.cc
+++ b/webrtc/p2p/base/relayport.cc
@@ -754,7 +754,7 @@ void RelayEntry::OnReadPacket(
void RelayEntry::OnSentPacket(rtc::AsyncPacketSocket* socket,
const rtc::SentPacket& sent_packet) {
- port_->OnSentPacket(sent_packet);
+ port_->OnSentPacket(socket, sent_packet);
}
void RelayEntry::OnReadyToSend(rtc::AsyncPacketSocket* socket) {
diff --git a/webrtc/p2p/base/relayport.h b/webrtc/p2p/base/relayport.h
index 8452b5b430..402736c34d 100644
--- a/webrtc/p2p/base/relayport.h
+++ b/webrtc/p2p/base/relayport.h
@@ -29,7 +29,7 @@ class RelayConnection;
// is created. The RelayEntry will try to reach the remote destination
// by connecting to all available server addresses in a pre defined
// order with a small delay in between. When a connection is
-// successful all other connection attemts are aborted.
+// successful all other connection attempts are aborted.
class RelayPort : public Port {
public:
typedef std::pair<rtc::Socket::Option, int> OptionValue;
@@ -46,7 +46,7 @@ class RelayPort : public Port {
return new RelayPort(thread, factory, network, ip, min_port, max_port,
username, password);
}
- virtual ~RelayPort();
+ ~RelayPort() override;
void AddServerAddress(const ProtocolAddress& addr);
void AddExternalAddress(const ProtocolAddress& addr);
@@ -54,12 +54,16 @@ class RelayPort : public Port {
const std::vector<OptionValue>& options() const { return options_; }
bool HasMagicCookie(const char* data, size_t size);
- virtual void PrepareAddress();
- virtual Connection* CreateConnection(const Candidate& address,
- CandidateOrigin origin);
- virtual int SetOption(rtc::Socket::Option opt, int value);
- virtual int GetOption(rtc::Socket::Option opt, int* value);
- virtual int GetError();
+ void PrepareAddress() override;
+ Connection* CreateConnection(const Candidate& address,
+ CandidateOrigin origin) override;
+ int SetOption(rtc::Socket::Option opt, int value) override;
+ int GetOption(rtc::Socket::Option opt, int* value) override;
+ int GetError() override;
+ bool SupportsProtocol(const std::string& protocol) const override {
+ // Relay port may create both TCP and UDP connections.
+ return true;
+ }
const ProtocolAddress * ServerAddress(size_t index) const;
bool IsReady() { return ready_; }
@@ -81,10 +85,11 @@ class RelayPort : public Port {
void SetReady();
- virtual int SendTo(const void* data, size_t size,
- const rtc::SocketAddress& addr,
- const rtc::PacketOptions& options,
- bool payload);
+ int SendTo(const void* data,
+ size_t size,
+ const rtc::SocketAddress& addr,
+ const rtc::PacketOptions& options,
+ bool payload) override;
// Dispatches the given packet to the port or connection as appropriate.
void OnReadPacket(const char* data, size_t size,
@@ -92,6 +97,11 @@ class RelayPort : public Port {
ProtocolType proto,
const rtc::PacketTime& packet_time);
+  // The OnSentPacket callback is left empty here since sent packets are
+  // handled by RelayEntry.
+ void OnSentPacket(rtc::AsyncPacketSocket* socket,
+ const rtc::SentPacket& sent_packet) override {}
+
private:
friend class RelayEntry;
diff --git a/webrtc/p2p/base/stun_unittest.cc b/webrtc/p2p/base/stun_unittest.cc
index cd4f7e1cbb..12492570c4 100644
--- a/webrtc/p2p/base/stun_unittest.cc
+++ b/webrtc/p2p/base/stun_unittest.cc
@@ -11,6 +11,7 @@
#include <string>
#include "webrtc/p2p/base/stun.h"
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/bytebuffer.h"
#include "webrtc/base/gunit.h"
#include "webrtc/base/logging.h"
@@ -515,11 +516,11 @@ TEST_F(StunTest, MessageTypes) {
STUN_BINDING_REQUEST, STUN_BINDING_INDICATION,
STUN_BINDING_RESPONSE, STUN_BINDING_ERROR_RESPONSE
};
- for (int i = 0; i < ARRAY_SIZE(types); ++i) {
- EXPECT_EQ(i == 0, IsStunRequestType(types[i]));
- EXPECT_EQ(i == 1, IsStunIndicationType(types[i]));
- EXPECT_EQ(i == 2, IsStunSuccessResponseType(types[i]));
- EXPECT_EQ(i == 3, IsStunErrorResponseType(types[i]));
+ for (size_t i = 0; i < arraysize(types); ++i) {
+ EXPECT_EQ(i == 0U, IsStunRequestType(types[i]));
+ EXPECT_EQ(i == 1U, IsStunIndicationType(types[i]));
+ EXPECT_EQ(i == 2U, IsStunSuccessResponseType(types[i]));
+ EXPECT_EQ(i == 3U, IsStunErrorResponseType(types[i]));
EXPECT_EQ(1, types[i] & 0xFEEF);
}
}
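// Editorial note: arraysize() (webrtc/base/arraysize.h) replaces ARRAY_SIZE
// throughout this change. It yields a compile-time size_t and fails to compile
// when handed a pointer, which the old sizeof-based macro silently accepted.
// A minimal sketch of the loop pattern the updated tests use:

#include <cstddef>
#include "webrtc/base/arraysize.h"

void Example() {
  const int types[] = {1, 2, 3, 4};
  for (size_t i = 0; i < arraysize(types); ++i) {
    // The index is size_t, so comparisons use unsigned literals (0U, 1U, ...)
    // to avoid signed/unsigned warnings, as in the updated assertions.
  }
}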
diff --git a/webrtc/p2p/base/stunport.cc b/webrtc/p2p/base/stunport.cc
index 1598fe43ce..8f37dd5218 100644
--- a/webrtc/p2p/base/stunport.cc
+++ b/webrtc/p2p/base/stunport.cc
@@ -13,6 +13,7 @@
#include "webrtc/p2p/base/common.h"
#include "webrtc/p2p/base/portallocator.h"
#include "webrtc/p2p/base/stun.h"
+#include "webrtc/base/checks.h"
#include "webrtc/base/common.h"
#include "webrtc/base/helpers.h"
#include "webrtc/base/ipaddress.h"
@@ -23,15 +24,19 @@ namespace cricket {
// TODO: Move these to a common place (used in relayport too)
const int KEEPALIVE_DELAY = 10 * 1000; // 10 seconds - sort timeouts
-const int RETRY_DELAY = 50; // 50ms, from ICE spec
const int RETRY_TIMEOUT = 50 * 1000; // ICE says 50 secs
+// Stop sending STUN binding requests after this amount of time
+// (in milliseconds) because the connection binding requests should keep
+// the NAT binding alive.
+const int KEEP_ALIVE_TIMEOUT = 2 * 60 * 1000; // 2 minutes
// Handles a binding request sent to the STUN server.
class StunBindingRequest : public StunRequest {
public:
- StunBindingRequest(UDPPort* port, bool keep_alive,
- const rtc::SocketAddress& addr)
- : port_(port), keep_alive_(keep_alive), server_addr_(addr) {
+ StunBindingRequest(UDPPort* port,
+ const rtc::SocketAddress& addr,
+ uint32_t deadline)
+ : port_(port), server_addr_(addr), deadline_(deadline) {
start_time_ = rtc::Time();
}
@@ -58,10 +63,10 @@ class StunBindingRequest : public StunRequest {
}
// We will do a keep-alive regardless of whether this request succeeds.
- // This should have almost no impact on network usage.
- if (keep_alive_) {
+    // It will be stopped after |deadline_|, mostly to conserve battery life.
+ if (rtc::Time() <= deadline_) {
port_->requests_.SendDelayed(
- new StunBindingRequest(port_, true, server_addr_),
+ new StunBindingRequest(port_, server_addr_, deadline_),
port_->stun_keepalive_delay());
}
}
@@ -79,10 +84,10 @@ class StunBindingRequest : public StunRequest {
port_->OnStunBindingOrResolveRequestFailed(server_addr_);
- if (keep_alive_
- && (rtc::TimeSince(start_time_) <= RETRY_TIMEOUT)) {
+ uint32_t now = rtc::Time();
+ if (now <= deadline_ && rtc::TimeDiff(now, start_time_) <= RETRY_TIMEOUT) {
port_->requests_.SendDelayed(
- new StunBindingRequest(port_, true, server_addr_),
+ new StunBindingRequest(port_, server_addr_, deadline_),
port_->stun_keepalive_delay());
}
}
@@ -93,20 +98,13 @@ class StunBindingRequest : public StunRequest {
<< " (" << port_->Network()->name() << ")";
port_->OnStunBindingOrResolveRequestFailed(server_addr_);
-
- if (keep_alive_
- && (rtc::TimeSince(start_time_) <= RETRY_TIMEOUT)) {
- port_->requests_.SendDelayed(
- new StunBindingRequest(port_, true, server_addr_),
- RETRY_DELAY);
- }
}
private:
UDPPort* port_;
- bool keep_alive_;
const rtc::SocketAddress server_addr_;
uint32_t start_time_;
+ uint32_t deadline_;
};
UDPPort::AddressResolver::AddressResolver(
@@ -116,7 +114,10 @@ UDPPort::AddressResolver::AddressResolver(
UDPPort::AddressResolver::~AddressResolver() {
for (ResolverMap::iterator it = resolvers_.begin();
it != resolvers_.end(); ++it) {
- it->second->Destroy(true);
+    // TODO(guoweis): Change to asynchronous DNS resolution. Passing true to
+    // Destroy() would be safer, since it keeps the code from being unloaded
+    // before the thread exits, but it currently hangs. Please see webrtc bug
+    // 5139.
+ it->second->Destroy(false);
}
}
@@ -166,15 +167,19 @@ UDPPort::UDPPort(rtc::Thread* thread,
const std::string& username,
const std::string& password,
const std::string& origin,
- bool emit_localhost_for_anyaddress)
- : Port(thread, factory, network, socket->GetLocalAddress().ipaddr(),
- username, password),
+ bool emit_local_for_anyaddress)
+ : Port(thread,
+ factory,
+ network,
+ socket->GetLocalAddress().ipaddr(),
+ username,
+ password),
requests_(thread),
socket_(socket),
error_(0),
ready_(false),
stun_keepalive_delay_(KEEPALIVE_DELAY),
- emit_localhost_for_anyaddress_(emit_localhost_for_anyaddress) {
+ emit_local_for_anyaddress_(emit_local_for_anyaddress) {
requests_.set_origin(origin);
}
@@ -187,7 +192,7 @@ UDPPort::UDPPort(rtc::Thread* thread,
const std::string& username,
const std::string& password,
const std::string& origin,
- bool emit_localhost_for_anyaddress)
+ bool emit_local_for_anyaddress)
: Port(thread,
LOCAL_PORT_TYPE,
factory,
@@ -202,7 +207,7 @@ UDPPort::UDPPort(rtc::Thread* thread,
error_(0),
ready_(false),
stun_keepalive_delay_(KEEPALIVE_DELAY),
- emit_localhost_for_anyaddress_(emit_localhost_for_anyaddress) {
+ emit_local_for_anyaddress_(emit_local_for_anyaddress) {
requests_.set_origin(origin);
}
@@ -248,9 +253,10 @@ void UDPPort::MaybePrepareStunCandidate() {
}
Connection* UDPPort::CreateConnection(const Candidate& address,
- CandidateOrigin origin) {
- if (address.protocol() != "udp")
+ CandidateOrigin origin) {
+ if (!SupportsProtocol(address.protocol())) {
return NULL;
+ }
if (!IsCompatibleAddress(address.address())) {
return NULL;
@@ -294,25 +300,27 @@ int UDPPort::GetError() {
void UDPPort::OnLocalAddressReady(rtc::AsyncPacketSocket* socket,
const rtc::SocketAddress& address) {
// When adapter enumeration is disabled and binding to the any address, the
- // loopback address will be issued as a candidate instead if
- // |emit_localhost_for_anyaddress| is true. This is to allow connectivity on
- // demo pages without STUN/TURN to work.
+ // default local address will be issued as a candidate instead if
+ // |emit_local_for_anyaddress| is true. This is to allow connectivity for
+  // applications which absolutely require a HOST candidate.
rtc::SocketAddress addr = address;
- if (addr.IsAnyIP() && emit_localhost_for_anyaddress_) {
- addr.SetIP(rtc::GetLoopbackIP(addr.family()));
- }
+
+ // If MaybeSetDefaultLocalAddress fails, we keep the "any" IP so that at
+ // least the port is listening.
+ MaybeSetDefaultLocalAddress(&addr);
AddAddress(addr, addr, rtc::SocketAddress(), UDP_PROTOCOL_NAME, "", "",
LOCAL_PORT_TYPE, ICE_TYPE_PREFERENCE_HOST, 0, false);
MaybePrepareStunCandidate();
}
-void UDPPort::OnReadPacket(
- rtc::AsyncPacketSocket* socket, const char* data, size_t size,
- const rtc::SocketAddress& remote_addr,
- const rtc::PacketTime& packet_time) {
+void UDPPort::OnReadPacket(rtc::AsyncPacketSocket* socket,
+ const char* data,
+ size_t size,
+ const rtc::SocketAddress& remote_addr,
+ const rtc::PacketTime& packet_time) {
ASSERT(socket == socket_);
- ASSERT(!remote_addr.IsUnresolved());
+ ASSERT(!remote_addr.IsUnresolvedIP());
// Look for a response from the STUN server.
// Even if the response doesn't match one of our outstanding requests, we
@@ -332,7 +340,7 @@ void UDPPort::OnReadPacket(
void UDPPort::OnSentPacket(rtc::AsyncPacketSocket* socket,
const rtc::SentPacket& sent_packet) {
- Port::OnSentPacket(sent_packet);
+ PortInterface::SignalSentPacket(sent_packet);
}
void UDPPort::OnReadyToSend(rtc::AsyncPacketSocket* socket) {
@@ -341,7 +349,7 @@ void UDPPort::OnReadyToSend(rtc::AsyncPacketSocket* socket) {
void UDPPort::SendStunBindingRequests() {
// We will keep pinging the stun server to make sure our NAT pin-hole stays
- // open during the call.
+ // open until the deadline (specified in SendStunBindingRequest).
ASSERT(requests_.empty());
for (ServerAddresses::const_iterator it = server_addresses_.begin();
@@ -356,6 +364,8 @@ void UDPPort::ResolveStunAddress(const rtc::SocketAddress& stun_addr) {
resolver_->SignalDone.connect(this, &UDPPort::OnResolveResult);
}
+ LOG_J(LS_INFO, this) << "Starting STUN host lookup for "
+ << stun_addr.ToSensitiveString();
resolver_->Resolve(stun_addr);
}
@@ -380,15 +390,15 @@ void UDPPort::OnResolveResult(const rtc::SocketAddress& input,
}
}
-void UDPPort::SendStunBindingRequest(
- const rtc::SocketAddress& stun_addr) {
- if (stun_addr.IsUnresolved()) {
+void UDPPort::SendStunBindingRequest(const rtc::SocketAddress& stun_addr) {
+ if (stun_addr.IsUnresolvedIP()) {
ResolveStunAddress(stun_addr);
} else if (socket_->GetState() == rtc::AsyncPacketSocket::STATE_BOUND) {
// Check if |server_addr_| is compatible with the port's ip.
if (IsCompatibleAddress(stun_addr)) {
- requests_.Send(new StunBindingRequest(this, true, stun_addr));
+ requests_.Send(new StunBindingRequest(this, stun_addr,
+ rtc::Time() + KEEP_ALIVE_TIMEOUT));
} else {
// Since we can't send stun messages to the server, we should mark this
// port ready.
@@ -398,6 +408,23 @@ void UDPPort::SendStunBindingRequest(
}
}
+bool UDPPort::MaybeSetDefaultLocalAddress(rtc::SocketAddress* addr) const {
+ if (!addr->IsAnyIP() || !emit_local_for_anyaddress_ ||
+ !Network()->default_local_address_provider()) {
+ return true;
+ }
+ rtc::IPAddress default_address;
+ bool result =
+ Network()->default_local_address_provider()->GetDefaultLocalAddress(
+ addr->family(), &default_address);
+ if (!result || default_address.IsNil()) {
+ return false;
+ }
+
+ addr->SetIP(default_address);
+ return true;
+}
+
void UDPPort::OnStunBindingRequestSucceeded(
const rtc::SocketAddress& stun_server_addr,
const rtc::SocketAddress& stun_reflected_addr) {
@@ -415,7 +442,9 @@ void UDPPort::OnStunBindingRequestSucceeded(
!HasCandidateWithAddress(stun_reflected_addr)) {
rtc::SocketAddress related_address = socket_->GetLocalAddress();
- if (!(candidate_filter() & CF_HOST)) {
+    // If we can't stamp the related address correctly, empty it to avoid
+    // leaking the local address.
+ if (!MaybeSetDefaultLocalAddress(&related_address) ||
+ !(candidate_filter() & CF_HOST)) {
// If candidate filter doesn't have CF_HOST specified, empty raddr to
// avoid local address leakage.
related_address = rtc::EmptySocketAddressWithFamily(
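// Editorial note: the rescheduling rules StunBindingRequest now applies,
// reduced to plain arithmetic (millisecond timestamps; a sketch, not the real
// request machinery). On timeout the request is no longer re-armed at all:

#include <cstdint>

const int RETRY_TIMEOUT = 50 * 1000;           // ICE says 50 secs.
const int KEEP_ALIVE_TIMEOUT = 2 * 60 * 1000;  // deadline = creation + this.

// On a success response: re-arm the keepalive until the deadline passes.
bool RearmOnResponse(uint32_t now, uint32_t deadline) {
  return now <= deadline;
}

// On an error response: additionally give up once RETRY_TIMEOUT has elapsed
// since the first attempt.
bool RearmOnErrorResponse(uint32_t now, uint32_t deadline, uint32_t start) {
  return now <= deadline && now - start <= RETRY_TIMEOUT;
}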
diff --git a/webrtc/p2p/base/stunport.h b/webrtc/p2p/base/stunport.h
index 62b23cf074..ecf61a782d 100644
--- a/webrtc/p2p/base/stunport.h
+++ b/webrtc/p2p/base/stunport.h
@@ -35,10 +35,9 @@ class UDPPort : public Port {
const std::string& username,
const std::string& password,
const std::string& origin,
- bool emit_localhost_for_anyaddress) {
- UDPPort* port = new UDPPort(thread, factory, network, socket,
- username, password, origin,
- emit_localhost_for_anyaddress);
+ bool emit_local_for_anyaddress) {
+ UDPPort* port = new UDPPort(thread, factory, network, socket, username,
+ password, origin, emit_local_for_anyaddress);
if (!port->Init()) {
delete port;
port = NULL;
@@ -55,11 +54,10 @@ class UDPPort : public Port {
const std::string& username,
const std::string& password,
const std::string& origin,
- bool emit_localhost_for_anyaddress) {
- UDPPort* port = new UDPPort(thread, factory, network,
- ip, min_port, max_port,
- username, password, origin,
- emit_localhost_for_anyaddress);
+ bool emit_local_for_anyaddress) {
+ UDPPort* port =
+ new UDPPort(thread, factory, network, ip, min_port, max_port, username,
+ password, origin, emit_local_for_anyaddress);
if (!port->Init()) {
delete port;
port = NULL;
@@ -97,6 +95,9 @@ class UDPPort : public Port {
OnReadPacket(socket, data, size, remote_addr, packet_time);
return true;
}
+ virtual bool SupportsProtocol(const std::string& protocol) const {
+ return protocol == UDP_PROTOCOL_NAME;
+ }
void set_stun_keepalive_delay(int delay) {
stun_keepalive_delay_ = delay;
@@ -115,7 +116,7 @@ class UDPPort : public Port {
const std::string& username,
const std::string& password,
const std::string& origin,
- bool emit_localhost_for_anyaddress);
+ bool emit_local_for_anyaddress);
UDPPort(rtc::Thread* thread,
rtc::PacketSocketFactory* factory,
@@ -124,7 +125,7 @@ class UDPPort : public Port {
const std::string& username,
const std::string& password,
const std::string& origin,
- bool emit_localhost_for_anyaddress);
+ bool emit_local_for_anyaddress);
bool Init();
@@ -150,6 +151,12 @@ class UDPPort : public Port {
void SendStunBindingRequests();
+ // Helper function which will set |addr|'s IP to the default local address if
+  // |addr| is the "any" address and |emit_local_for_anyaddress_| is true. A
+  // false return value indicates that the operation failed and the address
+  // shouldn't be used by any candidate.
+ bool MaybeSetDefaultLocalAddress(rtc::SocketAddress* addr) const;
+
private:
// A helper class which can be called repeatedly to resolve multiple
// addresses, as opposed to rtc::AsyncResolverInterface, which can only
@@ -211,8 +218,9 @@ class UDPPort : public Port {
bool ready_;
int stun_keepalive_delay_;
- // This is true when PORTALLOCATOR_ENABLE_LOCALHOST_CANDIDATE is specified.
- bool emit_localhost_for_anyaddress_;
+ // This is true by default and false when
+ // PORTALLOCATOR_DISABLE_DEFAULT_LOCAL_CANDIDATE is specified.
+ bool emit_local_for_anyaddress_;
friend class StunBindingRequest;
};
diff --git a/webrtc/p2p/base/stunrequest.cc b/webrtc/p2p/base/stunrequest.cc
index df5614d3cc..ce0364e8db 100644
--- a/webrtc/p2p/base/stunrequest.cc
+++ b/webrtc/p2p/base/stunrequest.cc
@@ -53,6 +53,16 @@ void StunRequestManager::SendDelayed(StunRequest* request, int delay) {
}
}
+void StunRequestManager::Flush(int msg_type) {
+  for (const auto& kv : requests_) {
+ StunRequest* request = kv.second;
+ if (msg_type == kAllRequests || msg_type == request->type()) {
+ thread_->Clear(request, MSG_STUN_SEND);
+ thread_->Send(request, MSG_STUN_SEND, NULL);
+ }
+ }
+}
+
void StunRequestManager::Remove(StunRequest* request) {
ASSERT(request->manager() == this);
RequestMap::iterator iter = requests_.find(request->id());
diff --git a/webrtc/p2p/base/stunrequest.h b/webrtc/p2p/base/stunrequest.h
index 267b4a1959..44c1ebff56 100644
--- a/webrtc/p2p/base/stunrequest.h
+++ b/webrtc/p2p/base/stunrequest.h
@@ -21,6 +21,8 @@ namespace cricket {
class StunRequest;
+const int kAllRequests = 0;
+
// Manages a set of STUN requests, sending and resending until we receive a
// response or determine that the request has timed out.
class StunRequestManager {
@@ -32,6 +34,11 @@ class StunRequestManager {
void Send(StunRequest* request);
void SendDelayed(StunRequest* request, int delay);
+ // If |msg_type| is kAllRequests, sends all pending requests right away.
+ // Otherwise, sends those that have a matching type right away.
+ // Only for testing.
+ void Flush(int msg_type);
+
// Removes a stun request that was added previously. This will happen
// automatically when a request succeeds, fails, or times out.
void Remove(StunRequest* request);
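// Editorial note: a sketch of driving the new test-only Flush() hook:

#include "webrtc/p2p/base/stunrequest.h"

void FlushPendingRequests(cricket::StunRequestManager* requests) {
  // Push every pending request onto the wire now instead of waiting for its
  // scheduled send time.
  requests->Flush(cricket::kAllRequests);
  // Or flush only one kind of request, selected by STUN message type.
  requests->Flush(cricket::STUN_BINDING_REQUEST);
}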
diff --git a/webrtc/p2p/base/tcpport.cc b/webrtc/p2p/base/tcpport.cc
index 2590d0aca8..cd3c9192e4 100644
--- a/webrtc/p2p/base/tcpport.cc
+++ b/webrtc/p2p/base/tcpport.cc
@@ -125,9 +125,7 @@ TCPPort::~TCPPort() {
Connection* TCPPort::CreateConnection(const Candidate& address,
CandidateOrigin origin) {
- // We only support TCP protocols
- if ((address.protocol() != TCP_PROTOCOL_NAME) &&
- (address.protocol() != SSLTCP_PROTOCOL_NAME)) {
+ if (!SupportsProtocol(address.protocol())) {
return NULL;
}
@@ -184,10 +182,13 @@ void TCPPort::PrepareAddress() {
} else {
LOG_J(LS_INFO, this) << "Not listening due to firewall restrictions.";
// Note: We still add the address, since otherwise the remote side won't
- // recognize our incoming TCP connections.
- AddAddress(rtc::SocketAddress(ip(), 0), rtc::SocketAddress(ip(), 0),
- rtc::SocketAddress(), TCP_PROTOCOL_NAME, "", TCPTYPE_ACTIVE_STR,
- LOCAL_PORT_TYPE, ICE_TYPE_PREFERENCE_HOST_TCP, 0, true);
+ // recognize our incoming TCP connections. According to
+    // https://tools.ietf.org/html/rfc6544#section-4.5, for an active
+    // candidate the port must be set to the discard port, i.e. 9.
+ AddAddress(rtc::SocketAddress(ip(), DISCARD_PORT),
+ rtc::SocketAddress(ip(), 0), rtc::SocketAddress(),
+ TCP_PROTOCOL_NAME, "", TCPTYPE_ACTIVE_STR, LOCAL_PORT_TYPE,
+ ICE_TYPE_PREFERENCE_HOST_TCP, 0, true);
}
}
@@ -257,6 +258,7 @@ void TCPPort::OnNewConnection(rtc::AsyncPacketSocket* socket,
incoming.socket = new_socket;
incoming.socket->SignalReadPacket.connect(this, &TCPPort::OnReadPacket);
incoming.socket->SignalReadyToSend.connect(this, &TCPPort::OnReadyToSend);
+ incoming.socket->SignalSentPacket.connect(this, &TCPPort::OnSentPacket);
LOG_J(LS_VERBOSE, this) << "Accepted connection from "
<< incoming.addr.ToSensitiveString();
@@ -285,6 +287,11 @@ void TCPPort::OnReadPacket(rtc::AsyncPacketSocket* socket,
Port::OnReadPacket(data, size, remote_addr, PROTO_TCP);
}
+void TCPPort::OnSentPacket(rtc::AsyncPacketSocket* socket,
+ const rtc::SentPacket& sent_packet) {
+ PortInterface::SignalSentPacket(sent_packet);
+}
+
void TCPPort::OnReadyToSend(rtc::AsyncPacketSocket* socket) {
Port::OnReadyToSend();
}
diff --git a/webrtc/p2p/base/tcpport.h b/webrtc/p2p/base/tcpport.h
index a64c5eeab9..cfc6245601 100644
--- a/webrtc/p2p/base/tcpport.h
+++ b/webrtc/p2p/base/tcpport.h
@@ -45,16 +45,19 @@ class TCPPort : public Port {
}
return port;
}
- virtual ~TCPPort();
+ ~TCPPort() override;
- virtual Connection* CreateConnection(const Candidate& address,
- CandidateOrigin origin);
+ Connection* CreateConnection(const Candidate& address,
+ CandidateOrigin origin) override;
- virtual void PrepareAddress();
+ void PrepareAddress() override;
- virtual int GetOption(rtc::Socket::Option opt, int* value);
- virtual int SetOption(rtc::Socket::Option opt, int value);
- virtual int GetError();
+ int GetOption(rtc::Socket::Option opt, int* value) override;
+ int SetOption(rtc::Socket::Option opt, int value) override;
+ int GetError() override;
+ bool SupportsProtocol(const std::string& protocol) const override {
+ return protocol == TCP_PROTOCOL_NAME || protocol == SSLTCP_PROTOCOL_NAME;
+ }
protected:
TCPPort(rtc::Thread* thread,
@@ -69,10 +72,11 @@ class TCPPort : public Port {
bool Init();
// Handles sending using the local TCP socket.
- virtual int SendTo(const void* data, size_t size,
- const rtc::SocketAddress& addr,
- const rtc::PacketOptions& options,
- bool payload);
+ int SendTo(const void* data,
+ size_t size,
+ const rtc::SocketAddress& addr,
+ const rtc::PacketOptions& options,
+ bool payload) override;
// Accepts incoming TCP connection.
void OnNewConnection(rtc::AsyncPacketSocket* socket,
@@ -93,6 +97,9 @@ class TCPPort : public Port {
const rtc::SocketAddress& remote_addr,
const rtc::PacketTime& packet_time);
+ void OnSentPacket(rtc::AsyncPacketSocket* socket,
+ const rtc::SentPacket& sent_packet) override;
+
void OnReadyToSend(rtc::AsyncPacketSocket* socket);
void OnAddressReady(rtc::AsyncPacketSocket* socket,
@@ -113,15 +120,16 @@ class TCPConnection : public Connection {
// Connection is outgoing unless socket is specified
TCPConnection(TCPPort* port, const Candidate& candidate,
rtc::AsyncPacketSocket* socket = 0);
- virtual ~TCPConnection();
+ ~TCPConnection() override;
- virtual int Send(const void* data, size_t size,
- const rtc::PacketOptions& options);
- virtual int GetError();
+ int Send(const void* data,
+ size_t size,
+ const rtc::PacketOptions& options) override;
+ int GetError() override;
rtc::AsyncPacketSocket* socket() { return socket_.get(); }
- void OnMessage(rtc::Message* pmsg);
+ void OnMessage(rtc::Message* pmsg) override;
// Allow test cases to overwrite the default timeout period.
int reconnection_timeout() const { return reconnection_timeout_; }
@@ -136,8 +144,8 @@ class TCPConnection : public Connection {
// Set waiting_for_stun_binding_complete_ to false to allow data packets in
// addition to what Port::OnConnectionRequestResponse does.
- virtual void OnConnectionRequestResponse(ConnectionRequest* req,
- StunMessage* response);
+ void OnConnectionRequestResponse(ConnectionRequest* req,
+ StunMessage* response) override;
private:
// Helper function to handle the case when Ping or Send fails with error
diff --git a/webrtc/p2p/base/transport.cc b/webrtc/p2p/base/transport.cc
index 2328e4587c..eff10aa0a9 100644
--- a/webrtc/p2p/base/transport.cc
+++ b/webrtc/p2p/base/transport.cc
@@ -305,8 +305,8 @@ bool Transport::GetStats(TransportStats* stats) {
TransportChannelImpl* channel = kv.second;
TransportChannelStats substats;
substats.component = channel->component();
- channel->GetSrtpCryptoSuite(&substats.srtp_cipher);
- channel->GetSslCipherSuite(&substats.ssl_cipher);
+ channel->GetSrtpCryptoSuite(&substats.srtp_crypto_suite);
+ channel->GetSslCipherSuite(&substats.ssl_cipher_suite);
if (!channel->GetStats(&substats.connection_infos)) {
return false;
}
diff --git a/webrtc/p2p/base/transport.h b/webrtc/p2p/base/transport.h
index 955eb42098..6b4b37d4c5 100644
--- a/webrtc/p2p/base/transport.h
+++ b/webrtc/p2p/base/transport.h
@@ -123,8 +123,8 @@ typedef std::vector<ConnectionInfo> ConnectionInfos;
struct TransportChannelStats {
int component = 0;
ConnectionInfos connection_infos;
- std::string srtp_cipher;
- int ssl_cipher = 0;
+ int srtp_crypto_suite = rtc::SRTP_INVALID_CRYPTO_SUITE;
+ int ssl_cipher_suite = rtc::TLS_NULL_WITH_NULL_NULL;
};
// Information about all the channels of a transport.
@@ -140,7 +140,11 @@ struct TransportStats {
// Information about ICE configuration.
struct IceConfig {
// The ICE connection receiving timeout value.
+ // TODO(honghaiz): Remove suffix _ms to be consistent.
int receiving_timeout_ms = -1;
+ // Time interval in milliseconds to ping a backup connection when the ICE
+ // channel is strongly connected.
+ int backup_connection_ping_interval = -1;
// If true, the most recent port allocator session will keep on running.
bool gather_continually = false;
};
diff --git a/webrtc/p2p/base/transportchannel.cc b/webrtc/p2p/base/transportchannel.cc
index 63d84494e5..6cbe2b7583 100644
--- a/webrtc/p2p/base/transportchannel.cc
+++ b/webrtc/p2p/base/transportchannel.cc
@@ -51,7 +51,20 @@ void TransportChannel::set_dtls_state(DtlsTransportState state) {
LOG_J(LS_VERBOSE, this) << "set_dtls_state from:" << dtls_state_ << " to "
<< state;
dtls_state_ = state;
- SignalDtlsState(this);
+ SignalDtlsState(this, state);
+}
+
+bool TransportChannel::SetSrtpCryptoSuites(const std::vector<int>& ciphers) {
+ return false;
+}
+
+// TODO(guoweis): Remove this function once everything is moved away.
+bool TransportChannel::SetSrtpCiphers(const std::vector<std::string>& ciphers) {
+ std::vector<int> crypto_suites;
+  for (const auto& cipher : ciphers) {
+ crypto_suites.push_back(rtc::SrtpCryptoSuiteFromName(cipher));
+ }
+ return SetSrtpCryptoSuites(crypto_suites);
}
} // namespace cricket
diff --git a/webrtc/p2p/base/transportchannel.h b/webrtc/p2p/base/transportchannel.h
index 767a5f68bf..b91af139b7 100644
--- a/webrtc/p2p/base/transportchannel.h
+++ b/webrtc/p2p/base/transportchannel.h
@@ -79,8 +79,9 @@ class TransportChannel : public sigslot::has_slots<> {
// Emitted when the TransportChannel's ability to send has changed.
sigslot::signal1<TransportChannel*> SignalReadyToSend;
sigslot::signal1<TransportChannel*> SignalReceivingState;
- // Emitted when the DtlsTransportState has changed.
- sigslot::signal1<TransportChannel*> SignalDtlsState;
+  // Emitted whenever DTLS-SRTP is set up, which will require setting up a new
+ // SRTP context.
+ sigslot::signal2<TransportChannel*, DtlsTransportState> SignalDtlsState;
// Attempts to send the given packet. The return value is < 0 on failure.
// TODO: Remove the default argument once channel code is updated.
@@ -107,14 +108,17 @@ class TransportChannel : public sigslot::has_slots<> {
// Default implementation.
virtual bool GetSslRole(rtc::SSLRole* role) const = 0;
- // Sets up the ciphers to use for DTLS-SRTP.
- virtual bool SetSrtpCiphers(const std::vector<std::string>& ciphers) = 0;
+ // Sets up the ciphers to use for DTLS-SRTP. TODO(guoweis): Make this pure
+  // virtual once all dependencies have an implementation.
+ virtual bool SetSrtpCryptoSuites(const std::vector<int>& ciphers);
+
+ // Keep the original one for backward compatibility until all dependencies
+ // move away. TODO(guoweis): Remove this function.
+ virtual bool SetSrtpCiphers(const std::vector<std::string>& ciphers);
// Finds out which DTLS-SRTP cipher was negotiated.
// TODO(guoweis): Remove this once all dependencies implement this.
- virtual bool GetSrtpCryptoSuite(std::string* cipher) {
- return false;
- }
+ virtual bool GetSrtpCryptoSuite(int* cipher) { return false; }
// Finds out which DTLS cipher was negotiated.
// TODO(guoweis): Remove this once all dependencies implement this.
@@ -154,9 +158,6 @@ class TransportChannel : public sigslot::has_slots<> {
std::string ToString() const;
protected:
- // TODO(honghaiz): Remove this once chromium's unit tests no longer call it.
- void set_readable(bool readable) { set_receiving(readable); }
-
// Sets the writable state, signaling if necessary.
void set_writable(bool writable);
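// Editorial note: the caller-side migration to the int-based crypto suite
// API; the suite constant and cipher name below are illustrative choices:

#include <string>
#include <vector>
#include "webrtc/p2p/base/transportchannel.h"

void ConfigureDtlsSrtp(cricket::TransportChannel* channel) {
  std::vector<int> suites = {rtc::SRTP_AES128_CM_SHA1_80};
  channel->SetSrtpCryptoSuites(suites);

  // Legacy string-based callers keep working for now: the base class converts
  // each name with rtc::SrtpCryptoSuiteFromName() and forwards the result to
  // SetSrtpCryptoSuites().
  std::vector<std::string> names = {"AES_CM_128_HMAC_SHA1_80"};
  channel->SetSrtpCiphers(names);
}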
diff --git a/webrtc/p2p/base/transportcontroller.cc b/webrtc/p2p/base/transportcontroller.cc
index 22b827a1a5..053388eeb8 100644
--- a/webrtc/p2p/base/transportcontroller.cc
+++ b/webrtc/p2p/base/transportcontroller.cc
@@ -66,9 +66,10 @@ void TransportController::SetIceRole(IceRole ice_role) {
rtc::Bind(&TransportController::SetIceRole_w, this, ice_role));
}
-bool TransportController::GetSslRole(rtc::SSLRole* role) {
- return worker_thread_->Invoke<bool>(
- rtc::Bind(&TransportController::GetSslRole_w, this, role));
+bool TransportController::GetSslRole(const std::string& transport_name,
+ rtc::SSLRole* role) {
+ return worker_thread_->Invoke<bool>(rtc::Bind(
+ &TransportController::GetSslRole_w, this, transport_name, role));
}
bool TransportController::SetLocalCertificate(
@@ -343,13 +344,16 @@ void TransportController::SetIceRole_w(IceRole ice_role) {
}
}
-bool TransportController::GetSslRole_w(rtc::SSLRole* role) {
+bool TransportController::GetSslRole_w(const std::string& transport_name,
+ rtc::SSLRole* role) {
RTC_DCHECK(worker_thread()->IsCurrent());
- if (transports_.empty()) {
+ Transport* t = GetTransport_w(transport_name);
+ if (!t) {
return false;
}
- return transports_.begin()->second->GetSslRole(role);
+
+ return t->GetSslRole(role);
}
bool TransportController::SetLocalCertificate_w(
diff --git a/webrtc/p2p/base/transportcontroller.h b/webrtc/p2p/base/transportcontroller.h
index 8d57b460e8..450e6b391f 100644
--- a/webrtc/p2p/base/transportcontroller.h
+++ b/webrtc/p2p/base/transportcontroller.h
@@ -48,11 +48,7 @@ class TransportController : public sigslot::has_slots<>,
void SetIceConfig(const IceConfig& config);
void SetIceRole(IceRole ice_role);
- // TODO(deadbeef) - Return role of each transport, as role may differ from
- // one another.
- // In current implementaion we just return the role of the first transport
- // alphabetically.
- bool GetSslRole(rtc::SSLRole* role);
+ bool GetSslRole(const std::string& transport_name, rtc::SSLRole* role);
// Specifies the identity to use in this session.
// Can only be called once.
@@ -160,7 +156,7 @@ class TransportController : public sigslot::has_slots<>,
bool SetSslMaxProtocolVersion_w(rtc::SSLProtocolVersion version);
void SetIceConfig_w(const IceConfig& config);
void SetIceRole_w(IceRole ice_role);
- bool GetSslRole_w(rtc::SSLRole* role);
+ bool GetSslRole_w(const std::string& transport_name, rtc::SSLRole* role);
bool SetLocalCertificate_w(
const rtc::scoped_refptr<rtc::RTCCertificate>& certificate);
bool GetLocalCertificate_w(
@@ -202,7 +198,7 @@ class TransportController : public sigslot::has_slots<>,
std::vector<RefCountedChannel> channels_;
PortAllocator* const port_allocator_ = nullptr;
- rtc::SSLProtocolVersion ssl_max_version_ = rtc::SSL_PROTOCOL_DTLS_10;
+ rtc::SSLProtocolVersion ssl_max_version_ = rtc::SSL_PROTOCOL_DTLS_12;
// Aggregate state for TransportChannelImpls.
IceConnectionState connection_state_ = kIceConnectionConnecting;
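// Editorial note: GetSslRole is now per transport, since roles may differ
// between transports ("audio" below is just an example transport name):

#include "webrtc/p2p/base/transportcontroller.h"

bool IsDtlsClient(cricket::TransportController* controller) {
  rtc::SSLRole role;
  if (!controller->GetSslRole("audio", &role)) {
    return false;  // No transport with that name exists yet.
  }
  return role == rtc::SSL_CLIENT;
}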
diff --git a/webrtc/p2p/base/transportcontroller_unittest.cc b/webrtc/p2p/base/transportcontroller_unittest.cc
index 23e4dc8067..6ff158e8fc 100644
--- a/webrtc/p2p/base/transportcontroller_unittest.cc
+++ b/webrtc/p2p/base/transportcontroller_unittest.cc
@@ -262,21 +262,18 @@ TEST_F(TransportControllerTest, TestGetSslRole) {
ASSERT_NE(nullptr, channel);
ASSERT_TRUE(channel->SetSslRole(rtc::SSL_CLIENT));
rtc::SSLRole role;
- EXPECT_TRUE(transport_controller_->GetSslRole(&role));
+ EXPECT_FALSE(transport_controller_->GetSslRole("video", &role));
+ EXPECT_TRUE(transport_controller_->GetSslRole("audio", &role));
EXPECT_EQ(rtc::SSL_CLIENT, role);
}
TEST_F(TransportControllerTest, TestSetAndGetLocalCertificate) {
rtc::scoped_refptr<rtc::RTCCertificate> certificate1 =
- rtc::RTCCertificate::Create(
- rtc::scoped_ptr<rtc::SSLIdentity>(
- rtc::SSLIdentity::Generate("session1", rtc::KT_DEFAULT))
- .Pass());
+ rtc::RTCCertificate::Create(rtc::scoped_ptr<rtc::SSLIdentity>(
+ rtc::SSLIdentity::Generate("session1", rtc::KT_DEFAULT)));
rtc::scoped_refptr<rtc::RTCCertificate> certificate2 =
- rtc::RTCCertificate::Create(
- rtc::scoped_ptr<rtc::SSLIdentity>(
- rtc::SSLIdentity::Generate("session2", rtc::KT_DEFAULT))
- .Pass());
+ rtc::RTCCertificate::Create(rtc::scoped_ptr<rtc::SSLIdentity>(
+ rtc::SSLIdentity::Generate("session2", rtc::KT_DEFAULT)));
rtc::scoped_refptr<rtc::RTCCertificate> returned_certificate;
FakeTransportChannel* channel1 = CreateChannel("audio", 1);
diff --git a/webrtc/p2p/base/transportdescription.cc b/webrtc/p2p/base/transportdescription.cc
index 52033ec9c3..b8f14eaa98 100644
--- a/webrtc/p2p/base/transportdescription.cc
+++ b/webrtc/p2p/base/transportdescription.cc
@@ -10,7 +10,7 @@
#include "webrtc/p2p/base/transportdescription.h"
-#include "webrtc/base/basicdefs.h"
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/stringutils.h"
#include "webrtc/p2p/base/constants.h"
@@ -24,7 +24,7 @@ bool StringToConnectionRole(const std::string& role_str, ConnectionRole* role) {
CONNECTIONROLE_HOLDCONN_STR
};
- for (size_t i = 0; i < ARRAY_SIZE(roles); ++i) {
+ for (size_t i = 0; i < arraysize(roles); ++i) {
if (_stricmp(roles[i], role_str.c_str()) == 0) {
*role = static_cast<ConnectionRole>(CONNECTIONROLE_ACTIVE + i);
return true;
diff --git a/webrtc/p2p/base/transportdescriptionfactory_unittest.cc b/webrtc/p2p/base/transportdescriptionfactory_unittest.cc
index e3992dfdd3..a52d9ed95a 100644
--- a/webrtc/p2p/base/transportdescriptionfactory_unittest.cc
+++ b/webrtc/p2p/base/transportdescriptionfactory_unittest.cc
@@ -26,11 +26,10 @@ using cricket::TransportOptions;
class TransportDescriptionFactoryTest : public testing::Test {
public:
TransportDescriptionFactoryTest()
- : cert1_(rtc::RTCCertificate::Create(scoped_ptr<rtc::SSLIdentity>(
- new rtc::FakeSSLIdentity("User1")).Pass())),
- cert2_(rtc::RTCCertificate::Create(scoped_ptr<rtc::SSLIdentity>(
- new rtc::FakeSSLIdentity("User2")).Pass())) {
- }
+ : cert1_(rtc::RTCCertificate::Create(
+ scoped_ptr<rtc::SSLIdentity>(new rtc::FakeSSLIdentity("User1")))),
+ cert2_(rtc::RTCCertificate::Create(
+ scoped_ptr<rtc::SSLIdentity>(new rtc::FakeSSLIdentity("User2")))) {}
void CheckDesc(const TransportDescription* desc,
const std::string& opt, const std::string& ice_ufrag,
diff --git a/webrtc/p2p/base/turnport.cc b/webrtc/p2p/base/turnport.cc
index 3fdcac5f31..5ed93dd1d8 100644
--- a/webrtc/p2p/base/turnport.cc
+++ b/webrtc/p2p/base/turnport.cc
@@ -38,6 +38,8 @@ static const size_t TURN_CHANNEL_HEADER_SIZE = 4U;
// STUN_ERROR_ALLOCATION_MISMATCH error per rfc5766.
static const size_t MAX_ALLOCATE_MISMATCH_RETRIES = 2;
+static const int TURN_SUCCESS_RESULT_CODE = 0;
+
inline bool IsTurnChannelData(uint16_t msg_type) {
return ((msg_type & 0xC000) == 0x4000); // MSB are 0b01
}
@@ -137,11 +139,19 @@ class TurnEntry : public sigslot::has_slots<> {
TurnPort* port() { return port_; }
int channel_id() const { return channel_id_; }
+ // For testing only.
+ void set_channel_id(int channel_id) { channel_id_ = channel_id; }
+
const rtc::SocketAddress& address() const { return ext_addr_; }
BindState state() const { return state_; }
+ uint32_t destruction_timestamp() { return destruction_timestamp_; }
+ void set_destruction_timestamp(uint32_t destruction_timestamp) {
+ destruction_timestamp_ = destruction_timestamp;
+ }
+
// Helper methods to send permission and channel bind requests.
- void SendCreatePermissionRequest();
+ void SendCreatePermissionRequest(int delay);
void SendChannelBindRequest(int delay);
// Sends a packet to the given destination address.
// This will wrap the packet in STUN if necessary.
@@ -150,8 +160,10 @@ class TurnEntry : public sigslot::has_slots<> {
void OnCreatePermissionSuccess();
void OnCreatePermissionError(StunMessage* response, int code);
+ void OnCreatePermissionTimeout();
void OnChannelBindSuccess();
void OnChannelBindError(StunMessage* response, int code);
+ void OnChannelBindTimeout();
// Signal sent when TurnEntry is destroyed.
sigslot::signal1<TurnEntry*> SignalDestroyed;
@@ -160,6 +172,11 @@ class TurnEntry : public sigslot::has_slots<> {
int channel_id_;
rtc::SocketAddress ext_addr_;
BindState state_;
+ // A non-zero value indicates that this entry is scheduled to be destroyed.
+  // It is also used as an ID for the scheduled destruction event. When the
+  // destruction event actually fires, the TurnEntry will be destroyed only if
+  // the timestamp here matches the one in the firing event.
+ uint32_t destruction_timestamp_ = 0;
};
TurnPort::TurnPort(rtc::Thread* thread,
@@ -239,7 +256,7 @@ TurnPort::~TurnPort() {
}
while (!entries_.empty()) {
- DestroyEntry(entries_.front()->address());
+ DestroyEntry(entries_.front());
}
if (resolver_) {
resolver_->Destroy(false);
@@ -267,7 +284,7 @@ void TurnPort::PrepareAddress() {
server_address_.address.SetPort(TURN_DEFAULT_PORT);
}
- if (server_address_.address.IsUnresolved()) {
+ if (server_address_.address.IsUnresolvedIP()) {
ResolveTurnAddress(server_address_.address);
} else {
// If protocol family of server address doesn't match with local, return.
@@ -334,6 +351,8 @@ bool TurnPort::CreateTurnClientSocket() {
socket_->SignalReadyToSend.connect(this, &TurnPort::OnReadyToSend);
+ socket_->SignalSentPacket.connect(this, &TurnPort::OnSentPacket);
+
// TCP port is ready to send stun requests after the socket is connected,
// while UDP port is ready to do so once the socket is created.
if (server_address_.proto == PROTO_TCP) {
@@ -380,7 +399,7 @@ void TurnPort::OnSocketConnect(rtc::AsyncPacketSocket* socket) {
}
state_ = STATE_CONNECTED; // It is ready to send stun requests.
- if (server_address_.address.IsUnresolved()) {
+ if (server_address_.address.IsUnresolvedIP()) {
server_address_.address = socket_->GetRemoteAddress();
}
@@ -392,11 +411,7 @@ void TurnPort::OnSocketConnect(rtc::AsyncPacketSocket* socket) {
void TurnPort::OnSocketClose(rtc::AsyncPacketSocket* socket, int error) {
LOG_J(LS_WARNING, this) << "Connection with server failed, error=" << error;
ASSERT(socket == socket_);
- if (!ready()) {
- OnAllocateError();
- }
- request_manager_.Clear();
- state_ = STATE_DISCONNECTED;
+ Close();
}
void TurnPort::OnAllocateMismatch() {
@@ -425,7 +440,7 @@ void TurnPort::OnAllocateMismatch() {
Connection* TurnPort::CreateConnection(const Candidate& address,
CandidateOrigin origin) {
// TURN-UDP can only connect to UDP candidates.
- if (address.protocol() != UDP_PROTOCOL_NAME) {
+ if (!SupportsProtocol(address.protocol())) {
return NULL;
}
@@ -438,7 +453,7 @@ Connection* TurnPort::CreateConnection(const Candidate& address,
}
// Create an entry, if needed, so we can get our permissions set up correctly.
- CreateEntry(address.address());
+ CreateOrRefreshEntry(address.address());
// A TURN port will have two candidates, STUN and TURN. STUN may not be
// present in all cases. If present, the STUN candidate will be added first
@@ -454,6 +469,15 @@ Connection* TurnPort::CreateConnection(const Candidate& address,
return NULL;
}
+bool TurnPort::DestroyConnection(const rtc::SocketAddress& address) {
+ Connection* conn = GetConnection(address);
+ if (conn != nullptr) {
+ conn->Destroy();
+ return true;
+ }
+ return false;
+}
+
int TurnPort::SetOption(rtc::Socket::Option opt, int value) {
if (!socket_) {
// If socket is not created yet, these options will be applied during socket
@@ -560,6 +584,11 @@ void TurnPort::OnReadPacket(
}
}
+void TurnPort::OnSentPacket(rtc::AsyncPacketSocket* socket,
+ const rtc::SentPacket& sent_packet) {
+ PortInterface::SignalSentPacket(sent_packet);
+}
+
void TurnPort::OnReadyToSend(rtc::AsyncPacketSocket* socket) {
if (ready()) {
Port::OnReadyToSend();
@@ -602,6 +631,8 @@ void TurnPort::ResolveTurnAddress(const rtc::SocketAddress& address) {
if (resolver_)
return;
+ LOG_J(LS_INFO, this) << "Starting TURN host lookup for "
+ << address.ToSensitiveString();
resolver_ = socket_factory()->CreateAsyncResolver();
resolver_->SignalDone.connect(this, &TurnPort::OnResolveResult);
resolver_->Start(address);
@@ -686,35 +717,59 @@ void TurnPort::OnAllocateError() {
// We will send SignalPortError asynchronously as this can be sent during
// port initialization. This way it will not block the creation of other
// ports.
- thread()->Post(this, MSG_ERROR);
+ thread()->Post(this, MSG_ALLOCATE_ERROR);
}
-void TurnPort::OnMessage(rtc::Message* message) {
- if (message->message_id == MSG_ERROR) {
- SignalPortError(this);
- return;
- } else if (message->message_id == MSG_ALLOCATE_MISMATCH) {
- OnAllocateMismatch();
- return;
- } else if (message->message_id == MSG_TRY_ALTERNATE_SERVER) {
- if (server_address().proto == PROTO_UDP) {
- // Send another allocate request to alternate server, with the received
- // realm and nonce values.
- SendRequest(new TurnAllocateRequest(this), 0);
- } else {
- // Since it's TCP, we have to delete the connected socket and reconnect
- // with the alternate server. PrepareAddress will send stun binding once
- // the new socket is connected.
- ASSERT(server_address().proto == PROTO_TCP);
- ASSERT(!SharedSocket());
- delete socket_;
- socket_ = NULL;
- PrepareAddress();
- }
- return;
+void TurnPort::OnTurnRefreshError() {
+ // Need to Close the port asynchronously because otherwise, the refresh
+ // request may be deleted twice: once at the end of message processing
+ // and again in Close().
+ thread()->Post(this, MSG_REFRESH_ERROR);
+}
+
+void TurnPort::Close() {
+ if (!ready()) {
+ OnAllocateError();
+ }
+ request_manager_.Clear();
+ // Stop the port from creating new connections.
+ state_ = STATE_DISCONNECTED;
+ // Delete all existing connections; stop sending data.
+ for (auto kv : connections()) {
+ kv.second->Destroy();
}
+}
- Port::OnMessage(message);
+void TurnPort::OnMessage(rtc::Message* message) {
+ switch (message->message_id) {
+ case MSG_ALLOCATE_ERROR:
+ SignalPortError(this);
+ break;
+ case MSG_ALLOCATE_MISMATCH:
+ OnAllocateMismatch();
+ break;
+ case MSG_REFRESH_ERROR:
+ Close();
+ break;
+ case MSG_TRY_ALTERNATE_SERVER:
+ if (server_address().proto == PROTO_UDP) {
+ // Send another allocate request to alternate server, with the received
+ // realm and nonce values.
+ SendRequest(new TurnAllocateRequest(this), 0);
+ } else {
+ // Since it's TCP, we have to delete the connected socket and reconnect
+ // with the alternate server. PrepareAddress will send stun binding once
+ // the new socket is connected.
+ ASSERT(server_address().proto == PROTO_TCP);
+ ASSERT(!SharedSocket());
+ delete socket_;
+ socket_ = NULL;
+ PrepareAddress();
+ }
+ break;
+ default:
+ Port::OnMessage(message);
+ }
}
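The refresh-error path above illustrates a common teardown idiom: post a message to the owning thread and finish cleanup in OnMessage(), so the request object that reported the error is not deleted while it is still on the call stack. A minimal sketch of the pattern, assuming this era's rtc::Thread/rtc::MessageHandler API and using hypothetical names:

#include "webrtc/base/thread.h"

class DeferredCloser : public rtc::MessageHandler {
 public:
  // Called from inside request processing; defers the actual teardown.
  void RequestClose() { rtc::Thread::Current()->Post(this, kMsgClose); }

 private:
  enum { kMsgClose = 1 };
  void OnMessage(rtc::Message* msg) override {
    if (msg->message_id == kMsgClose) {
      // Safe point: the message that reported the error has been fully
      // processed, so shared state can be torn down without double-deletes.
      CloseNow();
    }
  }
  void CloseNow() { /* clear request manager, destroy connections, etc. */ }
};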
void TurnPort::OnAllocateRequestTimeout() {
@@ -898,24 +953,73 @@ TurnEntry* TurnPort::FindEntry(int channel_id) const {
return (it != entries_.end()) ? *it : NULL;
}
-TurnEntry* TurnPort::CreateEntry(const rtc::SocketAddress& addr) {
- ASSERT(FindEntry(addr) == NULL);
- TurnEntry* entry = new TurnEntry(this, next_channel_number_++, addr);
- entries_.push_back(entry);
- return entry;
+bool TurnPort::EntryExists(TurnEntry* e) {
+ auto it = std::find(entries_.begin(), entries_.end(), e);
+ return it != entries_.end();
}
-void TurnPort::DestroyEntry(const rtc::SocketAddress& addr) {
+void TurnPort::CreateOrRefreshEntry(const rtc::SocketAddress& addr) {
TurnEntry* entry = FindEntry(addr);
+ if (entry == nullptr) {
+ entry = new TurnEntry(this, next_channel_number_++, addr);
+ entries_.push_back(entry);
+ } else {
+ // The channel binding request for the entry will be refreshed automatically
+ // until the entry is destroyed.
+ CancelEntryDestruction(entry);
+ }
+}
+
+void TurnPort::DestroyEntry(TurnEntry* entry) {
ASSERT(entry != NULL);
entry->SignalDestroyed(entry);
entries_.remove(entry);
delete entry;
}
+void TurnPort::DestroyEntryIfNotCancelled(TurnEntry* entry,
+ uint32_t timestamp) {
+ if (!EntryExists(entry)) {
+ return;
+ }
+ bool cancelled = timestamp != entry->destruction_timestamp();
+ if (!cancelled) {
+ DestroyEntry(entry);
+ }
+}
+
void TurnPort::OnConnectionDestroyed(Connection* conn) {
- // Destroying TurnEntry for the connection, which is already destroyed.
- DestroyEntry(conn->remote_candidate().address());
+ // Schedule an event to destroy TurnEntry for the connection, which is
+ // already destroyed.
+ const rtc::SocketAddress& remote_address = conn->remote_candidate().address();
+ TurnEntry* entry = FindEntry(remote_address);
+ ASSERT(entry != NULL);
+ ScheduleEntryDestruction(entry);
+}
+
+void TurnPort::ScheduleEntryDestruction(TurnEntry* entry) {
+ ASSERT(entry->destruction_timestamp() == 0);
+ uint32_t timestamp = rtc::Time();
+ entry->set_destruction_timestamp(timestamp);
+ invoker_.AsyncInvokeDelayed<void>(
+ thread(),
+ rtc::Bind(&TurnPort::DestroyEntryIfNotCancelled, this, entry, timestamp),
+ TURN_PERMISSION_TIMEOUT);
+}
+
+void TurnPort::CancelEntryDestruction(TurnEntry* entry) {
+ ASSERT(entry->destruction_timestamp() != 0);
+ entry->set_destruction_timestamp(0);
+}
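Distilled, the schedule/cancel pair above implements cancellation-by-token without keeping a handle to the pending task: the delayed task captures the timestamp taken at scheduling time, and zeroing the stored timestamp makes a stale task a no-op when it fires. A minimal standalone sketch with hypothetical names (the real code also verifies via EntryExists() that the entry is still alive, so the pointer below is never dangling):

#include <cstdint>

struct Entry {
  uint32_t destruction_timestamp = 0;  // 0 => no destruction pending
};

struct DelayedDestroy {
  Entry* entry;
  uint32_t token;  // value of the clock when the task was scheduled
  void Run() {
    // NOTE: the real code first checks that |entry| still exists.
    if (entry->destruction_timestamp == token) {
      delete entry;  // neither cancelled nor superseded
    }
  }
};

void CancelDestruction(Entry* e) { e->destruction_timestamp = 0; }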
+
+bool TurnPort::SetEntryChannelId(const rtc::SocketAddress& address,
+ int channel_id) {
+ TurnEntry* entry = FindEntry(address);
+ if (!entry) {
+ return false;
+ }
+ entry->set_channel_id(channel_id);
+ return true;
}
TurnAllocateRequest::TurnAllocateRequest(TurnPort* port)
@@ -1131,16 +1235,12 @@ void TurnRefreshRequest::OnResponse(StunMessage* response) {
// Schedule a refresh based on the returned lifetime value.
port_->ScheduleRefresh(lifetime_attr->value());
+ port_->SignalTurnRefreshResult(port_, TURN_SUCCESS_RESULT_CODE);
}
void TurnRefreshRequest::OnErrorResponse(StunMessage* response) {
const StunErrorCodeAttribute* error_code = response->GetErrorCode();
- LOG_J(LS_INFO, port_) << "Received TURN refresh error response"
- << ", id=" << rtc::hex_encode(id())
- << ", code=" << error_code->code()
- << ", rtt=" << Elapsed();
-
if (error_code->code() == STUN_ERROR_STALE_NONCE) {
if (port_->UpdateNonce(response)) {
// Send RefreshRequest immediately.
@@ -1151,11 +1251,14 @@ void TurnRefreshRequest::OnErrorResponse(StunMessage* response) {
<< ", id=" << rtc::hex_encode(id())
<< ", code=" << error_code->code()
<< ", rtt=" << Elapsed();
+ port_->OnTurnRefreshError();
+ port_->SignalTurnRefreshResult(port_, error_code->code());
}
}
void TurnRefreshRequest::OnTimeout() {
LOG_J(LS_WARNING, port_) << "TURN refresh timeout " << rtc::hex_encode(id());
+ port_->OnTurnRefreshError();
}
TurnCreatePermissionRequest::TurnCreatePermissionRequest(
@@ -1208,6 +1311,9 @@ void TurnCreatePermissionRequest::OnErrorResponse(StunMessage* response) {
void TurnCreatePermissionRequest::OnTimeout() {
LOG_J(LS_WARNING, port_) << "TURN create permission timeout "
<< rtc::hex_encode(id());
+ if (entry_) {
+ entry_->OnCreatePermissionTimeout();
+ }
}
void TurnCreatePermissionRequest::OnEntryDestroyed(TurnEntry* entry) {
@@ -1275,6 +1381,9 @@ void TurnChannelBindRequest::OnErrorResponse(StunMessage* response) {
void TurnChannelBindRequest::OnTimeout() {
LOG_J(LS_WARNING, port_) << "TURN channel bind timeout "
<< rtc::hex_encode(id());
+ if (entry_) {
+ entry_->OnChannelBindTimeout();
+ }
}
void TurnChannelBindRequest::OnEntryDestroyed(TurnEntry* entry) {
@@ -1289,12 +1398,12 @@ TurnEntry::TurnEntry(TurnPort* port, int channel_id,
ext_addr_(ext_addr),
state_(STATE_UNBOUND) {
// Creating permission for |ext_addr_|.
- SendCreatePermissionRequest();
+ SendCreatePermissionRequest(0);
}
-void TurnEntry::SendCreatePermissionRequest() {
- port_->SendRequest(new TurnCreatePermissionRequest(
- port_, this, ext_addr_), 0);
+void TurnEntry::SendCreatePermissionRequest(int delay) {
+ port_->SendRequest(new TurnCreatePermissionRequest(port_, this, ext_addr_),
+ delay);
}
void TurnEntry::SendChannelBindRequest(int delay) {
@@ -1335,21 +1444,43 @@ void TurnEntry::OnCreatePermissionSuccess() {
LOG_J(LS_INFO, port_) << "Create permission for "
<< ext_addr_.ToSensitiveString()
<< " succeeded";
- // For success result code will be 0.
- port_->SignalCreatePermissionResult(port_, ext_addr_, 0);
+ port_->SignalCreatePermissionResult(port_, ext_addr_,
+ TURN_SUCCESS_RESULT_CODE);
+
+ // If |state_| is STATE_BOUND, the permission will be refreshed
+ // by ChannelBindRequest.
+ if (state_ != STATE_BOUND) {
+ // Refresh the permission request about 1 minute before the permission
+ // times out.
+ int delay = TURN_PERMISSION_TIMEOUT - 60000;
+ SendCreatePermissionRequest(delay);
+ LOG_J(LS_INFO, port_) << "Scheduled create-permission-request in "
+ << delay << "ms.";
+ }
}
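For concreteness, permissions have a fixed lifetime (TURN_PERMISSION_TIMEOUT; RFC 5766 specifies 5 minutes), and the refresh above is scheduled one minute before expiry. A sketch of the arithmetic, assuming the 5-minute value:

// Refresh a TURN permission one minute before it expires.
const int kPermissionTimeoutMs = 5 * 60 * 1000;  // assumed RFC 5766 lifetime
const int kRefreshLeadTimeMs = 60 * 1000;
const int kRefreshDelayMs = kPermissionTimeoutMs - kRefreshLeadTimeMs;  // 240000 ms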
void TurnEntry::OnCreatePermissionError(StunMessage* response, int code) {
if (code == STUN_ERROR_STALE_NONCE) {
if (port_->UpdateNonce(response)) {
- SendCreatePermissionRequest();
+ SendCreatePermissionRequest(0);
}
} else {
+ port_->DestroyConnection(ext_addr_);
// Send signal with error code.
port_->SignalCreatePermissionResult(port_, ext_addr_, code);
+ Connection* c = port_->GetConnection(ext_addr_);
+ if (c) {
+ LOG_J(LS_ERROR, c) << "Received TURN CreatePermission error response, "
+ << "code=" << code << "; killing connection.";
+ c->FailAndDestroy();
+ }
}
}
+void TurnEntry::OnCreatePermissionTimeout() {
+ port_->DestroyConnection(ext_addr_);
+}
+
void TurnEntry::OnChannelBindSuccess() {
LOG_J(LS_INFO, port_) << "Channel bind for " << ext_addr_.ToSensitiveString()
<< " succeeded";
@@ -1358,14 +1489,21 @@ void TurnEntry::OnChannelBindSuccess() {
}
void TurnEntry::OnChannelBindError(StunMessage* response, int code) {
- // TODO(mallinath) - Implement handling of error response for channel
- // bind request as per http://tools.ietf.org/html/rfc5766#section-11.3
+ // If the channel bind fails due to errors other than STALE_NONCE,
+ // we just destroy the connection and rely on ICE restart to re-establish
+ // the connection.
if (code == STUN_ERROR_STALE_NONCE) {
if (port_->UpdateNonce(response)) {
// Send channel bind request with fresh nonce.
SendChannelBindRequest(0);
}
+ } else {
+ state_ = STATE_UNBOUND;
+ port_->DestroyConnection(ext_addr_);
}
}
-
+void TurnEntry::OnChannelBindTimeout() {
+ state_ = STATE_UNBOUND;
+ port_->DestroyConnection(ext_addr_);
+}
} // namespace cricket
diff --git a/webrtc/p2p/base/turnport.h b/webrtc/p2p/base/turnport.h
index 3bca727346..4d83806a37 100644
--- a/webrtc/p2p/base/turnport.h
+++ b/webrtc/p2p/base/turnport.h
@@ -16,9 +16,10 @@
#include <set>
#include <string>
+#include "webrtc/base/asyncinvoker.h"
+#include "webrtc/base/asyncpacketsocket.h"
#include "webrtc/p2p/base/port.h"
#include "webrtc/p2p/client/basicportallocator.h"
-#include "webrtc/base/asyncpacketsocket.h"
namespace rtc {
class AsyncResolver;
@@ -105,7 +106,13 @@ class TurnPort : public Port {
const rtc::SocketAddress& remote_addr,
const rtc::PacketTime& packet_time);
+ virtual void OnSentPacket(rtc::AsyncPacketSocket* socket,
+ const rtc::SentPacket& sent_packet);
virtual void OnReadyToSend(rtc::AsyncPacketSocket* socket);
+ virtual bool SupportsProtocol(const std::string& protocol) const {
+ // A TURN port only connects to UDP candidates.
+ return protocol == UDP_PROTOCOL_NAME;
+ }
void OnSocketConnect(rtc::AsyncPacketSocket* socket);
void OnSocketClose(rtc::AsyncPacketSocket* socket, int error);
@@ -122,6 +129,9 @@ class TurnPort : public Port {
return socket_;
}
+ // For testing only.
+ rtc::AsyncInvoker* invoker() { return &invoker_; }
+
// Signal with resolved server address.
// Parameters are port, server address and resolved server address.
// This signal will be sent only if server address is resolved successfully.
@@ -129,9 +139,18 @@ class TurnPort : public Port {
const rtc::SocketAddress&,
const rtc::SocketAddress&> SignalResolvedServerAddress;
- // This signal is only for testing purpose.
+ // All public methods/signals below are for testing only.
+ sigslot::signal2<TurnPort*, int> SignalTurnRefreshResult;
sigslot::signal3<TurnPort*, const rtc::SocketAddress&, int>
SignalCreatePermissionResult;
+ void FlushRequests(int msg_type) { request_manager_.Flush(msg_type); }
+ bool HasRequests() { return !request_manager_.empty(); }
+ void set_credentials(RelayCredentials& credentials) {
+ credentials_ = credentials;
+ }
+ // Finds the TURN entry with |address| and sets its channel id.
+ // Returns true if the entry is found.
+ bool SetEntryChannelId(const rtc::SocketAddress& address, int channel_id);
protected:
TurnPort(rtc::Thread* thread,
@@ -160,9 +179,10 @@ class TurnPort : public Port {
private:
enum {
- MSG_ERROR = MSG_FIRST_AVAILABLE,
+ MSG_ALLOCATE_ERROR = MSG_FIRST_AVAILABLE,
MSG_ALLOCATE_MISMATCH,
- MSG_TRY_ALTERNATE_SERVER
+ MSG_TRY_ALTERNATE_SERVER,
+ MSG_REFRESH_ERROR
};
typedef std::list<TurnEntry*> EntryList;
@@ -181,6 +201,9 @@ class TurnPort : public Port {
}
}
+ // Shuts down the TURN port, usually because of a fatal error.
+ void Close();
+ void OnTurnRefreshError();
bool SetAlternateServer(const rtc::SocketAddress& address);
void ResolveTurnAddress(const rtc::SocketAddress& address);
void OnResolveResult(rtc::AsyncResolverInterface* resolver);
@@ -213,10 +236,20 @@ class TurnPort : public Port {
bool HasPermission(const rtc::IPAddress& ipaddr) const;
TurnEntry* FindEntry(const rtc::SocketAddress& address) const;
TurnEntry* FindEntry(int channel_id) const;
- TurnEntry* CreateEntry(const rtc::SocketAddress& address);
- void DestroyEntry(const rtc::SocketAddress& address);
+ bool EntryExists(TurnEntry* e);
+ void CreateOrRefreshEntry(const rtc::SocketAddress& address);
+ void DestroyEntry(TurnEntry* entry);
+ // Destroys the entry only if |timestamp| matches the destruction timestamp
+ // in |entry|.
+ void DestroyEntryIfNotCancelled(TurnEntry* entry, uint32_t timestamp);
+ void ScheduleEntryDestruction(TurnEntry* entry);
+ void CancelEntryDestruction(TurnEntry* entry);
void OnConnectionDestroyed(Connection* conn);
+ // Destroys the connection with remote address |address|. Returns true if
+ // a connection is found and destroyed.
+ bool DestroyConnection(const rtc::SocketAddress& address);
+
ProtocolAddress server_address_;
RelayCredentials credentials_;
AttemptedServerSet attempted_server_addresses_;
@@ -242,6 +275,8 @@ class TurnPort : public Port {
// The number of retries made due to allocate mismatch error.
size_t allocate_mismatch_retries_;
+ rtc::AsyncInvoker invoker_;
+
friend class TurnEntry;
friend class TurnAllocateRequest;
friend class TurnRefreshRequest;
diff --git a/webrtc/p2p/base/turnport_unittest.cc b/webrtc/p2p/base/turnport_unittest.cc
index 724485ddde..916162575f 100644
--- a/webrtc/p2p/base/turnport_unittest.cc
+++ b/webrtc/p2p/base/turnport_unittest.cc
@@ -13,6 +13,7 @@
#include "webrtc/p2p/base/basicpacketsocketfactory.h"
#include "webrtc/p2p/base/constants.h"
+#include "webrtc/p2p/base/portallocator.h"
#include "webrtc/p2p/base/tcpport.h"
#include "webrtc/p2p/base/testturnserver.h"
#include "webrtc/p2p/base/turnport.h"
@@ -100,6 +101,24 @@ class TurnPortTestVirtualSocketServer : public rtc::VirtualSocketServer {
using rtc::VirtualSocketServer::LookupBinding;
};
+class TestConnectionWrapper : public sigslot::has_slots<> {
+ public:
+ TestConnectionWrapper(Connection* conn) : connection_(conn) {
+ conn->SignalDestroyed.connect(
+ this, &TestConnectionWrapper::OnConnectionDestroyed);
+ }
+
+ Connection* connection() { return connection_; }
+
+ private:
+ void OnConnectionDestroyed(Connection* conn) {
+ ASSERT_TRUE(conn == connection_);
+ connection_ = nullptr;
+ }
+
+ Connection* connection_;
+};
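TestConnectionWrapper is an instance of a general sigslot idiom: cache a pointer and null it when the object announces its own destruction. A minimal standalone sketch with hypothetical types:

#include "webrtc/base/sigslot.h"

class Widget {
 public:
  sigslot::signal1<Widget*> SignalDestroyed;
  ~Widget() { SignalDestroyed(this); }
};

class WidgetWatcher : public sigslot::has_slots<> {
 public:
  explicit WidgetWatcher(Widget* w) : widget_(w) {
    w->SignalDestroyed.connect(this, &WidgetWatcher::OnDestroyed);
  }
  Widget* widget() const { return widget_; }  // null once destroyed

 private:
  void OnDestroyed(Widget* w) { widget_ = nullptr; }
  Widget* widget_;
};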
+
class TurnPortTest : public testing::Test,
public sigslot::has_slots<>,
public rtc::MessageHandler {
@@ -154,12 +173,15 @@ class TurnPortTest : public testing::Test,
bool /*port_muxed*/) {
turn_unknown_address_ = true;
}
- void OnTurnCreatePermissionResult(TurnPort* port, const SocketAddress& addr,
- int code) {
+ void OnTurnCreatePermissionResult(TurnPort* port,
+ const SocketAddress& addr,
+ int code) {
// Ignoring the address.
- if (code == 0) {
- turn_create_permission_success_ = true;
- }
+ turn_create_permission_success_ = (code == 0);
+ }
+
+ void OnTurnRefreshResult(TurnPort* port, int code) {
+ turn_refresh_success_ = (code == 0);
}
void OnTurnReadPacket(Connection* conn, const char* data, size_t size,
const rtc::PacketTime& packet_time) {
@@ -172,6 +194,7 @@ class TurnPortTest : public testing::Test,
const rtc::PacketTime& packet_time) {
udp_packets_.push_back(rtc::Buffer(data, size));
}
+ void OnConnectionDestroyed(Connection* conn) { connection_destroyed_ = true; }
void OnSocketReadPacket(rtc::AsyncPacketSocket* socket,
const char* data, size_t size,
const rtc::SocketAddress& remote_addr,
@@ -255,18 +278,42 @@ class TurnPortTest : public testing::Test,
&TurnPortTest::OnTurnUnknownAddress);
turn_port_->SignalCreatePermissionResult.connect(this,
&TurnPortTest::OnTurnCreatePermissionResult);
+ turn_port_->SignalTurnRefreshResult.connect(
+ this, &TurnPortTest::OnTurnRefreshResult);
}
- void CreateUdpPort() {
+ void ConnectConnectionDestroyedSignal(Connection* conn) {
+ conn->SignalDestroyed.connect(this, &TurnPortTest::OnConnectionDestroyed);
+ }
+
+ void CreateUdpPort() { CreateUdpPort(kLocalAddr2); }
+
+ void CreateUdpPort(const SocketAddress& address) {
udp_port_.reset(UDPPort::Create(main_, &socket_factory_, &network_,
- kLocalAddr2.ipaddr(), 0, 0,
- kIceUfrag2, kIcePwd2,
- std::string(), false));
+ address.ipaddr(), 0, 0, kIceUfrag2,
+ kIcePwd2, std::string(), false));
// UDP port will be controlled.
udp_port_->SetIceRole(cricket::ICEROLE_CONTROLLED);
udp_port_->SignalPortComplete.connect(
this, &TurnPortTest::OnUdpPortComplete);
}
+ void PrepareTurnAndUdpPorts() {
+ // turn_port_ should have been created.
+ ASSERT_TRUE(turn_port_ != nullptr);
+ turn_port_->PrepareAddress();
+ ASSERT_TRUE_WAIT(turn_ready_, kTimeout);
+
+ CreateUdpPort();
+ udp_port_->PrepareAddress();
+ ASSERT_TRUE_WAIT(udp_ready_, kTimeout);
+ }
+
+ bool CheckConnectionDestroyed() {
+ turn_port_->FlushRequests(cricket::kAllRequests);
+ rtc::Thread::Current()->ProcessMessages(50);
+ return connection_destroyed_;
+ }
+
void TestTurnAlternateServer(cricket::ProtocolType protocol_type) {
std::vector<rtc::SocketAddress> redirect_addresses;
redirect_addresses.push_back(kTurnAlternateIntAddr);
@@ -350,12 +397,7 @@ class TurnPortTest : public testing::Test,
void TestTurnConnection() {
// Create ports and prepare addresses.
- ASSERT_TRUE(turn_port_ != NULL);
- turn_port_->PrepareAddress();
- ASSERT_TRUE_WAIT(turn_ready_, kTimeout);
- CreateUdpPort();
- udp_port_->PrepareAddress();
- ASSERT_TRUE_WAIT(udp_ready_, kTimeout);
+ PrepareTurnAndUdpPorts();
// Send ping from UDP to TURN.
Connection* conn1 = udp_port_->CreateConnection(
@@ -385,12 +427,46 @@ class TurnPortTest : public testing::Test,
EXPECT_TRUE(conn2->receiving());
}
+ void TestDestroyTurnConnection() {
+ PrepareTurnAndUdpPorts();
+
+ // Create connections on both ends.
+ Connection* conn1 = udp_port_->CreateConnection(turn_port_->Candidates()[0],
+ Port::ORIGIN_MESSAGE);
+ Connection* conn2 = turn_port_->CreateConnection(udp_port_->Candidates()[0],
+ Port::ORIGIN_MESSAGE);
+ ASSERT_TRUE(conn2 != NULL);
+ ASSERT_TRUE_WAIT(turn_create_permission_success_, kTimeout);
+ // Make sure turn connection can receive.
+ conn1->Ping(0);
+ EXPECT_EQ_WAIT(Connection::STATE_WRITABLE, conn1->write_state(), kTimeout);
+ EXPECT_FALSE(turn_unknown_address_);
+
+ // Destroy the connection on the TURN port. The TurnEntry is still
+ // there, so if the TURN port is pinged it reports a ping from an
+ // unknown address.
+ conn2->Destroy();
+ conn1->Ping(0);
+ EXPECT_TRUE_WAIT(turn_unknown_address_, kTimeout);
+
+ // Flush all requests in the invoker to destroy the TurnEntry.
+ // Now the TURN port cannot receive the ping.
+ turn_unknown_address_ = false;
+ turn_port_->invoker()->Flush(rtc::Thread::Current());
+ conn1->Ping(0);
+ rtc::Thread::Current()->ProcessMessages(500);
+ EXPECT_FALSE(turn_unknown_address_);
+
+ // If the connection is created again, it will start to receive pings.
+ conn2 = turn_port_->CreateConnection(udp_port_->Candidates()[0],
+ Port::ORIGIN_MESSAGE);
+ conn1->Ping(0);
+ EXPECT_TRUE_WAIT(conn2->receiving(), kTimeout);
+ EXPECT_FALSE(turn_unknown_address_);
+ }
+
void TestTurnSendData() {
- turn_port_->PrepareAddress();
- EXPECT_TRUE_WAIT(turn_ready_, kTimeout);
- CreateUdpPort();
- udp_port_->PrepareAddress();
- EXPECT_TRUE_WAIT(udp_ready_, kTimeout);
+ PrepareTurnAndUdpPorts();
+
// Create connections and send pings.
Connection* conn1 = turn_port_->CreateConnection(
udp_port_->Candidates()[0], Port::ORIGIN_MESSAGE);
@@ -446,6 +522,8 @@ class TurnPortTest : public testing::Test,
bool turn_create_permission_success_;
bool udp_ready_;
bool test_finish_;
+ bool turn_refresh_success_ = false;
+ bool connection_destroyed_ = false;
std::vector<rtc::Buffer> turn_packets_;
std::vector<rtc::Buffer> udp_packets_;
rtc::PacketOptions options;
@@ -613,16 +691,33 @@ TEST_F(TurnPortTest, TestTurnTcpAllocateMismatch) {
EXPECT_NE(first_addr, turn_port_->socket()->GetLocalAddress());
}
+TEST_F(TurnPortTest, TestRefreshRequestGetsErrorResponse) {
+ CreateTurnPort(kTurnUsername, kTurnPassword, kTurnUdpProtoAddr);
+ PrepareTurnAndUdpPorts();
+ turn_port_->CreateConnection(udp_port_->Candidates()[0],
+ Port::ORIGIN_MESSAGE);
+ // Set bad credentials.
+ cricket::RelayCredentials bad_credentials("bad_user", "bad_pwd");
+ turn_port_->set_credentials(bad_credentials);
+ turn_refresh_success_ = false;
+ // This sends out the first RefreshRequest with correct credentials.
+ // When this succeeds, it will schedule a new RefreshRequest with the bad
+ // credentials.
+ turn_port_->FlushRequests(cricket::TURN_REFRESH_REQUEST);
+ EXPECT_TRUE_WAIT(turn_refresh_success_, kTimeout);
+ // Flush it again, it will receive a bad response.
+ turn_port_->FlushRequests(cricket::TURN_REFRESH_REQUEST);
+ EXPECT_TRUE_WAIT(!turn_refresh_success_, kTimeout);
+ EXPECT_TRUE_WAIT(!turn_port_->connected(), kTimeout);
+ EXPECT_TRUE_WAIT(turn_port_->connections().empty(), kTimeout);
+ EXPECT_FALSE(turn_port_->HasRequests());
+}
+
// Test that CreateConnection will return null if port becomes disconnected.
TEST_F(TurnPortTest, TestCreateConnectionWhenSocketClosed) {
turn_server_.AddInternalSocket(kTurnTcpIntAddr, cricket::PROTO_TCP);
CreateTurnPort(kTurnUsername, kTurnPassword, kTurnTcpProtoAddr);
- turn_port_->PrepareAddress();
- ASSERT_TRUE_WAIT(turn_ready_, kTimeout);
-
- CreateUdpPort();
- udp_port_->PrepareAddress();
- ASSERT_TRUE_WAIT(udp_ready_, kTimeout);
+ PrepareTurnAndUdpPorts();
// Create a connection.
Connection* conn1 = turn_port_->CreateConnection(udp_port_->Candidates()[0],
Port::ORIGIN_MESSAGE);
@@ -694,6 +789,20 @@ TEST_F(TurnPortTest, TestTurnTcpConnection) {
TestTurnConnection();
}
+// Test that if a connection on a TURN port is destroyed, the TURN port still
+// receives pings for that connection as if they came from an unknown address.
+// If the connection is created again, it will receive pings as before.
+TEST_F(TurnPortTest, TestDestroyTurnConnection) {
+ CreateTurnPort(kTurnUsername, kTurnPassword, kTurnUdpProtoAddr);
+ TestDestroyTurnConnection();
+}
+
+// Similar to above, except that this test will use the shared socket.
+TEST_F(TurnPortTest, TestDestroyTurnConnectionUsingSharedSocket) {
+ CreateSharedTurnPort(kTurnUsername, kTurnPassword, kTurnUdpProtoAddr);
+ TestDestroyTurnConnection();
+}
+
// Test that we fail to create a connection when we want to use TLS over TCP.
// This test should be removed once we have TLS support.
TEST_F(TurnPortTest, TestTurnTlsTcpConnectionFails) {
@@ -715,6 +824,54 @@ TEST_F(TurnPortTest, TestTurnConnectionUsingOTUNonce) {
TestTurnConnection();
}
+// Test that a CreatePermissionRequest is scheduled after the first create
+// permission request succeeds, and that the scheduled request gets an
+// ErrorResponse if the ufrag and pwd are incorrect.
+TEST_F(TurnPortTest, TestRefreshCreatePermissionRequest) {
+ CreateTurnPort(kTurnUsername, kTurnPassword, kTurnUdpProtoAddr);
+ PrepareTurnAndUdpPorts();
+
+ Connection* conn = turn_port_->CreateConnection(udp_port_->Candidates()[0],
+ Port::ORIGIN_MESSAGE);
+ ConnectConnectionDestroyedSignal(conn);
+ ASSERT_TRUE(conn != NULL);
+ ASSERT_TRUE_WAIT(turn_create_permission_success_, kTimeout);
+ turn_create_permission_success_ = false;
+ // A create-permission-request should be pending.
+ // After the next create-permission-response is received, it will schedule
+ // another request with the bad credentials.
+ cricket::RelayCredentials bad_credentials("bad_user", "bad_pwd");
+ turn_port_->set_credentials(bad_credentials);
+ turn_port_->FlushRequests(cricket::kAllRequests);
+ ASSERT_TRUE_WAIT(turn_create_permission_success_, kTimeout);
+ // Flush the requests again; the create-permission-request will fail.
+ turn_port_->FlushRequests(cricket::kAllRequests);
+ EXPECT_TRUE_WAIT(!turn_create_permission_success_, kTimeout);
+ EXPECT_TRUE_WAIT(connection_destroyed_, kTimeout);
+}
+
+TEST_F(TurnPortTest, TestChannelBindGetErrorResponse) {
+ CreateTurnPort(kTurnUsername, kTurnPassword, kTurnUdpProtoAddr);
+ PrepareTurnAndUdpPorts();
+ Connection* conn1 = turn_port_->CreateConnection(udp_port_->Candidates()[0],
+ Port::ORIGIN_MESSAGE);
+ ASSERT_TRUE(conn1 != nullptr);
+ Connection* conn2 = udp_port_->CreateConnection(turn_port_->Candidates()[0],
+ Port::ORIGIN_MESSAGE);
+ ASSERT_TRUE(conn2 != nullptr);
+ ConnectConnectionDestroyedSignal(conn1);
+ conn1->Ping(0);
+ ASSERT_TRUE_WAIT(conn1->writable(), kTimeout);
+
+ std::string data = "ABC";
+ conn1->Send(data.data(), data.length(), options);
+ bool success =
+ turn_port_->SetEntryChannelId(udp_port_->Candidates()[0].address(), -1);
+ ASSERT_TRUE(success);
+ // Next time when the binding request is sent, it will get an ErrorResponse.
+ EXPECT_TRUE_WAIT(CheckConnectionDestroyed(), kTimeout);
+}
+
// Do a TURN allocation, establish a UDP connection, and send some data.
TEST_F(TurnPortTest, TestTurnSendDataTurnUdpToUdp) {
// Create ports and prepare addresses.
@@ -771,6 +928,29 @@ TEST_F(TurnPortTest, TestOriginHeader) {
EXPECT_EQ(kTestOrigin, turn_server_.FindAllocation(local_address)->origin());
}
+// Test that a CreatePermission failure will result in the connection being
+// destroyed.
+TEST_F(TurnPortTest, TestConnectionDestroyedOnCreatePermissionFailure) {
+ turn_server_.AddInternalSocket(kTurnTcpIntAddr, cricket::PROTO_TCP);
+ turn_server_.server()->set_reject_private_addresses(true);
+ CreateTurnPort(kTurnUsername, kTurnPassword, kTurnTcpProtoAddr);
+ turn_port_->PrepareAddress();
+ ASSERT_TRUE_WAIT(turn_ready_, kTimeout);
+
+ CreateUdpPort(SocketAddress("10.0.0.10", 0));
+ udp_port_->PrepareAddress();
+ ASSERT_TRUE_WAIT(udp_ready_, kTimeout);
+ // Create a connection.
+ TestConnectionWrapper conn(turn_port_->CreateConnection(
+ udp_port_->Candidates()[0], Port::ORIGIN_MESSAGE));
+ ASSERT_TRUE(conn.connection() != nullptr);
+
+ // Asynchronously, CreatePermission request should be sent and fail, closing
+ // the connection.
+ EXPECT_TRUE_WAIT(conn.connection() == nullptr, kTimeout);
+ EXPECT_FALSE(turn_create_permission_success_);
+}
+
// Test that a TURN allocation is released when the port is closed.
TEST_F(TurnPortTest, TestTurnReleaseAllocation) {
CreateTurnPort(kTurnUsername, kTurnPassword, kTurnUdpProtoAddr);
@@ -797,6 +977,10 @@ TEST_F(TurnPortTest, DISABLED_TestTurnTCPReleaseAllocation) {
// This test verifies any FD's are not leaked after TurnPort is destroyed.
// https://code.google.com/p/webrtc/issues/detail?id=2651
#if defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+// 1 second is not always enough for getaddrinfo().
+// See: https://bugs.chromium.org/p/webrtc/issues/detail?id=5191
+static const unsigned int kResolverTimeout = 10000;
+
TEST_F(TurnPortTest, TestResolverShutdown) {
turn_server_.AddInternalSocket(kTurnUdpIPv6IntAddr, cricket::PROTO_UDP);
int last_fd_count = GetFDCount();
@@ -805,7 +989,7 @@ TEST_F(TurnPortTest, TestResolverShutdown) {
cricket::ProtocolAddress(rtc::SocketAddress(
"www.google.invalid", 3478), cricket::PROTO_UDP));
turn_port_->PrepareAddress();
- ASSERT_TRUE_WAIT(turn_error_, kTimeout);
+ ASSERT_TRUE_WAIT(turn_error_, kResolverTimeout);
EXPECT_TRUE(turn_port_->Candidates().empty());
turn_port_.reset();
rtc::Thread::Current()->Post(this, MSG_TESTFINISH);
diff --git a/webrtc/p2p/base/turnserver.cc b/webrtc/p2p/base/turnserver.cc
index 8d40a9030c..1502cdd52e 100644
--- a/webrtc/p2p/base/turnserver.cc
+++ b/webrtc/p2p/base/turnserver.cc
@@ -698,6 +698,12 @@ void TurnServerAllocation::HandleCreatePermissionRequest(
return;
}
+ if (server_->reject_private_addresses_ &&
+ rtc::IPIsPrivate(peer_attr->GetAddress().ipaddr())) {
+ SendErrorResponse(msg, STUN_ERROR_FORBIDDEN, STUN_ERROR_REASON_FORBIDDEN);
+ return;
+ }
+
// Add this permission.
AddPermission(peer_attr->GetAddress().ipaddr());
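The filter above relies on rtc::IPIsPrivate, which (as an assumption here) also covers loopback and link-local ranges in addition to RFC 1918. For illustration, an IPv4-only sketch of just the RFC 1918 ranges that the new turnserver.h comment refers to:

#include <cstdint>

// True for 10.0.0.0/8, 172.16.0.0/12, and 192.168.0.0/16 (RFC 1918).
// |ip| is an IPv4 address in host byte order.
bool IsRfc1918(uint32_t ip) {
  return (ip & 0xFF000000u) == 0x0A000000u ||  // 10.0.0.0/8
         (ip & 0xFFF00000u) == 0xAC100000u ||  // 172.16.0.0/12
         (ip & 0xFFFF0000u) == 0xC0A80000u;    // 192.168.0.0/16
}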
diff --git a/webrtc/p2p/base/turnserver.h b/webrtc/p2p/base/turnserver.h
index d3bd77a866..113bd4c462 100644
--- a/webrtc/p2p/base/turnserver.h
+++ b/webrtc/p2p/base/turnserver.h
@@ -183,6 +183,11 @@ class TurnServer : public sigslot::has_slots<> {
void set_enable_otu_nonce(bool enable) { enable_otu_nonce_ = enable; }
+ // If set to true, reject CreatePermission requests to RFC1918 addresses.
+ void set_reject_private_addresses(bool filter) {
+ reject_private_addresses_ = filter;
+ }
+
// Starts listening for packets from internal clients.
void AddInternalSocket(rtc::AsyncPacketSocket* socket,
ProtocolType proto);
@@ -255,6 +260,7 @@ class TurnServer : public sigslot::has_slots<> {
// otu - one-time-use. Server will respond with 438 if it sees the
// same nonce in the next transaction.
bool enable_otu_nonce_;
+ bool reject_private_addresses_ = false;
InternalSocketMap server_sockets_;
ServerSocketMap server_listen_sockets_;
diff --git a/webrtc/p2p/client/basicportallocator.cc b/webrtc/p2p/client/basicportallocator.cc
index 21c8921f40..e45d2c8f0f 100644
--- a/webrtc/p2p/client/basicportallocator.cc
+++ b/webrtc/p2p/client/basicportallocator.cc
@@ -10,6 +10,7 @@
#include "webrtc/p2p/client/basicportallocator.h"
+#include <algorithm>
#include <string>
#include <vector>
@@ -70,15 +71,16 @@ BasicPortAllocator::BasicPortAllocator(
: network_manager_(network_manager),
socket_factory_(socket_factory),
stun_servers_() {
- ASSERT(socket_factory_ != NULL);
+ ASSERT(network_manager_ != nullptr);
+ ASSERT(socket_factory_ != nullptr);
Construct();
}
-BasicPortAllocator::BasicPortAllocator(
- rtc::NetworkManager* network_manager)
+BasicPortAllocator::BasicPortAllocator(rtc::NetworkManager* network_manager)
: network_manager_(network_manager),
- socket_factory_(NULL),
+ socket_factory_(nullptr),
stun_servers_() {
+ ASSERT(network_manager_ != nullptr);
Construct();
}
@@ -104,15 +106,19 @@ BasicPortAllocator::BasicPortAllocator(
stun_servers_(stun_servers) {
RelayServerConfig config(RELAY_GTURN);
- if (!relay_address_udp.IsNil())
+ if (!relay_address_udp.IsNil()) {
config.ports.push_back(ProtocolAddress(relay_address_udp, PROTO_UDP));
- if (!relay_address_tcp.IsNil())
+ }
+ if (!relay_address_tcp.IsNil()) {
config.ports.push_back(ProtocolAddress(relay_address_tcp, PROTO_TCP));
- if (!relay_address_ssl.IsNil())
+ }
+ if (!relay_address_ssl.IsNil()) {
config.ports.push_back(ProtocolAddress(relay_address_ssl, PROTO_SSLTCP));
+ }
- if (!config.ports.empty())
- AddRelay(config);
+ if (!config.ports.empty()) {
+ AddTurnServer(config);
+ }
Construct();
}
@@ -241,8 +247,8 @@ void BasicPortAllocatorSession::GetPortConfigurations() {
username(),
password());
- for (size_t i = 0; i < allocator_->relays().size(); ++i) {
- config->AddRelay(allocator_->relays()[i]);
+ for (const RelayServerConfig& turn_server : allocator_->turn_servers()) {
+ config->AddRelay(turn_server);
}
ConfigReady(config);
}
@@ -253,8 +259,9 @@ void BasicPortAllocatorSession::ConfigReady(PortConfiguration* config) {
// Adds a configuration to the list.
void BasicPortAllocatorSession::OnConfigReady(PortConfiguration* config) {
- if (config)
+ if (config) {
configs_.push_back(config);
+ }
AllocatePorts();
}
@@ -322,6 +329,12 @@ void BasicPortAllocatorSession::GetNetworks(
} else {
network_manager->GetNetworks(networks);
}
+ networks->erase(std::remove_if(networks->begin(), networks->end(),
+ [this](rtc::Network* network) {
+ return allocator_->network_ignore_mask() &
+ network->type();
+ }),
+ networks->end());
}
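The lambda above is the standard erase-remove idiom keyed on a bitmask of adapter types; a standalone sketch with hypothetical types:

#include <algorithm>
#include <cstdint>
#include <vector>

struct Net { uint32_t type; };  // one bit per adapter type

// Drop every network whose type bit is set in |ignore_mask|.
void FilterByMask(std::vector<Net*>* nets, uint32_t ignore_mask) {
  nets->erase(std::remove_if(nets->begin(), nets->end(),
                             [ignore_mask](Net* n) {
                               return (n->type & ignore_mask) != 0;
                             }),
              nets->end());
}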
// For each network, see if we have a sequence that covers it already. If not,
@@ -436,7 +449,8 @@ void BasicPortAllocatorSession::AddAllocatedPort(Port* port,
// When adapter enumeration is disabled, disable CF_HOST at port level so
// local address is not leaked by stunport in the candidate's related address.
- if (flags() & PORTALLOCATOR_DISABLE_ADAPTER_ENUMERATION) {
+ if ((flags() & PORTALLOCATOR_DISABLE_ADAPTER_ENUMERATION) &&
+ (flags() & PORTALLOCATOR_DISABLE_DEFAULT_LOCAL_CANDIDATE)) {
candidate_filter &= ~CF_HOST;
}
port->set_candidate_filter(candidate_filter);
@@ -600,25 +614,6 @@ bool BasicPortAllocatorSession::CheckCandidateFilter(const Candidate& c) {
return true;
}
- // If PORTALLOCATOR_ENABLE_LOCALHOST_CANDIDATE is specified and it's
- // loopback address, we should allow it as it's for demo page connectivity
- // when no TURN/STUN specified.
- if (c.address().IsLoopbackIP() &&
- (flags() & PORTALLOCATOR_ENABLE_LOCALHOST_CANDIDATE) != 0) {
- return true;
- }
-
- // This is just to prevent the case when binding to any address (all 0s), if
- // somehow the host candidate address is not all 0s. Either because local
- // installed proxy changes the address or a packet has been sent for any
- // reason before getsockname is called.
- if (flags() & PORTALLOCATOR_DISABLE_ADAPTER_ENUMERATION) {
- LOG(LS_WARNING) << "Received non-0 host address: "
- << c.address().ToString()
- << " when adapter enumeration is disabled";
- return false;
- }
-
return ((filter & CF_HOST) != 0);
}
return false;
@@ -882,19 +877,19 @@ void AllocationSequence::CreateUDPPorts() {
// TODO(mallinath) - Remove UDPPort creating socket after shared socket
// is enabled completely.
UDPPort* port = NULL;
- bool emit_localhost_for_anyaddress =
- IsFlagSet(PORTALLOCATOR_ENABLE_LOCALHOST_CANDIDATE);
+ bool emit_local_candidate_for_anyaddress =
+ !IsFlagSet(PORTALLOCATOR_DISABLE_DEFAULT_LOCAL_CANDIDATE);
if (IsFlagSet(PORTALLOCATOR_ENABLE_SHARED_SOCKET) && udp_socket_) {
port = UDPPort::Create(
session_->network_thread(), session_->socket_factory(), network_,
udp_socket_.get(), session_->username(), session_->password(),
- session_->allocator()->origin(), emit_localhost_for_anyaddress);
+ session_->allocator()->origin(), emit_local_candidate_for_anyaddress);
} else {
port = UDPPort::Create(
session_->network_thread(), session_->socket_factory(), network_, ip_,
session_->allocator()->min_port(), session_->allocator()->max_port(),
session_->username(), session_->password(),
- session_->allocator()->origin(), emit_localhost_for_anyaddress);
+ session_->allocator()->origin(), emit_local_candidate_for_anyaddress);
}
if (port) {
diff --git a/webrtc/p2p/client/basicportallocator.h b/webrtc/p2p/client/basicportallocator.h
index c8bcad21a9..ca1a23aaf2 100644
--- a/webrtc/p2p/client/basicportallocator.h
+++ b/webrtc/p2p/client/basicportallocator.h
@@ -14,7 +14,6 @@
#include <string>
#include <vector>
-#include "webrtc/p2p/base/port.h"
#include "webrtc/p2p/base/portallocator.h"
#include "webrtc/base/messagequeue.h"
#include "webrtc/base/network.h"
@@ -23,28 +22,6 @@
namespace cricket {
-struct RelayCredentials {
- RelayCredentials() {}
- RelayCredentials(const std::string& username,
- const std::string& password)
- : username(username),
- password(password) {
- }
-
- std::string username;
- std::string password;
-};
-
-typedef std::vector<ProtocolAddress> PortList;
-struct RelayServerConfig {
- RelayServerConfig(RelayType type) : type(type), priority(0) {}
-
- RelayType type;
- PortList ports;
- RelayCredentials credentials;
- int priority;
-};
-
class BasicPortAllocator : public PortAllocator {
public:
BasicPortAllocator(rtc::NetworkManager* network_manager,
@@ -60,6 +37,23 @@ class BasicPortAllocator : public PortAllocator {
const rtc::SocketAddress& relay_server_ssl);
virtual ~BasicPortAllocator();
+ void SetIceServers(
+ const ServerAddresses& stun_servers,
+ const std::vector<RelayServerConfig>& turn_servers) override {
+ stun_servers_ = stun_servers;
+ turn_servers_ = turn_servers;
+ }
+
+ // Set to kDefaultNetworkIgnoreMask by default.
+ void SetNetworkIgnoreMask(int network_ignore_mask) override {
+ // TODO(phoglund): implement support for other types than loopback.
+ // See https://code.google.com/p/webrtc/issues/detail?id=4288.
+ // Then remove set_network_ignore_list from NetworkManager.
+ network_ignore_mask_ = network_ignore_mask;
+ }
+
+ int network_ignore_mask() const { return network_ignore_mask_; }
+
rtc::NetworkManager* network_manager() { return network_manager_; }
// If socket_factory() is set to NULL each PortAllocatorSession
@@ -70,27 +64,28 @@ class BasicPortAllocator : public PortAllocator {
return stun_servers_;
}
- const std::vector<RelayServerConfig>& relays() const {
- return relays_;
+ const std::vector<RelayServerConfig>& turn_servers() const {
+ return turn_servers_;
}
- virtual void AddRelay(const RelayServerConfig& relay) {
- relays_.push_back(relay);
+ virtual void AddTurnServer(const RelayServerConfig& turn_server) {
+ turn_servers_.push_back(turn_server);
}
- virtual PortAllocatorSession* CreateSessionInternal(
+ PortAllocatorSession* CreateSessionInternal(
const std::string& content_name,
int component,
const std::string& ice_ufrag,
- const std::string& ice_pwd);
+ const std::string& ice_pwd) override;
private:
void Construct();
rtc::NetworkManager* network_manager_;
rtc::PacketSocketFactory* socket_factory_;
- const ServerAddresses stun_servers_;
- std::vector<RelayServerConfig> relays_;
+ ServerAddresses stun_servers_;
+ std::vector<RelayServerConfig> turn_servers_;
bool allow_tcp_listen_;
+ int network_ignore_mask_ = rtc::kDefaultNetworkIgnoreMask;
};
struct PortConfiguration;
@@ -110,10 +105,10 @@ class BasicPortAllocatorSession : public PortAllocatorSession,
rtc::Thread* network_thread() { return network_thread_; }
rtc::PacketSocketFactory* socket_factory() { return socket_factory_; }
- virtual void StartGettingPorts();
- virtual void StopGettingPorts();
- virtual void ClearGettingPorts();
- virtual bool IsGettingPorts() { return running_; }
+ void StartGettingPorts() override;
+ void StopGettingPorts() override;
+ void ClearGettingPorts() override;
+ bool IsGettingPorts() override { return running_; }
protected:
// Starts the process of getting the port configurations.
@@ -124,7 +119,7 @@ class BasicPortAllocatorSession : public PortAllocatorSession,
virtual void ConfigReady(PortConfiguration* config);
// MessageHandler. Can be overriden if message IDs do not conflict.
- virtual void OnMessage(rtc::Message *message);
+ void OnMessage(rtc::Message* message) override;
private:
class PortData {
@@ -204,6 +199,7 @@ class BasicPortAllocatorSession : public PortAllocatorSession,
};
// Records configuration information useful in creating ports.
+// TODO(deadbeef): Rename "relay" to "turn_server" in this struct.
struct PortConfiguration : public rtc::MessageData {
// TODO(jiayl): remove |stun_address| when Chrome is updated.
rtc::SocketAddress stun_address;
diff --git a/webrtc/p2p/client/fakeportallocator.h b/webrtc/p2p/client/fakeportallocator.h
index dca86f633e..fb188261a2 100644
--- a/webrtc/p2p/client/fakeportallocator.h
+++ b/webrtc/p2p/client/fakeportallocator.h
@@ -24,6 +24,62 @@ class Thread;
namespace cricket {
+class TestUDPPort : public UDPPort {
+ public:
+ static TestUDPPort* Create(rtc::Thread* thread,
+ rtc::PacketSocketFactory* factory,
+ rtc::Network* network,
+ const rtc::IPAddress& ip,
+ uint16_t min_port,
+ uint16_t max_port,
+ const std::string& username,
+ const std::string& password,
+ const std::string& origin,
+ bool emit_localhost_for_anyaddress) {
+ TestUDPPort* port = new TestUDPPort(thread, factory, network, ip, min_port,
+ max_port, username, password, origin,
+ emit_localhost_for_anyaddress);
+ if (!port->Init()) {
+ delete port;
+ port = nullptr;
+ }
+ return port;
+ }
+ void SendBindingResponse(StunMessage* request,
+ const rtc::SocketAddress& addr) override {
+ UDPPort::SendBindingResponse(request, addr);
+ sent_binding_response_ = true;
+ }
+ bool sent_binding_response() { return sent_binding_response_; }
+ void set_sent_binding_response(bool response) {
+ sent_binding_response_ = response;
+ }
+
+ protected:
+ TestUDPPort(rtc::Thread* thread,
+ rtc::PacketSocketFactory* factory,
+ rtc::Network* network,
+ const rtc::IPAddress& ip,
+ uint16_t min_port,
+ uint16_t max_port,
+ const std::string& username,
+ const std::string& password,
+ const std::string& origin,
+ bool emit_localhost_for_anyaddress)
+ : UDPPort(thread,
+ factory,
+ network,
+ ip,
+ min_port,
+ max_port,
+ username,
+ password,
+ origin,
+ emit_localhost_for_anyaddress) {}
+
+ bool sent_binding_response_ = false;
+};
+
class FakePortAllocatorSession : public PortAllocatorSession {
public:
FakePortAllocatorSession(rtc::Thread* worker_thread,
@@ -45,16 +101,9 @@ class FakePortAllocatorSession : public PortAllocatorSession {
virtual void StartGettingPorts() {
if (!port_) {
- port_.reset(cricket::UDPPort::Create(worker_thread_,
- factory_,
- &network_,
- network_.GetBestIP(),
- 0,
- 0,
- username(),
- password(),
- std::string(),
- false));
+ port_.reset(TestUDPPort::Create(worker_thread_, factory_, &network_,
+ network_.GetBestIP(), 0, 0, username(),
+ password(), std::string(), false));
AddPort(port_.get());
}
++port_config_count_;
@@ -101,11 +150,26 @@ class FakePortAllocator : public cricket::PortAllocator {
}
}
+ void SetIceServers(
+ const ServerAddresses& stun_servers,
+ const std::vector<RelayServerConfig>& turn_servers) override {
+ stun_servers_ = stun_servers;
+ turn_servers_ = turn_servers;
+ }
+
+ void SetNetworkIgnoreMask(int network_ignore_mask) override {}
+
+ const ServerAddresses& stun_servers() const { return stun_servers_; }
+
+ const std::vector<RelayServerConfig>& turn_servers() const {
+ return turn_servers_;
+ }
+
virtual cricket::PortAllocatorSession* CreateSessionInternal(
const std::string& content_name,
int component,
const std::string& ice_ufrag,
- const std::string& ice_pwd) {
+ const std::string& ice_pwd) override {
return new FakePortAllocatorSession(
worker_thread_, factory_, content_name, component, ice_ufrag, ice_pwd);
}
@@ -114,6 +178,8 @@ class FakePortAllocator : public cricket::PortAllocator {
rtc::Thread* worker_thread_;
rtc::PacketSocketFactory* factory_;
rtc::scoped_ptr<rtc::BasicPacketSocketFactory> owned_factory_;
+ ServerAddresses stun_servers_;
+ std::vector<RelayServerConfig> turn_servers_;
};
} // namespace cricket
diff --git a/webrtc/p2p/client/httpportallocator.cc b/webrtc/p2p/client/httpportallocator.cc
index a2d5038f90..1342cf70e9 100644
--- a/webrtc/p2p/client/httpportallocator.cc
+++ b/webrtc/p2p/client/httpportallocator.cc
@@ -13,7 +13,6 @@
#include <algorithm>
#include <map>
-#include "webrtc/base/basicdefs.h"
#include "webrtc/base/common.h"
#include "webrtc/base/helpers.h"
#include "webrtc/base/httpcommon.h"
diff --git a/webrtc/p2p/client/portallocator_unittest.cc b/webrtc/p2p/client/portallocator_unittest.cc
index 9617688302..5fce3b5762 100644
--- a/webrtc/p2p/client/portallocator_unittest.cc
+++ b/webrtc/p2p/client/portallocator_unittest.cc
@@ -32,6 +32,7 @@
#include "webrtc/base/virtualsocketserver.h"
using cricket::ServerAddresses;
+using rtc::IPAddress;
using rtc::SocketAddress;
using rtc::Thread;
@@ -114,6 +115,17 @@ class PortAllocatorTest : public testing::Test, public sigslot::has_slots<> {
void AddInterface(const SocketAddress& addr, const std::string& if_name) {
network_manager_.AddInterface(addr, if_name);
}
+ void AddInterface(const SocketAddress& addr,
+ const std::string& if_name,
+ rtc::AdapterType type) {
+ network_manager_.AddInterface(addr, if_name, type);
+ }
+ // The default route is the public address that STUN server will observe when
+ // the endpoint is sitting on the public internet and the local port is bound
+ // to the "any" address. This may be different from the default local address
+ // which the endpoint observes. This can occur if the route to the public
+ // endpoint like 8.8.8.8 (specified as the default local address) is
+ // different from the route to the STUN server (the default route).
void AddInterfaceAsDefaultRoute(const SocketAddress& addr) {
AddInterface(addr);
// When a binding comes from the any address, the |addr| will be used as the
@@ -148,19 +160,19 @@ class PortAllocatorTest : public testing::Test, public sigslot::has_slots<> {
void AddTurnServers(const rtc::SocketAddress& udp_turn,
const rtc::SocketAddress& tcp_turn) {
- cricket::RelayServerConfig relay_server(cricket::RELAY_TURN);
+ cricket::RelayServerConfig turn_server(cricket::RELAY_TURN);
cricket::RelayCredentials credentials(kTurnUsername, kTurnPassword);
- relay_server.credentials = credentials;
+ turn_server.credentials = credentials;
if (!udp_turn.IsNil()) {
- relay_server.ports.push_back(cricket::ProtocolAddress(
- kTurnUdpIntAddr, cricket::PROTO_UDP, false));
+ turn_server.ports.push_back(
+ cricket::ProtocolAddress(kTurnUdpIntAddr, cricket::PROTO_UDP, false));
}
if (!tcp_turn.IsNil()) {
- relay_server.ports.push_back(cricket::ProtocolAddress(
- kTurnTcpIntAddr, cricket::PROTO_TCP, false));
+ turn_server.ports.push_back(
+ cricket::ProtocolAddress(kTurnTcpIntAddr, cricket::PROTO_TCP, false));
}
- allocator_->AddRelay(relay_server);
+ allocator_->AddTurnServer(turn_server);
}
bool CreateSession(int component) {
@@ -254,6 +266,8 @@ class PortAllocatorTest : public testing::Test, public sigslot::has_slots<> {
const rtc::IPAddress& stun_candidate_addr,
const rtc::IPAddress& relay_candidate_udp_transport_addr,
const rtc::IPAddress& relay_candidate_tcp_transport_addr) {
+ network_manager_.set_default_local_addresses(kPrivateAddr.ipaddr(),
+ rtc::IPAddress());
if (!session_) {
EXPECT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP));
}
@@ -268,16 +282,20 @@ class PortAllocatorTest : public testing::Test, public sigslot::has_slots<> {
if (!host_candidate_addr.IsNil()) {
EXPECT_PRED5(CheckCandidate, candidates_[total_candidates],
cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "udp",
- rtc::SocketAddress(host_candidate_addr, 0));
+ rtc::SocketAddress(kPrivateAddr.ipaddr(), 0));
++total_candidates;
}
if (!stun_candidate_addr.IsNil()) {
EXPECT_PRED5(CheckCandidate, candidates_[total_candidates],
cricket::ICE_CANDIDATE_COMPONENT_RTP, "stun", "udp",
rtc::SocketAddress(stun_candidate_addr, 0));
- EXPECT_EQ(rtc::EmptySocketAddressWithFamily(
- candidates_[total_candidates].address().family()),
- candidates_[total_candidates].related_address());
+ rtc::IPAddress related_address = host_candidate_addr;
+ if (host_candidate_addr.IsNil()) {
+ related_address =
+ rtc::GetAnyIP(candidates_[total_candidates].address().family());
+ }
+ EXPECT_EQ(related_address,
+ candidates_[total_candidates].related_address().ipaddr());
++total_candidates;
}
if (!relay_candidate_udp_transport_addr.IsNil()) {
@@ -320,8 +338,8 @@ class PortAllocatorTest : public testing::Test, public sigslot::has_slots<> {
}
bool HasRelayAddress(const cricket::ProtocolAddress& proto_addr) {
- for (size_t i = 0; i < allocator_->relays().size(); ++i) {
- cricket::RelayServerConfig server_config = allocator_->relays()[i];
+ for (size_t i = 0; i < allocator_->turn_servers().size(); ++i) {
+ cricket::RelayServerConfig server_config = allocator_->turn_servers()[i];
cricket::PortList::const_iterator relay_port;
for (relay_port = server_config.ports.begin();
relay_port != server_config.ports.end(); ++relay_port) {
@@ -374,11 +392,11 @@ class PortAllocatorTest : public testing::Test, public sigslot::has_slots<> {
TEST_F(PortAllocatorTest, TestBasic) {
EXPECT_EQ(&network_manager_, allocator().network_manager());
EXPECT_EQ(kStunAddr, *allocator().stun_servers().begin());
- ASSERT_EQ(1u, allocator().relays().size());
- EXPECT_EQ(cricket::RELAY_GTURN, allocator().relays()[0].type);
+ ASSERT_EQ(1u, allocator().turn_servers().size());
+ EXPECT_EQ(cricket::RELAY_GTURN, allocator().turn_servers()[0].type);
// Empty relay credentials are used for GTURN.
- EXPECT_TRUE(allocator().relays()[0].credentials.username.empty());
- EXPECT_TRUE(allocator().relays()[0].credentials.password.empty());
+ EXPECT_TRUE(allocator().turn_servers()[0].credentials.username.empty());
+ EXPECT_TRUE(allocator().turn_servers()[0].credentials.password.empty());
EXPECT_TRUE(HasRelayAddress(cricket::ProtocolAddress(
kRelayUdpIntAddr, cricket::PROTO_UDP)));
EXPECT_TRUE(HasRelayAddress(cricket::ProtocolAddress(
@@ -388,6 +406,50 @@ TEST_F(PortAllocatorTest, TestBasic) {
EXPECT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP));
}
+// Tests that our network filtering works properly.
+TEST_F(PortAllocatorTest, TestIgnoreOnlyLoopbackNetworkByDefault) {
+ AddInterface(SocketAddress(IPAddress(0x12345600U), 0), "test_eth0",
+ rtc::ADAPTER_TYPE_ETHERNET);
+ AddInterface(SocketAddress(IPAddress(0x12345601U), 0), "test_wlan0",
+ rtc::ADAPTER_TYPE_WIFI);
+ AddInterface(SocketAddress(IPAddress(0x12345602U), 0), "test_cell0",
+ rtc::ADAPTER_TYPE_CELLULAR);
+ AddInterface(SocketAddress(IPAddress(0x12345603U), 0), "test_vpn0",
+ rtc::ADAPTER_TYPE_VPN);
+ AddInterface(SocketAddress(IPAddress(0x12345604U), 0), "test_lo",
+ rtc::ADAPTER_TYPE_LOOPBACK);
+ EXPECT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP));
+ session_->set_flags(cricket::PORTALLOCATOR_DISABLE_STUN |
+ cricket::PORTALLOCATOR_DISABLE_RELAY |
+ cricket::PORTALLOCATOR_DISABLE_TCP);
+ session_->StartGettingPorts();
+ EXPECT_TRUE_WAIT(candidate_allocation_done_, kDefaultAllocationTimeout);
+ EXPECT_EQ(4U, candidates_.size());
+ for (cricket::Candidate candidate : candidates_) {
+ EXPECT_LT(candidate.address().ip(), 0x12345604U);
+ }
+}
+
+TEST_F(PortAllocatorTest, TestIgnoreNetworksAccordingToIgnoreMask) {
+ AddInterface(SocketAddress(IPAddress(0x12345600U), 0), "test_eth0",
+ rtc::ADAPTER_TYPE_ETHERNET);
+ AddInterface(SocketAddress(IPAddress(0x12345601U), 0), "test_wlan0",
+ rtc::ADAPTER_TYPE_WIFI);
+ AddInterface(SocketAddress(IPAddress(0x12345602U), 0), "test_cell0",
+ rtc::ADAPTER_TYPE_CELLULAR);
+ allocator_->SetNetworkIgnoreMask(rtc::ADAPTER_TYPE_ETHERNET |
+ rtc::ADAPTER_TYPE_LOOPBACK |
+ rtc::ADAPTER_TYPE_WIFI);
+ EXPECT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP));
+ session_->set_flags(cricket::PORTALLOCATOR_DISABLE_STUN |
+ cricket::PORTALLOCATOR_DISABLE_RELAY |
+ cricket::PORTALLOCATOR_DISABLE_TCP);
+ session_->StartGettingPorts();
+ EXPECT_TRUE_WAIT(candidate_allocation_done_, kDefaultAllocationTimeout);
+ EXPECT_EQ(1U, candidates_.size());
+ EXPECT_EQ(0x12345602U, candidates_[0].address().ip());
+}
+
// Tests that the allocator session does not keep trying to allocate ports every 250ms.
TEST_F(PortAllocatorTest, TestNoNetworkInterface) {
EXPECT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP));
@@ -403,7 +465,8 @@ TEST_F(PortAllocatorTest, TestNoNetworkInterface) {
// Test that we can use the loopback interface as a host candidate.
TEST_F(PortAllocatorTest, TestLoopbackNetworkInterface) {
- AddInterface(kLoopbackAddr);
+ AddInterface(kLoopbackAddr, "test_loopback", rtc::ADAPTER_TYPE_LOOPBACK);
+ allocator_->SetNetworkIgnoreMask(0);
EXPECT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP));
session_->set_flags(cricket::PORTALLOCATOR_DISABLE_STUN |
cricket::PORTALLOCATOR_DISABLE_RELAY |
@@ -589,7 +652,6 @@ TEST_F(PortAllocatorTest, TestGetAllPortsNoAdapters) {
// candidate_filter() is set to CF_RELAY and no relay is specified.
TEST_F(PortAllocatorTest,
TestDisableAdapterEnumerationWithoutNatRelayTransportOnly) {
- AddInterfaceAsDefaultRoute(kClientAddr);
ResetWithStunServerNoNat(kStunAddr);
allocator().set_candidate_filter(cricket::CF_RELAY);
// Expect to see no ports and no candidates.
@@ -597,86 +659,96 @@ TEST_F(PortAllocatorTest,
rtc::IPAddress(), rtc::IPAddress());
}
-// Test that we should only get STUN and TURN candidates when adapter
-// enumeration is disabled.
-TEST_F(PortAllocatorTest, TestDisableAdapterEnumerationBehindNat) {
- AddInterface(kClientAddr);
- // GTURN is not configured here.
- ResetWithStunServerAndNat(kStunAddr);
- AddTurnServers(kTurnUdpIntAddr, rtc::SocketAddress());
- // Expect to see 3 ports: STUN, TURN/UDP and TCP ports, and both STUN and
- // TURN/UDP candidates.
- CheckDisableAdapterEnumeration(3U, rtc::IPAddress(), kNatUdpAddr.ipaddr(),
- kTurnUdpExtAddr.ipaddr(), rtc::IPAddress());
-}
-
-// Test that even with multiple interfaces, the result should still be one STUN
-// and one TURN candidate since we bind to any address (i.e. all 0s).
+// Test that even with multiple interfaces, the result should still be a single
+// default private, one STUN and one TURN candidate since we bind to any address
+// (i.e. all 0s).
TEST_F(PortAllocatorTest,
TestDisableAdapterEnumerationBehindNatMultipleInterfaces) {
AddInterface(kPrivateAddr);
AddInterface(kPrivateAddr2);
ResetWithStunServerAndNat(kStunAddr);
AddTurnServers(kTurnUdpIntAddr, rtc::SocketAddress());
- // Expect to see 3 ports: STUN, TURN/UDP and TCP ports, and both STUN and
+
+ // Enable IPv6 here. Since the network_manager doesn't have IPv6 default
+ // address set and we have no IPv6 STUN server, there should be no IPv6
+ // candidates.
+ EXPECT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP));
+ session_->set_flags(cricket::PORTALLOCATOR_ENABLE_IPV6);
+
+ // Expect to see 3 ports for IPv4: HOST/STUN, TURN/UDP and TCP ports, 2 ports
+ // for IPv6: HOST, and TCP. Only IPv4 candidates: a default private, STUN and
// TURN/UDP candidates.
- CheckDisableAdapterEnumeration(3U, rtc::IPAddress(), kNatUdpAddr.ipaddr(),
- kTurnUdpExtAddr.ipaddr(), rtc::IPAddress());
+ CheckDisableAdapterEnumeration(5U, kPrivateAddr.ipaddr(),
+ kNatUdpAddr.ipaddr(), kTurnUdpExtAddr.ipaddr(),
+ rtc::IPAddress());
}
-// Test that we should get STUN, TURN/UDP and TURN/TCP candidates when a
-// TURN/TCP server is specified.
+// Test that we should get a default private, STUN, TURN/UDP and TURN/TCP
+// candidates when both TURN/UDP and TURN/TCP servers are specified.
TEST_F(PortAllocatorTest, TestDisableAdapterEnumerationBehindNatWithTcp) {
turn_server_.AddInternalSocket(kTurnTcpIntAddr, cricket::PROTO_TCP);
- AddInterface(kClientAddr);
- // GTURN is not configured here.
+ AddInterface(kPrivateAddr);
ResetWithStunServerAndNat(kStunAddr);
AddTurnServers(kTurnUdpIntAddr, kTurnTcpIntAddr);
- // Expect to see 4 ports - STUN, TURN/UDP, TURN/TCP and TCP port. STUN,
- // TURN/UDP, and TURN/TCP candidates.
- CheckDisableAdapterEnumeration(4U, rtc::IPAddress(), kNatUdpAddr.ipaddr(),
- kTurnUdpExtAddr.ipaddr(),
+ // Expect to see 4 ports - STUN, TURN/UDP, TURN/TCP and TCP port. A default
+ // private, STUN, TURN/UDP, and TURN/TCP candidates.
+ CheckDisableAdapterEnumeration(4U, kPrivateAddr.ipaddr(),
+ kNatUdpAddr.ipaddr(), kTurnUdpExtAddr.ipaddr(),
kTurnUdpExtAddr.ipaddr());
}
-// Test that we should only get STUN and TURN candidates when adapter
-// enumeration is disabled. Since the endpoint is not behind NAT, the srflx
-// address should be the public client interface.
-TEST_F(PortAllocatorTest, TestDisableAdapterEnumerationWithoutNat) {
- AddInterfaceAsDefaultRoute(kClientAddr);
- ResetWithStunServerNoNat(kStunAddr);
- AddTurnServers(kTurnUdpIntAddr, rtc::SocketAddress());
- // Expect to see 3 ports: STUN, TURN/UDP and TCP ports, but only both STUN and
- // TURN candidates. The STUN candidate should have kClientAddr as srflx
- // address, and TURN candidate with kClientAddr as the related address.
- CheckDisableAdapterEnumeration(3U, rtc::IPAddress(), kClientAddr.ipaddr(),
- kTurnUdpExtAddr.ipaddr(), rtc::IPAddress());
-}
-
// Test that when adapter enumeration is disabled, for endpoints without
-// STUN/TURN specified, no candidate is generated.
+// STUN/TURN specified, a default private candidate is still generated.
TEST_F(PortAllocatorTest, TestDisableAdapterEnumerationWithoutNatOrServers) {
- AddInterfaceAsDefaultRoute(kClientAddr);
ResetWithNoServersOrNat();
- // Expect to see 2 ports: STUN and TCP ports, but no candidate.
- CheckDisableAdapterEnumeration(2U, rtc::IPAddress(), rtc::IPAddress(),
+ // Expect to see 2 ports (STUN and TCP) and one default private candidate.
+ CheckDisableAdapterEnumeration(2U, kPrivateAddr.ipaddr(), rtc::IPAddress(),
rtc::IPAddress(), rtc::IPAddress());
}
// Test that when adapter enumeration is disabled, with
-// PORTALLOCATOR_ENABLE_LOCALHOST_CANDIDATE specified, for endpoints not behind
-// a NAT, there are a localhost candidate in addition to a STUN candidate.
+// PORTALLOCATOR_DISABLE_DEFAULT_LOCAL_CANDIDATE specified, for endpoints not
+// behind a NAT, there is no local candidate.
TEST_F(PortAllocatorTest,
- TestDisableAdapterEnumerationWithoutNatLocalhostCandidateRequested) {
- AddInterfaceAsDefaultRoute(kClientAddr);
+ TestDisableAdapterEnumerationWithoutNatLocalhostCandidateDisabled) {
ResetWithStunServerNoNat(kStunAddr);
EXPECT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP));
- session_->set_flags(cricket::PORTALLOCATOR_ENABLE_LOCALHOST_CANDIDATE);
+ session_->set_flags(cricket::PORTALLOCATOR_DISABLE_DEFAULT_LOCAL_CANDIDATE);
// Expect to see 2 ports: STUN and TCP ports, but no candidates.
- CheckDisableAdapterEnumeration(2U, rtc::GetLoopbackIP(AF_INET),
- kClientAddr.ipaddr(), rtc::IPAddress(),
- rtc::IPAddress());
+ CheckDisableAdapterEnumeration(2U, rtc::IPAddress(), rtc::IPAddress(),
+ rtc::IPAddress(), rtc::IPAddress());
+}
+
+// Test that when adapter enumeration is disabled, with
+// PORTALLOCATOR_DISABLE_DEFAULT_LOCAL_CANDIDATE specified, for endpoints not
+// behind a NAT, there is no local candidate. However, the specified default
+// route (kClientAddr), which is discovered when sending STUN requests, becomes
+// the srflx address.
+TEST_F(
+ PortAllocatorTest,
+ TestDisableAdapterEnumerationWithoutNatLocalhostCandidateDisabledWithDifferentDefaultRoute) {
+ ResetWithStunServerNoNat(kStunAddr);
+ AddInterfaceAsDefaultRoute(kClientAddr);
+ EXPECT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP));
+ session_->set_flags(cricket::PORTALLOCATOR_DISABLE_DEFAULT_LOCAL_CANDIDATE);
+ // Expect to see 2 ports: STUN and TCP ports, with no local candidate but a
+ // STUN candidate whose srflx address is kClientAddr.
+ CheckDisableAdapterEnumeration(2U, rtc::IPAddress(), kClientAddr.ipaddr(),
+ rtc::IPAddress(), rtc::IPAddress());
+}
+
+// Test that when adapter enumeration is disabled, with
+// PORTALLOCATOR_DISABLE_DEFAULT_LOCAL_CANDIDATE specified, for endpoints
+// behind a NAT, there is only one STUN candidate.
+TEST_F(PortAllocatorTest,
+ TestDisableAdapterEnumerationWithNatLocalhostCandidateDisabled) {
+ ResetWithStunServerAndNat(kStunAddr);
+ EXPECT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP));
+ session_->set_flags(cricket::PORTALLOCATOR_DISABLE_DEFAULT_LOCAL_CANDIDATE);
+ // Expect to see 2 ports: STUN and TCP ports, and a single STUN candidate.
+ CheckDisableAdapterEnumeration(2U, rtc::IPAddress(), kNatUdpAddr.ipaddr(),
+ rtc::IPAddress(), rtc::IPAddress());
}
// Test that we disable relay over UDP, and only TCP is used when connecting to
@@ -1026,13 +1098,12 @@ TEST_F(PortAllocatorTest, TestSharedSocketWithServerAddressResolve) {
cricket::PROTO_UDP);
AddInterface(kClientAddr);
allocator_.reset(new cricket::BasicPortAllocator(&network_manager_));
- cricket::RelayServerConfig relay_server(cricket::RELAY_TURN);
+ cricket::RelayServerConfig turn_server(cricket::RELAY_TURN);
cricket::RelayCredentials credentials(kTurnUsername, kTurnPassword);
- relay_server.credentials = credentials;
- relay_server.ports.push_back(cricket::ProtocolAddress(
- rtc::SocketAddress("localhost", 3478),
- cricket::PROTO_UDP, false));
- allocator_->AddRelay(relay_server);
+ turn_server.credentials = credentials;
+ turn_server.ports.push_back(cricket::ProtocolAddress(
+ rtc::SocketAddress("localhost", 3478), cricket::PROTO_UDP, false));
+ allocator_->AddTurnServer(turn_server);
allocator_->set_step_delay(cricket::kMinimumStepDelay);
allocator_->set_flags(allocator().flags() |
@@ -1244,7 +1315,8 @@ TEST_F(PortAllocatorTest, TestSharedSocketNoUdpAllowed) {
// adapters, the PORTALLOCATOR_DISABLE_ADAPTER_ENUMERATION is specified
// automatically.
TEST_F(PortAllocatorTest, TestNetworkPermissionBlocked) {
- AddInterface(kClientAddr);
+ network_manager_.set_default_local_addresses(kPrivateAddr.ipaddr(),
+ rtc::IPAddress());
network_manager_.set_enumeration_permission(
rtc::NetworkManager::ENUMERATION_BLOCKED);
allocator().set_flags(allocator().flags() |
@@ -1258,7 +1330,10 @@ TEST_F(PortAllocatorTest, TestNetworkPermissionBlocked) {
cricket::PORTALLOCATOR_DISABLE_ADAPTER_ENUMERATION);
session_->StartGettingPorts();
EXPECT_EQ_WAIT(1U, ports_.size(), kDefaultAllocationTimeout);
- EXPECT_EQ(0U, candidates_.size());
+ EXPECT_EQ(1U, candidates_.size());
+ EXPECT_PRED5(CheckCandidate, candidates_[0],
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "udp",
+ kPrivateAddr);
EXPECT_TRUE((session_->flags() &
cricket::PORTALLOCATOR_DISABLE_ADAPTER_ENUMERATION) != 0);
}
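For reference, a minimal sketch of how a client could combine the two flags these tests exercise (the cricket::BasicPortAllocator creation and the STUN/TURN server setup from the fixture above are assumed; this is not part of the diff):

    // Sketch only. With adapter enumeration disabled, gathering binds to
    // the any-address; additionally disabling the default local candidate
    // leaves only srflx/relay candidates, as in the NAT'd test above.
    void ConfigureAllocatorFlags(cricket::BasicPortAllocator* allocator) {
      allocator->set_flags(
          allocator->flags() |
          cricket::PORTALLOCATOR_DISABLE_ADAPTER_ENUMERATION |
          cricket::PORTALLOCATOR_DISABLE_DEFAULT_LOCAL_CANDIDATE);
    }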
diff --git a/webrtc/p2p/stunprober/stunprober.cc b/webrtc/p2p/stunprober/stunprober.cc
index ee9eb2258c..9316ea89bd 100644
--- a/webrtc/p2p/stunprober/stunprober.cc
+++ b/webrtc/p2p/stunprober/stunprober.cc
@@ -460,6 +460,7 @@ bool StunProber::GetStats(StunProber::Stats* prob_stats) const {
continue;
}
+ ++stats.raw_num_request_sent;
IncrementCounterByAddress(&num_request_per_server, request->server_addr);
if (!first_sent_time) {
@@ -503,11 +504,6 @@ bool StunProber::GetStats(StunProber::Stats* prob_stats) const {
num_sent += num_request_per_server[kv.first];
}
- // Not receiving any response, the trial is inconclusive.
- if (!num_received) {
- return false;
- }
-
// Shared mode is only true if we use the shared socket and there are more
// than 1 responding servers.
stats.shared_socket_mode =
@@ -519,7 +515,8 @@ bool StunProber::GetStats(StunProber::Stats* prob_stats) const {
// If we could find a local IP matching srflx, we're not behind a NAT.
rtc::SocketAddress srflx_addr;
- if (!srflx_addr.FromString(*(stats.srflx_addrs.begin()))) {
+ if (stats.srflx_addrs.size() &&
+ !srflx_addr.FromString(*(stats.srflx_addrs.begin()))) {
return false;
}
for (const auto& net : networks_) {
@@ -544,9 +541,10 @@ bool StunProber::GetStats(StunProber::Stats* prob_stats) const {
stats.success_percent = static_cast<int>(100 * num_received / num_sent);
}
- if (num_sent > 1) {
+ if (stats.raw_num_request_sent > 1) {
stats.actual_request_interval_ns =
- (1000 * (last_sent_time - first_sent_time)) / (num_sent - 1);
+ (1000 * (last_sent_time - first_sent_time)) /
+ (stats.raw_num_request_sent - 1);
}
if (num_received) {
diff --git a/webrtc/p2p/stunprober/stunprober.h b/webrtc/p2p/stunprober/stunprober.h
index 9d2ad222e5..b725cbef0a 100644
--- a/webrtc/p2p/stunprober/stunprober.h
+++ b/webrtc/p2p/stunprober/stunprober.h
@@ -71,7 +71,14 @@ class StunProber : public sigslot::has_slots<> {
struct Stats {
Stats() {}
+ // |raw_num_request_sent| is the total number of requests
+ // sent. |num_request_sent| is the count of requests against a server where
+ // we see at least one response. |num_request_sent| is designed to protect
+ // against DNS resolution failures or unresponsive STUN servers, which could
+ // otherwise skew the result.
+ int raw_num_request_sent = 0;
int num_request_sent = 0;
+
int num_response_received = 0;
NatType nat_type = NATTYPE_INVALID;
int average_rtt_ms = -1;
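To make the new counter concrete: GetStats() (see the stunprober.cc hunk above) now derives the inter-request interval from all raw sends rather than only from requests against responding servers. A standalone sketch of that arithmetic; the timestamp unit is whatever stunprober.cc stores in first_sent_time/last_sent_time:

    #include <cstdint>

    // N requests have N - 1 gaps, so guard against fewer than two sends
    // before dividing, mirroring the raw_num_request_sent > 1 check above.
    int64_t ActualRequestInterval(int64_t first_sent_time,
                                  int64_t last_sent_time,
                                  int raw_num_request_sent) {
      if (raw_num_request_sent <= 1)
        return 0;  // Not enough sends to measure an interval.
      return (1000 * (last_sent_time - first_sent_time)) /
             (raw_num_request_sent - 1);
    }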
diff --git a/webrtc/sound/OWNERS b/webrtc/sound/OWNERS
index b6d6626934..0f00d1aa48 100644
--- a/webrtc/sound/OWNERS
+++ b/webrtc/sound/OWNERS
@@ -9,5 +9,9 @@ pthatcher@webrtc.org
sergeyu@chromium.org
tommi@webrtc.org
+# These are for the common case of adding or renaming files. If you're making
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
per-file BUILD.gn=kjellander@webrtc.org
diff --git a/webrtc/sound/alsasoundsystem.cc b/webrtc/sound/alsasoundsystem.cc
index 3cc77a988c..696ff1e450 100644
--- a/webrtc/sound/alsasoundsystem.cc
+++ b/webrtc/sound/alsasoundsystem.cc
@@ -11,15 +11,18 @@
#include "webrtc/sound/alsasoundsystem.h"
#include <algorithm>
-#include "webrtc/sound/sounddevicelocator.h"
-#include "webrtc/sound/soundinputstreaminterface.h"
-#include "webrtc/sound/soundoutputstreaminterface.h"
+#include <string>
+
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/common.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/stringutils.h"
#include "webrtc/base/timeutils.h"
#include "webrtc/base/worker.h"
+#include "webrtc/sound/sounddevicelocator.h"
+#include "webrtc/sound/soundinputstreaminterface.h"
+#include "webrtc/sound/soundoutputstreaminterface.h"
namespace rtc {
@@ -257,12 +260,12 @@ class AlsaInputStream :
}
bool GetVolume(int *volume) override {
- // TODO: Implement this.
+ // TODO(henrika): Implement this.
return false;
}
bool SetVolume(int volume) override {
- // TODO: Implement this.
+ // TODO(henrika): Implement this.
return false;
}
@@ -388,12 +391,12 @@ class AlsaOutputStream : public SoundOutputStreamInterface,
}
bool GetVolume(int *volume) override {
- // TODO: Implement this.
+ // TODO(henrika): Implement this.
return false;
}
bool SetVolume(int volume) override {
- // TODO: Implement this.
+ // TODO(henrika): Implement this.
return false;
}
@@ -567,7 +570,6 @@ bool AlsaSoundSystem::EnumerateDevices(
strcmp(name, ignore_null) != 0 &&
strcmp(name, ignore_pulse) != 0 &&
!rtc::starts_with(name, ignore_prefix)) {
-
// Yes, we do.
char *desc = symbol_table_.snd_device_name_get_hint()(*list, "DESC");
if (!desc) {
@@ -606,8 +608,6 @@ bool AlsaSoundSystem::GetDefaultDevice(SoundDeviceLocator **device) {
}
inline size_t AlsaSoundSystem::FrameSize(const OpenParams &params) {
- ASSERT(static_cast<int>(params.format) <
- ARRAY_SIZE(kCricketFormatToSampleSizeTable));
return kCricketFormatToSampleSizeTable[params.format] * params.channels;
}
@@ -622,7 +622,6 @@ StreamInterface *AlsaSoundSystem::OpenDevice(
int wait_timeout_ms,
int flags,
int freq)) {
-
if (!IsInitialized()) {
return NULL;
}
@@ -662,8 +661,7 @@ StreamInterface *AlsaSoundSystem::OpenDevice(
latency = std::max(latency, kMinimumLatencyUsecs);
}
- ASSERT(static_cast<int>(params.format) <
- ARRAY_SIZE(kCricketFormatToAlsaFormatTable));
+ ASSERT(params.format < arraysize(kCricketFormatToAlsaFormatTable));
err = symbol_table_.snd_pcm_set_params()(
handle,
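The ARRAY_SIZE-to-arraysize migration in this file (and in the sound files below) replaces a sizeof-division macro with the type-safe template idiom. webrtc/base/arraysize.h itself is not shown in this diff; the standard form of the idiom it presumably provides:

    #include <cstddef>

    // Only a real array binds to the reference-to-array parameter, so
    // passing a pointer fails to compile, unlike sizeof(a) / sizeof(a[0]).
    template <typename T, size_t N>
    char (&ArraySizeHelper(T (&array)[N]))[N];
    #define arraysize(array) (sizeof(ArraySizeHelper(array)))

    // Usage:
    //   static const int kTable[] = {1, 2, 3};
    //   static_assert(arraysize(kTable) == 3, "deduced at compile time");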
diff --git a/webrtc/sound/automaticallychosensoundsystem_unittest.cc b/webrtc/sound/automaticallychosensoundsystem_unittest.cc
index 5cfd7c6fc7..318385c651 100644
--- a/webrtc/sound/automaticallychosensoundsystem_unittest.cc
+++ b/webrtc/sound/automaticallychosensoundsystem_unittest.cc
@@ -9,8 +9,10 @@
*/
#include "webrtc/sound/automaticallychosensoundsystem.h"
-#include "webrtc/sound/nullsoundsystem.h"
+
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/gunit.h"
+#include "webrtc/sound/nullsoundsystem.h"
namespace rtc {
@@ -112,7 +114,7 @@ extern const SoundSystemCreator kSingleSystemFailingCreators[] = {
TEST(AutomaticallyChosenSoundSystem, SingleSystemFailing) {
AutomaticallyChosenSoundSystem<
kSingleSystemFailingCreators,
- ARRAY_SIZE(kSingleSystemFailingCreators)> sound_system;
+ arraysize(kSingleSystemFailingCreators)> sound_system;
EXPECT_FALSE(sound_system.Init());
}
@@ -123,7 +125,7 @@ extern const SoundSystemCreator kSingleSystemSucceedingCreators[] = {
TEST(AutomaticallyChosenSoundSystem, SingleSystemSucceeding) {
AutomaticallyChosenSoundSystem<
kSingleSystemSucceedingCreators,
- ARRAY_SIZE(kSingleSystemSucceedingCreators)> sound_system;
+ arraysize(kSingleSystemSucceedingCreators)> sound_system;
EXPECT_TRUE(sound_system.Init());
}
@@ -136,7 +138,7 @@ extern const SoundSystemCreator
TEST(AutomaticallyChosenSoundSystem, FailedFirstSystemResultsInUsingSecond) {
AutomaticallyChosenSoundSystem<
kFailedFirstSystemResultsInUsingSecondCreators,
- ARRAY_SIZE(kFailedFirstSystemResultsInUsingSecondCreators)> sound_system;
+ arraysize(kFailedFirstSystemResultsInUsingSecondCreators)> sound_system;
EXPECT_TRUE(sound_system.Init());
}
@@ -148,7 +150,7 @@ extern const SoundSystemCreator kEarlierEntriesHavePriorityCreators[] = {
TEST(AutomaticallyChosenSoundSystem, EarlierEntriesHavePriority) {
AutomaticallyChosenSoundSystem<
kEarlierEntriesHavePriorityCreators,
- ARRAY_SIZE(kEarlierEntriesHavePriorityCreators)> sound_system;
+ arraysize(kEarlierEntriesHavePriorityCreators)> sound_system;
InitCheckingSoundSystem1::created_ = false;
InitCheckingSoundSystem2::created_ = false;
EXPECT_TRUE(sound_system.Init());
@@ -169,7 +171,7 @@ extern const SoundSystemCreator kManySoundSystemsCreators[] = {
TEST(AutomaticallyChosenSoundSystem, ManySoundSystems) {
AutomaticallyChosenSoundSystem<
kManySoundSystemsCreators,
- ARRAY_SIZE(kManySoundSystemsCreators)> sound_system;
+ arraysize(kManySoundSystemsCreators)> sound_system;
EXPECT_TRUE(sound_system.Init());
}
@@ -182,7 +184,7 @@ extern const SoundSystemCreator kDeletesAllCreatedSoundSystemsCreators[] = {
TEST(AutomaticallyChosenSoundSystem, DeletesAllCreatedSoundSystems) {
typedef AutomaticallyChosenSoundSystem<
kDeletesAllCreatedSoundSystemsCreators,
- ARRAY_SIZE(kDeletesAllCreatedSoundSystemsCreators)> TestSoundSystem;
+ arraysize(kDeletesAllCreatedSoundSystemsCreators)> TestSoundSystem;
TestSoundSystem *sound_system = new TestSoundSystem();
DeletionCheckingSoundSystem1::deleted_ = false;
DeletionCheckingSoundSystem2::deleted_ = false;
diff --git a/webrtc/sound/linuxsoundsystem.h b/webrtc/sound/linuxsoundsystem.h
index 0016f8a428..56721a1faf 100644
--- a/webrtc/sound/linuxsoundsystem.h
+++ b/webrtc/sound/linuxsoundsystem.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_SOUND_LINUXSOUNDSYSTEM_H_
#define WEBRTC_SOUND_LINUXSOUNDSYSTEM_H_
+#include "webrtc/base/arraysize.h"
#include "webrtc/sound/automaticallychosensoundsystem.h"
namespace rtc {
@@ -34,7 +35,7 @@ extern const SoundSystemCreator kLinuxSoundSystemCreators[
// initializes then we choose that. Otherwise we choose ALSA.
typedef AutomaticallyChosenSoundSystem<
kLinuxSoundSystemCreators,
- ARRAY_SIZE(kLinuxSoundSystemCreators)> LinuxSoundSystem;
+ arraysize(kLinuxSoundSystemCreators)> LinuxSoundSystem;
} // namespace rtc
diff --git a/webrtc/sound/pulseaudiosoundsystem.cc b/webrtc/sound/pulseaudiosoundsystem.cc
index b44a1dfad7..15da76c583 100644
--- a/webrtc/sound/pulseaudiosoundsystem.cc
+++ b/webrtc/sound/pulseaudiosoundsystem.cc
@@ -13,14 +13,17 @@
#ifdef HAVE_LIBPULSE
#include <algorithm>
-#include "webrtc/sound/sounddevicelocator.h"
-#include "webrtc/sound/soundinputstreaminterface.h"
-#include "webrtc/sound/soundoutputstreaminterface.h"
+#include <string>
+
+#include "webrtc/base/arraysize.h"
#include "webrtc/base/common.h"
#include "webrtc/base/fileutils.h" // for GetApplicationName()
#include "webrtc/base/logging.h"
#include "webrtc/base/timeutils.h"
#include "webrtc/base/worker.h"
+#include "webrtc/sound/sounddevicelocator.h"
+#include "webrtc/sound/soundinputstreaminterface.h"
+#include "webrtc/sound/soundoutputstreaminterface.h"
namespace rtc {
@@ -214,17 +217,6 @@ class PulseAudioStream {
class PulseAudioInputStream :
public SoundInputStreamInterface,
private rtc::Worker {
-
- struct GetVolumeCallbackData {
- PulseAudioInputStream *instance;
- pa_cvolume *channel_volumes;
- };
-
- struct GetSourceChannelCountCallbackData {
- PulseAudioInputStream *instance;
- uint8_t *channels;
- };
-
public:
PulseAudioInputStream(PulseAudioSoundSystem *pulse,
pa_stream *stream,
@@ -384,6 +376,16 @@ class PulseAudioInputStream :
}
private:
+ struct GetVolumeCallbackData {
+ PulseAudioInputStream* instance;
+ pa_cvolume* channel_volumes;
+ };
+
+ struct GetSourceChannelCountCallbackData {
+ PulseAudioInputStream* instance;
+ uint8_t* channels;
+ };
+
void Lock() {
stream_.Lock();
}
@@ -578,12 +580,6 @@ class PulseAudioInputStream :
class PulseAudioOutputStream :
public SoundOutputStreamInterface,
private rtc::Worker {
-
- struct GetVolumeCallbackData {
- PulseAudioOutputStream *instance;
- pa_cvolume *channel_volumes;
- };
-
public:
PulseAudioOutputStream(PulseAudioSoundSystem *pulse,
pa_stream *stream,
@@ -731,7 +727,7 @@ class PulseAudioOutputStream :
}
#if 0
- // TODO: Versions 0.9.16 and later of Pulse have a new API for
+ // TODO(henrika): Versions 0.9.16 and later of Pulse have a new API for
// zero-copy writes, but Hardy is not new enough to have that so we can't
// rely on it. Perhaps auto-detect if it's present or not and use it if we
// can?
@@ -775,6 +771,11 @@ class PulseAudioOutputStream :
#endif
private:
+ struct GetVolumeCallbackData {
+ PulseAudioOutputStream* instance;
+ pa_cvolume* channel_volumes;
+ };
+
void Lock() {
stream_.Lock();
}
@@ -1163,7 +1164,7 @@ bool PulseAudioSoundSystem::ConnectToPulse(pa_context *context) {
pa_context *PulseAudioSoundSystem::CreateNewConnection() {
// Create connection context.
std::string app_name;
- // TODO: Pulse etiquette says this name should be localized. Do
+ // TODO(henrika): Pulse etiquette says this name should be localized. Do
// we care?
rtc::Filesystem::GetApplicationName(&app_name);
pa_context *context = symbol_table_.pa_context_new()(
@@ -1373,7 +1374,7 @@ StreamInterface *PulseAudioSoundSystem::OpenDevice(
StreamInterface *stream_interface = NULL;
- ASSERT(params.format < ARRAY_SIZE(kCricketFormatToPulseFormatTable));
+ ASSERT(params.format < arraysize(kCricketFormatToPulseFormatTable));
pa_sample_spec spec;
spec.format = kCricketFormatToPulseFormatTable[params.format];
diff --git a/webrtc/sound/soundinputstreaminterface.h b/webrtc/sound/soundinputstreaminterface.h
index 2c934ba679..576ff71dd2 100644
--- a/webrtc/sound/soundinputstreaminterface.h
+++ b/webrtc/sound/soundinputstreaminterface.h
@@ -65,4 +65,4 @@ class SoundInputStreamInterface {
} // namespace rtc
-#endif // WEBRTC_SOUND_SOUNDOUTPUTSTREAMINTERFACE_H_
+#endif // WEBRTC_SOUND_SOUNDINPUTSTREAMINTERFACE_H_
diff --git a/webrtc/supplement.gypi b/webrtc/supplement.gypi
index 075e2adb06..edccd4d81d 100644
--- a/webrtc/supplement.gypi
+++ b/webrtc/supplement.gypi
@@ -2,8 +2,17 @@
'variables': {
'variables': {
'webrtc_root%': '<(DEPTH)/webrtc',
+ # Override the default (10.6) in Chromium's build/common.gypi.
+ # Needed for ARC and libc++.
+ 'mac_deployment_target%': '10.7',
+ # Disable use of sysroot for Linux. It's enabled by default in Chromium,
+ # but it currently lacks the libudev-dev package.
+ # TODO(kjellander): Remove when crbug.com/561584 is fixed.
+ 'use_sysroot': 0,
},
'webrtc_root%': '<(webrtc_root)',
+ 'mac_deployment_target%': '<(mac_deployment_target)',
+ 'use_sysroot%': '<(use_sysroot)',
'build_with_chromium': 0,
'conditions': [
['OS=="ios"', {
diff --git a/webrtc/system_wrappers/BUILD.gn b/webrtc/system_wrappers/BUILD.gn
index 2f68eae634..5e0e41e832 100644
--- a/webrtc/system_wrappers/BUILD.gn
+++ b/webrtc/system_wrappers/BUILD.gn
@@ -22,7 +22,6 @@ static_library("system_wrappers") {
"include/data_log.h",
"include/data_log_c.h",
"include/data_log_impl.h",
- "include/event_tracer.h",
"include/event_wrapper.h",
"include/field_trial.h",
"include/file_wrapper.h",
@@ -38,7 +37,6 @@ static_library("system_wrappers") {
"include/static_instance.h",
"include/stl_util.h",
"include/stringize_macros.h",
- "include/thread_wrapper.h",
"include/tick_util.h",
"include/timestamp_extrapolator.h",
"include/trace.h",
@@ -67,7 +65,6 @@ static_library("system_wrappers") {
"source/event_timer_posix.h",
"source/event_timer_win.cc",
"source/event_timer_win.h",
- "source/event_tracer.cc",
"source/file_impl.cc",
"source/file_impl.h",
"source/logging.cc",
@@ -81,11 +78,6 @@ static_library("system_wrappers") {
"source/rw_lock_win.h",
"source/sleep.cc",
"source/sort.cc",
- "source/thread.cc",
- "source/thread_posix.cc",
- "source/thread_posix.h",
- "source/thread_win.cc",
- "source/thread_win.h",
"source/tick_util.cc",
"source/timestamp_extrapolator.cc",
"source/trace_impl.cc",
@@ -179,10 +171,6 @@ source_set("field_trial_default") {
configs += [ "..:common_config" ]
public_configs = [ "..:common_inherited_config" ]
-
- deps = [
- ":system_wrappers",
- ]
}
source_set("metrics_default") {
@@ -192,10 +180,6 @@ source_set("metrics_default") {
configs += [ "..:common_config" ]
public_configs = [ "..:common_inherited_config" ]
-
- deps = [
- ":system_wrappers",
- ]
}
source_set("system_wrappers_default") {
@@ -205,6 +189,7 @@ source_set("system_wrappers_default") {
deps = [
":field_trial_default",
":metrics_default",
+ ":system_wrappers",
]
}
diff --git a/webrtc/system_wrappers/OWNERS b/webrtc/system_wrappers/OWNERS
index 76fdda254e..f55277e8ea 100644
--- a/webrtc/system_wrappers/OWNERS
+++ b/webrtc/system_wrappers/OWNERS
@@ -4,4 +4,9 @@ henrikg@webrtc.org
mflodman@webrtc.org
niklas.enbom@webrtc.org
+# These are for the common case of adding or renaming files. If you're making
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
+
per-file BUILD.gn=kjellander@webrtc.org
diff --git a/webrtc/system_wrappers/include/aligned_array.h b/webrtc/system_wrappers/include/aligned_array.h
index a3a5911e36..a2ffe99c14 100644
--- a/webrtc/system_wrappers/include/aligned_array.h
+++ b/webrtc/system_wrappers/include/aligned_array.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_ALIGNED_ARRAY_
-#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_ALIGNED_ARRAY_
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INCLUDE_ALIGNED_ARRAY_
+#define WEBRTC_SYSTEM_WRAPPERS_INCLUDE_ALIGNED_ARRAY_
#include "webrtc/base/checks.h"
#include "webrtc/system_wrappers/include/aligned_malloc.h"
@@ -20,21 +20,20 @@ namespace webrtc {
// aligned to the given byte alignment.
template<typename T> class AlignedArray {
public:
- AlignedArray(int rows, size_t cols, int alignment)
+ AlignedArray(size_t rows, size_t cols, size_t alignment)
: rows_(rows),
- cols_(cols),
- alignment_(alignment) {
- RTC_CHECK_GT(alignment_, 0);
+ cols_(cols) {
+ RTC_CHECK_GT(alignment, 0u);
head_row_ = static_cast<T**>(AlignedMalloc(rows_ * sizeof(*head_row_),
- alignment_));
- for (int i = 0; i < rows_; ++i) {
+ alignment));
+ for (size_t i = 0; i < rows_; ++i) {
head_row_[i] = static_cast<T*>(AlignedMalloc(cols_ * sizeof(**head_row_),
- alignment_));
+ alignment));
}
}
~AlignedArray() {
- for (int i = 0; i < rows_; ++i) {
+ for (size_t i = 0; i < rows_; ++i) {
AlignedFree(head_row_[i]);
}
AlignedFree(head_row_);
@@ -48,27 +47,27 @@ template<typename T> class AlignedArray {
return head_row_;
}
- T* Row(int row) {
+ T* Row(size_t row) {
RTC_CHECK_LE(row, rows_);
return head_row_[row];
}
- const T* Row(int row) const {
+ const T* Row(size_t row) const {
RTC_CHECK_LE(row, rows_);
return head_row_[row];
}
- T& At(int row, size_t col) {
+ T& At(size_t row, size_t col) {
RTC_CHECK_LE(col, cols_);
return Row(row)[col];
}
- const T& At(int row, size_t col) const {
+ const T& At(size_t row, size_t col) const {
RTC_CHECK_LE(col, cols_);
return Row(row)[col];
}
- int rows() const {
+ size_t rows() const {
return rows_;
}
@@ -77,12 +76,11 @@ template<typename T> class AlignedArray {
}
private:
- int rows_;
+ size_t rows_;
size_t cols_;
- int alignment_;
T** head_row_;
};
} // namespace webrtc
-#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_ALIGNED_ARRAY_
+#endif // WEBRTC_SYSTEM_WRAPPERS_INCLUDE_ALIGNED_ARRAY_
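With the signedness cleanup above, every AlignedArray dimension is a size_t and the alignment is validated once in the constructor instead of being stored. A usage sketch against the interface as it now stands (the 16-byte alignment and the dimensions are illustrative only):

    #include "webrtc/system_wrappers/include/aligned_array.h"

    // Four rows of 256 floats, each row 16-byte aligned (e.g. for SSE).
    void ZeroRows() {
      webrtc::AlignedArray<float> array(4u, 256u, 16u /* alignment */);
      for (size_t r = 0; r < array.rows(); ++r) {
        float* row = array.Row(r);
        for (size_t c = 0; c < 256u; ++c)
          row[c] = 0.f;
      }
    }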
diff --git a/webrtc/system_wrappers/include/aligned_malloc.h b/webrtc/system_wrappers/include/aligned_malloc.h
index 5d343cde7c..277abec020 100644
--- a/webrtc/system_wrappers/include/aligned_malloc.h
+++ b/webrtc/system_wrappers/include/aligned_malloc.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_ALIGNED_MALLOC_H_
-#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_ALIGNED_MALLOC_H_
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INCLUDE_ALIGNED_MALLOC_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INCLUDE_ALIGNED_MALLOC_H_
// The functions declared here
// 1) Allocates block of aligned memory.
@@ -56,4 +56,4 @@ struct AlignedFreeDeleter {
} // namespace webrtc
-#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_ALIGNED_MALLOC_H_
+#endif // WEBRTC_SYSTEM_WRAPPERS_INCLUDE_ALIGNED_MALLOC_H_
diff --git a/webrtc/system_wrappers/include/asm_defines.h b/webrtc/system_wrappers/include/asm_defines.h
index c2a688f00a..fe4c05effc 100644
--- a/webrtc/system_wrappers/include/asm_defines.h
+++ b/webrtc/system_wrappers/include/asm_defines.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_ASM_DEFINES_H_
-#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_ASM_DEFINES_H_
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INCLUDE_ASM_DEFINES_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INCLUDE_ASM_DEFINES_H_
#if defined(__linux__) && defined(__ELF__)
.section .note.GNU-stack,"",%progbits
@@ -63,4 +63,4 @@ strheq \reg1, \reg2, \num
.text
-#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_ASM_DEFINES_H_
+#endif // WEBRTC_SYSTEM_WRAPPERS_INCLUDE_ASM_DEFINES_H_
diff --git a/webrtc/system_wrappers/include/atomic32.h b/webrtc/system_wrappers/include/atomic32.h
index 36ca144bda..78e649d8b6 100644
--- a/webrtc/system_wrappers/include/atomic32.h
+++ b/webrtc/system_wrappers/include/atomic32.h
@@ -12,8 +12,8 @@
// doing, use locks instead! :-)
//
// Note: assumes 32-bit (or higher) system
-#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_ATOMIC32_H_
-#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_ATOMIC32_H_
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INCLUDE_ATOMIC32_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INCLUDE_ATOMIC32_H_
#include <stddef.h>
@@ -63,4 +63,4 @@ class Atomic32 {
} // namespace webrtc
-#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_ATOMIC32_H_
+#endif // WEBRTC_SYSTEM_WRAPPERS_INCLUDE_ATOMIC32_H_
diff --git a/webrtc/system_wrappers/include/clock.h b/webrtc/system_wrappers/include/clock.h
index 14253ba560..f443057bea 100644
--- a/webrtc/system_wrappers/include/clock.h
+++ b/webrtc/system_wrappers/include/clock.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CLOCK_H_
-#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CLOCK_H_
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INCLUDE_CLOCK_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INCLUDE_CLOCK_H_
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/system_wrappers/include/rw_lock_wrapper.h"
@@ -81,4 +81,4 @@ class SimulatedClock : public Clock {
}; // namespace webrtc
-#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CLOCK_H_
+#endif // WEBRTC_SYSTEM_WRAPPERS_INCLUDE_CLOCK_H_
diff --git a/webrtc/system_wrappers/include/compile_assert_c.h b/webrtc/system_wrappers/include/compile_assert_c.h
index dbb5292d97..b402d7192d 100644
--- a/webrtc/system_wrappers/include/compile_assert_c.h
+++ b/webrtc/system_wrappers/include/compile_assert_c.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_COMPILE_ASSERT_H_
-#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_COMPILE_ASSERT_H_
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INCLUDE_COMPILE_ASSERT_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INCLUDE_COMPILE_ASSERT_H_
#ifdef __cplusplus
#error "Only use this for C files. For C++, use static_assert."
@@ -21,4 +21,4 @@
// COMPILE_ASSERT(sizeof(foo) < 128);
#define COMPILE_ASSERT(expression) switch (0) {case 0: case expression:;}
-#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_COMPILE_ASSERT_H_
+#endif // WEBRTC_SYSTEM_WRAPPERS_INCLUDE_COMPILE_ASSERT_H_
diff --git a/webrtc/system_wrappers/include/condition_variable_wrapper.h b/webrtc/system_wrappers/include/condition_variable_wrapper.h
index 151f00ece1..37ca30f036 100644
--- a/webrtc/system_wrappers/include/condition_variable_wrapper.h
+++ b/webrtc/system_wrappers/include/condition_variable_wrapper.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CONDITION_VARIABLE_WRAPPER_H_
-#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CONDITION_VARIABLE_WRAPPER_H_
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INCLUDE_CONDITION_VARIABLE_WRAPPER_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INCLUDE_CONDITION_VARIABLE_WRAPPER_H_
namespace webrtc {
@@ -39,4 +39,4 @@ class ConditionVariableWrapper {
} // namespace webrtc
-#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CONDITION_VARIABLE_WRAPPER_H_
+#endif // WEBRTC_SYSTEM_WRAPPERS_INCLUDE_CONDITION_VARIABLE_WRAPPER_H_
diff --git a/webrtc/system_wrappers/include/cpu_features_wrapper.h b/webrtc/system_wrappers/include/cpu_features_wrapper.h
index 5697c49164..9838d94e58 100644
--- a/webrtc/system_wrappers/include/cpu_features_wrapper.h
+++ b/webrtc/system_wrappers/include/cpu_features_wrapper.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CPU_FEATURES_WRAPPER_H_
-#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CPU_FEATURES_WRAPPER_H_
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INCLUDE_CPU_FEATURES_WRAPPER_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INCLUDE_CPU_FEATURES_WRAPPER_H_
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
@@ -48,4 +48,4 @@ extern uint64_t WebRtc_GetCPUFeaturesARM(void);
} // extern "C"
#endif
-#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CPU_FEATURES_WRAPPER_H_
+#endif // WEBRTC_SYSTEM_WRAPPERS_INCLUDE_CPU_FEATURES_WRAPPER_H_
diff --git a/webrtc/system_wrappers/include/cpu_info.h b/webrtc/system_wrappers/include/cpu_info.h
index 65888b8d32..3c00d33ed3 100644
--- a/webrtc/system_wrappers/include/cpu_info.h
+++ b/webrtc/system_wrappers/include/cpu_info.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CPU_INFO_H_
-#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CPU_INFO_H_
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INCLUDE_CPU_INFO_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INCLUDE_CPU_INFO_H_
#include "webrtc/typedefs.h"
@@ -25,4 +25,4 @@ class CpuInfo {
} // namespace webrtc
-#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CPU_INFO_H_
+#endif // WEBRTC_SYSTEM_WRAPPERS_INCLUDE_CPU_INFO_H_
diff --git a/webrtc/system_wrappers/include/critical_section_wrapper.h b/webrtc/system_wrappers/include/critical_section_wrapper.h
index e93a249e25..7dd217e40d 100644
--- a/webrtc/system_wrappers/include/critical_section_wrapper.h
+++ b/webrtc/system_wrappers/include/critical_section_wrapper.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CRITICAL_SECTION_WRAPPER_H_
-#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CRITICAL_SECTION_WRAPPER_H_
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INCLUDE_CRITICAL_SECTION_WRAPPER_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INCLUDE_CRITICAL_SECTION_WRAPPER_H_
// If the critical section is heavily contended it may be beneficial to use
// read/write locks instead.
@@ -51,4 +51,4 @@ class SCOPED_LOCKABLE CriticalSectionScoped {
} // namespace webrtc
-#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CRITICAL_SECTION_WRAPPER_H_
+#endif // WEBRTC_SYSTEM_WRAPPERS_INCLUDE_CRITICAL_SECTION_WRAPPER_H_
diff --git a/webrtc/system_wrappers/include/data_log.h b/webrtc/system_wrappers/include/data_log.h
index cf095674a3..f6cad88e96 100644
--- a/webrtc/system_wrappers/include/data_log.h
+++ b/webrtc/system_wrappers/include/data_log.h
@@ -28,8 +28,8 @@
//
// Table names and column names are case sensitive.
-#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_DATA_LOG_H_
-#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_DATA_LOG_H_
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INCLUDE_DATA_LOG_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INCLUDE_DATA_LOG_H_
#include <string>
@@ -116,4 +116,4 @@ class DataLog {
} // namespace webrtc
-#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_DATA_LOG_H_
+#endif // WEBRTC_SYSTEM_WRAPPERS_INCLUDE_DATA_LOG_H_
diff --git a/webrtc/system_wrappers/include/data_log_c.h b/webrtc/system_wrappers/include/data_log_c.h
index 4ff8329c85..d31e4d972e 100644
--- a/webrtc/system_wrappers/include/data_log_c.h
+++ b/webrtc/system_wrappers/include/data_log_c.h
@@ -12,8 +12,8 @@
// mapped here except for InsertCell as C does not support templates.
// See data_log.h for a description of the functions.
-#ifndef SRC_SYSTEM_WRAPPERS_INTERFACE_DATA_LOG_C_H_
-#define SRC_SYSTEM_WRAPPERS_INTERFACE_DATA_LOG_C_H_
+#ifndef SRC_SYSTEM_WRAPPERS_INCLUDE_DATA_LOG_C_H_
+#define SRC_SYSTEM_WRAPPERS_INCLUDE_DATA_LOG_C_H_
#include <stddef.h> // size_t
@@ -82,4 +82,4 @@ int WebRtcDataLog_NextRow(const char* table_name);
} // end of extern "C"
#endif
-#endif // SRC_SYSTEM_WRAPPERS_INTERFACE_DATA_LOG_C_H_ // NOLINT
+#endif // SRC_SYSTEM_WRAPPERS_INCLUDE_DATA_LOG_C_H_ // NOLINT
diff --git a/webrtc/system_wrappers/include/data_log_impl.h b/webrtc/system_wrappers/include/data_log_impl.h
index 56d98f891e..35519609b9 100644
--- a/webrtc/system_wrappers/include/data_log_impl.h
+++ b/webrtc/system_wrappers/include/data_log_impl.h
@@ -14,16 +14,16 @@
// These classes are helper classes used for logging data for offline
// processing. Data logged with these classes can conveniently be parsed and
// processed with e.g. Matlab.
-#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_DATA_LOG_IMPL_H_
-#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_DATA_LOG_IMPL_H_
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INCLUDE_DATA_LOG_IMPL_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INCLUDE_DATA_LOG_IMPL_H_
#include <map>
#include <sstream>
#include <string>
#include <vector>
+#include "webrtc/base/platform_thread.h"
#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -146,10 +146,12 @@ class DataLogImpl {
int counter_;
TableMap tables_;
EventWrapper* flush_event_;
- rtc::scoped_ptr<ThreadWrapper> file_writer_thread_;
+ // This is a scoped_ptr so that we don't have to create threads in the no-op
+ // impl.
+ rtc::scoped_ptr<rtc::PlatformThread> file_writer_thread_;
RWLockWrapper* tables_lock_;
};
} // namespace webrtc
-#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_DATA_LOG_IMPL_H_
+#endif // WEBRTC_SYSTEM_WRAPPERS_INCLUDE_DATA_LOG_IMPL_H_
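A hedged sketch of the lazy-thread pattern the new comment describes: the member stays null in the no-op build, and the thread is only created when logging is active. The PlatformThread constructor and Start()/Stop() shape are assumed from webrtc/base/platform_thread.h, which this diff does not show:

    #include "webrtc/base/platform_thread.h"
    #include "webrtc/base/scoped_ptr.h"

    class Writer {
     public:
      void EnsureThreadStarted() {
        if (!thread_) {
          thread_.reset(
              new rtc::PlatformThread(&Writer::Run, this, "DataLogWriter"));
          thread_->Start();
        }
      }
      ~Writer() {
        if (thread_)
          thread_->Stop();
      }

     private:
      // Assumed run-function contract: called in a loop while it returns true.
      static bool Run(void* obj) {
        // Flush pending rows here.
        return true;
      }
      rtc::scoped_ptr<rtc::PlatformThread> thread_;
    };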
diff --git a/webrtc/system_wrappers/include/event_tracer.h b/webrtc/system_wrappers/include/event_tracer.h
deleted file mode 100644
index 9b1eb1eb92..0000000000
--- a/webrtc/system_wrappers/include/event_tracer.h
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// This file defines the interface for event tracing in WebRTC.
-//
-// Event log handlers are set through SetupEventTracer(). User of this API will
-// provide two function pointers to handle event tracing calls.
-//
-// * GetCategoryEnabledPtr
-// Event tracing system calls this function to determine if a particular
-// event category is enabled.
-//
-// * AddTraceEventPtr
-// Adds a tracing event. It is the user's responsibility to log the data
-// provided.
-//
-// Parameters for the above two functions are described in trace_event.h.
-
-#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_EVENT_TRACER_H_
-#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_EVENT_TRACER_H_
-
-// This file has moved.
-// TODO(tommi): Delete after removing dependencies and updating Chromium.
-#include "webrtc/base/event_tracer.h"
-
-#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_EVENT_TRACER_H_
diff --git a/webrtc/system_wrappers/include/event_wrapper.h b/webrtc/system_wrappers/include/event_wrapper.h
index bd12eef908..cc3722bd6b 100644
--- a/webrtc/system_wrappers/include/event_wrapper.h
+++ b/webrtc/system_wrappers/include/event_wrapper.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_EVENT_WRAPPER_H_
-#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_EVENT_WRAPPER_H_
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INCLUDE_EVENT_WRAPPER_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INCLUDE_EVENT_WRAPPER_H_
namespace webrtc {
enum EventTypeWrapper {
@@ -67,4 +67,4 @@ class EventTimerWrapper : public EventWrapper {
} // namespace webrtc
-#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_EVENT_WRAPPER_H_
+#endif // WEBRTC_SYSTEM_WRAPPERS_INCLUDE_EVENT_WRAPPER_H_
diff --git a/webrtc/system_wrappers/include/field_trial.h b/webrtc/system_wrappers/include/field_trial.h
index 2af083cdac..62fbfd1a50 100644
--- a/webrtc/system_wrappers/include/field_trial.h
+++ b/webrtc/system_wrappers/include/field_trial.h
@@ -8,8 +8,8 @@
// be found in the AUTHORS file in the root of the source tree.
//
-#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_FIELD_TRIAL_H_
-#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_FIELD_TRIAL_H_
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INCLUDE_FIELD_TRIAL_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INCLUDE_FIELD_TRIAL_H_
#include <string>
@@ -65,4 +65,4 @@ std::string FindFullName(const std::string& name);
} // namespace field_trial
} // namespace webrtc
-#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_FIELD_TRIAL_H_
+#endif // WEBRTC_SYSTEM_WRAPPERS_INCLUDE_FIELD_TRIAL_H_
diff --git a/webrtc/system_wrappers/include/field_trial_default.h b/webrtc/system_wrappers/include/field_trial_default.h
index fafe550dcc..7417ced39d 100644
--- a/webrtc/system_wrappers/include/field_trial_default.h
+++ b/webrtc/system_wrappers/include/field_trial_default.h
@@ -8,8 +8,8 @@
// be found in the AUTHORS file in the root of the source tree.
//
-#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_FIELD_TRIAL_DEFAULT_H_
-#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_FIELD_TRIAL_DEFAULT_H_
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INCLUDE_FIELD_TRIAL_DEFAULT_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INCLUDE_FIELD_TRIAL_DEFAULT_H_
namespace webrtc {
namespace field_trial {
@@ -20,7 +20,9 @@ namespace field_trial {
// Note: trials_string must never be destroyed.
void InitFieldTrialsFromString(const char* trials_string);
+const char* GetFieldTrialString();
+
} // namespace field_trial
} // namespace webrtc
-#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_FIELD_TRIAL_DEFAULT_H_
+#endif // WEBRTC_SYSTEM_WRAPPERS_INCLUDE_FIELD_TRIAL_DEFAULT_H_
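A usage sketch for the new accessor next to the existing initializer; the trial name is hypothetical and the string format follows the Chromium-style "Name/Group/" syntax assumed by field trials:

    #include "webrtc/system_wrappers/include/field_trial_default.h"

    void SetupTrials() {
      // A string literal satisfies the "must never be destroyed" note above.
      webrtc::field_trial::InitFieldTrialsFromString(
          "WebRTC-SomeTrial/Enabled/");  // Hypothetical trial name.
      const char* current = webrtc::field_trial::GetFieldTrialString();
      (void)current;  // E.g. hand off to another process or log it.
    }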
diff --git a/webrtc/system_wrappers/include/file_wrapper.h b/webrtc/system_wrappers/include/file_wrapper.h
index 8f4e09f9c9..b32a62f2f9 100644
--- a/webrtc/system_wrappers/include/file_wrapper.h
+++ b/webrtc/system_wrappers/include/file_wrapper.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_FILE_WRAPPER_H_
-#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_FILE_WRAPPER_H_
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INCLUDE_FILE_WRAPPER_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INCLUDE_FILE_WRAPPER_H_
#include <stddef.h>
#include <stdio.h>
@@ -75,4 +75,4 @@ class FileWrapper : public InStream, public OutStream {
} // namespace webrtc
-#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_FILE_WRAPPER_H_
+#endif // WEBRTC_SYSTEM_WRAPPERS_INCLUDE_FILE_WRAPPER_H_
diff --git a/webrtc/system_wrappers/include/logcat_trace_context.h b/webrtc/system_wrappers/include/logcat_trace_context.h
index 0b74734043..8bb01d8102 100644
--- a/webrtc/system_wrappers/include/logcat_trace_context.h
+++ b/webrtc/system_wrappers/include/logcat_trace_context.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_LOGCAT_TRACE_CONTEXT_H_
-#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_LOGCAT_TRACE_CONTEXT_H_
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INCLUDE_LOGCAT_TRACE_CONTEXT_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INCLUDE_LOGCAT_TRACE_CONTEXT_H_
#include "webrtc/system_wrappers/include/trace.h"
@@ -32,4 +32,4 @@ class LogcatTraceContext : public webrtc::TraceCallback {
} // namespace webrtc
-#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_LOGCAT_TRACE_CONTEXT_H_
+#endif // WEBRTC_SYSTEM_WRAPPERS_INCLUDE_LOGCAT_TRACE_CONTEXT_H_
diff --git a/webrtc/system_wrappers/include/logging.h b/webrtc/system_wrappers/include/logging.h
index 41c436b1f3..0089841d4e 100644
--- a/webrtc/system_wrappers/include/logging.h
+++ b/webrtc/system_wrappers/include/logging.h
@@ -36,20 +36,8 @@
// type (basically, it just doesn't prepend the namespace).
// LOG_F(sev) Like LOG(), but includes the name of the current function.
-// Additional helper macros added by WebRTC:
-// LOG_API is a shortcut for API call logging. Pass in the input parameters of
-// the method. For example:
-// Foo(int bar, int baz) {
-// LOG_API2(bar, baz);
-// }
-//
-// LOG_FERR is a shortcut for logging a failed function call. For example:
-// if (!Foo(bar)) {
-// LOG_FERR1(LS_WARNING, Foo, bar);
-// }
-
-#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_LOGGING_H_
-#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_LOGGING_H_
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INCLUDE_LOGGING_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INCLUDE_LOGGING_H_
#include <sstream>
@@ -131,31 +119,14 @@ class LogMessageVoidify {
webrtc::LogMessage(__FILE__, __LINE__, sev).stream()
// The _F version prefixes the message with the current function name.
-#if (defined(__GNUC__) && defined(_DEBUG)) || defined(WANT_PRETTY_LOG_F)
+#if (defined(__GNUC__) && !defined(NDEBUG)) || defined(WANT_PRETTY_LOG_F)
#define LOG_F(sev) LOG(sev) << __PRETTY_FUNCTION__ << ": "
#else
#define LOG_F(sev) LOG(sev) << __FUNCTION__ << ": "
#endif
-#define LOG_API0() LOG_F(LS_VERBOSE)
-#define LOG_API1(v1) LOG_API0() << #v1 << "=" << v1
-#define LOG_API2(v1, v2) LOG_API1(v1) \
- << ", " << #v2 << "=" << v2
-#define LOG_API3(v1, v2, v3) LOG_API2(v1, v2) \
- << ", " << #v3 << "=" << v3
-
-#define LOG_FERR0(sev, func) LOG(sev) << #func << " failed"
-#define LOG_FERR1(sev, func, v1) LOG_FERR0(sev, func) \
- << ": " << #v1 << "=" << v1
-#define LOG_FERR2(sev, func, v1, v2) LOG_FERR1(sev, func, v1) \
- << ", " << #v2 << "=" << v2
-#define LOG_FERR3(sev, func, v1, v2, v3) LOG_FERR2(sev, func, v1, v2) \
- << ", " << #v3 << "=" << v3
-#define LOG_FERR4(sev, func, v1, v2, v3, v4) LOG_FERR3(sev, func, v1, v2, v3) \
- << ", " << #v4 << "=" << v4
-
#endif // LOG
} // namespace webrtc
-#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_LOGGING_H_
+#endif // WEBRTC_SYSTEM_WRAPPERS_INCLUDE_LOGGING_H_
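With the LOG_API*/LOG_FERR* helpers removed, call sites fall back to the plain LOG and LOG_F macros documented at the top of this header. A sketch of the replacement style, using the LS_VERBOSE and LS_WARNING severities that the removed macros used:

    #include "webrtc/system_wrappers/include/logging.h"

    bool OpenDescriptor(int fd) {
      LOG_F(LS_VERBOSE) << "fd=" << fd;  // Was: LOG_API1(fd);
      if (fd < 0) {
        // Was: LOG_FERR1(LS_WARNING, OpenDescriptor, fd);
        LOG_F(LS_WARNING) << "failed: fd=" << fd;
        return false;
      }
      return true;
    }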
diff --git a/webrtc/system_wrappers/include/metrics.h b/webrtc/system_wrappers/include/metrics.h
index 7ebe3bde3d..4cd74c5e84 100644
--- a/webrtc/system_wrappers/include/metrics.h
+++ b/webrtc/system_wrappers/include/metrics.h
@@ -8,11 +8,13 @@
// be found in the AUTHORS file in the root of the source tree.
//
-#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_METRICS_H_
-#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_METRICS_H_
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INCLUDE_METRICS_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INCLUDE_METRICS_H_
#include <string>
+#include "webrtc/base/atomicops.h"
+#include "webrtc/base/checks.h"
#include "webrtc/common_types.h"
// Macros for allowing WebRTC clients (e.g. Chrome) to gather and aggregate
@@ -58,54 +60,98 @@
// Macros for adding samples to a named histogram.
-//
-// NOTE: this is a temporary solution.
-// The aim is to mimic the behaviour in Chromium's src/base/metrics/histograms.h
-// However as atomics are not supported in webrtc, this is for now a modified
-// and temporary solution. Note that the histogram is constructed/found for
-// each call. Therefore, for now only use this implementation for metrics
-// that do not need to be updated frequently.
-// TODO(asapersson): Change implementation when atomics are supported.
-// Also consider changing string to const char* when switching to atomics.
-// Histogram for counters.
-#define RTC_HISTOGRAM_COUNTS_100(name, sample) RTC_HISTOGRAM_COUNTS( \
- name, sample, 1, 100, 50)
+// Histogram for counters (exponentially spaced buckets).
+#define RTC_HISTOGRAM_COUNTS_100(name, sample) \
+ RTC_HISTOGRAM_COUNTS(name, sample, 1, 100, 50)
-#define RTC_HISTOGRAM_COUNTS_200(name, sample) RTC_HISTOGRAM_COUNTS( \
- name, sample, 1, 200, 50)
+#define RTC_HISTOGRAM_COUNTS_200(name, sample) \
+ RTC_HISTOGRAM_COUNTS(name, sample, 1, 200, 50)
-#define RTC_HISTOGRAM_COUNTS_1000(name, sample) RTC_HISTOGRAM_COUNTS( \
- name, sample, 1, 1000, 50)
+#define RTC_HISTOGRAM_COUNTS_1000(name, sample) \
+ RTC_HISTOGRAM_COUNTS(name, sample, 1, 1000, 50)
-#define RTC_HISTOGRAM_COUNTS_10000(name, sample) RTC_HISTOGRAM_COUNTS( \
- name, sample, 1, 10000, 50)
+#define RTC_HISTOGRAM_COUNTS_10000(name, sample) \
+ RTC_HISTOGRAM_COUNTS(name, sample, 1, 10000, 50)
-#define RTC_HISTOGRAM_COUNTS_100000(name, sample) RTC_HISTOGRAM_COUNTS( \
- name, sample, 1, 100000, 50)
+#define RTC_HISTOGRAM_COUNTS_100000(name, sample) \
+ RTC_HISTOGRAM_COUNTS(name, sample, 1, 100000, 50)
#define RTC_HISTOGRAM_COUNTS(name, sample, min, max, bucket_count) \
- RTC_HISTOGRAM_COMMON_BLOCK(name, sample, \
- webrtc::metrics::HistogramFactoryGetCounts( \
- name, min, max, bucket_count))
+ RTC_HISTOGRAM_COMMON_BLOCK(name, sample, \
+ webrtc::metrics::HistogramFactoryGetCounts(name, min, max, bucket_count))
+
+// Deprecated.
+// TODO(asapersson): Remove.
+#define RTC_HISTOGRAM_COUNTS_SPARSE_100(name, sample) \
+ RTC_HISTOGRAM_COUNTS_SPARSE(name, sample, 1, 100, 50)
+
+#define RTC_HISTOGRAM_COUNTS_SPARSE_200(name, sample) \
+ RTC_HISTOGRAM_COUNTS_SPARSE(name, sample, 1, 200, 50)
+
+#define RTC_HISTOGRAM_COUNTS_SPARSE_1000(name, sample) \
+ RTC_HISTOGRAM_COUNTS_SPARSE(name, sample, 1, 1000, 50)
+
+#define RTC_HISTOGRAM_COUNTS_SPARSE_10000(name, sample) \
+ RTC_HISTOGRAM_COUNTS_SPARSE(name, sample, 1, 10000, 50)
+
+#define RTC_HISTOGRAM_COUNTS_SPARSE_100000(name, sample) \
+ RTC_HISTOGRAM_COUNTS_SPARSE(name, sample, 1, 100000, 50)
+
+#define RTC_HISTOGRAM_COUNTS_SPARSE(name, sample, min, max, bucket_count) \
+ RTC_HISTOGRAM_COMMON_BLOCK_SLOW(name, sample, \
+ webrtc::metrics::HistogramFactoryGetCounts(name, min, max, bucket_count))
-// Histogram for percentage.
+// Histogram for percentage (evenly spaced buckets).
#define RTC_HISTOGRAM_PERCENTAGE(name, sample) \
- RTC_HISTOGRAM_ENUMERATION(name, sample, 101)
+ RTC_HISTOGRAM_ENUMERATION(name, sample, 101)
-// Histogram for enumerators.
+// Deprecated.
+// TODO(asapersson): Remove.
+#define RTC_HISTOGRAM_PERCENTAGE_SPARSE(name, sample) \
+ RTC_HISTOGRAM_ENUMERATION_SPARSE(name, sample, 101)
+
+// Histogram for enumerators (evenly spaced buckets).
// |boundary| should be above the max enumerator sample.
#define RTC_HISTOGRAM_ENUMERATION(name, sample, boundary) \
- RTC_HISTOGRAM_COMMON_BLOCK(name, sample, \
- webrtc::metrics::HistogramFactoryGetEnumeration(name, boundary))
+ RTC_HISTOGRAM_COMMON_BLOCK(name, sample, \
+ webrtc::metrics::HistogramFactoryGetEnumeration(name, boundary))
+// Deprecated.
+// TODO(asapersson): Remove.
+#define RTC_HISTOGRAM_ENUMERATION_SPARSE(name, sample, boundary) \
+ RTC_HISTOGRAM_COMMON_BLOCK_SLOW(name, sample, \
+ webrtc::metrics::HistogramFactoryGetEnumeration(name, boundary))
+
+// The name of the histogram should not vary.
+// TODO(asapersson): Consider changing string to const char*.
#define RTC_HISTOGRAM_COMMON_BLOCK(constant_name, sample, \
factory_get_invocation) \
do { \
- webrtc::metrics::Histogram* histogram_pointer = factory_get_invocation; \
+ static webrtc::metrics::Histogram* atomic_histogram_pointer = nullptr; \
+ webrtc::metrics::Histogram* histogram_pointer = \
+ rtc::AtomicOps::AcquireLoadPtr(&atomic_histogram_pointer); \
+ if (!histogram_pointer) { \
+ histogram_pointer = factory_get_invocation; \
+ webrtc::metrics::Histogram* prev_pointer = \
+ rtc::AtomicOps::CompareAndSwapPtr( \
+ &atomic_histogram_pointer, \
+ static_cast<webrtc::metrics::Histogram*>(nullptr), \
+ histogram_pointer); \
+ RTC_DCHECK(prev_pointer == nullptr || \
+ prev_pointer == histogram_pointer); \
+ } \
webrtc::metrics::HistogramAdd(histogram_pointer, constant_name, sample); \
} while (0)
+// Deprecated.
+// The histogram is constructed/found for each call.
+// May be used for histograms with infrequent updates.
+#define RTC_HISTOGRAM_COMMON_BLOCK_SLOW(name, sample, factory_get_invocation) \
+ do { \
+ webrtc::metrics::Histogram* histogram_pointer = factory_get_invocation; \
+ webrtc::metrics::HistogramAdd(histogram_pointer, name, sample); \
+ } while (0)
namespace webrtc {
namespace metrics {
@@ -135,5 +181,5 @@ void HistogramAdd(
} // namespace metrics
} // namespace webrtc
-#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_METRICS_H_
+#endif // WEBRTC_SYSTEM_WRAPPERS_INCLUDE_METRICS_H_
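The rewritten RTC_HISTOGRAM_COMMON_BLOCK caches the histogram pointer after the first lookup, publishing it with an acquire-load/compare-and-swap pair. A standalone sketch of the same pattern in std::atomic form, to make the macro's control flow easier to follow; FactoryGet and AddSampleTo are stand-ins for the factory invocation and HistogramAdd:

    #include <atomic>

    struct Histogram;                    // Opaque, as in this header.
    Histogram* FactoryGet();             // Stand-in for the factory call.
    void AddSampleTo(Histogram* h, int sample);  // Stand-in for HistogramAdd.

    void AddSample(int sample) {
      static std::atomic<Histogram*> cached{nullptr};
      Histogram* h = cached.load(std::memory_order_acquire);
      if (!h) {
        // Racing callers may both reach the factory; the CAS keeps one
        // pointer, and both are expected to refer to the same histogram
        // (which is what the RTC_DCHECK in the macro above asserts).
        h = FactoryGet();
        Histogram* expected = nullptr;
        cached.compare_exchange_strong(expected, h,
                                       std::memory_order_acq_rel);
      }
      AddSampleTo(h, sample);
    }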
diff --git a/webrtc/system_wrappers/include/ntp_time.h b/webrtc/system_wrappers/include/ntp_time.h
new file mode 100644
index 0000000000..229666e8dd
--- /dev/null
+++ b/webrtc/system_wrappers/include/ntp_time.h
@@ -0,0 +1,63 @@
+/*
+* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+*
+* Use of this source code is governed by a BSD-style license
+* that can be found in the LICENSE file in the root of the source
+* tree. An additional intellectual property rights grant can be found
+* in the file PATENTS. All contributing project authors may
+* be found in the AUTHORS file in the root of the source tree.
+*/
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INCLUDE_NTP_TIME_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INCLUDE_NTP_TIME_H_
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/system_wrappers/include/clock.h"
+
+namespace webrtc {
+
+class NtpTime {
+ public:
+ NtpTime() : seconds_(0), fractions_(0) {}
+ explicit NtpTime(const Clock& clock) {
+ clock.CurrentNtp(seconds_, fractions_);
+ }
+ NtpTime(uint32_t seconds, uint32_t fractions)
+ : seconds_(seconds), fractions_(fractions) {}
+
+ NtpTime(const NtpTime&) = default;
+ NtpTime& operator=(const NtpTime&) = default;
+
+ void SetCurrent(const Clock& clock) {
+ clock.CurrentNtp(seconds_, fractions_);
+ }
+ void Set(uint32_t seconds, uint32_t fractions) {
+ seconds_ = seconds;
+ fractions_ = fractions;
+ }
+ void Reset() {
+ seconds_ = 0;
+ fractions_ = 0;
+ }
+
+ int64_t ToMs() const { return Clock::NtpToMs(seconds_, fractions_); }
+
+ // The NTP standard (RFC 1305, section 3.1) explicitly states that the
+ // value 0/0 is invalid.
+ bool Valid() const { return !(seconds_ == 0 && fractions_ == 0); }
+
+ uint32_t seconds() const { return seconds_; }
+ uint32_t fractions() const { return fractions_; }
+
+ private:
+ uint32_t seconds_;
+ uint32_t fractions_;
+};
+
+inline bool operator==(const NtpTime& n1, const NtpTime& n2) {
+ return n1.seconds() == n2.seconds() && n1.fractions() == n2.fractions();
+}
+inline bool operator!=(const NtpTime& n1, const NtpTime& n2) {
+ return !(n1 == n2);
+}
+
+} // namespace webrtc
+#endif // WEBRTC_SYSTEM_WRAPPERS_INCLUDE_NTP_TIME_H_
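A usage sketch for the new NtpTime helper, exercising the constructors and validity check defined above (Clock::GetRealTimeClock() from clock.h is assumed as the time source):

    #include "webrtc/system_wrappers/include/clock.h"
    #include "webrtc/system_wrappers/include/ntp_time.h"

    void NtpTimeExample() {
      webrtc::Clock* clock = webrtc::Clock::GetRealTimeClock();
      webrtc::NtpTime now(*clock);  // Reads clock->CurrentNtp().
      if (now.Valid()) {            // 0/0 is reserved as invalid (RFC 1305).
        int64_t ms = now.ToMs();    // Via Clock::NtpToMs().
        (void)ms;
      }
      webrtc::NtpTime copy;
      copy.Set(now.seconds(), now.fractions());
      // copy == now, per the operator== defined above.
    }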
diff --git a/webrtc/system_wrappers/include/ref_count.h b/webrtc/system_wrappers/include/ref_count.h
index 8ca06cdf6c..3dd335a8da 100644
--- a/webrtc/system_wrappers/include/ref_count.h
+++ b/webrtc/system_wrappers/include/ref_count.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef SYSTEM_WRAPPERS_INTERFACE_REF_COUNT_H_
-#define SYSTEM_WRAPPERS_INTERFACE_REF_COUNT_H_
+#ifndef SYSTEM_WRAPPERS_INCLUDE_REF_COUNT_H_
+#define SYSTEM_WRAPPERS_INCLUDE_REF_COUNT_H_
#include "webrtc/system_wrappers/include/atomic32.h"
@@ -79,4 +79,4 @@ class RefCountImpl : public T {
} // namespace webrtc
-#endif // SYSTEM_WRAPPERS_INTERFACE_REF_COUNT_H_
+#endif // SYSTEM_WRAPPERS_INCLUDE_REF_COUNT_H_
diff --git a/webrtc/system_wrappers/include/rtp_to_ntp.h b/webrtc/system_wrappers/include/rtp_to_ntp.h
index dfc25cd9e9..0c91928626 100644
--- a/webrtc/system_wrappers/include/rtp_to_ntp.h
+++ b/webrtc/system_wrappers/include/rtp_to_ntp.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef SYSTEM_WRAPPERS_INTERFACE_RTP_TO_NTP_H_
-#define SYSTEM_WRAPPERS_INTERFACE_RTP_TO_NTP_H_
+#ifndef SYSTEM_WRAPPERS_INCLUDE_RTP_TO_NTP_H_
+#define SYSTEM_WRAPPERS_INCLUDE_RTP_TO_NTP_H_
#include <list>
@@ -47,4 +47,4 @@ int CheckForWrapArounds(uint32_t rtp_timestamp, uint32_t rtcp_rtp_timestamp);
} // namespace webrtc
-#endif // SYSTEM_WRAPPERS_INTERFACE_RTP_TO_NTP_H_
+#endif // SYSTEM_WRAPPERS_INCLUDE_RTP_TO_NTP_H_
diff --git a/webrtc/system_wrappers/include/rw_lock_wrapper.h b/webrtc/system_wrappers/include/rw_lock_wrapper.h
index dbe6d6c7c0..751b6a1df5 100644
--- a/webrtc/system_wrappers/include/rw_lock_wrapper.h
+++ b/webrtc/system_wrappers/include/rw_lock_wrapper.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_RW_LOCK_WRAPPER_H_
-#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_RW_LOCK_WRAPPER_H_
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INCLUDE_RW_LOCK_WRAPPER_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INCLUDE_RW_LOCK_WRAPPER_H_
#include "webrtc/base/thread_annotations.h"
@@ -65,4 +65,4 @@ class SCOPED_LOCKABLE WriteLockScoped {
} // namespace webrtc
-#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_RW_LOCK_WRAPPER_H_
+#endif // WEBRTC_SYSTEM_WRAPPERS_INCLUDE_RW_LOCK_WRAPPER_H_
diff --git a/webrtc/system_wrappers/include/scoped_vector.h b/webrtc/system_wrappers/include/scoped_vector.h
index 1e89a9d245..15c3380c8c 100644
--- a/webrtc/system_wrappers/include/scoped_vector.h
+++ b/webrtc/system_wrappers/include/scoped_vector.h
@@ -10,12 +10,13 @@
// Borrowed from Chromium's src/base/memory/scoped_vector.h.
-#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_SCOPED_VECTOR_H_
-#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_SCOPED_VECTOR_H_
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INCLUDE_SCOPED_VECTOR_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INCLUDE_SCOPED_VECTOR_H_
#include <vector>
#include "webrtc/base/checks.h"
+#include "webrtc/base/deprecation.h"
#include "webrtc/system_wrappers/include/stl_util.h"
namespace webrtc {
@@ -43,9 +44,7 @@ class ScopedVector {
~ScopedVector() { clear(); }
// Move construction and assignment.
- ScopedVector(ScopedVector&& other) {
- *this = static_cast<ScopedVector&&>(other);
- }
+ ScopedVector(ScopedVector&& other) { *this = std::move(other); }
ScopedVector& operator=(ScopedVector&& other) {
std::swap(v_, other.v_); // The arguments are std::vectors, so std::swap
// is the one that we want.
@@ -58,7 +57,11 @@ class ScopedVector {
ScopedVector& operator=(const ScopedVector& other) = delete;
// Get an rvalue reference. (sv.Pass() does the same thing as std::move(sv).)
- ScopedVector&& Pass() { return static_cast<ScopedVector&&>(*this); }
+ // Deprecated; remove in March 2016 (bug 5373).
+ RTC_DEPRECATED ScopedVector&& Pass() { return DEPRECATED_Pass(); }
+ ScopedVector&& DEPRECATED_Pass() {
+ return std::move(*this);
+ }
reference operator[](size_t index) { return v_[index]; }
const_reference operator[](size_t index) const { return v_[index]; }
@@ -154,4 +157,4 @@ class ScopedVector {
} // namespace webrtc
-#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_SCOPED_VECTOR_H_
+#endif // WEBRTC_SYSTEM_WRAPPERS_INCLUDE_SCOPED_VECTOR_H_
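The Pass() deprecation above points callers at std::move. A before/after sketch; Consume is a placeholder sink taking ownership by value, and push_back comes from the Chromium scoped_vector this header is borrowed from:

    #include <utility>  // std::move

    #include "webrtc/system_wrappers/include/scoped_vector.h"

    void Consume(webrtc::ScopedVector<int> v);  // Placeholder sink.

    void Transfer() {
      webrtc::ScopedVector<int> numbers;
      numbers.push_back(new int(42));
      // Old style, now RTC_DEPRECATED: Consume(numbers.Pass());
      Consume(std::move(numbers));  // Preferred, per the comment above.
    }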
diff --git a/webrtc/system_wrappers/include/sleep.h b/webrtc/system_wrappers/include/sleep.h
index c0205bf085..e7ed8b32b8 100644
--- a/webrtc/system_wrappers/include/sleep.h
+++ b/webrtc/system_wrappers/include/sleep.h
@@ -9,8 +9,8 @@
*/
// An OS-independent sleep function.
-#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_SLEEP_H_
-#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_SLEEP_H_
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INCLUDE_SLEEP_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INCLUDE_SLEEP_H_
namespace webrtc {
@@ -21,4 +21,4 @@ void SleepMs(int msecs);
} // namespace webrtc
-#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_SLEEP_H_
+#endif // WEBRTC_SYSTEM_WRAPPERS_INCLUDE_SLEEP_H_
diff --git a/webrtc/system_wrappers/include/sort.h b/webrtc/system_wrappers/include/sort.h
index da6ff8d52e..5bf2afa8a5 100644
--- a/webrtc/system_wrappers/include/sort.h
+++ b/webrtc/system_wrappers/include/sort.h
@@ -10,8 +10,8 @@
// Generic unstable sorting routines.
-#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_SORT_H_
-#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_SORT_H_
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INCLUDE_SORT_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INCLUDE_SORT_H_
#include "webrtc/common_types.h"
#include "webrtc/typedefs.h"
@@ -62,4 +62,4 @@ int32_t KeySort(void* data, void* key, uint32_t num_of_elements,
} // namespace webrtc
-#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_SORT_H_
+#endif // WEBRTC_SYSTEM_WRAPPERS_INCLUDE_SORT_H_
diff --git a/webrtc/system_wrappers/include/static_instance.h b/webrtc/system_wrappers/include/static_instance.h
index fd986b821d..41946d9230 100644
--- a/webrtc/system_wrappers/include/static_instance.h
+++ b/webrtc/system_wrappers/include/static_instance.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_STATIC_INSTANCE_H_
-#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_STATIC_INSTANCE_H_
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INCLUDE_STATIC_INSTANCE_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INCLUDE_STATIC_INSTANCE_H_
#include <assert.h>
@@ -150,4 +150,4 @@ static T* GetStaticInstance(CountOperation count_operation) {
} // namespace webrtc
-#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_STATIC_INSTANCE_H_
+#endif // WEBRTC_SYSTEM_WRAPPERS_INCLUDE_STATIC_INSTANCE_H_
diff --git a/webrtc/system_wrappers/include/stl_util.h b/webrtc/system_wrappers/include/stl_util.h
index ebe855fb10..b7a702113f 100644
--- a/webrtc/system_wrappers/include/stl_util.h
+++ b/webrtc/system_wrappers/include/stl_util.h
@@ -10,8 +10,8 @@
// Borrowed from Chromium's src/base/stl_util.h.
-#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_STL_UTIL_H_
-#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_STL_UTIL_H_
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INCLUDE_STL_UTIL_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INCLUDE_STL_UTIL_H_
#include <assert.h>
#include <algorithm>
@@ -262,4 +262,4 @@ bool STLIncludes(const Arg1& a1, const Arg2& a2) {
} // namespace webrtc
-#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_STL_UTIL_H_
+#endif // WEBRTC_SYSTEM_WRAPPERS_INCLUDE_STL_UTIL_H_
diff --git a/webrtc/system_wrappers/include/stringize_macros.h b/webrtc/system_wrappers/include/stringize_macros.h
index ab8c43d4e2..9c8e7e9120 100644
--- a/webrtc/system_wrappers/include/stringize_macros.h
+++ b/webrtc/system_wrappers/include/stringize_macros.h
@@ -15,8 +15,8 @@
// symbols (or their output) and manipulating preprocessor symbols
// that define strings.
-#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_STRINGIZE_MACROS_H_
-#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_STRINGIZE_MACROS_H_
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INCLUDE_STRINGIZE_MACROS_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INCLUDE_STRINGIZE_MACROS_H_
// This is not very useful as it does not expand defined symbols if
// called directly. Use its counterpart without the _NO_EXPANSION
@@ -35,4 +35,4 @@
// STRINGIZE(B(y)) produces "myobj->FunctionCall(y)"
#define STRINGIZE(x) STRINGIZE_NO_EXPANSION(x)
-#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_STRINGIZE_MACROS_H_
+#endif // WEBRTC_SYSTEM_WRAPPERS_INCLUDE_STRINGIZE_MACROS_H_
diff --git a/webrtc/system_wrappers/include/thread_wrapper.h b/webrtc/system_wrappers/include/thread_wrapper.h
deleted file mode 100644
index 742056198a..0000000000
--- a/webrtc/system_wrappers/include/thread_wrapper.h
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// System independent wrapper for spawning threads
-// Note: the spawned thread will loop over the callback function until stopped.
-// Note: The callback function is expected to return every 2 seconds or more
-// often.
-
-#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_THREAD_WRAPPER_H_
-#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_THREAD_WRAPPER_H_
-
-#if defined(WEBRTC_WIN)
-#include <windows.h>
-#endif
-
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/common_types.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-// Callback function that the spawned thread will enter once spawned.
-// A return value of false is interpreted to mean that the function has
-// no more work to do and that the thread can be released.
-typedef bool(*ThreadRunFunction)(void*);
-
-enum ThreadPriority {
-#ifdef WEBRTC_WIN
- kLowPriority = THREAD_PRIORITY_BELOW_NORMAL,
- kNormalPriority = THREAD_PRIORITY_NORMAL,
- kHighPriority = THREAD_PRIORITY_ABOVE_NORMAL,
- kHighestPriority = THREAD_PRIORITY_HIGHEST,
- kRealtimePriority = THREAD_PRIORITY_TIME_CRITICAL
-#else
- kLowPriority = 1,
- kNormalPriority = 2,
- kHighPriority = 3,
- kHighestPriority = 4,
- kRealtimePriority = 5
-#endif
-};
-
-// Represents a simple worker thread. The implementation must be assumed
-// to be single threaded, meaning that all methods of the class must be
-// called from the same thread, including instantiation.
-// TODO(tommi): There's no need for this to be a virtual interface since there's
-// only ever a single implementation of it.
-class ThreadWrapper {
- public:
- virtual ~ThreadWrapper() {}
-
- // Factory method. Constructor disabled.
- //
- //  func        Pointer to a user-specified callback function.
- // obj Object associated with the thread. Passed in the callback
- // function.
- // prio Thread priority. May require root/admin rights.
- //  thread_name NULL-terminated thread name; will be visible in the Windows
- // debugger.
- static rtc::scoped_ptr<ThreadWrapper> CreateThread(ThreadRunFunction func,
- void* obj, const char* thread_name);
-
- // Get the current thread's thread ID.
- // NOTE: This is a static method. It returns the id of the calling thread,
- // *not* the id of the worker thread that a ThreadWrapper instance represents.
- // TODO(tommi): Move outside of the ThreadWrapper class to avoid confusion.
- static uint32_t GetThreadId();
-
- // Tries to spawn a thread and returns true if that was successful.
- // Additionally, it tries to set thread priority according to the priority
- // from when CreateThread was called. However, failure to set priority will
- // not result in a false return value.
- virtual bool Start() = 0;
-
- // Stops the spawned thread and waits for it to be reclaimed with a timeout
- // of two seconds. Will return false if the thread was not reclaimed.
- // Multiple tries to Stop are allowed (e.g. to wait longer than 2 seconds).
- // It's ok to call Stop() even if the spawned thread has been reclaimed.
- virtual bool Stop() = 0;
-
- // Set the priority of the worker thread. Must be called when thread
- // is running.
- virtual bool SetPriority(ThreadPriority priority) = 0;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_THREAD_WRAPPER_H_
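thread_wrapper.h is deleted outright; the replacement throughout this change is rtc::PlatformThread, which keeps the bool(*)(void*) run-function contract but is constructed directly rather than through a factory, and whose Start()/Stop() return void at this revision (hence the vanished ASSERT_TRUE wrappers in the test diffs below). A hedged sketch of the new pattern (NullRun and ThreadDemo are illustrative):

    #include "webrtc/base/platform_thread.h"

    // Same contract as the deleted wrapper: return false to stop looping.
    static bool NullRun(void* /* obj */) {
      return true;
    }

    void ThreadDemo() {
      rtc::PlatformThread thread(&NullRun, nullptr, "ThreadDemo");
      thread.Start();
      thread.SetPriority(rtc::kHighestPriority);  // rtc:: replaces webrtc:: priorities.
      thread.Stop();
    }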
diff --git a/webrtc/system_wrappers/include/tick_util.h b/webrtc/system_wrappers/include/tick_util.h
index 0b7890e7c8..52f9b4ae4d 100644
--- a/webrtc/system_wrappers/include/tick_util.h
+++ b/webrtc/system_wrappers/include/tick_util.h
@@ -11,8 +11,8 @@
// System independent wrapper for polling elapsed time in ms and us.
// The implementation works in the tick domain which can be mapped over to the
// time domain.
-#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_TICK_UTIL_H_
-#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_TICK_UTIL_H_
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INCLUDE_TICK_UTIL_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INCLUDE_TICK_UTIL_H_
#if _WIN32
// Note: The Windows header must always be included before mmsystem.h
@@ -56,6 +56,8 @@ class TickTime {
static int64_t TicksToMilliseconds(const int64_t ticks);
+ static int64_t TicksToMicroseconds(const int64_t ticks);
+
// Returns a TickTime that is ticks later than the passed TickTime.
friend TickTime operator+(const TickTime lhs, const int64_t ticks);
TickTime& operator+=(const int64_t& ticks);
@@ -63,19 +65,9 @@ class TickTime {
// Returns a TickInterval that is the difference in ticks between rhs and lhs.
friend TickInterval operator-(const TickTime& lhs, const TickTime& rhs);
- // Call to engage the fake clock. This is useful for tests since relying on
- // a real clock often makes the test flaky.
- static void UseFakeClock(int64_t start_millisecond);
-
- // Advance the fake clock. Must be called after UseFakeClock.
- static void AdvanceFakeClock(int64_t milliseconds);
-
private:
static int64_t QueryOsForTicks();
- static bool use_fake_clock_;
- static int64_t fake_ticks_;
-
int64_t ticks_;
};
@@ -83,6 +75,7 @@ class TickTime {
class TickInterval {
public:
TickInterval();
+ explicit TickInterval(int64_t interval);
int64_t Milliseconds() const;
int64_t Microseconds() const;
@@ -103,8 +96,6 @@ class TickInterval {
friend bool operator>=(const TickInterval& lhs, const TickInterval& rhs);
private:
- explicit TickInterval(int64_t interval);
-
friend class TickTime;
friend TickInterval operator-(const TickTime& lhs, const TickTime& rhs);
@@ -112,6 +103,14 @@ class TickInterval {
int64_t interval_;
};
+inline int64_t TickInterval::Milliseconds() const {
+ return TickTime::TicksToMilliseconds(interval_);
+}
+
+inline int64_t TickInterval::Microseconds() const {
+ return TickTime::TicksToMicroseconds(interval_);
+}
+
inline TickInterval operator+(const TickInterval& lhs,
const TickInterval& rhs) {
return TickInterval(lhs.interval_ + rhs.interval_);
@@ -157,82 +156,13 @@ inline TickTime::TickTime(int64_t ticks)
}
inline TickTime TickTime::Now() {
- if (use_fake_clock_)
- return TickTime(fake_ticks_);
- else
- return TickTime(QueryOsForTicks());
-}
-
-inline int64_t TickTime::MillisecondTimestamp() {
- int64_t ticks = TickTime::Now().Ticks();
-#if _WIN32
-#ifdef USE_QUERY_PERFORMANCE_COUNTER
- LARGE_INTEGER qpfreq;
- QueryPerformanceFrequency(&qpfreq);
- return (ticks * 1000) / qpfreq.QuadPart;
-#else
- return ticks;
-#endif
-#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
- return ticks / 1000000LL;
-#else
- return ticks / 1000LL;
-#endif
-}
-
-inline int64_t TickTime::MicrosecondTimestamp() {
- int64_t ticks = TickTime::Now().Ticks();
-#if _WIN32
-#ifdef USE_QUERY_PERFORMANCE_COUNTER
- LARGE_INTEGER qpfreq;
- QueryPerformanceFrequency(&qpfreq);
- return (ticks * 1000) / (qpfreq.QuadPart / 1000);
-#else
- return ticks * 1000LL;
-#endif
-#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
- return ticks / 1000LL;
-#else
- return ticks;
-#endif
+ return TickTime(QueryOsForTicks());
}
inline int64_t TickTime::Ticks() const {
return ticks_;
}
-inline int64_t TickTime::MillisecondsToTicks(const int64_t ms) {
-#if _WIN32
-#ifdef USE_QUERY_PERFORMANCE_COUNTER
- LARGE_INTEGER qpfreq;
- QueryPerformanceFrequency(&qpfreq);
- return (qpfreq.QuadPart * ms) / 1000;
-#else
- return ms;
-#endif
-#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
- return ms * 1000000LL;
-#else
- return ms * 1000LL;
-#endif
-}
-
-inline int64_t TickTime::TicksToMilliseconds(const int64_t ticks) {
-#if _WIN32
-#ifdef USE_QUERY_PERFORMANCE_COUNTER
- LARGE_INTEGER qpfreq;
- QueryPerformanceFrequency(&qpfreq);
- return (ticks * 1000) / qpfreq.QuadPart;
-#else
- return ticks;
-#endif
-#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
- return ticks / 1000000LL;
-#else
- return ticks / 1000LL;
-#endif
-}
-
inline TickTime& TickTime::operator+=(const int64_t& ticks) {
ticks_ += ticks;
return *this;
@@ -245,44 +175,6 @@ inline TickInterval::TickInterval(const int64_t interval)
: interval_(interval) {
}
-inline int64_t TickInterval::Milliseconds() const {
-#if _WIN32
-#ifdef USE_QUERY_PERFORMANCE_COUNTER
- LARGE_INTEGER qpfreq;
- QueryPerformanceFrequency(&qpfreq);
- return (interval_ * 1000) / qpfreq.QuadPart;
-#else
- // interval_ is in ms
- return interval_;
-#endif
-#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
- // interval_ is in ns
- return interval_ / 1000000;
-#else
- // interval_ is usecs
- return interval_ / 1000;
-#endif
-}
-
-inline int64_t TickInterval::Microseconds() const {
-#if _WIN32
-#ifdef USE_QUERY_PERFORMANCE_COUNTER
- LARGE_INTEGER qpfreq;
- QueryPerformanceFrequency(&qpfreq);
- return (interval_ * 1000000) / qpfreq.QuadPart;
-#else
- // interval_ is in ms
- return interval_ * 1000LL;
-#endif
-#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
- // interval_ is in ns
- return interval_ / 1000;
-#else
- // interval_ is usecs
- return interval_;
-#endif
-}
-
inline TickInterval& TickInterval::operator+=(const TickInterval& rhs) {
interval_ += rhs.interval_;
return *this;
@@ -295,4 +187,4 @@ inline TickInterval& TickInterval::operator-=(const TickInterval& rhs) {
} // namespace webrtc
-#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_TICK_UTIL_H_
+#endif // WEBRTC_SYSTEM_WRAPPERS_INCLUDE_TICK_UTIL_H_
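With the fake-clock hooks removed, TickTime::Now() always queries the OS, and the conversion helpers (now including TicksToMicroseconds()) move out of the header into tick_util.cc. The usual measurement pattern against this header, as a sketch (ElapsedMsDemo is an illustrative name):

    #include "webrtc/system_wrappers/include/tick_util.h"

    int64_t ElapsedMsDemo() {
      webrtc::TickTime start = webrtc::TickTime::Now();
      // ... timed work goes here ...
      webrtc::TickInterval elapsed = webrtc::TickTime::Now() - start;
      return elapsed.Milliseconds();  // now inline, via TicksToMilliseconds().
    }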
diff --git a/webrtc/system_wrappers/include/timestamp_extrapolator.h b/webrtc/system_wrappers/include/timestamp_extrapolator.h
index b8a8b05d97..d9c5c6fb37 100644
--- a/webrtc/system_wrappers/include/timestamp_extrapolator.h
+++ b/webrtc/system_wrappers/include/timestamp_extrapolator.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef SYSTEM_WRAPPERS_INTERFACE_TIMESTAMP_EXTRAPOLATOR_H_
-#define SYSTEM_WRAPPERS_INTERFACE_TIMESTAMP_EXTRAPOLATOR_H_
+#ifndef SYSTEM_WRAPPERS_INCLUDE_TIMESTAMP_EXTRAPOLATOR_H_
+#define SYSTEM_WRAPPERS_INCLUDE_TIMESTAMP_EXTRAPOLATOR_H_
#include "webrtc/system_wrappers/include/rw_lock_wrapper.h"
#include "webrtc/typedefs.h"
@@ -53,4 +53,4 @@ private:
} // namespace webrtc
-#endif // SYSTEM_WRAPPERS_INTERFACE_TIMESTAMP_EXTRAPOLATOR_H_
+#endif // SYSTEM_WRAPPERS_INCLUDE_TIMESTAMP_EXTRAPOLATOR_H_
diff --git a/webrtc/system_wrappers/include/trace.h b/webrtc/system_wrappers/include/trace.h
index e63b603d1b..25a3d746c4 100644
--- a/webrtc/system_wrappers/include/trace.h
+++ b/webrtc/system_wrappers/include/trace.h
@@ -13,8 +13,8 @@
* messages. Apply filtering to avoid that.
*/
-#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_TRACE_H_
-#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_TRACE_H_
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INCLUDE_TRACE_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INCLUDE_TRACE_H_
#include "webrtc/common_types.h"
#include "webrtc/typedefs.h"
@@ -89,4 +89,4 @@ class Trace {
} // namespace webrtc
-#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_TRACE_H_
+#endif // WEBRTC_SYSTEM_WRAPPERS_INCLUDE_TRACE_H_
diff --git a/webrtc/system_wrappers/include/utf_util_win.h b/webrtc/system_wrappers/include/utf_util_win.h
index cc48fd254d..0e3f2d01c6 100644
--- a/webrtc/system_wrappers/include/utf_util_win.h
+++ b/webrtc/system_wrappers/include/utf_util_win.h
@@ -10,8 +10,8 @@
// Conversion functions for UTF-8 and UTF-16 strings on Windows.
// Duplicated from talk/base/win32.h.
-#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_UTF_UTIL_H_
-#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_UTF_UTIL_H_
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INCLUDE_UTF_UTIL_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INCLUDE_UTF_UTIL_H_
#ifdef WIN32
#include <windows.h>
@@ -54,4 +54,4 @@ inline std::string ToUtf8(const std::wstring& wstr) {
} // namespace webrtc
#endif // WIN32
-#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_UTF_UTIL_H_
+#endif // WEBRTC_SYSTEM_WRAPPERS_INCLUDE_UTF_UTIL_H_
diff --git a/webrtc/system_wrappers/source/Android.mk b/webrtc/system_wrappers/source/Android.mk
index b58f902009..480af5177e 100644
--- a/webrtc/system_wrappers/source/Android.mk
+++ b/webrtc/system_wrappers/source/Android.mk
@@ -31,12 +31,10 @@ LOCAL_SRC_FILES := \
logging.cc \
metrics_default.cc \
rw_lock.cc \
- thread.cc \
trace_impl.cc \
condition_variable_posix.cc \
critical_section_posix.cc \
sleep.cc \
- thread_posix.cc \
trace_posix.cc \
rw_lock_posix.cc \
diff --git a/webrtc/system_wrappers/source/aligned_array_unittest.cc b/webrtc/system_wrappers/source/aligned_array_unittest.cc
index 8d898af03e..01238f8342 100644
--- a/webrtc/system_wrappers/source/aligned_array_unittest.cc
+++ b/webrtc/system_wrappers/source/aligned_array_unittest.cc
@@ -16,7 +16,7 @@
namespace {
-bool IsAligned(const void* ptr, int alignment) {
+bool IsAligned(const void* ptr, size_t alignment) {
return reinterpret_cast<uintptr_t>(ptr) % alignment == 0;
}
@@ -27,7 +27,7 @@ namespace webrtc {
TEST(AlignedArrayTest, CheckAlignment) {
AlignedArray<bool> arr(10, 7, 128);
ASSERT_TRUE(IsAligned(arr.Array(), 128));
- for (int i = 0; i < 10; ++i) {
+ for (size_t i = 0; i < 10; ++i) {
ASSERT_TRUE(IsAligned(arr.Row(i), 128));
ASSERT_EQ(arr.Row(i), arr.Array()[i]);
}
@@ -36,13 +36,13 @@ TEST(AlignedArrayTest, CheckAlignment) {
TEST(AlignedArrayTest, CheckOverlap) {
AlignedArray<size_t> arr(10, 7, 128);
- for (int i = 0; i < 10; ++i) {
+ for (size_t i = 0; i < 10; ++i) {
for (size_t j = 0; j < 7; ++j) {
arr.At(i, j) = 20 * i + j;
}
}
- for (int i = 0; i < 10; ++i) {
+ for (size_t i = 0; i < 10; ++i) {
for (size_t j = 0; j < 7; ++j) {
ASSERT_EQ(arr.At(i, j), 20 * i + j);
ASSERT_EQ(arr.Row(i)[j], 20 * i + j);
@@ -53,7 +53,7 @@ TEST(AlignedArrayTest, CheckOverlap) {
TEST(AlignedArrayTest, CheckRowsCols) {
AlignedArray<bool> arr(10, 7, 128);
- ASSERT_EQ(arr.rows(), 10);
+ ASSERT_EQ(arr.rows(), 10u);
ASSERT_EQ(arr.cols(), 7u);
}
diff --git a/webrtc/system_wrappers/source/condition_variable_unittest.cc b/webrtc/system_wrappers/source/condition_variable_unittest.cc
index ed845cc21e..5a8dd0b36e 100644
--- a/webrtc/system_wrappers/source/condition_variable_unittest.cc
+++ b/webrtc/system_wrappers/source/condition_variable_unittest.cc
@@ -11,9 +11,9 @@
#include "webrtc/system_wrappers/include/condition_variable_wrapper.h"
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/platform_thread.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/system_wrappers/include/trace.h"
@@ -144,12 +144,10 @@ bool WaitingRunFunction(void* obj) {
class CondVarTest : public ::testing::Test {
public:
- CondVarTest() {}
+ CondVarTest() : thread_(&WaitingRunFunction, &baton_, "CondVarTest") {}
virtual void SetUp() {
- thread_ = ThreadWrapper::CreateThread(&WaitingRunFunction,
- &baton_, "CondVarTest");
- ASSERT_TRUE(thread_->Start());
+ thread_.Start();
}
virtual void TearDown() {
@@ -160,14 +158,14 @@ class CondVarTest : public ::testing::Test {
// and Pass).
ASSERT_TRUE(baton_.Pass(kShortWaitMs));
ASSERT_TRUE(baton_.Grab(kShortWaitMs));
- ASSERT_TRUE(thread_->Stop());
+ thread_.Stop();
}
protected:
Baton baton_;
private:
- rtc::scoped_ptr<ThreadWrapper> thread_;
+ rtc::PlatformThread thread_;
};
// The SetUp and TearDown functions use condition variables.
diff --git a/webrtc/system_wrappers/source/critical_section_unittest.cc b/webrtc/system_wrappers/source/critical_section_unittest.cc
index 6848bdd06b..9abf8b8017 100644
--- a/webrtc/system_wrappers/source/critical_section_unittest.cc
+++ b/webrtc/system_wrappers/source/critical_section_unittest.cc
@@ -12,7 +12,7 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/system_wrappers/include/sleep.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
+#include "webrtc/base/platform_thread.h"
#include "webrtc/system_wrappers/include/trace.h"
namespace webrtc {
@@ -78,10 +78,10 @@ TEST_F(CritSectTest, ThreadWakesOnce) NO_THREAD_SAFETY_ANALYSIS {
CriticalSectionWrapper* crit_sect =
CriticalSectionWrapper::CreateCriticalSection();
ProtectedCount count(crit_sect);
- rtc::scoped_ptr<ThreadWrapper> thread = ThreadWrapper::CreateThread(
+ rtc::PlatformThread thread(
&LockUnlockThenStopRunFunction, &count, "ThreadWakesOnce");
crit_sect->Enter();
- ASSERT_TRUE(thread->Start());
+ thread.Start();
SwitchProcess();
// The critical section is of reentrant mode, so this should not release
// the lock, even though count.Count() locks and unlocks the critical section
@@ -90,7 +90,7 @@ TEST_F(CritSectTest, ThreadWakesOnce) NO_THREAD_SAFETY_ANALYSIS {
ASSERT_EQ(0, count.Count());
crit_sect->Leave(); // This frees the thread to act.
EXPECT_TRUE(WaitForCount(1, &count));
- EXPECT_TRUE(thread->Stop());
+ thread.Stop();
delete crit_sect;
}
@@ -105,10 +105,10 @@ TEST_F(CritSectTest, ThreadWakesTwice) NO_THREAD_SAFETY_ANALYSIS {
CriticalSectionWrapper* crit_sect =
CriticalSectionWrapper::CreateCriticalSection();
ProtectedCount count(crit_sect);
- rtc::scoped_ptr<ThreadWrapper> thread = ThreadWrapper::CreateThread(
+ rtc::PlatformThread thread(
&LockUnlockRunFunction, &count, "ThreadWakesTwice");
crit_sect->Enter(); // Make sure counter stays 0 until we wait for it.
- ASSERT_TRUE(thread->Start());
+ thread.Start();
crit_sect->Leave();
// The thread is capable of grabbing the lock multiple times,
@@ -128,7 +128,7 @@ TEST_F(CritSectTest, ThreadWakesTwice) NO_THREAD_SAFETY_ANALYSIS {
SwitchProcess();
EXPECT_TRUE(WaitForCount(count_before + 1, &count));
- EXPECT_TRUE(thread->Stop());
+ thread.Stop();
delete crit_sect;
}
diff --git a/webrtc/system_wrappers/source/data_log.cc b/webrtc/system_wrappers/source/data_log.cc
index dbc8ea1505..778769603b 100644
--- a/webrtc/system_wrappers/source/data_log.cc
+++ b/webrtc/system_wrappers/source/data_log.cc
@@ -318,11 +318,12 @@ int DataLog::NextRow(const std::string& table_name) {
}
DataLogImpl::DataLogImpl()
- : counter_(1),
- tables_(),
- flush_event_(EventWrapper::Create()),
- tables_lock_(RWLockWrapper::CreateRWLock()) {
-}
+ : counter_(1),
+ tables_(),
+ flush_event_(EventWrapper::Create()),
+ file_writer_thread_(
+ new rtc::PlatformThread(DataLogImpl::Run, instance_, "DataLog")),
+ tables_lock_(RWLockWrapper::CreateRWLock()) {}
DataLogImpl::~DataLogImpl() {
StopThread();
@@ -348,12 +349,8 @@ int DataLogImpl::CreateLog() {
}
int DataLogImpl::Init() {
- file_writer_thread_ = ThreadWrapper::CreateThread(
- DataLogImpl::Run, instance_, "DataLog");
- bool success = file_writer_thread_->Start();
- if (!success)
- return -1;
- file_writer_thread_->SetPriority(kHighestPriority);
+ file_writer_thread_->Start();
+ file_writer_thread_->SetPriority(rtc::kHighestPriority);
return 0;
}
@@ -406,13 +403,8 @@ int DataLogImpl::NextRow(const std::string& table_name) {
if (tables_.count(table_name) == 0)
return -1;
tables_[table_name]->NextRow();
- if (!file_writer_thread_) {
- // Write every row to file as they get complete.
- tables_[table_name]->Flush();
- } else {
- // Signal a complete row
- flush_event_->Set();
- }
+ // Signal a complete row
+ flush_event_->Set();
return 0;
}
@@ -435,10 +427,8 @@ void DataLogImpl::Process() {
}
void DataLogImpl::StopThread() {
- if (file_writer_thread_) {
- flush_event_->Set();
- file_writer_thread_->Stop();
- }
+ flush_event_->Set();
+ file_writer_thread_->Stop();
}
} // namespace webrtc
diff --git a/webrtc/system_wrappers/source/event_timer_posix.cc b/webrtc/system_wrappers/source/event_timer_posix.cc
index 99eebcb70a..9f9a324bcb 100644
--- a/webrtc/system_wrappers/source/event_timer_posix.cc
+++ b/webrtc/system_wrappers/source/event_timer_posix.cc
@@ -154,14 +154,14 @@ bool EventTimerPosix::StartTimer(bool periodic, unsigned long time) {
// Start the timer thread
timer_event_.reset(new EventTimerPosix());
const char* thread_name = "WebRtc_event_timer_thread";
- timer_thread_ = ThreadWrapper::CreateThread(Run, this, thread_name);
+ timer_thread_.reset(new rtc::PlatformThread(Run, this, thread_name));
periodic_ = periodic;
time_ = time;
- bool started = timer_thread_->Start();
- timer_thread_->SetPriority(kRealtimePriority);
+ timer_thread_->Start();
+ timer_thread_->SetPriority(rtc::kRealtimePriority);
pthread_mutex_unlock(&mutex_);
- return started;
+ return true;
}
bool EventTimerPosix::Run(void* obj) {
@@ -215,9 +215,7 @@ bool EventTimerPosix::StopTimer() {
timer_event_->Set();
}
if (timer_thread_) {
- if (!timer_thread_->Stop()) {
- return false;
- }
+ timer_thread_->Stop();
timer_thread_.reset();
}
timer_event_.reset();
diff --git a/webrtc/system_wrappers/source/event_timer_posix.h b/webrtc/system_wrappers/source/event_timer_posix.h
index 21c4ac702e..bbf51f72db 100644
--- a/webrtc/system_wrappers/source/event_timer_posix.h
+++ b/webrtc/system_wrappers/source/event_timer_posix.h
@@ -16,7 +16,7 @@
#include <pthread.h>
#include <time.h>
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
+#include "webrtc/base/platform_thread.h"
namespace webrtc {
@@ -46,7 +46,8 @@ class EventTimerPosix : public EventTimerWrapper {
pthread_mutex_t mutex_;
bool event_set_;
- rtc::scoped_ptr<ThreadWrapper> timer_thread_;
+ // TODO(pbos): Remove scoped_ptr and use PlatformThread directly.
+ rtc::scoped_ptr<rtc::PlatformThread> timer_thread_;
rtc::scoped_ptr<EventTimerPosix> timer_event_;
timespec created_at_;
diff --git a/webrtc/system_wrappers/source/event_tracer.cc b/webrtc/system_wrappers/source/event_tracer.cc
deleted file mode 100644
index 9328e80036..0000000000
--- a/webrtc/system_wrappers/source/event_tracer.cc
+++ /dev/null
@@ -1,12 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// This file has moved.
-// TODO(tommi): Delete after removing dependencies and updating Chromium.
diff --git a/webrtc/system_wrappers/source/event_tracer_unittest.cc b/webrtc/system_wrappers/source/event_tracer_unittest.cc
deleted file mode 100644
index 9328e80036..0000000000
--- a/webrtc/system_wrappers/source/event_tracer_unittest.cc
+++ /dev/null
@@ -1,12 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// This file has moved.
-// TODO(tommi): Delete after removing dependencies and updating Chromium.
diff --git a/webrtc/system_wrappers/source/field_trial_default.cc b/webrtc/system_wrappers/source/field_trial_default.cc
index 1a9bd6bc79..0e2c286117 100644
--- a/webrtc/system_wrappers/source/field_trial_default.cc
+++ b/webrtc/system_wrappers/source/field_trial_default.cc
@@ -58,5 +58,9 @@ void InitFieldTrialsFromString(const char* trials_string) {
trials_init_string = trials_string;
}
+const char* GetFieldTrialString() {
+ return trials_init_string;
+}
+
} // namespace field_trial
} // namespace webrtc
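GetFieldTrialString() is new and simply returns the pointer previously handed to InitFieldTrialsFromString(). A usage sketch, assuming the declaration is exposed from the matching field_trial_default.h (the trial string and FieldTrialDemo are illustrative):

    #include "webrtc/system_wrappers/include/field_trial_default.h"

    void FieldTrialDemo() {
      static const char kTrials[] = "WebRTC-Hypothetical/Enabled/";
      webrtc::field_trial::InitFieldTrialsFromString(kTrials);
      // The string is not copied, so it must outlive all field-trial lookups.
      const char* current = webrtc::field_trial::GetFieldTrialString();
      (void)current;  // current == kTrials
    }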
diff --git a/webrtc/system_wrappers/source/logging_unittest.cc b/webrtc/system_wrappers/source/logging_unittest.cc
index 633d84b76b..2da24b26f4 100644
--- a/webrtc/system_wrappers/source/logging_unittest.cc
+++ b/webrtc/system_wrappers/source/logging_unittest.cc
@@ -72,18 +72,5 @@ TEST_F(LoggingTest, LogStream) {
}
}
-TEST_F(LoggingTest, LogFunctionError) {
- {
- CriticalSectionScoped cs(crit_.get());
- int bar = 42;
- int baz = 99;
- level_ = kTraceError;
- expected_log_ << "(logging_unittest.cc:" << __LINE__ + 2
- << "): Foo failed: bar=" << bar << ", baz=" << baz;
- LOG_FERR2(LS_ERROR, Foo, bar, baz);
- cv_->SleepCS(*crit_.get(), 2000);
- }
-}
-
} // namespace
} // namespace webrtc
diff --git a/webrtc/system_wrappers/source/metrics_unittest.cc b/webrtc/system_wrappers/source/metrics_unittest.cc
new file mode 100644
index 0000000000..8319b78ee0
--- /dev/null
+++ b/webrtc/system_wrappers/source/metrics_unittest.cc
@@ -0,0 +1,91 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+#include "webrtc/system_wrappers/include/metrics.h"
+#include "webrtc/test/histogram.h"
+
+namespace webrtc {
+namespace {
+const int kSample = 22;
+const std::string kName = "Name";
+
+void AddSparseSample(const std::string& name, int sample) {
+ RTC_HISTOGRAM_COUNTS_SPARSE_100(name, sample);
+}
+#if GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
+void AddSample(const std::string& name, int sample) {
+ RTC_HISTOGRAM_COUNTS_100(name, sample);
+}
+#endif // GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
+} // namespace
+
+TEST(MetricsTest, InitiallyNoSamples) {
+ test::ClearHistograms();
+ EXPECT_EQ(0, test::NumHistogramSamples(kName));
+ EXPECT_EQ(-1, test::LastHistogramSample(kName));
+}
+
+TEST(MetricsTest, RtcHistogramPercent_AddSample) {
+ test::ClearHistograms();
+ RTC_HISTOGRAM_PERCENTAGE(kName, kSample);
+ EXPECT_EQ(1, test::NumHistogramSamples(kName));
+ EXPECT_EQ(kSample, test::LastHistogramSample(kName));
+}
+
+TEST(MetricsTest, RtcHistogramEnumeration_AddSample) {
+ test::ClearHistograms();
+ RTC_HISTOGRAM_ENUMERATION(kName, kSample, kSample + 1);
+ EXPECT_EQ(1, test::NumHistogramSamples(kName));
+ EXPECT_EQ(kSample, test::LastHistogramSample(kName));
+}
+
+TEST(MetricsTest, RtcHistogramCountsSparse_AddSample) {
+ test::ClearHistograms();
+ RTC_HISTOGRAM_COUNTS_SPARSE_100(kName, kSample);
+ EXPECT_EQ(1, test::NumHistogramSamples(kName));
+ EXPECT_EQ(kSample, test::LastHistogramSample(kName));
+}
+
+TEST(MetricsTest, RtcHistogramCounts_AddSample) {
+ test::ClearHistograms();
+ RTC_HISTOGRAM_COUNTS_100(kName, kSample);
+ EXPECT_EQ(1, test::NumHistogramSamples(kName));
+ EXPECT_EQ(kSample, test::LastHistogramSample(kName));
+}
+
+TEST(MetricsTest, RtcHistogramCounts_AddMultipleSamples) {
+ test::ClearHistograms();
+ const int kNumSamples = 10;
+ for (int i = 0; i < kNumSamples; ++i) {
+ RTC_HISTOGRAM_COUNTS_100(kName, i);
+ }
+ EXPECT_EQ(kNumSamples, test::NumHistogramSamples(kName));
+ EXPECT_EQ(kNumSamples - 1, test::LastHistogramSample(kName));
+}
+
+TEST(MetricsTest, RtcHistogramSparse_NonConstantNameWorks) {
+ test::ClearHistograms();
+ AddSparseSample("Name1", kSample);
+ AddSparseSample("Name2", kSample);
+ EXPECT_EQ(1, test::NumHistogramSamples("Name1"));
+ EXPECT_EQ(1, test::NumHistogramSamples("Name2"));
+}
+
+#if GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
+TEST(MetricsTest, RtcHistogram_FailsForNonConstantName) {
+ test::ClearHistograms();
+ AddSample("Name1", kSample);
+ EXPECT_DEATH(AddSample("Name2", kSample), "");
+}
+#endif // GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
+
+} // namespace webrtc
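The death test above pins down the macro contract: the non-sparse RTC_HISTOGRAM_* macros cache a histogram pointer per call site on first use, so the name must stay constant at that site, while the *_SPARSE_* variants look the histogram up by name on every call. A hedged illustration (RecordDemo is not part of the patch):

    #include <string>

    #include "webrtc/system_wrappers/include/metrics.h"

    void RecordDemo(const std::string& name, int sample) {
      // Fine: the sparse variant does a by-name lookup each time.
      RTC_HISTOGRAM_COUNTS_SPARSE_100(name, sample);
      // Risky: a non-sparse macro at this site would cache the histogram for
      // the first |name| it sees; a second name later trips the check that
      // RtcHistogram_FailsForNonConstantName exercises.
      // RTC_HISTOGRAM_COUNTS_100(name, sample);
    }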
diff --git a/webrtc/system_wrappers/source/ntp_time_unittest.cc b/webrtc/system_wrappers/source/ntp_time_unittest.cc
new file mode 100644
index 0000000000..ff11288c1b
--- /dev/null
+++ b/webrtc/system_wrappers/source/ntp_time_unittest.cc
@@ -0,0 +1,69 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+#include "webrtc/system_wrappers/include/ntp_time.h"
+
+namespace webrtc {
+namespace {
+
+const uint32_t kNtpSec = 0x12345678;
+const uint32_t kNtpFrac = 0x23456789;
+
+TEST(NtpTimeTest, NoValueMeansInvalid) {
+ NtpTime ntp;
+ EXPECT_FALSE(ntp.Valid());
+}
+
+TEST(NtpTimeTest, CanResetValue) {
+ NtpTime ntp(kNtpSec, kNtpFrac);
+ EXPECT_TRUE(ntp.Valid());
+ ntp.Reset();
+ EXPECT_FALSE(ntp.Valid());
+}
+
+TEST(NtpTimeTest, CanGetWhatIsSet) {
+ NtpTime ntp;
+ ntp.Set(kNtpSec, kNtpFrac);
+ EXPECT_EQ(kNtpSec, ntp.seconds());
+ EXPECT_EQ(kNtpFrac, ntp.fractions());
+}
+
+TEST(NtpTimeTest, SetIsSameAs2ParameterConstructor) {
+ NtpTime ntp1(kNtpSec, kNtpFrac);
+ NtpTime ntp2;
+ EXPECT_NE(ntp1, ntp2);
+
+ ntp2.Set(kNtpSec, kNtpFrac);
+ EXPECT_EQ(ntp1, ntp2);
+}
+
+TEST(NtpTimeTest, SetCurrentIsSameAs1ParameterConstructor) {
+ SimulatedClock clock(0x0123456789abcdef);
+
+ NtpTime ntp1(clock);
+ NtpTime ntp2;
+ EXPECT_NE(ntp1, ntp2);
+
+ ntp2.SetCurrent(clock);
+ EXPECT_EQ(ntp1, ntp2);
+}
+
+TEST(NtpTimeTest, ToMsMeansToNtpMilliseconds) {
+ SimulatedClock clock(0x123456789abc);
+
+ NtpTime ntp(clock);
+ EXPECT_EQ(ntp.ToMs(), Clock::NtpToMs(ntp.seconds(), ntp.fractions()));
+ EXPECT_EQ(ntp.ToMs(), clock.CurrentNtpInMilliseconds());
+}
+
+} // namespace
+} // namespace webrtc
diff --git a/webrtc/system_wrappers/source/scoped_vector_unittest.cc b/webrtc/system_wrappers/source/scoped_vector_unittest.cc
index b049e4a340..6e38f01f0b 100644
--- a/webrtc/system_wrappers/source/scoped_vector_unittest.cc
+++ b/webrtc/system_wrappers/source/scoped_vector_unittest.cc
@@ -221,7 +221,8 @@ TEST(ScopedVectorTest, MoveConstruct) {
EXPECT_FALSE(scoped_vector.empty());
EXPECT_TRUE(watcher.IsWatching(scoped_vector.back()));
- ScopedVector<LifeCycleObject> scoped_vector_copy(scoped_vector.Pass());
+ ScopedVector<LifeCycleObject> scoped_vector_copy(
+ scoped_vector.DEPRECATED_Pass());
EXPECT_TRUE(scoped_vector.empty());
EXPECT_FALSE(scoped_vector_copy.empty());
EXPECT_TRUE(watcher.IsWatching(scoped_vector_copy.back()));
@@ -241,7 +242,7 @@ TEST(ScopedVectorTest, MoveAssign) {
EXPECT_FALSE(scoped_vector.empty());
EXPECT_TRUE(watcher.IsWatching(scoped_vector.back()));
- scoped_vector_assign = scoped_vector.Pass();
+ scoped_vector_assign = scoped_vector.DEPRECATED_Pass();
EXPECT_TRUE(scoped_vector.empty());
EXPECT_FALSE(scoped_vector_assign.empty());
EXPECT_TRUE(watcher.IsWatching(scoped_vector_assign.back()));
@@ -273,10 +274,11 @@ class DeleteCounter {
template <typename T>
class PassThru {
public:
- explicit PassThru(ScopedVector<T> scoper) : scoper_(scoper.Pass()) {}
+ explicit PassThru(ScopedVector<T> scoper)
+ : scoper_(scoper.DEPRECATED_Pass()) {}
ScopedVector<T> Run() {
- return scoper_.Pass();
+ return scoper_.DEPRECATED_Pass();
}
private:
@@ -288,7 +290,7 @@ TEST(ScopedVectorTest, Passed) {
ScopedVector<DeleteCounter> deleter_vector;
deleter_vector.push_back(new DeleteCounter(&deletes));
EXPECT_EQ(0, deletes);
- PassThru<DeleteCounter> pass_thru(deleter_vector.Pass());
+ PassThru<DeleteCounter> pass_thru(deleter_vector.DEPRECATED_Pass());
EXPECT_EQ(0, deletes);
ScopedVector<DeleteCounter> result = pass_thru.Run();
EXPECT_EQ(0, deletes);
diff --git a/webrtc/system_wrappers/source/thread.cc b/webrtc/system_wrappers/source/thread.cc
deleted file mode 100644
index 7da1e3d591..0000000000
--- a/webrtc/system_wrappers/source/thread.cc
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
-
-#if defined(_WIN32)
-#include "webrtc/system_wrappers/source/thread_win.h"
-#else
-#include "webrtc/system_wrappers/source/thread_posix.h"
-#endif
-
-namespace webrtc {
-
-#if defined(_WIN32)
-typedef ThreadWindows ThreadType;
-#else
-typedef ThreadPosix ThreadType;
-#endif
-
-rtc::scoped_ptr<ThreadWrapper> ThreadWrapper::CreateThread(
- ThreadRunFunction func, void* obj, const char* thread_name) {
- return rtc::scoped_ptr<ThreadWrapper>(
- new ThreadType(func, obj, thread_name)).Pass();
-}
-
-} // namespace webrtc
diff --git a/webrtc/system_wrappers/source/thread_posix.cc b/webrtc/system_wrappers/source/thread_posix.cc
deleted file mode 100644
index 32ab13c780..0000000000
--- a/webrtc/system_wrappers/source/thread_posix.cc
+++ /dev/null
@@ -1,166 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/system_wrappers/source/thread_posix.h"
-
-#include <algorithm>
-
-#include <errno.h>
-#include <unistd.h>
-#ifdef WEBRTC_LINUX
-#include <linux/unistd.h>
-#include <sched.h>
-#include <sys/types.h>
-#endif
-
-#include "webrtc/base/checks.h"
-#include "webrtc/base/platform_thread.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/system_wrappers/include/sleep.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-namespace webrtc {
-namespace {
-struct ThreadAttributes {
- ThreadAttributes() { pthread_attr_init(&attr); }
- ~ThreadAttributes() { pthread_attr_destroy(&attr); }
- pthread_attr_t* operator&() { return &attr; }
- pthread_attr_t attr;
-};
-} // namespace
-
-int ConvertToSystemPriority(ThreadPriority priority, int min_prio,
- int max_prio) {
- RTC_DCHECK(max_prio - min_prio > 2);
- const int top_prio = max_prio - 1;
- const int low_prio = min_prio + 1;
-
- switch (priority) {
- case kLowPriority:
- return low_prio;
- case kNormalPriority:
- // The -1 ensures that kHighPriority is always greater than or equal to
- // kNormalPriority.
- return (low_prio + top_prio - 1) / 2;
- case kHighPriority:
- return std::max(top_prio - 2, low_prio);
- case kHighestPriority:
- return std::max(top_prio - 1, low_prio);
- case kRealtimePriority:
- return top_prio;
- }
- RTC_DCHECK(false);
- return low_prio;
-}
-
-// static
-void* ThreadPosix::StartThread(void* param) {
- static_cast<ThreadPosix*>(param)->Run();
- return 0;
-}
-
-ThreadPosix::ThreadPosix(ThreadRunFunction func, void* obj,
- const char* thread_name)
- : run_function_(func),
- obj_(obj),
- stop_event_(false, false),
- name_(thread_name ? thread_name : "webrtc"),
- thread_(0) {
- RTC_DCHECK(name_.length() < 64);
-}
-
-uint32_t ThreadWrapper::GetThreadId() {
- return rtc::CurrentThreadId();
-}
-
-ThreadPosix::~ThreadPosix() {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
-}
-
-// TODO(pbos): Make Start void, calling code really doesn't support failures
-// here.
-bool ThreadPosix::Start() {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
- RTC_DCHECK(!thread_) << "Thread already started?";
-
- ThreadAttributes attr;
- // Set the stack size to 1M.
- pthread_attr_setstacksize(&attr, 1024 * 1024);
- RTC_CHECK_EQ(0, pthread_create(&thread_, &attr, &StartThread, this));
- return true;
-}
-
-bool ThreadPosix::Stop() {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
- if (!thread_)
- return true;
-
- stop_event_.Set();
- RTC_CHECK_EQ(0, pthread_join(thread_, nullptr));
- thread_ = 0;
-
- return true;
-}
-
-bool ThreadPosix::SetPriority(ThreadPriority priority) {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
- if (!thread_)
- return false;
-#if defined(WEBRTC_CHROMIUM_BUILD) && defined(WEBRTC_LINUX)
- // TODO(tommi): Switch to the same mechanism as Chromium uses for
- // changing thread priorities.
- return true;
-#else
-#ifdef WEBRTC_THREAD_RR
- const int policy = SCHED_RR;
-#else
- const int policy = SCHED_FIFO;
-#endif
- const int min_prio = sched_get_priority_min(policy);
- const int max_prio = sched_get_priority_max(policy);
- if (min_prio == -1 || max_prio == -1) {
- WEBRTC_TRACE(kTraceError, kTraceUtility, -1,
- "unable to retrieve min or max priority for threads");
- return false;
- }
-
- if (max_prio - min_prio <= 2)
- return false;
-
- sched_param param;
- param.sched_priority = ConvertToSystemPriority(priority, min_prio, max_prio);
- if (pthread_setschedparam(thread_, policy, &param) != 0) {
- WEBRTC_TRACE(
- kTraceError, kTraceUtility, -1, "unable to set thread priority");
- return false;
- }
-
- return true;
-#endif // defined(WEBRTC_CHROMIUM_BUILD) && defined(WEBRTC_LINUX)
-}
-
-void ThreadPosix::Run() {
- if (!name_.empty()) {
- // Setting the thread name may fail (harmlessly) if running inside a
- // sandbox. Ignore failures if they happen.
- rtc::SetCurrentThreadName(name_.substr(0, 63).c_str());
- }
-
- // It's a requirement for successful thread creation that the run
- // function be called at least once (see the RunFunctionIsCalled unit test),
- // so to fulfill that requirement, we use a |do| loop and not |while|.
- do {
- if (!run_function_(obj_))
- break;
- } while (!stop_event_.Wait(0));
-}
-
-} // namespace webrtc
diff --git a/webrtc/system_wrappers/source/thread_posix.h b/webrtc/system_wrappers/source/thread_posix.h
deleted file mode 100644
index bcdd732f86..0000000000
--- a/webrtc/system_wrappers/source/thread_posix.h
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_SYSTEM_WRAPPERS_SOURCE_THREAD_POSIX_H_
-#define WEBRTC_SYSTEM_WRAPPERS_SOURCE_THREAD_POSIX_H_
-
-#include "webrtc/base/event.h"
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/base/thread_checker.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
-
-#include <pthread.h>
-
-namespace webrtc {
-
-int ConvertToSystemPriority(ThreadPriority priority, int min_prio,
- int max_prio);
-
-class ThreadPosix : public ThreadWrapper {
- public:
- ThreadPosix(ThreadRunFunction func, void* obj, const char* thread_name);
- ~ThreadPosix() override;
-
- // From ThreadWrapper.
- bool Start() override;
- bool Stop() override;
-
- bool SetPriority(ThreadPriority priority) override;
-
- private:
- static void* StartThread(void* param);
-
- void Run();
-
- rtc::ThreadChecker thread_checker_;
- ThreadRunFunction const run_function_;
- void* const obj_;
- rtc::Event stop_event_;
- const std::string name_;
-
- pthread_t thread_;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_SYSTEM_WRAPPERS_SOURCE_THREAD_POSIX_H_
diff --git a/webrtc/system_wrappers/source/thread_posix_unittest.cc b/webrtc/system_wrappers/source/thread_posix_unittest.cc
deleted file mode 100644
index edfb14502e..0000000000
--- a/webrtc/system_wrappers/source/thread_posix_unittest.cc
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/system_wrappers/source/thread_posix.h"
-
-#include "testing/gtest/include/gtest/gtest.h"
-
-TEST(ThreadTestPosix, PrioritySettings) {
- // API assumes that max_prio - min_prio > 2. Test the extreme case.
- const int kMinPrio = -1;
- const int kMaxPrio = 2;
-
- int last_priority = kMinPrio;
- for (int priority = webrtc::kLowPriority;
- priority <= webrtc::kRealtimePriority; ++priority) {
- int system_priority = webrtc::ConvertToSystemPriority(
- static_cast<webrtc::ThreadPriority>(priority), kMinPrio, kMaxPrio);
- EXPECT_GT(system_priority, kMinPrio);
- EXPECT_LT(system_priority, kMaxPrio);
- EXPECT_GE(system_priority, last_priority);
- last_priority = system_priority;
- }
-}
diff --git a/webrtc/system_wrappers/source/thread_unittest.cc b/webrtc/system_wrappers/source/thread_unittest.cc
deleted file mode 100644
index c8e180ba32..0000000000
--- a/webrtc/system_wrappers/source/thread_unittest.cc
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/system_wrappers/include/sleep.h"
-
-namespace webrtc {
-
-// Function that does nothing, and reports success.
-bool NullRunFunction(void* obj) {
- SleepMs(0); // Hand over timeslice, prevents busy looping.
- return true;
-}
-
-TEST(ThreadTest, StartStop) {
- rtc::scoped_ptr<ThreadWrapper> thread = ThreadWrapper::CreateThread(
- &NullRunFunction, nullptr, "ThreadTest");
- ASSERT_TRUE(thread->Start());
- EXPECT_TRUE(thread->Stop());
-}
-
-// Function that sets a boolean.
-bool SetFlagRunFunction(void* obj) {
- bool* obj_as_bool = static_cast<bool*>(obj);
- *obj_as_bool = true;
- SleepMs(0); // Hand over timeslice, prevents busy looping.
- return true;
-}
-
-TEST(ThreadTest, RunFunctionIsCalled) {
- bool flag = false;
- rtc::scoped_ptr<ThreadWrapper> thread = ThreadWrapper::CreateThread(
- &SetFlagRunFunction, &flag, "RunFunctionIsCalled");
- ASSERT_TRUE(thread->Start());
-
- // At this point, the flag may be either true or false.
- EXPECT_TRUE(thread->Stop());
-
- // We expect the thread to have run at least once.
- EXPECT_TRUE(flag);
-}
-
-} // namespace webrtc
diff --git a/webrtc/system_wrappers/source/thread_win.cc b/webrtc/system_wrappers/source/thread_win.cc
deleted file mode 100644
index c42196722e..0000000000
--- a/webrtc/system_wrappers/source/thread_win.cc
+++ /dev/null
@@ -1,107 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/system_wrappers/source/thread_win.h"
-
-#include <process.h>
-#include <stdio.h>
-#include <windows.h>
-
-#include "webrtc/base/checks.h"
-#include "webrtc/base/platform_thread.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-namespace webrtc {
-namespace {
-void CALLBACK RaiseFlag(ULONG_PTR param) {
- *reinterpret_cast<bool*>(param) = true;
-}
-}
-
-ThreadWindows::ThreadWindows(ThreadRunFunction func, void* obj,
- const char* thread_name)
- : run_function_(func),
- obj_(obj),
- stop_(false),
- thread_(NULL),
- name_(thread_name ? thread_name : "webrtc") {
- RTC_DCHECK(func);
-}
-
-ThreadWindows::~ThreadWindows() {
- RTC_DCHECK(main_thread_.CalledOnValidThread());
- RTC_DCHECK(!thread_);
-}
-
-// static
-uint32_t ThreadWrapper::GetThreadId() {
- return GetCurrentThreadId();
-}
-
-// static
-DWORD WINAPI ThreadWindows::StartThread(void* param) {
- static_cast<ThreadWindows*>(param)->Run();
- return 0;
-}
-
-bool ThreadWindows::Start() {
- RTC_DCHECK(main_thread_.CalledOnValidThread());
- RTC_DCHECK(!thread_);
-
- stop_ = false;
-
- // See bug 2902 for background on STACK_SIZE_PARAM_IS_A_RESERVATION.
- // Set the reserved stack size to 1M, which is the default on Windows
- // and Linux.
- DWORD thread_id;
- thread_ = ::CreateThread(NULL, 1024 * 1024, &StartThread, this,
- STACK_SIZE_PARAM_IS_A_RESERVATION, &thread_id);
- if (!thread_) {
- RTC_DCHECK(false) << "CreateThread failed";
- return false;
- }
-
- return true;
-}
-
-bool ThreadWindows::Stop() {
- RTC_DCHECK(main_thread_.CalledOnValidThread());
- if (thread_) {
- // Set stop_ to |true| on the worker thread.
- QueueUserAPC(&RaiseFlag, thread_, reinterpret_cast<ULONG_PTR>(&stop_));
- WaitForSingleObject(thread_, INFINITE);
- CloseHandle(thread_);
- thread_ = nullptr;
- }
-
- return true;
-}
-
-bool ThreadWindows::SetPriority(ThreadPriority priority) {
- RTC_DCHECK(main_thread_.CalledOnValidThread());
- return thread_ && SetThreadPriority(thread_, priority);
-}
-
-void ThreadWindows::Run() {
- if (!name_.empty())
- rtc::SetCurrentThreadName(name_.c_str());
-
- do {
- // The interface contract of Start/Stop is that for a successful call to
- // Start, there should be at least one call to the run function. So we
- // call the function before checking |stop_|.
- if (!run_function_(obj_))
- break;
- // Alertable sleep to permit RaiseFlag to run and update |stop_|.
- SleepEx(0, true);
- } while (!stop_);
-}
-
-} // namespace webrtc
diff --git a/webrtc/system_wrappers/source/thread_win.h b/webrtc/system_wrappers/source/thread_win.h
deleted file mode 100644
index 34edd6d6c0..0000000000
--- a/webrtc/system_wrappers/source/thread_win.h
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_SYSTEM_WRAPPERS_SOURCE_THREAD_WIN_H_
-#define WEBRTC_SYSTEM_WRAPPERS_SOURCE_THREAD_WIN_H_
-
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
-
-#include <windows.h>
-
-#include "webrtc/base/thread_checker.h"
-
-namespace webrtc {
-
-class ThreadWindows : public ThreadWrapper {
- public:
- ThreadWindows(ThreadRunFunction func, void* obj, const char* thread_name);
- ~ThreadWindows() override;
-
- bool Start() override;
- bool Stop() override;
-
- bool SetPriority(ThreadPriority priority) override;
-
- protected:
- void Run();
-
- private:
- static DWORD WINAPI StartThread(void* param);
-
- ThreadRunFunction const run_function_;
- void* const obj_;
- bool stop_;
- HANDLE thread_;
- const std::string name_;
- rtc::ThreadChecker main_thread_;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_SYSTEM_WRAPPERS_SOURCE_THREAD_WIN_H_
diff --git a/webrtc/system_wrappers/source/tick_util.cc b/webrtc/system_wrappers/source/tick_util.cc
index bc8fcfe91b..8d94289417 100644
--- a/webrtc/system_wrappers/source/tick_util.cc
+++ b/webrtc/system_wrappers/source/tick_util.cc
@@ -14,33 +14,79 @@
namespace webrtc {
-bool TickTime::use_fake_clock_ = false;
-int64_t TickTime::fake_ticks_ = 0;
+int64_t TickTime::MillisecondTimestamp() {
+ return TicksToMilliseconds(TickTime::Now().Ticks());
+}
-void TickTime::UseFakeClock(int64_t start_millisecond) {
- use_fake_clock_ = true;
- fake_ticks_ = MillisecondsToTicks(start_millisecond);
+int64_t TickTime::MicrosecondTimestamp() {
+ return TicksToMicroseconds(TickTime::Now().Ticks());
}
-void TickTime::AdvanceFakeClock(int64_t milliseconds) {
- assert(use_fake_clock_);
- fake_ticks_ += MillisecondsToTicks(milliseconds);
+int64_t TickTime::MillisecondsToTicks(const int64_t ms) {
+#if _WIN32
+ return ms;
+#elif defined(WEBRTC_LINUX)
+ return ms * 1000000LL;
+#elif defined(WEBRTC_MAC)
+ // TODO(pbos): Fix unsafe use of static locals.
+ static double timebase_from_millisecond_fract = 0.0;
+ if (timebase_from_millisecond_fract == 0.0) {
+ mach_timebase_info_data_t timebase;
+ (void)mach_timebase_info(&timebase);
+ timebase_from_millisecond_fract = (timebase.denom * 1e6) / timebase.numer;
+ }
+ return ms * timebase_from_millisecond_fract;
+#else
+ return ms * 1000LL;
+#endif
}
-int64_t TickTime::QueryOsForTicks() {
- TickTime result;
+int64_t TickTime::TicksToMilliseconds(const int64_t ticks) {
#if _WIN32
- // TODO(wu): Remove QueryPerformanceCounter implementation.
-#ifdef USE_QUERY_PERFORMANCE_COUNTER
- // QueryPerformanceCounter returns the value from the TSC which is
- // incremented at the CPU frequency. The algorithm used requires
- // the CPU frequency to be constant. Technology like speed stepping
- // which has variable CPU frequency will therefore yield unpredictable,
- // incorrect time estimations.
- LARGE_INTEGER qpcnt;
- QueryPerformanceCounter(&qpcnt);
- result.ticks_ = qpcnt.QuadPart;
+ return ticks;
+#elif defined(WEBRTC_LINUX)
+ return ticks / 1000000LL;
+#elif defined(WEBRTC_MAC)
+ // TODO(pbos): Fix unsafe use of static locals.
+ static double timebase_microsecond_fract = 0.0;
+ if (timebase_microsecond_fract == 0.0) {
+ mach_timebase_info_data_t timebase;
+ (void)mach_timebase_info(&timebase);
+ timebase_microsecond_fract = timebase.numer / (timebase.denom * 1e6);
+ }
+ return ticks * timebase_microsecond_fract;
#else
+ return ticks;
+#endif
+}
+
+int64_t TickTime::TicksToMicroseconds(const int64_t ticks) {
+#if _WIN32
+ return ticks * 1000LL;
+#elif defined(WEBRTC_LINUX)
+ return ticks / 1000LL;
+#elif defined(WEBRTC_MAC)
+ // TODO(pbos): Fix unsafe use of static locals.
+ static double timebase_microsecond_fract = 0.0;
+ if (timebase_microsecond_fract == 0.0) {
+ mach_timebase_info_data_t timebase;
+ (void)mach_timebase_info(&timebase);
+ timebase_microsecond_fract = timebase.numer / (timebase.denom * 1e3);
+ }
+ return ticks * timebase_microsecond_fract;
+#else
+ return ticks;
+#endif
+}
+
+// Gets the native system tick count. The actual unit, resolution, and epoch
+// vary by platform:
+// Windows: Milliseconds of uptime with rollover count in the upper 32-bits.
+// Linux/Android: Nanoseconds since the Unix epoch.
+// Mach (Mac/iOS): "absolute" time since first call.
+// Unknown POSIX: Microseconds since the Unix epoch.
+int64_t TickTime::QueryOsForTicks() {
+#if _WIN32
static volatile LONG last_time_get_time = 0;
static volatile int64_t num_wrap_time_get_time = 0;
volatile LONG* last_time_get_time_ptr = &last_time_get_time;
@@ -53,11 +99,11 @@ int64_t TickTime::QueryOsForTicks() {
// 0x0fffffff ~3.1 days, the code will not take that long to execute
// so it must have been a wrap around.
if (old > 0xf0000000 && now < 0x0fffffff) {
+ // TODO(pbos): Fix unsafe use of static locals.
num_wrap_time_get_time++;
}
}
- result.ticks_ = now + (num_wrap_time_get_time << 32);
-#endif
+ return now + (num_wrap_time_get_time << 32);
#elif defined(WEBRTC_LINUX)
struct timespec ts;
// TODO(wu): Remove CLOCK_REALTIME implementation.
@@ -66,33 +112,24 @@ int64_t TickTime::QueryOsForTicks() {
#else
clock_gettime(CLOCK_MONOTONIC, &ts);
#endif
- result.ticks_ = 1000000000LL * static_cast<int64_t>(ts.tv_sec) +
- static_cast<int64_t>(ts.tv_nsec);
+ return 1000000000LL * ts.tv_sec + ts.tv_nsec;
#elif defined(WEBRTC_MAC)
- static mach_timebase_info_data_t timebase;
- if (timebase.denom == 0) {
- // Get the timebase if this is the first time we run.
- // Recommended by Apple's QA1398.
- kern_return_t retval = mach_timebase_info(&timebase);
- if (retval != KERN_SUCCESS) {
- // TODO(wu): Implement RTC_CHECK for all the platforms. Then replace this
- // with a RTC_CHECK_EQ(retval, KERN_SUCCESS);
-#ifndef WEBRTC_IOS
- asm("int3");
-#else
- __builtin_trap();
-#endif // WEBRTC_IOS
- }
+ // Return absolute time as an offset from the first call to this function, so
+ // that we can do floating-point (double) operations on it without losing
+ // precision. This holds true until the elapsed time is ~11 days,
+ // at which point we'll start to lose some precision, though not enough to
+ // matter for millisecond accuracy for another couple of years after that.
+ // TODO(pbos): Fix unsafe use of static locals.
+ static uint64_t timebase_start = 0;
+ if (timebase_start == 0) {
+ timebase_start = mach_absolute_time();
}
- // Use timebase to convert absolute time tick units into nanoseconds.
- result.ticks_ = mach_absolute_time() * timebase.numer / timebase.denom;
+ return mach_absolute_time() - timebase_start;
#else
struct timeval tv;
gettimeofday(&tv, NULL);
- result.ticks_ = 1000000LL * static_cast<int64_t>(tv.tv_sec) +
- static_cast<int64_t>(tv.tv_usec);
+ return 1000000LL * tv.tv_sec + tv.tv_usec;
#endif
- return result.ticks_;
}
} // namespace webrtc
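The Mac branches above collapse mach_timebase_info into a cached double ratio so each conversion is a single multiply. They rely on the Apple QA1398 identity, nanoseconds = ticks * numer / denom, so ms = ticks * numer / (denom * 1e6) and us = ticks * numer / (denom * 1e3). A standalone sketch of the millisecond case (MachTicksToMsDemo is illustrative and omits the patch's static caching):

    #include <mach/mach_time.h>
    #include <stdint.h>

    int64_t MachTicksToMsDemo(int64_t ticks) {
      mach_timebase_info_data_t timebase;
      mach_timebase_info(&timebase);  // numer/denom converts ticks to ns.
      const double ms_per_tick = timebase.numer / (timebase.denom * 1e6);
      return static_cast<int64_t>(ticks * ms_per_tick);
    }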
diff --git a/webrtc/system_wrappers/source/trace_impl.cc b/webrtc/system_wrappers/source/trace_impl.cc
index ffe79b9862..5029f5ab6e 100644
--- a/webrtc/system_wrappers/source/trace_impl.cc
+++ b/webrtc/system_wrappers/source/trace_impl.cc
@@ -16,6 +16,7 @@
#include <string.h>
#include "webrtc/base/atomicops.h"
+#include "webrtc/base/platform_thread.h"
#ifdef _WIN32
#include "webrtc/system_wrappers/source/trace_win.h"
#else
@@ -76,7 +77,7 @@ TraceImpl::~TraceImpl() {
}
int32_t TraceImpl::AddThreadId(char* trace_message) const {
- uint32_t thread_id = ThreadWrapper::GetThreadId();
+ uint32_t thread_id = rtc::CurrentThreadId();
// Messages is 12 characters.
return sprintf(trace_message, "%10u; ", thread_id);
}
diff --git a/webrtc/system_wrappers/source/trace_impl.h b/webrtc/system_wrappers/source/trace_impl.h
index ed49d9d0aa..c6d81d5b0b 100644
--- a/webrtc/system_wrappers/source/trace_impl.h
+++ b/webrtc/system_wrappers/source/trace_impl.h
@@ -16,7 +16,7 @@
#include "webrtc/system_wrappers/include/event_wrapper.h"
#include "webrtc/system_wrappers/include/file_wrapper.h"
#include "webrtc/system_wrappers/include/static_instance.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
+#include "webrtc/base/platform_thread.h"
#include "webrtc/system_wrappers/include/trace.h"
namespace webrtc {
diff --git a/webrtc/system_wrappers/system_wrappers.gyp b/webrtc/system_wrappers/system_wrappers.gyp
index f66437b37a..507fc45311 100644
--- a/webrtc/system_wrappers/system_wrappers.gyp
+++ b/webrtc/system_wrappers/system_wrappers.gyp
@@ -28,7 +28,6 @@
'include/data_log.h',
'include/data_log_c.h',
'include/data_log_impl.h',
- 'include/event_tracer.h',
'include/event_wrapper.h',
'include/field_trial.h',
'include/file_wrapper.h',
@@ -36,6 +35,7 @@
'include/logcat_trace_context.h',
'include/logging.h',
'include/metrics.h',
+ 'include/ntp_time.h',
'include/ref_count.h',
'include/rtp_to_ntp.h',
'include/rw_lock_wrapper.h',
@@ -45,7 +45,6 @@
'include/static_instance.h',
'include/stl_util.h',
'include/stringize_macros.h',
- 'include/thread_wrapper.h',
'include/tick_util.h',
'include/timestamp_extrapolator.h',
'include/trace.h',
@@ -77,7 +76,6 @@
'source/event_timer_posix.h',
'source/event_timer_win.cc',
'source/event_timer_win.h',
- 'source/event_tracer.cc',
'source/file_impl.cc',
'source/file_impl.h',
'source/logcat_trace_context.cc',
@@ -93,11 +91,6 @@
'source/sleep.cc',
'source/sort.cc',
'source/tick_util.cc',
- 'source/thread.cc',
- 'source/thread_posix.cc',
- 'source/thread_posix.h',
- 'source/thread_win.cc',
- 'source/thread_win.h',
'source/timestamp_extrapolator.cc',
'source/trace_impl.cc',
'source/trace_impl.h',
@@ -206,13 +199,11 @@
'sources': [
'source/metrics_default.cc',
],
- 'dependencies': [
- 'system_wrappers',
- ]
}, {
'target_name': 'system_wrappers_default',
'type': 'static_library',
'dependencies': [
+ 'system_wrappers',
'field_trial_default',
'metrics_default',
]
diff --git a/webrtc/system_wrappers/system_wrappers_tests.gyp b/webrtc/system_wrappers/system_wrappers_tests.gyp
index da2fe7432f..a0ae14d6cf 100644
--- a/webrtc/system_wrappers/system_wrappers_tests.gyp
+++ b/webrtc/system_wrappers/system_wrappers_tests.gyp
@@ -15,6 +15,7 @@
'dependencies': [
'<(DEPTH)/testing/gtest.gyp:gtest',
'<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
+ '<(webrtc_root)/test/test.gyp:histogram',
'<(webrtc_root)/test/test.gyp:test_support_main',
],
'sources': [
@@ -23,19 +24,18 @@
'source/clock_unittest.cc',
'source/condition_variable_unittest.cc',
'source/critical_section_unittest.cc',
- 'source/event_tracer_unittest.cc',
'source/logging_unittest.cc',
'source/data_log_unittest.cc',
'source/data_log_unittest_disabled.cc',
'source/data_log_helpers_unittest.cc',
'source/data_log_c_helpers_unittest.c',
'source/data_log_c_helpers_unittest.h',
+ 'source/metrics_unittest.cc',
+ 'source/ntp_time_unittest.cc',
'source/rtp_to_ntp_unittest.cc',
'source/scoped_vector_unittest.cc',
'source/stringize_macros_unittest.cc',
'source/stl_util_unittest.cc',
- 'source/thread_unittest.cc',
- 'source/thread_posix_unittest.cc',
],
'conditions': [
['enable_data_logging==1', {
@@ -43,9 +43,6 @@
}, {
'sources!': [ 'source/data_log_unittest.cc', ],
}],
- ['os_posix==0', {
- 'sources!': [ 'source/thread_posix_unittest.cc', ],
- }],
['OS=="android"', {
'dependencies': [
'<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
diff --git a/webrtc/test/BUILD.gn b/webrtc/test/BUILD.gn
index ed1dc75078..3ecd903522 100644
--- a/webrtc/test/BUILD.gn
+++ b/webrtc/test/BUILD.gn
@@ -27,6 +27,7 @@ source_set("field_trial") {
deps = [
"..:webrtc_common",
"../system_wrappers",
+ "../system_wrappers:field_trial_default",
]
configs += [ "..:common_config" ]
@@ -58,7 +59,6 @@ source_set("test_support") {
"testsupport/frame_reader.h",
"testsupport/frame_writer.cc",
"testsupport/frame_writer.h",
- "testsupport/gtest_disable.h",
"testsupport/mock/mock_frame_reader.h",
"testsupport/mock/mock_frame_writer.h",
"testsupport/packet_reader.cc",
@@ -70,10 +70,10 @@ source_set("test_support") {
]
deps = [
- "//testing/gmock",
- "//testing/gtest",
"..:gtest_prod",
"../system_wrappers",
+ "//testing/gmock",
+ "//testing/gtest",
]
if (is_android) {
diff --git a/webrtc/test/call_test.cc b/webrtc/test/call_test.cc
index 0a8b686974..e9651e33f5 100644
--- a/webrtc/test/call_test.cc
+++ b/webrtc/test/call_test.cc
@@ -7,8 +7,15 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
+#include "webrtc/base/checks.h"
+#include "webrtc/common.h"
+#include "webrtc/config.h"
#include "webrtc/test/call_test.h"
#include "webrtc/test/encoder_settings.h"
+#include "webrtc/test/testsupport/fileutils.h"
+#include "webrtc/voice_engine/include/voe_base.h"
+#include "webrtc/voice_engine/include/voe_codec.h"
+#include "webrtc/voice_engine/include/voe_network.h"
namespace webrtc {
namespace test {
@@ -19,25 +26,43 @@ const int kVideoRotationRtpExtensionId = 4;
CallTest::CallTest()
: clock_(Clock::GetRealTimeClock()),
- send_config_(nullptr),
- send_stream_(NULL),
- fake_encoder_(clock_) {
-}
+ video_send_config_(nullptr),
+ video_send_stream_(nullptr),
+ audio_send_config_(nullptr),
+ audio_send_stream_(nullptr),
+ fake_encoder_(clock_),
+ num_video_streams_(1),
+ num_audio_streams_(0),
+ fake_send_audio_device_(nullptr),
+ fake_recv_audio_device_(nullptr) {}
CallTest::~CallTest() {
}
-void CallTest::RunBaseTest(BaseTest* test,
- const FakeNetworkPipe::Config& config) {
- CreateSenderCall(test->GetSenderCallConfig());
- if (test->ShouldCreateReceivers())
- CreateReceiverCall(test->GetReceiverCallConfig());
- send_transport_.reset(new PacketTransport(
- sender_call_.get(), test, test::PacketTransport::kSender, config));
- receive_transport_.reset(new PacketTransport(
- nullptr, test, test::PacketTransport::kReceiver, config));
- test->OnTransportsCreated(send_transport_.get(), receive_transport_.get());
+void CallTest::RunBaseTest(BaseTest* test) {
+ num_video_streams_ = test->GetNumVideoStreams();
+ num_audio_streams_ = test->GetNumAudioStreams();
+ RTC_DCHECK(num_video_streams_ > 0 || num_audio_streams_ > 0);
+ Call::Config send_config(test->GetSenderCallConfig());
+ if (num_audio_streams_ > 0) {
+ CreateVoiceEngines();
+ AudioState::Config audio_state_config;
+ audio_state_config.voice_engine = voe_send_.voice_engine;
+ send_config.audio_state = AudioState::Create(audio_state_config);
+ }
+ CreateSenderCall(send_config);
+ if (test->ShouldCreateReceivers()) {
+ Call::Config recv_config(test->GetReceiverCallConfig());
+ if (num_audio_streams_ > 0) {
+ AudioState::Config audio_state_config;
+ audio_state_config.voice_engine = voe_recv_.voice_engine;
+ recv_config.audio_state = AudioState::Create(audio_state_config);
+ }
+ CreateReceiverCall(recv_config);
+ }
test->OnCallsCreated(sender_call_.get(), receiver_call_.get());
+ send_transport_.reset(test->CreateSendTransport(sender_call_.get()));
+ receive_transport_.reset(test->CreateReceiveTransport());
if (test->ShouldCreateReceivers()) {
send_transport_->SetReceiver(receiver_call_->Receiver());
@@ -48,16 +73,34 @@ void CallTest::RunBaseTest(BaseTest* test,
receive_transport_->SetReceiver(nullptr);
}
- CreateSendConfig(test->GetNumStreams(), send_transport_.get());
+ CreateSendConfig(num_video_streams_, num_audio_streams_,
+ send_transport_.get());
if (test->ShouldCreateReceivers()) {
CreateMatchingReceiveConfigs(receive_transport_.get());
}
- test->ModifyConfigs(&send_config_, &receive_configs_, &encoder_config_);
- CreateStreams();
- test->OnStreamsCreated(send_stream_, receive_streams_);
+ if (num_audio_streams_ > 0)
+ SetupVoiceEngineTransports(send_transport_.get(), receive_transport_.get());
+
+ if (num_video_streams_ > 0) {
+ test->ModifyVideoConfigs(&video_send_config_, &video_receive_configs_,
+ &video_encoder_config_);
+ }
+ if (num_audio_streams_ > 0)
+ test->ModifyAudioConfigs(&audio_send_config_, &audio_receive_configs_);
- CreateFrameGeneratorCapturer();
- test->OnFrameGeneratorCapturerCreated(frame_generator_capturer_.get());
+ if (num_video_streams_ > 0) {
+ CreateVideoStreams();
+ test->OnVideoStreamsCreated(video_send_stream_, video_receive_streams_);
+ }
+ if (num_audio_streams_ > 0) {
+ CreateAudioStreams();
+ test->OnAudioStreamsCreated(audio_send_stream_, audio_receive_streams_);
+ }
+
+ if (num_video_streams_ > 0) {
+ CreateFrameGeneratorCapturer();
+ test->OnFrameGeneratorCapturerCreated(frame_generator_capturer_.get());
+ }
Start();
test->PerformTest();
@@ -66,12 +109,28 @@ void CallTest::RunBaseTest(BaseTest* test,
Stop();
DestroyStreams();
+ DestroyCalls();
+ if (num_audio_streams_ > 0)
+ DestroyVoiceEngines();
}
void CallTest::Start() {
- send_stream_->Start();
- for (size_t i = 0; i < receive_streams_.size(); ++i)
- receive_streams_[i]->Start();
+ if (video_send_stream_)
+ video_send_stream_->Start();
+ for (VideoReceiveStream* video_recv_stream : video_receive_streams_)
+ video_recv_stream->Start();
+ if (audio_send_stream_) {
+ fake_send_audio_device_->Start();
+ audio_send_stream_->Start();
+ EXPECT_EQ(0, voe_send_.base->StartSend(voe_send_.channel_id));
+ }
+ for (AudioReceiveStream* audio_recv_stream : audio_receive_streams_)
+ audio_recv_stream->Start();
+ if (!audio_receive_streams_.empty()) {
+ fake_recv_audio_device_->Start();
+ EXPECT_EQ(0, voe_recv_.base->StartPlayout(voe_recv_.channel_id));
+ EXPECT_EQ(0, voe_recv_.base->StartReceive(voe_recv_.channel_id));
+ }
if (frame_generator_capturer_.get() != NULL)
frame_generator_capturer_->Start();
}
@@ -79,9 +138,22 @@ void CallTest::Start() {
void CallTest::Stop() {
if (frame_generator_capturer_.get() != NULL)
frame_generator_capturer_->Stop();
- for (size_t i = 0; i < receive_streams_.size(); ++i)
- receive_streams_[i]->Stop();
- send_stream_->Stop();
+ if (!audio_receive_streams_.empty()) {
+ fake_recv_audio_device_->Stop();
+ EXPECT_EQ(0, voe_recv_.base->StopReceive(voe_recv_.channel_id));
+ EXPECT_EQ(0, voe_recv_.base->StopPlayout(voe_recv_.channel_id));
+ }
+ for (AudioReceiveStream* audio_recv_stream : audio_receive_streams_)
+ audio_recv_stream->Stop();
+ if (audio_send_stream_) {
+ fake_send_audio_device_->Stop();
+ EXPECT_EQ(0, voe_send_.base->StopSend(voe_send_.channel_id));
+ audio_send_stream_->Stop();
+ }
+ for (VideoReceiveStream* video_recv_stream : video_receive_streams_)
+ video_recv_stream->Stop();
+ if (video_send_stream_)
+ video_send_stream_->Stop();
}
void CallTest::CreateCalls(const Call::Config& sender_config,
@@ -99,92 +171,203 @@ void CallTest::CreateReceiverCall(const Call::Config& config) {
}
void CallTest::DestroyCalls() {
- sender_call_.reset(nullptr);
- receiver_call_.reset(nullptr);
+ sender_call_.reset();
+ receiver_call_.reset();
}
-void CallTest::CreateSendConfig(size_t num_streams,
+void CallTest::CreateSendConfig(size_t num_video_streams,
+ size_t num_audio_streams,
Transport* send_transport) {
- assert(num_streams <= kNumSsrcs);
- send_config_ = VideoSendStream::Config(send_transport);
- send_config_.encoder_settings.encoder = &fake_encoder_;
- send_config_.encoder_settings.payload_name = "FAKE";
- send_config_.encoder_settings.payload_type = kFakeSendPayloadType;
- send_config_.rtp.extensions.push_back(
- RtpExtension(RtpExtension::kAbsSendTime, kAbsSendTimeExtensionId));
- encoder_config_.streams = test::CreateVideoStreams(num_streams);
- for (size_t i = 0; i < num_streams; ++i)
- send_config_.rtp.ssrcs.push_back(kSendSsrcs[i]);
- send_config_.rtp.extensions.push_back(
- RtpExtension(RtpExtension::kVideoRotation, kVideoRotationRtpExtensionId));
-}
-
-void CallTest::CreateMatchingReceiveConfigs(
- Transport* rtcp_send_transport) {
- assert(!send_config_.rtp.ssrcs.empty());
- assert(receive_configs_.empty());
- assert(allocated_decoders_.empty());
- VideoReceiveStream::Config config(rtcp_send_transport);
- config.rtp.remb = true;
- config.rtp.local_ssrc = kReceiverLocalSsrc;
- for (const RtpExtension& extension : send_config_.rtp.extensions)
- config.rtp.extensions.push_back(extension);
- for (size_t i = 0; i < send_config_.rtp.ssrcs.size(); ++i) {
- VideoReceiveStream::Decoder decoder =
- test::CreateMatchingDecoder(send_config_.encoder_settings);
- allocated_decoders_.push_back(decoder.decoder);
- config.decoders.clear();
- config.decoders.push_back(decoder);
- config.rtp.remote_ssrc = send_config_.rtp.ssrcs[i];
- receive_configs_.push_back(config);
+ RTC_DCHECK(num_video_streams <= kNumSsrcs);
+ RTC_DCHECK_LE(num_audio_streams, 1u);
+ RTC_DCHECK(num_audio_streams == 0 || voe_send_.channel_id >= 0);
+ if (num_video_streams > 0) {
+ video_send_config_ = VideoSendStream::Config(send_transport);
+ video_send_config_.encoder_settings.encoder = &fake_encoder_;
+ video_send_config_.encoder_settings.payload_name = "FAKE";
+ video_send_config_.encoder_settings.payload_type =
+ kFakeVideoSendPayloadType;
+ video_send_config_.rtp.extensions.push_back(
+ RtpExtension(RtpExtension::kAbsSendTime, kAbsSendTimeExtensionId));
+ video_encoder_config_.streams = test::CreateVideoStreams(num_video_streams);
+ for (size_t i = 0; i < num_video_streams; ++i)
+ video_send_config_.rtp.ssrcs.push_back(kVideoSendSsrcs[i]);
+ video_send_config_.rtp.extensions.push_back(RtpExtension(
+ RtpExtension::kVideoRotation, kVideoRotationRtpExtensionId));
+ }
+
+ if (num_audio_streams > 0) {
+ audio_send_config_ = AudioSendStream::Config(send_transport);
+ audio_send_config_.voe_channel_id = voe_send_.channel_id;
+ audio_send_config_.rtp.ssrc = kAudioSendSsrc;
+ }
+}
+
+void CallTest::CreateMatchingReceiveConfigs(Transport* rtcp_send_transport) {
+ RTC_DCHECK(video_receive_configs_.empty());
+ RTC_DCHECK(allocated_decoders_.empty());
+ if (num_video_streams_ > 0) {
+ RTC_DCHECK(!video_send_config_.rtp.ssrcs.empty());
+ VideoReceiveStream::Config video_config(rtcp_send_transport);
+ video_config.rtp.remb = true;
+ video_config.rtp.local_ssrc = kReceiverLocalVideoSsrc;
+ for (const RtpExtension& extension : video_send_config_.rtp.extensions)
+ video_config.rtp.extensions.push_back(extension);
+ for (size_t i = 0; i < video_send_config_.rtp.ssrcs.size(); ++i) {
+ VideoReceiveStream::Decoder decoder =
+ test::CreateMatchingDecoder(video_send_config_.encoder_settings);
+ allocated_decoders_.push_back(decoder.decoder);
+ video_config.decoders.clear();
+ video_config.decoders.push_back(decoder);
+ video_config.rtp.remote_ssrc = video_send_config_.rtp.ssrcs[i];
+ video_receive_configs_.push_back(video_config);
+ }
+ }
+
+ RTC_DCHECK(num_audio_streams_ <= 1);
+ if (num_audio_streams_ == 1) {
+ RTC_DCHECK(voe_send_.channel_id >= 0);
+ AudioReceiveStream::Config audio_config;
+ audio_config.rtp.local_ssrc = kReceiverLocalAudioSsrc;
+ audio_config.rtcp_send_transport = rtcp_send_transport;
+ audio_config.voe_channel_id = voe_recv_.channel_id;
+ audio_config.rtp.remote_ssrc = audio_send_config_.rtp.ssrc;
+ audio_receive_configs_.push_back(audio_config);
}
}
void CallTest::CreateFrameGeneratorCapturer() {
- VideoStream stream = encoder_config_.streams.back();
- frame_generator_capturer_.reset(
- test::FrameGeneratorCapturer::Create(send_stream_->Input(),
- stream.width,
- stream.height,
- stream.max_framerate,
- clock_));
+ VideoStream stream = video_encoder_config_.streams.back();
+ frame_generator_capturer_.reset(test::FrameGeneratorCapturer::Create(
+ video_send_stream_->Input(), stream.width, stream.height,
+ stream.max_framerate, clock_));
}
-void CallTest::CreateStreams() {
- assert(send_stream_ == NULL);
- assert(receive_streams_.empty());
+void CallTest::CreateFakeAudioDevices() {
+ fake_send_audio_device_.reset(new FakeAudioDevice(
+ clock_, test::ResourcePath("voice_engine/audio_long16", "pcm")));
+ fake_recv_audio_device_.reset(new FakeAudioDevice(
+ clock_, test::ResourcePath("voice_engine/audio_long16", "pcm")));
+}
- send_stream_ =
- sender_call_->CreateVideoSendStream(send_config_, encoder_config_);
+void CallTest::CreateVideoStreams() {
+ RTC_DCHECK(video_send_stream_ == nullptr);
+ RTC_DCHECK(video_receive_streams_.empty());
+ RTC_DCHECK(audio_send_stream_ == nullptr);
+ RTC_DCHECK(audio_receive_streams_.empty());
+
+ video_send_stream_ = sender_call_->CreateVideoSendStream(
+ video_send_config_, video_encoder_config_);
+ for (size_t i = 0; i < video_receive_configs_.size(); ++i) {
+ video_receive_streams_.push_back(
+ receiver_call_->CreateVideoReceiveStream(video_receive_configs_[i]));
+ }
+}
- for (size_t i = 0; i < receive_configs_.size(); ++i) {
- receive_streams_.push_back(
- receiver_call_->CreateVideoReceiveStream(receive_configs_[i]));
+void CallTest::CreateAudioStreams() {
+ audio_send_stream_ = sender_call_->CreateAudioSendStream(audio_send_config_);
+ for (size_t i = 0; i < audio_receive_configs_.size(); ++i) {
+ audio_receive_streams_.push_back(
+ receiver_call_->CreateAudioReceiveStream(audio_receive_configs_[i]));
}
+ CodecInst isac = {kAudioSendPayloadType, "ISAC", 16000, 480, 1, 32000};
+ EXPECT_EQ(0, voe_send_.codec->SetSendCodec(voe_send_.channel_id, isac));
}
void CallTest::DestroyStreams() {
- if (send_stream_ != NULL)
- sender_call_->DestroyVideoSendStream(send_stream_);
- send_stream_ = NULL;
- for (size_t i = 0; i < receive_streams_.size(); ++i)
- receiver_call_->DestroyVideoReceiveStream(receive_streams_[i]);
- receive_streams_.clear();
+ if (video_send_stream_)
+ sender_call_->DestroyVideoSendStream(video_send_stream_);
+ video_send_stream_ = nullptr;
+ for (VideoReceiveStream* video_recv_stream : video_receive_streams_)
+ receiver_call_->DestroyVideoReceiveStream(video_recv_stream);
+
+ if (audio_send_stream_)
+ sender_call_->DestroyAudioSendStream(audio_send_stream_);
+ audio_send_stream_ = nullptr;
+ for (AudioReceiveStream* audio_recv_stream : audio_receive_streams_)
+ receiver_call_->DestroyAudioReceiveStream(audio_recv_stream);
+ video_receive_streams_.clear();
+
allocated_decoders_.clear();
}
-const unsigned int CallTest::kDefaultTimeoutMs = 30 * 1000;
-const unsigned int CallTest::kLongTimeoutMs = 120 * 1000;
-const uint8_t CallTest::kSendPayloadType = 100;
-const uint8_t CallTest::kFakeSendPayloadType = 125;
+void CallTest::CreateVoiceEngines() {
+ CreateFakeAudioDevices();
+ voe_send_.voice_engine = VoiceEngine::Create();
+ voe_send_.base = VoEBase::GetInterface(voe_send_.voice_engine);
+ voe_send_.network = VoENetwork::GetInterface(voe_send_.voice_engine);
+ voe_send_.codec = VoECodec::GetInterface(voe_send_.voice_engine);
+ EXPECT_EQ(0, voe_send_.base->Init(fake_send_audio_device_.get(), nullptr));
+ Config voe_config;
+ voe_config.Set<VoicePacing>(new VoicePacing(true));
+ voe_send_.channel_id = voe_send_.base->CreateChannel(voe_config);
+ EXPECT_GE(voe_send_.channel_id, 0);
+
+ voe_recv_.voice_engine = VoiceEngine::Create();
+ voe_recv_.base = VoEBase::GetInterface(voe_recv_.voice_engine);
+ voe_recv_.network = VoENetwork::GetInterface(voe_recv_.voice_engine);
+ voe_recv_.codec = VoECodec::GetInterface(voe_recv_.voice_engine);
+ EXPECT_EQ(0, voe_recv_.base->Init(fake_recv_audio_device_.get(), nullptr));
+ voe_recv_.channel_id = voe_recv_.base->CreateChannel();
+ EXPECT_GE(voe_recv_.channel_id, 0);
+}
+
+void CallTest::SetupVoiceEngineTransports(PacketTransport* send_transport,
+ PacketTransport* recv_transport) {
+ voe_send_.transport_adapter.reset(
+ new internal::TransportAdapter(send_transport));
+ voe_send_.transport_adapter->Enable();
+ EXPECT_EQ(0, voe_send_.network->RegisterExternalTransport(
+ voe_send_.channel_id, *voe_send_.transport_adapter.get()));
+
+ voe_recv_.transport_adapter.reset(
+ new internal::TransportAdapter(recv_transport));
+ voe_recv_.transport_adapter->Enable();
+ EXPECT_EQ(0, voe_recv_.network->RegisterExternalTransport(
+ voe_recv_.channel_id, *voe_recv_.transport_adapter.get()));
+}
+
+void CallTest::DestroyVoiceEngines() {
+ voe_recv_.base->DeleteChannel(voe_recv_.channel_id);
+ voe_recv_.channel_id = -1;
+ voe_recv_.base->Release();
+ voe_recv_.base = nullptr;
+ voe_recv_.network->Release();
+ voe_recv_.network = nullptr;
+ voe_recv_.codec->Release();
+ voe_recv_.codec = nullptr;
+
+ voe_send_.base->DeleteChannel(voe_send_.channel_id);
+ voe_send_.channel_id = -1;
+ voe_send_.base->Release();
+ voe_send_.base = nullptr;
+ voe_send_.network->Release();
+ voe_send_.network = nullptr;
+ voe_send_.codec->Release();
+ voe_send_.codec = nullptr;
+
+ VoiceEngine::Delete(voe_send_.voice_engine);
+ voe_send_.voice_engine = nullptr;
+ VoiceEngine::Delete(voe_recv_.voice_engine);
+ voe_recv_.voice_engine = nullptr;
+}
+
+const int CallTest::kDefaultTimeoutMs = 30 * 1000;
+const int CallTest::kLongTimeoutMs = 120 * 1000;
+const uint8_t CallTest::kVideoSendPayloadType = 100;
+const uint8_t CallTest::kFakeVideoSendPayloadType = 125;
const uint8_t CallTest::kSendRtxPayloadType = 98;
const uint8_t CallTest::kRedPayloadType = 118;
const uint8_t CallTest::kRtxRedPayloadType = 99;
const uint8_t CallTest::kUlpfecPayloadType = 119;
+const uint8_t CallTest::kAudioSendPayloadType = 103;
const uint32_t CallTest::kSendRtxSsrcs[kNumSsrcs] = {0xBADCAFD, 0xBADCAFE,
0xBADCAFF};
-const uint32_t CallTest::kSendSsrcs[kNumSsrcs] = {0xC0FFED, 0xC0FFEE, 0xC0FFEF};
-const uint32_t CallTest::kReceiverLocalSsrc = 0x123456;
+const uint32_t CallTest::kVideoSendSsrcs[kNumSsrcs] = {0xC0FFED, 0xC0FFEE,
+ 0xC0FFEF};
+const uint32_t CallTest::kAudioSendSsrc = 0xDEADBEEF;
+const uint32_t CallTest::kReceiverLocalVideoSsrc = 0x123456;
+const uint32_t CallTest::kReceiverLocalAudioSsrc = 0x1234567;
const int CallTest::kNackRtpHistoryMs = 1000;
BaseTest::BaseTest(unsigned int timeout_ms) : RtpRtcpObserver(timeout_ms) {
@@ -204,23 +387,40 @@ Call::Config BaseTest::GetReceiverCallConfig() {
void BaseTest::OnCallsCreated(Call* sender_call, Call* receiver_call) {
}
-void BaseTest::OnTransportsCreated(PacketTransport* send_transport,
- PacketTransport* receive_transport) {}
+test::PacketTransport* BaseTest::CreateSendTransport(Call* sender_call) {
+ return new PacketTransport(sender_call, this, test::PacketTransport::kSender,
+ FakeNetworkPipe::Config());
+}
-size_t BaseTest::GetNumStreams() const {
+test::PacketTransport* BaseTest::CreateReceiveTransport() {
+ return new PacketTransport(nullptr, this, test::PacketTransport::kReceiver,
+ FakeNetworkPipe::Config());
+}
+
+size_t BaseTest::GetNumVideoStreams() const {
return 1;
}
-void BaseTest::ModifyConfigs(
+size_t BaseTest::GetNumAudioStreams() const {
+ return 0;
+}
+
+void BaseTest::ModifyVideoConfigs(
VideoSendStream::Config* send_config,
std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) {
-}
+ VideoEncoderConfig* encoder_config) {}
-void BaseTest::OnStreamsCreated(
+void BaseTest::OnVideoStreamsCreated(
VideoSendStream* send_stream,
- const std::vector<VideoReceiveStream*>& receive_streams) {
-}
+ const std::vector<VideoReceiveStream*>& receive_streams) {}
+
+void BaseTest::ModifyAudioConfigs(
+ AudioSendStream::Config* send_config,
+ std::vector<AudioReceiveStream::Config>* receive_configs) {}
+
+void BaseTest::OnAudioStreamsCreated(
+ AudioSendStream* send_stream,
+ const std::vector<AudioReceiveStream*>& receive_streams) {}
void BaseTest::OnFrameGeneratorCapturerCreated(
FrameGeneratorCapturer* frame_generator_capturer) {
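To illustrate how the split video/audio hooks are consumed, a hedged sketch of a minimal audio-only test (the class name and the empty PerformTest body are assumptions, not part of the patch):
class AudioOnlySetupTest : public test::BaseTest {
 public:
  AudioOnlySetupTest() : BaseTest(test::CallTest::kDefaultTimeoutMs) {}
  size_t GetNumVideoStreams() const override { return 0; }
  size_t GetNumAudioStreams() const override { return 1; }
  bool ShouldCreateReceivers() const override { return true; }
  void PerformTest() override {
    // A real test would exchange packets and signal its observer here.
  }
};
A fixture deriving from CallTest would then instantiate this and hand it to RunBaseTest(&test), which builds the voice engines, calls, transports, and streams in the order shown above.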
diff --git a/webrtc/test/call_test.h b/webrtc/test/call_test.h
index cf024d9c65..251d7f6044 100644
--- a/webrtc/test/call_test.h
+++ b/webrtc/test/call_test.h
@@ -7,19 +7,26 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_TEST_COMMON_CALL_TEST_H_
-#define WEBRTC_TEST_COMMON_CALL_TEST_H_
+#ifndef WEBRTC_TEST_CALL_TEST_H_
+#define WEBRTC_TEST_CALL_TEST_H_
#include <vector>
#include "webrtc/call.h"
+#include "webrtc/call/transport_adapter.h"
#include "webrtc/system_wrappers/include/scoped_vector.h"
+#include "webrtc/test/fake_audio_device.h"
#include "webrtc/test/fake_decoder.h"
#include "webrtc/test/fake_encoder.h"
#include "webrtc/test/frame_generator_capturer.h"
#include "webrtc/test/rtp_rtcp_observer.h"
namespace webrtc {
+
+class VoEBase;
+class VoECodec;
+class VoENetwork;
+
namespace test {
class BaseTest;
@@ -27,25 +34,31 @@ class BaseTest;
class CallTest : public ::testing::Test {
public:
CallTest();
- ~CallTest();
+ virtual ~CallTest();
static const size_t kNumSsrcs = 3;
- static const unsigned int kDefaultTimeoutMs;
- static const unsigned int kLongTimeoutMs;
- static const uint8_t kSendPayloadType;
+ static const int kDefaultTimeoutMs;
+ static const int kLongTimeoutMs;
+ static const uint8_t kVideoSendPayloadType;
static const uint8_t kSendRtxPayloadType;
- static const uint8_t kFakeSendPayloadType;
+ static const uint8_t kFakeVideoSendPayloadType;
static const uint8_t kRedPayloadType;
static const uint8_t kRtxRedPayloadType;
static const uint8_t kUlpfecPayloadType;
+ static const uint8_t kAudioSendPayloadType;
static const uint32_t kSendRtxSsrcs[kNumSsrcs];
- static const uint32_t kSendSsrcs[kNumSsrcs];
- static const uint32_t kReceiverLocalSsrc;
+ static const uint32_t kVideoSendSsrcs[kNumSsrcs];
+ static const uint32_t kAudioSendSsrc;
+ static const uint32_t kReceiverLocalVideoSsrc;
+ static const uint32_t kReceiverLocalAudioSsrc;
static const int kNackRtpHistoryMs;
protected:
- void RunBaseTest(BaseTest* test, const FakeNetworkPipe::Config& config);
+ // RunBaseTest overwrites the audio_state and the voice_engine of the send and
+ // receive Call configs to simplify test code and avoid having old VoiceEngine
+ // APIs in the tests.
+ void RunBaseTest(BaseTest* test);
void CreateCalls(const Call::Config& sender_config,
const Call::Config& receiver_config);
@@ -53,12 +66,16 @@ class CallTest : public ::testing::Test {
void CreateReceiverCall(const Call::Config& config);
void DestroyCalls();
- void CreateSendConfig(size_t num_streams, Transport* send_transport);
+ void CreateSendConfig(size_t num_video_streams,
+ size_t num_audio_streams,
+ Transport* send_transport);
void CreateMatchingReceiveConfigs(Transport* rtcp_send_transport);
void CreateFrameGeneratorCapturer();
+ void CreateFakeAudioDevices();
- void CreateStreams();
+ void CreateVideoStreams();
+ void CreateAudioStreams();
void Start();
void Stop();
void DestroyStreams();
@@ -67,18 +84,57 @@ class CallTest : public ::testing::Test {
rtc::scoped_ptr<Call> sender_call_;
rtc::scoped_ptr<PacketTransport> send_transport_;
- VideoSendStream::Config send_config_;
- VideoEncoderConfig encoder_config_;
- VideoSendStream* send_stream_;
+ VideoSendStream::Config video_send_config_;
+ VideoEncoderConfig video_encoder_config_;
+ VideoSendStream* video_send_stream_;
+ AudioSendStream::Config audio_send_config_;
+ AudioSendStream* audio_send_stream_;
rtc::scoped_ptr<Call> receiver_call_;
rtc::scoped_ptr<PacketTransport> receive_transport_;
- std::vector<VideoReceiveStream::Config> receive_configs_;
- std::vector<VideoReceiveStream*> receive_streams_;
+ std::vector<VideoReceiveStream::Config> video_receive_configs_;
+ std::vector<VideoReceiveStream*> video_receive_streams_;
+ std::vector<AudioReceiveStream::Config> audio_receive_configs_;
+ std::vector<AudioReceiveStream*> audio_receive_streams_;
rtc::scoped_ptr<test::FrameGeneratorCapturer> frame_generator_capturer_;
test::FakeEncoder fake_encoder_;
ScopedVector<VideoDecoder> allocated_decoders_;
+ size_t num_video_streams_;
+ size_t num_audio_streams_;
+
+ private:
+ // TODO(holmer): Remove once VoiceEngine is fully refactored to the new API.
+ // These methods set up legacy voice engines and channels, which is
+ // necessary while VoiceEngine is being refactored to the new stream API.
+ struct VoiceEngineState {
+ VoiceEngineState()
+ : voice_engine(nullptr),
+ base(nullptr),
+ network(nullptr),
+ codec(nullptr),
+ channel_id(-1),
+ transport_adapter(nullptr) {}
+
+ VoiceEngine* voice_engine;
+ VoEBase* base;
+ VoENetwork* network;
+ VoECodec* codec;
+ int channel_id;
+ rtc::scoped_ptr<internal::TransportAdapter> transport_adapter;
+ };
+
+ void CreateVoiceEngines();
+ void SetupVoiceEngineTransports(PacketTransport* send_transport,
+ PacketTransport* recv_transport);
+ void DestroyVoiceEngines();
+
+ VoiceEngineState voe_send_;
+ VoiceEngineState voe_recv_;
+
+ // The audio devices must outlive the voice engines.
+ rtc::scoped_ptr<test::FakeAudioDevice> fake_send_audio_device_;
+ rtc::scoped_ptr<test::FakeAudioDevice> fake_recv_audio_device_;
};
class BaseTest : public RtpRtcpObserver {
@@ -89,22 +145,31 @@ class BaseTest : public RtpRtcpObserver {
virtual void PerformTest() = 0;
virtual bool ShouldCreateReceivers() const = 0;
- virtual size_t GetNumStreams() const;
+ virtual size_t GetNumVideoStreams() const;
+ virtual size_t GetNumAudioStreams() const;
virtual Call::Config GetSenderCallConfig();
virtual Call::Config GetReceiverCallConfig();
virtual void OnCallsCreated(Call* sender_call, Call* receiver_call);
- virtual void OnTransportsCreated(PacketTransport* send_transport,
- PacketTransport* receive_transport);
- virtual void ModifyConfigs(
+ virtual test::PacketTransport* CreateSendTransport(Call* sender_call);
+ virtual test::PacketTransport* CreateReceiveTransport();
+
+ virtual void ModifyVideoConfigs(
VideoSendStream::Config* send_config,
std::vector<VideoReceiveStream::Config>* receive_configs,
VideoEncoderConfig* encoder_config);
- virtual void OnStreamsCreated(
+ virtual void OnVideoStreamsCreated(
VideoSendStream* send_stream,
const std::vector<VideoReceiveStream*>& receive_streams);
+ virtual void ModifyAudioConfigs(
+ AudioSendStream::Config* send_config,
+ std::vector<AudioReceiveStream::Config>* receive_configs);
+ virtual void OnAudioStreamsCreated(
+ AudioSendStream* send_stream,
+ const std::vector<AudioReceiveStream*>& receive_streams);
+
virtual void OnFrameGeneratorCapturerCreated(
FrameGeneratorCapturer* frame_generator_capturer);
};
@@ -126,4 +191,4 @@ class EndToEndTest : public BaseTest {
} // namespace test
} // namespace webrtc
-#endif // WEBRTC_TEST_COMMON_CALL_TEST_H_
+#endif // WEBRTC_TEST_CALL_TEST_H_
diff --git a/webrtc/test/channel_transport/channel_transport.cc b/webrtc/test/channel_transport/channel_transport.cc
index 25eb59d887..38eefe54a2 100644
--- a/webrtc/test/channel_transport/channel_transport.cc
+++ b/webrtc/test/channel_transport/channel_transport.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/test/channel_transport/include/channel_transport.h"
+#include "webrtc/test/channel_transport/channel_transport.h"
#include <stdio.h>
@@ -16,7 +16,6 @@
#include "testing/gtest/include/gtest/gtest.h"
#endif
#include "webrtc/test/channel_transport/udp_transport.h"
-#include "webrtc/video_engine/vie_defines.h"
#include "webrtc/voice_engine/include/voe_network.h"
#if defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS)
@@ -66,10 +65,11 @@ void VoiceChannelTransport::IncomingRTCPPacket(
}
int VoiceChannelTransport::SetLocalReceiver(uint16_t rtp_port) {
+ static const int kNumReceiveSocketBuffers = 500;
int return_value = socket_transport_->InitializeReceiveSockets(this,
rtp_port);
if (return_value == 0) {
- return socket_transport_->StartReceiving(kViENumReceiveSocketBuffers);
+ return socket_transport_->StartReceiving(kNumReceiveSocketBuffers);
}
return return_value;
}
diff --git a/webrtc/test/channel_transport/channel_transport.h b/webrtc/test/channel_transport/channel_transport.h
new file mode 100644
index 0000000000..bab7c59181
--- /dev/null
+++ b/webrtc/test/channel_transport/channel_transport.h
@@ -0,0 +1,56 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_TEST_CHANNEL_TRANSPORT_CHANNEL_TRANSPORT_H_
+#define WEBRTC_TEST_CHANNEL_TRANSPORT_CHANNEL_TRANSPORT_H_
+
+#include "webrtc/test/channel_transport/udp_transport.h"
+
+namespace webrtc {
+
+class VoENetwork;
+
+namespace test {
+
+// Helper class for VoiceEngine tests.
+class VoiceChannelTransport : public UdpTransportData {
+ public:
+ VoiceChannelTransport(VoENetwork* voe_network, int channel);
+
+ virtual ~VoiceChannelTransport();
+
+ // Start implementation of UdpTransportData.
+ void IncomingRTPPacket(const int8_t* incoming_rtp_packet,
+ const size_t packet_length,
+ const char* /*from_ip*/,
+ const uint16_t /*from_port*/) override;
+
+ void IncomingRTCPPacket(const int8_t* incoming_rtcp_packet,
+ const size_t packet_length,
+ const char* /*from_ip*/,
+ const uint16_t /*from_port*/) override;
+ // End implementation of UdpTransportData.
+
+ // Specifies the port to receive RTP packets on.
+ int SetLocalReceiver(uint16_t rtp_port);
+
+ // Specifies the destination port and IP address for a specified channel.
+ int SetSendDestination(const char* ip_address, uint16_t rtp_port);
+
+ private:
+ int channel_;
+ VoENetwork* voe_network_;
+ UdpTransport* socket_transport_;
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // WEBRTC_TEST_CHANNEL_TRANSPORT_CHANNEL_TRANSPORT_H_
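A hedged usage sketch for the relocated helper (the port, address, and surrounding setup are placeholders; voe_network and channel come from a live VoiceEngine):
void LoopbackSketch(webrtc::VoENetwork* voe_network, int channel) {
  webrtc::test::VoiceChannelTransport transport(voe_network, channel);
  transport.SetLocalReceiver(1234);                 // Receive RTP on port 1234.
  transport.SetSendDestination("127.0.0.1", 1234);  // Loop back to ourselves.
  // In real use the transport must outlive the sending/receiving channel.
}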
diff --git a/webrtc/test/channel_transport/include/channel_transport.h b/webrtc/test/channel_transport/include/channel_transport.h
deleted file mode 100644
index 8b84517de0..0000000000
--- a/webrtc/test/channel_transport/include/channel_transport.h
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_TEST_CHANNEL_TRANSPORT_INCLUDE_CHANNEL_TRANSPORT_H_
-#define WEBRTC_TEST_CHANNEL_TRANSPORT_INCLUDE_CHANNEL_TRANSPORT_H_
-
-#include "webrtc/test/channel_transport/udp_transport.h"
-
-namespace webrtc {
-
-class VoENetwork;
-
-namespace test {
-
-// Helper class for VoiceEngine tests.
-class VoiceChannelTransport : public UdpTransportData {
- public:
- VoiceChannelTransport(VoENetwork* voe_network, int channel);
-
- virtual ~VoiceChannelTransport();
-
- // Start implementation of UdpTransportData.
- void IncomingRTPPacket(const int8_t* incoming_rtp_packet,
- const size_t packet_length,
- const char* /*from_ip*/,
- const uint16_t /*from_port*/) override;
-
- void IncomingRTCPPacket(const int8_t* incoming_rtcp_packet,
- const size_t packet_length,
- const char* /*from_ip*/,
- const uint16_t /*from_port*/) override;
- // End implementation of UdpTransportData.
-
- // Specifies the ports to receive RTP packets on.
- int SetLocalReceiver(uint16_t rtp_port);
-
- // Specifies the destination port and IP address for a specified channel.
- int SetSendDestination(const char* ip_address, uint16_t rtp_port);
-
- private:
- int channel_;
- VoENetwork* voe_network_;
- UdpTransport* socket_transport_;
-};
-
-} // namespace test
-} // namespace webrtc
-
-#endif // WEBRTC_TEST_CHANNEL_TRANSPORT_INCLUDE_CHANNEL_TRANSPORT_H_
diff --git a/webrtc/test/channel_transport/udp_socket2_manager_win.cc b/webrtc/test/channel_transport/udp_socket2_manager_win.cc
index 5a11abbd6e..9f40350287 100644
--- a/webrtc/test/channel_transport/udp_socket2_manager_win.cc
+++ b/webrtc/test/channel_transport/udp_socket2_manager_win.cc
@@ -520,8 +520,8 @@ int32_t UdpSocket2WorkerWindows::_numOfWorkers = 0;
UdpSocket2WorkerWindows::UdpSocket2WorkerWindows(HANDLE ioCompletionHandle)
: _ioCompletionHandle(ioCompletionHandle),
- _init(false)
-{
+ _pThread(Run, this, "UdpSocket2ManagerWindows_thread"),
+ _init(false) {
_workerNumber = _numOfWorkers++;
WEBRTC_TRACE(kTraceMemory, kTraceTransport, -1,
"UdpSocket2WorkerWindows created");
@@ -537,10 +537,9 @@ bool UdpSocket2WorkerWindows::Start()
{
WEBRTC_TRACE(kTraceStateInfo, kTraceTransport, -1,
"Start UdpSocket2WorkerWindows");
- if (!_pThread->Start())
- return false;
+ _pThread.Start();
- _pThread->SetPriority(kRealtimePriority);
+ _pThread.SetPriority(rtc::kRealtimePriority);
return true;
}
@@ -548,18 +547,14 @@ bool UdpSocket2WorkerWindows::Stop()
{
WEBRTC_TRACE(kTraceStateInfo, kTraceTransport, -1,
"Stop UdpSocket2WorkerWindows");
- return _pThread->Stop();
+ _pThread.Stop();
+ return true;
}
int32_t UdpSocket2WorkerWindows::Init()
{
- if(!_init)
- {
- const char* threadName = "UdpSocket2ManagerWindows_thread";
- _pThread = ThreadWrapper::CreateThread(Run, this, threadName);
- _init = true;
- }
- return 0;
+ _init = true;
+ return 0;
}
bool UdpSocket2WorkerWindows::Run(void* obj)
diff --git a/webrtc/test/channel_transport/udp_socket2_manager_win.h b/webrtc/test/channel_transport/udp_socket2_manager_win.h
index c6af03a702..e762dccd0d 100644
--- a/webrtc/test/channel_transport/udp_socket2_manager_win.h
+++ b/webrtc/test/channel_transport/udp_socket2_manager_win.h
@@ -17,7 +17,7 @@
#include "webrtc/system_wrappers/include/atomic32.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
+#include "webrtc/base/platform_thread.h"
#include "webrtc/test/channel_transport/udp_socket2_win.h"
#include "webrtc/test/channel_transport/udp_socket_manager_wrapper.h"
#include "webrtc/test/channel_transport/udp_transport.h"
@@ -47,7 +47,7 @@ struct PerIoContext {
int fromLen;
// Should be set to true if the I/O context was passed to the system by
// a thread controlled by the socket implementation.
- bool ioInitiatedByThreadWrapper;
+ bool ioInitiatedByPlatformThread;
// TODO (hellner): Not used. Delete it.
PerIoContext* pNextFree;
};
@@ -105,7 +105,7 @@ protected:
bool Process();
private:
HANDLE _ioCompletionHandle;
- rtc::scoped_ptr<ThreadWrapper> _pThread;
+ rtc::PlatformThread _pThread;
static int32_t _numOfWorkers;
int32_t _workerNumber;
volatile bool _stop;
diff --git a/webrtc/test/channel_transport/udp_socket2_win.cc b/webrtc/test/channel_transport/udp_socket2_win.cc
index 4c63dc938d..adeb46a9d2 100644
--- a/webrtc/test/channel_transport/udp_socket2_win.cc
+++ b/webrtc/test/channel_transport/udp_socket2_win.cc
@@ -432,13 +432,13 @@ void UdpSocket2Windows::IOCompleted(PerIoContext* pIOContext,
if(pIOContext == NULL || error == ERROR_OPERATION_ABORTED)
{
if ((pIOContext != NULL) &&
- !pIOContext->ioInitiatedByThreadWrapper &&
+ !pIOContext->ioInitiatedByPlatformThread &&
(error == ERROR_OPERATION_ABORTED) &&
(pIOContext->ioOperation == OP_READ) &&
_outstandingCallsDisabled)
{
- // !pIOContext->initiatedIOByThreadWrapper indicate that the I/O
- // was not initiated by a ThreadWrapper thread.
+ // !pIOContext->ioInitiatedByPlatformThread indicates that the I/O
+ // was not initiated by a PlatformThread.
// This may happen if the thread that initiated receiving (e.g.
// by calling StartListen()) is deleted before any packets have
// been received.
@@ -519,7 +519,7 @@ void UdpSocket2Windows::IOCompleted(PerIoContext* pIOContext,
{
// The PerIoContext was posted by a thread controlled by the socket
// implementation.
- pIOContext->ioInitiatedByThreadWrapper = true;
+ pIOContext->ioInitiatedByPlatformThread = true;
}
OutstandingCallCompleted();
return;
@@ -546,7 +546,7 @@ int32_t UdpSocket2Windows::PostRecv()
}
// This function may have been called by a thread not controlled by the
// socket implementation.
- pIoContext->ioInitiatedByThreadWrapper = false;
+ pIoContext->ioInitiatedByPlatformThread = false;
return PostRecv(pIoContext);
}
diff --git a/webrtc/test/channel_transport/udp_socket_manager_posix.cc b/webrtc/test/channel_transport/udp_socket_manager_posix.cc
index 145efcbc58..6b1a466bf2 100644
--- a/webrtc/test/channel_transport/udp_socket_manager_posix.cc
+++ b/webrtc/test/channel_transport/udp_socket_manager_posix.cc
@@ -184,12 +184,11 @@ bool UdpSocketManagerPosix::RemoveSocket(UdpSocketWrapper* s)
return retVal;
}
-
UdpSocketManagerPosixImpl::UdpSocketManagerPosixImpl()
-{
- _critSectList = CriticalSectionWrapper::CreateCriticalSection();
- _thread = ThreadWrapper::CreateThread(UdpSocketManagerPosixImpl::Run, this,
- "UdpSocketManagerPosixImplThread");
+ : _thread(UdpSocketManagerPosixImpl::Run,
+ this,
+ "UdpSocketManagerPosixImplThread"),
+ _critSectList(CriticalSectionWrapper::CreateCriticalSection()) {
FD_ZERO(&_readFds);
WEBRTC_TRACE(kTraceMemory, kTraceTransport, -1,
"UdpSocketManagerPosix created");
@@ -220,29 +219,19 @@ UdpSocketManagerPosixImpl::~UdpSocketManagerPosixImpl()
bool UdpSocketManagerPosixImpl::Start()
{
- if (!_thread)
- {
- return false;
- }
-
WEBRTC_TRACE(kTraceStateInfo, kTraceTransport, -1,
"Start UdpSocketManagerPosix");
- if (!_thread->Start())
- return false;
- _thread->SetPriority(kRealtimePriority);
+ _thread.Start();
+ _thread.SetPriority(rtc::kRealtimePriority);
return true;
}
bool UdpSocketManagerPosixImpl::Stop()
{
- if (!_thread)
- {
- return true;
- }
-
WEBRTC_TRACE(kTraceStateInfo, kTraceTransport, -1,
"Stop UdpSocketManagerPosix");
- return _thread->Stop();
+ _thread.Stop();
+ return true;
}
bool UdpSocketManagerPosixImpl::Process()
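Both socket managers now follow the same rtc::PlatformThread pattern; a hedged standalone sketch of that pattern (class name illustrative, signatures as used in the hunks above):
#include "webrtc/base/platform_thread.h"

class PollerSketch {
 public:
  PollerSketch() : thread_(&PollerSketch::Run, this, "PollerSketch") {}
  void Start() {
    thread_.Start();  // Unlike ThreadWrapper::Start(), this cannot fail.
    thread_.SetPriority(rtc::kRealtimePriority);
  }
  void Stop() { thread_.Stop(); }  // Joins the thread before returning.

 private:
  static bool Run(void* obj) {
    // Return true to be scheduled again, false to end the thread.
    return static_cast<PollerSketch*>(obj)->Process();
  }
  bool Process() { return true; }
  rtc::PlatformThread thread_;
};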
diff --git a/webrtc/test/channel_transport/udp_socket_manager_posix.h b/webrtc/test/channel_transport/udp_socket_manager_posix.h
index 64156fd20f..45e55af99a 100644
--- a/webrtc/test/channel_transport/udp_socket_manager_posix.h
+++ b/webrtc/test/channel_transport/udp_socket_manager_posix.h
@@ -17,8 +17,8 @@
#include <list>
#include <map>
+#include "webrtc/base/platform_thread.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
#include "webrtc/test/channel_transport/udp_socket_manager_wrapper.h"
#include "webrtc/test/channel_transport/udp_socket_wrapper.h"
@@ -75,7 +75,7 @@ protected:
private:
typedef std::list<UdpSocketWrapper*> SocketList;
typedef std::list<SOCKET> FdList;
- rtc::scoped_ptr<ThreadWrapper> _thread;
+ rtc::PlatformThread _thread;
CriticalSectionWrapper* _critSectList;
fd_set _readFds;
diff --git a/webrtc/test/common_unittest.cc b/webrtc/test/common_unittest.cc
index 082c18c2c7..a239dade73 100644
--- a/webrtc/test/common_unittest.cc
+++ b/webrtc/test/common_unittest.cc
@@ -15,6 +15,7 @@ namespace webrtc {
namespace {
struct MyExperiment {
+ static const ConfigOptionID identifier = ConfigOptionID::kMyExperimentForTest;
static const int kDefaultFactor;
static const int kDefaultOffset;
@@ -56,6 +57,8 @@ TEST(Config, SetNullSetsTheOptionBackToDefault) {
}
struct Algo1_CostFunction {
+ static const ConfigOptionID identifier =
+ ConfigOptionID::kAlgo1CostFunctionForTest;
Algo1_CostFunction() {}
virtual int cost(int x) const {
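A hedged sketch of what the new identifier requirement means for webrtc::Config users (the struct is illustrative; the enumerator is the one registered above):
struct SketchOption {
  static const ConfigOptionID identifier =
      ConfigOptionID::kMyExperimentForTest;  // Compile-time key for lookup.
  int factor = 1;
};

void UseConfig(Config* config) {
  config->Set<SketchOption>(new SketchOption());  // Config takes ownership.
  int factor = config->Get<SketchOption>().factor;
  (void)factor;
}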
diff --git a/webrtc/test/configurable_frame_size_encoder.cc b/webrtc/test/configurable_frame_size_encoder.cc
index 2cd47504a5..831e481bd8 100644
--- a/webrtc/test/configurable_frame_size_encoder.cc
+++ b/webrtc/test/configurable_frame_size_encoder.cc
@@ -14,8 +14,8 @@
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/common_video/interface/video_image.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "webrtc/common_video/include/video_image.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
namespace webrtc {
namespace test {
diff --git a/webrtc/test/direct_transport.cc b/webrtc/test/direct_transport.cc
index 6dcba81c88..591e154b14 100644
--- a/webrtc/test/direct_transport.cc
+++ b/webrtc/test/direct_transport.cc
@@ -18,26 +18,17 @@ namespace webrtc {
namespace test {
DirectTransport::DirectTransport(Call* send_call)
- : send_call_(send_call),
- packet_event_(EventWrapper::Create()),
- thread_(
- ThreadWrapper::CreateThread(NetworkProcess, this, "NetworkProcess")),
- clock_(Clock::GetRealTimeClock()),
- shutting_down_(false),
- fake_network_(FakeNetworkPipe::Config()) {
- EXPECT_TRUE(thread_->Start());
-}
+ : DirectTransport(FakeNetworkPipe::Config(), send_call) {}
DirectTransport::DirectTransport(const FakeNetworkPipe::Config& config,
Call* send_call)
: send_call_(send_call),
- packet_event_(EventWrapper::Create()),
- thread_(
- ThreadWrapper::CreateThread(NetworkProcess, this, "NetworkProcess")),
+ packet_event_(false, false),
+ thread_(NetworkProcess, this, "NetworkProcess"),
clock_(Clock::GetRealTimeClock()),
shutting_down_(false),
- fake_network_(config) {
- EXPECT_TRUE(thread_->Start());
+ fake_network_(clock_, config) {
+ thread_.Start();
}
DirectTransport::~DirectTransport() { StopSending(); }
@@ -52,8 +43,8 @@ void DirectTransport::StopSending() {
shutting_down_ = true;
}
- packet_event_->Set();
- EXPECT_TRUE(thread_->Stop());
+ packet_event_.Set();
+ thread_.Stop();
}
void DirectTransport::SetReceiver(PacketReceiver* receiver) {
@@ -69,16 +60,20 @@ bool DirectTransport::SendRtp(const uint8_t* data,
send_call_->OnSentPacket(sent_packet);
}
fake_network_.SendPacket(data, length);
- packet_event_->Set();
+ packet_event_.Set();
return true;
}
bool DirectTransport::SendRtcp(const uint8_t* data, size_t length) {
fake_network_.SendPacket(data, length);
- packet_event_->Set();
+ packet_event_.Set();
return true;
}
+int DirectTransport::GetAverageDelayMs() {
+ return fake_network_.AverageDelay();
+}
+
bool DirectTransport::NetworkProcess(void* transport) {
return static_cast<DirectTransport*>(transport)->SendPackets();
}
@@ -87,15 +82,7 @@ bool DirectTransport::SendPackets() {
fake_network_.Process();
int64_t wait_time_ms = fake_network_.TimeUntilNextProcess();
if (wait_time_ms > 0) {
- switch (packet_event_->Wait(static_cast<unsigned long>(wait_time_ms))) {
- case kEventSignaled:
- break;
- case kEventTimeout:
- break;
- case kEventError:
- // TODO(pbos): Log a warning here?
- return true;
- }
+ packet_event_.Wait(static_cast<int>(wait_time_ms));
}
rtc::CritScope crit(&lock_);
return shutting_down_ ? false : true;
diff --git a/webrtc/test/direct_transport.h b/webrtc/test/direct_transport.h
index 241a5bc110..d68bc7184e 100644
--- a/webrtc/test/direct_transport.h
+++ b/webrtc/test/direct_transport.h
@@ -7,17 +7,17 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_VIDEO_ENGINE_TEST_COMMON_DIRECT_TRANSPORT_H_
-#define WEBRTC_VIDEO_ENGINE_TEST_COMMON_DIRECT_TRANSPORT_H_
+#ifndef WEBRTC_TEST_DIRECT_TRANSPORT_H_
+#define WEBRTC_TEST_DIRECT_TRANSPORT_H_
#include <assert.h>
#include <deque>
#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/event.h"
+#include "webrtc/base/platform_thread.h"
#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
#include "webrtc/test/fake_network_pipe.h"
#include "webrtc/transport.h"
@@ -46,14 +46,16 @@ class DirectTransport : public Transport {
const PacketOptions& options) override;
bool SendRtcp(const uint8_t* data, size_t length) override;
+ int GetAverageDelayMs();
+
private:
static bool NetworkProcess(void* transport);
bool SendPackets();
rtc::CriticalSection lock_;
Call* const send_call_;
- rtc::scoped_ptr<EventWrapper> packet_event_;
- rtc::scoped_ptr<ThreadWrapper> thread_;
+ rtc::Event packet_event_;
+ rtc::PlatformThread thread_;
Clock* const clock_;
bool shutting_down_;
@@ -63,4 +65,4 @@ class DirectTransport : public Transport {
} // namespace test
} // namespace webrtc
-#endif // WEBRTC_VIDEO_ENGINE_TEST_COMMON_DIRECT_TRANSPORT_H_
+#endif // WEBRTC_TEST_DIRECT_TRANSPORT_H_
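For reference, a hedged sketch of the rtc::Event semantics DirectTransport now relies on (values illustrative):
#include "webrtc/base/event.h"

void EventSketch() {
  rtc::Event packet_event(false /* manual_reset */,
                          false /* initially_signaled */);
  packet_event.Set();                     // Wake a waiter; auto-resets after.
  bool signaled = packet_event.Wait(30);  // True iff Set() within 30 ms;
  (void)signaled;                         // no kEventError case to handle.
}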
diff --git a/webrtc/test/fake_audio_device.cc b/webrtc/test/fake_audio_device.cc
index e307dd7664..31cebda652 100644
--- a/webrtc/test/fake_audio_device.cc
+++ b/webrtc/test/fake_audio_device.cc
@@ -13,11 +13,11 @@
#include <algorithm>
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/media_file/source/media_file_utility.h"
+#include "webrtc/base/platform_thread.h"
+#include "webrtc/modules/media_file/media_file_utility.h"
#include "webrtc/system_wrappers/include/clock.h"
#include "webrtc/system_wrappers/include/event_wrapper.h"
#include "webrtc/system_wrappers/include/file_wrapper.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
namespace webrtc {
namespace test {
@@ -30,6 +30,7 @@ FakeAudioDevice::FakeAudioDevice(Clock* clock, const std::string& filename)
last_playout_ms_(-1),
clock_(clock),
tick_(EventTimerWrapper::Create()),
+ thread_(FakeAudioDevice::Run, this, "FakeAudioDevice"),
file_utility_(new ModuleFileUtility(0)),
input_stream_(FileWrapper::Create()) {
memset(captured_audio_, 0, sizeof(captured_audio_));
@@ -42,8 +43,7 @@ FakeAudioDevice::FakeAudioDevice(Clock* clock, const std::string& filename)
FakeAudioDevice::~FakeAudioDevice() {
Stop();
- if (thread_.get() != NULL)
- thread_->Stop();
+ thread_.Stop();
}
int32_t FakeAudioDevice::Init() {
@@ -53,15 +53,8 @@ int32_t FakeAudioDevice::Init() {
if (!tick_->StartTimer(true, 10))
return -1;
- thread_ = ThreadWrapper::CreateThread(FakeAudioDevice::Run, this,
- "FakeAudioDevice");
- if (thread_.get() == NULL)
- return -1;
- if (!thread_->Start()) {
- thread_.reset();
- return -1;
- }
- thread_->SetPriority(webrtc::kHighPriority);
+ thread_.Start();
+ thread_.SetPriority(rtc::kHighPriority);
return 0;
}
diff --git a/webrtc/test/fake_audio_device.h b/webrtc/test/fake_audio_device.h
index bdc672892c..7ca657bbb6 100644
--- a/webrtc/test/fake_audio_device.h
+++ b/webrtc/test/fake_audio_device.h
@@ -13,6 +13,7 @@
#include <string>
#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/platform_thread.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/audio_device/include/fake_audio_device.h"
#include "webrtc/typedefs.h"
@@ -23,7 +24,6 @@ class Clock;
class EventTimerWrapper;
class FileWrapper;
class ModuleFileUtility;
-class ThreadWrapper;
namespace test {
@@ -59,7 +59,7 @@ class FakeAudioDevice : public FakeAudioDeviceModule {
Clock* clock_;
rtc::scoped_ptr<EventTimerWrapper> tick_;
mutable rtc::CriticalSection lock_;
- rtc::scoped_ptr<ThreadWrapper> thread_;
+ rtc::PlatformThread thread_;
rtc::scoped_ptr<ModuleFileUtility> file_utility_;
rtc::scoped_ptr<FileWrapper> input_stream_;
};
diff --git a/webrtc/test/fake_decoder.cc b/webrtc/test/fake_decoder.cc
index 63316e0dab..dbdd580e88 100644
--- a/webrtc/test/fake_decoder.cc
+++ b/webrtc/test/fake_decoder.cc
@@ -53,10 +53,16 @@ int32_t FakeDecoder::RegisterDecodeCompleteCallback(
int32_t FakeDecoder::Release() {
return WEBRTC_VIDEO_CODEC_OK;
}
+
int32_t FakeDecoder::Reset() {
return WEBRTC_VIDEO_CODEC_OK;
}
+const char* FakeDecoder::kImplementationName = "fake_decoder";
+const char* FakeDecoder::ImplementationName() const {
+ return kImplementationName;
+}
+
int32_t FakeH264Decoder::Decode(const EncodedImage& input,
bool missing_frames,
const RTPFragmentationHeader* fragmentation,
diff --git a/webrtc/test/fake_decoder.h b/webrtc/test/fake_decoder.h
index 593af512f8..0da961d9a0 100644
--- a/webrtc/test/fake_decoder.h
+++ b/webrtc/test/fake_decoder.h
@@ -8,12 +8,12 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_VIDEO_ENGINE_TEST_COMMON_FAKE_DECODER_H_
-#define WEBRTC_VIDEO_ENGINE_TEST_COMMON_FAKE_DECODER_H_
+#ifndef WEBRTC_TEST_FAKE_DECODER_H_
+#define WEBRTC_TEST_FAKE_DECODER_H_
#include <vector>
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/system_wrappers/include/clock.h"
namespace webrtc {
@@ -39,6 +39,10 @@ class FakeDecoder : public VideoDecoder {
int32_t Release() override;
int32_t Reset() override;
+ const char* ImplementationName() const override;
+
+ static const char* kImplementationName;
+
private:
VideoCodec config_;
VideoFrame frame_;
@@ -71,4 +75,4 @@ class FakeNullDecoder : public FakeDecoder {
} // namespace test
} // namespace webrtc
-#endif // WEBRTC_VIDEO_ENGINE_TEST_COMMON_FAKE_DECODER_H_
+#endif // WEBRTC_TEST_FAKE_DECODER_H_
diff --git a/webrtc/test/fake_encoder.cc b/webrtc/test/fake_encoder.cc
index a3ade6e97a..72df40f9a5 100644
--- a/webrtc/test/fake_encoder.cc
+++ b/webrtc/test/fake_encoder.cc
@@ -12,7 +12,7 @@
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/system_wrappers/include/sleep.h"
namespace webrtc {
@@ -57,6 +57,11 @@ int32_t FakeEncoder::Encode(const VideoFrame& input_image,
// at the display time of the previous frame.
time_since_last_encode_ms = time_now_ms - last_encode_time_ms_;
}
+ if (time_since_last_encode_ms > 3 * 1000 / config_.maxFramerate) {
+ // Rudimentary check to make sure we don't wildly overshoot the target
+ // bitrate when resuming encoding after a suspension.
+ time_since_last_encode_ms = 3 * 1000 / config_.maxFramerate;
+ }
size_t bits_available =
static_cast<size_t>(target_bitrate_kbps_ * time_since_last_encode_ms);
@@ -127,6 +132,11 @@ int32_t FakeEncoder::SetRates(uint32_t new_target_bitrate, uint32_t framerate) {
return 0;
}
+const char* FakeEncoder::kImplementationName = "fake_encoder";
+const char* FakeEncoder::ImplementationName() const {
+ return kImplementationName;
+}
+
FakeH264Encoder::FakeH264Encoder(Clock* clock)
: FakeEncoder(clock), callback_(NULL), idr_counter_(0) {
FakeEncoder::RegisterEncodeCompleteCallback(this);
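Worked numbers for the three-frame clamp above, under assumed settings of 30 fps and a 300 kbps target:
// Unclamped, a 2000 ms gap would grant 300 kbps * 2000 ms = 600000 bits.
// Clamped to 3 * 1000 / 30 = 100 ms, it grants 300 * 100 = 30000 bits,
// so the encoder cannot burst far past its target after a suspension.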
diff --git a/webrtc/test/fake_encoder.h b/webrtc/test/fake_encoder.h
index d677b92e4d..6bff00e2a3 100644
--- a/webrtc/test/fake_encoder.h
+++ b/webrtc/test/fake_encoder.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_VIDEO_ENGINE_TEST_COMMON_FAKE_ENCODER_H_
-#define WEBRTC_VIDEO_ENGINE_TEST_COMMON_FAKE_ENCODER_H_
+#ifndef WEBRTC_TEST_FAKE_ENCODER_H_
+#define WEBRTC_TEST_FAKE_ENCODER_H_
#include <vector>
@@ -39,6 +39,9 @@ class FakeEncoder : public VideoEncoder {
int32_t Release() override;
int32_t SetChannelParameters(uint32_t packet_loss, int64_t rtt) override;
int32_t SetRates(uint32_t new_target_bitrate, uint32_t framerate) override;
+ const char* ImplementationName() const override;
+
+ static const char* kImplementationName;
protected:
Clock* const clock_;
@@ -82,4 +85,4 @@ class DelayedEncoder : public test::FakeEncoder {
} // namespace test
} // namespace webrtc
-#endif // WEBRTC_VIDEO_ENGINE_TEST_COMMON_FAKE_ENCODER_H_
+#endif // WEBRTC_TEST_FAKE_ENCODER_H_
diff --git a/webrtc/test/fake_network_pipe.cc b/webrtc/test/fake_network_pipe.cc
index c36059356a..491a0526b9 100644
--- a/webrtc/test/fake_network_pipe.cc
+++ b/webrtc/test/fake_network_pipe.cc
@@ -16,7 +16,7 @@
#include <algorithm>
#include "webrtc/call.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/system_wrappers/include/clock.h"
namespace webrtc {
@@ -70,14 +70,15 @@ class NetworkPacket {
int64_t arrival_time_;
};
-FakeNetworkPipe::FakeNetworkPipe(const FakeNetworkPipe::Config& config)
- : packet_receiver_(NULL),
+FakeNetworkPipe::FakeNetworkPipe(Clock* clock,
+ const FakeNetworkPipe::Config& config)
+ : clock_(clock),
+ packet_receiver_(NULL),
config_(config),
dropped_packets_(0),
sent_packets_(0),
total_packet_delay_(0),
- next_process_time_(TickTime::MillisecondTimestamp()) {
-}
+ next_process_time_(clock_->TimeInMilliseconds()) {}
FakeNetworkPipe::~FakeNetworkPipe() {
while (!capacity_link_.empty()) {
@@ -112,7 +113,7 @@ void FakeNetworkPipe::SendPacket(const uint8_t* data, size_t data_length) {
return;
}
- int64_t time_now = TickTime::MillisecondTimestamp();
+ int64_t time_now = clock_->TimeInMilliseconds();
// Delay introduced by the link capacity.
int64_t capacity_delay_ms = 0;
@@ -145,11 +146,12 @@ int FakeNetworkPipe::AverageDelay() {
if (sent_packets_ == 0)
return 0;
- return total_packet_delay_ / static_cast<int>(sent_packets_);
+ return static_cast<int>(total_packet_delay_ /
+ static_cast<int64_t>(sent_packets_));
}
void FakeNetworkPipe::Process() {
- int64_t time_now = TickTime::MillisecondTimestamp();
+ int64_t time_now = clock_->TimeInMilliseconds();
std::queue<NetworkPacket*> packets_to_deliver;
{
rtc::CritScope crit(&lock_);
@@ -210,8 +212,8 @@ int64_t FakeNetworkPipe::TimeUntilNextProcess() const {
const int64_t kDefaultProcessIntervalMs = 30;
if (capacity_link_.size() == 0 || delay_link_.size() == 0)
return kDefaultProcessIntervalMs;
- return std::max<int64_t>(
- next_process_time_ - TickTime::MillisecondTimestamp(), 0);
+ return std::max<int64_t>(next_process_time_ - clock_->TimeInMilliseconds(),
+ 0);
}
} // namespace webrtc
diff --git a/webrtc/test/fake_network_pipe.h b/webrtc/test/fake_network_pipe.h
index 74189a594c..5d589d86f0 100644
--- a/webrtc/test/fake_network_pipe.h
+++ b/webrtc/test/fake_network_pipe.h
@@ -16,11 +16,11 @@
#include "webrtc/base/constructormagic.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/system_wrappers/include/event_wrapper.h"
#include "webrtc/typedefs.h"
namespace webrtc {
+class Clock;
class CriticalSectionWrapper;
class NetworkPacket;
class PacketReceiver;
@@ -33,26 +33,20 @@ class PacketReceiver;
class FakeNetworkPipe {
public:
struct Config {
- Config()
- : queue_length_packets(0),
- queue_delay_ms(0),
- delay_standard_deviation_ms(0),
- link_capacity_kbps(0),
- loss_percent(0) {
- }
+ Config() {}
// Queue length in number of packets.
- size_t queue_length_packets;
+ size_t queue_length_packets = 0;
// Delay in addition to capacity induced delay.
- int queue_delay_ms;
+ int queue_delay_ms = 0;
// Standard deviation of the extra delay.
- int delay_standard_deviation_ms;
+ int delay_standard_deviation_ms = 0;
// Link capacity in kbps.
- int link_capacity_kbps;
+ int link_capacity_kbps = 0;
// Random packet loss.
- int loss_percent;
+ int loss_percent = 0;
};
- explicit FakeNetworkPipe(const FakeNetworkPipe::Config& config);
+ FakeNetworkPipe(Clock* clock, const FakeNetworkPipe::Config& config);
~FakeNetworkPipe();
// Must not be called in parallel with SendPacket or Process.
@@ -76,6 +70,7 @@ class FakeNetworkPipe {
size_t sent_packets() { return sent_packets_; }
private:
+ Clock* const clock_;
mutable rtc::CriticalSection lock_;
PacketReceiver* packet_receiver_;
std::queue<NetworkPacket*> capacity_link_;
@@ -87,7 +82,7 @@ class FakeNetworkPipe {
// Statistics.
size_t dropped_packets_;
size_t sent_packets_;
- int total_packet_delay_;
+ int64_t total_packet_delay_;
int64_t next_process_time_;
diff --git a/webrtc/test/fake_network_pipe_unittest.cc b/webrtc/test/fake_network_pipe_unittest.cc
index 02438c59f3..ff18993829 100644
--- a/webrtc/test/fake_network_pipe_unittest.cc
+++ b/webrtc/test/fake_network_pipe_unittest.cc
@@ -13,7 +13,7 @@
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/call.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/system_wrappers/include/clock.h"
#include "webrtc/test/fake_network_pipe.h"
using ::testing::_;
@@ -39,9 +39,11 @@ class MockReceiver : public PacketReceiver {
};
class FakeNetworkPipeTest : public ::testing::Test {
+ public:
+ FakeNetworkPipeTest() : fake_clock_(12345) {}
+
protected:
virtual void SetUp() {
- TickTime::UseFakeClock(12345);
receiver_.reset(new MockReceiver());
ON_CALL(*receiver_, DeliverPacket(_, _, _, _))
.WillByDefault(Return(PacketReceiver::DELIVERY_OK));
@@ -61,6 +63,7 @@ class FakeNetworkPipeTest : public ::testing::Test {
return 8 * kPacketSize / capacity_kbps;
}
+ SimulatedClock fake_clock_;
rtc::scoped_ptr<MockReceiver> receiver_;
};
@@ -71,7 +74,8 @@ TEST_F(FakeNetworkPipeTest, CapacityTest) {
FakeNetworkPipe::Config config;
config.queue_length_packets = 20;
config.link_capacity_kbps = 80;
- rtc::scoped_ptr<FakeNetworkPipe> pipe(new FakeNetworkPipe(config));
+ rtc::scoped_ptr<FakeNetworkPipe> pipe(
+ new FakeNetworkPipe(&fake_clock_, config));
pipe->SetReceiver(receiver_.get());
// Add 10 packets of 1000 bytes, = 80 kb, and verify it takes one second to
@@ -89,17 +93,17 @@ TEST_F(FakeNetworkPipeTest, CapacityTest) {
pipe->Process();
// Advance enough time to release one packet.
- TickTime::AdvanceFakeClock(kPacketTimeMs);
+ fake_clock_.AdvanceTimeMilliseconds(kPacketTimeMs);
EXPECT_CALL(*receiver_, DeliverPacket(_, _, _, _)).Times(1);
pipe->Process();
// Release all but one packet
- TickTime::AdvanceFakeClock(9 * kPacketTimeMs - 1);
+ fake_clock_.AdvanceTimeMilliseconds(9 * kPacketTimeMs - 1);
EXPECT_CALL(*receiver_, DeliverPacket(_, _, _, _)).Times(8);
pipe->Process();
// And the last one.
- TickTime::AdvanceFakeClock(1);
+ fake_clock_.AdvanceTimeMilliseconds(1);
EXPECT_CALL(*receiver_, DeliverPacket(_, _, _, _)).Times(1);
pipe->Process();
}
@@ -110,7 +114,8 @@ TEST_F(FakeNetworkPipeTest, ExtraDelayTest) {
config.queue_length_packets = 20;
config.queue_delay_ms = 100;
config.link_capacity_kbps = 80;
- rtc::scoped_ptr<FakeNetworkPipe> pipe(new FakeNetworkPipe(config));
+ rtc::scoped_ptr<FakeNetworkPipe> pipe(
+ new FakeNetworkPipe(&fake_clock_, config));
pipe->SetReceiver(receiver_.get());
const int kNumPackets = 2;
@@ -122,17 +127,17 @@ TEST_F(FakeNetworkPipeTest, ExtraDelayTest) {
kPacketSize);
// Increase more than kPacketTimeMs, but not more than the extra delay.
- TickTime::AdvanceFakeClock(kPacketTimeMs);
+ fake_clock_.AdvanceTimeMilliseconds(kPacketTimeMs);
EXPECT_CALL(*receiver_, DeliverPacket(_, _, _, _)).Times(0);
pipe->Process();
// Advance the network delay to get the first packet.
- TickTime::AdvanceFakeClock(config.queue_delay_ms);
+ fake_clock_.AdvanceTimeMilliseconds(config.queue_delay_ms);
EXPECT_CALL(*receiver_, DeliverPacket(_, _, _, _)).Times(1);
pipe->Process();
// Advance one more kPacketTimeMs to get the last packet.
- TickTime::AdvanceFakeClock(kPacketTimeMs);
+ fake_clock_.AdvanceTimeMilliseconds(kPacketTimeMs);
EXPECT_CALL(*receiver_, DeliverPacket(_, _, _, _)).Times(1);
pipe->Process();
}
@@ -143,7 +148,8 @@ TEST_F(FakeNetworkPipeTest, QueueLengthTest) {
FakeNetworkPipe::Config config;
config.queue_length_packets = 2;
config.link_capacity_kbps = 80;
- rtc::scoped_ptr<FakeNetworkPipe> pipe(new FakeNetworkPipe(config));
+ rtc::scoped_ptr<FakeNetworkPipe> pipe(
+ new FakeNetworkPipe(&fake_clock_, config));
pipe->SetReceiver(receiver_.get());
const int kPacketSize = 1000;
@@ -155,7 +161,7 @@ TEST_F(FakeNetworkPipeTest, QueueLengthTest) {
// Increase time enough to deliver all three packets, verify only two are
// delivered.
- TickTime::AdvanceFakeClock(3 * kPacketTimeMs);
+ fake_clock_.AdvanceTimeMilliseconds(3 * kPacketTimeMs);
EXPECT_CALL(*receiver_, DeliverPacket(_, _, _, _)).Times(2);
pipe->Process();
}
@@ -166,7 +172,8 @@ TEST_F(FakeNetworkPipeTest, StatisticsTest) {
config.queue_length_packets = 2;
config.queue_delay_ms = 20;
config.link_capacity_kbps = 80;
- rtc::scoped_ptr<FakeNetworkPipe> pipe(new FakeNetworkPipe(config));
+ rtc::scoped_ptr<FakeNetworkPipe> pipe(
+ new FakeNetworkPipe(&fake_clock_, config));
pipe->SetReceiver(receiver_.get());
const int kPacketSize = 1000;
@@ -175,7 +182,8 @@ TEST_F(FakeNetworkPipeTest, StatisticsTest) {
// Send three packets and verify only 2 are delivered.
SendPackets(pipe.get(), 3, kPacketSize);
- TickTime::AdvanceFakeClock(3 * kPacketTimeMs + config.queue_delay_ms);
+ fake_clock_.AdvanceTimeMilliseconds(3 * kPacketTimeMs +
+ config.queue_delay_ms);
EXPECT_CALL(*receiver_, DeliverPacket(_, _, _, _)).Times(2);
pipe->Process();
@@ -194,7 +202,8 @@ TEST_F(FakeNetworkPipeTest, ChangingCapacityWithEmptyPipeTest) {
FakeNetworkPipe::Config config;
config.queue_length_packets = 20;
config.link_capacity_kbps = 80;
- rtc::scoped_ptr<FakeNetworkPipe> pipe(new FakeNetworkPipe(config));
+ rtc::scoped_ptr<FakeNetworkPipe> pipe(
+ new FakeNetworkPipe(&fake_clock_, config));
pipe->SetReceiver(receiver_.get());
// Add 10 packets of 1000 bytes, = 80 kb, and verify it takes one second to
@@ -212,7 +221,7 @@ TEST_F(FakeNetworkPipeTest, ChangingCapacityWithEmptyPipeTest) {
// Advance time in steps to release one packet at a time.
for (int i = 0; i < kNumPackets; ++i) {
- TickTime::AdvanceFakeClock(packet_time_ms);
+ fake_clock_.AdvanceTimeMilliseconds(packet_time_ms);
EXPECT_CALL(*receiver_, DeliverPacket(_, _, _, _)).Times(1);
pipe->Process();
}
@@ -234,14 +243,14 @@ TEST_F(FakeNetworkPipeTest, ChangingCapacityWithEmptyPipeTest) {
// Advance time in steps to release one packet at a time.
for (int i = 0; i < kNumPackets; ++i) {
- TickTime::AdvanceFakeClock(packet_time_ms);
+ fake_clock_.AdvanceTimeMilliseconds(packet_time_ms);
EXPECT_CALL(*receiver_, DeliverPacket(_, _, _, _)).Times(1);
pipe->Process();
}
// Check that all the packets were sent.
EXPECT_EQ(static_cast<size_t>(2 * kNumPackets), pipe->sent_packets());
- TickTime::AdvanceFakeClock(pipe->TimeUntilNextProcess());
+ fake_clock_.AdvanceTimeMilliseconds(pipe->TimeUntilNextProcess());
EXPECT_CALL(*receiver_, DeliverPacket(_, _, _, _)).Times(0);
pipe->Process();
}
@@ -252,7 +261,8 @@ TEST_F(FakeNetworkPipeTest, ChangingCapacityWithPacketsInPipeTest) {
FakeNetworkPipe::Config config;
config.queue_length_packets = 20;
config.link_capacity_kbps = 80;
- rtc::scoped_ptr<FakeNetworkPipe> pipe(new FakeNetworkPipe(config));
+ rtc::scoped_ptr<FakeNetworkPipe> pipe(
+ new FakeNetworkPipe(&fake_clock_, config));
pipe->SetReceiver(receiver_.get());
// Add 10 packets of 1000 bytes, = 80 kb.
@@ -280,21 +290,21 @@ TEST_F(FakeNetworkPipeTest, ChangingCapacityWithPacketsInPipeTest) {
// Advance time in steps to release one packet at a time.
for (int i = 0; i < kNumPackets; ++i) {
- TickTime::AdvanceFakeClock(packet_time_1_ms);
+ fake_clock_.AdvanceTimeMilliseconds(packet_time_1_ms);
EXPECT_CALL(*receiver_, DeliverPacket(_, _, _, _)).Times(1);
pipe->Process();
}
// Advance time in steps to release one packet at a time.
for (int i = 0; i < kNumPackets; ++i) {
- TickTime::AdvanceFakeClock(packet_time_2_ms);
+ fake_clock_.AdvanceTimeMilliseconds(packet_time_2_ms);
EXPECT_CALL(*receiver_, DeliverPacket(_, _, _, _)).Times(1);
pipe->Process();
}
// Check that all the packets were sent.
EXPECT_EQ(static_cast<size_t>(2 * kNumPackets), pipe->sent_packets());
- TickTime::AdvanceFakeClock(pipe->TimeUntilNextProcess());
+ fake_clock_.AdvanceTimeMilliseconds(pipe->TimeUntilNextProcess());
EXPECT_CALL(*receiver_, DeliverPacket(_, _, _, _)).Times(0);
pipe->Process();
}
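For reference, all of the timing in these tests derives from the fixture's PacketTimeMs() helper shown earlier (8 * kPacketSize / capacity_kbps). Worked numbers for CapacityTest, illustrative only:

constexpr int kPacketSize = 1000;    // bytes
constexpr int kCapacityKbps = 80;
constexpr int kPacketTimeMs = 8 * kPacketSize / kCapacityKbps;  // 100 ms
static_assert(kPacketTimeMs == 100,
              "ten 1000-byte packets drain an 80 kbps link in one second");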
diff --git a/webrtc/test/fake_texture_frame.cc b/webrtc/test/fake_texture_frame.cc
new file mode 100644
index 0000000000..5d46eec4b6
--- /dev/null
+++ b/webrtc/test/fake_texture_frame.cc
@@ -0,0 +1,27 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/test/fake_texture_frame.h"
+
+namespace webrtc {
+namespace test {
+
+VideoFrame FakeNativeHandle::CreateFrame(FakeNativeHandle* native_handle,
+ int width,
+ int height,
+ uint32_t timestamp,
+ int64_t render_time_ms,
+ VideoRotation rotation) {
+ return VideoFrame(new rtc::RefCountedObject<FakeNativeHandleBuffer>(
+ native_handle, width, height),
+ timestamp, render_time_ms, rotation);
+}
+} // namespace test
+} // namespace webrtc
diff --git a/webrtc/test/fake_texture_frame.h b/webrtc/test/fake_texture_frame.h
index dc6abaf745..9575fae469 100644
--- a/webrtc/test/fake_texture_frame.h
+++ b/webrtc/test/fake_texture_frame.h
@@ -11,13 +11,21 @@
#define WEBRTC_TEST_FAKE_TEXTURE_FRAME_H_
#include "webrtc/base/checks.h"
-#include "webrtc/common_video/interface/video_frame_buffer.h"
+#include "webrtc/common_video/include/video_frame_buffer.h"
#include "webrtc/video_frame.h"
namespace webrtc {
namespace test {
-class FakeNativeHandle {};
+class FakeNativeHandle {
+ public:
+ static VideoFrame CreateFrame(FakeNativeHandle* native_handle,
+ int width,
+ int height,
+ uint32_t timestamp,
+ int64_t render_time_ms,
+ VideoRotation rotation);
+};
class FakeNativeHandleBuffer : public NativeHandleBuffer {
public:
@@ -41,16 +49,6 @@ class FakeNativeHandleBuffer : public NativeHandleBuffer {
}
};
-static VideoFrame CreateFakeNativeHandleFrame(FakeNativeHandle* native_handle,
- int width,
- int height,
- uint32_t timestamp,
- int64_t render_time_ms,
- VideoRotation rotation) {
- return VideoFrame(new rtc::RefCountedObject<FakeNativeHandleBuffer>(
- native_handle, width, height),
- timestamp, render_time_ms, rotation);
-}
} // namespace test
} // namespace webrtc
#endif // WEBRTC_TEST_FAKE_TEXTURE_FRAME_H_
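With this change the header-local helper CreateFakeNativeHandleFrame() becomes the static member FakeNativeHandle::CreateFrame() defined in the new .cc above, so call sites migrate as sketched here (the argument values are illustrative):

FakeNativeHandle* handle = new FakeNativeHandle();
VideoFrame frame = FakeNativeHandle::CreateFrame(handle, 640, 480,
                                                 1 /* timestamp */,
                                                 0 /* render_time_ms */,
                                                 kVideoRotation_0);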
diff --git a/webrtc/test/fake_voice_engine.cc b/webrtc/test/fake_voice_engine.cc
deleted file mode 100644
index 1a32e082b7..0000000000
--- a/webrtc/test/fake_voice_engine.cc
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/test/fake_voice_engine.h"
-
-namespace {
-
-webrtc::AudioDecodingCallStats MakeAudioDecodingCallStats() {
- webrtc::AudioDecodingCallStats stats;
- stats.calls_to_silence_generator = 234;
- stats.calls_to_neteq = 567;
- stats.decoded_normal = 890;
- stats.decoded_plc = 123;
- stats.decoded_cng = 456;
- stats.decoded_plc_cng = 789;
- return stats;
-}
-} // namespace
-
-namespace webrtc {
-namespace test {
-
-const int FakeVoiceEngine::kSendChannelId = 1;
-const int FakeVoiceEngine::kRecvChannelId = 2;
-const uint32_t FakeVoiceEngine::kSendSsrc = 665;
-const uint32_t FakeVoiceEngine::kRecvSsrc = 667;
-const int FakeVoiceEngine::kSendEchoDelayMedian = 254;
-const int FakeVoiceEngine::kSendEchoDelayStdDev = -3;
-const int FakeVoiceEngine::kSendEchoReturnLoss = -65;
-const int FakeVoiceEngine::kSendEchoReturnLossEnhancement = 101;
-const int FakeVoiceEngine::kRecvJitterBufferDelay = -7;
-const int FakeVoiceEngine::kRecvPlayoutBufferDelay = 302;
-const unsigned int FakeVoiceEngine::kSendSpeechInputLevel = 96;
-const unsigned int FakeVoiceEngine::kRecvSpeechOutputLevel = 99;
-
-const CallStatistics FakeVoiceEngine::kSendCallStats = {
- 1345, 1678, 1901, 1234, 112, 13456, 17890, 1567, -1890, -1123
-};
-
-const CodecInst FakeVoiceEngine::kSendCodecInst = {
- -121, "codec_name_send", 48000, -231, -451, -671
-};
-
-const ReportBlock FakeVoiceEngine::kSendReportBlock = {
- 456, 780, 123, 567, 890, 132, 143, 13354
-};
-
-const CallStatistics FakeVoiceEngine::kRecvCallStats = {
- 345, 678, 901, 234, -12, 3456, 7890, 567, 890, 123
-};
-
-const CodecInst FakeVoiceEngine::kRecvCodecInst = {
- 123, "codec_name_recv", 96000, -187, -198, -103
-};
-
-const NetworkStatistics FakeVoiceEngine::kRecvNetworkStats = {
- 123, 456, false, 0, 0, 789, 12, 345, 678, 901, -1, -1, -1, -1, -1, 0
-};
-
-const AudioDecodingCallStats FakeVoiceEngine::kRecvAudioDecodingCallStats =
- MakeAudioDecodingCallStats();
-} // namespace test
-} // namespace webrtc
diff --git a/webrtc/test/fake_voice_engine.h b/webrtc/test/fake_voice_engine.h
deleted file mode 100644
index 8f08929720..0000000000
--- a/webrtc/test/fake_voice_engine.h
+++ /dev/null
@@ -1,504 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_AUDIO_FAKE_VOICE_ENGINE_H_
-#define WEBRTC_AUDIO_FAKE_VOICE_ENGINE_H_
-
-#include <vector>
-
-#include "testing/gtest/include/gtest/gtest.h"
-
-#include "webrtc/voice_engine/voice_engine_impl.h"
-
-namespace webrtc {
-namespace test {
-
-// NOTE: This class inherits from VoiceEngineImpl so that its clients will be
-// able to get the various interfaces as usual, via T::GetInterface().
-class FakeVoiceEngine final : public VoiceEngineImpl {
- public:
- static const int kSendChannelId;
- static const int kRecvChannelId;
- static const uint32_t kSendSsrc;
- static const uint32_t kRecvSsrc;
- static const int kSendEchoDelayMedian;
- static const int kSendEchoDelayStdDev;
- static const int kSendEchoReturnLoss;
- static const int kSendEchoReturnLossEnhancement;
- static const int kRecvJitterBufferDelay;
- static const int kRecvPlayoutBufferDelay;
- static const unsigned int kSendSpeechInputLevel;
- static const unsigned int kRecvSpeechOutputLevel;
- static const CallStatistics kSendCallStats;
- static const CodecInst kSendCodecInst;
- static const ReportBlock kSendReportBlock;
- static const CallStatistics kRecvCallStats;
- static const CodecInst kRecvCodecInst;
- static const NetworkStatistics kRecvNetworkStats;
- static const AudioDecodingCallStats kRecvAudioDecodingCallStats;
-
- FakeVoiceEngine() : VoiceEngineImpl(new Config(), true) {
- // Increase ref count so this object isn't automatically deleted whenever
- // interfaces are Release():d.
- ++_ref_count;
- }
- ~FakeVoiceEngine() override {
- // Decrease ref count before base class d-tor is called; otherwise it will
- // trigger an assertion.
- --_ref_count;
- }
-
- // VoEAudioProcessing
- int SetNsStatus(bool enable, NsModes mode = kNsUnchanged) override {
- return -1;
- }
- int GetNsStatus(bool& enabled, NsModes& mode) override { return -1; }
- int SetAgcStatus(bool enable, AgcModes mode = kAgcUnchanged) override {
- return -1;
- }
- int GetAgcStatus(bool& enabled, AgcModes& mode) override { return -1; }
- int SetAgcConfig(AgcConfig config) override { return -1; }
- int GetAgcConfig(AgcConfig& config) override { return -1; }
- int SetEcStatus(bool enable, EcModes mode = kEcUnchanged) override {
- return -1;
- }
- int GetEcStatus(bool& enabled, EcModes& mode) override { return -1; }
- int EnableDriftCompensation(bool enable) override { return -1; }
- bool DriftCompensationEnabled() override { return false; }
- void SetDelayOffsetMs(int offset) override {}
- int DelayOffsetMs() override { return -1; }
- int SetAecmMode(AecmModes mode = kAecmSpeakerphone,
- bool enableCNG = true) override { return -1; }
- int GetAecmMode(AecmModes& mode, bool& enabledCNG) override { return -1; }
- int EnableHighPassFilter(bool enable) override { return -1; }
- bool IsHighPassFilterEnabled() override { return false; }
- int SetRxNsStatus(int channel,
- bool enable,
- NsModes mode = kNsUnchanged) override { return -1; }
- int GetRxNsStatus(int channel, bool& enabled, NsModes& mode) override {
- return -1;
- }
- int SetRxAgcStatus(int channel,
- bool enable,
- AgcModes mode = kAgcUnchanged) override { return -1; }
- int GetRxAgcStatus(int channel, bool& enabled, AgcModes& mode) override {
- return -1;
- }
- int SetRxAgcConfig(int channel, AgcConfig config) override { return -1; }
- int GetRxAgcConfig(int channel, AgcConfig& config) override { return -1; }
- int RegisterRxVadObserver(int channel,
- VoERxVadCallback& observer) override { return -1; }
- int DeRegisterRxVadObserver(int channel) override { return -1; }
- int VoiceActivityIndicator(int channel) override { return -1; }
- int SetEcMetricsStatus(bool enable) override { return -1; }
- int GetEcMetricsStatus(bool& enabled) override {
- enabled = true;
- return 0;
- }
- int GetEchoMetrics(int& ERL, int& ERLE, int& RERL, int& A_NLP) override {
- ERL = kSendEchoReturnLoss;
- ERLE = kSendEchoReturnLossEnhancement;
- RERL = -123456789;
- A_NLP = 123456789;
- return 0;
- }
- int GetEcDelayMetrics(int& delay_median,
- int& delay_std,
- float& fraction_poor_delays) override {
- delay_median = kSendEchoDelayMedian;
- delay_std = kSendEchoDelayStdDev;
- fraction_poor_delays = -12345.7890f;
- return 0;
- }
- int StartDebugRecording(const char* fileNameUTF8) override { return -1; }
- int StartDebugRecording(FILE* file_handle) override { return -1; }
- int StopDebugRecording() override { return -1; }
- int SetTypingDetectionStatus(bool enable) override { return -1; }
- int GetTypingDetectionStatus(bool& enabled) override { return -1; }
- int TimeSinceLastTyping(int& seconds) override { return -1; }
- int SetTypingDetectionParameters(int timeWindow,
- int costPerTyping,
- int reportingThreshold,
- int penaltyDecay,
- int typeEventDelay = 0) override {
- return -1;
- }
- void EnableStereoChannelSwapping(bool enable) override {}
- bool IsStereoChannelSwappingEnabled() override { return false; }
-
- // VoEBase
- int RegisterVoiceEngineObserver(VoiceEngineObserver& observer) override {
- return -1;
- }
- int DeRegisterVoiceEngineObserver() override { return -1; }
- int Init(AudioDeviceModule* external_adm = NULL,
- AudioProcessing* audioproc = NULL) override { return -1; }
- AudioProcessing* audio_processing() override { return nullptr; }
- int Terminate() override { return -1; }
- int CreateChannel() override { return -1; }
- int CreateChannel(const Config& config) override { return -1; }
- int DeleteChannel(int channel) override { return -1; }
- int StartReceive(int channel) override { return -1; }
- int StopReceive(int channel) override { return -1; }
- int StartPlayout(int channel) override { return -1; }
- int StopPlayout(int channel) override { return -1; }
- int StartSend(int channel) override { return -1; }
- int StopSend(int channel) override { return -1; }
- int GetVersion(char version[1024]) override { return -1; }
- int LastError() override { return -1; }
- AudioTransport* audio_transport() { return nullptr; }
- int AssociateSendChannel(int channel, int accociate_send_channel) override {
- return -1;
- }
-
- // VoECodec
- int NumOfCodecs() override { return -1; }
- int GetCodec(int index, CodecInst& codec) override { return -1; }
- int SetSendCodec(int channel, const CodecInst& codec) override { return -1; }
- int GetSendCodec(int channel, CodecInst& codec) override {
- EXPECT_EQ(channel, kSendChannelId);
- codec = kSendCodecInst;
- return 0;
- }
- int SetBitRate(int channel, int bitrate_bps) override { return -1; }
- int GetRecCodec(int channel, CodecInst& codec) override {
- EXPECT_EQ(channel, kRecvChannelId);
- codec = kRecvCodecInst;
- return 0;
- }
- int SetRecPayloadType(int channel, const CodecInst& codec) override {
- return -1;
- }
- int GetRecPayloadType(int channel, CodecInst& codec) override { return -1; }
- int SetSendCNPayloadType(int channel, int type,
- PayloadFrequencies frequency = kFreq16000Hz) override { return -1; }
- int SetVADStatus(int channel,
- bool enable,
- VadModes mode = kVadConventional,
- bool disableDTX = false) override { return -1; }
- int GetVADStatus(int channel,
- bool& enabled,
- VadModes& mode,
- bool& disabledDTX) override { return -1; }
- int SetOpusMaxPlaybackRate(int channel, int frequency_hz) override {
- return -1;
- }
- int SetOpusDtx(int channel, bool enable_dtx) override { return -1; }
- RtcEventLog* GetEventLog() override { return nullptr; }
-
- // VoEDtmf
- int SendTelephoneEvent(int channel,
- int eventCode,
- bool outOfBand = true,
- int lengthMs = 160,
- int attenuationDb = 10) override { return -1; }
- int SetSendTelephoneEventPayloadType(int channel,
- unsigned char type) override {
- return -1;
- }
- int GetSendTelephoneEventPayloadType(int channel,
- unsigned char& type) override {
- return -1;
- }
- int SetDtmfFeedbackStatus(bool enable,
- bool directFeedback = false) override { return -1; }
- int GetDtmfFeedbackStatus(bool& enabled, bool& directFeedback) override {
- return -1;
- }
- int PlayDtmfTone(int eventCode,
- int lengthMs = 200,
- int attenuationDb = 10) override { return -1; }
-
- // VoEExternalMedia
- int RegisterExternalMediaProcessing(
- int channel,
- ProcessingTypes type,
- VoEMediaProcess& processObject) override { return -1; }
- int DeRegisterExternalMediaProcessing(int channel,
- ProcessingTypes type) override {
- return -1;
- }
- int GetAudioFrame(int channel,
- int desired_sample_rate_hz,
- AudioFrame* frame) override { return -1; }
- int SetExternalMixing(int channel, bool enable) override { return -1; }
-
- // VoEFile
- int StartPlayingFileLocally(
- int channel,
- const char fileNameUTF8[1024],
- bool loop = false,
- FileFormats format = kFileFormatPcm16kHzFile,
- float volumeScaling = 1.0,
- int startPointMs = 0,
- int stopPointMs = 0) override { return -1; }
- int StartPlayingFileLocally(
- int channel,
- InStream* stream,
- FileFormats format = kFileFormatPcm16kHzFile,
- float volumeScaling = 1.0,
- int startPointMs = 0,
- int stopPointMs = 0) override { return -1; }
- int StopPlayingFileLocally(int channel) override { return -1; }
- int IsPlayingFileLocally(int channel) override { return -1; }
- int StartPlayingFileAsMicrophone(
- int channel,
- const char fileNameUTF8[1024],
- bool loop = false,
- bool mixWithMicrophone = false,
- FileFormats format = kFileFormatPcm16kHzFile,
- float volumeScaling = 1.0) override { return -1; }
- int StartPlayingFileAsMicrophone(
- int channel,
- InStream* stream,
- bool mixWithMicrophone = false,
- FileFormats format = kFileFormatPcm16kHzFile,
- float volumeScaling = 1.0) override { return -1; }
- int StopPlayingFileAsMicrophone(int channel) override { return -1; }
- int IsPlayingFileAsMicrophone(int channel) override { return -1; }
- int StartRecordingPlayout(int channel,
- const char* fileNameUTF8,
- CodecInst* compression = NULL,
- int maxSizeBytes = -1) override { return -1; }
- int StopRecordingPlayout(int channel) override { return -1; }
- int StartRecordingPlayout(int channel,
- OutStream* stream,
- CodecInst* compression = NULL) override {
- return -1;
- }
- int StartRecordingMicrophone(const char* fileNameUTF8,
- CodecInst* compression = NULL,
- int maxSizeBytes = -1) override { return -1; }
- int StartRecordingMicrophone(OutStream* stream,
- CodecInst* compression = NULL) override {
- return -1;
- }
- int StopRecordingMicrophone() override { return -1; }
-
- // VoEHardware
- int GetNumOfRecordingDevices(int& devices) override { return -1; }
-
- // Gets the number of audio devices available for playout.
- int GetNumOfPlayoutDevices(int& devices) override { return -1; }
-
- // Gets the name of a specific recording device given by an |index|.
- // On Windows Vista/7, it also retrieves an additional unique ID
- // (GUID) for the recording device.
- int GetRecordingDeviceName(int index,
- char strNameUTF8[128],
- char strGuidUTF8[128]) override { return -1; }
-
- // Gets the name of a specific playout device given by an |index|.
- // On Windows Vista/7, it also retrieves an additional unique ID
- // (GUID) for the playout device.
- int GetPlayoutDeviceName(int index,
- char strNameUTF8[128],
- char strGuidUTF8[128]) override { return -1; }
-
- // Sets the audio device used for recording.
- int SetRecordingDevice(
- int index,
- StereoChannel recordingChannel = kStereoBoth) override { return -1; }
-
- // Sets the audio device used for playout.
- int SetPlayoutDevice(int index) override { return -1; }
-
- // Sets the type of audio device layer to use.
- int SetAudioDeviceLayer(AudioLayers audioLayer) override { return -1; }
-
- // Gets the currently used (active) audio device layer.
- int GetAudioDeviceLayer(AudioLayers& audioLayer) override { return -1; }
-
- // Native sample rate controls (samples/sec)
- int SetRecordingSampleRate(unsigned int samples_per_sec) override {
- return -1;
- }
- int RecordingSampleRate(unsigned int* samples_per_sec) const override {
- return -1;
- }
- int SetPlayoutSampleRate(unsigned int samples_per_sec) override {
- return -1;
- }
- int PlayoutSampleRate(unsigned int* samples_per_sec) const override {
- return -1;
- }
-
- // Queries and controls platform audio effects on Android devices.
- bool BuiltInAECIsAvailable() const override { return false; }
- int EnableBuiltInAEC(bool enable) override { return -1; }
- bool BuiltInAGCIsAvailable() const override { return false; }
- int EnableBuiltInAGC(bool enable) override { return -1; }
- bool BuiltInNSIsAvailable() const override { return false; }
- int EnableBuiltInNS(bool enable) override { return -1; }
-
- // VoENetwork
- int RegisterExternalTransport(int channel, Transport& transport) override {
- return -1;
- }
- int DeRegisterExternalTransport(int channel) override { return -1; }
- int ReceivedRTPPacket(int channel,
- const void* data,
- size_t length) override { return -1; }
- int ReceivedRTPPacket(int channel,
- const void* data,
- size_t length,
- const PacketTime& packet_time) override { return -1; }
- int ReceivedRTCPPacket(int channel,
- const void* data,
- size_t length) { return -1; }
-
- // VoENetEqStats
- int GetNetworkStatistics(int channel, NetworkStatistics& stats) override {
- EXPECT_EQ(channel, kRecvChannelId);
- stats = kRecvNetworkStats;
- return 0;
- }
- int GetDecodingCallStatistics(int channel,
- AudioDecodingCallStats* stats) const override {
- EXPECT_EQ(channel, kRecvChannelId);
- EXPECT_NE(nullptr, stats);
- *stats = kRecvAudioDecodingCallStats;
- return 0;
- }
-
- // VoERTP_RTCP
- int SetLocalSSRC(int channel, unsigned int ssrc) override { return -1; }
- int GetLocalSSRC(int channel, unsigned int& ssrc) override {
- EXPECT_EQ(channel, kSendChannelId);
- ssrc = 0;
- return 0;
- }
- int GetRemoteSSRC(int channel, unsigned int& ssrc) override {
- EXPECT_EQ(channel, kRecvChannelId);
- ssrc = 0;
- return 0;
- }
- int SetSendAudioLevelIndicationStatus(int channel,
- bool enable,
- unsigned char id = 1) override {
- return -1;
- }
- int SetSendAbsoluteSenderTimeStatus(int channel,
- bool enable,
- unsigned char id) override { return -1; }
- int SetReceiveAbsoluteSenderTimeStatus(int channel,
- bool enable,
- unsigned char id) override {
- return -1;
- }
- int SetRTCPStatus(int channel, bool enable) override { return -1; }
- int GetRTCPStatus(int channel, bool& enabled) override { return -1; }
- int SetRTCP_CNAME(int channel, const char cName[256]) override { return -1; }
- int GetRTCP_CNAME(int channel, char cName[256]) { return -1; }
- int GetRemoteRTCP_CNAME(int channel, char cName[256]) override { return -1; }
- int GetRemoteRTCPData(int channel,
- unsigned int& NTPHigh,
- unsigned int& NTPLow,
- unsigned int& timestamp,
- unsigned int& playoutTimestamp,
- unsigned int* jitter = NULL,
- unsigned short* fractionLost = NULL) override {
- return -1;
- }
- int GetRTPStatistics(int channel,
- unsigned int& averageJitterMs,
- unsigned int& maxJitterMs,
- unsigned int& discardedPackets) override { return -1; }
- int GetRTCPStatistics(int channel, CallStatistics& stats) override {
- if (channel == kSendChannelId) {
- stats = kSendCallStats;
- } else {
- EXPECT_EQ(channel, kRecvChannelId);
- stats = kRecvCallStats;
- }
- return 0;
- }
- int GetRemoteRTCPReportBlocks(
- int channel,
- std::vector<ReportBlock>* receive_blocks) override {
- EXPECT_EQ(channel, kSendChannelId);
- EXPECT_NE(receive_blocks, nullptr);
- EXPECT_EQ(receive_blocks->size(), 0u);
- webrtc::ReportBlock block = kSendReportBlock;
- receive_blocks->push_back(block); // Has wrong SSRC.
- block.source_SSRC = kSendSsrc;
- receive_blocks->push_back(block); // Correct block.
- block.fraction_lost = 0;
- receive_blocks->push_back(block); // Duplicate SSRC, bad fraction_lost.
- return 0;
- }
- int SetNACKStatus(int channel, bool enable, int maxNoPackets) override {
- return -1;
- }
-
- // VoEVideoSync
- int GetPlayoutBufferSize(int& buffer_ms) override { return -1; }
- int SetMinimumPlayoutDelay(int channel, int delay_ms) override { return -1; }
- int SetInitialPlayoutDelay(int channel, int delay_ms) override { return -1; }
- int GetDelayEstimate(int channel,
- int* jitter_buffer_delay_ms,
- int* playout_buffer_delay_ms) override {
- EXPECT_EQ(channel, kRecvChannelId);
- *jitter_buffer_delay_ms = kRecvJitterBufferDelay;
- *playout_buffer_delay_ms = kRecvPlayoutBufferDelay;
- return 0;
- }
- int GetLeastRequiredDelayMs(int channel) const override { return -1; }
- int SetInitTimestamp(int channel, unsigned int timestamp) override {
- return -1;
- }
- int SetInitSequenceNumber(int channel, short sequenceNumber) override {
- return -1;
- }
- int GetPlayoutTimestamp(int channel, unsigned int& timestamp) override {
- return -1;
- }
- int GetRtpRtcp(int channel,
- RtpRtcp** rtpRtcpModule,
- RtpReceiver** rtp_receiver) override { return -1; }
-
- // VoEVolumeControl
- int SetSpeakerVolume(unsigned int volume) override { return -1; }
- int GetSpeakerVolume(unsigned int& volume) override { return -1; }
- int SetMicVolume(unsigned int volume) override { return -1; }
- int GetMicVolume(unsigned int& volume) override { return -1; }
- int SetInputMute(int channel, bool enable) override { return -1; }
- int GetInputMute(int channel, bool& enabled) override { return -1; }
- int GetSpeechInputLevel(unsigned int& level) override { return -1; }
- int GetSpeechOutputLevel(int channel, unsigned int& level) override {
- return -1;
- }
- int GetSpeechInputLevelFullRange(unsigned int& level) override {
- level = kSendSpeechInputLevel;
- return 0;
- }
- int GetSpeechOutputLevelFullRange(int channel,
- unsigned int& level) override {
- EXPECT_EQ(channel, kRecvChannelId);
- level = kRecvSpeechOutputLevel;
- return 0;
- }
- int SetChannelOutputVolumeScaling(int channel, float scaling) override {
- return -1;
- }
- int GetChannelOutputVolumeScaling(int channel, float& scaling) override {
- return -1;
- }
- int SetOutputVolumePan(int channel, float left, float right) override {
- return -1;
- }
- int GetOutputVolumePan(int channel, float& left, float& right) override {
- return -1;
- }
-};
-} // namespace test
-} // namespace webrtc
-
-#endif // WEBRTC_AUDIO_FAKE_VOICE_ENGINE_H_
diff --git a/webrtc/test/field_trial.cc b/webrtc/test/field_trial.cc
index 613fb67679..c40d0783d8 100644
--- a/webrtc/test/field_trial.cc
+++ b/webrtc/test/field_trial.cc
@@ -18,27 +18,13 @@
#include <string>
#include "webrtc/system_wrappers/include/field_trial.h"
+#include "webrtc/system_wrappers/include/field_trial_default.h"
namespace webrtc {
namespace {
-// Clients of this library have show a clear intent to setup field trials by
-// linking with it. As so try to crash if they forget to call
-// InitFieldTrialsFromString before webrtc tries to access a field trial.
bool field_trials_initiated_ = false;
-std::map<std::string, std::string> field_trials_;
} // namespace
-namespace field_trial {
-std::string FindFullName(const std::string& trial_name) {
- assert(field_trials_initiated_);
- std::map<std::string, std::string>::const_iterator it =
- field_trials_.find(trial_name);
- if (it == field_trials_.end())
- return std::string();
- return it->second;
-}
-} // namespace field_trial
-
namespace test {
// Note: this code is copied from src/base/metrics/field_trial.cc since the aim
// is to mimic chromium --force-fieldtrials.
@@ -53,6 +39,7 @@ void InitFieldTrialsFromString(const std::string& trials_string) {
return;
size_t next_item = 0;
+ std::map<std::string, std::string> field_trials;
while (next_item < trials_string.length()) {
size_t name_end = trials_string.find(kPersistentStringSeparator, next_item);
if (name_end == trials_string.npos || next_item == name_end)
@@ -67,15 +54,18 @@ void InitFieldTrialsFromString(const std::string& trials_string) {
next_item = group_name_end + 1;
// Fail if duplicate with different group name.
- if (field_trials_.find(name) != field_trials_.end() &&
- field_trials_.find(name)->second != group_name)
+ if (field_trials.find(name) != field_trials.end() &&
+ field_trials.find(name)->second != group_name) {
break;
+ }
- field_trials_[name] = group_name;
+ field_trials[name] = group_name;
// Successfully parsed all field trials from the string.
- if (next_item == trials_string.length())
+ if (next_item == trials_string.length()) {
+ webrtc::field_trial::InitFieldTrialsFromString(trials_string.c_str());
return;
+ }
}
// Using fprintf as LOG does not print when this is called early in main.
fprintf(stderr, "Invalid field trials string.\n");
@@ -85,18 +75,18 @@ void InitFieldTrialsFromString(const std::string& trials_string) {
}
ScopedFieldTrials::ScopedFieldTrials(const std::string& config)
- : previous_field_trials_(field_trials_) {
+ : previous_field_trials_(webrtc::field_trial::GetFieldTrialString()) {
assert(field_trials_initiated_);
field_trials_initiated_ = false;
- field_trials_.clear();
- InitFieldTrialsFromString(config);
+ current_field_trials_ = config;
+ InitFieldTrialsFromString(current_field_trials_);
}
ScopedFieldTrials::~ScopedFieldTrials() {
// Should still be initialized, since InitFieldTrials is called from ctor.
// That's why we don't restore the flag.
assert(field_trials_initiated_);
- field_trials_ = previous_field_trials_;
+ webrtc::field_trial::InitFieldTrialsFromString(previous_field_trials_);
}
} // namespace test
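After this change the test helper no longer keeps its own trial map; the state is the single string owned by system_wrappers' field_trial_default, and ScopedFieldTrials saves and restores it through GetFieldTrialString(). A usage sketch with an illustrative trial name; as the parsing loop above shows, the string follows Chromium's --force-fieldtrials format of "TrialName/GroupName/" pairs, each segment terminated by '/':

{
  webrtc::test::ScopedFieldTrials trials("WebRTC-SomeFeature/Enabled/");
  // Code in this scope sees group "Enabled" for "WebRTC-SomeFeature"
  // via webrtc::field_trial::FindFullName().
}  // The destructor restores the previous field-trial string.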
diff --git a/webrtc/test/field_trial.h b/webrtc/test/field_trial.h
index d448f3411d..735aa1f833 100644
--- a/webrtc/test/field_trial.h
+++ b/webrtc/test/field_trial.h
@@ -39,7 +39,8 @@ class ScopedFieldTrials {
explicit ScopedFieldTrials(const std::string& config);
~ScopedFieldTrials();
private:
- const std::map<std::string, std::string> previous_field_trials_;
+ std::string current_field_trials_;
+ const char* previous_field_trials_;
};
} // namespace test
diff --git a/webrtc/test/frame_generator_capturer.cc b/webrtc/test/frame_generator_capturer.cc
index 70e2c85698..35ce6168a2 100644
--- a/webrtc/test/frame_generator_capturer.cc
+++ b/webrtc/test/frame_generator_capturer.cc
@@ -11,11 +11,11 @@
#include "webrtc/test/frame_generator_capturer.h"
#include "webrtc/base/criticalsection.h"
-#include "webrtc/test/frame_generator.h"
+#include "webrtc/base/platform_thread.h"
#include "webrtc/system_wrappers/include/clock.h"
#include "webrtc/system_wrappers/include/event_wrapper.h"
#include "webrtc/system_wrappers/include/sleep.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
+#include "webrtc/test/frame_generator.h"
#include "webrtc/video_send_stream.h"
namespace webrtc {
@@ -65,6 +65,7 @@ FrameGeneratorCapturer::FrameGeneratorCapturer(Clock* clock,
clock_(clock),
sending_(false),
tick_(EventTimerWrapper::Create()),
+ thread_(FrameGeneratorCapturer::Run, this, "FrameGeneratorCapturer"),
frame_generator_(frame_generator),
target_fps_(target_fps),
first_frame_capture_time_(-1) {
@@ -76,8 +77,7 @@ FrameGeneratorCapturer::FrameGeneratorCapturer(Clock* clock,
FrameGeneratorCapturer::~FrameGeneratorCapturer() {
Stop();
- if (thread_.get() != NULL)
- thread_->Stop();
+ thread_.Stop();
}
bool FrameGeneratorCapturer::Init() {
@@ -88,15 +88,8 @@ bool FrameGeneratorCapturer::Init() {
if (!tick_->StartTimer(true, 1000 / target_fps_))
return false;
- thread_ = ThreadWrapper::CreateThread(FrameGeneratorCapturer::Run, this,
- "FrameGeneratorCapturer");
- if (thread_.get() == NULL)
- return false;
- if (!thread_->Start()) {
- thread_.reset();
- return false;
- }
- thread_->SetPriority(webrtc::kHighPriority);
+ thread_.Start();
+ thread_.SetPriority(rtc::kHighPriority);
return true;
}
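The capturer now owns an rtc::PlatformThread by value instead of a heap-allocated ThreadWrapper, so thread creation can no longer fail and Init() shrinks to Start() plus SetPriority(). A minimal sketch of the same pattern in isolation, assuming the PlatformThread API of this revision (a bool(*)(void*) run function that is re-invoked while it returns true; Worker is a stand-in name):

class Worker {
 public:
  Worker() : thread_(&Worker::Run, this, "Worker") {}
  ~Worker() { thread_.Stop(); }
  void Start() {
    thread_.Start();
    thread_.SetPriority(rtc::kHighPriority);
  }

 private:
  static bool Run(void* obj) {
    static_cast<Worker*>(obj)->Tick();
    return true;  // keep being called
  }
  void Tick() { /* periodic work goes here */ }

  rtc::PlatformThread thread_;
};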
diff --git a/webrtc/test/frame_generator_capturer.h b/webrtc/test/frame_generator_capturer.h
index aff906dfa2..6bd0e0b327 100644
--- a/webrtc/test/frame_generator_capturer.h
+++ b/webrtc/test/frame_generator_capturer.h
@@ -7,12 +7,13 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_VIDEO_ENGINE_TEST_COMMON_FRAME_GENERATOR_CAPTURER_H_
-#define WEBRTC_VIDEO_ENGINE_TEST_COMMON_FRAME_GENERATOR_CAPTURER_H_
+#ifndef WEBRTC_TEST_FRAME_GENERATOR_CAPTURER_H_
+#define WEBRTC_TEST_FRAME_GENERATOR_CAPTURER_H_
#include <string>
#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/platform_thread.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/test/video_capturer.h"
#include "webrtc/typedefs.h"
@@ -21,7 +22,6 @@ namespace webrtc {
class CriticalSectionWrapper;
class EventTimerWrapper;
-class ThreadWrapper;
namespace test {
@@ -64,7 +64,7 @@ class FrameGeneratorCapturer : public VideoCapturer {
rtc::scoped_ptr<EventTimerWrapper> tick_;
rtc::CriticalSection lock_;
- rtc::scoped_ptr<ThreadWrapper> thread_;
+ rtc::PlatformThread thread_;
rtc::scoped_ptr<FrameGenerator> frame_generator_;
int target_fps_;
@@ -74,4 +74,4 @@ class FrameGeneratorCapturer : public VideoCapturer {
} // test
} // webrtc
-#endif // WEBRTC_VIDEO_ENGINE_TEST_COMMON_FRAME_GENERATOR_CAPTURER_H_
+#endif // WEBRTC_TEST_FRAME_GENERATOR_CAPTURER_H_
diff --git a/webrtc/test/fuzzers/BUILD.gn b/webrtc/test/fuzzers/BUILD.gn
new file mode 100644
index 0000000000..6a43548ec9
--- /dev/null
+++ b/webrtc/test/fuzzers/BUILD.gn
@@ -0,0 +1,115 @@
+# Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("//build/config/features.gni")
+import("webrtc_fuzzer.gni")
+
+static_library("webrtc_fuzzer_main") {
+ public_configs = [ "../..:common_inherited_config" ]
+ sources = [
+ "webrtc_fuzzer_main.cc",
+ ]
+ deps = [
+ "../../system_wrappers:field_trial_default",
+ "../../system_wrappers:metrics_default",
+ "//testing/libfuzzer:libfuzzer_main",
+ ]
+}
+
+webrtc_fuzzer_test("h264_depacketizer_fuzzer") {
+ sources = [
+ "h264_depacketizer_fuzzer.cc",
+ ]
+ deps = [
+ "../../modules/rtp_rtcp",
+ ]
+}
+
+webrtc_fuzzer_test("vp8_depacketizer_fuzzer") {
+ sources = [
+ "vp8_depacketizer_fuzzer.cc",
+ ]
+ deps = [
+ "../../modules/rtp_rtcp",
+ ]
+}
+
+webrtc_fuzzer_test("vp9_depacketizer_fuzzer") {
+ sources = [
+ "vp9_depacketizer_fuzzer.cc",
+ ]
+ deps = [
+ "../../modules/rtp_rtcp",
+ ]
+}
+
+webrtc_fuzzer_test("vp8_qp_parser_fuzzer") {
+ sources = [
+ "vp8_qp_parser_fuzzer.cc",
+ ]
+ deps = [
+ "../../modules/video_coding/",
+ ]
+}
+
+webrtc_fuzzer_test("producer_fec_fuzzer") {
+ sources = [
+ "producer_fec_fuzzer.cc",
+ ]
+ deps = [
+ "../../modules/rtp_rtcp/",
+ ]
+}
+
+source_set("audio_decoder_fuzzer") {
+ public_configs = [ "../..:common_inherited_config" ]
+ sources = [
+ "audio_decoder_fuzzer.cc",
+ "audio_decoder_fuzzer.h",
+ ]
+}
+
+webrtc_fuzzer_test("audio_decoder_ilbc_fuzzer") {
+ sources = [
+ "audio_decoder_ilbc_fuzzer.cc",
+ ]
+ deps = [
+ ":audio_decoder_fuzzer",
+ "../../modules/audio_coding:ilbc",
+ ]
+}
+
+webrtc_fuzzer_test("audio_decoder_isac_fuzzer") {
+ sources = [
+ "audio_decoder_isac_fuzzer.cc",
+ ]
+ deps = [
+ ":audio_decoder_fuzzer",
+ "../../modules/audio_coding:isac",
+ ]
+}
+
+webrtc_fuzzer_test("audio_decoder_isacfix_fuzzer") {
+ sources = [
+ "audio_decoder_isacfix_fuzzer.cc",
+ ]
+ deps = [
+ ":audio_decoder_fuzzer",
+ "../../modules/audio_coding:isac_fix",
+ ]
+}
+
+webrtc_fuzzer_test("audio_decoder_opus_fuzzer") {
+ sources = [
+ "audio_decoder_opus_fuzzer.cc",
+ ]
+ deps = [
+ ":audio_decoder_fuzzer",
+ "../../modules/audio_coding:webrtc_opus",
+ ]
+}
diff --git a/webrtc/test/fuzzers/OWNERS b/webrtc/test/fuzzers/OWNERS
new file mode 100644
index 0000000000..6782b61fca
--- /dev/null
+++ b/webrtc/test/fuzzers/OWNERS
@@ -0,0 +1 @@
+pbos@webrtc.org
diff --git a/webrtc/test/fuzzers/audio_decoder_fuzzer.cc b/webrtc/test/fuzzers/audio_decoder_fuzzer.cc
new file mode 100644
index 0000000000..fb5adb6cd8
--- /dev/null
+++ b/webrtc/test/fuzzers/audio_decoder_fuzzer.cc
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/test/fuzzers/audio_decoder_fuzzer.h"
+
+#include "webrtc/base/checks.h"
+#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
+
+namespace webrtc {
+namespace {
+size_t PacketSizeFromTwoBytes(const uint8_t* data, size_t size) {
+ if (size < 2)
+ return 0;
+ return static_cast<size_t>((data[0] << 8) + data[1]);
+}
+} // namespace
+
+// This function reads two bytes from the beginning of |data|, interprets them
+// as the first packet length, and reads this many bytes if available. The
+// payload is inserted into the decoder, and the process continues until no more
+// data is available.
+void FuzzAudioDecoder(const uint8_t* data,
+ size_t size,
+ AudioDecoder* decoder,
+ int sample_rate_hz,
+ size_t max_decoded_bytes,
+ int16_t* decoded) {
+ const uint8_t* data_ptr = data;
+ size_t remaining_size = size;
+ size_t packet_len = PacketSizeFromTwoBytes(data_ptr, remaining_size);
+ while (packet_len != 0 && packet_len <= remaining_size - 2) {
+ data_ptr += 2;
+ remaining_size -= 2;
+ AudioDecoder::SpeechType speech_type;
+ decoder->Decode(data_ptr, packet_len, sample_rate_hz, max_decoded_bytes,
+ decoded, &speech_type);
+ data_ptr += packet_len;
+ remaining_size -= packet_len;
+ packet_len = PacketSizeFromTwoBytes(data_ptr, remaining_size);
+ }
+}
+} // namespace webrtc
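The framing described in the comment above is a big-endian 16-bit length prefix before each packet. A standalone illustration, not part of the fuzzer:

#include <cstddef>
#include <cstdint>
#include <cstdio>

int main() {
  const uint8_t input[] = {0x00, 0x03, 'a', 'b', 'c'};
  // Same arithmetic as PacketSizeFromTwoBytes():
  size_t packet_len = (input[0] << 8) + input[1];  // == 3, i.e. "abc"
  printf("first packet is %zu bytes\n", packet_len);
  return 0;
}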
diff --git a/webrtc/test/fuzzers/audio_decoder_fuzzer.h b/webrtc/test/fuzzers/audio_decoder_fuzzer.h
new file mode 100644
index 0000000000..cdd8574300
--- /dev/null
+++ b/webrtc/test/fuzzers/audio_decoder_fuzzer.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_TEST_FUZZERS_AUDIO_DECODER_FUZZER_H_
+#define WEBRTC_TEST_FUZZERS_AUDIO_DECODER_FUZZER_H_
+
+#include <stddef.h>
+
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+class AudioDecoder;
+
+void FuzzAudioDecoder(const uint8_t* data,
+ size_t size,
+ AudioDecoder* decoder,
+ int sample_rate_hz,
+ size_t max_decoded_bytes,
+ int16_t* decoded);
+
+} // namespace webrtc
+
+#endif // WEBRTC_TEST_FUZZERS_AUDIO_DECODER_FUZZER_H_
diff --git a/webrtc/test/fuzzers/audio_decoder_ilbc_fuzzer.cc b/webrtc/test/fuzzers/audio_decoder_ilbc_fuzzer.cc
new file mode 100644
index 0000000000..d2a87f0cb6
--- /dev/null
+++ b/webrtc/test/fuzzers/audio_decoder_ilbc_fuzzer.cc
@@ -0,0 +1,22 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h"
+#include "webrtc/test/fuzzers/audio_decoder_fuzzer.h"
+
+namespace webrtc {
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ AudioDecoderIlbc dec;
+ static const int kSampleRateHz = 8000;
+ static const size_t kAllocatedOuputSizeSamples = kSampleRateHz / 10;
+ int16_t output[kAllocatedOuputSizeSamples];
+ FuzzAudioDecoder(data, size, &dec, kSampleRateHz, sizeof(output), output);
+}
+} // namespace webrtc
diff --git a/webrtc/test/fuzzers/audio_decoder_isac_fuzzer.cc b/webrtc/test/fuzzers/audio_decoder_isac_fuzzer.cc
new file mode 100644
index 0000000000..984cfda398
--- /dev/null
+++ b/webrtc/test/fuzzers/audio_decoder_isac_fuzzer.cc
@@ -0,0 +1,22 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/codecs/isac/main/include/audio_decoder_isac.h"
+#include "webrtc/test/fuzzers/audio_decoder_fuzzer.h"
+
+namespace webrtc {
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ AudioDecoderIsac dec(nullptr);
+ const int sample_rate_hz = size % 2 == 0 ? 16000 : 32000; // 16 or 32 kHz.
+ static const size_t kAllocatedOuputSizeSamples = 32000 / 10; // 100 ms.
+ int16_t output[kAllocatedOuputSizeSamples];
+ FuzzAudioDecoder(data, size, &dec, sample_rate_hz, sizeof(output), output);
+}
+} // namespace webrtc
diff --git a/webrtc/test/fuzzers/audio_decoder_isacfix_fuzzer.cc b/webrtc/test/fuzzers/audio_decoder_isacfix_fuzzer.cc
new file mode 100644
index 0000000000..83fb8c2d62
--- /dev/null
+++ b/webrtc/test/fuzzers/audio_decoder_isacfix_fuzzer.cc
@@ -0,0 +1,22 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/codecs/isac/fix/include/audio_decoder_isacfix.h"
+#include "webrtc/test/fuzzers/audio_decoder_fuzzer.h"
+
+namespace webrtc {
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ AudioDecoderIsacFix dec(nullptr);
+ static const int kSampleRateHz = 16000;
+ static const size_t kAllocatedOuputSizeSamples = 16000 / 10; // 100 ms.
+ int16_t output[kAllocatedOuputSizeSamples];
+ FuzzAudioDecoder(data, size, &dec, kSampleRateHz, sizeof(output), output);
+}
+} // namespace webrtc
diff --git a/webrtc/test/fuzzers/audio_decoder_opus_fuzzer.cc b/webrtc/test/fuzzers/audio_decoder_opus_fuzzer.cc
new file mode 100644
index 0000000000..3d70ec507d
--- /dev/null
+++ b/webrtc/test/fuzzers/audio_decoder_opus_fuzzer.cc
@@ -0,0 +1,23 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/codecs/opus/audio_decoder_opus.h"
+#include "webrtc/test/fuzzers/audio_decoder_fuzzer.h"
+
+namespace webrtc {
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ const size_t channels = (size % 2) + 1; // 1 or 2 channels.
+ AudioDecoderOpus dec(channels);
+ const int kSampleRateHz = 48000;
+ const size_t kAllocatedOuputSizeSamples = kSampleRateHz / 10; // 100 ms.
+ int16_t output[kAllocatedOuputSizeSamples];
+ FuzzAudioDecoder(data, size, &dec, kSampleRateHz, sizeof(output), output);
+}
+} // namespace webrtc
diff --git a/webrtc/test/fuzzers/h264_depacketizer_fuzzer.cc b/webrtc/test/fuzzers/h264_depacketizer_fuzzer.cc
new file mode 100644
index 0000000000..ca73d9495f
--- /dev/null
+++ b/webrtc/test/fuzzers/h264_depacketizer_fuzzer.cc
@@ -0,0 +1,18 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "webrtc/modules/rtp_rtcp/source/rtp_format_h264.h"
+
+namespace webrtc {
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ RtpDepacketizerH264 depacketizer;
+ RtpDepacketizer::ParsedPayload parsed_payload;
+ depacketizer.Parse(&parsed_payload, data, size);
+}
+} // namespace webrtc
diff --git a/webrtc/test/fuzzers/producer_fec_fuzzer.cc b/webrtc/test/fuzzers/producer_fec_fuzzer.cc
new file mode 100644
index 0000000000..7322fed4bf
--- /dev/null
+++ b/webrtc/test/fuzzers/producer_fec_fuzzer.cc
@@ -0,0 +1,60 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "webrtc/base/checks.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
+#include "webrtc/modules/rtp_rtcp/source/producer_fec.h"
+
+namespace webrtc {
+
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ ForwardErrorCorrection fec;
+ ProducerFec producer(&fec);
+ size_t i = 0;
+ if (size < 4)
+ return;
+ FecProtectionParams params = {data[i++] % 128, data[i++] % 1,
+ static_cast<int>(data[i++] % 10),
+ kFecMaskBursty};
+ producer.SetFecParameters(&params, 0);
+ uint16_t seq_num = data[i++];
+
+ while (i + 3 < size) {
+ size_t rtp_header_length = data[i++] % 10 + 12;
+ size_t payload_size = data[i++] % 10;
+ if (i + payload_size + rtp_header_length + 2 > size)
+ break;
+ rtc::scoped_ptr<uint8_t[]> packet(
+ new uint8_t[payload_size + rtp_header_length]);
+ memcpy(packet.get(), &data[i], payload_size + rtp_header_length);
+ ByteWriter<uint16_t>::WriteBigEndian(&packet[2], seq_num++);
+ i += payload_size + rtp_header_length;
+ // Make sure sequence numbers are increasing.
+ const int kRedPayloadType = 98;
+ rtc::scoped_ptr<RedPacket> red_packet(producer.BuildRedPacket(
+ packet.get(), payload_size, rtp_header_length, kRedPayloadType));
+ bool protect = static_cast<bool>(data[i++] % 2);
+ if (protect) {
+ producer.AddRtpPacketAndGenerateFec(packet.get(), payload_size,
+ rtp_header_length);
+ }
+ uint16_t num_fec_packets = producer.NumAvailableFecPackets();
+ std::vector<RedPacket*> fec_packets;
+ if (num_fec_packets > 0) {
+ fec_packets =
+ producer.GetFecPackets(kRedPayloadType, 99, 100, rtp_header_length);
+ RTC_CHECK_EQ(num_fec_packets, fec_packets.size());
+ }
+ for (RedPacket* fec_packet : fec_packets) {
+ delete fec_packet;
+ }
+ }
+}
+} // namespace webrtc
diff --git a/webrtc/test/fuzzers/vp8_depacketizer_fuzzer.cc b/webrtc/test/fuzzers/vp8_depacketizer_fuzzer.cc
new file mode 100644
index 0000000000..d048372456
--- /dev/null
+++ b/webrtc/test/fuzzers/vp8_depacketizer_fuzzer.cc
@@ -0,0 +1,18 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "webrtc/modules/rtp_rtcp/source/rtp_format_vp8.h"
+
+namespace webrtc {
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ RtpDepacketizerVp8 depacketizer;
+ RtpDepacketizer::ParsedPayload parsed_payload;
+ depacketizer.Parse(&parsed_payload, data, size);
+}
+} // namespace webrtc
diff --git a/webrtc/test/fuzzers/vp8_qp_parser_fuzzer.cc b/webrtc/test/fuzzers/vp8_qp_parser_fuzzer.cc
new file mode 100644
index 0000000000..5135f1a471
--- /dev/null
+++ b/webrtc/test/fuzzers/vp8_qp_parser_fuzzer.cc
@@ -0,0 +1,17 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "webrtc/modules/video_coding/utility/vp8_header_parser.h"
+
+namespace webrtc {
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ int qp;
+ vp8::GetQp(data, size, &qp);
+}
+} // namespace webrtc
diff --git a/webrtc/test/fuzzers/vp9_depacketizer_fuzzer.cc b/webrtc/test/fuzzers/vp9_depacketizer_fuzzer.cc
new file mode 100644
index 0000000000..02a7cc0f81
--- /dev/null
+++ b/webrtc/test/fuzzers/vp9_depacketizer_fuzzer.cc
@@ -0,0 +1,18 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "webrtc/modules/rtp_rtcp/source/rtp_format_vp9.h"
+
+namespace webrtc {
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ RtpDepacketizerVp9 depacketizer;
+ RtpDepacketizer::ParsedPayload parsed_payload;
+ depacketizer.Parse(&parsed_payload, data, size);
+}
+} // namespace webrtc
diff --git a/webrtc/test/fuzzers/webrtc_fuzzer.gni b/webrtc/test/fuzzers/webrtc_fuzzer.gni
new file mode 100644
index 0000000000..d264392c07
--- /dev/null
+++ b/webrtc/test/fuzzers/webrtc_fuzzer.gni
@@ -0,0 +1,28 @@
+# Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("//testing/test.gni")
+
+template("webrtc_fuzzer_test") {
+ assert(defined(invoker.sources), "Need sources in $target_name.")
+
+ test(target_name) {
+ forward_variables_from(invoker, [ "sources" ])
+ deps = [
+ ":webrtc_fuzzer_main",
+ ]
+ if (defined(invoker.deps)) {
+ deps += invoker.deps
+ }
+ if (is_clang) {
+ # Suppress warnings from Chrome's Clang plugins.
+ # See http://code.google.com/p/webrtc/issues/detail?id=163 for details.
+ configs -= [ "//build/config/clang:find_bad_constructs" ]
+ }
+ }
+}
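A minimal invocation sketch of the template above; the target name, source file, and extra dep are hypothetical placeholders, not part of this change:

# Hypothetical usage of webrtc_fuzzer_test; names below are placeholders.
webrtc_fuzzer_test("my_parser_fuzzer") {
  sources = [ "my_parser_fuzzer.cc" ]
  deps = [ "../../modules/rtp_rtcp" ]  # Assumed dep of the code under test.
}

The template forwards sources, always adds the shared :webrtc_fuzzer_main dependency, and merges any extra deps the invoker supplies.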
diff --git a/webrtc/test/fuzzers/webrtc_fuzzer_main.cc b/webrtc/test/fuzzers/webrtc_fuzzer_main.cc
new file mode 100644
index 0000000000..50a513c094
--- /dev/null
+++ b/webrtc/test/fuzzers/webrtc_fuzzer_main.cc
@@ -0,0 +1,41 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file provides a common entry point for fuzzing functions. It sets
+// sane defaults, such as disabling logging, to improve fuzzing efficiency.
+
+#include "webrtc/base/logging.h"
+
+namespace {
+bool g_initialized = false;
+void InitializeWebRtcFuzzDefaults() {
+ if (g_initialized)
+ return;
+
+ // Remove default logging to prevent huge slowdowns.
+ // TODO(pbos): Disable in Chromium: http://crbug.com/561667
+#if !defined(WEBRTC_CHROMIUM_BUILD)
+ rtc::LogMessage::LogToDebug(rtc::LS_NONE);
+#endif // !defined(WEBRTC_CHROMIUM_BUILD)
+
+ g_initialized = true;
+}
+}
+
+namespace webrtc {
+extern void FuzzOneInput(const uint8_t* data, size_t size);
+} // namespace webrtc
+
+extern "C" int LLVMFuzzerTestOneInput(const unsigned char *data, size_t size) {
+ InitializeWebRtcFuzzDefaults();
+ webrtc::FuzzOneInput(data, size);
+ return 0;
+}
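Given this harness, a new fuzzer only has to define webrtc::FuzzOneInput; a minimal skeleton (hypothetical file name, mirroring the fuzzers above):

// my_parser_fuzzer.cc -- hypothetical skeleton. The harness above supplies
// LLVMFuzzerTestOneInput and invokes this once per libFuzzer input.
#include <stddef.h>
#include <stdint.h>

namespace webrtc {
void FuzzOneInput(const uint8_t* data, size_t size) {
  // Hand |data|/|size| to the code under test; it must tolerate any input.
}
}  // namespace webrtc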
diff --git a/webrtc/test/gl/gl_renderer.h b/webrtc/test/gl/gl_renderer.h
index 27749468a2..7682d3c918 100644
--- a/webrtc/test/gl/gl_renderer.h
+++ b/webrtc/test/gl/gl_renderer.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_VIDEO_ENGINE_TEST_COMMON_GL_GL_RENDERER_H_
-#define WEBRTC_VIDEO_ENGINE_TEST_COMMON_GL_GL_RENDERER_H_
+#ifndef WEBRTC_TEST_GL_GL_RENDERER_H_
+#define WEBRTC_TEST_GL_GL_RENDERER_H_
#ifdef WEBRTC_MAC
#include <OpenGL/gl.h>
@@ -48,4 +48,4 @@ class GlRenderer : public VideoRenderer {
} // test
} // webrtc
-#endif // WEBRTC_VIDEO_ENGINE_TEST_COMMON_GL_GL_RENDERER_H_
+#endif // WEBRTC_TEST_GL_GL_RENDERER_H_
diff --git a/webrtc/test/histogram.cc b/webrtc/test/histogram.cc
index 6fcdb6864f..2893e4389a 100644
--- a/webrtc/test/histogram.cc
+++ b/webrtc/test/histogram.cc
@@ -12,6 +12,7 @@
#include <map>
+#include "webrtc/base/checks.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/system_wrappers/include/metrics.h"
@@ -22,10 +23,10 @@
namespace webrtc {
namespace {
struct SampleInfo {
- SampleInfo(int sample)
- : last(sample), total(1) {}
- int last; // Last added sample.
- int total; // Total number of added samples.
+ SampleInfo(const std::string& name) : name_(name), last_(-1), total_(0) {}
+ const std::string name_;
+ int last_; // Last added sample.
+ int total_; // Total number of added samples.
};
rtc::CriticalSection histogram_crit_;
@@ -36,21 +37,33 @@ std::map<std::string, SampleInfo> histograms_ GUARDED_BY(histogram_crit_);
namespace metrics {
Histogram* HistogramFactoryGetCounts(const std::string& name, int min, int max,
- int bucket_count) { return NULL; }
+ int bucket_count) {
+ rtc::CritScope cs(&histogram_crit_);
+ if (histograms_.find(name) == histograms_.end()) {
+ histograms_.insert(std::make_pair(name, SampleInfo(name)));
+ }
+ auto it = histograms_.find(name);
+ return reinterpret_cast<Histogram*>(&it->second);
+}
Histogram* HistogramFactoryGetEnumeration(const std::string& name,
- int boundary) { return NULL; }
+ int boundary) {
+ rtc::CritScope cs(&histogram_crit_);
+ if (histograms_.find(name) == histograms_.end()) {
+ histograms_.insert(std::make_pair(name, SampleInfo(name)));
+ }
+ auto it = histograms_.find(name);
+ return reinterpret_cast<Histogram*>(&it->second);
+}
void HistogramAdd(
Histogram* histogram_pointer, const std::string& name, int sample) {
rtc::CritScope cs(&histogram_crit_);
- auto it = histograms_.find(name);
- if (it == histograms_.end()) {
- histograms_.insert(std::make_pair(name, SampleInfo(sample)));
- return;
- }
- it->second.last = sample;
- ++it->second.total;
+ SampleInfo* ptr = reinterpret_cast<SampleInfo*>(histogram_pointer);
+ // The name should not vary.
+ RTC_CHECK(ptr->name_ == name);
+ ptr->last_ = sample;
+ ++ptr->total_;
}
} // namespace metrics
@@ -61,7 +74,7 @@ int LastHistogramSample(const std::string& name) {
if (it == histograms_.end()) {
return -1;
}
- return it->second.last;
+ return it->second.last_;
}
int NumHistogramSamples(const std::string& name) {
@@ -70,13 +83,15 @@ int NumHistogramSamples(const std::string& name) {
if (it == histograms_.end()) {
return 0;
}
- return it->second.total;
+ return it->second.total_;
}
void ClearHistograms() {
rtc::CritScope cs(&histogram_crit_);
- histograms_.clear();
+ for (auto& it : histograms_) {
+ it.second.last_ = -1;
+ it.second.total_ = 0;
+ }
}
} // namespace test
} // namespace webrtc
-
diff --git a/webrtc/test/histogram.h b/webrtc/test/histogram.h
index 44ce32b4f4..3c8e743aa1 100644
--- a/webrtc/test/histogram.h
+++ b/webrtc/test/histogram.h
@@ -23,7 +23,7 @@ int LastHistogramSample(const std::string& name);
// Returns the number of added samples to a histogram.
int NumHistogramSamples(const std::string& name);
-// Removes all histograms.
+// Removes all histogram samples.
void ClearHistograms();
} // namespace test
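Since the factories now hand back stable SampleInfo pointers and ClearHistograms() only resets samples, a test could exercise these helpers roughly as follows (a sketch; the metric name is a placeholder and RTC_HISTOGRAM_COUNTS_100 is assumed from system_wrappers/include/metrics.h):

// Hypothetical test body using the rewritten test histogram support.
webrtc::test::ClearHistograms();
RTC_HISTOGRAM_COUNTS_100("WebRTC.Example.Metric", 42);
EXPECT_EQ(42, webrtc::test::LastHistogramSample("WebRTC.Example.Metric"));
EXPECT_EQ(1, webrtc::test::NumHistogramSamples("WebRTC.Example.Metric"));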
diff --git a/webrtc/test/layer_filtering_transport.cc b/webrtc/test/layer_filtering_transport.cc
index a4ebf47f93..41d63ad6e7 100644
--- a/webrtc/test/layer_filtering_transport.cc
+++ b/webrtc/test/layer_filtering_transport.cc
@@ -9,8 +9,7 @@
*/
#include "webrtc/base/checks.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
@@ -24,33 +23,35 @@ LayerFilteringTransport::LayerFilteringTransport(
Call* send_call,
uint8_t vp8_video_payload_type,
uint8_t vp9_video_payload_type,
- uint8_t tl_discard_threshold,
- uint8_t sl_discard_threshold)
+ int selected_tl,
+ int selected_sl)
: test::DirectTransport(config, send_call),
vp8_video_payload_type_(vp8_video_payload_type),
vp9_video_payload_type_(vp9_video_payload_type),
- tl_discard_threshold_(tl_discard_threshold),
- sl_discard_threshold_(sl_discard_threshold) {}
+ selected_tl_(selected_tl),
+ selected_sl_(selected_sl),
+ discarded_last_packet_(false) {}
-uint16_t LayerFilteringTransport::NextSequenceNumber(uint32_t ssrc) {
- auto it = current_seq_nums_.find(ssrc);
- if (it == current_seq_nums_.end())
- return current_seq_nums_[ssrc] = 10000;
- return ++it->second;
+bool LayerFilteringTransport::DiscardedLastPacket() const {
+ return discarded_last_packet_;
}
bool LayerFilteringTransport::SendRtp(const uint8_t* packet,
size_t length,
const PacketOptions& options) {
- if (tl_discard_threshold_ == 0 && sl_discard_threshold_ == 0) {
+ if (selected_tl_ == -1 && selected_sl_ == -1) {
// Nothing to change, forward the packet immediately.
return test::DirectTransport::SendRtp(packet, length, options);
}
bool set_marker_bit = false;
- rtc::scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
+ RtpUtility::RtpHeaderParser parser(packet, length);
RTPHeader header;
- parser->Parse(packet, length, &header);
+ parser.Parse(&header);
+
+ RTC_DCHECK_LE(length, static_cast<size_t>(IP_PACKET_SIZE));
+ uint8_t temp_buffer[IP_PACKET_SIZE];
+ memcpy(temp_buffer, packet, length);
if (header.payloadType == vp8_video_payload_type_ ||
header.payloadType == vp9_video_payload_type_) {
@@ -65,40 +66,38 @@ bool LayerFilteringTransport::SendRtp(const uint8_t* packet,
RtpDepacketizer::Create(is_vp8 ? kRtpVideoVp8 : kRtpVideoVp9));
RtpDepacketizer::ParsedPayload parsed_payload;
if (depacketizer->Parse(&parsed_payload, payload, payload_data_length)) {
- const uint8_t temporalIdx =
+ const int temporal_idx = static_cast<int>(
is_vp8 ? parsed_payload.type.Video.codecHeader.VP8.temporalIdx
- : parsed_payload.type.Video.codecHeader.VP9.temporal_idx;
- const uint8_t spatialIdx =
+ : parsed_payload.type.Video.codecHeader.VP9.temporal_idx);
+ const int spatial_idx = static_cast<int>(
is_vp8 ? kNoSpatialIdx
- : parsed_payload.type.Video.codecHeader.VP9.spatial_idx;
- if (sl_discard_threshold_ > 0 &&
- spatialIdx == sl_discard_threshold_ - 1 &&
+ : parsed_payload.type.Video.codecHeader.VP9.spatial_idx);
+ if (selected_sl_ >= 0 && spatial_idx == selected_sl_ &&
parsed_payload.type.Video.codecHeader.VP9.end_of_frame) {
// This layer is now the last in the superframe.
set_marker_bit = true;
- }
- if ((tl_discard_threshold_ > 0 && temporalIdx != kNoTemporalIdx &&
- temporalIdx >= tl_discard_threshold_) ||
- (sl_discard_threshold_ > 0 && spatialIdx != kNoSpatialIdx &&
- spatialIdx >= sl_discard_threshold_)) {
- return true; // Discard the packet.
+ } else if ((selected_tl_ >= 0 && temporal_idx != kNoTemporalIdx &&
+ temporal_idx > selected_tl_) ||
+ (selected_sl_ >= 0 && spatial_idx != kNoSpatialIdx &&
+ spatial_idx > selected_sl_)) {
+ // Truncate packet to a padding packet.
+ length = header.headerLength + 1;
+ temp_buffer[0] |= (1 << 5); // P = 1.
+ temp_buffer[1] &= 0x7F; // M = 0.
+ discarded_last_packet_ = true;
+ temp_buffer[header.headerLength] = 1; // One byte of padding.
}
} else {
RTC_NOTREACHED() << "Parse error";
}
}
- uint8_t temp_buffer[IP_PACKET_SIZE];
- memcpy(temp_buffer, packet, length);
-
// We are discarding some of the packets (specifically, whole layers), so
// make sure the marker bit is set properly, and that sequence numbers are
// continuous.
if (set_marker_bit)
temp_buffer[1] |= kRtpMarkerBitMask;
- uint16_t seq_num = NextSequenceNumber(header.ssrc);
- ByteWriter<uint16_t>::WriteBigEndian(&temp_buffer[2], seq_num);
return test::DirectTransport::SendRtp(temp_buffer, length, options);
}
diff --git a/webrtc/test/layer_filtering_transport.h b/webrtc/test/layer_filtering_transport.h
index 3f2389a51b..d453556235 100644
--- a/webrtc/test/layer_filtering_transport.h
+++ b/webrtc/test/layer_filtering_transport.h
@@ -26,23 +26,22 @@ class LayerFilteringTransport : public test::DirectTransport {
Call* send_call,
uint8_t vp8_video_payload_type,
uint8_t vp9_video_payload_type,
- uint8_t tl_discard_threshold,
- uint8_t sl_discard_threshold);
+ int selected_tl,
+ int selected_sl);
+ bool DiscardedLastPacket() const;
bool SendRtp(const uint8_t* data,
size_t length,
const PacketOptions& options) override;
private:
- uint16_t NextSequenceNumber(uint32_t ssrc);
// Used to distinguish between VP8 and VP9.
const uint8_t vp8_video_payload_type_;
const uint8_t vp9_video_payload_type_;
- // Discard all temporal/spatial layers with id greater or equal the
- // threshold. 0 to disable.
- const uint8_t tl_discard_threshold_;
- const uint8_t sl_discard_threshold_;
- // Current sequence number for each SSRC separately.
- std::map<uint32_t, uint16_t> current_seq_nums_;
+ // Discard or invalidate all temporal/spatial layers with id greater than the
+ // selected one. -1 to disable filtering.
+ const int selected_tl_;
+ const int selected_sl_;
+ bool discarded_last_packet_;
};
} // namespace test
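A construction sketch for the new selection semantics (the payload types, config, and call pointer are assumptions for illustration): forward temporal layers 0..1 and leave spatial filtering disabled:

// Hypothetical setup; all values are placeholders.
FakeNetworkPipe::Config config;
LayerFilteringTransport transport(config, sender_call,
                                  /* vp8_video_payload_type */ 120,
                                  /* vp9_video_payload_type */ 121,
                                  /* selected_tl */ 1,
                                  /* selected_sl */ -1);
// Packets above the selected layer become one-byte padding packets;
// transport.DiscardedLastPacket() reports whether that happened.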
diff --git a/webrtc/test/linux/glx_renderer.h b/webrtc/test/linux/glx_renderer.h
index 517f22a038..c117281cf1 100644
--- a/webrtc/test/linux/glx_renderer.h
+++ b/webrtc/test/linux/glx_renderer.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_VIDEO_ENGINE_TEST_COMMON_LINUX_GLX_RENDERER_H_
-#define WEBRTC_VIDEO_ENGINE_TEST_COMMON_LINUX_GLX_RENDERER_H_
+#ifndef WEBRTC_TEST_LINUX_GLX_RENDERER_H_
+#define WEBRTC_TEST_LINUX_GLX_RENDERER_H_
#include <GL/glx.h>
#include <X11/Xlib.h>
@@ -45,4 +45,4 @@ class GlxRenderer : public GlRenderer {
} // test
} // webrtc
-#endif // WEBRTC_VIDEO_ENGINE_TEST_COMMON_LINUX_GLX_RENDERER_H_
+#endif // WEBRTC_TEST_LINUX_GLX_RENDERER_H_
diff --git a/webrtc/test/mac/video_renderer_mac.h b/webrtc/test/mac/video_renderer_mac.h
index 2e55538954..7baf794744 100644
--- a/webrtc/test/mac/video_renderer_mac.h
+++ b/webrtc/test/mac/video_renderer_mac.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_VIDEO_ENGINE_TEST_COMMON_MAC_VIDEO_RENDERER_MAC_H_
-#define WEBRTC_VIDEO_ENGINE_TEST_COMMON_MAC_VIDEO_RENDERER_MAC_H_
+#ifndef WEBRTC_TEST_MAC_VIDEO_RENDERER_MAC_H_
+#define WEBRTC_TEST_MAC_VIDEO_RENDERER_MAC_H_
#include "webrtc/base/constructormagic.h"
#include "webrtc/test/gl/gl_renderer.h"
@@ -38,4 +38,4 @@ class MacRenderer : public GlRenderer {
} // test
} // webrtc
-#endif // WEBRTC_VIDEO_ENGINE_TEST_COMMON_MAC_VIDEO_RENDERER_MAC_H_
+#endif // WEBRTC_TEST_MAC_VIDEO_RENDERER_MAC_H_
diff --git a/webrtc/test/mock_voe_channel_proxy.h b/webrtc/test/mock_voe_channel_proxy.h
new file mode 100644
index 0000000000..b5d79c18ea
--- /dev/null
+++ b/webrtc/test/mock_voe_channel_proxy.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_TEST_MOCK_VOE_CHANNEL_PROXY_H_
+#define WEBRTC_TEST_MOCK_VOE_CHANNEL_PROXY_H_
+
+#include <string>
+#include "testing/gmock/include/gmock/gmock.h"
+#include "webrtc/voice_engine/channel_proxy.h"
+
+namespace webrtc {
+namespace test {
+
+class MockVoEChannelProxy : public voe::ChannelProxy {
+ public:
+ MOCK_METHOD1(SetRTCPStatus, void(bool enable));
+ MOCK_METHOD1(SetLocalSSRC, void(uint32_t ssrc));
+ MOCK_METHOD1(SetRTCP_CNAME, void(const std::string& c_name));
+ MOCK_METHOD2(SetSendAbsoluteSenderTimeStatus, void(bool enable, int id));
+ MOCK_METHOD2(SetSendAudioLevelIndicationStatus, void(bool enable, int id));
+ MOCK_METHOD1(EnableSendTransportSequenceNumber, void(int id));
+ MOCK_METHOD2(SetReceiveAbsoluteSenderTimeStatus, void(bool enable, int id));
+ MOCK_METHOD2(SetReceiveAudioLevelIndicationStatus, void(bool enable, int id));
+ MOCK_METHOD3(SetCongestionControlObjects,
+ void(RtpPacketSender* rtp_packet_sender,
+ TransportFeedbackObserver* transport_feedback_observer,
+ PacketRouter* seq_num_allocator));
+ MOCK_CONST_METHOD0(GetRTCPStatistics, CallStatistics());
+ MOCK_CONST_METHOD0(GetRemoteRTCPReportBlocks, std::vector<ReportBlock>());
+ MOCK_CONST_METHOD0(GetNetworkStatistics, NetworkStatistics());
+ MOCK_CONST_METHOD0(GetDecodingCallStatistics, AudioDecodingCallStats());
+ MOCK_CONST_METHOD0(GetSpeechOutputLevelFullRange, int32_t());
+ MOCK_CONST_METHOD0(GetDelayEstimate, uint32_t());
+ MOCK_METHOD1(SetSendTelephoneEventPayloadType, bool(int payload_type));
+ MOCK_METHOD2(SendTelephoneEventOutband, bool(uint8_t event,
+ uint32_t duration_ms));
+};
+} // namespace test
+} // namespace webrtc
+
+#endif // WEBRTC_TEST_MOCK_VOE_CHANNEL_PROXY_H_
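A short gmock sketch of the proxy mock in use (the SSRC value is arbitrary):

// Hypothetical expectation against MockVoEChannelProxy.
webrtc::test::MockVoEChannelProxy proxy;
EXPECT_CALL(proxy, SetLocalSSRC(0x1234u));
proxy.SetLocalSSRC(0x1234u);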
diff --git a/webrtc/test/mock_voice_engine.h b/webrtc/test/mock_voice_engine.h
new file mode 100644
index 0000000000..28a75f8063
--- /dev/null
+++ b/webrtc/test/mock_voice_engine.h
@@ -0,0 +1,337 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_MOCK_VOICE_ENGINE_H_
+#define WEBRTC_AUDIO_MOCK_VOICE_ENGINE_H_
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "webrtc/test/mock_voe_channel_proxy.h"
+#include "webrtc/voice_engine/voice_engine_impl.h"
+
+namespace webrtc {
+namespace test {
+
+// NOTE: This class inherits from VoiceEngineImpl so that its clients will be
+// able to get the various interfaces as usual, via T::GetInterface().
+class MockVoiceEngine : public VoiceEngineImpl {
+ public:
+ MockVoiceEngine() : VoiceEngineImpl(new Config(), true) {
+ // Increase the ref count so this object isn't automatically deleted
+ // whenever interfaces are Released.
+ ++_ref_count;
+ // We add this default behavior to make the mock easier to use in tests. It
+ // will create a NiceMock of a voe::ChannelProxy.
+ ON_CALL(*this, ChannelProxyFactory(testing::_))
+ .WillByDefault(
+ testing::Invoke([](int channel_id) {
+ return new testing::NiceMock<MockVoEChannelProxy>();
+ }));
+ }
+ ~MockVoiceEngine() override {
+ // Decrease ref count before base class d-tor is called; otherwise it will
+ // trigger an assertion.
+ --_ref_count;
+ }
+ // Allows injecting a ChannelProxy factory.
+ MOCK_METHOD1(ChannelProxyFactory, voe::ChannelProxy*(int channel_id));
+
+ // VoiceEngineImpl
+ rtc::scoped_ptr<voe::ChannelProxy> GetChannelProxy(int channel_id) override {
+ return rtc::scoped_ptr<voe::ChannelProxy>(ChannelProxyFactory(channel_id));
+ }
+
+ // VoEAudioProcessing
+ MOCK_METHOD2(SetNsStatus, int(bool enable, NsModes mode));
+ MOCK_METHOD2(GetNsStatus, int(bool& enabled, NsModes& mode));
+ MOCK_METHOD2(SetAgcStatus, int(bool enable, AgcModes mode));
+ MOCK_METHOD2(GetAgcStatus, int(bool& enabled, AgcModes& mode));
+ MOCK_METHOD1(SetAgcConfig, int(AgcConfig config));
+ MOCK_METHOD1(GetAgcConfig, int(AgcConfig& config));
+ MOCK_METHOD2(SetEcStatus, int(bool enable, EcModes mode));
+ MOCK_METHOD2(GetEcStatus, int(bool& enabled, EcModes& mode));
+ MOCK_METHOD1(EnableDriftCompensation, int(bool enable));
+ MOCK_METHOD0(DriftCompensationEnabled, bool());
+ MOCK_METHOD1(SetDelayOffsetMs, void(int offset));
+ MOCK_METHOD0(DelayOffsetMs, int());
+ MOCK_METHOD2(SetAecmMode, int(AecmModes mode, bool enableCNG));
+ MOCK_METHOD2(GetAecmMode, int(AecmModes& mode, bool& enabledCNG));
+ MOCK_METHOD1(EnableHighPassFilter, int(bool enable));
+ MOCK_METHOD0(IsHighPassFilterEnabled, bool());
+ MOCK_METHOD3(SetRxNsStatus, int(int channel, bool enable, NsModes mode));
+ MOCK_METHOD3(GetRxNsStatus, int(int channel, bool& enabled, NsModes& mode));
+ MOCK_METHOD3(SetRxAgcStatus, int(int channel, bool enable, AgcModes mode));
+ MOCK_METHOD3(GetRxAgcStatus, int(int channel, bool& enabled, AgcModes& mode));
+ MOCK_METHOD2(SetRxAgcConfig, int(int channel, AgcConfig config));
+ MOCK_METHOD2(GetRxAgcConfig, int(int channel, AgcConfig& config));
+ MOCK_METHOD2(RegisterRxVadObserver,
+ int(int channel, VoERxVadCallback& observer));
+ MOCK_METHOD1(DeRegisterRxVadObserver, int(int channel));
+ MOCK_METHOD1(VoiceActivityIndicator, int(int channel));
+ MOCK_METHOD1(SetEcMetricsStatus, int(bool enable));
+ MOCK_METHOD1(GetEcMetricsStatus, int(bool& enabled));
+ MOCK_METHOD4(GetEchoMetrics, int(int& ERL, int& ERLE, int& RERL, int& A_NLP));
+ MOCK_METHOD3(GetEcDelayMetrics,
+ int(int& delay_median,
+ int& delay_std,
+ float& fraction_poor_delays));
+ MOCK_METHOD1(StartDebugRecording, int(const char* fileNameUTF8));
+ MOCK_METHOD1(StartDebugRecording, int(FILE* file_handle));
+ MOCK_METHOD0(StopDebugRecording, int());
+ MOCK_METHOD1(SetTypingDetectionStatus, int(bool enable));
+ MOCK_METHOD1(GetTypingDetectionStatus, int(bool& enabled));
+ MOCK_METHOD1(TimeSinceLastTyping, int(int& seconds));
+ MOCK_METHOD5(SetTypingDetectionParameters,
+ int(int timeWindow,
+ int costPerTyping,
+ int reportingThreshold,
+ int penaltyDecay,
+ int typeEventDelay));
+ MOCK_METHOD1(EnableStereoChannelSwapping, void(bool enable));
+ MOCK_METHOD0(IsStereoChannelSwappingEnabled, bool());
+
+ // VoEBase
+ MOCK_METHOD1(RegisterVoiceEngineObserver, int(VoiceEngineObserver& observer));
+ MOCK_METHOD0(DeRegisterVoiceEngineObserver, int());
+ MOCK_METHOD2(Init,
+ int(AudioDeviceModule* external_adm,
+ AudioProcessing* audioproc));
+ MOCK_METHOD0(audio_processing, AudioProcessing*());
+ MOCK_METHOD0(Terminate, int());
+ MOCK_METHOD0(CreateChannel, int());
+ MOCK_METHOD1(CreateChannel, int(const Config& config));
+ MOCK_METHOD1(DeleteChannel, int(int channel));
+ MOCK_METHOD1(StartReceive, int(int channel));
+ MOCK_METHOD1(StopReceive, int(int channel));
+ MOCK_METHOD1(StartPlayout, int(int channel));
+ MOCK_METHOD1(StopPlayout, int(int channel));
+ MOCK_METHOD1(StartSend, int(int channel));
+ MOCK_METHOD1(StopSend, int(int channel));
+ MOCK_METHOD1(GetVersion, int(char version[1024]));
+ MOCK_METHOD0(LastError, int());
+ MOCK_METHOD0(audio_transport, AudioTransport*());
+ MOCK_METHOD2(AssociateSendChannel,
+ int(int channel, int associate_send_channel));
+
+ // VoECodec
+ MOCK_METHOD0(NumOfCodecs, int());
+ MOCK_METHOD2(GetCodec, int(int index, CodecInst& codec));
+ MOCK_METHOD2(SetSendCodec, int(int channel, const CodecInst& codec));
+ MOCK_METHOD2(GetSendCodec, int(int channel, CodecInst& codec));
+ MOCK_METHOD2(SetBitRate, int(int channel, int bitrate_bps));
+ MOCK_METHOD2(GetRecCodec, int(int channel, CodecInst& codec));
+ MOCK_METHOD2(SetRecPayloadType, int(int channel, const CodecInst& codec));
+ MOCK_METHOD2(GetRecPayloadType, int(int channel, CodecInst& codec));
+ MOCK_METHOD3(SetSendCNPayloadType,
+ int(int channel, int type, PayloadFrequencies frequency));
+ MOCK_METHOD2(SetFECStatus, int(int channel, bool enable));
+ MOCK_METHOD2(GetFECStatus, int(int channel, bool& enabled));
+ MOCK_METHOD4(SetVADStatus,
+ int(int channel, bool enable, VadModes mode, bool disableDTX));
+ MOCK_METHOD4(
+ GetVADStatus,
+ int(int channel, bool& enabled, VadModes& mode, bool& disabledDTX));
+ MOCK_METHOD2(SetOpusMaxPlaybackRate, int(int channel, int frequency_hz));
+ MOCK_METHOD2(SetOpusDtx, int(int channel, bool enable_dtx));
+ MOCK_METHOD0(GetEventLog, RtcEventLog*());
+
+ // VoEDtmf
+ MOCK_METHOD5(SendTelephoneEvent,
+ int(int channel,
+ int eventCode,
+ bool outOfBand,
+ int lengthMs,
+ int attenuationDb));
+ MOCK_METHOD2(SetSendTelephoneEventPayloadType,
+ int(int channel, unsigned char type));
+ MOCK_METHOD2(GetSendTelephoneEventPayloadType,
+ int(int channel, unsigned char& type));
+ MOCK_METHOD2(SetDtmfFeedbackStatus, int(bool enable, bool directFeedback));
+ MOCK_METHOD2(GetDtmfFeedbackStatus, int(bool& enabled, bool& directFeedback));
+ MOCK_METHOD3(PlayDtmfTone,
+ int(int eventCode, int lengthMs, int attenuationDb));
+
+ // VoEExternalMedia
+ MOCK_METHOD3(RegisterExternalMediaProcessing,
+ int(int channel,
+ ProcessingTypes type,
+ VoEMediaProcess& processObject));
+ MOCK_METHOD2(DeRegisterExternalMediaProcessing,
+ int(int channel, ProcessingTypes type));
+ MOCK_METHOD3(GetAudioFrame,
+ int(int channel, int desired_sample_rate_hz, AudioFrame* frame));
+ MOCK_METHOD2(SetExternalMixing, int(int channel, bool enable));
+
+ // VoEFile
+ MOCK_METHOD7(StartPlayingFileLocally,
+ int(int channel,
+ const char fileNameUTF8[1024],
+ bool loop,
+ FileFormats format,
+ float volumeScaling,
+ int startPointMs,
+ int stopPointMs));
+ MOCK_METHOD6(StartPlayingFileLocally,
+ int(int channel,
+ InStream* stream,
+ FileFormats format,
+ float volumeScaling,
+ int startPointMs,
+ int stopPointMs));
+ MOCK_METHOD1(StopPlayingFileLocally, int(int channel));
+ MOCK_METHOD1(IsPlayingFileLocally, int(int channel));
+ MOCK_METHOD6(StartPlayingFileAsMicrophone,
+ int(int channel,
+ const char fileNameUTF8[1024],
+ bool loop,
+ bool mixWithMicrophone,
+ FileFormats format,
+ float volumeScaling));
+ MOCK_METHOD5(StartPlayingFileAsMicrophone,
+ int(int channel,
+ InStream* stream,
+ bool mixWithMicrophone,
+ FileFormats format,
+ float volumeScaling));
+ MOCK_METHOD1(StopPlayingFileAsMicrophone, int(int channel));
+ MOCK_METHOD1(IsPlayingFileAsMicrophone, int(int channel));
+ MOCK_METHOD4(StartRecordingPlayout,
+ int(int channel,
+ const char* fileNameUTF8,
+ CodecInst* compression,
+ int maxSizeBytes));
+ MOCK_METHOD1(StopRecordingPlayout, int(int channel));
+ MOCK_METHOD3(StartRecordingPlayout,
+ int(int channel, OutStream* stream, CodecInst* compression));
+ MOCK_METHOD3(StartRecordingMicrophone,
+ int(const char* fileNameUTF8,
+ CodecInst* compression,
+ int maxSizeBytes));
+ MOCK_METHOD2(StartRecordingMicrophone,
+ int(OutStream* stream, CodecInst* compression));
+ MOCK_METHOD0(StopRecordingMicrophone, int());
+
+ // VoEHardware
+ MOCK_METHOD1(GetNumOfRecordingDevices, int(int& devices));
+ MOCK_METHOD1(GetNumOfPlayoutDevices, int(int& devices));
+ MOCK_METHOD3(GetRecordingDeviceName,
+ int(int index, char strNameUTF8[128], char strGuidUTF8[128]));
+ MOCK_METHOD3(GetPlayoutDeviceName,
+ int(int index, char strNameUTF8[128], char strGuidUTF8[128]));
+ MOCK_METHOD2(SetRecordingDevice,
+ int(int index, StereoChannel recordingChannel));
+ MOCK_METHOD1(SetPlayoutDevice, int(int index));
+ MOCK_METHOD1(SetAudioDeviceLayer, int(AudioLayers audioLayer));
+ MOCK_METHOD1(GetAudioDeviceLayer, int(AudioLayers& audioLayer));
+ MOCK_METHOD1(SetRecordingSampleRate, int(unsigned int samples_per_sec));
+ MOCK_CONST_METHOD1(RecordingSampleRate, int(unsigned int* samples_per_sec));
+ MOCK_METHOD1(SetPlayoutSampleRate, int(unsigned int samples_per_sec));
+ MOCK_CONST_METHOD1(PlayoutSampleRate, int(unsigned int* samples_per_sec));
+ MOCK_CONST_METHOD0(BuiltInAECIsAvailable, bool());
+ MOCK_METHOD1(EnableBuiltInAEC, int(bool enable));
+ MOCK_CONST_METHOD0(BuiltInAGCIsAvailable, bool());
+ MOCK_METHOD1(EnableBuiltInAGC, int(bool enable));
+ MOCK_CONST_METHOD0(BuiltInNSIsAvailable, bool());
+ MOCK_METHOD1(EnableBuiltInNS, int(bool enable));
+
+ // VoENetEqStats
+ MOCK_METHOD2(GetNetworkStatistics,
+ int(int channel, NetworkStatistics& stats));
+ MOCK_CONST_METHOD2(GetDecodingCallStatistics,
+ int(int channel, AudioDecodingCallStats* stats));
+
+ // VoENetwork
+ MOCK_METHOD2(RegisterExternalTransport,
+ int(int channel, Transport& transport));
+ MOCK_METHOD1(DeRegisterExternalTransport, int(int channel));
+ MOCK_METHOD3(ReceivedRTPPacket,
+ int(int channel, const void* data, size_t length));
+ MOCK_METHOD4(ReceivedRTPPacket,
+ int(int channel,
+ const void* data,
+ size_t length,
+ const PacketTime& packet_time));
+ MOCK_METHOD3(ReceivedRTCPPacket,
+ int(int channel, const void* data, size_t length));
+
+ // VoERTP_RTCP
+ MOCK_METHOD2(SetLocalSSRC, int(int channel, unsigned int ssrc));
+ MOCK_METHOD2(GetLocalSSRC, int(int channel, unsigned int& ssrc));
+ MOCK_METHOD2(GetRemoteSSRC, int(int channel, unsigned int& ssrc));
+ MOCK_METHOD3(SetSendAudioLevelIndicationStatus,
+ int(int channel, bool enable, unsigned char id));
+ MOCK_METHOD3(SetReceiveAudioLevelIndicationStatus,
+ int(int channel, bool enable, unsigned char id));
+ MOCK_METHOD3(SetSendAbsoluteSenderTimeStatus,
+ int(int channel, bool enable, unsigned char id));
+ MOCK_METHOD3(SetReceiveAbsoluteSenderTimeStatus,
+ int(int channel, bool enable, unsigned char id));
+ MOCK_METHOD2(SetRTCPStatus, int(int channel, bool enable));
+ MOCK_METHOD2(GetRTCPStatus, int(int channel, bool& enabled));
+ MOCK_METHOD2(SetRTCP_CNAME, int(int channel, const char cName[256]));
+ MOCK_METHOD2(GetRTCP_CNAME, int(int channel, char cName[256]));
+ MOCK_METHOD2(GetRemoteRTCP_CNAME, int(int channel, char cName[256]));
+ MOCK_METHOD7(GetRemoteRTCPData,
+ int(int channel,
+ unsigned int& NTPHigh,
+ unsigned int& NTPLow,
+ unsigned int& timestamp,
+ unsigned int& playoutTimestamp,
+ unsigned int* jitter,
+ unsigned short* fractionLost));
+ MOCK_METHOD4(GetRTPStatistics,
+ int(int channel,
+ unsigned int& averageJitterMs,
+ unsigned int& maxJitterMs,
+ unsigned int& discardedPackets));
+ MOCK_METHOD2(GetRTCPStatistics, int(int channel, CallStatistics& stats));
+ MOCK_METHOD2(GetRemoteRTCPReportBlocks,
+ int(int channel, std::vector<ReportBlock>* receive_blocks));
+ MOCK_METHOD3(SetREDStatus, int(int channel, bool enable, int redPayloadtype));
+ MOCK_METHOD3(GetREDStatus,
+ int(int channel, bool& enable, int& redPayloadtype));
+ MOCK_METHOD3(SetNACKStatus, int(int channel, bool enable, int maxNoPackets));
+
+ // VoEVideoSync
+ MOCK_METHOD1(GetPlayoutBufferSize, int(int& buffer_ms));
+ MOCK_METHOD2(SetMinimumPlayoutDelay, int(int channel, int delay_ms));
+ MOCK_METHOD3(GetDelayEstimate,
+ int(int channel,
+ int* jitter_buffer_delay_ms,
+ int* playout_buffer_delay_ms));
+ MOCK_CONST_METHOD1(GetLeastRequiredDelayMs, int(int channel));
+ MOCK_METHOD2(SetInitTimestamp, int(int channel, unsigned int timestamp));
+ MOCK_METHOD2(SetInitSequenceNumber, int(int channel, short sequenceNumber));
+ MOCK_METHOD2(GetPlayoutTimestamp, int(int channel, unsigned int& timestamp));
+ MOCK_METHOD3(GetRtpRtcp,
+ int(int channel,
+ RtpRtcp** rtpRtcpModule,
+ RtpReceiver** rtp_receiver));
+
+ // VoEVolumeControl
+ MOCK_METHOD1(SetSpeakerVolume, int(unsigned int volume));
+ MOCK_METHOD1(GetSpeakerVolume, int(unsigned int& volume));
+ MOCK_METHOD1(SetMicVolume, int(unsigned int volume));
+ MOCK_METHOD1(GetMicVolume, int(unsigned int& volume));
+ MOCK_METHOD2(SetInputMute, int(int channel, bool enable));
+ MOCK_METHOD2(GetInputMute, int(int channel, bool& enabled));
+ MOCK_METHOD1(GetSpeechInputLevel, int(unsigned int& level));
+ MOCK_METHOD2(GetSpeechOutputLevel, int(int channel, unsigned int& level));
+ MOCK_METHOD1(GetSpeechInputLevelFullRange, int(unsigned int& level));
+ MOCK_METHOD2(GetSpeechOutputLevelFullRange,
+ int(int channel, unsigned& level));
+ MOCK_METHOD2(SetChannelOutputVolumeScaling, int(int channel, float scaling));
+ MOCK_METHOD2(GetChannelOutputVolumeScaling, int(int channel, float& scaling));
+ MOCK_METHOD3(SetOutputVolumePan, int(int channel, float left, float right));
+ MOCK_METHOD3(GetOutputVolumePan, int(int channel, float& left, float& right));
+};
+} // namespace test
+} // namespace webrtc
+
+#endif // WEBRTC_AUDIO_MOCK_VOICE_ENGINE_H_
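Overriding the default NiceMock proxy for a specific channel could look like this sketch (the channel id and expectations are assumptions):

// Hypothetical injection of a custom channel proxy.
testing::NiceMock<webrtc::test::MockVoiceEngine> voice_engine;
EXPECT_CALL(voice_engine, ChannelProxyFactory(1))
    .WillOnce(testing::Invoke([](int channel_id) {
      auto* proxy = new testing::StrictMock<
          webrtc::test::MockVoEChannelProxy>();
      EXPECT_CALL(*proxy, SetRTCPStatus(true));
      return proxy;  // Ownership passes to the scoped_ptr in GetChannelProxy().
    }));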
diff --git a/webrtc/test/null_transport.h b/webrtc/test/null_transport.h
index f4b704634d..c49883e1dc 100644
--- a/webrtc/test/null_transport.h
+++ b/webrtc/test/null_transport.h
@@ -7,8 +7,8 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_VIDEO_ENGINE_TEST_COMMON_NULL_TRANSPORT_H_
-#define WEBRTC_VIDEO_ENGINE_TEST_COMMON_NULL_TRANSPORT_H_
+#ifndef WEBRTC_TEST_NULL_TRANSPORT_H_
+#define WEBRTC_TEST_NULL_TRANSPORT_H_
#include "webrtc/transport.h"
@@ -27,4 +27,4 @@ class NullTransport : public Transport {
} // namespace test
} // namespace webrtc
-#endif // WEBRTC_VIDEO_ENGINE_TEST_COMMON_NULL_TRANSPORT_H_
+#endif // WEBRTC_TEST_NULL_TRANSPORT_H_
diff --git a/webrtc/test/random.cc b/webrtc/test/random.cc
deleted file mode 100644
index c4c405f6b8..0000000000
--- a/webrtc/test/random.cc
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/test/random.h"
-
-#include <math.h>
-
-#include "webrtc/base/checks.h"
-
-namespace webrtc {
-
-namespace test {
-
-Random::Random(uint32_t seed) : a_(0x531FDB97 ^ seed), b_(0x6420ECA8 + seed) {
-}
-
-float Random::Rand() {
- const double kScale = 1.0f / (static_cast<uint64_t>(1) << 32);
- double result = kScale * b_;
- a_ ^= b_;
- b_ += a_;
- return static_cast<float>(result);
-}
-
-int Random::Rand(int low, int high) {
- RTC_DCHECK(low <= high);
- float uniform = Rand() * (high - low + 1) + low;
- return static_cast<int>(uniform);
-}
-
-int Random::Gaussian(int mean, int standard_deviation) {
- // Creating a Normal distribution variable from two independent uniform
- // variables based on the Box-Muller transform, which is defined on the
- // interval (0, 1], hence the mask+add below.
- const double kPi = 3.14159265358979323846;
- const double kScale = 1.0 / 0x80000000ul;
- double u1 = kScale * ((a_ & 0x7ffffffful) + 1);
- double u2 = kScale * ((b_ & 0x7ffffffful) + 1);
- a_ ^= b_;
- b_ += a_;
- return static_cast<int>(
- mean + standard_deviation * sqrt(-2 * log(u1)) * cos(2 * kPi * u2));
-}
-
-int Random::Exponential(float lambda) {
- float uniform = Rand();
- return static_cast<int>(-log(uniform) / lambda);
-}
-} // namespace test
-} // namespace webrtc
diff --git a/webrtc/test/random.h b/webrtc/test/random.h
deleted file mode 100644
index 5cc54f2129..0000000000
--- a/webrtc/test/random.h
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_TEST_RANDOM_H_
-#define WEBRTC_TEST_RANDOM_H_
-
-#include "webrtc/typedefs.h"
-#include "webrtc/base/constructormagic.h"
-
-namespace webrtc {
-
-namespace test {
-
-class Random {
- public:
- explicit Random(uint32_t seed);
-
- // Return pseudo-random number in the interval [0.0, 1.0).
- float Rand();
-
- // Return pseudo-random number mapped to the interval [low, high].
- int Rand(int low, int high);
-
- // Normal Distribution.
- int Gaussian(int mean, int standard_deviation);
-
- // Exponential Distribution.
- int Exponential(float lambda);
-
- // TODO(solenberg): Random from histogram.
- // template<typename T> int Distribution(const std::vector<T> histogram) {
-
- private:
- uint32_t a_;
- uint32_t b_;
-
- RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(Random);
-};
-} // namespace test
-} // namespace webrtc
-
-#endif // WEBRTC_TEST_RANDOM_H_
diff --git a/webrtc/test/rtp_file_reader.cc b/webrtc/test/rtp_file_reader.cc
index cb0e40705f..1413f00797 100644
--- a/webrtc/test/rtp_file_reader.cc
+++ b/webrtc/test/rtp_file_reader.cc
@@ -458,7 +458,7 @@ class PcapReader : public RtpFileReaderImpl {
rtp_parser.ParseRtcp(&marker.rtp_header);
packets_.push_back(marker);
} else {
- if (!rtp_parser.Parse(marker.rtp_header, NULL)) {
+ if (!rtp_parser.Parse(&marker.rtp_header, nullptr)) {
DEBUG_LOG("Not recognized as RTP/RTCP");
return kResultSkip;
}
diff --git a/webrtc/test/rtp_file_reader_unittest.cc b/webrtc/test/rtp_file_reader_unittest.cc
index 929813f999..15a456ccf6 100644
--- a/webrtc/test/rtp_file_reader_unittest.cc
+++ b/webrtc/test/rtp_file_reader_unittest.cc
@@ -85,7 +85,8 @@ class TestPcapFileReader : public ::testing::Test {
while (rtp_packet_source_->NextPacket(&packet)) {
RtpUtility::RtpHeaderParser rtp_header_parser(packet.data, packet.length);
webrtc::RTPHeader header;
- if (!rtp_header_parser.RTCP() && rtp_header_parser.Parse(header, NULL)) {
+ if (!rtp_header_parser.RTCP() &&
+ rtp_header_parser.Parse(&header, nullptr)) {
pps[header.ssrc]++;
}
}
diff --git a/webrtc/test/rtp_rtcp_observer.h b/webrtc/test/rtp_rtcp_observer.h
index 89b6dd06bd..5eb88d3f0d 100644
--- a/webrtc/test/rtp_rtcp_observer.h
+++ b/webrtc/test/rtp_rtcp_observer.h
@@ -7,8 +7,8 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_VIDEO_ENGINE_TEST_COMMON_RTP_RTCP_OBSERVER_H_
-#define WEBRTC_VIDEO_ENGINE_TEST_COMMON_RTP_RTCP_OBSERVER_H_
+#ifndef WEBRTC_TEST_RTP_RTCP_OBSERVER_H_
+#define WEBRTC_TEST_RTP_RTCP_OBSERVER_H_
#include <map>
#include <vector>
@@ -16,7 +16,8 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/criticalsection.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
+#include "webrtc/base/event.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
#include "webrtc/test/constants.h"
#include "webrtc/test/direct_transport.h"
#include "webrtc/typedefs.h"
@@ -36,10 +37,7 @@ class RtpRtcpObserver {
virtual ~RtpRtcpObserver() {}
- virtual EventTypeWrapper Wait() {
- EventTypeWrapper result = observation_complete_->Wait(timeout_ms_);
- return result;
- }
+ virtual bool Wait() { return observation_complete_.Wait(timeout_ms_); }
virtual Action OnSendRtp(const uint8_t* packet, size_t length) {
return SEND_PACKET;
@@ -58,8 +56,8 @@ class RtpRtcpObserver {
}
protected:
- explicit RtpRtcpObserver(unsigned int event_timeout_ms)
- : observation_complete_(EventWrapper::Create()),
+ explicit RtpRtcpObserver(int event_timeout_ms)
+ : observation_complete_(false, false),
parser_(RtpHeaderParser::Create()),
timeout_ms_(event_timeout_ms) {
parser_->RegisterRtpHeaderExtension(kRtpExtensionTransmissionTimeOffset,
@@ -70,11 +68,11 @@ class RtpRtcpObserver {
kTransportSequenceNumberExtensionId);
}
- const rtc::scoped_ptr<EventWrapper> observation_complete_;
+ rtc::Event observation_complete_;
const rtc::scoped_ptr<RtpHeaderParser> parser_;
private:
- unsigned int timeout_ms_;
+ const int timeout_ms_;
};
class PacketTransport : public test::DirectTransport {
@@ -138,4 +136,4 @@ class PacketTransport : public test::DirectTransport {
} // namespace test
} // namespace webrtc
-#endif // WEBRTC_VIDEO_ENGINE_TEST_COMMON_RTP_RTCP_OBSERVER_H_
+#endif // WEBRTC_TEST_RTP_RTCP_OBSERVER_H_
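Under the new rtc::Event-based API, Wait() returns a plain bool instead of an EventTypeWrapper; a subclass sketch (names and timeout are hypothetical):

// Hypothetical observer using the rtc::Event-based Wait().
class RtpSeenObserver : public webrtc::test::RtpRtcpObserver {
 public:
  RtpSeenObserver() : RtpRtcpObserver(/* event_timeout_ms */ 30000) {}
  Action OnSendRtp(const uint8_t* packet, size_t length) override {
    observation_complete_.Set();  // Wakes any thread blocked in Wait().
    return SEND_PACKET;
  }
};
// In a test: EXPECT_TRUE(observer.Wait()) << "Timed out waiting for RTP.";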
diff --git a/webrtc/test/run_loop.h b/webrtc/test/run_loop.h
index 31012525e2..238e2dc282 100644
--- a/webrtc/test/run_loop.h
+++ b/webrtc/test/run_loop.h
@@ -7,8 +7,8 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_VIDEO_ENGINE_TEST_COMMON_RUN_LOOP_H_
-#define WEBRTC_VIDEO_ENGINE_TEST_COMMON_RUN_LOOP_H_
+#ifndef WEBRTC_TEST_RUN_LOOP_H_
+#define WEBRTC_TEST_RUN_LOOP_H_
namespace webrtc {
namespace test {
@@ -19,4 +19,4 @@ void PressEnterToContinue();
} // namespace test
} // namespace webrtc
-#endif // WEBRTC_VIDEO_ENGINE_TEST_COMMON_RUN_LOOP_H_
+#endif // WEBRTC_TEST_RUN_LOOP_H_
diff --git a/webrtc/test/statistics.h b/webrtc/test/statistics.h
index 0fc3a04ea9..d4a111e061 100644
--- a/webrtc/test/statistics.h
+++ b/webrtc/test/statistics.h
@@ -7,8 +7,8 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_VIDEO_ENGINE_TEST_COMMON_STATISTICS_H_
-#define WEBRTC_VIDEO_ENGINE_TEST_COMMON_STATISTICS_H_
+#ifndef WEBRTC_TEST_STATISTICS_H_
+#define WEBRTC_TEST_STATISTICS_H_
#include "webrtc/typedefs.h"
@@ -33,4 +33,4 @@ class Statistics {
} // namespace test
} // namespace webrtc
-#endif // WEBRTC_VIDEO_ENGINE_TEST_COMMON_STATISTICS_H_
+#endif // WEBRTC_TEST_STATISTICS_H_
diff --git a/webrtc/test/test.gyp b/webrtc/test/test.gyp
index 0d251dffdd..5bb7793842 100644
--- a/webrtc/test/test.gyp
+++ b/webrtc/test/test.gyp
@@ -22,7 +22,7 @@
],
'sources': [
'channel_transport/channel_transport.cc',
- 'channel_transport/include/channel_transport.h',
+ 'channel_transport/channel_transport.h',
'channel_transport/traffic_control_win.cc',
'channel_transport/traffic_control_win.h',
'channel_transport/udp_socket_manager_posix.cc',
@@ -41,11 +41,29 @@
'channel_transport/udp_transport_impl.cc',
'channel_transport/udp_transport_impl.h',
],
+ 'conditions': [
+ ['OS=="win" and clang==1', {
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'AdditionalOptions': [
+ # Disable warnings failing when compiling with Clang on Windows.
+ # https://bugs.chromium.org/p/webrtc/issues/detail?id=5366
+ '-Wno-parentheses-equality',
+ '-Wno-reorder',
+ '-Wno-tautological-constant-out-of-range-compare',
+ '-Wno-unused-private-field',
+ ],
+ },
+ },
+ }],
+ ], # conditions.
},
{
- 'target_name': 'frame_generator',
+ 'target_name': 'fake_video_frames',
'type': 'static_library',
'sources': [
+ 'fake_texture_frame.cc',
+ 'fake_texture_frame.h',
'frame_generator.cc',
'frame_generator.h',
],
@@ -79,6 +97,7 @@
],
'dependencies': [
'<(webrtc_root)/common.gyp:webrtc_common',
+ '<(webrtc_root)/system_wrappers/system_wrappers.gyp:field_trial_default',
'<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
],
},
@@ -124,7 +143,6 @@
'testsupport/frame_reader.h',
'testsupport/frame_writer.cc',
'testsupport/frame_writer.h',
- 'testsupport/gtest_disable.h',
'testsupport/iosfileutils.mm',
'testsupport/mock/mock_frame_reader.h',
'testsupport/mock/mock_frame_writer.h',
diff --git a/webrtc/test/test_main.cc b/webrtc/test/test_main.cc
index 733831f5be..a435575f88 100644
--- a/webrtc/test/test_main.cc
+++ b/webrtc/test/test_main.cc
@@ -10,6 +10,7 @@
#include "gflags/gflags.h"
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/logging.h"
#include "webrtc/test/field_trial.h"
#include "webrtc/test/testsupport/fileutils.h"
@@ -21,6 +22,11 @@ DEFINE_string(force_fieldtrials, "",
int main(int argc, char* argv[]) {
::testing::InitGoogleTest(&argc, argv);
+ // Default to LS_INFO, even for release builds, to provide better test logging.
+ // TODO(pbos): Consider adding a command-line override.
+ if (rtc::LogMessage::GetLogToDebug() > rtc::LS_INFO)
+ rtc::LogMessage::LogToDebug(rtc::LS_INFO);
+
// AllowCommandLineParsing allows us to ignore flags passed on to us by
// Chromium build bots without having to explicitly disable them.
google::AllowCommandLineReparsing();
diff --git a/webrtc/test/testsupport/fileutils_unittest.cc b/webrtc/test/testsupport/fileutils_unittest.cc
index dff7f2249b..e205db3ecf 100644
--- a/webrtc/test/testsupport/fileutils_unittest.cc
+++ b/webrtc/test/testsupport/fileutils_unittest.cc
@@ -16,7 +16,6 @@
#include <string>
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
#ifdef WIN32
#define chdir _chdir
@@ -66,7 +65,14 @@ TEST_F(FileUtilsTest, ProjectRootPath) {
}
// Similar to the above test, but for the output dir
-TEST_F(FileUtilsTest, DISABLED_ON_ANDROID(OutputPathFromUnchangedWorkingDir)) {
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_OutputPathFromUnchangedWorkingDir \
+ DISABLED_OutputPathFromUnchangedWorkingDir
+#else
+#define MAYBE_OutputPathFromUnchangedWorkingDir \
+ OutputPathFromUnchangedWorkingDir
+#endif
+TEST_F(FileUtilsTest, MAYBE_OutputPathFromUnchangedWorkingDir) {
std::string path = webrtc::test::OutputPath();
std::string expected_end = "out";
expected_end = kPathDelimiter + expected_end + kPathDelimiter;
@@ -75,7 +81,12 @@ TEST_F(FileUtilsTest, DISABLED_ON_ANDROID(OutputPathFromUnchangedWorkingDir)) {
// Tests with current working directory set to a directory higher up in the
// directory tree than the project root dir.
-TEST_F(FileUtilsTest, DISABLED_ON_ANDROID(OutputPathFromRootWorkingDir)) {
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_OutputPathFromRootWorkingDir DISABLED_OutputPathFromRootWorkingDir
+#else
+#define MAYBE_OutputPathFromRootWorkingDir OutputPathFromRootWorkingDir
+#endif
+TEST_F(FileUtilsTest, MAYBE_OutputPathFromRootWorkingDir) {
ASSERT_EQ(0, chdir(kPathDelimiter));
ASSERT_EQ("./", webrtc::test::OutputPath());
}
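The MAYBE_ pattern above is the general replacement for the deleted gtest_disable.h helpers; for any test the recipe is (names hypothetical):

// Hypothetical platform-disable without gtest_disable.h.
#if defined(WEBRTC_ANDROID)
#define MAYBE_SomeTest DISABLED_SomeTest
#else
#define MAYBE_SomeTest SomeTest
#endif
TEST_F(SomeFixture, MAYBE_SomeTest) {
  // Body runs on every platform except Android.
}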
diff --git a/webrtc/test/testsupport/gtest_disable.h b/webrtc/test/testsupport/gtest_disable.h
deleted file mode 100644
index fdc56acc05..0000000000
--- a/webrtc/test/testsupport/gtest_disable.h
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-#ifndef TEST_TESTSUPPORT_INCLUDE_GTEST_DISABLE_H_
-#define TEST_TESTSUPPORT_INCLUDE_GTEST_DISABLE_H_
-
-// Helper macros for platform disables. These can be chained. Example use:
-// TEST_F(ViEStandardIntegrationTest,
-// DISABLED_ON_LINUX(RunsBaseTestWithoutErrors)) { // ...
-//
-// Or, you can disable a whole test class by wrapping all mentions of the test
-// class name inside one of these macros.
-//
-// The platform #defines we are looking at here are set by the build system.
-#ifdef WEBRTC_LINUX
-#define DISABLED_ON_LINUX(test) DISABLED_##test
-#else
-#define DISABLED_ON_LINUX(test) test
-#endif
-
-#ifdef WEBRTC_MAC
-#define DISABLED_ON_MAC(test) DISABLED_##test
-#else
-#define DISABLED_ON_MAC(test) test
-#endif
-
-#ifdef _WIN32
-#define DISABLED_ON_WIN(test) DISABLED_##test
-#else
-#define DISABLED_ON_WIN(test) test
-#endif
-
-// Using some extra magic here to be able to chain Android and iOS macros.
-// http://stackoverflow.com/questions/8231966/why-do-i-need-double-layer-of-indirection-for-macros
-#ifdef WEBRTC_ANDROID
-#define DISABLED_ON_ANDROID_HIDDEN(test) DISABLED_##test
-#define DISABLED_ON_ANDROID(test) DISABLED_ON_ANDROID_HIDDEN(test)
-#else
-#define DISABLED_ON_ANDROID_HIDDEN(test) test
-#define DISABLED_ON_ANDROID(test) DISABLED_ON_ANDROID_HIDDEN(test)
-#endif
-
-#ifdef WEBRTC_IOS
-#define DISABLED_ON_IOS_HIDDEN(test) DISABLED_##test
-#define DISABLED_ON_IOS(test) DISABLED_ON_IOS_HIDDEN(test)
-#else
-#define DISABLED_ON_IOS_HIDDEN(test) test
-#define DISABLED_ON_IOS(test) DISABLED_ON_IOS_HIDDEN(test)
-#endif
-
-#endif // TEST_TESTSUPPORT_INCLUDE_GTEST_DISABLE_H_
diff --git a/webrtc/test/vcm_capturer.cc b/webrtc/test/vcm_capturer.cc
index 1c6b91915e..0a82236c98 100644
--- a/webrtc/test/vcm_capturer.cc
+++ b/webrtc/test/vcm_capturer.cc
@@ -10,7 +10,7 @@
#include "webrtc/test/vcm_capturer.h"
-#include "webrtc/modules/video_capture/include/video_capture_factory.h"
+#include "webrtc/modules/video_capture/video_capture_factory.h"
#include "webrtc/video_send_stream.h"
namespace webrtc {
diff --git a/webrtc/test/vcm_capturer.h b/webrtc/test/vcm_capturer.h
index 53d61fc53a..6c30dd50e0 100644
--- a/webrtc/test/vcm_capturer.h
+++ b/webrtc/test/vcm_capturer.h
@@ -7,13 +7,13 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_VIDEO_ENGINE_TEST_COMMON_VCM_CAPTURER_H_
-#define WEBRTC_VIDEO_ENGINE_TEST_COMMON_VCM_CAPTURER_H_
+#ifndef WEBRTC_TEST_VCM_CAPTURER_H_
+#define WEBRTC_TEST_VCM_CAPTURER_H_
#include "webrtc/base/criticalsection.h"
#include "webrtc/common_types.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/video_capture/include/video_capture.h"
+#include "webrtc/modules/video_capture/video_capture.h"
#include "webrtc/test/video_capturer.h"
namespace webrtc {
@@ -47,4 +47,4 @@ class VcmCapturer : public VideoCapturer, public VideoCaptureDataCallback {
} // test
} // webrtc
-#endif // WEBRTC_VIDEO_ENGINE_TEST_COMMON_VCM_CAPTURER_H_
+#endif // WEBRTC_TEST_VCM_CAPTURER_H_
diff --git a/webrtc/test/video_capturer.h b/webrtc/test/video_capturer.h
index 3fe86f1998..169fd7151d 100644
--- a/webrtc/test/video_capturer.h
+++ b/webrtc/test/video_capturer.h
@@ -7,8 +7,8 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_VIDEO_ENGINE_TEST_COMMON_VIDEO_CAPTURER_H_
-#define WEBRTC_VIDEO_ENGINE_TEST_COMMON_VIDEO_CAPTURER_H_
+#ifndef WEBRTC_TEST_VIDEO_CAPTURER_H_
+#define WEBRTC_TEST_VIDEO_CAPTURER_H_
#include <stddef.h>
@@ -39,4 +39,4 @@ class VideoCapturer {
} // test
} // webrtc
-#endif // WEBRTC_VIDEO_ENGINE_TEST_COMMON_VIDEO_CAPTURER_H_
+#endif // WEBRTC_TEST_VIDEO_CAPTURER_H_
diff --git a/webrtc/test/video_renderer.h b/webrtc/test/video_renderer.h
index c8623270a7..3739522d7a 100644
--- a/webrtc/test/video_renderer.h
+++ b/webrtc/test/video_renderer.h
@@ -7,8 +7,8 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_VIDEO_ENGINE_TEST_COMMON_VIDEO_RENDERER_H_
-#define WEBRTC_VIDEO_ENGINE_TEST_COMMON_VIDEO_RENDERER_H_
+#ifndef WEBRTC_TEST_VIDEO_RENDERER_H_
+#define WEBRTC_TEST_VIDEO_RENDERER_H_
#include <stddef.h>
@@ -36,4 +36,4 @@ class VideoRenderer : public webrtc::VideoRenderer {
} // namespace test
} // namespace webrtc
-#endif // WEBRTC_VIDEO_ENGINE_TEST_COMMON_VIDEO_RENDERER_H_
+#endif // WEBRTC_TEST_VIDEO_RENDERER_H_
diff --git a/webrtc/test/webrtc_test_common.gyp b/webrtc/test/webrtc_test_common.gyp
index d075cb470b..07ea2c9b74 100644
--- a/webrtc/test/webrtc_test_common.gyp
+++ b/webrtc/test/webrtc_test_common.gyp
@@ -32,17 +32,15 @@
'fake_encoder.h',
'fake_network_pipe.cc',
'fake_network_pipe.h',
- 'fake_voice_engine.cc',
- 'fake_voice_engine.h',
'frame_generator_capturer.cc',
'frame_generator_capturer.h',
'layer_filtering_transport.cc',
'layer_filtering_transport.h',
'mock_transport.h',
+ 'mock_voe_channel_proxy.h',
+ 'mock_voice_engine.h',
'null_transport.cc',
'null_transport.h',
- 'random.cc',
- 'random.h',
'rtp_rtcp_observer.h',
'run_loop.cc',
'run_loop.h',
@@ -62,13 +60,14 @@
}],
],
'dependencies': [
+ '<(DEPTH)/testing/gmock.gyp:gmock',
'<(DEPTH)/testing/gtest.gyp:gtest',
'<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
'<(webrtc_root)/base/base.gyp:rtc_base',
'<(webrtc_root)/common.gyp:webrtc_common',
'<(webrtc_root)/modules/modules.gyp:media_file',
'<(webrtc_root)/modules/modules.gyp:video_render',
- '<(webrtc_root)/test/test.gyp:frame_generator',
+ '<(webrtc_root)/test/test.gyp:fake_video_frames',
'<(webrtc_root)/test/test.gyp:test_support',
'<(webrtc_root)/test/test.gyp:rtp_test_utils',
'<(webrtc_root)/webrtc.gyp:webrtc',
@@ -116,11 +115,24 @@
'<(directx_sdk_path)/Include',
],
}],
+ ['OS=="win" and clang==1', {
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'AdditionalOptions': [
+ # Disable warnings failing when compiling with Clang on Windows.
+ # https://bugs.chromium.org/p/webrtc/issues/detail?id=5366
+ '-Wno-bool-conversion',
+ '-Wno-comment',
+ '-Wno-delete-non-virtual-dtor',
+ ],
+ },
+ },
+ }],
],
'dependencies': [
'<(DEPTH)/testing/gtest.gyp:gtest',
'<(webrtc_root)/modules/modules.gyp:media_file',
- '<(webrtc_root)/test/test.gyp:frame_generator',
+ '<(webrtc_root)/test/test.gyp:fake_video_frames',
'<(webrtc_root)/test/test.gyp:test_support',
],
'direct_dependent_settings': {
diff --git a/webrtc/test/win/d3d_renderer.h b/webrtc/test/win/d3d_renderer.h
index 46ce266460..cf2319edc4 100644
--- a/webrtc/test/win/d3d_renderer.h
+++ b/webrtc/test/win/d3d_renderer.h
@@ -7,8 +7,8 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_VIDEO_ENGINE_TEST_COMMON_WIN_D3D_RENDERER_H_
-#define WEBRTC_VIDEO_ENGINE_TEST_COMMON_WIN_D3D_RENDERER_H_
+#ifndef WEBRTC_TEST_WIN_D3D_RENDERER_H_
+#define WEBRTC_TEST_WIN_D3D_RENDERER_H_
#include <Windows.h>
#include <d3d9.h>
@@ -51,4 +51,4 @@ class D3dRenderer : public VideoRenderer {
} // namespace test
} // namespace webrtc
-#endif // WEBRTC_VIDEO_ENGINE_TEST_COMMON_WIN_D3D_RENDERER_H_
+#endif // WEBRTC_TEST_WIN_D3D_RENDERER_H_
diff --git a/webrtc/tools/agc/activity_metric.cc b/webrtc/tools/agc/activity_metric.cc
index 18e7c6dad8..2cb0a1b2df 100644
--- a/webrtc/tools/agc/activity_metric.cc
+++ b/webrtc/tools/agc/activity_metric.cc
@@ -24,7 +24,7 @@
#include "webrtc/modules/audio_processing/vad/common.h"
#include "webrtc/modules/audio_processing/vad/pitch_based_vad.h"
#include "webrtc/modules/audio_processing/vad/standalone_vad.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
static const int kAgcAnalWindowSamples = 100;
static const double kDefaultActivityThreshold = 0.3;
@@ -56,7 +56,7 @@ namespace webrtc {
// silence frame. Otherwise true VAD would drift with respect to the audio.
// We only consider mono inputs.
static void DitherSilence(AudioFrame* frame) {
- ASSERT_EQ(1, frame->num_channels_);
+ ASSERT_EQ(1u, frame->num_channels_);
const double kRmsSilence = 5;
const double sum_squared_silence = kRmsSilence * kRmsSilence *
frame->samples_per_channel_;
@@ -65,7 +65,7 @@ static void DitherSilence(AudioFrame* frame) {
sum_squared += frame->data_[n] * frame->data_[n];
if (sum_squared <= sum_squared_silence) {
for (size_t n = 0; n < frame->samples_per_channel_; n++)
- frame->data_[n] = (rand() & 0xF) - 8;
+ frame->data_[n] = (rand() & 0xF) - 8; // NOLINT: non-threadsafe rand() is fine here.
}
}
diff --git a/webrtc/tools/agc/agc_harness.cc b/webrtc/tools/agc/agc_harness.cc
index 73e1e09935..0d35d4b56a 100644
--- a/webrtc/tools/agc/agc_harness.cc
+++ b/webrtc/tools/agc/agc_harness.cc
@@ -12,10 +12,11 @@
#include "gflags/gflags.h"
#include "webrtc/base/checks.h"
+#include "webrtc/base/format_macros.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/system_wrappers/include/sleep.h"
#include "webrtc/system_wrappers/include/trace.h"
-#include "webrtc/test/channel_transport/include/channel_transport.h"
+#include "webrtc/test/channel_transport/channel_transport.h"
#include "webrtc/test/testsupport/trace_to_stderr.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/voice_engine/include/voe_audio_processing.h"
@@ -176,8 +177,8 @@ class AgcVoiceEngine {
printf("Codecs:\n");
for (int i = 0; i < codec_->NumOfCodecs(); i++) {
RTC_CHECK_EQ(0, codec_->GetCodec(i, params));
- printf("%d %s/%d/%d\n", params.pltype, params.plname, params.plfreq,
- params.channels);
+ printf("%d %s/%d/%" PRIuS "\n", params.pltype, params.plname,
+ params.plfreq, params.channels);
}
}
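format_macros.h supplies PRIuS so that size_t values such as |params.channels| print portably across MSVC and POSIX; a minimal illustration (the variable is hypothetical):

// Hypothetical illustration of PRIuS from webrtc/base/format_macros.h.
size_t num_channels = 2;
printf("channels=%" PRIuS "\n", num_channels);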
diff --git a/webrtc/tools/agc/test_utils.cc b/webrtc/tools/agc/test_utils.cc
index 81819c598e..a0ed74732d 100644
--- a/webrtc/tools/agc/test_utils.cc
+++ b/webrtc/tools/agc/test_utils.cc
@@ -14,7 +14,7 @@
#include <algorithm>
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
namespace webrtc {
diff --git a/webrtc/tools/e2e_quality/audio/audio_e2e_harness.cc b/webrtc/tools/e2e_quality/audio/audio_e2e_harness.cc
index 1eb8925537..2594fd1317 100644
--- a/webrtc/tools/e2e_quality/audio/audio_e2e_harness.cc
+++ b/webrtc/tools/e2e_quality/audio/audio_e2e_harness.cc
@@ -16,7 +16,7 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/test/channel_transport/include/channel_transport.h"
+#include "webrtc/test/channel_transport/channel_transport.h"
#include "webrtc/voice_engine/include/voe_audio_processing.h"
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_codec.h"
diff --git a/webrtc/tools/force_mic_volume_max/force_mic_volume_max.cc b/webrtc/tools/force_mic_volume_max/force_mic_volume_max.cc
index 570fa0ad24..b6b1596866 100644
--- a/webrtc/tools/force_mic_volume_max/force_mic_volume_max.cc
+++ b/webrtc/tools/force_mic_volume_max/force_mic_volume_max.cc
@@ -13,7 +13,7 @@
#include <stdio.h>
#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/test/channel_transport/include/channel_transport.h"
+#include "webrtc/test/channel_transport/channel_transport.h"
#include "webrtc/voice_engine/include/voe_audio_processing.h"
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_volume_control.h"
diff --git a/webrtc/tools/frame_analyzer/video_quality_analysis.cc b/webrtc/tools/frame_analyzer/video_quality_analysis.cc
index 172baa72b8..dfd57f1961 100644
--- a/webrtc/tools/frame_analyzer/video_quality_analysis.cc
+++ b/webrtc/tools/frame_analyzer/video_quality_analysis.cc
@@ -227,7 +227,7 @@ void RunAnalysis(const char* reference_file_name, const char* test_file_name,
ResultsContainer* results) {
// Check if the reference_file_name ends with "y4m".
bool y4m_mode = false;
- if (std::string(reference_file_name).find("y4m") != std::string::npos){
+ if (std::string(reference_file_name).find("y4m") != std::string::npos) {
y4m_mode = true;
}
diff --git a/webrtc/tools/frame_editing/frame_editing_lib.cc b/webrtc/tools/frame_editing/frame_editing_lib.cc
index 79c6033a30..90855a354c 100644
--- a/webrtc/tools/frame_editing/frame_editing_lib.cc
+++ b/webrtc/tools/frame_editing/frame_editing_lib.cc
@@ -15,6 +15,7 @@
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/tools/frame_editing/frame_editing_lib.h"
#include "webrtc/typedefs.h"
using std::string;
diff --git a/webrtc/tools/frame_editing/frame_editing_lib.h b/webrtc/tools/frame_editing/frame_editing_lib.h
index 245d60f376..94595c43bb 100644
--- a/webrtc/tools/frame_editing/frame_editing_lib.h
+++ b/webrtc/tools/frame_editing/frame_editing_lib.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_TOOLS_FRAME_EDITING_FRAME_EDITING_H_
-#define WEBRTC_TOOLS_FRAME_EDITING_FRAME_EDITING_H_
+#ifndef WEBRTC_TOOLS_FRAME_EDITING_FRAME_EDITING_LIB_H_
+#define WEBRTC_TOOLS_FRAME_EDITING_FRAME_EDITING_LIB_H_
#include <string>
@@ -36,4 +36,4 @@ int EditFrames(const std::string& in_path, int width, int height,
int last_frame_to_process, const std::string& out_path);
} // namespace webrtc
-#endif // WEBRTC_TOOLS_FRAME_EDITING_FRAME_EDITING_H_
+#endif // WEBRTC_TOOLS_FRAME_EDITING_FRAME_EDITING_LIB_H_
diff --git a/webrtc/tools/psnr_ssim_analyzer/psnr_ssim_analyzer.cc b/webrtc/tools/psnr_ssim_analyzer/psnr_ssim_analyzer.cc
index bae145a78f..737661c3df 100644
--- a/webrtc/tools/psnr_ssim_analyzer/psnr_ssim_analyzer.cc
+++ b/webrtc/tools/psnr_ssim_analyzer/psnr_ssim_analyzer.cc
@@ -25,7 +25,7 @@ void CompareFiles(const char* reference_file_name, const char* test_file_name,
const char* results_file_name, int width, int height) {
// Check if the reference_file_name ends with "y4m".
bool y4m_mode = false;
- if (std::string(reference_file_name).find("y4m") != std::string::npos){
+ if (std::string(reference_file_name).find("y4m") != std::string::npos) {
y4m_mode = true;
}
@@ -38,8 +38,8 @@ void CompareFiles(const char* reference_file_name, const char* test_file_name,
uint8_t* ref_frame = new uint8_t[size];
bool read_result = true;
- for(int frame_counter = 0; frame_counter < MAX_NUM_FRAMES_PER_FILE;
- ++frame_counter){
+ for (int frame_counter = 0; frame_counter < MAX_NUM_FRAMES_PER_FILE;
+ ++frame_counter) {
read_result &= (y4m_mode) ? webrtc::test::ExtractFrameFromY4mFile(
reference_file_name, width, height, frame_counter, ref_frame):
webrtc::test::ExtractFrameFromYuvFile(reference_file_name, width,
diff --git a/webrtc/tools/rtcbot/OWNERS b/webrtc/tools/rtcbot/OWNERS
index efdce51ca6..296f71fffc 100644
--- a/webrtc/tools/rtcbot/OWNERS
+++ b/webrtc/tools/rtcbot/OWNERS
@@ -1,2 +1 @@
andresp@webrtc.org
-houssainy@google.com
diff --git a/webrtc/transport.h b/webrtc/transport.h
index b9df7c31d1..4e329de93f 100644
--- a/webrtc/transport.h
+++ b/webrtc/transport.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_VIDEO_ENGINE_NEW_INCLUDE_TRANSPORT_H_
-#define WEBRTC_VIDEO_ENGINE_NEW_INCLUDE_TRANSPORT_H_
+#ifndef WEBRTC_TRANSPORT_H_
+#define WEBRTC_TRANSPORT_H_
#include <stddef.h>
@@ -38,4 +38,4 @@ class Transport {
} // namespace webrtc
-#endif // WEBRTC_VIDEO_ENGINE_NEW_INCLUDE_TRANSPORT_H_
+#endif // WEBRTC_TRANSPORT_H_
diff --git a/webrtc/typedefs.h b/webrtc/typedefs.h
index d8754908bd..6a3f441e22 100644
--- a/webrtc/typedefs.h
+++ b/webrtc/typedefs.h
@@ -24,6 +24,7 @@
#define WEBRTC_ARCH_64_BITS
#define WEBRTC_ARCH_LITTLE_ENDIAN
#elif defined(__aarch64__)
+#define WEBRTC_ARCH_ARM_FAMILY
#define WEBRTC_ARCH_64_BITS
#define WEBRTC_ARCH_LITTLE_ENDIAN
#elif defined(_M_IX86) || defined(__i386__)
@@ -32,16 +33,15 @@
#define WEBRTC_ARCH_32_BITS
#define WEBRTC_ARCH_LITTLE_ENDIAN
#elif defined(__ARMEL__)
-// TODO(ajm): We'd prefer to control platform defines here, but this is
-// currently provided by the Android makefiles. Commented to avoid duplicate
-// definition warnings.
-//#define WEBRTC_ARCH_ARM
-// TODO(ajm): Chromium uses the following two defines. Should we switch?
-//#define WEBRTC_ARCH_ARM_FAMILY
-//#define WEBRTC_ARCH_ARMEL
+#define WEBRTC_ARCH_ARM_FAMILY
#define WEBRTC_ARCH_32_BITS
#define WEBRTC_ARCH_LITTLE_ENDIAN
#elif defined(__MIPSEL__)
+#if defined(__LP64__)
+#define WEBRTC_ARCH_MIPS64_FAMILY
+#else
+#define WEBRTC_ARCH_MIPS_FAMILY
+#endif
#define WEBRTC_ARCH_32_BITS
#define WEBRTC_ARCH_LITTLE_ENDIAN
#elif defined(__pnacl__)
@@ -68,11 +68,12 @@
// Annotate a function indicating the caller must examine the return value.
// Use like:
// int foo() WARN_UNUSED_RESULT;
+// To explicitly ignore a result, see |ignore_result()| in <base/macros.h>.
// TODO(ajm): Hack to avoid multiple definitions until the base/ of webrtc and
// libjingle are merged.
#if !defined(WARN_UNUSED_RESULT)
-#if defined(__GNUC__)
-#define WARN_UNUSED_RESULT __attribute__((warn_unused_result))
+#if defined(__GNUC__) || defined(__clang__)
+#define WARN_UNUSED_RESULT __attribute__ ((__warn_unused_result__))
#else
#define WARN_UNUSED_RESULT
#endif
@@ -83,7 +84,7 @@
// assert(result == 17);
#ifndef ATTRIBUTE_UNUSED
#if defined(__GNUC__) || defined(__clang__)
-#define ATTRIBUTE_UNUSED __attribute__((unused))
+#define ATTRIBUTE_UNUSED __attribute__ ((__unused__))
#else
#define ATTRIBUTE_UNUSED
#endif
@@ -103,7 +104,7 @@
#if defined(_MSC_VER)
#define NO_RETURN __declspec(noreturn)
#elif defined(__GNUC__)
-#define NO_RETURN __attribute__((noreturn))
+#define NO_RETURN __attribute__ ((__noreturn__))
#else
#define NO_RETURN
#endif
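
The typedefs.h hunks above make the architecture families explicit: __aarch64__ and __ARMEL__ both now define WEBRTC_ARCH_ARM_FAMILY, and __MIPSEL__ splits into WEBRTC_ARCH_MIPS64_FAMILY or WEBRTC_ARCH_MIPS_FAMILY depending on __LP64__. A minimal sketch of how such family macros are typically consumed for compile-time dispatch; the SumSamples* helpers are hypothetical, not part of this patch:

#include <cstddef>
#include <cstdint>
#include "webrtc/typedefs.h"

// Hypothetical helpers; each would live in its own platform-specific file.
int64_t SumSamplesC(const int16_t* in, size_t len);
int64_t SumSamplesNeon(const int16_t* in, size_t len);
int64_t SumSamplesMips(const int16_t* in, size_t len);

int64_t SumSamples(const int16_t* in, size_t len) {
#if defined(WEBRTC_ARCH_ARM_FAMILY)
  return SumSamplesNeon(in, len);  // now covers __ARMEL__ and __aarch64__
#elif defined(WEBRTC_ARCH_MIPS_FAMILY) || defined(WEBRTC_ARCH_MIPS64_FAMILY)
  return SumSamplesMips(in, len);
#else
  return SumSamplesC(in, len);     // portable fallback
#endif
}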
diff --git a/webrtc/video/BUILD.gn b/webrtc/video/BUILD.gn
index 408bb366f0..e35772e22c 100644
--- a/webrtc/video/BUILD.gn
+++ b/webrtc/video/BUILD.gn
@@ -10,35 +10,24 @@ import("../build/webrtc.gni")
source_set("video") {
sources = [
- "../video_engine/call_stats.cc",
- "../video_engine/call_stats.h",
- "../video_engine/encoder_state_feedback.cc",
- "../video_engine/encoder_state_feedback.h",
- "../video_engine/overuse_frame_detector.cc",
- "../video_engine/overuse_frame_detector.h",
- "../video_engine/payload_router.cc",
- "../video_engine/payload_router.h",
- "../video_engine/report_block_stats.cc",
- "../video_engine/report_block_stats.h",
- "../video_engine/stream_synchronization.cc",
- "../video_engine/stream_synchronization.h",
- "../video_engine/vie_channel.cc",
- "../video_engine/vie_channel.h",
- "../video_engine/vie_defines.h",
- "../video_engine/vie_encoder.cc",
- "../video_engine/vie_encoder.h",
- "../video_engine/vie_receiver.cc",
- "../video_engine/vie_receiver.h",
- "../video_engine/vie_remb.cc",
- "../video_engine/vie_remb.h",
- "../video_engine/vie_sync_module.cc",
- "../video_engine/vie_sync_module.h",
+ "call_stats.cc",
+ "call_stats.h",
"encoded_frame_callback_adapter.cc",
"encoded_frame_callback_adapter.h",
+ "encoder_state_feedback.cc",
+ "encoder_state_feedback.h",
+ "overuse_frame_detector.cc",
+ "overuse_frame_detector.h",
+ "payload_router.cc",
+ "payload_router.h",
"receive_statistics_proxy.cc",
"receive_statistics_proxy.h",
+ "report_block_stats.cc",
+ "report_block_stats.h",
"send_statistics_proxy.cc",
"send_statistics_proxy.h",
+ "stream_synchronization.cc",
+ "stream_synchronization.h",
"video_capture_input.cc",
"video_capture_input.h",
"video_decoder.cc",
@@ -47,6 +36,16 @@ source_set("video") {
"video_receive_stream.h",
"video_send_stream.cc",
"video_send_stream.h",
+ "vie_channel.cc",
+ "vie_channel.h",
+ "vie_encoder.cc",
+ "vie_encoder.h",
+ "vie_receiver.cc",
+ "vie_receiver.h",
+ "vie_remb.cc",
+ "vie_remb.h",
+ "vie_sync_module.cc",
+ "vie_sync_module.h",
]
configs += [ "..:common_config" ]
@@ -70,7 +69,7 @@ source_set("video") {
"../modules/video_coding",
"../modules/video_processing",
"../modules/video_render:video_render_module",
- "../voice_engine",
"../system_wrappers",
+ "../voice_engine",
]
}
diff --git a/webrtc/video/call_stats.cc b/webrtc/video/call_stats.cc
new file mode 100644
index 0000000000..69ea1a3d78
--- /dev/null
+++ b/webrtc/video/call_stats.cc
@@ -0,0 +1,168 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video/call_stats.h"
+
+#include <assert.h>
+
+#include <algorithm>
+
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+
+namespace webrtc {
+namespace {
+// Time interval for updating the observers.
+const int64_t kUpdateIntervalMs = 1000;
+// Weight factor to apply to the average rtt.
+const float kWeightFactor = 0.3f;
+
+void RemoveOldReports(int64_t now, std::list<CallStats::RttTime>* reports) {
+ // An RTT report is considered valid for this long.
+ const int64_t kRttTimeoutMs = 1500;
+ while (!reports->empty() &&
+ (now - reports->front().time) > kRttTimeoutMs) {
+ reports->pop_front();
+ }
+}
+
+int64_t GetMaxRttMs(std::list<CallStats::RttTime>* reports) {
+ int64_t max_rtt_ms = 0;
+ for (std::list<CallStats::RttTime>::const_iterator it = reports->begin();
+ it != reports->end(); ++it) {
+ max_rtt_ms = std::max(it->rtt, max_rtt_ms);
+ }
+ return max_rtt_ms;
+}
+
+int64_t GetAvgRttMs(std::list<CallStats::RttTime>* reports) {
+ if (reports->empty()) {
+ return 0;
+ }
+ int64_t sum = 0;
+ for (std::list<CallStats::RttTime>::const_iterator it = reports->begin();
+ it != reports->end(); ++it) {
+ sum += it->rtt;
+ }
+ return sum / reports->size();
+}
+
+void UpdateAvgRttMs(std::list<CallStats::RttTime>* reports, int64_t* avg_rtt) {
+ int64_t cur_rtt_ms = GetAvgRttMs(reports);
+ if (cur_rtt_ms == 0) {
+ // Reset.
+ *avg_rtt = 0;
+ return;
+ }
+ if (*avg_rtt == 0) {
+ // Initialize.
+ *avg_rtt = cur_rtt_ms;
+ return;
+ }
+ *avg_rtt = *avg_rtt * (1.0f - kWeightFactor) + cur_rtt_ms * kWeightFactor;
+}
+} // namespace
+
+class RtcpObserver : public RtcpRttStats {
+ public:
+ explicit RtcpObserver(CallStats* owner) : owner_(owner) {}
+ virtual ~RtcpObserver() {}
+
+ virtual void OnRttUpdate(int64_t rtt) {
+ owner_->OnRttUpdate(rtt);
+ }
+
+ // Returns the average RTT.
+ virtual int64_t LastProcessedRtt() const {
+ return owner_->avg_rtt_ms();
+ }
+
+ private:
+ CallStats* owner_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(RtcpObserver);
+};
+
+CallStats::CallStats(Clock* clock)
+ : clock_(clock),
+ crit_(CriticalSectionWrapper::CreateCriticalSection()),
+ rtcp_rtt_stats_(new RtcpObserver(this)),
+ last_process_time_(clock_->TimeInMilliseconds()),
+ max_rtt_ms_(0),
+ avg_rtt_ms_(0) {}
+
+CallStats::~CallStats() {
+ assert(observers_.empty());
+}
+
+int64_t CallStats::TimeUntilNextProcess() {
+ return last_process_time_ + kUpdateIntervalMs - clock_->TimeInMilliseconds();
+}
+
+int32_t CallStats::Process() {
+ CriticalSectionScoped cs(crit_.get());
+ int64_t now = clock_->TimeInMilliseconds();
+ if (now < last_process_time_ + kUpdateIntervalMs)
+ return 0;
+
+ last_process_time_ = now;
+
+ RemoveOldReports(now, &reports_);
+ max_rtt_ms_ = GetMaxRttMs(&reports_);
+ UpdateAvgRttMs(&reports_, &avg_rtt_ms_);
+
+ // If there is a valid rtt, update all observers with the max rtt.
+ // TODO(asapersson): Consider changing this to report the average rtt.
+ if (max_rtt_ms_ > 0) {
+ for (std::list<CallStatsObserver*>::iterator it = observers_.begin();
+ it != observers_.end(); ++it) {
+ (*it)->OnRttUpdate(avg_rtt_ms_, max_rtt_ms_);
+ }
+ }
+ return 0;
+}
+
+int64_t CallStats::avg_rtt_ms() const {
+ CriticalSectionScoped cs(crit_.get());
+ return avg_rtt_ms_;
+}
+
+RtcpRttStats* CallStats::rtcp_rtt_stats() const {
+ return rtcp_rtt_stats_.get();
+}
+
+void CallStats::RegisterStatsObserver(CallStatsObserver* observer) {
+ CriticalSectionScoped cs(crit_.get());
+ for (std::list<CallStatsObserver*>::iterator it = observers_.begin();
+ it != observers_.end(); ++it) {
+ if (*it == observer)
+ return;
+ }
+ observers_.push_back(observer);
+}
+
+void CallStats::DeregisterStatsObserver(CallStatsObserver* observer) {
+ CriticalSectionScoped cs(crit_.get());
+ for (std::list<CallStatsObserver*>::iterator it = observers_.begin();
+ it != observers_.end(); ++it) {
+ if (*it == observer) {
+ observers_.erase(it);
+ return;
+ }
+ }
+}
+
+void CallStats::OnRttUpdate(int64_t rtt) {
+ CriticalSectionScoped cs(crit_.get());
+ reports_.push_back(RttTime(rtt, clock_->TimeInMilliseconds()));
+}
+
+} // namespace webrtc
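
UpdateAvgRttMs above is an exponential smoother: each processing interval, the stored average moves kWeightFactor = 30% of the way toward the current windowed average, with zero reserved as a reset/initialize sentinel. A self-contained sketch of the same arithmetic, not patch code:

#include <cstdint>
#include <cstdio>

int main() {
  const float kWeightFactor = 0.3f;  // same constant as call_stats.cc
  int64_t avg_rtt_ms = 100;          // previously smoothed average, in ms
  const int64_t cur_rtt_ms = 120;    // new average over the 1.5 s window
  // 100 * (1 - 0.3) + 120 * 0.3 = 70 + 36 = 106
  avg_rtt_ms = avg_rtt_ms * (1.0f - kWeightFactor) + cur_rtt_ms * kWeightFactor;
  std::printf("smoothed rtt: %lld ms\n", static_cast<long long>(avg_rtt_ms));
  return 0;
}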
diff --git a/webrtc/video/call_stats.h b/webrtc/video/call_stats.h
new file mode 100644
index 0000000000..4ecd911b07
--- /dev/null
+++ b/webrtc/video/call_stats.h
@@ -0,0 +1,83 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_CALL_STATS_H_
+#define WEBRTC_VIDEO_CALL_STATS_H_
+
+#include <list>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/include/module.h"
+#include "webrtc/system_wrappers/include/clock.h"
+
+namespace webrtc {
+
+class CallStatsObserver;
+class CriticalSectionWrapper;
+class RtcpRttStats;
+
+// CallStats keeps track of statistics for a call.
+class CallStats : public Module {
+ public:
+ friend class RtcpObserver;
+
+ explicit CallStats(Clock* clock);
+ ~CallStats();
+
+ // Implements Module, to use the process thread.
+ int64_t TimeUntilNextProcess() override;
+ int32_t Process() override;
+
+ // Returns an RtcpRttStats to register with a statistics provider. The
+ // object has the same lifetime as the CallStats instance.
+ RtcpRttStats* rtcp_rtt_stats() const;
+
+ // Registers/deregisters a new observer to receive statistics updates.
+ void RegisterStatsObserver(CallStatsObserver* observer);
+ void DeregisterStatsObserver(CallStatsObserver* observer);
+
+ // Helper struct keeping track of the time an RTT value was reported.
+ struct RttTime {
+ RttTime(int64_t new_rtt, int64_t rtt_time)
+ : rtt(new_rtt), time(rtt_time) {}
+ const int64_t rtt;
+ const int64_t time;
+ };
+
+ protected:
+ void OnRttUpdate(int64_t rtt);
+
+ int64_t avg_rtt_ms() const;
+
+ private:
+ Clock* const clock_;
+ // Protecting all members.
+ rtc::scoped_ptr<CriticalSectionWrapper> crit_;
+ // Observer receiving statistics updates.
+ rtc::scoped_ptr<RtcpRttStats> rtcp_rtt_stats_;
+ // The last time 'Process' resulted in a statistics update.
+ int64_t last_process_time_;
+ // The last RTT in the statistics update (zero if there is no valid estimate).
+ int64_t max_rtt_ms_;
+ int64_t avg_rtt_ms_;
+
+ // All RTT reports within the valid time interval, oldest first.
+ std::list<RttTime> reports_;
+
+ // Observers getting stats reports.
+ std::list<CallStatsObserver*> observers_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(CallStats);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_CALL_STATS_H_
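
CallStats implements the Module interface, so it is meant to be driven by a process thread that polls TimeUntilNextProcess() and calls Process() on the roughly 1 s cadence of kUpdateIntervalMs. A minimal sketch of that wiring; the driver loop and the observer object are simplified assumptions for illustration, not lines from this patch:

webrtc::CallStats call_stats(webrtc::Clock::GetRealTimeClock());
call_stats.RegisterStatsObserver(&bitrate_observer);  // assumed observer
// The returned RtcpRttStats* is what an RTCP module feeds with raw
// per-report RTT samples via OnRttUpdate().
webrtc::RtcpRttStats* rtt_sink = call_stats.rtcp_rtt_stats();
// Simplified stand-in for a ProcessThread:
while (running) {
  if (call_stats.TimeUntilNextProcess() <= 0)
    call_stats.Process();  // prunes reports > 1.5 s old, updates max/avg
  SleepMs(10);             // webrtc/system_wrappers sleep helper
}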
diff --git a/webrtc/video/call_stats_unittest.cc b/webrtc/video/call_stats_unittest.cc
new file mode 100644
index 0000000000..6226a5bf6e
--- /dev/null
+++ b/webrtc/video/call_stats_unittest.cc
@@ -0,0 +1,204 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/video/call_stats.h"
+
+using ::testing::_;
+using ::testing::AnyNumber;
+using ::testing::Return;
+
+namespace webrtc {
+
+class MockStatsObserver : public CallStatsObserver {
+ public:
+ MockStatsObserver() {}
+ virtual ~MockStatsObserver() {}
+
+ MOCK_METHOD2(OnRttUpdate, void(int64_t, int64_t));
+};
+
+class CallStatsTest : public ::testing::Test {
+ public:
+ CallStatsTest() : fake_clock_(12345) {}
+
+ protected:
+ virtual void SetUp() { call_stats_.reset(new CallStats(&fake_clock_)); }
+ SimulatedClock fake_clock_;
+ rtc::scoped_ptr<CallStats> call_stats_;
+};
+
+TEST_F(CallStatsTest, AddAndTriggerCallback) {
+ MockStatsObserver stats_observer;
+ RtcpRttStats* rtcp_rtt_stats = call_stats_->rtcp_rtt_stats();
+ call_stats_->RegisterStatsObserver(&stats_observer);
+ fake_clock_.AdvanceTimeMilliseconds(1000);
+ EXPECT_EQ(0, rtcp_rtt_stats->LastProcessedRtt());
+
+ const int64_t kRtt = 25;
+ rtcp_rtt_stats->OnRttUpdate(kRtt);
+ EXPECT_CALL(stats_observer, OnRttUpdate(kRtt, kRtt)).Times(1);
+ call_stats_->Process();
+ EXPECT_EQ(kRtt, rtcp_rtt_stats->LastProcessedRtt());
+
+ const int64_t kRttTimeOutMs = 1500 + 10;
+ fake_clock_.AdvanceTimeMilliseconds(kRttTimeOutMs);
+ EXPECT_CALL(stats_observer, OnRttUpdate(_, _)).Times(0);
+ call_stats_->Process();
+ EXPECT_EQ(0, rtcp_rtt_stats->LastProcessedRtt());
+
+ call_stats_->DeregisterStatsObserver(&stats_observer);
+}
+
+TEST_F(CallStatsTest, ProcessTime) {
+ MockStatsObserver stats_observer;
+ call_stats_->RegisterStatsObserver(&stats_observer);
+ RtcpRttStats* rtcp_rtt_stats = call_stats_->rtcp_rtt_stats();
+ rtcp_rtt_stats->OnRttUpdate(100);
+
+ // Time isn't updated yet.
+ EXPECT_CALL(stats_observer, OnRttUpdate(_, _)).Times(0);
+ call_stats_->Process();
+
+ // Advance clock and verify we get an update.
+ fake_clock_.AdvanceTimeMilliseconds(1000);
+ EXPECT_CALL(stats_observer, OnRttUpdate(_, _)).Times(1);
+ call_stats_->Process();
+
+ // Advance the clock by just less than the interval; no update expected.
+ fake_clock_.AdvanceTimeMilliseconds(999);
+ rtcp_rtt_stats->OnRttUpdate(100);
+ EXPECT_CALL(stats_observer, OnRttUpdate(_, _)).Times(0);
+ call_stats_->Process();
+
+ // Advance enough to trigger a new update.
+ fake_clock_.AdvanceTimeMilliseconds(1);
+ EXPECT_CALL(stats_observer, OnRttUpdate(_, _)).Times(1);
+ call_stats_->Process();
+
+ call_stats_->DeregisterStatsObserver(&stats_observer);
+}
+
+// Verify all observers get correct estimates and observers can be added and
+// removed.
+TEST_F(CallStatsTest, MultipleObservers) {
+ MockStatsObserver stats_observer_1;
+ call_stats_->RegisterStatsObserver(&stats_observer_1);
+ // Add the second observer twice; there should still be only one report
+ // to the observer.
+ MockStatsObserver stats_observer_2;
+ call_stats_->RegisterStatsObserver(&stats_observer_2);
+ call_stats_->RegisterStatsObserver(&stats_observer_2);
+
+ RtcpRttStats* rtcp_rtt_stats = call_stats_->rtcp_rtt_stats();
+ const int64_t kRtt = 100;
+ rtcp_rtt_stats->OnRttUpdate(kRtt);
+
+ // Verify both observers are updated.
+ fake_clock_.AdvanceTimeMilliseconds(1000);
+ EXPECT_CALL(stats_observer_1, OnRttUpdate(kRtt, kRtt)).Times(1);
+ EXPECT_CALL(stats_observer_2, OnRttUpdate(kRtt, kRtt)).Times(1);
+ call_stats_->Process();
+
+ // Deregister the second observer and verify update is only sent to the first
+ // observer.
+ call_stats_->DeregisterStatsObserver(&stats_observer_2);
+ rtcp_rtt_stats->OnRttUpdate(kRtt);
+ fake_clock_.AdvanceTimeMilliseconds(1000);
+ EXPECT_CALL(stats_observer_1, OnRttUpdate(kRtt, kRtt)).Times(1);
+ EXPECT_CALL(stats_observer_2, OnRttUpdate(kRtt, kRtt)).Times(0);
+ call_stats_->Process();
+
+ // Deregister the first observer.
+ call_stats_->DeregisterStatsObserver(&stats_observer_1);
+ rtcp_rtt_stats->OnRttUpdate(kRtt);
+ fake_clock_.AdvanceTimeMilliseconds(1000);
+ EXPECT_CALL(stats_observer_1, OnRttUpdate(kRtt, kRtt)).Times(0);
+ EXPECT_CALL(stats_observer_2, OnRttUpdate(kRtt, kRtt)).Times(0);
+ call_stats_->Process();
+}
+
+// Verify increasing and decreasing rtt trigger callbacks with correct values.
+TEST_F(CallStatsTest, ChangeRtt) {
+ MockStatsObserver stats_observer;
+ call_stats_->RegisterStatsObserver(&stats_observer);
+ RtcpRttStats* rtcp_rtt_stats = call_stats_->rtcp_rtt_stats();
+
+ // Advance clock to be ready for an update.
+ fake_clock_.AdvanceTimeMilliseconds(1000);
+
+ // Set an initial value and verify the callback is triggered.
+ const int64_t kFirstRtt = 100;
+ rtcp_rtt_stats->OnRttUpdate(kFirstRtt);
+ EXPECT_CALL(stats_observer, OnRttUpdate(kFirstRtt, kFirstRtt)).Times(1);
+ call_stats_->Process();
+
+ // Increase rtt and verify the new value is reported.
+ fake_clock_.AdvanceTimeMilliseconds(1000);
+ const int64_t kHighRtt = kFirstRtt + 20;
+ const int64_t kAvgRtt1 = 103;
+ rtcp_rtt_stats->OnRttUpdate(kHighRtt);
+ EXPECT_CALL(stats_observer, OnRttUpdate(kAvgRtt1, kHighRtt)).Times(1);
+ call_stats_->Process();
+
+ // Advance time enough for a new update, but not so much that the high
+ // rtt report times out. Report a lower rtt and verify the old/high value
+ // is still sent in the callback.
+ fake_clock_.AdvanceTimeMilliseconds(1000);
+ const int64_t kLowRtt = kFirstRtt - 20;
+ const int64_t kAvgRtt2 = 102;
+ rtcp_rtt_stats->OnRttUpdate(kLowRtt);
+ EXPECT_CALL(stats_observer, OnRttUpdate(kAvgRtt2, kHighRtt)).Times(1);
+ call_stats_->Process();
+
+ // Advance time to make the high report invalid; the lower rtt should now
+ // be in the callback.
+ fake_clock_.AdvanceTimeMilliseconds(1000);
+ const int64_t kAvgRtt3 = 95;
+ EXPECT_CALL(stats_observer, OnRttUpdate(kAvgRtt3, kLowRtt)).Times(1);
+ call_stats_->Process();
+
+ call_stats_->DeregisterStatsObserver(&stats_observer);
+}
+
+TEST_F(CallStatsTest, LastProcessedRtt) {
+ MockStatsObserver stats_observer;
+ call_stats_->RegisterStatsObserver(&stats_observer);
+ RtcpRttStats* rtcp_rtt_stats = call_stats_->rtcp_rtt_stats();
+ fake_clock_.AdvanceTimeMilliseconds(1000);
+
+ // Set initial values and verify that LastProcessedRtt initially returns
+ // the average rtt.
+ const int64_t kRttLow = 10;
+ const int64_t kRttHigh = 30;
+ const int64_t kAvgRtt = 20;
+ rtcp_rtt_stats->OnRttUpdate(kRttLow);
+ rtcp_rtt_stats->OnRttUpdate(kRttHigh);
+ EXPECT_CALL(stats_observer, OnRttUpdate(kAvgRtt, kRttHigh)).Times(1);
+ call_stats_->Process();
+ EXPECT_EQ(kAvgRtt, rtcp_rtt_stats->LastProcessedRtt());
+
+ // Update values and verify LastProcessedRtt.
+ fake_clock_.AdvanceTimeMilliseconds(1000);
+ rtcp_rtt_stats->OnRttUpdate(kRttLow);
+ rtcp_rtt_stats->OnRttUpdate(kRttHigh);
+ EXPECT_CALL(stats_observer, OnRttUpdate(kAvgRtt, kRttHigh)).Times(1);
+ call_stats_->Process();
+ EXPECT_EQ(kAvgRtt, rtcp_rtt_stats->LastProcessedRtt());
+
+ call_stats_->DeregisterStatsObserver(&stats_observer);
+}
+
+} // namespace webrtc
diff --git a/webrtc/video/encoded_frame_callback_adapter.cc b/webrtc/video/encoded_frame_callback_adapter.cc
index 407801fd0c..4c6823fa47 100644
--- a/webrtc/video/encoded_frame_callback_adapter.cc
+++ b/webrtc/video/encoded_frame_callback_adapter.cc
@@ -11,7 +11,7 @@
#include "webrtc/video/encoded_frame_callback_adapter.h"
#include "webrtc/base/checks.h"
-#include "webrtc/modules/video_coding/main/source/encoded_frame.h"
+#include "webrtc/modules/video_coding/encoded_frame.h"
namespace webrtc {
namespace internal {
diff --git a/webrtc/video/encoded_frame_callback_adapter.h b/webrtc/video/encoded_frame_callback_adapter.h
index b39a8e2167..b10c4f1645 100644
--- a/webrtc/video/encoded_frame_callback_adapter.h
+++ b/webrtc/video/encoded_frame_callback_adapter.h
@@ -11,7 +11,7 @@
#ifndef WEBRTC_VIDEO_ENCODED_FRAME_CALLBACK_ADAPTER_H_
#define WEBRTC_VIDEO_ENCODED_FRAME_CALLBACK_ADAPTER_H_
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/frame_callback.h"
namespace webrtc {
diff --git a/webrtc/video/encoder_state_feedback.cc b/webrtc/video/encoder_state_feedback.cc
new file mode 100644
index 0000000000..c0c4b67dbd
--- /dev/null
+++ b/webrtc/video/encoder_state_feedback.cc
@@ -0,0 +1,124 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video/encoder_state_feedback.h"
+
+#include <assert.h>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/video/vie_encoder.h"
+
+namespace webrtc {
+
+// Helper class registered at the RTP module, relaying callbacks to
+// EncoderStateFeedback.
+class EncoderStateFeedbackObserver : public RtcpIntraFrameObserver {
+ public:
+ explicit EncoderStateFeedbackObserver(EncoderStateFeedback* owner)
+ : owner_(owner) {}
+ ~EncoderStateFeedbackObserver() {}
+
+ // Implements RtcpIntraFrameObserver.
+ virtual void OnReceivedIntraFrameRequest(uint32_t ssrc) {
+ owner_->OnReceivedIntraFrameRequest(ssrc);
+ }
+ virtual void OnReceivedSLI(uint32_t ssrc, uint8_t picture_id) {
+ owner_->OnReceivedSLI(ssrc, picture_id);
+ }
+ virtual void OnReceivedRPSI(uint32_t ssrc, uint64_t picture_id) {
+ owner_->OnReceivedRPSI(ssrc, picture_id);
+ }
+
+ virtual void OnLocalSsrcChanged(uint32_t old_ssrc, uint32_t new_ssrc) {
+ owner_->OnLocalSsrcChanged(old_ssrc, new_ssrc);
+ }
+
+ private:
+ EncoderStateFeedback* owner_;
+};
+
+EncoderStateFeedback::EncoderStateFeedback()
+ : crit_(CriticalSectionWrapper::CreateCriticalSection()),
+ observer_(new EncoderStateFeedbackObserver(this)) {}
+
+EncoderStateFeedback::~EncoderStateFeedback() {
+ assert(encoders_.empty());
+}
+
+void EncoderStateFeedback::AddEncoder(const std::vector<uint32_t>& ssrcs,
+ ViEEncoder* encoder) {
+ RTC_DCHECK(!ssrcs.empty());
+ CriticalSectionScoped lock(crit_.get());
+ for (uint32_t ssrc : ssrcs) {
+ RTC_DCHECK(encoders_.find(ssrc) == encoders_.end());
+ encoders_[ssrc] = encoder;
+ }
+}
+
+void EncoderStateFeedback::RemoveEncoder(const ViEEncoder* encoder) {
+ CriticalSectionScoped lock(crit_.get());
+ SsrcEncoderMap::iterator it = encoders_.begin();
+ while (it != encoders_.end()) {
+ if (it->second == encoder) {
+ encoders_.erase(it++);
+ } else {
+ ++it;
+ }
+ }
+}
+
+RtcpIntraFrameObserver* EncoderStateFeedback::GetRtcpIntraFrameObserver() {
+ return observer_.get();
+}
+
+void EncoderStateFeedback::OnReceivedIntraFrameRequest(uint32_t ssrc) {
+ CriticalSectionScoped lock(crit_.get());
+ SsrcEncoderMap::iterator it = encoders_.find(ssrc);
+ if (it == encoders_.end())
+ return;
+
+ it->second->OnReceivedIntraFrameRequest(ssrc);
+}
+
+void EncoderStateFeedback::OnReceivedSLI(uint32_t ssrc, uint8_t picture_id) {
+ CriticalSectionScoped lock(crit_.get());
+ SsrcEncoderMap::iterator it = encoders_.find(ssrc);
+ if (it == encoders_.end())
+ return;
+
+ it->second->OnReceivedSLI(ssrc, picture_id);
+}
+
+void EncoderStateFeedback::OnReceivedRPSI(uint32_t ssrc, uint64_t picture_id) {
+ CriticalSectionScoped lock(crit_.get());
+ SsrcEncoderMap::iterator it = encoders_.find(ssrc);
+ if (it == encoders_.end())
+ return;
+
+ it->second->OnReceivedRPSI(ssrc, picture_id);
+}
+
+void EncoderStateFeedback::OnLocalSsrcChanged(uint32_t old_ssrc,
+ uint32_t new_ssrc) {
+ CriticalSectionScoped lock(crit_.get());
+ SsrcEncoderMap::iterator it = encoders_.find(old_ssrc);
+ if (it == encoders_.end() || encoders_.find(new_ssrc) != encoders_.end()) {
+ return;
+ }
+
+ ViEEncoder* encoder = it->second;
+ encoders_.erase(it);
+ encoders_[new_ssrc] = encoder;
+ encoder->OnLocalSsrcChanged(old_ssrc, new_ssrc);
+}
+
+} // namespace webrtc
diff --git a/webrtc/video/encoder_state_feedback.h b/webrtc/video/encoder_state_feedback.h
new file mode 100644
index 0000000000..620e382d89
--- /dev/null
+++ b/webrtc/video/encoder_state_feedback.h
@@ -0,0 +1,71 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// TODO(mflodman) ViEEncoder has a time check to not send key frames too
+// often; move the logic to this class.
+
+#ifndef WEBRTC_VIDEO_ENCODER_STATE_FEEDBACK_H_
+#define WEBRTC_VIDEO_ENCODER_STATE_FEEDBACK_H_
+
+#include <map>
+#include <vector>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class EncoderStateFeedbackObserver;
+class RtcpIntraFrameObserver;
+class ViEEncoder;
+
+class EncoderStateFeedback {
+ public:
+ friend class EncoderStateFeedbackObserver;
+
+ EncoderStateFeedback();
+ ~EncoderStateFeedback();
+
+ // Adds an encoder to receive feedback for a set of SSRCs.
+ void AddEncoder(const std::vector<uint32_t>& ssrcs, ViEEncoder* encoder);
+
+ // Removes a registered ViEEncoder.
+ void RemoveEncoder(const ViEEncoder* encoder);
+
+ // Returns an observer to register with the requesting class. The observer has
+ // the same lifetime as the EncoderStateFeedback instance.
+ RtcpIntraFrameObserver* GetRtcpIntraFrameObserver();
+
+ protected:
+ // Called by EncoderStateFeedbackObserver when a new key frame is requested.
+ void OnReceivedIntraFrameRequest(uint32_t ssrc);
+ void OnReceivedSLI(uint32_t ssrc, uint8_t picture_id);
+ void OnReceivedRPSI(uint32_t ssrc, uint64_t picture_id);
+ void OnLocalSsrcChanged(uint32_t old_ssrc, uint32_t new_ssrc);
+
+ private:
+ typedef std::map<uint32_t, ViEEncoder*> SsrcEncoderMap;
+
+ rtc::scoped_ptr<CriticalSectionWrapper> crit_;
+
+ // Instance registered at the class requesting new key frames.
+ rtc::scoped_ptr<EncoderStateFeedbackObserver> observer_;
+
+ // Maps a unique ssrc to the given encoder.
+ SsrcEncoderMap encoders_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(EncoderStateFeedback);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_ENCODER_STATE_FEEDBACK_H_
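
EncoderStateFeedback repeats the indirection already used in call_stats.cc: a private relay class implements the externally required interface (RtcpIntraFrameObserver here, RtcpRttStats there) and forwards into the owner, which befriends the relay so the real handlers can stay protected. Reduced to a generic, self-contained skeleton; none of these names are patch code:

#include <cstdint>

class CallbackInterface {            // stand-in for RtcpIntraFrameObserver
 public:
  virtual ~CallbackInterface() {}
  virtual void OnEvent(uint32_t ssrc) = 0;
};

class Owner;

class Relay : public CallbackInterface {
 public:
  explicit Relay(Owner* owner) : owner_(owner) {}
  void OnEvent(uint32_t ssrc) override;  // forwards to Owner::OnEvent
 private:
  Owner* const owner_;
};

class Owner {
 public:
  friend class Relay;                       // lets the relay call OnEvent()
  CallbackInterface* callback() { return &relay_; }
 protected:
  void OnEvent(uint32_t ssrc) { /* dispatch on ssrc, under a lock */ }
 private:
  Relay relay_{this};
};

void Relay::OnEvent(uint32_t ssrc) { owner_->OnEvent(ssrc); }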
diff --git a/webrtc/video/encoder_state_feedback_unittest.cc b/webrtc/video/encoder_state_feedback_unittest.cc
new file mode 100644
index 0000000000..834447e513
--- /dev/null
+++ b/webrtc/video/encoder_state_feedback_unittest.cc
@@ -0,0 +1,143 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+// This file includes unit tests for EncoderStateFeedback.
+#include "webrtc/video/encoder_state_feedback.h"
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/common.h"
+#include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"
+#include "webrtc/modules/pacing/paced_sender.h"
+#include "webrtc/modules/pacing/packet_router.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "webrtc/modules/utility/include/mock/mock_process_thread.h"
+#include "webrtc/video/payload_router.h"
+#include "webrtc/video/vie_encoder.h"
+
+using ::testing::NiceMock;
+
+namespace webrtc {
+
+class MockVieEncoder : public ViEEncoder {
+ public:
+ explicit MockVieEncoder(ProcessThread* process_thread, PacedSender* pacer)
+ : ViEEncoder(1, process_thread, nullptr, nullptr, pacer, nullptr) {}
+ ~MockVieEncoder() {}
+
+ MOCK_METHOD1(OnReceivedIntraFrameRequest,
+ void(uint32_t));
+ MOCK_METHOD2(OnReceivedSLI,
+ void(uint32_t ssrc, uint8_t picture_id));
+ MOCK_METHOD2(OnReceivedRPSI,
+ void(uint32_t ssrc, uint64_t picture_id));
+ MOCK_METHOD2(OnLocalSsrcChanged,
+ void(uint32_t old_ssrc, uint32_t new_ssrc));
+};
+
+class VieKeyRequestTest : public ::testing::Test {
+ protected:
+ VieKeyRequestTest()
+ : pacer_(Clock::GetRealTimeClock(),
+ &router_,
+ BitrateController::kDefaultStartBitrateKbps,
+ PacedSender::kDefaultPaceMultiplier *
+ BitrateController::kDefaultStartBitrateKbps,
+ 0) {}
+ virtual void SetUp() {
+ process_thread_.reset(new NiceMock<MockProcessThread>);
+ encoder_state_feedback_.reset(new EncoderStateFeedback());
+ }
+ rtc::scoped_ptr<MockProcessThread> process_thread_;
+ rtc::scoped_ptr<EncoderStateFeedback> encoder_state_feedback_;
+ PacketRouter router_;
+ PacedSender pacer_;
+};
+
+TEST_F(VieKeyRequestTest, CreateAndTriggerRequests) {
+ const int ssrc = 1234;
+ MockVieEncoder encoder(process_thread_.get(), &pacer_);
+ encoder_state_feedback_->AddEncoder(std::vector<uint32_t>(1, ssrc), &encoder);
+
+ EXPECT_CALL(encoder, OnReceivedIntraFrameRequest(ssrc))
+ .Times(1);
+ encoder_state_feedback_->GetRtcpIntraFrameObserver()->
+ OnReceivedIntraFrameRequest(ssrc);
+
+ const uint8_t sli_picture_id = 3;
+ EXPECT_CALL(encoder, OnReceivedSLI(ssrc, sli_picture_id))
+ .Times(1);
+ encoder_state_feedback_->GetRtcpIntraFrameObserver()->OnReceivedSLI(
+ ssrc, sli_picture_id);
+
+ const uint64_t rpsi_picture_id = 9;
+ EXPECT_CALL(encoder, OnReceivedRPSI(ssrc, rpsi_picture_id))
+ .Times(1);
+ encoder_state_feedback_->GetRtcpIntraFrameObserver()->OnReceivedRPSI(
+ ssrc, rpsi_picture_id);
+
+ encoder_state_feedback_->RemoveEncoder(&encoder);
+}
+
+// Register multiple encoders and make sure the request is relayed to the
+// correct ViEEncoder.
+TEST_F(VieKeyRequestTest, MultipleEncoders) {
+ const int ssrc_1 = 1234;
+ const int ssrc_2 = 5678;
+ MockVieEncoder encoder_1(process_thread_.get(), &pacer_);
+ MockVieEncoder encoder_2(process_thread_.get(), &pacer_);
+ encoder_state_feedback_->AddEncoder(std::vector<uint32_t>(1, ssrc_1),
+ &encoder_1);
+ encoder_state_feedback_->AddEncoder(std::vector<uint32_t>(1, ssrc_2),
+ &encoder_2);
+
+ EXPECT_CALL(encoder_1, OnReceivedIntraFrameRequest(ssrc_1))
+ .Times(1);
+ EXPECT_CALL(encoder_2, OnReceivedIntraFrameRequest(ssrc_2))
+ .Times(1);
+ encoder_state_feedback_->GetRtcpIntraFrameObserver()->
+ OnReceivedIntraFrameRequest(ssrc_1);
+ encoder_state_feedback_->GetRtcpIntraFrameObserver()->
+ OnReceivedIntraFrameRequest(ssrc_2);
+
+ const uint8_t sli_pid_1 = 3;
+ const uint8_t sli_pid_2 = 4;
+ EXPECT_CALL(encoder_1, OnReceivedSLI(ssrc_1, sli_pid_1))
+ .Times(1);
+ EXPECT_CALL(encoder_2, OnReceivedSLI(ssrc_2, sli_pid_2))
+ .Times(1);
+ encoder_state_feedback_->GetRtcpIntraFrameObserver()->OnReceivedSLI(
+ ssrc_1, sli_pid_1);
+ encoder_state_feedback_->GetRtcpIntraFrameObserver()->OnReceivedSLI(
+ ssrc_2, sli_pid_2);
+
+ const uint64_t rpsi_pid_1 = 9;
+ const uint64_t rpsi_pid_2 = 10;
+ EXPECT_CALL(encoder_1, OnReceivedRPSI(ssrc_1, rpsi_pid_1))
+ .Times(1);
+ EXPECT_CALL(encoder_2, OnReceivedRPSI(ssrc_2, rpsi_pid_2))
+ .Times(1);
+ encoder_state_feedback_->GetRtcpIntraFrameObserver()->OnReceivedRPSI(
+ ssrc_1, rpsi_pid_1);
+ encoder_state_feedback_->GetRtcpIntraFrameObserver()->OnReceivedRPSI(
+ ssrc_2, rpsi_pid_2);
+
+ encoder_state_feedback_->RemoveEncoder(&encoder_1);
+ EXPECT_CALL(encoder_2, OnReceivedIntraFrameRequest(ssrc_2))
+ .Times(1);
+ encoder_state_feedback_->GetRtcpIntraFrameObserver()->
+ OnReceivedIntraFrameRequest(ssrc_2);
+ encoder_state_feedback_->RemoveEncoder(&encoder_2);
+}
+
+} // namespace webrtc
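
The end_to_end_tests.cc diff that follows is dominated by two mechanical renames (send_stream_/receive_streams_ to video_send_stream_/video_receive_streams_, ModifyConfigs to ModifyVideoConfigs) plus one behavioral swap: the heap-allocated EventWrapper is replaced by a stack-allocated rtc::Event whose Wait() returns bool. The event pattern in isolation, with the old form shown in comments:

// Before: factory-created wrapper, enum-valued wait.
//   rtc::scoped_ptr<EventWrapper> event_(EventWrapper::Create());
//   event_->Set();
//   EXPECT_EQ(kEventSignaled, event_->Wait(kDefaultTimeoutMs));
// After: a plain member; the constructor arguments are
// (manual_reset, initially_signaled), both false throughout this patch.
rtc::Event event_(false, false);
event_.Set();
EXPECT_TRUE(event_.Wait(kDefaultTimeoutMs));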
diff --git a/webrtc/video/end_to_end_tests.cc b/webrtc/video/end_to_end_tests.cc
index e86f560cfd..48dc3e8bbd 100644
--- a/webrtc/video/end_to_end_tests.cc
+++ b/webrtc/video/end_to_end_tests.cc
@@ -24,15 +24,13 @@
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
#include "webrtc/modules/video_coding/codecs/vp9/include/vp9.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding_defines.h"
+#include "webrtc/modules/video_coding/include/video_coding_defines.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/event_wrapper.h"
#include "webrtc/system_wrappers/include/metrics.h"
#include "webrtc/system_wrappers/include/sleep.h"
#include "webrtc/test/call_test.h"
#include "webrtc/test/direct_transport.h"
#include "webrtc/test/encoder_settings.h"
-#include "webrtc/test/fake_audio_device.h"
#include "webrtc/test/fake_decoder.h"
#include "webrtc/test/fake_encoder.h"
#include "webrtc/test/frame_generator.h"
@@ -42,21 +40,20 @@
#include "webrtc/test/rtcp_packet_parser.h"
#include "webrtc/test/rtp_rtcp_observer.h"
#include "webrtc/test/testsupport/fileutils.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
#include "webrtc/test/testsupport/perf_test.h"
#include "webrtc/video_encoder.h"
namespace webrtc {
-static const unsigned long kSilenceTimeoutMs = 2000;
+static const int kSilenceTimeoutMs = 2000;
class EndToEndTest : public test::CallTest {
public:
EndToEndTest() {}
virtual ~EndToEndTest() {
- EXPECT_EQ(nullptr, send_stream_);
- EXPECT_TRUE(receive_streams_.empty());
+ EXPECT_EQ(nullptr, video_send_stream_);
+ EXPECT_TRUE(video_receive_streams_.empty());
}
protected:
@@ -81,20 +78,20 @@ class EndToEndTest : public test::CallTest {
void TestXrReceiverReferenceTimeReport(bool enable_rrtr);
void TestSendsSetSsrcs(size_t num_ssrcs, bool send_single_ssrc_first);
void TestRtpStatePreservation(bool use_rtx);
- void VerifyHistogramStats(bool use_rtx, bool use_red);
+ void VerifyHistogramStats(bool use_rtx, bool use_red, bool screenshare);
};
TEST_F(EndToEndTest, ReceiverCanBeStartedTwice) {
CreateCalls(Call::Config(), Call::Config());
test::NullTransport transport;
- CreateSendConfig(1, &transport);
+ CreateSendConfig(1, 0, &transport);
CreateMatchingReceiveConfigs(&transport);
- CreateStreams();
+ CreateVideoStreams();
- receive_streams_[0]->Start();
- receive_streams_[0]->Start();
+ video_receive_streams_[0]->Start();
+ video_receive_streams_[0]->Start();
DestroyStreams();
}
@@ -103,13 +100,13 @@ TEST_F(EndToEndTest, ReceiverCanBeStoppedTwice) {
CreateCalls(Call::Config(), Call::Config());
test::NullTransport transport;
- CreateSendConfig(1, &transport);
+ CreateSendConfig(1, 0, &transport);
CreateMatchingReceiveConfigs(&transport);
- CreateStreams();
+ CreateVideoStreams();
- receive_streams_[0]->Stop();
- receive_streams_[0]->Stop();
+ video_receive_streams_[0]->Stop();
+ video_receive_streams_[0]->Stop();
DestroyStreams();
}
@@ -124,33 +121,33 @@ TEST_F(EndToEndTest, RendersSingleDelayedFrame) {
class Renderer : public VideoRenderer {
public:
- Renderer() : event_(EventWrapper::Create()) {}
+ Renderer() : event_(false, false) {}
void RenderFrame(const VideoFrame& video_frame,
int /*time_to_render_ms*/) override {
- event_->Set();
+ event_.Set();
}
bool IsTextureSupported() const override { return false; }
- EventTypeWrapper Wait() { return event_->Wait(kDefaultTimeoutMs); }
+ bool Wait() { return event_.Wait(kDefaultTimeoutMs); }
- rtc::scoped_ptr<EventWrapper> event_;
+ rtc::Event event_;
} renderer;
class TestFrameCallback : public I420FrameCallback {
public:
- TestFrameCallback() : event_(EventWrapper::Create()) {}
+ TestFrameCallback() : event_(false, false) {}
- EventTypeWrapper Wait() { return event_->Wait(kDefaultTimeoutMs); }
+ bool Wait() { return event_.Wait(kDefaultTimeoutMs); }
private:
void FrameCallback(VideoFrame* frame) override {
SleepMs(kDelayRenderCallbackMs);
- event_->Set();
+ event_.Set();
}
- rtc::scoped_ptr<EventWrapper> event_;
+ rtc::Event event_;
};
CreateCalls(Call::Config(), Call::Config());
@@ -160,24 +157,25 @@ TEST_F(EndToEndTest, RendersSingleDelayedFrame) {
sender_transport.SetReceiver(receiver_call_->Receiver());
receiver_transport.SetReceiver(sender_call_->Receiver());
- CreateSendConfig(1, &sender_transport);
+ CreateSendConfig(1, 0, &sender_transport);
CreateMatchingReceiveConfigs(&receiver_transport);
TestFrameCallback pre_render_callback;
- receive_configs_[0].pre_render_callback = &pre_render_callback;
- receive_configs_[0].renderer = &renderer;
+ video_receive_configs_[0].pre_render_callback = &pre_render_callback;
+ video_receive_configs_[0].renderer = &renderer;
- CreateStreams();
+ CreateVideoStreams();
Start();
// Create frames that are smaller than the send width/height; this is done
// to check that the callbacks are done after processing video.
rtc::scoped_ptr<test::FrameGenerator> frame_generator(
test::FrameGenerator::CreateChromaGenerator(kWidth, kHeight));
- send_stream_->Input()->IncomingCapturedFrame(*frame_generator->NextFrame());
- EXPECT_EQ(kEventSignaled, pre_render_callback.Wait())
+ video_send_stream_->Input()->IncomingCapturedFrame(
+ *frame_generator->NextFrame());
+ EXPECT_TRUE(pre_render_callback.Wait())
<< "Timed out while waiting for pre-render callback.";
- EXPECT_EQ(kEventSignaled, renderer.Wait())
+ EXPECT_TRUE(renderer.Wait())
<< "Timed out while waiting for the frame to render.";
Stop();
@@ -191,17 +189,17 @@ TEST_F(EndToEndTest, RendersSingleDelayedFrame) {
TEST_F(EndToEndTest, TransmitsFirstFrame) {
class Renderer : public VideoRenderer {
public:
- Renderer() : event_(EventWrapper::Create()) {}
+ Renderer() : event_(false, false) {}
void RenderFrame(const VideoFrame& video_frame,
int /*time_to_render_ms*/) override {
- event_->Set();
+ event_.Set();
}
bool IsTextureSupported() const override { return false; }
- EventTypeWrapper Wait() { return event_->Wait(kDefaultTimeoutMs); }
+ bool Wait() { return event_.Wait(kDefaultTimeoutMs); }
- rtc::scoped_ptr<EventWrapper> event_;
+ rtc::Event event_;
} renderer;
CreateCalls(Call::Config(), Call::Config());
@@ -211,19 +209,21 @@ TEST_F(EndToEndTest, TransmitsFirstFrame) {
sender_transport.SetReceiver(receiver_call_->Receiver());
receiver_transport.SetReceiver(sender_call_->Receiver());
- CreateSendConfig(1, &sender_transport);
+ CreateSendConfig(1, 0, &sender_transport);
CreateMatchingReceiveConfigs(&receiver_transport);
- receive_configs_[0].renderer = &renderer;
+ video_receive_configs_[0].renderer = &renderer;
- CreateStreams();
+ CreateVideoStreams();
Start();
rtc::scoped_ptr<test::FrameGenerator> frame_generator(
test::FrameGenerator::CreateChromaGenerator(
- encoder_config_.streams[0].width, encoder_config_.streams[0].height));
- send_stream_->Input()->IncomingCapturedFrame(*frame_generator->NextFrame());
+ video_encoder_config_.streams[0].width,
+ video_encoder_config_.streams[0].height));
+ video_send_stream_->Input()->IncomingCapturedFrame(
+ *frame_generator->NextFrame());
- EXPECT_EQ(kEventSignaled, renderer.Wait())
+ EXPECT_TRUE(renderer.Wait())
<< "Timed out while waiting for the frame to render.";
Stop();
@@ -244,13 +244,14 @@ TEST_F(EndToEndTest, SendsAndReceivesVP9) {
frame_counter_(0) {}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
+ EXPECT_TRUE(Wait())
<< "Timed out while waiting for enough frames to be decoded.";
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->encoder_settings.encoder = encoder_.get();
send_config->encoder_settings.payload_name = "VP9";
send_config->encoder_settings.payload_type = 124;
@@ -271,7 +272,7 @@ TEST_F(EndToEndTest, SendsAndReceivesVP9) {
int time_to_render_ms) override {
const int kRequiredFrames = 500;
if (++frame_counter_ == kRequiredFrames)
- observation_complete_->Set();
+ observation_complete_.Set();
}
bool IsTextureSupported() const override { return false; }
@@ -282,7 +283,7 @@ TEST_F(EndToEndTest, SendsAndReceivesVP9) {
int frame_counter_;
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(EndToEndTest, SendsAndReceivesH264) {
@@ -294,18 +295,19 @@ TEST_F(EndToEndTest, SendsAndReceivesH264) {
frame_counter_(0) {}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
+ EXPECT_TRUE(Wait())
<< "Timed out while waiting for enough frames to be decoded.";
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->rtp.nack.rtp_history_ms =
(*receive_configs)[0].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
send_config->encoder_settings.encoder = &fake_encoder_;
send_config->encoder_settings.payload_name = "H264";
- send_config->encoder_settings.payload_type = kFakeSendPayloadType;
+ send_config->encoder_settings.payload_type = kFakeVideoSendPayloadType;
encoder_config->streams[0].min_bitrate_bps = 50000;
encoder_config->streams[0].target_bitrate_bps =
encoder_config->streams[0].max_bitrate_bps = 2000000;
@@ -323,7 +325,7 @@ TEST_F(EndToEndTest, SendsAndReceivesH264) {
int time_to_render_ms) override {
const int kRequiredFrames = 500;
if (++frame_counter_ == kRequiredFrames)
- observation_complete_->Set();
+ observation_complete_.Set();
}
bool IsTextureSupported() const override { return false; }
@@ -334,7 +336,7 @@ TEST_F(EndToEndTest, SendsAndReceivesH264) {
int frame_counter_;
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(EndToEndTest, ReceiverUsesLocalSsrc) {
@@ -350,19 +352,19 @@ TEST_F(EndToEndTest, ReceiverUsesLocalSsrc) {
ssrc |= static_cast<uint32_t>(packet[5]) << 16;
ssrc |= static_cast<uint32_t>(packet[6]) << 8;
ssrc |= static_cast<uint32_t>(packet[7]) << 0;
- EXPECT_EQ(kReceiverLocalSsrc, ssrc);
- observation_complete_->Set();
+ EXPECT_EQ(kReceiverLocalVideoSsrc, ssrc);
+ observation_complete_.Set();
return SEND_PACKET;
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
+ EXPECT_TRUE(Wait())
<< "Timed out while waiting for a receiver RTCP packet to be sent.";
}
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(EndToEndTest, ReceivesAndRetransmitsNack) {
@@ -373,7 +375,6 @@ TEST_F(EndToEndTest, ReceivesAndRetransmitsNack) {
public:
NackObserver()
: EndToEndTest(kLongTimeoutMs),
- rtp_parser_(RtpHeaderParser::Create()),
sent_rtp_packets_(0),
packets_left_to_drop_(0),
nacks_left_(kNumberOfNacksToObserve) {}
@@ -382,7 +383,7 @@ TEST_F(EndToEndTest, ReceivesAndRetransmitsNack) {
Action OnSendRtp(const uint8_t* packet, size_t length) override {
rtc::CritScope lock(&crit_);
RTPHeader header;
- EXPECT_TRUE(rtp_parser_->Parse(packet, length, &header));
+ EXPECT_TRUE(parser_->Parse(packet, length, &header));
// Never drop retransmitted packets.
if (dropped_packets_.find(header.sequenceNumber) !=
@@ -390,7 +391,7 @@ TEST_F(EndToEndTest, ReceivesAndRetransmitsNack) {
retransmitted_packets_.insert(header.sequenceNumber);
if (nacks_left_ <= 0 &&
retransmitted_packets_.size() == dropped_packets_.size()) {
- observation_complete_->Set();
+ observation_complete_.Set();
}
return SEND_PACKET;
}
@@ -431,21 +432,21 @@ TEST_F(EndToEndTest, ReceivesAndRetransmitsNack) {
return SEND_PACKET;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
(*receive_configs)[0].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
+ EXPECT_TRUE(Wait())
<< "Timed out waiting for packets to be NACKed, retransmitted and "
"rendered.";
}
rtc::CriticalSection crit_;
- rtc::scoped_ptr<RtpHeaderParser> rtp_parser_;
std::set<uint16_t> dropped_packets_;
std::set<uint16_t> retransmitted_packets_;
uint64_t sent_rtp_packets_;
@@ -453,7 +454,7 @@ TEST_F(EndToEndTest, ReceivesAndRetransmitsNack) {
int nacks_left_ GUARDED_BY(&crit_);
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(EndToEndTest, CanReceiveFec) {
@@ -472,10 +473,10 @@ TEST_F(EndToEndTest, CanReceiveFec) {
if (header.payloadType == kRedPayloadType) {
encapsulated_payload_type =
static_cast<int>(packet[header.headerLength]);
- if (encapsulated_payload_type != kFakeSendPayloadType)
+ if (encapsulated_payload_type != kFakeVideoSendPayloadType)
EXPECT_EQ(kUlpfecPayloadType, encapsulated_payload_type);
} else {
- EXPECT_EQ(kFakeSendPayloadType, header.payloadType);
+ EXPECT_EQ(kFakeVideoSendPayloadType, header.payloadType);
}
if (protected_sequence_numbers_.count(header.sequenceNumber) != 0) {
@@ -499,7 +500,7 @@ TEST_F(EndToEndTest, CanReceiveFec) {
return DROP_PACKET;
break;
case kDropNextMediaPacket:
- if (encapsulated_payload_type == kFakeSendPayloadType) {
+ if (encapsulated_payload_type == kFakeVideoSendPayloadType) {
protected_sequence_numbers_.insert(header.sequenceNumber);
protected_timestamps_.insert(header.timestamp);
state_ = kDropEveryOtherPacketUntilFec;
@@ -517,7 +518,7 @@ TEST_F(EndToEndTest, CanReceiveFec) {
// Rendering frame with timestamp of packet that was dropped -> FEC
// protection worked.
if (protected_timestamps_.count(video_frame.timestamp()) != 0)
- observation_complete_->Set();
+ observation_complete_.Set();
}
bool IsTextureSupported() const override { return false; }
@@ -528,9 +529,10 @@ TEST_F(EndToEndTest, CanReceiveFec) {
kDropNextMediaPacket,
} state_;
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
// TODO(pbos): Run this test with combined NACK/FEC enabled as well.
// int rtp_history_ms = 1000;
// (*receive_configs)[0].rtp.nack.rtp_history_ms = rtp_history_ms;
@@ -544,7 +546,7 @@ TEST_F(EndToEndTest, CanReceiveFec) {
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
+ EXPECT_TRUE(Wait())
<< "Timed out waiting for dropped frames frames to be rendered.";
}
@@ -553,14 +555,14 @@ TEST_F(EndToEndTest, CanReceiveFec) {
std::set<uint32_t> protected_timestamps_ GUARDED_BY(crit_);
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
// Flaky on all platforms. See webrtc:4328.
TEST_F(EndToEndTest, DISABLED_ReceivedFecPacketsNotNacked) {
class FecNackObserver : public test::EndToEndTest {
public:
- explicit FecNackObserver()
+ FecNackObserver()
: EndToEndTest(kDefaultTimeoutMs),
state_(kFirstPacket),
fec_sequence_number_(0),
@@ -569,6 +571,7 @@ TEST_F(EndToEndTest, DISABLED_ReceivedFecPacketsNotNacked) {
private:
Action OnSendRtp(const uint8_t* packet, size_t length) override {
+ rtc::CritScope lock(&crit_);
RTPHeader header;
EXPECT_TRUE(parser_->Parse(packet, length, &header));
@@ -576,10 +579,10 @@ TEST_F(EndToEndTest, DISABLED_ReceivedFecPacketsNotNacked) {
if (header.payloadType == kRedPayloadType) {
encapsulated_payload_type =
static_cast<int>(packet[header.headerLength]);
- if (encapsulated_payload_type != kFakeSendPayloadType)
+ if (encapsulated_payload_type != kFakeVideoSendPayloadType)
EXPECT_EQ(kUlpfecPayloadType, encapsulated_payload_type);
} else {
- EXPECT_EQ(kFakeSendPayloadType, header.payloadType);
+ EXPECT_EQ(kFakeVideoSendPayloadType, header.payloadType);
}
if (has_last_sequence_number_ &&
@@ -619,6 +622,7 @@ TEST_F(EndToEndTest, DISABLED_ReceivedFecPacketsNotNacked) {
}
Action OnReceiveRtcp(const uint8_t* packet, size_t length) override {
+ rtc::CritScope lock(&crit_);
if (state_ == kVerifyFecPacketNotInNackList) {
test::RtcpPacketParser rtcp_parser;
rtcp_parser.Parse(packet, length);
@@ -627,12 +631,22 @@ TEST_F(EndToEndTest, DISABLED_ReceivedFecPacketsNotNacked) {
IsNewerSequenceNumber(nacks.back(), fec_sequence_number_)) {
EXPECT_TRUE(std::find(
nacks.begin(), nacks.end(), fec_sequence_number_) == nacks.end());
- observation_complete_->Set();
+ observation_complete_.Set();
}
}
return SEND_PACKET;
}
+ test::PacketTransport* CreateSendTransport(Call* sender_call) override {
+ // At low RTT (< kLowRttNackMs) -> NACK only, no FEC.
+ // Configure some network delay.
+ const int kNetworkDelayMs = 50;
+ FakeNetworkPipe::Config config;
+ config.queue_delay_ms = kNetworkDelayMs;
+ return new test::PacketTransport(sender_call, this,
+ test::PacketTransport::kSender, config);
+ }
+
// TODO(holmer): Investigate why we don't send FEC packets when the bitrate
// is 10 kbps.
Call::Config GetSenderCallConfig() override {
@@ -642,9 +656,10 @@ TEST_F(EndToEndTest, DISABLED_ReceivedFecPacketsNotNacked) {
return config;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
// Configure hybrid NACK/FEC.
send_config->rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
send_config->rtp.fec.red_payload_type = kRedPayloadType;
@@ -655,7 +670,7 @@ TEST_F(EndToEndTest, DISABLED_ReceivedFecPacketsNotNacked) {
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
+ EXPECT_TRUE(Wait())
<< "Timed out while waiting for FEC packets to be received.";
}
@@ -666,17 +681,13 @@ TEST_F(EndToEndTest, DISABLED_ReceivedFecPacketsNotNacked) {
kVerifyFecPacketNotInNackList,
} state_;
- uint16_t fec_sequence_number_;
+ rtc::CriticalSection crit_;
+ uint16_t fec_sequence_number_ GUARDED_BY(&crit_);
bool has_last_sequence_number_;
uint16_t last_sequence_number_;
} test;
- // At low RTT (< kLowRttNackMs) -> NACK only, no FEC.
- // Configure some network delay.
- const int kNetworkDelayMs = 50;
- FakeNetworkPipe::Config config;
- config.queue_delay_ms = kNetworkDelayMs;
- RunBaseTest(&test, config);
+ RunBaseTest(&test);
}
// This test drops second RTP packet with a marker bit set, makes sure it's
@@ -691,7 +702,7 @@ void EndToEndTest::DecodesRetransmittedFrame(bool use_rtx, bool use_red) {
explicit RetransmissionObserver(bool use_rtx, bool use_red)
: EndToEndTest(kDefaultTimeoutMs),
payload_type_(GetPayloadType(false, use_red)),
- retransmission_ssrc_(use_rtx ? kSendRtxSsrcs[0] : kSendSsrcs[0]),
+ retransmission_ssrc_(use_rtx ? kSendRtxSsrcs[0] : kVideoSendSsrcs[0]),
retransmission_payload_type_(GetPayloadType(use_rtx, use_red)),
marker_bits_observed_(0),
num_packets_observed_(0),
@@ -719,7 +730,7 @@ void EndToEndTest::DecodesRetransmittedFrame(bool use_rtx, bool use_red) {
return SEND_PACKET;
}
- EXPECT_EQ(kSendSsrcs[0], header.ssrc);
+ EXPECT_EQ(kVideoSendSsrcs[0], header.ssrc);
EXPECT_EQ(payload_type_, header.payloadType);
// Found the final packet of the frame to inflict loss to, drop this and
@@ -736,13 +747,14 @@ void EndToEndTest::DecodesRetransmittedFrame(bool use_rtx, bool use_red) {
rtc::CritScope lock(&crit_);
if (frame->timestamp() == retransmitted_timestamp_) {
EXPECT_TRUE(frame_retransmitted_);
- observation_complete_->Set();
+ observation_complete_.Set();
}
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
(*receive_configs)[0].pre_render_callback = this;
(*receive_configs)[0].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
@@ -757,21 +769,21 @@ void EndToEndTest::DecodesRetransmittedFrame(bool use_rtx, bool use_red) {
if (retransmission_ssrc_ == kSendRtxSsrcs[0]) {
send_config->rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[0]);
send_config->rtp.rtx.payload_type = kSendRtxPayloadType;
- (*receive_configs)[0].rtp.rtx[kFakeSendPayloadType].ssrc =
+ (*receive_configs)[0].rtp.rtx[kFakeVideoSendPayloadType].ssrc =
kSendRtxSsrcs[0];
- (*receive_configs)[0].rtp.rtx[kFakeSendPayloadType].payload_type =
+ (*receive_configs)[0].rtp.rtx[kFakeVideoSendPayloadType].payload_type =
kSendRtxPayloadType;
}
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
+ EXPECT_TRUE(Wait())
<< "Timed out while waiting for retransmission to render.";
}
int GetPayloadType(bool use_rtx, bool use_red) {
return use_rtx ? kSendRtxPayloadType
- : (use_red ? kRedPayloadType : kFakeSendPayloadType);
+ : (use_red ? kRedPayloadType : kFakeVideoSendPayloadType);
}
rtc::CriticalSection crit_;
@@ -784,7 +796,7 @@ void EndToEndTest::DecodesRetransmittedFrame(bool use_rtx, bool use_red) {
bool frame_retransmitted_;
} test(use_rtx, use_red);
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(EndToEndTest, DecodesRetransmittedFrame) {
@@ -809,30 +821,30 @@ TEST_F(EndToEndTest, UsesFrameCallbacks) {
class Renderer : public VideoRenderer {
public:
- Renderer() : event_(EventWrapper::Create()) {}
+ Renderer() : event_(false, false) {}
void RenderFrame(const VideoFrame& video_frame,
int /*time_to_render_ms*/) override {
EXPECT_EQ(0, *video_frame.buffer(kYPlane))
<< "Rendered frame should have zero luma which is applied by the "
"pre-render callback.";
- event_->Set();
+ event_.Set();
}
bool IsTextureSupported() const override { return false; }
- EventTypeWrapper Wait() { return event_->Wait(kDefaultTimeoutMs); }
- rtc::scoped_ptr<EventWrapper> event_;
+ bool Wait() { return event_.Wait(kDefaultTimeoutMs); }
+ rtc::Event event_;
} renderer;
class TestFrameCallback : public I420FrameCallback {
public:
TestFrameCallback(int expected_luma_byte, int next_luma_byte)
- : event_(EventWrapper::Create()),
+ : event_(false, false),
expected_luma_byte_(expected_luma_byte),
next_luma_byte_(next_luma_byte) {}
- EventTypeWrapper Wait() { return event_->Wait(kDefaultTimeoutMs); }
+ bool Wait() { return event_.Wait(kDefaultTimeoutMs); }
private:
virtual void FrameCallback(VideoFrame* frame) {
@@ -850,10 +862,10 @@ TEST_F(EndToEndTest, UsesFrameCallbacks) {
next_luma_byte_,
frame->allocated_size(kYPlane));
- event_->Set();
+ event_.Set();
}
- rtc::scoped_ptr<EventWrapper> event_;
+ rtc::Event event_;
int expected_luma_byte_;
int next_luma_byte_;
};
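The dominant mechanical change in this file is the one visible above: heap-allocated EventWrapper members, whose Wait() returned an EventTypeWrapper to compare against kEventSignaled, become plain rtc::Event members constructed with (manual_reset, initially_signaled) and a bool-returning Wait(). A minimal sketch of the new pattern, assuming only the rtc::Event interface exercised by this patch:

    #include "webrtc/base/event.h"

    class FrameWaiter {
     public:
      // Auto-reset event that starts unsignaled, matching the (false, false)
      // constructor arguments used throughout this patch.
      FrameWaiter() : event_(false, false) {}

      // Called from the delivery thread; wakes a blocked WaitForFrame().
      void OnFrame() { event_.Set(); }

      // True if a frame arrived within |timeout_ms|, false on timeout.
      bool WaitForFrame(int timeout_ms) { return event_.Wait(timeout_ms); }

     private:
      rtc::Event event_;
    };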
@@ -868,34 +880,35 @@ TEST_F(EndToEndTest, UsesFrameCallbacks) {
sender_transport.SetReceiver(receiver_call_->Receiver());
receiver_transport.SetReceiver(sender_call_->Receiver());
- CreateSendConfig(1, &sender_transport);
+ CreateSendConfig(1, 0, &sender_transport);
rtc::scoped_ptr<VideoEncoder> encoder(
VideoEncoder::Create(VideoEncoder::kVp8));
- send_config_.encoder_settings.encoder = encoder.get();
- send_config_.encoder_settings.payload_name = "VP8";
- ASSERT_EQ(1u, encoder_config_.streams.size()) << "Test setup error.";
- encoder_config_.streams[0].width = kWidth;
- encoder_config_.streams[0].height = kHeight;
- send_config_.pre_encode_callback = &pre_encode_callback;
+ video_send_config_.encoder_settings.encoder = encoder.get();
+ video_send_config_.encoder_settings.payload_name = "VP8";
+ ASSERT_EQ(1u, video_encoder_config_.streams.size()) << "Test setup error.";
+ video_encoder_config_.streams[0].width = kWidth;
+ video_encoder_config_.streams[0].height = kHeight;
+ video_send_config_.pre_encode_callback = &pre_encode_callback;
CreateMatchingReceiveConfigs(&receiver_transport);
- receive_configs_[0].pre_render_callback = &pre_render_callback;
- receive_configs_[0].renderer = &renderer;
+ video_receive_configs_[0].pre_render_callback = &pre_render_callback;
+ video_receive_configs_[0].renderer = &renderer;
- CreateStreams();
+ CreateVideoStreams();
Start();
  // Create frames that are smaller than the send width/height; this checks
  // that the callbacks are invoked after the video has been processed.
rtc::scoped_ptr<test::FrameGenerator> frame_generator(
test::FrameGenerator::CreateChromaGenerator(kWidth / 2, kHeight / 2));
- send_stream_->Input()->IncomingCapturedFrame(*frame_generator->NextFrame());
+ video_send_stream_->Input()->IncomingCapturedFrame(
+ *frame_generator->NextFrame());
- EXPECT_EQ(kEventSignaled, pre_encode_callback.Wait())
+ EXPECT_TRUE(pre_encode_callback.Wait())
<< "Timed out while waiting for pre-encode callback.";
- EXPECT_EQ(kEventSignaled, pre_render_callback.Wait())
+ EXPECT_TRUE(pre_render_callback.Wait())
<< "Timed out while waiting for pre-render callback.";
- EXPECT_EQ(kEventSignaled, renderer.Wait())
+ EXPECT_TRUE(renderer.Wait())
<< "Timed out while waiting for the frame to render.";
Stop();
@@ -962,7 +975,7 @@ void EndToEndTest::ReceivesPliAndRecovers(int rtp_history_ms) {
rtc::CritScope lock(&crit_);
if (received_pli_ &&
video_frame.timestamp() > highest_dropped_timestamp_) {
- observation_complete_->Set();
+ observation_complete_.Set();
}
if (!received_pli_)
frames_to_drop_ = kPacketsToDrop;
@@ -970,18 +983,19 @@ void EndToEndTest::ReceivesPliAndRecovers(int rtp_history_ms) {
bool IsTextureSupported() const override { return false; }
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->rtp.nack.rtp_history_ms = rtp_history_ms_;
(*receive_configs)[0].rtp.nack.rtp_history_ms = rtp_history_ms_;
(*receive_configs)[0].renderer = this;
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait()) << "Timed out waiting for PLI to be "
- "received and a frame to be "
- "rendered afterwards.";
+ EXPECT_TRUE(Wait()) << "Timed out waiting for PLI to be "
+ "received and a frame to be "
+ "rendered afterwards.";
}
rtc::CriticalSection crit_;
@@ -992,15 +1006,14 @@ void EndToEndTest::ReceivesPliAndRecovers(int rtp_history_ms) {
bool received_pli_ GUARDED_BY(&crit_);
} test(rtp_history_ms);
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(EndToEndTest, ReceivesPliAndRecoversWithNack) {
ReceivesPliAndRecovers(1000);
}
-// TODO(pbos): Enable this when 2250 is resolved.
-TEST_F(EndToEndTest, DISABLED_ReceivesPliAndRecoversWithoutNack) {
+TEST_F(EndToEndTest, ReceivesPliAndRecoversWithoutNack) {
ReceivesPliAndRecovers(0);
}
@@ -1008,11 +1021,9 @@ TEST_F(EndToEndTest, UnknownRtpPacketGivesUnknownSsrcReturnCode) {
class PacketInputObserver : public PacketReceiver {
public:
explicit PacketInputObserver(PacketReceiver* receiver)
- : receiver_(receiver), delivered_packet_(EventWrapper::Create()) {}
+ : receiver_(receiver), delivered_packet_(false, false) {}
- EventTypeWrapper Wait() {
- return delivered_packet_->Wait(kDefaultTimeoutMs);
- }
+ bool Wait() { return delivered_packet_.Wait(kDefaultTimeoutMs); }
private:
DeliveryStatus DeliverPacket(MediaType media_type,
@@ -1026,13 +1037,13 @@ TEST_F(EndToEndTest, UnknownRtpPacketGivesUnknownSsrcReturnCode) {
DeliveryStatus delivery_status =
receiver_->DeliverPacket(media_type, packet, length, packet_time);
EXPECT_EQ(DELIVERY_UNKNOWN_SSRC, delivery_status);
- delivered_packet_->Set();
+ delivered_packet_.Set();
return delivery_status;
}
}
PacketReceiver* receiver_;
- rtc::scoped_ptr<EventWrapper> delivered_packet_;
+ rtc::Event delivered_packet_;
};
CreateCalls(Call::Config(), Call::Config());
@@ -1043,18 +1054,18 @@ TEST_F(EndToEndTest, UnknownRtpPacketGivesUnknownSsrcReturnCode) {
send_transport.SetReceiver(&input_observer);
receive_transport.SetReceiver(sender_call_->Receiver());
- CreateSendConfig(1, &send_transport);
+ CreateSendConfig(1, 0, &send_transport);
CreateMatchingReceiveConfigs(&receive_transport);
- CreateStreams();
+ CreateVideoStreams();
CreateFrameGeneratorCapturer();
Start();
- receiver_call_->DestroyVideoReceiveStream(receive_streams_[0]);
- receive_streams_.clear();
+ receiver_call_->DestroyVideoReceiveStream(video_receive_streams_[0]);
+ video_receive_streams_.clear();
// Wait() waits for a received packet.
- EXPECT_EQ(kEventSignaled, input_observer.Wait());
+ EXPECT_TRUE(input_observer.Wait());
Stop();
@@ -1103,16 +1114,16 @@ void EndToEndTest::RespectsRtcpMode(RtcpMode rtcp_mode) {
if (!has_report_block) {
ADD_FAILURE() << "Received RTCP packet without receiver report for "
"RtcpMode::kCompound.";
- observation_complete_->Set();
+ observation_complete_.Set();
}
if (sent_rtcp_ >= kNumCompoundRtcpPacketsToObserve)
- observation_complete_->Set();
+ observation_complete_.Set();
break;
case RtcpMode::kReducedSize:
if (!has_report_block)
- observation_complete_->Set();
+ observation_complete_.Set();
break;
case RtcpMode::kOff:
RTC_NOTREACHED();
@@ -1122,16 +1133,17 @@ void EndToEndTest::RespectsRtcpMode(RtcpMode rtcp_mode) {
return SEND_PACKET;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
(*receive_configs)[0].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
(*receive_configs)[0].rtp.rtcp_mode = rtcp_mode_;
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
+ EXPECT_TRUE(Wait())
<< (rtcp_mode_ == RtcpMode::kCompound
? "Timed out before observing enough compound packets."
: "Timed out before receiving a non-compound RTCP packet.");
@@ -1142,7 +1154,7 @@ void EndToEndTest::RespectsRtcpMode(RtcpMode rtcp_mode) {
int sent_rtcp_;
} test(rtcp_mode);
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
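Condensed, the per-mode branch above asserts: compound RTCP must always carry a receiver report and the test passes after observing enough of them, while reduced-size RTCP passes as soon as one packet legitimately omits the report block. A sketch of that decision (the packet-count threshold is an assumption, not taken from this hunk):

    void CheckRtcpPacket(RtcpMode mode, bool has_report_block, int sent_rtcp,
                         rtc::Event* observation_complete) {
      switch (mode) {
        case RtcpMode::kCompound:
          if (!has_report_block) {
            ADD_FAILURE() << "Compound RTCP packet without a receiver report.";
            observation_complete->Set();  // Fail fast instead of timing out.
          }
          if (sent_rtcp >= 10)  // kNumCompoundRtcpPacketsToObserve, assumed.
            observation_complete->Set();
          break;
        case RtcpMode::kReducedSize:
          if (!has_report_block)
            observation_complete->Set();
          break;
        case RtcpMode::kOff:
          RTC_NOTREACHED();
          break;
      }
    }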
TEST_F(EndToEndTest, UsesRtcpCompoundMode) {
@@ -1219,7 +1231,7 @@ class MultiStreamTest {
VideoReceiveStream::Config receive_config(receiver_transport.get());
receive_config.rtp.remote_ssrc = ssrc;
- receive_config.rtp.local_ssrc = test::CallTest::kReceiverLocalSsrc;
+ receive_config.rtp.local_ssrc = test::CallTest::kReceiverLocalVideoSsrc;
VideoReceiveStream::Decoder decoder =
test::CreateMatchingDecoder(send_config.encoder_settings);
allocated_decoders.push_back(decoder.decoder);
@@ -1282,27 +1294,27 @@ TEST_F(EndToEndTest, SendsAndReceivesMultipleStreams) {
: settings_(settings),
ssrc_(ssrc),
frame_generator_(frame_generator),
- done_(EventWrapper::Create()) {}
+ done_(false, false) {}
void RenderFrame(const VideoFrame& video_frame,
int time_to_render_ms) override {
EXPECT_EQ(settings_.width, video_frame.width());
EXPECT_EQ(settings_.height, video_frame.height());
(*frame_generator_)->Stop();
- done_->Set();
+ done_.Set();
}
uint32_t Ssrc() { return ssrc_; }
bool IsTextureSupported() const override { return false; }
- EventTypeWrapper Wait() { return done_->Wait(kDefaultTimeoutMs); }
+ bool Wait() { return done_.Wait(kDefaultTimeoutMs); }
private:
const MultiStreamTest::CodecSettings& settings_;
const uint32_t ssrc_;
test::FrameGeneratorCapturer** const frame_generator_;
- rtc::scoped_ptr<EventWrapper> done_;
+ rtc::Event done_;
};
class Tester : public MultiStreamTest {
@@ -1313,8 +1325,8 @@ TEST_F(EndToEndTest, SendsAndReceivesMultipleStreams) {
protected:
void Wait() override {
for (const auto& observer : observers_) {
- EXPECT_EQ(EventTypeWrapper::kEventSignaled, observer->Wait())
- << "Time out waiting for from on ssrc " << observer->Ssrc();
+        EXPECT_TRUE(observer->Wait()) << "Timed out waiting for frames on SSRC "
+                                      << observer->Ssrc();
}
}
@@ -1350,7 +1362,7 @@ TEST_F(EndToEndTest, AssignsTransportSequenceNumbers) {
const uint32_t& first_media_ssrc,
const std::map<uint32_t, uint32_t>& ssrc_map)
: DirectTransport(sender_call),
- done_(EventWrapper::Create()),
+ done_(false, false),
parser_(RtpHeaderParser::Create()),
first_media_ssrc_(first_media_ssrc),
rtx_to_media_ssrcs_(ssrc_map),
@@ -1419,7 +1431,7 @@ TEST_F(EndToEndTest, AssignsTransportSequenceNumbers) {
}
if (IsDone())
- done_->Set();
+ done_.Set();
if (drop_packet)
return true;
@@ -1441,18 +1453,18 @@ TEST_F(EndToEndTest, AssignsTransportSequenceNumbers) {
return seqno_range == received_packed_ids_.size();
}
- EventTypeWrapper Wait() {
+ bool Wait() {
{
        // Can't be sure until this point that rtx_to_media_ssrcs_ etc. have
// been initialized and are OK to read.
rtc::CritScope cs(&lock_);
started_ = true;
}
- return done_->Wait(kDefaultTimeoutMs);
+ return done_.Wait(kDefaultTimeoutMs);
}
rtc::CriticalSection lock_;
- rtc::scoped_ptr<EventWrapper> done_;
+ rtc::Event done_;
rtc::scoped_ptr<RtpHeaderParser> parser_;
SequenceNumberUnwrapper unwrapper_;
std::set<int64_t> received_packed_ids_;
@@ -1475,7 +1487,7 @@ TEST_F(EndToEndTest, AssignsTransportSequenceNumbers) {
protected:
void Wait() override {
RTC_DCHECK(observer_ != nullptr);
- EXPECT_EQ(EventTypeWrapper::kEventSignaled, observer_->Wait());
+ EXPECT_TRUE(observer_->Wait());
}
void UpdateSendConfig(
@@ -1530,77 +1542,122 @@ TEST_F(EndToEndTest, AssignsTransportSequenceNumbers) {
tester.RunTest();
}
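The Wait() override above encodes an ordering rule worth calling out: the transport may deliver packets before the test body has finished populating rtx_to_media_ssrcs_, so the packet callback defers any reads until Wait() has flipped started_ under the lock. Stripped to its essentials (timeout value assumed):

    #include "webrtc/base/criticalsection.h"
    #include "webrtc/base/event.h"

    class GatedObserver {
     public:
      // Network thread: skip inspection until the test thread publishes
      // readiness, since shared state may still be under construction.
      void OnPacket() {
        rtc::CritScope cs(&lock_);
        if (!started_)
          return;
        // ... safe to read shared test state here ...
      }

      // Test thread: publish readiness, then block for completion.
      bool Wait() {
        {
          rtc::CritScope cs(&lock_);
          started_ = true;
        }
        return done_.Wait(30 * 1000);  // Assumed default test timeout.
      }

     private:
      rtc::CriticalSection lock_;
      bool started_ = false;
      rtc::Event done_{false, false};
    };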
-TEST_F(EndToEndTest, ReceivesTransportFeedback) {
- static const int kExtensionId = 5;
+class TransportFeedbackTester : public test::EndToEndTest {
+ public:
+ explicit TransportFeedbackTester(bool feedback_enabled,
+ size_t num_video_streams,
+ size_t num_audio_streams)
+ : EndToEndTest(::webrtc::EndToEndTest::kDefaultTimeoutMs),
+ feedback_enabled_(feedback_enabled),
+ num_video_streams_(num_video_streams),
+ num_audio_streams_(num_audio_streams) {
+    // Only one stream of each kind is supported for now.
+ EXPECT_LE(num_video_streams, 1u);
+ EXPECT_LE(num_audio_streams, 1u);
+ }
- class TransportFeedbackObserver : public test::DirectTransport {
- public:
- TransportFeedbackObserver(Call* receiver_call, rtc::Event* done_event)
- : DirectTransport(receiver_call), done_(done_event) {}
- virtual ~TransportFeedbackObserver() {}
+ protected:
+ Action OnSendRtcp(const uint8_t* data, size_t length) override {
+ EXPECT_FALSE(HasTransportFeedback(data, length));
+ return SEND_PACKET;
+ }
- bool SendRtcp(const uint8_t* data, size_t length) override {
- RTCPUtility::RTCPParserV2 parser(data, length, true);
- EXPECT_TRUE(parser.IsValid());
+ Action OnReceiveRtcp(const uint8_t* data, size_t length) override {
+ if (HasTransportFeedback(data, length))
+ observation_complete_.Set();
+ return SEND_PACKET;
+ }
- RTCPUtility::RTCPPacketTypes packet_type = parser.Begin();
- while (packet_type != RTCPUtility::RTCPPacketTypes::kInvalid) {
- if (packet_type == RTCPUtility::RTCPPacketTypes::kTransportFeedback) {
- done_->Set();
- break;
- }
- packet_type = parser.Iterate();
- }
+ bool HasTransportFeedback(const uint8_t* data, size_t length) const {
+ RTCPUtility::RTCPParserV2 parser(data, length, true);
+ EXPECT_TRUE(parser.IsValid());
- return test::DirectTransport::SendRtcp(data, length);
+ RTCPUtility::RTCPPacketTypes packet_type = parser.Begin();
+ while (packet_type != RTCPUtility::RTCPPacketTypes::kInvalid) {
+ if (packet_type == RTCPUtility::RTCPPacketTypes::kTransportFeedback)
+ return true;
+ packet_type = parser.Iterate();
}
- rtc::Event* done_;
- };
+ return false;
+ }
- class TransportFeedbackTester : public MultiStreamTest {
- public:
- TransportFeedbackTester() : done_(false, false) {}
- virtual ~TransportFeedbackTester() {}
+ void PerformTest() override {
+ const int64_t kDisabledFeedbackTimeoutMs = 5000;
+ EXPECT_EQ(feedback_enabled_,
+ observation_complete_.Wait(feedback_enabled_
+ ? test::CallTest::kDefaultTimeoutMs
+ : kDisabledFeedbackTimeoutMs));
+ }
- protected:
- void Wait() override {
- EXPECT_TRUE(done_.Wait(CallTest::kDefaultTimeoutMs));
- }
+ void OnCallsCreated(Call* sender_call, Call* receiver_call) override {
+ receiver_call_ = receiver_call;
+ }
- void UpdateSendConfig(
- size_t stream_index,
- VideoSendStream::Config* send_config,
- VideoEncoderConfig* encoder_config,
- test::FrameGeneratorCapturer** frame_generator) override {
- send_config->rtp.extensions.push_back(
- RtpExtension(RtpExtension::kTransportSequenceNumber, kExtensionId));
- }
+ size_t GetNumVideoStreams() const override { return num_video_streams_; }
+ size_t GetNumAudioStreams() const override { return num_audio_streams_; }
- void UpdateReceiveConfig(
- size_t stream_index,
- VideoReceiveStream::Config* receive_config) override {
- receive_config->rtp.extensions.push_back(
- RtpExtension(RtpExtension::kTransportSequenceNumber, kExtensionId));
- }
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
+ send_config->rtp.extensions.clear();
+ send_config->rtp.extensions.push_back(
+ RtpExtension(RtpExtension::kTransportSequenceNumber, kExtensionId));
+ (*receive_configs)[0].rtp.extensions = send_config->rtp.extensions;
+ (*receive_configs)[0].rtp.transport_cc = feedback_enabled_;
+ }
- test::DirectTransport* CreateReceiveTransport(
- Call* receiver_call) override {
- return new TransportFeedbackObserver(receiver_call, &done_);
- }
+ void ModifyAudioConfigs(
+ AudioSendStream::Config* send_config,
+ std::vector<AudioReceiveStream::Config>* receive_configs) override {
+ send_config->rtp.extensions.clear();
+ send_config->rtp.extensions.push_back(
+ RtpExtension(RtpExtension::kTransportSequenceNumber, kExtensionId));
+ (*receive_configs)[0].rtp.extensions.clear();
+ (*receive_configs)[0].rtp.extensions = send_config->rtp.extensions;
+ (*receive_configs)[0].rtp.transport_cc = feedback_enabled_;
+ (*receive_configs)[0].combined_audio_video_bwe = true;
+ }
- private:
- rtc::Event done_;
- } tester;
- tester.RunTest();
+ private:
+ static const int kExtensionId = 5;
+ const bool feedback_enabled_;
+ const size_t num_video_streams_;
+ const size_t num_audio_streams_;
+ Call* receiver_call_;
+};
+
+TEST_F(EndToEndTest, VideoReceivesTransportFeedback) {
+ TransportFeedbackTester test(true, 1, 0);
+ RunBaseTest(&test);
+}
+
+TEST_F(EndToEndTest, VideoTransportFeedbackNotConfigured) {
+ TransportFeedbackTester test(false, 1, 0);
+ RunBaseTest(&test);
+}
+
+TEST_F(EndToEndTest, AudioReceivesTransportFeedback) {
+ TransportFeedbackTester test(true, 0, 1);
+ RunBaseTest(&test);
+}
+
+TEST_F(EndToEndTest, AudioTransportFeedbackNotConfigured) {
+ TransportFeedbackTester test(false, 0, 1);
+ RunBaseTest(&test);
}
+
+TEST_F(EndToEndTest, AudioVideoReceivesTransportFeedback) {
+ TransportFeedbackTester test(true, 1, 1);
+ RunBaseTest(&test);
+}
+
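PerformTest() in TransportFeedbackTester uses the wait result itself as the assertion: with feedback enabled the transport-feedback packet must arrive within the normal timeout, and with it disabled the five-second Wait() must expire without one. The idiom, isolated (default timeout assumed):

    // True when observed behavior matches configuration: the observation
    // event signals if and only if transport feedback was enabled.
    bool FeedbackBehavedAsConfigured(rtc::Event* observation_complete,
                                     bool feedback_enabled) {
      const int kDisabledFeedbackTimeoutMs = 5000;  // From the test above.
      const int kDefaultTimeoutMs = 30 * 1000;      // Assumed default.
      bool signaled = observation_complete->Wait(
          feedback_enabled ? kDefaultTimeoutMs : kDisabledFeedbackTimeoutMs);
      return signaled == feedback_enabled;
    }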
TEST_F(EndToEndTest, ObserversEncodedFrames) {
class EncodedFrameTestObserver : public EncodedFrameObserver {
public:
EncodedFrameTestObserver()
- : length_(0),
- frame_type_(kEmptyFrame),
- called_(EventWrapper::Create()) {}
+ : length_(0), frame_type_(kEmptyFrame), called_(false, false) {}
virtual ~EncodedFrameTestObserver() {}
virtual void EncodedFrameCallback(const EncodedFrame& encoded_frame) {
@@ -1608,10 +1665,10 @@ TEST_F(EndToEndTest, ObserversEncodedFrames) {
length_ = encoded_frame.length_;
buffer_.reset(new uint8_t[length_]);
memcpy(buffer_.get(), encoded_frame.data_, length_);
- called_->Set();
+ called_.Set();
}
- EventTypeWrapper Wait() { return called_->Wait(kDefaultTimeoutMs); }
+ bool Wait() { return called_.Wait(kDefaultTimeoutMs); }
void ExpectEqualFrames(const EncodedFrameTestObserver& observer) {
ASSERT_EQ(length_, observer.length_)
@@ -1626,7 +1683,7 @@ TEST_F(EndToEndTest, ObserversEncodedFrames) {
rtc::scoped_ptr<uint8_t[]> buffer_;
size_t length_;
FrameType frame_type_;
- rtc::scoped_ptr<EventWrapper> called_;
+ rtc::Event called_;
};
EncodedFrameTestObserver post_encode_observer;
@@ -1639,23 +1696,25 @@ TEST_F(EndToEndTest, ObserversEncodedFrames) {
sender_transport.SetReceiver(receiver_call_->Receiver());
receiver_transport.SetReceiver(sender_call_->Receiver());
- CreateSendConfig(1, &sender_transport);
+ CreateSendConfig(1, 0, &sender_transport);
CreateMatchingReceiveConfigs(&receiver_transport);
- send_config_.post_encode_callback = &post_encode_observer;
- receive_configs_[0].pre_decode_callback = &pre_decode_observer;
+ video_send_config_.post_encode_callback = &post_encode_observer;
+ video_receive_configs_[0].pre_decode_callback = &pre_decode_observer;
- CreateStreams();
+ CreateVideoStreams();
Start();
rtc::scoped_ptr<test::FrameGenerator> frame_generator(
test::FrameGenerator::CreateChromaGenerator(
- encoder_config_.streams[0].width, encoder_config_.streams[0].height));
- send_stream_->Input()->IncomingCapturedFrame(*frame_generator->NextFrame());
+ video_encoder_config_.streams[0].width,
+ video_encoder_config_.streams[0].height));
+ video_send_stream_->Input()->IncomingCapturedFrame(
+ *frame_generator->NextFrame());
- EXPECT_EQ(kEventSignaled, post_encode_observer.Wait())
+ EXPECT_TRUE(post_encode_observer.Wait())
<< "Timed out while waiting for send-side encoded-frame callback.";
- EXPECT_EQ(kEventSignaled, pre_decode_observer.Wait())
+ EXPECT_TRUE(pre_decode_observer.Wait())
<< "Timed out while waiting for pre-decode encoded-frame callback.";
post_encode_observer.ExpectEqualFrames(pre_decode_observer);
@@ -1683,29 +1742,29 @@ TEST_F(EndToEndTest, ReceiveStreamSendsRemb) {
while (packet_type != RTCPUtility::RTCPPacketTypes::kInvalid) {
if (packet_type == RTCPUtility::RTCPPacketTypes::kPsfbRemb) {
const RTCPUtility::RTCPPacket& packet = parser.Packet();
- EXPECT_EQ(packet.PSFBAPP.SenderSSRC, kReceiverLocalSsrc);
+ EXPECT_EQ(packet.PSFBAPP.SenderSSRC, kReceiverLocalVideoSsrc);
received_psfb = true;
} else if (packet_type == RTCPUtility::RTCPPacketTypes::kPsfbRembItem) {
const RTCPUtility::RTCPPacket& packet = parser.Packet();
EXPECT_GT(packet.REMBItem.BitRate, 0u);
EXPECT_EQ(packet.REMBItem.NumberOfSSRCs, 1u);
- EXPECT_EQ(packet.REMBItem.SSRCs[0], kSendSsrcs[0]);
+ EXPECT_EQ(packet.REMBItem.SSRCs[0], kVideoSendSsrcs[0]);
received_remb = true;
}
packet_type = parser.Iterate();
}
if (received_psfb && received_remb)
- observation_complete_->Set();
+ observation_complete_.Set();
return SEND_PACKET;
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait()) << "Timed out while waiting for a "
- "receiver RTCP REMB packet to be "
- "sent.";
+ EXPECT_TRUE(Wait()) << "Timed out while waiting for a "
+ "receiver RTCP REMB packet to be "
+ "sent.";
}
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(EndToEndTest, VerifyBandwidthStats) {
@@ -1724,7 +1783,7 @@ TEST_F(EndToEndTest, VerifyBandwidthStats) {
has_seen_pacer_delay_ = sender_stats.pacer_delay_ms > 0;
if (sender_stats.send_bandwidth_bps > 0 &&
receiver_stats.recv_bandwidth_bps > 0 && has_seen_pacer_delay_) {
- observation_complete_->Set();
+ observation_complete_.Set();
}
return SEND_PACKET;
}
@@ -1735,8 +1794,8 @@ TEST_F(EndToEndTest, VerifyBandwidthStats) {
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait()) << "Timed out while waiting for "
- "non-zero bandwidth stats.";
+ EXPECT_TRUE(Wait()) << "Timed out while waiting for "
+ "non-zero bandwidth stats.";
}
private:
@@ -1745,7 +1804,7 @@ TEST_F(EndToEndTest, VerifyBandwidthStats) {
bool has_seen_pacer_delay_;
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(EndToEndTest, VerifyNackStats) {
@@ -1762,6 +1821,7 @@ TEST_F(EndToEndTest, VerifyNackStats) {
private:
Action OnSendRtp(const uint8_t* packet, size_t length) override {
+ rtc::CritScope lock(&crit_);
if (++sent_rtp_packets_ == kPacketNumberToDrop) {
rtc::scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
RTPHeader header;
@@ -1774,6 +1834,7 @@ TEST_F(EndToEndTest, VerifyNackStats) {
}
Action OnReceiveRtcp(const uint8_t* packet, size_t length) override {
+ rtc::CritScope lock(&crit_);
test::RtcpPacketParser rtcp_parser;
rtcp_parser.Parse(packet, length);
std::vector<uint16_t> nacks = rtcp_parser.nack_item()->last_nack_list();
@@ -1784,7 +1845,7 @@ TEST_F(EndToEndTest, VerifyNackStats) {
return SEND_PACKET;
}
- void VerifyStats() {
+ void VerifyStats() EXCLUSIVE_LOCKS_REQUIRED(&crit_) {
if (!dropped_rtp_packet_requested_)
return;
int send_stream_nack_packets = 0;
@@ -1804,7 +1865,7 @@ TEST_F(EndToEndTest, VerifyNackStats) {
if (send_stream_nack_packets >= 1 && receive_stream_nack_packets >= 1) {
// NACK packet sent on receive stream and received on sent stream.
if (MinMetricRunTimePassed())
- observation_complete_->Set();
+ observation_complete_.Set();
}
}
@@ -1818,14 +1879,15 @@ TEST_F(EndToEndTest, VerifyNackStats) {
return elapsed_sec > metrics::kMinRunTimeInSeconds;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
(*receive_configs)[0].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
}
- void OnStreamsCreated(
+ void OnVideoStreamsCreated(
VideoSendStream* send_stream,
const std::vector<VideoReceiveStream*>& receive_streams) override {
send_stream_ = send_stream;
@@ -1833,20 +1895,20 @@ TEST_F(EndToEndTest, VerifyNackStats) {
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
- << "Timed out waiting for packet to be NACKed.";
+ EXPECT_TRUE(Wait()) << "Timed out waiting for packet to be NACKed.";
}
+ rtc::CriticalSection crit_;
uint64_t sent_rtp_packets_;
- uint16_t dropped_rtp_packet_;
- bool dropped_rtp_packet_requested_;
+ uint16_t dropped_rtp_packet_ GUARDED_BY(&crit_);
+ bool dropped_rtp_packet_requested_ GUARDED_BY(&crit_);
std::vector<VideoReceiveStream*> receive_streams_;
VideoSendStream* send_stream_;
int64_t start_runtime_ms_;
} test;
test::ClearHistograms();
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
EXPECT_EQ(1, test::NumHistogramSamples(
"WebRTC.Video.UniqueNackRequestsSentInPercent"));
@@ -1858,13 +1920,16 @@ TEST_F(EndToEndTest, VerifyNackStats) {
"WebRTC.Video.NackPacketsReceivedPerMinute"), 0);
}
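VerifyNackStats also gains real locking: OnSendRtp() and OnReceiveRtcp() run on transport threads, so the counters they touch are now GUARDED_BY a CriticalSection, and VerifyStats() declares EXCLUSIVE_LOCKS_REQUIRED so Clang's thread-safety analysis can verify every caller already holds the lock. The pattern in miniature (annotation macros assumed from webrtc/base/thread_annotations.h):

    #include "webrtc/base/criticalsection.h"
    #include "webrtc/base/event.h"
    #include "webrtc/base/thread_annotations.h"

    class NackCounter {
     public:
      void OnNack() {  // Transport thread.
        rtc::CritScope lock(&crit_);
        ++nacks_;
        MaybeFinishLocked();
      }

     private:
      // Callers must hold crit_; the annotation lets the compiler check it.
      void MaybeFinishLocked() EXCLUSIVE_LOCKS_REQUIRED(&crit_) {
        if (nacks_ >= 1)
          done_.Set();
      }

      rtc::CriticalSection crit_;
      int nacks_ GUARDED_BY(&crit_) = 0;
      rtc::Event done_{false, false};
    };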
-void EndToEndTest::VerifyHistogramStats(bool use_rtx, bool use_red) {
+void EndToEndTest::VerifyHistogramStats(bool use_rtx,
+ bool use_red,
+ bool screenshare) {
class StatsObserver : public test::EndToEndTest {
public:
- StatsObserver(bool use_rtx, bool use_red)
+ StatsObserver(bool use_rtx, bool use_red, bool screenshare)
: EndToEndTest(kLongTimeoutMs),
use_rtx_(use_rtx),
use_red_(use_red),
+ screenshare_(screenshare),
sender_call_(nullptr),
receiver_call_(nullptr),
start_runtime_ms_(-1) {}
@@ -1872,7 +1937,7 @@ void EndToEndTest::VerifyHistogramStats(bool use_rtx, bool use_red) {
private:
Action OnSendRtp(const uint8_t* packet, size_t length) override {
if (MinMetricRunTimePassed())
- observation_complete_->Set();
+ observation_complete_.Set();
// GetStats calls GetSendChannelRtcpStatistics
// (via VideoSendStream::GetRtt) which updates ReportBlockStats used by
@@ -1893,9 +1958,10 @@ void EndToEndTest::VerifyHistogramStats(bool use_rtx, bool use_red) {
return elapsed_sec > metrics::kMinRunTimeInSeconds * 2;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
// NACK
send_config->rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
(*receive_configs)[0].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
@@ -1910,11 +1976,14 @@ void EndToEndTest::VerifyHistogramStats(bool use_rtx, bool use_red) {
if (use_rtx_) {
send_config->rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[0]);
send_config->rtp.rtx.payload_type = kSendRtxPayloadType;
- (*receive_configs)[0].rtp.rtx[kFakeSendPayloadType].ssrc =
+ (*receive_configs)[0].rtp.rtx[kFakeVideoSendPayloadType].ssrc =
kSendRtxSsrcs[0];
- (*receive_configs)[0].rtp.rtx[kFakeSendPayloadType].payload_type =
+ (*receive_configs)[0].rtp.rtx[kFakeVideoSendPayloadType].payload_type =
kSendRtxPayloadType;
}
+ encoder_config->content_type =
+ screenshare_ ? VideoEncoderConfig::ContentType::kScreen
+ : VideoEncoderConfig::ContentType::kRealtimeVideo;
}
void OnCallsCreated(Call* sender_call, Call* receiver_call) override {
@@ -1923,21 +1992,37 @@ void EndToEndTest::VerifyHistogramStats(bool use_rtx, bool use_red) {
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
- << "Timed out waiting for packet to be NACKed.";
+ EXPECT_TRUE(Wait()) << "Timed out waiting for packet to be NACKed.";
}
- bool use_rtx_;
- bool use_red_;
+ const bool use_rtx_;
+ const bool use_red_;
+ const bool screenshare_;
Call* sender_call_;
Call* receiver_call_;
int64_t start_runtime_ms_;
- } test(use_rtx, use_red);
+ } test(use_rtx, use_red, screenshare);
test::ClearHistograms();
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
+
+  // Delete the calls so that the Call-level stats get reported.
+ sender_call_.reset();
+ receiver_call_.reset();
+
+ std::string video_prefix =
+ screenshare ? "WebRTC.Video.Screenshare." : "WebRTC.Video.";
// Verify that stats have been updated once.
+ EXPECT_EQ(
+ 1, test::NumHistogramSamples("WebRTC.Call.VideoBitrateReceivedInKbps"));
+ EXPECT_EQ(1,
+ test::NumHistogramSamples("WebRTC.Call.RtcpBitrateReceivedInBps"));
+ EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Call.BitrateReceivedInKbps"));
+ EXPECT_EQ(
+ 1, test::NumHistogramSamples("WebRTC.Call.EstimatedSendBitrateInKbps"));
+ EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Call.PacerBitrateInKbps"));
+
EXPECT_EQ(1, test::NumHistogramSamples(
"WebRTC.Video.NackPacketsSentPerMinute"));
EXPECT_EQ(1, test::NumHistogramSamples(
@@ -1951,8 +2036,8 @@ void EndToEndTest::VerifyHistogramStats(bool use_rtx, bool use_red) {
EXPECT_EQ(1, test::NumHistogramSamples(
"WebRTC.Video.PliPacketsReceivedPerMinute"));
- EXPECT_EQ(1, test::NumHistogramSamples(
- "WebRTC.Video.KeyFramesSentInPermille"));
+ EXPECT_EQ(
+ 1, test::NumHistogramSamples(video_prefix + "KeyFramesSentInPermille"));
EXPECT_EQ(1, test::NumHistogramSamples(
"WebRTC.Video.KeyFramesReceivedInPermille"));
@@ -1961,34 +2046,39 @@ void EndToEndTest::VerifyHistogramStats(bool use_rtx, bool use_red) {
EXPECT_EQ(1, test::NumHistogramSamples(
"WebRTC.Video.ReceivedPacketsLostInPercent"));
- EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Video.InputWidthInPixels"));
- EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Video.InputHeightInPixels"));
- EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Video.SentWidthInPixels"));
- EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Video.SentHeightInPixels"));
+ EXPECT_EQ(1, test::NumHistogramSamples(video_prefix + "InputWidthInPixels"));
+ EXPECT_EQ(1, test::NumHistogramSamples(video_prefix + "InputHeightInPixels"));
+ EXPECT_EQ(1, test::NumHistogramSamples(video_prefix + "SentWidthInPixels"));
+ EXPECT_EQ(1, test::NumHistogramSamples(video_prefix + "SentHeightInPixels"));
EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Video.ReceivedWidthInPixels"));
EXPECT_EQ(1,
test::NumHistogramSamples("WebRTC.Video.ReceivedHeightInPixels"));
- EXPECT_EQ(static_cast<int>(encoder_config_.streams[0].width),
- test::LastHistogramSample("WebRTC.Video.InputWidthInPixels"));
- EXPECT_EQ(static_cast<int>(encoder_config_.streams[0].height),
- test::LastHistogramSample("WebRTC.Video.InputHeightInPixels"));
- EXPECT_EQ(static_cast<int>(encoder_config_.streams[0].width),
- test::LastHistogramSample("WebRTC.Video.SentWidthInPixels"));
- EXPECT_EQ(static_cast<int>(encoder_config_.streams[0].height),
- test::LastHistogramSample("WebRTC.Video.SentHeightInPixels"));
- EXPECT_EQ(static_cast<int>(encoder_config_.streams[0].width),
+ EXPECT_EQ(static_cast<int>(video_encoder_config_.streams[0].width),
+ test::LastHistogramSample(video_prefix + "InputWidthInPixels"));
+ EXPECT_EQ(static_cast<int>(video_encoder_config_.streams[0].height),
+ test::LastHistogramSample(video_prefix + "InputHeightInPixels"));
+ EXPECT_EQ(static_cast<int>(video_encoder_config_.streams[0].width),
+ test::LastHistogramSample(video_prefix + "SentWidthInPixels"));
+ EXPECT_EQ(static_cast<int>(video_encoder_config_.streams[0].height),
+ test::LastHistogramSample(video_prefix + "SentHeightInPixels"));
+ EXPECT_EQ(static_cast<int>(video_encoder_config_.streams[0].width),
test::LastHistogramSample("WebRTC.Video.ReceivedWidthInPixels"));
- EXPECT_EQ(static_cast<int>(encoder_config_.streams[0].height),
+ EXPECT_EQ(static_cast<int>(video_encoder_config_.streams[0].height),
test::LastHistogramSample("WebRTC.Video.ReceivedHeightInPixels"));
- EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Video.InputFramesPerSecond"));
- EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Video.SentFramesPerSecond"));
+ EXPECT_EQ(1,
+ test::NumHistogramSamples(video_prefix + "InputFramesPerSecond"));
+ EXPECT_EQ(1, test::NumHistogramSamples(video_prefix + "SentFramesPerSecond"));
EXPECT_EQ(1, test::NumHistogramSamples(
"WebRTC.Video.DecodedFramesPerSecond"));
EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Video.RenderFramesPerSecond"));
- EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Video.EncodeTimeInMs"));
+ EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Video.OnewayDelayInMs"));
+ EXPECT_EQ(
+ 1, test::NumHistogramSamples("WebRTC.Video.RenderSqrtPixelsPerSecond"));
+
+ EXPECT_EQ(1, test::NumHistogramSamples(video_prefix + "EncodeTimeInMs"));
EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Video.DecodeTimeInMs"));
EXPECT_EQ(1, test::NumHistogramSamples(
@@ -2008,6 +2098,10 @@ void EndToEndTest::VerifyHistogramStats(bool use_rtx, bool use_red) {
EXPECT_EQ(1, test::NumHistogramSamples(
"WebRTC.Video.RetransmittedBitrateReceivedInKbps"));
+ EXPECT_EQ(1, test::NumHistogramSamples(video_prefix + "SendSideDelayInMs"));
+ EXPECT_EQ(1,
+ test::NumHistogramSamples(video_prefix + "SendSideDelayMaxInMs"));
+
int num_rtx_samples = use_rtx ? 1 : 0;
EXPECT_EQ(num_rtx_samples, test::NumHistogramSamples(
"WebRTC.Video.RtxBitrateSentInKbps"));
@@ -2026,13 +2120,22 @@ void EndToEndTest::VerifyHistogramStats(bool use_rtx, bool use_red) {
TEST_F(EndToEndTest, VerifyHistogramStatsWithRtx) {
const bool kEnabledRtx = true;
const bool kEnabledRed = false;
- VerifyHistogramStats(kEnabledRtx, kEnabledRed);
+ const bool kScreenshare = false;
+ VerifyHistogramStats(kEnabledRtx, kEnabledRed, kScreenshare);
}
TEST_F(EndToEndTest, VerifyHistogramStatsWithRed) {
const bool kEnabledRtx = false;
const bool kEnabledRed = true;
- VerifyHistogramStats(kEnabledRtx, kEnabledRed);
+ const bool kScreenshare = false;
+ VerifyHistogramStats(kEnabledRtx, kEnabledRed, kScreenshare);
+}
+
+TEST_F(EndToEndTest, VerifyHistogramStatsWithScreenshare) {
+ const bool kEnabledRtx = false;
+ const bool kEnabledRed = false;
+ const bool kScreenshare = true;
+ VerifyHistogramStats(kEnabledRtx, kEnabledRed, kScreenshare);
}
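Because the screenshare run records its per-stream metrics under a separate prefix, the expectations above assemble histogram names at runtime. The rule, as a helper (a sketch; only the two prefixes appear in this patch):

    #include <string>

    // Yields e.g. "WebRTC.Video.Screenshare.EncodeTimeInMs" or
    // "WebRTC.Video.EncodeTimeInMs", matching the checks above.
    std::string VideoHistogramName(bool screenshare, const std::string& suffix) {
      const std::string prefix =
          screenshare ? "WebRTC.Video.Screenshare." : "WebRTC.Video.";
      return prefix + suffix;
    }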
void EndToEndTest::TestXrReceiverReferenceTimeReport(bool enable_rrtr) {
@@ -2050,6 +2153,7 @@ void EndToEndTest::TestXrReceiverReferenceTimeReport(bool enable_rrtr) {
private:
// Receive stream should send RR packets (and RRTR packets if enabled).
Action OnReceiveRtcp(const uint8_t* packet, size_t length) override {
+ rtc::CritScope lock(&crit_);
RTCPUtility::RTCPParserV2 parser(packet, length, true);
EXPECT_TRUE(parser.IsValid());
@@ -2070,6 +2174,7 @@ void EndToEndTest::TestXrReceiverReferenceTimeReport(bool enable_rrtr) {
}
// Send stream should send SR packets (and DLRR packets if enabled).
virtual Action OnSendRtcp(const uint8_t* packet, size_t length) {
+ rtc::CritScope lock(&crit_);
RTCPUtility::RTCPParserV2 parser(packet, length, true);
EXPECT_TRUE(parser.IsValid());
@@ -2094,32 +2199,34 @@ void EndToEndTest::TestXrReceiverReferenceTimeReport(bool enable_rrtr) {
EXPECT_EQ(0, sent_rtcp_rrtr_);
EXPECT_EQ(0, sent_rtcp_dlrr_);
}
- observation_complete_->Set();
+ observation_complete_.Set();
}
return SEND_PACKET;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
(*receive_configs)[0].rtp.rtcp_mode = RtcpMode::kReducedSize;
(*receive_configs)[0].rtp.rtcp_xr.receiver_reference_time_report =
enable_rrtr_;
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
+ EXPECT_TRUE(Wait())
<< "Timed out while waiting for RTCP SR/RR packets to be sent.";
}
+ rtc::CriticalSection crit_;
bool enable_rrtr_;
int sent_rtcp_sr_;
- int sent_rtcp_rr_;
- int sent_rtcp_rrtr_;
+ int sent_rtcp_rr_ GUARDED_BY(&crit_);
+ int sent_rtcp_rrtr_ GUARDED_BY(&crit_);
int sent_rtcp_dlrr_;
} test(enable_rrtr);
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
void EndToEndTest::TestSendsSetSsrcs(size_t num_ssrcs,
@@ -2148,28 +2255,29 @@ void EndToEndTest::TestSendsSetSsrcs(size_t num_ssrcs,
<< "Received unknown SSRC: " << header.ssrc;
if (!valid_ssrcs_[header.ssrc])
- observation_complete_->Set();
+ observation_complete_.Set();
if (!is_observed_[header.ssrc]) {
is_observed_[header.ssrc] = true;
--ssrcs_to_observe_;
if (expect_single_ssrc_) {
expect_single_ssrc_ = false;
- observation_complete_->Set();
+ observation_complete_.Set();
}
}
if (ssrcs_to_observe_ == 0)
- observation_complete_->Set();
+ observation_complete_.Set();
return SEND_PACKET;
}
- size_t GetNumStreams() const override { return num_ssrcs_; }
+ size_t GetNumVideoStreams() const override { return num_ssrcs_; }
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
if (num_ssrcs_ > 1) {
// Set low simulcast bitrates to not have to wait for bandwidth ramp-up.
for (size_t i = 0; i < encoder_config->streams.size(); ++i) {
@@ -2179,27 +2287,27 @@ void EndToEndTest::TestSendsSetSsrcs(size_t num_ssrcs,
}
}
- encoder_config_all_streams_ = *encoder_config;
+ video_encoder_config_all_streams_ = *encoder_config;
if (send_single_ssrc_first_)
encoder_config->streams.resize(1);
}
- void OnStreamsCreated(
+ void OnVideoStreamsCreated(
VideoSendStream* send_stream,
const std::vector<VideoReceiveStream*>& receive_streams) override {
send_stream_ = send_stream;
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
- << "Timed out while waiting for "
- << (send_single_ssrc_first_ ? "first SSRC." : "SSRCs.");
+ EXPECT_TRUE(Wait()) << "Timed out while waiting for "
+ << (send_single_ssrc_first_ ? "first SSRC."
+ : "SSRCs.");
if (send_single_ssrc_first_) {
// Set full simulcast and continue with the rest of the SSRCs.
- send_stream_->ReconfigureVideoEncoder(encoder_config_all_streams_);
- EXPECT_EQ(kEventSignaled, Wait())
- << "Timed out while waiting on additional SSRCs.";
+ send_stream_->ReconfigureVideoEncoder(
+ video_encoder_config_all_streams_);
+ EXPECT_TRUE(Wait()) << "Timed out while waiting on additional SSRCs.";
}
}
@@ -2214,10 +2322,10 @@ void EndToEndTest::TestSendsSetSsrcs(size_t num_ssrcs,
bool expect_single_ssrc_;
VideoSendStream* send_stream_;
- VideoEncoderConfig encoder_config_all_streams_;
- } test(kSendSsrcs, num_ssrcs, send_single_ssrc_first);
+ VideoEncoderConfig video_encoder_config_all_streams_;
+ } test(kVideoSendSsrcs, num_ssrcs, send_single_ssrc_first);
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(EndToEndTest, ReportsSetEncoderRates) {
@@ -2230,15 +2338,16 @@ TEST_F(EndToEndTest, ReportsSetEncoderRates) {
send_stream_(nullptr),
bitrate_kbps_(0) {}
- void OnStreamsCreated(
+ void OnVideoStreamsCreated(
VideoSendStream* send_stream,
const std::vector<VideoReceiveStream*>& receive_streams) override {
send_stream_ = send_stream;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->encoder_settings.encoder = this;
}
@@ -2248,15 +2357,15 @@ TEST_F(EndToEndTest, ReportsSetEncoderRates) {
return 0;
rtc::CritScope lock(&crit_);
bitrate_kbps_ = new_target_bitrate;
- observation_complete_->Set();
+ observation_complete_.Set();
return 0;
}
void PerformTest() override {
- ASSERT_EQ(kEventSignaled, Wait())
+ ASSERT_TRUE(Wait())
<< "Timed out while waiting for encoder SetRates() call.";
// Wait for GetStats to report a corresponding bitrate.
- for (unsigned int i = 0; i < kDefaultTimeoutMs; ++i) {
+ for (int i = 0; i < kDefaultTimeoutMs; ++i) {
VideoSendStream::Stats stats = send_stream_->GetStats();
{
rtc::CritScope lock(&crit_);
@@ -2277,7 +2386,7 @@ TEST_F(EndToEndTest, ReportsSetEncoderRates) {
uint32_t bitrate_kbps_ GUARDED_BY(crit_);
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(EndToEndTest, GetStats) {
@@ -2289,26 +2398,26 @@ TEST_F(EndToEndTest, GetStats) {
: EndToEndTest(kLongTimeoutMs),
send_stream_(nullptr),
expected_send_ssrcs_(),
- check_stats_event_(EventWrapper::Create()) {}
+ check_stats_event_(false, false) {}
private:
Action OnSendRtp(const uint8_t* packet, size_t length) override {
- check_stats_event_->Set();
+ check_stats_event_.Set();
return SEND_PACKET;
}
Action OnSendRtcp(const uint8_t* packet, size_t length) override {
- check_stats_event_->Set();
+ check_stats_event_.Set();
return SEND_PACKET;
}
Action OnReceiveRtp(const uint8_t* packet, size_t length) override {
- check_stats_event_->Set();
+ check_stats_event_.Set();
return SEND_PACKET;
}
Action OnReceiveRtcp(const uint8_t* packet, size_t length) override {
- check_stats_event_->Set();
+ check_stats_event_.Set();
return SEND_PACKET;
}
@@ -2330,6 +2439,9 @@ TEST_F(EndToEndTest, GetStats) {
receive_stats_filled_["IncomingRate"] |=
stats.network_frame_rate != 0 || stats.total_bitrate_bps != 0;
+ send_stats_filled_["DecoderImplementationName"] |=
+ stats.decoder_implementation_name ==
+ test::FakeDecoder::kImplementationName;
receive_stats_filled_["RenderDelayAsHighAsExpected"] |=
stats.render_delay_ms >= kExpectedRenderDelayMs;
@@ -2367,9 +2479,9 @@ TEST_F(EndToEndTest, GetStats) {
stats.rtcp_packet_type_counts.unique_nack_requests != 0;
assert(stats.current_payload_type == -1 ||
- stats.current_payload_type == kFakeSendPayloadType);
+ stats.current_payload_type == kFakeVideoSendPayloadType);
receive_stats_filled_["IncomingPayloadType"] |=
- stats.current_payload_type == kFakeSendPayloadType;
+ stats.current_payload_type == kFakeVideoSendPayloadType;
}
return AllStatsFilled(receive_stats_filled_);
@@ -2385,6 +2497,10 @@ TEST_F(EndToEndTest, GetStats) {
send_stats_filled_["CpuOveruseMetrics"] |=
stats.avg_encode_time_ms != 0 || stats.encode_usage_percent != 0;
+ send_stats_filled_["EncoderImplementationName"] |=
+ stats.encoder_implementation_name ==
+ test::FakeEncoder::kImplementationName;
+
for (std::map<uint32_t, VideoSendStream::StreamStats>::const_iterator it =
stats.substreams.begin();
it != stats.substreams.end(); ++it) {
@@ -2450,15 +2566,23 @@ TEST_F(EndToEndTest, GetStats) {
return true;
}
+ test::PacketTransport* CreateSendTransport(Call* sender_call) override {
+ FakeNetworkPipe::Config network_config;
+ network_config.loss_percent = 5;
+ return new test::PacketTransport(
+ sender_call, this, test::PacketTransport::kSender, network_config);
+ }
+
Call::Config GetSenderCallConfig() override {
Call::Config config = EndToEndTest::GetSenderCallConfig();
config.bitrate_config.start_bitrate_bps = kStartBitrateBps;
return config;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->pre_encode_callback = this; // Used to inject delay.
expected_cname_ = send_config->rtp.c_name = "SomeCName";
@@ -2471,9 +2595,9 @@ TEST_F(EndToEndTest, GetStats) {
}
}
- size_t GetNumStreams() const override { return kNumSsrcs; }
+ size_t GetNumVideoStreams() const override { return kNumSsrcs; }
- void OnStreamsCreated(
+ void OnVideoStreamsCreated(
VideoSendStream* send_stream,
const std::vector<VideoReceiveStream*>& receive_streams) override {
send_stream_ = send_stream;
@@ -2498,7 +2622,7 @@ TEST_F(EndToEndTest, GetStats) {
      int64_t time_until_timeout = stop_time - now;
      if (time_until_timeout > 0)
-        check_stats_event_->Wait(time_until_timeout);
+        check_stats_event_.Wait(time_until_timeout);
now = clock->TimeInMilliseconds();
}
@@ -2532,12 +2656,10 @@ TEST_F(EndToEndTest, GetStats) {
std::set<uint32_t> expected_send_ssrcs_;
std::string expected_cname_;
- rtc::scoped_ptr<EventWrapper> check_stats_event_;
+ rtc::Event check_stats_event_;
} test;
- FakeNetworkPipe::Config network_config;
- network_config.loss_percent = 5;
- RunBaseTest(&test, network_config);
+ RunBaseTest(&test);
}
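Note the structural change in GetStats: the 5% loss is no longer handed to RunBaseTest() as a FakeNetworkPipe::Config argument; the test now owns it by overriding CreateSendTransport(). The injection point, extracted from the hunk above:

    // Inside a test::EndToEndTest subclass: build the sender-side transport
    // with simulated loss instead of configuring the harness globally.
    test::PacketTransport* CreateSendTransport(Call* sender_call) override {
      FakeNetworkPipe::Config network_config;
      network_config.loss_percent = 5;  // Drop 5% of sent packets.
      return new test::PacketTransport(
          sender_call, this, test::PacketTransport::kSender, network_config);
    }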
TEST_F(EndToEndTest, ReceiverReferenceTimeReportEnabled) {
@@ -2558,7 +2680,7 @@ TEST_F(EndToEndTest, TestReceivedRtpPacketStats) {
sent_rtp_(0) {}
private:
- void OnStreamsCreated(
+ void OnVideoStreamsCreated(
VideoSendStream* send_stream,
const std::vector<VideoReceiveStream*>& receive_streams) override {
receive_stream_ = receive_streams[0];
@@ -2568,7 +2690,7 @@ TEST_F(EndToEndTest, TestReceivedRtpPacketStats) {
if (sent_rtp_ >= kNumRtpPacketsToSend) {
VideoReceiveStream::Stats stats = receive_stream_->GetStats();
if (kNumRtpPacketsToSend == stats.rtp_stats.transmitted.packets) {
- observation_complete_->Set();
+ observation_complete_.Set();
}
return DROP_PACKET;
}
@@ -2577,7 +2699,7 @@ TEST_F(EndToEndTest, TestReceivedRtpPacketStats) {
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
+ EXPECT_TRUE(Wait())
<< "Timed out while verifying number of received RTP packets.";
}
@@ -2585,7 +2707,7 @@ TEST_F(EndToEndTest, TestReceivedRtpPacketStats) {
uint32_t sent_rtp_;
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(EndToEndTest, SendsSetSsrc) { TestSendsSetSsrcs(1, false); }
@@ -2626,17 +2748,18 @@ TEST_F(EndToEndTest, DISABLED_RedundantPayloadsTransmittedOnAllSsrcs) {
if (!observed_redundant_retransmission_[header.ssrc]) {
observed_redundant_retransmission_[header.ssrc] = true;
if (--ssrcs_to_observe_ == 0)
- observation_complete_->Set();
+ observation_complete_.Set();
}
return SEND_PACKET;
}
- size_t GetNumStreams() const override { return kNumSsrcs; }
+ size_t GetNumVideoStreams() const override { return kNumSsrcs; }
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
// Set low simulcast bitrates to not have to wait for bandwidth ramp-up.
for (size_t i = 0; i < encoder_config->streams.size(); ++i) {
encoder_config->streams[i].min_bitrate_bps = 10000;
@@ -2655,7 +2778,7 @@ TEST_F(EndToEndTest, DISABLED_RedundantPayloadsTransmittedOnAllSsrcs) {
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
+ EXPECT_TRUE(Wait())
<< "Timed out while waiting for redundant payloads on all SSRCs.";
}
@@ -2665,7 +2788,7 @@ TEST_F(EndToEndTest, DISABLED_RedundantPayloadsTransmittedOnAllSsrcs) {
std::map<uint32_t, bool> registered_rtx_ssrc_;
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
void EndToEndTest::TestRtpStatePreservation(bool use_rtx) {
@@ -2677,7 +2800,7 @@ void EndToEndTest::TestRtpStatePreservation(bool use_rtx) {
: test::RtpRtcpObserver(kDefaultTimeoutMs),
ssrcs_to_observe_(kNumSsrcs) {
for (size_t i = 0; i < kNumSsrcs; ++i) {
- configured_ssrcs_[kSendSsrcs[i]] = true;
+ configured_ssrcs_[kVideoSendSsrcs[i]] = true;
if (use_rtx)
configured_ssrcs_[kSendRtxSsrcs[i]] = true;
}
@@ -2744,7 +2867,7 @@ void EndToEndTest::TestRtpStatePreservation(bool use_rtx) {
if (!ssrc_observed_[ssrc] && !only_padding) {
ssrc_observed_[ssrc] = true;
if (--ssrcs_to_observe_ == 0)
- observation_complete_->Set();
+ observation_complete_.Set();
}
return SEND_PACKET;
@@ -2770,77 +2893,75 @@ void EndToEndTest::TestRtpStatePreservation(bool use_rtx) {
send_transport.SetReceiver(receiver_call_->Receiver());
receive_transport.SetReceiver(sender_call_->Receiver());
- CreateSendConfig(kNumSsrcs, &send_transport);
+ CreateSendConfig(kNumSsrcs, 0, &send_transport);
if (use_rtx) {
for (size_t i = 0; i < kNumSsrcs; ++i) {
- send_config_.rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[i]);
+ video_send_config_.rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[i]);
}
- send_config_.rtp.rtx.payload_type = kSendRtxPayloadType;
+ video_send_config_.rtp.rtx.payload_type = kSendRtxPayloadType;
}
// Lower bitrates so that all streams send initially.
- for (size_t i = 0; i < encoder_config_.streams.size(); ++i) {
- encoder_config_.streams[i].min_bitrate_bps = 10000;
- encoder_config_.streams[i].target_bitrate_bps = 15000;
- encoder_config_.streams[i].max_bitrate_bps = 20000;
+ for (size_t i = 0; i < video_encoder_config_.streams.size(); ++i) {
+ video_encoder_config_.streams[i].min_bitrate_bps = 10000;
+ video_encoder_config_.streams[i].target_bitrate_bps = 15000;
+ video_encoder_config_.streams[i].max_bitrate_bps = 20000;
}
// Use the same total bitrates when sending a single stream to avoid lowering
// the bitrate estimate and requiring a subsequent ramp-up.
- VideoEncoderConfig one_stream = encoder_config_;
+ VideoEncoderConfig one_stream = video_encoder_config_;
one_stream.streams.resize(1);
- for (size_t i = 1; i < encoder_config_.streams.size(); ++i) {
+ for (size_t i = 1; i < video_encoder_config_.streams.size(); ++i) {
one_stream.streams.front().min_bitrate_bps +=
- encoder_config_.streams[i].min_bitrate_bps;
+ video_encoder_config_.streams[i].min_bitrate_bps;
one_stream.streams.front().target_bitrate_bps +=
- encoder_config_.streams[i].target_bitrate_bps;
+ video_encoder_config_.streams[i].target_bitrate_bps;
one_stream.streams.front().max_bitrate_bps +=
- encoder_config_.streams[i].max_bitrate_bps;
+ video_encoder_config_.streams[i].max_bitrate_bps;
}
CreateMatchingReceiveConfigs(&receive_transport);
- CreateStreams();
+ CreateVideoStreams();
CreateFrameGeneratorCapturer();
Start();
- EXPECT_EQ(kEventSignaled, observer.Wait())
+ EXPECT_TRUE(observer.Wait())
<< "Timed out waiting for all SSRCs to send packets.";
  // Test stream resetting more than once to make sure that the state isn't
  // only applied the first time (as could happen with std::map::insert, for
  // instance).
for (size_t i = 0; i < 3; ++i) {
frame_generator_capturer_->Stop();
- sender_call_->DestroyVideoSendStream(send_stream_);
+ sender_call_->DestroyVideoSendStream(video_send_stream_);
// Re-create VideoSendStream with only one stream.
- send_stream_ =
- sender_call_->CreateVideoSendStream(send_config_, one_stream);
- send_stream_->Start();
+ video_send_stream_ =
+ sender_call_->CreateVideoSendStream(video_send_config_, one_stream);
+ video_send_stream_->Start();
CreateFrameGeneratorCapturer();
frame_generator_capturer_->Start();
observer.ResetExpectedSsrcs(1);
- EXPECT_EQ(kEventSignaled, observer.Wait())
- << "Timed out waiting for single RTP packet.";
+ EXPECT_TRUE(observer.Wait()) << "Timed out waiting for single RTP packet.";
// Reconfigure back to use all streams.
- send_stream_->ReconfigureVideoEncoder(encoder_config_);
+ video_send_stream_->ReconfigureVideoEncoder(video_encoder_config_);
observer.ResetExpectedSsrcs(kNumSsrcs);
- EXPECT_EQ(kEventSignaled, observer.Wait())
+ EXPECT_TRUE(observer.Wait())
<< "Timed out waiting for all SSRCs to send packets.";
// Reconfigure down to one stream.
- send_stream_->ReconfigureVideoEncoder(one_stream);
+ video_send_stream_->ReconfigureVideoEncoder(one_stream);
observer.ResetExpectedSsrcs(1);
- EXPECT_EQ(kEventSignaled, observer.Wait())
- << "Timed out waiting for single RTP packet.";
+ EXPECT_TRUE(observer.Wait()) << "Timed out waiting for single RTP packet.";
// Reconfigure back to use all streams.
- send_stream_->ReconfigureVideoEncoder(encoder_config_);
+ video_send_stream_->ReconfigureVideoEncoder(video_encoder_config_);
observer.ResetExpectedSsrcs(kNumSsrcs);
- EXPECT_EQ(kEventSignaled, observer.Wait())
+ EXPECT_TRUE(observer.Wait())
<< "Timed out waiting for all SSRCs to send packets.";
}
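The one_stream config built before this loop deliberately preserves the aggregate bitrate when simulcast is collapsed, so the bandwidth estimate never drops and no fresh ramp-up is needed between reconfigurations. Pulled out as a helper, the computation is:

    // Collapse a simulcast config to a single stream whose min/target/max
    // bitrates are the sums over all original streams.
    VideoEncoderConfig CollapseToSingleStream(
        const VideoEncoderConfig& simulcast) {
      VideoEncoderConfig one_stream = simulcast;
      one_stream.streams.resize(1);
      for (size_t i = 1; i < simulcast.streams.size(); ++i) {
        one_stream.streams.front().min_bitrate_bps +=
            simulcast.streams[i].min_bitrate_bps;
        one_stream.streams.front().target_bitrate_bps +=
            simulcast.streams[i].target_bitrate_bps;
        one_stream.streams.front().max_bitrate_bps +=
            simulcast.streams[i].max_bitrate_bps;
      }
      return one_stream;
    }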
@@ -2874,8 +2995,8 @@ TEST_F(EndToEndTest, RespectsNetworkState) {
NetworkStateTest()
: EndToEndTest(kDefaultTimeoutMs),
FakeEncoder(Clock::GetRealTimeClock()),
- encoded_frames_(EventWrapper::Create()),
- packet_event_(EventWrapper::Create()),
+ encoded_frames_(false, false),
+ packet_event_(false, false),
sender_call_(nullptr),
receiver_call_(nullptr),
sender_state_(kNetworkUp),
@@ -2887,14 +3008,14 @@ TEST_F(EndToEndTest, RespectsNetworkState) {
Action OnSendRtp(const uint8_t* packet, size_t length) override {
rtc::CritScope lock(&test_crit_);
++sender_rtp_;
- packet_event_->Set();
+ packet_event_.Set();
return SEND_PACKET;
}
Action OnSendRtcp(const uint8_t* packet, size_t length) override {
rtc::CritScope lock(&test_crit_);
++sender_rtcp_;
- packet_event_->Set();
+ packet_event_.Set();
return SEND_PACKET;
}
@@ -2906,7 +3027,7 @@ TEST_F(EndToEndTest, RespectsNetworkState) {
Action OnReceiveRtcp(const uint8_t* packet, size_t length) override {
rtc::CritScope lock(&test_crit_);
++receiver_rtcp_;
- packet_event_->Set();
+ packet_event_.Set();
return SEND_PACKET;
}
@@ -2915,14 +3036,15 @@ TEST_F(EndToEndTest, RespectsNetworkState) {
receiver_call_ = receiver_call;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->encoder_settings.encoder = this;
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, encoded_frames_->Wait(kDefaultTimeoutMs))
+ EXPECT_TRUE(encoded_frames_.Wait(kDefaultTimeoutMs))
<< "No frames received by the encoder.";
// Wait for packets from both sender/receiver.
WaitForPacketsOrSilence(false, false);
@@ -2963,9 +3085,9 @@ TEST_F(EndToEndTest, RespectsNetworkState) {
EXPECT_LE(down_frames_, 1)
<< "Encoding more than one frame while network is down.";
if (down_frames_ > 1)
- encoded_frames_->Set();
+ encoded_frames_.Set();
} else {
- encoded_frames_->Set();
+ encoded_frames_.Set();
}
}
return test::FakeEncoder::Encode(
@@ -2986,8 +3108,8 @@ TEST_F(EndToEndTest, RespectsNetworkState) {
}
bool sender_done = false;
bool receiver_done = false;
- while(!sender_done || !receiver_done) {
- packet_event_->Wait(kSilenceTimeoutMs);
+ while (!sender_done || !receiver_done) {
+ packet_event_.Wait(kSilenceTimeoutMs);
int64_t time_now_ms = clock_->TimeInMilliseconds();
rtc::CritScope lock(&test_crit_);
if (sender_down) {
@@ -3020,8 +3142,8 @@ TEST_F(EndToEndTest, RespectsNetworkState) {
}
rtc::CriticalSection test_crit_;
- const rtc::scoped_ptr<EventWrapper> encoded_frames_;
- const rtc::scoped_ptr<EventWrapper> packet_event_;
+ rtc::Event encoded_frames_;
+ rtc::Event packet_event_;
Call* sender_call_;
Call* receiver_call_;
NetworkState sender_state_ GUARDED_BY(test_crit_);
@@ -3031,7 +3153,7 @@ TEST_F(EndToEndTest, RespectsNetworkState) {
int down_frames_ GUARDED_BY(test_crit_);
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
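At its core, RespectsNetworkState toggles Call::SignalNetworkState and checks that packets (and encoded frames) stop and resume accordingly. A heavily condensed sketch of that flow; the real test counts sender/receiver packets over a silence window rather than relying on a single event wait, and both timeout values here are assumptions:

    void CheckNetworkStateRespected(Call* call, rtc::Event* packet_event) {
      EXPECT_TRUE(packet_event->Wait(30 * 1000));  // Traffic while up.
      call->SignalNetworkState(kNetworkDown);
      // In-flight packets may still signal the event once; the real test
      // tolerates this by counting packets within the window.
      EXPECT_FALSE(packet_event->Wait(2000));      // Silence while down.
      call->SignalNetworkState(kNetworkUp);
      EXPECT_TRUE(packet_event->Wait(30 * 1000));  // Traffic resumes.
    }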
TEST_F(EndToEndTest, CallReportsRttForSender) {
@@ -3048,10 +3170,10 @@ TEST_F(EndToEndTest, CallReportsRttForSender) {
sender_transport.SetReceiver(receiver_call_->Receiver());
receiver_transport.SetReceiver(sender_call_->Receiver());
- CreateSendConfig(1, &sender_transport);
+ CreateSendConfig(1, 0, &sender_transport);
CreateMatchingReceiveConfigs(&receiver_transport);
- CreateStreams();
+ CreateVideoStreams();
CreateFrameGeneratorCapturer();
Start();
@@ -3074,7 +3196,7 @@ TEST_F(EndToEndTest, CallReportsRttForSender) {
TEST_F(EndToEndTest, NewSendStreamsRespectNetworkDown) {
class UnusedEncoder : public test::FakeEncoder {
- public:
+ public:
UnusedEncoder() : FakeEncoder(Clock::GetRealTimeClock()) {}
int32_t Encode(const VideoFrame& input_image,
const CodecSpecificInfo* codec_specific_info,
@@ -3089,10 +3211,10 @@ TEST_F(EndToEndTest, NewSendStreamsRespectNetworkDown) {
sender_call_->SignalNetworkState(kNetworkDown);
UnusedTransport transport;
- CreateSendConfig(1, &transport);
+ CreateSendConfig(1, 0, &transport);
UnusedEncoder unused_encoder;
- send_config_.encoder_settings.encoder = &unused_encoder;
- CreateStreams();
+ video_send_config_.encoder_settings.encoder = &unused_encoder;
+ CreateVideoStreams();
CreateFrameGeneratorCapturer();
Start();
@@ -3108,10 +3230,10 @@ TEST_F(EndToEndTest, NewReceiveStreamsRespectNetworkDown) {
test::DirectTransport sender_transport(sender_call_.get());
sender_transport.SetReceiver(receiver_call_->Receiver());
- CreateSendConfig(1, &sender_transport);
+ CreateSendConfig(1, 0, &sender_transport);
UnusedTransport transport;
CreateMatchingReceiveConfigs(&transport);
- CreateStreams();
+ CreateVideoStreams();
CreateFrameGeneratorCapturer();
Start();
@@ -3168,4 +3290,76 @@ TEST_F(EndToEndTest, VerifyDefaultReceiveConfigParameters) {
VerifyEmptyFecConfig(default_receive_config.rtp.fec);
}
+TEST_F(EndToEndTest, TransportSeqNumOnAudioAndVideo) {
+ static const int kExtensionId = 8;
+ class TransportSequenceNumberTest : public test::EndToEndTest {
+ public:
+ TransportSequenceNumberTest()
+ : EndToEndTest(kDefaultTimeoutMs),
+ video_observed_(false),
+ audio_observed_(false) {
+ parser_->RegisterRtpHeaderExtension(kRtpExtensionTransportSequenceNumber,
+ kExtensionId);
+ }
+
+ size_t GetNumVideoStreams() const override { return 1; }
+ size_t GetNumAudioStreams() const override { return 1; }
+
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
+ send_config->rtp.extensions.clear();
+ send_config->rtp.extensions.push_back(
+ RtpExtension(RtpExtension::kTransportSequenceNumber, kExtensionId));
+ (*receive_configs)[0].rtp.extensions = send_config->rtp.extensions;
+ }
+
+ void ModifyAudioConfigs(
+ AudioSendStream::Config* send_config,
+ std::vector<AudioReceiveStream::Config>* receive_configs) override {
+ send_config->rtp.extensions.clear();
+ send_config->rtp.extensions.push_back(
+ RtpExtension(RtpExtension::kTransportSequenceNumber, kExtensionId));
+ (*receive_configs)[0].rtp.extensions.clear();
+ (*receive_configs)[0].rtp.extensions = send_config->rtp.extensions;
+ }
+
+ Action OnSendRtp(const uint8_t* packet, size_t length) override {
+ RTPHeader header;
+ EXPECT_TRUE(parser_->Parse(packet, length, &header));
+ EXPECT_TRUE(header.extension.hasTransportSequenceNumber);
+ // Unwrap packet id and verify uniqueness.
+ int64_t packet_id =
+ unwrapper_.Unwrap(header.extension.transportSequenceNumber);
+ EXPECT_TRUE(received_packet_ids_.insert(packet_id).second);
+
+ if (header.ssrc == kVideoSendSsrcs[0])
+ video_observed_ = true;
+ if (header.ssrc == kAudioSendSsrc)
+ audio_observed_ = true;
+ if (audio_observed_ && video_observed_ &&
+ received_packet_ids_.size() == 50) {
+ size_t packet_id_range =
+ *received_packet_ids_.rbegin() - *received_packet_ids_.begin() + 1;
+ EXPECT_EQ(received_packet_ids_.size(), packet_id_range);
+ observation_complete_.Set();
+ }
+ return SEND_PACKET;
+ }
+
+ void PerformTest() override {
+ EXPECT_TRUE(Wait()) << "Timed out while waiting for audio and video "
+ "packets with transport sequence number.";
+ }
+
+ private:
+ bool video_observed_;
+ bool audio_observed_;
+ SequenceNumberUnwrapper unwrapper_;
+ std::set<int64_t> received_packet_ids_;
+ } test;
+
+ RunBaseTest(&test);
+}
} // namespace webrtc
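The uniqueness check in OnSendRtp() above depends on unwrapping the 16-bit transport-wide sequence number into a monotonically increasing 64-bit id, so the std::set catches duplicates and the rbegin/begin range comparison catches gaps across both media types. A minimal sketch of such an unwrapper, using the usual shortest-distance rule for mod-2^16 counters (illustrative only, not the SequenceNumberUnwrapper implementation itself):

    #include <cstdint>

    // Unwraps a 16-bit wrap-around counter into a monotonic 64-bit value by
    // picking the candidate closest to the previous output.
    class Unwrapper16 {
     public:
      int64_t Unwrap(uint16_t value) {
        if (last_ == -1) {
          last_ = value;  // First sample defines the starting point.
        } else {
          int64_t delta = value - (last_ & 0xFFFF);
          if (delta > 0x8000)
            delta -= 0x10000;  // Small step backwards across the wrap.
          else if (delta < -0x8000)
            delta += 0x10000;  // Small step forwards across the wrap.
          last_ += delta;
        }
        return last_;
      }

     private:
      int64_t last_ = -1;
    };

With ids unwrapped this way, received_packet_ids_.size() equals the id range exactly when no id in the window was lost or duplicated, which is what the test asserts after 50 packets.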
diff --git a/webrtc/video/full_stack.cc b/webrtc/video/full_stack.cc
index 8511b8281e..e870c1ff14 100644
--- a/webrtc/video/full_stack.cc
+++ b/webrtc/video/full_stack.cc
@@ -23,6 +23,15 @@ class FullStackTest : public VideoQualityTest {
}
};
+// VideoQualityTest::Params params = {
+// { ... }, // Common.
+// { ... }, // Video-specific settings.
+// { ... }, // Screenshare-specific settings.
+// { ... }, // Analyzer settings.
+// pipe, // FakeNetworkPipe::Config
+// { ... }, // Spatial scalability.
+// logs // bool
+// };
TEST_F(FullStackTest, ParisQcifWithoutPacketLoss) {
VideoQualityTest::Params paris_qcif = {
@@ -120,28 +129,55 @@ TEST_F(FullStackTest, ForemanCif1000kbps100msLimitedQueue) {
TEST_F(FullStackTest, ScreenshareSlidesVP8_2TL) {
VideoQualityTest::Params screenshare = {
- {1850, 1110, 5, 50000, 200000, 2000000, "VP8", 2, 400000},
- {}, // Video-specific.
- {true, 10}, // Screenshare-specific.
+ {1850, 1110, 5, 50000, 200000, 2000000, "VP8", 2, 1, 400000},
+ {},
+ {true, 10},
{"screenshare_slides", 0.0, 0.0, kFullStackTestDurationSecs}};
RunTest(screenshare);
}
TEST_F(FullStackTest, ScreenshareSlidesVP8_2TL_Scroll) {
VideoQualityTest::Params config = {
- {1850, 1110 / 2, 5, 50000, 200000, 2000000, "VP8", 2, 400000},
+ {1850, 1110 / 2, 5, 50000, 200000, 2000000, "VP8", 2, 1, 400000},
{},
{true, 10, 2},
{"screenshare_slides_scrolling", 0.0, 0.0, kFullStackTestDurationSecs}};
RunTest(config);
}
-TEST_F(FullStackTest, ScreenshareSlidesVP9_2TL) {
+TEST_F(FullStackTest, ScreenshareSlidesVP8_2TL_LossyNet) {
VideoQualityTest::Params screenshare = {
- {1850, 1110, 5, 50000, 200000, 2000000, "VP9", 2, 400000},
+ {1850, 1110, 5, 50000, 200000, 2000000, "VP8", 2, 1, 400000},
+ {}, // Video-specific.
+ {true, 10}, // Screenshare-specific.
+ {"screenshare_slides_lossy_net", 0.0, 0.0, kFullStackTestDurationSecs}};
+ screenshare.pipe.loss_percent = 5;
+ screenshare.pipe.queue_delay_ms = 200;
+ screenshare.pipe.link_capacity_kbps = 500;
+ RunTest(screenshare);
+}
+
+TEST_F(FullStackTest, ScreenshareSlidesVP8_2TL_VeryLossyNet) {
+ VideoQualityTest::Params screenshare = {
+ {1850, 1110, 5, 50000, 200000, 2000000, "VP8", 2, 1, 400000},
+ {}, // Video-specific.
+ {true, 10}, // Screenshare-specific.
+ {"screenshare_slides_very_lossy", 0.0, 0.0, kFullStackTestDurationSecs}};
+ screenshare.pipe.loss_percent = 10;
+ screenshare.pipe.queue_delay_ms = 200;
+ screenshare.pipe.link_capacity_kbps = 500;
+ RunTest(screenshare);
+}
+
+TEST_F(FullStackTest, ScreenshareSlidesVP9_2SL) {
+ VideoQualityTest::Params screenshare = {
+ {1850, 1110, 5, 50000, 200000, 2000000, "VP9", 1, 0, 400000},
{},
{true, 10},
- {"screenshare_slides_vp9_2tl", 0.0, 0.0, kFullStackTestDurationSecs}};
+ {"screenshare_slides_vp9_2sl", 0.0, 0.0, kFullStackTestDurationSecs},
+ {},
+ false,
+ {std::vector<VideoStream>(), 0, 2, 1}};
RunTest(screenshare);
}
} // namespace webrtc
diff --git a/webrtc/video/overuse_frame_detector.cc b/webrtc/video/overuse_frame_detector.cc
new file mode 100644
index 0000000000..d971ad9d3e
--- /dev/null
+++ b/webrtc/video/overuse_frame_detector.cc
@@ -0,0 +1,364 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video/overuse_frame_detector.h"
+
+#include <assert.h>
+#include <math.h>
+
+#include <algorithm>
+#include <list>
+#include <map>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/exp_filter.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/system_wrappers/include/clock.h"
+
+namespace webrtc {
+
+namespace {
+const int64_t kProcessIntervalMs = 5000;
+
+// Delay between consecutive rampups. (Used for quick recovery.)
+const int kQuickRampUpDelayMs = 10 * 1000;
+// Delay between rampup attempts. Initially uses standard, scales up to max.
+const int kStandardRampUpDelayMs = 40 * 1000;
+const int kMaxRampUpDelayMs = 240 * 1000;
+// Exponential back-off factor, to prevent annoying up-down behaviour.
+const double kRampUpBackoffFactor = 2.0;
+
+// Max number of overuses detected before always applying the rampup delay.
+const int kMaxOverusesBeforeApplyRampupDelay = 4;
+
+// Nominal frame interval used to scale the filter exponents.
+const float kSampleDiffMs = 33.0f;
+// The maximum exponent to use in rtc::ExpFilter.
+const float kMaxExp = 7.0f;
+
+} // namespace
+
+// Class for calculating the processing usage on the send-side (the average
+// processing time of a frame divided by the average time difference between
+// captured frames).
+class OveruseFrameDetector::SendProcessingUsage {
+ public:
+ explicit SendProcessingUsage(const CpuOveruseOptions& options)
+ : kWeightFactorFrameDiff(0.998f),
+ kWeightFactorProcessing(0.995f),
+ kInitialSampleDiffMs(40.0f),
+ kMaxSampleDiffMs(45.0f),
+ count_(0),
+ options_(options),
+ filtered_processing_ms_(new rtc::ExpFilter(kWeightFactorProcessing)),
+ filtered_frame_diff_ms_(new rtc::ExpFilter(kWeightFactorFrameDiff)) {
+ Reset();
+ }
+ ~SendProcessingUsage() {}
+
+ void Reset() {
+ count_ = 0;
+ filtered_frame_diff_ms_->Reset(kWeightFactorFrameDiff);
+ filtered_frame_diff_ms_->Apply(1.0f, kInitialSampleDiffMs);
+ filtered_processing_ms_->Reset(kWeightFactorProcessing);
+ filtered_processing_ms_->Apply(1.0f, InitialProcessingMs());
+ }
+
+ void AddCaptureSample(float sample_ms) {
+ float exp = sample_ms / kSampleDiffMs;
+ exp = std::min(exp, kMaxExp);
+ filtered_frame_diff_ms_->Apply(exp, sample_ms);
+ }
+
+ void AddSample(float processing_ms, int64_t diff_last_sample_ms) {
+ ++count_;
+ float exp = diff_last_sample_ms / kSampleDiffMs;
+ exp = std::min(exp, kMaxExp);
+ filtered_processing_ms_->Apply(exp, processing_ms);
+ }
+
+ int Value() const {
+ if (count_ < static_cast<uint32_t>(options_.min_frame_samples)) {
+ return static_cast<int>(InitialUsageInPercent() + 0.5f);
+ }
+ float frame_diff_ms = std::max(filtered_frame_diff_ms_->filtered(), 1.0f);
+ frame_diff_ms = std::min(frame_diff_ms, kMaxSampleDiffMs);
+ float encode_usage_percent =
+ 100.0f * filtered_processing_ms_->filtered() / frame_diff_ms;
+ return static_cast<int>(encode_usage_percent + 0.5);
+ }
+
+ private:
+ float InitialUsageInPercent() const {
+ // Start in between the underuse and overuse threshold.
+ return (options_.low_encode_usage_threshold_percent +
+ options_.high_encode_usage_threshold_percent) / 2.0f;
+ }
+
+ float InitialProcessingMs() const {
+ return InitialUsageInPercent() * kInitialSampleDiffMs / 100;
+ }
+
+ const float kWeightFactorFrameDiff;
+ const float kWeightFactorProcessing;
+ const float kInitialSampleDiffMs;
+ const float kMaxSampleDiffMs;
+ uint64_t count_;
+ const CpuOveruseOptions options_;
+ rtc::scoped_ptr<rtc::ExpFilter> filtered_processing_ms_;
+ rtc::scoped_ptr<rtc::ExpFilter> filtered_frame_diff_ms_;
+};
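Both members above are exponential smoothers whose per-sample weight depends on how much time the sample spans. A condensed sketch of the estimate, assuming rtc::ExpFilter weighs its previous state by alpha^exp (the names below are illustrative):

    #include <algorithm>
    #include <cmath>

    // Stand-in for rtc::ExpFilter as used above: a sample covering a longer
    // interval (larger exp) displaces more of the old state.
    struct ExpSmoother {
      explicit ExpSmoother(float alpha) : alpha(alpha) {}
      float Apply(float exp, float sample) {
        float w = std::pow(alpha, exp);
        state = w * state + (1.0f - w) * sample;
        return state;
      }
      float alpha;
      float state = 0.0f;
    };

    // Usage as computed in Value(): average encode time over the average
    // capture interval, clamped to [1, 45] ms. For example, 5 ms of work per
    // 33 ms frame gives roughly 15%.
    int UsagePercent(float avg_processing_ms, float avg_frame_diff_ms) {
      float frame_diff = std::min(std::max(avg_frame_diff_ms, 1.0f), 45.0f);
      return static_cast<int>(100.0f * avg_processing_ms / frame_diff + 0.5f);
    }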
+
+// Class for calculating the processing time of frames.
+class OveruseFrameDetector::FrameQueue {
+ public:
+ FrameQueue() : last_processing_time_ms_(-1) {}
+ ~FrameQueue() {}
+
+ // Called when a frame is captured.
+ // Starts the measuring of the processing time of the frame.
+ void Start(int64_t capture_time, int64_t now) {
+ const size_t kMaxSize = 90; // Allows for processing time of 1.5s at 60fps.
+ if (frame_times_.size() > kMaxSize) {
+      LOG(LS_WARNING) << "Max size reached, removing oldest frame.";
+ frame_times_.erase(frame_times_.begin());
+ }
+ if (frame_times_.find(capture_time) != frame_times_.end()) {
+ // Frame should not exist.
+ assert(false);
+ return;
+ }
+ frame_times_[capture_time] = now;
+ }
+
+ // Called when the processing of a frame has finished.
+ // Returns the processing time of the frame.
+ int End(int64_t capture_time, int64_t now) {
+ std::map<int64_t, int64_t>::iterator it = frame_times_.find(capture_time);
+ if (it == frame_times_.end()) {
+ return -1;
+ }
+ // Remove any old frames up to current.
+ // Old frames have been skipped by the capture process thread.
+ // TODO(asapersson): Consider measuring time from first frame in list.
+ last_processing_time_ms_ = now - (*it).second;
+ frame_times_.erase(frame_times_.begin(), ++it);
+ return last_processing_time_ms_;
+ }
+
+ void Reset() { frame_times_.clear(); }
+ int NumFrames() const { return static_cast<int>(frame_times_.size()); }
+ int last_processing_time_ms() const { return last_processing_time_ms_; }
+
+ private:
+ // Captured frames mapped by the capture time.
+ std::map<int64_t, int64_t> frame_times_;
+ int last_processing_time_ms_;
+};
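The map above gives FrameQueue its two guarantees: End() for an unknown capture time returns -1, and a successful End() also evicts every older entry, since those frames were skipped by the capture process thread and will never be sent. The same contract, condensed into a self-contained sketch:

    #include <cstdint>
    #include <map>

    class FrameTimer {
     public:
      // Records when a capture timestamp was first seen.
      void Start(int64_t capture_time, int64_t now) {
        started_[capture_time] = now;
      }
      // Returns elapsed time for a known frame and drops all entries up to
      // and including it; returns -1 for unknown (or already evicted) frames.
      int End(int64_t capture_time, int64_t now) {
        auto it = started_.find(capture_time);
        if (it == started_.end())
          return -1;
        int elapsed_ms = static_cast<int>(now - it->second);
        started_.erase(started_.begin(), ++it);  // Evicts skipped frames too.
        return elapsed_ms;
      }

     private:
      std::map<int64_t, int64_t> started_;  // Capture time -> start time.
    };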
+
+
+OveruseFrameDetector::OveruseFrameDetector(
+ Clock* clock,
+ const CpuOveruseOptions& options,
+ CpuOveruseObserver* observer,
+ CpuOveruseMetricsObserver* metrics_observer)
+ : options_(options),
+ observer_(observer),
+ metrics_observer_(metrics_observer),
+ clock_(clock),
+ num_process_times_(0),
+ last_capture_time_(0),
+ num_pixels_(0),
+ next_process_time_(clock_->TimeInMilliseconds()),
+ last_overuse_time_(0),
+ checks_above_threshold_(0),
+ num_overuse_detections_(0),
+ last_rampup_time_(0),
+ in_quick_rampup_(false),
+ current_rampup_delay_ms_(kStandardRampUpDelayMs),
+ last_sample_time_ms_(0),
+ usage_(new SendProcessingUsage(options)),
+ frame_queue_(new FrameQueue()) {
+ RTC_DCHECK(metrics_observer != nullptr);
+ // Make sure stats are initially up-to-date. This simplifies unit testing
+ // since we don't have to trigger an update using one of the methods which
+ // would also alter the overuse state.
+ UpdateCpuOveruseMetrics();
+ processing_thread_.DetachFromThread();
+}
+
+OveruseFrameDetector::~OveruseFrameDetector() {
+}
+
+int OveruseFrameDetector::LastProcessingTimeMs() const {
+ rtc::CritScope cs(&crit_);
+ return frame_queue_->last_processing_time_ms();
+}
+
+int OveruseFrameDetector::FramesInQueue() const {
+ rtc::CritScope cs(&crit_);
+ return frame_queue_->NumFrames();
+}
+
+void OveruseFrameDetector::UpdateCpuOveruseMetrics() {
+ metrics_.encode_usage_percent = usage_->Value();
+
+ metrics_observer_->CpuOveruseMetricsUpdated(metrics_);
+}
+
+int64_t OveruseFrameDetector::TimeUntilNextProcess() {
+ RTC_DCHECK(processing_thread_.CalledOnValidThread());
+ return next_process_time_ - clock_->TimeInMilliseconds();
+}
+
+bool OveruseFrameDetector::FrameSizeChanged(int num_pixels) const {
+ if (num_pixels != num_pixels_) {
+ return true;
+ }
+ return false;
+}
+
+bool OveruseFrameDetector::FrameTimeoutDetected(int64_t now) const {
+ if (last_capture_time_ == 0) {
+ return false;
+ }
+ return (now - last_capture_time_) > options_.frame_timeout_interval_ms;
+}
+
+void OveruseFrameDetector::ResetAll(int num_pixels) {
+ num_pixels_ = num_pixels;
+ usage_->Reset();
+ frame_queue_->Reset();
+ last_capture_time_ = 0;
+ num_process_times_ = 0;
+ UpdateCpuOveruseMetrics();
+}
+
+void OveruseFrameDetector::FrameCaptured(int width,
+ int height,
+ int64_t capture_time_ms) {
+ rtc::CritScope cs(&crit_);
+
+ int64_t now = clock_->TimeInMilliseconds();
+ if (FrameSizeChanged(width * height) || FrameTimeoutDetected(now)) {
+ ResetAll(width * height);
+ }
+
+ if (last_capture_time_ != 0)
+ usage_->AddCaptureSample(now - last_capture_time_);
+
+ last_capture_time_ = now;
+
+ frame_queue_->Start(capture_time_ms, now);
+}
+
+void OveruseFrameDetector::FrameSent(int64_t capture_time_ms) {
+ rtc::CritScope cs(&crit_);
+ int delay_ms = frame_queue_->End(capture_time_ms,
+ clock_->TimeInMilliseconds());
+ if (delay_ms > 0) {
+ AddProcessingTime(delay_ms);
+ }
+}
+
+void OveruseFrameDetector::AddProcessingTime(int elapsed_ms) {
+ int64_t now = clock_->TimeInMilliseconds();
+ if (last_sample_time_ms_ != 0) {
+ int64_t diff_ms = now - last_sample_time_ms_;
+ usage_->AddSample(elapsed_ms, diff_ms);
+ }
+ last_sample_time_ms_ = now;
+ UpdateCpuOveruseMetrics();
+}
+
+int32_t OveruseFrameDetector::Process() {
+ RTC_DCHECK(processing_thread_.CalledOnValidThread());
+
+ int64_t now = clock_->TimeInMilliseconds();
+
+ // Used to protect against Process() being called too often.
+ if (now < next_process_time_)
+ return 0;
+
+ next_process_time_ = now + kProcessIntervalMs;
+
+ CpuOveruseMetrics current_metrics;
+ {
+ rtc::CritScope cs(&crit_);
+ ++num_process_times_;
+
+ current_metrics = metrics_;
+ if (num_process_times_ <= options_.min_process_count)
+ return 0;
+ }
+
+ if (IsOverusing(current_metrics)) {
+    // If the last action was a ramp-up and we now have to back down, check
+    // whether this peak was short-lived. If so, back off to avoid oscillating
+    // around a load the system evidently cannot sustain.
+ bool check_for_backoff = last_rampup_time_ > last_overuse_time_;
+ if (check_for_backoff) {
+ if (now - last_rampup_time_ < kStandardRampUpDelayMs ||
+ num_overuse_detections_ > kMaxOverusesBeforeApplyRampupDelay) {
+        // The ramp-up did not hold for long; back off.
+ current_rampup_delay_ms_ *= kRampUpBackoffFactor;
+ if (current_rampup_delay_ms_ > kMaxRampUpDelayMs)
+ current_rampup_delay_ms_ = kMaxRampUpDelayMs;
+ } else {
+ // Not currently backing off, reset rampup delay.
+ current_rampup_delay_ms_ = kStandardRampUpDelayMs;
+ }
+ }
+
+ last_overuse_time_ = now;
+ in_quick_rampup_ = false;
+ checks_above_threshold_ = 0;
+ ++num_overuse_detections_;
+
+ if (observer_ != NULL)
+ observer_->OveruseDetected();
+ } else if (IsUnderusing(current_metrics, now)) {
+ last_rampup_time_ = now;
+ in_quick_rampup_ = true;
+
+ if (observer_ != NULL)
+ observer_->NormalUsage();
+ }
+
+ int rampup_delay =
+ in_quick_rampup_ ? kQuickRampUpDelayMs : current_rampup_delay_ms_;
+
+ LOG(LS_VERBOSE) << " Frame stats: "
+ << " encode usage " << current_metrics.encode_usage_percent
+ << " overuse detections " << num_overuse_detections_
+ << " rampup delay " << rampup_delay;
+
+ return 0;
+}
+
+bool OveruseFrameDetector::IsOverusing(const CpuOveruseMetrics& metrics) {
+ if (metrics.encode_usage_percent >=
+ options_.high_encode_usage_threshold_percent) {
+ ++checks_above_threshold_;
+ } else {
+ checks_above_threshold_ = 0;
+ }
+ return checks_above_threshold_ >= options_.high_threshold_consecutive_count;
+}
+
+bool OveruseFrameDetector::IsUnderusing(const CpuOveruseMetrics& metrics,
+ int64_t time_now) {
+ int delay = in_quick_rampup_ ? kQuickRampUpDelayMs : current_rampup_delay_ms_;
+ if (time_now < last_rampup_time_ + delay)
+ return false;
+
+ return metrics.encode_usage_percent <
+ options_.low_encode_usage_threshold_percent;
+}
+} // namespace webrtc
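The back-off branch in Process() doubles the ramp-up delay each time a ramp-up proves premature, saturating at the four-minute cap, so a chronically overloaded machine converges to infrequent probing. Worked out from the constants above:

    // Progression from kStandardRampUpDelayMs (40 s) under repeated back-off
    // with kRampUpBackoffFactor (2.0), capped at kMaxRampUpDelayMs (240 s):
    // 40 s -> 80 s -> 160 s -> 240 s -> 240 s -> ...
    int delay_ms = 40 * 1000;
    for (int i = 0; i < 4; ++i) {
      delay_ms = static_cast<int>(delay_ms * 2.0);
      if (delay_ms > 240 * 1000)
        delay_ms = 240 * 1000;
    }
    // delay_ms == 240000 here; every later ramp-up attempt waits the full
    // four minutes.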
diff --git a/webrtc/video/overuse_frame_detector.h b/webrtc/video/overuse_frame_detector.h
new file mode 100644
index 0000000000..d2606c19e6
--- /dev/null
+++ b/webrtc/video/overuse_frame_detector.h
@@ -0,0 +1,164 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_OVERUSE_FRAME_DETECTOR_H_
+#define WEBRTC_VIDEO_OVERUSE_FRAME_DETECTOR_H_
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/exp_filter.h"
+#include "webrtc/base/thread_annotations.h"
+#include "webrtc/base/thread_checker.h"
+#include "webrtc/modules/include/module.h"
+
+namespace webrtc {
+
+class Clock;
+
+// CpuOveruseObserver is called when a system overuse is detected and
+// VideoEngine cannot keep up the encoding frequency.
+class CpuOveruseObserver {
+ public:
+ // Called as soon as an overuse is detected.
+ virtual void OveruseDetected() = 0;
+ // Called periodically when the system is not overused any longer.
+ virtual void NormalUsage() = 0;
+
+ protected:
+ virtual ~CpuOveruseObserver() {}
+};
+
+struct CpuOveruseOptions {
+ CpuOveruseOptions()
+ : low_encode_usage_threshold_percent(55),
+ high_encode_usage_threshold_percent(85),
+ frame_timeout_interval_ms(1500),
+ min_frame_samples(120),
+ min_process_count(3),
+ high_threshold_consecutive_count(2) {}
+
+ int low_encode_usage_threshold_percent; // Threshold for triggering underuse.
+ int high_encode_usage_threshold_percent; // Threshold for triggering overuse.
+ // General settings.
+ int frame_timeout_interval_ms; // The maximum allowed interval between two
+ // frames before resetting estimations.
+ int min_frame_samples; // The minimum number of frames required.
+ int min_process_count; // The number of initial process times required before
+ // triggering an overuse/underuse.
+ int high_threshold_consecutive_count; // The number of consecutive checks
+ // above the high threshold before
+ // triggering an overuse.
+};
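The defaults above give a 55%/85% usage window with built-in hysteresis: two consecutive high checks to declare overuse, and three warm-up Process() calls before anything is reported. Embedders (and the unit tests further down) tune these before constructing the detector, for example:

    CpuOveruseOptions options;
    options.min_process_count = 0;  // React already on the first Process().
    options.high_threshold_consecutive_count = 2;  // Two high checks trigger.
    // 'options' is then passed by value to the OveruseFrameDetector
    // constructor and copied into the detector.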
+
+struct CpuOveruseMetrics {
+ CpuOveruseMetrics() : encode_usage_percent(-1) {}
+
+ int encode_usage_percent; // Average encode time divided by the average time
+ // difference between incoming captured frames.
+};
+
+class CpuOveruseMetricsObserver {
+ public:
+ virtual ~CpuOveruseMetricsObserver() {}
+ virtual void CpuOveruseMetricsUpdated(const CpuOveruseMetrics& metrics) = 0;
+};
+
+
+// Used to detect system overuse based on the send-side processing time of
+// incoming frames.
+class OveruseFrameDetector : public Module {
+ public:
+ OveruseFrameDetector(Clock* clock,
+ const CpuOveruseOptions& options,
+ CpuOveruseObserver* overuse_observer,
+ CpuOveruseMetricsObserver* metrics_observer);
+ ~OveruseFrameDetector();
+
+ // Called for each captured frame.
+ void FrameCaptured(int width, int height, int64_t capture_time_ms);
+
+ // Called for each sent frame.
+ void FrameSent(int64_t capture_time_ms);
+
+ // Only public for testing.
+ int LastProcessingTimeMs() const;
+ int FramesInQueue() const;
+
+ // Implements Module.
+ int64_t TimeUntilNextProcess() override;
+ int32_t Process() override;
+
+ private:
+ class SendProcessingUsage;
+ class FrameQueue;
+
+ void UpdateCpuOveruseMetrics() EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ // TODO(asapersson): This method is only used on one thread, so it shouldn't
+ // need a guard.
+ void AddProcessingTime(int elapsed_ms) EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ // Only called on the processing thread.
+ bool IsOverusing(const CpuOveruseMetrics& metrics);
+ bool IsUnderusing(const CpuOveruseMetrics& metrics, int64_t time_now);
+
+ bool FrameTimeoutDetected(int64_t now) const EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ bool FrameSizeChanged(int num_pixels) const EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ void ResetAll(int num_pixels) EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+  // Protects all members except those that are const or only accessed on the
+  // processing thread.
+ // TODO(asapersson): See if we can reduce locking. As is, video frame
+ // processing contends with reading stats and the processing thread.
+ mutable rtc::CriticalSection crit_;
+
+ const CpuOveruseOptions options_;
+
+ // Observer getting overuse reports.
+ CpuOveruseObserver* const observer_;
+
+ // Stats metrics.
+ CpuOveruseMetricsObserver* const metrics_observer_;
+ CpuOveruseMetrics metrics_ GUARDED_BY(crit_);
+
+ Clock* const clock_;
+ int64_t num_process_times_ GUARDED_BY(crit_);
+
+ int64_t last_capture_time_ GUARDED_BY(crit_);
+
+ // Number of pixels of last captured frame.
+ int num_pixels_ GUARDED_BY(crit_);
+
+ // These seven members are only accessed on the processing thread.
+ int64_t next_process_time_;
+ int64_t last_overuse_time_;
+ int checks_above_threshold_;
+ int num_overuse_detections_;
+ int64_t last_rampup_time_;
+ bool in_quick_rampup_;
+ int current_rampup_delay_ms_;
+
+ int64_t last_sample_time_ms_; // Only accessed by one thread.
+
+ // TODO(asapersson): Can these be regular members (avoid separate heap
+ // allocs)?
+ const rtc::scoped_ptr<SendProcessingUsage> usage_ GUARDED_BY(crit_);
+ const rtc::scoped_ptr<FrameQueue> frame_queue_ GUARDED_BY(crit_);
+
+ rtc::ThreadChecker processing_thread_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(OveruseFrameDetector);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_OVERUSE_FRAME_DETECTOR_H_
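As a Module, the detector expects a single thread to poll it: wait for TimeUntilNextProcess() milliseconds, then call Process(), repeatedly. A hypothetical driver loop (real code would use the shared module process thread rather than a dedicated one):

    #include <chrono>
    #include <thread>

    void DriveDetector(webrtc::OveruseFrameDetector* detector,
                       const bool* running) {
      while (*running) {
        int64_t wait_ms = detector->TimeUntilNextProcess();
        if (wait_ms > 0)
          std::this_thread::sleep_for(std::chrono::milliseconds(wait_ms));
        // Process() is rate limited internally, so a premature call simply
        // returns 0 without evaluating overuse.
        detector->Process();
      }
    }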
diff --git a/webrtc/video/overuse_frame_detector_unittest.cc b/webrtc/video/overuse_frame_detector_unittest.cc
new file mode 100644
index 0000000000..65e006b485
--- /dev/null
+++ b/webrtc/video/overuse_frame_detector_unittest.cc
@@ -0,0 +1,310 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video/overuse_frame_detector.h"
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/system_wrappers/include/clock.h"
+
+namespace webrtc {
+namespace {
+ const int kWidth = 640;
+ const int kHeight = 480;
+ const int kFrameInterval33ms = 33;
+ const int kProcessIntervalMs = 5000;
+ const int kProcessTime5ms = 5;
+} // namespace
+
+class MockCpuOveruseObserver : public CpuOveruseObserver {
+ public:
+ MockCpuOveruseObserver() {}
+ virtual ~MockCpuOveruseObserver() {}
+
+ MOCK_METHOD0(OveruseDetected, void());
+ MOCK_METHOD0(NormalUsage, void());
+};
+
+class CpuOveruseObserverImpl : public CpuOveruseObserver {
+ public:
+ CpuOveruseObserverImpl() :
+ overuse_(0),
+ normaluse_(0) {}
+ virtual ~CpuOveruseObserverImpl() {}
+
+ void OveruseDetected() { ++overuse_; }
+ void NormalUsage() { ++normaluse_; }
+
+ int overuse_;
+ int normaluse_;
+};
+
+class OveruseFrameDetectorTest : public ::testing::Test,
+ public CpuOveruseMetricsObserver {
+ protected:
+ virtual void SetUp() {
+ clock_.reset(new SimulatedClock(1234));
+ observer_.reset(new MockCpuOveruseObserver());
+ options_.min_process_count = 0;
+ ReinitializeOveruseDetector();
+ }
+
+ void ReinitializeOveruseDetector() {
+ overuse_detector_.reset(new OveruseFrameDetector(clock_.get(), options_,
+ observer_.get(), this));
+ }
+
+ void CpuOveruseMetricsUpdated(const CpuOveruseMetrics& metrics) override {
+ metrics_ = metrics;
+ }
+
+ int InitialUsage() {
+ return ((options_.low_encode_usage_threshold_percent +
+ options_.high_encode_usage_threshold_percent) / 2.0f) + 0.5;
+ }
+
+ void InsertAndSendFramesWithInterval(
+ int num_frames, int interval_ms, int width, int height, int delay_ms) {
+ while (num_frames-- > 0) {
+ int64_t capture_time_ms = clock_->TimeInMilliseconds();
+ overuse_detector_->FrameCaptured(width, height, capture_time_ms);
+ clock_->AdvanceTimeMilliseconds(delay_ms);
+ overuse_detector_->FrameSent(capture_time_ms);
+ clock_->AdvanceTimeMilliseconds(interval_ms - delay_ms);
+ }
+ }
+
+ void TriggerOveruse(int num_times) {
+ const int kDelayMs = 32;
+ for (int i = 0; i < num_times; ++i) {
+ InsertAndSendFramesWithInterval(
+ 1000, kFrameInterval33ms, kWidth, kHeight, kDelayMs);
+ overuse_detector_->Process();
+ }
+ }
+
+ void TriggerUnderuse() {
+ const int kDelayMs1 = 5;
+ const int kDelayMs2 = 6;
+ InsertAndSendFramesWithInterval(
+ 1300, kFrameInterval33ms, kWidth, kHeight, kDelayMs1);
+ InsertAndSendFramesWithInterval(
+ 1, kFrameInterval33ms, kWidth, kHeight, kDelayMs2);
+ overuse_detector_->Process();
+ }
+
+ int UsagePercent() { return metrics_.encode_usage_percent; }
+
+ CpuOveruseOptions options_;
+ rtc::scoped_ptr<SimulatedClock> clock_;
+ rtc::scoped_ptr<MockCpuOveruseObserver> observer_;
+ rtc::scoped_ptr<OveruseFrameDetector> overuse_detector_;
+ CpuOveruseMetrics metrics_;
+};
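Two numbers recur in the expectations below: InitialUsage() is the midpoint of the default thresholds, and the steady-state usage is simply processing time over frame interval, both in integer percent:

    // With the default thresholds (55% / 85%) and the constants above:
    static_assert((55 + 85) / 2 == 70, "initial usage starts mid-window");
    static_assert(5 * 100 / 33 == 15, "5 ms work per 33 ms frame is ~15%");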
+
+
+// UsagePercent() > high_encode_usage_threshold_percent => overuse.
+// UsagePercent() < low_encode_usage_threshold_percent => underuse.
+TEST_F(OveruseFrameDetectorTest, TriggerOveruse) {
+ // usage > high => overuse
+ EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
+ TriggerOveruse(options_.high_threshold_consecutive_count);
+}
+
+TEST_F(OveruseFrameDetectorTest, OveruseAndRecover) {
+ // usage > high => overuse
+ EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
+ TriggerOveruse(options_.high_threshold_consecutive_count);
+ // usage < low => underuse
+ EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(testing::AtLeast(1));
+ TriggerUnderuse();
+}
+
+TEST_F(OveruseFrameDetectorTest, OveruseAndRecoverWithNoObserver) {
+ overuse_detector_.reset(
+ new OveruseFrameDetector(clock_.get(), options_, nullptr, this));
+ EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(0);
+ TriggerOveruse(options_.high_threshold_consecutive_count);
+ EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(0);
+ TriggerUnderuse();
+}
+
+TEST_F(OveruseFrameDetectorTest, DoubleOveruseAndRecover) {
+ EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(2);
+ TriggerOveruse(options_.high_threshold_consecutive_count);
+ TriggerOveruse(options_.high_threshold_consecutive_count);
+ EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(testing::AtLeast(1));
+ TriggerUnderuse();
+}
+
+TEST_F(OveruseFrameDetectorTest, TriggerUnderuseWithMinProcessCount) {
+ options_.min_process_count = 1;
+ CpuOveruseObserverImpl overuse_observer;
+ overuse_detector_.reset(new OveruseFrameDetector(clock_.get(), options_,
+ &overuse_observer, this));
+ InsertAndSendFramesWithInterval(
+ 1200, kFrameInterval33ms, kWidth, kHeight, kProcessTime5ms);
+ overuse_detector_->Process();
+ EXPECT_EQ(0, overuse_observer.normaluse_);
+ clock_->AdvanceTimeMilliseconds(kProcessIntervalMs);
+ overuse_detector_->Process();
+ EXPECT_EQ(1, overuse_observer.normaluse_);
+}
+
+TEST_F(OveruseFrameDetectorTest, ConstantOveruseGivesNoNormalUsage) {
+ EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(0);
+ EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(64);
+ for (size_t i = 0; i < 64; ++i) {
+ TriggerOveruse(options_.high_threshold_consecutive_count);
+ }
+}
+
+TEST_F(OveruseFrameDetectorTest, ConsecutiveCountTriggersOveruse) {
+ EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
+ options_.high_threshold_consecutive_count = 2;
+ ReinitializeOveruseDetector();
+ TriggerOveruse(2);
+}
+
+TEST_F(OveruseFrameDetectorTest, IncorrectConsecutiveCountTriggersNoOveruse) {
+ EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(0);
+ options_.high_threshold_consecutive_count = 2;
+ ReinitializeOveruseDetector();
+ TriggerOveruse(1);
+}
+
+TEST_F(OveruseFrameDetectorTest, ProcessingUsage) {
+ InsertAndSendFramesWithInterval(
+ 1000, kFrameInterval33ms, kWidth, kHeight, kProcessTime5ms);
+ EXPECT_EQ(kProcessTime5ms * 100 / kFrameInterval33ms, UsagePercent());
+}
+
+TEST_F(OveruseFrameDetectorTest, ResetAfterResolutionChange) {
+ EXPECT_EQ(InitialUsage(), UsagePercent());
+ InsertAndSendFramesWithInterval(
+ 1000, kFrameInterval33ms, kWidth, kHeight, kProcessTime5ms);
+ EXPECT_NE(InitialUsage(), UsagePercent());
+ // Verify reset.
+ InsertAndSendFramesWithInterval(
+ 1, kFrameInterval33ms, kWidth, kHeight + 1, kProcessTime5ms);
+ EXPECT_EQ(InitialUsage(), UsagePercent());
+}
+
+TEST_F(OveruseFrameDetectorTest, ResetAfterFrameTimeout) {
+ EXPECT_EQ(InitialUsage(), UsagePercent());
+ InsertAndSendFramesWithInterval(
+ 1000, kFrameInterval33ms, kWidth, kHeight, kProcessTime5ms);
+ EXPECT_NE(InitialUsage(), UsagePercent());
+ InsertAndSendFramesWithInterval(
+ 2, options_.frame_timeout_interval_ms, kWidth, kHeight, kProcessTime5ms);
+ EXPECT_NE(InitialUsage(), UsagePercent());
+ // Verify reset.
+ InsertAndSendFramesWithInterval(
+ 2, options_.frame_timeout_interval_ms + 1, kWidth, kHeight,
+ kProcessTime5ms);
+ EXPECT_EQ(InitialUsage(), UsagePercent());
+}
+
+TEST_F(OveruseFrameDetectorTest, MinFrameSamplesBeforeUpdating) {
+ options_.min_frame_samples = 40;
+ ReinitializeOveruseDetector();
+ InsertAndSendFramesWithInterval(
+ 40, kFrameInterval33ms, kWidth, kHeight, kProcessTime5ms);
+ EXPECT_EQ(InitialUsage(), UsagePercent());
+ InsertAndSendFramesWithInterval(
+ 1, kFrameInterval33ms, kWidth, kHeight, kProcessTime5ms);
+ EXPECT_NE(InitialUsage(), UsagePercent());
+}
+
+TEST_F(OveruseFrameDetectorTest, InitialProcessingUsage) {
+ EXPECT_EQ(InitialUsage(), UsagePercent());
+}
+
+TEST_F(OveruseFrameDetectorTest, FrameDelay_OneFrame) {
+ const int kProcessingTimeMs = 100;
+ overuse_detector_->FrameCaptured(kWidth, kHeight, 33);
+ clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
+ EXPECT_EQ(-1, overuse_detector_->LastProcessingTimeMs());
+ overuse_detector_->FrameSent(33);
+ EXPECT_EQ(kProcessingTimeMs, overuse_detector_->LastProcessingTimeMs());
+ EXPECT_EQ(0, overuse_detector_->FramesInQueue());
+}
+
+TEST_F(OveruseFrameDetectorTest, FrameDelay_TwoFrames) {
+ const int kProcessingTimeMs1 = 100;
+ const int kProcessingTimeMs2 = 50;
+ const int kTimeBetweenFramesMs = 200;
+ overuse_detector_->FrameCaptured(kWidth, kHeight, 33);
+ clock_->AdvanceTimeMilliseconds(kProcessingTimeMs1);
+ overuse_detector_->FrameSent(33);
+ EXPECT_EQ(kProcessingTimeMs1, overuse_detector_->LastProcessingTimeMs());
+ clock_->AdvanceTimeMilliseconds(kTimeBetweenFramesMs);
+ overuse_detector_->FrameCaptured(kWidth, kHeight, 66);
+ clock_->AdvanceTimeMilliseconds(kProcessingTimeMs2);
+ overuse_detector_->FrameSent(66);
+ EXPECT_EQ(kProcessingTimeMs2, overuse_detector_->LastProcessingTimeMs());
+}
+
+TEST_F(OveruseFrameDetectorTest, FrameDelay_MaxQueueSize) {
+ const int kMaxQueueSize = 91;
+ for (int i = 0; i < kMaxQueueSize * 2; ++i) {
+ overuse_detector_->FrameCaptured(kWidth, kHeight, i);
+ }
+ EXPECT_EQ(kMaxQueueSize, overuse_detector_->FramesInQueue());
+}
+
+TEST_F(OveruseFrameDetectorTest, FrameDelay_NonProcessedFramesRemoved) {
+ const int kProcessingTimeMs = 100;
+ overuse_detector_->FrameCaptured(kWidth, kHeight, 33);
+ clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
+ overuse_detector_->FrameCaptured(kWidth, kHeight, 35);
+ clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
+ overuse_detector_->FrameCaptured(kWidth, kHeight, 66);
+ clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
+ overuse_detector_->FrameCaptured(kWidth, kHeight, 99);
+ clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
+ EXPECT_EQ(-1, overuse_detector_->LastProcessingTimeMs());
+ EXPECT_EQ(4, overuse_detector_->FramesInQueue());
+ overuse_detector_->FrameSent(66);
+  // Frames 33 and 35 removed, 66 processed, 99 not yet processed.
+ EXPECT_EQ(2 * kProcessingTimeMs, overuse_detector_->LastProcessingTimeMs());
+ EXPECT_EQ(1, overuse_detector_->FramesInQueue());
+ overuse_detector_->FrameSent(99);
+ EXPECT_EQ(kProcessingTimeMs, overuse_detector_->LastProcessingTimeMs());
+ EXPECT_EQ(0, overuse_detector_->FramesInQueue());
+}
+
+TEST_F(OveruseFrameDetectorTest, FrameDelay_ResetClearsFrames) {
+ const int kProcessingTimeMs = 100;
+ overuse_detector_->FrameCaptured(kWidth, kHeight, 33);
+ EXPECT_EQ(1, overuse_detector_->FramesInQueue());
+ clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
+ // Verify reset (resolution changed).
+ overuse_detector_->FrameCaptured(kWidth, kHeight + 1, 66);
+ EXPECT_EQ(1, overuse_detector_->FramesInQueue());
+ clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
+ overuse_detector_->FrameSent(66);
+ EXPECT_EQ(kProcessingTimeMs, overuse_detector_->LastProcessingTimeMs());
+ EXPECT_EQ(0, overuse_detector_->FramesInQueue());
+}
+
+TEST_F(OveruseFrameDetectorTest, FrameDelay_NonMatchingSendFrameIgnored) {
+ const int kProcessingTimeMs = 100;
+ overuse_detector_->FrameCaptured(kWidth, kHeight, 33);
+ clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
+ overuse_detector_->FrameSent(34);
+ EXPECT_EQ(-1, overuse_detector_->LastProcessingTimeMs());
+ overuse_detector_->FrameSent(33);
+ EXPECT_EQ(kProcessingTimeMs, overuse_detector_->LastProcessingTimeMs());
+}
+
+} // namespace webrtc
diff --git a/webrtc/video/payload_router.cc b/webrtc/video/payload_router.cc
new file mode 100644
index 0000000000..177f2dd4e8
--- /dev/null
+++ b/webrtc/video/payload_router.cc
@@ -0,0 +1,101 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video/payload_router.h"
+
+#include "webrtc/base/checks.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+
+namespace webrtc {
+
+PayloadRouter::PayloadRouter()
+ : crit_(CriticalSectionWrapper::CreateCriticalSection()),
+ active_(false) {}
+
+PayloadRouter::~PayloadRouter() {}
+
+size_t PayloadRouter::DefaultMaxPayloadLength() {
+ const size_t kIpUdpSrtpLength = 44;
+ return IP_PACKET_SIZE - kIpUdpSrtpLength;
+}
+
+void PayloadRouter::SetSendingRtpModules(
+ const std::list<RtpRtcp*>& rtp_modules) {
+ CriticalSectionScoped cs(crit_.get());
+ rtp_modules_.clear();
+ rtp_modules_.reserve(rtp_modules.size());
+ for (auto* rtp_module : rtp_modules) {
+ rtp_modules_.push_back(rtp_module);
+ }
+}
+
+void PayloadRouter::set_active(bool active) {
+ CriticalSectionScoped cs(crit_.get());
+ active_ = active;
+}
+
+bool PayloadRouter::active() {
+ CriticalSectionScoped cs(crit_.get());
+ return active_ && !rtp_modules_.empty();
+}
+
+bool PayloadRouter::RoutePayload(FrameType frame_type,
+ int8_t payload_type,
+ uint32_t time_stamp,
+ int64_t capture_time_ms,
+ const uint8_t* payload_data,
+ size_t payload_length,
+ const RTPFragmentationHeader* fragmentation,
+ const RTPVideoHeader* rtp_video_hdr) {
+ CriticalSectionScoped cs(crit_.get());
+ if (!active_ || rtp_modules_.empty())
+ return false;
+
+ // The simulcast index might actually be larger than the number of modules in
+ // case the encoder was processing a frame during a codec reconfig.
+ if (rtp_video_hdr != NULL &&
+ rtp_video_hdr->simulcastIdx >= rtp_modules_.size())
+ return false;
+
+ int stream_idx = 0;
+ if (rtp_video_hdr != NULL)
+ stream_idx = rtp_video_hdr->simulcastIdx;
+ return rtp_modules_[stream_idx]->SendOutgoingData(
+ frame_type, payload_type, time_stamp, capture_time_ms, payload_data,
+ payload_length, fragmentation, rtp_video_hdr) == 0 ? true : false;
+}
+
+void PayloadRouter::SetTargetSendBitrates(
+ const std::vector<uint32_t>& stream_bitrates) {
+ CriticalSectionScoped cs(crit_.get());
+ if (stream_bitrates.size() < rtp_modules_.size()) {
+    // There can be a size mismatch during codec reconfiguration.
+ return;
+ }
+ int idx = 0;
+ for (auto* rtp_module : rtp_modules_) {
+ rtp_module->SetTargetSendBitrate(stream_bitrates[idx++]);
+ }
+}
+
+size_t PayloadRouter::MaxPayloadLength() const {
+ size_t min_payload_length = DefaultMaxPayloadLength();
+ CriticalSectionScoped cs(crit_.get());
+ for (auto* rtp_module : rtp_modules_) {
+ size_t module_payload_length = rtp_module->MaxDataPayloadLength();
+ if (module_payload_length < min_payload_length)
+ min_payload_length = module_payload_length;
+ }
+ return min_payload_length;
+}
+
+} // namespace webrtc
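DefaultMaxPayloadLength() budgets a fixed 44 bytes of per-packet overhead against the 1500-byte IP_PACKET_SIZE. A plausible breakdown of those 44 bytes (only the total appears in the code; the per-header attribution is an assumption consistent with the IP/UDP/SRTP comment in the unit test):

    const size_t kMtu = 1500;        // IP_PACKET_SIZE.
    const size_t kIpv4Header = 20;   // IPv4 header, no options.
    const size_t kUdpHeader = 8;
    const size_t kRtpHeader = 12;    // Fixed RTP header, no CSRCs/extensions.
    const size_t kSrtpBudget = 4;    // Assumed SRTP overhead allowance.
    const size_t kMaxPayload =
        kMtu - kIpv4Header - kUdpHeader - kRtpHeader - kSrtpBudget;  // 1456.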
diff --git a/webrtc/video/payload_router.h b/webrtc/video/payload_router.h
new file mode 100644
index 0000000000..881145976d
--- /dev/null
+++ b/webrtc/video/payload_router.h
@@ -0,0 +1,85 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_PAYLOAD_ROUTER_H_
+#define WEBRTC_VIDEO_PAYLOAD_ROUTER_H_
+
+#include <list>
+#include <vector>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/thread_annotations.h"
+#include "webrtc/common_types.h"
+#include "webrtc/system_wrappers/include/atomic32.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class RTPFragmentationHeader;
+class RtpRtcp;
+struct RTPVideoHeader;
+
+// PayloadRouter routes outgoing data to the correct sending RTP module, based
+// on the simulcast layer in RTPVideoHeader.
+class PayloadRouter {
+ public:
+ PayloadRouter();
+ ~PayloadRouter();
+
+ static size_t DefaultMaxPayloadLength();
+
+ // Rtp modules are assumed to be sorted in simulcast index order.
+ void SetSendingRtpModules(const std::list<RtpRtcp*>& rtp_modules);
+
+  // PayloadRouter only routes packets while active; otherwise all packets
+  // are dropped.
+ void set_active(bool active);
+ bool active();
+
+ // Input parameters according to the signature of RtpRtcp::SendOutgoingData.
+ // Returns true if the packet was routed / sent, false otherwise.
+ bool RoutePayload(FrameType frame_type,
+ int8_t payload_type,
+ uint32_t time_stamp,
+ int64_t capture_time_ms,
+ const uint8_t* payload_data,
+ size_t payload_size,
+ const RTPFragmentationHeader* fragmentation,
+ const RTPVideoHeader* rtp_video_hdr);
+
+ // Configures current target bitrate per module. 'stream_bitrates' is assumed
+ // to be in the same order as 'SetSendingRtpModules'.
+ void SetTargetSendBitrates(const std::vector<uint32_t>& stream_bitrates);
+
+ // Returns the maximum allowed data payload length, given the configured MTU
+ // and RTP headers.
+ size_t MaxPayloadLength() const;
+
+ void AddRef() { ++ref_count_; }
+ void Release() { if (--ref_count_ == 0) { delete this; } }
+
+ private:
+ // TODO(mflodman): When the new video API has launched, remove crit_ and
+ // assume rtp_modules_ will never change during a call.
+ rtc::scoped_ptr<CriticalSectionWrapper> crit_;
+
+ // Active sending RTP modules, in layer order.
+ std::vector<RtpRtcp*> rtp_modules_ GUARDED_BY(crit_.get());
+ bool active_ GUARDED_BY(crit_.get());
+
+ Atomic32 ref_count_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(PayloadRouter);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_PAYLOAD_ROUTER_H_
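AddRef() and Release() implement intrusive reference counting on top of Atomic32; the Release() that drops the count to zero deletes the router. A hypothetical ownership hand-off, assuming Atomic32 default-initializes the count to zero:

    PayloadRouter* router = new PayloadRouter();
    router->AddRef();   // First holder.
    router->AddRef();   // Second holder.
    router->Release();  // First holder done; count back to 1.
    router->Release();  // Count reaches 0; the router deletes itself.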
diff --git a/webrtc/video/payload_router_unittest.cc b/webrtc/video/payload_router_unittest.cc
new file mode 100644
index 0000000000..8c22f2fd5c
--- /dev/null
+++ b/webrtc/video/payload_router_unittest.cc
@@ -0,0 +1,209 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+#include <list>
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h"
+#include "webrtc/video/payload_router.h"
+
+using ::testing::_;
+using ::testing::AnyNumber;
+using ::testing::NiceMock;
+using ::testing::Return;
+
+namespace webrtc {
+
+class PayloadRouterTest : public ::testing::Test {
+ protected:
+ virtual void SetUp() {
+ payload_router_.reset(new PayloadRouter());
+ }
+ rtc::scoped_ptr<PayloadRouter> payload_router_;
+};
+
+TEST_F(PayloadRouterTest, SendOnOneModule) {
+ MockRtpRtcp rtp;
+ std::list<RtpRtcp*> modules(1, &rtp);
+
+ payload_router_->SetSendingRtpModules(modules);
+
+ uint8_t payload = 'a';
+ FrameType frame_type = kVideoFrameKey;
+ int8_t payload_type = 96;
+
+ EXPECT_CALL(rtp, SendOutgoingData(frame_type, payload_type, 0, 0, _, 1, NULL,
+ NULL))
+ .Times(0);
+ EXPECT_FALSE(payload_router_->RoutePayload(frame_type, payload_type, 0, 0,
+ &payload, 1, NULL, NULL));
+
+ payload_router_->set_active(true);
+ EXPECT_CALL(rtp, SendOutgoingData(frame_type, payload_type, 0, 0, _, 1, NULL,
+ NULL))
+ .Times(1);
+ EXPECT_TRUE(payload_router_->RoutePayload(frame_type, payload_type, 0, 0,
+ &payload, 1, NULL, NULL));
+
+ payload_router_->set_active(false);
+ EXPECT_CALL(rtp, SendOutgoingData(frame_type, payload_type, 0, 0, _, 1, NULL,
+ NULL))
+ .Times(0);
+ EXPECT_FALSE(payload_router_->RoutePayload(frame_type, payload_type, 0, 0,
+ &payload, 1, NULL, NULL));
+
+ payload_router_->set_active(true);
+ EXPECT_CALL(rtp, SendOutgoingData(frame_type, payload_type, 0, 0, _, 1, NULL,
+ NULL))
+ .Times(1);
+ EXPECT_TRUE(payload_router_->RoutePayload(frame_type, payload_type, 0, 0,
+ &payload, 1, NULL, NULL));
+
+ modules.clear();
+ payload_router_->SetSendingRtpModules(modules);
+ EXPECT_CALL(rtp, SendOutgoingData(frame_type, payload_type, 0, 0, _, 1, NULL,
+ NULL))
+ .Times(0);
+ EXPECT_FALSE(payload_router_->RoutePayload(frame_type, payload_type, 0, 0,
+ &payload, 1, NULL, NULL));
+}
+
+TEST_F(PayloadRouterTest, SendSimulcast) {
+ MockRtpRtcp rtp_1;
+ MockRtpRtcp rtp_2;
+ std::list<RtpRtcp*> modules;
+ modules.push_back(&rtp_1);
+ modules.push_back(&rtp_2);
+
+ payload_router_->SetSendingRtpModules(modules);
+
+ uint8_t payload_1 = 'a';
+ FrameType frame_type_1 = kVideoFrameKey;
+ int8_t payload_type_1 = 96;
+ RTPVideoHeader rtp_hdr_1;
+ rtp_hdr_1.simulcastIdx = 0;
+
+ payload_router_->set_active(true);
+ EXPECT_CALL(rtp_1, SendOutgoingData(frame_type_1, payload_type_1, 0, 0, _, 1,
+ NULL, &rtp_hdr_1))
+ .Times(1);
+ EXPECT_CALL(rtp_2, SendOutgoingData(_, _, _, _, _, _, _, _))
+ .Times(0);
+ EXPECT_TRUE(payload_router_->RoutePayload(frame_type_1, payload_type_1, 0, 0,
+ &payload_1, 1, NULL, &rtp_hdr_1));
+
+ uint8_t payload_2 = 'b';
+ FrameType frame_type_2 = kVideoFrameDelta;
+ int8_t payload_type_2 = 97;
+ RTPVideoHeader rtp_hdr_2;
+ rtp_hdr_2.simulcastIdx = 1;
+ EXPECT_CALL(rtp_2, SendOutgoingData(frame_type_2, payload_type_2, 0, 0, _, 1,
+ NULL, &rtp_hdr_2))
+ .Times(1);
+ EXPECT_CALL(rtp_1, SendOutgoingData(_, _, _, _, _, _, _, _))
+ .Times(0);
+ EXPECT_TRUE(payload_router_->RoutePayload(frame_type_2, payload_type_2, 0, 0,
+ &payload_2, 1, NULL, &rtp_hdr_2));
+
+ // Inactive.
+ payload_router_->set_active(false);
+ EXPECT_CALL(rtp_1, SendOutgoingData(_, _, _, _, _, _, _, _))
+ .Times(0);
+ EXPECT_CALL(rtp_2, SendOutgoingData(_, _, _, _, _, _, _, _))
+ .Times(0);
+ EXPECT_FALSE(payload_router_->RoutePayload(frame_type_1, payload_type_1, 0, 0,
+ &payload_1, 1, NULL, &rtp_hdr_1));
+ EXPECT_FALSE(payload_router_->RoutePayload(frame_type_2, payload_type_2, 0, 0,
+ &payload_2, 1, NULL, &rtp_hdr_2));
+
+ // Invalid simulcast index.
+ payload_router_->set_active(true);
+ EXPECT_CALL(rtp_1, SendOutgoingData(_, _, _, _, _, _, _, _))
+ .Times(0);
+ EXPECT_CALL(rtp_2, SendOutgoingData(_, _, _, _, _, _, _, _))
+ .Times(0);
+ rtp_hdr_1.simulcastIdx = 2;
+ EXPECT_FALSE(payload_router_->RoutePayload(frame_type_1, payload_type_1, 0, 0,
+ &payload_1, 1, NULL, &rtp_hdr_1));
+}
+
+TEST_F(PayloadRouterTest, MaxPayloadLength) {
+ // Without any limitations from the modules, verify we get the max payload
+  // length for IP/UDP/SRTP with an MTU of 1500 bytes.
+ const size_t kDefaultMaxLength = 1500 - 20 - 8 - 12 - 4;
+ EXPECT_EQ(kDefaultMaxLength, payload_router_->DefaultMaxPayloadLength());
+ EXPECT_EQ(kDefaultMaxLength, payload_router_->MaxPayloadLength());
+
+ MockRtpRtcp rtp_1;
+ MockRtpRtcp rtp_2;
+ std::list<RtpRtcp*> modules;
+ modules.push_back(&rtp_1);
+ modules.push_back(&rtp_2);
+ payload_router_->SetSendingRtpModules(modules);
+
+ // Modules return a higher length than the default value.
+ EXPECT_CALL(rtp_1, MaxDataPayloadLength())
+ .Times(1)
+ .WillOnce(Return(kDefaultMaxLength + 10));
+ EXPECT_CALL(rtp_2, MaxDataPayloadLength())
+ .Times(1)
+ .WillOnce(Return(kDefaultMaxLength + 10));
+ EXPECT_EQ(kDefaultMaxLength, payload_router_->MaxPayloadLength());
+
+ // The modules return a value lower than default.
+ const size_t kTestMinPayloadLength = 1001;
+ EXPECT_CALL(rtp_1, MaxDataPayloadLength())
+ .Times(1)
+ .WillOnce(Return(kTestMinPayloadLength + 10));
+ EXPECT_CALL(rtp_2, MaxDataPayloadLength())
+ .Times(1)
+ .WillOnce(Return(kTestMinPayloadLength));
+ EXPECT_EQ(kTestMinPayloadLength, payload_router_->MaxPayloadLength());
+}
+
+TEST_F(PayloadRouterTest, SetTargetSendBitrates) {
+ MockRtpRtcp rtp_1;
+ MockRtpRtcp rtp_2;
+ std::list<RtpRtcp*> modules;
+ modules.push_back(&rtp_1);
+ modules.push_back(&rtp_2);
+ payload_router_->SetSendingRtpModules(modules);
+
+ const uint32_t bitrate_1 = 10000;
+ const uint32_t bitrate_2 = 76543;
+ std::vector<uint32_t> bitrates(2, bitrate_1);
+ bitrates[1] = bitrate_2;
+ EXPECT_CALL(rtp_1, SetTargetSendBitrate(bitrate_1))
+ .Times(1);
+ EXPECT_CALL(rtp_2, SetTargetSendBitrate(bitrate_2))
+ .Times(1);
+ payload_router_->SetTargetSendBitrates(bitrates);
+
+ bitrates.resize(1);
+ EXPECT_CALL(rtp_1, SetTargetSendBitrate(bitrate_1))
+ .Times(0);
+ EXPECT_CALL(rtp_2, SetTargetSendBitrate(bitrate_2))
+ .Times(0);
+ payload_router_->SetTargetSendBitrates(bitrates);
+
+ bitrates.resize(3);
+ bitrates[1] = bitrate_2;
+ bitrates[2] = bitrate_1 + bitrate_2;
+ EXPECT_CALL(rtp_1, SetTargetSendBitrate(bitrate_1))
+ .Times(1);
+ EXPECT_CALL(rtp_2, SetTargetSendBitrate(bitrate_2))
+ .Times(1);
+ payload_router_->SetTargetSendBitrates(bitrates);
+}
+} // namespace webrtc
diff --git a/webrtc/video/rampup_tests.cc b/webrtc/video/rampup_tests.cc
deleted file mode 100644
index 70efe3b9ed..0000000000
--- a/webrtc/video/rampup_tests.cc
+++ /dev/null
@@ -1,509 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/checks.h"
-#include "webrtc/base/common.h"
-#include "webrtc/base/event.h"
-#include "webrtc/modules/pacing/include/packet_router.h"
-#include "webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h"
-#include "webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h"
-#include "webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.h"
-#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
-#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
-#include "webrtc/test/testsupport/perf_test.h"
-#include "webrtc/video/rampup_tests.h"
-
-namespace webrtc {
-namespace {
-
-static const int64_t kPollIntervalMs = 20;
-
-std::vector<uint32_t> GenerateSsrcs(size_t num_streams,
- uint32_t ssrc_offset) {
- std::vector<uint32_t> ssrcs;
- for (size_t i = 0; i != num_streams; ++i)
- ssrcs.push_back(static_cast<uint32_t>(ssrc_offset + i));
- return ssrcs;
-}
-} // namespace
-
-RampUpTester::RampUpTester(size_t num_streams,
- unsigned int start_bitrate_bps,
- const std::string& extension_type,
- bool rtx,
- bool red)
- : EndToEndTest(test::CallTest::kLongTimeoutMs),
- event_(false, false),
- clock_(Clock::GetRealTimeClock()),
- num_streams_(num_streams),
- rtx_(rtx),
- red_(red),
- send_stream_(nullptr),
- start_bitrate_bps_(start_bitrate_bps),
- start_bitrate_verified_(false),
- expected_bitrate_bps_(0),
- test_start_ms_(-1),
- ramp_up_finished_ms_(-1),
- extension_type_(extension_type),
- ssrcs_(GenerateSsrcs(num_streams, 100)),
- rtx_ssrcs_(GenerateSsrcs(num_streams, 200)),
- poller_thread_(ThreadWrapper::CreateThread(&BitrateStatsPollingThread,
- this,
- "BitrateStatsPollingThread")),
- sender_call_(nullptr) {
- if (rtx_) {
- for (size_t i = 0; i < ssrcs_.size(); ++i)
- rtx_ssrc_map_[rtx_ssrcs_[i]] = ssrcs_[i];
- }
-}
-
-RampUpTester::~RampUpTester() {
- event_.Set();
-}
-
-Call::Config RampUpTester::GetSenderCallConfig() {
- Call::Config call_config;
- if (start_bitrate_bps_ != 0) {
- call_config.bitrate_config.start_bitrate_bps = start_bitrate_bps_;
- }
- call_config.bitrate_config.min_bitrate_bps = 10000;
- return call_config;
-}
-
-void RampUpTester::OnStreamsCreated(
- VideoSendStream* send_stream,
- const std::vector<VideoReceiveStream*>& receive_streams) {
- send_stream_ = send_stream;
-}
-
-void RampUpTester::OnTransportsCreated(
- test::PacketTransport* send_transport,
- test::PacketTransport* receive_transport) {
- send_transport_ = send_transport;
- send_transport_->SetConfig(forward_transport_config_);
-}
-
-size_t RampUpTester::GetNumStreams() const {
- return num_streams_;
-}
-
-void RampUpTester::ModifyConfigs(
- VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) {
- send_config->suspend_below_min_bitrate = true;
-
- if (num_streams_ == 1) {
- encoder_config->streams[0].target_bitrate_bps =
- encoder_config->streams[0].max_bitrate_bps = 2000000;
- // For single stream rampup until 1mbps
- expected_bitrate_bps_ = kSingleStreamTargetBps;
- } else {
- // For multi stream rampup until all streams are being sent. That means
- // enough birate to send all the target streams plus the min bitrate of
- // the last one.
- expected_bitrate_bps_ = encoder_config->streams.back().min_bitrate_bps;
- for (size_t i = 0; i < encoder_config->streams.size() - 1; ++i) {
- expected_bitrate_bps_ += encoder_config->streams[i].target_bitrate_bps;
- }
- }
-
- send_config->rtp.extensions.clear();
-
- bool remb;
- if (extension_type_ == RtpExtension::kAbsSendTime) {
- remb = true;
- send_config->rtp.extensions.push_back(
- RtpExtension(extension_type_.c_str(), kAbsSendTimeExtensionId));
- } else if (extension_type_ == RtpExtension::kTransportSequenceNumber) {
- remb = false;
- send_config->rtp.extensions.push_back(RtpExtension(
- extension_type_.c_str(), kTransportSequenceNumberExtensionId));
- } else {
- remb = true;
- send_config->rtp.extensions.push_back(RtpExtension(
- extension_type_.c_str(), kTransmissionTimeOffsetExtensionId));
- }
-
- send_config->rtp.nack.rtp_history_ms = test::CallTest::kNackRtpHistoryMs;
- send_config->rtp.ssrcs = ssrcs_;
- if (rtx_) {
- send_config->rtp.rtx.payload_type = test::CallTest::kSendRtxPayloadType;
- send_config->rtp.rtx.ssrcs = rtx_ssrcs_;
- }
- if (red_) {
- send_config->rtp.fec.ulpfec_payload_type =
- test::CallTest::kUlpfecPayloadType;
- send_config->rtp.fec.red_payload_type = test::CallTest::kRedPayloadType;
- }
-
- size_t i = 0;
- for (VideoReceiveStream::Config& recv_config : *receive_configs) {
- recv_config.rtp.remb = remb;
- recv_config.rtp.extensions = send_config->rtp.extensions;
-
- recv_config.rtp.remote_ssrc = ssrcs_[i];
- recv_config.rtp.nack.rtp_history_ms = send_config->rtp.nack.rtp_history_ms;
-
- if (red_) {
- recv_config.rtp.fec.red_payload_type =
- send_config->rtp.fec.red_payload_type;
- recv_config.rtp.fec.ulpfec_payload_type =
- send_config->rtp.fec.ulpfec_payload_type;
- }
-
- if (rtx_) {
- recv_config.rtp.rtx[send_config->encoder_settings.payload_type].ssrc =
- rtx_ssrcs_[i];
- recv_config.rtp.rtx[send_config->encoder_settings.payload_type]
- .payload_type = send_config->rtp.rtx.payload_type;
- }
- ++i;
- }
-}
-
-void RampUpTester::OnCallsCreated(Call* sender_call, Call* receiver_call) {
- sender_call_ = sender_call;
-}
-
-bool RampUpTester::BitrateStatsPollingThread(void* obj) {
- return static_cast<RampUpTester*>(obj)->PollStats();
-}
-
-bool RampUpTester::PollStats() {
- if (sender_call_) {
- Call::Stats stats = sender_call_->GetStats();
-
- RTC_DCHECK_GT(expected_bitrate_bps_, 0);
- if (!start_bitrate_verified_ && start_bitrate_bps_ != 0) {
- // For tests with an explicitly set start bitrate, verify the first
- // bitrate estimate is close to the start bitrate and lower than the
- // test target bitrate. This is to verify a call respects the configured
- // start bitrate, but due to the BWE implementation we can't guarantee the
- // first estimate really is as high as the start bitrate.
- EXPECT_GT(stats.send_bandwidth_bps, 0.9 * start_bitrate_bps_);
- start_bitrate_verified_ = true;
- }
- if (stats.send_bandwidth_bps >= expected_bitrate_bps_) {
- ramp_up_finished_ms_ = clock_->TimeInMilliseconds();
- observation_complete_->Set();
- }
- }
-
- return !event_.Wait(kPollIntervalMs);
-}
-
-void RampUpTester::ReportResult(const std::string& measurement,
- size_t value,
- const std::string& units) const {
- webrtc::test::PrintResult(
- measurement, "",
- ::testing::UnitTest::GetInstance()->current_test_info()->name(),
- value, units, false);
-}
-
-void RampUpTester::AccumulateStats(const VideoSendStream::StreamStats& stream,
- size_t* total_packets_sent,
- size_t* total_sent,
- size_t* padding_sent,
- size_t* media_sent) const {
- *total_packets_sent += stream.rtp_stats.transmitted.packets +
- stream.rtp_stats.retransmitted.packets +
- stream.rtp_stats.fec.packets;
- *total_sent += stream.rtp_stats.transmitted.TotalBytes() +
- stream.rtp_stats.retransmitted.TotalBytes() +
- stream.rtp_stats.fec.TotalBytes();
- *padding_sent += stream.rtp_stats.transmitted.padding_bytes +
- stream.rtp_stats.retransmitted.padding_bytes +
- stream.rtp_stats.fec.padding_bytes;
- *media_sent += stream.rtp_stats.MediaPayloadBytes();
-}
-
-void RampUpTester::TriggerTestDone() {
- RTC_DCHECK_GE(test_start_ms_, 0);
-
- VideoSendStream::Stats send_stats = send_stream_->GetStats();
-
- size_t total_packets_sent = 0;
- size_t total_sent = 0;
- size_t padding_sent = 0;
- size_t media_sent = 0;
- for (uint32_t ssrc : ssrcs_) {
- AccumulateStats(send_stats.substreams[ssrc], &total_packets_sent,
- &total_sent, &padding_sent, &media_sent);
- }
-
- size_t rtx_total_packets_sent = 0;
- size_t rtx_total_sent = 0;
- size_t rtx_padding_sent = 0;
- size_t rtx_media_sent = 0;
- for (uint32_t rtx_ssrc : rtx_ssrcs_) {
- AccumulateStats(send_stats.substreams[rtx_ssrc], &rtx_total_packets_sent,
- &rtx_total_sent, &rtx_padding_sent, &rtx_media_sent);
- }
-
- ReportResult("ramp-up-total-packets-sent", total_packets_sent, "packets");
- ReportResult("ramp-up-total-sent", total_sent, "bytes");
- ReportResult("ramp-up-media-sent", media_sent, "bytes");
- ReportResult("ramp-up-padding-sent", padding_sent, "bytes");
- ReportResult("ramp-up-rtx-total-packets-sent", rtx_total_packets_sent,
- "packets");
- ReportResult("ramp-up-rtx-total-sent", rtx_total_sent, "bytes");
- ReportResult("ramp-up-rtx-media-sent", rtx_media_sent, "bytes");
- ReportResult("ramp-up-rtx-padding-sent", rtx_padding_sent, "bytes");
- if (ramp_up_finished_ms_ >= 0) {
- ReportResult("ramp-up-time", ramp_up_finished_ms_ - test_start_ms_,
- "milliseconds");
- }
-}
-
-void RampUpTester::PerformTest() {
- test_start_ms_ = clock_->TimeInMilliseconds();
- poller_thread_->Start();
- if (Wait() != kEventSignaled) {
- printf("Timed out while waiting for ramp-up to complete.");
- return;
- }
- TriggerTestDone();
- poller_thread_->Stop();
-}
-
-RampUpDownUpTester::RampUpDownUpTester(size_t num_streams,
- unsigned int start_bitrate_bps,
- const std::string& extension_type,
- bool rtx,
- bool red)
- : RampUpTester(num_streams, start_bitrate_bps, extension_type, rtx, red),
- test_state_(kFirstRampup),
- state_start_ms_(clock_->TimeInMilliseconds()),
- interval_start_ms_(clock_->TimeInMilliseconds()),
- sent_bytes_(0) {
- forward_transport_config_.link_capacity_kbps =
- kHighBandwidthLimitBps / 1000;
-}
-
-RampUpDownUpTester::~RampUpDownUpTester() {}
-
-bool RampUpDownUpTester::PollStats() {
- if (send_stream_) {
- webrtc::VideoSendStream::Stats stats = send_stream_->GetStats();
- int transmit_bitrate_bps = 0;
- for (auto it : stats.substreams) {
- transmit_bitrate_bps += it.second.total_bitrate_bps;
- }
-
- EvolveTestState(transmit_bitrate_bps, stats.suspended);
- }
-
- return !event_.Wait(kPollIntervalMs);
-}
-
-Call::Config RampUpDownUpTester::GetReceiverCallConfig() {
- Call::Config config;
- config.bitrate_config.min_bitrate_bps = 10000;
- return config;
-}
-
-std::string RampUpDownUpTester::GetModifierString() const {
- std::string str("_");
- char temp_str[5];
- sprintf(temp_str, "%i", static_cast<int>(num_streams_));
- str += std::string(temp_str);
- str += "stream";
- str += (num_streams_ > 1 ? "s" : "");
- str += "_";
- str += (rtx_ ? "" : "no");
- str += "rtx";
- return str;
-}
-
-void RampUpDownUpTester::EvolveTestState(int bitrate_bps, bool suspended) {
- int64_t now = clock_->TimeInMilliseconds();
- switch (test_state_) {
- case kFirstRampup: {
- EXPECT_FALSE(suspended);
- if (bitrate_bps > kExpectedHighBitrateBps) {
- // The first ramp-up has reached the target bitrate. Change the
- // channel limit, and move to the next test state.
- forward_transport_config_.link_capacity_kbps =
- kLowBandwidthLimitBps / 1000;
- send_transport_->SetConfig(forward_transport_config_);
- test_state_ = kLowRate;
- webrtc::test::PrintResult("ramp_up_down_up",
- GetModifierString(),
- "first_rampup",
- now - state_start_ms_,
- "ms",
- false);
- state_start_ms_ = now;
- interval_start_ms_ = now;
- sent_bytes_ = 0;
- }
- break;
- }
- case kLowRate: {
- if (bitrate_bps < kExpectedLowBitrateBps && suspended) {
- // The ramp-down was successful. Change the channel limit back to a
- // high value, and move to the next test state.
- forward_transport_config_.link_capacity_kbps =
- kHighBandwidthLimitBps / 1000;
- send_transport_->SetConfig(forward_transport_config_);
- test_state_ = kSecondRampup;
- webrtc::test::PrintResult("ramp_up_down_up",
- GetModifierString(),
- "rampdown",
- now - state_start_ms_,
- "ms",
- false);
- state_start_ms_ = now;
- interval_start_ms_ = now;
- sent_bytes_ = 0;
- }
- break;
- }
- case kSecondRampup: {
- if (bitrate_bps > kExpectedHighBitrateBps && !suspended) {
- webrtc::test::PrintResult("ramp_up_down_up",
- GetModifierString(),
- "second_rampup",
- now - state_start_ms_,
- "ms",
- false);
- observation_complete_->Set();
- }
- break;
- }
- }
-}
-
-class RampUpTest : public test::CallTest {
- public:
- RampUpTest() {}
-
- virtual ~RampUpTest() {
- EXPECT_EQ(nullptr, send_stream_);
- EXPECT_TRUE(receive_streams_.empty());
- }
-};
-
-TEST_F(RampUpTest, SingleStream) {
- RampUpTester test(1, 0, RtpExtension::kTOffset, false, false);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, Simulcast) {
- RampUpTester test(3, 0, RtpExtension::kTOffset, false, false);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, SimulcastWithRtx) {
- RampUpTester test(3, 0, RtpExtension::kTOffset, true, false);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, SimulcastByRedWithRtx) {
- RampUpTester test(3, 0, RtpExtension::kTOffset, true, true);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, SingleStreamWithHighStartBitrate) {
- RampUpTester test(1, 0.9 * kSingleStreamTargetBps, RtpExtension::kTOffset,
- false, false);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, UpDownUpOneStream) {
- RampUpDownUpTester test(1, 60000, RtpExtension::kAbsSendTime, false, false);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, UpDownUpThreeStreams) {
- RampUpDownUpTester test(3, 60000, RtpExtension::kAbsSendTime, false, false);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, UpDownUpOneStreamRtx) {
- RampUpDownUpTester test(1, 60000, RtpExtension::kAbsSendTime, true, false);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, UpDownUpThreeStreamsRtx) {
- RampUpDownUpTester test(3, 60000, RtpExtension::kAbsSendTime, true, false);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, UpDownUpOneStreamByRedRtx) {
- RampUpDownUpTester test(1, 60000, RtpExtension::kAbsSendTime, true, true);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, UpDownUpThreeStreamsByRedRtx) {
- RampUpDownUpTester test(3, 60000, RtpExtension::kAbsSendTime, true, true);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, AbsSendTimeSingleStream) {
- RampUpTester test(1, 0, RtpExtension::kAbsSendTime, false, false);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, AbsSendTimeSimulcast) {
- RampUpTester test(3, 0, RtpExtension::kAbsSendTime, false, false);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, AbsSendTimeSimulcastWithRtx) {
- RampUpTester test(3, 0, RtpExtension::kAbsSendTime, true, false);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, AbsSendTimeSimulcastByRedWithRtx) {
- RampUpTester test(3, 0, RtpExtension::kAbsSendTime, true, true);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, AbsSendTimeSingleStreamWithHighStartBitrate) {
- RampUpTester test(1, 0.9 * kSingleStreamTargetBps, RtpExtension::kAbsSendTime,
- false, false);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, TransportSequenceNumberSingleStream) {
- RampUpTester test(1, 0, RtpExtension::kTransportSequenceNumber, false, false);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, TransportSequenceNumberSimulcast) {
- RampUpTester test(3, 0, RtpExtension::kTransportSequenceNumber, false, false);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, TransportSequenceNumberSimulcastWithRtx) {
- RampUpTester test(3, 0, RtpExtension::kTransportSequenceNumber, true, false);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, TransportSequenceNumberSimulcastByRedWithRtx) {
- RampUpTester test(3, 0, RtpExtension::kTransportSequenceNumber, true, true);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-
-TEST_F(RampUpTest, TransportSequenceNumberSingleStreamWithHighStartBitrate) {
- RampUpTester test(1, 0.9 * kSingleStreamTargetBps,
- RtpExtension::kTransportSequenceNumber, false, false);
- RunBaseTest(&test, FakeNetworkPipe::Config());
-}
-} // namespace webrtc
diff --git a/webrtc/video/rampup_tests.h b/webrtc/video/rampup_tests.h
deleted file mode 100644
index ff65c8d0a0..0000000000
--- a/webrtc/video/rampup_tests.h
+++ /dev/null
@@ -1,135 +0,0 @@
-/*
- * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_VIDEO_RAMPUP_TESTS_H_
-#define WEBRTC_VIDEO_RAMPUP_TESTS_H_
-
-#include <map>
-#include <string>
-#include <vector>
-
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/call.h"
-#include "webrtc/call/transport_adapter.h"
-#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
-#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/test/call_test.h"
-
-namespace webrtc {
-
-static const int kTransmissionTimeOffsetExtensionId = 6;
-static const int kAbsSendTimeExtensionId = 7;
-static const int kTransportSequenceNumberExtensionId = 8;
-static const unsigned int kSingleStreamTargetBps = 1000000;
-
-class Clock;
-class PacketRouter;
-class ReceiveStatistics;
-class RtpHeaderParser;
-class RTPPayloadRegistry;
-class RtpRtcp;
-
-class RampUpTester : public test::EndToEndTest {
- public:
- RampUpTester(size_t num_streams,
- unsigned int start_bitrate_bps,
- const std::string& extension_type,
- bool rtx,
- bool red);
- ~RampUpTester() override;
-
- void PerformTest() override;
-
- protected:
- virtual bool PollStats();
-
- void AccumulateStats(const VideoSendStream::StreamStats& stream,
- size_t* total_packets_sent,
- size_t* total_sent,
- size_t* padding_sent,
- size_t* media_sent) const;
-
- void ReportResult(const std::string& measurement,
- size_t value,
- const std::string& units) const;
- void TriggerTestDone();
-
- rtc::Event event_;
- Clock* const clock_;
- FakeNetworkPipe::Config forward_transport_config_;
- const size_t num_streams_;
- const bool rtx_;
- const bool red_;
- VideoSendStream* send_stream_;
- test::PacketTransport* send_transport_;
-
- private:
- typedef std::map<uint32_t, uint32_t> SsrcMap;
-
- Call::Config GetSenderCallConfig() override;
- void OnStreamsCreated(
- VideoSendStream* send_stream,
- const std::vector<VideoReceiveStream*>& receive_streams) override;
- void OnTransportsCreated(test::PacketTransport* send_transport,
- test::PacketTransport* receive_transport) override;
- size_t GetNumStreams() const;
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override;
- void OnCallsCreated(Call* sender_call, Call* receiver_call) override;
-
- static bool BitrateStatsPollingThread(void* obj);
-
- const int start_bitrate_bps_;
- bool start_bitrate_verified_;
- int expected_bitrate_bps_;
- int64_t test_start_ms_;
- int64_t ramp_up_finished_ms_;
-
- const std::string extension_type_;
- std::vector<uint32_t> ssrcs_;
- std::vector<uint32_t> rtx_ssrcs_;
- SsrcMap rtx_ssrc_map_;
-
- rtc::scoped_ptr<ThreadWrapper> poller_thread_;
- Call* sender_call_;
-};
-
-class RampUpDownUpTester : public RampUpTester {
- public:
- RampUpDownUpTester(size_t num_streams,
- unsigned int start_bitrate_bps,
- const std::string& extension_type,
- bool rtx,
- bool red);
- ~RampUpDownUpTester() override;
-
- protected:
- bool PollStats() override;
-
- private:
- static const int kHighBandwidthLimitBps = 80000;
- static const int kExpectedHighBitrateBps = 60000;
- static const int kLowBandwidthLimitBps = 20000;
- static const int kExpectedLowBitrateBps = 20000;
- enum TestStates { kFirstRampup, kLowRate, kSecondRampup };
-
- Call::Config GetReceiverCallConfig() override;
-
- std::string GetModifierString() const;
- void EvolveTestState(int bitrate_bps, bool suspended);
-
- TestStates test_state_;
- int64_t state_start_ms_;
- int64_t interval_start_ms_;
- int sent_bytes_;
-};
-} // namespace webrtc
-#endif // WEBRTC_VIDEO_RAMPUP_TESTS_H_
diff --git a/webrtc/video/receive_statistics_proxy.cc b/webrtc/video/receive_statistics_proxy.cc
index eec2bc8301..d6ab4ff361 100644
--- a/webrtc/video/receive_statistics_proxy.cc
+++ b/webrtc/video/receive_statistics_proxy.cc
@@ -13,7 +13,7 @@
#include <cmath>
#include "webrtc/base/checks.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/system_wrappers/include/clock.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/metrics.h"
@@ -37,37 +37,39 @@ ReceiveStatisticsProxy::~ReceiveStatisticsProxy() {
void ReceiveStatisticsProxy::UpdateHistograms() {
int fraction_lost = report_block_stats_.FractionLostInPercent();
if (fraction_lost != -1) {
- RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.ReceivedPacketsLostInPercent",
- fraction_lost);
+ RTC_HISTOGRAM_PERCENTAGE_SPARSE("WebRTC.Video.ReceivedPacketsLostInPercent",
+ fraction_lost);
}
const int kMinRequiredSamples = 200;
int samples = static_cast<int>(render_fps_tracker_.TotalSampleCount());
if (samples > kMinRequiredSamples) {
- RTC_HISTOGRAM_COUNTS_100("WebRTC.Video.RenderFramesPerSecond",
- static_cast<int>(render_fps_tracker_.ComputeTotalRate()));
- RTC_HISTOGRAM_COUNTS_100000("WebRTC.Video.RenderSqrtPixelsPerSecond",
- static_cast<int>(render_pixel_tracker_.ComputeTotalRate()));
+ RTC_HISTOGRAM_COUNTS_SPARSE_100("WebRTC.Video.RenderFramesPerSecond",
+ round(render_fps_tracker_.ComputeTotalRate()));
+ RTC_HISTOGRAM_COUNTS_SPARSE_100000("WebRTC.Video.RenderSqrtPixelsPerSecond",
+ round(render_pixel_tracker_.ComputeTotalRate()));
}
int width = render_width_counter_.Avg(kMinRequiredSamples);
int height = render_height_counter_.Avg(kMinRequiredSamples);
if (width != -1) {
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.ReceivedWidthInPixels", width);
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.ReceivedHeightInPixels", height);
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000("WebRTC.Video.ReceivedWidthInPixels",
+ width);
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000("WebRTC.Video.ReceivedHeightInPixels",
+ height);
}
int qp = qp_counters_.vp8.Avg(kMinRequiredSamples);
if (qp != -1)
- RTC_HISTOGRAM_COUNTS_200("WebRTC.Video.Decoded.Vp8.Qp", qp);
+ RTC_HISTOGRAM_COUNTS_SPARSE_200("WebRTC.Video.Decoded.Vp8.Qp", qp);
// TODO(asapersson): DecoderTiming() is called periodically (every 1000 ms)
// and not per frame. Change decode time to include every frame.
const int kMinRequiredDecodeSamples = 5;
int decode_ms = decode_time_counter_.Avg(kMinRequiredDecodeSamples);
if (decode_ms != -1)
- RTC_HISTOGRAM_COUNTS_1000("WebRTC.Video.DecodeTimeInMs", decode_ms);
+ RTC_HISTOGRAM_COUNTS_SPARSE_1000("WebRTC.Video.DecodeTimeInMs", decode_ms);
int delay_ms = delay_counter_.Avg(kMinRequiredDecodeSamples);
if (delay_ms != -1)
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.OnewayDelayInMs", delay_ms);
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000("WebRTC.Video.OnewayDelayInMs", delay_ms);
}
VideoReceiveStream::Stats ReceiveStatisticsProxy::GetStats() const {
@@ -80,6 +82,11 @@ void ReceiveStatisticsProxy::OnIncomingPayloadType(int payload_type) {
stats_.current_payload_type = payload_type;
}
+void ReceiveStatisticsProxy::OnDecoderImplementationName(
+ const char* implementation_name) {
+ rtc::CritScope lock(&crit_);
+ stats_.decoder_implementation_name = implementation_name;
+}
void ReceiveStatisticsProxy::OnIncomingRate(unsigned int framerate,
unsigned int bitrate_bps) {
rtc::CritScope lock(&crit_);
diff --git a/webrtc/video/receive_statistics_proxy.h b/webrtc/video/receive_statistics_proxy.h
index b6741f9cde..87cb9506a9 100644
--- a/webrtc/video/receive_statistics_proxy.h
+++ b/webrtc/video/receive_statistics_proxy.h
@@ -19,9 +19,9 @@
#include "webrtc/common_types.h"
#include "webrtc/frame_callback.h"
#include "webrtc/modules/remote_bitrate_estimator/rate_statistics.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding_defines.h"
-#include "webrtc/video_engine/report_block_stats.h"
-#include "webrtc/video_engine/vie_channel.h"
+#include "webrtc/modules/video_coding/include/video_coding_defines.h"
+#include "webrtc/video/report_block_stats.h"
+#include "webrtc/video/vie_channel.h"
#include "webrtc/video_receive_stream.h"
#include "webrtc/video_renderer.h"
@@ -45,6 +45,7 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback,
void OnDecodedFrame();
void OnRenderedFrame(int width, int height);
void OnIncomingPayloadType(int payload_type);
+ void OnDecoderImplementationName(const char* implementation_name);
void OnIncomingRate(unsigned int framerate, unsigned int bitrate_bps);
void OnDecoderTiming(int decode_ms,
int max_decode_ms,
diff --git a/webrtc/video/replay.cc b/webrtc/video/replay.cc
index f54909e55f..484924872b 100644
--- a/webrtc/video/replay.cc
+++ b/webrtc/video/replay.cc
@@ -20,7 +20,7 @@
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/call.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
#include "webrtc/system_wrappers/include/clock.h"
#include "webrtc/system_wrappers/include/sleep.h"
#include "webrtc/test/encoder_settings.h"
diff --git a/webrtc/video/report_block_stats.cc b/webrtc/video/report_block_stats.cc
new file mode 100644
index 0000000000..dee5662c3c
--- /dev/null
+++ b/webrtc/video/report_block_stats.cc
@@ -0,0 +1,111 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video/report_block_stats.h"
+
+namespace webrtc {
+
+namespace {
+int FractionLost(uint32_t num_lost_sequence_numbers,
+ uint32_t num_sequence_numbers) {
+ if (num_sequence_numbers == 0) {
+ return 0;
+ }
+ return ((num_lost_sequence_numbers * 255) + (num_sequence_numbers / 2)) /
+ num_sequence_numbers;
+}
+} // namespace
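+
+// Example: 5 lost out of 100 packets gives
+// ((5 * 255) + 50) / 100 = 13 on the 0-255 scale used here;
+// FractionLostInPercent() below maps it back as 13 * 100 / 255 = 5%.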
+
+// Helper class for RTCP statistics.
+ReportBlockStats::ReportBlockStats()
+ : num_sequence_numbers_(0),
+ num_lost_sequence_numbers_(0) {
+}
+
+void ReportBlockStats::Store(const RtcpStatistics& rtcp_stats,
+ uint32_t remote_ssrc,
+ uint32_t source_ssrc) {
+ RTCPReportBlock block;
+ block.cumulativeLost = rtcp_stats.cumulative_lost;
+ block.fractionLost = rtcp_stats.fraction_lost;
+ block.extendedHighSeqNum = rtcp_stats.extended_max_sequence_number;
+ block.jitter = rtcp_stats.jitter;
+ block.remoteSSRC = remote_ssrc;
+ block.sourceSSRC = source_ssrc;
+ uint32_t num_sequence_numbers = 0;
+ uint32_t num_lost_sequence_numbers = 0;
+ StoreAndAddPacketIncrement(
+ block, &num_sequence_numbers, &num_lost_sequence_numbers);
+}
+
+RTCPReportBlock ReportBlockStats::AggregateAndStore(
+ const ReportBlockVector& report_blocks) {
+ RTCPReportBlock aggregate;
+ if (report_blocks.empty()) {
+ return aggregate;
+ }
+ uint32_t num_sequence_numbers = 0;
+ uint32_t num_lost_sequence_numbers = 0;
+ ReportBlockVector::const_iterator report_block = report_blocks.begin();
+ for (; report_block != report_blocks.end(); ++report_block) {
+ aggregate.cumulativeLost += report_block->cumulativeLost;
+ aggregate.jitter += report_block->jitter;
+ StoreAndAddPacketIncrement(*report_block,
+ &num_sequence_numbers,
+ &num_lost_sequence_numbers);
+ }
+
+ if (report_blocks.size() == 1) {
+ // No aggregation needed.
+ return report_blocks[0];
+ }
+ // Fraction lost since previous report block.
+ aggregate.fractionLost =
+ FractionLost(num_lost_sequence_numbers, num_sequence_numbers);
+ aggregate.jitter = static_cast<uint32_t>(
+ (aggregate.jitter + report_blocks.size() / 2) / report_blocks.size());
+ return aggregate;
+}
+
+void ReportBlockStats::StoreAndAddPacketIncrement(
+ const RTCPReportBlock& report_block,
+ uint32_t* num_sequence_numbers,
+ uint32_t* num_lost_sequence_numbers) {
+ // Get diff with previous report block.
+ ReportBlockMap::iterator prev_report_block = prev_report_blocks_.find(
+ report_block.sourceSSRC);
+ if (prev_report_block != prev_report_blocks_.end()) {
+ int seq_num_diff = report_block.extendedHighSeqNum -
+ prev_report_block->second.extendedHighSeqNum;
+ int cum_loss_diff = report_block.cumulativeLost -
+ prev_report_block->second.cumulativeLost;
+ if (seq_num_diff >= 0 && cum_loss_diff >= 0) {
+ *num_sequence_numbers += seq_num_diff;
+ *num_lost_sequence_numbers += cum_loss_diff;
+ // Update total number of packets/lost packets.
+ num_sequence_numbers_ += seq_num_diff;
+ num_lost_sequence_numbers_ += cum_loss_diff;
+ }
+ }
+ // Store current report block.
+ prev_report_blocks_[report_block.sourceSSRC] = report_block;
+}
+
+int ReportBlockStats::FractionLostInPercent() const {
+ if (num_sequence_numbers_ == 0) {
+ return -1;
+ }
+ return FractionLost(
+ num_lost_sequence_numbers_, num_sequence_numbers_) * 100 / 255;
+}
+
+} // namespace webrtc
+
diff --git a/webrtc/video/report_block_stats.h b/webrtc/video/report_block_stats.h
new file mode 100644
index 0000000000..c54e4677f4
--- /dev/null
+++ b/webrtc/video/report_block_stats.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_REPORT_BLOCK_STATS_H_
+#define WEBRTC_VIDEO_REPORT_BLOCK_STATS_H_
+
+#include <map>
+#include <vector>
+
+#include "webrtc/common_types.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+
+namespace webrtc {
+
+// Helper class for RTCP statistics.
+class ReportBlockStats {
+ public:
+ typedef std::map<uint32_t, RTCPReportBlock> ReportBlockMap;
+ typedef std::vector<RTCPReportBlock> ReportBlockVector;
+ ReportBlockStats();
+ ~ReportBlockStats() {}
+
+ // Updates stats and stores report blocks.
+ // Returns an aggregate of the |report_blocks|.
+ RTCPReportBlock AggregateAndStore(const ReportBlockVector& report_blocks);
+
+ // Updates stats and stores report block.
+ void Store(const RtcpStatistics& rtcp_stats,
+ uint32_t remote_ssrc,
+ uint32_t source_ssrc);
+
+ // Returns the total fraction of lost packets (or -1 if less than two report
+ // blocks have been stored).
+ int FractionLostInPercent() const;
+
+ private:
+  // Updates the total number of packets/lost packets, stores the report
+  // block, and adds the packet and lost-packet increments since the
+  // previous report block to the output parameters.
+ void StoreAndAddPacketIncrement(const RTCPReportBlock& report_block,
+ uint32_t* num_sequence_numbers,
+ uint32_t* num_lost_sequence_numbers);
+
+ // The total number of packets/lost packets.
+ uint32_t num_sequence_numbers_;
+ uint32_t num_lost_sequence_numbers_;
+
+ // Map holding the last stored report block (mapped by the source SSRC).
+ ReportBlockMap prev_report_blocks_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_REPORT_BLOCK_STATS_H_
+
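
For orientation, a minimal usage sketch of the helper declared above. The
values are taken from the unit test added below; the wrapper function and
SSRC constants are illustrative only, not part of this patch:

    #include "webrtc/video/report_block_stats.h"

    void ExampleUsage() {
      webrtc::ReportBlockStats stats;
      webrtc::RtcpStatistics rtcp;  // Normally filled in by the RTCP receiver.
      rtcp.cumulative_lost = 10;
      rtcp.extended_max_sequence_number = 24000;
      stats.Store(rtcp, /*remote_ssrc=*/1, /*source_ssrc=*/0x12345);
      rtcp.cumulative_lost = 15;
      rtcp.extended_max_sequence_number = 24100;
      stats.Store(rtcp, /*remote_ssrc=*/1, /*source_ssrc=*/0x12345);
      // 100 * (15 - 10) / (24100 - 24000) = 5.
      int loss_percent = stats.FractionLostInPercent();
      (void)loss_percent;  // == 5
    }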
diff --git a/webrtc/video/report_block_stats_unittest.cc b/webrtc/video/report_block_stats_unittest.cc
new file mode 100644
index 0000000000..5cde9004b1
--- /dev/null
+++ b/webrtc/video/report_block_stats_unittest.cc
@@ -0,0 +1,146 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+#include "webrtc/video/report_block_stats.h"
+
+namespace webrtc {
+
+class ReportBlockStatsTest : public ::testing::Test {
+ protected:
+ ReportBlockStatsTest() : kSsrc1(0x12345), kSsrc2(0x23456) {}
+
+ void SetUp() override {
+ // kSsrc1: block 1-3.
+ block1_1_.cumulativeLost = 10;
+ block1_1_.fractionLost = 123;
+ block1_1_.extendedHighSeqNum = 24000;
+ block1_1_.jitter = 777;
+ block1_1_.sourceSSRC = kSsrc1;
+ block1_2_.cumulativeLost = 15;
+ block1_2_.fractionLost = 0;
+ block1_2_.extendedHighSeqNum = 24100;
+ block1_2_.jitter = 222;
+ block1_2_.sourceSSRC = kSsrc1;
+ block1_3_.cumulativeLost = 50;
+ block1_3_.fractionLost = 0;
+ block1_3_.extendedHighSeqNum = 24200;
+ block1_3_.jitter = 333;
+ block1_3_.sourceSSRC = kSsrc1;
+ // kSsrc2: block 1,2.
+ block2_1_.cumulativeLost = 111;
+ block2_1_.fractionLost = 222;
+ block2_1_.extendedHighSeqNum = 8500;
+ block2_1_.jitter = 555;
+ block2_1_.sourceSSRC = kSsrc2;
+ block2_2_.cumulativeLost = 136;
+ block2_2_.fractionLost = 0;
+ block2_2_.extendedHighSeqNum = 8800;
+ block2_2_.jitter = 888;
+ block2_2_.sourceSSRC = kSsrc2;
+
+ ssrc1block1_.push_back(block1_1_);
+ ssrc1block2_.push_back(block1_2_);
+ ssrc12block1_.push_back(block1_1_);
+ ssrc12block1_.push_back(block2_1_);
+ ssrc12block2_.push_back(block1_2_);
+ ssrc12block2_.push_back(block2_2_);
+ }
+
+ RtcpStatistics RtcpReportBlockToRtcpStatistics(
+ const RTCPReportBlock& stats) {
+ RtcpStatistics block;
+ block.cumulative_lost = stats.cumulativeLost;
+ block.fraction_lost = stats.fractionLost;
+ block.extended_max_sequence_number = stats.extendedHighSeqNum;
+ block.jitter = stats.jitter;
+ return block;
+ }
+
+ const uint32_t kSsrc1;
+ const uint32_t kSsrc2;
+ RTCPReportBlock block1_1_;
+ RTCPReportBlock block1_2_;
+ RTCPReportBlock block1_3_;
+ RTCPReportBlock block2_1_;
+ RTCPReportBlock block2_2_;
+ std::vector<RTCPReportBlock> ssrc1block1_;
+ std::vector<RTCPReportBlock> ssrc1block2_;
+ std::vector<RTCPReportBlock> ssrc12block1_;
+ std::vector<RTCPReportBlock> ssrc12block2_;
+};
+
+TEST_F(ReportBlockStatsTest, AggregateAndStore_NoSsrc) {
+ ReportBlockStats stats;
+ std::vector<RTCPReportBlock> empty;
+ RTCPReportBlock aggregated = stats.AggregateAndStore(empty);
+ EXPECT_EQ(0U, aggregated.fractionLost);
+ EXPECT_EQ(0U, aggregated.cumulativeLost);
+ EXPECT_EQ(0U, aggregated.jitter);
+ EXPECT_EQ(0U, aggregated.extendedHighSeqNum);
+}
+
+TEST_F(ReportBlockStatsTest, AggregateAndStore_OneSsrc) {
+ ReportBlockStats stats;
+ RTCPReportBlock aggregated = stats.AggregateAndStore(ssrc1block1_);
+ // One ssrc, no aggregation done.
+ EXPECT_EQ(123U, aggregated.fractionLost);
+ EXPECT_EQ(10U, aggregated.cumulativeLost);
+ EXPECT_EQ(777U, aggregated.jitter);
+ EXPECT_EQ(24000U, aggregated.extendedHighSeqNum);
+
+ aggregated = stats.AggregateAndStore(ssrc1block2_);
+ EXPECT_EQ(0U, aggregated.fractionLost);
+ EXPECT_EQ(15U, aggregated.cumulativeLost);
+ EXPECT_EQ(222U, aggregated.jitter);
+ EXPECT_EQ(24100U, aggregated.extendedHighSeqNum);
+
+ // fl: 100 * (15-10) / (24100-24000) = 5%
+ EXPECT_EQ(5, stats.FractionLostInPercent());
+}
+
+TEST_F(ReportBlockStatsTest, AggregateAndStore_TwoSsrcs) {
+ ReportBlockStats stats;
+ RTCPReportBlock aggregated = stats.AggregateAndStore(ssrc12block1_);
+ EXPECT_EQ(0U, aggregated.fractionLost);
+ EXPECT_EQ(10U + 111U, aggregated.cumulativeLost);
+ EXPECT_EQ((777U + 555U) / 2, aggregated.jitter);
+ EXPECT_EQ(0U, aggregated.extendedHighSeqNum);
+
+ aggregated = stats.AggregateAndStore(ssrc12block2_);
+ // fl: 255 * ((15-10) + (136-111)) / ((24100-24000) + (8800-8500)) = 19
+ EXPECT_EQ(19U, aggregated.fractionLost);
+ EXPECT_EQ(15U + 136U, aggregated.cumulativeLost);
+ EXPECT_EQ((222U + 888U) / 2, aggregated.jitter);
+ EXPECT_EQ(0U, aggregated.extendedHighSeqNum);
+
+ // fl: 100 * ((15-10) + (136-111)) / ((24100-24000) + (8800-8500)) = 7%
+ EXPECT_EQ(7, stats.FractionLostInPercent());
+}
+
+TEST_F(ReportBlockStatsTest, StoreAndGetFractionLost) {
+ const uint32_t kRemoteSsrc = 1;
+ ReportBlockStats stats;
+ EXPECT_EQ(-1, stats.FractionLostInPercent());
+
+ // First block.
+ stats.Store(RtcpReportBlockToRtcpStatistics(block1_1_), kRemoteSsrc, kSsrc1);
+ EXPECT_EQ(-1, stats.FractionLostInPercent());
+ // fl: 100 * (15-10) / (24100-24000) = 5%
+ stats.Store(RtcpReportBlockToRtcpStatistics(block1_2_), kRemoteSsrc, kSsrc1);
+ EXPECT_EQ(5, stats.FractionLostInPercent());
+ // fl: 100 * (50-10) / (24200-24000) = 20%
+ stats.Store(RtcpReportBlockToRtcpStatistics(block1_3_), kRemoteSsrc, kSsrc1);
+ EXPECT_EQ(20, stats.FractionLostInPercent());
+}
+
+} // namespace webrtc
+
diff --git a/webrtc/video/screenshare_loopback.cc b/webrtc/video/screenshare_loopback.cc
index 9897783eb9..6479aa4ebb 100644
--- a/webrtc/video/screenshare_loopback.cc
+++ b/webrtc/video/screenshare_loopback.cc
@@ -20,6 +20,7 @@
namespace webrtc {
namespace flags {
+// Flags common with video loopback, with different default values.
DEFINE_int32(width, 1850, "Video width (crops source).");
size_t Width() {
return static_cast<size_t>(FLAGS_width);
@@ -35,21 +36,6 @@ int Fps() {
return static_cast<int>(FLAGS_fps);
}
-DEFINE_int32(slide_change_interval,
- 10,
- "Interval (in seconds) between simulated slide changes.");
-int SlideChangeInterval() {
- return static_cast<int>(FLAGS_slide_change_interval);
-}
-
-DEFINE_int32(
- scroll_duration,
- 0,
- "Duration (in seconds) during which a slide will be scrolled into place.");
-int ScrollDuration() {
- return static_cast<int>(FLAGS_scroll_duration);
-}
-
DEFINE_int32(min_bitrate, 50, "Call and stream min bitrate in kbps.");
int MinBitrateKbps() {
return static_cast<int>(FLAGS_min_bitrate);
@@ -71,26 +57,41 @@ int MaxBitrateKbps() {
}
DEFINE_int32(num_temporal_layers, 2, "Number of temporal layers to use.");
-size_t NumTemporalLayers() {
- return static_cast<size_t>(FLAGS_num_temporal_layers);
+int NumTemporalLayers() {
+ return static_cast<int>(FLAGS_num_temporal_layers);
+}
+
+// Flags common with video loopback, with equal default values.
+DEFINE_string(codec, "VP8", "Video codec to use.");
+std::string Codec() {
+ return static_cast<std::string>(FLAGS_codec);
+}
+
+DEFINE_int32(selected_tl,
+ -1,
+ "Temporal layer to show or analyze. -1 to disable filtering.");
+int SelectedTL() {
+ return static_cast<int>(FLAGS_selected_tl);
}
DEFINE_int32(
- tl_discard_threshold,
+ duration,
0,
- "Discard TLs with id greater or equal the threshold. 0 to disable.");
-size_t TLDiscardThreshold() {
- return static_cast<size_t>(FLAGS_tl_discard_threshold);
+    "Duration of the test in seconds. If 0, the video is rendered instead.");
+int DurationSecs() {
+ return static_cast<int>(FLAGS_duration);
}
-DEFINE_int32(min_transmit_bitrate, 400, "Min transmit bitrate incl. padding.");
-int MinTransmitBitrateKbps() {
- return FLAGS_min_transmit_bitrate;
+DEFINE_string(output_filename, "", "Target graph data filename.");
+std::string OutputFilename() {
+ return static_cast<std::string>(FLAGS_output_filename);
}
-DEFINE_string(codec, "VP8", "Video codec to use.");
-std::string Codec() {
- return static_cast<std::string>(FLAGS_codec);
+DEFINE_string(graph_title,
+ "",
+ "If empty, title will be generated automatically.");
+std::string GraphTitle() {
+ return static_cast<std::string>(FLAGS_graph_title);
}
DEFINE_int32(loss_percent, 0, "Percentage of packets randomly lost.");
@@ -124,21 +125,53 @@ int StdPropagationDelayMs() {
return static_cast<int>(FLAGS_std_propagation_delay_ms);
}
-DEFINE_bool(logs, false, "print logs to stderr");
+DEFINE_int32(selected_stream, 0, "ID of the stream to show or analyze.");
+int SelectedStream() {
+ return static_cast<int>(FLAGS_selected_stream);
+}
-DEFINE_string(
- output_filename,
- "",
- "Name of a target graph data file. If set, no preview will be shown.");
-std::string OutputFilename() {
- return static_cast<std::string>(FLAGS_output_filename);
+DEFINE_int32(num_spatial_layers, 1, "Number of spatial layers to use.");
+int NumSpatialLayers() {
+ return static_cast<int>(FLAGS_num_spatial_layers);
}
-DEFINE_int32(duration, 60, "Duration of the test in seconds.");
-int DurationSecs() {
- return static_cast<int>(FLAGS_duration);
+DEFINE_int32(selected_sl,
+ -1,
+ "Spatial layer to show or analyze. -1 to disable filtering.");
+int SelectedSL() {
+ return static_cast<int>(FLAGS_selected_sl);
+}
+
+DEFINE_string(stream0,
+ "",
+ "Comma separated values describing VideoStream for stream #0.");
+std::string Stream0() {
+ return static_cast<std::string>(FLAGS_stream0);
}
+DEFINE_string(stream1,
+ "",
+ "Comma separated values describing VideoStream for stream #1.");
+std::string Stream1() {
+ return static_cast<std::string>(FLAGS_stream1);
+}
+
+DEFINE_string(sl0,
+ "",
+ "Comma separated values describing SpatialLayer for layer #0.");
+std::string SL0() {
+ return static_cast<std::string>(FLAGS_sl0);
+}
+
+DEFINE_string(sl1,
+ "",
+ "Comma separated values describing SpatialLayer for layer #1.");
+std::string SL1() {
+ return static_cast<std::string>(FLAGS_sl1);
+}
+
+DEFINE_bool(logs, false, "print logs to stderr");
+
DEFINE_bool(send_side_bwe, true, "Use send-side bandwidth estimation");
DEFINE_string(
@@ -148,6 +181,28 @@ DEFINE_string(
"E.g. running with --force_fieldtrials=WebRTC-FooFeature/Enable/"
" will assign the group Enable to field trial WebRTC-FooFeature. Multiple "
"trials are separated by \"/\"");
+
+// Screenshare-specific flags.
+DEFINE_int32(min_transmit_bitrate, 400, "Min transmit bitrate incl. padding.");
+int MinTransmitBitrateKbps() {
+ return FLAGS_min_transmit_bitrate;
+}
+
+DEFINE_int32(slide_change_interval,
+ 10,
+ "Interval (in seconds) between simulated slide changes.");
+int SlideChangeInterval() {
+ return static_cast<int>(FLAGS_slide_change_interval);
+}
+
+DEFINE_int32(
+ scroll_duration,
+ 0,
+ "Duration (in seconds) during which a slide will be scrolled into place.");
+int ScrollDuration() {
+ return static_cast<int>(FLAGS_scroll_duration);
+}
+
} // namespace flags
void Loopback() {
@@ -167,20 +222,32 @@ void Loopback() {
{flags::Width(), flags::Height(), flags::Fps(),
flags::MinBitrateKbps() * 1000, flags::TargetBitrateKbps() * 1000,
flags::MaxBitrateKbps() * 1000, flags::Codec(),
- flags::NumTemporalLayers(), flags::MinTransmitBitrateKbps() * 1000,
- call_bitrate_config, flags::TLDiscardThreshold(),
+ flags::NumTemporalLayers(), flags::SelectedTL(),
+ flags::MinTransmitBitrateKbps() * 1000, call_bitrate_config,
flags::FLAGS_send_side_bwe},
{}, // Video specific.
{true, flags::SlideChangeInterval(), flags::ScrollDuration()},
- {"screenshare", 0.0, 0.0, flags::DurationSecs(), flags::OutputFilename()},
+ {"screenshare", 0.0, 0.0, flags::DurationSecs(), flags::OutputFilename(),
+ flags::GraphTitle()},
pipe_config,
flags::FLAGS_logs};
+ std::vector<std::string> stream_descriptors;
+ stream_descriptors.push_back(flags::Stream0());
+ stream_descriptors.push_back(flags::Stream1());
+ std::vector<std::string> SL_descriptors;
+ SL_descriptors.push_back(flags::SL0());
+ SL_descriptors.push_back(flags::SL1());
+ VideoQualityTest::FillScalabilitySettings(
+ &params, stream_descriptors, flags::SelectedStream(),
+ flags::NumSpatialLayers(), flags::SelectedSL(), SL_descriptors);
+
VideoQualityTest test;
- if (flags::OutputFilename().empty())
- test.RunWithVideoRenderer(params);
- else
+ if (flags::DurationSecs()) {
test.RunWithAnalyzer(params);
+ } else {
+ test.RunWithVideoRenderer(params);
+ }
}
} // namespace webrtc
diff --git a/webrtc/video/send_statistics_proxy.cc b/webrtc/video/send_statistics_proxy.cc
index 5be9970583..d2964b21da 100644
--- a/webrtc/video/send_statistics_proxy.cc
+++ b/webrtc/video/send_statistics_proxy.cc
@@ -11,16 +11,18 @@
#include "webrtc/video/send_statistics_proxy.h"
#include <algorithm>
+#include <cmath>
#include <map>
#include "webrtc/base/checks.h"
-
#include "webrtc/base/logging.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/metrics.h"
namespace webrtc {
namespace {
+const float kEncodeTimeWeigthFactor = 0.5f;
+
// Used by histograms. Values of entries should not be changed.
enum HistogramCodecType {
kVideoUnknown = 0,
@@ -30,6 +32,17 @@ enum HistogramCodecType {
kVideoMax = 64,
};
+const char* GetUmaPrefix(VideoEncoderConfig::ContentType content_type) {
+ switch (content_type) {
+ case VideoEncoderConfig::ContentType::kRealtimeVideo:
+ return "WebRTC.Video.";
+ case VideoEncoderConfig::ContentType::kScreen:
+ return "WebRTC.Video.Screenshare.";
+ }
+ RTC_NOTREACHED();
+ return nullptr;
+}
+
HistogramCodecType PayloadNameToHistogramCodecType(
const std::string& payload_name) {
if (payload_name == "VP8") {
@@ -44,7 +57,7 @@ HistogramCodecType PayloadNameToHistogramCodecType(
}
void UpdateCodecTypeHistogram(const std::string& payload_name) {
- RTC_HISTOGRAM_ENUMERATION("WebRTC.Video.Encoder.CodecType",
+ RTC_HISTOGRAM_ENUMERATION_SPARSE("WebRTC.Video.Encoder.CodecType",
PayloadNameToHistogramCodecType(payload_name), kVideoMax);
}
} // namespace
@@ -52,77 +65,114 @@ void UpdateCodecTypeHistogram(const std::string& payload_name) {
const int SendStatisticsProxy::kStatsTimeoutMs = 5000;
-SendStatisticsProxy::SendStatisticsProxy(Clock* clock,
- const VideoSendStream::Config& config)
+SendStatisticsProxy::SendStatisticsProxy(
+ Clock* clock,
+ const VideoSendStream::Config& config,
+ VideoEncoderConfig::ContentType content_type)
: clock_(clock),
config_(config),
- input_frame_rate_tracker_(100u, 10u),
- sent_frame_rate_tracker_(100u, 10u),
+ content_type_(content_type),
last_sent_frame_timestamp_(0),
- max_sent_width_per_timestamp_(0),
- max_sent_height_per_timestamp_(0) {
+ encode_time_(kEncodeTimeWeigthFactor),
+ uma_container_(new UmaSamplesContainer(GetUmaPrefix(content_type_))) {
UpdateCodecTypeHistogram(config_.encoder_settings.payload_name);
}
-SendStatisticsProxy::~SendStatisticsProxy() {
+SendStatisticsProxy::~SendStatisticsProxy() {}
+
+SendStatisticsProxy::UmaSamplesContainer::UmaSamplesContainer(
+ const char* prefix)
+ : uma_prefix_(prefix),
+ max_sent_width_per_timestamp_(0),
+ max_sent_height_per_timestamp_(0),
+ input_frame_rate_tracker_(100u, 10u),
+ sent_frame_rate_tracker_(100u, 10u) {}
+
+SendStatisticsProxy::UmaSamplesContainer::~UmaSamplesContainer() {
UpdateHistograms();
}
-void SendStatisticsProxy::UpdateHistograms() {
- int input_fps =
- static_cast<int>(input_frame_rate_tracker_.ComputeTotalRate());
- if (input_fps > 0)
- RTC_HISTOGRAM_COUNTS_100("WebRTC.Video.InputFramesPerSecond", input_fps);
- int sent_fps =
- static_cast<int>(sent_frame_rate_tracker_.ComputeTotalRate());
- if (sent_fps > 0)
- RTC_HISTOGRAM_COUNTS_100("WebRTC.Video.SentFramesPerSecond", sent_fps);
-
+void SendStatisticsProxy::UmaSamplesContainer::UpdateHistograms() {
const int kMinRequiredSamples = 200;
int in_width = input_width_counter_.Avg(kMinRequiredSamples);
int in_height = input_height_counter_.Avg(kMinRequiredSamples);
+ int in_fps = round(input_frame_rate_tracker_.ComputeTotalRate());
if (in_width != -1) {
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.InputWidthInPixels", in_width);
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.InputHeightInPixels", in_height);
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(uma_prefix_ + "InputWidthInPixels",
+ in_width);
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(uma_prefix_ + "InputHeightInPixels",
+ in_height);
+ RTC_HISTOGRAM_COUNTS_SPARSE_100(uma_prefix_ + "InputFramesPerSecond",
+ in_fps);
}
int sent_width = sent_width_counter_.Avg(kMinRequiredSamples);
int sent_height = sent_height_counter_.Avg(kMinRequiredSamples);
+ int sent_fps = round(sent_frame_rate_tracker_.ComputeTotalRate());
if (sent_width != -1) {
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.SentWidthInPixels", sent_width);
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.SentHeightInPixels", sent_height);
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(uma_prefix_ + "SentWidthInPixels",
+ sent_width);
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(uma_prefix_ + "SentHeightInPixels",
+ sent_height);
+ RTC_HISTOGRAM_COUNTS_SPARSE_100(uma_prefix_ + "SentFramesPerSecond",
+ sent_fps);
}
int encode_ms = encode_time_counter_.Avg(kMinRequiredSamples);
if (encode_ms != -1)
- RTC_HISTOGRAM_COUNTS_1000("WebRTC.Video.EncodeTimeInMs", encode_ms);
+ RTC_HISTOGRAM_COUNTS_SPARSE_1000(uma_prefix_ + "EncodeTimeInMs", encode_ms);
int key_frames_permille = key_frame_counter_.Permille(kMinRequiredSamples);
if (key_frames_permille != -1) {
- RTC_HISTOGRAM_COUNTS_1000("WebRTC.Video.KeyFramesSentInPermille",
- key_frames_permille);
+ RTC_HISTOGRAM_COUNTS_SPARSE_1000(uma_prefix_ + "KeyFramesSentInPermille",
+ key_frames_permille);
}
int quality_limited =
quality_limited_frame_counter_.Percent(kMinRequiredSamples);
if (quality_limited != -1) {
- RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.QualityLimitedResolutionInPercent",
- quality_limited);
+ RTC_HISTOGRAM_PERCENTAGE_SPARSE(
+ uma_prefix_ + "QualityLimitedResolutionInPercent", quality_limited);
}
int downscales = quality_downscales_counter_.Avg(kMinRequiredSamples);
if (downscales != -1) {
- RTC_HISTOGRAM_ENUMERATION("WebRTC.Video.QualityLimitedResolutionDownscales",
- downscales, 20);
+ RTC_HISTOGRAM_ENUMERATION_SPARSE(
+ uma_prefix_ + "QualityLimitedResolutionDownscales", downscales, 20);
}
int bw_limited = bw_limited_frame_counter_.Percent(kMinRequiredSamples);
if (bw_limited != -1) {
- RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.BandwidthLimitedResolutionInPercent",
- bw_limited);
+ RTC_HISTOGRAM_PERCENTAGE_SPARSE(
+ uma_prefix_ + "BandwidthLimitedResolutionInPercent", bw_limited);
}
int num_disabled = bw_resolutions_disabled_counter_.Avg(kMinRequiredSamples);
if (num_disabled != -1) {
- RTC_HISTOGRAM_ENUMERATION(
- "WebRTC.Video.BandwidthLimitedResolutionsDisabled", num_disabled, 10);
+ RTC_HISTOGRAM_ENUMERATION_SPARSE(
+ uma_prefix_ + "BandwidthLimitedResolutionsDisabled", num_disabled, 10);
+ }
+ int delay_ms = delay_counter_.Avg(kMinRequiredSamples);
+ if (delay_ms != -1)
+ RTC_HISTOGRAM_COUNTS_SPARSE_100000(uma_prefix_ + "SendSideDelayInMs",
+ delay_ms);
+
+ int max_delay_ms = max_delay_counter_.Avg(kMinRequiredSamples);
+ if (max_delay_ms != -1) {
+ RTC_HISTOGRAM_COUNTS_SPARSE_100000(uma_prefix_ + "SendSideDelayMaxInMs",
+ max_delay_ms);
+ }
+}
+
+void SendStatisticsProxy::SetContentType(
+ VideoEncoderConfig::ContentType content_type) {
+ rtc::CritScope lock(&crit_);
+ if (content_type_ != content_type) {
+ uma_container_.reset(new UmaSamplesContainer(GetUmaPrefix(content_type)));
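+    // Replacing the container flushes the histograms for the old content
+    // type via ~UmaSamplesContainer() before samples start accumulating
+    // under the new prefix.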
+ content_type_ = content_type;
}
}
+void SendStatisticsProxy::OnEncoderImplementationName(
+ const char* implementation_name) {
+ rtc::CritScope lock(&crit_);
+ stats_.encoder_implementation_name = implementation_name;
+}
+
void SendStatisticsProxy::OnOutgoingRate(uint32_t framerate, uint32_t bitrate) {
rtc::CritScope lock(&crit_);
stats_.encode_frame_rate = framerate;
@@ -132,8 +182,6 @@ void SendStatisticsProxy::OnOutgoingRate(uint32_t framerate, uint32_t bitrate) {
void SendStatisticsProxy::CpuOveruseMetricsUpdated(
const CpuOveruseMetrics& metrics) {
rtc::CritScope lock(&crit_);
- // TODO(asapersson): Change to use OnEncodedFrame() for avg_encode_time_ms.
- stats_.avg_encode_time_ms = metrics.avg_encode_time_ms;
stats_.encode_usage_percent = metrics.encode_usage_percent;
}
@@ -146,7 +194,7 @@ VideoSendStream::Stats SendStatisticsProxy::GetStats() {
rtc::CritScope lock(&crit_);
PurgeOldStats();
stats_.input_frame_rate =
- static_cast<int>(input_frame_rate_tracker_.ComputeRate());
+ round(uma_container_->input_frame_rate_tracker_.ComputeRate());
return stats_;
}
@@ -219,23 +267,28 @@ void SendStatisticsProxy::OnSendEncodedImage(
stats->height = encoded_image._encodedHeight;
update_times_[ssrc].resolution_update_ms = clock_->TimeInMilliseconds();
- key_frame_counter_.Add(encoded_image._frameType == kVideoFrameKey);
+ uma_container_->key_frame_counter_.Add(encoded_image._frameType ==
+ kVideoFrameKey);
+
+ stats_.bw_limited_resolution =
+ encoded_image.adapt_reason_.quality_resolution_downscales > 0 ||
+ encoded_image.adapt_reason_.bw_resolutions_disabled > 0;
if (encoded_image.adapt_reason_.quality_resolution_downscales != -1) {
bool downscaled =
encoded_image.adapt_reason_.quality_resolution_downscales > 0;
- quality_limited_frame_counter_.Add(downscaled);
+ uma_container_->quality_limited_frame_counter_.Add(downscaled);
if (downscaled) {
- quality_downscales_counter_.Add(
+ uma_container_->quality_downscales_counter_.Add(
encoded_image.adapt_reason_.quality_resolution_downscales);
}
}
if (encoded_image.adapt_reason_.bw_resolutions_disabled != -1) {
bool bw_limited = encoded_image.adapt_reason_.bw_resolutions_disabled > 0;
- bw_limited_frame_counter_.Add(bw_limited);
+ uma_container_->bw_limited_frame_counter_.Add(bw_limited);
if (bw_limited) {
- bw_resolutions_disabled_counter_.Add(
- encoded_image.adapt_reason_.bw_resolutions_disabled);
+ uma_container_->bw_resolutions_disabled_counter_.Add(
+ encoded_image.adapt_reason_.bw_resolutions_disabled);
}
}
@@ -244,31 +297,35 @@ void SendStatisticsProxy::OnSendEncodedImage(
// are encoded before the next start.
if (last_sent_frame_timestamp_ > 0 &&
encoded_image._timeStamp != last_sent_frame_timestamp_) {
- sent_frame_rate_tracker_.AddSamples(1);
- sent_width_counter_.Add(max_sent_width_per_timestamp_);
- sent_height_counter_.Add(max_sent_height_per_timestamp_);
- max_sent_width_per_timestamp_ = 0;
- max_sent_height_per_timestamp_ = 0;
+ uma_container_->sent_frame_rate_tracker_.AddSamples(1);
+ uma_container_->sent_width_counter_.Add(
+ uma_container_->max_sent_width_per_timestamp_);
+ uma_container_->sent_height_counter_.Add(
+ uma_container_->max_sent_height_per_timestamp_);
+ uma_container_->max_sent_width_per_timestamp_ = 0;
+ uma_container_->max_sent_height_per_timestamp_ = 0;
}
last_sent_frame_timestamp_ = encoded_image._timeStamp;
- max_sent_width_per_timestamp_ =
- std::max(max_sent_width_per_timestamp_,
+ uma_container_->max_sent_width_per_timestamp_ =
+ std::max(uma_container_->max_sent_width_per_timestamp_,
static_cast<int>(encoded_image._encodedWidth));
- max_sent_height_per_timestamp_ =
- std::max(max_sent_height_per_timestamp_,
+ uma_container_->max_sent_height_per_timestamp_ =
+ std::max(uma_container_->max_sent_height_per_timestamp_,
static_cast<int>(encoded_image._encodedHeight));
}
void SendStatisticsProxy::OnIncomingFrame(int width, int height) {
rtc::CritScope lock(&crit_);
- input_frame_rate_tracker_.AddSamples(1);
- input_width_counter_.Add(width);
- input_height_counter_.Add(height);
+ uma_container_->input_frame_rate_tracker_.AddSamples(1);
+ uma_container_->input_width_counter_.Add(width);
+ uma_container_->input_height_counter_.Add(height);
}
void SendStatisticsProxy::OnEncodedFrame(int encode_time_ms) {
rtc::CritScope lock(&crit_);
- encode_time_counter_.Add(encode_time_ms);
+ uma_container_->encode_time_counter_.Add(encode_time_ms);
+ encode_time_.Apply(1.0f, encode_time_ms);
+ stats_.avg_encode_time_ms = round(encode_time_.filtered());
}
void SendStatisticsProxy::RtcpPacketTypesCounterUpdated(
@@ -337,6 +394,9 @@ void SendStatisticsProxy::SendSideDelayUpdated(int avg_delay_ms,
return;
stats->avg_delay_ms = avg_delay_ms;
stats->max_delay_ms = max_delay_ms;
+
+ uma_container_->delay_counter_.Add(avg_delay_ms);
+ uma_container_->max_delay_counter_.Add(max_delay_ms);
}
void SendStatisticsProxy::SampleCounter::Add(int sample) {
@@ -372,5 +432,4 @@ int SendStatisticsProxy::BoolSampleCounter::Fraction(
return -1;
return static_cast<int>((sum * multiplier / num_samples) + 0.5f);
}
-
} // namespace webrtc
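
With this change, avg_encode_time_ms is derived from an exponentially
weighted filter (rtc::ExpFilter seeded with kEncodeTimeWeigthFactor above)
rather than from the overuse detector's average. A minimal standalone sketch
of that weighting, assuming the filter blends as w * previous + (1 - w) *
sample and that the first sample initializes it:

    #include <cmath>

    // Stand-in for the exponential smoothing used above, with w = 0.5.
    class EncodeTimeFilter {
     public:
      explicit EncodeTimeFilter(float weight) : weight_(weight) {}
      void Apply(float sample_ms) {
        filtered_ = has_sample_
                        ? weight_ * filtered_ + (1.f - weight_) * sample_ms
                        : sample_ms;
        has_sample_ = true;
      }
      int filtered_ms() const { return static_cast<int>(std::round(filtered_)); }

     private:
      const float weight_;
      float filtered_ = 0.f;
      bool has_sample_ = false;
    };

After a first sample of 11 ms the filtered value is 11 (matching the
OnEncodedFrame test below); a following 21 ms sample would move it to
0.5 * 11 + 0.5 * 21 = 16.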
diff --git a/webrtc/video/send_statistics_proxy.h b/webrtc/video/send_statistics_proxy.h
index 26ea09c4c4..7f6df06ad8 100644
--- a/webrtc/video/send_statistics_proxy.h
+++ b/webrtc/video/send_statistics_proxy.h
@@ -11,18 +11,20 @@
#ifndef WEBRTC_VIDEO_SEND_STATISTICS_PROXY_H_
#define WEBRTC_VIDEO_SEND_STATISTICS_PROXY_H_
+#include <map>
#include <string>
#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/exp_filter.h"
#include "webrtc/base/ratetracker.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/common_types.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding_defines.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
+#include "webrtc/modules/video_coding/include/video_coding_defines.h"
#include "webrtc/system_wrappers/include/clock.h"
-#include "webrtc/video_engine/overuse_frame_detector.h"
-#include "webrtc/video_engine/vie_encoder.h"
+#include "webrtc/video/overuse_frame_detector.h"
+#include "webrtc/video/vie_encoder.h"
#include "webrtc/video_send_stream.h"
namespace webrtc {
@@ -38,7 +40,9 @@ class SendStatisticsProxy : public CpuOveruseMetricsObserver,
public:
static const int kStatsTimeoutMs;
- SendStatisticsProxy(Clock* clock, const VideoSendStream::Config& config);
+ SendStatisticsProxy(Clock* clock,
+ const VideoSendStream::Config& config,
+ VideoEncoderConfig::ContentType content_type);
virtual ~SendStatisticsProxy();
VideoSendStream::Stats GetStats();
@@ -54,10 +58,15 @@ class SendStatisticsProxy : public CpuOveruseMetricsObserver,
// From VideoEncoderRateObserver.
void OnSetRates(uint32_t bitrate_bps, int framerate) override;
+ void OnEncoderImplementationName(const char* implementation_name);
void OnOutgoingRate(uint32_t framerate, uint32_t bitrate);
void OnSuspendChange(bool is_suspended);
void OnInactiveSsrc(uint32_t ssrc);
+ // Used to indicate change in content type, which may require a change in
+ // how stats are collected.
+ void SetContentType(VideoEncoderConfig::ContentType content_type);
+
protected:
// From CpuOveruseMetricsObserver.
void CpuOveruseMetricsUpdated(const CpuOveruseMetrics& metrics) override;
@@ -112,36 +121,52 @@ class SendStatisticsProxy : public CpuOveruseMetricsObserver,
int num_samples;
};
struct StatsUpdateTimes {
- StatsUpdateTimes() : resolution_update_ms(0) {}
+ StatsUpdateTimes() : resolution_update_ms(0), bitrate_update_ms(0) {}
int64_t resolution_update_ms;
int64_t bitrate_update_ms;
};
void PurgeOldStats() EXCLUSIVE_LOCKS_REQUIRED(crit_);
VideoSendStream::StreamStats* GetStatsEntry(uint32_t ssrc)
EXCLUSIVE_LOCKS_REQUIRED(crit_);
- void UpdateHistograms() EXCLUSIVE_LOCKS_REQUIRED(crit_);
Clock* const clock_;
const VideoSendStream::Config config_;
mutable rtc::CriticalSection crit_;
+ VideoEncoderConfig::ContentType content_type_ GUARDED_BY(crit_);
VideoSendStream::Stats stats_ GUARDED_BY(crit_);
- rtc::RateTracker input_frame_rate_tracker_ GUARDED_BY(crit_);
- rtc::RateTracker sent_frame_rate_tracker_ GUARDED_BY(crit_);
uint32_t last_sent_frame_timestamp_ GUARDED_BY(crit_);
std::map<uint32_t, StatsUpdateTimes> update_times_ GUARDED_BY(crit_);
+ rtc::ExpFilter encode_time_ GUARDED_BY(crit_);
+
+ // Contains stats used for UMA histograms. These stats will be reset if
+ // content type changes between real-time video and screenshare, since these
+ // will be reported separately.
+ struct UmaSamplesContainer {
+ explicit UmaSamplesContainer(const char* prefix);
+ ~UmaSamplesContainer();
+
+ void UpdateHistograms();
+
+ const std::string uma_prefix_;
+ int max_sent_width_per_timestamp_;
+ int max_sent_height_per_timestamp_;
+ SampleCounter input_width_counter_;
+ SampleCounter input_height_counter_;
+ SampleCounter sent_width_counter_;
+ SampleCounter sent_height_counter_;
+ SampleCounter encode_time_counter_;
+ BoolSampleCounter key_frame_counter_;
+ BoolSampleCounter quality_limited_frame_counter_;
+ SampleCounter quality_downscales_counter_;
+ BoolSampleCounter bw_limited_frame_counter_;
+ SampleCounter bw_resolutions_disabled_counter_;
+ SampleCounter delay_counter_;
+ SampleCounter max_delay_counter_;
+ rtc::RateTracker input_frame_rate_tracker_;
+ rtc::RateTracker sent_frame_rate_tracker_;
+ };
- int max_sent_width_per_timestamp_ GUARDED_BY(crit_);
- int max_sent_height_per_timestamp_ GUARDED_BY(crit_);
- SampleCounter input_width_counter_ GUARDED_BY(crit_);
- SampleCounter input_height_counter_ GUARDED_BY(crit_);
- SampleCounter sent_width_counter_ GUARDED_BY(crit_);
- SampleCounter sent_height_counter_ GUARDED_BY(crit_);
- SampleCounter encode_time_counter_ GUARDED_BY(crit_);
- BoolSampleCounter key_frame_counter_ GUARDED_BY(crit_);
- BoolSampleCounter quality_limited_frame_counter_ GUARDED_BY(crit_);
- SampleCounter quality_downscales_counter_ GUARDED_BY(crit_);
- BoolSampleCounter bw_limited_frame_counter_ GUARDED_BY(crit_);
- SampleCounter bw_resolutions_disabled_counter_ GUARDED_BY(crit_);
+ rtc::scoped_ptr<UmaSamplesContainer> uma_container_ GUARDED_BY(crit_);
};
} // namespace webrtc
diff --git a/webrtc/video/send_statistics_proxy_unittest.cc b/webrtc/video/send_statistics_proxy_unittest.cc
index 8e6b7bcab3..fc1f3fdbde 100644
--- a/webrtc/video/send_statistics_proxy_unittest.cc
+++ b/webrtc/video/send_statistics_proxy_unittest.cc
@@ -16,6 +16,7 @@
#include <vector>
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/test/histogram.h"
namespace webrtc {
@@ -28,8 +29,9 @@ class SendStatisticsProxyTest : public ::testing::Test {
protected:
virtual void SetUp() {
- statistics_proxy_.reset(
- new SendStatisticsProxy(&fake_clock_, GetTestConfig()));
+ statistics_proxy_.reset(new SendStatisticsProxy(
+ &fake_clock_, GetTestConfig(),
+ VideoEncoderConfig::ContentType::kRealtimeVideo));
expected_ = VideoSendStream::Stats();
}
@@ -287,6 +289,33 @@ TEST_F(SendStatisticsProxyTest, SendSideDelay) {
ExpectEqual(expected_, stats);
}
+TEST_F(SendStatisticsProxyTest, OnEncodedFrame) {
+ const int kEncodeTimeMs = 11;
+ statistics_proxy_->OnEncodedFrame(kEncodeTimeMs);
+
+ VideoSendStream::Stats stats = statistics_proxy_->GetStats();
+ EXPECT_EQ(kEncodeTimeMs, stats.avg_encode_time_ms);
+}
+
+TEST_F(SendStatisticsProxyTest, SwitchContentTypeUpdatesHistograms) {
+ test::ClearHistograms();
+ const int kMinRequiredSamples = 200;
+ const int kWidth = 640;
+ const int kHeight = 480;
+
+ for (int i = 0; i < kMinRequiredSamples; ++i)
+ statistics_proxy_->OnIncomingFrame(kWidth, kHeight);
+
+  // No switch; stats should not be updated.
+ statistics_proxy_->SetContentType(
+ VideoEncoderConfig::ContentType::kRealtimeVideo);
+ EXPECT_EQ(0, test::NumHistogramSamples("WebRTC.Video.InputWidthInPixels"));
+
+ // Switch to screenshare, real-time stats should be updated.
+ statistics_proxy_->SetContentType(VideoEncoderConfig::ContentType::kScreen);
+ EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Video.InputWidthInPixels"));
+}
+
TEST_F(SendStatisticsProxyTest, NoSubstreams) {
uint32_t excluded_ssrc =
std::max(
diff --git a/webrtc/video/stream_synchronization.cc b/webrtc/video/stream_synchronization.cc
new file mode 100644
index 0000000000..cb37d80ef5
--- /dev/null
+++ b/webrtc/video/stream_synchronization.cc
@@ -0,0 +1,226 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video/stream_synchronization.h"
+
+#include <assert.h>
+#include <math.h>
+#include <stdlib.h>
+
+#include <algorithm>
+
+#include "webrtc/base/logging.h"
+
+namespace webrtc {
+
+static const int kMaxChangeMs = 80;
+static const int kMaxDeltaDelayMs = 10000;
+static const int kFilterLength = 4;
+// Minimum difference between audio and video to warrant a change.
+static const int kMinDeltaMs = 30;
+
+struct ViESyncDelay {
+ ViESyncDelay() {
+ extra_video_delay_ms = 0;
+ last_video_delay_ms = 0;
+ extra_audio_delay_ms = 0;
+ last_audio_delay_ms = 0;
+ network_delay = 120;
+ }
+
+ int extra_video_delay_ms;
+ int last_video_delay_ms;
+ int extra_audio_delay_ms;
+ int last_audio_delay_ms;
+ int network_delay;
+};
+
+StreamSynchronization::StreamSynchronization(uint32_t video_primary_ssrc,
+ int audio_channel_id)
+ : channel_delay_(new ViESyncDelay),
+ video_primary_ssrc_(video_primary_ssrc),
+ audio_channel_id_(audio_channel_id),
+ base_target_delay_ms_(0),
+ avg_diff_ms_(0) {
+}
+
+StreamSynchronization::~StreamSynchronization() {
+ delete channel_delay_;
+}
+
+bool StreamSynchronization::ComputeRelativeDelay(
+ const Measurements& audio_measurement,
+ const Measurements& video_measurement,
+ int* relative_delay_ms) {
+ assert(relative_delay_ms);
+ if (audio_measurement.rtcp.size() < 2 || video_measurement.rtcp.size() < 2) {
+ // We need two RTCP SR reports per stream to do synchronization.
+ return false;
+ }
+ int64_t audio_last_capture_time_ms;
+ if (!RtpToNtpMs(audio_measurement.latest_timestamp,
+ audio_measurement.rtcp,
+ &audio_last_capture_time_ms)) {
+ return false;
+ }
+ int64_t video_last_capture_time_ms;
+ if (!RtpToNtpMs(video_measurement.latest_timestamp,
+ video_measurement.rtcp,
+ &video_last_capture_time_ms)) {
+ return false;
+ }
+ if (video_last_capture_time_ms < 0) {
+ return false;
+ }
+ // Positive diff means that video_measurement is behind audio_measurement.
+ *relative_delay_ms = video_measurement.latest_receive_time_ms -
+ audio_measurement.latest_receive_time_ms -
+ (video_last_capture_time_ms - audio_last_capture_time_ms);
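+  // Worked example: if the latest video packet arrived 250 ms after the
+  // latest audio packet but was captured 200 ms later at the sender, then
+  // relative_delay_ms = 250 - 200 = 50, i.e. video is 50 ms behind audio.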
+ if (*relative_delay_ms > kMaxDeltaDelayMs ||
+ *relative_delay_ms < -kMaxDeltaDelayMs) {
+ return false;
+ }
+ return true;
+}
+
+bool StreamSynchronization::ComputeDelays(int relative_delay_ms,
+ int current_audio_delay_ms,
+ int* total_audio_delay_target_ms,
+ int* total_video_delay_target_ms) {
+ assert(total_audio_delay_target_ms && total_video_delay_target_ms);
+
+ int current_video_delay_ms = *total_video_delay_target_ms;
+ LOG(LS_VERBOSE) << "Audio delay: " << current_audio_delay_ms
+ << ", network delay diff: " << channel_delay_->network_delay
+ << " current diff: " << relative_delay_ms
+ << " for channel " << audio_channel_id_;
+ // Calculate the difference between the lowest possible video delay and
+ // the current audio delay.
+ int current_diff_ms = current_video_delay_ms - current_audio_delay_ms +
+ relative_delay_ms;
+
+ avg_diff_ms_ = ((kFilterLength - 1) * avg_diff_ms_ +
+ current_diff_ms) / kFilterLength;
+ if (abs(avg_diff_ms_) < kMinDeltaMs) {
+ // Don't adjust if the diff is within our margin.
+ return false;
+ }
+
+ // Make sure we don't move too fast.
+ int diff_ms = avg_diff_ms_ / 2;
+ diff_ms = std::min(diff_ms, kMaxChangeMs);
+ diff_ms = std::max(diff_ms, -kMaxChangeMs);
+
+ // Reset the average after a move to prevent an overshooting reaction.
+ avg_diff_ms_ = 0;
+
+ if (diff_ms > 0) {
+ // The minimum video delay is longer than the current audio delay.
+ // We need to decrease extra video delay, or add extra audio delay.
+ if (channel_delay_->extra_video_delay_ms > base_target_delay_ms_) {
+ // We have extra delay added to ViE. Reduce this delay before adding
+ // extra delay to VoE.
+ channel_delay_->extra_video_delay_ms -= diff_ms;
+ channel_delay_->extra_audio_delay_ms = base_target_delay_ms_;
+ } else { // channel_delay_->extra_video_delay_ms <= base_target_delay_ms_
+ // We have no extra video delay to remove, increase the audio delay.
+ channel_delay_->extra_audio_delay_ms += diff_ms;
+ channel_delay_->extra_video_delay_ms = base_target_delay_ms_;
+ }
+ } else { // diff_ms <= 0
+ // The video delay is lower than the current audio delay.
+ // We need to decrease extra audio delay, or add extra video delay.
+ if (channel_delay_->extra_audio_delay_ms > base_target_delay_ms_) {
+ // We have extra delay in VoiceEngine.
+ // Start with decreasing the voice delay.
+ // Note: diff_ms is negative; add the negative difference.
+ channel_delay_->extra_audio_delay_ms += diff_ms;
+ channel_delay_->extra_video_delay_ms = base_target_delay_ms_;
+ } else { // channel_delay_->extra_audio_delay_ms <= base_target_delay_ms_
+ // We have no extra delay in VoiceEngine, increase the video delay.
+ // Note: diff_ms is negative; subtract the negative difference.
+ channel_delay_->extra_video_delay_ms -= diff_ms; // X - (-Y) = X + Y.
+ channel_delay_->extra_audio_delay_ms = base_target_delay_ms_;
+ }
+ }
+
+ // Make sure that video is never below our target.
+ channel_delay_->extra_video_delay_ms = std::max(
+ channel_delay_->extra_video_delay_ms, base_target_delay_ms_);
+
+ int new_video_delay_ms;
+ if (channel_delay_->extra_video_delay_ms > base_target_delay_ms_) {
+ new_video_delay_ms = channel_delay_->extra_video_delay_ms;
+ } else {
+ // No change to the extra video delay. We are changing audio and we only
+ // allow one to change at a time.
+ new_video_delay_ms = channel_delay_->last_video_delay_ms;
+ }
+
+ // Make sure that we don't go below the extra video delay.
+ new_video_delay_ms = std::max(
+ new_video_delay_ms, channel_delay_->extra_video_delay_ms);
+
+ // Verify we don't go above the maximum allowed video delay.
+ new_video_delay_ms =
+ std::min(new_video_delay_ms, base_target_delay_ms_ + kMaxDeltaDelayMs);
+
+ int new_audio_delay_ms;
+ if (channel_delay_->extra_audio_delay_ms > base_target_delay_ms_) {
+ new_audio_delay_ms = channel_delay_->extra_audio_delay_ms;
+ } else {
+ // No change to the audio delay. We are changing video and we only
+ // allow one to change at a time.
+ new_audio_delay_ms = channel_delay_->last_audio_delay_ms;
+ }
+
+ // Make sure that we don't go below the extra audio delay.
+ new_audio_delay_ms = std::max(
+ new_audio_delay_ms, channel_delay_->extra_audio_delay_ms);
+
+ // Verify we don't go above the maximum allowed audio delay.
+ new_audio_delay_ms =
+ std::min(new_audio_delay_ms, base_target_delay_ms_ + kMaxDeltaDelayMs);
+
+ // Remember our last audio and video delays.
+ channel_delay_->last_video_delay_ms = new_video_delay_ms;
+ channel_delay_->last_audio_delay_ms = new_audio_delay_ms;
+
+ LOG(LS_VERBOSE) << "Sync video delay " << new_video_delay_ms
+ << " for video primary SSRC " << video_primary_ssrc_
+ << " and audio delay " << channel_delay_->extra_audio_delay_ms
+ << " for audio channel " << audio_channel_id_;
+
+ // Return values.
+ *total_video_delay_target_ms = new_video_delay_ms;
+ *total_audio_delay_target_ms = new_audio_delay_ms;
+ return true;
+}
+
+void StreamSynchronization::SetTargetBufferingDelay(int target_delay_ms) {
+ // Initial extra delay for audio (accounting for existing extra delay).
+ channel_delay_->extra_audio_delay_ms +=
+ target_delay_ms - base_target_delay_ms_;
+ channel_delay_->last_audio_delay_ms +=
+ target_delay_ms - base_target_delay_ms_;
+
+ // The video delay is compared to the last value (and how much we can update
+ // is limited by that as well).
+ channel_delay_->last_video_delay_ms +=
+ target_delay_ms - base_target_delay_ms_;
+
+ channel_delay_->extra_video_delay_ms +=
+ target_delay_ms - base_target_delay_ms_;
+
+ // Video is already delayed by the desired amount.
+ base_target_delay_ms_ = target_delay_ms;
+}
+
+} // namespace webrtc
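
For orientation, here is a standalone sketch, with hypothetical timing values and not part of the patch, of the arithmetic implemented above: the relative-delay formula from ComputeRelativeDelay(), the length-4 recursive filter, and the halve-and-clamp step from ComputeDelays().

// Standalone sketch (hypothetical values, not part of the patch) of the
// arithmetic implemented in stream_synchronization.cc.
#include <algorithm>
#include <cstdio>

int main() {
  // ComputeRelativeDelay(): video arrived 250 ms after audio but was also
  // captured 30 ms later, so video is effectively 220 ms behind audio.
  int video_receive_ms = 1250, audio_receive_ms = 1000;
  int video_capture_ms = 530, audio_capture_ms = 500;
  int relative_delay_ms = (video_receive_ms - audio_receive_ms) -
                          (video_capture_ms - audio_capture_ms);  // 220.

  // ComputeDelays(): recursive averaging with kFilterLength = 4; each new
  // sample contributes a quarter of its value, so a step change in the
  // measured difference is approached gradually (55, 96, 127, ...).
  const int kFilterLength = 4;
  const int kMaxChangeMs = 80;
  int avg_diff_ms = 0;
  for (int i = 0; i < 3; ++i) {
    avg_diff_ms =
        ((kFilterLength - 1) * avg_diff_ms + relative_delay_ms) / kFilterLength;
    // Only half the averaged difference is applied per step, clamped to
    // +/- kMaxChangeMs, so delays never move more than 80 ms at a time.
    // (In the real code the average is reset after each applied move.)
    int step_ms =
        std::min(std::max(avg_diff_ms / 2, -kMaxChangeMs), kMaxChangeMs);
    printf("avg_diff_ms = %d, applied step = %d ms\n", avg_diff_ms, step_ms);
  }
  return 0;
}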
diff --git a/webrtc/video/stream_synchronization.h b/webrtc/video/stream_synchronization.h
new file mode 100644
index 0000000000..cb7c110f44
--- /dev/null
+++ b/webrtc/video/stream_synchronization.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_STREAM_SYNCHRONIZATION_H_
+#define WEBRTC_VIDEO_STREAM_SYNCHRONIZATION_H_
+
+#include <list>
+
+#include "webrtc/system_wrappers/include/rtp_to_ntp.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+struct ViESyncDelay;
+
+class StreamSynchronization {
+ public:
+ struct Measurements {
+ Measurements() : rtcp(), latest_receive_time_ms(0), latest_timestamp(0) {}
+ RtcpList rtcp;
+ int64_t latest_receive_time_ms;
+ uint32_t latest_timestamp;
+ };
+
+ StreamSynchronization(uint32_t video_primary_ssrc, int audio_channel_id);
+ ~StreamSynchronization();
+
+ bool ComputeDelays(int relative_delay_ms,
+ int current_audio_delay_ms,
+ int* extra_audio_delay_ms,
+ int* total_video_delay_target_ms);
+
+ // On success, |relative_delay_ms| contains the number of milliseconds by
+ // which video is rendered later than audio. If audio is played back later
+ // than video, |relative_delay_ms| will be negative.
+ static bool ComputeRelativeDelay(const Measurements& audio_measurement,
+ const Measurements& video_measurement,
+ int* relative_delay_ms);
+ // Sets the target buffering delay; all audio and video will be delayed by
+ // at least |target_delay_ms|.
+ void SetTargetBufferingDelay(int target_delay_ms);
+
+ private:
+ ViESyncDelay* channel_delay_;
+ const uint32_t video_primary_ssrc_;
+ const int audio_channel_id_;
+ int base_target_delay_ms_;
+ int avg_diff_ms_;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_STREAM_SYNCHRONIZATION_H_
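
A caller-side sketch of how this interface is meant to be driven once per polling interval; the function and variable names here are illustrative, not taken from the tree.

// Illustrative caller-side sketch (not part of the patch).
#include "webrtc/video/stream_synchronization.h"

void PollOnce(webrtc::StreamSynchronization* sync,
              const webrtc::StreamSynchronization::Measurements& audio,
              const webrtc::StreamSynchronization::Measurements& video,
              int current_audio_delay_ms,
              int current_video_delay_ms) {
  int relative_delay_ms = 0;
  if (!webrtc::StreamSynchronization::ComputeRelativeDelay(
          audio, video, &relative_delay_ms)) {
    return;  // Not enough RTCP data yet, or the delta is implausibly large.
  }
  // ComputeDelays() reads the current video delay through the video target
  // pointer, then overwrites both targets when it returns true.
  int target_audio_delay_ms = 0;
  int target_video_delay_ms = current_video_delay_ms;
  if (sync->ComputeDelays(relative_delay_ms, current_audio_delay_ms,
                          &target_audio_delay_ms, &target_video_delay_ms)) {
    // Apply target_audio_delay_ms to the audio channel and
    // target_video_delay_ms to the video render path here.
  }
}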
diff --git a/webrtc/video/stream_synchronization_unittest.cc b/webrtc/video/stream_synchronization_unittest.cc
new file mode 100644
index 0000000000..2834dfe1b2
--- /dev/null
+++ b/webrtc/video/stream_synchronization_unittest.cc
@@ -0,0 +1,563 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <math.h>
+
+#include <algorithm>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/video/stream_synchronization.h"
+
+namespace webrtc {
+
+// These correspond to the same constants defined in vie_sync_module.cc.
+enum { kMaxVideoDiffMs = 80 };
+enum { kMaxAudioDiffMs = 80 };
+enum { kMaxDelay = 1500 };
+
+// Test constants.
+enum { kDefaultAudioFrequency = 8000 };
+enum { kDefaultVideoFrequency = 90000 };
+const double kNtpFracPerMs = 4.294967296E6;
+static const int kSmoothingFilter = 4 * 2;
+
+class Time {
+ public:
+ explicit Time(int64_t offset)
+ : kNtpJan1970(2208988800UL),
+ time_now_ms_(offset) {}
+
+ RtcpMeasurement GenerateRtcp(int frequency, uint32_t offset) const {
+ RtcpMeasurement rtcp;
+ NowNtp(&rtcp.ntp_secs, &rtcp.ntp_frac);
+ rtcp.rtp_timestamp = NowRtp(frequency, offset);
+ return rtcp;
+ }
+
+ void NowNtp(uint32_t* ntp_secs, uint32_t* ntp_frac) const {
+ *ntp_secs = time_now_ms_ / 1000 + kNtpJan1970;
+ int64_t remainder_ms = time_now_ms_ % 1000;
+ *ntp_frac = static_cast<uint32_t>(
+ static_cast<double>(remainder_ms) * kNtpFracPerMs + 0.5);
+ }
+
+ uint32_t NowRtp(int frequency, uint32_t offset) const {
+ return frequency * time_now_ms_ / 1000 + offset;
+ }
+
+ void IncreaseTimeMs(int64_t inc) {
+ time_now_ms_ += inc;
+ }
+
+ int64_t time_now_ms() const {
+ return time_now_ms_;
+ }
+
+ private:
+ // January 1970, in NTP seconds.
+ const uint32_t kNtpJan1970;
+ int64_t time_now_ms_;
+};
+
+class StreamSynchronizationTest : public ::testing::Test {
+ protected:
+ virtual void SetUp() {
+ sync_ = new StreamSynchronization(0, 0);
+ send_time_ = new Time(kSendTimeOffsetMs);
+ receive_time_ = new Time(kReceiveTimeOffsetMs);
+ audio_clock_drift_ = 1.0;
+ video_clock_drift_ = 1.0;
+ }
+
+ virtual void TearDown() {
+ delete sync_;
+ delete send_time_;
+ delete receive_time_;
+ }
+
+ // Generates the necessary RTCP measurements and RTP timestamps and computes
+ // the audio and video delays needed to get the two streams in sync.
+ // |audio_delay_ms| and |video_delay_ms| are the number of milliseconds
+ // after capture at which the frames are rendered.
+ // |current_audio_delay_ms| is the number of milliseconds by which audio is
+ // currently delayed by the receiver.
+ bool DelayedStreams(int audio_delay_ms,
+ int video_delay_ms,
+ int current_audio_delay_ms,
+ int* extra_audio_delay_ms,
+ int* total_video_delay_ms) {
+ int audio_frequency = static_cast<int>(kDefaultAudioFrequency *
+ audio_clock_drift_ + 0.5);
+ int audio_offset = 0;
+ int video_frequency = static_cast<int>(kDefaultVideoFrequency *
+ video_clock_drift_ + 0.5);
+ int video_offset = 0;
+ StreamSynchronization::Measurements audio;
+ StreamSynchronization::Measurements video;
+ // Generate NTP/RTP timestamp pair for both streams corresponding to RTCP.
+ audio.rtcp.push_front(send_time_->GenerateRtcp(audio_frequency,
+ audio_offset));
+ send_time_->IncreaseTimeMs(100);
+ receive_time_->IncreaseTimeMs(100);
+ video.rtcp.push_front(send_time_->GenerateRtcp(video_frequency,
+ video_offset));
+ send_time_->IncreaseTimeMs(900);
+ receive_time_->IncreaseTimeMs(900);
+ audio.rtcp.push_front(send_time_->GenerateRtcp(audio_frequency,
+ audio_offset));
+ send_time_->IncreaseTimeMs(100);
+ receive_time_->IncreaseTimeMs(100);
+ video.rtcp.push_front(send_time_->GenerateRtcp(video_frequency,
+ video_offset));
+ send_time_->IncreaseTimeMs(900);
+ receive_time_->IncreaseTimeMs(900);
+
+ // Capture an audio and a video frame at the same time.
+ audio.latest_timestamp = send_time_->NowRtp(audio_frequency,
+ audio_offset);
+ video.latest_timestamp = send_time_->NowRtp(video_frequency,
+ video_offset);
+
+ if (audio_delay_ms > video_delay_ms) {
+ // Audio later than video.
+ receive_time_->IncreaseTimeMs(video_delay_ms);
+ video.latest_receive_time_ms = receive_time_->time_now_ms();
+ receive_time_->IncreaseTimeMs(audio_delay_ms - video_delay_ms);
+ audio.latest_receive_time_ms = receive_time_->time_now_ms();
+ } else {
+ // Video later than audio.
+ receive_time_->IncreaseTimeMs(audio_delay_ms);
+ audio.latest_receive_time_ms = receive_time_->time_now_ms();
+ receive_time_->IncreaseTimeMs(video_delay_ms - audio_delay_ms);
+ video.latest_receive_time_ms = receive_time_->time_now_ms();
+ }
+ int relative_delay_ms;
+ StreamSynchronization::ComputeRelativeDelay(audio, video,
+ &relative_delay_ms);
+ EXPECT_EQ(video_delay_ms - audio_delay_ms, relative_delay_ms);
+ return sync_->ComputeDelays(relative_delay_ms,
+ current_audio_delay_ms,
+ extra_audio_delay_ms,
+ total_video_delay_ms);
+ }
+
+ // Simulate audio playback 300 ms after capture and video rendering 100 ms
+ // after capture. Verify that the correct extra delays are calculated for
+ // audio and video, and that they change correctly when we simulate that
+ // NetEQ or the VCM adds more delay to the streams.
+ // TODO(holmer): This is currently wrong! We should simply change
+ // audio_delay_ms or video_delay_ms since those now include VCM and NetEQ
+ // delays.
+ void BothDelayedAudioLaterTest(int base_target_delay) {
+ int current_audio_delay_ms = base_target_delay;
+ int audio_delay_ms = base_target_delay + 300;
+ int video_delay_ms = base_target_delay + 100;
+ int extra_audio_delay_ms = 0;
+ int total_video_delay_ms = base_target_delay;
+ int filtered_move = (audio_delay_ms - video_delay_ms) / kSmoothingFilter;
+ const int kNeteqDelayIncrease = 50;
+ const int kNeteqDelayDecrease = 10;
+
+ EXPECT_TRUE(DelayedStreams(audio_delay_ms,
+ video_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms,
+ &total_video_delay_ms));
+ EXPECT_EQ(base_target_delay + filtered_move, total_video_delay_ms);
+ EXPECT_EQ(base_target_delay, extra_audio_delay_ms);
+ current_audio_delay_ms = extra_audio_delay_ms;
+
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(1000 - std::max(audio_delay_ms,
+ video_delay_ms));
+ // Simulate a minimum delay of base_target_delay in the VCM.
+ total_video_delay_ms = base_target_delay;
+ EXPECT_TRUE(DelayedStreams(audio_delay_ms,
+ video_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms,
+ &total_video_delay_ms));
+ EXPECT_EQ(base_target_delay + 2 * filtered_move, total_video_delay_ms);
+ EXPECT_EQ(base_target_delay, extra_audio_delay_ms);
+ current_audio_delay_ms = extra_audio_delay_ms;
+
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(1000 - std::max(audio_delay_ms,
+ video_delay_ms));
+ // Simulate a minimum delay of base_target_delay in the VCM.
+ total_video_delay_ms = base_target_delay;
+ EXPECT_TRUE(DelayedStreams(audio_delay_ms,
+ video_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms,
+ &total_video_delay_ms));
+ EXPECT_EQ(base_target_delay + 3 * filtered_move, total_video_delay_ms);
+ EXPECT_EQ(base_target_delay, extra_audio_delay_ms);
+
+ // Simulate that NetEQ introduces some audio delay.
+ current_audio_delay_ms = base_target_delay + kNeteqDelayIncrease;
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(1000 - std::max(audio_delay_ms,
+ video_delay_ms));
+ // Simulate a minimum delay of base_target_delay in the VCM.
+ total_video_delay_ms = base_target_delay;
+ EXPECT_TRUE(DelayedStreams(audio_delay_ms,
+ video_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms,
+ &total_video_delay_ms));
+ filtered_move = 3 * filtered_move +
+ (kNeteqDelayIncrease + audio_delay_ms - video_delay_ms) /
+ kSmoothingFilter;
+ EXPECT_EQ(base_target_delay + filtered_move, total_video_delay_ms);
+ EXPECT_EQ(base_target_delay, extra_audio_delay_ms);
+
+ // Simulate that NetEQ reduces its delay.
+ current_audio_delay_ms = base_target_delay + kNeteqDelayDecrease;
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(1000 - std::max(audio_delay_ms,
+ video_delay_ms));
+ // Simulate a minimum delay of base_target_delay in the VCM.
+ total_video_delay_ms = base_target_delay;
+ EXPECT_TRUE(DelayedStreams(audio_delay_ms,
+ video_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms,
+ &total_video_delay_ms));
+
+ filtered_move = filtered_move +
+ (kNeteqDelayDecrease + audio_delay_ms - video_delay_ms) /
+ kSmoothingFilter;
+
+ EXPECT_EQ(base_target_delay + filtered_move, total_video_delay_ms);
+ EXPECT_EQ(base_target_delay, extra_audio_delay_ms);
+ }
+
+ void BothDelayedVideoLaterTest(int base_target_delay) {
+ int current_audio_delay_ms = base_target_delay;
+ int audio_delay_ms = base_target_delay + 100;
+ int video_delay_ms = base_target_delay + 300;
+ int extra_audio_delay_ms = 0;
+ int total_video_delay_ms = base_target_delay;
+
+ EXPECT_TRUE(DelayedStreams(audio_delay_ms,
+ video_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms,
+ &total_video_delay_ms));
+ EXPECT_EQ(base_target_delay, total_video_delay_ms);
+ // The audio delay is not allowed to change more than this in 1 second.
+ EXPECT_GE(base_target_delay + kMaxAudioDiffMs, extra_audio_delay_ms);
+ current_audio_delay_ms = extra_audio_delay_ms;
+ int current_extra_delay_ms = extra_audio_delay_ms;
+
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(800);
+ EXPECT_TRUE(DelayedStreams(audio_delay_ms,
+ video_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms,
+ &total_video_delay_ms));
+ EXPECT_EQ(base_target_delay, total_video_delay_ms);
+ // The audio delay is not allowed to change more than half of the
+ // required change in delay.
+ EXPECT_EQ(current_extra_delay_ms + MaxAudioDelayIncrease(
+ current_audio_delay_ms,
+ base_target_delay + video_delay_ms - audio_delay_ms),
+ extra_audio_delay_ms);
+ current_audio_delay_ms = extra_audio_delay_ms;
+ current_extra_delay_ms = extra_audio_delay_ms;
+
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(800);
+ EXPECT_TRUE(DelayedStreams(audio_delay_ms,
+ video_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms,
+ &total_video_delay_ms));
+ EXPECT_EQ(base_target_delay, total_video_delay_ms);
+ // The audio delay is not allowed to change more than half of the
+ // required change in delay.
+ EXPECT_EQ(current_extra_delay_ms + MaxAudioDelayIncrease(
+ current_audio_delay_ms,
+ base_target_delay + video_delay_ms - audio_delay_ms),
+ extra_audio_delay_ms);
+ current_extra_delay_ms = extra_audio_delay_ms;
+
+ // Simulate that NetEQ for some reason reduced the delay.
+ current_audio_delay_ms = base_target_delay + 10;
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(800);
+ EXPECT_TRUE(DelayedStreams(audio_delay_ms,
+ video_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms,
+ &total_video_delay_ms));
+ EXPECT_EQ(base_target_delay, total_video_delay_ms);
+ // Since we can only ask NetEQ for a certain amount of extra delay, and
+ // we only measure the total NetEQ delay, we will ask for additional delay
+ // here to try to stay in sync.
+ EXPECT_EQ(current_extra_delay_ms + MaxAudioDelayIncrease(
+ current_audio_delay_ms,
+ base_target_delay + video_delay_ms - audio_delay_ms),
+ extra_audio_delay_ms);
+ current_extra_delay_ms = extra_audio_delay_ms;
+
+ // Simulate that NetEQ for some reason significantly increased the delay.
+ current_audio_delay_ms = base_target_delay + 350;
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(800);
+ EXPECT_TRUE(DelayedStreams(audio_delay_ms,
+ video_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms,
+ &total_video_delay_ms));
+ EXPECT_EQ(base_target_delay, total_video_delay_ms);
+ // The audio delay is not allowed to change more than half of the
+ // required change in delay.
+ EXPECT_EQ(current_extra_delay_ms + MaxAudioDelayIncrease(
+ current_audio_delay_ms,
+ base_target_delay + video_delay_ms - audio_delay_ms),
+ extra_audio_delay_ms);
+ }
+
+ int MaxAudioDelayIncrease(int current_audio_delay_ms, int delay_ms) {
+ return std::min((delay_ms - current_audio_delay_ms) / kSmoothingFilter,
+ static_cast<int>(kMaxAudioDiffMs));
+ }
+
+ int MaxAudioDelayDecrease(int current_audio_delay_ms, int delay_ms) {
+ return std::max((delay_ms - current_audio_delay_ms) / kSmoothingFilter,
+ -kMaxAudioDiffMs);
+ }
+
+ enum { kSendTimeOffsetMs = 98765 };
+ enum { kReceiveTimeOffsetMs = 43210 };
+
+ StreamSynchronization* sync_;
+ Time* send_time_; // The simulated clock at the sender.
+ Time* receive_time_; // The simulated clock at the receiver.
+ double audio_clock_drift_;
+ double video_clock_drift_;
+};
+
+TEST_F(StreamSynchronizationTest, NoDelay) {
+ int current_audio_delay_ms = 0;
+ int extra_audio_delay_ms = 0;
+ int total_video_delay_ms = 0;
+
+ EXPECT_FALSE(DelayedStreams(0, 0, current_audio_delay_ms,
+ &extra_audio_delay_ms, &total_video_delay_ms));
+ EXPECT_EQ(0, extra_audio_delay_ms);
+ EXPECT_EQ(0, total_video_delay_ms);
+}
+
+TEST_F(StreamSynchronizationTest, VideoDelay) {
+ int current_audio_delay_ms = 0;
+ int delay_ms = 200;
+ int extra_audio_delay_ms = 0;
+ int total_video_delay_ms = 0;
+
+ EXPECT_TRUE(DelayedStreams(delay_ms, 0, current_audio_delay_ms,
+ &extra_audio_delay_ms, &total_video_delay_ms));
+ EXPECT_EQ(0, extra_audio_delay_ms);
+ // The video delay is not allowed to change more than this in 1 second.
+ EXPECT_EQ(delay_ms / kSmoothingFilter, total_video_delay_ms);
+
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(800);
+ // Simulate a minimum delay of 0 in the VCM.
+ total_video_delay_ms = 0;
+ EXPECT_TRUE(DelayedStreams(delay_ms, 0, current_audio_delay_ms,
+ &extra_audio_delay_ms, &total_video_delay_ms));
+ EXPECT_EQ(0, extra_audio_delay_ms);
+ // The video delay is not allowed to change more than this in 1 second.
+ EXPECT_EQ(2 * delay_ms / kSmoothingFilter, total_video_delay_ms);
+
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(800);
+ // Simulate a minimum delay of 0 in the VCM.
+ total_video_delay_ms = 0;
+ EXPECT_TRUE(DelayedStreams(delay_ms, 0, current_audio_delay_ms,
+ &extra_audio_delay_ms, &total_video_delay_ms));
+ EXPECT_EQ(0, extra_audio_delay_ms);
+ EXPECT_EQ(3 * delay_ms / kSmoothingFilter, total_video_delay_ms);
+}
+
+TEST_F(StreamSynchronizationTest, AudioDelay) {
+ int current_audio_delay_ms = 0;
+ int delay_ms = 200;
+ int extra_audio_delay_ms = 0;
+ int total_video_delay_ms = 0;
+
+ EXPECT_TRUE(DelayedStreams(0, delay_ms, current_audio_delay_ms,
+ &extra_audio_delay_ms, &total_video_delay_ms));
+ EXPECT_EQ(0, total_video_delay_ms);
+ // The audio delay is not allowed to change more than this in 1 second.
+ EXPECT_EQ(delay_ms / kSmoothingFilter, extra_audio_delay_ms);
+ current_audio_delay_ms = extra_audio_delay_ms;
+ int current_extra_delay_ms = extra_audio_delay_ms;
+
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(800);
+ EXPECT_TRUE(DelayedStreams(0, delay_ms, current_audio_delay_ms,
+ &extra_audio_delay_ms, &total_video_delay_ms));
+ EXPECT_EQ(0, total_video_delay_ms);
+ // The audio delay is not allowed to change more than half of the required
+ // change in delay.
+ EXPECT_EQ(current_extra_delay_ms +
+ MaxAudioDelayIncrease(current_audio_delay_ms, delay_ms),
+ extra_audio_delay_ms);
+ current_audio_delay_ms = extra_audio_delay_ms;
+ current_extra_delay_ms = extra_audio_delay_ms;
+
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(800);
+ EXPECT_TRUE(DelayedStreams(0, delay_ms, current_audio_delay_ms,
+ &extra_audio_delay_ms, &total_video_delay_ms));
+ EXPECT_EQ(0, total_video_delay_ms);
+ // The audio delay is not allowed to change more than half of the required
+ // change in delay.
+ EXPECT_EQ(current_extra_delay_ms +
+ MaxAudioDelayIncrease(current_audio_delay_ms, delay_ms),
+ extra_audio_delay_ms);
+ current_extra_delay_ms = extra_audio_delay_ms;
+
+ // Simulate that NetEQ for some reason reduced the delay.
+ current_audio_delay_ms = 10;
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(800);
+ EXPECT_TRUE(DelayedStreams(0, delay_ms, current_audio_delay_ms,
+ &extra_audio_delay_ms, &total_video_delay_ms));
+ EXPECT_EQ(0, total_video_delay_ms);
+ // Since we can only ask NetEQ for a certain amount of extra delay, and
+ // we only measure the total NetEQ delay, we will ask for additional delay
+ // here to try to stay in sync.
+ EXPECT_EQ(current_extra_delay_ms +
+ MaxAudioDelayIncrease(current_audio_delay_ms, delay_ms),
+ extra_audio_delay_ms);
+ current_extra_delay_ms = extra_audio_delay_ms;
+
+ // Simulate that NetEQ for some reason significantly increased the delay.
+ current_audio_delay_ms = 350;
+ send_time_->IncreaseTimeMs(1000);
+ receive_time_->IncreaseTimeMs(800);
+ EXPECT_TRUE(DelayedStreams(0, delay_ms, current_audio_delay_ms,
+ &extra_audio_delay_ms, &total_video_delay_ms));
+ EXPECT_EQ(0, total_video_delay_ms);
+ // The audio delay is not allowed to change more than half of the required
+ // change in delay.
+ EXPECT_EQ(current_extra_delay_ms +
+ MaxAudioDelayDecrease(current_audio_delay_ms, delay_ms),
+ extra_audio_delay_ms);
+}
+
+TEST_F(StreamSynchronizationTest, BothDelayedVideoLater) {
+ BothDelayedVideoLaterTest(0);
+}
+
+TEST_F(StreamSynchronizationTest, BothDelayedVideoLaterAudioClockDrift) {
+ audio_clock_drift_ = 1.05;
+ BothDelayedVideoLaterTest(0);
+}
+
+TEST_F(StreamSynchronizationTest, BothDelayedVideoLaterVideoClockDrift) {
+ video_clock_drift_ = 1.05;
+ BothDelayedVideoLaterTest(0);
+}
+
+TEST_F(StreamSynchronizationTest, BothDelayedAudioLater) {
+ BothDelayedAudioLaterTest(0);
+}
+
+TEST_F(StreamSynchronizationTest, BothDelayedAudioClockDrift) {
+ audio_clock_drift_ = 1.05;
+ BothDelayedAudioLaterTest(0);
+}
+
+TEST_F(StreamSynchronizationTest, BothDelayedVideoClockDrift) {
+ video_clock_drift_ = 1.05;
+ BothDelayedAudioLaterTest(0);
+}
+
+TEST_F(StreamSynchronizationTest, BaseDelay) {
+ int base_target_delay_ms = 2000;
+ int current_audio_delay_ms = 2000;
+ int extra_audio_delay_ms = 0;
+ int total_video_delay_ms = base_target_delay_ms;
+ sync_->SetTargetBufferingDelay(base_target_delay_ms);
+ // We are in sync; don't change.
+ EXPECT_FALSE(DelayedStreams(base_target_delay_ms, base_target_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms, &total_video_delay_ms));
+ // Triggering another call with the same values. Delay should not be modified.
+ base_target_delay_ms = 2000;
+ current_audio_delay_ms = base_target_delay_ms;
+ total_video_delay_ms = base_target_delay_ms;
+ sync_->SetTargetBufferingDelay(base_target_delay_ms);
+ // We are in sync; don't change.
+ EXPECT_FALSE(DelayedStreams(base_target_delay_ms, base_target_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms, &total_video_delay_ms));
+ // Changing the delay value; intended to test this module only. In practice
+ // it would take VoE time to adapt.
+ base_target_delay_ms = 5000;
+ current_audio_delay_ms = base_target_delay_ms;
+ total_video_delay_ms = base_target_delay_ms;
+ sync_->SetTargetBufferingDelay(base_target_delay_ms);
+ // We are in sync; don't change.
+ EXPECT_FALSE(DelayedStreams(base_target_delay_ms, base_target_delay_ms,
+ current_audio_delay_ms,
+ &extra_audio_delay_ms, &total_video_delay_ms));
+}
+
+TEST_F(StreamSynchronizationTest, BothDelayedAudioLaterWithBaseDelay) {
+ int base_target_delay_ms = 3000;
+ sync_->SetTargetBufferingDelay(base_target_delay_ms);
+ BothDelayedAudioLaterTest(base_target_delay_ms);
+}
+
+TEST_F(StreamSynchronizationTest, BothDelayedAudioClockDriftWithBaseDelay) {
+ int base_target_delay_ms = 3000;
+ sync_->SetTargetBufferingDelay(base_target_delay_ms);
+ audio_clock_drift_ = 1.05;
+ BothDelayedAudioLaterTest(base_target_delay_ms);
+}
+
+TEST_F(StreamSynchronizationTest, BothDelayedVideoClockDriftWithBaseDelay) {
+ int base_target_delay_ms = 3000;
+ sync_->SetTargetBufferingDelay(base_target_delay_ms);
+ video_clock_drift_ = 1.05;
+ BothDelayedAudioLaterTest(base_target_delay_ms);
+}
+
+TEST_F(StreamSynchronizationTest, BothDelayedVideoLaterWithBaseDelay) {
+ int base_target_delay_ms = 2000;
+ sync_->SetTargetBufferingDelay(base_target_delay_ms);
+ BothDelayedVideoLaterTest(base_target_delay_ms);
+}
+
+TEST_F(StreamSynchronizationTest,
+ BothDelayedVideoLaterAudioClockDriftWithBaseDelay) {
+ int base_target_delay_ms = 2000;
+ audio_clock_drift_ = 1.05;
+ sync_->SetTargetBufferingDelay(base_target_delay_ms);
+ BothDelayedVideoLaterTest(base_target_delay_ms);
+}
+
+TEST_F(StreamSynchronizationTest,
+ BothDelayedVideoLaterVideoClockDriftWithBaseDelay) {
+ int base_target_delay_ms = 2000;
+ video_clock_drift_ = 1.05;
+ sync_->SetTargetBufferingDelay(base_target_delay_ms);
+ BothDelayedVideoLaterTest(base_target_delay_ms);
+}
+
+} // namespace webrtc
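
The Time helper above converts a millisecond clock to an NTP seconds/fraction pair: the 32-bit NTP fraction counts units of 1/2^32 seconds, so one millisecond corresponds to 2^32 / 1000, about 4294967.3 fraction units, which is exactly the kNtpFracPerMs constant. A standalone sketch with a hypothetical clock value:

// Standalone sketch (not part of the patch) of the NTP conversion used by
// the Time helper in the test above.
#include <cstdint>
#include <cstdio>

int main() {
  const double kNtpFracPerMs = 4.294967296E6;  // 2^32 / 1000.
  const uint32_t kNtpJan1970 = 2208988800UL;   // 1900->1970 offset in seconds.
  int64_t time_now_ms = 98765;                 // Hypothetical clock value.

  uint32_t ntp_secs = static_cast<uint32_t>(time_now_ms / 1000 + kNtpJan1970);
  // The 765 ms remainder becomes roughly 765 * 4294967.296 fraction units.
  uint32_t ntp_frac = static_cast<uint32_t>(
      static_cast<double>(time_now_ms % 1000) * kNtpFracPerMs + 0.5);
  printf("NTP = %u s + %u/2^32 s\n", ntp_secs, ntp_frac);
  return 0;
}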
diff --git a/webrtc/video/video_capture_input.cc b/webrtc/video/video_capture_input.cc
index 42bc65f05f..1c5f299291 100644
--- a/webrtc/video/video_capture_input.cc
+++ b/webrtc/video/video_capture_input.cc
@@ -13,18 +13,17 @@
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/trace_event.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/utility/interface/process_thread.h"
-#include "webrtc/modules/video_capture/include/video_capture_factory.h"
-#include "webrtc/modules/video_processing/main/interface/video_processing.h"
-#include "webrtc/modules/video_render/include/video_render_defines.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/utility/include/process_thread.h"
+#include "webrtc/modules/video_capture/video_capture_factory.h"
+#include "webrtc/modules/video_processing/include/video_processing.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
#include "webrtc/system_wrappers/include/clock.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/event_wrapper.h"
#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/video/overuse_frame_detector.h"
#include "webrtc/video/send_statistics_proxy.h"
-#include "webrtc/video_engine/overuse_frame_detector.h"
-#include "webrtc/video_engine/vie_encoder.h"
+#include "webrtc/video/vie_encoder.h"
namespace webrtc {
@@ -42,10 +41,8 @@ VideoCaptureInput::VideoCaptureInput(
local_renderer_(local_renderer),
stats_proxy_(stats_proxy),
incoming_frame_cs_(CriticalSectionWrapper::CreateCriticalSection()),
- encoder_thread_(ThreadWrapper::CreateThread(EncoderThreadFunction,
- this,
- "EncoderThread")),
- capture_event_(EventWrapper::Create()),
+ encoder_thread_(EncoderThreadFunction, this, "EncoderThread"),
+ capture_event_(false, false),
stop_(0),
last_captured_timestamp_(0),
delta_ntp_internal_ms_(
@@ -56,8 +53,8 @@ VideoCaptureInput::VideoCaptureInput(
overuse_observer,
stats_proxy)),
encoding_time_observer_(encoding_time_observer) {
- encoder_thread_->Start();
- encoder_thread_->SetPriority(kHighPriority);
+ encoder_thread_.Start();
+ encoder_thread_.SetPriority(rtc::kHighPriority);
module_process_thread_->RegisterModule(overuse_detector_.get());
}
@@ -66,8 +63,8 @@ VideoCaptureInput::~VideoCaptureInput() {
// Stop the thread.
rtc::AtomicOps::ReleaseStore(&stop_, 1);
- capture_event_->Set();
- encoder_thread_->Stop();
+ capture_event_.Set();
+ encoder_thread_.Stop();
}
void VideoCaptureInput::IncomingCapturedFrame(const VideoFrame& video_frame) {
@@ -118,7 +115,7 @@ void VideoCaptureInput::IncomingCapturedFrame(const VideoFrame& video_frame) {
TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Video", video_frame.render_time_ms(),
"render_time", video_frame.render_time_ms());
- capture_event_->Set();
+ capture_event_.Set();
}
bool VideoCaptureInput::EncoderThreadFunction(void* obj) {
@@ -128,7 +125,7 @@ bool VideoCaptureInput::EncoderThreadFunction(void* obj) {
bool VideoCaptureInput::EncoderProcess() {
static const int kThreadWaitTimeMs = 100;
int64_t capture_time = -1;
- if (capture_event_->Wait(kThreadWaitTimeMs) == kEventSignaled) {
+ if (capture_event_.Wait(kThreadWaitTimeMs)) {
if (rtc::AtomicOps::AcquireLoad(&stop_))
return false;
@@ -150,7 +147,6 @@ bool VideoCaptureInput::EncoderProcess() {
if (encode_start_time != -1) {
int encode_time_ms = static_cast<int>(
Clock::GetRealTimeClock()->TimeInMilliseconds() - encode_start_time);
- overuse_detector_->FrameEncoded(encode_time_ms);
stats_proxy_->OnEncodedFrame(encode_time_ms);
if (encoding_time_observer_) {
encoding_time_observer_->OnReportEncodedTime(
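
The hunks above migrate from ThreadWrapper/EventWrapper to rtc::PlatformThread and rtc::Event. A minimal isolated sketch of the resulting pattern, using only constructs visible in this patch (the atomicops include path is an assumption):

// Isolated sketch (not part of the patch) of the thread + event pattern the
// hunks above migrate to. rtc::Event(manual_reset, initially_signaled);
// Wait() returns true when signaled. The thread function returns false to
// end the thread loop.
#include "webrtc/base/atomicops.h"  // Assumed path for rtc::AtomicOps.
#include "webrtc/base/event.h"
#include "webrtc/base/platform_thread.h"

class Worker {
 public:
  Worker()
      : worker_thread_(&ThreadFunc, this, "Worker"),
        wake_event_(false, false),
        stop_(0) {
    worker_thread_.Start();
    worker_thread_.SetPriority(rtc::kHighPriority);
  }
  ~Worker() {
    rtc::AtomicOps::ReleaseStore(&stop_, 1);
    wake_event_.Set();  // Wake the thread so it can observe stop_.
    worker_thread_.Stop();
  }
  void Notify() { wake_event_.Set(); }  // E.g. a new frame has arrived.

 private:
  static bool ThreadFunc(void* obj) {
    return static_cast<Worker*>(obj)->Process();
  }
  bool Process() {
    if (wake_event_.Wait(100)) {  // 100 ms poll interval.
      if (rtc::AtomicOps::AcquireLoad(&stop_))
        return false;  // Ends the thread loop.
      // ... consume the pending work here ...
    }
    return true;  // Loop again.
  }

  rtc::PlatformThread worker_thread_;
  rtc::Event wake_event_;
  volatile int stop_;
};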
diff --git a/webrtc/video/video_capture_input.h b/webrtc/video/video_capture_input.h
index 5a86ad265f..d44907cd0e 100644
--- a/webrtc/video/video_capture_input.h
+++ b/webrtc/video/video_capture_input.h
@@ -14,18 +14,18 @@
#include <vector>
#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/event.h"
+#include "webrtc/base/platform_thread.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/common_types.h"
#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/video_capture/include/video_capture.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding.h"
-#include "webrtc/modules/video_processing/main/interface/video_processing.h"
+#include "webrtc/modules/video_capture/video_capture.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
+#include "webrtc/modules/video_coding/include/video_coding.h"
+#include "webrtc/modules/video_processing/include/video_processing.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
#include "webrtc/typedefs.h"
-#include "webrtc/video_engine/vie_defines.h"
#include "webrtc/video_send_stream.h"
namespace webrtc {
@@ -34,7 +34,6 @@ class Config;
class CpuOveruseMetricsObserver;
class CpuOveruseObserver;
class CriticalSectionWrapper;
-class EventWrapper;
class OveruseFrameDetector;
class ProcessThread;
class RegistrableCpuOveruseMetricsObserver;
@@ -66,8 +65,6 @@ class VideoCaptureInput : public webrtc::VideoCaptureInput {
static bool EncoderThreadFunction(void* obj);
bool EncoderProcess();
- void DeliverI420Frame(VideoFrame* video_frame);
-
rtc::scoped_ptr<CriticalSectionWrapper> capture_cs_;
ProcessThread* const module_process_thread_;
@@ -79,8 +76,8 @@ class VideoCaptureInput : public webrtc::VideoCaptureInput {
rtc::scoped_ptr<CriticalSectionWrapper> incoming_frame_cs_;
VideoFrame incoming_frame_;
- rtc::scoped_ptr<ThreadWrapper> encoder_thread_;
- rtc::scoped_ptr<EventWrapper> capture_event_;
+ rtc::PlatformThread encoder_thread_;
+ rtc::Event capture_event_;
volatile int stop_;
diff --git a/webrtc/video/video_capture_input_unittest.cc b/webrtc/video/video_capture_input_unittest.cc
index e8bc2ad1c9..9d720e2294 100644
--- a/webrtc/video/video_capture_input_unittest.cc
+++ b/webrtc/video/video_capture_input_unittest.cc
@@ -13,11 +13,11 @@
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/event.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/common.h"
-#include "webrtc/modules/utility/interface/mock/mock_process_thread.h"
+#include "webrtc/modules/utility/include/mock/mock_process_thread.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/event_wrapper.h"
#include "webrtc/system_wrappers/include/ref_count.h"
#include "webrtc/system_wrappers/include/scoped_vector.h"
#include "webrtc/test/fake_texture_frame.h"
@@ -51,9 +51,10 @@ class VideoCaptureInputTest : public ::testing::Test {
VideoCaptureInputTest()
: mock_process_thread_(new NiceMock<MockProcessThread>),
mock_frame_callback_(new NiceMock<MockVideoCaptureCallback>),
- output_frame_event_(EventWrapper::Create()),
+ output_frame_event_(false, false),
stats_proxy_(Clock::GetRealTimeClock(),
- webrtc::VideoSendStream::Config(nullptr)) {}
+ webrtc::VideoSendStream::Config(nullptr),
+ webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo) {}
virtual void SetUp() {
EXPECT_CALL(*mock_frame_callback_, DeliverFrame(_))
@@ -81,11 +82,11 @@ class VideoCaptureInputTest : public ::testing::Test {
if (frame.native_handle() == NULL)
output_frame_ybuffers_.push_back(frame.buffer(kYPlane));
output_frames_.push_back(new VideoFrame(frame));
- output_frame_event_->Set();
+ output_frame_event_.Set();
}
void WaitOutputFrame() {
- EXPECT_EQ(kEventSignaled, output_frame_event_->Wait(FRAME_TIMEOUT_MS));
+ EXPECT_TRUE(output_frame_event_.Wait(FRAME_TIMEOUT_MS));
}
rtc::scoped_ptr<MockProcessThread> mock_process_thread_;
@@ -98,7 +99,7 @@ class VideoCaptureInputTest : public ::testing::Test {
ScopedVector<VideoFrame> input_frames_;
// Indicate an output frame has arrived.
- rtc::scoped_ptr<EventWrapper> output_frame_event_;
+ rtc::Event output_frame_event_;
// Output delivered frames of VideoCaptureInput.
ScopedVector<VideoFrame> output_frames_;
@@ -111,20 +112,19 @@ class VideoCaptureInputTest : public ::testing::Test {
TEST_F(VideoCaptureInputTest, DoesNotRetainHandleNorCopyBuffer) {
// Indicate an output frame has arrived.
- rtc::scoped_ptr<EventWrapper> frame_destroyed_event(EventWrapper::Create());
+ rtc::Event frame_destroyed_event(false, false);
class TestBuffer : public webrtc::I420Buffer {
public:
- explicit TestBuffer(EventWrapper* event)
- : I420Buffer(5, 5), event_(event) {}
+ explicit TestBuffer(rtc::Event* event) : I420Buffer(5, 5), event_(event) {}
private:
friend class rtc::RefCountedObject<TestBuffer>;
~TestBuffer() override { event_->Set(); }
- EventWrapper* event_;
+ rtc::Event* const event_;
};
VideoFrame frame(
- new rtc::RefCountedObject<TestBuffer>(frame_destroyed_event.get()), 1, 1,
+ new rtc::RefCountedObject<TestBuffer>(&frame_destroyed_event), 1, 1,
kVideoRotation_0);
AddInputFrame(&frame);
@@ -134,7 +134,7 @@ TEST_F(VideoCaptureInputTest, DoesNotRetainHandleNorCopyBuffer) {
frame.video_frame_buffer().get());
output_frames_.clear();
frame.Reset();
- EXPECT_EQ(kEventSignaled, frame_destroyed_event->Wait(FRAME_TIMEOUT_MS));
+ EXPECT_TRUE(frame_destroyed_event.Wait(FRAME_TIMEOUT_MS));
}
TEST_F(VideoCaptureInputTest, TestNtpTimeStampSetIfRenderTimeSet) {
@@ -171,12 +171,12 @@ TEST_F(VideoCaptureInputTest, DropsFramesWithSameOrOldNtpTimestamp) {
// Repeat frame with the same NTP timestamp should drop.
AddInputFrame(input_frames_[0]);
- EXPECT_EQ(kEventTimeout, output_frame_event_->Wait(FRAME_TIMEOUT_MS));
+ EXPECT_FALSE(output_frame_event_.Wait(FRAME_TIMEOUT_MS));
// As should frames with a decreased NTP timestamp.
input_frames_[0]->set_ntp_time_ms(input_frames_[0]->ntp_time_ms() - 1);
AddInputFrame(input_frames_[0]);
- EXPECT_EQ(kEventTimeout, output_frame_event_->Wait(FRAME_TIMEOUT_MS));
+ EXPECT_FALSE(output_frame_event_.Wait(FRAME_TIMEOUT_MS));
// But delivering with an increased NTP timestamp should succeed.
input_frames_[0]->set_ntp_time_ms(4711);
@@ -191,7 +191,7 @@ TEST_F(VideoCaptureInputTest, TestTextureFrames) {
for (int i = 0 ; i < kNumFrame; ++i) {
test::FakeNativeHandle* dummy_handle = new test::FakeNativeHandle();
// Add one to |i| so that width/height > 0.
- input_frames_.push_back(new VideoFrame(test::CreateFakeNativeHandleFrame(
+ input_frames_.push_back(new VideoFrame(test::FakeNativeHandle::CreateFrame(
dummy_handle, i + 1, i + 1, i + 1, i + 1, webrtc::kVideoRotation_0)));
AddInputFrame(input_frames_[i]);
WaitOutputFrame();
@@ -220,7 +220,7 @@ TEST_F(VideoCaptureInputTest, TestI420Frames) {
TEST_F(VideoCaptureInputTest, TestI420FrameAfterTextureFrame) {
test::FakeNativeHandle* dummy_handle = new test::FakeNativeHandle();
- input_frames_.push_back(new VideoFrame(test::CreateFakeNativeHandleFrame(
+ input_frames_.push_back(new VideoFrame(test::FakeNativeHandle::CreateFrame(
dummy_handle, 1, 1, 1, 1, webrtc::kVideoRotation_0)));
AddInputFrame(input_frames_[0]);
WaitOutputFrame();
@@ -239,7 +239,7 @@ TEST_F(VideoCaptureInputTest, TestTextureFrameAfterI420Frame) {
WaitOutputFrame();
test::FakeNativeHandle* dummy_handle = new test::FakeNativeHandle();
- input_frames_.push_back(new VideoFrame(test::CreateFakeNativeHandleFrame(
+ input_frames_.push_back(new VideoFrame(test::FakeNativeHandle::CreateFrame(
dummy_handle, 1, 1, 2, 2, webrtc::kVideoRotation_0)));
AddInputFrame(input_frames_[1]);
WaitOutputFrame();
diff --git a/webrtc/video/video_decoder.cc b/webrtc/video/video_decoder.cc
index fa1f2ee878..d699175274 100644
--- a/webrtc/video/video_decoder.cc
+++ b/webrtc/video/video_decoder.cc
@@ -76,6 +76,9 @@ bool VideoDecoderSoftwareFallbackWrapper::InitFallbackDecoder() {
}
if (callback_ != nullptr)
fallback_decoder_->RegisterDecodeCompleteCallback(callback_);
+ fallback_implementation_name_ =
+ std::string(fallback_decoder_->ImplementationName()) +
+ " (fallback from: " + decoder_->ImplementationName() + ")";
return true;
}
@@ -131,4 +134,16 @@ int32_t VideoDecoderSoftwareFallbackWrapper::Reset() {
return decoder_->Reset();
}
+bool VideoDecoderSoftwareFallbackWrapper::PrefersLateDecoding() const {
+ if (fallback_decoder_)
+ return fallback_decoder_->PrefersLateDecoding();
+ return decoder_->PrefersLateDecoding();
+}
+
+const char* VideoDecoderSoftwareFallbackWrapper::ImplementationName() const {
+ if (fallback_decoder_)
+ return fallback_implementation_name_.c_str();
+ return decoder_->ImplementationName();
+}
+
} // namespace webrtc
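
For reference, a trivial standalone sketch of the composite name assembled in InitFallbackDecoder() above, with a hypothetical hardware decoder name:

// Standalone sketch (hypothetical names, not part of the patch).
#include <cstdio>
#include <string>

int main() {
  std::string fallback = "libvpx";          // Software VP8 decoder name.
  std::string primary = "hw-vp8-decoder";   // Hypothetical HW decoder name.
  std::string name = fallback + " (fallback from: " + primary + ")";
  printf("%s\n", name.c_str());  // "libvpx (fallback from: hw-vp8-decoder)"
  return 0;
}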
diff --git a/webrtc/video/video_decoder_unittest.cc b/webrtc/video/video_decoder_unittest.cc
index be09b191ac..4d54a3e53f 100644
--- a/webrtc/video/video_decoder_unittest.cc
+++ b/webrtc/video/video_decoder_unittest.cc
@@ -11,7 +11,8 @@
#include "webrtc/video_decoder.h"
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_error_codes.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/modules/video_coding/include/video_error_codes.h"
namespace webrtc {
@@ -52,6 +53,11 @@ class VideoDecoderSoftwareFallbackWrapperTest : public ::testing::Test {
++reset_count_;
return WEBRTC_VIDEO_CODEC_OK;
}
+
+ const char* ImplementationName() const override {
+ return "fake-decoder";
+ }
+
int init_decode_count_ = 0;
int decode_count_ = 0;
int32_t decode_return_code_ = WEBRTC_VIDEO_CODEC_OK;
@@ -143,11 +149,16 @@ TEST_F(VideoDecoderSoftwareFallbackWrapperTest, ForwardsResetCall) {
}
// TODO(pbos): Fake a VP8 frame well enough to actually receive a callback from
-// the software encoder.
+// the software decoder.
TEST_F(VideoDecoderSoftwareFallbackWrapperTest,
ForwardsRegisterDecodeCompleteCallback) {
class FakeDecodedImageCallback : public DecodedImageCallback {
int32_t Decoded(VideoFrame& decodedImage) override { return 0; }
+ int32_t Decoded(
+ webrtc::VideoFrame& decodedImage, int64_t decode_time_ms) override {
+ RTC_NOTREACHED();
+ return -1;
+ }
} callback, callback2;
VideoCodec codec = {};
@@ -162,4 +173,19 @@ TEST_F(VideoDecoderSoftwareFallbackWrapperTest,
EXPECT_EQ(&callback2, fake_decoder_.decode_complete_callback_);
}
+TEST_F(VideoDecoderSoftwareFallbackWrapperTest,
+ ReportsFallbackImplementationName) {
+ VideoCodec codec = {};
+ fallback_wrapper_.InitDecode(&codec, 2);
+
+ fake_decoder_.decode_return_code_ = WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+ EncodedImage encoded_image;
+ fallback_wrapper_.Decode(encoded_image, false, nullptr, nullptr, -1);
+ // Hard-coded expected value since libvpx is the software implementation name
+ // for VP8. Change this accordingly if the underlying implementation changes.
+ EXPECT_STREQ("libvpx (fallback from: fake-decoder)",
+ fallback_wrapper_.ImplementationName());
+ fallback_wrapper_.Release();
+}
+
} // namespace webrtc
diff --git a/webrtc/video/video_encoder.cc b/webrtc/video/video_encoder.cc
index 6410e395fc..e85e3d97a7 100644
--- a/webrtc/video/video_encoder.cc
+++ b/webrtc/video/video_encoder.cc
@@ -76,6 +76,9 @@ bool VideoEncoderSoftwareFallbackWrapper::InitFallbackEncoder() {
if (channel_parameters_set_)
fallback_encoder_->SetChannelParameters(packet_loss_, rtt_);
+ fallback_implementation_name_ =
+ std::string(fallback_encoder_->ImplementationName()) +
+ " (fallback from: " + encoder_->ImplementationName() + ")";
// Since we're switching to the fallback encoder, Release the real encoder. It
// may be re-initialized via InitEncode later, and it will continue to get
// Set calls for rates and channel parameters in the meantime.
@@ -182,6 +185,12 @@ bool VideoEncoderSoftwareFallbackWrapper::SupportsNativeHandle() const {
return encoder_->SupportsNativeHandle();
}
+const char* VideoEncoderSoftwareFallbackWrapper::ImplementationName() const {
+ if (fallback_encoder_)
+ return fallback_implementation_name_.c_str();
+ return encoder_->ImplementationName();
+}
+
int VideoEncoderSoftwareFallbackWrapper::GetTargetFramerate() {
if (fallback_encoder_)
return fallback_encoder_->GetTargetFramerate();
diff --git a/webrtc/video/video_encoder_unittest.cc b/webrtc/video/video_encoder_unittest.cc
index 3382be83be..0f28f89163 100644
--- a/webrtc/video/video_encoder_unittest.cc
+++ b/webrtc/video/video_encoder_unittest.cc
@@ -11,7 +11,7 @@
#include "webrtc/video_encoder.h"
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_error_codes.h"
+#include "webrtc/modules/video_coding/include/video_error_codes.h"
namespace webrtc {
@@ -67,6 +67,10 @@ class VideoEncoderSoftwareFallbackWrapperTest : public ::testing::Test {
return false;
}
+ const char* ImplementationName() const override {
+ return "fake-encoder";
+ }
+
int init_encode_count_ = 0;
int32_t init_encode_return_code_ = WEBRTC_VIDEO_CODEC_OK;
int32_t encode_return_code_ = WEBRTC_VIDEO_CODEC_OK;
@@ -259,4 +263,13 @@ TEST_F(VideoEncoderSoftwareFallbackWrapperTest,
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_.Release());
}
+TEST_F(VideoEncoderSoftwareFallbackWrapperTest,
+ ReportsFallbackImplementationName) {
+ UtilizeFallbackEncoder();
+ // Hard-coded expected value since libvpx is the software implementation name
+ // for VP8. Change this accordingly if the underlying implementation changes.
+ EXPECT_STREQ("libvpx (fallback from: fake-encoder)",
+ fallback_wrapper_.ImplementationName());
+}
+
} // namespace webrtc
diff --git a/webrtc/video/video_loopback.cc b/webrtc/video/video_loopback.cc
index 0c06f85fcc..2338a84a43 100644
--- a/webrtc/video/video_loopback.cc
+++ b/webrtc/video/video_loopback.cc
@@ -20,6 +20,7 @@
namespace webrtc {
namespace flags {
+// Flags common with screenshare loopback, with different default values.
DEFINE_int32(width, 640, "Video width.");
size_t Width() {
return static_cast<size_t>(FLAGS_width);
@@ -55,11 +56,46 @@ int MaxBitrateKbps() {
return static_cast<int>(FLAGS_max_bitrate);
}
+DEFINE_int32(num_temporal_layers,
+ 1,
+ "Number of temporal layers. Set to 1-4 to override.");
+int NumTemporalLayers() {
+ return static_cast<int>(FLAGS_num_temporal_layers);
+}
+
+// Flags common with screenshare loopback, with equal default values.
DEFINE_string(codec, "VP8", "Video codec to use.");
std::string Codec() {
return static_cast<std::string>(FLAGS_codec);
}
+DEFINE_int32(selected_tl,
+ -1,
+ "Temporal layer to show or analyze. -1 to disable filtering.");
+int SelectedTL() {
+ return static_cast<int>(FLAGS_selected_tl);
+}
+
+DEFINE_int32(
+ duration,
+ 0,
+ "Duration of the test in seconds. If 0, rendered will be shown instead.");
+int DurationSecs() {
+ return static_cast<int>(FLAGS_duration);
+}
+
+DEFINE_string(output_filename, "", "Target graph data filename.");
+std::string OutputFilename() {
+ return static_cast<std::string>(FLAGS_output_filename);
+}
+
+DEFINE_string(graph_title,
+ "",
+ "If empty, title will be generated automatically.");
+std::string GraphTitle() {
+ return static_cast<std::string>(FLAGS_graph_title);
+}
+
DEFINE_int32(loss_percent, 0, "Percentage of packets randomly lost.");
int LossPercent() {
return static_cast<int>(FLAGS_loss_percent);
@@ -91,8 +127,55 @@ int StdPropagationDelayMs() {
return static_cast<int>(FLAGS_std_propagation_delay_ms);
}
+DEFINE_int32(selected_stream, 0, "ID of the stream to show or analyze.");
+int SelectedStream() {
+ return static_cast<int>(FLAGS_selected_stream);
+}
+
+DEFINE_int32(num_spatial_layers, 1, "Number of spatial layers to use.");
+int NumSpatialLayers() {
+ return static_cast<int>(FLAGS_num_spatial_layers);
+}
+
+DEFINE_int32(selected_sl,
+ -1,
+ "Spatial layer to show or analyze. -1 to disable filtering.");
+int SelectedSL() {
+ return static_cast<int>(FLAGS_selected_sl);
+}
+
+DEFINE_string(stream0,
+ "",
+ "Comma separated values describing VideoStream for stream #0.");
+std::string Stream0() {
+ return static_cast<std::string>(FLAGS_stream0);
+}
+
+DEFINE_string(stream1,
+ "",
+ "Comma separated values describing VideoStream for stream #1.");
+std::string Stream1() {
+ return static_cast<std::string>(FLAGS_stream1);
+}
+
+DEFINE_string(sl0,
+ "",
+ "Comma separated values describing SpatialLayer for layer #0.");
+std::string SL0() {
+ return static_cast<std::string>(FLAGS_sl0);
+}
+
+DEFINE_string(sl1,
+ "",
+ "Comma separated values describing SpatialLayer for layer #1.");
+std::string SL1() {
+ return static_cast<std::string>(FLAGS_sl1);
+}
+
DEFINE_bool(logs, false, "print logs to stderr");
+DEFINE_bool(send_side_bwe, true, "Use send-side bandwidth estimation");
+
DEFINE_string(
force_fieldtrials,
"",
@@ -101,21 +184,7 @@ DEFINE_string(
" will assign the group Enable to field trial WebRTC-FooFeature. Multiple "
"trials are separated by \"/\"");
-DEFINE_int32(num_temporal_layers,
- 1,
- "Number of temporal layers. Set to 1-4 to override.");
-size_t NumTemporalLayers() {
- return static_cast<size_t>(FLAGS_num_temporal_layers);
-}
-
-DEFINE_int32(
- tl_discard_threshold,
- 0,
- "Discard TLs with id greater or equal the threshold. 0 to disable.");
-size_t TLDiscardThreshold() {
- return static_cast<size_t>(FLAGS_tl_discard_threshold);
-}
-
+// Video-specific flags.
DEFINE_string(clip,
"",
"Name of the clip to show. If empty, using chroma generator.");
@@ -123,21 +192,6 @@ std::string Clip() {
return static_cast<std::string>(FLAGS_clip);
}
-DEFINE_string(
- output_filename,
- "",
- "Name of a target graph data file. If set, no preview will be shown.");
-std::string OutputFilename() {
- return static_cast<std::string>(FLAGS_output_filename);
-}
-
-DEFINE_int32(duration, 60, "Duration of the test in seconds.");
-int DurationSecs() {
- return static_cast<int>(FLAGS_duration);
-}
-
-DEFINE_bool(send_side_bwe, true, "Use send-side bandwidth estimation");
-
} // namespace flags
void Loopback() {
@@ -153,27 +207,36 @@ void Loopback() {
call_bitrate_config.start_bitrate_bps = flags::StartBitrateKbps() * 1000;
call_bitrate_config.max_bitrate_bps = flags::MaxBitrateKbps() * 1000;
- std::string clip = flags::Clip();
- std::string graph_title = clip.empty() ? "" : "video " + clip;
VideoQualityTest::Params params{
{flags::Width(), flags::Height(), flags::Fps(),
flags::MinBitrateKbps() * 1000, flags::TargetBitrateKbps() * 1000,
flags::MaxBitrateKbps() * 1000, flags::Codec(),
- flags::NumTemporalLayers(),
+ flags::NumTemporalLayers(), flags::SelectedTL(),
0, // No min transmit bitrate.
- call_bitrate_config, flags::TLDiscardThreshold(),
- flags::FLAGS_send_side_bwe},
- {clip},
+ call_bitrate_config, flags::FLAGS_send_side_bwe},
+ {flags::Clip()},
{}, // Screenshare specific.
- {graph_title, 0.0, 0.0, flags::DurationSecs(), flags::OutputFilename()},
+ {"video", 0.0, 0.0, flags::DurationSecs(), flags::OutputFilename(),
+ flags::GraphTitle()},
pipe_config,
flags::FLAGS_logs};
+ std::vector<std::string> stream_descriptors;
+ stream_descriptors.push_back(flags::Stream0());
+ stream_descriptors.push_back(flags::Stream1());
+ std::vector<std::string> SL_descriptors;
+ SL_descriptors.push_back(flags::SL0());
+ SL_descriptors.push_back(flags::SL1());
+ VideoQualityTest::FillScalabilitySettings(
+ &params, stream_descriptors, flags::SelectedStream(),
+ flags::NumSpatialLayers(), flags::SelectedSL(), SL_descriptors);
+
VideoQualityTest test;
- if (flags::OutputFilename().empty())
- test.RunWithVideoRenderer(params);
- else
+ if (flags::DurationSecs()) {
test.RunWithAnalyzer(params);
+ } else {
+ test.RunWithVideoRenderer(params);
+ }
}
} // namespace webrtc
diff --git a/webrtc/video/video_quality_test.cc b/webrtc/video/video_quality_test.cc
index 0f45fa6632..08ae0a9cee 100644
--- a/webrtc/video/video_quality_test.cc
+++ b/webrtc/video/video_quality_test.cc
@@ -12,16 +12,20 @@
#include <algorithm>
#include <deque>
#include <map>
+#include <sstream>
+#include <string>
#include <vector>
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/checks.h"
+#include "webrtc/base/event.h"
#include "webrtc/base/format_macros.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/call.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
+#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
#include "webrtc/system_wrappers/include/cpu_info.h"
#include "webrtc/test/layer_filtering_transport.h"
#include "webrtc/test/run_loop.h"
@@ -43,18 +47,22 @@ class VideoAnalyzer : public PacketReceiver,
public EncodedFrameObserver,
public EncodingTimeObserver {
public:
- VideoAnalyzer(Transport* transport,
+ VideoAnalyzer(test::LayerFilteringTransport* transport,
const std::string& test_label,
double avg_psnr_threshold,
double avg_ssim_threshold,
int duration_frames,
- FILE* graph_data_output_file)
+ FILE* graph_data_output_file,
+ const std::string& graph_title,
+ uint32_t ssrc_to_analyze)
: input_(nullptr),
transport_(transport),
receiver_(nullptr),
send_stream_(nullptr),
test_label_(test_label),
graph_data_output_file_(graph_data_output_file),
+ graph_title_(graph_title),
+ ssrc_to_analyze_(ssrc_to_analyze),
frames_to_process_(duration_frames),
frames_recorded_(0),
frames_processed_(0),
@@ -63,8 +71,9 @@ class VideoAnalyzer : public PacketReceiver,
rtp_timestamp_delta_(0),
avg_psnr_threshold_(avg_psnr_threshold),
avg_ssim_threshold_(avg_ssim_threshold),
- comparison_available_event_(EventWrapper::Create()),
- done_(EventWrapper::Create()) {
+ stats_polling_thread_(&PollStatsThread, this, "StatsPoller"),
+ comparison_available_event_(false, false),
+ done_(false, false) {
// Create thread pool for CPU-expensive PSNR/SSIM calculations.
// Try to use about as many threads as cores, but leave kMinCoresLeft alone,
@@ -85,20 +94,16 @@ class VideoAnalyzer : public PacketReceiver,
}
for (uint32_t i = 0; i < num_cores; ++i) {
- rtc::scoped_ptr<ThreadWrapper> thread =
- ThreadWrapper::CreateThread(&FrameComparisonThread, this, "Analyzer");
- EXPECT_TRUE(thread->Start());
- comparison_thread_pool_.push_back(thread.release());
+ rtc::PlatformThread* thread =
+ new rtc::PlatformThread(&FrameComparisonThread, this, "Analyzer");
+ thread->Start();
+ comparison_thread_pool_.push_back(thread);
}
-
- stats_polling_thread_ =
- ThreadWrapper::CreateThread(&PollStatsThread, this, "StatsPoller");
- EXPECT_TRUE(stats_polling_thread_->Start());
}
~VideoAnalyzer() {
- for (ThreadWrapper* thread : comparison_thread_pool_) {
- EXPECT_TRUE(thread->Stop());
+ for (rtc::PlatformThread* thread : comparison_thread_pool_) {
+ thread->Stop();
delete thread;
}
}
@@ -109,9 +114,9 @@ class VideoAnalyzer : public PacketReceiver,
const uint8_t* packet,
size_t length,
const PacketTime& packet_time) override {
- rtc::scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
+ RtpUtility::RtpHeaderParser parser(packet, length);
RTPHeader header;
- parser->Parse(packet, length, &header);
+ parser.Parse(&header);
{
rtc::CritScope lock(&crit_);
recv_times_[header.timestamp - rtp_timestamp_delta_] =
@@ -145,10 +150,13 @@ class VideoAnalyzer : public PacketReceiver,
bool SendRtp(const uint8_t* packet,
size_t length,
const PacketOptions& options) override {
- rtc::scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
+ RtpUtility::RtpHeaderParser parser(packet, length);
RTPHeader header;
- parser->Parse(packet, length, &header);
+ parser.Parse(&header);
+ int64_t current_time =
+ Clock::GetRealTimeClock()->CurrentNtpInMilliseconds();
+ bool result = transport_->SendRtp(packet, length, options);
{
rtc::CritScope lock(&crit_);
if (rtp_timestamp_delta_ == 0) {
@@ -156,13 +164,14 @@ class VideoAnalyzer : public PacketReceiver,
first_send_frame_.Reset();
}
uint32_t timestamp = header.timestamp - rtp_timestamp_delta_;
- send_times_[timestamp] =
- Clock::GetRealTimeClock()->CurrentNtpInMilliseconds();
- encoded_frame_sizes_[timestamp] +=
- length - (header.headerLength + header.paddingLength);
+ send_times_[timestamp] = current_time;
+ if (!transport_->DiscardedLastPacket() &&
+ header.ssrc == ssrc_to_analyze_) {
+ encoded_frame_sizes_[timestamp] +=
+ length - (header.headerLength + header.paddingLength);
+ }
}
-
- return transport_->SendRtp(packet, length, options);
+ return result;
}
bool SendRtcp(const uint8_t* packet, size_t length) override {
@@ -192,6 +201,11 @@ class VideoAnalyzer : public PacketReceiver,
VideoFrame reference_frame = frames_.front();
frames_.pop_front();
assert(!reference_frame.IsZeroSize());
+ if (send_timestamp == reference_frame.timestamp() - 1) {
+ // TODO(ivica): Make this work for > 2 streams.
+ // Look at rtp_sender.cc:RTPSender::BuildRTPHeader.
+ ++send_timestamp;
+ }
EXPECT_EQ(reference_frame.timestamp(), send_timestamp);
assert(reference_frame.timestamp() == send_timestamp);
@@ -207,11 +221,11 @@ class VideoAnalyzer : public PacketReceiver,
// at time-out check if frames_processed is going up. If so, give it more
// time, otherwise fail. Hopefully this will reduce test flakiness.
+ stats_polling_thread_.Start();
+
int last_frames_processed = -1;
- EventTypeWrapper eventType;
int iteration = 0;
- while ((eventType = done_->Wait(VideoQualityTest::kDefaultTimeoutMs)) !=
- kEventSignaled) {
+ while (!done_.Wait(VideoQualityTest::kDefaultTimeoutMs)) {
int frames_processed;
{
rtc::CritScope crit(&comparison_lock_);
@@ -240,12 +254,12 @@ class VideoAnalyzer : public PacketReceiver,
// Signal stats polling thread if that is still waiting and stop it now,
// since it uses the send_stream_ reference that might be reclaimed after
// returning from this method.
- done_->Set();
- EXPECT_TRUE(stats_polling_thread_->Stop());
+ done_.Set();
+ stats_polling_thread_.Stop();
}
VideoCaptureInput* input_;
- Transport* const transport_;
+ test::LayerFilteringTransport* const transport_;
PacketReceiver* receiver_;
VideoSendStream* send_stream_;
@@ -320,8 +334,13 @@ class VideoAnalyzer : public PacketReceiver,
int64_t recv_time_ms = recv_times_[reference.timestamp()];
recv_times_.erase(reference.timestamp());
- size_t encoded_size = encoded_frame_sizes_[reference.timestamp()];
- encoded_frame_sizes_.erase(reference.timestamp());
+ // TODO(ivica): Make this work for > 2 streams.
+ auto it = encoded_frame_sizes_.find(reference.timestamp());
+ if (it == encoded_frame_sizes_.end())
+ it = encoded_frame_sizes_.find(reference.timestamp() - 1);
+ size_t encoded_size = it == encoded_frame_sizes_.end() ? 0 : it->second;
+ if (it != encoded_frame_sizes_.end())
+ encoded_frame_sizes_.erase(it);
VideoFrame reference_copy;
VideoFrame render_copy;
@@ -332,7 +351,7 @@ class VideoAnalyzer : public PacketReceiver,
comparisons_.push_back(FrameComparison(reference_copy, render_copy, dropped,
send_time_ms, recv_time_ms,
render_time_ms, encoded_size));
- comparison_available_event_->Set();
+ comparison_available_event_.Set();
}
static bool PollStatsThread(void* obj) {
@@ -340,15 +359,11 @@ class VideoAnalyzer : public PacketReceiver,
}
bool PollStats() {
- switch (done_->Wait(kSendStatsPollingIntervalMs)) {
- case kEventSignaled:
- case kEventError:
- done_->Set(); // Make sure main thread is also signaled.
- return false;
- case kEventTimeout:
- break;
- default:
- RTC_NOTREACHED();
+ if (done_.Wait(kSendStatsPollingIntervalMs)) {
+ // Set event again to make sure main thread is also signaled, then we're
+ // done.
+ done_.Set();
+ return false;
}
VideoSendStream::Stats stats = send_stream_->GetStats();
@@ -377,9 +392,9 @@ class VideoAnalyzer : public PacketReceiver,
if (!PopComparison(&comparison)) {
// Wait until new comparison task is available, or test is done.
// If done, wake up remaining threads waiting.
- comparison_available_event_->Wait(1000);
+ comparison_available_event_.Wait(1000);
if (AllFramesRecorded()) {
- comparison_available_event_->Set();
+ comparison_available_event_.Set();
return false;
}
return true; // Try again.
@@ -391,8 +406,8 @@ class VideoAnalyzer : public PacketReceiver,
PrintResults();
if (graph_data_output_file_)
PrintSamplesToFile();
- done_->Set();
- comparison_available_event_->Set();
+ done_.Set();
+ comparison_available_event_.Set();
return false;
}
@@ -509,7 +524,7 @@ class VideoAnalyzer : public PacketReceiver,
return A.input_time_ms < B.input_time_ms;
});
- fprintf(out, "%s\n", test_label_.c_str());
+ fprintf(out, "%s\n", graph_title_.c_str());
fprintf(out, "%" PRIuS "\n", samples_.size());
fprintf(out,
"dropped "
@@ -547,6 +562,8 @@ class VideoAnalyzer : public PacketReceiver,
const std::string test_label_;
FILE* const graph_data_output_file_;
+ const std::string graph_title_;
+ const uint32_t ssrc_to_analyze_;
std::vector<Sample> samples_ GUARDED_BY(comparison_lock_);
std::map<int64_t, int> samples_encode_time_ms_ GUARDED_BY(comparison_lock_);
test::Statistics sender_time_ GUARDED_BY(comparison_lock_);
@@ -579,104 +596,260 @@ class VideoAnalyzer : public PacketReceiver,
const double avg_ssim_threshold_;
rtc::CriticalSection comparison_lock_;
- std::vector<ThreadWrapper*> comparison_thread_pool_;
- rtc::scoped_ptr<ThreadWrapper> stats_polling_thread_;
- const rtc::scoped_ptr<EventWrapper> comparison_available_event_;
+ std::vector<rtc::PlatformThread*> comparison_thread_pool_;
+ rtc::PlatformThread stats_polling_thread_;
+ rtc::Event comparison_available_event_;
std::deque<FrameComparison> comparisons_ GUARDED_BY(comparison_lock_);
- const rtc::scoped_ptr<EventWrapper> done_;
+ rtc::Event done_;
};
VideoQualityTest::VideoQualityTest() : clock_(Clock::GetRealTimeClock()) {}
-void VideoQualityTest::ValidateParams(const Params& params) {
- RTC_CHECK_GE(params.common.max_bitrate_bps, params.common.target_bitrate_bps);
- RTC_CHECK_GE(params.common.target_bitrate_bps, params.common.min_bitrate_bps);
- RTC_CHECK_LT(params.common.tl_discard_threshold,
- params.common.num_temporal_layers);
+void VideoQualityTest::TestBody() {}
+
+std::string VideoQualityTest::GenerateGraphTitle() const {
+ std::stringstream ss;
+ ss << params_.common.codec;
+ ss << " (" << params_.common.target_bitrate_bps / 1000 << "kbps";
+ ss << ", " << params_.common.fps << " FPS";
+ if (params_.screenshare.scroll_duration)
+ ss << ", " << params_.screenshare.scroll_duration << "s scroll";
+ if (params_.ss.streams.size() > 1)
+ ss << ", Stream #" << params_.ss.selected_stream;
+ if (params_.ss.num_spatial_layers > 1)
+ ss << ", Layer #" << params_.ss.selected_sl;
+ ss << ")";
+ return ss.str();
}
-void VideoQualityTest::TestBody() {}
+void VideoQualityTest::CheckParams() {
+ // Add a default stream if none specified.
+ if (params_.ss.streams.empty())
+ params_.ss.streams.push_back(VideoQualityTest::DefaultVideoStream(params_));
+ if (params_.ss.num_spatial_layers == 0)
+ params_.ss.num_spatial_layers = 1;
+
+ if (params_.pipe.loss_percent != 0 ||
+ params_.pipe.queue_length_packets != 0) {
+ // Since LayerFilteringTransport changes the sequence numbers, we can't
+ // use that feature with packet loss, since the NACK requests would end up
+ // retransmitting the wrong packets.
+ RTC_CHECK(params_.ss.selected_sl == -1 ||
+ params_.ss.selected_sl == params_.ss.num_spatial_layers - 1);
+ RTC_CHECK(params_.common.selected_tl == -1 ||
+ params_.common.selected_tl ==
+ params_.common.num_temporal_layers - 1);
+ }
+
+ // TODO(ivica): Should max_bitrate_bps == -1 represent inf max bitrate, as it
+ // does in some parts of the code?
+ RTC_CHECK_GE(params_.common.max_bitrate_bps,
+ params_.common.target_bitrate_bps);
+ RTC_CHECK_GE(params_.common.target_bitrate_bps,
+ params_.common.min_bitrate_bps);
+ RTC_CHECK_LT(params_.common.selected_tl, params_.common.num_temporal_layers);
+ RTC_CHECK_LT(params_.ss.selected_stream, params_.ss.streams.size());
+ for (const VideoStream& stream : params_.ss.streams) {
+ RTC_CHECK_GE(stream.min_bitrate_bps, 0);
+ RTC_CHECK_GE(stream.target_bitrate_bps, stream.min_bitrate_bps);
+ RTC_CHECK_GE(stream.max_bitrate_bps, stream.target_bitrate_bps);
+ RTC_CHECK_EQ(static_cast<int>(stream.temporal_layer_thresholds_bps.size()),
+ params_.common.num_temporal_layers - 1);
+ }
+ // TODO(ivica): Should we check if the sum of all streams/layers is equal to
+ // the total bitrate? We have to update them anyway in case the bitrate
+ // estimator changes the total bitrate.
+ RTC_CHECK_GE(params_.ss.num_spatial_layers, 1);
+ RTC_CHECK_LE(params_.ss.selected_sl, params_.ss.num_spatial_layers);
+ RTC_CHECK(params_.ss.spatial_layers.empty() ||
+ params_.ss.spatial_layers.size() ==
+ static_cast<size_t>(params_.ss.num_spatial_layers));
+ if (params_.common.codec == "VP8") {
+ RTC_CHECK_EQ(params_.ss.num_spatial_layers, 1);
+ } else if (params_.common.codec == "VP9") {
+ RTC_CHECK_EQ(params_.ss.streams.size(), 1u);
+ }
+}
+
+// Static.
+std::vector<int> VideoQualityTest::ParseCSV(const std::string& str) {
+ // Parse comma separated nonnegative integers, where some elements may be
+ // empty. The empty values are replaced with -1.
+ // E.g. "10,-20,,30,40" --> {10, 20, -1, 30,40}
+ // E.g. ",,10,,20," --> {-1, -1, 10, -1, 20, -1}
+ std::vector<int> result;
+ if (str.empty())
+ return result;
+
+ const char* p = str.c_str();
+ int value = -1;
+ int pos;
+ while (*p) {
+ if (*p == ',') {
+ result.push_back(value);
+ value = -1;
+ ++p;
+ continue;
+ }
+ RTC_CHECK_EQ(sscanf(p, "%d%n", &value, &pos), 1)
+ << "Unexpected non-number value.";
+ p += pos;
+ }
+ result.push_back(value);
+ return result;
+}
+
+// Static.
+VideoStream VideoQualityTest::DefaultVideoStream(const Params& params) {
+ VideoStream stream;
+ stream.width = params.common.width;
+ stream.height = params.common.height;
+ stream.max_framerate = params.common.fps;
+ stream.min_bitrate_bps = params.common.min_bitrate_bps;
+ stream.target_bitrate_bps = params.common.target_bitrate_bps;
+ stream.max_bitrate_bps = params.common.max_bitrate_bps;
+ stream.max_qp = 52;
+ if (params.common.num_temporal_layers == 2)
+ stream.temporal_layer_thresholds_bps.push_back(stream.target_bitrate_bps);
+ return stream;
+}
-void VideoQualityTest::SetupFullStack(const Params& params,
- Transport* send_transport,
- Transport* recv_transport) {
- if (params.logs)
+// Static.
+void VideoQualityTest::FillScalabilitySettings(
+ Params* params,
+ const std::vector<std::string>& stream_descriptors,
+ size_t selected_stream,
+ int num_spatial_layers,
+ int selected_sl,
+ const std::vector<std::string>& sl_descriptors) {
+ // Read VideoStream and SpatialLayer elements from a list of comma separated
+ // lists. To use a default value for an element, use -1 or leave empty.
+ // Validity checks are performed in CheckParams.
+
+ RTC_CHECK(params->ss.streams.empty());
+ for (auto descriptor : stream_descriptors) {
+ if (descriptor.empty())
+ continue;
+ VideoStream stream = VideoQualityTest::DefaultVideoStream(*params);
+ std::vector<int> v = VideoQualityTest::ParseCSV(descriptor);
+ if (v[0] != -1)
+ stream.width = static_cast<size_t>(v[0]);
+ if (v[1] != -1)
+ stream.height = static_cast<size_t>(v[1]);
+ if (v[2] != -1)
+ stream.max_framerate = v[2];
+ if (v[3] != -1)
+ stream.min_bitrate_bps = v[3];
+ if (v[4] != -1)
+ stream.target_bitrate_bps = v[4];
+ if (v[5] != -1)
+ stream.max_bitrate_bps = v[5];
+ if (v.size() > 6 && v[6] != -1)
+ stream.max_qp = v[6];
+ if (v.size() > 7) {
+ stream.temporal_layer_thresholds_bps.clear();
+ stream.temporal_layer_thresholds_bps.insert(
+ stream.temporal_layer_thresholds_bps.end(), v.begin() + 7, v.end());
+ } else {
+ // Automatic TL thresholds for more than two layers are not supported.
+ RTC_CHECK_LE(params->common.num_temporal_layers, 2);
+ }
+ params->ss.streams.push_back(stream);
+ }
+ params->ss.selected_stream = selected_stream;
+
+ params->ss.num_spatial_layers = num_spatial_layers ? num_spatial_layers : 1;
+ params->ss.selected_sl = selected_sl;
+ RTC_CHECK(params->ss.spatial_layers.empty());
+ for (auto descriptor : sl_descriptors) {
+ if (descriptor.empty())
+ continue;
+ std::vector<int> v = VideoQualityTest::ParseCSV(descriptor);
+ RTC_CHECK_GT(v[2], 0);
+
+ SpatialLayer layer;
+ layer.scaling_factor_num = v[0] == -1 ? 1 : v[0];
+ layer.scaling_factor_den = v[1] == -1 ? 1 : v[1];
+ layer.target_bitrate_bps = v[2];
+ params->ss.spatial_layers.push_back(layer);
+ }
+}
+
+void VideoQualityTest::SetupCommon(Transport* send_transport,
+ Transport* recv_transport) {
+ if (params_.logs)
trace_to_stderr_.reset(new test::TraceToStderr);
- CreateSendConfig(1, send_transport);
+ size_t num_streams = params_.ss.streams.size();
+ CreateSendConfig(num_streams, 0, send_transport);
int payload_type;
- if (params.common.codec == "VP8") {
+ if (params_.common.codec == "VP8") {
encoder_.reset(VideoEncoder::Create(VideoEncoder::kVp8));
payload_type = kPayloadTypeVP8;
- } else if (params.common.codec == "VP9") {
+ } else if (params_.common.codec == "VP9") {
encoder_.reset(VideoEncoder::Create(VideoEncoder::kVp9));
payload_type = kPayloadTypeVP9;
} else {
RTC_NOTREACHED() << "Codec not supported!";
return;
}
- send_config_.encoder_settings.encoder = encoder_.get();
- send_config_.encoder_settings.payload_name = params.common.codec;
- send_config_.encoder_settings.payload_type = payload_type;
-
- send_config_.rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
- send_config_.rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[0]);
- send_config_.rtp.rtx.payload_type = kSendRtxPayloadType;
-
- send_config_.rtp.extensions.clear();
- if (params.common.send_side_bwe) {
- send_config_.rtp.extensions.push_back(
+ video_send_config_.encoder_settings.encoder = encoder_.get();
+ video_send_config_.encoder_settings.payload_name = params_.common.codec;
+ video_send_config_.encoder_settings.payload_type = payload_type;
+ video_send_config_.rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
+ video_send_config_.rtp.rtx.payload_type = kSendRtxPayloadType;
+ for (size_t i = 0; i < num_streams; ++i)
+ video_send_config_.rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[i]);
+
+ video_send_config_.rtp.extensions.clear();
+ if (params_.common.send_side_bwe) {
+ video_send_config_.rtp.extensions.push_back(
RtpExtension(RtpExtension::kTransportSequenceNumber,
test::kTransportSequenceNumberExtensionId));
} else {
- send_config_.rtp.extensions.push_back(RtpExtension(
+ video_send_config_.rtp.extensions.push_back(RtpExtension(
RtpExtension::kAbsSendTime, test::kAbsSendTimeExtensionId));
}
- // Automatically fill out streams[0] with params.
- VideoStream* stream = &encoder_config_.streams[0];
- stream->width = params.common.width;
- stream->height = params.common.height;
- stream->min_bitrate_bps = params.common.min_bitrate_bps;
- stream->target_bitrate_bps = params.common.target_bitrate_bps;
- stream->max_bitrate_bps = params.common.max_bitrate_bps;
- stream->max_framerate = static_cast<int>(params.common.fps);
-
- stream->temporal_layer_thresholds_bps.clear();
- if (params.common.num_temporal_layers > 1) {
- stream->temporal_layer_thresholds_bps.push_back(stream->target_bitrate_bps);
- }
+ video_encoder_config_.min_transmit_bitrate_bps =
+ params_.common.min_transmit_bps;
+ video_encoder_config_.streams = params_.ss.streams;
+ video_encoder_config_.spatial_layers = params_.ss.spatial_layers;
CreateMatchingReceiveConfigs(recv_transport);
- receive_configs_[0].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
- receive_configs_[0].rtp.rtx[kSendRtxPayloadType].ssrc = kSendRtxSsrcs[0];
- receive_configs_[0].rtp.rtx[kSendRtxPayloadType].payload_type =
- kSendRtxPayloadType;
-
- encoder_config_.min_transmit_bitrate_bps = params.common.min_transmit_bps;
+ for (size_t i = 0; i < num_streams; ++i) {
+ video_receive_configs_[i].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
+ video_receive_configs_[i].rtp.rtx[kSendRtxPayloadType].ssrc =
+ kSendRtxSsrcs[i];
+ video_receive_configs_[i].rtp.rtx[kSendRtxPayloadType].payload_type =
+ kSendRtxPayloadType;
+ video_receive_configs_[i].rtp.transport_cc = params_.common.send_side_bwe;
+ }
}
-void VideoQualityTest::SetupScreenshare(const Params& params) {
- RTC_CHECK(params.screenshare.enabled);
+void VideoQualityTest::SetupScreenshare() {
+ RTC_CHECK(params_.screenshare.enabled);
// Fill out codec settings.
- encoder_config_.content_type = VideoEncoderConfig::ContentType::kScreen;
- if (params.common.codec == "VP8") {
+ video_encoder_config_.content_type = VideoEncoderConfig::ContentType::kScreen;
+ if (params_.common.codec == "VP8") {
codec_settings_.VP8 = VideoEncoder::GetDefaultVp8Settings();
codec_settings_.VP8.denoisingOn = false;
codec_settings_.VP8.frameDroppingOn = false;
codec_settings_.VP8.numberOfTemporalLayers =
- static_cast<unsigned char>(params.common.num_temporal_layers);
- encoder_config_.encoder_specific_settings = &codec_settings_.VP8;
- } else if (params.common.codec == "VP9") {
+ static_cast<unsigned char>(params_.common.num_temporal_layers);
+ video_encoder_config_.encoder_specific_settings = &codec_settings_.VP8;
+ } else if (params_.common.codec == "VP9") {
codec_settings_.VP9 = VideoEncoder::GetDefaultVp9Settings();
codec_settings_.VP9.denoisingOn = false;
codec_settings_.VP9.frameDroppingOn = false;
codec_settings_.VP9.numberOfTemporalLayers =
- static_cast<unsigned char>(params.common.num_temporal_layers);
- encoder_config_.encoder_specific_settings = &codec_settings_.VP9;
+ static_cast<unsigned char>(params_.common.num_temporal_layers);
+ video_encoder_config_.encoder_specific_settings = &codec_settings_.VP9;
+ codec_settings_.VP9.numberOfSpatialLayers =
+ static_cast<unsigned char>(params_.ss.num_spatial_layers);
}
// Setup frame generator.
@@ -688,71 +861,67 @@ void VideoQualityTest::SetupScreenshare(const Params& params) {
slides.push_back(test::ResourcePath("photo_1850_1110", "yuv"));
slides.push_back(test::ResourcePath("difficult_photo_1850_1110", "yuv"));
- if (params.screenshare.scroll_duration == 0) {
+ if (params_.screenshare.scroll_duration == 0) {
// Cycle image every slide_change_interval seconds.
frame_generator_.reset(test::FrameGenerator::CreateFromYuvFile(
slides, kWidth, kHeight,
- params.screenshare.slide_change_interval * params.common.fps));
+ params_.screenshare.slide_change_interval * params_.common.fps));
} else {
- RTC_CHECK_LE(params.common.width, kWidth);
- RTC_CHECK_LE(params.common.height, kHeight);
- RTC_CHECK_GT(params.screenshare.slide_change_interval, 0);
- const int kPauseDurationMs = (params.screenshare.slide_change_interval -
- params.screenshare.scroll_duration) * 1000;
- RTC_CHECK_LE(params.screenshare.scroll_duration,
- params.screenshare.slide_change_interval);
-
- if (params.screenshare.scroll_duration) {
- frame_generator_.reset(
- test::FrameGenerator::CreateScrollingInputFromYuvFiles(
- clock_, slides, kWidth, kHeight, params.common.width,
- params.common.height, params.screenshare.scroll_duration * 1000,
- kPauseDurationMs));
- } else {
- frame_generator_.reset(test::FrameGenerator::CreateFromYuvFile(
- slides, kWidth, kHeight,
- params.screenshare.slide_change_interval * params.common.fps));
- }
+ RTC_CHECK_LE(params_.common.width, kWidth);
+ RTC_CHECK_LE(params_.common.height, kHeight);
+ RTC_CHECK_GT(params_.screenshare.slide_change_interval, 0);
+ const int kPauseDurationMs = (params_.screenshare.slide_change_interval -
+ params_.screenshare.scroll_duration) *
+ 1000;
+ RTC_CHECK_LE(params_.screenshare.scroll_duration,
+ params_.screenshare.slide_change_interval);
+
+ frame_generator_.reset(
+ test::FrameGenerator::CreateScrollingInputFromYuvFiles(
+ clock_, slides, kWidth, kHeight, params_.common.width,
+ params_.common.height, params_.screenshare.scroll_duration * 1000,
+ kPauseDurationMs));
}
}
-void VideoQualityTest::CreateCapturer(const Params& params,
- VideoCaptureInput* input) {
- if (params.screenshare.enabled) {
- test::FrameGeneratorCapturer *frame_generator_capturer =
+void VideoQualityTest::CreateCapturer(VideoCaptureInput* input) {
+ if (params_.screenshare.enabled) {
+ test::FrameGeneratorCapturer* frame_generator_capturer =
new test::FrameGeneratorCapturer(
- clock_, input, frame_generator_.release(), params.common.fps);
+ clock_, input, frame_generator_.release(), params_.common.fps);
EXPECT_TRUE(frame_generator_capturer->Init());
capturer_.reset(frame_generator_capturer);
} else {
- if (params.video.clip_name.empty()) {
- capturer_.reset(test::VideoCapturer::Create(
- input, params.common.width, params.common.height, params.common.fps,
- clock_));
+ if (params_.video.clip_name.empty()) {
+ capturer_.reset(test::VideoCapturer::Create(input, params_.common.width,
+ params_.common.height,
+ params_.common.fps, clock_));
} else {
capturer_.reset(test::FrameGeneratorCapturer::CreateFromYuvFile(
- input, test::ResourcePath(params.video.clip_name, "yuv"),
- params.common.width, params.common.height, params.common.fps,
+ input, test::ResourcePath(params_.video.clip_name, "yuv"),
+ params_.common.width, params_.common.height, params_.common.fps,
clock_));
ASSERT_TRUE(capturer_.get() != nullptr)
- << "Could not create capturer for " << params.video.clip_name
+ << "Could not create capturer for " << params_.video.clip_name
<< ".yuv. Is this resource file present?";
}
}
}
void VideoQualityTest::RunWithAnalyzer(const Params& params) {
+ params_ = params;
+
// TODO(ivica): Merge with RunWithRenderer and use a flag / argument to
// differentiate between the analyzer and the renderer case.
- ValidateParams(params);
+ CheckParams();
FILE* graph_data_output_file = nullptr;
- if (!params.analyzer.graph_data_output_filename.empty()) {
+ if (!params_.analyzer.graph_data_output_filename.empty()) {
graph_data_output_file =
- fopen(params.analyzer.graph_data_output_filename.c_str(), "w");
+ fopen(params_.analyzer.graph_data_output_filename.c_str(), "w");
RTC_CHECK(graph_data_output_file != nullptr)
- << "Can't open the file "
- << params.analyzer.graph_data_output_filename << "!";
+ << "Can't open the file " << params_.analyzer.graph_data_output_filename
+ << "!";
}
Call::Config call_config;
@@ -761,37 +930,64 @@ void VideoQualityTest::RunWithAnalyzer(const Params& params) {
test::LayerFilteringTransport send_transport(
params.pipe, sender_call_.get(), kPayloadTypeVP8, kPayloadTypeVP9,
- static_cast<uint8_t>(params.common.tl_discard_threshold), 0);
+ params.common.selected_tl, params_.ss.selected_sl);
test::DirectTransport recv_transport(params.pipe, receiver_call_.get());
+ std::string graph_title = params_.analyzer.graph_title;
+ if (graph_title.empty())
+ graph_title = VideoQualityTest::GenerateGraphTitle();
+
+ // In the case of different resolutions, the functions calculating PSNR and
+ // SSIM return -1.0, instead of a positive value as usual. VideoAnalyzer
+ // aborts if the average PSNR/SSIM are below the given threshold, which is
+ // 0.0 by default. Setting the thresholds to -1.1 prevents the unnecessary
+ // abort.
+ VideoStream& selected_stream = params_.ss.streams[params_.ss.selected_stream];
+ int selected_sl = params_.ss.selected_sl != -1
+ ? params_.ss.selected_sl
+ : params_.ss.num_spatial_layers - 1;
+ bool disable_quality_check =
+ selected_stream.width != params_.common.width ||
+ selected_stream.height != params_.common.height ||
+ (!params_.ss.spatial_layers.empty() &&
+ params_.ss.spatial_layers[selected_sl].scaling_factor_num !=
+ params_.ss.spatial_layers[selected_sl].scaling_factor_den);
+ if (disable_quality_check) {
+ fprintf(stderr,
+ "Warning: Calculating PSNR and SSIM for downsized resolution "
+ "not implemented yet! Skipping PSNR and SSIM calculations!");
+ }
+
VideoAnalyzer analyzer(
- &send_transport, params.analyzer.test_label,
- params.analyzer.avg_psnr_threshold, params.analyzer.avg_ssim_threshold,
- params.analyzer.test_durations_secs * params.common.fps,
- graph_data_output_file);
+ &send_transport, params_.analyzer.test_label,
+ disable_quality_check ? -1.1 : params_.analyzer.avg_psnr_threshold,
+ disable_quality_check ? -1.1 : params_.analyzer.avg_ssim_threshold,
+ params_.analyzer.test_durations_secs * params_.common.fps,
+ graph_data_output_file, graph_title,
+ kVideoSendSsrcs[params_.ss.selected_stream]);
analyzer.SetReceiver(receiver_call_->Receiver());
send_transport.SetReceiver(&analyzer);
recv_transport.SetReceiver(sender_call_->Receiver());
- SetupFullStack(params, &analyzer, &recv_transport);
- send_config_.encoding_time_observer = &analyzer;
- receive_configs_[0].renderer = &analyzer;
- for (auto& config : receive_configs_)
+ SetupCommon(&analyzer, &recv_transport);
+ video_send_config_.encoding_time_observer = &analyzer;
+ video_receive_configs_[params_.ss.selected_stream].renderer = &analyzer;
+ for (auto& config : video_receive_configs_)
config.pre_decode_callback = &analyzer;
- if (params.screenshare.enabled)
- SetupScreenshare(params);
+ if (params_.screenshare.enabled)
+ SetupScreenshare();
- CreateStreams();
- analyzer.input_ = send_stream_->Input();
- analyzer.send_stream_ = send_stream_;
+ CreateVideoStreams();
+ analyzer.input_ = video_send_stream_->Input();
+ analyzer.send_stream_ = video_send_stream_;
- CreateCapturer(params, &analyzer);
+ CreateCapturer(&analyzer);
- send_stream_->Start();
- for (size_t i = 0; i < receive_streams_.size(); ++i)
- receive_streams_[i]->Start();
+ video_send_stream_->Start();
+ for (VideoReceiveStream* receive_stream : video_receive_streams_)
+ receive_stream->Start();
capturer_->Start();
analyzer.Wait();
@@ -800,9 +996,9 @@ void VideoQualityTest::RunWithAnalyzer(const Params& params) {
recv_transport.StopSending();
capturer_->Stop();
- for (size_t i = 0; i < receive_streams_.size(); ++i)
- receive_streams_[i]->Stop();
- send_stream_->Stop();
+ for (VideoReceiveStream* receive_stream : video_receive_streams_)
+ receive_stream->Stop();
+ video_send_stream_->Stop();
DestroyStreams();
@@ -811,53 +1007,65 @@ void VideoQualityTest::RunWithAnalyzer(const Params& params) {
}
void VideoQualityTest::RunWithVideoRenderer(const Params& params) {
- ValidateParams(params);
+ params_ = params;
+ CheckParams();
rtc::scoped_ptr<test::VideoRenderer> local_preview(
- test::VideoRenderer::Create("Local Preview", params.common.width,
- params.common.height));
+ test::VideoRenderer::Create("Local Preview", params_.common.width,
+ params_.common.height));
+ size_t stream_id = params_.ss.selected_stream;
+ std::string title = "Loopback Video";
+ if (params_.ss.streams.size() > 1) {
+ std::ostringstream s;
+ s << stream_id;
+ title += " - Stream #" + s.str();
+ }
+
rtc::scoped_ptr<test::VideoRenderer> loopback_video(
- test::VideoRenderer::Create("Loopback Video", params.common.width,
- params.common.height));
+ test::VideoRenderer::Create(title.c_str(),
+ params_.ss.streams[stream_id].width,
+ params_.ss.streams[stream_id].height));
// TODO(ivica): Remove bitrate_config and use the default Call::Config(), to
// match the full stack tests.
Call::Config call_config;
- call_config.bitrate_config = params.common.call_bitrate_config;
+ call_config.bitrate_config = params_.common.call_bitrate_config;
rtc::scoped_ptr<Call> call(Call::Create(call_config));
test::LayerFilteringTransport transport(
params.pipe, call.get(), kPayloadTypeVP8, kPayloadTypeVP9,
- static_cast<uint8_t>(params.common.tl_discard_threshold), 0);
+ params.common.selected_tl, params_.ss.selected_sl);
// TODO(ivica): Use two calls to be able to merge with RunWithAnalyzer or at
// least share as much code as possible. That way this test would also match
// the full stack tests better.
transport.SetReceiver(call->Receiver());
- SetupFullStack(params, &transport, &transport);
- send_config_.local_renderer = local_preview.get();
- receive_configs_[0].renderer = loopback_video.get();
+ SetupCommon(&transport, &transport);
+
+ video_send_config_.local_renderer = local_preview.get();
+ video_receive_configs_[stream_id].renderer = loopback_video.get();
- if (params.screenshare.enabled)
- SetupScreenshare(params);
+ if (params_.screenshare.enabled)
+ SetupScreenshare();
- send_stream_ = call->CreateVideoSendStream(send_config_, encoder_config_);
+ video_send_stream_ =
+ call->CreateVideoSendStream(video_send_config_, video_encoder_config_);
VideoReceiveStream* receive_stream =
- call->CreateVideoReceiveStream(receive_configs_[0]);
- CreateCapturer(params, send_stream_->Input());
+ call->CreateVideoReceiveStream(video_receive_configs_[stream_id]);
+ CreateCapturer(video_send_stream_->Input());
receive_stream->Start();
- send_stream_->Start();
+ video_send_stream_->Start();
capturer_->Start();
test::PressEnterToContinue();
capturer_->Stop();
- send_stream_->Stop();
+ video_send_stream_->Stop();
receive_stream->Stop();
call->DestroyVideoReceiveStream(receive_stream);
- call->DestroyVideoSendStream(send_stream_);
+ call->DestroyVideoSendStream(video_send_stream_);
transport.StopSending();
}
diff --git a/webrtc/video/video_quality_test.h b/webrtc/video/video_quality_test.h
index 7b62fb3dce..dd2b011cc3 100644
--- a/webrtc/video/video_quality_test.h
+++ b/webrtc/video/video_quality_test.h
@@ -11,6 +11,7 @@
#define WEBRTC_VIDEO_VIDEO_QUALITY_TEST_H_
#include <string>
+#include <vector>
#include "webrtc/test/call_test.h"
#include "webrtc/test/frame_generator.h"
@@ -33,11 +34,11 @@ class VideoQualityTest : public test::CallTest {
int target_bitrate_bps;
int max_bitrate_bps;
std::string codec;
- size_t num_temporal_layers;
+ int num_temporal_layers;
+ int selected_tl;
int min_transmit_bps;
Call::Config::BitrateConfig call_bitrate_config;
- size_t tl_discard_threshold;
bool send_side_bwe;
} common;
struct { // Video-specific settings.
@@ -50,30 +51,56 @@ class VideoQualityTest : public test::CallTest {
} screenshare;
struct { // Analyzer settings.
std::string test_label;
- double avg_psnr_threshold;
- double avg_ssim_threshold;
+ double avg_psnr_threshold; // (*)
+ double avg_ssim_threshold; // (*)
int test_durations_secs;
std::string graph_data_output_filename;
+ std::string graph_title;
} analyzer;
FakeNetworkPipe::Config pipe;
bool logs;
+ struct { // Spatial scalability.
+ std::vector<VideoStream> streams; // If empty, one stream is assumed.
+ size_t selected_stream;
+ int num_spatial_layers;
+ int selected_sl;
+ // If empty, bitrates are generated in VP9Impl automatically.
+ std::vector<SpatialLayer> spatial_layers;
+ } ss;
};
+ // (*) Set to -1.1 if generating graph data for simulcast or SVC and the
+ // selected stream/layer doesn't have the same resolution as the largest
+ // stream/layer (to ignore the PSNR and SSIM calculation errors).
VideoQualityTest();
void RunWithAnalyzer(const Params& params);
void RunWithVideoRenderer(const Params& params);
+ static void FillScalabilitySettings(
+ Params* params,
+ const std::vector<std::string>& stream_descriptors,
+ size_t selected_stream,
+ int num_spatial_layers,
+ int selected_sl,
+ const std::vector<std::string>& sl_descriptors);
+
protected:
// No-op implementation to be able to instantiate this class from non-TEST_F
// locations.
void TestBody() override;
- void CreateCapturer(const Params& params, VideoCaptureInput* input);
- void ValidateParams(const Params& params);
- void SetupFullStack(const Params& params,
- Transport* send_transport,
- Transport* recv_transport);
- void SetupScreenshare(const Params& params);
+ // Helper methods accessing only params_.
+ std::string GenerateGraphTitle() const;
+ void CheckParams();
+
+ // Helper static methods.
+ static VideoStream DefaultVideoStream(const Params& params);
+ static std::vector<int> ParseCSV(const std::string& str);
+
+ // Helper methods for setting up the call.
+ void CreateCapturer(VideoCaptureInput* input);
+ void SetupCommon(Transport* send_transport, Transport* recv_transport);
+ void SetupScreenshare();
// We need a more general capturer than the FrameGeneratorCapturer.
rtc::scoped_ptr<test::VideoCapturer> capturer_;
@@ -82,6 +109,8 @@ class VideoQualityTest : public test::CallTest {
rtc::scoped_ptr<VideoEncoder> encoder_;
VideoCodecUnion codec_settings_;
Clock* const clock_;
+
+ Params params_;
};
} // namespace webrtc
diff --git a/webrtc/video/video_receive_stream.cc b/webrtc/video/video_receive_stream.cc
index f5cb357098..7779fddd56 100644
--- a/webrtc/video/video_receive_stream.cc
+++ b/webrtc/video/video_receive_stream.cc
@@ -12,6 +12,7 @@
#include <stdlib.h>
+#include <set>
#include <string>
#include "webrtc/base/checks.h"
@@ -19,8 +20,8 @@
#include "webrtc/call/congestion_controller.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/system_wrappers/include/clock.h"
+#include "webrtc/video/call_stats.h"
#include "webrtc/video/receive_statistics_proxy.h"
-#include "webrtc/video_engine/call_stats.h"
#include "webrtc/video_receive_stream.h"
namespace webrtc {
@@ -38,8 +39,6 @@ std::string VideoReceiveStream::Decoder::ToString() const {
ss << "{decoder: " << (decoder != nullptr ? "(VideoDecoder)" : "nullptr");
ss << ", payload_type: " << payload_type;
ss << ", payload_name: " << payload_name;
- ss << ", is_renderer: " << (is_renderer ? "yes" : "no");
- ss << ", expected_delay_ms: " << expected_delay_ms;
ss << '}';
return ss.str();
@@ -81,6 +80,7 @@ std::string VideoReceiveStream::Config::Rtp::ToString() const {
<< (rtcp_xr.receiver_reference_time_report ? "on" : "off");
ss << '}';
ss << ", remb: " << (remb ? "on" : "off");
+ ss << ", transport_cc: " << (transport_cc ? "on" : "off");
ss << ", nack: {rtp_history_ms: " << nack.rtp_history_ms << '}';
ss << ", fec: " << fec.ToString();
ss << ", rtx: {";
@@ -110,7 +110,7 @@ VideoCodec CreateDecoderVideoCodec(const VideoReceiveStream::Decoder& decoder) {
memset(&codec, 0, sizeof(codec));
codec.plType = decoder.payload_type;
- strcpy(codec.plName, decoder.payload_name.c_str());
+ strncpy(codec.plName, decoder.payload_name.c_str(), sizeof(codec.plName));
if (decoder.payload_name == "VP8") {
codec.codecType = kVideoCodecVP8;
} else if (decoder.payload_name == "VP9") {
@@ -153,16 +153,15 @@ VideoReceiveStream::VideoReceiveStream(
call_stats_(call_stats) {
LOG(LS_INFO) << "VideoReceiveStream: " << config_.ToString();
- bool send_side_bwe = UseSendSideBwe(config_.rtp.extensions);
+ bool send_side_bwe =
+ config.rtp.transport_cc && UseSendSideBwe(config_.rtp.extensions);
RemoteBitrateEstimator* bitrate_estimator =
congestion_controller_->GetRemoteBitrateEstimator(send_side_bwe);
vie_channel_.reset(new ViEChannel(
num_cpu_cores, &transport_adapter_, process_thread, nullptr,
- congestion_controller_->GetBitrateController()->
- CreateRtcpBandwidthObserver(),
- nullptr, bitrate_estimator, call_stats_->rtcp_rtt_stats(),
+ nullptr, nullptr, bitrate_estimator, call_stats_->rtcp_rtt_stats(),
congestion_controller_->pacer(), congestion_controller_->packet_router(),
1, false));
@@ -228,7 +227,7 @@ VideoReceiveStream::VideoReceiveStream(
VideoCodec codec;
memset(&codec, 0, sizeof(codec));
codec.codecType = kVideoCodecULPFEC;
- strcpy(codec.plName, "ulpfec");
+ strncpy(codec.plName, "ulpfec", sizeof(codec.plName));
codec.plType = config_.rtp.fec.ulpfec_payload_type;
RTC_CHECK_EQ(0, vie_channel_->SetReceiveCodec(codec));
}
@@ -236,7 +235,7 @@ VideoReceiveStream::VideoReceiveStream(
VideoCodec codec;
memset(&codec, 0, sizeof(codec));
codec.codecType = kVideoCodecRED;
- strcpy(codec.plName, "red");
+ strncpy(codec.plName, "red", sizeof(codec.plName));
codec.plType = config_.rtp.fec.red_payload_type;
RTC_CHECK_EQ(0, vie_channel_->SetReceiveCodec(codec));
if (config_.rtp.fec.red_rtx_payload_type != -1) {
@@ -259,21 +258,27 @@ VideoReceiveStream::VideoReceiveStream(
vie_channel_->RegisterRtcpPacketTypeCounterObserver(stats_proxy_.get());
RTC_DCHECK(!config_.decoders.empty());
+ std::set<int> decoder_payload_types;
for (size_t i = 0; i < config_.decoders.size(); ++i) {
const Decoder& decoder = config_.decoders[i];
- RTC_CHECK_EQ(0,
- vie_channel_->RegisterExternalDecoder(
- decoder.payload_type, decoder.decoder, decoder.is_renderer,
- decoder.is_renderer ? decoder.expected_delay_ms
- : config.render_delay_ms));
+ RTC_CHECK(decoder.decoder);
+ RTC_CHECK(decoder_payload_types.find(decoder.payload_type) ==
+ decoder_payload_types.end())
+ << "Duplicate payload type (" << decoder.payload_type
+ << ") for different decoders.";
+ decoder_payload_types.insert(decoder.payload_type);
+ vie_channel_->RegisterExternalDecoder(decoder.payload_type,
+ decoder.decoder);
VideoCodec codec = CreateDecoderVideoCodec(decoder);
RTC_CHECK_EQ(0, vie_channel_->SetReceiveCodec(codec));
}
- incoming_video_stream_.reset(new IncomingVideoStream(0));
+ incoming_video_stream_.reset(new IncomingVideoStream(
+ 0, config.renderer ? config.renderer->SmoothsRenderedFrames() : false));
incoming_video_stream_->SetExpectedRenderDelay(config.render_delay_ms);
+ vie_channel_->SetExpectedRenderDelay(config.render_delay_ms);
incoming_video_stream_->SetExternalCallback(this);
vie_channel_->SetIncomingVideoStream(incoming_video_stream_.get());
@@ -287,9 +292,6 @@ VideoReceiveStream::~VideoReceiveStream() {
vie_channel_->RegisterPreRenderCallback(nullptr);
vie_channel_->RegisterPreDecodeImageCallback(nullptr);
- for (size_t i = 0; i < config_.decoders.size(); ++i)
- vie_channel_->DeRegisterExternalDecoder(config_.decoders[i].payload_type);
-
call_stats_->DeregisterStatsObserver(vie_channel_->GetStatsObserver());
congestion_controller_->SetChannelRembStatus(false, false,
vie_channel_->rtp_rtcp());
diff --git a/webrtc/video/video_receive_stream.h b/webrtc/video/video_receive_stream.h
index 7b1f5394f1..0ff5269bb5 100644
--- a/webrtc/video/video_receive_stream.h
+++ b/webrtc/video/video_receive_stream.h
@@ -16,15 +16,15 @@
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/call.h"
#include "webrtc/call/transport_adapter.h"
-#include "webrtc/common_video/interface/incoming_video_stream.h"
+#include "webrtc/common_video/include/incoming_video_stream.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/video_render/include/video_render_defines.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
#include "webrtc/system_wrappers/include/clock.h"
#include "webrtc/video/encoded_frame_callback_adapter.h"
#include "webrtc/video/receive_statistics_proxy.h"
+#include "webrtc/video/vie_channel.h"
+#include "webrtc/video/vie_encoder.h"
#include "webrtc/video_encoder.h"
-#include "webrtc/video_engine/vie_channel.h"
-#include "webrtc/video_engine/vie_encoder.h"
#include "webrtc/video_receive_stream.h"
namespace webrtc {
diff --git a/webrtc/video/video_send_stream.cc b/webrtc/video/video_send_stream.cc
index 4ec923f788..656d551794 100644
--- a/webrtc/video/video_send_stream.cc
+++ b/webrtc/video/video_send_stream.cc
@@ -20,19 +20,18 @@
#include "webrtc/base/trace_event.h"
#include "webrtc/call/congestion_controller.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/pacing/include/packet_router.h"
+#include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"
+#include "webrtc/modules/pacing/packet_router.h"
+#include "webrtc/video/call_stats.h"
+#include "webrtc/video/encoder_state_feedback.h"
+#include "webrtc/video/payload_router.h"
#include "webrtc/video/video_capture_input.h"
-#include "webrtc/video_engine/call_stats.h"
-#include "webrtc/video_engine/encoder_state_feedback.h"
-#include "webrtc/video_engine/payload_router.h"
-#include "webrtc/video_engine/vie_channel.h"
-#include "webrtc/video_engine/vie_defines.h"
-#include "webrtc/video_engine/vie_encoder.h"
+#include "webrtc/video/vie_channel.h"
+#include "webrtc/video/vie_encoder.h"
#include "webrtc/video_send_stream.h"
namespace webrtc {
-class BitrateAllocator;
class PacedSender;
class RtcpIntraFrameObserver;
class TransportFeedbackObserver;
@@ -98,7 +97,7 @@ std::string VideoSendStream::Config::ToString() const {
ss << ", post_encode_callback: " << (post_encode_callback != nullptr
? "(EncodedFrameObserver)"
: "nullptr");
- ss << "local_renderer: " << (local_renderer != nullptr ? "(VideoRenderer)"
+ ss << ", local_renderer: " << (local_renderer != nullptr ? "(VideoRenderer)"
: "nullptr");
ss << ", render_delay_ms: " << render_delay_ms;
ss << ", target_delay_ms: " << target_delay_ms;
@@ -114,10 +113,14 @@ VideoSendStream::VideoSendStream(
ProcessThread* module_process_thread,
CallStats* call_stats,
CongestionController* congestion_controller,
+ BitrateAllocator* bitrate_allocator,
const VideoSendStream::Config& config,
const VideoEncoderConfig& encoder_config,
const std::map<uint32_t, RtpState>& suspended_ssrcs)
- : transport_adapter_(config.send_transport),
+ : stats_proxy_(Clock::GetRealTimeClock(),
+ config,
+ encoder_config.content_type),
+ transport_adapter_(config.send_transport),
encoded_frame_proxy_(config.post_encode_callback),
config_(config),
suspended_ssrcs_(suspended_ssrcs),
@@ -125,8 +128,7 @@ VideoSendStream::VideoSendStream(
call_stats_(call_stats),
congestion_controller_(congestion_controller),
encoder_feedback_(new EncoderStateFeedback()),
- use_config_bitrate_(true),
- stats_proxy_(Clock::GetRealTimeClock(), config) {
+ use_config_bitrate_(true) {
LOG(LS_INFO) << "VideoSendStream: " << config_.ToString();
RTC_DCHECK(!config_.rtp.ssrcs.empty());
@@ -145,7 +147,7 @@ VideoSendStream::VideoSendStream(
vie_encoder_.reset(new ViEEncoder(
num_cpu_cores, module_process_thread_, &stats_proxy_,
config.pre_encode_callback, congestion_controller_->pacer(),
- congestion_controller_->bitrate_allocator()));
+ bitrate_allocator));
RTC_CHECK(vie_encoder_->Init());
vie_channel_.reset(new ViEChannel(
@@ -197,8 +199,8 @@ VideoSendStream::VideoSendStream(
vie_channel_->SetProtectionMode(enable_protection_nack, enable_protection_fec,
config_.rtp.fec.red_payload_type,
config_.rtp.fec.ulpfec_payload_type);
- vie_encoder_->UpdateProtectionMethod(enable_protection_nack,
- enable_protection_fec);
+ vie_encoder_->SetProtectionMethod(enable_protection_nack,
+ enable_protection_fec);
ConfigureSsrcs();
@@ -346,6 +348,12 @@ bool VideoSendStream::ReconfigureVideoEncoder(
if (config.encoder_specific_settings != nullptr) {
video_codec.codecSpecific.VP9 = *reinterpret_cast<const VideoCodecVP9*>(
config.encoder_specific_settings);
+ if (video_codec.mode == kScreensharing) {
+ video_codec.codecSpecific.VP9.flexibleMode = true;
+ // For now, VP9 screensharing uses 1 temporal and 2 spatial layers.
+ RTC_DCHECK_EQ(video_codec.codecSpecific.VP9.numberOfTemporalLayers, 1);
+ RTC_DCHECK_EQ(video_codec.codecSpecific.VP9.numberOfSpatialLayers, 2);
+ }
}
video_codec.codecSpecific.VP9.numberOfTemporalLayers =
static_cast<unsigned char>(
@@ -370,6 +378,16 @@ bool VideoSendStream::ReconfigureVideoEncoder(
static_cast<unsigned char>(streams.size());
video_codec.minBitrate = streams[0].min_bitrate_bps / 1000;
RTC_DCHECK_LE(streams.size(), static_cast<size_t>(kMaxSimulcastStreams));
+ if (video_codec.codecType == kVideoCodecVP9) {
+ // If the vector is empty, bitrates will be configured automatically.
+ RTC_DCHECK(config.spatial_layers.empty() ||
+ config.spatial_layers.size() ==
+ video_codec.codecSpecific.VP9.numberOfSpatialLayers);
+ RTC_DCHECK_LE(video_codec.codecSpecific.VP9.numberOfSpatialLayers,
+ kMaxSimulcastStreams);
+ for (size_t i = 0; i < config.spatial_layers.size(); ++i)
+ video_codec.spatialLayers[i] = config.spatial_layers[i];
+ }
for (size_t i = 0; i < streams.size(); ++i) {
SimulcastStream* sim_stream = &video_codec.simulcastStream[i];
RTC_DCHECK_GT(streams[i].width, 0u);
@@ -382,8 +400,8 @@ bool VideoSendStream::ReconfigureVideoEncoder(
RTC_DCHECK_GE(streams[i].max_bitrate_bps, streams[i].target_bitrate_bps);
RTC_DCHECK_GE(streams[i].max_qp, 0);
- sim_stream->width = static_cast<unsigned short>(streams[i].width);
- sim_stream->height = static_cast<unsigned short>(streams[i].height);
+ sim_stream->width = static_cast<uint16_t>(streams[i].width);
+ sim_stream->height = static_cast<uint16_t>(streams[i].height);
sim_stream->minBitrate = streams[i].min_bitrate_bps / 1000;
sim_stream->targetBitrate = streams[i].target_bitrate_bps / 1000;
sim_stream->maxBitrate = streams[i].max_bitrate_bps / 1000;
@@ -392,12 +410,12 @@ bool VideoSendStream::ReconfigureVideoEncoder(
streams[i].temporal_layer_thresholds_bps.size() + 1);
video_codec.width = std::max(video_codec.width,
- static_cast<unsigned short>(streams[i].width));
+ static_cast<uint16_t>(streams[i].width));
video_codec.height = std::max(
- video_codec.height, static_cast<unsigned short>(streams[i].height));
+ video_codec.height, static_cast<uint16_t>(streams[i].height));
video_codec.minBitrate =
- std::min(video_codec.minBitrate,
- static_cast<unsigned int>(streams[i].min_bitrate_bps / 1000));
+ std::min(static_cast<uint16_t>(video_codec.minBitrate),
+ static_cast<uint16_t>(streams[i].min_bitrate_bps / 1000));
video_codec.maxBitrate += streams[i].max_bitrate_bps / 1000;
video_codec.qpMax = std::max(video_codec.qpMax,
static_cast<unsigned int>(streams[i].max_qp));
@@ -419,6 +437,8 @@ bool VideoSendStream::ReconfigureVideoEncoder(
stats_proxy_.OnInactiveSsrc(config_.rtp.ssrcs[i]);
}
+ stats_proxy_.SetContentType(config.content_type);
+
RTC_DCHECK_GE(config.min_transmit_bitrate_bps, 0);
vie_encoder_->SetMinTransmitBitrate(config.min_transmit_bitrate_bps / 1000);
@@ -480,7 +500,7 @@ std::map<uint32_t, RtpState> VideoSendStream::GetRtpStates() const {
std::map<uint32_t, RtpState> rtp_states;
for (size_t i = 0; i < config_.rtp.ssrcs.size(); ++i) {
uint32_t ssrc = config_.rtp.ssrcs[i];
- rtp_states[ssrc] = vie_channel_->GetRtpStateForSsrc( ssrc);
+ rtp_states[ssrc] = vie_channel_->GetRtpStateForSsrc(ssrc);
}
for (size_t i = 0; i < config_.rtp.rtx.ssrcs.size(); ++i) {
@@ -496,7 +516,7 @@ void VideoSendStream::SignalNetworkState(NetworkState state) {
// When it goes down, disable RTCP afterwards. This ensures that any packets
// sent due to the network state changed will not be dropped.
if (state == kNetworkUp)
- vie_channel_->SetRTCPMode(RtcpMode::kCompound);
+ vie_channel_->SetRTCPMode(config_.rtp.rtcp_mode);
vie_encoder_->SetNetworkTransmissionState(state == kNetworkUp);
if (state == kNetworkDown)
vie_channel_->SetRTCPMode(RtcpMode::kOff);
@@ -517,7 +537,12 @@ int64_t VideoSendStream::GetRtt() const {
return -1;
}
+int VideoSendStream::GetPaddingNeededBps() const {
+ return vie_encoder_->GetPaddingNeededBps();
+}
+
bool VideoSendStream::SetSendCodec(VideoCodec video_codec) {
+ static const int kEncoderMinBitrate = 30;
if (video_codec.maxBitrate == 0) {
// Unset max bitrate -> cap to one bit per pixel.
video_codec.maxBitrate =
@@ -525,10 +550,10 @@ bool VideoSendStream::SetSendCodec(VideoCodec video_codec) {
1000;
}
- if (video_codec.minBitrate < kViEMinCodecBitrate)
- video_codec.minBitrate = kViEMinCodecBitrate;
- if (video_codec.maxBitrate < kViEMinCodecBitrate)
- video_codec.maxBitrate = kViEMinCodecBitrate;
+ if (video_codec.minBitrate < kEncoderMinBitrate)
+ video_codec.minBitrate = kEncoderMinBitrate;
+ if (video_codec.maxBitrate < kEncoderMinBitrate)
+ video_codec.maxBitrate = kEncoderMinBitrate;
// Stop the media flow while reconfiguring.
vie_encoder_->Pause();
@@ -547,14 +572,8 @@ bool VideoSendStream::SetSendCodec(VideoCodec video_codec) {
// to send on all SSRCs at once etc.)
std::vector<uint32_t> used_ssrcs = config_.rtp.ssrcs;
used_ssrcs.resize(static_cast<size_t>(video_codec.numberOfSimulcastStreams));
-
- // Update used SSRCs.
vie_encoder_->SetSsrcs(used_ssrcs);
- // Update the protection mode, we might be switching NACK/FEC.
- vie_encoder_->UpdateProtectionMethod(vie_encoder_->nack_enabled(),
- vie_channel_->IsSendingFecEnabled());
-
// Restart the media flow
vie_encoder_->Restart();
diff --git a/webrtc/video/video_send_stream.h b/webrtc/video/video_send_stream.h
index 88c1611915..64b7fceaf3 100644
--- a/webrtc/video/video_send_stream.h
+++ b/webrtc/video/video_send_stream.h
@@ -17,7 +17,7 @@
#include "webrtc/call.h"
#include "webrtc/call/transport_adapter.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/video/encoded_frame_callback_adapter.h"
#include "webrtc/video/send_statistics_proxy.h"
@@ -27,6 +27,7 @@
namespace webrtc {
+class BitrateAllocator;
class CallStats;
class CongestionController;
class EncoderStateFeedback;
@@ -43,6 +44,7 @@ class VideoSendStream : public webrtc::VideoSendStream,
ProcessThread* module_process_thread,
CallStats* call_stats,
CongestionController* congestion_controller,
+ BitrateAllocator* bitrate_allocator,
const VideoSendStream::Config& config,
const VideoEncoderConfig& encoder_config,
const std::map<uint32_t, RtpState>& suspended_ssrcs);
@@ -68,10 +70,13 @@ class VideoSendStream : public webrtc::VideoSendStream,
RtpStateMap GetRtpStates() const;
int64_t GetRtt() const;
+ int GetPaddingNeededBps() const;
private:
bool SetSendCodec(VideoCodec video_codec);
void ConfigureSsrcs();
+
+ SendStatisticsProxy stats_proxy_;
TransportAdapter transport_adapter_;
EncodedFrameCallbackAdapter encoded_frame_proxy_;
const VideoSendStream::Config config_;
@@ -91,8 +96,6 @@ class VideoSendStream : public webrtc::VideoSendStream,
// start bitrate initially, instead of the one reported by VideoEngine (which
// defaults to too high).
bool use_config_bitrate_;
-
- SendStatisticsProxy stats_proxy_;
};
} // namespace internal
} // namespace webrtc
diff --git a/webrtc/video/video_send_stream_tests.cc b/webrtc/video/video_send_stream_tests.cc
index 59011a6162..4cf92748a8 100644
--- a/webrtc/video/video_send_stream_tests.cc
+++ b/webrtc/video/video_send_stream_tests.cc
@@ -15,22 +15,22 @@
#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/event.h"
#include "webrtc/base/logging.h"
+#include "webrtc/base/platform_thread.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/call.h"
#include "webrtc/call/transport_adapter.h"
#include "webrtc/frame_callback.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_sender.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format_vp9.h"
#include "webrtc/modules/video_coding/codecs/vp9/include/vp9.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/event_wrapper.h"
#include "webrtc/system_wrappers/include/ref_count.h"
#include "webrtc/system_wrappers/include/sleep.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
#include "webrtc/test/call_test.h"
#include "webrtc/test/configurable_frame_size_encoder.h"
#include "webrtc/test/fake_texture_frame.h"
@@ -58,6 +58,9 @@ class VideoSendStreamTest : public test::CallTest {
void TestNackRetransmission(uint32_t retransmit_ssrc,
uint8_t retransmit_payload_type);
void TestPacketFragmentationSize(VideoFormat format, bool with_fec);
+
+ void TestVp9NonFlexMode(uint8_t num_temporal_layers,
+ uint8_t num_spatial_layers);
};
TEST_F(VideoSendStreamTest, CanStartStartedStream) {
@@ -65,10 +68,10 @@ TEST_F(VideoSendStreamTest, CanStartStartedStream) {
CreateSenderCall(call_config);
test::NullTransport transport;
- CreateSendConfig(1, &transport);
- CreateStreams();
- send_stream_->Start();
- send_stream_->Start();
+ CreateSendConfig(1, 0, &transport);
+ CreateVideoStreams();
+ video_send_stream_->Start();
+ video_send_stream_->Start();
DestroyStreams();
}
@@ -77,10 +80,10 @@ TEST_F(VideoSendStreamTest, CanStopStoppedStream) {
CreateSenderCall(call_config);
test::NullTransport transport;
- CreateSendConfig(1, &transport);
- CreateStreams();
- send_stream_->Stop();
- send_stream_->Stop();
+ CreateSendConfig(1, 0, &transport);
+ CreateVideoStreams();
+ video_send_stream_->Stop();
+ video_send_stream_->Stop();
DestroyStreams();
}
@@ -99,7 +102,7 @@ TEST_F(VideoSendStreamTest, SupportsCName) {
while (packet_type != RTCPUtility::RTCPPacketTypes::kInvalid) {
if (packet_type == RTCPUtility::RTCPPacketTypes::kSdesChunk) {
EXPECT_EQ(parser.Packet().CName.CName, kCName);
- observation_complete_->Set();
+ observation_complete_.Set();
}
packet_type = parser.Iterate();
@@ -108,28 +111,27 @@ TEST_F(VideoSendStreamTest, SupportsCName) {
return SEND_PACKET;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->rtp.c_name = kCName;
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
- << "Timed out while waiting for RTCP with CNAME.";
+ EXPECT_TRUE(Wait()) << "Timed out while waiting for RTCP with CNAME.";
}
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(VideoSendStreamTest, SupportsAbsoluteSendTime) {
- static const uint8_t kAbsSendTimeExtensionId = 13;
class AbsoluteSendTimeObserver : public test::SendTest {
public:
AbsoluteSendTimeObserver() : SendTest(kDefaultTimeoutMs) {
EXPECT_TRUE(parser_->RegisterRtpHeaderExtension(
- kRtpExtensionAbsoluteSendTime, kAbsSendTimeExtensionId));
+ kRtpExtensionAbsoluteSendTime, test::kAbsSendTimeExtensionId));
}
Action OnSendRtp(const uint8_t* packet, size_t length) override {
@@ -140,26 +142,26 @@ TEST_F(VideoSendStreamTest, SupportsAbsoluteSendTime) {
EXPECT_TRUE(header.extension.hasAbsoluteSendTime);
EXPECT_EQ(header.extension.transmissionTimeOffset, 0);
EXPECT_GT(header.extension.absoluteSendTime, 0u);
- observation_complete_->Set();
+ observation_complete_.Set();
return SEND_PACKET;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->rtp.extensions.clear();
- send_config->rtp.extensions.push_back(
- RtpExtension(RtpExtension::kAbsSendTime, kAbsSendTimeExtensionId));
+ send_config->rtp.extensions.push_back(RtpExtension(
+ RtpExtension::kAbsSendTime, test::kAbsSendTimeExtensionId));
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
- << "Timed out while waiting for single RTP packet.";
+ EXPECT_TRUE(Wait()) << "Timed out while waiting for single RTP packet.";
}
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(VideoSendStreamTest, SupportsTransmissionTimeOffset) {
@@ -182,14 +184,15 @@ TEST_F(VideoSendStreamTest, SupportsTransmissionTimeOffset) {
EXPECT_FALSE(header.extension.hasAbsoluteSendTime);
EXPECT_GT(header.extension.transmissionTimeOffset, 0);
EXPECT_EQ(header.extension.absoluteSendTime, 0u);
- observation_complete_->Set();
+ observation_complete_.Set();
return SEND_PACKET;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->encoder_settings.encoder = &encoder_;
send_config->rtp.extensions.clear();
send_config->rtp.extensions.push_back(
@@ -197,14 +200,13 @@ TEST_F(VideoSendStreamTest, SupportsTransmissionTimeOffset) {
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
- << "Timed out while waiting for a single RTP packet.";
+ EXPECT_TRUE(Wait()) << "Timed out while waiting for a single RTP packet.";
}
test::DelayedEncoder encoder_;
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(VideoSendStreamTest, SupportsTransportWideSequenceNumbers) {
@@ -226,14 +228,15 @@ TEST_F(VideoSendStreamTest, SupportsTransportWideSequenceNumbers) {
EXPECT_FALSE(header.extension.hasTransmissionTimeOffset);
EXPECT_FALSE(header.extension.hasAbsoluteSendTime);
- observation_complete_->Set();
+ observation_complete_.Set();
return SEND_PACKET;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->encoder_settings.encoder = &encoder_;
send_config->rtp.extensions.clear();
send_config->rtp.extensions.push_back(
@@ -241,14 +244,13 @@ TEST_F(VideoSendStreamTest, SupportsTransportWideSequenceNumbers) {
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
- << "Timed out while waiting for a single RTP packet.";
+ EXPECT_TRUE(Wait()) << "Timed out while waiting for a single RTP packet.";
}
test::FakeEncoder encoder_;
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
class FakeReceiveStatistics : public NullReceiveStatistics {
@@ -307,83 +309,127 @@ class FakeReceiveStatistics : public NullReceiveStatistics {
StatisticianMap stats_map_;
};
-TEST_F(VideoSendStreamTest, SupportsFec) {
- class FecObserver : public test::SendTest {
- public:
- FecObserver()
- : SendTest(kDefaultTimeoutMs),
- send_count_(0),
- received_media_(false),
- received_fec_(false) {
- }
+class FecObserver : public test::SendTest {
+ public:
+ explicit FecObserver(bool header_extensions_enabled)
+ : SendTest(VideoSendStreamTest::kDefaultTimeoutMs),
+ send_count_(0),
+ received_media_(false),
+ received_fec_(false),
+ header_extensions_enabled_(header_extensions_enabled) {}
- private:
- Action OnSendRtp(const uint8_t* packet, size_t length) override {
- RTPHeader header;
- EXPECT_TRUE(parser_->Parse(packet, length, &header));
+ private:
+ Action OnSendRtp(const uint8_t* packet, size_t length) override {
+ RTPHeader header;
+ EXPECT_TRUE(parser_->Parse(packet, length, &header));
- // Send lossy receive reports to trigger FEC enabling.
- if (send_count_++ % 2 != 0) {
- // Receive statistics reporting having lost 50% of the packets.
- FakeReceiveStatistics lossy_receive_stats(
- kSendSsrcs[0], header.sequenceNumber, send_count_ / 2, 127);
- RTCPSender rtcp_sender(false, Clock::GetRealTimeClock(),
- &lossy_receive_stats, nullptr,
- transport_adapter_.get());
+ // Send lossy receive reports to trigger FEC enabling.
+ if (send_count_++ % 2 != 0) {
+ // Receive statistics reporting having lost 50% of the packets.
+ FakeReceiveStatistics lossy_receive_stats(
+ VideoSendStreamTest::kVideoSendSsrcs[0], header.sequenceNumber,
+ send_count_ / 2, 127);
+ RTCPSender rtcp_sender(false, Clock::GetRealTimeClock(),
+ &lossy_receive_stats, nullptr,
+ transport_adapter_.get());
- rtcp_sender.SetRTCPStatus(RtcpMode::kReducedSize);
- rtcp_sender.SetRemoteSSRC(kSendSsrcs[0]);
+ rtcp_sender.SetRTCPStatus(RtcpMode::kReducedSize);
+ rtcp_sender.SetRemoteSSRC(VideoSendStreamTest::kVideoSendSsrcs[0]);
- RTCPSender::FeedbackState feedback_state;
+ RTCPSender::FeedbackState feedback_state;
- EXPECT_EQ(0, rtcp_sender.SendRTCP(feedback_state, kRtcpRr));
- }
+ EXPECT_EQ(0, rtcp_sender.SendRTCP(feedback_state, kRtcpRr));
+ }
- int encapsulated_payload_type = -1;
- if (header.payloadType == kRedPayloadType) {
- encapsulated_payload_type =
- static_cast<int>(packet[header.headerLength]);
- if (encapsulated_payload_type != kFakeSendPayloadType)
- EXPECT_EQ(kUlpfecPayloadType, encapsulated_payload_type);
+ int encapsulated_payload_type = -1;
+ if (header.payloadType == VideoSendStreamTest::kRedPayloadType) {
+ encapsulated_payload_type = static_cast<int>(packet[header.headerLength]);
+ if (encapsulated_payload_type !=
+ VideoSendStreamTest::kFakeVideoSendPayloadType)
+ EXPECT_EQ(VideoSendStreamTest::kUlpfecPayloadType,
+ encapsulated_payload_type);
+ } else {
+ EXPECT_EQ(VideoSendStreamTest::kFakeVideoSendPayloadType,
+ header.payloadType);
+ }
+
+ if (header_extensions_enabled_) {
+ EXPECT_TRUE(header.extension.hasAbsoluteSendTime);
+ uint32_t kHalf24BitsSpace = 0xFFFFFF / 2;
+ if (header.extension.absoluteSendTime <= kHalf24BitsSpace &&
+ prev_header_.extension.absoluteSendTime > kHalf24BitsSpace) {
+ // 24 bits wrap.
+ EXPECT_GT(prev_header_.extension.absoluteSendTime,
+ header.extension.absoluteSendTime);
} else {
- EXPECT_EQ(kFakeSendPayloadType, header.payloadType);
+ EXPECT_GE(header.extension.absoluteSendTime,
+ prev_header_.extension.absoluteSendTime);
}
+ EXPECT_TRUE(header.extension.hasTransportSequenceNumber);
+ uint16_t seq_num_diff = header.extension.transportSequenceNumber -
+ prev_header_.extension.transportSequenceNumber;
+ EXPECT_EQ(1, seq_num_diff);
+ }
- if (encapsulated_payload_type != -1) {
- if (encapsulated_payload_type == kUlpfecPayloadType) {
- received_fec_ = true;
- } else {
- received_media_ = true;
- }
+ if (encapsulated_payload_type != -1) {
+ if (encapsulated_payload_type ==
+ VideoSendStreamTest::kUlpfecPayloadType) {
+ received_fec_ = true;
+ } else {
+ received_media_ = true;
}
+ }
- if (received_media_ && received_fec_)
- observation_complete_->Set();
+ if (received_media_ && received_fec_ && send_count_ > 100)
+ observation_complete_.Set();
- return SEND_PACKET;
- }
+ prev_header_ = header;
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
- transport_adapter_.reset(
- new internal::TransportAdapter(send_config->send_transport));
- transport_adapter_->Enable();
- send_config->rtp.fec.red_payload_type = kRedPayloadType;
- send_config->rtp.fec.ulpfec_payload_type = kUlpfecPayloadType;
- }
+ return SEND_PACKET;
+ }
- void PerformTest() override {
- EXPECT_TRUE(Wait()) << "Timed out waiting for FEC and media packets.";
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
+ transport_adapter_.reset(
+ new internal::TransportAdapter(send_config->send_transport));
+ transport_adapter_->Enable();
+ send_config->rtp.fec.red_payload_type =
+ VideoSendStreamTest::kRedPayloadType;
+ send_config->rtp.fec.ulpfec_payload_type =
+ VideoSendStreamTest::kUlpfecPayloadType;
+ if (header_extensions_enabled_) {
+ send_config->rtp.extensions.push_back(RtpExtension(
+ RtpExtension::kAbsSendTime, test::kAbsSendTimeExtensionId));
+ send_config->rtp.extensions.push_back(
+ RtpExtension(RtpExtension::kTransportSequenceNumber,
+ test::kTransportSequenceNumberExtensionId));
}
+ }
- rtc::scoped_ptr<internal::TransportAdapter> transport_adapter_;
- int send_count_;
- bool received_media_;
- bool received_fec_;
- } test;
+ void PerformTest() override {
+ EXPECT_TRUE(Wait()) << "Timed out waiting for FEC and media packets.";
+ }
+
+ rtc::scoped_ptr<internal::TransportAdapter> transport_adapter_;
+ int send_count_;
+ bool received_media_;
+ bool received_fec_;
+ bool header_extensions_enabled_;
+ RTPHeader prev_header_;
+};
+
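+// A free-standing sketch of the 24-bit wrap check performed in
+// FecObserver::OnSendRtp() above. The absolute send time extension is only
+// 24 bits wide, so "newer" must be decided modulo 2^24. The helper name and
+// standalone form are illustrative only (assumes <cstdint> integer types):
+bool AbsSendTimeIsNewer(uint32_t current, uint32_t previous) {
+  const uint32_t kHalf24BitsSpace = 0xFFFFFF / 2;
+  if (current <= kHalf24BitsSpace && previous > kHalf24BitsSpace)
+    return true;  // Wrapped past 0xFFFFFF back towards zero.
+  return current >= previous;  // No wrap: plain ordering applies.
+}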
+TEST_F(VideoSendStreamTest, SupportsFecWithExtensions) {
+ FecObserver test(true);
+
+ RunBaseTest(&test);
+}
+
+TEST_F(VideoSendStreamTest, SupportsFecWithoutExtensions) {
+ FecObserver test(false);
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
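// For reference: the FEC tests above fake a lossy receiver by answering
// every other media packet with an RTCP receiver report whose fraction-lost
// field is 127/256 (about 50%), which is what makes the send side enable
// RED/ULPFEC. A condensed sketch of that sequence; the helper name and
// parameters are invented for illustration:
void SendLossyReceiverReport(Transport* transport,
                             uint16_t last_sequence_number,
                             uint32_t cumulative_loss) {
  FakeReceiveStatistics lossy_receive_stats(
      VideoSendStreamTest::kVideoSendSsrcs[0], last_sequence_number,
      cumulative_loss, 127);  // fraction_lost, in 1/256 units.
  RTCPSender rtcp_sender(false, Clock::GetRealTimeClock(),
                         &lossy_receive_stats, nullptr, transport);
  rtcp_sender.SetRTCPStatus(RtcpMode::kReducedSize);
  rtcp_sender.SetRemoteSSRC(VideoSendStreamTest::kVideoSendSsrcs[0]);
  RTCPSender::FeedbackState feedback_state;
  EXPECT_EQ(0, rtcp_sender.SendRTCP(feedback_state, kRtcpRr));
}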
void VideoSendStreamTest::TestNackRetransmission(
@@ -414,7 +460,7 @@ void VideoSendStreamTest::TestNackRetransmission(
nullptr, transport_adapter_.get());
rtcp_sender.SetRTCPStatus(RtcpMode::kReducedSize);
- rtcp_sender.SetRemoteSSRC(kSendSsrcs[0]);
+ rtcp_sender.SetRemoteSSRC(kVideoSendSsrcs[0]);
RTCPSender::FeedbackState feedback_state;
@@ -426,8 +472,8 @@ void VideoSendStreamTest::TestNackRetransmission(
uint16_t sequence_number = header.sequenceNumber;
if (header.ssrc == retransmit_ssrc_ &&
- retransmit_ssrc_ != kSendSsrcs[0]) {
- // Not kSendSsrcs[0], assume correct RTX packet. Extract sequence
+ retransmit_ssrc_ != kVideoSendSsrcs[0]) {
+ // Not kVideoSendSsrcs[0]; assume a correct RTX packet and extract its sequence
// number.
const uint8_t* rtx_header = packet + header.headerLength;
sequence_number = (rtx_header[0] << 8) + rtx_header[1];
@@ -436,27 +482,27 @@ void VideoSendStreamTest::TestNackRetransmission(
if (sequence_number == nacked_sequence_number_) {
EXPECT_EQ(retransmit_ssrc_, header.ssrc);
EXPECT_EQ(retransmit_payload_type_, header.payloadType);
- observation_complete_->Set();
+ observation_complete_.Set();
}
return SEND_PACKET;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
transport_adapter_.reset(
new internal::TransportAdapter(send_config->send_transport));
transport_adapter_->Enable();
send_config->rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
send_config->rtp.rtx.payload_type = retransmit_payload_type_;
- if (retransmit_ssrc_ != kSendSsrcs[0])
+ if (retransmit_ssrc_ != kVideoSendSsrcs[0])
send_config->rtp.rtx.ssrcs.push_back(retransmit_ssrc_);
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
- << "Timed out while waiting for NACK retransmission.";
+ EXPECT_TRUE(Wait()) << "Timed out while waiting for NACK retransmission.";
}
rtc::scoped_ptr<internal::TransportAdapter> transport_adapter_;
@@ -466,12 +512,12 @@ void VideoSendStreamTest::TestNackRetransmission(
int nacked_sequence_number_;
} test(retransmit_ssrc, retransmit_payload_type);
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(VideoSendStreamTest, RetransmitsNack) {
// Normal NACKs should use the send SSRC.
- TestNackRetransmission(kSendSsrcs[0], kFakeSendPayloadType);
+ TestNackRetransmission(kVideoSendSsrcs[0], kFakeVideoSendPayloadType);
}
TEST_F(VideoSendStreamTest, RetransmitsNackOverRtx) {
@@ -572,7 +618,7 @@ void VideoSendStreamTest::TestPacketFragmentationSize(VideoFormat format,
accumulated_payload_ = 0;
if (current_size_rtp_ == stop_size_) {
// Done! (Don't increase size again; more packets may still arrive at stop_size.)
- observation_complete_->Set();
+ observation_complete_.Set();
} else {
// Increase next expected frame size. If testing with FEC, make sure
// a FEC packet has been received for this frame size before
@@ -596,13 +642,13 @@ void VideoSendStreamTest::TestPacketFragmentationSize(VideoFormat format,
if (packet_count_++ % 2 != 0) {
// Receive statistics reporting having lost 50% of the packets.
FakeReceiveStatistics lossy_receive_stats(
- kSendSsrcs[0], header.sequenceNumber, packet_count_ / 2, 127);
+ kVideoSendSsrcs[0], header.sequenceNumber, packet_count_ / 2, 127);
RTCPSender rtcp_sender(false, Clock::GetRealTimeClock(),
&lossy_receive_stats, nullptr,
transport_adapter_.get());
rtcp_sender.SetRTCPStatus(RtcpMode::kReducedSize);
- rtcp_sender.SetRemoteSSRC(kSendSsrcs[0]);
+ rtcp_sender.SetRemoteSSRC(kVideoSendSsrcs[0]);
RTCPSender::FeedbackState feedback_state;
@@ -627,9 +673,10 @@ void VideoSendStreamTest::TestPacketFragmentationSize(VideoFormat format,
return config;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
transport_adapter_.reset(
new internal::TransportAdapter(send_config->send_transport));
transport_adapter_->Enable();
@@ -651,8 +698,7 @@ void VideoSendStreamTest::TestPacketFragmentationSize(VideoFormat format,
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
- << "Timed out while observing incoming RTP packets.";
+ EXPECT_TRUE(Wait()) << "Timed out while observing incoming RTP packets.";
}
rtc::scoped_ptr<internal::TransportAdapter> transport_adapter_;
@@ -677,7 +723,7 @@ void VideoSendStreamTest::TestPacketFragmentationSize(VideoFormat format,
FrameFragmentationTest test(
kMaxPacketSize, start, stop, format == kGeneric, with_fec);
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
// TODO(sprang): Is there any way of speeding up these tests?
@@ -752,7 +798,7 @@ TEST_F(VideoSendStreamTest, SuspendBelowMinBitrate) {
VideoSendStream::Stats stats = stream_->GetStats();
if (stats.suspended == false) {
// Stats flipped to false. Test is complete.
- observation_complete_->Set();
+ observation_complete_.Set();
}
SendRtcpFeedback(0); // REMB is only sent if value is > 0.
}
@@ -782,15 +828,16 @@ TEST_F(VideoSendStreamTest, SuspendBelowMinBitrate) {
high_remb_bps_ = value;
}
- void OnStreamsCreated(
+ void OnVideoStreamsCreated(
VideoSendStream* send_stream,
const std::vector<VideoReceiveStream*>& receive_streams) override {
stream_ = send_stream;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
transport_adapter_.reset(
new internal::TransportAdapter(send_config->send_transport));
transport_adapter_->Enable();
@@ -806,8 +853,7 @@ TEST_F(VideoSendStreamTest, SuspendBelowMinBitrate) {
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
- << "Timed out during suspend-below-min-bitrate test.";
+ EXPECT_TRUE(Wait()) << "Timed out during suspend-below-min-bitrate test.";
}
enum TestState {
@@ -819,13 +865,13 @@ TEST_F(VideoSendStreamTest, SuspendBelowMinBitrate) {
virtual void SendRtcpFeedback(int remb_value)
EXCLUSIVE_LOCKS_REQUIRED(crit_) {
- FakeReceiveStatistics receive_stats(
- kSendSsrcs[0], last_sequence_number_, rtp_count_, 0);
+ FakeReceiveStatistics receive_stats(kVideoSendSsrcs[0],
+ last_sequence_number_, rtp_count_, 0);
RTCPSender rtcp_sender(false, clock_, &receive_stats, nullptr,
transport_adapter_.get());
rtcp_sender.SetRTCPStatus(RtcpMode::kReducedSize);
- rtcp_sender.SetRemoteSSRC(kSendSsrcs[0]);
+ rtcp_sender.SetRemoteSSRC(kVideoSendSsrcs[0]);
if (remb_value > 0) {
rtcp_sender.SetREMBStatus(true);
rtcp_sender.SetREMBData(remb_value, std::vector<uint32_t>());
@@ -847,7 +893,7 @@ TEST_F(VideoSendStreamTest, SuspendBelowMinBitrate) {
int high_remb_bps_ GUARDED_BY(crit_);
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(VideoSendStreamTest, NoPaddingWhenVideoIsMuted) {
@@ -874,14 +920,14 @@ TEST_F(VideoSendStreamTest, NoPaddingWhenVideoIsMuted) {
if (last_packet_time_ms_ > 0 &&
clock_->TimeInMilliseconds() - last_packet_time_ms_ >
kVideoMutedThresholdMs)
- observation_complete_->Set();
+ observation_complete_.Set();
// Receive statistics reporting having lost 50% of the packets.
- FakeReceiveStatistics receive_stats(kSendSsrcs[0], 1, 1, 0);
+ FakeReceiveStatistics receive_stats(kVideoSendSsrcs[0], 1, 1, 0);
RTCPSender rtcp_sender(false, Clock::GetRealTimeClock(), &receive_stats,
nullptr, transport_adapter_.get());
rtcp_sender.SetRTCPStatus(RtcpMode::kReducedSize);
- rtcp_sender.SetRemoteSSRC(kSendSsrcs[0]);
+ rtcp_sender.SetRemoteSSRC(kVideoSendSsrcs[0]);
RTCPSender::FeedbackState feedback_state;
@@ -889,15 +935,16 @@ TEST_F(VideoSendStreamTest, NoPaddingWhenVideoIsMuted) {
return SEND_PACKET;
}
- void OnTransportsCreated(
- test::PacketTransport* send_transport,
- test::PacketTransport* receive_transport) override {
- transport_adapter_.reset(
- new internal::TransportAdapter(receive_transport));
+ test::PacketTransport* CreateReceiveTransport() override {
+ test::PacketTransport* transport = new test::PacketTransport(
+ nullptr, this, test::PacketTransport::kReceiver,
+ FakeNetworkPipe::Config());
+ transport_adapter_.reset(new internal::TransportAdapter(transport));
transport_adapter_->Enable();
+ return transport;
}
- size_t GetNumStreams() const override { return 3; }
+ size_t GetNumVideoStreams() const override { return 3; }
virtual void OnFrameGeneratorCapturerCreated(
test::FrameGeneratorCapturer* frame_generator_capturer) {
@@ -906,7 +953,7 @@ TEST_F(VideoSendStreamTest, NoPaddingWhenVideoIsMuted) {
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
+ EXPECT_TRUE(Wait())
<< "Timed out while waiting for RTP packets to stop being sent.";
}
@@ -917,7 +964,7 @@ TEST_F(VideoSendStreamTest, NoPaddingWhenVideoIsMuted) {
test::FrameGeneratorCapturer* capturer_ GUARDED_BY(crit_);
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
// This test first observes "high" bitrate use at which point it sends a REMB to
@@ -966,14 +1013,14 @@ TEST_F(VideoSendStreamTest, MinTransmitBitrateRespectsRemb) {
bitrate_capped_ = true;
} else if (bitrate_capped_ &&
total_bitrate_bps < kRembRespectedBitrateBps) {
- observation_complete_->Set();
+ observation_complete_.Set();
}
}
// Packets don't have to be delivered since the test is the receiver.
return DROP_PACKET;
}
- void OnStreamsCreated(
+ void OnVideoStreamsCreated(
VideoSendStream* send_stream,
const std::vector<VideoReceiveStream*>& receive_streams) override {
stream_ = send_stream;
@@ -984,9 +1031,10 @@ TEST_F(VideoSendStreamTest, MinTransmitBitrateRespectsRemb) {
rtp_rtcp_->SetRTCPStatus(RtcpMode::kReducedSize);
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
feedback_transport_.reset(
new internal::TransportAdapter(send_config->send_transport));
feedback_transport_->Enable();
@@ -994,7 +1042,7 @@ TEST_F(VideoSendStreamTest, MinTransmitBitrateRespectsRemb) {
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
+ EXPECT_TRUE(Wait())
<< "Timeout while waiting for low bitrate stats after REMB.";
}
@@ -1004,7 +1052,7 @@ TEST_F(VideoSendStreamTest, MinTransmitBitrateRespectsRemb) {
bool bitrate_capped_;
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
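// For reference, REMB feedback like the cap used in this test can be
// assembled with the same RTCPSender helpers seen in SuspendBelowMinBitrate
// above; a sketch only, with receive_stats and bitrate_bps illustrative:
RTCPSender rtcp_sender(false, Clock::GetRealTimeClock(), &receive_stats,
                       nullptr, transport_adapter_.get());
rtcp_sender.SetRTCPStatus(RtcpMode::kReducedSize);
rtcp_sender.SetRemoteSSRC(kVideoSendSsrcs[0]);
rtcp_sender.SetREMBStatus(true);  // Attach REMB to outgoing RTCP.
rtcp_sender.SetREMBData(bitrate_bps, std::vector<uint32_t>());
RTCPSender::FeedbackState feedback_state;
rtcp_sender.SendRTCP(feedback_state, kRtcpRr);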
TEST_F(VideoSendStreamTest, CanReconfigureToUseStartBitrateAbovePreviousMax) {
@@ -1039,24 +1087,24 @@ TEST_F(VideoSendStreamTest, CanReconfigureToUseStartBitrateAbovePreviousMax) {
CreateSenderCall(Call::Config());
test::NullTransport transport;
- CreateSendConfig(1, &transport);
+ CreateSendConfig(1, 0, &transport);
Call::Config::BitrateConfig bitrate_config;
bitrate_config.start_bitrate_bps =
- 2 * encoder_config_.streams[0].max_bitrate_bps;
+ 2 * video_encoder_config_.streams[0].max_bitrate_bps;
sender_call_->SetBitrateConfig(bitrate_config);
StartBitrateObserver encoder;
- send_config_.encoder_settings.encoder = &encoder;
+ video_send_config_.encoder_settings.encoder = &encoder;
- CreateStreams();
+ CreateVideoStreams();
- EXPECT_EQ(encoder_config_.streams[0].max_bitrate_bps / 1000,
+ EXPECT_EQ(video_encoder_config_.streams[0].max_bitrate_bps / 1000,
encoder.GetStartBitrateKbps());
- encoder_config_.streams[0].max_bitrate_bps =
+ video_encoder_config_.streams[0].max_bitrate_bps =
2 * bitrate_config.start_bitrate_bps;
- send_stream_->ReconfigureVideoEncoder(encoder_config_);
+ video_send_stream_->ReconfigureVideoEncoder(video_encoder_config_);
// New bitrate should be reconfigured above the previous max. As there's no
// network connection this shouldn't be flaky, as no bitrate should've been
@@ -1070,16 +1118,16 @@ TEST_F(VideoSendStreamTest, CanReconfigureToUseStartBitrateAbovePreviousMax) {
TEST_F(VideoSendStreamTest, CapturesTextureAndVideoFrames) {
class FrameObserver : public I420FrameCallback {
public:
- FrameObserver() : output_frame_event_(EventWrapper::Create()) {}
+ FrameObserver() : output_frame_event_(false, false) {}
void FrameCallback(VideoFrame* video_frame) override {
output_frames_.push_back(*video_frame);
- output_frame_event_->Set();
+ output_frame_event_.Set();
}
void WaitOutputFrame() {
- const unsigned long kWaitFrameTimeoutMs = 3000;
- EXPECT_EQ(kEventSignaled, output_frame_event_->Wait(kWaitFrameTimeoutMs))
+ const int kWaitFrameTimeoutMs = 3000;
+ EXPECT_TRUE(output_frame_event_.Wait(kWaitFrameTimeoutMs))
<< "Timeout while waiting for output frames.";
}
@@ -1092,46 +1140,46 @@ TEST_F(VideoSendStreamTest, CapturesTextureAndVideoFrames) {
std::vector<VideoFrame> output_frames_;
// Indicate an output frame has arrived.
- rtc::scoped_ptr<EventWrapper> output_frame_event_;
+ rtc::Event output_frame_event_;
};
// Initialize send stream.
CreateSenderCall(Call::Config());
test::NullTransport transport;
- CreateSendConfig(1, &transport);
+ CreateSendConfig(1, 0, &transport);
FrameObserver observer;
- send_config_.pre_encode_callback = &observer;
- CreateStreams();
+ video_send_config_.pre_encode_callback = &observer;
+ CreateVideoStreams();
// Prepare five input frames. Send ordinary VideoFrames and texture frames
// alternately.
std::vector<VideoFrame> input_frames;
- int width = static_cast<int>(encoder_config_.streams[0].width);
- int height = static_cast<int>(encoder_config_.streams[0].height);
+ int width = static_cast<int>(video_encoder_config_.streams[0].width);
+ int height = static_cast<int>(video_encoder_config_.streams[0].height);
test::FakeNativeHandle* handle1 = new test::FakeNativeHandle();
test::FakeNativeHandle* handle2 = new test::FakeNativeHandle();
test::FakeNativeHandle* handle3 = new test::FakeNativeHandle();
- input_frames.push_back(test::CreateFakeNativeHandleFrame(
+ input_frames.push_back(test::FakeNativeHandle::CreateFrame(
handle1, width, height, 1, 1, kVideoRotation_0));
- input_frames.push_back(test::CreateFakeNativeHandleFrame(
+ input_frames.push_back(test::FakeNativeHandle::CreateFrame(
handle2, width, height, 2, 2, kVideoRotation_0));
input_frames.push_back(CreateVideoFrame(width, height, 3));
input_frames.push_back(CreateVideoFrame(width, height, 4));
- input_frames.push_back(test::CreateFakeNativeHandleFrame(
+ input_frames.push_back(test::FakeNativeHandle::CreateFrame(
handle3, width, height, 5, 5, kVideoRotation_0));
- send_stream_->Start();
+ video_send_stream_->Start();
for (size_t i = 0; i < input_frames.size(); i++) {
- send_stream_->Input()->IncomingCapturedFrame(input_frames[i]);
+ video_send_stream_->Input()->IncomingCapturedFrame(input_frames[i]);
// Do not send the next frame too fast, so the frame dropper won't drop it.
if (i < input_frames.size() - 1)
- SleepMs(1000 / encoder_config_.streams[0].max_framerate);
+ SleepMs(1000 / video_encoder_config_.streams[0].max_framerate);
// Wait until the output frame is received before sending the next input
// frame. Otherwise the previous input frame may be replaced before delivery.
observer.WaitOutputFrame();
}
- send_stream_->Stop();
+ video_send_stream_->Stop();
// Test if the input and output frames are the same. render_time_ms and
// timestamp are not compared because capturer sets those values.
@@ -1240,7 +1288,7 @@ TEST_F(VideoSendStreamTest, EncoderIsProperlyInitializedAndDestroyed) {
const std::vector<FrameType>* frame_types) override {
EXPECT_TRUE(IsReadyForEncode());
- observation_complete_->Set();
+ observation_complete_.Set();
return 0;
}
@@ -1273,7 +1321,7 @@ TEST_F(VideoSendStreamTest, EncoderIsProperlyInitializedAndDestroyed) {
return 0;
}
- void OnStreamsCreated(
+ void OnVideoStreamsCreated(
VideoSendStream* send_stream,
const std::vector<VideoReceiveStream*>& receive_streams) override {
// Encoder initialization should be done in stream construction before
@@ -1282,16 +1330,16 @@ TEST_F(VideoSendStreamTest, EncoderIsProperlyInitializedAndDestroyed) {
stream_ = send_stream;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->encoder_settings.encoder = this;
encoder_config_ = *encoder_config;
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
- << "Timed out while waiting for Encode.";
+ EXPECT_TRUE(Wait()) << "Timed out while waiting for Encode.";
EXPECT_EQ(0u, num_releases());
stream_->ReconfigureVideoEncoder(encoder_config_);
EXPECT_EQ(0u, num_releases());
@@ -1301,8 +1349,7 @@ TEST_F(VideoSendStreamTest, EncoderIsProperlyInitializedAndDestroyed) {
EXPECT_TRUE(IsReadyForEncode());
stream_->Start();
// Sanity check, make sure we still encode frames with this encoder.
- EXPECT_EQ(kEventSignaled, Wait())
- << "Timed out while waiting for Encode.";
+ EXPECT_TRUE(Wait()) << "Timed out while waiting for Encode.";
}
rtc::CriticalSection crit_;
@@ -1314,7 +1361,7 @@ TEST_F(VideoSendStreamTest, EncoderIsProperlyInitializedAndDestroyed) {
VideoEncoderConfig encoder_config_;
} test_encoder;
- RunBaseTest(&test_encoder, FakeNetworkPipe::Config());
+ RunBaseTest(&test_encoder);
EXPECT_TRUE(test_encoder.IsReleased());
EXPECT_EQ(1u, test_encoder.num_releases());
@@ -1330,14 +1377,15 @@ TEST_F(VideoSendStreamTest, EncoderSetupPropagatesCommonEncoderConfigValues) {
num_initializations_(0) {}
private:
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->encoder_settings.encoder = this;
encoder_config_ = *encoder_config;
}
- void OnStreamsCreated(
+ void OnVideoStreamsCreated(
VideoSendStream* send_stream,
const std::vector<VideoReceiveStream*>& receive_streams) override {
stream_ = send_stream;
@@ -1372,14 +1420,13 @@ TEST_F(VideoSendStreamTest, EncoderSetupPropagatesCommonEncoderConfigValues) {
VideoEncoderConfig encoder_config_;
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
static const size_t kVideoCodecConfigObserverNumberOfTemporalLayers = 4;
template <typename T>
class VideoCodecConfigObserver : public test::SendTest,
public test::FakeEncoder {
-
public:
VideoCodecConfigObserver(VideoCodecType video_codec_type,
const char* codec_name)
@@ -1392,9 +1439,10 @@ class VideoCodecConfigObserver : public test::SendTest,
}
private:
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->encoder_settings.encoder = this;
send_config->encoder_settings.payload_name = codec_name_;
@@ -1407,7 +1455,7 @@ class VideoCodecConfigObserver : public test::SendTest,
encoder_config_ = *encoder_config;
}
- void OnStreamsCreated(
+ void OnVideoStreamsCreated(
VideoSendStream* send_stream,
const std::vector<VideoReceiveStream*>& receive_streams) override {
stream_ = send_stream;
@@ -1500,17 +1548,17 @@ void VideoCodecConfigObserver<VideoCodecVP9>::VerifyCodecSpecifics(
TEST_F(VideoSendStreamTest, EncoderSetupPropagatesVp8Config) {
VideoCodecConfigObserver<VideoCodecVP8> test(kVideoCodecVP8, "VP8");
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(VideoSendStreamTest, EncoderSetupPropagatesVp9Config) {
VideoCodecConfigObserver<VideoCodecVP9> test(kVideoCodecVP9, "VP9");
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(VideoSendStreamTest, EncoderSetupPropagatesH264Config) {
VideoCodecConfigObserver<VideoCodecH264> test(kVideoCodecH264, "H264");
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(VideoSendStreamTest, RtcpSenderReportContainsMediaBytesSent) {
@@ -1522,6 +1570,7 @@ TEST_F(VideoSendStreamTest, RtcpSenderReportContainsMediaBytesSent) {
private:
Action OnSendRtp(const uint8_t* packet, size_t length) override {
+ rtc::CritScope lock(&crit_);
RTPHeader header;
EXPECT_TRUE(parser_->Parse(packet, length, &header));
++rtp_packets_sent_;
@@ -1530,6 +1579,7 @@ TEST_F(VideoSendStreamTest, RtcpSenderReportContainsMediaBytesSent) {
}
Action OnSendRtcp(const uint8_t* packet, size_t length) override {
+ rtc::CritScope lock(&crit_);
RTCPUtility::RTCPParserV2 parser(packet, length, true);
EXPECT_TRUE(parser.IsValid());
@@ -1542,7 +1592,7 @@ TEST_F(VideoSendStreamTest, RtcpSenderReportContainsMediaBytesSent) {
if (parser.Packet().SR.SenderOctetCount > 0 &&
parser.Packet().SR.SenderPacketCount == rtp_packets_sent_) {
EXPECT_EQ(media_bytes_sent_, parser.Packet().SR.SenderOctetCount);
- observation_complete_->Set();
+ observation_complete_.Set();
}
}
packet_type = parser.Iterate();
@@ -1552,15 +1602,15 @@ TEST_F(VideoSendStreamTest, RtcpSenderReportContainsMediaBytesSent) {
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
- << "Timed out while waiting for RTCP sender report.";
+ EXPECT_TRUE(Wait()) << "Timed out while waiting for RTCP sender report.";
}
- size_t rtp_packets_sent_;
- size_t media_bytes_sent_;
+ rtc::CriticalSection crit_;
+ size_t rtp_packets_sent_ GUARDED_BY(&crit_);
+ size_t media_bytes_sent_ GUARDED_BY(&crit_);
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
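// The CritScope/GUARDED_BY additions above follow the standard Clang
// thread-safety-analysis idiom: every access to an annotated member must be
// made while holding the named lock, or the compiler warns. A minimal
// sketch (class and member names hypothetical):
class PacketCounter {
 public:
  void OnPacket(size_t bytes) {
    rtc::CritScope lock(&crit_);  // Holds crit_ for the enclosing scope.
    ++packets_;
    bytes_ += bytes;
  }

 private:
  rtc::CriticalSection crit_;
  size_t packets_ GUARDED_BY(&crit_) = 0;
  size_t bytes_ GUARDED_BY(&crit_) = 0;
};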
TEST_F(VideoSendStreamTest, TranslatesTwoLayerScreencastToTargetBitrate) {
@@ -1578,13 +1628,14 @@ TEST_F(VideoSendStreamTest, TranslatesTwoLayerScreencastToTargetBitrate) {
size_t max_payload_size) override {
EXPECT_EQ(static_cast<unsigned int>(kScreencastTargetBitrateKbps),
config->targetBitrate);
- observation_complete_->Set();
+ observation_complete_.Set();
return test::FakeEncoder::InitEncode(
config, number_of_cores, max_payload_size);
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->encoder_settings.encoder = this;
EXPECT_EQ(1u, encoder_config->streams.size());
EXPECT_TRUE(
@@ -1595,15 +1646,26 @@ TEST_F(VideoSendStreamTest, TranslatesTwoLayerScreencastToTargetBitrate) {
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
+ EXPECT_TRUE(Wait())
<< "Timed out while waiting for the encoder to be initialized.";
}
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
-TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) {
+// Disabled on LinuxAsan:
+// https://bugs.chromium.org/p/webrtc/issues/detail?id=5382
+#if defined(ADDRESS_SANITIZER) && defined(WEBRTC_LINUX)
+#define MAYBE_ReconfigureBitratesSetsEncoderBitratesCorrectly \
+ DISABLED_ReconfigureBitratesSetsEncoderBitratesCorrectly
+#else
+#define MAYBE_ReconfigureBitratesSetsEncoderBitratesCorrectly \
+ ReconfigureBitratesSetsEncoderBitratesCorrectly
+#endif
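+// Note: gtest still compiles a test whose name starts with DISABLED_; it is
+// only excluded from the default run, so the sanitizer build keeps compile
+// coverage for this test body.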
+
+TEST_F(VideoSendStreamTest,
+ MAYBE_ReconfigureBitratesSetsEncoderBitratesCorrectly) {
// These are chosen to be "kind of odd" so that they cannot accidentally
// match default values.
static const int kMinBitrateKbps = 137;
@@ -1631,7 +1693,7 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) {
codecSettings->startBitrate);
EXPECT_EQ(static_cast<unsigned int>(kMaxBitrateKbps),
codecSettings->maxBitrate);
- observation_complete_->Set();
+ observation_complete_.Set();
} else if (num_initializations_ == 1) {
EXPECT_EQ(static_cast<unsigned int>(kLowerMaxBitrateKbps),
codecSettings->maxBitrate);
@@ -1658,9 +1720,10 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) {
return config;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->encoder_settings.encoder = this;
// Set bitrates lower/higher than min/max to make sure they are properly
// capped.
@@ -1673,7 +1736,7 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) {
call_ = sender_call;
}
- void OnStreamsCreated(
+ void OnVideoStreamsCreated(
VideoSendStream* send_stream,
const std::vector<VideoReceiveStream*>& receive_streams) override {
send_stream_ = send_stream;
@@ -1684,7 +1747,7 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) {
bitrate_config.start_bitrate_bps = kIncreasedStartBitrateKbps * 1000;
bitrate_config.max_bitrate_bps = kIncreasedMaxBitrateKbps * 1000;
call_->SetBitrateConfig(bitrate_config);
- EXPECT_EQ(kEventSignaled, Wait())
+ EXPECT_TRUE(Wait())
<< "Timed out while waiting encoder to be configured.";
encoder_config_.streams[0].min_bitrate_bps = 0;
encoder_config_.streams[0].max_bitrate_bps = kLowerMaxBitrateKbps * 1000;
@@ -1706,7 +1769,7 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) {
webrtc::VideoEncoderConfig encoder_config_;
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
TEST_F(VideoSendStreamTest, ReportsSentResolution) {
@@ -1747,36 +1810,37 @@ TEST_F(VideoSendStreamTest, ReportsSentResolution) {
return -1;
}
- observation_complete_->Set();
+ observation_complete_.Set();
return 0;
}
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
send_config->encoder_settings.encoder = this;
EXPECT_EQ(kNumStreams, encoder_config->streams.size());
}
- size_t GetNumStreams() const override { return kNumStreams; }
+ size_t GetNumVideoStreams() const override { return kNumStreams; }
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
+ EXPECT_TRUE(Wait())
<< "Timed out while waiting for the encoder to send one frame.";
VideoSendStream::Stats stats = send_stream_->GetStats();
for (size_t i = 0; i < kNumStreams; ++i) {
- ASSERT_TRUE(stats.substreams.find(kSendSsrcs[i]) !=
+ ASSERT_TRUE(stats.substreams.find(kVideoSendSsrcs[i]) !=
stats.substreams.end())
- << "No stats for SSRC: " << kSendSsrcs[i]
+ << "No stats for SSRC: " << kVideoSendSsrcs[i]
<< ", stats should exist as soon as frames have been encoded.";
VideoSendStream::StreamStats ssrc_stats =
- stats.substreams[kSendSsrcs[i]];
+ stats.substreams[kVideoSendSsrcs[i]];
EXPECT_EQ(kEncodedResolution[i].width, ssrc_stats.width);
EXPECT_EQ(kEncodedResolution[i].height, ssrc_stats.height);
}
}
- void OnStreamsCreated(
+ void OnVideoStreamsCreated(
VideoSendStream* send_stream,
const std::vector<VideoReceiveStream*>& receive_streams) override {
send_stream_ = send_stream;
@@ -1785,106 +1849,392 @@ TEST_F(VideoSendStreamTest, ReportsSentResolution) {
VideoSendStream* send_stream_;
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
-class VP9HeaderObeserver : public test::SendTest {
+class Vp9HeaderObserver : public test::SendTest {
public:
- VP9HeaderObeserver()
- : SendTest(VideoSendStreamTest::kDefaultTimeoutMs),
+ Vp9HeaderObserver()
+ : SendTest(VideoSendStreamTest::kLongTimeoutMs),
vp9_encoder_(VP9Encoder::Create()),
- vp9_settings_(VideoEncoder::GetDefaultVp9Settings()) {}
+ vp9_settings_(VideoEncoder::GetDefaultVp9Settings()),
+ packets_sent_(0),
+ frames_sent_(0) {}
- virtual void ModifyConfigsHook(
+ virtual void ModifyVideoConfigsHook(
VideoSendStream::Config* send_config,
std::vector<VideoReceiveStream::Config>* receive_configs,
VideoEncoderConfig* encoder_config) {}
- virtual void InspectHeader(RTPVideoHeaderVP9* vp9videoHeader) = 0;
+ virtual void InspectHeader(const RTPVideoHeaderVP9& vp9) = 0;
private:
const int kVp9PayloadType = 105;
- void ModifyConfigs(VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
encoder_config->encoder_specific_settings = &vp9_settings_;
send_config->encoder_settings.encoder = vp9_encoder_.get();
send_config->encoder_settings.payload_name = "VP9";
send_config->encoder_settings.payload_type = kVp9PayloadType;
- ModifyConfigsHook(send_config, receive_configs, encoder_config);
+ ModifyVideoConfigsHook(send_config, receive_configs, encoder_config);
+ EXPECT_EQ(1u, encoder_config->streams.size());
+ encoder_config->streams[0].temporal_layer_thresholds_bps.resize(
+ vp9_settings_.numberOfTemporalLayers - 1);
+ encoder_config_ = *encoder_config;
}
void PerformTest() override {
- EXPECT_EQ(kEventSignaled, Wait())
- << "Test timed out waiting for VP9 packet";
+ EXPECT_TRUE(Wait()) << "Test timed out waiting for VP9 packet, num frames "
+ << frames_sent_;
}
Action OnSendRtp(const uint8_t* packet, size_t length) override {
RTPHeader header;
EXPECT_TRUE(parser_->Parse(packet, length, &header));
- if (header.payloadType == kVp9PayloadType) {
- RtpDepacketizerVp9 vp9depacketizer;
- RtpDepacketizer::ParsedPayload vp9payload;
- const uint8_t* vp9_packet = packet + header.headerLength;
- size_t payload_length =
- length - header.headerLength - header.paddingLength;
-
- if (payload_length > 0) {
- bool parse_vp9header_successful =
- vp9depacketizer.Parse(&vp9payload, vp9_packet, payload_length);
- bool is_vp9_codec_type =
- vp9payload.type.Video.codec == RtpVideoCodecTypes::kRtpVideoVp9;
- EXPECT_TRUE(parse_vp9header_successful);
- EXPECT_TRUE(is_vp9_codec_type);
-
- RTPVideoHeaderVP9* vp9videoHeader =
- &vp9payload.type.Video.codecHeader.VP9;
- if (parse_vp9header_successful && is_vp9_codec_type) {
- InspectHeader(vp9videoHeader);
- } else {
- observation_complete_->Set();
- }
+ EXPECT_EQ(kVp9PayloadType, header.payloadType);
+ const uint8_t* payload = packet + header.headerLength;
+ size_t payload_length = length - header.headerLength - header.paddingLength;
+
+ bool new_packet = packets_sent_ == 0 ||
+ IsNewerSequenceNumber(header.sequenceNumber,
+ last_header_.sequenceNumber);
+ if (payload_length > 0 && new_packet) {
+ RtpDepacketizer::ParsedPayload parsed;
+ RtpDepacketizerVp9 depacketizer;
+ EXPECT_TRUE(depacketizer.Parse(&parsed, payload, payload_length));
+ EXPECT_EQ(RtpVideoCodecTypes::kRtpVideoVp9, parsed.type.Video.codec);
+ // Verify common fields for all configurations.
+ VerifyCommonHeader(parsed.type.Video.codecHeader.VP9);
+ CompareConsecutiveFrames(header, parsed.type.Video);
+ // Verify configuration specific settings.
+ InspectHeader(parsed.type.Video.codecHeader.VP9);
+
+ ++packets_sent_;
+ if (header.markerBit) {
+ ++frames_sent_;
}
+ last_header_ = header;
+ last_vp9_ = parsed.type.Video.codecHeader.VP9;
}
-
return SEND_PACKET;
}
protected:
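+ // Example: with the two-byte picture ID used here (maximum 0x7FFF), the
+ // sequence ...32766, 32767, 0, 1... is continuous across the wrap; any
+ // other value following 32767 counts as a gap.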
+ bool ContinuousPictureId(const RTPVideoHeaderVP9& vp9) const {
+ if (last_vp9_.picture_id > vp9.picture_id) {
+ return vp9.picture_id == 0; // Wrap.
+ } else {
+ return vp9.picture_id == last_vp9_.picture_id + 1;
+ }
+ }
+
+ void VerifySpatialIdxWithinFrame(const RTPVideoHeaderVP9& vp9) const {
+ bool new_layer = vp9.spatial_idx != last_vp9_.spatial_idx;
+ EXPECT_EQ(new_layer, vp9.beginning_of_frame);
+ EXPECT_EQ(new_layer, last_vp9_.end_of_frame);
+ EXPECT_EQ(new_layer ? last_vp9_.spatial_idx + 1 : last_vp9_.spatial_idx,
+ vp9.spatial_idx);
+ }
+
+ void VerifyFixedTemporalLayerStructure(const RTPVideoHeaderVP9& vp9,
+ uint8_t num_layers) const {
+ switch (num_layers) {
+ case 0:
+ VerifyTemporalLayerStructure0(vp9);
+ break;
+ case 1:
+ VerifyTemporalLayerStructure1(vp9);
+ break;
+ case 2:
+ VerifyTemporalLayerStructure2(vp9);
+ break;
+ case 3:
+ VerifyTemporalLayerStructure3(vp9);
+ break;
+ default:
+ RTC_NOTREACHED();
+ }
+ }
+
+ void VerifyTemporalLayerStructure0(const RTPVideoHeaderVP9& vp9) const {
+ EXPECT_EQ(kNoTl0PicIdx, vp9.tl0_pic_idx);
+ EXPECT_EQ(kNoTemporalIdx, vp9.temporal_idx); // no tid
+ EXPECT_FALSE(vp9.temporal_up_switch);
+ }
+
+ void VerifyTemporalLayerStructure1(const RTPVideoHeaderVP9& vp9) const {
+ EXPECT_NE(kNoTl0PicIdx, vp9.tl0_pic_idx);
+ EXPECT_EQ(0, vp9.temporal_idx); // 0,0,0,...
+ EXPECT_FALSE(vp9.temporal_up_switch);
+ }
+
+ void VerifyTemporalLayerStructure2(const RTPVideoHeaderVP9& vp9) const {
+ EXPECT_NE(kNoTl0PicIdx, vp9.tl0_pic_idx);
+ EXPECT_GE(vp9.temporal_idx, 0); // 0,1,0,1,... (tid reset on I-frames).
+ EXPECT_LE(vp9.temporal_idx, 1);
+ EXPECT_EQ(vp9.temporal_idx > 0, vp9.temporal_up_switch);
+ if (IsNewPictureId(vp9)) {
+ uint8_t expected_tid =
+ (!vp9.inter_pic_predicted || last_vp9_.temporal_idx == 1) ? 0 : 1;
+ EXPECT_EQ(expected_tid, vp9.temporal_idx);
+ }
+ }
+
+ void VerifyTemporalLayerStructure3(const RTPVideoHeaderVP9& vp9) const {
+ EXPECT_NE(kNoTl0PicIdx, vp9.tl0_pic_idx);
+ EXPECT_GE(vp9.temporal_idx, 0); // 0,2,1,2,... (tid reset on I-frames).
+ EXPECT_LE(vp9.temporal_idx, 2);
+ if (IsNewPictureId(vp9) && vp9.inter_pic_predicted) {
+ EXPECT_NE(vp9.temporal_idx, last_vp9_.temporal_idx);
+ switch (vp9.temporal_idx) {
+ case 0:
+ EXPECT_EQ(2, last_vp9_.temporal_idx);
+ EXPECT_FALSE(vp9.temporal_up_switch);
+ break;
+ case 1:
+ EXPECT_EQ(2, last_vp9_.temporal_idx);
+ EXPECT_TRUE(vp9.temporal_up_switch);
+ break;
+ case 2:
+ EXPECT_EQ(last_vp9_.temporal_idx == 0, vp9.temporal_up_switch);
+ break;
+ }
+ }
+ }
+
+ void VerifyTl0Idx(const RTPVideoHeaderVP9& vp9) const {
+ if (vp9.tl0_pic_idx == kNoTl0PicIdx)
+ return;
+
+ uint8_t expected_tl0_idx = last_vp9_.tl0_pic_idx;
+ if (vp9.temporal_idx == 0)
+ ++expected_tl0_idx;
+ EXPECT_EQ(expected_tl0_idx, vp9.tl0_pic_idx);
+ }
+
+ bool IsNewPictureId(const RTPVideoHeaderVP9& vp9) const {
+ return frames_sent_ > 0 && (vp9.picture_id != last_vp9_.picture_id);
+ }
+
+ // Flexible mode (F=1): Non-flexible mode (F=0):
+ //
+ // +-+-+-+-+-+-+-+-+ +-+-+-+-+-+-+-+-+
+ // |I|P|L|F|B|E|V|-| |I|P|L|F|B|E|V|-|
+ // +-+-+-+-+-+-+-+-+ +-+-+-+-+-+-+-+-+
+ // I: |M| PICTURE ID | I: |M| PICTURE ID |
+ // +-+-+-+-+-+-+-+-+ +-+-+-+-+-+-+-+-+
+ // M: | EXTENDED PID | M: | EXTENDED PID |
+ // +-+-+-+-+-+-+-+-+ +-+-+-+-+-+-+-+-+
+ // L: | T |U| S |D| L: | T |U| S |D|
+ // +-+-+-+-+-+-+-+-+ +-+-+-+-+-+-+-+-+
+ // P,F: | P_DIFF |X|N| | TL0PICIDX |
+ // +-+-+-+-+-+-+-+-+ +-+-+-+-+-+-+-+-+
+ // X: |EXTENDED P_DIFF| V: | SS .. |
+ // +-+-+-+-+-+-+-+-+ +-+-+-+-+-+-+-+-+
+ // V: | SS .. |
+ // +-+-+-+-+-+-+-+-+
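+ //
+ // For reference, the flag bits in the first descriptor byte above map to
+ // masks 0x80 (I), 0x40 (P), 0x20 (L), 0x10 (F), 0x08 (B), 0x04 (E) and
+ // 0x02 (V), per the VP9 RTP payload draft.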
+ void VerifyCommonHeader(const RTPVideoHeaderVP9& vp9) const {
+ EXPECT_EQ(kMaxTwoBytePictureId, vp9.max_picture_id); // M:1
+ EXPECT_NE(kNoPictureId, vp9.picture_id); // I:1
+ EXPECT_EQ(vp9_settings_.flexibleMode, vp9.flexible_mode); // F
+ EXPECT_GE(vp9.spatial_idx, 0); // S
+ EXPECT_LT(vp9.spatial_idx, vp9_settings_.numberOfSpatialLayers);
+ if (vp9.ss_data_available) // V
+ VerifySsData(vp9);
+
+ if (frames_sent_ == 0)
+ EXPECT_FALSE(vp9.inter_pic_predicted); // P
+
+ if (!vp9.inter_pic_predicted) {
+ EXPECT_TRUE(vp9.temporal_idx == 0 || vp9.temporal_idx == kNoTemporalIdx);
+ EXPECT_FALSE(vp9.temporal_up_switch);
+ }
+ }
+
+ // Scalability structure (SS).
+ //
+ // +-+-+-+-+-+-+-+-+
+ // V: | N_S |Y|G|-|-|-|
+ // +-+-+-+-+-+-+-+-+
+ // Y: | WIDTH | N_S + 1 times
+ // +-+-+-+-+-+-+-+-+
+ // | HEIGHT |
+ // +-+-+-+-+-+-+-+-+
+ // G: | N_G |
+ // +-+-+-+-+-+-+-+-+
+ // N_G: | T |U| R |-|-| N_G times
+ // +-+-+-+-+-+-+-+-+
+ // | P_DIFF | R times
+ // +-+-+-+-+-+-+-+-+
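+ //
+ // Worked example for the resolution loop below: with num_spatial_layers
+ // == 2 and a (hypothetical) 640x360 top layer, layer 1 must report
+ // 640x360 and layer 0, one downscale step below it, 320x180.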
+ void VerifySsData(const RTPVideoHeaderVP9& vp9) const {
+ EXPECT_TRUE(vp9.ss_data_available); // V
+ EXPECT_EQ(vp9_settings_.numberOfSpatialLayers, // N_S + 1
+ vp9.num_spatial_layers);
+ EXPECT_TRUE(vp9.spatial_layer_resolution_present); // Y:1
+ size_t expected_width = encoder_config_.streams[0].width;
+ size_t expected_height = encoder_config_.streams[0].height;
+ for (int i = vp9.num_spatial_layers - 1; i >= 0; --i) {
+ EXPECT_EQ(expected_width, vp9.width[i]); // WIDTH
+ EXPECT_EQ(expected_height, vp9.height[i]); // HEIGHT
+ expected_width /= 2;
+ expected_height /= 2;
+ }
+ }
+
+ void CompareConsecutiveFrames(const RTPHeader& header,
+ const RTPVideoHeader& video) const {
+ const RTPVideoHeaderVP9& vp9 = video.codecHeader.VP9;
+
+ bool new_frame = packets_sent_ == 0 ||
+ IsNewerTimestamp(header.timestamp, last_header_.timestamp);
+ EXPECT_EQ(new_frame, video.isFirstPacket);
+ if (!new_frame) {
+ EXPECT_FALSE(last_header_.markerBit);
+ EXPECT_EQ(last_header_.timestamp, header.timestamp);
+ EXPECT_EQ(last_vp9_.picture_id, vp9.picture_id);
+ EXPECT_EQ(last_vp9_.temporal_idx, vp9.temporal_idx);
+ EXPECT_EQ(last_vp9_.tl0_pic_idx, vp9.tl0_pic_idx);
+ VerifySpatialIdxWithinFrame(vp9);
+ return;
+ }
+ // New frame.
+ EXPECT_TRUE(vp9.beginning_of_frame);
+
+ // Compare with last packet in previous frame.
+ if (frames_sent_ == 0)
+ return;
+ EXPECT_TRUE(last_vp9_.end_of_frame);
+ EXPECT_TRUE(last_header_.markerBit);
+ EXPECT_TRUE(ContinuousPictureId(vp9));
+ VerifyTl0Idx(vp9);
+ }
+
rtc::scoped_ptr<VP9Encoder> vp9_encoder_;
VideoCodecVP9 vp9_settings_;
+ webrtc::VideoEncoderConfig encoder_config_;
+ RTPHeader last_header_;
+ RTPVideoHeaderVP9 last_vp9_;
+ size_t packets_sent_;
+ size_t frames_sent_;
};
-TEST_F(VideoSendStreamTest, VP9NoFlexMode) {
- class NoFlexibleMode : public VP9HeaderObeserver {
- void InspectHeader(RTPVideoHeaderVP9* vp9videoHeader) override {
- EXPECT_FALSE(vp9videoHeader->flexible_mode);
- observation_complete_->Set();
+TEST_F(VideoSendStreamTest, Vp9NonFlexMode_1Tl1SLayers) {
+ const uint8_t kNumTemporalLayers = 1;
+ const uint8_t kNumSpatialLayers = 1;
+ TestVp9NonFlexMode(kNumTemporalLayers, kNumSpatialLayers);
+}
+
+TEST_F(VideoSendStreamTest, Vp9NonFlexMode_2Tl1SLayers) {
+ const uint8_t kNumTemporalLayers = 2;
+ const uint8_t kNumSpatialLayers = 1;
+ TestVp9NonFlexMode(kNumTemporalLayers, kNumSpatialLayers);
+}
+
+TEST_F(VideoSendStreamTest, Vp9NonFlexMode_3Tl1SLayers) {
+ const uint8_t kNumTemporalLayers = 3;
+ const uint8_t kNumSpatialLayers = 1;
+ TestVp9NonFlexMode(kNumTemporalLayers, kNumSpatialLayers);
+}
+
+TEST_F(VideoSendStreamTest, Vp9NonFlexMode_1Tl2SLayers) {
+ const uint8_t kNumTemporalLayers = 1;
+ const uint8_t kNumSpatialLayers = 2;
+ TestVp9NonFlexMode(kNumTemporalLayers, kNumSpatialLayers);
+}
+
+TEST_F(VideoSendStreamTest, Vp9NonFlexMode_2Tl2SLayers) {
+ const uint8_t kNumTemporalLayers = 2;
+ const uint8_t kNumSpatialLayers = 2;
+ TestVp9NonFlexMode(kNumTemporalLayers, kNumSpatialLayers);
+}
+
+TEST_F(VideoSendStreamTest, Vp9NonFlexMode_3Tl2SLayers) {
+ const uint8_t kNumTemporalLayers = 3;
+ const uint8_t kNumSpatialLayers = 2;
+ TestVp9NonFlexMode(kNumTemporalLayers, kNumSpatialLayers);
+}
+
+void VideoSendStreamTest::TestVp9NonFlexMode(uint8_t num_temporal_layers,
+ uint8_t num_spatial_layers) {
+ static const size_t kNumFramesToSend = 100;
+ // Set to less than kNumFramesToSend and coprime to the length of the
+ // temporal layer structures, to verify that the temporal id is reset on
+ // key frames.
+ static const int kKeyFrameInterval = 31;
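+ // (The 2- and 3-layer temporal patterns above repeat every 2 and 4 frames;
+ // 31 shares no factor with either, so key frames land at varying positions
+ // within the pattern.)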
+ class NonFlexibleMode : public Vp9HeaderObserver {
+ public:
+ NonFlexibleMode(uint8_t num_temporal_layers, uint8_t num_spatial_layers)
+ : num_temporal_layers_(num_temporal_layers),
+ num_spatial_layers_(num_spatial_layers),
+ l_field_(num_temporal_layers > 1 || num_spatial_layers > 1) {}
+ void ModifyVideoConfigsHook(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
+ vp9_settings_.flexibleMode = false;
+ vp9_settings_.frameDroppingOn = false;
+ vp9_settings_.keyFrameInterval = kKeyFrameInterval;
+ vp9_settings_.numberOfTemporalLayers = num_temporal_layers_;
+ vp9_settings_.numberOfSpatialLayers = num_spatial_layers_;
+ }
+
+ void InspectHeader(const RTPVideoHeaderVP9& vp9) override {
+ bool ss_data_expected = !vp9.inter_pic_predicted &&
+ vp9.beginning_of_frame && vp9.spatial_idx == 0;
+ EXPECT_EQ(ss_data_expected, vp9.ss_data_available);
+ EXPECT_EQ(vp9.spatial_idx > 0, vp9.inter_layer_predicted); // D
+ EXPECT_EQ(!vp9.inter_pic_predicted,
+ frames_sent_ % kKeyFrameInterval == 0);
+
+ if (IsNewPictureId(vp9)) {
+ EXPECT_EQ(0, vp9.spatial_idx);
+ EXPECT_EQ(num_spatial_layers_ - 1, last_vp9_.spatial_idx);
+ }
+
+ VerifyFixedTemporalLayerStructure(vp9,
+ l_field_ ? num_temporal_layers_ : 0);
+
+ if (frames_sent_ > kNumFramesToSend)
+ observation_complete_.Set();
}
- } test;
+ const uint8_t num_temporal_layers_;
+ const uint8_t num_spatial_layers_;
+ const bool l_field_;
+ } test(num_temporal_layers, num_spatial_layers);
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
-TEST_F(VideoSendStreamTest, DISABLED_VP9FlexMode) {
- class FlexibleMode : public VP9HeaderObeserver {
- void ModifyConfigsHook(
+#if !defined(MEMORY_SANITIZER)
+// Fails under MemorySanitizer:
+// See https://code.google.com/p/webrtc/issues/detail?id=5402.
+TEST_F(VideoSendStreamTest, Vp9FlexModeRefCount) {
+ class FlexibleMode : public Vp9HeaderObserver {
+ void ModifyVideoConfigsHook(
VideoSendStream::Config* send_config,
std::vector<VideoReceiveStream::Config>* receive_configs,
VideoEncoderConfig* encoder_config) override {
+ encoder_config->content_type = VideoEncoderConfig::ContentType::kScreen;
vp9_settings_.flexibleMode = true;
+ vp9_settings_.numberOfTemporalLayers = 1;
+ vp9_settings_.numberOfSpatialLayers = 2;
}
- void InspectHeader(RTPVideoHeaderVP9* vp9videoHeader) override {
- EXPECT_TRUE(vp9videoHeader->flexible_mode);
- observation_complete_->Set();
+ void InspectHeader(const RTPVideoHeaderVP9& vp9_header) override {
+ EXPECT_TRUE(vp9_header.flexible_mode);
+ EXPECT_EQ(kNoTl0PicIdx, vp9_header.tl0_pic_idx);
+ if (vp9_header.inter_pic_predicted) {
+ EXPECT_GT(vp9_header.num_ref_pics, 0u);
+ observation_complete_.Set();
+ }
}
-
} test;
- RunBaseTest(&test, FakeNetworkPipe::Config());
+ RunBaseTest(&test);
}
+#endif
} // namespace webrtc
diff --git a/webrtc/video/vie_channel.cc b/webrtc/video/vie_channel.cc
new file mode 100644
index 0000000000..bc23c9d467
--- /dev/null
+++ b/webrtc/video/vie_channel.cc
@@ -0,0 +1,1218 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video/vie_channel.h"
+
+#include <algorithm>
+#include <map>
+#include <vector>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/platform_thread.h"
+#include "webrtc/common.h"
+#include "webrtc/common_video/include/incoming_video_stream.h"
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/frame_callback.h"
+#include "webrtc/modules/pacing/paced_sender.h"
+#include "webrtc/modules/pacing/packet_router.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "webrtc/modules/utility/include/process_thread.h"
+#include "webrtc/modules/video_coding/include/video_coding.h"
+#include "webrtc/modules/video_processing/include/video_processing.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/metrics.h"
+#include "webrtc/video/call_stats.h"
+#include "webrtc/video/payload_router.h"
+#include "webrtc/video/receive_statistics_proxy.h"
+#include "webrtc/video/report_block_stats.h"
+
+namespace webrtc {
+
+const int kMaxDecodeWaitTimeMs = 50;
+static const int kMaxTargetDelayMs = 10000;
+const int kMinSendSidePacketHistorySize = 600;
+const int kMaxPacketAgeToNack = 450;
+const int kMaxNackListSize = 250;
+
+// Helper class receiving statistics callbacks.
+class ChannelStatsObserver : public CallStatsObserver {
+ public:
+ explicit ChannelStatsObserver(ViEChannel* owner) : owner_(owner) {}
+ virtual ~ChannelStatsObserver() {}
+
+ // Implements CallStatsObserver.
+ virtual void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) {
+ owner_->OnRttUpdate(avg_rtt_ms, max_rtt_ms);
+ }
+
+ private:
+ ViEChannel* const owner_;
+};
+
+class ViEChannelProtectionCallback : public VCMProtectionCallback {
+ public:
+ explicit ViEChannelProtectionCallback(ViEChannel* owner) : owner_(owner) {}
+ ~ViEChannelProtectionCallback() {}
+
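+ // The VCM requests separate FEC parameters for key frames and delta
+ // frames; the channel forwards the request and reports back the video,
+ // NACK and FEC bitrates actually sent.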
+ int ProtectionRequest(
+ const FecProtectionParams* delta_fec_params,
+ const FecProtectionParams* key_fec_params,
+ uint32_t* sent_video_rate_bps,
+ uint32_t* sent_nack_rate_bps,
+ uint32_t* sent_fec_rate_bps) override {
+ return owner_->ProtectionRequest(delta_fec_params, key_fec_params,
+ sent_video_rate_bps, sent_nack_rate_bps,
+ sent_fec_rate_bps);
+ }
+
+ private:
+ ViEChannel* owner_;
+};
+
+ViEChannel::ViEChannel(uint32_t number_of_cores,
+ Transport* transport,
+ ProcessThread* module_process_thread,
+ RtcpIntraFrameObserver* intra_frame_observer,
+ RtcpBandwidthObserver* bandwidth_observer,
+ TransportFeedbackObserver* transport_feedback_observer,
+ RemoteBitrateEstimator* remote_bitrate_estimator,
+ RtcpRttStats* rtt_stats,
+ PacedSender* paced_sender,
+ PacketRouter* packet_router,
+ size_t max_rtp_streams,
+ bool sender)
+ : number_of_cores_(number_of_cores),
+ sender_(sender),
+ module_process_thread_(module_process_thread),
+ crit_(CriticalSectionWrapper::CreateCriticalSection()),
+ send_payload_router_(new PayloadRouter()),
+ vcm_protection_callback_(new ViEChannelProtectionCallback(this)),
+ vcm_(VideoCodingModule::Create(Clock::GetRealTimeClock(),
+ nullptr,
+ nullptr)),
+ vie_receiver_(vcm_, remote_bitrate_estimator, this),
+ vie_sync_(vcm_),
+ stats_observer_(new ChannelStatsObserver(this)),
+ receive_stats_callback_(nullptr),
+ incoming_video_stream_(nullptr),
+ intra_frame_observer_(intra_frame_observer),
+ rtt_stats_(rtt_stats),
+ paced_sender_(paced_sender),
+ packet_router_(packet_router),
+ bandwidth_observer_(bandwidth_observer),
+ transport_feedback_observer_(transport_feedback_observer),
+ decode_thread_(ChannelDecodeThreadFunction, this, "DecodingThread"),
+ nack_history_size_sender_(kMinSendSidePacketHistorySize),
+ max_nack_reordering_threshold_(kMaxPacketAgeToNack),
+ pre_render_callback_(NULL),
+ report_block_stats_sender_(new ReportBlockStats()),
+ time_of_first_rtt_ms_(-1),
+ rtt_sum_ms_(0),
+ last_rtt_ms_(0),
+ num_rtts_(0),
+ rtp_rtcp_modules_(
+ CreateRtpRtcpModules(!sender,
+ vie_receiver_.GetReceiveStatistics(),
+ transport,
+ intra_frame_observer_,
+ bandwidth_observer_.get(),
+ transport_feedback_observer_,
+ rtt_stats_,
+ &rtcp_packet_type_counter_observer_,
+ remote_bitrate_estimator,
+ paced_sender_,
+ packet_router_,
+ &send_bitrate_observer_,
+ &send_frame_count_observer_,
+ &send_side_delay_observer_,
+ max_rtp_streams)),
+ num_active_rtp_rtcp_modules_(1) {
+ vie_receiver_.SetRtpRtcpModule(rtp_rtcp_modules_[0]);
+ vcm_->SetNackSettings(kMaxNackListSize, max_nack_reordering_threshold_, 0);
+}
+
+int32_t ViEChannel::Init() {
+ static const int kDefaultRenderDelayMs = 10;
+ module_process_thread_->RegisterModule(vie_receiver_.GetReceiveStatistics());
+
+ // RTP/RTCP initialization.
+ module_process_thread_->RegisterModule(rtp_rtcp_modules_[0]);
+
+ rtp_rtcp_modules_[0]->SetKeyFrameRequestMethod(kKeyFrameReqPliRtcp);
+ if (paced_sender_) {
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->SetStorePacketsStatus(true, nack_history_size_sender_);
+ }
+ packet_router_->AddRtpModule(rtp_rtcp_modules_[0]);
+ if (sender_) {
+ std::list<RtpRtcp*> send_rtp_modules(1, rtp_rtcp_modules_[0]);
+ send_payload_router_->SetSendingRtpModules(send_rtp_modules);
+ RTC_DCHECK(!send_payload_router_->active());
+ }
+ if (vcm_->RegisterReceiveCallback(this) != 0) {
+ return -1;
+ }
+ vcm_->RegisterFrameTypeCallback(this);
+ vcm_->RegisterReceiveStatisticsCallback(this);
+ vcm_->RegisterDecoderTimingCallback(this);
+ vcm_->SetRenderDelay(kDefaultRenderDelayMs);
+
+ module_process_thread_->RegisterModule(vcm_);
+ module_process_thread_->RegisterModule(&vie_sync_);
+
+ return 0;
+}
+
+ViEChannel::~ViEChannel() {
+ UpdateHistograms();
+ // Make sure we don't get more callbacks from the RTP module.
+ module_process_thread_->DeRegisterModule(
+ vie_receiver_.GetReceiveStatistics());
+ module_process_thread_->DeRegisterModule(vcm_);
+ module_process_thread_->DeRegisterModule(&vie_sync_);
+ send_payload_router_->SetSendingRtpModules(std::list<RtpRtcp*>());
+ for (size_t i = 0; i < num_active_rtp_rtcp_modules_; ++i)
+ packet_router_->RemoveRtpModule(rtp_rtcp_modules_[i]);
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ module_process_thread_->DeRegisterModule(rtp_rtcp);
+ delete rtp_rtcp;
+ }
+ if (!sender_)
+ StopDecodeThread();
+ // Release modules.
+ VideoCodingModule::Destroy(vcm_);
+}
+
+void ViEChannel::UpdateHistograms() {
+ int64_t now = Clock::GetRealTimeClock()->TimeInMilliseconds();
+
+ {
+ CriticalSectionScoped cs(crit_.get());
+ int64_t elapsed_sec = (now - time_of_first_rtt_ms_) / 1000;
+ if (time_of_first_rtt_ms_ != -1 && num_rtts_ > 0 &&
+ elapsed_sec > metrics::kMinRunTimeInSeconds) {
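+ // Adding num_rtts_ / 2 before the integer division rounds the average
+ // to the nearest millisecond instead of truncating.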
+ int64_t avg_rtt_ms = (rtt_sum_ms_ + num_rtts_ / 2) / num_rtts_;
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ "WebRTC.Video.AverageRoundTripTimeInMilliseconds", avg_rtt_ms);
+ }
+ }
+
+ if (sender_) {
+ RtcpPacketTypeCounter rtcp_counter;
+ GetSendRtcpPacketTypeCounter(&rtcp_counter);
+ int64_t elapsed_sec = rtcp_counter.TimeSinceFirstPacketInMs(now) / 1000;
+ if (elapsed_sec > metrics::kMinRunTimeInSeconds) {
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ "WebRTC.Video.NackPacketsReceivedPerMinute",
+ rtcp_counter.nack_packets * 60 / elapsed_sec);
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ "WebRTC.Video.FirPacketsReceivedPerMinute",
+ rtcp_counter.fir_packets * 60 / elapsed_sec);
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ "WebRTC.Video.PliPacketsReceivedPerMinute",
+ rtcp_counter.pli_packets * 60 / elapsed_sec);
+ if (rtcp_counter.nack_requests > 0) {
+ RTC_HISTOGRAM_PERCENTAGE_SPARSE(
+ "WebRTC.Video.UniqueNackRequestsReceivedInPercent",
+ rtcp_counter.UniqueNackRequestsInPercent());
+ }
+ int fraction_lost = report_block_stats_sender_->FractionLostInPercent();
+ if (fraction_lost != -1) {
+ RTC_HISTOGRAM_PERCENTAGE_SPARSE("WebRTC.Video.SentPacketsLostInPercent",
+ fraction_lost);
+ }
+ }
+
+ StreamDataCounters rtp;
+ StreamDataCounters rtx;
+ GetSendStreamDataCounters(&rtp, &rtx);
+ StreamDataCounters rtp_rtx = rtp;
+ rtp_rtx.Add(rtx);
+ elapsed_sec = rtp_rtx.TimeSinceFirstPacketInMs(now) / 1000;
+ if (elapsed_sec > metrics::kMinRunTimeInSeconds) {
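+ // TotalBytes() * 8 converts bytes to bits; dividing by elapsed_sec and
+ // then by 1000 yields kbps.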
+ RTC_HISTOGRAM_COUNTS_SPARSE_100000(
+ "WebRTC.Video.BitrateSentInKbps",
+ static_cast<int>(rtp_rtx.transmitted.TotalBytes() * 8 / elapsed_sec /
+ 1000));
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ "WebRTC.Video.MediaBitrateSentInKbps",
+ static_cast<int>(rtp.MediaPayloadBytes() * 8 / elapsed_sec / 1000));
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ "WebRTC.Video.PaddingBitrateSentInKbps",
+ static_cast<int>(rtp_rtx.transmitted.padding_bytes * 8 / elapsed_sec /
+ 1000));
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ "WebRTC.Video.RetransmittedBitrateSentInKbps",
+ static_cast<int>(rtp_rtx.retransmitted.TotalBytes() * 8 /
+ elapsed_sec / 1000));
+ if (rtp_rtcp_modules_[0]->RtxSendStatus() != kRtxOff) {
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ "WebRTC.Video.RtxBitrateSentInKbps",
+ static_cast<int>(rtx.transmitted.TotalBytes() * 8 / elapsed_sec /
+ 1000));
+ }
+ bool fec_enabled = false;
+ uint8_t pltype_red = 0;
+ uint8_t pltype_fec = 0;
+ rtp_rtcp_modules_[0]->GenericFECStatus(&fec_enabled, &pltype_red,
+ &pltype_fec);
+ if (fec_enabled) {
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ "WebRTC.Video.FecBitrateSentInKbps",
+ static_cast<int>(rtp_rtx.fec.TotalBytes() * 8 / elapsed_sec /
+ 1000));
+ }
+ }
+ } else if (vie_receiver_.GetRemoteSsrc() > 0) {
+ // Get receive stats if we are receiving packets, i.e. there is a remote
+ // SSRC.
+ RtcpPacketTypeCounter rtcp_counter;
+ GetReceiveRtcpPacketTypeCounter(&rtcp_counter);
+ int64_t elapsed_sec = rtcp_counter.TimeSinceFirstPacketInMs(now) / 1000;
+ if (elapsed_sec > metrics::kMinRunTimeInSeconds) {
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ "WebRTC.Video.NackPacketsSentPerMinute",
+ rtcp_counter.nack_packets * 60 / elapsed_sec);
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ "WebRTC.Video.FirPacketsSentPerMinute",
+ rtcp_counter.fir_packets * 60 / elapsed_sec);
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ "WebRTC.Video.PliPacketsSentPerMinute",
+ rtcp_counter.pli_packets * 60 / elapsed_sec);
+ if (rtcp_counter.nack_requests > 0) {
+ RTC_HISTOGRAM_PERCENTAGE_SPARSE(
+ "WebRTC.Video.UniqueNackRequestsSentInPercent",
+ rtcp_counter.UniqueNackRequestsInPercent());
+ }
+ }
+
+ StreamDataCounters rtp;
+ StreamDataCounters rtx;
+ GetReceiveStreamDataCounters(&rtp, &rtx);
+ StreamDataCounters rtp_rtx = rtp;
+ rtp_rtx.Add(rtx);
+ elapsed_sec = rtp_rtx.TimeSinceFirstPacketInMs(now) / 1000;
+ if (elapsed_sec > metrics::kMinRunTimeInSeconds) {
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ "WebRTC.Video.BitrateReceivedInKbps",
+ static_cast<int>(rtp_rtx.transmitted.TotalBytes() * 8 / elapsed_sec /
+ 1000));
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ "WebRTC.Video.MediaBitrateReceivedInKbps",
+ static_cast<int>(rtp.MediaPayloadBytes() * 8 / elapsed_sec / 1000));
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ "WebRTC.Video.PaddingBitrateReceivedInKbps",
+ static_cast<int>(rtp_rtx.transmitted.padding_bytes * 8 / elapsed_sec /
+ 1000));
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ "WebRTC.Video.RetransmittedBitrateReceivedInKbps",
+ static_cast<int>(rtp_rtx.retransmitted.TotalBytes() * 8 /
+ elapsed_sec / 1000));
+ uint32_t ssrc = 0;
+ if (vie_receiver_.GetRtxSsrc(&ssrc)) {
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ "WebRTC.Video.RtxBitrateReceivedInKbps",
+ static_cast<int>(rtx.transmitted.TotalBytes() * 8 / elapsed_sec /
+ 1000));
+ }
+ if (vie_receiver_.IsFecEnabled()) {
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ "WebRTC.Video.FecBitrateReceivedInKbps",
+ static_cast<int>(rtp_rtx.fec.TotalBytes() * 8 / elapsed_sec /
+ 1000));
+ }
+ }
+ }
+}
+
+int32_t ViEChannel::SetSendCodec(const VideoCodec& video_codec,
+ bool new_stream) {
+ RTC_DCHECK(sender_);
+ if (video_codec.codecType == kVideoCodecRED ||
+ video_codec.codecType == kVideoCodecULPFEC) {
+ LOG_F(LS_ERROR) << "Not a valid send codec " << video_codec.codecType;
+ return -1;
+ }
+ if (kMaxSimulcastStreams < video_codec.numberOfSimulcastStreams) {
+ LOG_F(LS_ERROR) << "Incorrect config "
+ << video_codec.numberOfSimulcastStreams;
+ return -1;
+ }
+ // Update the RTP modules with the new codec settings.
+ // Stopping and restarting an RTP module triggers a new SSRC, unless an
+ // SSRC has been set explicitly.
+ // The first simulcast layer is always active, so the first module can be
+ // checked for the current sending status.
+ bool is_sending = rtp_rtcp_modules_[0]->Sending();
+ bool router_was_active = send_payload_router_->active();
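+ // Deactivate the payload router and detach all RTP modules while
+ // reconfiguring, so no packets are routed mid-update.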
+ send_payload_router_->set_active(false);
+ send_payload_router_->SetSendingRtpModules(std::list<RtpRtcp*>());
+
+ std::vector<RtpRtcp*> registered_modules;
+ std::vector<RtpRtcp*> deregistered_modules;
+ size_t num_active_modules = video_codec.numberOfSimulcastStreams > 0
+ ? video_codec.numberOfSimulcastStreams
+ : 1;
+ size_t num_prev_active_modules;
+ {
+ // Cache which modules are active so StartSend can know which ones to start.
+ CriticalSectionScoped cs(crit_.get());
+ num_prev_active_modules = num_active_rtp_rtcp_modules_;
+ num_active_rtp_rtcp_modules_ = num_active_modules;
+ }
+ for (size_t i = 0; i < num_active_modules; ++i)
+ registered_modules.push_back(rtp_rtcp_modules_[i]);
+
+ for (size_t i = num_active_modules; i < rtp_rtcp_modules_.size(); ++i)
+ deregistered_modules.push_back(rtp_rtcp_modules_[i]);
+
+ // Disable inactive modules.
+ for (RtpRtcp* rtp_rtcp : deregistered_modules) {
+ rtp_rtcp->SetSendingStatus(false);
+ rtp_rtcp->SetSendingMediaStatus(false);
+ }
+
+ // Configure active modules.
+ for (RtpRtcp* rtp_rtcp : registered_modules) {
+ rtp_rtcp->DeRegisterSendPayload(video_codec.plType);
+ if (rtp_rtcp->RegisterSendPayload(video_codec) != 0) {
+ return -1;
+ }
+ rtp_rtcp->SetSendingStatus(is_sending);
+ rtp_rtcp->SetSendingMediaStatus(is_sending);
+ }
+
+ // |RegisterRtpRtcpModules| resets all old weak pointers, so the old
+ // modules can safely be deleted after this step.
+ vie_receiver_.RegisterRtpRtcpModules(registered_modules);
+
+ // Update the packet and payload routers with the sending RtpRtcp modules.
+ if (sender_) {
+ std::list<RtpRtcp*> active_send_modules;
+ for (RtpRtcp* rtp_rtcp : registered_modules)
+ active_send_modules.push_back(rtp_rtcp);
+ send_payload_router_->SetSendingRtpModules(active_send_modules);
+ }
+
+ if (router_was_active)
+ send_payload_router_->set_active(true);
+
+ // Deregister previously registered modules.
+ for (size_t i = num_active_modules; i < num_prev_active_modules; ++i) {
+ module_process_thread_->DeRegisterModule(rtp_rtcp_modules_[i]);
+ packet_router_->RemoveRtpModule(rtp_rtcp_modules_[i]);
+ }
+ // Register new active modules.
+ for (size_t i = num_prev_active_modules; i < num_active_modules; ++i) {
+ module_process_thread_->RegisterModule(rtp_rtcp_modules_[i]);
+ packet_router_->AddRtpModule(rtp_rtcp_modules_[i]);
+ }
+ return 0;
+}
+
+int32_t ViEChannel::SetReceiveCodec(const VideoCodec& video_codec) {
+ RTC_DCHECK(!sender_);
+ if (!vie_receiver_.SetReceiveCodec(video_codec)) {
+ return -1;
+ }
+
+ if (video_codec.codecType != kVideoCodecRED &&
+ video_codec.codecType != kVideoCodecULPFEC) {
+ // Register codec type with VCM, but do not register RED or ULPFEC.
+ if (vcm_->RegisterReceiveCodec(&video_codec, number_of_cores_, false) !=
+ VCM_OK) {
+ return -1;
+ }
+ }
+ return 0;
+}
+
+void ViEChannel::RegisterExternalDecoder(const uint8_t pl_type,
+ VideoDecoder* decoder) {
+ RTC_DCHECK(!sender_);
+ vcm_->RegisterExternalDecoder(decoder, pl_type);
+}
+
+int32_t ViEChannel::ReceiveCodecStatistics(uint32_t* num_key_frames,
+ uint32_t* num_delta_frames) {
+ CriticalSectionScoped cs(crit_.get());
+ *num_key_frames = receive_frame_counts_.key_frames;
+ *num_delta_frames = receive_frame_counts_.delta_frames;
+ return 0;
+}
+
+uint32_t ViEChannel::DiscardedPackets() const {
+ return vcm_->DiscardedPackets();
+}
+
+int ViEChannel::ReceiveDelay() const {
+ return vcm_->Delay();
+}
+
+void ViEChannel::SetExpectedRenderDelay(int delay_ms) {
+ vcm_->SetRenderDelay(delay_ms);
+}
+
+void ViEChannel::SetRTCPMode(const RtcpMode rtcp_mode) {
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->SetRTCPStatus(rtcp_mode);
+}
+
+void ViEChannel::SetProtectionMode(bool enable_nack,
+ bool enable_fec,
+ int payload_type_red,
+ int payload_type_fec) {
+ // Validate payload types.
+ if (enable_fec) {
+ RTC_DCHECK_GE(payload_type_red, 0);
+ RTC_DCHECK_GE(payload_type_fec, 0);
+ RTC_DCHECK_LE(payload_type_red, 127);
+ RTC_DCHECK_LE(payload_type_fec, 127);
+ } else {
+ RTC_DCHECK_EQ(payload_type_red, -1);
+ RTC_DCHECK_EQ(payload_type_fec, -1);
+ // Set to valid uint8_t values so the casts below are well defined.
+ payload_type_red = 0;
+ payload_type_fec = 0;
+ }
+
+ VCMVideoProtection protection_method;
+ if (enable_nack) {
+ protection_method = enable_fec ? kProtectionNackFEC : kProtectionNack;
+ } else {
+ protection_method = kProtectionNone;
+ }
+
+ vcm_->SetVideoProtection(protection_method, true);
+
+ // Set NACK.
+ ProcessNACKRequest(enable_nack);
+
+ // Set FEC.
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ rtp_rtcp->SetGenericFECStatus(enable_fec,
+ static_cast<uint8_t>(payload_type_red),
+ static_cast<uint8_t>(payload_type_fec));
+ }
+}
+
+void ViEChannel::ProcessNACKRequest(const bool enable) {
+ if (enable) {
+ // Turn on NACK.
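+ // NACK requests are carried in RTCP feedback messages, so NACK cannot
+ // be enabled while RTCP is off.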
+ if (rtp_rtcp_modules_[0]->RTCP() == RtcpMode::kOff)
+ return;
+ vie_receiver_.SetNackStatus(true, max_nack_reordering_threshold_);
+
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->SetStorePacketsStatus(true, nack_history_size_sender_);
+
+ vcm_->RegisterPacketRequestCallback(this);
+ // Don't introduce errors when NACK is enabled.
+ vcm_->SetDecodeErrorMode(kNoErrors);
+ } else {
+ vcm_->RegisterPacketRequestCallback(NULL);
+ if (paced_sender_ == nullptr) {
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->SetStorePacketsStatus(false, 0);
+ }
+ vie_receiver_.SetNackStatus(false, max_nack_reordering_threshold_);
+ // When NACK is off, allow decoding with errors. Otherwise the video will
+ // freeze and only recover with a complete key frame.
+ vcm_->SetDecodeErrorMode(kWithErrors);
+ }
+}
+
+bool ViEChannel::IsSendingFecEnabled() {
+ bool fec_enabled = false;
+ uint8_t pltype_red = 0;
+ uint8_t pltype_fec = 0;
+
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ rtp_rtcp->GenericFECStatus(&fec_enabled, &pltype_red, &pltype_fec);
+ if (fec_enabled)
+ return true;
+ }
+ return false;
+}
+
+int ViEChannel::SetSenderBufferingMode(int target_delay_ms) {
+ if ((target_delay_ms < 0) || (target_delay_ms > kMaxTargetDelayMs)) {
+ LOG(LS_ERROR) << "Invalid send buffer value.";
+ return -1;
+ }
+ if (target_delay_ms == 0) {
+ // Real-time mode.
+ nack_history_size_sender_ = kMinSendSidePacketHistorySize;
+ } else {
+ nack_history_size_sender_ = GetRequiredNackListSize(target_delay_ms);
+ // Don't allow a size lower than the minimum send-side packet history.
+ if (nack_history_size_sender_ < kMinSendSidePacketHistorySize) {
+ nack_history_size_sender_ = kMinSendSidePacketHistorySize;
+ }
+ }
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->SetStorePacketsStatus(true, nack_history_size_sender_);
+ return 0;
+}
+
+int ViEChannel::GetRequiredNackListSize(int target_delay_ms) {
+ // The max size of the NACK list should be large enough to accommodate the
+ // number of packets (frames) resulting from the increased delay.
+ // Rough estimate: ~40 packets per frame at 30 fps.
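+ // E.g. a 200 ms target delay gives 200 * 40 * 30 / 1000 = 240 packets.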
+ return target_delay_ms * 40 * 30 / 1000;
+}
+
+int ViEChannel::SetSendTimestampOffsetStatus(bool enable, int id) {
+ // Disable any previous registrations of this extension to avoid errors.
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ rtp_rtcp->DeregisterSendRtpHeaderExtension(
+ kRtpExtensionTransmissionTimeOffset);
+ }
+ if (!enable)
+ return 0;
+ // Enable the extension.
+ int error = 0;
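+ // OR the per-module results together so that any single registration
+ // failure yields a nonzero error.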
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ error |= rtp_rtcp->RegisterSendRtpHeaderExtension(
+ kRtpExtensionTransmissionTimeOffset, id);
+ }
+ return error;
+}
+
+int ViEChannel::SetReceiveTimestampOffsetStatus(bool enable, int id) {
+ return vie_receiver_.SetReceiveTimestampOffsetStatus(enable, id) ? 0 : -1;
+}
+
+int ViEChannel::SetSendAbsoluteSendTimeStatus(bool enable, int id) {
+ // Disable any previous registrations of this extension to avoid errors.
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->DeregisterSendRtpHeaderExtension(kRtpExtensionAbsoluteSendTime);
+ if (!enable)
+ return 0;
+ // Enable the extension.
+ int error = 0;
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ error |= rtp_rtcp->RegisterSendRtpHeaderExtension(
+ kRtpExtensionAbsoluteSendTime, id);
+ }
+ return error;
+}
+
+int ViEChannel::SetReceiveAbsoluteSendTimeStatus(bool enable, int id) {
+ return vie_receiver_.SetReceiveAbsoluteSendTimeStatus(enable, id) ? 0 : -1;
+}
+
+int ViEChannel::SetSendVideoRotationStatus(bool enable, int id) {
+ // Disable any previous registrations of this extension to avoid errors.
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->DeregisterSendRtpHeaderExtension(kRtpExtensionVideoRotation);
+ if (!enable)
+ return 0;
+ // Enable the extension.
+ int error = 0;
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ error |= rtp_rtcp->RegisterSendRtpHeaderExtension(
+ kRtpExtensionVideoRotation, id);
+ }
+ return error;
+}
+
+int ViEChannel::SetReceiveVideoRotationStatus(bool enable, int id) {
+ return vie_receiver_.SetReceiveVideoRotationStatus(enable, id) ? 0 : -1;
+}
+
+int ViEChannel::SetSendTransportSequenceNumber(bool enable, int id) {
+ // Disable any previous registrations of this extension to avoid errors.
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ rtp_rtcp->DeregisterSendRtpHeaderExtension(
+ kRtpExtensionTransportSequenceNumber);
+ }
+ if (!enable)
+ return 0;
+ // Enable the extension.
+ int error = 0;
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ error |= rtp_rtcp->RegisterSendRtpHeaderExtension(
+ kRtpExtensionTransportSequenceNumber, id);
+ }
+ return error;
+}
+
+int ViEChannel::SetReceiveTransportSequenceNumber(bool enable, int id) {
+ return vie_receiver_.SetReceiveTransportSequenceNumber(enable, id) ? 0 : -1;
+}
+
+void ViEChannel::SetRtcpXrRrtrStatus(bool enable) {
+ rtp_rtcp_modules_[0]->SetRtcpXrRrtrStatus(enable);
+}
+
+void ViEChannel::EnableTMMBR(bool enable) {
+ rtp_rtcp_modules_[0]->SetTMMBRStatus(enable);
+}
+
+int32_t ViEChannel::SetSSRC(const uint32_t SSRC,
+ const StreamType usage,
+ const uint8_t simulcast_idx) {
+ RtpRtcp* rtp_rtcp = rtp_rtcp_modules_[simulcast_idx];
+ if (usage == kViEStreamTypeRtx) {
+ rtp_rtcp->SetRtxSsrc(SSRC);
+ } else {
+ rtp_rtcp->SetSSRC(SSRC);
+ }
+ return 0;
+}
+
+int32_t ViEChannel::SetRemoteSSRCType(const StreamType usage,
+ const uint32_t SSRC) {
+ vie_receiver_.SetRtxSsrc(SSRC);
+ return 0;
+}
+
+int32_t ViEChannel::GetLocalSSRC(uint8_t idx, unsigned int* ssrc) {
+ RTC_DCHECK_LT(idx, rtp_rtcp_modules_.size());
+ *ssrc = rtp_rtcp_modules_[idx]->SSRC();
+ return 0;
+}
+
+uint32_t ViEChannel::GetRemoteSSRC() {
+ return vie_receiver_.GetRemoteSsrc();
+}
+
+int ViEChannel::SetRtxSendPayloadType(int payload_type,
+ int associated_payload_type) {
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->SetRtxSendPayloadType(payload_type, associated_payload_type);
+ SetRtxSendStatus(true);
+ return 0;
+}
+
+void ViEChannel::SetRtxSendStatus(bool enable) {
+ int rtx_settings =
+ enable ? kRtxRetransmitted | kRtxRedundantPayloads : kRtxOff;
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->SetRtxSendStatus(rtx_settings);
+}
+
+void ViEChannel::SetRtxReceivePayloadType(int payload_type,
+ int associated_payload_type) {
+ vie_receiver_.SetRtxPayloadType(payload_type, associated_payload_type);
+}
+
+void ViEChannel::SetUseRtxPayloadMappingOnRestore(bool val) {
+ vie_receiver_.SetUseRtxPayloadMappingOnRestore(val);
+}
+
+void ViEChannel::SetRtpStateForSsrc(uint32_t ssrc, const RtpState& rtp_state) {
+ RTC_DCHECK(!rtp_rtcp_modules_[0]->Sending());
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ if (rtp_rtcp->SetRtpStateForSsrc(ssrc, rtp_state))
+ return;
+ }
+}
+
+RtpState ViEChannel::GetRtpStateForSsrc(uint32_t ssrc) {
+ RTC_DCHECK(!rtp_rtcp_modules_[0]->Sending());
+ RtpState rtp_state;
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ if (rtp_rtcp->GetRtpStateForSsrc(ssrc, &rtp_state))
+ return rtp_state;
+ }
+ LOG(LS_ERROR) << "Couldn't get RTP state for ssrc: " << ssrc;
+ return rtp_state;
+}
+
+// TODO(pbos): Set CNAME on all modules.
+int32_t ViEChannel::SetRTCPCName(const char* rtcp_cname) {
+ RTC_DCHECK(!rtp_rtcp_modules_[0]->Sending());
+ return rtp_rtcp_modules_[0]->SetCNAME(rtcp_cname);
+}
+
+int32_t ViEChannel::GetRemoteRTCPCName(char rtcp_cname[]) {
+ uint32_t remote_ssrc = vie_receiver_.GetRemoteSsrc();
+ return rtp_rtcp_modules_[0]->RemoteCNAME(remote_ssrc, rtcp_cname);
+}
+
+int32_t ViEChannel::GetSendRtcpStatistics(uint16_t* fraction_lost,
+ uint32_t* cumulative_lost,
+ uint32_t* extended_max,
+ uint32_t* jitter_samples,
+ int64_t* rtt_ms) {
+ // Aggregate the report blocks associated with streams sent on this channel.
+ std::vector<RTCPReportBlock> report_blocks;
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->RemoteRTCPStat(&report_blocks);
+
+ if (report_blocks.empty())
+ return -1;
+
+ uint32_t remote_ssrc = vie_receiver_.GetRemoteSsrc();
+ std::vector<RTCPReportBlock>::const_iterator it = report_blocks.begin();
+ for (; it != report_blocks.end(); ++it) {
+ if (it->remoteSSRC == remote_ssrc)
+ break;
+ }
+ if (it == report_blocks.end()) {
+ // We have not received packets with an SSRC matching the report blocks. To
+ // have a chance of calculating an RTT we will try with the SSRC of the
+ // first report block received.
+ // This is very important for send-only channels where we don't know the
+ // SSRC of the other end.
+ remote_ssrc = report_blocks[0].remoteSSRC;
+ }
+
+ // TODO(asapersson): Change report_block_stats to not rely on
+ // GetSendRtcpStatistics to be called.
+ RTCPReportBlock report =
+ report_block_stats_sender_->AggregateAndStore(report_blocks);
+ *fraction_lost = report.fractionLost;
+ *cumulative_lost = report.cumulativeLost;
+ *extended_max = report.extendedHighSeqNum;
+ *jitter_samples = report.jitter;
+
+ int64_t dummy;
+ int64_t rtt = 0;
+ if (rtp_rtcp_modules_[0]->RTT(remote_ssrc, &rtt, &dummy, &dummy, &dummy) !=
+ 0) {
+ return -1;
+ }
+ *rtt_ms = rtt;
+ return 0;
+}
+
+void ViEChannel::RegisterSendChannelRtcpStatisticsCallback(
+ RtcpStatisticsCallback* callback) {
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->RegisterRtcpStatisticsCallback(callback);
+}
+
+void ViEChannel::RegisterReceiveChannelRtcpStatisticsCallback(
+ RtcpStatisticsCallback* callback) {
+ vie_receiver_.GetReceiveStatistics()->RegisterRtcpStatisticsCallback(
+ callback);
+ rtp_rtcp_modules_[0]->RegisterRtcpStatisticsCallback(callback);
+}
+
+void ViEChannel::RegisterRtcpPacketTypeCounterObserver(
+ RtcpPacketTypeCounterObserver* observer) {
+ rtcp_packet_type_counter_observer_.Set(observer);
+}
+
+void ViEChannel::GetSendStreamDataCounters(
+ StreamDataCounters* rtp_counters,
+ StreamDataCounters* rtx_counters) const {
+ *rtp_counters = StreamDataCounters();
+ *rtx_counters = StreamDataCounters();
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ StreamDataCounters rtp_data;
+ StreamDataCounters rtx_data;
+ rtp_rtcp->GetSendStreamDataCounters(&rtp_data, &rtx_data);
+ rtp_counters->Add(rtp_data);
+ rtx_counters->Add(rtx_data);
+ }
+}
+
+void ViEChannel::GetReceiveStreamDataCounters(
+ StreamDataCounters* rtp_counters,
+ StreamDataCounters* rtx_counters) const {
+ StreamStatistician* statistician = vie_receiver_.GetReceiveStatistics()->
+ GetStatistician(vie_receiver_.GetRemoteSsrc());
+ if (statistician) {
+ statistician->GetReceiveStreamDataCounters(rtp_counters);
+ }
+ uint32_t rtx_ssrc = 0;
+ if (vie_receiver_.GetRtxSsrc(&rtx_ssrc)) {
+ StreamStatistician* statistician =
+ vie_receiver_.GetReceiveStatistics()->GetStatistician(rtx_ssrc);
+ if (statistician) {
+ statistician->GetReceiveStreamDataCounters(rtx_counters);
+ }
+ }
+}
+
+void ViEChannel::RegisterSendChannelRtpStatisticsCallback(
+ StreamDataCountersCallback* callback) {
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->RegisterSendChannelRtpStatisticsCallback(callback);
+}
+
+void ViEChannel::RegisterReceiveChannelRtpStatisticsCallback(
+ StreamDataCountersCallback* callback) {
+ vie_receiver_.GetReceiveStatistics()->RegisterRtpStatisticsCallback(callback);
+}
+
+void ViEChannel::GetSendRtcpPacketTypeCounter(
+ RtcpPacketTypeCounter* packet_counter) const {
+ std::map<uint32_t, RtcpPacketTypeCounter> counter_map =
+ rtcp_packet_type_counter_observer_.GetPacketTypeCounterMap();
+
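+ // Sum the per-SSRC counters. A missing map entry default-constructs to
+ // all-zero counts, so inactive modules contribute nothing.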
+ RtcpPacketTypeCounter counter;
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ counter.Add(counter_map[rtp_rtcp->SSRC()]);
+ *packet_counter = counter;
+}
+
+void ViEChannel::GetReceiveRtcpPacketTypeCounter(
+ RtcpPacketTypeCounter* packet_counter) const {
+ std::map<uint32_t, RtcpPacketTypeCounter> counter_map =
+ rtcp_packet_type_counter_observer_.GetPacketTypeCounterMap();
+
+ RtcpPacketTypeCounter counter;
+ counter.Add(counter_map[vie_receiver_.GetRemoteSsrc()]);
+
+ *packet_counter = counter;
+}
+
+void ViEChannel::RegisterSendSideDelayObserver(
+ SendSideDelayObserver* observer) {
+ send_side_delay_observer_.Set(observer);
+}
+
+void ViEChannel::RegisterSendBitrateObserver(
+ BitrateStatisticsObserver* observer) {
+ send_bitrate_observer_.Set(observer);
+}
+
+int32_t ViEChannel::StartSend() {
+ CriticalSectionScoped cs(crit_.get());
+
+ if (rtp_rtcp_modules_[0]->Sending())
+ return -1;
+
+ for (size_t i = 0; i < num_active_rtp_rtcp_modules_; ++i) {
+ RtpRtcp* rtp_rtcp = rtp_rtcp_modules_[i];
+ rtp_rtcp->SetSendingMediaStatus(true);
+ rtp_rtcp->SetSendingStatus(true);
+ }
+ send_payload_router_->set_active(true);
+ return 0;
+}
+
+int32_t ViEChannel::StopSend() {
+ send_payload_router_->set_active(false);
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->SetSendingMediaStatus(false);
+
+ if (!rtp_rtcp_modules_[0]->Sending()) {
+ return -1;
+ }
+
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ rtp_rtcp->SetSendingStatus(false);
+ }
+ return 0;
+}
+
+bool ViEChannel::Sending() {
+ return rtp_rtcp_modules_[0]->Sending();
+}
+
+void ViEChannel::StartReceive() {
+ if (!sender_)
+ StartDecodeThread();
+ vie_receiver_.StartReceive();
+}
+
+void ViEChannel::StopReceive() {
+ vie_receiver_.StopReceive();
+ if (!sender_) {
+ StopDecodeThread();
+ vcm_->ResetDecoder();
+ }
+}
+
+int32_t ViEChannel::ReceivedRTPPacket(const void* rtp_packet,
+ size_t rtp_packet_length,
+ const PacketTime& packet_time) {
+ return vie_receiver_.ReceivedRTPPacket(
+ rtp_packet, rtp_packet_length, packet_time);
+}
+
+int32_t ViEChannel::ReceivedRTCPPacket(const void* rtcp_packet,
+ size_t rtcp_packet_length) {
+ return vie_receiver_.ReceivedRTCPPacket(rtcp_packet, rtcp_packet_length);
+}
+
+int32_t ViEChannel::SetMTU(uint16_t mtu) {
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->SetMaxTransferUnit(mtu);
+ return 0;
+}
+
+RtpRtcp* ViEChannel::rtp_rtcp() {
+ return rtp_rtcp_modules_[0];
+}
+
+rtc::scoped_refptr<PayloadRouter> ViEChannel::send_payload_router() {
+ return send_payload_router_;
+}
+
+VCMProtectionCallback* ViEChannel::vcm_protection_callback() {
+ return vcm_protection_callback_.get();
+}
+
+CallStatsObserver* ViEChannel::GetStatsObserver() {
+ return stats_observer_.get();
+}
+
+// Do not acquire the lock of |vcm_| in this function. Decode callback won't
+// necessarily be called from the decoding thread. The decoding thread may have
+// held the lock when calling VideoDecoder::Decode, Reset, or Release. Acquiring
+// the same lock in the path of decode callback can deadlock.
+int32_t ViEChannel::FrameToRender(VideoFrame& video_frame) { // NOLINT
+ CriticalSectionScoped cs(crit_.get());
+
+ if (pre_render_callback_ != NULL)
+ pre_render_callback_->FrameCallback(&video_frame);
+
+ // TODO(pbos): Remove stream id argument.
+ incoming_video_stream_->RenderFrame(0xFFFFFFFF, video_frame);
+ return 0;
+}
+
+int32_t ViEChannel::ReceivedDecodedReferenceFrame(
+ const uint64_t picture_id) {
+ return rtp_rtcp_modules_[0]->SendRTCPReferencePictureSelection(picture_id);
+}
+
+void ViEChannel::OnIncomingPayloadType(int payload_type) {
+ CriticalSectionScoped cs(crit_.get());
+ if (receive_stats_callback_)
+ receive_stats_callback_->OnIncomingPayloadType(payload_type);
+}
+
+void ViEChannel::OnDecoderImplementationName(const char* implementation_name) {
+ CriticalSectionScoped cs(crit_.get());
+ if (receive_stats_callback_)
+ receive_stats_callback_->OnDecoderImplementationName(implementation_name);
+}
+
+void ViEChannel::OnReceiveRatesUpdated(uint32_t bit_rate, uint32_t frame_rate) {
+ CriticalSectionScoped cs(crit_.get());
+ if (receive_stats_callback_)
+ receive_stats_callback_->OnIncomingRate(frame_rate, bit_rate);
+}
+
+void ViEChannel::OnDiscardedPacketsUpdated(int discarded_packets) {
+ CriticalSectionScoped cs(crit_.get());
+ if (receive_stats_callback_)
+ receive_stats_callback_->OnDiscardedPacketsUpdated(discarded_packets);
+}
+
+void ViEChannel::OnFrameCountsUpdated(const FrameCounts& frame_counts) {
+ CriticalSectionScoped cs(crit_.get());
+ receive_frame_counts_ = frame_counts;
+ if (receive_stats_callback_)
+ receive_stats_callback_->OnFrameCountsUpdated(frame_counts);
+}
+
+void ViEChannel::OnDecoderTiming(int decode_ms,
+ int max_decode_ms,
+ int current_delay_ms,
+ int target_delay_ms,
+ int jitter_buffer_ms,
+ int min_playout_delay_ms,
+ int render_delay_ms) {
+ CriticalSectionScoped cs(crit_.get());
+ if (!receive_stats_callback_)
+ return;
+ receive_stats_callback_->OnDecoderTiming(
+ decode_ms, max_decode_ms, current_delay_ms, target_delay_ms,
+ jitter_buffer_ms, min_playout_delay_ms, render_delay_ms, last_rtt_ms_);
+}
+
+int32_t ViEChannel::RequestKeyFrame() {
+ return rtp_rtcp_modules_[0]->RequestKeyFrame();
+}
+
+int32_t ViEChannel::SliceLossIndicationRequest(
+ const uint64_t picture_id) {
+ return rtp_rtcp_modules_[0]->SendRTCPSliceLossIndication(
+ static_cast<uint8_t>(picture_id));
+}
+
+int32_t ViEChannel::ResendPackets(const uint16_t* sequence_numbers,
+ uint16_t length) {
+ return rtp_rtcp_modules_[0]->SendNACK(sequence_numbers, length);
+}
+
+bool ViEChannel::ChannelDecodeThreadFunction(void* obj) {
+ return static_cast<ViEChannel*>(obj)->ChannelDecodeProcess();
+}
+
+bool ViEChannel::ChannelDecodeProcess() {
+ vcm_->Decode(kMaxDecodeWaitTimeMs);
+ return true;
+}
+
+void ViEChannel::OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) {
+ vcm_->SetReceiveChannelParameters(max_rtt_ms);
+
+ CriticalSectionScoped cs(crit_.get());
+ if (time_of_first_rtt_ms_ == -1)
+ time_of_first_rtt_ms_ = Clock::GetRealTimeClock()->TimeInMilliseconds();
+ rtt_sum_ms_ += avg_rtt_ms;
+ last_rtt_ms_ = avg_rtt_ms;
+ ++num_rtts_;
+}
+
+int ViEChannel::ProtectionRequest(const FecProtectionParams* delta_fec_params,
+ const FecProtectionParams* key_fec_params,
+ uint32_t* video_rate_bps,
+ uint32_t* nack_rate_bps,
+ uint32_t* fec_rate_bps) {
+ *video_rate_bps = 0;
+ *nack_rate_bps = 0;
+ *fec_rate_bps = 0;
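+ // Sum the per-module rates so the caller sees the aggregate across all
+ // simulcast streams.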
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ uint32_t not_used = 0;
+ uint32_t module_video_rate = 0;
+ uint32_t module_fec_rate = 0;
+ uint32_t module_nack_rate = 0;
+ rtp_rtcp->SetFecParameters(delta_fec_params, key_fec_params);
+ rtp_rtcp->BitrateSent(&not_used, &module_video_rate, &module_fec_rate,
+ &module_nack_rate);
+ *video_rate_bps += module_video_rate;
+ *nack_rate_bps += module_nack_rate;
+ *fec_rate_bps += module_fec_rate;
+ }
+ return 0;
+}
+
+std::vector<RtpRtcp*> ViEChannel::CreateRtpRtcpModules(
+ bool receiver_only,
+ ReceiveStatistics* receive_statistics,
+ Transport* outgoing_transport,
+ RtcpIntraFrameObserver* intra_frame_callback,
+ RtcpBandwidthObserver* bandwidth_callback,
+ TransportFeedbackObserver* transport_feedback_callback,
+ RtcpRttStats* rtt_stats,
+ RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer,
+ RemoteBitrateEstimator* remote_bitrate_estimator,
+ RtpPacketSender* paced_sender,
+ TransportSequenceNumberAllocator* transport_sequence_number_allocator,
+ BitrateStatisticsObserver* send_bitrate_observer,
+ FrameCountObserver* send_frame_count_observer,
+ SendSideDelayObserver* send_side_delay_observer,
+ size_t num_modules) {
+ RTC_DCHECK_GT(num_modules, 0u);
+ RtpRtcp::Configuration configuration;
+ ReceiveStatistics* null_receive_statistics = configuration.receive_statistics;
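+ // Remember the default receive statistics (a null-object implementation)
+ // so it can be restored for the secondary modules created below.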
+ configuration.audio = false;
+ configuration.receiver_only = receiver_only;
+ configuration.receive_statistics = receive_statistics;
+ configuration.outgoing_transport = outgoing_transport;
+ configuration.intra_frame_callback = intra_frame_callback;
+ configuration.rtt_stats = rtt_stats;
+ configuration.rtcp_packet_type_counter_observer =
+ rtcp_packet_type_counter_observer;
+ configuration.paced_sender = paced_sender;
+ configuration.transport_sequence_number_allocator =
+ transport_sequence_number_allocator;
+ configuration.send_bitrate_observer = send_bitrate_observer;
+ configuration.send_frame_count_observer = send_frame_count_observer;
+ configuration.send_side_delay_observer = send_side_delay_observer;
+ configuration.bandwidth_callback = bandwidth_callback;
+ configuration.transport_feedback_callback = transport_feedback_callback;
+ configuration.remote_bitrate_estimator = remote_bitrate_estimator;
+
+ std::vector<RtpRtcp*> modules;
+ for (size_t i = 0; i < num_modules; ++i) {
+ RtpRtcp* rtp_rtcp = RtpRtcp::CreateRtpRtcp(configuration);
+ rtp_rtcp->SetSendingStatus(false);
+ rtp_rtcp->SetSendingMediaStatus(false);
+ rtp_rtcp->SetRTCPStatus(RtcpMode::kCompound);
+ modules.push_back(rtp_rtcp);
+ // Receive statistics and remote bitrate estimator should only be set for
+ // the primary (first) module.
+ configuration.receive_statistics = null_receive_statistics;
+ configuration.remote_bitrate_estimator = nullptr;
+ }
+ return modules;
+}
+
+void ViEChannel::StartDecodeThread() {
+ RTC_DCHECK(!sender_);
+ if (decode_thread_.IsRunning())
+ return;
+ // Start the decode thread.
+ decode_thread_.Start();
+ decode_thread_.SetPriority(rtc::kHighestPriority);
+}
+
+void ViEChannel::StopDecodeThread() {
+ vcm_->TriggerDecoderShutdown();
+
+ decode_thread_.Stop();
+}
+
+int32_t ViEChannel::SetVoiceChannel(int32_t ve_channel_id,
+ VoEVideoSync* ve_sync_interface) {
+ return vie_sync_.ConfigureSync(ve_channel_id, ve_sync_interface,
+ rtp_rtcp_modules_[0],
+ vie_receiver_.GetRtpReceiver());
+}
+
+int32_t ViEChannel::VoiceChannel() {
+ return vie_sync_.VoiceChannel();
+}
+
+void ViEChannel::RegisterPreRenderCallback(
+ I420FrameCallback* pre_render_callback) {
+ CriticalSectionScoped cs(crit_.get());
+ pre_render_callback_ = pre_render_callback;
+}
+
+void ViEChannel::RegisterPreDecodeImageCallback(
+ EncodedImageCallback* pre_decode_callback) {
+ vcm_->RegisterPreDecodeImageCallback(pre_decode_callback);
+}
+
+// TODO(pbos): Remove OnInitializeDecoder which is called from the RTP module,
+// any decoder resetting should be handled internally within the VCM.
+int32_t ViEChannel::OnInitializeDecoder(
+ const int8_t payload_type,
+ const char payload_name[RTP_PAYLOAD_NAME_SIZE],
+ const int frequency,
+ const size_t channels,
+ const uint32_t rate) {
+ LOG(LS_INFO) << "OnInitializeDecoder " << static_cast<int>(payload_type)
+ << " " << payload_name;
+ vcm_->ResetDecoder();
+
+ return 0;
+}
+
+void ViEChannel::OnIncomingSSRCChanged(const uint32_t ssrc) {
+ rtp_rtcp_modules_[0]->SetRemoteSSRC(ssrc);
+}
+
+void ViEChannel::OnIncomingCSRCChanged(const uint32_t CSRC, const bool added) {}
+
+void ViEChannel::RegisterSendFrameCountObserver(
+ FrameCountObserver* observer) {
+ send_frame_count_observer_.Set(observer);
+}
+
+void ViEChannel::RegisterReceiveStatisticsProxy(
+ ReceiveStatisticsProxy* receive_statistics_proxy) {
+ CriticalSectionScoped cs(crit_.get());
+ receive_stats_callback_ = receive_statistics_proxy;
+}
+
+void ViEChannel::SetIncomingVideoStream(
+ IncomingVideoStream* incoming_video_stream) {
+ CriticalSectionScoped cs(crit_.get());
+ incoming_video_stream_ = incoming_video_stream;
+}
+} // namespace webrtc
diff --git a/webrtc/video/vie_channel.h b/webrtc/video/vie_channel.h
new file mode 100644
index 0000000000..4ba394817f
--- /dev/null
+++ b/webrtc/video/vie_channel.h
@@ -0,0 +1,454 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_VIE_CHANNEL_H_
+#define WEBRTC_VIDEO_VIE_CHANNEL_H_
+
+#include <list>
+#include <map>
+#include <vector>
+
+#include "webrtc/base/platform_thread.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "webrtc/modules/video_coding/include/video_coding_defines.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/typedefs.h"
+#include "webrtc/video/vie_receiver.h"
+#include "webrtc/video/vie_sync_module.h"
+
+namespace webrtc {
+
+class CallStatsObserver;
+class ChannelStatsObserver;
+class Config;
+class CriticalSectionWrapper;
+class EncodedImageCallback;
+class I420FrameCallback;
+class IncomingVideoStream;
+class PacedSender;
+class PacketRouter;
+class PayloadRouter;
+class ProcessThread;
+class ReceiveStatisticsProxy;
+class ReportBlockStats;
+class RtcpRttStats;
+class ViEChannelProtectionCallback;
+class ViERTPObserver;
+class VideoCodingModule;
+class VideoDecoder;
+class VideoRenderCallback;
+class VoEVideoSync;
+
+enum StreamType {
+ kViEStreamTypeNormal = 0, // Normal media stream
+ kViEStreamTypeRtx = 1 // Retransmission media stream
+};
+
+class ViEChannel : public VCMFrameTypeCallback,
+ public VCMReceiveCallback,
+ public VCMReceiveStatisticsCallback,
+ public VCMDecoderTimingCallback,
+ public VCMPacketRequestCallback,
+ public RtpFeedback {
+ public:
+ friend class ChannelStatsObserver;
+ friend class ViEChannelProtectionCallback;
+
+ ViEChannel(uint32_t number_of_cores,
+ Transport* transport,
+ ProcessThread* module_process_thread,
+ RtcpIntraFrameObserver* intra_frame_observer,
+ RtcpBandwidthObserver* bandwidth_observer,
+ TransportFeedbackObserver* transport_feedback_observer,
+ RemoteBitrateEstimator* remote_bitrate_estimator,
+ RtcpRttStats* rtt_stats,
+ PacedSender* paced_sender,
+ PacketRouter* packet_router,
+ size_t max_rtp_streams,
+ bool sender);
+ ~ViEChannel();
+
+ int32_t Init();
+
+ // Sets the encoder to use for the channel. |new_stream| indicates the encoder
+ // type has changed and we should start a new RTP stream.
+ int32_t SetSendCodec(const VideoCodec& video_codec, bool new_stream = true);
+ int32_t SetReceiveCodec(const VideoCodec& video_codec);
+ // Registers an external decoder.
+ void RegisterExternalDecoder(const uint8_t pl_type, VideoDecoder* decoder);
+ int32_t ReceiveCodecStatistics(uint32_t* num_key_frames,
+ uint32_t* num_delta_frames);
+ uint32_t DiscardedPackets() const;
+
+ // Returns the estimated delay in milliseconds.
+ int ReceiveDelay() const;
+
+ void SetExpectedRenderDelay(int delay_ms);
+
+ void SetRTCPMode(const RtcpMode rtcp_mode);
+ void SetProtectionMode(bool enable_nack,
+ bool enable_fec,
+ int payload_type_red,
+ int payload_type_fec);
+ bool IsSendingFecEnabled();
+ int SetSenderBufferingMode(int target_delay_ms);
+ int SetSendTimestampOffsetStatus(bool enable, int id);
+ int SetReceiveTimestampOffsetStatus(bool enable, int id);
+ int SetSendAbsoluteSendTimeStatus(bool enable, int id);
+ int SetReceiveAbsoluteSendTimeStatus(bool enable, int id);
+ int SetSendVideoRotationStatus(bool enable, int id);
+ int SetReceiveVideoRotationStatus(bool enable, int id);
+ int SetSendTransportSequenceNumber(bool enable, int id);
+ int SetReceiveTransportSequenceNumber(bool enable, int id);
+ void SetRtcpXrRrtrStatus(bool enable);
+ void EnableTMMBR(bool enable);
+
+ // Sets SSRC for outgoing stream.
+ int32_t SetSSRC(const uint32_t SSRC,
+ const StreamType usage,
+ const uint8_t simulcast_idx);
+
+ // Gets SSRC for outgoing stream number |idx|.
+ int32_t GetLocalSSRC(uint8_t idx, unsigned int* ssrc);
+
+ // Gets SSRC for the incoming stream.
+ uint32_t GetRemoteSSRC();
+
+ int SetRtxSendPayloadType(int payload_type, int associated_payload_type);
+ void SetRtxReceivePayloadType(int payload_type, int associated_payload_type);
+ // If set to true, the RTX payload type mapping supplied in
+ // |SetRtxReceivePayloadType| will be used when restoring RTX packets. Without
+ // it, RTX packets will always be restored to the last non-RTX packet payload
+ // type received.
+ void SetUseRtxPayloadMappingOnRestore(bool val);
+
+ void SetRtpStateForSsrc(uint32_t ssrc, const RtpState& rtp_state);
+ RtpState GetRtpStateForSsrc(uint32_t ssrc);
+
+ // Sets the CName for the outgoing stream on the channel.
+ int32_t SetRTCPCName(const char* rtcp_cname);
+
+ // Gets the CName of the incoming stream.
+ int32_t GetRemoteRTCPCName(char rtcp_cname[]);
+
+ // Returns statistics reported by the remote client in an RTCP packet.
+ // TODO(pbos): Remove this along with VideoSendStream::GetRtt().
+ int32_t GetSendRtcpStatistics(uint16_t* fraction_lost,
+ uint32_t* cumulative_lost,
+ uint32_t* extended_max,
+ uint32_t* jitter_samples,
+ int64_t* rtt_ms);
+
+ // Called on receipt of RTCP report block from remote side.
+ void RegisterSendChannelRtcpStatisticsCallback(
+ RtcpStatisticsCallback* callback);
+
+ // Called on generation of RTCP stats
+ void RegisterReceiveChannelRtcpStatisticsCallback(
+ RtcpStatisticsCallback* callback);
+
+ // Gets send statistics for the rtp and rtx stream.
+ void GetSendStreamDataCounters(StreamDataCounters* rtp_counters,
+ StreamDataCounters* rtx_counters) const;
+
+ // Gets received stream data counters.
+ void GetReceiveStreamDataCounters(StreamDataCounters* rtp_counters,
+ StreamDataCounters* rtx_counters) const;
+
+ // Called on update of RTP statistics.
+ void RegisterSendChannelRtpStatisticsCallback(
+ StreamDataCountersCallback* callback);
+
+ // Called on update of RTP statistics.
+ void RegisterReceiveChannelRtpStatisticsCallback(
+ StreamDataCountersCallback* callback);
+
+ void GetSendRtcpPacketTypeCounter(
+ RtcpPacketTypeCounter* packet_counter) const;
+
+ void GetReceiveRtcpPacketTypeCounter(
+ RtcpPacketTypeCounter* packet_counter) const;
+
+ void RegisterSendSideDelayObserver(SendSideDelayObserver* observer);
+
+ // Called on any new send bitrate estimate.
+ void RegisterSendBitrateObserver(BitrateStatisticsObserver* observer);
+
+ // Implements RtpFeedback.
+ int32_t OnInitializeDecoder(const int8_t payload_type,
+ const char payload_name[RTP_PAYLOAD_NAME_SIZE],
+ const int frequency,
+ const size_t channels,
+ const uint32_t rate) override;
+ void OnIncomingSSRCChanged(const uint32_t ssrc) override;
+ void OnIncomingCSRCChanged(const uint32_t CSRC, const bool added) override;
+
+ int32_t SetRemoteSSRCType(const StreamType usage, const uint32_t SSRC);
+
+ int32_t StartSend();
+ int32_t StopSend();
+ bool Sending();
+ void StartReceive();
+ void StopReceive();
+
+ int32_t ReceivedRTPPacket(const void* rtp_packet,
+ const size_t rtp_packet_length,
+ const PacketTime& packet_time);
+ int32_t ReceivedRTCPPacket(const void* rtcp_packet,
+ const size_t rtcp_packet_length);
+
+ // Sets the maximum transfer unit size for the network link, i.e. including
+ // IP, UDP and RTP headers.
+ int32_t SetMTU(uint16_t mtu);
+
+ // Gets the modules used by the channel.
+ RtpRtcp* rtp_rtcp();
+ rtc::scoped_refptr<PayloadRouter> send_payload_router();
+ VCMProtectionCallback* vcm_protection_callback();
+
+ CallStatsObserver* GetStatsObserver();
+
+ // Implements VCMReceiveCallback.
+ virtual int32_t FrameToRender(VideoFrame& video_frame); // NOLINT
+
+ // Implements VCMReceiveCallback.
+ virtual int32_t ReceivedDecodedReferenceFrame(
+ const uint64_t picture_id);
+
+ // Implements VCMReceiveCallback.
+ void OnIncomingPayloadType(int payload_type) override;
+ void OnDecoderImplementationName(const char* implementation_name) override;
+
+ // Implements VCMReceiveStatisticsCallback.
+ void OnReceiveRatesUpdated(uint32_t bit_rate, uint32_t frame_rate) override;
+ void OnDiscardedPacketsUpdated(int discarded_packets) override;
+ void OnFrameCountsUpdated(const FrameCounts& frame_counts) override;
+
+ // Implements VCMDecoderTimingCallback.
+ virtual void OnDecoderTiming(int decode_ms,
+ int max_decode_ms,
+ int current_delay_ms,
+ int target_delay_ms,
+ int jitter_buffer_ms,
+ int min_playout_delay_ms,
+ int render_delay_ms);
+
+ // Implements VCMFrameTypeCallback.
+ virtual int32_t RequestKeyFrame();
+
+ // Implements VCMFrameTypeCallback.
+ virtual int32_t SliceLossIndicationRequest(
+ const uint64_t picture_id);
+
+ // Implements VCMPacketRequestCallback.
+ int32_t ResendPackets(const uint16_t* sequence_numbers,
+ uint16_t length) override;
+
+ int32_t SetVoiceChannel(int32_t ve_channel_id,
+ VoEVideoSync* ve_sync_interface);
+ int32_t VoiceChannel();
+
+ // New-style callbacks, used by VideoReceiveStream.
+ void RegisterPreRenderCallback(I420FrameCallback* pre_render_callback);
+ void RegisterPreDecodeImageCallback(
+ EncodedImageCallback* pre_decode_callback);
+
+ void RegisterSendFrameCountObserver(FrameCountObserver* observer);
+ void RegisterRtcpPacketTypeCounterObserver(
+ RtcpPacketTypeCounterObserver* observer);
+ void RegisterReceiveStatisticsProxy(
+ ReceiveStatisticsProxy* receive_statistics_proxy);
+ void SetIncomingVideoStream(IncomingVideoStream* incoming_video_stream);
+
+ protected:
+ static bool ChannelDecodeThreadFunction(void* obj);
+ bool ChannelDecodeProcess();
+
+ void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms);
+
+ int ProtectionRequest(const FecProtectionParams* delta_fec_params,
+ const FecProtectionParams* key_fec_params,
+ uint32_t* sent_video_rate_bps,
+ uint32_t* sent_nack_rate_bps,
+ uint32_t* sent_fec_rate_bps);
+
+ private:
+ static std::vector<RtpRtcp*> CreateRtpRtcpModules(
+ bool receiver_only,
+ ReceiveStatistics* receive_statistics,
+ Transport* outgoing_transport,
+ RtcpIntraFrameObserver* intra_frame_callback,
+ RtcpBandwidthObserver* bandwidth_callback,
+ TransportFeedbackObserver* transport_feedback_callback,
+ RtcpRttStats* rtt_stats,
+ RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer,
+ RemoteBitrateEstimator* remote_bitrate_estimator,
+ RtpPacketSender* paced_sender,
+ TransportSequenceNumberAllocator* transport_sequence_number_allocator,
+ BitrateStatisticsObserver* send_bitrate_observer,
+ FrameCountObserver* send_frame_count_observer,
+ SendSideDelayObserver* send_side_delay_observer,
+ size_t num_modules);
+
+ // Assumed to be protected.
+ void StartDecodeThread();
+ void StopDecodeThread();
+
+ void ProcessNACKRequest(const bool enable);
+ // Compute NACK list parameters for the buffering mode.
+ int GetRequiredNackListSize(int target_delay_ms);
+ void SetRtxSendStatus(bool enable);
+
+ void UpdateHistograms();
+
+ // ViEChannel exposes methods that allow its observers and callbacks to be
+ // modified after construction. Such an API style is cumbersome to implement
+ // and maintain at every level compared to setting them only at
+ // construction, so this class instantiates its children with wrappers whose
+ // targets can be changed at a later time.
+ template <class T>
+ class RegisterableCallback : public T {
+ public:
+ RegisterableCallback()
+ : critsect_(CriticalSectionWrapper::CreateCriticalSection()),
+ callback_(NULL) {}
+
+ void Set(T* callback) {
+ CriticalSectionScoped cs(critsect_.get());
+ callback_ = callback;
+ }
+
+ protected:
+ // Note: this should be implemented with an RW-lock to allow simultaneous
+ // calls into the callback. However, that doesn't seem to be needed for the
+ // current type of callbacks covered by this class.
+ rtc::scoped_ptr<CriticalSectionWrapper> critsect_;
+ T* callback_ GUARDED_BY(critsect_);
+
+ private:
+ RTC_DISALLOW_COPY_AND_ASSIGN(RegisterableCallback);
+ };
+
+ class RegisterableBitrateStatisticsObserver:
+ public RegisterableCallback<BitrateStatisticsObserver> {
+ virtual void Notify(const BitrateStatistics& total_stats,
+ const BitrateStatistics& retransmit_stats,
+ uint32_t ssrc) {
+ CriticalSectionScoped cs(critsect_.get());
+ if (callback_)
+ callback_->Notify(total_stats, retransmit_stats, ssrc);
+ }
+ } send_bitrate_observer_;
+
+ class RegisterableFrameCountObserver
+ : public RegisterableCallback<FrameCountObserver> {
+ public:
+ virtual void FrameCountUpdated(const FrameCounts& frame_counts,
+ uint32_t ssrc) {
+ CriticalSectionScoped cs(critsect_.get());
+ if (callback_)
+ callback_->FrameCountUpdated(frame_counts, ssrc);
+ }
+ } send_frame_count_observer_;
+
+ class RegisterableSendSideDelayObserver :
+ public RegisterableCallback<SendSideDelayObserver> {
+ void SendSideDelayUpdated(int avg_delay_ms,
+ int max_delay_ms,
+ uint32_t ssrc) override {
+ CriticalSectionScoped cs(critsect_.get());
+ if (callback_)
+ callback_->SendSideDelayUpdated(avg_delay_ms, max_delay_ms, ssrc);
+ }
+ } send_side_delay_observer_;
+
+ class RegisterableRtcpPacketTypeCounterObserver
+ : public RegisterableCallback<RtcpPacketTypeCounterObserver> {
+ public:
+ void RtcpPacketTypesCounterUpdated(
+ uint32_t ssrc,
+ const RtcpPacketTypeCounter& packet_counter) override {
+ CriticalSectionScoped cs(critsect_.get());
+ if (callback_)
+ callback_->RtcpPacketTypesCounterUpdated(ssrc, packet_counter);
+ counter_map_[ssrc] = packet_counter;
+ }
+
+ virtual std::map<uint32_t, RtcpPacketTypeCounter> GetPacketTypeCounterMap()
+ const {
+ CriticalSectionScoped cs(critsect_.get());
+ return counter_map_;
+ }
+
+ private:
+ std::map<uint32_t, RtcpPacketTypeCounter> counter_map_
+ GUARDED_BY(critsect_);
+ } rtcp_packet_type_counter_observer_;
+
+ const uint32_t number_of_cores_;
+ const bool sender_;
+
+ ProcessThread* const module_process_thread_;
+
+ // Used for all registered callbacks except rendering.
+ rtc::scoped_ptr<CriticalSectionWrapper> crit_;
+
+ // Owned modules/classes.
+ rtc::scoped_refptr<PayloadRouter> send_payload_router_;
+ rtc::scoped_ptr<ViEChannelProtectionCallback> vcm_protection_callback_;
+
+ VideoCodingModule* const vcm_;
+ ViEReceiver vie_receiver_;
+ ViESyncModule vie_sync_;
+
+ // Helper to report call statistics.
+ rtc::scoped_ptr<ChannelStatsObserver> stats_observer_;
+
+ // Not owned.
+ ReceiveStatisticsProxy* receive_stats_callback_ GUARDED_BY(crit_);
+ FrameCounts receive_frame_counts_ GUARDED_BY(crit_);
+ IncomingVideoStream* incoming_video_stream_ GUARDED_BY(crit_);
+ RtcpIntraFrameObserver* const intra_frame_observer_;
+ RtcpRttStats* const rtt_stats_;
+ PacedSender* const paced_sender_;
+ PacketRouter* const packet_router_;
+
+ const rtc::scoped_ptr<RtcpBandwidthObserver> bandwidth_observer_;
+ TransportFeedbackObserver* const transport_feedback_observer_;
+
+ rtc::PlatformThread decode_thread_;
+
+ int nack_history_size_sender_;
+ int max_nack_reordering_threshold_;
+ I420FrameCallback* pre_render_callback_ GUARDED_BY(crit_);
+
+ const rtc::scoped_ptr<ReportBlockStats> report_block_stats_sender_;
+
+ int64_t time_of_first_rtt_ms_ GUARDED_BY(crit_);
+ int64_t rtt_sum_ms_ GUARDED_BY(crit_);
+ int64_t last_rtt_ms_ GUARDED_BY(crit_);
+ size_t num_rtts_ GUARDED_BY(crit_);
+
+ // RtpRtcp modules, declared last as they use other members on construction.
+ const std::vector<RtpRtcp*> rtp_rtcp_modules_;
+ size_t num_active_rtp_rtcp_modules_ GUARDED_BY(crit_);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_VIE_CHANNEL_H_
diff --git a/webrtc/video_engine/vie_codec_unittest.cc b/webrtc/video/vie_codec_unittest.cc
index 9f648ec521..9f648ec521 100644
--- a/webrtc/video_engine/vie_codec_unittest.cc
+++ b/webrtc/video/vie_codec_unittest.cc
diff --git a/webrtc/video/vie_encoder.cc b/webrtc/video/vie_encoder.cc
new file mode 100644
index 0000000000..a147b2415c
--- /dev/null
+++ b/webrtc/video/vie_encoder.cc
@@ -0,0 +1,634 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video/vie_encoder.h"
+
+#include <assert.h>
+
+#include <algorithm>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/trace_event.h"
+#include "webrtc/call/bitrate_allocator.h"
+#include "webrtc/common_video/include/video_image.h"
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/frame_callback.h"
+#include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"
+#include "webrtc/modules/pacing/paced_sender.h"
+#include "webrtc/modules/utility/include/process_thread.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
+#include "webrtc/modules/video_coding/include/video_coding.h"
+#include "webrtc/modules/video_coding/include/video_coding_defines.h"
+#include "webrtc/modules/video_coding/encoded_frame.h"
+#include "webrtc/system_wrappers/include/clock.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/metrics.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/video/payload_router.h"
+#include "webrtc/video/send_statistics_proxy.h"
+
+namespace webrtc {
+
+// Margin, relative to the configured buffer delay, applied when deciding to
+// pause the encoder because the pacing buffer has overflowed.
+static const float kEncoderPausePacerMargin = 2.0f;
+
+// Don't stop the encoder unless the delay is above this configured value.
+static const int kMinPacingDelayMs = 200;
+
+static const float kStopPaddingThresholdMs = 2000;
+
+static const int kMinKeyFrameRequestIntervalMs = 300;
+
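+// Splits |total_bitrate| greedily across the simulcast streams in order: each
+// stream receives up to its configured max bitrate, and anything left beyond
+// the sum of the maxes is dropped. For example (hypothetical values), 1500
+// kbps over stream maxes of {300, 900, 2500} kbps yields {300, 900, 300} kbps.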
+std::vector<uint32_t> AllocateStreamBitrates(
+ uint32_t total_bitrate,
+ const SimulcastStream* stream_configs,
+ size_t number_of_streams) {
+ if (number_of_streams == 0) {
+ std::vector<uint32_t> stream_bitrates(1, 0);
+ stream_bitrates[0] = total_bitrate;
+ return stream_bitrates;
+ }
+ std::vector<uint32_t> stream_bitrates(number_of_streams, 0);
+ uint32_t bitrate_remainder = total_bitrate;
+ for (size_t i = 0; i < stream_bitrates.size() && bitrate_remainder > 0; ++i) {
+ if (stream_configs[i].maxBitrate * 1000 > bitrate_remainder) {
+ stream_bitrates[i] = bitrate_remainder;
+ } else {
+ stream_bitrates[i] = stream_configs[i].maxBitrate * 1000;
+ }
+ bitrate_remainder -= stream_bitrates[i];
+ }
+ return stream_bitrates;
+}
+
+class QMVideoSettingsCallback : public VCMQMSettingsCallback {
+ public:
+ explicit QMVideoSettingsCallback(VideoProcessing* vpm);
+
+ ~QMVideoSettingsCallback();
+
+ // Update VPM with QM (quality modes: frame size & frame rate) settings.
+ int32_t SetVideoQMSettings(const uint32_t frame_rate,
+ const uint32_t width,
+ const uint32_t height);
+
+ // Update target frame rate.
+ void SetTargetFramerate(int frame_rate);
+
+ private:
+ VideoProcessing* vp_;
+};
+
+class ViEBitrateObserver : public BitrateObserver {
+ public:
+ explicit ViEBitrateObserver(ViEEncoder* owner)
+ : owner_(owner) {
+ }
+ virtual ~ViEBitrateObserver() {}
+ // Implements BitrateObserver.
+ virtual void OnNetworkChanged(uint32_t bitrate_bps,
+ uint8_t fraction_lost,
+ int64_t rtt) {
+ owner_->OnNetworkChanged(bitrate_bps, fraction_lost, rtt);
+ }
+ private:
+ ViEEncoder* owner_;
+};
+
+ViEEncoder::ViEEncoder(uint32_t number_of_cores,
+ ProcessThread* module_process_thread,
+ SendStatisticsProxy* stats_proxy,
+ I420FrameCallback* pre_encode_callback,
+ PacedSender* pacer,
+ BitrateAllocator* bitrate_allocator)
+ : number_of_cores_(number_of_cores),
+ vp_(VideoProcessing::Create()),
+ qm_callback_(new QMVideoSettingsCallback(vp_.get())),
+ vcm_(VideoCodingModule::Create(Clock::GetRealTimeClock(),
+ this,
+ qm_callback_.get())),
+ send_payload_router_(NULL),
+ data_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+ stats_proxy_(stats_proxy),
+ pre_encode_callback_(pre_encode_callback),
+ pacer_(pacer),
+ bitrate_allocator_(bitrate_allocator),
+ time_of_last_frame_activity_ms_(0),
+ encoder_config_(),
+ min_transmit_bitrate_kbps_(0),
+ last_observed_bitrate_bps_(0),
+ target_delay_ms_(0),
+ network_is_transmitting_(true),
+ encoder_paused_(false),
+ encoder_paused_and_dropped_frame_(false),
+ module_process_thread_(module_process_thread),
+ has_received_sli_(false),
+ picture_id_sli_(0),
+ has_received_rpsi_(false),
+ picture_id_rpsi_(0),
+ video_suspended_(false) {
+ bitrate_observer_.reset(new ViEBitrateObserver(this));
+}
+
+bool ViEEncoder::Init() {
+ vp_->EnableTemporalDecimation(true);
+
+ // Enable/disable content analysis: off by default for now.
+ vp_->EnableContentAnalysis(false);
+
+ if (vcm_->RegisterTransportCallback(this) != 0) {
+ return false;
+ }
+ if (vcm_->RegisterSendStatisticsCallback(this) != 0) {
+ return false;
+ }
+ return true;
+}
+
+void ViEEncoder::StartThreadsAndSetSharedMembers(
+ rtc::scoped_refptr<PayloadRouter> send_payload_router,
+ VCMProtectionCallback* vcm_protection_callback) {
+ RTC_DCHECK(send_payload_router_ == NULL);
+
+ send_payload_router_ = send_payload_router;
+ vcm_->RegisterProtectionCallback(vcm_protection_callback);
+ module_process_thread_->RegisterModule(vcm_.get());
+}
+
+void ViEEncoder::StopThreadsAndRemoveSharedMembers() {
+ if (bitrate_allocator_)
+ bitrate_allocator_->RemoveBitrateObserver(bitrate_observer_.get());
+ module_process_thread_->DeRegisterModule(vcm_.get());
+}
+
+ViEEncoder::~ViEEncoder() {
+}
+
+void ViEEncoder::SetNetworkTransmissionState(bool is_transmitting) {
+ {
+ CriticalSectionScoped cs(data_cs_.get());
+ network_is_transmitting_ = is_transmitting;
+ }
+}
+
+void ViEEncoder::Pause() {
+ CriticalSectionScoped cs(data_cs_.get());
+ encoder_paused_ = true;
+}
+
+void ViEEncoder::Restart() {
+ CriticalSectionScoped cs(data_cs_.get());
+ encoder_paused_ = false;
+}
+
+int32_t ViEEncoder::RegisterExternalEncoder(webrtc::VideoEncoder* encoder,
+ uint8_t pl_type,
+ bool internal_source) {
+ if (vcm_->RegisterExternalEncoder(encoder, pl_type, internal_source) !=
+ VCM_OK) {
+ return -1;
+ }
+ return 0;
+}
+
+int32_t ViEEncoder::DeRegisterExternalEncoder(uint8_t pl_type) {
+ if (vcm_->RegisterExternalEncoder(NULL, pl_type) != VCM_OK) {
+ return -1;
+ }
+ return 0;
+}
+
+int32_t ViEEncoder::SetEncoder(const webrtc::VideoCodec& video_codec) {
+ RTC_DCHECK(send_payload_router_ != NULL);
+ // Set the target resolution and frame rate for the VPM.
+ if (vp_->SetTargetResolution(video_codec.width, video_codec.height,
+ video_codec.maxFramerate) != VPM_OK) {
+ return -1;
+ }
+
+ // Cache the codec before calling AddBitrateObserver, which calls
+ // OnNetworkChanged, which in turn uses the number of configured simulcast
+ // streams.
+ {
+ CriticalSectionScoped cs(data_cs_.get());
+ encoder_config_ = video_codec;
+ }
+
+ // Add a bitrate observer to the allocator and update the start, max and
+ // min bitrates of the bitrate controller as needed.
+ int allocated_bitrate_bps = bitrate_allocator_->AddBitrateObserver(
+ bitrate_observer_.get(), video_codec.minBitrate * 1000,
+ video_codec.maxBitrate * 1000);
+
+ webrtc::VideoCodec modified_video_codec = video_codec;
+ modified_video_codec.startBitrate = allocated_bitrate_bps / 1000;
+
+ size_t max_data_payload_length = send_payload_router_->MaxPayloadLength();
+ if (vcm_->RegisterSendCodec(&modified_video_codec, number_of_cores_,
+ static_cast<uint32_t>(max_data_payload_length)) !=
+ VCM_OK) {
+ return -1;
+ }
+ return 0;
+}
+
+int ViEEncoder::GetPaddingNeededBps() const {
+ int64_t time_of_last_frame_activity_ms;
+ int min_transmit_bitrate_bps;
+ int bitrate_bps;
+ VideoCodec send_codec;
+ {
+ CriticalSectionScoped cs(data_cs_.get());
+ bool send_padding = encoder_config_.numberOfSimulcastStreams > 1 ||
+ video_suspended_ || min_transmit_bitrate_kbps_ > 0;
+ if (!send_padding)
+ return 0;
+ time_of_last_frame_activity_ms = time_of_last_frame_activity_ms_;
+ min_transmit_bitrate_bps = 1000 * min_transmit_bitrate_kbps_;
+ bitrate_bps = last_observed_bitrate_bps_;
+ send_codec = encoder_config_;
+ }
+
+ bool video_is_suspended = vcm_->VideoSuspended();
+
+ // Find the max amount of padding we can allow ourselves to send at this
+ // point, based on which streams are currently active and what our current
+ // available bandwidth is.
+ int pad_up_to_bitrate_bps = 0;
+ if (send_codec.numberOfSimulcastStreams == 0) {
+ pad_up_to_bitrate_bps = send_codec.minBitrate * 1000;
+ } else {
+ SimulcastStream* stream_configs = send_codec.simulcastStream;
+ pad_up_to_bitrate_bps =
+ stream_configs[send_codec.numberOfSimulcastStreams - 1].minBitrate *
+ 1000;
+ for (int i = 0; i < send_codec.numberOfSimulcastStreams - 1; ++i) {
+ pad_up_to_bitrate_bps += stream_configs[i].targetBitrate * 1000;
+ }
+ }
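+  // Worked example (hypothetical numbers): with two simulcast streams
+  // configured as {min: 150, target: 500} kbps and {min: 600, target: 1200}
+  // kbps, the branch above yields stream0.target + stream1.min =
+  // 500 + 600 = 1100 kbps of allowed padding, before the caps below apply.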
+
+  // Disable padding if we're only sending one stream, video isn't suspended,
+  // and no min-transmit bitrate is in use (that case is applied later).
+ if (!video_is_suspended && send_codec.numberOfSimulcastStreams <= 1)
+ pad_up_to_bitrate_bps = 0;
+
+ // The amount of padding should decay to zero if no frames are being
+ // captured/encoded unless a min-transmit bitrate is used.
+ int64_t now_ms = TickTime::MillisecondTimestamp();
+ if (now_ms - time_of_last_frame_activity_ms > kStopPaddingThresholdMs)
+ pad_up_to_bitrate_bps = 0;
+
+ // Pad up to min bitrate.
+ if (pad_up_to_bitrate_bps < min_transmit_bitrate_bps)
+ pad_up_to_bitrate_bps = min_transmit_bitrate_bps;
+
+ // Padding may never exceed bitrate estimate.
+ if (pad_up_to_bitrate_bps > bitrate_bps)
+ pad_up_to_bitrate_bps = bitrate_bps;
+
+ return pad_up_to_bitrate_bps;
+}
+
+bool ViEEncoder::EncoderPaused() const {
+  // Pause video if explicitly paused by the caller, while the network is
+  // down, or while the pacer queue has grown too large in buffered mode.
+ if (encoder_paused_) {
+ return true;
+ }
+ if (target_delay_ms_ > 0) {
+ // Buffered mode.
+ // TODO(pwestin): Workaround until nack is configured as a time and not
+ // number of packets.
+ return pacer_->QueueInMs() >=
+ std::max(
+ static_cast<int>(target_delay_ms_ * kEncoderPausePacerMargin),
+ kMinPacingDelayMs);
+ }
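+    // Hypothetical example: with target_delay_ms_ = 200 ms and a pacer
+    // margin of 2 (the actual kEncoderPausePacerMargin constant is defined
+    // elsewhere in this patch), encoding pauses once the pacer queue holds
+    // 400 ms or more of data.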
+ if (pacer_->ExpectedQueueTimeMs() > PacedSender::kMaxQueueLengthMs) {
+ // Too much data in pacer queue, drop frame.
+ return true;
+ }
+ return !network_is_transmitting_;
+}
+
+void ViEEncoder::TraceFrameDropStart() {
+ // Start trace event only on the first frame after encoder is paused.
+ if (!encoder_paused_and_dropped_frame_) {
+ TRACE_EVENT_ASYNC_BEGIN0("webrtc", "EncoderPaused", this);
+ }
+ encoder_paused_and_dropped_frame_ = true;
+}
+
+void ViEEncoder::TraceFrameDropEnd() {
+ // End trace event on first frame after encoder resumes, if frame was dropped.
+ if (encoder_paused_and_dropped_frame_) {
+ TRACE_EVENT_ASYNC_END0("webrtc", "EncoderPaused", this);
+ }
+ encoder_paused_and_dropped_frame_ = false;
+}
+
+void ViEEncoder::DeliverFrame(VideoFrame video_frame) {
+ RTC_DCHECK(send_payload_router_ != NULL);
+ if (!send_payload_router_->active()) {
+    // We've paused or have no channels attached; don't waste resources on
+    // encoding.
+ return;
+ }
+ VideoCodecType codec_type;
+ {
+ CriticalSectionScoped cs(data_cs_.get());
+ time_of_last_frame_activity_ms_ = TickTime::MillisecondTimestamp();
+ if (EncoderPaused()) {
+ TraceFrameDropStart();
+ return;
+ }
+ TraceFrameDropEnd();
+ codec_type = encoder_config_.codecType;
+ }
+
+ TRACE_EVENT_ASYNC_STEP0("webrtc", "Video", video_frame.render_time_ms(),
+ "Encode");
+ const VideoFrame* frame_to_send = &video_frame;
+ // TODO(wuchengli): support texture frames.
+ if (video_frame.native_handle() == NULL) {
+ // Pass frame via preprocessor.
+ frame_to_send = vp_->PreprocessFrame(video_frame);
+ if (!frame_to_send) {
+      // The frame was dropped by the preprocessor, or an error occurred
+      // while processing it.
+ return;
+ }
+ }
+
+  // If a pre-encode FrameCallback is registered, hand it a deep copy of the
+  // frame so that any in-place modification doesn't touch the original.
+ VideoFrame copied_frame;
+ if (pre_encode_callback_) {
+ copied_frame.CopyFrame(*frame_to_send);
+ pre_encode_callback_->FrameCallback(&copied_frame);
+ frame_to_send = &copied_frame;
+ }
+
+ if (codec_type == webrtc::kVideoCodecVP8) {
+ webrtc::CodecSpecificInfo codec_specific_info;
+ codec_specific_info.codecType = webrtc::kVideoCodecVP8;
+ {
+ CriticalSectionScoped cs(data_cs_.get());
+ codec_specific_info.codecSpecific.VP8.hasReceivedRPSI =
+ has_received_rpsi_;
+ codec_specific_info.codecSpecific.VP8.hasReceivedSLI =
+ has_received_sli_;
+ codec_specific_info.codecSpecific.VP8.pictureIdRPSI =
+ picture_id_rpsi_;
+ codec_specific_info.codecSpecific.VP8.pictureIdSLI =
+ picture_id_sli_;
+ has_received_sli_ = false;
+ has_received_rpsi_ = false;
+ }
+
+ vcm_->AddVideoFrame(*frame_to_send, vp_->GetContentMetrics(),
+ &codec_specific_info);
+ return;
+ }
+ vcm_->AddVideoFrame(*frame_to_send);
+}
+
+int32_t ViEEncoder::SendKeyFrame() {
+ return vcm_->IntraFrameRequest(0);
+}
+
+uint32_t ViEEncoder::LastObservedBitrateBps() const {
+ CriticalSectionScoped cs(data_cs_.get());
+ return last_observed_bitrate_bps_;
+}
+
+int ViEEncoder::CodecTargetBitrate(uint32_t* bitrate) const {
+ if (vcm_->Bitrate(bitrate) != 0)
+ return -1;
+ return 0;
+}
+
+void ViEEncoder::SetProtectionMethod(bool nack, bool fec) {
+ // Set Video Protection for VCM.
+ VCMVideoProtection protection_mode;
+ if (fec) {
+ protection_mode =
+ nack ? webrtc::kProtectionNackFEC : kProtectionFEC;
+ } else {
+ protection_mode = nack ? kProtectionNack : kProtectionNone;
+ }
+ vcm_->SetVideoProtection(protection_mode, true);
+}
+
+void ViEEncoder::SetSenderBufferingMode(int target_delay_ms) {
+ {
+ CriticalSectionScoped cs(data_cs_.get());
+ target_delay_ms_ = target_delay_ms;
+ }
+ if (target_delay_ms > 0) {
+ // Disable external frame-droppers.
+ vcm_->EnableFrameDropper(false);
+ vp_->EnableTemporalDecimation(false);
+ } else {
+ // Real-time mode - enable frame droppers.
+ vp_->EnableTemporalDecimation(true);
+ vcm_->EnableFrameDropper(true);
+ }
+}
+
+void ViEEncoder::OnSetRates(uint32_t bitrate_bps, int framerate) {
+ if (stats_proxy_)
+ stats_proxy_->OnSetRates(bitrate_bps, framerate);
+}
+
+int32_t ViEEncoder::SendData(
+ const uint8_t payload_type,
+ const EncodedImage& encoded_image,
+ const webrtc::RTPFragmentationHeader& fragmentation_header,
+ const RTPVideoHeader* rtp_video_hdr) {
+ RTC_DCHECK(send_payload_router_ != NULL);
+
+ {
+ CriticalSectionScoped cs(data_cs_.get());
+ time_of_last_frame_activity_ms_ = TickTime::MillisecondTimestamp();
+ }
+
+ if (stats_proxy_ != NULL)
+ stats_proxy_->OnSendEncodedImage(encoded_image, rtp_video_hdr);
+
+ return send_payload_router_->RoutePayload(
+ encoded_image._frameType, payload_type, encoded_image._timeStamp,
+ encoded_image.capture_time_ms_, encoded_image._buffer,
+ encoded_image._length, &fragmentation_header, rtp_video_hdr)
+ ? 0
+ : -1;
+}
+
+void ViEEncoder::OnEncoderImplementationName(
+ const char* implementation_name) {
+ if (stats_proxy_)
+ stats_proxy_->OnEncoderImplementationName(implementation_name);
+}
+
+int32_t ViEEncoder::SendStatistics(const uint32_t bit_rate,
+ const uint32_t frame_rate) {
+ if (stats_proxy_)
+ stats_proxy_->OnOutgoingRate(frame_rate, bit_rate);
+ return 0;
+}
+
+void ViEEncoder::OnReceivedSLI(uint32_t /*ssrc*/,
+ uint8_t picture_id) {
+ CriticalSectionScoped cs(data_cs_.get());
+ picture_id_sli_ = picture_id;
+ has_received_sli_ = true;
+}
+
+void ViEEncoder::OnReceivedRPSI(uint32_t /*ssrc*/,
+ uint64_t picture_id) {
+ CriticalSectionScoped cs(data_cs_.get());
+ picture_id_rpsi_ = picture_id;
+ has_received_rpsi_ = true;
+}
+
+void ViEEncoder::OnReceivedIntraFrameRequest(uint32_t ssrc) {
+  // Key frame request from the remote side; signal the VCM.
+ TRACE_EVENT0("webrtc", "OnKeyFrameRequest");
+
+ int idx = 0;
+ {
+ CriticalSectionScoped cs(data_cs_.get());
+ auto stream_it = ssrc_streams_.find(ssrc);
+ if (stream_it == ssrc_streams_.end()) {
+ LOG_F(LS_WARNING) << "ssrc not found: " << ssrc << ", map size "
+ << ssrc_streams_.size();
+ return;
+ }
+ std::map<unsigned int, int64_t>::iterator time_it =
+ time_last_intra_request_ms_.find(ssrc);
+ if (time_it == time_last_intra_request_ms_.end()) {
+ time_last_intra_request_ms_[ssrc] = 0;
+ }
+
+ int64_t now = TickTime::MillisecondTimestamp();
+ if (time_last_intra_request_ms_[ssrc] + kMinKeyFrameRequestIntervalMs
+ > now) {
+ return;
+ }
+ time_last_intra_request_ms_[ssrc] = now;
+ idx = stream_it->second;
+ }
+ // Release the critsect before triggering key frame.
+ vcm_->IntraFrameRequest(idx);
+}
+
+void ViEEncoder::OnLocalSsrcChanged(uint32_t old_ssrc, uint32_t new_ssrc) {
+ CriticalSectionScoped cs(data_cs_.get());
+  auto it = ssrc_streams_.find(old_ssrc);
+ if (it == ssrc_streams_.end()) {
+ return;
+ }
+
+ ssrc_streams_[new_ssrc] = it->second;
+ ssrc_streams_.erase(it);
+
+ std::map<unsigned int, int64_t>::iterator time_it =
+ time_last_intra_request_ms_.find(old_ssrc);
+ int64_t last_intra_request_ms = 0;
+ if (time_it != time_last_intra_request_ms_.end()) {
+ last_intra_request_ms = time_it->second;
+ time_last_intra_request_ms_.erase(time_it);
+ }
+ time_last_intra_request_ms_[new_ssrc] = last_intra_request_ms;
+}
+
+void ViEEncoder::SetSsrcs(const std::vector<uint32_t>& ssrcs) {
+ CriticalSectionScoped cs(data_cs_.get());
+ ssrc_streams_.clear();
+ time_last_intra_request_ms_.clear();
+ int idx = 0;
+ for (uint32_t ssrc : ssrcs) {
+ ssrc_streams_[ssrc] = idx++;
+ }
+}
+
+void ViEEncoder::SetMinTransmitBitrate(int min_transmit_bitrate_kbps) {
+ assert(min_transmit_bitrate_kbps >= 0);
+ CriticalSectionScoped crit(data_cs_.get());
+ min_transmit_bitrate_kbps_ = min_transmit_bitrate_kbps;
+}
+
+// Called from ViEBitrateObserver.
+void ViEEncoder::OnNetworkChanged(uint32_t bitrate_bps,
+ uint8_t fraction_lost,
+ int64_t round_trip_time_ms) {
+  LOG(LS_VERBOSE) << "OnNetworkChanged, bitrate " << bitrate_bps
+ << " packet loss " << static_cast<int>(fraction_lost)
+ << " rtt " << round_trip_time_ms;
+ RTC_DCHECK(send_payload_router_ != NULL);
+ vcm_->SetChannelParameters(bitrate_bps, fraction_lost, round_trip_time_ms);
+ bool video_is_suspended = vcm_->VideoSuspended();
+ bool video_suspension_changed;
+ VideoCodec send_codec;
+ uint32_t first_ssrc;
+ {
+ CriticalSectionScoped cs(data_cs_.get());
+ last_observed_bitrate_bps_ = bitrate_bps;
+ video_suspension_changed = video_suspended_ != video_is_suspended;
+ video_suspended_ = video_is_suspended;
+ send_codec = encoder_config_;
+ first_ssrc = ssrc_streams_.begin()->first;
+ }
+
+ SimulcastStream* stream_configs = send_codec.simulcastStream;
+ // Allocate the bandwidth between the streams.
+ std::vector<uint32_t> stream_bitrates = AllocateStreamBitrates(
+ bitrate_bps, stream_configs, send_codec.numberOfSimulcastStreams);
+ send_payload_router_->SetTargetSendBitrates(stream_bitrates);
+
+ if (!video_suspension_changed)
+ return;
+ // Video suspend-state changed, inform codec observer.
+ LOG(LS_INFO) << "Video suspend state changed " << video_is_suspended
+ << " for ssrc " << first_ssrc;
+ if (stats_proxy_)
+ stats_proxy_->OnSuspendChange(video_is_suspended);
+}
+
+void ViEEncoder::SuspendBelowMinBitrate() {
+ vcm_->SuspendBelowMinBitrate();
+ bitrate_allocator_->EnforceMinBitrate(false);
+}
+
+void ViEEncoder::RegisterPostEncodeImageCallback(
+ EncodedImageCallback* post_encode_callback) {
+ vcm_->RegisterPostEncodeImageCallback(post_encode_callback);
+}
+
+QMVideoSettingsCallback::QMVideoSettingsCallback(VideoProcessing* vpm)
+ : vp_(vpm) {
+}
+
+QMVideoSettingsCallback::~QMVideoSettingsCallback() {
+}
+
+int32_t QMVideoSettingsCallback::SetVideoQMSettings(
+ const uint32_t frame_rate,
+ const uint32_t width,
+ const uint32_t height) {
+ return vp_->SetTargetResolution(width, height, frame_rate);
+}
+
+void QMVideoSettingsCallback::SetTargetFramerate(int frame_rate) {
+ vp_->SetTargetFramerate(frame_rate);
+}
+
+} // namespace webrtc
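
For reference, the header that follows documents a strict call order for
ViEEncoder. A minimal lifecycle sketch against those declarations, assuming
already-constructed collaborators (RunEncoderLifecycle and its parameters are
illustrative stand-ins, not part of the API):

    #include "webrtc/video/vie_encoder.h"

    void RunEncoderLifecycle(webrtc::ProcessThread* process_thread,
                             webrtc::PacedSender* pacer,
                             webrtc::BitrateAllocator* allocator,
                             rtc::scoped_refptr<webrtc::PayloadRouter> router,
                             webrtc::VCMProtectionCallback* protection_cb,
                             const webrtc::VideoCodec& codec) {
      webrtc::ViEEncoder encoder(/*number_of_cores=*/1, process_thread,
                                 /*stats_proxy=*/nullptr,
                                 /*pre_encode_callback=*/nullptr, pacer,
                                 allocator);
      if (!encoder.Init())
        return;
      // Must happen exactly once, before any frames are delivered.
      encoder.StartThreadsAndSetSharedMembers(router, protection_cb);
      encoder.SetEncoder(codec);
      // DeliverFrame() may now be called from the capture path.
      encoder.StopThreadsAndRemoveSharedMembers();
    }
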
diff --git a/webrtc/video/vie_encoder.h b/webrtc/video/vie_encoder.h
new file mode 100644
index 0000000000..a15fd8920b
--- /dev/null
+++ b/webrtc/video/vie_encoder.h
@@ -0,0 +1,196 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_VIE_ENCODER_H_
+#define WEBRTC_VIDEO_VIE_ENCODER_H_
+
+#include <map>
+#include <vector>
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/base/thread_annotations.h"
+#include "webrtc/call/bitrate_allocator.h"
+#include "webrtc/common_types.h"
+#include "webrtc/frame_callback.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "webrtc/modules/video_coding/include/video_coding_defines.h"
+#include "webrtc/modules/video_processing/include/video_processing.h"
+#include "webrtc/typedefs.h"
+#include "webrtc/video/video_capture_input.h"
+
+namespace webrtc {
+
+class BitrateAllocator;
+class BitrateObserver;
+class Config;
+class CriticalSectionWrapper;
+class EncodedImageCallback;
+class PacedSender;
+class PayloadRouter;
+class ProcessThread;
+class QMVideoSettingsCallback;
+class SendStatisticsProxy;
+class ViEBitrateObserver;
+class ViEEffectFilter;
+class VideoCodingModule;
+
+class ViEEncoder : public RtcpIntraFrameObserver,
+ public VideoEncoderRateObserver,
+ public VCMPacketizationCallback,
+ public VCMSendStatisticsCallback,
+ public VideoCaptureCallback {
+ public:
+ friend class ViEBitrateObserver;
+
+ ViEEncoder(uint32_t number_of_cores,
+ ProcessThread* module_process_thread,
+ SendStatisticsProxy* stats_proxy,
+ I420FrameCallback* pre_encode_callback,
+ PacedSender* pacer,
+ BitrateAllocator* bitrate_allocator);
+ ~ViEEncoder();
+
+ bool Init();
+
+  // This function must be called exactly once, before any frames are
+  // delivered.
+  // Ideally this would be done in Init, but the dependencies between
+  // ViEEncoder and ViEChannel make that hard to do cleanly.
+ void StartThreadsAndSetSharedMembers(
+ rtc::scoped_refptr<PayloadRouter> send_payload_router,
+ VCMProtectionCallback* vcm_protection_callback);
+
+ // This function must be called before the corresponding ViEChannel is
+ // deleted.
+ void StopThreadsAndRemoveSharedMembers();
+
+ void SetNetworkTransmissionState(bool is_transmitting);
+
+ // Returns the id of the owning channel.
+ int Owner() const;
+
+  // Drops incoming frames before they get to the encoder.
+ void Pause();
+ void Restart();
+
+ // Codec settings.
+ int32_t RegisterExternalEncoder(VideoEncoder* encoder,
+ uint8_t pl_type,
+ bool internal_source);
+ int32_t DeRegisterExternalEncoder(uint8_t pl_type);
+ int32_t SetEncoder(const VideoCodec& video_codec);
+
+  // Implements VideoCaptureCallback.
+ void DeliverFrame(VideoFrame video_frame) override;
+
+ int32_t SendKeyFrame();
+
+ uint32_t LastObservedBitrateBps() const;
+ int CodecTargetBitrate(uint32_t* bitrate) const;
+  // Loss protection. Must be called before SetEncoder() so that the max
+  // packet size is updated according to the protection overhead.
+ // TODO(pbos): Set protection method on construction or extract vcm_ outside
+ // this class and set it on construction there.
+ void SetProtectionMethod(bool nack, bool fec);
+
+ // Buffering mode.
+ void SetSenderBufferingMode(int target_delay_ms);
+
+ // Implements VideoEncoderRateObserver.
+ void OnSetRates(uint32_t bitrate_bps, int framerate) override;
+
+ // Implements VCMPacketizationCallback.
+ int32_t SendData(uint8_t payload_type,
+ const EncodedImage& encoded_image,
+ const RTPFragmentationHeader& fragmentation_header,
+ const RTPVideoHeader* rtp_video_hdr) override;
+ void OnEncoderImplementationName(const char* implementation_name) override;
+
+  // Implements VCMSendStatisticsCallback.
+ int32_t SendStatistics(const uint32_t bit_rate,
+ const uint32_t frame_rate) override;
+
+ // Implements RtcpIntraFrameObserver.
+ void OnReceivedIntraFrameRequest(uint32_t ssrc) override;
+ void OnReceivedSLI(uint32_t ssrc, uint8_t picture_id) override;
+ void OnReceivedRPSI(uint32_t ssrc, uint64_t picture_id) override;
+ void OnLocalSsrcChanged(uint32_t old_ssrc, uint32_t new_ssrc) override;
+
+ // Sets SSRCs for all streams.
+ void SetSsrcs(const std::vector<uint32_t>& ssrcs);
+
+ void SetMinTransmitBitrate(int min_transmit_bitrate_kbps);
+
+  // Lets the sender suspend video when the estimated rate drops below an
+  // internal threshold, and resume once the rate rises back above that
+  // threshold plus a hysteresis window (both managed by the VCM).
+ void SuspendBelowMinBitrate();
+
+ // New-style callbacks, used by VideoSendStream.
+ void RegisterPostEncodeImageCallback(
+ EncodedImageCallback* post_encode_callback);
+
+ int GetPaddingNeededBps() const;
+
+ protected:
+ // Called by BitrateObserver.
+ void OnNetworkChanged(uint32_t bitrate_bps,
+ uint8_t fraction_lost,
+ int64_t round_trip_time_ms);
+
+ private:
+ bool EncoderPaused() const EXCLUSIVE_LOCKS_REQUIRED(data_cs_);
+ void TraceFrameDropStart() EXCLUSIVE_LOCKS_REQUIRED(data_cs_);
+ void TraceFrameDropEnd() EXCLUSIVE_LOCKS_REQUIRED(data_cs_);
+
+ const uint32_t number_of_cores_;
+
+ const rtc::scoped_ptr<VideoProcessing> vp_;
+ const rtc::scoped_ptr<QMVideoSettingsCallback> qm_callback_;
+ const rtc::scoped_ptr<VideoCodingModule> vcm_;
+ rtc::scoped_refptr<PayloadRouter> send_payload_router_;
+
+ rtc::scoped_ptr<CriticalSectionWrapper> data_cs_;
+ rtc::scoped_ptr<BitrateObserver> bitrate_observer_;
+
+ SendStatisticsProxy* const stats_proxy_;
+ I420FrameCallback* const pre_encode_callback_;
+ PacedSender* const pacer_;
+ BitrateAllocator* const bitrate_allocator_;
+
+ // The time we last received an input frame or encoded frame. This is used to
+ // track when video is stopped long enough that we also want to stop sending
+ // padding.
+ int64_t time_of_last_frame_activity_ms_ GUARDED_BY(data_cs_);
+ VideoCodec encoder_config_ GUARDED_BY(data_cs_);
+ int min_transmit_bitrate_kbps_ GUARDED_BY(data_cs_);
+ uint32_t last_observed_bitrate_bps_ GUARDED_BY(data_cs_);
+ int target_delay_ms_ GUARDED_BY(data_cs_);
+ bool network_is_transmitting_ GUARDED_BY(data_cs_);
+ bool encoder_paused_ GUARDED_BY(data_cs_);
+ bool encoder_paused_and_dropped_frame_ GUARDED_BY(data_cs_);
+ std::map<unsigned int, int64_t> time_last_intra_request_ms_
+ GUARDED_BY(data_cs_);
+
+ ProcessThread* module_process_thread_;
+
+ bool has_received_sli_ GUARDED_BY(data_cs_);
+ uint8_t picture_id_sli_ GUARDED_BY(data_cs_);
+ bool has_received_rpsi_ GUARDED_BY(data_cs_);
+ uint64_t picture_id_rpsi_ GUARDED_BY(data_cs_);
+ std::map<uint32_t, int> ssrc_streams_ GUARDED_BY(data_cs_);
+
+ bool video_suspended_ GUARDED_BY(data_cs_);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_VIE_ENCODER_H_
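
The GUARDED_BY and EXCLUSIVE_LOCKS_REQUIRED macros used above come from
webrtc/base/thread_annotations.h and expand to Clang thread-safety attributes.
A self-contained sketch of the same pattern, with the macros inlined for
illustration (real WebRTC code uses the shared header, and whether a given
mutex type is annotated as a capability depends on the toolchain):

    // Build with: clang++ -std=c++11 -Wthread-safety -c example.cc
    #include <mutex>

    #define GUARDED_BY(x) __attribute__((guarded_by(x)))
    #define EXCLUSIVE_LOCKS_REQUIRED(x) \
        __attribute__((exclusive_locks_required(x)))

    class Counter {
     public:
      void Increment() {
        std::lock_guard<std::mutex> lock(mu_);
        IncrementLocked();
      }

     private:
      // The analyzer requires |mu_| to be held here, mirroring how
      // ViEEncoder::EncoderPaused() requires data_cs_.
      void IncrementLocked() EXCLUSIVE_LOCKS_REQUIRED(mu_) { ++value_; }

      std::mutex mu_;
      int value_ GUARDED_BY(mu_) = 0;
    };
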
diff --git a/webrtc/video/vie_receiver.cc b/webrtc/video/vie_receiver.cc
new file mode 100644
index 0000000000..4fb706c764
--- /dev/null
+++ b/webrtc/video/vie_receiver.cc
@@ -0,0 +1,483 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video/vie_receiver.h"
+
+#include <sstream>
+#include <vector>
+
+#include "webrtc/base/logging.h"
+#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
+#include "webrtc/modules/rtp_rtcp/include/fec_receiver.h"
+#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
+#include "webrtc/modules/rtp_rtcp/include/remote_ntp_time_estimator.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_cvo.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "webrtc/modules/video_coding/include/video_coding.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/metrics.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/system_wrappers/include/timestamp_extrapolator.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+namespace webrtc {
+
+static const int kPacketLogIntervalMs = 10000;
+
+ViEReceiver::ViEReceiver(VideoCodingModule* module_vcm,
+ RemoteBitrateEstimator* remote_bitrate_estimator,
+ RtpFeedback* rtp_feedback)
+ : receive_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+ clock_(Clock::GetRealTimeClock()),
+ rtp_header_parser_(RtpHeaderParser::Create()),
+ rtp_payload_registry_(
+ new RTPPayloadRegistry(RTPPayloadStrategy::CreateStrategy(false))),
+ rtp_receiver_(
+ RtpReceiver::CreateVideoReceiver(clock_,
+ this,
+ rtp_feedback,
+ rtp_payload_registry_.get())),
+ rtp_receive_statistics_(ReceiveStatistics::Create(clock_)),
+ fec_receiver_(FecReceiver::Create(this)),
+ rtp_rtcp_(NULL),
+ vcm_(module_vcm),
+ remote_bitrate_estimator_(remote_bitrate_estimator),
+ ntp_estimator_(new RemoteNtpTimeEstimator(clock_)),
+ receiving_(false),
+ restored_packet_in_use_(false),
+ receiving_ast_enabled_(false),
+ receiving_cvo_enabled_(false),
+ receiving_tsn_enabled_(false),
+ last_packet_log_ms_(-1) {
+ assert(remote_bitrate_estimator);
+}
+
+ViEReceiver::~ViEReceiver() {
+ UpdateHistograms();
+}
+
+void ViEReceiver::UpdateHistograms() {
+ FecPacketCounter counter = fec_receiver_->GetPacketCounter();
+ if (counter.num_packets > 0) {
+ RTC_HISTOGRAM_PERCENTAGE_SPARSE(
+ "WebRTC.Video.ReceivedFecPacketsInPercent",
+ static_cast<int>(counter.num_fec_packets * 100 / counter.num_packets));
+ }
+ if (counter.num_fec_packets > 0) {
+ RTC_HISTOGRAM_PERCENTAGE_SPARSE(
+ "WebRTC.Video.RecoveredMediaPacketsInPercentOfFec",
+ static_cast<int>(counter.num_recovered_packets * 100 /
+ counter.num_fec_packets));
+ }
+}
+
+bool ViEReceiver::SetReceiveCodec(const VideoCodec& video_codec) {
+ int8_t old_pltype = -1;
+ if (rtp_payload_registry_->ReceivePayloadType(video_codec.plName,
+ kVideoPayloadTypeFrequency,
+ 0,
+ video_codec.maxBitrate,
+ &old_pltype) != -1) {
+ rtp_payload_registry_->DeRegisterReceivePayload(old_pltype);
+ }
+
+ return RegisterPayload(video_codec);
+}
+
+bool ViEReceiver::RegisterPayload(const VideoCodec& video_codec) {
+ return rtp_receiver_->RegisterReceivePayload(video_codec.plName,
+ video_codec.plType,
+ kVideoPayloadTypeFrequency,
+ 0,
+ video_codec.maxBitrate) == 0;
+}
+
+void ViEReceiver::SetNackStatus(bool enable,
+ int max_nack_reordering_threshold) {
+ if (!enable) {
+ // Reset the threshold back to the lower default threshold when NACK is
+ // disabled since we no longer will be receiving retransmissions.
+ max_nack_reordering_threshold = kDefaultMaxReorderingThreshold;
+ }
+ rtp_receive_statistics_->SetMaxReorderingThreshold(
+ max_nack_reordering_threshold);
+ rtp_receiver_->SetNACKStatus(enable ? kNackRtcp : kNackOff);
+}
+
+void ViEReceiver::SetRtxPayloadType(int payload_type,
+ int associated_payload_type) {
+ rtp_payload_registry_->SetRtxPayloadType(payload_type,
+ associated_payload_type);
+}
+
+void ViEReceiver::SetUseRtxPayloadMappingOnRestore(bool val) {
+ rtp_payload_registry_->set_use_rtx_payload_mapping_on_restore(val);
+}
+
+void ViEReceiver::SetRtxSsrc(uint32_t ssrc) {
+ rtp_payload_registry_->SetRtxSsrc(ssrc);
+}
+
+bool ViEReceiver::GetRtxSsrc(uint32_t* ssrc) const {
+ return rtp_payload_registry_->GetRtxSsrc(ssrc);
+}
+
+bool ViEReceiver::IsFecEnabled() const {
+ return rtp_payload_registry_->ulpfec_payload_type() > -1;
+}
+
+uint32_t ViEReceiver::GetRemoteSsrc() const {
+ return rtp_receiver_->SSRC();
+}
+
+int ViEReceiver::GetCsrcs(uint32_t* csrcs) const {
+ return rtp_receiver_->CSRCs(csrcs);
+}
+
+void ViEReceiver::SetRtpRtcpModule(RtpRtcp* module) {
+ rtp_rtcp_ = module;
+}
+
+RtpReceiver* ViEReceiver::GetRtpReceiver() const {
+ return rtp_receiver_.get();
+}
+
+void ViEReceiver::RegisterRtpRtcpModules(
+ const std::vector<RtpRtcp*>& rtp_modules) {
+ CriticalSectionScoped cs(receive_cs_.get());
+ // Only change the "simulcast" modules, the base module can be accessed
+ // without a lock whereas the simulcast modules require locking as they can be
+ // changed in runtime.
+ rtp_rtcp_simulcast_ =
+ std::vector<RtpRtcp*>(rtp_modules.begin() + 1, rtp_modules.end());
+}
+
+bool ViEReceiver::SetReceiveTimestampOffsetStatus(bool enable, int id) {
+ if (enable) {
+ return rtp_header_parser_->RegisterRtpHeaderExtension(
+ kRtpExtensionTransmissionTimeOffset, id);
+ } else {
+ return rtp_header_parser_->DeregisterRtpHeaderExtension(
+ kRtpExtensionTransmissionTimeOffset);
+ }
+}
+
+bool ViEReceiver::SetReceiveAbsoluteSendTimeStatus(bool enable, int id) {
+ if (enable) {
+ if (rtp_header_parser_->RegisterRtpHeaderExtension(
+ kRtpExtensionAbsoluteSendTime, id)) {
+ receiving_ast_enabled_ = true;
+ return true;
+ } else {
+ return false;
+ }
+ } else {
+ receiving_ast_enabled_ = false;
+ return rtp_header_parser_->DeregisterRtpHeaderExtension(
+ kRtpExtensionAbsoluteSendTime);
+ }
+}
+
+bool ViEReceiver::SetReceiveVideoRotationStatus(bool enable, int id) {
+ if (enable) {
+ if (rtp_header_parser_->RegisterRtpHeaderExtension(
+ kRtpExtensionVideoRotation, id)) {
+ receiving_cvo_enabled_ = true;
+ return true;
+ } else {
+ return false;
+ }
+ } else {
+ receiving_cvo_enabled_ = false;
+ return rtp_header_parser_->DeregisterRtpHeaderExtension(
+ kRtpExtensionVideoRotation);
+ }
+}
+
+bool ViEReceiver::SetReceiveTransportSequenceNumber(bool enable, int id) {
+ if (enable) {
+ if (rtp_header_parser_->RegisterRtpHeaderExtension(
+ kRtpExtensionTransportSequenceNumber, id)) {
+ receiving_tsn_enabled_ = true;
+ return true;
+ } else {
+ return false;
+ }
+ } else {
+ receiving_tsn_enabled_ = false;
+ return rtp_header_parser_->DeregisterRtpHeaderExtension(
+ kRtpExtensionTransportSequenceNumber);
+ }
+}
+
+int ViEReceiver::ReceivedRTPPacket(const void* rtp_packet,
+ size_t rtp_packet_length,
+ const PacketTime& packet_time) {
+ return InsertRTPPacket(static_cast<const uint8_t*>(rtp_packet),
+ rtp_packet_length, packet_time);
+}
+
+int ViEReceiver::ReceivedRTCPPacket(const void* rtcp_packet,
+ size_t rtcp_packet_length) {
+ return InsertRTCPPacket(static_cast<const uint8_t*>(rtcp_packet),
+ rtcp_packet_length);
+}
+
+int32_t ViEReceiver::OnReceivedPayloadData(const uint8_t* payload_data,
+ const size_t payload_size,
+ const WebRtcRTPHeader* rtp_header) {
+ WebRtcRTPHeader rtp_header_with_ntp = *rtp_header;
+ rtp_header_with_ntp.ntp_time_ms =
+ ntp_estimator_->Estimate(rtp_header->header.timestamp);
+ if (vcm_->IncomingPacket(payload_data,
+ payload_size,
+ rtp_header_with_ntp) != 0) {
+    // TODO: Check whether this error needs handling beyond returning -1.
+ return -1;
+ }
+ return 0;
+}
+
+bool ViEReceiver::OnRecoveredPacket(const uint8_t* rtp_packet,
+ size_t rtp_packet_length) {
+ RTPHeader header;
+ if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length, &header)) {
+ return false;
+ }
+ header.payload_type_frequency = kVideoPayloadTypeFrequency;
+ bool in_order = IsPacketInOrder(header);
+ return ReceivePacket(rtp_packet, rtp_packet_length, header, in_order);
+}
+
+int ViEReceiver::InsertRTPPacket(const uint8_t* rtp_packet,
+ size_t rtp_packet_length,
+ const PacketTime& packet_time) {
+ {
+ CriticalSectionScoped cs(receive_cs_.get());
+ if (!receiving_) {
+ return -1;
+ }
+ }
+
+ RTPHeader header;
+ if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length,
+ &header)) {
+ return -1;
+ }
+ size_t payload_length = rtp_packet_length - header.headerLength;
+ int64_t arrival_time_ms;
+ int64_t now_ms = clock_->TimeInMilliseconds();
+ if (packet_time.timestamp != -1)
+ arrival_time_ms = (packet_time.timestamp + 500) / 1000;
+ else
+ arrival_time_ms = now_ms;
+
+ {
+ // Periodically log the RTP header of incoming packets.
+ CriticalSectionScoped cs(receive_cs_.get());
+ if (now_ms - last_packet_log_ms_ > kPacketLogIntervalMs) {
+ std::stringstream ss;
+ ss << "Packet received on SSRC: " << header.ssrc << " with payload type: "
+ << static_cast<int>(header.payloadType) << ", timestamp: "
+ << header.timestamp << ", sequence number: " << header.sequenceNumber
+ << ", arrival time: " << arrival_time_ms;
+ if (header.extension.hasTransmissionTimeOffset)
+ ss << ", toffset: " << header.extension.transmissionTimeOffset;
+ if (header.extension.hasAbsoluteSendTime)
+ ss << ", abs send time: " << header.extension.absoluteSendTime;
+ LOG(LS_INFO) << ss.str();
+ last_packet_log_ms_ = now_ms;
+ }
+ }
+
+ remote_bitrate_estimator_->IncomingPacket(arrival_time_ms, payload_length,
+ header, true);
+ header.payload_type_frequency = kVideoPayloadTypeFrequency;
+
+ bool in_order = IsPacketInOrder(header);
+ rtp_payload_registry_->SetIncomingPayloadType(header);
+ int ret = ReceivePacket(rtp_packet, rtp_packet_length, header, in_order)
+ ? 0
+ : -1;
+  // Update receive statistics after ReceivePacket: statistics are reset when
+  // the payload type changes, so updating afterwards ensures the first packet
+  // of a new payload type is still counted.
+ rtp_receive_statistics_->IncomingPacket(
+ header, rtp_packet_length, IsPacketRetransmitted(header, in_order));
+ return ret;
+}
+
+bool ViEReceiver::ReceivePacket(const uint8_t* packet,
+ size_t packet_length,
+ const RTPHeader& header,
+ bool in_order) {
+ if (rtp_payload_registry_->IsEncapsulated(header)) {
+ return ParseAndHandleEncapsulatingHeader(packet, packet_length, header);
+ }
+ const uint8_t* payload = packet + header.headerLength;
+ assert(packet_length >= header.headerLength);
+ size_t payload_length = packet_length - header.headerLength;
+ PayloadUnion payload_specific;
+ if (!rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
+ &payload_specific)) {
+ return false;
+ }
+ return rtp_receiver_->IncomingRtpPacket(header, payload, payload_length,
+ payload_specific, in_order);
+}
+
+bool ViEReceiver::ParseAndHandleEncapsulatingHeader(const uint8_t* packet,
+ size_t packet_length,
+ const RTPHeader& header) {
+ if (rtp_payload_registry_->IsRed(header)) {
+ int8_t ulpfec_pt = rtp_payload_registry_->ulpfec_payload_type();
+ if (packet[header.headerLength] == ulpfec_pt) {
+ rtp_receive_statistics_->FecPacketReceived(header, packet_length);
+ // Notify vcm about received FEC packets to avoid NACKing these packets.
+ NotifyReceiverOfFecPacket(header);
+ }
+ if (fec_receiver_->AddReceivedRedPacket(
+ header, packet, packet_length, ulpfec_pt) != 0) {
+ return false;
+ }
+ return fec_receiver_->ProcessReceivedFec() == 0;
+ } else if (rtp_payload_registry_->IsRtx(header)) {
+ if (header.headerLength + header.paddingLength == packet_length) {
+ // This is an empty packet and should be silently dropped before trying to
+ // parse the RTX header.
+ return true;
+ }
+ // Remove the RTX header and parse the original RTP header.
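+    // Per RFC 4588, the original sequence number is carried in the first two
+    // bytes of the RTX payload; RestoreOriginalPacket() below rewrites the
+    // header (SSRC, sequence number and, if mapped, payload type) to match
+    // the original media packet.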
+ if (packet_length < header.headerLength)
+ return false;
+ if (packet_length > sizeof(restored_packet_))
+ return false;
+ CriticalSectionScoped cs(receive_cs_.get());
+ if (restored_packet_in_use_) {
+ LOG(LS_WARNING) << "Multiple RTX headers detected, dropping packet.";
+ return false;
+ }
+ if (!rtp_payload_registry_->RestoreOriginalPacket(
+ restored_packet_, packet, &packet_length, rtp_receiver_->SSRC(),
+ header)) {
+ LOG(LS_WARNING) << "Incoming RTX packet: Invalid RTP header";
+ return false;
+ }
+ restored_packet_in_use_ = true;
+ bool ret = OnRecoveredPacket(restored_packet_, packet_length);
+ restored_packet_in_use_ = false;
+ return ret;
+ }
+ return false;
+}
+
+void ViEReceiver::NotifyReceiverOfFecPacket(const RTPHeader& header) {
+ int8_t last_media_payload_type =
+ rtp_payload_registry_->last_received_media_payload_type();
+ if (last_media_payload_type < 0) {
+ LOG(LS_WARNING) << "Failed to get last media payload type.";
+ return;
+ }
+ // Fake an empty media packet.
+ WebRtcRTPHeader rtp_header = {};
+ rtp_header.header = header;
+ rtp_header.header.payloadType = last_media_payload_type;
+ rtp_header.header.paddingLength = 0;
+ PayloadUnion payload_specific;
+ if (!rtp_payload_registry_->GetPayloadSpecifics(last_media_payload_type,
+ &payload_specific)) {
+ LOG(LS_WARNING) << "Failed to get payload specifics.";
+ return;
+ }
+ rtp_header.type.Video.codec = payload_specific.Video.videoCodecType;
+ rtp_header.type.Video.rotation = kVideoRotation_0;
+ if (header.extension.hasVideoRotation) {
+ rtp_header.type.Video.rotation =
+ ConvertCVOByteToVideoRotation(header.extension.videoRotation);
+ }
+ OnReceivedPayloadData(NULL, 0, &rtp_header);
+}
+
+int ViEReceiver::InsertRTCPPacket(const uint8_t* rtcp_packet,
+ size_t rtcp_packet_length) {
+ {
+ CriticalSectionScoped cs(receive_cs_.get());
+ if (!receiving_) {
+ return -1;
+ }
+
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_simulcast_)
+ rtp_rtcp->IncomingRtcpPacket(rtcp_packet, rtcp_packet_length);
+ }
+ assert(rtp_rtcp_); // Should be set by owner at construction time.
+ int ret = rtp_rtcp_->IncomingRtcpPacket(rtcp_packet, rtcp_packet_length);
+ if (ret != 0) {
+ return ret;
+ }
+
+ int64_t rtt = 0;
+ rtp_rtcp_->RTT(rtp_receiver_->SSRC(), &rtt, NULL, NULL, NULL);
+ if (rtt == 0) {
+ // Waiting for valid rtt.
+ return 0;
+ }
+ uint32_t ntp_secs = 0;
+ uint32_t ntp_frac = 0;
+ uint32_t rtp_timestamp = 0;
+ if (0 != rtp_rtcp_->RemoteNTP(&ntp_secs, &ntp_frac, NULL, NULL,
+ &rtp_timestamp)) {
+ // Waiting for RTCP.
+ return 0;
+ }
+ ntp_estimator_->UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp);
+
+ return 0;
+}
+
+void ViEReceiver::StartReceive() {
+ CriticalSectionScoped cs(receive_cs_.get());
+ receiving_ = true;
+}
+
+void ViEReceiver::StopReceive() {
+ CriticalSectionScoped cs(receive_cs_.get());
+ receiving_ = false;
+}
+
+ReceiveStatistics* ViEReceiver::GetReceiveStatistics() const {
+ return rtp_receive_statistics_.get();
+}
+
+bool ViEReceiver::IsPacketInOrder(const RTPHeader& header) const {
+ StreamStatistician* statistician =
+ rtp_receive_statistics_->GetStatistician(header.ssrc);
+ if (!statistician)
+ return false;
+ return statistician->IsPacketInOrder(header.sequenceNumber);
+}
+
+bool ViEReceiver::IsPacketRetransmitted(const RTPHeader& header,
+ bool in_order) const {
+ // Retransmissions are handled separately if RTX is enabled.
+ if (rtp_payload_registry_->RtxEnabled())
+ return false;
+ StreamStatistician* statistician =
+ rtp_receive_statistics_->GetStatistician(header.ssrc);
+ if (!statistician)
+ return false;
+ // Check if this is a retransmission.
+ int64_t min_rtt = 0;
+ rtp_rtcp_->RTT(rtp_receiver_->SSRC(), NULL, NULL, &min_rtt, NULL);
+ return !in_order &&
+ statistician->IsRetransmitOfOldPacket(header, min_rtt);
+}
+} // namespace webrtc
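
A short sketch of how an external transport typically feeds this class,
assuming a constructed ViEReceiver on which StartReceive() has been called
(DeliverFromTransport is an illustrative name, not part of the API):

    #include "webrtc/video/vie_receiver.h"

    // Returns true if the packet was accepted. A default-constructed
    // PacketTime carries timestamp -1, so InsertRTPPacket() above falls back
    // to the local clock for the arrival time.
    bool DeliverFromTransport(webrtc::ViEReceiver* receiver,
                              const uint8_t* packet,
                              size_t length,
                              bool is_rtcp) {
      if (is_rtcp)
        return receiver->ReceivedRTCPPacket(packet, length) == 0;
      return receiver->ReceivedRTPPacket(packet, length,
                                         webrtc::PacketTime()) == 0;
    }
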
diff --git a/webrtc/video/vie_receiver.h b/webrtc/video/vie_receiver.h
new file mode 100644
index 0000000000..8204888bbb
--- /dev/null
+++ b/webrtc/video/vie_receiver.h
@@ -0,0 +1,132 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_VIE_RECEIVER_H_
+#define WEBRTC_VIDEO_VIE_RECEIVER_H_
+
+#include <list>
+#include <vector>
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/engine_configurations.h"
+#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class FecReceiver;
+class RemoteNtpTimeEstimator;
+class ReceiveStatistics;
+class RemoteBitrateEstimator;
+class RtpHeaderParser;
+class RTPPayloadRegistry;
+class RtpReceiver;
+class RtpRtcp;
+class VideoCodingModule;
+struct ReceiveBandwidthEstimatorStats;
+
+class ViEReceiver : public RtpData {
+ public:
+ ViEReceiver(VideoCodingModule* module_vcm,
+ RemoteBitrateEstimator* remote_bitrate_estimator,
+ RtpFeedback* rtp_feedback);
+ ~ViEReceiver();
+
+ bool SetReceiveCodec(const VideoCodec& video_codec);
+ bool RegisterPayload(const VideoCodec& video_codec);
+
+ void SetNackStatus(bool enable, int max_nack_reordering_threshold);
+ void SetRtxPayloadType(int payload_type, int associated_payload_type);
+ // If set to true, the RTX payload type mapping supplied in
+ // |SetRtxPayloadType| will be used when restoring RTX packets. Without it,
+ // RTX packets will always be restored to the last non-RTX packet payload type
+ // received.
+ void SetUseRtxPayloadMappingOnRestore(bool val);
+ void SetRtxSsrc(uint32_t ssrc);
+ bool GetRtxSsrc(uint32_t* ssrc) const;
+
+ bool IsFecEnabled() const;
+
+ uint32_t GetRemoteSsrc() const;
+ int GetCsrcs(uint32_t* csrcs) const;
+
+ void SetRtpRtcpModule(RtpRtcp* module);
+
+ RtpReceiver* GetRtpReceiver() const;
+
+ void RegisterRtpRtcpModules(const std::vector<RtpRtcp*>& rtp_modules);
+
+ bool SetReceiveTimestampOffsetStatus(bool enable, int id);
+ bool SetReceiveAbsoluteSendTimeStatus(bool enable, int id);
+ bool SetReceiveVideoRotationStatus(bool enable, int id);
+ bool SetReceiveTransportSequenceNumber(bool enable, int id);
+
+ void StartReceive();
+ void StopReceive();
+
+ // Receives packets from external transport.
+ int ReceivedRTPPacket(const void* rtp_packet, size_t rtp_packet_length,
+ const PacketTime& packet_time);
+ int ReceivedRTCPPacket(const void* rtcp_packet, size_t rtcp_packet_length);
+
+ // Implements RtpData.
+ int32_t OnReceivedPayloadData(const uint8_t* payload_data,
+ const size_t payload_size,
+ const WebRtcRTPHeader* rtp_header) override;
+ bool OnRecoveredPacket(const uint8_t* packet, size_t packet_length) override;
+
+ ReceiveStatistics* GetReceiveStatistics() const;
+
+ private:
+ int InsertRTPPacket(const uint8_t* rtp_packet, size_t rtp_packet_length,
+ const PacketTime& packet_time);
+ bool ReceivePacket(const uint8_t* packet,
+ size_t packet_length,
+ const RTPHeader& header,
+ bool in_order);
+  // Parses and handles encapsulating headers, e.g. RTX and RED.
+  // This function assumes that it is called from only one thread.
+ bool ParseAndHandleEncapsulatingHeader(const uint8_t* packet,
+ size_t packet_length,
+ const RTPHeader& header);
+ void NotifyReceiverOfFecPacket(const RTPHeader& header);
+ int InsertRTCPPacket(const uint8_t* rtcp_packet, size_t rtcp_packet_length);
+ bool IsPacketInOrder(const RTPHeader& header) const;
+ bool IsPacketRetransmitted(const RTPHeader& header, bool in_order) const;
+ void UpdateHistograms();
+
+ rtc::scoped_ptr<CriticalSectionWrapper> receive_cs_;
+ Clock* clock_;
+ rtc::scoped_ptr<RtpHeaderParser> rtp_header_parser_;
+ rtc::scoped_ptr<RTPPayloadRegistry> rtp_payload_registry_;
+ rtc::scoped_ptr<RtpReceiver> rtp_receiver_;
+ const rtc::scoped_ptr<ReceiveStatistics> rtp_receive_statistics_;
+ rtc::scoped_ptr<FecReceiver> fec_receiver_;
+ RtpRtcp* rtp_rtcp_;
+ std::vector<RtpRtcp*> rtp_rtcp_simulcast_;
+ VideoCodingModule* vcm_;
+ RemoteBitrateEstimator* remote_bitrate_estimator_;
+
+ rtc::scoped_ptr<RemoteNtpTimeEstimator> ntp_estimator_;
+
+ bool receiving_;
+ uint8_t restored_packet_[IP_PACKET_SIZE];
+ bool restored_packet_in_use_;
+ bool receiving_ast_enabled_;
+ bool receiving_cvo_enabled_;
+ bool receiving_tsn_enabled_;
+ int64_t last_packet_log_ms_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_VIE_RECEIVER_H_
diff --git a/webrtc/video/vie_remb.cc b/webrtc/video/vie_remb.cc
new file mode 100644
index 0000000000..95c2f1e130
--- /dev/null
+++ b/webrtc/video/vie_remb.cc
@@ -0,0 +1,144 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video/vie_remb.h"
+
+#include <assert.h>
+
+#include <algorithm>
+
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "webrtc/modules/utility/include/process_thread.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+namespace webrtc {
+
+const int kRembSendIntervalMs = 200;
+
+// Percentage threshold deciding whether a new REMB should be sent right away.
+const unsigned int kSendThresholdPercent = 97;
+
+VieRemb::VieRemb(Clock* clock)
+ : clock_(clock),
+ list_crit_(CriticalSectionWrapper::CreateCriticalSection()),
+ last_remb_time_(clock_->TimeInMilliseconds()),
+ last_send_bitrate_(0),
+ bitrate_(0) {}
+
+VieRemb::~VieRemb() {}
+
+void VieRemb::AddReceiveChannel(RtpRtcp* rtp_rtcp) {
+ assert(rtp_rtcp);
+
+ CriticalSectionScoped cs(list_crit_.get());
+ if (std::find(receive_modules_.begin(), receive_modules_.end(), rtp_rtcp) !=
+ receive_modules_.end())
+ return;
+
+  // The module may not have a remote SSRC yet; track it in the receive list
+  // regardless.
+ receive_modules_.push_back(rtp_rtcp);
+}
+
+void VieRemb::RemoveReceiveChannel(RtpRtcp* rtp_rtcp) {
+ assert(rtp_rtcp);
+
+ CriticalSectionScoped cs(list_crit_.get());
+ for (RtpModules::iterator it = receive_modules_.begin();
+ it != receive_modules_.end(); ++it) {
+ if ((*it) == rtp_rtcp) {
+ receive_modules_.erase(it);
+ break;
+ }
+ }
+}
+
+void VieRemb::AddRembSender(RtpRtcp* rtp_rtcp) {
+ assert(rtp_rtcp);
+
+ CriticalSectionScoped cs(list_crit_.get());
+
+ // Verify this module hasn't been added earlier.
+ if (std::find(rtcp_sender_.begin(), rtcp_sender_.end(), rtp_rtcp) !=
+ rtcp_sender_.end())
+ return;
+ rtcp_sender_.push_back(rtp_rtcp);
+}
+
+void VieRemb::RemoveRembSender(RtpRtcp* rtp_rtcp) {
+ assert(rtp_rtcp);
+
+ CriticalSectionScoped cs(list_crit_.get());
+ for (RtpModules::iterator it = rtcp_sender_.begin();
+ it != rtcp_sender_.end(); ++it) {
+ if ((*it) == rtp_rtcp) {
+ rtcp_sender_.erase(it);
+ return;
+ }
+ }
+}
+
+bool VieRemb::InUse() const {
+ CriticalSectionScoped cs(list_crit_.get());
+ if (receive_modules_.empty() && rtcp_sender_.empty())
+ return false;
+ else
+ return true;
+}
+
+void VieRemb::OnReceiveBitrateChanged(const std::vector<unsigned int>& ssrcs,
+ unsigned int bitrate) {
+ list_crit_->Enter();
+ // If we already have an estimate, check if the new total estimate is below
+ // kSendThresholdPercent of the previous estimate.
+ if (last_send_bitrate_ > 0) {
+ unsigned int new_remb_bitrate = last_send_bitrate_ - bitrate_ + bitrate;
+
+ if (new_remb_bitrate < kSendThresholdPercent * last_send_bitrate_ / 100) {
+      // The new bitrate estimate is below kSendThresholdPercent percent of
+      // the last report; send a REMB right away.
+ last_remb_time_ = clock_->TimeInMilliseconds() - kRembSendIntervalMs;
+ }
+ }
+ bitrate_ = bitrate;
+
+ // Calculate total receive bitrate estimate.
+ int64_t now = clock_->TimeInMilliseconds();
+
+ if (now - last_remb_time_ < kRembSendIntervalMs) {
+ list_crit_->Leave();
+ return;
+ }
+ last_remb_time_ = now;
+
+ if (ssrcs.empty() || receive_modules_.empty()) {
+ list_crit_->Leave();
+ return;
+ }
+
+ // Send a REMB packet.
+ RtpRtcp* sender = NULL;
+ if (!rtcp_sender_.empty()) {
+ sender = rtcp_sender_.front();
+ } else {
+ sender = receive_modules_.front();
+ }
+ last_send_bitrate_ = bitrate_;
+
+ list_crit_->Leave();
+
+ if (sender) {
+ sender->SetREMBData(bitrate_, ssrcs);
+ }
+}
+
+} // namespace webrtc
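
The kSendThresholdPercent logic above means a drop of more than 3% in the
combined estimate bypasses the 200 ms send interval. A worked sketch with
hypothetical numbers (NewRembBitrate is illustrative, not part of the class):

    // With last_send_bitrate_ = 1000 and bitrate_ = 1000, a new estimate of
    // 965 gives new_remb_bitrate = 1000 - 1000 + 965 = 965, which is below
    // 97% of 1000 (= 970), so last_remb_time_ is rewound and the REMB goes
    // out on this very call instead of waiting for the interval to elapse.
    unsigned int NewRembBitrate(unsigned int last_send_bitrate,
                                unsigned int last_bitrate,
                                unsigned int new_estimate) {
      return last_send_bitrate - last_bitrate + new_estimate;
    }
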
diff --git a/webrtc/video/vie_remb.h b/webrtc/video/vie_remb.h
new file mode 100644
index 0000000000..2a3d916d6c
--- /dev/null
+++ b/webrtc/video/vie_remb.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_VIE_REMB_H_
+#define WEBRTC_VIDEO_VIE_REMB_H_
+
+#include <list>
+#include <utility>
+#include <vector>
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/include/module.h"
+#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class ProcessThread;
+class RtpRtcp;
+
+class VieRemb : public RemoteBitrateObserver {
+ public:
+ explicit VieRemb(Clock* clock);
+ ~VieRemb();
+
+ // Called to add a receive channel to include in the REMB packet.
+ void AddReceiveChannel(RtpRtcp* rtp_rtcp);
+
+ // Removes the specified channel from REMB estimate.
+ void RemoveReceiveChannel(RtpRtcp* rtp_rtcp);
+
+ // Called to add a module that can generate and send REMB RTCP.
+ void AddRembSender(RtpRtcp* rtp_rtcp);
+
+ // Removes a REMB RTCP sender.
+ void RemoveRembSender(RtpRtcp* rtp_rtcp);
+
+ // Returns true if the instance is in use, false otherwise.
+ bool InUse() const;
+
+ // Called every time there is a new bitrate estimate for a receive channel
+ // group. This call will trigger a new RTCP REMB packet if the bitrate
+ // estimate has decreased or if no RTCP REMB packet has been sent for
+ // a certain time interval.
+  // Implements RemoteBitrateObserver.
+ virtual void OnReceiveBitrateChanged(const std::vector<unsigned int>& ssrcs,
+ unsigned int bitrate);
+
+ private:
+ typedef std::list<RtpRtcp*> RtpModules;
+
+ Clock* const clock_;
+ rtc::scoped_ptr<CriticalSectionWrapper> list_crit_;
+
+ // The last time a REMB was sent.
+ int64_t last_remb_time_;
+ unsigned int last_send_bitrate_;
+
+ // All RtpRtcp modules to include in the REMB packet.
+ RtpModules receive_modules_;
+
+ // All modules that can send REMB RTCP.
+ RtpModules rtcp_sender_;
+
+ // The last bitrate update.
+ unsigned int bitrate_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_VIE_REMB_H_
diff --git a/webrtc/video/vie_remb_unittest.cc b/webrtc/video/vie_remb_unittest.cc
new file mode 100644
index 0000000000..a44d593b22
--- /dev/null
+++ b/webrtc/video/vie_remb_unittest.cc
@@ -0,0 +1,253 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+// This file includes unit tests for ViERemb.
+
+#include <vector>
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h"
+#include "webrtc/modules/utility/include/mock/mock_process_thread.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/video/vie_remb.h"
+
+using ::testing::_;
+using ::testing::AnyNumber;
+using ::testing::NiceMock;
+using ::testing::Return;
+
+namespace webrtc {
+
+class ViERembTest : public ::testing::Test {
+ public:
+ ViERembTest() : fake_clock_(12345) {}
+
+ protected:
+ virtual void SetUp() {
+ process_thread_.reset(new NiceMock<MockProcessThread>);
+ vie_remb_.reset(new VieRemb(&fake_clock_));
+ }
+ SimulatedClock fake_clock_;
+ rtc::scoped_ptr<MockProcessThread> process_thread_;
+ rtc::scoped_ptr<VieRemb> vie_remb_;
+};
+
+TEST_F(ViERembTest, OneModuleTestForSendingRemb) {
+ MockRtpRtcp rtp;
+ vie_remb_->AddReceiveChannel(&rtp);
+ vie_remb_->AddRembSender(&rtp);
+
+ const unsigned int bitrate_estimate = 456;
+ unsigned int ssrc = 1234;
+ std::vector<unsigned int> ssrcs(&ssrc, &ssrc + 1);
+
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+
+ fake_clock_.AdvanceTimeMilliseconds(1000);
+ EXPECT_CALL(rtp, SetREMBData(bitrate_estimate, ssrcs))
+ .Times(1);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+
+ // Lower bitrate to send another REMB packet.
+ EXPECT_CALL(rtp, SetREMBData(bitrate_estimate - 100, ssrcs))
+ .Times(1);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate - 100);
+
+ vie_remb_->RemoveReceiveChannel(&rtp);
+ vie_remb_->RemoveRembSender(&rtp);
+}
+
+TEST_F(ViERembTest, LowerEstimateToSendRemb) {
+ MockRtpRtcp rtp;
+ vie_remb_->AddReceiveChannel(&rtp);
+ vie_remb_->AddRembSender(&rtp);
+
+ unsigned int bitrate_estimate = 456;
+ unsigned int ssrc = 1234;
+ std::vector<unsigned int> ssrcs(&ssrc, &ssrc + 1);
+
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+ // Call OnReceiveBitrateChanged twice to get a first estimate.
+ fake_clock_.AdvanceTimeMilliseconds(1000);
+ EXPECT_CALL(rtp, SetREMBData(bitrate_estimate, ssrcs))
+ .Times(1);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+
+  // Lower the estimate by more than 3% to trigger a call to SetREMBData
+  // right away.
+ bitrate_estimate = bitrate_estimate - 100;
+ EXPECT_CALL(rtp, SetREMBData(bitrate_estimate, ssrcs))
+ .Times(1);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+}
+
+TEST_F(ViERembTest, VerifyIncreasingAndDecreasing) {
+ MockRtpRtcp rtp_0;
+ MockRtpRtcp rtp_1;
+ vie_remb_->AddReceiveChannel(&rtp_0);
+ vie_remb_->AddRembSender(&rtp_0);
+ vie_remb_->AddReceiveChannel(&rtp_1);
+
+ unsigned int bitrate_estimate[] = { 456, 789 };
+ unsigned int ssrc[] = { 1234, 5678 };
+ std::vector<unsigned int> ssrcs(ssrc, ssrc + sizeof(ssrc) / sizeof(ssrc[0]));
+
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate[0]);
+
+ // Call OnReceiveBitrateChanged twice to get a first estimate.
+ EXPECT_CALL(rtp_0, SetREMBData(bitrate_estimate[0], ssrcs))
+ .Times(1);
+ fake_clock_.AdvanceTimeMilliseconds(1000);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate[0]);
+
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate[1] + 100);
+
+ // Lower the estimate to trigger a callback.
+ EXPECT_CALL(rtp_0, SetREMBData(bitrate_estimate[1], ssrcs))
+ .Times(1);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate[1]);
+
+ vie_remb_->RemoveReceiveChannel(&rtp_0);
+ vie_remb_->RemoveRembSender(&rtp_0);
+ vie_remb_->RemoveReceiveChannel(&rtp_1);
+}
+
+TEST_F(ViERembTest, NoRembForIncreasedBitrate) {
+ MockRtpRtcp rtp_0;
+ MockRtpRtcp rtp_1;
+ vie_remb_->AddReceiveChannel(&rtp_0);
+ vie_remb_->AddRembSender(&rtp_0);
+ vie_remb_->AddReceiveChannel(&rtp_1);
+
+ unsigned int bitrate_estimate = 456;
+ unsigned int ssrc[] = { 1234, 5678 };
+ std::vector<unsigned int> ssrcs(ssrc, ssrc + sizeof(ssrc) / sizeof(ssrc[0]));
+
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+ // Call OnReceiveBitrateChanged twice to get a first estimate.
+ fake_clock_.AdvanceTimeMilliseconds(1000);
+ EXPECT_CALL(rtp_0, SetREMBData(bitrate_estimate, ssrcs))
+ .Times(1);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+
+ // Increased estimate shouldn't trigger a callback right away.
+ EXPECT_CALL(rtp_0, SetREMBData(_, _))
+ .Times(0);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate + 1);
+
+  // Decreasing the estimate by less than 3% shouldn't trigger a new callback.
+ EXPECT_CALL(rtp_0, SetREMBData(_, _))
+ .Times(0);
+ int lower_estimate = bitrate_estimate * 98 / 100;
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, lower_estimate);
+
+ vie_remb_->RemoveReceiveChannel(&rtp_1);
+ vie_remb_->RemoveReceiveChannel(&rtp_0);
+ vie_remb_->RemoveRembSender(&rtp_0);
+}
+
+TEST_F(ViERembTest, ChangeSendRtpModule) {
+ MockRtpRtcp rtp_0;
+ MockRtpRtcp rtp_1;
+ vie_remb_->AddReceiveChannel(&rtp_0);
+ vie_remb_->AddRembSender(&rtp_0);
+ vie_remb_->AddReceiveChannel(&rtp_1);
+
+ unsigned int bitrate_estimate = 456;
+ unsigned int ssrc[] = { 1234, 5678 };
+ std::vector<unsigned int> ssrcs(ssrc, ssrc + sizeof(ssrc) / sizeof(ssrc[0]));
+
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+ // Call OnReceiveBitrateChanged twice to get a first estimate.
+ fake_clock_.AdvanceTimeMilliseconds(1000);
+ EXPECT_CALL(rtp_0, SetREMBData(bitrate_estimate, ssrcs))
+ .Times(1);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+
+ // Decrease estimate to trigger a REMB.
+ bitrate_estimate = bitrate_estimate - 100;
+ EXPECT_CALL(rtp_0, SetREMBData(bitrate_estimate, ssrcs))
+ .Times(1);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+
+  // Remove the first sending module and register the second one; the REMB
+  // should now be sent on the second module.
+ vie_remb_->RemoveRembSender(&rtp_0);
+ vie_remb_->AddRembSender(&rtp_1);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+
+ bitrate_estimate = bitrate_estimate - 100;
+ EXPECT_CALL(rtp_1, SetREMBData(bitrate_estimate, ssrcs))
+ .Times(1);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+
+ vie_remb_->RemoveReceiveChannel(&rtp_0);
+ vie_remb_->RemoveReceiveChannel(&rtp_1);
+}
+
+TEST_F(ViERembTest, OnlyOneRembForDoubleProcess) {
+ MockRtpRtcp rtp;
+ unsigned int bitrate_estimate = 456;
+ unsigned int ssrc = 1234;
+ std::vector<unsigned int> ssrcs(&ssrc, &ssrc + 1);
+
+ vie_remb_->AddReceiveChannel(&rtp);
+ vie_remb_->AddRembSender(&rtp);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+ // Call OnReceiveBitrateChanged twice to get a first estimate.
+ fake_clock_.AdvanceTimeMilliseconds(1000);
+ EXPECT_CALL(rtp, SetREMBData(_, _))
+ .Times(1);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+
+ // Lower the estimate, should trigger a call to SetREMBData right away.
+ bitrate_estimate = bitrate_estimate - 100;
+ EXPECT_CALL(rtp, SetREMBData(bitrate_estimate, ssrcs))
+ .Times(1);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+
+ // Call OnReceiveBitrateChanged again, this should not trigger a new callback.
+ EXPECT_CALL(rtp, SetREMBData(_, _))
+ .Times(0);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+ vie_remb_->RemoveReceiveChannel(&rtp);
+ vie_remb_->RemoveRembSender(&rtp);
+}
+
+// Only register receiving modules and make sure we fall back to triggering a
+// REMB packet on one of them.
+TEST_F(ViERembTest, NoSendingRtpModule) {
+ MockRtpRtcp rtp;
+ vie_remb_->AddReceiveChannel(&rtp);
+
+ unsigned int bitrate_estimate = 456;
+ unsigned int ssrc = 1234;
+ std::vector<unsigned int> ssrcs(&ssrc, &ssrc + 1);
+
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+
+ // Call OnReceiveBitrateChanged twice to get a first estimate.
+ fake_clock_.AdvanceTimeMilliseconds(1000);
+ EXPECT_CALL(rtp, SetREMBData(_, _))
+ .Times(1);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+
+  // Lower the estimate to trigger a new REMB packet.
+ bitrate_estimate = bitrate_estimate - 100;
+ EXPECT_CALL(rtp, SetREMBData(_, _))
+ .Times(1);
+ vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+}
+
+} // namespace webrtc
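
A note on the recurring "call twice" pattern: VieRemb initializes
last_remb_time_ to the construction time, so an estimate arriving within
kRembSendIntervalMs (200 ms) is only recorded, never sent. Condensed, the
idiom these tests repeat looks like this (PrimeAndExpectRemb is an
illustrative helper, not part of the suite):

    void PrimeAndExpectRemb(webrtc::SimulatedClock* clock,
                            webrtc::VieRemb* remb,
                            webrtc::MockRtpRtcp* rtp,
                            const std::vector<unsigned int>& ssrcs,
                            unsigned int estimate) {
      remb->OnReceiveBitrateChanged(ssrcs, estimate);  // Recorded only.
      clock->AdvanceTimeMilliseconds(1000);            // Pass the interval.
      EXPECT_CALL(*rtp, SetREMBData(estimate, ssrcs)).Times(1);
      remb->OnReceiveBitrateChanged(ssrcs, estimate);  // REMB is sent.
    }
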
diff --git a/webrtc/video/vie_sync_module.cc b/webrtc/video/vie_sync_module.cc
new file mode 100644
index 0000000000..9ca9a9480e
--- /dev/null
+++ b/webrtc/video/vie_sync_module.cc
@@ -0,0 +1,174 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video/vie_sync_module.h"
+
+#include "webrtc/base/logging.h"
+#include "webrtc/base/trace_event.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "webrtc/modules/video_coding/include/video_coding.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/video/stream_synchronization.h"
+#include "webrtc/voice_engine/include/voe_video_sync.h"
+
+namespace webrtc {
+
+int UpdateMeasurements(StreamSynchronization::Measurements* stream,
+ const RtpRtcp& rtp_rtcp, const RtpReceiver& receiver) {
+ if (!receiver.Timestamp(&stream->latest_timestamp))
+ return -1;
+ if (!receiver.LastReceivedTimeMs(&stream->latest_receive_time_ms))
+ return -1;
+
+ uint32_t ntp_secs = 0;
+ uint32_t ntp_frac = 0;
+ uint32_t rtp_timestamp = 0;
+ if (0 != rtp_rtcp.RemoteNTP(&ntp_secs,
+ &ntp_frac,
+ NULL,
+ NULL,
+ &rtp_timestamp)) {
+ return -1;
+ }
+
+ bool new_rtcp_sr = false;
+ if (!UpdateRtcpList(
+ ntp_secs, ntp_frac, rtp_timestamp, &stream->rtcp, &new_rtcp_sr)) {
+ return -1;
+ }
+
+ return 0;
+}
+
+ViESyncModule::ViESyncModule(VideoCodingModule* vcm)
+ : data_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+ vcm_(vcm),
+ video_receiver_(NULL),
+ video_rtp_rtcp_(NULL),
+ voe_channel_id_(-1),
+ voe_sync_interface_(NULL),
+ last_sync_time_(TickTime::Now()),
+ sync_() {
+}
+
+ViESyncModule::~ViESyncModule() {
+}
+
+int ViESyncModule::ConfigureSync(int voe_channel_id,
+ VoEVideoSync* voe_sync_interface,
+ RtpRtcp* video_rtcp_module,
+ RtpReceiver* video_receiver) {
+ CriticalSectionScoped cs(data_cs_.get());
+ // Prevent expensive no-ops.
+ if (voe_channel_id_ == voe_channel_id &&
+ voe_sync_interface_ == voe_sync_interface &&
+ video_receiver_ == video_receiver &&
+ video_rtp_rtcp_ == video_rtcp_module) {
+ return 0;
+ }
+ voe_channel_id_ = voe_channel_id;
+ voe_sync_interface_ = voe_sync_interface;
+ video_receiver_ = video_receiver;
+ video_rtp_rtcp_ = video_rtcp_module;
+ sync_.reset(
+ new StreamSynchronization(video_rtp_rtcp_->SSRC(), voe_channel_id));
+
+ if (!voe_sync_interface) {
+ voe_channel_id_ = -1;
+ if (voe_channel_id >= 0) {
+    // Trying to set a voice channel but no interface exists.
+ return -1;
+ }
+ return 0;
+ }
+ return 0;
+}
+
+int ViESyncModule::VoiceChannel() {
+ return voe_channel_id_;
+}
+
+int64_t ViESyncModule::TimeUntilNextProcess() {
+ const int64_t kSyncIntervalMs = 1000;
+ return kSyncIntervalMs - (TickTime::Now() - last_sync_time_).Milliseconds();
+}
+
+int32_t ViESyncModule::Process() {
+ CriticalSectionScoped cs(data_cs_.get());
+ last_sync_time_ = TickTime::Now();
+
+ const int current_video_delay_ms = vcm_->Delay();
+
+ if (voe_channel_id_ == -1) {
+ return 0;
+ }
+ assert(video_rtp_rtcp_ && voe_sync_interface_);
+ assert(sync_.get());
+
+ int audio_jitter_buffer_delay_ms = 0;
+ int playout_buffer_delay_ms = 0;
+ if (voe_sync_interface_->GetDelayEstimate(voe_channel_id_,
+ &audio_jitter_buffer_delay_ms,
+ &playout_buffer_delay_ms) != 0) {
+ return 0;
+ }
+ const int current_audio_delay_ms = audio_jitter_buffer_delay_ms +
+ playout_buffer_delay_ms;
+
+ RtpRtcp* voice_rtp_rtcp = NULL;
+ RtpReceiver* voice_receiver = NULL;
+ if (0 != voe_sync_interface_->GetRtpRtcp(voe_channel_id_, &voice_rtp_rtcp,
+ &voice_receiver)) {
+ return 0;
+ }
+ assert(voice_rtp_rtcp);
+ assert(voice_receiver);
+
+ if (UpdateMeasurements(&video_measurement_, *video_rtp_rtcp_,
+ *video_receiver_) != 0) {
+ return 0;
+ }
+
+ if (UpdateMeasurements(&audio_measurement_, *voice_rtp_rtcp,
+ *voice_receiver) != 0) {
+ return 0;
+ }
+
+ int relative_delay_ms;
+ // Calculate how much later or earlier the audio stream is compared to video.
+ if (!sync_->ComputeRelativeDelay(audio_measurement_, video_measurement_,
+ &relative_delay_ms)) {
+ return 0;
+ }
+
+ TRACE_COUNTER1("webrtc", "SyncCurrentVideoDelay", current_video_delay_ms);
+ TRACE_COUNTER1("webrtc", "SyncCurrentAudioDelay", current_audio_delay_ms);
+ TRACE_COUNTER1("webrtc", "SyncRelativeDelay", relative_delay_ms);
+ int target_audio_delay_ms = 0;
+ int target_video_delay_ms = current_video_delay_ms;
+ // Calculate the necessary extra audio delay and desired total video
+ // delay to get the streams in sync.
+ if (!sync_->ComputeDelays(relative_delay_ms,
+ current_audio_delay_ms,
+ &target_audio_delay_ms,
+ &target_video_delay_ms)) {
+ return 0;
+ }
+
+ if (voe_sync_interface_->SetMinimumPlayoutDelay(
+ voe_channel_id_, target_audio_delay_ms) == -1) {
+ LOG(LS_ERROR) << "Error setting voice delay.";
+ }
+ vcm_->SetMinimumPlayoutDelay(target_video_delay_ms);
+ return 0;
+}
+
+} // namespace webrtc
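Process() above runs roughly once per second: it reads the audio path delay (jitter buffer plus playout buffer), asks StreamSynchronization for the relative delay between the streams, and then applies new minimum playout delays on both sides. The balancing policy itself lives in StreamSynchronization::ComputeDelays(), which is not part of this diff; the sketch below is only one plausible rule, and the sign convention (positive means audio plays out later than video) is an assumption:

// Hypothetical balancing rule, NOT the actual StreamSynchronization policy.
void BalanceDelays(int relative_delay_ms,
                   int current_audio_delay_ms,
                   int* target_audio_delay_ms,
                   int* target_video_delay_ms) {
  if (relative_delay_ms > 0) {
    // Audio lags: ask video to wait correspondingly longer.
    *target_video_delay_ms += relative_delay_ms;
  } else {
    // Video lags: ask audio to buffer the difference instead.
    *target_audio_delay_ms = current_audio_delay_ms - relative_delay_ms;
  }
}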
diff --git a/webrtc/video/vie_sync_module.h b/webrtc/video/vie_sync_module.h
new file mode 100644
index 0000000000..a9ad20a103
--- /dev/null
+++ b/webrtc/video/vie_sync_module.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// ViESyncModule is responsible for synchronizing audio and video for a given
+// VoE and ViE channel pair.
+
+#ifndef WEBRTC_VIDEO_VIE_SYNC_MODULE_H_
+#define WEBRTC_VIDEO_VIE_SYNC_MODULE_H_
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/modules/include/module.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/video/stream_synchronization.h"
+#include "webrtc/voice_engine/include/voe_video_sync.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class RtpRtcp;
+class VideoCodingModule;
+class ViEChannel;
+class VoEVideoSync;
+
+class ViESyncModule : public Module {
+ public:
+ explicit ViESyncModule(VideoCodingModule* vcm);
+ ~ViESyncModule();
+
+ int ConfigureSync(int voe_channel_id,
+ VoEVideoSync* voe_sync_interface,
+ RtpRtcp* video_rtcp_module,
+ RtpReceiver* video_receiver);
+
+ int VoiceChannel();
+
+ // Implements Module.
+ int64_t TimeUntilNextProcess() override;
+ int32_t Process() override;
+
+ private:
+ rtc::scoped_ptr<CriticalSectionWrapper> data_cs_;
+ VideoCodingModule* const vcm_;
+ RtpReceiver* video_receiver_;
+ RtpRtcp* video_rtp_rtcp_;
+ int voe_channel_id_;
+ VoEVideoSync* voe_sync_interface_;
+ TickTime last_sync_time_;
+ rtc::scoped_ptr<StreamSynchronization> sync_;
+ StreamSynchronization::Measurements audio_measurement_;
+ StreamSynchronization::Measurements video_measurement_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_VIE_SYNC_MODULE_H_
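For readers unfamiliar with the Module contract implemented above: a process thread polls TimeUntilNextProcess() and calls Process() once the returned wait reaches zero, which is how ViESyncModule gets its once-per-second sync pass (kSyncIntervalMs = 1000 in the .cc file). A hedged sketch of such a driver loop; SleepMs is a hypothetical helper and WebRTC's real ProcessThread is more involved:

#include <stdint.h>
#include "webrtc/modules/include/module.h"

void SleepMs(int64_t ms);  // Hypothetical; any platform sleep will do.

// Illustrative driver loop for a webrtc::Module.
void DriveModule(webrtc::Module* module, const volatile bool* stop) {
  while (!*stop) {
    int64_t wait_ms = module->TimeUntilNextProcess();
    if (wait_ms > 0)
      SleepMs(wait_ms);
    module->Process();
  }
}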
diff --git a/webrtc/video/webrtc_video.gypi b/webrtc/video/webrtc_video.gypi
index f9dbbce5aa..db8d5c7e89 100644
--- a/webrtc/video/webrtc_video.gypi
+++ b/webrtc/video/webrtc_video.gypi
@@ -24,12 +24,24 @@
'<(webrtc_root)/webrtc.gyp:rtc_event_log',
],
'webrtc_video_sources': [
+ 'video/call_stats.cc',
+ 'video/call_stats.h',
'video/encoded_frame_callback_adapter.cc',
'video/encoded_frame_callback_adapter.h',
+ 'video/encoder_state_feedback.cc',
+ 'video/encoder_state_feedback.h',
+ 'video/overuse_frame_detector.cc',
+ 'video/overuse_frame_detector.h',
+ 'video/payload_router.cc',
+ 'video/payload_router.h',
'video/receive_statistics_proxy.cc',
'video/receive_statistics_proxy.h',
+ 'video/report_block_stats.cc',
+ 'video/report_block_stats.h',
'video/send_statistics_proxy.cc',
'video/send_statistics_proxy.h',
+ 'video/stream_synchronization.cc',
+ 'video/stream_synchronization.h',
'video/video_capture_input.cc',
'video/video_capture_input.h',
'video/video_decoder.cc',
@@ -38,29 +50,16 @@
'video/video_receive_stream.h',
'video/video_send_stream.cc',
'video/video_send_stream.h',
- 'video_engine/call_stats.cc',
- 'video_engine/call_stats.h',
- 'video_engine/encoder_state_feedback.cc',
- 'video_engine/encoder_state_feedback.h',
- 'video_engine/overuse_frame_detector.cc',
- 'video_engine/overuse_frame_detector.h',
- 'video_engine/payload_router.cc',
- 'video_engine/payload_router.h',
- 'video_engine/report_block_stats.cc',
- 'video_engine/report_block_stats.h',
- 'video_engine/stream_synchronization.cc',
- 'video_engine/stream_synchronization.h',
- 'video_engine/vie_channel.cc',
- 'video_engine/vie_channel.h',
- 'video_engine/vie_defines.h',
- 'video_engine/vie_encoder.cc',
- 'video_engine/vie_encoder.h',
- 'video_engine/vie_receiver.cc',
- 'video_engine/vie_receiver.h',
- 'video_engine/vie_remb.cc',
- 'video_engine/vie_remb.h',
- 'video_engine/vie_sync_module.cc',
- 'video_engine/vie_sync_module.h',
+ 'video/vie_channel.cc',
+ 'video/vie_channel.h',
+ 'video/vie_encoder.cc',
+ 'video/vie_encoder.h',
+ 'video/vie_receiver.cc',
+ 'video/vie_receiver.h',
+ 'video/vie_remb.cc',
+ 'video/vie_remb.h',
+ 'video/vie_sync_module.cc',
+ 'video/vie_sync_module.h',
],
},
}
diff --git a/webrtc/video_decoder.h b/webrtc/video_decoder.h
index 2822677868..3cd94e8270 100644
--- a/webrtc/video_decoder.h
+++ b/webrtc/video_decoder.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_VIDEO_DECODER_H_
#define WEBRTC_VIDEO_DECODER_H_
+#include <string>
#include <vector>
#include "webrtc/common_types.h"
@@ -29,6 +30,16 @@ class DecodedImageCallback {
virtual ~DecodedImageCallback() {}
virtual int32_t Decoded(VideoFrame& decodedImage) = 0;
+ // Provides an alternative interface that allows the decoder to specify the
+ // decode time excluding waiting time for any previous pending frame to
+ // return. This is necessary for breaking positive feedback in the delay
+ // estimation when the decoder has a single output buffer.
+ // TODO(perkj): Remove default implementation when chromium has been updated.
+ virtual int32_t Decoded(VideoFrame& decodedImage, int64_t decode_time_ms) {
+    // The default implementation ignores the custom decode time value.
+ return Decoded(decodedImage);
+ }
+
virtual int32_t ReceivedDecodedReferenceFrame(const uint64_t pictureId) {
return -1;
}
@@ -63,6 +74,13 @@ class VideoDecoder {
virtual int32_t Release() = 0;
virtual int32_t Reset() = 0;
+
+  // Returns true if the decoder prefers to decode frames late.
+  // That is, it cannot decode an infinite number of frames before the decoded
+  // frame is consumed.
+ virtual bool PrefersLateDecoding() const { return true; }
+
+ virtual const char* ImplementationName() const { return "unknown"; }
};
// Class used to wrap external VideoDecoders to provide a fallback option on
@@ -87,6 +105,9 @@ class VideoDecoderSoftwareFallbackWrapper : public webrtc::VideoDecoder {
int32_t Release() override;
int32_t Reset() override;
+ bool PrefersLateDecoding() const override;
+
+ const char* ImplementationName() const override;
private:
bool InitFallbackDecoder();
@@ -96,6 +117,7 @@ class VideoDecoderSoftwareFallbackWrapper : public webrtc::VideoDecoder {
VideoCodec codec_settings_;
int32_t number_of_cores_;
+ std::string fallback_implementation_name_;
rtc::scoped_ptr<VideoDecoder> fallback_decoder_;
DecodedImageCallback* callback_;
};
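The new two-argument Decoded() overload lets a decoder report its pure decode time, so time spent waiting for a single output buffer to free up does not feed back into the delay estimate (the positive-feedback problem the comment describes). A sketch of a callback implementing both overloads; the class name and bodies are illustrative:

#include "webrtc/video_decoder.h"

// Hypothetical callback; only the overriding pattern matters here.
class ExampleDecodedImageCallback : public webrtc::DecodedImageCallback {
 public:
  int32_t Decoded(webrtc::VideoFrame& frame) override {
    return Decoded(frame, -1);  // No explicit decode time known.
  }
  int32_t Decoded(webrtc::VideoFrame& frame, int64_t decode_time_ms) override {
    // A real implementation would feed decode_time_ms (when >= 0) into the
    // timing estimator instead of a wall-clock measurement that includes
    // queueing delay.
    return 0;
  }
};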
diff --git a/webrtc/video_encoder.h b/webrtc/video_encoder.h
index f255336a25..9e7e4d7040 100644
--- a/webrtc/video_encoder.h
+++ b/webrtc/video_encoder.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_VIDEO_ENCODER_H_
#define WEBRTC_VIDEO_ENCODER_H_
+#include <string>
#include <vector>
#include "webrtc/common_types.h"
@@ -124,6 +125,7 @@ class VideoEncoder {
virtual void OnDroppedFrame() {}
virtual int GetTargetFramerate() { return -1; }
virtual bool SupportsNativeHandle() const { return false; }
+ virtual const char* ImplementationName() const { return "unknown"; }
};
// Class used to wrap external VideoEncoders to provide a fallback option on
@@ -151,6 +153,7 @@ class VideoEncoderSoftwareFallbackWrapper : public VideoEncoder {
void OnDroppedFrame() override;
int GetTargetFramerate() override;
bool SupportsNativeHandle() const override;
+ const char* ImplementationName() const override;
private:
bool InitFallbackEncoder();
@@ -175,6 +178,7 @@ class VideoEncoderSoftwareFallbackWrapper : public VideoEncoder {
webrtc::VideoEncoder* const encoder_;
rtc::scoped_ptr<webrtc::VideoEncoder> fallback_encoder_;
+ std::string fallback_implementation_name_;
EncodedImageCallback* callback_;
};
} // namespace webrtc
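ImplementationName() gives encoders (and, in the previous hunk, decoders) a human-readable identifier, defaulting to "unknown"; the fallback wrappers store a combined name in the new fallback_implementation_name_ members. Illustrative use, e.g. when filling in stats:

#include <cstdio>
#include "webrtc/video_encoder.h"

// Sketch: report which implementation is active behind an encoder reference.
void LogEncoderImplementation(const webrtc::VideoEncoder& encoder) {
  std::printf("encoder implementation: %s\n", encoder.ImplementationName());
}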
diff --git a/webrtc/video_engine/OWNERS b/webrtc/video_engine/OWNERS
deleted file mode 100644
index a8201f0252..0000000000
--- a/webrtc/video_engine/OWNERS
+++ /dev/null
@@ -1,13 +0,0 @@
-mflodman@webrtc.org
-pbos@webrtc.org
-stefan@webrtc.org
-
-per-file *.isolate=kjellander@webrtc.org
-
-# These are for the common case of adding or renaming files. If you're doing
-# structural changes, please get a review from a reviewer in this file.
-per-file *.gyp=*
-per-file *.gypi=*
-
-per-file BUILD.gn=kjellander@webrtc.org
-
diff --git a/webrtc/video_engine/call_stats.cc b/webrtc/video_engine/call_stats.cc
deleted file mode 100644
index 0b71cc346c..0000000000
--- a/webrtc/video_engine/call_stats.cc
+++ /dev/null
@@ -1,167 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/video_engine/call_stats.h"
-
-#include <assert.h>
-
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
-
-namespace webrtc {
-namespace {
-// Time interval for updating the observers.
-const int64_t kUpdateIntervalMs = 1000;
-// Weight factor to apply to the average rtt.
-const float kWeightFactor = 0.3f;
-
-void RemoveOldReports(int64_t now, std::list<CallStats::RttTime>* reports) {
- // An rtt report is considered valid for this long.
- const int64_t kRttTimeoutMs = 1500;
- while (!reports->empty() &&
- (now - reports->front().time) > kRttTimeoutMs) {
- reports->pop_front();
- }
-}
-
-int64_t GetMaxRttMs(std::list<CallStats::RttTime>* reports) {
- int64_t max_rtt_ms = 0;
- for (std::list<CallStats::RttTime>::const_iterator it = reports->begin();
- it != reports->end(); ++it) {
- max_rtt_ms = std::max(it->rtt, max_rtt_ms);
- }
- return max_rtt_ms;
-}
-
-int64_t GetAvgRttMs(std::list<CallStats::RttTime>* reports) {
- if (reports->empty()) {
- return 0;
- }
- int64_t sum = 0;
- for (std::list<CallStats::RttTime>::const_iterator it = reports->begin();
- it != reports->end(); ++it) {
- sum += it->rtt;
- }
- return sum / reports->size();
-}
-
-void UpdateAvgRttMs(std::list<CallStats::RttTime>* reports, int64_t* avg_rtt) {
- uint32_t cur_rtt_ms = GetAvgRttMs(reports);
- if (cur_rtt_ms == 0) {
- // Reset.
- *avg_rtt = 0;
- return;
- }
- if (*avg_rtt == 0) {
- // Initialize.
- *avg_rtt = cur_rtt_ms;
- return;
- }
- *avg_rtt = *avg_rtt * (1.0f - kWeightFactor) + cur_rtt_ms * kWeightFactor;
-}
-} // namespace
-
-class RtcpObserver : public RtcpRttStats {
- public:
- explicit RtcpObserver(CallStats* owner) : owner_(owner) {}
- virtual ~RtcpObserver() {}
-
- virtual void OnRttUpdate(int64_t rtt) {
- owner_->OnRttUpdate(rtt);
- }
-
- // Returns the average RTT.
- virtual int64_t LastProcessedRtt() const {
- return owner_->avg_rtt_ms();
- }
-
- private:
- CallStats* owner_;
-
- RTC_DISALLOW_COPY_AND_ASSIGN(RtcpObserver);
-};
-
-CallStats::CallStats()
- : crit_(CriticalSectionWrapper::CreateCriticalSection()),
- rtcp_rtt_stats_(new RtcpObserver(this)),
- last_process_time_(TickTime::MillisecondTimestamp()),
- max_rtt_ms_(0),
- avg_rtt_ms_(0) {
-}
-
-CallStats::~CallStats() {
- assert(observers_.empty());
-}
-
-int64_t CallStats::TimeUntilNextProcess() {
- return last_process_time_ + kUpdateIntervalMs -
- TickTime::MillisecondTimestamp();
-}
-
-int32_t CallStats::Process() {
- CriticalSectionScoped cs(crit_.get());
- int64_t now = TickTime::MillisecondTimestamp();
- if (now < last_process_time_ + kUpdateIntervalMs)
- return 0;
-
- last_process_time_ = now;
-
- RemoveOldReports(now, &reports_);
- max_rtt_ms_ = GetMaxRttMs(&reports_);
- UpdateAvgRttMs(&reports_, &avg_rtt_ms_);
-
- // If there is a valid rtt, update all observers with the max rtt.
- // TODO(asapersson): Consider changing this to report the average rtt.
- if (max_rtt_ms_ > 0) {
- for (std::list<CallStatsObserver*>::iterator it = observers_.begin();
- it != observers_.end(); ++it) {
- (*it)->OnRttUpdate(avg_rtt_ms_, max_rtt_ms_);
- }
- }
- return 0;
-}
-
-int64_t CallStats::avg_rtt_ms() const {
- CriticalSectionScoped cs(crit_.get());
- return avg_rtt_ms_;
-}
-
-RtcpRttStats* CallStats::rtcp_rtt_stats() const {
- return rtcp_rtt_stats_.get();
-}
-
-void CallStats::RegisterStatsObserver(CallStatsObserver* observer) {
- CriticalSectionScoped cs(crit_.get());
- for (std::list<CallStatsObserver*>::iterator it = observers_.begin();
- it != observers_.end(); ++it) {
- if (*it == observer)
- return;
- }
- observers_.push_back(observer);
-}
-
-void CallStats::DeregisterStatsObserver(CallStatsObserver* observer) {
- CriticalSectionScoped cs(crit_.get());
- for (std::list<CallStatsObserver*>::iterator it = observers_.begin();
- it != observers_.end(); ++it) {
- if (*it == observer) {
- observers_.erase(it);
- return;
- }
- }
-}
-
-void CallStats::OnRttUpdate(int64_t rtt) {
- CriticalSectionScoped cs(crit_.get());
- reports_.push_back(RttTime(rtt, TickTime::MillisecondTimestamp()));
-}
-
-} // namespace webrtc
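In the deleted file above, GetAvgRttMs() averages the reports still inside the 1500 ms validity window and UpdateAvgRttMs() folds that into a running value with kWeightFactor = 0.3; per the gypi hunk earlier, the file reappears as webrtc/video/call_stats.cc, so the logic moves rather than disappears. The fold step restated standalone:

#include <stdint.h>

// Restatement of UpdateAvgRttMs() with kWeightFactor = 0.3.
int64_t SmoothedRtt(int64_t prev_avg_ms, int64_t window_avg_ms) {
  const float kWeightFactor = 0.3f;
  if (window_avg_ms == 0)
    return 0;              // No valid reports: reset.
  if (prev_avg_ms == 0)
    return window_avg_ms;  // First valid report: initialize.
  return static_cast<int64_t>(prev_avg_ms * (1.0f - kWeightFactor) +
                              window_avg_ms * kWeightFactor);
}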
diff --git a/webrtc/video_engine/call_stats.h b/webrtc/video_engine/call_stats.h
deleted file mode 100644
index a17330a7c1..0000000000
--- a/webrtc/video_engine/call_stats.h
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_VIDEO_ENGINE_CALL_STATS_H_
-#define WEBRTC_VIDEO_ENGINE_CALL_STATS_H_
-
-#include <list>
-
-#include "webrtc/base/constructormagic.h"
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/interface/module.h"
-
-namespace webrtc {
-
-class CallStatsObserver;
-class CriticalSectionWrapper;
-class RtcpRttStats;
-
-// CallStats keeps track of statistics for a call.
-class CallStats : public Module {
- public:
- friend class RtcpObserver;
-
- CallStats();
- ~CallStats();
-
- // Implements Module, to use the process thread.
- int64_t TimeUntilNextProcess() override;
- int32_t Process() override;
-
- // Returns a RtcpRttStats to register at a statistics provider. The object
- // has the same lifetime as the CallStats instance.
- RtcpRttStats* rtcp_rtt_stats() const;
-
- // Registers/deregisters a new observer to receive statistics updates.
- void RegisterStatsObserver(CallStatsObserver* observer);
- void DeregisterStatsObserver(CallStatsObserver* observer);
-
- // Helper struct keeping track of the time a rtt value is reported.
- struct RttTime {
- RttTime(int64_t new_rtt, int64_t rtt_time)
- : rtt(new_rtt), time(rtt_time) {}
- const int64_t rtt;
- const int64_t time;
- };
-
- protected:
- void OnRttUpdate(int64_t rtt);
-
- int64_t avg_rtt_ms() const;
-
- private:
- // Protecting all members.
- rtc::scoped_ptr<CriticalSectionWrapper> crit_;
- // Observer receiving statistics updates.
- rtc::scoped_ptr<RtcpRttStats> rtcp_rtt_stats_;
- // The last time 'Process' resulted in a statistics update.
- int64_t last_process_time_;
- // The last RTT in the statistics update (zero if there is no valid estimate).
- int64_t max_rtt_ms_;
- int64_t avg_rtt_ms_;
-
- // All Rtt reports within valid time interval, oldest first.
- std::list<RttTime> reports_;
-
- // Observers getting stats reports.
- std::list<CallStatsObserver*> observers_;
-
- RTC_DISALLOW_COPY_AND_ASSIGN(CallStats);
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_VIDEO_ENGINE_CALL_STATS_H_
diff --git a/webrtc/video_engine/call_stats_unittest.cc b/webrtc/video_engine/call_stats_unittest.cc
deleted file mode 100644
index 4fb88df338..0000000000
--- a/webrtc/video_engine/call_stats_unittest.cc
+++ /dev/null
@@ -1,203 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "testing/gmock/include/gmock/gmock.h"
-#include "testing/gtest/include/gtest/gtest.h"
-
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
-#include "webrtc/video_engine/call_stats.h"
-
-using ::testing::_;
-using ::testing::AnyNumber;
-using ::testing::Return;
-
-namespace webrtc {
-
-class MockStatsObserver : public CallStatsObserver {
- public:
- MockStatsObserver() {}
- virtual ~MockStatsObserver() {}
-
- MOCK_METHOD2(OnRttUpdate, void(int64_t, int64_t));
-};
-
-class CallStatsTest : public ::testing::Test {
- protected:
- virtual void SetUp() {
- TickTime::UseFakeClock(12345);
- call_stats_.reset(new CallStats());
- }
- rtc::scoped_ptr<CallStats> call_stats_;
-};
-
-TEST_F(CallStatsTest, AddAndTriggerCallback) {
- MockStatsObserver stats_observer;
- RtcpRttStats* rtcp_rtt_stats = call_stats_->rtcp_rtt_stats();
- call_stats_->RegisterStatsObserver(&stats_observer);
- TickTime::AdvanceFakeClock(1000);
- EXPECT_EQ(0, rtcp_rtt_stats->LastProcessedRtt());
-
- const int64_t kRtt = 25;
- rtcp_rtt_stats->OnRttUpdate(kRtt);
- EXPECT_CALL(stats_observer, OnRttUpdate(kRtt, kRtt)).Times(1);
- call_stats_->Process();
- EXPECT_EQ(kRtt, rtcp_rtt_stats->LastProcessedRtt());
-
- const int64_t kRttTimeOutMs = 1500 + 10;
- TickTime::AdvanceFakeClock(kRttTimeOutMs);
- EXPECT_CALL(stats_observer, OnRttUpdate(_, _)).Times(0);
- call_stats_->Process();
- EXPECT_EQ(0, rtcp_rtt_stats->LastProcessedRtt());
-
- call_stats_->DeregisterStatsObserver(&stats_observer);
-}
-
-TEST_F(CallStatsTest, ProcessTime) {
- MockStatsObserver stats_observer;
- call_stats_->RegisterStatsObserver(&stats_observer);
- RtcpRttStats* rtcp_rtt_stats = call_stats_->rtcp_rtt_stats();
- rtcp_rtt_stats->OnRttUpdate(100);
-
- // Time isn't updated yet.
- EXPECT_CALL(stats_observer, OnRttUpdate(_, _)).Times(0);
- call_stats_->Process();
-
- // Advance clock and verify we get an update.
- TickTime::AdvanceFakeClock(1000);
- EXPECT_CALL(stats_observer, OnRttUpdate(_, _)).Times(1);
- call_stats_->Process();
-
- // Advance clock just too little to get an update.
- TickTime::AdvanceFakeClock(999);
- rtcp_rtt_stats->OnRttUpdate(100);
- EXPECT_CALL(stats_observer, OnRttUpdate(_, _)).Times(0);
- call_stats_->Process();
-
- // Advance enough to trigger a new update.
- TickTime::AdvanceFakeClock(1);
- EXPECT_CALL(stats_observer, OnRttUpdate(_, _)).Times(1);
- call_stats_->Process();
-
- call_stats_->DeregisterStatsObserver(&stats_observer);
-}
-
-// Verify all observers get correct estimates and observers can be added and
-// removed.
-TEST_F(CallStatsTest, MultipleObservers) {
- MockStatsObserver stats_observer_1;
- call_stats_->RegisterStatsObserver(&stats_observer_1);
- // Add the second observer twice; there should still be only one report to
- // the observer.
- MockStatsObserver stats_observer_2;
- call_stats_->RegisterStatsObserver(&stats_observer_2);
- call_stats_->RegisterStatsObserver(&stats_observer_2);
-
- RtcpRttStats* rtcp_rtt_stats = call_stats_->rtcp_rtt_stats();
- const int64_t kRtt = 100;
- rtcp_rtt_stats->OnRttUpdate(kRtt);
-
- // Verify both observers are updated.
- TickTime::AdvanceFakeClock(1000);
- EXPECT_CALL(stats_observer_1, OnRttUpdate(kRtt, kRtt)).Times(1);
- EXPECT_CALL(stats_observer_2, OnRttUpdate(kRtt, kRtt)).Times(1);
- call_stats_->Process();
-
- // Deregister the second observer and verify update is only sent to the first
- // observer.
- call_stats_->DeregisterStatsObserver(&stats_observer_2);
- rtcp_rtt_stats->OnRttUpdate(kRtt);
- TickTime::AdvanceFakeClock(1000);
- EXPECT_CALL(stats_observer_1, OnRttUpdate(kRtt, kRtt)).Times(1);
- EXPECT_CALL(stats_observer_2, OnRttUpdate(kRtt, kRtt)).Times(0);
- call_stats_->Process();
-
- // Deregister the first observer.
- call_stats_->DeregisterStatsObserver(&stats_observer_1);
- rtcp_rtt_stats->OnRttUpdate(kRtt);
- TickTime::AdvanceFakeClock(1000);
- EXPECT_CALL(stats_observer_1, OnRttUpdate(kRtt, kRtt)).Times(0);
- EXPECT_CALL(stats_observer_2, OnRttUpdate(kRtt, kRtt)).Times(0);
- call_stats_->Process();
-}
-
-// Verify increasing and decreasing rtt triggers callbacks with correct values.
-TEST_F(CallStatsTest, ChangeRtt) {
- MockStatsObserver stats_observer;
- call_stats_->RegisterStatsObserver(&stats_observer);
- RtcpRttStats* rtcp_rtt_stats = call_stats_->rtcp_rtt_stats();
-
- // Advance clock to be ready for an update.
- TickTime::AdvanceFakeClock(1000);
-
- // Set a first value and verify the callback is triggered.
- const int64_t kFirstRtt = 100;
- rtcp_rtt_stats->OnRttUpdate(kFirstRtt);
- EXPECT_CALL(stats_observer, OnRttUpdate(kFirstRtt, kFirstRtt)).Times(1);
- call_stats_->Process();
-
- // Increase rtt and verify the new value is reported.
- TickTime::AdvanceFakeClock(1000);
- const int64_t kHighRtt = kFirstRtt + 20;
- const int64_t kAvgRtt1 = 103;
- rtcp_rtt_stats->OnRttUpdate(kHighRtt);
- EXPECT_CALL(stats_observer, OnRttUpdate(kAvgRtt1, kHighRtt)).Times(1);
- call_stats_->Process();
-
- // Advance time enough for a new update, but not so much that the rtt
- // becomes invalid. Report a lower rtt and verify the old/high value is
- // still sent in the callback.
- TickTime::AdvanceFakeClock(1000);
- const int64_t kLowRtt = kFirstRtt - 20;
- const int64_t kAvgRtt2 = 102;
- rtcp_rtt_stats->OnRttUpdate(kLowRtt);
- EXPECT_CALL(stats_observer, OnRttUpdate(kAvgRtt2, kHighRtt)).Times(1);
- call_stats_->Process();
-
- // Advance time to make the high report invalid, the lower rtt should now be
- // in the callback.
- TickTime::AdvanceFakeClock(1000);
- const int64_t kAvgRtt3 = 95;
- EXPECT_CALL(stats_observer, OnRttUpdate(kAvgRtt3, kLowRtt)).Times(1);
- call_stats_->Process();
-
- call_stats_->DeregisterStatsObserver(&stats_observer);
-}
-
-TEST_F(CallStatsTest, LastProcessedRtt) {
- MockStatsObserver stats_observer;
- call_stats_->RegisterStatsObserver(&stats_observer);
- RtcpRttStats* rtcp_rtt_stats = call_stats_->rtcp_rtt_stats();
- TickTime::AdvanceFakeClock(1000);
-
- // Set initial values and verify that LastProcessedRtt initially returns the
- // average rtt.
- const int64_t kRttLow = 10;
- const int64_t kRttHigh = 30;
- const int64_t kAvgRtt = 20;
- rtcp_rtt_stats->OnRttUpdate(kRttLow);
- rtcp_rtt_stats->OnRttUpdate(kRttHigh);
- EXPECT_CALL(stats_observer, OnRttUpdate(kAvgRtt, kRttHigh)).Times(1);
- call_stats_->Process();
- EXPECT_EQ(kAvgRtt, rtcp_rtt_stats->LastProcessedRtt());
-
- // Update values and verify LastProcessedRtt.
- TickTime::AdvanceFakeClock(1000);
- rtcp_rtt_stats->OnRttUpdate(kRttLow);
- rtcp_rtt_stats->OnRttUpdate(kRttHigh);
- EXPECT_CALL(stats_observer, OnRttUpdate(kAvgRtt, kRttHigh)).Times(1);
- call_stats_->Process();
- EXPECT_EQ(kAvgRtt, rtcp_rtt_stats->LastProcessedRtt());
-
- call_stats_->DeregisterStatsObserver(&stats_observer);
-}
-
-} // namespace webrtc
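The constants kAvgRtt1/kAvgRtt2/kAvgRtt3 in ChangeRtt above follow from the 0.3 smoothing weight and the 1500 ms report timeout: after the first Process() the average is 100; the next window holds {100, 120} with mean 110, and 0.7 * 100 + 0.3 * 110 = 103; the 100 report then ages out, the window becomes {120, 80} with mean 100, and 0.7 * 103 + 0.3 * 100 = 102.1, truncated to 102; finally only {80} remains, and 0.7 * 102 + 0.3 * 80 = 95.4, truncated to 95.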
diff --git a/webrtc/video_engine/encoder_state_feedback.cc b/webrtc/video_engine/encoder_state_feedback.cc
deleted file mode 100644
index 1c376b2820..0000000000
--- a/webrtc/video_engine/encoder_state_feedback.cc
+++ /dev/null
@@ -1,124 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/video_engine/encoder_state_feedback.h"
-
-#include <assert.h>
-
-#include "webrtc/base/checks.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/video_engine/vie_encoder.h"
-
-namespace webrtc {
-
-// Helper class registered at the RTP module relaying callbacks to
-// EncoderStateFeedback.
-class EncoderStateFeedbackObserver : public RtcpIntraFrameObserver {
- public:
- explicit EncoderStateFeedbackObserver(EncoderStateFeedback* owner)
- : owner_(owner) {}
- ~EncoderStateFeedbackObserver() {}
-
- // Implements RtcpIntraFrameObserver.
- virtual void OnReceivedIntraFrameRequest(uint32_t ssrc) {
- owner_->OnReceivedIntraFrameRequest(ssrc);
- }
- virtual void OnReceivedSLI(uint32_t ssrc, uint8_t picture_id) {
- owner_->OnReceivedSLI(ssrc, picture_id);
- }
- virtual void OnReceivedRPSI(uint32_t ssrc, uint64_t picture_id) {
- owner_->OnReceivedRPSI(ssrc, picture_id);
- }
-
- virtual void OnLocalSsrcChanged(uint32_t old_ssrc, uint32_t new_ssrc) {
- owner_->OnLocalSsrcChanged(old_ssrc, new_ssrc);
- }
-
- private:
- EncoderStateFeedback* owner_;
-};
-
-EncoderStateFeedback::EncoderStateFeedback()
- : crit_(CriticalSectionWrapper::CreateCriticalSection()),
- observer_(new EncoderStateFeedbackObserver(this)) {}
-
-EncoderStateFeedback::~EncoderStateFeedback() {
- assert(encoders_.empty());
-}
-
-void EncoderStateFeedback::AddEncoder(const std::vector<uint32_t>& ssrcs,
- ViEEncoder* encoder) {
- RTC_DCHECK(!ssrcs.empty());
- CriticalSectionScoped lock(crit_.get());
- for (uint32_t ssrc : ssrcs) {
- RTC_DCHECK(encoders_.find(ssrc) == encoders_.end());
- encoders_[ssrc] = encoder;
- }
-}
-
-void EncoderStateFeedback::RemoveEncoder(const ViEEncoder* encoder) {
- CriticalSectionScoped lock(crit_.get());
- SsrcEncoderMap::iterator it = encoders_.begin();
- while (it != encoders_.end()) {
- if (it->second == encoder) {
- encoders_.erase(it++);
- } else {
- ++it;
- }
- }
-}
-
-RtcpIntraFrameObserver* EncoderStateFeedback::GetRtcpIntraFrameObserver() {
- return observer_.get();
-}
-
-void EncoderStateFeedback::OnReceivedIntraFrameRequest(uint32_t ssrc) {
- CriticalSectionScoped lock(crit_.get());
- SsrcEncoderMap::iterator it = encoders_.find(ssrc);
- if (it == encoders_.end())
- return;
-
- it->second->OnReceivedIntraFrameRequest(ssrc);
-}
-
-void EncoderStateFeedback::OnReceivedSLI(uint32_t ssrc, uint8_t picture_id) {
- CriticalSectionScoped lock(crit_.get());
- SsrcEncoderMap::iterator it = encoders_.find(ssrc);
- if (it == encoders_.end())
- return;
-
- it->second->OnReceivedSLI(ssrc, picture_id);
-}
-
-void EncoderStateFeedback::OnReceivedRPSI(uint32_t ssrc, uint64_t picture_id) {
- CriticalSectionScoped lock(crit_.get());
- SsrcEncoderMap::iterator it = encoders_.find(ssrc);
- if (it == encoders_.end())
- return;
-
- it->second->OnReceivedRPSI(ssrc, picture_id);
-}
-
-void EncoderStateFeedback::OnLocalSsrcChanged(uint32_t old_ssrc,
- uint32_t new_ssrc) {
- CriticalSectionScoped lock(crit_.get());
- SsrcEncoderMap::iterator it = encoders_.find(old_ssrc);
- if (it == encoders_.end() || encoders_.find(new_ssrc) != encoders_.end()) {
- return;
- }
-
- ViEEncoder* encoder = it->second;
- encoders_.erase(it);
- encoders_[new_ssrc] = encoder;
- encoder->OnLocalSsrcChanged(old_ssrc, new_ssrc);
-}
-
-} // namespace webrtc
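Everything in the deleted EncoderStateFeedback (which likewise moves under webrtc/video/ per the gypi hunk) reduces to one dispatch pattern: RTCP feedback arrives keyed by SSRC, and a locked map routes it to whichever ViEEncoder registered that SSRC. Condensed for one of the four callbacks, with locking elided:

#include <stdint.h>
#include <map>
#include "webrtc/video/vie_encoder.h"

// Condensed sketch of the SSRC dispatch; not the full class.
void DispatchIntraFrameRequest(
    const std::map<uint32_t, webrtc::ViEEncoder*>& encoders, uint32_t ssrc) {
  auto it = encoders.find(ssrc);
  if (it != encoders.end())
    it->second->OnReceivedIntraFrameRequest(ssrc);
}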
diff --git a/webrtc/video_engine/encoder_state_feedback.h b/webrtc/video_engine/encoder_state_feedback.h
deleted file mode 100644
index 51e9111dfe..0000000000
--- a/webrtc/video_engine/encoder_state_feedback.h
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// TODO(mflodman) ViEEncoder has a time check to not send key frames too often,
-// move the logic to this class.
-
-#ifndef WEBRTC_VIDEO_ENGINE_ENCODER_STATE_FEEDBACK_H_
-#define WEBRTC_VIDEO_ENGINE_ENCODER_STATE_FEEDBACK_H_
-
-#include <map>
-#include <vector>
-
-#include "webrtc/base/constructormagic.h"
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-class CriticalSectionWrapper;
-class EncoderStateFeedbackObserver;
-class RtcpIntraFrameObserver;
-class ViEEncoder;
-
-class EncoderStateFeedback {
- public:
- friend class EncoderStateFeedbackObserver;
-
- EncoderStateFeedback();
- ~EncoderStateFeedback();
-
- // Adds an encoder to receive feedback for a set of SSRCs.
- void AddEncoder(const std::vector<uint32_t>& ssrc, ViEEncoder* encoder);
-
- // Removes a registered ViEEncoder.
- void RemoveEncoder(const ViEEncoder* encoder);
-
- // Returns an observer to register at the requesting class. The observer has
- // the same lifetime as the EncoderStateFeedback instance.
- RtcpIntraFrameObserver* GetRtcpIntraFrameObserver();
-
- protected:
- // Called by EncoderStateFeedbackObserver when a new key frame is requested.
- void OnReceivedIntraFrameRequest(uint32_t ssrc);
- void OnReceivedSLI(uint32_t ssrc, uint8_t picture_id);
- void OnReceivedRPSI(uint32_t ssrc, uint64_t picture_id);
- void OnLocalSsrcChanged(uint32_t old_ssrc, uint32_t new_ssrc);
-
- private:
- typedef std::map<uint32_t, ViEEncoder*> SsrcEncoderMap;
-
- rtc::scoped_ptr<CriticalSectionWrapper> crit_;
-
- // Instance registered at the class requesting new key frames.
- rtc::scoped_ptr<EncoderStateFeedbackObserver> observer_;
-
- // Maps a unique ssrc to the given encoder.
- SsrcEncoderMap encoders_;
-
- RTC_DISALLOW_COPY_AND_ASSIGN(EncoderStateFeedback);
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_VIDEO_ENGINE_ENCODER_STATE_FEEDBACK_H_
diff --git a/webrtc/video_engine/encoder_state_feedback_unittest.cc b/webrtc/video_engine/encoder_state_feedback_unittest.cc
deleted file mode 100644
index 9787acc144..0000000000
--- a/webrtc/video_engine/encoder_state_feedback_unittest.cc
+++ /dev/null
@@ -1,143 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-
-// This file includes unit tests for EncoderStateFeedback.
-#include "webrtc/video_engine/encoder_state_feedback.h"
-
-#include "testing/gmock/include/gmock/gmock.h"
-#include "testing/gtest/include/gtest/gtest.h"
-
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/common.h"
-#include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"
-#include "webrtc/modules/pacing/include/paced_sender.h"
-#include "webrtc/modules/pacing/include/packet_router.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
-#include "webrtc/modules/utility/interface/mock/mock_process_thread.h"
-#include "webrtc/video_engine/payload_router.h"
-#include "webrtc/video_engine/vie_encoder.h"
-
-using ::testing::NiceMock;
-
-namespace webrtc {
-
-class MockVieEncoder : public ViEEncoder {
- public:
- explicit MockVieEncoder(ProcessThread* process_thread, PacedSender* pacer)
- : ViEEncoder(1, process_thread, nullptr, nullptr, pacer, nullptr) {}
- ~MockVieEncoder() {}
-
- MOCK_METHOD1(OnReceivedIntraFrameRequest,
- void(uint32_t));
- MOCK_METHOD2(OnReceivedSLI,
- void(uint32_t ssrc, uint8_t picture_id));
- MOCK_METHOD2(OnReceivedRPSI,
- void(uint32_t ssrc, uint64_t picture_id));
- MOCK_METHOD2(OnLocalSsrcChanged,
- void(uint32_t old_ssrc, uint32_t new_ssrc));
-};
-
-class VieKeyRequestTest : public ::testing::Test {
- protected:
- VieKeyRequestTest()
- : pacer_(Clock::GetRealTimeClock(),
- &router_,
- BitrateController::kDefaultStartBitrateKbps,
- PacedSender::kDefaultPaceMultiplier *
- BitrateController::kDefaultStartBitrateKbps,
- 0) {}
- virtual void SetUp() {
- process_thread_.reset(new NiceMock<MockProcessThread>);
- encoder_state_feedback_.reset(new EncoderStateFeedback());
- }
- rtc::scoped_ptr<MockProcessThread> process_thread_;
- rtc::scoped_ptr<EncoderStateFeedback> encoder_state_feedback_;
- PacketRouter router_;
- PacedSender pacer_;
-};
-
-TEST_F(VieKeyRequestTest, CreateAndTriggerRequests) {
- const int ssrc = 1234;
- MockVieEncoder encoder(process_thread_.get(), &pacer_);
- encoder_state_feedback_->AddEncoder(std::vector<uint32_t>(1, ssrc), &encoder);
-
- EXPECT_CALL(encoder, OnReceivedIntraFrameRequest(ssrc))
- .Times(1);
- encoder_state_feedback_->GetRtcpIntraFrameObserver()->
- OnReceivedIntraFrameRequest(ssrc);
-
- const uint8_t sli_picture_id = 3;
- EXPECT_CALL(encoder, OnReceivedSLI(ssrc, sli_picture_id))
- .Times(1);
- encoder_state_feedback_->GetRtcpIntraFrameObserver()->OnReceivedSLI(
- ssrc, sli_picture_id);
-
- const uint64_t rpsi_picture_id = 9;
- EXPECT_CALL(encoder, OnReceivedRPSI(ssrc, rpsi_picture_id))
- .Times(1);
- encoder_state_feedback_->GetRtcpIntraFrameObserver()->OnReceivedRPSI(
- ssrc, rpsi_picture_id);
-
- encoder_state_feedback_->RemoveEncoder(&encoder);
-}
-
-// Register multiple encoders and make sure the request is relayed to correct
-// ViEEncoder.
-TEST_F(VieKeyRequestTest, MultipleEncoders) {
- const int ssrc_1 = 1234;
- const int ssrc_2 = 5678;
- MockVieEncoder encoder_1(process_thread_.get(), &pacer_);
- MockVieEncoder encoder_2(process_thread_.get(), &pacer_);
- encoder_state_feedback_->AddEncoder(std::vector<uint32_t>(1, ssrc_1),
- &encoder_1);
- encoder_state_feedback_->AddEncoder(std::vector<uint32_t>(1, ssrc_2),
- &encoder_2);
-
- EXPECT_CALL(encoder_1, OnReceivedIntraFrameRequest(ssrc_1))
- .Times(1);
- EXPECT_CALL(encoder_2, OnReceivedIntraFrameRequest(ssrc_2))
- .Times(1);
- encoder_state_feedback_->GetRtcpIntraFrameObserver()->
- OnReceivedIntraFrameRequest(ssrc_1);
- encoder_state_feedback_->GetRtcpIntraFrameObserver()->
- OnReceivedIntraFrameRequest(ssrc_2);
-
- const uint8_t sli_pid_1 = 3;
- const uint8_t sli_pid_2 = 4;
- EXPECT_CALL(encoder_1, OnReceivedSLI(ssrc_1, sli_pid_1))
- .Times(1);
- EXPECT_CALL(encoder_2, OnReceivedSLI(ssrc_2, sli_pid_2))
- .Times(1);
- encoder_state_feedback_->GetRtcpIntraFrameObserver()->OnReceivedSLI(
- ssrc_1, sli_pid_1);
- encoder_state_feedback_->GetRtcpIntraFrameObserver()->OnReceivedSLI(
- ssrc_2, sli_pid_2);
-
- const uint64_t rpsi_pid_1 = 9;
- const uint64_t rpsi_pid_2 = 10;
- EXPECT_CALL(encoder_1, OnReceivedRPSI(ssrc_1, rpsi_pid_1))
- .Times(1);
- EXPECT_CALL(encoder_2, OnReceivedRPSI(ssrc_2, rpsi_pid_2))
- .Times(1);
- encoder_state_feedback_->GetRtcpIntraFrameObserver()->OnReceivedRPSI(
- ssrc_1, rpsi_pid_1);
- encoder_state_feedback_->GetRtcpIntraFrameObserver()->OnReceivedRPSI(
- ssrc_2, rpsi_pid_2);
-
- encoder_state_feedback_->RemoveEncoder(&encoder_1);
- EXPECT_CALL(encoder_2, OnReceivedIntraFrameRequest(ssrc_2))
- .Times(1);
- encoder_state_feedback_->GetRtcpIntraFrameObserver()->
- OnReceivedIntraFrameRequest(ssrc_2);
- encoder_state_feedback_->RemoveEncoder(&encoder_2);
-}
-
-} // namespace webrtc
diff --git a/webrtc/video_engine/overuse_frame_detector.cc b/webrtc/video_engine/overuse_frame_detector.cc
deleted file mode 100644
index 47a6e496b9..0000000000
--- a/webrtc/video_engine/overuse_frame_detector.cc
+++ /dev/null
@@ -1,422 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/video_engine/overuse_frame_detector.h"
-
-#include <assert.h>
-#include <math.h>
-
-#include <algorithm>
-#include <list>
-#include <map>
-
-#include "webrtc/base/checks.h"
-#include "webrtc/base/exp_filter.h"
-#include "webrtc/base/logging.h"
-#include "webrtc/system_wrappers/include/clock.h"
-
-namespace webrtc {
-
-namespace {
-const int64_t kProcessIntervalMs = 5000;
-
-// Delay between consecutive rampups. (Used for quick recovery.)
-const int kQuickRampUpDelayMs = 10 * 1000;
-// Delay between rampup attempts. Initially uses standard, scales up to max.
-const int kStandardRampUpDelayMs = 40 * 1000;
-const int kMaxRampUpDelayMs = 240 * 1000;
-// Exponential back-off factor, to prevent annoying up-down behaviour.
-const double kRampUpBackoffFactor = 2.0;
-
-// Max number of overuses detected before always applying the rampup delay.
-const int kMaxOverusesBeforeApplyRampupDelay = 4;
-
-// The maximum exponent to use in VCMExpFilter.
-const float kSampleDiffMs = 33.0f;
-const float kMaxExp = 7.0f;
-
-} // namespace
-
-// Class for calculating the average encode time.
-class OveruseFrameDetector::EncodeTimeAvg {
- public:
- EncodeTimeAvg()
- : kWeightFactor(0.5f),
- kInitialAvgEncodeTimeMs(5.0f),
- filtered_encode_time_ms_(new rtc::ExpFilter(kWeightFactor)) {
- filtered_encode_time_ms_->Apply(1.0f, kInitialAvgEncodeTimeMs);
- }
- ~EncodeTimeAvg() {}
-
- void AddSample(float encode_time_ms, int64_t diff_last_sample_ms) {
- float exp = diff_last_sample_ms / kSampleDiffMs;
- exp = std::min(exp, kMaxExp);
- filtered_encode_time_ms_->Apply(exp, encode_time_ms);
- }
-
- int Value() const {
- return static_cast<int>(filtered_encode_time_ms_->filtered() + 0.5);
- }
-
- private:
- const float kWeightFactor;
- const float kInitialAvgEncodeTimeMs;
- rtc::scoped_ptr<rtc::ExpFilter> filtered_encode_time_ms_;
-};
-
-// Class for calculating the processing usage on the send-side (the average
-// processing time of a frame divided by the average time difference between
-// captured frames).
-class OveruseFrameDetector::SendProcessingUsage {
- public:
- explicit SendProcessingUsage(const CpuOveruseOptions& options)
- : kWeightFactorFrameDiff(0.998f),
- kWeightFactorProcessing(0.995f),
- kInitialSampleDiffMs(40.0f),
- kMaxSampleDiffMs(45.0f),
- count_(0),
- options_(options),
- filtered_processing_ms_(new rtc::ExpFilter(kWeightFactorProcessing)),
- filtered_frame_diff_ms_(new rtc::ExpFilter(kWeightFactorFrameDiff)) {
- Reset();
- }
- ~SendProcessingUsage() {}
-
- void Reset() {
- count_ = 0;
- filtered_frame_diff_ms_->Reset(kWeightFactorFrameDiff);
- filtered_frame_diff_ms_->Apply(1.0f, kInitialSampleDiffMs);
- filtered_processing_ms_->Reset(kWeightFactorProcessing);
- filtered_processing_ms_->Apply(1.0f, InitialProcessingMs());
- }
-
- void AddCaptureSample(float sample_ms) {
- float exp = sample_ms / kSampleDiffMs;
- exp = std::min(exp, kMaxExp);
- filtered_frame_diff_ms_->Apply(exp, sample_ms);
- }
-
- void AddSample(float processing_ms, int64_t diff_last_sample_ms) {
- ++count_;
- float exp = diff_last_sample_ms / kSampleDiffMs;
- exp = std::min(exp, kMaxExp);
- filtered_processing_ms_->Apply(exp, processing_ms);
- }
-
- int Value() const {
- if (count_ < static_cast<uint32_t>(options_.min_frame_samples)) {
- return static_cast<int>(InitialUsageInPercent() + 0.5f);
- }
- float frame_diff_ms = std::max(filtered_frame_diff_ms_->filtered(), 1.0f);
- frame_diff_ms = std::min(frame_diff_ms, kMaxSampleDiffMs);
- float encode_usage_percent =
- 100.0f * filtered_processing_ms_->filtered() / frame_diff_ms;
- return static_cast<int>(encode_usage_percent + 0.5);
- }
-
- private:
- float InitialUsageInPercent() const {
- // Start in between the underuse and overuse threshold.
- return (options_.low_encode_usage_threshold_percent +
- options_.high_encode_usage_threshold_percent) / 2.0f;
- }
-
- float InitialProcessingMs() const {
- return InitialUsageInPercent() * kInitialSampleDiffMs / 100;
- }
-
- const float kWeightFactorFrameDiff;
- const float kWeightFactorProcessing;
- const float kInitialSampleDiffMs;
- const float kMaxSampleDiffMs;
- uint64_t count_;
- const CpuOveruseOptions options_;
- rtc::scoped_ptr<rtc::ExpFilter> filtered_processing_ms_;
- rtc::scoped_ptr<rtc::ExpFilter> filtered_frame_diff_ms_;
-};
-
-// Class for calculating the processing time of frames.
-class OveruseFrameDetector::FrameQueue {
- public:
- FrameQueue() : last_processing_time_ms_(-1) {}
- ~FrameQueue() {}
-
- // Called when a frame is captured.
- // Starts the measuring of the processing time of the frame.
- void Start(int64_t capture_time, int64_t now) {
- const size_t kMaxSize = 90; // Allows for processing time of 1.5s at 60fps.
- if (frame_times_.size() > kMaxSize) {
- LOG(LS_WARNING) << "Max size reached, removed oldest frame.";
- frame_times_.erase(frame_times_.begin());
- }
- if (frame_times_.find(capture_time) != frame_times_.end()) {
- // Frame should not exist.
- assert(false);
- return;
- }
- frame_times_[capture_time] = now;
- }
-
- // Called when the processing of a frame has finished.
- // Returns the processing time of the frame.
- int End(int64_t capture_time, int64_t now) {
- std::map<int64_t, int64_t>::iterator it = frame_times_.find(capture_time);
- if (it == frame_times_.end()) {
- return -1;
- }
- // Remove any old frames up to current.
- // Old frames have been skipped by the capture process thread.
- // TODO(asapersson): Consider measuring time from first frame in list.
- last_processing_time_ms_ = now - (*it).second;
- frame_times_.erase(frame_times_.begin(), ++it);
- return last_processing_time_ms_;
- }
-
- void Reset() { frame_times_.clear(); }
- int NumFrames() const { return static_cast<int>(frame_times_.size()); }
- int last_processing_time_ms() const { return last_processing_time_ms_; }
-
- private:
- // Captured frames mapped by the capture time.
- std::map<int64_t, int64_t> frame_times_;
- int last_processing_time_ms_;
-};
-
-
-OveruseFrameDetector::OveruseFrameDetector(
- Clock* clock,
- const CpuOveruseOptions& options,
- CpuOveruseObserver* observer,
- CpuOveruseMetricsObserver* metrics_observer)
- : options_(options),
- observer_(observer),
- metrics_observer_(metrics_observer),
- clock_(clock),
- num_process_times_(0),
- last_capture_time_(0),
- num_pixels_(0),
- next_process_time_(clock_->TimeInMilliseconds()),
- last_overuse_time_(0),
- checks_above_threshold_(0),
- num_overuse_detections_(0),
- last_rampup_time_(0),
- in_quick_rampup_(false),
- current_rampup_delay_ms_(kStandardRampUpDelayMs),
- last_encode_sample_ms_(0),
- last_sample_time_ms_(0),
- encode_time_(new EncodeTimeAvg()),
- usage_(new SendProcessingUsage(options)),
- frame_queue_(new FrameQueue()) {
- RTC_DCHECK(metrics_observer != nullptr);
- // Make sure stats are initially up-to-date. This simplifies unit testing
- // since we don't have to trigger an update using one of the methods which
- // would also alter the overuse state.
- UpdateCpuOveruseMetrics();
- processing_thread_.DetachFromThread();
-}
-
-OveruseFrameDetector::~OveruseFrameDetector() {
-}
-
-int OveruseFrameDetector::LastProcessingTimeMs() const {
- rtc::CritScope cs(&crit_);
- return frame_queue_->last_processing_time_ms();
-}
-
-int OveruseFrameDetector::FramesInQueue() const {
- rtc::CritScope cs(&crit_);
- return frame_queue_->NumFrames();
-}
-
-void OveruseFrameDetector::UpdateCpuOveruseMetrics() {
- metrics_.avg_encode_time_ms = encode_time_->Value();
- metrics_.encode_usage_percent = usage_->Value();
-
- metrics_observer_->CpuOveruseMetricsUpdated(metrics_);
-}
-
-int64_t OveruseFrameDetector::TimeUntilNextProcess() {
- RTC_DCHECK(processing_thread_.CalledOnValidThread());
- return next_process_time_ - clock_->TimeInMilliseconds();
-}
-
-bool OveruseFrameDetector::FrameSizeChanged(int num_pixels) const {
- if (num_pixels != num_pixels_) {
- return true;
- }
- return false;
-}
-
-bool OveruseFrameDetector::FrameTimeoutDetected(int64_t now) const {
- if (last_capture_time_ == 0) {
- return false;
- }
- return (now - last_capture_time_) > options_.frame_timeout_interval_ms;
-}
-
-void OveruseFrameDetector::ResetAll(int num_pixels) {
- num_pixels_ = num_pixels;
- usage_->Reset();
- frame_queue_->Reset();
- last_capture_time_ = 0;
- num_process_times_ = 0;
- UpdateCpuOveruseMetrics();
-}
-
-void OveruseFrameDetector::FrameCaptured(int width,
- int height,
- int64_t capture_time_ms) {
- rtc::CritScope cs(&crit_);
-
- int64_t now = clock_->TimeInMilliseconds();
- if (FrameSizeChanged(width * height) || FrameTimeoutDetected(now)) {
- ResetAll(width * height);
- }
-
- if (last_capture_time_ != 0)
- usage_->AddCaptureSample(now - last_capture_time_);
-
- last_capture_time_ = now;
-
- if (options_.enable_extended_processing_usage) {
- frame_queue_->Start(capture_time_ms, now);
- }
-}
-
-void OveruseFrameDetector::FrameEncoded(int encode_time_ms) {
- rtc::CritScope cs(&crit_);
- int64_t now = clock_->TimeInMilliseconds();
- if (last_encode_sample_ms_ != 0) {
- int64_t diff_ms = now - last_encode_sample_ms_;
- encode_time_->AddSample(encode_time_ms, diff_ms);
- }
- last_encode_sample_ms_ = now;
-
- if (!options_.enable_extended_processing_usage) {
- AddProcessingTime(encode_time_ms);
- }
- UpdateCpuOveruseMetrics();
-}
-
-void OveruseFrameDetector::FrameSent(int64_t capture_time_ms) {
- rtc::CritScope cs(&crit_);
- if (!options_.enable_extended_processing_usage) {
- return;
- }
- int delay_ms = frame_queue_->End(capture_time_ms,
- clock_->TimeInMilliseconds());
- if (delay_ms > 0) {
- AddProcessingTime(delay_ms);
- }
- UpdateCpuOveruseMetrics();
-}
-
-void OveruseFrameDetector::AddProcessingTime(int elapsed_ms) {
- int64_t now = clock_->TimeInMilliseconds();
- if (last_sample_time_ms_ != 0) {
- int64_t diff_ms = now - last_sample_time_ms_;
- usage_->AddSample(elapsed_ms, diff_ms);
- }
- last_sample_time_ms_ = now;
-}
-
-int32_t OveruseFrameDetector::Process() {
- RTC_DCHECK(processing_thread_.CalledOnValidThread());
-
- int64_t now = clock_->TimeInMilliseconds();
-
- // Used to protect against Process() being called too often.
- if (now < next_process_time_)
- return 0;
-
- next_process_time_ = now + kProcessIntervalMs;
-
- CpuOveruseMetrics current_metrics;
- {
- rtc::CritScope cs(&crit_);
- ++num_process_times_;
-
- current_metrics = metrics_;
- if (num_process_times_ <= options_.min_process_count)
- return 0;
- }
-
- if (IsOverusing(current_metrics)) {
-    // If the last thing we did was ramp up and we now have to back down, check
-    // whether this peak was short. If so, back off to avoid oscillating around
-    // this load, since the system doesn't seem to handle it.
- bool check_for_backoff = last_rampup_time_ > last_overuse_time_;
- if (check_for_backoff) {
- if (now - last_rampup_time_ < kStandardRampUpDelayMs ||
- num_overuse_detections_ > kMaxOverusesBeforeApplyRampupDelay) {
- // Going up was not ok for very long, back off.
- current_rampup_delay_ms_ *= kRampUpBackoffFactor;
- if (current_rampup_delay_ms_ > kMaxRampUpDelayMs)
- current_rampup_delay_ms_ = kMaxRampUpDelayMs;
- } else {
- // Not currently backing off, reset rampup delay.
- current_rampup_delay_ms_ = kStandardRampUpDelayMs;
- }
- }
-
- last_overuse_time_ = now;
- in_quick_rampup_ = false;
- checks_above_threshold_ = 0;
- ++num_overuse_detections_;
-
- if (observer_ != NULL)
- observer_->OveruseDetected();
- } else if (IsUnderusing(current_metrics, now)) {
- last_rampup_time_ = now;
- in_quick_rampup_ = true;
-
- if (observer_ != NULL)
- observer_->NormalUsage();
- }
-
- int rampup_delay =
- in_quick_rampup_ ? kQuickRampUpDelayMs : current_rampup_delay_ms_;
-
- LOG(LS_VERBOSE) << " Frame stats: "
- << " encode usage " << current_metrics.encode_usage_percent
- << " overuse detections " << num_overuse_detections_
- << " rampup delay " << rampup_delay;
-
- return 0;
-}
-
-bool OveruseFrameDetector::IsOverusing(const CpuOveruseMetrics& metrics) {
- bool overusing = false;
- if (options_.enable_encode_usage_method) {
- overusing = metrics.encode_usage_percent >=
- options_.high_encode_usage_threshold_percent;
- }
- if (overusing) {
- ++checks_above_threshold_;
- } else {
- checks_above_threshold_ = 0;
- }
- return checks_above_threshold_ >= options_.high_threshold_consecutive_count;
-}
-
-bool OveruseFrameDetector::IsUnderusing(const CpuOveruseMetrics& metrics,
- int64_t time_now) {
- int delay = in_quick_rampup_ ? kQuickRampUpDelayMs : current_rampup_delay_ms_;
- if (time_now < last_rampup_time_ + delay)
- return false;
-
- bool underusing = false;
- if (options_.enable_encode_usage_method) {
- underusing = metrics.encode_usage_percent <
- options_.low_encode_usage_threshold_percent;
- }
- return underusing;
-}
-} // namespace webrtc
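The back-off in Process() above works as follows: when an overuse is detected within 40 s of the last ramp-up (or after more than four overuse detections in total), the delay before the next ramp-up attempt doubles, from the standard 40 s up to the 240 s cap, i.e. 40 s -> 80 s -> 160 s -> 240 s. The arithmetic of one step, restated:

#include <algorithm>

// Restates the ramp-up back-off step from Process().
int NextRampUpDelayMs(int current_rampup_delay_ms) {
  const double kRampUpBackoffFactor = 2.0;
  const int kMaxRampUpDelayMs = 240 * 1000;
  int next = static_cast<int>(current_rampup_delay_ms * kRampUpBackoffFactor);
  return std::min(next, kMaxRampUpDelayMs);
}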
diff --git a/webrtc/video_engine/overuse_frame_detector.h b/webrtc/video_engine/overuse_frame_detector.h
deleted file mode 100644
index aff4b43025..0000000000
--- a/webrtc/video_engine/overuse_frame_detector.h
+++ /dev/null
@@ -1,181 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_VIDEO_ENGINE_OVERUSE_FRAME_DETECTOR_H_
-#define WEBRTC_VIDEO_ENGINE_OVERUSE_FRAME_DETECTOR_H_
-
-#include "webrtc/base/constructormagic.h"
-#include "webrtc/base/criticalsection.h"
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/base/exp_filter.h"
-#include "webrtc/base/thread_annotations.h"
-#include "webrtc/base/thread_checker.h"
-#include "webrtc/modules/interface/module.h"
-
-namespace webrtc {
-
-class Clock;
-
-// CpuOveruseObserver is called when a system overuse is detected and
-// VideoEngine cannot keep up the encoding frequency.
-class CpuOveruseObserver {
- public:
- // Called as soon as an overuse is detected.
- virtual void OveruseDetected() = 0;
- // Called periodically when the system is not overused any longer.
- virtual void NormalUsage() = 0;
-
- protected:
- virtual ~CpuOveruseObserver() {}
-};
-
-struct CpuOveruseOptions {
- CpuOveruseOptions()
- : enable_encode_usage_method(true),
- low_encode_usage_threshold_percent(55),
- high_encode_usage_threshold_percent(85),
- enable_extended_processing_usage(true),
- frame_timeout_interval_ms(1500),
- min_frame_samples(120),
- min_process_count(3),
- high_threshold_consecutive_count(2) {}
-
- // Method based on encode time of frames.
- bool enable_encode_usage_method;
- int low_encode_usage_threshold_percent; // Threshold for triggering underuse.
- int high_encode_usage_threshold_percent; // Threshold for triggering overuse.
- bool enable_extended_processing_usage; // Include a larger time span (in
- // addition to encode time) for
- // measuring the processing time of a
- // frame.
- // General settings.
- int frame_timeout_interval_ms; // The maximum allowed interval between two
- // frames before resetting estimations.
- int min_frame_samples; // The minimum number of frames required.
- int min_process_count; // The number of initial process times required before
- // triggering an overuse/underuse.
- int high_threshold_consecutive_count; // The number of consecutive checks
- // above the high threshold before
- // triggering an overuse.
-};
-
-struct CpuOveruseMetrics {
- CpuOveruseMetrics()
- : avg_encode_time_ms(-1),
- encode_usage_percent(-1) {}
-
- int avg_encode_time_ms; // The average encode time in ms.
- int encode_usage_percent; // The average encode time divided by the average
- // time difference between incoming captured frames.
-};
-
-class CpuOveruseMetricsObserver {
- public:
- virtual ~CpuOveruseMetricsObserver() {}
- virtual void CpuOveruseMetricsUpdated(const CpuOveruseMetrics& metrics) = 0;
-};
-
-
-// Used to detect system overuse based on the send-side processing time of
-// incoming frames.
-class OveruseFrameDetector : public Module {
- public:
- OveruseFrameDetector(Clock* clock,
- const CpuOveruseOptions& options,
- CpuOveruseObserver* overuse_observer,
- CpuOveruseMetricsObserver* metrics_observer);
- ~OveruseFrameDetector();
-
- // Called for each captured frame.
- void FrameCaptured(int width, int height, int64_t capture_time_ms);
-
- // Called for each encoded frame.
- void FrameEncoded(int encode_time_ms);
-
- // Called for each sent frame.
- void FrameSent(int64_t capture_time_ms);
-
- // Only public for testing.
- int LastProcessingTimeMs() const;
- int FramesInQueue() const;
-
- // Implements Module.
- int64_t TimeUntilNextProcess() override;
- int32_t Process() override;
-
- private:
- class EncodeTimeAvg;
- class SendProcessingUsage;
- class FrameQueue;
-
- void UpdateCpuOveruseMetrics() EXCLUSIVE_LOCKS_REQUIRED(crit_);
-
- // TODO(asapersson): This method is only used on one thread, so it shouldn't
- // need a guard.
- void AddProcessingTime(int elapsed_ms) EXCLUSIVE_LOCKS_REQUIRED(crit_);
-
- // Only called on the processing thread.
- bool IsOverusing(const CpuOveruseMetrics& metrics);
- bool IsUnderusing(const CpuOveruseMetrics& metrics, int64_t time_now);
-
- bool FrameTimeoutDetected(int64_t now) const EXCLUSIVE_LOCKS_REQUIRED(crit_);
- bool FrameSizeChanged(int num_pixels) const EXCLUSIVE_LOCKS_REQUIRED(crit_);
-
- void ResetAll(int num_pixels) EXCLUSIVE_LOCKS_REQUIRED(crit_);
-
- // Protects all members except const ones and those that are only accessed
- // on the processing thread.
- // TODO(asapersson): See if we can reduce locking. As is, video frame
- // processing contends with reading stats and the processing thread.
- mutable rtc::CriticalSection crit_;
-
- const CpuOveruseOptions options_;
-
- // Observer getting overuse reports.
- CpuOveruseObserver* const observer_;
-
- // Stats metrics.
- CpuOveruseMetricsObserver* const metrics_observer_;
- CpuOveruseMetrics metrics_ GUARDED_BY(crit_);
-
- Clock* const clock_;
- int64_t num_process_times_ GUARDED_BY(crit_);
-
- int64_t last_capture_time_ GUARDED_BY(crit_);
-
- // Number of pixels of last captured frame.
- int num_pixels_ GUARDED_BY(crit_);
-
- // These seven members are only accessed on the processing thread.
- int64_t next_process_time_;
- int64_t last_overuse_time_;
- int checks_above_threshold_;
- int num_overuse_detections_;
- int64_t last_rampup_time_;
- bool in_quick_rampup_;
- int current_rampup_delay_ms_;
-
- int64_t last_encode_sample_ms_; // Only accessed by one thread.
- int64_t last_sample_time_ms_; // Only accessed by one thread.
-
- // TODO(asapersson): Can these be regular members (avoid separate heap
- // allocs)?
- const rtc::scoped_ptr<EncodeTimeAvg> encode_time_ GUARDED_BY(crit_);
- const rtc::scoped_ptr<SendProcessingUsage> usage_ GUARDED_BY(crit_);
- const rtc::scoped_ptr<FrameQueue> frame_queue_ GUARDED_BY(crit_);
-
- rtc::ThreadChecker processing_thread_;
-
- RTC_DISALLOW_COPY_AND_ASSIGN(OveruseFrameDetector);
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_VIDEO_ENGINE_OVERUSE_FRAME_DETECTOR_H_
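The options above parameterize a simple hysteresis loop: overuse fires only after `high_threshold_consecutive_count` checks above the high threshold, and normal usage is reported below the low threshold. A standalone sketch of that rule (illustrative values and names, not the deleted implementation):

```cpp
#include <cstdio>

int main() {
  // Defaults mirroring CpuOveruseOptions above.
  const int kLowThresholdPercent = 55;
  const int kHighThresholdPercent = 85;
  const int kConsecutiveCount = 2;  // high_threshold_consecutive_count.

  int checks_above_threshold = 0;
  const int usage_samples[] = {60, 90, 92, 70, 40};  // Made-up usage readings.
  for (int usage : usage_samples) {
    if (usage > kHighThresholdPercent) {
      // Overuse only after enough consecutive checks above the threshold.
      if (++checks_above_threshold >= kConsecutiveCount)
        printf("usage %d%% -> OveruseDetected()\n", usage);
    } else {
      checks_above_threshold = 0;
      if (usage < kLowThresholdPercent)
        printf("usage %d%% -> NormalUsage()\n", usage);
    }
  }
  return 0;
}
```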
diff --git a/webrtc/video_engine/overuse_frame_detector_unittest.cc b/webrtc/video_engine/overuse_frame_detector_unittest.cc
deleted file mode 100644
index d502f02204..0000000000
--- a/webrtc/video_engine/overuse_frame_detector_unittest.cc
+++ /dev/null
@@ -1,405 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/video_engine/overuse_frame_detector.h"
-
-#include "testing/gmock/include/gmock/gmock.h"
-#include "testing/gtest/include/gtest/gtest.h"
-
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/system_wrappers/include/clock.h"
-
-namespace webrtc {
-namespace {
- const int kWidth = 640;
- const int kHeight = 480;
- const int kFrameInterval33ms = 33;
- const int kProcessIntervalMs = 5000;
- const int kProcessTime5ms = 5;
-} // namespace
-
-class MockCpuOveruseObserver : public CpuOveruseObserver {
- public:
- MockCpuOveruseObserver() {}
- virtual ~MockCpuOveruseObserver() {}
-
- MOCK_METHOD0(OveruseDetected, void());
- MOCK_METHOD0(NormalUsage, void());
-};
-
-class CpuOveruseObserverImpl : public CpuOveruseObserver {
- public:
- CpuOveruseObserverImpl() :
- overuse_(0),
- normaluse_(0) {}
- virtual ~CpuOveruseObserverImpl() {}
-
- void OveruseDetected() { ++overuse_; }
- void NormalUsage() { ++normaluse_; }
-
- int overuse_;
- int normaluse_;
-};
-
-class OveruseFrameDetectorTest : public ::testing::Test,
- public CpuOveruseMetricsObserver {
- protected:
- virtual void SetUp() {
- clock_.reset(new SimulatedClock(1234));
- observer_.reset(new MockCpuOveruseObserver());
- options_.min_process_count = 0;
- ReinitializeOveruseDetector();
- }
-
- void ReinitializeOveruseDetector() {
- overuse_detector_.reset(new OveruseFrameDetector(clock_.get(), options_,
- observer_.get(), this));
- }
-
- void CpuOveruseMetricsUpdated(const CpuOveruseMetrics& metrics) override {
- metrics_ = metrics;
- }
-
- int InitialUsage() {
- return ((options_.low_encode_usage_threshold_percent +
- options_.high_encode_usage_threshold_percent) / 2.0f) + 0.5;
- }
-
- void InsertAndSendFramesWithInterval(
- int num_frames, int interval_ms, int width, int height, int delay_ms) {
- while (num_frames-- > 0) {
- int64_t capture_time_ms = clock_->TimeInMilliseconds();
- overuse_detector_->FrameCaptured(width, height, capture_time_ms);
- clock_->AdvanceTimeMilliseconds(delay_ms);
- overuse_detector_->FrameEncoded(delay_ms);
- overuse_detector_->FrameSent(capture_time_ms);
- clock_->AdvanceTimeMilliseconds(interval_ms - delay_ms);
- }
- }
-
- void TriggerOveruse(int num_times) {
- const int kDelayMs = 32;
- for (int i = 0; i < num_times; ++i) {
- InsertAndSendFramesWithInterval(
- 1000, kFrameInterval33ms, kWidth, kHeight, kDelayMs);
- overuse_detector_->Process();
- }
- }
-
- void TriggerUnderuse() {
- const int kDelayMs1 = 5;
- const int kDelayMs2 = 6;
- InsertAndSendFramesWithInterval(
- 1300, kFrameInterval33ms, kWidth, kHeight, kDelayMs1);
- InsertAndSendFramesWithInterval(
- 1, kFrameInterval33ms, kWidth, kHeight, kDelayMs2);
- overuse_detector_->Process();
- }
-
- int AvgEncodeTimeMs() { return metrics_.avg_encode_time_ms; }
-
- int UsagePercent() { return metrics_.encode_usage_percent; }
-
- CpuOveruseOptions options_;
- rtc::scoped_ptr<SimulatedClock> clock_;
- rtc::scoped_ptr<MockCpuOveruseObserver> observer_;
- rtc::scoped_ptr<OveruseFrameDetector> overuse_detector_;
- CpuOveruseMetrics metrics_;
-};
-
-
-// enable_encode_usage_method = true;
-// enable_extended_processing_usage = false;
-// UsagePercent() > high_encode_usage_threshold_percent => overuse.
-// UsagePercent() < low_encode_usage_threshold_percent => underuse.
-TEST_F(OveruseFrameDetectorTest, TriggerOveruse) {
- options_.enable_extended_processing_usage = false;
- ReinitializeOveruseDetector();
- // usage > high => overuse
- EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
- TriggerOveruse(options_.high_threshold_consecutive_count);
-}
-
-TEST_F(OveruseFrameDetectorTest, OveruseAndRecover) {
- options_.enable_extended_processing_usage = false;
- ReinitializeOveruseDetector();
- // usage > high => overuse
- EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
- TriggerOveruse(options_.high_threshold_consecutive_count);
- // usage < low => underuse
- EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(testing::AtLeast(1));
- TriggerUnderuse();
-}
-
-TEST_F(OveruseFrameDetectorTest, OveruseAndRecoverWithNoObserver) {
- options_.enable_extended_processing_usage = false;
- overuse_detector_.reset(
- new OveruseFrameDetector(clock_.get(), options_, nullptr, this));
- EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(0);
- TriggerOveruse(options_.high_threshold_consecutive_count);
- EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(0);
- TriggerUnderuse();
-}
-
-TEST_F(OveruseFrameDetectorTest, OveruseAndRecoverWithMethodDisabled) {
- options_.enable_encode_usage_method = false;
- options_.enable_extended_processing_usage = false;
- ReinitializeOveruseDetector();
- EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(0);
- TriggerOveruse(options_.high_threshold_consecutive_count);
- EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(0);
- TriggerUnderuse();
-}
-
-TEST_F(OveruseFrameDetectorTest, DoubleOveruseAndRecover) {
- options_.enable_extended_processing_usage = false;
- ReinitializeOveruseDetector();
- EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(2);
- TriggerOveruse(options_.high_threshold_consecutive_count);
- TriggerOveruse(options_.high_threshold_consecutive_count);
- EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(testing::AtLeast(1));
- TriggerUnderuse();
-}
-
-TEST_F(OveruseFrameDetectorTest, TriggerUnderuseWithMinProcessCount) {
- options_.enable_extended_processing_usage = false;
- options_.min_process_count = 1;
- CpuOveruseObserverImpl overuse_observer;
- overuse_detector_.reset(new OveruseFrameDetector(clock_.get(), options_,
- &overuse_observer, this));
- InsertAndSendFramesWithInterval(
- 1200, kFrameInterval33ms, kWidth, kHeight, kProcessTime5ms);
- overuse_detector_->Process();
- EXPECT_EQ(0, overuse_observer.normaluse_);
- clock_->AdvanceTimeMilliseconds(kProcessIntervalMs);
- overuse_detector_->Process();
- EXPECT_EQ(1, overuse_observer.normaluse_);
-}
-
-TEST_F(OveruseFrameDetectorTest, ConstantOveruseGivesNoNormalUsage) {
- options_.enable_extended_processing_usage = false;
- ReinitializeOveruseDetector();
- EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(0);
- EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(64);
- for (size_t i = 0; i < 64; ++i) {
- TriggerOveruse(options_.high_threshold_consecutive_count);
- }
-}
-
-TEST_F(OveruseFrameDetectorTest, ConsecutiveCountTriggersOveruse) {
- EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
- options_.enable_extended_processing_usage = false;
- options_.high_threshold_consecutive_count = 2;
- ReinitializeOveruseDetector();
- TriggerOveruse(2);
-}
-
-TEST_F(OveruseFrameDetectorTest, IncorrectConsecutiveCountTriggersNoOveruse) {
- EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(0);
- options_.enable_extended_processing_usage = false;
- options_.high_threshold_consecutive_count = 2;
- ReinitializeOveruseDetector();
- TriggerOveruse(1);
-}
-
-TEST_F(OveruseFrameDetectorTest, ProcessingUsage) {
- InsertAndSendFramesWithInterval(
- 1000, kFrameInterval33ms, kWidth, kHeight, kProcessTime5ms);
- EXPECT_EQ(kProcessTime5ms * 100 / kFrameInterval33ms, UsagePercent());
-}
-
-TEST_F(OveruseFrameDetectorTest, ResetAfterResolutionChange) {
- EXPECT_EQ(InitialUsage(), UsagePercent());
- InsertAndSendFramesWithInterval(
- 1000, kFrameInterval33ms, kWidth, kHeight, kProcessTime5ms);
- EXPECT_NE(InitialUsage(), UsagePercent());
- // Verify reset.
- InsertAndSendFramesWithInterval(
- 1, kFrameInterval33ms, kWidth, kHeight + 1, kProcessTime5ms);
- EXPECT_EQ(InitialUsage(), UsagePercent());
-}
-
-TEST_F(OveruseFrameDetectorTest, ResetAfterFrameTimeout) {
- EXPECT_EQ(InitialUsage(), UsagePercent());
- InsertAndSendFramesWithInterval(
- 1000, kFrameInterval33ms, kWidth, kHeight, kProcessTime5ms);
- EXPECT_NE(InitialUsage(), UsagePercent());
- InsertAndSendFramesWithInterval(
- 2, options_.frame_timeout_interval_ms, kWidth, kHeight, kProcessTime5ms);
- EXPECT_NE(InitialUsage(), UsagePercent());
- // Verify reset.
- InsertAndSendFramesWithInterval(
- 2, options_.frame_timeout_interval_ms + 1, kWidth, kHeight,
- kProcessTime5ms);
- EXPECT_EQ(InitialUsage(), UsagePercent());
-}
-
-TEST_F(OveruseFrameDetectorTest, MinFrameSamplesBeforeUpdating) {
- options_.min_frame_samples = 40;
- ReinitializeOveruseDetector();
- InsertAndSendFramesWithInterval(
- 40, kFrameInterval33ms, kWidth, kHeight, kProcessTime5ms);
- EXPECT_EQ(InitialUsage(), UsagePercent());
- InsertAndSendFramesWithInterval(
- 1, kFrameInterval33ms, kWidth, kHeight, kProcessTime5ms);
- EXPECT_NE(InitialUsage(), UsagePercent());
-}
-
-TEST_F(OveruseFrameDetectorTest, InitialProcessingUsage) {
- EXPECT_EQ(InitialUsage(), UsagePercent());
-}
-
-TEST_F(OveruseFrameDetectorTest, FrameDelay_OneFrameDisabled) {
- options_.enable_extended_processing_usage = false;
- ReinitializeOveruseDetector();
- const int kProcessingTimeMs = 100;
- overuse_detector_->FrameCaptured(kWidth, kHeight, 33);
- clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
- overuse_detector_->FrameSent(33);
- EXPECT_EQ(-1, overuse_detector_->LastProcessingTimeMs());
-}
-
-TEST_F(OveruseFrameDetectorTest, FrameDelay_OneFrame) {
- options_.enable_extended_processing_usage = true;
- ReinitializeOveruseDetector();
- const int kProcessingTimeMs = 100;
- overuse_detector_->FrameCaptured(kWidth, kHeight, 33);
- clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
- EXPECT_EQ(-1, overuse_detector_->LastProcessingTimeMs());
- overuse_detector_->FrameSent(33);
- EXPECT_EQ(kProcessingTimeMs, overuse_detector_->LastProcessingTimeMs());
- EXPECT_EQ(0, overuse_detector_->FramesInQueue());
-}
-
-TEST_F(OveruseFrameDetectorTest, FrameDelay_TwoFrames) {
- options_.enable_extended_processing_usage = true;
- ReinitializeOveruseDetector();
- const int kProcessingTimeMs1 = 100;
- const int kProcessingTimeMs2 = 50;
- const int kTimeBetweenFramesMs = 200;
- overuse_detector_->FrameCaptured(kWidth, kHeight, 33);
- clock_->AdvanceTimeMilliseconds(kProcessingTimeMs1);
- overuse_detector_->FrameSent(33);
- EXPECT_EQ(kProcessingTimeMs1, overuse_detector_->LastProcessingTimeMs());
- clock_->AdvanceTimeMilliseconds(kTimeBetweenFramesMs);
- overuse_detector_->FrameCaptured(kWidth, kHeight, 66);
- clock_->AdvanceTimeMilliseconds(kProcessingTimeMs2);
- overuse_detector_->FrameSent(66);
- EXPECT_EQ(kProcessingTimeMs2, overuse_detector_->LastProcessingTimeMs());
-}
-
-TEST_F(OveruseFrameDetectorTest, FrameDelay_MaxQueueSize) {
- options_.enable_extended_processing_usage = true;
- ReinitializeOveruseDetector();
- const int kMaxQueueSize = 91;
- for (int i = 0; i < kMaxQueueSize * 2; ++i) {
- overuse_detector_->FrameCaptured(kWidth, kHeight, i);
- }
- EXPECT_EQ(kMaxQueueSize, overuse_detector_->FramesInQueue());
-}
-
-TEST_F(OveruseFrameDetectorTest, FrameDelay_NonProcessedFramesRemoved) {
- options_.enable_extended_processing_usage = true;
- ReinitializeOveruseDetector();
- const int kProcessingTimeMs = 100;
- overuse_detector_->FrameCaptured(kWidth, kHeight, 33);
- clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
- overuse_detector_->FrameCaptured(kWidth, kHeight, 35);
- clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
- overuse_detector_->FrameCaptured(kWidth, kHeight, 66);
- clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
- overuse_detector_->FrameCaptured(kWidth, kHeight, 99);
- clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
- EXPECT_EQ(-1, overuse_detector_->LastProcessingTimeMs());
- EXPECT_EQ(4, overuse_detector_->FramesInQueue());
- overuse_detector_->FrameSent(66);
- // Frames 33 and 35 removed, 66 processed, 99 not processed.
- EXPECT_EQ(2 * kProcessingTimeMs, overuse_detector_->LastProcessingTimeMs());
- EXPECT_EQ(1, overuse_detector_->FramesInQueue());
- overuse_detector_->FrameSent(99);
- EXPECT_EQ(kProcessingTimeMs, overuse_detector_->LastProcessingTimeMs());
- EXPECT_EQ(0, overuse_detector_->FramesInQueue());
-}
-
-TEST_F(OveruseFrameDetectorTest, FrameDelay_ResetClearsFrames) {
- options_.enable_extended_processing_usage = true;
- ReinitializeOveruseDetector();
- const int kProcessingTimeMs = 100;
- overuse_detector_->FrameCaptured(kWidth, kHeight, 33);
- EXPECT_EQ(1, overuse_detector_->FramesInQueue());
- clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
- // Verify reset (resolution changed).
- overuse_detector_->FrameCaptured(kWidth, kHeight + 1, 66);
- EXPECT_EQ(1, overuse_detector_->FramesInQueue());
- clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
- overuse_detector_->FrameSent(66);
- EXPECT_EQ(kProcessingTimeMs, overuse_detector_->LastProcessingTimeMs());
- EXPECT_EQ(0, overuse_detector_->FramesInQueue());
-}
-
-TEST_F(OveruseFrameDetectorTest, FrameDelay_NonMatchingSendFrameIgnored) {
- options_.enable_extended_processing_usage = true;
- ReinitializeOveruseDetector();
- const int kProcessingTimeMs = 100;
- overuse_detector_->FrameCaptured(kWidth, kHeight, 33);
- clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
- overuse_detector_->FrameSent(34);
- EXPECT_EQ(-1, overuse_detector_->LastProcessingTimeMs());
- overuse_detector_->FrameSent(33);
- EXPECT_EQ(kProcessingTimeMs, overuse_detector_->LastProcessingTimeMs());
-}
-
-TEST_F(OveruseFrameDetectorTest, EncodedFrame) {
- const int kInitialAvgEncodeTimeInMs = 5;
- EXPECT_EQ(kInitialAvgEncodeTimeInMs, AvgEncodeTimeMs());
- for (int i = 0; i < 30; i++) {
- clock_->AdvanceTimeMilliseconds(33);
- overuse_detector_->FrameEncoded(2);
- }
- EXPECT_EQ(2, AvgEncodeTimeMs());
-}
-
-// enable_encode_usage_method = true;
-// enable_extended_processing_usage = true;
-// UsagePercent() > high_encode_usage_threshold_percent => overuse.
-// UsagePercent() < low_encode_usage_threshold_percent => underuse.
-TEST_F(OveruseFrameDetectorTest, TriggerOveruseWithExtendedProcessingUsage) {
- options_.enable_extended_processing_usage = true;
- ReinitializeOveruseDetector();
- // usage > high => overuse
- EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
- TriggerOveruse(options_.high_threshold_consecutive_count);
-}
-
-TEST_F(OveruseFrameDetectorTest, OveruseAndRecoverWithExtendedProcessingUsage) {
- options_.enable_extended_processing_usage = true;
- ReinitializeOveruseDetector();
- // usage > high => overuse
- EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
- TriggerOveruse(options_.high_threshold_consecutive_count);
- // usage < low => underuse
- EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(testing::AtLeast(1));
- TriggerUnderuse();
-}
-
-TEST_F(OveruseFrameDetectorTest,
- OveruseAndRecoverWithExtendedProcessingUsageMethodDisabled) {
- options_.enable_encode_usage_method = false;
- options_.enable_extended_processing_usage = true;
- ReinitializeOveruseDetector();
- // usage > high => overuse
- EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(0);
- TriggerOveruse(options_.high_threshold_consecutive_count);
- // usage < low => underuse
- EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(0);
- TriggerUnderuse();
-}
-
-} // namespace webrtc
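The ProcessingUsage test pins down the core metric: encode_usage_percent is the average encode time divided by the average capture interval, in percent. A quick standalone check of the expected value:

```cpp
#include <cassert>

int main() {
  const int kProcessTimeMs = 5;
  const int kFrameIntervalMs = 33;
  // encode_usage_percent = avg encode time / avg capture interval, in percent.
  int usage_percent = kProcessTimeMs * 100 / kFrameIntervalMs;
  assert(usage_percent == 15);  // Matches EXPECT_EQ in ProcessingUsage above.
  return 0;
}
```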
diff --git a/webrtc/video_engine/payload_router.cc b/webrtc/video_engine/payload_router.cc
deleted file mode 100644
index 3af3d4829e..0000000000
--- a/webrtc/video_engine/payload_router.cc
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/video_engine/payload_router.h"
-
-#include "webrtc/base/checks.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-
-namespace webrtc {
-
-PayloadRouter::PayloadRouter()
- : crit_(CriticalSectionWrapper::CreateCriticalSection()),
- active_(false) {}
-
-PayloadRouter::~PayloadRouter() {}
-
-size_t PayloadRouter::DefaultMaxPayloadLength() {
- const size_t kIpUdpSrtpLength = 44;
- return IP_PACKET_SIZE - kIpUdpSrtpLength;
-}
-
-void PayloadRouter::SetSendingRtpModules(
- const std::list<RtpRtcp*>& rtp_modules) {
- CriticalSectionScoped cs(crit_.get());
- rtp_modules_.clear();
- rtp_modules_.reserve(rtp_modules.size());
- for (auto* rtp_module : rtp_modules) {
- rtp_modules_.push_back(rtp_module);
- }
-}
-
-void PayloadRouter::set_active(bool active) {
- CriticalSectionScoped cs(crit_.get());
- active_ = active;
-}
-
-bool PayloadRouter::active() {
- CriticalSectionScoped cs(crit_.get());
- return active_ && !rtp_modules_.empty();
-}
-
-bool PayloadRouter::RoutePayload(FrameType frame_type,
- int8_t payload_type,
- uint32_t time_stamp,
- int64_t capture_time_ms,
- const uint8_t* payload_data,
- size_t payload_length,
- const RTPFragmentationHeader* fragmentation,
- const RTPVideoHeader* rtp_video_hdr) {
- CriticalSectionScoped cs(crit_.get());
- if (!active_ || rtp_modules_.empty())
- return false;
-
- // The simulcast index might actually be larger than the number of modules in
- // case the encoder was processing a frame during a codec reconfig.
- if (rtp_video_hdr != NULL &&
- rtp_video_hdr->simulcastIdx >= rtp_modules_.size())
- return false;
-
- int stream_idx = 0;
- if (rtp_video_hdr != NULL)
- stream_idx = rtp_video_hdr->simulcastIdx;
- return rtp_modules_[stream_idx]->SendOutgoingData(
- frame_type, payload_type, time_stamp, capture_time_ms, payload_data,
- payload_length, fragmentation, rtp_video_hdr) == 0;
-}
-
-void PayloadRouter::SetTargetSendBitrates(
- const std::vector<uint32_t>& stream_bitrates) {
- CriticalSectionScoped cs(crit_.get());
- if (stream_bitrates.size() < rtp_modules_.size()) {
- // There can be a size mismatch during codec reconfiguration.
- return;
- }
- int idx = 0;
- for (auto* rtp_module : rtp_modules_) {
- rtp_module->SetTargetSendBitrate(stream_bitrates[idx++]);
- }
-}
-
-size_t PayloadRouter::MaxPayloadLength() const {
- size_t min_payload_length = DefaultMaxPayloadLength();
- CriticalSectionScoped cs(crit_.get());
- for (auto* rtp_module : rtp_modules_) {
- size_t module_payload_length = rtp_module->MaxDataPayloadLength();
- if (module_payload_length < min_payload_length)
- min_payload_length = module_payload_length;
- }
- return min_payload_length;
-}
-
-} // namespace webrtc
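DefaultMaxPayloadLength() subtracts a fixed 44-byte kIpUdpSrtpLength from IP_PACKET_SIZE. Judging by the unit test further below, that constant plausibly decomposes as IPv4 + UDP + RTP + SRTP auth tag; a standalone check under that assumption:

```cpp
#include <cassert>
#include <cstddef>

int main() {
  const size_t kMtu = 1500;        // IP_PACKET_SIZE.
  const size_t kIpv4Header = 20;
  const size_t kUdpHeader = 8;
  const size_t kRtpHeader = 12;
  const size_t kSrtpAuthTag = 4;   // Presumed breakdown of kIpUdpSrtpLength.
  assert(kMtu - (kIpv4Header + kUdpHeader + kRtpHeader + kSrtpAuthTag) == 1456);
  return 0;
}
```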
diff --git a/webrtc/video_engine/payload_router.h b/webrtc/video_engine/payload_router.h
deleted file mode 100644
index 17bc279290..0000000000
--- a/webrtc/video_engine/payload_router.h
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_VIDEO_ENGINE_PAYLOAD_ROUTER_H_
-#define WEBRTC_VIDEO_ENGINE_PAYLOAD_ROUTER_H_
-
-#include <list>
-#include <vector>
-
-#include "webrtc/base/constructormagic.h"
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/base/thread_annotations.h"
-#include "webrtc/common_types.h"
-#include "webrtc/system_wrappers/include/atomic32.h"
-
-namespace webrtc {
-
-class CriticalSectionWrapper;
-class RTPFragmentationHeader;
-class RtpRtcp;
-struct RTPVideoHeader;
-
-// PayloadRouter routes outgoing data to the correct sending RTP module, based
-// on the simulcast layer in RTPVideoHeader.
-class PayloadRouter {
- public:
- PayloadRouter();
- ~PayloadRouter();
-
- static size_t DefaultMaxPayloadLength();
-
- // RTP modules are assumed to be sorted in simulcast index order.
- void SetSendingRtpModules(const std::list<RtpRtcp*>& rtp_modules);
-
- // PayloadRouter only routes packets while active; all packets are dropped
- // otherwise.
- void set_active(bool active);
- bool active();
-
- // Input parameters according to the signature of RtpRtcp::SendOutgoingData.
- // Returns true if the packet was routed / sent, false otherwise.
- bool RoutePayload(FrameType frame_type,
- int8_t payload_type,
- uint32_t time_stamp,
- int64_t capture_time_ms,
- const uint8_t* payload_data,
- size_t payload_size,
- const RTPFragmentationHeader* fragmentation,
- const RTPVideoHeader* rtp_video_hdr);
-
- // Configures current target bitrate per module. 'stream_bitrates' is assumed
- // to be in the same order as 'SetSendingRtpModules'.
- void SetTargetSendBitrates(const std::vector<uint32_t>& stream_bitrates);
-
- // Returns the maximum allowed data payload length, given the configured MTU
- // and RTP headers.
- size_t MaxPayloadLength() const;
-
- void AddRef() { ++ref_count_; }
- void Release() { if (--ref_count_ == 0) { delete this; } }
-
- private:
- // TODO(mflodman): When the new video API has launched, remove crit_ and
- // assume rtp_modules_ will never change during a call.
- rtc::scoped_ptr<CriticalSectionWrapper> crit_;
-
- // Active sending RTP modules, in layer order.
- std::vector<RtpRtcp*> rtp_modules_ GUARDED_BY(crit_.get());
- bool active_ GUARDED_BY(crit_.get());
-
- Atomic32 ref_count_;
-
- RTC_DISALLOW_COPY_AND_ASSIGN(PayloadRouter);
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_VIDEO_ENGINE_PAYLOAD_ROUTER_H_
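AddRef()/Release() above implement intrusive reference counting: the last Release() deletes the router. A standalone sketch of the same pattern, with std::atomic<int> standing in for webrtc's Atomic32 (an assumption made for brevity):

```cpp
#include <atomic>
#include <cstdio>

class RefCounted {
 public:
  void AddRef() { ++ref_count_; }
  void Release() {
    if (--ref_count_ == 0)
      delete this;  // Last owner frees the object.
  }

 private:
  ~RefCounted() { printf("deleted\n"); }  // Private: only Release() deletes.
  std::atomic<int> ref_count_{0};
};

int main() {
  RefCounted* obj = new RefCounted();
  obj->AddRef();   // Owner #1.
  obj->AddRef();   // Owner #2.
  obj->Release();  // Owner #2 done.
  obj->Release();  // Count hits zero; prints "deleted".
  return 0;
}
```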
diff --git a/webrtc/video_engine/payload_router_unittest.cc b/webrtc/video_engine/payload_router_unittest.cc
deleted file mode 100644
index de391576d8..0000000000
--- a/webrtc/video_engine/payload_router_unittest.cc
+++ /dev/null
@@ -1,209 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-
-#include <list>
-
-#include "testing/gmock/include/gmock/gmock.h"
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
-#include "webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h"
-#include "webrtc/video_engine/payload_router.h"
-
-using ::testing::_;
-using ::testing::AnyNumber;
-using ::testing::NiceMock;
-using ::testing::Return;
-
-namespace webrtc {
-
-class PayloadRouterTest : public ::testing::Test {
- protected:
- virtual void SetUp() {
- payload_router_.reset(new PayloadRouter());
- }
- rtc::scoped_ptr<PayloadRouter> payload_router_;
-};
-
-TEST_F(PayloadRouterTest, SendOnOneModule) {
- MockRtpRtcp rtp;
- std::list<RtpRtcp*> modules(1, &rtp);
-
- payload_router_->SetSendingRtpModules(modules);
-
- uint8_t payload = 'a';
- FrameType frame_type = kVideoFrameKey;
- int8_t payload_type = 96;
-
- EXPECT_CALL(rtp, SendOutgoingData(frame_type, payload_type, 0, 0, _, 1, NULL,
- NULL))
- .Times(0);
- EXPECT_FALSE(payload_router_->RoutePayload(frame_type, payload_type, 0, 0,
- &payload, 1, NULL, NULL));
-
- payload_router_->set_active(true);
- EXPECT_CALL(rtp, SendOutgoingData(frame_type, payload_type, 0, 0, _, 1, NULL,
- NULL))
- .Times(1);
- EXPECT_TRUE(payload_router_->RoutePayload(frame_type, payload_type, 0, 0,
- &payload, 1, NULL, NULL));
-
- payload_router_->set_active(false);
- EXPECT_CALL(rtp, SendOutgoingData(frame_type, payload_type, 0, 0, _, 1, NULL,
- NULL))
- .Times(0);
- EXPECT_FALSE(payload_router_->RoutePayload(frame_type, payload_type, 0, 0,
- &payload, 1, NULL, NULL));
-
- payload_router_->set_active(true);
- EXPECT_CALL(rtp, SendOutgoingData(frame_type, payload_type, 0, 0, _, 1, NULL,
- NULL))
- .Times(1);
- EXPECT_TRUE(payload_router_->RoutePayload(frame_type, payload_type, 0, 0,
- &payload, 1, NULL, NULL));
-
- modules.clear();
- payload_router_->SetSendingRtpModules(modules);
- EXPECT_CALL(rtp, SendOutgoingData(frame_type, payload_type, 0, 0, _, 1, NULL,
- NULL))
- .Times(0);
- EXPECT_FALSE(payload_router_->RoutePayload(frame_type, payload_type, 0, 0,
- &payload, 1, NULL, NULL));
-}
-
-TEST_F(PayloadRouterTest, SendSimulcast) {
- MockRtpRtcp rtp_1;
- MockRtpRtcp rtp_2;
- std::list<RtpRtcp*> modules;
- modules.push_back(&rtp_1);
- modules.push_back(&rtp_2);
-
- payload_router_->SetSendingRtpModules(modules);
-
- uint8_t payload_1 = 'a';
- FrameType frame_type_1 = kVideoFrameKey;
- int8_t payload_type_1 = 96;
- RTPVideoHeader rtp_hdr_1;
- rtp_hdr_1.simulcastIdx = 0;
-
- payload_router_->set_active(true);
- EXPECT_CALL(rtp_1, SendOutgoingData(frame_type_1, payload_type_1, 0, 0, _, 1,
- NULL, &rtp_hdr_1))
- .Times(1);
- EXPECT_CALL(rtp_2, SendOutgoingData(_, _, _, _, _, _, _, _))
- .Times(0);
- EXPECT_TRUE(payload_router_->RoutePayload(frame_type_1, payload_type_1, 0, 0,
- &payload_1, 1, NULL, &rtp_hdr_1));
-
- uint8_t payload_2 = 'b';
- FrameType frame_type_2 = kVideoFrameDelta;
- int8_t payload_type_2 = 97;
- RTPVideoHeader rtp_hdr_2;
- rtp_hdr_2.simulcastIdx = 1;
- EXPECT_CALL(rtp_2, SendOutgoingData(frame_type_2, payload_type_2, 0, 0, _, 1,
- NULL, &rtp_hdr_2))
- .Times(1);
- EXPECT_CALL(rtp_1, SendOutgoingData(_, _, _, _, _, _, _, _))
- .Times(0);
- EXPECT_TRUE(payload_router_->RoutePayload(frame_type_2, payload_type_2, 0, 0,
- &payload_2, 1, NULL, &rtp_hdr_2));
-
- // Inactive.
- payload_router_->set_active(false);
- EXPECT_CALL(rtp_1, SendOutgoingData(_, _, _, _, _, _, _, _))
- .Times(0);
- EXPECT_CALL(rtp_2, SendOutgoingData(_, _, _, _, _, _, _, _))
- .Times(0);
- EXPECT_FALSE(payload_router_->RoutePayload(frame_type_1, payload_type_1, 0, 0,
- &payload_1, 1, NULL, &rtp_hdr_1));
- EXPECT_FALSE(payload_router_->RoutePayload(frame_type_2, payload_type_2, 0, 0,
- &payload_2, 1, NULL, &rtp_hdr_2));
-
- // Invalid simulcast index.
- payload_router_->set_active(true);
- EXPECT_CALL(rtp_1, SendOutgoingData(_, _, _, _, _, _, _, _))
- .Times(0);
- EXPECT_CALL(rtp_2, SendOutgoingData(_, _, _, _, _, _, _, _))
- .Times(0);
- rtp_hdr_1.simulcastIdx = 2;
- EXPECT_FALSE(payload_router_->RoutePayload(frame_type_1, payload_type_1, 0, 0,
- &payload_1, 1, NULL, &rtp_hdr_1));
-}
-
-TEST_F(PayloadRouterTest, MaxPayloadLength) {
- // Without any limitations from the modules, verify we get the max payload
- // length for IP/UDP/SRTP with an MTU of 1500 bytes.
- const size_t kDefaultMaxLength = 1500 - 20 - 8 - 12 - 4;
- EXPECT_EQ(kDefaultMaxLength, payload_router_->DefaultMaxPayloadLength());
- EXPECT_EQ(kDefaultMaxLength, payload_router_->MaxPayloadLength());
-
- MockRtpRtcp rtp_1;
- MockRtpRtcp rtp_2;
- std::list<RtpRtcp*> modules;
- modules.push_back(&rtp_1);
- modules.push_back(&rtp_2);
- payload_router_->SetSendingRtpModules(modules);
-
- // Modules return a higher length than the default value.
- EXPECT_CALL(rtp_1, MaxDataPayloadLength())
- .Times(1)
- .WillOnce(Return(kDefaultMaxLength + 10));
- EXPECT_CALL(rtp_2, MaxDataPayloadLength())
- .Times(1)
- .WillOnce(Return(kDefaultMaxLength + 10));
- EXPECT_EQ(kDefaultMaxLength, payload_router_->MaxPayloadLength());
-
- // The modules return a value lower than default.
- const size_t kTestMinPayloadLength = 1001;
- EXPECT_CALL(rtp_1, MaxDataPayloadLength())
- .Times(1)
- .WillOnce(Return(kTestMinPayloadLength + 10));
- EXPECT_CALL(rtp_2, MaxDataPayloadLength())
- .Times(1)
- .WillOnce(Return(kTestMinPayloadLength));
- EXPECT_EQ(kTestMinPayloadLength, payload_router_->MaxPayloadLength());
-}
-
-TEST_F(PayloadRouterTest, SetTargetSendBitrates) {
- MockRtpRtcp rtp_1;
- MockRtpRtcp rtp_2;
- std::list<RtpRtcp*> modules;
- modules.push_back(&rtp_1);
- modules.push_back(&rtp_2);
- payload_router_->SetSendingRtpModules(modules);
-
- const uint32_t bitrate_1 = 10000;
- const uint32_t bitrate_2 = 76543;
- std::vector<uint32_t> bitrates(2, bitrate_1);
- bitrates[1] = bitrate_2;
- EXPECT_CALL(rtp_1, SetTargetSendBitrate(bitrate_1))
- .Times(1);
- EXPECT_CALL(rtp_2, SetTargetSendBitrate(bitrate_2))
- .Times(1);
- payload_router_->SetTargetSendBitrates(bitrates);
-
- bitrates.resize(1);
- EXPECT_CALL(rtp_1, SetTargetSendBitrate(bitrate_1))
- .Times(0);
- EXPECT_CALL(rtp_2, SetTargetSendBitrate(bitrate_2))
- .Times(0);
- payload_router_->SetTargetSendBitrates(bitrates);
-
- bitrates.resize(3);
- bitrates[1] = bitrate_2;
- bitrates[2] = bitrate_1 + bitrate_2;
- EXPECT_CALL(rtp_1, SetTargetSendBitrate(bitrate_1))
- .Times(1);
- EXPECT_CALL(rtp_2, SetTargetSendBitrate(bitrate_2))
- .Times(1);
- payload_router_->SetTargetSendBitrates(bitrates);
-}
-} // namespace webrtc
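The SetTargetSendBitrates test exercises the size-mismatch rule: fewer bitrates than modules is a silent no-op, and surplus bitrates are ignored. A condensed standalone sketch:

```cpp
#include <cstdint>
#include <cstdio>
#include <vector>

// Standalone sketch; num_modules stands in for rtp_modules_.size().
void SetTargetSendBitrates(const std::vector<uint32_t>& bitrates,
                           size_t num_modules) {
  if (bitrates.size() < num_modules)
    return;  // Size mismatch during reconfiguration: drop the update.
  for (size_t i = 0; i < num_modules; ++i)
    printf("module %zu -> %u bps\n", i, static_cast<unsigned>(bitrates[i]));
}

int main() {
  SetTargetSendBitrates({10000}, 2);                // No-op: too few entries.
  SetTargetSendBitrates({10000, 76543, 86543}, 2);  // Two set, extra ignored.
  return 0;
}
```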
diff --git a/webrtc/video_engine/report_block_stats.cc b/webrtc/video_engine/report_block_stats.cc
deleted file mode 100644
index 6df62882d8..0000000000
--- a/webrtc/video_engine/report_block_stats.cc
+++ /dev/null
@@ -1,111 +0,0 @@
-/*
- * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/video_engine/report_block_stats.h"
-
-namespace webrtc {
-
-namespace {
-int FractionLost(uint32_t num_lost_sequence_numbers,
- uint32_t num_sequence_numbers) {
- if (num_sequence_numbers == 0) {
- return 0;
- }
- return ((num_lost_sequence_numbers * 255) + (num_sequence_numbers / 2)) /
- num_sequence_numbers;
-}
-} // namespace
-
-
-// Helper class for rtcp statistics.
-ReportBlockStats::ReportBlockStats()
- : num_sequence_numbers_(0),
- num_lost_sequence_numbers_(0) {
-}
-
-void ReportBlockStats::Store(const RtcpStatistics& rtcp_stats,
- uint32_t remote_ssrc,
- uint32_t source_ssrc) {
- RTCPReportBlock block;
- block.cumulativeLost = rtcp_stats.cumulative_lost;
- block.fractionLost = rtcp_stats.fraction_lost;
- block.extendedHighSeqNum = rtcp_stats.extended_max_sequence_number;
- block.jitter = rtcp_stats.jitter;
- block.remoteSSRC = remote_ssrc;
- block.sourceSSRC = source_ssrc;
- uint32_t num_sequence_numbers = 0;
- uint32_t num_lost_sequence_numbers = 0;
- StoreAndAddPacketIncrement(
- block, &num_sequence_numbers, &num_lost_sequence_numbers);
-}
-
-RTCPReportBlock ReportBlockStats::AggregateAndStore(
- const ReportBlockVector& report_blocks) {
- RTCPReportBlock aggregate;
- if (report_blocks.empty()) {
- return aggregate;
- }
- uint32_t num_sequence_numbers = 0;
- uint32_t num_lost_sequence_numbers = 0;
- ReportBlockVector::const_iterator report_block = report_blocks.begin();
- for (; report_block != report_blocks.end(); ++report_block) {
- aggregate.cumulativeLost += report_block->cumulativeLost;
- aggregate.jitter += report_block->jitter;
- StoreAndAddPacketIncrement(*report_block,
- &num_sequence_numbers,
- &num_lost_sequence_numbers);
- }
-
- if (report_blocks.size() == 1) {
- // No aggregation needed.
- return report_blocks[0];
- }
- // Fraction lost since previous report block.
- aggregate.fractionLost =
- FractionLost(num_lost_sequence_numbers, num_sequence_numbers);
- aggregate.jitter = static_cast<uint32_t>(
- (aggregate.jitter + report_blocks.size() / 2) / report_blocks.size());
- return aggregate;
-}
-
-void ReportBlockStats::StoreAndAddPacketIncrement(
- const RTCPReportBlock& report_block,
- uint32_t* num_sequence_numbers,
- uint32_t* num_lost_sequence_numbers) {
- // Get diff with previous report block.
- ReportBlockMap::iterator prev_report_block = prev_report_blocks_.find(
- report_block.sourceSSRC);
- if (prev_report_block != prev_report_blocks_.end()) {
- int seq_num_diff = report_block.extendedHighSeqNum -
- prev_report_block->second.extendedHighSeqNum;
- int cum_loss_diff = report_block.cumulativeLost -
- prev_report_block->second.cumulativeLost;
- if (seq_num_diff >= 0 && cum_loss_diff >= 0) {
- *num_sequence_numbers += seq_num_diff;
- *num_lost_sequence_numbers += cum_loss_diff;
- // Update total number of packets/lost packets.
- num_sequence_numbers_ += seq_num_diff;
- num_lost_sequence_numbers_ += cum_loss_diff;
- }
- }
- // Store current report block.
- prev_report_blocks_[report_block.sourceSSRC] = report_block;
-}
-
-int ReportBlockStats::FractionLostInPercent() const {
- if (num_sequence_numbers_ == 0) {
- return -1;
- }
- return FractionLost(
- num_lost_sequence_numbers_, num_sequence_numbers_) * 100 / 255;
-}
-
-} // namespace webrtc
-
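FractionLost() expresses loss on RTCP's 0..255 scale with half-interval rounding, and FractionLostInPercent() rescales that to percent. A standalone check with made-up counts:

```cpp
#include <cassert>
#include <cstdint>

int main() {
  // 30 packets lost out of 1000; +total/2 rounds to nearest on the 255 scale.
  uint32_t lost = 30, total = 1000;
  uint32_t fraction = (lost * 255 + total / 2) / total;  // 8150/1000 = 8.
  assert(fraction == 8);
  assert(fraction * 100 / 255 == 3);  // FractionLostInPercent()'s rescaling.
  return 0;
}
```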
diff --git a/webrtc/video_engine/report_block_stats.h b/webrtc/video_engine/report_block_stats.h
deleted file mode 100644
index dadcc9d410..0000000000
--- a/webrtc/video_engine/report_block_stats.h
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_VIDEO_ENGINE_REPORT_BLOCK_STATS_H_
-#define WEBRTC_VIDEO_ENGINE_REPORT_BLOCK_STATS_H_
-
-#include <map>
-#include <vector>
-
-#include "webrtc/common_types.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
-
-namespace webrtc {
-
-// Helper class for rtcp statistics.
-class ReportBlockStats {
- public:
- typedef std::map<uint32_t, RTCPReportBlock> ReportBlockMap;
- typedef std::vector<RTCPReportBlock> ReportBlockVector;
- ReportBlockStats();
- ~ReportBlockStats() {}
-
- // Updates stats and stores report blocks.
- // Returns an aggregate of the |report_blocks|.
- RTCPReportBlock AggregateAndStore(const ReportBlockVector& report_blocks);
-
- // Updates stats and stores report block.
- void Store(const RtcpStatistics& rtcp_stats,
- uint32_t remote_ssrc,
- uint32_t source_ssrc);
-
- // Returns the total fraction of lost packets (or -1 if fewer than two report
- // blocks have been stored).
- int FractionLostInPercent() const;
-
- private:
- // Updates the total number of packets/lost packets.
- // Stores the report block.
- // Returns the number of packets/lost packets since previous report block.
- void StoreAndAddPacketIncrement(const RTCPReportBlock& report_block,
- uint32_t* num_sequence_numbers,
- uint32_t* num_lost_sequence_numbers);
-
- // The total number of packets/lost packets.
- uint32_t num_sequence_numbers_;
- uint32_t num_lost_sequence_numbers_;
-
- // Map holding the last stored report block (mapped by the source SSRC).
- ReportBlockMap prev_report_blocks_;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_VIDEO_ENGINE_REPORT_BLOCK_STATS_H_
-
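StoreAndAddPacketIncrement() advances the running totals by the deltas against the previously stored block for the same source SSRC, skipping negative deltas. A standalone sketch using the sequence-number and loss figures from the tests below:

```cpp
#include <cstdint>
#include <cstdio>
#include <map>

struct Block { uint32_t ext_high_seq; uint32_t cum_lost; };

int main() {
  std::map<uint32_t, Block> prev;  // Last stored block, keyed by source SSRC.
  unsigned total_seq = 0, total_lost = 0;
  const uint32_t kSsrc = 0x12345;
  const Block updates[] = {{24000, 10}, {24100, 15}, {24200, 50}};
  for (const Block& b : updates) {
    auto it = prev.find(kSsrc);
    if (it != prev.end()) {
      int seq_diff = b.ext_high_seq - it->second.ext_high_seq;
      int loss_diff = b.cum_lost - it->second.cum_lost;
      if (seq_diff >= 0 && loss_diff >= 0) {  // Ignore reordered/reset blocks.
        total_seq += seq_diff;
        total_lost += loss_diff;
      }
    }
    prev[kSsrc] = b;  // Store current block for the next diff.
  }
  printf("lost %u of %u packets\n", total_lost, total_seq);  // 40 of 200.
  return 0;
}
```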
diff --git a/webrtc/video_engine/report_block_stats_unittest.cc b/webrtc/video_engine/report_block_stats_unittest.cc
deleted file mode 100644
index 13b7af5ba2..0000000000
--- a/webrtc/video_engine/report_block_stats_unittest.cc
+++ /dev/null
@@ -1,146 +0,0 @@
-/*
- * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "testing/gtest/include/gtest/gtest.h"
-
-#include "webrtc/video_engine/report_block_stats.h"
-
-namespace webrtc {
-
-class ReportBlockStatsTest : public ::testing::Test {
- protected:
- ReportBlockStatsTest() : kSsrc1(0x12345), kSsrc2(0x23456) {}
-
- void SetUp() override {
- // kSsrc1: block 1-3.
- block1_1_.cumulativeLost = 10;
- block1_1_.fractionLost = 123;
- block1_1_.extendedHighSeqNum = 24000;
- block1_1_.jitter = 777;
- block1_1_.sourceSSRC = kSsrc1;
- block1_2_.cumulativeLost = 15;
- block1_2_.fractionLost = 0;
- block1_2_.extendedHighSeqNum = 24100;
- block1_2_.jitter = 222;
- block1_2_.sourceSSRC = kSsrc1;
- block1_3_.cumulativeLost = 50;
- block1_3_.fractionLost = 0;
- block1_3_.extendedHighSeqNum = 24200;
- block1_3_.jitter = 333;
- block1_3_.sourceSSRC = kSsrc1;
- // kSsrc2: block 1,2.
- block2_1_.cumulativeLost = 111;
- block2_1_.fractionLost = 222;
- block2_1_.extendedHighSeqNum = 8500;
- block2_1_.jitter = 555;
- block2_1_.sourceSSRC = kSsrc2;
- block2_2_.cumulativeLost = 136;
- block2_2_.fractionLost = 0;
- block2_2_.extendedHighSeqNum = 8800;
- block2_2_.jitter = 888;
- block2_2_.sourceSSRC = kSsrc2;
-
- ssrc1block1_.push_back(block1_1_);
- ssrc1block2_.push_back(block1_2_);
- ssrc12block1_.push_back(block1_1_);
- ssrc12block1_.push_back(block2_1_);
- ssrc12block2_.push_back(block1_2_);
- ssrc12block2_.push_back(block2_2_);
- }
-
- RtcpStatistics RtcpReportBlockToRtcpStatistics(
- const RTCPReportBlock& stats) {
- RtcpStatistics block;
- block.cumulative_lost = stats.cumulativeLost;
- block.fraction_lost = stats.fractionLost;
- block.extended_max_sequence_number = stats.extendedHighSeqNum;
- block.jitter = stats.jitter;
- return block;
- }
-
- const uint32_t kSsrc1;
- const uint32_t kSsrc2;
- RTCPReportBlock block1_1_;
- RTCPReportBlock block1_2_;
- RTCPReportBlock block1_3_;
- RTCPReportBlock block2_1_;
- RTCPReportBlock block2_2_;
- std::vector<RTCPReportBlock> ssrc1block1_;
- std::vector<RTCPReportBlock> ssrc1block2_;
- std::vector<RTCPReportBlock> ssrc12block1_;
- std::vector<RTCPReportBlock> ssrc12block2_;
-};
-
-TEST_F(ReportBlockStatsTest, AggregateAndStore_NoSsrc) {
- ReportBlockStats stats;
- std::vector<RTCPReportBlock> empty;
- RTCPReportBlock aggregated = stats.AggregateAndStore(empty);
- EXPECT_EQ(0U, aggregated.fractionLost);
- EXPECT_EQ(0U, aggregated.cumulativeLost);
- EXPECT_EQ(0U, aggregated.jitter);
- EXPECT_EQ(0U, aggregated.extendedHighSeqNum);
-}
-
-TEST_F(ReportBlockStatsTest, AggregateAndStore_OneSsrc) {
- ReportBlockStats stats;
- RTCPReportBlock aggregated = stats.AggregateAndStore(ssrc1block1_);
- // One ssrc, no aggregation done.
- EXPECT_EQ(123U, aggregated.fractionLost);
- EXPECT_EQ(10U, aggregated.cumulativeLost);
- EXPECT_EQ(777U, aggregated.jitter);
- EXPECT_EQ(24000U, aggregated.extendedHighSeqNum);
-
- aggregated = stats.AggregateAndStore(ssrc1block2_);
- EXPECT_EQ(0U, aggregated.fractionLost);
- EXPECT_EQ(15U, aggregated.cumulativeLost);
- EXPECT_EQ(222U, aggregated.jitter);
- EXPECT_EQ(24100U, aggregated.extendedHighSeqNum);
-
- // fl: 100 * (15-10) / (24100-24000) = 5%
- EXPECT_EQ(5, stats.FractionLostInPercent());
-}
-
-TEST_F(ReportBlockStatsTest, AggregateAndStore_TwoSsrcs) {
- ReportBlockStats stats;
- RTCPReportBlock aggregated = stats.AggregateAndStore(ssrc12block1_);
- EXPECT_EQ(0U, aggregated.fractionLost);
- EXPECT_EQ(10U + 111U, aggregated.cumulativeLost);
- EXPECT_EQ((777U + 555U) / 2, aggregated.jitter);
- EXPECT_EQ(0U, aggregated.extendedHighSeqNum);
-
- aggregated = stats.AggregateAndStore(ssrc12block2_);
- // fl: 255 * ((15-10) + (136-111)) / ((24100-24000) + (8800-8500)) = 19
- EXPECT_EQ(19U, aggregated.fractionLost);
- EXPECT_EQ(15U + 136U, aggregated.cumulativeLost);
- EXPECT_EQ((222U + 888U) / 2, aggregated.jitter);
- EXPECT_EQ(0U, aggregated.extendedHighSeqNum);
-
- // fl: 100 * ((15-10) + (136-111)) / ((24100-24000) + (8800-8500)) = 7%
- EXPECT_EQ(7, stats.FractionLostInPercent());
-}
-
-TEST_F(ReportBlockStatsTest, StoreAndGetFractionLost) {
- const uint32_t kRemoteSsrc = 1;
- ReportBlockStats stats;
- EXPECT_EQ(-1, stats.FractionLostInPercent());
-
- // First block.
- stats.Store(RtcpReportBlockToRtcpStatistics(block1_1_), kRemoteSsrc, kSsrc1);
- EXPECT_EQ(-1, stats.FractionLostInPercent());
- // fl: 100 * (15-10) / (24100-24000) = 5%
- stats.Store(RtcpReportBlockToRtcpStatistics(block1_2_), kRemoteSsrc, kSsrc1);
- EXPECT_EQ(5, stats.FractionLostInPercent());
- // fl: 100 * (50-10) / (24200-24000) = 20%
- stats.Store(RtcpReportBlockToRtcpStatistics(block1_3_), kRemoteSsrc, kSsrc1);
- EXPECT_EQ(20, stats.FractionLostInPercent());
-}
-
-} // namespace webrtc
-
diff --git a/webrtc/video_engine/stream_synchronization.cc b/webrtc/video_engine/stream_synchronization.cc
deleted file mode 100644
index b78cfe8874..0000000000
--- a/webrtc/video_engine/stream_synchronization.cc
+++ /dev/null
@@ -1,226 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/video_engine/stream_synchronization.h"
-
-#include <assert.h>
-#include <math.h>
-#include <stdlib.h>
-
-#include <algorithm>
-
-#include "webrtc/base/logging.h"
-
-namespace webrtc {
-
-static const int kMaxChangeMs = 80;
-static const int kMaxDeltaDelayMs = 10000;
-static const int kFilterLength = 4;
-// Minimum difference between audio and video to warrant a change.
-static const int kMinDeltaMs = 30;
-
-struct ViESyncDelay {
- ViESyncDelay() {
- extra_video_delay_ms = 0;
- last_video_delay_ms = 0;
- extra_audio_delay_ms = 0;
- last_audio_delay_ms = 0;
- network_delay = 120;
- }
-
- int extra_video_delay_ms;
- int last_video_delay_ms;
- int extra_audio_delay_ms;
- int last_audio_delay_ms;
- int network_delay;
-};
-
-StreamSynchronization::StreamSynchronization(uint32_t video_primary_ssrc,
- int audio_channel_id)
- : channel_delay_(new ViESyncDelay),
- video_primary_ssrc_(video_primary_ssrc),
- audio_channel_id_(audio_channel_id),
- base_target_delay_ms_(0),
- avg_diff_ms_(0) {
-}
-
-StreamSynchronization::~StreamSynchronization() {
- delete channel_delay_;
-}
-
-bool StreamSynchronization::ComputeRelativeDelay(
- const Measurements& audio_measurement,
- const Measurements& video_measurement,
- int* relative_delay_ms) {
- assert(relative_delay_ms);
- if (audio_measurement.rtcp.size() < 2 || video_measurement.rtcp.size() < 2) {
- // We need two RTCP SR reports per stream to do synchronization.
- return false;
- }
- int64_t audio_last_capture_time_ms;
- if (!RtpToNtpMs(audio_measurement.latest_timestamp,
- audio_measurement.rtcp,
- &audio_last_capture_time_ms)) {
- return false;
- }
- int64_t video_last_capture_time_ms;
- if (!RtpToNtpMs(video_measurement.latest_timestamp,
- video_measurement.rtcp,
- &video_last_capture_time_ms)) {
- return false;
- }
- if (video_last_capture_time_ms < 0) {
- return false;
- }
- // Positive diff means that video_measurement is behind audio_measurement.
- *relative_delay_ms = video_measurement.latest_receive_time_ms -
- audio_measurement.latest_receive_time_ms -
- (video_last_capture_time_ms - audio_last_capture_time_ms);
- if (*relative_delay_ms > kMaxDeltaDelayMs ||
- *relative_delay_ms < -kMaxDeltaDelayMs) {
- return false;
- }
- return true;
-}
-
-bool StreamSynchronization::ComputeDelays(int relative_delay_ms,
- int current_audio_delay_ms,
- int* total_audio_delay_target_ms,
- int* total_video_delay_target_ms) {
- assert(total_audio_delay_target_ms && total_video_delay_target_ms);
-
- int current_video_delay_ms = *total_video_delay_target_ms;
- LOG(LS_VERBOSE) << "Audio delay: " << current_audio_delay_ms
- << ", network delay diff: " << channel_delay_->network_delay
- << " current diff: " << relative_delay_ms
- << " for channel " << audio_channel_id_;
- // Calculate the difference between the lowest possible video delay and
- // the current audio delay.
- int current_diff_ms = current_video_delay_ms - current_audio_delay_ms +
- relative_delay_ms;
-
- avg_diff_ms_ = ((kFilterLength - 1) * avg_diff_ms_ +
- current_diff_ms) / kFilterLength;
- if (abs(avg_diff_ms_) < kMinDeltaMs) {
- // Don't adjust if the diff is within our margin.
- return false;
- }
-
- // Make sure we don't move too fast.
- int diff_ms = avg_diff_ms_ / 2;
- diff_ms = std::min(diff_ms, kMaxChangeMs);
- diff_ms = std::max(diff_ms, -kMaxChangeMs);
-
- // Reset the average after a move to prevent overshooting reaction.
- avg_diff_ms_ = 0;
-
- if (diff_ms > 0) {
- // The minimum video delay is longer than the current audio delay.
- // We need to decrease extra video delay, or add extra audio delay.
- if (channel_delay_->extra_video_delay_ms > base_target_delay_ms_) {
- // We have extra delay added to ViE. Reduce this delay before adding
- // extra delay to VoE.
- channel_delay_->extra_video_delay_ms -= diff_ms;
- channel_delay_->extra_audio_delay_ms = base_target_delay_ms_;
- } else { // channel_delay_->extra_video_delay_ms <= base_target_delay_ms_
- // We have no extra video delay to remove, increase the audio delay.
- channel_delay_->extra_audio_delay_ms += diff_ms;
- channel_delay_->extra_video_delay_ms = base_target_delay_ms_;
- }
- } else { // if (diff_ms > 0)
- // The video delay is lower than the current audio delay.
- // We need to decrease extra audio delay, or add extra video delay.
- if (channel_delay_->extra_audio_delay_ms > base_target_delay_ms_) {
- // We have extra delay in VoiceEngine.
- // Start with decreasing the voice delay.
- // Note: diff_ms is negative; add the negative difference.
- channel_delay_->extra_audio_delay_ms += diff_ms;
- channel_delay_->extra_video_delay_ms = base_target_delay_ms_;
- } else { // channel_delay_->extra_audio_delay_ms <= base_target_delay_ms_
- // We have no extra delay in VoiceEngine, increase the video delay.
- // Note: diff_ms is negative; subtract the negative difference.
- channel_delay_->extra_video_delay_ms -= diff_ms; // X - (-Y) = X + Y.
- channel_delay_->extra_audio_delay_ms = base_target_delay_ms_;
- }
- }
-
- // Make sure that video is never below our target.
- channel_delay_->extra_video_delay_ms = std::max(
- channel_delay_->extra_video_delay_ms, base_target_delay_ms_);
-
- int new_video_delay_ms;
- if (channel_delay_->extra_video_delay_ms > base_target_delay_ms_) {
- new_video_delay_ms = channel_delay_->extra_video_delay_ms;
- } else {
- // No change to the extra video delay. We are changing audio and we only
- // allow changing one at a time.
- new_video_delay_ms = channel_delay_->last_video_delay_ms;
- }
-
- // Make sure that we don't go below the extra video delay.
- new_video_delay_ms = std::max(
- new_video_delay_ms, channel_delay_->extra_video_delay_ms);
-
- // Verify we don't go above the maximum allowed video delay.
- new_video_delay_ms =
- std::min(new_video_delay_ms, base_target_delay_ms_ + kMaxDeltaDelayMs);
-
- int new_audio_delay_ms;
- if (channel_delay_->extra_audio_delay_ms > base_target_delay_ms_) {
- new_audio_delay_ms = channel_delay_->extra_audio_delay_ms;
- } else {
- // No change to the audio delay. We are changing video and we only
- // allow changing one at a time.
- new_audio_delay_ms = channel_delay_->last_audio_delay_ms;
- }
-
- // Make sure that we don't go below the extra audio delay.
- new_audio_delay_ms = std::max(
- new_audio_delay_ms, channel_delay_->extra_audio_delay_ms);
-
- // Verify we don't go above the maximum allowed audio delay.
- new_audio_delay_ms =
- std::min(new_audio_delay_ms, base_target_delay_ms_ + kMaxDeltaDelayMs);
-
- // Remember our last audio and video delays.
- channel_delay_->last_video_delay_ms = new_video_delay_ms;
- channel_delay_->last_audio_delay_ms = new_audio_delay_ms;
-
- LOG(LS_VERBOSE) << "Sync video delay " << new_video_delay_ms
- << " for video primary SSRC " << video_primary_ssrc_
- << " and audio delay " << channel_delay_->extra_audio_delay_ms
- << " for audio channel " << audio_channel_id_;
-
- // Return values.
- *total_video_delay_target_ms = new_video_delay_ms;
- *total_audio_delay_target_ms = new_audio_delay_ms;
- return true;
-}
-
-void StreamSynchronization::SetTargetBufferingDelay(int target_delay_ms) {
- // Initial extra delay for audio (accounting for existing extra delay).
- channel_delay_->extra_audio_delay_ms +=
- target_delay_ms - base_target_delay_ms_;
- channel_delay_->last_audio_delay_ms +=
- target_delay_ms - base_target_delay_ms_;
-
- // The video delay is compared to the last value (and how much we can update
- // is limited by that as well).
- channel_delay_->last_video_delay_ms +=
- target_delay_ms - base_target_delay_ms_;
-
- channel_delay_->extra_video_delay_ms +=
- target_delay_ms - base_target_delay_ms_;
-
- // Video is already delayed by the desired amount.
- base_target_delay_ms_ = target_delay_ms;
-}
-
-} // namespace webrtc
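ComputeDelays() is a small control loop: a length-4 IIR filter smooths the audio/video difference, a 30 ms dead zone suppresses jitter, and each step moves half the filtered difference, capped at 80 ms. A standalone sketch of that loop (constants copied from above, input numbers made up):

```cpp
#include <algorithm>
#include <cstdio>
#include <cstdlib>

int main() {
  const int kFilterLength = 4, kMinDeltaMs = 30, kMaxChangeMs = 80;
  int avg_diff_ms = 0;
  const int diffs[] = {200, 200, 200};  // Video 200 ms behind audio.
  for (int current_diff_ms : diffs) {
    // Length-4 IIR average of the measured difference.
    avg_diff_ms =
        ((kFilterLength - 1) * avg_diff_ms + current_diff_ms) / kFilterLength;
    if (std::abs(avg_diff_ms) < kMinDeltaMs)
      continue;  // Within the dead zone: don't adjust.
    // Move half the filtered difference, clamped to +/-80 ms per step.
    int move_ms =
        std::min(std::max(avg_diff_ms / 2, -kMaxChangeMs), kMaxChangeMs);
    printf("adjust by %d ms\n", move_ms);
    avg_diff_ms = 0;  // Reset after a move to avoid overshoot.
  }
  return 0;
}
```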
diff --git a/webrtc/video_engine/stream_synchronization.h b/webrtc/video_engine/stream_synchronization.h
deleted file mode 100644
index 1209062f9b..0000000000
--- a/webrtc/video_engine/stream_synchronization.h
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_VIDEO_ENGINE_STREAM_SYNCHRONIZATION_H_
-#define WEBRTC_VIDEO_ENGINE_STREAM_SYNCHRONIZATION_H_
-
-#include <list>
-
-#include "webrtc/system_wrappers/include/rtp_to_ntp.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-struct ViESyncDelay;
-
-class StreamSynchronization {
- public:
- struct Measurements {
- Measurements() : rtcp(), latest_receive_time_ms(0), latest_timestamp(0) {}
- RtcpList rtcp;
- int64_t latest_receive_time_ms;
- uint32_t latest_timestamp;
- };
-
- StreamSynchronization(uint32_t video_primary_ssrc, int audio_channel_id);
- ~StreamSynchronization();
-
- bool ComputeDelays(int relative_delay_ms,
- int current_audio_delay_ms,
- int* extra_audio_delay_ms,
- int* total_video_delay_target_ms);
-
- // On success, |relative_delay_ms| contains the number of milliseconds video
- // is rendered later than audio. If audio is played back later than video,
- // |relative_delay_ms| will be negative.
- static bool ComputeRelativeDelay(const Measurements& audio_measurement,
- const Measurements& video_measurement,
- int* relative_delay_ms);
- // Sets the target buffering delay - all audio and video will be delayed by
- // at least |target_delay_ms|.
- void SetTargetBufferingDelay(int target_delay_ms);
-
- private:
- ViESyncDelay* channel_delay_;
- const uint32_t video_primary_ssrc_;
- const int audio_channel_id_;
- int base_target_delay_ms_;
- int avg_diff_ms_;
-};
-} // namespace webrtc
-
-#endif // WEBRTC_VIDEO_ENGINE_STREAM_SYNCHRONIZATION_H_
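ComputeRelativeDelay() boils down to one subtraction once both capture times are mapped onto the sender clock: positive output means video lags audio. A worked example with made-up timestamps:

```cpp
#include <cassert>
#include <cstdint>

int main() {
  // Illustrative receive and (RTP/NTP-derived) capture times, in ms.
  int64_t audio_receive_ms = 10300, video_receive_ms = 10100;
  int64_t audio_capture_ms = 10000, video_capture_ms = 10000;
  int64_t relative_delay_ms =
      (video_receive_ms - audio_receive_ms) -
      (video_capture_ms - audio_capture_ms);
  assert(relative_delay_ms == -200);  // Audio rendered 200 ms after video.
  return 0;
}
```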
diff --git a/webrtc/video_engine/stream_synchronization_unittest.cc b/webrtc/video_engine/stream_synchronization_unittest.cc
deleted file mode 100644
index 7136f1e1c7..0000000000
--- a/webrtc/video_engine/stream_synchronization_unittest.cc
+++ /dev/null
@@ -1,562 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <algorithm>
-#include <math.h>
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/video_engine/stream_synchronization.h"
-
-namespace webrtc {
-
-// These correspond to the same constants defined in vie_sync_module.cc.
-enum { kMaxVideoDiffMs = 80 };
-enum { kMaxAudioDiffMs = 80 };
-enum { kMaxDelay = 1500 };
-
-// Test constants.
-enum { kDefaultAudioFrequency = 8000 };
-enum { kDefaultVideoFrequency = 90000 };
-const double kNtpFracPerMs = 4.294967296E6;
-static const int kSmoothingFilter = 4 * 2;
-
-class Time {
- public:
- explicit Time(int64_t offset)
- : kNtpJan1970(2208988800UL),
- time_now_ms_(offset) {}
-
- RtcpMeasurement GenerateRtcp(int frequency, uint32_t offset) const {
- RtcpMeasurement rtcp;
- NowNtp(&rtcp.ntp_secs, &rtcp.ntp_frac);
- rtcp.rtp_timestamp = NowRtp(frequency, offset);
- return rtcp;
- }
-
- void NowNtp(uint32_t* ntp_secs, uint32_t* ntp_frac) const {
- *ntp_secs = time_now_ms_ / 1000 + kNtpJan1970;
- int64_t remainder_ms = time_now_ms_ % 1000;
- *ntp_frac = static_cast<uint32_t>(
- static_cast<double>(remainder_ms) * kNtpFracPerMs + 0.5);
- }
-
- uint32_t NowRtp(int frequency, uint32_t offset) const {
- return frequency * time_now_ms_ / 1000 + offset;
- }
-
- void IncreaseTimeMs(int64_t inc) {
- time_now_ms_ += inc;
- }
-
- int64_t time_now_ms() const {
- return time_now_ms_;
- }
-
- private:
- // January 1970, in NTP seconds.
- const uint32_t kNtpJan1970;
- int64_t time_now_ms_;
-};
-
-class StreamSynchronizationTest : public ::testing::Test {
- protected:
- virtual void SetUp() {
- sync_ = new StreamSynchronization(0, 0);
- send_time_ = new Time(kSendTimeOffsetMs);
- receive_time_ = new Time(kReceiveTimeOffsetMs);
- audio_clock_drift_ = 1.0;
- video_clock_drift_ = 1.0;
- }
-
- virtual void TearDown() {
- delete sync_;
- delete send_time_;
- delete receive_time_;
- }
-
- // Generates the necessary RTCP measurements and RTP timestamps and computes
- // the audio and video delays needed to get the two streams in sync.
- // |audio_delay_ms| and |video_delay_ms| are the number of milliseconds after
- // capture at which the frames are rendered.
- // |current_audio_delay_ms| is the number of milliseconds which audio is
- // currently being delayed by the receiver.
- bool DelayedStreams(int audio_delay_ms,
- int video_delay_ms,
- int current_audio_delay_ms,
- int* extra_audio_delay_ms,
- int* total_video_delay_ms) {
- int audio_frequency = static_cast<int>(kDefaultAudioFrequency *
- audio_clock_drift_ + 0.5);
- int audio_offset = 0;
- int video_frequency = static_cast<int>(kDefaultVideoFrequency *
- video_clock_drift_ + 0.5);
- int video_offset = 0;
- StreamSynchronization::Measurements audio;
- StreamSynchronization::Measurements video;
- // Generate NTP/RTP timestamp pair for both streams corresponding to RTCP.
- audio.rtcp.push_front(send_time_->GenerateRtcp(audio_frequency,
- audio_offset));
- send_time_->IncreaseTimeMs(100);
- receive_time_->IncreaseTimeMs(100);
- video.rtcp.push_front(send_time_->GenerateRtcp(video_frequency,
- video_offset));
- send_time_->IncreaseTimeMs(900);
- receive_time_->IncreaseTimeMs(900);
- audio.rtcp.push_front(send_time_->GenerateRtcp(audio_frequency,
- audio_offset));
- send_time_->IncreaseTimeMs(100);
- receive_time_->IncreaseTimeMs(100);
- video.rtcp.push_front(send_time_->GenerateRtcp(video_frequency,
- video_offset));
- send_time_->IncreaseTimeMs(900);
- receive_time_->IncreaseTimeMs(900);
-
- // Capture an audio and a video frame at the same time.
- audio.latest_timestamp = send_time_->NowRtp(audio_frequency,
- audio_offset);
- video.latest_timestamp = send_time_->NowRtp(video_frequency,
- video_offset);
-
- if (audio_delay_ms > video_delay_ms) {
- // Audio later than video.
- receive_time_->IncreaseTimeMs(video_delay_ms);
- video.latest_receive_time_ms = receive_time_->time_now_ms();
- receive_time_->IncreaseTimeMs(audio_delay_ms - video_delay_ms);
- audio.latest_receive_time_ms = receive_time_->time_now_ms();
- } else {
- // Video later than audio.
- receive_time_->IncreaseTimeMs(audio_delay_ms);
- audio.latest_receive_time_ms = receive_time_->time_now_ms();
- receive_time_->IncreaseTimeMs(video_delay_ms - audio_delay_ms);
- video.latest_receive_time_ms = receive_time_->time_now_ms();
- }
- int relative_delay_ms;
- StreamSynchronization::ComputeRelativeDelay(audio, video,
- &relative_delay_ms);
- EXPECT_EQ(video_delay_ms - audio_delay_ms, relative_delay_ms);
- return sync_->ComputeDelays(relative_delay_ms,
- current_audio_delay_ms,
- extra_audio_delay_ms,
- total_video_delay_ms);
- }
-
- // Simulate audio playback 300 ms after capture and video rendering 100 ms
- // after capture. Verify that the correct extra delays are calculated for
- // audio and video, and that they change correctly when we simulate that
- // NetEQ or the VCM adds more delay to the streams.
- // TODO(holmer): This is currently wrong! We should simply change
- // audio_delay_ms or video_delay_ms since those now include VCM and NetEQ
- // delays.
- void BothDelayedAudioLaterTest(int base_target_delay) {
- int current_audio_delay_ms = base_target_delay;
- int audio_delay_ms = base_target_delay + 300;
- int video_delay_ms = base_target_delay + 100;
- int extra_audio_delay_ms = 0;
- int total_video_delay_ms = base_target_delay;
- int filtered_move = (audio_delay_ms - video_delay_ms) / kSmoothingFilter;
- const int kNeteqDelayIncrease = 50;
- const int kNeteqDelayDecrease = 10;
-
- EXPECT_TRUE(DelayedStreams(audio_delay_ms,
- video_delay_ms,
- current_audio_delay_ms,
- &extra_audio_delay_ms,
- &total_video_delay_ms));
- EXPECT_EQ(base_target_delay + filtered_move, total_video_delay_ms);
- EXPECT_EQ(base_target_delay, extra_audio_delay_ms);
- current_audio_delay_ms = extra_audio_delay_ms;
-
- send_time_->IncreaseTimeMs(1000);
- receive_time_->IncreaseTimeMs(1000 - std::max(audio_delay_ms,
- video_delay_ms));
- // Simulate base_target_delay minimum delay in the VCM.
- total_video_delay_ms = base_target_delay;
- EXPECT_TRUE(DelayedStreams(audio_delay_ms,
- video_delay_ms,
- current_audio_delay_ms,
- &extra_audio_delay_ms,
- &total_video_delay_ms));
- EXPECT_EQ(base_target_delay + 2 * filtered_move, total_video_delay_ms);
- EXPECT_EQ(base_target_delay, extra_audio_delay_ms);
- current_audio_delay_ms = extra_audio_delay_ms;
-
- send_time_->IncreaseTimeMs(1000);
- receive_time_->IncreaseTimeMs(1000 - std::max(audio_delay_ms,
- video_delay_ms));
- // Simulate base_target_delay minimum delay in the VCM.
- total_video_delay_ms = base_target_delay;
- EXPECT_TRUE(DelayedStreams(audio_delay_ms,
- video_delay_ms,
- current_audio_delay_ms,
- &extra_audio_delay_ms,
- &total_video_delay_ms));
- EXPECT_EQ(base_target_delay + 3 * filtered_move, total_video_delay_ms);
- EXPECT_EQ(base_target_delay, extra_audio_delay_ms);
-
- // Simulate that NetEQ introduces some audio delay.
- current_audio_delay_ms = base_target_delay + kNeteqDelayIncrease;
- send_time_->IncreaseTimeMs(1000);
- receive_time_->IncreaseTimeMs(1000 - std::max(audio_delay_ms,
- video_delay_ms));
- // Simulate base_target_delay minimum delay in the VCM.
- total_video_delay_ms = base_target_delay;
- EXPECT_TRUE(DelayedStreams(audio_delay_ms,
- video_delay_ms,
- current_audio_delay_ms,
- &extra_audio_delay_ms,
- &total_video_delay_ms));
- filtered_move = 3 * filtered_move +
- (kNeteqDelayIncrease + audio_delay_ms - video_delay_ms) /
- kSmoothingFilter;
- EXPECT_EQ(base_target_delay + filtered_move, total_video_delay_ms);
- EXPECT_EQ(base_target_delay, extra_audio_delay_ms);
-
- // Simulate that NetEQ reduces its delay.
- current_audio_delay_ms = base_target_delay + kNeteqDelayDecrease;
- send_time_->IncreaseTimeMs(1000);
- receive_time_->IncreaseTimeMs(1000 - std::max(audio_delay_ms,
- video_delay_ms));
- // Simulate base_target_delay minimum delay in the VCM.
- total_video_delay_ms = base_target_delay;
- EXPECT_TRUE(DelayedStreams(audio_delay_ms,
- video_delay_ms,
- current_audio_delay_ms,
- &extra_audio_delay_ms,
- &total_video_delay_ms));
-
- filtered_move = filtered_move +
- (kNeteqDelayDecrease + audio_delay_ms - video_delay_ms) /
- kSmoothingFilter;
-
- EXPECT_EQ(base_target_delay + filtered_move, total_video_delay_ms);
- EXPECT_EQ(base_target_delay, extra_audio_delay_ms);
- }
-
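-  // Walking through BothDelayedAudioLaterTest for base_target_delay == 0:
-  // each iteration moves the video delay by (300 - 100) / 8 = 25 ms, and the
-  // 50 ms NetEQ increase then yields 3 * 25 + (50 + 200) / 8 = 106 ms of
-  // accumulated video delay (integer division throughout).
-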
- void BothDelayedVideoLaterTest(int base_target_delay) {
- int current_audio_delay_ms = base_target_delay;
- int audio_delay_ms = base_target_delay + 100;
- int video_delay_ms = base_target_delay + 300;
- int extra_audio_delay_ms = 0;
- int total_video_delay_ms = base_target_delay;
-
- EXPECT_TRUE(DelayedStreams(audio_delay_ms,
- video_delay_ms,
- current_audio_delay_ms,
- &extra_audio_delay_ms,
- &total_video_delay_ms));
- EXPECT_EQ(base_target_delay, total_video_delay_ms);
- // The audio delay is not allowed to change more than this in 1 second.
- EXPECT_GE(base_target_delay + kMaxAudioDiffMs, extra_audio_delay_ms);
- current_audio_delay_ms = extra_audio_delay_ms;
- int current_extra_delay_ms = extra_audio_delay_ms;
-
- send_time_->IncreaseTimeMs(1000);
- receive_time_->IncreaseTimeMs(800);
- EXPECT_TRUE(DelayedStreams(audio_delay_ms,
- video_delay_ms,
- current_audio_delay_ms,
- &extra_audio_delay_ms,
- &total_video_delay_ms));
- EXPECT_EQ(base_target_delay, total_video_delay_ms);
-    // The audio delay is not allowed to change by more than half of the
-    // required change in delay.
- EXPECT_EQ(current_extra_delay_ms + MaxAudioDelayIncrease(
- current_audio_delay_ms,
- base_target_delay + video_delay_ms - audio_delay_ms),
- extra_audio_delay_ms);
- current_audio_delay_ms = extra_audio_delay_ms;
- current_extra_delay_ms = extra_audio_delay_ms;
-
- send_time_->IncreaseTimeMs(1000);
- receive_time_->IncreaseTimeMs(800);
- EXPECT_TRUE(DelayedStreams(audio_delay_ms,
- video_delay_ms,
- current_audio_delay_ms,
- &extra_audio_delay_ms,
- &total_video_delay_ms));
- EXPECT_EQ(base_target_delay, total_video_delay_ms);
-    // The audio delay is not allowed to change by more than half of the
-    // required change in delay.
- EXPECT_EQ(current_extra_delay_ms + MaxAudioDelayIncrease(
- current_audio_delay_ms,
- base_target_delay + video_delay_ms - audio_delay_ms),
- extra_audio_delay_ms);
- current_extra_delay_ms = extra_audio_delay_ms;
-
- // Simulate that NetEQ for some reason reduced the delay.
- current_audio_delay_ms = base_target_delay + 10;
- send_time_->IncreaseTimeMs(1000);
- receive_time_->IncreaseTimeMs(800);
- EXPECT_TRUE(DelayedStreams(audio_delay_ms,
- video_delay_ms,
- current_audio_delay_ms,
- &extra_audio_delay_ms,
- &total_video_delay_ms));
- EXPECT_EQ(base_target_delay, total_video_delay_ms);
-    // Since we can only ask NetEQ for a certain amount of extra delay, and
-    // we only measure the total NetEQ delay, we will ask for additional delay
-    // here to try to stay in sync.
- EXPECT_EQ(current_extra_delay_ms + MaxAudioDelayIncrease(
- current_audio_delay_ms,
- base_target_delay + video_delay_ms - audio_delay_ms),
- extra_audio_delay_ms);
- current_extra_delay_ms = extra_audio_delay_ms;
-
- // Simulate that NetEQ for some reason significantly increased the delay.
- current_audio_delay_ms = base_target_delay + 350;
- send_time_->IncreaseTimeMs(1000);
- receive_time_->IncreaseTimeMs(800);
- EXPECT_TRUE(DelayedStreams(audio_delay_ms,
- video_delay_ms,
- current_audio_delay_ms,
- &extra_audio_delay_ms,
- &total_video_delay_ms));
- EXPECT_EQ(base_target_delay, total_video_delay_ms);
-    // The audio delay is not allowed to change by more than half of the
-    // required change in delay.
- EXPECT_EQ(current_extra_delay_ms + MaxAudioDelayIncrease(
- current_audio_delay_ms,
- base_target_delay + video_delay_ms - audio_delay_ms),
- extra_audio_delay_ms);
- }
-
- int MaxAudioDelayIncrease(int current_audio_delay_ms, int delay_ms) {
- return std::min((delay_ms - current_audio_delay_ms) / kSmoothingFilter,
- static_cast<int>(kMaxAudioDiffMs));
- }
-
- int MaxAudioDelayDecrease(int current_audio_delay_ms, int delay_ms) {
- return std::max((delay_ms - current_audio_delay_ms) / kSmoothingFilter,
- -kMaxAudioDiffMs);
- }
-
- enum { kSendTimeOffsetMs = 98765 };
- enum { kReceiveTimeOffsetMs = 43210 };
-
- StreamSynchronization* sync_;
- Time* send_time_; // The simulated clock at the sender.
- Time* receive_time_; // The simulated clock at the receiver.
- double audio_clock_drift_;
- double video_clock_drift_;
-};
-
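-// To make the smoothing concrete: with kSmoothingFilter = 8 and
-// kMaxAudioDiffMs = 80, a stream that is 200 ms out of sync is pulled in by
-// min(200 / 8, 80) = 25 ms per ComputeDelays() call, which is why the tests
-// below expect roughly delay_ms / kSmoothingFilter of movement per iteration.
-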
-TEST_F(StreamSynchronizationTest, NoDelay) {
-  int current_audio_delay_ms = 0;
- int extra_audio_delay_ms = 0;
- int total_video_delay_ms = 0;
-
- EXPECT_FALSE(DelayedStreams(0, 0, current_audio_delay_ms,
- &extra_audio_delay_ms, &total_video_delay_ms));
- EXPECT_EQ(0, extra_audio_delay_ms);
- EXPECT_EQ(0, total_video_delay_ms);
-}
-
-TEST_F(StreamSynchronizationTest, VideoDelay) {
-  int current_audio_delay_ms = 0;
- int delay_ms = 200;
- int extra_audio_delay_ms = 0;
- int total_video_delay_ms = 0;
-
- EXPECT_TRUE(DelayedStreams(delay_ms, 0, current_audio_delay_ms,
- &extra_audio_delay_ms, &total_video_delay_ms));
- EXPECT_EQ(0, extra_audio_delay_ms);
- // The video delay is not allowed to change more than this in 1 second.
- EXPECT_EQ(delay_ms / kSmoothingFilter, total_video_delay_ms);
-
- send_time_->IncreaseTimeMs(1000);
- receive_time_->IncreaseTimeMs(800);
- // Simulate 0 minimum delay in the VCM.
- total_video_delay_ms = 0;
- EXPECT_TRUE(DelayedStreams(delay_ms, 0, current_audio_delay_ms,
- &extra_audio_delay_ms, &total_video_delay_ms));
- EXPECT_EQ(0, extra_audio_delay_ms);
- // The video delay is not allowed to change more than this in 1 second.
- EXPECT_EQ(2 * delay_ms / kSmoothingFilter, total_video_delay_ms);
-
- send_time_->IncreaseTimeMs(1000);
- receive_time_->IncreaseTimeMs(800);
- // Simulate 0 minimum delay in the VCM.
- total_video_delay_ms = 0;
- EXPECT_TRUE(DelayedStreams(delay_ms, 0, current_audio_delay_ms,
- &extra_audio_delay_ms, &total_video_delay_ms));
- EXPECT_EQ(0, extra_audio_delay_ms);
- EXPECT_EQ(3 * delay_ms / kSmoothingFilter, total_video_delay_ms);
-}
-
-TEST_F(StreamSynchronizationTest, AudioDelay) {
- int current_audio_delay_ms = 0;
- int delay_ms = 200;
- int extra_audio_delay_ms = 0;
- int total_video_delay_ms = 0;
-
- EXPECT_TRUE(DelayedStreams(0, delay_ms, current_audio_delay_ms,
- &extra_audio_delay_ms, &total_video_delay_ms));
- EXPECT_EQ(0, total_video_delay_ms);
- // The audio delay is not allowed to change more than this in 1 second.
- EXPECT_EQ(delay_ms / kSmoothingFilter, extra_audio_delay_ms);
- current_audio_delay_ms = extra_audio_delay_ms;
- int current_extra_delay_ms = extra_audio_delay_ms;
-
- send_time_->IncreaseTimeMs(1000);
- receive_time_->IncreaseTimeMs(800);
- EXPECT_TRUE(DelayedStreams(0, delay_ms, current_audio_delay_ms,
- &extra_audio_delay_ms, &total_video_delay_ms));
- EXPECT_EQ(0, total_video_delay_ms);
-  // The audio delay is not allowed to change by more than half of the
-  // required change in delay.
- EXPECT_EQ(current_extra_delay_ms +
- MaxAudioDelayIncrease(current_audio_delay_ms, delay_ms),
- extra_audio_delay_ms);
- current_audio_delay_ms = extra_audio_delay_ms;
- current_extra_delay_ms = extra_audio_delay_ms;
-
- send_time_->IncreaseTimeMs(1000);
- receive_time_->IncreaseTimeMs(800);
- EXPECT_TRUE(DelayedStreams(0, delay_ms, current_audio_delay_ms,
- &extra_audio_delay_ms, &total_video_delay_ms));
- EXPECT_EQ(0, total_video_delay_ms);
-  // The audio delay is not allowed to change by more than half of the
-  // required change in delay.
- EXPECT_EQ(current_extra_delay_ms +
- MaxAudioDelayIncrease(current_audio_delay_ms, delay_ms),
- extra_audio_delay_ms);
- current_extra_delay_ms = extra_audio_delay_ms;
-
- // Simulate that NetEQ for some reason reduced the delay.
- current_audio_delay_ms = 10;
- send_time_->IncreaseTimeMs(1000);
- receive_time_->IncreaseTimeMs(800);
- EXPECT_TRUE(DelayedStreams(0, delay_ms, current_audio_delay_ms,
- &extra_audio_delay_ms, &total_video_delay_ms));
- EXPECT_EQ(0, total_video_delay_ms);
-  // Since we can only ask NetEQ for a certain amount of extra delay, and
-  // we only measure the total NetEQ delay, we will ask for additional delay
-  // here to try to stay in sync.
- EXPECT_EQ(current_extra_delay_ms +
- MaxAudioDelayIncrease(current_audio_delay_ms, delay_ms),
- extra_audio_delay_ms);
- current_extra_delay_ms = extra_audio_delay_ms;
-
- // Simulate that NetEQ for some reason significantly increased the delay.
- current_audio_delay_ms = 350;
- send_time_->IncreaseTimeMs(1000);
- receive_time_->IncreaseTimeMs(800);
- EXPECT_TRUE(DelayedStreams(0, delay_ms, current_audio_delay_ms,
- &extra_audio_delay_ms, &total_video_delay_ms));
- EXPECT_EQ(0, total_video_delay_ms);
-  // The audio delay is not allowed to change by more than half of the
-  // required change in delay.
- EXPECT_EQ(current_extra_delay_ms +
- MaxAudioDelayDecrease(current_audio_delay_ms, delay_ms),
- extra_audio_delay_ms);
-}
-
-TEST_F(StreamSynchronizationTest, BothDelayedVideoLater) {
- BothDelayedVideoLaterTest(0);
-}
-
-TEST_F(StreamSynchronizationTest, BothDelayedVideoLaterAudioClockDrift) {
- audio_clock_drift_ = 1.05;
- BothDelayedVideoLaterTest(0);
-}
-
-TEST_F(StreamSynchronizationTest, BothDelayedVideoLaterVideoClockDrift) {
- video_clock_drift_ = 1.05;
- BothDelayedVideoLaterTest(0);
-}
-
-TEST_F(StreamSynchronizationTest, BothDelayedAudioLater) {
- BothDelayedAudioLaterTest(0);
-}
-
-TEST_F(StreamSynchronizationTest, BothDelayedAudioClockDrift) {
- audio_clock_drift_ = 1.05;
- BothDelayedAudioLaterTest(0);
-}
-
-TEST_F(StreamSynchronizationTest, BothDelayedVideoClockDrift) {
- video_clock_drift_ = 1.05;
- BothDelayedAudioLaterTest(0);
-}
-
-TEST_F(StreamSynchronizationTest, BaseDelay) {
- int base_target_delay_ms = 2000;
- int current_audio_delay_ms = 2000;
- int extra_audio_delay_ms = 0;
- int total_video_delay_ms = base_target_delay_ms;
- sync_->SetTargetBufferingDelay(base_target_delay_ms);
-  // We are in sync; don't change anything.
- EXPECT_FALSE(DelayedStreams(base_target_delay_ms, base_target_delay_ms,
- current_audio_delay_ms,
- &extra_audio_delay_ms, &total_video_delay_ms));
-  // Trigger another call with the same values; the delay should not change.
- base_target_delay_ms = 2000;
- current_audio_delay_ms = base_target_delay_ms;
- total_video_delay_ms = base_target_delay_ms;
- sync_->SetTargetBufferingDelay(base_target_delay_ms);
-  // We are in sync; don't change anything.
- EXPECT_FALSE(DelayedStreams(base_target_delay_ms, base_target_delay_ms,
- current_audio_delay_ms,
- &extra_audio_delay_ms, &total_video_delay_ms));
-  // Change the delay value. This is intended to test this module only; in
-  // practice it would take VoE time to adapt.
- base_target_delay_ms = 5000;
- current_audio_delay_ms = base_target_delay_ms;
- total_video_delay_ms = base_target_delay_ms;
- sync_->SetTargetBufferingDelay(base_target_delay_ms);
-  // We are in sync; don't change anything.
- EXPECT_FALSE(DelayedStreams(base_target_delay_ms, base_target_delay_ms,
- current_audio_delay_ms,
- &extra_audio_delay_ms, &total_video_delay_ms));
-}
-
-TEST_F(StreamSynchronizationTest, BothDelayedAudioLaterWithBaseDelay) {
- int base_target_delay_ms = 3000;
- sync_->SetTargetBufferingDelay(base_target_delay_ms);
- BothDelayedAudioLaterTest(base_target_delay_ms);
-}
-
-TEST_F(StreamSynchronizationTest, BothDelayedAudioClockDriftWithBaseDelay) {
- int base_target_delay_ms = 3000;
- sync_->SetTargetBufferingDelay(base_target_delay_ms);
- audio_clock_drift_ = 1.05;
- BothDelayedAudioLaterTest(base_target_delay_ms);
-}
-
-TEST_F(StreamSynchronizationTest, BothDelayedVideoClockDriftWithBaseDelay) {
- int base_target_delay_ms = 3000;
- sync_->SetTargetBufferingDelay(base_target_delay_ms);
- video_clock_drift_ = 1.05;
- BothDelayedAudioLaterTest(base_target_delay_ms);
-}
-
-TEST_F(StreamSynchronizationTest, BothDelayedVideoLaterWithBaseDelay) {
- int base_target_delay_ms = 2000;
- sync_->SetTargetBufferingDelay(base_target_delay_ms);
- BothDelayedVideoLaterTest(base_target_delay_ms);
-}
-
-TEST_F(StreamSynchronizationTest,
- BothDelayedVideoLaterAudioClockDriftWithBaseDelay) {
- int base_target_delay_ms = 2000;
- audio_clock_drift_ = 1.05;
- sync_->SetTargetBufferingDelay(base_target_delay_ms);
- BothDelayedVideoLaterTest(base_target_delay_ms);
-}
-
-TEST_F(StreamSynchronizationTest,
- BothDelayedVideoLaterVideoClockDriftWithBaseDelay) {
- int base_target_delay_ms = 2000;
- video_clock_drift_ = 1.05;
- sync_->SetTargetBufferingDelay(base_target_delay_ms);
- BothDelayedVideoLaterTest(base_target_delay_ms);
-}
-
-} // namespace webrtc
diff --git a/webrtc/video_engine/video_engine_core_unittests.gyp b/webrtc/video_engine/video_engine_core_unittests.gyp
deleted file mode 100644
index d0143442b4..0000000000
--- a/webrtc/video_engine/video_engine_core_unittests.gyp
+++ /dev/null
@@ -1,74 +0,0 @@
-# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS. All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-
-{
- 'includes': [
- '../build/common.gypi',
- ],
- 'targets': [
- {
- 'target_name': 'video_engine_core_unittests',
- 'type': '<(gtest_target_type)',
- 'dependencies': [
- '<(webrtc_root)/webrtc.gyp:webrtc',
- '<(webrtc_root)/modules/modules.gyp:video_capture_module_internal_impl',
- '<(webrtc_root)/modules/modules.gyp:video_render_module_internal_impl',
- '<(DEPTH)/testing/gtest.gyp:gtest',
- '<(DEPTH)/testing/gmock.gyp:gmock',
- '<(webrtc_root)/test/test.gyp:test_support_main',
- ],
- 'sources': [
- 'call_stats_unittest.cc',
- 'encoder_state_feedback_unittest.cc',
- 'overuse_frame_detector_unittest.cc',
- 'payload_router_unittest.cc',
- 'report_block_stats_unittest.cc',
- 'stream_synchronization_unittest.cc',
- 'vie_codec_unittest.cc',
- 'vie_remb_unittest.cc',
- ],
- 'conditions': [
- ['OS=="android"', {
- 'dependencies': [
- '<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
- ],
- }],
- ],
- },
- ], # targets
- 'conditions': [
- ['OS=="android"', {
- 'targets': [
- {
- 'target_name': 'video_engine_core_unittests_apk_target',
- 'type': 'none',
- 'dependencies': [
- '<(apk_tests_path):video_engine_core_unittests_apk',
- ],
- },
- ],
- }],
- ['test_isolation_mode != "noop"', {
- 'targets': [
- {
- 'target_name': 'video_engine_core_unittests_run',
- 'type': 'none',
- 'dependencies': [
- 'video_engine_core_unittests',
- ],
- 'includes': [
- '../build/isolate.gypi',
- ],
- 'sources': [
- 'video_engine_core_unittests.isolate',
- ],
- },
- ],
- }],
- ],
-}
diff --git a/webrtc/video_engine/video_engine_core_unittests.isolate b/webrtc/video_engine/video_engine_core_unittests.isolate
deleted file mode 100644
index c8d2fc9026..0000000000
--- a/webrtc/video_engine/video_engine_core_unittests.isolate
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS. All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-{
- 'conditions': [
- ['OS=="linux" or OS=="mac" or OS=="win"', {
- 'variables': {
- 'command': [
- '<(DEPTH)/testing/test_env.py',
- '<(PRODUCT_DIR)/video_engine_core_unittests<(EXECUTABLE_SUFFIX)',
- ],
- 'files': [
- '<(DEPTH)/testing/test_env.py',
- '<(PRODUCT_DIR)/video_engine_core_unittests<(EXECUTABLE_SUFFIX)',
- ],
- },
- }],
- ],
-}
diff --git a/webrtc/video_engine/vie_channel.cc b/webrtc/video_engine/vie_channel.cc
deleted file mode 100644
index 147ecb1456..0000000000
--- a/webrtc/video_engine/vie_channel.cc
+++ /dev/null
@@ -1,1253 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/video_engine/vie_channel.h"
-
-#include <algorithm>
-#include <vector>
-
-#include "webrtc/base/checks.h"
-#include "webrtc/base/logging.h"
-#include "webrtc/common.h"
-#include "webrtc/common_video/interface/incoming_video_stream.h"
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/frame_callback.h"
-#include "webrtc/modules/pacing/include/paced_sender.h"
-#include "webrtc/modules/pacing/include/packet_router.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
-#include "webrtc/modules/utility/interface/process_thread.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding.h"
-#include "webrtc/modules/video_processing/main/interface/video_processing.h"
-#include "webrtc/modules/video_render/include/video_render_defines.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/metrics.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
-#include "webrtc/video/receive_statistics_proxy.h"
-#include "webrtc/video_engine/call_stats.h"
-#include "webrtc/video_engine/payload_router.h"
-#include "webrtc/video_engine/report_block_stats.h"
-#include "webrtc/video_engine/vie_defines.h"
-
-namespace webrtc {
-
-const int kMaxDecodeWaitTimeMs = 50;
-static const int kMaxTargetDelayMs = 10000;
-static const float kMaxIncompleteTimeMultiplier = 3.5f;
-
-// Helper class receiving statistics callbacks.
-class ChannelStatsObserver : public CallStatsObserver {
- public:
- explicit ChannelStatsObserver(ViEChannel* owner) : owner_(owner) {}
- virtual ~ChannelStatsObserver() {}
-
- // Implements StatsObserver.
- virtual void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) {
- owner_->OnRttUpdate(avg_rtt_ms, max_rtt_ms);
- }
-
- private:
- ViEChannel* const owner_;
-};
-
-class ViEChannelProtectionCallback : public VCMProtectionCallback {
- public:
-  explicit ViEChannelProtectionCallback(ViEChannel* owner) : owner_(owner) {}
-  ~ViEChannelProtectionCallback() {}
-
- int ProtectionRequest(
- const FecProtectionParams* delta_fec_params,
- const FecProtectionParams* key_fec_params,
- uint32_t* sent_video_rate_bps,
- uint32_t* sent_nack_rate_bps,
- uint32_t* sent_fec_rate_bps) override {
- return owner_->ProtectionRequest(delta_fec_params, key_fec_params,
- sent_video_rate_bps, sent_nack_rate_bps,
- sent_fec_rate_bps);
- }
- private:
- ViEChannel* owner_;
-};
-
-ViEChannel::ViEChannel(uint32_t number_of_cores,
- Transport* transport,
- ProcessThread* module_process_thread,
- RtcpIntraFrameObserver* intra_frame_observer,
- RtcpBandwidthObserver* bandwidth_observer,
- TransportFeedbackObserver* transport_feedback_observer,
- RemoteBitrateEstimator* remote_bitrate_estimator,
- RtcpRttStats* rtt_stats,
- PacedSender* paced_sender,
- PacketRouter* packet_router,
- size_t max_rtp_streams,
- bool sender)
- : number_of_cores_(number_of_cores),
- sender_(sender),
- module_process_thread_(module_process_thread),
- crit_(CriticalSectionWrapper::CreateCriticalSection()),
- send_payload_router_(new PayloadRouter()),
- vcm_protection_callback_(new ViEChannelProtectionCallback(this)),
- vcm_(VideoCodingModule::Create(Clock::GetRealTimeClock(),
- nullptr,
- nullptr)),
- vie_receiver_(vcm_, remote_bitrate_estimator, this),
- vie_sync_(vcm_),
- stats_observer_(new ChannelStatsObserver(this)),
- receive_stats_callback_(nullptr),
- incoming_video_stream_(nullptr),
- intra_frame_observer_(intra_frame_observer),
- rtt_stats_(rtt_stats),
- paced_sender_(paced_sender),
- packet_router_(packet_router),
- bandwidth_observer_(bandwidth_observer),
- transport_feedback_observer_(transport_feedback_observer),
- nack_history_size_sender_(kSendSidePacketHistorySize),
- max_nack_reordering_threshold_(kMaxPacketAgeToNack),
- pre_render_callback_(NULL),
- report_block_stats_sender_(new ReportBlockStats()),
- time_of_first_rtt_ms_(-1),
- rtt_sum_ms_(0),
- last_rtt_ms_(0),
- num_rtts_(0),
- rtp_rtcp_modules_(
- CreateRtpRtcpModules(!sender,
- vie_receiver_.GetReceiveStatistics(),
- transport,
- sender ? intra_frame_observer_ : nullptr,
- sender ? bandwidth_observer_.get() : nullptr,
- transport_feedback_observer_,
- rtt_stats_,
- &rtcp_packet_type_counter_observer_,
- remote_bitrate_estimator,
- paced_sender_,
- packet_router_,
- &send_bitrate_observer_,
- &send_frame_count_observer_,
- &send_side_delay_observer_,
- max_rtp_streams)),
- num_active_rtp_rtcp_modules_(1) {
- vie_receiver_.SetRtpRtcpModule(rtp_rtcp_modules_[0]);
- vcm_->SetNackSettings(kMaxNackListSize, max_nack_reordering_threshold_, 0);
-}
-
-int32_t ViEChannel::Init() {
- module_process_thread_->RegisterModule(vie_receiver_.GetReceiveStatistics());
-
- // RTP/RTCP initialization.
- module_process_thread_->RegisterModule(rtp_rtcp_modules_[0]);
-
- rtp_rtcp_modules_[0]->SetKeyFrameRequestMethod(kKeyFrameReqPliRtcp);
- if (paced_sender_) {
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
- rtp_rtcp->SetStorePacketsStatus(true, nack_history_size_sender_);
- }
- packet_router_->AddRtpModule(rtp_rtcp_modules_[0]);
- if (sender_) {
- std::list<RtpRtcp*> send_rtp_modules(1, rtp_rtcp_modules_[0]);
- send_payload_router_->SetSendingRtpModules(send_rtp_modules);
- RTC_DCHECK(!send_payload_router_->active());
- }
- if (vcm_->RegisterReceiveCallback(this) != 0) {
- return -1;
- }
- vcm_->RegisterFrameTypeCallback(this);
- vcm_->RegisterReceiveStatisticsCallback(this);
- vcm_->RegisterDecoderTimingCallback(this);
- vcm_->SetRenderDelay(kViEDefaultRenderDelayMs);
-
- module_process_thread_->RegisterModule(vcm_);
- module_process_thread_->RegisterModule(&vie_sync_);
-
- return 0;
-}
-
-ViEChannel::~ViEChannel() {
- UpdateHistograms();
- // Make sure we don't get more callbacks from the RTP module.
- module_process_thread_->DeRegisterModule(
- vie_receiver_.GetReceiveStatistics());
- module_process_thread_->DeRegisterModule(vcm_);
- module_process_thread_->DeRegisterModule(&vie_sync_);
- send_payload_router_->SetSendingRtpModules(std::list<RtpRtcp*>());
- for (size_t i = 0; i < num_active_rtp_rtcp_modules_; ++i)
- packet_router_->RemoveRtpModule(rtp_rtcp_modules_[i]);
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
- module_process_thread_->DeRegisterModule(rtp_rtcp);
- delete rtp_rtcp;
- }
- if (decode_thread_) {
- StopDecodeThread();
- }
- // Release modules.
- VideoCodingModule::Destroy(vcm_);
-}
-
-void ViEChannel::UpdateHistograms() {
- int64_t now = Clock::GetRealTimeClock()->TimeInMilliseconds();
-
- {
- CriticalSectionScoped cs(crit_.get());
- int64_t elapsed_sec = (now - time_of_first_rtt_ms_) / 1000;
- if (time_of_first_rtt_ms_ != -1 && num_rtts_ > 0 &&
- elapsed_sec > metrics::kMinRunTimeInSeconds) {
- int64_t avg_rtt_ms = (rtt_sum_ms_ + num_rtts_ / 2) / num_rtts_;
- RTC_HISTOGRAM_COUNTS_10000(
- "WebRTC.Video.AverageRoundTripTimeInMilliseconds", avg_rtt_ms);
- }
- }
-
- if (sender_) {
- RtcpPacketTypeCounter rtcp_counter;
- GetSendRtcpPacketTypeCounter(&rtcp_counter);
- int64_t elapsed_sec = rtcp_counter.TimeSinceFirstPacketInMs(now) / 1000;
- if (elapsed_sec > metrics::kMinRunTimeInSeconds) {
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.NackPacketsReceivedPerMinute",
- rtcp_counter.nack_packets * 60 / elapsed_sec);
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.FirPacketsReceivedPerMinute",
- rtcp_counter.fir_packets * 60 / elapsed_sec);
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.PliPacketsReceivedPerMinute",
- rtcp_counter.pli_packets * 60 / elapsed_sec);
- if (rtcp_counter.nack_requests > 0) {
- RTC_HISTOGRAM_PERCENTAGE(
- "WebRTC.Video.UniqueNackRequestsReceivedInPercent",
- rtcp_counter.UniqueNackRequestsInPercent());
- }
- int fraction_lost = report_block_stats_sender_->FractionLostInPercent();
- if (fraction_lost != -1) {
- RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.SentPacketsLostInPercent",
- fraction_lost);
- }
- }
-
- StreamDataCounters rtp;
- StreamDataCounters rtx;
- GetSendStreamDataCounters(&rtp, &rtx);
- StreamDataCounters rtp_rtx = rtp;
- rtp_rtx.Add(rtx);
- elapsed_sec = rtp_rtx.TimeSinceFirstPacketInMs(
- Clock::GetRealTimeClock()->TimeInMilliseconds()) /
- 1000;
- if (elapsed_sec > metrics::kMinRunTimeInSeconds) {
- RTC_HISTOGRAM_COUNTS_100000(
- "WebRTC.Video.BitrateSentInKbps",
- static_cast<int>(rtp_rtx.transmitted.TotalBytes() * 8 / elapsed_sec /
- 1000));
- RTC_HISTOGRAM_COUNTS_10000(
- "WebRTC.Video.MediaBitrateSentInKbps",
- static_cast<int>(rtp.MediaPayloadBytes() * 8 / elapsed_sec / 1000));
- RTC_HISTOGRAM_COUNTS_10000(
- "WebRTC.Video.PaddingBitrateSentInKbps",
- static_cast<int>(rtp_rtx.transmitted.padding_bytes * 8 / elapsed_sec /
- 1000));
- RTC_HISTOGRAM_COUNTS_10000(
- "WebRTC.Video.RetransmittedBitrateSentInKbps",
- static_cast<int>(rtp_rtx.retransmitted.TotalBytes() * 8 /
- elapsed_sec / 1000));
- if (rtp_rtcp_modules_[0]->RtxSendStatus() != kRtxOff) {
- RTC_HISTOGRAM_COUNTS_10000(
- "WebRTC.Video.RtxBitrateSentInKbps",
- static_cast<int>(rtx.transmitted.TotalBytes() * 8 / elapsed_sec /
- 1000));
- }
- bool fec_enabled = false;
- uint8_t pltype_red;
- uint8_t pltype_fec;
- rtp_rtcp_modules_[0]->GenericFECStatus(fec_enabled, pltype_red,
- pltype_fec);
- if (fec_enabled) {
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.FecBitrateSentInKbps",
- static_cast<int>(rtp_rtx.fec.TotalBytes() *
- 8 / elapsed_sec / 1000));
- }
- }
- } else if (vie_receiver_.GetRemoteSsrc() > 0) {
-    // Get receive stats if we are receiving packets, i.e., there is a remote
-    // SSRC.
- RtcpPacketTypeCounter rtcp_counter;
- GetReceiveRtcpPacketTypeCounter(&rtcp_counter);
- int64_t elapsed_sec = rtcp_counter.TimeSinceFirstPacketInMs(now) / 1000;
- if (elapsed_sec > metrics::kMinRunTimeInSeconds) {
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.NackPacketsSentPerMinute",
- rtcp_counter.nack_packets * 60 / elapsed_sec);
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.FirPacketsSentPerMinute",
- rtcp_counter.fir_packets * 60 / elapsed_sec);
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.PliPacketsSentPerMinute",
- rtcp_counter.pli_packets * 60 / elapsed_sec);
- if (rtcp_counter.nack_requests > 0) {
- RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.UniqueNackRequestsSentInPercent",
- rtcp_counter.UniqueNackRequestsInPercent());
- }
- }
-
- StreamDataCounters rtp;
- StreamDataCounters rtx;
- GetReceiveStreamDataCounters(&rtp, &rtx);
- StreamDataCounters rtp_rtx = rtp;
- rtp_rtx.Add(rtx);
- elapsed_sec = rtp_rtx.TimeSinceFirstPacketInMs(now) / 1000;
- if (elapsed_sec > metrics::kMinRunTimeInSeconds) {
- RTC_HISTOGRAM_COUNTS_10000(
- "WebRTC.Video.BitrateReceivedInKbps",
- static_cast<int>(rtp_rtx.transmitted.TotalBytes() * 8 / elapsed_sec /
- 1000));
- RTC_HISTOGRAM_COUNTS_10000(
- "WebRTC.Video.MediaBitrateReceivedInKbps",
- static_cast<int>(rtp.MediaPayloadBytes() * 8 / elapsed_sec / 1000));
- RTC_HISTOGRAM_COUNTS_10000(
- "WebRTC.Video.PaddingBitrateReceivedInKbps",
- static_cast<int>(rtp_rtx.transmitted.padding_bytes * 8 / elapsed_sec /
- 1000));
- RTC_HISTOGRAM_COUNTS_10000(
- "WebRTC.Video.RetransmittedBitrateReceivedInKbps",
- static_cast<int>(rtp_rtx.retransmitted.TotalBytes() * 8 /
- elapsed_sec / 1000));
- uint32_t ssrc = 0;
- if (vie_receiver_.GetRtxSsrc(&ssrc)) {
- RTC_HISTOGRAM_COUNTS_10000(
- "WebRTC.Video.RtxBitrateReceivedInKbps",
- static_cast<int>(rtx.transmitted.TotalBytes() * 8 / elapsed_sec /
- 1000));
- }
- if (vie_receiver_.IsFecEnabled()) {
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.FecBitrateReceivedInKbps",
- static_cast<int>(rtp_rtx.fec.TotalBytes() *
- 8 / elapsed_sec / 1000));
- }
- }
- }
-}
-
-int32_t ViEChannel::SetSendCodec(const VideoCodec& video_codec,
- bool new_stream) {
- RTC_DCHECK(sender_);
- if (video_codec.codecType == kVideoCodecRED ||
- video_codec.codecType == kVideoCodecULPFEC) {
- LOG_F(LS_ERROR) << "Not a valid send codec " << video_codec.codecType;
- return -1;
- }
- if (kMaxSimulcastStreams < video_codec.numberOfSimulcastStreams) {
- LOG_F(LS_ERROR) << "Incorrect config "
- << video_codec.numberOfSimulcastStreams;
- return -1;
- }
- // Update the RTP module with the settings.
-  // Stopping and starting the RTP module triggers a new SSRC, if an SSRC
-  // hasn't been set explicitly.
- // The first layer is always active, so the first module can be checked for
- // sending status.
- bool is_sending = rtp_rtcp_modules_[0]->Sending();
- bool router_was_active = send_payload_router_->active();
- send_payload_router_->set_active(false);
- send_payload_router_->SetSendingRtpModules(std::list<RtpRtcp*>());
-
- std::vector<RtpRtcp*> registered_modules;
- std::vector<RtpRtcp*> deregistered_modules;
- size_t num_active_modules = video_codec.numberOfSimulcastStreams > 0
- ? video_codec.numberOfSimulcastStreams
- : 1;
- size_t num_prev_active_modules;
- {
- // Cache which modules are active so StartSend can know which ones to start.
- CriticalSectionScoped cs(crit_.get());
- num_prev_active_modules = num_active_rtp_rtcp_modules_;
- num_active_rtp_rtcp_modules_ = num_active_modules;
- }
- for (size_t i = 0; i < num_active_modules; ++i)
- registered_modules.push_back(rtp_rtcp_modules_[i]);
-
- for (size_t i = num_active_modules; i < rtp_rtcp_modules_.size(); ++i)
- deregistered_modules.push_back(rtp_rtcp_modules_[i]);
-
- // Disable inactive modules.
- for (RtpRtcp* rtp_rtcp : deregistered_modules) {
- rtp_rtcp->SetSendingStatus(false);
- rtp_rtcp->SetSendingMediaStatus(false);
- }
-
- // Configure active modules.
- for (RtpRtcp* rtp_rtcp : registered_modules) {
- rtp_rtcp->DeRegisterSendPayload(video_codec.plType);
- if (rtp_rtcp->RegisterSendPayload(video_codec) != 0) {
- return -1;
- }
- rtp_rtcp->SetSendingStatus(is_sending);
- rtp_rtcp->SetSendingMediaStatus(is_sending);
- }
-
-  // |RegisterRtpRtcpModules| resets all old weak pointers, so the old modules
-  // can be deleted after this step.
- vie_receiver_.RegisterRtpRtcpModules(registered_modules);
-
- // Update the packet and payload routers with the sending RtpRtcp modules.
- if (sender_) {
- std::list<RtpRtcp*> active_send_modules;
- for (RtpRtcp* rtp_rtcp : registered_modules)
- active_send_modules.push_back(rtp_rtcp);
- send_payload_router_->SetSendingRtpModules(active_send_modules);
- }
-
- if (router_was_active)
- send_payload_router_->set_active(true);
-
- // Deregister previously registered modules.
- for (size_t i = num_active_modules; i < num_prev_active_modules; ++i) {
- module_process_thread_->DeRegisterModule(rtp_rtcp_modules_[i]);
- packet_router_->RemoveRtpModule(rtp_rtcp_modules_[i]);
- }
- // Register new active modules.
- for (size_t i = num_prev_active_modules; i < num_active_modules; ++i) {
- module_process_thread_->RegisterModule(rtp_rtcp_modules_[i]);
- packet_router_->AddRtpModule(rtp_rtcp_modules_[i]);
- }
- return 0;
-}
-
-int32_t ViEChannel::SetReceiveCodec(const VideoCodec& video_codec) {
- RTC_DCHECK(!sender_);
- if (!vie_receiver_.SetReceiveCodec(video_codec)) {
- return -1;
- }
-
- if (video_codec.codecType != kVideoCodecRED &&
- video_codec.codecType != kVideoCodecULPFEC) {
- // Register codec type with VCM, but do not register RED or ULPFEC.
- if (vcm_->RegisterReceiveCodec(&video_codec, number_of_cores_, false) !=
- VCM_OK) {
- return -1;
- }
- }
- return 0;
-}
-
-int32_t ViEChannel::RegisterExternalDecoder(const uint8_t pl_type,
- VideoDecoder* decoder,
- bool buffered_rendering,
- int32_t render_delay) {
- RTC_DCHECK(!sender_);
-  int32_t result =
-      vcm_->RegisterExternalDecoder(decoder, pl_type, buffered_rendering);
- if (result != VCM_OK) {
- return result;
- }
- return vcm_->SetRenderDelay(render_delay);
-}
-
-int32_t ViEChannel::DeRegisterExternalDecoder(const uint8_t pl_type) {
- RTC_DCHECK(!sender_);
- VideoCodec current_receive_codec;
-  int32_t result = vcm_->ReceiveCodec(&current_receive_codec);
- if (vcm_->RegisterExternalDecoder(NULL, pl_type, false) != VCM_OK) {
- return -1;
- }
-
- if (result == 0 && current_receive_codec.plType == pl_type) {
- result = vcm_->RegisterReceiveCodec(&current_receive_codec,
- number_of_cores_, false);
- }
- return result;
-}
-
-int32_t ViEChannel::ReceiveCodecStatistics(uint32_t* num_key_frames,
- uint32_t* num_delta_frames) {
- CriticalSectionScoped cs(crit_.get());
- *num_key_frames = receive_frame_counts_.key_frames;
- *num_delta_frames = receive_frame_counts_.delta_frames;
- return 0;
-}
-
-uint32_t ViEChannel::DiscardedPackets() const {
- return vcm_->DiscardedPackets();
-}
-
-int ViEChannel::ReceiveDelay() const {
- return vcm_->Delay();
-}
-
-void ViEChannel::SetRTCPMode(const RtcpMode rtcp_mode) {
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
- rtp_rtcp->SetRTCPStatus(rtcp_mode);
-}
-
-void ViEChannel::SetProtectionMode(bool enable_nack,
- bool enable_fec,
- int payload_type_red,
- int payload_type_fec) {
- // Validate payload types.
- if (enable_fec) {
- RTC_DCHECK_GE(payload_type_red, 0);
- RTC_DCHECK_GE(payload_type_fec, 0);
- RTC_DCHECK_LE(payload_type_red, 127);
- RTC_DCHECK_LE(payload_type_fec, 127);
- } else {
- RTC_DCHECK_EQ(payload_type_red, -1);
- RTC_DCHECK_EQ(payload_type_fec, -1);
- // Set to valid uint8_ts to be castable later without signed overflows.
- payload_type_red = 0;
- payload_type_fec = 0;
- }
-
- VCMVideoProtection protection_method;
- if (enable_nack) {
- protection_method = enable_fec ? kProtectionNackFEC : kProtectionNack;
- } else {
- protection_method = kProtectionNone;
- }
-
- vcm_->SetVideoProtection(protection_method, true);
-
- // Set NACK.
- ProcessNACKRequest(enable_nack);
-
- // Set FEC.
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
- rtp_rtcp->SetGenericFECStatus(enable_fec,
- static_cast<uint8_t>(payload_type_red),
- static_cast<uint8_t>(payload_type_fec));
- }
-}
-
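-// As a hypothetical example of the contract above: enabling NACK+FEC could
-// look like SetProtectionMode(true, true, 96, 97) with real RED/FEC payload
-// types, while disabling FEC requires -1 for both payload types, per the
-// DCHECKs.
-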
-void ViEChannel::ProcessNACKRequest(const bool enable) {
- if (enable) {
- // Turn on NACK.
- if (rtp_rtcp_modules_[0]->RTCP() == RtcpMode::kOff)
- return;
- vie_receiver_.SetNackStatus(true, max_nack_reordering_threshold_);
-
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
- rtp_rtcp->SetStorePacketsStatus(true, nack_history_size_sender_);
-
- vcm_->RegisterPacketRequestCallback(this);
- // Don't introduce errors when NACK is enabled.
- vcm_->SetDecodeErrorMode(kNoErrors);
- } else {
- vcm_->RegisterPacketRequestCallback(NULL);
- if (paced_sender_ == nullptr) {
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
- rtp_rtcp->SetStorePacketsStatus(false, 0);
- }
- vie_receiver_.SetNackStatus(false, max_nack_reordering_threshold_);
- // When NACK is off, allow decoding with errors. Otherwise, the video
- // will freeze, and will only recover with a complete key frame.
- vcm_->SetDecodeErrorMode(kWithErrors);
- }
-}
-
-bool ViEChannel::IsSendingFecEnabled() {
- bool fec_enabled = false;
- uint8_t pltype_red = 0;
- uint8_t pltype_fec = 0;
-
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
- rtp_rtcp->GenericFECStatus(fec_enabled, pltype_red, pltype_fec);
- if (fec_enabled)
- return true;
- }
- return false;
-}
-
-int ViEChannel::SetSenderBufferingMode(int target_delay_ms) {
- if ((target_delay_ms < 0) || (target_delay_ms > kMaxTargetDelayMs)) {
- LOG(LS_ERROR) << "Invalid send buffer value.";
- return -1;
- }
- if (target_delay_ms == 0) {
- // Real-time mode.
- nack_history_size_sender_ = kSendSidePacketHistorySize;
- } else {
- nack_history_size_sender_ = GetRequiredNackListSize(target_delay_ms);
- // Don't allow a number lower than the default value.
- if (nack_history_size_sender_ < kSendSidePacketHistorySize) {
- nack_history_size_sender_ = kSendSidePacketHistorySize;
- }
- }
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
- rtp_rtcp->SetStorePacketsStatus(true, nack_history_size_sender_);
- return 0;
-}
-
-int ViEChannel::SetReceiverBufferingMode(int target_delay_ms) {
- if ((target_delay_ms < 0) || (target_delay_ms > kMaxTargetDelayMs)) {
- LOG(LS_ERROR) << "Invalid receive buffer delay value.";
- return -1;
- }
- int max_nack_list_size;
- int max_incomplete_time_ms;
- if (target_delay_ms == 0) {
- // Real-time mode - restore default settings.
- max_nack_reordering_threshold_ = kMaxPacketAgeToNack;
- max_nack_list_size = kMaxNackListSize;
- max_incomplete_time_ms = 0;
- } else {
- max_nack_list_size = 3 * GetRequiredNackListSize(target_delay_ms) / 4;
- max_nack_reordering_threshold_ = max_nack_list_size;
- // Calculate the max incomplete time and round to int.
- max_incomplete_time_ms = static_cast<int>(kMaxIncompleteTimeMultiplier *
- target_delay_ms + 0.5f);
- }
- vcm_->SetNackSettings(max_nack_list_size, max_nack_reordering_threshold_,
- max_incomplete_time_ms);
- vcm_->SetMinReceiverDelay(target_delay_ms);
- if (vie_sync_.SetTargetBufferingDelay(target_delay_ms) < 0)
- return -1;
- return 0;
-}
-
-int ViEChannel::GetRequiredNackListSize(int target_delay_ms) {
-  // The max size of the NACK list should be large enough to accommodate the
-  // number of packets (frames) resulting from the increased delay.
-  // Rough estimate: ~40 packets per frame @ 30fps.
- return target_delay_ms * 40 * 30 / 1000;
-}
-
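-// For example, a 1000 ms target delay yields 1000 * 40 * 30 / 1000 = 1200
-// packets of history, and SetReceiverBufferingMode() then caps its NACK list
-// at 3 * 1200 / 4 = 900 entries for the same target.
-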
-int ViEChannel::SetSendTimestampOffsetStatus(bool enable, int id) {
- // Disable any previous registrations of this extension to avoid errors.
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
- rtp_rtcp->DeregisterSendRtpHeaderExtension(
- kRtpExtensionTransmissionTimeOffset);
- }
- if (!enable)
- return 0;
- // Enable the extension.
- int error = 0;
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
- error |= rtp_rtcp->RegisterSendRtpHeaderExtension(
- kRtpExtensionTransmissionTimeOffset, id);
- }
- return error;
-}
-
-int ViEChannel::SetReceiveTimestampOffsetStatus(bool enable, int id) {
- return vie_receiver_.SetReceiveTimestampOffsetStatus(enable, id) ? 0 : -1;
-}
-
-int ViEChannel::SetSendAbsoluteSendTimeStatus(bool enable, int id) {
- // Disable any previous registrations of this extension to avoid errors.
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
- rtp_rtcp->DeregisterSendRtpHeaderExtension(kRtpExtensionAbsoluteSendTime);
- if (!enable)
- return 0;
- // Enable the extension.
- int error = 0;
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
- error |= rtp_rtcp->RegisterSendRtpHeaderExtension(
- kRtpExtensionAbsoluteSendTime, id);
- }
- return error;
-}
-
-int ViEChannel::SetReceiveAbsoluteSendTimeStatus(bool enable, int id) {
- return vie_receiver_.SetReceiveAbsoluteSendTimeStatus(enable, id) ? 0 : -1;
-}
-
-int ViEChannel::SetSendVideoRotationStatus(bool enable, int id) {
- // Disable any previous registrations of this extension to avoid errors.
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
- rtp_rtcp->DeregisterSendRtpHeaderExtension(kRtpExtensionVideoRotation);
- if (!enable)
- return 0;
- // Enable the extension.
- int error = 0;
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
- error |= rtp_rtcp->RegisterSendRtpHeaderExtension(
- kRtpExtensionVideoRotation, id);
- }
- return error;
-}
-
-int ViEChannel::SetReceiveVideoRotationStatus(bool enable, int id) {
- return vie_receiver_.SetReceiveVideoRotationStatus(enable, id) ? 0 : -1;
-}
-
-int ViEChannel::SetSendTransportSequenceNumber(bool enable, int id) {
- // Disable any previous registrations of this extension to avoid errors.
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
- rtp_rtcp->DeregisterSendRtpHeaderExtension(
- kRtpExtensionTransportSequenceNumber);
- }
- if (!enable)
- return 0;
- // Enable the extension.
- int error = 0;
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
- error |= rtp_rtcp->RegisterSendRtpHeaderExtension(
- kRtpExtensionTransportSequenceNumber, id);
- }
- return error;
-}
-
-int ViEChannel::SetReceiveTransportSequenceNumber(bool enable, int id) {
- return vie_receiver_.SetReceiveTransportSequenceNumber(enable, id) ? 0 : -1;
-}
-
-void ViEChannel::SetRtcpXrRrtrStatus(bool enable) {
- rtp_rtcp_modules_[0]->SetRtcpXrRrtrStatus(enable);
-}
-
-void ViEChannel::EnableTMMBR(bool enable) {
- rtp_rtcp_modules_[0]->SetTMMBRStatus(enable);
-}
-
-int32_t ViEChannel::SetSSRC(const uint32_t SSRC,
- const StreamType usage,
- const uint8_t simulcast_idx) {
- RtpRtcp* rtp_rtcp = rtp_rtcp_modules_[simulcast_idx];
- if (usage == kViEStreamTypeRtx) {
- rtp_rtcp->SetRtxSsrc(SSRC);
- } else {
- rtp_rtcp->SetSSRC(SSRC);
- }
- return 0;
-}
-
-int32_t ViEChannel::SetRemoteSSRCType(const StreamType usage,
- const uint32_t SSRC) {
- vie_receiver_.SetRtxSsrc(SSRC);
- return 0;
-}
-
-int32_t ViEChannel::GetLocalSSRC(uint8_t idx, unsigned int* ssrc) {
-  RTC_DCHECK_LT(idx, rtp_rtcp_modules_.size());
- *ssrc = rtp_rtcp_modules_[idx]->SSRC();
- return 0;
-}
-
-uint32_t ViEChannel::GetRemoteSSRC() {
- return vie_receiver_.GetRemoteSsrc();
-}
-
-int ViEChannel::SetRtxSendPayloadType(int payload_type,
- int associated_payload_type) {
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
- rtp_rtcp->SetRtxSendPayloadType(payload_type, associated_payload_type);
- SetRtxSendStatus(true);
- return 0;
-}
-
-void ViEChannel::SetRtxSendStatus(bool enable) {
- int rtx_settings =
- enable ? kRtxRetransmitted | kRtxRedundantPayloads : kRtxOff;
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
- rtp_rtcp->SetRtxSendStatus(rtx_settings);
-}
-
-void ViEChannel::SetRtxReceivePayloadType(int payload_type,
- int associated_payload_type) {
- vie_receiver_.SetRtxPayloadType(payload_type, associated_payload_type);
-}
-
-void ViEChannel::SetUseRtxPayloadMappingOnRestore(bool val) {
- vie_receiver_.SetUseRtxPayloadMappingOnRestore(val);
-}
-
-void ViEChannel::SetRtpStateForSsrc(uint32_t ssrc, const RtpState& rtp_state) {
- RTC_DCHECK(!rtp_rtcp_modules_[0]->Sending());
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
- if (rtp_rtcp->SetRtpStateForSsrc(ssrc, rtp_state))
- return;
- }
-}
-
-RtpState ViEChannel::GetRtpStateForSsrc(uint32_t ssrc) {
- RTC_DCHECK(!rtp_rtcp_modules_[0]->Sending());
- RtpState rtp_state;
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
- if (rtp_rtcp->GetRtpStateForSsrc(ssrc, &rtp_state))
- return rtp_state;
- }
- LOG(LS_ERROR) << "Couldn't get RTP state for ssrc: " << ssrc;
- return rtp_state;
-}
-
-// TODO(pbos): Set CNAME on all modules.
-int32_t ViEChannel::SetRTCPCName(const char* rtcp_cname) {
- RTC_DCHECK(!rtp_rtcp_modules_[0]->Sending());
- return rtp_rtcp_modules_[0]->SetCNAME(rtcp_cname);
-}
-
-int32_t ViEChannel::GetRemoteRTCPCName(char rtcp_cname[]) {
- uint32_t remoteSSRC = vie_receiver_.GetRemoteSsrc();
- return rtp_rtcp_modules_[0]->RemoteCNAME(remoteSSRC, rtcp_cname);
-}
-
-int32_t ViEChannel::GetSendRtcpStatistics(uint16_t* fraction_lost,
- uint32_t* cumulative_lost,
- uint32_t* extended_max,
- uint32_t* jitter_samples,
- int64_t* rtt_ms) {
- // Aggregate the report blocks associated with streams sent on this channel.
- std::vector<RTCPReportBlock> report_blocks;
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
- rtp_rtcp->RemoteRTCPStat(&report_blocks);
-
- if (report_blocks.empty())
- return -1;
-
- uint32_t remote_ssrc = vie_receiver_.GetRemoteSsrc();
- std::vector<RTCPReportBlock>::const_iterator it = report_blocks.begin();
- for (; it != report_blocks.end(); ++it) {
- if (it->remoteSSRC == remote_ssrc)
- break;
- }
- if (it == report_blocks.end()) {
- // We have not received packets with an SSRC matching the report blocks. To
- // have a chance of calculating an RTT we will try with the SSRC of the
- // first report block received.
- // This is very important for send-only channels where we don't know the
- // SSRC of the other end.
- remote_ssrc = report_blocks[0].remoteSSRC;
- }
-
- // TODO(asapersson): Change report_block_stats to not rely on
- // GetSendRtcpStatistics to be called.
- RTCPReportBlock report =
- report_block_stats_sender_->AggregateAndStore(report_blocks);
- *fraction_lost = report.fractionLost;
- *cumulative_lost = report.cumulativeLost;
- *extended_max = report.extendedHighSeqNum;
- *jitter_samples = report.jitter;
-
- int64_t dummy;
- int64_t rtt = 0;
- if (rtp_rtcp_modules_[0]->RTT(remote_ssrc, &rtt, &dummy, &dummy, &dummy) !=
- 0) {
- return -1;
- }
- *rtt_ms = rtt;
- return 0;
-}
-
-void ViEChannel::RegisterSendChannelRtcpStatisticsCallback(
- RtcpStatisticsCallback* callback) {
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
- rtp_rtcp->RegisterRtcpStatisticsCallback(callback);
-}
-
-void ViEChannel::RegisterReceiveChannelRtcpStatisticsCallback(
- RtcpStatisticsCallback* callback) {
- vie_receiver_.GetReceiveStatistics()->RegisterRtcpStatisticsCallback(
- callback);
- rtp_rtcp_modules_[0]->RegisterRtcpStatisticsCallback(callback);
-}
-
-void ViEChannel::RegisterRtcpPacketTypeCounterObserver(
- RtcpPacketTypeCounterObserver* observer) {
- rtcp_packet_type_counter_observer_.Set(observer);
-}
-
-void ViEChannel::GetSendStreamDataCounters(
- StreamDataCounters* rtp_counters,
- StreamDataCounters* rtx_counters) const {
- *rtp_counters = StreamDataCounters();
- *rtx_counters = StreamDataCounters();
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
- StreamDataCounters rtp_data;
- StreamDataCounters rtx_data;
- rtp_rtcp->GetSendStreamDataCounters(&rtp_data, &rtx_data);
- rtp_counters->Add(rtp_data);
- rtx_counters->Add(rtx_data);
- }
-}
-
-void ViEChannel::GetReceiveStreamDataCounters(
- StreamDataCounters* rtp_counters,
- StreamDataCounters* rtx_counters) const {
- StreamStatistician* statistician = vie_receiver_.GetReceiveStatistics()->
- GetStatistician(vie_receiver_.GetRemoteSsrc());
- if (statistician) {
- statistician->GetReceiveStreamDataCounters(rtp_counters);
- }
- uint32_t rtx_ssrc = 0;
- if (vie_receiver_.GetRtxSsrc(&rtx_ssrc)) {
- StreamStatistician* statistician =
- vie_receiver_.GetReceiveStatistics()->GetStatistician(rtx_ssrc);
- if (statistician) {
- statistician->GetReceiveStreamDataCounters(rtx_counters);
- }
- }
-}
-
-void ViEChannel::RegisterSendChannelRtpStatisticsCallback(
- StreamDataCountersCallback* callback) {
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
- rtp_rtcp->RegisterSendChannelRtpStatisticsCallback(callback);
-}
-
-void ViEChannel::RegisterReceiveChannelRtpStatisticsCallback(
- StreamDataCountersCallback* callback) {
- vie_receiver_.GetReceiveStatistics()->RegisterRtpStatisticsCallback(callback);
-}
-
-void ViEChannel::GetSendRtcpPacketTypeCounter(
- RtcpPacketTypeCounter* packet_counter) const {
- std::map<uint32_t, RtcpPacketTypeCounter> counter_map =
- rtcp_packet_type_counter_observer_.GetPacketTypeCounterMap();
-
- RtcpPacketTypeCounter counter;
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
- counter.Add(counter_map[rtp_rtcp->SSRC()]);
- *packet_counter = counter;
-}
-
-void ViEChannel::GetReceiveRtcpPacketTypeCounter(
- RtcpPacketTypeCounter* packet_counter) const {
- std::map<uint32_t, RtcpPacketTypeCounter> counter_map =
- rtcp_packet_type_counter_observer_.GetPacketTypeCounterMap();
-
- RtcpPacketTypeCounter counter;
- counter.Add(counter_map[vie_receiver_.GetRemoteSsrc()]);
-
- *packet_counter = counter;
-}
-
-void ViEChannel::RegisterSendSideDelayObserver(
- SendSideDelayObserver* observer) {
- send_side_delay_observer_.Set(observer);
-}
-
-void ViEChannel::RegisterSendBitrateObserver(
- BitrateStatisticsObserver* observer) {
- send_bitrate_observer_.Set(observer);
-}
-
-int32_t ViEChannel::StartSend() {
- CriticalSectionScoped cs(crit_.get());
-
- if (rtp_rtcp_modules_[0]->Sending())
- return -1;
-
- for (size_t i = 0; i < num_active_rtp_rtcp_modules_; ++i) {
- RtpRtcp* rtp_rtcp = rtp_rtcp_modules_[i];
- rtp_rtcp->SetSendingMediaStatus(true);
- rtp_rtcp->SetSendingStatus(true);
- }
- send_payload_router_->set_active(true);
- return 0;
-}
-
-int32_t ViEChannel::StopSend() {
- send_payload_router_->set_active(false);
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
- rtp_rtcp->SetSendingMediaStatus(false);
-
- if (!rtp_rtcp_modules_[0]->Sending()) {
- return -1;
- }
-
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
- rtp_rtcp->SetSendingStatus(false);
- }
- return 0;
-}
-
-bool ViEChannel::Sending() {
- return rtp_rtcp_modules_[0]->Sending();
-}
-
-void ViEChannel::StartReceive() {
- if (!sender_)
- StartDecodeThread();
- vie_receiver_.StartReceive();
-}
-
-void ViEChannel::StopReceive() {
- vie_receiver_.StopReceive();
- if (!sender_) {
- StopDecodeThread();
- vcm_->ResetDecoder();
- }
-}
-
-int32_t ViEChannel::ReceivedRTPPacket(const void* rtp_packet,
- size_t rtp_packet_length,
- const PacketTime& packet_time) {
- return vie_receiver_.ReceivedRTPPacket(
- rtp_packet, rtp_packet_length, packet_time);
-}
-
-int32_t ViEChannel::ReceivedRTCPPacket(const void* rtcp_packet,
- size_t rtcp_packet_length) {
- return vie_receiver_.ReceivedRTCPPacket(rtcp_packet, rtcp_packet_length);
-}
-
-int32_t ViEChannel::SetMTU(uint16_t mtu) {
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
- rtp_rtcp->SetMaxTransferUnit(mtu);
- return 0;
-}
-
-RtpRtcp* ViEChannel::rtp_rtcp() {
- return rtp_rtcp_modules_[0];
-}
-
-rtc::scoped_refptr<PayloadRouter> ViEChannel::send_payload_router() {
- return send_payload_router_;
-}
-
-VCMProtectionCallback* ViEChannel::vcm_protection_callback() {
- return vcm_protection_callback_.get();
-}
-
-CallStatsObserver* ViEChannel::GetStatsObserver() {
- return stats_observer_.get();
-}
-
-// Do not acquire the lock of |vcm_| in this function. Decode callback won't
-// necessarily be called from the decoding thread. The decoding thread may have
-// held the lock when calling VideoDecoder::Decode, Reset, or Release. Acquiring
-// the same lock in the path of decode callback can deadlock.
-int32_t ViEChannel::FrameToRender(VideoFrame& video_frame) { // NOLINT
- CriticalSectionScoped cs(crit_.get());
-
- if (pre_render_callback_ != NULL)
- pre_render_callback_->FrameCallback(&video_frame);
-
- // TODO(pbos): Remove stream id argument.
- incoming_video_stream_->RenderFrame(0xFFFFFFFF, video_frame);
- return 0;
-}
-
-int32_t ViEChannel::ReceivedDecodedReferenceFrame(
- const uint64_t picture_id) {
- return rtp_rtcp_modules_[0]->SendRTCPReferencePictureSelection(picture_id);
-}
-
-void ViEChannel::OnIncomingPayloadType(int payload_type) {
- CriticalSectionScoped cs(crit_.get());
- if (receive_stats_callback_)
- receive_stats_callback_->OnIncomingPayloadType(payload_type);
-}
-
-void ViEChannel::OnReceiveRatesUpdated(uint32_t bit_rate, uint32_t frame_rate) {
- CriticalSectionScoped cs(crit_.get());
- if (receive_stats_callback_)
- receive_stats_callback_->OnIncomingRate(frame_rate, bit_rate);
-}
-
-void ViEChannel::OnDiscardedPacketsUpdated(int discarded_packets) {
- CriticalSectionScoped cs(crit_.get());
- if (receive_stats_callback_)
- receive_stats_callback_->OnDiscardedPacketsUpdated(discarded_packets);
-}
-
-void ViEChannel::OnFrameCountsUpdated(const FrameCounts& frame_counts) {
- CriticalSectionScoped cs(crit_.get());
- receive_frame_counts_ = frame_counts;
- if (receive_stats_callback_)
- receive_stats_callback_->OnFrameCountsUpdated(frame_counts);
-}
-
-void ViEChannel::OnDecoderTiming(int decode_ms,
- int max_decode_ms,
- int current_delay_ms,
- int target_delay_ms,
- int jitter_buffer_ms,
- int min_playout_delay_ms,
- int render_delay_ms) {
- CriticalSectionScoped cs(crit_.get());
- if (!receive_stats_callback_)
- return;
- receive_stats_callback_->OnDecoderTiming(
- decode_ms, max_decode_ms, current_delay_ms, target_delay_ms,
- jitter_buffer_ms, min_playout_delay_ms, render_delay_ms, last_rtt_ms_);
-}
-
-int32_t ViEChannel::RequestKeyFrame() {
- return rtp_rtcp_modules_[0]->RequestKeyFrame();
-}
-
-int32_t ViEChannel::SliceLossIndicationRequest(
- const uint64_t picture_id) {
- return rtp_rtcp_modules_[0]->SendRTCPSliceLossIndication(
- static_cast<uint8_t>(picture_id));
-}
-
-int32_t ViEChannel::ResendPackets(const uint16_t* sequence_numbers,
- uint16_t length) {
- return rtp_rtcp_modules_[0]->SendNACK(sequence_numbers, length);
-}
-
-bool ViEChannel::ChannelDecodeThreadFunction(void* obj) {
- return static_cast<ViEChannel*>(obj)->ChannelDecodeProcess();
-}
-
-bool ViEChannel::ChannelDecodeProcess() {
- vcm_->Decode(kMaxDecodeWaitTimeMs);
- return true;
-}
-
-void ViEChannel::OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) {
- vcm_->SetReceiveChannelParameters(max_rtt_ms);
-
- CriticalSectionScoped cs(crit_.get());
- if (time_of_first_rtt_ms_ == -1)
- time_of_first_rtt_ms_ = Clock::GetRealTimeClock()->TimeInMilliseconds();
- rtt_sum_ms_ += avg_rtt_ms;
- last_rtt_ms_ = avg_rtt_ms;
- ++num_rtts_;
-}
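// A sketch of how the fields accumulated above reduce to a lifetime average
// at reporting time (invented struct; the real fields live on ViEChannel and
// are guarded by |crit_|).
#include <cstdint>

struct RttTrackerSketch {
  int64_t rtt_sum_ms = 0;
  int64_t last_rtt_ms = 0;
  int64_t num_rtts = 0;

  void OnRttUpdate(int64_t avg_rtt_ms) {
    rtt_sum_ms += avg_rtt_ms;
    last_rtt_ms = avg_rtt_ms;
    ++num_rtts;
  }

  // Lifetime average; -1 when no RTT sample has been observed yet.
  int64_t AverageRttMs() const {
    return num_rtts == 0 ? -1 : rtt_sum_ms / num_rtts;
  }
};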
-
-int ViEChannel::ProtectionRequest(const FecProtectionParams* delta_fec_params,
- const FecProtectionParams* key_fec_params,
- uint32_t* video_rate_bps,
- uint32_t* nack_rate_bps,
- uint32_t* fec_rate_bps) {
- *video_rate_bps = 0;
- *nack_rate_bps = 0;
- *fec_rate_bps = 0;
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
- uint32_t not_used = 0;
- uint32_t module_video_rate = 0;
- uint32_t module_fec_rate = 0;
- uint32_t module_nack_rate = 0;
- rtp_rtcp->SetFecParameters(delta_fec_params, key_fec_params);
- rtp_rtcp->BitrateSent(&not_used, &module_video_rate, &module_fec_rate,
- &module_nack_rate);
- *video_rate_bps += module_video_rate;
- *nack_rate_bps += module_nack_rate;
- *fec_rate_bps += module_fec_rate;
- }
- return 0;
-}
-
-std::vector<RtpRtcp*> ViEChannel::CreateRtpRtcpModules(
- bool receiver_only,
- ReceiveStatistics* receive_statistics,
- Transport* outgoing_transport,
- RtcpIntraFrameObserver* intra_frame_callback,
- RtcpBandwidthObserver* bandwidth_callback,
- TransportFeedbackObserver* transport_feedback_callback,
- RtcpRttStats* rtt_stats,
- RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer,
- RemoteBitrateEstimator* remote_bitrate_estimator,
- RtpPacketSender* paced_sender,
- TransportSequenceNumberAllocator* transport_sequence_number_allocator,
- BitrateStatisticsObserver* send_bitrate_observer,
- FrameCountObserver* send_frame_count_observer,
- SendSideDelayObserver* send_side_delay_observer,
- size_t num_modules) {
- RTC_DCHECK_GT(num_modules, 0u);
- RtpRtcp::Configuration configuration;
- // The default Configuration carries a null-object sink; remember it so the
- // non-primary modules below can be handed a no-op statistics receiver.
- ReceiveStatistics* null_receive_statistics = configuration.receive_statistics;
- configuration.audio = false;
- configuration.receiver_only = receiver_only;
- configuration.receive_statistics = receive_statistics;
- configuration.outgoing_transport = outgoing_transport;
- configuration.intra_frame_callback = intra_frame_callback;
- configuration.rtt_stats = rtt_stats;
- configuration.rtcp_packet_type_counter_observer =
- rtcp_packet_type_counter_observer;
- configuration.paced_sender = paced_sender;
- configuration.transport_sequence_number_allocator =
- transport_sequence_number_allocator;
- configuration.send_bitrate_observer = send_bitrate_observer;
- configuration.send_frame_count_observer = send_frame_count_observer;
- configuration.send_side_delay_observer = send_side_delay_observer;
- configuration.bandwidth_callback = bandwidth_callback;
- configuration.transport_feedback_callback = transport_feedback_callback;
-
- std::vector<RtpRtcp*> modules;
- for (size_t i = 0; i < num_modules; ++i) {
- RtpRtcp* rtp_rtcp = RtpRtcp::CreateRtpRtcp(configuration);
- rtp_rtcp->SetSendingStatus(false);
- rtp_rtcp->SetSendingMediaStatus(false);
- rtp_rtcp->SetRTCPStatus(RtcpMode::kCompound);
- modules.push_back(rtp_rtcp);
- // Receive statistics and remote bitrate estimator should only be set for
- // the primary (first) module.
- configuration.receive_statistics = null_receive_statistics;
- configuration.remote_bitrate_estimator = nullptr;
- }
- return modules;
-}
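// The loop above reuses one Configuration for every module and blanks the
// receiver-specific fields after the first pass, so only the primary module
// observes receive statistics. A generic, self-contained sketch of that
// pattern (invented types, not the RtpRtcp API):
#include <cstddef>
#include <vector>

struct ConfigSketch {
  int* receive_stats = nullptr;  // Stands in for ReceiveStatistics*.
};

struct ModuleSketch {
  explicit ModuleSketch(const ConfigSketch& c) : stats(c.receive_stats) {}
  int* stats;
};

std::vector<ModuleSketch> CreateModulesSketch(int* stats, size_t n) {
  ConfigSketch config;
  config.receive_stats = stats;
  std::vector<ModuleSketch> modules;
  for (size_t i = 0; i < n; ++i) {
    modules.emplace_back(config);
    config.receive_stats = nullptr;  // Non-primary modules get no sink.
  }
  return modules;
}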
-
-void ViEChannel::StartDecodeThread() {
- RTC_DCHECK(!sender_);
- // Start the decode thread.
- if (decode_thread_)
- return;
- decode_thread_ = ThreadWrapper::CreateThread(ChannelDecodeThreadFunction,
- this, "DecodingThread");
- decode_thread_->Start();
- decode_thread_->SetPriority(kHighestPriority);
-}
-
-void ViEChannel::StopDecodeThread() {
- if (!decode_thread_)
- return;
-
- vcm_->TriggerDecoderShutdown();
-
- decode_thread_->Stop();
- decode_thread_.reset();
-}
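// Shutdown ordering matters above: TriggerDecoderShutdown() wakes a decode
// call that may be blocked for up to kMaxDecodeWaitTimeMs, so Stop() does not
// hang waiting on the loop. A C++11 sketch of the same ordering (invented
// stand-ins, not the ThreadWrapper/VCM API):
#include <atomic>
#include <chrono>
#include <condition_variable>
#include <mutex>
#include <thread>

std::mutex decode_mutex;
std::condition_variable decode_cv;
std::atomic<bool> decode_shutdown{false};

void DecodeLoopSketch() {
  while (!decode_shutdown) {
    std::unique_lock<std::mutex> lock(decode_mutex);
    decode_cv.wait_for(lock, std::chrono::milliseconds(50));  // ~Decode(wait).
  }
}

void StopDecodeThreadSketch(std::thread& t) {
  decode_shutdown = true;  // ~TriggerDecoderShutdown(): unblock the wait...
  decode_cv.notify_all();
  t.join();                // ...so the join returns promptly.
}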
-
-int32_t ViEChannel::SetVoiceChannel(int32_t ve_channel_id,
- VoEVideoSync* ve_sync_interface) {
- return vie_sync_.ConfigureSync(ve_channel_id, ve_sync_interface,
- rtp_rtcp_modules_[0],
- vie_receiver_.GetRtpReceiver());
-}
-
-int32_t ViEChannel::VoiceChannel() {
- return vie_sync_.VoiceChannel();
-}
-
-void ViEChannel::RegisterPreRenderCallback(
- I420FrameCallback* pre_render_callback) {
- CriticalSectionScoped cs(crit_.get());
- pre_render_callback_ = pre_render_callback;
-}
-
-void ViEChannel::RegisterPreDecodeImageCallback(
- EncodedImageCallback* pre_decode_callback) {
- vcm_->RegisterPreDecodeImageCallback(pre_decode_callback);
-}
-
- // TODO(pbos): Remove OnInitializeDecoder, which is called from the RTP
- // module; any decoder resetting should be handled internally within the VCM.
-int32_t ViEChannel::OnInitializeDecoder(
- const int8_t payload_type,
- const char payload_name[RTP_PAYLOAD_NAME_SIZE],
- const int frequency,
- const uint8_t channels,
- const uint32_t rate) {
- LOG(LS_INFO) << "OnInitializeDecoder " << static_cast<int>(payload_type)
- << " " << payload_name;
- vcm_->ResetDecoder();
-
- return 0;
-}
-
-void ViEChannel::OnIncomingSSRCChanged(const uint32_t ssrc) {
- rtp_rtcp_modules_[0]->SetRemoteSSRC(ssrc);
-}
-
-void ViEChannel::OnIncomingCSRCChanged(const uint32_t CSRC, const bool added) {}
-
-void ViEChannel::RegisterSendFrameCountObserver(
- FrameCountObserver* observer) {
- send_frame_count_observer_.Set(observer);
-}
-
-void ViEChannel::RegisterReceiveStatisticsProxy(
- ReceiveStatisticsProxy* receive_statistics_proxy) {
- CriticalSectionScoped cs(crit_.get());
- receive_stats_callback_ = receive_statistics_proxy;
-}
-
-void ViEChannel::SetIncomingVideoStream(
- IncomingVideoStream* incoming_video_stream) {
- CriticalSectionScoped cs(crit_.get());
- incoming_video_stream_ = incoming_video_stream;
-}
-} // namespace webrtc
diff --git a/webrtc/video_engine/vie_channel.h b/webrtc/video_engine/vie_channel.h
deleted file mode 100644
index 41c679aafa..0000000000
--- a/webrtc/video_engine/vie_channel.h
+++ /dev/null
@@ -1,458 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_VIDEO_ENGINE_VIE_CHANNEL_H_
-#define WEBRTC_VIDEO_ENGINE_VIE_CHANNEL_H_
-
-#include <list>
-
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/base/scoped_ref_ptr.h"
-#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding_defines.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
-#include "webrtc/typedefs.h"
-#include "webrtc/video_engine/vie_defines.h"
-#include "webrtc/video_engine/vie_receiver.h"
-#include "webrtc/video_engine/vie_sync_module.h"
-
-namespace webrtc {
-
-class CallStatsObserver;
-class ChannelStatsObserver;
-class Config;
-class CriticalSectionWrapper;
-class EncodedImageCallback;
-class I420FrameCallback;
-class IncomingVideoStream;
-class PacedSender;
-class PacketRouter;
-class PayloadRouter;
-class ProcessThread;
-class ReceiveStatisticsProxy;
-class ReportBlockStats;
-class RtcpRttStats;
-class ThreadWrapper;
-class ViEChannelProtectionCallback;
-class ViERTPObserver;
-class VideoCodingModule;
-class VideoDecoder;
-class VideoRenderCallback;
-class VoEVideoSync;
-
-enum StreamType {
- kViEStreamTypeNormal = 0, // Normal media stream
- kViEStreamTypeRtx = 1 // Retransmission media stream
-};
-
-class ViEChannel : public VCMFrameTypeCallback,
- public VCMReceiveCallback,
- public VCMReceiveStatisticsCallback,
- public VCMDecoderTimingCallback,
- public VCMPacketRequestCallback,
- public RtpFeedback {
- public:
- friend class ChannelStatsObserver;
- friend class ViEChannelProtectionCallback;
-
- ViEChannel(uint32_t number_of_cores,
- Transport* transport,
- ProcessThread* module_process_thread,
- RtcpIntraFrameObserver* intra_frame_observer,
- RtcpBandwidthObserver* bandwidth_observer,
- TransportFeedbackObserver* transport_feedback_observer,
- RemoteBitrateEstimator* remote_bitrate_estimator,
- RtcpRttStats* rtt_stats,
- PacedSender* paced_sender,
- PacketRouter* packet_router,
- size_t max_rtp_streams,
- bool sender);
- ~ViEChannel();
-
- int32_t Init();
-
- // Sets the encoder to use for the channel. |new_stream| indicates the encoder
- // type has changed and we should start a new RTP stream.
- int32_t SetSendCodec(const VideoCodec& video_codec, bool new_stream = true);
- int32_t SetReceiveCodec(const VideoCodec& video_codec);
- // Registers an external decoder. |buffered_rendering| means that the decoder
- // will render frames after decoding according to the render timestamp
- // provided by the video coding module. |render_delay| indicates the time
- // needed to decode and render a frame.
- int32_t RegisterExternalDecoder(const uint8_t pl_type,
- VideoDecoder* decoder,
- bool buffered_rendering,
- int32_t render_delay);
- int32_t DeRegisterExternalDecoder(const uint8_t pl_type);
- int32_t ReceiveCodecStatistics(uint32_t* num_key_frames,
- uint32_t* num_delta_frames);
- uint32_t DiscardedPackets() const;
-
- // Returns the estimated delay in milliseconds.
- int ReceiveDelay() const;
-
- void SetRTCPMode(const RtcpMode rtcp_mode);
- void SetProtectionMode(bool enable_nack,
- bool enable_fec,
- int payload_type_red,
- int payload_type_fec);
- bool IsSendingFecEnabled();
- int SetSenderBufferingMode(int target_delay_ms);
- int SetReceiverBufferingMode(int target_delay_ms);
- int SetSendTimestampOffsetStatus(bool enable, int id);
- int SetReceiveTimestampOffsetStatus(bool enable, int id);
- int SetSendAbsoluteSendTimeStatus(bool enable, int id);
- int SetReceiveAbsoluteSendTimeStatus(bool enable, int id);
- int SetSendVideoRotationStatus(bool enable, int id);
- int SetReceiveVideoRotationStatus(bool enable, int id);
- int SetSendTransportSequenceNumber(bool enable, int id);
- int SetReceiveTransportSequenceNumber(bool enable, int id);
- void SetRtcpXrRrtrStatus(bool enable);
- void EnableTMMBR(bool enable);
-
- // Sets SSRC for outgoing stream.
- int32_t SetSSRC(const uint32_t SSRC,
- const StreamType usage,
- const unsigned char simulcast_idx);
-
- // Gets SSRC for outgoing stream number |idx|.
- int32_t GetLocalSSRC(uint8_t idx, unsigned int* ssrc);
-
- // Gets SSRC for the incoming stream.
- uint32_t GetRemoteSSRC();
-
- int SetRtxSendPayloadType(int payload_type, int associated_payload_type);
- void SetRtxReceivePayloadType(int payload_type, int associated_payload_type);
- // If set to true, the RTX payload type mapping supplied in
- // |SetRtxReceivePayloadType| will be used when restoring RTX packets. Without
- // it, RTX packets will always be restored to the last non-RTX packet payload
- // type received.
- void SetUseRtxPayloadMappingOnRestore(bool val);
-
- void SetRtpStateForSsrc(uint32_t ssrc, const RtpState& rtp_state);
- RtpState GetRtpStateForSsrc(uint32_t ssrc);
-
- // Sets the CName for the outgoing stream on the channel.
- int32_t SetRTCPCName(const char* rtcp_cname);
-
- // Gets the CName of the incoming stream.
- int32_t GetRemoteRTCPCName(char rtcp_cname[]);
-
- // Returns statistics reported by the remote client in an RTCP packet.
- // TODO(pbos): Remove this along with VideoSendStream::GetRtt().
- int32_t GetSendRtcpStatistics(uint16_t* fraction_lost,
- uint32_t* cumulative_lost,
- uint32_t* extended_max,
- uint32_t* jitter_samples,
- int64_t* rtt_ms);
-
- // Called on receipt of RTCP report block from remote side.
- void RegisterSendChannelRtcpStatisticsCallback(
- RtcpStatisticsCallback* callback);
-
- // Called on generation of RTCP stats
- void RegisterReceiveChannelRtcpStatisticsCallback(
- RtcpStatisticsCallback* callback);
-
- // Gets send statistics for the rtp and rtx stream.
- void GetSendStreamDataCounters(StreamDataCounters* rtp_counters,
- StreamDataCounters* rtx_counters) const;
-
- // Gets received stream data counters.
- void GetReceiveStreamDataCounters(StreamDataCounters* rtp_counters,
- StreamDataCounters* rtx_counters) const;
-
- // Called on update of RTP statistics.
- void RegisterSendChannelRtpStatisticsCallback(
- StreamDataCountersCallback* callback);
-
- // Called on update of RTP statistics.
- void RegisterReceiveChannelRtpStatisticsCallback(
- StreamDataCountersCallback* callback);
-
- void GetSendRtcpPacketTypeCounter(
- RtcpPacketTypeCounter* packet_counter) const;
-
- void GetReceiveRtcpPacketTypeCounter(
- RtcpPacketTypeCounter* packet_counter) const;
-
- void RegisterSendSideDelayObserver(SendSideDelayObserver* observer);
-
- // Called on any new send bitrate estimate.
- void RegisterSendBitrateObserver(BitrateStatisticsObserver* observer);
-
- // Implements RtpFeedback.
- int32_t OnInitializeDecoder(const int8_t payload_type,
- const char payload_name[RTP_PAYLOAD_NAME_SIZE],
- const int frequency,
- const uint8_t channels,
- const uint32_t rate) override;
- void OnIncomingSSRCChanged(const uint32_t ssrc) override;
- void OnIncomingCSRCChanged(const uint32_t CSRC, const bool added) override;
-
- int32_t SetRemoteSSRCType(const StreamType usage, const uint32_t SSRC);
-
- int32_t StartSend();
- int32_t StopSend();
- bool Sending();
- void StartReceive();
- void StopReceive();
-
- int32_t ReceivedRTPPacket(const void* rtp_packet,
- const size_t rtp_packet_length,
- const PacketTime& packet_time);
- int32_t ReceivedRTCPPacket(const void* rtcp_packet,
- const size_t rtcp_packet_length);
-
- // Sets the maximum transfer unit size for the network link, i.e. the packet
- // size including IP, UDP and RTP headers.
- int32_t SetMTU(uint16_t mtu);
-
- // Gets the modules used by the channel.
- RtpRtcp* rtp_rtcp();
- rtc::scoped_refptr<PayloadRouter> send_payload_router();
- VCMProtectionCallback* vcm_protection_callback();
-
- CallStatsObserver* GetStatsObserver();
-
- // Implements VCMReceiveCallback.
- virtual int32_t FrameToRender(VideoFrame& video_frame); // NOLINT
-
- // Implements VCMReceiveCallback.
- virtual int32_t ReceivedDecodedReferenceFrame(
- const uint64_t picture_id);
-
- // Implements VCMReceiveCallback.
- void OnIncomingPayloadType(int payload_type) override;
-
- // Implements VCMReceiveStatisticsCallback.
- void OnReceiveRatesUpdated(uint32_t bit_rate, uint32_t frame_rate) override;
- void OnDiscardedPacketsUpdated(int discarded_packets) override;
- void OnFrameCountsUpdated(const FrameCounts& frame_counts) override;
-
- // Implements VCMDecoderTimingCallback.
- virtual void OnDecoderTiming(int decode_ms,
- int max_decode_ms,
- int current_delay_ms,
- int target_delay_ms,
- int jitter_buffer_ms,
- int min_playout_delay_ms,
- int render_delay_ms);
-
- // Implements FrameTypeCallback.
- virtual int32_t RequestKeyFrame();
-
- // Implements FrameTypeCallback.
- virtual int32_t SliceLossIndicationRequest(
- const uint64_t picture_id);
-
- // Implements VideoPacketRequestCallback.
- int32_t ResendPackets(const uint16_t* sequence_numbers,
- uint16_t length) override;
-
- int32_t SetVoiceChannel(int32_t ve_channel_id,
- VoEVideoSync* ve_sync_interface);
- int32_t VoiceChannel();
-
- // New-style callbacks, used by VideoReceiveStream.
- void RegisterPreRenderCallback(I420FrameCallback* pre_render_callback);
- void RegisterPreDecodeImageCallback(
- EncodedImageCallback* pre_decode_callback);
-
- void RegisterSendFrameCountObserver(FrameCountObserver* observer);
- void RegisterRtcpPacketTypeCounterObserver(
- RtcpPacketTypeCounterObserver* observer);
- void RegisterReceiveStatisticsProxy(
- ReceiveStatisticsProxy* receive_statistics_proxy);
- void SetIncomingVideoStream(IncomingVideoStream* incoming_video_stream);
-
- protected:
- static bool ChannelDecodeThreadFunction(void* obj);
- bool ChannelDecodeProcess();
-
- void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms);
-
- int ProtectionRequest(const FecProtectionParams* delta_fec_params,
- const FecProtectionParams* key_fec_params,
- uint32_t* sent_video_rate_bps,
- uint32_t* sent_nack_rate_bps,
- uint32_t* sent_fec_rate_bps);
-
- private:
- static std::vector<RtpRtcp*> CreateRtpRtcpModules(
- bool receiver_only,
- ReceiveStatistics* receive_statistics,
- Transport* outgoing_transport,
- RtcpIntraFrameObserver* intra_frame_callback,
- RtcpBandwidthObserver* bandwidth_callback,
- TransportFeedbackObserver* transport_feedback_callback,
- RtcpRttStats* rtt_stats,
- RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer,
- RemoteBitrateEstimator* remote_bitrate_estimator,
- RtpPacketSender* paced_sender,
- TransportSequenceNumberAllocator* transport_sequence_number_allocator,
- BitrateStatisticsObserver* send_bitrate_observer,
- FrameCountObserver* send_frame_count_observer,
- SendSideDelayObserver* send_side_delay_observer,
- size_t num_modules);
-
- // Assumed to be protected by the caller.
- void StartDecodeThread();
- void StopDecodeThread();
-
- void ProcessNACKRequest(const bool enable);
- // Compute NACK list parameters for the buffering mode.
- int GetRequiredNackListSize(int target_delay_ms);
- void SetRtxSendStatus(bool enable);
-
- void UpdateHistograms();
-
- // ViEChannel exposes methods for registering observers and callbacks after
- // construction. Such an API style is cumbersome to implement and maintain at
- // every level compared to setting them once at construction time, so this
- // class instantiates its children with a wrapper whose callback can be
- // swapped out at a later time.
- template <class T>
- class RegisterableCallback : public T {
- public:
- RegisterableCallback()
- : critsect_(CriticalSectionWrapper::CreateCriticalSection()),
- callback_(NULL) {}
-
- void Set(T* callback) {
- CriticalSectionScoped cs(critsect_.get());
- callback_ = callback;
- }
-
- protected:
- // Note: this should be implemented with an RW-lock to allow simultaneous
- // calls into the callback. However, that doesn't seem to be needed for the
- // current type of callbacks covered by this class.
- rtc::scoped_ptr<CriticalSectionWrapper> critsect_;
- T* callback_ GUARDED_BY(critsect_);
-
- private:
- RTC_DISALLOW_COPY_AND_ASSIGN(RegisterableCallback);
- };
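// A minimal modern equivalent of the wrapper above, using std::mutex instead
// of CriticalSectionWrapper (illustrative only). Derived classes implement
// T's notification methods, take |mutex_|, and forward to |callback_| when it
// is non-null, exactly as the subclasses below do.
#include <mutex>

template <class T>
class RegisterableSketch : public T {
 public:
  void Set(T* callback) {
    std::lock_guard<std::mutex> lock(mutex_);
    callback_ = callback;
  }

 protected:
  std::mutex mutex_;
  T* callback_ = nullptr;
};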
-
- class RegisterableBitrateStatisticsObserver:
- public RegisterableCallback<BitrateStatisticsObserver> {
- virtual void Notify(const BitrateStatistics& total_stats,
- const BitrateStatistics& retransmit_stats,
- uint32_t ssrc) {
- CriticalSectionScoped cs(critsect_.get());
- if (callback_)
- callback_->Notify(total_stats, retransmit_stats, ssrc);
- }
- } send_bitrate_observer_;
-
- class RegisterableFrameCountObserver
- : public RegisterableCallback<FrameCountObserver> {
- public:
- virtual void FrameCountUpdated(const FrameCounts& frame_counts,
- uint32_t ssrc) {
- CriticalSectionScoped cs(critsect_.get());
- if (callback_)
- callback_->FrameCountUpdated(frame_counts, ssrc);
- }
-
- private:
- } send_frame_count_observer_;
-
- class RegisterableSendSideDelayObserver :
- public RegisterableCallback<SendSideDelayObserver> {
- void SendSideDelayUpdated(int avg_delay_ms,
- int max_delay_ms,
- uint32_t ssrc) override {
- CriticalSectionScoped cs(critsect_.get());
- if (callback_)
- callback_->SendSideDelayUpdated(avg_delay_ms, max_delay_ms, ssrc);
- }
- } send_side_delay_observer_;
-
- class RegisterableRtcpPacketTypeCounterObserver
- : public RegisterableCallback<RtcpPacketTypeCounterObserver> {
- public:
- void RtcpPacketTypesCounterUpdated(
- uint32_t ssrc,
- const RtcpPacketTypeCounter& packet_counter) override {
- CriticalSectionScoped cs(critsect_.get());
- if (callback_)
- callback_->RtcpPacketTypesCounterUpdated(ssrc, packet_counter);
- counter_map_[ssrc] = packet_counter;
- }
-
- virtual std::map<uint32_t, RtcpPacketTypeCounter> GetPacketTypeCounterMap()
- const {
- CriticalSectionScoped cs(critsect_.get());
- return counter_map_;
- }
-
- private:
- std::map<uint32_t, RtcpPacketTypeCounter> counter_map_
- GUARDED_BY(critsect_);
- } rtcp_packet_type_counter_observer_;
-
- const uint32_t number_of_cores_;
- const bool sender_;
-
- ProcessThread* const module_process_thread_;
-
- // Used for all registered callbacks except rendering.
- rtc::scoped_ptr<CriticalSectionWrapper> crit_;
-
- // Owned modules/classes.
- rtc::scoped_refptr<PayloadRouter> send_payload_router_;
- rtc::scoped_ptr<ViEChannelProtectionCallback> vcm_protection_callback_;
-
- VideoCodingModule* const vcm_;
- ViEReceiver vie_receiver_;
- ViESyncModule vie_sync_;
-
- // Helper to report call statistics.
- rtc::scoped_ptr<ChannelStatsObserver> stats_observer_;
-
- // Not owned.
- ReceiveStatisticsProxy* receive_stats_callback_ GUARDED_BY(crit_);
- FrameCounts receive_frame_counts_ GUARDED_BY(crit_);
- IncomingVideoStream* incoming_video_stream_ GUARDED_BY(crit_);
- RtcpIntraFrameObserver* const intra_frame_observer_;
- RtcpRttStats* const rtt_stats_;
- PacedSender* const paced_sender_;
- PacketRouter* const packet_router_;
-
- const rtc::scoped_ptr<RtcpBandwidthObserver> bandwidth_observer_;
- TransportFeedbackObserver* const transport_feedback_observer_;
-
- rtc::scoped_ptr<ThreadWrapper> decode_thread_;
-
- int nack_history_size_sender_;
- int max_nack_reordering_threshold_;
- I420FrameCallback* pre_render_callback_ GUARDED_BY(crit_);
-
- const rtc::scoped_ptr<ReportBlockStats> report_block_stats_sender_;
-
- int64_t time_of_first_rtt_ms_ GUARDED_BY(crit_);
- int64_t rtt_sum_ms_ GUARDED_BY(crit_);
- int64_t last_rtt_ms_ GUARDED_BY(crit_);
- size_t num_rtts_ GUARDED_BY(crit_);
-
- // RtpRtcp modules, declared last as they use other members on construction.
- const std::vector<RtpRtcp*> rtp_rtcp_modules_;
- size_t num_active_rtp_rtcp_modules_ GUARDED_BY(crit_);
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_VIDEO_ENGINE_VIE_CHANNEL_H_
diff --git a/webrtc/video_engine/vie_defines.h b/webrtc/video_engine/vie_defines.h
deleted file mode 100644
index 59b56a54fd..0000000000
--- a/webrtc/video_engine/vie_defines.h
+++ /dev/null
@@ -1,120 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_VIDEO_ENGINE_VIE_DEFINES_H_
-#define WEBRTC_VIDEO_ENGINE_VIE_DEFINES_H_
-
-#include "webrtc/engine_configurations.h"
-
-// TODO(mflodman) Remove.
-#ifdef WEBRTC_ANDROID
-#include <arpa/inet.h> // NOLINT
-#include <linux/net.h> // NOLINT
-#include <netinet/in.h> // NOLINT
-#include <pthread.h> // NOLINT
-#include <stdio.h> // NOLINT
-#include <stdlib.h> // NOLINT
-#include <string.h> // NOLINT
-#include <sys/socket.h> // NOLINT
-#include <sys/time.h> // NOLINT
-#include <sys/types.h> // NOLINT
-#include <time.h> // NOLINT
-#endif
-
-namespace webrtc {
-
-// General
-enum { kViEMinKeyRequestIntervalMs = 300 };
-
-// ViEBase
-enum { kViEMaxNumberOfChannels = 64 };
-
-// ViECodec
-enum { kViEMaxCodecWidth = 4096 };
-enum { kViEMaxCodecHeight = 3072 };
-enum { kViEMaxCodecFramerate = 60 };
-enum { kViEMinCodecBitrate = 30 };
-
-// ViENetwork
-enum { kViEMaxMtu = 1500 };
-enum { kViESocketThreads = 1 };
-enum { kViENumReceiveSocketBuffers = 500 };
-
-// ViERender
-// Max valid time set in SetRenderTimeoutImage
-enum { kViEMaxRenderTimeoutTimeMs = 10000 };
-// Min valid time set in SetRenderTimeoutImage
-enum { kViEMinRenderTimeoutTimeMs = 33 };
-enum { kViEDefaultRenderDelayMs = 10 };
-
-// ViERTP_RTCP
-enum { kSendSidePacketHistorySize = 600 };
-
-// NACK
-enum { kMaxPacketAgeToNack = 450 }; // In sequence numbers.
-enum { kMaxNackListSize = 250 };
-
-// Id definitions
-enum {
- kViEChannelIdBase = 0x0,
- kViEChannelIdMax = 0xFF,
- kViEDummyChannelId = 0xFFFF
-};
-
-// Module id
- // Creates a unique id based on the ViE instance id and the channel id,
- // where ViE id > 0 and 0 <= channel id <= 255.
-
-inline int ViEId(const int vieId, const int channelId = -1) {
- if (channelId == -1) {
- return static_cast<int>((vieId << 16) + kViEDummyChannelId);
- }
- return static_cast<int>((vieId << 16) + channelId);
-}
-
-inline int ViEModuleId(const int vieId, const int channelId = -1) {
- if (channelId == -1) {
- return static_cast<int>((vieId << 16) + kViEDummyChannelId);
- }
- return static_cast<int>((vieId << 16) + channelId);
-}
-
-inline int ChannelId(const int moduleId) {
- return static_cast<int>(moduleId & 0xffff);
-}
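// Worked example of the packing above: the ViE instance id occupies the high
// 16 bits and the channel id the low 16 (invented driver function).
#include <cassert>

void IdPackingExample() {
  const int id = (3 << 16) + 0x2A;  // ViEId(3, 42).
  assert(id == 0x0003002A);
  assert((id & 0xffff) == 0x2A);    // ChannelId() recovers the channel id.
  assert((id >> 16) == 3);          // The high half recovers the instance id.
}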
-
-// Windows specific.
-#if defined(_WIN32)
- #define RENDER_MODULE_TYPE kRenderWindows
-
- // Include libraries.
- #pragma comment(lib, "winmm.lib")
-
- #ifndef WEBRTC_EXTERNAL_TRANSPORT
- #pragma comment(lib, "ws2_32.lib")
- #pragma comment(lib, "Iphlpapi.lib") // _GetAdaptersAddresses
- #endif
-#endif
-
-// Mac specific.
-#ifdef WEBRTC_MAC
- #define SLEEP(x) usleep(x * 1000)
- #define RENDER_MODULE_TYPE kRenderWindows
-#endif
-
-// Android specific.
-#ifdef WEBRTC_ANDROID
- #define FAR
- #define __cdecl
-#endif // WEBRTC_ANDROID
-
-} // namespace webrtc
-
-#endif // WEBRTC_VIDEO_ENGINE_VIE_DEFINES_H_
diff --git a/webrtc/video_engine/vie_encoder.cc b/webrtc/video_engine/vie_encoder.cc
deleted file mode 100644
index 0f4a5a14f5..0000000000
--- a/webrtc/video_engine/vie_encoder.cc
+++ /dev/null
@@ -1,710 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/video_engine/vie_encoder.h"
-
-#include <assert.h>
-
-#include <algorithm>
-
-#include "webrtc/base/checks.h"
-#include "webrtc/base/logging.h"
-#include "webrtc/base/trace_event.h"
-#include "webrtc/common_video/interface/video_image.h"
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/frame_callback.h"
-#include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"
-#include "webrtc/modules/pacing/include/paced_sender.h"
-#include "webrtc/modules/utility/interface/process_thread.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding_defines.h"
-#include "webrtc/modules/video_coding/main/source/encoded_frame.h"
-#include "webrtc/system_wrappers/include/clock.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/metrics.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
-#include "webrtc/video/send_statistics_proxy.h"
-#include "webrtc/video_engine/payload_router.h"
-#include "webrtc/video_engine/vie_defines.h"
-
-namespace webrtc {
-
- // Margin, relative to the configured buffer delay, at which we pause the
- // encoder because the pacing buffer has overflowed.
-static const float kEncoderPausePacerMargin = 2.0f;
-
-// Don't stop the encoder unless the delay is above this configured value.
-static const int kMinPacingDelayMs = 200;
-
-static const float kStopPaddingThresholdMs = 2000;
-
-std::vector<uint32_t> AllocateStreamBitrates(
- uint32_t total_bitrate,
- const SimulcastStream* stream_configs,
- size_t number_of_streams) {
- if (number_of_streams == 0) {
- std::vector<uint32_t> stream_bitrates(1, 0);
- stream_bitrates[0] = total_bitrate;
- return stream_bitrates;
- }
- std::vector<uint32_t> stream_bitrates(number_of_streams, 0);
- uint32_t bitrate_remainder = total_bitrate;
- for (size_t i = 0; i < stream_bitrates.size() && bitrate_remainder > 0; ++i) {
- if (stream_configs[i].maxBitrate * 1000 > bitrate_remainder) {
- stream_bitrates[i] = bitrate_remainder;
- } else {
- stream_bitrates[i] = stream_configs[i].maxBitrate * 1000;
- }
- bitrate_remainder -= stream_bitrates[i];
- }
- return stream_bitrates;
-}
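// Worked example of the greedy allocation above, restated over plain vectors
// so it can run standalone (invented numbers and driver; the loop mirrors
// AllocateStreamBitrates):
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <vector>

std::vector<uint32_t> AllocateSketch(uint32_t total_bps,
                                     const std::vector<uint32_t>& max_bps) {
  std::vector<uint32_t> out(max_bps.size(), 0);
  uint32_t remainder = total_bps;
  for (size_t i = 0; i < out.size() && remainder > 0; ++i) {
    out[i] = std::min(max_bps[i], remainder);  // Low streams are funded first.
    remainder -= out[i];
  }
  return out;
}

void AllocateExample() {
  // 900 kbps across streams capped at 300/400/500 kbps: the last stream only
  // gets the 200 kbps that remain.
  const auto r = AllocateSketch(900000, {300000, 400000, 500000});
  assert(r[0] == 300000 && r[1] == 400000 && r[2] == 200000);
}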
-
-class QMVideoSettingsCallback : public VCMQMSettingsCallback {
- public:
- explicit QMVideoSettingsCallback(VideoProcessingModule* vpm);
-
- ~QMVideoSettingsCallback();
-
- // Update VPM with QM (quality modes: frame size & frame rate) settings.
- int32_t SetVideoQMSettings(const uint32_t frame_rate,
- const uint32_t width,
- const uint32_t height);
-
- // Update target frame rate.
- void SetTargetFramerate(int frame_rate);
-
- private:
- VideoProcessingModule* vpm_;
-};
-
-class ViEBitrateObserver : public BitrateObserver {
- public:
- explicit ViEBitrateObserver(ViEEncoder* owner)
- : owner_(owner) {
- }
- virtual ~ViEBitrateObserver() {}
- // Implements BitrateObserver.
- virtual void OnNetworkChanged(uint32_t bitrate_bps,
- uint8_t fraction_lost,
- int64_t rtt) {
- owner_->OnNetworkChanged(bitrate_bps, fraction_lost, rtt);
- }
- private:
- ViEEncoder* owner_;
-};
-
-ViEEncoder::ViEEncoder(uint32_t number_of_cores,
- ProcessThread* module_process_thread,
- SendStatisticsProxy* stats_proxy,
- I420FrameCallback* pre_encode_callback,
- PacedSender* pacer,
- BitrateAllocator* bitrate_allocator)
- : number_of_cores_(number_of_cores),
- vpm_(VideoProcessingModule::Create()),
- qm_callback_(new QMVideoSettingsCallback(vpm_.get())),
- vcm_(VideoCodingModule::Create(Clock::GetRealTimeClock(),
- this,
- qm_callback_.get())),
- send_payload_router_(NULL),
- data_cs_(CriticalSectionWrapper::CreateCriticalSection()),
- stats_proxy_(stats_proxy),
- pre_encode_callback_(pre_encode_callback),
- pacer_(pacer),
- bitrate_allocator_(bitrate_allocator),
- time_of_last_frame_activity_ms_(0),
- simulcast_enabled_(false),
- min_transmit_bitrate_kbps_(0),
- last_observed_bitrate_bps_(0),
- target_delay_ms_(0),
- network_is_transmitting_(true),
- encoder_paused_(false),
- encoder_paused_and_dropped_frame_(false),
- fec_enabled_(false),
- nack_enabled_(false),
- module_process_thread_(module_process_thread),
- has_received_sli_(false),
- picture_id_sli_(0),
- has_received_rpsi_(false),
- picture_id_rpsi_(0),
- video_suspended_(false) {
- bitrate_observer_.reset(new ViEBitrateObserver(this));
-}
-
-bool ViEEncoder::Init() {
- vpm_->EnableTemporalDecimation(true);
-
- // Enable/disable content analysis: off by default for now.
- vpm_->EnableContentAnalysis(false);
-
- if (vcm_->RegisterTransportCallback(this) != 0) {
- return false;
- }
- if (vcm_->RegisterSendStatisticsCallback(this) != 0) {
- return false;
- }
- return true;
-}
-
-void ViEEncoder::StartThreadsAndSetSharedMembers(
- rtc::scoped_refptr<PayloadRouter> send_payload_router,
- VCMProtectionCallback* vcm_protection_callback) {
- RTC_DCHECK(send_payload_router_ == NULL);
-
- send_payload_router_ = send_payload_router;
- vcm_->RegisterProtectionCallback(vcm_protection_callback);
- module_process_thread_->RegisterModule(vcm_.get());
-}
-
-void ViEEncoder::StopThreadsAndRemoveSharedMembers() {
- if (bitrate_allocator_)
- bitrate_allocator_->RemoveBitrateObserver(bitrate_observer_.get());
- module_process_thread_->DeRegisterModule(vcm_.get());
- module_process_thread_->DeRegisterModule(vpm_.get());
-}
-
-ViEEncoder::~ViEEncoder() {
-}
-
-void ViEEncoder::SetNetworkTransmissionState(bool is_transmitting) {
- {
- CriticalSectionScoped cs(data_cs_.get());
- network_is_transmitting_ = is_transmitting;
- }
-}
-
-void ViEEncoder::Pause() {
- CriticalSectionScoped cs(data_cs_.get());
- encoder_paused_ = true;
-}
-
-void ViEEncoder::Restart() {
- CriticalSectionScoped cs(data_cs_.get());
- encoder_paused_ = false;
-}
-
-uint8_t ViEEncoder::NumberOfCodecs() {
- return vcm_->NumberOfCodecs();
-}
-
-int32_t ViEEncoder::GetCodec(uint8_t list_index, VideoCodec* video_codec) {
- if (vcm_->Codec(list_index, video_codec) != 0) {
- return -1;
- }
- return 0;
-}
-
-int32_t ViEEncoder::RegisterExternalEncoder(webrtc::VideoEncoder* encoder,
- uint8_t pl_type,
- bool internal_source) {
- if (encoder == NULL)
- return -1;
-
- if (vcm_->RegisterExternalEncoder(encoder, pl_type, internal_source) !=
- VCM_OK) {
- return -1;
- }
- return 0;
-}
-
-int32_t ViEEncoder::DeRegisterExternalEncoder(uint8_t pl_type) {
- if (vcm_->RegisterExternalEncoder(NULL, pl_type) != VCM_OK) {
- return -1;
- }
- return 0;
-}
-
-int32_t ViEEncoder::SetEncoder(const webrtc::VideoCodec& video_codec) {
- RTC_DCHECK(send_payload_router_ != NULL);
- // Setting target width and height for VPM.
- if (vpm_->SetTargetResolution(video_codec.width, video_codec.height,
- video_codec.maxFramerate) != VPM_OK) {
- return -1;
- }
-
- {
- CriticalSectionScoped cs(data_cs_.get());
- simulcast_enabled_ = video_codec.numberOfSimulcastStreams > 1;
- }
-
- // Add a bitrate observer to the allocator and update the start, max and
- // min bitrates of the bitrate controller as needed.
- int allocated_bitrate_bps = bitrate_allocator_->AddBitrateObserver(
- bitrate_observer_.get(), video_codec.minBitrate * 1000,
- video_codec.maxBitrate * 1000);
-
- webrtc::VideoCodec modified_video_codec = video_codec;
- modified_video_codec.startBitrate = allocated_bitrate_bps / 1000;
-
- size_t max_data_payload_length = send_payload_router_->MaxPayloadLength();
- if (vcm_->RegisterSendCodec(&modified_video_codec, number_of_cores_,
- static_cast<uint32_t>(max_data_payload_length)) !=
- VCM_OK) {
- return -1;
- }
- return 0;
-}
-
-int32_t ViEEncoder::GetEncoder(VideoCodec* video_codec) {
- *video_codec = vcm_->GetSendCodec();
- return 0;
-}
-
-int32_t ViEEncoder::ScaleInputImage(bool enable) {
- VideoFrameResampling resampling_mode = kFastRescaling;
- // TODO(mflodman) What?
- if (enable) {
- // kInterpolation is currently not supported.
- LOG_F(LS_ERROR) << "Not supported.";
- return -1;
- }
- vpm_->SetInputFrameResampleMode(resampling_mode);
-
- return 0;
-}
-
-int ViEEncoder::GetPaddingNeededBps() const {
- int64_t time_of_last_frame_activity_ms;
- int min_transmit_bitrate_bps;
- int bitrate_bps;
- {
- CriticalSectionScoped cs(data_cs_.get());
- bool send_padding = simulcast_enabled_ || video_suspended_ ||
- min_transmit_bitrate_kbps_ > 0;
- if (!send_padding)
- return 0;
- time_of_last_frame_activity_ms = time_of_last_frame_activity_ms_;
- min_transmit_bitrate_bps = 1000 * min_transmit_bitrate_kbps_;
- bitrate_bps = last_observed_bitrate_bps_;
- }
-
- VideoCodec send_codec;
- if (vcm_->SendCodec(&send_codec) != 0)
- return 0;
-
- bool video_is_suspended = vcm_->VideoSuspended();
-
- // Find the max amount of padding we can allow ourselves to send at this
- // point, based on which streams are currently active and what our current
- // available bandwidth is.
- int pad_up_to_bitrate_bps = 0;
- if (send_codec.numberOfSimulcastStreams == 0) {
- pad_up_to_bitrate_bps = send_codec.minBitrate * 1000;
- } else {
- SimulcastStream* stream_configs = send_codec.simulcastStream;
- pad_up_to_bitrate_bps =
- stream_configs[send_codec.numberOfSimulcastStreams - 1].minBitrate *
- 1000;
- for (int i = 0; i < send_codec.numberOfSimulcastStreams - 1; ++i) {
- pad_up_to_bitrate_bps += stream_configs[i].targetBitrate * 1000;
- }
- }
-
- // Disable padding if only sending one stream and video isn't suspended and
- // min-transmit bitrate isn't used (applied later).
- if (!video_is_suspended && send_codec.numberOfSimulcastStreams <= 1)
- pad_up_to_bitrate_bps = 0;
-
- // The amount of padding should decay to zero if no frames are being
- // captured/encoded unless a min-transmit bitrate is used.
- int64_t now_ms = TickTime::MillisecondTimestamp();
- if (now_ms - time_of_last_frame_activity_ms > kStopPaddingThresholdMs)
- pad_up_to_bitrate_bps = 0;
-
- // Pad up to min bitrate.
- if (pad_up_to_bitrate_bps < min_transmit_bitrate_bps)
- pad_up_to_bitrate_bps = min_transmit_bitrate_bps;
-
- // Padding may never exceed bitrate estimate.
- if (pad_up_to_bitrate_bps > bitrate_bps)
- pad_up_to_bitrate_bps = bitrate_bps;
-
- return pad_up_to_bitrate_bps;
-}
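// Worked example of the rule above (invented numbers): with two simulcast
// streams configured as target 500 kbps (stream 0) and min 600 kbps (stream
// 1), pad_up_to starts at 600000 + 500000 = 1100000 bps. If the last frame
// activity is older than kStopPaddingThresholdMs it decays to 0; it is then
// raised to any configured min-transmit bitrate and finally clamped to the
// current bitrate estimate, so e.g. an 800 kbps estimate caps the padding at
// 800000 bps.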
-
-bool ViEEncoder::EncoderPaused() const {
- // Pause video if paused by caller or as long as the network is down or the
- // pacer queue has grown too large in buffered mode.
- if (encoder_paused_) {
- return true;
- }
- if (target_delay_ms_ > 0) {
- // Buffered mode.
- // TODO(pwestin): Workaround until nack is configured as a time and not
- // number of packets.
- return pacer_->QueueInMs() >=
- std::max(
- static_cast<int>(target_delay_ms_ * kEncoderPausePacerMargin),
- kMinPacingDelayMs);
- }
- if (pacer_->ExpectedQueueTimeMs() > PacedSender::kDefaultMaxQueueLengthMs) {
- // Too much data in pacer queue, drop frame.
- return true;
- }
- return !network_is_transmitting_;
-}
-
-void ViEEncoder::TraceFrameDropStart() {
- // Start trace event only on the first frame after encoder is paused.
- if (!encoder_paused_and_dropped_frame_) {
- TRACE_EVENT_ASYNC_BEGIN0("webrtc", "EncoderPaused", this);
- }
- encoder_paused_and_dropped_frame_ = true;
-}
-
-void ViEEncoder::TraceFrameDropEnd() {
- // End trace event on first frame after encoder resumes, if frame was dropped.
- if (encoder_paused_and_dropped_frame_) {
- TRACE_EVENT_ASYNC_END0("webrtc", "EncoderPaused", this);
- }
- encoder_paused_and_dropped_frame_ = false;
-}
-
-void ViEEncoder::DeliverFrame(VideoFrame video_frame) {
- RTC_DCHECK(send_payload_router_ != NULL);
- if (!send_payload_router_->active()) {
- // We've paused or we have no channels attached; don't waste resources on
- // encoding.
- return;
- }
- {
- CriticalSectionScoped cs(data_cs_.get());
- time_of_last_frame_activity_ms_ = TickTime::MillisecondTimestamp();
- if (EncoderPaused()) {
- TraceFrameDropStart();
- return;
- }
- TraceFrameDropEnd();
- }
-
- TRACE_EVENT_ASYNC_STEP0("webrtc", "Video", video_frame.render_time_ms(),
- "Encode");
- VideoFrame* decimated_frame = NULL;
- // TODO(wuchengli): support texture frames.
- if (video_frame.native_handle() == NULL) {
- // Pass frame via preprocessor.
- const int ret = vpm_->PreprocessFrame(video_frame, &decimated_frame);
- if (ret == 1) {
- // Drop this frame.
- return;
- }
- if (ret != VPM_OK) {
- return;
- }
- }
-
- // If we haven't resampled the frame and we have a FrameCallback, we need to
- // make a deep copy of |video_frame|.
- VideoFrame copied_frame;
- if (pre_encode_callback_) {
- // If the frame was not resampled or scaled => use copy of original.
- if (decimated_frame == NULL) {
- copied_frame.CopyFrame(video_frame);
- decimated_frame = &copied_frame;
- }
- pre_encode_callback_->FrameCallback(decimated_frame);
- }
-
- // If the frame was not resampled, scaled, or touched by FrameCallback => use
- // original. The frame is const from here.
- const VideoFrame* output_frame =
- (decimated_frame != NULL) ? decimated_frame : &video_frame;
-
-#ifdef VIDEOCODEC_VP8
- if (vcm_->SendCodec() == webrtc::kVideoCodecVP8) {
- webrtc::CodecSpecificInfo codec_specific_info;
- codec_specific_info.codecType = webrtc::kVideoCodecVP8;
- {
- CriticalSectionScoped cs(data_cs_.get());
- codec_specific_info.codecSpecific.VP8.hasReceivedRPSI =
- has_received_rpsi_;
- codec_specific_info.codecSpecific.VP8.hasReceivedSLI =
- has_received_sli_;
- codec_specific_info.codecSpecific.VP8.pictureIdRPSI =
- picture_id_rpsi_;
- codec_specific_info.codecSpecific.VP8.pictureIdSLI =
- picture_id_sli_;
- has_received_sli_ = false;
- has_received_rpsi_ = false;
- }
-
- vcm_->AddVideoFrame(*output_frame, vpm_->ContentMetrics(),
- &codec_specific_info);
- return;
- }
-#endif
- vcm_->AddVideoFrame(*output_frame);
-}
-
-int ViEEncoder::SendKeyFrame() {
- return vcm_->IntraFrameRequest(0);
-}
-
-uint32_t ViEEncoder::LastObservedBitrateBps() const {
- CriticalSectionScoped cs(data_cs_.get());
- return last_observed_bitrate_bps_;
-}
-
-int ViEEncoder::CodecTargetBitrate(uint32_t* bitrate) const {
- if (vcm_->Bitrate(bitrate) != 0)
- return -1;
- return 0;
-}
-
-int32_t ViEEncoder::UpdateProtectionMethod(bool nack, bool fec) {
- RTC_DCHECK(send_payload_router_ != NULL);
-
- if (fec_enabled_ == fec && nack_enabled_ == nack) {
- // No change needed, we're already in correct state.
- return 0;
- }
- fec_enabled_ = fec;
- nack_enabled_ = nack;
-
- // Set Video Protection for VCM.
- VCMVideoProtection protection_mode;
- if (fec_enabled_) {
- protection_mode =
- nack_enabled_ ? webrtc::kProtectionNackFEC : kProtectionFEC;
- } else {
- protection_mode = nack_enabled_ ? kProtectionNack : kProtectionNone;
- }
- vcm_->SetVideoProtection(protection_mode, true);
-
- if (fec_enabled_ || nack_enabled_) {
- // The send codec must be registered to set correct MTU.
- webrtc::VideoCodec codec;
- if (vcm_->SendCodec(&codec) == 0) {
- uint32_t current_bitrate_bps = 0;
- if (vcm_->Bitrate(&current_bitrate_bps) != 0) {
- LOG_F(LS_WARNING) <<
- "Failed to get the current encoder target bitrate.";
- }
- // Convert to start bitrate in kbps.
- codec.startBitrate = (current_bitrate_bps + 500) / 1000;
- size_t max_payload_length = send_payload_router_->MaxPayloadLength();
- if (vcm_->RegisterSendCodec(&codec, number_of_cores_,
- static_cast<uint32_t>(max_payload_length)) !=
- 0) {
- return -1;
- }
- }
- }
- return 0;
-}
-
-void ViEEncoder::SetSenderBufferingMode(int target_delay_ms) {
- {
- CriticalSectionScoped cs(data_cs_.get());
- target_delay_ms_ = target_delay_ms;
- }
- if (target_delay_ms > 0) {
- // Disable external frame-droppers.
- vcm_->EnableFrameDropper(false);
- vpm_->EnableTemporalDecimation(false);
- } else {
- // Real-time mode - enable frame droppers.
- vpm_->EnableTemporalDecimation(true);
- vcm_->EnableFrameDropper(true);
- }
-}
-
-void ViEEncoder::OnSetRates(uint32_t bitrate_bps, int framerate) {
- if (stats_proxy_)
- stats_proxy_->OnSetRates(bitrate_bps, framerate);
-}
-
-int32_t ViEEncoder::SendData(
- const uint8_t payload_type,
- const EncodedImage& encoded_image,
- const webrtc::RTPFragmentationHeader& fragmentation_header,
- const RTPVideoHeader* rtp_video_hdr) {
- RTC_DCHECK(send_payload_router_ != NULL);
-
- {
- CriticalSectionScoped cs(data_cs_.get());
- time_of_last_frame_activity_ms_ = TickTime::MillisecondTimestamp();
- }
-
- if (stats_proxy_ != NULL)
- stats_proxy_->OnSendEncodedImage(encoded_image, rtp_video_hdr);
-
- return send_payload_router_->RoutePayload(
- encoded_image._frameType, payload_type, encoded_image._timeStamp,
- encoded_image.capture_time_ms_, encoded_image._buffer,
- encoded_image._length, &fragmentation_header, rtp_video_hdr)
- ? 0
- : -1;
-}
-
-int32_t ViEEncoder::SendStatistics(const uint32_t bit_rate,
- const uint32_t frame_rate) {
- if (stats_proxy_)
- stats_proxy_->OnOutgoingRate(frame_rate, bit_rate);
- return 0;
-}
-
-void ViEEncoder::OnReceivedSLI(uint32_t /*ssrc*/,
- uint8_t picture_id) {
- CriticalSectionScoped cs(data_cs_.get());
- picture_id_sli_ = picture_id;
- has_received_sli_ = true;
-}
-
-void ViEEncoder::OnReceivedRPSI(uint32_t /*ssrc*/,
- uint64_t picture_id) {
- CriticalSectionScoped cs(data_cs_.get());
- picture_id_rpsi_ = picture_id;
- has_received_rpsi_ = true;
-}
-
-void ViEEncoder::OnReceivedIntraFrameRequest(uint32_t ssrc) {
- // Key frame request from remote side; signal to VCM.
- TRACE_EVENT0("webrtc", "OnKeyFrameRequest");
-
- int idx = 0;
- {
- CriticalSectionScoped cs(data_cs_.get());
- auto stream_it = ssrc_streams_.find(ssrc);
- if (stream_it == ssrc_streams_.end()) {
- LOG_F(LS_WARNING) << "ssrc not found: " << ssrc << ", map size "
- << ssrc_streams_.size();
- return;
- }
- std::map<unsigned int, int64_t>::iterator time_it =
- time_last_intra_request_ms_.find(ssrc);
- if (time_it == time_last_intra_request_ms_.end()) {
- time_last_intra_request_ms_[ssrc] = 0;
- }
-
- int64_t now = TickTime::MillisecondTimestamp();
- if (time_last_intra_request_ms_[ssrc] + kViEMinKeyRequestIntervalMs > now) {
- return;
- }
- time_last_intra_request_ms_[ssrc] = now;
- idx = stream_it->second;
- }
- // Release the critsect before triggering key frame.
- vcm_->IntraFrameRequest(idx);
-}
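// The function above rate-limits key-frame requests to one per ssrc every
// kViEMinKeyRequestIntervalMs (300 ms). A self-contained sketch of the same
// policy, with the clock abstracted as a parameter (invented class):
#include <cstdint>
#include <map>

class KeyFrameRateLimiterSketch {
 public:
  explicit KeyFrameRateLimiterSketch(int64_t min_interval_ms)
      : min_interval_ms_(min_interval_ms) {}

  // Returns true when a request for |ssrc| may be issued at |now_ms|.
  bool AllowRequest(uint32_t ssrc, int64_t now_ms) {
    int64_t& last = last_request_ms_[ssrc];  // Zero-initialized on first use.
    if (last + min_interval_ms_ > now_ms)
      return false;
    last = now_ms;
    return true;
  }

 private:
  const int64_t min_interval_ms_;
  std::map<uint32_t, int64_t> last_request_ms_;
};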
-
-void ViEEncoder::OnLocalSsrcChanged(uint32_t old_ssrc, uint32_t new_ssrc) {
- CriticalSectionScoped cs(data_cs_.get());
- std::map<unsigned int, int>::iterator it = ssrc_streams_.find(old_ssrc);
- if (it == ssrc_streams_.end()) {
- return;
- }
-
- ssrc_streams_[new_ssrc] = it->second;
- ssrc_streams_.erase(it);
-
- std::map<unsigned int, int64_t>::iterator time_it =
- time_last_intra_request_ms_.find(old_ssrc);
- int64_t last_intra_request_ms = 0;
- if (time_it != time_last_intra_request_ms_.end()) {
- last_intra_request_ms = time_it->second;
- time_last_intra_request_ms_.erase(time_it);
- }
- time_last_intra_request_ms_[new_ssrc] = last_intra_request_ms;
-}
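// The function above migrates both per-ssrc map entries from the old key to
// the new one. A runnable sketch of the erase/insert idiom it uses (invented
// example values):
#include <cassert>
#include <map>

void MigrateKeyExample() {
  std::map<unsigned int, int> streams{{0x1111u, 0}};
  auto it = streams.find(0x1111u);
  if (it != streams.end()) {
    streams[0x2222u] = it->second;  // Copy the value under the new ssrc...
    streams.erase(it);              // ...then drop the stale entry.
  }
  assert(streams.count(0x2222u) == 1 && streams.count(0x1111u) == 0);
}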
-
-bool ViEEncoder::SetSsrcs(const std::vector<uint32_t>& ssrcs) {
- VideoCodec codec;
- if (vcm_->SendCodec(&codec) != 0)
- return false;
-
- if (codec.numberOfSimulcastStreams > 0 &&
- ssrcs.size() != codec.numberOfSimulcastStreams) {
- return false;
- }
-
- CriticalSectionScoped cs(data_cs_.get());
- ssrc_streams_.clear();
- time_last_intra_request_ms_.clear();
- int idx = 0;
- for (uint32_t ssrc : ssrcs) {
- ssrc_streams_[ssrc] = idx++;
- }
- return true;
-}
-
-void ViEEncoder::SetMinTransmitBitrate(int min_transmit_bitrate_kbps) {
- assert(min_transmit_bitrate_kbps >= 0);
- CriticalSectionScoped crit(data_cs_.get());
- min_transmit_bitrate_kbps_ = min_transmit_bitrate_kbps;
-}
-
-// Called from ViEBitrateObserver.
-void ViEEncoder::OnNetworkChanged(uint32_t bitrate_bps,
- uint8_t fraction_lost,
- int64_t round_trip_time_ms) {
- LOG(LS_VERBOSE) << "OnNetworkChanged, bitrate" << bitrate_bps
- << " packet loss " << static_cast<int>(fraction_lost)
- << " rtt " << round_trip_time_ms;
- RTC_DCHECK(send_payload_router_ != NULL);
- vcm_->SetChannelParameters(bitrate_bps, fraction_lost, round_trip_time_ms);
- bool video_is_suspended = vcm_->VideoSuspended();
-
- VideoCodec send_codec;
- if (vcm_->SendCodec(&send_codec) != 0) {
- return;
- }
- SimulcastStream* stream_configs = send_codec.simulcastStream;
- // Allocate the bandwidth between the streams.
- std::vector<uint32_t> stream_bitrates = AllocateStreamBitrates(
- bitrate_bps, stream_configs, send_codec.numberOfSimulcastStreams);
- send_payload_router_->SetTargetSendBitrates(stream_bitrates);
-
- {
- CriticalSectionScoped cs(data_cs_.get());
- last_observed_bitrate_bps_ = bitrate_bps;
- if (video_suspended_ == video_is_suspended)
- return;
- video_suspended_ = video_is_suspended;
-
- LOG(LS_INFO) << "Video suspend state changed " << video_is_suspended
- << " for ssrc " << ssrc_streams_.begin()->first;
- }
- // Video suspend-state changed, inform codec observer.
- if (stats_proxy_)
- stats_proxy_->OnSuspendChange(video_is_suspended);
-}
-
-void ViEEncoder::SuspendBelowMinBitrate() {
- vcm_->SuspendBelowMinBitrate();
- bitrate_allocator_->EnforceMinBitrate(false);
-}
-
-void ViEEncoder::RegisterPostEncodeImageCallback(
- EncodedImageCallback* post_encode_callback) {
- vcm_->RegisterPostEncodeImageCallback(post_encode_callback);
-}
-
-QMVideoSettingsCallback::QMVideoSettingsCallback(VideoProcessingModule* vpm)
- : vpm_(vpm) {
-}
-
-QMVideoSettingsCallback::~QMVideoSettingsCallback() {
-}
-
-int32_t QMVideoSettingsCallback::SetVideoQMSettings(
- const uint32_t frame_rate,
- const uint32_t width,
- const uint32_t height) {
- return vpm_->SetTargetResolution(width, height, frame_rate);
-}
-
-void QMVideoSettingsCallback::SetTargetFramerate(int frame_rate) {
- vpm_->SetTargetFramerate(frame_rate);
-}
-
-} // namespace webrtc
diff --git a/webrtc/video_engine/vie_encoder.h b/webrtc/video_engine/vie_encoder.h
deleted file mode 100644
index 54aacdbfa9..0000000000
--- a/webrtc/video_engine/vie_encoder.h
+++ /dev/null
@@ -1,201 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_VIDEO_ENGINE_VIE_ENCODER_H_
-#define WEBRTC_VIDEO_ENGINE_VIE_ENCODER_H_
-
-#include <map>
-#include <vector>
-
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/base/scoped_ref_ptr.h"
-#include "webrtc/base/thread_annotations.h"
-#include "webrtc/common_types.h"
-#include "webrtc/frame_callback.h"
-#include "webrtc/modules/bitrate_controller/include/bitrate_allocator.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding_defines.h"
-#include "webrtc/modules/video_processing/main/interface/video_processing.h"
-#include "webrtc/typedefs.h"
-#include "webrtc/video/video_capture_input.h"
-#include "webrtc/video_engine/vie_defines.h"
-
-namespace webrtc {
-
-class Config;
-class CriticalSectionWrapper;
-class EncodedImageCallback;
-class PacedSender;
-class PayloadRouter;
-class ProcessThread;
-class QMVideoSettingsCallback;
-class SendStatisticsProxy;
-class ViEBitrateObserver;
-class ViEEffectFilter;
-class VideoCodingModule;
-
-class ViEEncoder : public RtcpIntraFrameObserver,
- public VideoEncoderRateObserver,
- public VCMPacketizationCallback,
- public VCMSendStatisticsCallback,
- public VideoCaptureCallback {
- public:
- friend class ViEBitrateObserver;
-
- ViEEncoder(uint32_t number_of_cores,
- ProcessThread* module_process_thread,
- SendStatisticsProxy* stats_proxy,
- I420FrameCallback* pre_encode_callback,
- PacedSender* pacer,
- BitrateAllocator* bitrate_allocator);
- ~ViEEncoder();
-
- bool Init();
-
- // This function is assumed to be called before any frames are delivered, and
- // only once.
- // Ideally this would be done in Init, but the dependencies between ViEEncoder
- // and ViEChannel make it hard to do cleanly.
- void StartThreadsAndSetSharedMembers(
- rtc::scoped_refptr<PayloadRouter> send_payload_router,
- VCMProtectionCallback* vcm_protection_callback);
-
- // This function must be called before the corresponding ViEChannel is
- // deleted.
- void StopThreadsAndRemoveSharedMembers();
-
- void SetNetworkTransmissionState(bool is_transmitting);
-
- // Returns the id of the owning channel.
- int Owner() const;
-
- // Drops incoming frames before they get to the encoder.
- void Pause();
- void Restart();
-
- // Codec settings.
- uint8_t NumberOfCodecs();
- int32_t GetCodec(uint8_t list_index, VideoCodec* video_codec);
- int32_t RegisterExternalEncoder(VideoEncoder* encoder,
- uint8_t pl_type,
- bool internal_source);
- int32_t DeRegisterExternalEncoder(uint8_t pl_type);
- int32_t SetEncoder(const VideoCodec& video_codec);
- int32_t GetEncoder(VideoCodec* video_codec);
-
- // Scale or crop/pad image.
- int32_t ScaleInputImage(bool enable);
-
- // Implementing VideoCaptureCallback.
- void DeliverFrame(VideoFrame video_frame) override;
-
- int32_t SendKeyFrame();
-
- uint32_t LastObservedBitrateBps() const;
- int CodecTargetBitrate(uint32_t* bitrate) const;
- // Loss protection.
- int32_t UpdateProtectionMethod(bool nack, bool fec);
- bool nack_enabled() const { return nack_enabled_; }
-
- // Buffering mode.
- void SetSenderBufferingMode(int target_delay_ms);
-
- // Implements VideoEncoderRateObserver.
- void OnSetRates(uint32_t bitrate_bps, int framerate) override;
-
- // Implements VCMPacketizationCallback.
- int32_t SendData(uint8_t payload_type,
- const EncodedImage& encoded_image,
- const RTPFragmentationHeader& fragmentation_header,
- const RTPVideoHeader* rtp_video_hdr) override;
-
- // Implements VideoSendStatisticsCallback.
- int32_t SendStatistics(const uint32_t bit_rate,
- const uint32_t frame_rate) override;
-
- // Implements RtcpIntraFrameObserver.
- void OnReceivedIntraFrameRequest(uint32_t ssrc) override;
- void OnReceivedSLI(uint32_t ssrc, uint8_t picture_id) override;
- void OnReceivedRPSI(uint32_t ssrc, uint64_t picture_id) override;
- void OnLocalSsrcChanged(uint32_t old_ssrc, uint32_t new_ssrc) override;
-
- // Sets SSRCs for all streams.
- bool SetSsrcs(const std::vector<uint32_t>& ssrcs);
-
- void SetMinTransmitBitrate(int min_transmit_bitrate_kbps);
-
- // Lets the sender suspend video when the rate drops below
- // |threshold_bps|, and turn it back on when the rate goes back up above
- // |threshold_bps| + |window_bps|.
- void SuspendBelowMinBitrate();
-
- // New-style callbacks, used by VideoSendStream.
- void RegisterPostEncodeImageCallback(
- EncodedImageCallback* post_encode_callback);
-
- int GetPaddingNeededBps() const;
-
- protected:
- // Called by BitrateObserver.
- void OnNetworkChanged(uint32_t bitrate_bps,
- uint8_t fraction_lost,
- int64_t round_trip_time_ms);
-
- private:
- bool EncoderPaused() const EXCLUSIVE_LOCKS_REQUIRED(data_cs_);
- void TraceFrameDropStart() EXCLUSIVE_LOCKS_REQUIRED(data_cs_);
- void TraceFrameDropEnd() EXCLUSIVE_LOCKS_REQUIRED(data_cs_);
-
- const uint32_t number_of_cores_;
-
- const rtc::scoped_ptr<VideoProcessingModule> vpm_;
- const rtc::scoped_ptr<QMVideoSettingsCallback> qm_callback_;
- const rtc::scoped_ptr<VideoCodingModule> vcm_;
- rtc::scoped_refptr<PayloadRouter> send_payload_router_;
-
- rtc::scoped_ptr<CriticalSectionWrapper> data_cs_;
- rtc::scoped_ptr<BitrateObserver> bitrate_observer_;
-
- SendStatisticsProxy* const stats_proxy_;
- I420FrameCallback* const pre_encode_callback_;
- PacedSender* const pacer_;
- BitrateAllocator* const bitrate_allocator_;
-
- // The time we last received an input frame or encoded frame. This is used to
- // track when video is stopped long enough that we also want to stop sending
- // padding.
- int64_t time_of_last_frame_activity_ms_ GUARDED_BY(data_cs_);
- bool simulcast_enabled_ GUARDED_BY(data_cs_);
- int min_transmit_bitrate_kbps_ GUARDED_BY(data_cs_);
- uint32_t last_observed_bitrate_bps_ GUARDED_BY(data_cs_);
- int target_delay_ms_ GUARDED_BY(data_cs_);
- bool network_is_transmitting_ GUARDED_BY(data_cs_);
- bool encoder_paused_ GUARDED_BY(data_cs_);
- bool encoder_paused_and_dropped_frame_ GUARDED_BY(data_cs_);
- std::map<unsigned int, int64_t> time_last_intra_request_ms_
- GUARDED_BY(data_cs_);
-
- bool fec_enabled_;
- bool nack_enabled_;
-
- ProcessThread* module_process_thread_;
-
- bool has_received_sli_ GUARDED_BY(data_cs_);
- uint8_t picture_id_sli_ GUARDED_BY(data_cs_);
- bool has_received_rpsi_ GUARDED_BY(data_cs_);
- uint64_t picture_id_rpsi_ GUARDED_BY(data_cs_);
- std::map<uint32_t, int> ssrc_streams_ GUARDED_BY(data_cs_);
-
- bool video_suspended_ GUARDED_BY(data_cs_);
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_VIDEO_ENGINE_VIE_ENCODER_H_
diff --git a/webrtc/video_engine/vie_receiver.cc b/webrtc/video_engine/vie_receiver.cc
deleted file mode 100644
index 2e3b588302..0000000000
--- a/webrtc/video_engine/vie_receiver.cc
+++ /dev/null
@@ -1,482 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/video_engine/vie_receiver.h"
-
-#include <vector>
-
-#include "webrtc/base/logging.h"
-#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
-#include "webrtc/modules/rtp_rtcp/interface/fec_receiver.h"
-#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
-#include "webrtc/modules/rtp_rtcp/interface/remote_ntp_time_estimator.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_cvo.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/metrics.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
-#include "webrtc/system_wrappers/include/timestamp_extrapolator.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-namespace webrtc {
-
-static const int kPacketLogIntervalMs = 10000;
-
-ViEReceiver::ViEReceiver(VideoCodingModule* module_vcm,
- RemoteBitrateEstimator* remote_bitrate_estimator,
- RtpFeedback* rtp_feedback)
- : receive_cs_(CriticalSectionWrapper::CreateCriticalSection()),
- clock_(Clock::GetRealTimeClock()),
- rtp_header_parser_(RtpHeaderParser::Create()),
- rtp_payload_registry_(
- new RTPPayloadRegistry(RTPPayloadStrategy::CreateStrategy(false))),
- rtp_receiver_(
- RtpReceiver::CreateVideoReceiver(clock_,
- this,
- rtp_feedback,
- rtp_payload_registry_.get())),
- rtp_receive_statistics_(ReceiveStatistics::Create(clock_)),
- fec_receiver_(FecReceiver::Create(this)),
- rtp_rtcp_(NULL),
- vcm_(module_vcm),
- remote_bitrate_estimator_(remote_bitrate_estimator),
- ntp_estimator_(new RemoteNtpTimeEstimator(clock_)),
- receiving_(false),
- restored_packet_in_use_(false),
- receiving_ast_enabled_(false),
- receiving_cvo_enabled_(false),
- receiving_tsn_enabled_(false),
- last_packet_log_ms_(-1) {
- assert(remote_bitrate_estimator);
-}
-
-ViEReceiver::~ViEReceiver() {
- UpdateHistograms();
-}
-
-void ViEReceiver::UpdateHistograms() {
- FecPacketCounter counter = fec_receiver_->GetPacketCounter();
- if (counter.num_packets > 0) {
- RTC_HISTOGRAM_PERCENTAGE(
- "WebRTC.Video.ReceivedFecPacketsInPercent",
- static_cast<int>(counter.num_fec_packets * 100 / counter.num_packets));
- }
- if (counter.num_fec_packets > 0) {
- RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.RecoveredMediaPacketsInPercentOfFec",
- static_cast<int>(counter.num_recovered_packets *
- 100 / counter.num_fec_packets));
- }
-}
-
-bool ViEReceiver::SetReceiveCodec(const VideoCodec& video_codec) {
- int8_t old_pltype = -1;
- if (rtp_payload_registry_->ReceivePayloadType(video_codec.plName,
- kVideoPayloadTypeFrequency,
- 0,
- video_codec.maxBitrate,
- &old_pltype) != -1) {
- rtp_payload_registry_->DeRegisterReceivePayload(old_pltype);
- }
-
- return RegisterPayload(video_codec);
-}
-
-bool ViEReceiver::RegisterPayload(const VideoCodec& video_codec) {
- return rtp_receiver_->RegisterReceivePayload(video_codec.plName,
- video_codec.plType,
- kVideoPayloadTypeFrequency,
- 0,
- video_codec.maxBitrate) == 0;
-}
-
-void ViEReceiver::SetNackStatus(bool enable,
- int max_nack_reordering_threshold) {
- if (!enable) {
-    // Reset the threshold back to the lower default threshold when NACK is
-    // disabled, since we will no longer be receiving retransmissions.
- max_nack_reordering_threshold = kDefaultMaxReorderingThreshold;
- }
- rtp_receive_statistics_->SetMaxReorderingThreshold(
- max_nack_reordering_threshold);
- rtp_receiver_->SetNACKStatus(enable ? kNackRtcp : kNackOff);
-}
-
-void ViEReceiver::SetRtxPayloadType(int payload_type,
- int associated_payload_type) {
- rtp_payload_registry_->SetRtxPayloadType(payload_type,
- associated_payload_type);
-}
-
-void ViEReceiver::SetUseRtxPayloadMappingOnRestore(bool val) {
- rtp_payload_registry_->set_use_rtx_payload_mapping_on_restore(val);
-}
-
-void ViEReceiver::SetRtxSsrc(uint32_t ssrc) {
- rtp_payload_registry_->SetRtxSsrc(ssrc);
-}
-
-bool ViEReceiver::GetRtxSsrc(uint32_t* ssrc) const {
- return rtp_payload_registry_->GetRtxSsrc(ssrc);
-}
-
-bool ViEReceiver::IsFecEnabled() const {
- return rtp_payload_registry_->ulpfec_payload_type() > -1;
-}
-
-uint32_t ViEReceiver::GetRemoteSsrc() const {
- return rtp_receiver_->SSRC();
-}
-
-int ViEReceiver::GetCsrcs(uint32_t* csrcs) const {
- return rtp_receiver_->CSRCs(csrcs);
-}
-
-void ViEReceiver::SetRtpRtcpModule(RtpRtcp* module) {
- rtp_rtcp_ = module;
-}
-
-RtpReceiver* ViEReceiver::GetRtpReceiver() const {
- return rtp_receiver_.get();
-}
-
-void ViEReceiver::RegisterRtpRtcpModules(
- const std::vector<RtpRtcp*>& rtp_modules) {
- CriticalSectionScoped cs(receive_cs_.get());
-  // Only change the "simulcast" modules; the base module can be accessed
-  // without a lock, whereas the simulcast modules require locking as they can
-  // be changed at runtime.
- rtp_rtcp_simulcast_ =
- std::vector<RtpRtcp*>(rtp_modules.begin() + 1, rtp_modules.end());
-}
-
-bool ViEReceiver::SetReceiveTimestampOffsetStatus(bool enable, int id) {
- if (enable) {
- return rtp_header_parser_->RegisterRtpHeaderExtension(
- kRtpExtensionTransmissionTimeOffset, id);
- } else {
- return rtp_header_parser_->DeregisterRtpHeaderExtension(
- kRtpExtensionTransmissionTimeOffset);
- }
-}
-
-bool ViEReceiver::SetReceiveAbsoluteSendTimeStatus(bool enable, int id) {
- if (enable) {
- if (rtp_header_parser_->RegisterRtpHeaderExtension(
- kRtpExtensionAbsoluteSendTime, id)) {
- receiving_ast_enabled_ = true;
- return true;
- } else {
- return false;
- }
- } else {
- receiving_ast_enabled_ = false;
- return rtp_header_parser_->DeregisterRtpHeaderExtension(
- kRtpExtensionAbsoluteSendTime);
- }
-}
-
-bool ViEReceiver::SetReceiveVideoRotationStatus(bool enable, int id) {
- if (enable) {
- if (rtp_header_parser_->RegisterRtpHeaderExtension(
- kRtpExtensionVideoRotation, id)) {
- receiving_cvo_enabled_ = true;
- return true;
- } else {
- return false;
- }
- } else {
- receiving_cvo_enabled_ = false;
- return rtp_header_parser_->DeregisterRtpHeaderExtension(
- kRtpExtensionVideoRotation);
- }
-}
-
-bool ViEReceiver::SetReceiveTransportSequenceNumber(bool enable, int id) {
- if (enable) {
- if (rtp_header_parser_->RegisterRtpHeaderExtension(
- kRtpExtensionTransportSequenceNumber, id)) {
- receiving_tsn_enabled_ = true;
- return true;
- } else {
- return false;
- }
- } else {
- receiving_tsn_enabled_ = false;
- return rtp_header_parser_->DeregisterRtpHeaderExtension(
- kRtpExtensionTransportSequenceNumber);
- }
-}
-
-int ViEReceiver::ReceivedRTPPacket(const void* rtp_packet,
- size_t rtp_packet_length,
- const PacketTime& packet_time) {
- return InsertRTPPacket(static_cast<const uint8_t*>(rtp_packet),
- rtp_packet_length, packet_time);
-}
-
-int ViEReceiver::ReceivedRTCPPacket(const void* rtcp_packet,
- size_t rtcp_packet_length) {
- return InsertRTCPPacket(static_cast<const uint8_t*>(rtcp_packet),
- rtcp_packet_length);
-}
-
-int32_t ViEReceiver::OnReceivedPayloadData(const uint8_t* payload_data,
- const size_t payload_size,
- const WebRtcRTPHeader* rtp_header) {
- WebRtcRTPHeader rtp_header_with_ntp = *rtp_header;
- rtp_header_with_ntp.ntp_time_ms =
- ntp_estimator_->Estimate(rtp_header->header.timestamp);
- if (vcm_->IncomingPacket(payload_data,
- payload_size,
- rtp_header_with_ntp) != 0) {
- // Check this...
- return -1;
- }
- return 0;
-}
-
-bool ViEReceiver::OnRecoveredPacket(const uint8_t* rtp_packet,
- size_t rtp_packet_length) {
- RTPHeader header;
- if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length, &header)) {
- return false;
- }
- header.payload_type_frequency = kVideoPayloadTypeFrequency;
- bool in_order = IsPacketInOrder(header);
- return ReceivePacket(rtp_packet, rtp_packet_length, header, in_order);
-}
-
-int ViEReceiver::InsertRTPPacket(const uint8_t* rtp_packet,
- size_t rtp_packet_length,
- const PacketTime& packet_time) {
- {
- CriticalSectionScoped cs(receive_cs_.get());
- if (!receiving_) {
- return -1;
- }
- }
-
- RTPHeader header;
- if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length,
- &header)) {
- return -1;
- }
- size_t payload_length = rtp_packet_length - header.headerLength;
- int64_t arrival_time_ms;
- int64_t now_ms = clock_->TimeInMilliseconds();
- if (packet_time.timestamp != -1)
- arrival_time_ms = (packet_time.timestamp + 500) / 1000;
- else
- arrival_time_ms = now_ms;
-
- {
- // Periodically log the RTP header of incoming packets.
- CriticalSectionScoped cs(receive_cs_.get());
- if (now_ms - last_packet_log_ms_ > kPacketLogIntervalMs) {
- std::stringstream ss;
- ss << "Packet received on SSRC: " << header.ssrc << " with payload type: "
- << static_cast<int>(header.payloadType) << ", timestamp: "
- << header.timestamp << ", sequence number: " << header.sequenceNumber
- << ", arrival time: " << arrival_time_ms;
- if (header.extension.hasTransmissionTimeOffset)
- ss << ", toffset: " << header.extension.transmissionTimeOffset;
- if (header.extension.hasAbsoluteSendTime)
- ss << ", abs send time: " << header.extension.absoluteSendTime;
- LOG(LS_INFO) << ss.str();
- last_packet_log_ms_ = now_ms;
- }
- }
-
- remote_bitrate_estimator_->IncomingPacket(arrival_time_ms, payload_length,
- header, true);
- header.payload_type_frequency = kVideoPayloadTypeFrequency;
-
- bool in_order = IsPacketInOrder(header);
- rtp_payload_registry_->SetIncomingPayloadType(header);
- int ret = ReceivePacket(rtp_packet, rtp_packet_length, header, in_order)
- ? 0
- : -1;
- // Update receive statistics after ReceivePacket.
- // Receive statistics will be reset if the payload type changes (make sure
- // that the first packet is included in the stats).
- rtp_receive_statistics_->IncomingPacket(
- header, rtp_packet_length, IsPacketRetransmitted(header, in_order));
- return ret;
-}
-
-bool ViEReceiver::ReceivePacket(const uint8_t* packet,
- size_t packet_length,
- const RTPHeader& header,
- bool in_order) {
- if (rtp_payload_registry_->IsEncapsulated(header)) {
- return ParseAndHandleEncapsulatingHeader(packet, packet_length, header);
- }
- const uint8_t* payload = packet + header.headerLength;
- assert(packet_length >= header.headerLength);
- size_t payload_length = packet_length - header.headerLength;
- PayloadUnion payload_specific;
- if (!rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
- &payload_specific)) {
- return false;
- }
- return rtp_receiver_->IncomingRtpPacket(header, payload, payload_length,
- payload_specific, in_order);
-}
-
-bool ViEReceiver::ParseAndHandleEncapsulatingHeader(const uint8_t* packet,
- size_t packet_length,
- const RTPHeader& header) {
- if (rtp_payload_registry_->IsRed(header)) {
- int8_t ulpfec_pt = rtp_payload_registry_->ulpfec_payload_type();
- if (packet[header.headerLength] == ulpfec_pt) {
- rtp_receive_statistics_->FecPacketReceived(header, packet_length);
- // Notify vcm about received FEC packets to avoid NACKing these packets.
- NotifyReceiverOfFecPacket(header);
- }
- if (fec_receiver_->AddReceivedRedPacket(
- header, packet, packet_length, ulpfec_pt) != 0) {
- return false;
- }
- return fec_receiver_->ProcessReceivedFec() == 0;
- } else if (rtp_payload_registry_->IsRtx(header)) {
- if (header.headerLength + header.paddingLength == packet_length) {
- // This is an empty packet and should be silently dropped before trying to
- // parse the RTX header.
- return true;
- }
- // Remove the RTX header and parse the original RTP header.
- if (packet_length < header.headerLength)
- return false;
- if (packet_length > sizeof(restored_packet_))
- return false;
- CriticalSectionScoped cs(receive_cs_.get());
- if (restored_packet_in_use_) {
- LOG(LS_WARNING) << "Multiple RTX headers detected, dropping packet.";
- return false;
- }
- if (!rtp_payload_registry_->RestoreOriginalPacket(
- restored_packet_, packet, &packet_length, rtp_receiver_->SSRC(),
- header)) {
- LOG(LS_WARNING) << "Incoming RTX packet: Invalid RTP header";
- return false;
- }
- restored_packet_in_use_ = true;
- bool ret = OnRecoveredPacket(restored_packet_, packet_length);
- restored_packet_in_use_ = false;
- return ret;
- }
- return false;
-}
-
-void ViEReceiver::NotifyReceiverOfFecPacket(const RTPHeader& header) {
- int8_t last_media_payload_type =
- rtp_payload_registry_->last_received_media_payload_type();
- if (last_media_payload_type < 0) {
- LOG(LS_WARNING) << "Failed to get last media payload type.";
- return;
- }
- // Fake an empty media packet.
- WebRtcRTPHeader rtp_header = {};
- rtp_header.header = header;
- rtp_header.header.payloadType = last_media_payload_type;
- rtp_header.header.paddingLength = 0;
- PayloadUnion payload_specific;
- if (!rtp_payload_registry_->GetPayloadSpecifics(last_media_payload_type,
- &payload_specific)) {
- LOG(LS_WARNING) << "Failed to get payload specifics.";
- return;
- }
- rtp_header.type.Video.codec = payload_specific.Video.videoCodecType;
- rtp_header.type.Video.rotation = kVideoRotation_0;
- if (header.extension.hasVideoRotation) {
- rtp_header.type.Video.rotation =
- ConvertCVOByteToVideoRotation(header.extension.videoRotation);
- }
- OnReceivedPayloadData(NULL, 0, &rtp_header);
-}
-
-int ViEReceiver::InsertRTCPPacket(const uint8_t* rtcp_packet,
- size_t rtcp_packet_length) {
- {
- CriticalSectionScoped cs(receive_cs_.get());
- if (!receiving_) {
- return -1;
- }
-
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_simulcast_)
- rtp_rtcp->IncomingRtcpPacket(rtcp_packet, rtcp_packet_length);
- }
- assert(rtp_rtcp_); // Should be set by owner at construction time.
- int ret = rtp_rtcp_->IncomingRtcpPacket(rtcp_packet, rtcp_packet_length);
- if (ret != 0) {
- return ret;
- }
-
- int64_t rtt = 0;
- rtp_rtcp_->RTT(rtp_receiver_->SSRC(), &rtt, NULL, NULL, NULL);
- if (rtt == 0) {
-    // Waiting for a valid RTT.
- return 0;
- }
- uint32_t ntp_secs = 0;
- uint32_t ntp_frac = 0;
- uint32_t rtp_timestamp = 0;
- if (0 != rtp_rtcp_->RemoteNTP(&ntp_secs, &ntp_frac, NULL, NULL,
- &rtp_timestamp)) {
- // Waiting for RTCP.
- return 0;
- }
- ntp_estimator_->UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp);
-
- return 0;
-}
-
-void ViEReceiver::StartReceive() {
- CriticalSectionScoped cs(receive_cs_.get());
- receiving_ = true;
-}
-
-void ViEReceiver::StopReceive() {
- CriticalSectionScoped cs(receive_cs_.get());
- receiving_ = false;
-}
-
-ReceiveStatistics* ViEReceiver::GetReceiveStatistics() const {
- return rtp_receive_statistics_.get();
-}
-
-bool ViEReceiver::IsPacketInOrder(const RTPHeader& header) const {
- StreamStatistician* statistician =
- rtp_receive_statistics_->GetStatistician(header.ssrc);
- if (!statistician)
- return false;
- return statistician->IsPacketInOrder(header.sequenceNumber);
-}
-
-bool ViEReceiver::IsPacketRetransmitted(const RTPHeader& header,
- bool in_order) const {
- // Retransmissions are handled separately if RTX is enabled.
- if (rtp_payload_registry_->RtxEnabled())
- return false;
- StreamStatistician* statistician =
- rtp_receive_statistics_->GetStatistician(header.ssrc);
- if (!statistician)
- return false;
- // Check if this is a retransmission.
- int64_t min_rtt = 0;
- rtp_rtcp_->RTT(rtp_receiver_->SSRC(), NULL, NULL, &min_rtt, NULL);
- return !in_order &&
- statistician->IsRetransmitOfOldPacket(header, min_rtt);
-}
-} // namespace webrtc
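One detail in InsertRTPPacket() above deserves a note: packet_time.timestamp is in microseconds, and the (timestamp + 500) / 1000 expression rounds to the nearest millisecond instead of truncating. A standalone sketch of just that conversion (the helper name is mine, not WebRTC's):

#include <cassert>
#include <cstdint>

// Round a non-negative microsecond timestamp to the nearest millisecond,
// mirroring the arrival-time conversion in ViEReceiver::InsertRTPPacket().
int64_t MicrosToNearestMillis(int64_t time_us) {
  assert(time_us >= 0);  // The +500 bias only rounds correctly for >= 0.
  return (time_us + 500) / 1000;
}

// Example: MicrosToNearestMillis(1499) == 1, MicrosToNearestMillis(1500) == 2.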
diff --git a/webrtc/video_engine/vie_receiver.h b/webrtc/video_engine/vie_receiver.h
deleted file mode 100644
index cd069eaa5b..0000000000
--- a/webrtc/video_engine/vie_receiver.h
+++ /dev/null
@@ -1,131 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_VIDEO_ENGINE_VIE_RECEIVER_H_
-#define WEBRTC_VIDEO_ENGINE_VIE_RECEIVER_H_
-
-#include <vector>
-
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
-#include "webrtc/typedefs.h"
-#include "webrtc/video_engine/vie_defines.h"
-
-namespace webrtc {
-
-class CriticalSectionWrapper;
-class FecReceiver;
-class RemoteNtpTimeEstimator;
-class ReceiveStatistics;
-class RemoteBitrateEstimator;
-class RtpHeaderParser;
-class RTPPayloadRegistry;
-class RtpReceiver;
-class RtpRtcp;
-class VideoCodingModule;
-struct ReceiveBandwidthEstimatorStats;
-
-class ViEReceiver : public RtpData {
- public:
- ViEReceiver(VideoCodingModule* module_vcm,
- RemoteBitrateEstimator* remote_bitrate_estimator,
- RtpFeedback* rtp_feedback);
- ~ViEReceiver();
-
- bool SetReceiveCodec(const VideoCodec& video_codec);
- bool RegisterPayload(const VideoCodec& video_codec);
-
- void SetNackStatus(bool enable, int max_nack_reordering_threshold);
- void SetRtxPayloadType(int payload_type, int associated_payload_type);
- // If set to true, the RTX payload type mapping supplied in
- // |SetRtxPayloadType| will be used when restoring RTX packets. Without it,
- // RTX packets will always be restored to the last non-RTX packet payload type
- // received.
- void SetUseRtxPayloadMappingOnRestore(bool val);
- void SetRtxSsrc(uint32_t ssrc);
- bool GetRtxSsrc(uint32_t* ssrc) const;
-
- bool IsFecEnabled() const;
-
- uint32_t GetRemoteSsrc() const;
- int GetCsrcs(uint32_t* csrcs) const;
-
- void SetRtpRtcpModule(RtpRtcp* module);
-
- RtpReceiver* GetRtpReceiver() const;
-
- void RegisterRtpRtcpModules(const std::vector<RtpRtcp*>& rtp_modules);
-
- bool SetReceiveTimestampOffsetStatus(bool enable, int id);
- bool SetReceiveAbsoluteSendTimeStatus(bool enable, int id);
- bool SetReceiveVideoRotationStatus(bool enable, int id);
- bool SetReceiveTransportSequenceNumber(bool enable, int id);
-
- void StartReceive();
- void StopReceive();
-
- // Receives packets from external transport.
- int ReceivedRTPPacket(const void* rtp_packet, size_t rtp_packet_length,
- const PacketTime& packet_time);
- int ReceivedRTCPPacket(const void* rtcp_packet, size_t rtcp_packet_length);
-
- // Implements RtpData.
- int32_t OnReceivedPayloadData(const uint8_t* payload_data,
- const size_t payload_size,
- const WebRtcRTPHeader* rtp_header) override;
- bool OnRecoveredPacket(const uint8_t* packet, size_t packet_length) override;
-
- ReceiveStatistics* GetReceiveStatistics() const;
- private:
- int InsertRTPPacket(const uint8_t* rtp_packet, size_t rtp_packet_length,
- const PacketTime& packet_time);
- bool ReceivePacket(const uint8_t* packet,
- size_t packet_length,
- const RTPHeader& header,
- bool in_order);
-  // Parses and handles encapsulating headers such as RTX and RED.
-  // This function assumes that it is called from only one thread.
- bool ParseAndHandleEncapsulatingHeader(const uint8_t* packet,
- size_t packet_length,
- const RTPHeader& header);
- void NotifyReceiverOfFecPacket(const RTPHeader& header);
- int InsertRTCPPacket(const uint8_t* rtcp_packet, size_t rtcp_packet_length);
- bool IsPacketInOrder(const RTPHeader& header) const;
- bool IsPacketRetransmitted(const RTPHeader& header, bool in_order) const;
- void UpdateHistograms();
-
- rtc::scoped_ptr<CriticalSectionWrapper> receive_cs_;
- Clock* clock_;
- rtc::scoped_ptr<RtpHeaderParser> rtp_header_parser_;
- rtc::scoped_ptr<RTPPayloadRegistry> rtp_payload_registry_;
- rtc::scoped_ptr<RtpReceiver> rtp_receiver_;
- const rtc::scoped_ptr<ReceiveStatistics> rtp_receive_statistics_;
- rtc::scoped_ptr<FecReceiver> fec_receiver_;
- RtpRtcp* rtp_rtcp_;
- std::vector<RtpRtcp*> rtp_rtcp_simulcast_;
- VideoCodingModule* vcm_;
- RemoteBitrateEstimator* remote_bitrate_estimator_;
-
- rtc::scoped_ptr<RemoteNtpTimeEstimator> ntp_estimator_;
-
- bool receiving_;
- uint8_t restored_packet_[kViEMaxMtu];
- bool restored_packet_in_use_;
- bool receiving_ast_enabled_;
- bool receiving_cvo_enabled_;
- bool receiving_tsn_enabled_;
- int64_t last_packet_log_ms_;
-};
-
-}  // namespace webrtc
-
-#endif // WEBRTC_VIDEO_ENGINE_VIE_RECEIVER_H_
diff --git a/webrtc/video_engine/vie_remb.cc b/webrtc/video_engine/vie_remb.cc
deleted file mode 100644
index b347f2ee00..0000000000
--- a/webrtc/video_engine/vie_remb.cc
+++ /dev/null
@@ -1,143 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/video_engine/vie_remb.h"
-
-#include <assert.h>
-
-#include <algorithm>
-
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
-#include "webrtc/modules/utility/interface/process_thread.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-namespace webrtc {
-
-const int kRembSendIntervalMs = 200;
-
-// Percentage threshold for deciding whether a new REMB should be sent asap.
-const unsigned int kSendThresholdPercent = 97;
-
-VieRemb::VieRemb()
- : list_crit_(CriticalSectionWrapper::CreateCriticalSection()),
- last_remb_time_(TickTime::MillisecondTimestamp()),
- last_send_bitrate_(0),
- bitrate_(0) {}
-
-VieRemb::~VieRemb() {}
-
-void VieRemb::AddReceiveChannel(RtpRtcp* rtp_rtcp) {
- assert(rtp_rtcp);
-
- CriticalSectionScoped cs(list_crit_.get());
- if (std::find(receive_modules_.begin(), receive_modules_.end(), rtp_rtcp) !=
- receive_modules_.end())
- return;
-
- // The module probably doesn't have a remote SSRC yet, so don't add it to the
- // map.
- receive_modules_.push_back(rtp_rtcp);
-}
-
-void VieRemb::RemoveReceiveChannel(RtpRtcp* rtp_rtcp) {
- assert(rtp_rtcp);
-
- CriticalSectionScoped cs(list_crit_.get());
- for (RtpModules::iterator it = receive_modules_.begin();
- it != receive_modules_.end(); ++it) {
- if ((*it) == rtp_rtcp) {
- receive_modules_.erase(it);
- break;
- }
- }
-}
-
-void VieRemb::AddRembSender(RtpRtcp* rtp_rtcp) {
- assert(rtp_rtcp);
-
- CriticalSectionScoped cs(list_crit_.get());
-
- // Verify this module hasn't been added earlier.
- if (std::find(rtcp_sender_.begin(), rtcp_sender_.end(), rtp_rtcp) !=
- rtcp_sender_.end())
- return;
- rtcp_sender_.push_back(rtp_rtcp);
-}
-
-void VieRemb::RemoveRembSender(RtpRtcp* rtp_rtcp) {
- assert(rtp_rtcp);
-
- CriticalSectionScoped cs(list_crit_.get());
- for (RtpModules::iterator it = rtcp_sender_.begin();
- it != rtcp_sender_.end(); ++it) {
- if ((*it) == rtp_rtcp) {
- rtcp_sender_.erase(it);
- return;
- }
- }
-}
-
-bool VieRemb::InUse() const {
- CriticalSectionScoped cs(list_crit_.get());
- if (receive_modules_.empty() && rtcp_sender_.empty())
- return false;
- else
- return true;
-}
-
-void VieRemb::OnReceiveBitrateChanged(const std::vector<unsigned int>& ssrcs,
- unsigned int bitrate) {
- list_crit_->Enter();
- // If we already have an estimate, check if the new total estimate is below
- // kSendThresholdPercent of the previous estimate.
- if (last_send_bitrate_ > 0) {
- unsigned int new_remb_bitrate = last_send_bitrate_ - bitrate_ + bitrate;
-
- if (new_remb_bitrate < kSendThresholdPercent * last_send_bitrate_ / 100) {
-      // The new total estimate is below the kSendThresholdPercent threshold
-      // of the last report. Send a REMB asap.
- last_remb_time_ = TickTime::MillisecondTimestamp() - kRembSendIntervalMs;
- }
- }
- bitrate_ = bitrate;
-
-  // Rate-limit REMB sends to at most one per kRembSendIntervalMs.
- int64_t now = TickTime::MillisecondTimestamp();
-
- if (now - last_remb_time_ < kRembSendIntervalMs) {
- list_crit_->Leave();
- return;
- }
- last_remb_time_ = now;
-
- if (ssrcs.empty() || receive_modules_.empty()) {
- list_crit_->Leave();
- return;
- }
-
- // Send a REMB packet.
- RtpRtcp* sender = NULL;
- if (!rtcp_sender_.empty()) {
- sender = rtcp_sender_.front();
- } else {
- sender = receive_modules_.front();
- }
- last_send_bitrate_ = bitrate_;
-
- list_crit_->Leave();
-
- if (sender) {
- sender->SetREMBData(bitrate_, ssrcs);
- }
-}
-
-} // namespace webrtc
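The rate limiting in OnReceiveBitrateChanged() above is the subtle part: REMB packets normally go out at most once per kRembSendIntervalMs, but a drop below kSendThresholdPercent of the last sent total bypasses the interval. A condensed sketch of that decision alone (the helper name and free-function form are mine):

#include <cstdint>

// Returns true if a new REMB should bypass the normal send interval,
// following VieRemb::OnReceiveBitrateChanged(): adjust the last sent total
// by the change in this estimate, then compare against the threshold.
bool ShouldBypassSendInterval(uint32_t last_send_bitrate_bps,
                              uint32_t last_estimate_bps,
                              uint32_t new_estimate_bps) {
  const uint32_t kSendThresholdPercent = 97;
  if (last_send_bitrate_bps == 0)
    return false;  // Nothing sent yet; the normal interval applies.
  uint32_t new_total =
      last_send_bitrate_bps - last_estimate_bps + new_estimate_bps;
  // 64-bit math avoids overflow; the original relies on unsigned int.
  return new_total <
         static_cast<uint64_t>(kSendThresholdPercent) * last_send_bitrate_bps /
             100;
}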
diff --git a/webrtc/video_engine/vie_remb.h b/webrtc/video_engine/vie_remb.h
deleted file mode 100644
index 9f38259ca8..0000000000
--- a/webrtc/video_engine/vie_remb.h
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_VIDEO_ENGINE_VIE_REMB_H_
-#define WEBRTC_VIDEO_ENGINE_VIE_REMB_H_
-
-#include <list>
-#include <utility>
-#include <vector>
-
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/interface/module.h"
-#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
-
-namespace webrtc {
-
-class CriticalSectionWrapper;
-class ProcessThread;
-class RtpRtcp;
-
-class VieRemb : public RemoteBitrateObserver {
- public:
- VieRemb();
- ~VieRemb();
-
- // Called to add a receive channel to include in the REMB packet.
- void AddReceiveChannel(RtpRtcp* rtp_rtcp);
-
- // Removes the specified channel from REMB estimate.
- void RemoveReceiveChannel(RtpRtcp* rtp_rtcp);
-
- // Called to add a module that can generate and send REMB RTCP.
- void AddRembSender(RtpRtcp* rtp_rtcp);
-
- // Removes a REMB RTCP sender.
- void RemoveRembSender(RtpRtcp* rtp_rtcp);
-
- // Returns true if the instance is in use, false otherwise.
- bool InUse() const;
-
- // Called every time there is a new bitrate estimate for a receive channel
- // group. This call will trigger a new RTCP REMB packet if the bitrate
- // estimate has decreased or if no RTCP REMB packet has been sent for
- // a certain time interval.
-  // Implements RemoteBitrateObserver.
- virtual void OnReceiveBitrateChanged(const std::vector<unsigned int>& ssrcs,
- unsigned int bitrate);
-
- private:
- typedef std::list<RtpRtcp*> RtpModules;
-
- rtc::scoped_ptr<CriticalSectionWrapper> list_crit_;
-
- // The last time a REMB was sent.
- int64_t last_remb_time_;
- unsigned int last_send_bitrate_;
-
- // All RtpRtcp modules to include in the REMB packet.
- RtpModules receive_modules_;
-
- // All modules that can send REMB RTCP.
- RtpModules rtcp_sender_;
-
- // The last bitrate update.
- unsigned int bitrate_;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_VIDEO_ENGINE_VIE_REMB_H_
diff --git a/webrtc/video_engine/vie_remb_unittest.cc b/webrtc/video_engine/vie_remb_unittest.cc
deleted file mode 100644
index 3289c4b822..0000000000
--- a/webrtc/video_engine/vie_remb_unittest.cc
+++ /dev/null
@@ -1,251 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-
-// This file includes unit tests for ViERemb.
-
-#include "testing/gmock/include/gmock/gmock.h"
-#include "testing/gtest/include/gtest/gtest.h"
-
-#include <vector>
-
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
-#include "webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h"
-#include "webrtc/modules/utility/interface/mock/mock_process_thread.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
-#include "webrtc/video_engine/vie_remb.h"
-
-using ::testing::_;
-using ::testing::AnyNumber;
-using ::testing::NiceMock;
-using ::testing::Return;
-
-namespace webrtc {
-
-class ViERembTest : public ::testing::Test {
- protected:
- virtual void SetUp() {
- TickTime::UseFakeClock(12345);
- process_thread_.reset(new NiceMock<MockProcessThread>);
- vie_remb_.reset(new VieRemb());
- }
- rtc::scoped_ptr<MockProcessThread> process_thread_;
- rtc::scoped_ptr<VieRemb> vie_remb_;
-};
-
-TEST_F(ViERembTest, OneModuleTestForSendingRemb) {
- MockRtpRtcp rtp;
- vie_remb_->AddReceiveChannel(&rtp);
- vie_remb_->AddRembSender(&rtp);
-
- const unsigned int bitrate_estimate = 456;
- unsigned int ssrc = 1234;
- std::vector<unsigned int> ssrcs(&ssrc, &ssrc + 1);
-
- vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
-
- TickTime::AdvanceFakeClock(1000);
- EXPECT_CALL(rtp, SetREMBData(bitrate_estimate, ssrcs))
- .Times(1);
- vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
-
- // Lower bitrate to send another REMB packet.
- EXPECT_CALL(rtp, SetREMBData(bitrate_estimate - 100, ssrcs))
- .Times(1);
- vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate - 100);
-
- vie_remb_->RemoveReceiveChannel(&rtp);
- vie_remb_->RemoveRembSender(&rtp);
-}
-
-TEST_F(ViERembTest, LowerEstimateToSendRemb) {
- MockRtpRtcp rtp;
- vie_remb_->AddReceiveChannel(&rtp);
- vie_remb_->AddRembSender(&rtp);
-
- unsigned int bitrate_estimate = 456;
- unsigned int ssrc = 1234;
- std::vector<unsigned int> ssrcs(&ssrc, &ssrc + 1);
-
- vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
- // Call OnReceiveBitrateChanged twice to get a first estimate.
- TickTime::AdvanceFakeClock(1000);
- EXPECT_CALL(rtp, SetREMBData(bitrate_estimate, ssrcs))
- .Times(1);
- vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
-
-  // Lower the estimate by more than 3% to trigger a call to SetREMBData right
-  // away.
- bitrate_estimate = bitrate_estimate - 100;
- EXPECT_CALL(rtp, SetREMBData(bitrate_estimate, ssrcs))
- .Times(1);
- vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
-}
-
-TEST_F(ViERembTest, VerifyIncreasingAndDecreasing) {
- MockRtpRtcp rtp_0;
- MockRtpRtcp rtp_1;
- vie_remb_->AddReceiveChannel(&rtp_0);
- vie_remb_->AddRembSender(&rtp_0);
- vie_remb_->AddReceiveChannel(&rtp_1);
-
- unsigned int bitrate_estimate[] = { 456, 789 };
- unsigned int ssrc[] = { 1234, 5678 };
- std::vector<unsigned int> ssrcs(ssrc, ssrc + sizeof(ssrc) / sizeof(ssrc[0]));
-
- vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate[0]);
-
- // Call OnReceiveBitrateChanged twice to get a first estimate.
- EXPECT_CALL(rtp_0, SetREMBData(bitrate_estimate[0], ssrcs))
- .Times(1);
- TickTime::AdvanceFakeClock(1000);
- vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate[0]);
-
- vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate[1] + 100);
-
- // Lower the estimate to trigger a callback.
- EXPECT_CALL(rtp_0, SetREMBData(bitrate_estimate[1], ssrcs))
- .Times(1);
- vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate[1]);
-
- vie_remb_->RemoveReceiveChannel(&rtp_0);
- vie_remb_->RemoveRembSender(&rtp_0);
- vie_remb_->RemoveReceiveChannel(&rtp_1);
-}
-
-TEST_F(ViERembTest, NoRembForIncreasedBitrate) {
- MockRtpRtcp rtp_0;
- MockRtpRtcp rtp_1;
- vie_remb_->AddReceiveChannel(&rtp_0);
- vie_remb_->AddRembSender(&rtp_0);
- vie_remb_->AddReceiveChannel(&rtp_1);
-
- unsigned int bitrate_estimate = 456;
- unsigned int ssrc[] = { 1234, 5678 };
- std::vector<unsigned int> ssrcs(ssrc, ssrc + sizeof(ssrc) / sizeof(ssrc[0]));
-
- vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
- // Call OnReceiveBitrateChanged twice to get a first estimate.
- TickTime::AdvanceFakeClock(1000);
- EXPECT_CALL(rtp_0, SetREMBData(bitrate_estimate, ssrcs))
- .Times(1);
- vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
-
- // Increased estimate shouldn't trigger a callback right away.
- EXPECT_CALL(rtp_0, SetREMBData(_, _))
- .Times(0);
- vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate + 1);
-
- // Decreasing the estimate less than 3% shouldn't trigger a new callback.
- EXPECT_CALL(rtp_0, SetREMBData(_, _))
- .Times(0);
- int lower_estimate = bitrate_estimate * 98 / 100;
- vie_remb_->OnReceiveBitrateChanged(ssrcs, lower_estimate);
-
- vie_remb_->RemoveReceiveChannel(&rtp_1);
- vie_remb_->RemoveReceiveChannel(&rtp_0);
- vie_remb_->RemoveRembSender(&rtp_0);
-}
-
-TEST_F(ViERembTest, ChangeSendRtpModule) {
- MockRtpRtcp rtp_0;
- MockRtpRtcp rtp_1;
- vie_remb_->AddReceiveChannel(&rtp_0);
- vie_remb_->AddRembSender(&rtp_0);
- vie_remb_->AddReceiveChannel(&rtp_1);
-
- unsigned int bitrate_estimate = 456;
- unsigned int ssrc[] = { 1234, 5678 };
- std::vector<unsigned int> ssrcs(ssrc, ssrc + sizeof(ssrc) / sizeof(ssrc[0]));
-
- vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
- // Call OnReceiveBitrateChanged twice to get a first estimate.
- TickTime::AdvanceFakeClock(1000);
- EXPECT_CALL(rtp_0, SetREMBData(bitrate_estimate, ssrcs))
- .Times(1);
- vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
-
- // Decrease estimate to trigger a REMB.
- bitrate_estimate = bitrate_estimate - 100;
- EXPECT_CALL(rtp_0, SetREMBData(bitrate_estimate, ssrcs))
- .Times(1);
- vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
-
-  // Remove the sending module and register the second one; the REMB should
-  // now be sent on the second module.
- vie_remb_->RemoveRembSender(&rtp_0);
- vie_remb_->AddRembSender(&rtp_1);
- vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
-
- bitrate_estimate = bitrate_estimate - 100;
- EXPECT_CALL(rtp_1, SetREMBData(bitrate_estimate, ssrcs))
- .Times(1);
- vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
-
- vie_remb_->RemoveReceiveChannel(&rtp_0);
- vie_remb_->RemoveReceiveChannel(&rtp_1);
-}
-
-TEST_F(ViERembTest, OnlyOneRembForDoubleProcess) {
- MockRtpRtcp rtp;
- unsigned int bitrate_estimate = 456;
- unsigned int ssrc = 1234;
- std::vector<unsigned int> ssrcs(&ssrc, &ssrc + 1);
-
- vie_remb_->AddReceiveChannel(&rtp);
- vie_remb_->AddRembSender(&rtp);
- vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
- // Call OnReceiveBitrateChanged twice to get a first estimate.
- TickTime::AdvanceFakeClock(1000);
- EXPECT_CALL(rtp, SetREMBData(_, _))
- .Times(1);
- vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
-
-  // Lower the estimate; this should trigger a call to SetREMBData right away.
- bitrate_estimate = bitrate_estimate - 100;
- EXPECT_CALL(rtp, SetREMBData(bitrate_estimate, ssrcs))
- .Times(1);
- vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
-
-  // Call OnReceiveBitrateChanged again; this should not trigger a new callback.
- EXPECT_CALL(rtp, SetREMBData(_, _))
- .Times(0);
- vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
- vie_remb_->RemoveReceiveChannel(&rtp);
- vie_remb_->RemoveRembSender(&rtp);
-}
-
-// Only register receiving modules and make sure we fall back to triggering a
-// REMB packet on one of them.
-TEST_F(ViERembTest, NoSendingRtpModule) {
- MockRtpRtcp rtp;
- vie_remb_->AddReceiveChannel(&rtp);
-
- unsigned int bitrate_estimate = 456;
- unsigned int ssrc = 1234;
- std::vector<unsigned int> ssrcs(&ssrc, &ssrc + 1);
-
- vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
-
- // Call OnReceiveBitrateChanged twice to get a first estimate.
- TickTime::AdvanceFakeClock(1000);
- EXPECT_CALL(rtp, SetREMBData(_, _))
- .Times(1);
- vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
-
-  // Lower the estimate to trigger a new REMB packet.
- bitrate_estimate = bitrate_estimate - 100;
- EXPECT_CALL(rtp, SetREMBData(_, _))
- .Times(1);
- vie_remb_->OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
-}
-
-} // namespace webrtc
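All of the tests above lean on TickTime's fake clock to step time deterministically past the 200 ms send interval. Reduced to its essentials, the pattern looks like this (ClockInterface and SimpleFakeClock are illustrative stand-ins, not WebRTC classes):

#include <cstdint>

// Time-dependent code reads a clock interface rather than the system clock...
class ClockInterface {
 public:
  virtual ~ClockInterface() {}
  virtual int64_t NowMs() const = 0;
};

// ...so a test can substitute a manually advanced clock and deterministically
// cross thresholds such as kRembSendIntervalMs.
class SimpleFakeClock : public ClockInterface {
 public:
  explicit SimpleFakeClock(int64_t start_ms) : now_ms_(start_ms) {}
  int64_t NowMs() const override { return now_ms_; }
  void AdvanceMs(int64_t delta_ms) { now_ms_ += delta_ms; }

 private:
  int64_t now_ms_;
};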
diff --git a/webrtc/video_engine/vie_sync_module.cc b/webrtc/video_engine/vie_sync_module.cc
deleted file mode 100644
index 1c5d877cd2..0000000000
--- a/webrtc/video_engine/vie_sync_module.cc
+++ /dev/null
@@ -1,188 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/video_engine/vie_sync_module.h"
-
-#include "webrtc/base/logging.h"
-#include "webrtc/base/trace_event.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/video_engine/stream_synchronization.h"
-#include "webrtc/voice_engine/include/voe_video_sync.h"
-
-namespace webrtc {
-
-int UpdateMeasurements(StreamSynchronization::Measurements* stream,
- const RtpRtcp& rtp_rtcp, const RtpReceiver& receiver) {
- if (!receiver.Timestamp(&stream->latest_timestamp))
- return -1;
- if (!receiver.LastReceivedTimeMs(&stream->latest_receive_time_ms))
- return -1;
-
- uint32_t ntp_secs = 0;
- uint32_t ntp_frac = 0;
- uint32_t rtp_timestamp = 0;
- if (0 != rtp_rtcp.RemoteNTP(&ntp_secs,
- &ntp_frac,
- NULL,
- NULL,
- &rtp_timestamp)) {
- return -1;
- }
-
- bool new_rtcp_sr = false;
- if (!UpdateRtcpList(
- ntp_secs, ntp_frac, rtp_timestamp, &stream->rtcp, &new_rtcp_sr)) {
- return -1;
- }
-
- return 0;
-}
-
-ViESyncModule::ViESyncModule(VideoCodingModule* vcm)
- : data_cs_(CriticalSectionWrapper::CreateCriticalSection()),
- vcm_(vcm),
- video_receiver_(NULL),
- video_rtp_rtcp_(NULL),
- voe_channel_id_(-1),
- voe_sync_interface_(NULL),
- last_sync_time_(TickTime::Now()),
- sync_() {
-}
-
-ViESyncModule::~ViESyncModule() {
-}
-
-int ViESyncModule::ConfigureSync(int voe_channel_id,
- VoEVideoSync* voe_sync_interface,
- RtpRtcp* video_rtcp_module,
- RtpReceiver* video_receiver) {
- CriticalSectionScoped cs(data_cs_.get());
- // Prevent expensive no-ops.
- if (voe_channel_id_ == voe_channel_id &&
- voe_sync_interface_ == voe_sync_interface &&
- video_receiver_ == video_receiver &&
- video_rtp_rtcp_ == video_rtcp_module) {
- return 0;
- }
- voe_channel_id_ = voe_channel_id;
- voe_sync_interface_ = voe_sync_interface;
- video_receiver_ = video_receiver;
- video_rtp_rtcp_ = video_rtcp_module;
- sync_.reset(
- new StreamSynchronization(video_rtp_rtcp_->SSRC(), voe_channel_id));
-
- if (!voe_sync_interface) {
- voe_channel_id_ = -1;
- if (voe_channel_id >= 0) {
-      // Trying to set a voice channel but no interface exists.
- return -1;
- }
- return 0;
- }
- return 0;
-}
-
-int ViESyncModule::VoiceChannel() {
- return voe_channel_id_;
-}
-
-int64_t ViESyncModule::TimeUntilNextProcess() {
- const int64_t kSyncIntervalMs = 1000;
- return kSyncIntervalMs - (TickTime::Now() - last_sync_time_).Milliseconds();
-}
-
-int32_t ViESyncModule::Process() {
- CriticalSectionScoped cs(data_cs_.get());
- last_sync_time_ = TickTime::Now();
-
- const int current_video_delay_ms = vcm_->Delay();
-
- if (voe_channel_id_ == -1) {
- return 0;
- }
- assert(video_rtp_rtcp_ && voe_sync_interface_);
- assert(sync_.get());
-
- int audio_jitter_buffer_delay_ms = 0;
- int playout_buffer_delay_ms = 0;
- if (voe_sync_interface_->GetDelayEstimate(voe_channel_id_,
- &audio_jitter_buffer_delay_ms,
- &playout_buffer_delay_ms) != 0) {
- return 0;
- }
- const int current_audio_delay_ms = audio_jitter_buffer_delay_ms +
- playout_buffer_delay_ms;
-
- RtpRtcp* voice_rtp_rtcp = NULL;
- RtpReceiver* voice_receiver = NULL;
- if (0 != voe_sync_interface_->GetRtpRtcp(voe_channel_id_, &voice_rtp_rtcp,
- &voice_receiver)) {
- return 0;
- }
- assert(voice_rtp_rtcp);
- assert(voice_receiver);
-
- if (UpdateMeasurements(&video_measurement_, *video_rtp_rtcp_,
- *video_receiver_) != 0) {
- return 0;
- }
-
- if (UpdateMeasurements(&audio_measurement_, *voice_rtp_rtcp,
- *voice_receiver) != 0) {
- return 0;
- }
-
- int relative_delay_ms;
- // Calculate how much later or earlier the audio stream is compared to video.
- if (!sync_->ComputeRelativeDelay(audio_measurement_, video_measurement_,
- &relative_delay_ms)) {
- return 0;
- }
-
- TRACE_COUNTER1("webrtc", "SyncCurrentVideoDelay", current_video_delay_ms);
- TRACE_COUNTER1("webrtc", "SyncCurrentAudioDelay", current_audio_delay_ms);
- TRACE_COUNTER1("webrtc", "SyncRelativeDelay", relative_delay_ms);
- int target_audio_delay_ms = 0;
- int target_video_delay_ms = current_video_delay_ms;
- // Calculate the necessary extra audio delay and desired total video
- // delay to get the streams in sync.
- if (!sync_->ComputeDelays(relative_delay_ms,
- current_audio_delay_ms,
- &target_audio_delay_ms,
- &target_video_delay_ms)) {
- return 0;
- }
-
- if (voe_sync_interface_->SetMinimumPlayoutDelay(
- voe_channel_id_, target_audio_delay_ms) == -1) {
- LOG(LS_ERROR) << "Error setting voice delay.";
- }
- vcm_->SetMinimumPlayoutDelay(target_video_delay_ms);
- return 0;
-}
-
-int ViESyncModule::SetTargetBufferingDelay(int target_delay_ms) {
- CriticalSectionScoped cs(data_cs_.get());
- if (!voe_sync_interface_) {
- LOG(LS_ERROR) << "voe_sync_interface_ NULL, can't set playout delay.";
- return -1;
- }
- sync_->SetTargetBufferingDelay(target_delay_ms);
-  // Set the initial playout delay in the voice engine (the video engine is
-  // updated via the VCM interface).
- voe_sync_interface_->SetInitialPlayoutDelay(voe_channel_id_,
- target_delay_ms);
- return 0;
-}
-
-} // namespace webrtc
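Process() above hands the actual delay math to StreamSynchronization, which this diff does not touch. As a deliberately simplified illustration of the idea only, not WebRTC's actual algorithm: synchronization amounts to delaying whichever stream currently renders first.

// Simplified sketch: relative_delay_ms > 0 means audio renders later than
// video, so video is held back; otherwise audio is held back. The real
// StreamSynchronization converges gradually and honors minimum-delay limits.
void ComputeExtraDelays(int relative_delay_ms,
                        int* extra_audio_delay_ms,
                        int* extra_video_delay_ms) {
  if (relative_delay_ms > 0) {
    *extra_audio_delay_ms = 0;
    *extra_video_delay_ms = relative_delay_ms;  // Video waits for audio.
  } else {
    *extra_audio_delay_ms = -relative_delay_ms;  // Audio waits for video.
    *extra_video_delay_ms = 0;
  }
}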
diff --git a/webrtc/video_engine/vie_sync_module.h b/webrtc/video_engine/vie_sync_module.h
deleted file mode 100644
index ea2ae0bd51..0000000000
--- a/webrtc/video_engine/vie_sync_module.h
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// ViESyncModule is responsible for synchronizing audio and video for a given
-// pair of VoE and ViE channels.
-
-#ifndef WEBRTC_VIDEO_ENGINE_VIE_SYNC_MODULE_H_
-#define WEBRTC_VIDEO_ENGINE_VIE_SYNC_MODULE_H_
-
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/interface/module.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
-#include "webrtc/video_engine/stream_synchronization.h"
-#include "webrtc/voice_engine/include/voe_video_sync.h"
-
-namespace webrtc {
-
-class CriticalSectionWrapper;
-class RtpRtcp;
-class VideoCodingModule;
-class ViEChannel;
-class VoEVideoSync;
-
-class ViESyncModule : public Module {
- public:
- explicit ViESyncModule(VideoCodingModule* vcm);
- ~ViESyncModule();
-
- int ConfigureSync(int voe_channel_id,
- VoEVideoSync* voe_sync_interface,
- RtpRtcp* video_rtcp_module,
- RtpReceiver* video_receiver);
-
- int VoiceChannel();
-
- // Set target delay for buffering mode (0 = real-time mode).
- int SetTargetBufferingDelay(int target_delay_ms);
-
- // Implements Module.
- int64_t TimeUntilNextProcess() override;
- int32_t Process() override;
-
- private:
- rtc::scoped_ptr<CriticalSectionWrapper> data_cs_;
- VideoCodingModule* const vcm_;
- RtpReceiver* video_receiver_;
- RtpRtcp* video_rtp_rtcp_;
- int voe_channel_id_;
- VoEVideoSync* voe_sync_interface_;
- TickTime last_sync_time_;
- rtc::scoped_ptr<StreamSynchronization> sync_;
- StreamSynchronization::Measurements audio_measurement_;
- StreamSynchronization::Measurements video_measurement_;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_VIDEO_ENGINE_VIE_SYNC_MODULE_H_
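For context, the Module contract used here is a polling one: a process thread sleeps for TimeUntilNextProcess() milliseconds and then calls Process(). A self-contained sketch of such a driver loop (the Module stand-in and RunModule are mine, for illustration only):

#include <atomic>
#include <chrono>
#include <cstdint>
#include <thread>

// Minimal stand-in for the webrtc::Module interface.
class Module {
 public:
  virtual ~Module() {}
  virtual int64_t TimeUntilNextProcess() = 0;
  virtual int32_t Process() = 0;
};

// Drives a module the way a process thread would; for ViESyncModule this
// results in Process() running roughly once per kSyncIntervalMs (1000 ms).
void RunModule(Module* module, const std::atomic<bool>& running) {
  while (running.load()) {
    int64_t wait_ms = module->TimeUntilNextProcess();
    if (wait_ms > 0)
      std::this_thread::sleep_for(std::chrono::milliseconds(wait_ms));
    module->Process();
  }
}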
diff --git a/webrtc/video_engine_tests.isolate b/webrtc/video_engine_tests.isolate
index 5aa962323d..f2f961fa23 100644
--- a/webrtc/video_engine_tests.isolate
+++ b/webrtc/video_engine_tests.isolate
@@ -11,6 +11,7 @@
'variables': {
'files': [
'<(DEPTH)/resources/foreman_cif_short.yuv',
+ '<(DEPTH)/resources/voice_engine/audio_long16.pcm',
],
},
}],
diff --git a/webrtc/video_frame.h b/webrtc/video_frame.h
index fb7d7359af..9d2ed9fd4d 100644
--- a/webrtc/video_frame.h
+++ b/webrtc/video_frame.h
@@ -13,7 +13,7 @@
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/common_types.h"
-#include "webrtc/common_video/interface/video_frame_buffer.h"
+#include "webrtc/common_video/include/video_frame_buffer.h"
#include "webrtc/common_video/rotation.h"
#include "webrtc/typedefs.h"
@@ -158,6 +158,8 @@ class VideoFrame {
// called on a non-native-handle frame.
VideoFrame ConvertNativeToI420Frame() const;
+ bool EqualsFrame(const VideoFrame& frame) const;
+
private:
// An opaque reference counted handle that stores the pixel data.
rtc::scoped_refptr<webrtc::VideoFrameBuffer> video_frame_buffer_;
diff --git a/webrtc/video_receive_stream.h b/webrtc/video_receive_stream.h
index 275162ca1c..51d842616b 100644
--- a/webrtc/video_receive_stream.h
+++ b/webrtc/video_receive_stream.h
@@ -43,15 +43,6 @@ class VideoReceiveStream : public ReceiveStream {
// Name of the decoded payload (such as VP8). Maps back to the depacketizer
// used to unpack incoming packets.
std::string payload_name;
-
- // 'true' if the decoder handles rendering as well.
- bool is_renderer = false;
-
- // The expected delay for decoding and rendering, i.e. the frame will be
- // delivered this many milliseconds, if possible, earlier than the ideal
- // render time.
- // Note: Ignored if 'renderer' is false.
- int expected_delay_ms = 0;
};
struct Stats {
@@ -60,6 +51,7 @@ class VideoReceiveStream : public ReceiveStream {
int render_frame_rate = 0;
// Decoder stats.
+ std::string decoder_implementation_name = "unknown";
FrameCounts frame_counts;
int decode_ms = 0;
int max_decode_ms = 0;
@@ -113,6 +105,9 @@ class VideoReceiveStream : public ReceiveStream {
// See draft-alvestrand-rmcat-remb for information.
bool remb = false;
+ // See draft-holmer-rmcat-transport-wide-cc-extensions for details.
+ bool transport_cc = false;
+
// See NackConfig for description.
NackConfig nack;
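For context on the transport_cc field added above: it enables transport-wide congestion-control feedback, complementing or replacing REMB-based feedback. A hedged usage sketch, assuming a Config shaped like the struct in this header (surrounding stream setup omitted; MakeReceiveConfig is a hypothetical helper):

#include "webrtc/video_receive_stream.h"

webrtc::VideoReceiveStream::Config MakeReceiveConfig() {
  webrtc::VideoReceiveStream::Config config;
  config.rtp.remb = true;          // Receiver-estimated max bitrate feedback.
  config.rtp.transport_cc = true;  // Transport-wide sequence-number feedback.
  return config;
}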
diff --git a/webrtc/video_renderer.h b/webrtc/video_renderer.h
index fedd28b22e..7cb9ed1aa5 100644
--- a/webrtc/video_renderer.h
+++ b/webrtc/video_renderer.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_VIDEO_ENGINE_NEW_INCLUDE_VIDEO_RENDERER_H_
-#define WEBRTC_VIDEO_ENGINE_NEW_INCLUDE_VIDEO_RENDERER_H_
+#ifndef WEBRTC_VIDEO_RENDERER_H_
+#define WEBRTC_VIDEO_RENDERER_H_
namespace webrtc {
@@ -25,9 +25,14 @@ class VideoRenderer {
virtual bool IsTextureSupported() const = 0;
+  // This function returns true if WebRTC should not delay frames for
+  // smoothness. In that case, the renderer itself is expected to schedule
+  // frames in a way that optimizes smoothness.
+ virtual bool SmoothsRenderedFrames() const { return false; }
+
protected:
virtual ~VideoRenderer() {}
};
} // namespace webrtc
-#endif // WEBRTC_VIDEO_ENGINE_NEW_INCLUDE_VIDEO_RENDERER_H_
+#endif // WEBRTC_VIDEO_RENDERER_H_
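A renderer that paces frames itself would override the new hook as sketched below; MyRenderer is hypothetical, and RenderFrame's signature is assumed from this era of the VideoRenderer interface.

#include "webrtc/video_frame.h"
#include "webrtc/video_renderer.h"

class MyRenderer : public webrtc::VideoRenderer {
 public:
  void RenderFrame(const webrtc::VideoFrame& frame,
                   int time_to_render_ms) override {
    // Hand the frame to our own vsync-driven presentation queue.
  }
  bool IsTextureSupported() const override { return false; }
  // Opt out of WebRTC's smoothing delay; we schedule frames ourselves.
  bool SmoothsRenderedFrames() const override { return true; }
};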
diff --git a/webrtc/video_send_stream.h b/webrtc/video_send_stream.h
index 0c0af80ef7..83a96d3fed 100644
--- a/webrtc/video_send_stream.h
+++ b/webrtc/video_send_stream.h
@@ -62,6 +62,7 @@ class VideoSendStream : public SendStream {
};
struct Stats {
+ std::string encoder_implementation_name = "unknown";
int input_frame_rate = 0;
int encode_frame_rate = 0;
int avg_encode_time_ms = 0;
@@ -69,6 +70,7 @@ class VideoSendStream : public SendStream {
int target_media_bitrate_bps = 0;
int media_bitrate_bps = 0;
bool suspended = false;
+ bool bw_limited_resolution = false;
std::map<uint32_t, StreamStats> substreams;
};
@@ -100,6 +102,9 @@ class VideoSendStream : public SendStream {
std::vector<uint32_t> ssrcs;
+ // See RtcpMode for description.
+ RtcpMode rtcp_mode = RtcpMode::kCompound;
+
// Max RTP packet size delivered to send transport from VideoEngine.
size_t max_packet_size = kDefaultMaxPacketSize;
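The rtcp_mode field added above defaults to compound RTCP; reduced-size RTCP (RFC 5506) can be selected per stream. A hedged sketch, again assuming the struct layout shown in this header (MakeSendConfig is a hypothetical helper):

#include "webrtc/video_send_stream.h"

webrtc::VideoSendStream::Config MakeSendConfig() {
  webrtc::VideoSendStream::Config config;
  config.rtp.ssrcs.push_back(12345);  // Example SSRC.
  config.rtp.rtcp_mode = webrtc::RtcpMode::kReducedSize;  // Default: kCompound.
  return config;
}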
diff --git a/webrtc/voice_engine/BUILD.gn b/webrtc/voice_engine/BUILD.gn
index efe4781be7..82cd92355c 100644
--- a/webrtc/voice_engine/BUILD.gn
+++ b/webrtc/voice_engine/BUILD.gn
@@ -14,6 +14,8 @@ source_set("voice_engine") {
"channel.h",
"channel_manager.cc",
"channel_manager.h",
+ "channel_proxy.cc",
+ "channel_proxy.h",
"dtmf_inband.cc",
"dtmf_inband.h",
"dtmf_inband_queue.cc",
@@ -104,6 +106,7 @@ source_set("voice_engine") {
"../modules/audio_processing",
"../modules/bitrate_controller",
"../modules/media_file",
+ "../modules/pacing",
"../modules/rtp_rtcp",
"../modules/utility",
"../system_wrappers",
diff --git a/webrtc/voice_engine/channel.cc b/webrtc/voice_engine/channel.cc
index 013f908d34..b1b55e8acc 100644
--- a/webrtc/voice_engine/channel.cc
+++ b/webrtc/voice_engine/channel.cc
@@ -11,23 +11,26 @@
#include "webrtc/voice_engine/channel.h"
#include <algorithm>
+#include <utility>
#include "webrtc/base/checks.h"
#include "webrtc/base/format_macros.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/thread_checker.h"
#include "webrtc/base/timeutils.h"
#include "webrtc/common.h"
#include "webrtc/config.h"
#include "webrtc/modules/audio_device/include/audio_device.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/pacing/packet_router.h"
+#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
-#include "webrtc/modules/utility/interface/audio_frame_operations.h"
-#include "webrtc/modules/utility/interface/process_thread.h"
+#include "webrtc/modules/utility/include/audio_frame_operations.h"
+#include "webrtc/modules/utility/include/process_thread.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/logging.h"
#include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_external_media.h"
@@ -44,6 +47,104 @@
namespace webrtc {
namespace voe {
+class TransportFeedbackProxy : public TransportFeedbackObserver {
+ public:
+ TransportFeedbackProxy() : feedback_observer_(nullptr) {
+ pacer_thread_.DetachFromThread();
+ network_thread_.DetachFromThread();
+ }
+
+ void SetTransportFeedbackObserver(
+ TransportFeedbackObserver* feedback_observer) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ rtc::CritScope lock(&crit_);
+ feedback_observer_ = feedback_observer;
+ }
+
+ // Implements TransportFeedbackObserver.
+ void AddPacket(uint16_t sequence_number,
+ size_t length,
+ bool was_paced) override {
+ RTC_DCHECK(pacer_thread_.CalledOnValidThread());
+ rtc::CritScope lock(&crit_);
+ if (feedback_observer_)
+ feedback_observer_->AddPacket(sequence_number, length, was_paced);
+ }
+ void OnTransportFeedback(const rtcp::TransportFeedback& feedback) override {
+ RTC_DCHECK(network_thread_.CalledOnValidThread());
+ rtc::CritScope lock(&crit_);
+ if (feedback_observer_)
+ feedback_observer_->OnTransportFeedback(feedback);
+ }
+
+ private:
+ rtc::CriticalSection crit_;
+ rtc::ThreadChecker thread_checker_;
+ rtc::ThreadChecker pacer_thread_;
+ rtc::ThreadChecker network_thread_;
+ TransportFeedbackObserver* feedback_observer_ GUARDED_BY(&crit_);
+};
+
+class TransportSequenceNumberProxy : public TransportSequenceNumberAllocator {
+ public:
+ TransportSequenceNumberProxy() : seq_num_allocator_(nullptr) {
+ pacer_thread_.DetachFromThread();
+ }
+
+ void SetSequenceNumberAllocator(
+ TransportSequenceNumberAllocator* seq_num_allocator) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ rtc::CritScope lock(&crit_);
+ seq_num_allocator_ = seq_num_allocator;
+ }
+
+ // Implements TransportSequenceNumberAllocator.
+ uint16_t AllocateSequenceNumber() override {
+ RTC_DCHECK(pacer_thread_.CalledOnValidThread());
+ rtc::CritScope lock(&crit_);
+ if (!seq_num_allocator_)
+ return 0;
+ return seq_num_allocator_->AllocateSequenceNumber();
+ }
+
+ private:
+ rtc::CriticalSection crit_;
+ rtc::ThreadChecker thread_checker_;
+ rtc::ThreadChecker pacer_thread_;
+ TransportSequenceNumberAllocator* seq_num_allocator_ GUARDED_BY(&crit_);
+};
+
+class RtpPacketSenderProxy : public RtpPacketSender {
+ public:
+ RtpPacketSenderProxy() : rtp_packet_sender_(nullptr) {
+ }
+
+ void SetPacketSender(RtpPacketSender* rtp_packet_sender) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ rtc::CritScope lock(&crit_);
+ rtp_packet_sender_ = rtp_packet_sender;
+ }
+
+ // Implements RtpPacketSender.
+ void InsertPacket(Priority priority,
+ uint32_t ssrc,
+ uint16_t sequence_number,
+ int64_t capture_time_ms,
+ size_t bytes,
+ bool retransmission) override {
+ rtc::CritScope lock(&crit_);
+ if (rtp_packet_sender_) {
+ rtp_packet_sender_->InsertPacket(priority, ssrc, sequence_number,
+ capture_time_ms, bytes, retransmission);
+ }
+ }
+
+ private:
+ rtc::ThreadChecker thread_checker_;
+ rtc::CriticalSection crit_;
+ RtpPacketSender* rtp_packet_sender_ GUARDED_BY(&crit_);
+};
+
// Extend the default RTCP statistics struct with max_jitter, defined as the
// maximum jitter value seen in an RTCP report block.
struct ChannelStatistics : public RtcpStatistics {
@@ -318,11 +419,11 @@ int32_t Channel::OnInitializeDecoder(
int8_t payloadType,
const char payloadName[RTP_PAYLOAD_NAME_SIZE],
int frequency,
- uint8_t channels,
+ size_t channels,
uint32_t rate) {
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::OnInitializeDecoder(payloadType=%d, "
- "payloadName=%s, frequency=%u, channels=%u, rate=%u)",
+ "payloadName=%s, frequency=%u, channels=%" PRIuS ", rate=%u)",
payloadType, payloadName, frequency, channels, rate);
CodecInst receiveCodec = {0};
@@ -358,7 +459,7 @@ Channel::OnReceivedPayloadData(const uint8_t* payloadData,
{
WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::OnReceivedPayloadData(payloadSize=%" PRIuS ","
- " payloadType=%u, audioChannel=%u)",
+ " payloadType=%u, audioChannel=%" PRIuS ")",
payloadSize,
rtpHeader->header.payloadType,
rtpHeader->type.Audio.channel);
@@ -460,6 +561,21 @@ int32_t Channel::GetAudioFrame(int32_t id, AudioFrame* audioFrame)
}
}
+ {
+    // Pass the audio buffers to an optional sink callback before applying
+    // scaling/panning, as those apply to the mix operation.
+    // External recipients of the audio (e.g. via AudioTrack) will do their
+    // own mixing/dynamic processing.
+ CriticalSectionScoped cs(&_callbackCritSect);
+ if (audio_sink_) {
+ AudioSinkInterface::Data data(
+ &audioFrame->data_[0],
+ audioFrame->samples_per_channel_, audioFrame->sample_rate_hz_,
+ audioFrame->num_channels_, audioFrame->timestamp_);
+ audio_sink_->OnData(data);
+ }
+ }
+
float output_gain = 1.0f;
float left_pan = 1.0f;
float right_pan = 1.0f;
@@ -508,13 +624,10 @@ int32_t Channel::GetAudioFrame(int32_t id, AudioFrame* audioFrame)
const bool isStereo = (audioFrame->num_channels_ == 2);
if (_outputExternalMediaCallbackPtr)
{
- _outputExternalMediaCallbackPtr->Process(
- _channelId,
- kPlaybackPerChannel,
- (int16_t*)audioFrame->data_,
- audioFrame->samples_per_channel_,
- audioFrame->sample_rate_hz_,
- isStereo);
+ _outputExternalMediaCallbackPtr->Process(
+ _channelId, kPlaybackPerChannel, (int16_t*)audioFrame->data_,
+ audioFrame->samples_per_channel_, audioFrame->sample_rate_hz_,
+ isStereo);
}
}
@@ -690,89 +803,97 @@ Channel::Channel(int32_t channelId,
uint32_t instanceId,
RtcEventLog* const event_log,
const Config& config)
- : _fileCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
- _callbackCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
- volume_settings_critsect_(*CriticalSectionWrapper::CreateCriticalSection()),
- _instanceId(instanceId),
- _channelId(channelId),
- event_log_(event_log),
- rtp_header_parser_(RtpHeaderParser::Create()),
- rtp_payload_registry_(
- new RTPPayloadRegistry(RTPPayloadStrategy::CreateStrategy(true))),
- rtp_receive_statistics_(
- ReceiveStatistics::Create(Clock::GetRealTimeClock())),
- rtp_receiver_(
- RtpReceiver::CreateAudioReceiver(Clock::GetRealTimeClock(),
- this,
- this,
- this,
- rtp_payload_registry_.get())),
- telephone_event_handler_(rtp_receiver_->GetTelephoneEventHandler()),
- _outputAudioLevel(),
- _externalTransport(false),
- _inputFilePlayerPtr(NULL),
- _outputFilePlayerPtr(NULL),
- _outputFileRecorderPtr(NULL),
-      // Avoid conflicts with other channels by adding 1024 - 1026; we won't
-      // use as many as 1024 channels.
- _inputFilePlayerId(VoEModuleId(instanceId, channelId) + 1024),
- _outputFilePlayerId(VoEModuleId(instanceId, channelId) + 1025),
- _outputFileRecorderId(VoEModuleId(instanceId, channelId) + 1026),
- _outputFileRecording(false),
- _inbandDtmfQueue(VoEModuleId(instanceId, channelId)),
- _inbandDtmfGenerator(VoEModuleId(instanceId, channelId)),
- _outputExternalMedia(false),
- _inputExternalMediaCallbackPtr(NULL),
- _outputExternalMediaCallbackPtr(NULL),
-      _timeStamp(0),  // This is just an offset; the RTP module will add its
-                      // own random offset.
- _sendTelephoneEventPayloadType(106),
- ntp_estimator_(Clock::GetRealTimeClock()),
- jitter_buffer_playout_timestamp_(0),
- playout_timestamp_rtp_(0),
- playout_timestamp_rtcp_(0),
- playout_delay_ms_(0),
- _numberOfDiscardedPackets(0),
- send_sequence_number_(0),
- ts_stats_lock_(CriticalSectionWrapper::CreateCriticalSection()),
- rtp_ts_wraparound_handler_(new rtc::TimestampWrapAroundHandler()),
- capture_start_rtp_time_stamp_(-1),
- capture_start_ntp_time_ms_(-1),
- _engineStatisticsPtr(NULL),
- _outputMixerPtr(NULL),
- _transmitMixerPtr(NULL),
- _moduleProcessThreadPtr(NULL),
- _audioDeviceModulePtr(NULL),
- _voiceEngineObserverPtr(NULL),
- _callbackCritSectPtr(NULL),
- _transportPtr(NULL),
- _rxVadObserverPtr(NULL),
- _oldVadDecision(-1),
- _sendFrameType(0),
- _externalMixing(false),
- _mixFileWithMicrophone(false),
- _mute(false),
- _panLeft(1.0f),
- _panRight(1.0f),
- _outputGain(1.0f),
- _playOutbandDtmfEvent(false),
- _playInbandDtmfEvent(false),
- _lastLocalTimeStamp(0),
- _lastPayloadType(0),
- _includeAudioLevelIndication(false),
- _outputSpeechType(AudioFrame::kNormalSpeech),
- video_sync_lock_(CriticalSectionWrapper::CreateCriticalSection()),
- _average_jitter_buffer_delay_us(0),
- _previousTimestamp(0),
- _recPacketDelayMs(20),
- _RxVadDetection(false),
- _rxAgcIsEnabled(false),
- _rxNsIsEnabled(false),
- restored_packet_in_use_(false),
- rtcp_observer_(new VoERtcpObserver(this)),
- network_predictor_(new NetworkPredictor(Clock::GetRealTimeClock())),
- assoc_send_channel_lock_(CriticalSectionWrapper::CreateCriticalSection()),
- associate_send_channel_(ChannelOwner(nullptr)) {
+ : _fileCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
+ _callbackCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
+ volume_settings_critsect_(
+ *CriticalSectionWrapper::CreateCriticalSection()),
+ _instanceId(instanceId),
+ _channelId(channelId),
+ event_log_(event_log),
+ rtp_header_parser_(RtpHeaderParser::Create()),
+ rtp_payload_registry_(
+ new RTPPayloadRegistry(RTPPayloadStrategy::CreateStrategy(true))),
+ rtp_receive_statistics_(
+ ReceiveStatistics::Create(Clock::GetRealTimeClock())),
+ rtp_receiver_(
+ RtpReceiver::CreateAudioReceiver(Clock::GetRealTimeClock(),
+ this,
+ this,
+ this,
+ rtp_payload_registry_.get())),
+ telephone_event_handler_(rtp_receiver_->GetTelephoneEventHandler()),
+ _outputAudioLevel(),
+ _externalTransport(false),
+ _inputFilePlayerPtr(NULL),
+ _outputFilePlayerPtr(NULL),
+ _outputFileRecorderPtr(NULL),
+ // Avoid conflicts with other channels by adding 1024 - 1026;
+ // we won't use as many as 1024 channels.
+ _inputFilePlayerId(VoEModuleId(instanceId, channelId) + 1024),
+ _outputFilePlayerId(VoEModuleId(instanceId, channelId) + 1025),
+ _outputFileRecorderId(VoEModuleId(instanceId, channelId) + 1026),
+ _outputFileRecording(false),
+ _inbandDtmfQueue(VoEModuleId(instanceId, channelId)),
+ _inbandDtmfGenerator(VoEModuleId(instanceId, channelId)),
+ _outputExternalMedia(false),
+ _inputExternalMediaCallbackPtr(NULL),
+ _outputExternalMediaCallbackPtr(NULL),
+ _timeStamp(0), // This is just an offset; the RTP module will add its own
+ // random offset.
+ _sendTelephoneEventPayloadType(106),
+ ntp_estimator_(Clock::GetRealTimeClock()),
+ jitter_buffer_playout_timestamp_(0),
+ playout_timestamp_rtp_(0),
+ playout_timestamp_rtcp_(0),
+ playout_delay_ms_(0),
+ _numberOfDiscardedPackets(0),
+ send_sequence_number_(0),
+ ts_stats_lock_(CriticalSectionWrapper::CreateCriticalSection()),
+ rtp_ts_wraparound_handler_(new rtc::TimestampWrapAroundHandler()),
+ capture_start_rtp_time_stamp_(-1),
+ capture_start_ntp_time_ms_(-1),
+ _engineStatisticsPtr(NULL),
+ _outputMixerPtr(NULL),
+ _transmitMixerPtr(NULL),
+ _moduleProcessThreadPtr(NULL),
+ _audioDeviceModulePtr(NULL),
+ _voiceEngineObserverPtr(NULL),
+ _callbackCritSectPtr(NULL),
+ _transportPtr(NULL),
+ _rxVadObserverPtr(NULL),
+ _oldVadDecision(-1),
+ _sendFrameType(0),
+ _externalMixing(false),
+ _mixFileWithMicrophone(false),
+ _mute(false),
+ _panLeft(1.0f),
+ _panRight(1.0f),
+ _outputGain(1.0f),
+ _playOutbandDtmfEvent(false),
+ _playInbandDtmfEvent(false),
+ _lastLocalTimeStamp(0),
+ _lastPayloadType(0),
+ _includeAudioLevelIndication(false),
+ _outputSpeechType(AudioFrame::kNormalSpeech),
+ video_sync_lock_(CriticalSectionWrapper::CreateCriticalSection()),
+ _average_jitter_buffer_delay_us(0),
+ _previousTimestamp(0),
+ _recPacketDelayMs(20),
+ _RxVadDetection(false),
+ _rxAgcIsEnabled(false),
+ _rxNsIsEnabled(false),
+ restored_packet_in_use_(false),
+ rtcp_observer_(new VoERtcpObserver(this)),
+ network_predictor_(new NetworkPredictor(Clock::GetRealTimeClock())),
+ assoc_send_channel_lock_(CriticalSectionWrapper::CreateCriticalSection()),
+ associate_send_channel_(ChannelOwner(nullptr)),
+ pacing_enabled_(config.Get<VoicePacing>().enabled),
+ feedback_observer_proxy_(pacing_enabled_ ? new TransportFeedbackProxy()
+ : nullptr),
+ seq_num_allocator_proxy_(
+ pacing_enabled_ ? new TransportSequenceNumberProxy() : nullptr),
+ rtp_packet_sender_proxy_(pacing_enabled_ ? new RtpPacketSenderProxy()
+ : nullptr) {
WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::Channel() - ctor");
AudioCodingModule::Config acm_config;
@@ -797,6 +918,10 @@ Channel::Channel(int32_t channelId,
configuration.audio_messages = this;
configuration.receive_statistics = rtp_receive_statistics_.get();
configuration.bandwidth_callback = rtcp_observer_.get();
+ configuration.paced_sender = rtp_packet_sender_proxy_.get();
+ configuration.transport_sequence_number_allocator =
+ seq_num_allocator_proxy_.get();
+ configuration.transport_feedback_callback = feedback_observer_proxy_.get();
_rtpRtcpModule.reset(RtpRtcp::CreateRtpRtcp(configuration));
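The proxies wired into the RtpRtcp configuration above exist because the module is created in the constructor, before the pacer, sequence-number allocator and feedback observer are known; each proxy forwards to a target injected later through SetCongestionControlObjects(). A sketch of the late-binding idea only (hypothetical helper, not the real proxy classes in channel.cc):

#include "webrtc/base/criticalsection.h"

// Hypothetical late-bound forwarding slot. The real proxies implement the
// RtpPacketSender / TransportFeedbackObserver / sequence-number-allocator
// interfaces and forward each call like this under a lock.
template <typename T>
class LateBoundTarget {
 public:
  void Set(T* target) {
    rtc::CritScope lock(&crit_);
    target_ = target;  // May become null again at teardown.
  }

  template <typename Fn>
  void ForwardIfSet(Fn fn) {
    rtc::CritScope lock(&crit_);
    if (target_)
      fn(target_);  // Drop the call silently until a target is injected.
  }

 private:
  rtc::CriticalSection crit_;
  T* target_ = nullptr;
};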
@@ -954,8 +1079,8 @@ Channel::Init()
{
WEBRTC_TRACE(kTraceWarning, kTraceVoice,
VoEId(_instanceId,_channelId),
- "Channel::Init() unable to register %s (%d/%d/%d/%d) "
- "to RTP/RTCP receiver",
+ "Channel::Init() unable to register %s "
+ "(%d/%d/%" PRIuS "/%d) to RTP/RTCP receiver",
codec.plname, codec.pltype, codec.plfreq,
codec.channels, codec.rate);
}
@@ -963,8 +1088,8 @@ Channel::Init()
{
WEBRTC_TRACE(kTraceInfo, kTraceVoice,
VoEId(_instanceId,_channelId),
- "Channel::Init() %s (%d/%d/%d/%d) has been added to "
- "the RTP/RTCP receiver",
+ "Channel::Init() %s (%d/%d/%" PRIuS "/%d) has been "
+ "added to the RTP/RTCP receiver",
codec.plname, codec.pltype, codec.plfreq,
codec.channels, codec.rate);
}
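The log-format changes in these hunks follow from codec.channels becoming size_t elsewhere in the patch; the PRIuS macro from webrtc/base/format_macros.h supplies a portable conversion specifier. A minimal sketch of the idiom:

#include <stdio.h>

#include "webrtc/base/format_macros.h"  // Defines PRIuS for size_t.

void LogChannelCount(size_t channels) {
  // String-literal concatenation splices PRIuS into the format string,
  // avoiding "%zu", which not all targeted toolchains support.
  printf("channels=%" PRIuS "\n", channels);
}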
@@ -1020,11 +1145,11 @@ Channel::Init()
}
if (rx_audioproc_->noise_suppression()->set_level(kDefaultNsMode) != 0) {
- LOG_FERR1(LS_ERROR, noise_suppression()->set_level, kDefaultNsMode);
+ LOG(LS_ERROR) << "noise_suppression()->set_level(kDefaultNsMode) failed.";
return -1;
}
if (rx_audioproc_->gain_control()->set_mode(kDefaultRxAgcMode) != 0) {
- LOG_FERR1(LS_ERROR, gain_control()->set_mode, kDefaultRxAgcMode);
+ LOG(LS_ERROR) << "gain_control()->set_mode(kDefaultRxAgcMode) failed.";
return -1;
}
@@ -1060,6 +1185,11 @@ Channel::UpdateLocalTimeStamp()
return 0;
}
+void Channel::SetSink(rtc::scoped_ptr<AudioSinkInterface> sink) {
+ CriticalSectionScoped cs(&_callbackCritSect);
+ audio_sink_ = std::move(sink);
+}
+
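SetSink() above hands the channel a consumer of decoded playout audio, guarded by the callback lock. A sketch of a sink, assuming this revision's AudioSinkInterface (webrtc/audio/audio_sink.h, not shown in this diff) delivers audio through a single OnData(const Data&) callback; the interface shape is an assumption:

#include "webrtc/audio/audio_sink.h"
#include "webrtc/base/scoped_ptr.h"

// Hypothetical sink that just counts delivered blocks.
class CountingSink : public webrtc::AudioSinkInterface {
 public:
  void OnData(const Data& audio) override {
    ++blocks_;  // A real sink would consume the fields of |audio| here.
  }
  int blocks() const { return blocks_; }

 private:
  int blocks_ = 0;
};

// Ownership passes to the channel, matching the scoped_ptr parameter:
// channel->SetSink(rtc::scoped_ptr<webrtc::AudioSinkInterface>(
//     new CountingSink()));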
int32_t
Channel::StartPlayout()
{
@@ -1242,7 +1372,12 @@ Channel::DeRegisterVoiceEngineObserver()
int32_t
Channel::GetSendCodec(CodecInst& codec)
{
- return (audio_coding_->SendCodec(&codec));
+ auto send_codec = audio_coding_->SendCodec();
+ if (send_codec) {
+ codec = *send_codec;
+ return 0;
+ }
+ return -1;
}
int32_t
@@ -1456,7 +1591,7 @@ Channel::SetSendCNPayloadType(int type, PayloadFrequencies frequency)
CodecInst codec;
int32_t samplingFreqHz(-1);
- const int kMono = 1;
+ const size_t kMono = 1;
if (frequency == kFreq32000Hz)
samplingFreqHz = 32000;
else if (frequency == kFreq16000Hz)
@@ -2367,6 +2502,9 @@ int Channel::SendTelephoneEventOutband(unsigned char eventCode,
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
"Channel::SendTelephoneEventOutband(..., playDtmfEvent=%d)",
playDtmfEvent);
+ if (!Sending()) {
+ return -1;
+ }
_playOutbandDtmfEvent = playDtmfEvent;
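With the new guard, outband DTMF fails fast on an idle channel instead of queueing; through ChannelProxy (added later in this patch) the failure surfaces as a bool. A sketch of the resulting call pattern, with channel setup omitted:

// Sketch only: |proxy| is a voe::ChannelProxy wrapping a live channel.
proxy->SetSendTelephoneEventPayloadType(106);  // Ctor default in this file.
if (!proxy->SendTelephoneEventOutband(/*event=*/1, /*duration_ms=*/160)) {
  // Either the channel is not sending (the check added above) or the
  // underlying RTP module rejected the event.
}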
@@ -2779,6 +2917,38 @@ int Channel::SetReceiveAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
return 0;
}
+void Channel::EnableSendTransportSequenceNumber(int id) {
+ int ret =
+ SetSendRtpHeaderExtension(true, kRtpExtensionTransportSequenceNumber, id);
+ RTC_DCHECK_EQ(0, ret);
+}
+
+void Channel::SetCongestionControlObjects(
+ RtpPacketSender* rtp_packet_sender,
+ TransportFeedbackObserver* transport_feedback_observer,
+ PacketRouter* packet_router) {
+ RTC_DCHECK(packet_router != nullptr || packet_router_ != nullptr);
+ if (transport_feedback_observer) {
+ RTC_DCHECK(feedback_observer_proxy_.get());
+ feedback_observer_proxy_->SetTransportFeedbackObserver(
+ transport_feedback_observer);
+ }
+ if (rtp_packet_sender) {
+ RTC_DCHECK(rtp_packet_sender_proxy_.get());
+ rtp_packet_sender_proxy_->SetPacketSender(rtp_packet_sender);
+ }
+ if (seq_num_allocator_proxy_.get()) {
+ seq_num_allocator_proxy_->SetSequenceNumberAllocator(packet_router);
+ }
+ _rtpRtcpModule->SetStorePacketsStatus(rtp_packet_sender != nullptr, 600);
+ if (packet_router != nullptr) {
+ packet_router->AddRtpModule(_rtpRtcpModule.get());
+ } else {
+ packet_router_->RemoveRtpModule(_rtpRtcpModule.get());
+ }
+ packet_router_ = packet_router;
+}
+
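SetCongestionControlObjects() doubles as the detach path: null arguments clear the proxies, and the leading DCHECK only permits a null packet router when one was registered earlier. A sketch of the intended call sequence (call-site names are illustrative):

// Attach: injects the pacer, feedback observer and router, and enables
// packet storage (600 packets) so paced/retransmitted sends can be served.
channel->SetCongestionControlObjects(pacer, feedback_observer, packet_router);

// Detach: must run before the injected objects die, so the RTP module is
// removed from the router that AddRtpModule() registered above.
channel->SetCongestionControlObjects(nullptr, nullptr, nullptr);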
void Channel::SetRTCPStatus(bool enable) {
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
"Channel::SetRTCPStatus()");
@@ -3122,9 +3292,8 @@ Channel::GetREDStatus(bool& enabled, int& redPayloadtype)
enabled = audio_coding_->REDStatus();
if (enabled)
{
- int8_t payloadType(0);
- if (_rtpRtcpModule->SendREDPayloadType(payloadType) != 0)
- {
+ int8_t payloadType = 0;
+ if (_rtpRtcpModule->SendREDPayloadType(&payloadType) != 0) {
_engineStatisticsPtr->SetLastError(
VE_RTP_RTCP_MODULE_ERROR, kTraceError,
"GetREDStatus() failed to retrieve RED PT from RTP/RTCP "
@@ -3157,7 +3326,9 @@ bool Channel::GetCodecFECStatus() {
void Channel::SetNACKStatus(bool enable, int maxNumberOfPackets) {
// None of these functions can fail.
- _rtpRtcpModule->SetStorePacketsStatus(enable, maxNumberOfPackets);
+ // If pacing is enabled we always store packets.
+ if (!pacing_enabled_)
+ _rtpRtcpModule->SetStorePacketsStatus(enable, maxNumberOfPackets);
rtp_receive_statistics_->SetMaxReorderingThreshold(maxNumberOfPackets);
rtp_receiver_->SetNACKStatus(enable ? kNackRtcp : kNackOff);
if (enable)
@@ -3184,7 +3355,7 @@ Channel::Demultiplex(const AudioFrame& audioFrame)
void Channel::Demultiplex(const int16_t* audio_data,
int sample_rate,
size_t number_of_frames,
- int number_of_channels) {
+ size_t number_of_channels) {
CodecInst codec;
GetSendCodec(codec);
@@ -3410,33 +3581,17 @@ bool Channel::GetDelayEstimate(int* jitter_buffer_delay_ms,
return true;
}
-int Channel::LeastRequiredDelayMs() const {
- return audio_coding_->LeastRequiredDelayMs();
+uint32_t Channel::GetDelayEstimate() const {
+ int jitter_buffer_delay_ms = 0;
+ int playout_buffer_delay_ms = 0;
+ GetDelayEstimate(&jitter_buffer_delay_ms, &playout_buffer_delay_ms);
+ return jitter_buffer_delay_ms + playout_buffer_delay_ms;
}
-int Channel::SetInitialPlayoutDelay(int delay_ms)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
- "Channel::SetInitialPlayoutDelay()");
- if ((delay_ms < kVoiceEngineMinMinPlayoutDelayMs) ||
- (delay_ms > kVoiceEngineMaxMinPlayoutDelayMs))
- {
- _engineStatisticsPtr->SetLastError(
- VE_INVALID_ARGUMENT, kTraceError,
- "SetInitialPlayoutDelay() invalid min delay");
- return -1;
- }
- if (audio_coding_->SetInitialPlayoutDelay(delay_ms) != 0)
- {
- _engineStatisticsPtr->SetLastError(
- VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
- "SetInitialPlayoutDelay() failed to set min playout delay");
- return -1;
- }
- return 0;
+int Channel::LeastRequiredDelayMs() const {
+ return audio_coding_->LeastRequiredDelayMs();
}
-
int
Channel::SetMinimumPlayoutDelay(int delayMs)
{
@@ -3687,7 +3842,7 @@ Channel::InsertInbandDtmfTone()
sample < _audioFrame.samples_per_channel_;
sample++)
{
- for (int channel = 0;
+ for (size_t channel = 0;
channel < _audioFrame.num_channels_;
channel++)
{
@@ -3821,7 +3976,8 @@ Channel::RegisterReceiveCodecsToRTPModule()
kTraceVoice,
VoEId(_instanceId, _channelId),
"Channel::RegisterReceiveCodecsToRTPModule() unable"
- " to register %s (%d/%d/%d/%d) to RTP/RTCP receiver",
+ " to register %s (%d/%d/%" PRIuS "/%d) to RTP/RTCP "
+ "receiver",
codec.plname, codec.pltype, codec.plfreq,
codec.channels, codec.rate);
}
@@ -3831,7 +3987,7 @@ Channel::RegisterReceiveCodecsToRTPModule()
kTraceVoice,
VoEId(_instanceId, _channelId),
"Channel::RegisterReceiveCodecsToRTPModule() %s "
- "(%d/%d/%d/%d) has been added to the RTP/RTCP "
+ "(%d/%d/%" PRIuS "/%d) has been added to the RTP/RTCP "
"receiver",
codec.plname, codec.pltype, codec.plfreq,
codec.channels, codec.rate);
diff --git a/webrtc/voice_engine/channel.h b/webrtc/voice_engine/channel.h
index 46e67502d8..d15f9dbca7 100644
--- a/webrtc/voice_engine/channel.h
+++ b/webrtc/voice_engine/channel.h
@@ -11,18 +11,19 @@
#ifndef WEBRTC_VOICE_ENGINE_CHANNEL_H_
#define WEBRTC_VOICE_ENGINE_CHANNEL_H_
+#include "webrtc/audio/audio_sink.h"
+#include "webrtc/base/criticalsection.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/common_audio/resampler/include/push_resampler.h"
#include "webrtc/common_types.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module.h"
-#include "webrtc/modules/audio_conference_mixer/interface/audio_conference_mixer_defines.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module.h"
+#include "webrtc/modules/audio_conference_mixer/include/audio_conference_mixer_defines.h"
#include "webrtc/modules/audio_processing/rms_level.h"
-#include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"
-#include "webrtc/modules/rtp_rtcp/interface/remote_ntp_time_estimator.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
-#include "webrtc/modules/utility/interface/file_player.h"
-#include "webrtc/modules/utility/interface/file_recorder.h"
+#include "webrtc/modules/rtp_rtcp/include/remote_ntp_time_estimator.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "webrtc/modules/utility/include/file_player.h"
+#include "webrtc/modules/utility/include/file_recorder.h"
#include "webrtc/voice_engine/dtmf_inband.h"
#include "webrtc/voice_engine/dtmf_inband_queue.h"
#include "webrtc/voice_engine/include/voe_audio_processing.h"
@@ -48,6 +49,7 @@ class AudioDeviceModule;
class Config;
class CriticalSectionWrapper;
class FileWrapper;
+class PacketRouter;
class ProcessThread;
class ReceiveStatistics;
class RemoteNtpTimeEstimator;
@@ -68,9 +70,12 @@ struct SenderInfo;
namespace voe {
class OutputMixer;
+class RtpPacketSenderProxy;
class Statistics;
class StatisticsProxy;
+class TransportFeedbackProxy;
class TransmitMixer;
+class TransportSequenceNumberProxy;
class VoERtcpObserver;
// Helper class to simplify locking scheme for members that are accessed from
@@ -188,6 +193,8 @@ public:
CriticalSectionWrapper* callbackCritSect);
int32_t UpdateLocalTimeStamp();
+ void SetSink(rtc::scoped_ptr<AudioSinkInterface> sink);
+
// API methods
// VoEBase
@@ -279,8 +286,8 @@ public:
// VoEVideoSync
bool GetDelayEstimate(int* jitter_buffer_delay_ms,
int* playout_buffer_delay_ms) const;
+ uint32_t GetDelayEstimate() const;
int LeastRequiredDelayMs() const;
- int SetInitialPlayoutDelay(int delay_ms);
int SetMinimumPlayoutDelay(int delayMs);
int GetPlayoutTimestamp(unsigned int& timestamp);
int SetInitTimestamp(unsigned int timestamp);
@@ -321,6 +328,13 @@ public:
int SetReceiveAudioLevelIndicationStatus(bool enable, unsigned char id);
int SetSendAbsoluteSenderTimeStatus(bool enable, unsigned char id);
int SetReceiveAbsoluteSenderTimeStatus(bool enable, unsigned char id);
+ void EnableSendTransportSequenceNumber(int id);
+
+ void SetCongestionControlObjects(
+ RtpPacketSender* rtp_packet_sender,
+ TransportFeedbackObserver* transport_feedback_observer,
+ PacketRouter* packet_router);
+
void SetRTCPStatus(bool enable);
int GetRTCPStatus(bool& enabled);
int SetRTCP_CNAME(const char cName[256]);
@@ -367,7 +381,7 @@ public:
int32_t OnInitializeDecoder(int8_t payloadType,
const char payloadName[RTP_PAYLOAD_NAME_SIZE],
int frequency,
- uint8_t channels,
+ size_t channels,
uint32_t rate) override;
void OnIncomingSSRCChanged(uint32_t ssrc) override;
void OnIncomingCSRCChanged(uint32_t CSRC, bool added) override;
@@ -437,7 +451,7 @@ public:
void Demultiplex(const int16_t* audio_data,
int sample_rate,
size_t number_of_frames,
- int number_of_channels);
+ size_t number_of_channels);
uint32_t PrepareEncodeAndSend(int mixingFrequency);
uint32_t EncodeAndSend();
@@ -497,6 +511,7 @@ private:
TelephoneEventHandler* telephone_event_handler_;
rtc::scoped_ptr<RtpRtcp> _rtpRtcpModule;
rtc::scoped_ptr<AudioCodingModule> audio_coding_;
+ rtc::scoped_ptr<AudioSinkInterface> audio_sink_;
AudioLevel _outputAudioLevel;
bool _externalTransport;
AudioFrame _audioFrame;
@@ -584,6 +599,12 @@ private:
// An associated send channel.
rtc::scoped_ptr<CriticalSectionWrapper> assoc_send_channel_lock_;
ChannelOwner associate_send_channel_ GUARDED_BY(assoc_send_channel_lock_);
+
+ bool pacing_enabled_;
+ PacketRouter* packet_router_ = nullptr;
+ rtc::scoped_ptr<TransportFeedbackProxy> feedback_observer_proxy_;
+ rtc::scoped_ptr<TransportSequenceNumberProxy> seq_num_allocator_proxy_;
+ rtc::scoped_ptr<RtpPacketSenderProxy> rtp_packet_sender_proxy_;
};
} // namespace voe
diff --git a/webrtc/voice_engine/channel_proxy.cc b/webrtc/voice_engine/channel_proxy.cc
new file mode 100644
index 0000000000..f54c81ec47
--- /dev/null
+++ b/webrtc/voice_engine/channel_proxy.cc
@@ -0,0 +1,153 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/voice_engine/channel_proxy.h"
+
+#include <utility>
+
+#include "webrtc/audio/audio_sink.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/voice_engine/channel.h"
+
+namespace webrtc {
+namespace voe {
+ChannelProxy::ChannelProxy() : channel_owner_(nullptr) {}
+
+ChannelProxy::ChannelProxy(const ChannelOwner& channel_owner)
+ : channel_owner_(channel_owner) {
+ RTC_CHECK(channel_owner_.channel());
+}
+
+ChannelProxy::~ChannelProxy() {}
+
+void ChannelProxy::SetRTCPStatus(bool enable) {
+ channel()->SetRTCPStatus(enable);
+}
+
+void ChannelProxy::SetLocalSSRC(uint32_t ssrc) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ int error = channel()->SetLocalSSRC(ssrc);
+ RTC_DCHECK_EQ(0, error);
+}
+
+void ChannelProxy::SetRTCP_CNAME(const std::string& c_name) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ // Note: VoERTP_RTCP::SetRTCP_CNAME() accepts a char[256] array.
+ std::string c_name_limited = c_name.substr(0, 255);
+ int error = channel()->SetRTCP_CNAME(c_name_limited.c_str());
+ RTC_DCHECK_EQ(0, error);
+}
+
+void ChannelProxy::SetSendAbsoluteSenderTimeStatus(bool enable, int id) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ int error = channel()->SetSendAbsoluteSenderTimeStatus(enable, id);
+ RTC_DCHECK_EQ(0, error);
+}
+
+void ChannelProxy::SetSendAudioLevelIndicationStatus(bool enable, int id) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ int error = channel()->SetSendAudioLevelIndicationStatus(enable, id);
+ RTC_DCHECK_EQ(0, error);
+}
+
+void ChannelProxy::EnableSendTransportSequenceNumber(int id) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ channel()->EnableSendTransportSequenceNumber(id);
+}
+
+void ChannelProxy::SetReceiveAbsoluteSenderTimeStatus(bool enable, int id) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ int error = channel()->SetReceiveAbsoluteSenderTimeStatus(enable, id);
+ RTC_DCHECK_EQ(0, error);
+}
+
+void ChannelProxy::SetReceiveAudioLevelIndicationStatus(bool enable, int id) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ int error = channel()->SetReceiveAudioLevelIndicationStatus(enable, id);
+ RTC_DCHECK_EQ(0, error);
+}
+
+void ChannelProxy::SetCongestionControlObjects(
+ RtpPacketSender* rtp_packet_sender,
+ TransportFeedbackObserver* transport_feedback_observer,
+ PacketRouter* packet_router) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ channel()->SetCongestionControlObjects(
+ rtp_packet_sender, transport_feedback_observer, packet_router);
+}
+
+CallStatistics ChannelProxy::GetRTCPStatistics() const {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ CallStatistics stats = {0};
+ int error = channel()->GetRTPStatistics(stats);
+ RTC_DCHECK_EQ(0, error);
+ return stats;
+}
+
+std::vector<ReportBlock> ChannelProxy::GetRemoteRTCPReportBlocks() const {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ std::vector<webrtc::ReportBlock> blocks;
+ int error = channel()->GetRemoteRTCPReportBlocks(&blocks);
+ RTC_DCHECK_EQ(0, error);
+ return blocks;
+}
+
+NetworkStatistics ChannelProxy::GetNetworkStatistics() const {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ NetworkStatistics stats = {0};
+ int error = channel()->GetNetworkStatistics(stats);
+ RTC_DCHECK_EQ(0, error);
+ return stats;
+}
+
+AudioDecodingCallStats ChannelProxy::GetDecodingCallStatistics() const {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ AudioDecodingCallStats stats;
+ channel()->GetDecodingCallStatistics(&stats);
+ return stats;
+}
+
+int32_t ChannelProxy::GetSpeechOutputLevelFullRange() const {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ uint32_t level = 0;
+ int error = channel()->GetSpeechOutputLevelFullRange(level);
+ RTC_DCHECK_EQ(0, error);
+ return static_cast<int32_t>(level);
+}
+
+uint32_t ChannelProxy::GetDelayEstimate() const {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ return channel()->GetDelayEstimate();
+}
+
+bool ChannelProxy::SetSendTelephoneEventPayloadType(int payload_type) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ return channel()->SetSendTelephoneEventPayloadType(payload_type) == 0;
+}
+
+bool ChannelProxy::SendTelephoneEventOutband(uint8_t event,
+ uint32_t duration_ms) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ return
+ channel()->SendTelephoneEventOutband(event, duration_ms, 10, false) == 0;
+}
+
+void ChannelProxy::SetSink(rtc::scoped_ptr<AudioSinkInterface> sink) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ channel()->SetSink(std::move(sink));
+}
+
+Channel* ChannelProxy::channel() const {
+ RTC_DCHECK(channel_owner_.channel());
+ return channel_owner_.channel();
+}
+
+} // namespace voe
+} // namespace webrtc
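Almost every method above opens with the same DCHECK; this is the stock rtc::ThreadChecker recipe for classes confined to their construction thread. A minimal standalone sketch:

#include "webrtc/base/checks.h"
#include "webrtc/base/thread_checker.h"

class SingleThreadedCounter {
 public:
  // rtc::ThreadChecker binds to the thread that constructs the object.
  void Increment() {
    RTC_DCHECK(thread_checker_.CalledOnValidThread());
    ++count_;  // No lock needed: only the owning thread gets this far.
  }

 private:
  rtc::ThreadChecker thread_checker_;
  int count_ = 0;
};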
diff --git a/webrtc/voice_engine/channel_proxy.h b/webrtc/voice_engine/channel_proxy.h
new file mode 100644
index 0000000000..b990d91734
--- /dev/null
+++ b/webrtc/voice_engine/channel_proxy.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_CHANNEL_PROXY_H_
+#define WEBRTC_VOICE_ENGINE_CHANNEL_PROXY_H_
+
+#include "webrtc/base/thread_checker.h"
+#include "webrtc/voice_engine/channel_manager.h"
+#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
+
+#include <string>
+#include <vector>
+
+namespace webrtc {
+
+class AudioSinkInterface;
+class PacketRouter;
+class RtpPacketSender;
+class TransportFeedbackObserver;
+
+namespace voe {
+
+class Channel;
+
+// This class provides the "view" of a voe::Channel that we need to implement
+// webrtc::AudioSendStream and webrtc::AudioReceiveStream. It serves two
+// purposes:
+// 1. Allow mocking just the interfaces used, instead of the entire
+// voe::Channel class.
+// 2. Provide a refined interface for the stream classes, including assumptions
+// on return values and input adaptation.
+class ChannelProxy {
+ public:
+ ChannelProxy();
+ explicit ChannelProxy(const ChannelOwner& channel_owner);
+ virtual ~ChannelProxy();
+
+ virtual void SetRTCPStatus(bool enable);
+ virtual void SetLocalSSRC(uint32_t ssrc);
+ virtual void SetRTCP_CNAME(const std::string& c_name);
+ virtual void SetSendAbsoluteSenderTimeStatus(bool enable, int id);
+ virtual void SetSendAudioLevelIndicationStatus(bool enable, int id);
+ virtual void EnableSendTransportSequenceNumber(int id);
+ virtual void SetReceiveAbsoluteSenderTimeStatus(bool enable, int id);
+ virtual void SetReceiveAudioLevelIndicationStatus(bool enable, int id);
+ virtual void SetCongestionControlObjects(
+ RtpPacketSender* rtp_packet_sender,
+ TransportFeedbackObserver* transport_feedback_observer,
+ PacketRouter* packet_router);
+
+ virtual CallStatistics GetRTCPStatistics() const;
+ virtual std::vector<ReportBlock> GetRemoteRTCPReportBlocks() const;
+ virtual NetworkStatistics GetNetworkStatistics() const;
+ virtual AudioDecodingCallStats GetDecodingCallStatistics() const;
+ virtual int32_t GetSpeechOutputLevelFullRange() const;
+ virtual uint32_t GetDelayEstimate() const;
+
+ virtual bool SetSendTelephoneEventPayloadType(int payload_type);
+ virtual bool SendTelephoneEventOutband(uint8_t event, uint32_t duration_ms);
+
+ virtual void SetSink(rtc::scoped_ptr<AudioSinkInterface> sink);
+
+ private:
+ Channel* channel() const;
+
+ rtc::ThreadChecker thread_checker_;
+ ChannelOwner channel_owner_;
+};
+} // namespace voe
+} // namespace webrtc
+
+#endif // WEBRTC_VOICE_ENGINE_CHANNEL_PROXY_H_
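Since every ChannelProxy method is virtual and a default constructor exists, tests can subclass the proxy instead of standing up a real voe::Channel. A sketch of both sides (the mock class name is hypothetical, and |channel_manager| is an assumed in-scope ChannelManager):

// Production-style use: wrap a live channel and forward calls to it.
webrtc::voe::ChannelProxy proxy(channel_manager.GetChannel(channel_id));
proxy.SetRTCPStatus(true);
proxy.SetLocalSSRC(0x1234);

// Test-style use: override only what the stream under test touches.
class MockChannelProxy : public webrtc::voe::ChannelProxy {
 public:
  void SetRTCPStatus(bool enable) override { rtcp_enabled_ = enable; }
  bool rtcp_enabled_ = false;
};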
diff --git a/webrtc/voice_engine/include/voe_base.h b/webrtc/voice_engine/include/voe_base.h
index c82ad3dc3a..3d07fa78ff 100644
--- a/webrtc/voice_engine/include/voe_base.h
+++ b/webrtc/voice_engine/include/voe_base.h
@@ -87,6 +87,8 @@ class WEBRTC_DLLEXPORT VoiceEngine {
static int SetAndroidObjects(void* javaVM, void* context);
#endif
+ static std::string GetVersionString();
+
protected:
VoiceEngine() {}
~VoiceEngine() {}
diff --git a/webrtc/voice_engine/include/voe_rtp_rtcp.h b/webrtc/voice_engine/include/voe_rtp_rtcp.h
index 6d17501772..dd3609aa00 100644
--- a/webrtc/voice_engine/include/voe_rtp_rtcp.h
+++ b/webrtc/voice_engine/include/voe_rtp_rtcp.h
@@ -201,20 +201,6 @@ class WEBRTC_DLLEXPORT VoERTP_RTCP {
return -1;
}
- // Sets the Forward Error Correction (FEC) status on a specific |channel|.
- // TODO(minyue): Remove SetFECStatus() when SetFECStatus() is replaced by
- // SetREDStatus() in fakewebrtcvoiceengine.
- virtual int SetFECStatus(int channel, bool enable, int redPayloadtype = -1) {
- return SetREDStatus(channel, enable, redPayloadtype);
- };
-
- // Gets the FEC status on a specific |channel|.
- // TODO(minyue): Remove GetFECStatus() when GetFECStatus() is replaced by
- // GetREDStatus() in fakewebrtcvoiceengine.
- virtual int GetFECStatus(int channel, bool& enabled, int& redPayloadtype) {
- return SetREDStatus(channel, enabled, redPayloadtype);
- }
-
// This function enables Negative Acknowledgment (NACK) using RTCP,
// implemented based on RFC 4585. NACK retransmits RTP packets if lost on
// the network. This creates a lossless transport at the expense of delay.
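The comment block retained above documents the NACK toggle whose channel-level implementation (Channel::SetNACKStatus) is patched earlier so that pacing keeps packet storage permanently on. A hedged sketch of driving it through the public interface; the exact SetNACKStatus() signature is assumed, as this hunk cuts off before the declaration:

// Assumed interface shape: VoERTP_RTCP::SetNACKStatus(channel, enable,
// maxNoPackets). GetInterface()/Release() follow the usual VoE sub-API
// reference counting.
webrtc::VoERTP_RTCP* rtp_rtcp = webrtc::VoERTP_RTCP::GetInterface(voe);
rtp_rtcp->SetNACKStatus(channel, true, 256);  // Keep up to 256 packets.
rtp_rtcp->Release();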
diff --git a/webrtc/voice_engine/include/voe_video_sync.h b/webrtc/voice_engine/include/voe_video_sync.h
index 1143cefb0e..655ba63543 100644
--- a/webrtc/voice_engine/include/voe_video_sync.h
+++ b/webrtc/voice_engine/include/voe_video_sync.h
@@ -64,13 +64,6 @@ class WEBRTC_DLLEXPORT VoEVideoSync {
// computes based on inter-arrival times and its playout mode.
virtual int SetMinimumPlayoutDelay(int channel, int delay_ms) = 0;
- // Sets an initial delay for the playout jitter buffer. The playout of the
- // audio is delayed by |delay_ms| in milliseconds. Thereafter, the delay is
- // maintained, unless NetEq's internal mechanism requires a higher latency.
- // Such a latency is computed based on inter-arrival times and NetEq's
- // playout mode.
- virtual int SetInitialPlayoutDelay(int channel, int delay_ms) = 0;
-
// Gets the |jitter_buffer_delay_ms| (including the algorithmic delay), and
// the |playout_buffer_delay_ms| for a specified |channel|.
virtual int GetDelayEstimate(int channel,
diff --git a/webrtc/voice_engine/level_indicator.cc b/webrtc/voice_engine/level_indicator.cc
index 059b15f927..68a837edb9 100644
--- a/webrtc/voice_engine/level_indicator.cc
+++ b/webrtc/voice_engine/level_indicator.cc
@@ -9,7 +9,7 @@
*/
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/voice_engine/level_indicator.h"
diff --git a/webrtc/voice_engine/monitor_module.h b/webrtc/voice_engine/monitor_module.h
index 42ea74d7e2..fe915b320b 100644
--- a/webrtc/voice_engine/monitor_module.h
+++ b/webrtc/voice_engine/monitor_module.h
@@ -11,7 +11,7 @@
#ifndef WEBRTC_VOICE_ENGINE_MONITOR_MODULE_H
#define WEBRTC_VOICE_ENGINE_MONITOR_MODULE_H
-#include "webrtc/modules/interface/module.h"
+#include "webrtc/modules/include/module.h"
#include "webrtc/typedefs.h"
#include "webrtc/voice_engine/voice_engine_defines.h"
diff --git a/webrtc/voice_engine/output_mixer.cc b/webrtc/voice_engine/output_mixer.cc
index 31b429c498..0dacf35eaf 100644
--- a/webrtc/voice_engine/output_mixer.cc
+++ b/webrtc/voice_engine/output_mixer.cc
@@ -10,8 +10,9 @@
#include "webrtc/voice_engine/output_mixer.h"
+#include "webrtc/base/format_macros.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
-#include "webrtc/modules/utility/interface/audio_frame_operations.h"
+#include "webrtc/modules/utility/include/audio_frame_operations.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/file_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h"
@@ -462,11 +463,12 @@ int OutputMixer::StopRecordingPlayout()
}
int OutputMixer::GetMixedAudio(int sample_rate_hz,
- int num_channels,
+ size_t num_channels,
AudioFrame* frame) {
- WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,-1),
- "OutputMixer::GetMixedAudio(sample_rate_hz=%d, num_channels=%d)",
- sample_rate_hz, num_channels);
+ WEBRTC_TRACE(
+ kTraceStream, kTraceVoice, VoEId(_instanceId,-1),
+ "OutputMixer::GetMixedAudio(sample_rate_hz=%d, num_channels=%" PRIuS ")",
+ sample_rate_hz, num_channels);
// --- Record playout if enabled
{
@@ -517,8 +519,20 @@ OutputMixer::DoOperationsOnCombinedSignal(bool feed_data_to_apm)
}
// --- Far-end Voice Quality Enhancement (AudioProcessing Module)
- if (feed_data_to_apm)
- APMProcessReverseStream();
+ if (feed_data_to_apm) {
+ // Convert from mixing to AudioProcessing sample rate, similarly to how it
+ // is done on the send side. Downmix to mono.
+ AudioFrame frame;
+ frame.num_channels_ = 1;
+ frame.sample_rate_hz_ = _audioProcessingModulePtr->input_sample_rate_hz();
+ RemixAndResample(_audioFrame, &audioproc_resampler_, &frame);
+
+ if (_audioProcessingModulePtr->AnalyzeReverseStream(&frame) != 0) {
+ WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+ "AudioProcessingModule::AnalyzeReverseStream() => error");
+ RTC_DCHECK(false);
+ }
+ }
// --- External media processing
{
@@ -549,13 +563,6 @@ OutputMixer::DoOperationsOnCombinedSignal(bool feed_data_to_apm)
// Private methods
// ----------------------------------------------------------------------------
-void OutputMixer::APMProcessReverseStream() {
- if (_audioProcessingModulePtr->ProcessReverseStream(&_audioFrame) == -1) {
- WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
- "AudioProcessingModule::ProcessReverseStream() => error");
- }
-}
-
int
OutputMixer::InsertInbandDtmfTone()
{
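The DoOperationsOnCombinedSignal() hunk above inlines the old APMProcessReverseStream() helper but first remixes the playout mix to mono at the APM input rate; this far-end audio is the echo canceller's reference signal. The same flow in isolation, as a sketch (assuming voe::RemixAndResample from webrtc/voice_engine/utility.h and a PushResampler<int16_t>, matching the members used above):

void FeedFarEnd(webrtc::AudioProcessing* apm,
                const webrtc::AudioFrame& mixed_playout,
                webrtc::PushResampler<int16_t>* resampler) {
  webrtc::AudioFrame frame;
  frame.num_channels_ = 1;                              // Downmix to mono.
  frame.sample_rate_hz_ = apm->input_sample_rate_hz();  // Match APM rate.
  webrtc::voe::RemixAndResample(mixed_playout, resampler, &frame);
  if (apm->AnalyzeReverseStream(&frame) != 0) {
    // The reverse-stream reference was dropped for this 10 ms block;
    // echo cancellation quality may suffer briefly.
  }
}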
diff --git a/webrtc/voice_engine/output_mixer.h b/webrtc/voice_engine/output_mixer.h
index 71e55e4885..91387e6256 100644
--- a/webrtc/voice_engine/output_mixer.h
+++ b/webrtc/voice_engine/output_mixer.h
@@ -13,9 +13,9 @@
#include "webrtc/common_audio/resampler/include/push_resampler.h"
#include "webrtc/common_types.h"
-#include "webrtc/modules/audio_conference_mixer/interface/audio_conference_mixer.h"
-#include "webrtc/modules/audio_conference_mixer/interface/audio_conference_mixer_defines.h"
-#include "webrtc/modules/utility/interface/file_recorder.h"
+#include "webrtc/modules/audio_conference_mixer/include/audio_conference_mixer.h"
+#include "webrtc/modules/audio_conference_mixer/include/audio_conference_mixer_defines.h"
+#include "webrtc/modules/utility/include/file_recorder.h"
#include "webrtc/voice_engine/dtmf_inband.h"
#include "webrtc/voice_engine/level_indicator.h"
#include "webrtc/voice_engine/voice_engine_defines.h"
@@ -63,7 +63,7 @@ public:
int32_t SetAnonymousMixabilityStatus(MixerParticipant& participant,
bool mixable);
- int GetMixedAudio(int sample_rate_hz, int num_channels,
+ int GetMixedAudio(int sample_rate_hz, size_t num_channels,
AudioFrame* audioFrame);
// VoEVolumeControl
@@ -102,7 +102,6 @@ public:
private:
OutputMixer(uint32_t instanceId);
- void APMProcessReverseStream();
int InsertInbandDtmfTone();
// uses
diff --git a/webrtc/voice_engine/shared_data.h b/webrtc/voice_engine/shared_data.h
index 311bfa063d..9c3d4b1c57 100644
--- a/webrtc/voice_engine/shared_data.h
+++ b/webrtc/voice_engine/shared_data.h
@@ -14,7 +14,7 @@
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/audio_device/include/audio_device.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
-#include "webrtc/modules/utility/interface/process_thread.h"
+#include "webrtc/modules/utility/include/process_thread.h"
#include "webrtc/voice_engine/channel_manager.h"
#include "webrtc/voice_engine/statistics.h"
#include "webrtc/voice_engine/voice_engine_defines.h"
diff --git a/webrtc/voice_engine/test/android/android_test/jni/android_test.cc b/webrtc/voice_engine/test/android/android_test/jni/android_test.cc
index ceafca9da3..766b9e7a8e 100644
--- a/webrtc/voice_engine/test/android/android_test/jni/android_test.cc
+++ b/webrtc/voice_engine/test/android/android_test/jni/android_test.cc
@@ -15,7 +15,7 @@
#include "webrtc/voice_engine/test/android/android_test/jni/org_webrtc_voiceengine_test_AndroidTest.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
+#include "webrtc/base/platform_thread.h"
#include "webrtc/voice_engine/include/voe_audio_processing.h"
#include "webrtc/voice_engine/include/voe_base.h"
@@ -177,7 +177,7 @@ private:
static bool Run(void* ptr);
bool Process();
private:
- rtc::scoped_ptr<ThreadWrapper> _thread;
+ rtc::PlatformThread _thread;
};
ThreadTest::~ThreadTest()
@@ -188,7 +188,7 @@ ThreadTest::~ThreadTest()
ThreadTest::ThreadTest()
-{
- _thread = ThreadWrapper::CreateThread(Run, this, "ThreadTest thread");
+ : _thread(Run, this, "ThreadTest thread")
+{
}
bool ThreadTest::Run(void* ptr)
diff --git a/webrtc/voice_engine/test/auto_test/fakes/conference_transport.cc b/webrtc/voice_engine/test/auto_test/fakes/conference_transport.cc
index 28ab352608..70f68298f5 100644
--- a/webrtc/voice_engine/test/auto_test/fakes/conference_transport.cc
+++ b/webrtc/voice_engine/test/auto_test/fakes/conference_transport.cc
@@ -40,9 +40,7 @@ ConferenceTransport::ConferenceTransport()
: pq_crit_(webrtc::CriticalSectionWrapper::CreateCriticalSection()),
stream_crit_(webrtc::CriticalSectionWrapper::CreateCriticalSection()),
packet_event_(webrtc::EventWrapper::Create()),
- thread_(webrtc::ThreadWrapper::CreateThread(Run,
- this,
- "ConferenceTransport")),
+ thread_(Run, this, "ConferenceTransport"),
rtt_ms_(0),
stream_count_(0),
rtp_header_parser_(webrtc::RtpHeaderParser::Create()) {
@@ -79,8 +77,8 @@ ConferenceTransport::ConferenceTransport()
EXPECT_EQ(0, remote_network_->RegisterExternalTransport(reflector_, *this));
EXPECT_EQ(0, remote_rtp_rtcp_->SetLocalSSRC(reflector_, kReflectorSsrc));
- thread_->Start();
- thread_->SetPriority(webrtc::kHighPriority);
+ thread_.Start();
+ thread_.SetPriority(rtc::kHighPriority);
}
ConferenceTransport::~ConferenceTransport() {
@@ -93,7 +91,7 @@ ConferenceTransport::~ConferenceTransport() {
RemoveStream(stream->first);
}
- EXPECT_TRUE(thread_->Stop());
+ thread_.Stop();
remote_file_->Release();
remote_rtp_rtcp_->Release();
diff --git a/webrtc/voice_engine/test/auto_test/fakes/conference_transport.h b/webrtc/voice_engine/test/auto_test/fakes/conference_transport.h
index 7b45e6d3e1..5d105aa39e 100644
--- a/webrtc/voice_engine/test/auto_test/fakes/conference_transport.h
+++ b/webrtc/voice_engine/test/auto_test/fakes/conference_transport.h
@@ -17,12 +17,12 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/basictypes.h"
+#include "webrtc/base/platform_thread.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/common_types.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_codec.h"
#include "webrtc/voice_engine/include/voe_file.h"
@@ -131,7 +131,7 @@ class ConferenceTransport: public webrtc::Transport {
const rtc::scoped_ptr<webrtc::CriticalSectionWrapper> pq_crit_;
const rtc::scoped_ptr<webrtc::CriticalSectionWrapper> stream_crit_;
const rtc::scoped_ptr<webrtc::EventWrapper> packet_event_;
- const rtc::scoped_ptr<webrtc::ThreadWrapper> thread_;
+ rtc::PlatformThread thread_;
unsigned int rtt_ms_;
unsigned int stream_count_;
diff --git a/webrtc/voice_engine/test/auto_test/fixtures/after_initialization_fixture.h b/webrtc/voice_engine/test/auto_test/fixtures/after_initialization_fixture.h
index 1a1075c585..116ff0aec3 100644
--- a/webrtc/voice_engine/test/auto_test/fixtures/after_initialization_fixture.h
+++ b/webrtc/voice_engine/test/auto_test/fixtures/after_initialization_fixture.h
@@ -13,6 +13,7 @@
#include <deque>
+#include "webrtc/base/platform_thread.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/common_types.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
@@ -20,7 +21,6 @@
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/event_wrapper.h"
#include "webrtc/system_wrappers/include/sleep.h"
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
#include "webrtc/voice_engine/test/auto_test/fixtures/before_initialization_fixture.h"
class TestErrorObserver;
@@ -30,16 +30,14 @@ class LoopBackTransport : public webrtc::Transport {
LoopBackTransport(webrtc::VoENetwork* voe_network, int channel)
: crit_(webrtc::CriticalSectionWrapper::CreateCriticalSection()),
packet_event_(webrtc::EventWrapper::Create()),
- thread_(webrtc::ThreadWrapper::CreateThread(NetworkProcess,
- this,
- "LoopBackTransport")),
+ thread_(NetworkProcess, this, "LoopBackTransport"),
channel_(channel),
voe_network_(voe_network),
transmitted_packets_(0) {
- thread_->Start();
+ thread_.Start();
}
- ~LoopBackTransport() { thread_->Stop(); }
+ ~LoopBackTransport() { thread_.Stop(); }
bool SendRtp(const uint8_t* data,
size_t len,
@@ -147,7 +145,7 @@ class LoopBackTransport : public webrtc::Transport {
const rtc::scoped_ptr<webrtc::CriticalSectionWrapper> crit_;
const rtc::scoped_ptr<webrtc::EventWrapper> packet_event_;
- const rtc::scoped_ptr<webrtc::ThreadWrapper> thread_;
+ rtc::PlatformThread thread_;
std::deque<Packet> packet_queue_ GUARDED_BY(crit_.get());
const int channel_;
std::map<uint32_t, int> channels_ GUARDED_BY(crit_.get());
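This fixture shows the complete ThreadWrapper to rtc::PlatformThread migration: a value member replaces the scoped_ptr, construction takes (function, context, name), and Start()/Stop() return void. A condensed sketch of the API as used throughout these test changes:

#include "webrtc/base/platform_thread.h"

class Worker {
 public:
  Worker() : thread_(&Worker::Run, this, "Worker") {}

  void Start() {
    thread_.Start();
    thread_.SetPriority(rtc::kHighPriority);  // Was webrtc::kHighPriority.
  }
  void Stop() { thread_.Stop(); }  // Signals the loop to exit, then joins.

 private:
  // Invoked repeatedly for as long as it returns true, the same run-function
  // convention ThreadWrapper used.
  static bool Run(void* obj) {
    static_cast<Worker*>(obj)->DoOneUnitOfWork();
    return true;
  }
  void DoOneUnitOfWork() {}

  rtc::PlatformThread thread_;
};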
diff --git a/webrtc/voice_engine/test/auto_test/fixtures/before_initialization_fixture.h b/webrtc/voice_engine/test/auto_test/fixtures/before_initialization_fixture.h
index 7a3fad8399..51db985b4a 100644
--- a/webrtc/voice_engine/test/auto_test/fixtures/before_initialization_fixture.h
+++ b/webrtc/voice_engine/test/auto_test/fixtures/before_initialization_fixture.h
@@ -16,7 +16,6 @@
#include "webrtc/common.h"
#include "webrtc/common_types.h"
#include "webrtc/engine_configurations.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
#include "webrtc/voice_engine/include/voe_audio_processing.h"
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_codec.h"
diff --git a/webrtc/voice_engine/test/auto_test/standard/codec_test.cc b/webrtc/voice_engine/test/auto_test/standard/codec_test.cc
index eeb12aba04..3a3d83031d 100644
--- a/webrtc/voice_engine/test/auto_test/standard/codec_test.cc
+++ b/webrtc/voice_engine/test/auto_test/standard/codec_test.cc
@@ -50,7 +50,7 @@ static bool IsNotViableSendCodec(const char* codec_name) {
TEST_F(CodecTest, PcmuIsDefaultCodecAndHasTheRightValues) {
EXPECT_EQ(0, voe_codec_->GetSendCodec(channel_, codec_instance_));
- EXPECT_EQ(1, codec_instance_.channels);
+ EXPECT_EQ(1u, codec_instance_.channels);
EXPECT_EQ(160, codec_instance_.pacsize);
EXPECT_EQ(8000, codec_instance_.plfreq);
EXPECT_EQ(0, codec_instance_.pltype);
@@ -153,17 +153,6 @@ TEST_F(CodecTest, OpusMaxPlaybackRateCanBeSet) {
}
}
-TEST_F(CodecTest, OpusMaxPlaybackRateCannotBeSetForNonOpus) {
- for (int i = 0; i < voe_codec_->NumOfCodecs(); ++i) {
- voe_codec_->GetCodec(i, codec_instance_);
- if (!_stricmp("opus", codec_instance_.plname)) {
- continue;
- }
- voe_codec_->SetSendCodec(channel_, codec_instance_);
- EXPECT_EQ(-1, voe_codec_->SetOpusMaxPlaybackRate(channel_, 16000));
- }
-}
-
TEST_F(CodecTest, OpusDtxCanBeSetForOpus) {
for (int i = 0; i < voe_codec_->NumOfCodecs(); ++i) {
voe_codec_->GetCodec(i, codec_instance_);
@@ -183,7 +172,6 @@ TEST_F(CodecTest, OpusDtxCannotBeSetForNonOpus) {
continue;
}
voe_codec_->SetSendCodec(channel_, codec_instance_);
- EXPECT_EQ(-1, voe_codec_->SetOpusDtx(channel_, false));
EXPECT_EQ(-1, voe_codec_->SetOpusDtx(channel_, true));
}
}
diff --git a/webrtc/voice_engine/test/auto_test/standard/external_media_test.cc b/webrtc/voice_engine/test/auto_test/standard/external_media_test.cc
index b4daba5afd..4f86010a18 100644
--- a/webrtc/voice_engine/test/auto_test/standard/external_media_test.cc
+++ b/webrtc/voice_engine/test/auto_test/standard/external_media_test.cc
@@ -9,7 +9,7 @@
*/
#include "webrtc/base/arraysize.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/voice_engine/include/voe_external_media.h"
#include "webrtc/voice_engine/test/auto_test/fakes/fake_media_process.h"
#include "webrtc/voice_engine/test/auto_test/fixtures/after_streaming_fixture.h"
diff --git a/webrtc/voice_engine/test/auto_test/standard/rtp_rtcp_extensions.cc b/webrtc/voice_engine/test/auto_test/standard/rtp_rtcp_extensions.cc
index 780a7f7ba9..1dc15dff49 100644
--- a/webrtc/voice_engine/test/auto_test/standard/rtp_rtcp_extensions.cc
+++ b/webrtc/voice_engine/test/auto_test/standard/rtp_rtcp_extensions.cc
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
#include "webrtc/system_wrappers/include/atomic32.h"
#include "webrtc/system_wrappers/include/sleep.h"
#include "webrtc/voice_engine/test/auto_test/fixtures/before_streaming_fixture.h"
diff --git a/webrtc/voice_engine/test/auto_test/standard/rtp_rtcp_test.cc b/webrtc/voice_engine/test/auto_test/standard/rtp_rtcp_test.cc
index 14dca27d1c..6efa55d516 100644
--- a/webrtc/voice_engine/test/auto_test/standard/rtp_rtcp_test.cc
+++ b/webrtc/voice_engine/test/auto_test/standard/rtp_rtcp_test.cc
@@ -101,8 +101,7 @@ TEST_F(RtpRtcpTest, RemoteRtcpCnameHasPropagatedToRemoteSide) {
EXPECT_STREQ(RTCP_CNAME, char_buffer);
}
-// Flakily hangs on Linux. code.google.com/p/webrtc/issues/detail?id=2178.
-TEST_F(RtpRtcpTest, DISABLED_ON_LINUX(SSRCPropagatesCorrectly)) {
+TEST_F(RtpRtcpTest, SSRCPropagatesCorrectly) {
unsigned int local_ssrc = 1234;
EXPECT_EQ(0, voe_base_->StopSend(channel_));
EXPECT_EQ(0, voe_rtp_rtcp_->SetLocalSSRC(channel_, local_ssrc));
diff --git a/webrtc/voice_engine/test/auto_test/voe_cpu_test.cc b/webrtc/voice_engine/test/auto_test/voe_cpu_test.cc
index ad6116dcdd..5666b3f8d1 100644
--- a/webrtc/voice_engine/test/auto_test/voe_cpu_test.cc
+++ b/webrtc/voice_engine/test/auto_test/voe_cpu_test.cc
@@ -18,7 +18,7 @@
#endif
#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/test/channel_transport/include/channel_transport.h"
+#include "webrtc/test/channel_transport/channel_transport.h"
#include "webrtc/voice_engine/test/auto_test/voe_test_defines.h"
using namespace webrtc;
diff --git a/webrtc/voice_engine/test/auto_test/voe_output_test.cc b/webrtc/voice_engine/test/auto_test/voe_output_test.cc
new file mode 100644
index 0000000000..3bedbc3b17
--- /dev/null
+++ b/webrtc/voice_engine/test/auto_test/voe_output_test.cc
@@ -0,0 +1,203 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/random.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/timeutils.h"
+#include "webrtc/system_wrappers/include/sleep.h"
+#include "webrtc/test/channel_transport/channel_transport.h"
+#include "webrtc/test/testsupport/fileutils.h"
+#include "webrtc/voice_engine/test/auto_test/voe_standard_test.h"
+
+namespace {
+
+const char kIp[] = "127.0.0.1";
+const int kPort = 1234;
+const webrtc::CodecInst kCodecInst = {120, "opus", 48000, 960, 2, 64000};
+
+} // namespace
+
+namespace voetest {
+
+using webrtc::Random;
+using webrtc::test::VoiceChannelTransport;
+
+// This test allows a check on the output signal in an end-to-end call.
+class OutputTest {
+ public:
+ OutputTest(int16_t lower_bound, int16_t upper_bound);
+ ~OutputTest();
+
+ void Start();
+
+ void EnableOutputCheck();
+ void DisableOutputCheck();
+ void SetOutputBound(int16_t lower_bound, int16_t upper_bound);
+ void Mute();
+ void Unmute();
+ void SetBitRate(int rate);
+
+ private:
+ // This class checks all output values and asserts when a sample falls
+ // outside the defined range.
+ class VoEOutputCheckMediaProcess : public VoEMediaProcess {
+ public:
+ VoEOutputCheckMediaProcess(int16_t lower_bound, int16_t upper_bound);
+
+ void set_enabled(bool enabled) { enabled_ = enabled; }
+ void Process(int channel,
+ ProcessingTypes type,
+ int16_t audio10ms[],
+ size_t length,
+ int samplingFreq,
+ bool isStereo) override;
+
+ private:
+ bool enabled_;
+ int16_t lower_bound_;
+ int16_t upper_bound_;
+ };
+
+ VoETestManager manager_;
+ VoEOutputCheckMediaProcess output_checker_;
+
+ int channel_;
+};
+
+OutputTest::OutputTest(int16_t lower_bound, int16_t upper_bound)
+ : output_checker_(lower_bound, upper_bound) {
+ EXPECT_TRUE(manager_.Init());
+ manager_.GetInterfaces();
+
+ VoEBase* base = manager_.BasePtr();
+ VoECodec* codec = manager_.CodecPtr();
+ VoENetwork* network = manager_.NetworkPtr();
+
+ EXPECT_EQ(0, base->Init());
+
+ channel_ = base->CreateChannel();
+
+ // |network| will take care of the lifetime of |transport|.
+ VoiceChannelTransport* transport =
+ new VoiceChannelTransport(network, channel_);
+
+ EXPECT_EQ(0, transport->SetSendDestination(kIp, kPort));
+ EXPECT_EQ(0, transport->SetLocalReceiver(kPort));
+
+ EXPECT_EQ(0, codec->SetSendCodec(channel_, kCodecInst));
+ EXPECT_EQ(0, codec->SetOpusDtx(channel_, true));
+
+ EXPECT_EQ(0, manager_.VolumeControlPtr()->SetSpeakerVolume(255));
+
+ manager_.ExternalMediaPtr()->RegisterExternalMediaProcessing(
+ channel_, ProcessingTypes::kPlaybackPerChannel, output_checker_);
+}
+
+OutputTest::~OutputTest() {
+ EXPECT_EQ(0, manager_.NetworkPtr()->DeRegisterExternalTransport(channel_));
+ EXPECT_EQ(0, manager_.ReleaseInterfaces());
+}
+
+void OutputTest::Start() {
+ const std::string file_name =
+ webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm");
+ const webrtc::FileFormats kInputFormat = webrtc::kFileFormatPcm32kHzFile;
+
+ ASSERT_EQ(0, manager_.FilePtr()->StartPlayingFileAsMicrophone(
+ channel_, file_name.c_str(), true, false, kInputFormat, 1.0));
+
+ VoEBase* base = manager_.BasePtr();
+ ASSERT_EQ(0, base->StartPlayout(channel_));
+ ASSERT_EQ(0, base->StartSend(channel_));
+}
+
+void OutputTest::EnableOutputCheck() {
+ output_checker_.set_enabled(true);
+}
+
+void OutputTest::DisableOutputCheck() {
+ output_checker_.set_enabled(false);
+}
+
+void OutputTest::Mute() {
+ manager_.VolumeControlPtr()->SetInputMute(channel_, true);
+}
+
+void OutputTest::Unmute() {
+ manager_.VolumeControlPtr()->SetInputMute(channel_, false);
+}
+
+void OutputTest::SetBitRate(int rate) {
+ manager_.CodecPtr()->SetBitRate(channel_, rate);
+}
+
+OutputTest::VoEOutputCheckMediaProcess::VoEOutputCheckMediaProcess(
+ int16_t lower_bound, int16_t upper_bound)
+ : enabled_(false),
+ lower_bound_(lower_bound),
+ upper_bound_(upper_bound) {}
+
+void OutputTest::VoEOutputCheckMediaProcess::Process(int channel,
+ ProcessingTypes type,
+ int16_t* audio10ms,
+ size_t length,
+ int samplingFreq,
+ bool isStereo) {
+ if (!enabled_)
+ return;
+ const int num_channels = isStereo ? 2 : 1;
+ for (size_t i = 0; i < length; ++i) {
+ for (int c = 0; c < num_channels; ++c) {
+ ASSERT_GE(audio10ms[i * num_channels + c], lower_bound_);
+ ASSERT_LE(audio10ms[i * num_channels + c], upper_bound_);
+ }
+ }
+}
+
+// This test checks that Opus does not produce high noise (a noise pump) when
+// DTX is enabled. The microphone is toggled on and off, and the values of
+// the output signal while muted must stay within the given bounds.
+// We do not run this test on bots. Developers who want to see the result
+// and/or listen to the sound quality can run this test manually.
+TEST(OutputTest, DISABLED_OpusDtxHasNoNoisePump) {
+ const int kRuntimeMs = 20000;
+ const uint32_t kUnmuteTimeMs = 1000;
+ const int kCheckAfterMute = 2000;
+ const uint32_t kCheckTimeMs = 2000;
+ const int kMinOpusRate = 6000;
+ const int kMaxOpusRate = 64000;
+
+#if defined(OPUS_FIXED_POINT)
+ const int16_t kDtxBoundForSilence = 20;
+#else
+ const int16_t kDtxBoundForSilence = 2;
+#endif
+
+ OutputTest test(-kDtxBoundForSilence, kDtxBoundForSilence);
+ Random random(1234ull);
+
+ uint32_t start_time = rtc::Time();
+ test.Start();
+ while (rtc::TimeSince(start_time) < kRuntimeMs) {
+ webrtc::SleepMs(random.Rand(kUnmuteTimeMs - kUnmuteTimeMs / 10,
+ kUnmuteTimeMs + kUnmuteTimeMs / 10));
+ test.Mute();
+ webrtc::SleepMs(kCheckAfterMute);
+ test.EnableOutputCheck();
+ webrtc::SleepMs(random.Rand(kCheckTimeMs - kCheckTimeMs / 10,
+ kCheckTimeMs + kCheckTimeMs / 10));
+ test.DisableOutputCheck();
+ test.SetBitRate(random.Rand(kMinOpusRate, kMaxOpusRate));
+ test.Unmute();
+ }
+}
+
+} // namespace voetest
diff --git a/webrtc/voice_engine/test/auto_test/voe_standard_test.cc b/webrtc/voice_engine/test/auto_test/voe_standard_test.cc
index 22eee15bf0..a187c4bb6c 100644
--- a/webrtc/voice_engine/test/auto_test/voe_standard_test.cc
+++ b/webrtc/voice_engine/test/auto_test/voe_standard_test.cc
@@ -117,14 +117,6 @@ bool VoETestManager::Init() {
if (initialized_)
return true;
- if (VoiceEngine::SetTraceFile(NULL) != -1) {
- // should not be possible to call a Trace method before the VoE is
- // created
- TEST_LOG("\nError at line: %i (VoiceEngine::SetTraceFile()"
- "should fail)!\n", __LINE__);
- return false;
- }
-
voice_engine_ = VoiceEngine::Create();
if (!voice_engine_) {
TEST_LOG("Failed to create VoiceEngine\n");
@@ -230,11 +222,6 @@ int VoETestManager::ReleaseInterfaces() {
releaseOK = false;
}
- if (VoiceEngine::SetTraceFile(NULL) != -1) {
- TEST_LOG("\nError at line: %i (VoiceEngine::SetTraceFile()"
- "should fail)!\n", __LINE__);
- }
-
return (releaseOK == true) ? 0 : -1;
}
diff --git a/webrtc/voice_engine/test/auto_test/voe_standard_test.h b/webrtc/voice_engine/test/auto_test/voe_standard_test.h
index 3bf89362d5..b92595982c 100644
--- a/webrtc/voice_engine/test/auto_test/voe_standard_test.h
+++ b/webrtc/voice_engine/test/auto_test/voe_standard_test.h
@@ -44,7 +44,6 @@
#ifdef WEBRTC_VOICE_ENGINE_NETEQ_STATS_API
namespace webrtc {
class CriticalSectionWrapper;
-class ThreadWrapper;
class VoENetEqStats;
}
#endif
diff --git a/webrtc/voice_engine/test/auto_test/voe_stress_test.cc b/webrtc/voice_engine/test/auto_test/voe_stress_test.cc
index 960276b699..259eff0ccc 100644
--- a/webrtc/voice_engine/test/auto_test/voe_stress_test.cc
+++ b/webrtc/voice_engine/test/auto_test/voe_stress_test.cc
@@ -26,7 +26,7 @@
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/system_wrappers/include/sleep.h"
-#include "webrtc/test/channel_transport/include/channel_transport.h"
+#include "webrtc/test/channel_transport/channel_transport.h"
#include "webrtc/voice_engine/test/auto_test/voe_standard_test.h"
#include "webrtc/voice_engine/test/auto_test/voe_test_defines.h"
#include "webrtc/voice_engine/voice_engine_defines.h" // defines build macros
@@ -334,9 +334,9 @@ int VoEStressTest::MultipleThreadsTest() {
int rnd(0);
// Start extra thread
- _ptrExtraApiThread = ThreadWrapper::CreateThread(RunExtraApi, this,
- "StressTestExtraApiThread");
- VALIDATE_STRESS(!_ptrExtraApiThread->Start());
+ _ptrExtraApiThread.reset(
+ new rtc::PlatformThread(RunExtraApi, this, "StressTestExtraApiThread"));
+ _ptrExtraApiThread->Start();
// Some possible extensions include:
// Add more API calls to randomize
@@ -365,7 +365,7 @@ int VoEStressTest::MultipleThreadsTest() {
ANL();
// Stop extra thread
- VALIDATE_STRESS(!_ptrExtraApiThread->Stop());
+ _ptrExtraApiThread->Stop();
///////////// End test /////////////
diff --git a/webrtc/voice_engine/test/auto_test/voe_stress_test.h b/webrtc/voice_engine/test/auto_test/voe_stress_test.h
index 69b3a92bb3..715e8ef724 100644
--- a/webrtc/voice_engine/test/auto_test/voe_stress_test.h
+++ b/webrtc/voice_engine/test/auto_test/voe_stress_test.h
@@ -11,11 +11,10 @@
#ifndef WEBRTC_VOICE_ENGINE_VOE_STRESS_TEST_H
#define WEBRTC_VOICE_ENGINE_VOE_STRESS_TEST_H
-#include "webrtc/system_wrappers/include/thread_wrapper.h"
+#include "webrtc/base/platform_thread.h"
+#include "webrtc/base/scoped_ptr.h"
namespace voetest {
-// TODO(andrew): using directives are not permitted.
-using namespace webrtc;
class VoETestManager;
@@ -38,7 +37,8 @@ class VoEStressTest {
VoETestManager& _mgr;
- rtc::scoped_ptr<ThreadWrapper> _ptrExtraApiThread;
+ // TODO(pbos): Remove scoped_ptr and use PlatformThread directly.
+ rtc::scoped_ptr<rtc::PlatformThread> _ptrExtraApiThread;
};
} // namespace voetest
diff --git a/webrtc/voice_engine/test/cmd_test/voe_cmd_test.cc b/webrtc/voice_engine/test/cmd_test/voe_cmd_test.cc
index 312ac7ca31..ccfe3c2bde 100644
--- a/webrtc/voice_engine/test/cmd_test/voe_cmd_test.cc
+++ b/webrtc/voice_engine/test/cmd_test/voe_cmd_test.cc
@@ -19,11 +19,12 @@
#include "gflags/gflags.h"
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/format_macros.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/call/rtc_event_log.h"
#include "webrtc/engine_configurations.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
-#include "webrtc/test/channel_transport/include/channel_transport.h"
+#include "webrtc/test/channel_transport/channel_transport.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/test/testsupport/trace_to_stderr.h"
#include "webrtc/voice_engine/include/voe_audio_processing.h"
@@ -113,8 +114,8 @@ void PrintCodecs(bool opus_stereo) {
int res = codec->GetCodec(i, codec_params);
VALIDATE;
SetStereoIfOpus(opus_stereo, &codec_params);
- printf("%2d. %3d %s/%d/%d \n", i, codec_params.pltype, codec_params.plname,
- codec_params.plfreq, codec_params.channels);
+ printf("%2d. %3d %s/%d/%" PRIuS " \n", i, codec_params.pltype,
+ codec_params.plname, codec_params.plfreq, codec_params.channels);
}
}
diff --git a/webrtc/voice_engine/transmit_mixer.cc b/webrtc/voice_engine/transmit_mixer.cc
index 94592cf616..1204b04b50 100644
--- a/webrtc/voice_engine/transmit_mixer.cc
+++ b/webrtc/voice_engine/transmit_mixer.cc
@@ -11,10 +11,10 @@
#include "webrtc/voice_engine/transmit_mixer.h"
#include "webrtc/base/format_macros.h"
-#include "webrtc/modules/utility/interface/audio_frame_operations.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/modules/utility/include/audio_frame_operations.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/system_wrappers/include/logging.h"
#include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/voice_engine/channel.h"
#include "webrtc/voice_engine/channel_manager.h"
@@ -23,8 +23,6 @@
#include "webrtc/voice_engine/utility.h"
#include "webrtc/voice_engine/voe_base_impl.h"
-#define WEBRTC_ABS(a) (((a) < 0) ? -(a) : (a))
-
namespace webrtc {
namespace voe {
@@ -36,12 +34,20 @@ TransmitMixer::OnPeriodicProcess()
"TransmitMixer::OnPeriodicProcess()");
#if defined(WEBRTC_VOICE_ENGINE_TYPING_DETECTION)
- if (_typingNoiseWarningPending)
+ bool send_typing_noise_warning = false;
+ bool typing_noise_detected = false;
{
+ CriticalSectionScoped cs(&_critSect);
+ if (_typingNoiseWarningPending) {
+ send_typing_noise_warning = true;
+ typing_noise_detected = _typingNoiseDetected;
+ _typingNoiseWarningPending = false;
+ }
+ }
+ if (send_typing_noise_warning) {
CriticalSectionScoped cs(&_callbackCritSect);
- if (_voiceEngineObserverPtr)
- {
- if (_typingNoiseDetected) {
+ if (_voiceEngineObserverPtr) {
+ if (typing_noise_detected) {
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
"TransmitMixer::OnPeriodicProcess() => "
"CallbackOnError(VE_TYPING_NOISE_WARNING)");
@@ -57,7 +63,6 @@ TransmitMixer::OnPeriodicProcess()
VE_TYPING_NOISE_OFF_WARNING);
}
}
- _typingNoiseWarningPending = false;
}
#endif
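
Annotation: this rewrite fixes a race. _typingNoiseWarningPending and _typingNoiseDetected are written from the capture path (see the TypingDetection() hunk below) and read from the periodic-process thread, so the hunk snapshots and clears them under _critSect, then fires the observer callback under _callbackCritSect only. A minimal sketch of the snapshot-then-notify pattern, with std::mutex standing in for CriticalSectionScoped and illustrative names throughout:

    #include <mutex>

    class Observer {
     public:
      virtual ~Observer() {}
      virtual void OnTypingNoise(bool detected) = 0;
    };

    class TypingNoiseNotifier {
     public:
      // Capture thread: record state under the state lock only.
      void OnTypingProcessed(bool detected) {
        std::lock_guard<std::mutex> lock(state_mutex_);
        warning_pending_ = true;
        noise_detected_ = detected;
      }

      // Periodic thread: snapshot and clear under the state lock, then call
      // out under the callback lock so the observer never runs while
      // state_mutex_ is held.
      void OnPeriodicProcess() {
        bool send_warning = false;
        bool detected = false;
        {
          std::lock_guard<std::mutex> lock(state_mutex_);
          if (warning_pending_) {
            send_warning = true;
            detected = noise_detected_;
            warning_pending_ = false;
          }
        }
        if (send_warning) {
          std::lock_guard<std::mutex> lock(callback_mutex_);
          if (observer_) observer_->OnTypingNoise(detected);
        }
      }

     private:
      std::mutex state_mutex_;
      std::mutex callback_mutex_;
      bool warning_pending_ = false;
      bool noise_detected_ = false;
      Observer* observer_ = nullptr;
    };
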
@@ -295,7 +300,8 @@ TransmitMixer::SetAudioProcessingModule(AudioProcessing* audioProcessingModule)
return 0;
}
-void TransmitMixer::GetSendCodecInfo(int* max_sample_rate, int* max_channels) {
+void TransmitMixer::GetSendCodecInfo(int* max_sample_rate,
+ size_t* max_channels) {
*max_sample_rate = 8000;
*max_channels = 1;
for (ChannelManager::Iterator it(_channelManagerPtr); it.IsValid();
@@ -313,7 +319,7 @@ void TransmitMixer::GetSendCodecInfo(int* max_sample_rate, int* max_channels) {
int32_t
TransmitMixer::PrepareDemux(const void* audioSamples,
size_t nSamples,
- uint8_t nChannels,
+ size_t nChannels,
uint32_t samplesPerSec,
uint16_t totalDelayMS,
int32_t clockDrift,
@@ -322,7 +328,7 @@ TransmitMixer::PrepareDemux(const void* audioSamples,
{
WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1),
"TransmitMixer::PrepareDemux(nSamples=%" PRIuS ", "
- "nChannels=%u, samplesPerSec=%u, totalDelayMS=%u, "
+ "nChannels=%" PRIuS ", samplesPerSec=%u, totalDelayMS=%u, "
"clockDrift=%d, currentMicLevel=%u)",
nSamples, nChannels, samplesPerSec, totalDelayMS, clockDrift,
currentMicLevel);
@@ -427,8 +433,8 @@ TransmitMixer::DemuxAndMix()
}
void TransmitMixer::DemuxAndMix(const int voe_channels[],
- int number_of_voe_channels) {
- for (int i = 0; i < number_of_voe_channels; ++i) {
+ size_t number_of_voe_channels) {
+ for (size_t i = 0; i < number_of_voe_channels; ++i) {
voe::ChannelOwner ch = _channelManagerPtr->GetChannel(voe_channels[i]);
voe::Channel* channel_ptr = ch.channel();
if (channel_ptr) {
@@ -460,8 +466,8 @@ TransmitMixer::EncodeAndSend()
}
void TransmitMixer::EncodeAndSend(const int voe_channels[],
- int number_of_voe_channels) {
- for (int i = 0; i < number_of_voe_channels; ++i) {
+ size_t number_of_voe_channels) {
+ for (size_t i = 0; i < number_of_voe_channels; ++i) {
voe::ChannelOwner ch = _channelManagerPtr->GetChannel(voe_channels[i]);
voe::Channel* channel_ptr = ch.channel();
if (channel_ptr && channel_ptr->Sending())
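
Annotation: the channel-count parameters of DemuxAndMix() and EncodeAndSend() switch from int to size_t, and the loop indices follow so that `i < number_of_voe_channels` never mixes signedness. The shape of the change in isolation (visit is a hypothetical callback):

    #include <cstddef>

    void ForEachChannel(const int voe_channels[], size_t number_of_voe_channels,
                        void (*visit)(int channel_id)) {
      // size_t index against a size_t bound: no -Wsign-compare warnings and no
      // possibility of a negative count.
      for (size_t i = 0; i < number_of_voe_channels; ++i)
        visit(voe_channels[i]);
    }
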
@@ -693,8 +699,7 @@ int TransmitMixer::StartRecordingMicrophone(const char* fileName,
const uint32_t notificationTime(0); // Not supported in VoE
CodecInst dummyCodec = { 100, "L16", 16000, 320, 1, 320000 };
- if (codecInst != NULL &&
- (codecInst->channels < 0 || codecInst->channels > 2))
+ if (codecInst != NULL && codecInst->channels > 2)
{
_engineStatisticsPtr->SetLastError(
VE_BAD_ARGUMENT, kTraceError,
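
Annotation: with codecInst->channels now unsigned (size_t), the old `channels < 0` test is tautologically false and compilers flag it, so only the upper bound survives. The surviving validity check in isolation:

    #include <cstddef>

    bool ChannelCountSupported(size_t channels) {
      // A size_t can never be negative, so the lower-bound test from the old
      // int-based code is dropped.
      return channels <= 2;
    }
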
@@ -1128,10 +1133,10 @@ bool TransmitMixer::IsRecordingMic()
void TransmitMixer::GenerateAudioFrame(const int16_t* audio,
size_t samples_per_channel,
- int num_channels,
+ size_t num_channels,
int sample_rate_hz) {
int codec_rate;
- int num_codec_channels;
+ size_t num_codec_channels;
GetSendCodecInfo(&codec_rate, &num_codec_channels);
stereo_codec_ = num_codec_channels == 2;
@@ -1236,15 +1241,13 @@ int32_t TransmitMixer::MixOrReplaceAudioWithFile(
void TransmitMixer::ProcessAudio(int delay_ms, int clock_drift,
int current_mic_level, bool key_pressed) {
if (audioproc_->set_stream_delay_ms(delay_ms) != 0) {
- // A redundant warning is reported in AudioDevice, which we've throttled
- // to avoid flooding the logs. Relegate this one to LS_VERBOSE to avoid
- // repeating the problem here.
- LOG_FERR1(LS_VERBOSE, set_stream_delay_ms, delay_ms);
+ // Silently ignore this failure to avoid flooding the logs.
}
GainControl* agc = audioproc_->gain_control();
if (agc->set_stream_analog_level(current_mic_level) != 0) {
- LOG_FERR1(LS_ERROR, set_stream_analog_level, current_mic_level);
+ LOG(LS_ERROR) << "set_stream_analog_level failed: current_mic_level = "
+ << current_mic_level;
assert(false);
}
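
Annotation: the printf-style LOG_FERR* helpers from system_wrappers are replaced by the streaming LOG macro from webrtc/base/logging.h. A stand-in showing just the call-site shape (LOG_STREAM here is illustrative, not the real macro definition):

    #include <iostream>

    // Illustrative stand-in for LOG(LS_ERROR) from webrtc/base/logging.h.
    #define LOG_STREAM(sev) std::cerr << #sev << ": "

    void ReportAnalogLevelFailure(int current_mic_level) {
      LOG_STREAM(LS_ERROR) << "set_stream_analog_level failed: "
                           << "current_mic_level = " << current_mic_level
                           << '\n';
    }
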
@@ -1279,9 +1282,11 @@ void TransmitMixer::TypingDetection(bool keyPressed)
bool vadActive = _audioFrame.vad_activity_ == AudioFrame::kVadActive;
if (_typingDetection.Process(keyPressed, vadActive)) {
+ CriticalSectionScoped cs(&_critSect);
_typingNoiseWarningPending = true;
_typingNoiseDetected = true;
} else {
+ CriticalSectionScoped cs(&_critSect);
// If there is already a warning pending, do not change the state.
// Otherwise set a warning pending if last callback was for noise detected.
if (!_typingNoiseWarningPending && _typingNoiseDetected) {
diff --git a/webrtc/voice_engine/transmit_mixer.h b/webrtc/voice_engine/transmit_mixer.h
index 714efb48dc..0aee106231 100644
--- a/webrtc/voice_engine/transmit_mixer.h
+++ b/webrtc/voice_engine/transmit_mixer.h
@@ -15,9 +15,9 @@
#include "webrtc/common_audio/resampler/include/push_resampler.h"
#include "webrtc/common_types.h"
#include "webrtc/modules/audio_processing/typing_detection.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/utility/interface/file_player.h"
-#include "webrtc/modules/utility/interface/file_recorder.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/utility/include/file_player.h"
+#include "webrtc/modules/utility/include/file_recorder.h"
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/level_indicator.h"
#include "webrtc/voice_engine/monitor_module.h"
@@ -52,7 +52,7 @@ public:
int32_t PrepareDemux(const void* audioSamples,
size_t nSamples,
- uint8_t nChannels,
+ size_t nChannels,
uint32_t samplesPerSec,
uint16_t totalDelayMS,
int32_t clockDrift,
@@ -63,12 +63,12 @@ public:
int32_t DemuxAndMix();
// Used by Chrome to pass the recording data to the specific VoE
// channels for demux.
- void DemuxAndMix(const int voe_channels[], int number_of_voe_channels);
+ void DemuxAndMix(const int voe_channels[], size_t number_of_voe_channels);
int32_t EncodeAndSend();
// Used by Chrome to pass the recording data to the specific VoE
// channels for encoding and sending to the network.
- void EncodeAndSend(const int voe_channels[], int number_of_voe_channels);
+ void EncodeAndSend(const int voe_channels[], size_t number_of_voe_channels);
// Must be called on the same thread as PrepareDemux().
uint32_t CaptureLevel() const;
@@ -170,11 +170,11 @@ private:
// Gets the maximum sample rate and number of channels over all currently
// sending codecs.
- void GetSendCodecInfo(int* max_sample_rate, int* max_channels);
+ void GetSendCodecInfo(int* max_sample_rate, size_t* max_channels);
void GenerateAudioFrame(const int16_t audioSamples[],
size_t nSamples,
- int nChannels,
+ size_t nChannels,
int samplesPerSec);
int32_t RecordAudioToFile(uint32_t mixingFrequency);
diff --git a/webrtc/voice_engine/utility.cc b/webrtc/voice_engine/utility.cc
index 7bc7e0e963..605e55369e 100644
--- a/webrtc/voice_engine/utility.cc
+++ b/webrtc/voice_engine/utility.cc
@@ -10,12 +10,12 @@
#include "webrtc/voice_engine/utility.h"
+#include "webrtc/base/logging.h"
#include "webrtc/common_audio/resampler/include/push_resampler.h"
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
#include "webrtc/common_types.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/utility/interface/audio_frame_operations.h"
-#include "webrtc/system_wrappers/include/logging.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/utility/include/audio_frame_operations.h"
#include "webrtc/voice_engine/voice_engine_defines.h"
namespace webrtc {
@@ -34,12 +34,12 @@ void RemixAndResample(const AudioFrame& src_frame,
void RemixAndResample(const int16_t* src_data,
size_t samples_per_channel,
- int num_channels,
+ size_t num_channels,
int sample_rate_hz,
PushResampler<int16_t>* resampler,
AudioFrame* dst_frame) {
const int16_t* audio_ptr = src_data;
- int audio_ptr_num_channels = num_channels;
+ size_t audio_ptr_num_channels = num_channels;
int16_t mono_audio[AudioFrame::kMaxDataSizeSamples];
// Downmix before resampling.
@@ -52,8 +52,10 @@ void RemixAndResample(const int16_t* src_data,
if (resampler->InitializeIfNeeded(sample_rate_hz, dst_frame->sample_rate_hz_,
audio_ptr_num_channels) == -1) {
- LOG_FERR3(LS_ERROR, InitializeIfNeeded, sample_rate_hz,
- dst_frame->sample_rate_hz_, audio_ptr_num_channels);
+ LOG(LS_ERROR) << "InitializeIfNeeded failed: sample_rate_hz = "
+ << sample_rate_hz << ", dst_frame->sample_rate_hz_ = "
+ << dst_frame->sample_rate_hz_
+ << ", audio_ptr_num_channels = " << audio_ptr_num_channels;
assert(false);
}
@@ -61,11 +63,12 @@ void RemixAndResample(const int16_t* src_data,
int out_length = resampler->Resample(audio_ptr, src_length, dst_frame->data_,
AudioFrame::kMaxDataSizeSamples);
if (out_length == -1) {
- LOG_FERR3(LS_ERROR, Resample, audio_ptr, src_length, dst_frame->data_);
+ LOG(LS_ERROR) << "Resample failed: audio_ptr = " << audio_ptr
+ << ", src_length = " << src_length
+ << ", dst_frame->data_ = " << dst_frame->data_;
assert(false);
}
- dst_frame->samples_per_channel_ =
- static_cast<size_t>(out_length / audio_ptr_num_channels);
+ dst_frame->samples_per_channel_ = out_length / audio_ptr_num_channels;
// Upmix after resampling.
if (num_channels == 1 && dst_frame->num_channels_ == 2) {
@@ -77,9 +80,9 @@ void RemixAndResample(const int16_t* src_data,
}
void MixWithSat(int16_t target[],
- int target_channel,
+ size_t target_channel,
const int16_t source[],
- int source_channel,
+ size_t source_channel,
size_t source_len) {
assert(target_channel == 1 || target_channel == 2);
assert(source_channel == 1 || source_channel == 2);
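
Annotation: MixWithSat's channel arguments also become size_t; its job is to add the source stream into the target with int16 saturation. The mono-into-mono core of such a helper might look like this (the real function also handles the 1-to-2 and 2-to-1 channel cases):

    #include <algorithm>
    #include <cstddef>
    #include <cstdint>

    void MixWithSatMono(int16_t target[], const int16_t source[], size_t len) {
      for (size_t i = 0; i < len; ++i) {
        // Widen to 32 bits, then clamp so overflow saturates instead of
        // wrapping around.
        int32_t sum = static_cast<int32_t>(target[i]) + source[i];
        target[i] = static_cast<int16_t>(std::min(32767, std::max(-32768, sum)));
      }
    }
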
diff --git a/webrtc/voice_engine/utility.h b/webrtc/voice_engine/utility.h
index cc44533665..4139f05cfd 100644
--- a/webrtc/voice_engine/utility.h
+++ b/webrtc/voice_engine/utility.h
@@ -40,15 +40,15 @@ void RemixAndResample(const AudioFrame& src_frame,
// parameters.
void RemixAndResample(const int16_t* src_data,
size_t samples_per_channel,
- int num_channels,
+ size_t num_channels,
int sample_rate_hz,
PushResampler<int16_t>* resampler,
AudioFrame* dst_frame);
void MixWithSat(int16_t target[],
- int target_channel,
+ size_t target_channel,
const int16_t source[],
- int source_channel,
+ size_t source_channel,
size_t source_len);
} // namespace voe
diff --git a/webrtc/voice_engine/utility_unittest.cc b/webrtc/voice_engine/utility_unittest.cc
index 226e38366d..921c3e5085 100644
--- a/webrtc/voice_engine/utility_unittest.cc
+++ b/webrtc/voice_engine/utility_unittest.cc
@@ -13,7 +13,7 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/format_macros.h"
#include "webrtc/common_audio/resampler/include/push_resampler.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/voice_engine/utility.h"
#include "webrtc/voice_engine/voice_engine_defines.h"
diff --git a/webrtc/voice_engine/voe_audio_processing_impl.cc b/webrtc/voice_engine/voe_audio_processing_impl.cc
index 83f70fe68e..c95726339c 100644
--- a/webrtc/voice_engine/voe_audio_processing_impl.cc
+++ b/webrtc/voice_engine/voe_audio_processing_impl.cc
@@ -10,9 +10,9 @@
#include "webrtc/voice_engine/voe_audio_processing_impl.h"
+#include "webrtc/base/logging.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/logging.h"
#include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/voice_engine/channel.h"
#include "webrtc/voice_engine/include/voe_errors.h"
@@ -317,7 +317,6 @@ int VoEAudioProcessingImpl::GetAgcConfig(AgcConfig& config) {
int VoEAudioProcessingImpl::SetRxNsStatus(int channel,
bool enable,
NsModes mode) {
- LOG_API3(channel, enable, mode);
#ifdef WEBRTC_VOICE_ENGINE_NR
if (!_shared->statistics().Initialized()) {
_shared->SetLastError(VE_NOT_INITED, kTraceError);
@@ -469,7 +468,6 @@ bool VoEAudioProcessing::DriftCompensationSupported() {
}
int VoEAudioProcessingImpl::EnableDriftCompensation(bool enable) {
- LOG_API1(enable);
WEBRTC_VOICE_INIT_CHECK();
if (!DriftCompensationSupported()) {
@@ -489,7 +487,6 @@ int VoEAudioProcessingImpl::EnableDriftCompensation(bool enable) {
}
bool VoEAudioProcessingImpl::DriftCompensationEnabled() {
- LOG_API0();
WEBRTC_VOICE_INIT_CHECK_BOOL();
EchoCancellation* aec = _shared->audio_processing()->echo_cancellation();
@@ -1038,12 +1035,10 @@ int VoEAudioProcessingImpl::SetTypingDetectionParameters(int timeWindow,
}
void VoEAudioProcessingImpl::EnableStereoChannelSwapping(bool enable) {
- LOG_API1(enable);
_shared->transmit_mixer()->EnableStereoChannelSwapping(enable);
}
bool VoEAudioProcessingImpl::IsStereoChannelSwappingEnabled() {
- LOG_API0();
return _shared->transmit_mixer()->IsStereoChannelSwappingEnabled();
}
diff --git a/webrtc/voice_engine/voe_base_impl.cc b/webrtc/voice_engine/voe_base_impl.cc
index 8ea3f6314a..3e5cfbb14b 100644
--- a/webrtc/voice_engine/voe_base_impl.cc
+++ b/webrtc/voice_engine/voe_base_impl.cc
@@ -11,14 +11,14 @@
#include "webrtc/voice_engine/voe_base_impl.h"
#include "webrtc/base/format_macros.h"
+#include "webrtc/base/logging.h"
#include "webrtc/common.h"
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module.h"
#include "webrtc/modules/audio_device/audio_device_impl.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/file_wrapper.h"
-#include "webrtc/system_wrappers/include/logging.h"
#include "webrtc/voice_engine/channel.h"
#include "webrtc/voice_engine/include/voe_errors.h"
#include "webrtc/voice_engine/output_mixer.h"
@@ -47,7 +47,7 @@ VoEBaseImpl::~VoEBaseImpl() {
delete &callbackCritSect_;
}
-void VoEBaseImpl::OnErrorIsReported(ErrorCode error) {
+void VoEBaseImpl::OnErrorIsReported(const ErrorCode error) {
CriticalSectionScoped cs(&callbackCritSect_);
int errCode = 0;
if (error == AudioDeviceObserver::kRecordingError) {
@@ -63,7 +63,7 @@ void VoEBaseImpl::OnErrorIsReported(ErrorCode error) {
}
}
-void VoEBaseImpl::OnWarningIsReported(WarningCode warning) {
+void VoEBaseImpl::OnWarningIsReported(const WarningCode warning) {
CriticalSectionScoped cs(&callbackCritSect_);
int warningCode = 0;
if (warning == AudioDeviceObserver::kRecordingWarning) {
@@ -79,34 +79,40 @@ void VoEBaseImpl::OnWarningIsReported(WarningCode warning) {
}
}
-int32_t VoEBaseImpl::RecordedDataIsAvailable(
- const void* audioSamples, size_t nSamples, size_t nBytesPerSample,
- uint8_t nChannels, uint32_t samplesPerSec, uint32_t totalDelayMS,
- int32_t clockDrift, uint32_t micLevel, bool keyPressed,
- uint32_t& newMicLevel) {
+int32_t VoEBaseImpl::RecordedDataIsAvailable(const void* audioSamples,
+ const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ const uint32_t totalDelayMS,
+ const int32_t clockDrift,
+ const uint32_t currentMicLevel,
+ const bool keyPressed,
+ uint32_t& newMicLevel) {
newMicLevel = static_cast<uint32_t>(ProcessRecordedDataWithAPM(
nullptr, 0, audioSamples, samplesPerSec, nChannels, nSamples,
- totalDelayMS, clockDrift, micLevel, keyPressed));
+ totalDelayMS, clockDrift, currentMicLevel, keyPressed));
return 0;
}
-int32_t VoEBaseImpl::NeedMorePlayData(size_t nSamples,
- size_t nBytesPerSample,
- uint8_t nChannels, uint32_t samplesPerSec,
- void* audioSamples, size_t& nSamplesOut,
+int32_t VoEBaseImpl::NeedMorePlayData(const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ void* audioSamples,
+ size_t& nSamplesOut,
int64_t* elapsed_time_ms,
int64_t* ntp_time_ms) {
- GetPlayoutData(static_cast<int>(samplesPerSec), static_cast<int>(nChannels),
- nSamples, true, audioSamples,
- elapsed_time_ms, ntp_time_ms);
+ GetPlayoutData(static_cast<int>(samplesPerSec), nChannels, nSamples, true,
+ audioSamples, elapsed_time_ms, ntp_time_ms);
nSamplesOut = audioFrame_.samples_per_channel_;
return 0;
}
int VoEBaseImpl::OnDataAvailable(const int voe_channels[],
- int number_of_voe_channels,
+ size_t number_of_voe_channels,
const int16_t* audio_data, int sample_rate,
- int number_of_channels,
+ size_t number_of_channels,
size_t number_of_frames,
int audio_delay_milliseconds, int volume,
bool key_pressed, bool need_audio_processing) {
@@ -121,7 +127,7 @@ int VoEBaseImpl::OnDataAvailable(const int voe_channels[],
// No need to go through the APM, demultiplex the data to each VoE channel,
// encode and send to the network.
- for (int i = 0; i < number_of_voe_channels; ++i) {
+ for (size_t i = 0; i < number_of_voe_channels; ++i) {
// TODO(ajm): In the case where multiple channels are using the same codec
// rate, this path needlessly does extra conversions. We should convert once
// and share between channels.
@@ -135,14 +141,14 @@ int VoEBaseImpl::OnDataAvailable(const int voe_channels[],
void VoEBaseImpl::OnData(int voe_channel, const void* audio_data,
int bits_per_sample, int sample_rate,
- int number_of_channels, size_t number_of_frames) {
+ size_t number_of_channels, size_t number_of_frames) {
PushCaptureData(voe_channel, audio_data, bits_per_sample, sample_rate,
number_of_channels, number_of_frames);
}
void VoEBaseImpl::PushCaptureData(int voe_channel, const void* audio_data,
int bits_per_sample, int sample_rate,
- int number_of_channels,
+ size_t number_of_channels,
size_t number_of_frames) {
voe::ChannelOwner ch = shared_->channel_manager().GetChannel(voe_channel);
voe::Channel* channel_ptr = ch.channel();
@@ -158,7 +164,7 @@ void VoEBaseImpl::PushCaptureData(int voe_channel, const void* audio_data,
void VoEBaseImpl::PullRenderData(int bits_per_sample,
int sample_rate,
- int number_of_channels,
+ size_t number_of_channels,
size_t number_of_frames,
void* audio_data, int64_t* elapsed_time_ms,
int64_t* ntp_time_ms) {
@@ -575,69 +581,19 @@ int VoEBaseImpl::StopSend(int channel) {
}
int VoEBaseImpl::GetVersion(char version[1024]) {
- static_assert(kVoiceEngineVersionMaxMessageSize == 1024, "");
-
if (version == nullptr) {
shared_->SetLastError(VE_INVALID_ARGUMENT, kTraceError);
- return (-1);
- }
-
- char versionBuf[kVoiceEngineVersionMaxMessageSize];
- char* versionPtr = versionBuf;
-
- int32_t len = 0;
- int32_t accLen = 0;
-
- len = AddVoEVersion(versionPtr);
- if (len == -1) {
return -1;
}
- versionPtr += len;
- accLen += len;
- assert(accLen < kVoiceEngineVersionMaxMessageSize);
-
-#ifdef WEBRTC_EXTERNAL_TRANSPORT
- len = AddExternalTransportBuild(versionPtr);
- if (len == -1) {
- return -1;
- }
- versionPtr += len;
- accLen += len;
- assert(accLen < kVoiceEngineVersionMaxMessageSize);
-#endif
-
- memcpy(version, versionBuf, accLen);
- version[accLen] = '\0';
-
- // to avoid the truncation in the trace, split the string into parts
- char partOfVersion[256];
- for (int partStart = 0; partStart < accLen;) {
- memset(partOfVersion, 0, sizeof(partOfVersion));
- int partEnd = partStart + 180;
- while (version[partEnd] != '\n' && version[partEnd] != '\0') {
- partEnd--;
- }
- if (partEnd < accLen) {
- memcpy(partOfVersion, &version[partStart], partEnd - partStart);
- } else {
- memcpy(partOfVersion, &version[partStart], accLen - partStart);
- }
- partStart = partEnd;
- }
+ std::string versionString = VoiceEngine::GetVersionString();
+ RTC_DCHECK_GT(1024u, versionString.size() + 1);
+ char* end = std::copy(versionString.cbegin(), versionString.cend(), version);
+ end[0] = '\n';
+ end[1] = '\0';
return 0;
}
-int32_t VoEBaseImpl::AddVoEVersion(char* str) const {
- return sprintf(str, "VoiceEngine 4.1.0\n");
-}
-
-#ifdef WEBRTC_EXTERNAL_TRANSPORT
-int32_t VoEBaseImpl::AddExternalTransportBuild(char* str) const {
- return sprintf(str, "External transport build\n");
-}
-#endif
-
int VoEBaseImpl::LastError() { return (shared_->statistics().LastError()); }
int32_t VoEBaseImpl::StartPlayout() {
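
Annotation: the rewritten GetVersion() delegates to the new VoiceEngine::GetVersionString() and hand-copies into the legacy fixed buffer. The RTC_DCHECK_GT(1024u, size() + 1) reserves exactly enough room for the appended '\n' and NUL, since size + 2 <= 1024 is equivalent to 1024 > size + 1. The copy step in isolation:

    #include <algorithm>
    #include <cassert>
    #include <string>

    int CopyVersionToLegacyBuffer(const std::string& version, char out[1024]) {
      if (out == nullptr)
        return -1;
      // size() chars plus '\n' plus '\0' must fit; same bound as the DCHECK.
      assert(version.size() + 1 < 1024);
      char* end = std::copy(version.cbegin(), version.cend(), out);
      end[0] = '\n';
      end[1] = '\0';
      return 0;
    }
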
@@ -742,8 +698,8 @@ int32_t VoEBaseImpl::TerminateInternal() {
}
int VoEBaseImpl::ProcessRecordedDataWithAPM(
- const int voe_channels[], int number_of_voe_channels,
- const void* audio_data, uint32_t sample_rate, uint8_t number_of_channels,
+ const int voe_channels[], size_t number_of_voe_channels,
+ const void* audio_data, uint32_t sample_rate, size_t number_of_channels,
size_t number_of_frames, uint32_t audio_delay_milliseconds,
int32_t clock_drift, uint32_t volume, bool key_pressed) {
assert(shared_->transmit_mixer() != nullptr);
@@ -808,7 +764,7 @@ int VoEBaseImpl::ProcessRecordedDataWithAPM(
return 0;
}
-void VoEBaseImpl::GetPlayoutData(int sample_rate, int number_of_channels,
+void VoEBaseImpl::GetPlayoutData(int sample_rate, size_t number_of_channels,
size_t number_of_frames, bool feed_data_to_apm,
void* audio_data, int64_t* elapsed_time_ms,
int64_t* ntp_time_ms) {
diff --git a/webrtc/voice_engine/voe_base_impl.h b/webrtc/voice_engine/voe_base_impl.h
index f0ac959dcd..58e0387423 100644
--- a/webrtc/voice_engine/voe_base_impl.h
+++ b/webrtc/voice_engine/voe_base_impl.h
@@ -13,7 +13,7 @@
#include "webrtc/voice_engine/include/voe_base.h"
-#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/voice_engine/shared_data.h"
namespace webrtc {
@@ -54,37 +54,57 @@ class VoEBaseImpl : public VoEBase,
int AssociateSendChannel(int channel, int accociate_send_channel) override;
// AudioTransport
- int32_t RecordedDataIsAvailable(const void* audioSamples, size_t nSamples,
- size_t nBytesPerSample, uint8_t nChannels,
- uint32_t samplesPerSec, uint32_t totalDelayMS,
- int32_t clockDrift, uint32_t micLevel,
- bool keyPressed,
+ int32_t RecordedDataIsAvailable(const void* audioSamples,
+ const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ const uint32_t totalDelayMS,
+ const int32_t clockDrift,
+ const uint32_t currentMicLevel,
+ const bool keyPressed,
uint32_t& newMicLevel) override;
- int32_t NeedMorePlayData(size_t nSamples, size_t nBytesPerSample,
- uint8_t nChannels, uint32_t samplesPerSec,
- void* audioSamples, size_t& nSamplesOut,
+ int32_t NeedMorePlayData(const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ void* audioSamples,
+ size_t& nSamplesOut,
int64_t* elapsed_time_ms,
int64_t* ntp_time_ms) override;
- int OnDataAvailable(const int voe_channels[], int number_of_voe_channels,
- const int16_t* audio_data, int sample_rate,
- int number_of_channels, size_t number_of_frames,
- int audio_delay_milliseconds, int volume,
- bool key_pressed, bool need_audio_processing) override;
- void OnData(int voe_channel, const void* audio_data, int bits_per_sample,
- int sample_rate, int number_of_channels,
+ int OnDataAvailable(const int voe_channels[],
+ size_t number_of_voe_channels,
+ const int16_t* audio_data,
+ int sample_rate,
+ size_t number_of_channels,
+ size_t number_of_frames,
+ int audio_delay_milliseconds,
+ int current_volume,
+ bool key_pressed,
+ bool need_audio_processing) override;
+ void OnData(int voe_channel,
+ const void* audio_data,
+ int bits_per_sample,
+ int sample_rate,
+ size_t number_of_channels,
size_t number_of_frames) override;
- void PushCaptureData(int voe_channel, const void* audio_data,
- int bits_per_sample, int sample_rate,
- int number_of_channels,
+ void PushCaptureData(int voe_channel,
+ const void* audio_data,
+ int bits_per_sample,
+ int sample_rate,
+ size_t number_of_channels,
size_t number_of_frames) override;
- void PullRenderData(int bits_per_sample, int sample_rate,
- int number_of_channels, size_t number_of_frames,
- void* audio_data, int64_t* elapsed_time_ms,
+ void PullRenderData(int bits_per_sample,
+ int sample_rate,
+ size_t number_of_channels,
+ size_t number_of_frames,
+ void* audio_data,
+ int64_t* elapsed_time_ms,
int64_t* ntp_time_ms) override;
// AudioDeviceObserver
- void OnErrorIsReported(ErrorCode error) override;
- void OnWarningIsReported(WarningCode warning) override;
+ void OnErrorIsReported(const ErrorCode error) override;
+ void OnWarningIsReported(const WarningCode warning) override;
protected:
VoEBaseImpl(voe::SharedData* shared);
@@ -104,24 +124,19 @@ class VoEBaseImpl : public VoEBase,
// It returns new AGC microphone volume or 0 if no volume changes
// should be done.
int ProcessRecordedDataWithAPM(
- const int voe_channels[], int number_of_voe_channels,
- const void* audio_data, uint32_t sample_rate, uint8_t number_of_channels,
+ const int voe_channels[], size_t number_of_voe_channels,
+ const void* audio_data, uint32_t sample_rate, size_t number_of_channels,
size_t number_of_frames, uint32_t audio_delay_milliseconds,
int32_t clock_drift, uint32_t volume, bool key_pressed);
- void GetPlayoutData(int sample_rate, int number_of_channels,
+ void GetPlayoutData(int sample_rate, size_t number_of_channels,
size_t number_of_frames, bool feed_data_to_apm,
void* audio_data, int64_t* elapsed_time_ms,
int64_t* ntp_time_ms);
- int32_t AddVoEVersion(char* str) const;
-
// Initialize channel by setting Engine Information then initializing
// channel.
int InitializeChannel(voe::ChannelOwner* channel_owner);
-#ifdef WEBRTC_EXTERNAL_TRANSPORT
- int32_t AddExternalTransportBuild(char* str) const;
-#endif
VoiceEngineObserver* voiceEngineObserverPtr_;
CriticalSectionWrapper& callbackCritSect_;
diff --git a/webrtc/voice_engine/voe_base_unittest.cc b/webrtc/voice_engine/voe_base_unittest.cc
index 5c71784b4f..e53dee2eff 100644
--- a/webrtc/voice_engine/voe_base_unittest.cc
+++ b/webrtc/voice_engine/voe_base_unittest.cc
@@ -78,4 +78,10 @@ TEST_F(VoEBaseTest, AssociateSendChannel) {
EXPECT_EQ(1, reference.use_count());
}
+TEST_F(VoEBaseTest, GetVersion) {
+ char v1[1024] = {75};
+ base_->GetVersion(v1);
+ std::string v2 = VoiceEngine::GetVersionString() + "\n";
+ EXPECT_EQ(v2, v1);
+}
} // namespace webrtc
diff --git a/webrtc/voice_engine/voe_codec_impl.cc b/webrtc/voice_engine/voe_codec_impl.cc
index 2631372bbf..6eb11b759c 100644
--- a/webrtc/voice_engine/voe_codec_impl.cc
+++ b/webrtc/voice_engine/voe_codec_impl.cc
@@ -10,7 +10,8 @@
#include "webrtc/voice_engine/voe_codec_impl.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module.h"
+#include "webrtc/base/format_macros.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/voice_engine/channel.h"
@@ -51,25 +52,20 @@ int VoECodecImpl::NumOfCodecs() {
}
int VoECodecImpl::GetCodec(int index, CodecInst& codec) {
- CodecInst acmCodec;
- if (AudioCodingModule::Codec(index, &acmCodec) == -1) {
+ if (AudioCodingModule::Codec(index, &codec) == -1) {
_shared->SetLastError(VE_INVALID_LISTNR, kTraceError,
"GetCodec() invalid index");
return -1;
}
- ACMToExternalCodecRepresentation(codec, acmCodec);
return 0;
}
int VoECodecImpl::SetSendCodec(int channel, const CodecInst& codec) {
- CodecInst copyCodec;
- ExternalToACMCodecRepresentation(copyCodec, codec);
-
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"SetSendCodec(channel=%d, codec)", channel);
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
"codec: plname=%s, pacsize=%d, plfreq=%d, pltype=%d, "
- "channels=%d, rate=%d",
+ "channels=%" PRIuS ", rate=%d",
codec.plname, codec.pacsize, codec.plfreq, codec.pltype,
codec.channels, codec.rate);
if (!_shared->statistics().Initialized()) {
@@ -77,20 +73,19 @@ int VoECodecImpl::SetSendCodec(int channel, const CodecInst& codec) {
return -1;
}
// External sanity checks performed outside the ACM
- if ((STR_CASE_CMP(copyCodec.plname, "L16") == 0) &&
- (copyCodec.pacsize >= 960)) {
+ if ((STR_CASE_CMP(codec.plname, "L16") == 0) && (codec.pacsize >= 960)) {
_shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
"SetSendCodec() invalid L16 packet size");
return -1;
}
- if (!STR_CASE_CMP(copyCodec.plname, "CN") ||
- !STR_CASE_CMP(copyCodec.plname, "TELEPHONE-EVENT") ||
- !STR_CASE_CMP(copyCodec.plname, "RED")) {
+ if (!STR_CASE_CMP(codec.plname, "CN") ||
+ !STR_CASE_CMP(codec.plname, "TELEPHONE-EVENT") ||
+ !STR_CASE_CMP(codec.plname, "RED")) {
_shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
"SetSendCodec() invalid codec name");
return -1;
}
- if ((copyCodec.channels != 1) && (copyCodec.channels != 2)) {
+ if ((codec.channels != 1) && (codec.channels != 2)) {
_shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
"SetSendCodec() invalid number of channels");
return -1;
@@ -102,12 +97,12 @@ int VoECodecImpl::SetSendCodec(int channel, const CodecInst& codec) {
"GetSendCodec() failed to locate channel");
return -1;
}
- if (!AudioCodingModule::IsCodecValid((CodecInst&)copyCodec)) {
+ if (!AudioCodingModule::IsCodecValid(codec)) {
_shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
"SetSendCodec() invalid codec");
return -1;
}
- if (channelPtr->SetSendCodec(copyCodec) != 0) {
+ if (channelPtr->SetSendCodec(codec) != 0) {
_shared->SetLastError(VE_CANNOT_SET_SEND_CODEC, kTraceError,
"SetSendCodec() failed to set send codec");
return -1;
@@ -128,13 +123,11 @@ int VoECodecImpl::GetSendCodec(int channel, CodecInst& codec) {
"GetSendCodec() failed to locate channel");
return -1;
}
- CodecInst acmCodec;
- if (channelPtr->GetSendCodec(acmCodec) != 0) {
+ if (channelPtr->GetSendCodec(codec) != 0) {
_shared->SetLastError(VE_CANNOT_GET_SEND_CODEC, kTraceError,
"GetSendCodec() failed to get send codec");
return -1;
}
- ACMToExternalCodecRepresentation(codec, acmCodec);
return 0;
}
@@ -162,19 +155,14 @@ int VoECodecImpl::GetRecCodec(int channel, CodecInst& codec) {
"GetRecCodec() failed to locate channel");
return -1;
}
- CodecInst acmCodec;
- if (channelPtr->GetRecCodec(acmCodec) != 0) {
- return -1;
- }
- ACMToExternalCodecRepresentation(codec, acmCodec);
- return 0;
+ return channelPtr->GetRecCodec(codec);
}
int VoECodecImpl::SetRecPayloadType(int channel, const CodecInst& codec) {
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"SetRecPayloadType(channel=%d, codec)", channel);
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "codec: plname=%s, plfreq=%d, pltype=%d, channels=%u, "
+ "codec: plname=%s, plfreq=%d, pltype=%d, channels=%" PRIuS ", "
"pacsize=%d, rate=%d",
codec.plname, codec.plfreq, codec.pltype, codec.channels,
codec.pacsize, codec.rate);
@@ -389,54 +377,6 @@ int VoECodecImpl::SetOpusDtx(int channel, bool enable_dtx) {
return channelPtr->SetOpusDtx(enable_dtx);
}
-void VoECodecImpl::ACMToExternalCodecRepresentation(CodecInst& toInst,
- const CodecInst& fromInst) {
- toInst = fromInst;
- if (STR_CASE_CMP(fromInst.plname, "SILK") == 0) {
- if (fromInst.plfreq == 12000) {
- if (fromInst.pacsize == 320) {
- toInst.pacsize = 240;
- } else if (fromInst.pacsize == 640) {
- toInst.pacsize = 480;
- } else if (fromInst.pacsize == 960) {
- toInst.pacsize = 720;
- }
- } else if (fromInst.plfreq == 24000) {
- if (fromInst.pacsize == 640) {
- toInst.pacsize = 480;
- } else if (fromInst.pacsize == 1280) {
- toInst.pacsize = 960;
- } else if (fromInst.pacsize == 1920) {
- toInst.pacsize = 1440;
- }
- }
- }
-}
-
-void VoECodecImpl::ExternalToACMCodecRepresentation(CodecInst& toInst,
- const CodecInst& fromInst) {
- toInst = fromInst;
- if (STR_CASE_CMP(fromInst.plname, "SILK") == 0) {
- if (fromInst.plfreq == 12000) {
- if (fromInst.pacsize == 240) {
- toInst.pacsize = 320;
- } else if (fromInst.pacsize == 480) {
- toInst.pacsize = 640;
- } else if (fromInst.pacsize == 720) {
- toInst.pacsize = 960;
- }
- } else if (fromInst.plfreq == 24000) {
- if (fromInst.pacsize == 480) {
- toInst.pacsize = 640;
- } else if (fromInst.pacsize == 960) {
- toInst.pacsize = 1280;
- } else if (fromInst.pacsize == 1440) {
- toInst.pacsize = 1920;
- }
- }
- }
-}
-
RtcEventLog* VoECodecImpl::GetEventLog() {
return _shared->channel_manager().GetEventLog();
}
diff --git a/webrtc/voice_engine/voe_codec_impl.h b/webrtc/voice_engine/voe_codec_impl.h
index a0eed4d38c..5095f6e232 100644
--- a/webrtc/voice_engine/voe_codec_impl.h
+++ b/webrtc/voice_engine/voe_codec_impl.h
@@ -65,12 +65,6 @@ class VoECodecImpl : public VoECodec {
~VoECodecImpl() override;
private:
- void ACMToExternalCodecRepresentation(CodecInst& toInst,
- const CodecInst& fromInst);
-
- void ExternalToACMCodecRepresentation(CodecInst& toInst,
- const CodecInst& fromInst);
-
voe::SharedData* _shared;
};
diff --git a/webrtc/voice_engine/voe_codec_unittest.cc b/webrtc/voice_engine/voe_codec_unittest.cc
index 52aa537544..f09e19e685 100644
--- a/webrtc/voice_engine/voe_codec_unittest.cc
+++ b/webrtc/voice_engine/voe_codec_unittest.cc
@@ -13,7 +13,6 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/audio_device/include/fake_audio_device.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_hardware.h"
#include "webrtc/voice_engine/voice_engine_defines.h"
diff --git a/webrtc/voice_engine/voe_file_impl.cc b/webrtc/voice_engine/voe_file_impl.cc
index 7927f9ff05..2091e7073b 100644
--- a/webrtc/voice_engine/voe_file_impl.cc
+++ b/webrtc/voice_engine/voe_file_impl.cc
@@ -10,7 +10,7 @@
#include "webrtc/voice_engine/voe_file_impl.h"
-#include "webrtc/modules/media_file/interface/media_file.h"
+#include "webrtc/modules/media_file/media_file.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/file_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h"
diff --git a/webrtc/voice_engine/voe_neteq_stats_impl.cc b/webrtc/voice_engine/voe_neteq_stats_impl.cc
index 00e04d8f99..807325b4f8 100644
--- a/webrtc/voice_engine/voe_neteq_stats_impl.cc
+++ b/webrtc/voice_engine/voe_neteq_stats_impl.cc
@@ -10,7 +10,7 @@
#include "webrtc/voice_engine/voe_neteq_stats_impl.h"
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/voice_engine/channel.h"
diff --git a/webrtc/voice_engine/voe_network_impl.cc b/webrtc/voice_engine/voe_network_impl.cc
index 4a0c3f7861..0574aa9f05 100644
--- a/webrtc/voice_engine/voe_network_impl.cc
+++ b/webrtc/voice_engine/voe_network_impl.cc
@@ -12,8 +12,8 @@
#include "webrtc/base/checks.h"
#include "webrtc/base/format_macros.h"
+#include "webrtc/base/logging.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/logging.h"
#include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/voice_engine/channel.h"
#include "webrtc/voice_engine/include/voe_errors.h"
diff --git a/webrtc/voice_engine/voe_video_sync_impl.cc b/webrtc/voice_engine/voe_video_sync_impl.cc
index 811bb4ec5e..77517c633c 100644
--- a/webrtc/voice_engine/voe_video_sync_impl.cc
+++ b/webrtc/voice_engine/voe_video_sync_impl.cc
@@ -116,25 +116,6 @@ int VoEVideoSyncImpl::SetMinimumPlayoutDelay(int channel, int delayMs) {
return channelPtr->SetMinimumPlayoutDelay(delayMs);
}
-int VoEVideoSyncImpl::SetInitialPlayoutDelay(int channel, int delay_ms) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "SetInitialPlayoutDelay(channel=%d, delay_ms=%d)", channel,
- delay_ms);
-
- if (!_shared->statistics().Initialized()) {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
- voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
- voe::Channel* channelPtr = ch.channel();
- if (channelPtr == NULL) {
- _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
- "SetInitialPlayoutDelay() failed to locate channel");
- return -1;
- }
- return channelPtr->SetInitialPlayoutDelay(delay_ms);
-}
-
int VoEVideoSyncImpl::GetDelayEstimate(int channel,
int* jitter_buffer_delay_ms,
int* playout_buffer_delay_ms) {
diff --git a/webrtc/voice_engine/voe_video_sync_impl.h b/webrtc/voice_engine/voe_video_sync_impl.h
index aac575c250..8b367eeae1 100644
--- a/webrtc/voice_engine/voe_video_sync_impl.h
+++ b/webrtc/voice_engine/voe_video_sync_impl.h
@@ -23,8 +23,6 @@ class VoEVideoSyncImpl : public VoEVideoSync {
int SetMinimumPlayoutDelay(int channel, int delayMs) override;
- int SetInitialPlayoutDelay(int channel, int delay_ms) override;
-
int GetDelayEstimate(int channel,
int* jitter_buffer_delay_ms,
int* playout_buffer_delay_ms) override;
diff --git a/webrtc/voice_engine/voice_engine.gyp b/webrtc/voice_engine/voice_engine.gyp
index 221b2aa681..ff588d8ead 100644
--- a/webrtc/voice_engine/voice_engine.gyp
+++ b/webrtc/voice_engine/voice_engine.gyp
@@ -23,11 +23,15 @@
'<(webrtc_root)/modules/modules.gyp:audio_processing',
'<(webrtc_root)/modules/modules.gyp:bitrate_controller',
'<(webrtc_root)/modules/modules.gyp:media_file',
+ '<(webrtc_root)/modules/modules.gyp:paced_sender',
'<(webrtc_root)/modules/modules.gyp:rtp_rtcp',
'<(webrtc_root)/modules/modules.gyp:webrtc_utility',
'<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
'<(webrtc_root)/webrtc.gyp:rtc_event_log',
],
+ 'export_dependent_settings': [
+ '<(webrtc_root)/modules/modules.gyp:audio_coding_module',
+ ],
'sources': [
'include/voe_audio_processing.h',
'include/voe_base.h',
@@ -46,6 +50,8 @@
'channel.h',
'channel_manager.cc',
'channel_manager.h',
+ 'channel_proxy.cc',
+ 'channel_proxy.h',
'dtmf_inband.cc',
'dtmf_inband.h',
'dtmf_inband_queue.cc',
@@ -143,83 +149,6 @@
],
},
{
- 'target_name': 'voe_auto_test',
- 'type': 'executable',
- 'dependencies': [
- 'voice_engine',
- '<(DEPTH)/testing/gmock.gyp:gmock',
- '<(DEPTH)/testing/gtest.gyp:gtest',
- '<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
- '<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
- '<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers_default',
- '<(webrtc_root)/test/test.gyp:channel_transport',
- '<(webrtc_root)/test/test.gyp:test_support',
- '<(webrtc_root)/webrtc.gyp:rtc_event_log',
- ],
- 'sources': [
- 'test/auto_test/automated_mode.cc',
- 'test/auto_test/extended/agc_config_test.cc',
- 'test/auto_test/extended/ec_metrics_test.cc',
- 'test/auto_test/fakes/conference_transport.cc',
- 'test/auto_test/fakes/conference_transport.h',
- 'test/auto_test/fakes/loudest_filter.cc',
- 'test/auto_test/fakes/loudest_filter.h',
- 'test/auto_test/fixtures/after_initialization_fixture.cc',
- 'test/auto_test/fixtures/after_initialization_fixture.h',
- 'test/auto_test/fixtures/after_streaming_fixture.cc',
- 'test/auto_test/fixtures/after_streaming_fixture.h',
- 'test/auto_test/fixtures/before_initialization_fixture.cc',
- 'test/auto_test/fixtures/before_initialization_fixture.h',
- 'test/auto_test/fixtures/before_streaming_fixture.cc',
- 'test/auto_test/fixtures/before_streaming_fixture.h',
- 'test/auto_test/standard/audio_processing_test.cc',
- 'test/auto_test/standard/codec_before_streaming_test.cc',
- 'test/auto_test/standard/codec_test.cc',
- 'test/auto_test/standard/dtmf_test.cc',
- 'test/auto_test/standard/external_media_test.cc',
- 'test/auto_test/standard/file_before_streaming_test.cc',
- 'test/auto_test/standard/file_test.cc',
- 'test/auto_test/standard/hardware_before_initializing_test.cc',
- 'test/auto_test/standard/hardware_before_streaming_test.cc',
- 'test/auto_test/standard/hardware_test.cc',
- 'test/auto_test/standard/mixing_test.cc',
- 'test/auto_test/standard/neteq_stats_test.cc',
- 'test/auto_test/standard/rtp_rtcp_before_streaming_test.cc',
- 'test/auto_test/standard/rtp_rtcp_extensions.cc',
- 'test/auto_test/standard/rtp_rtcp_test.cc',
- 'test/auto_test/standard/voe_base_misc_test.cc',
- 'test/auto_test/standard/video_sync_test.cc',
- 'test/auto_test/standard/volume_test.cc',
- 'test/auto_test/resource_manager.cc',
- 'test/auto_test/voe_conference_test.cc',
- 'test/auto_test/voe_cpu_test.cc',
- 'test/auto_test/voe_cpu_test.h',
- 'test/auto_test/voe_standard_test.cc',
- 'test/auto_test/voe_standard_test.h',
- 'test/auto_test/voe_stress_test.cc',
- 'test/auto_test/voe_stress_test.h',
- 'test/auto_test/voe_test_defines.h',
- 'test/auto_test/voe_test_interface.h',
- ],
- 'conditions': [
- ['OS=="android"', {
- # some tests are not supported on android yet, exclude these tests.
- 'sources!': [
- 'test/auto_test/standard/hardware_before_streaming_test.cc',
- ],
- }],
- ['enable_protobuf==1', {
- 'defines': [
- 'ENABLE_RTC_EVENT_LOG',
- ],
- }],
- ],
- # Disable warnings to enable Win64 build, issue 1323.
- 'msvs_disabled_warnings': [
- 4267, # size_t to int truncation.
- ],
- },
- {
# command line test that should work on linux/mac/win
'target_name': 'voe_cmd_test',
'type': 'executable',
@@ -239,6 +168,89 @@
},
], # targets
'conditions': [
+ ['OS!="ios"', {
+ 'targets': [
+ {
+ 'target_name': 'voe_auto_test',
+ 'type': 'executable',
+ 'dependencies': [
+ 'voice_engine',
+ '<(DEPTH)/testing/gmock.gyp:gmock',
+ '<(DEPTH)/testing/gtest.gyp:gtest',
+ '<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
+ '<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
+ '<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers_default',
+ '<(webrtc_root)/test/test.gyp:channel_transport',
+ '<(webrtc_root)/test/test.gyp:test_support',
+ '<(webrtc_root)/test/webrtc_test_common.gyp:webrtc_test_common',
+ '<(webrtc_root)/webrtc.gyp:rtc_event_log',
+ ],
+ 'sources': [
+ 'test/auto_test/automated_mode.cc',
+ 'test/auto_test/extended/agc_config_test.cc',
+ 'test/auto_test/extended/ec_metrics_test.cc',
+ 'test/auto_test/fakes/conference_transport.cc',
+ 'test/auto_test/fakes/conference_transport.h',
+ 'test/auto_test/fakes/loudest_filter.cc',
+ 'test/auto_test/fakes/loudest_filter.h',
+ 'test/auto_test/fixtures/after_initialization_fixture.cc',
+ 'test/auto_test/fixtures/after_initialization_fixture.h',
+ 'test/auto_test/fixtures/after_streaming_fixture.cc',
+ 'test/auto_test/fixtures/after_streaming_fixture.h',
+ 'test/auto_test/fixtures/before_initialization_fixture.cc',
+ 'test/auto_test/fixtures/before_initialization_fixture.h',
+ 'test/auto_test/fixtures/before_streaming_fixture.cc',
+ 'test/auto_test/fixtures/before_streaming_fixture.h',
+ 'test/auto_test/standard/audio_processing_test.cc',
+ 'test/auto_test/standard/codec_before_streaming_test.cc',
+ 'test/auto_test/standard/codec_test.cc',
+ 'test/auto_test/standard/dtmf_test.cc',
+ 'test/auto_test/standard/external_media_test.cc',
+ 'test/auto_test/standard/file_before_streaming_test.cc',
+ 'test/auto_test/standard/file_test.cc',
+ 'test/auto_test/standard/hardware_before_initializing_test.cc',
+ 'test/auto_test/standard/hardware_before_streaming_test.cc',
+ 'test/auto_test/standard/hardware_test.cc',
+ 'test/auto_test/standard/mixing_test.cc',
+ 'test/auto_test/standard/neteq_stats_test.cc',
+ 'test/auto_test/standard/rtp_rtcp_before_streaming_test.cc',
+ 'test/auto_test/standard/rtp_rtcp_extensions.cc',
+ 'test/auto_test/standard/rtp_rtcp_test.cc',
+ 'test/auto_test/standard/voe_base_misc_test.cc',
+ 'test/auto_test/standard/video_sync_test.cc',
+ 'test/auto_test/standard/volume_test.cc',
+ 'test/auto_test/resource_manager.cc',
+ 'test/auto_test/voe_conference_test.cc',
+ 'test/auto_test/voe_cpu_test.cc',
+ 'test/auto_test/voe_cpu_test.h',
+ 'test/auto_test/voe_output_test.cc',
+ 'test/auto_test/voe_standard_test.cc',
+ 'test/auto_test/voe_standard_test.h',
+ 'test/auto_test/voe_stress_test.cc',
+ 'test/auto_test/voe_stress_test.h',
+ 'test/auto_test/voe_test_defines.h',
+ 'test/auto_test/voe_test_interface.h',
+ ],
+ 'conditions': [
+ ['OS=="android"', {
+ # some tests are not supported on android yet, exclude these tests.
+ 'sources!': [
+ 'test/auto_test/standard/hardware_before_streaming_test.cc',
+ ],
+ }],
+ ['enable_protobuf==1', {
+ 'defines': [
+ 'ENABLE_RTC_EVENT_LOG',
+ ],
+ }],
+ ],
+ # Disable warnings to enable Win64 build, issue 1323.
+ 'msvs_disabled_warnings': [
+ 4267, # size_t to int truncation.
+ ],
+ },
+ ],
+ }],
['OS=="android"', {
'targets': [
{
@@ -281,6 +293,6 @@
],
}],
], # conditions
- }], # include_tests
+ }], # include_tests==1
], # conditions
}
diff --git a/webrtc/voice_engine/voice_engine_defines.h b/webrtc/voice_engine/voice_engine_defines.h
index f78fb2c558..f4bdd957e9 100644
--- a/webrtc/voice_engine/voice_engine_defines.h
+++ b/webrtc/voice_engine/voice_engine_defines.h
@@ -57,9 +57,6 @@ enum { kVoiceEngineMaxIpPacketSizeBytes = 1500 }; // assumes Ethernet
enum { kVoiceEngineMaxModuleVersionSize = 960 };
-// Base
-enum { kVoiceEngineVersionMaxMessageSize = 1024 };
-
// Audio processing
const NoiseSuppression::Level kDefaultNsMode = NoiseSuppression::kModerate;
const GainControl::Mode kDefaultAgcMode =
@@ -135,7 +132,7 @@ enum { kVoiceEngineMaxRtpExtensionId = 14 };
stat.SetLastError(VE_FUNC_NOT_SUPPORTED); \
return -1;
-#if (defined(_DEBUG) && defined(_WIN32) && (_MSC_VER >= 1400))
+#if (!defined(NDEBUG) && defined(_WIN32) && (_MSC_VER >= 1400))
#include <windows.h>
#include <stdio.h>
#define DEBUG_PRINT(...) \
@@ -147,7 +144,7 @@ enum { kVoiceEngineMaxRtpExtensionId = 14 };
#else
// special fix for visual 2003
#define DEBUG_PRINT(exp) ((void)0)
-#endif // defined(_DEBUG) && defined(_WIN32)
+#endif // !defined(NDEBUG) && defined(_WIN32)
#define CHECK_CHANNEL(channel) \
if (CheckChannel(channel) == -1) \
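
Annotation: the debug-print guard drops the MSVC-specific _DEBUG macro in favor of the standard NDEBUG convention, so the check matches how the rest of the tree distinguishes debug from release builds. The resulting guard in isolation, with DEBUG_PRINT simplified to a plain variadic fprintf for illustration:

    #include <cstdio>

    // !defined(NDEBUG) is the portable "this is a debug build" test; _DEBUG is
    // only guaranteed when building against MSVC's debug CRT.
    #if !defined(NDEBUG) && defined(_WIN32)
    #define DEBUG_PRINT(...) fprintf(stderr, __VA_ARGS__)
    #else
    #define DEBUG_PRINT(...) ((void)0)
    #endif

    int main() {
      DEBUG_PRINT("only visible in Windows debug builds: %d\n", 42);
      return 0;
    }
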
diff --git a/webrtc/voice_engine/voice_engine_impl.cc b/webrtc/voice_engine/voice_engine_impl.cc
index c8761bc38d..7caf93343f 100644
--- a/webrtc/voice_engine/voice_engine_impl.cc
+++ b/webrtc/voice_engine/voice_engine_impl.cc
@@ -12,11 +12,14 @@
#include "webrtc/modules/audio_device/android/audio_device_template.h"
#include "webrtc/modules/audio_device/android/audio_record_jni.h"
#include "webrtc/modules/audio_device/android/audio_track_jni.h"
-#include "webrtc/modules/utility/interface/jvm_android.h"
+#include "webrtc/modules/utility/include/jvm_android.h"
#endif
-#include "webrtc/modules/audio_coding/main/include/audio_coding_module.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/modules/audio_coding/include/audio_coding_module.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h"
+#include "webrtc/voice_engine/channel_proxy.h"
#include "webrtc/voice_engine/voice_engine_impl.h"
namespace webrtc {
@@ -28,23 +31,6 @@ namespace webrtc {
static int32_t gVoiceEngineInstanceCounter = 0;
VoiceEngine* GetVoiceEngine(const Config* config, bool owns_config) {
-#if (defined _WIN32)
- HMODULE hmod = LoadLibrary(TEXT("VoiceEngineTestingDynamic.dll"));
-
- if (hmod) {
- typedef VoiceEngine* (*PfnGetVoiceEngine)(void);
- PfnGetVoiceEngine pfn =
- (PfnGetVoiceEngine)GetProcAddress(hmod, "GetVoiceEngine");
- if (pfn) {
- VoiceEngine* self = pfn();
- if (owns_config) {
- delete config;
- }
- return (self);
- }
- }
-#endif
-
VoiceEngineImpl* self = new VoiceEngineImpl(config, owns_config);
if (self != NULL) {
self->AddRef(); // First reference. Released in VoiceEngine::Delete.
@@ -77,6 +63,15 @@ int VoiceEngineImpl::Release() {
return new_ref;
}
+rtc::scoped_ptr<voe::ChannelProxy> VoiceEngineImpl::GetChannelProxy(
+ int channel_id) {
+ RTC_DCHECK(channel_id >= 0);
+ CriticalSectionScoped cs(crit_sec());
+ RTC_DCHECK(statistics().Initialized());
+ return rtc::scoped_ptr<voe::ChannelProxy>(
+ new voe::ChannelProxy(channel_manager().GetChannel(channel_id)));
+}
+
VoiceEngine* VoiceEngine::Create() {
Config* config = new Config();
return GetVoiceEngine(config, true);
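
Annotation: the new GetChannelProxy() backdoor returns a uniquely owned proxy built under the engine-wide lock from the ref-counted ChannelOwner. A sketch of that ownership shape, with std::unique_ptr and a toy ChannelOwner standing in for rtc::scoped_ptr and the real voe types:

    #include <memory>

    struct ChannelOwner {};  // toy stand-in; the real one ref-counts a channel

    class ChannelProxy {
     public:
      explicit ChannelProxy(const ChannelOwner& owner) : owner_(owner) {}

     private:
      ChannelOwner owner_;  // holding the owner keeps the channel alive
    };

    std::unique_ptr<ChannelProxy> MakeChannelProxy(const ChannelOwner& owner) {
      // One independently owned proxy per call; callers can delete it freely
      // without affecting the channel's own lifetime.
      return std::unique_ptr<ChannelProxy>(new ChannelProxy(owner));
    }
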
@@ -153,4 +148,12 @@ int VoiceEngine::SetAndroidObjects(void* javaVM, void* context) {
}
#endif
+std::string VoiceEngine::GetVersionString() {
+ std::string version = "VoiceEngine 4.1.0";
+#ifdef WEBRTC_EXTERNAL_TRANSPORT
+ version += " (External transport build)";
+#endif
+ return version;
+}
+
} // namespace webrtc
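
Annotation: GetVersionString() becomes the single source of the version text, shared by the rewritten GetVersion() and the new VoEBaseTest.GetVersion unit test earlier in this diff. A hedged sketch of the round trip that test checks (helper name is hypothetical):

    #include <cassert>
    #include <string>

    // GetVersion() must produce GetVersionString() plus a trailing newline.
    void CheckVersionRoundTrip(const std::string& version_string,
                               const char legacy_buffer[1024]) {
      assert(std::string(legacy_buffer) == version_string + "\n");
    }
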
diff --git a/webrtc/voice_engine/voice_engine_impl.h b/webrtc/voice_engine/voice_engine_impl.h
index c3b2e5ced9..f98f881214 100644
--- a/webrtc/voice_engine/voice_engine_impl.h
+++ b/webrtc/voice_engine/voice_engine_impl.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_VOICE_ENGINE_VOICE_ENGINE_IMPL_H
#define WEBRTC_VOICE_ENGINE_VOICE_ENGINE_IMPL_H
+#include "webrtc/base/scoped_ptr.h"
#include "webrtc/engine_configurations.h"
#include "webrtc/system_wrappers/include/atomic32.h"
#include "webrtc/voice_engine/voe_base_impl.h"
@@ -48,6 +49,9 @@
#endif
namespace webrtc {
+namespace voe {
+class ChannelProxy;
+} // namespace voe
class VoiceEngineImpl : public voe::SharedData, // Must be the first base class
public VoiceEngine,
@@ -128,6 +132,10 @@ class VoiceEngineImpl : public voe::SharedData, // Must be the first base class
// This implements the Release() method for all the inherited interfaces.
int Release() override;
+ // Backdoor to access a voe::Channel object without a channel ID. This is only
+ // to be used while refactoring the VoE API!
+ virtual rtc::scoped_ptr<voe::ChannelProxy> GetChannelProxy(int channel_id);
+
// This is *protected* so that FakeVoiceEngine can inherit from the class and
// manipulate the reference count. See: fake_voice_engine.h.
protected:
diff --git a/webrtc/webrtc.gyp b/webrtc/webrtc.gyp
index c9b3ab52db..0299623850 100644
--- a/webrtc/webrtc.gyp
+++ b/webrtc/webrtc.gyp
@@ -86,7 +86,6 @@
'test/metrics.gyp:*',
'test/test.gyp:*',
'test/webrtc_test_common.gyp:*',
- 'video_engine/video_engine_core_unittests.gyp:*',
'webrtc_tests',
],
}],
@@ -98,6 +97,7 @@
'sources': [
'audio_receive_stream.h',
'audio_send_stream.h',
+ 'audio_state.h',
'call.h',
'config.h',
'frame_callback.h',
diff --git a/webrtc/webrtc_examples.gyp b/webrtc/webrtc_examples.gyp
index 51a8265bab..44b2ca35be 100644..100755
--- a/webrtc/webrtc_examples.gyp
+++ b/webrtc/webrtc_examples.gyp
@@ -1,4 +1,4 @@
-# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+# Copyright (c) 2012 The WebRTC Project Authors. All rights reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
@@ -6,91 +6,415 @@
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
{
- 'includes': ['build/common.gypi'],
- 'targets': [],
+ 'includes': [
+ '../talk/build/common.gypi',
+ ],
+ 'targets': [
+ {
+ 'target_name': 'relayserver',
+ 'type': 'executable',
+ 'dependencies': [
+ '../talk/libjingle.gyp:libjingle',
+ '../talk/libjingle.gyp:libjingle_p2p',
+ ],
+ 'sources': [
+ 'examples/relayserver/relayserver_main.cc',
+ ],
+ }, # target relayserver
+ {
+ 'target_name': 'stunserver',
+ 'type': 'executable',
+ 'dependencies': [
+ '../talk/libjingle.gyp:libjingle',
+ '../talk/libjingle.gyp:libjingle_p2p',
+ ],
+ 'sources': [
+ 'examples/stunserver/stunserver_main.cc',
+ ],
+ }, # target stunserver
+ {
+ 'target_name': 'turnserver',
+ 'type': 'executable',
+ 'dependencies': [
+ '../talk/libjingle.gyp:libjingle',
+ '../talk/libjingle.gyp:libjingle_p2p',
+ ],
+ 'sources': [
+ 'examples/turnserver/turnserver_main.cc',
+ ],
+ }, # target turnserver
+ {
+ 'target_name': 'peerconnection_server',
+ 'type': 'executable',
+ 'sources': [
+ 'examples/peerconnection/server/data_socket.cc',
+ 'examples/peerconnection/server/data_socket.h',
+ 'examples/peerconnection/server/main.cc',
+ 'examples/peerconnection/server/peer_channel.cc',
+ 'examples/peerconnection/server/peer_channel.h',
+ 'examples/peerconnection/server/utils.cc',
+ 'examples/peerconnection/server/utils.h',
+ ],
+ 'dependencies': [
+ '<(webrtc_root)/common.gyp:webrtc_common',
+ '../talk/libjingle.gyp:libjingle',
+ ],
+ # TODO(ronghuawu): crbug.com/167187 fix size_t to int truncations.
+ 'msvs_disabled_warnings': [ 4309, ],
+ }, # target peerconnection_server
+ ],
'conditions': [
- ['OS=="android"', {
+ ['OS=="linux" or OS=="win"', {
'targets': [
{
- 'target_name': 'libwebrtcdemo-jni',
- 'type': 'loadable_module',
- 'dependencies': [
- '<(webrtc_root)/common.gyp:webrtc_common',
- '<(webrtc_root)/modules/modules.gyp:video_render_module_internal_impl',
- '<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers_default',
- '<(webrtc_root)/test/test.gyp:channel_transport',
- '<(webrtc_root)/voice_engine/voice_engine.gyp:voice_engine',
- ],
+ 'target_name': 'peerconnection_client',
+ 'type': 'executable',
'sources': [
- 'examples/android/media_demo/jni/jni_helpers.cc',
- 'examples/android/media_demo/jni/on_load.cc',
- 'examples/android/media_demo/jni/voice_engine_jni.cc',
+ 'examples/peerconnection/client/conductor.cc',
+ 'examples/peerconnection/client/conductor.h',
+ 'examples/peerconnection/client/defaults.cc',
+ 'examples/peerconnection/client/defaults.h',
+ 'examples/peerconnection/client/peer_connection_client.cc',
+ 'examples/peerconnection/client/peer_connection_client.h',
+ ],
+ 'dependencies': [
+ '../talk/libjingle.gyp:libjingle_peerconnection',
+ '<(webrtc_root)/system_wrappers/system_wrappers.gyp:field_trial_default',
+ '<@(libjingle_tests_additional_deps)',
],
'conditions': [
- ['build_icu==1', {
+ ['build_json==1', {
'dependencies': [
- '<(DEPTH)/third_party/icu/icu.gyp:icuuc',
+ '<(DEPTH)/third_party/jsoncpp/jsoncpp.gyp:jsoncpp',
],
}],
+ # TODO(ronghuawu): Move these files to a win/ directory then they
+ # can be excluded automatically.
+ ['OS=="win"', {
+ 'sources': [
+ 'examples/peerconnection/client/flagdefs.h',
+ 'examples/peerconnection/client/main.cc',
+ 'examples/peerconnection/client/main_wnd.cc',
+ 'examples/peerconnection/client/main_wnd.h',
+ ],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'SubSystem': '2', # Windows
+ },
+ },
+ }], # OS=="win"
+ ['OS=="win" and clang==1', {
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'AdditionalOptions': [
+ # Disable warnings failing when compiling with Clang on Windows.
+ # https://bugs.chromium.org/p/webrtc/issues/detail?id=5366
+ '-Wno-reorder',
+ '-Wno-unused-function',
+ ],
+ },
+ },
+ }], # OS=="win" and clang==1
+ ['OS=="linux"', {
+ 'sources': [
+ 'examples/peerconnection/client/linux/main.cc',
+ 'examples/peerconnection/client/linux/main_wnd.cc',
+ 'examples/peerconnection/client/linux/main_wnd.h',
+ ],
+ 'cflags': [
+ '<!@(pkg-config --cflags glib-2.0 gobject-2.0 gtk+-2.0)',
+ ],
+ 'link_settings': {
+ 'ldflags': [
+ '<!@(pkg-config --libs-only-L --libs-only-other glib-2.0'
+ ' gobject-2.0 gthread-2.0 gtk+-2.0)',
+ ],
+ 'libraries': [
+ '<!@(pkg-config --libs-only-l glib-2.0 gobject-2.0'
+ ' gthread-2.0 gtk+-2.0)',
+ '-lX11',
+ '-lXcomposite',
+ '-lXext',
+ '-lXrender',
+ ],
+ },
+ }], # OS=="linux"
+ ], # conditions
+ }, # target peerconnection_client
+ ], # targets
+ }], # OS=="linux" or OS=="win"
+
+ ['OS=="ios" or (OS=="mac" and target_arch!="ia32")', {
+ 'targets': [
+ {
+ 'target_name': 'apprtc_common',
+ 'type': 'static_library',
+ 'dependencies': [
+ '<(webrtc_root)/system_wrappers/system_wrappers.gyp:field_trial_default',
+ '../talk/libjingle.gyp:libjingle_peerconnection_objc',
],
- 'variables': {
- # This library uses native JNI exports; tell GYP so that the
- # required symbols will be kept.
- 'use_native_jni_exports': 1,
+ 'sources': [
+ 'examples/objc/AppRTCDemo/common/ARDUtilities.h',
+ 'examples/objc/AppRTCDemo/common/ARDUtilities.m',
+ ],
+ 'include_dirs': [
+ 'examples/objc/AppRTCDemo/common',
+ ],
+ 'direct_dependent_settings': {
+ 'include_dirs': [
+ 'examples/objc/AppRTCDemo/common',
+ ],
},
+ 'conditions': [
+ ['OS=="mac"', {
+ 'xcode_settings': {
+ 'MACOSX_DEPLOYMENT_TARGET' : '10.8',
+ },
+ }],
+ ],
'link_settings': {
- 'libraries': [
- '-llog',
- '-lGLESv2',
- '-lOpenSLES',
+ 'xcode_settings': {
+ 'OTHER_LDFLAGS': [
+ '-framework QuartzCore',
+ ],
+ },
+ },
+ },
+ {
+ 'target_name': 'apprtc_signaling',
+ 'type': 'static_library',
+ 'dependencies': [
+ 'apprtc_common',
+ '../talk/libjingle.gyp:libjingle_peerconnection_objc',
+ 'socketrocket',
+ ],
+ 'sources': [
+ 'examples/objc/AppRTCDemo/ARDAppClient.h',
+ 'examples/objc/AppRTCDemo/ARDAppClient.m',
+ 'examples/objc/AppRTCDemo/ARDAppClient+Internal.h',
+ 'examples/objc/AppRTCDemo/ARDAppEngineClient.h',
+ 'examples/objc/AppRTCDemo/ARDAppEngineClient.m',
+ 'examples/objc/AppRTCDemo/ARDBitrateTracker.h',
+ 'examples/objc/AppRTCDemo/ARDBitrateTracker.m',
+ 'examples/objc/AppRTCDemo/ARDCEODTURNClient.h',
+ 'examples/objc/AppRTCDemo/ARDCEODTURNClient.m',
+ 'examples/objc/AppRTCDemo/ARDJoinResponse.h',
+ 'examples/objc/AppRTCDemo/ARDJoinResponse.m',
+ 'examples/objc/AppRTCDemo/ARDJoinResponse+Internal.h',
+ 'examples/objc/AppRTCDemo/ARDMessageResponse.h',
+ 'examples/objc/AppRTCDemo/ARDMessageResponse.m',
+ 'examples/objc/AppRTCDemo/ARDMessageResponse+Internal.h',
+ 'examples/objc/AppRTCDemo/ARDRoomServerClient.h',
+ 'examples/objc/AppRTCDemo/ARDSDPUtils.h',
+ 'examples/objc/AppRTCDemo/ARDSDPUtils.m',
+ 'examples/objc/AppRTCDemo/ARDSignalingChannel.h',
+ 'examples/objc/AppRTCDemo/ARDSignalingMessage.h',
+ 'examples/objc/AppRTCDemo/ARDSignalingMessage.m',
+ 'examples/objc/AppRTCDemo/ARDStatsBuilder.h',
+ 'examples/objc/AppRTCDemo/ARDStatsBuilder.m',
+ 'examples/objc/AppRTCDemo/ARDTURNClient.h',
+ 'examples/objc/AppRTCDemo/ARDWebSocketChannel.h',
+ 'examples/objc/AppRTCDemo/ARDWebSocketChannel.m',
+ 'examples/objc/AppRTCDemo/RTCICECandidate+JSON.h',
+ 'examples/objc/AppRTCDemo/RTCICECandidate+JSON.m',
+ 'examples/objc/AppRTCDemo/RTCICEServer+JSON.h',
+ 'examples/objc/AppRTCDemo/RTCICEServer+JSON.m',
+ 'examples/objc/AppRTCDemo/RTCMediaConstraints+JSON.h',
+ 'examples/objc/AppRTCDemo/RTCMediaConstraints+JSON.m',
+ 'examples/objc/AppRTCDemo/RTCSessionDescription+JSON.h',
+ 'examples/objc/AppRTCDemo/RTCSessionDescription+JSON.m',
+ ],
+ 'include_dirs': [
+ 'examples/objc/AppRTCDemo',
+ ],
+ 'direct_dependent_settings': {
+ 'include_dirs': [
+ 'examples/objc/AppRTCDemo',
],
- }
+ },
+ 'export_dependent_settings': [
+ '../talk/libjingle.gyp:libjingle_peerconnection_objc',
+ ],
+ 'conditions': [
+ ['OS=="mac"', {
+ 'xcode_settings': {
+ 'MACOSX_DEPLOYMENT_TARGET': '10.8',
+ },
+ }],
+ ],
},
{
- 'target_name': 'WebRTCDemo',
- 'type': 'none',
+ 'target_name': 'AppRTCDemo',
+ 'type': 'executable',
+ 'product_name': 'AppRTCDemo',
+ 'mac_bundle': 1,
'dependencies': [
- 'libwebrtcdemo-jni',
- '<(modules_java_gyp_path):*',
- ],
- 'actions': [
- {
- # TODO(yujie.mao): Convert building of the demo to a proper GYP
- # target so this action is not needed once chromium's
- # apk-building machinery can be used. (crbug.com/225101)
- 'action_name': 'build_webrtcdemo_apk',
- 'variables': {
- 'android_webrtc_demo_root': '<(webrtc_root)/examples/android/media_demo',
- 'ant_log': '../../../<(INTERMEDIATE_DIR)/ant.log', # ../../.. to compensate for the cd below.
+ 'apprtc_common',
+ 'apprtc_signaling',
+ ],
+ 'conditions': [
+ ['OS=="ios"', {
+ 'mac_bundle_resources': [
+ 'examples/objc/AppRTCDemo/ios/resources/iPhone5@2x.png',
+ 'examples/objc/AppRTCDemo/ios/resources/iPhone6@2x.png',
+ 'examples/objc/AppRTCDemo/ios/resources/iPhone6p@3x.png',
+ 'examples/objc/AppRTCDemo/ios/resources/Roboto-Regular.ttf',
+ 'examples/objc/AppRTCDemo/ios/resources/ic_call_end_black_24dp.png',
+ 'examples/objc/AppRTCDemo/ios/resources/ic_call_end_black_24dp@2x.png',
+ 'examples/objc/AppRTCDemo/ios/resources/ic_clear_black_24dp.png',
+ 'examples/objc/AppRTCDemo/ios/resources/ic_clear_black_24dp@2x.png',
+ 'examples/objc/AppRTCDemo/ios/resources/ic_switch_video_black_24dp.png',
+ 'examples/objc/AppRTCDemo/ios/resources/ic_switch_video_black_24dp@2x.png',
+ 'examples/objc/Icon.png',
+ ],
+ 'sources': [
+ 'examples/objc/AppRTCDemo/ios/ARDAppDelegate.h',
+ 'examples/objc/AppRTCDemo/ios/ARDAppDelegate.m',
+ 'examples/objc/AppRTCDemo/ios/ARDMainView.h',
+ 'examples/objc/AppRTCDemo/ios/ARDMainView.m',
+ 'examples/objc/AppRTCDemo/ios/ARDMainViewController.h',
+ 'examples/objc/AppRTCDemo/ios/ARDMainViewController.m',
+ 'examples/objc/AppRTCDemo/ios/ARDStatsView.h',
+ 'examples/objc/AppRTCDemo/ios/ARDStatsView.m',
+ 'examples/objc/AppRTCDemo/ios/ARDVideoCallView.h',
+ 'examples/objc/AppRTCDemo/ios/ARDVideoCallView.m',
+ 'examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.h',
+ 'examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.m',
+ 'examples/objc/AppRTCDemo/ios/AppRTCDemo-Prefix.pch',
+ 'examples/objc/AppRTCDemo/ios/UIImage+ARDUtilities.h',
+ 'examples/objc/AppRTCDemo/ios/UIImage+ARDUtilities.m',
+ 'examples/objc/AppRTCDemo/ios/main.m',
+ ],
+ 'xcode_settings': {
+ 'INFOPLIST_FILE': 'examples/objc/AppRTCDemo/ios/Info.plist',
},
- 'inputs' : [
- '<(PRODUCT_DIR)/lib.java/audio_device_module_java.jar',
- '<(PRODUCT_DIR)/libwebrtcdemo-jni.so',
- '<!@(find <(android_webrtc_demo_root)/src -name "*.java")',
- '<!@(find <(android_webrtc_demo_root)/res -type f)',
- '<(android_webrtc_demo_root)/AndroidManifest.xml',
- '<(android_webrtc_demo_root)/build.xml',
- '<(android_webrtc_demo_root)/project.properties',
+ }],
+ ['OS=="mac"', {
+ 'sources': [
+ 'examples/objc/AppRTCDemo/mac/APPRTCAppDelegate.h',
+ 'examples/objc/AppRTCDemo/mac/APPRTCAppDelegate.m',
+ 'examples/objc/AppRTCDemo/mac/APPRTCViewController.h',
+ 'examples/objc/AppRTCDemo/mac/APPRTCViewController.m',
+ 'examples/objc/AppRTCDemo/mac/main.m',
],
- 'outputs': ['<(PRODUCT_DIR)/WebRTCDemo-debug.apk'],
- 'action': [
- 'bash', '-ec',
- 'rm -fr <(_outputs) <(android_webrtc_demo_root)/{bin,libs,gen,obj} && '
- 'mkdir -p <(INTERMEDIATE_DIR) && ' # Must happen _before_ the cd below
- 'mkdir -p <(android_webrtc_demo_root)/libs/<(android_app_abi) && '
- 'cp <(PRODUCT_DIR)/lib.java/audio_device_module_java.jar <(android_webrtc_demo_root)/libs/ &&'
- '<(android_strip) -o <(android_webrtc_demo_root)/libs/<(android_app_abi)/libwebrtcdemo-jni.so <(PRODUCT_DIR)/libwebrtcdemo-jni.so && '
- 'cd <(android_webrtc_demo_root) && '
- '{ ANDROID_SDK_ROOT=<(android_sdk_root) '
- 'ant debug > <(ant_log) 2>&1 || '
- ' { cat <(ant_log) ; exit 1; } } && '
- 'cd - > /dev/null && '
- 'cp <(android_webrtc_demo_root)/bin/WebRTCDemo-debug.apk <(_outputs)'
+ 'xcode_settings': {
+ 'CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS': 'NO',
+ 'INFOPLIST_FILE': 'examples/objc/AppRTCDemo/mac/Info.plist',
+ 'MACOSX_DEPLOYMENT_TARGET': '10.8',
+ 'OTHER_LDFLAGS': [
+ '-framework AVFoundation',
+ ],
+ },
+ }],
+ ['target_arch=="ia32"', {
+ 'dependencies' : [
+ '<(DEPTH)/testing/iossim/iossim.gyp:iossim#host',
+ ],
+ }],
+ ],
+ }, # target AppRTCDemo
+ {
+ # TODO(tkchin): move this into the real third party location and
+ # have it mirrored on chrome infra.
+ 'target_name': 'socketrocket',
+ 'type': 'static_library',
+ 'sources': [
+ 'examples/objc/AppRTCDemo/third_party/SocketRocket/SRWebSocket.h',
+ 'examples/objc/AppRTCDemo/third_party/SocketRocket/SRWebSocket.m',
+ ],
+ 'conditions': [
+ ['OS=="mac"', {
+ 'xcode_settings': {
+ # SocketRocket autosynthesizes some properties. Disable the
+ # warning so we can compile successfully.
+ 'CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS': 'NO',
+ 'MACOSX_DEPLOYMENT_TARGET': '10.8',
+ # SRWebSocket.m uses code with partial availability.
+ # https://code.google.com/p/webrtc/issues/detail?id=4695
+ 'WARNING_CFLAGS!': ['-Wpartial-availability'],
+ },
+ }],
+ ],
+ 'direct_dependent_settings': {
+ 'include_dirs': [
+ 'examples/objc/AppRTCDemo/third_party/SocketRocket',
+ ],
+ },
+ 'xcode_settings': {
+ 'CLANG_ENABLE_OBJC_ARC': 'YES',
+ 'WARNING_CFLAGS': [
+ '-Wno-deprecated-declarations',
+ '-Wno-nonnull',
+ ],
+ },
+ 'link_settings': {
+ 'xcode_settings': {
+ 'OTHER_LDFLAGS': [
+ '-framework CFNetwork',
+ '-licucore',
],
},
+ },
+ }, # target socketrocket
+ ], # targets
+ }], # OS=="ios" or (OS=="mac" and target_arch!="ia32")
+
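Two GYP idioms in the Objective-C block above deserve a note. A key ending in
'!' (as in 'WARNING_CFLAGS!' under socketrocket) excludes the listed values
from the final merged list instead of appending them, and
'direct_dependent_settings' pushes settings onto any target that lists this
one in its 'dependencies'. A minimal sketch of a consumer (only 'socketrocket'
is real here; the target and source names are made up for illustration):

  {
    'target_name': 'socketrocket_consumer',  # hypothetical
    'type': 'static_library',
    'dependencies': [ 'socketrocket' ],
    # Picks up examples/objc/AppRTCDemo/third_party/SocketRocket on its
    # include path via socketrocket's direct_dependent_settings.
    'sources': [ 'uses_srwebsocket.mm' ],    # hypothetical
  },
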
+ ['OS=="android"', {
+ 'targets': [
+ {
+ 'target_name': 'AppRTCDemo',
+ 'type': 'none',
+ 'dependencies': [
+ '../talk/libjingle.gyp:libjingle_peerconnection_java',
],
+ 'variables': {
+ 'apk_name': 'AppRTCDemo',
+ 'java_in_dir': 'examples/androidapp',
+ 'has_java_resources': 1,
+ 'resource_dir': 'examples/androidapp/res',
+ 'R_package': 'org.appspot.apprtc',
+ 'R_package_relpath': 'org/appspot/apprtc',
+ 'input_jars_paths': [
+ 'examples/androidapp/third_party/autobanh/autobanh.jar',
+ ],
+ 'library_dexed_jars_paths': [
+ 'examples/androidapp/third_party/autobanh/autobanh.jar',
+ ],
+ 'native_lib_target': 'libjingle_peerconnection_so',
+ 'add_to_dependents_classpaths': 1,
+ },
+ 'includes': [ '../build/java_apk.gypi' ],
+ }, # target AppRTCDemo
+
+ {
+ # AppRTCDemo creates a .jar as a side effect. Any java targets
+ # that need that .jar in their classpath should depend on this
+ # target, AppRTCDemo_apk. Dependents of AppRTCDemo_apk receive its
+ # jar path in the variable 'apk_output_jar_path'. This target
+ # should only be used by targets that instrument AppRTCDemo_apk
+ # (a hypothetical usage sketch follows this file's diff).
+ 'target_name': 'AppRTCDemo_apk',
+ 'type': 'none',
+ 'dependencies': [
+ 'AppRTCDemo',
+ ],
+ 'includes': [ '../build/apk_fake_jar.gypi' ],
+ }, # target AppRTCDemo_apk
+
+ {
+ 'target_name': 'AppRTCDemoTest',
+ 'type': 'none',
+ 'dependencies': [
+ 'AppRTCDemo_apk',
+ ],
+ 'variables': {
+ 'apk_name': 'AppRTCDemoTest',
+ 'java_in_dir': 'examples/androidtests',
+ 'is_test_apk': 1,
+ },
+ 'includes': [ '../build/java_apk.gypi' ],
},
- ],
- }],
+ ], # targets
+ }], # OS=="android"
],
}
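
As the AppRTCDemo_apk comment above notes, dependents receive the demo's jar
path in the variable 'apk_output_jar_path'. A hypothetical instrumentation
target consuming it could look like the following (everything except
'AppRTCDemo_apk' and 'apk_output_jar_path' is made up for illustration):

  {
    'target_name': 'apprtc_instrumentation',  # hypothetical
    'type': 'none',
    'dependencies': [ 'AppRTCDemo_apk' ],
    'variables': {
      # '>(...)' is late expansion: it resolves after dependency
      # processing, once AppRTCDemo_apk has exported the variable.
      'input_jars_paths': [ '>(apk_output_jar_path)' ],
    },
  },
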
diff --git a/webrtc/webrtc_tests.gypi b/webrtc/webrtc_tests.gypi
index 811bbd772f..e0bf276d4f 100644
--- a/webrtc/webrtc_tests.gypi
+++ b/webrtc/webrtc_tests.gypi
@@ -32,6 +32,11 @@
'<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
],
}],
+ ['OS=="ios"', {
+ 'dependencies': [
+ 'api/api_tests.gyp:rtc_api_objc_test',
+ ]
+ }]
],
},
{
@@ -153,17 +158,27 @@
'sources': [
'audio/audio_receive_stream_unittest.cc',
'audio/audio_send_stream_unittest.cc',
+ 'audio/audio_state_unittest.cc',
+ 'call/bitrate_allocator_unittest.cc',
'call/bitrate_estimator_tests.cc',
'call/call_unittest.cc',
'call/packet_injection_tests.cc',
'test/common_unittest.cc',
'test/testsupport/metrics/video_metrics_unittest.cc',
+ 'video/call_stats_unittest.cc',
+ 'video/encoder_state_feedback_unittest.cc',
'video/end_to_end_tests.cc',
+ 'video/overuse_frame_detector_unittest.cc',
+ 'video/payload_router_unittest.cc',
+ 'video/report_block_stats_unittest.cc',
'video/send_statistics_proxy_unittest.cc',
+ 'video/stream_synchronization_unittest.cc',
'video/video_capture_input_unittest.cc',
'video/video_decoder_unittest.cc',
'video/video_encoder_unittest.cc',
'video/video_send_stream_tests.cc',
+ 'video/vie_codec_unittest.cc',
+ 'video/vie_remb_unittest.cc',
],
'dependencies': [
'<(DEPTH)/testing/gmock.gyp:gmock',
@@ -204,15 +219,18 @@
'type': '<(gtest_target_type)',
'sources': [
'call/call_perf_tests.cc',
+ 'call/rampup_tests.cc',
+ 'call/rampup_tests.h',
'modules/audio_coding/neteq/test/neteq_performance_unittest.cc',
+ 'modules/audio_processing/audio_processing_performance_unittest.cc',
'modules/remote_bitrate_estimator/remote_bitrate_estimators_test.cc',
'video/full_stack.cc',
- 'video/rampup_tests.cc',
- 'video/rampup_tests.h',
],
'dependencies': [
'<(DEPTH)/testing/gmock.gyp:gmock',
'<(DEPTH)/testing/gtest.gyp:gtest',
+ '<(webrtc_root)/modules/modules.gyp:audio_processing',
+ '<(webrtc_root)/modules/modules.gyp:audioproc_test_utils',
'<(webrtc_root)/modules/modules.gyp:video_capture',
'<(webrtc_root)/test/test.gyp:channel_transport',
'<(webrtc_root)/voice_engine/voice_engine.gyp:voice_engine',